diff --git a/.github/labeler.yml b/.github/labeler.yml
index f3ca5a77c40..47c9096fae2 100644
--- a/.github/labeler.yml
+++ b/.github/labeler.yml
@@ -2,7 +2,6 @@
# Add project labels to PRs
# Invocation is done by .github/workflows/prlabeler.yml
-# changes in the documentation
'Documentation':
- changed-files:
- any-glob-to-any-file: 'book_source/**'
@@ -12,44 +11,36 @@
- any-glob-to-any-file: 'DEV-INTRO.md'
- any-glob-to-any-file: 'README.md'
-# Add 'Dockerfile' label to any changes in the docker directory
'Dockerfile':
- changed-files:
- any-glob-to-any-file: 'docker/**'
-
-# Add 'Website' label to any changes in the web directory
-'Website':
+'Web Frontend':
- changed-files:
- any-glob-to-any-file: 'web/**'
-# Add 'Base' label to any changes in the base directory
'Base':
- changed-files:
- any-glob-to-any-file: 'base/**'
-# Add 'Models' label to any changes in the models directory
'Models':
- changed-files:
- any-glob-to-any-file: 'models/**'
-# Add 'Modules' label to any changes in the modules directory
'Modules':
- changed-files:
- any-glob-to-any-file: 'modules/**'
-# Add 'GitHub Actions' label to any changes in the .github/workflows directory
'GitHub Actions':
- changed-files:
- any-glob-to-any-file: '.github/workflows/**'
-# Add 'Scripts' label to any changes in the scripts directory
-
+# TODO: scripts folder is due for a reorganization by theme;
+# update these tags when that is done
'Scripts':
- changed-files:
- any-glob-to-any-file: 'scripts/**'
-# Add 'Tests' label to any changes in the tests directory
'Tests':
- all:
- changed-files:
diff --git a/.github/workflows/check.yml b/.github/workflows/check.yml
index 5f155a4ccbe..208a83af1e4 100644
--- a/.github/workflows/check.yml
+++ b/.github/workflows/check.yml
@@ -23,7 +23,6 @@ jobs:
env:
GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }}
- _R_CHECK_LENGTH_1_CONDITION_: true
_R_CHECK_LENGTH_1_LOGIC2_: true
# Avoid compilation check warnings that come from the system Makevars
# See https://stat.ethz.ch/pipermail/r-package-devel/2019q2/003898.html
@@ -67,3 +66,11 @@ jobs:
- name: check for out-of-date files
uses: infotroph/tree-is-clean@v1
+
+ - name: check package versions
+ run: |
+ tag=$(\
+ curl -s https://api.github.com/repos/pecanproject/pecan/releases/latest \
+ | sed -n 's!.*"tag_name": "\(.*\)".*!\1!p')
+ git fetch --tags --depth=1
+ ./scripts/ensure_version_bump.sh "$tag"
diff --git a/.github/workflows/ci-weekly.yml b/.github/workflows/ci-weekly.yml
index b2b24d8a86e..1399c1cbba3 100644
--- a/.github/workflows/ci-weekly.yml
+++ b/.github/workflows/ci-weekly.yml
@@ -13,8 +13,8 @@ jobs:
fail-fast: false
matrix:
R:
- - "4.1"
- "4.3"
+ - "4.5"
- "devel"
uses: ./.github/workflows/test.yml
with:
@@ -26,8 +26,8 @@ jobs:
fail-fast: false
matrix:
R:
- - "4.1"
- "4.3"
+ - "4.5"
- "devel"
uses: ./.github/workflows/check.yml
with:
@@ -40,8 +40,8 @@ jobs:
fail-fast: false
matrix:
R:
- - "4.1"
- "4.3"
+ - "4.5"
- "devel"
uses: ./.github/workflows/sipnet.yml
with:
diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml
index 63850e4a150..7e34b43504c 100644
--- a/.github/workflows/docker.yml
+++ b/.github/workflows/docker.yml
@@ -20,18 +20,18 @@ on:
type: choice
default: "$DEFAULT_R_VERSION"
options:
- - 4.1
- 4.2
- 4.3
- 4.4
+ - 4.5
- devel
schedule:
# 1:30 AM UTC, different R version each day
- cron: '30 1 * * 1' # Rdevel
- - cron: '30 1 * * 2' # R4.4
+ - cron: '30 1 * * 2' # R4.2
- cron: '30 1 * * 3' # R4.3
- - cron: '30 1 * * 4' # R4.2
- - cron: '30 1 * * 5' # R4.1
+ - cron: '30 1 * * 4' # R4.4
+ - cron: '30 1 * * 5' # R4.5
jobs:
# ----------------------------------------------------------------------
@@ -49,23 +49,33 @@ jobs:
run: echo "R_VERSION=devel" >> "$GITHUB_OUTPUT"
- id: tue
if: github.event.schedule == '30 1 * * 2'
- run: echo "R_VERSION=4.4" >> "$GITHUB_OUTPUT"
+ run: echo "R_VERSION=4.2" >> "$GITHUB_OUTPUT"
- id: wed
if: github.event.schedule == '30 1 * * 3'
run: echo "R_VERSION=4.3" >> "$GITHUB_OUTPUT"
- id: thu
if: github.event.schedule == '30 1 * * 4'
- run: echo "R_VERSION=4.2" >> "$GITHUB_OUTPUT"
+ run: echo "R_VERSION=4.4" >> "$GITHUB_OUTPUT"
- id: fri
if: github.event.schedule == '30 1 * * 5'
- run: echo "R_VERSION=4.1" >> "$GITHUB_OUTPUT"
+ run: echo "R_VERSION=4.5" >> "$GITHUB_OUTPUT"
- id: default
if: github.event_name != 'schedule'
run: echo "R_VERSION=${{ github.event.inputs.r_version || env.DEFAULT_R_VERSION }}" >> "$GITHUB_OUTPUT"
+ - id: platform
+ # upstream rocker/tidyverse image is only on ARM for R 4.5.* (so far)
+ shell: bash
+ run: |
+ if [[ "${{ join(steps.*.outputs.R_VERSION, '') }}" == "4.5" ]]; then
+ echo "PLATFORMS=linux/amd64,linux/arm64" >> "$GITHUB_OUTPUT"
+ else
+ echo "PLATFORMS=linux/amd64" >> "$GITHUB_OUTPUT"
+ fi
outputs:
# Note: "steps.*" seems to mean "all step ids", not "all steps"
# If seeing weird results here, check that all steps above have an id set.
R_VERSION: ${{ join(steps.*.outputs.R_VERSION, '') }}
+ PLATFORMS: ${{ join(steps.platform.outputs.PLATFORMS, '') }}
# ----------------------------------------------------------------------
# depends image has all the dependencies installed
@@ -80,7 +90,7 @@ jobs:
build-context: docker/depends
dockerfile: docker/depends/Dockerfile
r-version: ${{ needs.rversion.outputs.R_VERSION }}
- platforms: "linux/amd64"
+ platforms: ${{ needs.rversion.outputs.PLATFORMS }}
secrets: inherit
# ----------------------------------------------------------------------
@@ -95,7 +105,7 @@ jobs:
dockerfile: docker/base/Dockerfile
r-version: ${{ needs.rversion.outputs.R_VERSION }}
parent-image: "depends"
- platforms: "linux/amd64"
+ platforms: ${{ needs.rversion.outputs.PLATFORMS }}
secrets: inherit
# ----------------------------------------------------------------------
@@ -110,6 +120,7 @@ jobs:
dockerfile: docker/models/Dockerfile
r-version: ${{ needs.rversion.outputs.R_VERSION }}
parent-image: "base"
+ platforms: ${{ needs.rversion.outputs.PLATFORMS }}
secrets: inherit
# ----------------------------------------------------------------------
@@ -131,37 +142,31 @@ jobs:
- name: basgra
CONTEXT: models/basgra
DOCKERFILE: models/basgra/Dockerfile
- PLATFORM: "linux/amd64"
MODEL: basgra
VERSION: BASGRA_N_v1
- name: biocro
CONTEXT: models/biocro
DOCKERFILE: models/biocro/Dockerfile
- PLATFORM: "linux/amd64"
MODEL: biocro
VERSION: "0.95"
# - name: ed2_2.2.0
# CONTEXT: models/ed
# DOCKERFILE: models/ed/Dockerfile
- # PLATFORM: "linux/amd64"
# MODEL: ed2
# VERSION: "2.2.0"
- name: ed2_git
CONTEXT: models/ed
DOCKERFILE: models/ed/Dockerfile
- PLATFORM: "linux/amd64"
MODEL: ed2
VERSION: "git"
- name: maespa
CONTEXT: models/maespa
DOCKERFILE: models/maespa/Dockerfile
- PLATFORM: "linux/amd64"
MODEL: maespa
VERSION: "git"
- name: sipnet
CONTEXT: models/sipnet
DOCKERFILE: models/sipnet/Dockerfile
- PLATFORM: "linux/amd64"
MODEL: sipnet
VERSION: "git"
uses: ./.github/workflows/docker-build-image.yml
@@ -172,7 +177,7 @@ jobs:
r-version: ${{ needs.rversion.outputs.R_VERSION }}
parent-image: "models"
model-version: ${{ matrix.VERSION }}
- platforms: ${{ matrix.PLATFORM }}
+ platforms: ${{ needs.rversion.outputs.PLATFORMS }}
secrets: inherit
# ----------------------------------------------------------------------
@@ -191,22 +196,20 @@ jobs:
- name: docs
CONTEXT: .
DOCKERFILE: docker/docs/Dockerfile
- PLATFORM: "linux/amd64"
- name: executor
CONTEXT: docker/executor
DOCKERFILE: docker/executor/Dockerfile
- PLATFORM: "linux/amd64"
- name: api
CONTEXT: apps/api
DOCKERFILE: apps/api/Dockerfile
- PLATFORM: "linux/amd64"
uses: ./.github/workflows/docker-build-image.yml
with:
image-name: ${{ matrix.name }}
build-context: ${{ matrix.CONTEXT }}
dockerfile: ${{ matrix.DOCKERFILE }}
r-version: ${{ needs.rversion.outputs.R_VERSION }}
- platforms: ${{ matrix.PLATFORM }}
+ parent-image: "base"
+ platforms: ${{ needs.rversion.outputs.PLATFORMS }}
secrets: inherit
# ----------------------------------------------------------------------
@@ -229,7 +232,7 @@ jobs:
DOCKERFILE: docker/web/Dockerfile
PLATFORM: "linux/amd64,linux/arm64"
- name: shiny-dbsync
- CONTEXT: .
+ CONTEXT: shiny/dbsync
DOCKERFILE: shiny/dbsync/Dockerfile
PLATFORM: "linux/amd64"
- name: data
diff --git a/.github/workflows/pkgdown.yml b/.github/workflows/pkgdown.yml
index d068b000cba..563a70db88e 100644
--- a/.github/workflows/pkgdown.yml
+++ b/.github/workflows/pkgdown.yml
@@ -16,15 +16,16 @@ jobs:
runs-on: ubuntu-latest
env:
GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }}
+ PECAN_GIT_BRANCH: ${{ github.event_name == 'pull_request' && github.base_ref || github.ref_name }}
container:
- image: pecan/depends:develop
+ image: pecan/base:develop
steps:
# Checkout source code
- uses: actions/checkout@v4
- # Install pkgdown
+ # Install dependencies
- name: Install dependencies
run: Rscript -e 'install.packages("pkgdown")'
diff --git a/.github/workflows/render-quarto.yml b/.github/workflows/render-quarto.yml
new file mode 100644
index 00000000000..636d67400b1
--- /dev/null
+++ b/.github/workflows/render-quarto.yml
@@ -0,0 +1,51 @@
+name: Render Demo Notebooks
+
+on:
+ push:
+ pull_request:
+ workflow_dispatch:
+
+jobs:
+ render-notebook:
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+
+ - name: Build PEcAn base Docker image
+ run: |
+ docker build -t pecan-base-ci:latest -f docker/base/Dockerfile .
+
+ - name: Set up Quarto
+ uses: quarto-dev/quarto-actions/setup@v2
+ with:
+ version: '1.8.17'
+
+ - name: Check R code style with styler
+ run: |
+ docker run --rm -v ${{ github.workspace }}:/work pecan-base-ci:latest bash -c "Rscript -e '
+ if (!requireNamespace(\"styler\", quietly = TRUE)) install.packages(\"styler\");
+ files_to_check <- c(
+ \"/work/documentation/tutorials/Demo_1_Basic_Run/run_pecan.qmd\",
+ \"/work/documentation/tutorials/Demo_02_Uncertainty_Analysis/uncertainty.qmd\"
+ );
+ unstyled <- styler::style_file(files_to_check);
+ if (is.data.frame(unstyled) && \"changed\" %in% names(unstyled) && any(unstyled\$changed == TRUE)) {
+ print(unstyled[unstyled\$changed, ]);
+ stop(\"One or more .qmd files are not styled. Please run styler::style_file() locally and commit the changes.\");
+ } else {
+ cat(\"All checked .qmd files are styled.\\n\");
+ }
+ '"
+
+ - name: Render Demo 1 notebook to HTML
+ run: |
+ docker run --rm -v ${{ github.workspace }}:/work pecan-base-ci:latest bash -c "
+ quarto render /work/documentation/tutorials/Demo_1_Basic_Run/run_pecan.qmd --to html
+ "
+ - name: Render Demo 2 notebook to HTML
+ run: |
+ docker run --rm -v ${{ github.workspace }}:/work pecan-base-ci:latest bash -c "
+ quarto render /work/documentation/tutorials/Demo_02_Uncertainty_Analysis/uncertainty.qmd --to html
+ "
\ No newline at end of file
diff --git a/.github/workflows/sipnet.yml b/.github/workflows/sipnet.yml
index 6a19d129eab..c3d24836185 100644
--- a/.github/workflows/sipnet.yml
+++ b/.github/workflows/sipnet.yml
@@ -54,15 +54,12 @@ jobs:
curl -L -o sipnet-linux "https://github.com/PecanProject/sipnet/releases/download/v1.3.0/sipnet-linux-v1.3.0"
chmod +x sipnet-linux
- - name: Download climate file from GitHub
- run: |
- curl -L -o niwot.clim "https://raw.githubusercontent.com/PecanProject/sipnet/refs/heads/master/tests/smoke/niwot.clim"
-
-
# compile PEcAn code
- name: build
run: make -j1
# run SIPNET test
+ - name: copy met file into working directory
+ run: Rscript -e 'file.copy(system.file("niwot.clim", package = "PEcAn.SIPNET"), "niwot.clim")'
- name: integration test
run: ./tests/integration.sh ghaction
diff --git a/.gitignore b/.gitignore
index c24fc853303..d910f1e4e30 100644
--- a/.gitignore
+++ b/.gitignore
@@ -110,3 +110,11 @@ contrib/modellauncher/modellauncher
# don't track project level .Rprofile
.Rprofile
+
+
+# Ignore any folder named demo_outdir (Quarto notebook outputs)
+**/demo_outdir/
+dbfiles/
+**/temperate.coniferous/
+*.sensitivity.analysis.*.pdf
+*.variance.decomposition.*.pdf
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9f9f4a00b64..1d5165828e0 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,19 +1,74 @@
# Change Log
+
All notable changes are kept in this file. All changes made should be added to the section called
`Unreleased`. Once a new release is made this file will be updated to create a new `Unreleased`
section for the next release.
For more information about this file see also [Keep a Changelog](http://keepachangelog.com/) .
+## Unreleased
+
+### Added
+
+- Add function `clip_and_save_raster_file()` for subsetting rasters to match a polygon of interest (#3537).
+- Add CH4 and N2O to standard_vars in PEcAn.utils
+- New function `sat_vapor_pressure()` added for computing saturation vapor pressure from temperature using various methods.
+- Added `AmeriFlux_met_ensemble()` function with ERA5 fallback for AmeriFlux meteorological data processing and ensemble generation
+- Added `all_site_nc_merge_by_year()` and `single_site_nc_merge()` functions to merge netCDF files across ensembles and sites from pecan model netCDF outputs.
+- Added parallel mode for the entire SDA workflow.
+- Define, add support for, and parse events schema
+ - Events schema and validate_events() function to PEcAn.data.land (#3623, #3521)
+ - Add `write.events.SIPNET()` to generate SIPNET `events.in` files from a `events.json` file.
+- Included all relevant carbon pools (`ROOT_BIOMASS`, `AG_BIOMASS`, `SOIL_STOCK`, `LIT_BIOMASS`) in BADM-based IC extraction; excluded non-pool variables like `SOIL_CHEM`.
+- Added explicit support for `LIT_BIOMASS` to fully utilize **BADM** biomass capabilities.
+- Added `test-IC_BADM_Utilities.R` to validate BADM initial condition extraction and processing
+- Added function for merging images from the same tiling system (MODIS, GLANCE, ICESat-2, HLS, etc.).
+- Added function for converting images to GDAL-supported formats (H5, NetCDF, HDF4, GeoTIFF, etc.).
+- New utility script `IC_SOILGRID_Utilities.R` for processing SoilGrids data to generate soil carbon initial condition (IC) files. This includes (#3508):
+ - **`soilgrids_ic_process`**: A function to extract, process, and generate ensemble members from SoilGrids250m data.
+ - **`preprocess_soilgrids_data`**: A helper function to handle missing values and ensure data integrity during preprocessing.
+ - **`generate_soilgrids_ensemble`**: A function to create ensemble members for a site based on processed soil carbon data.
+- `extract.nc.ERA5()` and `met2CF.ERA5` now support both ensemble and reanalysis data processing.
+- Initial Quarto notebook `run_pecan.qmd` to run PEcAn Demo 1 workflow from a pre-configured `pecan.xml` file, enabling notebook-based model runs, analysis, and visualization (#3531)
+ - Directory structure for PEcAn Quarto notebooks under `pecan/documentation/tutorials/Demo_1_Basic_Run`
+ - Support for inspecting and plotting NetCDF output variables within the notebook workflow.
+- Added support for soil temperature, relative humidity, soil moisture, and PPFD downscaling to `met_temporal_downscale.Gaussian_ensemble`
+- The PEcAn uncertainty analysis tutorial ("Demo 2") has been updated and reimplemented as a Quarto notebook at `documentation/tutorials/Demo_02_Uncertainty_Analysis/uncertainty.qmd`. (#3570)
+- Added the shared `input_design` matrix, generated via
+ `runModule.run.write.configs()`/`generate_joint_ensemble_design()`, that keeps
+ parameter draws and sampled inputs aligned across `run.write.configs()`,
+  `write.ensemble.configs()` (#3535, #3634, #3677).
+
+### Fixed
+
+- Fixed a bug: BADM now processes both single-site and multi-site settings, detecting the input structure and processing each site independently to generate the correct number of ensemble members per site.
+- Fixed "external pointer is not valid" error and addressed key bugs in `soilgrids_soilC_extract()` function (#3506)
+- Fixed a bug within the `model2netcdf.SIPNET` function where we assumed the constant calculations of `pecan_start_doy` across years (the calculations should vary depending on the last date from the last loop and the start date of the current loop), which will lead to incorrect calculations of the start `sub_dates` and `sub_dates_cf` if we are jumping between years (e.g., from 2012-12-31 to 2013-01-01). The `sipnet2datetime` function is no longer used anywhere and therefore has been removed.
+
+### Changed
+
+- Package `PEcAn.uncertainty` has changed licensing. With approval from all its contributors, we now provide it under a BSD 3-clause license rather than the previously used NCSA Open Source license.
+- Ensemble and sensitivity analyses now assign an ensemble ID if one is not specified in the XML, even when running with no DB (#3654).
+- `download.ERA5_cds` now uses the R package ecmwfr (replacing python dependency of cdsapi via reticulate), enabling direct NetCDF downloads; and made flexible for both reanalysis and ensemble data product.
+- `extract_soil_gssurgo` now supports spatial sampling using a grid of user-defined size and spacing, and supports ensemble simulation of soil organic carbon (SOC) stocks using area-weighted aggregation.
+- The ERA5 NC extraction function can now handle multiple sites instead of one.
+- All of the `met2model.*` functions no longer write a list of variables (`*.nc.var`) file alongside each output netcdf. If you need var files, use `PEcAn.utils::nc_write_vars()` after the run completes (#3611, #3616).
+- Refactor `convert_input` to Perform tasks via helper function. Subtask of [#3307](https://github.com/PecanProject/pecan/issues/3307)
+- Stopped testing on R 4.1, started testing on R 4.5, and updated prebuilt Docker images to match -- they are now available for R releases 4.2 through 4.5 as well as for R under development.
+- `write.config.STICS()` now modifies parameters with vectors rather than individually.
+- Code for DART has been moved from `modules/` to `contrib/` and its license more clearly described.
+
+
+
## [1.9.0] - 2025-05-25
### Added
-- Documentation of `make` options including addition of `make help`
+- Documentation of `make` options including addition of `make help`
- Removed reference to PEcAn VM from documentation #3478
- Add make option to document a single package with `make documentation pathto/package`
- `settings$host$qsub` and `settings$host$modellauncher$qsub.extra` will now expand `@NJOBS@` to the number of models in the run, allowing e.g. `--array=1-@NJOBS@`. Note that qsub still by default submits every model as a separate job, so for now this is mostly useful for custom modellauncher scripts
-- Added automated pkgdown documentation for all PEcAn packages (@divine7022, #3482):
+- Added automated pkgdown documentation for all PEcAn packages (@divine7022, #3482):
- Compiled pages are live at https://pecanproject.github.io/package-documentation and inside Docker at `pecan.localhost/pkgdocs/`, and these are automatically updated each time a PR to the source packages is merged.
- You can compile all pkgdown pages locally at any time with `make pkgdocs`.
@@ -46,6 +101,8 @@ For more information about this file see also [Keep a Changelog](http://keepacha
- Package `PEcAn.PRELES` is no longer built or tested by default, because of ongoing build failures in the `RPreles` package it depends on.
Install it manually as needed, and be aware it is not being routinely checked by CI (so if you're checking it, please file bugs and submit fixes!).
+
+
## [1.8.0] - 2024-07-12
### Added
@@ -72,8 +129,9 @@ For more information about this file see also [Keep a Changelog](http://keepacha
- Added GEDI AGB preparation workflow.
- Added new feature of downloading datasets from the NASA DAAC ORNL database.
- Extended downscale function and created 'downscale_hrly' so that it handles more frequent data
-- Added 'aggregate' as a new feature for downscaled data
+- Added 'aggregate' as a new feature for downscaled data.
- Added ERA5 download function that applies to the new CDS API.
+- Added downscale functions and scripts that apply to the North America SDA run.
### Fixed
@@ -164,6 +222,8 @@ convert data for a single PFT fixed (#1329, #2974, #2981)
- Unused (and apparently long-broken) function `PEcAn.data.land::find.land` has been removed.
- No longer building r136 sipnet docker image.
+
+
## [1.7.2] - 2021-10-04
### Due to dependencies, PEcAn is now using R 4.0.3 for Docker images.
@@ -268,6 +328,8 @@ This is a major change:
`logger.setOutputFile`, `logger.setQuitOnSevere`, `logger.setWidth`, `logger.severe`, `logger.warn`.
These are now in `PEcAn.logger`
+
+
## [1.7.1] - 2018-09-12
### Fixed
@@ -323,6 +385,8 @@ This is a major change:
- Removed package `PEcAn.data.mining` from the Make build. It can still be installed directly from R if desired, but is skipped by default because it is in early development, does not yet export any functions, and creates a dependency on the (large, often annoying to install) ImageMagick library.
- Fully deprecate support for `MySQL` database driver. Now, only `PostgreSQL` (and, experimentally, `RPostgres`) are supported. With this, remove `RMySQL` dependency in several places.
+
+
## [1.7.0] - 2018-12-09
### Fixes
@@ -377,6 +441,8 @@ This is a major change:
- Change base image for R code from `r-base` to `rocker/tidyverse:3.5.1`. This (1) saves build time (because many R packages and system dependencies are pre-installed), and (2) enhances reproducibility (because of the strict versioning rules of the `rocker` packages)
- Re-factor web interface RabbitMQ create connections and post messages into their own PHP functions.
+
+
## [1.6.0] - 2018-09-01
### Fixes
@@ -407,6 +473,7 @@ This is a major change:
- Small change to modules/data.atmosphere/R/download.NARR_site.R to set parallel=TRUE to match documentation and sub-function calls
+
## [1.6.0] - Not yet
### Fixes
@@ -503,6 +570,8 @@ This is a major change:
- read.ensemble.output, get.ensemble.samples and write.ensemble.configs have been moved to PEcAn.uncertainty
- Change the way packages are checked for and called in SHINY apps. DESCRIPTION files in SHINY apps are not the place to declare pacakge dpendencies.
+
+
## [1.5.3] - 2018-05-15
### Fixes
@@ -555,6 +624,8 @@ This is a major change:
- Edited met2model.ED2 to not enforce leap years.
- Integrate demo 1 into basic user guide
+
+
## [1.5.2] - 2017-12-07
### Fixes
@@ -579,6 +650,8 @@ This is a major change:
- `fqdn` moved to `PEcAn.remote`
- PEcAnRTM: Removed effective sample size normalization from likelihood calculation. It was giving weird results.
+
+
## [1.5.1] - 2017-10-05
### Fixes
@@ -630,7 +703,9 @@ This is a major change:
- Rpreles and Maeswrap package moved to suggest checked for within package function.
+
## [1.5.0] - 2017-07-13
+
### Added
- Added cron job and script for the sync of the database.
- Added PEcAn.utils::download.file() to allow for use of alternative FTP programs
@@ -647,6 +722,8 @@ This is a major change:
- upscale_met now accepts ~any valid CF file (not just full years), retains correct time units, and respects the previously ignored `overwrite` parameter
- Better date handling in BioCro functions
+
+
## [1.4.10.1] - 2017-04-18
### Changed
@@ -656,6 +733,7 @@ This is a major change:
- Bugfixes in met.process
+
## [1.4.10] - 2017-03-27
Documentation
@@ -669,6 +747,8 @@ Documentation
### Removed
- Ameriflux is no longer selectable from the web gui [#1291](https://github.com/PecanProject/pecan/issues/1291)
+
+
## [1.4.9] - 2016-12-10
Benchmarking, code cleanup
@@ -679,8 +759,12 @@ Benchmarking, code cleanup
- no more build.sh, using Makefile
- Lots of code cleanup thanks to @bpbond
+
+
## [1.4.8] - 2016-08-11
Camp PEON: Assimilation, multi-site, soil params, Maespa, LPJ-GUESS, improvements to web & tutorials
+
+
## [1.4.7] - 2016-07-13
CMIP5, Shiny, FLUXNET2015, Global Sensitivity
diff --git a/CITATION.cff b/CITATION.cff
index 5c4de2bdf35..e871eba01ff 100644
--- a/CITATION.cff
+++ b/CITATION.cff
@@ -120,6 +120,13 @@ authors:
orcid: 'https://orcid.org/0000-0002-7430-7879'
- given-names: Harunobu Ishii
affiliation: Boston University Software & Application Innovation Lab(SAIL)
+ - affiliation: Finnish Meteorological Institute
+ given-names: Quentin Bell
+ orcid: 'https://orcid.org/0009-0005-0253-8642'
+ - given-names: Akash BV
+ affiliation: CMR Institute of Technology, Bengaluru
+ - given-names: Aritra Dey
+ affiliation: National Institute of Technology, Tiruchirappalli
preferred-citation:
type: article
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 1ec2e870917..4c949543cdf 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -116,7 +116,7 @@ git push -u origin GH-issuenumber-title-of-issue
## Additional Resources
-- [Adding models to PEcAn](https://pecanproject.github.io/pecan-documentation/latest/adding-an-ecosystem-model.html)
-- [PEcAn configuration files](https://pecanproject.github.io/pecan-documentation/latest/pecan-xml-configuration.html)
+- [Adding models to PEcAn](https://pecanproject.github.io/pecan-documentation/develop/adding-model.html)
+- [PEcAn configuration files](https://pecanproject.github.io/pecan-documentation/develop/pecanXML.html)
- [Development help](https://pecanproject.github.io/pecan-documentation/latest/developer-guide.html)
- [PEcAn Code of Conduct](CODE_OF_CONDUCT.md)
diff --git a/DEV-INTRO.md b/DEV-INTRO.md
index 476cdcd7e00..68637228bf1 100644
--- a/DEV-INTRO.md
+++ b/DEV-INTRO.md
@@ -42,7 +42,7 @@ _Note for Linux (including Windows WSL2) users:_ add your user to the docker gro
```sh
# for linux users
-sudo adduser ${USER} docker.
+sudo adduser ${USER} docker
```
### Deploying PEcAn in Docker
diff --git a/Makefile b/Makefile
index fa508f1c17f..bb4998f900b 100644
--- a/Makefile
+++ b/Makefile
@@ -15,7 +15,7 @@ MODULES := allometry assim.batch assim.sequential benchmark \
# (Most need more development first)
# If you need one of these on your system, add it to the appropriate line above.
# MODELS: cable preles
-# MODULES: data.mining DART
+# MODULES: data.mining
SHINY := $(dir $(wildcard shiny/*/.))
SHINY := $(SHINY:%/=%)
@@ -148,7 +148,7 @@ help:
@echo " make .doc/modules/assim.sequential # Generate documentation for a specific package"
@echo ""
@echo "Notes:"
- @echo " - Components not included by default: cable and preles (models), data.mining and DART (modules)."
+ @echo " - Components not included by default: cable and preles (models), data.mining (modules)."
@echo " To install any of these, see comments in the Makefile and be aware they may need code updates."
@echo " - Standard workflow: install packages, run checks, test, and document before submitting a PR."
@echo " - Before submitting a PR, please ensure that all tests pass, code is linted, and documentation is up-to-date."
diff --git a/README.md b/README.md
index cedd88e6f25..0ac10fc2770 100644
--- a/README.md
+++ b/README.md
@@ -21,11 +21,21 @@ PEcAn is not itself an ecosystem model, and it can be used to with a variety of
## Documentation
-Consult documentation of the PEcAn Project; either the [latest stable development](https://pecanproject.github.io/pecan-documentation/develop/) branch, the latest [release](https://pecanproject.github.io/pecan-documentation/latest/). Documentation from [earlier releases is here](https://pecanproject.github.io/documentation.html).
+PEcAn documentation is available for both the [latest stable development](https://pecanproject.github.io/pecan-documentation/develop/) branch and the [latest release](https://pecanproject.github.io/pecan-documentation/latest/). Documentation for [earlier releases](https://pecanproject.github.io/documentation/) is also available.
+
+Package-level reference documentation for PEcAn modules is generated using [pkgdown](https://pkgdown.r-lib.org/) and can be found [here](https://pecanproject.github.io/package-documentation/develop/)
## Getting Started
-See our ["Tutorials Page"](https://pecanproject.github.io/tutorials.html) that provides self-guided tutorials, links to vignettes, and an overview presentation.
+See our ["Tutorials Page"](https://pecanproject.github.io/tutorials/) that provides self-guided tutorials, links to vignettes, and an overview presentation.
+
+### Notebook-based tutorials
+
+The **recommended** way to get started with PEcAn is through the notebook-based tutorials. These provide reproducible, end-to-end examples of PEcAn workflows using rendered **Quarto** notebooks.
+
+Current tutorials include:
+- [Running a PEcAn workflow](https://pecanproject.github.io/pecan-documentation/develop/rendered-demo-notebooks/run_pecan.html)
+- [Uncertainty analysis tutorial](https://pecanproject.github.io/pecan-documentation/develop/rendered-demo-notebooks/uncertainty.html)
### Installation
@@ -49,12 +59,11 @@ Some functionalities will be limited without also installing the Postgres databa
Visit our [webpage](https://pecanproject.github.io) to keep up with latest news, version, and information about the PEcAn Project
-#### Web Interface demo
+#### Web Interface demo (legacy)
-The fastest way to begin modeling ecosystems is through the PEcAn web interface.
-We have a [demo website](http://pecan.ncsa.illinois.edu/pecan/01-introduction.php) that runs the current version of PEcAn. Using this instance you can perform a run using either ED or SIPNET at any of the predefined sites.
+The PEcAn web interface is considered a legacy interface and is **no longer** the recommended entry point for new users.
-The demo instance only allows for runs at pecan.ncsa.illinois.edu. Once you have set up the run it will execute on our server; depending on the number of people executing a model and the model selected this can take between a few seconds and a few minutes to finish. Once it's finished, you see the results of the execution and can plot the outputs of the model. Complete examples of a few executions can be found in our online [tutorials](http://pecanproject.github.io/tutorials.html).
+Notebook-based tutorials provide reproducible, up-to-date examples of PEcAn workflows and are the recommended starting point for learning and exploration.
## Publications
diff --git a/apps/api/R/entrypoint.R b/apps/api/R/entrypoint.R
index 5f1d8a3fb94..7cd5c9c28be 100755
--- a/apps/api/R/entrypoint.R
+++ b/apps/api/R/entrypoint.R
@@ -69,6 +69,15 @@ root$mount("/api/runs", runs_pr)
runs_pr <- plumber::Plumber$new("available-models.R")
root$mount("/api/availableModels", runs_pr)
+# The endpoints mounted here are related to details of PEcAn posteriors
+runs_pr <- plumber::Plumber$new("posteriors.R")
+root$mount("/api/posteriors", runs_pr)
+
+# Run a meta-analysis from an uploaded XML file
+# Caution: Minimally tested.
+ma_pr <- plumber::Plumber$new("ma.R")
+root$mount("/api/ma", ma_pr)
+
# set swagger documentation
root$setApiSpec("../pecanapi-spec.yml")
diff --git a/apps/api/R/ma.R b/apps/api/R/ma.R
new file mode 100644
index 00000000000..88269408736
--- /dev/null
+++ b/apps/api/R/ma.R
@@ -0,0 +1,61 @@
+library(dplyr)
+library("PEcAn.all")
+library("RCurl")
+
+#' Post a settings file for running a Meta-Analysis
+#' @param req Send pecan.xml in body as xml filetype
+#' @return A list of post.distns.MA.R
+#' @author Nihar Sanda
+#* @post /run
+submitWorkflow <- function(req, res){
+ if(req$HTTP_CONTENT_TYPE == "application/xml") {
+    # read req$body as xml
+ settingsXml <- XML::xmlParseString(stringr::str_replace(req$body, ".*?>\n", ""))
+
+ ## convert the xml to a list
+ settings <- XML::xmlToList(settingsXml)
+ settings <- as.Settings(settings)
+ settings <- expandMultiSettings(settings)
+
+ # Update/fix/check settings.
+ # Will only run the first time it's called, unless force=TRUE
+ settings <-
+ PEcAn.settings::prepare.settings(settings, force = FALSE)
+
+ # Changing update to TRUE
+ settings$meta.analysis$update <- TRUE
+
+ # Write pecan.CHECKED.xml
+ PEcAn.settings::write.settings(settings, outputfile = "pecan.CHECKED.xml")
+
+ # Do conversions
+ settings <- PEcAn.workflow::do_conversions(settings)
+ settings <- PEcAn.workflow::runModule.get.trait.data(settings)
+
+ # initiating variables needed for running meta analysis
+ pfts <- settings$pfts
+ iterations <- settings$meta.analysis$iter
+ random <- settings$meta.analysis$random.effects$on
+ use_ghs <- settings$meta.analysis$random.effects$use_ghs
+ threshold <- settings$meta.analysis$threshold
+ dbfiles <- settings$database$dbfiles
+ database <- settings$database$bety
+
+ # running meta analysis
+ run.meta.analysis(pfts, iterations, random, threshold,
+ dbfiles, database, use_ghs)
+
+ #PEcAn.MA::runModule.run.meta.analysis(settings = ma_settings)
+
+ if(dir.exists(settings$pfts$pft$outdir)){
+ filepath <- paste0(settings$pfts$pft$outdir, "/post.distns.Rdata")
+ e <- new.env(parent = emptyenv())
+ load(filepath, envir = e)
+ return(list(status = "Meta Analysis ran successfully", data = as.list(e)))
+ }
+ }
+ else{
+ res$status <- 415
+ return(paste("Unsupported request content type:", req$HTTP_CONTENT_TYPE))
+ }
+}
\ No newline at end of file
diff --git a/apps/api/R/posteriors.R b/apps/api/R/posteriors.R
new file mode 100644
index 00000000000..86579486fce
--- /dev/null
+++ b/apps/api/R/posteriors.R
@@ -0,0 +1,176 @@
+library(dplyr)
+
+#' Search for Posteriors containing wildcards for filtering
+#' @param pft_id PFT Id (character)
+#' @param offset The number of posteriors to skip before collecting the result set
+#' @param limit The maximum number of posteriors to return (10, 20, 50, 100, or 500)
+#' @return Information about Posteriors based on pft
+#' @author Nihar Sanda
+#* @get /
+searchPosteriors <- function(req, pft_id = NA, host_id = NA,
+ offset = 0, limit = 50, res) {
+ if (!limit %in% c(10, 20, 50, 100, 500)) {
+ res$status <- 400
+ return(list(error = "limit parameter must be 10, 20, 50, 100, or 500"))
+ }
+
+ posteriors <- tbl(global_db_pool, "posteriors") %>%
+ select(everything())
+
+ posteriors <- tbl(global_db_pool, "dbfiles") %>%
+ select(file_name,
+ file_path,
+ container_type,
+ id = container_id,
+ machine_id) %>%
+ inner_join(posteriors, by = "id") %>%
+ filter(container_type == "Posterior") %>%
+ select(-container_type)
+
+ posteriors <- tbl(global_db_pool, "machines") %>%
+ select(hostname, machine_id = id) %>%
+ inner_join(posteriors, by = "machine_id")
+
+ posteriors <- tbl(global_db_pool, "pfts") %>%
+ select(pft_name = name, pft_id = id) %>%
+ inner_join(posteriors, by = "pft_id")
+
+ if (!is.na(pft_id)) {
+ posteriors <- posteriors %>%
+ filter(pft_id == !!pft_id)
+ }
+
+ if (!is.na(host_id)) {
+ posteriors <- posteriors %>%
+ filter(machine_id == !!host_id)
+ }
+
+ qry_res <- posteriors %>%
+ select(-pft_id, -machine_id) %>%
+ distinct() %>%
+ arrange(id) %>%
+ collect()
+
+ if (nrow(qry_res) == 0 || as.numeric(offset) >= nrow(qry_res)) {
+ res$status <- 404
+ return(list(error = "Posterior(s) not found"))
+ } else {
+ has_next <- FALSE
+ has_prev <- FALSE
+ if (nrow(qry_res) > (as.numeric(offset) + as.numeric(limit))) {
+ has_next <- TRUE
+ }
+ if (as.numeric(offset) != 0) {
+ has_prev <- TRUE
+ }
+
+ start_idx <- as.numeric(offset) + 1
+ end_idx <- min((as.numeric(offset) + as.numeric(limit)), nrow(qry_res))
+ qry_res <- qry_res[start_idx:end_idx, ]
+
+ result <- list(posteriors = qry_res)
+ result$count <- nrow(qry_res)
+ if (has_next) {
+ if (grepl("offset=", req$QUERY_STRING, fixed = TRUE)) {
+ result$next_page <- paste0(
+ req$rook.url_scheme, "://",
+ req$HTTP_HOST,
+ "/api/posteriors",
+ req$PATH_INFO,
+ substr(req$QUERY_STRING,
+ 0,
+ stringr::str_locate(req$QUERY_STRING, "offset=")[[2]]),
+ (as.numeric(limit) + as.numeric(offset)),
+ "&limit=",
+ limit
+ )
+ } else {
+ result$next_page <- paste0(
+ req$rook.url_scheme, "://",
+ req$HTTP_HOST,
+ "/api/posteriors",
+ req$PATH_INFO,
+ substr(req$QUERY_STRING,
+ 0,
+ stringr::str_locate(req$QUERY_STRING, "limit=")[[2]] - 6),
+ "offset=",
+ (as.numeric(limit) + as.numeric(offset)),
+ "&limit=",
+ limit
+ )
+ }
+ }
+ if (has_prev) {
+ result$prev_page <- paste0(
+ req$rook.url_scheme, "://",
+ req$HTTP_HOST,
+ "/api/workflows",
+ req$PATH_INFO,
+ substr(req$QUERY_STRING,
+ 0,
+ stringr::str_locate(req$QUERY_STRING, "offset=")[[2]]),
+ max(0, (as.numeric(offset) - as.numeric(limit))),
+ "&limit=",
+ limit
+ )
+ }
+
+ return(result)
+ }
+}
+
+################################################################################
+
+#' Download the posterior specified by the id
+#' @param id Posterior id (character)
+#' @param filename Optional filename specified if the id points to a folder
+#' instead of file (character). If this is passed with an id that actually
+#' points to a file, this name will be ignored
+#' @return Posterior file specified by user
+#' @author Nihar Sanda
+#* @serializer contentType list(type="application/octet-stream")
+#* @get /
+downloadPosterior <- function(posterior_id, filename = "", req, res) {
+ db_hostid <- PEcAn.DB::dbHostInfo(global_db_pool)$hostid
+
+ # This is just for temporary testing due to the existing issue in dbHostInfo()
+ db_hostid <- ifelse(db_hostid == 99, 99000000001, db_hostid)
+
+ posterior <- tbl(global_db_pool, "dbfiles") %>%
+ select(file_name, file_path, container_id, machine_id, container_type) %>%
+ filter(machine_id == !!db_hostid) %>%
+ filter(container_type == "Posterior") %>%
+ filter(container_id == !!posterior_id) %>%
+ collect()
+
+ if (filename != "") {
+ posterior <- posterior %>%
+ filter(file_name == !!filename)
+ }
+
+ if (nrow(posterior) == 0) {
+ res$status <- 404
+ return("Posterior not found")
+ }
+
+ # Generate the full file path using the file_path & file_name
+ filepath <- file.path(posterior$file_path, posterior$file_name)
+
+ if (length(filepath) > 1 || dir.exists(filepath)) {
+ # Don't know which file to send. Return 400 Bad Request error
+ # TODO provide an endpoint to list the available files from one posterior
+ # (maybe `/posteriors/{posterior_id}/files`?)
+ res$status <- 400
+ return("Multiple matches. Please specify filename")
+ }
+
+ # If the file doesn't exist, return 404 error
+ if (!file.exists(filepath)) {
+ res$status <- 404
+ return("Posterior file not found")
+ }
+
+ # Read the data in binary form & return it
+ bin <- readBin(filepath, "raw", n = file.info(filepath)$size)
+ return(bin)
+}
diff --git a/apps/api/R/runs.R b/apps/api/R/runs.R
index 090a01b948c..65a25546697 100644
--- a/apps/api/R/runs.R
+++ b/apps/api/R/runs.R
@@ -305,26 +305,39 @@ getRunInputs <- function(indir){
#' @return Output details of the run
#' @author Tezan Sahu
-getRunOutputs <- function(outdir){
+getRunOutputs <- function(outdir) {
outputs <- list()
- if(file.exists(paste0(outdir, "/logfile.txt"))){
+ if (file.exists(file.path(outdir, "logfile.txt"))) {
outputs$logfile <- "logfile.txt"
}
-
- if(file.exists(paste0(outdir, "/README.txt"))){
+
+ if (file.exists(file.path(outdir, "README.txt"))) {
outputs$info <- "README.txt"
}
-
- year_files <- list.files(outdir, pattern="*.nc$")
+
+ varfile_path <- file.path(outdir, "nc_vars.txt")
+ if (!file.exists(varfile_path)) {
+ PEcAn.utils::nc_write_varfiles(outdir, write_mode = "collected")
+ }
+ whole_run_varlines <- readLines(varfile_path)
+
+ year_files <- list.files(outdir, pattern = "*.nc$")
years <- stringr::str_replace_all(year_files, ".nc", "")
years_data <- c()
outputs$years <- list()
- for(year in years){
- var_lines <- readLines(paste0(outdir, "/", year, ".nc.var"))
+ for (year in years) {
+ # Get variable list from a directly paired yyyy.nc.var if it exists,
+ # else use the vars defined in nc_vars.txt
+ paired_varfile <- file.path(outdir, paste0(year, ".nc.var"))
+ if (file.exists(paired_varfile)) {
+ var_lines <- readLines(paired_varfile)
+ } else {
+ var_lines <- whole_run_varlines
+ }
keys <- stringr::word(var_lines, 1)
values <- stringr::word(var_lines, 2, -1)
vars <- list()
- for(i in 1:length(keys)){
+ for (i in seq_along(keys)) {
vars[keys[i]] <- values[i]
}
years_data <- c(years_data, list(list(
@@ -332,7 +345,7 @@ getRunOutputs <- function(outdir){
variables = vars
)))
}
- for(i in 1:length(years)){
+ for (i in seq_along(years)) {
outputs$years[years[i]] <- years_data[i]
}
return(outputs)
diff --git a/apps/api/R/workflows.R b/apps/api/R/workflows.R
index 302ff56fa6a..3485e44859a 100644
--- a/apps/api/R/workflows.R
+++ b/apps/api/R/workflows.R
@@ -244,3 +244,75 @@ getWorkflowFile <- function(req, id, filename, res){
return(bin)
}
}
+
+
+#################################################################################################
+#' Get the list of files in a workflow specified by the id
+#' @param id Workflow id (character)
+#' @return List of files
+#' @author Nihar Sanda
+#* @get /<id>/files
+
+getWorkflowFileDetails <- function(req, id, res) {
+ Workflow <- tbl(global_db_pool, "workflows") %>%
+ select(id, user_id, folder) %>%
+ filter(id == !!id)
+
+ qry_res <- Workflow %>% collect()
+
+ if (nrow(qry_res) == 0) {
+ res$status <- 404
+ return(list(error = "Workflow with specified ID was not found"))
+ }
+
+ list(workflow_id = id, file_names = list.files(qry_res$folder))
+}
+
+#################################################################################################
+#' Get the zip of specified files of the workflow specified by the id
+#' @param id Workflow id (character)
+#' @return Details of requested workflow
+#' @author Nihar Sanda
+#* @serializer contentType list(type="application/octet-stream")
+#* @post /<id>/file-multiple/
+
+getWorkflowFilesAsZip <- function(req, id, filenames, res){
+ if(req$HTTP_CONTENT_TYPE == "application/json") {
+ filenames_req <- req$postBody
+ }
+
+ filenamesList <- jsonlite::fromJSON(filenames_req)
+ filenames <- filenamesList$files
+
+ Workflow <- tbl(global_db_pool, "workflows") %>%
+ select(id, user_id, folder) %>%
+ filter(id == !!id)
+
+ qry_res <- Workflow %>% collect()
+
+ if (nrow(qry_res) == 0) {
+ res$status <- 404
+ return()
+ } else {
+ full_files <- vector(mode = "character", length = length(filenames))
+ for (i in seq_along(filenames)) {
+ # Check if the requested file exists on the host
+ filepath <- file.path(qry_res$folder, filenames[[i]])
+ if (! file.exists(filepath)) {
+ res$status <- 404
+ return()
+ }
+
+ if (Sys.getenv("AUTH_REQ") == TRUE) {
+ if (qry_res$user_id != req$user$userid) {
+ res$status <- 403
+ return()
+ }
+ }
+
+ full_files[i] <- filepath
+ }
+ zip_file <- zip::zipr("output.zip", full_files)
+ return(zip_file)
+ }
+}
diff --git a/apps/api/pecanapi-spec.yml b/apps/api/pecanapi-spec.yml
index f06d3dfde0b..1d0d76803ca 100644
--- a/apps/api/pecanapi-spec.yml
+++ b/apps/api/pecanapi-spec.yml
@@ -41,6 +41,8 @@ tags:
description: Everything about PEcAn PFTs (Plant Functional Types)
- name: inputs
description: Everything about PEcAn inputs
+ - name: posteriors
+ description: Everything about PEcAn posteriors
#####################################################################################################################
##################################################### API Endpoints #################################################
@@ -779,6 +781,40 @@ paths:
description: Authentication required
'403':
description: Access forbidden
+
+ /api/workflows/{id}/file-multiple/:
+ post:
+ tags:
+ - workflows
+ summary: Download multiple files
+ parameters:
+ - in: path
+ name: id
+ description: ID of the PEcAn Workflow
+ required: true
+ schema:
+ type: string
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/WorkflowFiles_POST'
+
+
+ responses:
+ '200':
+ description: Download the zip file consisting of the desired files
+ content:
+ application/octet-stream:
+ schema:
+ type: string
+ format: binary
+ '401':
+ description: Authentication required
+ '415':
+ description: Unsupported request content type
+
/api/runs/:
get:
@@ -992,6 +1028,123 @@ paths:
description: Access forbidden
'404':
description: Run data not found
+
+ /api/posteriors/:
+ get:
+ tags:
+ - posteriors
+ summary: Search for the posteriors
+ parameters:
+ - in: query
+ name: pft_id
+ description: If provided, returns all posteriors for the provided pft_id
+ required: false
+ schema:
+ type: string
+ - in: query
+ name: host_id
+ description: If provided, returns all posteriors for the provided host_id
+ required: false
+ schema:
+ type: string
+ - in: query
+ name: offset
+ description: The number of posteriors to skip before starting to collect the result set.
+ schema:
+ type: integer
+ minimum: 0
+ default: 0
+ required: false
+ - in: query
+ name: limit
+ description: The number of posteriors to return.
+ schema:
+ type: integer
+ default: 50
+ enum:
+ - 10
+ - 20
+ - 50
+ - 100
+ - 500
+ required: false
+ responses:
+ '200':
+ description: List of posteriors
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ inputs:
+ type: array
+ items:
+ type: object
+ properties:
+ id:
+ type: string
+ file_name:
+ type: string
+ file_path:
+ type: string
+ pft_name:
+ type: string
+ tag:
+ type: string
+ hostname:
+ type: string
+ start_date:
+ type: string
+ end_date:
+ type: string
+ count:
+ type: integer
+ next_page:
+ type: string
+ prev_page:
+ type: string
+
+ '401':
+ description: Authentication required
+ '403':
+ description: Access forbidden
+ '404':
+ description: Posteriors not found
+
+ /api/posteriors/{posterior_id}:
+ get:
+ tags:
+ - posteriors
+ summary: Download a desired PEcAn posterior file
+ parameters:
+ - in: path
+ name: posterior_id
+ description: ID of the PEcAn Posterior to be downloaded
+ required: true
+ schema:
+ type: string
+ - in: query
+ name: filename
+ description: Optional filename specified if the id points to a folder instead of file
+ required: false
+ schema:
+ type: string
+ responses:
+ '200':
+ description: Contents of the desired posterior file
+ content:
+ application/octet-stream:
+ schema:
+ type: string
+ format: binary
+ '400':
+ description: Bad request. Posterior ID points to directory & filename is not specified
+ '401':
+ description: Authentication required
+ '403':
+ description: Access forbidden
+
+
#####################################################################################################################
###################################################### Components ###################################################
#####################################################################################################################
@@ -1275,6 +1428,20 @@ components:
dbfiles:
type: string
example: pecan/dbfiles
+
+ WorkflowFiles_POST:
+ type: object
+
+ properties:
+ files:
+ type: array
+ items:
+ type: string
+ example: [
+ "pecan.xml",
+ "workflow.R"
+ ]
+
securitySchemes:
basicAuth:
type: http
diff --git a/base/all/DESCRIPTION b/base/all/DESCRIPTION
index f7693ed3a01..ed4fd394579 100644
--- a/base/all/DESCRIPTION
+++ b/base/all/DESCRIPTION
@@ -1,7 +1,7 @@
Package: PEcAn.all
Type: Package
Title: PEcAn Functions Used for Ecological Forecasts and Reanalysis
-Version: 1.9.0
+Version: 1.10.0
Authors@R: c(person("Mike", "Dietze", role = c("aut"),
email = "dietze@bu.edu"),
person("David", "LeBauer", role = c("aut", "cre"),
@@ -43,6 +43,8 @@ Description: The Predictive Ecosystem Carbon Analyzer
PEcAn is to streamline the interaction between data and
models, and to improve the efficacy of scientific
investigation.
+URL: https://pecanproject.github.io
+BugReports: https://github.com/PecanProject/pecan/issues
Depends:
PEcAn.DB,
PEcAn.settings,
@@ -78,3 +80,4 @@ LazyData: true
Encoding: UTF-8
Roxygen: list(markdown = TRUE)
RoxygenNote: 7.3.2
+X-schema.org-keywords: PEcAn, model-validation, model-evaluation, model-comparison, reproducibility
diff --git a/base/all/NEWS.md b/base/all/NEWS.md
index f4ea29e41dd..23ca78a828c 100644
--- a/base/all/NEWS.md
+++ b/base/all/NEWS.md
@@ -1,3 +1,8 @@
+# PEcAn.all 1.10.0
+
+* Added keywords and bug reporting URL to DESCRIPTION.
+* Updated `pecan_version_history` and `pecan_releases` to report the package versions used in PEcAn v1.10.0.
+
# PEcAn.all 1.9.0
## License change
diff --git a/base/all/data/pecan_releases.csv b/base/all/data/pecan_releases.csv
index 89e705ff0dc..8d0ec33aa94 100644
--- a/base/all/data/pecan_releases.csv
+++ b/base/all/data/pecan_releases.csv
@@ -33,3 +33,4 @@
"v1.7.2",2021-10-08,1.7.2
"v1.8.0",2024-07-12,1.8.0
"v1.9.0",2025-05-25,1.9.0
+"v1.10.0",2026-01-02,1.10.0
diff --git a/base/all/data/pecan_version_history.csv b/base/all/data/pecan_version_history.csv
index 1cd9a3cdd79..b228f0e83ce 100644
--- a/base/all/data/pecan_version_history.csv
+++ b/base/all/data/pecan_version_history.csv
@@ -1,54 +1,54 @@
-"package","v1.0","v1.0.1","v1.1","v1.2.5","v1.2.6","v1.3","v1.3.2","v1.3.4","v1.3.5","v1.3.6","v1.3.6.1","1.3.7","v1.4.0","v1.4.1","v1.4.2","v.1.4.3","v1.4.4","v1.4.5","1.4.6","1.4.7","v1.4.8","v1.4.9","v1.4.10","v1.4.10.1","v1.5.0","v1.5.1","v1.5.2","v1.5.3","v1.6.0","v1.7.0","v1.7.1","v1.7.2","v1.8.0","v1.9.0"
-"PEcAn",NA,NA,NA,1.2.5,1.2.6,1.2.6,1.3.1,1.3.3,1.3.3,1.3.3,1.3.7,1.3.3,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA
-"PECAn",1.0,1.0,1.1,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA
-"PEcAn.all",NA,NA,NA,1.2.5,1.2.6,1.2.6,1.3.1,1.3.3,1.3.3,1.3.3,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.8.0,1.9.0
-"PEcAn.allometry",NA,NA,NA,NA,NA,NA,NA,1.3.3,1.3.3,1.3.3,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.7.3,1.7.4
-"PEcAn.assim.batch",NA,NA,NA,NA,1.2.6,1.2.6,1.3.1,1.3.3,1.3.3,1.3.3,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.8.0,1.9.0
-"PEcAn.assim.sequential",NA,NA,NA,NA,1.2.6,1.2.6,1.3.1,1.3.3,1.3.3,1.3.3,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,NA,NA
-"PEcAn.BASGRA",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.7.2,1.8.0,1.8.1
-"PEcAn.benchmark",NA,NA,NA,NA,NA,NA,NA,NA,1.3.3,1.3.3,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.7.3,1.7.4
-"PEcAn.BIOCRO",NA,NA,NA,NA,NA,1.2,1.3.1,1.3.3,1.3.3,1.3.3,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.7.3,1.7.4
-"PEcAn.CABLE",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.7.3,1.7.4
-"PEcAn.CLM45",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.7.3,1.7.4
-"PEcAn.common",NA,NA,NA,1.2.5,1.2.6,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA
-"PEcAn.dalec",NA,NA,NA,NA,NA,NA,NA,1.3.1,1.3.1,1.3.1,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA
-"PEcAn.DALEC",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.3.7,1.3.1,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.7.3,1.7.4
-"PEcAn.data.atmosphere",NA,NA,NA,NA,NA,1.2.6,1.3.1,1.3.3,1.3.3,1.3.3,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.8.0,1.9.0
-"PEcAn.data.hydrology",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,NA,NA
-"PEcAn.data.land",NA,NA,NA,1.2.5,1.2.6,1.2.6,1.3.1,1.3.3,1.3.3,1.3.3,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.8.0,1.8.1
-"PEcAn.data.mining",NA,NA,NA,NA,NA,NA,NA,1.3.3,1.3.3,1.3.3,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.7.3,1.7.4
-"PEcAn.data.remote",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.8.0,1.9.0
-"PEcAn.DB",NA,NA,NA,1.2.5,1.2.6,1.2.6,1.3.1,1.3.3,1.3.3,1.3.3,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.8.0,1.8.1
-"PEcAn.dvmdostem",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.7.3,1.7.4
-"PEcAn.ED",NA,NA,NA,1.2.5,1.2.6,1.2.6,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA
-"PEcAn.ED2",NA,NA,NA,NA,NA,NA,1.3.1,1.3.3,1.3.3,1.3.3,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.8.0,1.8.1
-"PEcAn.emulator",NA,NA,NA,NA,NA,1.0,1.3.1,1.3.3,1.3.3,1.3.3,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.8.0,1.8.1
-"PEcAn.FATES",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.7.3,1.8.0
-"PEcAn.GDAY",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.7.3,1.7.4
-"PEcAn.IBIS",NA,NA,NA,1.2.5,1.2.6,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA
-"PEcAn.JULES",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.7.3,1.7.4
-"PEcAn.LDNDC",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.0.0,1.0.1
-"PEcAn.LINKAGES",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.7.3,1.7.4
-"PEcAn.logger",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.8.0,1.8.2,1.8.3
-"PEcAn.LPJGUESS",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.7.3,1.8.0
-"PEcAn.MA",NA,NA,NA,1.2.5,1.2.6,1.2.6,1.3.1,1.3.3,1.3.3,1.3.3,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.7.3,1.7.4
-"PEcAn.MAAT",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.7.3,1.7.4
-"PEcAn.MAESPA",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.7.3,1.7.4
-"PEcAn.ModelName",NA,NA,NA,NA,NA,1.2,1.3.1,1.3.3,1.3.3,1.3.3,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.8.0,1.8.1
-"PEcAn.photosynthesis",NA,NA,NA,NA,NA,1.2.6,1.3.1,1.3.3,1.3.3,1.3.6,1.3.7,1.3.6,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.7.3,1.7.4
-"PEcAn.PRELES",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.7.3,1.7.4
-"PEcAn.priors",NA,NA,NA,NA,NA,1.2.6,1.3.1,1.3.3,1.3.3,1.3.3,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.7.3,1.7.4
-"PEcAn.qaqc",NA,NA,NA,NA,NA,1.0,1.3.1,1.3.3,1.3.3,1.3.3,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.7.3,1.7.4
-"PEcAn.remote",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.8.0,1.9.0
-"PEcAn.rtm",NA,NA,NA,1.2.5,1.2.6,1.2.6,1.3.1,1.3.3,1.3.3,1.3.3,1.3.7,1.3.3,1.4.0,1.4.1,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA
-"PEcAn.settings",NA,NA,NA,NA,NA,NA,1.3.1,1.3.3,1.3.3,1.3.3,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.8.0,1.9.0
-"PEcAn.SIBCASA",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,0.0.1,0.0.2
-"PEcAn.SIPNET",NA,NA,NA,1.2.5,1.2.6,1.2.6,1.3.1,1.3.3,1.3.3,1.3.3,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.8.0,1.9.0
-"PEcAn.STICS",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.7.2,1.8.0,1.8.1
-"PEcAn.uncertainty",NA,NA,NA,1.2.5,1.2.6,1.2.6,1.3.1,1.3.3,1.3.3,1.3.3,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.8.0,1.8.1
-"PEcAn.utils",NA,NA,NA,1.2.5,1.2.6,1.2.6,1.3.1,1.3.3,1.3.3,1.3.3,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.8.0,1.8.1
-"PEcAn.visualization",NA,NA,NA,1.2.5,1.2.6,1.2.6,1.3.1,1.3.3,1.3.3,1.3.3,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.8.0,1.8.1
-"PEcAn.workflow",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.8.0,1.9.0
-"pecanapi",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.7.0,1.7.1,NA,NA,NA
-"PEcAnAssimSequential",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.8.0,1.9.0
-"PEcAnRTM",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.7.3,1.7.4
+"package","v1.0","v1.0.1","v1.1","v1.2.5","v1.2.6","v1.3","v1.3.2","v1.3.4","v1.3.5","v1.3.6","v1.3.6.1","1.3.7","v1.4.0","v1.4.1","v1.4.2","v.1.4.3","v1.4.4","v1.4.5","1.4.6","1.4.7","v1.4.8","v1.4.9","v1.4.10","v1.4.10.1","v1.5.0","v1.5.1","v1.5.2","v1.5.3","v1.6.0","v1.7.0","v1.7.1","v1.7.2","v1.8.0","v1.9.0","v1.10.0"
+"PEcAn",NA,NA,NA,1.2.5,1.2.6,1.2.6,1.3.1,1.3.3,1.3.3,1.3.3,1.3.7,1.3.3,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA
+"PECAn",1.0,1.0,1.1,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA
+"PEcAn.all",NA,NA,NA,1.2.5,1.2.6,1.2.6,1.3.1,1.3.3,1.3.3,1.3.3,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.8.0,1.9.0,1.10.0
+"PEcAn.allometry",NA,NA,NA,NA,NA,NA,NA,1.3.3,1.3.3,1.3.3,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.7.3,1.7.4,1.7.5
+"PEcAn.assim.batch",NA,NA,NA,NA,1.2.6,1.2.6,1.3.1,1.3.3,1.3.3,1.3.3,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.8.0,1.9.0,1.9.1
+"PEcAn.assim.sequential",NA,NA,NA,NA,1.2.6,1.2.6,1.3.1,1.3.3,1.3.3,1.3.3,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,NA,NA,NA
+"PEcAn.BASGRA",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.7.2,1.8.0,1.8.1,1.8.2
+"PEcAn.benchmark",NA,NA,NA,NA,NA,NA,NA,NA,1.3.3,1.3.3,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.7.3,1.7.4,1.7.5
+"PEcAn.BIOCRO",NA,NA,NA,NA,NA,1.2,1.3.1,1.3.3,1.3.3,1.3.3,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.7.3,1.7.4,1.7.5
+"PEcAn.CABLE",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.7.3,1.7.4,1.7.5
+"PEcAn.CLM45",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.7.3,1.7.4,1.7.5
+"PEcAn.common",NA,NA,NA,1.2.5,1.2.6,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA
+"PEcAn.dalec",NA,NA,NA,NA,NA,NA,NA,1.3.1,1.3.1,1.3.1,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA
+"PEcAn.DALEC",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.3.7,1.3.1,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.7.3,1.7.4,1.7.5
+"PEcAn.data.atmosphere",NA,NA,NA,NA,NA,1.2.6,1.3.1,1.3.3,1.3.3,1.3.3,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.8.0,1.9.0,1.9.1
+"PEcAn.data.hydrology",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,NA,NA,NA
+"PEcAn.data.land",NA,NA,NA,1.2.5,1.2.6,1.2.6,1.3.1,1.3.3,1.3.3,1.3.3,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.8.0,1.8.1,1.9.0
+"PEcAn.data.mining",NA,NA,NA,NA,NA,NA,NA,1.3.3,1.3.3,1.3.3,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.7.3,1.7.4,1.7.5
+"PEcAn.data.remote",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.8.0,1.9.0,1.9.1
+"PEcAn.DB",NA,NA,NA,1.2.5,1.2.6,1.2.6,1.3.1,1.3.3,1.3.3,1.3.3,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.8.0,1.8.1,1.8.2
+"PEcAn.dvmdostem",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.7.3,1.7.4,1.7.5
+"PEcAn.ED",NA,NA,NA,1.2.5,1.2.6,1.2.6,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA
+"PEcAn.ED2",NA,NA,NA,NA,NA,NA,1.3.1,1.3.3,1.3.3,1.3.3,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.8.0,1.8.1,1.8.2
+"PEcAn.emulator",NA,NA,NA,NA,NA,1.0,1.3.1,1.3.3,1.3.3,1.3.3,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.8.0,1.8.1,1.8.2
+"PEcAn.FATES",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.7.3,1.8.0,1.8.1
+"PEcAn.GDAY",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.7.3,1.7.4,1.7.5
+"PEcAn.IBIS",NA,NA,NA,1.2.5,1.2.6,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA
+"PEcAn.JULES",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.7.3,1.7.4,1.7.5
+"PEcAn.LDNDC",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.0.0,1.0.1,1.0.2
+"PEcAn.LINKAGES",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.7.3,1.7.4,1.7.5
+"PEcAn.logger",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.8.0,1.8.2,1.8.3,1.8.4
+"PEcAn.LPJGUESS",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.7.3,1.8.0,1.9.0
+"PEcAn.MA",NA,NA,NA,1.2.5,1.2.6,1.2.6,1.3.1,1.3.3,1.3.3,1.3.3,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.7.3,1.7.4,1.7.5
+"PEcAn.MAAT",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.7.3,1.7.4,1.7.5
+"PEcAn.MAESPA",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.7.3,1.7.4,1.7.5
+"PEcAn.ModelName",NA,NA,NA,NA,NA,1.2,1.3.1,1.3.3,1.3.3,1.3.3,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.8.0,1.8.1,0.0.1
+"PEcAn.photosynthesis",NA,NA,NA,NA,NA,1.2.6,1.3.1,1.3.3,1.3.3,1.3.6,1.3.7,1.3.6,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.7.3,1.7.4,1.7.5
+"PEcAn.PRELES",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.7.3,1.7.4,1.7.5
+"PEcAn.priors",NA,NA,NA,NA,NA,1.2.6,1.3.1,1.3.3,1.3.3,1.3.3,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.7.3,1.7.4,1.7.5
+"PEcAn.qaqc",NA,NA,NA,NA,NA,1.0,1.3.1,1.3.3,1.3.3,1.3.3,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.7.3,1.7.4,1.7.5
+"PEcAn.remote",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.8.0,1.9.0,1.9.1
+"PEcAn.rtm",NA,NA,NA,1.2.5,1.2.6,1.2.6,1.3.1,1.3.3,1.3.3,1.3.3,1.3.7,1.3.3,1.4.0,1.4.1,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA
+"PEcAn.settings",NA,NA,NA,NA,NA,NA,1.3.1,1.3.3,1.3.3,1.3.3,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.8.0,1.9.0,1.9.1
+"PEcAn.SIBCASA",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,0.0.1,0.0.2,0.0.3
+"PEcAn.SIPNET",NA,NA,NA,1.2.5,1.2.6,1.2.6,1.3.1,1.3.3,1.3.3,1.3.3,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.8.0,1.9.0,1.10.0
+"PEcAn.STICS",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.7.2,1.8.0,1.8.1,1.8.2
+"PEcAn.uncertainty",NA,NA,NA,1.2.5,1.2.6,1.2.6,1.3.1,1.3.3,1.3.3,1.3.3,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.8.0,1.8.1,1.9.0
+"PEcAn.utils",NA,NA,NA,1.2.5,1.2.6,1.2.6,1.3.1,1.3.3,1.3.3,1.3.3,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.8.0,1.8.1,1.8.2
+"PEcAn.visualization",NA,NA,NA,1.2.5,1.2.6,1.2.6,1.3.1,1.3.3,1.3.3,1.3.3,1.3.7,1.3.3,1.4.0,1.4.1,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.8.0,1.8.1,1.8.2
+"PEcAn.workflow",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.8.0,1.9.0,1.10.0
+"pecanapi",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.7.0,1.7.1,NA,NA,NA,NA
+"PEcAnAssimSequential",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.8.0,1.9.0,1.10.0
+"PEcAnRTM",NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,1.4.2,1.4.3,1.4.4,1.4.5,1.4.6,1.4.7,1.4.8,1.4.9,1.4.10,1.4.10.1,1.5.0,1.5.1,1.5.2,1.5.3,1.6.0,1.7.0,1.7.1,1.7.2,1.7.3,1.9.0,1.9.1
diff --git a/base/db/DESCRIPTION b/base/db/DESCRIPTION
index d310c3fc2fd..71cb7dfb1a9 100644
--- a/base/db/DESCRIPTION
+++ b/base/db/DESCRIPTION
@@ -1,7 +1,7 @@
Package: PEcAn.DB
Type: Package
Title: PEcAn Functions Used for Ecological Forecasts and Reanalysis
-Version: 1.8.1
+Version: 1.8.2
Authors@R: c(person("David", "LeBauer", role = c("aut", "cre"),
email = "dlebauer@email.arizona.edu"),
person("Mike", "Dietze", role = c("aut"),
@@ -39,6 +39,8 @@ Description: The Predictive Ecosystem Carbon Analyzer (PEcAn) is a scientific
model parameterization, execution, and analysis. The goal of PECAn is to
streamline the interaction between data and models, and to improve the
efficacy of scientific investigation.
+URL: https://pecanproject.github.io
+BugReports: https://github.com/PecanProject/pecan/issues
Imports:
DBI,
dbplyr (>= 2.4.0),
@@ -79,3 +81,4 @@ LazyLoad: yes
LazyData: FALSE
Encoding: UTF-8
RoxygenNote: 7.3.2
+X-schema.org-keywords: PEcAn, database
diff --git a/base/db/NAMESPACE b/base/db/NAMESPACE
index 938b1cfe2d4..22594703c20 100644
--- a/base/db/NAMESPACE
+++ b/base/db/NAMESPACE
@@ -66,5 +66,6 @@ export(workflows)
importFrom(magrittr,"%>%")
importFrom(rlang,"!!!")
importFrom(rlang,"!!")
+importFrom(rlang,"%||%")
importFrom(rlang,":=")
importFrom(rlang,.data)
diff --git a/base/db/NEWS.md b/base/db/NEWS.md
index 00e29432756..84d9c556aa8 100644
--- a/base/db/NEWS.md
+++ b/base/db/NEWS.md
@@ -1,3 +1,10 @@
+# PEcAn.DB 1.8.2
+
+* Refactored `convert.input()` internals into smaller, and hopefully more testable, chunks. No user-visible changes expected.
+* Roxygen cleanup.
+
+
+
# PEcAn.DB 1.8.1
## License change
diff --git a/base/db/R/check_missing_files.R b/base/db/R/check_missing_files.R
new file mode 100644
index 00000000000..4471ee90404
--- /dev/null
+++ b/base/db/R/check_missing_files.R
@@ -0,0 +1,56 @@
+#' Check for Missing or Empty Files in Conversion Results
+#'
+#' This function inspects the file paths in a list of data frames (typically produced by a download or conversion routine) to ensure that each file is present and non-empty. Specifically, it checks whether any file path is missing or has a file size of zero, and logs an error if such files are detected. It also normalizes `existing.input` and `existing.dbfile` so that each is returned as a list of data frames.
+#'
+#' @param result A list of data frames containing file information. Each data frame is expected to have a column named `file` with absolute file paths created by a data-conversion or download function. For example, this might be the structure returned by a "download_X" or "met2model_X" function when invoked via [convert_input()].
+#' @param existing.input A data frame or list of data frames (possibly zero rows) representing input records in the BETY `inputs` table that match (or partially match) the data being added. This is converted to a list of data frames if it is not already.
+#' @param existing.dbfile A data frame or list of data frames (possibly zero rows) representing dbfile records in the BETY `dbfiles` table that match (or partially match) the data being added. This is also converted to a list of data frames if it is not already.
+#'
+#' @return A list containing:
+#' \itemize{
+#' \item A list of data frames for `existing.input`
+#' \item A list of data frames for `existing.dbfile`
+#' }
+#'
+#' @details
+#' The function calculates the file size for each file specified in the `result` data frames. If any file path is missing (`NA`) or any file size is zero, the function raises a fatal error (via [PEcAn.logger::logger.severe]) indicating that an expected file is either nonexistent or empty. If no such issues are found, it merely ensures that `existing.input` and `existing.dbfile` are each wrapped in a list for consistent downstream usage.
+#'
+#' @author Betsy Cowdery, Michael Dietze, Ankur Desai, Tony Gardella, Luke Dramko
+
+check_missing_files <- function(result, existing.input = NULL, existing.dbfile = NULL) {
+ result_sizes <- purrr::map_dfr(
+ result,
+ ~ dplyr::mutate(
+ .,
+ file_size = purrr::map_dbl(file, file.size),
+ missing = is.na(file_size),
+ empty = file_size == 0
+ )
+ )
+
+ if (any(result_sizes$missing) || any(result_sizes$empty)) {
+ PEcAn.logger::logger.severe(
+ "Requested Processing produced empty files or Nonexistent files:\n",
+ log_format_df(result_sizes[, c(1, 8, 9, 10)]),
+ "\n Table of results printed above.",
+ wrap = FALSE
+ )
+ }
+
+
+ # Wrap in a list for consistent processing later
+ if (is.data.frame(existing.input)) {
+ existing.input <- list(existing.input)
+ }
+
+ if (is.data.frame(existing.dbfile)) {
+ existing.dbfile <- list(existing.dbfile)
+ }
+ return(list(existing.input = existing.input, existing.dbfile = existing.dbfile))
+}
+
+log_format_df <- function(df) {
+ formatted_df <- rbind(colnames(df), format(df))
+ formatted_text <- purrr::reduce(formatted_df, paste, sep = " ")
+ paste(formatted_text, collapse = "\n")
+}
\ No newline at end of file
diff --git a/base/db/R/convert_input.R b/base/db/R/convert_input.R
index 1ff74a13014..f819db2ca93 100644
--- a/base/db/R/convert_input.R
+++ b/base/db/R/convert_input.R
@@ -174,17 +174,15 @@ convert_input <-
# Date/time processing for existing input
existing.input[[i]]$start_date <- lubridate::force_tz(lubridate::as_datetime(existing.input[[i]]$start_date), "UTC")
existing.input[[i]]$end_date <- lubridate::force_tz(lubridate::as_datetime(existing.input[[i]]$end_date), "UTC")
-
+
## Obtain machine information
+ machine.host.info <- get_machine_host(host, con = con)
+ machine.host <- machine.host.info$machine.host
+ machine <- machine.host.info$machine
#Grab machine info of file that exists
existing.machine <- db.query(paste0("SELECT * from machines where id = '",
existing.dbfile[[i]]$machine_id, "'"), con)
- #Grab machine info of host machine
- machine.host <- ifelse(host$name == "localhost", PEcAn.remote::fqdn(), host$name)
- machine <- db.query(paste0("SELECT * from machines where hostname = '",
- machine.host, "'"), con)
-
# If the files aren't on the machine, we have to download them, so "overwrite" is meaningless.
if (existing.machine$id == machine$id) {
@@ -343,33 +341,16 @@ convert_input <-
add = TRUE
) # Close on.exit
}
-
-
-
- #Grab machine info of file that exists
- existing.machine <- db.query(paste0("SELECT * from machines where id = '",
- existing.dbfile$machine_id, "'"), con)
-
- #Grab machine info of host machine
- machine.host <- ifelse(host$name == "localhost", PEcAn.remote::fqdn(), host$name)
- machine <- db.query(paste0("SELECT * from machines where hostname = '",
- machine.host, "'"), con)
-
- if (existing.machine$id != machine$id) {
-
- PEcAn.logger::logger.info("Valid Input record found that spans desired dates, but valid files do not exist on this machine.")
- PEcAn.logger::logger.info("Downloading all years of Valid input to ensure consistency")
- insert.new.file <- TRUE
- start_date <- existing.input$start_date
- end_date <- existing.input$end_date
-
- } else {
- # There's an existing input that spans desired start/end dates with files on this machine
- PEcAn.logger::logger.info("Skipping this input conversion because files are already available.")
- return(list(input.id = existing.input$id, dbfile.id = existing.dbfile$id))
+
+ existing_files_result <- check_and_handle_existing_files(existing.dbfile, host, con, existing.input, start_date, end_date)
+ if (!is.null(existing_files_result$input.id)) {
+ return(existing_files_result)
+ } else {
+ insert.new.file <- existing_files_result$insert.new.file
+ start_date <- existing_files_result$start_date
+ end_date <- existing_files_result$end_date
}
-
-
+
} else {
# No existing record found. Should be good to go with regular conversion.
}
@@ -382,7 +363,7 @@ convert_input <-
if (!is.null(ensemble) && ensemble) {
return.all <-TRUE
- }else{
+ } else {
return.all <- FALSE
}
existing.dbfile <- dbfile.input.check(siteid = site.id,
@@ -469,25 +450,13 @@ convert_input <-
} else if ((start_date >= existing.input$start_date) &&
(end_date <= existing.input$end_date)) {
- #Grab machine info of file that exists
- existing.machine <- db.query(paste0("SELECT * from machines where id = '",
- existing.dbfile$machine_id, "'"), con)
-
- #Grab machine info of
- machine.host <- ifelse(host$name == "localhost", PEcAn.remote::fqdn(), host$name)
- machine <- db.query(paste0("SELECT * from machines where hostname = '",
- machine.host, "'"), con)
-
- if(existing.machine$id != machine$id){
- PEcAn.logger::logger.info("Valid Input record found that spans desired dates, but valid files do not exist on this machine.")
- PEcAn.logger::logger.info("Downloading all years of Valid input to ensure consistency")
- insert.new.file <- TRUE
- start_date <- existing.input$start_date
- end_date <- existing.input$end_date
+ existing_files_result <- check_and_handle_existing_files(existing.dbfile, host, con, existing.input, start_date, end_date)
+ if (!is.null(existing_files_result$input.id)) {
+ return(existing_files_result)
} else {
- # There's an existing input that spans desired start/end dates with files on this machine
- PEcAn.logger::logger.info("Skipping this input conversion because files are already available.")
- return(list(input.id = existing.input$id, dbfile.id = existing.dbfile$id))
+ insert.new.file <- existing_files_result$insert.new.file
+ start_date <- existing_files_result$start_date
+ end_date <- existing_files_result$end_date
}
} else {
@@ -512,66 +481,21 @@ convert_input <-
# we'll need to update its start/end dates .
}
} else {
- # No existing record found. Should be good to go.
+ PEcAn.logger::logger.debug("No existing record found. Should be good to go.")
}
}
#---------------------------------------------------------------------------------------------------------------#
# Get machine information
+ machine.info <- get_machine_info(host, input.args = input.args, input.id = input.id, con = con)
- machine.host <- ifelse(host$name == "localhost", PEcAn.remote::fqdn(), host$name)
- machine <- db.query(paste0("SELECT * from machines where hostname = '",
- machine.host, "'"), con)
-
- if (nrow(machine) == 0) {
- PEcAn.logger::logger.error("machine not found", host$name)
- return(NULL)
- }
-
- if (missing(input.id) || is.na(input.id) || is.null(input.id)) {
- input <- dbfile <- NULL
- } else {
- input <- db.query(paste("SELECT * from inputs where id =", input.id), con)
- if (nrow(input) == 0) {
- PEcAn.logger::logger.error("input not found", input.id)
- return(NULL)
- }
-
- if(!is.null(input.args$dbfile.id)){
- dbfile <-
- db.query(
- paste(
- "SELECT * from dbfiles where id=",input.args$dbfile.id," and container_id =",
- input.id,
- " and container_type = 'Input' and machine_id =",
- machine$id
- ),
- con
- )
- }else{
- dbfile <-
- db.query(
- paste(
- "SELECT * from dbfiles where container_id =",
- input.id,
- " and container_type = 'Input' and machine_id =",
- machine$id
- ),
- con
- )
- }
-
-
-
- if (nrow(dbfile) == 0) {
- PEcAn.logger::logger.error("dbfile not found", input.id)
- return(NULL)
- }
- if (nrow(dbfile) > 1) {
- PEcAn.logger::logger.warn("multiple dbfile records, using last", dbfile)
- dbfile <- dbfile[nrow(dbfile), ]
- }
+ if (is.null(machine.info)) {
+ PEcAn.logger::logger.error("failed lookup of inputs or dbfiles")
+ return(NULL)
}
+ machine <- machine.info$machine
+ input <- machine.info$input
+ dbfile <- machine.info$dbfile
#--------------------------------------------------------------------------------------------------#
# Perform Conversion
@@ -634,143 +558,28 @@ convert_input <-
#--------------------------------------------------------------------------------------------------#
# Check if result has empty or missing files
- result_sizes <- purrr::map_dfr(
- result,
- ~ dplyr::mutate(
- .,
- file_size = purrr::map_dbl(file, file.size),
- missing = is.na(file_size),
- empty = file_size == 0
- )
- )
-
- if (any(result_sizes$missing) || any(result_sizes$empty)){
- log_format_df = function(df){
- rbind(colnames(df), format(df))
- purrr::reduce( paste, sep=" ") %>%
- paste(collapse="\n")
- }
-
- PEcAn.logger::logger.severe(
- "Requested Processing produced empty files or Nonexistant files :\n",
- log_format_df(result_sizes[,c(1,8,9,10)]),
- "\n Table of results printed above.",
- wrap = FALSE)
- }
-
- # Insert into Database
- outlist <- unlist(strsplit(outname, "_"))
-
- # Wrap in a list for consistant processing later
- if (exists("existing.input") && is.data.frame(existing.input)) {
- existing.input <- list(existing.input)
- }
-
- if (exists("existing.dbfile") && is.data.frame(existing.dbfile)) {
- existing.dbfile <- list(existing.dbfile)
- }
+ checked.missing.files <- check_missing_files(result, existing.input, existing.dbfile)
+
+ # Unwrap parameters after performing checks for missing files
+ existing.input <- checked.missing.files$existing.input
+ existing.dbfile <- checked.missing.files$existing.dbfile
#---------------------------------------------------------------#
# New arrangement of database adding code to deal with ensembles.
if (write) {
-
- # Setup newinput. This list will contain two variables: a vector of input IDs and a vector of DB IDs for each entry in result.
- # This list will be returned.
- newinput = list(input.id = NULL, dbfile.id = NULL) #Blank vectors are null.
- for(i in 1:length(result)) { # Master for loop
- id_not_added <- TRUE
-
- if (exists("existing.input") && nrow(existing.input[[i]]) > 0 &&
- (existing.input[[i]]$start_date != start_date || existing.input[[i]]$end_date != end_date)) {
-
- # Updating record with new dates
- db.query(paste0("UPDATE inputs SET start_date='", start_date, "', end_date='",
- end_date, "' WHERE id=", existing.input[[i]]$id),
- con)
- id_not_added = FALSE
-
- # The overall structure of this loop has been set up so that exactly one input.id and one dbfile.id will be written to newinput every interation.
- newinput$input.id = c(newinput$input.id, existing.input[[i]]$id)
- newinput$dbfile.id = c(newinput$dbfile.id, existing.dbfile[[i]]$id)
- }
-
- if (overwrite) {
- # A bit hacky, but need to make sure that all fields are updated to expected
- # values (i.e., what they'd be if convert_input was creating a new record)
- if (exists("existing.input") && nrow(existing.input[[i]]) > 0) {
- db.query(paste0("UPDATE inputs SET name='", basename(dirname(result[[i]]$file[1])),
- "' WHERE id=", existing.input[[i]]$id), con)
-
- }
-
- if (exists("existing.dbfile") && nrow(existing.dbfile[[i]]) > 0) {
- db.query(paste0("UPDATE dbfiles SET file_path='", dirname(result[[i]]$file[1]),
- "', ", "file_name='", result[[i]]$dbfile.name[1],
- "' WHERE id=", existing.dbfile[[i]]$id), con)
-
- }
- }
-
- # If there is no ensemble then for each record there should be one parent
- #But when you have ensembles, all of the members have one parent !!
- if (is.numeric(ensemble)){
- parent.id <- ifelse(is.null(input[i]), NA, input[1]$id)
- }else{
- parent.id <- ifelse(is.null(input[i]), NA, input[i]$id)
- }
-
-
-
- if ("newsite" %in% names(input.args) && !is.null(input.args[["newsite"]])) {
- site.id <- input.args$newsite
- }
-
- if (insert.new.file && id_not_added) {
- dbfile.id <- dbfile.insert(in.path = dirname(result[[i]]$file[1]),
- in.prefix = result[[i]]$dbfile.name[1],
- 'Input', existing.input[[i]]$id,
- con, reuse=TRUE, hostname = machine$hostname)
- newinput$input.id <- c(newinput$input.id, existing.input[[i]]$id)
- newinput$dbfile.id <- c(newinput$dbfile.id, dbfile.id)
- } else if (id_not_added) {
-
- # This is to tell input.insert if we are wrting ensembles
- # Why does it need it ? bc it checks for inputs with the same time period, site and machine
- # and if it returns somethings it does not insert anymore, but for ensembles it needs to bypass this condition
- if (!is.null(ensemble) | is.null(ensemble_name)){
- ens.flag <- TRUE
- }else{
- ens.flag <- FALSE
- }
-
- new_entry <- dbfile.input.insert(in.path = dirname(result[[i]]$file[1]),
- in.prefix = result[[i]]$dbfile.name[1],
- siteid = site.id,
- startdate = start_date,
- enddate = end_date,
- mimetype,
- formatname,
- parentid = parent.id,
- con = con,
- hostname = machine$hostname,
- allow.conflicting.dates = allow.conflicting.dates,
- ens=ens.flag
- )
-
-
- newinput$input.id <- c(newinput$input.id, new_entry$input.id)
- newinput$dbfile.id <- c(newinput$dbfile.id, new_entry$dbfile.id)
- }
-
- } #End for loop
-
- successful <- TRUE
- return(newinput)
- } else {
+ return(
+ update_ensemble_writes(result, con, start_date,
+ end_date, overwrite,
+ insert.new.file, input.args,
+ machine, mimetype, formatname,
+ allow.conflicting.dates, ensemble,
+ ensemble_name, existing.input,
+ existing.dbfile, input)
+ )
+ }
+ # if we got here, nothing left to do
PEcAn.logger::logger.warn("Input was not added to the database")
- successful <- TRUE
return(NULL)
- }
} # convert_input
diff --git a/base/db/R/fetch.stats2se.R b/base/db/R/fetch.stats2se.R
index d0bad8ae465..d856d369374 100644
--- a/base/db/R/fetch.stats2se.R
+++ b/base/db/R/fetch.stats2se.R
@@ -2,14 +2,13 @@
##'
##' Performs query and then uses \code{transformstats} to convert miscellaneous statistical summaries
##' to SE
-##' @name fetch.stats2se
-##' @title Fetch data and transform stats to SE
##' @param connection connection to trait database
##' @param query to send to databse
##' @return dataframe with trait data
-##' @seealso used in \code{\link{query.trait.data}}; \code{\link{transformstats}} performs transformation calculations
+##' @seealso used in \code{\link{query.trait.data}};
+##' \code{\link[PEcAn.utils]{transformstats}} performs transformation calculations
##' @author
-fetch.stats2se <- function(connection, query){
+fetch.stats2se <- function(connection, query) {
transformed <- PEcAn.utils::transformstats(db.query(query = query, con = connection))
return(transformed)
-}
\ No newline at end of file
+}
diff --git a/base/db/R/get.trait.data.R b/base/db/R/get.trait.data.R
index b307da08c0b..377761d931f 100644
--- a/base/db/R/get.trait.data.R
+++ b/base/db/R/get.trait.data.R
@@ -6,7 +6,12 @@
##' - `settings$database$bety`
##' - `settings$database$dbfiles`
##' - `settings$meta.analysis$update`
-##'
+##'
+##' If either `input_file` or `settings$pfts$file_path` is provided,
+##' it should be a valid path to a CSV (with at least columns
+##' `name`, `distn`, `parama`, `paramb`, `n`) and will be used instead of
+##' `database` for trait lookup.
+##'
##' @param pfts the list of pfts to get traits for
##' @param modeltype type of model that is used, this is is used to distinguish
##' between different PFTs with the same name.
@@ -21,30 +26,69 @@
##' @param trait.names Character vector of trait names to search. If
##' `NULL` (default), use all traits that have a prior for at least
##' one of the `pfts`.
+##' @param input_file Path to a CSV file containing prior information.
+##' If specified, `database` is not used.
##' @return list of PFTs with update posteriorids
##' @author David LeBauer, Shawn Serbin, Alexey Shiklomanov
+##' @importFrom rlang %||%
##' @export
-get.trait.data <-
- function(pfts,
- modeltype,
- dbfiles,
- database,
- forceupdate,
- write = FALSE,
- trait.names = NULL) {
-
+get.trait.data <- function(pfts,
+ modeltype,
+ dbfiles,
+ database,
+ forceupdate,
+ write = FALSE,
+ trait.names = NULL,
+ input_file = NULL) {
if (!is.list(pfts)) {
- PEcAn.logger::logger.severe('pfts must be a list')
+ PEcAn.logger::logger.severe("pfts must be a list")
}
# Check that all PFTs have associated outdir entries
- pft_outdirs <- lapply(pfts, '[[', 'outdir')
+ pft_outdirs <- lapply(pfts, "[[", "outdir")
if (any(sapply(pft_outdirs, is.null))) {
- PEcAn.logger::logger.severe('At least one pft in settings is missing its "outdir"')
+ PEcAn.logger::logger.severe("At least one pft in settings is missing its `outdir`")
+ }
+
+ #check for flatfile path, if present use it
+ file_path <- input_file %||% pfts$file_path
+ if (!is.null(file_path)) {
+ if (!file.exists(file_path)) {
+ PEcAn.logger::logger.severe("trait data file not found at specified path", sQuote(file_path))
+ }
+ PEcAn.logger::logger.info("Using flat file for trait data instead of database")
+
+ # Load flat file as data.frame
+ trait_data_flat <- utils::read.csv(file_path, stringsAsFactors = FALSE)
+
+ # Build trait.names from flat file if not already provided
+ if (is.null(trait.names)) {
+ pft_names <- vapply(pfts, "[[", character(1), "name")
+ pft_ids <- unique(trait_data_flat$pft_id[
+ trait_data_flat$pft_name %in% pft_names &
+ trait_data_flat$pft_type == modeltype
+ ])
+ trait.names <- unique(trait_data_flat$trait_name[
+ trait_data_flat$pft_id %in% pft_ids
+ ])
+ }
+
+ # Call get.trait.data.pft with trait_data instead of dbcon
+ result <- lapply(pfts, get.trait.data.pft,
+ modeltype = modeltype,
+ dbfiles = dbfiles,
+ dbcon = NULL,
+ trait_data = trait_data_flat,
+ write = write,
+ forceupdate = forceupdate,
+ trait.names = trait.names)
+
+ return(invisible(result))
}
-
+
+
dbcon <- db.open(database)
on.exit(db.close(dbcon), add = TRUE)
-
+
if (is.null(trait.names)) {
PEcAn.logger::logger.debug(paste0(
"`trait.names` is NULL, so retrieving all traits ",
@@ -55,7 +99,7 @@ get.trait.data <-
# NOTE: Use `format` here to avoid implicit (incorrect) coercion
# to double by `lapply`. This works fine if we switch to
# `query_priors`, but haven't done so yet because that requires
- # prepared statements and therefore requires the Postgres driver.
+ # prepared statements and therefore requires the Postgres driver.
all_priors_list <- lapply(format(pft_ids, scientific = FALSE), query.priors,
con = dbcon, trstr = trait.names)
trait.names <- unique(unlist(lapply(all_priors_list, rownames)))
@@ -63,7 +107,7 @@ get.trait.data <-
# all_priors <- query_priors(pfts, params = database)
# trait.names <- unique(all_priors[["name"]])
}
-
+
# process all pfts
result <- lapply(pfts, get.trait.data.pft,
modeltype = modeltype,
@@ -72,6 +116,6 @@ get.trait.data <-
write = write,
forceupdate = forceupdate,
trait.names = trait.names)
-
+
invisible(result)
-}
\ No newline at end of file
+}
diff --git a/base/db/R/get_machine_info.R b/base/db/R/get_machine_info.R
new file mode 100644
index 00000000000..f3d22bcdbab
--- /dev/null
+++ b/base/db/R/get_machine_info.R
@@ -0,0 +1,104 @@
+#' Get machine information from db
+#' @param host host information
+#' @param input.args input args for existing records
+#' @param input.id input id for existing records
+#' @param con database connection
+#'
+#' @return list of machine, input, and dbfile records
+#' @author Betsy Cowdery, Michael Dietze, Ankur Desai, Tony Gardella, Luke Dramko
+
+get_machine_info <- function(host, input.args, input.id = NULL, con = NULL) {
+
+ machine.host.info <- get_machine_host(host, con = con)
+ machine.host <- machine.host.info$machine.host
+ machine <- machine.host.info$machine
+
+ if (nrow(machine) == 0) {
+ PEcAn.logger::logger.error("machine not found", host$name)
+ return(NULL)
+ }
+
+ if (is.null(input.id) || is.na(input.id)) {
+ input <- dbfile <- NULL
+ } else {
+ input <- db.query(paste("SELECT * from inputs where id =", input.id), con)
+ if (nrow(input) == 0) {
+ PEcAn.logger::logger.error("input not found", input.id)
+ return(NULL)
+ }
+
+ if (!is.null(input.args$dbfile.id)) {
+ dbfile <-
+ db.query(
+ paste(
+ "SELECT * from dbfiles where id=", input.args$dbfile.id, " and container_id =",
+ input.id,
+ " and container_type = 'Input' and machine_id =",
+ machine$id
+ ),
+ con
+ )
+ } else {
+ dbfile <-
+ db.query(
+ paste(
+ "SELECT * from dbfiles where container_id =",
+ input.id,
+ " and container_type = 'Input' and machine_id =",
+ machine$id
+ ),
+ con
+ )
+ }
+
+
+
+ if (nrow(dbfile) == 0) {
+ PEcAn.logger::logger.error("dbfile not found", input.id)
+ return(NULL)
+ }
+ if (nrow(dbfile) > 1) {
+ PEcAn.logger::logger.warn("multiple dbfile records, using last", dbfile)
+ dbfile <- dbfile[nrow(dbfile), ]
+ }
+ }
+
+ return(list(machine = machine, input = input, dbfile = dbfile))
+}
+
+#' Helper function to retrieve machine host and machine information
+#' @param host host information
+#' @param con database connection
+#' @return list of machine host and machine information
+#' @author Abhinav Pandey
+get_machine_host <- function(host, con) {
+ #Grab machine info of host machine
+ machine.host <- ifelse(host$name == "localhost", PEcAn.remote::fqdn(), host$name)
+ machine <- db.query(paste0(
+ "SELECT * from machines where hostname = '",
+ machine.host, "'"
+ ), con)
+
+ list(machine.host = machine.host, machine = machine)
+}
+
+check_and_handle_existing_files <- function(existing.dbfile, host, con, existing.input, start_date, end_date) {
+ # Grab machine info of file that exists
+ existing.machine <- db.query(paste0("SELECT * from machines where id = '",
+ existing.dbfile$machine_id, "'"), con)
+
+ # Grab machine info of host machine
+ machine.host.info <- get_machine_host(host, con = con)
+ machine.host <- machine.host.info$machine.host
+ machine <- machine.host.info$machine
+
+ if (existing.machine$id != machine$id) {
+ PEcAn.logger::logger.info("Valid Input record found that spans desired dates, but valid files do not exist on this machine.")
+ PEcAn.logger::logger.info("Downloading all years of Valid input to ensure consistency")
+ return(list(insert.new.file = TRUE, start_date = existing.input$start_date, end_date = existing.input$end_date))
+ } else {
+ # There's an existing input that spans desired start/end dates with files on this machine
+ PEcAn.logger::logger.info("Skipping this input conversion because files are already available.")
+ return(list(input.id = existing.input$id, dbfile.id = existing.dbfile$id))
+ }
+}
\ No newline at end of file
diff --git a/base/db/R/query.data.R b/base/db/R/query.data.R
index 73ca64a7b81..69742bf8f3c 100644
--- a/base/db/R/query.data.R
+++ b/base/db/R/query.data.R
@@ -1,7 +1,5 @@
##' Function to query data from database for specific species and convert stat to SE
##'
-##' @name query.data
-##' @title Query data and transform stats to SE by calling \code{\link{fetch.stats2se}};
##' @param trait trait to query from the database
##' @param spstr IDs of species to query from, as a single comma-separated string
##' @param con database connection
@@ -9,7 +7,8 @@
##' @param ids_are_cultivars if TRUE, ids is a vector of cultivar IDs, otherwise they are species IDs
##' @param ... extra arguments
##' @param store.unconverted determines whether or not a copy of the mean and stat fields are returned with _unconverted appended to the column names
-##' @seealso used in \code{\link{query.trait.data}}; \code{\link{fetch.stats2se}}; \code{\link{transformstats}} performs transformation calculations
+##' @seealso used in \code{\link{query.trait.data}}; \code{\link{fetch.stats2se}};
+##' \code{\link[PEcAn.utils]{transformstats}} performs transformation calculations
##' @author David LeBauer, Carl Davidson
query.data <- function(
trait,
diff --git a/base/db/R/query.yields.R b/base/db/R/query.yields.R
index dfaef5956dc..bc530601702 100644
--- a/base/db/R/query.yields.R
+++ b/base/db/R/query.yields.R
@@ -1,14 +1,13 @@
##' Function to query yields data from database for specific species and convert stat to SE
##'
-##' @name query.yields
-##' @title Query yield data and transform stats to SE by calling \code{\link{fetch.stats2se}};
##' @param trait yield trait to query
##' @param spstr species to query for yield data
##' @param extra.columns other query terms to pass in. Optional
##' @param con database connection
##' @param ids_are_cultivars if TRUE, spstr contains cultivar IDs, otherwise they are species IDs
##' @param ... extra arguments
-##' @seealso used in \code{\link{query.trait.data}}; \code{\link{fetch.stats2se}}; \code{\link{transformstats}} performs transformation calculations
+##' @seealso used in \code{\link{query.trait.data}}; \code{\link{fetch.stats2se}};
+##' \code{\link[PEcAn.utils]{transformstats}} performs transformation calculations
##' @author
query.yields <- function(trait = 'yield', spstr, extra.columns = '', con = NULL,
ids_are_cultivars = FALSE, ...){
diff --git a/base/db/R/update_ensemble_writes.R b/base/db/R/update_ensemble_writes.R
new file mode 100644
index 00000000000..8076ccb209c
--- /dev/null
+++ b/base/db/R/update_ensemble_writes.R
@@ -0,0 +1,135 @@
+#' Insert or Update Database Records for New or Modified Input Data
+#'
+#'
+#' @description This function is called internally by [convert_input()] to insert or update **input** and **dbfile** records in the PEcAn BETY database after one or more data-conversion or download functions have produced local or remote files. It is specifically intended for use with the output from data-conversion functions called by [convert_input()] (e.g. various "download_X" or "met2model_X" functions), but can be adapted if the return structure matches the requirements below.
+#'
+#' @param result list of data frames, each data frame corresponding to one piece or "chunk" of newly-created data. Typically, these data frames are produced by the function specified in `convert_input(..., fcn=...)`. Each data frame must contain at least: \describe{ \item{file}{Absolute file path(s) to the newly created file(s).} \item{dbfile.name}{The base filename(s) (without leading path) for each corresponding file.} } Additional columns are allowed but unused by this function.
+#' @param con database connection object (as returned by, e.g., \code{\link[DBI]{dbConnect}}).
+#' @param start_date Date or character. The start date of the data (in UTC). Acceptable types include Date objects (`as.Date`) or character strings that can be parsed to a Date via standard R conversions.
+#' @param end_date Date or character. The end date of the data (in UTC). Acceptable types include Date objects (`as.Date`) or character strings that can be parsed to a Date via standard R conversions.
+#' @param overwrite logical. If `TRUE`, any existing database records and files for the same input and date range should be overwritten with the new files. If `FALSE`, existing files are preserved.
+#' @param insert.new.file logical. If `TRUE`, forces the creation of a new **dbfile** entry even if an existing entry is found. Typically used for forecast or ensemble data that may be partially present.
+#' @param input.args list. This is passed from [convert_input()] and contains auxiliary arguments or settings that were passed along internally. It may include items such as `newsite` (integer site ID), among others. Its exact contents are not strictly defined but typically include the arguments provided to `convert_input()`.
+#' @param machine data.frame. Single row describing the machine on which the new data resides. It typically has columns like `id` and `hostname`, indicating the corresponding row in BETY's `machines` table.
+#' @param mimetype character. String indicating the file's MIME type (e.g. `"text/csv"`, `"application/x-netcdf"`, etc.).
+#' @param formatname character. String describing the file format (as listed in BETYdb's `formats` table). For example `"CF Meteorology"`.
+#' @param allow.conflicting.dates logical. If `TRUE`, allows creation or insertion of new file records even if their date range overlaps with existing records. If `FALSE`, overlapping ranges may cause errors or be disallowed.
+#' @param ensemble integer or logical. If an integer > 1, indicates that multiple ensemble members were generated (often for forecast data) and that each member may need separate database entries. If `FALSE`, the data are not an ensemble.
+#' @param ensemble_name character. String providing a descriptive label or identifier for an ensemble member. Typically used if `convert_input()` is called iteratively for each member.
+#' @param existing.input data.frame. Possibly zero rows representing the current record(s) in the `inputs` table that match (or partially match) the data being added. If no matching record exists, an empty data frame is supplied.
+#' @param existing.dbfile data.frame. Possibly zero rows representing the current record(s) in the `dbfiles` table that match (or partially match) the data being added. If no matching record exists, an empty data frame is supplied.
+#' @param input data.frame. Single row with the parent input record from BETYdb, typically including columns like `id`, `start_date`, `end_date`, etc. If the new data are derived from an existing input, this links them in the `parent_id` column of the new entries.
+#'
+#' @return list with two elements: \describe{ \item{input.id}{A numeric vector of new (or updated) input record IDs.} \item{dbfile.id}{A numeric vector of new (or updated) dbfile record IDs.} }
+#'
+#' @details This function consolidates the final step of adding or updating records in the BETY database to reflect newly created data files. It either updates existing `input` and `dbfile` records or creates new records, depending on the provided arguments (`overwrite`, `insert.new.file`, etc.) and whether a matching record already exists. Typically, these records represent model-ready meteorological or other environmental data, after format conversion or downloading has taken place in [convert_input()].
+#'
+#' @author Betsy Cowdery, Michael Dietze, Ankur Desai, Tony Gardella, Luke Dramko
+
+update_ensemble_writes <- function(
+ result, con, start_date,
+ end_date, overwrite,
+ insert.new.file, input.args,
+ machine, mimetype, formatname,
+ allow.conflicting.dates, ensemble,
+ ensemble_name, existing.input,
+ existing.dbfile, input) {
+ # Set up newinput: a list of two vectors (input IDs and dbfile IDs), one entry appended per element of result.
+ # This list will be returned.
+ newinput <- list(input.id = NULL, dbfile.id = NULL) # Blank vectors are null.
+
+ for (i in seq_along(result)) { # Master for loop
+ id_not_added <- TRUE
+
+ if (!is.null(existing.input) && nrow(existing.input[[i]]) > 0 &&
+ (existing.input[[i]]$start_date != start_date || existing.input[[i]]$end_date != end_date)) {
+ # Updating record with new dates
+ db.query(
+ paste0(
+ "UPDATE inputs SET start_date='", start_date, "', end_date='", end_date,
+ "' WHERE id=", existing.input[[i]]$id
+ ),
+ con
+ )
+ id_not_added <- FALSE
+
+ # The overall structure of this loop has been set up so that exactly one input.id and one dbfile.id will be written to newinput every iteration.
+ newinput$input.id <- c(newinput$input.id, existing.input[[i]]$id)
+ newinput$dbfile.id <- c(newinput$dbfile.id, existing.dbfile[[i]]$id)
+ }
+
+ if (overwrite) {
+ # A bit hacky, but need to make sure that all fields are updated to expected values (i.e., what they'd be if convert_input was creating a new record)
+ if (!is.null(existing.input) && nrow(existing.input[[i]]) > 0) {
+ db.query(
+ paste0(
+ "UPDATE dbfiles SET file_path='", dirname(result[[i]]$file[1]),
+ "', file_name='", result[[i]]$dbfile.name[1],
+ "' WHERE id=", existing.dbfile[[i]]$id
+ ),
+ con
+ )
+ }
+
+ if (!is.null(existing.dbfile) && nrow(existing.dbfile[[i]]) > 0) {
+ db.query(paste0(
+ "UPDATE dbfiles SET file_path='", dirname(result[[i]]$file[1]),
+ "', file_name='", result[[i]]$dbfile.name[1],
+ "' WHERE id=", existing.dbfile[[i]]$id
+ ), con)
+ }
+ }
+
+ # If there is no ensemble then for each record there should be one parent
+ # But when you have ensembles, all of the members share a single parent.
+ parent.id <- if (is.numeric(ensemble)) {
+ ifelse(is.null(input[[i]]), NA, input[[1]]$id)
+ } else {
+ ifelse(is.null(input[[i]]), NA, input[[i]]$id)
+ }
+
+
+ if ("newsite" %in% names(input.args) && !is.null(input.args[["newsite"]])) {
+ site.id <- input.args$newsite
+ }
+
+ if (insert.new.file && id_not_added) {
+ dbfile.id <- dbfile.insert(
+ in.path = dirname(result[[i]]$file[1]),
+ in.prefix = result[[i]]$dbfile.name[1],
+ "Input",
+ existing.input[[i]]$id,
+ con,
+ reuse = TRUE,
+ hostname = machine$hostname
+ )
+
+ newinput$input.id <- c(newinput$input.id, existing.input[[i]]$id)
+ newinput$dbfile.id <- c(newinput$dbfile.id, dbfile.id)
+ } else if (id_not_added) {
+ # This is to tell dbfile.input.insert whether we are writing ensembles.
+ # It checks for existing inputs with the same time period, site, and machine and skips the insert
+ # if one is found; ensemble members must bypass that check. NOTE(review): the condition below is TRUE whenever ensemble_name is NULL or ensemble is non-NULL (including FALSE) — confirm this is intended.
+ ens.flag <- if (!is.null(ensemble) || is.null(ensemble_name)) TRUE else FALSE
+
+ new_entry <- dbfile.input.insert(
+ in.path = dirname(result[[i]]$file[1]),
+ in.prefix = result[[i]]$dbfile.name[1],
+ siteid = site.id,
+ startdate = start_date,
+ enddate = end_date,
+ mimetype = mimetype,
+ formatname = formatname,
+ parentid = parent.id,
+ con = con,
+ hostname = machine$hostname,
+ allow.conflicting.dates = allow.conflicting.dates,
+ ens = ens.flag
+ )
+
+ newinput$input.id <- c(newinput$input.id, new_entry$input.id)
+ newinput$dbfile.id <- c(newinput$dbfile.id, new_entry$dbfile.id)
+ }
+ } # End for loop
+ return(newinput)
+}
diff --git a/base/db/man/PEcAn.DB-package.Rd b/base/db/man/PEcAn.DB-package.Rd
index cdd80bdc1d9..db7c298cafa 100644
--- a/base/db/man/PEcAn.DB-package.Rd
+++ b/base/db/man/PEcAn.DB-package.Rd
@@ -8,6 +8,14 @@
\description{
This package provides an interface between PEcAn and the BETY database.
For usage examples, please see \code{vignette("betydb_access")}
+}
+\seealso{
+Useful links:
+\itemize{
+ \item \url{https://pecanproject.github.io}
+ \item Report bugs at \url{https://github.com/PecanProject/pecan/issues}
+}
+
}
\author{
\strong{Maintainer}: David LeBauer \email{dlebauer@email.arizona.edu}
diff --git a/base/db/man/check_missing_files.Rd b/base/db/man/check_missing_files.Rd
new file mode 100644
index 00000000000..35ad0f24331
--- /dev/null
+++ b/base/db/man/check_missing_files.Rd
@@ -0,0 +1,31 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/check_missing_files.R
+\name{check_missing_files}
+\alias{check_missing_files}
+\title{Check for Missing or Empty Files in Conversion Results}
+\usage{
+check_missing_files(result, existing.input = NULL, existing.dbfile = NULL)
+}
+\arguments{
+\item{result}{A list of data frames containing file information. Each data frame is expected to have a column named `file` with absolute file paths created by a data-conversion or download function. For example, this might be the structure returned by a "download_X" or "met2model_X" function when invoked via [convert_input()].}
+
+\item{existing.input}{A data frame or list of data frames (possibly zero rows) representing input records in the BETY `inputs` table that match (or partially match) the data being added. This is converted to a list of data frames if it is not already.}
+
+\item{existing.dbfile}{A data frame or list of data frames (possibly zero rows) representing dbfile records in the BETY `dbfiles` table that match (or partially match) the data being added. This is also converted to a list of data frames if it is not already.}
+}
+\value{
+A list containing:
+\itemize{
+ \item A list of data frames for `existing.input`
+ \item A list of data frames for `existing.dbfile`
+}
+}
+\description{
+This function inspects the file paths in a list of data frames (typically produced by a download or conversion routine) to ensure that each file is present and non-empty. Specifically, it checks whether any file path is missing or has a file size of zero, and logs an error if such files are detected. It also normalizes `existing.input` and `existing.dbfile` so that each is returned as a list of data frames.
+}
+\details{
+The function calculates the file size for each file specified in the `result` data frames. If any file path is missing (`NA`) or any file size is zero, the function raises a fatal error (via [PEcAn.logger::logger.severe]) indicating that an expected file is either nonexistent or empty. If no such issues are found, it merely ensures that `existing.input` and `existing.dbfile` are each wrapped in a list for consistent downstream usage.
+}
+\author{
+Betsy Cowdery, Michael Dietze, Ankur Desai, Tony Gardella, Luke Dramko
+}
diff --git a/base/db/man/fetch.stats2se.Rd b/base/db/man/fetch.stats2se.Rd
index 481bfdd1bee..f55003717c3 100644
--- a/base/db/man/fetch.stats2se.Rd
+++ b/base/db/man/fetch.stats2se.Rd
@@ -2,7 +2,7 @@
% Please edit documentation in R/fetch.stats2se.R
\name{fetch.stats2se}
\alias{fetch.stats2se}
-\title{Fetch data and transform stats to SE}
+\title{Queries data from the trait database and transforms statistics to SE}
\usage{
fetch.stats2se(connection, query)
}
@@ -15,14 +15,12 @@ fetch.stats2se(connection, query)
dataframe with trait data
}
\description{
-Queries data from the trait database and transforms statistics to SE
-}
-\details{
Performs query and then uses \code{transformstats} to convert miscellaneous statistical summaries
to SE
}
\seealso{
-used in \code{\link{query.trait.data}}; \code{\link{transformstats}} performs transformation calculations
+used in \code{\link{query.trait.data}};
+ \code{\link[PEcAn.utils]{transformstats}} performs transformation calculations
}
\author{
diff --git a/base/db/man/get.trait.data.Rd b/base/db/man/get.trait.data.Rd
index 5ff68a92a4a..151a9195504 100644
--- a/base/db/man/get.trait.data.Rd
+++ b/base/db/man/get.trait.data.Rd
@@ -11,7 +11,8 @@ get.trait.data(
database,
forceupdate,
write = FALSE,
- trait.names = NULL
+ trait.names = NULL,
+ input_file = NULL
)
}
\arguments{
@@ -35,6 +36,9 @@ BETYdb. Defaults to FALSE.}
\item{trait.names}{Character vector of trait names to search. If
`NULL` (default), use all traits that have a prior for at least
one of the `pfts`.}
+
+\item{input_file}{Path to a CSV file containing prior information.
+If specified, `database` is not used.}
}
\value{
list of PFTs with update posteriorids
@@ -47,6 +51,12 @@ This will use the following items from settings:
- `settings$database$dbfiles`
- `settings$meta.analysis$update`
}
+\details{
+If either `input_file` or `settings$pfts$file_path` is provided,
+ it should be a valid path to a CSV (with at least columns
+ `name`, `distn`, `parama`, `paramb`, `n`) and will be used instead of
+ `database` for trait lookup.
+}
\author{
David LeBauer, Shawn Serbin, Alexey Shiklomanov
}
diff --git a/base/db/man/get_machine_host.Rd b/base/db/man/get_machine_host.Rd
new file mode 100644
index 00000000000..4dbc2258ab7
--- /dev/null
+++ b/base/db/man/get_machine_host.Rd
@@ -0,0 +1,22 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/get_machine_info.R
+\name{get_machine_host}
+\alias{get_machine_host}
+\title{Helper function to retrieve machine host and machine information}
+\usage{
+get_machine_host(host, con)
+}
+\arguments{
+\item{host}{host information}
+
+\item{con}{database connection}
+}
+\value{
+list of machine host and machine information
+}
+\description{
+Helper function to retrieve machine host and machine information
+}
+\author{
+Abhinav Pandey
+}
diff --git a/base/db/man/get_machine_info.Rd b/base/db/man/get_machine_info.Rd
new file mode 100644
index 00000000000..68221a9c565
--- /dev/null
+++ b/base/db/man/get_machine_info.Rd
@@ -0,0 +1,26 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/get_machine_info.R
+\name{get_machine_info}
+\alias{get_machine_info}
+\title{Get machine information from db}
+\usage{
+get_machine_info(host, input.args, input.id = NULL, con = NULL)
+}
+\arguments{
+\item{host}{host information}
+
+\item{input.args}{input args for existing records}
+
+\item{input.id}{input id for existing records}
+
+\item{con}{database connection}
+}
+\value{
+list of machine, input, and dbfile records
+}
+\description{
+Get machine information from db
+}
+\author{
+Betsy Cowdery, Michael Dietze, Ankur Desai, Tony Gardella, Luke Dramko
+}
diff --git a/base/db/man/query.data.Rd b/base/db/man/query.data.Rd
index 8d313ef0d37..716b49985c0 100644
--- a/base/db/man/query.data.Rd
+++ b/base/db/man/query.data.Rd
@@ -2,7 +2,7 @@
% Please edit documentation in R/query.data.R
\name{query.data}
\alias{query.data}
-\title{Query data and transform stats to SE by calling \code{\link{fetch.stats2se}};}
+\title{Function to query data from database for specific species and convert stat to SE}
\usage{
query.data(
trait,
@@ -34,7 +34,8 @@ query.data(
Function to query data from database for specific species and convert stat to SE
}
\seealso{
-used in \code{\link{query.trait.data}}; \code{\link{fetch.stats2se}}; \code{\link{transformstats}} performs transformation calculations
+used in \code{\link{query.trait.data}}; \code{\link{fetch.stats2se}};
+ \code{\link[PEcAn.utils]{transformstats}} performs transformation calculations
}
\author{
David LeBauer, Carl Davidson
diff --git a/base/db/man/query.yields.Rd b/base/db/man/query.yields.Rd
index 1d5d224cd4f..2ca476ea371 100644
--- a/base/db/man/query.yields.Rd
+++ b/base/db/man/query.yields.Rd
@@ -2,7 +2,7 @@
% Please edit documentation in R/query.yields.R
\name{query.yields}
\alias{query.yields}
-\title{Query yield data and transform stats to SE by calling \code{\link{fetch.stats2se}};}
+\title{Function to query yields data from database for specific species and convert stat to SE}
\usage{
query.yields(
trait = "yield",
@@ -30,7 +30,8 @@ query.yields(
Function to query yields data from database for specific species and convert stat to SE
}
\seealso{
-used in \code{\link{query.trait.data}}; \code{\link{fetch.stats2se}}; \code{\link{transformstats}} performs transformation calculations
+used in \code{\link{query.trait.data}}; \code{\link{fetch.stats2se}};
+ \code{\link[PEcAn.utils]{transformstats}} performs transformation calculations
}
\author{
diff --git a/base/db/man/update_ensemble_writes.Rd b/base/db/man/update_ensemble_writes.Rd
new file mode 100644
index 00000000000..587de12b1b3
--- /dev/null
+++ b/base/db/man/update_ensemble_writes.Rd
@@ -0,0 +1,72 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/update_ensemble_writes.R
+\name{update_ensemble_writes}
+\alias{update_ensemble_writes}
+\title{Insert or Update Database Records for New or Modified Input Data}
+\usage{
+update_ensemble_writes(
+ result,
+ con,
+ start_date,
+ end_date,
+ overwrite,
+ insert.new.file,
+ input.args,
+ machine,
+ mimetype,
+ formatname,
+ allow.conflicting.dates,
+ ensemble,
+ ensemble_name,
+ existing.input,
+ existing.dbfile,
+ input
+)
+}
+\arguments{
+\item{result}{list of data frames, each data frame corresponding to one piece or "chunk" of newly-created data. Typically, these data frames are produced by the function specified in `convert_input(..., fcn=...)`. Each data frame must contain at least: \describe{ \item{file}{Absolute file path(s) to the newly created file(s).} \item{dbfile.name}{The base filename(s) (without leading path) for each corresponding file.} } Additional columns are allowed but unused by this function.}
+
+\item{con}{database connection object (as returned by, e.g., \code{\link[DBI]{dbConnect}}).}
+
+\item{start_date}{Date or character. The start date of the data (in UTC). Acceptable types include Date objects (`as.Date`) or character strings that can be parsed to a Date via standard R conversions.}
+
+\item{end_date}{Date or character. The end date of the data (in UTC). Acceptable types include Date objects (`as.Date`) or character strings that can be parsed to a Date via standard R conversions.}
+
+\item{overwrite}{logical. If `TRUE`, any existing database records and files for the same input and date range should be overwritten with the new files. If `FALSE`, existing files are preserved.}
+
+\item{insert.new.file}{logical. If `TRUE`, forces the creation of a new **dbfile** entry even if an existing entry is found. Typically used for forecast or ensemble data that may be partially present.}
+
+\item{input.args}{list. This is passed from [convert_input()] and contains auxiliary arguments or settings that were passed along internally. It may include items such as `newsite` (integer site ID), among others. Its exact contents are not strictly defined but typically include the arguments provided to `convert_input()`.}
+
+\item{machine}{data.frame. Single row describing the machine on which the new data resides. It typically has columns like `id` and `hostname`, indicating the corresponding row in BETY's `machines` table.}
+
+\item{mimetype}{character. String indicating the file's MIME type (e.g. `"text/csv"`, `"application/x-netcdf"`, etc.).}
+
+\item{formatname}{character. String describing the file format (as listed in BETYdb's `formats` table). For example `"CF Meteorology"`.}
+
+\item{allow.conflicting.dates}{logical. If `TRUE`, allows creation or insertion of new file records even if their date range overlaps with existing records. If `FALSE`, overlapping ranges may cause errors or be disallowed.}
+
+\item{ensemble}{integer or logical. If an integer > 1, indicates that multiple ensemble members were generated (often for forecast data) and that each member may need separate database entries. If `FALSE`, the data are not an ensemble.}
+
+\item{ensemble_name}{character. String providing a descriptive label or identifier for an ensemble member. Typically used if `convert_input()` is called iteratively for each member.}
+
+\item{existing.input}{data.frame. Possibly zero rows representing the current record(s) in the `inputs` table that match (or partially match) the data being added. If no matching record exists, an empty data frame is supplied.}
+
+\item{existing.dbfile}{data.frame. Possibly zero rows representing the current record(s) in the `dbfiles` table that match (or partially match) the data being added. If no matching record exists, an empty data frame is supplied.}
+
+\item{input}{data.frame. Single row with the parent input record from BETYdb, typically including columns like `id`, `start_date`, `end_date`, etc. If the new data are derived from an existing input, this links them in the `parent_id` column of the new entries.}
+}
+\value{
+list with two elements: \describe{ \item{input.id}{A numeric vector of new (or updated) input record IDs.} \item{dbfile.id}{A numeric vector of new (or updated) dbfile record IDs.} }
+}
+\description{
+This function is called internally by [convert_input()] to insert or update **input** and **dbfile** records in the PEcAn BETY database after one or more data-conversion or download functions have produced local or remote files. It is specifically intended for use with the output from data-conversion functions called by [convert_input()] (e.g. various "download_X" or "met2model_X" functions), but can be adapted if the return structure matches the requirements below.
+}
+\details{
+Insert or Update Database Records for New or Modified Input Data
+
+This function consolidates the final step of adding or updating records in the BETY database to reflect newly created data files. It either updates existing `input` and `dbfile` records or creates new records, depending on the provided arguments (`overwrite`, `insert.new.file`, etc.) and whether a matching record already exists. Typically, these records represent model-ready meteorological or other environmental data, after format conversion or downloading has taken place in [convert_input()].
+}
+\author{
+Betsy Cowdery, Michael Dietze, Ankur Desai, Tony Gardella, Luke Dramko
+}
diff --git a/base/db/tests/testthat/test-query.pft.R b/base/db/tests/testthat/test-query.pft.R
index 0b48a185071..7ef4eb3f934 100644
--- a/base/db/tests/testthat/test-query.pft.R
+++ b/base/db/tests/testthat/test-query.pft.R
@@ -36,7 +36,6 @@ test_that("nonexistant PFTs and modeltypes return empty dataframes", {
test_that("query.pft_cultivars finds cultivars for a PFT", {
- skip("Disabled until Travis bety contains Pavi_alamo and Pavi_all (#1958)")
one_cv <- query.pft_cultivars(pft = "Pavi_alamo", modeltype = NULL, con)
expect_is(one_cv, "data.frame")
expect_equal(nrow(one_cv), 1)
diff --git a/base/db/tests/testthat/test.check.missing.files.R b/base/db/tests/testthat/test.check.missing.files.R
new file mode 100644
index 00000000000..2dd4d2a9156
--- /dev/null
+++ b/base/db/tests/testthat/test.check.missing.files.R
@@ -0,0 +1,21 @@
+test_that("`check_missing_files()` able to return correct missing files", {
+ # Mock `file.size`
+ mocked_size <- mockery::mock(100, 200)
+ mockery::stub(check_missing_files, "file.size", mocked_size)
+
+ res <- check_missing_files(
+ result = list(data.frame(file = c("A", "B"))),
+ existing.input = data.frame(),
+ existing.dbfile = data.frame()
+ )
+
+ # Check that result has expected structure
+ expect_equal(length(res), 2)
+ expect_true(is.list(res[[1]]))
+ expect_true(is.list(res[[2]]))
+
+ # Verify mock was called correctly
+ mockery::expect_called(mocked_size, 2)
+ expect_equal(mockery::mock_args(mocked_size)[[1]], list("A"))
+ expect_equal(mockery::mock_args(mocked_size)[[2]], list("B"))
+})
diff --git a/base/db/tests/testthat/test.convert_input.R b/base/db/tests/testthat/test.convert_input.R
index c2e7f49c1e9..c6c27bde320 100644
--- a/base/db/tests/testthat/test.convert_input.R
+++ b/base/db/tests/testthat/test.convert_input.R
@@ -1,10 +1,26 @@
test_that("`convert_input()` able to call the respective download function for a data item with the correct arguments", {
mocked_res <- mockery::mock(list(c("A", "B")))
- mockery::stub(convert_input, 'dbfile.input.check', data.frame())
- mockery::stub(convert_input, 'db.query', data.frame(id = 1))
- mockery::stub(convert_input, 'PEcAn.remote::remote.execute.R', mocked_res)
- mockery::stub(convert_input, 'purrr::map_dfr', data.frame(missing = c(FALSE), empty = c(FALSE)))
+ mockery::stub(convert_input, "dbfile.input.check", data.frame())
+ mockery::stub(convert_input, "db.query", data.frame(id = 1))
+ mockery::stub(convert_input, "get_machine_info", list(
+ machine = data.frame(id = 1),
+ input = data.frame(id = 1),
+ dbfile = data.frame(id = 1)
+ ))
+ mockery::stub(convert_input, "PEcAn.remote::remote.execute.R", mocked_res)
+ mockery::stub(convert_input, "check_missing_files", list(
+ result_sizes = data.frame(
+ file = c("A", "B"),
+ file_size = c(100, 200),
+ missing = c(FALSE, FALSE),
+ empty = c(FALSE, FALSE)
+ ),
+ outlist = "test",
+ existing.input = list(data.frame(file = character(0))),
+ existing.dbfile = list(data.frame(file = character(0)))
+ ))
+ mockery::stub(convert_input, "update_ensemble_writes", list(input.id = 1, dbfile.id = 1))
convert_input(
input.id = NA,
@@ -14,18 +30,18 @@ test_that("`convert_input()` able to call the respective download function for a
site.id = 1,
start_date = "2011-01-01",
end_date = "2011-12-31",
- pkg = 'PEcAn.data.atmosphere',
- fcn = 'download.AmerifluxLBL',
+ pkg = "PEcAn.data.atmosphere",
+ fcn = "download.AmerifluxLBL",
con = NULL,
host = data.frame(name = "localhost"),
write = FALSE,
lat.in = 40,
lon.in = -88
)
-
+
args <- mockery::mock_args(mocked_res)
expect_equal(
- args[[1]]$script,
+ args[[1]]$script,
"PEcAn.data.atmosphere::download.AmerifluxLBL(lat.in=40, lon.in=-88, overwrite=FALSE, outfolder='test/', start_date='2011-01-01', end_date='2011-12-31')"
)
})
@@ -35,4 +51,4 @@ test_that("`.get.file.deletion.commands()` able to return correct file deletion
expect_equal(res$move.to.tmp, "dir.create(c('./tmp'), recursive=TRUE, showWarnings=FALSE); file.rename(from=c('test'), to=c('./tmp/test'))")
expect_equal(res$delete.tmp, "unlink(c('./tmp'), recursive=TRUE)")
expect_equal(res$replace.from.tmp, "file.rename(from=c('./tmp/test'), to=c('test'));unlink(c('./tmp'), recursive=TRUE)")
-})
\ No newline at end of file
+})
diff --git a/base/db/tests/testthat/test.query.dplyr.R b/base/db/tests/testthat/test.query.dplyr.R
index cc3e6436eea..9edbc3fbfc7 100644
--- a/base/db/tests/testthat/test.query.dplyr.R
+++ b/base/db/tests/testthat/test.query.dplyr.R
@@ -1,12 +1,15 @@
test_that("`fancy_scientific()` converts numbers to scientific expressions with proper formatting", {
result <- fancy_scientific(1234567890)
- expect_equal(result, expression("1.234568" %*% 10^+9))
+ expected <- expression("1.234568" %*% 10^+9)
+ expect_equal(deparse(result[[1]]), deparse(expected[[1]]))
result <- fancy_scientific(0.00000123)
- expect_equal(result, expression("1.23" %*% 10^-6))
+ expected <- expression("1.23" %*% 10^-6)
+ expect_equal(deparse(result[[1]]), deparse(expected[[1]]))
result <- fancy_scientific(1e-20)
- expect_equal(result, expression("1" %*% 10^-20))
+ expected <- expression("1" %*% 10^-20)
+ expect_equal(deparse(result[[1]]), deparse(expected[[1]]))
})
test_that("`dplyr.count()` returns the correct count of rows in a dataframe", {
diff --git a/base/logger/DESCRIPTION b/base/logger/DESCRIPTION
index ac12a0f090f..d117ffb90b3 100644
--- a/base/logger/DESCRIPTION
+++ b/base/logger/DESCRIPTION
@@ -1,6 +1,6 @@
Package: PEcAn.logger
Title: Logger Functions for 'PEcAn'
-Version: 1.8.3
+Version: 1.8.4
Authors@R: c(person("Rob", "Kooper", role = c("aut", "cre"),
email = "kooper@illinois.edu"),
person("Alexey", "Shiklomanov", role = c("aut"),
@@ -19,8 +19,8 @@ Description: Convenience functions for logging outputs from 'PEcAn',
and lenience when running large batches of simulations that should not be
terminated by errors in individual models. It is loosely based on
the 'log4j' package.
-BugReports: https://github.com/PecanProject/pecan/issues
URL: https://pecanproject.github.io/
+BugReports: https://github.com/PecanProject/pecan/issues
Imports:
utils,
stringi
@@ -31,3 +31,4 @@ License: BSD_3_clause + file LICENSE
Encoding: UTF-8
RoxygenNote: 7.3.2
Roxygen: list(markdown = TRUE)
+X-schema.org-keywords: PEcAn, logger, error-handling, logging-levels, message-filtering
diff --git a/base/logger/NEWS.md b/base/logger/NEWS.md
index 349ac5740e5..ec27e5145b4 100644
--- a/base/logger/NEWS.md
+++ b/base/logger/NEWS.md
@@ -1,23 +1,38 @@
+# PEcAn.logger 1.8.4
+
+- All `logger.set*` functions now invisibly return the previously set value. This can be handy for restoring settings after a temporary change.
+- Multipart logger messages passed in `...` now get a more robust conversion to string before concatenating, hopefully giving nicer results for nontext objects such as dates and lists.
+
+
+
# PEcAn.logger 1.8.3
- Maintenance release with no user-visible changes.
+
+
# PEcAn.logger 1.8.2
- `PEcAn.logger::setLevel()` now invisibly returns the previously set logger level
+
+
# PEcAn.logger 1.8.1
## Re-licensing
* PEcAn.logger is now distributed as BSD-3 license. Prior to this the package was released under the NCSA Opensource license from 2009-2021.
+
+
# PEcAn.logger 1.8.0
## Fixed
* Logger calls no longer create a stray `dump.log` object in the global environment
+
+
# PEcAn.logger 1.7.1
* All changes in 1.7.1 and earlier were recorded in a single file for all of the PEcAn packages; please see
diff --git a/base/logger/R/logger.R b/base/logger/R/logger.R
index ee0fc9c8e62..722c9e2aca4 100644
--- a/base/logger/R/logger.R
+++ b/base/logger/R/logger.R
@@ -4,12 +4,12 @@
.utils.logger$stderr <- TRUE
.utils.logger$quit <- FALSE
.utils.logger$level <- 0
-.utils.logger$width <- ifelse(getOption("width") < 10,
- getOption("width"),
+.utils.logger$width <- ifelse(getOption("width") < 10,
+ getOption("width"),
getOption("width") - 5)
##' Prints a debug message.
-##'
+##'
##' This function will print a debug message.
##'
##' @param msg the message that should be printed.
@@ -26,7 +26,7 @@ logger.debug <- function(msg, ...) {
##' Prints an informational message.
-##'
+##'
##' This function will print an informational message.
##'
##' @param msg the message that should be printed.
@@ -43,7 +43,7 @@ logger.info <- function(msg, ...) {
##' Prints a warning message.
-##'
+##'
##' This function will print a warning message.
##'
##' @param msg the message that should be printed.
@@ -60,7 +60,7 @@ logger.warn <- function(msg, ...) {
##' Prints an error message.
-##'
+##'
##' This function will print an error message.
##'
##' @param msg the message that should be printed.
@@ -76,14 +76,13 @@ logger.error <- function(msg, ...) {
} # logger.error
-##' Prints an severe message and stops execution.
-##'
+##' Prints a severe message and stops execution.
+##'
##' This function will print a message and stop execution of the code. This
##' should only be used if the application should terminate.
-##'
-##' set \code{\link{logger.setQuitOnSevere}(FALSE)} to avoid terminating
-##' the session. This is set by default to TRUE if interactive or running
-##' inside Rstudio.
+##'
+##' Set \code{\link{logger.setQuitOnSevere}(FALSE)} to avoid terminating
+##' the session. The default is to not quit if running interactively.
##'
##' @param msg the message that should be printed.
##' @param ... any additional text that should be printed.
@@ -96,13 +95,13 @@ logger.error <- function(msg, ...) {
##' }
logger.severe <- function(msg, ..., wrap = TRUE) {
logger.message("SEVERE", msg, ...)
-
+
# run option
error <- getOption("error")
if (!is.null(error)) {
eval(error)
}
-
+
# quit if not interactive, otherwise use stop
if (.utils.logger$quit) {
quit(save = "no", status = 1)
@@ -113,11 +112,13 @@ logger.severe <- function(msg, ..., wrap = TRUE) {
##' Prints a message at a certain log level.
-##'
-##' This function will print a message. This is the function that is responsible for
-##' the actual printing of the message.
##'
-##' This is a place holder and will be later filled in with a more complex logging set
+##' This function will print a message. This is the function that is responsible
+##' for the actual printing of the message.
+##'
+##' This is a place holder and will be later filled in with a more complex
+##' logging set
+##'
##' @param level the level of the message (DEBUG, INFO, WARN, ERROR)
##' @param msg the message that should be printed.
##' @param ... any additional text that should be printed.
@@ -138,24 +139,28 @@ logger.message <- function(level, msg, ..., wrap = TRUE) {
if (length(func) == 0) {
func <- "console"
}
-
+
stamp.text <- sprintf("%s %-6s [%s] :", Sys.time(), level, func)
- long.msg <- stringi::stri_trans_general(paste(c(msg, ...), collapse = " "), "latin-ascii")
+ args <- sapply(list(...), FUN = toString)
+ long.msg <- stringi::stri_trans_general(
+ paste(c(msg, args), collapse = " "),
+ "latin-ascii"
+ )
if (nchar(long.msg) > 20 && wrap) {
- new.msg <- paste("\n", strwrap(long.msg, width = .utils.logger$width,
+ new.msg <- paste("\n", strwrap(long.msg, width = .utils.logger$width,
indent = 2, exdent = 2), collapse = " ")
} else {
new.msg <- long.msg
}
text <- paste(stamp.text, new.msg, "\n")
-
+
if (.utils.logger$console) {
if (.utils.logger$stderr) {
cat(text, file = stderr())
} else {
cat(text, file = stdout())
}
-
+
}
if (!is.na(.utils.logger$filename)) {
cat(text, file = .utils.logger$filename, append = TRUE)
@@ -165,11 +170,18 @@ logger.message <- function(level, msg, ..., wrap = TRUE) {
##' Configure logging level.
-##'
+##'
##' This will configure the logger level. This allows to turn DEBUG, INFO,
-##' WARN and ERROR messages on and off.
+##' WARN, ERROR, and SEVERE messages on and off.
+##'
+##' Note that this controls _printing_ of messages and does not change other behavior.
+##' In particular, suppressing SEVERE by setting the level to "OFF" does not prevent
+##' logger.severe() from signaling an error (and terminating the program if
+##' `logger.setQuitOnSevere(TRUE)`).
+##'
+##' @param level the level of the message. One of "ALL", "DEBUG", "INFO", "WARN",
+##' "ERROR", "SEVERE", or "OFF".
##'
-##' @param level the level of the message (ALL, DEBUG, INFO, WARN, ERROR, OFF)
##' @export
##' @return When logger level is set, the previous level is returned invisibly.
##' This can be passed to `logger.setLevel()` to restore the previous level.
@@ -177,23 +189,37 @@ logger.message <- function(level, msg, ..., wrap = TRUE) {
##' @examples
##' \dontrun{
##' logger.setLevel('DEBUG')
+##'
+##' # Temporarily turn logger off
+##' old_logger_level <- logger.setLevel("OFF")
+##' # code here
+##' logger.setLevel(old_logger_level)
##' }
logger.setLevel <- function(level) {
original_level <- logger.getLevel()
.utils.logger$level <- logger.getLevelNumber(level)
+
invisible(original_level)
} # logger.setLevel
## Given the string representation this will return the numeric value
-## DEBUG = 10
-## INFO = 20
-## WARN = 30
-## ERROR = 40
-## ALL = 99
##
-##@return level the level of the message
-##@author Rob Kooper
+## Supported levels
+## ALL = 0
+## DEBUG = 10
+## INFO = 20
+## WARN = 30
+## ERROR = 40
+## SEVERE = 50
+## OFF = 60
+##
+## SEVERE is treated as more serious than ERROR,
+## and will terminate the session if `logger.setQuitOnSevere(TRUE)`
+## or call stop() otherwise
+##
+## @return level the level of the message
+## @author Rob Kooper
logger.getLevelNumber <- function(level) {
if (toupper(level) == "ALL") {
return(0)
@@ -206,7 +232,7 @@ logger.getLevelNumber <- function(level) {
} else if (toupper(level) == "ERROR") {
return(40)
} else if (toupper(level) == "SEVERE") {
- return(40)
+ return(50)
} else if (toupper(level) == "OFF") {
return(60)
} else {
@@ -217,10 +243,15 @@ logger.getLevelNumber <- function(level) {
##' Get configured logging level.
+##'
+##' This will return the current level configured of the logging messages.
##'
-##' This will return the current level configured of the logging messages
+##' Note that `logger.setLevel()` invisibly returns current level, so
+##' `logger.getLevel()` is not required to restore the level after a
+##' temporary change.
##'
-##' @return level the level of the message (ALL, DEBUG, INFO, WARN, ERROR, OFF)
+##' @return A string giving the lowest message level that will be reported, one of
+##' "ALL", "DEBUG", "INFO", "WARN", "ERROR", "SEVERE", or "OFF".
##' @export
##' @author Rob Kooper
##' @examples
@@ -247,11 +278,14 @@ logger.getLevel <- function() {
##' Configure logging to console.
-##'
+##'
##' Should the logging to be printed to the console or not.
##'
##' @param console set to true to print logging to console.
-##' @param stderr set to true (default) to use stderr instead of stdout for logging
+##' @param stderr set to true (default) to log to stderr instead of stdout
+##' @return Invisibly, a list of the previously set values of `console`
+##' and `stderr`. This can be used to restore the previous settings after a
+##' temporary change.
##' @export
##' @author Rob Kooper
##' @examples
@@ -259,16 +293,22 @@ logger.getLevel <- function() {
##' logger.setUseConsole(TRUE)
##' }
logger.setUseConsole <- function(console, stderr = TRUE) {
+ old <- list(console = .utils.logger$console, stderr = .utils.logger$stderr)
.utils.logger$console <- console
.utils.logger$stderr <- stderr
+
+ invisible(old)
} # logger.setUseConsole
##' Configure logging output filename.
-##'
+##'
##' The name of the file where the logging information should be written to.
##'
-##' @param filename the file to send the log messages to (or NA to not write to file)
+##' @param filename the file to send the log messages to
+##' (or NA to not write to file)
+##' @return Invisibly, the previously set filename.
+##' This can be used to restore settings after a temporary change.
##' @export
##' @author Rob Kooper
##' @examples
@@ -276,25 +316,32 @@ logger.setUseConsole <- function(console, stderr = TRUE) {
##' logger.setOutputFile('pecan.log')
##' }
logger.setOutputFile <- function(filename) {
+ old <- .utils.logger$filename
.utils.logger$filename <- filename
+
+ invisible(old)
} # logger.setOutputFile
##' Configure whether severe should quit.
-##'
-##' The default is for a non-interactive session to quit. Setting this to false is
-##' especially useful for running tests when placed in \code{inst/tests/test..R},
-##' but is not passed from \code{tests/run.all.R}.
+##'
+##' The default is for a non-interactive session to quit.
+##' Setting this to false is especially useful for running tests.
##'
##' @param severeQuits should R quit on a severe error.
##' @export
+##' @return invisibly, the previous value of `severeQuits`.
+##' This can be used to restore settings after a temporary change.
##' @author Rob Kooper
##' @examples
##' \dontrun{
##' logger.setQuitOnSevere(FALSE)
##' }
logger.setQuitOnSevere <- function(severeQuits) {
+ old <- .utils.logger$quit
.utils.logger$quit <- severeQuits
+
+ invisible(old)
} # logger.setQuitOnSevere
@@ -304,6 +351,8 @@ logger.setQuitOnSevere <- function(severeQuits) {
##' wrap the line when printing a message at that many chars.
##'
##' @param width number of chars to print before wrapping to next line.
+##' @return Invisibly, the previously set width.
+##' This can be used to restore settings after a temporary change.
##' @export
##' @author David LeBauer
##' @examples
@@ -311,5 +360,8 @@ logger.setQuitOnSevere <- function(severeQuits) {
##' logger.setWidth(70)
##' }
logger.setWidth <- function(width) {
+ old <- .utils.logger$width
.utils.logger$width <- width
+
+ invisible(old)
} # logger.setWidth
diff --git a/base/logger/man/logger.getLevel.Rd b/base/logger/man/logger.getLevel.Rd
index 94f5981b74a..196457f75c7 100644
--- a/base/logger/man/logger.getLevel.Rd
+++ b/base/logger/man/logger.getLevel.Rd
@@ -7,10 +7,16 @@
logger.getLevel()
}
\value{
-level the level of the message (ALL, DEBUG, INFO, WARN, ERROR, OFF)
+A string giving the lowest message level that will be reported, one of
+"ALL", "DEBUG", "INFO", "WARN", "ERROR", "SEVERE", or "OFF".
}
\description{
-This will return the current level configured of the logging messages
+This will return the current level configured of the logging messages.
+}
+\details{
+Note that \code{logger.setLevel()} invisibly returns current level, so
+\code{logger.getLevel()} is not required to restore the level after a
+temporary change.
}
\examples{
\dontrun{
diff --git a/base/logger/man/logger.message.Rd b/base/logger/man/logger.message.Rd
index 88f95648da2..309dc63715c 100644
--- a/base/logger/man/logger.message.Rd
+++ b/base/logger/man/logger.message.Rd
@@ -18,11 +18,12 @@ logger.message(level, msg, ..., wrap = TRUE)
for specifically formatted error messages.}
}
\description{
-This function will print a message. This is the function that is responsible for
-the actual printing of the message.
+This function will print a message. This is the function that is responsible
+for the actual printing of the message.
}
\details{
-This is a place holder and will be later filled in with a more complex logging set
+This is a place holder and will be later filled in with a more complex
+logging set
}
\examples{
\dontrun{
diff --git a/base/logger/man/logger.setLevel.Rd b/base/logger/man/logger.setLevel.Rd
index bb68aa5c82b..ea92f1a4aac 100644
--- a/base/logger/man/logger.setLevel.Rd
+++ b/base/logger/man/logger.setLevel.Rd
@@ -7,7 +7,8 @@
logger.setLevel(level)
}
\arguments{
-\item{level}{the level of the message (ALL, DEBUG, INFO, WARN, ERROR, OFF)}
+\item{level}{the level of the message. One of "ALL", "DEBUG", "INFO", "WARN",
+"ERROR", "SEVERE", or "OFF".}
}
\value{
When logger level is set, the previous level is returned invisibly.
@@ -15,11 +16,22 @@ This can be passed to \code{logger.setLevel()} to restore the previous level.
}
\description{
This will configure the logger level. This allows to turn DEBUG, INFO,
-WARN and ERROR messages on and off.
+WARN, ERROR, and SEVERE messages on and off.
+}
+\details{
+Note that this controls \emph{printing} of messages and does not change other behavior.
+In particular, suppressing SEVERE by setting the level to "OFF" does not prevent
+logger.severe() from signaling an error (and terminating the program if
+\code{logger.setQuitOnSevere(TRUE)}).
}
\examples{
\dontrun{
logger.setLevel('DEBUG')
+
+# Temporarily turn logger off
+old_logger_level <- logger.setLevel("OFF")
+# code here
+logger.setLevel(old_logger_level)
}
}
\author{
diff --git a/base/logger/man/logger.setOutputFile.Rd b/base/logger/man/logger.setOutputFile.Rd
index 95b9d0afb34..6809333bd6a 100644
--- a/base/logger/man/logger.setOutputFile.Rd
+++ b/base/logger/man/logger.setOutputFile.Rd
@@ -7,7 +7,12 @@
logger.setOutputFile(filename)
}
\arguments{
-\item{filename}{the file to send the log messages to (or NA to not write to file)}
+\item{filename}{the file to send the log messages to
+(or NA to not write to file)}
+}
+\value{
+Invisibly, the previously set filename.
+This can be used to restore settings after a temporary change.
}
\description{
The name of the file where the logging information should be written to.
diff --git a/base/logger/man/logger.setQuitOnSevere.Rd b/base/logger/man/logger.setQuitOnSevere.Rd
index 344bd843a9e..f19e7d5edd1 100644
--- a/base/logger/man/logger.setQuitOnSevere.Rd
+++ b/base/logger/man/logger.setQuitOnSevere.Rd
@@ -9,10 +9,13 @@ logger.setQuitOnSevere(severeQuits)
\arguments{
\item{severeQuits}{should R quit on a severe error.}
}
+\value{
+invisibly, the previous value of \code{severeQuits}.
+This can be used to restore settings after a temporary change.
+}
\description{
-The default is for a non-interactive session to quit. Setting this to false is
-especially useful for running tests when placed in \code{inst/tests/test..R},
-but is not passed from \code{tests/run.all.R}.
+The default is for a non-interactive session to quit.
+Setting this to false is especially useful for running tests.
}
\examples{
\dontrun{
diff --git a/base/logger/man/logger.setUseConsole.Rd b/base/logger/man/logger.setUseConsole.Rd
index cdb0b51a6bb..a6b3b9c7b5d 100644
--- a/base/logger/man/logger.setUseConsole.Rd
+++ b/base/logger/man/logger.setUseConsole.Rd
@@ -9,7 +9,12 @@ logger.setUseConsole(console, stderr = TRUE)
\arguments{
\item{console}{set to true to print logging to console.}
-\item{stderr}{set to true (default) to use stderr instead of stdout for logging}
+\item{stderr}{set to true (default) to log to stderr instead of stdout}
+}
+\value{
+Invisibly, a list of the previously set values of \code{console}
+and \code{stderr}. This can be used to restore the previous settings after a
+temporary change.
}
\description{
Should the logging to be printed to the console or not.
diff --git a/base/logger/man/logger.setWidth.Rd b/base/logger/man/logger.setWidth.Rd
index 8f8bd1d2d05..99b071c91d0 100644
--- a/base/logger/man/logger.setWidth.Rd
+++ b/base/logger/man/logger.setWidth.Rd
@@ -9,6 +9,10 @@ logger.setWidth(width)
\arguments{
\item{width}{number of chars to print before wrapping to next line.}
}
+\value{
+Invisibly, the previously set width.
+This can be used to restore settings after a temporary change.
+}
\description{
The default is for 60 chars per line. Setting this to any value will
wrap the line when printing a message at that many chars.
diff --git a/base/logger/man/logger.severe.Rd b/base/logger/man/logger.severe.Rd
index 4bda00ef83a..556cd61ae53 100644
--- a/base/logger/man/logger.severe.Rd
+++ b/base/logger/man/logger.severe.Rd
@@ -2,7 +2,7 @@
% Please edit documentation in R/logger.R
\name{logger.severe}
\alias{logger.severe}
-\title{Prints an severe message and stops execution.}
+\title{Prints a severe message and stops execution.}
\usage{
logger.severe(msg, ..., wrap = TRUE)
}
@@ -20,9 +20,8 @@ This function will print a message and stop execution of the code. This
should only be used if the application should terminate.
}
\details{
-set \code{\link{logger.setQuitOnSevere}(FALSE)} to avoid terminating
-the session. This is set by default to TRUE if interactive or running
-inside Rstudio.
+Set \code{\link{logger.setQuitOnSevere}(FALSE)} to avoid terminating
+the session. The default is to not quit if running interactively.
}
\examples{
\dontrun{
diff --git a/base/logger/tests/testthat/test.logger.R b/base/logger/tests/testthat/test.logger.R
index bc6f354dda9..ac182cbd1fd 100644
--- a/base/logger/tests/testthat/test.logger.R
+++ b/base/logger/tests/testthat/test.logger.R
@@ -7,7 +7,7 @@ test_that("`logger.getLevelNumber` returns correct level number",{
expect_equal(logger.getLevelNumber("info"), 20)
expect_equal(logger.getLevelNumber("warn"), 30)
expect_equal(logger.getLevelNumber("error"), 40)
- expect_equal(logger.getLevelNumber("severe"), 40)
+ expect_equal(logger.getLevelNumber("severe"), 50)
expect_equal(logger.getLevelNumber("off"), 60)
old_settings <- logger.setLevel("ERROR")
diff --git a/base/qaqc/DESCRIPTION b/base/qaqc/DESCRIPTION
index 78a72ea9b32..8e1e8f71611 100644
--- a/base/qaqc/DESCRIPTION
+++ b/base/qaqc/DESCRIPTION
@@ -1,7 +1,7 @@
Package: PEcAn.qaqc
Type: Package
Title: QAQC
-Version: 1.7.4
+Version: 1.7.5
Authors@R: c(person("David", "LeBauer", role = c("aut", "cre"),
email = "dlebauer@email.arizona.edu"),
person("Tess", "McCabe", role = c("aut"),
@@ -10,6 +10,8 @@ Authors@R: c(person("David", "LeBauer", role = c("aut", "cre"),
Author: David LeBauer, Tess McCabe
Maintainer: David LeBauer
Description: PEcAn integration and model skill testing
+URL: https://pecanproject.github.io
+BugReports: https://github.com/PecanProject/pecan/issues
Imports:
dplyr,
graphics,
@@ -38,3 +40,4 @@ Encoding: UTF-8
VignetteBuilder: knitr, rmarkdown
Config/testthat/edition: 3
RoxygenNote: 7.3.2
+X-schema.org-keywords: PEcAn, quality-control, data-cleaning
diff --git a/base/qaqc/NEWS.md b/base/qaqc/NEWS.md
index 359fb287d41..2415764e1df 100644
--- a/base/qaqc/NEWS.md
+++ b/base/qaqc/NEWS.md
@@ -1,9 +1,16 @@
+# PEcAn.qaqc 1.7.5
+
+* Added keywords and bug reporting URL to DESCRIPTION. No code changes in this release.
+
+
+
# PEcAn.qaqc 1.7.4
## License change
* PEcAn.qaqc is now distributed under the BSD three-clause license instead of the NCSA Open Source license.
+
# PEcAn.qaqc 1.7.1
* All changes in 1.7.1 and earlier were recorded in a single file for all of the PEcAn packages; please see
diff --git a/base/remote/DESCRIPTION b/base/remote/DESCRIPTION
index d44a1b6a43f..d2d6079108b 100644
--- a/base/remote/DESCRIPTION
+++ b/base/remote/DESCRIPTION
@@ -1,7 +1,7 @@
Package: PEcAn.remote
Type: Package
Title: PEcAn Model Execution Utilities
-Version: 1.9.0
+Version: 1.9.1
Authors@R: c(person("David", "LeBauer", role = c("aut"),
email = "dlebauer@email.arizona.edu"),
person("Rob", "Kooper", role = c("aut", "cre"),
@@ -15,6 +15,8 @@ Authors@R: c(person("David", "LeBauer", role = c("aut"),
person("University of Illinois, NCSA", role = c("cph")))
Description: This package contains utilities for communicating with and executing code on local and remote hosts.
In particular, it has PEcAn-specific utilities for starting ecosystem model runs.
+URL: https://pecanproject.github.io
+BugReports: https://github.com/PecanProject/pecan/issues
Imports:
dplyr,
foreach,
@@ -35,3 +37,4 @@ Encoding: UTF-8
LazyData: true
Roxygen: list(markdown = TRUE)
RoxygenNote: 7.3.2
+X-schema.org-keywords: PEcAn, distributed-computing
diff --git a/base/remote/NEWS.md b/base/remote/NEWS.md
index 33331873691..b246506048c 100644
--- a/base/remote/NEWS.md
+++ b/base/remote/NEWS.md
@@ -1,3 +1,9 @@
+# PEcAn.remote 1.9.1
+
+* Added keywords and bug reporting URL to DESCRIPTION. No code changes in this release.
+
+
+
# PEcAn.remote 1.9.0
* PEcAn.remote is now distributed under the BSD 3-clause license instead of the NCSA Open Source license.
diff --git a/base/settings/DESCRIPTION b/base/settings/DESCRIPTION
index a1efa5acca4..07e5b212928 100644
--- a/base/settings/DESCRIPTION
+++ b/base/settings/DESCRIPTION
@@ -5,13 +5,15 @@ Authors@R: c(person("David", "LeBauer", role = c("aut", "cre"),
person("Rob", "Kooper", role = c("aut"),
email = "kooper@illinois.edu"),
person("University of Illinois, NCSA", role = c("cph")))
-Version: 1.9.0
+Version: 1.9.1
License: BSD_3_clause + file LICENSE
Copyright: Authors
LazyLoad: yes
LazyData: FALSE
Require: hdf5
Description: Contains functions to read PEcAn settings files.
+URL: https://pecanproject.github.io
+BugReports: https://github.com/PecanProject/pecan/issues
Depends:
methods
Imports:
@@ -31,3 +33,4 @@ Suggests:
Encoding: UTF-8
RoxygenNote: 7.3.2
Roxygen: list(markdown = TRUE)
+X-schema.org-keywords: PEcAn, workflow-configuration
diff --git a/base/settings/NEWS.md b/base/settings/NEWS.md
index 1416a6f632f..c560f6db089 100644
--- a/base/settings/NEWS.md
+++ b/base/settings/NEWS.md
@@ -1,3 +1,17 @@
+# PEcAn.settings 1.9.1
+
+## Fixed
+
+* listToXml.MultiSettings now produces valid XML from a MultiSettings with length 1 (#3546).
+* setEnsemblePaths no longer wraps single paths in a list, giving `<path>/your/file.here</path>` instead of the previous `<path><path1>/your/file.here</path1></path>` (#3561).
+
+## Changed
+
+* The `tag` argument to `listToXml()` is now optional and will default to "pecan" if not specified. This sets the name of the root tag of the resulting XML object (#3558).
+* The first argument of `listToXml.MultiSettings()` has been renamed from `item` to `x`, and it now accepts but ignores `...`, both for consistency with the generic (#3558).
+* `setEnsemblePaths()` no longer restricts the set of values accepted for `input_type` (#3633)
+
+
# PEcAn.settings 1.9.0
## Changed
@@ -20,11 +34,13 @@
* Internal helper function `getRunSettings` is no longer exported. As the documentation has long noted, it was not intended to be called directly.
+
# PEcAn.settings 1.8.0
* Bug fixes for ensemble runs.
+
# PEcAn.settings 1.7.1
* All changes in 1.7.1 and earlier were recorded in a single file for all of the
diff --git a/base/settings/R/MultiSettings.R b/base/settings/R/MultiSettings.R
index 2bc8dfae088..2aa06a846e8 100644
--- a/base/settings/R/MultiSettings.R
+++ b/base/settings/R/MultiSettings.R
@@ -176,25 +176,25 @@ printAll.MultiSettings <- function(x) {
.expandableItemsTag <- "multisettings"
#' @export
-listToXml.MultiSettings <- function(item, tag, collapse = TRUE) {
- if (collapse && length(item) > 1) {
- if (.expandableItemsTag %in% names(item)) {
+listToXml.MultiSettings <- function(x, tag = "pecan", collapse = TRUE, ...) {
+ if (collapse) {
+ if (.expandableItemsTag %in% names(x)) {
stop("Settings can't contain reserved tag 'multisettings'.")
}
tmp <- list()
expandableItems <- list()
- for (setting in names(item)) {
- value <- item[[setting, setAttributes = TRUE]]
+ for (setting in names(x)) {
+ value <- x[[setting, setAttributes = TRUE]]
tmp[[setting]] <- value
if (attr(value, "settingType") == "multi") {
expandableItems <- c(expandableItems, setting)
}
}
- item <- tmp
+ x <- tmp
names(expandableItems) <- rep(.expandableItemsTag, length(expandableItems))
- item[[.expandableItemsTag]] <- expandableItems
+ x[[.expandableItemsTag]] <- expandableItems
}
NextMethod()
diff --git a/base/settings/R/listToXml.R b/base/settings/R/listToXml.R
index e354a33e6c4..43a46f855cd 100644
--- a/base/settings/R/listToXml.R
+++ b/base/settings/R/listToXml.R
@@ -14,22 +14,13 @@ listToXml <- function(x, ...) {
#' @title List to XML
#' @param x object to be converted.
#' Despite the function name, need not actually be a list
-#' @param ... further arguments.
-#' Used to set the element name of the created XML object,
-#' which is taken from an argument named `tag` if present,
-#' or otherwise from the first element of `...`
+#' @param tag name to use for the root tag of the resulting XML tree
+#' @param ... further arguments, currently ignored
#' @return xmlNode
#' @export
#' @author David LeBauer, Carl Davidson, Rob Kooper
-listToXml.default <- function(x, ...) {
- args <- list(...)
- if (length(args) == 0) {
- PEcAn.logger::logger.error("no tag provided")
- } else if ("tag" %in% names(args)) {
- tag <- args$tag
- } else {
- tag <- args[[1]]
- }
+listToXml.default <- function(x, tag = "pecan", ...) {
+
# just a textnode, or empty node with attributes
if (typeof(x) != "list") {
if (length(x) > 1) {
@@ -42,7 +33,7 @@ listToXml.default <- function(x, ...) {
return(XML::xmlNode(tag, x))
}
}
-
+
# create the node
if (identical(names(x), c("text", ".attrs"))) {
# special case a node with text and attributes
@@ -56,7 +47,7 @@ listToXml.default <- function(x, ...) {
}
}
}
-
+
# add attributes to node
attrs <- x[[".attrs"]]
for (name in names(attrs)) {
diff --git a/base/settings/R/setEnsemblePaths.R b/base/settings/R/setEnsemblePaths.R
index 607c8220603..ffaad563c84 100644
--- a/base/settings/R/setEnsemblePaths.R
+++ b/base/settings/R/setEnsemblePaths.R
@@ -15,10 +15,9 @@
#' replaced with the siteid of each site, and any other variables need to be
#' passed as named arguments in `...`.
#'
-#' Note that for consistency, every site in `settings` must contain an
-#' element named `inputs$<input_type>` before you call this.
-#' If `inputs$<input_type>$path` does not exist it will be created;
-#' if it does exist it will be overwritten.
+#' If `inputs$<input_type>` does not exist, it will be created with a `path`
+#' element that matches the requested pattern. If it does exist, any existing
+#' `path` element will be overwritten.
#'
#' @param settings a PEcAn MultiSettings object
#' @param n_reps number of replicates to insert for each path.
@@ -50,7 +49,7 @@
setEnsemblePaths <- function(
settings,
n_reps,
- input_type = c("met", "poolinitcond", "soilinitcond"),
+ input_type = "met",
path_template = "./{id}/{n}.nc",
...) {
if (!is.MultiSettings(settings)) {
@@ -58,7 +57,6 @@ setEnsemblePaths <- function(
"Setting ensemble paths is only implemented for MultiSettings objects"
)
}
- input_type <- match.arg(input_type)
papply(
settings,
@@ -110,7 +108,8 @@ set_site_paths <- function(settings, input_type, ...) {
#' paths, as a `glue::glue()` input (see examples)
#' @param ... other variables to be interpolated into the path, each with length
#' either 1 or equal to `n`.
-#' @return list of paths the same length as `n`, with names set as `path`
+#' @return list of paths the same length as `n`, with names set as `path`,
+#' or a single path if there is only one of them
#' @keywords internal
### ^ Internal for now, but OK to export later if that proves useful.
### If/when exporting, remove the \dontrun{} below along with this comment --
@@ -124,8 +123,14 @@ build_pathset <- function(n, glue_str = "./file_{n}.nc", ...) {
if (length(n) == 1 && is.numeric(n)) {
n <- seq_len(n)
}
- values <- list(n = n, ...)
- glue::glue_data(.x = values, glue_str) |>
+ paths <- list(n = n, ...) |>
+ glue::glue_data(glue_str)
+
+ if (length(paths) == 1) {
+ return(as.character(paths))
+ }
+
+ paths |>
as.list() |>
stats::setNames(glue::glue("path{n}", n = n))
}
diff --git a/base/settings/man/build_pathset.Rd b/base/settings/man/build_pathset.Rd
index 80e1d8529fc..27b7788c64d 100644
--- a/base/settings/man/build_pathset.Rd
+++ b/base/settings/man/build_pathset.Rd
@@ -17,7 +17,8 @@ paths, as a \code{glue::glue()} input (see examples)}
either 1 or equal to \code{n}.}
}
\value{
-list of paths the same length as \code{n}, with names set as \verb{path}
+list of paths the same length as \code{n}, with names set as \verb{path},
+or a single path if there is only one of them
}
\description{
Given a template string and a set of variables that includes a replicate ID,
diff --git a/base/settings/man/listToXml.default.Rd b/base/settings/man/listToXml.default.Rd
index 87cb9a568e9..ce7de3659f0 100644
--- a/base/settings/man/listToXml.default.Rd
+++ b/base/settings/man/listToXml.default.Rd
@@ -4,16 +4,15 @@
\alias{listToXml.default}
\title{List to XML}
\usage{
-\method{listToXml}{default}(x, ...)
+\method{listToXml}{default}(x, tag = "pecan", ...)
}
\arguments{
\item{x}{object to be converted.
Despite the function name, need not actually be a list}
-\item{...}{further arguments.
-Used to set the element name of the created XML object,
-which is taken from an argument named \code{tag} if present,
-or otherwise from the first element of \code{...}}
+\item{tag}{name to use for the root tag of the resulting XML tree}
+
+\item{...}{further arguments, currently ignored}
}
\value{
xmlNode
diff --git a/base/settings/man/setEnsemblePaths.Rd b/base/settings/man/setEnsemblePaths.Rd
index 762af1d9230..8613a950067 100644
--- a/base/settings/man/setEnsemblePaths.Rd
+++ b/base/settings/man/setEnsemblePaths.Rd
@@ -7,7 +7,7 @@
setEnsemblePaths(
settings,
n_reps,
- input_type = c("met", "poolinitcond", "soilinitcond"),
+ input_type = "met",
path_template = "./{id}/{n}.nc",
...
)
@@ -43,10 +43,9 @@ with curly braces wrapping any expressions to be interpolated.
replaced with the siteid of each site, and any other variables need to be
passed as named arguments in \code{...}.
-Note that for consistency, every site in \code{settings} must contain an
-element named \verb{inputs$<input_type>} before you call this.
-If \verb{inputs$<input_type>$path} does not exist it will be created;
-if it does exist it will be overwritten.
+If \verb{inputs$<input_type>} does not exist, it will be created with a \code{path}
+element that matches the requested pattern. If it does exist, any existing
+\code{path} element will be overwritten.
}
\examples{
s <- as.Settings(list(
diff --git a/base/settings/tests/Rcheck_reference.log b/base/settings/tests/Rcheck_reference.log
index c1602a41073..1b37fff7eeb 100644
--- a/base/settings/tests/Rcheck_reference.log
+++ b/base/settings/tests/Rcheck_reference.log
@@ -94,16 +94,6 @@ Package in Depends field not imported from: ‘methods’
These packages need to be imported from (in the NAMESPACE file)
for when this namespace is loaded but not attached.
* checking S3 generic/method consistency ... WARNING
-listToXml:
- function(x, ...)
-listToXml.MultiSettings:
- function(item, tag, collapse)
-
-listToXml:
- function(x, ...)
-listToXml.default:
- function(item, tag)
-
update:
function(object, ...)
update.settings:
@@ -126,12 +116,7 @@ hand side must be named ‘value’.
* checking Rd metadata ... OK
* checking Rd line widths ... OK
* checking Rd cross-references ... OK
-* checking for missing documentation entries ... WARNING
-Undocumented code objects:
- ‘expandMultiSettings’ ‘listToXml’ ‘printAll’ ‘settingNames’
-All user-level objects in a package should have documentation entries.
-See chapter ‘Writing R documentation files’ in the ‘Writing R
-Extensions’ manual.
+* checking for missing documentation entries ... OK
* checking for code/documentation mismatches ... OK
* checking Rd \usage sections ... OK
* checking Rd contents ... OK
diff --git a/base/settings/tests/testthat/test.MultiSettings.class.R b/base/settings/tests/testthat/test.MultiSettings.class.R
index 28d2114b2a9..b1469febe8c 100644
--- a/base/settings/tests/testthat/test.MultiSettings.class.R
+++ b/base/settings/tests/testthat/test.MultiSettings.class.R
@@ -14,8 +14,10 @@ test_that("MultiSettings constructor works as expected", {
expect_error(MultiSettings(settings, l))
multiSettings <- MultiSettings(settings, settings, settings)
+ multiSettings1 <- MultiSettings(settings)
multiSettings2 <- MultiSettings(list(settings, settings, settings))
multiSettings3 <- MultiSettings(multiSettings)
+ expect_identical(multiSettings1[[1]], settings)
expect_identical(multiSettings2, multiSettings)
expect_identical(multiSettings3, multiSettings)
@@ -291,6 +293,12 @@ test_that("multiSettings write to and read from xml as expcted (i.e., with colla
expect_true(are.equal.possiblyNumericToCharacter(msNew, msOrig))
})
+test_that("length one MultiSettings is collapsed same as longer ones", {
+ ms1 <- MultiSettings(settings)
+ ms1XML <- listToXml(ms1, "pecan")
+ expect_length(XML::getNodeSet(ms1XML, "/pecan/multisettings"), 1)
+})
+
test_that("expandMultiSettings does nothing to a non-MultiSettings list", {
expect_identical(settings, expandMultiSettings(settings))
diff --git a/base/settings/tests/testthat/test.get_args.R b/base/settings/tests/testthat/test.get_args.R
index a23cb12eae9..edf033beed8 100644
--- a/base/settings/tests/testthat/test.get_args.R
+++ b/base/settings/tests/testthat/test.get_args.R
@@ -1,7 +1,7 @@
test_that("`get_args` throws an error with missing settings file", {
withr::with_envvar(c(PECAN_SETTINGS = "doesnotexists.xml"), {
expect_error(
- get_args(),
+ get_args(),
"--settings \"doesnotexists.xml\" not a valid file"
)
})
@@ -15,4 +15,4 @@ test_that("`get_args` works for existing settings file", {
expect_equal(args$continue, FALSE)
expect_equal(args$help, FALSE)
})
-})
\ No newline at end of file
+})
diff --git a/base/settings/tests/testthat/test.listToXml.R b/base/settings/tests/testthat/test.listToXml.R
new file mode 100644
index 00000000000..3fc6191e21f
--- /dev/null
+++ b/base/settings/tests/testthat/test.listToXml.R
@@ -0,0 +1,9 @@
+test_that("listToXml", {
+ s <- read.settings("data/testsettings.xml")
+ s_xml <- listToXml(s)
+
+ # root tag defaults to "pecan" whether passed named or unnamed
+ expect_identical(XML::xmlName(s_xml), "pecan")
+ expect_identical(s_xml, listToXml(s, tag = "pecan"))
+ expect_identical(s_xml, listToXml(s, "pecan"))
+})
diff --git a/base/settings/tests/testthat/test.setEnsemblePaths.R b/base/settings/tests/testthat/test.setEnsemblePaths.R
index 207d258eca8..9ceba0c1634 100644
--- a/base/settings/tests/testthat/test.setEnsemblePaths.R
+++ b/base/settings/tests/testthat/test.setEnsemblePaths.R
@@ -4,7 +4,10 @@ test_that("setEnsemblePaths sets paths across sites", {
run = list(
start.date = "2015-01-01",
end.date = "2015-12-31",
- inputs = list("a", "b")
+ inputs = list(
+ met = list(id = "a"),
+ soil_physics = list(name = "b", path = list("overwritten"))
+ )
)
)
)
@@ -32,11 +35,17 @@ test_that("setEnsemblePaths sets paths across sites", {
)
}
- # only known input types accepted
- expect_error(
- setEnsemblePaths(settings, 3, input_type = "fake"),
- ".arg. should be one of .met., .poolinitcond., .soilinitcond."
- )
+ # input block is added if not present in original,
+ # without changing length of existing inputs
+ add_in <- setEnsemblePaths(with_paths, 2, input_type = "novel")
+ for (i in seq_along(add_in)) {
+ expect_named(
+ add_in$run[[i]]$inputs,
+ c("met", "soil_physics", "novel")
+ )
+ expect_length(add_in$run[[i]]$inputs$novel$path, 2)
+ expect_length(add_in$run[[i]]$inputs$met$path, 3)
+ }
# extra vars passed through to glue
with_extras <- setEnsemblePaths(
@@ -46,4 +55,12 @@ test_that("setEnsemblePaths sets paths across sites", {
path_template = "../{id}_{foo}{n}.{ext}"
)
expect_match(with_extras$run$site.s1$inputs$met$path$path2, "s1_bar2.txt")
+
+ # Ensembles of 1 return bare path not list
+ one_path <- setEnsemblePaths(settings, n_reps = 1)
+ two_path <- setEnsemblePaths(settings, n_reps = 2)
+ expect_identical(
+ one_path$run$site.s1$inputs$met$path,
+ two_path$run$site.s1$inputs$met$path$path1
+ )
})
diff --git a/base/settings/tests/testthat/test.site_pft_link_settings.R b/base/settings/tests/testthat/test.site_pft_link_settings.R
index 061a46122b9..194ea8ffe50 100644
--- a/base/settings/tests/testthat/test.site_pft_link_settings.R
+++ b/base/settings/tests/testthat/test.site_pft_link_settings.R
@@ -9,18 +9,16 @@ test_that("`site.pft.link.settings` able to link sites to pfts and update settin
)
)
)
- )
+ )
df <- data.frame(
site = c("1000025731", "1000025732"),
pft = c("temperate.broadleaf.deciduous1", "temperate.needleleaf.evergreen")
)
write.csv(df, tf, row.names = FALSE)
updated_settings <- site.pft.link.settings(settings)
- print(updated_settings)
- print(length(updated_settings$pfts))
- for(i in 1:length(updated_settings$pfts)) {
+ for (i in seq_along(updated_settings$pfts)) {
expect_equal(updated_settings$pfts[[i]]$name, df$pft[i])
expect_equal(updated_settings$pfts$pft$constants, 1)
}
})
-})
\ No newline at end of file
+})
diff --git a/base/utils/DESCRIPTION b/base/utils/DESCRIPTION
index f5dda7cdfef..a399a658f2f 100644
--- a/base/utils/DESCRIPTION
+++ b/base/utils/DESCRIPTION
@@ -2,7 +2,7 @@ Package: PEcAn.utils
Type: Package
Title: PEcAn Functions Used for Ecological Forecasts and
Reanalysis
-Version: 1.8.1
+Version: 1.8.2
Authors@R: c(person("Mike", "Dietze", role = c("aut"),
email = "dietze@bu.edu"),
person("Rob", "Kooper", role = c("aut", "cre"),
@@ -30,10 +30,14 @@ Description: The Predictive Ecosystem Carbon Analyzer
PEcAn is to streamline the interaction between data and
models, and to improve the efficacy of scientific
investigation.
+URL: https://pecanproject.github.io
+BugReports: https://github.com/PecanProject/pecan/issues
+Depends: R (>= 4.1.0)
Imports:
abind (>= 1.4.5),
curl,
dplyr,
+ foreach,
lubridate (>= 1.6.0),
magrittr,
ncdf4 (>= 1.15),
@@ -45,6 +49,7 @@ Imports:
Suggests:
coda (>= 0.18),
data.table,
+ doSNOW,
ggplot2,
MASS,
mockery,
@@ -59,3 +64,4 @@ LazyData: true
Encoding: UTF-8
RoxygenNote: 7.3.2
Roxygen: list(markdown = TRUE)
+X-schema.org-keywords: PEcAn
diff --git a/base/utils/NAMESPACE b/base/utils/NAMESPACE
index 2f8d208fcec..6f50b6ee555 100644
--- a/base/utils/NAMESPACE
+++ b/base/utils/NAMESPACE
@@ -15,6 +15,7 @@ export(distn.stats)
export(distn.table.stats)
export(download.url)
export(download_file)
+export(extract_nc_sda)
export(full.path)
export(get.ensemble.inputs)
export(get.parameter.stat)
@@ -32,6 +33,8 @@ export(misc.are.convertible)
export(misc.convert)
export(mstmipvar)
export(n_leap_day)
+export(nc_merge_all_sites_by_year)
+export(nc_write_varfiles)
export(need_packages)
export(paste.stats)
export(r2bugs.distributions)
@@ -62,5 +65,6 @@ export(units_are_equivalent)
export(vecpaste)
export(zero.bounded.density)
export(zero.truncate)
+importFrom(foreach,"%dopar%")
importFrom(magrittr,"%>%")
importFrom(rlang,.data)
diff --git a/base/utils/NEWS.md b/base/utils/NEWS.md
index 4f2e0a66eeb..e6effc30eed 100644
--- a/base/utils/NEWS.md
+++ b/base/utils/NEWS.md
@@ -1,3 +1,14 @@
+# PEcAn.utils 1.8.2
+
+## Added
+
+* New function `nc_write_varfiles()` creates text summaries of the variables in all netCDF files in an output directory, writing either one file named `nc_vars.txt` if `write_mode = "collected"` or a separate `[filename].nc.var` alongside each netCDF if `write_mode = "paired"` (#3611).
+* New function `nc_merge_all_sites_by_year()` combines many netCDFs into one file per year (#3620).
+* New function `extract_nc_sda()` extracts data from a multi-site netCDF created by `nc_merge_all_sites_by_year()` (#3620).
+* Added CH4 and N2O variables to `standard_vars`.
+
+
+
# PEcAn.utils 1.8.1
## License change
@@ -8,6 +19,7 @@
* Roxygen fixes
+
# PEcAn.utils 1.8.0
## Added
diff --git a/base/utils/R/combine_model_netcdf.R b/base/utils/R/combine_model_netcdf.R
new file mode 100644
index 00000000000..568499c497c
--- /dev/null
+++ b/base/utils/R/combine_model_netcdf.R
@@ -0,0 +1,232 @@
+#' Combine many netCDFs into one file per year
+#'
+#' Merges model outputted netCDF files by the time steps specified in a pecan settings file.
+#'
+#' The function is only tested for SIPNET model runs that were run with state data assimilation enabled.
+#' Please make sure you have the same netCDF formats if you want to proceed with different models.
+#' We could also have more functions that deal with different dimensions (e.g., by site instead of by year).
+#'
+#' @param model.outdir character: path to the folder that contains model outputs.
+#' @param nc.outdir character: physical path to the folder that contains the merged netCDF files.
+#' @param ens.num numeric: number of ensembles for the model run.
+#' @param site.ids numeric or character: vector of site ids across locations.
+#' @param start.date date or character in YYYY-MM-DD format: start date of the model run.
+#' @param end.date date or character in YYYY-MM-DD format: end date of the model run.
+#' @param time.step character: time step of the model run. Default is 1 year.
+#' @param cores numeric: the number of CPUs for the parallel computation. Default is 1.
+#'
+#' @return character: file paths to the merged netCDF files.
+#' @export
+#'
+#' @author Dongchen Zhang
+#' @importFrom magrittr %>%
+#' @importFrom foreach %dopar%
+nc_merge_all_sites_by_year <- function (model.outdir,
+ nc.outdir,
+ ens.num,
+ site.ids,
+ start.date,
+ end.date,
+ time.step = "1 year",
+ cores = 1) {
+ # check shell environments.
+ if (suppressWarnings(system2("which", "cdo", stdout = FALSE)) != 0) {
+ PEcAn.logger::logger.info("The cdo function is not detected in shell command.")
+ return(NA)
+ }
+ # create the nc output folder if it doesn't exist.
+ if (!file.exists(nc.outdir)) {
+ dir.create(nc.outdir)
+ }
+ # calculate time points.
+ time.points <- lubridate::year(seq(lubridate::date(start.date),
+ lubridate::date(end.date),
+ time.step))
+
+ # loop over time.
+ # initialize parallel.
+ cl <- parallel::makeCluster(as.numeric(cores))
+ doSNOW::registerDoSNOW(cl)
+ #progress bar
+ pb <- utils::txtProgressBar(min = 1, max = length(site.ids), style = 3)
+ progress <- function(n) utils::setTxtProgressBar(pb, n)
+ opts <- list(progress=progress)
+ # record nc paths.
+ nc.paths <- c()
+ for (t in seq_along(time.points)) {
+ time <- time.points[t] # grab the current time point.
+ # record previous file.
+ if (file.exists(file.path(nc.outdir, paste0(time, ".nc")))) {
+ nc.paths <- c(nc.paths, file.path(nc.outdir, paste0(time, ".nc")))
+ next
+ }
+ # loop over sites.
+ s <- NULL # For passing the GitHub actions.
+ nc.files <-
+ foreach::foreach(s = seq_along(site.ids),
+ .packages = c("purrr", "ncdf4"),
+ .options.snow=opts) %dopar% {
+ nc_merge_single_site(model.outdir = model.outdir,
+ nc.outdir = nc.outdir,
+ ens.num = ens.num,
+ # cdo collgrid only works for numeric data type.
+ site.id = site.ids[s],
+ time)
+ } %>% unlist
+ # merge across sites using CDO command.
+ cmd <- "cdo -P @CORES@ collgrid @NC.OUTDIR@/*@TIME@.nc @OUTFILE@"
+ cmd <- gsub("@CORES@", cores, cmd)
+ cmd <- gsub("@NC.OUTDIR@", nc.outdir, cmd)
+ cmd <- gsub("@TIME@", time, cmd)
+ cmd <- gsub("@OUTFILE@", file.path(nc.outdir, paste0(time, ".nc")), cmd)
+ out <- system(cmd, intern = TRUE, ignore.stdout = TRUE, ignore.stderr = TRUE)
+ # if we have site ids in character format.
+ if (all(is.character(site.ids))) {
+ nc <- ncdf4::nc_open(file.path(nc.outdir, paste0(time, ".nc")))
+ site_dim <- ncdf4::ncdim_def("site", units = "", vals = seq_along(site.ids))
+ site_id_var <- ncdf4::ncvar_def("site_id", units = "", dim = site_dim, prec = "char")
+ ncdf4::ncvar_put(nc, varid = "site_id", vals = site.ids)
+ ncdf4::nc_close(nc) # close nc connection.
+ }
+ # record the current nc path.
+ nc.paths <- c(nc.paths, file.path(nc.outdir, paste0(time, ".nc")))
+ # remove nc files for each site.
+ unlink(nc.files)
+ }
+ # stop parallel.
+ parallel::stopCluster(cl)
+ foreach::registerDoSEQ()
+ # return nc paths.
+ return(nc.paths)
+}
+
+#' Merge model outputted netCDF files across ensembles for a single site.
+#' @details
+#' The function is only tested for SIPNET model runs.
+#' Please make sure you have the same netCDF formats if you want to proceed with different models.
+#'
+#' This function requires `site.id` to be an integer.
+#' If your sites have non-numeric IDs, one possible workaround is to
+#' pass a dummy value and then edit the output file afterward to replace its `site_id` variable
+#' with character data. If you do this, do be aware many legacy netCDF tools have poor support
+#' for netCDFs containing character data.
+#'
+#' @param model.outdir character: physical path to the model output folder.
+#' @param nc.outdir character: physical path to the folder that contains the merged netCDF files.
+#' @param ens.num numeric: ensemble size.
+#' @param site.id numeric: identification number of the site.
+#' @param time numeric or character: the current time of netCDF files to be extracted.
+#' See details for use with non-numeric siteIDs
+#' @return character: file path to the merged netCDF file.
+#'
+#' @author Dongchen Zhang
+nc_merge_single_site <- function (model.outdir, nc.outdir, ens.num, site.id, time) {
+ # grab basic formats from the first nc file of the site.
+ # create the folder name associated with first ensemble and first site.
+ prefix <- "ENS-"
+ folder.name <- paste0(prefix, sprintf("%05d", 1), "-", site.id)
+ # read nc file.
+ nc <- ncdf4::nc_open(file.path(model.outdir, folder.name, paste0(time, ".nc")))
+ nc.vars <- nc$var # grab variable definitions.
+ time.values <- nc$dim$time # grab time dimensions.
+ lat <- nc$dim$lat$vals
+ lon <- nc$dim$lon$vals
+ ncdf4::nc_close(nc) # close nc connection.
+ # dimension and variable definitions.
+ # site dimension.
+ site_dim <- ncdf4::ncdim_def("site", units = "", vals = site.id)
+ # time dimension.
+ time_dim <- ncdf4::ncdim_def("time", longname = "time", units = time.values$units, vals = time.values$vals)
+ # ensemble dimension.
+ ens_dim <- ncdf4::ncdim_def("ensemble", longname = "ensemble member", unit = "", vals = 1:ens.num)
+ # define site-specific variables.
+ lat_var <- ncdf4::ncvar_def("latitude", units = "degrees_north", dim = site_dim, prec = "double")
+ lon_var <- ncdf4::ncvar_def("longitude", units = "degrees_east", dim = site_dim, prec = "double")
+ site_id_var <- ncdf4::ncvar_def("site_id", units = "", dim = site_dim, prec = "integer")
+ # loop over variables.
+ first.creation <- TRUE
+ for (i in seq_along(nc.vars)) {
+ # grab the variable name.
+ var <- nc.vars[[i]]$name
+ # skip if it's time related variable.
+ if (grepl("time", var, fixed = T)) next
+ # loop over ensembles.
+ var.mat <- matrix(NA, time.values$len, ens.num)
+ for (ens in 1:ens.num) {
+      # TODO: add checks to make sure everything in the files is in the same shape and format.
+ folder.name <- paste0(prefix, sprintf("%05d", ens), "-", site.id)
+ nc <- ncdf4::nc_open(file.path(model.outdir, folder.name, paste0(time, ".nc")))
+ var.mat[,ens] <- ncdf4::ncvar_get(nc, var = var)
+ ncdf4::nc_close(nc)
+ }
+ # define the current model variable.
+ temp_var <- ncdf4::ncvar_def(nc.vars[[i]]$name,
+ units = nc.vars[[i]]$units,
+ dim = list(site_dim, ens_dim, time_dim),
+ prec = nc.vars[[i]]$prec)
+ # if it's the first variable, we will need to create the NC file along with the site-specific variables.
+ if (first.creation) {
+ # turn the flag off.
+ first.creation <- !first.creation
+ # create nc file.
+ nc_file <- ncdf4::nc_create(file.path(nc.outdir, paste0(site.id, "_", time, ".nc")), list(site_id_var, lon_var, lat_var, temp_var))
+ # add the site-specific variables.
+ ncdf4::ncvar_put(nc_file, varid = "site_id", vals = site.id)
+ ncdf4::ncvar_put(nc_file, varid = "latitude", vals = lat)
+ ncdf4::ncvar_put(nc_file, varid = "longitude", vals = lon)
+ # add the current variable.
+ ncdf4::ncvar_put(nc_file, varid = nc.vars[[i]]$name, vals = var.mat)
+ } else {
+ # add additional variable.
+ nc_file <- ncdf4::ncvar_add(nc_file, temp_var)
+ # update data.
+ ncdf4::ncvar_put(nc_file, varid = nc.vars[[i]]$name, vals = var.mat)
+ }
+ }
+ # close nc connection.
+ ncdf4::nc_close(nc_file)
+ # return all nc paths.
+ return(file.path(nc.outdir, paste0(site.id, "_", time, ".nc")))
+}
+
+#' Extract netCDF file by site.id, time window, and variable name.
+#' @details
+#' The function is only tested for netCDF files generated by the `nc_merge_all_sites_by_year` function.
+#'
+#' @param site.id numeric or character: identification of the site.
+#' @param start.date date in YYYY-MM-DD format: start date of the requested time window.
+#' @param end.date date in YYYY-MM-DD format: end date of the requested time window.
+#' @param var.name character: variable name.
+#' @param nc.path character: physical path to the target netCDF file.
+#'
+#' @return list: a list contains requested array, time steps, site id, variable name, and ensemble size.
+#'
+#' @author Dongchen Zhang
+#' @export
+extract_nc_sda <- function (site.id, start.date, end.date, var.name, nc.path) {
+ # open NC file.
+ nc <- ncdf4::nc_open(nc.path)
+ # grab the index for the requested site.id.
+ site.ind <- which(nc$dim$site$vals == site.id)
+ # calculate real time.
+ time.val <- nc$dim$time$vals
+ time.unit <- nc$dim$time$units
+ origin <- strsplit(x = time.unit, split = "since ", fixed = TRUE)[[1]][2]
+ real_time <- as.POSIXct(time.val*3600*24, origin = origin, tz = "UTC")
+ time.steps <- length(real_time)
+ # grab ensemble size.
+ ensemble.size <- nc$dim$ensemble$len
+ # if we have the time window.
+ if (start.date >= real_time[1] & end.date <= real_time[time.steps]) {
+ time.inds <- which(real_time >= start.date & real_time <= end.date)
+ } else {
+ PEcAn.logger::logger.info("The netCDF file doesn't include the date range you asking for.")
+ return(0)
+ }
+ # grab outputs.
+ res <- ncdf4::ncvar_get(nc, var.name, start = c(site.ind, 1, time.inds[1]), count = c(1, ensemble.size, length(time.inds)))
+ # close NC connection.
+ ncdf4::nc_close(nc)
+ # prepare outputs.
+ return(list(mat = res, time.points = real_time[time.inds], site.ids = site.id, var.name = var.name, ensemble.size = ensemble.size))
+}
\ No newline at end of file
diff --git a/base/utils/R/nc_write_varfiles.R b/base/utils/R/nc_write_varfiles.R
new file mode 100644
index 00000000000..c5c273cb37f
--- /dev/null
+++ b/base/utils/R/nc_write_varfiles.R
@@ -0,0 +1,56 @@
+#' Summarize netcdf variables into text file
+#'
+#' @param nc_dir directory to scan for netCDF files (including in subdirs)
+#' @param write_mode how many files to write:
+#' "paired" creates files named `*.nc.var` alongside every `*.nc`,
+#' "collected" writes a single `nc_vars.txt` in `nc_dir` that lists every variable
+#' defined in any nc file in the directory
+#' @importFrom rlang .data
+#' @export
+nc_write_varfiles <- function(nc_dir,
+ write_mode = c("paired", "collected")) {
+
+ ncfiles <- list.files(
+ nc_dir,
+ pattern = "\\.nc$",
+ recursive = TRUE,
+ full.names = TRUE
+ )
+ vartables <- lapply(ncfiles, nc_longnames)
+
+ write_mode <- match.arg(write_mode)
+ if (write_mode == "paired") {
+ varfiles <- paste0(ncfiles, ".var")
+ purrr::walk2(vartables, varfiles, nc_write_varfile)
+ return(NULL)
+ } else {
+ vartables |>
+ do.call(what = "rbind") |>
+ unique() |>
+ dplyr::arrange(.data$name) |>
+ nc_write_varfile(file.path(nc_dir, "nc_vars.txt"))
+ }
+}
+
+
+
+nc_longnames <- function(ncfile) {
+ nc <- ncdf4::nc_open(ncfile)
+ on.exit(ncdf4::nc_close(nc))
+
+ # A named vector: c(name1 = "longname_1", ...)
+ nm <- sapply(nc$var, `[[`, "longname")
+
+ data.frame(name = names(nm), longname = nm)
+}
+
+
+nc_write_varfile <- function(df, varfile) {
+ utils::write.table(
+ x = df,
+ file = varfile,
+ col.names = FALSE,
+ row.names = FALSE,
+ quote = FALSE
+ )
+}
diff --git a/base/utils/data/standard_vars.csv b/base/utils/data/standard_vars.csv
index 7453a3db34d..7691a0ebf32 100755
--- a/base/utils/data/standard_vars.csv
+++ b/base/utils/data/standard_vars.csv
@@ -21,6 +21,8 @@ TotalResp,NA,kg C m-2 s-1,Total Respiration,Carbon Fluxes,real,lon,lat,time,NA,"
AutoResp,plant_respiration_carbon_flux,kg C m-2 s-1,Autotrophic Respiration,Carbon Fluxes,real,lon,lat,time,NA,Autotrophic respiration rate (always positive),
HeteroResp,heterotrophic_respiration_carbon_flux,kg C m-2 s-1,Heterotrophic Respiration,Carbon Fluxes,real,lon,lat,time,NA,Heterotrophic respiration rate (always positive),
SoilResp,soil_respiration_carbon_flux,kg C m-2 s-1,Soil Respiration,Carbon Fluxes,real,lon,lat,time,NA,Sum of respiration in the soil by heterotrophs and by the roots of plants (autotrophs),
+CH4_flux,surface_upward_mass_flux_of_methane_expressed_as_carbon,kg C m-2 s-1,Methane Flux,Carbon Fluxes,real,lon,lat,time,NA,Net methane flux between ecosystem and atmosphere (positive into atmosphere),
+N2O_flux,surface_upward_mass_flux_of_nitrous_oxide_expressed_as_nitrogen,kg N m-2 s-1,Nitrous Oxide Flux,Nitrogen Fluxes,real,lon,lat,time,NA,Net nitrous oxide flux between ecosystem and atmosphere (positive into atmosphere),
DOC_flux,NA,kg C m-2 s-1,Dissolved Organic Carbon flux,Carbon Fluxes,real,lon,lat,time,NA,Loss of organic carbon dissolved in ground water or rivers (positive out of grid cell),
Fire_flux,NA,kg C m-2 s-1,Fire emissions,Carbon Fluxes,real,lon,lat,time,NA,Flux of carbon due to fires (always positive),
litter_carbon_flux,litter_carbon_flux,kg C m-2 s-1,Litter Carbon Flux,Carbon Fluxes,real,lon,lat,time,NA,"Total carbon flux of litter, excluding coarse woody debris",
@@ -79,11 +81,11 @@ z_top,NA,m,Soil Layer Top Depth,Deprecated,real,depth,NA,NA,NA,Depth from soil s
z_node,NA,m,Soil Layer Node Depth,Deprecated,real,depth,NA,NA,NA,"Depth from soil surface to layer prognostic variables, typically center of soil layer",
z_bottom,NA,m,Soil Layer Bottom Depth,Deprecated,real,depth,NA,NA,NA,Depth from soil surface to bottom of soil layer,
SoilMoist,NA,kg m-2,Average Layer Soil Moisture,Physical Variables,real,lon,lat,depth,time,"Soil water content in each soil layer, including liquid, vapor and ice",
-SoilMoistFrac,NA,(-),Average Layer Fraction of Saturation,Physical Variables,real,lon,lat,depth,time,"Fraction of saturation of soil water in each soil layer, including liquid and ice",
+SoilMoistFrac,volume_fraction_of_condensed_water_in_soil,1,Average Layer Fraction of Saturation,Physical Variables,real,lon,lat,depth,time,"Fraction of saturation of soil water in each soil layer, including liquid and ice",
SoilWet,NA,(-),Total Soil Wetness,Physical Variables,real,lon,lat,time,NA,Vertically integrated soil moisture divided by maximum allowable soil moisture above wilting point,
Qs,NA,kg m-2 s-1,Surface runoff,Physical Variables,real,lon,lat,time,NA,Runoff from the landsurface and/or subsurface stormflow,
Qsb,NA,kg m-2 s-1,Subsurface runoff,Physical Variables,real,lon,lat,time,NA,Gravity soil water drainage and/or soil water lateral flow,
-SoilTemp,NA,K,Average Layer Soil Temperature,Physical Variables,real,lon,lat,depth,time,Average soil temperature in each soil layer,
+SoilTemp,soil_temperature,K,Average Layer Soil Temperature,Physical Variables,real,lon,lat,depth,time,Average soil temperature in each soil layer,
Tdepth,NA,m,Active Layer Thickness,Physical Variables,real,lon,lat,time,NA,Thaw depth to zero centigrade isotherm in permafrost,
Fdepth,NA,m,Frozen Thickness,Physical Variables,real,lon,lat,time,NA,Freeze depth to zero centigrade isotherm in non-permafrost,
Tcan,NA,K,Canopy Temperature,Physical Variables,real,lon,lat,time,NA,Canopy or vegetation temperature (or temperature used in photosynthesis calculations),
@@ -91,8 +93,9 @@ SWE,NA,kg m-2,Snow Water Equivalent,Physical Variables,real,lon,lat,time,NA,"Tot
SnowDen,NA,kg m-3,Bulk Snow Density,Physical Variables,real,lon,lat,time,NA,"Overall bulk density of the snow pack, including ice and liquid water",
SnowDepth,NA,m,Total snow depth,Physical Variables,real,lon,lat,time,NA,Total snow depth,
CO2CAS,NA,ppmv,CO2CAS,Physical Variables,real,lon,lat,time,NA,CO2 in canopy air space; ED2 output variable,
-CO2air,NA,micromol mol-1,Near surface CO2 concentration,Driver,real,lon,lat,time,NA,Near surface dry air CO2 mole fraction,
-LWdown,surface_downwelling_longwave_flux_in_air,W/m2,Surface incident longwave radiation,Driver,real,lon,lat,time,NA,Surface incident longwave radiation,
+CO2air,NA,micromol mol-1,Near surface CO2 concentration,Driver,real,lon,lat,time,NA,Near surface dry air CO2 mole fraction expressed as ppmv (umol / mol),
+CO2air_mf,mole_fraction_of_carbon_dioxide_in_air,1,Near-surface CO2 mole fraction (CF-compliant),Driver,real,lon,lat,time,NA,Mole fraction of CO2 in air near the surface; dimensionless (mol/mol).,
+LWdown,surface_downwelling_longwave_flux_in_air,W m-2,Surface incident longwave radiation,Driver,real,lon,lat,time,NA,Surface incident longwave radiation,
Psurf,air_pressure,Pa,Surface pressure,Driver,real,lon,lat,time,NA,Surface pressure,
Qair,specific_humidity,kg kg-1,Near surface specific humidity,Driver,real,lon,lat,time,NA,Near surface specific humidity,
Rainf,precipitation_flux,kg m-2 s-1,Rainfall rate,Driver,real,lon,lat,time,NA,Rainfall rate,
diff --git a/base/utils/man/PEcAn.Rd b/base/utils/man/PEcAn.Rd
index 4a6752d93c7..0bf5ab7e4f5 100644
--- a/base/utils/man/PEcAn.Rd
+++ b/base/utils/man/PEcAn.Rd
@@ -49,6 +49,14 @@ Current development is focused on developing PEcAn into a real-time data
assimilation and forecasting system. This system will provide a detailed
analysis of the past and present ecosystem functioning that seamlessly
transitions into forecasts.
+}
+\seealso{
+Useful links:
+\itemize{
+ \item \url{https://pecanproject.github.io}
+ \item Report bugs at \url{https://github.com/PecanProject/pecan/issues}
+}
+
}
\author{
\strong{Maintainer}: Rob Kooper \email{kooper@illinois.edu}
diff --git a/base/utils/man/extract_nc_sda.Rd b/base/utils/man/extract_nc_sda.Rd
new file mode 100644
index 00000000000..6e0d34fddc2
--- /dev/null
+++ b/base/utils/man/extract_nc_sda.Rd
@@ -0,0 +1,31 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/combine_model_netcdf.R
+\name{extract_nc_sda}
+\alias{extract_nc_sda}
+\title{Extract netCDF file by site.id, time window, and variable name.}
+\usage{
+extract_nc_sda(site.id, start.date, end.date, var.name, nc.path)
+}
+\arguments{
+\item{site.id}{numeric or character: identification of the site.}
+
+\item{start.date}{date in YYYY-MM-DD format: start date of the requested time window.}
+
+\item{end.date}{date in YYYY-MM-DD format: end date of the requested time window.}
+
+\item{var.name}{character: variable name.}
+
+\item{nc.path}{character: physical path to the target netCDF file.}
+}
+\value{
+list: a list contains requested array, time steps, site id, variable name, and ensemble size.
+}
+\description{
+Extract netCDF file by site.id, time window, and variable name.
+}
+\details{
+The function is only tested for netCDF files generated by the \code{nc_merge_all_sites_by_year} function.
+}
+\author{
+Dongchen Zhang
+}
diff --git a/base/utils/man/nc_merge_all_sites_by_year.Rd b/base/utils/man/nc_merge_all_sites_by_year.Rd
new file mode 100644
index 00000000000..45eb668c8a4
--- /dev/null
+++ b/base/utils/man/nc_merge_all_sites_by_year.Rd
@@ -0,0 +1,48 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/combine_model_netcdf.R
+\name{nc_merge_all_sites_by_year}
+\alias{nc_merge_all_sites_by_year}
+\title{Combine many netCDFs into one file per year}
+\usage{
+nc_merge_all_sites_by_year(
+ model.outdir,
+ nc.outdir,
+ ens.num,
+ site.ids,
+ start.date,
+ end.date,
+ time.step = "1 year",
+ cores = 1
+)
+}
+\arguments{
+\item{model.outdir}{character: path to the folder that contains model outputs.}
+
+\item{nc.outdir}{character: physical path to the folder that contains the merged netCDF files.}
+
+\item{ens.num}{numeric: number of ensembles for the model run.}
+
+\item{site.ids}{numeric or character: vector of site ids across locations.}
+
+\item{start.date}{date or character in YYYY-MM-DD format: start date of the model run.}
+
+\item{end.date}{date or character in YYYY-MM-DD format: end date of the model run.}
+
+\item{time.step}{character: time step of the model run. Default is 1 year.}
+
+\item{cores}{numeric: the number of CPUs for the parallel computation. Default is 1.}
+}
+\value{
+character: file paths to the merged netCDF files.
+}
+\description{
+Merges model outputted netCDF files by the time steps specified in a pecan settings file.
+}
+\details{
+The function is only tested for SIPNET model runs that were run with state data assimilation enabled.
+Please make sure you have the same netCDF formats if you want to proceed with different models.
+We could also have more functions that deal with different dimensions (e.g., by site instead of by year).
+}
+\author{
+Dongchen Zhang
+}
diff --git a/base/utils/man/nc_merge_single_site.Rd b/base/utils/man/nc_merge_single_site.Rd
new file mode 100644
index 00000000000..0b522fb1511
--- /dev/null
+++ b/base/utils/man/nc_merge_single_site.Rd
@@ -0,0 +1,39 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/combine_model_netcdf.R
+\name{nc_merge_single_site}
+\alias{nc_merge_single_site}
+\title{Merge model outputted netCDF files across ensembles for a single site.}
+\usage{
+nc_merge_single_site(model.outdir, nc.outdir, ens.num, site.id, time)
+}
+\arguments{
+\item{model.outdir}{character: physical path to the model output folder.}
+
+\item{nc.outdir}{character: physical path to the folder that contains the merged netCDF files.}
+
+\item{ens.num}{numeric: ensemble size.}
+
+\item{site.id}{numeric: identification number of the site.}
+
+\item{time}{numeric or character: the current time of netCDF files to be extracted.
+See details for use with non-numeric siteIDs}
+}
+\value{
+character: file path to the merged netCDF file.
+}
+\description{
+Merge model outputted netCDF files across ensembles for a single site.
+}
+\details{
+The function is only tested for SIPNET model runs.
+Please make sure you have the same netCDF formats if you want to proceed with different models.
+
+This function requires \code{site.id} to be an integer.
+If your sites have non-numeric IDs, one possible workaround is to
+pass a dummy value and then edit the output file afterward to replace its \code{site_id} variable
+with character data. If you do this, do be aware many legacy netCDF tools have poor support
+for netCDFs containing character data.
+}
+\author{
+Dongchen Zhang
+}
diff --git a/base/utils/man/nc_write_varfiles.Rd b/base/utils/man/nc_write_varfiles.Rd
new file mode 100644
index 00000000000..2b331060d34
--- /dev/null
+++ b/base/utils/man/nc_write_varfiles.Rd
@@ -0,0 +1,18 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/nc_write_varfiles.R
+\name{nc_write_varfiles}
+\alias{nc_write_varfiles}
+\title{Summarize netcdf variables into text file}
+\usage{
+nc_write_varfiles(nc_dir, write_mode = c("paired", "collected"))
+}
+\arguments{
+\item{nc_dir}{directory to scan for netCDF files (including in subdirs)}
+
+\item{write_mode}{how many files to write:
+"paired" creates files named \verb{*.nc.var} alongside every \verb{*.nc}, "collected" writes a single \verb{nc_vars.txt} that lists every variable
+defined in any nc file in the directory}
+}
+\description{
+Summarize netcdf variables into text file
+}
diff --git a/base/utils/tests/testthat/data/ensemble_fixtures/ENS-00001-e968e9c8f8574cb2/2019.nc b/base/utils/tests/testthat/data/ensemble_fixtures/ENS-00001-e968e9c8f8574cb2/2019.nc
new file mode 100644
index 00000000000..d6bec13480c
Binary files /dev/null and b/base/utils/tests/testthat/data/ensemble_fixtures/ENS-00001-e968e9c8f8574cb2/2019.nc differ
diff --git a/base/utils/tests/testthat/data/ensemble_fixtures/ENS-00001-e968e9c8f8574cb2/2020.nc b/base/utils/tests/testthat/data/ensemble_fixtures/ENS-00001-e968e9c8f8574cb2/2020.nc
new file mode 100644
index 00000000000..f83203242a5
Binary files /dev/null and b/base/utils/tests/testthat/data/ensemble_fixtures/ENS-00001-e968e9c8f8574cb2/2020.nc differ
diff --git a/base/utils/tests/testthat/data/ensemble_fixtures/ENS-00001-ebb783e86d2ac6fb/2019.nc b/base/utils/tests/testthat/data/ensemble_fixtures/ENS-00001-ebb783e86d2ac6fb/2019.nc
new file mode 100644
index 00000000000..30a986e3c4e
Binary files /dev/null and b/base/utils/tests/testthat/data/ensemble_fixtures/ENS-00001-ebb783e86d2ac6fb/2019.nc differ
diff --git a/base/utils/tests/testthat/data/ensemble_fixtures/ENS-00001-ebb783e86d2ac6fb/2020.nc b/base/utils/tests/testthat/data/ensemble_fixtures/ENS-00001-ebb783e86d2ac6fb/2020.nc
new file mode 100644
index 00000000000..4af6df14ccc
Binary files /dev/null and b/base/utils/tests/testthat/data/ensemble_fixtures/ENS-00001-ebb783e86d2ac6fb/2020.nc differ
diff --git a/base/utils/tests/testthat/data/ensemble_fixtures/ENS-00002-e968e9c8f8574cb2/2019.nc b/base/utils/tests/testthat/data/ensemble_fixtures/ENS-00002-e968e9c8f8574cb2/2019.nc
new file mode 100644
index 00000000000..9da58cdcec8
Binary files /dev/null and b/base/utils/tests/testthat/data/ensemble_fixtures/ENS-00002-e968e9c8f8574cb2/2019.nc differ
diff --git a/base/utils/tests/testthat/data/ensemble_fixtures/ENS-00002-e968e9c8f8574cb2/2020.nc b/base/utils/tests/testthat/data/ensemble_fixtures/ENS-00002-e968e9c8f8574cb2/2020.nc
new file mode 100644
index 00000000000..3bf8177755f
Binary files /dev/null and b/base/utils/tests/testthat/data/ensemble_fixtures/ENS-00002-e968e9c8f8574cb2/2020.nc differ
diff --git a/base/utils/tests/testthat/data/ensemble_fixtures/ENS-00002-ebb783e86d2ac6fb/2019.nc b/base/utils/tests/testthat/data/ensemble_fixtures/ENS-00002-ebb783e86d2ac6fb/2019.nc
new file mode 100644
index 00000000000..97ca728010b
Binary files /dev/null and b/base/utils/tests/testthat/data/ensemble_fixtures/ENS-00002-ebb783e86d2ac6fb/2019.nc differ
diff --git a/base/utils/tests/testthat/data/ensemble_fixtures/ENS-00002-ebb783e86d2ac6fb/2020.nc b/base/utils/tests/testthat/data/ensemble_fixtures/ENS-00002-ebb783e86d2ac6fb/2020.nc
new file mode 100644
index 00000000000..08aa1f47be8
Binary files /dev/null and b/base/utils/tests/testthat/data/ensemble_fixtures/ENS-00002-ebb783e86d2ac6fb/2020.nc differ
diff --git a/base/utils/tests/testthat/data/ensemble_fixtures/make_ensemble_fixtures.sh b/base/utils/tests/testthat/data/ensemble_fixtures/make_ensemble_fixtures.sh
new file mode 100755
index 00000000000..3a4e0ad62dc
--- /dev/null
+++ b/base/utils/tests/testthat/data/ensemble_fixtures/make_ensemble_fixtures.sh
@@ -0,0 +1,33 @@
+#!/bin/bash
+
+# vars & window to keep small
+VARS="GPP,AGB"
+YEARS="2019 2020"
+
+
+# input data location
+# these will need to be changed to wherever the model outputs are
+OUTPUTS=~/ccmmf/modelout/ccmmf_phase_2b_mixed_pfts_20250701/out/
+# where to write fixtures inside the repo
+# relative to pecan repository base dir
+FIXTURES=base/utils/tests/testthat/data/ensemble_fixtures/
+
+mkdir -p "$FIXTURES"
+
+for d in ENS-00001-e968e9c8f8574cb2 \
+ ENS-00001-ebb783e86d2ac6fb \
+ ENS-00002-e968e9c8f8574cb2 \
+ ENS-00002-ebb783e86d2ac6fb; do
+ mkdir -p "$FIXTURES/$d"
+ for y in $YEARS; do
+ in="$OUTPUTS/$d/$y.nc" # Define the input file path
+ out="$FIXTURES/$d/$y.nc"
+ # subset variables and time range (keeps lat=1, lon=1)
+ ncks -O -4 -v $VARS,lat,lon,time,time_bounds "$in" "$out"
+ # add metadata to file explaining where this came from
+ ncatted -O -a history,global,o,c,"Created by make_ensemble_fixtures.sh script on $(date)" "$out"
+ # compress by keeping only 3 significant digits
+ ncks -O --ppc default=3 "$out" "$out"
+ done
+done
+ls -larth */ | grep nc
\ No newline at end of file
diff --git a/base/utils/tests/testthat/test-combine_model_netcdf.R b/base/utils/tests/testthat/test-combine_model_netcdf.R
new file mode 100644
index 00000000000..4b9221c503d
--- /dev/null
+++ b/base/utils/tests/testthat/test-combine_model_netcdf.R
@@ -0,0 +1,90 @@
+test_that("merge_by_time works on real-ish fixtures", {
+
+ #--- Setup ------------------------------------------
+ library(ncdf4)
+ skip_if_not_installed("ncdf4")
+ skip_if_not(nzchar(Sys.which("cdo")), "cdo not available")
+
+ src <- testthat::test_path("data/ensemble_fixtures") # PEcAn output fixtures
+ wd <- withr::local_tempdir()
+
+ files_copied <- file.copy(list.files(src, full.names = TRUE), wd, recursive = TRUE)
+ expect_true(all(files_copied))
+
+ # Set fixed values describing test files
+ ens_num <- 2
+ site.ids <- c("e968e9c8f8574cb2", "ebb783e86d2ac6fb")
+ start.date <- "2019-01-01"
+ end.date <- "2020-12-31"
+
+  # --- run the merge function ----------------------
+ nc_merge_all_sites_by_year(
+ model.outdir = src,
+ nc.outdir = wd,
+ ens.num = ens_num,
+ site.ids = site.ids,
+ start.date = "2019-01-01",
+ end.date = "2020-12-31",
+ cores = 2
+ )
+
+ # Assert shape + contents
+ f <- ncdf4::nc_open(file.path(wd, "2019.nc")); withr::defer(ncdf4::nc_close(f))
+ expect_true(all(c("time", "site", "ensemble") %in% names(f$dim)))
+ sid <- ncdf4::ncvar_get(f, "site_id")
+ expect_equal(length(sid), f$dim$site$len)
+ expect_setequal(as.character(sid), unique(site.ids))
+
+ # Compare GPP for sites 1 and 2 in merged file to source file
+ # Site 1
+ gpp_merged <- ncdf4::ncvar_get(f, "GPP")
+ src_first <- file.path(
+ src,
+ "ENS-00001-e968e9c8f8574cb2",
+ "2019.nc"
+ )
+ f0 <- ncdf4::nc_open(src_first)
+ withr::defer(ncdf4::nc_close(f0))
+ gpp_src <- ncdf4::ncvar_get(f0, "GPP")
+
+ # take the first N time points that exist in both files
+ expect_equal(
+ gpp_merged,
+ gpp_src,
+ tolerance = 1e-6
+ )
+
+ # Site 2
+ gpp_merged_s2 <- ncdf4::ncvar_get(f, "GPP")
+ src_s2 <- file.path(
+ src,
+ "ENS-00001-ebb783e86d2ac6fb",
+ "2019.nc"
+ )
+ f2 <- ncdf4::nc_open(src_s2)
+ withr::defer(ncdf4::nc_close(f2))
+ gpp_src_s2 <- ncdf4::ncvar_get(f2, "GPP")
+
+ expect_equal(
+ gpp_merged_s2,
+ gpp_src_s2,
+ tolerance = 1e-6
+ )
+
+ # Ensure that re-running does not change contents
+ gpp_before <- ncdf4::ncvar_get(f, "GPP")
+ ncdf4::nc_close(f)
+ nc_merge_all_sites_by_year(
+ model.outdir = src,
+ nc.outdir = wd,
+ ens.num = ens_num,
+ site.ids = site.ids,
+ start.date = "2019-01-01",
+ end.date = "2020-12-31",
+ cores = 2
+ )
+ f <- ncdf4::nc_open(file.path(wd, "2019.nc"))
+ withr::defer(ncdf4::nc_close(f))
+ gpp_after <- ncdf4::ncvar_get(f, "GPP")
+ expect_equal(gpp_after, gpp_before, tolerance = 1e-6)
+})
\ No newline at end of file
diff --git a/base/utils/tests/testthat/test-nc_write_varfiles.R b/base/utils/tests/testthat/test-nc_write_varfiles.R
new file mode 100644
index 00000000000..8c9fa0fd57a
--- /dev/null
+++ b/base/utils/tests/testthat/test-nc_write_varfiles.R
@@ -0,0 +1,34 @@
+test_that("nc_write_varfiles", {
+ outdir <- withr::local_tempdir()
+
+ nc1 <- example_netcdf(c("a", "b"), file.path(outdir, "2015.nc"))
+ nc2 <- example_netcdf(c("a", "c"), file.path(outdir, "foo.nc"))
+
+
+ nc1_nm <- nc_longnames(nc1)
+ expect_equal(
+ nc1_nm,
+ data.frame(
+ name = c("a", "b"),
+ longname = c("a", "b"),
+ row.names = c("a", "b")
+ )
+ )
+
+
+ nc1_varfile <- file.path(outdir, "nc1_vars.txt")
+ nc_write_varfile(nc1_nm, nc1_varfile)
+ expect_equal(readLines(nc1_varfile), c("a a", "b b"))
+
+
+ nc_write_varfiles(outdir, write_mode = "paired")
+ var_files <- file.path(outdir, c("2015.nc.var", "foo.nc.var"))
+ expect_true(all(file.exists(var_files)))
+ expect_equal(readLines(var_files[[1]]), c("a a", "b b"))
+ expect_equal(readLines(var_files[[2]]), c("a a", "c c"))
+
+ vars_file <- file.path(outdir, "nc_vars.txt")
+ nc_write_varfiles(outdir, write_mode = "collected")
+ expect_true(file.exists(vars_file))
+ expect_equal(readLines(vars_file), c("a a", "b b", "c c"))
+})
diff --git a/base/utils/tests/testthat/test-unit_is_parseable.R b/base/utils/tests/testthat/test-unit_is_parseable.R
index fc6d11f5112..075ecbdac66 100644
--- a/base/utils/tests/testthat/test-unit_is_parseable.R
+++ b/base/utils/tests/testthat/test-unit_is_parseable.R
@@ -11,11 +11,7 @@ test_that("Non-parseable unit", {
expect_false(unit_is_parseable("kg / fake"))
expect_false(unit_is_parseable(NULL))
- # Note: This behavior differs from `udunits2::ud.is.parseable("")`
- # (which returns TRUE), but is better aligned with PEcAn's usage
- # of "parseable" to mean "will work when passed to ud_convert".
- # Since ud_convert does not support any useful conversions of "",
- # we report it as unparseable.
- expect_false(unit_is_parseable(""))
+ # Note: Deleted test of unit_is_parseable("") here.
+ # It was FALSE with {units} < v1.0, TRUE after that... and the change turned
+ # out not to make any practical difference in PEcAn, so why test it?
})
-
diff --git a/base/visualization/DESCRIPTION b/base/visualization/DESCRIPTION
index bbf4f281061..3dce1c83d7b 100644
--- a/base/visualization/DESCRIPTION
+++ b/base/visualization/DESCRIPTION
@@ -1,7 +1,7 @@
Package: PEcAn.visualization
Type: Package
Title: PEcAn visualization functions
-Version: 1.8.1
+Version: 1.8.2
Authors@R: c(person("Mike", "Dietze", role = c("aut"),
email = "dietze@bu.edu"),
person("David", "LeBauer", role = c("aut", "cre"),
@@ -27,6 +27,8 @@ Description: The Predictive Ecosystem Carbon Analyzer (PEcAn) is a scientific
efficacy of scientific investigation.
This module is used to create more complex visualizations from the data
generated by PEcAn code, specifically the models.
+URL: https://pecanproject.github.io
+BugReports: https://github.com/PecanProject/pecan/issues
Imports:
data.table,
ggplot2,
@@ -54,3 +56,4 @@ Encoding: UTF-8
VignetteBuilder: knitr, rmarkdown
RoxygenNote: 7.3.2
Roxygen: list(markdown = TRUE)
+X-schema.org-keywords: PEcAn, data-visualization, statistical-graphics, ggplot2
diff --git a/base/visualization/NEWS.md b/base/visualization/NEWS.md
index 300101a43e8..224a9b04d24 100644
--- a/base/visualization/NEWS.md
+++ b/base/visualization/NEWS.md
@@ -1,3 +1,9 @@
+# PEcAn.visualization 1.8.2
+
+Added keywords and bug reporting URL to DESCRIPTION. No code changes in this release.
+
+
+
# PEcAn.visualization 1.8.1
## License change
diff --git a/base/workflow/DESCRIPTION b/base/workflow/DESCRIPTION
index 7c13ebc6d49..43a543cad74 100644
--- a/base/workflow/DESCRIPTION
+++ b/base/workflow/DESCRIPTION
@@ -2,7 +2,7 @@ Package: PEcAn.workflow
Type: Package
Title: PEcAn Functions Used for Ecological Forecasts and
Reanalysis
-Version: 1.9.0
+Version: 1.10.0
Authors@R: c(person("Mike", "Dietze", role = c("aut"),
email = "dietze@bu.edu"),
person("David", "LeBauer", role = c("aut", "cre"),
@@ -25,6 +25,8 @@ Description: The Predictive Ecosystem Carbon Analyzer
models, and to improve the efficacy of scientific
investigation. This package provides workhorse functions
that can be used to run the major steps of a PEcAn analysis.
+URL: https://pecanproject.github.io
+BugReports: https://github.com/PecanProject/pecan/issues
License: BSD_3_clause + file LICENSE
Imports:
dplyr,
@@ -46,3 +48,4 @@ Suggests:
Copyright: Authors
Encoding: UTF-8
RoxygenNote: 7.3.2
+X-schema.org-keywords: PEcAn, data-assimilation, ecosystem-modeling, reproducibility
diff --git a/base/workflow/NEWS.md b/base/workflow/NEWS.md
index 0957ac8b927..da1ad6e7c7d 100644
--- a/base/workflow/NEWS.md
+++ b/base/workflow/NEWS.md
@@ -1,3 +1,13 @@
+# PEcAn.workflow 1.10.0
+
+## Changed
+
+Breaking: Ensemble runs now use shared input samples across all sites instead of independent sampling per site.
+
+* `run.write.configs()` now has two new required args `ensemble.size` and `input_design`, and removes `ens.sample.method` (#3535, #3612).
+* `runModule.run.write.configs()` has new arg `input_design`, with default NULL meaning to generate one internally (#3535).
+
+
# PEcAn.workflow 1.9.0
* PEcAn.workflow is now distributed under the BSD 3-clause license instead of the NCSA Open Source license.
diff --git a/base/workflow/R/do_conversions.R b/base/workflow/R/do_conversions.R
index b3527ccf8fc..4a380b88bc6 100644
--- a/base/workflow/R/do_conversions.R
+++ b/base/workflow/R/do_conversions.R
@@ -30,6 +30,12 @@ do_conversions <- function(settings, overwrite.met = FALSE, overwrite.fia = FALS
input.tag <- names(settings$run$input)[i]
PEcAn.logger::logger.info("PROCESSING: ",input.tag)
+
+  # Skip inputs that already have a path set, unless force is requested
+ if (!is.null(input$path) && is.null(input$force)) {
+ PEcAn.logger::logger.info("Skipping input with existing path:", input.tag)
+ next
+ }
ic.flag <- fia.flag <- FALSE
diff --git a/base/workflow/R/run.write.configs.R b/base/workflow/R/run.write.configs.R
index a219b94a043..31e0a388d29 100644
--- a/base/workflow/R/run.write.configs.R
+++ b/base/workflow/R/run.write.configs.R
@@ -5,9 +5,12 @@
#' \code{write.config.*} function for your specific ecosystem model
#' (e.g. write.config.ED2, write.config.SIPNET).
#'
+#'
#' @param settings a PEcAn settings list
+#' @param ensemble.size number of ensemble runs
+#' @param input_design data frame containing the design matrix describing parameter and input indices, as
+#' documented in \code{runModule.run.write.configs()}.
#' @param write should the runs be written to the database?
-#' @param ens.sample.method how to sample the ensemble members('halton' sequence or 'uniform' random)
#' @param posterior.files Filenames for posteriors for drawing samples for ensemble and sensitivity
#' analysis (e.g. post.distns.Rdata, or prior.distns.Rdata)
#' @param overwrite logical: Replace output files that already exist?
@@ -22,62 +25,79 @@
#' @export
#'
#' @author David LeBauer, Shawn Serbin, Ryan Kelly, Mike Dietze
-run.write.configs <- function(settings, write = TRUE, ens.sample.method = "uniform",
- posterior.files = rep(NA, length(settings$pfts)),
+
+run.write.configs <- function(settings, ensemble.size, input_design, write = TRUE,
+ posterior.files = rep(NA, length(settings$pfts)),
overwrite = TRUE) {
+
+ # Validate that input_design matches ensemble.size
+ if (nrow(input_design) != ensemble.size) {
+ stop(
+ "input_design has ", nrow(input_design), " rows, but ensemble.size is ",
+ ensemble.size, ".The design matrix must have exactly one row for each run."
+ )
+ }
+
## Skip database connection if settings$database is NULL or write is False
if (!isTRUE(write) && is.null(settings$database)) {
PEcAn.logger::logger.info("Not writing this run to database, so database connection skipped")
- con <- NULL # Set con to NULL to avoid errors in subsequent code
- } else if(is.null(settings$database)) {
+ con <- NULL # Set con to NULL to avoid errors in subsequent code
+ } else if (is.null(settings$database)) {
PEcAn.logger::logger.error(
"Database is NULL but writing is enabled. Provide valid database settings in pecan.xml."
)
stop("Database connection required but settings$database is NULL.")
- } else{
- tryCatch({
- con <- PEcAn.DB::db.open(settings$database$bety)
- on.exit(PEcAn.DB::db.close(con), add = TRUE)
- }, error = function(e) {
- PEcAn.logger::logger.severe(
- "Connection requested, but failed to open with the following error: ",
- conditionMessage(e))
- })
+ } else {
+ tryCatch(
+ {
+ con <- PEcAn.DB::db.open(settings$database$bety)
+ on.exit(PEcAn.DB::db.close(con), add = TRUE)
+ },
+ error = function(e) {
+ PEcAn.logger::logger.severe(
+ "Connection requested, but failed to open with the following error: ",
+ conditionMessage(e)
+ )
+ }
+ )
}
-
+
## Which posterior to use?
for (i in seq_along(settings$pfts)) {
## if posterior.files is specified us that
if (is.na(posterior.files[i])) {
## otherwise, check to see if posteriorid exists
if (!is.null(settings$pfts[[i]]$posteriorid)) {
- #TODO: sometimes `files` is a 0x0 tibble and other operations with it fail.
+ # TODO: sometimes `files` is a 0x0 tibble and other operations with it fail.
files <- PEcAn.DB::dbfile.check("Posterior",
- settings$pfts[[i]]$posteriorid,
- con, settings$host$name, return.all = TRUE)
- pid <- grep("post.distns.*Rdata", files$file_name) ## is there a posterior file?
+ settings$pfts[[i]]$posteriorid,
+ con, settings$host$name,
+ return.all = TRUE
+ )
+ pid <- grep("post.distns.*Rdata", files$file_name) ## is there a posterior file?
if (length(pid) == 0) {
- pid <- grep("prior.distns.Rdata", files$file_name) ## is there a prior file?
+ pid <- grep("prior.distns.Rdata", files$file_name) ## is there a prior file?
}
if (length(pid) > 0) {
posterior.files[i] <- file.path(files$file_path[pid], files$file_name[pid])
- } ## otherwise leave posteriors as NA
+ } ## otherwise leave posteriors as NA
}
## otherwise leave NA and get.parameter.samples will look for local
} else {
## does posterior.files point to a directory instead of a file?
- if(utils::file_test("-d",posterior.files[i])){
- pfiles = dir(posterior.files[i],pattern = "post.distns.*Rdata",full.names = TRUE)
- if(length(pfiles)>1){
- pid = grep("post.distns.Rdata",pfiles)
- if(length(pid > 0)){
- pfiles = pfiles[grep("post.distns.Rdata",pfiles)]
+ if (utils::file_test("-d", posterior.files[i])) {
+ pfiles <- dir(posterior.files[i], pattern = "post.distns.*Rdata", full.names = TRUE)
+ if (length(pfiles) > 1) {
+ pid <- grep("post.distns.Rdata", pfiles)
+ if (length(pid > 0)) {
+ pfiles <- pfiles[grep("post.distns.Rdata", pfiles)]
} else {
PEcAn.logger::logger.error(
"run.write.configs: could not uniquely identify posterior files within",
- posterior.files[i])
+ posterior.files[i]
+ )
}
- posterior.files[i] = pfiles
+ posterior.files[i] <- pfiles
}
}
## also, double check PFT outdir exists
@@ -85,111 +105,129 @@ run.write.configs <- function(settings, write = TRUE, ens.sample.method = "unifo
## no outdir
settings$pfts[[i]]$outdir <- file.path(settings$outdir, "pfts", settings$pfts[[i]]$name)
}
- } ## end else
+ } ## end else
} ## end for loop over pfts
-
+
## Sample parameters
model <- settings$model$type
scipen <- getOption("scipen")
options(scipen = 12)
-
- PEcAn.uncertainty::get.parameter.samples(settings, posterior.files, ens.sample.method)
+
samples.file <- file.path(settings$outdir, "samples.Rdata")
if (file.exists(samples.file)) {
samples <- new.env()
load(samples.file, envir = samples) ## loads ensemble.samples, trait.samples, sa.samples, runs.samples, env.samples
trait.samples <- samples$trait.samples
- ensemble.samples <- samples$ensemble.samples
+ trait_sample_indices <- input_design[["param"]]
+ ensemble.samples <- list()
+ for (pft in names(trait.samples)) {
+ pft_traits <- trait.samples[[pft]]
+ ensemble.samples[[pft]] <- as.data.frame(
+ lapply(
+ names(pft_traits),
+ function(trait) pft_traits[[trait]][trait_sample_indices]
+ )
+ )
+ names(ensemble.samples[[pft]]) <- names(pft_traits)
+ }
sa.samples <- samples$sa.samples
runs.samples <- samples$runs.samples
## env.samples <- samples$env.samples
} else {
PEcAn.logger::logger.error(samples.file, "not found, this file is required by the run.write.configs function")
}
-
+
## remove previous runs.txt
if (overwrite && file.exists(file.path(settings$rundir, "runs.txt"))) {
PEcAn.logger::logger.warn("Existing runs.txt file will be removed.")
unlink(file.path(settings$rundir, "runs.txt"))
}
-
+
PEcAn.utils::load.modelpkg(model)
-
+
## Check for model-specific write configs
-
- my.write.config <- paste0("write.config.",model)
+
+ my.write.config <- paste0("write.config.", model)
if (!exists(my.write.config)) {
- PEcAn.logger::logger.error(my.write.config,
- "does not exist, please make sure that the model package contains a function called",
- my.write.config)
+ PEcAn.logger::logger.error(
+ my.write.config,
+ "does not exist, please make sure that the model package contains a function called",
+ my.write.config
+ )
}
-
+
## Prepare for model output. Clean up any old config files (if exists)
- #TODO: shouldn't this check if the files exist before removing them?
+ # TODO: shouldn't this check if the files exist before removing them?
my.remove.config <- paste0("remove.config.", model)
if (exists(my.remove.config)) {
do.call(my.remove.config, args = list(settings$rundir, settings))
}
-
+
# TODO RK : need to write to runs_inputs table
-
+
# Save names
pft.names <- names(trait.samples)
trait.names <- lapply(trait.samples, names)
-
+
### NEED TO IMPLEMENT: Load Environmental Priors and Posteriors
-
+
### Sensitivity Analysis
if ("sensitivity.analysis" %in% names(settings)) {
-
### Write out SA config files
- PEcAn.logger::logger.info("\n ----- Writing model run config files ----")
- sa.runs <- PEcAn.uncertainty::write.sa.configs(defaults = settings$pfts,
- quantile.samples = sa.samples,
- settings = settings,
- model = model,
- write.to.db = write)
-
+ PEcAn.logger::logger.info("\n ----- Writing model config files for sensitivity run ----")
+ sa.runs <- PEcAn.uncertainty::write.sa.configs(
+ defaults = settings$pfts,
+ quantile.samples = sa.samples,
+ settings = settings,
+ model = model,
+ write.to.db = write
+ )
+
# Store output in settings and output variables
runs.samples$sa <- sa.run.ids <- sa.runs$runs
settings$sensitivity.analysis$ensemble.id <- sa.ensemble.id <- sa.runs$ensemble.id
-
+
# Save sensitivity analysis info
fname <- PEcAn.uncertainty::sensitivity.filename(settings, "sensitivity.samples", "Rdata",
- all.var.yr = TRUE, pft = NULL)
+ all.var.yr = TRUE, pft = NULL
+ )
save(sa.run.ids, sa.ensemble.id, sa.samples, pft.names, trait.names, file = fname)
-
- } ### End of SA
-
+ } ### End of SA
+
### Write ENSEMBLE
if ("ensemble" %in% names(settings)) {
- ens.runs <- PEcAn.uncertainty::write.ensemble.configs(defaults = settings$pfts,
- ensemble.samples = ensemble.samples,
- settings = settings,
- model = model,
- write.to.db = write)
-
+ ens.runs <- PEcAn.uncertainty::write.ensemble.configs(
+ defaults = settings$pfts,
+ ensemble.size = ensemble.size,
+ ensemble.samples = ensemble.samples,
+ settings = settings,
+ model = model,
+ input_design = input_design,
+ write.to.db = write
+ )
+
# Store output in settings and output variables
runs.samples$ensemble <- ens.run.ids <- ens.runs$runs
settings$ensemble$ensemble.id <- ens.ensemble.id <- ens.runs$ensemble.id
- ens.samples <- ensemble.samples # rename just for consistency
-
+ ens.samples <- ensemble.samples # rename just for consistency
+
# Save ensemble analysis info
fname <- PEcAn.uncertainty::ensemble.filename(settings, "ensemble.samples", "Rdata", all.var.yr = TRUE)
save(ens.run.ids, ens.ensemble.id, ens.samples, pft.names, trait.names, file = fname)
} else {
PEcAn.logger::logger.info("not writing config files for ensemble, settings are NULL")
- } ### End of Ensemble
-
+ } ### End of Ensemble
+
PEcAn.logger::logger.info("###### Finished writing model run config files #####")
PEcAn.logger::logger.info("config files samples in ", file.path(settings$outdir, "run"))
-
+
### Save output from SA/Ensemble runs
- # A lot of this is duplicate with the ensemble/sa specific output above, but kept for backwards compatibility.
- save(ensemble.samples, trait.samples, sa.samples, runs.samples, pft.names, trait.names,
- file = file.path(settings$outdir, "samples.Rdata"))
+ # A lot of this is duplicate with the ensemble/sa specific output above, but kept for backwards compatibility.
+ save(ensemble.samples, trait.samples, sa.samples, runs.samples, pft.names, trait.names,
+ file = file.path(settings$outdir, "samples.Rdata")
+ )
PEcAn.logger::logger.info("parameter values for runs in ", file.path(settings$outdir, "samples.RData"))
options(scipen = scipen)
-
- return(invisible(settings))
+ invisible(settings)
+ return(settings)
}
diff --git a/base/workflow/R/runModule.run.write.configs.R b/base/workflow/R/runModule.run.write.configs.R
index 4061351e4ca..ae6ef91a90c 100644
--- a/base/workflow/R/runModule.run.write.configs.R
+++ b/base/workflow/R/runModule.run.write.configs.R
@@ -2,34 +2,80 @@
#'
#' @param settings a PEcAn Settings or MultiSettings object
#' @param overwrite logical: Replace config files if they already exist?
+#' @param input_design data.frame design matrix linking parameter draws and any
+#' sampled inputs across runs. Include a `param` column whose values select
+#' rows from `trait.samples`/`ensemble.samples` plus optional columns named for
+#' `settings$run$inputs` tags (e.g. `met`, `soil`) with index (i.e., row number)
+#' into each input's `path` list. Provide at least one row per planned run
+#' (median + all SA members and/or `ensemble.size`). Usually generated by
+#' `generate_joint_ensemble_design()` but custom designs may be supplied.
+#' If NULL, `generate_joint_ensemble_design()` will be called internally.
#' @return A modified settings object, invisibly
#' @importFrom dplyr %>%
#' @export
-runModule.run.write.configs <- function(settings, overwrite = TRUE) {
+
+runModule.run.write.configs <- function(settings,
+ overwrite = TRUE,
+ input_design = NULL) {
if (PEcAn.settings::is.MultiSettings(settings)) {
if (overwrite && file.exists(file.path(settings$rundir, "runs.txt"))) {
PEcAn.logger::logger.warn("Existing runs.txt file will be removed.")
unlink(file.path(settings$rundir, "runs.txt"))
}
- return(PEcAn.settings::papply(settings, runModule.run.write.configs, overwrite = FALSE))
+ if (is.null(input_design)) {
+ ensemble_size <- settings$ensemble$size
+ design_result <- PEcAn.uncertainty::generate_joint_ensemble_design(
+ settings = settings[1],
+ ensemble_size = ensemble_size
+ )
+ input_design <- design_result$X
+ }
+
+ # Validate design matrix size for MultiSettings
+ if (!is.null(settings$ensemble$size) && nrow(input_design) != settings$ensemble$size) {
+ PEcAn.logger::logger.severe("Input_design has", nrow(input_design), "rows but settings$ensemble$size is",
+ settings$ensemble$size, ". Design matrix must have exactly one row per run.")
+ }
+
+ return(PEcAn.settings::papply(settings,
+ runModule.run.write.configs,
+ overwrite = FALSE,
+ input_design = input_design))
} else if (PEcAn.settings::is.Settings(settings)) {
# double check making sure we have method for parameter sampling
if (is.null(settings$ensemble$samplingspace$parameters$method)) {
settings$ensemble$samplingspace$parameters$method <- "uniform"
}
+ if (is.null(input_design)) {
+ ensemble_size <- settings$ensemble$size
+ design_result <- PEcAn.uncertainty::generate_joint_ensemble_design(
+ settings = settings,
+ ensemble_size = ensemble_size
+ )
+ input_design <- design_result$X
+ }
+
+ # Validate design matrix size for Settings
+ if (!is.null(settings$ensemble$size) && nrow(input_design) != settings$ensemble$size) {
+ PEcAn.logger::logger.severe("Input_design has", nrow(input_design), "rows but settings$ensemble$size is",
+ settings$ensemble$size, ". Design matrix must have exactly one row per run.")
+ }
+
+ ensemble_size <- nrow(input_design)
- #check to see if there are posterior.files tags under pft
- posterior.files <- settings$pfts %>%
+ # check to see if there are posterior.files tags under pft
+ posterior.files <- settings$pfts %>%
purrr::map_chr("posterior.files", .default = NA_character_)
return(PEcAn.workflow::run.write.configs(
settings = settings,
+ ensemble.size = ensemble_size,
write = isTRUE(settings$database$bety$write), # treat null as FALSE
- ens.sample.method = settings$ensemble$samplingspace$parameters$method,
posterior.files = posterior.files,
- overwrite = overwrite
+ overwrite = overwrite,
+ input_design = input_design
))
} else {
stop("runModule.run.write.configs only works with Settings or MultiSettings")
diff --git a/base/workflow/man/run.write.configs.Rd b/base/workflow/man/run.write.configs.Rd
index b5be69a3287..e53c6924db0 100644
--- a/base/workflow/man/run.write.configs.Rd
+++ b/base/workflow/man/run.write.configs.Rd
@@ -6,8 +6,9 @@
\usage{
run.write.configs(
settings,
+ ensemble.size,
+ input_design,
write = TRUE,
- ens.sample.method = "uniform",
posterior.files = rep(NA, length(settings$pfts)),
overwrite = TRUE
)
@@ -15,9 +16,12 @@ run.write.configs(
\arguments{
\item{settings}{a PEcAn settings list}
-\item{write}{should the runs be written to the database?}
+\item{ensemble.size}{number of ensemble runs}
+
+\item{input_design}{data frame containing the design matrix describing parameter and input indices, as
+documented in \code{runModule.run.write.configs()}.}
-\item{ens.sample.method}{how to sample the ensemble members('halton' sequence or 'uniform' random)}
+\item{write}{should the runs be written to the database?}
\item{posterior.files}{Filenames for posteriors for drawing samples for ensemble and sensitivity
analysis (e.g. post.distns.Rdata, or prior.distns.Rdata)}
diff --git a/base/workflow/man/runModule.run.write.configs.Rd b/base/workflow/man/runModule.run.write.configs.Rd
index fcb253947b3..3ab40fc4b2c 100644
--- a/base/workflow/man/runModule.run.write.configs.Rd
+++ b/base/workflow/man/runModule.run.write.configs.Rd
@@ -4,12 +4,21 @@
\alias{runModule.run.write.configs}
\title{Generate model-specific run configuration files for one or more PEcAn runs}
\usage{
-runModule.run.write.configs(settings, overwrite = TRUE)
+runModule.run.write.configs(settings, overwrite = TRUE, input_design = NULL)
}
\arguments{
\item{settings}{a PEcAn Settings or MultiSettings object}
\item{overwrite}{logical: Replace config files if they already exist?}
+
+\item{input_design}{data.frame design matrix linking parameter draws and any
+ sampled inputs across runs. Include a `param` column whose values select
+ rows from `trait.samples`/`ensemble.samples` plus optional columns named for
+ `settings$run$inputs` tags (e.g. `met`, `soil`) with index (i.e., row number)
+ into each input's `path` list. Provide at least one row per planned run
+ (median + all SA members and/or `ensemble.size`). Usually generated by
+ `generate_joint_ensemble_design()` but custom designs may be supplied.
+If NULL, `generate_joint_ensemble_design()` will be called internally.}
}
\value{
A modified settings object, invisibly
diff --git a/book_source/02_demos_tutorials_workflows/05_developer_workflows/01_update_pecan_code.Rmd b/book_source/02_demos_tutorials_workflows/05_developer_workflows/01_update_pecan_code.Rmd
index 32b84955aab..161a55e9b4a 100644
--- a/book_source/02_demos_tutorials_workflows/05_developer_workflows/01_update_pecan_code.Rmd
+++ b/book_source/02_demos_tutorials_workflows/05_developer_workflows/01_update_pecan_code.Rmd
@@ -2,7 +2,7 @@
Release notes for all releases can be found [here](https://github.com/PecanProject/pecan/releases).
-This page will only list any steps you have to do to upgrade an existing system. When updating PEcAn it is highly encouraged to update BETY. You can find instructions on how to do this, as well on how to update the database in the [Updating BETYdb](https://pecan.gitbooks.io/betydb-documentation/content/updating_betydb_when_new_versions_are_released.html) gitbook page.
+This page will only list any steps you have to do to upgrade an existing system. When updating PEcAn it is highly encouraged to update BETY. You can find instructions on how to do this, as well as how to update the database, on the [Updating BETYdb](https://pecan.gitbook.io/betydbdoc-dataentry) gitbook page.
### Updating PEcAn {#pecan-make}
diff --git a/book_source/03_topical_pages/02_pecan_standards.Rmd b/book_source/03_topical_pages/02_pecan_standards.Rmd
index 35703a846ef..2a43afb1c99 100644
--- a/book_source/03_topical_pages/02_pecan_standards.Rmd
+++ b/book_source/03_topical_pages/02_pecan_standards.Rmd
@@ -8,7 +8,7 @@
## Time Standard
Internal PEcAn standard time follows ISO_8601 format for dates and time (https://en.wikipedia.org/wiki/ISO_8601). For example ordinal dates go from 1 365/366 (https://en.wikipedia.org/wiki/ISO_8601#Ordinal_dates). However, time used in met drivers or model outputs follows CF convention with julian dates following the 0 to 364/365 format
-To aid in the conversion between PEcAn internal ISO_8601 standard and CF convention used in all met drivers and PEcAn standard output you can utilize the functions: "cf2datetime","datetime2doy","cf2doy", and for SIPNET "sipnet2datetime"
+To aid in the conversion between PEcAn internal ISO_8601 standard and CF convention used in all met drivers and PEcAn standard output you can utilize the functions: "cf2datetime", "datetime2doy", and "cf2doy"
## Input Standards
diff --git a/book_source/03_topical_pages/03_pecan_xml.Rmd b/book_source/03_topical_pages/03_pecan_xml.Rmd
index 7f521941cbf..86a94c321e4 100644
--- a/book_source/03_topical_pages/03_pecan_xml.Rmd
+++ b/book_source/03_topical_pages/03_pecan_xml.Rmd
@@ -580,6 +580,9 @@ Tags related to ensemble setup are:
* `size` : (required) the number of runs in the ensemble.
* `samplingspace`: (optional) Contains tags for defining how the ensembles will be generated.
+Shared sampling design: In multi-site workflows, PEcAn now generates one joint ensemble design and reuses the same input sample indices (parameters, meteorology, etc.) across all sites to ensure consistent draws; previously, inputs were sampled independently per site. This change does not introduce new XML tags and applies whenever multiple sites are processed together.
+The joint sampling design is created once at the start of configuration using generate_joint_ensemble_design(), called from the run configuration module, and the resulting indices are threaded through to write.ensemble.configs().
+
Each piece in the sampling space can potentially have a method tag and a parent tag. Method refers to the sampling method and parent refers to the cases where we need to link the samples of two components. When no tag is defined for one component, one sample will be generated and used for all the ensembles. This allows for partitioning/studying different sources of uncertainties. For example, if no met tag is defined then, one met path will be used for all the ensembles and as a result the output uncertainty will come from the variability in the parameters. At the moment no sampling method is implemented for soil and vegetation.
Available sampling methods for `parameters` can be found in the documentation of the `PEcAn.utils::get.ensemble.samples` function.
For the cases where we need simulations with a predefined set of parameters, met and initial condition we can use the restart argument. Restart needs to be a list with name tags of `runid`, `inputs`, `new.params` (parameters), `new.state` (initial condition), `ensemble.id` (ensemble ids), `start.time`, and `stop.time`.
@@ -633,6 +636,35 @@ This information is currently used by the following PEcAn workflow functions:
- `PEcAn.::write.configs.` -- See [above](#pecan-write-configs)
- `PEcAn.uncertainty::run.sensitivity.analysis` -- Executes the uncertainty analysis
+#### Coordinating inputs with the `input_design` design matrix {#xml-input-design}
+
+Multi-site ensembles that sample over input files use an `input_design`
+data.frame to keep parameter draws and input files aligned across runs. The
+design is created up front (typically via `generate_joint_ensemble_design()`)
+and passed to `runModule.run.write.configs()`. It is not saved automatically to
+`samples.Rdata`, so keep your copy if you need to reuse it.
+
+- **Parameter column:** `param` gives the index (i.e. row number) of the
+posterior draw to use for this run. For example, `param = 5` means use the 5th
+parameter sample from `samples.Rdata`.
+- **Input columns:** any name that matches a tag under `run/inputs` (for
+example `met`, `soil`, `veg`, `poolinitcond`). Values are indices into that
+input’s `path` list. Leaving a column out keeps that input fixed across runs.
+- **Row count and order:** must include exactly one row per run. For ensembles
+  this means `ensemble.size` rows, matched to runs in order (row *i* → run *i*).
+
+Example layout (CSV or `data.frame`):
+
+| param | met | soil |
+|------:|----:|-----:|
+| 1 | 1 | 1 |
+| 2 | 2 | 1 |
+| 3 | 1 | 2 |
+| 4 | 2 | 2 |
+
+In this example, run 2 would use the second parameter draw and switch to the
+second met driver while keeping the first soil file.
+
### Parameter Data Assimilation {#xml-parameter-data-assimilation}
The following tags can be used for parameter data assimilation. More detailed information can be found here: [Parameter Data Assimilation Documentation](#pda)
diff --git a/book_source/Makefile b/book_source/Makefile
index 1411d6fcd75..afb47d5b146 100755
--- a/book_source/Makefile
+++ b/book_source/Makefile
@@ -1,6 +1,6 @@
-.PHONY: all bkdcheck build clean deploy
+.PHONY: all bkdcheck build clean
-all: bkdcheck build deploy
+all: bkdcheck build
bkdcheck:
./check_bkd_pkg.R
@@ -28,11 +28,10 @@ build: bkdcheck
# Remove when this is fixed in Bookdown
Rscript -e 'options(bookdown.render.file_scope=FALSE); bookdown::render_book("index.Rmd", "bookdown::gitbook")'
-clean:
- rm -rf ../book/*
+CLEAN_DIRS = _book extfiles
-deploy: build
- ./deploy.sh
+clean:
+ rm -rf $(CLEAN_DIRS)
pdf: bkdcheck
Rscript -e 'options(bookdown.render.file_scope=FALSE); bookdown::render_book("index.Rmd", "bookdown::pdf_book")'
diff --git a/book_source/deploy.sh b/book_source/deploy.sh
deleted file mode 100755
index 112ebb1e479..00000000000
--- a/book_source/deploy.sh
+++ /dev/null
@@ -1,62 +0,0 @@
-#!/bin/bash
-
-#exit on error
-set -e
-
-#check for environment variable
-if [ -z "${GITHUB_PAT}" ]; then
- echo "GITHUB_PAT is not set. Not deploying."
- exit 0
-fi
-
-#Print who made GITHUB_PAT variable
-echo "GITHUB_PAT variable made by Tony Gardella"
-
-# don't run on pull requests
-if [ "$TRAVIS_PULL_REQUEST" != "false" ]; then
- echo "TRAVIS_PULL_REQUEST is 'true'. Not building documentation."
- exit 0
-fi
-
-# find version if we are develop/latest/release and if should be pushed
-if [ "$TRAVIS_BRANCH" = "master" ]; then
- VERSION="master"
-elif [ "$TRAVIS_BRANCH" = "develop" ]; then
- VERSION="develop"
-elif [ "$( echo $TRAVIS_BRANCH | sed -e 's#^release/.*$#release#')" = "release" ]; then
- VERSION="$( echo $TRAVIS_BRANCH | sed -e 's#^release/\(.*\)$#\1#' )"
-else
- echo "Not Master, Develop, or Release Branch. Will not render Book."
- exit 0
-fi
-
-#set USER
-GH_USER=${TRAVIS_REPO_SLUG%/*}
-
-# configure your name and email if you have not done so
-git config --global user.email "pecanproj@gmail.com"
-git config --global user.name "TRAVIS-DOC-BUILD"
-
-# Don't deploy if documentation git repo does not exist
-GH_STATUS=$(curl -s -w %{http_code} -I https://github.com/${GH_USER}/pecan-documentation -o /dev/null)
-if [[ $GH_STATUS != 200 ]]; then
- echo "Can't find a repository at https://github.com/${GH_USER}/pecan-documentation"
- echo "Will not render Book."
- exit 0
-fi
-
-git clone https://${GITHUB_PAT}@github.com/${GH_USER}/pecan-documentation.git book_hosted
-cd book_hosted
-
-## Check if branch named directory exists
-if [ ! -d $VERSION ]; then
- mkdir $VERSION
-fi
-
-# copy new documentation
-rsync -a --delete ../_book/ $VERSION/
-
-# push updated documentation back up
-git add --all *
-git commit -m "Update the book `date`" || true
-git push -q origin master
diff --git a/modules/DART/DART/Kodiak/mkmf/mkmf.template b/contrib/DART/DART/Kodiak/mkmf/mkmf.template
similarity index 100%
rename from modules/DART/DART/Kodiak/mkmf/mkmf.template
rename to contrib/DART/DART/Kodiak/mkmf/mkmf.template
diff --git a/modules/DART/DART/Kodiak/models/ED2/ED2IN b/contrib/DART/DART/Kodiak/models/ED2/ED2IN
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/ED2IN
rename to contrib/DART/DART/Kodiak/models/ED2/ED2IN
diff --git a/modules/DART/DART/Kodiak/models/ED2/input.nml b/contrib/DART/DART/Kodiak/models/ED2/input.nml
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/input.nml
rename to contrib/DART/DART/Kodiak/models/ED2/input.nml
diff --git a/modules/DART/DART/Kodiak/models/ED2/model_mod.f90 b/contrib/DART/DART/Kodiak/models/ED2/model_mod.f90
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/model_mod.f90
rename to contrib/DART/DART/Kodiak/models/ED2/model_mod.f90
diff --git a/modules/DART/DART/Kodiak/models/ED2/model_mod.nml b/contrib/DART/DART/Kodiak/models/ED2/model_mod.nml
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/model_mod.nml
rename to contrib/DART/DART/Kodiak/models/ED2/model_mod.nml
diff --git a/modules/DART/DART/Kodiak/models/ED2/shell_scripts/advance_model.csh b/contrib/DART/DART/Kodiak/models/ED2/shell_scripts/advance_model.csh
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/shell_scripts/advance_model.csh
rename to contrib/DART/DART/Kodiak/models/ED2/shell_scripts/advance_model.csh
diff --git a/modules/DART/DART/Kodiak/models/ED2/shell_scripts/run_filter.csh b/contrib/DART/DART/Kodiak/models/ED2/shell_scripts/run_filter.csh
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/shell_scripts/run_filter.csh
rename to contrib/DART/DART/Kodiak/models/ED2/shell_scripts/run_filter.csh
diff --git a/modules/DART/DART/Kodiak/models/ED2/utils/F2R.f90 b/contrib/DART/DART/Kodiak/models/ED2/utils/F2R.f90
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/utils/F2R.f90
rename to contrib/DART/DART/Kodiak/models/ED2/utils/F2R.f90
diff --git a/modules/DART/DART/Kodiak/models/ED2/utils/R2F.f90 b/contrib/DART/DART/Kodiak/models/ED2/utils/R2F.f90
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/utils/R2F.f90
rename to contrib/DART/DART/Kodiak/models/ED2/utils/R2F.f90
diff --git a/modules/DART/DART/Kodiak/models/ED2/utils/README b/contrib/DART/DART/Kodiak/models/ED2/utils/README
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/utils/README
rename to contrib/DART/DART/Kodiak/models/ED2/utils/README
diff --git a/modules/DART/DART/Kodiak/models/ED2/utils/adjValue.R b/contrib/DART/DART/Kodiak/models/ED2/utils/adjValue.R
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/utils/adjValue.R
rename to contrib/DART/DART/Kodiak/models/ED2/utils/adjValue.R
diff --git a/modules/DART/DART/Kodiak/models/ED2/utils/createInput.R b/contrib/DART/DART/Kodiak/models/ED2/utils/createInput.R
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/utils/createInput.R
rename to contrib/DART/DART/Kodiak/models/ED2/utils/createInput.R
diff --git a/modules/DART/DART/Kodiak/models/ED2/utils/createTransit.R b/contrib/DART/DART/Kodiak/models/ED2/utils/createTransit.R
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/utils/createTransit.R
rename to contrib/DART/DART/Kodiak/models/ED2/utils/createTransit.R
diff --git a/modules/DART/DART/Kodiak/models/ED2/utils/end_file.txt b/contrib/DART/DART/Kodiak/models/ED2/utils/end_file.txt
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/utils/end_file.txt
rename to contrib/DART/DART/Kodiak/models/ED2/utils/end_file.txt
diff --git a/modules/DART/DART/Kodiak/models/ED2/utils/file_name.txt b/contrib/DART/DART/Kodiak/models/ED2/utils/file_name.txt
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/utils/file_name.txt
rename to contrib/DART/DART/Kodiak/models/ED2/utils/file_name.txt
diff --git a/modules/DART/DART/Kodiak/models/ED2/utils/mkmf_trans_time b/contrib/DART/DART/Kodiak/models/ED2/utils/mkmf_trans_time
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/utils/mkmf_trans_time
rename to contrib/DART/DART/Kodiak/models/ED2/utils/mkmf_trans_time
diff --git a/modules/DART/DART/Kodiak/models/ED2/utils/path_names_trans_time b/contrib/DART/DART/Kodiak/models/ED2/utils/path_names_trans_time
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/utils/path_names_trans_time
rename to contrib/DART/DART/Kodiak/models/ED2/utils/path_names_trans_time
diff --git a/modules/DART/DART/Kodiak/models/ED2/utils/readValue.R b/contrib/DART/DART/Kodiak/models/ED2/utils/readValue.R
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/utils/readValue.R
rename to contrib/DART/DART/Kodiak/models/ED2/utils/readValue.R
diff --git a/modules/DART/DART/Kodiak/models/ED2/utils/sim_year b/contrib/DART/DART/Kodiak/models/ED2/utils/sim_year
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/utils/sim_year
rename to contrib/DART/DART/Kodiak/models/ED2/utils/sim_year
diff --git a/modules/DART/DART/Kodiak/models/ED2/utils/temp_ic b/contrib/DART/DART/Kodiak/models/ED2/utils/temp_ic
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/utils/temp_ic
rename to contrib/DART/DART/Kodiak/models/ED2/utils/temp_ic
diff --git a/modules/DART/DART/Kodiak/models/ED2/utils/trans_time.f90 b/contrib/DART/DART/Kodiak/models/ED2/utils/trans_time.f90
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/utils/trans_time.f90
rename to contrib/DART/DART/Kodiak/models/ED2/utils/trans_time.f90
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/.cppdefs b/contrib/DART/DART/Kodiak/models/ED2/work/.cppdefs
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/.cppdefs
rename to contrib/DART/DART/Kodiak/models/ED2/work/.cppdefs
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/4Rdata.dat b/contrib/DART/DART/Kodiak/models/ED2/work/4Rdata.dat
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/4Rdata.dat
rename to contrib/DART/DART/Kodiak/models/ED2/work/4Rdata.dat
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/ED2IN b/contrib/DART/DART/Kodiak/models/ED2/work/ED2IN
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/ED2IN
rename to contrib/DART/DART/Kodiak/models/ED2/work/ED2IN
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/F2R b/contrib/DART/DART/Kodiak/models/ED2/work/F2R
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/F2R
rename to contrib/DART/DART/Kodiak/models/ED2/work/F2R
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/Makefile b/contrib/DART/DART/Kodiak/models/ED2/work/Makefile
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/Makefile
rename to contrib/DART/DART/Kodiak/models/ED2/work/Makefile
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/ObsDiagAtts.m b/contrib/DART/DART/Kodiak/models/ED2/work/ObsDiagAtts.m
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/ObsDiagAtts.m
rename to contrib/DART/DART/Kodiak/models/ED2/work/ObsDiagAtts.m
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/R2F b/contrib/DART/DART/Kodiak/models/ED2/work/R2F
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/R2F
rename to contrib/DART/DART/Kodiak/models/ED2/work/R2F
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/RAW_STATE_VARIABLE_anl_times.dat b/contrib/DART/DART/Kodiak/models/ED2/work/RAW_STATE_VARIABLE_anl_times.dat
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/RAW_STATE_VARIABLE_anl_times.dat
rename to contrib/DART/DART/Kodiak/models/ED2/work/RAW_STATE_VARIABLE_anl_times.dat
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/RAW_STATE_VARIABLE_ges_times.dat b/contrib/DART/DART/Kodiak/models/ED2/work/RAW_STATE_VARIABLE_ges_times.dat
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/RAW_STATE_VARIABLE_ges_times.dat
rename to contrib/DART/DART/Kodiak/models/ED2/work/RAW_STATE_VARIABLE_ges_times.dat
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/R_ED2IN b/contrib/DART/DART/Kodiak/models/ED2/work/R_ED2IN
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/R_ED2IN
rename to contrib/DART/DART/Kodiak/models/ED2/work/R_ED2IN
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/S_ED2IN b/contrib/DART/DART/Kodiak/models/ED2/work/S_ED2IN
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/S_ED2IN
rename to contrib/DART/DART/Kodiak/models/ED2/work/S_ED2IN
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/T_ED2IN b/contrib/DART/DART/Kodiak/models/ED2/work/T_ED2IN
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/T_ED2IN
rename to contrib/DART/DART/Kodiak/models/ED2/work/T_ED2IN
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/adjValue.R b/contrib/DART/DART/Kodiak/models/ED2/work/adjValue.R
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/adjValue.R
rename to contrib/DART/DART/Kodiak/models/ED2/work/adjValue.R
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/advance_model.csh b/contrib/DART/DART/Kodiak/models/ED2/work/advance_model.csh
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/advance_model.csh
rename to contrib/DART/DART/Kodiak/models/ED2/work/advance_model.csh
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/build.log b/contrib/DART/DART/Kodiak/models/ED2/work/build.log
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/build.log
rename to contrib/DART/DART/Kodiak/models/ED2/work/build.log
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/can_I_create_a_file.txt b/contrib/DART/DART/Kodiak/models/ED2/work/can_I_create_a_file.txt
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/can_I_create_a_file.txt
rename to contrib/DART/DART/Kodiak/models/ED2/work/can_I_create_a_file.txt
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/createInput.R b/contrib/DART/DART/Kodiak/models/ED2/work/createInput.R
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/createInput.R
rename to contrib/DART/DART/Kodiak/models/ED2/work/createInput.R
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/createTransit.R b/contrib/DART/DART/Kodiak/models/ED2/work/createTransit.R
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/createTransit.R
rename to contrib/DART/DART/Kodiak/models/ED2/work/createTransit.R
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/creation.csh b/contrib/DART/DART/Kodiak/models/ED2/work/creation.csh
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/creation.csh
rename to contrib/DART/DART/Kodiak/models/ED2/work/creation.csh
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/ed_2.1-opt b/contrib/DART/DART/Kodiak/models/ED2/work/ed_2.1-opt
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/ed_2.1-opt
rename to contrib/DART/DART/Kodiak/models/ED2/work/ed_2.1-opt
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/end_date b/contrib/DART/DART/Kodiak/models/ED2/work/end_date
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/end_date
rename to contrib/DART/DART/Kodiak/models/ED2/work/end_date
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/end_file.txt b/contrib/DART/DART/Kodiak/models/ED2/work/end_file.txt
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/end_file.txt
rename to contrib/DART/DART/Kodiak/models/ED2/work/end_file.txt
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/f_ics b/contrib/DART/DART/Kodiak/models/ED2/work/f_ics
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/f_ics
rename to contrib/DART/DART/Kodiak/models/ED2/work/f_ics
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/filter.sh b/contrib/DART/DART/Kodiak/models/ED2/work/filter.sh
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/filter.sh
rename to contrib/DART/DART/Kodiak/models/ED2/work/filter.sh
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/filter_ics b/contrib/DART/DART/Kodiak/models/ED2/work/filter_ics
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/filter_ics
rename to contrib/DART/DART/Kodiak/models/ED2/work/filter_ics
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/filter_ics_back b/contrib/DART/DART/Kodiak/models/ED2/work/filter_ics_back
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/filter_ics_back
rename to contrib/DART/DART/Kodiak/models/ED2/work/filter_ics_back
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/filter_restart b/contrib/DART/DART/Kodiak/models/ED2/work/filter_restart
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/filter_restart
rename to contrib/DART/DART/Kodiak/models/ED2/work/filter_restart
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/input.dat b/contrib/DART/DART/Kodiak/models/ED2/work/input.dat
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/input.dat
rename to contrib/DART/DART/Kodiak/models/ED2/work/input.dat
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/input.nml b/contrib/DART/DART/Kodiak/models/ED2/work/input.nml
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/input.nml
rename to contrib/DART/DART/Kodiak/models/ED2/work/input.nml
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/int.out b/contrib/DART/DART/Kodiak/models/ED2/work/int.out
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/int.out
rename to contrib/DART/DART/Kodiak/models/ED2/work/int.out
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/mfilter.csh b/contrib/DART/DART/Kodiak/models/ED2/work/mfilter.csh
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/mfilter.csh
rename to contrib/DART/DART/Kodiak/models/ED2/work/mfilter.csh
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/mkmf_create_fixed_network_seq b/contrib/DART/DART/Kodiak/models/ED2/work/mkmf_create_fixed_network_seq
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/mkmf_create_fixed_network_seq
rename to contrib/DART/DART/Kodiak/models/ED2/work/mkmf_create_fixed_network_seq
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/mkmf_create_obs_sequence b/contrib/DART/DART/Kodiak/models/ED2/work/mkmf_create_obs_sequence
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/mkmf_create_obs_sequence
rename to contrib/DART/DART/Kodiak/models/ED2/work/mkmf_create_obs_sequence
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/mkmf_filter b/contrib/DART/DART/Kodiak/models/ED2/work/mkmf_filter
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/mkmf_filter
rename to contrib/DART/DART/Kodiak/models/ED2/work/mkmf_filter
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/mkmf_integrate_model b/contrib/DART/DART/Kodiak/models/ED2/work/mkmf_integrate_model
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/mkmf_integrate_model
rename to contrib/DART/DART/Kodiak/models/ED2/work/mkmf_integrate_model
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/mkmf_obs_diag b/contrib/DART/DART/Kodiak/models/ED2/work/mkmf_obs_diag
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/mkmf_obs_diag
rename to contrib/DART/DART/Kodiak/models/ED2/work/mkmf_obs_diag
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/mkmf_obs_sequence_tool b/contrib/DART/DART/Kodiak/models/ED2/work/mkmf_obs_sequence_tool
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/mkmf_obs_sequence_tool
rename to contrib/DART/DART/Kodiak/models/ED2/work/mkmf_obs_sequence_tool
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/mkmf_perfect_model_obs b/contrib/DART/DART/Kodiak/models/ED2/work/mkmf_perfect_model_obs
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/mkmf_perfect_model_obs
rename to contrib/DART/DART/Kodiak/models/ED2/work/mkmf_perfect_model_obs
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/mkmf_preprocess b/contrib/DART/DART/Kodiak/models/ED2/work/mkmf_preprocess
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/mkmf_preprocess
rename to contrib/DART/DART/Kodiak/models/ED2/work/mkmf_preprocess
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/mkmf_restart_file_tool b/contrib/DART/DART/Kodiak/models/ED2/work/mkmf_restart_file_tool
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/mkmf_restart_file_tool
rename to contrib/DART/DART/Kodiak/models/ED2/work/mkmf_restart_file_tool
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/mkmf_wakeup_filter b/contrib/DART/DART/Kodiak/models/ED2/work/mkmf_wakeup_filter
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/mkmf_wakeup_filter
rename to contrib/DART/DART/Kodiak/models/ED2/work/mkmf_wakeup_filter
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/obs_seq.final b/contrib/DART/DART/Kodiak/models/ED2/work/obs_seq.final
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/obs_seq.final
rename to contrib/DART/DART/Kodiak/models/ED2/work/obs_seq.final
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/obs_seq.in b/contrib/DART/DART/Kodiak/models/ED2/work/obs_seq.in
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/obs_seq.in
rename to contrib/DART/DART/Kodiak/models/ED2/work/obs_seq.in
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/obs_seq.out b/contrib/DART/DART/Kodiak/models/ED2/work/obs_seq.out
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/obs_seq.out
rename to contrib/DART/DART/Kodiak/models/ED2/work/obs_seq.out
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/output.log b/contrib/DART/DART/Kodiak/models/ED2/work/output.log
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/output.log
rename to contrib/DART/DART/Kodiak/models/ED2/work/output.log
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/output_file.dat b/contrib/DART/DART/Kodiak/models/ED2/work/output_file.dat
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/output_file.dat
rename to contrib/DART/DART/Kodiak/models/ED2/work/output_file.dat
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/path_names_create_fixed_network_seq b/contrib/DART/DART/Kodiak/models/ED2/work/path_names_create_fixed_network_seq
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/path_names_create_fixed_network_seq
rename to contrib/DART/DART/Kodiak/models/ED2/work/path_names_create_fixed_network_seq
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/path_names_create_obs_sequence b/contrib/DART/DART/Kodiak/models/ED2/work/path_names_create_obs_sequence
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/path_names_create_obs_sequence
rename to contrib/DART/DART/Kodiak/models/ED2/work/path_names_create_obs_sequence
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/path_names_filter b/contrib/DART/DART/Kodiak/models/ED2/work/path_names_filter
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/path_names_filter
rename to contrib/DART/DART/Kodiak/models/ED2/work/path_names_filter
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/path_names_integrate_model b/contrib/DART/DART/Kodiak/models/ED2/work/path_names_integrate_model
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/path_names_integrate_model
rename to contrib/DART/DART/Kodiak/models/ED2/work/path_names_integrate_model
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/path_names_obs_diag b/contrib/DART/DART/Kodiak/models/ED2/work/path_names_obs_diag
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/path_names_obs_diag
rename to contrib/DART/DART/Kodiak/models/ED2/work/path_names_obs_diag
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/path_names_obs_sequence_tool b/contrib/DART/DART/Kodiak/models/ED2/work/path_names_obs_sequence_tool
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/path_names_obs_sequence_tool
rename to contrib/DART/DART/Kodiak/models/ED2/work/path_names_obs_sequence_tool
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/path_names_perfect_model_obs b/contrib/DART/DART/Kodiak/models/ED2/work/path_names_perfect_model_obs
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/path_names_perfect_model_obs
rename to contrib/DART/DART/Kodiak/models/ED2/work/path_names_perfect_model_obs
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/path_names_preprocess b/contrib/DART/DART/Kodiak/models/ED2/work/path_names_preprocess
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/path_names_preprocess
rename to contrib/DART/DART/Kodiak/models/ED2/work/path_names_preprocess
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/path_names_restart_file_tool b/contrib/DART/DART/Kodiak/models/ED2/work/path_names_restart_file_tool
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/path_names_restart_file_tool
rename to contrib/DART/DART/Kodiak/models/ED2/work/path_names_restart_file_tool
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/path_names_wakeup_filter b/contrib/DART/DART/Kodiak/models/ED2/work/path_names_wakeup_filter
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/path_names_wakeup_filter
rename to contrib/DART/DART/Kodiak/models/ED2/work/path_names_wakeup_filter
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/qs.sh b/contrib/DART/DART/Kodiak/models/ED2/work/qs.sh
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/qs.sh
rename to contrib/DART/DART/Kodiak/models/ED2/work/qs.sh
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/quickbuild.csh b/contrib/DART/DART/Kodiak/models/ED2/work/quickbuild.csh
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/quickbuild.csh
rename to contrib/DART/DART/Kodiak/models/ED2/work/quickbuild.csh
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/readValue.R b/contrib/DART/DART/Kodiak/models/ED2/work/readValue.R
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/readValue.R
rename to contrib/DART/DART/Kodiak/models/ED2/work/readValue.R
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/set_def.out b/contrib/DART/DART/Kodiak/models/ED2/work/set_def.out
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/set_def.out
rename to contrib/DART/DART/Kodiak/models/ED2/work/set_def.out
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/set_det.out b/contrib/DART/DART/Kodiak/models/ED2/work/set_det.out
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/set_det.out
rename to contrib/DART/DART/Kodiak/models/ED2/work/set_det.out
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/sim_year b/contrib/DART/DART/Kodiak/models/ED2/work/sim_year
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/sim_year
rename to contrib/DART/DART/Kodiak/models/ED2/work/sim_year
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/using_mpi_for_filter b/contrib/DART/DART/Kodiak/models/ED2/work/using_mpi_for_filter
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/using_mpi_for_filter
rename to contrib/DART/DART/Kodiak/models/ED2/work/using_mpi_for_filter
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/using_mpi_for_wakeup_filter b/contrib/DART/DART/Kodiak/models/ED2/work/using_mpi_for_wakeup_filter
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/using_mpi_for_wakeup_filter
rename to contrib/DART/DART/Kodiak/models/ED2/work/using_mpi_for_wakeup_filter
diff --git a/modules/DART/DART/Kodiak/models/ED2/work/year.dat b/contrib/DART/DART/Kodiak/models/ED2/work/year.dat
similarity index 100%
rename from modules/DART/DART/Kodiak/models/ED2/work/year.dat
rename to contrib/DART/DART/Kodiak/models/ED2/work/year.dat
diff --git a/modules/DART/DART/Kodiak/obs_def/obs_def_phen_mod.f90 b/contrib/DART/DART/Kodiak/obs_def/obs_def_phen_mod.f90
similarity index 100%
rename from modules/DART/DART/Kodiak/obs_def/obs_def_phen_mod.f90
rename to contrib/DART/DART/Kodiak/obs_def/obs_def_phen_mod.f90
diff --git a/modules/DART/DART/Kodiak/obs_kind/DEFAULT_obs_kind_mod.F90 b/contrib/DART/DART/Kodiak/obs_kind/DEFAULT_obs_kind_mod.F90
similarity index 100%
rename from modules/DART/DART/Kodiak/obs_kind/DEFAULT_obs_kind_mod.F90
rename to contrib/DART/DART/Kodiak/obs_kind/DEFAULT_obs_kind_mod.F90
diff --git a/modules/DART/ED2/build/bin/include.mk.opt b/contrib/DART/ED2/build/bin/include.mk.opt
similarity index 100%
rename from modules/DART/ED2/build/bin/include.mk.opt
rename to contrib/DART/ED2/build/bin/include.mk.opt
diff --git a/modules/DART/ED2/run/ED2IN b/contrib/DART/ED2/run/ED2IN
similarity index 100%
rename from modules/DART/ED2/run/ED2IN
rename to contrib/DART/ED2/run/ED2IN
diff --git a/modules/DART/ED2/src/dynamics/phenology_aux.f90 b/contrib/DART/ED2/src/dynamics/phenology_aux.f90
similarity index 100%
rename from modules/DART/ED2/src/dynamics/phenology_aux.f90
rename to contrib/DART/ED2/src/dynamics/phenology_aux.f90
diff --git a/modules/DART/ED2/src/dynamics/phenology_driv.f90 b/contrib/DART/ED2/src/dynamics/phenology_driv.f90
similarity index 100%
rename from modules/DART/ED2/src/dynamics/phenology_driv.f90
rename to contrib/DART/ED2/src/dynamics/phenology_driv.f90
diff --git a/modules/DART/ED2/src/init/ed_params.f90 b/contrib/DART/ED2/src/init/ed_params.f90
similarity index 100%
rename from modules/DART/ED2/src/init/ed_params.f90
rename to contrib/DART/ED2/src/init/ed_params.f90
diff --git a/modules/DART/ED2/src/init/phenology_startup.f90 b/contrib/DART/ED2/src/init/phenology_startup.f90
similarity index 100%
rename from modules/DART/ED2/src/init/phenology_startup.f90
rename to contrib/DART/ED2/src/init/phenology_startup.f90
diff --git a/modules/DART/ED2/src/io/ed_init_full_history.F90 b/contrib/DART/ED2/src/io/ed_init_full_history.F90
similarity index 100%
rename from modules/DART/ED2/src/io/ed_init_full_history.F90
rename to contrib/DART/ED2/src/io/ed_init_full_history.F90
diff --git a/modules/DART/ED2/src/memory/ed_state_vars.f90 b/contrib/DART/ED2/src/memory/ed_state_vars.f90
similarity index 100%
rename from modules/DART/ED2/src/memory/ed_state_vars.f90
rename to contrib/DART/ED2/src/memory/ed_state_vars.f90
diff --git a/modules/DART/ED2/src/utils/allometry.f90 b/contrib/DART/ED2/src/utils/allometry.f90
similarity index 100%
rename from modules/DART/ED2/src/utils/allometry.f90
rename to contrib/DART/ED2/src/utils/allometry.f90
diff --git a/contrib/DART/LICENSE b/contrib/DART/LICENSE
new file mode 100644
index 00000000000..74e04dac96c
--- /dev/null
+++ b/contrib/DART/LICENSE
@@ -0,0 +1,35 @@
+Files labeled as part of the DART software are copyright UCAR.
+They are provided as-is subject to the DART terms of use available at
+https://dart.ucar.edu.
+
+---
+
+Other code in this directory was written by Toni Viskari for
+the PEcAn project and is provided under the BSD 3-clause license:
+
+Copyright (c) 2012-2024 Pecan Project
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this
+list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice,
+this list of conditions and the following disclaimer in the documentation
+and/or other materials provided with the distribution.
+
+3. Neither the name of the copyright holder nor the names of its contributors
+may be used to endorse or promote products derived from this software without
+specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/modules/DART/R/ObsSeq.R b/contrib/DART/R/ObsSeq.R
similarity index 100%
rename from modules/DART/R/ObsSeq.R
rename to contrib/DART/R/ObsSeq.R
diff --git a/modules/DART/R/adjValue.R b/contrib/DART/R/adjValue.R
similarity index 100%
rename from modules/DART/R/adjValue.R
rename to contrib/DART/R/adjValue.R
diff --git a/modules/DART/R/createInput.R b/contrib/DART/R/createInput.R
similarity index 100%
rename from modules/DART/R/createInput.R
rename to contrib/DART/R/createInput.R
diff --git a/modules/DART/R/date2month.R b/contrib/DART/R/date2month.R
similarity index 100%
rename from modules/DART/R/date2month.R
rename to contrib/DART/R/date2month.R
diff --git a/modules/DART/R/filter_ics.R b/contrib/DART/R/filter_ics.R
similarity index 100%
rename from modules/DART/R/filter_ics.R
rename to contrib/DART/R/filter_ics.R
diff --git a/modules/DART/fluxnet/willow/WCr.NACP.lat45.5lon-90.5.css b/contrib/DART/fluxnet/willow/WCr.NACP.lat45.5lon-90.5.css
similarity index 100%
rename from modules/DART/fluxnet/willow/WCr.NACP.lat45.5lon-90.5.css
rename to contrib/DART/fluxnet/willow/WCr.NACP.lat45.5lon-90.5.css
diff --git a/modules/DART/fluxnet/willow/WCr.NACP.lat45.5lon-90.5.pss b/contrib/DART/fluxnet/willow/WCr.NACP.lat45.5lon-90.5.pss
similarity index 100%
rename from modules/DART/fluxnet/willow/WCr.NACP.lat45.5lon-90.5.pss
rename to contrib/DART/fluxnet/willow/WCr.NACP.lat45.5lon-90.5.pss
diff --git a/modules/DART/fluxnet/willow/WCr.NACP.lat45.5lon-90.5.site b/contrib/DART/fluxnet/willow/WCr.NACP.lat45.5lon-90.5.site
similarity index 100%
rename from modules/DART/fluxnet/willow/WCr.NACP.lat45.5lon-90.5.site
rename to contrib/DART/fluxnet/willow/WCr.NACP.lat45.5lon-90.5.site
diff --git a/modules/DART/fluxnet/willow/phenology.lat45.5lon-90.5.txt b/contrib/DART/fluxnet/willow/phenology.lat45.5lon-90.5.txt
similarity index 100%
rename from modules/DART/fluxnet/willow/phenology.lat45.5lon-90.5.txt
rename to contrib/DART/fluxnet/willow/phenology.lat45.5lon-90.5.txt
diff --git a/docker-compose.yml b/docker-compose.yml
index 51f96bda62f..8ad2b54a335 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -377,7 +377,7 @@ services:
- "traefik.enable=true"
- "traefik.http.routers.dbsync.rule=Host(`${TRAEFIK_HOST:-pecan.localhost}`) && PathPrefix(`/dbsync/`)"
- "traefik.http.routers.dbsync.middlewares=dbsync-stripprefix"
- - "traefik.http.middlewares.dbsync-stripprefix.stripprefix.prefixes=/monitor"
+ - "traefik.http.middlewares.dbsync-stripprefix.stripprefix.prefixes=/dbsync"
healthcheck:
test: "curl --silent --fail http://localhost:3838 > /dev/null || exit 1"
interval: 10s
diff --git a/docker/depends/pecan_package_dependencies.csv b/docker/depends/pecan_package_dependencies.csv
index bcc4a0ff8e2..d839a811865 100644
--- a/docker/depends/pecan_package_dependencies.csv
+++ b/docker/depends/pecan_package_dependencies.csv
@@ -1,5 +1,6 @@
"package","version","needed_by_dir","type","is_pecan"
"abind","*","modules/assim.batch","Imports",FALSE
+"abind","*","modules/assim.sequential","Suggests",FALSE
"abind",">= 1.4.5","base/utils","Imports",FALSE
"abind",">= 1.4.5","models/ed","Imports",FALSE
"abind",">= 1.4.5","modules/data.atmosphere","Imports",FALSE
@@ -41,6 +42,9 @@
"doParallel","*","modules/data.atmosphere","Suggests",FALSE
"doParallel","*","modules/data.remote","Imports",FALSE
"doSNOW","*","base/remote","Suggests",FALSE
+"doSNOW","*","base/utils","Suggests",FALSE
+"doSNOW","*","modules/assim.sequential","Suggests",FALSE
+"doSNOW","*","modules/data.atmosphere","Suggests",FALSE
"doSNOW","*","modules/data.land","Imports",FALSE
"doSNOW","*","modules/data.remote","Suggests",FALSE
"dplR","*","modules/data.land","Imports",FALSE
@@ -61,11 +65,13 @@
"dplyr","*","modules/uncertainty","Imports",FALSE
"dplyr",">= 0.8.1","modules/data.atmosphere","Imports",FALSE
"dplyr",">= 1.1.2","base/db","Imports",FALSE
+"ecmwfr",">= 2.0.0","modules/data.atmosphere","Suggests",FALSE
"ellipse","*","modules/assim.batch","Imports",FALSE
-"emdbook","*","modules/assim.sequential","Suggests",FALSE
"exactextractr","*","modules/assim.sequential","Suggests",FALSE
"foreach","*","base/remote","Imports",FALSE
-"foreach","*","modules/data.atmosphere","Suggests",FALSE
+"foreach","*","base/utils","Imports",FALSE
+"foreach","*","modules/assim.sequential","Imports",FALSE
+"foreach","*","modules/data.atmosphere","Imports",FALSE
"foreach","*","modules/data.land","Imports",FALSE
"foreach","*","modules/data.remote","Imports",FALSE
"fs","*","base/db","Imports",FALSE
@@ -89,7 +95,7 @@
"ggplot2","*","base/visualization","Imports",FALSE
"ggplot2","*","modules/assim.sequential","Imports",FALSE
"ggplot2","*","modules/benchmark","Imports",FALSE
-"ggplot2","*","modules/data.atmosphere","Imports",FALSE
+"ggplot2","*","modules/data.atmosphere","Suggests",FALSE
"ggplot2","*","modules/data.remote","Suggests",FALSE
"ggplot2","*","modules/meta.analysis","Suggests",FALSE
"ggplot2","*","modules/priors","Imports",FALSE
@@ -123,10 +129,14 @@
"httr","*","modules/data.land","Imports",FALSE
"httr","*","modules/data.remote","Suggests",FALSE
"IDPmisc","*","modules/assim.batch","Imports",FALSE
+"itertools","*","modules/assim.sequential","Suggests",FALSE
"jsonlite","*","base/remote","Imports",FALSE
+"jsonlite","*","models/sipnet","Imports",FALSE
"jsonlite","*","models/stics","Imports",FALSE
"jsonlite","*","modules/data.atmosphere","Imports",FALSE
+"jsonlite","*","modules/data.land","Suggests",FALSE
"jsonlite","*","modules/data.remote","Suggests",FALSE
+"jsonvalidate","*","modules/data.land","Suggests",FALSE
"keras3",">= 1.0.0","modules/assim.sequential","Suggests",FALSE
"knitr","*","base/visualization","Suggests",FALSE
"knitr","*","models/biocro","Suggests",FALSE
@@ -173,7 +183,6 @@
"magrittr","*","base/db","Imports",FALSE
"magrittr","*","base/utils","Imports",FALSE
"magrittr","*","models/ed","Imports",FALSE
-"magrittr","*","modules/assim.sequential","Imports",FALSE
"magrittr","*","modules/benchmark","Imports",FALSE
"magrittr","*","modules/data.land","Imports",FALSE
"magrittr","*","modules/data.remote","Imports",FALSE
@@ -182,6 +191,7 @@
"MASS","*","base/utils","Suggests",FALSE
"MASS","*","modules/assim.batch","Imports",FALSE
"MASS","*","modules/data.atmosphere","Imports",FALSE
+"MASS","*","modules/data.land","Suggests",FALSE
"MASS","*","modules/meta.analysis","Imports",FALSE
"MASS","*","modules/priors","Imports",FALSE
"MASS","*","modules/rtm","Imports",FALSE
@@ -209,13 +219,14 @@
"mockery","*","base/workflow","Suggests",FALSE
"mockery","*","modules/data.atmosphere","Suggests",FALSE
"mockery","*","modules/meta.analysis","Suggests",FALSE
+"mockery","*","modules/uncertainty","Suggests",FALSE
"mockery",">= 0.3.0","models/biocro","Suggests",FALSE
"mockery",">= 0.4.3","base/db","Suggests",FALSE
"MODISTools",">= 1.1.0","modules/data.remote","Imports",FALSE
"mvbutils","*","base/qaqc","Suggests",FALSE
"mvtnorm","*","modules/allometry","Imports",FALSE
"mvtnorm","*","modules/assim.batch","Imports",FALSE
-"mvtnorm","*","modules/assim.sequential","Imports",FALSE
+"mvtnorm","*","modules/assim.sequential","Suggests",FALSE
"mvtnorm","*","modules/data.land","Imports",FALSE
"mvtnorm","*","modules/emulator","Imports",FALSE
"ncdf4","*","base/db","Imports",FALSE
@@ -468,6 +479,7 @@
"reshape2","*","modules/benchmark","Imports",FALSE
"reshape2","*","modules/data.atmosphere","Imports",FALSE
"reshape2",">= 1.4.2","modules/assim.sequential","Suggests",FALSE
+"reticulate","*","modules/assim.sequential","Imports",FALSE
"reticulate","*","modules/data.atmosphere","Suggests",FALSE
"reticulate","*","modules/data.land","Suggests",FALSE
"reticulate","*","modules/data.remote","Imports",FALSE
@@ -548,6 +560,7 @@
"RPostgreSQL","*","base/db","Suggests",FALSE
"RPostgreSQL","*","models/biocro","Suggests",FALSE
"RSQLite","*","base/db","Suggests",FALSE
+"sensitivity","*","modules/uncertainty","Imports",FALSE
"sessioninfo","*","base/all","Suggests",FALSE
"sf","*","modules/assim.sequential","Suggests",FALSE
"sf","*","modules/data.atmosphere","Imports",FALSE
@@ -565,6 +578,7 @@
"stats","*","modules/allometry","Imports",FALSE
"stats","*","modules/assim.batch","Imports",FALSE
"stats","*","modules/assim.sequential","Suggests",FALSE
+"stats","*","modules/data.remote","Suggests",FALSE
"stats","*","modules/photosynthesis","Imports",FALSE
"stats","*","modules/rtm","Imports",FALSE
"SticsRFiles","*","models/stics","Suggests",FALSE
@@ -575,11 +589,11 @@
"stringr","*","modules/assim.sequential","Imports",FALSE
"stringr","*","modules/benchmark","Imports",FALSE
"stringr","*","modules/data.land","Imports",FALSE
+"stringr","*","modules/data.remote","Suggests",FALSE
"stringr",">= 1.1.0","base/visualization","Imports",FALSE
"stringr",">= 1.1.0","models/ed","Imports",FALSE
"stringr",">= 1.1.0","modules/data.atmosphere","Imports",FALSE
"suntools","*","modules/data.atmosphere","Imports",FALSE
-"swfscMisc","*","modules/data.land","Imports",FALSE
"terra","*","modules/assim.sequential","Suggests",FALSE
"terra","*","modules/data.atmosphere","Imports",FALSE
"terra","*","modules/data.land","Imports",FALSE
@@ -606,10 +620,8 @@
"testthat",">= 1.0.2","models/maespa","Suggests",FALSE
"testthat",">= 1.0.2","models/sipnet","Suggests",FALSE
"testthat",">= 1.0.2","models/stics","Suggests",FALSE
-"testthat",">= 1.0.2","models/template","Suggests",FALSE
"testthat",">= 1.0.2","modules/allometry","Suggests",FALSE
"testthat",">= 1.0.2","modules/assim.batch","Suggests",FALSE
-"testthat",">= 1.0.2","modules/data.land","Suggests",FALSE
"testthat",">= 1.0.2","modules/data.remote","Suggests",FALSE
"testthat",">= 1.0.2","modules/meta.analysis","Suggests",FALSE
"testthat",">= 1.0.2","modules/rtm","Suggests",FALSE
@@ -619,8 +631,10 @@
"testthat",">= 2.0.0","base/utils","Suggests",FALSE
"testthat",">= 2.0.0","models/biocro","Suggests",FALSE
"testthat",">= 2.0.0","modules/benchmark","Suggests",FALSE
+"testthat",">= 3.0","models/template","Suggests",FALSE
"testthat",">= 3.0.0","models/sibcasa","Suggests",FALSE
"testthat",">= 3.0.4","base/qaqc","Suggests",FALSE
+"testthat",">= 3.1.0","modules/data.land","Suggests",FALSE
"testthat",">= 3.1.7","modules/data.atmosphere","Suggests",FALSE
"tibble","*","base/db","Imports",FALSE
"tibble","*","models/ed","Imports",FALSE
@@ -675,6 +689,8 @@
"withr","*","models/sipnet","Suggests",FALSE
"withr","*","modules/allometry","Suggests",FALSE
"withr","*","modules/data.atmosphere","Suggests",FALSE
+"withr","*","modules/data.land","Suggests",FALSE
+"xgboost","*","modules/assim.sequential","Suggests",FALSE
"XML","*","base/workflow","Imports",FALSE
"XML","*","models/biocro","Imports",FALSE
"XML","*","models/maat","Imports",FALSE
diff --git a/docker/docs/Dockerfile b/docker/docs/Dockerfile
index 3581d63f2dd..cce1f680111 100644
--- a/docker/docs/Dockerfile
+++ b/docker/docs/Dockerfile
@@ -15,7 +15,6 @@ RUN apt-get update \
-e 'remotes::install_version("rmarkdown", ">= 2.19", dependencies = TRUE, upgrade = FALSE, repos = repos)' \
-e 'remotes::install_version("knitr", ">= 1.42", dependencies = TRUE, upgrade = FALSE, repos = repos)' \
-e 'remotes::install_version("bookdown", ">= 0.31", dependencies = TRUE, upgrade = FALSE, repos = repos)' \
- -e 'install.packages("pkgdown", repos = repos)' \
&& rm -rf /var/lib/apt/lists/*
# ----------------------------------------------------------------------
diff --git a/docker/monitor/requirements.txt b/docker/monitor/requirements.txt
index d5537687d4d..8bf47055ece 100644
--- a/docker/monitor/requirements.txt
+++ b/docker/monitor/requirements.txt
@@ -1,4 +1,4 @@
pika==1.3.2
-requests==2.32.0
+requests==2.32.4
psycopg2-binary==2.9.9
python-dateutil==2.8.2
diff --git a/documentation/README.md b/documentation/README.md
index b457094e574..f8a0fbaea58 100644
--- a/documentation/README.md
+++ b/documentation/README.md
@@ -1,5 +1,7 @@
-# Readme.md
+# PEcAn Papers and Tutorials
-This folder contains published articles describing the development and application of PEcAn as well as tutorials.
+This directory contains:
+- published articles describing the development and application of PEcAn
+- tutorials
-The full documentation can be found in the book_source directory, and is published at with each new release.
+The full PEcAn documentation is maintained in the [`book_source/`](https://github.com/PecanProject/pecan/tree/develop/book_source) directory and is published at [pecanproject.github.io/pecan-documentation/](https://pecanproject.github.io/pecan-documentation/).
diff --git a/documentation/tutorials/Demo_02_Uncertainty_Analysis/pecan.xml b/documentation/tutorials/Demo_02_Uncertainty_Analysis/pecan.xml
new file mode 100644
index 00000000000..a4d0d38088c
--- /dev/null
+++ b/documentation/tutorials/Demo_02_Uncertainty_Analysis/pecan.xml
@@ -0,0 +1,67 @@
+
+
+
+
+ -1
+
+ 2025-06-19-15-34-01
+
+ demo_outdir
+
+
+ temperate.coniferous
+ pft/temperate.coniferous/prior.distns.Rdata
+ pft/temperate.coniferous
+
+
+
+ 50
+ NPP
+
+
+ halton
+
+
+ 2004
+ 2004
+
+
+
+ -1
+ 1
+
+ NPP
+ 2004
+ 2004
+
+
+ SIPNET
+ 1.3.0
+ FALSE
+ demo_outdir/sipnet
+
+
+
+ 2004/01/01
+ 2004/12/31
+ Niwot Ridge Forest/LTER NWT1 (US-NR1)
+ 40.0329
+ -105.546
+
+
+
+ AmerifluxLBL
+
+ Aritra_2004
+
+ dbfiles/AMF_US-NR1_BASE_HH_23-5.2004-01-01.2004-12-31.clim
+
+
+
+ 2004/01/01
+ 2004/12/31
+
+
+ localhost
+
+
\ No newline at end of file
diff --git a/documentation/tutorials/Demo_02_Uncertainty_Analysis/pft/temperate.coniferous/prior.distns.Rdata b/documentation/tutorials/Demo_02_Uncertainty_Analysis/pft/temperate.coniferous/prior.distns.Rdata
new file mode 100644
index 00000000000..1648cc1dfa8
Binary files /dev/null and b/documentation/tutorials/Demo_02_Uncertainty_Analysis/pft/temperate.coniferous/prior.distns.Rdata differ
diff --git a/documentation/tutorials/Demo_02_Uncertainty_Analysis/uncertainty.qmd b/documentation/tutorials/Demo_02_Uncertainty_Analysis/uncertainty.qmd
new file mode 100644
index 00000000000..caf15e3495e
--- /dev/null
+++ b/documentation/tutorials/Demo_02_Uncertainty_Analysis/uncertainty.qmd
@@ -0,0 +1,555 @@
+---
+title: "Uncertainty Analysis Using PEcAn"
+author:
+ - "Aritra Dey"
+ - "David LeBauer"
+format:
+ pdf:
+ toc: true
+ number-sections: true
+ fig-width: 10
+ fig-height: 6
+ fig-dpi: 300
+---
+
+# Introduction {#introduction}
+
+In Demo 2 we will be looking at how PEcAn can use information about parameter uncertainty to perform three automated analyses:
+
+- **Ensemble Analysis**: Repeat numerous model runs, each sampling from the parameter uncertainty, to generate a probability distribution of model projections. Allows us to put a confidence interval on the model.
+- **Sensitivity Analysis**: Repeats numerous model runs to assess how changes in model parameters will affect model outputs. Allows us to identify which parameters the model is most sensitive to.
+- **Uncertainty Analysis**: Combines information about model sensitivity with information about parameter uncertainty to determine the contribution of each model parameter to the uncertainty in model outputs. Allow us to identify which parameters are driving model uncertainty.
+
+This demo shows how to run an uncertainty analysis workflow in PEcAn using an R-based Quarto notebook. It covers loading settings, configuring models, running simulations, and performing ensemble and sensitivity analyses to assess uncertainty and parameter importance. This programmatic approach complements the web-based PEcAn interface.
+
+**Context & modeling scenario:**
+
+We simulate plant and ecosystem carbon balance (Net Primary Productivity and Net Ecosystem Exchange) at the AmeriFlux Niwot Ridge Forest site ([US‑NR1](https://ameriflux.lbl.gov/sites/siteinfo/US-NR1)) during the year 2004. We use SIPNET parameterized as a temperate conifer PFT and driven by AmeriFlux meteorology following the analysis in [Moore et al. (2007)](https://doi.org/10.1016/j.agrformet.2008.04.013). This notebook also provides a compact template that can be extended to more years, locations, and PFTs.
+
+**What this notebook does:**
+
+1. Configure a PEcAn workflow by loading and validating a `pecan.xml` settings file.
+2. Run a set of ecosystem model simulations by writing model configuration files and then running SIPNET.
+3. Quantify uncertainty using ensemble analysis, sensitivity analyses, and variance decomposition.
+4. Visualize results to identify important parameters and how they influence model variance.
+5. Change configuration settings and re-run the workflow.
+
+## Prerequisites
+
+To run this notebook, you will need to install PEcAn and its dependencies, as well as download the SIPNET model binary.
+
+### PEcAn packages and dependencies.
+
+```
+# Enable repository from pecanproject
+options(repos = c(
+ pecanproject = 'https://pecanproject.r-universe.dev',
+ CRAN = 'https://cloud.r-project.org'))
+install.packages('PEcAn.all')
+```
+
+A valid `pecan.xml` configuration file. Start with the example at `pecan/documentation/tutorials/Demo_02_Uncertainty_Analysis/pecan.xml`.
+
+### SIPNET v1.3.0
+
+If you haven't already installed the SIPNET model, you can do so by running the following code. This will download the SIPNET binary to `demo_outdir/sipnet` and make it executable.
+
+> Note: The `demo_outdir` directory will be created inside of your PEcAn installation, at `documentation/tutorials/Demo_02_Uncertainty_Analysis/demo_outdir/`. This directory will contain the SIPNET binary as well as the output generated by PEcAn in this demo.
+
+```{r download-sipnet}
+# Download and install SIPNET v1.3.0
+source(
+ here::here(
+ "documentation/tutorials/Demo_1_Basic_Run/download_sipnet.R"
+ )
+)
+```
+
+> Note: You can find the most recent version of the SIPNET binary at: [SIPNET GitHub Releases](https://github.com/PecanProject/sipnet/releases), but this notebook is designed to work with SIPNET v1.3.0.
+
+# Load PEcAn Packages
+
+First, we need to load the PEcAn R packages. These packages provide all the functions we'll use to run the workflow.
+
+```{r libraries}
+# Load the PEcAn.all package, which includes all necessary PEcAn functionality
+library("PEcAn.all")
+```
+
+# Load PEcAn Settings File
+
+Use the XML settings file (`pecan.xml`) exactly as in the Demo 1 Basic Run tutorial. See the "Load PEcAn Settings File" section of Demo 1 for a more detailed walkthrough of fields and schema. In this tutorial we focus on settings relevant to this notebook and explain the additional options associated with uncertainty analysis.
+
+## Settings
+
+Example settings for this demo live at `pecan/documentation/tutorials/Demo_02_Uncertainty_Analysis/pecan.xml` and you can read more about the settings in the [PEcAn Documentation](https://pecanproject.github.io/pecan-documentation/develop/pecanXML.html#pecanXML), and sections focused on [ensemble](https://pecanproject.github.io/pecan-documentation/develop/xml-advanced.html#xml-ensemble) and [sensitivity analysis](https://pecanproject.github.io/pecan-documentation/develop/xml-advanced.html#xml-sensitivity-analysis) settings in particular.
+
+Open that settings file and look at the ensemble and sensitivity analysis sections. You can modify these settings to change the number of ensemble members, the variable to analyze, and the sampling method. Some of the key settings in this demo include:
+
+**Ensemble size**
+
+The number of runs in the ensemble is set to **50** in this demo, but a larger ensemble size (100-5000) is often used in practice to better estimate the posterior distribution of the output.
+
+**Output variable**
+
+The output variable for the ensemble analysis is set to **NPP** (Net Primary Productivity). You can change this to other variables like NEE (Net Ecosystem Exchange), LAI (Leaf Area Index), ET (Evapotranspiration), etc. You can also specify multiple variables by providing a vector of variable names (e.g., `c("NPP", "NEE", "LAI")`) to analyze uncertainty across several ecosystem processes simultaneously.
+
+**Sampling method**
+
+The sampling method for generating parameter sets is set to **halton** in this demo. This tells PEcAn to sample using the [Halton sequence](https://en.wikipedia.org/wiki/Halton_sequence), which is a quasi-random sampling method that more efficiently explores parameter space than random sampling. Other options include Latin Hypercube (lhc), which is another quasi-random sequence, as well as uniform sampling, which draws random samples. A random sampler requires a larger ensemble size to adequately explore parameter space.
+
+**Sensitivity analysis quantiles**
+
+PEcAn's sensitivity analysis includes a handy shortcut that converts a specified standard deviation into its normal quantile equivalent. In the example pecan.xml, these are set to **-1, 1** (the median value, 0, occurs by default), which are converted internally to the 15.9th and 84.1st quantiles of the parameter distribution. You can add more quantiles to explore a wider range of parameter values: {-3, -2, -1, 1, 2, 3} is often used in practice.
+
+By working in quantiles relative to each parameter’s distribution, the sensitivity and variance decomposition reflect sensitivity across the range of parameter values. Many sensitivity analysis tools use a fixed perturbation size such as the mean +/- 10%. PEcAn's SA does not take this approach because it does not capture the uncertainty across the parameter distribution and cannot be used for variance decomposition.
+
+## Load the settings file
+
+```{r load-settings}
+settings_path <- here::here("documentation/tutorials/Demo_02_Uncertainty_Analysis/pecan.xml")
+
+settings <- PEcAn.settings::read.settings(settings_path)
+
+settings <- PEcAn.settings::prepare.settings(settings)
+```
+
+See Demo 1 Section 6 for details on what these functions do. Briefly, they read the XML file, convert it into an R list object that PEcAn can use, check that settings are valid, fill in defaults, and create the output directory.
+
+# Write Model Configuration Files {#sec-write-configs}
+
+This step generates the model-specific configuration files that will be used to run the ecosystem model. The process involves:
+
+1. Disabling database write operations because we are not using a database
+```{r disable-db-write}
+settings$database <- NULL # Disable database operations for this demo
+```
+2. Generating SIPNET configuration files using the `runModule.run.write.configs()` function.
+```{r write-configs}
+settings <- PEcAn.workflow::runModule.run.write.configs(settings)
+```
+
+# Run Model Simulations
+
+This section executes the SIPNET simulations and retrieves the results.
+
+It uses the function `runModule_start_model_runs(settings)` to initiate the model runs using the configuration files generated in the previous step.
+
+```{r run-model}
+PEcAn.workflow::runModule_start_model_runs(settings)
+```
+
+The PEcAn workflow will take a longer time to complete than in Demo 1 because we have just asked for over a hundred model runs. Once the runs are complete we will continue.
+
+# Fetch Model Outputs
+
+Next we convert all of the model output from the previous run to a standard format that PEcAn can use for analysis. This is done using the `runModule.get.results()` function.
+
+```{r get-model-results}
+runModule.get.results(settings)
+```
+
+# Ensemble and Sensitivity Analysis {#sec-run-uncertainty}
+
+Next, we use the outputs from the previous step to perform ensemble and sensitivity analyses.
+
+**Ensemble Analysis**: Quantifies uncertainty in model predictions by running multiple simulations with parameters sampled from their uncertainty distributions. This generates probability distributions of model outputs and confidence intervals.
+
+```{r run-ensemble-analysis}
+runModule.run.ensemble.analysis(settings)
+```
+
+**Sensitivity Analysis**: Systematically varies individual parameters to assess their influence on model outputs. This identifies which parameters most strongly affect model predictions and helps prioritize parameter refinement efforts.
+
+```{r run-sensitivity-analysis}
+runModule.run.sensitivity.analysis(settings)
+```
+
+# PEcAn Outputs {#sec-outputs}
+
+## Output Directory Structure
+
+These are the key folders and files that will be created under the directory defined by `settings$outdir` (e.g., `demo_outdir` in the example). The file contents are described in the next section.
+
+We discussed the output directory in Demo 1 (Basic Run), but now we have three new folders that contain outputs from the sensitivity, ensemble, and variance decomposition analyses. Here we focus on the additional outputs generated by the ensemble and sensitivity analyses.
+
+```
+demo_outdir/
+├── run/ # Configuration & execution metadata
+│ ├── runs.txt # List of run IDs (SA and ensemble runs)
+│ ├── ENS-*-*/ # Ensemble run directories (e.g., ENS-00001--1/)
+│ └── SA-*-*/ # Sensitivity analysis run directories
+├── out/ # Raw model outputs by run ID
+│ └── [run_id]/ # E.g., daily or sub-daily SIPNET output files
+├── ensemble.analysis.*.pdf # Ensemble analysis plots
+├── ensemble.output.*.Rdata # Raw ensemble outputs
+├── ensemble.samples.*.Rdata # Parameter samples used for ensemble
+├── ensemble.ts.*.Rdata # Time series data from ensemble
+├── samples.Rdata # Parameter samples for both SA and ensemble
+├── sensitivity.output.*.Rdata # SA model outputs
+├── sensitivity.results.*.Rdata # Processed SA results
+├── sensitivity.samples.*.Rdata # SA parameter samples
+├── variance.decomposition.*.pdf # Variance decomposition analysis
+├── pft/ # Parameter (prior/posterior) files per PFT
+│ └── temperate.coniferous/
+└── sipnet # SIPNET binary (downloaded earlier)
+```
+
+### Model outputs and logs
+
+- Standardized netCDF files (`[year].nc`) for analysis and visualization
+- Raw model output (for SIPNET, e.g., `sipnet.out` per run)
+- `logfile.txt` with model and workflow messages
+- Note: `pft/` contains parameter files used in estimation; see the parameter-estimation tutorial (Demo 3) for details
+
+# Understanding PEcAn Uncertainty Analysis Outputs
+
+After running ensemble and sensitivity analyses, PEcAn generates several important outputs that help you understand model uncertainty and parameter sensitivity.
+
+The `samples.Rdata` file contains the parameter values used in the sensitivity and ensemble runs. It stores two objects, `sa.samples` and `ensemble.samples`, which are the parameter values for the sensitivity analysis and ensemble runs, respectively.
+
+## Ensemble Analysis Outputs
+
+The ensemble analysis produces:
+
+- **`ensemble.Rdata`**: Contains `ensemble.output` object with model predictions for all ensemble members
+- **`ensemble.analysis.[RunID].[Variable].[StartYear].[EndYear].pdf`**: Histogram and boxplot of ensemble predictions
+- **`ensemble.ts.[RunID].[Variable].[StartYear].[EndYear].pdf`**: Time-series plot showing ensemble mean, median, and 95% confidence intervals
+
+## Sensitivity Analysis Outputs
+
+The sensitivity analysis generates:
+
+- **`sensitivity.analysis.[RunID].[Variable].[StartYear].[EndYear].pdf`**: Raw data points from univariate analyses with spline fits.
+- **`sensitivity.output.[RunID].[Variable].[StartYear].[EndYear].Rdata`**: Model outputs corresponding to parameter variations.
+- `sensitivity.analysis.[RunID].[Variable].[StartYear].[EndYear].pdf` shows the raw data points from univariate one-at-a-time analyses and spline fits through the points. _Open this file_ to determine which parameters are most and least sensitive.
+
+## Variance Decomposition Outputs
+
+The variance decomposition produces:
+
+- **`variance.decomposition.[RunID].[Variable].[StartYear].[EndYear].pdf`**: Three-column analysis showing:
+ - Coefficient of variation (normalized posterior variance)
+ - Elasticity (normalized sensitivity)
+ - Partial standard deviation of each parameter
+
+## Interpreting the Results
+
+**Variance Decomposition Analysis:**
+
+- Parameters are sorted by their contribution to model output uncertainty (the right column).
+- Identify parameters that are:
+ - Highly sensitive but low uncertainty.
+ - Highly uncertain but low sensitivity.
+ - Both sensitive and uncertain.
+- Identify parameters that are both sensitive and uncertain for future constraint with data or expert knowledge.
+- Potential gotchas:
+ - Flat sensitivity curves: check that parameter values were correctly generated and read by the model.
+ - Parameters with high uncertainty: consider revising priors.
+ - Multi-modal or otherwise unexpected parameter distributions: check that parameter was specified correctly.
+
+ **Choose the parameter that you think provides the most efficient means of reducing model uncertainty and propose how you might best reduce uncertainty in this process**. In making this choice remember that not all processes in models can be directly observed, and that the cost-per-sample for different measurements can vary tremendously (and thus the parameter you measure next is not always the one contributing the most to model variability). Also consider the role of parameter uncertainty versus model sensitivity in justifying your choice of what parameters to constrain.
+
+# Visualize Uncertainty Analysis Results {#sec-visualize}
+
+This section loads the results from the uncertainty analyses and generates plots directly in the notebook. This provides an immediate view of the ensemble time series, sensitivity plots, and variance decomposition.
+
+## Ensemble Analysis Visualization
+
+Here we visualize the results of the ensemble analysis. It shows the overall distribution of the model output and how the output and its uncertainty change over time.
+
+This section reproduces the plots saved in the `ensemble.analysis/` folder in order to show the user how to access and visualize the results programmatically so that they can further investigate output and customize plots.
+
+```{r visualize-ensemble-results}
+# --- 1. Define Helper Variables ---
+# Extract key variables from the settings object to simplify file path construction
+# and plotting. This makes the code cleaner and easier to read.
+variable <- settings$ensemble$variable
+pft <- settings$pfts[[1]]
+start.year <- lubridate::year(settings$run$start.date)
+end.year <- lubridate::year(settings$run$end.date)
+ensemble.id <- if (!is.null(settings$ensemble$id)) settings$ensemble$id else "NOENSEMBLEID"
+
+# --- 2. Load and Plot Ensemble Output Distribution ---
+# This section visualizes the distribution of the ensemble model runs.
+# It generates a histogram and a boxplot to show the central tendency, spread,
+# and shape of the output variable's distribution.
+
+# Construct the path to the ensemble output file
+ens_file <- PEcAn.uncertainty::ensemble.filename(
+ settings,
+ prefix = "ensemble.output",
+ ensemble.id = ensemble.id,
+ variable = variable,
+ start.year = start.year,
+ end.year = end.year
+)
+
+# Check if the file exists, then load and plot the data
+if (file.exists(ens_file)) {
+ ens_env <- new.env()
+ load(ens_file, envir = ens_env)
+ ens_data <- as.numeric(unlist(ens_env$ensemble.output))
+
+ # Define units for plot labels
+ units <- paste0(variable, " (", PEcAn.utils::mstmipvar(variable, silent = TRUE)$units, ")")
+
+ # Create side-by-side histogram and boxplot
+ par(mfrow = c(1, 2), mar = c(4, 4.8, 1, 2))
+ hist(ens_data, xlab = units, main = "Ensemble Distribution", cex.lab = 1.4, col = "grey85")
+ box(lwd = 2.2)
+ boxplot(ens_data, ylab = units, main = "Ensemble Boxplot", col = "grey85", cex.lab = 1.5)
+ box(lwd = 2.2)
+ par(mfrow = c(1, 1))
+} else {
+ PEcAn.logger::logger.warn("Could not find ensemble output file:", ens_file)
+}
+
+
+# --- 3. Plot Ensemble Time Series ---
+# This section visualizes the ensemble results over time, showing the mean,
+# median, and 95% confidence interval of the model output.
+ens_ts_data <- PEcAn.uncertainty::read.ensemble.ts(settings, variable = variable)
+if (!is.null(ens_ts_data)) {
+ PEcAn.uncertainty::ensemble.ts(ens_ts_data)
+}
+```
+
+## Sensitivity and Variance Decomposition Visualization
+
+This block visualizes the results of the sensitivity analysis. The plots show how sensitive the model output is to changes in each parameter and which parameters contribute most to the overall uncertainty.
+
+```{r visualize-sensitivity-results}
+# --- 1. Load Sensitivity Analysis Results ---
+# This section loads the saved sensitivity analysis data, which contains the
+# outputs needed to generate the sensitivity and variance decomposition plots.
+
+# Construct the path to the sensitivity analysis results file
+sens_file <- file.path(
+ settings$outdir,
+ paste0("sensitivity.results", ".", ensemble.id, ".", variable, ".", start.year, ".", end.year, ".Rdata")
+)
+
+# Check if the file exists, then load the data and generate plots
+if (file.exists(sens_file)) {
+ sens_env <- new.env()
+ load(sens_file, envir = sens_env)
+ sensitivity.results <- sens_env$sensitivity.results
+
+ # --- 2. Generate Sensitivity Plots ---
+ # These plots show how the model output changes as each parameter is varied
+ # one at a time, helping to identify which parameters have the strongest influence.
+ sa_plots <- PEcAn.uncertainty::plot_sensitivities(
+ sensitivity.results[[pft$name]]$sensitivity.output
+ )
+ print(do.call(gridExtra::grid.arrange, c(sa_plots, ncol = floor(sqrt(length(sa_plots))))))
+
+ # --- 3. Generate Variance Decomposition Plots ---
+ # These plots break down the total output variance into contributions from
+ # each parameter, highlighting the most important sources of uncertainty.
+ vd_plots <- PEcAn.uncertainty::plot_variance_decomposition(
+ sensitivity.results[[pft$name]]$variance.decomposition.output
+ )
+ print(do.call(gridExtra::grid.arrange, c(vd_plots, ncol = 4)))
+} else {
+ PEcAn.logger::logger.warn("Could not find sensitivity results file:", sens_file)
+}
+```
+
+# Customizing Ensemble Analysis Parameters (Optional)
+
+## (Optional) Use this section only if you want to override the default ensemble analysis parameters. Skip if defaults are sufficient.
+> **Important:** If you modify the ensemble analysis parameters in this section, re-run @sec-run-uncertainty and then @sec-visualize to regenerate outputs and plots.
+
+```{r customize-ensemble}
+# Set the number of ensemble members (model runs)
+settings$ensemble$size <- 50
+
+# Specify the variable(s) to be analyzed in the ensemble
+# Single variable:
+settings$ensemble$variable <- "NEE"
+# Multiple variables (uncomment to use):
+# settings$ensemble$variable <- c("NEE", "NPP", "LAI")
+
+# Choose the method for sampling the parameter space (options: "uniform", "lhc", "halton", "sobol", "torus")
+settings$ensemble$samplingspace$parameters$method <- "halton"
+```
+
+
+# Customizing Sensitivity Analysis Parameters (Optional)
+
+## (Optional) Use this section only if you want to override the default sensitivity analysis parameters. Skip if defaults are sufficient.
+> **Important:** If you modify the sensitivity analysis parameters in this section, re-run @sec-run-uncertainty and then @sec-visualize to regenerate outputs and plots.
+
+```{r customize-sensitivity}
+# Set the quantiles (in standard deviations) for parameter distribution in sensitivity analysis
+settings$sensitivity.analysis$quantiles$sigma <- c(-2, -1, 1, 2)
+
+# Specify the variable to be analyzed in sensitivity analysis
+settings$sensitivity.analysis$variable <- "NEE"
+```
+
+# Extract Model Results and Prepare for Analysis
+
+After the model simulation completes, we need to extract the results and prepare them for analysis. This involves:
+
+1. Reading the run ID
+2. Setting up output paths
+3. Defining time period
+4. Loading model output
+
+```{r get-plot-vars}
+runid <- as.character(read.table(paste(settings$outdir, "/run/", "runs.txt", sep = ""))[1, 1]) # Note: if you are using an xml from a run with multiple ensembles this line will provide only the first run id
+# You can change [1,1] to [10,1], [5,1], etc. to select different run IDs from runs.txt
+# For example: [10,1] selects the 10th run ID, [5,1] selects the 5th run ID
+outdir <- paste(settings$outdir, "/out/", runid, sep = "")
+start.year <- as.numeric(lubridate::year(settings$run$start.date))
+end.year <- as.numeric(lubridate::year(settings$run$end.date))
+model_output <- PEcAn.utils::read.output(
+ runid,
+ outdir,
+ start.year,
+ end.year,
+ variables = NULL,
+ dataframe = TRUE,
+ verbose = FALSE
+)
+available_vars <- names(model_output)[!names(model_output) %in% c("posix", "time_bounds")]
+```
+
+# Display Available Model Variables
+
+This section shows all the variables that are available in the model output. These variables represent different ecosystem processes and states that the model has simulated.
+
+```{r show-variables}
+vars_df <- PEcAn.utils::standard_vars |>
+ dplyr::select(
+ Variable = Variable.Name,
+ Description = Long.name
+ ) |>
+ dplyr::filter(Variable %in% available_vars) |>
+  # TODO: add year to PEcAn.utils::standard_vars
+ dplyr::bind_rows(
+ dplyr::tibble(
+ Variable = "year",
+ Description = "Year"
+ )
+ )
+
+vars_df$Description[is.na(vars_df$Description)] <- "(No description available)"
+knitr::kable(vars_df, caption = "Model Output Variables and Descriptions")
+```
+
+# Visualize Model Results
+
+This section provides examples of how to create time series plots of different model variables. The examples cover various ecosystem processes including carbon fluxes, carbon pools, water variables, and structural variables like Leaf Area Index (LAI).
+
+## Plot Carbon Fluxes
+
+```{r plot-carbon-fluxes}
+# Plot Gross Primary Productivity (GPP) and Net Primary Productivity (NPP)
+plot(model_output$posix, model_output$GPP,
+ type = "l",
+ col = "green",
+ xlab = "Date",
+ ylab = "Carbon Flux (kg C m-2 s-1)",
+ main = paste("Carbon Fluxes Over Time — PEcAn", runid)
+)
+lines(model_output$posix, model_output$NPP, col = "blue")
+legend("topright", legend = c("GPP", "NPP"), col = c("green", "blue"), lty = 1)
+```
+
+## Plot Carbon Pools
+
+```{r plot-carbon-pools}
+# Plot Total Live Biomass and Total Soil Carbon
+plot(model_output$posix, model_output$TotLivBiom,
+ type = "l",
+ col = "darkgreen",
+ xlab = "Date",
+ ylab = "Carbon Pool (kg C m-2)",
+ main = paste("Carbon Pools Over Time — PEcAn", runid)
+)
+lines(model_output$posix, model_output$TotSoilCarb, col = "brown")
+legend("topright", legend = c("Total Live Biomass", "Total Soil Carbon"), col = c("darkgreen", "brown"), lty = 1)
+```
+
+## Plot Water Variables
+
+```{r plot-water-variables}
+# Plot Soil Moisture and Snow Water Equivalent
+plot(model_output$posix, model_output$SoilMoist,
+ type = "l",
+ col = "blue",
+ xlab = "Date",
+ ylab = "Soil Moisture (kg m-2)",
+ main = paste("Soil Moisture Over Time — PEcAn", runid)
+)
+lines(model_output$posix, model_output$SWE, col = "lightblue")
+legend("topright", legend = c("Soil Moisture", "Snow Water Equivalent"), col = c("blue", "lightblue"), lty = 1)
+```
+
+## Plot LAI and Biomass
+
+```{r plot-lai-biomass}
+# Plot Leaf Area Index (LAI) and Above Ground Wood
+plot(model_output$posix, model_output$LAI,
+ type = "l",
+ col = "darkgreen",
+ xlab = "Date",
+ ylab = "LAI (m2 m-2)",
+ main = paste("Leaf Area Index Over Time — PEcAn", runid)
+)
+lines(model_output$posix, model_output$AbvGrndWood, col = "brown")
+legend("topright", legend = c("LAI", "Above Ground Wood"), col = c("darkgreen", "brown"), lty = 1)
+```
+
+# Conclusion
+
+This notebook demonstrated how to set up, run, and analyze a PEcAn ecosystem model workflow programmatically. You can now modify parameters, try different models, or extend the analysis as needed.
+
+Try editing the `pecan.xml` file. Give it a new name and update the **settings_path** variable at the beginning of this Demo to point to the new file. See how the changes affect the model output!
+
+# Further Exploration
+
+The [next set of tutorials](#demo-table) will focus on the process of data assimilation and parameter estimation. The next two steps are in “.Rmd” files which can be viewed online.
+
+**Assimilation 'by hand'**
+
+[Explore](https://github.com/PecanProject/pecan/blob/main/documentation/tutorials/sensitivity/PEcAn_sensitivity_tutorial_v1.0.Rmd) how model error changes as a function of parameter value (i.e. data assimilation ‘by hand’)
+
+
+**MCMC Concepts**
+[Explore](https://github.com/PecanProject/pecan/blob/main/documentation/tutorials/MCMC/MCMC_Concepts.Rmd) Bayesian MCMC concepts using the photosynthesis module
+
+**More info about tools, analyses, and specific tasks…**
+
+Additional information about specific tasks (adding sites, models, data; software updates; etc.) and analyses (e.g. data assimilation) can be found in the PEcAn [documentation](https://pecanproject.github.io/pecan-documentation/)
+
+If you encounter a problem with PEcAn that’s not covered in the documentation, or if PEcAn is missing functionality you need, please search [known bugs and issues](https://github.com/PecanProject/pecan/issues?q=), submit a [bug report](https://github.com/PecanProject/pecan/issues/new/choose), or ask a question in our [chat room](https://join.slack.com/t/pecanproject/shared_invite/enQtMzkyODUyMjQyNTgzLWEzOTM1ZjhmYWUxNzYwYzkxMWVlODAyZWQwYjliYzA0MDA0MjE4YmMyOTFhMjYyMjYzN2FjODE4N2Y4YWFhZmQ).
+
+# Clean Up Workflow Output (Optional)
+
+If you want to remove all files and directories created by this workflow and start fresh, you can run the following code. This will delete the entire output directory specified in your settings. **Use with caution!**
+
+```{r cleanup}
+# WARNING: This will permanently delete all workflow output files!
+# Uncomment the line below to enable cleanup.
+# fs::dir_delete(settings$outdir)
+```
+
+
+# Session Information
+
+
+### PEcAn package versions.
+
+```{r version-info}
+PEcAn.all::pecan_version()
+```
+
+### R session information:
+
+```{r session-info}
+sessionInfo()
+```
diff --git a/documentation/tutorials/Demo_1_Basic_Run/download_sipnet.R b/documentation/tutorials/Demo_1_Basic_Run/download_sipnet.R
new file mode 100644
index 00000000000..498d63098e6
--- /dev/null
+++ b/documentation/tutorials/Demo_1_Basic_Run/download_sipnet.R
@@ -0,0 +1,77 @@
+# This script downloads the SIPNET binary for the appropriate operating system
+# and sets it up for use in the Demo_Run tutorial.
+os <- Sys.info()["sysname"]
+if(os == "Darwin") {
+ os <- "MacOS"
+}
+
+base_url <- "https://github.com/PecanProject/sipnet/releases/download/v1.3.0/"
+
+if (os == "Linux") {
+ download_url <- paste0(base_url, "sipnet-linux-v1.3.0")
+} else if (os == "MacOS") {
+ download_url <- paste0(base_url, "sipnet-macos-v1.3.0")
+} else {
+ PEcAn.logger::logger.error("Unsupported operating system: ", os)
+}
+
+demo_outdir <- file.path("demo_outdir")
+dest_path <- file.path(demo_outdir, "sipnet")
+if (!dir.exists(demo_outdir)) {
+ # using if(!dir.exists) instead of `showWarnings = FALSE`
+ # to allow warnings like 'cannot create dir ...'
+ dir.create(demo_outdir,
+ recursive = TRUE
+ )
+}
+
+PEcAn.logger::logger.info(
+ "Downloading SIPNET binary for", os, "..."
+)
+
+download.file(
+ url = download_url,
+ destfile = dest_path,
+ mode = "wb"
+)
+
+# Make executable
+Sys.chmod(dest_path, mode = "0755")
+
+## Now that the binary is downloaded, let's just check that `sipnet -h` works
+tryCatch(
+ {
+ # This block runs if system2 succeeds with exit code 0 (status attribute is NULL).
+ # Exit code 0 means successful execution.
+ system2(dest_path, "-h", stderr = TRUE, stdout = TRUE)
+ PEcAn.logger::logger.info("SIPNET has been installed!")
+ },
+ warning = function(w) {
+ # This block runs if system2 returns a non-zero exit code.
+ # We check the warning message for the expected status of 1.
+ if (grepl("had status 1", w$message, fixed = TRUE)) {
+ PEcAn.logger::logger.info("SIPNET has been installed!")
+ } else {
+ PEcAn.logger::logger.error("SIPNET ran but failed with an unexpected status.", "Details:", w$message)
+ }
+ },
+ error = function(e) {
+ # This block runs if system2 fails to execute the command at all.
+ PEcAn.logger::logger.error(
+ "SIPNET command failed to execute. The binary may be incompatible with your system.",
+ "Details:", e$message
+ )
+ }
+)
+
+dir.create("dbfiles", showWarnings = FALSE)
+# Download demo .clim file
+climfile <- "https://gist.githubusercontent.com/dlebauer/8aea1146dc8f915e1dea7a7335d7ec24/raw/4cc127098b0b42a0d428fc7de580e17aafca4e8b/AMF_US-NR1_BASE_HH_23-5.2004-01-01.2004-12-31.clim"
+clim_dest <- file.path("dbfiles", basename(climfile))
+if (!file.exists(clim_dest)) {
+ download.file(
+ url = climfile,
+ destfile = clim_dest,
+ mode = "wb"
+ )
+}
diff --git a/documentation/tutorials/Demo_1_Basic_Run/pecan.xml b/documentation/tutorials/Demo_1_Basic_Run/pecan.xml
new file mode 100644
index 00000000000..78790960d2d
--- /dev/null
+++ b/documentation/tutorials/Demo_1_Basic_Run/pecan.xml
@@ -0,0 +1,50 @@
+
+
+
+
+ -1
+
+ 2025-06-19-15-34-01
+
+ demo_outdir
+
+
+ temperate.coniferous
+ pft/temperate.coniferous/prior.distns.Rdata
+ pft/temperate.coniferous/pft/temperate.coniferous
+
+
+
+ NPP
+
+
+ SIPNET
+ git
+ FALSE
+ demo_outdir/sipnet
+
+
+
+ 2004/01/01
+ 2004/12/31
+ Niwot Ridge Forest/LTER NWT1 (US-NR1)
+ 40.0329
+ -105.546
+
+
+
+ AmerifluxLBL
+
+ Aritra_2004
+
+ dbfiles/AMF_US-NR1_BASE_HH_23-5.2004-01-01.2004-12-31.clim
+
+
+
+ 2004/01/01
+ 2004/12/31
+
+
+ localhost
+
+
\ No newline at end of file
diff --git a/documentation/tutorials/Demo_1_Basic_Run/pft/temperate.coniferous/prior.distns.Rdata b/documentation/tutorials/Demo_1_Basic_Run/pft/temperate.coniferous/prior.distns.Rdata
new file mode 100644
index 00000000000..1648cc1dfa8
Binary files /dev/null and b/documentation/tutorials/Demo_1_Basic_Run/pft/temperate.coniferous/prior.distns.Rdata differ
diff --git a/documentation/tutorials/Demo_1_Basic_Run/run_pecan.qmd b/documentation/tutorials/Demo_1_Basic_Run/run_pecan.qmd
new file mode 100644
index 00000000000..4dee5069ea6
--- /dev/null
+++ b/documentation/tutorials/Demo_1_Basic_Run/run_pecan.qmd
@@ -0,0 +1,361 @@
+---
+title: "Running Ecosystem Simulations Using PEcAn"
+author:
+ - "Aritra Dey"
+ - "David LeBauer"
+format:
+ pdf:
+ toc: true
+ number-sections: true
+ fig-width: 10
+ fig-height: 6
+ fig-dpi: 300
+---
+
+# Introduction {#introduction}
+
+Welcome to this PEcAn workflow notebook! This notebook will guide you through running an ecosystem model using PEcAn's programmatic interface.
+
+## What Is PEcAn?
+
+PEcAn (Predictive Ecosystem Analyzer) is a scientific workflow system designed to make ecosystem modeling more transparent, repeatable, and accessible. It helps researchers:
+
+- Run ecosystem models with standardized inputs and outputs
+- Perform uncertainty analysis on model parameters
+- Compare model predictions with observations
+- Share and reproduce scientific workflows
+
+## What This Notebook Does
+
+This notebook demonstrates how to:
+
+1. Set up and configure a PEcAn workflow
+2. Run an ecosystem model simulation
+3. Analyze and visualize the results
+
+### The Scenario Being Modeled:
+
+We are modeling carbon and productivity dynamics at the Niwot Ridge Forest AmeriFlux site ([US-NR1](https://ameriflux.lbl.gov/sites/siteinfo/US-NR1)), a high-elevation temperate coniferous forest in Colorado. The model configuration uses the SIPNET process-based ecosystem model, parameterized with a temperate coniferous plant functional type (PFT).
+
+The simulation is run for the full year 2004 (January 1 – December 31) using AmeriFlux LBL meteorological drivers from the Niwot Ridge site. The ensemble setup specifies one model run focusing on net primary productivity (NPP) as the target output variable.
+
+This scenario is designed to be a minimal, reproducible example to demonstrate how to run SIPNET within the PEcAn workflow. In later steps, this same framework can be extended to include more ensemble members, additional PFTs, longer time periods, or alternative meteorological inputs.
+
+This run is based on a study by [Moore et al. (2007)](https://doi.org/10.1016/j.agrformet.2008.04.013) that uses SIPNET to understand the relationship between water and carbon balance at this site.
+
+
+
+## Prerequisites
+
+Before running this notebook, make sure you have:
+
+- All the PEcAn packages installed. You can install all PEcAn packages and their dependencies by running the following command in the root of your PEcAn repository:
+
+```
+
+# Enable repository from pecanproject
+options(repos = c(
+ pecanproject = 'https://pecanproject.r-universe.dev',
+ CRAN = 'https://cloud.r-project.org'))
+# Download and install PEcAn.all in R
+install.packages('PEcAn.all')
+
+```
+
+- A valid `pecan.xml` configuration file or use the example provided: `pecan/documentation/tutorials/Demo_1_Basic_Run/pecan.xml`
+
+## How to Use This Notebook
+
+1. Each section is clearly marked with a heading
+2. Code chunks are provided with explanations
+3. You can run the code chunks sequentially
+4. Once you have successfully run the demo, you can modify parameters to configure new runs and analyses
+
+**Objective:**
+
+This demo illustrates how to run a basic PEcAn workflow using an R-based Quarto notebook. It will cover loading settings, writing model configuration files, and running model simulations. This approach provides a programmatic alternative to the web-based PEcAn interface for executing ecosystem models.
+
+# Session Info
+
+This section prints your R session information for reproducibility. Having this information at the beginning helps with debugging even if the notebook encounters errors later.
+
+```{r version-info}
+PEcAn.all::pecan_version()
+```
+
+# Install SIPNET v1.3.0
+
+If you haven't already installed the SIPNET binary, you can do so by running the following code. This will download the SIPNET binary to `demo_outdir/sipnet` and make it executable.
+
+> Note: The `demo_outdir` directory will be created in the root of your PEcAn installation (i.e., at `pecan/demo_outdir/`). This directory will contain the SIPNET binary as well as the output generated by PEcAn in this demo.
+
+```{r download-sipnet}
+# Download and install SIPNET v1.3.0
+source(
+ here::here(
+ "documentation/tutorials/Demo_1_Basic_Run/download_sipnet.R"
+ )
+)
+```
+
+> Note: You can find the most recent version of the SIPNET binary at: [SIPNET GitHub Releases](https://github.com/PecanProject/sipnet/releases), but this notebook is designed to work with SIPNET v1.3.0.
+
+# Load PEcAn Packages
+
+First, we need to load the PEcAn R packages. These packages provide all the functions we'll use to run the workflow.
+
+```{r libraries}
+# Load the PEcAn.all package, which includes all necessary PEcAn functionality
+library("PEcAn.all")
+```
+
+# Load PEcAn Settings File
+
+PEcAn uses an XML-based settings file (pecan.xml) to configure model runs. This file defines key information about the run including: PFT(s), site location, time period of the run, the location of input files and where outputs will be saved. Other settings outside the scope of this demo include the types of analyses that will be performed, how to connect to a database, and how to run it on a high performance computing cluster (we are using the default single model run on a single computer).
+
+You can read more about the settings file in the ["PEcAn XML" chapter](https://pecanproject.github.io/pecan-documentation/develop/pecanXML.html#pecanXML) of the documentation.
+
+There is an example `pecan.xml` that has been configured for this demonstration. You can find it at `pecan/documentation/tutorials/Demo_1_Basic_Run/pecan.xml`.
+
+
+This is how PEcAn loads the settings file:
+
+```{r load-settings}
+settings_path <- here::here("documentation/tutorials/Demo_1_Basic_Run/pecan.xml")
+```
+
+# Prepare and Validate Settings
+
+After specifying the path to the `pecan.xml` file, the next step involves reading and preparing these settings. PEcAn provides utilities to process and validate the configurations before execution begins.
+
+- `PEcAn.settings::read.settings(settings_path)`: Reads the `pecan.xml` file and converts it to an R list object.
+- `PEcAn.settings::prepare.settings(settings)`: Prepares and validates settings. It sets defaults for missing fields, changes file paths to absolute paths, and generally ensures consistency.
+
+```{r read-prepare-settings}
+# Read the settings from the pecan.xml file
+settings <- PEcAn.settings::read.settings(settings_path)
+
+# Prepare and validate the settings
+settings <- PEcAn.settings::prepare.settings(settings)
+```
+
+# Explore the Settings Object
+
+Once the settings have been read and prepared, it is useful to inspect the structure of the `settings` object. This object is an R list containing all parameters and configurations for the PEcAn workflow.
+
+* `str(settings)` displays the internal structure of the `settings` object. This shows how the settings are represented in R and is useful for debugging and verifying settings.
+
+```{r explore-settings}
+str(settings)
+```
+
+Your turn: explore the `settings` object further using the following commands:
+* `names(settings)` to list the top-level keys in the settings object
+* Once you know the names, you can look at each component in detail, for example:
+* `settings$run` to access the run-specific settings, such as start and end dates, model type, and output directory
+* `settings$pfts` to explore the Plant Functional Types settings
+
+Now you can update each of these settings. Here is a simple example:
+
+```{r add-info-to-settings}
+settings$info <- list(
+ author = "Aritra Dey",
+ date = Sys.Date(),
+ description = "Demo run of PEcAn using SIPNET"
+)
+```
+
+Editing the more interesting settings to change the PFT (`settings$pfts`) or extend the run (`settings$run$end.date`) is beyond the scope of this demo. You _could_ change the pft or the end date, but you would need a new file containing parameters for that PFT (`settings$pfts$pft$posterior.files`), or a climate file (`settings$run$met$path$path1`) that extends to the desired simulation period.
+
+# Create Output Directory
+
+Before running the workflow, we need to ensure that the output directory specified in the settings exists. This directory will store all the model outputs, configuration files, and results from the simulation.
+
+> **Note:** All outputs will be written to `settings$outdir`. You can change this directory in your `pecan.xml` file or by modifying `settings$outdir` in the notebook before running the workflow. You can also put the `sipnet` executable anywhere you prefer, as long as you update `settings$model$binary`.
+
+```{r create-outdir}
+dir.create(settings$outdir, recursive = TRUE, showWarnings = FALSE)
+```
+
+
+The directory structure created by PEcAn for this demo run will look like this:
+
+```
+demo_outdir/ # Root output directory
+├── run/ # Configuration & execution metadata
+│ ├── runs.txt # List of run IDs (one per model realization)
+│ ├── <run-id>/ # Model-specific config copies (sometimes)
+│ └── config.* # Generated model configs (e.g., SIPNET)
+├── out/ # Raw model outputs by run ID
+│ └── <run-id>/ # E.g., daily or sub-daily SIPNET output files
+```
+
+The root output directory is defined here as `demo_outdir/` by `settings$outdir`. This directory contains log and record files from the PEcAn workflow. They provide a detailed record of how data was generated and are key components of the analysis metadata and provenance. These can be useful for debugging as well as for downstream analysis.
+
+Key subdirectories include `run/` and `out/` that contain files used to configure and run the model, files generated by the underlying ecosystem model, and PEcAn standard outputs used in downstream analyses. These are described in subsequent sections.
+
+Additional outputs include logs, a `STATUS` file that records the steps of the workflow along with timestamps and whether each step was successful, and a copy of the `pecan.*.xml` file.
+
+# Write Model Configuration Files
+
+This step generates the model-specific configuration files and scripts that will be used to run the ecosystem model. The process involves:
+
+1. Disabling database write operations because we are not using a database
+2. Generating SIPNET configuration files using the `runModule.run.write.configs()` function.
+
+```{r write-configs}
+settings$database <- NULL # Disable database operations for this demo
+settings <- PEcAn.workflow::runModule.run.write.configs(settings)
+```
+
+# Run Model Simulations and Fetch Results
+
+This section executes the actual model simulations and retrieves the results. The process is managed by PEcAn's workflow system, which handles the execution of your chosen ecosystem model.
+
+* `runModule_start_model_runs(settings)`: This function initiates the model runs based on your configuration. It manages the execution of your chosen ecosystem model, using the configuration files generated in the previous step.
+
+```{r run-model}
+PEcAn.workflow::runModule_start_model_runs(settings)
+```
+
+
+This step generates raw model outputs in model-specific format (in this case, `sipnet.out`) as well as log files.
+
+# Extract Model Results and Prepare for Analysis
+
+After the model simulation completes, we need to extract the results and prepare them for analysis. This involves:
+
+1. Reading the run ID
+2. Setting up output paths
+3. Defining time period
+4. Loading model output
+5. Convert to a standard format
+
+```{r get-plot-vars}
+runid <- as.character(read.table(paste(settings$outdir, "/run/", "runs.txt", sep = ""))[1, 1]) # Note: if you are using an xml from a run with multiple ensembles this line will provide only the first run id
+outdir <- file.path(settings$outdir, "/out/", runid)
+start.year <- lubridate::year(settings$run$start.date)
+end.year <- lubridate::year(settings$run$end.date)
+model_output <- PEcAn.utils::read.output(
+ runid,
+ outdir,
+ start.year,
+ end.year,
+ variables = NULL,
+ dataframe = TRUE,
+ verbose = FALSE
+)
+available_vars <- names(model_output)[!names(model_output) %in% c("posix", "time_bounds")]
+```
+
+Running this code will convert model specific output files into a standardized netCDF ([year].nc) that can be downloaded for visualization and analysis (R, Matlab, ncview, panoply, etc). This is a key step, because this standardization enables PEcAn to apply downstream analyses to outputs from different ecosystem models.
+
+# Display Available Model Variables
+
+This section shows all the variables that are available in the model output. These variables represent different ecosystem processes and states that the model has simulated.
+
+```{r show-variables, echo=FALSE}
+vars_df <- PEcAn.utils::standard_vars |>
+ dplyr::select(
+ Variable = Variable.Name,
+ Description = Long.name
+ ) |>
+ dplyr::filter(Variable %in% available_vars) |>
+  # TODO: add year to PEcAn.utils::standard_vars
+ dplyr::bind_rows(
+ dplyr::tibble(
+ Variable = "year",
+ Description = "Year"
+ )
+ )
+
+vars_df$Description[is.na(vars_df$Description)] <- "(No description available)"
+knitr::kable(vars_df, caption = "Model Output Variables and Descriptions")
+```
+
+# Visualize Model Results
+
+This section provides examples of how to create time series plots of different model variables. The examples cover various ecosystem processes including carbon fluxes, carbon pools, water variables, and structural variables like Leaf Area Index (LAI).
+
+## Plot Carbon Fluxes
+
+```{r plot-carbon-fluxes}
+# Plot Gross Primary Productivity (GPP) and Net Primary Productivity (NPP)
+plot(model_output$posix, model_output$GPP,
+ type = "l",
+ col = "green",
+ xlab = "Date",
+ ylab = "Carbon Flux (kg C m-2 s-1)",
+ main = "Carbon Fluxes Over Time"
+)
+lines(model_output$posix, model_output$NPP, col = "blue")
+legend("topright", legend = c("GPP", "NPP"), col = c("green", "blue"), lty = 1)
+```
+
+## Plot Carbon Pools
+
+```{r plot-carbon-pools}
+# Plot Total Live Biomass and Total Soil Carbon
+plot(model_output$posix, model_output$TotLivBiom,
+ type = "l",
+ col = "darkgreen",
+ xlab = "Date",
+ ylab = "Carbon Pool (kg C m-2)",
+ main = "Carbon Pools Over Time"
+)
+lines(model_output$posix, model_output$TotSoilCarb, col = "brown")
+legend("topright", legend = c("Total Live Biomass", "Total Soil Carbon"), col = c("darkgreen", "brown"), lty = 1)
+```
+
+## Plot Water Variables
+
+```{r plot-water-variables}
+# Plot Soil Moisture and Snow Water Equivalent
+plot(model_output$posix, model_output$SoilMoist,
+ type = "l",
+ col = "blue",
+ xlab = "Date",
+ ylab = "Soil Moisture (kg m-2)",
+ main = "Soil Moisture Over Time"
+)
+lines(model_output$posix, model_output$SWE, col = "lightblue")
+legend("topright", legend = c("Soil Moisture", "Snow Water Equivalent"), col = c("blue", "lightblue"), lty = 1)
+```
+
+## Plot LAI and Biomass
+
+```{r plot-lai-biomass}
+# Plot Leaf Area Index (LAI) and Above Ground Wood
+plot(model_output$posix, model_output$LAI,
+ type = "l",
+ col = "darkgreen",
+ xlab = "Date",
+ ylab = "LAI (m2 m-2)",
+ main = "Leaf Area Index Over Time"
+)
+lines(model_output$posix, model_output$AbvGrndWood, col = "brown")
+legend("topright", legend = c("LAI", "Above Ground Wood"), col = c("darkgreen", "brown"), lty = 1)
+```
+
+# Conclusion
+
+This notebook demonstrated how to set up, run, and analyze a PEcAn ecosystem model workflow programmatically. You can now modify parameters, try different models, or extend the analysis as needed.
+
+Try editing the `pecan.xml` file. Give it a new name and update the **settings_path** variable at the beginning of this Demo to point to the new file. See how the changes affect the model output!
+
+# Clean Up Workflow Output (Optional)
+
+If you want to remove all files and directories created by this workflow and start fresh, you can run the following code. This will delete the entire output directory specified in your settings. **Use with caution!**
+
+```{r cleanup}
+# WARNING: This will permanently delete all workflow output files!
+# Uncomment the line below to enable cleanup.
+# fs::dir_delete(settings$outdir)
+```
+
+# Session Info
+This section prints your R session information for reproducibility.
+
+```{r session-info}
+sessionInfo()
+```
diff --git a/documentation/tutorials/multisite-workflow/multisite-workflow-example.qmd b/documentation/tutorials/multisite-workflow/multisite-workflow-example.qmd
new file mode 100644
index 00000000000..01e7755fd3b
--- /dev/null
+++ b/documentation/tutorials/multisite-workflow/multisite-workflow-example.qmd
@@ -0,0 +1,117 @@
+---
+title: "PEcAn Multisite Flat-File Workflow Example"
+format: html
+editor: visual
+---
+
+## Introduction
+
+This notebook demonstrates how to set up a multisite workflow in PEcAn that uses a subset of the sites defined in a flat-file (`site_info.csv`), without querying the database.
+You will learn how to read site metadata, filter by group, and create a MultiSettings object for your runs.
+
+Sites stored in the BETY database can be queried by `sitegroup` to retrieve predefined sets of sites. Inside the database a sitegroup is nothing but a name given to a list of siteIDs, so in a flat file context we can do the same thing by adding one or more grouping columns.
+
+## Example `site_info.csv`
+
+```{r}
+# For this demo:
+site_info <- tibble::tribble(
+ ~id, ~lat, ~lon, ~elev, ~site_group,
+ 101, 40.1, -88.2, 10, "NEON",
+ 102, 41.2, -87.9, 1980, "NEON",
+ 201, 39.9, -90.0, 925, "Ameriflux",
+ 202, 38.5, -89.5, 1108, "Ameriflux"
+)
+# In a real workflow:
+# site_info <- read.csv("site_info.csv")
+```
+
+
+## Load Required Libraries
+
+```{r}
+library(dplyr)
+library(PEcAn.settings)
+```
+
+
+> **NOTE:** You must provide a 'settings' object (as an R list) before running the code below.
+> Or load from file: `settings <- read.settings("settings.xml")`
+
+### Sample settings object for running this notebook (MultiSettings demo)
+
+```{r}
+settings <- list(
+ outdir = "pecan_runs",
+ run = list(
+ start.date = "2002-01-01",
+ end.date = "2002-12-31",
+ site = list(),
+ inputs = list(
+ met = list(
+ id = 99000000001
+ )
+ )
+ ),
+ model = list(
+ id = 1000000022,
+ type = "SIPNET"
+ ),
+ pfts = list(
+ )
+)
+settings <- as.Settings(settings)
+
+```
+
+
+## Filter by one group
+
+```{r}
+# Filter for NEON sites (change "NEON" to your desired group)
+selected_sites <- site_info %>% filter(site_group == "NEON")
+print(selected_sites) ## prints demo output (not required in main code)
+settings_neon <- createMultiSiteSettings(settings, selected_sites) ## populates settings with metadata for each site in the sitegroup
+listToXml(settings_neon, tag = "pecan")
+
+```
+
+
+## Group by multiple characteristics
+
+Say you're developing a new parameterization for the conifer PFT and you want to test it specifically at high elevation. By filtering and reassigning, this can be done without any edits to the saved `site_info.csv`:
+
+> **NOTE:** Before running the code below, clear your R workspace and recreate the 'settings' object.
+> If 'settings' is already a MultiSettings object from a previous run, this code will fail.
+> Always start with a fresh 'settings' (as a single Settings object) to avoid errors.
+
+```{r}
+
+settings$pfts <- settings$pfts |>
+ append(list(
+ name="new_conifer",
+ posterior.files = "/path/to/new_conifer/post.distns.Rdata"))
+neon_hi_elev <- site_info |>
+ filter(
+ site_group == "NEON",
+ elev > 1000) |>
+ mutate(site.pft = "new_conifer")
+settings_neon <- createMultiSiteSettings(settings, neon_hi_elev)
+```
+
+## Shows settings object before and after `createMultiSiteSettings()`
+
+```{r}
+listToXml(settings, tag = "pecan")
+listToXml(settings_neon, tag = "pecan")
+```
+
+
+
+## Summary
+- You can use a flat-file (site_info.csv) to manage multisite workflows in PEcAn.
+- Filter your sites in R, then pass the filtered data frame to createMultiSiteSettings().
+- Group your data by multiple characteristics.
+- No database queries are needed for site metadata.
+
+
diff --git a/models/basgra/DESCRIPTION b/models/basgra/DESCRIPTION
index d2e785cd67d..e354c4ebcb8 100644
--- a/models/basgra/DESCRIPTION
+++ b/models/basgra/DESCRIPTION
@@ -1,11 +1,13 @@
Package: PEcAn.BASGRA
Type: Package
Title: PEcAn Package for Integration of the BASGRA Model
-Version: 1.8.1
+Version: 1.8.2
Authors@R: c(person("Istem", "Fer", role = c("aut", "cre"),
email = "istem.fer@fmi.fi"),
person("University of Illinois, NCSA", role = c("cph")))
Description: This module provides functions to link the BASGRA model to PEcAn.
+URL: https://pecanproject.github.io
+BugReports: https://github.com/PecanProject/pecan/issues
Depends: R (>= 4.0.0)
Imports:
PEcAn.logger,
@@ -24,3 +26,4 @@ LazyLoad: yes
LazyData: FALSE
Encoding: UTF-8
RoxygenNote: 7.3.2
+X-schema.org-keywords: BASGRA, ecosystem-modeling
diff --git a/models/basgra/NEWS.md b/models/basgra/NEWS.md
index d9f25031d67..43aaec0fdd2 100644
--- a/models/basgra/NEWS.md
+++ b/models/basgra/NEWS.md
@@ -1,3 +1,8 @@
+# PEcAn.BASGRA 1.8.2
+
+* Minor documentation fixes
+
+
# PEcAn.BASGRA 1.8.1
## License change
diff --git a/models/basgra/R/write_restart.BASGRA.R b/models/basgra/R/write_restart.BASGRA.R
index 6b4c8062a7c..3098581b05e 100644
--- a/models/basgra/R/write_restart.BASGRA.R
+++ b/models/basgra/R/write_restart.BASGRA.R
@@ -1,11 +1,11 @@
-##' @title write_restart.SIPNET
-##'
+##' write_restart.BASGRA
+##'
##' @author Istem Fer
##'
##' @inheritParams PEcAn.ModelName::write_restart.ModelName
##'
-##' @description Write restart files for BASGRA
-##'
+##' @description Write restart files for BASGRA
+##'
##' @return TRUE if successful
##' @export
write_restart.BASGRA <- function(outdir, runid, start.time, stop.time, settings, new.state,
diff --git a/models/basgra/man/write_restart.BASGRA.Rd b/models/basgra/man/write_restart.BASGRA.Rd
index aa970cc931d..2e5dff801cd 100644
--- a/models/basgra/man/write_restart.BASGRA.Rd
+++ b/models/basgra/man/write_restart.BASGRA.Rd
@@ -2,7 +2,7 @@
% Please edit documentation in R/write_restart.BASGRA.R
\name{write_restart.BASGRA}
\alias{write_restart.BASGRA}
-\title{write_restart.SIPNET}
+\title{write_restart.BASGRA}
\usage{
write_restart.BASGRA(
outdir,
diff --git a/models/biocro/DESCRIPTION b/models/biocro/DESCRIPTION
index 98dffd63eb2..ada108754c3 100644
--- a/models/biocro/DESCRIPTION
+++ b/models/biocro/DESCRIPTION
@@ -1,7 +1,7 @@
Package: PEcAn.BIOCRO
Type: Package
Title: PEcAn Package for Integration of the BioCro Model
-Version: 1.7.4
+Version: 1.7.5
Authors@R: c(person("David", "LeBauer", role = c("aut", "cre"),
email = "dlebauer@email.arizona.edu"),
person("Chris", "Black", role = c("aut"),
@@ -12,6 +12,8 @@ Authors@R: c(person("David", "LeBauer", role = c("aut", "cre"),
Author: David LeBauer, Deepak Jaiswal, Christopher Black
Maintainer: David LeBauer
Description: This module provides functions to link BioCro to PEcAn.
+URL: https://pecanproject.github.io
+BugReports: https://github.com/PecanProject/pecan/issues
Imports:
PEcAn.logger,
PEcAn.remote,
@@ -40,3 +42,4 @@ Copyright: Energy Biosciences Institute, Authors
Encoding: UTF-8
VignetteBuilder: knitr, rmarkdown
RoxygenNote: 7.3.2
+X-schema.org-keywords: BIOCRO, ecosystem-modeling
diff --git a/models/biocro/NEWS.md b/models/biocro/NEWS.md
index f0d93834c79..49945dc266f 100644
--- a/models/biocro/NEWS.md
+++ b/models/biocro/NEWS.md
@@ -1,3 +1,8 @@
+# PEcAn.BIOCRO 1.7.5
+
+* model2netcdf.BIOCRO no longer writes separate `.nc.var` files for every year of output. Use `PEcAn.utils::nc_write_varfiles()` to create these as needed.
+
+
# PEcAn.BIOCRO 1.7.4
## License change
diff --git a/models/biocro/R/model2netcdf.BIOCRO.R b/models/biocro/R/model2netcdf.BIOCRO.R
index d6fb4dbbff7..f1015a32067 100644
--- a/models/biocro/R/model2netcdf.BIOCRO.R
+++ b/models/biocro/R/model2netcdf.BIOCRO.R
@@ -82,16 +82,10 @@ model2netcdf.BIOCRO <- function(result, genus = NULL, outdir, lat = -9999, lon =
ncdf4::ncatt_put(nc, 0, "description", "This is an output from the BioCro Crop model generated by the model2netcdf.BIOCRO.R function in the PEcAn.BIOCRO package; see https://pecanproject.github.io/pecan-documentation/latest/ for more information")
}
- varfile <- file(file.path(outdir, paste(yeari, "nc", "var", sep = ".")), "w")
-
## Output netCDF data
for (.vname in names(vars)) {
ncdf4::ncvar_put(nc, varid = vars[[.vname]], vals = result_yeari_std[[.vname]])
- cat(paste(vars[[.vname]]$name, vars[[.vname]]$longname), file = varfile,
- sep = "\n")
}
-
- close(varfile)
ncdf4::nc_close(nc)
}
} # model2netcdf.BIOCRO
diff --git a/models/biocro/tests/testthat/test.met2model.R b/models/biocro/tests/testthat/test.met2model.R
index 7e87a7a8832..5a4871ea41f 100644
--- a/models/biocro/tests/testthat/test.met2model.R
+++ b/models/biocro/tests/testthat/test.met2model.R
@@ -5,10 +5,6 @@ setup(dir.create(outfolder, showWarnings = FALSE))
teardown(unlink(outfolder, recursive = TRUE))
test_that("Met conversion runs without error", {
- skip(paste0(
- "BIOCRO met2model is currently broken. ",
- "See issue #2274 (https://github.com/PecanProject/pecan/issues/2274)."
- ))
nc_path <- system.file("test-data", "CRUNCEP.2000.nc",
package = "PEcAn.utils")
in.path <- dirname(nc_path)
diff --git a/models/cable/DESCRIPTION b/models/cable/DESCRIPTION
index 6168e099010..8be92e17311 100644
--- a/models/cable/DESCRIPTION
+++ b/models/cable/DESCRIPTION
@@ -1,7 +1,7 @@
Package: PEcAn.CABLE
Type: Package
Title: PEcAn package for integration of the CABLE model
-Version: 1.7.4
+Version: 1.7.5
Authors@R: c(person("Kaitlin", "Ragosta", role = c("aut")),
person("Tony", "Gardella", role = c("aut", "cre"),
email = "tonygard@bu.edu"),
@@ -9,6 +9,8 @@ Authors@R: c(person("Kaitlin", "Ragosta", role = c("aut")),
Author: Kaitlin Ragosta
Maintainer: Tony Gardella
Description: This module provides functions to link the (CABLE) to PEcAn.
+URL: https://pecanproject.github.io
+BugReports: https://github.com/PecanProject/pecan/issues
Imports:
PEcAn.logger,
PEcAn.utils (>= 1.4.8)
@@ -22,3 +24,4 @@ LazyLoad: yes
LazyData: FALSE
Encoding: UTF-8
RoxygenNote: 7.3.2
+X-schema.org-keywords: CABLE, ecosystem-modeling
diff --git a/models/clm45/DESCRIPTION b/models/clm45/DESCRIPTION
index d44e478ca47..1ebb23ccf53 100644
--- a/models/clm45/DESCRIPTION
+++ b/models/clm45/DESCRIPTION
@@ -1,7 +1,7 @@
Package: PEcAn.CLM45
Type: Package
Title: PEcAn Package for Integration of CLM4.5 Model
-Version: 1.7.4
+Version: 1.7.5
Authors@R: c(person("Mike", "Dietze", role = c("aut", "cre"),
email = "dietze@bu.edu"),
person("University of Illinois, NCSA", role = c("cph")))
@@ -11,6 +11,8 @@ Description: The Predictive Ecosystem Carbon Analyzer (PEcAn) is a scientific
streamline the interaction between data and models, and to improve the
efficacy of scientific investigation. This package provides functions to
link the Community Land Model, version 4.5, to PEcAn.
+URL: https://pecanproject.github.io
+BugReports: https://github.com/PecanProject/pecan/issues
Depends:
PEcAn.logger,
PEcAn.utils
@@ -24,3 +26,4 @@ LazyLoad: yes
LazyData: FALSE
Encoding: UTF-8
RoxygenNote: 7.3.2
+X-schema.org-keywords: CLM45, ecosystem-modeling
diff --git a/models/clm45/NEWS.md b/models/clm45/NEWS.md
index feb0c6f76fc..fa3c8ad6306 100644
--- a/models/clm45/NEWS.md
+++ b/models/clm45/NEWS.md
@@ -1,3 +1,8 @@
+# PEcAn.CLM45 1.7.5
+
+* Added keywords and project URL to DESCRIPTION
+
+
# PEcAn.CLM45 1.7.4
## License change
diff --git a/models/dalec/DESCRIPTION b/models/dalec/DESCRIPTION
index 7b2fa281c8f..ac39d9ebba2 100644
--- a/models/dalec/DESCRIPTION
+++ b/models/dalec/DESCRIPTION
@@ -1,7 +1,7 @@
Package: PEcAn.DALEC
Type: Package
Title: PEcAn Package for Integration of the DALEC Model
-Version: 1.7.4
+Version: 1.7.5
Authors@R: c(person("Mike", "Dietze", role = c("aut", "cre"),
email = "dietze@bu.edu"),
person("Tristan", "Quaife", role = c("aut")),
@@ -10,6 +10,8 @@ Authors@R: c(person("Mike", "Dietze", role = c("aut", "cre"),
Author: Mike Dietze, Tristain Quaife
Maintainer: Mike Dietze
Description: This module provides functions to link DALEC to PEcAn.
+URL: https://pecanproject.github.io
+BugReports: https://github.com/PecanProject/pecan/issues
Imports:
PEcAn.logger,
PEcAn.remote,
@@ -26,3 +28,4 @@ LazyLoad: yes
LazyData: FALSE
Encoding: UTF-8
RoxygenNote: 7.3.2
+X-schema.org-keywords: DALEC, ecosystem-modeling
diff --git a/models/dalec/NEWS.md b/models/dalec/NEWS.md
index 1bf03068ca9..530e4e2a2f9 100644
--- a/models/dalec/NEWS.md
+++ b/models/dalec/NEWS.md
@@ -1,3 +1,8 @@
+# PEcAn.DALEC 1.7.5
+
+* model2netcdf.DALEC no longer writes separate `.nc.var` files for every year of output. Use `PEcAn.utils::nc_write_varfiles()` to create these as needed.
+
+
# PEcAn.DALEC 1.7.4
## License change
diff --git a/models/dalec/R/model2netcdf.DALEC.R b/models/dalec/R/model2netcdf.DALEC.R
index 18be5fcb01f..4b40b49fc16 100644
--- a/models/dalec/R/model2netcdf.DALEC.R
+++ b/models/dalec/R/model2netcdf.DALEC.R
@@ -143,16 +143,12 @@ model2netcdf.DALEC <- function(outdir, sitelat, sitelon, start_date, end_date) {
### Output netCDF data
nc <- ncdf4::nc_create(file.path(outdir, paste(y, "nc", sep = ".")), nc_var)
ncdf4::ncatt_put(nc, "time", "bounds", "time_bounds", prec=NA)
- varfile <- file(file.path(outdir, paste(y, "nc", "var", sep = ".")), "w")
for (i in seq_along(nc_var)) {
ncdf4::ncvar_put(nc, nc_var[[i]], output[[i]])
- cat(paste(nc_var[[i]]$name, nc_var[[i]]$longname), file = varfile, sep = "\n")
}
- close(varfile)
ncdf4::nc_close(nc)
} ### End of year loop
-
} # model2netcdf.DALEC
# ==================================================================================================#
## EOF
diff --git a/models/dvmdostem/DESCRIPTION b/models/dvmdostem/DESCRIPTION
index cf78089b1dc..5e1a3bf322b 100644
--- a/models/dvmdostem/DESCRIPTION
+++ b/models/dvmdostem/DESCRIPTION
@@ -1,7 +1,7 @@
Package: PEcAn.dvmdostem
Type: Package
Title: PEcAn Package for Integration of the Dvmdostem Model
-Version: 1.7.4
+Version: 1.7.5
Authors@R: c(person("Shawn", "Serbin", role = c("aut"),
email = "sserbin@bnl.gov"),
person("Tobey", "Carman", role = c("aut", "cre"),
@@ -11,6 +11,8 @@ Author: Tobey Carman, Shawn Serbin
Maintainer: Tobey Carman , Shawn Serbin
Description: This module provides functions to link the dvmdostem model to
PEcAn.
+URL: https://pecanproject.github.io
+BugReports: https://github.com/PecanProject/pecan/issues
Imports:
lubridate,
ncdf4,
@@ -27,3 +29,6 @@ LazyLoad: yes
LazyData: FALSE
Encoding: UTF-8
RoxygenNote: 7.3.2
+X-schema.org-keywords: dvmdostem, ecosystem-modeling
+
+
diff --git a/models/dvmdostem/NEWS.md b/models/dvmdostem/NEWS.md
index 9e556142db1..a97becbc891 100644
--- a/models/dvmdostem/NEWS.md
+++ b/models/dvmdostem/NEWS.md
@@ -1,3 +1,8 @@
+# PEcAn.dvmdostem 1.7.5
+
+* model2netcdf.dvmdostem no longer writes separate `.nc.var` files for every year of output. Use `PEcAn.utils::nc_write_varfiles()` to create these as needed.
+
+
# PEcAn.dvmdostem 1.7.4
## License change
diff --git a/models/dvmdostem/R/model2netcdf.dvmdostem.R b/models/dvmdostem/R/model2netcdf.dvmdostem.R
index 2bb0517e69f..66323fbd8b5 100644
--- a/models/dvmdostem/R/model2netcdf.dvmdostem.R
+++ b/models/dvmdostem/R/model2netcdf.dvmdostem.R
@@ -325,14 +325,6 @@ model2netcdf.dvmdostem <- function(outdir, runstart, runend, pecan_requested_var
}
ncout <- ncdf4::nc_create(file.path(outdir, paste0(as.character(lubridate::year(all_yrs[i])), ".nc")), newvars)
- # extract variable and long names to VAR file for PEcAn visibility
- # THIS NEEDS TO BE KEPT AND USED FOR PROPER PLOTTING
- write.table(sapply(ncout$var, function(x) { x$longname }),
- file = file.path(outdir,paste0(as.character(lubridate::year(all_yrs[i])), ".nc.var")),
- col.names = FALSE,
- row.names = TRUE,
- quote = FALSE)
-
ncdf4::nc_close(ncout)
}
diff --git a/models/ed/DESCRIPTION b/models/ed/DESCRIPTION
index 4c18b42319e..cf0210efd7e 100644
--- a/models/ed/DESCRIPTION
+++ b/models/ed/DESCRIPTION
@@ -1,7 +1,7 @@
Package: PEcAn.ED2
Type: Package
Title: PEcAn Package for Integration of ED2 Model
-Version: 1.8.1
+Version: 1.8.2
Authors@R: c(person("Mike", "Dietze", role = c("aut", "cre"),
email = "dietze@bu.edu"),
person("David", "LeBauer", role = c("aut"),
@@ -30,6 +30,8 @@ Description: The Predictive Ecosystem Carbon Analyzer (PEcAn) is a scientific
streamline the interaction between data and models, and to improve the
efficacy of scientific investigation. This package provides functions to
link the Ecosystem Demography Model, version 2, to PEcAn.
+URL: https://pecanproject.github.io
+BugReports: https://github.com/PecanProject/pecan/issues
Depends:
R (>= 3.5)
Imports:
@@ -66,3 +68,4 @@ Encoding: UTF-8
RoxygenNote: 7.3.2
Roxygen: list(markdown = TRUE)
Config/testthat/edition: 2
+X-schema.org-keywords: ED2, ecosystem-modeling
diff --git a/models/ed/NEWS.md b/models/ed/NEWS.md
index 53cb07d3ab1..5511ddca588 100644
--- a/models/ed/NEWS.md
+++ b/models/ed/NEWS.md
@@ -1,3 +1,9 @@
+# PEcAn.ED2 1.8.2
+
+* model2netcdf.ed2 no longer writes separate `.nc.var` files for every year of output. Use `PEcAn.utils::nc_write_varfiles()` to create these as needed.
+* Minor test fixes
+
+
# PEcAn.ED2 1.8.1
* Fixed vignette compilation
diff --git a/models/ed/R/model2netcdf.ED2.R b/models/ed/R/model2netcdf.ED2.R
index 6bcf9150e16..edc2d06b4a6 100644
--- a/models/ed/R/model2netcdf.ED2.R
+++ b/models/ed/R/model2netcdf.ED2.R
@@ -193,15 +193,11 @@ model2netcdf.ED2 <- function(outdir,
if (file.check[["-E-"]]==TRUE) {
ncdf4::ncatt_put(nc, "dtime", "bounds", "dtime_bounds", prec = NA)
}
- varfile <- file(file.path(outdir, paste(y, "nc", "var", sep = ".")), "w")
# fill nc file with data
for (i in seq_along(nc_var)) {
var_put(nc, varid = nc_var[[i]], vals = out[[i]])
- cat(paste(nc_var[[i]]$name, nc_var[[i]]$longname), file = varfile,
- sep = "\n")
}
ncdf4::nc_close(nc)
- close(varfile)
} # end year-loop
} # model2netcdf.ED2
diff --git a/models/ed/tests/testthat/test-read_ET_files.R b/models/ed/tests/testthat/test-read_ET_files.R
index ef7fd6c0ce8..3caca5d46e0 100644
--- a/models/ed/tests/testthat/test-read_ET_files.R
+++ b/models/ed/tests/testthat/test-read_ET_files.R
@@ -12,6 +12,7 @@ settings <-
PEcAn.settings::read.settings(file.path(testdir, "outdir", "pecan_checked.xml"))
settings$outdir <- file.path(testdir, "outdir")
+expected_vars <- c("AGB_PFT", "BSEEDS", "DBH", "NPP_PFT", "TRANSP_PFT", "DENS", "PFT")
test_that("read E files without ED2 pft number", {
pfts_without_number <- list(
@@ -34,8 +35,7 @@ test_that("read E files without ED2 pft number", {
settings = settings
)
expect_type(result, "list")
- expect_equal(length(result), 7) #TODO: expectation of number of variables will have to change
- #TODO: better test would be to check for specific variables in output
+ expect_equal(names(result), expected_vars)
})
test_that("read E files without settings arg and with ED2 pft number", {
@@ -51,7 +51,7 @@ test_that("read E files without settings arg and with ED2 pft number", {
pfts = pft_with_number
)
expect_type(result, "list")
- expect_equal(length(result), 7)
+ expect_equal(names(result), expected_vars)
})
test_that("read E files without only settings arg", {
@@ -66,7 +66,7 @@ test_that("read E files without only settings arg", {
settings = settings
)
expect_type(result, "list")
- expect_equal(length(result), 7)
+ expect_equal(names(result), expected_vars)
})
diff --git a/models/ed/tests/testthat/test.model2netcdf.ED2.R b/models/ed/tests/testthat/test.model2netcdf.ED2.R
index 64ca5b24c0a..2d1f4497e26 100644
--- a/models/ed/tests/testthat/test.model2netcdf.ED2.R
+++ b/models/ed/tests/testthat/test.model2netcdf.ED2.R
@@ -34,20 +34,16 @@ test_that("a valid .nc file is produced for each corresponding ED2 output", {
h5_T_files <- dir(outdir, pattern = "-T-.*.h5")
nc_files <- dir(outdir, pattern = ".nc$")
- nc_var_files <- dir(outdir, pattern = ".nc.var$")
-
+
expect_equal(length(h5_T_files), length(nc_files))
- expect_equal(length(h5_T_files), length(nc_var_files))
- h5years <- str_extract(h5_T_files, "\\d{4}")
- ncyears <- str_extract(nc_files, "\\d{4}")
+ h5years <- str_extract(h5_T_files, "\\d{4}")
+ ncyears <- str_extract(nc_files, "\\d{4}")
expect_equal(as.numeric(ncyears), as.numeric(h5years))
- ncvaryears <- str_extract(nc_var_files, "\\d{4}")
- expect_equal(as.numeric(ncvaryears), as.numeric(h5years))
})
-test_that("read.output extracts data from nc file",{
+test_that("read.output extracts data from nc file", {
vars <- c("GPP", "NEE", "DOC_flux", "Evap", "TVeg", "Qsb", "Rainf")
x <-
PEcAn.utils::read.output(runid = runid,
diff --git a/models/fates/DESCRIPTION b/models/fates/DESCRIPTION
index 5d5d32e19c0..8ce1acbea2f 100644
--- a/models/fates/DESCRIPTION
+++ b/models/fates/DESCRIPTION
@@ -1,7 +1,7 @@
Package: PEcAn.FATES
Type: Package
Title: PEcAn Package for Integration of FATES Model
-Version: 1.8.0
+Version: 1.8.1
Authors@R: c(person("Mike", "Dietze", role = c("aut", "cre"),
email = "dietze@bu.edu"),
person("Shawn", "Serbin", role = c("aut"),
@@ -15,6 +15,8 @@ Description: The Predictive Ecosystem Carbon Analyzer (PEcAn) is a scientific
streamline the interaction between data and models, and to improve the
efficacy of scientific investigation. This package provides functions to
link the FATES model to PEcAn.
+URL: https://pecanproject.github.io
+BugReports: https://github.com/PecanProject/pecan/issues
Imports:
stringr,
PEcAn.logger,
@@ -31,3 +33,4 @@ LazyLoad: yes
LazyData: FALSE
Encoding: UTF-8
RoxygenNote: 7.3.2
+X-schema.org-keywords: FATES, ecosystem-modeling
diff --git a/models/fates/NEWS.md b/models/fates/NEWS.md
index 09142e09061..9455d55d32b 100644
--- a/models/fates/NEWS.md
+++ b/models/fates/NEWS.md
@@ -1,3 +1,8 @@
+# PEcAn.FATES 1.8.1
+
+* model2netcdf.FATES no longer writes separate `.nc.var` files for every year of output. Use `PEcAn.utils::nc_write_varfiles()` to create these as needed.
+
+
# PEcAn.FATES 1.8.0
## License change
diff --git a/models/fates/R/model2netcdf.FATES.R b/models/fates/R/model2netcdf.FATES.R
index 382c23056f1..89381b5348f 100644
--- a/models/fates/R/model2netcdf.FATES.R
+++ b/models/fates/R/model2netcdf.FATES.R
@@ -9,8 +9,8 @@
##' @param end_date End time of the simulation, not string
##' @param vars_names Names of Selected variables in PEcAn format, (e.g. c("",""))
##' @param pfts a named vector of PFT numbers where the names are PFT names
-##'
-##' @examples
+##'
+##' @examples
##' \dontrun{
##' example.output <- system.file("case.clm2.h0.2004-01-01-00000.nc",package="PEcAn.FATES")
##' model2netcdf.FATES(outdir="~/",sitelat, sitelon, start_date, end_date, vars_names, pfts)
@@ -155,13 +155,7 @@ model2netcdf.FATES <- function(outdir, sitelat, sitelon, start_date, end_date, v
}
}
}
- } ## monthly convert variable into PEcAn format
- }
- ## extract variable and long names to VAR file for PEcAn vis
- utils::write.table(sapply(ncout$var, function(x) { x$longname }),
- file = paste0(oname, ".var"),
- col.names = FALSE,
- row.names = TRUE,
- quote = FALSE)
- try(ncdf4::nc_close(ncout)) ## end of year for loop
+ } ## monthly convert variable into PEcAn format
+ try(ncdf4::nc_close(ncout))
+ } ## end of year for loop
} ## model2netcdf.FATES
\ No newline at end of file
diff --git a/models/fates/man/model2netcdf.FATES.Rd b/models/fates/man/model2netcdf.FATES.Rd
index 2fafb91db21..c427123291d 100644
--- a/models/fates/man/model2netcdf.FATES.Rd
+++ b/models/fates/man/model2netcdf.FATES.Rd
@@ -33,7 +33,6 @@ model2netcdf.FATES(
Code to convert FATES netcdf output into into CF standard
}
\examples{
-
\dontrun{
example.output <- system.file("case.clm2.h0.2004-01-01-00000.nc",package="PEcAn.FATES")
model2netcdf.FATES(outdir="~/",sitelat, sitelon, start_date, end_date, vars_names, pfts)
diff --git a/models/gday/DESCRIPTION b/models/gday/DESCRIPTION
index 181ddcdd227..c7c6bf09af0 100644
--- a/models/gday/DESCRIPTION
+++ b/models/gday/DESCRIPTION
@@ -1,7 +1,7 @@
Package: PEcAn.GDAY
Type: Package
Title: PEcAn Package for Integration of the GDAY Model
-Version: 1.7.4
+Version: 1.7.5
Authors@R: c(person("Martin", "De Kauwe", role = c("aut", "cre"),
email = "mdekauwe@gmail.com"),
person("Tony", "Gardella", role = c("aut"),
@@ -10,6 +10,8 @@ Authors@R: c(person("Martin", "De Kauwe", role = c("aut", "cre"),
Author: Martin De Kauwe
Maintainer: Martin De Kauwe
Description: This module provides functions to link the GDAY model to PEcAn.
+URL: https://pecanproject.github.io
+BugReports: https://github.com/PecanProject/pecan/issues
Depends:
PEcAn.utils
Imports:
@@ -27,3 +29,4 @@ LazyLoad: yes
LazyData: TRUE
Encoding: UTF-8
RoxygenNote: 7.3.2
+X-schema.org-keywords: GDAY, ecosystem-modeling
diff --git a/models/gday/NEWS.md b/models/gday/NEWS.md
index ab95d9d2a5d..1436c00ecbb 100644
--- a/models/gday/NEWS.md
+++ b/models/gday/NEWS.md
@@ -1,3 +1,8 @@
+# PEcAn.GDAY 1.7.5
+
+* model2netcdf.GDAY no longer writes separate `.nc.var` files for every year of output. Use `PEcAn.utils::nc_write_varfiles()` to create these as needed.
+
+
# PEcAn.GDAY 1.7.4
## License change
diff --git a/models/gday/R/model2netcdf.GDAY.R b/models/gday/R/model2netcdf.GDAY.R
index 60abe96bf6f..46ad0fb0670 100644
--- a/models/gday/R/model2netcdf.GDAY.R
+++ b/models/gday/R/model2netcdf.GDAY.R
@@ -108,12 +108,9 @@ model2netcdf.GDAY <- function(outdir, sitelat, sitelon, start_date, end_date) {
### Output netCDF data
nc <- nc_create(file.path(outdir, paste(y, "nc", sep = ".")), nc_var)
- varfile <- file(file.path(outdir, paste(y, "nc", "var", sep = ".")), "w")
for (i in seq_along(nc_var)) {
ncvar_put(nc, nc_var[[i]], output[[i]])
- cat(paste(nc_var[[i]]$name, nc_var[[i]]$longname), file = varfile, sep = "\n")
}
- close(varfile)
nc_close(nc)
} ### End of year loop
} # model2netcdf.GDAY
diff --git a/models/jules/DESCRIPTION b/models/jules/DESCRIPTION
index 7bab88720e7..40a730adef4 100644
--- a/models/jules/DESCRIPTION
+++ b/models/jules/DESCRIPTION
@@ -1,11 +1,13 @@
Package: PEcAn.JULES
Type: Package
Title: PEcAn Package for Integration of the JULES Model
-Version: 1.7.4
+Version: 1.7.5
Authors@R: c(person("Mike", "Dietze", role = c("aut", "cre"),
email = "dietze@bu.edu"),
person("University of Illinois, NCSA", role = c("cph")))
Description: This module provides functions to link the (JULES) to PEcAn.
+URL: https://pecanproject.github.io
+BugReports: https://github.com/PecanProject/pecan/issues
Imports:
PEcAn.data.atmosphere,
PEcAn.logger,
@@ -23,3 +25,4 @@ LazyLoad: yes
LazyData: FALSE
Encoding: UTF-8
RoxygenNote: 7.3.2
+X-schema.org-keywords: JULES, ecosystem-modeling
diff --git a/models/jules/NEWS.md b/models/jules/NEWS.md
index 6e492968c9e..5999278a1fd 100644
--- a/models/jules/NEWS.md
+++ b/models/jules/NEWS.md
@@ -1,3 +1,8 @@
+# PEcAn.JULES 1.7.5
+
+* model2netcdf.JULES no longer writes separate `.nc.var` files for every year of output. Use `PEcAn.utils::nc_write_varfiles()` to create these as needed.
+
+
# PEcAn.JULES 1.7.4
## License change
diff --git a/models/jules/R/model2netcdf.JULES.R b/models/jules/R/model2netcdf.JULES.R
index 733bef29e1b..5d1a742f880 100755
--- a/models/jules/R/model2netcdf.JULES.R
+++ b/models/jules/R/model2netcdf.JULES.R
@@ -15,13 +15,7 @@ model2netcdf.JULES <- function(outdir) {
for (fname in files) {
print(fname)
nc <- ncdf4::nc_open(fname, write = TRUE)
- ## extract variable and long names
- utils::write.table(sapply(nc$var, function(x) { x$longname }),
- file = paste0(fname, ".var"),
- col.names = FALSE,
- row.names = TRUE,
- quote = FALSE)
-
+
vars <- names(nc[["var"]])
# Check that frac is reported
if("frac_grid" %in% vars){
diff --git a/models/ldndc/DESCRIPTION b/models/ldndc/DESCRIPTION
index 1f267dbfbc2..15156b0c354 100644
--- a/models/ldndc/DESCRIPTION
+++ b/models/ldndc/DESCRIPTION
@@ -1,10 +1,12 @@
Package: PEcAn.LDNDC
Type: Package
Title: PEcAn package for integration of the LDNDC model
-Version: 1.0.1
+Version: 1.0.2
Authors@R: c(person("Henri", "Kajasilta", role = c("aut", "cre"),
email = "henri.kajasilta@fmi.fi"))
Description: This module provides functions to link the (LDNDC) to PEcAn.
+URL: https://pecanproject.github.io
+BugReports: https://github.com/PecanProject/pecan/issues
Imports:
dplyr,
lubridate,
@@ -26,3 +28,4 @@ LazyLoad: yes
LazyData: FALSE
Encoding: UTF-8
RoxygenNote: 7.3.2
+X-schema.org-keywords: LDNDC, ecosystem-modeling
diff --git a/models/ldndc/NEWS.md b/models/ldndc/NEWS.md
index 9317cb8e056..060746bf40d 100644
--- a/models/ldndc/NEWS.md
+++ b/models/ldndc/NEWS.md
@@ -1,3 +1,8 @@
+# PEcAn.LDNDC 1.0.2
+
+* model2netcdf.LDNDC no longer writes separate `.nc.var` files for every year of output. Use `PEcAn.utils::nc_write_varfiles()` to create these as needed.
+
+
# PEcAn.LDNDC 1.0.1
## License change
diff --git a/models/ldndc/R/model2netcdf.LDNDC.R b/models/ldndc/R/model2netcdf.LDNDC.R
index 2c9b5f89b1e..f811515754e 100644
--- a/models/ldndc/R/model2netcdf.LDNDC.R
+++ b/models/ldndc/R/model2netcdf.LDNDC.R
@@ -241,16 +241,13 @@ model2netcdf.LDNDC <- function(outdir, sitelat, sitelon, start_date, end_date, d
## Output netCDF data
nc <- ncdf4::nc_create(file.path(outdir, paste(y, "nc", sep = ".")), nc_var)
- varfile <- file(file.path(outdir, paste(y, "nc", "var", sep = ".")), "w")
ncdf4::ncatt_put(nc, "time", "bounds", "time_bounds", prec = NA)
for(i in seq_along(nc_var)){
ncdf4::ncvar_put(nc, nc_var[[i]], output[[i]])
- cat(paste(nc_var[[i]]$name, nc_var[[i]]$longname), file = varfile, sep = "\n")
}
- close(varfile)
ncdf4::nc_close(nc)
}
@@ -258,5 +255,4 @@ model2netcdf.LDNDC <- function(outdir, sitelat, sitelon, start_date, end_date, d
if (delete.raw) {
unlink(output_dir, recursive=TRUE)
}
-
} # model2netcdf.LDNDC
diff --git a/models/linkages/DESCRIPTION b/models/linkages/DESCRIPTION
index dec21ccd51d..57b45ee0005 100644
--- a/models/linkages/DESCRIPTION
+++ b/models/linkages/DESCRIPTION
@@ -1,13 +1,15 @@
Package: PEcAn.LINKAGES
Type: Package
Title: PEcAn Package for Integration of the LINKAGES Model
-Version: 1.7.4
+Version: 1.7.5
Authors@R: c(person("Mike", "Dietze", role = c("aut"),
email = "dietze@bu.edu"),
person("Ann", "Raiho", role = c("aut", "cre"),
email = "araiho@nd.edu"),
person("University of Illinois, NCSA", role = c("cph")))
Description: This module provides functions to link the (LINKAGES) to PEcAn.
+URL: https://pecanproject.github.io
+BugReports: https://github.com/PecanProject/pecan/issues
Imports:
PEcAn.data.land,
PEcAn.DB,
@@ -30,3 +32,4 @@ LazyLoad: yes
LazyData: FALSE
Encoding: UTF-8
RoxygenNote: 7.3.2
+X-schema.org-keywords: LINKAGES, ecosystem-modeling
diff --git a/models/linkages/NEWS.md b/models/linkages/NEWS.md
index 0753c8ed73e..ead6e4e3995 100644
--- a/models/linkages/NEWS.md
+++ b/models/linkages/NEWS.md
@@ -1,3 +1,8 @@
+# PEcAn.LINKAGES 1.7.5
+
+* model2netcdf.LINKAGES no longer writes separate `.nc.var` files for every year of output. Use `PEcAn.utils::nc_write_varfiles()` to create these as needed.
+
+
# PEcAn.LINKAGES 1.7.4
## License change
diff --git a/models/linkages/R/model2netcdf.LINKAGES.R b/models/linkages/R/model2netcdf.LINKAGES.R
index dc27a25a6de..b2581479a87 100644
--- a/models/linkages/R/model2netcdf.LINKAGES.R
+++ b/models/linkages/R/model2netcdf.LINKAGES.R
@@ -107,18 +107,15 @@ model2netcdf.LINKAGES <- function(outdir, sitelat, sitelon, start_date = NULL,
longname = paste(pft_names, collapse=","))
# ******************** Declare netCDF variables ********************#
-
+
### Output netCDF data
nc <- ncdf4::nc_create(file.path(outdir, paste(formatC(years[y], width = 4, format = "d", flag = "0"), "nc", sep = ".")), nc_var)
- varfile <- file(file.path(outdir, paste(formatC(years[y], width = 4, format = "d", flag = "0"), "nc", "var", sep = ".")), "w")
-
+
for (i in seq_along(nc_var)) {
print(i)
ncdf4::ncvar_put(nc, nc_var[[i]], output[[i]])
- cat(paste(nc_var[[i]]$name, nc_var[[i]]$longname), file = varfile, sep = "\n")
}
- close(varfile)
ncdf4::nc_close(nc)
-
+
} ### End of year loop
} # model2netcdf.LINKAGES
diff --git a/models/lpjguess/DESCRIPTION b/models/lpjguess/DESCRIPTION
index 374b87dea2e..856ab97e256 100644
--- a/models/lpjguess/DESCRIPTION
+++ b/models/lpjguess/DESCRIPTION
@@ -1,13 +1,15 @@
Package: PEcAn.LPJGUESS
Type: Package
Title: PEcAn Package for Integration of the LPJ-GUESS Model
-Version: 1.8.0
+Version: 1.9.0
Authors@R: c(person("Istem", "Fer", role = c("aut", "cre"),
email = "istem.fer@fmi.fi"),
person("Tony", "Gardella", role = c("aut"),
email = "tonygard@bu.edu"),
person("University of Illinois, NCSA", role = c("cph")))
Description: This module provides functions to link LPJ-GUESS to PEcAn.
+URL: https://pecanproject.github.io
+BugReports: https://github.com/PecanProject/pecan/issues
Imports:
PEcAn.logger,
PEcAn.remote,
@@ -29,3 +31,4 @@ LazyData: FALSE
Encoding: UTF-8
RoxygenNote: 7.3.2
LinkingTo: Rcpp
+X-schema.org-keywords: LPJ-GUESS, ecosystem-modeling
diff --git a/models/lpjguess/NAMESPACE b/models/lpjguess/NAMESPACE
index 56bfda9e366..e814d6a2aec 100644
--- a/models/lpjguess/NAMESPACE
+++ b/models/lpjguess/NAMESPACE
@@ -6,10 +6,14 @@ export(met2model.LPJGUESS)
export(model2netcdf.LPJGUESS)
export(pecan2lpjguess)
export(readStateBinary)
+export(read_binary_LPJGUESS)
+export(read_restart.LPJGUESS)
export(split_inputs.LPJGUESS)
export(update_state_LPJGUESS)
export(write.config.LPJGUESS)
export(write.insfile.LPJGUESS)
+export(write_binary_LPJGUESS)
+export(write_restart.LPJGUESS)
importFrom(PEcAn.utils,days_in_year)
importFrom(Rcpp,sourceCpp)
importFrom(ncdf4,nc_close)
diff --git a/models/lpjguess/NEWS.md b/models/lpjguess/NEWS.md
index c400855ccb4..70e0f93fc76 100644
--- a/models/lpjguess/NEWS.md
+++ b/models/lpjguess/NEWS.md
@@ -1,3 +1,10 @@
+# PEcAn.LPJGUESS 1.9.0
+
+* model2netcdf.LPJGUESS no longer writes separate `.nc.var` files for every year of output. Use `PEcAn.utils::nc_write_varfiles()` to create these as needed.
+* Support for model restarts via new functions read_restart.LPJGUESS, read_binary_LPJGUESS, write_restart.LPJGUESS and write_binary_LPJGUESS (#3533, @yinghaoSunn)
+* CRU driver tweaks
+
+
# PEcAn.LPJGUESS 1.8.0
* PEcAn.LPJGUESS is now distributed under the BSD three-clause license instead of the NCSA Open Source license.
diff --git a/models/lpjguess/R/model2netcdf.LPJGUESS.R b/models/lpjguess/R/model2netcdf.LPJGUESS.R
index d01ef7010ad..ef24fb53d7c 100644
--- a/models/lpjguess/R/model2netcdf.LPJGUESS.R
+++ b/models/lpjguess/R/model2netcdf.LPJGUESS.R
@@ -110,13 +110,10 @@ model2netcdf.LPJGUESS <- function(outdir, sitelat, sitelon, start_date, end_date
### Output netCDF data
nc <- ncdf4::nc_create(file.path(outdir, paste(y, "nc", sep = ".")), nc_var)
- varfile <- file(file.path(outdir, paste(y, "nc", "var", sep = ".")), "w")
for (i in seq_along(nc_var)) {
# print(i)
ncdf4::ncvar_put(nc, nc_var[[i]], output[[i]])
- cat(paste(nc_var[[i]]$name, nc_var[[i]]$longname), file = varfile, sep = "\n")
}
- close(varfile)
ncdf4::nc_close(nc)
} ### End of year loop
} # model2netcdf.LPJGUESS
diff --git a/models/lpjguess/R/read_restart.LPJGUESS.R b/models/lpjguess/R/read_restart.LPJGUESS.R
index d828137c8f1..a8bd857f302 100644
--- a/models/lpjguess/R/read_restart.LPJGUESS.R
+++ b/models/lpjguess/R/read_restart.LPJGUESS.R
@@ -1,13 +1,25 @@
-
-# developing
-# outdir = "/fs/data2/output//PEcAn_1000010473/out"
-# runid = 1002656839
-# stop.time = "1960-12-31 23:59:59 UTC"
-# load("/fs/data2/output/PEcAn_1000010473/SDAsettings_develop.Rdata")
-# var.names = c("AGB.pft", "TotSoilCarb")
-# load("/fs/data2/output/PEcAn_1000010473/SDAparams_develop.Rdata")
-
-
+#' Read Restart for LPJGUESS
+#'
+#' @param outdir output directory
+#' @param runid run ID
+#' @param stop.time year that is being read
+#' @param settings PEcAn settings object
+#' @param var.names var.names to be extracted
+#' @param params passed on to return value
+#'
+#' @return X_tmp vector of forecasts
+#' @export
+#' @examples
+#' \dontrun{
+#' rx <- read_restart.LPJGUESS(
+#' outdir = "/projectnb/…/LPJ_output",
+#' runid = "123456",
+#' stop.time = as.POSIXct("2001-12-31 23:59:59", tz = "UTC"),
+#' settings = settings,
+#' var.names = c("AGB.pft"),
+#' params = params)
+#' }
+#' @author Istem Fer, Yinghao Sun
read_restart.LPJGUESS <- function(outdir, runid, stop.time, settings, var.names, params){
# which LPJ-GUESS version, the structure of state file depends a lot on version
@@ -24,7 +36,6 @@ read_restart.LPJGUESS <- function(outdir, runid, stop.time, settings, var.names,
# read binary state file, takes a couple of minutes
Gridcell_container <- read_binary_LPJGUESS(outdir = file.path(outdir, runid),
version = lpjguess_ver)
-
forecast <- list()
# additional varnames for LPJ-GUESS?
@@ -33,8 +44,8 @@ read_restart.LPJGUESS <- function(outdir, runid, stop.time, settings, var.names,
if (var_name == "AGB.pft") {
- cmass_sap_perpft <- calculateGridcellVariablePerPFT(model.state = Gridcell_container, variable = "cmass_sap")
- cmass_heart_perpft <- calculateGridcellVariablePerPFT(model.state = Gridcell_container, variable = "cmass_heart")
+ cmass_sap_perpft <- calculateGridcellVariablePerPFT(model.state = Gridcell_container$state, variable = "cmass_sap")
+ cmass_heart_perpft <- calculateGridcellVariablePerPFT(model.state = Gridcell_container$state, variable = "cmass_heart")
cmass_wood <- cmass_sap_perpft + cmass_heart_perpft
cmass_wood <- PEcAn.utils::ud_convert(cmass_wood, "kg/m^2", "Mg/ha")
@@ -45,11 +56,12 @@ read_restart.LPJGUESS <- function(outdir, runid, stop.time, settings, var.names,
cmass_abvg_wood <- cmass_wood - cmass_blwg_wood
forecast[[length(forecast) + 1]] <- cmass_abvg_wood
- names(forecast[[length(forecast)]]) <- paste0("AGB.pft.", unlist(Gridcell_container$meta_data$pft))
+ names(forecast[[length(forecast)]]) <- paste0("AGB.pft.", unlist(Gridcell_container$state$meta_data$pft))
}
}
+  # params$LPJGUESS_state includes state, pos_list, siz_list
params$LPJGUESS_state <- Gridcell_container
PEcAn.logger::logger.info("Finished --", runid)
diff --git a/models/lpjguess/R/read_state.R b/models/lpjguess/R/read_state.R
index c1f0ee149bc..7d54c4a8525 100644
--- a/models/lpjguess/R/read_state.R
+++ b/models/lpjguess/R/read_state.R
@@ -1,6 +1,3 @@
-
-######################## Helper functions ########################
-
#' Find Stream Variable
#'
#' A helper function that lists streamed variables. It returns the names of streamed variables.
@@ -8,6 +5,7 @@
#' @param file_in A character vector representing the file content to search through.
#' @param line_nos A numeric vector of length 2, specifying the start and end lines to search for streamed variables.
#' @return A character vector of streamed variable names.
+#' @keywords internal
# helper function that lists streamed variables, it just returns the names, types are checked by other fucntion
find_stream_var <- function(file_in, line_nos){
@@ -85,6 +83,7 @@ find_stream_var <- function(file_in, line_nos){
#' @param pattern A character string pattern to look for in the file.
#' @return A numeric vector of length 2, giving the start and end line numbers.
#' @importFrom stringr str_match
+#' @keywords internal
# helper function that scans LPJ-GUESS that returns the beginning and the ending lines of serialized object
serialize_starts_ends <- function(file_in, pattern = "void Gridcell::serialize"){
# find the starting line from the given pattern
@@ -159,6 +158,7 @@ find_closing <- function(find = "}", line_no, file_in, if_else_check = FALSE){
#' @return A numeric value representing the size (number of streamed variables).
#' @importFrom stringr str_match
#' @importFrom utils glob2rx
+#' @keywords internal
# helper function that determines the stream size to read
find_stream_size <- function(current_stream_type, guessh_in, LPJ_GUESS_TYPES, LPJ_GUESS_CONST_INTS){
@@ -322,6 +322,7 @@ read_state <- function(file_path) {
#' @param LPJ_GUESS_TYPES A character vector of recognized LPJ-GUESS types.
#' @param guessh_in A character vector of LPJ-GUESS header file content.
#' @return A character string indicating the stream type.
+#' @keywords internal
# helper function to decide the type of the stream
# this function relies on the architecture of LPJ-GUESS and has bunch of harcoded checks, see model documentation
find_stream_type <- function(class = NULL, current_stream_var, LPJ_GUESS_CLASSES, LPJ_GUESS_TYPES, guessh_in){
@@ -429,13 +430,8 @@ find_stream_type <- function(class = NULL, current_stream_var, LPJ_GUESS_CLASSES
return(list(type = gsub(" ", "", stream_type), name = stream_name, substring = sub_string))
} # find_stream_type
-
-###################################### READ STATE
-
-
# this fcn is for potential natural vegetation only
# when there is landcover, there will be more stand types
-
# also for cohort mode only
# Gridcell: Top-level object containing all dynamic and static data for a particular gridcell
@@ -450,26 +446,31 @@ find_stream_type <- function(class = NULL, current_stream_var, LPJ_GUESS_CLASSES
# Soil : Stores state variables for soils and the snow pack. One object of class Soil is defined for each patch.
# Fluxes : The Fluxes class stores accumulated monthly and annual fluxes. One object of type Fluxes is defined for each patch.
# Individual : Stores state variables for an average individual plant. In cohort mode, it is the average individual of a cohort of plants approximately the same age and from the same patch.
-
-
-# test path
-#outdir <- "/fs/data2/output/PEcAn_1000010473/out/1002656304"
-
-# outdir, at least model version, maybe also settings
+#
#' Read Binary File for LPJ-GUESS
#'
#' Reads a binary file formatted for LPJ-GUESS and extracts relevant data.
#'
-#' @param outdir A character string specifying the output directory containing the binary state files.
+#' @param outdir A character string specifying the output directory containing the binary state files (".state" and "meta.bin") to read
#' @param version A character string specifying the LPJ-GUESS version (default is "PalEON").
#' @importFrom stringr str_match
#' @importFrom utils glob2rx
#' @return A matrix or list containing the extracted data.
+#' @export
+#' @author Istem Fer, Yinghao Sun
read_binary_LPJGUESS <- function(outdir, version = "PalEON"){
+ # ## FOR TEST
+ # outdir <- "/projectnb/dietzelab/yinghao/try/write_test/out"
+ # rundir <- "/projectnb/dietzelab/yinghao/try/write_test/run"
+
# find rundir too, params.ins is in there and we need to get some values from there
rundir <- file.path(dirname(dirname(outdir)), "run", basename(outdir))
+ # create lists to store byte offset and byte size for each variable
+ pos_list <- list()
+ siz_list <- list()
+
# guess.cpp has the info of what is being written
guesscpp_name <- paste0("guess.", version, ".cpp") # these are gonna be in the package guess.VERSION.cpp
guesscpp_in <- readLines(con = system.file(guesscpp_name, package = "PEcAn.LPJGUESS"), n = -1)
@@ -629,6 +630,10 @@ read_binary_LPJGUESS <- function(outdir, version = "PalEON"){
Gridcell <- list()
level <- "Gridcell"
for(g_i in seq_along(streamed_vars_gridcell)){ # Gridcell-loop starts
+
+ # # Debug for empty nstands
+ # if(g_i == 7) browser()
+
current_stream <- streamed_vars_gridcell[g_i]
# weird, it doesn't go into Gridcell st
if(current_stream == "st[i]") next #current_stream <- "Gridcellst"
@@ -643,7 +648,12 @@ read_binary_LPJGUESS <- function(outdir, version = "PalEON"){
# note that this is streamed under Gridcell, not Stand in guess.cpp,
# but I think this info needs to go together with the Stand sublist
# so prepend landcovertype to the streamed_vars_stand EDIT: I'll actually just read it here
- Gridcell[["Stand"]][["landcovertype"]] <- readBin(zz, what = integer(), n = 1, size = 4)
+
+ ## Past version
+ #Gridcell[["Stand"]][["landcovertype"]] <- readBin(zz, what = integer(), n = 1, size = 4)
+
+ # # Landcover will be read again under stand. So "landcovertype" here is meaningless but we need to read/write.
+ Gridcell[["landcovertype"]] <- readBin(zz, what = integer(), n = 1, size = 4)
num_stnd <- as.numeric(Gridcell$nstands)
Gridcell[["Stand"]] <- vector("list", num_stnd)
@@ -652,7 +662,6 @@ read_binary_LPJGUESS <- function(outdir, version = "PalEON"){
# "(*this)[*]" points to different things under different levels, here it is stand
if(grepl(utils::glob2rx("(*this)[*]"), current_stream)){ # note that first else-part will be evaluated considering the order in guess.cpp
-
# STAND
level <- "Stand"
current_stream <- "Stand"
@@ -669,15 +678,16 @@ read_binary_LPJGUESS <- function(outdir, version = "PalEON"){
for(stnd_i in seq_len(num_stnd)){ #looping over the stands
for(svs_i in seq_along(streamed_vars_stand)){ # looping over the streamed stand vars
-
current_stream <- streamed_vars_stand[svs_i]
if(grepl(utils::glob2rx("pft[*]"), current_stream)) current_stream <- paste0(level, "pft") # i counter might change, using wildcard
if(current_stream == "nobj" & level == "Stand"){
- # nobj points to different things under different levels, here it is the number of patches
+ # nobj: Number of Patches
# number of patches is set through insfiles, read by write.configs and passed to this fcn
# but it's also written to the state file, need to move bytes
+ pos <- seek(zz)
nofpatch <- readBin(zz, integer(), 1, size = 4)
+ # browser()
if(npatches == nofpatch){ # also not a bad place to check if everything is going fine so far
Gridcell[["Stand"]][[stnd_i]]$npatches <- npatches
#Gridcell[["Stand"]] <- vector("list", npatches)
@@ -687,8 +697,8 @@ read_binary_LPJGUESS <- function(outdir, version = "PalEON"){
next
}
- # "(*this)[*]" points to different things under different levels, here it is patch
- if(grepl(utils::glob2rx("(*this)[*]"), current_stream)){
+ ##### "(*this)[*]" points to different things under different levels, here it is PATCH ####
+ if(grepl(utils::glob2rx("(*this)[*]"), current_stream)){
# PATCH
level <- "Patch"
current_stream <- "Patch"
@@ -704,6 +714,7 @@ read_binary_LPJGUESS <- function(outdir, version = "PalEON"){
for(ptch_i in seq_len(npatches)){ #looping over the patches
for(svp_i in seq_along(streamed_vars_patch)){ #looping over the streamed patch vars
+ # if(svp_i == 17) browser()
current_stream <- streamed_vars_patch[svp_i]
if(grepl(utils::glob2rx("pft[*]"), current_stream)) current_stream <- paste0(level, "pft") # i counter might change, using wildcard
@@ -734,16 +745,20 @@ read_binary_LPJGUESS <- function(outdir, version = "PalEON"){
streamed_vars_veg <- find_stream_var(file_in = guesscpp_in, line_nos = beg_end)
# NOTE : Unlike other parts, this bit is a lot less generalized!!!
- # I'm gonna asumme Vegetation class won't change much in the future
+ # I'm gonna assume Vegetation class won't change much in the future
# indiv.pft.id and indiv needs to be looped over nobj times
if(!setequal(streamed_vars_veg, c("nobj", "indiv.pft.id", "indiv"))){
PEcAn.logger::logger.severe("Vegetation class object changed in this model version, you need to fix read.state")
}
# nobj points to different things under different levels, here it is the number of individuals
+ pos <- seek(zz)
number_of_individuals <- readBin(zz, integer(), 1, size = 4)
Gridcell[["Stand"]][[stnd_i]][["Patch"]][[ptch_i]][["Vegetation"]] <- list()
Gridcell[["Stand"]][[stnd_i]][["Patch"]][[ptch_i]][["Vegetation"]][["number_of_individuals"]] <- number_of_individuals
+ key <- file.path("Gridcell", "Stand", stnd_i, "Patch", ptch_i, "Vegetation", "number_of_individuals", fsep = "/")
+ pos_list[[key]] <- pos
+ siz_list[[key]] <- 4
# few checks for sensible vals
if(number_of_individuals < 0 | number_of_individuals > 10000){ # should there be an upper limit here too?
@@ -764,7 +779,12 @@ read_binary_LPJGUESS <- function(outdir, version = "PalEON"){
for(indv_i in seq_len(number_of_individuals)){
Gridcell[["Stand"]][[stnd_i]][["Patch"]][[ptch_i]][["Vegetation"]][["Individuals"]][[indv_i]] <- list()
# which PFT is this?
+ pos <- seek(zz)
Gridcell[["Stand"]][[stnd_i]][["Patch"]][[ptch_i]][["Vegetation"]][["Individuals"]][[indv_i]][["indiv.pft.id"]] <- readBin(zz, integer(), 1, size = 4)
+ key <- file.path("Gridcell", "Stand", stnd_i, "Patch", ptch_i, "Vegetation", "Individuals", indv_i, "indiv.pft.id", fsep = "/")
+ pos_list[[key]] <- pos
+ siz_list[[key]] <- 4
+
# read all the individual class
for(svi_i in seq_along(streamed_vars_indv)){ #
@@ -797,11 +817,14 @@ read_binary_LPJGUESS <- function(outdir, version = "PalEON"){
current_stream_specs <- find_stream_size(current_stream_type, guessh_in, LPJ_GUESS_TYPES, LPJ_GUESS_CONST_INTS)
# and read!
-
+ pos <- seek(zz)
Gridcell[["Stand"]][[stnd_i]][["Patch"]][[ptch_i]][["Vegetation"]][["Individuals"]][[indv_i]][["PhotosynthesisResult"]][[current_stream_type$name]] <- readBin(con = zz,
what = current_stream_specs$what,
n = current_stream_specs$n,
size = current_stream_specs$size)
+ key <- file.path("Gridcell", "Stand", stnd_i, "Patch", ptch_i, "Vegetation", "Individuals", indv_i, "PhotosynthesisResult", current_stream_type$name, fsep = "/")
+ pos_list[[key]] <- pos
+ siz_list[[key]] <- current_stream_specs$size
}# streamed_vars_photo-loop ends
@@ -812,16 +835,24 @@ read_binary_LPJGUESS <- function(outdir, version = "PalEON"){
current_stream_specs <- find_stream_size(current_stream_type, guessh_in, LPJ_GUESS_TYPES, LPJ_GUESS_CONST_INTS)
if(current_stream_specs$single){
+ pos <- seek(zz)
Gridcell[["Stand"]][[stnd_i]][["Patch"]][[ptch_i]][["Vegetation"]][["Individuals"]][[indv_i]][[current_stream_type$name]] <- readBin(con = zz,
what = current_stream_specs$what,
n = current_stream_specs$n,
size = current_stream_specs$size)
+ key <- file.path("Gridcell", "Stand", stnd_i, "Patch", ptch_i, "Vegetation", "Individuals", indv_i, current_stream_type$name, fsep = "/")
+ pos_list[[key]] <- pos
+ siz_list[[key]] <- current_stream_specs$size
}else{
for(css.i in seq_along(current_stream_specs$what)){
+ pos <- seek(zz)
Gridcell[["Stand"]][[stnd_i]][["Patch"]][[ptch_i]][["Vegetation"]][["Individuals"]][[indv_i]][[current_stream_specs$names[css.i]]]<- readBin(con = zz,
what = current_stream_specs$what[css.i],
n = current_stream_specs$n[css.i],
size = current_stream_specs$size[css.i])
+ key <- file.path("Gridcell", "Stand", stnd_i, "Patch", ptch_i, "Vegetation", "Individuals", indv_i, current_stream_specs$names[css.i], fsep = "/")
+ pos_list[[key]] <- pos
+ siz_list[[key]] <- current_stream_specs$size[css.i]
}
}
}
@@ -845,18 +876,33 @@ read_binary_LPJGUESS <- function(outdir, version = "PalEON"){
# parse from guess.h
PerPFTFluxType <- c("NPP", "GPP", "RA", "ISO", "MON")
Gridcell[["Stand"]][[stnd_i]][["Patch"]][[ptch_i]][["Fluxes"]][["annual_fluxes_per_pft"]] <- list()
- key1 <- readBin(zz, "integer", 1, 8)
+ # The number of PFTS
+ pos <- seek(zz)
+ key1 <- readBin(zz, "integer", 1, 8)
Gridcell[["Stand"]][[stnd_i]][["Patch"]][[ptch_i]][["Fluxes"]][["annual_fluxes_per_pft"]][["n_pft"]] <- key1
- for(fpft_i in seq_len(key1)){ # key1 11 PFTs
- Gridcell[["Stand"]][[stnd_i]][["Patch"]][[ptch_i]][["Fluxes"]][["annual_fluxes_per_pft"]][[fpft_i]] <- list()
+ key <- file.path("Gridcell", "Stand", stnd_i, "Patch", ptch_i, "Fluxes", "annual_fluxes_per_pft", "n_pft", fsep = "/")
+ pos_list[[key]] <- pos
+ siz_list[[key]] <- 8
+
+ for(fpft_i in seq_len(key1)){ # key1 12 PFTs
+ Gridcell[["Stand"]][[stnd_i]][["Patch"]][[ptch_i]][["Fluxes"]][["annual_fluxes_per_pft"]][[paste0("pft", fpft_i)]] <- list()
+ pos <- seek(zz)
key2 <- readBin(zz, "integer", 1, 8)
if(key2 > 10000){ #make sure you dind't read a weird number, this is supposed to be number of fluxes per pft, can't have too many
PEcAn.logger::logger.severe("Number of fluxes per pft read from the state file is too high. Check read.state function")
}
- Gridcell[["Stand"]][[stnd_i]][["Patch"]][[ptch_i]][["Fluxes"]][["annual_fluxes_per_pft"]][[fpft_i]][["key2"]] <- key2
+ Gridcell[["Stand"]][[stnd_i]][["Patch"]][[ptch_i]][["Fluxes"]][["annual_fluxes_per_pft"]][[paste0("pft", fpft_i)]][["key2"]] <- key2
+ key <- file.path("Gridcell", "Stand", stnd_i, "Patch", ptch_i, "Fluxes", "annual_fluxes_per_pft", paste0("pft", fpft_i), "key2", fsep = "/")
+ pos_list[[key]] <- pos
+ siz_list[[key]] <- 8
+
for(flux_i in seq_len(key2)){
# is this double?
- Gridcell[["Stand"]][[stnd_i]][["Patch"]][[ptch_i]][["Fluxes"]][["annual_fluxes_per_pft"]][[fpft_i]][[PerPFTFluxType[flux_i]]] <- readBin(zz, "double", 1, 8)
+ pos <- seek(zz)
+ Gridcell[["Stand"]][[stnd_i]][["Patch"]][[ptch_i]][["Fluxes"]][["annual_fluxes_per_pft"]][[paste0("pft", fpft_i)]][[PerPFTFluxType[flux_i]]] <- readBin(zz, "double", 1, 8)
+ key <- file.path("Gridcell", "Stand", stnd_i, "Patch", ptch_i, "Fluxes", "annual_fluxes_per_pft", paste0("pft", fpft_i), PerPFTFluxType[flux_i], fsep = "/")
+ pos_list[[key]] <- pos
+ siz_list[[key]] <- 8
}
}
@@ -864,16 +910,25 @@ read_binary_LPJGUESS <- function(outdir, version = "PalEON"){
# double monthly_fluxes_patch[12][NPERPATCHFLUXTYPES];
# maybe read this as a matrix?
n_monthly_fluxes_patch <- 12 * LPJ_GUESS_CONST_INTS$val[LPJ_GUESS_CONST_INTS$var =="PerPatchFluxType"]
+ pos <- seek(zz)
Gridcell[["Stand"]][[stnd_i]][["Patch"]][[ptch_i]][["Fluxes"]][["monthly_fluxes_patch"]] <- readBin(zz, "double", n_monthly_fluxes_patch, 8)
+ key <- file.path("Gridcell", "Stand", stnd_i, "Patch", ptch_i, "Fluxes", "monthly_fluxes_patch", fsep = "/")
+ pos_list[[key]] <- pos
+ siz_list[[key]] <- 8
# monthly_fluxes_pft read as a vector at once
# double monthly_fluxes_pft[12][NPERPFTFLUXTYPES];
# maybe read this as a matrix?
n_monthly_fluxes_pft <- 12 * LPJ_GUESS_CONST_INTS$val[LPJ_GUESS_CONST_INTS$var =="PerPFTFluxType"]
+ pos <- seek(zz)
Gridcell[["Stand"]][[stnd_i]][["Patch"]][[ptch_i]][["Fluxes"]][["monthly_fluxes_pft"]] <- readBin(zz, "double", n_monthly_fluxes_pft, 8)
+ key <- file.path("Gridcell", "Stand", stnd_i, "Patch", ptch_i, "Fluxes", "monthly_fluxes_pft", fsep = "/")
+ pos_list[[key]] <- pos
+ siz_list[[key]] <- 8
}else{
- # NOT VEGETATION OR FLUX
+ # NOT VEGETATION OR FLUX.
+ # Patchpft or Soil in this case
streamed_vars <- find_stream_var(file_in = guesscpp_in, line_nos = beg_end)
# NO CROPS, NATURAL VEG
if("*cropphen" %in% streamed_vars) streamed_vars <- streamed_vars[!(streamed_vars == "*cropphen")]
@@ -883,6 +938,12 @@ read_binary_LPJGUESS <- function(outdir, version = "PalEON"){
Gridcell[["Stand"]][[stnd_i]][["Patch"]][[ptch_i]][[current_stream_type$name]][[varname]] <- vector("list", num_pft)
}
+ if (current_stream == "soil"){
+ past_stream <- tools::toTitleCase(current_stream)
+ } else{
+ past_stream <- current_stream
+ }
+
# maybe try modifying this bit later to make it a function
for(pft_i in seq_len(num_pft)){
for(sv_i in seq_along(streamed_vars)){
@@ -921,10 +982,17 @@ read_binary_LPJGUESS <- function(outdir, version = "PalEON"){
current_stream_specs <- find_stream_size(current_stream_type, guessh_in, LPJ_GUESS_TYPES, LPJ_GUESS_CONST_INTS)
if(current_stream_specs$single){
+ pos <- seek(zz)
Gridcell[["Stand"]][[stnd_i]][["Patch"]][[ptch_i]][["Soil"]][["Sompool"]][[current_stream_type$name]][[som_i]] <- readBin(con = zz,
what = current_stream_specs$what,
n = current_stream_specs$n,
size = current_stream_specs$size)
+
+
+ key <- file.path("Gridcell", "Stand", stnd_i, "Patch", ptch_i, "Soil", "Sompool", current_stream_type$name, som_i, fsep = "/")
+ pos_list[[key]] <- pos
+ siz_list[[key]] <- current_stream_specs$size
+
}else{
PEcAn.logger::logger.severe("Historic under sompool.") # Not expecting any
}
@@ -935,16 +1003,24 @@ read_binary_LPJGUESS <- function(outdir, version = "PalEON"){
current_stream_specs <- find_stream_size(current_stream_type, guessh_in, LPJ_GUESS_TYPES, LPJ_GUESS_CONST_INTS)
# and read!
if(current_stream_specs$single){ # maybe use current_stream in sublist names to find correct place
- Gridcell[["Stand"]][[stnd_i]][["Patch"]][[ptch_i]][[length( Gridcell[["Stand"]][[stnd_i]][["Patch"]][[ptch_i]])]][[current_stream_type$name]][[pft_i]] <- readBin(con = zz,
+ pos <- seek(zz)
+ Gridcell[["Stand"]][[stnd_i]][["Patch"]][[ptch_i]][[length(Gridcell[["Stand"]][[stnd_i]][["Patch"]][[ptch_i]])]][[current_stream_type$name]][[pft_i]] <- readBin(con = zz,
what = current_stream_specs$what,
n = current_stream_specs$n,
size = current_stream_specs$size)
+ key <- file.path("Gridcell", "Stand", stnd_i, "Patch", ptch_i, past_stream, current_stream_type$name, pft_i, fsep = "/")
+ pos_list[[key]] <- pos
+ siz_list[[key]] <- current_stream_specs$size
}else{ # only for historic type?
for(css.i in seq_along(current_stream_specs$what)){ # maybe use current_stream in sublist names to find correct place
- Gridcell[["Stand"]][[stnd_i]][["Patch"]][[ptch_i]][[length( Gridcell[["Stand"]][[stnd_i]][["Patch"]][[ptch_i]])]][[current_stream_specs$names[css.i]]]<- readBin(con = zz,
+ pos <- seek(zz)
+ Gridcell[["Stand"]][[stnd_i]][["Patch"]][[ptch_i]][[length(Gridcell[["Stand"]][[stnd_i]][["Patch"]][[ptch_i]])]][[current_stream_specs$names[css.i]]]<- readBin(con = zz,
what = current_stream_specs$what[css.i],
n = current_stream_specs$n[css.i],
size = current_stream_specs$size[css.i])
+ key <- file.path("Gridcell", "Stand", stnd_i, "Patch", ptch_i, past_stream, current_stream_type$names[css.i], pft_i, fsep = "/")
+ pos_list[[key]] <- pos
+ siz_list[[key]] <- current_stream_specs$size[css.i]
}
}
}
@@ -958,18 +1034,25 @@ read_binary_LPJGUESS <- function(outdir, version = "PalEON"){
current_stream_specs <- find_stream_size(current_stream_type, guessh_in, LPJ_GUESS_TYPES, LPJ_GUESS_CONST_INTS)
# and read!
if(current_stream_specs$single){
-
+ pos <- seek(zz)
Gridcell[["Stand"]][[stnd_i]][["Patch"]][[ptch_i]][[current_stream_type$name]] <- readBin(con = zz,
what = current_stream_specs$what,
n = current_stream_specs$n,
size = current_stream_specs$size)
+ key <- file.path("Gridcell", "Stand", stnd_i, "Patch", ptch_i, current_stream_type$name, fsep = "/")
+ pos_list[[key]] <- pos
+ siz_list[[key]] <- current_stream_specs$size
}else{ # probably don't need this but let's keep
for(css_i in seq_along(current_stream_specs$what)){
- # CHANGE ALL THESE HISTORIC TYPES SO THAT cirrent_index and full goes together with the variable
+ # CHANGE ALL THESE HISTORIC TYPES SO THAT current_index and full goes together with the variable
+ pos <- seek(zz)
Gridcell[["Stand"]][[stnd_i]][["Patch"]][[ptch_i]][[current_stream_specs$names[css_i]]] <- readBin(con = zz,
what = current_stream_specs$what[css_i],
n = current_stream_specs$n[css_i],
size = current_stream_specs$size[css_i])
+ key <- file.path("Gridcell", "Stand", stnd_i, "Patch", ptch_i, current_stream_specs$names[css_i], fsep = "/")
+ pos_list[[key]] <- pos
+ siz_list[[key]] <- current_stream_specs$size[css_i]
}
}
}# end if-class within Patch
@@ -988,6 +1071,9 @@ read_binary_LPJGUESS <- function(outdir, version = "PalEON"){
Gridcell[["Stand"]][[stnd_i]][[length(Gridcell[["Stand"]][[stnd_i]])+1]] <- list()
names(Gridcell[["Stand"]][[stnd_i]])[length(Gridcell[["Stand"]][[stnd_i]])] <- current_stream_type$name
+ # Save the past stream like Standpft
+ past_stream <- current_stream
+
if(current_stream_type$type == "class"){
# CLASS
@@ -1019,16 +1105,24 @@ read_binary_LPJGUESS <- function(outdir, version = "PalEON"){
current_stream_specs <- find_stream_size(current_stream_type, guessh_in, LPJ_GUESS_TYPES, LPJ_GUESS_CONST_INTS)
# and read!
if(current_stream_specs$single){
+ pos <- seek(zz)
Gridcell[["Stand"]][[stnd_i]][[length(Gridcell[["Stand"]][[stnd_i]])]][[current_stream_type$name]][[pft_i]] <- readBin(con = zz,
what = current_stream_specs$what,
n = current_stream_specs$n,
size = current_stream_specs$size)
+ key <- file.path("Gridcell", "Stand", stnd_i, past_stream, current_stream_type$name, pft_i, fsep = "/")
+ pos_list[[key]] <- pos
+ siz_list[[key]] <- current_stream_specs$size
}else{
for(css.i in seq_along(current_stream_specs$what)){
+ pos <- seek(zz)
Gridcell[[length(Gridcell)]][[current_stream_type$name]][[pft_i]][[current_stream_specs$names[css.i]]]<- readBin(con = zz,
what = current_stream_specs$what[css.i],
n = current_stream_specs$n[css.i],
size = current_stream_specs$size[css.i])
+ key <- file.path("Gridcell", "Stand", stnd_i, past_stream, current_stream_type$name[css.i], fsep = "/")
+ pos_list[[key]] <- pos
+ siz_list[[key]] <- current_stream_specs$size[css.i]
}
}
}
@@ -1040,16 +1134,24 @@ read_binary_LPJGUESS <- function(outdir, version = "PalEON"){
current_stream_specs <- find_stream_size(current_stream_type, guessh_in, LPJ_GUESS_TYPES, LPJ_GUESS_CONST_INTS)
# and read!
if(current_stream_specs$single){
+ pos <- seek(zz)
Gridcell[["Stand"]][[stnd_i]][[current_stream_type$name]] <- readBin(con = zz,
what = current_stream_specs$what,
n = current_stream_specs$n,
size = current_stream_specs$size)
+ key <- file.path("Gridcell", "Stand", stnd_i, current_stream_type$name, fsep = "/")
+ pos_list[[key]] <- pos
+ siz_list[[key]] <- current_stream_specs$size
}else{ # probably don't need this but let's keep
for(css_i in seq_along(current_stream_specs$what)){
+ pos <- seek(zz)
Gridcell[[length(Gridcell)]][[current_stream_specs$names[css_i]]] <- readBin(con = zz,
what = current_stream_specs$what[css_i],
n = current_stream_specs$n[css_i],
size = current_stream_specs$size[css_i])
+ key <- file.path("Gridcell", "Stand", stnd_i, current_stream_type$name, fsep = "/")
+ pos_list[[key]] <- pos
+ siz_list[[key]] <- current_stream_specs$size[css_i]
}
}
}# end if-class within Stand
@@ -1062,6 +1164,7 @@ read_binary_LPJGUESS <- function(outdir, version = "PalEON"){
}else{ #not reading in Stand variables
# NOT STAND
+ past_stream <- current_stream
current_stream_type <- find_stream_type(NULL, current_stream, LPJ_GUESS_CLASSES, LPJ_GUESS_TYPES, guessh_in)
@@ -1092,27 +1195,40 @@ read_binary_LPJGUESS <- function(outdir, version = "PalEON"){
current_stream_specs <- find_stream_size(current_stream_type, guessh_in, LPJ_GUESS_TYPES, LPJ_GUESS_CONST_INTS)
# and read!
if(current_stream_specs$single){
+ pos <- seek(zz)
Gridcell[[length(Gridcell)]][[current_stream_type$name]][[pft_i]] <- readBin(con = zz,
what = current_stream_specs$what,
n = current_stream_specs$n,
size = current_stream_specs$size)
- }else if(current_stream_specs$name %in% c("hmtemp_20", "hmprec_20", "hmeet_20")){
+ key <- file.path("Gridcell", past_stream, current_stream_type$name, pft_i, fsep = "/")
+ pos_list[[key]] <- pos
+ siz_list[[key]] <- current_stream_specs$size
+
+ }else if(current_stream_type$name %in% c("hmtemp_20", "hmprec_20", "hmeet_20")){
# these three are just too different, maybe extract their names in the beginning
# be careful while writing back to the binary
# Gridcell[[length(Gridcell)]][[current_stream_type$name]] <- readBin(con = zz, double(), 264, 8)
Gridcell[[length(Gridcell)]][[current_stream_type$name]] <- vector("list", length(current_stream_specs) - 2)
for(css.i in seq_len(length(current_stream_specs) - 2)){
+ pos <- seek(zz)
Gridcell[[length(Gridcell)]][[current_stream_type$name]][[css.i]] <- readBin(con = zz,
what = current_stream_specs[[css.i]]$what,
n = current_stream_specs[[css.i]]$n,
size = current_stream_specs[[css.i]]$size)
+ key <- file.path("Gridcell", past_stream, current_stream_type$name, css.i, fsep = "/")
+ pos_list[[key]] <- pos
+ siz_list[[key]] <- current_stream_specs[[css.i]]$size
}
}else{
for(css.i in seq_along(current_stream_specs$what)){
+ pos <- seek(zz)
Gridcell[[length(Gridcell)]][[current_stream_type$name]][[pft_i]][[current_stream_specs$names[css.i]]]<- readBin(con = zz,
what = current_stream_specs$what[css.i],
n = current_stream_specs$n[css.i],
size = current_stream_specs$size[css.i])
+ key <- file.path("Gridcell", past_stream, current_stream_type$name, pft_i, current_stream_specs$names[css.i], fsep = "/")
+ pos_list[[key]] <- pos
+ siz_list[[key]] <- current_stream_specs$size[css.i]
}
}
@@ -1124,16 +1240,24 @@ read_binary_LPJGUESS <- function(outdir, version = "PalEON"){
current_stream_specs <- find_stream_size(current_stream_type, guessh_in, LPJ_GUESS_TYPES, LPJ_GUESS_CONST_INTS)
# and read!
if(current_stream_specs$single){
+ pos <- seek(zz)
Gridcell[[length(Gridcell)]][[current_stream_type$name]] <- readBin(con = zz,
what = current_stream_specs$what,
n = current_stream_specs$n,
size = current_stream_specs$size)
- }else{ # probably don't need this but let's keep
+ key <- file.path("Gridcell", past_stream, current_stream_type$name, fsep = "/")
+ pos_list[[key]] <- pos
+ siz_list[[key]] <- current_stream_specs$size
+ }else{
for(css_i in seq_along(current_stream_specs$what)){
+ pos <- seek(zz)
Gridcell[[length(Gridcell)]][[current_stream_specs$names[css_i]]] <- readBin(con = zz,
what = current_stream_specs$what[css_i],
n = current_stream_specs$n[css_i],
size = current_stream_specs$size[css_i])
+ key <- file.path("Gridcell", past_stream, current_stream_type$name[css_i], fsep = "/")
+ pos_list[[key]] <- pos
+ siz_list[[key]] <- current_stream_specs$size[css_i]
}
}
}# end if-class within Gridcell
@@ -1145,7 +1269,12 @@ read_binary_LPJGUESS <- function(outdir, version = "PalEON"){
Gridcell$meta_data <- meta_data
- return(Gridcell)
+ # return(Gridcell)
+ return(list(
+ state = Gridcell,
+ pos_list = pos_list,
+ siz_list = siz_list
+ ))
} # read_binary_LPJGUESS end
diff --git a/models/lpjguess/R/split_inputs.LPJGUESS.R b/models/lpjguess/R/split_inputs.LPJGUESS.R
index 9a598e50e8b..8ba5ef99491 100644
--- a/models/lpjguess/R/split_inputs.LPJGUESS.R
+++ b/models/lpjguess/R/split_inputs.LPJGUESS.R
@@ -14,120 +14,125 @@
##' @importFrom PEcAn.utils days_in_year
##' @export
split_inputs.LPJGUESS <- function(settings, start.time, stop.time, inputs, overwrite = FALSE, outpath = NULL){
-
- #### Lubridate start and end times
- start.day <- lubridate::yday(start.time)
- start.year <- lubridate::year(start.time)
- end.day <- lubridate::yday(stop.time)
- end.year <- lubridate::year(stop.time)
-
- # Whole run period
- run.start <- lubridate::year(settings$run$start.date)
- run.end <- lubridate::year(settings$run$end.date)
-
- #### Get met paths
- met <- inputs
- path <- dirname(met)
- prefix <- substr(basename(met), 1, nchar(basename(met))-16) #assuming we'll always have "PREFIX.1920.2010.tmp"
- if(is.null(outpath)){
- outpath <- path
- }
- if(!dir.exists(outpath)) dir.create(outpath)
-
- var.names <- c("tmp", "pre", "cld")
- long.names <- c("air_temperature",
- "precipitation_flux",
- "surface_downwelling_shortwave_flux_in_air")
-
- # !!! always full years with LPJ-GUESS !!!
- files.in <- file.path(outpath, paste0(prefix, run.start, ".", run.end, ".", var.names, ".nc"))
- files.out <- file.path(outpath, paste0(prefix, start.year, ".", end.year, ".", var.names, ".nc"))
-
- if(file.exists(files.out[1]) & !overwrite){
- return(files.out[1])
- }
-
- ## open netcdf files
- fnc.tmp <- ncdf4::nc_open(files.in[1])
- fnc.pre <- ncdf4::nc_open(files.in[2])
- fnc.cld <- ncdf4::nc_open(files.in[3])
-
- ## read climate data
- nc.tmp <- ncdf4::ncvar_get(fnc.tmp, var.names[1])
- nc.pre <- ncdf4::ncvar_get(fnc.pre, var.names[2])
- nc.cld <- ncdf4::ncvar_get(fnc.cld, var.names[3])
-
- # cut where
- if(start.year == run.start){
- years <- start.year:end.year
- inds <- 1:sum(PEcAn.utils::days_in_year(years))
+ #### If using CRU input, return directly
+ if (grepl("\\.cru(\\.bin)?$", inputs, ignore.case = TRUE)) {
+ PEcAn.logger::logger.info(paste("Input is a CRU file:", inputs, "- returning path directly without splitting."))
+ return(inputs) # Without cropping, use the original file directly
}else{
- ### come back
- }
-
- # split
- nc.tmp <- nc.tmp[1,1,inds]
- nc.pre <- nc.pre[1,1,inds]
- nc.cld <- nc.cld[1,1,inds]
-
- var.list <- list(nc.tmp, nc.pre, nc.cld)
-
- # not that these will be different than "K", "kg m-2 s-1", "W m-2"
- var.units <- c(fnc.tmp$var$tmp$units,
- fnc.pre$var$pre$units,
- fnc.cld$var$cld$units)
-
- # get other stuff to be written to ncdf
-
- ## retrieve lat/lon
- lon <- ncdf4::ncvar_get(fnc.tmp, "lon")
- lat <- ncdf4::ncvar_get(fnc.tmp, "lat")
-
- # write back
- ## write climate data define dimensions
-
- latdim <- ncdf4::ncdim_def(name = "lat", "degrees_north", as.double(lat))
- londim <- ncdf4::ncdim_def(name = "lon", "degrees_east", as.double(lon))
- timedim <- ncdf4::ncdim_def("time", paste0("days since ", start.year - 1, "-12-31", sep = ""), as.double(c(1:length(nc.tmp))))
-
- fillvalue <- 9.96920996838687e+36
-
- for (n in seq_along(var.names)) {
- # define variable
- var.def <- ncdf4::ncvar_def(name = var.names[n],
- units = var.units[n],
- dim = (list(londim, latdim, timedim)),
- fillvalue, long.names[n],
- verbose = FALSE,
- prec = "float")
-
- # create netCD file for LPJ-GUESS
- ncfile <- ncdf4::nc_create(files.out[[n]], vars = var.def, force_v4 = TRUE)
-
-
- # put variable, rep(...,each=4) is a hack to write the same data for all grids (which all are the
- # same)
- ncdf4::ncvar_put(ncfile, var.def, rep(var.list[[n]], each = 4))
-
-
- # additional attributes for LPJ-GUESS
- ncdf4::ncatt_put(nc = ncfile, varid = var.names[n], attname = "standard_name", long.names[n])
-
- ncdf4::ncatt_put(nc = ncfile, varid = "lon", attname = "axis", "X")
- ncdf4::ncatt_put(nc = ncfile, varid = "lon", attname = "standard_name", "longitude")
-
- ncdf4::ncatt_put(nc = ncfile, varid = "lat", attname = "axis", "Y")
- ncdf4::ncatt_put(nc = ncfile, varid = "lat", attname = "standard_name", "latitude")
-
- ncdf4::ncatt_put(nc = ncfile, varid = "time", attname = "calendar", "gregorian")
-
- ncdf4::nc_close(ncfile)
+ #### Lubridate start and end times
+ start.day <- lubridate::yday(start.time)
+ start.year <- lubridate::year(start.time)
+ end.day <- lubridate::yday(stop.time)
+ end.year <- lubridate::year(stop.time)
+
+ # Whole run period
+ run.start <- lubridate::year(settings$run$start.date)
+ run.end <- lubridate::year(settings$run$end.date)
+
+ #### Get met paths
+ met <- inputs
+ path <- dirname(met)
+ prefix <- substr(basename(met), 1, nchar(basename(met))-16) #assuming we'll always have "PREFIX.1920.2010.tmp"
+ if(is.null(outpath)){
+ outpath <- path
+ }
+ if(!dir.exists(outpath)) dir.create(outpath)
+
+ var.names <- c("tmp", "pre", "cld")
+ long.names <- c("air_temperature",
+ "precipitation_flux",
+ "surface_downwelling_shortwave_flux_in_air")
+
+ # !!! always full years with LPJ-GUESS !!!
+ files.in <- file.path(outpath, paste0(prefix, run.start, ".", run.end, ".", var.names, ".nc"))
+ files.out <- file.path(outpath, paste0(prefix, start.year, ".", end.year, ".", var.names, ".nc"))
+
+ if(file.exists(files.out[1]) & !overwrite){
+ return(files.out[1])
+ }
+
+ ## open netcdf files
+ fnc.tmp <- ncdf4::nc_open(files.in[1])
+ fnc.pre <- ncdf4::nc_open(files.in[2])
+ fnc.cld <- ncdf4::nc_open(files.in[3])
+
+ ## read climate data
+ nc.tmp <- ncdf4::ncvar_get(fnc.tmp, var.names[1])
+ nc.pre <- ncdf4::ncvar_get(fnc.pre, var.names[2])
+ nc.cld <- ncdf4::ncvar_get(fnc.cld, var.names[3])
+
+ # cut where
+ if(start.year == run.start){
+ years <- start.year:end.year
+ inds <- 1:sum(PEcAn.utils::days_in_year(years))
+ }else{
+ ### come back
+ }
+
+ # split
+ nc.tmp <- nc.tmp[1,1,inds]
+ nc.pre <- nc.pre[1,1,inds]
+ nc.cld <- nc.cld[1,1,inds]
+
+ var.list <- list(nc.tmp, nc.pre, nc.cld)
+
+  # note that these will be different from "K", "kg m-2 s-1", "W m-2"
+ var.units <- c(fnc.tmp$var$tmp$units,
+ fnc.pre$var$pre$units,
+ fnc.cld$var$cld$units)
+
+ # get other stuff to be written to ncdf
+
+ ## retrieve lat/lon
+ lon <- ncdf4::ncvar_get(fnc.tmp, "lon")
+ lat <- ncdf4::ncvar_get(fnc.tmp, "lat")
+
+ # write back
+ ## write climate data define dimensions
+
+ latdim <- ncdf4::ncdim_def(name = "lat", "degrees_north", as.double(lat))
+ londim <- ncdf4::ncdim_def(name = "lon", "degrees_east", as.double(lon))
+ timedim <- ncdf4::ncdim_def("time", paste0("days since ", start.year - 1, "-12-31", sep = ""), as.double(c(1:length(nc.tmp))))
+
+ fillvalue <- 9.96920996838687e+36
+
+ for (n in seq_along(var.names)) {
+ # define variable
+ var.def <- ncdf4::ncvar_def(name = var.names[n],
+ units = var.units[n],
+ dim = (list(londim, latdim, timedim)),
+ fillvalue, long.names[n],
+ verbose = FALSE,
+ prec = "float")
+
+    # create netCDF file for LPJ-GUESS
+ ncfile <- ncdf4::nc_create(files.out[[n]], vars = var.def, force_v4 = TRUE)
+
+
+ # put variable, rep(...,each=4) is a hack to write the same data for all grids (which all are the
+ # same)
+ ncdf4::ncvar_put(ncfile, var.def, rep(var.list[[n]], each = 4))
+
+
+ # additional attributes for LPJ-GUESS
+ ncdf4::ncatt_put(nc = ncfile, varid = var.names[n], attname = "standard_name", long.names[n])
+
+ ncdf4::ncatt_put(nc = ncfile, varid = "lon", attname = "axis", "X")
+ ncdf4::ncatt_put(nc = ncfile, varid = "lon", attname = "standard_name", "longitude")
+
+ ncdf4::ncatt_put(nc = ncfile, varid = "lat", attname = "axis", "Y")
+ ncdf4::ncatt_put(nc = ncfile, varid = "lat", attname = "standard_name", "latitude")
+
+ ncdf4::ncatt_put(nc = ncfile, varid = "time", attname = "calendar", "gregorian")
+
+ ncdf4::nc_close(ncfile)
+ }
+
+ # close nc
+ ncdf4::nc_close(fnc.tmp)
+ ncdf4::nc_close(fnc.pre)
+ ncdf4::nc_close(fnc.cld)
+
+ return(files.out[1])
}
-
- # close nc
- ncdf4::nc_close(fnc.tmp)
- ncdf4::nc_close(fnc.pre)
- ncdf4::nc_close(fnc.cld)
-
- return(files.out[1])
} # split_inputs.LPJGUESS
\ No newline at end of file
diff --git a/models/lpjguess/R/write.config.LPJGUESS.R b/models/lpjguess/R/write.config.LPJGUESS.R
index 6f2ea4f5d7f..b1ac07b5c45 100644
--- a/models/lpjguess/R/write.config.LPJGUESS.R
+++ b/models/lpjguess/R/write.config.LPJGUESS.R
@@ -14,7 +14,6 @@
##' @export
##' @author Istem Fer, Tony Gardella
write.config.LPJGUESS <- function(defaults, trait.values, settings, run.id, restart = NULL) {
-
# find out where to write run/ouput
rundir <- file.path(settings$host$rundir, run.id)
if (!file.exists(rundir)) {
@@ -194,16 +193,20 @@ write.insfile.LPJGUESS <- function(settings, trait.values, rundir, outdir, run.i
paramsins <- paramsins[-pftindx]
paramsins <- c(paramsins, unlist(write2pftblock))
-
- # write clim file names
-
- tmp.file <- settings$run$inputs$met$path
- pre.file <- gsub(".tmp.nc", ".pre.nc", tmp.file)
- cld.file <- gsub(".tmp.nc", ".cld.nc", tmp.file)
-
- guessins <- gsub("@TEMP_FILE@", tmp.file, guessins)
- guessins <- gsub("@PREC_FILE@", pre.file, guessins)
- guessins <- gsub("@INSOL_FILE@", cld.file, guessins)
+ # # Past version: write clim file names (cf input)
+ # tmp.file <- settings$run$inputs$met$path
+ # pre.file <- gsub(".tmp.nc", ".pre.nc", tmp.file)
+ # cld.file <- gsub(".tmp.nc", ".cld.nc", tmp.file)
+ #
+ # guessins <- gsub("@TEMP_FILE@", tmp.file, guessins)
+ # guessins <- gsub("@PREC_FILE@", pre.file, guessins)
+ # guessins <- gsub("@INSOL_FILE@", cld.file, guessins)
+
+ # when using cru input, lpjguess will not use these clim files
+ cru.file <- settings$run$inputs$met$path
+ misc.file <- sub("\\.bin$", "misc.bin", cru.file)
+ guessins <- gsub("@MET_AND_SOIL_FILE@", cru.file, guessins)
+ guessins <- gsub("@MISC_FILE@", misc.file, guessins)
# create and write CO2 file
start.year <- lubridate::year(settings$run$start.date)
@@ -232,9 +235,12 @@ write.insfile.LPJGUESS <- function(settings, trait.values, rundir, outdir, run.i
utils::write.table(CO2, file = co2.file, row.names = FALSE, col.names = FALSE, sep = "\t", eol = "\n")
guessins <- gsub("@CO2_FILE@", co2.file, guessins)
- # write soil file path
- soil.file <- settings$run$inputs$soil$path
- guessins <- gsub("@SOIL_FILE@", soil.file, guessins)
+ # # write soil file path
+ # # when using cru input, it's also climate file
+ # soil.file <- settings$run$inputs$soil$path
+ # misc.file <- sub("\\.bin$", "misc.bin", soil.file)
+ # guessins <- gsub("@SOIL_FILE@", soil.file, guessins)
+ # guessins <- gsub("@MISC_FILE@", misc.file, guessins)
settings$model$insfile <- file.path(settings$rundir, run.id, "guess.ins")
diff --git a/models/lpjguess/R/write_restart.LPJGUESS.R b/models/lpjguess/R/write_restart.LPJGUESS.R
new file mode 100644
index 00000000000..49271ef0e55
--- /dev/null
+++ b/models/lpjguess/R/write_restart.LPJGUESS.R
@@ -0,0 +1,114 @@
+##' write_restart.LPJGUESS
+##'
+##' Write restart files for LPJGUESS
+##' new.state includes X (AGB.pft) from Analysis
+##' new.params includes LPJGUESS_state
+##'
+##' @param outdir output directory
+##' @param runid run ID
+##' @param start.time start date and time for each SDA ensemble
+##' @param stop.time stop date and time for each SDA ensemble
+##' @param settings PEcAn settings object
+##' @param new.state analysis state vector
+##' @param RENAME flag to either rename output file or not
+##' @param new.params list of parameters to convert between different states
+##' @param inputs list of model inputs to use in write.config.LPJGUESS
+##' @param verbose decide if we want to print the runid
+##'
+##' @return NONE
+##'
+##' @export
+##' @author Yinghao Sun
+write_restart.LPJGUESS <- function(outdir, runid,
+ start.time, stop.time, settings,
+ new.state, RENAME = TRUE,
+ new.params, inputs = NULL, verbose = FALSE){
+
+ rundir <- settings$host$rundir
+ variables <- colnames(new.state)
+
+ ## ---- Rename old output, remove old clim ----
+ if (RENAME) {
+ file.rename(file.path(outdir, runid, "lpjguess.out"),
+ file.path(outdir, runid, paste0("lpjguess.", as.Date(start.time), ".out")))
+ system(paste("rm", file.path(rundir, runid, "lpjguess.clim")))
+ } else {
+ PEcAn.logger::logger.severe(paste("rename = FALSE: Restart cannot proceed without output file",
+ "lpjguess.out being renamed for", start.time))
+ stop("RENAME flag is FALSE. Must rerun this timestep before continuing.")
+ }
+
+ settings$run$start.date <- start.time
+ settings$run$end.date <- stop.time
+
+ ## ---- Pull old state ----
+ if (is.null(new.params$LPJGUESS_state))
+ PEcAn.logger::logger.severe("LPJGUESS_state missing in new.params")
+ # new.params$LPJGUESS_state include state, pos_list, siz_list
+ Gridcell <- new.params$LPJGUESS_state$state
+ pos_list <- new.params$LPJGUESS_state$pos_list
+ siz_list <- new.params$LPJGUESS_state$siz_list
+
+ ## ---- Build PFT parameter table from new.params ----
+ # TODO: find accurate parameters; read params from settings
+ pft_par_table <- data.frame()
+ # PFTs <- c("Ace_rub","Bet_all","Fag_gra","Que_rub","Tsu_can")
+ PFTs <- names(new.params)
+ for(PFT in PFTs) {
+ this.param.row <- c()
+ this.param.row["sla"] <- new.params[[PFT]]$SLA
+ this.param.row["k_latosa"] <- new.params[[PFT]]$sapwood_ratio
+ this.param.row["wooddens"] <- 200 #kg/m-3
+ # this.param.row["wooddens"] <- 0.2 #g/cm-3
+ this.param.row["lifeform"] <- 1
+ this.param.row["k_rp"] <- 1.6
+ this.param.row["k_allom1"] <- 250
+ this.param.row["k_allom2"] <- 60
+ this.param.row["k_allom3"] <- 0.67
+ this.param.row["crownarea_max"] <- 50
+ # conifer special case
+ if(PFT == "Tsu_can") {
+ this.param.row["k_allom1"] <- 150
+ }
+ pft_par_table <- rbind(pft_par_table , this.param.row)
+ }
+ names(pft_par_table) <- c("sla", "k_latosa", "wooddens", "lifeform", "k_rp", "k_allom1", "k_allom2", "k_allom3", "crownarea_max")
+ rownames(pft_par_table) <- PFTs
+
+  ## --- Build initial & target AGB vectors (kg m-2) ---
+  # choose a minimum diameter (defined before first use below)
+  min.diam = 0.5
+  agb.init <- calculateGridcellVariablePerPFT(Gridcell, "AbvGrndWood", min.diam=min.diam, pft.params=pft_par_table)
+  if (any(grepl("^AGB.pft", variables))) { # column names were set in read.restart
+    agb.targ <- PEcAn.utils::ud_convert(
+      unlist(new.state[, grepl("^AGB.pft", variables), drop=TRUE]),
+      "Mg/ha","kg/m^2")
+  }
+
+  ### dens will not change because we wont do dens SDA temporarily
+  dens.init <- calculateGridcellVariablePerPFT(Gridcell, "densindiv", min.diam=min.diam, pft.params=pft_par_table)
+  dens.targ <- dens.init
+
+  ## --- Update state ---
+ Gridcell_updated <- update_state_LPJGUESS(Gridcell, pft_par_table,
+ dens.init, dens.targ,
+ agb.init, agb.targ,
+ AbvGrndWood.epsilon = 0.05,
+ trace = FALSE, min.diam)
+
+ State_updated <- list(state = Gridcell_updated,
+ pos_list = pos_list,
+ siz_list = siz_list)
+
+ write_binary_LPJGUESS(State_updated, file.path(outdir, runid))
+
+ ## --- Regenerate config for next run ---
+ do.call(write.config.LPJGUESS,
+ list(defaults = NULL,
+ trait.values = new.params,
+ settings = settings,
+ run.id = runid)
+ )
+
+ if(verbose) PEcAn.logger::logger.info("restart written for", runid)
+}
diff --git a/models/lpjguess/R/write_state.R b/models/lpjguess/R/write_state.R
new file mode 100644
index 00000000000..a65c27e58bf
--- /dev/null
+++ b/models/lpjguess/R/write_state.R
@@ -0,0 +1,103 @@
+#' Extract nested value from a state list using flat key
+#'
+#' @param state A nested list (usually the model.state$state)
+#' @param key A flat string like "Gridcell/Stand/1/Patch/1/Vegetation/Individuals/3/cmass_leaf"
+#' @return The value stored at that nested position
+#' @keywords internal
+#' @author Yinghao Sun
+extract_from_state_by_key <- function(state, key) {
+ # Optional: remove "Gridcell/" prefix
+ key <- sub("^Gridcell/", "", key)
+
+ parts <- strsplit(key, "/")[[1]]
+ val <- state
+
+ for (p in parts) {
+ if (is.null(val)) {
+ warning("NULL reached prematurely at: ", p)
+ return(NULL)
+ }
+
+ # Case 1: numeric index
+ if (grepl("^[0-9]+$", p)) {
+ idx <- as.integer(p)
+ if (idx > length(val)) {
+ warning("Index out of bounds: ", idx)
+ return(NULL)
+ }
+ val <- val[[idx]]
+
+ # Case 2: named element (case-insensitive match)
+ } else {
+ val_names <- names(val)
+ match_idx <- which(tolower(val_names) == tolower(p))
+
+ if (length(match_idx) == 0) {
+ warning("Name not found (case-insensitive): ", p)
+ return(NULL)
+ }
+
+ val <- val[[match_idx[1]]] # use first match
+ }
+ }
+
+ return(val)
+}
+
+
+#' Write updated variables into a copy of the original LPJ-GUESS .state file
+#'
+#' @param State_updated A list containing updated state variables, position list and size list (get from read_binary)
+#' @param outdir Path to a directory containing the `0.state` and `meta.bin` files.
+#'
+#' @return No return value. Writes files to disk as side effect.
+#' @author Yinghao Sun
+#' @export
+write_binary_LPJGUESS <- function(State_updated, outdir) {
+
+ # Build full paths to source files
+ src_state <- file.path(outdir, "0.state")
+ meta_file <- file.path(outdir, "meta.bin")
+
+ # back-up
+ bak_state <- file.path(outdir, "bak.state")
+ file.copy(src_state, bak_state, overwrite = TRUE)
+
+  # make a working copy in a temporary file
+ new_state <- file.path(outdir, "new.state")
+ file.copy(src_state, new_state, overwrite = TRUE)
+
+ # # Ensure output directory exists
+ # dir.create(output_dir, recursive = TRUE, showWarnings = FALSE)
+ #
+ # # Copy template files to output directory so we don't overwrite it
+ # file.copy(c(meta_file, original_state), to = output_dir, overwrite = TRUE)
+ #
+ # # Open copied 0.state file for binary modification
+ # state_path <- file.path(outdir, "0.state")
+ # con <- file(state_path, open = "r+b")
+
+ # Open temporary new.state file for binary modification
+ con <- file(new_state, open = "r+b")
+
+ # A named list of byte positions for each variable (generated during reading)
+ pos_list <- State_updated$pos_list
+ # A named list of writeBin sizes for each variable (same keys as pos_list)
+ siz_list <- State_updated$siz_list
+
+ # Loop over all keys
+ for (key in names(pos_list)) {
+ value <- extract_from_state_by_key(State_updated$state, key)
+ pos <- pos_list[[key]]
+ size <- siz_list[[key]]
+
+ # Seek and write
+ seek(con, where = pos, origin = "start")
+ writeBin(object = value, con = con, size = size)
+ }
+
+ close(con)
+
+ # Atomic substitution
+ file.rename(new_state, src_state) # After success, bak is still there and can be manually deleted
+}
diff --git a/models/lpjguess/inst/pecan.ins b/models/lpjguess/inst/pecan.ins
index d8d52274477..9d9cdb06ad1 100755
--- a/models/lpjguess/inst/pecan.ins
+++ b/models/lpjguess/inst/pecan.ins
@@ -73,7 +73,7 @@ title 'LPJ-GUESS cohort mode - global pfts'
vegmode "cohort" ! "cohort", "individual" or "population"
nyear_spinup 500 ! number of years to spin up the simulation for
-spinup_lifeform "nolifeform"
+! spinup_lifeform "tree"
ifcalcsla 0 ! whether to calculate SLA from leaf longevity
! (PFT-specific value can be specified in this file instead)
ifcalccton 1 ! whether to calculate leaf C:N min from leaf longevity
@@ -84,7 +84,7 @@ patcharea 1000 ! patch area (m2)
estinterval 5 ! years between establishment events in cohort mode
ifdisturb 1 ! whether generic patch-destroying disturbances enabled
distinterval 500 ! average return time for generic patch-destroying disturbances
-disturb_year -1
+! disturb_year -1
ifbgestab 1 ! whether background establishment enabled
ifsme 1 ! whether spatial mass effect enabled
ifstochestab 1 ! whether establishment stochastic
diff --git a/models/lpjguess/inst/template.ins b/models/lpjguess/inst/template.ins
index aa5520e66c4..25094f1b2e7 100755
--- a/models/lpjguess/inst/template.ins
+++ b/models/lpjguess/inst/template.ins
@@ -14,10 +14,11 @@ coordinates_precision 2
! Forcing Data & gridlists
!
-param "file_gridlist_cf" (str "@GRID_FILE@")
+param "file_gridlist" (str "@GRID_FILE@")
param "file_co2" (str "@CO2_FILE@")
-param "file_cru" (str "@SOIL_FILE@")
+param "file_cru" (str "@MET_AND_SOIL_FILE@")
+param "file_cru_misc" (str "@MISC_FILE@")
! N deposition (blank string to use constant pre-industrial level of 2 kgN/ha/year)
param "file_ndep" (str "")
diff --git a/models/lpjguess/man/extract_from_state_by_key.Rd b/models/lpjguess/man/extract_from_state_by_key.Rd
new file mode 100644
index 00000000000..4d72aface7a
--- /dev/null
+++ b/models/lpjguess/man/extract_from_state_by_key.Rd
@@ -0,0 +1,23 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/write_state.R
+\name{extract_from_state_by_key}
+\alias{extract_from_state_by_key}
+\title{Extract nested value from a state list using flat key}
+\usage{
+extract_from_state_by_key(state, key)
+}
+\arguments{
+\item{state}{A nested list (usually the model.state$state)}
+
+\item{key}{A flat string like "Gridcell/Stand/1/Patch/1/Vegetation/Individuals/3/cmass_leaf"}
+}
+\value{
+The value stored at that nested position
+}
+\description{
+Extract nested value from a state list using flat key
+}
+\author{
+Yinghao Sun
+}
+\keyword{internal}
diff --git a/models/lpjguess/man/find_stream_size.Rd b/models/lpjguess/man/find_stream_size.Rd
index bc57bf17949..2aebe539c95 100644
--- a/models/lpjguess/man/find_stream_size.Rd
+++ b/models/lpjguess/man/find_stream_size.Rd
@@ -26,3 +26,4 @@ A numeric value representing the size (number of streamed variables).
\description{
Determines the size (number of variables) in a stream based on the file content.
}
+\keyword{internal}
diff --git a/models/lpjguess/man/find_stream_type.Rd b/models/lpjguess/man/find_stream_type.Rd
index d89e8c40136..df03f662ef2 100644
--- a/models/lpjguess/man/find_stream_type.Rd
+++ b/models/lpjguess/man/find_stream_type.Rd
@@ -29,3 +29,4 @@ A character string indicating the stream type.
\description{
Determines the type of a given stream variable in an LPJ-GUESS file.
}
+\keyword{internal}
diff --git a/models/lpjguess/man/find_stream_var.Rd b/models/lpjguess/man/find_stream_var.Rd
index 2de82ee1895..95c672f2c88 100644
--- a/models/lpjguess/man/find_stream_var.Rd
+++ b/models/lpjguess/man/find_stream_var.Rd
@@ -17,3 +17,4 @@ A character vector of streamed variable names.
\description{
A helper function that lists streamed variables. It returns the names of streamed variables.
}
+\keyword{internal}
diff --git a/models/lpjguess/man/read_binary_LPJGUESS.Rd b/models/lpjguess/man/read_binary_LPJGUESS.Rd
index 5ff044916b9..81a68fa47e8 100644
--- a/models/lpjguess/man/read_binary_LPJGUESS.Rd
+++ b/models/lpjguess/man/read_binary_LPJGUESS.Rd
@@ -7,7 +7,7 @@
read_binary_LPJGUESS(outdir, version = "PalEON")
}
\arguments{
-\item{outdir}{A character string specifying the output directory containing the binary state files.}
+\item{outdir}{The output directory where ".state" and "meta.bin" will be written}
\item{version}{A character string specifying the LPJ-GUESS version (default is "PalEON").}
}
@@ -17,3 +17,6 @@ A matrix or list containing the extracted data.
\description{
Reads a binary file formatted for LPJ-GUESS and extracts relevant data.
}
+\author{
+Istem Fer, Yinghao Sun
+}
diff --git a/models/lpjguess/man/read_restart.LPJGUESS.Rd b/models/lpjguess/man/read_restart.LPJGUESS.Rd
new file mode 100644
index 00000000000..0bacaa4070b
--- /dev/null
+++ b/models/lpjguess/man/read_restart.LPJGUESS.Rd
@@ -0,0 +1,41 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/read_restart.LPJGUESS.R
+\name{read_restart.LPJGUESS}
+\alias{read_restart.LPJGUESS}
+\title{Read Restart for LPJGUESS}
+\usage{
+read_restart.LPJGUESS(outdir, runid, stop.time, settings, var.names, params)
+}
+\arguments{
+\item{outdir}{output directory}
+
+\item{runid}{run ID}
+
+\item{stop.time}{year that is being read}
+
+\item{settings}{PEcAn settings object}
+
+\item{var.names}{var.names to be extracted}
+
+\item{params}{passed on to return value}
+}
+\value{
+X_tmp vector of forecasts
+}
+\description{
+Read Restart for LPJGUESS
+}
+\examples{
+\dontrun{
+ rx <- read_restart.LPJGUESS(
+ outdir = "/projectnb/…/LPJ_output",
+ runid = "123456",
+ stop.time = as.POSIXct("2001-12-31 23:59:59", tz = "UTC"),
+ settings = settings,
+ var.names = c("AGB.pft"),
+ params = params)
+}
+}
+\author{
+Istem Fer, Yinghao Sun
+}
diff --git a/models/lpjguess/man/serialize_starts_ends.Rd b/models/lpjguess/man/serialize_starts_ends.Rd
index 5c743458b55..3b3bfa242fd 100644
--- a/models/lpjguess/man/serialize_starts_ends.Rd
+++ b/models/lpjguess/man/serialize_starts_ends.Rd
@@ -17,3 +17,4 @@ A numeric vector of length 2, giving the start and end line numbers.
\description{
Finds the start and end lines for serialization.
}
+\keyword{internal}
diff --git a/models/lpjguess/man/write_binary_LPJGUESS.Rd b/models/lpjguess/man/write_binary_LPJGUESS.Rd
new file mode 100644
index 00000000000..e6718a5155e
--- /dev/null
+++ b/models/lpjguess/man/write_binary_LPJGUESS.Rd
@@ -0,0 +1,22 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/write_state.R
+\name{write_binary_LPJGUESS}
+\alias{write_binary_LPJGUESS}
+\title{Write updated variables into a copy of the original LPJ-GUESS .state file}
+\usage{
+write_binary_LPJGUESS(State_updated, outdir)
+}
+\arguments{
+\item{State_updated}{A list containing updated state variables, position list and size list (get from read_binary)}
+
+\item{outdir}{Path to a directory containing the `0.state` and `meta.bin` files.}
+}
+\value{
+No return value. Writes files to disk as side effect.
+}
+\description{
+Write updated variables into a copy of the original LPJ-GUESS .state file
+}
+\author{
+Yinghao Sun
+}
diff --git a/models/lpjguess/man/write_restart.LPJGUESS.Rd b/models/lpjguess/man/write_restart.LPJGUESS.Rd
new file mode 100644
index 00000000000..2b06018b468
--- /dev/null
+++ b/models/lpjguess/man/write_restart.LPJGUESS.Rd
@@ -0,0 +1,51 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/write_restart.LPJGUESS.R
+\name{write_restart.LPJGUESS}
+\alias{write_restart.LPJGUESS}
+\title{write_restart.LPJGUESS}
+\usage{
+write_restart.LPJGUESS(
+ outdir,
+ runid,
+ start.time,
+ stop.time,
+ settings,
+ new.state,
+ RENAME = TRUE,
+ new.params,
+ inputs = NULL,
+ verbose = FALSE
+)
+}
+\arguments{
+\item{outdir}{output directory}
+
+\item{runid}{run ID}
+
+\item{start.time}{start date and time for each SDA ensemble}
+
+\item{stop.time}{stop date and time for each SDA ensemble}
+
+\item{settings}{PEcAn settings object}
+
+\item{new.state}{analysis state vector}
+
+\item{RENAME}{flag to either rename output file or not}
+
+\item{new.params}{list of parameters to convert between different states}
+
+\item{inputs}{list of model inputs to use in write.config.LPJGUESS}
+
+\item{verbose}{decide if we want to print the runid}
+}
+\value{
+NONE
+}
+\description{
+Write restart files for LPJGUESS
+new.state includes X (AGB.pft) from Analysis
+new.params includes LPJGUESS_state
+}
+\author{
+Yinghao Sun
+}
diff --git a/models/maat/DESCRIPTION b/models/maat/DESCRIPTION
index 38e108cadcc..435ca029760 100644
--- a/models/maat/DESCRIPTION
+++ b/models/maat/DESCRIPTION
@@ -1,11 +1,13 @@
Package: PEcAn.MAAT
Type: Package
Title: PEcAn Package for Integration of the MAAT Model
-Version: 1.7.4
+Version: 1.7.5
Authors@R: c(
person("Shawn", "Serbin", role = c("aut", "cre"), email="sserbin@bnl.gov"),
person("Anthony", "Walker", role = "aut", email="walkerap@ornl.gov"))
Description: This module provides functions to wrap the MAAT model into the PEcAn workflows.
+URL: https://pecanproject.github.io
+BugReports: https://github.com/PecanProject/pecan/issues
Imports:
PEcAn.data.atmosphere,
PEcAn.logger,
@@ -28,3 +30,4 @@ LazyLoad: yes
LazyData: FALSE
Encoding: UTF-8
RoxygenNote: 7.3.2
+X-schema.org-keywords: MAAT, ecosystem-modeling
diff --git a/models/maat/NEWS.md b/models/maat/NEWS.md
index 313959da366..9076c41c750 100644
--- a/models/maat/NEWS.md
+++ b/models/maat/NEWS.md
@@ -1,3 +1,8 @@
+# PEcAn.MAAT 1.7.5
+
+* model2netcdf.MAAT no longer writes separate `.nc.var` files for every year of output. Use `PEcAn.utils::nc_write_varfiles()` to create these as needed.
+* Vignette: Turned off evaluation of code chunks that download Ameriflux data.
+
# PEcAn.MAAT 1.7.4
## License change
diff --git a/models/maat/R/model2netcdf.MAAT.R b/models/maat/R/model2netcdf.MAAT.R
index dd1a21a9080..92fa0db7b49 100755
--- a/models/maat/R/model2netcdf.MAAT.R
+++ b/models/maat/R/model2netcdf.MAAT.R
@@ -179,20 +179,12 @@ model2netcdf.MAAT <- function(rundir, outdir, sitelat = -999, sitelon = -999, st
ncout <- ncdf4::nc_create(file.path(outdir, paste(year, "nc", sep = ".")), output$var)
ncdf4::ncatt_put(ncout, "time", "bounds", "time_bounds", prec=NA)
for (i in seq_along(output$var)) {
- #print(i) # for debugging
ncdf4::ncvar_put(ncout, output$var[[i]], output$dat[[i]])
}
-
- ## extract variable and long names to VAR file for PEcAn vis
- utils::write.table(sapply(ncout$var, function(x) { x$longname }),
- file = file.path(outdir, paste(year, "nc.var", sep = ".")),
- col.names = FALSE,
- row.names = TRUE,
- quote = FALSE)
-
+
# close netCDF file
try(ncdf4::nc_close(ncout))
-
+
} ## Year loop
} # model2netcdf.MAAT
##-------------------------------------------------------------------------------------------------#
diff --git a/models/maat/vignettes/create_amerifluxLBL_drivers_for_maat.Rmd b/models/maat/vignettes/create_amerifluxLBL_drivers_for_maat.Rmd
index a5be19dd635..5899ff373bf 100644
--- a/models/maat/vignettes/create_amerifluxLBL_drivers_for_maat.Rmd
+++ b/models/maat/vignettes/create_amerifluxLBL_drivers_for_maat.Rmd
@@ -105,7 +105,7 @@ format <- list(
## Download AmerifluxLBL data for selected site and dates (US-WCr, 2000 to 2005)
-```{r download}
+```{r download, eval = FALSE}
m2mdir <- tempfile("met2model_testing")
dir.create(m2mdir)
download.AmerifluxLBL(sitename = "US-WCr", outfolder = m2mdir,
@@ -113,21 +113,21 @@ download.AmerifluxLBL(sitename = "US-WCr", outfolder = m2mdir,
```
## Convert downloaded AmerifluxLBL data to CF format
-```{r met2cf}
+```{r met2cf, eval = FALSE}
cfdir <- file.path(m2mdir, "CF")
met2CF.AmerifluxLBL(in.path = m2mdir, in.prefix = "AMF_US-WCr", outfolder = cfdir,
start_date = "2000-01-01", end_date = "2005-12-31",format=format)
```
## Gapfill CF met drivers
-```{r metgapfill}
+```{r metgapfill, eval = FALSE}
gapfilldir <- file.path(cfdir, "gapfill")
metgapfill(in.path = cfdir, in.prefix = "AMF_US-WCr_BASE_HH_14-5",
outfolder = gapfilldir, start_date = "2000-01-01", end_date = "2005-12-31")
```
## Create MAAT model-specific met drivers
-```{r met2model}
+```{r met2model, eval = FALSE}
in.path <- gapfilldir
in.prefix <- "AMF_US-WCr_BASE_HH_14-5"
outfolder <- file.path(gapfilldir, "maat_drivers")
diff --git a/models/maespa/DESCRIPTION b/models/maespa/DESCRIPTION
index 1074f8ba2ce..ea753f7fa38 100644
--- a/models/maespa/DESCRIPTION
+++ b/models/maespa/DESCRIPTION
@@ -1,7 +1,7 @@
Package: PEcAn.MAESPA
Type: Package
Title: PEcAn Functions Used for Ecological Forecasts and Reanalysis using MAESPA
-Version: 1.7.4
+Version: 1.7.5
Authors@R: c(person("Tony", "Gardella", role = c("aut", "cre"),
email = "tonygard@bu.edu"),
person("University of Illinois, NCSA", role = c("cph")))
@@ -11,6 +11,8 @@ Description: The Predictive Ecosystem Carbon Analyzer (PEcAn) is a scientific
streamline the interaction between data and models, and to improve the
efficacy of scientific investigation.This package allows for MAESPA to be
run through the PEcAN workflow.
+URL: https://pecanproject.github.io
+BugReports: https://github.com/PecanProject/pecan/issues
Imports:
PEcAn.data.atmosphere,
PEcAn.logger,
@@ -31,3 +33,5 @@ Copyright: Authors
LazyData: FALSE
Encoding: UTF-8
RoxygenNote: 7.3.2
+X-schema.org-keywords: MAESPA, ecosystem-modeling
+
diff --git a/models/maespa/NEWS.md b/models/maespa/NEWS.md
index cd9e533fb3f..32ef18d68bf 100644
--- a/models/maespa/NEWS.md
+++ b/models/maespa/NEWS.md
@@ -1,3 +1,8 @@
+# PEcAn.MAESPA 1.7.5
+
+* model2netcdf.MAESPA no longer writes separate `.nc.var` files for every year of output. Use `PEcAn.utils::nc_write_varfiles()` to create these as needed.
+
+
# PEcAn.MAESPA 1.7.4
## License change
diff --git a/models/maespa/R/model2netcdf.MAESPA.R b/models/maespa/R/model2netcdf.MAESPA.R
index 65a29b71243..f145f837f56 100755
--- a/models/maespa/R/model2netcdf.MAESPA.R
+++ b/models/maespa/R/model2netcdf.MAESPA.R
@@ -70,13 +70,10 @@ model2netcdf.MAESPA <- function(outdir, sitelat, sitelon, start_date, end_date,
### Output netCDF data
nc <- ncdf4::nc_create(file.path(outdir, paste(y, "nc", sep = ".")), nc_var)
- varfile <- file(file.path(outdir, paste(y, "nc", "var", sep = ".")), "w")
for (i in seq_along(nc_var)) {
# print(i)
ncdf4::ncvar_put(nc, nc_var[[i]], output[[i]])
- cat(paste(nc_var[[i]]$name, nc_var[[i]]$longname), file = varfile, sep = "\n")
}
- close(varfile)
ncdf4::nc_close(nc)
} ### End of year loop
diff --git a/models/preles/DESCRIPTION b/models/preles/DESCRIPTION
index 305100bdcbe..e413031934d 100644
--- a/models/preles/DESCRIPTION
+++ b/models/preles/DESCRIPTION
@@ -1,7 +1,7 @@
Package: PEcAn.PRELES
Type: Package
Title: PEcAn Package for Integration of the PRELES Model
-Version: 1.7.4
+Version: 1.7.5
Authors@R: c(person("Mike", "Dietze", role = c("aut"),
email = "dietze@bu.edu"),
person("Tony", "Gardella", role = c("aut", "cre"),
@@ -14,6 +14,8 @@ Description: This module provides functions to run the PREdict Light use
parameterization,execution, and analysis. The goal of PECAn is to
streamline the interaction between data and models, and to improve the
efficacy of scientific investigation.
+URL: https://pecanproject.github.io
+BugReports: https://github.com/PecanProject/pecan/issues
Imports:
PEcAn.logger,
lubridate (>= 1.6.0),
@@ -30,3 +32,4 @@ License: BSD_3_clause + file LICENSE
Copyright: Authors
Encoding: UTF-8
RoxygenNote: 7.3.2
+X-schema.org-keywords: PRELES, ecosystem-modeling
diff --git a/models/preles/NEWS.md b/models/preles/NEWS.md
index f4b4acbe382..8be4e11d0f9 100644
--- a/models/preles/NEWS.md
+++ b/models/preles/NEWS.md
@@ -1,3 +1,8 @@
+# PEcAn.PRELES 1.7.5
+
+* runPRELES.jobsh no longer writes separate `.nc.var` files for every year of output. Use `PEcAn.utils::nc_write_varfiles()` to create these as needed.
+
+
# PEcAn.PRELES 1.7.4
## License change
diff --git a/models/preles/R/runPRELES.jobsh.R b/models/preles/R/runPRELES.jobsh.R
index db3dbc8f46d..d1d0d828752 100644
--- a/models/preles/R/runPRELES.jobsh.R
+++ b/models/preles/R/runPRELES.jobsh.R
@@ -178,12 +178,9 @@ runPRELES.jobsh <- function(met.file, outdir, parameters, sitelat, sitelon, star
nc_var[[5]] <- PEcAn.utils::to_ncvar("TVeg", dims)
nc <- ncdf4::nc_create(file.path(outdir, paste(y, "nc", sep = ".")), nc_var)
- varfile <- file(file.path(outdir, paste(y, "nc", "var", sep = ".")), "w")
for (i in seq_along(nc_var)) {
ncdf4::ncvar_put(nc, nc_var[[i]], output[[i]])
- cat(paste(nc_var[[i]]$name, nc_var[[i]]$longname), file = varfile, sep = "\n")
}
- close(varfile)
ncdf4::nc_close(nc)
}
} # runPRELES.jobsh
diff --git a/models/sibcasa/DESCRIPTION b/models/sibcasa/DESCRIPTION
index 2623056827b..2013e0dc448 100644
--- a/models/sibcasa/DESCRIPTION
+++ b/models/sibcasa/DESCRIPTION
@@ -1,7 +1,7 @@
Package: PEcAn.SIBCASA
Type: Package
Title: PEcAn Package for Integration of the SiBCASA Model
-Version: 0.0.2
+Version: 0.0.3
Authors@R: c(person("Rob", "Kooper", role = "cre",
email = "kooper@illinois.edu"),
person("Tony", "Gardella", role = c("aut"),
@@ -12,6 +12,8 @@ Authors@R: c(person("Rob", "Kooper", role = "cre",
person("University of Illinois, NCSA", role = c("cph")))
Description: This module provides functions to link (SiBCASA) to PEcAn.
It is a work in progress and is not yet fully functional.
+URL: https://pecanproject.github.io
+BugReports: https://github.com/PecanProject/pecan/issues
Imports:
ncdf4,
PEcAn.logger
@@ -25,3 +27,4 @@ Copyright: Authors
LazyData: TRUE
Encoding: UTF-8
RoxygenNote: 7.3.2
+X-schema.org-keywords: SiBCASA, ecosystem-modeling
diff --git a/models/sibcasa/NEWS.md b/models/sibcasa/NEWS.md
index f4d0012e476..a2cdf8bf072 100644
--- a/models/sibcasa/NEWS.md
+++ b/models/sibcasa/NEWS.md
@@ -1,3 +1,9 @@
+# PEcAn.SIBCASA 0.0.3
+
+* model2netcdf.SIBCASA no longer writes separate `.nc.var` files for every year of output. Use `PEcAn.utils::nc_write_varfiles()` to create these as needed.
+
+
+
# PEcAn.SIBCASA 0.0.2
## License change
diff --git a/models/sibcasa/R/model2netcdf.SIBCASA.R b/models/sibcasa/R/model2netcdf.SIBCASA.R
index 236b705599c..ca5abd9a23e 100644
--- a/models/sibcasa/R/model2netcdf.SIBCASA.R
+++ b/models/sibcasa/R/model2netcdf.SIBCASA.R
@@ -103,13 +103,10 @@ model2netcdf.SIBCASA <- function(outdir, sitelat, sitelon, start_date, end_date)
## Write out File
### Output netCDF data
nc <- ncdf4::nc_create(file.path(outdir, paste(year, "nc", sep = ".")), nc_var)
- varfile <- file(file.path(outdir, paste(year, "nc", "var", sep = ".")), "w")
for (i in seq_along(nc_var)) {
# print(i)
ncdf4::ncvar_put(nc, nc_var[[i]], output[[i]])
- cat(paste(nc_var[[i]]$name, nc_var[[i]]$longname), file = varfile, sep = "\n")
}
- close(varfile)
ncdf4::nc_close(nc)
}
} # model2netcdf.SIBCASA
diff --git a/models/sipnet/DESCRIPTION b/models/sipnet/DESCRIPTION
index d1d84b3fa20..955e2a7403a 100644
--- a/models/sipnet/DESCRIPTION
+++ b/models/sipnet/DESCRIPTION
@@ -1,7 +1,7 @@
Package: PEcAn.SIPNET
Type: Package
Title: PEcAn Functions Used for Ecological Forecasts and Reanalysis
-Version: 1.9.0
+Version: 1.10.0
Authors@R: c(person("Mike", "Dietze", role = c("aut", "cre"),
email = "dietze@bu.edu"),
person("University of Illinois, NCSA", role = c("cph")))
@@ -10,8 +10,12 @@ Description: The Predictive Ecosystem Carbon Analyzer (PEcAn) is a scientific
model parameterization, execution, and analysis. The goal of PECAn is to
streamline the interaction between data and models, and to improve the
efficacy of scientific investigation.
+URL: https://pecanproject.github.io
+BugReports: https://github.com/PecanProject/pecan/issues
+Depends: R (>= 4.1.0)
Imports:
dplyr,
+ jsonlite,
lubridate (>= 1.6.0),
ncdf4 (>= 1.15),
PEcAn.data.atmosphere,
@@ -31,3 +35,4 @@ License: BSD_3_clause + file LICENSE
Copyright: Authors
Encoding: UTF-8
RoxygenNote: 7.3.2
+X-schema.org-keywords: SIPNET, ecosystem-modeling
diff --git a/models/sipnet/NAMESPACE b/models/sipnet/NAMESPACE
index e2cdb0e98d0..efa0bc19570 100644
--- a/models/sipnet/NAMESPACE
+++ b/models/sipnet/NAMESPACE
@@ -6,10 +6,11 @@ export(model2netcdf.SIPNET)
export(read_restart.SIPNET)
export(remove.config.SIPNET)
export(sample.IC.SIPNET)
-export(sipnet2datetime)
export(split_inputs.SIPNET)
export(veg2model.SIPNET)
export(write.config.SIPNET)
+export(write.events.SIPNET)
export(write_restart.SIPNET)
importFrom(dplyr,"%>%")
+importFrom(rlang,"%||%")
importFrom(rlang,.data)
diff --git a/models/sipnet/NEWS.md b/models/sipnet/NEWS.md
index a90e5d0d1fb..5d5abf636a8 100644
--- a/models/sipnet/NEWS.md
+++ b/models/sipnet/NEWS.md
@@ -1,3 +1,27 @@
+# PEcAn.SIPNET 1.10.0
+
+## Added
+* `write.events.SIPNET()` generates SIPNET `events.in` files from an `events.json` file (#3623).
+* `met2model.SIPNET` now accepts argument `var.names`, listing which variables should be extracted from the file. If not provided, it extracts all variables in the file (#3563).
+
+## Removed
+* The `sipnet2datetime` function is no longer used anywhere and therefore has been removed (#3622).
+
+## Changed
+* Breaking: Renamed the setting used to pass soil and hydrology parameters. `write.config.SIPNET` previously read these from `settings$run$inputs$soilinitcond`, now `settings$run$inputs$soil_physics` to better reflect that these are state factors applicable to the whole run rather than initial conditions (Quianyu Li, #3406).
+* model2netcdf.SIPNET no longer writes separate `.nc.var` files for every year of output. Use `PEcAn.utils::nc_write_varfiles()` to create these as needed (#3611).
+* Restart and met2model functions now print less to the console unless `verbose = TRUE` (#3544, #3563).
+
+## Fixed
+* `write.config.SIPNET` now checks more carefully whether an optional variable exists in an initial condition file before trying to read it, therefore printing fewer messages about (expectedly) missing variables (#3545).
+* When passed a vector of multiple input paths, `write.config.SIPNET` was choosing one at random; it now throws an error (Blesson Thomas, #3298). Note that a single input path per call has always been the intended usage; being passed many was a second bug in PEcAn.uncertainty that is also now fixed.
+* `model2netcdf.SIPNET` no longer assumes a constant value of `pecan_start_doy` across years, which led to incorrect calculations of `sub_dates` and `sub_dates_cf` at year boundaries (@DongchenZ, #3622).
+* When phenology inputs contain missing values, `write.config.SIPNET` now tries to use an average across years for that site before falling back to fixed cross-site defaults (Quianyu Li, #3680).
+* `write.config.SIPNET` now adjusts soil water capacity to match the specified soil depth (#3634).
+* Fixed unit errors in `write.config.SIPNET` calculation of `leafCSpWt` and `Amax` (#3608, #3664).
+
+
+
# PEcAn.SIPNET 1.9.0
## License change
diff --git a/models/sipnet/R/met2model.SIPNET.R b/models/sipnet/R/met2model.SIPNET.R
index ac888f03d12..ccd687d3b71 100644
--- a/models/sipnet/R/met2model.SIPNET.R
+++ b/models/sipnet/R/met2model.SIPNET.R
@@ -38,18 +38,19 @@
#' (will only use the year part of the date)
#' @param end_date the end date of the data to be downloaded
#' (will only use the year part of the date)
+#' @param var.names character: list of variable names to be extracted. Default is NULL.
#' @param overwrite should existing files be overwritten
#' @param verbose should the function be very verbose
#' @param year.fragment the function should ignore whether or not the data is
#' stored as a set of complete years (such as for forecasts).
#' @param ... Additional arguments, currently ignored
#' @author Luke Dramko, Michael Dietze, Alexey Shiklomanov, Rob Kooper
-met2model.SIPNET <- function(in.path, in.prefix, outfolder, start_date, end_date,
+met2model.SIPNET <- function(in.path, in.prefix, outfolder, start_date, end_date, var.names = NULL,
overwrite = FALSE, verbose = FALSE, year.fragment = FALSE, ...) {
-
-
- PEcAn.logger::logger.info("START met2model.SIPNET")
+ if (verbose) {
+ PEcAn.logger::logger.info("START met2model.SIPNET")
+ }
start_date <- as.POSIXlt(start_date, tz = "UTC")
end_date <- as.POSIXlt(end_date, tz = "UTC")
if (year.fragment) { # in.prefix is not guaranteed to contain the file extension.
@@ -90,11 +91,15 @@ met2model.SIPNET <- function(in.path, in.prefix, outfolder, start_date, end_date
enddate = end_date,
dbfile.name = out.file,
stringsAsFactors = FALSE)
- PEcAn.logger::logger.info("internal results")
- PEcAn.logger::logger.info(results)
+ if (verbose) {
+ PEcAn.logger::logger.info("internal results")
+ PEcAn.logger::logger.info(results)
+ }
if (file.exists(out.file.full) && !overwrite) {
- PEcAn.logger::logger.debug("File '", out.file.full, "' already exists, skipping to next file.")
+ if (verbose) {
+ PEcAn.logger::logger.debug("File '", out.file.full, "' already exists, skipping to next file.")
+ }
return(invisible(results))
}
@@ -119,7 +124,9 @@ met2model.SIPNET <- function(in.path, in.prefix, outfolder, start_date, end_date
for (year in start_year:end_year) {
skip <- FALSE
- PEcAn.logger::logger.info(year)
+ if (verbose) {
+ PEcAn.logger::logger.info(year)
+ }
diy <- PEcAn.utils::days_in_year(year)
@@ -131,8 +138,12 @@ met2model.SIPNET <- function(in.path, in.prefix, outfolder, start_date, end_date
if (file.exists(old.file)) {
## open netcdf
- nc <- ncdf4::nc_open(old.file)
-
+ nc <- ncdf4::nc_open(old.file)
+ if (!is.null(var.names)) {
+ nc.var.names <- var.names
+ } else {
+ nc.var.names <- names(nc$var)
+ }
## convert time to seconds
sec <- nc$dim$time$vals
sec <- PEcAn.utils::ud_convert(sec, unlist(strsplit(nc$dim$time$units, " "))[1], "seconds")
@@ -155,12 +166,17 @@ met2model.SIPNET <- function(in.path, in.prefix, outfolder, start_date, end_date
Tair <-ncdf4::ncvar_get(nc, "air_temperature") ## in Kelvin
Tair_C <- PEcAn.utils::ud_convert(Tair, "K", "degC")
Qair <-ncdf4::ncvar_get(nc, "specific_humidity") #humidity (kg/kg)
- ws <- try(ncdf4::ncvar_get(nc, "wind_speed"))
- if (!is.numeric(ws)) {
+
+ # if we have wind speed.
+ if ("wind_speed" %in% nc.var.names) {
+ ws <- ncdf4::ncvar_get(nc, "wind_speed")
+ } else {
U <- ncdf4::ncvar_get(nc, "eastward_wind")
V <- ncdf4::ncvar_get(nc, "northward_wind")
ws <- sqrt(U ^ 2 + V ^ 2)
- PEcAn.logger::logger.info("wind_speed absent; calculated from eastward_wind and northward_wind")
+ if (verbose) {
+ PEcAn.logger::logger.info("wind_speed absent; calculated from eastward_wind and northward_wind")
+ }
}
Rain <- ncdf4::ncvar_get(nc, "precipitation_flux")
@@ -169,32 +185,42 @@ met2model.SIPNET <- function(in.path, in.prefix, outfolder, start_date, end_date
SW <- ncdf4::ncvar_get(nc, "surface_downwelling_shortwave_flux_in_air") ## in W/m2
- PAR <- try(ncdf4::ncvar_get(nc, "surface_downwelling_photosynthetic_photon_flux_in_air")) ## in umol/m2/s
- if (!is.numeric(PAR)) {
+ # if we have PAR.
+ if ("surface_downwelling_photosynthetic_photon_flux_in_air" %in% nc.var.names) {
+ PAR <- ncdf4::ncvar_get(nc, "surface_downwelling_photosynthetic_photon_flux_in_air")
+ } else {
PAR <- PEcAn.utils::ud_convert(PEcAn.data.atmosphere::sw2ppfd(SW), "umol ", "mol")
- PEcAn.logger::logger.info("surface_downwelling_photosynthetic_photon_flux_in_air absent; PAR set to SW * 0.45")
+ if (verbose) {
+ PEcAn.logger::logger.info("surface_downwelling_photosynthetic_photon_flux_in_air absent; PAR set to SW * 0.45")
+ }
}
- soilT <- try(ncdf4::ncvar_get(nc, "soil_temperature"))
- if (!is.numeric(soilT)) {
+ # if we have soil temperature.
+ if ("soil_temperature" %in% nc.var.names) {
+ soilT <- ncdf4::ncvar_get(nc, "soil_temperature")
+ soilT <- PEcAn.utils::ud_convert(soilT, "K", "degC")
+ } else {
# approximation borrowed from SIPNET CRUNCEP preprocessing's tsoil.py
tau <- 15 * tstep
filt <- exp(-(1:length(Tair)) / tau)
filt <- (filt / sum(filt))
soilT <- stats::convolve(Tair, filt)
soilT <- PEcAn.utils::ud_convert(soilT, "K", "degC")
- PEcAn.logger::logger.info("soil_temperature absent; soilT approximated from Tair")
- } else {
- soilT <- PEcAn.utils::ud_convert(soilT, "K", "degC")
+ if (verbose) {
+ PEcAn.logger::logger.info("soil_temperature absent; soilT approximated from Tair")
+ }
}
SVP <- PEcAn.utils::ud_convert(PEcAn.data.atmosphere::get.es(Tair_C), "millibar", "Pa") ## Saturation vapor pressure
- VPD <- try(ncdf4::ncvar_get(nc, "water_vapor_saturation_deficit")) ## in Pa
- if (!is.numeric(VPD)) {
-
+
+ # if we have VPD.
+ if ("water_vapor_saturation_deficit" %in% nc.var.names) {
+ VPD <- ncdf4::ncvar_get(nc, "water_vapor_saturation_deficit")
+ } else {
VPD <- SVP * (1 - PEcAn.data.atmosphere::qair2rh(Qair, Tair_C, press = press/100))
-
- PEcAn.logger::logger.info("water_vapor_saturation_deficit absent; VPD calculated from Qair, Tair, and SVP (saturation vapor pressure) ")
+ if (verbose) {
+ PEcAn.logger::logger.info("water_vapor_saturation_deficit absent; VPD calculated from Qair, Tair, and SVP (saturation vapor pressure) ")
+ }
}
e_a <- SVP - VPD
VPDsoil <- PEcAn.utils::ud_convert(PEcAn.data.atmosphere::get.es(soilT), "millibar", "Pa") *
@@ -202,7 +228,9 @@ met2model.SIPNET <- function(in.path, in.prefix, outfolder, start_date, end_date
ncdf4::nc_close(nc)
} else {
- PEcAn.logger::logger.info("Skipping to next year")
+ if (verbose) {
+ PEcAn.logger::logger.info("Skipping to next year")
+ }
next
}
diff --git a/models/sipnet/R/model2netcdf.SIPNET.R b/models/sipnet/R/model2netcdf.SIPNET.R
index ddc2eb14884..5d7324ef6dc 100644
--- a/models/sipnet/R/model2netcdf.SIPNET.R
+++ b/models/sipnet/R/model2netcdf.SIPNET.R
@@ -38,41 +38,6 @@ mergeNC <- function(
invisible(outfile)
}
-#--------------------------------------------------------------------------------------------------#
-##'
-##' Convert SIPNET DOY to datetime
-##'
-##' @param sipnet_tval vector of SIPNET DOY values
-##' @param base_year base year to calculate datetime from DOY
-##' @param base_month reference month for converting from DOY to datetime
-##' @param force_cf force output to follow CF convention. Default FALSE
-##'
-##' @export
-##'
-##' @author Alexey Shiklomanov, Shawn Serbin
-##'
-sipnet2datetime <- function(sipnet_tval, base_year, base_month = 1,
- force_cf = FALSE) {
- base_date <- ISOdatetime(base_year, base_month, 1,
- 0, 0, 0, "UTC")
- base_date_str <- strftime(base_date, "%F %T %z", tz = "UTC")
- if (force_cf) {
- is_cf <- TRUE
- } else {
- # HACK: Determine heuristically
- # Is CF if first time step is zero
- is_cf <- sipnet_tval[[1]] == 0
- }
-
- if (is_cf) {
- cfval <- sipnet_tval
- } else {
- cfval <- sipnet_tval - 1
- }
-
- PEcAn.utils::cf2datetime(cfval, paste("days since", base_date_str))
-}
-
#--------------------------------------------------------------------------------------------------#
##' Convert SIPNET output to netCDF
##'
@@ -93,12 +58,11 @@ sipnet2datetime <- function(sipnet_tval, base_year, base_month = 1,
##' @author Shawn Serbin, Michael Dietze
model2netcdf.SIPNET <- function(outdir, sitelat, sitelon, start_date, end_date, delete.raw = FALSE, revision, prefix = "sipnet.out",
overwrite = FALSE, conflict = FALSE) {
-
### Read in model output in SIPNET format
sipnet_out_file <- file.path(outdir, prefix)
sipnet_output <- utils::read.table(sipnet_out_file, header = T, skip = 1, sep = "")
#sipnet_output_dims <- dim(sipnet_output)
-
+
### Determine number of years and output timestep
#start.day <- sipnet_output$day[1]
num_years <- length(unique(sipnet_output$year))
@@ -106,22 +70,22 @@ model2netcdf.SIPNET <- function(outdir, sitelat, sitelon, start_date, end_date,
# get all years that we want data from
year_seq <- seq(lubridate::year(start_date), lubridate::year(end_date))
-
+
# check that specified years and output years match
if (!all(year_seq %in% simulation_years)) {
PEcAn.logger::logger.severe("Years selected for model run and SIPNET output years do not match ")
}
-
+
# get number of model timesteps per day
# outday is the number of time steps in a day - for example 6 hours would have out_day of 4
-
- out_day <- sum(
- sipnet_output$year == simulation_years[1] &
- sipnet_output$day == unique(sipnet_output$day)[1],
- na.rm = TRUE
- ) # switched to day 2 in case first day is partial
-
-
+
+ out_day <- sum(
+ sipnet_output$year == simulation_years[1] &
+ sipnet_output$day == unique(sipnet_output$day)[1],
+ na.rm = TRUE
+ ) # switched to day 2 in case first day is partial
+
+
timestep.s <- 86400 / out_day
@@ -138,29 +102,30 @@ model2netcdf.SIPNET <- function(outdir, sitelat, sitelon, start_date, end_date,
file.rename(file.path(outdir, paste(y, "nc", sep = ".")), file.path(outdir, "previous.nc"))
}
print(paste("---- Processing year: ", y)) # turn on for debugging
-
+
## Subset data for processing
sub.sipnet.output <- subset(sipnet_output, sipnet_output$year == y)
+
+ raw_time <- sub.sipnet.output[["time"]] # decimal hours (eg 13.75 = 1:45 PM)
+ doy <- sub.sipnet.output[["day"]] # day of year, not of month
+ hr <- floor(raw_time)
+ minsec <- PEcAn.utils::ud_convert(raw_time - hr, "hour", "min")
+ min <- floor(minsec)
+ sec <- PEcAn.utils::ud_convert(minsec - min, "minute", "second")
+ sub_dates <- strptime(
+ paste(y, doy, hr, min, sec),
+ "%Y %j %H %M %S",
+ tz = "UTC"
+ )
+ sub_dates_cf <- PEcAn.utils::datetime2cf(
+ sub_dates,
+ paste0("days since ", y, "-01-01"),
+ tz = "UTC"
+ )
+
sub.sipnet.output.dims <- dim(sub.sipnet.output)
dayfrac <- 1 / out_day
- step <- utils::head(seq(0, 1, by = dayfrac), -1) ## probably dont want to use
- ## hard-coded "step" because
- ## leap years may not contain
- ## all "steps", or
- ## if model run doesnt start
- ## at 00:00:00
- # try to determine if DOY is CF compliant (i.e. 0 based index) or not (1 base index)
- pecan_start_doy <- PEcAn.utils::datetime2cf(start_date, paste0("days since ",lubridate::year(start_date),"-01-01"),
- tz = "UTC")
- tvals <- sub.sipnet.output[["day"]] + sub.sipnet.output[["time"]] / 24
- if (sub.sipnet.output[["day"]][1]-pecan_start_doy==1) {
- sub_dates <- sipnet2datetime(tvals, y, force_cf = FALSE)
- } else {
- sub_dates <- sipnet2datetime(tvals, y, force_cf = TRUE)
- }
- sub_dates_cf <- PEcAn.utils::datetime2cf(sub_dates, paste0("days since ",paste0(y,"-01-01")))
-
# create netCDF time.bounds variable
bounds <- array(data=NA, dim=c(length(sub_dates_cf),2))
bounds[,1] <- sub_dates_cf
@@ -172,17 +137,17 @@ model2netcdf.SIPNET <- function(outdir, sitelat, sitelon, start_date, end_date,
output <- list(
"GPP" = (sub.sipnet.output$gpp * 0.001) / timestep.s, # GPP in kgC/m2/s
"NPP" = (sub.sipnet.output$gpp * 0.001) / timestep.s - ((sub.sipnet.output$rAboveground *
- 0.001) / timestep.s + (sub.sipnet.output$rRoot * 0.001) / timestep.s), # NPP in kgC/m2/s. Post SIPNET calculation
+ 0.001) / timestep.s + (sub.sipnet.output$rRoot * 0.001) / timestep.s), # NPP in kgC/m2/s. Post SIPNET calculation
"TotalResp" = (sub.sipnet.output$rtot * 0.001) / timestep.s, # Total Respiration in kgC/m2/s
"AutoResp" = (sub.sipnet.output$rAboveground * 0.001) / timestep.s + (sub.sipnet.output$rRoot *
- 0.001) / timestep.s, # Autotrophic Respiration in kgC/m2/s
+ 0.001) / timestep.s, # Autotrophic Respiration in kgC/m2/s
"HeteroResp" = ((sub.sipnet.output$rSoil - sub.sipnet.output$rRoot) * 0.001) / timestep.s, # Heterotrophic Respiration in kgC/m2/s
"SoilResp" = (sub.sipnet.output$rSoil * 0.001) / timestep.s, # Soil Respiration in kgC/m2/s
"NEE" = (sub.sipnet.output$nee * 0.001) / timestep.s, # NEE in kgC/m2/s
"AbvGrndWood" = (sub.sipnet.output$plantWoodC * 0.001), # Above ground wood kgC/m2
"leaf_carbon_content" = (sub.sipnet.output$plantLeafC * 0.001), # Leaf C kgC/m2
"TotLivBiom" = (sub.sipnet.output$plantWoodC * 0.001) + (sub.sipnet.output$plantLeafC * 0.001) +
- (sub.sipnet.output$coarseRootC + sub.sipnet.output$fineRootC) * 0.001, # Total living C kgC/m2
+ (sub.sipnet.output$coarseRootC + sub.sipnet.output$fineRootC) * 0.001, # Total living C kgC/m2
"TotSoilCarb" = (sub.sipnet.output$soil * 0.001) + (sub.sipnet.output$litter * 0.001) # Total soil C kgC/m2
)
if (revision == "unk") {
@@ -193,7 +158,7 @@ model2netcdf.SIPNET <- function(outdir, sitelat, sitelon, start_date, end_date,
## latent heat of vaporization is not constant and it varies slightly with temperature, get.lv() returns 2.5e6 J kg-1 by default
output[["Qle"]] <- (sub.sipnet.output$npp * 10 * PEcAn.data.atmosphere::get.lv()) / timestep.s # Qle W/m2
} else {
- output[["Qle"]] <- (sub.sipnet.output$evapotranspiration * 10 * PEcAn.data.atmosphere::get.lv()) / timestep.s # Qle W/m2
+ output[["Qle"]] <- (sub.sipnet.output$evapotranspiration * 10 * PEcAn.data.atmosphere::get.lv()) / timestep.s # Qle W/m2
}
output[["Transp"]] <- (sub.sipnet.output$fluxestranspiration * 10) / timestep.s # Transpiration kgW/m2/s
output[["SoilMoist"]] <- (sub.sipnet.output$soilWater * 10) # Soil moisture kgW/m2
@@ -203,8 +168,8 @@ model2netcdf.SIPNET <- function(outdir, sitelat, sitelon, start_date, end_date,
output[["litter_mass_content_of_water"]] <- (sub.sipnet.output$litterWater * 10) # Litter water kgW/m2
#calculate LAI for standard output
param <- utils::read.table(file.path(gsub(pattern = "/out/",
- replacement = "/run/", x = outdir),
- "sipnet.param"), stringsAsFactors = FALSE)
+ replacement = "/run/", x = outdir),
+ "sipnet.param"), stringsAsFactors = FALSE)
id <- which(param[, 1] == "leafCSpWt")
leafC <- 0.48
SLA <- 1000 / param[id, 2] #SLA, m2/kgC
@@ -217,11 +182,11 @@ model2netcdf.SIPNET <- function(outdir, sitelat, sitelon, start_date, end_date,
# ******************** Declare netCDF variables ********************#
t <- ncdf4::ncdim_def(name = "time",
- longname = "time",
- units = paste0("days since ", y, "-01-01 00:00:00"),
- vals = sub_dates_cf,
- calendar = "standard",
- unlim = TRUE)
+ longname = "time",
+ units = paste0("days since ", y, "-01-01 00:00:00"),
+ vals = sub_dates_cf,
+ calendar = "standard",
+ unlim = TRUE)
lat <- ncdf4::ncdim_def("lat", "degrees_north", vals = as.numeric(sitelat),
longname = "station_latitude")
lon <- ncdf4::ncdim_def("lon", "degrees_east", vals = as.numeric(sitelon),
@@ -232,12 +197,12 @@ model2netcdf.SIPNET <- function(outdir, sitelat, sitelon, start_date, end_date,
vals = 1:2, units="")
## ***** Need to dynamically update the UTC offset here *****
-
+
for (i in seq_along(output)) {
if (length(output[[i]]) == 0)
output[[i]] <- rep(-999, length(t$vals))
}
-
+
# ******************** Declare netCDF variables ********************#
mstmipvar <- PEcAn.utils::mstmipvar
nc_var <- list(
@@ -264,12 +229,12 @@ model2netcdf.SIPNET <- function(outdir, sitelat, sitelon, start_date, end_date,
"fine_root_carbon_content" = PEcAn.utils::to_ncvar("fine_root_carbon_content", dims),
"coarse_root_carbon_content" = PEcAn.utils::to_ncvar("coarse_root_carbon_content", dims),
"GWBI" = ncdf4::ncvar_def("GWBI", units = "kg C m-2", dim = list(lon, lat, t), missval = -999,
- longname = "Gross Woody Biomass Increment"),
+ longname = "Gross Woody Biomass Increment"),
"AGB" = ncdf4::ncvar_def("AGB", units = "kg C m-2", dim = list(lon, lat, t), missval = -999,
- longname = "Total aboveground biomass"),
+ longname = "Total aboveground biomass"),
"time_bounds" = ncdf4::ncvar_def(name="time_bounds", units='',
- longname = "history time interval endpoints", dim=list(time_interval,time = t),
- prec = "double")
+ longname = "history time interval endpoints", dim=list(time_interval,time = t),
+ prec = "double")
)
# ******************** Create netCDF and output variables ********************#
@@ -277,12 +242,9 @@ model2netcdf.SIPNET <- function(outdir, sitelat, sitelon, start_date, end_date,
if(conflicted & conflict){
nc <- ncdf4::nc_create(file.path(outdir, paste("current", "nc", sep = ".")), nc_var)
ncdf4::ncatt_put(nc, "time", "bounds", "time_bounds", prec=NA)
- varfile <- file(file.path(outdir, paste(y, "nc", "var", sep = ".")), "w")
for (key in names(nc_var)) {
ncdf4::ncvar_put(nc, nc_var[[key]], output[[key]])
- cat(paste(nc_var[[key]]$name, nc_var[[key]]$longname), file = varfile, sep = "\n")
}
- close(varfile)
ncdf4::nc_close(nc)
#merge nc files of the same year together to enable the assimilation of sub-annual data
@@ -302,16 +264,13 @@ model2netcdf.SIPNET <- function(outdir, sitelat, sitelon, start_date, end_date,
}else{
nc <- ncdf4::nc_create(file.path(outdir, paste(y, "nc", sep = ".")), nc_var)
ncdf4::ncatt_put(nc, "time", "bounds", "time_bounds", prec=NA)
- varfile <- file(file.path(outdir, paste(y, "nc", "var", sep = ".")), "w")
for (i in seq_along(nc_var)) {
ncdf4::ncvar_put(nc, nc_var[[i]], output[[i]])
- cat(paste(nc_var[[i]]$name, nc_var[[i]]$longname), file = varfile, sep = "\n")
}
- close(varfile)
ncdf4::nc_close(nc)
}
} ### End of year loop
-
+
## Delete raw output, if requested
if (delete.raw) {
file.remove(sipnet_out_file)
diff --git a/models/sipnet/R/read_restart.SIPNET.R b/models/sipnet/R/read_restart.SIPNET.R
index d56fb8636b6..0d2ddf3ee9d 100755
--- a/models/sipnet/R/read_restart.SIPNET.R
+++ b/models/sipnet/R/read_restart.SIPNET.R
@@ -139,8 +139,6 @@ read_restart.SIPNET <- function(outdir, runid, stop.time, settings, var.names, p
#remove any remaining NAs from params$restart
params$restart <- stats::na.omit(params$restart)
- print(runid)
-
X_tmp <- list(X = unlist(forecast), params = params)
return(X_tmp)
diff --git a/models/sipnet/R/write.configs.SIPNET.R b/models/sipnet/R/write.configs.SIPNET.R
index dc1dd8bbf30..3fae8216f18 100755
--- a/models/sipnet/R/write.configs.SIPNET.R
+++ b/models/sipnet/R/write.configs.SIPNET.R
@@ -10,6 +10,7 @@
##' @param restart In case this is a continuation of an old simulation. restart needs to be a list with name tags of runid, inputs, new.params (parameters), new.state (initial condition), ensemble.id (ensemble id), start.time and stop.time.See Details.
##' @param spinup currently unused, included for compatibility with other models
##' @export
+##' @importFrom rlang %||%
##' @author Michael Dietze
write.config.SIPNET <- function(defaults, trait.values, settings, run.id, inputs = NULL, IC = NULL,
restart = NULL, spinup = NULL) {
@@ -130,6 +131,17 @@ write.config.SIPNET <- function(defaults, trait.values, settings, run.id, inputs
writeLines(jobsh, con = file.path(settings$rundir, run.id, "job.sh"))
Sys.chmod(file.path(settings$rundir, run.id, "job.sh"))
+
+ ### Copy event file
+ event_file <- inputs$events$path %||% settings$run$inputs$events$path
+ if (!is.null(event_file)) {
+ if (!file.exists(event_file)) {
+ PEcAn.logger::logger.warn("Event file not found at", event_file)
+ }
+ file.copy(event_file, file.path(rundir, "events.in"))
+ }
+
+
### WRITE *.param-spatial
template.paramSpatial <- system.file("template.param-spatial", package = "PEcAn.SIPNET")
file.copy(template.paramSpatial, file.path(settings$rundir, run.id, "sipnet.param-spatial"))
@@ -165,7 +177,7 @@ write.config.SIPNET <- function(defaults, trait.values, settings, run.id, inputs
}
for (pft in seq_along(trait.values)) {
pft.traits <- unlist(trait.values[[pft]])
- pft.names <- names(pft.traits)
+ pft.trait.names <- names(pft.traits)
## Append/replace params specified as constants
constant.traits <- unlist(defaults[[1]]$constants)
@@ -173,10 +185,10 @@ write.config.SIPNET <- function(defaults, trait.values, settings, run.id, inputs
# Replace matches
for (i in seq_along(constant.traits)) {
- ind <- match(constant.names[i], pft.names)
+ ind <- match(constant.names[i], pft.trait.names)
if (is.na(ind)) {
# Add to list
- pft.names <- c(pft.names, constant.names[i])
+ pft.trait.names <- c(pft.trait.names, constant.names[i])
pft.traits <- c(pft.traits, constant.traits[i])
} else {
# Replace existing value
@@ -186,78 +198,84 @@ write.config.SIPNET <- function(defaults, trait.values, settings, run.id, inputs
# Remove NAs. Constants may be specified as NA to request template defaults. Note that it is 'NA'
# (character) not actual NA due to being read in as XML
- pft.names <- pft.names[pft.traits != "NA" & !is.na(pft.traits)]
+ pft.trait.names <- pft.trait.names[pft.traits != "NA" & !is.na(pft.traits)]
pft.traits <- pft.traits[pft.traits != "NA" & !is.na(pft.traits)]
pft.traits <- as.numeric(pft.traits)
-
+
# Leaf carbon concentration
- leafC <- 0.48 #0.5
- if ("leafC" %in% pft.names) {
- leafC <- pft.traits[which(pft.names == "leafC")]
+ if ("leafC" %in% pft.trait.names) {
+ leafC <- pft.traits[pft.trait.names == "leafC"] |>
+ PEcAn.utils::ud_convert("percent", "1") # percentage to fraction
id <- which(param[, 1] == "cFracLeaf")
- param[id, 2] <- leafC * 0.01 # convert to percentage from 0 to 1
+ param[id, 2] <- leafC
+ } else {
+ leafC <- 0.48 # Fixed value if not available, because it is used in downstream calculations
}
-
+
# Specific leaf area converted to SLW
- SLA <- NA
+ # leafCSpWt [gC/m2 leaf], SLA [m2 leaf/kg leaf], leafC [g C / g leaf]
id <- which(param[, 1] == "leafCSpWt")
- if ("SLA" %in% pft.names) {
- SLA <- pft.traits[which(pft.names == "SLA")]
- param[id, 2] <- 1000 * leafC * 0.01 / SLA
+ if ("SLA" %in% pft.trait.names) {
+ SLA <- pft.traits[which(pft.trait.names == "SLA")]
+ param[id, 2] <- PEcAn.utils::ud_convert(leafC / SLA, "kg/m2", "g/m2")
} else {
- SLA <- 1000 * leafC / param[id, 2]
+ SLA <- PEcAn.utils::ud_convert(leafC / param[id, 2], "m2/g", "m2/kg")
}
-
+
# Maximum photosynthesis
- Amax <- NA
+ # SIPNET: aMax [nmol CO2 / g leaf / sec]
+ # PEcAn: Amax [umol CO2 / m^2 leaf / sec]
id <- which(param[, 1] == "aMax")
- if ("Amax" %in% pft.names) {
- Amax <- pft.traits[which(pft.names == "Amax")]
- param[id, 2] <- Amax * SLA
+ SLA_g <- PEcAn.utils::ud_convert(SLA, "1/kg", "1/g")
+ if ("Amax" %in% pft.trait.names) {
+ Amax_area <- pft.traits[which(pft.trait.names == "Amax")] # [µmol/m2/s]
+ param[id, 2] <- PEcAn.utils::ud_convert(Amax_area * SLA_g, "umol", "nmol") # [nmol/g/s]
} else {
- Amax <- param[id, 2] * SLA
+ amax_mass <- param[id, 2] # [nmol/g/s]
+ Amax_area <- PEcAn.utils::ud_convert(amax_mass / SLA_g, "nmol", "umol") # [umol/m2/s]
}
+
# Daily fraction of maximum photosynthesis
- if ("AmaxFrac" %in% pft.names) {
- param[which(param[, 1] == "aMaxFrac"), 2] <- pft.traits[which(pft.names == "AmaxFrac")]
+ if ("AmaxFrac" %in% pft.trait.names) {
+ param[which(param[, 1] == "aMaxFrac"), 2] <- pft.traits[which(pft.trait.names == "AmaxFrac")]
}
### Canopy extinction coefficiet (k)
- if ("extinction_coefficient" %in% pft.names) {
- param[which(param[, 1] == "attenuation"), 2] <- pft.traits[which(pft.names == "extinction_coefficient")]
+ if ("extinction_coefficient" %in% pft.trait.names) {
+ param[which(param[, 1] == "attenuation"), 2] <- pft.traits[which(pft.trait.names == "extinction_coefficient")]
}
# Leaf respiration rate converted to baseFolRespFrac
- if ("leaf_respiration_rate_m2" %in% pft.names) {
- Rd <- pft.traits[which(pft.names == "leaf_respiration_rate_m2")]
+ if ("leaf_respiration_rate_m2" %in% pft.trait.names) {
+ Rd <- pft.traits[which(pft.trait.names == "leaf_respiration_rate_m2")]
id <- which(param[, 1] == "baseFolRespFrac")
- param[id, 2] <- max(min(Rd/Amax, 1), 0)
+ param[id, 2] <- max(min(Rd / Amax_area, 1), 0)
}
# Low temp threshold for photosynethsis
- if ("Vm_low_temp" %in% pft.names) {
- param[which(param[, 1] == "psnTMin"), 2] <- pft.traits[which(pft.names == "Vm_low_temp")]
+ if ("Vm_low_temp" %in% pft.trait.names) {
+ param[which(param[, 1] == "psnTMin"), 2] <- pft.traits[which(pft.trait.names == "Vm_low_temp")]
}
# Opt. temp for photosynthesis
- if ("psnTOpt" %in% pft.names) {
- param[which(param[, 1] == "psnTOpt"), 2] <- pft.traits[which(pft.names == "psnTOpt")]
+ if ("psnTOpt" %in% pft.trait.names) {
+ param[which(param[, 1] == "psnTOpt"), 2] <- pft.traits[which(pft.trait.names == "psnTOpt")]
}
# Growth respiration factor (fraction of GPP)
- if ("growth_resp_factor" %in% pft.names) {
- param[which(param[, 1] == "growthRespFrac"), 2] <- pft.traits[which(pft.names == "growth_resp_factor")]
+ if ("growth_resp_factor" %in% pft.trait.names) {
+ param[which(param[, 1] == "growthRespFrac"), 2] <- pft.traits[which(pft.trait.names == "growth_resp_factor")]
}
### !!! NOT YET USED
#Jmax = NA
- #if("Jmax" %in% pft.names){
- # Jmax = pft.traits[which(pft.names == 'Jmax')]
+ #if("Jmax" %in% pft.trait.names){
+ # Jmax = pft.traits[which(pft.trait.names == 'Jmax')]
### Using Jmax scaled to 25 degC. Maybe not be the best approach
#}
#alpha = NA
- #if("quantum_efficiency" %in% pft.names){
- # alpha = pft.traits[which(pft.names == 'quantum_efficiency')]
+ #if("quantum_efficiency" %in% pft.trait.names){
+ # alpha = pft.traits[which(pft.trait.names == 'quantum_efficiency')]
#}
# Half saturation of PAR. PAR at which photosynthesis occurs at 1/2 theoretical maximum (Einsteins * m^-2 ground area * day^-1).
@@ -271,90 +289,90 @@ write.config.SIPNET <- function(defaults, trait.values, settings, run.id, inputs
# Half saturation of PAR. PAR at which photosynthesis occurs at 1/2 theoretical maximum (Einsteins * m^-2 ground area * day^-1).
# Temporary implementation until above is working.
- if ("half_saturation_PAR" %in% pft.names) {
- param[which(param[, 1] == "halfSatPar"), 2] <- pft.traits[which(pft.names == "half_saturation_PAR")]
+ if ("half_saturation_PAR" %in% pft.trait.names) {
+ param[which(param[, 1] == "halfSatPar"), 2] <- pft.traits[which(pft.trait.names == "half_saturation_PAR")]
}
# Ball-berry slomatal slope parameter m
- if ("stomatal_slope.BB" %in% pft.names) {
+ if ("stomatal_slope.BB" %in% pft.trait.names) {
id <- which(param[, 1] == "m_ballBerry")
- param[id, 2] <- pft.traits[which(pft.names == "stomatal_slope.BB")]
+ param[id, 2] <- pft.traits[which(pft.trait.names == "stomatal_slope.BB")]
}
# Slope of VPD–photosynthesis relationship. dVpd = 1 - dVpdSlope * vpd^dVpdExp
- if ("dVPDSlope" %in% pft.names) {
- param[which(param[, 1] == "dVpdSlope"), 2] <- pft.traits[which(pft.names == "dVPDSlope")]
+ if ("dVPDSlope" %in% pft.trait.names) {
+ param[which(param[, 1] == "dVpdSlope"), 2] <- pft.traits[which(pft.trait.names == "dVPDSlope")]
}
# VPD–water use efficiency relationship. dVpd = 1 - dVpdSlope * vpd^dVpdExp
- if ("dVpdExp" %in% pft.names) {
- param[which(param[, 1] == "dVpdExp"), 2] <- pft.traits[which(pft.names == "dVpdExp")]
+ if ("dVpdExp" %in% pft.trait.names) {
+ param[which(param[, 1] == "dVpdExp"), 2] <- pft.traits[which(pft.trait.names == "dVpdExp")]
}
# Leaf turnover rate average turnover rate of leaves, in fraction per day NOTE: read in as
# per-year rate!
- if ("leaf_turnover_rate" %in% pft.names) {
- param[which(param[, 1] == "leafTurnoverRate"), 2] <- pft.traits[which(pft.names == "leaf_turnover_rate")]
+ if ("leaf_turnover_rate" %in% pft.trait.names) {
+ param[which(param[, 1] == "leafTurnoverRate"), 2] <- pft.traits[which(pft.trait.names == "leaf_turnover_rate")]
}
- if ("wueConst" %in% pft.names) {
- param[which(param[, 1] == "wueConst"), 2] <- pft.traits[which(pft.names == "wueConst")]
+ if ("wueConst" %in% pft.trait.names) {
+ param[which(param[, 1] == "wueConst"), 2] <- pft.traits[which(pft.trait.names == "wueConst")]
}
# vegetation respiration Q10.
- if ("veg_respiration_Q10" %in% pft.names) {
- param[which(param[, 1] == "vegRespQ10"), 2] <- pft.traits[which(pft.names == "veg_respiration_Q10")]
+ if ("veg_respiration_Q10" %in% pft.trait.names) {
+ param[which(param[, 1] == "vegRespQ10"), 2] <- pft.traits[which(pft.trait.names == "veg_respiration_Q10")]
}
# Base vegetation respiration. vegetation maintenance respiration at 0 degrees C (g C respired * g^-1 plant C * day^-1)
# NOTE: only counts plant wood C - leaves handled elsewhere (both above and below-ground: assumed for now to have same resp. rate)
# NOTE: read in as per-year rate!
- if ("stem_respiration_rate" %in% pft.names) {
+ if ("stem_respiration_rate" %in% pft.trait.names) {
vegRespQ10 <- param[which(param[, 1] == "vegRespQ10"), 2]
id <- which(param[, 1] == "baseVegResp")
## Convert from umols CO2 kg s-1 to gC g day-1
- stem_resp_g <- (((pft.traits[which(pft.names == "stem_respiration_rate")]) *
+ stem_resp_g <- (((pft.traits[which(pft.trait.names == "stem_respiration_rate")]) *
(44.0096 / 1e+06) * (12.01 / 44.0096)) / 1000) * 86400
## use Q10 to convert stem resp from reference of 25C to 0C param[id,2] =
- ## pft.traits[which(pft.names=='stem_respiration_rate')]*vegRespQ10^(-25/10)
+ ## pft.traits[which(pft.trait.names=='stem_respiration_rate')]*vegRespQ10^(-25/10)
param[id, 2] <- stem_resp_g * vegRespQ10^(-25/10)
}
# turnover of fine roots (per year rate)
- if ("root_turnover_rate" %in% pft.names) {
+ if ("root_turnover_rate" %in% pft.trait.names) {
id <- which(param[, 1] == "fineRootTurnoverRate")
- param[id, 2] <- pft.traits[which(pft.names == "root_turnover_rate")]
+ param[id, 2] <- pft.traits[which(pft.trait.names == "root_turnover_rate")]
}
# fine root respiration Q10
- if ("fine_root_respiration_Q10" %in% pft.names) {
- param[which(param[, 1] == "fineRootQ10"), 2] <- pft.traits[which(pft.names == "fine_root_respiration_Q10")]
+ if ("fine_root_respiration_Q10" %in% pft.trait.names) {
+ param[which(param[, 1] == "fineRootQ10"), 2] <- pft.traits[which(pft.trait.names == "fine_root_respiration_Q10")]
}
# base respiration rate of fine roots (per year rate)
- if ("root_respiration_rate" %in% pft.names) {
+ if ("root_respiration_rate" %in% pft.trait.names) {
fineRootQ10 <- param[which(param[, 1] == "fineRootQ10"), 2]
id <- which(param[, 1] == "baseFineRootResp")
## Convert from umols CO2 kg s-1 to gC g day-1
- root_resp_rate_g <- (((pft.traits[which(pft.names == "root_respiration_rate")]) *
+ root_resp_rate_g <- (((pft.traits[which(pft.trait.names == "root_respiration_rate")]) *
(44.0096/1e+06) * (12.01 / 44.0096)) / 1000) * 86400
## use Q10 to convert stem resp from reference of 25C to 0C param[id,2] =
- ## pft.traits[which(pft.names=='root_respiration_rate')]*fineRootQ10^(-25/10)
+ ## pft.traits[which(pft.trait.names=='root_respiration_rate')]*fineRootQ10^(-25/10)
param[id, 2] <- root_resp_rate_g * fineRootQ10 ^ (-25 / 10)
}
# coarse root respiration Q10
- if ("coarse_root_respiration_Q10" %in% pft.names) {
- param[which(param[, 1] == "coarseRootQ10"), 2] <- pft.traits[which(pft.names == "coarse_root_respiration_Q10")]
+ if ("coarse_root_respiration_Q10" %in% pft.trait.names) {
+ param[which(param[, 1] == "coarseRootQ10"), 2] <- pft.traits[which(pft.trait.names == "coarse_root_respiration_Q10")]
}
# WARNING: fineRootAllocation + woodAllocation + leafAllocation isn't supposed to exceed 1
# see sipnet.c code L2005 :
# fluxes.coarseRootCreation=(1-params.leafAllocation-params.fineRootAllocation-params.woodAllocation)*npp;
# priors can be chosen accordingly, and SIPNET doesn't really crash when sum>1 but better keep an eye
alloc_params <- c("root_allocation_fraction", "wood_allocation_fraction", "leaf_allocation_fraction")
- if (all(alloc_params %in% pft.names)) {
- sum_alloc <- pft.traits[which(pft.names == "root_allocation_fraction")] +
- pft.traits[which(pft.names == "wood_allocation_fraction")] +
- pft.traits[which(pft.names == "leaf_allocation_fraction")]
+ if (all(alloc_params %in% pft.trait.names)) {
+ sum_alloc <- pft.traits[which(pft.trait.names == "root_allocation_fraction")] +
+ pft.traits[which(pft.trait.names == "wood_allocation_fraction")] +
+ pft.traits[which(pft.trait.names == "leaf_allocation_fraction")]
if(sum_alloc > 1){
# I want this to be a severe for now, lateer can be changed back to warning
PEcAn.logger::logger.warn("Sum of allocation parameters exceeds 1 for runid = ", run.id,
@@ -364,52 +382,52 @@ write.config.SIPNET <- function(defaults, trait.values, settings, run.id, inputs
# fineRootAllocation
- if ("root_allocation_fraction" %in% pft.names) {
- param[which(param[, 1] == "fineRootAllocation"), 2] <- pft.traits[which(pft.names == "root_allocation_fraction")]
+ if ("root_allocation_fraction" %in% pft.trait.names) {
+ param[which(param[, 1] == "fineRootAllocation"), 2] <- pft.traits[which(pft.trait.names == "root_allocation_fraction")]
}
# woodAllocation
- if ("wood_allocation_fraction" %in% pft.names) {
- param[which(param[, 1] == "woodAllocation"), 2] <- pft.traits[which(pft.names == "wood_allocation_fraction")]
+ if ("wood_allocation_fraction" %in% pft.trait.names) {
+ param[which(param[, 1] == "woodAllocation"), 2] <- pft.traits[which(pft.trait.names == "wood_allocation_fraction")]
}
# leafAllocation
- if ("leaf_allocation_fraction" %in% pft.names) {
- param[which(param[, 1] == "leafAllocation"), 2] <- pft.traits[which(pft.names == "leaf_allocation_fraction")]
+ if ("leaf_allocation_fraction" %in% pft.trait.names) {
+ param[which(param[, 1] == "leafAllocation"), 2] <- pft.traits[which(pft.trait.names == "leaf_allocation_fraction")]
}
# wood_turnover_rate
- if ("wood_turnover_rate" %in% pft.names) {
- param[which(param[, 1] == "woodTurnoverRate"), 2] <- pft.traits[which(pft.names == "wood_turnover_rate")]
+ if ("wood_turnover_rate" %in% pft.trait.names) {
+ param[which(param[, 1] == "woodTurnoverRate"), 2] <- pft.traits[which(pft.trait.names == "wood_turnover_rate")]
}
### ----- Soil parameters soil respiration Q10.
- if ("soil_respiration_Q10" %in% pft.names) {
- param[which(param[, 1] == "soilRespQ10"), 2] <- pft.traits[which(pft.names == "soil_respiration_Q10")]
+ if ("soil_respiration_Q10" %in% pft.trait.names) {
+ param[which(param[, 1] == "soilRespQ10"), 2] <- pft.traits[which(pft.trait.names == "soil_respiration_Q10")]
}
# soil respiration rate -- units = 1/year, reference = 0C
- if ("som_respiration_rate" %in% pft.names) {
- param[which(param[, 1] == "baseSoilResp"), 2] <- pft.traits[which(pft.names == "som_respiration_rate")]
+ if ("som_respiration_rate" %in% pft.trait.names) {
+ param[which(param[, 1] == "baseSoilResp"), 2] <- pft.traits[which(pft.trait.names == "som_respiration_rate")]
}
# litterBreakdownRate
- if ("turn_over_time" %in% pft.names) {
+ if ("turn_over_time" %in% pft.trait.names) {
id <- which(param[, 1] == "litterBreakdownRate")
- param[id, 2] <- pft.traits[which(pft.names == "turn_over_time")]
+ param[id, 2] <- pft.traits[which(pft.trait.names == "turn_over_time")]
}
# frozenSoilEff
- if ("frozenSoilEff" %in% pft.names) {
- param[which(param[, 1] == "frozenSoilEff"), 2] <- pft.traits[which(pft.names == "frozenSoilEff")]
+ if ("frozenSoilEff" %in% pft.trait.names) {
+ param[which(param[, 1] == "frozenSoilEff"), 2] <- pft.traits[which(pft.trait.names == "frozenSoilEff")]
}
# frozenSoilFolREff
- if ("frozenSoilFolREff" %in% pft.names) {
- param[which(param[, 1] == "frozenSoilFolREff"), 2] <- pft.traits[which(pft.names == "frozenSoilFolREff")]
+ if ("frozenSoilFolREff" %in% pft.trait.names) {
+ param[which(param[, 1] == "frozenSoilFolREff"), 2] <- pft.traits[which(pft.trait.names == "frozenSoilFolREff")]
}
# soilWHC
- if ("soilWHC" %in% pft.names) {
- param[which(param[, 1] == "soilWHC"), 2] <- pft.traits[which(pft.names == "soilWHC")]
+ if ("soilWHC" %in% pft.trait.names) {
+ param[which(param[, 1] == "soilWHC"), 2] <- pft.traits[which(pft.trait.names == "soilWHC")]
}
# 10/31/2017 IF: these were the two assumptions used in the emulator paper in order to reduce dimensionality
# These results in improved winter soil respiration values
@@ -422,38 +440,38 @@ write.config.SIPNET <- function(defaults, trait.values, settings, run.id, inputs
param[which(param[, 1] == "baseSoilRespCold"), 2] <- param[which(param[, 1] == "baseSoilResp"), 2] * 0.25
}
- if ("immedEvapFrac" %in% pft.names) {
- param[which(param[, 1] == "immedEvapFrac"), 2] <- pft.traits[which(pft.names == "immedEvapFrac")]
+ if ("immedEvapFrac" %in% pft.trait.names) {
+ param[which(param[, 1] == "immedEvapFrac"), 2] <- pft.traits[which(pft.trait.names == "immedEvapFrac")]
}
- if ("leafWHC" %in% pft.names) {
- param[which(param[, 1] == "leafPoolDepth"), 2] <- pft.traits[which(pft.names == "leafWHC")]
+ if ("leafWHC" %in% pft.trait.names) {
+ param[which(param[, 1] == "leafPoolDepth"), 2] <- pft.traits[which(pft.trait.names == "leafWHC")]
}
- if ("waterRemoveFrac" %in% pft.names) {
- param[which(param[, 1] == "waterRemoveFrac"), 2] <- pft.traits[which(pft.names == "waterRemoveFrac")]
+ if ("waterRemoveFrac" %in% pft.trait.names) {
+ param[which(param[, 1] == "waterRemoveFrac"), 2] <- pft.traits[which(pft.trait.names == "waterRemoveFrac")]
}
- if ("fastFlowFrac" %in% pft.names) {
- param[which(param[, 1] == "fastFlowFrac"), 2] <- pft.traits[which(pft.names == "fastFlowFrac")]
+ if ("fastFlowFrac" %in% pft.trait.names) {
+ param[which(param[, 1] == "fastFlowFrac"), 2] <- pft.traits[which(pft.trait.names == "fastFlowFrac")]
}
- if ("rdConst" %in% pft.names) {
- param[which(param[, 1] == "rdConst"), 2] <- pft.traits[which(pft.names == "rdConst")]
+ if ("rdConst" %in% pft.trait.names) {
+ param[which(param[, 1] == "rdConst"), 2] <- pft.traits[which(pft.trait.names == "rdConst")]
}
### ----- Phenology parameters GDD leaf on
- if ("GDD" %in% pft.names) {
- param[which(param[, 1] == "gddLeafOn"), 2] <- pft.traits[which(pft.names == "GDD")]
+ if ("GDD" %in% pft.trait.names) {
+ param[which(param[, 1] == "gddLeafOn"), 2] <- pft.traits[which(pft.trait.names == "GDD")]
}
# Fraction of leaf fall per year (should be 1 for decid)
- if ("fracLeafFall" %in% pft.names) {
- param[which(param[, 1] == "fracLeafFall"), 2] <- pft.traits[which(pft.names == "fracLeafFall")]
+ if ("fracLeafFall" %in% pft.trait.names) {
+ param[which(param[, 1] == "fracLeafFall"), 2] <- pft.traits[which(pft.trait.names == "fracLeafFall")]
}
# Leaf growth. Amount of C added to the leaf during the greenup period
- if ("leafGrowth" %in% pft.names) {
- param[which(param[, 1] == "leafGrowth"), 2] <- pft.traits[which(pft.names == "leafGrowth")]
+ if ("leafGrowth" %in% pft.trait.names) {
+ param[which(param[, 1] == "leafGrowth"), 2] <- pft.traits[which(pft.trait.names == "leafGrowth")]
}
#update LeafOnday and LeafOffDay
@@ -469,15 +487,46 @@ write.config.SIPNET <- function(defaults, trait.values, settings, run.id, inputs
leaf_pheno_path <- settings$run$inputs$leaf_phenology$path
if (!is.null(leaf_pheno_path)) {
##read data
- leafphdata <- utils::read.csv(leaf_pheno_path)
+ leafphdata <- utils::read.csv(leaf_pheno_path) # leaf phenology data from 2001-01-01 to present
leafOnDay <- leafphdata$leafonday[leafphdata$year == obs_year_start
& leafphdata$site_id == settings$run$site$id]
leafOffDay <- leafphdata$leafoffday[leafphdata$year == obs_year_start
& leafphdata$site_id == settings$run$site$id]
- if (!is.na(leafOnDay)) {
- param[which(param[, 1] == "leafOnDay"), 2] <- leafOnDay
+ # Handle missing phenology: NA values, or years absent from the file
+ if (length(leafOnDay) == 0 || is.na(leafOnDay)) {
+ # 1. Try to calculate the mean across all available years for this site
+ site_phenology_on <- leafphdata$leafonday[leafphdata$site_id == settings$run$site$id]
+ mean_on <- mean(site_phenology_on, na.rm = TRUE)
+
+ if (!is.nan(mean_on) && !is.na(mean_on)) {
+ leafOnDay <- round(mean_on)
+ PEcAn.logger::logger.info(paste("Missing leafOnDay for current year. Using site mean:", leafOnDay))
+ } else {
+ # 2. If no site history exists, fall back to parameter file
+ leafOnDay <- param[which(param[, 1] == "leafOnDay"), 2]
+ PEcAn.logger::logger.warn("Missing leafOnDay and no site history. Using parameter file default.")
+ }
+ }
+
+ if (length(leafOffDay) == 0 || is.na(leafOffDay)) {
+ # 1. Try to calculate the mean across all available years for this site
+ site_phenology_off <- leafphdata$leafoffday[leafphdata$site_id == settings$run$site$id]
+ mean_off <- mean(site_phenology_off, na.rm = TRUE)
+
+ if (!is.nan(mean_off) && !is.na(mean_off)) {
+ leafOffDay <- round(mean_off)
+ PEcAn.logger::logger.info(paste("Missing leafOffDay for current year. Using site mean:", leafOffDay))
+ } else {
+ # 2. If no site history exists, fall back to parameter file
+ leafOffDay <- param[which(param[, 1] == "leafOffDay"), 2]
+ PEcAn.logger::logger.warn("Missing leafOffDay and no site history. Using parameter file default.")
+ }
}
- if (!is.na(leafOffDay)) {
+
+ # Only apply the phenology dates when the leaf off date falls after the leaf on date;
+ # otherwise these dates are not used and the parameter-file defaults are kept.
+ if (leafOffDay > leafOnDay) {
+ param[which(param[, 1] == "leafOnDay"), 2] <- leafOnDay
param[which(param[, 1] == "leafOffDay"), 2] <- leafOffDay
}
} else {
@@ -510,6 +559,14 @@ write.config.SIPNET <- function(defaults, trait.values, settings, run.id, inputs
if ("volume_fraction_of_water_in_soil_at_saturation" %in% names(soil_IC_list$vals)) {
#if depth is provided in the file
if ("depth" %in% names(soil_IC_list$dims)) {
+ # reduce estimates to the pre-defined soil depth.
+ if (!is.null(settings$run$inputs$soil_physics$soil_depth)) {
+ inds.depth <- which(soil_IC_list$dims$depth <= as.numeric(settings$run$inputs$soil_physics$soil_depth))
+ soil_IC_list$dims$depth <- soil_IC_list$dims$depth[inds.depth]
+ for (soil.val in names(soil_IC_list$vals)) {
+ soil_IC_list$vals[[soil.val]] <- soil_IC_list$vals[[soil.val]][inds.depth]
+ }
+ }
# Calculate the thickness of soil layers based on the assumption that the depth values are at bottoms and the first layer top is at 0
thickness<-c(soil_IC_list$dims$depth[1],diff(soil_IC_list$dims$depth))
thickness<-PEcAn.utils::ud_convert(thickness, "m", "cm")
@@ -598,24 +655,48 @@ write.config.SIPNET <- function(defaults, trait.values, settings, run.id, inputs
if ("microbe" %in% ic.names) {
param[which(param[, 1] == "microbeInit"), 2] <- IC$microbe
}
- }
-
- else if (length(settings$run$inputs$poolinitcond$path)>0) {
- ICs_num <- length(settings$run$inputs$poolinitcond$path)
- IC.path <- settings$run$inputs$poolinitcond$path[[sample(1:ICs_num, 1)]]
+ } else if (length(settings$run$inputs$poolinitcond$path) > 0) {
+ IC.path <- settings$run$inputs$poolinitcond$path
+ if (length(IC.path) > 1) {
+ PEcAn.logger::logger.error(
+ "write.config.SIPNET needs one poolinitcond path",
+ "got", length(IC.path)
+ )
+ }
+
IC.pools <- PEcAn.data.land::prepare_pools(IC.path, constants = list(sla = SLA))
- if(!is.null(IC.pools)){
+ if (!is.null(IC.pools)) {
IC.nc <- ncdf4::nc_open(IC.path) #for additional variables specific to SIPNET
+
+ # Optional variables: Use these if present, but don't complain if missing
+ # TODO: Each variable here is used in a corresponding `if` block below,
+ # which are mixed in among the variables from prepare_pools.
+ # Should reorder to separate these, and consider making this an input
+ # to let user control at runtime what's optional and what's mandatory
+ ic_ncvars_to_try <- c(
+ "nee",
+ "SoilMoistFrac",
+ "SWE",
+ "date_of_budburst",
+ "date_of_senescence",
+ "Microbial Biomass C"
+ )
+ ic_has_ncvars <- ic_ncvars_to_try %in% names(IC.nc$var)
+ names(ic_has_ncvars) <- ic_ncvars_to_try
+
## plantWoodInit gC/m2
if ("wood" %in% names(IC.pools)) {
- param[which(param[, 1] == "plantWoodInit"), 2] <- PEcAn.utils::ud_convert(IC.pools$wood, "kg m-2", "g m-2")
+ fineRootFrac <- param[which(param[,1] == "fineRootFrac"),2]
+ coarseRootFrac <- param[which(param[,1] == "coarseRootFrac"),2]
+ # accounts for the fact that SIPNET takes plantWoodInit as all wood (including roots).
+ param[which(param[, 1] == "plantWoodInit"), 2] <- PEcAn.utils::ud_convert(IC.pools$wood, "kg m-2", "g m-2")/(1-fineRootFrac-coarseRootFrac)
}
## laiInit m2/m2
lai <- IC.pools$LAI
if (!is.na(lai) && is.numeric(lai)) {
- param[which(param[, 1] == "laiInit"), 2] <- lai
+ param[param[, 1] == "laiInit", 2] <- lai
}
# Sipnet always starts from initial LAI whether day 0 is in or out of the
@@ -642,72 +723,97 @@ write.config.SIPNET <- function(defaults, trait.values, settings, run.id, inputs
}
## neeInit gC/m2
- nee <- try(ncdf4::ncvar_get(IC.nc,"nee"),silent = TRUE)
- if (!is.na(nee) && is.numeric(nee)) {
- param[which(param[, 1] == "neeInit"), 2] <- nee
+ if (ic_has_ncvars[["nee"]]) {
+ nee <- ncdf4::ncvar_get(IC.nc, "nee")
+ if (!is.na(nee) && is.numeric(nee)) {
+ param[param[, 1] == "neeInit", 2] <- nee
+ }
}
## litterInit gC/m2
if ("litter" %in% names(IC.pools)) {
- param[which(param[, 1] == "litterInit"), 2] <- PEcAn.utils::ud_convert(IC.pools$litter, 'g m-2', 'g m-2') # BETY: kgC m-2
+ param[param[, 1] == "litterInit", 2] <- PEcAn.utils::ud_convert(IC.pools$litter, "g m-2", "g m-2") # BETY: kgC m-2
}
## soilInit gC/m2
if ("soil" %in% names(IC.pools)) {
- param[which(param[, 1] == "soilInit"), 2] <- PEcAn.utils::ud_convert(sum(IC.pools$soil), 'kg m-2', 'g m-2') # BETY: kgC m-2
+ param[param[, 1] == "soilInit", 2] <- PEcAn.utils::ud_convert(sum(IC.pools$soil), "kg m-2", "g m-2") # BETY: kgC m-2
}
## soilWFracInit fraction
- soilWFrac <- try(ncdf4::ncvar_get(IC.nc,"SoilMoistFrac"),silent = TRUE)
- if (!"try-error" %in% class(soilWFrac)) {
+ if (ic_has_ncvars[["SoilMoistFrac"]]) {
+ soilWFrac <- ncdf4::ncvar_get(IC.nc, "SoilMoistFrac")
if (!is.na(soilWFrac) && is.numeric(soilWFrac)) {
- param[which(param[, 1] == "soilWFracInit"), 2] <- sum(soilWFrac)/100
+ param[param[, 1] == "soilWFracInit", 2] <- sum(soilWFrac) / 100
+ ## litterWFracInit fraction
+ litterWFrac <- soilWFrac
}
}
- ## litterWFracInit fraction
- litterWFrac <- soilWFrac
+
## snowInit cm water equivalent (cm = g / cm2 because 1 g water = 1 cm3 water)
- snow = try(ncdf4::ncvar_get(IC.nc,"SWE"),silent = TRUE)
- if (!is.na(snow) && is.numeric(snow)) {
- param[which(param[, 1] == "snowInit"), 2] <- PEcAn.utils::ud_convert(snow, "kg m-2", "g cm-2") # BETY: kg m-2
+ if (ic_has_ncvars[["SWE"]]) {
+ snow <- ncdf4::ncvar_get(IC.nc, "SWE")
+ if (!is.na(snow) && is.numeric(snow)) {
+ param[param[, 1] == "snowInit", 2] <- PEcAn.utils::ud_convert(snow, "kg m-2", "g cm-2") # BETY: kg m-2
+ }
}
## leafOnDay
- leafOnDay <- try(ncdf4::ncvar_get(IC.nc,"date_of_budburst"),silent = TRUE)
- if (!is.na(leafOnDay) && is.numeric(leafOnDay)) {
- param[which(param[, 1] == "leafOnDay"), 2] <- leafOnDay
+ if (ic_has_ncvars[["date_of_budburst"]]) {
+ leafOnDay <- ncdf4::ncvar_get(IC.nc, "date_of_budburst")
+ if (!is.na(leafOnDay) && is.numeric(leafOnDay)) {
+ param[param[, 1] == "leafOnDay", 2] <- leafOnDay
+ }
}
## leafOffDay
- leafOffDay <- try(ncdf4::ncvar_get(IC.nc,"date_of_senescence"),silent = TRUE)
- if (!is.na(leafOffDay) && is.numeric(leafOffDay)) {
- param[which(param[, 1] == "leafOffDay"), 2] <- leafOffDay
+ if (ic_has_ncvars[["date_of_senescence"]]) {
+ leafOffDay <- ncdf4::ncvar_get(IC.nc, "date_of_senescence")
+ if (!is.na(leafOffDay) && is.numeric(leafOffDay)) {
+ param[param[, 1] == "leafOffDay", 2] <- leafOffDay
+ }
}
- microbe <- try(ncdf4::ncvar_get(IC.nc,"Microbial Biomass C"),silent = TRUE)
- if (!is.na(microbe) && is.numeric(microbe)) {
- param[which(param[, 1] == "microbeInit"), 2] <- PEcAn.utils::ud_convert(microbe, "mg kg-1", "mg g-1") #BETY: mg microbial C kg-1 soil
+ if (ic_has_ncvars[["Microbial Biomass C"]]) {
+ microbe <- ncdf4::ncvar_get(IC.nc, "Microbial Biomass C")
+ if (!is.na(microbe) && is.numeric(microbe)) {
+ param[param[, 1] == "microbeInit", 2] <- PEcAn.utils::ud_convert(microbe, "mg kg-1", "mg g-1") #BETY: mg microbial C kg-1 soil
+ }
}
-
+
ncdf4::nc_close(IC.nc)
- }else{
+ } else {
PEcAn.logger::logger.error("Bad initial conditions filepath; keeping defaults")
}
- }else{
+ } else {
#some stuff about IC file that we can give in lieu of actual ICs
}
-
-
+
+
if (!is.null(settings$run$inputs$soilmoisture)) {
#read soil moisture netcdf file, grab closet date to start_date, set equal to soilWFrac
- if(!is.null(settings$run$inputs$soilmoisture$path)){
+ if (!is.null(settings$run$inputs$soilmoisture$path)) {
soil.path <- settings$run$inputs$soilmoisture$path
soilWFrac <- ncdf4::ncvar_get(ncdf4::nc_open(soil.path), varid = "mass_fraction_of_unfrozen_water_in_soil_moisture")
-
+
param[which(param[, 1] == "soilWFracInit"), 2] <- soilWFrac
}
-
+
}
- if(file.exists(file.path(settings$rundir, run.id, "sipnet.param"))) file.rename(file.path(settings$rundir, run.id, "sipnet.param"),file.path(settings$rundir, run.id, paste0("sipnet_",lubridate::year(settings$run$start.date),"_",lubridate::year(settings$run$end.date),".param")))
-
+ if (file.exists(file.path(settings$rundir, run.id, "sipnet.param"))) {
+ file.rename(
+ file.path(settings$rundir, run.id, "sipnet.param"),
+ file.path(
+ settings$rundir,
+ run.id,
+ paste0("sipnet_", lubridate::year(settings$run$start.date), "_", lubridate::year(settings$run$end.date), ".param")
+ )
+ )
+ }
+
- utils::write.table(param, file.path(settings$rundir, run.id, "sipnet.param"), row.names = FALSE, col.names = FALSE,
- quote = FALSE)
+ utils::write.table(
+ param,
+ file.path(settings$rundir, run.id, "sipnet.param"),
+ row.names = FALSE,
+ col.names = FALSE,
+ quote = FALSE
+ )
} # write.config.SIPNET
diff --git a/models/sipnet/R/write.events.SIPNET.R b/models/sipnet/R/write.events.SIPNET.R
new file mode 100644
index 00000000000..d95111561e1
--- /dev/null
+++ b/models/sipnet/R/write.events.SIPNET.R
@@ -0,0 +1,122 @@
+## TODO:
+## - integrate call into write.configs.SIPNET
+## - parameterize planting allocation fractions
+## - make sure files are written in correct output directory
+## - map crops associated w/ planting and harvest --> PFTs; this will need to be handled separately from events.in
+#' Write SIPNET events.in files from a PEcAn events.json
+#'
+#' Reads a single PEcAn events.json containing one or more site objects and
+#' writes one SIPNET `events.in` file per site. Events are translated according to [SIPNET's `events.in`
+#' specification](https://pecanproject.github.io/sipnet/parameters/#agronomic-events).
+#' The writer expects inputs to already match the PEcAn MVP schema v0.1.0 naming and units where applicable.
+#'
+#' @details
+#' - Supported `event_type` values: `tillage`, `planting`, `fertilization`,
+#' `irrigation`, `harvest`.
+#' - Units translated from PEcAn standard_vars to SIPNET events.in specification:
+#' `kg/m^2` to `g/m^2`; irrigation `amount_mm` to `cm`.
+#' - Planting allocation uses fixed internal parameters. Future work should use the same values
+#' that are written to `sipnet.parms` (e.g. after integrating this into `write.configs.SIPNET`)
+#'
+#' @param events_json character. Path to an `events.json` file containing an
+#' array of site objects with `site_id`, optional `pft`, and `events`.
+#' @param outdir character. Output directory where per-site `events-<site_id>.in`
+#' files are written.
+#'
+#' @return Invisibly, a vector of files written.
+#'
+#' @examples
+#' # Example with two events for a single site
+#' tmp <- withr::local_tempfile(fileext = ".json")
+#' site <- list(
+#' site_id = "EX1",
+#' events = list(
+#' list(event_type = "tillage", date = "2022-02-04", tillage_eff_0to1 = 0.2),
+#' list(event_type = "planting", date = "2022-02-19", leaf_c_kg_m2 = 0.01)
+#' )
+#' )
+#' jsonlite::write_json(list(site), tmp, auto_unbox = TRUE)
+#' outdir <- withr::local_tempdir()
+#' files <- write.events.SIPNET(tmp, outdir)
+#' files
+#'
+#' @importFrom rlang %||%
+#' @export
+write.events.SIPNET <- function(events_json, outdir) {
+ # Validate input JSON against PEcAn events schema
+ PEcAn.data.land::validate_events_json(events_json)
+
+ # TODO add overwrite argument
+ x <- jsonlite::fromJSON(events_json, simplifyVector = FALSE)
+ # allow a single site events.json that does not have a site_id
+ site_objs <- if (!is.null(x$site_id)) list(x) else x
+ files_written <- vector()
+
+ leafAllocation <- 0.50
+ woodAllocation <- 0.15
+ fineRootAllocation <- 0.10
+ coarseRootAllocation <- 0.25
+
+ # Unit conversion helpers
+ kg2g <- as.numeric(PEcAn.utils::ud_convert(1, "kg", "g")) # 1000
+ mm2cm <- as.numeric(PEcAn.utils::ud_convert(1, "mm", "cm")) # 0.1
+
+ # For each site, build event time series and write file
+ for (site in site_objs) {
+ sid <- site$site_id
+ evs <- site$events
+ # Order by date and build lines
+ dates <- as.Date(vapply(evs, function(e) as.character(e$date), character(1)))
+ ord <- order(dates)
+ lines <- character()
+ for (e in evs[ord]) {
+ d <- as.Date(e$date)
+ year <- as.integer(format(d, "%Y"))
+ day <- as.integer(format(d, "%j"))
+ type <- e$event_type
+ if (type == "tillage") {
+ f <- if (is.null(e$tillage_eff_0to1)) 0 else e$tillage_eff_0to1
+ # TODO: consider validating up front against schema rather than here
+ lines <- c(lines, sprintf("%d %d till %s", year, day, f))
+ } else if (type == "planting") {
+ # infer total planted biomass from leaf pool and allocation fraction
+ leaf_g <- as.numeric(if (is.null(e$leaf_c_kg_m2)) 0 else e$leaf_c_kg_m2) * kg2g
+ total_g <- if (leafAllocation > 0) leaf_g / leafAllocation else leaf_g
+ wood_g <- woodAllocation * total_g
+ fr_g <- fineRootAllocation * total_g
+ cr_g <- coarseRootAllocation * total_g
+ lines <- c(lines, sprintf("%d %d plant %s %s %s %s", year, day, leaf_g, wood_g, fr_g, cr_g))
+ } else if (type == "fertilization") {
+ orgN_g <- as.numeric(if (is.null(e$org_n_kg_m2)) 0 else e$org_n_kg_m2) * kg2g
+ orgC_g <- as.numeric(if (is.null(e$org_c_kg_m2)) 0 else e$org_c_kg_m2) * kg2g
+ nh4_g <- as.numeric(if (is.null(e$nh4_n_kg_m2)) 0 else e$nh4_n_kg_m2) * kg2g
+ no3_g <- as.numeric(if (is.null(e$no3_n_kg_m2)) 0 else e$no3_n_kg_m2) * kg2g
+ minN_g <- nh4_g + no3_g
+ lines <- c(lines, sprintf("%d %d fert %s %s %s", year, day, orgN_g, orgC_g, minN_g))
+ } else if (type == "irrigation") {
+ amt_cm <- as.numeric(if (is.null(e$amount_mm)) 0 else e$amount_mm) * mm2cm
+ method_code <- if (is.null(e$method) || e$method == "soil") 1 else 0
+ lines <- c(lines, sprintf("%d %d irrig %s %s", year, day, amt_cm, method_code))
+ } else if (type == "harvest") {
+ abv_rem <- e$frac_above_removed_0to1 %||% 0
+ blw_rem <- e$frac_below_removed_0to1 %||% 0
+ abv_lit <- e$frac_above_to_litter_0to1 %||% (1.0 - abv_rem)
+ blw_lit <- e$frac_below_to_litter_0to1 %||% (1.0 - blw_rem)
+ lines <- c(
+ lines,
+ sprintf(
+ "%d %d harv %s %s %s %s",
+ year, day,
+ abv_rem, blw_rem,
+ abv_lit, blw_lit
+ )
+ )
+ }
+ }
+ dir.create(outdir, showWarnings = FALSE, recursive = TRUE)
+ fp <- file.path(outdir, sprintf("events-%s.in", sid))
+ writeLines(lines, fp)
+ files_written <- c(files_written, fp)
+ }
+ invisible(files_written)
+}
diff --git a/models/sipnet/R/write_restart.SIPNET.R b/models/sipnet/R/write_restart.SIPNET.R
index e2e58718330..a3ac00e8a3b 100755
--- a/models/sipnet/R/write_restart.SIPNET.R
+++ b/models/sipnet/R/write_restart.SIPNET.R
@@ -37,7 +37,9 @@ write_restart.SIPNET <- function(outdir, runid, start.time, stop.time, settings,
file.path(outdir, runid, paste0("sipnet.", as.Date(start.time), ".out")))
system(paste("rm", file.path(rundir, runid, "sipnet.clim")))
} else {
- print(paste("Files not renamed -- Need to rerun timestep", start.time, "before next time step"))
+ if (verbose) {
+ print(paste("Files not renamed -- Need to rerun timestep", start.time, "before next time step"))
+ }
}
settings$run$start.date <- start.time
diff --git a/models/sipnet/inst/niwot.clim b/models/sipnet/inst/niwot.clim
new file mode 100644
index 00000000000..788e325079a
--- /dev/null
+++ b/models/sipnet/inst/niwot.clim
@@ -0,0 +1,5237 @@
+0 1998 305 0.00 0.292 1.5314 0.8162 0.0000 0.0000 105.7962 70.1711 711.6313 0.9200 0.0000
+0 1998 305 7.00 0.417 3.6460 1.8311 5.6016 0.0000 125.7018 23.5578 809.4069 1.1270 0.0000
+0 1998 305 17.00 0.583 1.9850 1.3510 0.0000 0.0000 108.1324 75.9155 732.7946 1.1350 0.0000
+0 1998 306 7.00 0.417 2.2580 1.4513 2.7104 1.0000 114.1442 71.6831 741.8206 0.9690 0.0000
+0 1998 306 17.00 0.583 -0.1471 0.7272 0.0000 0.0000 101.2110 142.4979 635.3659 0.2700 0.0000
+0 1998 307 7.00 0.417 2.9320 0.8204 17.5849 0.0000 380.9791 259.3449 522.7077 2.0130 0.0000
+0 1998 307 17.00 0.583 -6.4643 -1.2355 0.0000 0.0000 140.8336 329.9315 359.5320 4.4471 0.0000
+0 1998 308 7.00 0.417 -4.6315 -0.6292 15.4651 0.0000 192.9658 346.5749 368.3694 1.8340 0.0000
+0 1998 308 17.00 0.583 -6.7750 -1.2542 0.0000 0.0000 146.7945 346.1592 341.9642 1.1693 0.0000
+0 1998 309 7.00 0.417 -0.6205 -0.4438 18.4613 0.0000 436.0609 434.3983 289.1516 2.2115 0.0000
+0 1998 309 17.00 0.583 -6.7750 -1.0894 0.0000 0.0000 82.8851 286.0290 409.3606 1.3579 0.0000
+0 1998 310 7.00 0.417 -3.6305 -0.7723 19.5107 0.0000 297.6430 411.3825 296.9597 4.5435 0.0000
+0 1998 310 17.00 0.583 -7.0786 -1.1326 0.0000 0.0000 91.8134 303.9500 388.8968 1.7454 0.0000
+0 1998 311 7.00 0.417 -4.9505 -1.0790 13.0270 2.0000 265.8706 406.1544 289.0650 2.7420 0.0000
+0 1998 311 17.00 0.583 -7.1557 -1.7459 0.0000 0.0000 180.9898 368.6815 298.2356 2.2521 0.0000
+0 1998 312 7.00 0.417 -1.4030 -0.4739 10.8382 3.0000 340.4554 374.5204 347.3092 3.0130 0.0000
+0 1998 312 17.00 0.583 -5.1764 -0.7922 0.0000 0.0000 88.1475 255.4617 452.6006 1.4679 0.0000
+0 1998 313 7.00 0.417 -10.2150 -1.6018 3.8288 11.0000 46.7733 321.4738 351.2685 1.9730 0.0000
+0 1998 313 17.00 0.583 -13.7814 -2.8809 0.0000 0.0000 90.1292 391.3220 230.6856 7.1196 0.0000
+0 1998 314 7.00 0.417 -11.7450 -2.0585 12.2096 2.0000 133.7186 423.8192 230.7083 12.4710 0.0000
+0 1998 314 17.00 0.583 -8.7550 -2.5731 0.0000 0.0000 224.0515 421.8732 211.7972 11.5193 0.0000
+0 1998 315 7.00 0.417 -3.2250 -0.9362 19.1293 0.0000 313.2859 398.4886 302.9748 2.0165 0.0000
+0 1998 315 17.00 0.583 -6.1714 -1.2762 0.0000 0.0000 142.6518 322.0822 364.5059 2.2893 0.0000
+0 1998 316 7.00 0.417 -2.3430 -0.7364 17.7518 0.0000 313.9503 378.1247 331.9471 5.6260 0.0000
+0 1998 316 17.00 0.583 -0.1436 -1.3097 0.0000 0.0000 577.3210 523.6761 161.6771 7.5325 0.0000
+0 1998 317 7.00 0.417 3.0090 -0.0360 18.4180 0.0000 551.8631 396.7713 344.9526 6.4910 0.0000
+0 1998 317 17.00 0.583 1.5018 -0.2624 0.0000 0.0000 404.6647 319.6765 411.5994 12.7004 0.0000
+0 1998 318 7.00 0.417 1.4860 0.1072 6.0104 0.0000 346.0474 275.8911 472.6009 5.0445 0.0000
+0 1998 318 17.00 0.583 2.9214 0.0120 0.0000 0.0000 519.6170 370.5056 373.4467 2.9900 0.0000
+0 1998 319 7.00 0.417 5.2530 0.0035 19.4294 0.0000 722.0967 435.5236 307.8883 12.4975 0.0000
+0 1998 319 17.00 0.583 1.4650 -0.7185 0.0000 0.0000 516.0820 411.1181 300.1592 10.7764 0.0000
+0 1998 320 7.00 0.417 2.8890 -0.6278 15.7725 0.0000 675.6022 496.0717 219.1904 9.1600 0.0000
+0 1998 320 17.00 0.583 1.7982 -0.8216 0.0000 0.0000 671.2941 546.4083 159.9251 2.1375 0.0000
+0 1998 321 7.00 0.417 4.7675 -0.2679 12.9568 2.0000 831.6952 560.9893 170.1190 3.5835 0.0000
+0 1998 321 17.00 0.583 -3.1861 -1.2872 0.0000 0.0000 298.6230 359.5174 327.6594 4.4025 0.0000
+0 1998 322 7.00 0.417 -5.7710 -1.4856 14.0886 0.0000 221.8663 379.3292 298.3842 7.5725 0.0000
+0 1998 322 17.00 0.583 -7.3571 -1.8536 0.0000 0.0000 147.1090 336.7638 325.7602 2.8261 0.0000
+0 1998 323 7.00 0.417 -7.6685 -2.1457 9.7349 0.0000 216.8945 404.5375 245.8570 1.1405 0.0000
+0 1998 323 17.00 0.583 -9.3100 -2.7564 0.0000 0.0000 215.0847 422.6077 203.8202 9.0804 0.0000
+0 1998 324 7.00 0.417 -3.9155 -2.0725 17.0417 0.0000 484.6456 551.1642 102.4968 5.4970 0.0000
+0 1998 324 17.00 0.583 -3.5114 -2.6621 0.0000 0.0000 473.5208 505.2659 124.7446 13.9807 0.0000
+0 1998 325 7.00 0.417 1.9440 -1.0831 13.9212 0.0000 601.9067 452.8942 242.8120 15.5050 0.0000
+0 1998 325 17.00 0.583 3.2871 -0.7151 0.0000 0.0000 636.7706 435.7127 275.1840 12.1164 0.0000
+0 1998 326 7.00 0.417 5.2920 -0.2207 17.1983 0.0000 759.3233 456.8389 276.3548 9.8950 0.0000
+0 1998 326 17.00 0.583 0.4679 -1.4863 0.0000 0.0000 541.8700 453.6209 224.2803 7.7371 0.0000
+0 1998 327 7.00 0.417 3.6165 -0.6882 15.0021 0.0000 689.6565 458.4679 254.2639 5.5505 0.0000
+0 1998 327 17.00 0.583 4.4532 -0.1750 0.0000 0.0000 802.4271 557.0845 177.9435 9.2118 0.0000
+0 1998 328 7.00 0.417 3.0420 -0.2160 14.0218 0.0000 715.1185 548.6207 184.5389 9.3225 0.0000
+0 1998 328 17.00 0.604 -2.4931 -0.4269 0.0000 0.0000 345.2560 430.6796 292.9583 10.3383 0.0000
+0 1998 329 7.50 0.396 -0.1900 -0.6200 7.4045 0.0000 441.8966 419.0805 295.9073 12.5947 0.0000
+0 1998 329 17.00 0.604 2.4069 -0.4376 0.0000 0.0000 468.9608 328.1475 394.9908 9.3941 0.0000
+0 1998 330 7.50 0.396 8.3095 -0.1711 16.0931 0.0000 908.0688 396.4958 338.7253 2.4221 0.0000
+0 1998 330 17.00 0.604 5.6483 -0.1383 0.0000 0.0000 711.2221 392.3008 344.4066 2.8786 0.0000
+0 1998 331 7.50 0.396 8.3916 -0.0037 14.7573 0.0000 1034.2429 523.1500 219.7522 2.0374 0.0000
+0 1998 331 17.00 0.604 5.6907 -0.0838 0.0000 0.0000 912.2178 593.4310 145.7756 2.3262 0.0000
+0 1998 332 7.50 0.396 6.0068 -0.0311 9.2074 0.0000 871.8341 534.0227 207.6161 3.7368 0.0000
+0 1998 332 17.00 0.604 1.9797 -0.0124 0.0000 0.0000 460.8787 359.9929 382.4976 6.1693 0.0000
+0 1998 333 7.50 0.396 0.3716 -0.0221 12.7253 0.0000 359.4890 340.1770 401.8669 4.1858 0.0000
+0 1998 333 17.00 0.604 -2.5024 -0.2238 0.0000 0.0000 212.7322 308.9927 423.8466 9.0297 0.0000
+0 1998 334 7.50 0.375 1.4383 -0.3978 16.1476 0.0000 459.0626 365.5258 359.4145 4.2083 0.0000
+0 1998 334 16.50 0.625 3.6103 -0.3360 0.0000 0.0000 718.7391 514.8210 212.8924 4.2323 0.0000
+0 1998 335 7.50 0.375 6.9517 -0.1667 15.0689 0.0000 882.0950 470.4620 264.9614 2.9972 0.0000
+0 1998 335 16.50 0.625 2.0337 -0.0727 0.0000 0.0000 513.7165 409.4967 330.2191 4.9100 0.0000
+0 1998 336 7.50 0.375 4.1239 -0.0706 15.2827 0.0000 699.4792 476.5900 263.2282 7.1372 0.0000
+0 1998 336 16.50 0.625 2.2497 -0.2113 0.0000 0.0000 638.0613 516.2470 217.1437 6.2497 0.0000
+0 1998 337 7.50 0.375 5.8456 -0.2161 14.9364 0.0000 799.8236 455.8345 277.3515 1.7667 0.0000
+0 1998 337 16.50 0.625 -1.6443 -0.3527 0.0000 0.0000 482.8260 533.7303 193.2828 2.8753 0.0000
+0 1998 338 7.50 0.375 4.4967 -0.4294 13.1935 0.0000 765.0951 500.4164 223.1219 2.6228 0.0000
+0 1998 338 16.50 0.625 -2.5907 -0.6173 0.0000 0.0000 477.7509 556.4442 158.7356 3.4737 0.0000
+0 1998 339 7.50 0.375 -5.8167 -0.9517 6.0525 0.0000 233.0562 406.0072 294.4019 2.0283 0.0000
+0 1998 339 16.50 0.625 -13.6100 -1.4700 0.0000 0.0000 79.4308 431.6570 246.7441 1.3333 0.0000
+0 1998 340 7.50 0.375 -10.9611 -2.3194 13.6719 0.0000 246.6973 509.7145 133.6838 3.0900 0.0000
+0 1998 340 16.50 0.625 -16.2887 -3.1377 0.0000 0.0000 122.3700 455.7349 156.2084 2.0363 0.0000
+0 1998 341 7.50 0.375 -13.1700 -4.2106 13.0630 0.0000 207.9148 447.1107 125.1857 6.2017 0.0000
+0 1998 341 16.50 0.625 -10.4497 -5.2467 0.0000 0.0000 300.1888 440.1495 96.7373 8.3740 0.0000
+0 1998 342 7.50 0.375 -6.0506 -4.7067 4.0925 0.0000 342.2598 384.0724 171.1131 3.2650 0.0000
+0 1998 342 16.50 0.625 -10.0913 -4.3000 0.0000 0.0000 180.6122 348.1464 221.0910 1.1707 0.0000
+0 1998 343 7.50 0.375 -12.6494 -4.3344 3.6239 5.0000 44.6218 269.7198 298.2758 2.9672 0.0000
+0 1998 343 16.50 0.625 -18.0353 -3.6200 0.0000 0.0000 54.7185 393.3445 200.2429 2.2823 0.0000
+0 1998 344 7.50 0.375 -8.9189 -3.6189 13.3931 1.0000 181.7390 341.6106 252.0103 2.0761 0.0000
+0 1998 344 16.50 0.625 -10.8203 -3.4157 0.0000 0.0000 270.9805 485.7988 115.3382 4.1553 0.0000
+0 1998 345 7.50 0.375 -1.6244 -3.4394 13.9115 0.0000 470.9410 395.0282 205.2403 3.6783 0.0000
+0 1998 345 16.50 0.625 -3.8227 -3.0513 0.0000 0.0000 373.7604 401.9755 212.8802 5.7663 0.0000
+0 1998 346 7.50 0.375 -2.9189 -2.8606 4.1753 0.0000 328.8332 330.8979 291.2735 9.9867 0.0000
+0 1998 346 16.50 0.625 -2.6877 -2.7527 0.0000 0.0000 490.7028 487.9569 138.3846 6.7683 0.0000
+0 1998 347 7.50 0.375 3.1956 -2.6728 14.1041 0.0000 716.9061 437.0274 192.4581 6.4722 0.0000
+0 1998 347 16.50 0.625 1.4757 -2.3050 0.0000 0.0000 682.7287 512.0491 131.9234 6.0610 0.0000
+0 1998 348 7.50 0.375 5.0967 -2.1567 13.4784 0.0000 947.8649 572.3322 77.6154 3.3200 0.0000
+0 1998 348 16.50 0.625 -4.2990 -2.2520 0.0000 0.0000 489.9019 562.0013 84.1425 2.6997 0.0000
+0 1998 349 7.50 0.375 1.7717 -2.6139 14.0191 0.0000 721.0262 511.4666 120.3036 2.9350 0.0000
+0 1998 349 16.50 0.625 -1.5617 -2.3287 0.0000 0.0000 388.3524 354.7304 288.2981 7.8157 0.0000
+0 1998 350 7.50 0.375 0.4156 -2.2744 11.5628 0.0000 474.8664 354.5778 290.6254 9.1028 0.0000
+0 1998 350 16.50 0.625 0.1220 -2.0910 0.0000 0.0000 475.3501 378.3638 274.2149 10.1443 0.0000
+0 1998 351 7.50 0.375 2.0894 -1.9633 13.0750 0.0000 624.0604 435.0760 222.7038 13.7400 0.0000
+0 1998 351 16.50 0.625 -0.2487 -1.9503 0.0000 0.0000 515.3714 439.0297 219.2816 8.6407 0.0000
+0 1998 352 7.50 0.375 -2.4372 -2.2228 13.8798 0.0000 405.1289 401.0131 246.2552 4.8739 0.0000
+0 1998 352 16.50 0.625 -19.1497 -3.7627 0.0000 0.0000 42.9413 383.5259 206.8982 1.4670 0.0000
+0 1998 353 7.50 0.375 -18.0283 -6.6817 10.1781 0.0000 46.9929 283.4624 208.0158 1.5833 0.0000
+0 1998 353 16.50 0.625 -20.7270 -7.4877 0.0000 0.0000 45.6734 291.4648 176.7293 1.0940 0.0000
+0 1998 354 7.50 0.375 -14.6122 -7.5550 5.5849 4.0000 45.7431 206.3978 259.6633 1.0783 0.0000
+0 1998 354 16.50 0.625 -17.9357 -6.6183 0.0000 0.0000 47.4003 287.8406 205.5286 0.6213 0.0000
+0 1998 355 7.50 0.375 -19.4156 -6.6072 10.5034 4.0000 93.4634 353.5424 140.1643 7.2956 0.0000
+0 1998 355 16.50 0.625 -16.0313 -7.0473 0.0000 0.0000 229.1115 421.5558 59.0070 9.4423 0.0000
+0 1998 356 7.50 0.375 -10.6494 -6.6900 14.3860 0.0000 325.6754 430.2689 60.9564 6.9450 0.0000
+0 1998 356 16.50 0.625 -18.2540 -6.9097 0.0000 0.0000 115.9757 346.1886 138.5705 2.6420 0.0000
+0 1998 357 7.50 0.375 -13.1256 -7.1028 14.4295 0.0000 228.0969 372.5131 106.4226 5.2239 0.0000
+0 1998 357 16.50 0.625 -14.4590 -7.0640 0.0000 0.0000 207.4142 379.3846 100.6853 8.7440 0.0000
+0 1998 358 7.50 0.375 -12.4672 -7.0989 7.9465 0.0000 192.9274 325.2635 153.7861 14.7544 0.0000
+0 1998 358 16.50 0.625 -11.8647 -6.7223 0.0000 0.0000 137.3709 268.5128 221.7202 13.7377 0.0000
+0 1998 359 7.50 0.375 -8.8461 -6.3483 11.2874 0.0000 149.8762 220.0040 281.6615 12.4644 0.0000
+0 1998 359 16.50 0.625 -6.9530 -5.5657 0.0000 0.0000 152.6784 195.4151 330.9894 13.3637 0.0000
+0 1998 360 7.50 0.375 -4.4144 -4.9261 10.3806 3.0000 236.5687 218.3180 329.2552 11.0717 0.0000
+0 1998 360 16.50 0.625 -8.1967 -4.3867 0.0000 0.0000 111.3832 228.2160 337.8794 2.7337 0.0000
+0 1998 361 7.50 0.375 -9.1389 -4.1578 5.7913 4.0000 122.9089 273.8980 300.2547 4.4439 0.0000
+0 1998 361 16.50 0.625 -8.1410 -3.9673 0.0000 0.0000 145.6442 276.8268 304.1295 13.3043 0.0000
+0 1998 362 7.50 0.375 -6.2733 -3.8039 3.4740 4.0000 119.1781 201.7403 385.1218 10.1694 0.0000
+0 1998 362 16.50 0.625 -5.3727 -3.5190 0.0000 0.0000 126.1917 190.8060 406.5014 13.4710 0.0000
+0 1998 363 7.50 0.375 -3.3967 -3.2922 6.5351 0.0000 166.3918 169.7471 436.0009 9.8722 0.0000
+0 1998 363 16.50 0.625 -2.0000 -3.0420 0.0000 0.0000 227.9276 186.3028 428.9154 13.3673 0.0000
+0 1998 364 7.50 0.375 -1.4006 -2.8100 12.6686 1.0000 224.6282 167.3362 456.7865 11.6506 0.0000
+0 1998 364 16.50 0.625 -3.5637 -2.6450 0.0000 0.0000 206.7393 241.2334 389.3022 3.9623 0.0000
+0 1998 365 7.50 0.375 0.4694 -2.5756 11.6890 0.0000 446.4379 308.3279 324.9308 1.9417 0.0000
+0 1998 365 16.50 0.625 -2.8783 -2.4067 0.0000 0.0000 238.3889 254.3262 385.5993 4.2957 0.0000
+0 1999 1 7.50 0.375 -5.2844 -2.3483 9.1591 0.0000 183.9665 289.0074 353.2357 4.4122 0.0000
+0 1999 1 16.50 0.625 -12.5833 -2.3423 0.0000 0.0000 73.5903 368.2766 274.2083 2.2510 0.0000
+0 1999 2 7.50 0.375 -12.0311 -2.5728 8.4604 2.0000 119.7190 397.1690 236.2018 3.8739 0.0000
+0 1999 2 16.50 0.625 -15.4980 -2.7290 0.0000 0.0000 93.1531 429.7923 197.4749 3.9543 0.0000
+0 1999 3 7.50 0.375 -15.9511 -3.0244 6.4838 1.0000 102.2304 435.2731 180.6113 6.9006 0.0000
+0 1999 3 16.50 0.625 -14.9680 -3.2740 0.0000 0.0000 97.0896 404.4082 202.0271 6.3177 0.0000
+0 1999 4 7.50 0.375 -11.5500 -3.4628 8.5304 3.0000 103.5678 335.5190 263.8632 12.7572 0.0000
+0 1999 4 16.50 0.625 -8.2283 -3.4407 0.0000 0.0000 101.2898 254.2031 346.0024 14.4020 0.0000
+0 1999 5 7.50 0.375 -6.5944 -3.2744 6.6202 3.0000 91.5501 203.6259 402.7881 12.9644 0.0000
+0 1999 5 16.50 0.625 -5.2763 -3.0640 0.0000 0.0000 104.1833 182.5470 431.8291 14.3810 0.0000
+0 1999 6 7.50 0.375 -2.8928 -2.8600 13.2941 0.0000 235.6445 236.3237 385.8656 12.7178 0.0000
+0 1999 6 16.50 0.625 -4.2403 -2.6707 0.0000 0.0000 260.6294 318.4841 311.0530 10.2503 0.0000
+0 1999 7 7.50 0.375 -1.9356 -2.3369 17.5176 0.0000 374.9638 356.7072 286.0458 8.7078 0.0000
+0 1999 7 16.50 0.625 -7.0380 -2.7002 0.0000 0.0000 185.3905 327.3089 301.1658 6.9533 0.0000
+0 1999 8 7.50 0.375 -9.2622 -2.7448 11.1384 0.0000 147.7498 353.8274 272.8542 3.9361 0.0000
+0 1999 8 16.50 0.625 -11.0840 -3.5720 0.0000 0.0000 146.4707 365.4688 229.9675 12.8003 0.0000
+0 1999 9 7.50 0.375 -7.7494 -2.9345 14.4725 1.0000 227.9322 386.1476 233.2409 13.3428 0.0000
+0 1999 9 16.50 0.625 -8.3063 -2.8653 0.0000 0.0000 97.8670 274.7723 347.2282 11.6440 0.0000
+0 1999 10 7.50 0.375 -6.1689 -2.3767 13.7866 2.0000 151.6283 284.5364 356.6331 12.6956 0.0000
+0 1999 10 16.50 0.625 -5.0930 -2.7602 0.0000 0.0000 199.0602 282.5934 343.4766 15.9170 0.0000
+0 1999 11 7.50 0.375 -2.0511 -2.2057 12.4623 0.0000 361.0928 352.1866 295.7978 6.8856 0.0000
+0 1999 11 16.50 0.625 -3.3490 -2.6730 0.0000 0.0000 394.1559 419.2557 210.2117 5.6690 0.0000
+0 1999 12 7.50 0.375 -3.9711 -2.2486 4.6872 0.0000 276.6229 341.3333 304.9787 1.5383 0.0000
+0 1999 12 16.50 0.625 -6.3473 -2.5088 0.0000 0.0000 214.6509 348.5648 287.4215 5.7147 0.0000
+0 1999 13 7.50 0.375 -6.5639 -2.3836 6.6413 0.0000 185.1232 329.7577 311.1039 4.5400 0.0000
+0 1999 13 16.50 0.625 -9.1600 -2.3503 0.0000 0.0000 183.3468 402.9397 239.2465 8.6057 0.0000
+0 1999 14 7.50 0.396 -4.4795 -2.4558 8.3252 0.0000 446.8279 518.5554 119.4237 13.6800 0.0000
+0 1999 14 17.00 0.604 -1.7848 -2.4855 0.0000 0.0000 583.4772 555.1635 81.6382 16.7955 0.0000
+0 1999 15 7.50 0.396 -2.1474 -2.4579 12.8590 0.0000 411.4468 397.0266 240.8670 11.9695 0.0000
+0 1999 15 17.00 0.604 -5.4279 -2.4314 0.0000 0.0000 280.6595 388.3572 250.5862 9.2014 0.0000
+0 1999 16 7.50 0.396 -3.7342 -2.4911 10.5768 0.0000 302.8471 347.9786 288.6049 10.8716 0.0000
+0 1999 16 17.00 0.604 -8.6076 -2.4424 0.0000 0.0000 109.0208 305.0574 333.4509 5.8600 0.0000
+0 1999 17 7.50 0.396 -11.9037 -2.4005 10.2425 5.0000 124.8447 406.7358 233.4319 9.9884 0.0000
+0 1999 17 17.00 0.604 -7.3245 -2.5400 0.0000 0.0000 138.6945 294.9875 339.6718 12.5410 0.0000
+0 1999 18 7.50 0.396 -2.1789 -2.5474 4.1681 8.0000 167.6113 152.7708 481.5946 6.1289 0.0000
+0 1999 18 17.00 0.604 -3.1648 -2.3645 0.0000 0.0000 123.1659 154.1642 487.4416 4.3593 0.0000
+0 1999 19 7.50 0.396 -2.8316 -2.1847 16.3802 9.0000 242.6760 267.5792 381.2166 11.0663 0.0000
+0 1999 19 17.00 0.583 -4.0489 -2.0693 0.0000 0.0000 207.5158 280.1211 373.3387 9.5125 0.0000
+0 1999 20 7.00 0.417 -2.6170 -1.9880 12.2999 1.0000 405.2879 427.4846 229.2786 6.7795 0.0000
+0 1999 20 17.00 0.583 -6.4939 -1.9200 0.0000 0.0000 174.5189 332.4844 327.0563 4.1800 0.0000
+0 1999 21 7.00 0.417 -6.8770 -1.8990 4.1171 15.0000 81.9035 256.3151 404.0849 1.4935 0.0000
+0 1999 21 17.00 0.583 -11.6129 -1.8450 0.0000 0.0000 42.8717 335.7736 326.8455 1.5046 0.0000
+0 1999 22 7.00 0.417 -10.2210 -1.8250 11.9956 0.0000 128.1857 394.3434 269.0968 5.1620 0.0000
+0 1999 22 17.00 0.583 -10.2575 -1.8157 0.0000 0.0000 142.4002 409.4439 254.3792 11.1436 0.0000
+0 1999 23 7.00 0.417 -3.9430 -1.8980 8.7890 1.0000 232.6222 309.1609 351.2801 5.2685 0.0000
+0 1999 23 17.00 0.583 -3.3096 -1.8679 0.0000 0.0000 226.4749 282.2805 379.3970 4.8957 0.0000
+0 1999 24 7.00 0.417 -3.3690 -1.8210 2.2903 2.0000 123.8503 184.2854 479.3195 4.7320 0.0000
+0 1999 24 17.00 0.583 -3.2368 -1.7500 0.0000 0.0000 241.5167 299.2871 367.2488 3.0207 0.0000
+0 1999 25 7.00 0.417 2.6090 -1.6630 11.6788 1.0000 634.3030 414.7505 255.3969 2.9100 0.0000
+0 1999 25 17.00 0.583 -0.7332 -1.5700 0.0000 0.0000 515.1478 471.8788 202.1469 2.5532 0.0000
+0 1999 26 7.00 0.417 -5.1430 -1.5040 12.6959 1.0000 229.7823 355.2020 321.5904 7.5785 0.0000
+0 1999 26 17.00 0.583 -10.2300 -1.4250 0.0000 0.0000 160.8065 444.4738 235.6464 7.2821 0.0000
+0 1999 27 7.00 0.417 -7.9360 -1.4530 16.3771 0.0000 233.4410 455.3393 223.5998 2.0575 0.0000
+0 1999 27 17.00 0.583 -12.7921 -1.4407 0.0000 0.0000 130.5111 468.8998 210.5567 2.8400 0.0000
+0 1999 28 7.00 0.417 -6.1790 -1.5590 19.8322 0.0000 376.9932 521.2562 153.2343 2.1700 0.0000
+0 1999 28 17.00 0.583 -18.3932 -1.6414 0.0000 0.0000 209.8115 630.7188 40.3354 1.5629 0.0000
+0 1999 29 7.00 0.417 -5.8365 -1.8070 18.3411 0.0000 478.9633 580.6445 83.5379 1.7650 0.0000
+0 1999 29 17.00 0.583 -10.3439 -1.8521 0.0000 0.0000 318.0130 583.6388 78.6850 2.2514 0.0000
+0 1999 30 7.00 0.417 -2.8665 -1.9720 11.1031 0.0000 399.8546 413.2375 244.1798 1.7520 0.0000
+0 1999 30 17.00 0.583 -8.7014 -1.9750 0.0000 0.0000 178.8123 391.7415 265.5529 1.9900 0.0000
+0 1999 31 7.00 0.417 -0.0960 -1.9640 18.0319 0.0000 614.8866 505.1929 152.5499 2.8610 0.0000
+0 1999 31 17.00 0.583 -4.1261 -1.9886 0.0000 0.0000 357.4047 435.1118 221.6278 3.1039 0.0000
+0 1999 32 7.00 0.417 -8.8465 -1.9140 9.2500 1.0000 150.7915 379.4479 280.3377 9.6590 0.0000
+0 1999 32 17.00 0.583 -11.5561 -1.8486 0.0000 0.0000 159.5240 456.2597 206.2103 14.3164 0.0000
+0 1999 33 7.00 0.417 -8.1930 -1.9110 18.9549 0.0000 276.8670 485.4282 174.4827 15.3290 0.0000
+0 1999 33 17.00 0.583 -7.5732 -1.9786 0.0000 0.0000 237.8578 429.4048 227.7421 10.4711 0.0000
+0 1999 34 7.00 0.417 -6.5840 -2.0050 19.6686 0.0000 274.6472 434.2193 221.8509 12.3765 0.0000
+0 1999 34 17.00 0.583 -4.5564 -1.9771 0.0000 0.0000 414.8317 506.9201 150.2863 2.9825 0.0000
+0 1999 35 7.00 0.417 2.2405 -1.9370 21.0353 0.0000 633.2704 434.5534 224.2922 7.6630 0.0000
+0 1999 35 17.00 0.583 -2.0282 -1.9207 0.0000 0.0000 379.4373 381.9641 277.5476 5.6214 0.0000
+0 1999 36 7.00 0.417 -2.2505 -1.8060 21.8380 0.0000 317.3514 334.9205 329.3037 6.7450 0.0000
+0 1999 36 17.00 0.583 -5.6407 -1.7414 0.0000 0.0000 178.1827 320.5210 346.3699 3.3764 0.0000
+0 1999 37 7.00 0.417 -4.6955 -1.6650 18.8662 0.0000 262.3987 376.5960 293.4658 5.3460 0.0000
+0 1999 37 17.00 0.583 -5.3704 -1.6586 0.0000 0.0000 194.7616 331.3709 338.9578 12.6454 0.0000
+0 1999 38 7.00 0.417 -0.7610 -1.6180 18.4754 1.0000 347.8441 308.1158 363.9041 11.6160 0.0000
+0 1999 38 17.00 0.583 -2.0339 -1.5764 0.0000 0.0000 191.7960 210.3403 463.4162 6.2539 0.0000
+0 1999 39 7.00 0.417 -0.7510 -1.4630 23.1997 2.0000 381.7218 349.0431 329.4766 9.1760 0.0000
+0 1999 39 17.00 0.583 -2.5089 -1.4050 0.0000 0.0000 260.8652 304.9197 376.0456 5.5636 0.0000
+0 1999 40 7.00 0.417 1.7120 -1.3030 22.7219 0.0000 524.7305 380.0446 305.2494 8.0205 0.0000
+0 1999 40 17.00 0.583 0.6275 -1.2757 0.0000 0.0000 516.4930 430.0116 256.4422 8.0750 0.0000
+0 1999 41 7.00 0.417 -0.6245 -1.1830 18.5746 0.0000 455.1471 411.8407 278.5797 6.9270 0.0000
+0 1999 41 17.00 0.583 -16.1782 -1.1443 0.0000 0.0000 126.3378 531.3801 160.7006 5.1286 0.0000
+0 1999 42 7.00 0.417 -19.2320 -1.2740 13.0653 0.0000 109.9735 561.4295 125.1011 6.1255 0.0000
+0 1999 42 17.00 0.583 -13.2914 -1.5900 0.0000 0.0000 267.6826 602.9518 70.2541 12.8007 0.0000
+0 1999 43 7.00 0.417 -4.1255 -1.9500 18.2029 0.0000 416.5321 496.2148 162.1014 8.7790 0.0000
+0 1999 43 17.00 0.583 -3.0746 -2.0579 0.0000 0.0000 472.5274 511.2007 142.7212 6.7921 0.0000
+0 1999 44 7.00 0.438 2.9486 -1.9595 25.3244 0.0000 826.9583 584.1245 73.8066 3.2652 0.0000
+0 1999 44 17.50 0.562 1.5011 -1.9315 0.0000 0.0000 745.8043 588.3004 70.7725 4.2030 0.0000
+0 1999 45 7.00 0.438 1.7333 -1.7724 18.8734 0.0000 700.5197 534.5663 131.0479 4.5895 0.0000
+0 1999 45 17.50 0.562 -4.6670 -1.7252 0.0000 0.0000 266.3391 373.8737 293.6898 4.5415 0.0000
+0 1999 46 7.00 0.438 -6.5881 -1.6776 17.8106 0.0000 247.8439 422.1439 247.3928 2.6638 0.0000
+0 1999 46 17.50 0.562 -10.9704 -1.6248 0.0000 0.0000 195.3846 488.0017 183.7336 1.9981 0.0000
+0 1999 47 7.00 0.438 -7.9543 -1.8071 13.6570 0.0000 292.0244 500.3940 163.7884 6.9229 0.0000
+0 1999 47 17.50 0.542 -8.1662 -1.9458 0.0000 0.0000 210.0413 419.6715 238.8157 5.3181 0.0000
+0 1999 48 6.50 0.458 -6.9641 -2.0727 7.5166 5.0000 148.2963 317.9967 335.3225 8.2386 0.0000
+0 1999 48 17.50 0.542 -12.4762 -2.0119 0.0000 0.0000 114.4955 422.4837 233.3058 8.8581 0.0000
+0 1999 49 6.50 0.458 -10.3277 -2.0273 24.0277 0.0000 189.5826 448.5665 206.5986 6.4309 0.0000
+0 1999 49 17.50 0.542 -6.6527 -2.0973 0.0000 0.0000 148.4711 307.9017 344.4218 4.3388 0.0000
+0 1999 50 6.50 0.458 -8.0818 -2.0923 9.9063 7.0000 128.3267 328.0757 324.4522 4.2718 0.0000
+0 1999 50 17.50 0.542 -11.9515 -1.9662 0.0000 0.0000 137.0521 437.2421 220.4128 6.7854 0.0000
+0 1999 51 6.50 0.458 -10.1486 -1.9523 26.6150 0.0000 214.5627 473.7551 184.4656 3.0550 0.0000
+0 1999 51 17.50 0.542 -11.4358 -1.9023 0.0000 0.0000 255.9728 547.3717 112.8928 2.9146 0.0000
+0 1999 52 6.50 0.458 -2.7786 -1.9927 19.6169 1.0000 413.1634 430.4217 226.1494 2.9373 0.0000
+0 1999 52 17.50 0.542 -7.2412 -2.0177 0.0000 0.0000 162.0740 324.4523 331.1021 4.3727 0.0000
+0 1999 53 6.50 0.458 -14.5414 -1.9159 11.3555 5.0000 86.7596 439.5884 220.1188 10.4691 0.0000
+0 1999 53 17.50 0.542 -12.7788 -1.8988 0.0000 0.0000 122.7627 442.6048 217.8013 15.3965 0.0000
+0 1999 54 6.50 0.458 -6.8118 -1.9768 23.6990 0.0000 243.9756 407.0536 250.1660 10.1727 0.0000
+0 1999 54 17.50 0.542 -5.9838 -2.0238 0.0000 0.0000 226.5838 368.5469 286.7563 6.5873 0.0000
+0 1999 55 6.50 0.458 -2.6459 -1.9964 14.2445 0.0000 355.7360 375.4354 280.9869 4.4973 0.0000
+0 1999 55 17.50 0.542 -1.3865 -1.9388 0.0000 0.0000 419.2645 396.1154 262.6538 6.1869 0.0000
+0 1999 56 6.50 0.458 1.4036 -1.8241 24.2249 0.0000 641.4861 491.5020 171.9799 3.4664 0.0000
+0 1999 56 17.50 0.542 -2.5181 -1.7615 0.0000 0.0000 478.0646 506.5320 159.5265 10.2346 0.0000
+0 1999 57 6.50 0.458 -5.0014 -1.6759 27.1919 1.0000 315.4898 439.1023 230.5068 8.4236 0.0000
+0 1999 57 17.50 0.542 -12.9435 -1.5896 0.0000 0.0000 102.5353 436.3814 236.8245 5.9308 0.0000
+0 1999 58 6.50 0.458 -9.7382 -1.6377 29.4473 1.0000 228.9932 480.0470 191.1551 11.3073 0.0000
+0 1999 58 17.50 0.542 -4.2246 -1.7569 0.0000 0.0000 339.7182 433.0318 233.2174 12.7950 0.0000
+0 1999 59 6.50 0.458 -1.8045 -1.7555 29.2844 0.0000 359.7346 360.2789 306.0316 9.8809 0.0000
+0 1999 59 17.50 0.542 -1.7585 -1.7385 0.0000 0.0000 376.4368 376.8942 290.1191 11.2808 0.0000
+0 1999 60 6.50 0.458 0.7491 -1.5973 24.0275 3.0000 503.4614 393.2945 279.5956 8.2741 0.0000
+0 1999 60 17.50 0.542 -6.9585 -1.5762 0.0000 0.0000 134.5719 321.9140 351.8544 6.7619 0.0000
+0 1999 61 6.50 0.458 -8.2514 -1.4909 26.6485 1.0000 268.2749 497.8490 179.4933 4.0732 0.0000
+0 1999 61 17.50 0.542 -6.2454 -1.4750 0.0000 0.0000 327.7393 499.3429 178.6693 3.7146 0.0000
+0 1999 62 6.50 0.458 0.5036 -1.4759 29.7499 0.0000 570.2244 476.1826 201.7918 9.4991 0.0000
+0 1999 62 17.50 0.542 -0.9015 -1.5108 0.0000 0.0000 463.0021 434.4624 242.0456 7.2154 0.0000
+0 1999 63 6.50 0.458 -4.3495 -1.4577 10.4657 7.0000 191.3731 301.6136 377.1257 1.5700 0.0000
+0 1999 63 17.50 0.542 -8.2831 -1.4077 0.0000 0.0000 71.6677 306.1520 374.6997 2.1915 0.0000
+0 1999 64 6.50 0.458 -13.3232 -1.3186 12.8457 3.0000 119.6824 474.8686 209.7578 5.1345 0.0000
+0 1999 64 17.50 0.542 -14.4488 -1.2981 0.0000 0.0000 118.0098 493.3958 192.1049 3.3585 0.0000
+0 1999 65 6.50 0.458 -8.7045 -1.4245 20.3794 1.0000 100.4140 335.1375 345.0050 1.3800 0.0000
+0 1999 65 17.50 0.542 -3.9081 -1.5104 0.0000 0.0000 314.7427 404.6911 271.8336 3.4077 0.0000
+0 1999 66 6.50 0.458 1.5414 -1.5005 28.0526 0.0000 619.1505 473.6967 203.2473 3.5123 0.0000
+0 1999 66 17.50 0.542 -5.0135 -1.5338 0.0000 0.0000 176.8861 303.9845 371.5547 3.0204 0.0000
+0 1999 67 6.50 0.458 -8.3855 -1.4714 25.4520 2.0000 182.7270 417.7037 260.4611 6.2150 0.0000
+0 1999 67 17.50 0.542 -7.3900 -1.4338 0.0000 0.0000 208.2738 415.9350 263.8113 5.0450 0.0000
+0 1999 68 6.50 0.479 -1.6726 -1.4235 29.5767 2.0000 399.1300 404.4308 275.7547 6.0209 0.0000
+0 1999 68 18.00 0.521 -6.4592 -1.4512 0.0000 0.0000 140.2509 315.9246 363.0897 2.8996 0.0000
+0 1999 69 6.50 0.479 -6.1791 -1.4122 33.4985 5.0000 229.9201 400.6812 279.9803 8.2422 0.0000
+0 1999 69 18.00 0.521 -9.6412 -1.3388 0.0000 0.0000 125.7945 394.8792 288.8934 2.0320 0.0000
+0 1999 70 6.50 0.479 -4.6948 -1.3378 21.3376 0.0000 192.0959 317.9894 365.8228 1.5222 0.0000
+0 1999 70 18.00 0.500 -8.7404 -1.3625 0.0000 0.0000 49.4998 296.3961 386.3681 0.8979 0.0000
+0 1999 71 6.00 0.500 -11.1617 -1.3400 11.6006 8.0000 36.5392 345.3124 338.4062 2.0421 0.0000
+0 1999 71 18.00 0.500 -9.9650 -1.2983 0.0000 0.0000 108.6134 389.1647 296.3251 2.0371 0.0000
+0 1999 72 6.00 0.500 -2.9142 -1.3525 32.6385 0.0000 346.0577 401.5046 281.6849 2.7392 0.0000
+0 1999 72 18.00 0.500 -2.2425 -1.4008 0.0000 0.0000 398.0433 432.2294 248.9126 5.3379 0.0000
+0 1999 73 6.00 0.500 3.5504 -1.3375 34.1359 0.0000 690.6107 444.2827 239.5466 4.9279 0.0000
+0 1999 73 18.00 0.500 1.3379 -1.2650 0.0000 0.0000 648.2118 526.2025 160.7079 2.9004 0.0000
+0 1999 74 6.00 0.500 4.3021 -1.1142 23.0299 0.0000 857.4946 570.5999 122.7874 2.4433 0.0000
+0 1999 74 18.00 0.500 1.3887 -0.9950 0.0000 0.0000 631.8695 519.8159 178.7094 2.7183 0.0000
+0 1999 75 6.00 0.500 3.0350 -0.8250 30.9524 0.0000 747.7278 554.0699 151.8774 4.5467 0.0000
+0 1999 75 18.00 0.500 -0.2258 -0.8650 0.0000 0.0000 560.7546 530.1138 174.0742 2.9096 0.0000
+0 1999 76 6.00 0.500 -2.3942 -0.8325 34.4034 0.0000 345.5208 405.4442 300.1709 2.2617 0.0000
+0 1999 76 18.00 0.500 -5.0692 -0.8750 0.0000 0.0000 253.1735 406.2689 297.4814 2.2713 0.0000
+0 1999 77 6.00 0.500 0.1887 -0.8342 34.3658 0.0000 465.3530 416.2849 289.2587 2.4117 0.0000
+0 1999 77 18.00 0.500 -4.3579 -0.9458 0.0000 0.0000 230.6570 363.7524 336.9088 1.5583 0.0000
+0 1999 78 6.00 0.500 0.8742 -0.8625 35.9219 0.0000 356.5936 265.9949 438.3109 2.0925 0.0000
+0 1999 78 18.00 0.500 -2.1350 -0.8383 0.0000 0.0000 325.0627 379.1202 326.2352 3.0467 0.0000
+0 1999 79 6.00 0.500 2.5158 -0.6850 35.2679 0.0000 573.8245 411.3969 300.7206 2.4821 0.0000
+0 1999 79 18.00 0.500 -0.3512 -0.6308 0.0000 0.0000 428.8270 415.1326 299.3724 2.3225 0.0000
+0 1999 80 6.00 0.500 4.9529 -0.5042 32.2078 0.0000 819.3931 523.2805 196.8776 4.1213 0.0000
+0 1999 80 18.00 0.500 -1.2888 -0.6033 0.0000 0.0000 484.9525 512.1249 203.6030 4.4092 0.0000
+0 1999 81 6.00 0.500 -1.2363 -0.5467 35.1540 0.0000 419.5289 448.8917 269.3634 6.4321 0.0000
+0 1999 81 18.00 0.500 -4.5963 -0.6050 0.0000 0.0000 157.1390 313.0971 402.5555 1.2154 0.0000
+0 1999 82 6.00 0.500 0.8692 -0.5500 35.1203 0.0000 556.5883 481.8903 236.2196 2.0825 0.0000
+0 1999 82 18.00 0.500 -2.7958 -0.6675 0.0000 0.0000 312.3072 398.7556 314.1254 2.1583 0.0000
+0 1999 83 6.00 0.500 0.9563 -0.6108 23.7821 0.0000 438.7513 359.6011 355.7976 2.2650 0.0000
+0 1999 83 18.00 0.500 -1.6171 -0.6750 0.0000 0.0000 279.8851 319.6943 392.8534 2.3475 0.0000
+0 1999 84 6.00 0.500 4.8517 -0.5483 36.1159 0.0000 664.5214 370.1429 348.0452 2.1358 0.0000
+0 1999 84 18.00 0.500 2.5262 -0.5692 0.0000 0.0000 521.6577 369.1269 348.1219 3.9987 0.0000
+0 1999 85 6.00 0.500 5.9938 -0.3692 27.5922 0.0000 873.7783 518.2058 208.0191 2.9462 0.0000
+0 1999 85 18.00 0.500 0.3342 -0.3658 0.0000 0.0000 271.6607 234.4519 491.9137 3.7817 0.0000
+0 1999 86 6.00 0.500 -0.7821 -0.3083 22.9478 4.0000 208.9855 224.9130 504.0539 5.3075 0.0000
+0 1999 86 18.00 0.500 -6.8113 -0.3092 0.0000 0.0000 235.9221 474.6057 254.3209 4.4796 0.0000
+0 1999 87 6.00 0.500 -2.0329 -0.5378 30.6029 1.0000 471.6496 529.9010 188.7935 3.3663 0.0000
+0 1999 87 18.00 0.500 -4.3271 -0.4274 0.0000 0.0000 431.7177 583.9884 139.6274 4.1712 0.0000
+0 1999 88 6.00 0.500 2.8775 -0.2642 38.1814 0.0000 784.7477 612.7719 118.2031 3.8725 0.0000
+0 1999 88 18.00 0.500 3.0100 -0.3833 0.0000 0.0000 724.6420 552.8252 172.7515 4.0075 0.0000
+0 1999 89 6.00 0.500 6.6629 -0.2925 37.1047 0.0000 1005.7097 604.4142 125.2745 4.3629 0.0000
+0 1999 89 18.00 0.500 2.5804 -0.2908 0.0000 0.0000 716.3962 571.1130 158.6445 5.5508 0.0000
+0 1999 90 6.00 0.521 4.6048 -0.1780 33.3266 0.0000 813.8232 553.0427 181.8575 3.9996 0.0000
+0 1999 90 18.50 0.479 -1.5239 -0.2543 0.0000 0.0000 305.2074 352.5854 378.8297 2.6343 0.0000
+0 1999 91 6.00 0.521 -6.3884 -0.2772 6.2122 5.0000 26.4130 255.6700 474.7055 1.7588 0.0000
+0 1999 91 18.50 0.479 -10.5883 -0.2335 0.0000 0.0000 35.3153 378.6435 353.7188 1.0387 0.0000
+0 1999 92 6.00 0.521 -11.6892 -0.2760 0.5026 5.0000 41.0432 408.1170 322.3141 1.4968 0.0000
+0 1999 92 18.50 0.479 -14.1422 -0.3383 0.0000 0.0000 39.0859 452.4913 275.1201 1.4330 0.0000
+0 1999 93 6.00 0.521 -11.1376 -0.5240 11.9403 2.0000 90.4896 432.5085 286.7606 6.4716 0.0000
+0 1999 93 18.50 0.458 -10.6450 -0.6591 0.0000 0.0000 146.2557 473.1790 240.0734 2.9986 0.0000
+0 1999 94 5.50 0.542 -7.3754 -0.8365 16.0106 3.0000 150.7530 383.6282 321.8106 1.8585 0.0000
+0 1999 94 18.50 0.458 -8.0264 -0.9255 0.0000 0.0000 89.4121 338.2264 363.3223 2.8855 0.0000
+0 1999 95 5.50 0.542 -5.1281 -0.9742 39.8844 1.0000 267.9068 422.6594 276.7675 8.8415 0.0000
+0 1999 95 18.50 0.458 -4.7227 -1.0177 0.0000 0.0000 294.0914 434.9283 262.6119 4.8923 0.0000
+0 1999 96 5.50 0.542 1.7950 -0.9946 41.9164 0.0000 594.8963 451.9715 246.5744 2.4300 0.0000
+0 1999 96 18.50 0.458 0.5182 -1.0068 0.0000 0.0000 537.6188 467.7884 230.2242 4.1518 0.0000
+0 1999 97 5.50 0.542 4.5512 -0.8058 37.2827 0.0000 723.1623 436.6644 270.1382 2.8769 0.0000
+0 1999 97 18.50 0.458 -1.6641 -0.5864 0.0000 0.0000 300.1607 318.6330 397.8495 8.2859 0.0000
+0 1999 98 5.50 0.542 -4.7404 -0.5027 38.0493 3.0000 186.7306 345.7460 374.4697 13.2385 0.0000
+0 1999 98 18.50 0.458 -4.1014 -0.4664 0.0000 0.0000 304.2192 447.4018 274.4417 2.2318 0.0000
+0 1999 99 5.50 0.542 -2.9992 -0.5185 24.2348 1.0000 325.1390 411.5321 307.9805 5.9038 0.0000
+0 1999 99 18.50 0.458 -10.9659 -0.5555 0.0000 0.0000 119.1328 457.3528 260.5056 13.9286 0.0000
+0 1999 100 5.50 0.542 -8.2254 -0.6723 42.2141 0.0000 218.3473 478.1483 234.5251 12.7423 0.0000
+0 1999 100 18.50 0.458 -7.1895 -0.7750 0.0000 0.0000 301.1612 532.4286 175.7065 2.8227 0.0000
+0 1999 101 5.50 0.542 -1.3735 -0.8531 35.7935 1.0000 489.7682 506.2306 198.4825 1.9262 0.0000
+0 1999 101 18.50 0.458 -1.5914 -0.9768 0.0000 0.0000 371.6136 397.5076 301.8070 1.9627 0.0000
+0 1999 102 5.50 0.542 4.5635 -0.8242 32.9609 0.0000 718.3000 433.3111 272.6760 2.1935 0.0000
+0 1999 102 18.50 0.458 1.4500 -0.7641 0.0000 0.0000 475.8594 368.8344 339.7816 2.3173 0.0000
+0 1999 103 5.50 0.542 3.3815 -0.4946 32.4238 0.0000 471.8274 269.0162 451.5830 2.8727 0.0000
+0 1999 103 18.50 0.458 -1.3245 -0.4118 0.0000 0.0000 89.7764 126.9355 597.3575 1.6200 0.0000
+0 1999 104 5.50 0.542 -6.3935 -0.3619 8.2031 19.0000 33.1162 257.3725 469.1691 2.6596 0.0000
+0 1999 104 18.50 0.458 -11.0514 -0.2800 0.0000 0.0000 34.2782 385.5813 344.6681 1.8000 0.0000
+0 1999 105 5.50 0.542 -12.3038 -0.2538 34.5949 19.0000 152.2601 532.9309 198.5056 4.2869 0.0000
+0 1999 105 18.50 0.458 -13.5555 -0.2255 0.0000 0.0000 130.6181 538.5051 194.2220 5.5305 0.0000
+0 1999 106 5.50 0.542 -10.5773 -0.3073 22.6699 0.0000 147.7550 487.3832 241.6294 4.7212 0.0000
+0 1999 106 18.50 0.458 -13.5273 -0.3032 0.0000 0.0000 121.4418 524.3437 204.8542 4.4627 0.0000
+0 1999 107 5.50 0.542 -3.1877 -0.4288 44.7266 1.0000 383.1475 481.5898 241.9395 8.9138 0.0000
+0 1999 107 18.50 0.458 0.0532 -0.5005 0.0000 0.0000 436.9552 411.1225 309.1935 9.1009 0.0000
+0 1999 108 5.50 0.542 3.0954 -0.5200 24.8897 0.0000 602.5029 412.7207 306.7215 4.6785 0.0000
+0 1999 108 18.50 0.458 2.4050 -0.5050 0.0000 0.0000 508.1447 364.9231 355.1889 3.0077 0.0000
+0 1999 109 5.50 0.542 6.4088 -0.3481 31.6820 0.0000 797.1692 410.5671 316.6109 2.8277 0.0000
+0 1999 109 18.50 0.458 4.5523 -0.2527 0.0000 0.0000 658.7064 400.3010 331.1858 0.9064 0.0000
+0 1999 110 5.50 0.542 5.0785 -0.1150 35.5453 0.0000 561.0686 271.9601 465.8253 2.3327 0.0000
+0 1999 110 18.50 0.458 -0.0636 -0.1414 0.0000 0.0000 243.4541 231.2338 505.3326 1.8009 0.0000
+0 1999 111 5.50 0.542 -0.6092 -0.1165 22.4361 12.0000 368.2689 385.3982 352.3095 4.0388 0.0000
+0 1999 111 18.50 0.458 -4.1223 -0.1268 0.0000 0.0000 14.2358 175.1932 562.0381 2.1518 0.0000
+0 1999 112 5.50 0.542 -4.6946 -0.1335 11.2842 41.0000 26.5492 206.3881 530.5391 2.2835 0.0000
+0 1999 112 18.50 0.458 -6.6268 -0.1145 0.0000 0.0000 21.9461 265.8764 471.9171 3.1305 0.0000
+0 1999 113 5.50 0.542 -7.3088 -0.1100 12.0947 13.0000 26.8174 291.0956 446.9064 1.8219 0.0000
+0 1999 113 18.50 0.458 -6.7359 -0.0841 0.0000 0.0000 21.6873 269.8979 469.2932 1.6214 0.0000
+0 1999 114 5.50 0.562 -0.4456 -0.0581 37.8664 0.0000 50.1357 60.7344 679.6512 1.4707 0.0000
+0 1999 114 19.00 0.438 -2.5467 -0.0671 0.0000 0.0000 6.8810 111.9340 628.0356 1.0295 0.0000
+0 1999 115 5.50 0.562 -2.7033 -0.0515 22.2498 4.0000 125.6831 236.2720 504.4187 2.1030 0.0000
+0 1999 115 19.00 0.438 -3.5167 -0.0729 0.0000 0.0000 145.5755 287.6651 452.0419 2.0333 0.0000
+0 1999 116 5.50 0.562 -0.5293 -0.0344 26.8855 3.0000 183.5224 203.8348 537.6407 5.7674 0.0000
+0 1999 116 19.00 0.438 0.5305 -0.0181 0.0000 0.0000 210.0960 184.2521 557.9759 2.7195 0.0000
+0 1999 117 5.50 0.562 4.4070 0.0481 22.5439 0.0000 439.9691 200.3766 544.9166 2.0289 0.0000
+0 1999 117 19.00 0.417 1.6475 -0.0180 0.0000 0.0000 227.6788 146.2321 596.0013 2.0975 0.0000
+0 1999 118 5.00 0.583 4.0236 0.0557 17.5383 11.0000 355.6178 140.0717 605.5786 2.4861 0.0000
+0 1999 118 19.00 0.417 -0.8090 -0.0000 0.0000 0.0000 -0.0004 35.6726 707.3904 1.0970 0.0000
+0 1999 119 5.00 0.583 -0.9082 -0.0057 1.4627 24.0000 0.2344 40.2246 702.5745 1.5686 0.0000
+0 1999 119 19.00 0.417 -1.4170 -0.0060 0.0000 0.0000 0.9605 62.9351 679.8509 1.4610 0.0000
+0 1999 120 5.00 0.583 -1.7246 -0.0093 0.5812 35.0000 8.9154 83.1200 659.5143 0.3371 0.0000
+0 1999 120 19.00 0.417 0.2570 -0.0210 0.0000 0.0000 83.4821 70.2169 671.8774 0.4630 0.0000
+0 1999 121 5.00 0.583 5.9014 -0.0179 5.9029 18.0000 216.8192 -127.5784 869.8180 1.2321 0.0000
+0 1999 121 19.00 0.417 3.1310 -0.0210 0.0000 0.0000 103.6781 -59.1014 801.1957 2.4130 0.0000
+0 1999 122 5.00 0.583 2.8243 -0.0200 34.1366 4.0000 222.3557 77.2965 664.8440 7.9614 0.0000
+0 1999 122 19.00 0.417 -0.1820 0.0020 0.0000 0.0000 149.8277 156.8994 586.2557 2.8140 0.0000
+0 1999 123 5.00 0.583 1.7921 0.0007 32.9330 1.0000 341.1474 236.9100 506.1861 3.7736 0.0000
+0 1999 123 19.00 0.417 -3.8015 -0.0050 0.0000 0.0000 197.6419 352.9094 389.9227 8.0015 0.0000
+0 1999 124 5.00 0.583 -4.1332 0.0021 45.2088 0.0000 257.0060 423.8757 319.2865 9.4007 0.0000
+0 1999 124 19.00 0.417 -7.8275 -0.0010 0.0000 0.0000 132.3049 416.6162 326.4004 10.7855 0.0000
+0 1999 125 5.00 0.583 -6.5957 -0.0057 43.9603 1.0000 196.4123 442.9380 299.8613 6.6543 0.0000
+0 1999 125 19.00 0.417 -10.1160 0.0280 0.0000 0.0000 146.6868 491.9417 252.4154 7.1945 0.0000
+0 1999 126 5.00 0.583 -4.5000 0.0100 50.7074 0.0000 305.3621 478.3260 265.1992 8.9125 0.0000
+0 1999 126 19.00 0.417 -0.7990 -0.0260 0.0000 0.0000 385.0077 419.1891 322.6744 12.0980 0.0000
+0 1999 127 5.00 0.583 4.8425 0.0629 43.8523 0.0000 728.1859 456.8006 289.1746 5.4257 0.0000
+0 1999 127 19.00 0.417 4.3825 0.0190 0.0000 0.0000 596.7060 364.9022 379.0385 4.0340 0.0000
+0 1999 128 5.00 0.583 9.6104 0.1279 38.4309 0.0000 1012.2329 407.1969 341.7945 2.4871 0.0000
+0 1999 128 19.00 0.417 7.1525 0.0550 0.0000 0.0000 832.3412 419.0897 326.5168 2.7085 0.0000
+0 1999 129 5.00 0.583 9.4961 0.1543 39.7586 0.0000 1108.8258 513.9992 236.2278 3.0018 0.0000
+0 1999 129 19.00 0.417 1.0745 0.0780 0.0000 0.0000 231.8243 168.7292 577.9473 2.4715 0.0000
+0 1999 130 5.00 0.583 -5.7004 0.0171 30.0272 12.0000 119.4953 337.6163 406.2386 5.5839 0.0000
+0 1999 130 19.00 0.417 -9.1685 0.0310 0.0000 0.0000 116.3017 438.3626 306.1328 3.5535 0.0000
+0 1999 131 5.00 0.583 -4.9032 0.0200 43.3294 0.0000 317.3282 507.6105 236.3768 1.9579 0.0000
+0 1999 131 19.00 0.417 -5.0635 0.0070 0.0000 0.0000 296.6758 496.7327 246.6534 1.2165 0.0000
+0 1999 132 5.00 0.583 2.1682 0.0414 38.4719 0.0000 568.4230 443.6958 301.2871 2.0846 0.0000
+0 1999 132 19.00 0.417 3.5385 0.0290 0.0000 0.0000 506.4895 319.7992 424.6045 1.9785 0.0000
+0 1999 133 5.00 0.583 10.1736 0.1671 41.1615 0.0000 953.7620 301.7003 449.1250 3.8843 0.0000
+0 1999 133 19.00 0.417 2.1830 0.0830 0.0000 0.0000 155.3306 44.8292 702.0784 1.8375 0.0000
+0 1999 134 5.00 0.583 2.4839 0.1086 34.3030 0.0000 260.9455 135.0188 613.0767 4.3857 0.0000
+0 1999 134 19.00 0.417 0.6990 0.0580 0.0000 0.0000 114.5509 81.1103 664.6353 2.2395 0.0000
+0 1999 135 5.00 0.583 3.5757 0.1379 22.4764 0.0000 351.0447 161.6194 587.8429 1.9050 0.0000
+0 1999 135 19.00 0.417 1.4755 0.0650 0.0000 0.0000 216.6980 141.6184 604.4528 1.5440 0.0000
+0 1999 136 5.00 0.583 -0.9200 0.0536 11.6044 3.0000 80.6031 123.6005 621.9401 1.9821 0.0000
+0 1999 136 19.00 0.417 -5.1740 0.0190 0.0000 0.0000 112.4816 316.1963 427.7446 5.9110 0.0000
+0 1999 137 5.00 0.583 0.5371 0.0821 51.2386 1.0000 479.7760 439.4936 307.3796 2.7957 0.0000
+0 1999 137 19.00 0.417 1.8910 0.0210 0.0000 0.0000 549.1479 456.9662 287.0677 1.0800 0.0000
+0 1999 138 5.00 0.583 8.4314 0.1557 49.7695 0.0000 967.6758 447.4879 302.8117 2.0500 0.0000
+0 1999 138 19.00 0.417 6.2005 0.0750 0.0000 0.0000 778.6570 431.5240 315.0131 2.4975 0.0000
+0 1999 139 5.00 0.583 11.2986 0.1850 33.3904 0.0000 1202.1865 455.6408 296.0225 2.2575 0.0000
+0 1999 139 19.00 0.417 5.2910 0.1130 0.0000 0.0000 545.4499 250.3001 498.0013 2.1365 0.0000
+0 1999 140 5.00 0.583 2.8682 0.1364 16.0880 0.0000 42.6211 -97.0235 846.4134 1.5382 0.0000
+0 1999 140 19.00 0.417 1.9215 0.0740 0.0000 0.0000 18.6206 -73.2539 819.7412 1.7355 0.0000
+0 1999 141 5.00 0.583 10.2082 0.1614 42.4231 0.0000 896.0197 237.1819 513.3811 1.5089 0.0000
+0 1999 141 19.00 0.417 7.1380 0.0850 0.0000 0.0000 752.9943 342.4079 404.5938 2.6575 0.0000
+0 1999 142 5.00 0.583 11.6632 0.1643 45.5548 0.0000 1058.5486 264.3098 486.3868 2.0986 0.0000
+0 1999 142 19.00 0.417 3.2620 0.1400 0.0000 0.0000 16.8445 -146.1472 895.7054 1.6300 0.0000
+0 1999 143 5.00 0.604 7.1855 0.1766 41.1363 0.0000 162.6971 -257.2325 1008.4978 1.8648 0.0000
+0 1999 143 19.50 0.396 5.5753 0.1168 0.0000 0.0000 199.7192 -102.5544 851.0347 2.0763 0.0000
+0 1999 144 5.00 0.604 6.1779 0.1503 16.2180 16.0000 255.4000 -89.3032 839.3419 1.5855 0.0000
+0 1999 144 19.50 0.396 3.5926 0.0811 0.0000 0.0000 163.5909 -19.4320 766.2464 1.1489 0.0000
+0 1999 145 5.00 0.604 3.8976 0.1117 13.5293 12.0000 102.2950 -98.8591 847.0997 1.7559 0.0000
+0 1999 145 19.50 0.396 2.1968 0.0600 0.0000 0.0000 214.8349 108.5952 637.2432 3.5200 0.0000
+0 1999 146 5.00 0.604 7.0345 0.1662 28.7032 0.0000 399.5642 -2.8781 753.6614 2.2217 0.0000
+0 1999 146 19.50 0.396 3.3495 0.0989 0.0000 0.0000 73.6675 -94.8279 842.4742 0.9563 0.0000
+0 1999 147 5.00 0.604 3.0062 0.1255 11.5132 3.0000 22.5724 -124.8649 873.7463 1.3310 0.0000
+0 1999 147 19.50 0.396 2.5168 0.0695 0.0000 0.0000 164.3906 41.4497 704.8279 2.6347 0.0000
+0 1999 148 5.00 0.604 8.8366 0.1703 55.9359 0.0000 708.8497 163.7458 587.2324 3.6190 0.0000
+0 1999 148 19.50 0.375 5.9950 0.0872 0.0000 0.0000 442.8754 111.6724 635.4315 3.0611 0.0000
+0 1999 149 4.50 0.625 8.8583 0.1940 40.1980 0.0000 653.8889 112.9096 639.1724 4.3733 0.0000
+0 1999 149 19.50 0.375 5.5917 0.1506 0.0000 0.0000 407.3617 105.5165 644.5299 2.1533 0.0000
+0 1999 150 4.50 0.625 9.6680 0.1987 46.2562 0.0000 872.2360 258.0281 494.2680 2.4470 0.0000
+0 1999 150 19.50 0.375 5.6483 0.1111 0.0000 0.0000 561.6997 248.1293 500.0838 1.9272 0.0000
+0 1999 151 4.50 0.625 6.0030 0.1697 33.3265 2.0000 569.4955 217.9632 532.9819 3.6293 0.0000
+0 1999 151 19.50 0.375 -0.0644 0.0650 0.0000 0.0000 212.5094 218.3448 527.7252 12.4183 0.0000
+0 1999 152 4.50 0.625 6.9807 0.1683 50.6453 2.0000 686.5936 273.5167 477.3679 3.7160 0.0000
+0 1999 152 19.50 0.375 4.8083 0.1022 0.0000 0.0000 441.1295 184.4882 563.3126 2.3628 0.0000
+0 1999 153 4.50 0.625 9.6257 0.1937 30.7109 1.0000 743.3473 135.1818 616.8853 2.9483 0.0000
+0 1999 153 19.50 0.375 3.3817 0.1150 0.0000 0.0000 324.9416 155.9941 592.4006 4.5856 0.0000
+0 1999 154 4.50 0.625 9.3460 0.2090 49.5329 0.0000 893.9007 298.2055 454.5822 2.5337 0.0000
+0 1999 154 19.50 0.375 8.0167 0.1472 0.0000 0.0000 920.7477 449.1933 300.7032 3.0050 0.0000
+0 1999 155 4.50 0.625 10.3747 0.3283 42.7070 0.0000 968.3384 310.4501 447.9792 2.8387 0.0000
+0 1999 155 19.50 0.375 5.6300 0.2261 0.0000 0.0000 746.5686 432.2895 321.3026 2.4839 0.0000
+0 1999 156 4.50 0.625 1.8963 0.1483 18.1366 0.0000 208.2069 119.6798 630.2646 4.5667 0.0000
+0 1999 156 19.50 0.375 -1.4000 0.0728 0.0000 0.0000 191.7464 256.7006 489.7305 10.7867 0.0000
+0 1999 157 4.50 0.625 4.3047 0.1390 25.3552 2.0000 424.5099 175.7013 573.8156 7.8917 0.0000
+0 1999 157 19.50 0.375 5.3356 0.1033 0.0000 0.0000 478.7637 190.2860 557.5652 2.4594 0.0000
+0 1999 158 4.50 0.625 11.9157 0.6600 47.2779 0.0000 1117.9701 326.9212 447.5268 2.4920 0.0000
+0 1999 158 19.50 0.375 10.3628 0.6211 0.0000 0.0000 1091.9688 453.9123 318.4095 5.0122 0.0000
+0 1999 159 4.50 0.625 13.7477 1.3713 51.9926 0.0000 1485.3368 553.3365 256.3467 3.6570 0.0000
+0 1999 159 19.50 0.375 9.5633 1.0606 0.0000 0.0000 1081.8242 528.2137 265.6029 3.8750 0.0000
+0 1999 160 4.50 0.625 7.0297 1.7513 31.4419 0.0000 338.2404 17.4318 812.5920 2.2520 0.0000
+0 1999 160 19.50 0.375 2.8844 1.8222 0.0000 0.0000 36.2210 -21.4336 853.8048 1.5289 0.0000
+0 1999 161 4.50 0.625 4.3267 1.9377 14.0359 4.0000 33.3020 -102.0089 940.4981 1.7383 0.0000
+0 1999 161 19.50 0.375 1.6822 1.2861 0.0000 0.0000 54.7993 33.4711 771.6039 1.7033 0.0000
+0 1999 162 4.50 0.625 6.1863 2.7703 34.2736 1.0000 316.1693 104.2997 782.6564 2.7150 0.0000
+0 1999 162 19.50 0.375 3.7972 2.7128 0.0000 0.0000 170.1699 108.7197 771.5859 2.2350 0.0000
+0 1999 163 4.50 0.625 5.6210 2.6683 24.7689 4.0000 205.3913 25.3245 853.1331 1.8587 0.0000
+0 1999 163 19.50 0.375 2.6794 2.1706 0.0000 0.0000 141.6895 114.9491 736.2543 2.9272 0.0000
+0 1999 164 4.50 0.625 5.4827 2.8393 31.0973 1.0000 91.9507 -69.6140 959.3551 1.7300 0.0000
+0 1999 164 19.50 0.375 3.8906 2.9244 0.0000 0.0000 34.7158 -20.1940 911.8317 1.7594 0.0000
+0 1999 165 4.50 0.625 8.9223 3.3483 24.8927 4.0000 533.0370 151.3329 765.2101 2.3117 0.0000
+0 1999 165 19.50 0.375 4.7233 3.1828 0.0000 0.0000 194.3850 103.4951 802.6240 3.3639 0.0000
+0 1999 166 4.50 0.625 3.8553 2.7497 10.8797 6.0000 33.4907 -29.4730 911.6042 1.7063 0.0000
+0 1999 166 19.50 0.375 1.8472 2.6783 0.0000 0.0000 7.5356 50.8585 827.1686 1.2750 0.0000
+0 1999 167 4.50 0.625 3.4943 2.7720 14.8360 3.0000 -4.8314 -46.1342 930.0811 1.6190 0.0000
+0 1999 167 19.50 0.375 4.9644 3.0944 0.0000 0.0000 124.3793 11.2611 889.4954 2.7406 0.0000
+0 1999 168 4.50 0.625 8.9117 4.4883 40.8407 2.0000 461.4363 149.6222 835.7605 5.5647 0.0000
+0 1999 168 19.50 0.375 6.5250 4.2322 0.0000 0.0000 385.0881 238.0681 728.7227 4.6783 0.0000
+0 1999 169 4.50 0.625 11.4550 8.6390 52.5582 1.0000 818.0984 641.1279 703.3340 5.8747 0.0000
+0 1999 169 19.50 0.375 9.3550 5.3700 0.0000 0.0000 582.2587 296.6921 743.1555 4.6056 0.0000
+0 1999 170 4.50 0.625 13.0763 11.5640 35.8472 3.0000 939.6317 863.7391 729.9370 3.3937 0.0000
+0 1999 170 19.50 0.375 9.1811 4.5028 0.0000 0.0000 599.0249 273.0815 718.2030 2.4994 0.0000
+0 1999 171 4.50 0.625 13.3293 11.4100 34.0954 4.0000 988.0362 916.8029 717.0803 3.2000 0.0000
+0 1999 171 19.50 0.375 9.3322 4.0800 0.0000 0.0000 628.2461 267.4614 695.8498 3.5017 0.0000
+0 1999 172 4.50 0.625 12.5300 12.2137 37.0064 2.0000 838.9390 918.5551 779.5043 2.7887 0.0000
+0 1999 172 19.50 0.375 8.9939 7.3611 0.0000 0.0000 531.2180 408.2693 765.4587 3.9378 0.0000
+0 1999 173 4.50 0.625 13.1150 14.0337 44.6582 0.0000 1044.6625 1186.6422 628.7856 4.5903 0.0000
+0 1999 173 19.50 0.375 9.9828 7.8044 0.0000 0.0000 601.7538 434.7022 783.3956 1.8839 0.0000
+0 1999 174 4.50 0.625 13.8027 15.1810 48.6383 0.0000 1114.4550 1372.0970 640.9996 2.2630 0.0000
+0 1999 174 19.50 0.375 11.2028 8.3782 0.0000 0.0000 780.5976 551.6003 705.0032 2.8094 0.0000
+0 1999 175 4.50 0.625 15.1563 8.9746 53.6056 0.0000 1359.2358 759.1517 544.0993 3.1260 0.0000
+0 1999 175 19.50 0.375 13.0722 8.8352 0.0000 0.0000 991.7797 613.6130 670.7595 3.1572 0.0000
+0 1999 176 4.50 0.625 17.1457 9.8949 50.1637 0.0000 1562.8063 809.6924 568.3646 3.4910 0.0000
+0 1999 176 19.50 0.375 13.5433 8.8616 0.0000 0.0000 1248.7263 822.5515 465.8228 2.6006 0.0000
+0 1999 177 4.50 0.625 13.9107 9.2656 49.0779 0.0000 962.7886 536.7552 789.0518 2.5123 0.0000
+0 1999 177 19.50 0.375 11.2567 8.6462 0.0000 0.0000 760.8861 532.0084 740.6947 2.5250 0.0000
+0 1999 178 4.50 0.625 15.6120 9.5820 49.9842 0.0000 1485.1404 890.0614 461.5511 4.0407 0.0000
+0 1999 178 19.50 0.375 11.4956 8.1586 0.0000 0.0000 1161.3488 868.2712 369.4828 2.6539 0.0000
+0 1999 179 4.50 0.625 11.0153 8.4185 52.6559 0.0000 736.8426 518.8015 739.2830 1.8137 0.0000
+0 1999 179 19.50 0.375 10.3322 8.3934 0.0000 0.0000 722.9797 560.2917 690.0833 1.6444 0.0000
+0 1999 180 4.50 0.625 13.0107 9.2224 51.4006 0.0000 1082.6687 737.4759 585.3075 8.4617 0.0000
+0 1999 180 19.50 0.375 9.7144 8.3477 0.0000 0.0000 698.4242 585.1566 662.8484 3.8333 0.0000
+0 1999 181 4.50 0.625 12.5243 9.4078 50.8435 1.0000 916.0731 635.8037 701.9800 8.7897 0.0000
+0 1999 181 19.50 0.375 11.0800 8.8350 0.0000 0.0000 816.2500 625.4435 661.5085 4.3222 0.0000
+0 1999 182 4.50 0.625 15.7533 10.8290 64.7912 0.0000 1351.1769 818.4970 640.9099 3.6127 0.0000
+0 1999 182 19.50 0.375 16.1761 9.5583 0.0000 0.0000 1441.2734 767.4774 574.5771 4.0428 0.0000
+0 1999 183 4.50 0.625 18.6977 11.1460 43.2101 0.0000 1743.8600 891.4351 590.3732 3.9590 0.0000
+0 1999 183 19.50 0.375 16.0650 9.8411 0.0000 0.0000 1410.9167 785.7852 579.4329 6.4033 0.0000
+0 1999 184 4.50 0.625 17.1877 11.9603 51.5513 0.0000 1503.5193 924.1264 637.3660 6.5017 0.0000
+0 1999 184 19.50 0.375 14.3906 10.0550 0.0000 0.0000 1127.1234 711.0519 672.2823 5.4289 0.0000
+0 1999 185 4.50 0.625 17.5367 12.2773 63.4219 0.0000 1517.8726 924.3426 667.1035 5.5673 0.0000
+0 1999 185 19.50 0.375 15.9344 10.5267 0.0000 0.0000 1342.3749 793.1701 630.8114 6.3694 0.0000
+0 1999 186 4.50 0.625 15.4710 12.6223 53.7262 0.0000 949.2717 643.7588 981.5622 3.3733 0.0000
+0 1999 186 19.50 0.375 10.3272 10.5283 0.0000 0.0000 200.5844 218.1091 1207.2366 1.8961 0.0000
+0 1999 187 4.50 0.625 15.4800 12.2563 45.4275 0.0000 751.9230 411.5268 1177.0551 2.5470 0.0000
+0 1999 187 19.50 0.375 14.7606 9.4950 0.0000 0.0000 1233.9178 732.0555 605.7899 3.2572 0.0000
+0 1999 188 4.50 0.625 17.7457 11.3997 41.5341 0.0000 1435.5099 738.4057 768.2333 2.1577 0.0000
+0 1999 188 19.50 0.375 14.7706 10.2722 0.0000 0.0000 907.8673 467.7256 933.7347 4.8989 0.0000
+0 1999 189 4.50 0.625 14.3907 12.0267 44.3023 0.0000 664.7546 426.4148 1136.1906 4.3740 0.0000
+0 1999 189 19.50 0.375 12.5767 10.0589 0.0000 0.0000 954.5096 724.6331 659.6326 4.4467 0.0000
+0 1999 190 4.50 0.625 9.8237 10.9283 40.9055 0.0000 413.1705 509.5511 952.7516 1.9527 0.0000
+0 1999 190 19.50 0.375 7.2100 9.2411 0.0000 0.0000 250.1759 402.7178 914.3159 1.2444 0.0000
+0 1999 191 4.50 0.625 11.7313 11.3153 63.1955 0.0000 877.7441 836.1288 666.2369 2.5483 0.0000
+0 1999 191 19.50 0.375 9.0850 8.8111 0.0000 0.0000 593.8590 572.9361 709.6505 3.1783 0.0000
+0 1999 192 4.50 0.625 12.2127 10.2923 35.6445 2.0000 846.0120 668.7712 737.0438 2.3280 0.0000
+0 1999 192 19.50 0.375 6.9606 8.4111 0.0000 0.0000 354.4383 460.0685 791.4258 3.9594 0.0000
+0 1999 193 4.50 0.625 12.5687 10.6853 56.1911 0.0000 894.2262 717.0693 727.3006 2.5857 0.0000
+0 1999 193 19.50 0.375 11.9817 8.6206 0.0000 0.0000 974.7073 684.4534 582.6913 3.3656 0.0000
+0 1999 194 4.50 0.625 16.9310 10.7257 59.4675 0.0000 1542.2343 881.4009 563.1570 3.7000 0.0000
+0 1999 194 19.50 0.375 14.2267 8.5622 0.0000 0.0000 1093.4655 573.6256 689.1605 4.3661 0.0000
+0 1999 195 4.50 0.625 13.4150 10.5677 33.4939 2.0000 821.1194 544.6708 886.5070 3.6467 0.0000
+0 1999 195 19.50 0.375 9.9956 8.5622 0.0000 0.0000 386.1615 270.7910 991.9950 3.2206 0.0000
+0 1999 196 4.50 0.625 11.9743 10.5677 42.0093 0.0000 628.9260 500.5073 930.6706 3.2587 0.0000
+0 1999 196 19.50 0.375 10.1200 8.5622 0.0000 0.0000 598.2309 472.3457 790.4404 2.8044 0.0000
+0 1999 197 4.50 0.625 10.7130 10.5677 19.0552 6.0000 399.2303 384.0071 1047.1708 1.9680 0.0000
+0 1999 197 19.50 0.396 12.0342 8.5316 0.0000 0.0000 364.9096 63.4426 1196.9762 1.6000 0.0000
+0 1999 198 5.00 0.604 14.2141 10.6569 42.8171 1.0000 756.2380 409.3675 1029.1678 2.1779 0.0000
+0 1999 198 19.50 0.396 11.0858 8.5316 0.0000 0.0000 414.0344 200.5502 1059.8687 1.8537 0.0000
+0 1999 199 5.00 0.604 13.3786 10.6569 21.8653 1.0000 825.5856 566.3771 872.1583 2.5283 0.0000
+0 1999 199 19.50 0.396 11.3453 8.5316 0.0000 0.0000 502.6092 266.3851 994.0336 2.0421 0.0000
+0 1999 200 5.00 0.604 12.5948 10.6569 28.5525 8.0000 479.8875 297.7497 1140.7858 1.8103 0.0000
+0 1999 200 19.50 0.396 11.3174 8.5316 0.0000 0.0000 474.2511 240.5720 1019.8467 3.5121 0.0000
+0 1999 201 5.00 0.604 13.7910 10.6569 54.7047 0.0000 820.5990 517.3396 921.1958 3.0897 0.0000
+0 1999 201 19.50 0.396 12.5016 9.2121 0.0000 0.0000 719.6209 427.7331 885.9100 3.0516 0.0000
+0 1999 202 5.00 0.604 16.2986 11.1634 52.5152 0.0000 1172.8499 632.9290 849.8632 2.9269 0.0000
+0 1999 202 19.50 0.396 12.1116 9.5226 0.0000 0.0000 700.3301 469.6929 869.3779 3.5916 0.0000
+0 1999 203 5.00 0.604 13.2579 11.1634 24.1499 1.0000 597.8371 392.9838 1089.8085 1.9993 0.0000
+0 1999 203 19.50 0.396 11.1511 9.5226 0.0000 0.0000 483.6944 343.2653 995.8055 4.9005 0.0000
+0 1999 204 5.00 0.583 16.4425 11.5011 40.7686 0.0000 1285.3666 757.5754 756.8872 2.0571 0.0000
+0 1999 204 19.00 0.417 13.9130 9.0050 0.0000 0.0000 1046.0787 596.3665 701.8961 3.0530 0.0000
+0 1999 205 5.00 0.583 14.7093 10.7639 34.6318 15.0000 825.5643 429.9174 1016.7021 2.2875 0.0000
+0 1999 205 19.00 0.417 12.5020 9.7800 0.0000 0.0000 705.5175 460.5388 899.6737 6.0160 0.0000
+0 1999 206 5.00 0.583 14.8457 11.9582 36.7801 2.0000 960.8608 663.8605 893.6277 3.5464 0.0000
+0 1999 206 19.00 0.417 11.6010 9.6800 0.0000 0.0000 646.6531 478.3197 874.9362 4.1080 0.0000
+0 1999 207 5.00 0.583 14.3357 11.0446 32.6115 1.0000 927.5096 595.7745 876.8395 1.8921 0.0000
+0 1999 207 19.00 0.417 12.9360 9.3735 0.0000 0.0000 836.2249 514.6398 812.9692 2.8120 0.0000
+0 1999 208 5.00 0.583 15.1350 11.7950 39.5613 3.0000 849.2720 495.6277 1050.3763 2.7546 0.0000
+0 1999 208 19.00 0.417 12.4310 10.4240 0.0000 0.0000 519.7076 336.4740 1078.3669 2.5855 0.0000
+0 1999 209 5.00 0.583 14.4239 11.9018 23.4967 8.0000 704.9199 440.7279 1110.8284 2.6443 0.0000
+0 1999 209 19.00 0.417 12.0610 9.9255 0.0000 0.0000 538.5545 348.2467 1024.2334 4.1550 0.0000
+0 1999 210 5.00 0.583 14.9221 12.1846 40.3443 1.0000 824.3287 542.2525 1037.2377 4.7493 0.0000
+0 1999 210 19.00 0.417 12.6345 10.5285 0.0000 0.0000 626.7921 430.8439 993.2190 5.4770 0.0000
+0 1999 211 5.00 0.583 13.2968 10.5179 34.7855 48.0000 573.8964 313.2993 1119.3961 3.4707 0.0000
+0 1999 211 19.00 0.417 12.0450 8.4230 0.0000 0.0000 396.5999 86.9210 1164.7638 1.9500 0.0000
+0 1999 212 5.00 0.583 14.0050 9.0182 9.3499 29.0000 454.4125 -8.0353 1306.7106 1.7229 0.0000
+0 1999 212 19.00 0.417 12.1910 7.9220 0.0000 0.0000 332.0236 -31.7082 1246.0179 1.5130 0.0000
+0 1999 213 5.00 0.583 8.2425 8.7468 13.6521 4.5760 23.0854 58.5956 1218.4915 1.6754 0.0000
+0 1999 213 19.00 0.417 5.9145 7.8085 0.0000 0.5080 33.3213 163.4044 1042.6328 1.6420 0.0000
+0 1999 214 5.00 0.583 11.2918 10.4282 48.4360 0.0000 282.5447 209.5291 1216.0227 1.9550 0.0000
+0 1999 214 19.00 0.417 8.8800 7.9965 0.0000 0.0000 604.9999 539.6412 682.5612 2.8900 0.0000
+0 1999 215 5.00 0.583 12.2811 9.5739 49.5893 0.0000 833.7255 595.3249 753.6061 2.2675 0.0000
+0 1999 215 19.00 0.417 8.8595 8.6565 0.0000 0.0000 303.6341 288.0058 981.8478 1.6855 0.0000
+0 1999 216 5.00 0.583 9.5829 9.4443 13.8562 34.5460 119.1036 102.4535 1230.7949 1.6236 0.0000
+0 1999 216 19.00 0.417 11.8990 8.7675 0.0000 0.0000 322.8635 53.7137 1224.4860 1.4960 0.0000
+0 1999 217 5.00 0.583 10.5625 9.9714 19.0166 9.9060 82.2383 29.0876 1347.8978 1.2461 0.0000
+0 1999 217 19.00 0.417 9.3740 9.1085 0.0000 0.2540 200.6550 179.5509 1126.1071 2.9625 0.0000
+0 1999 218 5.00 0.583 12.0300 10.1786 41.2988 0.0000 503.5338 334.5380 1060.9851 2.9293 0.0000
+0 1999 218 19.00 0.417 10.9405 8.9760 0.0000 0.0000 496.5961 331.4490 963.3149 4.9360 0.0000
+0 1999 219 5.00 0.583 12.8421 11.0604 42.0790 0.0000 597.8368 431.6139 1043.3945 2.5857 0.0000
+0 1999 219 19.00 0.417 10.6940 9.1240 0.0000 0.0000 437.1347 307.0467 1001.1795 2.7125 0.0000
+0 1999 220 5.00 0.583 14.1000 10.4943 35.5059 0.0000 905.8950 557.6940 866.1230 2.1946 0.0000
+0 1999 220 19.00 0.417 10.2830 9.1200 0.0000 0.0000 439.4116 343.6994 963.1638 3.0980 0.0000
+0 1999 221 5.00 0.583 12.9482 10.1700 25.6273 0.2540 698.9730 433.6504 960.3513 2.3336 0.0000
+0 1999 221 19.00 0.417 10.5775 9.3330 0.0000 0.0000 447.9279 343.3374 980.1636 2.2945 0.0000
+0 1999 222 5.00 0.583 11.7243 10.2900 32.1088 7.3580 368.4507 237.0062 1167.0983 2.3807 0.0000
+0 1999 222 19.00 0.417 10.8655 8.7340 0.0000 0.0000 487.5778 310.4050 965.8141 4.5340 0.0000
+0 1999 223 5.00 0.583 11.9368 10.3746 47.0531 0.0000 731.0593 586.7682 825.8334 5.5221 0.0000
+0 1999 223 19.00 0.417 8.3430 8.5060 0.0000 0.0000 537.2633 548.9357 709.4756 4.5315 0.0000
+0 1999 224 5.00 0.583 10.7318 9.2314 35.7269 0.0000 735.2798 601.7121 715.2780 3.6971 0.0000
+0 1999 224 19.00 0.417 8.0660 7.9595 0.0000 0.0000 445.9560 434.6217 782.7883 1.7665 0.0000
+0 1999 225 5.00 0.583 13.1054 10.4082 56.7329 0.0000 947.6591 690.2438 731.3452 2.2407 0.0000
+0 1999 225 19.00 0.417 12.1920 8.8515 0.0000 0.0000 931.0730 639.6684 645.6210 2.9830 0.0000
+0 1999 226 5.00 0.583 14.4489 10.6221 35.7745 0.5080 1133.4041 745.9716 689.0170 2.4464 0.0000
+0 1999 226 19.00 0.417 9.9500 9.3140 0.0000 0.0000 449.0473 396.3389 925.6791 6.2660 0.0000
+0 1999 227 5.00 0.583 11.7829 9.6271 20.8843 0.0000 728.9937 537.6313 810.1575 4.3893 0.0000
+0 1999 227 19.00 0.417 10.8720 8.6465 0.0000 0.0000 714.3662 529.7219 739.3456 4.7350 0.0000
+0 1999 228 5.00 0.583 14.2864 10.9971 52.9150 0.0000 1098.8477 770.1713 700.3481 2.7825 0.0000
+0 1999 228 19.00 0.417 12.6905 9.9185 0.0000 0.0000 822.9763 570.8687 801.0545 2.8275 0.0000
+0 1999 229 5.00 0.583 12.7568 10.8525 28.4668 2.5440 683.0851 498.0399 955.4364 2.5825 0.0000
+0 1999 229 19.00 0.438 9.1195 8.5943 0.0000 0.0000 537.6912 497.5309 768.7116 3.8095 0.0000
+0 1999 230 5.50 0.562 13.7033 10.5574 54.9581 0.0000 1106.1246 803.2303 629.1179 2.1393 0.0000
+0 1999 230 19.00 0.438 12.0557 8.3819 0.0000 0.0000 914.8503 601.5790 647.7733 3.0367 0.0000
+0 1999 231 5.50 0.562 12.5752 9.7363 23.1802 2.5440 691.3493 421.7304 936.5087 2.4167 0.0000
+0 1999 231 19.00 0.438 9.1519 8.7819 0.0000 3.3060 262.7403 232.6750 1046.7468 4.8876 0.0000
+0 1999 232 5.50 0.562 13.0274 10.5956 31.8789 3.0480 596.7846 363.4401 1068.2693 2.7007 0.0000
+0 1999 232 19.00 0.438 10.0752 8.5457 0.0000 0.0000 495.0152 371.9350 890.3065 3.0714 0.0000
+0 1999 233 5.50 0.542 12.7285 9.9223 22.7113 0.2540 604.9548 347.4435 1026.2274 1.9365 0.0000
+0 1999 233 18.50 0.458 10.0577 9.0982 0.0000 0.0000 338.1138 258.4123 1046.1887 2.5791 0.0000
+0 1999 234 5.50 0.542 12.7815 10.3954 32.5269 0.0000 737.6793 515.5775 898.4756 3.0146 0.0000
+0 1999 234 18.50 0.458 10.7050 8.2682 0.0000 0.0000 647.5211 448.8287 792.6140 3.2282 0.0000
+0 1999 235 5.50 0.542 14.7473 10.6235 53.2890 0.0000 1086.7072 680.9240 757.7286 2.2181 0.0000
+0 1999 235 18.50 0.458 12.3459 9.0709 0.0000 0.0000 759.3406 472.1900 830.7444 3.8395 0.0000
+0 1999 236 5.50 0.542 16.5092 10.8473 44.7413 0.0000 1252.7184 659.0467 795.8848 2.3462 0.0000
+0 1999 236 18.50 0.458 13.7491 9.4882 0.0000 0.0000 918.1143 522.2797 813.9904 3.9859 0.0000
+0 1999 237 5.50 0.542 13.4423 10.1092 23.6660 3.3100 707.5074 383.1540 1004.9832 2.4169 0.0000
+0 1999 237 18.50 0.458 11.1482 8.6295 0.0000 0.0000 483.0561 271.9492 995.9804 4.0177 0.0000
+0 1999 238 5.50 0.542 14.9100 10.5765 35.3637 0.0000 863.9734 435.6537 996.0153 1.9738 0.0000
+0 1999 238 18.50 0.458 12.2114 9.6709 0.0000 0.0000 646.2310 415.8318 935.6349 2.5282 0.0000
+0 1999 239 5.50 0.542 14.1454 10.8692 26.2969 0.7620 759.9130 423.8956 1031.0282 2.1919 0.0000
+0 1999 239 18.50 0.458 9.3105 9.2936 0.0000 3.8080 200.1269 198.2018 1122.5750 2.6677 0.0000
+0 1999 240 5.50 0.542 12.1492 9.6592 18.7036 0.5080 461.8810 236.1586 1114.2795 2.2612 0.0000
+0 1999 240 18.50 0.458 10.5714 8.1068 0.0000 0.0000 455.8368 256.2571 971.5035 3.0577 0.0000
+0 1999 241 5.50 0.542 14.2119 9.4931 37.0921 0.0000 944.3123 488.3715 849.4474 3.1573 0.0000
+0 1999 241 18.50 0.458 11.6682 8.2641 0.0000 0.0000 661.4528 374.7528 864.8622 3.6023 0.0000
+0 1999 242 5.50 0.542 15.2988 10.1942 31.6247 0.2540 979.7447 472.9202 924.4739 3.0454 0.0000
+0 1999 242 18.50 0.458 13.5355 9.3736 0.0000 0.0000 832.7801 449.9558 876.8586 4.9141 0.0000
+0 1999 243 5.50 0.542 15.0785 11.1527 46.6907 0.0000 952.6779 554.3705 927.4755 2.3565 0.0000
+0 1999 243 18.50 0.458 9.0082 8.6900 0.0000 2.7960 275.6424 251.8694 1022.3186 2.3273 0.0000
+0 1999 244 5.50 0.542 12.4865 9.8381 30.3196 2.0000 438.9637 199.5299 1167.8373 2.1988 0.0000
+0 1999 244 18.50 0.458 9.7005 8.7918 0.0000 0.0000 366.7427 291.7708 988.9010 1.9277 0.0000
+0 1999 245 5.50 0.542 10.0885 8.4208 26.7464 11.0000 373.1269 235.4341 1018.3663 2.1546 0.0000
+0 1999 245 18.50 0.458 5.8118 6.1686 0.0000 0.0000 133.3995 156.8072 932.8622 1.7955 0.0000
+0 1999 246 5.50 0.542 8.6981 7.6385 21.8939 4.0000 277.1031 193.9680 1000.7611 2.2015 0.0000
+0 1999 246 18.50 0.458 5.8909 5.8405 0.0000 0.0000 363.5688 355.8795 712.8992 2.3459 0.0000
+0 1999 247 5.50 0.542 8.6846 7.4646 47.3047 0.0000 634.5009 540.9850 642.6478 3.0058 0.0000
+0 1999 247 18.50 0.458 4.9882 6.2941 0.0000 0.0000 162.7911 248.4656 850.6965 1.7855 0.0000
+0 1999 248 5.50 0.542 11.1312 8.2508 50.2821 0.0000 1090.4821 843.5819 400.5106 3.3869 0.0000
+0 1999 248 18.50 0.458 9.9345 6.9036 0.0000 0.0000 952.1594 718.6393 421.5733 5.4855 0.0000
+0 1999 249 5.50 0.542 13.4035 8.9704 48.9395 0.0000 1294.9828 889.1351 409.0115 5.5177 0.0000
+0 1999 249 18.50 0.458 11.4595 7.8118 0.0000 0.0000 1059.0186 757.4739 448.1513 4.5532 0.0000
+0 1999 250 5.50 0.542 14.2315 9.3696 48.5957 0.0000 1335.6072 877.9354 452.6606 1.8965 0.0000
+0 1999 250 18.50 0.458 4.3432 8.2836 0.0000 0.0000 156.5902 404.9321 837.5038 1.3264 0.0000
+0 1999 251 5.50 0.542 7.5138 8.5100 48.4679 0.0000 392.2276 451.3795 812.1403 1.8131 0.0000
+0 1999 251 18.50 0.458 7.1686 6.9082 0.0000 0.0000 664.7138 644.2018 497.9258 3.2423 0.0000
+0 1999 252 5.50 0.542 12.2173 7.7942 39.3883 0.0000 1172.5680 792.1899 416.9270 2.3815 0.0000
+0 1999 252 18.50 0.458 9.9514 7.4682 0.0000 0.0000 924.6303 730.6715 450.3132 3.5200 0.0000
+0 1999 253 5.50 0.542 10.5877 7.9638 21.0199 0.0000 833.4122 618.2064 599.9398 4.6000 0.0000
+0 1999 253 18.50 0.479 7.0500 7.4757 0.0000 0.0000 412.9845 442.3117 738.7122 4.2717 0.0000
+0 1999 254 6.00 0.521 7.5596 7.7504 23.9000 0.0000 437.1429 448.6292 752.9361 4.3588 0.0000
+0 1999 254 18.50 0.479 1.7870 6.8022 0.0000 0.0000 101.5003 389.1151 744.7603 1.6552 0.0000
+0 1999 255 6.00 0.521 -0.1408 5.8424 15.0296 0.0000 -5.4015 325.3460 742.2704 1.7352 0.0000
+0 1999 255 18.50 0.479 -0.4348 4.6539 0.0000 0.0000 50.2049 319.0391 673.3900 1.0387 0.0000
+0 1999 256 6.00 0.521 8.3924 6.1280 47.0605 0.0000 691.1865 509.1943 583.3701 1.5484 0.0000
+0 1999 256 18.50 0.479 3.8817 5.3387 0.0000 0.0000 360.3922 434.4030 602.0847 1.9191 0.0000
+0 1999 257 6.00 0.500 4.6687 5.9525 34.5658 0.0000 234.2608 318.4947 760.1490 1.8583 0.0000
+0 1999 257 18.00 0.500 3.2342 5.1479 0.0000 0.0000 297.3242 411.7284 612.4862 2.4379 0.0000
+0 1999 258 6.00 0.500 6.6217 5.8479 39.2170 0.0000 608.4891 559.4275 513.1229 2.0325 0.0000
+0 1999 258 18.00 0.500 4.7954 4.8038 0.0000 0.0000 487.1288 488.9897 513.8345 2.8992 0.0000
+0 1999 259 6.00 0.500 9.3425 5.9525 38.1386 0.0000 811.9120 562.4613 517.0912 2.3829 0.0000
+0 1999 259 18.00 0.500 7.2342 4.9738 0.0000 0.0000 686.8563 536.2031 476.5497 2.8817 0.0000
+0 1999 260 6.00 0.500 10.0858 6.2996 25.7850 0.0000 944.5339 656.8120 444.2787 1.6433 0.0000
+0 1999 260 18.00 0.500 7.2725 4.9275 0.0000 0.0000 731.0936 574.4501 436.0171 3.3463 0.0000
+0 1999 261 6.00 0.500 10.9462 5.8479 28.8571 0.0000 1019.5217 627.2515 444.5129 1.5554 0.0000
+0 1999 261 18.00 0.500 6.6758 6.1775 0.0000 0.0000 538.1134 497.3974 592.9492 2.4850 0.0000
+0 1999 262 6.00 0.500 3.4800 4.5346 8.6357 7.0000 174.0807 236.5094 748.5784 1.6117 0.0000
+0 1999 262 18.00 0.500 3.7875 3.4792 0.0000 0.0000 155.0853 137.0386 785.2472 0.9292 0.0000
+0 1999 263 6.00 0.500 5.8542 3.1104 9.2455 6.0000 203.9178 36.5069 864.9120 1.0517 0.0000
+0 1999 263 18.00 0.500 2.9550 2.7592 0.0000 0.0000 192.7412 178.0572 703.9151 2.3217 0.0000
+0 1999 264 6.00 0.500 7.5458 2.6933 44.7498 0.0000 665.7393 342.3821 536.0412 1.4875 0.0000
+0 1999 264 18.00 0.500 5.3629 2.5525 0.0000 0.0000 592.4536 426.2927 444.4859 2.5912 0.0000
+0 1999 265 6.00 0.500 11.1971 3.5742 43.6270 0.0000 1123.6063 562.4993 368.4860 2.3912 0.0000
+0 1999 265 18.00 0.500 7.8996 3.7800 0.0000 0.0000 801.9341 529.0413 411.3644 2.7792 0.0000
+0 1999 266 6.00 0.500 10.3858 5.3967 39.7166 0.0000 899.8961 526.5442 517.8754 2.1163 0.0000
+0 1999 266 18.00 0.500 7.6658 5.3446 0.0000 0.0000 616.5759 455.6212 579.9887 4.6279 0.0000
+0 1999 267 6.00 0.500 7.6142 6.0646 27.7537 0.0000 509.6331 394.2102 689.4380 4.7483 0.0000
+0 1999 267 18.00 0.500 6.6413 5.4296 0.0000 0.0000 371.3660 289.3956 751.4006 2.8517 0.0000
+0 1999 268 6.00 0.500 10.9567 6.3133 39.6153 0.0000 805.2650 428.9077 671.7718 13.5067 0.0000
+0 1999 268 18.00 0.500 10.5146 6.3529 0.0000 0.0000 876.8408 553.6895 548.2358 8.5013 0.0000
+0 1999 269 6.00 0.500 6.9571 6.6504 36.1661 0.0000 565.7363 520.4664 602.6605 4.3608 0.0000
+0 1999 269 18.00 0.500 -3.6813 4.4429 0.0000 0.0000 9.0930 395.6595 585.1342 1.1508 0.0000
+0 1999 270 6.00 0.500 -3.0838 3.3333 14.9035 0.0000 62.2965 359.5921 554.4641 1.5087 0.0000
+0 1999 270 18.00 0.500 -5.1838 2.5654 0.0000 0.0000 13.2663 345.2177 526.2764 1.1883 0.0000
+0 1999 271 6.00 0.500 -6.7512 2.2687 8.7901 10.0000 18.9379 385.0648 470.4591 1.6492 0.0000
+0 1999 271 18.00 0.500 -5.1942 2.2313 0.0000 0.0000 191.4768 500.8269 352.7139 3.1442 0.0000
+0 1999 272 6.00 0.500 6.8250 2.1837 41.3582 5.0000 929.8864 633.0191 218.0042 6.5654 0.0000
+0 1999 272 18.00 0.500 5.5375 2.2062 0.0000 0.0000 859.3516 661.6462 190.5630 1.8829 0.0000
+0 1999 273 6.00 0.500 7.4504 2.1796 40.3556 0.0000 828.4953 473.4991 377.3008 1.8396 0.0000
+0 1999 273 18.00 0.500 7.6608 2.0650 0.0000 0.0000 948.7494 597.6528 247.1151 9.7413 0.0000
+0 1999 274 6.00 0.500 7.5700 2.8875 30.6072 0.0000 844.3510 523.7900 366.0400 4.4129 0.0000
+0 1999 274 18.00 0.500 2.4446 2.6158 0.0000 0.0000 416.0843 423.0604 451.3469 5.1833 0.0000
+0 1999 275 6.00 0.500 6.9421 3.0792 33.1812 0.0000 919.7171 673.5170 227.5930 5.0829 0.0000
+0 1999 275 18.00 0.500 4.7175 3.1400 0.0000 0.0000 767.3422 669.9399 233.6060 3.9475 0.0000
+0 1999 276 6.00 0.500 7.6758 3.0154 40.3520 0.0000 929.1637 618.2375 278.9791 5.8133 0.0000
+0 1999 276 18.00 0.521 4.5604 2.6128 0.0000 0.0000 713.9590 600.1317 274.1954 4.1268 0.0000
+0 1999 277 6.50 0.479 9.9339 3.6248 38.6415 0.0000 1187.7245 742.8023 189.6812 5.6091 0.0000
+0 1999 277 18.00 0.521 6.9296 2.7384 0.0000 0.0000 1006.8996 745.2366 136.3660 3.1676 0.0000
+0 1999 278 6.50 0.479 11.1552 3.6670 38.0398 0.0000 1265.4408 715.3229 220.7269 2.6513 0.0000
+0 1999 278 18.00 0.521 9.3768 3.9740 0.0000 0.0000 884.3024 507.7024 443.3583 2.5324 0.0000
+0 1999 279 6.50 0.479 9.5552 5.0800 16.8764 0.0000 721.1496 395.3724 624.0925 2.3765 0.0000
+0 1999 279 18.00 0.521 4.7712 4.4248 0.0000 0.0000 381.3354 357.3491 621.3177 3.6764 0.0000
+0 1999 280 6.50 0.458 1.5909 2.8636 7.5821 3.0000 46.1663 112.0229 775.7838 1.6468 0.0000
+0 1999 280 17.50 0.542 2.1654 2.2554 0.0000 0.0000 301.9323 306.4094 548.4360 6.1723 0.0000
+0 1999 281 6.50 0.458 6.1036 3.1305 36.8663 1.0000 516.4776 331.3272 572.8887 1.9018 0.0000
+0 1999 281 17.50 0.542 6.6450 3.2792 0.0000 0.0000 645.3888 434.0599 477.0202 5.0650 0.0000
+0 1999 282 6.50 0.458 11.8868 4.4955 37.0973 0.0000 1205.1162 639.6027 344.5871 5.1627 0.0000
+0 1999 282 17.50 0.542 9.2677 4.1785 0.0000 0.0000 940.7806 584.8295 378.8401 2.4288 0.0000
+0 1999 283 6.50 0.458 12.1482 4.9668 36.5564 0.0000 1190.2904 625.3779 388.2388 3.4764 0.0000
+0 1999 283 17.50 0.542 9.1081 4.0288 0.0000 0.0000 949.4020 599.2377 356.1972 3.5085 0.0000
+0 1999 284 6.50 0.458 12.4532 4.8909 36.3951 0.0000 1329.3041 732.3967 276.0456 4.7705 0.0000
+0 1999 284 17.50 0.542 9.1177 4.2435 0.0000 0.0000 1056.0227 713.4937 254.3697 2.4081 0.0000
+0 1999 285 6.50 0.458 11.3291 4.5064 25.5119 0.0000 1226.4044 705.8772 278.4681 3.6155 0.0000
+0 1999 285 17.50 0.542 8.0769 4.1204 0.0000 0.0000 976.9790 707.8247 252.6367 2.8227 0.0000
+0 1999 286 6.50 0.458 11.8964 3.0418 35.3862 0.0000 1289.0088 631.2289 266.4712 2.1232 0.0000
+0 1999 286 17.50 0.542 9.0808 3.4969 0.0000 0.0000 1029.7487 648.4368 275.5321 3.2531 0.0000
+0 1999 287 6.50 0.458 9.9527 4.6205 25.4243 0.0000 1185.8568 794.6258 196.1012 4.3577 0.0000
+0 1999 287 17.50 0.542 7.7515 4.0842 0.0000 0.0000 1048.1915 802.9670 154.9246 3.4112 0.0000
+0 1999 288 6.50 0.458 2.9659 3.9786 21.9334 0.0000 483.7806 498.7734 452.9032 2.1818 0.0000
+0 1999 288 17.50 0.542 -4.8096 2.6627 0.0000 0.0000 13.0490 335.9933 540.9340 1.6385 0.0000
+0 1999 289 6.50 0.458 -9.3268 1.8409 7.7346 4.0000 26.7687 440.8890 392.2094 1.7745 0.0000
+0 1999 289 17.50 0.542 -11.2477 1.4804 0.0000 0.0000 106.2737 547.3777 267.3408 1.7338 0.0000
+0 1999 290 6.50 0.458 -1.4627 1.2186 34.5128 1.0000 344.9955 458.3499 343.2024 2.0727 0.0000
+0 1999 290 17.50 0.542 -1.8546 1.3981 0.0000 0.0000 341.9955 486.8073 323.6907 4.1246 0.0000
+0 1999 291 6.50 0.458 -5.8473 1.3450 11.6980 12.0000 17.4885 307.7832 500.0418 1.7741 0.0000
+0 1999 291 17.50 0.542 -7.8638 1.3919 0.0000 0.0000 33.3496 385.6626 424.5204 1.7515 0.0000
+0 1999 292 6.50 0.458 -1.5059 1.4064 32.5105 0.0000 358.2152 484.5881 326.3228 3.3318 0.0000
+0 1999 292 17.50 0.542 0.1581 1.5058 0.0000 0.0000 516.6469 581.9235 234.0079 7.4650 0.0000
+0 1999 293 6.50 0.458 5.1464 1.5395 33.3434 0.0000 762.8954 552.3420 265.3040 6.4132 0.0000
+0 1999 293 17.50 0.542 4.3958 1.6142 0.0000 0.0000 802.0112 646.4672 174.9797 6.6188 0.0000
+0 1999 294 6.50 0.458 8.5545 1.6136 32.8658 0.0000 1088.0031 644.2474 177.1691 5.0605 0.0000
+0 1999 294 17.50 0.542 4.5727 1.5596 0.0000 0.0000 806.2849 636.6351 182.0303 4.2892 0.0000
+0 1999 295 6.50 0.458 7.2768 1.5145 32.2841 0.0000 885.0129 532.0212 284.3551 2.0073 0.0000
+0 1999 295 17.50 0.542 4.4154 1.4450 0.0000 0.0000 739.7385 573.9374 238.9242 2.6796 0.0000
+0 1999 296 6.50 0.458 8.5745 1.3018 31.9444 0.0000 1035.2733 571.9982 233.6615 2.0273 0.0000
+0 1999 296 17.50 0.542 6.4800 1.3062 0.0000 0.0000 981.8403 676.7192 129.1582 2.5373 0.0000
+0 1999 297 6.50 0.458 9.5855 1.2445 30.8254 0.0000 1227.4647 682.1069 120.6917 4.1373 0.0000
+0 1999 297 17.50 0.542 5.6546 1.2723 0.0000 0.0000 894.2605 640.9659 163.2185 3.7888 0.0000
+0 1999 298 6.50 0.458 8.9877 1.3582 31.1245 0.0000 1141.8925 649.2475 159.2678 2.6632 0.0000
+0 1999 298 17.50 0.542 7.4088 1.4985 0.0000 0.0000 1017.3646 656.3557 159.2074 4.5088 0.0000
+0 1999 299 6.50 0.458 11.1591 2.2982 30.4889 0.0000 1318.0142 685.3177 172.4165 2.7250 0.0000
+0 1999 299 17.50 0.562 7.2781 1.7870 0.0000 0.0000 1005.1355 666.5236 163.9296 6.6330 0.0000
+0 1999 300 7.00 0.438 7.7614 2.2790 30.7102 0.0000 969.9126 615.4864 240.9788 6.1871 0.0000
+0 1999 300 17.50 0.562 2.3256 1.6093 0.0000 0.0000 532.7676 474.9556 346.4186 2.6833 0.0000
+0 1999 301 7.00 0.438 8.7152 1.8219 22.9739 0.0000 1079.9052 637.0750 195.4311 4.8129 0.0000
+0 1999 301 17.50 0.562 0.8522 1.8556 0.0000 0.0000 309.0117 339.9459 494.0649 4.2774 0.0000
+0 1999 302 7.00 0.438 -3.6924 1.2871 26.0951 2.0000 242.9212 456.0175 348.9135 5.8705 0.0000
+0 1999 302 17.50 0.562 -4.4026 0.9144 0.0000 0.0000 276.1787 495.6569 290.8846 8.6752 0.0000
+0 1999 303 7.00 0.438 4.5829 0.7029 29.6467 0.0000 749.3799 530.7804 245.4577 2.7143 0.0000
+0 1999 303 17.50 0.562 4.4511 0.7641 0.0000 0.0000 690.2792 487.0425 292.1604 2.8267 0.0000
+0 1999 304 7.00 0.417 10.5685 0.6010 29.2923 0.0000 1219.1333 560.3706 210.9829 3.4895 0.0000
+0 1999 304 17.00 0.583 4.2357 0.8275 0.0000 0.0000 649.3510 453.9455 328.3199 1.9914 0.0000
+0 1999 305 7.00 0.417 -3.8380 0.8145 25.8005 0.0000 305.0428 498.3689 283.2650 1.9470 0.0000
+0 1999 305 17.00 0.583 -4.3989 0.2846 0.0000 0.0000 404.6828 579.5822 176.9203 2.3500 0.0000
+0 1999 306 7.00 0.417 4.5705 0.0200 27.1636 0.0000 888.2788 643.9581 100.0639 4.7875 0.0000
+0 1999 306 17.00 0.583 2.2032 0.2568 0.0000 0.0000 694.3481 595.7683 159.2469 7.3864 0.0000
+0 1999 307 7.00 0.417 5.0295 0.2520 26.7134 0.0000 922.3051 659.0149 95.7845 7.6250 0.0000
+0 1999 307 17.00 0.583 3.7182 0.3968 0.0000 0.0000 759.8107 585.1727 176.4365 7.9000 0.0000
+0 1999 308 7.00 0.417 6.6235 0.5870 15.1474 0.0000 926.4244 573.4139 197.2838 9.4345 0.0000
+0 1999 308 17.00 0.583 3.9707 0.8539 0.0000 0.0000 730.0505 551.3954 232.1653 6.4329 0.0000
+0 1999 309 7.00 0.417 7.4945 1.1380 28.0548 0.0000 996.4031 597.9354 199.8515 4.3075 0.0000
+0 1999 309 17.00 0.583 6.4211 1.1914 0.0000 0.0000 958.6184 652.0408 148.1378 6.0161 0.0000
+0 1999 310 7.00 0.417 9.3950 1.7330 27.0650 0.0000 1163.0157 658.7277 169.1386 2.0635 0.0000
+0 1999 310 17.00 0.583 6.1143 1.0204 0.0000 0.0000 929.3166 635.1299 156.7059 4.0004 0.0000
+0 1999 311 7.00 0.417 9.8365 1.3515 25.9879 0.0000 1213.6938 654.5184 154.0329 3.6065 0.0000
+0 1999 311 17.00 0.583 7.6871 1.1529 0.0000 0.0000 1031.1769 632.3566 166.0059 3.1479 0.0000
+0 1999 312 7.00 0.417 8.4035 1.8100 25.5202 0.0000 944.6380 525.2674 306.5203 3.2900 0.0000
+0 1999 312 17.00 0.583 4.6121 1.5829 0.0000 0.0000 649.9073 477.9981 341.9276 3.3250 0.0000
+0 1999 313 7.00 0.417 4.0365 1.4985 26.2757 0.0000 687.5880 546.9869 268.6820 4.5775 0.0000
+0 1999 313 17.00 0.583 2.9211 1.0404 0.0000 0.0000 680.0355 580.9805 211.7497 5.7629 0.0000
+0 1999 314 7.00 0.417 8.2240 1.4940 17.9123 0.0000 1105.8448 668.9400 146.6959 2.8755 0.0000
+0 1999 314 17.00 0.583 6.1336 1.1525 0.0000 0.0000 950.1088 661.0110 137.3199 3.0346 0.0000
+0 1999 315 7.00 0.417 8.7230 1.6425 10.1309 0.0000 1182.8149 725.9448 97.2407 4.3700 0.0000
+0 1999 315 17.00 0.583 7.4625 1.1821 0.0000 0.0000 1087.4720 706.9080 93.1025 3.0939 0.0000
+0 1999 316 7.00 0.417 11.1890 1.7965 25.3577 0.0000 1392.4218 737.1891 94.2100 3.2620 0.0000
+0 1999 316 17.00 0.583 8.0757 1.8139 0.0000 0.0000 1137.8511 743.2602 88.5110 6.8193 0.0000
+0 1999 317 7.00 0.417 10.6890 2.4955 25.1381 0.0000 1344.7056 770.8033 97.4855 5.6045 0.0000
+0 1999 317 17.00 0.583 7.3843 1.3321 0.0000 0.0000 1024.2748 657.1208 150.6138 3.3929 0.0000
+0 1999 318 7.00 0.417 10.6810 1.6040 24.7617 0.0000 1231.6489 613.7623 207.7878 2.3740 0.0000
+0 1999 318 17.00 0.583 7.7614 0.9221 0.0000 0.0000 1052.0088 636.8513 150.2984 3.1329 0.0000
+0 1999 319 7.00 0.417 11.5140 1.5110 24.5259 0.0000 1394.9030 696.1219 120.6418 4.4430 0.0000
+0 1999 319 17.00 0.583 7.2682 1.2100 0.0000 0.0000 1039.3142 673.4645 127.7623 3.7543 0.0000
+0 1999 320 7.00 0.417 9.6235 1.8930 23.8349 0.0000 1102.4274 588.5374 247.9015 3.4300 0.0000
+0 1999 320 17.00 0.583 7.0396 1.6596 0.0000 0.0000 803.6329 477.3911 346.5490 2.5993 0.0000
+0 1999 321 7.00 0.417 8.7995 2.5200 23.5337 0.0000 887.8195 473.9761 395.6006 6.1325 0.0000
+0 1999 321 17.00 0.583 1.0521 1.8625 0.0000 2.5400 381.4949 394.1652 440.5124 6.9454 0.0000
+0 1999 322 7.00 0.417 -6.6690 0.7460 19.9273 1.0160 165.3609 451.6664 326.6627 12.6465 0.0000
+0 1999 322 17.00 0.583 -3.0296 0.3854 0.0000 0.0000 425.5532 561.1609 199.9128 8.2868 0.0000
+0 1999 323 7.00 0.417 3.3845 0.4660 20.7220 0.0000 821.7053 667.7036 97.1989 6.1835 0.0000
+0 1999 323 17.00 0.583 3.9996 0.5764 0.0000 0.0000 783.3287 600.2471 169.9109 7.5875 0.0000
+0 1999 324 7.00 0.417 1.4205 0.5610 20.5259 0.0000 504.0924 458.7416 310.6824 9.8080 0.0000
+0 1999 324 17.00 0.583 -4.5646 0.2882 0.0000 0.0000 189.2135 383.7526 372.7715 7.5379 0.0000
+0 1999 325 7.00 0.417 -4.6935 0.0585 16.4529 1.2700 229.5189 415.9141 329.8580 4.4640 0.0000
+0 1999 325 17.00 0.583 -9.0929 -0.0025 0.0000 8.6360 27.5259 346.1189 396.8293 1.6964 0.0000
+0 1999 326 7.00 0.417 -9.9985 0.0695 10.5890 3.5560 30.0764 374.6891 371.5912 1.8530 0.0000
+0 1999 326 17.00 0.583 -12.3343 0.1075 0.0000 0.2540 69.3675 467.9111 280.1323 3.2025 0.0000
+0 1999 327 7.00 0.417 -12.1525 0.0150 22.1683 0.0000 157.1599 547.2684 196.4877 4.0325 0.0000
+0 1999 327 17.00 0.583 -12.5182 -0.0711 0.0000 0.0000 220.5888 614.8901 124.9011 3.8289 0.0000
+0 1999 328 7.00 0.417 -10.0840 -0.1765 22.1305 0.0000 274.1323 608.9630 125.9962 7.9055 0.0000
+0 1999 328 17.00 0.604 -9.9386 -0.2214 0.0000 0.0000 177.2180 506.2294 226.6839 13.8562 0.0000
+0 1999 329 7.50 0.396 -6.1479 -0.2089 15.5860 0.0000 151.7509 376.3645 357.1155 14.7847 0.0000
+0 1999 329 17.00 0.604 -2.8755 -0.0883 0.0000 3.8100 117.6818 234.3564 504.6451 14.6745 0.0000
+0 1999 330 7.50 0.396 1.2058 0.0353 20.5963 0.0000 307.9352 249.7886 494.9054 13.5321 0.0000
+0 1999 330 17.00 0.604 1.6234 0.1172 0.0000 0.0000 473.3904 399.7998 348.6956 11.4955 0.0000
+0 1999 331 7.50 0.396 2.2563 0.1563 21.2920 0.0000 482.2572 377.3928 372.9221 11.6511 0.0000
+0 1999 331 17.00 0.604 0.1834 0.1838 0.0000 0.0000 413.2591 412.0095 339.5872 4.6486 0.0000
+0 1999 332 7.50 0.396 2.4184 0.1711 20.9345 0.0000 542.0164 427.7196 323.2835 2.5237 0.0000
+0 1999 332 17.00 0.604 1.2666 0.1876 0.0000 0.0000 503.9045 451.5392 300.2356 3.9621 0.0000
+0 1999 333 7.50 0.396 5.0032 0.1916 21.2396 0.0000 742.1721 478.2764 273.6856 4.2042 0.0000
+0 1999 333 17.00 0.604 5.2490 0.2759 0.0000 0.0000 761.2972 486.2700 269.6389 6.3614 0.0000
+0 1999 334 7.50 0.375 7.4067 0.3417 12.4952 0.0000 949.2961 530.9357 228.0701 4.9617 0.0000
+0 1999 334 16.50 0.625 3.5260 0.3927 0.0000 0.0000 798.6539 633.2783 128.1360 8.3000 0.0000
+0 1999 335 7.50 0.375 4.6006 0.4044 18.7355 0.0000 832.0424 603.3065 158.6654 3.3367 0.0000
+0 1999 335 16.50 0.625 -1.4263 0.4140 0.0000 0.0000 347.3062 425.1044 337.3212 3.7197 0.0000
+0 1999 336 7.50 0.375 -3.0472 0.2844 20.7869 0.0000 352.4689 493.3708 262.9420 5.5028 0.0000
+0 1999 336 16.50 0.625 -5.5627 0.0983 0.0000 6.6200 197.9833 417.2848 330.3361 3.9490 0.0000
+0 1999 337 7.50 0.375 -8.6761 0.0328 6.5531 7.3660 27.2133 336.3554 408.2221 3.6072 0.0000
+0 1999 337 16.50 0.625 -11.4903 0.0523 0.0000 0.7620 46.9396 424.8744 320.6085 2.2830 0.0000
+0 1999 338 7.50 0.375 -9.6917 0.0267 19.9548 0.2540 136.8934 470.0742 274.2207 2.9900 0.0000
+0 1999 338 16.50 0.625 -6.0653 -0.0013 0.0000 0.0000 354.1333 584.2349 158.7669 6.1390 0.0000
+0 1999 339 7.50 0.375 -0.5911 -0.0567 20.9624 0.0000 587.1967 609.7329 130.7184 7.7567 0.0000
+0 1999 339 16.50 0.625 -2.7850 -0.0177 0.0000 0.0000 524.8233 638.7958 103.4520 4.9330 0.0000
+0 1999 340 7.50 0.375 -5.3861 -0.0378 20.9403 0.2540 309.7897 518.0377 223.2833 12.4411 0.0000
+0 1999 340 16.50 0.625 -5.6830 -0.0673 0.0000 0.0000 271.5767 488.7280 251.2330 7.5177 0.0000
+0 1999 341 7.50 0.375 -5.7961 -0.0883 12.2818 1.0160 157.2665 374.9002 364.0959 6.0589 0.0000
+0 1999 341 16.50 0.625 -10.4880 -0.0877 0.0000 2.5400 76.5379 423.9000 315.1274 2.5543 0.0000
+0 1999 342 7.50 0.375 -12.1050 -0.1378 5.2589 2.5440 34.8792 417.6853 319.0440 1.9478 0.0000
+0 1999 342 16.50 0.625 -12.0180 -0.1337 0.0000 0.0000 161.6665 541.6655 195.2519 3.5630 0.0000
+0 1999 343 7.50 0.375 -3.1761 -0.1622 19.1792 0.0000 516.7947 638.4976 97.1137 3.3606 0.0000
+0 1999 343 16.50 0.625 -4.4970 -0.1380 0.0000 0.0000 323.7159 497.5786 239.1406 2.4960 0.0000
+0 1999 344 7.50 0.375 -2.7517 -0.1461 20.0169 0.0000 359.7294 466.8204 269.5275 1.9406 0.0000
+0 1999 344 16.50 0.625 -9.9517 -0.1313 0.0000 0.0000 118.7219 451.9041 285.1203 1.5393 0.0000
+0 1999 345 7.50 0.375 -9.7844 -0.1761 20.3215 0.0000 215.5338 543.2363 191.7407 5.9939 0.0000
+0 1999 345 16.50 0.625 -10.8910 -0.2243 0.0000 0.0000 165.3690 517.1046 215.6741 7.7477 0.0000
+0 1999 346 7.50 0.375 -3.3700 -0.2856 20.1713 1.0000 419.6354 539.5988 190.3975 6.9117 0.0000
+0 1999 346 16.50 0.625 -5.2143 -0.2593 0.0000 0.0000 313.2464 505.3185 225.8681 4.7437 0.0000
+0 1999 347 7.50 0.375 -4.6317 -0.2556 12.6756 0.0000 266.9491 438.2112 293.1469 5.6956 0.0000
+0 1999 347 16.50 0.625 -12.4730 -0.2573 0.0000 0.5080 109.7172 483.4653 247.8128 9.0847 0.0000
+0 1999 348 7.50 0.375 -16.4111 -0.3856 13.1501 0.0000 119.3010 569.4340 156.0430 6.5800 0.0000
+0 1999 348 16.50 0.625 -15.5667 -0.5203 0.0000 0.2540 108.0172 538.3380 181.0915 15.5067 0.0000
+0 1999 349 7.50 0.375 -11.8444 -0.6306 12.7843 0.0000 151.7812 506.4223 208.0953 13.8667 0.0000
+0 1999 349 16.50 0.625 -9.0723 -0.6467 0.0000 0.7620 101.6461 390.2984 323.5042 15.6910 0.0000
+0 1999 350 7.50 0.375 -5.4194 -0.6306 16.0240 0.0000 159.4754 341.3455 373.1720 11.4467 0.0000
+0 1999 350 16.50 0.625 -4.8740 -0.5390 0.0000 2.5400 137.1282 305.9583 412.6357 10.6850 0.0000
+0 1999 351 7.50 0.375 -5.6033 -0.4644 12.7550 0.5080 180.6749 377.2923 344.6364 3.6206 0.0000
+0 1999 351 16.50 0.625 -6.0100 -0.4307 0.0000 0.2540 293.8483 502.4958 220.9493 7.9433 0.0000
+0 1999 352 7.50 0.375 -4.7200 -0.4628 9.0677 2.7980 258.7339 421.5019 300.5014 7.1283 0.0000
+0 1999 352 16.50 0.625 -9.9470 -0.4280 0.0000 1.5240 104.6858 424.5646 299.0003 8.6237 0.0000
+0 1999 353 7.50 0.375 -10.6678 -0.4583 12.8245 0.0000 129.1018 465.4911 256.7116 5.9006 0.0000
+0 1999 353 16.50 0.625 -11.6013 -0.5140 0.0000 1.0160 96.5888 451.5984 268.1117 10.1240 0.0000
+0 1999 354 7.50 0.375 -11.5789 -0.5717 10.7089 0.0000 132.3902 484.3210 232.8149 3.7578 0.0000
+0 1999 354 16.50 0.625 -12.3053 -0.6050 0.0000 0.5080 107.0575 472.9277 242.7250 5.7063 0.0000
+0 1999 355 7.50 0.375 -11.6411 -0.6517 7.1039 0.7620 56.9226 406.5891 306.9918 2.4928 0.0000
+0 1999 355 16.50 0.625 -14.0717 -0.6603 0.0000 0.7620 84.2630 482.0623 231.1345 6.4393 0.0000
+0 1999 356 7.50 0.375 -9.6500 -0.6922 15.3149 0.0000 187.8937 487.8848 223.9008 10.2144 0.0000
+0 1999 356 16.50 0.625 -7.5157 -0.6927 0.0000 0.0000 185.2978 429.0587 282.7072 9.4360 0.0000
+0 1999 357 7.50 0.375 -1.0778 -0.6622 19.7874 0.0000 401.7540 415.8833 297.2300 3.1211 0.0000
+0 1999 357 16.50 0.625 -1.8937 -0.5767 0.0000 0.0000 419.8522 475.7632 241.1508 3.6660 0.0000
+0 1999 358 7.50 0.375 1.3744 -0.5844 19.3122 0.0000 556.3433 460.5474 256.0197 2.3483 0.0000
+0 1999 358 16.50 0.625 -0.9557 -0.5577 0.0000 0.0000 480.3181 497.4891 220.2711 2.6363 0.0000
+0 1999 359 7.50 0.375 1.2611 -0.6017 19.6789 0.0000 578.4732 488.8172 226.9841 2.3056 0.0000
+0 1999 359 16.50 0.625 -3.6510 -0.5897 0.0000 0.0000 314.8934 437.8394 278.4953 2.2303 0.0000
+0 1999 360 7.50 0.375 0.5928 -0.6622 19.7343 0.0000 505.5257 443.5813 269.5328 1.8717 0.0000
+0 1999 360 16.50 0.625 -1.2103 -0.6403 0.0000 0.0000 477.6758 501.4790 212.6054 4.2327 0.0000
+0 1999 361 7.50 0.375 0.1978 -0.6072 19.4598 0.0000 486.1096 446.3345 269.2191 3.4867 0.0000
+0 1999 361 16.50 0.625 2.5417 -0.5240 0.0000 0.0000 619.0571 464.4433 254.8217 7.9610 0.0000
+0 1999 362 7.50 0.375 6.1694 -0.4072 19.8067 0.0000 903.1602 533.3886 191.1116 3.0433 0.0000
+0 1999 362 16.50 0.625 3.1527 -0.2853 0.0000 0.0000 744.8231 570.0106 159.9970 7.2580 0.0000
+0 1999 363 7.50 0.375 3.0694 -0.2483 19.5961 0.0000 765.1964 596.0516 135.6348 9.3489 0.0000
+0 1999 363 16.50 0.625 -1.4383 -0.2367 0.0000 0.0000 498.5781 548.9095 183.3075 7.8067 0.0000
+0 1999 364 7.50 0.375 -0.1522 -0.2956 20.0534 0.0000 551.4215 541.5231 188.0201 5.2161 0.0000
+0 1999 364 16.50 0.625 -2.9407 -0.3233 0.0000 0.0000 416.4024 524.5122 203.7735 6.6237 0.0000
+0 1999 365 7.50 0.375 -1.6700 -0.4094 20.0932 0.0000 514.4086 567.5634 156.8364 10.7844 0.0000
+0 1999 365 16.50 0.625 -2.6183 -0.4557 0.0000 0.0000 486.5772 575.7722 146.5509 2.8567 0.0000
+0 2000 1 7.50 0.375 -3.6433 -0.5217 17.1088 0.0000 279.4025 405.5224 313.8444 4.2244 0.0000
+0 2000 1 16.50 0.625 -8.4230 -0.5103 0.0000 0.2540 89.9719 365.6837 354.1898 2.1040 0.0000
+0 2000 2 7.50 0.375 -8.1133 -0.5667 14.7046 0.2540 165.4830 432.2470 285.1118 2.3111 0.0000
+0 2000 2 16.50 0.625 -13.1233 -0.6277 0.0000 0.0000 111.4885 489.1741 225.4741 4.2227 0.0000
+0 2000 3 7.50 0.375 -15.5811 -0.8011 17.5437 0.0000 159.6605 577.9340 129.0564 6.4606 0.0000
+0 2000 3 16.50 0.625 -14.8487 -1.0017 0.0000 0.0000 121.5325 517.0674 181.1764 13.0447 0.0000
+0 2000 4 7.50 0.375 -6.0217 -1.1856 20.0871 0.0000 256.2194 430.3482 259.9592 11.8300 0.0000
+0 2000 4 16.50 0.625 -7.3063 -1.0943 0.0000 0.0000 209.0288 427.2251 267.0072 8.1553 0.0000
+0 2000 5 7.50 0.375 -10.7728 -0.9894 17.3227 0.0000 157.9234 473.2015 225.5643 7.9033 0.0000
+0 2000 5 16.50 0.625 -14.5460 -1.0157 0.0000 0.0000 133.8241 524.4395 173.1907 7.7577 0.0000
+0 2000 6 7.50 0.375 -9.8556 -1.1611 20.9408 0.0000 297.3701 580.3404 111.0167 6.9778 0.0000
+0 2000 6 16.50 0.625 -6.3300 -1.2007 0.0000 0.0000 332.9577 518.6580 171.0017 5.5193 0.0000
+0 2000 7 7.50 0.375 -5.0200 -1.1650 16.4884 0.0000 352.8870 498.4305 192.7585 2.7733 0.0000
+0 2000 7 16.50 0.625 -7.5880 -1.0637 0.0000 0.0000 253.4273 482.9854 212.5677 7.7380 0.0000
+0 2000 8 7.50 0.375 -6.6583 -1.0689 19.3603 0.0000 332.8374 535.0888 160.2378 10.0894 0.0000
+0 2000 8 16.50 0.625 -10.5267 -1.1050 0.0000 0.5080 145.5888 449.1519 244.6180 11.7890 0.0000
+0 2000 9 7.50 0.375 -12.1056 -1.2044 12.6166 0.0000 111.9400 447.5607 241.9377 11.1583 0.0000
+0 2000 9 16.50 0.625 -10.2790 -1.2670 0.0000 8.8900 84.3981 375.8665 310.9583 8.5130 0.0000
+0 2000 10 7.50 0.375 -8.2167 -1.2439 2.3731 9.6560 71.6727 311.5584 376.2529 4.9539 0.0000
+0 2000 10 16.50 0.625 -5.2240 -1.1347 0.0000 7.8740 129.5334 282.9621 409.5326 7.9723 0.0000
+0 2000 11 7.50 0.375 -1.2022 -1.0067 21.1774 1.7820 415.9135 422.5237 275.4960 10.4139 0.0000
+0 2000 11 16.50 0.625 -1.2833 -0.8827 0.0000 0.0000 370.8637 384.5368 318.8809 6.9707 0.0000
+0 2000 12 7.50 0.375 -5.1806 -0.7706 5.0267 0.7620 193.8932 362.9028 345.4283 5.1872 0.0000
+0 2000 12 16.50 0.625 -5.7340 -0.6900 0.0000 1.7780 153.0060 343.4295 368.4550 4.7447 0.0000
+0 2000 13 7.50 0.375 -1.6344 -0.6400 20.7932 0.7620 309.0591 350.0254 364.0728 7.9911 0.0000
+0 2000 13 16.50 0.625 -0.0623 -0.5997 0.0000 0.0000 524.8928 498.4944 217.3954 10.9350 0.0000
+0 2000 14 7.50 0.396 3.6358 -0.5574 20.0801 0.0000 819.7831 600.8483 116.9249 4.3021 0.0000
+0 2000 14 17.00 0.604 1.4993 -0.5079 0.0000 0.0000 680.2203 583.8541 136.1272 10.5352 0.0000
+0 2000 15 7.50 0.396 2.4205 -0.4653 19.4951 0.0000 609.1887 466.0281 255.8637 7.3226 0.0000
+0 2000 15 17.00 0.604 2.0410 -0.4224 0.0000 0.0000 424.4048 303.7037 420.1127 7.3372 0.0000
+0 2000 16 7.50 0.396 2.6916 -0.3679 17.6530 0.0000 557.7662 400.9092 325.3627 13.3021 0.0000
+0 2000 16 17.00 0.604 -0.1114 -0.3128 0.0000 0.5080 250.6850 241.0540 487.7099 15.0483 0.0000
+0 2000 17 7.50 0.396 1.7695 -0.2805 20.3263 0.0000 375.7716 276.0626 454.1618 13.2158 0.0000
+0 2000 17 17.00 0.604 0.1348 -0.2521 0.0000 0.2540 243.8814 225.8563 505.6604 6.8028 0.0000
+0 2000 18 7.50 0.396 -0.4663 -0.2258 6.9898 5.3340 135.2094 145.6089 587.1030 3.1826 0.0000
+0 2000 18 17.00 0.604 -0.5728 -0.2017 0.0000 28.1840 117.6802 132.4251 601.3831 5.9910 0.0000
+0 2000 19 7.50 0.396 -3.5689 -0.1742 13.7830 1.2700 188.6971 328.1739 406.8897 8.1495 0.0000
+0 2000 19 17.00 0.583 -3.0611 -0.1568 0.0000 0.0000 261.4377 382.7455 353.1143 11.5121 0.0000
+0 2000 20 7.00 0.417 -0.6400 -0.1500 21.1246 0.0000 447.8256 468.7130 267.4570 12.7600 0.0000
+0 2000 20 17.00 0.583 -1.4164 -0.1586 0.0000 0.0000 281.8666 337.0656 398.7125 7.1029 0.0000
+0 2000 21 7.00 0.417 -1.5115 -0.1600 13.1398 2.2860 200.3952 258.0214 477.6914 8.3130 0.0000
+0 2000 21 17.00 0.583 -6.7400 -0.1600 0.0000 1.2700 134.8378 378.8741 356.8387 6.6196 0.0000
+0 2000 22 7.00 0.417 -8.8740 -0.1600 9.6953 0.2540 123.4104 429.0858 306.6270 3.0790 0.0000
+0 2000 22 17.00 0.583 -10.4436 -0.1729 0.0000 2.2860 86.5953 430.1616 304.9638 3.7696 0.0000
+0 2000 23 7.00 0.417 -10.0760 -0.1955 23.8631 1.2740 170.8183 503.1360 230.9560 10.1545 0.0000
+0 2000 23 17.00 0.583 -7.1932 -0.2243 0.0000 0.0000 323.8555 579.3179 153.4626 10.1379 0.0000
+0 2000 24 7.00 0.417 -5.6725 -0.2555 10.9372 1.7780 135.0573 342.5075 388.8532 9.4275 0.0000
+0 2000 24 17.00 0.583 -4.8721 -0.2800 0.0000 0.5080 118.7465 297.8061 432.4422 3.3850 0.0000
+0 2000 25 7.00 0.417 -2.8410 -0.2900 8.2714 1.5240 146.4688 252.4266 477.3681 1.4130 0.0000
+0 2000 25 17.00 0.583 -2.6468 -0.2846 0.0000 2.2860 121.8776 221.0063 509.0314 4.4736 0.0000
+0 2000 26 7.00 0.417 -4.6585 -0.2645 6.6661 0.7620 47.3653 220.9512 510.0007 1.9340 0.0000
+0 2000 26 17.00 0.583 -7.3739 -0.2411 0.0000 5.3340 30.8809 291.2170 440.7996 1.5054 0.0000
+0 2000 27 7.00 0.417 -9.1880 -0.2245 13.2647 0.7620 127.7442 438.2958 294.4747 2.4290 0.0000
+0 2000 27 17.00 0.583 -14.7061 -0.2143 0.0000 0.0000 104.4226 533.0563 200.1794 2.9461 0.0000
+0 2000 28 7.00 0.417 -13.4860 -0.2235 22.3106 0.0000 147.1017 552.9961 179.8200 3.2430 0.0000
+0 2000 28 17.00 0.583 -15.0311 -0.2500 0.0000 0.0000 107.3891 541.0205 190.5902 5.9325 0.0000
+0 2000 29 7.00 0.417 -13.3125 -0.2870 18.4327 0.0000 148.0263 547.8697 182.0612 4.8115 0.0000
+0 2000 29 17.00 0.583 -13.0379 -0.3311 0.0000 0.0000 150.2601 543.0795 184.8557 3.1271 0.0000
+0 2000 30 7.00 0.417 -9.6895 -0.3735 26.5383 0.0000 243.0641 558.8630 167.1559 5.0730 0.0000
+0 2000 30 17.00 0.583 -10.4232 -0.4143 0.0000 0.0000 216.3669 549.1399 175.0418 5.9332 0.0000
+0 2000 31 7.00 0.417 -6.1140 -0.4510 26.4483 0.0000 360.0190 570.9352 151.5965 3.3285 0.0000
+0 2000 31 17.00 0.583 -9.6032 -0.4779 0.0000 0.0000 266.4704 575.9061 145.4211 2.5164 0.0000
+0 2000 32 7.00 0.417 -3.2800 -0.5020 26.2404 0.0000 518.8884 630.3940 89.8522 4.7630 0.0000
+0 2000 32 17.00 0.583 -1.1046 -0.5411 0.0000 0.0000 468.0515 491.4649 227.0354 10.5718 0.0000
+0 2000 33 7.00 0.417 1.7935 -0.5600 18.2158 0.0000 645.1357 530.7269 186.9288 12.0355 0.0000
+0 2000 33 17.00 0.583 0.6668 -0.5354 0.0000 0.0000 634.3458 577.6559 141.0995 9.6889 0.0000
+0 2000 34 7.00 0.417 -0.4365 -0.4905 22.5851 0.0000 514.8080 501.3940 219.3670 5.5695 0.0000
+0 2000 34 17.00 0.583 -10.8364 -0.4386 0.0000 0.0000 31.4187 371.2029 351.8872 1.5275 0.0000
+0 2000 35 7.00 0.417 -5.1455 -0.4090 26.0482 0.0000 337.9990 514.9541 209.4652 2.0095 0.0000
+0 2000 35 17.00 0.583 -2.1011 -0.4100 0.0000 0.0000 444.0334 513.4125 210.9618 3.7646 0.0000
+0 2000 36 7.00 0.417 -1.4195 -0.4100 23.5238 0.0000 437.6655 477.5764 246.7979 2.6800 0.0000
+0 2000 36 17.00 0.583 -3.9811 -0.4000 0.0000 0.0000 223.3864 366.6677 358.1569 6.4400 0.0000
+0 2000 37 7.00 0.417 -2.4025 -0.3770 17.7966 0.0000 367.9717 451.6581 274.2028 4.0630 0.0000
+0 2000 37 17.00 0.583 -4.4039 -0.3600 0.0000 0.0000 246.0145 406.2159 320.4121 7.6668 0.0000
+0 2000 38 7.00 0.417 -2.7770 -0.3500 28.3667 0.0000 298.4228 396.4606 330.6188 8.8525 0.0000
+0 2000 38 17.00 0.583 -0.8664 -0.3439 0.0000 0.0000 509.9432 532.6404 194.7134 7.5464 0.0000
+0 2000 39 7.00 0.417 1.9520 -0.3380 25.4113 0.0000 728.5018 614.6846 112.9371 7.5425 0.0000
+0 2000 39 17.00 0.583 0.4304 -0.3229 0.0000 0.0000 650.5555 614.0820 114.2245 2.5589 0.0000
+0 2000 40 7.00 0.417 -1.2925 -0.3095 11.7689 0.0000 370.2714 412.8765 316.0347 2.9330 0.0000
+0 2000 40 17.00 0.583 -3.9411 -0.2964 0.0000 0.7620 113.8189 261.2752 468.2281 6.3768 0.0000
+0 2000 41 7.00 0.417 -3.8215 -0.2815 9.0151 0.5080 92.1072 235.8365 494.3437 2.0895 0.0000
+0 2000 41 17.00 0.583 -6.2221 -0.2711 0.0000 3.8140 18.3862 243.0784 487.5751 1.7429 0.0000
+0 2000 42 7.00 0.417 -4.6200 -0.2600 15.0158 5.0760 81.5668 253.2551 477.9010 5.4430 0.0000
+0 2000 42 17.00 0.583 -4.1261 -0.2543 0.0000 0.0000 235.3277 391.1665 340.2493 13.8814 0.0000
+0 2000 43 7.00 0.417 -3.3140 -0.2500 16.1977 0.2540 251.9575 377.7907 353.8198 2.8885 0.0000
+0 2000 43 17.00 0.583 -6.6814 -0.2454 0.0000 3.3020 85.2371 324.7519 407.0697 4.9957 0.0000
+0 2000 44 7.00 0.438 -8.6276 -0.2400 14.4754 2.0320 114.3642 409.9734 322.0918 9.6848 0.0000
+0 2000 44 17.50 0.562 -6.5570 -0.2381 0.0000 2.5400 119.0734 354.9780 377.1715 9.9133 0.0000
+0 2000 45 7.00 0.438 -1.3505 -0.2457 13.8848 5.5880 149.8711 196.2823 535.5231 8.1276 0.0000
+0 2000 45 17.50 0.562 -2.8148 -0.2500 0.0000 7.1160 164.2006 266.4075 465.2030 4.4885 0.0000
+0 2000 46 7.00 0.438 -5.3738 -0.2371 19.2820 3.5640 202.2713 401.3716 330.8236 4.3533 0.0000
+0 2000 46 17.50 0.562 -5.0404 -0.2233 0.0000 0.0000 241.4734 430.3033 302.5203 10.7926 0.0000
+0 2000 47 7.00 0.438 -1.3114 -0.2100 28.4904 0.0000 366.9053 413.2046 320.2263 4.9343 0.0000
+0 2000 47 17.50 0.542 -2.7046 -0.2012 0.0000 0.0000 210.6381 314.5110 419.3232 2.8846 0.0000
+0 2000 48 6.50 0.458 -7.3345 -0.1936 11.2081 3.8140 69.1213 326.7463 407.4307 1.8259 0.0000
+0 2000 48 17.50 0.542 -12.3427 -0.1842 0.0000 11.1840 33.0548 418.3499 316.2562 1.0819 0.0000
+0 2000 49 6.50 0.458 -7.3977 -0.1800 12.3413 5.3340 100.7423 363.1339 371.6653 3.9586 0.0000
+0 2000 49 17.50 0.542 -9.3369 -0.1800 0.0000 0.5080 106.4990 422.7882 312.0110 8.4296 0.0000
+0 2000 50 6.50 0.458 -5.0514 -0.1800 33.4231 0.0000 391.2065 577.4213 157.3779 3.9673 0.0000
+0 2000 50 17.50 0.542 -4.7738 -0.1862 0.0000 0.0000 465.3519 646.1545 88.3639 2.7738 0.0000
+0 2000 51 6.50 0.458 1.5064 -0.1900 24.4411 0.0000 658.8015 567.8304 166.5124 2.5782 0.0000
+0 2000 51 17.50 0.542 0.2069 -0.1900 0.0000 0.0000 474.6579 455.2444 279.0984 6.5058 0.0000
+0 2000 52 6.50 0.458 2.2786 -0.1900 30.2144 0.0000 506.8218 380.9393 353.4035 4.6891 0.0000
+0 2000 52 17.50 0.542 0.0731 -0.1835 0.0000 0.0000 438.5661 425.4157 309.2255 2.5669 0.0000
+0 2000 53 6.50 0.458 -1.2077 -0.1732 10.5822 0.7620 311.5024 352.7577 382.3528 3.8295 0.0000
+0 2000 53 17.50 0.542 -4.3927 -0.1569 0.0000 0.0000 179.0756 348.6549 387.1985 6.5665 0.0000
+0 2000 54 6.50 0.458 0.6491 -0.1450 34.3787 0.0000 585.9093 540.9933 195.4055 4.0564 0.0000
+0 2000 54 17.50 0.542 -1.8704 -0.1373 0.0000 0.0000 348.7522 422.5783 314.1725 2.2685 0.0000
+0 2000 55 6.50 0.458 0.3632 -0.1277 21.7112 0.0000 485.5740 460.4878 276.7016 2.7532 0.0000
+0 2000 55 17.50 0.542 -8.1365 -0.1200 0.0000 0.0000 173.3844 452.5872 284.9563 9.5392 0.0000
+0 2000 56 6.50 0.458 -12.0859 -0.1200 29.1389 0.0000 152.5337 535.6865 201.8570 15.1605 0.0000
+0 2000 56 17.50 0.542 -12.8950 -0.1196 0.0000 0.0000 114.0944 513.9638 223.5972 16.2146 0.0000
+0 2000 57 6.50 0.458 -8.8868 -0.1300 35.7865 0.0000 277.9712 579.8298 157.2556 14.0782 0.0000
+0 2000 57 17.50 0.542 -6.9465 -0.1562 0.0000 0.0000 201.2726 452.9097 282.9790 9.5669 0.0000
+0 2000 58 6.50 0.458 1.4345 -0.1841 32.1466 0.0000 528.2558 434.5608 300.0517 3.2341 0.0000
+0 2000 58 17.50 0.542 2.0681 -0.2058 0.0000 0.0000 560.7051 448.8411 284.7827 4.5319 0.0000
+0 2000 59 6.50 0.458 1.7541 -0.2100 35.6483 0.0000 553.6696 457.9793 275.4516 5.2005 0.0000
+0 2000 59 17.50 0.542 -4.4235 -0.2058 0.0000 0.0000 246.9749 413.6010 320.0228 7.1554 0.0000
+0 2000 60 6.50 0.458 -3.5855 -0.1864 37.1335 0.0000 329.9974 467.0844 267.4244 5.8391 0.0000
+0 2000 60 17.50 0.542 -3.4808 -0.1696 0.0000 0.0000 435.0195 570.9507 164.3227 2.6085 0.0000
+0 2000 61 6.50 0.458 -1.3868 -0.1600 26.1783 0.0000 463.4783 515.8566 219.8562 2.3523 0.0000
+0 2000 61 17.50 0.542 -4.3296 -0.1646 0.0000 0.0000 197.9385 364.1490 371.3529 1.8654 0.0000
+0 2000 62 6.50 0.458 -4.4655 -0.1700 12.9985 0.5080 55.3929 226.7391 508.5168 2.9977 0.0000
+0 2000 62 17.50 0.542 -5.6035 -0.1665 0.0000 0.0000 127.0974 337.1977 398.2163 2.8719 0.0000
+0 2000 63 6.50 0.458 0.3427 -0.1641 38.3170 0.0000 389.0296 356.1537 379.3721 2.2500 0.0000
+0 2000 63 17.50 0.542 -0.3500 -0.1604 0.0000 0.0000 491.9583 499.5616 236.1336 3.3885 0.0000
+0 2000 64 6.50 0.458 4.7141 -0.1700 38.9156 0.0000 853.7484 587.3365 147.9194 5.0305 0.0000
+0 2000 64 17.50 0.542 2.1119 -0.1569 0.0000 0.0000 570.4003 458.3410 277.5125 3.1912 0.0000
+0 2000 65 6.50 0.458 3.2886 -0.1418 26.2035 0.0000 564.4629 385.7732 350.7712 3.5732 0.0000
+0 2000 65 17.50 0.542 -5.9250 -0.1192 0.0000 2.5400 133.2103 353.5483 384.0305 13.6100 0.0000
+0 2000 66 6.50 0.458 2.3250 -0.0977 35.2030 0.0000 533.0883 397.7052 340.8596 4.5082 0.0000
+0 2000 66 17.50 0.542 -0.3858 -0.0792 0.0000 0.0000 372.5784 384.0516 355.3624 3.0819 0.0000
+0 2000 67 6.50 0.458 0.0423 -0.0636 19.9539 0.7620 259.4753 252.2038 487.9270 2.4659 0.0000
+0 2000 67 17.50 0.542 -6.4788 -0.0415 0.0000 4.3180 131.0021 366.1269 375.0208 14.6308 0.0000
+0 2000 68 6.50 0.479 -5.5348 -0.0270 33.2770 0.0000 232.2828 445.9269 295.8924 11.5878 0.0000
+0 2000 68 18.00 0.521 -7.0384 -0.0240 0.0000 0.0000 136.3228 396.3215 345.6341 3.2760 0.0000
+0 2000 69 6.50 0.479 -6.0239 -0.0357 19.8349 1.0160 147.8959 376.7857 364.6330 5.5074 0.0000
+0 2000 69 18.00 0.521 -9.0984 -0.0476 0.0000 0.0000 151.9924 467.7476 273.1209 9.2208 0.0000
+0 2000 70 6.50 0.479 -7.8022 -0.0678 38.7409 0.0000 232.1298 511.3946 228.5435 6.7352 0.0000
+0 2000 70 18.00 0.500 -9.3950 -0.0883 0.0000 0.0000 156.3588 478.6299 260.3661 5.7746 0.0000
+0 2000 71 6.00 0.500 -5.9412 -0.1163 33.9552 0.0000 235.3393 456.4843 281.2311 11.9096 0.0000
+0 2000 71 18.00 0.500 -5.8604 -0.1483 0.0000 1.0160 113.0757 331.5085 404.7379 5.6025 0.0000
+0 2000 72 6.00 0.500 -6.3479 -0.1742 24.8818 1.7780 122.9891 354.7285 380.3371 4.1796 0.0000
+0 2000 72 18.00 0.500 -6.2213 -0.1871 0.0000 0.0000 194.4172 423.2546 311.2213 10.9362 0.0000
+0 2000 73 6.00 0.500 -2.7583 -0.1971 42.6785 0.0000 347.9564 453.5540 280.4657 10.5813 0.0000
+0 2000 73 18.00 0.500 -3.9954 -0.2000 0.0000 0.0000 287.6887 440.3478 293.5390 5.3962 0.0000
+0 2000 74 6.00 0.500 -1.9600 -0.2000 42.4091 0.5080 323.4049 395.0871 338.7997 12.8717 0.0000
+0 2000 74 18.00 0.500 -2.3483 -0.2000 0.0000 0.0000 243.1943 334.1105 399.7762 11.0146 0.0000
+0 2000 75 6.00 0.500 -8.6004 -0.1946 5.5481 18.2920 36.7333 332.0092 402.1246 2.0571 0.0000
+0 2000 75 18.00 0.500 -11.6812 -0.1875 0.0000 4.5720 51.5033 421.3485 313.1084 1.9467 0.0000
+0 2000 76 6.00 0.500 -5.0113 -0.1800 40.8499 5.3360 145.4783 330.1611 404.6381 2.1296 0.0000
+0 2000 76 18.00 0.500 -2.1988 -0.1800 0.0000 0.0000 394.5413 479.5008 255.2984 2.9113 0.0000
+0 2000 77 6.00 0.500 -1.9304 -0.1800 42.3538 0.2540 323.3437 395.6798 339.1194 4.4462 0.0000
+0 2000 77 18.00 0.500 -9.3383 -0.1800 0.0000 0.0000 149.0091 464.0365 270.7627 6.6642 0.0000
+0 2000 78 6.00 0.500 -9.0529 -0.1800 25.3517 0.0000 157.7686 465.4800 269.3192 3.9346 0.0000
+0 2000 78 18.00 0.500 -7.3479 -0.1800 0.0000 0.0000 275.4805 534.3651 200.4341 11.9925 0.0000
+0 2000 79 6.00 0.500 -0.0208 -0.1733 43.9652 0.0000 428.7175 416.9836 318.1201 11.2958 0.0000
+0 2000 79 18.00 0.500 -3.0925 -0.1700 0.0000 1.5280 211.9410 329.9688 405.2871 2.0554 0.0000
+0 2000 80 6.00 0.500 -11.0279 -0.1700 13.1332 2.0320 32.2683 388.7743 346.4815 3.4558 0.0000
+0 2000 80 18.00 0.500 -13.1962 -0.1646 0.0000 3.3020 34.3357 438.1002 297.4032 2.7783 0.0000
+0 2000 81 6.00 0.500 -7.9437 -0.1600 26.4991 0.2540 25.9112 299.2256 436.4872 1.6879 0.0000
+0 2000 81 18.00 0.500 -3.5538 -0.1600 0.0000 0.0000 233.6298 370.9907 364.7221 3.6825 0.0000
+0 2000 82 6.00 0.500 -1.0658 -0.1525 22.5715 0.2540 219.1621 256.0959 479.9598 2.1871 0.0000
+0 2000 82 18.00 0.500 -2.1613 -0.1533 0.0000 0.0000 157.8309 243.5910 492.4266 3.9254 0.0000
+0 2000 83 6.00 0.500 0.2408 -0.1592 43.1732 0.0000 367.4028 346.8437 388.9073 4.6058 0.0000
+0 2000 83 18.00 0.500 -2.0587 -0.1517 0.0000 0.7620 225.4532 306.1584 429.9355 8.8204 0.0000
+0 2000 84 6.00 0.500 -0.6746 -0.1437 40.4618 0.0000 363.7333 384.1304 352.3255 8.0933 0.0000
+0 2000 84 18.00 0.500 -1.8138 -0.1321 0.0000 0.0000 295.7974 368.3083 368.6817 3.2508 0.0000
+0 2000 85 6.00 0.500 2.1150 -0.1171 38.3058 0.0000 546.5925 431.1349 306.5422 3.1071 0.0000
+0 2000 85 18.00 0.500 0.0375 -0.1029 0.0000 0.0000 311.4439 303.5327 434.7940 8.0192 0.0000
+0 2000 86 6.00 0.500 0.3233 -0.0842 41.6196 1.2700 337.3044 315.6300 423.5572 11.1279 0.0000
+0 2000 86 18.00 0.500 0.0904 -0.0608 0.0000 0.0000 411.7422 404.4034 335.8563 13.3246 0.0000
+0 2000 87 6.00 0.500 4.0179 -0.0417 48.4736 0.0000 710.1304 490.2065 250.9353 6.8817 0.0000
+0 2000 87 18.00 0.500 1.9471 -0.0171 0.0000 0.0000 568.0572 469.9123 272.3624 2.4367 0.0000
+0 2000 88 6.00 0.500 2.1188 0.0096 24.5245 0.7620 425.4229 315.2528 428.2527 2.4071 0.0000
+0 2000 88 18.00 0.500 -2.6979 0.0275 0.0000 1.2700 130.0391 242.9431 501.3904 3.0812 0.0000
+0 2000 89 6.00 0.500 -2.8079 0.0446 47.4354 0.0000 380.9962 496.4775 248.6464 4.1062 0.0000
+0 2000 89 18.00 0.500 -3.8767 0.0587 0.0000 0.0000 237.0762 397.5444 348.2357 1.7475 0.0000
+0 2000 90 6.00 0.500 -7.4717 0.0700 11.4075 5.8460 29.3880 306.2010 440.1005 1.3633 0.0000
+0 2000 90 18.00 0.500 -10.2262 0.0792 0.0000 6.6040 29.0544 379.3820 367.3446 3.0317 0.0000
+0 2000 91 6.00 0.521 -10.3856 0.0800 16.5260 3.8100 30.9373 385.3470 361.4184 2.9684 0.0000
+0 2000 91 18.50 0.479 -11.6843 0.0800 0.0000 0.5080 32.0433 415.4916 331.2737 2.2161 0.0000
+0 2000 92 6.00 0.521 -6.6440 0.0788 47.1184 8.6340 209.8046 458.8240 287.8856 2.6396 0.0000
+0 2000 92 18.50 0.479 -6.3252 0.0700 0.0000 0.0000 205.8539 449.6952 296.6064 10.0835 0.0000
+0 2000 93 6.00 0.521 -6.3648 0.0612 18.6545 6.1000 113.8685 353.4915 392.4021 4.1016 0.0000
+0 2000 93 18.50 0.458 -11.8268 0.0523 0.0000 3.5600 43.3174 427.3882 318.0918 1.4605 0.0000
+0 2000 94 5.50 0.542 -8.0335 0.0500 51.6704 2.5400 192.3380 475.5666 269.8081 9.3600 0.0000
+0 2000 94 18.50 0.458 -2.8473 0.0468 0.0000 0.0000 291.4652 412.7000 332.5273 13.6850 0.0000
+0 2000 95 5.50 0.542 5.2204 0.0388 51.7320 0.0000 739.6262 441.7897 303.0687 12.3854 0.0000
+0 2000 95 18.50 0.458 5.4668 0.0686 0.0000 0.0000 749.2980 451.5207 294.7177 13.3764 0.0000
+0 2000 96 5.50 0.542 6.1185 0.0900 50.4952 0.0000 741.3835 399.0342 348.1953 9.3946 0.0000
+0 2000 96 18.50 0.458 2.9182 0.1182 0.0000 0.0000 506.0392 361.9772 386.5619 7.3114 0.0000
+0 2000 97 5.50 0.542 4.6827 0.1331 34.3681 0.0000 689.8016 441.4950 307.7370 8.9838 0.0000
+0 2000 97 18.50 0.458 0.5350 0.1432 0.0000 1.0160 431.2126 400.8623 348.8403 6.0445 0.0000
+0 2000 98 5.50 0.542 -2.1462 0.1512 51.3054 0.5080 384.9743 481.6248 268.4492 6.7138 0.0000
+0 2000 98 18.50 0.458 -3.2559 0.1600 0.0000 0.0000 401.4837 542.5513 207.9350 3.9564 0.0000
+0 2000 99 5.50 0.542 4.6150 0.1619 51.9376 0.0000 869.5626 613.0661 137.5099 4.4469 0.0000
+0 2000 99 18.50 0.458 3.2591 0.1700 0.0000 0.0000 655.0296 495.8727 255.0800 2.8550 0.0000
+0 2000 100 5.50 0.542 7.7946 0.1712 46.9757 0.0000 938.0959 477.7197 273.2870 4.3969 0.0000
+0 2000 100 18.50 0.458 1.6545 0.1809 0.0000 0.0000 490.8320 406.5525 344.9095 2.7732 0.0000
+0 2000 101 5.50 0.542 0.1931 0.1892 27.3326 0.0000 333.8758 330.5216 421.3290 2.5708 0.0000
+0 2000 101 18.50 0.458 -3.8791 0.1945 0.0000 1.2700 37.9019 204.7471 547.3518 2.0845 0.0000
+0 2000 102 5.50 0.542 0.3815 0.2000 34.7719 0.0000 172.3082 156.7612 595.5927 2.0969 0.0000
+0 2000 102 18.50 0.458 0.9177 0.2082 0.0000 0.0000 433.4746 398.9133 353.8231 4.1027 0.0000
+0 2000 103 5.50 0.542 5.2258 0.2100 34.4752 0.0000 750.8957 466.0569 286.7645 6.0304 0.0000
+0 2000 103 18.50 0.458 3.9459 0.2168 0.0000 0.0000 654.4289 456.3271 296.8134 4.1618 0.0000
+0 2000 104 5.50 0.542 7.0808 0.2200 37.7921 0.0000 867.0319 456.6876 296.6017 3.3012 0.0000
+0 2000 104 18.50 0.458 5.5168 0.2200 0.0000 0.0000 763.4433 470.0023 283.2870 5.5345 0.0000
+0 2000 105 5.50 0.542 6.2465 0.2254 37.5102 0.0000 781.9023 429.1124 324.4290 3.9465 0.0000
+0 2000 105 18.50 0.458 -3.7645 0.2300 0.0000 0.0000 15.4645 178.2023 575.5552 1.9645 0.0000
+0 2000 106 5.50 0.542 -3.9981 0.2350 8.8925 8.8980 14.1596 188.1838 565.8079 1.8746 0.0000
+0 2000 106 18.50 0.458 -3.2400 0.2400 0.0000 3.3020 102.3089 248.5898 505.6362 9.1305 0.0000
+0 2000 107 5.50 0.542 1.7646 0.2400 35.5743 0.0000 424.5939 335.6493 418.5767 3.4065 0.0000
+0 2000 107 18.50 0.458 2.9236 0.2473 0.0000 0.0000 532.2123 394.1860 360.3809 7.7805 0.0000
+0 2000 108 5.50 0.542 8.1127 0.2500 55.3262 0.0000 971.3839 486.7425 267.9522 4.3223 0.0000
+0 2000 108 18.50 0.458 6.0305 0.2500 0.0000 0.0000 800.5704 474.2460 280.4487 3.6423 0.0000
+0 2000 109 5.50 0.542 5.6192 0.2512 38.1064 0.5080 758.9918 418.7957 335.9531 6.2927 0.0000
+0 2000 109 18.50 0.458 -5.4191 0.2600 0.0000 0.0000 188.0137 411.3268 343.8369 7.6532 0.0000
+0 2000 110 5.50 0.542 -5.1242 0.2658 16.1908 1.5240 131.0251 343.1468 412.2878 11.2119 0.0000
+0 2000 110 18.50 0.458 -2.4709 0.2700 0.0000 0.2540 183.6290 301.5697 454.0634 11.3109 0.0000
+0 2000 111 5.50 0.542 3.8227 0.2700 56.1641 0.0000 533.3871 336.3896 419.2434 4.2785 0.0000
+0 2000 111 18.50 0.458 2.8541 0.2727 0.0000 0.0000 476.4102 344.5500 411.2112 2.5786 0.0000
+0 2000 112 5.50 0.542 7.6915 0.2704 37.9114 0.0000 888.0723 440.3779 315.2733 3.3042 0.0000
+0 2000 112 18.50 0.458 3.7532 0.2791 0.0000 0.0000 510.5034 327.1158 428.9443 2.5018 0.0000
+0 2000 113 5.50 0.542 3.4585 0.2823 16.5535 5.5840 389.7844 212.1374 544.0738 4.1765 0.0000
+0 2000 113 18.50 0.458 0.3986 0.2900 0.0000 4.3180 90.1560 84.9267 671.6459 4.7550 0.0000
+0 2000 114 5.50 0.562 2.2859 0.2900 38.6959 4.0680 254.8358 153.9014 602.6713 3.7811 0.0000
+0 2000 114 19.00 0.438 -0.2062 0.2910 0.0000 0.2540 170.9263 193.2517 563.3657 2.6514 0.0000
+0 2000 115 5.50 0.562 -0.5781 0.2993 30.2981 2.5360 253.3220 291.3810 465.6270 5.7563 0.0000
+0 2000 115 19.00 0.438 -2.2105 0.3043 0.0000 0.0000 363.8438 473.1931 284.0515 7.1986 0.0000
+0 2000 116 5.50 0.562 4.2204 0.3070 53.2629 0.0000 735.5779 517.5521 239.8219 3.2656 0.0000
+0 2000 116 19.00 0.438 5.0181 0.3100 0.0000 0.0000 756.3962 496.5766 260.9368 7.1948 0.0000
+0 2000 117 5.50 0.562 8.0167 0.3081 57.9856 0.0000 929.2057 460.9323 296.4940 7.7719 0.0000
+0 2000 117 19.00 0.417 2.2105 0.3050 0.0000 0.0000 293.0547 192.4436 564.8346 1.6065 0.0000
+0 2000 118 5.00 0.583 4.0111 0.3079 56.9237 0.0000 269.3486 66.2191 691.1935 2.2982 0.0000
+0 2000 118 19.00 0.417 2.9285 0.3095 0.0000 0.0000 348.2840 204.3922 553.0977 2.3135 0.0000
+0 2000 119 5.00 0.583 10.9032 0.3100 41.2882 0.0000 1105.9089 395.4291 362.0843 3.0486 0.0000
+0 2000 119 19.00 0.417 7.4835 0.3100 0.0000 0.0000 779.2705 355.0638 402.4496 2.2455 0.0000
+0 2000 120 5.00 0.583 8.3239 0.3143 33.6910 0.0000 766.6673 269.0569 488.6583 3.9364 0.0000
+0 2000 120 19.00 0.417 -0.6680 0.3175 0.0000 4.5720 91.3426 126.6196 631.2469 2.3475 0.0000
+0 2000 121 5.00 0.583 -3.9293 0.3246 7.9060 2.7940 10.3312 186.0636 572.1393 1.3339 0.0000
+0 2000 121 19.00 0.417 -4.2715 0.3310 0.0000 0.0000 129.7024 315.9707 442.5318 2.1755 0.0000
+0 2000 122 5.00 0.583 3.9504 0.3314 58.6631 0.0000 537.9876 334.8232 423.6995 2.6361 0.0000
+0 2000 122 19.00 0.417 3.9135 0.3300 0.0000 0.0000 506.8940 317.4263 441.0290 2.8885 0.0000
+0 2000 123 5.00 0.583 9.6911 0.3243 57.6373 0.0000 814.6747 213.2251 544.9611 2.3939 0.0000
+0 2000 123 19.00 0.417 7.4660 0.3215 0.0000 0.0000 766.2948 343.5223 414.5326 3.5440 0.0000
+0 2000 124 5.00 0.583 11.8125 0.3196 59.2349 0.0000 1165.8773 375.5760 382.3915 3.1686 0.0000
+0 2000 124 19.00 0.417 9.3205 0.3205 0.0000 0.0000 883.9352 317.4789 440.5288 2.3380 0.0000
+0 2000 125 5.00 0.583 14.5714 0.3150 59.7110 0.0000 1522.0050 452.5863 305.1626 3.5718 0.0000
+0 2000 125 19.00 0.417 11.1445 0.3160 0.0000 0.0000 1203.1425 482.1472 275.6487 5.3445 0.0000
+0 2000 126 5.00 0.583 14.9750 0.3111 60.2728 0.0000 1630.7017 513.1497 244.4142 4.8929 0.0000
+0 2000 126 19.00 0.417 11.0645 0.3095 0.0000 0.0000 1254.8953 538.0985 219.3913 5.4835 0.0000
+0 2000 127 5.00 0.583 10.6182 0.3086 46.9971 0.0000 885.2538 208.2832 549.1630 2.7650 0.0000
+0 2000 127 19.00 0.417 9.0150 0.3175 0.0000 0.0000 924.5422 384.1873 373.6792 3.3355 0.0000
+0 2000 128 5.00 0.583 9.2257 0.3136 51.2741 0.0000 814.3707 242.9569 514.7247 4.5364 0.0000
+0 2000 128 19.00 0.417 1.2675 0.3220 0.0000 0.7620 155.6675 104.6326 653.4459 3.4400 0.0000
+0 2000 129 5.00 0.583 -1.9554 0.3361 15.5700 12.4460 5.1816 105.2444 653.4971 1.3529 0.0000
+0 2000 129 19.00 0.417 -2.2350 0.3475 0.0000 1.5240 68.9469 181.2031 578.0773 3.6205 0.0000
+0 2000 130 5.00 0.583 2.3714 0.3500 57.1676 0.0000 412.0340 290.8477 468.5507 7.3343 0.0000
+0 2000 130 19.00 0.417 4.9350 0.3500 0.0000 0.7620 389.5684 137.5360 621.8624 5.8245 0.0000
+0 2000 131 5.00 0.583 8.9646 0.3379 48.4843 0.0000 619.1550 72.7026 686.1232 3.8389 0.0000
+0 2000 131 19.00 0.417 9.8670 0.3415 0.0000 0.0000 1022.1447 409.2694 349.7281 7.1860 0.0000
+0 2000 132 5.00 0.583 3.0450 0.3861 32.2034 2.7940 287.6115 137.1603 623.9434 3.8014 0.0000
+0 2000 132 19.00 0.417 -6.6920 0.3955 0.0000 1.0160 125.7647 390.8557 370.6926 7.2850 0.0000
+0 2000 133 5.00 0.583 -6.2032 0.4082 30.5417 0.0000 266.2542 519.4729 242.6772 5.8346 0.0000
+0 2000 133 19.00 0.417 -5.8120 0.4150 0.0000 0.0000 316.1168 559.8655 202.6060 6.0505 0.0000
+0 2000 134 5.00 0.583 1.2664 0.3929 52.1865 0.0000 618.3666 564.5950 196.8282 2.5432 0.0000
+0 2000 134 19.00 0.417 1.0610 0.3915 0.0000 0.0000 519.0229 486.4292 274.9299 1.8095 0.0000
+0 2000 135 5.00 0.583 4.9407 0.4254 41.0454 0.0000 638.9497 388.1224 374.8420 2.2407 0.0000
+0 2000 135 19.00 0.417 6.0195 0.4895 0.0000 0.0000 743.7089 429.8474 336.1617 7.3465 0.0000
+0 2000 136 5.00 0.583 10.3639 0.8336 46.6601 0.0000 1179.5541 547.8521 234.7990 5.1929 0.0000
+0 2000 136 19.00 0.417 9.3385 0.8565 0.0000 0.0000 1118.9727 578.5223 205.1641 2.7020 0.0000
+0 2000 137 5.00 0.583 13.0725 1.5421 59.2382 0.0000 1326.8665 476.8384 341.8532 3.5479 0.0000
+0 2000 137 19.00 0.417 7.3170 1.6260 0.0000 0.0000 937.0059 578.6722 243.5249 3.1485 0.0000
+0 2000 138 5.00 0.583 -1.0582 0.9704 7.8570 14.7360 151.7193 236.8663 552.3922 9.6646 0.0000
+0 2000 138 19.00 0.417 -2.7190 0.7430 0.0000 2.0320 87.2962 237.3736 540.7969 4.6450 0.0000
+0 2000 139 5.00 0.583 -2.1321 0.6700 17.4454 1.0160 4.9900 127.7886 646.8597 1.4879 0.0000
+0 2000 139 19.00 0.417 -0.5550 0.6390 0.0000 0.0000 160.9140 215.2969 557.8605 2.5985 0.0000
+0 2000 140 5.00 0.583 4.1418 0.6021 45.5933 0.0000 334.3002 141.0471 630.3420 2.5621 0.0000
+0 2000 140 19.00 0.417 2.8605 0.5800 0.0000 0.0000 251.3426 133.7759 636.5524 3.4460 0.0000
+0 2000 141 5.00 0.583 5.2543 0.7861 35.6692 0.2540 536.8578 280.4714 499.8598 3.2443 0.0000
+0 2000 141 19.00 0.417 5.3285 0.8615 0.0000 0.0000 624.4732 371.5209 412.4200 2.8385 0.0000
+0 2000 142 5.00 0.583 8.6125 2.0850 59.6228 0.0000 783.7580 354.1010 493.9196 4.5429 0.0000
+0 2000 142 19.00 0.417 8.1680 2.2335 0.0000 0.0000 664.9091 284.8005 568.9183 4.9195 0.0000
+0 2000 143 5.00 0.583 11.8811 3.7061 54.5168 0.0000 1016.6502 398.4541 539.9893 3.8079 0.0000
+0 2000 143 19.00 0.417 10.7260 3.7690 0.0000 0.0000 859.8854 355.7234 583.6912 2.8205 0.0000
+0 2000 144 5.00 0.604 13.9245 5.0038 50.2724 0.0000 1121.0967 374.5218 643.3685 4.7321 0.0000
+0 2000 144 19.50 0.396 11.5700 5.0200 0.0000 0.0000 843.5720 331.0159 684.0992 3.9079 0.0000
+0 2000 145 5.00 0.604 8.9324 5.3979 32.2909 3.5600 461.2589 207.0422 833.6012 2.8383 0.0000
+0 2000 145 19.50 0.396 5.3689 4.5074 0.0000 6.0940 225.5934 170.5833 813.3458 2.6679 0.0000
+0 2000 146 5.00 0.604 9.8403 6.1579 48.0402 1.0160 711.1133 431.6586 665.6652 3.6955 0.0000
+0 2000 146 19.50 0.396 4.9442 4.9063 0.0000 1.0160 290.4673 287.3553 721.1034 2.3911 0.0000
+0 2000 147 5.00 0.604 5.3717 4.5717 27.9677 1.5240 290.0107 239.7462 747.4857 5.8172 0.0000
+0 2000 147 19.50 0.396 2.0968 3.5311 0.0000 4.3220 178.0960 257.3620 668.6248 4.1495 0.0000
+0 2000 148 5.00 0.604 7.8779 5.6755 60.5155 0.0000 696.3371 534.5590 533.7883 3.6948 0.0000
+0 2000 148 19.50 0.396 9.1163 5.2021 0.0000 0.0000 733.7687 454.7730 572.4860 2.2568 0.0000
+0 2000 149 5.00 0.604 15.1655 6.6207 39.9960 0.0000 1327.7382 548.1088 579.2441 3.4300 0.0000
+0 2000 149 19.50 0.375 11.0467 6.6617 0.0000 0.0000 836.6649 480.0890 643.9111 2.9289 0.0000
+0 2000 150 4.50 0.625 17.1620 8.5957 57.7435 0.0000 1664.4685 799.3500 479.6036 4.4980 0.0000
+0 2000 150 19.50 0.375 15.5344 7.8494 0.0000 0.0000 1613.2961 895.5350 313.7108 3.8006 0.0000
+0 2000 151 4.50 0.625 14.5177 9.5333 54.3163 0.0000 1100.7064 600.6460 750.1207 3.8903 0.0000
+0 2000 151 19.50 0.375 12.7839 7.8872 0.0000 0.0000 1125.4316 696.3157 516.7769 3.4861 0.0000
+0 2000 152 4.50 0.625 17.2163 9.5107 61.7974 0.0000 1850.8846 1058.4666 291.6410 3.9420 0.0000
+0 2000 152 19.50 0.375 10.0161 7.9622 0.0000 0.0000 807.8901 631.7347 587.0198 2.5361 0.0000
+0 2000 153 4.50 0.625 6.9700 8.1037 57.4063 0.0000 474.8204 560.9564 674.3376 2.3413 0.0000
+0 2000 153 19.50 0.375 4.9750 6.5722 0.0000 0.0000 297.4979 401.4236 716.7079 2.1328 0.0000
+0 2000 154 4.50 0.625 12.3600 7.7903 45.0589 0.0000 889.7854 506.8596 705.0961 2.3010 0.0000
+0 2000 154 19.50 0.375 10.8122 6.5767 0.0000 0.0000 983.0628 652.7348 466.1383 3.4261 0.0000
+0 2000 155 4.50 0.625 14.6953 8.3317 61.8527 0.0000 1508.4669 919.5534 338.0452 3.6800 0.0000
+0 2000 155 19.50 0.375 9.4889 7.3756 0.0000 0.0000 748.9106 575.3196 599.0825 3.2006 0.0000
+0 2000 156 4.50 0.625 6.8790 7.5427 26.9422 0.0000 95.1412 140.2521 1045.9537 2.2067 0.0000
+0 2000 156 19.50 0.375 6.9144 6.5422 0.0000 0.0000 333.2773 305.2168 810.4559 2.8556 0.0000
+0 2000 157 4.50 0.625 14.5550 8.6463 54.4113 0.0000 1288.9360 740.7609 541.4373 3.1997 0.0000
+0 2000 157 19.50 0.375 12.3461 7.7317 0.0000 0.0000 1123.8427 733.9523 466.7973 3.9972 0.0000
+0 2000 158 4.50 0.625 17.1717 9.8010 59.8007 0.0000 1784.2866 1020.7640 352.5799 5.0397 0.0000
+0 2000 158 19.50 0.375 13.8817 8.0594 0.0000 0.0000 1437.9106 917.6299 309.0441 3.3289 0.0000
+0 2000 159 4.50 0.625 17.7940 9.7467 56.1809 0.0000 1895.2793 1050.7297 321.1743 3.1960 0.0000
+0 2000 159 19.50 0.375 14.7294 8.9867 0.0000 0.0000 1410.6067 870.4651 426.0375 3.9944 0.0000
+0 2000 160 4.50 0.625 16.2953 9.4130 32.5476 0.0000 1513.5389 820.7555 511.4382 3.4857 0.0000
+0 2000 160 19.50 0.375 11.0878 8.3939 0.0000 0.0000 809.2881 579.6581 670.0656 6.8278 0.0000
+0 2000 161 4.50 0.625 12.2463 9.3983 41.4653 0.0000 963.0989 709.2033 624.4809 5.0433 0.0000
+0 2000 161 19.50 0.375 7.0456 7.5944 0.0000 0.0000 745.3937 776.4016 414.5224 4.3756 0.0000
+0 2000 162 4.50 0.625 10.2183 8.5950 51.0691 0.0000 872.9501 740.8389 533.0719 2.9000 0.0000
+0 2000 162 19.50 0.375 8.2667 7.3978 0.0000 0.0000 722.8394 657.3574 519.4744 2.6472 0.0000
+0 2000 163 4.50 0.625 13.0287 8.4887 47.9232 0.0000 1228.9895 829.8331 435.4580 2.5737 0.0000
+0 2000 163 19.50 0.375 10.2772 6.9411 0.0000 0.0000 1037.9875 779.7753 364.5128 3.6800 0.0000
+0 2000 164 4.50 0.625 14.8487 8.8800 56.3821 0.0000 1559.9587 998.8770 301.1861 3.6953 0.0000
+0 2000 164 19.50 0.375 11.5961 8.3083 0.0000 0.0000 1219.6644 935.2725 308.1984 5.7150 0.0000
+0 2000 165 4.50 0.625 7.9650 8.7353 67.4312 3.0520 633.3868 683.0018 597.9054 9.3897 0.0000
+0 2000 165 19.50 0.375 3.6506 7.1089 0.0000 0.0000 301.2552 523.3117 632.1386 5.8767 0.0000
+0 2000 166 4.50 0.625 9.7333 8.6727 65.2253 0.0000 819.7233 720.6318 561.3143 6.9600 0.0000
+0 2000 166 19.50 0.375 10.5544 8.3500 0.0000 0.0000 826.1476 641.6729 604.7433 8.0428 0.0000
+0 2000 167 4.50 0.625 10.9527 9.3507 47.5632 0.0000 908.7917 718.8364 611.8125 6.3727 0.0000
+0 2000 167 19.50 0.375 0.1572 7.3089 0.0000 0.0000 -3.2992 415.5047 754.3257 1.6256 0.0000
+0 2000 168 4.50 0.625 -1.2640 5.4327 8.7390 0.2540 -1.7888 352.2346 688.8322 1.1447 0.0000
+0 2000 168 19.50 0.375 -1.7006 4.2961 0.0000 0.0000 -0.2467 301.2948 668.9243 0.9361 0.0000
+0 2000 169 4.50 0.625 1.3717 4.3580 37.9031 12.4380 29.7326 185.8349 788.8033 1.6520 0.0000
+0 2000 169 19.50 0.375 5.0183 4.2072 0.0000 0.0000 470.1692 414.3963 550.9473 2.6422 0.0000
+0 2000 170 4.50 0.625 12.0160 6.4747 45.8172 0.7620 1047.2190 592.6055 526.2271 4.9417 0.0000
+0 2000 170 19.50 0.375 8.2217 6.6867 0.0000 0.0000 441.3536 329.6625 795.3156 4.3544 0.0000
+0 2000 171 4.50 0.625 8.1003 6.9160 34.9729 20.0540 452.3075 356.5673 785.1569 7.7730 0.0000
+0 2000 171 19.50 0.375 3.9478 5.1417 0.0000 14.2420 151.2823 223.7623 798.6918 8.1878 0.0000
+0 2000 172 4.50 0.625 7.4910 6.7620 65.4476 0.0000 692.1381 624.3754 513.0253 9.0130 0.0000
+0 2000 172 19.50 0.375 6.5622 5.7106 0.0000 0.0000 684.0493 626.6124 433.7056 2.5361 0.0000
+0 2000 173 4.50 0.625 11.8587 7.9500 64.9174 0.0000 1095.7876 758.4098 473.6757 3.1953 0.0000
+0 2000 173 19.50 0.375 10.4539 7.0222 0.0000 0.0000 862.6744 595.2591 554.9937 2.9244 0.0000
+0 2000 174 4.50 0.625 15.9087 9.0160 62.6519 0.0000 1581.6519 909.8019 402.7997 4.2957 0.0000
+0 2000 174 19.50 0.375 11.2272 8.0861 0.0000 0.0000 1052.4908 792.2096 435.2696 4.1700 0.0000
+0 2000 175 4.50 0.625 14.2843 9.5820 45.5717 0.0000 1267.2931 822.2458 530.0394 4.3033 0.0000
+0 2000 175 19.50 0.375 11.2628 8.3483 0.0000 0.0000 923.5072 680.4232 566.6797 3.9144 0.0000
+0 2000 176 4.50 0.625 14.9277 9.8553 55.8049 0.0000 1305.3271 807.3955 566.2542 4.6353 0.0000
+0 2000 176 19.50 0.375 12.2900 9.2050 0.0000 0.0000 938.5610 666.9681 646.5968 5.4900 0.0000
+0 2000 177 4.50 0.625 10.8830 9.1587 24.8477 0.7620 616.0355 463.0081 846.9855 4.1790 0.0000
+0 2000 177 19.50 0.375 4.7722 8.0111 0.0000 6.6040 -6.5700 213.5894 1007.2190 0.7100 0.0000
+0 2000 178 4.50 0.625 5.5393 7.5873 15.9662 8.6320 9.4530 149.6458 1039.3302 1.8483 0.0000
+0 2000 178 19.50 0.375 5.2433 6.7556 0.0000 0.0000 148.6451 248.8687 881.2161 2.7100 0.0000
+0 2000 179 4.50 0.625 8.8123 7.7390 42.2212 0.7620 377.1924 293.6298 911.0043 2.5783 0.0000
+0 2000 179 19.50 0.375 4.4656 7.2539 0.0000 1.2700 47.2179 227.5234 937.8239 3.3167 0.0000
+0 2000 180 4.50 0.625 7.8683 7.9677 43.2884 0.2540 223.8499 232.0220 990.2075 2.6407 0.0000
+0 2000 180 19.50 0.375 8.7911 7.3217 0.0000 0.0000 466.9813 350.9137 820.0459 2.2911 0.0000
+0 2000 181 4.50 0.625 14.7593 9.4907 54.3339 0.0000 1324.3440 820.4089 530.1776 3.6893 0.0000
+0 2000 181 19.50 0.375 8.7961 8.6078 0.0000 0.0000 412.6866 396.2387 871.8356 2.4650 0.0000
+0 2000 182 4.50 0.625 15.4563 10.3463 58.5781 0.0000 1357.4766 848.9252 575.1349 2.9137 0.0000
+0 2000 182 19.50 0.375 12.7578 9.4061 0.0000 0.0000 1027.8875 727.7504 603.7170 4.5883 0.0000
+0 2000 183 4.50 0.625 15.3887 10.5707 49.9021 0.0000 1225.9861 740.5010 693.2501 4.7100 0.0000
+0 2000 183 19.50 0.375 12.8767 9.5533 0.0000 0.0000 1009.8231 709.3486 633.1417 3.6728 0.0000
+0 2000 184 4.50 0.625 14.8950 10.0943 37.2373 0.2540 1124.8872 651.7198 737.2029 3.1867 0.0000
+0 2000 184 19.50 0.375 12.5733 8.9772 0.0000 0.0000 865.6297 548.1915 747.8356 4.8711 0.0000
+0 2000 185 4.50 0.625 15.1443 10.3657 42.3130 0.0000 1183.9767 704.7025 711.5640 4.1827 0.0000
+0 2000 185 19.50 0.375 13.8411 9.7461 0.0000 0.0000 1308.4756 924.0295 434.1509 4.7372 0.0000
+0 2000 186 4.50 0.625 15.7527 11.0053 56.5472 0.0000 1576.4276 1085.8706 389.7507 3.5900 0.0000
+0 2000 186 19.50 0.375 13.5550 9.5922 0.0000 0.0000 1510.9706 1146.2469 199.3724 5.8567 0.0000
+0 2000 187 4.50 0.625 15.3083 10.7507 45.1226 0.0000 1143.5728 685.8340 762.7733 2.8187 0.0000
+0 2000 187 19.50 0.375 13.7117 9.7283 0.0000 0.0000 1159.2838 788.6180 568.0638 3.9717 0.0000
+0 2000 188 4.50 0.625 18.3720 11.5640 50.2904 0.0000 1816.7051 1045.8361 483.3532 2.7083 0.0000
+0 2000 188 19.50 0.375 15.9628 10.7572 0.0000 0.0000 1466.1342 934.1699 510.4200 4.4806 0.0000
+0 2000 189 4.50 0.625 17.6833 11.7423 39.9461 0.0000 1615.1270 957.3486 580.6028 2.4540 0.0000
+0 2000 189 19.50 0.375 13.2983 10.8406 0.0000 0.0000 945.9139 712.5179 739.3066 2.4683 0.0000
+0 2000 190 4.50 0.625 15.9707 11.7327 41.9711 0.0000 1234.1437 789.5699 748.4811 4.4383 0.0000
+0 2000 190 19.50 0.375 12.6700 11.0172 0.0000 0.2540 733.9515 572.6445 894.4496 4.7550 0.0000
+0 2000 191 4.50 0.625 14.3890 12.3773 62.4218 0.0000 889.4703 681.8905 920.7205 3.3867 0.0000
+0 2000 191 19.50 0.375 9.7528 11.1239 0.0000 5.5880 300.1752 419.0137 1058.8909 3.4167 0.0000
+0 2000 192 4.50 0.625 15.1607 12.4850 59.8635 0.0000 892.2794 608.9452 1006.1450 3.0373 0.0000
+0 2000 192 19.50 0.375 13.4039 11.2856 0.0000 0.0000 884.9647 676.9265 815.7293 2.8344 0.0000
+0 2000 193 4.50 0.625 16.0660 12.3070 57.7125 17.7000 1225.9598 818.4315 779.3929 3.0823 0.0000
+0 2000 193 19.50 0.375 12.9328 11.1544 0.0000 0.0000 758.0693 589.6500 891.3750 2.6483 0.0000
+0 2000 194 4.50 0.625 15.3217 11.2790 40.0529 0.0000 985.5935 568.1819 924.9100 2.7867 0.0000
+0 2000 194 19.50 0.375 13.5261 10.5028 0.0000 0.0000 878.6229 592.8779 829.7454 3.0050 0.0000
+0 2000 195 4.50 0.625 16.5303 11.2357 37.8769 1.5240 1224.8667 664.6980 826.6006 2.5387 0.0000
+0 2000 195 19.50 0.375 14.1778 10.5978 0.0000 0.0000 890.6401 545.2416 885.7484 2.9322 0.0000
+0 2000 196 4.50 0.625 17.9520 11.8413 42.3792 0.0000 1379.9680 696.0569 855.0203 2.1563 0.0000
+0 2000 196 19.50 0.375 15.6672 10.9672 0.0000 0.0000 1314.1300 835.8881 627.8473 3.6250 0.0000
+0 2000 197 4.50 0.625 17.6683 11.8540 34.6124 0.0000 1477.1147 825.8088 723.4354 3.6983 0.0000
+0 2000 197 19.50 0.396 13.7953 10.9653 0.0000 0.0000 909.2844 633.5986 828.9734 2.2226 0.0000
+0 2000 198 5.00 0.604 12.5031 11.2345 19.8828 3.0480 470.4813 350.2585 1137.1963 2.0310 0.0000
+0 2000 198 19.50 0.396 8.6332 10.3247 0.0000 6.3500 86.2724 224.6977 1181.6381 2.0942 0.0000
+0 2000 199 5.00 0.604 11.5859 10.7779 37.2154 1.5280 321.2381 245.6665 1201.9600 2.3407 0.0000
+0 2000 199 19.50 0.396 9.0079 9.2795 0.0000 7.6180 412.1700 430.6399 890.0361 4.9874 0.0000
+0 2000 200 5.00 0.604 16.7359 11.6879 61.7989 0.0000 1437.9839 892.1306 653.0029 3.9993 0.0000
+0 2000 200 19.50 0.396 11.4832 10.1253 0.0000 0.0000 939.8586 810.4268 581.9013 3.6153 0.0000
+0 2000 201 5.00 0.604 16.2148 11.7652 56.1377 0.0000 1475.5461 1006.8999 544.1443 3.0928 0.0000
+0 2000 201 19.50 0.396 11.4389 10.4137 0.0000 0.0000 587.7671 492.0947 924.2584 2.4895 0.0000
+0 2000 202 5.00 0.604 15.1079 11.8145 51.7856 0.0000 1104.6183 769.8290 781.8586 2.5334 0.0000
+0 2000 202 19.50 0.396 8.9521 10.7958 0.0000 0.0000 304.8528 457.7536 990.8392 1.9221 0.0000
+0 2000 203 5.00 0.604 16.2983 11.4400 42.9316 0.0000 1552.9945 1038.2128 474.0731 4.2603 0.0000
+0 2000 203 19.50 0.396 10.5068 10.2800 0.0000 0.0000 505.2137 474.3411 928.9874 2.4342 0.0000
+0 2000 204 5.00 0.583 13.8818 11.7821 48.0645 0.0000 740.2944 535.2156 1013.0899 2.4018 0.0000
+0 2000 204 19.00 0.417 12.3800 10.2470 0.0000 0.0000 928.8302 737.6743 664.7422 4.1430 0.0000
+0 2000 205 5.00 0.583 19.2518 11.8061 51.4688 0.0000 2076.9463 1211.8682 340.7394 3.1332 0.0000
+0 2000 205 19.00 0.417 16.0600 10.7465 0.0000 0.0000 1588.0032 1042.2480 402.7614 2.9095 0.0000
+0 2000 206 5.00 0.583 17.0154 11.8654 39.8101 0.7620 1509.6680 944.1256 606.8522 4.4307 0.0000
+0 2000 206 19.00 0.417 14.0785 11.0000 0.0000 0.0000 1110.3336 809.9142 655.8125 6.6290 0.0000
+0 2000 207 5.00 0.583 17.0775 12.6529 46.8598 0.0000 1511.0568 1018.3019 611.4354 3.8836 0.0000
+0 2000 207 19.00 0.417 13.8950 10.8705 0.0000 0.0000 1133.1827 841.0300 614.3761 3.8820 0.0000
+0 2000 208 5.00 0.583 16.3914 11.7704 32.0362 1.0160 1279.0182 778.1370 760.5807 3.5136 0.0000
+0 2000 208 19.00 0.417 12.6920 10.3685 0.0000 0.0000 819.9694 607.3104 804.0341 3.5745 0.0000
+0 2000 209 5.00 0.583 17.1275 12.2986 46.8148 0.0000 1353.7725 820.9695 775.3715 2.9300 0.0000
+0 2000 209 19.00 0.417 13.2210 10.8420 0.0000 0.0000 927.5462 703.0998 749.8197 3.2360 0.0000
+0 2000 210 5.00 0.583 15.4589 11.9836 36.1831 1.0160 1030.5665 661.0403 901.2946 2.7232 0.0000
+0 2000 210 19.00 0.417 11.1505 10.7475 0.0000 0.0000 514.5878 480.2869 964.4577 2.9735 0.0000
+0 2000 211 5.00 0.583 15.8936 11.7575 48.5188 0.0000 1397.5316 962.3840 583.0657 3.5879 0.0000
+0 2000 211 19.00 0.417 11.8425 10.5155 0.0000 0.0000 697.6553 579.2798 845.6022 2.6060 0.0000
+0 2000 212 5.00 0.583 15.2154 11.8204 51.3729 0.0000 1210.2439 864.0916 688.2272 3.2636 0.0000
+0 2000 212 19.00 0.417 10.8510 10.4600 0.0000 0.0000 669.0192 634.6140 785.6143 3.1100 0.0000
+0 2000 213 5.00 0.583 16.9450 11.9543 53.9050 0.0000 1663.9974 1120.7524 445.5022 3.5700 0.0000
+0 2000 213 19.00 0.417 14.2550 10.5300 0.0000 0.0000 1231.5194 873.7045 552.9086 2.8960 0.0000
+0 2000 214 5.00 0.583 18.0957 11.5050 33.9664 3.5540 1606.6378 869.3018 647.3768 2.9936 0.0000
+0 2000 214 19.00 0.417 15.7875 11.0610 0.0000 0.0000 1295.4747 806.9233 664.0958 3.9955 0.0000
+0 2000 215 5.00 0.583 17.7868 12.5511 33.2732 2.2880 1508.3672 904.6854 711.5963 4.0707 0.0000
+0 2000 215 19.00 0.417 15.3155 11.3630 0.0000 0.0000 1192.8416 789.5549 708.7554 4.5825 0.0000
+0 2000 216 5.00 0.583 17.0104 12.5204 26.4449 0.0000 1300.0192 800.8032 810.3963 2.7432 0.0000
+0 2000 216 19.00 0.417 15.3055 11.4370 0.0000 0.0000 1158.9625 763.4590 742.0792 3.4040 0.0000
+0 2000 217 5.00 0.583 17.4725 12.9850 45.7029 0.0000 1332.3000 817.8986 842.8871 5.4446 0.0000
+0 2000 217 19.00 0.417 14.4850 12.0485 0.0000 0.0000 1042.1093 793.6210 768.8745 3.5745 0.0000
+0 2000 218 5.00 0.583 15.4407 12.5439 44.9761 0.2540 1173.8818 864.8295 748.5162 3.7768 0.0000
+0 2000 218 19.00 0.417 11.7665 10.7120 0.0000 0.0000 892.2173 789.7664 652.3965 2.7870 0.0000
+0 2000 219 5.00 0.583 15.9964 12.0607 53.9343 0.0000 1405.7411 982.2747 592.5117 3.0529 0.0000
+0 2000 219 19.00 0.417 13.4105 10.5965 0.0000 0.0000 1223.9565 960.7161 472.5865 3.0180 0.0000
+0 2000 220 5.00 0.583 18.3904 11.9275 57.1837 0.0000 1884.6097 1156.0361 408.9463 3.3589 0.0000
+0 2000 220 19.00 0.417 13.9020 11.1425 0.0000 0.0000 1262.1509 992.9329 487.2689 4.1690 0.0000
+0 2000 221 5.00 0.583 18.6818 12.4900 51.2851 0.0000 1908.7131 1188.4893 428.6861 3.5732 0.0000
+0 2000 221 19.00 0.417 15.8650 11.2120 0.0000 0.0000 1503.3629 1023.7778 463.8327 2.8045 0.0000
+0 2000 222 5.00 0.583 19.6336 12.4661 49.8674 0.0000 1943.5581 1091.9183 522.0195 3.7661 0.0000
+0 2000 222 19.00 0.417 14.4880 11.5295 0.0000 0.0000 1128.1379 831.0543 684.2435 2.3675 0.0000
+0 2000 223 5.00 0.583 17.2129 12.6768 46.3070 0.0000 1368.4803 867.0722 767.2172 2.7132 0.0000
+0 2000 223 19.00 0.417 13.9255 11.8485 0.0000 0.0000 881.5398 675.3780 868.8087 2.8540 0.0000
+0 2000 224 5.00 0.583 17.6975 13.1489 41.6131 0.0000 1386.8259 859.6132 817.4315 4.6732 0.0000
+0 2000 224 19.00 0.417 14.9835 11.8830 0.0000 0.0000 1083.7217 766.1060 780.8202 3.8945 0.0000
+0 2000 225 5.00 0.583 14.7207 11.7646 17.5579 0.2540 1000.2198 692.2162 842.9928 3.2293 0.0000
+0 2000 225 19.00 0.417 12.7960 10.6545 0.0000 0.0000 904.2322 704.9832 730.7927 2.6260 0.0000
+0 2000 226 5.00 0.583 16.4571 12.3511 45.4106 1.7780 1184.1157 738.4526 862.7128 2.7714 0.0000
+0 2000 226 19.00 0.417 13.3515 10.9265 0.0000 0.0000 897.4778 666.5035 794.1733 4.5520 0.0000
+0 2000 227 5.00 0.583 17.8479 11.5125 21.5856 0.0000 1612.9758 910.9735 602.6450 3.2607 0.0000
+0 2000 227 19.00 0.417 14.9820 10.8525 0.0000 0.0000 1325.3647 908.0737 545.0673 2.1000 0.0000
+0 2000 228 5.00 0.583 16.2239 11.4664 25.6270 0.0000 1198.5455 694.2620 816.9027 2.3325 0.0000
+0 2000 228 19.00 0.417 12.7450 11.2685 0.0000 0.0000 561.9558 420.4658 1069.1343 2.0385 0.0000
+0 2000 229 5.00 0.583 13.9954 11.9625 34.0405 3.8120 782.5461 563.1317 993.9076 4.7411 0.0000
+0 2000 229 19.00 0.438 9.0367 10.1067 0.0000 0.0000 330.5555 418.5359 969.7540 2.3133 0.0000
+0 2000 230 5.50 0.562 8.8115 10.4393 21.3030 8.1260 159.9608 290.8538 1127.1377 1.9274 0.0000
+0 2000 230 19.00 0.438 8.1805 9.5676 0.0000 3.8080 180.6489 289.2848 1053.7891 2.6610 0.0000
+0 2000 231 5.50 0.562 11.3170 10.2596 23.6999 1.0160 339.6896 246.5114 1156.0007 1.7174 0.0000
+0 2000 231 19.00 0.438 10.1605 9.5848 0.0000 0.2540 463.6871 414.4911 929.7775 5.6833 0.0000
+0 2000 232 5.50 0.562 14.6259 11.3404 54.0450 0.0000 1099.4132 761.6773 742.8347 6.1444 0.0000
+0 2000 232 19.00 0.438 12.4405 10.5390 0.0000 0.0000 884.3951 711.3259 714.8453 4.9824 0.0000
+0 2000 233 5.50 0.562 13.8563 11.2259 37.0951 0.2540 900.6716 642.7096 844.8190 4.3507 0.0000
+0 2000 233 19.00 0.438 11.2290 9.6029 0.0000 0.0000 715.6343 575.8258 771.4262 3.1919 0.0000
+0 2000 234 5.50 0.542 14.2838 10.6377 38.9711 0.0000 982.5829 626.6395 812.5743 2.7069 0.0000
+0 2000 234 18.50 0.458 12.0677 10.0827 0.0000 0.0000 946.8147 769.2495 617.0656 5.4268 0.0000
+0 2000 235 5.50 0.542 13.5423 11.1315 35.5770 1.2700 932.0123 684.7097 796.1832 3.4465 0.0000
+0 2000 235 18.50 0.458 9.3732 9.4409 0.0000 0.0000 472.7475 477.7133 855.9825 2.5041 0.0000
+0 2000 236 5.50 0.542 14.4900 10.8673 42.7295 0.0000 1094.5983 736.5351 724.7202 2.7558 0.0000
+0 2000 236 18.50 0.458 12.1200 9.8941 0.0000 0.0000 801.9309 605.6240 766.8373 2.9955 0.0000
+0 2000 237 5.50 0.542 15.9450 11.1619 39.6641 0.2540 1192.7633 699.4804 789.0222 3.2508 0.0000
+0 2000 237 18.50 0.458 13.5755 10.3782 0.0000 0.0000 938.8035 637.8481 774.2754 3.5077 0.0000
+0 2000 238 5.50 0.542 14.4450 10.9719 23.8804 0.2540 876.3415 530.7618 934.1731 2.9427 0.0000
+0 2000 238 18.50 0.458 10.9868 10.2641 0.0000 1.0160 475.8190 411.3814 990.3790 2.8332 0.0000
+0 2000 239 5.50 0.542 13.3985 10.5654 23.8640 0.5080 767.7026 495.9402 932.6283 2.7162 0.0000
+0 2000 239 18.50 0.458 11.4614 10.1305 0.0000 0.0000 589.2407 471.5054 918.0498 5.3909 0.0000
+0 2000 240 5.50 0.542 14.9700 11.5604 45.9070 0.0000 993.6320 644.0002 877.2518 4.9358 0.0000
+0 2000 240 18.50 0.458 11.7455 10.6418 0.0000 0.0000 594.2847 495.6256 938.8911 3.4568 0.0000
+0 2000 241 5.50 0.542 13.1827 10.5223 23.8662 10.9280 696.9702 440.3056 985.1140 3.0312 0.0000
+0 2000 241 18.50 0.458 8.4482 8.8764 0.0000 10.6720 158.5745 189.3139 1097.4624 3.3195 0.0000
+0 2000 242 5.50 0.542 12.8065 10.3123 24.7547 8.1220 547.6223 310.2680 1097.8610 2.8338 0.0000
+0 2000 242 18.50 0.458 9.7709 9.5414 0.0000 0.0000 294.7621 275.4378 1065.4358 4.7055 0.0000
+0 2000 243 5.50 0.542 12.6050 10.2892 30.4042 0.5080 634.3848 415.0074 990.9539 5.8977 0.0000
+0 2000 243 18.50 0.458 8.8309 9.2105 0.0000 0.0000 380.1811 399.6692 914.6920 2.4318 0.0000
+0 2000 244 5.50 0.542 8.7396 8.6950 23.8910 4.0600 179.7844 173.9970 1099.5201 1.9504 0.0000
+0 2000 244 18.50 0.458 8.0700 8.3655 0.0000 0.5080 275.1068 297.9533 949.9718 2.4155 0.0000
+0 2000 245 5.50 0.542 11.3650 9.1942 26.3099 4.8300 582.0511 394.9838 922.7460 2.3127 0.0000
+0 2000 245 18.50 0.458 8.6105 8.3991 0.0000 0.0000 562.0155 543.4322 707.6882 3.1109 0.0000
+0 2000 246 5.50 0.542 11.6988 9.2915 50.4238 0.0000 997.1794 780.9297 546.9717 3.0838 0.0000
+0 2000 246 18.50 0.458 9.7686 8.2673 0.0000 0.0000 884.8215 767.2822 474.8522 3.1614 0.0000
+0 2000 247 5.50 0.542 12.2981 8.9342 35.7802 0.0000 1008.8189 715.6455 581.4674 2.3612 0.0000
+0 2000 247 18.50 0.458 11.3818 8.6168 0.0000 0.0000 959.5722 727.3284 540.3884 3.2791 0.0000
+0 2000 248 5.50 0.542 15.0808 10.1465 35.3495 0.0000 1232.6971 750.4183 648.9075 2.1488 0.0000
+0 2000 248 18.50 0.458 13.5368 9.8650 0.0000 0.0000 1053.5437 713.0182 655.2573 3.4977 0.0000
+0 2000 249 5.50 0.542 16.3762 10.8285 37.9082 0.7620 1343.9052 747.2061 710.4283 4.4888 0.0000
+0 2000 249 18.50 0.458 11.8468 9.6805 0.0000 0.0000 893.0568 700.6956 652.4507 3.4205 0.0000
+0 2000 250 5.50 0.542 13.7104 10.7562 43.1606 0.5080 1046.5272 754.4520 693.9233 5.1396 0.0000
+0 2000 250 18.50 0.458 8.1836 9.1423 0.0000 0.0000 629.7184 701.8231 607.6541 3.9964 0.0000
+0 2000 251 5.50 0.542 10.9738 9.6912 43.0029 0.0000 729.1351 613.3872 745.1808 2.5358 0.0000
+0 2000 251 18.50 0.458 8.7173 8.6768 0.0000 0.7620 501.5555 493.1721 779.2600 3.2750 0.0000
+0 2000 252 5.50 0.542 11.4288 9.7715 37.0976 5.3340 685.0874 523.6072 840.3292 3.9708 0.0000
+0 2000 252 18.50 0.458 6.7936 8.0823 0.0000 0.5080 461.8190 555.8052 670.8204 7.7655 0.0000
+0 2000 253 5.50 0.542 11.2269 9.0312 48.9278 0.0000 942.8569 745.5350 558.6356 5.3385 0.0000
+0 2000 253 18.50 0.458 9.4159 8.2432 0.0000 0.0000 792.3130 699.8054 538.8571 2.7055 0.0000
+0 2000 254 5.50 0.542 13.5446 9.4688 43.6896 0.0000 1305.3181 916.0272 423.7289 5.5981 0.0000
+0 2000 254 18.50 0.479 10.6039 8.7557 0.0000 0.0000 1060.0808 898.4646 379.7346 4.1617 0.0000
+0 2000 255 6.00 0.521 12.5980 9.4976 40.9649 0.0000 1247.6956 963.3839 376.9318 3.5496 0.0000
+0 2000 255 18.50 0.479 10.1083 8.2670 0.0000 0.0000 931.4604 783.3569 457.2625 4.9835 0.0000
+0 2000 256 6.00 0.521 14.5984 9.8664 46.6589 0.0000 1384.1956 924.9325 447.5127 4.2128 0.0000
+0 2000 256 18.50 0.479 12.5874 8.7252 0.0000 0.0000 1155.8669 817.6848 457.9314 4.0143 0.0000
+0 2000 257 6.00 0.521 15.8068 10.2844 43.3372 0.0000 1516.7190 954.0793 453.4322 3.8684 0.0000
+0 2000 257 18.50 0.479 11.2904 8.3170 0.0000 0.0000 998.5817 751.0070 495.2903 2.5570 0.0000
+0 2000 258 6.00 0.500 13.6396 9.3642 45.2290 0.0000 1105.9843 716.6288 616.5521 2.8308 0.0000
+0 2000 258 18.00 0.500 12.1079 8.2758 0.0000 0.0000 961.5181 635.3987 607.1902 2.7721 0.0000
+0 2000 259 6.00 0.500 17.7304 9.5908 44.4307 0.0000 1788.6492 934.0461 418.2750 3.3221 0.0000
+0 2000 259 18.00 0.500 14.1308 9.0646 0.0000 0.0000 1450.5837 978.5081 324.4417 4.6588 0.0000
+0 2000 260 6.00 0.500 17.8538 10.2954 39.0902 0.0000 1868.0079 1057.6825 352.0165 3.1675 0.0000
+0 2000 260 18.00 0.500 14.2450 8.9362 0.0000 0.0000 1404.2997 913.2222 381.0561 3.2808 0.0000
+0 2000 261 6.00 0.500 17.4546 10.0717 37.9894 0.0000 1776.2552 998.6708 390.5779 4.2413 0.0000
+0 2000 261 18.00 0.500 11.1000 9.7167 0.0000 0.0000 839.8429 708.1758 647.2402 5.6213 0.0000
+0 2000 262 6.00 0.500 10.9558 9.8554 39.2044 0.0000 807.9347 705.4310 662.3968 6.8196 0.0000
+0 2000 262 18.00 0.500 9.0183 8.8442 0.0000 0.0000 664.0180 648.3730 636.2806 7.6188 0.0000
+0 2000 263 6.00 0.500 10.7417 9.1108 25.3325 0.0000 763.6655 617.1154 689.5190 3.7912 0.0000
+0 2000 263 18.00 0.500 1.8875 7.5442 0.0000 15.9980 160.5770 469.3170 720.4324 1.9767 0.0000
+0 2000 264 6.00 0.500 -0.8704 5.0012 22.4478 0.2540 -0.0841 308.5422 705.0470 1.4867 0.0000
+0 2000 264 18.00 0.500 6.1325 4.6208 0.0000 0.0000 518.3109 399.9975 589.9873 6.0862 0.0000
+0 2000 265 6.00 0.500 7.0712 6.6679 27.5861 17.0040 383.3479 340.9159 783.5728 5.8133 0.0000
+0 2000 265 18.00 0.500 2.2246 5.4892 0.0000 9.9120 49.8941 238.3248 806.3392 2.8737 0.0000
+0 2000 266 6.00 0.500 -1.0900 5.4783 20.8193 0.0000 -2.4454 343.8175 700.3921 1.2012 0.0000
+0 2000 266 18.00 0.500 -5.4200 3.8921 0.0000 0.0000 15.6446 421.7454 524.7845 0.6800 0.0000
+0 2000 267 6.00 0.500 -5.9450 2.8963 10.0998 2.2860 15.5595 390.2837 499.2513 1.0512 0.0000
+0 2000 267 18.00 0.500 -7.6871 2.6067 0.0000 9.9100 20.0028 431.0077 442.6520 1.1050 0.0000
+0 2000 268 6.00 0.500 -5.1496 2.5554 9.8474 0.5080 29.2500 356.4589 514.4240 4.1950 0.0000
+0 2000 268 18.00 0.500 -1.0029 2.5487 0.0000 0.0000 436.4873 603.4000 267.1234 3.9987 0.0000
+0 2000 269 6.00 0.500 6.7700 2.4388 42.2156 0.0000 885.7429 612.0938 252.5068 2.9117 0.0000
+0 2000 269 18.00 0.500 4.8267 2.3912 0.0000 0.0000 744.0849 601.4756 260.5779 3.2754 0.0000
+0 2000 270 6.00 0.500 8.7396 2.3988 41.4727 0.0000 1050.7798 631.1670 231.2885 3.5217 0.0000
+0 2000 270 18.00 0.500 5.1613 2.3167 0.0000 0.0000 628.9688 463.0719 394.9999 3.7383 0.0000
+0 2000 271 6.00 0.500 9.3942 3.1675 41.0346 0.0000 944.9755 512.3871 393.8273 2.7417 0.0000
+0 2000 271 18.00 0.500 6.6483 3.1879 0.0000 0.0000 667.7080 449.6982 456.7047 3.2613 0.0000
+0 2000 272 6.00 0.500 11.5533 5.1708 35.9048 0.0000 1015.9183 525.3282 504.5024 3.1500 0.0000
+0 2000 272 18.00 0.500 9.3742 5.7454 0.0000 0.2540 687.4709 418.2729 643.2439 3.4729 0.0000
+0 2000 273 6.00 0.500 8.8117 6.5175 20.7873 2.5420 554.4528 375.7360 738.6535 3.0479 0.0000
+0 2000 273 18.00 0.500 6.8137 5.8267 0.0000 0.0000 360.7924 293.4980 773.2632 4.8412 0.0000
+0 2000 274 6.00 0.500 9.3067 6.1654 24.6168 0.0000 680.1249 442.3148 647.6852 8.5383 0.0000
+0 2000 274 18.00 0.500 9.0337 6.2133 0.0000 0.0000 739.6484 531.9086 560.6990 7.8104 0.0000
+0 2000 275 6.00 0.500 11.6062 6.4963 27.6791 0.0000 1034.6725 615.9678 497.0251 6.7942 0.0000
+0 2000 275 18.00 0.500 10.6500 6.2517 0.0000 0.0000 1055.9419 715.7406 379.4403 6.1337 0.0000
+0 2000 276 6.00 0.500 12.4500 6.9808 38.8148 0.0000 1220.2775 759.0243 388.4893 4.2125 0.0000
+0 2000 276 18.00 0.500 9.5929 6.2346 0.0000 0.0000 953.7351 697.4232 397.4589 2.7638 0.0000
+0 2000 277 6.00 0.500 9.0767 6.5163 31.0950 0.0000 726.4780 526.9108 587.4651 4.2929 0.0000
+0 2000 277 18.00 0.521 5.2136 5.3992 0.0000 0.0000 359.8955 370.7845 668.8691 3.8712 0.0000
+0 2000 278 6.50 0.479 7.3509 5.9587 27.7736 0.2540 618.3666 517.5012 558.8063 6.0978 0.0000
+0 2000 278 18.00 0.521 1.8036 4.9756 0.0000 0.5080 156.5380 331.2181 681.2346 2.2876 0.0000
+0 2000 279 6.50 0.479 -1.6696 4.3861 26.1902 1.2700 44.6055 346.5270 629.3508 2.3626 0.0000
+0 2000 279 18.00 0.521 1.7996 3.1860 0.0000 0.0000 571.2554 638.2124 267.6661 5.7004 0.0000
+0 2000 280 6.50 0.479 3.7539 3.5183 38.3384 0.0000 695.7271 662.8868 262.5541 4.7522 0.0000
+0 2000 280 18.00 0.521 -7.6584 2.4264 0.0000 0.0000 28.8677 429.8333 434.4543 0.8380 0.0000
+0 2000 281 6.50 0.458 -6.1886 1.6264 13.1589 0.0000 20.0919 335.0635 487.0065 0.8191 0.0000
+0 2000 281 17.50 0.542 -5.0712 0.9715 0.0000 0.0000 178.6790 415.5530 374.1354 2.0046 0.0000
+0 2000 282 6.50 0.458 1.9318 0.5741 36.7376 0.0000 423.4580 354.4431 415.9481 1.9064 0.0000
+0 2000 282 17.50 0.542 2.6258 1.1185 0.0000 0.0000 619.1096 539.6380 256.9657 3.9512 0.0000
+0 2000 283 6.50 0.458 6.4523 1.4027 34.1222 0.0000 798.1324 498.0059 313.1344 3.1286 0.0000
+0 2000 283 17.50 0.542 3.6058 1.6204 0.0000 0.0000 487.2303 379.3770 442.4020 3.9212 0.0000
+0 2000 284 6.50 0.458 6.9573 2.8155 30.5827 0.0000 627.0189 368.4799 518.4144 2.5818 0.0000
+0 2000 284 17.50 0.542 4.5523 2.6019 0.0000 0.0000 533.2187 420.5547 453.6413 2.3646 0.0000
+0 2000 285 6.50 0.458 9.6859 3.2918 35.0120 0.0000 965.6116 513.7125 400.6656 3.2123 0.0000
+0 2000 285 17.50 0.542 5.4365 3.5381 0.0000 0.0000 589.7972 467.5125 458.6805 2.4254 0.0000
+0 2000 286 6.50 0.458 5.4850 3.8573 30.3174 0.0000 371.2895 269.9012 676.0771 2.2332 0.0000
+0 2000 286 17.50 0.542 0.3408 2.9150 0.0000 0.0000 369.4481 496.7382 394.5983 3.0538 0.0000
+0 2000 287 6.50 0.458 0.6091 2.2073 29.6289 0.0000 495.2087 573.4233 278.9575 5.4323 0.0000
+0 2000 287 17.50 0.542 -1.9815 1.6808 0.0000 0.0000 307.4493 474.5511 350.4257 6.1862 0.0000
+0 2000 288 6.50 0.458 -0.5014 1.5445 23.6759 0.0000 349.2494 445.7952 372.2177 3.4232 0.0000
+0 2000 288 17.50 0.542 -1.7631 1.3400 0.0000 0.0000 278.0181 419.4011 388.2449 2.3585 0.0000
+0 2000 289 6.50 0.458 3.0959 1.4282 33.9266 0.0000 561.3580 467.0200 345.4232 2.2300 0.0000
+0 2000 289 17.50 0.542 0.8981 1.3065 0.0000 0.0000 397.0756 416.9837 388.9810 3.7146 0.0000
+0 2000 290 6.50 0.458 5.5518 1.9014 33.5216 0.0000 787.6576 569.6396 267.2854 3.6936 0.0000
+0 2000 290 17.50 0.542 2.0119 1.4708 0.0000 0.0000 494.8907 465.4144 349.0287 1.9038 0.0000
+0 2000 291 6.50 0.458 6.0859 1.7595 33.0181 0.0000 739.0726 480.1425 349.8746 2.1318 0.0000
+0 2000 291 17.50 0.542 5.3423 1.4469 0.0000 0.0000 706.3666 483.9783 329.2924 3.1138 0.0000
+0 2000 292 6.50 0.458 10.2409 2.5005 32.6198 0.0000 1124.2080 590.8415 279.2054 3.2659 0.0000
+0 2000 292 17.50 0.542 6.3304 2.1188 0.0000 0.0000 811.0458 557.5753 290.4930 3.7246 0.0000
+0 2000 293 6.50 0.458 8.5464 3.1355 30.1124 0.0000 977.2479 618.0577 286.3905 3.4236 0.0000
+0 2000 293 17.50 0.542 3.2142 2.6385 0.0000 0.0000 541.0748 503.3809 372.8652 2.1388 0.0000
+0 2000 294 6.50 0.458 5.8214 2.8055 31.5016 0.0000 610.1274 427.0807 459.0717 2.1336 0.0000
+0 2000 294 17.50 0.542 5.0542 2.1250 0.0000 0.0000 650.9365 480.4808 367.7549 3.7777 0.0000
+0 2000 295 6.50 0.458 7.4395 3.4750 25.5780 0.0000 851.1397 594.5829 328.9793 3.1732 0.0000
+0 2000 295 17.50 0.542 2.6400 2.4058 0.0000 0.0000 432.5864 417.3886 446.5014 2.0735 0.0000
+0 2000 296 6.50 0.458 -0.3777 1.9532 5.8093 4.3180 16.9632 129.2576 709.6961 1.7468 0.0000
+0 2000 296 17.50 0.542 -1.1777 1.9808 0.0000 0.5080 8.5273 157.3800 682.9810 1.6131 0.0000
+0 2000 297 6.50 0.458 2.5382 2.0977 19.1902 0.5080 46.9174 19.6295 826.9542 1.6305 0.0000
+0 2000 297 17.50 0.542 1.9454 2.3158 0.0000 0.0000 87.7427 106.4599 751.5829 2.4788 0.0000
+0 2000 298 6.50 0.458 4.5482 2.9736 26.0798 0.0000 317.6956 223.6008 671.2972 2.1105 0.0000
+0 2000 298 17.50 0.542 -0.7265 2.1927 0.0000 0.0000 250.1052 390.4632 461.4562 7.5769 0.0000
+0 2000 299 6.50 0.458 0.6059 1.5273 26.4868 0.0000 337.7274 382.0066 435.0324 6.4727 0.0000
+0 2000 299 17.50 0.542 -1.5004 1.2781 0.0000 0.0000 249.5048 376.6226 427.9206 4.8450 0.0000
+0 2000 300 6.50 0.458 2.1832 1.2155 29.7475 0.0000 430.3915 377.4077 424.1058 2.3586 0.0000
+0 2000 300 17.50 0.562 0.0700 0.9263 0.0000 0.0000 297.3435 337.9029 449.3748 2.4633 0.0000
+0 2000 301 7.00 0.438 3.5281 0.9695 24.1211 0.0000 419.7702 282.6095 506.9670 2.2124 0.0000
+0 2000 301 17.50 0.562 2.3596 1.2685 0.0000 0.0000 266.1914 209.2379 594.7662 2.1785 0.0000
+0 2000 302 7.00 0.438 3.9324 1.8329 12.6083 0.0000 259.7133 142.0886 690.9713 1.7871 0.0000
+0 2000 302 17.50 0.562 1.5326 1.8948 0.0000 0.0000 208.3199 225.9213 610.0275 5.6711 0.0000
+0 2000 303 7.00 0.438 0.7929 1.7186 28.6364 0.0000 308.0557 353.8712 472.9732 9.6876 0.0000
+0 2000 303 17.50 0.562 1.1396 0.9893 0.0000 0.0000 535.0423 526.8409 263.5082 2.7104 0.0000
+0 2000 304 7.00 0.438 3.2471 0.9748 25.6655 0.0000 484.7172 361.7303 428.0263 2.0271 0.0000
+0 2000 304 17.50 0.562 -0.5189 0.8589 0.0000 0.0000 263.1742 325.2803 458.6526 1.9733 0.0000
+0 2000 305 7.00 0.417 -2.5140 0.6570 8.9829 4.8300 33.0660 170.7999 603.2712 2.1300 0.0000
+0 2000 305 17.00 0.583 -6.4829 0.7043 0.0000 5.0840 104.9864 383.3175 392.9911 11.8593 0.0000
+0 2000 306 7.00 0.417 -5.3805 0.6145 23.0225 0.5080 158.0048 397.0845 374.8988 9.0445 0.0000
+0 2000 306 17.00 0.583 -9.0175 0.5111 0.0000 0.0000 76.1629 416.1669 350.8776 1.9546 0.0000
+0 2000 307 7.00 0.417 -7.3715 0.3860 25.7931 0.2540 174.7260 462.8581 298.2432 3.0845 0.0000
+0 2000 307 17.00 0.583 -8.3696 0.4179 0.0000 2.0320 24.5379 343.2817 419.3269 1.5782 0.0000
+0 2000 308 7.00 0.417 -2.8925 0.2990 27.8903 0.0000 243.9740 375.3993 381.6045 2.4325 0.0000
+0 2000 308 17.00 0.583 -2.7243 0.3714 0.0000 0.0000 371.2981 504.0816 256.3357 4.0564 0.0000
+0 2000 309 7.00 0.417 2.6795 0.2255 27.0038 0.0000 728.7494 600.0957 153.4714 3.7045 0.0000
+0 2000 309 17.00 0.583 -0.8736 0.4789 0.0000 1.2700 291.7153 351.8901 413.6161 6.5789 0.0000
+0 2000 310 7.00 0.417 -6.4185 0.5570 17.9942 1.7780 133.2565 399.5151 369.7138 6.3095 0.0000
+0 2000 310 17.00 0.583 -10.8007 0.2661 0.0000 0.0000 144.8322 517.2774 238.2084 4.7864 0.0000
+0 2000 311 7.00 0.417 -11.0205 -0.1085 16.4207 0.0000 178.4159 538.6912 199.3815 5.7390 0.0000
+0 2000 311 17.00 0.583 -13.5386 -0.3171 0.0000 0.0000 120.3816 523.4363 205.1451 5.9582 0.0000
+0 2000 312 7.00 0.417 -9.7795 -0.5315 21.8074 0.0000 168.7792 478.4038 240.5255 2.7975 0.0000
+0 2000 312 17.00 0.583 -9.8596 -0.5436 0.0000 0.0000 137.7141 450.7279 267.6644 2.0229 0.0000
+0 2000 313 7.00 0.417 -5.0725 -0.6530 26.1189 0.0000 366.6260 535.2434 178.2834 3.7355 0.0000
+0 2000 313 17.00 0.583 -5.8136 -0.5779 0.0000 0.0000 281.1842 478.1962 238.6646 3.1925 0.0000
+0 2000 314 7.00 0.417 -4.2645 -0.5365 22.1917 0.0000 239.1366 386.3463 332.3718 4.1345 0.0000
+0 2000 314 17.00 0.583 -9.1693 -0.5911 0.0000 0.0000 74.3208 367.8757 348.4410 1.6954 0.0000
+0 2000 315 7.00 0.417 -4.1540 -1.0075 22.4334 0.0000 288.5121 407.4099 290.5876 3.0265 0.0000
+0 2000 315 17.00 0.583 -9.7075 -0.7954 0.0000 1.0160 73.5625 364.9964 342.2509 1.1525 0.0000
+0 2000 316 7.00 0.417 -11.2135 -0.9610 7.8507 1.2740 58.9093 384.4172 315.5847 1.6185 0.0000
+0 2000 316 17.00 0.583 -13.9589 -1.1600 0.0000 0.0000 79.3621 453.0284 238.4135 4.1089 0.0000
+0 2000 317 7.00 0.417 -13.8475 -1.5300 21.3977 0.0000 150.6593 506.4265 169.2748 4.7065 0.0000
+0 2000 317 17.00 0.583 -17.3407 -1.7964 0.0000 0.0000 120.9833 524.5208 140.1385 9.7721 0.0000
+0 2000 318 7.00 0.417 -12.2905 -2.0920 24.8530 0.0000 247.3270 548.9512 103.5916 4.8270 0.0000
+0 2000 318 17.00 0.583 -9.5554 -2.1307 0.0000 0.0000 356.2227 592.4595 58.5218 3.5164 0.0000
+0 2000 319 7.00 0.417 -2.3705 -2.1675 22.9072 0.0000 547.7959 552.3919 97.1408 1.9150 0.0000
+0 2000 319 17.00 0.583 -5.7136 -1.8382 0.0000 0.0000 343.2259 479.8930 183.0178 2.5407 0.0000
+0 2000 320 7.00 0.417 -11.4880 -2.0385 17.8276 0.5080 132.5535 419.1296 235.5793 8.6375 0.0000
+0 2000 320 17.00 0.583 -14.4004 -2.2625 0.0000 0.0000 109.2204 445.6822 200.0085 5.7921 0.0000
+0 2000 321 7.00 0.417 -13.0660 -2.4610 20.9236 0.0000 167.9125 470.7415 167.0305 4.4600 0.0000
+0 2000 321 17.00 0.583 -13.2668 -2.6561 0.0000 0.0000 198.8484 497.4747 132.6509 7.6575 0.0000
+0 2000 322 7.00 0.417 -11.4280 -2.7770 15.4640 0.0000 229.8389 484.8098 140.6004 3.0545 0.0000
+0 2000 322 17.00 0.583 -15.9429 -2.7107 0.0000 2.0320 111.7638 455.8547 172.1319 4.9943 0.0000
+0 2000 323 7.00 0.417 -6.4435 -2.7630 23.1575 0.0000 330.7355 451.6031 174.3656 10.7780 0.0000
+0 2000 323 17.00 0.583 -9.6629 -2.3679 0.0000 0.2540 191.9040 422.5807 218.8857 13.5475 0.0000
+0 2000 324 7.00 0.417 -7.3130 -2.3645 22.7799 0.0000 263.8603 429.6461 211.9567 12.3130 0.0000
+0 2000 324 17.00 0.583 -8.1671 -2.1818 0.0000 0.5080 177.8365 376.6889 272.2249 10.2582 0.0000
+0 2000 325 7.00 0.417 -4.4930 -2.1010 22.7117 0.0000 421.7439 506.0032 146.1740 9.2385 0.0000
+0 2000 325 17.00 0.583 -3.2871 -1.9329 0.0000 0.0000 522.9184 575.5583 83.4556 7.6936 0.0000
+0 2000 326 7.00 0.417 -0.0380 -1.8175 19.1515 0.0000 576.1025 496.1003 167.6611 5.0095 0.0000
+0 2000 326 17.00 0.583 -1.0007 -1.5632 0.0000 0.0000 544.9745 520.5369 153.7724 2.3771 0.0000
+0 2000 327 7.00 0.417 2.0065 -1.4405 19.7091 0.0000 706.1852 539.7307 139.7569 2.2475 0.0000
+0 2000 327 17.00 0.583 -5.6286 -1.3825 0.0000 0.0000 299.4635 455.2758 226.6813 1.8568 0.0000
+0 2000 328 7.00 0.417 -2.7870 -1.8685 20.1085 1.0200 415.8755 449.6115 212.0467 1.8260 0.0000
+0 2000 328 17.00 0.604 -3.8390 -1.9269 0.0000 0.0000 439.2121 512.3405 146.9780 3.7817 0.0000
+0 2000 329 7.50 0.396 -0.6853 -2.1547 21.4411 0.0000 570.1965 505.8925 144.1404 4.3968 0.0000
+0 2000 329 17.00 0.604 -8.8734 -2.0179 0.0000 0.0000 177.3230 397.3591 258.2255 2.2990 0.0000
+0 2000 330 7.50 0.396 -5.9268 -2.4274 21.3295 0.0000 330.7287 453.1026 186.0057 6.2937 0.0000
+0 2000 330 17.00 0.604 -4.9669 -2.2893 0.0000 0.0000 417.0926 514.9705 129.6288 10.8683 0.0000
+0 2000 331 7.50 0.396 -6.3042 -2.2563 9.3839 0.0000 162.3364 304.9725 340.9516 10.3589 0.0000
+0 2000 331 17.00 0.604 -5.5838 -2.0931 0.0000 0.2540 165.7420 292.2432 360.2501 6.8883 0.0000
+0 2000 332 7.50 0.396 -4.1842 -1.9321 8.8285 2.7940 135.7005 221.1862 437.8726 5.2163 0.0000
+0 2000 332 17.00 0.604 -6.1376 -1.6107 0.0000 7.1120 106.6884 270.7174 401.6129 7.8193 0.0000
+0 2000 333 7.50 0.396 -5.4747 -1.4626 17.4206 0.5080 171.0946 319.9787 358.5549 11.8011 0.0000
+0 2000 333 17.00 0.604 -4.6255 -1.3517 0.0000 0.0000 243.9075 369.1637 314.0577 9.4334 0.0000
+0 2000 334 7.50 0.396 -0.2947 -1.2921 20.5178 0.0000 415.9856 369.1623 316.5941 9.5795 0.0000
+0 2000 334 17.00 0.604 -0.1872 -1.1124 0.0000 0.7620 399.2843 353.7861 339.6671 7.8459 0.0000
+0 2000 335 7.50 0.375 -6.3700 -0.9950 11.1074 1.5240 205.2648 402.5768 295.9480 5.3378 0.0000
+0 2000 335 16.50 0.625 -10.4257 -1.0923 0.0000 0.0000 172.4454 473.9842 220.3404 4.2310 0.0000
+0 2000 336 7.50 0.375 -6.5706 -1.4011 20.3758 0.0000 360.8920 544.3937 136.7406 3.3411 0.0000
+0 2000 336 16.50 0.625 -5.4147 -1.5470 0.0000 0.0000 437.3266 579.7883 95.2062 1.8793 0.0000
+0 2000 337 7.50 0.375 1.0739 -1.7239 20.1490 0.0000 624.3010 494.3193 173.2985 3.6717 0.0000
+0 2000 337 16.50 0.625 -0.5580 -1.4623 0.0000 0.0000 549.3412 509.8256 168.7268 6.2890 0.0000
+0 2000 338 7.50 0.375 0.3983 -1.2950 15.0800 0.0000 576.1136 498.0738 187.5618 8.2772 0.0000
+0 2000 338 16.50 0.625 -2.9430 -1.1790 0.0000 0.0000 433.5766 503.2814 187.3074 6.3613 0.0000
+0 2000 339 7.50 0.375 -3.7500 -1.2717 19.9105 0.0000 335.7610 427.8756 258.7505 2.1278 0.0000
+0 2000 339 16.50 0.625 -2.6787 -1.2767 0.0000 0.0000 442.3033 499.2542 187.1589 8.1043 0.0000
+0 2000 340 7.50 0.375 -4.4122 -1.3067 7.8786 2.5300 237.2014 355.2683 329.8669 8.5139 0.0000
+0 2000 340 16.50 0.625 -6.6830 -1.2563 0.0000 1.2500 220.5127 405.9031 281.3774 7.0570 0.0000
+0 2000 341 7.50 0.375 2.6922 -1.2822 18.7036 0.0000 604.1269 408.3601 277.8187 6.6767 0.0000
+0 2000 341 16.50 0.625 -0.5763 -1.0913 0.0000 0.0000 408.8528 385.0640 309.2948 5.2440 0.0000
+0 2000 342 7.50 0.375 0.8156 -1.0406 6.0169 0.0000 500.6869 413.1459 283.4091 4.8861 0.0000
+0 2000 342 16.50 0.625 -6.0757 -1.2170 0.0000 0.0000 123.2912 298.6035 390.4209 2.2293 0.0000
+0 2000 343 7.50 0.375 -2.4572 -1.5778 15.9374 0.0000 297.6020 332.7507 340.9524 5.7250 0.0000
+0 2000 343 16.50 0.625 -3.7297 -1.5213 0.0000 0.0000 163.2834 249.6545 426.4099 3.9143 0.0000
+0 2000 344 7.50 0.375 -4.0972 -1.6011 13.5984 0.5000 172.2229 268.4778 404.2463 11.4217 0.0000
+0 2000 344 16.50 0.625 -4.9083 -1.8543 0.0000 0.0000 204.4528 318.5214 343.7354 11.9503 0.0000
+0 2000 345 7.50 0.375 -3.2811 -1.9450 13.4389 0.0000 227.2081 279.1002 379.4311 4.6494 0.0000
+0 2000 345 16.50 0.625 -11.4520 -1.7483 0.0000 3.5400 67.9923 357.2137 309.3984 3.1337 0.0000
+0 2000 346 7.50 0.375 -16.4961 -2.1539 14.2600 1.0100 89.5001 465.5165 184.5389 9.9539 0.0000
+0 2000 346 16.50 0.625 -13.3137 -2.5397 0.0000 0.0000 112.5771 416.7412 217.9385 2.7833 0.0000
+0 2000 347 7.50 0.375 -7.3172 -2.6111 15.8619 0.0000 150.9836 307.3688 324.4975 2.1594 0.0000
+0 2000 347 16.50 0.625 -9.8937 -2.3440 0.0000 0.5000 45.3415 283.2678 359.1501 1.3903 0.0000
+0 2000 348 7.50 0.375 -10.3922 -2.2789 3.8499 0.5000 56.3557 309.1992 335.8166 2.9794 0.0000
+0 2000 348 16.50 0.625 -9.8203 -2.2143 0.0000 1.2500 104.3687 345.9492 301.6548 5.3250 0.0000
+0 2000 349 7.50 0.375 -7.5933 -2.1611 9.3558 1.5000 110.4654 295.0650 354.6819 6.1444 0.0000
+0 2000 349 16.50 0.625 -8.3747 -2.0120 0.0000 0.0000 131.7423 344.1958 311.5907 8.3283 0.0000
+0 2000 350 7.50 0.375 -8.5211 -1.9811 7.9553 3.7900 106.5817 324.1682 332.8751 9.8650 0.0000
+0 2000 350 16.50 0.625 -14.2987 -1.9213 0.0000 3.5400 78.3459 424.0017 235.4835 7.3783 0.0000
+0 2000 351 7.50 0.375 -13.9633 -1.9722 13.9726 0.5000 118.6900 455.9406 201.4662 16.1233 0.0000
+0 2000 351 16.50 0.625 -6.8677 -2.0643 0.0000 0.0000 195.1894 355.5878 298.0713 12.4490 0.0000
+0 2000 352 7.50 0.375 -4.6417 -1.9828 9.9765 2.5500 268.8043 354.9307 302.0468 8.1467 0.0000
+0 2000 352 16.50 0.625 -14.6613 -1.8327 0.0000 1.2500 81.1155 439.1722 223.9525 7.6167 0.0000
+0 2000 353 7.50 0.375 -14.6361 -1.9006 11.6578 0.2500 103.3768 457.4910 202.8464 9.6683 0.0000
+0 2000 353 16.50 0.625 -9.2070 -2.0337 0.0000 0.0000 303.1995 534.9451 119.9606 12.1817 0.0000
+0 2000 354 7.50 0.375 -4.7917 -2.0994 11.8292 0.0000 425.4700 524.1296 128.1068 14.0344 0.0000
+0 2000 354 16.50 0.625 -7.9920 -2.0247 0.0000 5.0400 144.6127 344.3424 310.9292 9.7547 0.0000
+0 2000 355 7.50 0.375 -12.5678 -1.9322 7.9530 0.2500 106.4136 420.5634 238.4765 6.9556 0.0000
+0 2000 355 16.50 0.625 -13.7783 -1.9117 0.0000 0.0000 126.5152 465.8167 194.0645 13.2263 0.0000
+0 2000 356 7.50 0.375 -9.5967 -1.9994 17.0519 0.0000 163.2825 405.6071 250.6898 13.0878 0.0000
+0 2000 356 16.50 0.625 -4.7010 -2.0140 0.0000 0.2500 156.6856 255.8624 399.8422 9.0293 0.0000
+0 2000 357 7.50 0.375 -4.6250 -1.8756 9.7149 0.0000 221.2810 324.7592 336.6042 5.0044 0.0000
+0 2000 357 16.50 0.625 -8.0870 -1.7020 0.0000 3.7600 115.6019 332.3000 336.2270 7.8243 0.0000
+0 2000 358 7.50 0.375 -6.3472 -1.6283 18.6169 0.0000 204.6563 373.5919 297.9962 10.1083 0.0000
+0 2000 358 16.50 0.625 -6.3157 -1.6050 0.0000 0.0000 260.2353 430.0540 242.5076 2.5167 0.0000
+0 2000 359 7.50 0.375 -2.8961 -1.6417 13.3788 0.0000 402.4822 450.9309 220.1020 2.0539 0.0000
+0 2000 359 16.50 0.625 -7.1343 -1.6223 0.0000 1.0000 91.1814 281.9316 389.9081 1.3347 0.0000
+0 2000 360 7.50 0.375 -9.6444 -1.5572 4.0377 0.5000 30.6709 294.8747 379.6847 1.9806 0.0000
+0 2000 360 16.50 0.625 -10.6963 -1.5420 0.0000 0.0000 83.2872 371.9201 303.2771 2.1133 0.0000
+0 2000 361 7.50 0.375 -3.1672 -1.6300 19.0489 0.5000 386.6363 443.7712 227.7491 2.1711 0.0000
+0 2000 361 16.50 0.625 -3.8553 -1.7170 0.0000 0.0000 352.5211 435.0552 232.8470 3.4223 0.0000
+0 2000 362 7.50 0.375 0.6033 -1.7856 18.9955 0.0000 521.2530 412.1019 252.9642 4.6622 0.0000
+0 2000 362 16.50 0.625 -2.8707 -1.7247 0.0000 0.0000 299.5692 342.9784 324.6092 9.0087 0.0000
+0 2000 363 7.50 0.375 -4.7572 -1.5967 18.1696 0.0000 260.4453 379.5170 293.3930 7.8806 0.0000
+0 2000 363 16.50 0.625 -8.6213 -1.5423 0.0000 0.0000 242.2704 480.3309 194.8524 7.7747 0.0000
+0 2000 364 7.50 0.375 -6.3061 -1.6289 18.4758 0.0000 371.7044 539.4835 132.0827 6.8611 0.0000
+0 2000 364 16.50 0.625 -7.2433 -1.7157 0.0000 0.0000 341.7676 534.8584 133.0996 9.3560 0.0000
+0 2000 365 7.50 0.375 -6.4867 -1.8011 11.4733 0.7500 167.5592 333.9761 330.4483 10.0400 0.0000
+0 2000 365 16.50 0.625 -7.9283 -1.7920 0.0000 1.2500 116.5070 325.3541 339.4463 6.3193 0.0000
+0 2000 366 7.50 0.375 -7.4994 -1.7456 19.1714 0.0000 214.9027 413.3211 253.3980 9.9117 0.0000
+0 2000 366 16.50 0.625 -7.4550 -1.7413 0.0000 0.0000 176.1126 374.2153 292.6786 12.1683 0.0000
+0 2001 1 7.50 0.375 -5.9294 -1.7533 15.0754 0.0000 183.3719 334.3958 332.0015 7.7367 0.0000
+0 2001 1 16.50 0.625 -9.0153 -1.7133 0.0000 0.0000 203.1537 444.5958 223.4584 4.4407 0.0000
+0 2001 2 7.50 0.375 -4.7711 -1.7939 19.4409 0.0000 396.8211 505.1350 159.5891 4.0522 0.0000
+0 2001 2 16.50 0.625 -3.2087 -1.8653 0.0000 0.0000 464.3821 514.3518 147.4292 9.9063 0.0000
+0 2001 3 7.50 0.375 0.3167 -1.8239 19.5968 0.0000 498.6161 400.5604 262.9256 5.8261 0.0000
+0 2001 3 16.50 0.625 0.7933 -1.6530 0.0000 0.0000 490.2970 375.5248 295.0418 7.4390 0.0000
+0 2001 4 7.50 0.375 1.9661 -1.4883 19.7518 0.0000 656.7874 491.4325 186.0195 9.0128 0.0000
+0 2001 4 16.50 0.625 0.8583 -1.3410 0.0000 0.0000 575.2117 472.4782 211.1997 9.8360 0.0000
+0 2001 5 7.50 0.375 5.1000 -1.2628 19.8961 0.0000 772.3601 435.4562 251.5494 8.6067 0.0000
+0 2001 5 16.50 0.625 -0.0307 -1.0980 0.0000 0.0000 406.9697 358.3242 335.7493 6.2653 0.0000
+0 2001 6 7.50 0.375 0.5789 -1.0478 20.1541 0.0000 548.8410 473.5343 222.7045 7.0328 0.0000
+0 2001 6 16.50 0.625 -8.2783 -1.0507 0.0000 0.0000 150.0347 384.7932 311.3221 3.2223 0.0000
+0 2001 7 7.50 0.375 -7.5550 -1.2511 19.7465 0.0000 200.4841 418.6852 268.8238 2.2344 0.0000
+0 2001 7 16.50 0.625 -4.0840 -1.4963 0.0000 0.0000 511.0656 609.7708 67.3519 4.9910 0.0000
+0 2001 8 7.50 0.375 0.3872 -1.7089 20.1618 0.0000 693.7423 598.3721 69.8686 2.1283 0.0000
+0 2001 8 16.50 0.625 -1.6390 -1.7687 0.0000 0.0000 613.7836 606.1847 59.5824 2.9223 0.0000
+0 2001 9 7.50 0.375 2.6806 -2.0722 16.4266 0.0000 840.6999 614.4232 38.9241 2.9200 0.0000
+0 2001 9 16.50 0.625 -1.6057 -2.0390 0.0000 0.0000 469.2018 447.3484 207.3448 5.1557 0.0000
+0 2001 10 7.50 0.375 -3.2461 -1.8933 20.4043 0.0000 275.6832 328.5913 332.0417 6.2828 0.0000
+0 2001 10 16.50 0.625 -6.6020 -1.7753 0.0000 0.0000 272.4417 442.4330 223.0558 5.7030 0.0000
+0 2001 11 7.50 0.375 -0.5811 -1.8833 20.8440 0.0000 632.8268 570.2292 90.8146 2.1822 0.0000
+0 2001 11 16.50 0.625 -0.6367 -1.8607 0.0000 0.0000 545.6613 492.5783 169.3949 5.3837 0.0000
+0 2001 12 7.50 0.375 1.6278 -1.8094 20.1136 0.0000 701.3759 540.5539 123.5273 5.0317 0.0000
+0 2001 12 16.50 0.625 -4.6443 -1.6660 0.0000 0.0000 387.2152 495.8951 174.1256 2.3733 0.0000
+0 2001 13 7.50 0.375 -8.3111 -1.7350 12.6832 0.0000 193.7782 414.1111 253.0459 8.9900 0.0000
+0 2001 13 16.50 0.625 -11.7570 -1.9210 0.0000 0.0000 160.0471 458.1639 201.3508 11.3533 0.0000
+0 2001 14 7.50 0.396 -11.5426 -2.2863 7.9346 0.0000 182.4833 461.0585 183.6674 6.8405 0.0000
+0 2001 14 17.00 0.604 -13.2510 -2.5128 0.0000 0.0000 137.5238 441.8703 193.8618 3.6252 0.0000
+0 2001 15 7.50 0.396 -12.8174 -2.7947 13.9228 0.5080 116.5397 401.6866 223.0318 2.4842 0.0000
+0 2001 15 17.00 0.604 -14.9645 -2.9403 0.0000 1.2700 38.7294 358.6147 260.4814 0.9431 0.0000
+0 2001 16 7.50 0.396 -14.8995 -2.9379 9.2624 0.2540 39.0448 357.7294 261.4606 1.8653 0.0000
+0 2001 16 17.00 0.604 -15.8059 -2.8934 0.0000 0.0000 75.6000 410.7643 210.1340 2.0552 0.0000
+0 2001 17 7.50 0.396 -9.5289 -3.0058 21.2695 0.0000 164.0826 365.6857 250.9071 2.2232 0.0000
+0 2001 17 17.00 0.604 -12.4410 -3.0376 0.0000 0.0000 168.6988 436.8631 178.5137 3.0138 0.0000
+0 2001 18 7.50 0.396 -9.2853 -3.0916 22.0725 0.0000 217.0433 409.3810 203.9404 4.7568 0.0000
+0 2001 18 17.00 0.604 -11.0000 -3.0252 0.0000 0.0000 168.1102 405.6320 210.2189 8.3207 0.0000
+0 2001 19 7.50 0.396 -5.5863 -2.9968 22.6825 0.0000 403.7015 492.1841 124.7482 6.0342 0.0000
+0 2001 19 17.00 0.583 -6.2382 -2.8546 0.0000 0.0000 314.5078 430.6332 191.7638 7.9164 0.0000
+0 2001 20 7.00 0.417 -9.4040 -2.7275 17.8058 0.0000 198.2758 409.0602 218.2584 8.0075 0.0000
+0 2001 20 17.00 0.583 -10.2139 -2.7032 0.0000 0.0000 200.0502 430.2610 198.0029 6.6646 0.0000
+0 2001 21 7.00 0.417 -2.9955 -2.8025 23.1631 0.0000 513.8210 518.8714 105.5372 4.6865 0.0000
+0 2001 21 17.00 0.583 -1.9211 -2.6682 0.0000 0.0000 577.2819 546.7405 82.8927 5.1639 0.0000
+0 2001 22 7.00 0.417 -0.8135 -2.5490 10.5440 0.0000 648.2145 572.1162 62.1839 2.1775 0.0000
+0 2001 22 17.00 0.583 -7.2829 -2.3796 0.0000 0.0000 282.5974 448.2473 192.7519 2.0439 0.0000
+0 2001 23 7.00 0.417 -2.2435 -2.4515 17.0057 0.0000 412.2189 403.1484 234.9990 3.0605 0.0000
+0 2001 23 17.00 0.583 -7.2457 -2.3432 0.0000 0.0000 231.2959 397.3437 245.1052 1.7679 0.0000
+0 2001 24 7.00 0.417 -5.0840 -2.5475 23.0042 0.0000 259.9798 350.0066 284.3605 2.0355 0.0000
+0 2001 24 17.00 0.583 -3.8100 -2.5957 0.0000 0.0000 377.8112 421.2765 211.1891 2.2729 0.0000
+0 2001 25 7.00 0.417 -5.5045 -2.6620 14.4913 0.0000 290.8408 387.9303 241.9408 7.8715 0.0000
+0 2001 25 17.00 0.583 -9.9325 -2.6046 0.0000 0.0000 130.4629 359.2364 272.8787 11.8379 0.0000
+0 2001 26 7.00 0.417 -8.8135 -2.6930 19.0327 0.0000 171.1453 366.3893 262.2729 3.1820 0.0000
+0 2001 26 17.00 0.583 -10.8036 -2.6932 0.0000 0.0000 69.4689 313.2685 315.3846 1.3286 0.0000
+0 2001 27 7.00 0.417 -6.4615 -2.7960 11.7301 0.2540 163.3056 288.4030 336.2585 1.9195 0.0000
+0 2001 27 17.00 0.583 -11.6800 -2.7261 0.0000 0.7620 36.2964 299.9524 327.4233 1.6889 0.0000
+0 2001 28 7.00 0.417 -9.6995 -2.7440 14.3777 0.2540 43.5031 260.2845 366.3935 1.6630 0.0000
+0 2001 28 17.00 0.583 -12.2789 -2.7404 0.0000 0.0000 65.7149 341.6933 285.1252 4.0604 0.0000
+0 2001 29 7.00 0.417 -9.4045 -2.7740 23.8943 0.0000 185.8544 393.1539 232.3588 10.0110 0.0000
+0 2001 29 17.00 0.583 -11.2296 -2.7846 0.0000 0.0000 199.8409 451.5510 173.5490 10.9268 0.0000
+0 2001 30 7.00 0.417 -10.7465 -2.8655 15.8386 0.0000 227.6223 464.5388 157.4365 6.6060 0.0000
+0 2001 30 17.00 0.583 -13.5957 -2.8996 0.0000 0.0000 189.7873 485.9337 134.7270 5.7846 0.0000
+0 2001 31 7.00 0.417 -12.0980 -3.0735 18.3573 0.0000 210.2597 469.3354 144.6763 5.3840 0.0000
+0 2001 31 17.00 0.583 -14.6532 -3.1900 0.0000 0.0000 121.9364 426.7714 182.8235 9.5571 0.0000
+0 2001 32 7.00 0.417 -10.5685 -3.3500 26.1207 0.0000 200.5537 410.4101 193.1723 8.0795 0.0000
+0 2001 32 17.00 0.583 -7.1668 -3.3061 0.0000 0.2540 194.3432 322.1595 283.0691 10.7404 0.0000
+0 2001 33 7.00 0.417 -4.3340 -3.1445 23.2526 0.0000 266.2941 307.8072 303.5090 13.0730 0.0000
+0 2001 33 17.00 0.583 -4.5204 -2.8732 0.0000 0.2540 206.0628 265.4089 356.2780 14.7618 0.0000
+0 2001 34 7.00 0.417 -6.0835 -2.6715 10.1452 4.8260 108.9266 228.5151 400.9865 8.8955 0.0000
+0 2001 34 17.00 0.583 -7.1011 -2.5168 0.0000 1.0160 111.1329 267.3351 368.2358 10.2407 0.0000
+0 2001 35 7.00 0.417 -4.8000 -2.4055 26.6452 0.0000 181.6275 267.0885 372.8817 12.3195 0.0000
+0 2001 35 17.00 0.583 -3.6350 -2.2979 0.0000 0.0000 191.7396 242.5947 401.6645 14.1596 0.0000
+0 2001 36 7.00 0.417 -1.8050 -2.1825 17.6762 0.2540 280.6165 263.1276 385.7572 8.0455 0.0000
+0 2001 36 17.00 0.583 -4.2818 -2.0429 0.0000 6.6080 150.3291 234.9058 419.6283 8.2446 0.0000
+0 2001 37 7.00 0.417 -0.5420 -1.9230 26.5003 0.0000 402.6817 340.8692 318.5481 6.1900 0.0000
+0 2001 37 17.00 0.583 -2.4268 -1.8214 0.0000 0.7620 259.1925 282.1742 381.4145 3.1793 0.0000
+0 2001 38 7.00 0.417 -1.1390 -1.7265 25.7582 0.2540 393.9775 367.3596 300.1490 4.8485 0.0000
+0 2001 38 17.00 0.583 -4.9879 -1.6450 0.0000 0.5080 207.8441 329.8065 341.0878 1.8061 0.0000
+0 2001 39 7.00 0.417 -10.1410 -1.6225 9.3121 0.5080 41.9816 314.0192 357.8121 1.5445 0.0000
+0 2001 39 17.00 0.583 -17.8536 -1.6557 0.0000 0.0000 39.1408 452.1145 218.3336 1.3564 0.0000
+0 2001 40 7.00 0.417 -9.6805 -1.8155 27.6138 0.0000 297.4379 539.1779 124.6587 4.2125 0.0000
+0 2001 40 17.00 0.583 -8.8561 -2.0118 0.0000 0.0000 230.3380 455.4451 200.3506 7.3586 0.0000
+0 2001 41 7.00 0.417 -2.7885 -2.0980 28.7437 0.0000 506.7120 528.4510 123.8442 5.6435 0.0000
+0 2001 41 17.00 0.583 -5.3436 -2.0800 0.0000 0.0000 371.6763 490.2106 162.8134 2.8111 0.0000
+0 2001 42 7.00 0.417 -7.8185 -2.0865 19.0458 1.5240 186.3687 379.9512 272.8093 4.8285 0.0000
+0 2001 42 17.00 0.583 -9.3525 -2.0804 0.0000 0.0000 155.8355 391.0119 261.9975 4.1025 0.0000
+0 2001 43 7.00 0.417 -4.0115 -2.1020 29.8030 0.0000 335.1396 404.2614 247.8718 3.9335 0.0000
+0 2001 43 17.00 0.583 -4.6236 -2.0868 0.0000 0.0000 372.4073 467.0805 185.6687 3.5561 0.0000
+0 2001 44 7.00 0.438 -1.2500 -2.0667 29.0416 0.0000 506.8369 470.2259 183.3384 4.3614 0.0000
+0 2001 44 17.50 0.562 -9.7819 -1.9841 0.0000 2.7980 56.9778 301.9263 354.9976 3.1419 0.0000
+0 2001 45 7.00 0.438 -7.0176 -1.9514 30.0132 0.7620 212.0611 388.5239 269.7307 8.3829 0.0000
+0 2001 45 17.50 0.562 -9.5581 -1.9607 0.0000 0.0000 160.6628 405.8830 251.9913 10.5744 0.0000
+0 2001 46 7.00 0.438 -7.3738 -2.0010 30.5555 0.0000 263.8250 447.4762 208.7590 6.9343 0.0000
+0 2001 46 17.50 0.562 -7.5193 -2.0422 0.0000 0.0000 242.8308 429.4098 225.1465 5.1119 0.0000
+0 2001 47 7.00 0.438 -3.3324 -2.0867 24.5785 0.0000 409.8168 455.3793 197.3746 3.3510 0.0000
+0 2001 47 17.50 0.542 -5.6365 -2.0612 0.0000 0.0000 288.7156 416.8727 236.9160 4.4854 0.0000
+0 2001 48 6.50 0.458 -2.9050 -2.0082 30.6698 0.0000 443.8453 473.2202 182.7202 5.2900 0.0000
+0 2001 48 17.50 0.542 -2.8765 -1.9777 0.0000 0.0000 455.9831 491.4491 165.7339 2.6008 0.0000
+0 2001 49 6.50 0.458 -2.3973 -1.9623 20.5634 0.0000 351.6598 368.6628 289.1489 3.7500 0.0000
+0 2001 49 17.50 0.542 -3.3723 -1.8981 0.0000 0.0000 216.2070 273.8801 386.5587 12.2435 0.0000
+0 2001 50 6.50 0.458 -2.2755 -1.8191 21.1142 0.2540 271.3216 288.8733 374.8100 5.0777 0.0000
+0 2001 50 17.50 0.542 -3.5323 -1.7173 0.0000 0.0000 298.0559 369.0103 298.8803 7.4515 0.0000
+0 2001 51 6.50 0.458 -0.7473 -1.6682 28.1050 0.0000 481.3543 440.3629 229.5660 6.9759 0.0000
+0 2001 51 17.50 0.542 -3.7431 -1.5996 0.0000 0.0000 236.2182 319.1861 353.6014 4.5042 0.0000
+0 2001 52 6.50 0.458 -5.2755 -1.5595 14.8718 2.5400 133.4318 271.6684 402.7937 3.9405 0.0000
+0 2001 52 17.50 0.542 -5.5350 -1.5431 0.0000 2.5400 120.3917 268.1288 407.0231 4.1373 0.0000
+0 2001 53 6.50 0.458 -3.1386 -1.5305 26.1428 0.2540 248.3906 309.6746 366.0066 5.5341 0.0000
+0 2001 53 17.50 0.542 -3.5588 -1.4877 0.0000 0.5080 209.1729 290.1830 387.2951 2.1804 0.0000
+0 2001 54 6.50 0.458 0.6105 -1.4623 27.8523 0.0000 567.2286 468.4875 210.0601 3.8986 0.0000
+0 2001 54 17.50 0.542 -5.8392 -1.4350 0.0000 0.0000 229.8914 386.8697 292.8283 5.1915 0.0000
+0 2001 55 6.50 0.458 -8.0950 -1.4241 31.4188 0.0000 201.4133 430.4030 249.7552 10.5982 0.0000
+0 2001 55 17.50 0.542 -10.4019 -1.4950 0.0000 0.0000 134.7669 419.9186 257.2527 9.3646 0.0000
+0 2001 56 6.50 0.458 -6.1455 -1.6145 33.1538 0.0000 262.9659 424.6207 247.5438 10.3745 0.0000
+0 2001 56 17.50 0.542 -7.7862 -1.6777 0.0000 0.0000 215.6706 425.3479 244.1857 7.4254 0.0000
+0 2001 57 6.50 0.458 -6.2905 -1.7432 22.5738 0.0000 164.5428 326.6005 340.2177 2.8059 0.0000
+0 2001 57 17.50 0.542 -7.5192 -1.7723 0.0000 0.0000 86.0144 282.4459 383.1671 1.5512 0.0000
+0 2001 58 6.50 0.458 -12.4809 -1.8136 14.7517 1.7780 35.7295 353.1315 310.7769 1.4982 0.0000
+0 2001 58 17.50 0.542 -7.8700 -1.8788 0.0000 0.0000 107.4401 310.6899 350.5366 2.4758 0.0000
+0 2001 59 6.50 0.458 -4.8368 -1.9559 23.2735 0.0000 130.6655 236.5813 421.4911 1.9091 0.0000
+0 2001 59 17.50 0.542 -7.3519 -1.9569 0.0000 5.8420 83.9432 268.7472 389.2837 6.9115 0.0000
+0 2001 60 6.50 0.458 -5.9818 -1.8818 36.4646 0.0000 219.7047 365.5449 295.5597 7.8864 0.0000
+0 2001 60 17.50 0.542 -8.8323 -1.8296 0.0000 0.0000 119.5250 351.0616 312.1884 2.6446 0.0000
+0 2001 61 6.50 0.458 -5.4841 -1.8255 37.1761 0.0000 304.5150 436.6245 226.7966 5.2809 0.0000
+0 2001 61 17.50 0.542 -8.2369 -1.8385 0.0000 0.0000 218.2539 433.8283 229.0573 3.8538 0.0000
+0 2001 62 6.50 0.458 -5.4877 -1.8636 30.0456 0.0000 272.8593 403.6649 258.1863 2.7482 0.0000
+0 2001 62 17.50 0.542 -8.8085 -1.8869 0.0000 1.2700 72.0372 300.5111 360.3839 2.5904 0.0000
+0 2001 63 6.50 0.458 -1.8945 -1.8614 37.8338 0.0000 516.9231 509.7031 152.2409 3.2473 0.0000
+0 2001 63 17.50 0.542 -1.8085 -1.8277 0.0000 0.2540 522.4608 521.2848 142.0447 3.2123 0.0000
+0 2001 64 6.50 0.458 1.2523 -1.7795 26.5878 0.0000 517.7363 377.5239 287.7902 3.7018 0.0000
+0 2001 64 17.50 0.542 -1.0773 -1.6788 0.0000 0.0000 348.1168 322.5160 346.9725 2.2542 0.0000
+0 2001 65 6.50 0.458 -0.2550 -1.5477 17.9849 0.0000 340.5821 283.2464 391.7118 2.3823 0.0000
+0 2001 65 17.50 0.542 -3.9981 -1.4154 0.0000 1.5240 72.1728 172.5952 507.9330 1.4923 0.0000
+0 2001 66 6.50 0.458 -1.7164 -1.3114 19.9908 2.0320 134.8061 149.5567 535.3789 1.5991 0.0000
+0 2001 66 17.50 0.542 -4.1804 -1.2358 0.0000 0.2540 92.8048 206.4234 481.7354 2.1588 0.0000
+0 2001 67 6.50 0.458 -0.6645 -1.2050 28.6531 0.0000 312.3461 283.3717 406.1024 1.9459 0.0000
+0 2001 67 17.50 0.542 -1.9846 -1.2027 0.0000 0.2540 257.9946 290.4967 399.0762 3.8738 0.0000
+0 2001 68 6.50 0.479 0.0457 -1.2070 19.7008 0.0000 414.3256 355.0692 334.3210 5.7191 0.0000
+0 2001 68 18.00 0.521 -2.8568 -1.1616 0.0000 1.0160 161.7129 227.9368 463.3987 1.2096 0.0000
+0 2001 69 6.50 0.479 -5.7122 -1.1117 3.8102 8.6440 63.4204 234.3066 459.1720 3.5191 0.0000
+0 2001 69 18.00 0.521 -9.1704 -1.1028 0.0000 0.2540 56.1432 327.6314 366.2323 2.9608 0.0000
+0 2001 70 6.50 0.479 -6.9722 -1.1313 3.3030 2.2860 110.9298 319.1700 373.4668 3.1952 0.0000
+0 2001 70 18.00 0.500 -8.4292 -1.1746 0.0000 1.5240 65.4215 314.4495 376.3282 10.6704 0.0000
+0 2001 71 6.00 0.500 -7.8725 -1.2079 35.9534 0.0000 132.3296 364.1022 325.2470 12.5754 0.0000
+0 2001 71 18.00 0.500 -6.7775 -1.2350 0.0000 0.2540 122.4638 321.7033 366.4875 14.4404 0.0000
+0 2001 72 6.00 0.500 -3.4354 -1.2479 36.7301 0.0000 262.6359 348.3283 339.3107 5.6775 0.0000
+0 2001 72 18.00 0.500 -5.2454 -1.2383 0.0000 0.0000 228.3713 378.9033 309.1451 7.8021 0.0000
+0 2001 73 6.00 0.500 -8.6479 -1.2075 10.9714 6.0960 111.5548 359.9189 329.4480 6.7137 0.0000
+0 2001 73 18.00 0.500 -12.0354 -1.2000 0.0000 0.2540 103.2645 437.3806 252.3074 10.3179 0.0000
+0 2001 74 6.00 0.500 -10.3462 -1.2121 36.7814 0.0000 179.4281 474.1147 215.0562 3.7042 0.0000
+0 2001 74 18.00 0.500 -11.4708 -1.2475 0.0000 0.2540 48.3768 368.4041 319.2528 1.5483 0.0000
+0 2001 75 6.00 0.500 -8.7667 -1.2829 18.4000 1.0160 62.2177 314.8013 371.3450 2.1858 0.0000
+0 2001 75 18.00 0.500 -9.2183 -1.3125 0.0000 4.5720 27.6679 291.5438 393.3431 1.2317 0.0000
+0 2001 76 6.00 0.500 -8.8496 -1.3200 14.4668 0.7620 27.8766 281.5860 402.9820 1.4146 0.0000
+0 2001 76 18.00 0.500 -8.3737 -1.3200 0.0000 0.2540 74.3069 315.4691 369.0988 5.6483 0.0000
+0 2001 77 6.00 0.500 -6.1413 -1.3133 35.0651 0.2540 204.0833 379.0333 305.8181 5.5667 0.0000
+0 2001 77 18.00 0.500 -7.3417 -1.3071 0.0000 0.0000 202.6249 415.4700 269.6472 6.4567 0.0000
+0 2001 78 6.00 0.500 -0.7887 -1.2933 44.6895 0.0000 398.6163 370.1501 315.5523 8.5875 0.0000
+0 2001 78 18.00 0.500 0.2554 -1.2767 0.0000 0.0000 321.5937 252.9251 433.4874 11.6379 0.0000
+0 2001 79 6.00 0.500 2.6504 -1.2313 26.2226 0.0000 395.4748 198.2859 490.0655 3.2971 0.0000
+0 2001 79 18.00 0.500 1.3446 -1.0829 0.0000 0.0000 315.0246 200.9316 493.7931 2.2575 0.0000
+0 2001 80 6.00 0.500 2.9162 -0.9154 32.3424 0.2540 413.3013 216.5610 485.4313 4.4142 0.0000
+0 2001 80 18.00 0.500 -0.5600 -0.5792 0.0000 0.7620 191.4002 189.5317 527.2773 5.0158 0.0000
+0 2001 81 6.00 0.500 2.0692 -0.4437 35.3054 0.0000 359.1920 232.6676 490.1903 3.0108 0.0000
+0 2001 81 18.00 0.500 -0.1688 -0.3342 0.0000 0.0000 264.2826 256.6241 471.1721 4.5171 0.0000
+0 2001 82 6.00 0.500 -0.0079 -0.2596 18.0500 2.2860 215.8172 198.8993 532.2762 3.1783 0.0000
+0 2001 82 18.00 0.500 -3.6604 -0.2125 0.0000 0.0000 80.7433 221.4855 511.8317 2.1925 0.0000
+0 2001 83 6.00 0.500 1.1996 -0.1854 39.1604 0.0000 395.0650 324.8183 409.7337 2.7917 0.0000
+0 2001 83 18.00 0.500 -1.0229 -0.1446 0.0000 0.7620 128.2683 166.9711 569.4469 2.2137 0.0000
+0 2001 84 6.00 0.500 0.6167 -0.1217 32.3280 1.0160 266.0098 222.5325 514.9346 4.3592 0.0000
+0 2001 84 18.00 0.500 -2.4858 -0.0338 0.0000 13.7280 45.4816 150.0043 591.5037 1.4633 0.0000
+0 2001 85 6.00 0.500 -0.8650 0.0050 32.8136 1.2700 169.7363 207.5390 535.7547 2.5079 0.0000
+0 2001 85 18.00 0.500 -3.5808 0.0196 0.0000 0.5080 113.0070 261.5053 482.4621 1.9913 0.0000
+0 2001 86 6.00 0.500 -3.3129 0.0321 22.1229 0.2540 252.3437 390.3226 354.2229 2.9721 0.0000
+0 2001 86 18.00 0.500 -6.9488 0.0442 0.0000 0.0000 162.3954 423.0486 322.0561 4.4296 0.0000
+0 2001 87 6.00 0.500 -5.4104 0.0471 16.9506 1.2700 157.0315 369.2318 376.0079 4.3733 0.0000
+0 2001 87 18.00 0.500 -4.9833 0.0288 0.0000 1.0160 118.7687 317.3459 427.0454 3.2858 0.0000
+0 2001 88 6.00 0.500 -4.9375 0.0075 10.7874 7.6260 35.7101 231.4824 511.9267 1.6525 0.0000
+0 2001 88 18.00 0.500 -6.1458 -0.0171 0.0000 2.0320 53.6630 287.7163 454.5583 1.8125 0.0000
+0 2001 89 6.00 0.500 -4.4679 -0.0396 32.4589 0.2540 178.9500 355.1060 386.1316 7.6712 0.0000
+0 2001 89 18.00 0.500 -6.1550 -0.0604 0.0000 1.0160 124.7102 355.1866 385.0923 5.5821 0.0000
+0 2001 90 6.00 0.521 -2.9516 -0.0796 44.7116 0.2540 340.8687 453.6358 285.7613 6.6444 0.0000
+0 2001 90 18.50 0.479 -0.8870 -0.0957 0.0000 0.2540 353.3404 388.6312 350.0288 13.0178 0.0000
+0 2001 91 6.00 0.521 2.5072 -0.1056 35.6071 0.2540 507.0892 370.7301 367.4735 7.6116 0.0000
+0 2001 91 18.50 0.479 -0.9561 -0.0570 0.0000 0.0000 204.8212 243.0987 497.3395 1.6870 0.0000
+0 2001 92 6.00 0.521 3.8756 -0.0248 29.9308 0.0000 567.9062 355.1746 386.7442 3.8344 0.0000
+0 2001 92 18.50 0.479 1.3083 0.0083 0.0000 0.0000 445.3460 378.6924 364.7521 4.0057 0.0000
+0 2001 93 6.00 0.521 4.8680 0.0484 34.5607 0.0000 520.7252 257.8415 487.4596 2.5828 0.0000
+0 2001 93 18.50 0.458 2.9623 0.1386 0.0000 0.0000 535.8399 389.1176 360.3745 2.7841 0.0000
+0 2001 94 5.50 0.542 4.2196 0.2073 35.3997 0.0000 458.3661 235.9796 516.7163 2.5169 0.0000
+0 2001 94 18.50 0.458 1.9655 0.2673 0.0000 0.0000 330.5235 245.6384 509.8669 2.4391 0.0000
+0 2001 95 5.50 0.542 5.5077 0.2946 31.3765 0.0000 782.3035 487.9144 268.8753 3.7500 0.0000
+0 2001 95 18.50 0.458 1.8882 0.3123 0.0000 0.0000 472.9799 393.5636 364.0568 3.5836 0.0000
+0 2001 96 5.50 0.542 -0.6700 0.3246 24.9354 3.0480 183.5107 224.6568 533.5449 6.1419 0.0000
+0 2001 96 18.50 0.458 -4.7118 0.3300 0.0000 2.7940 147.7938 351.1137 407.3416 15.3455 0.0000
+0 2001 97 5.50 0.542 -0.5508 0.3365 35.0737 0.0000 392.3436 426.0135 332.7500 5.4815 0.0000
+0 2001 97 18.50 0.458 -2.7127 0.3400 0.0000 0.0000 262.3061 390.8386 368.0881 1.9986 0.0000
+0 2001 98 5.50 0.542 -1.3392 0.3435 38.1186 0.7620 331.2822 402.9103 356.1797 4.3654 0.0000
+0 2001 98 18.50 0.458 -5.7627 0.3500 0.0000 0.0000 204.9524 443.9944 315.4040 3.9727 0.0000
+0 2001 99 5.50 0.542 -0.4996 0.3481 42.6251 0.0000 429.0125 461.3744 297.9333 3.0969 0.0000
+0 2001 99 18.50 0.458 -3.9818 0.3500 0.0000 0.0000 76.6126 252.8000 506.5983 1.7282 0.0000
+0 2001 100 5.50 0.542 -5.8815 0.3538 12.6659 4.3180 18.1468 261.3306 498.2493 1.9827 0.0000
+0 2001 100 18.50 0.458 -6.9959 0.3564 0.0000 7.6200 20.2775 297.8539 461.8448 2.1645 0.0000
+0 2001 101 5.50 0.542 -5.9942 0.3558 28.3235 1.5240 86.1791 332.1109 427.5598 4.4696 0.0000
+0 2001 101 18.50 0.458 -8.3395 0.3600 0.0000 0.2540 23.5854 339.0837 420.7866 0.9914 0.0000
+0 2001 102 5.50 0.542 -6.3065 0.3600 19.3135 2.2900 19.5870 275.3111 484.5593 1.8573 0.0000
+0 2001 102 18.50 0.458 -7.6150 0.3600 0.0000 1.5240 82.6283 376.9727 382.8976 7.3650 0.0000
+0 2001 103 5.50 0.542 -4.1608 0.3558 49.2782 0.0000 345.6742 519.4306 240.2401 9.5281 0.0000
+0 2001 103 18.50 0.458 -2.9895 0.3500 0.0000 0.0000 387.5984 529.3640 230.0343 4.5518 0.0000
+0 2001 104 5.50 0.542 -1.7742 0.3500 35.7320 2.2860 296.2834 388.2645 371.1339 5.7085 0.0000
+0 2001 104 18.50 0.458 -2.6827 0.3500 0.0000 0.5080 210.1257 340.1964 419.2020 7.5218 0.0000
+0 2001 105 5.50 0.542 2.1015 0.3412 46.4231 0.0000 535.8978 439.3615 319.6196 7.1150 0.0000
+0 2001 105 18.50 0.458 0.2945 0.3409 0.0000 0.0000 420.1500 412.5920 346.3776 5.9800 0.0000
+0 2001 106 5.50 0.542 -3.4050 0.3450 50.1350 0.0000 121.6962 273.2895 485.8731 2.4854 0.0000
+0 2001 106 18.50 0.458 -0.8205 0.3500 0.0000 0.0000 376.6713 425.4974 333.9010 2.1745 0.0000
+0 2001 107 5.50 0.542 7.3331 0.3473 52.6547 0.0000 819.1013 397.2006 362.0708 2.6650 0.0000
+0 2001 107 18.50 0.458 5.7359 0.3591 0.0000 0.0000 774.1409 472.6431 287.1843 2.0986 0.0000
+0 2001 108 5.50 0.542 10.1615 0.3627 46.5822 0.0000 1159.9313 516.8404 243.1571 3.6019 0.0000
+0 2001 108 18.50 0.458 7.8668 0.3759 0.0000 0.0000 950.0300 499.5199 261.1020 9.3900 0.0000
+0 2001 109 5.50 0.542 7.3535 0.3800 35.2712 0.0000 938.2193 526.2932 234.5220 6.0912 0.0000
+0 2001 109 18.50 0.458 5.0932 0.3818 0.0000 0.0000 862.2093 602.0894 158.8118 3.6968 0.0000
+0 2001 110 5.50 0.542 2.2931 0.3869 22.0124 0.7620 535.5144 437.0372 324.1053 4.9523 0.0000
+0 2001 110 18.50 0.458 -1.6518 0.3900 0.0000 0.0000 433.0454 522.1956 239.0925 4.3109 0.0000
+0 2001 111 5.50 0.542 -2.8592 0.3900 13.7646 8.1320 36.5057 175.1223 586.1658 2.2815 0.0000
+0 2001 111 18.50 0.458 -3.6791 0.3900 0.0000 21.8540 8.5677 176.6918 584.5963 1.5886 0.0000
+0 2001 112 5.50 0.542 -6.3954 0.3908 16.3759 12.3360 20.4275 280.9887 480.3357 3.2431 0.0000
+0 2001 112 18.50 0.458 -7.2950 0.4000 0.0000 0.2000 33.6589 322.0797 439.6815 6.3418 0.0000
+0 2001 113 5.50 0.542 -3.5288 0.3946 51.4127 3.8120 148.0446 309.0806 452.4258 6.0662 0.0000
+0 2001 113 18.50 0.458 -2.3755 0.3900 0.0000 0.0000 207.5403 327.5955 433.6926 8.5605 0.0000
+0 2001 114 5.50 0.562 2.3448 0.3900 55.6232 0.0000 437.1328 331.7628 429.5252 3.8911 0.0000
+0 2001 114 19.00 0.438 0.2776 0.3900 0.0000 0.0000 253.8592 259.0260 502.2620 2.8152 0.0000
+0 2001 115 5.50 0.562 6.0385 0.3900 48.0744 0.0000 652.7496 325.4106 435.8775 2.5467 0.0000
+0 2001 115 19.00 0.438 3.5567 0.3952 0.0000 0.0000 490.9459 325.6255 435.9103 2.9743 0.0000
+0 2001 116 5.50 0.562 8.0211 0.3930 40.1206 0.0000 791.5430 327.1094 434.3188 2.4948 0.0000
+0 2001 116 19.00 0.438 5.1981 0.4000 0.0000 0.0000 548.1790 283.9587 477.8025 3.3429 0.0000
+0 2001 117 5.50 0.562 9.6241 0.3919 34.5550 0.0000 939.7904 351.6681 409.7076 2.3278 0.0000
+0 2001 117 19.00 0.417 6.4695 0.4000 0.0000 0.0000 645.3531 296.4088 465.3524 4.1030 0.0000
+0 2001 118 5.00 0.583 8.1818 0.3982 32.3753 0.0000 684.5701 210.2282 551.4485 3.8321 0.0000
+0 2001 118 19.00 0.417 5.6420 0.4000 0.0000 0.0000 538.9223 246.1213 515.6399 6.7005 0.0000
+0 2001 119 5.00 0.583 8.1021 0.4000 44.3749 0.2540 816.2329 343.4412 418.3200 5.0993 0.0000
+0 2001 119 19.00 0.417 3.1480 0.4065 0.0000 0.7620 422.9169 280.1429 481.9260 12.0005 0.0000
+0 2001 120 5.00 0.583 6.3375 0.4014 45.4941 0.0000 734.5286 377.8349 383.9940 5.3257 0.0000
+0 2001 120 19.00 0.417 7.1975 0.4050 0.0000 0.0000 766.7922 368.0694 393.9285 6.0585 0.0000
+0 2001 121 5.00 0.583 10.6229 0.4021 50.5305 0.0000 1103.7690 423.4234 338.4392 6.6379 0.0000
+0 2001 121 19.00 0.417 1.4620 0.4085 0.0000 13.7280 201.5025 129.7449 632.4188 2.6795 0.0000
+0 2001 122 5.00 0.583 -6.1846 0.4100 13.5461 3.5560 17.8456 272.2806 489.9540 1.8579 0.0000
+0 2001 122 19.00 0.417 -7.8810 0.4100 0.0000 2.7940 21.8620 327.4146 434.8201 0.6550 0.0000
+0 2001 123 5.00 0.583 -6.8511 0.4111 17.2622 7.6200 20.3330 295.7917 466.4937 1.0250 0.0000
+0 2001 123 19.00 0.417 -6.3240 0.4100 0.0000 1.2700 16.5138 276.1688 486.0659 0.6910 0.0000
+0 2001 124 5.00 0.583 -3.8875 0.4150 9.3897 7.8820 10.8013 188.7588 573.7127 1.4500 0.0000
+0 2001 124 19.00 0.417 -3.5380 0.4200 0.0000 5.5880 7.9065 173.7163 588.9921 1.6900 0.0000
+0 2001 125 5.00 0.583 -0.3471 0.4200 26.7062 5.0800 85.6394 119.3097 643.3987 6.8032 0.0000
+0 2001 125 19.00 0.417 -1.2115 0.4200 0.0000 3.0480 143.8940 214.5208 548.1876 13.6095 0.0000
+0 2001 126 5.00 0.583 0.3839 0.4200 46.2577 0.0000 412.2842 406.3594 356.3490 5.1696 0.0000
+0 2001 126 19.00 0.417 -0.8085 0.4200 0.0000 0.0000 365.3427 417.1023 345.6061 2.2680 0.0000
+0 2001 127 5.00 0.583 3.4707 0.4200 59.0962 0.0000 718.9681 543.9254 218.7829 2.7864 0.0000
+0 2001 127 19.00 0.417 3.3240 0.4200 0.0000 0.0000 590.1827 439.1969 323.5115 6.7885 0.0000
+0 2001 128 5.00 0.583 8.4696 0.4189 52.9263 0.0000 946.3993 444.1603 318.4973 4.3518 0.0000
+0 2001 128 19.00 0.417 7.2090 0.4200 0.0000 0.0000 870.7658 471.2130 291.4954 4.9635 0.0000
+0 2001 129 5.00 0.583 8.4557 0.4200 31.9046 0.7620 841.6227 336.7582 425.9501 3.6018 0.0000
+0 2001 129 19.00 0.417 7.2775 0.4200 0.0000 0.0000 693.8300 288.1756 474.5327 3.9915 0.0000
+0 2001 130 5.00 0.583 8.7414 0.4200 49.0315 0.7620 766.4634 240.7634 521.9450 4.6689 0.0000
+0 2001 130 19.00 0.417 5.1045 0.4280 0.0000 0.0000 593.5947 330.0545 433.0331 2.7860 0.0000
+0 2001 131 5.00 0.583 7.9939 0.4239 57.9692 0.0000 719.6968 251.9459 510.9487 2.5300 0.0000
+0 2001 131 19.00 0.417 6.3515 0.4300 0.0000 0.0000 656.1609 316.5274 446.6550 3.3125 0.0000
+0 2001 132 5.00 0.583 12.4575 0.4275 58.5508 0.0000 1281.3853 437.5889 325.4750 2.8475 0.0000
+0 2001 132 19.00 0.417 9.0595 0.4335 0.0000 0.0000 859.6401 320.9543 442.3941 4.1165 0.0000
+0 2001 133 5.00 0.583 12.7779 0.4296 44.0595 0.0000 1196.9971 323.8862 439.2793 3.4743 0.0000
+0 2001 133 19.00 0.417 8.7340 0.4400 0.0000 0.0000 694.8508 178.7145 584.9423 5.1970 0.0000
+0 2001 134 5.00 0.583 10.8389 0.4318 54.1370 0.0000 761.4341 68.3642 694.9030 4.8893 0.0000
+0 2001 134 19.00 0.417 8.0140 0.4390 0.0000 0.0000 607.8313 150.4942 613.1151 5.4435 0.0000
+0 2001 135 5.00 0.583 10.5225 0.4311 34.2088 0.0000 911.4178 248.2033 515.0299 7.4168 0.0000
+0 2001 135 19.00 0.417 9.8515 0.4300 0.0000 0.0000 853.4484 250.3988 512.7836 5.1830 0.0000
+0 2001 136 5.00 0.583 9.7946 0.4300 42.1645 0.0000 737.6861 134.4618 628.7206 3.6979 0.0000
+0 2001 136 19.00 0.417 7.4040 0.4385 0.0000 0.0000 623.4181 209.6154 553.9702 5.2020 0.0000
+0 2001 137 5.00 0.583 3.0857 0.4514 11.4595 12.1940 10.5354 -125.8377 890.0372 1.8979 0.0000
+0 2001 137 19.00 0.417 2.4300 0.4660 0.0000 0.2540 144.4559 44.4759 720.4156 4.1470 0.0000
+0 2001 138 5.00 0.583 8.0879 0.6393 53.4058 0.0000 689.7845 227.4709 545.7372 3.4521 0.0000
+0 2001 138 19.00 0.417 5.1970 0.6575 0.0000 0.0000 260.4013 6.7108 767.3389 1.6200 0.0000
+0 2001 139 5.00 0.583 4.7432 0.7368 18.3769 3.5520 80.3862 -139.6988 917.5881 1.9375 0.0000
+0 2001 139 19.00 0.417 4.8315 0.7210 0.0000 0.2540 425.5705 199.0703 578.0432 7.9200 0.0000
+0 2001 140 5.00 0.583 4.0554 1.1379 42.4728 5.8440 557.3387 354.2884 443.6290 5.6536 0.0000
+0 2001 140 19.00 0.417 -8.4425 0.9160 0.0000 5.8420 69.8281 414.4664 372.1445 3.9705 0.0000
+0 2001 141 5.00 0.583 -3.6114 0.6582 50.5242 0.0000 306.1443 476.4785 297.6039 4.7632 0.0000
+0 2001 141 19.00 0.417 -0.5215 0.5960 0.0000 0.0000 409.7073 461.2501 309.8447 7.9745 0.0000
+0 2001 142 5.00 0.583 5.6718 0.7368 55.9834 0.0000 668.8206 376.9783 400.9540 4.3561 0.0000
+0 2001 142 19.00 0.417 5.6815 0.7685 0.0000 0.0000 672.4646 394.5985 384.8232 4.4255 0.0000
+0 2001 143 5.00 0.604 9.5955 2.5583 56.4743 0.0000 965.2073 486.0123 388.6614 5.6786 0.0000
+0 2001 143 19.50 0.396 5.6521 2.1168 0.0000 0.0000 678.5823 466.5241 381.3891 4.2795 0.0000
+0 2001 144 5.00 0.604 9.4397 3.6303 61.3899 0.0000 1002.2074 596.4916 340.0761 6.2845 0.0000
+0 2001 144 19.50 0.396 5.6242 2.8121 0.0000 0.0000 677.3467 505.6904 380.2538 4.4016 0.0000
+0 2001 145 5.00 0.604 9.3783 4.1583 48.0830 0.0000 786.5891 419.1326 547.2935 4.0610 0.0000
+0 2001 145 19.50 0.396 6.7174 3.4911 0.0000 0.0000 461.5005 257.9705 665.6559 2.4974 0.0000
+0 2001 146 5.00 0.604 9.1324 4.2248 38.5344 1.7780 549.0167 203.9457 764.2178 2.7641 0.0000
+0 2001 146 19.50 0.396 5.1726 3.7784 0.0000 1.5240 179.4580 94.7238 845.2086 2.3979 0.0000
+0 2001 147 5.00 0.604 8.7934 5.1379 36.6008 4.3220 596.1885 328.9410 700.0367 4.0103 0.0000
+0 2001 147 19.50 0.396 4.6147 3.9411 0.0000 0.0000 147.2168 106.2222 843.3002 2.4200 0.0000
+0 2001 148 5.00 0.604 9.0538 5.3900 28.8785 0.2540 595.0135 326.6022 717.8163 2.2017 0.0000
+0 2001 148 19.50 0.375 4.6089 4.6367 0.0000 1.5240 147.9172 149.5514 841.9868 3.2422 0.0000
+0 2001 149 4.50 0.625 8.5590 5.2850 40.9284 0.5080 638.9602 403.9691 631.0771 4.3557 0.0000
+0 2001 149 19.50 0.375 6.3783 5.0067 0.0000 0.0000 434.1396 339.6273 675.0046 4.4867 0.0000
+0 2001 150 4.50 0.625 5.3437 4.8047 24.1627 0.2540 211.9892 177.3083 825.3077 2.4383 0.0000
+0 2001 150 19.50 0.375 4.2422 3.7489 0.0000 0.0000 241.3147 211.0970 727.7877 2.4578 0.0000
+0 2001 151 4.50 0.625 9.2163 5.7927 52.0907 0.0000 821.8585 577.1116 498.0460 4.1917 0.0000
+0 2001 151 19.50 0.375 7.7606 5.1411 0.0000 0.0000 707.3350 528.9680 494.4624 3.4000 0.0000
+0 2001 152 4.50 0.625 12.2243 7.2260 62.6719 0.0000 1214.9794 793.4966 382.2282 5.0610 0.0000
+0 2001 152 19.50 0.375 8.5461 6.0333 0.0000 0.0000 663.1021 477.4856 604.6546 3.3600 0.0000
+0 2001 153 4.50 0.625 15.6423 8.1683 60.4507 0.0000 1593.9521 890.2033 356.8598 4.2090 0.0000
+0 2001 153 19.50 0.375 12.1806 7.6178 0.0000 0.0000 1152.9534 768.8721 423.2312 4.6394 0.0000
+0 2001 154 4.50 0.625 9.4337 7.8143 36.8042 0.0000 613.3577 484.1795 724.3036 3.0020 0.0000
+0 2001 154 19.50 0.375 5.0900 6.9211 0.0000 0.7620 -4.4681 117.2796 1024.2122 1.3572 0.0000
+0 2001 155 4.50 0.625 5.6790 6.6167 38.4806 0.5080 226.4944 283.7198 836.6298 3.1673 0.0000
+0 2001 155 19.50 0.375 1.7244 5.6272 0.0000 0.2540 5.2617 228.6325 825.4081 1.5061 0.0000
+0 2001 156 4.50 0.625 7.2067 6.9793 57.1079 0.0000 564.6925 542.9765 614.0172 2.5633 0.0000
+0 2001 156 19.50 0.375 6.4539 6.1139 0.0000 0.0000 555.7432 534.6898 553.1112 1.9044 0.0000
+0 2001 157 4.50 0.625 12.8073 7.8850 60.9269 0.0000 1103.1021 678.5001 547.9134 2.8873 0.0000
+0 2001 157 19.50 0.375 8.7106 7.1489 0.0000 0.0000 497.5734 382.2578 777.1907 2.2806 0.0000
+0 2001 158 4.50 0.625 12.8397 8.6287 51.8056 0.0000 888.3729 516.7894 764.3180 2.6637 0.0000
+0 2001 158 19.50 0.375 9.0433 7.6867 0.0000 0.0000 446.8330 337.9680 859.5142 2.8633 0.0000
+0 2001 159 4.50 0.625 13.7867 8.9563 52.4551 0.0000 1072.0527 633.6866 670.1888 2.9467 0.0000
+0 2001 159 19.50 0.375 11.1700 7.8161 0.0000 0.0000 813.5794 540.0897 667.2352 2.8050 0.0000
+0 2001 160 4.50 0.625 13.9300 7.9560 23.4142 0.0000 1156.5656 621.1766 596.7570 3.9080 0.0000
+0 2001 160 19.50 0.375 11.8017 7.0589 0.0000 0.0000 1020.1748 630.7044 520.9076 3.6644 0.0000
+0 2001 161 4.50 0.625 16.9380 9.2290 55.2652 0.0000 1675.2170 896.7446 433.3443 2.8697 0.0000
+0 2001 161 19.50 0.375 14.7194 8.6944 0.0000 0.0000 1556.7880 994.0189 279.6072 4.6072 0.0000
+0 2001 162 4.50 0.625 17.1400 10.2537 62.9410 0.0000 1854.9581 1132.7402 278.2340 5.0473 0.0000
+0 2001 162 19.50 0.375 11.8517 8.5883 0.0000 0.0000 1079.7972 801.0298 465.6758 2.4528 0.0000
+0 2001 163 4.50 0.625 14.6430 8.9283 45.4061 0.0000 1561.6541 1026.4758 270.9185 3.4593 0.0000
+0 2001 163 19.50 0.375 4.4706 7.5067 0.0000 1.0160 474.8409 649.5595 535.8267 9.5411 0.0000
+0 2001 164 4.50 0.625 0.2903 5.2437 18.0824 2.7940 214.9605 480.2539 549.0123 4.7140 0.0000
+0 2001 164 19.50 0.375 -2.5600 3.7933 0.0000 0.2540 219.7977 525.7979 414.8042 8.0800 0.0000
+0 2001 165 4.50 0.625 -0.4667 3.0240 35.5529 0.0000 318.2865 483.2119 413.5109 11.7753 0.0000
+0 2001 165 19.50 0.375 3.1422 3.0150 0.0000 0.0000 543.2011 535.7867 360.3249 7.5272 0.0000
+0 2001 166 4.50 0.625 9.5680 5.9797 56.1793 0.2000 1019.3316 741.1074 348.4874 5.5520 0.0000
+0 2001 166 19.50 0.375 9.5278 6.0311 0.0000 0.0000 971.3682 710.8865 369.9779 3.7994 0.0000
+0 2001 167 4.50 0.625 14.6220 8.1197 60.2041 0.0000 1496.7869 893.9052 348.6505 3.2897 0.0000
+0 2001 167 19.50 0.375 12.1794 7.0389 0.0000 0.0000 1271.8733 848.4428 303.1983 3.5739 0.0000
+0 2001 168 4.50 0.625 16.6883 8.8940 60.3886 0.0000 1834.9714 1055.5424 247.3795 3.9083 0.0000
+0 2001 168 19.50 0.375 14.0944 8.0533 0.0000 0.0000 1576.3846 1031.6149 193.1855 2.8667 0.0000
+0 2001 169 4.50 0.625 15.0363 8.8443 42.4795 0.0000 1536.4001 941.6469 348.6833 4.3210 0.0000
+0 2001 169 19.50 0.375 5.7400 8.3428 0.0000 0.0000 152.8249 328.9656 917.5530 2.0883 0.0000
+0 2001 170 4.50 0.625 6.9033 8.7160 43.7861 0.0000 181.4266 314.3536 963.8654 2.3120 0.0000
+0 2001 170 19.50 0.375 6.9650 7.0844 0.0000 0.0000 368.2185 374.1461 779.8599 2.3606 0.0000
+0 2001 171 4.50 0.625 11.7673 8.8910 52.6160 0.0000 799.0662 550.9085 749.3951 3.2447 0.0000
+0 2001 171 19.50 0.375 5.3889 8.4244 0.0000 0.5080 1.7379 215.0476 1037.4437 1.9578 0.0000
+0 2001 172 4.50 0.625 9.9173 9.3690 46.5941 0.0000 336.3807 285.2375 1047.3947 2.4153 0.0000
+0 2001 172 19.50 0.375 9.5806 8.0439 0.0000 0.0000 551.6424 431.3033 793.2194 3.4194 0.0000
+0 2001 173 4.50 0.625 13.8470 9.1960 40.7929 1.0160 1155.6372 715.0469 605.4937 3.8133 0.0000
+0 2001 173 19.50 0.375 12.1239 7.8628 0.0000 0.0000 1051.8036 693.0461 517.1035 4.2856 0.0000
+0 2001 174 4.50 0.625 16.9400 10.0983 52.9360 0.0000 1596.7827 887.5942 511.3402 3.7820 0.0000
+0 2001 174 19.50 0.375 14.1189 9.0533 0.0000 0.0000 1224.7111 756.2685 545.3707 4.6906 0.0000
+0 2001 175 4.50 0.625 14.5230 10.3807 43.1908 2.5440 986.9515 576.0762 839.5905 4.4473 0.0000
+0 2001 175 19.50 0.375 11.3606 9.1556 0.0000 3.0480 604.7245 414.3355 895.3656 5.3794 0.0000
+0 2001 176 4.50 0.625 15.6850 11.0297 54.3339 0.5080 1153.6445 678.9702 798.8414 4.5830 0.0000
+0 2001 176 19.50 0.375 12.7611 9.6967 0.0000 0.0000 850.1035 573.3084 781.2101 2.5472 0.0000
+0 2001 177 4.50 0.625 14.0970 10.8157 39.5653 6.3540 825.7032 502.3680 955.5911 3.4410 0.0000
+0 2001 177 19.50 0.375 10.2739 9.5294 0.0000 0.5080 469.5969 407.6570 932.4667 5.9728 0.0000
+0 2001 178 4.50 0.625 14.1150 10.4753 59.4976 0.0000 1106.7418 749.3727 676.2490 5.7817 0.0000
+0 2001 178 19.50 0.375 12.1222 9.5244 0.0000 0.0000 824.0636 594.9073 744.8650 3.3467 0.0000
+0 2001 179 4.50 0.625 15.6810 10.7083 43.6529 0.0000 1162.9604 655.2122 789.8536 3.0583 0.0000
+0 2001 179 19.50 0.375 14.0056 9.5478 0.0000 0.0000 1010.6517 595.0043 747.7158 3.5194 0.0000
+0 2001 180 4.50 0.625 16.5133 10.4653 42.9482 5.3340 1269.4419 638.6708 785.2962 2.9900 0.0000
+0 2001 180 19.50 0.375 13.6439 9.6194 0.0000 0.0000 939.0092 565.6624 782.3415 2.9306 0.0000
+0 2001 181 4.50 0.625 18.7147 11.4120 51.4210 0.0000 1729.4504 902.3249 614.2430 3.7893 0.0000
+0 2001 181 19.50 0.375 15.1828 10.7656 0.0000 0.0000 1240.7888 799.7883 646.1421 3.0856 0.0000
+0 2001 182 4.50 0.625 19.8343 12.4003 54.0243 0.0000 1968.5286 1077.9679 530.9109 4.1593 0.0000
+0 2001 182 19.50 0.375 14.0144 11.2322 0.0000 0.0000 1073.3597 799.8163 688.2459 2.6722 0.0000
+0 2001 183 4.50 0.625 17.7187 11.6543 38.9061 0.0000 1616.6064 938.8366 591.6080 4.4220 0.0000
+0 2001 183 19.50 0.375 13.7906 10.4689 0.0000 0.0000 1067.5916 747.8633 671.8480 3.6506 0.0000
+0 2001 184 4.50 0.625 17.3933 11.8717 47.2068 0.2540 1321.9229 702.1039 850.7618 3.4240 0.0000
+0 2001 184 19.50 0.375 14.7267 10.7533 0.0000 0.0000 916.3784 523.2177 922.0526 2.8733 0.0000
+0 2001 185 4.50 0.625 18.8907 12.0463 43.1158 0.0000 1631.1180 841.8848 731.4552 3.3640 0.0000
+0 2001 185 19.50 0.375 14.4800 11.5044 0.0000 0.0000 996.6893 689.4697 822.0590 3.0028 0.0000
+0 2001 186 4.50 0.625 18.2130 12.8937 44.4605 0.5080 1480.1996 845.6003 809.0366 3.5737 0.0000
+0 2001 186 19.50 0.375 15.6167 11.6589 0.0000 0.0000 1184.0098 767.9805 757.7413 4.2117 0.0000
+0 2001 187 4.50 0.625 18.7540 12.4550 34.9918 0.2540 1516.5336 769.3929 833.5300 3.3450 0.0000
+0 2001 187 19.50 0.375 13.3467 11.3028 0.0000 5.8420 655.4401 453.9372 1039.0146 5.1506 0.0000
+0 2001 188 4.50 0.625 17.3703 12.4000 39.5611 1.5200 1080.6107 519.9891 1081.1526 2.4770 0.0000
+0 2001 188 19.50 0.375 12.6317 11.6267 0.0000 8.1300 337.8411 238.8406 1283.8350 2.7872 0.0000
+0 2001 189 4.50 0.625 14.6357 11.9230 25.0873 11.6780 624.4985 315.4130 1237.1453 3.0667 0.0000
+0 2001 189 19.50 0.375 13.0311 10.0233 0.0000 0.0000 842.6222 561.1175 819.8350 3.6983 0.0000
+0 2001 190 4.50 0.625 15.2753 12.0530 44.6272 0.0000 843.4892 516.1845 1059.3102 2.2870 0.0000
+0 2001 190 19.50 0.375 13.8606 11.3717 0.0000 0.0000 977.4186 734.5140 765.9177 3.1211 0.0000
+0 2001 191 4.50 0.625 12.0257 12.2863 31.2777 6.6040 304.1304 322.9386 1268.7520 2.4967 0.0000
+0 2001 191 19.50 0.375 10.3606 10.4794 0.0000 3.5540 434.4590 443.4785 976.8593 3.9872 0.0000
+0 2001 192 4.50 0.625 14.4140 12.3130 61.3589 0.0000 808.4391 593.0880 1010.2220 3.3207 0.0000
+0 2001 192 19.50 0.375 12.3311 11.8983 0.0000 0.0000 552.2264 511.8360 1036.9926 2.9133 0.0000
+0 2001 193 4.50 0.625 13.3523 11.7433 26.0291 8.3860 554.1542 387.4295 1146.6833 2.7040 0.0000
+0 2001 193 19.50 0.375 10.1394 10.3317 0.0000 0.0000 316.4003 332.9012 1074.5424 2.7183 0.0000
+0 2001 194 4.50 0.625 12.0083 9.5617 29.5661 40.3840 485.7531 259.0750 1089.0505 2.9663 0.0000
+0 2001 194 19.50 0.375 9.8906 8.2133 0.0000 0.5080 386.9915 252.1005 983.3076 4.3650 0.0000
+0 2001 195 4.50 0.625 12.2653 10.3073 37.9791 1.2700 686.5175 509.7878 900.6443 4.0233 0.0000
+0 2001 195 19.50 0.375 10.3889 9.7011 0.0000 0.0000 514.0181 455.4964 898.5844 5.7644 0.0000
+0 2001 196 4.50 0.625 12.1270 10.4140 53.2147 1.2700 779.5804 617.1571 801.0297 4.9317 0.0000
+0 2001 196 19.50 0.375 9.8106 9.4311 0.0000 0.0000 661.6456 628.9326 703.8247 2.4583 0.0000
+0 2001 197 4.50 0.625 14.3720 11.1397 62.2064 0.0000 1231.8979 899.3997 592.5760 3.7110 0.0000
+0 2001 197 19.50 0.396 12.5826 10.1958 0.0000 0.0000 1222.9290 1005.3556 391.5845 4.2768 0.0000
+0 2001 198 5.00 0.604 14.7741 10.6703 37.7361 0.0000 1311.7782 905.6917 535.0256 3.1903 0.0000
+0 2001 198 19.50 0.396 12.6400 9.7574 0.0000 0.0000 1052.2122 793.1551 566.3519 4.1516 0.0000
+0 2001 199 5.00 0.604 14.7972 11.7400 49.4809 2.2900 1107.5502 796.4515 747.0114 4.3652 0.0000
+0 2001 199 19.50 0.396 11.5837 10.0568 0.0000 0.0000 787.2130 652.7947 732.4140 4.0474 0.0000
+0 2001 200 5.00 0.604 15.1714 11.2290 38.3560 0.0000 1174.8447 776.4631 716.3882 3.3576 0.0000
+0 2001 200 19.50 0.396 13.1232 10.4321 0.0000 0.0000 1011.4067 761.0214 655.6007 4.1468 0.0000
+0 2001 201 5.00 0.604 16.5548 12.0417 48.0339 0.0000 1322.6746 835.6417 734.7184 3.6962 0.0000
+0 2001 201 19.50 0.396 14.5847 11.2768 0.0000 0.0000 1090.6151 759.7210 731.5289 3.5547 0.0000
+0 2001 202 5.00 0.604 16.5862 11.9686 40.2698 0.0000 1230.1260 732.5814 824.9753 3.2086 0.0000
+0 2001 202 19.50 0.396 14.1889 11.0195 0.0000 0.0000 991.7356 679.9914 787.6025 4.0832 0.0000
+0 2001 203 5.00 0.604 17.9900 12.7607 58.0385 0.0000 1719.5929 1117.5938 524.0045 3.9069 0.0000
+0 2001 203 19.50 0.396 14.0711 11.6274 0.0000 0.0000 1139.5496 893.5744 631.1891 2.7789 0.0000
+0 2001 204 5.00 0.583 14.5525 12.1125 35.9341 2.2840 759.8350 506.3581 1067.1241 2.2646 0.0000
+0 2001 204 19.00 0.417 10.3270 11.1090 0.0000 0.2540 314.3752 381.4970 1095.0240 2.5955 0.0000
+0 2001 205 5.00 0.583 14.2007 11.7139 36.2088 11.1700 865.4738 602.8398 932.7249 3.6346 0.0000
+0 2001 205 19.00 0.417 10.4370 10.2370 0.0000 0.0000 484.9584 464.8883 934.7548 2.9060 0.0000
+0 2001 206 5.00 0.583 15.5396 11.0107 34.2790 0.0000 1184.0885 725.6556 746.7634 2.1596 0.0000
+0 2001 206 19.00 0.417 12.2945 10.5925 0.0000 0.0000 827.6329 672.4713 757.9154 2.6660 0.0000
+0 2001 207 5.00 0.583 12.2839 10.6264 32.8648 1.7780 636.8243 479.8772 953.7339 3.2532 0.0000
+0 2001 207 19.00 0.417 9.8525 10.0650 0.0000 0.2540 442.6898 461.0464 923.8391 2.2795 0.0000
+0 2001 208 5.00 0.583 12.9900 10.9071 40.1983 0.5080 801.5195 603.4221 858.7689 3.3664 0.0000
+0 2001 208 19.00 0.417 11.2815 9.5350 0.0000 0.0000 784.9395 636.2122 705.7158 3.7625 0.0000
+0 2001 209 5.00 0.583 17.6439 11.6754 58.8982 0.0000 1717.5984 1059.0403 485.2059 2.9139 0.0000
+0 2001 209 19.00 0.417 15.7420 11.0965 0.0000 0.0000 1463.4841 988.7815 487.3171 3.3785 0.0000
+0 2001 210 5.00 0.583 19.6936 12.7014 55.9445 0.0000 1982.5938 1142.5345 496.3815 3.2921 0.0000
+0 2001 210 19.00 0.417 17.2455 11.9880 0.0000 0.0000 1684.2661 1105.8011 451.9174 3.7885 0.0000
+0 2001 211 5.00 0.583 19.1443 12.2425 36.7376 0.0000 1792.7688 983.4191 601.2939 3.5711 0.0000
+0 2001 211 19.00 0.417 14.7765 12.0935 0.0000 0.0000 994.4201 715.0007 851.3126 5.8190 0.0000
+0 2001 212 5.00 0.583 14.9511 12.7175 49.0030 0.0000 984.0911 741.7874 887.8715 6.7696 0.0000
+0 2001 212 19.00 0.417 12.6875 11.4700 0.0000 0.0000 863.8895 749.0529 759.5774 3.5805 0.0000
+0 2001 213 5.00 0.583 13.4975 11.5896 20.4957 2.0320 845.0662 654.2576 866.3746 2.0521 0.0000
+0 2001 213 19.00 0.417 12.2995 10.7185 0.0000 0.0000 666.7369 518.3209 922.3388 3.1730 0.0000
+0 2001 214 5.00 0.583 14.5525 11.2164 24.2167 2.2860 776.4038 436.8411 1048.7876 2.7568 0.0000
+0 2001 214 19.00 0.417 12.8900 10.0895 0.0000 0.0000 861.8812 606.0023 781.7842 3.4025 0.0000
+0 2001 215 5.00 0.583 17.4350 11.3321 39.0762 0.0000 1308.4338 644.7798 858.5024 2.4296 0.0000
+0 2001 215 19.00 0.417 14.4845 11.4805 0.0000 0.0000 851.1550 548.8682 960.2220 4.5640 0.0000
+0 2001 216 5.00 0.583 16.5989 13.0196 50.5121 0.0000 1069.2692 673.7830 991.5306 4.5082 0.0000
+0 2001 216 19.00 0.417 14.9215 12.1745 0.0000 0.0000 874.3366 591.4667 983.0915 3.9570 0.0000
+0 2001 217 5.00 0.583 17.2757 12.6800 33.0647 2.2900 1239.3530 716.2576 909.1516 3.2125 0.0000
+0 2001 217 19.00 0.417 13.1410 11.7690 0.0000 3.0440 527.4615 394.3719 1142.2148 3.1985 0.0000
+0 2001 218 5.00 0.583 15.0554 12.0618 31.3357 1.0160 689.7427 365.7864 1199.1541 3.0571 0.0000
+0 2001 218 19.00 0.417 13.5145 11.3895 0.0000 0.0000 706.9944 501.5862 999.3845 2.8040 0.0000
+0 2001 219 5.00 0.583 14.4443 11.4039 25.6860 8.6420 773.4160 459.4060 1044.2458 3.0086 0.0000
+0 2001 219 19.00 0.417 12.8480 10.2150 0.0000 0.0000 625.0272 382.2376 1014.8395 4.3100 0.0000
+0 2001 220 5.00 0.583 14.3086 11.0850 21.9098 3.3060 710.5577 386.8252 1087.9827 3.1850 0.0000
+0 2001 220 19.00 0.417 10.2815 10.7705 0.0000 0.5080 391.3955 426.3837 1018.8910 2.8225 0.0000
+0 2001 221 5.00 0.583 7.4761 9.9361 14.3923 36.0780 5.0234 196.4637 1176.8118 2.1525 0.0000
+0 2001 221 19.00 0.417 7.3335 9.6285 0.0000 0.5080 35.1287 211.1330 1136.4462 2.0030 0.0000
+0 2001 222 5.00 0.583 10.1825 10.2814 20.3420 8.8880 179.3274 185.2630 1219.4392 1.7561 0.0000
+0 2001 222 19.00 0.417 9.2045 9.2860 0.0000 0.7620 375.3354 382.2658 938.5424 3.5650 0.0000
+0 2001 223 5.00 0.583 12.3946 10.5104 43.7470 0.0000 596.9742 429.7877 1002.2846 2.7804 0.0000
+0 2001 223 19.00 0.417 11.8720 9.6050 0.0000 0.0000 752.1331 554.4511 793.3206 3.2315 0.0000
+0 2001 224 5.00 0.583 14.3432 11.1168 43.5429 6.0920 902.3141 586.4087 900.5764 3.4539 0.0000
+0 2001 224 19.00 0.417 10.1265 8.9150 0.0000 0.2540 363.6151 264.1207 1026.5160 3.1960 0.0000
+0 2001 225 5.00 0.583 13.5361 10.6529 39.8698 0.7620 715.5022 439.0741 1005.6831 2.8582 0.0000
+0 2001 225 19.00 0.417 10.1955 10.4755 0.0000 4.8340 385.0052 406.8479 1012.6263 3.3940 0.0000
+0 2001 226 5.00 0.583 10.9504 10.5225 22.5288 2.0320 391.4404 352.8609 1071.2147 3.2489 0.0000
+0 2001 226 19.00 0.417 7.6470 8.8690 0.0000 5.8420 230.8588 323.9246 962.8552 4.8780 0.0000
+0 2001 227 5.00 0.583 9.7232 9.6921 32.1557 0.2540 384.3720 377.7626 976.7483 2.3961 0.0000
+0 2001 227 19.00 0.417 7.5645 8.8885 0.0000 0.0000 273.0384 374.0139 914.5191 2.2730 0.0000
+0 2001 228 5.00 0.583 9.2375 9.2146 28.6468 2.2880 346.1796 345.0887 971.1047 1.9486 0.0000
+0 2001 228 19.00 0.417 7.4770 8.4070 0.0000 0.2540 393.2807 463.2465 788.1559 3.6190 0.0000
+0 2001 229 5.00 0.583 12.0021 9.4321 39.9053 0.0000 844.4394 621.3786 717.1213 2.8618 0.0000
+0 2001 229 19.00 0.438 9.8995 8.4805 0.0000 0.0000 775.9839 664.1633 594.3604 4.2738 0.0000
+0 2001 230 5.50 0.562 16.0781 10.3193 50.9285 0.0000 1458.3367 873.0177 546.3198 3.8148 0.0000
+0 2001 230 19.00 0.438 14.6400 10.5571 0.0000 0.0000 1292.9550 894.1790 533.0656 6.5557 0.0000
+0 2001 231 5.50 0.562 17.2700 11.7559 38.2644 0.0000 1576.5685 968.7743 571.6394 5.0204 0.0000
+0 2001 231 19.00 0.438 13.2076 10.8657 0.0000 0.0000 1065.0221 843.0675 610.7605 3.6110 0.0000
+0 2001 232 5.50 0.562 13.3600 11.2667 26.1526 0.5080 933.0936 727.2112 763.7563 3.9937 0.0000
+0 2001 232 19.00 0.438 10.3076 10.1043 0.0000 0.7620 557.5477 540.0044 847.6124 5.7748 0.0000
+0 2001 233 5.50 0.542 12.1273 11.1331 44.0404 0.0000 724.1630 627.7155 853.6000 6.4200 0.0000
+0 2001 233 18.50 0.458 10.5886 10.0059 0.0000 0.0000 731.5500 681.5165 698.6284 3.7182 0.0000
+0 2001 234 5.50 0.542 10.9304 9.6704 19.5001 2.2820 561.1360 447.8789 903.8869 2.7442 0.0000
+0 2001 234 18.50 0.458 7.5309 8.9195 0.0000 0.0000 328.7757 433.8636 857.1445 3.6850 0.0000
+0 2001 235 5.50 0.542 11.7996 10.1396 48.7843 0.0000 758.6983 603.1319 793.6803 5.1719 0.0000
+0 2001 235 18.50 0.458 11.8582 9.8000 0.0000 0.0000 940.4408 757.6781 604.9501 5.6777 0.0000
+0 2001 236 5.50 0.542 16.0031 11.0204 48.7352 0.0000 1441.8073 922.9943 551.0908 3.5788 0.0000
+0 2001 236 18.50 0.458 12.7400 10.3168 0.0000 0.0000 1063.5378 839.1232 568.2613 2.4541 0.0000
+0 2001 237 5.50 0.542 13.4462 10.4165 36.8858 0.5080 984.6379 696.8112 721.0717 2.1254 0.0000
+0 2001 237 18.50 0.458 12.1023 8.8614 0.0000 0.0000 982.6871 703.5259 584.5694 2.9532 0.0000
+0 2001 238 5.50 0.542 16.9842 10.4262 46.9703 0.0000 1574.5287 890.5527 536.1124 2.6535 0.0000
+0 2001 238 18.50 0.458 14.1486 9.7973 0.0000 0.0000 1277.9268 868.6617 495.3787 3.2036 0.0000
+0 2001 239 5.50 0.542 17.1796 10.4581 33.1286 0.0000 1569.8705 861.8202 561.9517 2.4565 0.0000
+0 2001 239 18.50 0.458 13.5941 10.0686 0.0000 0.0000 1166.9425 837.7251 547.9186 4.2268 0.0000
+0 2001 240 5.50 0.542 15.3896 10.8435 38.7513 1.7800 1303.0414 831.4091 623.4149 4.6831 0.0000
+0 2001 240 18.50 0.458 12.1050 9.2364 0.0000 0.0000 1001.9637 751.9279 565.1529 4.5650 0.0000
+0 2001 241 5.50 0.542 14.9581 10.4354 32.6711 0.2540 1298.3942 850.7864 569.4734 3.8819 0.0000
+0 2001 241 18.50 0.458 9.2755 9.0568 0.0000 0.0000 649.6774 622.8489 680.6473 2.2055 0.0000
+0 2001 242 5.50 0.542 11.6904 9.3973 30.2749 1.5240 688.8292 480.4459 852.3277 2.2838 0.0000
+0 2001 242 18.50 0.458 8.9223 8.8986 0.0000 0.0000 468.2672 464.7994 824.2401 3.7914 0.0000
+0 2001 243 5.50 0.542 11.2450 9.5742 25.7636 2.8000 544.5486 393.3240 951.6379 2.2942 0.0000
+0 2001 243 18.50 0.458 9.7600 8.8809 0.0000 0.0000 603.3185 532.5259 755.3469 4.3900 0.0000
+0 2001 244 5.50 0.542 11.7285 9.7008 31.7853 1.5240 777.7188 599.1835 756.8759 6.4281 0.0000
+0 2001 244 18.50 0.458 10.8659 8.8141 0.0000 0.0000 806.6527 635.5399 646.7092 5.8759 0.0000
+0 2001 245 5.50 0.542 14.3096 10.6019 47.3558 0.0000 1171.6146 810.9548 626.8911 4.8415 0.0000
+0 2001 245 18.50 0.458 11.7845 9.7705 0.0000 0.0000 955.1032 778.2678 583.0969 4.2036 0.0000
+0 2001 246 5.50 0.542 15.7227 10.6115 35.3840 0.0000 1339.6180 818.9963 616.8456 2.5850 0.0000
+0 2001 246 18.50 0.458 13.2023 9.3091 0.0000 0.0000 1100.8412 749.1518 574.3521 3.1514 0.0000
+0 2001 247 5.50 0.542 16.7200 10.6412 47.1539 0.0000 1505.6816 875.5825 568.1617 2.9796 0.0000
+0 2001 247 18.50 0.458 12.9968 9.9573 0.0000 0.0000 1063.7461 784.9169 591.4352 3.9091 0.0000
+0 2001 248 5.50 0.542 14.0827 10.8119 38.0077 2.5400 1041.6716 713.1995 740.0089 3.7762 0.0000
+0 2001 248 18.50 0.458 10.0973 9.4418 0.0000 0.0000 628.5032 573.5664 759.3983 2.9964 0.0000
+0 2001 249 5.50 0.542 12.0546 10.3200 37.9701 2.0360 958.4027 786.2672 625.6051 5.0123 0.0000
+0 2001 249 18.50 0.458 2.1950 7.7832 0.0000 0.0000 199.8110 542.8651 662.7396 4.1945 0.0000
+0 2001 250 5.50 0.542 2.2650 6.9238 48.5782 0.0000 438.2096 718.5987 426.2054 7.1746 0.0000
+0 2001 250 18.50 0.458 -0.8300 5.8341 0.0000 20.8000 92.9679 443.1268 624.6413 1.7564 0.0000
+0 2001 251 5.50 0.542 -3.8638 4.5381 11.6374 13.7000 12.3539 412.1336 572.7470 1.8681 0.0000
+0 2001 251 18.50 0.458 -2.6550 3.9945 0.0000 0.2000 221.7032 540.5366 411.6758 6.1264 0.0000
+0 2001 252 5.50 0.542 6.1119 3.6015 48.0434 0.4000 716.6854 545.4545 383.8654 4.4908 0.0000
+0 2001 252 18.50 0.458 8.2482 3.2600 0.0000 0.0000 950.2860 620.9833 288.8202 3.6136 0.0000
+0 2001 253 5.50 0.542 12.8842 3.0100 47.1661 0.0000 1426.1051 672.2451 223.5814 3.6581 0.0000
+0 2001 253 18.50 0.479 10.0252 2.8887 0.0000 0.0000 1094.0441 601.8044 287.2791 3.3513 0.0000
+0 2001 254 6.00 0.521 13.7184 3.7640 46.4891 0.0000 1383.2953 589.2695 350.4619 3.3460 0.0000
+0 2001 254 18.50 0.479 9.4983 3.4600 0.0000 0.0000 1001.2602 583.7369 337.7701 2.9626 0.0000
+0 2001 255 6.00 0.521 13.8796 6.6624 41.0508 0.0000 1276.1255 663.8398 473.7939 2.6596 0.0000
+0 2001 255 18.50 0.479 11.7209 6.1735 0.0000 0.0000 1060.3629 620.2714 470.8765 3.6517 0.0000
+0 2001 256 6.00 0.521 12.9244 8.4048 25.6502 5.8380 934.6992 527.7193 726.4916 3.2688 0.0000
+0 2001 256 18.50 0.479 8.1004 7.2861 0.0000 0.2540 365.1051 304.3487 863.4851 4.3148 0.0000
+0 2001 257 6.00 0.500 11.6637 8.5904 38.3839 1.0200 809.8690 549.9850 719.4404 3.4167 0.0000
+0 2001 257 18.00 0.500 5.0283 7.4142 0.0000 0.0000 93.1694 252.1138 925.8158 1.6696 0.0000
+0 2001 258 6.00 0.500 8.0000 7.9300 31.0446 0.5080 337.3834 330.4305 889.5978 2.6367 0.0000
+0 2001 258 18.00 0.500 6.0454 6.7479 0.0000 0.0000 271.7963 320.0387 811.7491 2.4183 0.0000
+0 2001 259 6.00 0.500 10.0167 7.2400 28.2105 0.0000 782.8611 568.4586 600.6701 2.8033 0.0000
+0 2001 259 18.00 0.500 7.0329 6.6667 0.0000 0.0000 463.5064 437.8913 686.2128 2.6862 0.0000
+0 2001 260 6.00 0.500 5.0379 6.7121 12.9186 4.0600 232.6803 341.1782 785.5716 2.3854 0.0000
+0 2001 260 18.00 0.500 4.3304 5.7329 0.0000 0.7620 248.6256 336.1325 724.3702 3.0592 0.0000
+0 2001 261 6.00 0.500 6.5075 6.7213 28.9915 0.2540 421.7065 431.8847 697.5013 4.5004 0.0000
+0 2001 261 18.00 0.500 5.8633 5.5550 0.0000 0.0000 465.3029 445.9673 604.3291 2.1729 0.0000
+0 2001 262 6.00 0.500 10.5013 7.2083 45.2857 0.0000 857.3999 593.8978 574.4849 4.1462 0.0000
+0 2001 262 18.00 0.500 9.1196 6.5571 0.0000 0.0000 800.4658 609.6576 507.1207 2.2787 0.0000
+0 2001 263 6.00 0.500 11.7004 7.2712 30.4340 0.0000 1064.3231 695.4380 473.0224 3.1988 0.0000
+0 2001 263 18.00 0.500 9.7579 5.7838 0.0000 0.0000 935.9982 642.3742 423.4736 3.6821 0.0000
+0 2001 264 6.00 0.500 13.6304 7.7192 41.1399 0.0000 1284.7285 766.6292 439.6227 3.9246 0.0000
+0 2001 264 18.00 0.500 9.8600 6.3196 0.0000 0.0000 981.5762 714.7541 386.7120 4.3604 0.0000
+0 2001 265 6.00 0.500 11.7079 7.6596 34.5272 0.0000 1076.6899 737.9058 462.0489 3.1913 0.0000
+0 2001 265 18.00 0.500 5.3233 5.6608 0.0000 0.0000 477.5412 499.8301 559.3000 2.3912 0.0000
+0 2001 266 6.00 0.500 11.3579 6.8438 42.1871 0.0000 996.6903 635.3782 508.3603 2.3829 0.0000
+0 2001 266 18.00 0.500 9.5108 5.5871 0.0000 0.0000 916.6816 628.3987 424.0675 3.7125 0.0000
+0 2001 267 6.00 0.500 14.6875 7.7529 40.9439 0.0000 1455.6355 827.1750 380.9783 3.8158 0.0000
+0 2001 267 18.00 0.500 10.1121 6.0279 0.0000 0.0000 993.4451 685.2977 396.7686 3.6788 0.0000
+0 2001 268 6.00 0.500 14.5621 7.6092 39.2297 0.0000 1444.9633 818.2773 380.0509 2.6000 0.0000
+0 2001 268 18.00 0.500 10.5588 6.3967 0.0000 0.0000 1023.7554 703.2394 402.6300 3.9267 0.0000
+0 2001 269 6.00 0.500 14.3283 8.3450 38.8906 0.0000 1359.9882 813.8570 436.6100 3.4975 0.0000
+0 2001 269 18.00 0.500 11.4471 6.9150 0.0000 0.0000 1085.7380 721.7523 420.9826 3.6775 0.0000
+0 2001 270 6.00 0.500 15.1217 8.4729 35.5709 0.0000 1456.0579 831.9340 429.4319 2.4308 0.0000
+0 2001 270 18.00 0.500 11.7604 6.6462 0.0000 0.0000 1101.9442 691.2066 433.2218 3.1679 0.0000
+0 2001 271 6.00 0.500 14.8779 7.9600 26.9519 0.0000 1388.2983 753.6956 467.7090 2.8187 0.0000
+0 2001 271 18.00 0.500 11.4846 7.6687 0.0000 0.0000 990.9850 675.3574 519.8597 3.5438 0.0000
+0 2001 272 6.00 0.500 12.2717 8.3154 24.0019 0.0000 1060.6768 710.4741 534.8799 2.9654 0.0000
+0 2001 272 18.00 0.500 6.7204 6.2500 0.0000 0.0000 448.3762 416.7483 680.5405 2.5146 0.0000
+0 2001 273 6.00 0.500 8.9362 6.7796 27.2236 1.0200 488.2220 326.0627 809.1458 2.1921 0.0000
+0 2001 273 18.00 0.500 7.5571 5.7017 0.0000 0.0000 574.0865 445.5829 614.3800 3.8171 0.0000
+0 2001 274 6.00 0.500 11.8604 6.9842 32.7481 0.0000 1044.5557 645.8098 504.9849 3.2442 0.0000
+0 2001 274 18.00 0.500 9.5688 6.5588 0.0000 0.0000 876.4250 649.3729 466.7300 5.4208 0.0000
+0 2001 275 6.00 0.500 11.4117 5.5400 34.0718 0.0000 1091.8354 632.3743 416.1443 5.2562 0.0000
+0 2001 275 18.00 0.500 7.1642 4.3137 0.0000 0.0000 730.8324 541.9736 429.3744 2.4462 0.0000
+0 2001 276 6.00 0.500 11.5029 4.8221 38.1694 0.0000 1187.5297 673.2100 329.5948 3.7783 0.0000
+0 2001 276 18.00 0.521 7.5000 4.3012 0.0000 0.0000 866.9924 651.3159 319.2787 2.6468 0.0000
+0 2001 277 6.50 0.479 6.8387 4.8578 35.8981 0.0000 694.1810 533.2571 471.7343 3.5248 0.0000
+0 2001 277 18.00 0.521 -5.0016 4.3012 0.0000 1.4360 15.2025 431.9788 538.6158 2.1532 0.0000
+0 2001 278 6.50 0.479 2.4413 4.8578 35.7021 0.7620 581.8379 681.7943 323.1971 2.5709 0.0000
+0 2001 278 18.00 0.521 3.4712 4.3012 0.0000 0.0000 812.3342 860.2521 110.3425 2.1844 0.0000
+0 2001 279 6.50 0.479 8.2552 4.8578 27.2480 0.0000 985.8082 745.5300 259.4614 3.6400 0.0000
+0 2001 279 18.00 0.521 6.8876 4.3012 0.0000 0.0000 747.8945 578.7513 391.8433 4.9772 0.0000
+0 2001 280 6.50 0.458 7.8677 4.8591 30.3930 0.0000 632.9808 424.8464 580.2469 3.0609 0.0000
+0 2001 280 17.50 0.542 4.9604 4.3215 0.0000 0.2540 368.2564 329.0791 642.7522 7.5331 0.0000
+0 2001 281 6.50 0.458 8.3159 5.6450 34.5100 0.0000 753.8532 563.0505 493.5142 5.0650 0.0000
+0 2001 281 17.50 0.542 4.0446 4.5731 0.0000 0.0000 357.1710 385.3086 602.8712 1.8008 0.0000
+0 2001 282 6.50 0.458 1.5559 4.1332 20.3924 2.2860 226.3508 362.9275 597.9686 5.1391 0.0000
+0 2001 282 17.50 0.542 -5.0177 2.5512 0.0000 1.0160 146.1524 470.9712 400.0014 6.9658 0.0000
+0 2001 283 6.50 0.458 -2.6368 1.9218 33.9554 0.0000 337.8746 541.1298 296.2062 4.0855 0.0000
+0 2001 283 17.50 0.542 -1.8188 1.2277 0.0000 0.0000 436.2751 571.7663 230.4539 6.8473 0.0000
+0 2001 284 6.50 0.458 1.2132 1.0841 22.9885 0.2540 553.5357 542.9228 252.0370 5.2395 0.0000
+0 2001 284 17.50 0.542 -4.2512 1.1996 0.0000 0.0000 173.8406 395.9292 404.7409 4.2415 0.0000
+0 2001 285 6.50 0.458 -5.0341 0.6764 30.2575 0.2540 261.3504 491.4659 283.5304 7.1027 0.0000
+0 2001 285 17.50 0.542 -5.0212 -0.1746 0.0000 1.7780 179.5109 369.1089 366.0094 15.6062 0.0000
+0 2001 286 6.50 0.458 -1.2827 0.2823 23.5939 5.5880 176.9225 245.8206 510.4841 13.5555 0.0000
+0 2001 286 17.50 0.542 -0.8662 0.6435 0.0000 0.2540 282.1584 351.2310 422.1414 10.3888 0.0000
+0 2001 287 6.50 0.458 2.8959 0.8027 23.7587 0.5080 499.2102 386.2603 394.8263 9.5677 0.0000
+0 2001 287 17.50 0.542 -1.8327 1.1381 0.0000 4.0640 169.9843 288.9997 508.5155 4.2115 0.0000
+0 2001 288 6.50 0.458 0.5809 1.2845 31.9232 0.0000 537.4465 561.9106 242.8863 5.6905 0.0000
+0 2001 288 17.50 0.542 2.0442 1.1404 0.0000 0.0000 729.1172 682.4810 115.1574 3.0600 0.0000
+0 2001 289 6.50 0.458 6.9918 1.1532 21.0298 0.0000 977.8143 623.2571 175.0186 4.6036 0.0000
+0 2001 289 17.50 0.542 6.3231 1.3546 0.0000 0.0000 864.7863 572.7427 235.5647 5.7131 0.0000
+0 2001 290 6.50 0.458 8.7100 1.4318 23.3024 0.0000 1011.4090 542.5248 269.6678 4.2995 0.0000
+0 2001 290 17.50 0.542 5.9196 1.4800 0.0000 0.0000 753.0875 487.8357 326.7923 9.0200 0.0000
+0 2001 291 6.50 0.458 5.1382 2.0714 27.7502 0.0000 681.0125 502.8296 342.7636 9.5973 0.0000
+0 2001 291 17.50 0.542 2.5577 1.3827 0.0000 0.0000 673.4581 610.8693 198.9970 13.7615 0.0000
+0 2001 292 6.50 0.458 5.0359 1.2950 25.2067 0.0000 738.4168 518.0563 287.3836 9.4309 0.0000
+0 2001 292 17.50 0.542 5.3204 1.6588 0.0000 0.0000 745.1237 533.5106 290.2279 10.9004 0.0000
+0 2001 293 6.50 0.458 7.1691 2.8305 26.8731 0.0000 885.4284 599.7552 288.1013 3.3255 0.0000
+0 2001 293 17.50 0.542 4.9115 2.4346 0.0000 0.0000 676.1699 528.9277 336.0966 3.8038 0.0000
+0 2001 294 6.50 0.458 6.7732 2.9191 18.6032 0.0000 838.3231 595.7549 297.1150 3.2573 0.0000
+0 2001 294 17.50 0.542 2.2131 2.7262 0.0000 1.0160 312.0703 338.2870 542.1802 6.3927 0.0000
+0 2001 295 6.50 0.458 1.4805 2.1700 13.7583 7.1160 213.1477 247.1235 603.2606 7.6300 0.0000
+0 2001 295 17.50 0.542 -0.0208 1.6546 0.0000 0.2540 270.4247 351.7147 471.8581 12.7996 0.0000
+0 2001 296 6.50 0.458 2.5827 1.4364 11.1125 2.0320 450.2365 387.8813 424.5454 7.8445 0.0000
+0 2001 296 17.50 0.542 -6.8812 1.4273 0.0000 4.0680 122.5096 435.5511 376.4325 4.8465 0.0000
+0 2001 297 6.50 0.458 -7.0155 0.9782 24.1828 0.0000 274.5868 575.4852 214.1432 7.9559 0.0000
+0 2001 297 17.50 0.542 -5.0342 0.6650 0.0000 0.0000 361.0190 591.3088 183.1207 11.9827 0.0000
+0 2001 298 6.50 0.458 0.1282 0.5800 28.8997 0.0000 553.1638 567.8788 202.4711 6.0686 0.0000
+0 2001 298 17.50 0.542 -0.3142 0.5508 0.0000 0.0000 457.9588 497.7230 271.2161 4.1923 0.0000
+0 2001 299 6.50 0.458 4.7168 0.6186 29.6228 0.0000 733.3558 505.2384 266.9726 2.5573 0.0000
+0 2001 299 17.50 0.562 5.1641 0.8259 0.0000 0.0000 802.1412 560.1157 222.0735 4.2737 0.0000
+0 2001 300 7.00 0.438 8.1548 0.9024 17.0224 0.0000 921.9206 471.7490 314.1681 6.1457 0.0000
+0 2001 300 17.50 0.562 7.7026 1.0922 0.0000 0.0000 718.2555 315.8490 479.4062 4.9248 0.0000
+0 2001 301 7.00 0.438 8.8257 2.8314 19.4044 0.0000 784.6247 384.9629 502.6656 3.4590 0.0000
+0 2001 301 17.50 0.562 6.3519 2.7181 0.0000 0.0000 535.5237 312.5019 567.9311 3.0252 0.0000
+0 2001 302 7.00 0.438 9.1605 3.7110 20.7679 0.0000 788.8687 414.9796 523.4453 1.9943 0.0000
+0 2001 302 17.50 0.562 5.9563 2.8722 0.0000 0.0000 607.1140 419.5601 469.5059 2.4711 0.0000
+0 2001 303 7.00 0.438 8.8848 3.5081 20.4363 0.0000 947.4896 578.3318 347.5687 2.9129 0.0000
+0 2001 303 17.50 0.562 7.2274 3.3770 0.0000 0.5080 781.8205 534.8939 381.7071 8.3748 0.0000
+0 2001 304 7.00 0.417 2.7965 3.1790 18.5564 4.0680 314.6348 329.6631 575.7313 9.3115 0.0000
+0 2001 304 17.00 0.583 -0.1029 1.5239 0.0000 0.5080 287.0133 365.9557 451.4255 15.0414 0.0000
+0 2001 305 7.00 0.417 0.9730 0.7960 22.3054 0.0000 403.7090 391.9164 388.8299 3.8650 0.0000
+0 2001 305 17.00 0.583 -0.0146 0.7789 0.0000 0.0000 309.3548 346.3925 433.5218 2.9029 0.0000
+0 2001 306 7.00 0.417 3.6975 1.1105 27.3515 0.0000 540.9983 396.7050 399.7425 4.3560 0.0000
+0 2001 306 17.00 0.583 2.8893 0.7886 0.0000 0.0000 518.5300 409.5834 370.9285 1.9225 0.0000
+0 2001 307 7.00 0.417 4.5045 0.9185 26.2152 0.0000 559.2524 361.1441 425.8329 2.2275 0.0000
+0 2001 307 17.00 0.583 2.7821 0.4775 0.0000 0.0000 588.0076 467.2514 298.4050 2.4668 0.0000
+0 2001 308 7.00 0.417 9.1270 0.3765 26.1747 0.0000 1058.5566 508.5988 252.1902 2.7170 0.0000
+0 2001 308 17.00 0.583 5.4629 0.3029 0.0000 0.0000 697.4981 411.3094 345.8751 4.2550 0.0000
+0 2001 309 7.00 0.417 8.1265 0.9215 25.3325 0.0000 825.4551 382.5984 404.5610 4.3955 0.0000
+0 2001 309 17.00 0.583 4.4871 0.8746 0.0000 0.0000 488.6216 290.4922 494.2119 3.7461 0.0000
+0 2001 310 7.00 0.417 6.2935 1.4345 18.8231 0.0000 582.2806 295.6795 517.8819 2.1965 0.0000
+0 2001 310 17.00 0.583 4.3436 1.2443 0.0000 0.0000 448.4554 278.0731 525.0862 2.7196 0.0000
+0 2001 311 7.00 0.417 4.6675 1.9920 13.0750 0.0000 454.8118 299.7628 541.8023 2.8435 0.0000
+0 2001 311 17.00 0.583 -4.4761 1.3321 0.0000 11.4360 20.6917 262.9334 544.5627 1.9439 0.0000
+0 2001 312 7.00 0.417 -0.5925 0.8745 24.4170 0.2540 236.9507 302.2403 482.3131 1.4550 0.0000
+0 2001 312 17.00 0.583 0.3125 0.3789 0.0000 0.0000 706.7380 704.9717 55.8975 3.9464 0.0000
+0 2001 313 7.00 0.417 7.4120 0.7455 24.9269 0.0000 1107.3053 706.6828 72.3722 2.0335 0.0000
+0 2001 313 17.00 0.583 2.6729 0.7950 0.0000 0.0000 830.3564 733.2029 47.5905 3.2600 0.0000
+0 2001 314 7.00 0.417 5.7265 1.0185 24.6582 0.0000 945.5798 675.5081 116.3749 2.1695 0.0000
+0 2001 314 17.00 0.583 1.9593 0.9558 0.0000 0.0000 602.7672 551.6143 236.9686 3.2768 0.0000
+0 2001 315 7.00 0.417 3.0195 0.6341 21.6265 0.0000 500.2351 374.8289 398.1184 3.3150 0.0000
+0 2001 315 17.00 0.583 2.3089 0.9358 0.0000 0.0000 400.3295 329.9439 457.6292 2.5893 0.0000
+0 2001 316 7.00 0.417 4.3370 0.8430 23.9195 0.0000 571.5919 381.0266 402.0424 2.9105 0.0000
+0 2001 316 17.00 0.583 2.4664 0.8463 0.0000 0.0000 521.2878 437.8593 345.3296 2.2436 0.0000
+0 2001 317 7.00 0.417 5.2370 0.6882 14.4895 0.0000 775.9974 519.7341 255.7991 2.0265 0.0000
+0 2001 317 17.00 0.583 1.3771 0.8033 0.0000 0.0000 460.0084 430.1445 350.9787 2.1493 0.0000
+0 2001 318 7.00 0.417 3.5845 0.7084 21.4009 0.0000 570.7963 414.7031 361.8170 2.0855 0.0000
+0 2001 318 17.00 0.583 1.6271 0.9260 0.0000 0.0000 473.0055 437.6034 349.4770 2.6486 0.0000
+0 2001 319 7.00 0.417 5.5150 1.0111 23.0918 0.0000 639.7097 381.6636 409.5980 2.3595 0.0000
+0 2001 319 17.00 0.583 3.0689 1.1217 0.0000 0.0000 526.4070 423.6049 373.0988 2.7682 0.0000
+0 2001 320 7.00 0.417 6.4000 1.0409 22.8375 0.0000 745.5531 430.0761 362.6533 2.0655 0.0000
+0 2001 320 17.00 0.583 2.5321 1.0338 0.0000 0.0000 504.8765 427.4518 364.9111 3.6500 0.0000
+0 2001 321 7.00 0.417 4.9555 0.8151 22.7860 0.0000 705.2313 475.0252 306.6768 4.9935 0.0000
+0 2001 321 17.00 0.583 1.3764 0.8203 0.0000 0.0000 423.8432 393.4766 388.4485 6.8471 0.0000
+0 2001 322 7.00 0.417 -2.2350 0.4386 11.4466 4.0680 156.1576 266.2279 497.4763 2.5125 0.0000
+0 2001 322 17.00 0.583 -6.7475 0.3272 0.0000 1.7780 83.2749 351.0347 407.3222 2.0032 0.0000
+0 2001 323 7.00 0.417 2.8220 0.4094 22.5113 0.2540 741.0612 608.7009 153.5464 3.9335 0.0000
+0 2001 323 17.00 0.583 4.3175 0.5389 0.0000 0.0000 930.1639 726.6811 41.7127 7.5554 0.0000
+0 2001 324 7.00 0.417 7.3675 0.6987 22.1116 0.0000 1136.9467 738.0701 38.0323 7.3685 0.0000
+0 2001 324 17.00 0.583 2.9964 0.3985 0.0000 0.0000 856.8077 721.2459 40.4673 2.9996 0.0000
+0 2001 325 7.00 0.417 2.1435 0.1499 17.8166 0.0000 694.4730 594.4899 155.6082 5.8335 0.0000
+0 2001 325 17.00 0.583 -0.6707 0.2972 0.0000 0.0000 400.9067 444.9487 311.9926 4.4621 0.0000
+0 2001 326 7.00 0.417 -0.7515 0.5665 11.6062 0.5080 182.9590 242.3970 527.3027 5.2045 0.0000
+0 2001 326 17.00 0.583 -3.4082 0.6284 0.0000 0.5080 71.0213 240.3717 532.3030 1.7643 0.0000
+0 2001 327 7.00 0.417 -5.2435 -0.0401 13.6370 0.0000 212.6991 416.6605 324.6315 5.7210 0.0000
+0 2001 327 17.00 0.583 -8.2868 -0.1783 0.0000 0.0000 136.6124 425.1331 309.7600 7.4768 0.0000
+0 2001 328 7.00 0.417 -7.7270 -0.3125 19.4760 0.0000 223.1981 489.9283 238.9989 6.5820 0.0000
+0 2001 328 17.00 0.604 -6.1745 0.0054 0.0000 0.0000 182.2548 416.6782 326.8921 1.6003 0.0000
+0 2001 329 7.50 0.396 -4.8479 -0.0620 14.8819 1.5240 188.3063 372.2788 368.0491 1.9726 0.0000
+0 2001 329 17.00 0.604 -10.9997 -0.4847 0.0000 4.0680 76.3818 418.9245 302.1457 7.6634 0.0000
+0 2001 330 7.50 0.396 -10.0795 -0.3383 20.9759 0.0000 176.7938 504.6258 223.1238 5.3068 0.0000
+0 2001 330 17.00 0.604 -14.6676 -0.6708 0.0000 0.0000 37.3484 444.4253 268.3278 1.0766 0.0000
+0 2001 331 7.50 0.396 -16.2289 -0.8063 7.6261 0.0000 39.3481 467.9543 238.9309 2.0074 0.0000
+0 2001 331 17.00 0.604 -17.8038 -0.8225 0.0000 0.0000 44.3751 494.9735 211.1596 2.7069 0.0000
+0 2001 332 7.50 0.396 -9.9616 -0.5607 20.5738 0.0000 246.6907 561.1879 156.5643 4.4174 0.0000
+0 2001 332 17.00 0.604 -6.9476 -0.8786 0.0000 0.0000 355.7054 573.4133 130.3441 6.8000 0.0000
+0 2001 333 7.50 0.396 -2.8895 -0.1655 16.8854 0.0000 295.8377 408.4260 327.1270 7.6405 0.0000
+0 2001 333 17.00 0.604 -8.7203 -0.7315 0.0000 0.0000 172.2273 446.4988 263.6265 7.1186 0.0000
+0 2001 334 7.50 0.375 -9.2717 -0.6928 9.2709 1.0160 153.7626 445.5675 266.2426 5.4650 0.0000
+0 2001 334 16.50 0.625 -10.3290 -1.3947 0.0000 0.2540 123.6940 412.0508 270.0654 7.8090 0.0000
+0 2001 335 7.50 0.375 -5.7550 -2.2161 20.7392 0.0000 371.5794 492.2975 155.2364 10.4861 0.0000
+0 2001 335 16.50 0.625 -4.7313 -2.1160 0.0000 0.2540 214.6381 311.6067 339.9615 12.1713 0.0000
+0 2001 336 7.50 0.375 -0.5317 -2.2161 19.2949 0.0000 327.5536 251.1988 396.3351 8.8728 0.0000
+0 2001 336 16.50 0.625 0.3273 -2.1160 0.0000 0.0000 335.5337 228.1590 423.4093 9.1803 0.0000
+0 2001 337 7.50 0.375 1.7294 -2.2161 19.8027 0.0000 570.5707 389.9246 257.6093 7.3983 0.0000
+0 2001 337 16.50 0.625 -1.6667 -2.1160 0.0000 0.0000 438.9117 419.3965 232.1717 2.4373 0.0000
+0 2001 338 7.50 0.375 -0.4506 -2.2161 13.9347 0.0000 479.2878 403.1141 244.4198 2.2444 0.0000
+0 2001 338 16.50 0.625 -7.7140 -2.1160 0.0000 0.0000 161.3763 337.5945 313.9738 6.5413 0.0000
+0 2001 339 7.50 0.375 -9.5489 -2.2161 18.9461 0.0000 216.4154 448.4348 199.0990 4.8639 0.0000
+0 2001 339 16.50 0.625 -8.4470 -2.1160 0.0000 0.5080 165.1297 375.4381 276.1302 8.9883 0.0000
+0 2001 340 7.50 0.375 -4.5594 -2.2161 16.0436 0.0000 249.6877 335.8941 311.6398 10.2494 0.0000
+0 2001 340 16.50 0.625 -6.9963 -2.1160 0.0000 3.0480 126.8066 293.0769 358.4914 5.5313 0.0000
+0 2001 341 7.50 0.375 -10.2011 -2.1356 5.2022 2.2860 94.1142 347.8541 302.9331 2.5333 0.0000
+0 2001 341 16.50 0.625 -11.1757 -2.1147 0.0000 0.0000 162.8545 438.1134 213.5121 9.0417 0.0000
+0 2001 342 7.50 0.375 -1.5483 -2.2344 19.0037 0.0000 482.2308 443.0227 203.7746 6.1189 0.0000
+0 2001 342 16.50 0.625 -6.9217 -2.1237 0.0000 0.0000 243.6478 400.6883 250.5692 4.2887 0.0000
+0 2001 343 7.50 0.375 -0.7756 -2.1683 10.0229 0.0000 558.6936 497.4023 152.0543 7.2883 0.0000
+0 2001 343 16.50 0.625 -9.1210 -2.0373 0.0000 0.0000 255.4031 464.5058 190.2548 3.2933 0.0000
+0 2001 344 7.50 0.375 -4.8039 -2.3306 17.0411 0.0000 330.1845 409.5947 233.3594 2.1956 0.0000
+0 2001 344 16.50 0.625 -8.2753 -2.3013 0.0000 0.2540 182.9191 377.5167 266.6054 4.4017 0.0000
+0 2001 345 7.50 0.375 -8.9167 -2.4528 19.2717 0.0000 230.2382 438.7529 199.3436 5.6000 0.0000
+0 2001 345 16.50 0.625 -12.1197 -2.5837 0.0000 0.2540 89.0611 367.5146 265.4288 1.8733 0.0000
+0 2001 346 7.50 0.375 -11.1028 -2.7156 15.6296 0.0000 78.6971 329.7684 298.0148 1.9994 0.0000
+0 2001 346 16.50 0.625 -11.1747 -2.7257 0.0000 0.0000 210.7552 460.7971 166.5983 8.2820 0.0000
+0 2001 347 7.50 0.375 -7.1972 -2.8256 15.8042 0.0000 289.3601 434.5900 188.9309 11.0461 0.0000
+0 2001 347 16.50 0.625 -7.8613 -2.7303 0.0000 0.5080 130.2005 299.3788 327.8293 15.6677 0.0000
+0 2001 348 7.50 0.375 -2.2700 -2.6261 10.2523 0.0000 404.7907 387.4475 243.8313 5.5256 0.0000
+0 2001 348 16.50 0.625 -3.8147 -2.4953 0.0000 0.0000 439.4619 488.3022 148.1211 2.7113 0.0000
+0 2001 349 7.50 0.375 -6.9456 -2.7406 7.8730 1.7780 204.6031 346.3389 280.4725 2.3467 0.0000
+0 2001 349 16.50 0.625 -11.3087 -2.5930 0.0000 3.5600 74.6400 335.0497 297.5237 4.7657 0.0000
+0 2001 350 7.50 0.375 -6.1767 -2.6083 18.8143 0.0000 327.5959 448.9439 183.0265 10.2022 0.0000
+0 2001 350 16.50 0.625 -3.4170 -2.5153 0.0000 0.0000 330.6207 363.6007 272.0284 8.6610 0.0000
+0 2001 351 7.50 0.375 -0.7261 -2.3656 18.2801 0.0000 513.3194 440.4197 201.1386 8.3556 0.0000
+0 2001 351 16.50 0.625 -3.3160 -2.1987 0.0000 0.0000 401.8084 436.1066 212.1295 6.9997 0.0000
+0 2001 352 7.50 0.375 -8.3761 -2.2200 16.5006 0.0000 228.2413 431.8651 215.5117 8.2650 0.0000
+0 2001 352 16.50 0.625 -9.5760 -2.4337 0.0000 0.0000 252.7222 478.6823 160.1846 12.1900 0.0000
+0 2001 353 7.50 0.375 -5.1278 -2.6128 16.4241 0.0000 297.0533 386.5074 245.2890 12.2067 0.0000
+0 2001 353 16.50 0.625 -3.4417 -2.4857 0.0000 0.0000 426.5782 463.0894 173.7075 9.0183 0.0000
+0 2001 354 7.50 0.375 2.2289 -2.4100 16.1978 0.0000 702.0258 485.1287 154.6662 3.5061 0.0000
+0 2001 354 16.50 0.625 1.5697 -2.1483 0.0000 0.0000 719.3257 548.5916 101.6729 5.0637 0.0000
+0 2001 355 7.50 0.375 -2.7239 -2.0600 11.6667 0.0000 419.7816 438.4539 215.3811 5.5956 0.0000
+0 2001 355 16.50 0.625 -10.1273 -2.1297 0.0000 0.0000 137.2533 386.2808 264.7436 8.0503 0.0000
+0 2001 356 7.50 0.375 -12.1794 -2.4433 13.1521 0.0000 138.0784 424.1881 214.2873 14.5833 0.0000
+0 2001 356 16.50 0.625 -10.9270 -2.7017 0.0000 0.0000 155.7267 403.2490 225.0842 11.0437 0.0000
+0 2001 357 7.50 0.375 -5.4872 -2.8706 18.4799 0.0000 314.1278 405.9695 215.8106 2.3767 0.0000
+0 2001 357 16.50 0.625 -8.6923 -2.9503 0.0000 0.0000 281.6942 465.4507 153.2852 3.0210 0.0000
+0 2001 358 7.50 0.375 -7.3483 -3.3183 17.9663 0.0000 306.6717 439.2599 165.5077 6.3911 0.0000
+0 2001 358 16.50 0.625 -9.6113 -3.2560 0.0000 0.0000 259.0557 455.0012 152.1049 9.5547 0.0000
+0 2001 359 7.50 0.375 -6.8328 -3.2867 17.5432 0.0000 337.1719 455.3263 150.6280 11.5278 0.0000
+0 2001 359 16.50 0.625 -6.3840 -3.1483 0.0000 0.0000 323.4178 433.0503 178.1189 8.9790 0.0000
+0 2001 360 7.50 0.375 -2.2350 -3.0267 18.5913 0.0000 461.4534 428.6508 187.1451 6.5661 0.0000
+0 2001 360 16.50 0.625 -6.2463 -2.8797 0.0000 0.0000 323.6900 438.7155 182.7135 5.2160 0.0000
+0 2001 361 7.50 0.375 -6.2983 -2.9972 18.8298 0.0000 308.2633 420.8646 196.0539 7.3289 0.0000
+0 2001 361 16.50 0.625 -7.9127 -3.0763 0.0000 0.7620 228.4251 386.3675 227.5378 14.3590 0.0000
+0 2001 362 7.50 0.375 -6.6539 -3.1239 6.9327 1.2700 126.2035 245.9315 366.1667 9.2117 0.0000
+0 2001 362 16.50 0.625 -8.6000 -2.8980 0.0000 0.2540 130.3240 313.8062 306.9189 8.6887 0.0000
+0 2001 363 7.50 0.375 -5.8689 -2.8494 18.3029 0.0000 266.0555 371.5833 251.0109 8.1406 0.0000
+0 2001 363 16.50 0.625 -11.0377 -2.8057 0.0000 0.0000 66.9965 310.9845 313.3015 1.2773 0.0000
+0 2001 364 7.50 0.375 -8.4283 -2.8717 15.8893 0.0000 146.7548 325.2561 296.4810 2.0750 0.0000
+0 2001 364 16.50 0.625 -10.1833 -2.8550 0.0000 0.0000 134.2591 359.1524 263.2276 4.2307 0.0000
+0 2001 365 7.50 0.375 -7.5606 -2.9550 19.1317 0.0000 263.2244 414.4195 204.1151 5.4911 0.0000
+0 2001 365 16.50 0.625 -12.1090 -2.9533 0.0000 1.0160 62.3212 323.9718 294.6261 1.3713 0.3191
+0 2002 1 7.50 0.375 -12.6300 -2.9839 4.4770 0.0000 36.7656 310.7697 306.6580 1.4700 0.3191
+0 2002 1 16.50 0.625 -13.9273 -3.0230 0.0000 0.2540 158.1583 454.4245 161.5121 3.2700 0.3197
+0 2002 2 7.50 0.375 -4.2528 -3.2722 19.3144 0.0000 506.3784 534.5837 71.9160 5.1239 0.3225
+0 2002 2 16.50 0.625 -3.0940 -3.2283 0.0000 0.0000 434.9096 427.5009 180.6497 9.7753 0.3193
+0 2002 3 7.50 0.375 0.2928 -3.0039 19.2546 0.0000 467.0577 324.0852 292.5840 6.3006 0.3175
+0 2002 3 16.50 0.625 -3.4630 -2.6980 0.0000 0.0000 392.6461 417.5837 210.8885 5.3510 0.3167
+0 2002 4 7.50 0.375 -7.6867 -2.5972 7.6970 0.0000 157.2716 327.4367 304.9691 2.4200 0.3196
+0 2002 4 16.50 0.625 -10.9430 -2.6613 0.0000 0.0000 183.5343 433.5551 196.3520 7.4523 0.3194
+0 2002 5 7.50 0.375 -8.5628 -2.9528 16.2891 0.0000 271.1481 450.7495 167.8719 12.7433 0.3194
+0 2002 5 16.50 0.625 -9.4323 -3.0187 0.0000 0.0000 128.6263 328.9417 287.1574 12.2313 0.3194
+0 2002 6 7.50 0.375 -4.7294 -3.0611 17.8873 0.0000 254.2641 313.4348 301.0460 10.1883 0.3191
+0 2002 6 16.50 0.625 -3.6647 -2.8610 0.0000 0.0000 158.5532 187.2554 434.8992 12.7830 0.3167
+0 2002 7 7.50 0.375 0.4022 -2.6367 17.6958 0.0000 319.4833 185.9118 444.9550 10.5417 0.3156
+0 2002 7 16.50 0.625 0.5363 -2.3450 0.0000 0.0000 425.3777 297.1203 345.2622 8.4770 0.3173
+0 2002 8 7.50 0.375 3.7528 -2.1889 14.0942 0.0000 697.7202 405.0575 243.5713 6.1039 0.3191
+0 2002 8 16.50 0.625 2.4547 -1.9427 0.0000 0.5080 437.0160 227.9379 430.6826 10.2330 0.3209
+0 2002 9 7.50 0.375 -0.5217 -1.7422 12.1529 3.8100 164.2166 111.3376 555.5225 7.8989 0.3232
+0 2002 9 16.50 0.625 -7.0363 -1.6250 0.0000 10.6840 39.5369 224.8116 446.9157 1.8423 0.3226
+0 2002 10 7.50 0.375 -5.6756 -1.6722 16.3566 0.2540 178.0800 320.1438 349.6173 2.3044 0.3225
+0 2002 10 16.50 0.625 -4.7633 -1.7190 0.0000 0.0000 366.5452 479.8776 187.9419 7.9073 0.3247
+0 2002 11 7.50 0.375 -3.7789 -1.7789 18.4633 0.0000 375.3809 452.5634 212.7778 8.9539 0.3246
+0 2002 11 16.50 0.625 -4.9940 -1.7963 0.0000 0.0000 291.7916 410.6801 253.9412 4.9267 0.3241
+0 2002 12 7.50 0.375 -1.6578 -1.8417 16.8498 0.0000 321.3640 312.9124 349.8414 11.9772 0.3241
+0 2002 12 16.50 0.625 -5.5513 -1.7883 0.0000 3.0480 214.0552 334.1070 330.8452 10.6000 0.3251
+0 2002 13 7.50 0.375 -10.9589 -1.7700 16.4680 0.2540 124.9425 411.5315 254.1770 10.7078 0.3251
+0 2002 13 16.50 0.625 -12.7127 -1.8647 0.0000 0.0000 150.0419 470.1883 191.6229 8.3470 0.3235
+0 2002 14 7.50 0.396 -5.7842 -2.0337 19.9861 0.0000 445.7775 571.2520 83.6535 4.1363 0.3234
+0 2002 14 17.00 0.604 -6.7307 -2.1317 0.0000 0.0000 365.1228 520.8406 130.0912 3.0534 0.3234
+0 2002 15 7.50 0.396 -3.4253 -2.1995 9.4999 0.0000 441.3492 487.9064 160.2948 4.0868 0.3234
+0 2002 15 17.00 0.604 -10.5931 -2.1455 0.0000 0.0000 133.7868 391.5840 258.7911 3.9645 0.3256
+0 2002 16 7.50 0.396 -9.0842 -2.1868 19.9933 0.0000 250.1346 472.8084 175.9021 9.1700 0.3245
+0 2002 16 17.00 0.604 -11.2000 -2.2728 0.0000 0.7620 111.0476 381.1168 264.1444 5.9148 0.3244
+0 2002 17 7.50 0.396 -11.6674 -2.3511 19.8121 0.0000 192.9355 471.0459 171.0899 6.6432 0.3241
+0 2002 17 17.00 0.604 -12.5266 -2.4607 0.0000 0.0000 119.5079 411.1181 226.6671 2.8186 0.3243
+0 2002 18 7.50 0.396 -13.6174 -2.5837 16.4745 0.0000 137.9221 445.8818 187.0555 7.2937 0.3212
+0 2002 18 17.00 0.604 -16.6883 -2.6679 0.0000 0.0000 147.0878 499.5063 130.1358 11.2507 0.3208
+0 2002 19 7.50 0.396 -6.6237 -2.8505 21.7413 0.0000 390.1505 518.4466 104.1070 9.3679 0.3205
+0 2002 19 17.00 0.583 -10.0839 -2.8636 0.0000 0.0000 270.2716 485.9064 136.1427 7.2593 0.3195
+0 2002 20 7.00 0.417 -15.0620 -2.8860 16.1181 0.5080 113.3813 436.8531 184.3324 13.5500 0.3200
+0 2002 20 17.00 0.583 -11.6100 -2.9936 0.0000 0.5080 137.3888 388.5565 228.5024 16.5796 0.3191
+0 2002 21 7.00 0.417 -6.3270 -3.0550 21.4225 0.0000 232.7196 341.5994 273.1135 11.8460 0.3191
+0 2002 21 17.00 0.583 -4.5546 -2.9443 0.0000 0.0000 330.1053 388.7057 230.2417 10.2243 0.3191
+0 2002 22 7.00 0.417 -4.4980 -2.8280 15.0768 0.0000 261.7557 322.1241 301.2990 6.3220 0.3191
+0 2002 22 17.00 0.583 -12.4629 -2.6646 0.0000 3.8140 46.0240 322.5083 307.2618 2.0082 0.3191
+0 2002 23 7.00 0.417 -15.6115 -2.6005 21.1813 2.2860 100.7026 444.3969 187.8803 3.5360 0.3191
+0 2002 23 17.00 0.583 -17.3114 -2.6318 0.0000 0.2540 122.6889 491.0748 139.9776 12.0314 0.3191
+0 2002 24 7.00 0.417 -7.5635 -2.7255 23.2279 0.0000 311.9582 467.0164 160.3802 9.5280 0.3191
+0 2002 24 17.00 0.583 -5.2068 -2.7400 0.0000 0.0000 318.5981 404.9594 221.8730 12.2254 0.3191
+0 2002 25 7.00 0.417 -1.1165 -2.6715 13.4237 0.0000 518.9547 454.0323 175.4684 9.1675 0.3191
+0 2002 25 17.00 0.583 -2.0443 -2.5361 0.0000 0.0000 504.2538 483.4929 151.3179 3.2021 0.3191
+0 2002 26 7.00 0.417 1.4970 -2.4445 23.6300 0.0000 674.5472 496.3089 142.1151 10.4515 0.3191
+0 2002 26 17.00 0.583 0.0454 -2.3250 0.0000 0.0000 662.9691 560.0861 83.0892 11.4411 0.3191
+0 2002 27 7.00 0.417 1.2260 -2.2480 22.2561 0.0000 721.4288 563.8233 82.4295 8.2190 0.3191
+0 2002 27 17.00 0.583 -1.5061 -2.1711 0.0000 0.0000 531.0379 502.5055 146.8396 3.9589 0.3191
+0 2002 28 7.00 0.417 -1.6705 -2.1300 19.4209 0.0000 528.8430 508.5673 142.4339 4.8815 0.3191
+0 2002 28 17.00 0.583 -6.2300 -2.0693 0.0000 0.0000 347.0894 489.9207 163.5376 2.5843 0.3191
+0 2002 29 7.00 0.417 -11.6580 -2.1050 10.8259 0.2540 37.1294 325.3592 326.6530 1.8465 0.3094
+0 2002 29 17.00 0.583 -15.9221 -2.1854 0.0000 1.7780 38.7354 401.8781 246.8919 1.6529 0.3220
+0 2002 30 7.00 0.417 -16.1235 -2.2885 16.2475 1.7780 40.9924 404.9542 239.6779 1.6205 0.3198
+0 2002 30 17.00 0.583 -18.9375 -2.3807 0.0000 0.0000 39.7104 441.3738 199.5814 2.1100 0.3196
+0 2002 31 7.00 0.417 -15.4280 -2.5000 25.6590 0.0000 151.7567 493.5426 142.6895 5.9890 0.3196
+0 2002 31 17.00 0.583 -12.6586 -2.6382 0.0000 0.2540 177.4872 464.8235 165.9784 6.5071 0.3386
+0 2002 32 7.00 0.417 -7.5985 -2.7300 23.5422 0.0000 319.2228 478.4070 148.8143 3.3420 0.3572
+0 2002 32 17.00 0.583 -7.7604 -2.7461 0.0000 0.0000 332.3021 498.2097 128.3866 4.1368 0.3591
+0 2002 33 7.00 0.417 -5.0340 -2.7570 22.0259 0.0000 438.6937 519.4528 106.7191 2.8465 0.3589
+0 2002 33 17.00 0.583 -9.2429 -2.7254 0.0000 0.0000 246.4222 452.9843 174.4174 3.0225 0.3612
+0 2002 34 7.00 0.417 -4.5145 -2.7595 26.1383 0.0000 432.0089 492.4863 133.5889 3.8440 0.3594
+0 2002 34 17.00 0.583 -6.4154 -2.7382 0.0000 0.0000 374.8224 501.5310 125.3709 4.9686 0.3608
+0 2002 35 7.00 0.417 -6.7500 -2.7430 24.5587 0.0000 351.4367 486.4259 140.2897 5.3220 0.3606
+0 2002 35 17.00 0.583 -14.7996 -2.7189 0.0000 0.0000 166.4538 490.4921 137.1597 4.3054 0.3643
+0 2002 36 7.00 0.417 -8.6910 -2.8505 27.1903 0.0000 318.9546 498.1717 124.3856 2.1745 0.3423
+0 2002 36 17.00 0.583 -5.4296 -2.9704 0.0000 0.0000 438.8439 523.2661 94.6799 5.2604 0.3605
+0 2002 37 7.00 0.417 -1.5060 -3.0210 27.6608 0.0000 538.8751 475.1347 140.8748 3.9510 0.3568
+0 2002 37 17.00 0.583 -4.3332 -2.8864 0.0000 0.0000 469.3839 521.0452 100.1260 4.5839 0.3646
+0 2002 38 7.00 0.417 -0.1595 -2.8375 24.1792 0.0000 683.6117 569.9365 53.1186 5.9585 0.3596
+0 2002 38 17.00 0.583 -3.0525 -2.7421 0.0000 0.0000 531.6155 543.0595 83.6914 7.4025 0.3634
+0 2002 39 7.00 0.417 -6.0995 -2.6675 20.8773 0.7620 286.9739 399.2915 230.3648 9.7920 0.3619
+0 2002 39 17.00 0.583 -14.2161 -2.6050 0.0000 1.0160 95.1894 414.8040 217.2971 12.3768 0.3652
+0 2002 40 7.00 0.417 -12.0305 -2.6880 9.8082 0.0000 151.6890 424.6886 204.1690 7.2450 0.3615
+0 2002 40 17.00 0.583 -13.7179 -2.7936 0.0000 0.0000 197.1767 498.8603 125.8953 4.2539 0.3643
+0 2002 41 7.00 0.417 -3.2545 -2.9805 29.0731 0.0000 523.9203 517.8943 99.6674 3.5690 0.3600
+0 2002 41 17.00 0.583 0.4814 -3.0300 0.0000 0.0000 642.3299 490.5268 125.1409 9.2832 0.3584
+0 2002 42 7.00 0.417 -0.4355 -2.8645 24.0914 0.0000 610.5659 508.8848 113.1313 9.6170 0.3621
+0 2002 42 17.00 0.583 -5.9832 -2.6686 0.0000 0.0000 336.3176 440.6346 188.9828 4.4421 0.3634
+0 2002 43 7.00 0.417 -12.2225 -2.6240 23.6018 0.0000 80.0635 355.9301 275.4269 2.4005 0.3626
+0 2002 43 17.00 0.583 -11.5939 -2.7093 0.0000 0.0000 190.4015 447.8406 180.1883 4.5625 0.3649
+0 2002 44 7.00 0.438 -1.5648 -2.8738 29.8604 0.0000 626.4771 571.3596 50.2963 4.8833 0.3574
+0 2002 44 17.50 0.562 -4.6511 -2.8504 0.0000 1.5240 455.9957 520.4984 102.0605 13.8030 0.3619
+0 2002 45 7.00 0.438 -8.7495 -2.8119 6.6571 3.5560 91.0344 279.8834 344.1609 3.4033 0.3621
+0 2002 45 17.50 0.562 -11.7004 -2.7681 0.0000 1.5240 138.2047 400.5809 225.1588 4.1126 0.3625
+0 2002 46 7.00 0.438 -2.3967 -2.7690 30.6142 0.5080 529.3469 509.1461 116.5584 3.8895 0.3601
+0 2002 46 17.50 0.562 -2.8633 -2.7511 0.0000 0.0000 564.3046 567.9272 58.4738 5.2678 0.3642
+0 2002 47 7.00 0.438 2.2386 -2.7295 29.8998 0.0000 793.8089 563.3561 63.8833 2.9014 0.3601
+0 2002 47 17.50 0.542 -2.4992 -2.6585 0.0000 0.0000 482.6752 475.9932 154.0179 3.7112 0.3643
+0 2002 48 6.50 0.458 0.6718 -2.5645 27.3859 0.0000 567.9839 422.7203 210.9687 4.2595 0.3619
+0 2002 48 17.50 0.542 -3.2019 -2.4192 0.0000 0.0000 358.0640 387.3116 252.1166 3.4746 0.3645
+0 2002 49 6.50 0.458 -3.6068 -2.3195 25.0241 0.0000 308.6076 356.1808 287.2102 3.6223 0.3640
+0 2002 49 17.50 0.542 -6.8523 -2.2565 0.0000 0.0000 146.3178 304.6810 341.2300 2.7869 0.3646
+0 2002 50 6.50 0.458 -6.7745 -2.2473 26.3614 0.0000 221.1754 378.4529 267.8290 6.2991 0.3621
+0 2002 50 17.50 0.542 -8.6250 -2.2708 0.0000 1.5240 138.3518 347.2308 298.1099 9.2119 0.3646
+0 2002 51 6.50 0.458 -7.1414 -2.3332 14.0355 2.0320 132.7880 297.2216 345.6255 6.4818 0.3634
+0 2002 51 17.50 0.542 -9.9119 -2.3296 0.0000 0.2540 150.4679 387.3205 255.6688 5.3931 0.3656
+0 2002 52 6.50 0.458 -7.5536 -2.3264 33.3202 0.0000 278.2545 451.6362 191.4827 6.2623 0.3636
+0 2002 52 17.50 0.542 -4.2823 -2.3500 0.0000 0.0000 439.3421 510.6898 131.4867 12.8550 0.3632
+0 2002 53 6.50 0.458 -0.6059 -2.3441 27.0146 0.0000 502.5770 424.1327 218.2794 12.3468 0.3615
+0 2002 53 17.50 0.542 1.3765 -2.2600 0.0000 0.0000 642.7898 478.9830 166.7905 11.6508 0.3645
+0 2002 54 6.50 0.458 4.8509 -2.1495 21.2256 0.0000 815.2115 457.6250 192.5882 7.7100 0.3634
+0 2002 54 17.50 0.542 -1.2215 -1.9723 0.0000 0.0000 371.3486 332.2636 325.1441 5.9004 0.3648
+0 2002 55 6.50 0.458 -5.5545 -1.8464 31.5061 0.0000 222.7914 358.1565 304.4041 7.5209 0.3638
+0 2002 55 17.50 0.542 -10.5804 -1.8369 0.0000 4.3180 100.3510 360.6942 302.2548 3.7169 0.3645
+0 2002 56 6.50 0.458 -17.3245 -1.8836 26.2057 0.5080 102.0609 501.2102 159.8201 6.6682 0.3650
+0 2002 56 17.50 0.542 -22.2950 -1.9815 0.0000 0.0000 80.3810 535.7995 121.2281 7.5981 0.3660
+0 2002 57 6.50 0.458 -16.2423 -2.1436 35.9611 0.0000 207.7002 569.7467 80.7076 11.9564 0.3656
+0 2002 57 17.50 0.542 -11.6815 -2.3369 0.0000 0.0000 270.8202 548.5793 94.1215 10.4662 0.3651
+0 2002 58 6.50 0.458 -14.0945 -2.4645 28.9982 0.0000 139.1342 461.9210 175.7105 7.9482 0.3642
+0 2002 58 17.50 0.542 -11.5085 -2.5477 0.0000 0.0000 223.1770 489.1894 145.1624 12.2477 0.3650
+0 2002 59 6.50 0.458 -8.4523 -2.6241 15.5720 0.0000 215.5562 405.1910 226.1622 7.3400 0.3631
+0 2002 59 17.50 0.542 -15.3377 -2.6250 0.0000 8.6360 46.7277 377.9066 253.4110 2.3012 0.3630
+0 2002 60 6.50 0.458 -22.6327 -2.5945 18.7072 1.0160 41.6437 478.4399 154.0708 2.2109 0.3625
+0 2002 60 17.50 0.542 -24.4038 -2.6169 0.0000 0.0000 40.4109 493.4407 138.1932 2.0373 0.3630
+0 2002 61 6.50 0.458 -20.6364 -2.6950 33.0556 2.5400 74.1871 484.6200 143.9645 5.5636 0.3609
+0 2002 61 17.50 0.542 -22.0908 -2.8050 0.0000 0.2540 89.3918 512.4326 111.8799 6.7554 0.3570
+0 2002 62 6.50 0.458 -17.5664 -2.8914 38.0641 0.0000 158.9915 518.6409 102.3381 10.6727 0.3567
+0 2002 62 17.50 0.542 -13.6385 -2.9650 0.0000 0.0000 190.9714 485.5357 132.6154 14.8104 0.3568
+0 2002 63 6.50 0.458 -6.6295 -3.0018 37.3039 0.0000 325.7296 443.5605 173.1815 10.7145 0.3569
+0 2002 63 17.50 0.542 -6.7119 -2.9831 0.0000 0.0000 320.5334 447.1036 170.3554 4.9458 0.3555
+0 2002 64 6.50 0.458 -3.7245 -2.9200 36.0207 0.0000 431.5885 458.9933 160.8842 10.1695 0.3538
+0 2002 64 17.50 0.542 -4.3350 -2.8373 0.0000 0.0000 431.6082 486.5662 136.4974 5.2427 0.3610
+0 2002 65 6.50 0.458 -1.1495 -2.7505 32.6484 0.0000 518.6849 448.2921 178.1343 8.9291 0.3600
+0 2002 65 17.50 0.542 -0.4135 -2.6581 0.0000 0.0000 532.8133 437.7050 192.3205 5.8292 0.3604
+0 2002 66 6.50 0.458 -2.4991 -2.5409 23.5260 0.0000 296.0459 293.7556 340.8636 6.2082 0.3607
+0 2002 66 17.50 0.542 -4.8919 -2.4108 0.0000 0.7620 145.7539 236.5304 403.2325 2.8773 0.3614
+0 2002 67 6.50 0.458 -7.9159 -2.2995 18.7828 1.2700 134.7119 301.5959 342.5943 3.3695 0.3623
+0 2002 67 17.50 0.542 -17.7942 -2.6454 0.0000 0.0000 109.1895 484.9988 145.5687 9.8112 0.3626
+0 2002 68 6.50 0.479 -7.5574 -2.7113 40.5583 0.0000 374.3199 528.6823 99.2661 7.8883 0.0000
+0 2002 68 18.00 0.521 -3.4332 -2.7340 0.0000 0.0000 444.8042 468.8195 158.2464 5.1436 0.0000
+0 2002 69 6.50 0.479 0.9304 -2.7113 35.6083 0.0000 612.6929 450.2381 177.7103 9.2761 0.0000
+0 2002 69 18.00 0.521 -3.8028 -2.2696 0.0000 0.7080 266.5054 318.3535 327.2919 5.2420 0.0000
+0 2002 70 6.50 0.479 -3.0852 -1.8426 39.3775 0.0000 350.0531 392.3637 270.3515 6.1439 0.0000
+0 2002 70 18.00 0.500 -4.2475 -1.8383 0.0000 0.0000 284.1609 375.8588 287.0320 10.0783 0.0000
+0 2002 71 6.00 0.500 0.9287 -2.0075 25.8545 0.0000 536.5690 396.0243 259.9605 10.4617 0.3464
+0 2002 71 18.00 0.500 0.8833 -2.0208 0.0000 0.0000 464.7284 334.6885 320.7380 3.8579 0.3649
+0 2002 72 6.00 0.500 1.3700 -1.9212 29.6383 0.0000 392.2126 236.6229 422.8667 2.6392 0.3633
+0 2002 72 18.00 0.500 -6.9313 -1.7842 0.0000 11.7000 21.9613 198.8486 466.2772 1.7704 0.3642
+0 2002 73 6.00 0.500 -10.2608 -1.6671 38.3083 1.5240 92.3043 366.0876 303.8876 5.9196 0.3637
+0 2002 73 18.00 0.500 -15.4254 -1.6246 0.0000 0.5080 42.8423 423.0349 248.7095 1.8963 0.3640
+0 2002 74 6.00 0.500 -12.9392 -1.6258 20.8960 0.0000 51.8194 385.6046 286.0877 1.7258 0.3649
+0 2002 74 18.00 0.500 -14.7121 -1.6712 0.0000 0.0000 40.3066 406.4793 263.3222 2.2538 0.3660
+0 2002 75 6.00 0.500 -9.2971 -1.7325 24.8184 0.5080 144.6847 390.9839 276.2763 3.9196 0.3649
+0 2002 75 18.00 0.500 -12.2804 -1.8062 0.0000 0.0000 146.9939 460.7135 203.4991 4.3408 0.3651
+0 2002 76 6.00 0.500 -9.5583 -1.8675 30.8560 0.0000 231.3172 479.8655 181.8270 4.0404 0.3645
+0 2002 76 18.00 0.500 -11.6854 -1.9267 0.0000 0.0000 192.3036 488.2914 170.9757 2.8546 0.3654
+0 2002 77 6.00 0.500 -9.0396 -1.9783 20.7108 3.0480 153.0095 383.0948 274.0621 2.2146 0.3652
+0 2002 77 18.00 0.500 -10.5300 -2.0367 0.0000 0.7620 71.5113 337.1800 317.6022 2.2571 0.3647
+0 2002 78 6.00 0.500 -7.0983 -1.9421 32.2589 0.0000 269.8478 446.1902 212.4621 8.4871 0.3644
+0 2002 78 18.00 0.500 -6.6650 -1.8383 0.0000 0.0000 232.1652 402.8926 259.9983 8.4150 0.0000
+0 2002 79 6.00 0.500 -1.8875 -1.8421 38.8588 0.0000 425.2839 416.9709 245.7660 4.7467 0.0000
+0 2002 79 18.00 0.500 -2.4667 -1.5812 0.0000 0.0000 370.7283 406.3928 267.2493 2.7383 0.0000
+0 2002 80 6.00 0.500 0.9038 -1.3133 40.3022 0.0000 538.7787 432.6471 252.2046 4.2879 0.0000
+0 2002 80 18.00 0.500 0.0854 -1.2992 0.0000 0.0000 505.6882 443.7149 241.7401 2.2608 0.0000
+0 2002 81 6.00 0.500 4.4675 -1.3133 44.3363 0.0000 793.6891 488.4638 196.3879 2.8004 0.0000
+0 2002 81 18.00 0.500 3.0563 -1.2992 0.0000 0.0000 696.4032 482.5982 202.8568 4.2817 0.0000
+0 2002 82 6.00 0.500 3.7725 -1.3133 32.0819 0.0000 737.2430 480.9265 203.9252 4.9971 0.0000
+0 2002 82 18.00 0.500 -2.5083 -1.2992 0.0000 0.0000 425.6637 470.3224 215.1325 3.0696 0.0000
+0 2002 83 6.00 0.500 -5.2400 -1.3133 19.4836 2.5400 200.4598 345.1594 339.6923 2.2037 0.0000
+0 2002 83 18.00 0.500 -8.9446 -1.2992 0.0000 1.7780 52.8582 309.8901 375.5649 1.4667 0.0000
+0 2002 84 6.00 0.500 -5.3371 -1.3158 15.8231 1.0160 103.2360 252.7350 432.0106 1.8513 0.2638
+0 2002 84 18.00 0.500 -6.4783 -1.2992 0.0000 0.0000 155.4221 343.2061 342.2488 5.4204 0.0000
+0 2002 85 6.00 0.500 -3.0475 -1.3133 33.6307 0.0000 349.1658 412.5301 272.3216 4.1467 0.0000
+0 2002 85 18.00 0.500 -3.0575 -1.2992 0.0000 1.2700 260.2579 330.0297 355.4253 5.4642 0.0000
+0 2002 86 6.00 0.500 -0.8042 -1.3737 26.9523 0.2540 334.2748 306.6511 375.6448 6.5092 0.3600
+0 2002 86 18.00 0.500 -1.6754 -1.4450 0.0000 0.0000 334.5603 343.8758 335.3999 6.9654 0.3683
+0 2002 87 6.00 0.500 1.4117 -1.4071 41.8039 0.7620 551.0541 415.8557 265.0208 11.7342 0.3688
+0 2002 87 18.00 0.500 -0.9433 -1.3625 0.0000 0.0000 426.7845 403.6539 279.1104 8.4875 0.3683
+0 2002 88 6.00 0.500 1.2450 -1.3254 37.7071 0.0000 563.4092 433.7778 250.5600 5.1617 0.3668
+0 2002 88 18.00 0.500 -1.5617 -1.2958 0.0000 0.0000 336.7328 345.3755 340.2208 4.1371 0.3688
+0 2002 89 6.00 0.500 -0.1954 -1.2608 39.0312 0.0000 408.0245 356.4065 330.6811 9.4117 0.3672
+0 2002 89 18.00 0.500 -1.6929 -1.2358 0.0000 0.0000 282.7981 301.0581 387.0971 5.6942 0.3686
+0 2002 90 6.00 0.521 3.6460 -1.1928 43.8221 0.0000 637.9594 383.7017 306.2975 6.0580 0.3694
+0 2002 90 18.50 0.479 3.0204 -0.9543 0.0000 0.0000 609.9123 412.4115 287.8811 7.6848 0.3451
+0 2002 91 6.00 0.521 5.6392 -0.8320 46.9066 0.0000 872.2435 510.6505 194.9910 6.5484 0.3263
+0 2002 91 18.50 0.479 0.7087 -0.5422 0.0000 0.0000 494.2132 408.6472 309.8057 5.2509 0.3369
+0 2002 92 6.00 0.521 -7.1156 -0.5132 46.2362 0.0000 213.9664 417.7460 301.9995 3.6620 0.3355
+0 2002 92 18.50 0.479 -8.1683 -0.5413 0.0000 0.0000 219.9489 443.4713 275.0186 4.2043 0.3341
+0 2002 93 6.00 0.521 2.8576 -0.6084 44.1061 0.0000 628.5697 448.9747 266.5271 6.1096 0.3355
+0 2002 93 18.50 0.458 -0.5905 -0.5755 0.0000 0.0000 416.2144 413.8337 303.1337 4.0327 0.3360
+0 2002 94 5.50 0.542 3.3681 -0.5446 48.2553 0.0000 716.6982 501.9143 216.4279 3.0038 0.3362
+0 2002 94 18.50 0.458 3.0395 -0.4727 0.0000 0.0000 674.0942 497.4767 224.0806 2.3118 0.3362
+0 2002 95 5.50 0.542 5.2673 -0.5215 42.4096 0.0000 720.7866 408.0094 311.3668 2.9554 0.3378
+0 2002 95 18.50 0.458 2.6723 -0.4909 0.0000 0.0000 453.6205 296.8011 423.9420 2.8523 0.3445
+0 2002 96 5.50 0.542 7.0485 -0.4546 29.2910 0.0000 873.4396 440.7908 281.5799 2.3912 0.3485
+0 2002 96 18.50 0.458 2.5586 -0.4505 0.0000 0.0000 584.6002 431.4217 291.1365 2.6118 0.0000
+0 2002 97 5.50 0.542 2.7896 -0.4727 29.6492 0.0000 395.3632 230.7195 490.8399 2.6119 0.0000
+0 2002 97 18.50 0.458 -0.3168 -0.4505 0.0000 0.0000 423.9178 414.5922 307.9660 3.2300 0.0000
+0 2002 98 5.50 0.542 0.5900 -0.3838 42.7941 0.0000 263.6911 213.4135 512.1599 2.6888 0.4000
+0 2002 98 18.50 0.458 -0.4041 -0.2286 0.0000 0.0000 232.7770 239.8585 492.7237 2.2741 0.4097
+0 2002 99 5.50 0.542 4.5673 -0.2331 35.7736 0.0000 698.5157 428.6491 303.7311 3.4908 0.4149
+0 2002 99 18.50 0.458 4.5868 -0.2059 0.0000 0.0000 605.0883 350.6808 382.9366 3.1336 0.4590
+0 2002 100 5.50 0.542 6.2331 -0.1588 39.3896 0.0000 699.8226 338.0422 397.7247 4.9723 0.4933
+0 2002 100 18.50 0.458 -0.1959 -0.1059 0.0000 2.0320 192.2148 190.3063 547.8832 3.7309 0.5329
+0 2002 101 5.50 0.542 -0.3192 -0.1035 27.8415 0.7620 254.7709 260.5643 477.7374 3.3508 0.5267
+0 2002 101 18.50 0.458 -0.9655 -0.1000 0.0000 0.7620 211.2387 249.7685 488.6919 4.1936 0.5406
+0 2002 102 5.50 0.542 2.6812 -0.1077 25.7150 0.0000 449.7799 306.0067 432.1010 2.5615 0.5429
+0 2002 102 18.50 0.458 0.6800 -0.1100 0.0000 0.0000 258.8954 221.4818 516.5200 5.2668 0.5598
+0 2002 103 5.50 0.542 5.4304 -0.1262 49.9093 0.0000 683.5793 368.0851 369.1765 6.5235 0.5658
+0 2002 103 18.50 0.458 5.3795 -0.1200 0.0000 0.0000 680.3063 380.0447 357.4987 10.2377 0.6002
+0 2002 104 5.50 0.542 8.5485 -0.1277 27.4187 0.0000 920.2816 387.2007 349.9904 6.2688 0.6247
+0 2002 104 18.50 0.458 7.4268 -0.1300 0.0000 0.0000 847.4538 407.2382 329.8471 4.0691 0.6631
+0 2002 105 5.50 0.542 9.7438 -0.1500 28.8909 0.0000 1110.8208 479.8933 256.2770 3.9165 0.7092
+0 2002 105 18.50 0.458 5.1955 -0.1441 0.0000 0.0000 664.9718 367.5952 368.8452 5.1968 0.7598
+0 2002 106 5.50 0.542 -0.6946 -0.1762 46.4126 0.7620 408.5546 430.8839 304.0916 8.4296 0.7327
+0 2002 106 18.50 0.458 -0.3123 -0.1900 0.0000 0.0000 479.9881 485.1609 249.1819 2.8764 0.7244
+0 2002 107 5.50 0.542 6.2804 -0.2223 51.5498 0.0000 894.4355 517.1634 215.7073 5.1146 0.7466
+0 2002 107 18.50 0.458 2.2277 -0.2105 0.0000 0.0000 574.3771 444.8041 288.6061 4.7327 0.7878
+0 2002 108 5.50 0.542 2.5858 -0.1319 44.4794 0.5080 573.2233 429.1361 307.8680 4.0454 0.7838
+0 2002 108 18.50 0.458 -6.2445 -0.0736 0.0000 1.7780 22.3043 256.1626 483.5085 2.2727 0.8072
+0 2002 109 5.50 0.542 -6.3519 -0.0765 20.2622 0.0000 20.8262 258.4878 481.0499 2.0342 0.7769
+0 2002 109 18.50 0.458 -7.9373 -0.0700 0.0000 0.0000 23.9293 308.5108 431.3273 1.6927 0.7551
+0 2002 110 5.50 0.542 -6.2435 -0.0765 15.2960 3.0520 21.5554 255.1981 484.3396 4.0662 0.7357
+0 2002 110 18.50 0.458 -7.1805 -0.0732 0.0000 3.0480 114.2902 376.6931 362.9988 5.0986 0.7259
+0 2002 111 5.50 0.542 -5.7335 -0.0796 31.3101 0.2540 202.2620 417.3818 322.0145 7.9323 0.7108
+0 2002 111 18.50 0.458 -2.8341 -0.0800 0.0000 0.0000 384.9227 501.0067 238.3720 10.2432 0.7027
+0 2002 112 5.50 0.542 1.5569 -0.0858 55.6420 0.0000 572.0876 482.3928 256.7209 8.8912 0.6939
+0 2002 112 18.50 0.458 2.2814 -0.0800 0.0000 0.0000 593.9792 476.9457 262.4330 6.8605 0.6913
+0 2002 113 5.50 0.542 6.4604 -0.0869 55.0674 0.0000 877.2065 495.6960 243.3646 9.2927 0.7062
+0 2002 113 18.50 0.458 3.7064 -0.0800 0.0000 0.0000 666.9432 460.1506 279.2280 5.6086 0.7545
+0 2002 114 5.50 0.562 3.9570 -0.0815 55.5187 0.0000 818.2101 590.4633 148.8473 6.5252 0.7765
+0 2002 114 19.00 0.438 0.2119 -0.0781 0.0000 0.0000 517.3456 493.0406 246.4256 2.0757 0.8055
+0 2002 115 5.50 0.562 5.9989 -0.0822 37.0771 0.0000 813.7294 472.0984 267.1782 3.3089 0.8202
+0 2002 115 19.00 0.438 1.1500 -0.0800 0.0000 0.7620 188.1031 127.4306 611.9480 2.2376 0.8470
+0 2002 116 5.50 0.562 3.6959 -0.0804 33.5507 2.0320 299.6942 96.5515 642.8101 2.6367 0.8533
+0 2002 116 19.00 0.438 0.4657 -0.0790 0.0000 1.7780 180.5356 152.1764 587.2460 6.3071 0.8596
+0 2002 117 5.50 0.562 -0.3704 -0.0778 43.7072 1.5240 289.2132 298.2859 441.1949 11.9559 0.8162
+0 2002 117 19.00 0.417 -1.3570 -0.0735 0.0000 0.5080 207.8799 264.3716 475.3057 6.9545 0.7934
+0 2002 118 5.00 0.583 3.0679 -0.0807 50.4945 0.0000 528.3332 358.9729 380.3729 8.4096 0.8199
+0 2002 118 19.00 0.417 2.7585 -0.0800 0.0000 0.0000 551.1637 408.2810 331.0976 8.2070 0.8538
+0 2002 119 5.00 0.583 8.0950 -0.0850 55.1704 0.0000 943.8336 440.8866 298.2624 6.4325 0.8802
+0 2002 119 19.00 0.417 6.5895 -0.0800 0.0000 0.0000 909.3378 529.3707 210.0079 5.7050 0.8864
+0 2002 120 5.00 0.583 8.8518 -0.0804 55.9046 0.0000 1105.7345 550.7200 188.6422 6.1975 0.8816
+0 2002 120 19.00 0.417 4.8965 -0.0720 0.0000 0.0000 782.0099 510.6291 229.1172 3.7855 0.8837
+0 2002 121 5.00 0.583 -1.3161 -0.0382 29.6705 0.0000 295.0156 328.2886 413.0145 2.6311 0.8540
+0 2002 121 19.00 0.417 -6.2865 -0.0600 0.0000 3.5600 30.5295 267.0633 473.2347 1.8920 0.8362
+0 2002 122 5.00 0.583 -0.7450 -0.0679 43.1631 0.2540 240.5411 265.5931 474.3435 2.1089 0.8383
+0 2002 122 19.00 0.417 -0.9920 -0.0665 0.0000 0.0000 323.1413 364.1072 375.8918 5.5815 0.8262
+0 2002 123 5.00 0.583 2.6689 -0.0700 42.9536 0.0000 550.8351 403.2498 336.5883 6.2739 0.8185
+0 2002 123 19.00 0.417 1.3960 -0.0700 0.0000 0.2540 398.3000 325.2946 414.5435 5.3240 0.8406
+0 2002 124 5.00 0.583 4.2304 0.0307 40.8942 0.6000 494.5630 266.9935 477.5003 2.5986 0.8526
+0 2002 124 19.00 0.417 3.2155 -0.0375 0.0000 0.0000 465.3258 298.8243 442.5114 2.1985 0.8802
+0 2002 125 5.00 0.583 7.9357 0.3339 46.3975 0.3000 948.9557 484.0823 274.7258 4.1875 0.8805
+0 2002 125 19.00 0.417 5.2595 0.0545 0.0000 0.0000 760.8305 472.7945 272.8098 2.5690 0.8777
+0 2002 126 5.00 0.583 9.4543 0.7889 55.7762 0.0000 1073.3040 508.6055 272.4721 3.7225 0.8714
+0 2002 126 19.00 0.417 6.2455 0.2945 0.0000 0.0000 787.8502 448.0723 308.8505 2.8595 0.8783
+0 2002 127 5.00 0.583 10.2093 1.1700 48.3300 0.0000 1129.8850 529.1971 271.3346 3.6114 0.8634
+0 2002 127 19.00 0.417 5.8695 0.8890 0.0000 0.0000 828.6287 535.7120 249.8327 5.0180 0.8719
+0 2002 128 5.00 0.583 1.6679 1.2657 58.1922 0.2540 445.4422 424.1345 380.8258 5.6025 0.8450
+0 2002 128 19.00 0.417 -6.8260 0.3670 0.0000 0.0000 271.7171 534.2505 226.0302 1.6305 0.8379
+0 2002 129 5.00 0.583 0.3557 0.1271 57.3896 0.0000 594.9742 557.1288 191.8488 3.7618 0.7641
+0 2002 129 19.00 0.417 1.5680 0.0615 0.0000 0.0000 578.4285 502.9470 242.9643 2.4815 0.7813
+0 2002 130 5.00 0.583 4.4521 0.3689 26.8105 0.0000 669.3912 447.8573 312.6175 2.4000 0.7653
+0 2002 130 19.00 0.417 1.9485 0.2690 0.0000 0.0000 343.9051 259.8995 495.7561 3.8055 0.7791
+0 2002 131 5.00 0.583 1.4639 1.1111 29.7839 9.6560 282.0302 252.4988 544.9114 3.3661 0.7761
+0 2002 131 19.00 0.417 -3.7060 0.3715 0.0000 10.1680 9.2899 179.1024 581.3411 1.2050 0.8087
+0 2002 132 5.00 0.583 -3.9575 0.1057 15.3263 4.8260 13.1767 177.8183 570.1434 1.1421 0.7901
+0 2002 132 19.00 0.417 -3.0410 0.0265 0.0000 0.0000 102.0565 230.7061 513.5815 1.0015 0.7813
+0 2002 133 5.00 0.583 3.7557 0.0157 58.0057 1.2700 433.4870 223.5356 520.2531 1.8114 0.8163
+0 2002 133 19.00 0.417 3.9955 0.0145 0.0000 0.0000 560.1712 351.1604 392.5721 2.9095 0.8940
+0 2002 134 5.00 0.583 7.6546 0.0179 37.6414 1.5280 779.6401 319.5475 424.3406 3.3525 0.9008
+0 2002 134 19.00 0.417 4.1890 0.0205 0.0000 0.0000 439.0138 217.7057 526.3043 3.9270 0.9128
+0 2002 135 5.00 0.583 7.1404 0.4614 41.5103 0.2540 671.5568 274.0044 490.9019 3.5975 0.8998
+0 2002 135 19.00 0.417 3.1320 0.3260 0.0000 0.5080 315.7338 169.3648 588.9612 2.5980 0.8951
+0 2002 136 5.00 0.583 0.1821 0.1943 10.8092 8.3820 30.5133 29.5466 722.5621 1.4250 0.8561
+0 2002 136 19.00 0.417 -0.2935 0.0820 0.0000 0.5080 73.4795 90.5559 656.3030 1.5605 0.8434
+0 2002 137 5.00 0.583 1.6293 0.1068 36.0126 0.5080 76.0653 0.8699 747.1552 1.9311 0.8474
+0 2002 137 19.00 0.417 4.4215 0.2000 0.0000 0.0000 441.4133 201.4318 550.9362 3.0465 0.8749
+0 2002 138 5.00 0.583 10.2750 2.2893 38.8674 0.0000 868.2096 322.2153 539.5616 2.6189 0.8506
+0 2002 138 19.00 0.417 8.5745 2.2020 0.0000 0.0000 739.0867 327.0305 526.0107 3.1480 0.8555
+0 2002 139 5.00 0.583 11.0029 4.0954 43.9420 0.0000 927.3817 410.7934 556.9973 3.0782 0.7989
+0 2002 139 19.00 0.417 5.0010 3.5120 0.0000 0.0000 193.4701 104.6856 820.7828 2.0930 0.7968
+0 2002 140 5.00 0.583 7.7129 4.6257 41.7244 1.5240 381.1198 171.5100 825.9108 2.4136 0.7708
+0 2002 140 19.00 0.417 6.2535 3.4140 0.0000 0.0000 387.2792 209.0277 711.0419 2.8990 0.7881
+0 2002 141 5.00 0.583 9.2136 4.4482 34.8503 0.0000 851.7214 499.6857 485.3145 5.8839 0.7578
+0 2002 141 19.00 0.417 -1.3195 2.1610 0.0000 0.0000 485.1329 646.2441 205.0676 8.3795 0.7619
+0 2002 142 5.00 0.583 0.7036 1.7943 61.1586 0.0000 551.0587 598.3060 233.7620 9.0089 0.7363
+0 2002 142 19.00 0.417 -0.4510 0.9745 0.0000 0.0000 443.9178 508.8476 280.7967 6.2700 0.7396
+0 2002 143 5.00 0.604 -0.6369 0.8676 27.9284 10.1600 224.6882 291.6377 492.7444 2.3997 0.7326
+0 2002 143 19.50 0.396 -3.9726 0.6684 0.0000 16.2660 10.2973 203.5505 571.0266 1.5679 0.7458
+0 2002 144 5.00 0.604 -2.4421 0.5100 30.1543 5.8460 46.1104 170.5117 596.4756 1.7197 0.7457
+0 2002 144 19.50 0.396 -0.2163 0.4168 0.0000 0.0000 199.9276 229.1190 533.4402 2.5300 0.7471
+0 2002 145 5.00 0.604 4.0269 0.3007 35.3731 0.0000 444.4543 228.4615 528.6205 2.7841 0.7561
+0 2002 145 19.50 0.396 4.2453 0.1916 0.0000 0.0000 586.3085 370.3537 381.6066 3.1468 0.8267
+0 2002 146 5.00 0.604 8.2272 0.1800 42.2923 0.0000 882.7248 387.0447 364.3752 4.2293 0.8779
+0 2002 146 19.50 0.396 5.6468 0.1168 0.0000 0.0000 614.3426 304.7874 443.6906 2.9700 0.9008
+0 2002 147 5.00 0.604 10.3841 0.5979 47.0959 0.0000 996.9390 350.4174 421.1269 2.8866 0.8939
+0 2002 147 19.50 0.396 6.9563 0.6558 0.0000 0.0000 672.1241 302.3095 471.7716 3.6932 0.8804
+0 2002 148 5.00 0.604 11.2983 2.6169 50.6713 0.0000 1042.7386 424.6609 455.0591 3.8669 0.8357
+0 2002 148 19.50 0.375 8.6272 2.8594 0.0000 0.0000 789.5977 410.6167 478.0235 3.3544 0.8132
+0 2002 149 4.50 0.625 12.4393 4.4373 45.9293 0.0000 1094.5927 476.5912 510.1221 3.9947 0.7716
+0 2002 149 19.50 0.375 11.4294 4.7250 0.0000 0.0000 919.8647 411.6267 585.4348 3.2967 0.7634
+0 2002 150 4.50 0.625 16.4847 7.3553 56.9882 0.0000 1408.0897 535.5940 649.2571 2.3773 0.7309
+0 2002 150 19.50 0.375 14.0406 6.5300 0.0000 0.0000 1196.9386 551.3727 564.9669 3.5322 0.7343
+0 2002 151 4.50 0.625 16.6023 8.0377 46.3786 0.2540 1446.3071 614.0362 620.4969 2.5483 0.7088
+0 2002 151 19.50 0.375 13.3211 6.9444 0.0000 0.2540 1091.1840 549.2096 595.4168 3.9283 0.7097
+0 2002 152 4.50 0.625 17.2650 8.0483 37.1984 0.2540 1672.1986 748.3696 481.1985 3.7290 0.6790
+0 2002 152 19.50 0.375 13.7000 6.8006 0.0000 0.0000 1309.6023 717.2271 416.9240 3.1644 0.6872
+0 2002 153 4.50 0.625 15.1553 8.4627 54.0873 0.0000 1579.8035 951.4945 313.7957 5.1150 0.6443
+0 2002 153 19.50 0.375 9.3417 7.1844 0.0000 0.0000 789.3958 600.1954 560.3783 3.5022 0.6487
+0 2002 154 4.50 0.625 6.5143 6.2080 29.0788 17.5260 222.2750 196.3816 896.2692 2.7403 0.6877
+0 2002 154 19.50 0.375 0.8839 3.0694 0.0000 11.9380 2.7349 116.8874 782.7219 1.7694 0.8556
+0 2002 155 4.50 0.625 2.0683 2.7590 15.2356 5.0800 -1.6946 33.9085 848.2182 1.0173 0.8735
+0 2002 155 19.50 0.375 1.0472 2.5572 0.0000 0.0000 -1.0993 76.6400 794.6768 2.2633 0.8501
+0 2002 156 4.50 0.625 8.9520 5.1000 62.9073 0.0000 633.3762 347.2958 685.3297 3.2357 0.8082
+0 2002 156 19.50 0.375 8.9267 4.6444 0.0000 0.0000 764.1600 465.3170 526.9654 4.3911 0.7887
+0 2002 157 4.50 0.625 13.1153 7.1513 54.9004 0.0000 1148.2993 634.9786 534.0900 4.1733 0.7502
+0 2002 157 19.50 0.375 11.8039 6.0017 0.0000 0.0000 1096.8799 638.0159 442.4001 3.2067 0.7567
+0 2002 158 4.50 0.625 16.9913 7.9590 57.9069 0.0000 1644.6180 759.9980 471.2161 3.3237 0.7187
+0 2002 158 19.50 0.375 10.7644 6.9733 0.0000 0.0000 802.9126 490.2918 656.9473 2.2689 0.7142
+0 2002 159 4.50 0.625 18.0773 8.7023 53.1905 0.0000 1921.6287 948.8585 338.1603 4.0590 0.6810
+0 2002 159 19.50 0.375 14.9233 7.8250 0.0000 0.0000 1522.5192 870.1171 337.4222 3.4294 0.6863
+0 2002 160 4.50 0.625 17.9430 9.8377 58.3994 0.0000 1773.8223 915.2483 464.5840 3.0730 0.6488
+0 2002 160 19.50 0.375 13.1850 8.5178 0.0000 0.0000 1332.5172 898.4230 362.3132 5.5844 0.6449
+0 2002 161 4.50 0.625 10.8737 9.2283 57.2668 0.0000 878.9095 742.4497 581.7347 3.3910 0.6311
+0 2002 161 19.50 0.375 6.8750 6.7856 0.0000 0.0000 623.1253 618.0264 516.5620 2.0106 0.6402
+0 2002 162 4.50 0.625 9.9313 7.9417 55.1628 0.0000 732.5457 576.4352 652.1089 2.5500 0.6088
+0 2002 162 19.50 0.375 8.1461 6.4050 0.0000 0.0000 679.9645 555.8516 551.9496 2.1317 0.6206
+0 2002 163 4.50 0.625 11.7597 7.7730 60.2599 0.0000 1137.4926 798.0309 418.0155 3.2630 0.5885
+0 2002 163 19.50 0.375 5.5133 6.3556 0.0000 0.0000 460.8124 501.1857 603.2947 2.0872 0.5957
+0 2002 164 4.50 0.625 7.0377 7.5970 59.0711 0.0000 346.9930 390.5420 812.1816 2.4133 0.5721
+0 2002 164 19.50 0.375 4.3678 6.6839 0.0000 0.0000 170.9555 320.1870 805.7519 1.3994 0.5785
+0 2002 165 4.50 0.625 9.6207 7.6743 44.7273 0.0000 678.7454 525.0011 678.9808 2.3017 0.5553
+0 2002 165 19.50 0.375 9.8544 6.1706 0.0000 0.0000 823.4609 547.5881 543.5916 3.1750 0.5650
+0 2002 166 4.50 0.625 11.1350 6.4427 31.8220 0.0000 820.1521 444.1601 666.3759 2.2980 0.5440
+0 2002 166 19.50 0.375 6.6233 5.5872 0.0000 0.0000 322.9133 252.0567 799.6883 2.7156 0.5525
+0 2002 167 4.50 0.625 12.5433 7.7407 53.1065 0.4100 997.8892 589.4228 622.5541 3.1970 0.5228
+0 2002 167 19.50 0.375 10.6517 6.4083 0.0000 0.0000 889.7011 560.7029 546.5790 2.5294 0.5319
+0 2002 168 4.50 0.625 17.0773 8.5767 58.6579 0.2000 1796.3837 940.9009 337.5737 3.6010 0.4950
+0 2002 168 19.50 0.375 14.0878 7.8483 0.0000 0.0000 1485.9126 919.0120 289.9742 2.3317 0.5012
+0 2002 169 4.50 0.625 17.2487 9.7007 57.4666 0.0000 1849.7184 1043.0591 321.4435 3.0270 0.4706
+0 2002 169 19.50 0.375 15.2439 8.5011 0.0000 0.0000 1626.7512 985.5977 273.3344 3.5633 0.4761
+0 2002 170 4.50 0.625 14.0083 9.4970 39.1675 0.5080 1020.3272 599.3079 745.3457 2.4177 0.4528
+0 2002 170 19.50 0.375 7.2956 8.3478 0.0000 0.5080 125.1936 201.9332 1044.6151 2.0850 0.4650
+0 2002 171 4.50 0.625 12.0003 9.6040 36.6512 0.0000 403.8895 183.0936 1169.0851 2.5570 0.4528
+0 2002 171 19.50 0.375 11.8933 8.3517 0.0000 0.0000 715.7185 414.5505 832.3113 3.7017 0.4650
+0 2002 172 4.50 0.625 15.7367 9.5953 32.5969 0.2540 1266.8889 655.9747 695.4272 3.8277 0.4355
+0 2002 172 19.50 0.375 10.9022 8.2733 0.0000 3.0480 569.6477 351.1832 889.8342 3.4633 0.4643
+0 2002 173 4.50 0.625 16.4057 10.0327 51.6226 0.0000 1376.4052 720.6016 669.7421 3.4730 0.4349
+0 2002 173 19.50 0.375 13.5678 8.4828 0.0000 0.0000 1380.5190 914.3414 343.6516 4.2694 0.4312
+0 2002 174 4.50 0.625 16.4080 10.1187 57.2999 0.0000 1572.2664 917.3034 486.5266 2.9907 0.4078
+0 2002 174 19.50 0.375 13.7011 8.5172 0.0000 0.0000 1206.2969 741.2639 519.6533 3.6422 0.4246
+0 2002 175 4.50 0.625 17.9413 9.7137 44.1446 0.5120 1781.0345 908.5051 453.0709 2.7010 0.3939
+0 2002 175 19.50 0.375 14.1583 8.5589 0.0000 0.0000 1271.2809 757.0466 506.6604 3.1522 0.4085
+0 2002 176 4.50 0.625 16.9210 9.6733 38.9534 0.0000 1492.2676 743.3177 612.6339 2.8653 0.3793
+0 2002 176 19.50 0.375 12.0206 8.3461 0.0000 0.0000 854.6324 540.0625 707.1716 2.4722 0.3905
+0 2002 177 4.50 0.625 15.8620 9.9687 43.6529 0.0000 1360.2457 768.9490 618.1652 2.8740 0.3806
+0 2002 177 19.50 0.375 13.0922 8.0578 0.0000 0.0000 1108.5258 670.4518 555.6155 2.9939 0.3898
+0 2002 178 4.50 0.625 16.4267 9.0260 34.5155 0.0000 1409.4491 674.9810 628.5493 2.9753 0.3545
+0 2002 178 19.50 0.375 13.3794 8.0150 0.0000 0.0000 1104.9282 633.5211 588.5575 3.0800 0.3723
+0 2002 179 4.50 0.625 15.5987 9.2993 35.0836 0.2540 1228.2629 610.5339 716.6157 2.8917 0.3681
+0 2002 179 19.50 0.375 13.7017 8.3000 0.0000 0.0000 1129.4565 646.9742 596.2726 4.3567 0.3674
+0 2002 180 4.50 0.625 19.3613 10.4663 54.9419 0.0000 2022.0587 1016.1000 412.6974 4.8740 0.3478
+0 2002 180 19.50 0.375 16.7694 9.3617 0.0000 0.0000 1825.6637 1074.4347 251.9179 5.8144 0.3499
+0 2002 181 4.50 0.625 19.3370 10.7390 47.8017 0.0000 2001.6259 1026.0229 421.6193 2.9913 0.3348
+0 2002 181 19.50 0.375 16.2589 9.4756 0.0000 0.0000 1578.1659 901.7884 434.3255 3.8244 0.3655
+0 2002 182 4.50 0.625 20.7410 11.2573 51.0223 0.0000 2264.6592 1118.9840 378.1796 3.4780 0.3397
+0 2002 182 19.50 0.375 17.2694 10.1911 0.0000 0.0000 1709.7949 967.6964 428.7109 3.3506 0.3596
+0 2002 183 4.50 0.625 19.3307 11.1080 33.8661 0.0000 1961.7706 1015.4582 463.4458 3.4607 0.3352
+0 2002 183 19.50 0.375 15.0850 9.8150 0.0000 0.0000 1234.9653 723.0543 640.7443 2.9878 0.3574
+0 2002 184 4.50 0.625 16.5263 10.6943 29.4286 0.0000 1319.6757 708.4182 732.5115 3.1493 0.3396
+0 2002 184 19.50 0.375 11.9111 9.3744 0.0000 0.0000 710.2140 486.5791 841.2799 2.8556 0.3504
+0 2002 185 4.50 0.625 15.0117 10.6890 43.4520 0.0000 986.4467 555.4894 890.0221 2.6360 0.3339
+0 2002 185 19.50 0.375 11.8933 9.2828 0.0000 0.0000 713.8399 487.2486 833.5737 3.2417 0.3532
+0 2002 186 4.50 0.625 15.6263 10.7357 46.1401 0.0000 962.9407 468.7621 981.5392 2.8097 0.3279
+0 2002 186 19.50 0.375 10.6367 10.3161 0.0000 0.0000 211.9922 178.4242 1227.3214 1.6244 0.3288
+0 2002 187 4.50 0.625 12.3577 10.4933 24.1171 5.5840 420.1408 244.0295 1178.0979 2.4607 0.3918
+0 2002 187 19.50 0.375 11.5756 8.3917 0.0000 0.0000 563.6161 296.2206 954.0349 2.7800 0.4390
+0 2002 188 4.50 0.625 15.9850 10.4587 39.4531 7.6180 1004.1543 439.5169 989.1897 2.5923 0.4349
+0 2002 188 19.50 0.375 13.0461 8.7772 0.0000 0.0000 549.4594 170.6721 1109.4178 2.6439 0.4652
+0 2002 189 4.50 0.625 16.7603 11.2500 43.2542 0.0000 1011.3215 431.8788 1073.1766 1.8907 0.4569
+0 2002 189 19.50 0.375 16.0728 10.4361 0.0000 0.0000 1190.6479 615.6136 800.8940 4.7500 0.4173
+0 2002 190 4.50 0.625 17.8633 11.9783 45.8467 0.0000 1289.1678 634.8162 926.4557 2.4700 0.4075
+0 2002 190 19.50 0.375 13.2922 9.9956 0.0000 0.0000 800.8707 493.5655 886.3463 2.1939 0.3993
+0 2002 191 4.50 0.625 14.3050 10.6197 34.7297 10.9200 475.2028 110.9584 1326.7329 2.3430 0.4221
+0 2002 191 19.50 0.375 10.3861 8.5356 0.0000 0.0000 224.3251 73.2505 1187.6364 2.7039 0.5267
+0 2002 192 4.50 0.625 14.0923 10.8270 61.4417 0.0000 572.9714 258.2273 1206.0319 1.9623 0.4948
+0 2002 192 19.50 0.375 10.4311 9.0322 0.0000 0.0000 595.6100 478.6292 823.4329 2.6039 0.4872
+0 2002 193 4.50 0.625 17.6270 10.4970 61.8649 0.0000 1674.5790 911.4417 528.5046 2.6583 0.4475
+0 2002 193 19.50 0.375 13.7383 9.1128 0.0000 0.0000 1292.5455 867.1163 440.6728 4.5050 0.4433
+0 2002 194 4.50 0.625 17.7637 10.2790 49.5574 0.0000 1666.8981 858.2856 551.5024 4.0130 0.4030
+0 2002 194 19.50 0.375 13.6994 9.1328 0.0000 0.0000 1108.3185 691.2199 618.0038 3.4567 0.4061
+0 2002 195 4.50 0.625 18.1947 11.0407 56.6640 0.0000 1679.0590 896.8423 587.5399 2.7163 0.3787
+0 2002 195 19.50 0.375 13.9061 9.4922 0.0000 0.0000 1209.5400 801.5192 537.9121 3.2711 0.3839
+0 2002 196 4.50 0.625 18.6023 11.0223 60.5612 0.0000 1710.2317 865.2702 615.4434 2.9297 0.3582
+0 2002 196 19.50 0.375 14.9217 9.6167 0.0000 0.0000 1294.3407 785.1434 564.1442 2.8139 0.3723
+0 2002 197 4.50 0.625 18.7590 10.6047 59.8662 0.0000 1830.2368 922.6000 517.4916 2.6933 0.3455
+0 2002 197 19.50 0.396 15.3026 9.2916 0.0000 0.0000 1398.0339 818.8511 503.1096 3.0716 0.3619
+0 2002 198 5.00 0.604 18.9786 9.3610 57.4146 0.0000 1809.8051 764.6091 563.9308 2.8090 0.3321
+0 2002 198 19.50 0.396 15.5747 9.3663 0.0000 0.0000 1397.2142 793.1295 534.6850 2.8342 0.3516
+0 2002 199 5.00 0.604 19.2976 8.6207 57.8880 0.0000 1848.3772 696.1996 573.2089 2.9793 0.3231
+0 2002 199 19.50 0.396 15.6568 9.7842 0.0000 0.0000 1338.4969 758.7168 602.5834 3.0600 0.3375
+0 2002 200 5.00 0.604 18.9734 9.1131 57.9479 0.0000 1756.1180 689.4257 618.3344 2.8166 0.3158
+0 2002 200 19.50 0.396 15.0368 9.8195 0.0000 0.0000 1138.1681 623.4352 741.5507 2.2479 0.3297
+0 2002 201 5.00 0.604 14.9838 9.7169 30.3710 5.0880 957.3242 419.2362 937.1272 3.9003 0.3190
+0 2002 201 19.50 0.396 10.7642 8.2347 0.0000 0.5080 427.6960 221.1146 1016.7133 4.6926 0.3380
+0 2002 202 5.00 0.604 14.2666 10.2938 49.1837 0.2540 806.3149 421.6167 992.8522 2.6997 0.3253
+0 2002 202 19.50 0.396 9.1242 8.0737 0.0000 0.0000 72.1584 -9.9215 1235.7585 1.6568 0.3351
+0 2002 203 5.00 0.604 10.6655 9.9514 26.9615 0.0000 124.7781 60.7358 1316.2247 2.1986 0.3252
+0 2002 203 19.50 0.396 12.0447 8.5795 0.0000 0.0000 610.8561 310.8102 953.5778 2.9258 0.3303
+0 2002 204 5.00 0.583 14.1657 9.3304 31.3013 13.2160 817.8781 352.2280 972.1626 2.7804 0.3571
+0 2002 204 19.00 0.417 11.2385 8.1295 0.0000 0.7620 587.0527 328.9124 900.8698 4.9710 0.4030
+0 2002 205 5.00 0.583 17.9175 10.3207 46.7384 0.5120 1523.2758 706.9025 710.6350 3.0621 0.3774
+0 2002 205 19.00 0.417 15.1855 9.3380 0.0000 0.0000 1221.9670 658.6841 665.5473 3.0150 0.3766
+0 2002 206 5.00 0.583 15.5646 10.0768 27.5609 1.0200 1101.3716 541.2142 845.4174 3.8575 0.3701
+0 2002 206 19.00 0.417 9.5590 8.1945 0.0000 0.2540 323.3541 213.5878 1021.6713 3.7220 0.3851
+0 2002 207 5.00 0.583 12.8846 9.4686 47.2480 0.5080 816.8806 498.7624 840.2221 5.5107 0.3705
+0 2002 207 19.00 0.417 11.4360 8.2635 0.0000 0.0000 800.6549 534.5136 705.3798 4.0410 0.3728
+0 2002 208 5.00 0.583 13.9014 9.5589 46.9573 0.7620 1035.1884 623.4796 721.4979 3.5311 0.3568
+0 2002 208 19.00 0.417 11.5130 7.3925 0.0000 0.0000 874.2039 536.4960 639.2025 6.4000 0.3564
+0 2002 209 5.00 0.583 14.8582 8.8011 53.3183 0.0000 1250.2729 666.7310 618.8568 4.6207 0.3426
+0 2002 209 19.00 0.417 12.7855 7.2800 0.0000 0.0000 1239.8430 773.5823 393.6910 2.3155 0.3506
+0 2002 210 5.00 0.583 17.7229 9.8457 59.3699 0.5120 1770.0343 931.1222 450.2832 3.5382 0.3260
+0 2002 210 19.00 0.417 14.6130 8.9300 0.0000 0.0000 1315.0088 781.8342 510.4471 3.5485 0.3328
+0 2002 211 5.00 0.583 19.0096 10.6754 47.4822 0.0000 1882.4586 946.7745 502.3350 3.2386 0.3143
+0 2002 211 19.00 0.417 15.9140 9.0810 0.0000 0.0000 1472.2194 804.2521 501.1087 3.5310 0.3287
+0 2002 212 5.00 0.583 20.3957 9.9993 50.1768 0.5120 2189.0430 990.0239 395.9552 2.8704 0.3103
+0 2002 212 19.00 0.417 16.7665 9.8475 0.0000 0.0000 1658.1005 941.0759 425.4285 2.9905 0.3172
+0 2002 213 5.00 0.583 11.2857 9.7896 25.3283 0.4540 674.5475 531.1949 831.0672 2.2154 0.3119
+0 2002 213 19.00 0.417 12.5620 8.6490 0.0000 0.0000 920.8152 572.3116 697.3809 2.4965 0.3183
+0 2002 214 5.00 0.583 14.7304 9.4411 28.6825 1.1000 1010.8287 498.5056 835.9373 3.2232 0.3119
+0 2002 214 19.00 0.417 11.6525 8.3885 0.0000 0.2540 645.4260 369.5345 879.5471 3.2320 0.3255
+0 2002 215 5.00 0.583 12.9143 8.8757 24.4188 2.0320 719.9203 356.8079 930.6650 2.1625 0.3214
+0 2002 215 19.00 0.417 9.8075 8.1190 0.0000 0.0000 317.7228 183.4520 1045.2100 2.0920 0.3311
+0 2002 216 5.00 0.583 13.3096 9.5239 27.3667 8.6000 645.5879 288.8524 1053.6306 2.3343 0.3512
+0 2002 216 19.00 0.417 10.6790 8.9445 0.0000 0.0000 298.9022 152.8876 1139.6967 1.9810 0.3804
+0 2002 217 5.00 0.583 12.8696 9.5043 19.5262 10.6880 424.8910 108.3187 1230.9102 2.2346 0.3959
+0 2002 217 19.00 0.417 10.1410 7.8435 0.0000 5.0780 416.2408 230.4587 977.6644 2.7735 0.4264
+0 2002 218 5.00 0.583 12.4200 8.1382 20.9248 6.1020 567.6609 190.4022 1041.6246 2.5371 0.4436
+0 2002 218 19.00 0.417 11.3575 8.0175 0.0000 0.0000 577.0450 298.8422 922.5808 3.4815 0.4760
+0 2002 219 5.00 0.583 12.2711 9.2236 28.7617 7.3600 547.1085 273.5989 1045.9355 3.6200 0.4743
+0 2002 219 19.00 0.417 10.1520 8.1405 0.0000 0.0000 500.7752 340.3230 891.1510 3.1510 0.4966
+0 2002 220 5.00 0.583 14.6079 9.9750 48.9996 0.0000 1095.4080 647.2229 738.8417 3.3304 0.4663
+0 2002 220 19.00 0.417 10.7680 8.2330 0.0000 0.0000 723.9127 514.5299 725.6964 2.6265 0.4736
+0 2002 221 5.00 0.583 14.4843 9.2764 54.7029 0.0000 1324.0017 828.9441 500.0575 3.4186 0.4386
+0 2002 221 19.00 0.417 9.4125 7.5990 0.0000 0.0000 677.9529 539.1859 654.0242 2.8845 0.4443
+0 2002 222 5.00 0.583 16.1146 9.1904 56.8089 0.0000 1570.0518 877.1841 450.0987 2.7025 0.4211
+0 2002 222 19.00 0.417 12.7315 8.2165 0.0000 0.0000 1245.7736 856.1729 383.0417 3.3750 0.4294
+0 2002 223 5.00 0.583 16.8796 9.3111 56.9419 0.0000 1788.8177 1020.9646 311.7444 4.4900 0.4067
+0 2002 223 19.00 0.417 10.8840 7.9140 0.0000 0.0000 1016.2255 734.5771 480.9839 2.7970 0.4196
+0 2002 224 5.00 0.583 6.7286 8.3921 31.8717 0.0000 160.3851 279.9703 974.2834 2.2500 0.4090
+0 2002 224 19.00 0.417 3.6255 6.8610 0.0000 0.0000 88.0058 292.3163 845.4783 2.0165 0.4177
+0 2002 225 5.00 0.583 8.8689 7.8539 48.3813 0.0000 643.3632 565.3253 649.4943 2.5646 0.4038
+0 2002 225 19.00 0.417 11.1770 7.3205 0.0000 0.0000 995.9125 684.4924 485.3378 4.8930 0.4040
+0 2002 226 5.00 0.583 15.5496 9.6179 55.5782 0.0000 1472.8663 875.0561 480.3268 3.7382 0.3801
+0 2002 226 19.00 0.417 14.7140 8.8225 0.0000 0.0000 1548.9440 992.3105 290.7173 7.1005 0.3800
+0 2002 227 5.00 0.583 15.7082 10.4514 53.7295 0.0000 1401.2880 850.6215 575.0756 2.8064 0.3685
+0 2002 227 19.00 0.417 14.9600 9.3955 0.0000 0.0000 1375.7747 838.0716 491.2718 4.1630 0.3732
+0 2002 228 5.00 0.583 19.6918 11.1968 48.4803 0.0000 2173.0559 1186.2313 303.9646 6.6461 0.3546
+0 2002 228 19.00 0.417 12.7555 9.6515 0.0000 0.0000 1271.1770 947.5548 404.1034 2.8550 0.3615
+0 2002 229 5.00 0.583 15.7600 10.4336 44.0829 0.0000 1529.7354 973.2042 450.3246 2.7175 0.3451
+0 2002 229 19.00 0.438 15.8233 9.2419 0.0000 0.0000 1680.1008 1032.6626 284.9168 2.2681 0.3491
+0 2002 230 5.50 0.562 18.1756 10.7737 39.4337 0.0000 1867.5237 1052.3457 401.2512 4.1481 0.3310
+0 2002 230 19.00 0.438 8.7186 8.9767 0.0000 0.0000 808.3364 826.3039 471.7761 2.3267 0.3479
+0 2002 231 5.50 0.562 15.9059 10.4670 39.4457 0.0000 1401.0399 837.2704 590.2761 2.7344 0.3275
+0 2002 231 19.00 0.438 13.3400 9.7229 0.0000 0.0000 1016.2823 681.6171 674.2803 4.9124 0.3386
+0 2002 232 5.50 0.562 12.9993 10.4193 28.7789 0.0000 802.3624 561.3056 854.1932 4.4974 0.3250
+0 2002 232 19.00 0.438 10.7210 9.2652 0.0000 0.0000 585.0530 460.9135 857.3828 4.4105 0.3372
+0 2002 233 5.50 0.542 11.7477 10.2285 35.0996 0.0000 757.1653 617.9726 781.3721 4.7350 0.3221
+0 2002 233 18.50 0.458 9.3332 8.5700 0.0000 0.0000 679.4975 619.6194 644.9700 2.6168 0.3340
+0 2002 234 5.50 0.542 12.3965 9.3477 30.3633 0.0000 921.2618 646.5187 681.6226 2.7712 0.3218
+0 2002 234 18.50 0.458 9.2468 7.5886 0.0000 1.0200 671.5497 545.3858 645.9051 3.2264 0.3385
+0 2002 235 5.50 0.542 13.7950 8.9838 33.8209 0.0000 1141.1829 699.0909 602.9491 2.2912 0.3242
+0 2002 235 18.50 0.458 11.1568 8.0277 0.0000 0.2540 903.8046 645.0788 577.7913 2.7800 0.3308
+0 2002 236 5.50 0.542 11.7773 9.1069 34.1201 0.0000 556.1246 323.8816 986.7868 2.3135 0.3193
+0 2002 236 18.50 0.458 9.3809 7.9609 0.0000 0.0000 506.1708 392.6653 825.9321 2.4732 0.3280
+0 2002 237 5.50 0.542 15.5712 9.3365 42.9818 0.0000 1495.2786 888.0435 446.0440 2.5781 0.3126
+0 2002 237 18.50 0.458 14.1350 8.3986 0.0000 0.0000 1455.3120 934.8087 316.7389 2.4200 0.3203
+0 2002 238 5.50 0.542 16.9312 9.8962 35.3155 0.0000 1753.6602 1029.9500 346.3936 2.6054 0.3028
+0 2002 238 18.50 0.458 9.4159 9.3791 0.0000 2.5400 407.0299 373.0832 954.6904 2.2305 0.3170
+0 2002 239 5.50 0.542 8.8388 9.5096 22.6948 3.3020 138.3787 185.5534 1153.6686 1.9362 0.3326
+0 2002 239 18.50 0.458 7.4986 7.3155 0.0000 0.0000 207.7467 194.6561 977.2392 2.2182 0.3462
+0 2002 240 5.50 0.542 11.8173 8.4581 30.6977 0.2540 573.8422 279.4156 981.5978 2.2746 0.3255
+0 2002 240 18.50 0.458 9.5209 7.5582 0.0000 0.0000 439.5255 284.1082 904.0403 2.7045 0.3397
+0 2002 241 5.50 0.542 11.1342 8.1919 25.1262 7.6260 669.2211 410.6929 825.4998 3.2438 0.3349
+0 2002 241 18.50 0.458 7.4232 6.3650 0.0000 0.0000 421.6118 347.5666 756.4111 2.3841 0.3545
+0 2002 242 5.50 0.542 13.4188 8.2462 46.7354 0.0000 1195.9130 732.1994 516.1710 4.1531 0.3308
+0 2002 242 18.50 0.458 11.1127 7.0886 0.0000 0.0000 925.5807 605.0206 550.1373 3.6486 0.3476
+0 2002 243 5.50 0.542 15.0888 8.8773 46.7033 0.0000 1443.2990 850.7937 443.2689 4.1723 0.3221
+0 2002 243 18.50 0.458 12.2468 8.0818 0.0000 0.0000 1101.5342 742.3408 485.0635 2.8636 0.3360
+0 2002 244 5.50 0.542 14.3923 8.5369 31.6312 0.0000 1452.1265 898.0319 367.2931 3.1058 0.3232
+0 2002 244 18.50 0.458 13.0809 8.1705 0.0000 0.0000 1216.4261 786.1225 447.2231 4.7845 0.3364
+0 2002 245 5.50 0.542 15.5304 9.7404 49.5373 0.0000 1445.4321 876.0126 488.1417 3.9588 0.3142
+0 2002 245 18.50 0.458 12.6177 8.1118 0.0000 0.0000 1089.4945 701.5454 527.6729 3.8786 0.3207
+0 2002 246 5.50 0.542 16.4277 9.5346 26.4328 0.0000 1532.2493 839.4847 503.9624 2.9731 0.3106
+0 2002 246 18.50 0.458 11.8650 8.7059 0.0000 0.0000 918.8944 646.9966 627.1742 5.5055 0.3232
+0 2002 247 5.50 0.542 13.8954 9.8358 36.5125 0.0000 1117.0894 730.5474 637.9646 5.5762 0.3083
+0 2002 247 18.50 0.458 12.1732 8.2182 0.0000 0.0000 1054.9839 718.1246 518.6531 5.2350 0.3201
+0 2002 248 5.50 0.542 16.4581 9.9015 38.7304 0.0000 1544.9290 873.2553 503.6223 3.2500 0.3031
+0 2002 248 18.50 0.458 13.6364 8.3718 0.0000 0.0000 1225.9153 755.9056 494.2758 3.2209 0.3211
+0 2002 249 5.50 0.542 16.4619 9.2362 25.6622 0.2540 1438.8782 716.0351 604.1066 2.9473 0.3064
+0 2002 249 18.50 0.458 13.1268 8.8773 0.0000 0.0000 940.1463 557.8483 729.6097 4.1614 0.3215
+0 2002 250 5.50 0.542 15.4931 10.2423 30.6733 0.5080 1100.5261 576.1958 826.0338 2.8450 0.3023
+0 2002 250 18.50 0.458 11.8336 9.2264 0.0000 0.0000 748.2412 518.4171 797.0519 3.3114 0.3238
+0 2002 251 5.50 0.542 11.9350 10.0292 25.8451 5.8480 579.8964 403.5368 979.4623 2.4319 0.3111
+0 2002 251 18.50 0.458 8.3277 8.8332 0.0000 9.6480 121.9219 160.8425 1123.0182 2.4668 0.3470
+0 2002 252 5.50 0.542 7.7838 8.7331 16.2949 0.0000 114.3540 185.1337 1091.1125 1.8758 0.3507
+0 2002 252 18.50 0.458 5.2427 7.3918 0.0000 30.4980 -1.8693 144.0462 1031.5166 1.7373 0.4275
+0 2002 253 5.50 0.542 4.6885 7.0423 9.5659 0.7620 10.8868 165.6314 984.3579 1.5927 0.4809
+0 2002 253 18.50 0.479 5.2243 6.5917 0.0000 0.0000 27.1826 116.4318 1001.8450 1.5417 0.4844
+0 2002 254 6.00 0.521 9.5728 8.1248 24.4507 6.6000 159.0356 44.0190 1187.8853 1.9076 0.4844
+0 2002 254 18.50 0.479 6.9722 7.3396 0.0000 0.5080 205.1922 231.4832 940.1403 2.7470 0.5367
+0 2002 255 6.00 0.521 8.4668 7.8492 28.9748 1.5280 333.8484 284.7882 925.8604 2.2044 0.5156
+0 2002 255 18.50 0.479 5.0526 6.4613 0.0000 0.0000 138.3567 231.5234 879.0414 1.8239 0.5171
+0 2002 256 6.00 0.521 6.8464 6.7440 21.9856 0.0000 347.5750 339.6320 790.1624 3.6968 0.5130
+0 2002 256 18.50 0.479 6.1835 5.6209 0.0000 0.0000 560.7537 523.1359 530.4931 4.4778 0.5112
+0 2002 257 6.00 0.500 8.6183 6.7858 38.3965 0.0000 597.1800 463.3492 672.2822 2.9887 0.4986
+0 2002 257 18.00 0.500 6.7083 4.9721 0.0000 0.0000 519.1165 405.4235 608.8713 3.1154 0.5048
+0 2002 258 6.00 0.500 13.0763 6.6054 45.2065 0.0000 1295.6477 754.3687 373.6070 2.6267 0.4778
+0 2002 258 18.00 0.500 9.7642 5.1592 0.0000 0.0000 965.9174 631.5786 394.5756 3.5717 0.4945
+0 2002 259 6.00 0.500 13.5833 6.8767 42.2615 0.0000 1236.8125 663.6110 482.8549 3.0775 0.4665
+0 2002 259 18.00 0.500 9.0300 6.0654 0.0000 0.0000 827.5237 604.8541 479.1978 5.8275 0.4761
+0 2002 260 6.00 0.500 7.7496 6.1158 36.9776 2.0280 630.4326 511.6806 575.2051 8.7733 0.4683
+0 2002 260 18.00 0.500 3.3054 4.6854 0.0000 2.7980 236.2966 316.8386 677.7717 9.0046 0.4853
+0 2002 261 6.00 0.500 2.8908 4.7221 19.7572 0.2540 234.8044 337.0952 660.1728 3.0312 0.4888
+0 2002 261 18.00 0.500 -0.5929 2.9725 0.0000 0.0000 69.4909 245.8330 648.1221 2.5900 0.5225
+0 2002 262 6.00 0.500 2.6204 3.0096 42.5779 0.2540 337.5731 353.1844 543.3906 5.8183 0.5262
+0 2002 262 18.00 0.500 4.2821 2.8229 0.0000 0.0000 443.1379 357.8636 527.6712 8.6550 0.5356
+0 2002 263 6.00 0.500 9.7096 4.7746 43.5839 0.0000 897.2534 536.0571 467.3502 5.4517 0.5176
+0 2002 263 18.00 0.500 7.1817 4.3700 0.0000 0.0000 678.8687 492.4741 482.7798 3.3983 0.5176
+0 2002 264 6.00 0.500 8.2413 5.3504 38.1328 0.0000 706.3201 486.3050 552.6058 6.2992 0.4975
+0 2002 264 18.00 0.500 0.2150 3.7963 0.0000 0.0000 215.1834 392.4689 549.6459 2.1796 0.5156
+0 2002 265 6.00 0.500 9.8275 4.3804 41.4385 0.0000 1174.5894 778.7377 200.4467 4.5292 0.4993
+0 2002 265 18.00 0.500 5.0154 3.5542 0.0000 0.0000 741.5816 632.4727 295.8116 2.1129 0.5080
+0 2002 266 6.00 0.500 5.5758 4.0517 41.4074 0.0000 553.5370 450.1639 509.8293 2.3742 0.4924
+0 2002 266 18.00 0.500 6.5258 3.1017 0.0000 0.0000 696.0208 480.2370 421.5585 3.5429 0.5028
+0 2002 267 6.00 0.500 11.6713 5.4113 39.7148 0.2000 1197.1753 708.2550 335.2618 5.9558 0.4663
+0 2002 267 18.00 0.500 8.9871 4.3717 0.0000 0.4000 961.7845 640.8470 335.3883 2.9242 0.4849
+0 2002 268 6.00 0.500 8.7738 5.4350 28.6218 2.1000 741.3442 485.0557 558.6800 2.3829 0.4638
+0 2002 268 18.00 0.500 2.1758 4.2858 0.0000 4.2000 71.6862 190.7421 779.4255 1.7337 0.4917
+0 2002 269 6.00 0.500 4.2371 4.5733 30.6061 0.2000 328.5721 344.0950 644.1801 4.0696 0.4966
+0 2002 269 18.00 0.500 5.2013 3.6988 0.0000 0.0000 530.0206 439.3087 496.2811 5.1242 0.4922
+0 2002 270 6.00 0.500 8.1904 4.8212 32.9087 0.2540 718.3970 482.0905 523.4684 3.8883 0.4775
+0 2002 270 18.00 0.500 5.0413 3.6196 0.0000 0.0000 450.8671 364.8929 566.0474 2.1554 0.4902
+0 2002 271 6.00 0.500 6.4667 4.6267 22.8897 0.0000 539.3275 418.4388 573.6071 2.2458 0.4833
+0 2002 271 18.00 0.500 4.1154 3.4137 0.0000 0.0000 395.8225 354.9957 564.4787 3.4717 0.4805
+0 2002 272 6.00 0.500 5.8725 4.1271 35.8434 0.2540 477.6611 363.2095 598.8448 7.8871 0.4676
+0 2002 272 18.00 0.500 2.5450 3.0192 0.0000 0.5080 339.9309 365.0197 532.0779 13.4296 0.4725
+0 2002 273 6.00 0.500 6.0950 3.5525 39.1034 0.0000 603.6323 440.5061 488.1968 6.2500 0.4637
+0 2002 273 18.00 0.500 4.5004 2.5246 0.0000 0.2000 519.5391 403.1586 467.2983 2.7662 0.4810
+0 2002 274 6.00 0.500 1.1046 2.3621 15.1346 8.6000 100.2906 164.8396 696.3575 1.9275 0.4736
+0 2002 274 18.00 0.500 -2.3408 1.9471 0.0000 6.3000 4.9882 199.7630 638.8769 0.5300 0.4768
+0 2002 275 6.00 0.500 -1.2517 1.6883 23.0810 0.8000 3.3113 138.9627 686.2783 1.3617 0.4885
+0 2002 275 18.00 0.500 -0.9954 1.5075 0.0000 0.0000 16.9045 133.0751 682.9454 2.0575 0.5004
+0 2002 276 6.00 0.500 -0.2138 1.4662 24.9511 0.0000 188.9057 263.8401 550.0917 6.4329 0.4965
+0 2002 276 18.00 0.521 -3.2256 1.3188 0.0000 0.0000 128.0084 326.1823 480.3317 7.0000 0.5157
+0 2002 277 6.50 0.479 -0.3126 1.2826 20.1677 0.0000 245.4078 319.5117 485.1884 3.0509 0.5249
+0 2002 277 18.00 0.521 -0.0332 1.2980 0.0000 0.0000 172.1390 235.9834 569.4854 2.3940 0.5323
+0 2002 278 6.50 0.479 2.0661 1.3096 25.0699 0.2540 303.4555 262.5877 543.4617 6.5330 0.5423
+0 2002 278 18.00 0.521 1.3400 1.2100 0.0000 0.0000 278.3857 271.5945 529.4847 4.6924 0.5479
+0 2002 279 6.50 0.479 3.5022 1.2870 16.0805 0.0000 392.6969 268.9167 536.0089 2.7074 0.5552
+0 2002 279 18.00 0.521 4.3276 1.1688 0.0000 0.0000 517.5903 344.0218 455.0127 5.8044 0.5530
+0 2002 280 6.50 0.458 7.7664 2.0127 37.1934 0.0000 724.1251 361.5719 481.2625 6.4305 0.5909
+0 2002 280 17.50 0.542 4.9050 1.6931 0.0000 0.0000 535.8273 352.6024 473.0417 4.8708 0.6224
+0 2002 281 6.50 0.458 6.2105 2.9518 28.3970 0.0000 607.4487 403.4693 491.4675 2.4159 0.6211
+0 2002 281 17.50 0.542 2.4496 2.0688 0.0000 0.0000 293.9106 269.1338 576.5485 2.7381 0.6419
+0 2002 282 6.50 0.458 7.9259 2.8686 35.9093 0.0000 856.1473 529.3696 361.0609 3.1391 0.6253
+0 2002 282 17.50 0.542 5.1142 2.2496 0.0000 0.0000 677.2182 510.8408 344.3101 5.3085 0.6206
+0 2002 283 6.50 0.458 8.5109 3.3014 34.5926 0.0000 957.2106 606.1555 308.5991 3.4555 0.6014
+0 2002 283 17.50 0.542 7.1500 2.9142 0.0000 0.0000 846.0073 579.2874 311.5952 6.7131 0.6039
+0 2002 284 6.50 0.458 9.0650 3.9577 31.1261 0.0000 930.3561 575.5397 376.4693 6.1355 0.5847
+0 2002 284 17.50 0.542 1.3319 2.6173 0.0000 0.2540 487.7664 544.7653 331.0137 9.6485 0.5930
+0 2002 285 6.50 0.458 3.7473 1.3973 35.5444 0.0000 835.0410 692.2308 118.3699 3.3914 0.5853
+0 2002 285 17.50 0.542 -0.0342 0.7965 0.0000 0.0000 636.8556 674.5974 106.3035 2.2565 0.5946
+0 2002 286 6.50 0.458 7.2995 0.5268 34.8370 0.0000 1078.0944 660.7924 107.0450 2.7200 0.5721
+0 2002 286 17.50 0.542 3.9323 0.5885 0.0000 0.0000 823.7108 644.9587 125.7760 9.2650 0.5743
+0 2002 287 6.50 0.458 5.4923 1.3736 34.5069 0.0000 789.4868 548.6015 261.4046 2.9591 0.5555
+0 2002 287 17.50 0.542 -1.8831 0.6323 0.0000 0.0000 414.2585 521.8203 251.1621 2.4481 0.5776
+0 2002 288 6.50 0.458 5.8432 0.6155 33.5110 0.0000 884.5494 585.8234 186.3250 3.4991 0.5445
+0 2002 288 17.50 0.542 1.7992 0.7465 0.0000 0.0000 583.3041 518.9767 259.3878 6.2062 0.5607
+0 2002 289 6.50 0.458 0.6832 0.9568 32.3347 0.0000 411.4647 419.8510 368.9302 2.9873 0.5520
+0 2002 289 17.50 0.542 -0.3304 0.5369 0.0000 0.0000 412.1895 444.1826 324.1110 3.3235 0.5645
+0 2002 290 6.50 0.458 6.8027 0.8900 33.5513 0.0000 883.0361 525.3074 260.4376 4.0784 0.5368
+0 2002 290 17.50 0.542 2.5104 0.5338 0.0000 0.0000 615.9027 514.2963 253.9690 3.8123 0.5509
+0 2002 291 6.50 0.458 5.9309 0.4664 32.8050 0.0000 837.0261 526.4144 240.2186 3.5945 0.5340
+0 2002 291 17.50 0.542 -0.1050 0.4538 0.0000 0.0000 483.6559 507.6887 256.9884 2.3354 0.5578
+0 2002 292 6.50 0.458 5.3005 0.5491 32.6423 0.0000 805.8591 538.3634 230.5935 2.7236 0.5288
+0 2002 292 17.50 0.542 2.5254 0.3742 0.0000 0.0000 672.4994 563.5649 196.9887 4.0192 0.5520
+0 2002 293 6.50 0.458 4.7691 0.6427 19.6877 0.0000 815.8365 588.3278 185.1288 5.2118 0.5340
+0 2002 293 17.50 0.542 1.3215 0.3204 0.0000 0.0000 634.9377 584.8992 173.1754 2.3558 0.5498
+0 2002 294 6.50 0.458 3.8705 0.2168 29.0250 0.0000 647.9599 451.8329 301.3479 2.9164 0.5286
+0 2002 294 17.50 0.542 0.4573 0.1088 0.0000 0.0000 452.2906 435.5972 312.5337 2.9504 0.5367
+0 2002 295 6.50 0.458 -2.4445 0.1259 21.4148 0.0000 173.8275 271.0034 477.9149 2.4305 0.5211
+0 2002 295 17.50 0.542 -6.8500 -0.1338 0.0000 0.2540 21.0387 265.8521 471.0811 1.5385 0.5250
+0 2002 296 6.50 0.458 0.0991 0.0205 25.1253 1.0160 207.9408 200.9807 543.0488 2.4609 0.5178
+0 2002 296 17.50 0.542 -2.8946 0.1227 0.0000 2.7940 64.6278 192.4416 556.3074 1.7392 0.5221
+0 2002 297 6.50 0.458 -1.1736 0.1673 20.4898 0.2540 238.6616 297.6252 453.2023 3.2282 0.5215
+0 2002 297 17.50 0.542 -3.8731 0.1177 0.0000 1.7780 150.6957 314.2268 434.2910 6.8077 0.5218
+0 2002 298 6.50 0.458 -1.7255 0.0723 28.3693 0.0000 274.3815 350.0085 396.4023 6.2836 0.5191
+0 2002 298 17.50 0.542 -2.6046 0.0142 0.0000 0.0000 207.6957 318.7715 424.9642 2.3500 0.5170
+0 2002 299 6.50 0.458 1.0964 -0.0355 25.1057 0.0000 431.3172 373.8938 367.5533 2.1132 0.5101
+0 2002 299 17.50 0.562 -2.1556 0.0993 0.0000 0.5080 105.3536 202.1991 545.4606 1.1837 0.5092
+0 2002 300 7.00 0.438 -1.4700 0.1352 18.1285 0.0000 94.5197 163.5629 585.7715 1.6490 0.5070
+0 2002 300 17.50 0.562 -2.6459 0.0630 0.0000 0.0000 180.4749 295.8703 450.1171 4.0215 0.5114
+0 2002 301 7.00 0.438 -0.9376 -0.0052 23.7740 0.0000 337.6657 378.6467 364.1855 4.3776 0.5023
+0 2002 301 17.50 0.562 -5.2541 -0.0081 0.0000 5.3380 60.8399 262.6996 479.9912 1.8307 0.5058
+0 2002 302 7.00 0.438 -10.4405 -0.0838 8.6126 5.5880 29.7597 376.3906 362.8132 2.2824 0.4936
+0 2002 302 17.50 0.562 -15.8885 -0.0963 0.0000 0.2540 36.2459 489.3853 249.2462 1.3544 0.5013
+0 2002 303 7.00 0.438 -6.8414 -0.1614 19.9012 0.2540 96.6078 341.9818 393.6671 3.2995 0.4924
+0 2002 303 17.50 0.562 -6.3537 -0.0537 0.0000 4.0640 41.0330 279.1579 461.4303 1.5674 0.4920
+0 2002 304 7.00 0.417 -3.2385 0.0005 17.6530 0.7620 174.7629 308.9121 434.1742 3.4285 0.4940
+0 2002 304 17.00 0.583 -9.7893 0.0543 0.0000 9.9140 29.1683 361.8656 383.7094 0.9796 0.4927
+0 2002 305 7.00 0.417 -2.8846 0.0593 19.4261 1.0160 87.7272 206.6167 539.1865 1.7310 0.4938
+0 2002 305 17.00 0.583 -1.9653 0.0935 0.0000 0.0000 200.2642 289.4918 457.9020 2.5054 0.4932
+0 2002 306 7.00 0.417 -1.4121 0.1059 19.9403 0.0000 162.9361 226.2657 521.7000 2.4075 0.4964
+0 2002 306 17.00 0.583 -10.7757 0.1336 0.0000 1.5240 55.8446 418.8648 330.3902 2.8358 0.4980
+0 2002 307 7.00 0.417 -5.8575 0.1168 22.8764 0.0000 293.8287 510.1826 238.2922 2.9711 0.4974
+0 2002 307 17.00 0.583 -2.6771 0.1004 0.0000 0.0000 436.9655 554.2728 193.4379 2.6396 0.4971
+0 2002 308 7.00 0.417 -2.2170 0.0569 21.8205 0.0000 341.8815 438.8109 306.8811 9.0765 0.4970
+0 2002 308 17.00 0.583 -8.0746 0.0786 0.0000 0.0000 158.0610 450.5063 296.1945 7.3918 0.4953
+0 2002 309 7.00 0.417 -4.5145 0.0401 27.7605 0.0000 358.2216 532.3304 212.5860 10.9105 0.4966
+0 2002 309 17.00 0.583 -2.4732 0.0383 0.0000 0.0000 439.7568 545.4656 199.3685 9.0854 0.4976
+0 2002 310 7.00 0.417 2.5105 0.0380 26.9029 0.0000 715.1386 588.4642 156.3529 10.0335 0.4962
+0 2002 310 17.00 0.583 2.0512 0.0806 0.0000 0.0000 704.8506 605.6868 141.1068 3.6436 0.4976
+0 2002 311 7.00 0.417 4.1495 0.0790 20.3595 0.0000 839.2750 622.7170 124.0045 6.5935 0.4960
+0 2002 311 17.00 0.583 -0.4055 0.1141 0.0000 0.0000 411.5738 431.3688 316.9807 6.0229 0.5008
+0 2002 312 7.00 0.417 -0.3664 0.1146 18.1954 2.2860 258.5309 279.2858 469.0844 12.3945 0.5019
+0 2002 312 17.00 0.583 -1.0674 0.1511 0.0000 6.6040 146.0217 200.1903 549.8817 4.5343 0.4992
+0 2002 313 7.00 0.417 -3.8760 0.1746 7.0686 9.4020 82.7970 247.4762 503.6913 4.2565 0.5006
+0 2002 313 17.00 0.583 -7.9114 0.1985 0.0000 3.3020 105.6379 401.0740 351.2114 8.2046 0.5018
+0 2002 314 7.00 0.417 -7.4425 0.2066 20.1363 0.2540 137.2094 420.4104 332.2520 11.6175 0.5030
+0 2002 314 17.00 0.583 -9.1243 0.2065 0.0000 0.2540 114.7409 443.7671 308.8889 6.3918 0.5056
+0 2002 315 7.00 0.417 -8.1430 0.1963 14.1562 0.0000 168.3109 470.5558 281.6251 5.4795 0.5062
+0 2002 315 17.00 0.583 -9.4118 0.1819 0.0000 0.0000 197.8646 532.3805 219.1291 8.3146 0.5021
+0 2002 316 7.00 0.417 -4.0710 0.1494 25.2452 0.0000 418.6085 590.1109 159.8815 10.5750 0.5038
+0 2002 316 17.00 0.583 -0.2484 0.1303 0.0000 0.0000 403.5735 417.4445 331.6577 7.6329 0.5056
+0 2002 317 7.00 0.417 1.2457 0.1462 18.6960 0.0000 478.0461 419.3730 330.4702 4.0945 0.5011
+0 2002 317 17.00 0.583 -5.4404 0.1944 0.0000 0.0000 127.7975 347.8568 404.2353 4.0186 0.5032
+0 2002 318 7.00 0.417 -6.0575 0.2114 16.6469 0.0000 210.9340 452.8671 300.0221 4.3290 0.5023
+0 2002 318 17.00 0.583 -8.0100 0.1999 0.0000 0.0000 111.5224 410.3191 342.0298 2.2181 0.5035
+0 2002 319 7.00 0.417 -7.1500 0.1695 16.6587 0.0000 132.8225 405.3601 345.5695 3.3850 0.5032
+0 2002 319 17.00 0.583 -8.1343 0.1386 0.0000 0.0000 210.3668 508.3667 241.1231 9.9161 0.5055
+0 2002 320 7.00 0.417 -3.2025 0.0817 20.3136 0.0000 439.2866 575.0264 171.8180 12.1120 0.5049
+0 2002 320 17.00 0.583 -1.0708 0.0629 0.0000 0.0000 541.7029 591.4385 154.5352 13.3789 0.5026
+0 2002 321 7.00 0.417 1.5515 0.0614 21.6747 0.0000 693.8070 618.2911 127.6117 10.5790 0.5011
+0 2002 321 17.00 0.583 -5.0001 0.0894 0.0000 0.0000 248.9916 444.2171 302.9826 9.5111 0.5000
+0 2002 322 7.00 0.417 -4.7370 0.0590 23.4183 0.0000 337.6857 525.5807 220.2112 10.5820 0.5011
+0 2002 322 17.00 0.583 -5.3050 0.0189 0.0000 0.0000 313.2550 522.0236 221.9138 8.4332 0.4985
+0 2002 323 7.00 0.417 -2.9855 -0.0233 22.6344 0.0000 362.3317 485.9342 256.0538 10.5715 0.4979
+0 2002 323 17.00 0.583 -1.7954 -0.0263 0.0000 0.0000 294.0209 370.1985 371.6501 9.9839 0.4967
+0 2002 324 7.00 0.417 2.8889 -0.0098 22.8984 0.0000 559.1323 409.8963 332.7166 4.7205 0.4949
+0 2002 324 17.00 0.583 1.9543 0.0569 0.0000 0.0000 430.9848 334.2342 411.4603 6.7539 0.4909
+0 2002 325 7.00 0.417 3.8145 0.0956 22.7044 0.0000 543.0141 343.1695 404.3200 3.4040 0.4923
+0 2002 325 17.00 0.583 3.5611 0.0934 0.0000 0.0000 719.1020 537.4216 209.9637 5.3446 0.4953
+0 2002 326 7.00 0.417 5.5920 0.0938 13.9305 0.0000 788.9291 484.2794 263.1268 7.0085 0.4972
+0 2002 326 17.00 0.583 -0.3132 0.1399 0.0000 0.0000 354.9768 366.8728 382.6785 2.4725 0.4965
+0 2002 327 7.00 0.417 3.5691 0.1263 18.7155 0.0000 712.2744 529.6242 219.2926 4.6285 0.4945
+0 2002 327 17.00 0.583 -4.2371 0.1234 0.0000 2.7940 248.0185 396.9117 351.8697 4.0136 0.4953
+0 2002 328 7.00 0.417 -9.5685 0.0971 12.0356 1.2700 65.1560 395.8211 351.7405 2.8525 0.4989
+0 2002 328 17.00 0.604 -12.7510 0.0576 0.0000 1.5240 48.0806 450.9479 294.7771 1.3107 0.4971
+0 2002 329 7.50 0.396 -7.7868 0.0062 21.2082 0.2540 205.5856 489.2390 254.1108 5.1347 0.4945
+0 2002 329 17.00 0.604 -9.9079 -0.0417 0.0000 0.0000 262.7642 599.8319 141.3089 8.1407 0.4926
+0 2002 330 7.50 0.396 -7.3105 -0.0981 21.1910 0.0000 358.9576 623.6370 114.9132 4.8342 0.4916
+0 2002 330 17.00 0.604 -8.6179 -0.1476 0.0000 0.0000 304.8236 604.0864 132.1945 3.6517 0.4876
+0 2002 331 7.50 0.396 -6.4832 -0.2078 16.1280 0.0000 282.3090 517.8319 215.6976 5.4821 0.4856
+0 2002 331 17.00 0.604 -3.4497 -0.2347 0.0000 0.0000 327.1563 454.9916 277.3152 9.1072 0.4848
+0 2002 332 7.50 0.396 4.1608 -0.2615 21.1762 0.0000 604.0184 367.2028 363.8845 3.6716 0.4822
+0 2002 332 17.00 0.604 2.0129 -0.1970 0.0000 0.0000 510.6017 401.1915 332.8316 6.1700 0.4806
+0 2002 333 7.50 0.396 2.8594 -0.1460 20.4351 0.0000 554.8566 400.2807 336.0724 5.8863 0.4770
+0 2002 333 17.00 0.604 -0.4226 -0.1140 0.0000 0.0000 389.3076 402.2877 335.5323 3.8866 0.4792
+0 2002 334 7.50 0.375 0.3248 -0.1148 20.5710 0.0000 356.6298 333.6644 404.1183 2.7235 0.4785
+0 2002 334 16.50 0.625 0.2186 -0.0990 0.0000 0.0000 461.7571 445.9820 292.5244 4.5373 0.4776
+0 2002 335 7.50 0.375 -1.0124 -0.1498 19.9120 0.0000 281.5187 317.8672 418.3413 8.0439 0.4757
+0 2002 335 16.50 0.625 -3.8403 -0.1812 0.0000 0.0000 168.2920 317.3971 417.3489 9.3817 0.4742
+0 2002 336 7.50 0.375 -0.5269 -0.2089 20.4926 0.0000 394.6230 406.0184 327.4635 3.7811 0.4771
+0 2002 336 16.50 0.625 -3.9993 -0.1731 0.0000 0.0000 266.8810 421.1893 313.9276 2.2476 0.4730
+0 2002 337 7.50 0.375 -3.4628 -0.2656 13.5494 0.0000 172.2214 302.8336 428.0714 1.5091 0.4775
+0 2002 337 16.50 0.625 -5.7733 -0.4353 0.0000 0.0000 102.9093 305.8353 417.4102 2.2300 0.4702
+0 2002 338 7.50 0.375 -4.2400 -0.6971 17.3499 0.0000 203.1515 342.4620 369.1701 3.0998 0.4695
+0 2002 338 16.50 0.625 -6.4583 -0.6553 0.0000 0.5080 99.7332 314.5581 398.8712 4.8772 0.4654
+0 2002 339 7.50 0.375 -4.9211 -0.7799 19.6201 0.0000 204.5297 364.1611 343.7800 4.9883 0.4655
+0 2002 339 16.50 0.625 -6.5543 -0.7221 0.0000 0.0000 256.6924 471.7001 238.7820 7.2517 0.4580
+0 2002 340 7.50 0.375 -4.2206 -0.5693 20.1491 0.0000 329.6394 473.3387 243.9293 8.6733 0.4563
+0 2002 340 16.50 0.625 -4.9237 -0.4310 0.0000 0.0000 368.4185 543.2281 180.2035 2.9053 0.4528
+0 2002 341 7.50 0.375 -2.6139 -0.5012 15.3370 0.0000 440.6389 527.5674 192.7142 2.5842 0.4492
+0 2002 341 16.50 0.625 -7.3837 -0.5207 0.0000 0.0000 214.1104 460.1278 259.2809 2.1413 0.4416
+0 2002 342 7.50 0.375 -5.0289 -0.6071 17.0082 0.0000 237.3766 402.7181 312.8438 1.8950 0.4388
+0 2002 342 16.50 0.625 -2.3753 -0.6310 0.0000 0.0000 485.2133 558.2506 156.2454 7.3693 0.4299
+0 2002 343 7.50 0.375 0.0238 -0.7053 19.6856 0.0000 627.9618 593.8924 117.3253 7.4000 0.4293
+0 2002 343 16.50 0.625 -2.5017 -0.6171 0.0000 0.0000 529.0546 607.0526 108.0841 5.6620 0.4251
+0 2002 344 7.50 0.375 -0.2258 -0.7998 19.2437 0.0000 642.9225 615.6550 91.4110 4.7039 0.4243
+0 2002 344 16.50 0.625 -5.0290 -0.6439 0.0000 0.0000 377.6026 543.5605 170.3920 5.4547 0.4201
+0 2002 345 7.50 0.375 -6.7644 -0.8856 18.1101 0.0000 255.6857 469.1623 234.1522 7.1517 0.4196
+0 2002 345 16.50 0.625 -10.4193 -1.0350 0.0000 0.0000 143.2743 447.6278 249.2423 6.8180 0.4155
+0 2002 346 7.50 0.375 -8.1644 -1.3506 15.1780 0.0000 178.3934 412.4668 270.8273 8.4744 0.4132
+0 2002 346 16.50 0.625 -5.7883 -1.2827 0.0000 0.0000 171.9838 338.1111 348.0543 10.4140 0.4115
+0 2002 347 7.50 0.375 -2.2683 -1.1941 19.3438 0.0000 357.9222 401.3133 288.6693 10.1628 0.4080
+0 2002 347 16.50 0.625 -3.2017 -1.0874 0.0000 0.0000 334.9808 420.0913 274.4626 11.1463 0.4038
+0 2002 348 7.50 0.375 1.0156 -1.1658 17.4039 0.0000 565.4529 462.0648 229.1307 5.3211 0.4009
+0 2002 348 16.50 0.625 1.6804 -0.8809 0.0000 0.0000 501.6560 379.4360 324.0607 11.6303 0.3987
+0 2002 349 7.50 0.375 0.7311 -0.9087 11.2959 0.0000 510.5970 434.6261 267.6554 7.0072 0.3927
+0 2002 349 16.50 0.625 -3.7813 -1.1918 0.0000 0.0000 375.1798 475.5343 214.5944 10.5953 0.3950
+0 2002 350 7.50 0.375 -0.8039 -1.3398 19.2777 0.0000 477.4169 453.3384 230.5110 6.7283 0.3955
+0 2002 350 16.50 0.625 -4.3323 -0.9150 0.0000 0.0000 231.3306 363.3971 338.6465 4.2873 0.3956
+0 2002 351 7.50 0.375 -5.4661 -1.0361 15.2459 0.7620 172.4015 339.0781 357.7372 5.3507 0.3931
+0 2002 351 16.50 0.625 -9.6873 -0.7272 0.0000 1.0160 108.5065 408.3307 301.9117 5.3970 0.3915
+0 2002 352 7.50 0.375 -10.0689 -0.8608 7.5293 0.0000 156.3603 460.7798 243.5949 3.9239 0.3915
+0 2002 352 16.50 0.625 -12.7247 -0.9609 0.0000 0.0000 116.4995 474.2845 225.7283 2.9847 0.3896
+0 2002 353 7.50 0.375 -14.9356 -1.1450 12.9781 0.0000 119.2374 511.6880 180.3611 11.7356 0.3879
+0 2002 353 16.50 0.625 -12.9130 -1.2947 0.0000 0.0000 136.5858 482.9087 202.7409 13.4937 0.3848
+0 2002 354 7.50 0.375 -6.8394 -1.3278 18.6432 0.0000 344.4477 541.0592 143.1796 13.2611 0.3813
+0 2002 354 16.50 0.625 -10.3763 -1.3183 0.0000 0.2540 173.7607 463.2759 221.3629 3.5668 0.3783
+0 2002 355 7.50 0.375 -11.9600 -1.3511 16.4076 0.0000 119.9389 445.6568 237.5902 9.1300 0.3745
+0 2002 355 16.50 0.625 -11.1430 -1.3987 0.0000 0.0000 134.4929 440.8742 240.3586 4.7737 0.3722
+0 2002 356 7.50 0.375 -9.4056 -1.4428 13.8774 0.0000 191.6245 452.7912 226.5781 2.9562 0.3683
+0 2002 356 16.50 0.625 -13.0350 -1.4350 0.0000 0.0000 45.7116 389.9504 289.7478 1.2988 0.3689
+0 2002 357 7.50 0.375 -12.7889 -1.4872 7.3017 1.0160 47.1873 384.6810 292.8166 2.4211 0.3655
+0 2002 357 16.50 0.625 -13.7833 -1.5070 0.0000 0.0000 73.4066 429.3611 247.3048 2.1683 0.3654
+0 2002 358 7.50 0.375 -10.7756 -1.5483 14.6236 0.0000 120.2853 411.7357 263.1964 3.2983 0.3650
+0 2002 358 16.50 0.625 -13.0373 -1.5510 0.0000 0.2540 103.2656 442.8115 232.0087 7.1827 0.3620
+0 2002 359 7.50 0.375 -12.8067 -1.5511 15.2142 0.0000 174.6502 509.6292 165.1862 8.1550 0.3615
+0 2002 359 16.50 0.625 -14.4447 -1.5750 0.0000 0.0000 155.2405 520.5618 153.2538 7.1810 0.3604
+0 2002 360 7.50 0.375 -13.0044 -1.6578 17.2939 0.0000 146.7805 481.2922 189.0706 13.2322 0.3610
+0 2002 360 16.50 0.625 -10.4943 -1.7243 0.0000 0.0000 140.3418 417.3047 250.2933 15.4263 0.3617
+0 2002 361 7.50 0.375 -5.6283 -1.7422 16.0349 0.0000 206.1801 347.1329 319.7242 14.3178 0.3615
+0 2002 361 16.50 0.625 -2.3837 -1.6440 0.0000 0.0000 294.8505 323.9789 346.9595 11.8113 0.3609
+0 2002 362 7.50 0.375 1.8877 -1.4978 18.3519 0.0000 536.8354 371.2534 305.8013 2.8767 0.3572
+0 2002 362 16.50 0.625 1.2474 -1.3080 0.0000 0.0000 611.5112 491.5946 193.4890 3.8907 0.3567
+0 2002 363 7.50 0.375 -1.5159 -1.1822 18.3376 0.0000 465.7622 479.1054 211.3448 5.7117 0.3574
+0 2002 363 16.50 0.625 -7.6267 -1.1517 0.0000 0.0000 224.0664 445.8390 245.9225 10.5063 0.3583
+0 2002 364 7.50 0.375 -7.2139 -1.2683 19.6608 0.0000 348.8863 556.8681 129.9024 8.6683 0.3605
+0 2002 364 16.50 0.625 -4.2660 -0.6950 0.0000 0.0000 355.4550 496.6952 215.6362 7.6273 0.3611
+0 2002 365 7.50 0.375 -2.1973 0.0000 16.4903 0.0000 248.5170 341.7769 401.2858 7.0606 0.3608
+0 2002 365 16.50 0.625 -7.0677 -0.3453 0.0000 0.0000 148.1294 392.5578 334.9022 6.1173 0.3576
+0 2003 1 7.50 0.375 -6.8650 -0.7079 10.7154 0.0000 189.7449 414.3724 296.7209 3.0522 0.3612
+0 2003 1 16.50 0.625 -8.5663 -0.7522 0.0000 0.2540 246.5784 515.8517 193.2922 10.4977 0.3613
+0 2003 2 7.50 0.375 -4.2100 -0.8304 18.2149 0.0000 315.5375 447.3175 258.3860 12.1900 0.3605
+0 2003 2 16.50 0.625 -3.3947 -0.8100 0.0000 0.0000 184.9361 289.0935 417.5041 9.6553 0.3570
+0 2003 3 7.50 0.375 -1.3816 -0.7684 9.1838 0.0000 191.4762 217.4236 491.0020 15.3550 0.3556
+0 2003 3 16.50 0.625 -1.8317 -1.6881 0.0000 0.0000 219.0816 224.9344 445.0196 16.3070 0.3567
+0 2003 4 7.50 0.375 -2.1850 -0.9519 11.4683 0.0000 195.6780 246.7962 453.9096 12.8528 0.3563
+0 2003 4 16.50 0.625 -2.3883 -1.3311 0.0000 0.2540 193.4198 236.7042 447.6631 10.4837 0.3563
+0 2003 5 7.50 0.375 -2.9511 -0.7061 8.7759 0.0000 197.9473 288.9666 422.8474 3.9717 0.3570
+0 2003 5 16.50 0.625 -5.9933 -0.6755 0.0000 4.0640 21.8917 221.4011 491.6457 2.0606 0.3565
+0 2003 6 7.50 0.375 -2.6081 -0.5618 20.1917 0.0000 167.1430 250.0695 467.6169 2.0422 0.3584
+0 2003 6 16.50 0.625 1.4670 -0.5045 0.0000 0.0000 657.0193 560.9535 159.1815 3.6887 0.3594
+0 2003 7 7.50 0.375 6.9978 -0.5529 19.8507 0.0000 950.5446 513.7891 204.1843 2.0248 0.3603
+0 2003 7 16.50 0.625 5.5113 -0.5609 0.0000 0.0000 862.4895 533.9599 183.6557 5.8820 0.3583
+0 2003 8 7.50 0.375 6.6250 -0.5243 20.4546 0.0000 970.3724 567.7934 151.4567 6.0961 0.3570
+0 2003 8 16.50 0.625 -2.5748 -0.4128 0.0000 0.0000 453.5903 524.9047 199.3478 2.1121 0.3579
+0 2003 9 7.50 0.375 -8.5872 -0.4621 19.4671 0.0000 180.6584 450.4534 271.5853 3.3161 0.3608
+0 2003 9 16.50 0.625 -7.3607 -0.8854 0.0000 0.0000 184.6973 409.2991 294.0253 3.1527 0.3584
+0 2003 10 7.50 0.375 -4.3167 -1.1117 7.0011 0.2540 173.4580 297.8451 395.6377 5.0872 0.3553
+0 2003 10 16.50 0.625 -5.3863 -1.0189 0.0000 0.7620 144.2074 309.2071 388.2851 5.1183 0.3563
+0 2003 11 7.50 0.375 -5.3483 -0.9616 14.6358 0.0000 127.9510 293.5231 406.4518 1.7372 0.3556
+0 2003 11 16.50 0.625 -7.1347 -0.9295 0.0000 0.0000 146.3324 368.9983 332.3783 7.3211 0.3529
+0 2003 12 7.50 0.375 -3.4830 -1.0911 21.3380 0.0000 414.3661 505.4085 188.9594 7.8356 0.3534
+0 2003 12 16.50 0.625 -3.9647 -1.1243 0.0000 0.0000 257.4434 368.6808 324.2557 13.2440 0.3523
+0 2003 13 7.50 0.375 -2.0677 -1.1294 17.4297 0.0000 290.8976 327.5989 365.1179 15.3711 0.3530
+0 2003 13 16.50 0.625 -2.3020 -1.0030 0.0000 0.0000 250.6249 304.2775 393.9015 5.6167 0.3516
+0 2003 14 7.50 0.396 0.2183 -0.9479 19.4425 0.0000 360.9214 306.7043 393.8694 11.9253 0.3498
+0 2003 14 17.00 0.604 -0.6867 -0.8393 0.0000 0.0000 447.7422 439.7708 265.5409 7.8328 0.3513
+0 2003 15 7.50 0.396 -6.6368 -0.9129 7.7444 2.0320 138.4920 343.8005 358.2954 7.2132 0.3512
+0 2003 15 17.00 0.604 -12.3634 -0.9343 0.0000 0.7620 105.9128 458.1137 243.0476 6.3052 0.3522
+0 2003 16 7.50 0.396 -9.9737 -0.9899 17.9744 0.0000 208.1470 503.2336 195.5130 7.2895 0.3532
+0 2003 16 17.00 0.604 -10.2290 -1.0807 0.0000 1.0160 118.1861 416.7559 278.0626 3.9969 0.3514
+0 2003 17 7.50 0.396 -9.3989 -1.1611 19.0091 0.0000 157.4358 431.9492 259.4092 5.8311 0.3485
+0 2003 17 17.00 0.604 -9.9031 -1.2041 0.0000 0.0000 216.0551 501.5377 187.9734 10.0534 0.3483
+0 2003 18 7.50 0.396 -7.0232 -1.2521 21.5667 0.0000 270.6990 474.8132 212.6472 14.4532 0.3467
+0 2003 18 17.00 0.604 -4.8741 -1.2934 0.0000 0.0000 274.7365 410.3802 275.3174 13.8817 0.3483
+0 2003 19 7.50 0.396 -0.6277 -1.2979 22.1298 0.0000 523.3104 490.7881 194.7202 10.9042 0.3478
+0 2003 19 17.00 0.583 0.8291 -1.2661 0.0000 0.0000 665.5106 569.0911 117.7736 12.0629 0.3471
+0 2003 20 7.00 0.417 -0.0682 -1.2120 20.3565 0.0000 576.9152 525.7242 163.4503 7.9010 0.3479
+0 2003 20 17.00 0.583 -2.7018 -1.1496 0.0000 0.0000 334.5703 396.5012 295.3477 7.6161 0.3485
+0 2003 21 7.00 0.417 -3.6765 -1.3340 13.9801 0.0000 208.2681 299.5210 384.6622 5.4920 0.3477
+0 2003 21 17.00 0.583 -4.2450 -2.1007 0.0000 0.0000 219.1304 299.4823 352.7028 7.0332 0.3480
+0 2003 22 7.00 0.417 -4.7055 -2.1000 17.1273 0.0000 229.3991 325.6457 326.5682 10.0945 0.3477
+0 2003 22 17.00 0.583 -3.7579 -2.1118 0.0000 0.0000 221.9055 284.9127 366.8245 11.3086 0.3485
+0 2003 23 7.00 0.417 -1.5161 -2.1485 20.8993 0.0000 219.5349 192.6821 457.5724 13.7525 0.3479
+0 2003 23 17.00 0.583 -2.5132 -2.0814 0.0000 1.0200 172.5067 189.2167 463.7508 8.4021 0.3486
+0 2003 24 7.00 0.417 -3.1200 -2.0125 5.8152 0.0000 138.9276 181.4657 474.2996 1.9849 0.3481
+0 2003 24 17.00 0.583 -4.0668 -1.9482 0.0000 0.5080 144.6145 224.9757 433.4105 6.4254 0.3480
+0 2003 25 7.00 0.417 -4.7050 -1.9860 17.2103 0.0000 221.7509 322.9314 333.9129 5.5775 0.3483
+0 2003 25 17.00 0.583 -5.8518 -1.9707 0.0000 0.0000 152.2140 292.4162 365.0535 9.1171 0.3488
+0 2003 26 7.00 0.417 -2.6015 -1.6925 21.6568 0.0000 288.3047 322.5291 346.3952 13.8865 0.3481
+0 2003 26 17.00 0.583 -1.3728 -1.4775 0.0000 0.0000 248.3656 243.2487 434.6691 10.2293 0.3477
+0 2003 27 7.00 0.417 2.1572 -1.2140 20.4200 0.0000 469.9854 307.3056 381.7874 13.4165 0.3487
+0 2003 27 17.00 0.583 0.9732 -0.9676 0.0000 0.0000 413.4420 322.2797 377.4382 10.1071 0.3474
+0 2003 28 7.00 0.417 -2.2078 -0.9382 13.2442 0.5080 201.2160 252.9072 448.0862 2.6145 0.3483
+0 2003 28 17.00 0.583 -4.7936 -0.9510 0.0000 0.0000 185.8796 333.0659 367.3754 3.4004 0.3480
+0 2003 29 7.00 0.417 -3.8790 -1.1285 23.8825 0.0000 267.0665 374.1536 318.6038 9.5470 0.3489
+0 2003 29 17.00 0.583 -3.4429 -1.1729 0.0000 0.0000 257.4737 348.0964 342.7554 15.0036 0.3480
+0 2003 30 7.00 0.417 -1.7185 -1.2125 14.6925 0.2540 227.9930 248.9605 440.1925 12.2140 0.3474
+0 2003 30 17.00 0.583 -1.7886 -1.1779 0.0000 2.2860 163.7242 189.3135 501.3253 13.2536 0.3465
+0 2003 31 7.00 0.417 0.2855 -1.0775 20.0164 0.5080 252.4289 188.8950 506.0606 12.6860 0.3479
+0 2003 31 17.00 0.583 1.4020 -0.9461 0.0000 0.0000 413.5322 303.2868 397.3622 13.9314 0.3486
+0 2003 32 7.00 0.417 4.4400 -0.8748 21.8386 0.0000 662.5775 384.8091 318.9503 12.2310 0.3479
+0 2003 32 17.00 0.583 2.6120 -0.7908 0.0000 0.0000 628.6528 459.6019 247.8414 8.3086 0.3476
+0 2003 33 7.00 0.417 -3.0234 -0.7599 11.1510 3.3060 247.8519 319.9209 388.8808 3.1730 0.3489
+0 2003 33 17.00 0.583 -12.0161 -0.7953 0.0000 5.8420 33.7468 384.1481 323.0944 2.2788 0.3489
+0 2003 34 7.00 0.417 -10.9460 -0.9171 26.7139 0.5080 139.4546 460.7037 241.2126 6.7638 0.3509
+0 2003 34 17.00 0.583 -12.1375 -1.0569 0.0000 0.0000 127.0045 469.5262 226.3200 4.9186 0.3489
+0 2003 35 7.00 0.417 -12.3180 -1.1535 10.7620 0.2540 123.0226 464.9354 226.7476 2.5140 0.3485
+0 2003 35 17.00 0.583 -14.5786 -1.2246 0.0000 3.5560 40.2136 422.5252 266.1087 1.1759 0.3474
+0 2003 36 7.00 0.417 -13.6575 -1.1077 18.0648 0.7620 116.8529 487.3431 206.3530 2.2936 0.3477
+0 2003 36 17.00 0.583 -18.0386 -0.9473 0.0000 2.5400 40.3299 487.7560 212.8421 1.6406 0.3477
+0 2003 37 7.00 0.417 -19.8070 -1.0101 14.4742 0.2540 40.4149 510.1965 187.6760 1.6132 0.3489
+0 2003 37 17.00 0.583 -20.2221 -1.0886 0.0000 0.0000 74.9646 546.9409 147.5371 4.2955 0.3486
+0 2003 38 7.00 0.417 -11.5710 -1.1670 28.0724 1.2700 267.8851 589.0539 102.0496 3.9555 0.3481
+0 2003 38 17.00 0.583 -13.5457 -1.2414 0.0000 0.0000 240.3905 603.2543 84.6622 4.8007 0.3485
+0 2003 39 7.00 0.417 -11.1185 -1.3030 26.0932 0.0000 286.1497 595.5471 89.7444 3.6830 0.3481
+0 2003 39 17.00 0.583 -14.5521 -1.3607 0.0000 0.0000 85.2349 461.0504 221.7894 2.4643 0.3485
+0 2003 40 7.00 0.417 -15.5045 -1.3930 15.5228 0.2540 89.7701 481.2168 200.2554 9.5365 0.3477
+0 2003 40 17.00 0.583 -13.7757 -1.4096 0.0000 0.2540 96.0298 455.0964 225.6720 16.1561 0.3474
+0 2003 41 7.00 0.417 -9.2635 -1.4200 15.7856 1.2700 130.1474 390.0121 290.3185 8.6840 0.3481
+0 2003 41 17.00 0.583 -8.9436 -1.4200 0.0000 0.2540 146.7611 398.9042 281.4265 14.1704 0.3451
+0 2003 42 7.00 0.417 -5.7080 -1.4185 26.8894 0.0000 341.2529 496.4291 183.9649 11.3765 0.3432
+0 2003 42 17.00 0.583 -5.0296 -1.4014 0.0000 0.0000 395.4182 532.2921 148.8236 3.8429 0.3444
+0 2003 43 7.00 0.417 0.4482 -1.3615 26.8374 0.0000 653.0352 567.3228 115.4837 7.8775 0.3447
+0 2003 43 17.00 0.583 -0.3621 -1.2857 0.0000 0.0000 370.6857 329.4023 356.6264 2.3096 0.3442
+0 2003 44 7.00 0.438 2.2493 -1.2014 15.0492 0.0000 304.7342 137.0215 552.6057 1.9044 0.3441
+0 2003 44 17.50 0.562 0.1301 -0.9794 0.0000 0.2540 170.3176 118.8224 580.3910 2.3733 0.3436
+0 2003 45 7.00 0.438 -2.0700 -0.8352 12.1359 0.2540 173.5296 225.0491 480.4479 4.2057 0.3441
+0 2003 45 17.50 0.562 -6.8122 -0.8117 0.0000 5.5960 25.8664 242.4831 464.0413 1.5554 0.3434
+0 2003 46 7.00 0.438 -6.8981 -0.7282 8.2463 2.0360 25.4904 249.3095 460.8855 1.3904 0.3443
+0 2003 46 17.50 0.562 -6.3448 -0.7007 0.0000 0.0000 201.4999 407.8011 303.6111 3.1625 0.3444
+0 2003 47 7.00 0.438 -0.3422 -0.6958 22.9583 0.2540 516.5884 497.4613 214.1676 6.3176 0.3437
+0 2003 47 17.50 0.542 -5.8473 -0.7133 0.0000 3.0480 162.2478 345.5713 365.2809 7.0465 0.3444
+0 2003 48 6.50 0.458 -7.6495 -0.7180 28.5958 0.0000 233.0138 477.7508 232.8958 9.1618 0.3443
+0 2003 48 17.50 0.542 -7.3062 -0.7137 0.0000 0.0000 216.2486 453.5966 257.2419 6.7905 0.3444
+0 2003 49 6.50 0.458 -7.4127 -0.7150 16.6003 0.2540 212.7642 452.5363 258.2448 1.9914 0.3447
+0 2003 49 17.50 0.542 -11.2650 -0.7235 0.0000 0.0000 114.8667 452.3510 258.0529 3.2918 0.3444
+0 2003 50 6.50 0.458 -6.7823 -0.7416 35.1276 0.0000 330.8001 546.8146 162.7896 3.0705 0.3443
+0 2003 50 17.50 0.542 -8.8915 -0.7872 0.0000 0.0000 275.5101 553.4673 154.1335 4.4802 0.3447
+0 2003 51 6.50 0.458 -5.6755 -0.8301 28.6865 0.0000 297.4447 478.0609 227.6555 3.4136 0.3443
+0 2003 51 17.50 0.542 -7.3727 -0.8790 0.0000 0.0000 189.1542 420.6459 282.9313 4.2777 0.3447
+0 2003 52 6.50 0.458 -7.9532 -0.8990 14.4347 1.0160 149.1332 396.7827 305.9212 6.0018 0.3447
+0 2003 52 17.50 0.542 -8.6335 -0.9113 0.0000 1.2700 109.5801 375.2791 326.8876 6.3154 0.3447
+0 2003 53 6.50 0.458 -8.0059 -0.9183 18.1108 1.7820 112.9985 361.0336 340.8257 5.7868 0.3441
+0 2003 53 17.50 0.542 -12.6100 -0.9131 0.0000 0.2540 87.6688 444.6711 257.4150 6.1585 0.3444
+0 2003 54 6.50 0.458 -12.7527 -0.9029 15.1511 0.0000 117.2630 478.4827 224.0506 3.9427 0.3441
+0 2003 54 17.50 0.542 -13.8635 -0.9119 0.0000 2.2860 67.9457 448.0315 254.1067 2.8423 0.3447
+0 2003 55 6.50 0.458 -14.0341 -0.9320 29.7645 1.0200 67.6065 449.1930 252.0702 3.1782 0.3443
+0 2003 55 17.50 0.542 -8.5531 -0.9603 0.0000 0.2540 107.4230 356.8677 343.1636 4.6005 0.3444
+0 2003 56 6.50 0.458 -7.7136 -0.9870 13.5838 5.5880 45.3932 281.6229 417.2469 2.2759 0.3447
+0 2003 56 17.50 0.542 -8.0596 -0.9993 0.0000 1.7780 63.8876 308.1941 390.1453 1.3983 0.3447
+0 2003 57 6.50 0.458 -7.0564 -0.9885 10.6281 0.2540 50.8672 268.9803 429.8244 1.9954 0.3443
+0 2003 57 17.50 0.542 -8.7754 -0.9697 0.0000 2.7940 30.3151 297.4052 402.2182 0.7210 0.3447
+0 2003 58 6.50 0.458 -7.0309 -0.9488 6.2406 1.2700 63.1357 282.0416 418.4915 1.5606 0.3447
+0 2003 58 17.50 0.542 -10.9385 -0.9242 0.0000 0.0000 30.8153 351.4839 350.1177 0.9659 0.3442
+0 2003 59 6.50 0.458 -10.1832 -0.9007 6.0988 0.7620 31.0084 335.4720 367.1566 1.3029 0.3431
+0 2003 59 17.50 0.542 -10.7623 -0.8835 0.0000 0.5080 103.7217 423.0997 280.2774 3.1643 0.3447
+0 2003 60 6.50 0.458 -10.7268 -0.8720 15.7776 4.0640 83.4272 402.3988 301.4806 2.8027 0.3437
+0 2003 60 17.50 0.542 -13.7046 -0.8669 0.0000 3.8100 52.7649 433.8593 270.2457 2.0209 0.3439
+0 2003 61 6.50 0.458 -9.6677 -0.8698 36.3029 0.5080 175.9608 468.7936 235.1851 11.2945 0.3439
+0 2003 61 17.50 0.542 -9.6654 -0.8785 0.0000 0.0000 148.5604 442.1838 261.4151 9.6935 0.3444
+0 2003 62 6.50 0.458 -6.4236 -0.8852 30.2602 0.0000 263.7764 462.4006 240.9026 6.0382 0.3447
+0 2003 62 17.50 0.542 -6.0881 -0.8882 0.0000 0.0000 214.3495 407.3576 295.8145 5.4450 0.3439
+0 2003 63 6.50 0.458 -7.4568 -0.8862 22.9543 1.0160 148.6129 378.9344 324.3271 4.3277 0.3429
+0 2003 63 17.50 0.542 -12.4954 -0.8732 0.0000 2.2860 63.1905 420.8291 282.9984 3.2398 0.3447
+0 2003 64 6.50 0.458 -12.3782 -0.8577 29.5181 0.2540 124.9452 480.0835 224.4221 11.5377 0.3441
+0 2003 64 17.50 0.542 -10.0123 -0.8433 0.0000 0.0000 122.1527 425.6482 279.4886 12.6935 0.3444
+0 2003 65 6.50 0.458 -5.9655 -0.8357 23.9968 1.0160 179.0874 369.3989 336.0720 8.0682 0.3447
+0 2003 65 17.50 0.542 -5.5285 -0.8313 0.0000 2.0320 167.1189 345.1322 360.5287 5.4135 0.3447
+0 2003 66 6.50 0.458 -2.2310 -0.8275 35.4098 0.0000 332.6667 387.6955 318.1320 10.8809 0.3443
+0 2003 66 17.50 0.542 -3.3208 -0.8167 0.0000 0.5080 257.1643 357.5956 348.7097 10.1962 0.3440
+0 2003 67 6.50 0.458 -1.4179 -0.7969 33.8773 0.0000 392.3905 414.7106 292.4634 6.6941 0.3443
+0 2003 67 17.50 0.542 -2.7250 -0.7708 0.0000 0.0000 326.7915 406.2729 302.0466 6.2627 0.3447
+0 2003 68 6.50 0.479 -0.6125 -0.7414 36.8241 0.0000 473.9464 463.7002 245.9128 10.0483 0.3447
+0 2003 68 18.00 0.521 -1.1422 -0.7113 0.0000 0.0000 462.5356 481.0317 229.9117 12.7680 0.3447
+0 2003 69 6.50 0.479 0.1121 -0.6833 37.8599 0.0000 461.1769 422.9503 289.2296 7.3800 0.3447
+0 2003 69 18.00 0.521 -2.3828 -0.6555 0.0000 0.0000 246.0117 317.8578 395.5523 10.2484 0.3440
+0 2003 70 6.50 0.479 -0.3317 -0.6273 38.4639 0.0000 351.1658 334.0548 380.6068 12.7261 0.3447
+0 2003 70 18.00 0.500 -0.7643 -0.6010 0.0000 0.0000 265.9494 272.4707 443.3596 11.6079 0.3443
+0 2003 71 6.00 0.500 1.3092 -0.5736 36.8505 0.0000 440.8441 345.8997 371.1507 8.9517 0.3447
+0 2003 71 18.00 0.500 1.3997 -0.5433 0.0000 0.0000 524.6514 432.2749 286.1243 5.7646 0.3447
+0 2003 72 6.00 0.500 5.9938 -0.4517 40.4943 0.0000 897.1823 533.9481 188.5646 4.9192 0.3491
+0 2003 72 18.00 0.500 4.1363 -0.3051 0.0000 0.0000 670.9472 439.4282 289.6814 7.8617 0.3566
+0 2003 73 6.00 0.500 6.8208 -0.1873 35.3671 0.0000 881.7851 478.1143 256.3663 4.2021 0.3557
+0 2003 73 18.00 0.500 1.1018 0.0021 0.0000 0.0000 478.3143 419.6144 323.5465 9.4292 0.3606
+0 2003 74 6.00 0.500 3.4932 0.0100 39.5648 0.0000 683.5462 497.7650 245.7577 6.2808 0.3635
+0 2003 74 18.00 0.500 0.6532 0.0179 0.0000 0.0000 449.6325 417.2341 326.6544 3.5192 0.3679
+0 2003 75 6.00 0.500 1.5436 0.0187 16.4678 0.0000 386.3888 311.1932 432.7318 2.4750 0.3730
+0 2003 75 18.00 0.500 -1.6609 0.0197 0.0000 4.5720 114.6676 186.3768 557.5964 1.5134 0.3720
+0 2003 76 6.00 0.500 -2.6658 0.0192 9.5533 13.7280 6.7175 120.8759 623.0723 2.1924 0.3741
+0 2003 76 18.00 0.500 -5.1242 0.0194 0.0000 26.6640 14.0058 214.7703 529.1875 5.2017 0.3768
+0 2003 77 6.00 0.500 -6.2075 0.0295 7.1805 26.1820 19.2877 257.6078 486.8181 4.6792 0.3750
+0 2003 77 18.00 0.500 -5.6033 0.0457 0.0000 29.2750 23.4871 242.7131 502.4629 4.7717 0.3697
+0 2003 78 6.00 0.500 -3.3342 0.0515 10.8575 37.2750 19.4440 158.7568 586.6893 2.6617 0.3635
+0 2003 78 18.00 0.500 -5.1250 0.0561 0.0000 1.8500 34.3151 238.5465 507.1119 1.8100 0.3791
+0 2003 79 6.00 0.500 -3.3087 0.0620 24.6330 38.6050 47.4282 186.7524 559.1782 2.1929 0.3764
+0 2003 79 18.00 0.500 -3.8746 0.0646 0.0000 2.1500 1.2224 162.4992 583.5532 1.3080 0.3793
+0 2003 80 6.00 0.500 -2.7156 0.0661 32.5704 0.4000 164.1981 279.9781 466.1418 3.7833 0.3817
+0 2003 80 18.00 0.500 -5.5333 0.0671 0.0000 0.6000 94.2841 312.2485 433.9196 8.1075 0.3800
+0 2003 81 6.00 0.500 -1.2570 0.0680 38.3883 0.0000 280.4250 333.4727 412.7341 7.9596 0.3800
+0 2003 81 18.00 0.500 -0.6201 0.0684 0.0000 0.0000 215.7068 246.8618 499.3662 9.9025 0.3801
+0 2003 82 6.00 0.500 3.2547 0.0690 40.0643 0.0000 481.7481 313.9647 432.2885 6.8504 0.3805
+0 2003 82 18.00 0.500 -0.5042 0.0688 0.0000 2.8000 195.1722 215.4642 530.7812 2.4458 0.3773
+0 2003 83 6.00 0.500 -3.3653 0.0693 15.5333 20.6300 84.4452 224.4228 521.8478 2.7950 0.3794
+0 2003 83 18.00 0.500 -5.6900 0.0709 0.0000 0.0000 122.4802 343.9231 402.4209 7.7796 0.3823
+0 2003 84 6.00 0.500 -3.2111 0.0748 45.5955 0.0000 353.5070 486.3385 260.1872 7.7492 0.3732
+0 2003 84 18.00 0.500 -2.0792 0.0812 0.0000 0.0000 344.2876 437.9041 308.9154 10.4692 0.3777
+0 2003 85 6.00 0.500 0.4135 0.0873 21.0437 0.2540 357.1636 340.3637 406.7381 6.1425 0.3794
+0 2003 85 18.00 0.500 -6.5533 0.0897 0.0000 5.3460 106.6282 342.1064 405.1114 6.7725 0.3805
+0 2003 86 6.00 0.500 -12.0733 0.0908 23.7216 3.0480 80.4805 472.3221 274.9460 5.2108 0.3794
+0 2003 86 18.00 0.500 -15.4204 0.0923 0.0000 1.7780 64.4859 519.8647 227.4731 2.2858 0.3816
+0 2003 87 6.00 0.500 -11.9721 0.0953 36.0207 1.0160 124.3389 512.8787 234.5984 3.4875 0.3794
+0 2003 87 18.00 0.500 -13.7754 0.0995 0.0000 0.0000 177.1272 604.0096 143.6591 6.9004 0.3785
+0 2003 88 6.00 0.500 -10.7383 0.1032 31.8961 0.0000 128.9145 490.5636 257.2812 4.7750 0.3801
+0 2003 88 18.00 0.500 -9.0042 0.1058 0.0000 0.2540 108.7735 429.7737 318.1892 10.6538 0.3812
+0 2003 89 6.00 0.500 -2.8701 0.1064 46.2578 0.0000 255.9665 376.0007 371.9893 8.8754 0.3816
+0 2003 89 18.00 0.500 -1.4100 0.1042 0.0000 0.0000 259.4247 326.4672 421.4241 10.0367 0.3819
+0 2003 90 6.00 0.521 3.3788 0.1003 34.1107 0.0000 617.1362 441.8192 305.8896 7.8136 0.3818
+0 2003 90 18.50 0.479 2.1851 0.0965 0.0000 0.0000 545.2747 440.8409 306.6893 2.8361 0.3754
+0 2003 91 6.00 0.521 6.4396 0.0946 41.0570 0.0000 850.4816 470.9403 276.5008 3.9560 0.3782
+0 2003 91 18.50 0.479 4.3261 0.0978 0.0000 0.0000 774.1660 548.7872 198.8036 4.6917 0.0000
+0 2003 92 6.00 0.521 5.3308 0.1021 40.0108 0.0000 809.8726 514.3869 233.4036 4.0460 0.0000
+0 2003 92 18.50 0.479 -0.9433 0.1064 0.0000 0.0000 420.0529 458.3708 289.6200 4.2900 0.0000
+0 2003 93 6.00 0.521 -2.8212 0.1108 39.7023 0.0000 356.0622 475.5826 272.6113 5.1552 0.0000
+0 2003 93 18.50 0.458 -8.8823 0.1149 0.0000 0.0000 133.3435 449.2354 299.1513 7.2027 0.0000
+0 2003 94 5.50 0.542 -9.2204 0.1193 36.7606 0.0000 193.2345 518.8234 229.7661 7.6985 0.0000
+0 2003 94 18.50 0.458 -9.4750 0.1235 0.0000 0.5080 146.7023 480.7399 268.0486 2.2465 0.0000
+0 2003 95 5.50 0.542 -6.1823 0.1279 17.7298 3.8100 77.5223 318.6249 430.3672 2.0103 0.0000
+0 2003 95 18.50 0.458 -7.4618 0.1322 0.0000 0.5080 34.3429 314.3904 434.8021 1.7280 0.0000
+0 2003 96 5.50 0.542 -7.0365 0.1365 41.8128 0.2540 190.7192 458.1715 291.2217 9.1623 0.0000
+0 2003 96 18.50 0.458 -9.3991 0.1408 0.0000 0.0000 167.0072 499.8081 249.7843 4.2109 0.0000
+0 2003 97 5.50 0.542 -8.3808 0.1452 20.8674 5.8500 93.8579 400.2421 349.5523 3.1423 0.0000
+0 2003 97 18.50 0.458 -9.1091 0.1495 0.0000 0.5080 111.6366 437.5899 312.4049 4.0715 0.0000
+0 2003 98 5.50 0.542 -0.4125 0.1538 49.9522 0.0000 514.5607 524.7103 225.4874 5.2327 0.0000
+0 2003 98 18.50 0.458 1.1427 0.1581 0.0000 0.0000 626.7047 578.9844 171.4129 6.2786 0.0000
+0 2003 99 5.50 0.542 6.8115 0.1624 51.3334 0.0000 993.5979 603.1014 147.4980 6.0927 0.0000
+0 2003 99 18.50 0.458 3.9532 0.1667 0.0000 0.0000 775.0678 575.6765 175.1236 3.5064 0.0000
+0 2003 100 5.50 0.542 7.5262 0.1711 50.2500 0.0000 993.1736 555.0709 195.9321 3.4562 0.0000
+0 2003 100 18.50 0.458 4.0091 0.1754 0.0000 0.0000 718.4106 515.5200 235.6831 3.8209 0.0000
+0 2003 101 5.50 0.542 7.7781 0.1797 35.7664 0.0000 926.4371 471.8320 279.5731 2.7911 0.0000
+0 2003 101 18.50 0.458 3.9627 0.1840 0.0000 0.0000 650.1884 450.6127 300.9936 3.5650 0.0000
+0 2003 102 5.50 0.542 8.3277 0.1883 44.8165 0.0000 1022.9125 525.4171 226.3904 3.1027 0.0000
+0 2003 102 18.50 0.458 5.2050 0.1926 0.0000 0.0000 800.3621 525.8426 226.1671 6.6527 0.0000
+0 2003 103 5.50 0.542 10.0658 0.1969 48.3209 0.0000 1199.5085 557.9414 194.2686 3.8038 0.0000
+0 2003 103 18.50 0.458 5.5855 0.2013 0.0000 0.0000 843.8635 542.1409 210.2724 3.7641 0.0000
+0 2003 104 5.50 0.542 8.6819 0.2056 45.5495 0.0000 1078.4155 544.5444 208.0702 3.3804 0.0000
+0 2003 104 18.50 0.458 4.6336 0.2099 0.0000 0.0000 702.1330 461.8355 290.9817 3.3959 0.0000
+0 2003 105 5.50 0.542 2.7940 0.2142 25.9193 5.3420 455.8711 301.0922 451.9254 5.7712 0.0000
+0 2003 105 18.50 0.458 -3.7882 0.2185 0.0000 0.5080 108.6739 272.9109 480.3103 14.5145 0.0000
+0 2003 106 5.50 0.542 -0.3694 0.2228 53.6452 0.0000 387.1907 401.8044 351.6181 7.5685 0.0000
+0 2003 106 18.50 0.458 -1.3615 0.2272 0.0000 0.0000 249.9679 319.7882 433.8373 2.1000 0.0000
+0 2003 107 5.50 0.542 3.5808 0.2315 39.7873 0.0000 556.9764 379.0770 374.7489 2.6412 0.0000
+0 2003 107 18.50 0.458 -1.3743 0.2358 0.0000 2.2900 198.0344 263.3430 490.6848 2.8041 0.0000
+0 2003 108 5.50 0.542 -1.6237 0.2401 48.0630 2.2900 244.0895 322.0017 432.2279 9.8773 0.0000
+0 2003 108 18.50 0.458 -4.5764 0.2445 0.0000 0.0000 178.0607 371.3309 383.1039 2.4793 0.0000
+0 2003 109 5.50 0.542 -3.6742 0.2487 13.7176 18.3000 62.3235 224.3075 530.3277 2.4655 0.0000
+0 2003 109 18.50 0.458 -5.7305 0.2530 0.0000 3.5540 57.0397 289.4662 465.3713 2.3720 0.0000
+0 2003 110 5.50 0.542 -1.2642 0.2573 44.7191 0.2540 245.7313 307.8176 447.2216 3.6704 0.0000
+0 2003 110 18.50 0.458 -3.7423 0.2617 0.0000 0.0000 102.4176 267.7834 487.4593 1.9930 0.0000
+0 2003 111 5.50 0.542 2.5565 0.2660 34.3885 0.7620 319.4003 197.3355 558.1099 2.1197 0.0000
+0 2003 111 18.50 0.458 0.7176 0.2703 0.0000 0.0000 203.9532 181.8964 573.7516 2.4895 0.0000
+0 2003 112 5.50 0.542 2.1160 0.2747 26.7555 3.8080 234.3097 141.4615 614.3901 2.4750 0.0000
+0 2003 112 18.50 0.458 -0.3241 0.2789 0.0000 0.2540 140.3085 167.1116 588.9399 2.2388 0.0000
+0 2003 113 5.50 0.542 -4.4392 0.2833 13.6864 3.5560 127.0446 315.0717 441.1847 9.8619 0.0000
+0 2003 113 18.50 0.458 -3.8759 0.2875 0.0000 3.3020 90.5448 262.2716 494.1857 6.2809 0.0000
+0 2003 114 5.50 0.562 -1.6933 0.2920 39.2183 3.8100 97.8298 177.4738 579.1929 4.2629 0.0000
+0 2003 114 19.00 0.438 0.8614 0.2963 0.0000 0.0000 281.8262 254.7223 502.1459 4.3476 0.0000
+0 2003 115 5.50 0.562 4.9548 0.3006 53.5347 0.0000 441.9494 181.4723 575.6002 3.2514 0.0000
+0 2003 115 19.00 0.438 3.5000 0.3049 0.0000 0.0000 485.5517 318.5269 438.7467 2.2900 0.0000
+0 2003 116 5.50 0.562 6.7989 0.3093 53.8744 0.0000 804.3525 419.3602 338.1183 4.0256 0.0000
+0 2003 116 19.00 0.438 2.6076 0.3135 0.0000 0.0000 481.0075 359.0907 398.5886 2.6252 0.0000
+0 2003 117 5.50 0.562 2.8337 0.3179 39.6545 0.0000 345.8229 216.1747 541.7100 2.4593 0.0000
+0 2003 117 19.00 0.417 1.6247 0.3221 0.0000 0.0000 334.9819 270.4207 487.6624 2.1835 0.0000
+0 2003 118 5.00 0.583 4.4600 0.3264 29.9307 0.0000 346.9308 121.2390 637.0481 2.2271 0.0000
+0 2003 118 19.00 0.417 3.4160 0.3307 0.0000 0.0000 474.5183 314.0189 444.4694 2.5090 0.0000
+0 2003 119 5.00 0.583 4.3600 0.3350 38.5587 0.0000 509.3535 291.4645 467.2281 2.9746 0.0000
+0 2003 119 19.00 0.417 -0.6511 0.3394 0.0000 0.0000 152.5978 197.0525 561.8459 2.1001 0.0000
+0 2003 120 5.00 0.583 0.4966 0.3437 37.7561 0.2540 383.7148 373.0857 386.0145 4.5390 0.0000
+0 2003 120 19.00 0.417 -3.0145 0.3480 0.0000 0.0000 206.2996 348.5085 410.7955 2.7436 0.0000
+0 2003 121 5.00 0.583 -1.1828 0.3523 44.2638 1.2700 268.6039 335.8912 423.6167 5.3918 0.0000
+0 2003 121 19.00 0.417 -2.8160 0.3566 0.0000 0.0000 206.9446 342.5205 417.1894 1.3822 0.0000
+0 2003 122 5.00 0.583 1.4596 0.3609 34.8223 0.0000 406.6322 347.1048 412.8094 3.7821 0.0000
+0 2003 122 19.00 0.417 2.8370 0.3653 0.0000 0.0000 504.9077 378.1891 381.9315 3.0131 0.0000
+0 2003 123 5.00 0.583 6.3354 0.3696 41.2709 0.0000 867.5374 522.2574 238.0649 4.4700 0.0000
+0 2003 123 19.00 0.417 -0.0080 0.3739 0.0000 0.2540 290.8432 304.7855 455.7414 5.5725 0.0000
+0 2003 124 5.00 0.583 -0.9747 0.3782 30.7129 0.2540 237.9002 297.6169 463.1122 8.0182 0.0000
+0 2003 124 19.00 0.417 -2.4690 0.3826 0.0000 0.7620 157.7758 280.6219 480.3139 9.7905 0.0000
+0 2003 125 5.00 0.583 -2.2651 0.3868 27.2316 0.2540 254.1942 365.2733 395.8644 4.0679 0.0000
+0 2003 125 19.00 0.417 -3.0635 0.3912 0.0000 0.0000 229.7009 375.7904 385.5544 3.4415 0.0000
+0 2003 126 5.00 0.583 -0.1194 0.3955 50.2458 0.0000 414.0865 432.4878 329.0587 3.6364 0.0000
+0 2003 126 19.00 0.417 -1.5202 0.3997 0.0000 0.0000 342.0059 427.4937 334.2556 1.8430 0.0000
+0 2003 127 5.00 0.583 1.7378 0.4041 35.6826 2.0280 343.5924 274.5575 487.3981 3.0146 0.0000
+0 2003 127 19.00 0.417 0.1099 0.4085 0.0000 0.0000 289.6783 302.3475 459.8138 3.0390 0.0000
+0 2003 128 5.00 0.583 -0.0091 0.4124 47.0986 2.2860 267.9609 284.2491 478.0971 7.0936 0.6092
+0 2003 128 19.00 0.417 -2.4290 0.4146 0.0000 2.2900 51.2565 174.2578 588.1923 1.5874 0.6145
+0 2003 129 5.00 0.583 -3.4386 0.4159 17.9362 6.6080 9.2429 171.3175 591.1946 1.5797 0.6184
+0 2003 129 19.00 0.417 -5.5055 0.4178 0.0000 15.7520 27.4090 260.3331 502.2733 3.8939 0.6281
+0 2003 130 5.00 0.583 -4.6611 0.4197 40.5962 2.7980 100.3052 303.9428 458.7503 4.6111 0.6299
+0 2003 130 19.00 0.417 -4.3275 0.4206 0.0000 0.0000 149.8567 344.2601 418.4767 3.3780 0.6355
+0 2003 131 5.00 0.583 0.0832 0.4215 41.2300 0.0000 326.3530 333.8394 428.9384 3.2593 0.6356
+0 2003 131 19.00 0.417 0.4865 0.4223 0.0000 0.0000 335.7782 332.2833 430.5318 2.1208 0.6362
+0 2003 132 5.00 0.583 6.7836 0.4235 56.1865 0.0000 753.3094 374.1254 388.7489 3.1604 0.6375
+0 2003 132 19.00 0.417 6.3320 0.4253 0.0000 0.0000 817.1606 479.5198 283.4397 6.6340 0.6423
+0 2003 133 5.00 0.583 7.0950 0.4300 48.4600 0.0000 759.8462 368.0272 395.1535 6.8811 0.6565
+0 2003 133 19.00 0.417 4.3735 0.4320 0.0000 0.5080 437.9156 225.1027 538.1769 7.3815 0.6574
+0 2003 134 5.00 0.583 8.0857 0.4271 52.7594 0.0000 703.4034 229.7781 533.2672 2.8475 0.6866
+0 2003 134 19.00 0.417 5.5505 0.4353 0.0000 0.0000 462.4400 176.1306 587.3007 2.9090 0.7162
+0 2003 135 5.00 0.583 6.5864 0.4396 31.2293 0.7620 393.3202 35.1894 728.4504 2.2188 0.7062
+0 2003 135 19.00 0.417 4.6105 0.4464 0.0000 0.0000 238.6882 11.7088 752.2540 6.4570 0.7249
+0 2003 136 5.00 0.583 9.3229 0.4574 52.9435 0.0000 824.4149 258.9075 505.5733 6.0154 0.7594
+0 2003 136 19.00 0.417 7.8010 0.4710 0.0000 0.0000 674.7872 234.3470 530.7844 2.1700 0.7921
+0 2003 137 5.00 0.583 12.6014 0.4726 46.7896 0.0000 1165.3867 301.5654 463.6400 3.4293 0.7875
+0 2003 137 19.00 0.417 9.3230 0.4776 0.0000 0.0000 886.3141 327.5007 437.9422 2.6265 0.8234
+0 2003 138 5.00 0.583 7.3225 0.4756 39.2689 1.2700 503.8770 90.4775 674.8723 3.7886 0.8071
+0 2003 138 19.00 0.417 1.2777 0.4816 0.0000 0.2540 145.6022 105.2124 660.4207 2.9075 0.8279
+0 2003 139 5.00 0.583 -1.5157 0.4807 27.5088 0.7620 56.0271 142.3472 623.2421 1.8542 0.8018
+0 2003 139 19.00 0.417 -5.2820 0.4832 0.0000 0.7620 24.3148 254.0136 511.6980 2.3590 0.7885
+0 2003 140 5.00 0.583 -0.9169 0.4803 29.6315 2.0320 5.6270 59.1563 706.4125 1.4379 0.7851
+0 2003 140 19.00 0.417 3.2650 0.4830 0.0000 0.0000 503.5246 358.7528 406.9469 2.0390 0.7926
+0 2003 141 5.00 0.583 8.3461 0.4759 59.5067 0.0000 886.5568 393.7328 371.6288 2.6682 0.8094
+0 2003 141 19.00 0.417 7.2645 0.4818 0.0000 0.0000 716.2153 315.5569 450.0882 2.0019 0.8451
+0 2003 142 5.00 0.583 11.0764 0.4749 52.9583 0.0000 1023.8799 308.2361 457.0780 2.6371 0.8584
+0 2003 142 19.00 0.417 9.0410 0.4830 0.0000 0.0000 826.4290 291.2423 474.4598 2.6685 0.8938
+0 2003 143 5.00 0.604 13.0262 0.4744 57.8156 0.0000 1139.7996 237.5907 527.7007 3.3128 0.8980
+0 2003 143 19.50 0.396 9.0463 0.4838 0.0000 0.0000 782.2975 247.2879 518.4494 3.7021 0.9203
+0 2003 144 5.00 0.604 8.2886 0.4770 33.1123 0.0000 236.7496 -240.9805 1006.3949 1.8962 0.8935
+0 2003 144 19.50 0.396 6.1811 0.4834 0.0000 0.0000 289.9108 -34.8877 800.6050 2.6658 0.9299
+0 2003 145 5.00 0.604 9.5362 0.4765 43.2658 3.8080 328.6351 -253.8244 1019.2141 2.7235 0.9171
+0 2003 145 19.50 0.396 7.8495 0.4828 0.0000 0.0000 468.1504 24.5063 741.1834 5.9484 0.9550
+0 2003 146 5.00 0.604 11.7362 0.4755 45.0042 0.2540 816.9463 39.2830 726.0592 3.3580 0.9805
+0 2003 146 19.50 0.396 9.5321 0.4829 0.0000 0.0000 607.1878 33.0083 732.6864 4.5105 1.0000
+0 2003 147 5.00 0.604 13.5617 0.4752 56.2896 0.5080 909.1804 -46.2178 811.5469 3.2154 1.0000
+0 2003 147 19.50 0.396 9.3805 0.4824 0.0000 0.0000 274.1976 -287.9440 1053.6161 2.2858 1.0000
+0 2003 148 5.00 0.604 13.7197 0.4748 55.5385 0.0000 552.4520 -418.6258 1183.9368 2.2110 1.0000
+0 2003 148 19.50 0.375 12.7256 0.4811 0.0000 0.0000 823.2648 -39.2696 804.8795 3.2656 1.0000
+0 2003 149 4.50 0.625 15.9967 0.4730 53.1106 1.2740 1366.5579 118.8296 646.3930 5.9680 1.0000
+0 2003 149 19.50 0.375 12.3133 0.4809 0.0000 0.0000 894.0206 71.8501 693.7492 6.3089 1.0000
+0 2003 150 4.50 0.625 15.0610 0.4777 45.0428 0.0000 1120.7395 -1.5178 766.9655 3.4707 1.0000
+0 2003 150 19.50 0.375 8.3856 0.5986 0.0000 0.0000 188.6696 -294.6316 1065.8539 1.9892 1.0000
+0 2003 151 4.50 0.625 13.8570 0.6876 49.3031 0.5080 952.3164 -26.1368 801.6351 5.3873 1.0000
+0 2003 151 19.50 0.375 9.2206 0.8219 0.0000 0.0000 502.6315 -30.9662 812.9623 5.6517 1.0000
+0 2003 152 4.50 0.625 8.8410 0.8836 31.9854 4.5680 561.5184 47.3961 737.6014 6.9563 1.0000
+0 2003 152 19.50 0.375 5.4076 0.8736 0.0000 0.0000 326.0396 55.0572 729.4498 2.8657 1.0000
+0 2003 153 4.50 0.625 9.2375 0.8110 50.8302 0.2540 733.4094 177.4655 604.0053 3.6367 1.0000
+0 2003 153 19.50 0.375 5.8589 0.8288 0.0000 0.0000 484.1722 185.4750 596.8578 4.3498 1.0000
+0 2003 154 4.50 0.625 5.3410 1.1640 40.9388 0.0000 159.8562 -85.9154 884.8372 2.0791 1.0000
+0 2003 154 19.50 0.375 3.7267 1.6061 0.0000 0.0000 74.9336 -41.3940 862.4270 2.0907 1.0000
+0 2003 155 4.50 0.625 2.3216 1.6757 19.4653 3.5560 68.8671 28.8154 795.8126 1.7261 1.0000
+0 2003 155 19.50 0.375 0.1994 1.2694 0.0000 1.2700 18.5173 70.2727 733.8522 1.6166 1.0000
+0 2003 156 4.50 0.625 1.1496 1.5637 18.6039 1.0160 4.2994 23.6948 795.4910 1.8170 1.0000
+0 2003 156 19.50 0.375 0.2378 1.7283 0.0000 0.0000 49.5296 121.9521 705.5580 1.7307 1.0000
+0 2003 157 4.50 0.625 4.6543 2.7407 35.8881 1.7780 386.7051 268.7088 615.6937 2.9607 1.0000
+0 2003 157 19.50 0.375 -0.9808 2.1061 0.0000 5.7100 14.6472 162.0631 685.4402 1.5893 1.0000
+0 2003 158 4.50 0.625 1.8525 2.4183 53.7846 0.3800 273.1166 290.8164 574.7261 2.9893 1.0000
+0 2003 158 19.50 0.375 1.2001 2.1589 0.0000 0.0000 320.8647 369.3594 480.8684 3.2119 1.0000
+0 2003 159 4.50 0.625 9.3263 4.5047 54.8944 0.0000 956.0400 614.7535 382.7283 3.5200 1.0000
+0 2003 159 19.50 0.375 8.7367 4.0522 0.0000 0.0000 832.6292 512.9943 443.8304 2.9538 1.0000
+0 2003 160 4.50 0.625 9.8177 6.1157 38.9652 2.5400 704.0086 427.0606 667.5934 4.1103 0.9735
+0 2003 160 19.50 0.375 6.7972 5.5317 0.0000 0.0000 334.4915 248.4470 799.1464 2.7183 0.9884
+0 2003 161 4.50 0.625 9.8147 6.6473 33.7352 1.3120 640.8068 392.4485 732.9238 4.8920 0.9648
+0 2003 161 19.50 0.375 7.6261 5.6128 0.0000 0.0000 434.4180 295.7177 757.5612 2.4733 0.9511
+0 2003 162 4.50 0.625 11.2320 7.6150 51.9280 0.2540 947.0710 651.5781 554.1551 5.6440 0.9152
+0 2003 162 19.50 0.375 8.0994 6.1739 0.0000 0.0000 740.3196 604.0372 486.9968 2.6922 0.9366
+0 2003 163 4.50 0.625 9.1263 6.9280 38.2242 2.2840 677.8406 511.4631 636.4031 2.5863 0.9000
+0 2003 163 19.50 0.375 5.4572 5.9283 0.0000 0.5080 311.6201 342.5033 731.5662 4.0528 0.9291
+0 2003 164 4.50 0.625 7.0217 6.1467 26.0729 2.3730 315.3157 253.7468 836.1457 2.3063 0.9062
+0 2003 164 19.50 0.375 5.5928 5.1456 0.0000 1.0160 250.9381 222.4775 800.4314 3.8033 0.9260
+0 2003 165 4.50 0.625 9.8830 8.0697 57.5068 0.0000 580.0744 445.2425 797.5892 4.2686 0.8709
+0 2003 165 19.50 0.375 7.8944 6.9050 0.0000 0.0000 487.7668 419.0732 723.8861 3.6856 0.8991
+0 2003 166 4.50 0.625 13.3617 8.9647 51.9802 0.0000 1068.1964 683.0189 633.2612 3.0480 0.8418
+0 2003 166 19.50 0.375 9.9261 7.6233 0.0000 0.0000 706.3820 527.2275 666.4933 2.8467 0.8733
+0 2003 167 4.50 0.625 11.5840 8.2947 32.8716 0.0000 721.1334 445.6621 801.7852 2.5940 0.8346
+0 2003 167 19.50 0.375 7.5733 7.4367 0.0000 0.0000 215.4442 200.7969 977.8015 1.9444 0.8496
+0 2003 168 4.50 0.625 8.7577 7.7463 32.3479 10.6540 202.8240 119.0324 1082.9814 2.1463 0.8268
+0 2003 168 19.50 0.375 5.4400 6.8622 0.0000 0.0000 61.1677 156.2463 981.4104 1.5217 0.8567
+0 2003 169 4.50 0.625 5.9860 6.7590 14.0499 1.5200 3.1234 54.8098 1076.3773 1.6087 0.8447
+0 2003 169 19.50 0.375 6.1061 6.7406 0.0000 0.0000 86.8170 130.2106 998.8564 2.0174 0.8553
+0 2003 170 4.50 0.625 8.5613 7.0613 28.0799 1.7820 242.3050 130.6752 1023.7539 2.0925 0.8410
+0 2003 170 19.50 0.375 7.0794 6.1672 0.0000 0.0000 262.4226 200.6126 889.9045 2.5706 0.8634
+0 2003 171 4.50 0.625 9.1423 7.4877 36.3971 2.0320 492.4682 363.5330 823.7299 2.7597 0.8338
+0 2003 171 19.50 0.375 6.4961 6.0517 0.0000 0.0000 502.4635 473.3135 609.7018 2.6967 0.8570
+0 2003 172 4.50 0.625 11.7163 8.5143 60.8798 0.0000 994.2441 728.3083 550.5845 2.7797 0.8065
+0 2003 172 19.50 0.375 7.7211 7.2544 0.0000 0.0000 524.0294 483.4906 684.4337 2.3833 0.8262
+0 2003 173 4.50 0.625 9.1537 8.3100 34.1115 0.0000 346.3147 284.5057 970.4122 2.1807 0.7902
+0 2003 173 19.50 0.375 7.1461 7.6806 0.0000 0.0000 157.4561 192.5566 1004.7548 1.6862 0.8040
+0 2003 174 4.50 0.625 7.6977 8.1703 23.0859 0.0000 140.3354 172.3896 1062.1095 1.9164 0.7848
+0 2003 174 19.50 0.375 7.7744 7.8478 0.0000 0.0000 60.7763 62.6017 1146.0441 1.6807 0.7728
+0 2003 175 4.50 0.625 6.9940 8.8820 45.4631 0.0000 140.4471 285.3826 1010.2259 2.2233 0.7613
+0 2003 175 19.50 0.375 2.3816 7.4000 0.0000 0.2540 -7.5583 305.0154 871.4127 1.0272 0.7941
+0 2003 176 4.50 0.625 5.4408 8.0390 47.5595 0.2540 366.8876 548.0327 682.0624 3.8197 0.7613
+0 2003 176 19.50 0.375 3.1106 6.3922 0.0000 0.0000 221.6930 426.1999 680.1578 1.9622 0.7865
+0 2003 177 4.50 0.625 7.6847 7.7253 53.8885 0.0000 569.2777 574.3456 637.0536 2.7643 0.7465
+0 2003 177 19.50 0.375 6.1944 6.1289 0.0000 0.0000 477.5692 475.4294 613.7708 2.8189 0.7669
+0 2003 178 4.50 0.625 11.9697 8.4573 60.5523 0.0000 1042.9506 741.8385 533.2736 3.1807 0.7187
+0 2003 178 19.50 0.375 9.6050 7.3522 0.0000 0.0000 825.2946 651.4070 522.9422 3.2056 0.7307
+0 2003 179 4.50 0.625 14.4900 9.6547 59.2345 0.0000 1243.0452 791.4830 581.0844 2.8593 0.6898
+0 2003 179 19.50 0.375 8.4206 9.5450 0.0000 0.0000 293.6149 372.7957 969.0493 2.0578 0.7149
+0 2003 180 4.50 0.625 8.7923 10.3943 36.8015 0.0000 81.0209 210.6684 1208.2498 2.0188 0.6899
+0 2003 180 19.50 0.375 9.2917 8.6356 0.0000 0.0000 405.1539 353.6448 916.6356 3.2994 0.6927
+0 2003 181 4.50 0.625 16.0617 10.5607 53.9924 0.0000 1369.8896 811.5844 634.7507 3.8197 0.6644
+0 2003 181 19.50 0.375 13.5667 9.5306 0.0000 0.0000 1023.7573 653.5129 688.0363 3.7172 0.6879
+0 2003 182 4.50 0.625 18.6407 11.2897 54.7875 0.0000 1810.8719 987.7560 522.0015 3.0767 0.6424
+0 2003 182 19.50 0.375 15.1511 10.3828 0.0000 0.0000 1469.9211 994.9005 418.5053 3.2194 0.6489
+0 2003 183 4.50 0.625 18.4457 11.9513 62.9180 0.0000 1929.3372 1195.5405 379.0410 3.8015 0.6135
+0 2003 183 19.50 0.375 15.3250 10.6322 0.0000 0.0000 1474.8481 1003.6415 431.9212 3.2883 0.6246
+0 2003 184 4.50 0.625 18.9163 12.3433 57.7418 0.0000 1970.4767 1210.7014 401.4237 4.0877 0.5821
+0 2003 184 19.50 0.375 14.8189 10.9361 0.0000 0.0000 1428.2542 1035.3188 426.9308 3.5600 0.5922
+0 2003 185 4.50 0.625 17.8980 12.5080 61.7512 0.0000 1883.7968 1272.3483 357.3720 3.0365 0.5604
+0 2003 185 19.50 0.375 14.0739 10.9928 0.0000 0.0000 1353.6766 1053.5516 414.2089 3.6550 0.5738
+0 2003 186 4.50 0.625 18.6617 12.6540 57.1499 0.0000 2000.1169 1303.8861 340.3721 4.6333 0.5340
+0 2003 186 19.50 0.375 14.7517 11.3122 0.0000 0.0000 1524.0538 1180.4244 316.1794 3.7144 0.5496
+0 2003 187 4.50 0.625 17.3257 11.7350 42.4319 0.0000 1691.7894 1076.1647 466.6263 3.1157 0.5174
+0 2003 187 19.50 0.375 13.6244 10.5667 0.0000 0.0000 1152.1141 860.8969 567.6097 3.2606 0.5251
+0 2003 188 4.50 0.625 16.4947 12.0663 44.8989 0.0000 1357.4052 876.2616 704.4429 2.7720 0.4939
+0 2003 188 19.50 0.375 10.9572 10.8889 0.0000 0.0000 458.0190 452.1743 1004.8910 3.0811 0.5035
+0 2003 189 4.50 0.625 19.8177 12.9953 61.4348 0.0000 2140.1462 1308.7821 372.6692 4.3300 0.4769
+0 2003 189 19.50 0.375 14.5294 12.0806 0.0000 0.0000 1532.4211 1261.0889 306.0919 5.3300 0.4811
+0 2003 190 4.50 0.625 15.7423 13.5507 62.1915 0.0000 1638.5400 1395.3510 342.3795 2.9592 0.4688
+0 2003 190 19.50 0.375 13.0961 11.9450 0.0000 0.0000 1311.1553 1202.2111 353.3976 3.1472 0.4861
+0 2003 191 4.50 0.625 17.1577 13.7450 61.6587 0.0000 1675.6263 1303.2465 459.2168 2.9200 0.4508
+0 2003 191 19.50 0.375 14.2061 12.5356 0.0000 0.0000 1229.1591 1061.8110 550.0861 3.8156 0.4634
+0 2003 192 4.50 0.625 18.7123 13.5783 46.9649 0.0000 1874.5854 1262.3185 467.3572 3.5970 0.4373
+0 2003 192 19.50 0.375 13.7161 12.5139 0.0000 0.0000 925.4631 802.5139 805.8176 2.8167 0.4508
+0 2003 193 4.50 0.625 18.8937 14.1507 49.8314 0.0000 1773.9589 1206.5917 588.4902 3.2283 0.4289
+0 2003 193 19.50 0.375 16.1239 12.9500 0.0000 0.0000 1495.0381 1150.5424 501.7829 3.6744 0.4423
+0 2003 194 4.50 0.625 20.4113 14.6107 50.6329 0.0000 2164.3301 1424.5765 421.2753 4.0690 0.4089
+0 2003 194 19.50 0.375 16.4350 13.5578 0.0000 0.0000 1605.0660 1281.3435 432.6778 4.0222 0.4184
+0 2003 195 4.50 0.625 19.5063 15.4993 51.1373 0.0000 2015.3398 1511.2981 439.6454 3.0373 0.3872
+0 2003 195 19.50 0.375 15.4233 14.1789 0.0000 0.0000 1337.7144 1202.9237 576.6783 3.3189 0.3995
+0 2003 196 4.50 0.625 18.8760 15.4183 43.1119 1.5240 1728.9219 1271.3534 660.5705 4.1003 0.3803
+0 2003 196 19.50 0.375 16.4761 14.2444 0.0000 0.0000 1340.4204 1085.7524 698.0762 5.4511 0.3941
+0 2003 197 4.50 0.625 18.7750 16.3003 45.8996 0.0000 1630.4512 1309.4214 730.4009 5.1560 0.3826
+0 2003 197 19.50 0.396 16.4437 14.7963 0.0000 0.0000 1364.6959 1176.2814 669.1497 3.8874 0.3922
+0 2003 198 5.00 0.604 20.2197 16.2124 41.6430 0.0000 1793.4806 1269.4243 754.6737 2.3827 0.3638
+0 2003 198 19.50 0.396 16.8700 15.2584 0.0000 0.0000 1352.3008 1160.6375 736.3604 3.4268 0.3648
+0 2003 199 5.00 0.604 17.8279 15.5783 37.2165 2.3320 1231.2286 949.2382 988.8430 2.3957 0.3483
+0 2003 199 19.50 0.396 11.9258 13.9600 0.0000 2.7980 458.7228 657.6456 1097.4911 2.9245 0.3810
+0 2003 200 5.00 0.604 16.6990 15.1752 43.9229 4.0640 1053.9796 875.2069 1023.6874 2.7072 0.3648
+0 2003 200 19.50 0.396 13.0905 13.6600 0.0000 0.2540 749.4745 807.4105 915.6359 4.4632 0.3892
+0 2003 201 5.00 0.604 14.9338 13.1576 25.5655 2.0320 901.1007 700.7380 970.4291 2.6441 0.3808
+0 2003 201 19.50 0.396 13.5384 11.8295 0.0000 0.0000 921.9757 754.8549 787.1580 3.5353 0.4000
+0 2003 202 5.00 0.604 17.2417 13.5048 39.5535 2.5400 1254.8948 821.8899 892.6185 4.1841 0.3877
+0 2003 202 19.50 0.396 11.9368 12.4289 0.0000 1.0160 701.7399 746.9536 852.8676 2.9968 0.4208
+0 2003 203 5.00 0.604 15.5479 14.8683 55.8134 0.0000 962.4050 904.4383 977.0244 2.2158 0.4053
+0 2003 203 19.50 0.396 13.9853 13.7000 0.0000 0.0000 894.7780 867.0434 861.0374 4.2811 0.4289
+0 2003 204 5.00 0.583 18.4200 14.7314 40.1804 0.0000 1547.8345 1097.5911 749.5912 3.7911 0.4144
+0 2003 204 19.00 0.417 16.1660 13.9470 0.0000 0.0000 1224.5551 976.8858 776.0900 5.9355 0.4332
+0 2003 205 5.00 0.583 19.5304 16.6789 46.3182 0.0000 1682.3099 1314.4454 782.3100 5.5325 0.4220
+0 2003 205 19.00 0.417 16.6535 15.5095 0.0000 0.0000 1323.4906 1189.9524 736.5435 4.8420 0.4466
+0 2003 206 5.00 0.583 17.6814 16.0771 31.6484 1.5200 1314.4109 1106.7189 892.2899 4.3636 0.4426
+0 2003 206 19.00 0.417 15.5240 14.1870 0.0000 0.0000 1115.6656 965.2285 813.0135 3.1725 0.4670
+0 2003 207 5.00 0.583 17.1014 14.9550 33.7873 1.0160 1096.5576 840.0326 1029.0916 2.6443 0.4786
+0 2003 207 19.00 0.417 13.1795 13.4470 0.0000 0.0000 659.1365 682.8935 1017.9096 2.0390 0.6206
+0 2003 208 5.00 0.583 14.5554 12.5875 19.6240 10.4200 654.3390 429.6633 1183.8477 2.7483 0.6226
+0 2003 208 19.00 0.417 12.1855 11.2320 0.0000 0.0000 457.6283 369.0388 1118.0791 2.9970 0.6772
+0 2003 209 5.00 0.583 12.4550 11.7354 27.7351 4.1520 203.5354 134.6395 1401.1165 1.7685 0.6328
+0 2003 209 19.00 0.417 11.2370 11.0445 0.0000 0.2540 379.3253 363.0363 1107.7889 2.6855 0.6166
+0 2003 210 5.00 0.583 11.6554 11.5821 31.9662 4.3140 424.5062 411.4619 1111.9108 3.0029 0.6000
+0 2003 210 19.00 0.417 9.9265 10.3315 0.0000 0.0000 383.0373 418.5833 989.7451 2.7000 0.6196
+0 2003 211 5.00 0.583 14.4796 11.1771 43.6106 0.1280 1019.9375 697.4850 792.3901 2.4786 0.5919
+0 2003 211 19.00 0.417 13.4750 9.7920 0.0000 0.0000 997.5948 658.7058 705.5560 3.4665 0.5966
+0 2003 212 5.00 0.583 16.5950 12.0757 42.6237 0.0000 1319.9310 841.1489 737.7916 3.3426 0.5590
+0 2003 212 19.00 0.417 14.3115 11.1900 0.0000 0.0000 1110.2374 803.1180 680.5526 4.5540 0.5521
+0 2003 213 5.00 0.583 16.1271 12.6457 32.5575 0.0000 1152.0396 777.1193 849.1100 3.6893 0.5211
+0 2003 213 19.00 0.417 13.0025 10.8615 0.0000 0.0000 816.2421 615.6405 840.4221 3.5900 0.5313
+0 2003 214 5.00 0.583 17.3818 12.9357 51.1276 0.0000 1308.5165 816.5309 847.6998 3.5969 0.4886
+0 2003 214 19.00 0.417 14.3955 11.7245 0.0000 0.0000 846.1088 579.3537 953.8277 4.0655 0.4983
+0 2003 215 5.00 0.583 14.5568 12.1914 25.4197 5.3260 757.5826 506.1389 1071.6196 3.1575 0.4834
+0 2003 215 19.00 0.417 11.7075 11.1065 0.0000 3.0520 442.5285 387.0748 1088.4319 3.4685 0.5194
+0 2003 216 5.00 0.583 15.1057 12.7379 56.1926 0.0000 1033.4795 784.1655 854.6819 4.2889 0.4904
+0 2003 216 19.00 0.417 12.7150 10.8575 0.0000 0.0000 841.6475 671.1375 785.4549 3.1000 0.4932
+0 2003 217 5.00 0.583 15.7136 12.2039 45.1929 0.0000 1054.2555 686.6630 901.8716 2.5237 0.4644
+0 2003 217 19.00 0.417 13.6800 10.8945 0.0000 0.0000 901.1868 635.8967 822.8840 2.8290 0.4668
+0 2003 218 5.00 0.583 18.6389 12.4118 38.3946 0.0000 1638.0059 920.2582 687.9811 2.7300 0.4383
+0 2003 218 19.00 0.417 15.5080 11.6875 0.0000 0.0000 1244.3134 848.0753 682.2236 3.2940 0.4426
+0 2003 219 5.00 0.583 16.7504 13.2793 36.9822 1.7820 1277.1670 863.3436 830.0226 4.8346 0.4191
+0 2003 219 19.00 0.417 12.5085 11.4450 0.0000 0.0000 700.0449 599.3922 906.7586 3.3865 0.4430
+0 2003 220 5.00 0.583 15.4732 12.9936 37.9280 4.5680 995.7133 729.1259 934.0114 3.2432 0.4163
+0 2003 220 19.00 0.417 12.3760 10.4660 0.0000 1.7820 590.3568 416.2555 1003.4472 3.7610 0.5960
+0 2003 221 5.00 0.583 15.6436 12.3582 37.6861 0.2540 931.2131 589.9119 1011.7327 2.9581 0.5553
+0 2003 221 19.00 0.417 13.7570 11.0425 0.0000 0.0000 890.9913 630.7083 841.3458 3.8755 0.5560
+0 2003 222 5.00 0.583 16.9207 12.3661 44.5064 0.7620 1091.5428 592.3455 1010.7952 2.3098 0.5263
+0 2003 222 19.00 0.417 13.8395 11.2075 0.0000 0.0000 721.5119 467.7284 1019.1318 2.7150 0.5326
+0 2003 223 5.00 0.583 17.0475 12.0957 31.7633 0.0000 1215.6530 676.2323 897.3535 2.1571 0.5052
+0 2003 223 19.00 0.417 14.5995 11.4655 0.0000 0.0000 976.4086 664.9842 845.7304 3.0790 0.5219
+0 2003 224 5.00 0.583 17.6561 13.1921 51.2431 0.0000 1265.2374 763.6061 927.8775 2.9194 0.4717
+0 2003 224 19.00 0.417 14.7735 11.8845 0.0000 0.0000 822.3228 530.8629 1018.6138 2.8358 0.5009
+0 2003 225 5.00 0.583 17.6979 13.7043 55.5263 0.0000 1184.0396 729.0040 1015.2609 2.9932 0.4559
+0 2003 225 19.00 0.417 13.4835 11.2735 0.0000 0.0000 1019.1205 810.0544 685.1779 3.1140 0.4932
+0 2003 226 5.00 0.583 16.0139 12.4936 55.1524 0.0000 1315.7379 946.0575 673.9126 2.8362 0.4498
+0 2003 226 19.00 0.417 10.9680 10.1385 0.0000 0.0000 836.7061 766.3536 627.0319 2.2235 0.4774
+0 2003 227 5.00 0.583 14.4204 11.8868 52.8038 0.0000 1011.6139 758.9120 802.5407 2.7255 0.4357
+0 2003 227 19.00 0.417 12.6985 10.4005 0.0000 0.0000 881.3021 671.6869 743.2084 3.9045 0.4617
+0 2003 228 5.00 0.583 13.9146 11.7743 30.7695 0.0000 894.5949 676.9760 863.7184 4.1604 0.4372
+0 2003 228 19.00 0.417 9.2510 10.5090 0.0000 0.0000 439.8160 544.5273 878.4282 6.9815 0.4532
+0 2003 229 5.00 0.583 9.4914 10.5861 28.5972 6.6080 398.9965 483.7436 947.8969 6.6607 0.4550
+0 2003 229 19.00 0.438 8.8014 9.4467 0.0000 3.5560 387.7162 432.7439 900.3789 4.0505 0.4776
+0 2003 230 5.50 0.562 10.9181 11.2207 39.8298 0.0000 457.4430 480.9899 1011.3112 2.7711 0.4626
+0 2003 230 19.00 0.438 8.2786 8.6414 0.0000 0.5080 398.4734 428.5504 842.3740 2.3890 0.4853
+0 2003 231 5.50 0.562 13.9552 10.5641 46.4990 0.0000 1058.6478 725.8498 710.0220 4.5459 0.4424
+0 2003 231 19.00 0.438 13.0429 9.3019 0.0000 0.0000 981.6494 645.6577 677.5598 2.4552 0.4596
+0 2003 232 5.50 0.562 18.1800 11.8989 51.9051 0.0000 1684.6191 976.3939 582.7691 3.3442 0.4109
+0 2003 232 19.00 0.438 15.6962 10.5181 0.0000 0.0000 1353.8383 833.1962 591.7758 3.3352 0.4519
+0 2003 233 5.50 0.542 17.3535 11.9704 31.1588 0.2540 1364.1653 772.8182 788.7440 2.4858 0.4175
+0 2003 233 18.50 0.458 14.5991 10.8236 0.0000 0.0000 1089.5817 719.9586 733.0934 3.0332 0.4507
+0 2003 234 5.50 0.542 15.7950 11.6092 23.6311 0.2540 1069.4117 630.5214 895.0122 2.4681 0.4267
+0 2003 234 18.50 0.458 13.7436 10.9718 0.0000 0.0000 833.1954 564.0518 899.1237 4.7545 0.4402
+0 2003 235 5.50 0.542 14.1473 11.9619 21.2403 0.0000 802.6736 581.0977 975.0826 4.3173 0.4270
+0 2003 235 18.50 0.458 11.6945 10.7791 0.0000 0.0000 630.3226 547.6747 898.9587 4.2441 0.4317
+0 2003 236 5.50 0.542 13.0008 11.1442 16.1644 5.0800 611.2025 428.9460 1050.9070 2.9673 0.4375
+0 2003 236 18.50 0.458 10.9009 10.1905 0.0000 1.0160 417.2646 355.8438 1039.4467 4.3395 0.4671
+0 2003 237 5.50 0.542 11.5704 10.4865 14.2107 3.0480 501.0481 399.2338 1021.8684 3.2119 0.4643
+0 2003 237 18.50 0.458 10.9723 8.9118 0.0000 0.0000 661.3779 489.9912 801.7108 2.9123 0.5159
+0 2003 238 5.50 0.542 14.3050 10.5073 35.0921 0.0000 990.7100 627.7772 804.2078 2.4211 0.4858
+0 2003 238 18.50 0.458 12.0986 9.6455 0.0000 0.0000 704.2376 489.3402 862.2711 2.6532 0.5004
+0 2003 239 5.50 0.542 15.5946 11.7400 43.1876 0.0000 1104.1506 704.3660 838.5794 3.7112 0.4609
+0 2003 239 18.50 0.458 11.6145 10.8859 0.0000 0.0000 610.1921 544.2628 911.5377 5.0741 0.4785
+0 2003 240 5.50 0.542 12.3465 11.6396 30.0236 0.2540 592.9462 522.9934 1003.2329 2.0925 0.4594
+0 2003 240 18.50 0.458 10.8755 9.6564 0.0000 0.0000 630.0657 530.3272 823.9870 2.9359 0.4921
+0 2003 241 5.50 0.542 11.4015 10.4615 40.4978 0.2540 502.5667 425.7758 1002.6997 2.8571 0.4597
+0 2003 241 18.50 0.458 4.7859 8.2568 0.0000 20.4100 7.8783 248.2241 994.4197 2.8573 0.7172
+0 2003 242 5.50 0.542 7.1273 8.4319 17.2100 1.8210 -1.8810 92.8070 1161.0602 1.8930 0.6921
+0 2003 242 18.50 0.458 4.2009 7.2114 0.0000 0.2540 7.2447 205.3302 958.1049 2.1345 0.6938
+0 2003 243 5.50 0.542 8.2769 7.7531 18.5932 0.0000 85.5347 44.5941 1157.8906 1.7475 0.6576
+0 2003 243 18.50 0.458 5.7395 6.6745 0.0000 0.0000 62.6846 125.8860 999.3109 2.2572 0.6716
+0 2003 244 5.50 0.542 12.6338 8.7012 49.8734 0.0000 998.5685 652.9662 632.8181 2.5965 0.6394
+0 2003 244 18.50 0.458 11.0559 7.5923 0.0000 0.0000 863.8412 585.0923 606.9509 3.1300 0.6586
+0 2003 245 5.50 0.542 14.1642 9.2085 37.5851 2.0280 1052.6759 592.8778 730.5796 2.4600 0.6244
+0 2003 245 18.50 0.458 8.5205 8.8514 0.0000 2.0280 292.5942 318.9500 967.1987 2.9232 0.6776
+0 2003 246 5.50 0.542 8.2900 7.8000 19.8437 5.5920 129.1598 92.4165 1115.1926 1.9160 0.6897
+0 2003 246 18.50 0.458 7.5750 7.0923 0.0000 0.0000 330.2529 295.4883 858.9367 2.9479 0.7248
+0 2003 247 5.50 0.542 12.7646 8.6908 44.7807 0.0000 746.4391 394.7672 890.2492 2.2246 0.6943
+0 2003 247 18.50 0.458 10.0395 8.2118 0.0000 0.0000 476.9199 332.3021 905.0165 3.0191 0.7468
+0 2003 248 5.50 0.542 13.4823 9.0777 27.1571 3.8080 988.2052 574.8575 736.6158 3.2265 0.7142
+0 2003 248 18.50 0.458 9.6127 8.2155 0.0000 0.1270 513.3041 402.6844 834.0734 3.3592 0.7667
+0 2003 249 5.50 0.542 9.0477 8.7646 5.6657 0.5070 325.0094 302.1176 978.1218 1.8628 0.7853
+0 2003 249 18.50 0.458 7.5441 8.0241 0.0000 0.0420 244.0880 280.4871 941.7927 2.7380 0.8079
+0 2003 250 5.50 0.542 7.5323 7.7615 9.2031 0.1270 248.7258 265.7472 936.4269 3.4385 0.7872
+0 2003 250 18.50 0.458 8.0973 7.3182 0.0000 0.0000 415.4897 358.1379 811.6432 5.9295 0.7812
+0 2003 251 5.50 0.542 12.4538 9.0835 18.8926 0.0000 898.0452 600.0247 710.6620 3.9885 0.7434
+0 2003 251 18.50 0.458 5.8359 7.9068 0.0000 4.4030 243.5803 387.6472 827.3978 4.5277 0.8118
+0 2003 252 5.50 0.542 9.4638 8.2812 8.8489 0.0000 516.6934 422.3903 821.1213 2.3550 0.8025
+0 2003 252 18.50 0.458 5.5386 7.0768 0.0000 0.0840 332.5424 434.6966 719.3737 8.9014 0.7886
+0 2003 253 5.50 0.542 4.2869 7.2142 33.2937 0.0420 357.6765 549.4218 614.6874 4.2315 0.7717
+0 2003 253 18.50 0.479 0.6570 4.8222 0.0000 0.0000 235.6875 464.4973 539.5187 5.7500 0.7858
+0 2003 254 6.00 0.521 3.2879 5.4376 34.4491 0.2540 336.4645 465.7123 577.8104 5.2148 0.7617
+0 2003 254 18.50 0.479 4.4391 4.4613 0.0000 0.0000 394.9569 395.9969 584.2502 8.2783 0.7637
+0 2003 255 6.00 0.521 7.7876 5.8748 16.5272 0.0000 532.0192 389.1116 684.2777 6.0828 0.7363
+0 2003 255 18.50 0.479 1.5117 5.8330 0.0000 0.0000 153.4090 371.7169 697.1239 1.7504 0.7549
+0 2003 256 6.00 0.521 -2.1536 3.6692 10.1317 1.7780 4.4131 286.6297 646.5900 1.5686 0.7821
+0 2003 256 18.50 0.479 -2.0096 2.7322 0.0000 0.0000 155.7825 380.2502 500.4280 2.3015 0.7902
+0 2003 257 6.00 0.500 6.1343 3.4537 8.4371 0.0000 845.2703 658.9541 264.9303 3.0154 0.7571
+0 2003 257 18.00 0.500 6.4262 3.3208 0.0000 0.0000 861.6586 668.3763 245.3306 6.2204 0.7679
+0 2003 258 6.00 0.500 11.2042 5.7004 35.8350 0.0000 1173.6969 747.4615 316.4772 5.4633 0.7232
+0 2003 258 18.00 0.500 9.2033 5.3017 0.0000 0.0000 932.7062 651.9807 381.1461 3.9733 0.7427
+0 2003 259 6.00 0.500 12.3337 6.8467 22.4295 0.0000 1123.0746 667.6989 473.9711 3.9296 0.7099
+0 2003 259 18.00 0.500 10.8663 6.3629 0.0000 0.0000 1024.4579 673.9695 429.5137 3.7042 0.7241
+0 2003 260 6.00 0.500 9.7296 7.1213 19.4339 0.2540 768.7351 539.3986 619.7297 3.4979 0.6995
+0 2003 260 18.00 0.500 -3.2822 4.4883 0.0000 0.5080 105.1951 480.5496 504.5823 2.7151 0.7598
+0 2003 261 6.00 0.500 0.5218 3.9233 34.1496 0.0000 326.9837 504.1346 446.3779 2.8596 0.7317
+0 2003 261 18.00 0.500 0.9190 2.7750 0.0000 0.0000 409.1793 501.3860 382.2511 2.5482 0.7473
+0 2003 262 6.00 0.500 7.8429 4.3817 41.3058 0.0000 837.9332 598.2070 382.3935 3.4098 0.7018
+0 2003 262 18.00 0.500 6.3692 4.2471 0.0000 0.0000 711.0581 575.4326 392.3110 3.9846 0.7204
+0 2003 263 6.00 0.500 7.8850 5.3538 37.3521 0.0000 879.6928 700.3332 338.7526 3.2775 0.6924
+0 2003 263 18.00 0.500 3.2300 3.9071 0.0000 0.0000 484.5242 517.5403 431.2108 2.2638 0.7261
+0 2003 264 6.00 0.500 8.3283 4.4742 39.6920 0.0000 975.8630 705.7769 278.9129 6.6892 0.6846
+0 2003 264 18.00 0.500 3.9787 3.9050 0.0000 0.0000 566.3671 543.3632 404.7873 3.0400 0.7085
+0 2003 265 6.00 0.500 11.1992 5.3242 17.8745 0.0000 1192.7511 732.1035 306.3616 4.9812 0.6661
+0 2003 265 18.00 0.500 10.4350 5.0908 0.0000 0.0000 1122.1812 725.5679 293.8072 4.8721 0.6803
+0 2003 266 6.00 0.500 13.1675 6.6388 39.3779 0.0000 1381.3524 827.0468 297.7582 5.5462 0.6443
+0 2003 266 18.00 0.500 10.7013 5.9733 0.0000 0.0000 1201.4640 833.7847 243.3025 6.1633 0.6569
+0 2003 267 6.00 0.500 10.2867 6.7183 29.1463 0.0000 884.4172 600.4772 530.8810 2.2421 0.6413
+0 2003 267 18.00 0.500 7.7350 4.7700 0.0000 0.0000 857.6187 648.0544 352.6146 3.0967 0.6656
+0 2003 268 6.00 0.500 12.9929 6.2700 18.8602 0.0000 1466.9263 906.5237 193.8999 7.8358 0.6183
+0 2003 268 18.00 0.500 10.9246 6.3012 0.0000 0.0000 1179.2395 815.6288 282.8881 9.9733 0.6197
+0 2003 269 6.00 0.500 12.7587 7.5471 33.4561 0.0000 1296.0677 843.6464 345.8480 5.2308 0.5949
+0 2003 269 18.00 0.500 4.2280 5.6171 0.0000 0.0000 369.5820 445.7847 609.1270 1.5412 0.6241
+0 2003 270 6.00 0.500 6.9371 6.1413 26.2172 0.0000 484.6378 430.5803 660.5288 2.2625 0.5979
+0 2003 270 18.00 0.500 1.9182 4.2450 0.0000 0.0000 234.0713 360.8729 607.5118 1.6978 0.6215
+0 2003 271 6.00 0.500 5.3664 5.0025 37.7773 0.0000 463.5732 435.7963 581.4048 2.2063 0.5927
+0 2003 271 18.00 0.500 5.4404 3.6225 0.0000 0.0000 515.6652 397.8729 533.2508 2.0471 0.6122
+0 2003 272 6.00 0.500 6.9050 5.2700 26.8730 0.0000 540.0630 420.2960 613.3794 2.4671 0.5773
+0 2003 272 18.00 0.500 5.3851 4.4483 0.0000 0.0000 490.1834 399.0309 580.3929 3.0095 0.5869
+0 2003 273 6.00 0.500 5.1358 5.6146 36.2014 0.0000 351.0239 380.1685 674.0247 2.3450 0.5652
+0 2003 273 18.00 0.500 7.7238 3.8533 0.0000 0.0000 786.7344 529.8626 414.6521 2.8946 0.5874
+0 2003 274 6.00 0.500 10.0425 5.2729 23.4865 0.0000 909.2667 553.7017 480.9677 2.5246 0.5532
+0 2003 274 18.00 0.500 6.8508 5.2258 0.0000 0.0000 474.7492 365.7932 661.9774 4.2362 0.5592
+0 2003 275 6.00 0.500 6.9979 6.5829 18.5784 2.5360 436.7835 398.0371 721.3200 3.5350 0.5415
+0 2003 275 18.00 0.500 3.5950 5.4938 0.0000 0.0000 151.8144 266.6592 778.5477 1.9813 0.0000
+0 2003 276 6.00 0.500 3.3463 5.9321 18.7325 0.0000 200.1979 357.1116 717.7312 1.8194 0.0000
+0 2003 276 18.00 0.521 2.2710 5.1804 0.0000 0.0000 290.6538 459.7574 565.6934 1.8852 0.0000
+0 2003 277 6.50 0.479 6.0791 5.9391 18.4018 0.0000 387.0731 378.6921 697.3522 1.6091 0.0000
+0 2003 277 18.00 0.521 4.4524 5.6124 0.0000 0.0000 293.2459 365.8700 687.0488 3.7901 0.5224
+0 2003 278 6.50 0.479 7.4291 7.3274 22.3796 1.5240 627.4808 616.4404 558.6864 3.2857 0.5021
+0 2003 278 18.00 0.521 3.7064 6.3060 0.0000 0.2540 354.5958 518.7476 581.1304 4.7660 0.5210
+0 2003 279 6.50 0.479 9.0404 7.5778 28.5585 0.0000 858.5389 748.0829 446.0996 2.8163 0.4973
+0 2003 279 18.00 0.521 6.7628 6.5180 0.0000 0.0000 729.3383 713.9581 401.0467 3.2920 0.5219
+0 2003 280 6.50 0.458 10.0268 7.6023 29.4715 0.0000 939.1427 751.8141 445.5931 3.3873 0.4936
+0 2003 280 17.50 0.542 7.5035 7.3096 0.0000 0.0000 689.3937 675.8868 493.9427 4.1623 0.5067
+0 2003 281 6.50 0.458 10.6014 8.2877 18.9989 0.0000 922.5946 734.3294 510.6741 4.0382 0.4886
+0 2003 281 17.50 0.542 9.1435 7.2912 0.0000 0.0000 784.6309 645.0201 523.7484 3.6377 0.5029
+0 2003 282 6.50 0.458 13.2141 9.4100 32.9554 0.0000 1233.8939 891.4424 450.4393 3.8605 0.4787
+0 2003 282 17.50 0.542 10.3696 8.1638 0.0000 0.0000 969.1453 792.2910 441.7576 4.7492 0.5003
+0 2003 283 6.50 0.458 11.8986 9.5505 21.0408 0.0000 1024.2513 821.8972 527.6615 4.7709 0.4774
+0 2003 283 17.50 0.542 0.5326 7.6235 0.0000 0.3810 243.5235 655.5345 540.5086 4.5658 0.5049
+0 2003 284 6.50 0.458 3.2000 7.1777 36.6785 0.0000 741.2701 981.8330 185.5546 3.1623 0.4847
+0 2003 284 17.50 0.542 6.4896 5.8031 0.0000 0.0000 960.9420 915.2892 151.3458 5.6592 0.4966
+0 2003 285 6.50 0.458 9.9264 7.4532 33.1279 0.0000 1165.0497 975.6682 211.9368 3.4827 0.4743
+0 2003 285 17.50 0.542 6.0385 6.0400 0.0000 0.0000 795.3231 789.5197 293.5602 2.8719 0.4902
+0 2003 286 6.50 0.458 -0.5812 5.3786 26.8885 1.5240 395.5320 716.5406 322.1205 4.7295 0.4849
+0 2003 286 17.50 0.542 -2.5523 2.5069 0.0000 0.0000 411.1758 646.8859 223.2447 7.9396 0.5018
+0 2003 287 6.50 0.458 4.8340 3.7190 35.4846 0.0000 910.9878 839.9071 104.0064 7.6127 0.4729
+0 2003 287 17.50 0.542 4.4696 2.7888 0.0000 0.0000 828.5143 731.8575 152.9961 8.2496 0.4843
+0 2003 288 6.50 0.458 6.5814 3.9136 31.3858 0.0000 938.5737 766.3162 187.2034 9.3209 0.4667
+0 2003 288 17.50 0.542 6.5742 2.9231 0.0000 0.0000 902.0764 676.7944 215.3068 11.9596 0.4759
+0 2003 289 6.50 0.458 6.6368 4.0768 34.7512 0.0000 790.8248 629.6589 333.1422 10.9205 0.4650
+0 2003 289 17.50 0.542 7.1577 2.7338 0.0000 0.0000 772.4606 495.9762 385.9213 7.0758 0.4730
+0 2003 290 6.50 0.458 11.7432 4.4427 23.5556 0.0000 1123.3395 571.8786 416.3941 4.0268 0.4545
+0 2003 290 17.50 0.542 10.2762 3.0327 0.0000 0.0000 1024.8400 520.4219 378.5872 6.9173 0.4696
+0 2003 291 6.50 0.458 13.2386 4.4091 28.9448 0.0000 1402.5538 703.6819 282.2754 5.0818 0.4520
+0 2003 291 17.50 0.542 9.9519 2.0635 0.0000 0.0000 1101.8041 574.5889 273.2947 4.0727 0.4738
+0 2003 292 6.50 0.458 13.7732 3.1787 22.7804 0.0000 1541.2211 716.4620 197.3397 5.3127 0.4511
+0 2003 292 17.50 0.542 10.8269 1.9822 0.0000 0.0000 1220.7448 608.9491 233.1064 6.6819 0.4620
+0 2003 293 6.50 0.458 12.3686 3.3302 28.2074 0.0000 1308.2209 631.7809 290.5826 3.2650 0.4456
+0 2003 293 17.50 0.542 11.2596 1.3880 0.0000 0.0000 1223.6801 543.7886 268.0791 4.9131 0.4620
+0 2003 294 6.50 0.458 14.6932 3.0600 26.3888 0.0000 1541.1115 610.2490 296.0896 2.7783 0.4412
+0 2003 294 17.50 0.542 10.4742 0.6345 0.0000 0.0000 1145.7367 502.7470 273.4539 3.2615 0.4694
+0 2003 295 6.50 0.458 15.0573 1.8427 32.3559 0.0000 1630.9969 593.9702 250.3060 3.0495 0.4404
+0 2003 295 17.50 0.542 11.6977 0.6056 0.0000 0.0000 1260.0887 501.9241 271.8274 3.7135 0.4548
+0 2003 296 6.50 0.458 12.4009 1.6284 3.4170 0.0000 1356.8202 588.8696 240.5008 5.5177 0.4387
+0 2003 296 17.50 0.542 6.2519 -0.1590 0.0000 0.0000 826.0986 451.7778 286.1068 2.7046 0.4591
+0 2003 297 6.50 0.458 4.5391 0.1502 19.1343 0.0000 798.5046 565.6955 189.1805 5.9632 0.4445
+0 2003 297 17.50 0.542 -6.2112 -1.9527 0.0000 0.2540 166.4021 307.9103 352.4054 2.0417 0.4686
+0 2003 298 6.50 0.458 -5.9159 -2.8136 26.5842 0.0000 241.9651 348.0688 277.1745 2.0500 0.4524
+0 2003 298 17.50 0.542 -4.7734 -4.7558 0.0000 0.0000 379.2508 375.4806 178.7849 6.3287 0.4661
+0 2003 299 6.50 0.458 2.0657 -2.9540 27.7561 0.0000 502.8035 278.2008 345.0383 7.1432 0.4385
+0 2003 299 17.50 0.562 2.1640 -3.2763 0.0000 0.0000 378.9848 135.2516 471.4805 9.6548 0.4262
+0 2003 300 7.00 0.438 3.4543 -3.3805 11.9074 0.0000 526.1393 206.5051 396.0905 12.6214 0.4002
+0 2003 300 17.50 0.562 3.1178 -3.5756 0.0000 0.0000 530.5011 221.6424 373.6658 9.3144 0.4025
+0 2003 301 7.00 0.438 4.0286 -1.6409 27.4708 0.0000 496.0515 212.7147 461.9849 11.5510 0.3951
+0 2003 301 17.50 0.562 4.4774 -2.0231 0.0000 0.0000 480.7916 154.8284 500.8865 14.6207 0.4000
+0 2003 302 7.00 0.438 8.8043 -0.0586 25.8937 0.0000 893.9496 351.7615 393.2400 10.3729 0.3929
+0 2003 302 17.50 0.562 -0.2729 -0.8314 0.0000 0.0000 249.5646 179.7933 526.8392 2.0708 0.4028
+0 2003 303 7.00 0.438 -8.7005 -1.9457 12.5207 0.0000 25.8878 248.5691 409.9723 1.0060 0.4073
+0 2003 303 17.50 0.562 -12.3841 -3.4141 0.0000 0.0000 30.6380 283.2866 318.4128 0.6573 0.4132
+0 2003 304 7.00 0.417 -10.7335 -3.5255 14.3334 0.0000 30.5718 243.0733 354.5898 0.7433 0.4079
+0 2003 304 17.00 0.583 -2.0982 -3.2575 0.0000 0.0000 268.3984 195.8036 411.3018 2.9593 0.4097
+0 2003 305 7.00 0.417 3.3345 0.0284 21.5059 0.0000 432.9353 267.5273 481.5316 4.1020 0.3923
+0 2003 305 17.00 0.583 1.0803 0.4015 0.0000 0.0000 374.4567 341.4407 420.6632 2.9657 0.3950
+0 2003 306 7.00 0.417 2.2675 1.3127 17.3088 0.0000 307.3663 256.1923 550.7356 3.1240 0.3726
+0 2003 306 17.00 0.583 -1.2841 1.2614 0.0000 0.6350 46.7004 162.7645 640.8957 1.5950 0.3907
+0 2003 307 7.00 0.417 -0.3424 1.2320 17.1856 0.5080 210.8453 283.7792 518.4064 2.7990 0.3913
+0 2003 307 17.00 0.583 -6.6461 1.1379 0.0000 2.3740 111.5999 412.3374 385.1993 6.1364 0.4094
+0 2003 308 7.00 0.417 -7.3340 0.8493 28.8138 0.0000 213.8268 523.6064 259.7329 5.5805 0.4030
+0 2003 308 17.00 0.583 -7.8275 0.4391 0.0000 0.0000 182.7430 488.2483 275.4890 4.1893 0.4041
+0 2003 309 7.00 0.417 -6.3660 0.2825 19.6948 0.0000 174.3013 428.9430 327.3407 4.9775 0.3983
+0 2003 309 17.00 0.583 -7.4504 0.6058 0.0000 1.0160 127.8163 430.4738 341.0905 6.4861 0.4046
+0 2003 310 7.00 0.417 -4.8905 0.7076 25.2476 0.0000 270.6796 497.2199 279.2752 6.7555 0.3970
+0 2003 310 17.00 0.583 -5.3239 0.9484 0.0000 0.0000 183.2690 436.8811 351.2841 1.9050 0.3982
+0 2003 311 7.00 0.417 -3.1465 0.9269 11.1899 0.0000 213.3184 386.6962 400.4226 1.9485 0.3919
+0 2003 311 17.00 0.583 -2.9925 0.9678 0.0000 0.0000 191.0652 362.7325 426.3911 1.6239 0.3945
+0 2003 312 7.00 0.417 -1.5424 1.0072 17.3752 0.0000 260.0494 374.3885 416.6754 2.5470 0.3879
+0 2003 312 17.00 0.583 -2.6279 1.1715 0.0000 0.0000 266.2796 433.7813 365.3994 2.1539 0.3942
+0 2003 313 7.00 0.417 2.2584 1.1708 23.7816 0.0000 537.4664 478.5215 320.9413 3.1475 0.3860
+0 2003 313 17.00 0.583 -0.4185 1.4975 0.0000 0.5080 226.6342 317.6248 497.9017 7.3871 0.3953
+0 2003 314 7.00 0.417 -1.5760 1.4465 16.8369 1.2700 184.9889 323.8140 489.1216 6.6780 0.3928
+0 2003 314 17.00 0.583 -4.0429 1.5025 0.0000 4.1900 110.2889 347.0846 468.6812 4.0404 0.3979
+0 2003 315 7.00 0.417 -3.6150 1.5160 12.9544 0.5500 205.1087 427.3520 389.0981 7.5670 0.3949
+0 2003 315 17.00 0.583 -4.3289 1.5439 0.0000 1.7780 157.4134 406.5789 411.2888 7.4536 0.3973
+0 2003 316 7.00 0.417 -1.1799 1.5535 20.9999 0.0000 429.8565 554.2108 264.1432 5.9270 0.3940
+0 2003 316 17.00 0.583 -0.1511 1.5321 0.0000 0.0000 559.6556 640.3621 176.9087 2.4619 0.3965
+0 2003 317 7.00 0.417 4.4412 1.5245 21.4827 0.0000 499.3098 321.6639 495.2282 3.1345 0.3906
+0 2003 317 17.00 0.583 -2.1907 1.5421 0.0000 2.2860 154.2504 320.5767 497.2129 6.6114 0.4044
+0 2003 318 7.00 0.417 -4.3230 1.4065 12.2598 2.0320 134.2268 376.7520 434.1638 7.4415 0.4011
+0 2003 318 17.00 0.583 -4.4593 1.3661 0.0000 0.0000 188.8441 434.1116 374.7706 6.2818 0.4036
+0 2003 319 7.00 0.417 -3.4400 1.3065 21.8824 0.0000 256.9917 462.3897 343.5043 5.9405 0.4006
+0 2003 319 17.00 0.583 -5.1354 1.2479 0.0000 0.0000 191.0155 452.9447 350.0204 2.3732 0.4041
+0 2003 320 7.00 0.417 -1.4123 1.1540 19.4249 0.0000 434.6375 544.3348 253.9607 3.3630 0.3981
+0 2003 320 17.00 0.583 -4.6636 1.0882 0.0000 0.0420 203.3679 441.3558 353.6854 9.2461 0.3995
+0 2003 321 7.00 0.417 -6.1788 0.9914 6.5141 3.1360 116.0539 395.0352 395.2369 5.6180 0.3947
+0 2003 321 17.00 0.583 -10.4321 0.9316 0.0000 0.0420 104.6010 500.5219 286.8207 11.6818 0.3974
+0 2003 322 7.00 0.417 -6.2410 0.8651 24.0260 0.0000 194.3995 466.7727 317.3199 14.0945 0.3974
+0 2003 322 17.00 0.583 -3.2211 0.8009 0.0000 0.0000 264.9786 433.3238 347.6500 9.8036 0.4023
+0 2003 323 7.00 0.417 2.3572 0.7459 23.6982 0.0000 587.9188 502.1554 276.1563 9.2865 0.3953
+0 2003 323 17.00 0.583 2.7829 0.8087 0.0000 0.0000 691.4446 588.9468 192.4072 7.9300 0.3985
+0 2003 324 7.00 0.417 4.2915 0.8748 21.1851 0.0000 806.0922 618.9241 165.6442 7.7865 0.3957
+0 2003 324 17.00 0.583 0.3934 0.9275 0.0000 0.0000 532.8646 556.6490 230.4906 4.2800 0.3989
+0 2003 325 7.00 0.417 1.8101 0.8651 16.8564 0.0000 591.3722 542.5856 241.5129 4.8010 0.3906
+0 2003 325 17.00 0.583 -8.9915 0.9251 0.0000 0.0000 64.1486 417.2899 369.7355 1.8414 0.4021
+0 2003 326 7.00 0.417 -15.0355 0.6690 3.3451 2.3280 36.7024 510.5679 264.0418 1.2175 0.4123
+0 2003 326 17.00 0.583 -19.6589 0.3629 0.0000 0.0420 54.4885 583.4118 176.6254 6.3411 0.4160
+0 2003 327 7.00 0.417 -19.1890 -0.1511 20.1403 0.0000 101.6835 600.4377 135.7041 12.9610 0.4217
+0 2003 327 17.00 0.583 -9.5718 -0.5345 0.0000 0.0000 219.6932 518.3486 200.4535 12.1536 0.4175
+0 2003 328 7.00 0.417 -2.7980 -0.5724 19.9448 0.0000 446.5941 538.5416 178.5626 11.7090 0.4121
+0 2003 328 17.00 0.604 -6.9514 -0.3332 0.0000 0.0000 272.4148 515.2579 212.5846 8.3976 0.4100
+0 2003 329 7.50 0.396 -7.7079 -0.3184 13.5629 0.2540 181.6356 448.2267 280.2831 7.9653 0.4072
+0 2003 329 17.00 0.604 -10.5452 -0.2576 0.0000 2.5400 95.6759 437.6161 293.6515 4.9307 0.4060
+0 2003 330 7.50 0.396 -10.9589 -0.2539 11.7312 1.0160 115.4276 467.5376 263.8959 3.8321 0.4069
+0 2003 330 17.00 0.604 -14.2476 -0.2589 0.0000 0.0000 92.9702 511.4937 219.7120 6.9221 0.4066
+0 2003 331 7.50 0.396 -13.9247 -0.3988 16.9813 0.0000 113.7834 520.6185 204.2631 10.0153 0.4099
+0 2003 331 17.00 0.604 -10.4476 -0.5400 0.0000 0.0000 237.4394 560.5206 158.0318 8.7872 0.4088
+0 2003 332 7.50 0.396 -3.4989 -0.6110 11.6225 0.0000 426.7119 541.2051 174.1807 6.4453 0.4085
+0 2003 332 17.00 0.604 -0.2608 -0.4823 0.0000 0.0000 569.4385 558.1932 162.9417 15.1300 0.4087
+0 2003 333 7.50 0.396 3.1678 -0.2818 16.1494 0.0000 619.1815 440.7338 289.4366 11.0632 0.4076
+0 2003 333 17.00 0.604 1.4459 0.0510 0.0000 0.0000 385.9985 318.3711 427.0625 4.2841 0.4048
+0 2003 334 7.50 0.375 3.2633 0.2563 13.4167 0.0000 423.5683 264.7953 490.1960 2.5500 0.4024
+0 2003 334 16.50 0.625 -1.6392 0.4336 0.0000 0.0000 179.6585 260.6859 502.6705 2.7055 0.4043
+0 2003 335 7.50 0.375 6.3889 0.4894 20.6751 0.0000 993.9341 654.6133 111.3932 11.0961 0.4009
+0 2003 335 16.50 0.625 3.6267 0.5776 0.0000 0.0000 809.3976 646.2096 124.0048 9.5953 0.4047
+0 2003 336 7.50 0.375 0.3517 0.5797 17.2803 0.0000 519.1500 525.9839 244.3312 5.5750 0.4043
+0 2003 336 16.50 0.625 -4.2910 0.5287 0.0000 0.0000 297.7600 494.0712 273.8127 9.0900 0.4061
+0 2003 337 7.50 0.375 -1.3708 0.3103 14.9190 0.0000 476.4771 551.2172 206.3115 5.9411 0.4097
+0 2003 337 16.50 0.625 -3.4273 0.2139 0.0000 0.0000 375.1723 525.9001 227.1040 6.2773 0.4081
+0 2003 338 7.50 0.375 -4.3750 0.0778 20.9776 0.0000 242.3223 421.3948 325.2715 3.2756 0.4087
+0 2003 338 16.50 0.625 -5.5097 -0.0697 0.0000 0.0000 389.9608 597.1367 142.7294 2.3490 0.4072
+0 2003 339 7.50 0.375 0.3896 -0.3877 19.1460 0.0000 668.0926 627.2354 98.1487 1.4218 0.4125
+0 2003 339 16.50 0.625 0.9947 -0.3210 0.0000 0.0000 548.0132 485.4877 242.9041 4.1560 0.4102
+0 2003 340 7.50 0.375 3.1094 -0.1683 17.1954 0.0000 523.6796 352.8148 382.5227 4.7744 0.4040
+0 2003 340 16.50 0.625 0.5661 0.1247 0.0000 0.0000 387.0777 363.7357 385.1166 10.9373 0.4014
+0 2003 341 7.50 0.375 -0.5029 0.2347 16.3415 0.5080 239.5959 272.1801 481.7989 8.7250 0.4021
+0 2003 341 16.50 0.625 -2.7297 0.3272 0.0000 0.0000 203.9955 333.3338 424.9904 4.6162 0.4040
+0 2003 342 7.50 0.375 -6.7856 0.3022 4.2069 1.6070 20.9640 288.0857 469.0619 2.1939 0.4052
+0 2003 342 16.50 0.625 -12.3420 0.3033 0.0000 0.5900 43.8077 451.3418 305.8565 2.5002 0.4047
+0 2003 343 7.50 0.375 -11.4361 0.1993 17.8178 0.0000 125.0258 508.9992 243.3257 5.6094 0.4033
+0 2003 343 16.50 0.625 -9.5120 0.0591 0.0000 0.0000 261.2397 591.1157 154.6822 4.1514 0.4052
+0 2003 344 7.50 0.375 -3.3778 -0.0767 19.2723 0.0000 464.6241 598.4669 141.0658 6.4239 0.4054
+0 2003 344 16.50 0.625 -10.3830 -0.0934 0.0000 0.2960 112.6607 458.4119 280.3531 5.8867 0.4061
+0 2003 345 7.50 0.375 -8.8994 -0.1075 9.0342 0.0000 168.3382 477.1170 260.9996 2.8678 0.4059
+0 2003 345 16.50 0.625 -10.6857 -0.1158 0.0000 0.0000 115.2510 467.3802 270.3542 4.5005 0.4055
+0 2003 346 7.50 0.375 -9.3572 -0.1347 20.0708 0.0000 211.8300 530.7408 206.1285 5.6433 0.4045
+0 2003 346 16.50 0.625 -11.0540 -0.1984 0.0000 0.0000 144.7718 501.7268 232.2354 12.4340 0.4058
+0 2003 347 7.50 0.375 -8.0300 -0.2818 15.5462 0.0000 146.0315 423.1265 307.0389 13.2228 0.4083
+0 2003 347 16.50 0.625 -4.8837 -0.3251 0.0000 0.0000 146.1235 322.5806 405.6245 15.2330 0.4088
+0 2003 348 7.50 0.375 0.3259 -0.3026 17.6948 0.0000 438.5509 405.9485 323.2776 5.7478 0.4078
+0 2003 348 16.50 0.625 -5.8440 -0.1466 0.0000 0.5080 219.3223 429.9797 306.3479 3.3826 0.4062
+0 2003 349 7.50 0.375 -12.1411 -0.0927 10.0145 0.0000 153.9680 539.4255 199.3691 6.5583 0.4043
+0 2003 349 16.50 0.625 -15.0040 -0.1922 0.0000 0.0000 141.7866 576.7195 157.5260 10.9157 0.4051
+0 2003 350 7.50 0.375 -5.3706 -0.3714 19.7487 0.0000 442.2415 631.3752 94.7394 11.6011 0.4083
+0 2003 350 16.50 0.625 -3.4887 -0.5169 0.0000 0.0000 389.2770 507.5704 212.0107 10.9897 0.4084
+0 2003 351 7.50 0.375 -2.3228 -0.5392 17.7521 0.0000 410.9767 485.9916 232.5937 15.7606 0.4076
+0 2003 351 16.50 0.625 -3.2913 -0.4468 0.0000 0.0000 402.5514 519.2333 203.4878 7.9897 0.4065
+0 2003 352 7.50 0.375 0.3981 -0.4234 18.8497 0.0000 661.3230 621.9991 101.7705 7.5261 0.4064
+0 2003 352 16.50 0.625 0.2015 -0.3488 0.0000 0.0000 585.2111 558.7672 168.3662 4.6763 0.4026
+0 2003 353 7.50 0.375 4.0069 -0.2989 19.4652 0.0000 777.1559 549.0746 180.3173 4.0917 0.4002
+0 2003 353 16.50 0.625 2.6747 -0.1654 0.0000 0.0000 684.9106 542.8699 192.6022 8.1460 0.3955
+0 2003 354 7.50 0.375 3.3189 -0.0250 11.0981 0.0000 712.5486 540.5795 201.3314 7.1328 0.3939
+0 2003 354 16.50 0.625 0.6619 0.1242 0.0000 0.0000 524.6765 497.7859 251.0347 5.4083 0.3953
+0 2003 355 7.50 0.375 -0.0535 0.1491 16.5878 0.0000 393.4049 401.1833 348.7956 3.5383 0.3939
+0 2003 355 16.50 0.625 -6.3840 0.2065 0.0000 1.7780 95.1147 343.0762 409.5818 2.4985 0.3983
+0 2003 356 7.50 0.375 -7.5506 0.1323 10.4895 0.0000 70.9287 353.0808 396.1148 2.6326 0.4043
+0 2003 356 16.50 0.625 -3.3836 0.0155 0.0000 0.2540 457.4465 592.5940 151.1893 8.0530 0.4043
+0 2003 357 7.50 0.375 2.0785 -0.1111 19.3539 0.0000 703.4249 595.1097 142.8413 4.3317 0.4043
+0 2003 357 16.50 0.625 0.8339 -0.0672 0.0000 0.0000 698.5596 655.4845 84.4843 4.4893 0.4033
+0 2003 358 7.50 0.375 2.8339 -0.0769 14.5028 0.0000 827.0533 676.6371 62.8846 3.2128 0.4054
+0 2003 358 16.50 0.625 0.2321 0.0090 0.0000 0.0000 586.4336 575.5230 167.9569 3.5403 0.4014
+0 2003 359 7.50 0.375 -0.9066 0.0534 12.3813 0.0000 349.5279 390.9606 354.5741 4.6811 0.3962
+0 2003 359 16.50 0.625 -3.2470 0.1266 0.0000 0.0000 280.3787 421.4683 327.4617 3.2700 0.4017
+0 2003 360 7.50 0.375 -0.8162 -0.0584 13.1251 1.0160 387.4847 411.5599 328.8135 4.7522 0.4080
+0 2003 360 16.50 0.625 -11.4080 -0.1223 0.0000 0.0000 145.0470 510.7670 226.6717 9.0627 0.4045
+0 2003 361 7.50 0.375 -14.0711 -0.3151 18.0163 0.0000 154.4770 567.9405 160.7199 12.3339 0.4052
+0 2003 361 16.50 0.625 -17.3690 -0.5222 0.0000 0.0000 115.1572 573.7638 145.5869 12.0937 0.4108
+0 2003 362 7.50 0.375 -16.0611 -0.7668 13.1479 0.0000 152.1532 579.6879 128.8127 10.6489 0.4090
+0 2003 362 16.50 0.625 -16.2027 -1.0005 0.0000 0.0000 105.0398 524.8414 173.4522 10.5573 0.4101
+0 2003 363 7.50 0.375 -12.0050 -1.2183 15.5212 0.0000 157.3247 486.9735 201.9324 9.1300 0.4092
+0 2003 363 16.50 0.625 -7.0453 -1.3000 0.0000 0.0000 151.4462 354.0764 331.3428 8.4253 0.4079
+0 2003 364 7.50 0.375 -5.6844 -1.1889 10.9210 1.0160 128.8472 296.1204 394.0463 9.0139 0.4054
+0 2003 364 16.50 0.625 -5.6587 -0.9777 0.0000 0.0000 163.6844 339.2375 360.0420 8.4570 0.4018
+0 2003 365 7.50 0.375 -2.4622 -0.8627 17.7020 0.0000 442.3685 507.7057 196.5841 6.0133 0.3993
+0 2003 365 16.50 0.625 -5.7483 -0.7840 0.0000 0.0000 334.3011 519.8730 187.8682 10.3110 0.3997
+0 2004 1 7.50 0.375 -4.4300 -0.7822 16.5268 0.0000 353.0783 492.6940 215.1260 5.4244 0.4038
+0 2004 1 16.50 0.625 -4.9080 -0.7742 0.0000 3.5560 172.4614 331.2016 376.9706 7.6733 0.3997
+0 2004 2 7.50 0.375 -6.3806 -0.7002 7.1745 2.0320 131.3834 342.0653 369.3670 5.2078 0.4005
+0 2004 2 16.50 0.625 -10.4020 -0.6127 0.0000 5.3340 39.5365 360.2462 355.0650 1.3502 0.3983
+0 2004 3 7.50 0.375 -13.0672 -0.5831 2.2516 2.2860 35.0389 417.3368 299.2894 1.6958 0.3962
+0 2004 3 16.50 0.625 -14.5113 -0.6059 0.0000 0.5080 63.7000 470.0283 245.5827 3.3625 0.3979
+0 2004 4 7.50 0.375 -17.9717 -0.6593 14.3991 0.2540 92.1413 553.4277 159.8135 8.0372 0.3965
+0 2004 4 16.50 0.625 -19.0140 -0.6925 0.0000 0.0000 91.5560 565.5053 146.2696 5.7283 0.4014
+0 2004 5 7.50 0.375 -19.3611 -0.7501 7.9427 0.0000 85.6675 561.7543 147.4766 7.9950 0.4002
+0 2004 5 16.50 0.625 -16.5780 -0.8316 0.0000 0.0000 164.7296 596.4161 109.2336 11.9927 0.4020
+0 2004 6 7.50 0.375 -8.6667 -0.9242 16.4195 0.0000 253.5114 511.8654 189.7396 9.9350 0.4028
+0 2004 6 16.50 0.625 -3.4157 -1.0011 0.0000 0.0000 248.8174 343.1645 355.0975 7.1400 0.4031
+0 2004 7 7.50 0.375 -2.6717 -1.0222 11.0731 0.0000 258.0992 325.5758 371.7689 4.5889 0.4005
+0 2004 7 16.50 0.625 -6.1883 -0.9470 0.0000 1.5240 128.2666 320.9523 379.6603 9.1170 0.3989
+0 2004 8 7.50 0.375 -6.5556 -0.8386 17.4941 0.0000 159.0691 368.2432 337.0998 13.3461 0.3983
+0 2004 8 16.50 0.625 -5.2757 -0.7741 0.0000 0.0000 165.6820 337.8619 370.3146 13.0253 0.3976
+0 2004 9 7.50 0.375 -1.8326 -0.7291 21.0974 0.0000 281.1584 325.6387 384.5202 9.4239 0.3993
+0 2004 9 16.50 0.625 -0.3673 -0.6628 0.0000 0.0000 402.7437 386.3355 326.7530 5.0670 0.3955
+0 2004 10 7.50 0.375 2.4237 -0.5924 21.0719 0.0000 625.7875 476.9256 239.2855 7.3378 0.3953
+0 2004 10 16.50 0.625 0.4113 -0.4978 0.0000 0.0000 603.0149 560.8230 159.6142 7.6550 0.3943
+0 2004 11 7.50 0.375 2.5892 -0.4334 21.6455 0.0000 738.5612 587.7175 135.6052 5.0972 0.3915
+0 2004 11 16.50 0.625 -0.6023 -0.3772 0.0000 0.0000 599.6297 608.8577 116.9947 4.0827 0.3955
+0 2004 12 7.50 0.375 1.8906 -0.3842 21.4596 0.0000 724.5651 611.6159 113.9196 3.3933 0.3948
+0 2004 12 16.50 0.625 -1.6547 -0.4032 0.0000 0.0000 553.4401 607.0811 117.6009 3.9513 0.3966
+0 2004 13 7.50 0.375 0.8191 -0.4663 21.6855 0.0000 680.9876 619.1201 102.7242 4.7661 0.3979
+0 2004 13 16.50 0.625 -0.9991 -0.4915 0.0000 0.0000 529.0984 550.6844 170.0301 5.6553 0.3991
+0 2004 14 7.50 0.396 3.1993 -0.4823 20.5320 0.0000 680.0486 489.5844 231.5453 3.3184 0.3933
+0 2004 14 17.00 0.604 0.6170 -0.4014 0.0000 0.0000 598.9088 551.2450 173.5170 3.1148 0.3956
+0 2004 15 7.50 0.396 1.9523 -0.4155 15.8979 0.0000 639.1856 522.1236 202.0051 1.6483 0.3978
+0 2004 15 17.00 0.604 -2.3500 -0.4030 0.0000 0.0000 366.7881 446.9963 277.6927 3.5559 0.3969
+0 2004 16 7.50 0.396 -3.6216 -0.3596 8.6049 0.0000 255.3193 387.1771 339.4698 2.5316 0.3966
+0 2004 16 17.00 0.604 -7.6517 -0.3239 0.0000 0.0000 88.3034 353.2180 375.0433 1.3055 0.3959
+0 2004 17 7.50 0.396 -4.9916 -0.3729 21.8505 0.0000 168.4441 345.8918 380.1562 1.6541 0.3987
+0 2004 17 17.00 0.604 -5.0945 -0.4910 0.0000 0.0000 311.9124 487.3105 233.4284 2.6690 0.3974
+0 2004 18 7.50 0.396 -1.3006 -0.6415 18.7486 0.0000 562.4929 590.1661 123.8681 5.9574 0.4002
+0 2004 18 17.00 0.604 -4.9207 -0.7617 0.0000 0.0000 403.6151 563.1948 145.5272 7.5841 0.4013
+0 2004 19 7.50 0.396 -5.7605 -0.8213 7.0496 0.0000 293.2024 478.4555 227.6476 8.1342 0.3993
+0 2004 19 17.00 0.583 -9.3464 -0.8185 0.0000 0.0000 101.9117 387.8711 318.3516 2.1439 0.3991
+0 2004 20 7.00 0.417 -10.1670 -0.8686 7.2807 2.0320 31.0769 337.3918 366.6391 2.0150 0.3996
+0 2004 20 17.00 0.583 -11.8111 -0.9290 0.0000 0.7620 72.0501 412.6099 288.7857 2.7548 0.3983
+0 2004 21 7.00 0.417 -7.2920 -0.9675 23.6045 0.0000 226.8878 451.1689 248.5521 5.7695 0.3994
+0 2004 21 17.00 0.583 -8.0050 -1.0065 0.0000 0.0000 311.0822 555.0038 143.0207 5.9746 0.4014
+0 2004 22 7.00 0.417 -5.2690 -1.0405 19.7253 0.0000 385.5390 543.6499 152.9037 5.7320 0.4000
+0 2004 22 17.00 0.583 -3.5082 -1.0561 0.0000 0.0000 409.8340 506.8034 189.0768 9.1650 0.3992
+0 2004 23 7.00 0.417 -0.5992 -1.0535 15.6378 0.0000 520.6014 497.7485 198.2429 3.6970 0.4002
+0 2004 23 17.00 0.583 -1.0305 -0.9262 0.0000 0.0000 577.8461 582.0090 119.5086 4.7071 0.3974
+0 2004 24 7.00 0.417 0.9397 -0.8055 22.1174 0.0000 684.8372 600.5444 106.2508 3.3320 0.3928
+0 2004 24 17.00 0.583 -5.5564 -0.7041 0.0000 0.0000 325.4256 505.1511 206.1117 6.7364 0.3953
+0 2004 25 7.00 0.417 -8.6310 -0.7055 15.4577 0.0000 170.2904 443.5294 267.6695 4.0620 0.3951
+0 2004 25 17.00 0.583 -17.5239 -0.7555 0.0000 0.2540 112.7735 562.3887 146.6041 6.6689 0.3991
+0 2004 26 7.00 0.417 -16.8510 -0.8382 24.5006 0.0000 146.4636 582.7482 122.6108 10.6760 0.4009
+0 2004 26 17.00 0.583 -16.5007 -0.9567 0.0000 0.0000 114.6624 540.9531 159.2390 11.9832 0.4017
+0 2004 27 7.00 0.417 -11.6525 -1.1075 20.8476 0.0000 142.1947 469.3199 224.3432 15.3270 0.4013
+0 2004 27 17.00 0.583 -9.1375 -1.2311 0.0000 0.0000 145.5471 410.5839 277.7762 11.5325 0.4014
+0 2004 28 7.00 0.417 -6.0885 -1.3020 24.8554 0.0000 240.6113 415.5804 269.7533 11.5390 0.3996
+0 2004 28 17.00 0.583 -7.7861 -1.3225 0.0000 0.0000 148.3887 373.4681 310.9937 13.9046 0.3991
+0 2004 29 7.00 0.417 -6.3540 -1.3125 17.7038 0.0000 188.0466 370.7590 314.1279 10.3890 0.3989
+0 2004 29 17.00 0.583 -5.3957 -1.2961 0.0000 0.0000 168.9850 322.2327 363.3534 12.2157 0.3974
+0 2004 30 7.00 0.417 -2.7130 -1.2575 14.4179 0.0000 278.2617 336.0468 351.1833 5.3595 0.3970
+0 2004 30 17.00 0.583 -4.6296 -1.1300 0.0000 0.0000 164.7604 298.2430 394.4527 7.6820 0.3953
+0 2004 31 7.00 0.417 -8.9695 -1.0365 5.1703 6.6040 28.5115 295.8619 400.8649 2.2035 0.3945
+0 2004 31 17.00 0.583 -14.7189 -0.9887 0.0000 2.2860 36.7225 430.0957 268.7033 1.4424 0.3957
+0 2004 32 7.00 0.417 -13.9445 -1.0051 22.1972 0.2540 139.9187 519.0413 179.0436 5.5970 0.3960
+0 2004 32 17.00 0.583 -14.6161 -1.0246 0.0000 0.0000 82.7840 474.4821 222.7577 3.5493 0.3968
+0 2004 33 7.00 0.417 -13.0490 -1.0235 23.5328 0.0000 151.4347 511.2700 186.0193 5.5245 0.3991
+0 2004 33 17.00 0.583 -10.1246 -1.0314 0.0000 0.0000 161.5768 459.4187 237.5273 3.2671 0.3980
+0 2004 34 7.00 0.417 -6.7890 -1.0515 11.9593 0.0000 219.5982 426.3918 269.6860 1.7414 0.3972
+0 2004 34 17.00 0.583 -11.3221 -1.0679 0.0000 1.7780 36.8354 359.5663 335.8048 1.0739 0.3951
+0 2004 35 7.00 0.417 -12.1655 -1.0660 4.5062 2.7940 34.1225 376.5246 318.9267 1.8915 0.3960
+0 2004 35 17.00 0.583 -14.8429 -1.0579 0.0000 1.5240 35.9765 430.3031 265.5000 0.9445 0.3959
+0 2004 36 7.00 0.417 -13.7620 -1.0500 4.6842 4.5720 36.0719 410.7018 285.4409 1.6055 0.3964
+0 2004 36 17.00 0.583 -14.2711 -1.0425 0.0000 0.7620 90.7132 475.5505 220.9165 4.7830 0.3959
+0 2004 37 7.00 0.417 -12.6410 -1.0400 24.8526 0.0000 125.4648 479.0370 217.5381 9.4985 0.3972
+0 2004 37 17.00 0.583 -8.8532 -1.0375 0.0000 0.0000 297.9269 557.6393 139.0439 9.2893 0.3956
+0 2004 38 7.00 0.417 -2.2392 -1.0350 26.1871 0.0000 457.4089 501.8946 194.8969 3.9395 0.3936
+0 2004 38 17.00 0.583 -8.3271 -1.0350 0.0000 0.0000 254.3840 497.2207 199.5708 7.3243 0.3932
+0 2004 39 7.00 0.417 -11.8375 -1.0245 20.4999 0.0000 143.4357 480.7708 216.4752 8.7185 0.3936
+0 2004 39 17.00 0.583 -13.8543 -1.0079 0.0000 0.0000 112.9273 491.2167 206.7504 5.1564 0.3951
+0 2004 40 7.00 0.417 -12.4135 -0.9992 26.3806 0.0000 179.0918 529.0771 169.2653 3.9265 0.3964
+0 2004 40 17.00 0.583 -13.0429 -0.9982 0.0000 0.0000 176.9732 538.4055 159.9797 4.0825 0.3971
+0 2004 41 7.00 0.417 -5.7430 -1.0085 27.3525 0.0000 445.1446 620.6489 77.2926 6.0510 0.3962
+0 2004 41 17.00 0.583 -11.7550 -1.0318 0.0000 0.2540 157.8453 489.6474 207.2833 2.0930 0.3954
+0 2004 42 7.00 0.417 -15.9550 -1.0535 10.8737 2.5400 38.6692 450.7856 245.2058 2.4707 0.3964
+0 2004 42 17.00 0.583 -21.3864 -1.0621 0.0000 0.0750 47.2867 533.3164 162.3015 1.6042 0.3971
+0 2004 43 7.00 0.417 -14.8085 -1.0735 27.6130 0.2250 85.2085 476.3405 218.7870 1.9202 0.3979
+0 2004 43 17.00 0.583 -12.8932 -1.1025 0.0000 0.0000 191.8117 543.6231 150.2536 4.2718 0.3980
+0 2004 44 7.00 0.438 -5.2490 -1.1495 28.2412 0.0000 329.0046 481.4761 210.3775 3.3790 0.3970
+0 2004 44 17.50 0.562 -6.0678 -1.1967 0.0000 0.0000 385.2730 564.5454 125.2855 3.4033 0.3946
+0 2004 45 7.00 0.438 -1.6746 -1.2286 31.1084 0.0000 580.5056 597.4870 90.9786 3.6310 0.3953
+0 2004 45 17.50 0.562 -5.5059 -1.2485 0.0000 0.0000 367.8125 525.3587 162.2546 4.9078 0.3942
+0 2004 46 7.00 0.438 -4.8076 -1.2624 30.2792 0.0000 387.8474 521.3857 165.6359 5.4843 0.3943
+0 2004 46 17.50 0.562 -9.1615 -1.2611 0.0000 0.0000 177.0750 440.6653 246.4105 4.0600 0.3937
+0 2004 47 7.00 0.438 -8.2681 -1.2490 27.6018 0.0000 214.5383 454.1323 233.4584 7.2424 0.3931
+0 2004 47 17.50 0.542 -6.2342 -1.2350 0.0000 0.0000 197.3771 379.0568 309.1339 6.9646 0.3935
+0 2004 48 6.50 0.458 -0.4117 -1.2255 21.8647 0.0000 284.8548 240.3668 448.2320 6.2082 0.3928
+0 2004 48 17.50 0.542 1.3560 -1.1965 0.0000 0.0000 414.3385 295.2469 394.5900 7.9923 0.3861
+0 2004 49 6.50 0.458 5.5345 -1.0832 28.9382 0.0000 802.3864 441.8249 252.8865 7.4259 0.3793
+0 2004 49 17.50 0.542 1.4765 -0.9257 0.0000 0.0000 525.9053 410.2377 291.3048 6.9258 0.3887
+0 2004 50 6.50 0.458 -3.3182 -0.7578 9.5091 4.0660 154.7778 257.4416 451.4511 2.0791 0.3897
+0 2004 50 17.50 0.542 -8.3269 -0.6517 0.0000 12.9580 28.5339 296.3928 417.1896 1.6836 0.3895
+0 2004 51 6.50 0.458 -7.5864 -0.5868 19.1756 0.0000 47.4274 297.8042 418.6591 1.5758 0.3888
+0 2004 51 17.50 0.542 -7.7973 -0.5614 0.0000 0.0000 120.4620 377.1387 340.4553 1.8241 0.3899
+0 2004 52 6.50 0.458 -3.4000 -0.5765 21.9298 0.0000 237.1157 350.6726 366.2480 2.2650 0.3925
+0 2004 52 17.50 0.542 -5.9854 -0.6023 0.0000 0.0000 138.2870 340.5620 375.2101 2.0446 0.3908
+0 2004 53 6.50 0.458 -5.0923 -0.6035 16.8580 3.5560 115.7587 288.7505 426.9686 1.9927 0.3915
+0 2004 53 17.50 0.542 -7.0892 -0.5997 0.0000 0.7620 70.6148 306.3387 409.5497 2.0614 0.3910
+0 2004 54 6.50 0.458 -4.2605 -0.5918 27.3241 0.0000 144.8375 288.2153 428.0254 2.1570 0.3926
+0 2004 54 17.50 0.542 -6.0804 -0.6063 0.0000 0.0000 132.5394 337.8738 377.7204 2.2798 0.3918
+0 2004 55 6.50 0.458 -2.5870 -0.6159 23.7496 0.0000 256.0035 335.6272 379.5425 1.9155 0.3909
+0 2004 55 17.50 0.542 -3.7377 -0.6388 0.0000 0.0000 185.3333 309.1634 404.9893 4.1468 0.3890
+0 2004 56 6.50 0.458 -2.7568 -0.6213 33.5925 0.0000 309.4018 396.2475 318.6799 4.2900 0.3876
+0 2004 56 17.50 0.542 -3.4135 -0.5874 0.0000 0.0000 373.1408 487.8309 228.6052 3.0212 0.3885
+0 2004 57 6.50 0.458 1.6396 -0.5746 26.7009 0.0000 646.7627 530.7870 186.2185 3.1257 0.3874
+0 2004 57 17.50 0.542 -2.0362 -0.5686 0.0000 0.0000 334.8053 395.8230 321.4488 9.5996 0.3885
+0 2004 58 6.50 0.458 -1.1127 -0.5315 23.5565 1.2700 309.3719 330.5912 388.3363 3.7343 0.3863
+0 2004 58 17.50 0.542 -3.8827 -0.4804 0.0000 0.0000 154.1925 290.8898 430.3230 2.1727 0.3867
+0 2004 59 6.50 0.458 -4.7027 -0.4339 18.2898 1.0160 92.4684 259.4677 463.8335 1.9227 0.3857
+0 2004 59 17.50 0.542 -9.0327 -0.4253 0.0000 0.5080 110.9870 408.0800 315.6076 5.2719 0.3866
+0 2004 60 6.50 0.458 -9.0091 -0.4229 19.4627 0.0000 167.1369 463.9403 259.8534 8.8514 0.3859
+0 2004 60 17.50 0.542 -10.9250 -0.4538 0.0000 0.0000 126.3981 468.9930 253.4145 14.1546 0.3925
+0 2004 61 6.50 0.458 -8.0491 -0.5035 34.9545 0.0000 233.6551 500.0536 220.1256 5.9177 0.3901
+0 2004 61 17.50 0.542 -11.7700 -0.5164 0.0000 0.0000 32.5756 389.3108 330.2903 1.7658 0.3877
+0 2004 62 6.50 0.458 -7.4918 -0.5313 20.5632 2.2860 77.5955 325.7572 393.1784 1.9678 0.3886
+0 2004 62 17.50 0.542 -7.5527 -0.5515 0.0000 0.2540 103.0317 355.0290 363.0058 5.0768 0.3887
+0 2004 63 6.50 0.458 -4.8495 -0.5778 32.5540 0.0000 211.5794 375.1597 341.7042 3.4350 0.3888
+0 2004 63 17.50 0.542 -5.6308 -0.5937 0.0000 0.2540 153.5659 345.1339 371.0214 2.8295 0.3871
+0 2004 64 6.50 0.458 -4.2600 -0.6142 31.1359 1.5240 208.6155 350.4235 364.8209 3.2473 0.3876
+0 2004 64 17.50 0.542 -9.5950 -0.6213 0.0000 12.4400 34.8661 335.5757 379.3521 1.9157 0.3867
+0 2004 65 6.50 0.458 -10.1973 -0.6255 27.9799 0.0000 122.4939 439.2859 275.4537 3.5591 0.3874
+0 2004 65 17.50 0.542 -9.3188 -0.6182 0.0000 0.0000 124.0132 419.8408 295.2271 16.1046 0.3892
+0 2004 66 6.50 0.458 -6.1955 -0.6093 12.6181 6.8620 101.2899 309.9886 405.4718 9.9727 0.3872
+0 2004 66 17.50 0.542 -6.2346 -0.5982 0.0000 2.7940 96.9883 307.4271 408.5263 14.2973 0.3876
+0 2004 67 6.50 0.458 -1.2935 -0.5884 37.5168 0.0000 327.9984 348.4577 367.9326 10.4127 0.3880
+0 2004 67 17.50 0.542 1.1352 -0.5755 0.0000 0.0000 479.9176 398.5316 318.4351 8.6335 0.3879
+0 2004 68 6.50 0.479 4.0754 -0.5448 39.5279 0.0000 614.5247 368.7338 349.6008 5.4465 0.3874
+0 2004 68 18.00 0.521 -0.9367 -0.4983 0.0000 0.0000 271.0505 288.7279 431.6834 2.4132 0.3857
+0 2004 69 6.50 0.479 6.6678 -0.4334 39.9484 0.0000 863.0952 458.1790 265.1438 5.4087 0.3869
+0 2004 69 18.00 0.521 1.4328 -0.3648 0.0000 0.0000 441.6473 350.7250 375.6852 6.8064 0.3866
+0 2004 70 6.50 0.479 -1.9482 -0.2747 14.1651 0.0000 197.0912 263.6963 466.7954 3.6491 0.3841
+0 2004 70 18.00 0.500 -5.9887 -0.1873 0.0000 0.0000 178.2356 397.6812 336.7859 2.3787 0.3867
+0 2004 71 6.00 0.500 0.3483 -0.1394 40.2870 0.0000 461.7380 427.1377 309.5165 2.0143 0.3844
+0 2004 71 18.00 0.500 -1.2199 -0.1227 0.0000 0.0000 384.3532 431.1531 306.2644 2.0082 0.3869
+0 2004 72 6.00 0.500 4.1425 -0.1198 40.2894 0.0000 776.3566 543.8359 193.7189 4.4300 0.3842
+0 2004 72 18.00 0.500 0.6557 -0.1097 0.0000 0.0000 494.7355 456.7750 281.2403 8.3379 0.3863
+0 2004 73 6.00 0.500 0.3555 -0.0741 31.0866 0.0000 433.6837 409.5906 330.0581 5.4921 0.3833
+0 2004 73 18.00 0.500 -5.3288 -0.0257 0.0000 0.0000 141.1127 343.4092 398.4699 2.6296 0.3842
+0 2004 74 6.00 0.500 0.4191 0.0081 38.1637 0.0000 493.1414 467.7059 275.7302 7.5379 0.3848
+0 2004 74 18.00 0.500 -3.8542 0.0247 0.0000 3.0520 142.3028 300.0488 444.1536 8.2142 0.3853
+0 2004 75 6.00 0.500 -4.5037 0.0397 27.2880 1.5240 208.8346 390.9189 353.9794 8.3804 0.3848
+0 2004 75 18.00 0.500 -4.6079 0.0528 0.0000 0.5080 237.4867 423.4843 322.0216 10.5621 0.3860
+0 2004 76 6.00 0.500 -3.3547 0.0523 33.3104 1.0160 231.4210 366.5031 378.9796 9.7379 0.3872
+0 2004 76 18.00 0.500 -2.1811 0.0445 0.0000 0.0000 264.2917 358.8506 386.2695 16.4287 0.3867
+0 2004 77 6.00 0.500 0.3947 0.0400 44.0143 0.0000 370.2978 344.8723 400.0395 12.7417 0.3872
+0 2004 77 18.00 0.500 -0.5335 0.0479 0.0000 0.0000 361.2541 384.9565 360.3199 5.2033 0.3851
+0 2004 78 6.00 0.500 3.1162 0.0657 44.9169 0.0000 578.9573 410.8652 335.2374 4.2867 0.3849
+0 2004 78 18.00 0.500 2.8546 0.1357 0.0000 0.0000 565.8815 427.7338 321.6236 8.7913 0.3848
+0 2004 79 6.00 0.500 7.5763 0.2122 34.2508 0.0000 892.4167 446.1497 306.7735 5.7725 0.3871
+0 2004 79 18.00 0.500 5.7496 0.2833 0.0000 0.0000 747.0803 439.4051 316.8550 4.4775 0.4716
+0 2004 80 6.00 0.500 6.6694 0.3472 43.0637 0.0000 792.8911 410.0845 349.1807 3.3937 0.4883
+0 2004 80 18.00 0.500 2.4444 0.2677 0.0000 0.0000 497.7617 371.2233 384.3517 2.4650 0.5506
+0 2004 81 6.00 0.500 8.4258 0.2953 33.2019 0.0000 1006.5879 502.3710 254.5368 4.3450 0.5509
+0 2004 81 18.00 0.500 5.1200 0.1365 0.0000 0.0000 751.1561 476.2649 273.1880 3.3963 0.5404
+0 2004 82 6.00 0.500 8.9688 0.0294 37.5215 0.0000 1029.4752 473.5583 270.9346 3.7467 0.7532
+0 2004 82 18.00 0.500 5.8458 0.2918 0.0000 0.0000 683.3046 371.2389 385.4413 2.4033 0.6298
+0 2004 83 6.00 0.500 5.9167 0.1957 28.2315 0.0000 562.2416 232.7822 519.4045 4.5687 0.0000
+0 2004 83 18.00 0.500 3.3483 0.3954 0.0000 0.0000 365.1272 211.4668 550.0870 6.2987 0.0000
+0 2004 84 6.00 0.500 5.5479 0.2657 33.6904 0.0000 529.3593 231.2809 524.1811 4.9887 0.0000
+0 2004 84 18.00 0.500 1.0872 0.3900 0.0000 1.0160 188.9767 152.0973 609.2338 2.8171 0.0000
+0 2004 85 6.00 0.500 5.9096 0.1126 44.0911 0.2540 674.3490 338.1628 410.1798 3.3179 0.0000
+0 2004 85 18.00 0.500 3.9908 0.2329 0.0000 0.0000 665.5067 465.8979 288.0478 2.3758 0.0000
+0 2004 86 6.00 0.500 8.8146 0.0878 39.3870 0.0000 1077.2823 531.5309 215.6506 3.2317 0.0000
+0 2004 86 18.00 0.500 1.7575 0.0551 0.0000 0.0000 483.7605 384.6815 360.9878 7.0192 0.0000
+0 2004 87 6.00 0.500 -4.9517 -0.3222 26.4336 0.2540 262.5949 443.8699 284.4821 5.5196 0.0000
+0 2004 87 18.00 0.500 -7.9825 -0.1810 0.0000 0.2540 135.1965 415.9017 318.8661 11.6000 0.0000
+0 2004 88 6.00 0.500 -7.2142 -0.2183 21.0684 2.5400 139.1002 395.8001 337.2685 5.1492 0.0000
+0 2004 88 18.00 0.500 -7.1517 -0.1633 0.0000 0.2540 183.4925 441.5233 294.0518 11.5308 0.0000
+0 2004 89 6.00 0.500 -0.1611 -0.1701 47.5354 0.0000 527.4700 509.6938 225.5626 8.2971 0.0000
+0 2004 89 18.00 0.500 -0.0962 -0.1466 0.0000 0.0000 553.5108 549.7106 186.6215 2.9733 0.0000
+0 2004 90 6.00 0.500 6.4992 -0.1275 47.0776 0.0000 1005.4867 623.2737 113.9584 2.8354 0.0000
+0 2004 90 18.00 0.500 3.9254 0.0841 0.0000 0.0000 816.3811 614.8247 132.1332 3.4408 0.0000
+0 2004 91 6.00 0.521 8.6348 0.1456 47.0314 0.0000 1131.2998 605.4592 144.4356 5.2672 0.0000
+0 2004 91 18.50 0.479 4.9017 0.2601 0.0000 0.0000 812.5630 559.1689 196.0118 3.5861 0.0000
+0 2004 92 6.00 0.521 8.9496 0.2575 34.0755 0.0000 1077.6021 535.5875 219.4895 3.6716 0.0000
+0 2004 92 18.50 0.479 3.8787 0.3227 0.0000 0.0000 636.7749 444.4822 313.6839 1.7458 0.0000
+0 2004 93 6.00 0.521 -1.3235 0.4239 13.0780 1.5280 48.1995 126.2608 636.6373 3.1412 0.0000
+0 2004 93 18.50 0.458 -3.7955 0.3543 0.0000 27.9400 8.6056 178.6638 580.9658 3.1923 0.0000
+0 2004 94 5.50 0.542 -3.2569 0.2711 16.1214 1.5240 7.6253 154.2114 601.4897 1.2658 0.0000
+0 2004 94 18.50 0.458 -1.1390 0.4687 0.0000 0.0000 171.0640 240.6740 524.3549 3.3705 0.0000
+0 2004 95 5.50 0.542 3.7142 0.3865 42.4271 0.0000 334.7198 157.0806 604.0703 1.9987 0.0000
+0 2004 95 18.50 0.458 2.6223 0.1719 0.0000 0.0000 456.0338 332.4465 418.6066 2.2167 0.0000
+0 2004 96 5.50 0.542 2.2795 0.3646 24.1373 1.7780 250.6077 152.2134 607.8894 2.8927 0.0000
+0 2004 96 18.50 0.458 -0.7862 0.6596 0.0000 19.3160 10.0721 76.3141 697.8440 2.1177 0.0000
+0 2004 97 5.50 0.542 0.6079 0.6070 22.6616 2.0360 4.7105 1.3182 770.3236 1.5800 0.0000
+0 2004 97 18.50 0.458 -0.0365 0.7787 0.0000 0.0000 113.3689 151.7239 628.1909 3.3895 0.0000
+0 2004 98 5.50 0.542 0.4067 0.5649 10.4765 9.9080 47.7512 53.1370 716.5222 2.2013 0.0000
+0 2004 98 18.50 0.458 -1.7150 0.5382 0.0000 3.0480 -0.9000 99.1961 669.1406 1.8586 0.0000
+0 2004 99 5.50 0.542 0.5468 0.5156 24.5399 5.8400 40.1362 35.2317 732.0474 1.3772 0.0000
+0 2004 99 18.50 0.458 -1.1826 0.6072 0.0000 1.7780 -3.6839 77.3169 694.3202 1.0951 0.0000
+0 2004 100 5.50 0.542 -4.3208 0.3711 11.5111 17.0220 9.0356 198.7030 561.7567 2.4627 0.0000
+0 2004 100 18.50 0.458 -8.9232 -0.0220 0.0000 10.9260 21.3385 333.9552 408.1097 2.6455 0.0000
+0 2004 101 5.50 0.542 -9.5138 0.1948 19.6123 7.6200 24.6201 363.0948 389.1032 2.2169 0.0000
+0 2004 101 18.50 0.458 -11.1895 -0.3146 0.0000 0.0000 26.8329 380.5844 348.1169 1.9901 0.0000
+0 2004 102 5.50 0.542 -6.2731 0.2133 45.0561 0.7620 123.5070 366.8003 386.2779 2.5084 0.0000
+0 2004 102 18.50 0.458 -8.8336 -0.1387 0.0000 5.0880 70.2255 373.0114 363.7247 3.5656 0.0000
+0 2004 103 5.50 0.542 -3.7709 0.1568 52.7564 1.0160 297.0663 449.6423 300.7131 7.0204 0.0000
+0 2004 103 18.50 0.458 -1.7595 0.3952 0.0000 0.0000 221.5166 316.8904 444.6502 6.6477 0.0000
+0 2004 104 5.50 0.542 3.7235 0.1826 50.4693 0.0000 539.1019 345.3344 406.2721 3.0305 0.0000
+0 2004 104 18.50 0.458 2.2265 0.3357 0.0000 0.0000 468.7987 373.7441 384.9956 3.0018 0.0000
+0 2004 105 5.50 0.542 4.7281 0.0839 42.3494 0.0000 665.0056 408.8058 338.2017 4.5677 0.0000
+0 2004 105 18.50 0.458 0.6139 0.1427 0.0000 0.7620 380.5080 351.4633 398.2254 7.1145 0.0000
+0 2004 106 5.50 0.542 4.1229 0.0534 46.5008 0.0000 753.4568 517.3965 228.2538 7.2754 0.0000
+0 2004 106 18.50 0.458 3.7073 0.1533 0.0000 0.0000 681.3907 495.7575 254.4209 4.8668 0.0000
+0 2004 107 5.50 0.542 6.7788 0.0966 53.2420 0.0000 893.7440 497.7802 249.8168 4.2208 0.0000
+0 2004 107 18.50 0.458 4.5927 0.0919 0.0000 0.0000 807.2933 565.1027 182.2165 3.1827 0.0000
+0 2004 108 5.50 0.542 9.1273 0.1401 53.1845 0.0000 1087.0061 516.1390 233.5298 4.4892 0.0000
+0 2004 108 18.50 0.458 3.3401 0.3096 0.0000 0.0000 536.2836 370.4831 387.0299 3.9086 0.0000
+0 2004 109 5.50 0.542 -1.6749 0.1068 40.4674 1.7780 243.2430 320.3039 427.7496 9.4496 0.0000
+0 2004 109 18.50 0.458 -2.5177 0.1895 0.0000 0.0000 259.5719 375.4463 376.4723 7.1486 0.0000
+0 2004 110 5.50 0.542 1.8274 0.2903 36.9445 0.0000 433.2199 351.8304 404.8900 2.8704 0.9450
+0 2004 110 18.50 0.458 -2.0143 0.5952 0.0000 0.0000 256.5189 369.9982 401.0593 9.8141 0.9441
+0 2004 111 5.50 0.542 -1.6599 0.5920 48.7072 1.0160 312.1225 409.5183 361.3828 12.2146 0.9421
+0 2004 111 18.50 0.458 -3.4109 0.5978 0.0000 1.0160 174.7931 344.3920 426.7896 10.4886 0.9429
+0 2004 112 5.50 0.542 -3.9531 0.5987 12.8746 11.7020 76.8624 265.6243 505.6010 1.9262 0.9367
+0 2004 112 18.50 0.458 -5.8982 0.6012 0.0000 4.8260 16.0301 271.1913 500.1515 1.2533 0.9329
+0 2004 113 5.50 0.542 -5.4750 0.6008 9.5844 9.6560 14.7154 256.3303 514.9946 2.6262 0.9291
+0 2004 113 18.50 0.458 -7.1591 0.6025 0.0000 5.5880 18.5521 312.5994 458.8044 2.8930 0.9259
+0 2004 114 5.50 0.562 -5.6793 0.6019 7.8925 3.5560 15.3035 263.5354 507.8413 2.1483 0.9218
+0 2004 114 19.00 0.438 -6.2524 0.5981 0.0000 0.0000 19.4111 285.4185 485.7764 1.0133 0.9181
+0 2004 115 5.50 0.562 -0.5869 0.5981 49.4261 0.2540 300.0090 349.6151 421.5788 7.7459 0.9152
+0 2004 115 19.00 0.438 -3.0466 0.5965 0.0000 2.7940 58.5377 213.4891 557.6305 2.2023 0.9133
+0 2004 116 5.50 0.562 -1.4966 0.5963 37.6046 2.7940 157.2200 243.8092 527.2977 3.4244 0.9133
+0 2004 116 19.00 0.438 -0.6017 0.5971 0.0000 0.0000 266.3830 321.3549 449.7921 4.0800 0.9153
+0 2004 117 5.50 0.562 4.8316 0.5937 55.4701 0.0000 634.1366 384.8878 386.0949 4.5422 0.9124
+0 2004 117 19.00 0.417 4.2165 0.5968 0.0000 0.0000 602.0746 407.4699 363.6605 4.5980 0.9128
+0 2004 118 5.00 0.583 8.1918 0.5918 51.6390 0.0000 947.2182 475.3503 295.5424 4.4775 0.9217
+0 2004 118 19.00 0.417 5.5615 0.5955 0.0000 0.0000 803.2185 523.9935 247.0771 9.9835 0.9668
+0 2004 119 5.00 0.583 7.6786 0.5920 37.5535 0.0000 913.6955 482.2403 288.6609 7.6946 0.9527
+0 2004 119 19.00 0.417 -0.3765 0.5976 0.0000 0.2540 218.5276 240.3268 530.8444 1.7670 0.9685
+0 2004 120 5.00 0.583 -5.3996 0.6012 17.3233 0.0000 15.3806 254.6177 516.7250 1.0082 0.9652
+0 2004 120 19.00 0.417 -7.2645 0.6033 0.0000 0.7620 18.8501 315.5075 455.9368 0.4414 0.9566
+0 2004 121 5.00 0.583 -6.5454 0.6015 16.4802 7.3660 18.2820 293.8572 477.5026 0.6001 0.9492
+0 2004 121 19.00 0.417 -7.4215 0.6048 0.0000 0.0000 58.8101 360.3653 411.1534 1.6154 0.9249
+0 2004 122 5.00 0.583 -0.7475 0.5981 55.3641 0.5080 316.7927 366.1122 405.0851 4.7036 0.9226
+0 2004 122 19.00 0.417 0.3660 0.6001 0.0000 0.0000 337.0122 347.6404 423.6506 4.0555 0.9166
+0 2004 123 5.00 0.583 5.9550 0.5938 54.1213 0.0000 727.5681 413.1219 357.8665 4.6493 0.9138
+0 2004 123 19.00 0.417 5.3110 0.5921 0.0000 0.0000 626.7193 364.3355 406.5698 8.8270 0.9594
+0 2004 124 5.00 0.583 9.0289 0.4359 51.1912 0.0000 888.6877 342.3143 421.2498 5.0854 0.9805
+0 2004 124 19.00 0.417 6.6425 0.3808 0.0000 0.0000 629.4714 266.4758 494.3965 2.5530 0.0000
+0 2004 125 5.00 0.583 9.8914 0.1563 47.0232 0.0000 904.9687 270.8641 479.6475 3.1689 0.0000
+0 2004 125 19.00 0.417 9.2045 0.4727 0.0000 0.0000 896.5238 347.4674 417.7612 2.7845 0.0000
+0 2004 126 5.00 0.583 13.0750 -0.0844 55.3065 0.0000 1315.2157 382.0583 357.4359 6.0775 0.0000
+0 2004 126 19.00 0.417 10.1970 0.4155 0.0000 0.0000 1054.8121 421.5605 340.9392 6.6195 0.0000
+0 2004 127 5.00 0.583 12.9861 0.0361 57.1663 0.0000 1380.2437 464.2205 280.8354 5.0150 0.0000
+0 2004 127 19.00 0.417 9.3455 0.3044 0.0000 0.0000 1054.2627 485.4478 271.8069 6.3980 0.0000
+0 2004 128 5.00 0.583 12.2068 0.1435 56.5054 0.0000 1282.6234 448.1950 301.9483 3.7643 0.0000
+0 2004 128 19.00 0.417 8.7770 0.2877 0.0000 0.0000 1005.9348 478.9555 277.5226 5.5850 0.0000
+0 2004 129 5.00 0.583 10.9232 0.2372 42.9492 0.0000 1149.0260 435.9828 318.4308 3.5882 0.0000
+0 2004 129 19.00 0.417 8.6390 0.3411 0.0000 0.0000 957.7307 447.5276 311.4619 3.4435 0.0000
+0 2004 130 5.00 0.583 11.1636 0.4364 45.3582 0.0000 1101.5760 381.4316 382.6169 3.3550 0.0000
+0 2004 130 19.00 0.417 7.8195 0.2963 0.0000 0.0000 899.3366 446.5466 310.3289 2.9425 0.0000
+0 2004 131 5.00 0.583 9.9875 0.5071 51.0648 0.0000 723.4668 110.0702 657.7213 2.8489 0.0000
+0 2004 131 19.00 0.417 7.5920 0.5567 0.0000 0.0000 565.8491 145.6754 623.5642 3.0615 0.0000
+0 2004 132 5.00 0.583 9.5418 0.3793 50.1294 0.0000 1002.9800 406.3581 355.1806 4.8139 0.0000
+0 2004 132 19.00 0.417 -1.0164 -0.0919 0.0000 0.7620 301.5747 341.2379 397.6048 6.3980 0.0000
+0 2004 133 5.00 0.583 -1.8471 0.4290 12.7574 20.0700 128.5515 227.4013 536.4800 1.9146 0.0000
+0 2004 133 19.00 0.417 -4.9020 -0.1000 0.0000 3.0480 12.0546 200.6625 537.8090 0.9370 0.0000
+0 2004 134 5.00 0.583 -4.7779 0.3440 21.0542 7.6240 13.9691 219.4758 540.0014 1.6350 0.0000
+0 2004 134 19.00 0.417 -4.0100 -0.0244 0.0000 0.0000 54.8291 217.2970 524.6632 1.8079 0.0000
+0 2004 135 5.00 0.583 0.0419 0.6225 46.9266 1.7780 240.0492 263.9658 508.6535 4.5702 0.9947
+0 2004 135 19.00 0.417 0.8816 0.6112 0.0000 0.5080 292.2573 278.8273 492.9958 4.3200 0.9887
+0 2004 136 5.00 0.583 4.2268 0.6078 31.0736 0.2540 363.5166 164.3135 607.3476 3.5171 0.9847
+0 2004 136 19.00 0.417 3.9905 0.6054 0.0000 0.0000 326.6674 144.9221 626.6229 2.4620 0.9940
+0 2004 137 5.00 0.583 8.2857 0.5949 40.9928 0.0000 853.4734 377.9243 393.1172 5.1307 1.0000
+0 2004 137 19.00 0.417 4.3025 0.5913 0.0000 0.0000 251.3306 46.2150 724.6520 1.7101 1.0000
+0 2004 138 5.00 0.583 6.1996 0.8640 29.4622 0.0000 270.3154 -41.1342 825.3809 2.3310 1.0000
+0 2004 138 19.00 0.417 4.9955 0.9656 0.0000 0.0000 360.8859 136.0511 653.0984 1.9965 1.0000
+0 2004 139 5.00 0.583 8.9907 2.1253 41.7581 0.0000 431.5484 -19.3020 869.4317 2.4438 1.0000
+0 2004 139 19.00 0.417 9.3390 2.1145 0.0000 0.0000 888.9395 412.1784 435.4847 2.8242 1.0000
+0 2004 140 5.00 0.583 11.4164 3.3625 53.8995 0.0000 887.8387 297.1924 621.3050 2.8417 1.0000
+0 2004 140 19.00 0.417 6.4725 2.9350 0.0000 0.0000 318.5330 95.5989 796.6259 1.8855 1.0000
+0 2004 141 5.00 0.583 7.1596 3.3089 32.7380 0.0000 196.0833 -51.8388 965.4005 2.6061 1.0000
+0 2004 141 19.00 0.417 7.2135 2.5910 0.0000 0.0000 608.9148 320.0285 553.4674 2.6885 1.0000
+0 2004 142 5.00 0.583 10.4654 3.1507 48.0845 0.0000 1106.9677 587.6281 318.2709 3.9668 1.0000
+0 2004 142 19.00 0.417 5.7725 2.8820 0.0000 0.0000 648.6895 468.4247 420.7941 3.5710 1.0000
+0 2004 143 5.00 0.583 4.4046 2.4354 27.1699 4.3220 405.4583 288.3465 576.2147 2.8596 1.0000
+0 2004 143 19.00 0.417 2.2495 1.7015 0.0000 0.2540 416.2076 387.5434 438.5753 7.5480 1.0000
+0 2004 144 5.00 0.604 8.0138 2.6817 51.5456 0.0000 880.4922 525.4401 354.4875 6.6479 1.0000
+0 2004 144 19.50 0.396 6.2374 2.6216 0.0000 0.0000 866.7824 645.3784 229.4423 4.8616 1.0000
+0 2004 145 5.00 0.604 6.0969 3.2593 58.2832 0.0000 615.6897 437.2612 474.3098 4.0048 1.0000
+0 2004 145 19.50 0.396 -0.2582 2.8695 0.0000 2.2900 11.8083 167.2450 721.2072 1.2988 1.0000
+0 2004 146 5.00 0.604 1.0454 2.3959 21.5889 1.0160 18.3700 83.4332 779.2001 1.8983 1.0000
+0 2004 146 19.50 0.396 1.3494 1.9963 0.0000 0.2540 220.5123 253.6641 587.7197 5.5105 1.0000
+0 2004 147 5.00 0.604 6.7321 2.6628 63.1713 0.0000 761.3019 496.9911 381.5855 10.0393 1.0000
+0 2004 147 19.50 0.396 5.4947 2.7421 0.0000 0.0000 640.5336 473.4803 407.7083 3.3558 1.0000
+0 2004 148 5.00 0.604 8.1190 4.2931 39.5525 0.0000 683.8438 413.9993 559.9276 3.2041 1.0000
+0 2004 148 19.50 0.396 5.9621 3.7142 0.0000 0.2540 390.7668 251.3424 685.1024 2.1453 1.0000
+0 2004 149 5.00 0.604 12.0045 5.7000 44.8981 0.0000 1075.0754 564.8229 500.8650 3.1507 1.0000
+0 2004 149 19.50 0.375 8.8367 5.3572 0.0000 0.0000 842.8373 585.5635 451.9391 3.2706 1.0000
+0 2004 150 4.50 0.625 3.7133 5.1910 40.3795 2.2860 325.2766 404.7993 622.1918 4.9817 1.0000
+0 2004 150 19.50 0.375 -3.0211 2.7589 0.0000 0.0000 188.2608 452.9954 429.7272 3.3850 1.0000
+0 2004 151 4.50 0.625 -2.0184 2.6587 36.8485 0.2540 335.5599 550.3993 327.2979 6.2437 1.0000
+0 2004 151 19.50 0.375 -1.4523 1.4800 0.0000 0.0000 270.1796 405.6936 409.1610 8.3356 1.0000
+0 2004 152 4.50 0.625 3.6447 2.9465 60.0045 0.7620 553.3577 498.1296 399.1874 9.2097 0.9584
+0 2004 152 19.50 0.375 3.9652 3.0589 0.0000 0.0000 579.2206 519.0876 380.1037 3.9106 0.9574
+0 2004 153 4.50 0.625 7.9227 4.8713 49.3888 0.0000 829.4065 614.7804 398.9854 4.1107 0.9199
+0 2004 153 19.50 0.375 5.1561 4.2928 0.0000 0.0000 471.6055 416.7809 553.7161 2.2983 0.9288
+0 2004 154 4.50 0.625 7.4443 6.3960 49.3120 0.0000 451.1086 383.5302 731.3893 2.6491 0.8976
+0 2004 154 19.50 0.375 6.6533 4.9917 0.0000 0.0000 441.1206 330.0515 683.7032 3.2350 0.9080
+0 2004 155 4.50 0.625 12.4867 6.7870 38.8331 1.7000 1056.4771 581.8836 557.2011 3.6690 0.8582
+0 2004 155 19.50 0.375 10.2083 5.6667 0.0000 0.0000 875.7729 535.8366 521.0217 3.4817 0.8619
+0 2004 156 4.50 0.625 14.2233 7.8583 40.1493 0.0000 1342.3934 769.9764 449.5950 3.4677 0.8153
+0 2004 156 19.50 0.375 9.4639 6.2550 0.0000 0.0000 816.9545 578.6299 517.6699 2.9007 0.8154
+0 2004 157 4.50 0.625 11.5010 8.1337 46.5389 0.0000 702.9926 426.5402 811.8798 2.3166 0.7749
+0 2004 157 19.50 0.375 10.4211 6.6433 0.0000 0.0000 717.3287 424.4316 698.8928 2.7717 0.7730
+0 2004 158 4.50 0.625 17.2183 9.2397 53.8199 0.0000 1692.6558 876.5525 453.0295 3.8000 0.7146
+0 2004 158 19.50 0.375 14.4078 7.7322 0.0000 0.0000 1407.3339 808.0722 393.4662 2.9622 0.7154
+0 2004 159 4.50 0.625 18.4033 10.1477 62.9031 0.0000 1957.6012 1064.8590 341.9026 4.1967 0.6521
+0 2004 159 19.50 0.375 14.6006 8.6256 0.0000 0.0000 1490.9392 935.3259 333.4557 4.1189 0.6572
+0 2004 160 4.50 0.625 13.4970 10.3797 45.4302 1.2000 783.1556 494.8754 926.9637 2.1391 0.6197
+0 2004 160 19.50 0.375 8.3600 8.5617 0.0000 4.7000 278.8310 294.7331 969.4857 2.9567 0.7102
+0 2004 161 4.50 0.625 10.5530 9.9600 45.7648 9.6950 312.7909 266.5768 1119.5210 2.3751 0.7491
+0 2004 161 19.50 0.375 6.7133 8.1822 0.0000 0.3000 109.8118 215.9689 1017.7079 2.9044 0.8714
+0 2004 162 4.50 0.625 7.6983 8.9700 59.8507 0.5000 813.7850 908.0488 393.2569 6.0853 0.8264
+0 2004 162 19.50 0.375 0.5099 6.1383 0.0000 0.4000 266.0148 586.2264 502.6500 12.4500 0.8423
+0 2004 163 4.50 0.625 5.2265 6.7237 60.6629 0.0000 627.6802 719.9986 415.5406 8.2627 0.7972
+0 2004 163 19.50 0.375 4.3389 6.0644 0.0000 0.0000 546.8505 654.0723 428.8513 2.9683 0.8054
+0 2004 164 4.50 0.625 5.7366 6.9410 43.8060 2.7980 427.8623 499.9088 646.7668 3.7013 0.7797
+0 2004 164 19.50 0.375 5.7983 6.2061 0.0000 0.0000 437.3248 464.5502 627.4906 2.6650 0.7896
+0 2004 165 4.50 0.625 11.5210 8.4710 47.1778 0.0000 928.2027 660.2250 607.5501 4.3740 0.7357
+0 2004 165 19.50 0.375 10.2850 7.4839 0.0000 0.0000 840.3684 609.4810 572.9284 2.6172 0.7352
+0 2004 166 4.50 0.625 14.3133 9.6313 51.8905 0.2540 1237.6965 782.7685 579.8419 3.6473 0.6922
+0 2004 166 19.50 0.375 11.6328 8.1939 0.0000 0.0000 950.6820 661.3254 574.0634 3.7411 0.6856
+0 2004 167 4.50 0.625 13.1157 10.2430 46.6637 1.2700 920.2686 651.0867 758.0201 2.6413 0.6404
+0 2004 167 19.50 0.375 8.6722 8.3139 0.0000 0.0000 585.5402 549.2256 695.1978 2.7622 0.6423
+0 2004 168 4.50 0.625 4.3680 7.5473 11.7082 8.6360 -6.9837 203.7205 982.1617 1.7538 0.6765
+0 2004 168 19.50 0.375 3.0556 6.5372 0.0000 0.2540 0.0897 215.3968 899.3701 1.8094 0.7714
+0 2004 169 4.50 0.625 5.9023 7.1090 28.8859 12.2690 103.8777 182.8822 974.2816 2.3303 0.8193
+0 2004 169 19.50 0.375 2.7206 6.4511 0.0000 6.0940 -8.2795 219.5511 888.9933 1.0750 0.9983
+0 2004 170 4.50 0.625 2.6060 6.2097 15.6154 7.9470 -10.4679 206.9153 885.3295 1.7735 0.9732
+0 2004 170 19.50 0.375 3.7811 5.0911 0.0000 3.0520 43.2133 122.6162 897.2662 1.9958 1.0000
+0 2004 171 4.50 0.625 9.7743 8.4833 50.6424 1.7780 588.1546 482.6459 782.9446 3.9803 0.9240
+0 2004 171 19.50 0.375 6.6883 6.0589 0.0000 0.5080 338.5932 295.9880 786.6442 3.4661 0.9201
+0 2004 172 4.50 0.625 9.6007 8.8583 45.9184 5.5940 652.0423 582.3960 709.6490 3.9270 0.9149
+0 2004 172 19.50 0.375 5.3478 6.2517 0.0000 0.0000 232.0674 286.0521 809.2863 1.9271 0.9331
+0 2004 173 4.50 0.625 1.0222 4.0707 11.1002 12.9660 -6.4176 157.3227 799.7788 1.2610 1.0000
+0 2004 173 19.50 0.375 0.3393 2.8867 0.0000 0.2540 106.5944 236.4254 652.6201 2.8094 1.0000
+0 2004 174 4.50 0.625 6.9161 5.8843 61.2472 0.0000 619.3563 543.8903 542.6790 2.0988 0.9804
+0 2004 174 19.50 0.375 5.8506 5.9511 0.0000 0.0000 485.4137 492.7938 583.2703 3.3444 0.9260
+0 2004 175 4.50 0.625 10.7050 8.0440 51.8474 0.0000 845.9130 636.0866 601.8157 2.8901 0.8789
+0 2004 175 19.50 0.375 9.7711 6.8022 0.0000 0.0000 894.1490 668.6166 465.5158 2.9256 0.8915
+0 2004 176 4.50 0.625 9.4480 8.3797 43.3446 0.0000 475.3878 393.4695 862.7562 2.9860 0.8591
+0 2004 176 19.50 0.375 6.1517 7.5117 0.0000 0.0000 134.4565 229.9534 954.1055 1.9806 0.8643
+0 2004 177 4.50 0.625 9.0447 8.3533 26.4625 0.5080 404.2978 352.1778 898.8710 3.0067 0.8383
+0 2004 177 19.50 0.375 5.6656 7.2689 0.0000 4.5740 145.0395 253.3875 912.8868 2.8944 0.8723
+0 2004 178 4.50 0.625 8.1243 8.5390 37.9126 2.7960 325.8527 355.5079 908.2238 2.7900 0.8678
+0 2004 178 19.50 0.375 5.6133 7.1094 0.0000 0.0000 215.5494 317.8015 837.3193 2.8172 0.8943
+0 2004 179 4.50 0.625 6.0977 7.1363 19.6893 6.8620 106.9659 175.8165 981.7479 2.2238 0.9582
+0 2004 179 19.50 0.375 4.5883 6.3600 0.0000 2.5400 131.1777 245.4842 857.3209 2.6750 1.0000
+0 2004 180 4.50 0.625 8.7377 8.6257 43.3659 0.0000 184.9378 182.4569 1096.2546 2.0212 0.9650
+0 2004 180 19.50 0.375 8.3444 7.8183 0.0000 0.0000 417.5772 378.7412 828.2109 2.4872 0.9530
+0 2004 181 4.50 0.625 10.7413 9.0263 41.1968 3.8180 582.5561 439.2646 865.5255 2.4551 0.9140
+0 2004 181 19.50 0.375 6.7078 8.1289 0.0000 6.8600 226.0813 327.6867 901.9109 3.0678 1.0000
+0 2004 182 4.50 0.625 7.5200 8.0143 20.5387 13.2080 222.4566 256.1759 965.0123 3.0679 1.0000
+0 2004 182 19.50 0.375 6.7817 7.3278 0.0000 0.0000 271.7462 310.5865 859.6133 3.8944 1.0000
+0 2004 183 4.50 0.625 10.8383 9.6953 58.7693 0.0000 608.8850 509.3092 856.1696 4.2391 0.9638
+0 2004 183 19.50 0.375 9.6778 8.2139 0.0000 0.0000 701.6137 586.8699 650.1183 3.4022 0.9350
+0 2004 184 4.50 0.625 14.6513 10.1143 61.0274 0.0000 1269.6064 832.6130 570.0234 3.0147 0.8945
+0 2004 184 19.50 0.375 8.5578 8.7633 0.0000 0.0000 396.4278 410.3745 868.9265 2.2828 0.8868
+0 2004 185 4.50 0.625 14.1990 10.1560 52.3328 0.0000 1383.2390 997.3948 405.6542 2.9960 0.8550
+0 2004 185 19.50 0.375 9.4778 8.6872 0.0000 0.0000 785.6334 721.7237 551.6633 3.0600 0.8423
+0 2004 186 4.50 0.625 13.1140 9.6963 43.4672 0.0000 1241.7118 925.9518 434.1101 3.3173 0.8052
+0 2004 186 19.50 0.375 8.0300 8.5483 0.0000 0.0000 543.6939 581.0830 680.4588 3.3756 0.7875
+0 2004 187 4.50 0.625 9.8990 9.0583 34.1673 4.0680 595.6486 523.4291 779.8674 4.2857 0.7844
+0 2004 187 19.50 0.375 4.6511 8.1761 0.0000 1.7820 44.9022 285.2898 947.6782 2.1452 0.8501
+0 2004 188 4.50 0.625 10.4150 10.0107 61.9184 0.0000 599.8165 553.7095 836.9852 2.4768 0.8116
+0 2004 188 19.50 0.375 10.6322 8.7206 0.0000 0.0000 825.4758 665.8705 609.5650 2.1461 0.8035
+0 2004 189 4.50 0.625 15.0977 10.8763 57.5652 0.0000 1357.9670 929.2803 537.5504 4.6350 0.7596
+0 2004 189 19.50 0.375 13.0761 9.5767 0.0000 0.0000 1100.7317 781.4759 562.8909 3.9089 0.7456
+0 2004 190 4.50 0.625 17.2287 11.2863 58.7435 0.0000 1759.1132 1116.9526 387.1634 3.4473 0.7010
+0 2004 190 19.50 0.375 12.2789 9.5911 0.0000 0.0000 1061.6249 818.9361 526.8162 3.1900 0.6995
+0 2004 191 4.50 0.625 15.0883 10.8777 44.0051 0.0000 1163.9702 743.2795 718.0372 3.0167 0.6668
+0 2004 191 19.50 0.375 12.3933 9.8372 0.0000 0.0000 929.0581 697.7341 667.3793 2.6300 0.6660
+0 2004 192 4.50 0.625 13.6343 11.3753 43.9585 0.0000 899.8024 677.0346 829.1406 3.4790 0.6245
+0 2004 192 19.50 0.375 11.6528 9.7944 0.0000 0.0000 796.8081 634.5576 727.8011 3.8156 0.6158
+0 2004 193 4.50 0.625 17.2007 11.8930 53.7112 0.0000 1586.0293 1003.9836 557.0353 2.7464 0.5933
+0 2004 193 19.50 0.375 14.7894 10.4478 0.0000 0.0000 1228.2740 803.8926 614.9336 3.4411 0.5783
+0 2004 194 4.50 0.625 18.5777 12.2507 54.7235 0.0000 1707.9811 982.9199 610.7286 2.9763 0.5492
+0 2004 194 19.50 0.375 14.8033 10.9833 0.0000 0.0000 1023.3841 643.7217 821.5457 2.7144 0.5600
+0 2004 195 4.50 0.625 19.2520 11.7290 40.5224 0.0000 1741.9298 866.2812 669.3601 2.6403 0.5285
+0 2004 195 19.50 0.375 15.8239 10.9378 0.0000 0.0000 1266.2526 763.8550 695.9334 3.8433 0.5203
+0 2004 196 4.50 0.625 15.9980 13.1620 39.6080 0.5080 940.2838 629.5075 1051.5426 2.8458 0.5079
+0 2004 196 19.50 0.375 13.7172 11.1744 0.0000 0.0000 899.2183 653.4267 828.2579 3.6467 0.5194
+0 2004 197 4.50 0.625 15.4897 12.3547 37.1040 0.7620 898.0692 560.7891 1036.0018 2.4012 0.5095
+0 2004 197 19.50 0.396 9.4279 11.2642 0.0000 0.2540 27.9216 185.6929 1303.3440 1.3193 0.5194
+0 2004 198 5.00 0.604 9.7014 11.1283 14.0930 34.2900 16.7898 138.0401 1338.6472 1.9403 0.7121
+0 2004 198 19.50 0.396 8.5447 9.9374 0.0000 0.0000 107.5097 220.8156 1153.4021 1.8899 0.9471
+0 2004 199 5.00 0.604 10.9672 10.6710 23.4730 7.6180 154.0185 129.5464 1309.3857 2.2691 0.9299
+0 2004 199 19.50 0.396 9.9900 9.5037 0.0000 0.0000 380.4525 341.9367 997.3674 3.3716 0.9382
+0 2004 200 5.00 0.604 14.4355 11.6217 41.1137 0.2540 670.4191 397.7329 1138.3544 2.2126 0.8944
+0 2004 200 19.50 0.396 13.1637 10.7642 0.0000 0.0000 623.8215 398.0211 1046.7833 4.2374 0.8988
+0 2004 201 5.00 0.604 14.2362 11.8917 32.1009 9.1440 619.7220 381.4796 1168.6005 3.4212 0.9247
+0 2004 201 19.50 0.396 12.4647 10.7742 0.0000 0.0000 555.1497 398.8069 1046.8121 4.5326 0.9619
+0 2004 202 5.00 0.604 14.6648 11.6507 28.6488 0.0000 771.0573 460.5567 1066.0535 2.7710 0.9111
+0 2004 202 19.50 0.396 12.9958 11.0879 0.0000 0.0000 778.8788 598.1342 875.1298 4.8605 0.9088
+0 2004 203 5.00 0.604 14.9814 12.6103 42.8488 0.0000 1015.0333 769.5276 854.1884 2.7176 0.8612
+0 2004 203 19.50 0.396 11.8258 10.8584 0.0000 0.0000 667.8421 580.8344 872.8961 3.7795 0.8567
+0 2004 204 5.00 0.583 9.8854 10.5443 18.4275 10.6100 294.6660 330.9831 1094.6329 2.2630 0.8986
+0 2004 204 19.00 0.417 6.5540 9.7310 0.0000 7.3720 -10.2218 230.3657 1125.6909 1.1626 1.0000
+0 2004 205 5.00 0.583 6.6664 9.6129 9.2814 12.4380 -14.6650 207.8398 1138.4069 2.1035 1.0000
+0 2004 205 19.00 0.417 4.3930 8.5795 0.0000 2.5380 -15.0986 272.3560 991.7464 1.2036 1.0000
+0 2004 206 5.00 0.583 5.0179 9.1936 22.3539 2.0360 -16.6508 281.0159 1032.6042 2.0365 1.0000
+0 2004 206 19.00 0.417 4.1765 7.8825 0.0000 0.0000 69.2138 315.0143 897.5105 2.1400 1.0000
+0 2004 207 5.00 0.583 9.6911 9.6200 51.9773 0.0000 406.8833 410.6133 949.7440 2.4632 0.9758
+0 2004 207 19.00 0.417 8.4420 8.7220 0.0000 0.0000 477.5234 501.3889 775.8799 3.3865 0.9645
+0 2004 208 5.00 0.583 12.8882 9.7021 36.4671 0.5080 918.8483 630.0684 730.8941 3.3107 0.9131
+0 2004 208 19.00 0.417 10.0995 9.3615 0.0000 0.0000 588.0070 525.7194 800.2385 5.1635 0.9340
+0 2004 209 5.00 0.583 10.9161 10.4686 41.1641 0.7620 582.7880 543.5334 880.0641 4.2029 0.8804
+0 2004 209 19.00 0.417 8.6400 9.4070 0.0000 0.0000 427.2723 488.6919 841.2629 3.1715 0.8755
+0 2004 210 5.00 0.583 8.9714 9.8961 25.7505 0.2170 310.5111 386.7000 985.4225 2.7234 0.8456
+0 2004 210 19.00 0.417 6.7915 8.4865 0.0000 1.6720 244.7572 370.3437 887.9684 2.8844 0.8923
+0 2004 211 5.00 0.583 13.0925 9.8982 49.8927 0.1150 1067.5872 769.6436 611.8568 4.5613 0.8406
+0 2004 211 19.00 0.417 9.4490 9.0450 0.0000 0.0000 693.9141 659.6183 642.0039 3.0330 0.8381
+0 2004 212 5.00 0.583 12.2900 10.0464 46.5876 1.7820 929.8325 723.3334 667.2766 4.4682 0.7980
+0 2004 212 19.00 0.417 10.8945 9.3570 0.0000 0.0000 806.0142 675.8774 650.0912 3.0765 0.7815
+0 2004 213 5.00 0.583 16.3139 10.7829 46.8969 0.0000 1465.0244 889.0143 566.4813 3.1580 0.7667
+0 2004 213 19.00 0.417 14.7115 9.5890 0.0000 0.0000 1328.1067 840.2054 505.7456 3.0090 0.7381
+0 2004 214 5.00 0.583 17.2675 11.4929 42.5238 0.0000 1538.6217 913.7426 608.6395 3.0261 0.6922
+0 2004 214 19.00 0.417 13.5415 10.3085 0.0000 0.0000 1042.2030 737.6313 668.1672 3.1845 0.7115
+0 2004 215 5.00 0.583 14.6836 11.2064 26.6856 3.3040 995.4841 634.6448 852.0348 3.3779 0.6900
+0 2004 215 19.00 0.417 11.3370 10.2830 0.0000 0.5080 588.6575 493.8138 908.6840 5.2170 0.7281
+0 2004 216 5.00 0.583 14.4025 12.3304 49.8870 0.0000 1032.6427 820.1052 777.5363 4.9182 0.6795
+0 2004 216 19.00 0.417 11.7590 10.1255 0.0000 0.0000 872.7034 721.1954 669.3923 2.2625 0.6668
+0 2004 217 5.00 0.583 14.8332 11.8029 47.1146 0.0000 956.4470 650.5217 902.2125 2.4274 0.6254
+0 2004 217 19.00 0.417 11.6555 10.5850 0.0000 0.0000 527.1584 428.7771 1000.7995 3.0770 0.6098
+0 2004 218 5.00 0.583 13.6414 11.5475 30.7337 8.6300 644.2120 429.3177 1087.9490 3.2425 0.6679
+0 2004 218 19.00 0.417 10.2400 10.6445 0.0000 0.2540 291.5330 326.6996 1107.8741 3.6749 0.7664
+0 2004 219 5.00 0.583 14.6407 12.3239 53.1272 0.7620 996.9217 763.0391 836.7225 5.1125 0.7147
+0 2004 219 19.00 0.417 12.0455 10.6985 0.0000 0.0000 817.2258 693.7889 745.4875 3.6985 0.7164
+0 2004 220 5.00 0.583 14.5554 12.0907 46.3547 0.0000 1105.7648 854.6210 718.5364 4.6161 0.6622
+0 2004 220 19.00 0.417 11.2560 10.1635 0.0000 0.0000 1020.7428 925.4197 469.4976 4.1780 0.6751
+0 2004 221 5.00 0.583 10.4393 11.2104 48.9067 0.0000 331.8997 399.3370 1097.1044 2.4836 0.6347
+0 2004 221 19.00 0.417 9.5960 9.8660 0.0000 0.0000 430.1174 454.3171 915.4124 2.4985 0.6494
+0 2004 222 5.00 0.583 12.3804 11.1504 50.2326 0.0000 527.9222 409.9744 1084.1667 2.3624 0.6043
+0 2004 222 19.00 0.417 9.5285 10.6980 0.0000 0.0000 357.3527 452.5576 987.1452 1.8375 0.6174
+0 2004 223 5.00 0.583 14.1286 11.6289 50.3040 0.1170 1055.8593 799.2110 734.5917 3.5929 0.5796
+0 2004 223 19.00 0.417 5.7430 10.7925 0.0000 1.7930 -5.2141 378.5025 1068.9218 1.5939 0.6111
+0 2004 224 5.00 0.583 7.3557 10.5914 23.7362 0.0000 50.9242 304.3384 1125.6688 1.8111 0.5976
+0 2004 224 19.00 0.417 5.5965 9.1590 0.0000 0.0000 127.9249 385.5350 927.3051 1.2911 0.6036
+0 2004 225 5.00 0.583 8.6407 9.9054 48.8807 0.0000 426.6571 530.8624 851.5985 2.0986 0.5780
+0 2004 225 19.00 0.417 7.0590 8.7125 0.0000 0.0000 502.0530 623.8599 653.0370 3.7070 0.5704
+0 2004 226 5.00 0.583 10.9968 9.8229 49.3277 0.0000 836.8007 743.0539 632.3371 2.3574 0.5564
+0 2004 226 19.00 0.417 7.8185 8.6370 0.0000 0.0000 541.3534 605.4979 665.5511 2.9543 0.5649
+0 2004 227 5.00 0.583 12.0696 9.7382 43.1612 0.0000 788.2064 587.7834 779.7281 2.5885 0.5424
+0 2004 227 19.00 0.417 9.1725 9.0775 0.0000 0.0000 530.1296 524.0295 780.5845 3.0400 0.5374
+0 2004 228 5.00 0.583 13.3807 9.8796 37.1722 0.2540 928.1103 604.5761 770.6277 2.6601 0.5144
+0 2004 228 19.00 0.417 10.6445 8.6500 0.0000 0.0000 766.3439 603.1933 668.1154 3.0785 0.5451
+0 2004 229 5.00 0.583 13.4679 10.2839 50.6506 0.0000 922.1197 630.5176 785.0948 2.6735 0.5059
+0 2004 229 19.00 0.438 10.8529 9.3314 0.0000 0.0000 683.0074 553.8019 770.2666 3.7067 0.4886
+0 2004 230 5.50 0.562 12.4885 10.0963 26.3482 3.5580 754.0773 532.8632 855.2639 3.2248 0.4976
+0 2004 230 19.00 0.438 9.8433 9.4786 0.0000 0.0000 559.9517 528.8166 806.9580 4.1324 0.5181
+0 2004 231 5.50 0.562 8.0589 9.7578 16.2866 12.9540 215.1243 345.0727 1013.9135 2.5023 0.5133
+0 2004 231 19.00 0.438 4.4495 8.3686 0.0000 13.7300 -5.8167 262.6255 985.3264 1.8593 0.6162
+0 2004 232 5.50 0.562 5.5367 8.7930 15.6598 9.1480 -10.3511 222.7211 1058.8376 1.6924 0.7174
+0 2004 232 19.00 0.438 5.4252 7.7810 0.0000 0.2540 131.1208 295.0721 909.6580 2.3131 0.7431
+0 2004 233 5.50 0.562 6.7674 8.3281 22.4589 5.3380 121.1023 236.9456 1010.6648 1.6730 0.7508
+0 2004 233 19.00 0.438 6.7557 7.7586 0.0000 0.7620 225.3841 296.8252 905.6433 2.6542 0.8365
+0 2004 234 5.50 0.542 10.1877 9.2150 30.9588 0.0000 497.2426 416.4333 900.4692 2.8462 0.7885
+0 2004 234 18.50 0.458 8.3727 8.6432 0.0000 0.5080 349.1237 370.9092 899.2647 4.6023 0.8019
+0 2004 235 5.50 0.542 10.4327 9.4504 28.6421 2.0360 536.2627 451.7174 886.1656 3.2067 0.7730
+0 2004 235 18.50 0.458 9.9005 8.4914 0.0000 0.0000 629.6487 516.1931 741.0707 4.3105 0.7845
+0 2004 236 5.50 0.542 10.7427 8.7815 19.3226 0.0000 840.1302 676.9858 603.1302 4.9877 0.7475
+0 2004 236 18.50 0.458 9.4895 8.0605 0.0000 0.0000 878.8672 766.3870 458.5986 3.3200 0.7234
+0 2004 237 5.50 0.542 10.4196 8.8412 29.7123 0.2540 837.2098 706.9849 581.1528 2.8792 0.7074
+0 2004 237 18.50 0.458 7.4127 8.0286 0.0000 0.0000 574.8234 620.0925 602.5193 3.6659 0.7062
+0 2004 238 5.50 0.542 11.3769 8.7746 38.5176 0.0000 936.9082 700.7106 583.6459 6.5331 0.6725
+0 2004 238 18.50 0.458 9.8136 8.1864 0.0000 0.0000 772.0686 624.8752 609.6413 3.3782 0.6809
+0 2004 239 5.50 0.542 8.5258 8.5738 24.5117 0.0000 336.2994 339.9808 926.5104 2.2392 0.6417
+0 2004 239 18.50 0.458 4.2709 8.1832 0.0000 2.7900 70.1831 331.7366 902.9538 4.1877 0.6834
+0 2004 240 5.50 0.542 2.7469 7.1069 12.1309 2.7980 31.1481 304.7978 850.3181 2.8061 0.6902
+0 2004 240 18.50 0.458 1.7161 5.7723 0.0000 0.0000 138.3608 374.8251 689.1641 2.0049 0.7957
+0 2004 241 5.50 0.542 8.0492 7.2465 52.2865 0.0000 772.1154 704.3992 468.9749 2.8862 0.7430
+0 2004 241 18.50 0.458 6.9495 5.9564 0.0000 0.0000 689.9141 623.3584 454.0605 2.1309 0.7559
+0 2004 242 5.50 0.542 11.8531 8.0146 50.9263 0.0000 1042.7828 721.7990 510.8059 2.6735 0.7043
+0 2004 242 18.50 0.458 9.0459 6.9000 0.0000 0.0000 749.0431 590.4891 552.1036 3.2627 0.7145
+0 2004 243 5.50 0.542 13.4165 8.4112 42.3742 0.0000 1218.8408 776.4565 485.1188 2.6353 0.6589
+0 2004 243 18.50 0.458 10.2036 7.7186 0.0000 0.0000 897.0637 702.1442 498.8687 3.5627 0.6694
+0 2004 244 5.50 0.542 12.9219 8.4850 36.7914 0.0000 1084.7733 698.3967 565.4261 2.4221 0.6391
+0 2004 244 18.50 0.458 9.0105 7.1391 0.0000 0.0000 677.2396 538.4443 620.9030 3.0223 0.6627
+0 2004 245 5.50 0.542 13.4265 8.8385 44.1338 0.0000 1043.7465 636.4285 659.4079 2.3887 0.6169
+0 2004 245 18.50 0.458 11.7800 7.7382 0.0000 0.0000 1066.9946 732.3801 469.7434 4.2091 0.6273
+0 2004 246 5.50 0.542 15.4238 9.3727 42.2369 0.0000 1430.3933 843.9902 488.4916 3.9547 0.5642
+0 2004 246 18.50 0.458 11.7495 8.4636 0.0000 0.0000 1045.6917 766.7991 489.2025 2.6991 0.5681
+0 2004 247 5.50 0.542 13.9696 9.2162 32.6938 0.2540 1091.3800 647.6678 670.4412 3.3304 0.5430
+0 2004 247 18.50 0.458 10.0368 8.6805 0.0000 0.0000 698.5809 581.0018 691.1783 2.6918 0.5253
+0 2004 248 5.50 0.542 6.1273 7.7977 12.0479 1.7780 48.8388 165.3361 1038.9486 2.3097 0.5159
+0 2004 248 18.50 0.458 2.0801 5.9636 0.0000 5.8460 152.4636 378.9586 698.1932 5.7023 0.5279
+0 2004 249 5.50 0.542 2.4710 5.2192 29.3772 2.5400 171.8205 328.6241 700.3048 7.3938 0.5439
+0 2004 249 18.50 0.458 5.1932 4.8159 0.0000 0.0000 459.2346 435.9563 566.7623 3.9168 0.5584
+0 2004 250 5.50 0.542 8.8800 6.5023 48.7142 0.0000 745.9888 571.7024 553.3677 2.8737 0.5278
+0 2004 250 18.50 0.458 6.9973 5.6577 0.0000 0.0000 462.7942 372.7929 684.9432 2.6073 0.5574
+0 2004 251 5.50 0.542 11.7527 6.9219 47.1832 0.0000 1024.3778 637.0534 519.3409 3.4492 0.5213
+0 2004 251 18.50 0.458 9.1705 6.3759 0.0000 0.0000 827.9371 621.3856 483.9344 3.9964 0.5472
+0 2004 252 5.50 0.542 13.4296 7.6954 42.6349 0.0000 1228.7441 733.8942 476.0545 3.6927 0.5196
+0 2004 252 18.50 0.458 10.5723 7.0405 0.0000 0.0000 976.5693 699.2442 451.6768 5.0414 0.5447
+0 2004 253 5.50 0.542 13.6485 8.2135 43.1544 0.0000 1213.7990 731.3691 512.4448 4.2535 0.5149
+0 2004 253 18.50 0.458 11.0873 7.5927 0.0000 0.0000 934.9346 652.0538 538.8420 3.6173 0.5460
+0 2004 254 5.50 0.542 9.8904 7.9835 18.8072 5.5900 612.9814 458.2181 761.3784 4.7488 0.5309
+0 2004 254 18.50 0.479 7.9583 6.5452 0.0000 1.7800 472.0134 367.5403 748.0631 4.5591 0.5684
+0 2004 255 6.00 0.521 14.2584 8.3224 46.8419 0.0000 1276.2581 732.5091 516.1891 4.5280 0.5178
+0 2004 255 18.50 0.479 11.7143 7.0622 0.0000 0.0000 963.0898 584.8326 568.1012 3.1696 0.5510
+0 2004 256 6.00 0.521 14.5320 8.3980 30.3268 0.0000 1177.7781 610.6700 643.6244 3.4038 0.5076
+0 2004 256 18.50 0.479 12.0961 8.3835 0.0000 0.0000 906.2825 588.4586 660.3477 4.6809 0.5060
+0 2004 257 6.00 0.521 13.7396 9.3720 36.9169 0.0000 1078.4872 667.3913 661.4136 4.0604 0.4955
+0 2004 257 18.50 0.479 7.3717 8.3683 0.0000 0.0000 216.2038 284.7721 963.6747 1.5936 0.5177
+0 2004 258 6.00 0.500 11.5646 8.6846 37.4737 0.2540 987.9100 737.5532 536.5712 5.3629 0.4860
+0 2004 258 18.00 0.500 2.0067 6.3721 0.0000 0.0000 390.4041 643.6379 463.6808 9.3854 0.5266
+0 2004 259 6.00 0.500 7.5108 5.8771 44.1352 0.0000 946.3716 815.8702 256.1577 5.3504 0.4917
+0 2004 259 18.00 0.500 6.8992 5.0354 0.0000 0.0000 789.3275 665.0952 351.1754 2.0450 0.4961
+0 2004 260 6.00 0.500 12.6850 6.8767 45.4803 0.0000 1254.2709 764.5475 377.5836 6.1008 0.4534
+0 2004 260 18.00 0.500 10.5225 6.2429 0.0000 0.0000 1040.5131 710.8239 384.4756 3.3133 0.4610
+0 2004 261 6.00 0.500 14.6050 7.9617 39.7755 0.0000 1375.3796 767.3745 453.3461 3.9422 0.4429
+0 2004 261 18.00 0.500 12.6054 7.2946 0.0000 0.0000 1279.4669 832.3206 336.1924 4.6383 0.4415
+0 2004 262 6.00 0.500 15.1554 8.6637 30.5447 0.0000 1409.9117 796.1083 478.2902 2.8817 0.4314
+0 2004 262 18.00 0.500 11.5742 8.5092 0.0000 0.0000 863.6114 600.9158 657.4619 2.8532 0.4037
+0 2004 263 6.00 0.500 11.3379 9.2963 25.6121 0.0000 692.3434 499.4663 822.6641 4.2258 0.4115
+0 2004 263 18.00 0.500 8.8283 7.9554 0.0000 0.0000 518.6999 451.7435 765.0908 2.7158 0.4273
+0 2004 264 6.00 0.500 10.3842 8.4775 32.5510 0.7620 548.7108 391.0539 866.9254 4.2354 0.4170
+0 2004 264 18.00 0.500 1.4564 6.8183 0.0000 10.6720 18.6285 332.7841 803.0434 1.9073 0.4459
+0 2004 265 6.00 0.500 -2.1354 4.8038 5.2242 11.4380 3.0043 353.3486 647.9067 1.4769 0.4410
+0 2004 265 18.00 0.500 -3.9083 4.1067 0.0000 4.0680 12.7856 387.9786 570.8891 1.2407 0.4863
+0 2004 266 6.00 0.500 -1.2249 3.6729 39.8942 1.0160 189.6549 432.9625 500.4426 3.5323 0.5039
+0 2004 266 18.00 0.500 -2.8204 3.2825 0.0000 0.5080 136.6677 423.8281 487.2830 7.9113 0.5195
+0 2004 267 6.00 0.500 0.7722 2.9754 19.3359 0.0000 165.7061 274.4119 619.4698 6.1446 0.5355
+0 2004 267 18.00 0.500 3.4229 2.9017 0.0000 0.0000 358.3912 328.9502 560.8497 5.3433 0.5709
+0 2004 268 6.00 0.500 5.2562 2.9075 28.8967 0.0000 426.2172 285.7226 604.4437 2.8080 0.6516
+0 2004 268 18.00 0.500 2.2850 2.8046 0.0000 0.0000 198.8963 226.5262 657.9813 2.6412 0.7007
+0 2004 269 6.00 0.500 5.9125 3.3046 28.2020 0.5080 317.0210 154.7475 758.6415 2.3304 0.6839
+0 2004 269 18.00 0.500 4.2450 3.4954 0.0000 0.0000 291.0040 245.7725 677.8057 3.0675 0.7262
+0 2004 270 6.00 0.500 8.2804 4.2942 33.6570 0.0000 662.8242 390.7869 581.3179 3.4200 0.7197
+0 2004 270 18.00 0.500 4.8229 3.9454 0.0000 0.0000 460.7851 406.6472 543.3077 4.1121 0.7484
+0 2004 271 6.00 0.500 5.7812 4.2108 18.0827 1.2740 332.5967 233.4169 733.4247 2.3817 0.7317
+0 2004 271 18.00 0.500 1.3838 4.3783 0.0000 1.0160 10.8206 175.6170 799.8165 1.9329 0.7605
+0 2004 272 6.00 0.500 5.8187 5.0312 25.6680 0.0000 195.1836 140.5223 877.1807 1.6515 0.7427
+0 2004 272 18.00 0.500 5.6825 4.3292 0.0000 0.0000 482.2317 397.2170 575.4945 4.4587 0.7511
+0 2004 273 6.00 0.500 7.4575 5.4296 23.9052 6.3520 543.1945 400.8315 642.4968 2.7800 0.7298
+0 2004 273 18.00 0.500 3.0979 4.5267 0.0000 5.5800 105.0378 188.5572 796.0737 2.7268 0.8860
+0 2004 274 6.00 0.500 1.8008 4.5283 15.8948 0.2540 130.9557 284.3174 700.7700 3.8842 0.8676
+0 2004 274 18.00 0.500 0.5688 3.4987 0.0000 3.3020 61.7119 214.7384 709.0010 2.9797 0.8550
+0 2004 275 6.00 0.500 -0.7829 3.0687 10.1323 4.0640 -1.2027 189.7816 709.3266 1.5121 0.8800
+0 2004 275 18.00 0.500 2.2729 2.7738 0.0000 0.0000 204.9485 231.3499 651.4800 3.7088 0.9248
+0 2004 276 6.00 0.500 4.9300 3.1679 28.9056 0.0000 392.4312 285.2047 620.0569 3.3917 0.9053
+0 2004 276 18.00 0.500 4.0929 3.3133 0.0000 0.0000 355.3441 309.9169 602.9846 5.8175 0.8840
+0 2004 277 6.00 0.500 4.6863 3.7158 15.2265 0.5080 361.0242 300.1277 636.4128 2.4018 0.8807
+0 2004 277 18.00 0.521 1.9984 3.1124 0.0000 0.0000 192.6781 252.5947 649.1987 2.3804 0.8866
+0 2004 278 6.50 0.479 4.7330 3.3500 21.5295 0.2540 306.5878 224.0902 691.8730 1.7890 0.8633
+0 2004 278 18.00 0.521 3.6972 2.6772 0.0000 0.0000 420.2417 362.2238 515.4653 3.0388 0.8694
+0 2004 279 6.50 0.479 5.9578 3.3452 21.0545 0.0000 457.7195 294.8875 621.5358 2.2045 0.8461
+0 2004 279 18.00 0.521 2.0040 3.0372 0.0000 14.4720 154.3232 207.2535 690.3767 2.8766 0.8667
+0 2004 280 6.50 0.479 1.3840 2.0191 10.0162 6.7800 36.0290 67.8542 774.5379 2.4243 0.9845
+0 2004 280 18.00 0.521 2.2612 1.9656 0.0000 0.0000 256.8877 241.1375 598.4341 5.5008 0.9883
+0 2004 281 6.50 0.458 6.0182 2.3936 35.8834 0.0000 606.1069 387.3094 475.2802 6.8800 0.9397
+0 2004 281 17.50 0.542 4.2773 2.7731 0.0000 0.0000 497.2652 404.2529 478.6031 3.1896 0.9514
+0 2004 282 6.50 0.458 7.9914 3.0136 34.2723 0.0000 798.6609 469.5887 427.5106 2.5223 0.9085
+0 2004 282 17.50 0.542 6.2215 2.6650 0.0000 0.0000 742.6159 526.4273 350.7763 3.2681 0.8993
+0 2004 283 6.50 0.458 10.3182 3.1923 35.8714 0.0000 1106.0513 603.2768 304.3788 3.3437 0.8687
+0 2004 283 17.50 0.542 6.4592 2.8100 0.0000 0.0000 762.3842 537.1818 348.0185 4.0231 0.8624
+0 2004 284 6.50 0.458 7.9968 3.2645 33.6059 0.0000 776.5813 464.7205 446.5592 3.1060 0.8534
+0 2004 284 17.50 0.542 1.7959 2.7442 0.0000 0.0000 214.8811 264.0599 617.4451 1.9375 0.8584
+0 2004 285 6.50 0.458 1.2328 2.7255 11.0905 0.0000 67.4224 145.2475 735.3557 2.0409 0.8493
+0 2004 285 17.50 0.542 0.9089 2.4488 0.0000 0.0000 201.8607 278.9802 586.4333 2.9146 0.8538
+0 2004 286 6.50 0.458 6.9491 2.6350 33.3335 0.0000 816.6885 545.9293 330.0365 4.4930 0.8230
+0 2004 286 17.50 0.542 -0.8852 2.6538 0.0000 20.8280 90.2738 250.9671 625.4282 2.3227 0.8424
+0 2004 287 6.50 0.458 -2.9507 2.1173 26.4877 0.5080 117.4756 344.9212 502.5982 3.4500 0.8557
+0 2004 287 17.50 0.542 -2.1588 1.9319 0.0000 0.0000 322.8567 509.5089 328.3129 9.6296 0.8532
+0 2004 288 6.50 0.458 2.7880 1.8214 33.3089 0.0000 368.1596 309.8133 522.2693 8.1105 0.8441
+0 2004 288 17.50 0.542 3.2158 1.7881 0.0000 0.0000 407.6188 329.4934 500.8712 11.4538 0.8548
+0 2004 289 6.50 0.458 4.3905 1.7550 24.6060 0.0000 528.0015 377.5117 451.1486 7.2136 0.8650
+0 2004 289 17.50 0.542 1.5577 1.6846 0.0000 0.0000 370.0165 359.5363 465.5105 6.9100 0.9106
+0 2004 290 6.50 0.458 8.0945 1.6459 33.4187 0.0000 886.0726 473.6417 349.4233 10.1177 0.9277
+0 2004 290 17.50 0.542 6.5700 1.7204 0.0000 0.0000 811.6945 520.6325 306.2495 10.1542 0.9795
+0 2004 291 6.50 0.458 6.7755 2.1750 23.8008 0.0000 787.8408 503.7450 346.9732 4.7550 0.9652
+0 2004 291 17.50 0.542 1.0126 2.0338 0.0000 0.0000 306.0560 357.0581 486.1878 5.3400 1.0000
+0 2004 292 6.50 0.458 1.3878 1.5800 26.9988 5.0840 241.9256 250.9852 568.7196 9.9318 0.9547
+0 2004 292 17.50 0.542 -0.8255 1.4592 0.0000 0.0000 171.5553 278.7997 534.7806 2.5958 0.9444
+0 2004 293 6.50 0.458 2.8417 1.3536 20.2011 0.0000 345.7218 262.7933 545.4645 2.8636 0.9294
+0 2004 293 17.50 0.542 1.4779 1.3331 0.0000 0.0000 261.9069 254.2363 552.9904 2.1088 0.9560
+0 2004 294 6.50 0.458 6.3356 1.5159 27.7889 0.0000 587.7894 295.6811 521.0067 2.4274 0.9447
+0 2004 294 17.50 0.542 5.5627 1.8700 0.0000 0.0000 653.7736 438.5370 396.0934 6.4904 0.9720
+0 2004 295 6.50 0.458 7.5682 2.4914 28.5198 0.0000 775.1346 451.1153 416.9011 3.3879 0.9648
+0 2004 295 17.50 0.542 4.2581 2.1162 0.0000 0.0000 552.9078 431.1370 416.5533 3.4812 0.9712
+0 2004 296 6.50 0.458 -3.0082 1.5427 20.5359 2.7940 169.9676 367.4864 450.3331 13.0027 0.9526
+0 2004 296 17.50 0.542 -4.7554 1.2469 0.0000 0.0000 185.1712 433.0357 369.8955 14.2381 0.9324
+0 2004 297 6.50 0.458 2.6488 1.0089 29.6122 0.0000 469.1547 375.2165 415.9162 5.7155 0.9041
+0 2004 297 17.50 0.542 1.0978 0.9623 0.0000 0.0000 458.8557 448.7806 340.0647 5.0800 0.8903
+0 2004 298 6.50 0.458 -0.3093 0.8200 11.5759 2.0360 220.9055 272.8641 509.0350 3.6541 0.8874
+0 2004 298 17.50 0.542 -2.3165 0.8400 0.0000 0.2540 69.9939 208.8387 574.0365 1.9558 0.8825
+0 2004 299 6.50 0.458 1.4685 0.7991 17.4118 0.7620 255.5401 219.0612 561.8271 2.0353 0.8886
+0 2004 299 17.50 0.542 -0.9130 0.8267 0.0000 0.0000 164.6287 243.9350 538.2913 2.8569 0.8841
+0 2004 300 6.50 0.458 3.0057 0.7940 24.8885 0.0000 368.3799 248.1407 532.4984 2.1372 0.8774
+0 2004 300 17.50 0.562 2.3048 0.7961 0.0000 0.0000 399.2414 320.4778 460.2665 2.6778 0.8695
+0 2004 301 7.00 0.438 6.6752 0.7500 23.0068 0.0000 533.2054 181.3376 597.1680 2.4167 0.8711
+0 2004 301 17.50 0.562 4.9748 0.7924 0.0000 0.0000 483.9405 251.3436 529.2187 2.7059 0.8848
+0 2004 302 7.00 0.438 8.3262 0.8055 28.1418 0.0000 838.4937 370.5709 410.6272 3.7933 0.8991
+0 2004 302 17.50 0.562 -2.3191 0.8390 0.0000 0.7620 248.2077 369.6407 413.1840 6.6770 0.9518
+0 2004 303 7.00 0.438 -6.3148 0.7756 16.5674 1.7780 126.0304 402.7413 377.0081 15.5262 0.9335
+0 2004 303 17.50 0.562 -5.4459 0.7125 0.0000 0.0000 153.8563 400.1823 376.5149 13.5041 0.9147
+0 2004 304 7.00 0.438 -3.8024 0.6904 27.9805 0.0000 250.9457 438.1026 337.5291 5.9471 0.8758
+0 2004 304 17.50 0.562 -5.2570 0.6626 0.0000 0.0000 208.4671 446.1589 328.1329 6.5589 0.8533
+0 2004 305 7.00 0.417 -3.1830 0.6327 18.9930 4.0680 216.4369 375.8672 396.9853 2.2855 0.8349
+0 2004 305 17.00 0.583 -8.8886 0.6457 0.0000 3.3020 50.3477 391.3240 382.1577 2.2537 0.8198
+0 2004 306 7.00 0.417 -10.5005 0.6359 12.4417 3.0480 30.1692 413.3613 359.6447 2.2390 0.8255
+0 2004 306 17.00 0.583 -10.4939 0.6288 0.0000 0.0000 198.8824 574.0901 198.5761 3.3719 0.8152
+0 2004 307 7.00 0.417 0.2674 0.5183 27.8919 0.0000 463.0673 471.5214 295.8622 1.5157 0.8230
+0 2004 307 17.00 0.583 0.5172 0.5269 0.0000 0.0000 507.9328 507.7812 260.0211 3.0521 0.8114
+0 2004 308 7.00 0.417 4.5210 0.4005 26.1933 0.0000 681.0485 456.8059 304.9930 4.8330 0.8072
+0 2004 308 17.00 0.583 -2.6309 0.5687 0.0000 0.0000 227.8398 355.5716 414.2185 2.0863 0.7983
+0 2004 309 7.00 0.417 3.7283 0.4751 28.4924 0.0000 668.4185 483.8915 281.4381 2.5930 0.8000
+0 2004 309 17.00 0.583 2.5771 0.5961 0.0000 0.0000 679.5112 578.1377 192.9606 3.6486 0.7916
+0 2004 310 7.00 0.417 5.5165 0.5911 24.9138 0.0000 884.6167 607.8604 162.9997 10.0055 0.7960
+0 2004 310 17.00 0.583 2.8089 0.6086 0.0000 0.0000 712.4094 598.3558 173.3413 3.2796 0.7892
+0 2004 311 7.00 0.417 5.3343 0.5696 23.5216 0.0000 889.8257 614.4582 155.3759 2.7180 0.7894
+0 2004 311 17.00 0.583 5.0086 0.6025 0.0000 0.0000 843.6384 600.8615 170.5462 5.4161 0.7878
+0 2004 312 7.00 0.417 8.5630 0.5950 24.6557 0.0000 1083.0258 589.3801 181.6642 3.4110 0.7809
+0 2004 312 17.00 0.583 5.7646 0.6074 0.0000 0.0000 829.6563 538.2811 233.3578 2.8279 0.8008
+0 2004 313 7.00 0.417 8.1920 0.5925 18.6325 0.0000 933.9886 468.6160 302.3109 1.6896 0.7921
+0 2004 313 17.00 0.583 5.8036 0.6068 0.0000 0.0000 611.8266 317.6901 453.9231 2.5714 0.8018
+0 2004 314 7.00 0.417 5.1088 0.6079 15.0535 0.0000 577.2114 321.4500 450.2149 3.1150 0.8162
+0 2004 314 17.00 0.583 -1.0439 0.6157 0.0000 2.2860 158.0346 233.0533 538.9863 5.1743 0.8363
+0 2004 315 7.00 0.417 -3.4885 0.6224 4.4575 9.9060 33.6382 206.6745 565.6882 2.8125 0.8430
+0 2004 315 17.00 0.583 -7.6825 0.6314 0.0000 1.0160 23.7729 333.2186 439.5717 1.3333 0.8413
+0 2004 316 7.00 0.417 -1.4926 0.6323 17.7598 0.2540 103.5036 196.4530 576.3826 1.2126 0.8532
+0 2004 316 17.00 0.583 -2.9707 0.6394 0.0000 1.2740 66.1775 220.8819 552.2927 1.4748 0.8582
+0 2004 317 7.00 0.417 -3.4440 0.6438 4.5910 1.0160 19.7971 192.9425 580.4431 1.8162 0.8602
+0 2004 317 17.00 0.583 -5.1675 0.6496 0.0000 0.0000 28.4018 262.3195 511.3458 1.5436 0.8657
+0 2004 318 7.00 0.417 -4.2470 0.6502 14.5988 0.0000 43.0614 245.1811 528.5121 1.5746 0.8696
+0 2004 318 17.00 0.583 -5.8454 0.6534 0.0000 0.0000 25.5473 281.2991 492.5517 1.4361 0.8663
+0 2004 319 7.00 0.417 -1.4475 0.6458 24.6884 0.0000 210.4152 302.7632 470.7209 3.2553 0.8626
+0 2004 319 17.00 0.583 -2.1186 0.6496 0.0000 0.0000 259.2770 379.5949 394.0739 2.8689 0.8650
+0 2004 320 7.00 0.417 3.6218 0.6320 24.0267 0.0000 670.7141 508.6600 264.1612 3.5438 0.8585
+0 2004 320 17.00 0.583 1.8118 0.6337 0.0000 0.0000 498.1421 439.3133 333.5885 3.1757 0.8559
+0 2004 321 7.00 0.417 5.4240 0.6256 19.4925 0.0000 746.1461 473.0648 299.4492 1.8023 0.8506
+0 2004 321 17.00 0.583 1.5453 0.6344 0.0000 0.0000 502.0052 455.6415 317.2928 2.1607 0.8511
+0 2004 322 7.00 0.417 2.1903 0.6261 12.1086 0.0000 469.6508 388.8714 383.6666 1.5961 0.8470
+0 2004 322 17.00 0.583 -2.7552 0.6374 0.0000 0.0000 71.0441 217.1506 555.9296 1.0481 0.8485
+0 2004 323 7.00 0.417 -1.0419 0.6379 20.1909 0.0000 113.7736 188.2011 584.9034 1.6989 0.8519
+0 2004 323 17.00 0.583 -0.9764 0.6418 0.0000 0.0000 321.7464 393.2560 380.0369 4.6125 0.8540
+0 2004 324 7.00 0.417 -1.3023 0.6281 21.3915 0.0000 365.7303 448.8694 323.7670 3.7975 0.8498
+0 2004 324 17.00 0.583 -8.3611 0.5771 0.0000 2.0320 24.6612 349.2316 420.9626 1.6751 0.8444
+0 2004 325 7.00 0.417 -10.8205 0.3566 3.1758 1.2700 29.9350 407.4943 352.2173 1.2495 0.8257
+0 2004 325 17.00 0.583 -10.8918 0.2657 0.0000 3.0480 28.6767 403.1292 352.3031 0.7799 0.8143
+0 2004 326 7.00 0.417 -2.9148 0.2319 19.9874 0.2540 205.4654 333.3608 420.4893 2.3390 0.7951
+0 2004 326 17.00 0.583 -4.4539 0.3182 0.0000 0.0000 142.7910 336.7678 421.1324 8.3429 0.7787
+0 2004 327 7.00 0.417 -1.9142 0.3363 21.8443 0.0000 288.6771 387.0500 371.7052 7.5495 0.7755
+0 2004 327 17.00 0.583 -5.9093 0.3299 0.0000 0.2540 103.4345 344.0813 414.3732 2.3868 0.7767
+0 2004 328 7.00 0.417 -5.8420 0.3088 11.8940 0.2540 98.9561 336.6630 420.7956 1.9810 0.7715
+0 2004 328 17.00 0.604 -9.3331 0.3466 0.0000 0.2540 76.3111 416.6049 342.6334 4.6600 0.7720
+0 2004 329 7.50 0.396 -8.3400 0.2309 11.8655 0.2540 167.4971 476.8299 276.9696 13.3553 0.7689
+0 2004 329 17.00 0.604 -5.9321 0.2477 0.0000 0.0000 92.0783 330.5929 423.9938 12.9438 0.7547
+0 2004 330 7.50 0.396 -2.3421 0.3109 19.4733 0.0000 245.8560 359.3371 398.2216 6.0021 0.7485
+0 2004 330 17.00 0.604 -3.2393 0.3734 0.0000 2.2860 129.4385 281.2329 479.2696 4.5355 0.7476
+0 2004 331 7.50 0.396 -6.6968 0.4180 14.5491 1.0160 92.9876 361.8940 400.7199 5.4890 0.7496
+0 2004 331 17.00 0.604 -11.0669 0.3784 0.0000 0.0000 125.6092 509.4037 251.3382 7.3790 0.7573
+0 2004 332 7.50 0.396 -9.6358 0.2586 10.7200 3.0480 105.4483 448.9883 306.1115 4.0807 0.7559
+0 2004 332 17.00 0.604 -11.5390 0.2663 0.0000 11.4300 31.5733 420.6419 334.8181 0.7086 0.7517
+0 2004 333 7.50 0.396 -14.8347 0.2781 7.5898 2.2860 34.7148 487.8983 268.1130 2.6572 0.7411
+0 2004 333 17.00 0.604 -19.0776 0.2761 0.0000 1.2700 36.4681 555.2621 200.6562 2.0253 0.7309
+0 2004 334 7.50 0.396 -17.0932 0.2311 13.2575 2.5360 37.3495 524.8586 228.9484 1.4414 0.7393
+0 2004 334 17.00 0.604 -13.7779 0.1926 0.0000 0.2540 164.5899 592.4608 159.5451 6.1779 0.7256
+0 2004 335 7.50 0.375 -6.9656 0.1789 21.5065 0.0000 293.7623 560.4300 190.9376 8.8228 0.7208
+0 2004 335 16.50 0.625 -9.4523 0.1894 0.0000 0.0000 252.9328 588.9934 162.8653 6.9440 0.7250
+0 2004 336 7.50 0.375 -10.4306 0.1864 20.3133 0.0000 221.8046 581.8237 169.8943 2.8006 0.7220
+0 2004 336 16.50 0.625 -12.6310 0.1724 0.0000 0.0000 187.6035 595.4666 155.5982 6.3967 0.7210
+0 2004 337 7.50 0.375 -8.2633 0.1631 21.0875 0.0000 300.9467 604.0995 146.5293 9.1328 0.7241
+0 2004 337 16.50 0.625 -8.5103 0.1568 0.0000 0.0000 312.1769 622.8943 127.4414 14.7787 0.7207
+0 2004 338 7.50 0.375 -2.2827 0.1463 20.9320 0.0000 415.5138 513.4103 236.4391 9.0906 0.7149
+0 2004 338 16.50 0.625 -1.6764 0.1755 0.0000 0.0000 450.4121 530.9428 220.2675 5.1150 0.7155
+0 2004 339 7.50 0.375 0.7496 0.2157 20.6587 0.0000 595.7770 567.9457 185.1436 4.0596 0.7083
+0 2004 339 16.50 0.625 -3.6070 0.2592 0.0000 0.0000 317.6810 477.2195 277.9055 2.3992 0.7160
+0 2004 340 7.50 0.375 -6.1567 0.2353 13.3006 0.0000 208.8937 454.9955 299.0118 8.1028 0.7156
+0 2004 340 16.50 0.625 -8.6013 0.1962 0.0000 0.0000 198.9804 513.3016 238.8759 8.8757 0.7121
+0 2004 341 7.50 0.375 -8.5022 0.1425 16.2519 0.0000 216.5672 526.3316 223.3392 6.6317 0.7111
+0 2004 341 16.50 0.625 -9.1357 0.1384 0.0000 0.0000 207.8731 532.9877 216.4938 8.2413 0.7068
+0 2004 342 7.50 0.375 -4.5522 0.1303 20.4962 0.0000 375.4881 562.9673 186.1345 12.8678 0.7113
+0 2004 342 16.50 0.625 -7.0710 0.1265 0.0000 0.0000 212.7048 479.0806 269.8469 3.4913 0.7040
+0 2004 343 7.50 0.375 -6.0144 0.1191 16.5719 0.0000 242.4423 478.2491 270.3330 8.7617 0.7019
+0 2004 343 16.50 0.625 -5.3693 0.1143 0.0000 5.8420 110.3836 325.5656 422.7943 7.0283 0.7000
+0 2004 344 7.50 0.375 -4.7794 0.1476 4.9959 6.0960 83.0853 280.4313 469.4783 11.0972 0.7031
+0 2004 344 16.50 0.625 -5.2380 0.2455 0.0000 1.2700 115.4953 332.5898 421.8969 13.4393 0.7010
+0 2004 345 7.50 0.375 -2.5394 0.3047 14.6266 0.2540 130.4167 252.3102 504.9523 9.3600 0.7059
+0 2004 345 16.50 0.625 -0.5383 0.3493 0.0000 0.2540 234.8246 274.0640 485.3002 11.9157 0.7052
+0 2004 346 7.50 0.375 2.7856 0.3898 17.1661 0.0000 561.6828 437.9438 323.3338 8.4522 0.7073
+0 2004 346 16.50 0.625 1.5147 0.4262 0.0000 0.5080 476.4628 422.4239 340.5801 13.5273 0.7085
+0 2004 347 7.50 0.375 1.4956 0.4496 15.5610 0.0000 417.9227 366.3457 397.7672 7.0872 0.7102
+0 2004 347 16.50 0.625 -0.8025 0.4731 0.0000 0.0000 264.0330 318.3098 446.9193 4.4027 0.7098
+0 2004 348 7.50 0.375 -2.5528 0.4943 11.2913 0.0000 476.4644 607.9565 158.2799 1.6819 0.7026
+0 2004 348 16.50 0.625 -2.5523 0.4879 0.0000 0.0000 393.5401 524.1950 241.7380 2.9407 0.7000
+0 2004 349 7.50 0.375 -0.8492 0.4568 15.0272 0.0000 325.8557 385.0054 379.4506 6.9967 0.7128
+0 2004 349 16.50 0.625 -3.7257 0.4589 0.0000 0.0000 215.6114 388.7157 375.8368 9.1277 0.7140
+0 2004 350 7.50 0.375 -5.3044 0.4478 12.2519 0.0000 238.1468 466.9270 297.1016 5.3611 0.7106
+0 2004 350 16.50 0.625 -8.9117 0.4252 0.0000 1.0160 73.4227 406.5689 356.3877 2.1512 0.7027
+0 2004 351 7.50 0.375 -5.4800 0.3853 19.4928 0.0000 290.4355 517.4753 243.5897 3.6006 0.7043
+0 2004 351 16.50 0.625 -2.4023 0.3393 0.0000 0.0000 520.5510 636.9902 121.9038 7.3607 0.7037
+0 2004 352 7.50 0.375 -0.4405 0.3155 12.4185 0.0000 539.9549 570.1658 187.6065 7.6467 0.7125
+0 2004 352 16.50 0.625 -3.4883 0.3470 0.0000 0.0000 376.8236 536.1959 223.0627 6.3947 0.7072
+0 2004 353 7.50 0.375 -3.6983 0.3477 19.2348 0.0000 378.4519 546.0679 213.2204 6.1150 0.7069
+0 2004 353 16.50 0.625 -3.6710 0.3272 0.0000 0.0000 377.7381 543.7269 214.5982 8.0873 0.7011
+0 2004 354 7.50 0.375 -0.4743 0.3036 17.7975 0.0000 385.6792 419.6345 337.5757 12.4200 0.7092
+0 2004 354 16.50 0.625 -2.2297 0.3300 0.0000 0.0000 444.6901 553.2360 205.2211 10.9377 0.7072
+0 2004 355 7.50 0.375 -3.8144 0.3270 12.5059 0.0000 336.2966 507.7327 250.5813 6.1522 0.7059
+0 2004 355 16.50 0.625 -9.2190 0.3213 0.0000 0.0000 164.8225 499.6771 258.3668 7.1437 0.7011
+0 2004 356 7.50 0.375 -10.9661 0.2587 20.2975 0.0000 230.7655 606.6326 148.4720 6.3378 0.7050
+0 2004 356 16.50 0.625 -16.7850 0.1635 0.0000 1.2700 38.4295 518.5844 232.0646 1.3446 0.6889
+0 2004 357 7.50 0.375 -15.7800 0.1285 8.4094 0.2540 88.1426 551.2496 197.7694 2.4304 0.6832
+0 2004 357 16.50 0.625 -19.7027 0.1237 0.0000 0.5080 65.7685 583.6926 165.1030 2.2003 0.6860
+0 2004 358 7.50 0.375 -23.8706 0.1176 7.1172 0.2540 45.2155 608.7111 139.7988 2.6589 0.6983
+0 2004 358 16.50 0.625 -14.7213 0.0959 0.0000 0.5080 181.6937 612.7976 134.7060 10.6433 0.6820
+0 2004 359 7.50 0.375 -4.4850 0.0650 17.9038 0.0000 346.8549 526.4586 219.6111 12.1244 0.6702
+0 2004 359 16.50 0.625 -5.7293 0.0510 0.0000 0.0000 266.6548 489.2106 256.2120 11.5400 0.6725
+0 2004 360 7.50 0.375 -3.1989 0.0423 19.0097 0.0000 345.0206 477.6100 267.4071 9.8811 0.6695
+0 2004 360 16.50 0.625 -2.9630 0.0422 0.0000 0.0000 415.0109 541.4906 203.5245 2.8263 0.6651
+0 2004 361 7.50 0.375 -0.0373 0.0398 18.7148 0.0000 561.9948 559.9359 184.9655 6.5250 0.6648
+0 2004 361 16.50 0.625 -0.0014 0.0793 0.0000 0.0000 597.3093 600.3503 146.3830 2.1850 0.6635
+0 2004 362 7.50 0.375 1.8808 0.1059 11.5390 0.0000 623.8162 536.0279 211.9422 2.5867 0.6700
+0 2004 362 16.50 0.625 -2.2035 0.1760 0.0000 0.0000 345.5425 446.7524 304.4813 3.9220 0.6635
+0 2004 363 7.50 0.375 -1.2296 0.2042 19.2840 0.0000 361.4281 424.2809 328.2677 11.5117 0.6714
+0 2004 363 16.50 0.625 -0.3410 0.2092 0.0000 0.0000 408.9118 433.0547 319.7277 7.1407 0.6728
+0 2004 364 7.50 0.375 1.6477 0.2237 9.3228 0.0000 428.7167 357.4362 396.0274 2.1120 0.6719
+0 2004 364 16.50 0.625 -3.2704 0.2978 0.0000 4.3180 142.3490 283.1500 473.7906 5.2733 0.6559
+0 2004 365 7.50 0.375 -6.7589 0.3369 5.7252 1.5240 144.2174 413.6302 345.1525 5.6906 0.6461
+0 2004 365 16.50 0.625 -5.8903 0.3226 0.0000 0.0000 218.0539 459.4965 298.6118 7.6087 0.6523
+0 2004 366 7.50 0.375 -0.9645 0.2898 18.3003 0.0000 425.2680 478.5703 277.9920 7.1117 0.6641
+0 2004 366 16.50 0.625 -1.9302 0.0224 0.0000 0.0000 375.3344 458.4439 285.7510 4.3487 0.6589
+0 2005 1 7.50 0.375 -4.9422 -0.2310 19.3143 0.0000 247.3236 432.6330 299.8417 6.9089 0.6558
+0 2005 1 16.50 0.625 -5.8133 -0.2333 0.0000 0.0000 330.8451 540.7859 191.5856 3.2413 0.6616
+0 2005 2 7.50 0.375 -1.0813 -0.2578 14.1997 0.0000 570.7645 606.0820 125.1727 2.6949 0.6686
+0 2005 2 16.50 0.625 -3.6360 -0.2726 0.0000 0.0000 444.6855 580.1255 150.4601 2.8013 0.6587
+0 2005 3 7.50 0.375 -2.4396 -0.2794 19.8203 0.0000 399.4870 488.6564 241.6171 2.6162 0.6548
+0 2005 3 16.50 0.625 -5.1423 -0.2858 0.0000 0.0000 208.9636 397.3948 332.5889 1.7016 0.6487
+0 2005 4 7.50 0.375 -10.7733 -0.3048 6.0853 3.5560 40.3890 383.7578 345.3647 1.3950 0.6466
+0 2005 4 16.50 0.625 -15.5247 -0.3098 0.0000 0.7620 35.6658 473.2491 255.6484 1.5040 0.6706
+0 2005 5 7.50 0.375 -12.5128 -0.3240 11.1741 1.0160 88.2118 455.0261 273.2288 5.8867 0.6657
+0 2005 5 16.50 0.625 -10.1750 -0.3459 0.0000 0.0000 128.6313 458.2062 269.0570 12.8797 0.6611
+0 2005 6 7.50 0.375 -10.0617 -0.3564 18.2339 0.2540 165.7406 492.3476 234.4409 10.7322 0.6610
+0 2005 6 16.50 0.625 -12.8703 -0.3583 0.0000 0.0000 129.0056 516.9232 209.7814 7.8060 0.6600
+0 2005 7 7.50 0.375 -9.4517 -0.3697 20.8672 0.0000 242.2981 549.3151 176.8766 5.6861 0.6572
+0 2005 7 16.50 0.625 -6.3637 -0.3888 0.0000 0.0000 207.4057 428.5004 296.8271 8.0883 0.6511
+0 2005 8 7.50 0.375 -4.3644 -0.4021 6.0458 0.2540 185.2477 342.9638 381.7681 4.7172 0.6371
+0 2005 8 16.50 0.625 -4.3450 -0.3918 0.0000 3.3020 106.4133 263.5186 461.6737 4.7890 0.6357
+0 2005 9 7.50 0.375 -2.0775 -0.3742 11.6943 0.2540 201.9145 273.9565 452.0297 5.3617 0.6376
+0 2005 9 16.50 0.625 -0.5726 -0.3483 0.0000 0.5080 191.5121 200.7592 526.3957 9.1407 0.6374
+0 2005 10 7.50 0.375 0.0112 -0.3169 14.5307 1.7780 209.5542 193.0743 535.5024 10.9400 0.6338
+0 2005 10 16.50 0.625 -2.7042 -0.2866 0.0000 0.5080 151.4017 251.6991 478.2500 4.7669 0.6342
+0 2005 11 7.50 0.375 -0.2972 -0.2644 15.4528 0.5080 284.3365 283.8903 447.0640 6.5933 0.6366
+0 2005 11 16.50 0.625 -4.5753 -0.2438 0.0000 0.2540 125.8765 293.5563 438.3346 2.6089 0.6380
+0 2005 12 7.50 0.375 -7.7967 -0.2303 16.3667 0.0000 144.9875 417.6025 314.9025 5.4154 0.6475
+0 2005 12 16.50 0.625 -12.3857 -0.2212 0.0000 0.5080 118.3395 503.1299 229.7893 11.4913 0.6555
+0 2005 13 7.50 0.375 -11.0250 -0.2299 16.3740 0.0000 156.1696 510.8236 221.6991 11.6728 0.6586
+0 2005 13 16.50 0.625 -10.2253 -0.2492 0.0000 0.0000 225.0522 560.4797 171.1658 12.6603 0.6562
+0 2005 14 7.50 0.396 -7.1400 -0.2768 21.8981 0.0000 335.5462 587.2302 143.1637 7.0521 0.6549
+0 2005 14 17.00 0.604 -9.0790 -0.3009 0.0000 0.0000 231.5271 535.9500 193.3494 5.9548 0.6527
+0 2005 15 7.50 0.396 -7.0811 -0.3267 22.1221 0.0000 264.1780 512.1086 216.0224 12.0937 0.6495
+0 2005 15 17.00 0.604 -6.8055 -0.3469 0.0000 0.0000 270.1778 509.1676 218.0537 6.6083 0.6502
+0 2005 16 7.50 0.396 -2.5942 -0.3712 18.8523 0.0000 250.9039 342.4133 383.7111 9.4679 0.6340
+0 2005 16 17.00 0.604 -4.3300 -0.3793 0.0000 2.2860 80.9915 237.8984 487.8568 2.7169 0.6214
+0 2005 17 7.50 0.396 -3.8811 -0.3684 16.1853 0.2540 156.3038 297.9344 428.3159 5.5858 0.6188
+0 2005 17 17.00 0.604 -2.3054 -0.3476 0.0000 0.0000 393.7853 472.9431 254.2469 10.4376 0.6346
+0 2005 18 7.50 0.396 1.9026 -0.3368 11.3367 0.0000 401.9110 291.7880 435.8861 12.8911 0.6307
+0 2005 18 17.00 0.604 -0.3576 -0.3216 0.0000 0.0000 229.1696 230.3524 498.0132 8.3607 0.6315
+0 2005 19 7.50 0.396 3.0208 -0.2989 22.7564 0.0000 525.4011 353.7747 375.6145 8.6553 0.6370
+0 2005 19 17.00 0.583 4.6571 -0.2778 0.0000 0.0000 732.0507 468.4178 261.9310 7.8579 0.6377
+0 2005 20 7.00 0.417 8.9015 -0.2583 22.8606 0.0000 1100.8793 536.0277 195.2057 4.9025 0.6370
+0 2005 20 17.00 0.583 4.9821 -0.2128 0.0000 0.0000 800.0187 516.3904 216.9123 4.1668 0.6374
+0 2005 21 7.00 0.417 1.8549 -0.1848 17.7997 0.0000 552.8071 451.6731 282.9093 6.1275 0.6398
+0 2005 21 17.00 0.583 -4.5491 -0.1726 0.0000 0.0000 143.5605 313.0500 422.0868 1.5910 0.6489
+0 2005 22 7.00 0.417 1.0316 -0.1818 22.9233 0.0000 438.3303 368.0484 366.6687 2.7740 0.6460
+0 2005 22 17.00 0.583 2.7768 -0.2036 0.0000 0.0000 688.7977 538.8782 194.8442 10.2475 0.6350
+0 2005 23 7.00 0.417 6.3550 -0.2132 23.1342 0.0000 924.2770 552.1366 181.1485 6.0425 0.6455
+0 2005 23 17.00 0.583 2.9786 -0.2000 0.0000 0.0000 675.7228 515.1838 218.7047 2.9543 0.6383
+0 2005 24 7.00 0.417 6.2120 -0.1958 23.9600 0.0000 860.7889 498.2367 235.8416 1.8440 0.6498
+0 2005 24 17.00 0.583 1.0636 -0.1994 0.0000 0.0000 591.2968 527.9005 206.0156 5.0796 0.6442
+0 2005 25 7.00 0.417 1.5678 -0.2033 24.4020 0.0000 583.0287 493.8131 239.9232 3.2778 0.6491
+0 2005 25 17.00 0.583 -2.1817 -0.1953 0.0000 0.0000 405.5457 489.7805 244.3212 2.3165 0.6536
+0 2005 26 7.00 0.417 0.7486 -0.2063 20.7275 0.0000 558.4738 511.5324 222.0649 3.1495 0.6506
+0 2005 26 17.00 0.583 -2.4007 -0.2144 0.0000 0.0000 349.8235 442.4912 290.7412 1.7276 0.6509
+0 2005 27 7.00 0.417 -0.3724 -0.2234 14.6154 0.0000 291.0367 295.9541 436.8688 2.4162 0.6494
+0 2005 27 17.00 0.583 -2.7087 -0.2258 0.0000 0.0000 202.3222 306.2909 426.4211 2.5721 0.6468
+0 2005 28 7.00 0.417 -1.9945 -0.2315 21.0508 0.0000 273.1661 348.4522 383.9974 3.4320 0.6430
+0 2005 28 17.00 0.583 -5.7443 -0.2275 0.0000 0.0000 126.2794 336.3376 396.2963 1.7507 0.6468
+0 2005 29 7.00 0.417 -6.0080 -0.2310 8.8122 2.2900 89.2266 308.9174 423.5551 1.9588 0.6483
+0 2005 29 17.00 0.583 -6.5668 -0.2530 0.0000 14.4820 19.3664 255.7257 475.7485 1.7650 0.6391
+0 2005 30 7.00 0.417 -6.4625 -0.2635 9.5016 4.8340 19.2681 251.6353 479.3619 1.7598 0.6336
+0 2005 30 17.00 0.583 -9.0800 -0.2596 0.0000 20.0700 25.0884 329.7618 401.4138 1.6914 0.6322
+0 2005 31 7.00 0.417 -10.1800 -0.2559 13.9304 1.2700 29.9656 363.5355 367.8069 2.0515 0.6251
+0 2005 31 17.00 0.583 -11.6482 -0.2474 0.0000 0.2540 66.4089 434.3933 297.3358 3.5352 0.6299
+0 2005 32 7.00 0.417 -9.8380 -0.2378 17.8413 1.0160 107.4301 433.2961 298.8669 2.0274 0.6413
+0 2005 32 17.00 0.583 -11.8807 -0.2310 0.0000 0.2540 110.9291 484.0471 248.4260 2.8625 0.6406
+0 2005 33 7.00 0.417 -4.7910 -0.2293 26.8031 0.0000 220.7832 396.7868 335.7652 1.6818 0.6426
+0 2005 33 17.00 0.583 -5.1632 -0.2268 0.0000 0.0000 229.6810 422.4239 310.2426 3.5229 0.6419
+0 2005 34 7.00 0.417 -1.0973 -0.2252 27.3327 0.0000 264.6602 297.6919 435.0490 1.8114 0.6364
+0 2005 34 17.00 0.583 -2.1988 -0.2225 0.0000 0.0000 293.5695 377.4455 355.4178 3.2982 0.6404
+0 2005 35 7.00 0.417 0.7984 -0.2250 27.4578 0.0000 422.1598 371.9373 360.8105 3.4435 0.6366
+0 2005 35 17.00 0.583 -2.5775 -0.2210 0.0000 0.0000 413.3163 512.3813 220.5470 9.8836 0.6416
+0 2005 36 7.00 0.417 -2.5090 -0.2195 22.4030 0.0000 454.8918 551.1401 181.8558 10.4700 0.6381
+0 2005 36 17.00 0.583 -6.0886 -0.2106 0.0000 0.0000 290.0086 510.7055 222.6994 2.4311 0.6421
+0 2005 37 7.00 0.417 -10.7320 -0.2091 9.1017 0.0000 39.8176 388.3435 345.1284 1.8725 0.6443
+0 2005 37 17.00 0.583 -7.7275 -0.2041 0.0000 0.0000 213.0736 481.1394 252.5617 1.7609 0.6439
+0 2005 38 7.00 0.417 -4.6675 -0.2110 17.9971 0.0000 277.9609 453.2397 280.1479 2.3872 0.6457
+0 2005 38 17.00 0.583 -10.0064 -0.2104 0.0000 0.7620 70.8027 397.8752 335.5378 1.8693 0.6438
+0 2005 39 7.00 0.417 -10.6285 -0.2173 21.2044 0.5080 113.0146 458.5476 274.5508 3.6350 0.6423
+0 2005 39 17.00 0.583 -12.5696 -0.2211 0.0000 0.0000 121.6528 510.0377 222.8873 7.1843 0.6424
+0 2005 40 7.00 0.417 -10.1035 -0.2284 28.6667 0.0000 221.9670 549.1450 183.4480 11.1000 0.6421
+0 2005 40 17.00 0.583 -5.5632 -0.2346 0.0000 0.0000 470.2330 673.5376 58.7746 7.0957 0.6392
+0 2005 41 7.00 0.417 -1.2039 -0.2495 29.4320 0.0000 620.6119 661.1608 70.4702 7.3398 0.6423
+0 2005 41 17.00 0.583 -2.3681 -0.2522 0.0000 0.0000 566.2712 654.6460 76.8623 3.0399 0.6389
+0 2005 42 7.00 0.417 1.8821 -0.2634 23.8095 0.0000 762.6763 654.5659 76.4359 1.8565 0.6396
+0 2005 42 17.00 0.583 1.1866 -0.2669 0.0000 0.0000 503.1974 433.8508 296.9907 1.8518 0.6365
+0 2005 43 7.00 0.417 0.9284 -0.2707 13.2749 0.0000 277.2950 219.8071 510.8609 2.6978 0.6306
+0 2005 43 17.00 0.583 -3.6358 -0.2605 0.0000 0.0000 150.5251 284.8561 446.2758 13.0336 0.6333
+0 2005 44 7.00 0.438 -4.5767 -0.2481 28.5832 0.0000 243.3629 412.7238 318.9712 12.0810 0.6377
+0 2005 44 17.50 0.562 -2.5570 -0.2328 0.0000 0.0000 387.9595 486.2816 246.1122 14.9485 0.6351
+0 2005 45 7.00 0.438 -1.6813 -0.2282 28.9838 0.0000 325.2676 387.5902 345.0102 14.5419 0.6347
+0 2005 45 17.50 0.562 -3.3670 -0.2217 0.0000 1.7780 193.0858 320.7423 412.1538 6.3219 0.6364
+0 2005 46 7.00 0.438 -9.0195 -0.2137 9.1027 4.5720 52.3084 359.3158 373.9459 2.7776 0.6365
+0 2005 46 17.50 0.562 -6.8819 -0.2084 0.0000 0.0000 127.7007 372.9597 360.5454 8.8519 0.6394
+0 2005 47 7.00 0.438 -6.9348 -0.2075 17.4145 0.7620 139.1483 387.1580 346.3858 7.0362 0.6381
+0 2005 47 17.50 0.542 -9.3554 -0.1985 0.0000 0.0000 305.2828 620.6601 113.2951 6.6438 0.6367
+0 2005 48 6.50 0.458 -2.4672 -0.2010 31.6635 0.0000 559.3118 650.7932 83.0479 4.8268 0.6387
+0 2005 48 17.50 0.542 -4.4004 -0.1954 0.0000 0.0000 488.4799 656.5819 77.5135 2.6954 0.6408
+0 2005 49 6.50 0.458 0.7262 -0.2028 28.0725 0.0000 674.9172 626.1086 107.6517 3.9564 0.6391
+0 2005 49 17.50 0.542 -1.1867 -0.2082 0.0000 0.0000 286.5335 329.4898 404.0235 5.4946 0.6383
+0 2005 50 6.50 0.458 -1.1202 -0.2170 24.9526 1.0160 315.2513 350.9793 382.1306 8.7891 0.6337
+0 2005 50 17.50 0.542 -5.0885 -0.2128 0.0000 0.7620 148.1015 339.1973 394.1040 6.7254 0.6370
+0 2005 51 6.50 0.458 -3.0771 -0.2110 33.2058 0.2540 263.5657 380.3840 353.0034 7.7195 0.6366
+0 2005 51 17.50 0.542 -5.6515 -0.2012 0.0000 2.2860 163.9983 373.8237 360.0069 6.5985 0.6393
+0 2005 52 6.50 0.458 -4.4782 -0.1977 29.8893 0.2540 240.8504 409.5621 324.4283 7.2268 0.6393
+0 2005 52 17.50 0.542 -5.4538 -0.1920 0.0000 0.0000 176.9270 380.3441 353.9057 2.7746 0.6385
+0 2005 53 6.50 0.458 -5.8050 -0.1903 15.5838 1.2700 51.6834 266.4580 467.8724 2.1323 0.6379
+0 2005 53 17.50 0.542 -5.6265 -0.1855 0.0000 0.0000 76.5398 285.9620 448.5879 1.5196 0.6376
+0 2005 54 6.50 0.458 -7.5755 -0.1832 13.8984 0.5080 25.1334 293.8921 440.7618 1.8205 0.6402
+0 2005 54 17.50 0.542 -9.2112 -0.1799 0.0000 0.0000 65.8803 378.6009 356.2018 1.6212 0.6398
+0 2005 55 6.50 0.458 -4.4482 -0.1793 36.1869 0.0000 313.2566 481.6909 253.1415 2.2944 0.6422
+0 2005 55 17.50 0.542 -7.1750 -0.1755 0.0000 0.0000 215.7743 473.7234 261.2830 2.5871 0.6432
+0 2005 56 6.50 0.458 -3.7414 -0.1819 36.4916 0.0000 343.4445 485.9790 248.7351 2.9764 0.6437
+0 2005 56 17.50 0.542 -7.0127 -0.1811 0.0000 0.0000 214.6626 466.9473 267.8010 3.0123 0.6424
+0 2005 57 6.50 0.458 -4.7768 -0.1927 24.7668 0.0000 265.3674 445.3138 288.9047 3.5632 0.6402
+0 2005 57 17.50 0.542 -7.4365 -0.1989 0.0000 0.0000 148.3159 411.2173 322.7203 3.2112 0.6417
+0 2005 58 6.50 0.458 -5.7582 -0.2089 31.2200 0.0000 224.2824 434.0824 299.4004 3.2163 0.6420
+0 2005 58 17.50 0.542 -7.4927 -0.2130 0.0000 0.2540 169.9154 434.9564 298.3361 7.1492 0.6391
+0 2005 59 6.50 0.458 -5.0814 -0.2223 38.1309 0.0000 298.7123 484.8837 247.9882 8.2623 0.6385
+0 2005 59 17.50 0.542 -5.9023 -0.2230 0.0000 0.0000 284.9017 501.8341 231.0065 6.9750 0.6378
+0 2005 60 6.50 0.458 -4.5414 -0.2325 27.1672 0.0000 273.6213 444.3793 288.0250 5.4477 0.6368
+0 2005 60 17.50 0.542 -8.0696 -0.2306 0.0000 0.0000 163.7840 443.6948 288.7974 5.4204 0.6375
+0 2005 61 6.50 0.458 -5.4336 -0.2362 38.7721 0.0000 288.3206 484.7917 247.4451 6.9041 0.6364
+0 2005 61 17.50 0.542 -5.3927 -0.2352 0.0000 0.0000 367.0730 566.9321 165.3536 4.4558 0.6358
+0 2005 62 6.50 0.458 -1.5844 -0.2435 32.0853 0.0000 525.9368 582.8799 149.0283 2.8612 0.6338
+0 2005 62 17.50 0.542 -4.4535 -0.2415 0.0000 0.0000 382.3639 550.2586 181.7384 3.0230 0.6342
+0 2005 63 6.50 0.458 -0.6905 -0.2531 32.1003 0.0000 500.4462 517.5229 213.9452 3.3668 0.6306
+0 2005 63 17.50 0.542 -3.2523 -0.2513 0.0000 0.0000 402.7255 526.2518 205.2993 2.5673 0.6331
+0 2005 64 6.50 0.458 1.3620 -0.2627 30.3213 0.0000 580.8448 500.1631 230.8693 3.2377 0.6288
+0 2005 64 17.50 0.542 -2.3447 -0.2614 0.0000 0.0000 325.3615 412.2901 318.8015 6.3327 0.6267
+0 2005 65 6.50 0.458 -0.7299 -0.2671 38.6508 0.0000 457.1751 474.2615 256.5727 10.5059 0.6257
+0 2005 65 17.50 0.542 -4.7435 -0.2594 0.0000 0.0000 221.4424 392.7814 338.4010 5.4842 0.6252
+0 2005 66 6.50 0.458 -3.6814 -0.2573 36.6688 0.0000 321.9022 457.5246 273.7534 7.5127 0.6259
+0 2005 66 17.50 0.542 -3.9715 -0.2522 0.0000 2.0320 129.8763 280.4406 451.0685 12.9512 0.6237
+0 2005 67 6.50 0.458 -4.1750 -0.2496 29.1631 2.5400 176.2312 334.0918 397.5353 6.7268 0.6240
+0 2005 67 17.50 0.542 -4.7250 -0.2412 0.0000 0.0000 164.6261 341.9953 390.0157 8.9827 0.6227
+0 2005 68 6.50 0.479 1.0642 -0.2434 36.4037 0.0000 358.5550 292.3415 439.5676 10.9748 0.6235
+0 2005 68 18.00 0.521 -0.9123 -0.2360 0.0000 0.0000 356.2387 384.5003 347.7469 8.5140 0.6250
+0 2005 69 6.50 0.479 -2.1033 -0.2295 29.6809 0.5080 244.7776 324.2177 408.3242 5.5370 0.6239
+0 2005 69 18.00 0.521 -4.4308 -0.2163 0.0000 0.0000 267.0598 435.0794 298.0636 6.7532 0.6286
+0 2005 70 6.50 0.479 1.0584 -0.2137 37.4687 0.0000 546.8621 483.7713 249.4892 11.4048 0.6263
+0 2005 70 18.00 0.500 0.0173 -0.2056 0.0000 0.0000 475.1327 464.3743 269.2560 10.6350 0.6234
+0 2005 71 6.00 0.500 1.3855 -0.2030 37.6121 0.0000 534.9579 453.6016 280.1465 6.1163 0.6230
+0 2005 71 18.00 0.500 -8.5755 -0.1931 0.0000 12.2040 53.8608 339.4198 394.7805 2.0921 0.6252
+0 2005 72 6.00 0.500 -11.9629 -0.1871 27.6926 7.3680 33.5695 410.3011 324.1729 2.0925 0.6254
+0 2005 72 18.00 0.500 -15.5267 -0.1828 0.0000 0.0000 47.8040 491.7251 242.9467 2.0550 0.6262
+0 2005 73 6.00 0.500 -15.0171 -0.1765 17.5601 2.2860 35.8588 471.8469 263.1102 2.2217 0.6326
+0 2005 73 18.00 0.500 -16.5083 -0.1752 0.0000 0.5080 95.3861 556.4044 178.6136 3.9441 0.6303
+0 2005 74 6.00 0.500 -10.8138 -0.1791 40.6573 1.0160 224.5974 572.9483 161.8909 2.9633 0.6349
+0 2005 74 18.00 0.500 -11.2238 -0.1817 0.0000 0.2540 160.5424 522.2106 212.5126 6.6154 0.6296
+0 2005 75 6.00 0.500 -7.8350 -0.1871 35.7447 0.0000 181.8105 455.2171 279.2569 8.6121 0.6300
+0 2005 75 18.00 0.500 -7.3888 -0.1915 0.0000 0.0000 150.9767 414.6137 319.6607 6.0567 0.6280
+0 2005 76 6.00 0.500 -5.5142 -0.1975 26.4950 2.5440 216.4390 420.4951 313.5075 10.4021 0.6289
+0 2005 76 18.00 0.500 -9.8850 -0.2044 0.0000 3.0480 79.5718 407.7630 325.9243 6.0087 0.6254
+0 2005 77 6.00 0.500 -9.1446 -0.2068 25.8857 0.2540 125.8739 433.9820 299.5932 4.6588 0.6245
+0 2005 77 18.00 0.500 -7.1079 -0.2087 0.0000 0.0000 200.2543 453.1418 280.3461 5.4154 0.6250
+0 2005 78 6.00 0.500 -2.4269 -0.2102 28.8241 0.0000 317.0595 405.8226 327.5989 3.2271 0.6257
+0 2005 78 18.00 0.500 -3.4292 -0.2117 0.0000 0.0000 212.5585 344.4274 388.9276 2.6337 0.6278
+0 2005 79 6.00 0.500 -2.0894 -0.2123 29.0078 0.7620 216.6102 293.6894 439.6352 2.5050 0.6298
+0 2005 79 18.00 0.500 -5.3950 -0.2150 0.0000 0.5080 97.3318 296.8521 436.3492 5.0315 0.6280
+0 2005 80 6.00 0.500 -5.1283 -0.2118 38.2591 0.0000 223.7868 415.3237 318.0237 9.3375 0.6291
+0 2005 80 18.00 0.500 -6.6775 -0.2072 0.0000 0.0000 187.1653 428.7613 304.7950 8.1992 0.6282
+0 2005 81 6.00 0.500 -2.7290 -0.2021 46.5699 0.0000 352.9727 453.5161 280.2756 3.0500 0.6266
+0 2005 81 18.00 0.500 -3.9996 -0.1984 0.0000 0.0000 165.0801 318.8432 415.1157 1.3169 0.6268
+0 2005 82 6.00 0.500 -1.8804 -0.1960 18.0992 3.5580 213.1682 282.1487 451.9224 2.5346 0.6275
+0 2005 82 18.00 0.500 -5.8696 -0.1901 0.0000 0.5080 147.8021 364.3769 369.9603 3.7367 0.6277
+0 2005 83 6.00 0.500 -5.9354 -0.1869 19.3784 2.2860 109.6843 328.0509 406.4326 2.4308 0.6298
+0 2005 83 18.00 0.500 -11.0408 -0.1824 0.0000 1.2700 29.8711 386.6041 348.0848 1.3253 0.6284
+0 2005 84 6.00 0.500 -8.2075 -0.1780 26.9674 0.2540 26.2529 308.8697 426.0227 1.4302 0.6238
+0 2005 84 18.00 0.500 -11.0763 -0.1748 0.0000 0.2540 29.4057 387.5372 347.4979 1.0080 0.6305
+0 2005 85 6.00 0.500 -8.0067 -0.1727 15.6819 0.0000 25.6603 305.1081 430.0240 1.5550 0.6261
+0 2005 85 18.00 0.500 -7.1008 -0.1706 0.0000 0.0000 173.4369 428.1980 307.0312 7.4017 0.6287
+0 2005 86 6.00 0.500 1.2648 -0.1729 46.9359 0.0000 477.8145 406.8366 328.2880 7.8817 0.6262
+0 2005 86 18.00 0.500 -0.0683 -0.1720 0.0000 0.0000 419.4195 413.9464 321.2182 4.2250 0.6264
+0 2005 87 6.00 0.500 4.5104 -0.1735 42.9588 0.0000 742.6151 488.6615 246.4326 5.6217 0.6289
+0 2005 87 18.00 0.500 -1.4745 -0.1692 0.0000 0.0000 319.1757 372.2798 363.0104 5.9529 0.6268
+0 2005 88 6.00 0.500 -4.0725 -0.1646 22.1190 1.5240 220.2471 377.7635 357.7398 4.6067 0.6278
+0 2005 88 18.00 0.500 -7.4679 -0.1573 0.0000 0.0000 196.0034 463.2610 272.5756 8.5296 0.6333
+0 2005 89 6.00 0.500 -6.4979 -0.1529 22.6393 3.8140 182.9526 420.2746 315.7620 3.2162 0.6284
+0 2005 89 18.00 0.500 -9.7804 -0.1490 0.0000 0.5080 102.7285 431.4171 304.7967 3.2182 0.6356
+0 2005 90 6.00 0.521 -9.3508 -0.1473 19.8100 3.5560 59.2537 377.4783 358.8162 2.3876 0.6293
+0 2005 90 18.50 0.479 -8.5026 -0.1470 0.0000 0.2540 229.5188 524.4842 211.8250 7.5365 0.6326
+0 2005 91 6.00 0.521 -0.4861 -0.1513 51.8298 0.0000 574.9314 586.0668 150.0428 8.8432 0.6327
+0 2005 91 18.50 0.479 -1.2415 -0.1512 0.0000 0.0000 483.7726 531.9425 204.1718 8.8504 0.6316
+0 2005 92 6.00 0.521 2.7134 -0.1576 42.6340 0.0000 693.9458 538.7111 197.1096 3.9700 0.6342
+0 2005 92 18.50 0.479 1.5877 -0.1587 0.0000 0.0000 630.7477 545.8367 189.9358 2.2924 0.6359
+0 2005 93 6.00 0.521 6.1736 -0.1622 50.5679 0.0000 905.4924 545.5594 190.0547 4.1188 0.6371
+0 2005 93 18.50 0.458 3.0514 -0.1376 0.0000 0.0000 668.4595 505.7391 230.9990 3.4305 0.6400
+0 2005 94 5.50 0.542 3.3411 -0.1078 33.8888 0.0000 574.4882 394.3503 343.7504 3.5300 0.6437
+0 2005 94 18.50 0.458 -2.2354 -0.0850 0.0000 0.0000 209.6028 300.2901 438.8589 6.1318 0.6338
+0 2005 95 5.50 0.542 -2.1325 -0.0824 42.7008 0.0000 285.6839 370.2959 368.9714 8.9331 0.6429
+0 2005 95 18.50 0.458 -3.9250 -0.0719 0.0000 0.0000 286.1452 442.8064 296.9440 6.4841 0.6418
+0 2005 96 5.50 0.542 1.4844 -0.0709 49.4722 0.0000 458.4357 372.1849 367.6108 2.4735 0.6498
+0 2005 96 18.50 0.458 1.4963 -0.0576 0.0000 0.0000 572.4222 494.3404 246.0683 3.3182 0.6451
+0 2005 97 5.50 0.542 7.5535 -0.0598 53.1584 0.0000 938.8740 483.3099 256.9969 2.4188 0.6488
+0 2005 97 18.50 0.458 5.1577 -0.0418 0.0000 0.0000 756.6091 473.7562 267.3785 2.4857 0.6325
+0 2005 98 5.50 0.542 4.8502 -0.0371 30.9931 0.2540 666.8306 393.3785 347.9727 4.8481 0.6524
+0 2005 98 18.50 0.458 -2.9281 -0.0267 0.0000 0.0000 267.5986 388.1201 353.7097 7.1309 0.6538
+0 2005 99 5.50 0.542 -1.0532 -0.0213 32.1275 0.0000 408.5170 451.4249 290.6548 3.3765 0.6545
+0 2005 99 18.50 0.458 -4.8641 -0.0210 0.0000 14.2340 59.4752 249.6823 492.4137 2.6914 0.6536
+0 2005 100 5.50 0.542 -7.8888 -0.0215 15.0340 20.1000 22.7301 307.8644 434.2046 2.3677 0.6568
+0 2005 100 18.50 0.458 -8.4859 -0.0172 0.0000 5.5920 49.5757 351.4785 390.7893 2.9033 0.6660
+0 2005 101 5.50 0.542 -6.0900 -0.0157 51.2464 0.2540 175.8135 405.0004 337.3383 7.1842 0.6736
+0 2005 101 18.50 0.458 -4.5645 -0.0125 0.0000 0.0000 205.3779 387.9304 354.5534 10.9505 0.6824
+0 2005 102 5.50 0.542 1.2210 -0.0120 54.4206 0.0000 535.7620 462.2007 280.3065 5.3200 0.6866
+0 2005 102 18.50 0.458 0.1724 -0.0144 0.0000 0.0000 513.5608 504.1414 238.2564 3.1427 0.6778
+0 2005 103 5.50 0.542 4.1450 -0.0128 52.6440 0.0000 706.4600 482.5719 259.8980 2.4724 0.6802
+0 2005 103 18.50 0.458 4.2914 -0.0097 0.0000 0.0000 669.7349 440.8403 301.7756 3.2345 0.6801
+0 2005 104 5.50 0.542 7.5131 -0.0087 52.0838 0.0000 877.3706 432.2076 310.4558 3.8685 0.6882
+0 2005 104 18.50 0.458 0.6137 -0.0005 0.0000 0.0000 154.4084 121.4983 621.5435 1.8895 0.6901
+0 2005 105 5.50 0.542 -0.3096 -0.0032 37.4539 0.0000 262.9664 274.7448 468.1706 2.2550 0.6943
+0 2005 105 18.50 0.458 1.4409 0.0001 0.0000 0.0000 423.4989 353.6227 389.4463 2.8498 0.6942
+0 2005 106 5.50 0.542 6.5662 -0.0056 52.2254 0.0000 693.9342 314.5545 428.2509 2.7338 0.7108
+0 2005 106 18.50 0.458 4.4900 0.0026 0.0000 0.0000 534.3372 294.9370 448.2455 3.1723 0.7344
+0 2005 107 5.50 0.542 8.1646 -0.0019 49.7070 0.0000 894.8925 401.4131 341.5626 3.7900 0.7396
+0 2005 107 18.50 0.458 4.9768 0.0045 0.0000 0.0000 649.7546 380.9399 362.3285 4.4277 0.7725
+0 2005 108 5.50 0.542 8.0742 0.0008 43.0682 0.0000 946.7668 461.1982 281.9001 3.1012 0.7846
+0 2005 108 18.50 0.458 4.3377 0.0063 0.0000 0.0000 720.0963 487.1709 256.1815 4.3750 0.8064
+0 2005 109 5.50 0.542 3.7638 0.0045 40.6720 0.0000 302.6487 105.8910 637.3778 2.7976 0.8209
+0 2005 109 18.50 0.458 0.6844 0.0081 0.0000 0.0000 33.9065 -1.8257 745.2642 1.6509 0.8503
+0 2005 110 5.50 0.542 -1.9956 0.0083 20.7369 1.5240 20.1098 106.5521 636.8926 2.3853 0.8807
+0 2005 110 18.50 0.458 -5.3409 0.0128 0.0000 1.0160 114.9089 323.6038 420.0489 9.3373 0.8822
+0 2005 111 5.50 0.542 -3.2419 0.0100 41.1059 1.7780 188.4811 322.0504 421.4759 7.4692 0.8673
+0 2005 111 18.50 0.458 -3.8882 0.0131 0.0000 0.2540 180.5286 340.3105 403.3568 6.3045 0.8723
+0 2005 112 5.50 0.542 -1.5842 0.0102 33.7854 0.0000 171.2824 240.2980 503.2336 3.0042 0.8573
+0 2005 112 18.50 0.458 -3.0827 0.0130 0.0000 0.0000 19.2452 148.9966 594.6665 1.3163 0.8528
+0 2005 113 5.50 0.542 3.4082 0.0102 36.6459 0.0000 399.0042 210.7391 532.7926 2.2161 0.8313
+0 2005 113 18.50 0.458 1.0380 0.0147 0.0000 0.0000 153.6561 103.5992 640.1417 2.1260 0.8567
+0 2005 114 5.50 0.562 -0.0099 0.0113 12.1458 9.9140 72.7494 71.7328 671.8533 1.5954 0.8440
+0 2005 114 19.00 0.438 -2.1067 0.0125 0.0000 10.9300 5.9307 97.3315 646.3096 2.2299 0.8586
+0 2005 115 5.50 0.562 -2.2266 0.0126 33.0727 4.8240 54.2539 149.5078 594.1382 2.7218 0.8621
+0 2005 115 19.00 0.438 -3.7567 0.0137 0.0000 0.0000 190.1357 344.1856 399.5106 5.0967 0.8580
+0 2005 116 5.50 0.562 -0.7589 0.0145 48.3785 0.2540 238.0190 269.6356 474.0978 4.3048 0.8511
+0 2005 116 19.00 0.438 -2.5021 0.0130 0.0000 0.7620 79.0800 184.6991 558.9663 2.4671 0.8519
+0 2005 117 5.50 0.562 -0.1287 0.0133 32.7639 9.3960 153.9434 152.5642 591.1144 3.9241 0.8536
+0 2005 117 19.00 0.417 -5.8715 0.0141 0.0000 8.1320 14.6384 240.6046 503.1094 2.4134 0.8570
+0 2005 118 5.00 0.583 -4.9967 0.0153 23.5207 10.6660 40.9441 233.5702 510.2003 3.2149 0.8543
+0 2005 118 19.00 0.417 -8.5590 0.0155 0.0000 13.7120 24.1468 318.6182 425.1628 1.5310 0.8557
+0 2005 119 5.00 0.583 -7.2311 0.0162 27.0516 5.3340 62.2233 325.3282 418.4851 2.7934 0.8529
+0 2005 119 19.00 0.417 -7.5365 0.0193 0.0000 0.7620 106.3019 382.6003 361.3541 5.6160 0.8532
+0 2005 120 5.00 0.583 -5.5546 0.0155 28.6884 5.8420 93.5120 308.1655 435.6132 4.3900 0.8418
+0 2005 120 19.00 0.417 -8.6540 0.0147 0.0000 0.7620 26.2665 334.0999 409.6396 1.0914 0.8464
+0 2005 121 5.00 0.583 -6.9514 0.0145 13.6896 6.8580 20.0360 279.7134 464.0191 1.7809 0.8594
+0 2005 121 19.00 0.417 -8.3145 0.0146 0.0000 0.7620 21.5787 320.2330 423.5041 1.2801 0.8564
+0 2005 122 5.00 0.583 -4.5711 0.0141 39.5150 1.5240 51.4187 232.4523 511.2620 1.9669 0.8515
+0 2005 122 19.00 0.417 -4.5675 0.0153 0.0000 0.0000 119.8827 303.7150 440.0544 2.7700 0.8655
+0 2005 123 5.00 0.583 -0.7554 0.0153 41.1712 0.5080 201.2990 234.4169 509.3518 2.0690 0.8678
+0 2005 123 19.00 0.417 -1.1452 0.0158 0.0000 0.2540 195.5392 246.8793 496.9109 2.9675 0.8898
+0 2005 124 5.00 0.583 2.8553 0.0153 47.2168 0.0000 423.1318 269.3873 474.3799 3.6382 0.8868
+0 2005 124 19.00 0.417 2.3524 0.0151 0.0000 0.0000 344.5768 227.9675 515.7927 2.8380 0.8930
+0 2005 125 5.00 0.583 5.5539 0.0155 44.3294 0.2540 503.1460 190.5491 553.2313 3.0532 0.8927
+0 2005 125 19.00 0.417 4.6190 0.0160 0.0000 0.0000 472.7285 226.1751 517.6267 2.4800 0.9006
+0 2005 126 5.00 0.583 6.7339 0.0165 33.9197 0.0000 597.2090 207.6279 536.1987 3.1286 0.9050
+0 2005 126 19.00 0.417 1.0711 0.0161 0.0000 0.0000 250.8320 196.6526 547.1561 5.6335 0.9430
+0 2005 127 5.00 0.583 -0.2450 0.0166 41.2300 5.5920 278.9018 288.3240 455.5059 6.8111 0.9401
+0 2005 127 19.00 0.417 -1.1651 0.0160 0.0000 2.7940 125.0807 177.5634 566.2407 14.1665 0.9340
+0 2005 128 5.00 0.583 1.1513 0.0162 33.8811 0.2540 265.6324 206.1960 537.6141 9.6296 0.9380
+0 2005 128 19.00 0.417 2.1035 0.0161 0.0000 0.0000 381.8030 278.4711 465.3353 3.0205 0.9164
+0 2005 129 5.00 0.583 6.5468 0.0160 43.5863 0.0000 600.1687 216.6435 527.1600 2.6794 0.9135
+0 2005 129 19.00 0.417 5.2355 0.0175 0.0000 0.0000 637.2047 351.5223 392.3488 3.3770 0.9468
+0 2005 130 5.00 0.583 7.8100 0.0155 48.6299 1.0200 697.5097 227.2885 516.4902 3.0350 0.9801
+0 2005 130 19.00 0.417 2.0149 0.0172 0.0000 8.9020 -4.9692 -107.0597 850.9193 2.7480 1.0000
+0 2005 131 5.00 0.583 -1.3698 0.0162 14.1062 16.2500 1.9000 61.8611 681.9490 3.2673 1.0000
+0 2005 131 19.00 0.417 -3.9885 0.0196 0.0000 0.0000 79.7297 243.3102 500.6603 2.9635 1.0000
+0 2005 132 5.00 0.583 -0.0303 0.0189 50.6471 0.5080 313.2230 311.6812 432.2544 4.7886 1.0000
+0 2005 132 19.00 0.417 -1.5980 0.0174 0.0000 1.5240 158.1851 228.9822 514.8867 4.8725 1.0000
+0 2005 133 5.00 0.583 1.6369 0.0180 53.5484 0.2540 312.7012 225.1349 518.7609 3.7043 0.9883
+0 2005 133 19.00 0.417 0.0604 0.0219 0.0000 0.0000 260.4264 257.8758 486.1987 2.3235 1.0000
+0 2005 134 5.00 0.583 4.3852 0.0187 52.3655 0.0000 517.7307 280.5793 463.3463 2.8193 0.9886
+0 2005 134 19.00 0.417 2.2885 0.0208 0.0000 0.0000 222.5343 109.5248 634.4989 2.1999 1.0000
+0 2005 135 5.00 0.583 7.5746 0.0175 56.3494 0.0000 729.1901 277.9042 465.9669 3.1257 1.0000
+0 2005 135 19.00 0.417 6.3415 0.0192 0.0000 0.0000 570.9215 212.9607 530.9890 2.3640 1.0000
+0 2005 136 5.00 0.583 8.9200 0.0174 29.7465 0.0000 689.6961 125.7046 618.1616 2.2087 1.0000
+0 2005 136 19.00 0.417 8.4025 0.0207 0.0000 0.0000 699.8271 192.5816 551.4374 3.4625 1.0000
+0 2005 137 5.00 0.583 3.6639 0.0198 45.5766 0.0000 506.0397 314.1140 429.8645 6.7721 1.0000
+0 2005 137 19.00 0.417 1.8495 0.0238 0.0000 0.0000 445.6874 355.4074 388.7550 6.5480 1.0000
+0 2005 138 5.00 0.583 6.6265 0.0204 38.6067 0.0000 651.0508 247.6395 496.3654 5.0396 1.0000
+0 2005 138 19.00 0.417 7.9590 0.0228 0.0000 0.0000 673.2627 200.0979 544.0206 7.7210 1.0000
+0 2005 139 5.00 0.583 12.7639 0.0195 62.5173 0.0000 1106.2484 199.3580 544.6056 3.2946 1.0000
+0 2005 139 19.00 0.417 11.1460 0.0766 0.0000 0.0000 1016.0634 282.9452 463.6659 2.3210 1.0000
+0 2005 140 5.00 0.583 16.4664 0.0826 64.1276 0.0000 1671.0188 359.2456 387.6424 3.0375 1.0000
+0 2005 140 19.00 0.417 14.0720 0.0675 0.0000 0.0000 1356.1112 336.6594 409.5263 6.4605 1.0000
+0 2005 141 5.00 0.583 14.6907 0.0564 50.9223 0.0000 1342.2655 241.2813 504.3912 5.0193 1.0000
+0 2005 141 19.00 0.417 9.6255 0.0742 0.0000 0.0000 559.3908 -45.7270 792.2209 2.5350 1.0000
+0 2005 142 5.00 0.583 15.4604 0.0676 63.4125 0.0000 1427.5801 240.2646 505.9243 2.6714 1.0000
+0 2005 142 19.00 0.417 10.5720 0.0758 0.0000 0.0000 849.6189 165.0416 581.5289 4.1105 1.0000
+0 2005 143 5.00 0.604 14.1279 0.0673 49.7210 0.0000 1184.0682 153.8464 592.3304 3.6410 1.0000
+0 2005 143 19.50 0.396 9.0642 0.0752 0.0000 0.0000 715.7031 159.3891 587.1516 3.0700 1.0000
+0 2005 144 5.00 0.604 12.4990 0.0693 49.5923 0.0000 966.5037 102.0573 644.2106 2.5480 1.0000
+0 2005 144 19.50 0.396 7.5321 0.0751 0.0000 0.0000 577.7013 130.2936 616.2422 2.7653 1.0000
+0 2005 145 5.00 0.604 6.3579 0.0687 63.6404 0.0000 448.6051 87.8756 658.3635 2.6341 1.0000
+0 2005 145 19.50 0.396 -0.1284 0.0760 0.0000 0.0000 73.8088 80.0599 666.5199 1.2137 1.0000
+0 2005 146 5.00 0.604 2.8241 0.0758 61.3556 0.0000 192.0650 42.3355 704.2331 2.4583 1.0000
+0 2005 146 19.50 0.396 1.1054 0.1026 0.0000 0.0000 95.3932 44.8488 702.9673 1.3203 1.0000
+0 2005 147 5.00 0.604 5.9853 0.1009 64.7962 0.0000 494.5869 141.6317 606.1038 2.4520 1.0000
+0 2005 147 19.50 0.396 3.9989 0.1279 0.0000 0.0000 411.1519 206.9128 542.0781 2.9589 1.0000
+0 2005 148 5.00 0.604 8.5372 0.1354 38.2799 0.0000 870.1505 354.9846 394.3546 2.5031 1.0000
+0 2005 148 19.50 0.375 5.0550 0.1523 0.0000 0.0000 518.7373 247.5137 502.6127 2.2489 1.0000
+0 2005 149 4.50 0.625 1.4616 0.1535 19.0560 2.5250 28.5630 -36.3887 786.5736 1.3325 1.0000
+0 2005 149 19.50 0.375 1.2687 0.1454 0.0000 1.7750 2.8081 -51.5254 801.3333 1.6856 1.0000
+0 2005 150 4.50 0.625 2.8989 0.1577 20.9908 18.5760 16.0178 -128.9315 879.3105 2.0173 1.0000
+0 2005 150 19.50 0.375 0.4680 0.1770 0.0000 0.2540 96.2684 81.8950 669.3846 5.4706 1.0000
+0 2005 151 4.50 0.625 4.8648 0.1843 62.6580 0.0000 510.0041 242.4157 509.2062 3.4633 1.0000
+0 2005 151 19.50 0.375 4.6072 0.1930 0.0000 0.0000 504.3700 266.9384 485.0882 2.4150 1.0000
+0 2005 152 4.50 0.625 9.3140 0.4141 48.0771 0.0000 800.3850 222.6557 539.8962 4.2150 1.0000
+0 2005 152 19.50 0.375 4.3594 0.8134 0.0000 1.5280 196.8173 0.9718 780.6323 4.4311 1.0000
+0 2005 153 4.50 0.625 5.3903 1.4548 26.8189 3.0480 79.4784 -148.6568 962.4296 2.2230 1.0000
+0 2005 153 19.50 0.375 2.1711 1.0292 0.0000 0.2540 128.9277 70.1992 722.0933 3.0628 1.0000
+0 2005 154 4.50 0.625 3.6045 1.3917 20.5459 5.7840 99.4978 -24.0704 834.6049 3.1553 1.0000
+0 2005 154 19.50 0.375 0.1239 0.9643 0.0000 0.2540 134.7243 174.2016 614.7883 4.5328 1.0000
+0 2005 155 4.50 0.625 1.3312 1.0529 28.0591 1.0160 155.5910 138.1315 655.5146 3.2950 1.0000
+0 2005 155 19.50 0.375 2.1683 1.1977 0.0000 0.2540 197.0437 147.2593 653.2582 5.1083 1.0000
+0 2005 156 4.50 0.625 7.3780 2.9053 64.8504 0.0000 642.5072 350.2331 543.8173 4.8767 1.0000
+0 2005 156 19.50 0.375 7.0683 2.2650 0.0000 0.0000 597.3159 300.0813 555.7353 2.9344 1.0000
+0 2005 157 4.50 0.625 12.6507 4.2540 60.2608 0.0000 1224.8213 567.6691 408.0145 4.1590 1.0000
+0 2005 157 19.50 0.375 7.7022 3.6094 0.0000 0.0000 767.1144 492.6245 438.1751 3.6733 1.0000
+0 2005 158 4.50 0.625 8.7633 4.4217 50.3195 0.0000 781.3248 466.6876 514.8424 3.7197 1.0000
+0 2005 158 19.50 0.375 5.0878 3.1494 0.0000 0.0000 707.5996 590.7410 313.7241 6.1656 1.0000
+0 2005 159 4.50 0.625 8.4953 4.5157 52.0866 0.0000 803.7847 529.1450 459.5142 3.1677 1.0000
+0 2005 159 19.50 0.375 5.0317 3.7217 0.0000 0.0000 458.6380 368.2564 568.9499 1.7187 1.0000
+0 2005 160 4.50 0.625 5.8887 4.4967 47.9191 1.5240 401.5457 307.0865 678.7994 3.6710 1.0000
+0 2005 160 19.50 0.375 1.4070 4.0256 0.0000 3.5600 20.1610 162.8029 791.5110 1.0837 1.0000
+0 2005 161 4.50 0.625 1.5428 3.1407 16.4815 7.1140 55.7480 139.7321 763.4523 1.5038 1.0000
+0 2005 161 19.50 0.375 0.9421 2.1983 0.0000 0.0000 194.4937 258.4856 593.5345 3.7917 1.0000
+0 2005 162 4.50 0.625 4.5615 4.3077 48.0064 3.3020 354.2532 340.9668 638.2513 3.1743 1.0000
+0 2005 162 19.50 0.375 2.6972 3.5206 0.0000 5.3280 88.9080 134.5847 790.3448 2.4344 1.0000
+0 2005 163 4.50 0.625 2.5817 3.5843 39.4852 1.7780 206.2417 260.8005 668.1045 7.1060 1.0000
+0 2005 163 19.50 0.375 -0.5254 2.0589 0.0000 0.7620 205.2016 330.3246 514.3880 14.8939 1.0000
+0 2005 164 4.50 0.625 3.9045 3.4530 67.5967 0.0000 516.1082 472.6521 453.1814 11.0843 1.0000
+0 2005 164 19.50 0.375 5.1900 3.2183 0.0000 0.0000 594.0408 476.2298 431.5278 4.6494 1.0000
+0 2005 165 4.50 0.625 8.9563 6.1107 64.7627 0.0000 627.7709 427.1371 672.5396 3.0430 1.0000
+0 2005 165 19.50 0.375 8.9183 4.7411 0.0000 0.0000 688.1428 396.0818 602.3724 2.4756 0.9853
+0 2005 166 4.50 0.625 12.4263 6.6527 42.8631 0.0000 977.7722 504.9412 627.7359 2.9617 0.9498
+0 2005 166 19.50 0.375 8.6661 5.6089 0.0000 0.0000 651.1428 433.5967 619.9567 3.6406 0.9551
+0 2005 167 4.50 0.625 12.8207 7.5047 50.2357 3.3000 964.4210 511.9732 683.4596 2.3840 0.9092
+0 2005 167 19.50 0.375 9.1700 5.8256 0.0000 0.0000 502.7306 259.8394 807.9233 3.2111 0.9449
+0 2005 168 4.50 0.625 15.1620 8.9003 64.9904 0.0000 1136.4836 551.1382 758.7430 3.2047 0.9089
+0 2005 168 19.50 0.375 13.0167 7.6478 0.0000 0.0000 1169.1716 707.7834 487.5499 4.2667 0.9118
+0 2005 169 4.50 0.625 16.4113 9.9687 58.8128 0.0000 1396.9344 751.6430 641.9296 2.4309 0.8668
+0 2005 169 19.50 0.375 12.8544 8.8189 0.0000 1.0160 979.8354 623.0370 662.0261 4.9200 0.8709
+0 2005 170 4.50 0.625 15.7890 9.7497 38.4012 1.0200 1171.7089 573.1297 793.3154 2.8000 0.8304
+0 2005 170 19.50 0.375 13.0583 7.7200 0.0000 0.0000 1030.2124 569.8207 630.3663 3.6339 0.8494
+0 2005 171 4.50 0.625 15.6597 9.7063 36.8378 0.0000 1247.8802 665.6372 697.9661 3.1097 0.7989
+0 2005 171 19.50 0.375 13.7367 8.2856 0.0000 0.0000 1111.4674 623.4089 618.7809 3.9061 0.7757
+0 2005 172 4.50 0.625 16.6887 10.0720 47.1584 3.3000 1302.6586 622.2343 774.3093 2.8143 0.7526
+0 2005 172 19.50 0.375 11.3539 8.4244 0.0000 1.2740 719.9977 470.0706 783.6558 4.5194 0.7487
+0 2005 173 4.50 0.625 15.7963 9.5707 39.3991 0.0000 1178.7343 561.5828 786.1842 3.3180 0.7357
+0 2005 173 19.50 0.375 11.4933 8.4789 0.0000 1.5200 727.4707 473.3610 783.8938 4.5244 0.7284
+0 2005 174 4.50 0.625 14.0057 9.6637 56.5289 0.0000 948.5057 540.4286 814.9663 5.5513 0.7138
+0 2005 174 19.50 0.375 10.4450 8.7150 0.0000 0.0000 559.3083 415.4652 859.5616 5.0872 0.7203
+0 2005 175 4.50 0.625 12.3010 10.4643 47.5783 3.5600 699.0392 529.3468 898.6134 3.4337 0.6922
+0 2005 175 19.50 0.375 8.9778 8.3117 0.0000 0.2540 457.3655 405.5187 838.7352 3.6650 0.6981
+0 2005 176 4.50 0.625 13.1377 10.3420 49.2372 0.0000 860.0338 596.0516 823.1927 3.0262 0.6821
+0 2005 176 19.50 0.375 9.2833 8.9267 0.0000 0.2540 518.1533 487.8223 804.5192 5.2594 0.6835
+0 2005 177 4.50 0.625 11.5257 10.1850 56.3449 1.7780 805.3906 686.8353 715.6993 4.0737 0.6616
+0 2005 177 19.50 0.375 9.6911 7.8433 0.0000 0.0000 731.6461 586.9080 621.9853 2.4733 0.6511
+0 2005 178 4.50 0.625 14.7600 9.9750 66.2481 0.0000 1396.7960 934.8257 455.8246 3.4153 0.6190
+0 2005 178 19.50 0.375 12.6617 8.3011 0.0000 0.0000 1332.1265 952.6374 290.5387 3.9878 0.6191
+0 2005 179 4.50 0.625 13.3113 9.7213 36.5062 2.7960 1088.3306 742.8825 617.6281 3.6290 0.5987
+0 2005 179 19.50 0.375 7.0217 7.7567 0.0000 4.3180 357.9127 409.1913 792.7646 5.2122 0.6234
+0 2005 180 4.50 0.625 11.0757 9.1910 58.2511 0.0000 787.7415 621.9988 698.4337 5.5087 0.5946
+0 2005 180 19.50 0.375 8.6206 7.5528 0.0000 0.0000 579.0652 497.6733 689.7725 2.5367 0.5894
+0 2005 181 4.50 0.625 12.3703 9.6610 59.7559 0.0000 811.6352 566.9368 799.2296 2.3337 0.5752
+0 2005 181 19.50 0.375 9.7928 8.0167 0.0000 0.0000 638.6340 499.0323 723.2368 2.3383 0.5823
+0 2005 182 4.50 0.625 14.4087 10.0187 63.3005 0.0000 1292.3403 870.0297 521.9433 3.9040 0.5509
+0 2005 182 19.50 0.375 12.1956 8.5594 0.0000 0.0000 972.8013 658.3083 604.3745 2.7583 0.5300
+0 2005 183 4.50 0.625 16.0390 10.6990 55.0341 0.0000 1446.4874 890.7975 558.3467 3.3450 0.5186
+0 2005 183 19.50 0.375 11.4689 8.7061 0.0000 0.2540 897.5078 664.6190 610.6879 2.8561 0.5208
+0 2005 184 4.50 0.625 10.3033 9.3203 35.8456 4.5680 772.4896 672.7166 655.7141 6.0497 0.5119
+0 2005 184 19.50 0.375 8.3733 7.3372 0.0000 0.0000 634.4391 556.1671 614.8910 4.2617 0.5168
+0 2005 185 4.50 0.625 12.7283 9.6217 62.3461 0.0000 1114.0342 831.3355 528.9638 3.6663 0.5085
+0 2005 185 19.50 0.375 9.1444 7.7089 0.0000 0.0000 659.2702 546.7357 652.4271 2.6434 0.5139
+0 2005 186 4.50 0.625 14.4377 9.1063 47.7170 0.0000 1342.6235 833.4863 481.4651 3.5330 0.4855
+0 2005 186 19.50 0.375 11.6311 8.0017 0.0000 0.0000 973.7198 669.3647 551.3265 3.6017 0.4865
+0 2005 187 4.50 0.625 16.1350 10.1180 52.2072 0.0000 1437.5779 827.4232 571.2126 2.7076 0.4655
+0 2005 187 19.50 0.375 13.1294 8.4472 0.0000 0.0000 1129.2338 716.2892 538.7623 3.3539 0.4818
+0 2005 188 4.50 0.625 17.6420 11.4063 60.6514 0.0000 1663.3043 985.2412 534.4833 3.7277 0.4559
+0 2005 188 19.50 0.375 14.1339 9.4183 0.0000 0.0000 1190.1597 751.4822 581.6280 3.1906 0.4565
+0 2005 189 4.50 0.625 17.3117 10.6587 41.2838 0.0000 1587.8750 879.2833 565.7640 3.2241 0.4433
+0 2005 189 19.50 0.375 14.0978 9.5800 0.0000 0.0000 1155.1351 732.0507 612.9311 3.3406 0.4277
+0 2005 190 4.50 0.625 17.3807 11.4943 43.8832 0.0000 1644.7313 998.6090 520.9728 3.8583 0.4250
+0 2005 190 19.50 0.375 13.7394 9.8083 0.0000 0.0000 1173.0461 806.9962 556.8640 4.7722 0.4234
+0 2005 191 4.50 0.625 16.9740 12.3397 55.9344 0.0000 1610.7611 1109.2531 495.4547 5.4977 0.4072
+0 2005 191 19.50 0.375 12.6550 10.3494 0.0000 0.0000 1028.6180 810.8631 600.9705 2.7600 0.4220
+0 2005 192 4.50 0.625 17.8523 12.4593 63.1587 0.0000 1693.3221 1091.7620 529.8421 2.8256 0.4075
+0 2005 192 19.50 0.375 13.2044 11.0078 0.0000 0.0000 776.6234 568.9891 900.0394 2.5696 0.4085
+0 2005 193 4.50 0.625 16.7957 13.1667 54.7371 0.0000 1117.2499 725.1807 966.5937 2.6963 0.3970
+0 2005 193 19.50 0.375 13.6861 11.1600 0.0000 0.0000 737.3959 495.5244 987.0695 2.7689 0.4052
+0 2005 194 4.50 0.625 18.2623 13.0583 63.2035 0.0000 1570.9368 974.2934 704.3284 2.7563 0.3913
+0 2005 194 19.50 0.375 15.5900 10.9694 0.0000 0.0000 1292.8323 824.0948 640.6812 3.8050 0.3915
+0 2005 195 4.50 0.625 18.3537 12.7867 30.1816 0.5080 1586.3992 926.1049 718.0233 4.8283 0.3847
+0 2005 195 19.50 0.375 14.0850 10.3056 0.0000 0.0000 984.6530 621.1288 784.7251 2.8422 0.3924
+0 2005 196 4.50 0.625 17.6560 12.1070 46.3425 0.0000 1349.9180 734.4442 842.2822 2.6593 0.3809
+0 2005 196 19.50 0.375 14.0000 10.5433 0.0000 0.0000 1065.3438 732.2971 694.7236 4.0356 0.3858
+0 2005 197 4.50 0.625 19.5117 12.7203 44.2962 0.0000 1854.4974 1045.2914 595.9648 4.7277 0.3647
+0 2005 197 19.50 0.396 17.3116 12.0468 0.0000 0.0000 1489.9591 909.2044 652.8950 5.1858 0.3987
+0 2005 198 5.00 0.604 16.0955 13.7528 47.5248 0.2540 1052.0321 766.9527 974.1323 4.2286 0.3736
+0 2005 198 19.50 0.396 8.8537 11.5763 0.0000 0.0000 196.6568 425.6274 1093.0555 1.1484 0.3845
+0 2005 199 5.00 0.604 13.0255 13.1652 56.5176 0.0000 580.5543 573.5214 1109.1722 2.5136 0.3762
+0 2005 199 19.50 0.396 13.4121 10.5395 0.0000 0.0000 1130.9332 863.7766 565.1453 3.4337 0.3913
+0 2005 200 5.00 0.604 19.4817 12.8272 62.6883 0.0000 2022.8380 1223.8558 432.5110 4.2679 0.3642
+0 2005 200 19.50 0.396 17.2016 11.4132 0.0000 0.0000 1604.6294 980.8855 524.0664 3.5716 0.3747
+0 2005 201 5.00 0.604 21.3366 13.9838 47.9210 0.0000 2150.1343 1194.8168 578.3467 3.5752 0.3554
+0 2005 201 19.50 0.396 16.9816 12.3863 0.0000 0.0000 1429.4078 926.4232 670.7496 3.3932 0.3700
+0 2005 202 5.00 0.604 20.6990 14.0621 42.2665 0.0000 1983.4058 1135.5134 640.8392 3.1679 0.3533
+0 2005 202 19.50 0.396 17.0437 12.0558 0.0000 0.0000 1408.5751 862.1473 702.8318 2.8789 0.3709
+0 2005 203 5.00 0.604 21.0241 14.5000 54.0267 0.0000 1980.2367 1134.2347 697.7951 3.6059 0.3501
+0 2005 203 19.50 0.396 17.7132 12.7500 0.0000 0.0000 1613.6227 1050.3086 580.7035 4.5558 0.3783
+0 2005 204 5.00 0.583 19.1129 14.8486 41.1424 0.0000 1615.1265 1080.1414 781.8295 4.7793 0.3486
+0 2005 204 19.00 0.417 13.4255 12.6590 0.0000 0.0000 784.2593 708.8350 913.3719 3.7495 0.3753
+0 2005 205 5.00 0.583 13.4979 12.5364 26.5695 24.1320 555.6229 451.2111 1158.9811 2.4995 0.4064
+0 2005 205 19.00 0.417 10.7835 10.6060 0.0000 3.8140 323.4880 307.6606 1122.9240 5.4455 0.6317
+0 2005 206 5.00 0.583 12.5754 11.4714 26.4026 0.7620 518.1795 407.5749 1104.2018 3.7354 0.6176
+0 2005 206 19.00 0.417 7.9030 10.1940 0.0000 1.0200 179.9045 359.7278 1036.0056 2.4206 0.6270
+0 2005 207 5.00 0.583 7.1879 10.1354 25.8687 3.8140 61.5549 287.7988 1102.9567 2.1615 0.6178
+0 2005 207 19.00 0.417 4.8985 8.7740 0.0000 0.0000 23.2374 295.8294 984.5365 1.6559 0.6272
+0 2005 208 5.00 0.583 12.0911 10.4582 60.5731 0.0000 779.7961 631.5062 802.4893 2.1816 0.5953
+0 2005 208 19.00 0.417 11.0975 9.0010 0.0000 0.0000 925.1176 750.4167 548.9896 3.5120 0.5972
+0 2005 209 5.00 0.583 16.8289 10.6879 61.6217 0.0000 1640.8184 998.1505 455.1330 2.9318 0.5533
+0 2005 209 19.00 0.417 14.3060 9.5710 0.0000 0.0000 1311.2076 864.9858 479.0800 4.1160 0.5668
+0 2005 210 5.00 0.583 19.0289 11.8846 51.9432 0.0000 1980.6925 1159.7719 400.3758 3.0803 0.5223
+0 2005 210 19.00 0.417 15.7585 9.9850 0.0000 0.0000 1484.5973 908.4426 470.6211 3.9430 0.5140
+0 2005 211 5.00 0.583 18.3939 11.6139 35.7265 0.0000 1717.3549 953.8265 572.8721 3.5425 0.4856
+0 2005 211 19.00 0.417 13.1885 10.4015 0.0000 0.0000 931.1915 668.5342 745.7133 2.2005 0.4883
+0 2005 212 5.00 0.583 15.8657 11.8929 40.3209 0.2540 1147.3250 730.3542 825.6546 2.4887 0.4515
+0 2005 212 19.00 0.417 13.1200 10.3425 0.0000 0.0000 849.6251 588.1943 821.3860 2.5035 0.4668
+0 2005 213 5.00 0.583 16.4264 12.2871 40.6797 0.2540 1240.3922 794.2731 803.9363 3.4886 0.4451
+0 2005 213 19.00 0.417 13.8955 10.9035 0.0000 0.2540 958.1000 666.7075 790.8518 4.3175 0.4081
+0 2005 214 5.00 0.583 14.9921 12.5332 40.3328 3.0440 958.0289 689.0390 929.3287 4.5746 0.4207
+0 2005 214 19.00 0.417 11.6310 10.3315 0.0000 0.2540 601.0746 485.2509 922.1183 4.4805 0.3870
+0 2005 215 5.00 0.583 12.1229 10.9832 21.7965 6.8620 514.3773 401.9671 1064.3422 2.2634 0.4117
+0 2005 215 19.00 0.417 8.0545 9.4660 0.0000 3.0460 183.6436 285.3911 1049.0690 2.8912 0.4666
+0 2005 216 5.00 0.583 6.1943 8.7689 8.3293 11.4360 5.8707 192.8335 1085.6145 1.5895 0.5704
+0 2005 216 19.00 0.417 5.9995 8.3960 0.0000 3.5560 -8.7938 162.4260 1087.1425 1.3968 0.6211
+0 2005 217 5.00 0.583 10.3532 10.8568 46.6203 0.0000 238.2774 274.4485 1190.5151 1.8556 0.6058
+0 2005 217 19.00 0.417 9.6920 9.2475 0.0000 0.0000 417.2702 381.5399 936.9501 3.7000 0.6260
+0 2005 218 5.00 0.583 14.4114 10.6371 45.5942 0.0000 1126.3599 760.0035 682.3487 2.9279 0.5745
+0 2005 218 19.00 0.417 10.6800 8.9570 0.0000 0.0000 895.9426 753.6888 542.0297 3.3030 0.5902
+0 2005 219 5.00 0.583 15.6629 10.6500 54.1826 0.0000 1353.3417 851.6951 597.6492 2.9168 0.5556
+0 2005 219 19.00 0.417 12.8255 9.0820 0.0000 0.0000 1042.7806 710.7863 594.4222 3.2995 0.5581
+0 2005 220 5.00 0.583 16.9161 10.6921 44.6530 0.0000 1410.0750 758.0145 689.0824 2.7889 0.5213
+0 2005 220 19.00 0.417 14.5360 9.8860 0.0000 0.0000 1152.7004 706.5323 663.0655 3.4275 0.4977
+0 2005 221 5.00 0.583 13.8382 11.0446 26.6835 0.4000 786.3121 511.1596 961.2374 2.7766 0.5108
+0 2005 221 19.00 0.417 10.3300 10.1610 0.0000 1.0000 292.8074 275.8112 1116.8398 2.3586 0.4872
+0 2005 222 5.00 0.583 13.1643 11.1036 32.4327 0.5000 555.9784 359.0078 1118.5160 2.5432 0.5014
+0 2005 222 19.00 0.417 9.8745 10.3630 0.0000 5.0160 206.8703 244.0230 1165.9662 2.3530 0.4894
+0 2005 223 5.00 0.583 11.5836 11.0161 28.5407 1.7780 355.1070 301.0550 1168.1849 2.3634 0.5024
+0 2005 223 19.00 0.417 8.7565 9.3400 0.0000 0.0000 315.1202 361.9806 963.5397 2.2890 0.5209
+0 2005 224 5.00 0.583 10.7086 10.2093 37.7721 0.0000 352.4055 312.4137 1089.2537 2.0951 0.5280
+0 2005 224 19.00 0.417 9.0930 8.9570 0.0000 0.0000 510.6038 493.0262 801.6880 2.7125 0.5343
+0 2005 225 5.00 0.583 5.9221 8.9721 17.7675 3.8100 -8.1594 213.6262 1081.3857 1.8011 0.5337
+0 2005 225 19.00 0.417 5.3075 7.8375 0.0000 3.3000 58.5200 234.1805 974.6053 2.0624 0.5832
+0 2005 226 5.00 0.583 9.9718 9.0468 46.1963 0.0000 402.1822 328.0297 981.4667 2.3830 0.5508
+0 2005 226 19.00 0.417 9.1140 8.0545 0.0000 0.0000 499.4628 419.6211 807.2031 3.0895 0.5649
+0 2005 227 5.00 0.583 13.1739 9.2771 45.2667 0.0000 1001.8808 655.3000 673.9603 2.4464 0.5312
+0 2005 227 19.00 0.417 11.2995 8.4435 0.0000 0.0000 748.5403 511.3287 743.7243 2.6230 0.5432
+0 2005 228 5.00 0.583 12.2907 9.2143 28.3515 0.5080 799.8521 526.9792 790.7593 3.1672 0.5167
+0 2005 228 19.00 0.417 9.2600 8.5735 0.0000 0.5080 510.3390 454.5762 809.0744 3.1565 0.5249
+0 2005 229 5.00 0.583 11.8182 9.3943 52.5844 0.0000 868.2688 649.3173 683.4489 5.7632 0.5097
+0 2005 229 19.00 0.438 10.2038 8.5271 0.0000 0.0000 723.2734 586.4831 673.7613 2.6681 0.4940
+0 2005 230 5.50 0.562 13.2233 10.4385 51.6893 1.7820 985.3198 722.0933 703.4910 3.2507 0.4730
+0 2005 230 19.00 0.438 10.2038 8.0238 0.0000 0.2540 763.8339 591.3259 632.1754 2.8300 0.4689
+0 2005 231 5.50 0.562 14.0070 10.6400 56.5972 0.0000 1154.4751 831.3483 611.4944 3.0100 0.4451
+0 2005 231 19.00 0.438 10.0576 8.0386 0.0000 0.0000 847.2629 685.8391 538.6190 3.4024 0.4582
+0 2005 232 5.50 0.562 12.4711 9.6993 35.0289 1.0160 860.1862 607.4952 751.5777 2.5770 0.4285
+0 2005 232 19.00 0.438 10.4952 8.3062 0.0000 0.0000 665.6751 488.0480 755.1576 2.5086 0.4373
+0 2005 233 5.50 0.542 11.4062 9.8450 27.0657 3.2980 655.0068 514.7634 853.8994 2.9364 0.4319
+0 2005 233 18.50 0.458 7.9155 7.5350 0.0000 0.0000 359.7385 333.0020 853.6037 2.7209 0.5031
+0 2005 234 5.50 0.542 11.2881 9.2912 36.5070 0.0000 563.5120 392.9665 932.0743 1.8232 0.4773
+0 2005 234 18.50 0.458 10.0359 8.0991 0.0000 0.2540 581.0316 425.3624 802.8495 3.1432 0.4919
+0 2005 235 5.50 0.542 12.9835 9.9027 50.3135 0.0000 940.9672 660.8816 715.0042 4.3235 0.4607
+0 2005 235 18.50 0.458 10.1123 8.3918 0.0000 0.0000 724.5851 585.8264 663.8851 4.7609 0.4648
+0 2005 236 5.50 0.542 14.4654 10.5912 53.6676 0.0000 1214.8748 832.6907 605.4368 4.6538 0.4347
+0 2005 236 18.50 0.458 11.9927 8.7700 0.0000 0.0000 943.6407 665.5507 614.3239 2.8777 0.4170
+0 2005 237 5.50 0.542 12.9335 9.8938 38.1502 2.6780 854.0416 566.2458 808.5530 4.2838 0.3938
+0 2005 237 18.50 0.458 10.0482 8.6986 0.0000 0.0000 610.0436 500.2043 773.0364 4.6536 0.3772
+0 2005 238 5.50 0.542 13.9327 9.9908 38.1896 0.0000 1070.1418 695.2214 687.6625 3.4223 0.3758
+0 2005 238 18.50 0.458 12.5350 8.8164 0.0000 0.0000 987.7992 660.8568 621.8872 4.3095 0.3876
+0 2005 239 5.50 0.542 14.1473 10.4731 40.3553 0.0000 1138.4565 784.8809 642.7134 3.1003 0.3723
+0 2005 239 18.50 0.458 9.7300 8.6205 0.0000 0.0000 824.6532 734.9246 534.0494 4.6195 0.3747
+0 2005 240 5.50 0.542 13.3654 9.1142 30.4314 0.0000 1171.9866 783.7110 525.5983 2.9487 0.3664
+0 2005 240 18.50 0.458 10.8950 7.3227 0.0000 0.0000 961.6473 677.1071 494.7794 2.9250 0.3739
+0 2005 241 5.50 0.542 16.0596 9.1854 51.8752 0.0000 1616.7034 938.5079 387.2456 3.1900 0.3571
+0 2005 241 18.50 0.458 13.5868 7.8455 0.0000 0.0000 1276.6207 770.9132 437.9659 3.8755 0.3669
+0 2005 242 5.50 0.542 17.3954 9.7996 49.8638 0.0000 1696.2688 901.2973 466.7234 4.2704 0.3556
+0 2005 242 18.50 0.458 10.8432 8.3986 0.0000 0.0000 1062.5530 841.9043 410.7237 3.7509 0.3669
+0 2005 243 5.50 0.542 7.6831 8.2312 48.4915 0.0000 602.8561 637.6216 602.7079 2.2430 0.3596
+0 2005 243 18.50 0.458 7.8605 6.1664 0.0000 0.0000 784.2142 663.9674 426.9183 2.9536 0.3714
+0 2005 244 5.50 0.542 13.4038 7.8281 50.2125 0.0000 1380.3605 886.6886 327.8128 3.7323 0.3486
+0 2005 244 18.50 0.458 10.4877 6.4809 0.0000 0.0000 1033.2017 725.4239 387.0386 3.2095 0.3656
+0 2005 245 5.50 0.542 14.9219 8.0931 46.3937 0.0000 1547.3846 916.8753 317.4789 2.8558 0.3488
+0 2005 245 18.50 0.458 12.3223 7.1632 0.0000 0.0000 1109.3582 680.1221 478.7794 3.5441 0.3551
+0 2005 246 5.50 0.542 15.8350 8.7315 27.9187 0.0000 1490.8416 803.0257 475.0079 2.9519 0.3445
+0 2005 246 18.50 0.458 12.3068 8.1682 0.0000 0.0000 982.1030 625.7933 606.8923 3.5077 0.3528
+0 2005 247 5.50 0.542 13.4804 9.4485 42.0817 0.0000 933.9232 561.5611 775.0771 3.7569 0.3507
+0 2005 247 18.50 0.458 11.0482 7.8664 0.0000 0.0000 765.8367 505.8897 704.4232 3.6082 0.3573
+0 2005 248 5.50 0.542 14.6750 9.7392 49.7070 0.0000 1162.3938 681.1797 679.9750 4.7742 0.3455
+0 2005 248 18.50 0.458 11.3264 8.1409 0.0000 0.0000 850.6018 585.7769 646.3929 2.7323 0.3526
+0 2005 249 5.50 0.542 10.8612 8.6931 25.8034 0.5080 533.4437 351.3154 923.4774 2.0793 0.3498
+0 2005 249 18.50 0.458 10.2536 7.4568 0.0000 0.0000 651.5317 431.0390 749.0997 2.8268 0.3551
+0 2005 250 5.50 0.542 14.8785 9.2519 43.1759 0.0000 1150.3352 609.1671 712.1802 3.4991 0.3411
+0 2005 250 18.50 0.458 12.0891 7.9650 0.0000 0.0000 822.5339 474.7149 743.0839 3.2177 0.3578
+0 2005 251 5.50 0.542 14.0312 9.2077 21.6866 0.5080 943.4198 492.5854 822.2761 3.6254 0.3444
+0 2005 251 18.50 0.458 9.9841 8.4091 0.0000 3.8100 347.1169 219.8848 1030.6818 5.8177 0.3638
+0 2005 252 5.50 0.542 12.1400 8.8815 35.1438 2.2820 705.5739 415.1949 872.5909 4.4196 0.3555
+0 2005 252 18.50 0.458 10.5686 8.0945 0.0000 0.0000 657.6477 456.0576 770.6789 2.4655 0.3660
+0 2005 253 5.50 0.542 12.1281 8.8788 45.0339 0.0000 917.1459 605.8820 684.0191 3.9233 0.3514
+0 2005 253 18.50 0.479 9.8361 7.2083 0.0000 0.0000 870.5012 661.4171 501.4171 3.0970 0.3619
+0 2005 254 6.00 0.521 13.9628 8.3584 42.5845 0.0000 1327.6486 814.2641 436.7467 3.4220 0.3437
+0 2005 254 18.50 0.479 10.1070 7.8200 0.0000 0.0000 855.6242 671.8475 534.7139 4.9726 0.3597
+0 2005 255 6.00 0.521 10.8856 8.2860 45.3134 0.0000 1093.4421 874.2202 368.9400 4.7204 0.3447
+0 2005 255 18.50 0.479 5.3248 6.2935 0.0000 0.0000 543.1751 600.7344 498.4118 2.4748 0.3549
+0 2005 256 6.00 0.521 2.9277 5.8456 24.2076 2.2840 285.0567 460.2421 608.0319 2.2440 0.3549
+0 2005 256 18.50 0.479 0.8486 4.4778 0.0000 1.2700 145.9790 343.7934 637.5408 2.4367 0.3697
+0 2005 257 6.00 0.500 4.5850 4.4837 26.6560 1.2700 337.6767 327.2866 654.7227 2.2435 0.3748
+0 2005 257 18.00 0.500 3.3000 4.0087 0.0000 0.0000 462.4658 503.4572 449.8094 4.3775 0.3775
+0 2005 258 6.00 0.500 9.0083 5.5250 45.8228 0.0000 939.7004 680.4645 369.9151 3.6404 0.3603
+0 2005 258 18.00 0.500 7.7992 4.6300 0.0000 0.0000 837.4662 623.4440 367.7278 2.8654 0.3690
+0 2005 259 6.00 0.500 10.9504 6.2504 36.3332 0.0000 1090.2079 720.9841 378.2148 2.8537 0.3537
+0 2005 259 18.00 0.500 9.0079 5.9350 0.0000 0.0000 981.0569 754.2947 319.8271 6.8225 0.3544
+0 2005 260 6.00 0.500 12.2100 7.0504 45.1266 0.0000 1331.3737 895.4578 257.4723 4.1992 0.3429
+0 2005 260 18.00 0.500 8.6362 5.6246 0.0000 0.0000 1053.7377 836.0665 218.9185 4.2150 0.3544
+0 2005 261 6.00 0.500 11.6629 6.6133 40.4081 0.0000 1177.5853 769.8827 354.4414 3.1346 0.3404
+0 2005 261 18.00 0.500 8.8875 5.7475 0.0000 0.0000 1014.1240 786.1967 275.7296 2.6388 0.3477
+0 2005 262 6.00 0.500 11.7413 6.7758 44.3485 0.0000 1266.3177 854.8932 278.8948 4.8842 0.3388
+0 2005 262 18.00 0.500 11.2413 6.1146 0.0000 0.0000 1237.6805 836.3593 249.4624 5.6033 0.3452
+0 2005 263 6.00 0.500 15.1558 8.0425 38.1055 0.0000 1601.9786 934.1501 291.1735 3.5461 0.3301
+0 2005 263 18.00 0.500 13.1508 7.6675 0.0000 0.0000 1163.7242 687.2988 507.4418 3.7562 0.3404
+0 2005 264 6.00 0.500 12.9625 8.5529 27.6372 2.5420 855.0873 454.6050 807.5492 3.4637 0.3372
+0 2005 264 18.00 0.500 5.9500 6.5979 0.0000 7.1100 229.5753 270.9698 848.6005 3.9488 0.3755
+0 2005 265 6.00 0.500 7.1017 6.7283 28.4039 0.0000 219.1646 191.7273 936.9489 2.4008 0.3739
+0 2005 265 18.00 0.500 5.9963 5.5608 0.0000 0.0000 343.8442 315.2987 734.9454 2.6598 0.3851
+0 2005 266 6.00 0.500 11.4008 6.8346 33.6892 0.0000 842.4579 471.4269 665.7994 3.1447 0.3688
+0 2005 266 18.00 0.500 9.6067 5.8108 0.0000 0.0000 803.2427 523.8910 542.5599 2.4337 0.3777
+0 2005 267 6.00 0.500 11.5742 6.8742 28.2850 0.0000 813.8896 430.6509 709.7877 2.2792 0.3574
+0 2005 267 18.00 0.500 9.3333 5.9183 0.0000 0.0000 883.0919 632.2102 441.0620 2.9808 0.3704
+0 2005 268 6.00 0.500 10.9675 6.6238 42.7701 0.0000 1164.4174 819.1790 302.6523 5.4096 0.3543
+0 2005 268 18.00 0.500 5.9746 5.3496 0.0000 0.0000 611.7433 563.9156 472.2415 3.5812 0.3663
+0 2005 269 6.00 0.500 12.1446 6.4629 42.0653 0.0000 1326.0458 851.0673 260.6844 3.7329 0.3562
+0 2005 269 18.00 0.500 11.5104 6.0442 0.0000 0.0000 1129.9552 698.3328 382.7475 3.4567 0.3590
+0 2005 270 6.00 0.500 11.6038 7.3671 33.9714 2.2820 823.4187 466.1336 707.6667 5.1692 0.3379
+0 2005 270 18.00 0.500 4.2146 5.5896 0.0000 12.7040 116.0204 195.5693 855.9509 3.6133 0.4539
+0 2005 271 6.00 0.500 0.7702 4.9612 14.0363 0.5080 -4.4092 225.1218 785.9398 1.6140 0.5035
+0 2005 271 18.00 0.500 3.4489 4.0421 0.0000 0.0000 238.9156 269.0779 686.2080 3.0271 0.5069
+0 2005 272 6.00 0.500 10.0908 5.0763 33.8694 0.0000 780.3345 406.6093 614.1196 1.9743 0.4803
+0 2005 272 18.00 0.500 7.4004 5.1542 0.0000 0.0000 599.0654 444.6842 578.6588 4.5818 0.4833
+0 2005 273 6.00 0.500 11.8612 6.1383 40.2375 0.0000 1078.4865 614.2178 475.2833 4.9308 0.4546
+0 2005 273 18.00 0.500 9.0904 5.6350 0.0000 0.0000 780.0602 530.2545 523.9689 4.7929 0.4376
+0 2005 274 6.00 0.500 13.1750 6.8329 39.1900 0.0000 1242.2415 699.4040 437.4498 4.3704 0.4149
+0 2005 274 18.00 0.500 9.7371 5.9975 0.0000 0.0000 903.1627 623.4152 454.8153 6.9100 0.4305
+0 2005 275 6.00 0.500 12.6146 6.9321 39.1877 0.0000 1206.1956 724.3366 418.9702 6.7258 0.4105
+0 2005 275 18.00 0.500 9.7250 6.0367 0.0000 0.0000 864.9708 587.7919 493.1074 3.8688 0.3901
+0 2005 276 6.00 0.500 13.1658 7.1367 38.9433 0.0000 1166.0948 641.8433 516.5723 3.3212 0.3621
+0 2005 276 18.00 0.521 9.4448 5.6904 0.0000 0.0000 835.8750 559.6409 498.8942 3.4440 0.3631
+0 2005 277 6.50 0.479 8.7830 6.4322 26.9594 4.5760 580.5665 375.6413 733.7312 5.0117 0.3674
+0 2005 277 18.00 0.521 -3.1589 3.8032 0.0000 1.5240 153.7478 482.3679 459.3504 11.7540 0.3944
+0 2005 278 6.50 0.479 -0.7140 2.7870 37.7300 0.0000 378.3161 547.0516 336.4408 4.5822 0.3870
+0 2005 278 18.00 0.521 -5.5976 2.3196 0.0000 0.0000 38.5478 370.2585 488.0963 1.9230 0.3905
+0 2005 279 6.50 0.479 1.7102 2.3965 37.1091 0.0000 311.2974 322.3022 540.2927 2.8643 0.3739
+0 2005 279 18.00 0.521 3.1187 2.2204 0.0000 0.0000 540.7818 488.4224 364.5686 3.4524 0.3869
+0 2005 280 6.50 0.458 10.8373 3.3973 37.7716 0.0000 1273.8698 734.0735 185.1936 5.0514 0.3609
+0 2005 280 17.50 0.542 8.6038 3.2308 0.0000 0.0000 983.9677 626.2552 282.2723 3.1004 0.3687
+0 2005 281 6.50 0.458 11.7823 4.9318 32.2547 0.0000 1149.3546 617.1022 393.8549 3.1886 0.3586
+0 2005 281 17.50 0.542 6.7031 4.2281 0.0000 0.0000 611.1505 443.8938 522.4891 2.4483 0.3591
+0 2005 282 6.50 0.458 0.0914 3.2623 3.0760 21.3580 33.2952 193.1514 717.0901 1.7710 0.3936
+0 2005 282 17.50 0.542 -4.4081 2.5219 0.0000 14.4860 10.5058 313.4306 555.6485 3.0669 0.4136
+0 2005 283 6.50 0.458 -4.7232 2.4127 11.3478 4.3180 12.4538 320.9870 542.2173 3.4282 0.4240
+0 2005 283 17.50 0.542 -5.4531 2.3162 0.0000 1.2700 28.0670 355.8027 502.2427 0.9778 0.4545
+0 2005 284 6.50 0.458 -1.2538 2.2345 24.7473 0.2540 113.5613 278.6729 575.0347 3.4541 0.4379
+0 2005 284 17.50 0.542 -1.6042 2.1650 0.0000 0.0000 114.5273 291.9129 558.1168 5.8069 0.4501
+0 2005 285 6.50 0.458 0.5245 2.1159 23.7455 0.0000 228.0439 306.9210 540.5214 4.0568 0.4822
+0 2005 285 17.50 0.542 0.0166 2.0462 0.0000 0.0000 211.3891 311.2807 532.4992 5.6058 0.4849
+0 2005 286 6.50 0.458 4.6526 2.0114 35.1200 0.0000 581.9683 423.9207 418.0380 7.9795 0.4863
+0 2005 286 17.50 0.542 5.9350 1.9377 0.0000 0.0000 723.8433 487.3734 350.7429 5.5842 0.5141
+0 2005 287 6.50 0.458 9.3918 1.7145 34.9856 0.0000 927.6697 422.7637 403.8949 2.4732 0.5364
+0 2005 287 17.50 0.542 6.6577 1.6462 0.0000 0.0000 725.4167 425.1923 397.8969 3.1558 0.5408
+0 2005 288 6.50 0.458 10.0041 1.6432 32.1593 0.0000 934.8096 374.6294 448.3507 2.4814 0.5441
+0 2005 288 17.50 0.542 5.4612 1.4558 0.0000 0.0000 600.9015 371.4643 441.9385 3.6438 0.5692
+0 2005 289 6.50 0.458 8.2118 2.1155 33.7017 0.0000 750.4271 360.3284 487.3863 5.8409 0.5928
+0 2005 289 17.50 0.542 5.8585 1.9485 0.0000 0.0000 482.1121 251.0379 587.6383 7.8835 0.7173
+0 2005 290 6.50 0.458 8.9945 1.9482 33.6140 0.0000 846.3442 382.0420 456.6197 5.0277 0.7224
+0 2005 290 17.50 0.542 7.3742 1.9654 0.0000 0.0000 822.9523 487.5800 351.9781 3.2035 0.7383
+0 2005 291 6.50 0.458 10.6673 1.9727 32.4228 0.0000 1217.8710 619.6755 220.2731 2.4762 0.7133
+0 2005 291 17.50 0.542 2.4230 2.0985 0.0000 2.2860 300.5903 269.0339 577.4918 1.6187 0.7260
+0 2005 292 6.50 0.458 -0.0496 2.0027 15.2198 1.7780 13.7530 112.5897 728.9182 2.6457 0.7246
+0 2005 292 17.50 0.542 2.1312 1.9254 0.0000 0.0000 196.4059 185.0979 652.3782 7.5650 0.7378
+0 2005 293 6.50 0.458 5.0195 1.9509 28.4486 0.0000 384.9248 208.1572 630.6719 6.4677 0.7132
+0 2005 293 17.50 0.542 2.9254 2.0842 0.0000 0.0000 256.5204 208.4361 637.3416 4.5627 0.7190
+0 2005 294 6.50 0.458 2.4638 2.4145 19.0276 0.0000 172.9037 164.8767 698.5997 1.9105 0.7126
+0 2005 294 17.50 0.542 2.8519 2.3658 0.0000 0.0000 489.2598 461.8854 398.8453 3.5827 0.7239
+0 2005 295 6.50 0.458 5.0651 2.0668 29.3030 0.0000 514.3257 334.7057 510.1663 3.6332 0.6662
+0 2005 295 17.50 0.542 -0.9456 2.1538 0.0000 0.0000 69.6786 215.5222 633.9223 2.0020 0.6509
+0 2005 296 6.50 0.458 1.3003 1.9477 29.8799 0.0000 266.6949 298.6894 539.9513 2.5518 0.6246
+0 2005 296 17.50 0.542 1.2920 1.8281 0.0000 0.0000 411.5187 436.9399 395.4990 3.6185 0.6660
+0 2005 297 6.50 0.458 8.1327 1.5768 31.1080 0.0000 923.9459 504.1932 315.3497 2.6409 0.5820
+0 2005 297 17.50 0.542 4.0619 1.5462 0.0000 0.0000 580.4606 441.3080 376.6735 2.7577 0.6406
+0 2005 298 6.50 0.458 7.2323 1.4041 27.1865 0.0000 701.8771 346.6176 464.1784 1.8754 0.5907
+0 2005 298 17.50 0.542 4.0131 1.4554 0.0000 0.0000 510.8950 369.0963 444.2859 2.6085 0.5699
+0 2005 299 6.50 0.458 5.7236 1.3427 29.5138 0.0000 623.7379 365.3283 442.3825 2.5663 0.5812
+0 2005 299 17.50 0.562 2.6048 1.3233 0.0000 0.0000 541.3090 473.3823 333.3561 3.0119 0.5795
+0 2005 300 7.00 0.438 6.1943 1.1495 21.6431 0.0000 813.6918 519.1469 278.9280 2.0262 0.5491
+0 2005 300 17.50 0.562 4.7604 1.1681 0.0000 0.0000 638.0789 438.2384 360.7588 2.0359 0.5504
+0 2005 301 7.00 0.438 6.2167 1.2929 14.5018 0.0000 672.9221 383.3095 421.9458 2.6243 0.5514
+0 2005 301 17.50 0.562 2.1894 1.5359 0.0000 0.0000 315.5457 280.4159 537.0470 2.8441 0.5485
+0 2005 302 7.00 0.438 0.9674 1.6214 15.5278 0.7620 220.7059 252.0932 569.7653 2.7790 0.5504
+0 2005 302 17.50 0.562 -1.8062 1.7193 0.0000 0.5080 151.8119 313.8152 513.0130 2.4004 0.5625
+0 2005 303 7.00 0.438 -1.5620 1.5195 13.7509 1.0160 175.7240 316.4966 500.1347 2.3500 0.5538
+0 2005 303 17.50 0.562 -5.9937 1.4356 0.0000 0.5080 159.7366 457.7486 354.6332 5.0615 0.5584
+0 2005 304 7.00 0.417 -3.2100 1.2910 28.4687 0.0000 391.9155 584.7768 220.3446 8.7470 0.5598
+0 2005 304 17.00 0.583 -0.8776 1.1375 0.0000 0.0000 307.8139 401.2106 396.2698 15.3032 0.5497
+0 2005 305 7.00 0.417 4.2365 1.0325 29.2638 0.0000 659.4799 479.6833 312.6092 4.3510 0.5457
+0 2005 305 17.00 0.583 4.0893 1.0244 0.0000 0.0000 706.4922 540.1105 251.7832 6.1293 0.5536
+0 2005 306 7.00 0.417 7.3985 0.9708 25.6348 0.0000 904.1412 512.2537 277.0073 11.3710 0.5404
+0 2005 306 17.00 0.583 4.9629 0.9967 0.0000 0.0000 753.2943 531.0682 259.4665 9.6579 0.5474
+0 2005 307 7.00 0.417 2.9600 1.0220 19.0862 0.0000 528.8827 426.2767 365.4991 7.5700 0.5409
+0 2005 307 17.00 0.583 1.6842 1.0536 0.0000 0.0000 474.5711 442.6197 350.7104 8.9168 0.5485
+0 2005 308 7.00 0.417 0.3697 1.0160 21.1591 0.0000 395.7745 426.4837 364.9969 6.7240 0.5428
+0 2005 308 17.00 0.583 -3.5220 1.0196 0.0000 6.0960 142.0837 331.7904 459.8693 4.5525 0.5479
+0 2005 309 7.00 0.417 -6.7820 0.9919 15.3047 2.0360 149.8847 451.3428 338.9561 8.0620 0.5421
+0 2005 309 17.00 0.583 -3.4506 0.9668 0.0000 0.0000 278.3006 463.6874 325.3753 13.9332 0.5356
+0 2005 310 7.00 0.417 2.4926 0.9352 20.4634 0.0000 462.1783 374.4675 413.0523 12.5320 0.5447
+0 2005 310 17.00 0.583 2.3461 0.9525 0.0000 0.0000 516.9951 445.3792 342.9868 3.5075 0.5477
+0 2005 311 7.00 0.417 6.4845 0.9545 22.1665 0.0000 758.7490 431.7639 356.6958 2.8425 0.5460
+0 2005 311 17.00 0.583 4.6857 0.9745 0.0000 0.0000 678.4085 472.5794 316.8614 8.3475 0.5515
+0 2005 312 7.00 0.417 5.6430 0.9962 20.9212 0.0000 704.5390 437.1362 353.3713 9.7865 0.5426
+0 2005 312 17.00 0.583 3.0393 1.0225 0.0000 0.0000 489.2329 382.7466 409.0537 10.7157 0.5488
+0 2005 313 7.00 0.417 6.1675 1.0320 25.4035 0.0000 760.6935 460.1598 332.1079 9.3640 0.5515
+0 2005 313 17.00 0.583 6.0764 1.0586 0.0000 0.0000 662.5349 372.8994 420.6772 7.9107 0.5573
+0 2005 314 7.00 0.417 8.7100 1.0765 21.0691 0.0000 869.7501 389.3914 405.0698 7.8070 0.5670
+0 2005 314 17.00 0.583 7.2475 1.2850 0.0000 0.0000 753.0068 393.5176 411.3154 3.9643 0.6506
+0 2005 315 7.00 0.417 6.9565 2.0660 12.3032 0.0000 704.5060 402.8145 442.2916 2.3645 0.6443
+0 2005 315 17.00 0.583 -1.8400 2.0364 0.0000 7.3700 127.3447 307.5092 535.8630 12.5721 0.6468
+0 2005 316 7.00 0.417 -2.3175 1.6260 9.9663 7.8860 150.7786 329.0695 492.9830 10.4025 0.6681
+0 2005 316 17.00 0.583 -5.9979 1.4629 0.0000 0.7620 145.8096 446.1641 367.5981 12.5529 0.6793
+0 2005 317 7.00 0.417 -2.5995 1.3480 17.2556 0.5080 252.2650 426.9302 381.0448 6.3190 0.6647
+0 2005 317 17.00 0.583 -3.9089 1.2875 0.0000 2.7940 125.1735 346.5304 458.4131 8.7054 0.6670
+0 2005 318 7.00 0.417 -4.6485 1.2515 2.7927 13.9820 73.5135 318.8436 484.3017 6.3135 0.6570
+0 2005 318 17.00 0.583 -12.0950 1.2318 0.0000 3.8100 64.5799 505.5055 296.6567 3.6430 0.6350
+0 2005 319 7.00 0.417 -14.9730 1.2180 21.9328 0.2540 115.4780 617.5395 183.9360 6.5830 0.6521
+0 2005 319 17.00 0.583 -12.7511 1.1954 0.0000 0.0000 145.4788 604.3279 196.0211 14.4257 0.6538
+0 2005 320 7.00 0.417 -6.5395 1.1605 21.7551 0.0000 162.9023 464.0145 334.6033 14.5145 0.6309
+0 2005 320 17.00 0.583 -5.0114 1.1304 0.0000 1.2700 135.9865 388.2893 408.8344 10.5314 0.6287
+0 2005 321 7.00 0.417 -6.5940 1.1105 8.9756 5.0800 45.7842 346.6759 449.4650 2.6128 0.6257
+0 2005 321 17.00 0.583 -7.0493 1.1011 0.0000 0.0000 180.8657 492.7835 302.8911 5.1261 0.6093
+0 2005 322 7.00 0.417 -1.4679 1.0955 23.4475 0.0000 438.7602 553.5028 241.8965 10.1665 0.6179
+0 2005 322 17.00 0.583 -5.4261 1.0836 0.0000 0.0000 228.7520 490.3170 304.4930 9.0061 0.6254
+0 2005 323 7.00 0.417 -5.2165 1.0725 21.3541 0.0000 294.8102 550.0326 244.2310 5.5000 0.6219
+0 2005 323 17.00 0.583 -3.4504 1.0618 0.0000 0.0000 423.4851 615.6749 178.0602 4.7461 0.6173
+0 2005 324 7.00 0.417 3.8247 1.0425 22.9540 0.0000 656.0010 502.6437 290.1411 3.7560 0.6164
+0 2005 324 17.00 0.583 0.7780 1.0264 0.0000 0.0000 553.7339 565.1373 226.8562 4.1796 0.6114
+0 2005 325 7.00 0.417 3.2847 1.0170 22.2222 0.0000 676.1625 552.1255 239.4042 2.7706 0.6213
+0 2005 325 17.00 0.583 3.2054 1.0050 0.0000 0.0000 761.2920 644.8388 146.1010 6.6979 0.5874
+0 2005 326 7.00 0.417 4.6320 0.9984 22.3964 0.0000 844.9937 643.2678 147.3453 4.5815 0.6087
+0 2005 326 17.00 0.583 3.2125 0.9987 0.0000 0.0000 804.0630 686.9108 103.7219 7.3104 0.6024
+0 2005 327 7.00 0.417 4.9765 0.9922 22.0888 0.0000 914.3544 686.3486 103.9625 5.2795 0.6126
+0 2005 327 17.00 0.583 0.2707 0.9920 0.0000 0.0000 659.9542 692.3808 97.9187 3.9293 0.6143
+0 2005 328 7.00 0.417 0.5628 0.9758 22.2629 0.0000 605.0649 623.9404 165.5636 2.6080 0.6145
+0 2005 328 17.00 0.604 -1.8948 0.9445 0.0000 0.0000 500.3354 627.2749 160.6969 6.0059 0.5978
+0 2005 329 7.50 0.396 1.6569 0.9129 21.8418 0.0000 730.2728 691.3247 95.1021 8.1626 0.5989
+0 2005 329 17.00 0.604 -0.3587 0.8894 0.0000 0.0000 599.5754 656.7708 128.5097 5.1683 0.5944
+0 2005 330 7.50 0.396 -2.8431 0.8643 16.0536 0.5080 295.9479 451.2604 332.7964 6.0616 0.6020
+0 2005 330 17.00 0.604 -9.8362 0.8453 0.0000 0.2540 149.5748 525.5720 257.5581 7.1524 0.6166
+0 2005 331 7.50 0.396 -10.8753 0.7809 16.2842 0.0000 203.1610 601.8492 178.1550 10.3195 0.6094
+0 2005 331 17.00 0.604 -13.6486 0.6875 0.0000 0.0000 153.8006 606.1186 169.3733 14.2790 0.5742
+0 2005 332 7.50 0.396 -13.8995 0.5661 20.2343 0.0000 161.2691 612.4217 157.2430 14.2437 0.5651
+0 2005 332 17.00 0.604 -14.7748 0.4669 0.0000 0.0000 144.2370 605.9960 158.9411 11.6159 0.5554
+0 2005 333 7.50 0.396 -8.9442 0.3791 20.5259 0.0000 218.2648 550.2728 210.4981 7.3763 0.5933
+0 2005 333 17.00 0.604 -6.1034 0.3166 0.0000 4.3180 105.5709 351.4678 406.3545 6.6007 0.5972
+0 2005 334 7.50 0.375 -12.6911 0.2836 8.1631 0.7620 105.9706 519.3618 236.9081 8.7100 0.5551
+0 2005 334 16.50 0.625 -11.1890 0.2609 0.0000 0.5080 191.3514 569.3589 185.8472 12.2133 0.5450
+0 2005 335 7.50 0.375 -4.6883 0.2301 5.6750 5.3340 114.6436 311.8914 441.8688 7.0922 0.5674
+0 2005 335 16.50 0.625 -2.5253 0.2102 0.0000 6.6080 161.0283 277.7820 475.0488 9.7317 0.5835
+0 2005 336 7.50 0.375 -2.0217 0.2059 8.1575 3.0480 123.7985 220.9535 531.6782 8.6678 0.5714
+0 2005 336 16.50 0.625 -5.7517 0.2123 0.0000 3.8100 71.0889 296.8953 456.0323 3.7999 0.5732
+0 2005 337 7.50 0.375 -12.4317 0.2268 11.7918 0.2540 115.7062 521.4204 232.1862 5.3822 0.5596
+0 2005 337 16.50 0.625 -15.5783 0.2394 0.0000 0.0000 88.1854 553.4404 200.7574 3.2687 0.5513
+0 2005 338 7.50 0.375 -16.8950 0.2453 9.4159 0.2540 95.3634 582.0840 172.3920 4.4067 0.5532
+0 2005 338 16.50 0.625 -19.6477 0.2463 0.0000 0.2540 82.2973 606.8164 147.7049 11.2257 0.5607
+0 2005 339 7.50 0.375 -16.0717 0.2322 15.2937 0.0000 121.6298 594.3597 159.4993 12.0444 0.5589
+0 2005 339 16.50 0.625 -14.5530 0.2064 0.0000 0.0000 109.9143 556.2172 196.4361 5.2130 0.5550
+0 2005 340 7.50 0.375 -18.2467 0.1781 5.1393 0.7620 53.0776 551.9263 199.4051 1.9850 0.5494
+0 2005 340 16.50 0.625 -26.3413 0.1617 0.0000 0.7620 39.4337 627.4561 123.1080 1.3397 0.5624
+0 2005 341 7.50 0.375 -21.6917 0.1448 6.9827 0.5080 72.8354 616.5807 133.1987 2.7710 0.5709
+0 2005 341 16.50 0.625 -18.0160 0.1185 0.0000 0.5080 156.4052 645.9437 102.6117 10.0593 0.5706
+0 2005 342 7.50 0.375 -6.7667 0.0850 17.9017 0.0000 424.3904 681.1085 65.8889 13.1239 0.5532
+0 2005 342 16.50 0.625 -8.6697 0.0541 0.0000 0.0000 374.2108 680.1765 65.3881 7.8047 0.5437
+0 2005 343 7.50 0.375 -7.9078 0.0290 19.7881 0.0000 337.4796 619.3850 125.0179 9.7700 0.5343
+0 2005 343 16.50 0.625 -7.2727 0.0056 0.0000 0.0000 259.8295 526.8469 216.4761 9.4730 0.5251
+0 2005 344 7.50 0.375 -2.7633 -0.0174 17.9434 0.0000 500.6011 615.0391 127.2214 8.2956 0.5149
+0 2005 344 16.50 0.625 -3.3529 -0.0207 0.0000 0.0000 425.6000 561.1053 181.0025 4.0517 0.5104
+0 2005 345 7.50 0.375 0.9101 -0.0224 19.7416 0.0000 560.9543 515.6758 226.3515 5.8022 0.4981
+0 2005 345 16.50 0.625 -3.0750 -0.0093 0.0000 0.0000 368.6908 496.9480 245.6872 3.1719 0.4976
+0 2005 346 7.50 0.375 0.4929 -0.0033 19.4928 0.0000 520.3864 493.5768 249.3347 2.3969 0.4965
+0 2005 346 16.50 0.625 -2.5413 0.0010 0.0000 0.0000 404.9568 509.3477 233.7613 6.0780 0.4906
+0 2005 347 7.50 0.375 -7.0044 0.0103 14.7787 0.0000 145.3233 406.8243 336.7157 12.8122 0.4931
+0 2005 347 16.50 0.625 -13.5317 0.0226 0.0000 3.3020 78.2363 489.0374 255.0680 4.8087 0.5238
+0 2005 348 7.50 0.375 -16.2106 0.0318 16.6069 0.2540 105.1028 571.0859 173.4454 11.2850 0.5385
+0 2005 348 16.50 0.625 -17.2563 0.0259 0.0000 0.0000 96.3009 578.0920 166.1674 11.7090 0.5241
+0 2005 349 7.50 0.375 -11.9500 0.0041 19.2041 0.0000 243.1223 627.7930 115.4571 10.7217 0.5340
+0 2005 349 16.50 0.625 -12.4750 -0.0166 0.0000 0.0000 188.5848 584.3500 157.9484 7.7827 0.5394
+0 2005 350 7.50 0.375 -11.5683 -0.0378 13.7097 0.0000 152.3606 527.9714 213.3494 4.5256 0.5260
+0 2005 350 16.50 0.625 -14.4767 -0.0497 0.0000 0.0000 78.4911 511.3591 229.4143 2.0124 0.5250
+0 2005 351 7.50 0.375 -15.0467 -0.0613 9.8208 0.0000 53.3729 495.3519 244.8847 1.6611 0.5239
+0 2005 351 16.50 0.625 -11.1137 -0.0763 0.0000 0.5080 111.3110 474.4678 265.0824 4.7063 0.5223
+0 2005 352 7.50 0.375 -9.3839 -0.0887 6.7874 0.7620 121.3306 443.5349 295.4458 6.0806 0.5191
+0 2005 352 16.50 0.625 -8.2160 -0.0955 0.0000 0.5080 113.1002 404.0867 334.5818 7.4957 0.5194
+0 2005 353 7.50 0.375 -6.4633 -0.0904 6.4875 2.7940 90.5712 331.1129 407.7861 6.7022 0.5083
+0 2005 353 16.50 0.625 -6.1147 -0.0809 0.0000 2.0320 107.1363 337.5133 401.8225 6.8817 0.5028
+0 2005 354 7.50 0.375 -3.9322 -0.0779 15.6648 0.2540 148.8679 305.5670 433.9086 9.1744 0.5033
+0 2005 354 16.50 0.625 -3.4683 -0.0656 0.0000 0.0000 142.2850 281.9870 458.0519 9.2577 0.4949
+0 2005 355 7.50 0.375 -0.5079 -0.0544 11.4809 0.0000 205.0975 225.0674 515.4886 11.1367 0.4844
+0 2005 355 16.50 0.625 1.2710 -0.0412 0.0000 0.0000 363.6931 300.4594 440.7053 11.6337 0.4634
+0 2005 356 7.50 0.375 0.8164 -0.0298 15.2207 0.0000 319.9832 279.7521 461.9346 13.1889 0.4643
+0 2005 356 16.50 0.625 -0.6407 -0.0148 0.0000 1.0160 247.9000 275.3557 467.0226 14.7847 0.4755
+0 2005 357 7.50 0.375 -3.0906 -0.0026 10.6737 0.2540 205.2576 333.6823 409.2599 8.6856 0.4823
+0 2005 357 16.50 0.625 -6.8750 0.0104 0.0000 0.0000 191.0531 448.3648 295.1783 10.4967 0.5017
+0 2005 358 7.50 0.375 -2.1533 0.0146 18.7959 0.0000 334.4274 421.9048 321.8328 11.9706 0.5005
+0 2005 358 16.50 0.625 0.7813 0.0162 0.0000 0.0000 404.5550 366.0338 377.7758 9.8930 0.4862
+0 2005 359 7.50 0.375 4.8161 0.0173 19.2844 0.0000 705.3093 445.7199 298.1435 6.3583 0.4749
+0 2005 359 16.50 0.625 1.1014 0.0234 0.0000 0.0000 508.9252 455.1874 288.9549 4.4467 0.4767
+0 2005 360 7.50 0.375 1.8281 0.0258 17.3590 0.0000 605.2507 512.8252 231.4312 3.4611 0.4816
+0 2005 360 16.50 0.625 -3.0650 0.0351 0.0000 0.5080 254.1182 379.9432 364.7403 9.6893 0.4936
+0 2005 361 7.50 0.375 -5.2917 0.0396 16.6714 0.0000 178.2370 387.5973 357.2939 11.9000 0.4960
+0 2005 361 16.50 0.625 -5.5503 0.0471 0.0000 0.0000 165.1547 382.5225 362.7180 17.4087 0.5108
+0 2005 362 7.50 0.375 -1.5839 0.0444 15.5973 0.0000 148.9860 219.8768 525.2406 14.8906 0.4976
+0 2005 362 16.50 0.625 -0.3454 0.0479 0.0000 0.9000 228.5479 244.6694 500.6081 8.0210 0.4887
+0 2005 363 7.50 0.375 -7.2650 0.0544 3.8816 5.4540 115.5135 385.2825 360.2981 5.6022 0.5012
+0 2005 363 16.50 0.625 -11.1730 0.0634 0.0000 0.2540 116.8891 487.5245 258.4710 10.5767 0.5153
+0 2005 364 7.50 0.375 -7.0806 0.0659 16.4624 0.0000 274.1467 538.8781 207.2353 13.7794 0.5206
+0 2005 364 16.50 0.625 -3.6468 0.0669 0.0000 2.5400 147.6911 296.6040 449.5538 13.0483 0.5095
+0 2005 365 7.50 0.375 0.7681 0.0652 18.8064 0.2540 261.3143 227.2251 518.8549 12.8689 0.4953
+0 2005 365 16.50 0.312 -0.3465 0.0692 0.0000 0.0000 265.9180 283.6248 462.6395 2.7100 0.4939
diff --git a/models/sipnet/man/met2model.SIPNET.Rd b/models/sipnet/man/met2model.SIPNET.Rd
index ea76e17666a..6e4c2194302 100644
--- a/models/sipnet/man/met2model.SIPNET.Rd
+++ b/models/sipnet/man/met2model.SIPNET.Rd
@@ -10,6 +10,7 @@ met2model.SIPNET(
outfolder,
start_date,
end_date,
+ var.names = NULL,
overwrite = FALSE,
verbose = FALSE,
year.fragment = FALSE,
@@ -29,6 +30,8 @@ met2model.SIPNET(
\item{end_date}{the end date of the data to be downloaded
(will only use the year part of the date)}
+\item{var.names}{character: list of variable names to be extracted. Default is NULL.}
+
\item{overwrite}{should existing files be overwritten}
\item{verbose}{should the function be very verbose}
diff --git a/models/sipnet/man/sipnet2datetime.Rd b/models/sipnet/man/sipnet2datetime.Rd
deleted file mode 100644
index 845904aa03c..00000000000
--- a/models/sipnet/man/sipnet2datetime.Rd
+++ /dev/null
@@ -1,23 +0,0 @@
-% Generated by roxygen2: do not edit by hand
-% Please edit documentation in R/model2netcdf.SIPNET.R
-\name{sipnet2datetime}
-\alias{sipnet2datetime}
-\title{Convert SIPNET DOY to datetime}
-\usage{
-sipnet2datetime(sipnet_tval, base_year, base_month = 1, force_cf = FALSE)
-}
-\arguments{
-\item{sipnet_tval}{vector of SIPNET DOY values}
-
-\item{base_year}{base year to calculate datetime from DOY}
-
-\item{base_month}{reference month for converting from DOY to datetime}
-
-\item{force_cf}{force output to follow CF convention. Default FALSE}
-}
-\description{
-Convert SIPNET DOY to datetime
-}
-\author{
-Alexey Shiklomanov, Shawn Serbin
-}
diff --git a/models/sipnet/man/write.events.SIPNET.Rd b/models/sipnet/man/write.events.SIPNET.Rd
new file mode 100644
index 00000000000..153683a2c61
--- /dev/null
+++ b/models/sipnet/man/write.events.SIPNET.Rd
@@ -0,0 +1,48 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/write.events.SIPNET.R
+\name{write.events.SIPNET}
+\alias{write.events.SIPNET}
+\title{Write SIPNET events.in files from a PEcAn events.json}
+\usage{
+write.events.SIPNET(events_json, outdir)
+}
+\arguments{
+\item{events_json}{character. Path to an `events.json` file containing an
+array of site objects with `site_id`, optional `pft`, and `events`.}
+
+\item{outdir}{character. Output directory where per-site `events-<site_id>.in`
+files are written.}
+}
+\value{
+Invisibly, a vector of files written.
+}
+\description{
+Reads a single PEcAn events.json containing one or more site objects and
+writes one SIPNET `events.in` file per site. Events are translated according to [SIPNET's `events.in`
+specification](https://pecanproject.github.io/sipnet/parameters/#agronomic-events).
+The writer expects inputs to already match the PEcAn MVP schema v0.1.0 naming and units where applicable.
+}
+\details{
+- Supported `event_type` values: `tillage`, `planting`, `fertilization`,
+ `irrigation`, `harvest`.
+- Units translated from PEcAn standard_vars to SIPNET events.in specification:
+ `kg/m^2` to `g/m^2`; irrigation `amount_mm` to `cm`.
+- Planting allocation uses fixed internal parameters. Future work should use the same values
+ that are written to `sipnet.parms` (e.g. after integrating this into `write.configs.SIPNET`)
+}
+\examples{
+# Example with two events for a single site
+tmp <- withr::local_tempfile(fileext = ".json")
+site <- list(
+ site_id = "EX1",
+ events = list(
+ list(event_type = "tillage", date = "2022-02-04", tillage_eff_0to1 = 0.2),
+ list(event_type = "planting", date = "2022-02-19", leaf_c_kg_m2 = 0.01)
+ )
+)
+jsonlite::write_json(list(site), tmp, auto_unbox = TRUE)
+outdir <- withr::local_tempdir()
+files <- write.events.SIPNET(tmp, outdir)
+files
+
+}
diff --git a/models/sipnet/tests/testthat.R b/models/sipnet/tests/testthat.R
index 7a1fc04289f..b7ccbc8832f 100644
--- a/models/sipnet/tests/testthat.R
+++ b/models/sipnet/tests/testthat.R
@@ -2,4 +2,4 @@ library(testthat)
library(PEcAn.utils)
PEcAn.logger::logger.setQuitOnSevere(FALSE)
-#test_check("PEcAn.SIPNET")
+test_check("PEcAn.SIPNET")
diff --git a/models/sipnet/tests/testthat/test-write.config.SIPNET.R b/models/sipnet/tests/testthat/test-write.config.SIPNET.R
new file mode 100644
index 00000000000..2f146967906
--- /dev/null
+++ b/models/sipnet/tests/testthat/test-write.config.SIPNET.R
@@ -0,0 +1,69 @@
+test_that("write.config.SIPNET", {
+ pth <- withr::local_tempdir()
+
+ event_lines <- "2025 1 irrig 0 1"
+ event_src_path <- file.path(pth, "events-a.in")
+ dir.create(file.path(pth, "run", "run1"), recursive = TRUE)
+ writeLines(event_lines, con = event_src_path)
+
+ s <- PEcAn.settings::as.Settings(
+ list(
+ outdir = file.path(pth, "out"),
+ rundir = file.path(pth, "run"),
+ pfts = list(pft1 = list()),
+ model = list(binary = "", revision = ""),
+ run = list(
+ site = list(name = "site1", lat = 40, lon = -88),
+ inputs = list(
+ met = list(path = ""),
+ events = list(path = event_src_path)
+ ),
+ start.date = "2025-01-01",
+ end.date = "2025-01-02"
+ ),
+ host = list(
+ name = "",
+ outdir = file.path(pth, "out"),
+ rundir = file.path(pth, "run")
+ )
+ )
+ )
+
+ res <- write.config.SIPNET(
+ defaults = list(pft1 = list(constants = list(SLA = 2.0))),
+ trait.values = list(pft1 = list(Amax = 5, AmaxFrac = 0.99, leafC = 47)),
+ settings = s,
+ run.id = "run1"
+ )
+
+ # events file correctly copied
+ expect_match(
+ readLines(file.path(pth, "run", "run1", "events.in")),
+ event_lines,
+ fixed = TRUE,
+ all = TRUE
+ )
+
+ # (at least some) parameters updated
+ param_result <- readLines(file.path(pth, "run", "run1", "sipnet.param"))
+ expect_match(
+ param_result,
+ "aMax 10", # this is Amax * SLA,
+ fixed = TRUE,
+ all = FALSE
+ )
+ expect_match(
+ param_result,
+ "aMaxFrac 0.99", # raw template had 0.76
+ fixed = TRUE,
+ all = FALSE
+ )
+ # leaf C specific weight is leafC / SLA, with units converted to g C/m2 leaf
+ expect_match(
+ param_result,
+ "leafCSpWt 235 ", # space at end to catch unit errors (eg fail on 23500)
+ fixed = TRUE,
+ all = FALSE
+ )
+
+})
diff --git a/models/sipnet/tests/testthat/test-write.events.SIPNET.R b/models/sipnet/tests/testthat/test-write.events.SIPNET.R
new file mode 100644
index 00000000000..f792a1fcc09
--- /dev/null
+++ b/models/sipnet/tests/testthat/test-write.events.SIPNET.R
@@ -0,0 +1,41 @@
+context("write.events.SIPNET")
+
+# Helper to remove excess whitespace
+norm <- function(x) gsub("\\s+", " ", trimws(x))
+
+testthat::test_that("write.events.SIPNET produces expected lines", {
+ ev_json1 <- system.file(file.path("events_fixtures", "events_site1.json"),
+ package = "PEcAn.data.land"
+ )
+ outdir <- withr::local_tempdir()
+ files <- write.events.SIPNET(ev_json1, outdir)
+ expect_length(files, 1)
+ got <- readLines(files[1])
+ expected <- c(
+ "2022 35 till 0.2",
+ "2022 40 till 0.1",
+ "2022 40 irrig 5 1",
+ "2022 40 fert 0 0 10",
+ "2022 50 plant 10 3 2 5",
+ "2022 250 harv 0.1 0 0 0"
+ )
+ expect_equal(norm(got), norm(expected))
+  # TODO: determine what's generating the whitespace differences and eliminate use of norm()
+})
+
+testthat::test_that("write.events.SIPNET handles multi-site events.json (one file per site)", {
+ ev_json2 <- system.file(file.path("events_fixtures", "events_site1_site2.json"),
+ package = "PEcAn.data.land"
+ )
+ outdir <- withr::local_tempdir()
+ files <- write.events.SIPNET(ev_json2, outdir)
+ testthat::expect_length(files, 2)
+ testthat::expect_true(all(file.exists(files)))
+ # quick sanity checks for each site's first/last event ordering
+ got1 <- readLines(files[grepl("events-S1\\.in$", files)])
+ got2 <- readLines(files[grepl("events-S2\\.in$", files)])
+ testthat::expect_true(startsWith(norm(got1[1]), "2022 15 till"))
+ testthat::expect_true(startsWith(norm(tail(got1, 1)), "2022 244 harv"))
+ testthat::expect_true(startsWith(norm(got2[1]), "2022 60 plant"))
+ testthat::expect_true(startsWith(norm(tail(got2, 1)), "2022 69 irrig"))
+})
diff --git a/models/sipnet/tests/testthat/test.dummy.R b/models/sipnet/tests/testthat/test.dummy.R
deleted file mode 100644
index b025803854d..00000000000
--- a/models/sipnet/tests/testthat/test.dummy.R
+++ /dev/null
@@ -1 +0,0 @@
-test_that("dummy", {})
diff --git a/models/sipnet/tests/testthat/test.met2model.R b/models/sipnet/tests/testthat/test.met2model.R
index b2dd3af4c7c..9d1a0417e8c 100644
--- a/models/sipnet/tests/testthat/test.met2model.R
+++ b/models/sipnet/tests/testthat/test.met2model.R
@@ -16,8 +16,11 @@ add_gaps_to_nc <- function(src_nc, gapped_nc,
}
test_that("Met conversion runs without error", {
- nc_path <- system.file("test-data", "CRUNCEP.2000.nc",
- package = "PEcAn.utils")
+ nc_path <- system.file(
+ "test-data",
+ "CRUNCEP.2000.nc",
+ package = "PEcAn.utils"
+ )
in.path <- dirname(nc_path)
in.prefix <- "CRUNCEP"
start_date <- "2000-01-01"
diff --git a/models/stics/DESCRIPTION b/models/stics/DESCRIPTION
index cc150a42fec..8227ecb68a8 100644
--- a/models/stics/DESCRIPTION
+++ b/models/stics/DESCRIPTION
@@ -1,12 +1,14 @@
Package: PEcAn.STICS
Type: Package
Title: PEcAn Package for Integration of the STICS Model
-Version: 1.8.1
+Version: 1.8.2
Authors@R: c(
person("Istem", "Fer",
email = "istem.fer@fmi.fi",
role = c("aut", "cre")))
Description: This module provides functions to link the STICS to PEcAn.
+URL: https://pecanproject.github.io
+BugReports: https://github.com/PecanProject/pecan/issues
Imports:
PEcAn.settings,
PEcAn.logger,
@@ -31,3 +33,4 @@ LazyLoad: yes
LazyData: FALSE
Encoding: UTF-8
RoxygenNote: 7.3.2
+X-schema.org-keywords: STICS, ecosystem-modeling
diff --git a/models/stics/NAMESPACE b/models/stics/NAMESPACE
index e30dd908e7a..d7e1ed7ff17 100644
--- a/models/stics/NAMESPACE
+++ b/models/stics/NAMESPACE
@@ -2,5 +2,6 @@
export(met2model.STICS)
export(model2netcdf.STICS)
+export(pecan2stics)
export(write.config.STICS)
importFrom(dplyr,"%>%")
diff --git a/models/stics/NEWS.md b/models/stics/NEWS.md
new file mode 100644
index 00000000000..695ef3500c3
--- /dev/null
+++ b/models/stics/NEWS.md
@@ -0,0 +1,16 @@
+# PEcAn.STICS 1.8.2
+
+* model2netcdf.STICS no longer writes separate `.nc.var` files for every year of output. Use `PEcAn.utils::nc_write_varfiles()` to create these as needed.
+* `write.config.STICS()` now modifies parameters with vectors rather than individually, substantially speeding up XML creation (@qdbell, #3395)
+
+
+
+# PEcAn.STICS 1.8.1
+
+## License change
+* PEcAn.STICS is now distributed under the BSD three-clause license instead of the NCSA Open Source license.
+
+
+# PEcAn.STICS 1.7.1
+
+First public release, numbered to match the current PEcAn version.
\ No newline at end of file
diff --git a/models/stics/R/model2netcdf.STICS.R b/models/stics/R/model2netcdf.STICS.R
index 924c67b3637..8060c32422d 100644
--- a/models/stics/R/model2netcdf.STICS.R
+++ b/models/stics/R/model2netcdf.STICS.R
@@ -103,15 +103,11 @@ model2netcdf.STICS <- function(outdir, sitelat, sitelon, start_date, end_date, o
### Output netCDF data
nc <- ncdf4::nc_create(file.path(outdir, paste(y, "nc", sep = ".")), nc_var)
- varfile <- file(file.path(outdir, paste(y, "nc", "var", sep = ".")), "w")
for (i in seq_along(nc_var)) {
- # print(i)
ncdf4::ncvar_put(nc, nc_var[[i]], outlist[[i]])
- cat(paste(nc_var[[i]]$name, nc_var[[i]]$longname), file = varfile, sep = "\n")
}
- close(varfile)
ncdf4::nc_close(nc)
-
+
} ### End of year loop
diff --git a/models/stics/R/write.config.STICS.R b/models/stics/R/write.config.STICS.R
index 224f57b4986..a25063193cb 100644
--- a/models/stics/R/write.config.STICS.R
+++ b/models/stics/R/write.config.STICS.R
@@ -15,7 +15,7 @@
##' @author Istem Fer
##-------------------------------------------------------------------------------------------------#
write.config.STICS <- function(defaults, trait.values, settings, run.id) {
-
+
## the rest of the code assumes only plant PFTs
## little modification here as not to have a bigger re-write for now
if(any(grepl("soil", names(trait.values)))){
@@ -59,8 +59,8 @@ write.config.STICS <- function(defaults, trait.values, settings, run.id) {
p2 <- ifelse(events_file$rotation$planted_crop2[uic] != "-99.0", tolower(events_file$rotation$planted_crop2[uic]), "")
uname <- paste0(p1,p2)
usmdirs[uic] <- paste0(file.path(settings$host$rundir, run.id, uname), "_",
- lubridate::year(events_file$rotation$rotation_begin[uic]), "-",
- lubridate::year(events_file$rotation$rotation_end[uic]))
+ lubridate::year(events_file$rotation$rotation_begin[uic]), "-",
+ lubridate::year(events_file$rotation$rotation_end[uic]))
}
}else{
@@ -123,13 +123,13 @@ write.config.STICS <- function(defaults, trait.values, settings, run.id) {
}
}
-
+
}
# TODO: have a better way to determine USMs
########################## finish usmdirs
-
+
## make sure rundir and outdir exist
dir.create(rundir, showWarnings = FALSE, recursive = TRUE)
@@ -146,13 +146,13 @@ write.config.STICS <- function(defaults, trait.values, settings, run.id) {
prf_list$entry$text <- rundir
XML::saveXML(PEcAn.settings::listToXml(prf_list, "properties"),
- file = file.path(cfgdir, "preferences.xml"),
- prefix = '\n\n')
+ file = file.path(cfgdir, "preferences.xml"),
+ prefix = '\n\n')
# stics and javastics path
stics_path <- settings$model$binary
-
+
# Per STICS development team, there are two types of STICS inputs
# Global input: _plt.xml, param_gen.xml, param_newform.xml
@@ -164,9 +164,11 @@ write.config.STICS <- function(defaults, trait.values, settings, run.id) {
## this is where we overwrite model parameters
+ # Convert pecan parameters to stics names
+ trait.values <- pecan2stics(trait.values)
# read in template plt file, has all the formalisms
plt_xml <- XML::xmlParse(system.file("crop_plt.xml", package = "PEcAn.STICS"))
- #plt_list <- XML::xmlToList(plt_xml)
+
plt_files <- list()
for (pft in seq_along(trait.values)) {
@@ -175,7 +177,6 @@ write.config.STICS <- function(defaults, trait.values, settings, run.id) {
plant_file <- file.path(rundir, paste0(names(trait.values)[pft], "_plt.xml"))
-
if(names(trait.values)[pft] != "env"){
# save the template, will be overwritten below
XML::saveXML(plt_xml, file = plant_file)
@@ -183,505 +184,69 @@ write.config.STICS <- function(defaults, trait.values, settings, run.id) {
next
}
+ # Apply changes to those parameters specified by trait.values for this pft.
+ if (!is.null(pft.traits)) {
+ SticsRFiles::set_param_xml(plant_file, param = names(pft.traits), values = as.list(unname(pft.traits)), overwrite = TRUE)
+ }
+
plt_files[[pft]] <- plant_file
# to learn the parameters in a plant file
- # SticsRFiles::get_param_info(file_path = plant_file)
-
- # go over each formalism and replace params following the order in crop_plt
- # TODO: vary more params
+ # SticsRFiles::get_param_info()
- # plant name and group
- # effect of atmospheric CO2 concentration
-
- # phasic development
# to see parameters per formalism
# values = SticsRFiles::get_param_xml(plant_file, select = "formalisme", select_value = "phasic development")
# unlist(values)
+ # Creating a dataframe of parameter names and their values for feeding into SticsRFiles::set_param_xml.
+ # Note that the parameters in this data frame are either hardcoded for now or otherwise require special treatment.
+ plt_df <- data.frame(codebfroid = 2) # vernalization requirement, hardcoding for now, 2==yes.
+
# name code of plant in 3 letters
# a handful of plants have to have specific codes, e.g. forages need to be 'fou' and vine needs to be 'vig'
# but others can be anything? if not, either consider a LUT or passing via settings
- if(names(trait.values)[pft] %in% c("frg", "wcl", "alf")){
- codeplante <- 'fou'
- codeperenne <- 2
+ if(names(trait.values)[pft] %in% c("frg", "wcl", "alf")){
+ plt_df$codeplante <- "fou"
+ plt_df$codeperenne <- 2
}else{
- codeplante <- base::substr(names(trait.values)[pft],1,3)
- codeperenne <- 1
- }
- codebfroid <- 2 # vernalization requirement, hardcoding for now, 2==yes
- SticsRFiles::set_param_xml(plant_file, "codeplante", codeplante, overwrite = TRUE)
- SticsRFiles::set_param_xml(plant_file, "codeperenne", codeperenne, overwrite = TRUE)
- SticsRFiles::set_param_xml(plant_file, "codebfroid", codebfroid, overwrite = TRUE)
-
- # minimum temperature below which development stops (degree C)
- if ("tdmin" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "tdmin", pft.traits[which(pft.names == "tdmin")], overwrite = TRUE)
- }
-
- # maximum temperature above which development stops (degree C)
- if ("tdmax" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "tdmax", pft.traits[which(pft.names == "tdmax")], overwrite = TRUE)
- }
-
- # basal photoperiod
- if ("phobase" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "phobase", pft.traits[which(pft.names == "phobase")], overwrite = TRUE)
- }
-
- # saturating photoperiod
- if ("phosat" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "phosat", pft.traits[which(pft.names == "phosat")], overwrite = TRUE)
- }
-
-
- # maximum phasic delay allowed due to stresses
- if ("phasic_delay_max" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "stressdev", pft.traits[which(pft.names == "phasic_delay_max")], overwrite = TRUE)
- }
-
- # minimum number of vernalising days (d) [0,7]
- if ("vernalization_days_min" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "jvcmini", round(pft.traits[which(pft.names == "vernalization_days_min")]), overwrite = TRUE)
- }
-
- # day of initiation of vernalisation in perennial crops (julian d) [1,731]
- # this only takes effect for perennial crops
- if ("vernalization_init" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "julvernal", round(pft.traits[which(pft.names == "vernalization_init")]), overwrite = TRUE)
- }
-
- # optimal temperature for vernalisation (degreeC)
- if ("vernalization_TOpt" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "tfroid", pft.traits[which(pft.names == "vernalization_TOpt")], overwrite = TRUE)
- }
-
- # semi thermal amplitude for vernalising effect (degreeC)
- if ("vernalization_TAmp" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "ampfroid", pft.traits[which(pft.names == "vernalization_TAmp")], overwrite = TRUE)
- }
-
- if ("coeflevamf" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "coeflevamf", pft.traits[which(pft.names == "coeflevamf")], overwrite = TRUE)
- }
-
- if ("coefamflax" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "coefamflax", pft.traits[which(pft.names == "coefamflax")], overwrite = TRUE)
- }
-
- if ("coeflaxsen" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "coeflaxsen", pft.traits[which(pft.names == "coeflaxsen")], overwrite = TRUE)
- }
-
- if ("coefsenlan" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "coefsenlan", pft.traits[which(pft.names == "coefsenlan")], overwrite = TRUE)
- }
-
- if ("coeflevdrp" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "coeflevdrp", pft.traits[which(pft.names == "coeflevdrp")], overwrite = TRUE)
- }
-
- if ("coefdrpmat" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "coefdrpmat", pft.traits[which(pft.names == "coefdrpmat")], overwrite = TRUE)
- }
-
- if ("coefflodrp" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "coefflodrp", pft.traits[which(pft.names == "coefflodrp")], overwrite = TRUE)
- }
-
-
- # emergence and starting
- # values = SticsRFiles::get_param_xml(plant_file, select = "formalisme", select_value = "emergence and starting")
- # unlist(values)
-
- # minimum temperature below which emergence is stopped (degreeC)
- if ("emergence_Tmin" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "tgmin", pft.traits[which(pft.names == "emergence_Tmin")], overwrite = TRUE)
+ plt_df$codeplante <- base::substr(names(trait.values)[pft],1,3)
+ plt_df$codeperenne <- 1
}
# nbfeuilplant, leaf number per plant when planting, default 0, skipping for now
-
# this is a switch, for now hardcoding to have delay at the beginning of the crop (1)
# if starting the simulation from a later stage (e.g. lev) this has no effect
# codegermin, option of simulation of a germination phase or a delay at the beginning of the crop (1) or direct starting (2)
- SticsRFiles::set_param_xml(plant_file, "codegermin", 1, overwrite = TRUE)
+ plt_df$codegermin <- 1
- # cumulative thermal time allowing germination (degree-d)
- if ("cum_thermal_germin" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "stpltger", pft.traits[which(pft.names == "cum_thermal_germin")], overwrite = TRUE)
- }
-
- # skipping the other parameters related to this switch, they don't seem influential, at least on NPP and LAI
+ # skipping the other parameters related to this switch for now
# potgermi: soil water potential under which seed imbibition is impeded
# nbjgerlim: maximum number of days after grain imbibition allowing full germination
# propjgermin: minimal proportion of the duration nbjgerlim when the temperature is higher than the temperature threshold Tdmax
-
- # parameter of the curve of coleoptile elongation
- if ("belong" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "belong", pft.traits[which(pft.names == "belong")], overwrite = TRUE)
- }
-
- # parameter of the plantlet elongation curve
- if ("celong" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "celong", pft.traits[which(pft.names == "celong")], overwrite = TRUE)
- }
-
- # maximum elongation of the coleoptile in darkness condition
- if ("coleoptile_elong_dark_max" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "elmax", pft.traits[which(pft.names == "coleoptile_elong_dark_max")], overwrite = TRUE)
- }
-
- # number of days after germination after which plant emergence is reduced
- if ("days_reduced_emergence_postgerm" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "nlevlim1", round(pft.traits[which(pft.names == "days2reduced_emergence_postgerm")]), overwrite = TRUE)
- }
-
- # number of days after germination after which plant emergence is impossible
- if ("days2stopped_emergence_postgerm" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "nlevlim2", round(pft.traits[which(pft.names == "days2stopped_emergence_postgerm")]), overwrite = TRUE)
- }
-
- # plant vigor index allowing to emerge through a soil crust, vigueurbat == 1 inactivates some soil crust related parameters, skipping for now
-
- # there are also "planting" related parameters
-
- # leaves
- # values = SticsRFiles::get_param_xml(plant_file, select = "formalisme", select_value = "leaves")
- # unlist(values)
-
-
- # phyllotherme, thermal duration between the apparition of two successive leaves on the main stem (degree day)
- # assuming this is the same as phyllochron
- if ("phyllochron" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "phyllotherme", pft.traits[which(pft.names == "phyllochron")], overwrite = TRUE)
- }
-
- # minimal density above which interplant competition starts (m-2)
- if ("dens_comp" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "bdens", pft.traits[which(pft.names == "dens_comp")], overwrite = TRUE)
- }
-
- # LAI above which competition between plants starts (m2 m-2)
- if ("lai_comp" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "laicomp", pft.traits[which(pft.names == "lai_comp")], overwrite = TRUE)
- }
-
- # basal height of crop (m)
- if ("height" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "hautbase", pft.traits[which(pft.names == "height")], overwrite = TRUE)
- }
-
- # maximum height of crop
- if ("HTMAX" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "hautmax", pft.traits[which(pft.names == "HTMAX")], overwrite = TRUE)
- }
-
- # minimum temperature at which growth ceases
- if ("tcmin_growth" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "tcmin", pft.traits[which(pft.names == "tcmin_growth")], overwrite = TRUE)
- }
-
- # maximum temperature at which growth ceases
- if ("tcmax_growth" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "tcmax", pft.traits[which(pft.names == "tcmax_growth")], overwrite = TRUE)
- }
-
# temperature beyond which foliar growth stops
- if ("tcmax_foliar_growth" %in% pft.names) {
+ if ("tcxstop" %in% pft.names || "tdmax" %in% pft.names) {
# tcxstop must be > tdmax, priors should be set that way, and we can let the simulation fail afterwards, but putting a warning here
- tdmax <- SticsRFiles::get_param_xml(plant_file, param="tdmax", select = "formalisme", select_value = "phasic development")[[1]][[1]]
- tcxstop <- pft.traits[which(pft.names == "tcmax_foliar_growth")]
+ # Retrieve the new values if they exist, otherwise read them from the plant file
+ if ("tcxstop" %in% pft.names) {
+ tcxstop <- pft.traits[which(pft.names == "tcxstop")]
+ } else {
+ tcxstop <- SticsRFiles::get_param_xml(plant_file, param="tcxstop", select = "formalisme", select_value = "leaves")[[1]][[1]]
+ }
+ if ("tdmax" %in% pft.names) {
+ tdmax <- pft.traits[which(pft.names == "tdmax")]
+ } else {
+ tdmax <- SticsRFiles::get_param_xml(plant_file, param="tdmax", select = "formalisme", select_value = "phasic development")[[1]][[1]]
+ }
if(tcxstop < tdmax){
PEcAn.logger::logger.warn("tcmax_foliar_growth value (", tcxstop, ") should be greater than tdmax (", tdmax, ").")
}
- SticsRFiles::set_param_xml(plant_file, "tcxstop", tcxstop, overwrite = TRUE)
-
- }
-
- # ulai at the inflexion point of the function DELTAI=f(ULAI)
- if ("vlaimax" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "vlaimax", pft.traits[which(pft.names == "vlaimax")], overwrite = TRUE)
- }
-
- # parameter of the logistic curve of LAI growth
- if ("pentlaimax" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "pentlaimax", pft.traits[which(pft.names == "pentlaimax")], overwrite = TRUE)
- }
-
- # ulai from which the rate of leaf growth decreases
- if ("udlaimax" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "udlaimax", pft.traits[which(pft.names == "udlaimax")], overwrite = TRUE)
- }
-
- # life span of early leaves expressed as a fraction of the life span of the last leaves emitted DURVIEF
- if ("early2last_leaflife" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "ratiodurvieI", pft.traits[which(pft.names == "early2last_leaflife")], overwrite = TRUE)
- }
-
- # fraction of senescent biomass (relative to total biomass)
- if ("senes2total_biomass" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "ratiosen", pft.traits[which(pft.names == "senes2total_biomass")], overwrite = TRUE)
- }
-
- # fraction of senescent leaves falling to the soil
- # not sure if this is supposed to be a fraction or a percentage in STICS, values look like a fraction but min-max is given as 0-100
- # treating it like a fraction for now
- if ("fracLeafFall" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "abscission", pft.traits[which(pft.names == "fracLeafFall")], overwrite = TRUE)
- }
-
- # parameter relating the C/N of dead leaves and the INN
- if ("parazofmorte" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "parazofmorte", pft.traits[which(pft.names == "parazofmorte")], overwrite = TRUE)
- }
-
- # parameter of the N stress function active on leaf expansion (INNLAI), bilinear function vs INN passing through the point (INNmin, INNturgmin)
- if ("innturgmin" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "innturgmin", pft.traits[which(pft.names == "innturgmin")], overwrite = TRUE)
- }
-
- # accelerating parameter for the lai growth rate
- if ("lai_growth_rate_accelerating" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "dlaimin", pft.traits[which(pft.names == "lai_growth_rate_accelerating")], overwrite = TRUE)
- }
-
- # maximum rate of the setting up of LAI
- if ("lai_max_rate" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "dlaimaxbrut", pft.traits[which(pft.names == "lai_max_rate")], overwrite = TRUE)
- }
-
- # relative additional lifespan due to N excess in plant (INN > 1)
- if ("relative_addlifespan_DT_excessN" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "durviesupmax", pft.traits[which(pft.names == "relative_addlifespan_DT_excessN")], overwrite = TRUE)
- }
-
- # parameter of the N stress function active on senescence (INNsenes), bilinear function vs INN passing through the point (INNmin, INNsen)
- if ("innsen" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "innsen", pft.traits[which(pft.names == "innsen")], overwrite = TRUE)
- }
-
- # threshold soil water content active to simulate water senescence stress as a proportion of the turgor stress
- if ("rapsenturg" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "rapsenturg", pft.traits[which(pft.names == "rapsenturg")], overwrite = TRUE)
- }
-
-
- # radiation interception
- # values = SticsRFiles::get_param_xml(plant_file, select = "formalisme", select_value = "radiation interception")
-
- # extinction coefficient of photosynthetic active radiation in the canopy
- if ("extinction_coefficient_diffuse" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "extin", pft.traits[which(pft.names == "extinction_coefficient_diffuse")], overwrite = TRUE)
- }
-
- # shoot biomass growth
- # values = SticsRFiles::get_param_xml(plant_file, select = "formalisme", select_value = "shoot biomass growth")
-
- # minimum temperature for development
- if ("temin" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "temin", pft.traits[which(pft.names == "temin")], overwrite = TRUE)
- }
-
- # maximal temperature above which plant growth stops
- if ("temax" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "temax", pft.traits[which(pft.names == "temax")], overwrite = TRUE)
- }
-
- # optimal temperature (1/2) for plant growth
- if ("teopt" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "teopt", pft.traits[which(pft.names == "teopt")], overwrite = TRUE)
- }
-
- # optimal temperature (2/2) for plant growth
- if ("teoptbis" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "teoptbis", pft.traits[which(pft.names == "teoptbis")], overwrite = TRUE)
- }
-
- # maximum radiation use efficiency during the juvenile phase
- if ("RUE_juv" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "efcroijuv", pft.traits[which(pft.names == "RUE_juv")], overwrite = TRUE)
- }
-
- # maximum radiation use efficiency during the vegetative stage
- if ("RUE_veg" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "efcroiveg", pft.traits[which(pft.names == "RUE_veg")], overwrite = TRUE)
- }
-
- # maximum radiation use efficiency during the grain filling phase
- if ("RUE_rep" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "efcroirepro", pft.traits[which(pft.names == "RUE_rep")], overwrite = TRUE)
- }
-
- # fraction of daily remobilisable C reserves
- if ("remobres" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "remobres", pft.traits[which(pft.names == "remobres")], overwrite = TRUE)
- }
-
- # ratio biomass / useful height cut of crops (t.ha-1.m-1)
- if ("biomass2usefulheight" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "coefmshaut", pft.traits[which(pft.names == "biomass2usefulheight")], overwrite = TRUE)
- }
-
-
- # partitioning of biomass in organs
- # values = SticsRFiles::get_param_xml(plant_file, select = "formalisme", select_value = "partitioning of biomass in organs")
-
- # maximum SLA (specific leaf area) of green leaves (cm2 g-1)
- if ("SLAMAX" %in% pft.names) {
- slamax <- pft.traits[which(pft.names == "SLAMAX")]
- slamax <- PEcAn.utils::ud_convert(PEcAn.utils::ud_convert(slamax, "m2", "cm2"), "kg-1", "g-1") # m2 kg-1 to cm2 g-1
- SticsRFiles::set_param_xml(plant_file, "slamax", slamax, overwrite = TRUE)
- }
-
- # minimum SLA (specific leaf area) of green leaves (cm2 g-1)
- if ("SLAMIN" %in% pft.names) {
- slamin <- pft.traits[which(pft.names == "SLAMIN")]
- slamin <- PEcAn.utils::ud_convert(PEcAn.utils::ud_convert(slamin, "m2", "cm2"), "kg-1", "g-1") # m2 kg-1 to cm2 g-1
- SticsRFiles::set_param_xml(plant_file, "slamin", slamin, overwrite = TRUE)
- }
-
-
- # ratio stem (structural part)/leaf
- if ("stem2leaf" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "tigefeuil", pft.traits[which(pft.names == "stem2leaf")], overwrite = TRUE)
- }
-
- # skipping: envfruit, fraction of envelop in grainmaxi (w:w)
- # skipping: sea, specific area of fruit envelops
-
- # yield formation, will get back
-
- # roots
- # values = SticsRFiles::get_param_xml(plant_file, select = "formalisme", select_value = "roots")
-
-
- # sensanox, index of anoxia sensitivity (0 = insensitive), 0 for now
- # stoprac, stage when root growth stops (LAX= maximum leaf area index, end of leaf growth or SEN=beginning of leaf senescence)
-
- # sensrsec, index of root sensitivity to drought (1=insensitive)
- if ("rootsens2drought" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "sensrsec", pft.traits[which(pft.names == "rootsens2drought")], overwrite = TRUE)
- }
-
- # contrdamax, maximal reduction in root growth rate due to soil strengthness (high bulk density)
- if ("db_reduc_rgr_max" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "contrdamax", pft.traits[which(pft.names == "db_reduc_rgr_max")], overwrite = TRUE)
- }
-
- # draclong, maximum rate of root length production per plant (cm plant-1 degreeD-1)
- if ("rootlength_prod_max" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "draclong", pft.traits[which(pft.names == "rootlength_prod_max")], overwrite = TRUE)
- }
-
- # debsenrac, sum of degrees-days defining the beginning of root senescence (root life time) (degreeD)
- if ("root_sen_dday" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "debsenrac", round(pft.traits[which(pft.names == "root_sen_dday")]), overwrite = TRUE)
- }
-
- #lvfront, root density at the root apex (cm cm-3)
- if ("rootdens_at_apex" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "lvfront", pft.traits[which(pft.names == "rootdens_at_apex")], overwrite = TRUE)
- }
-
- # longsperac - specific root length (cm g-1)
- if ("SRL" %in% pft.names) {
- srl_val <- PEcAn.utils::ud_convert(pft.traits[which(pft.names == "SRL")], "m", "cm")
- SticsRFiles::set_param_xml(plant_file, "longsperac", srl_val, overwrite = TRUE)
+ # TODO: Do we force one of these to change or let the simulation fail?
}
# option to activate the N influence on root partitioning within the soil profile (1 = yes, 2 = no)
- SticsRFiles::set_param_xml(plant_file, "codazorac", 1, overwrite = TRUE)
-
- # reduction factor on root growth when soil mineral N is limiting (< minazorac)
- if ("minefnra" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "minefnra", pft.traits[which(pft.names == "minefnra")], overwrite = TRUE)
- }
-
- # mineral N concentration in soil below which root growth is reduced (kg.ha-1.cm-1)
- if ("minazorac" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "minazorac", pft.traits[which(pft.names == "minazorac")], overwrite = TRUE)
- }
-
- # mineral N concentration in soil above which root growth is maximum (kg.ha-1.cm-1)
- if ("maxazorac" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "maxazorac", pft.traits[which(pft.names == "maxazorac")], overwrite = TRUE)
- }
-
- # frost
-
- # formalism - water
-
- # psisto, potential of stomatal closing (absolute value) (bars)
- # note: units in betyDB are m, but my prior is for testing
- if ("psi_stomata_closure" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "psisto", pft.traits[which(pft.names == "psi_stomata_closure")], overwrite = TRUE)
- }
-
- # psiturg, potential of the beginning of decrease of the cellular extension (absolute value) (bars)
- # may or may not be leaf_psi_tlp in betyDB
- if ("leaf_psi_tlp" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "psiturg", pft.traits[which(pft.names == "leaf_psi_tlp")], overwrite = TRUE)
- }
-
- # h2ofeuilverte, water content of green leaves (relative to fresh matter) (g g-1)
- # may or may not be water_content_TLP_leaf in betyDB
- if ("water_content_TLP_leaf" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "h2ofeuilverte", pft.traits[which(pft.names == "water_content_TLP_leaf")], overwrite = TRUE)
- }
-
- # skipping:
- # h2ofeuiljaune
- # h2otigestruc
- # h2otigestruc
- # h2ofrvert
- # deshydbase
- # tempdeshyd
-
- # kmax, maximum crop coefficient for water requirements (=MET/PET)
- if ("crop_water_max" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "kmax", pft.traits[which(pft.names == "crop_water_max")], overwrite = TRUE)
- }
-
- # nitrogen
- # masecNmax
- if ("masecNmax" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "masecNmax", pft.traits[which(pft.names == "masecNmax")], overwrite = TRUE)
- }
-
- # Nreserve
- if ("Nreserve" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "Nreserve", pft.traits[which(pft.names == "Nreserve")], overwrite = TRUE)
- }
-
-
- # Kmabs1
- if ("Kmabs1" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "Kmabs1", pft.traits[which(pft.names == "Kmabs1")], overwrite = TRUE)
- }
-
- # adil
- if ("adil" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "adil", pft.traits[which(pft.names == "adil")], overwrite = TRUE)
- }
-
- # bdil
- if ("bdil" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "bdil", pft.traits[which(pft.names == "bdil")], overwrite = TRUE)
- }
-
- # INNmin
- if ("INNmin" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "INNmin", pft.traits[which(pft.names == "INNmin")], overwrite = TRUE)
- }
-
- # Nmeta
- if ("Nmeta" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "Nmeta", pft.traits[which(pft.names == "Nmeta")]*100, overwrite = TRUE)
- }
-
- # correspondance code BBCH
+ plt_df$codazorac <- 1
# cultivar parameters
# values = SticsRFiles::get_param_xml(plant_file, select = "formalisme", select_value = "cultivar parameters")
@@ -689,53 +254,10 @@ write.config.STICS <- function(defaults, trait.values, settings, run.id) {
# there are multiple cultivars (varietes) in plt file
# for now I assume we will always use only #1 in simulations
# hence, _tec file will always say variete==1, if you change the logic don't forget to update handling of the _tec file accordingly
+ # by default set_param_xml modifies the given parameter in all cultivars.
- # maximal lifespan of an adult leaf expressed in summation of Q10=2 (2**(T-Tbase))
- if ("leaf_lifespan_max" %in% pft.names) {
- # this will modifies all varietes' durvieFs by default
- SticsRFiles::set_param_xml(plant_file, "durvieF", pft.traits[which(pft.names == "leaf_lifespan_max")], overwrite = TRUE)
- # see example for setting a particular (the Grindstad) cultivar param
- # SticsRFiles::set_param_xml(plant_file, "durvieF", pft.traits[which(pft.names == "leaf_lifespan_max")], select = "Grindstad", overwrite = TRUE)
- }
-
- # cumulative thermal time between the stages LEV (emergence) and AMF (maximum acceleration of leaf growth, end of juvenile phase)
- if ("cum_thermal_juvenile" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "stlevamf", pft.traits[which(pft.names == "cum_thermal_juvenile")], overwrite = TRUE)
- }
-
- # cumulative thermal time between the stages AMF (maximum acceleration of leaf growth, end of juvenile phase) and LAX (maximum leaf area index, end of leaf growth)
- if ("cum_thermal_growth" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "stamflax", pft.traits[which(pft.names == "cum_thermal_growth")], overwrite = TRUE)
- }
-
- # cumulative thermal time between the stages LEV (emergence) and DRP (starting date of filling of harvested organs)
- if ("cum_thermal_filling" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "stlevdrp", pft.traits[which(pft.names == "cum_thermal_filling")], overwrite = TRUE)
- }
-
- if ("adens" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "adens", pft.traits[which(pft.names == "adens")], overwrite = TRUE)
- }
-
- if ("croirac" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "croirac", pft.traits[which(pft.names == "croirac")], overwrite = TRUE)
- }
-
- # extinction coefficient connecting LAI to crop height
- if ("LAI2height" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "khaut", pft.traits[which(pft.names == "LAI2height")], overwrite = TRUE)
- }
-
- # average root radius
- if ("rayon" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "rayon", pft.traits[which(pft.names == "rayon")], overwrite = TRUE)
- }
-
- # minimal value for drought stress index
- if ("swfacmin" %in% pft.names) {
- SticsRFiles::set_param_xml(plant_file, "swfacmin", pft.traits[which(pft.names == "swfacmin")], overwrite = TRUE)
- }
-
+ # Set the parameters that have been added to plt_df in the plant file.
+ SticsRFiles::set_param_xml(plant_file, names(plt_df), plt_df[1, ], overwrite = TRUE)
# convert xml2txt
if(names(trait.values)[pft] != "env"){
SticsRFiles::convert_xml2txt(file = plant_file)
@@ -756,11 +278,18 @@ write.config.STICS <- function(defaults, trait.values, settings, run.id) {
## these also have plant parameters as well as soil
## at the moment everything is treated as params, but some could be IC or come from the events file
- # these parameters won't change as crop changes in a continous rotation
+ # these parameters won't change as crop changes in a continuous rotation
+
+ # Convert pecan parameters to stics names for soil
+ # prepare for pecan2stics call, expects a list
+ soil_params_list <- list()
+ soil_params_list[[1]] <- soil_params
+ soil_params <- pecan2stics(soil_params_list)[[1]]
soil.names <- names(soil_params)
for (pft in seq_along(trait.values)) {
+
if(names(trait.values)[pft] == "env"){
next
}
@@ -768,382 +297,29 @@ write.config.STICS <- function(defaults, trait.values, settings, run.id) {
gen_xml <- XML::xmlParse(system.file("param_gen.xml", package = "PEcAn.STICS"))
gen_file <- file.path(rundir, "param_gen.xml")
XML::saveXML(gen_xml, file = gen_file)
- codeinitprec <- ifelse(length(usmdirs>1), 1, 2)
- SticsRFiles::set_param_xml(gen_file, "codeinitprec", codeinitprec, overwrite = TRUE)
+ # This input file is created from the template and not modified.
newf_xml <- XML::xmlParse(system.file("param_newform.xml", package = "PEcAn.STICS"))
newf_file <- file.path(rundir, "param_newform.xml")
XML::saveXML(newf_xml, file = newf_file)
-
-
- pft.traits <- unlist(trait.values[[pft]])
- pft.names <- names(pft.traits)
-
- ### Shoot growth
- # parameter defining radiation effect on conversion efficiency
- if ("rad_on_conversion_eff" %in% pft.names) {
- SticsRFiles::set_param_xml(gen_file, "coefb", pft.traits[which(pft.names == "rad_on_conversion_eff")], overwrite = TRUE)
- }
-
- # ratio of root mass to aerial mass at harvest
- if ("root2aerial_harvest" %in% pft.names) {
- SticsRFiles::set_param_xml(gen_file, "proprac", pft.traits[which(pft.names == "root2aerial_harvest")], overwrite = TRUE)
- }
-
- # minimal amount of root mass at harvest (when aerial biomass is nil) t.ha-1
- if ("rootmin_harvest" %in% pft.names) {
- SticsRFiles::set_param_xml(gen_file, "y0msrac", pft.traits[which(pft.names == "rootmin_harvest")], overwrite = TRUE)
- }
-
- ### Root growth
-
- # bulk density of soil below which root growth is reduced due to a lack of soil cohesion (g.cm-3)
- if ("bd_rootgrowth_reduced" %in% pft.names) {
- SticsRFiles::set_param_xml(gen_file, "dacohes", pft.traits[which(pft.names == "bd_rootgrowth_reduced")], overwrite = TRUE)
- }
-
- # bulk density of soil above which root growth is maximal (g.cm-3)
- if ("bd_rootgrowth_maximal" %in% pft.names) {
- SticsRFiles::set_param_xml(gen_file, "daseuilbas", pft.traits[which(pft.names == "bd_rootgrowth_maximal")], overwrite = TRUE)
- }
-
- # bulk density of soil above which root growth becomes impossible (g.cm-3)
- if ("bd_rootgrowth_impossible" %in% pft.names) {
- SticsRFiles::set_param_xml(gen_file, "daseuilhaut", pft.traits[which(pft.names == "bd_rootgrowth_impossible")], overwrite = TRUE)
- }
-
- ### Water absorption and nitrogen content of the plant
-
- # parameter of increase of maximal transpiration when a water stress occurs
- if ("maxTPincrease_waterstress" %in% pft.names) {
- SticsRFiles::set_param_xml(gen_file, "beta", pft.traits[which(pft.names == "maxTPincrease_waterstress")], overwrite = TRUE)
- }
-
- # root length density (RLD) above which water and N uptake are maximum and independent of RLD
- if ("lvopt" %in% pft.names) {
- SticsRFiles::set_param_xml(gen_file, "lvopt", pft.traits[which(pft.names == "lvopt")], overwrite = TRUE)
- }
-
- # diffusion coefficient of nitrate N in soil at field capacity
- if ("difN_FC" %in% soil.names) {
- SticsRFiles::set_param_xml(gen_file, "difN", soil_params[which(soil.names == "difN_FC")], overwrite = TRUE)
- }
-
- # skipping
- # concrr: inorganic N concentration (NH4+NO3-N) in the rain
-
- # minimal amount of rain required to start an automatic fertilisation (N mm.d-1)
- if ("plNmin" %in% soil.names) {
- SticsRFiles::set_param_xml(gen_file, "plNmin", soil_params[which(soil.names == "plNmin")], overwrite = TRUE)
- }
-
- # skipping, irrlev:
- # amount of irrigation applied automatically on the sowing day to allow germination when the model calculates automaticaly
- # the amount of irrigations or when the irrigation dates are calculated by sum of temperature
-
- # minimal amount of N in the plant required to compute INN (kg.ha-1)
- if ("QNpltminINN" %in% pft.names) {
- SticsRFiles::set_param_xml(gen_file, "QNpltminINN", pft.traits[which(pft.names == "QNpltminINN")], overwrite = TRUE)
- }
-
- ### Soil C and N processes and fertiliser losses
-
- # minimal temperature for decomposition of humified organic matter (degreeC)
- if ("tmin_mineralisation" %in% soil.names) {
- SticsRFiles::set_param_xml(gen_file, "tmin_mineralisation", soil_params[which(soil.names == "tmin_mineralisation")], overwrite = TRUE)
- }
-
- # parameter (1/2) of the temperature function on humus decomposition rate
- if ("T_p1_Hdecomp_rate" %in% soil.names) {
- SticsRFiles::set_param_xml(gen_file, "FTEMh", soil_params[which(soil.names == "T_p1_Hdecomp_rate")], overwrite = TRUE)
- }
-
- # parameter (2/2) of the temperature function on humus decomposition rate
- if ("T_p2_Hdecomp_rate" %in% soil.names) {
- SticsRFiles::set_param_xml(gen_file, "FTEMha", soil_params[which(soil.names == "T_p2_Hdecomp_rate")], overwrite = TRUE)
- }
-
- # reference temperature for decomposition of humified organic matter
- if ("T_r_HOMdecomp" %in% soil.names) {
- SticsRFiles::set_param_xml(gen_file, "TREFh", soil_params[which(soil.names == "T_r_HOMdecomp")], overwrite = TRUE)
- }
-
- # parameter (1/2) of the temperature function on decomposition rate of organic residues
- if ("FTEMr" %in% soil.names) {
- SticsRFiles::set_param_xml(gen_file, "FTEMr", soil_params[which(soil.names == "FTEMr")], overwrite = TRUE)
- }
-
- # parameter (2/2) of the temperature function on decomposition rate of organic residues
- if ("FTEMra" %in% soil.names) {
- SticsRFiles::set_param_xml(gen_file, "FTEMra", soil_params[which(soil.names == "FTEMra")], overwrite = TRUE)
- }
-
- # reference temperature for decomposition of organic residues
- if ("T_r_ORdecomp" %in% soil.names) {
- SticsRFiles::set_param_xml(gen_file, "TREFr", soil_params[which(soil.names == "T_r_ORdecomp")], overwrite = TRUE)
- }
-
- # TODO: come back to these
- # # not used anymore, or at least not with this name!!!
- # # relative potential mineralization rate: K2 = fmin1 * exp(- fmin2*argi) / (1+fmin3*calc)
- # if ("FMIN1" %in% soil.names) {
- # SticsRFiles::set_param_xml(gen_file, "FMIN1", soil_params[which(soil.names == "FMIN1")], overwrite = TRUE)
- # }
- #
- # # not used anymore, or at least not with this name!!!
- # # parameter defining the effect of clay on the potential mineralization rate: K2 = fmin1 * exp(-fmin2*argi) / (1+fmin3*calc)
- # if ("FMIN2" %in% soil.names) {
- # SticsRFiles::set_param_xml(gen_file, "FMIN2", soil_params[which(soil.names == "FMIN2")], overwrite = TRUE)
- # }
- #
- # # not used anymore, or at least not with this name!!!
- # # parameter defining the effect of CaCO3 on the potential mineralization rate: K2 = fmin1 * exp(-fmin2*argi) / (1+fmin3*calc)
- # if ("FMIN3" %in% soil.names) {
- # SticsRFiles::set_param_xml(gen_file, "FMIN3", soil_params[which(soil.names == "FMIN3")], overwrite = TRUE)
- # }
-
- # N/C ratio of soil humus
- if ("Wh" %in% soil.names) {
- SticsRFiles::set_param_xml(gen_file, "Wh", soil_params[which(soil.names == "Wh")], overwrite = TRUE)
- }
-
- # soil pH below which NH3 volatilisation derived from fertiliser is nil
- if ("pHminvol" %in% soil.names) {
- SticsRFiles::set_param_xml(gen_file, "pHminvol", soil_params[which(soil.names == "pHminvol")], overwrite = TRUE)
- }
-
- # soil pH above which NH3 volatilisation derived from fertiliser is maximum
- if ("pHmaxvol" %in% soil.names) {
- SticsRFiles::set_param_xml(gen_file, "pHmaxvol", soil_params[which(soil.names == "pHmaxvol")], overwrite = TRUE)
- }
-
- # N uptake rate at which fertilizer loss is divided by 2
- if ("Nupt_fertloss_halve" %in% soil.names) {
- SticsRFiles::set_param_xml(gen_file, "Vabs2", soil_params[which(soil.names == "Nupt_fertloss_halve")], overwrite = TRUE)
- }
-
- # maximal amount of N immobilised in soil derived from the mineral fertilizer
- if ("maxNimm_mineralfert" %in% soil.names) {
- SticsRFiles::set_param_xml(gen_file, "Xorgmax", soil_params[which(soil.names == "maxNimm_mineralfert")], overwrite = TRUE)
- }
- # relative water content (fraction of field capacity) below which mineralisation rate is nil
- if ("hminm" %in% soil.names) {
- SticsRFiles::set_param_xml(gen_file, "hminm", soil_params[which(soil.names == "hminm")], overwrite = TRUE)
- }
-
- # relative water content (fraction of field capacity) below which mineralisation rate is maximum
- if ("hoptm" %in% soil.names) {
- SticsRFiles::set_param_xml(gen_file, "hoptm", soil_params[which(soil.names == "hoptm")], overwrite = TRUE)
- }
-
- # skipping, alphaph:
- # maximal soil pH variation per unit of inorganic N added with slurry
-
- # skipping, dphvolmax:
- # maximal pH increase following the application of slurry
-
- # skipping, phvols:
- # parameter used to calculate the variation of soil pH after the addition of slurry
- # relative soil mineralisation rate at water saturation
- if ("fhminsat" %in% soil.names) {
- SticsRFiles::set_param_xml(gen_file, "fhminsat", soil_params[which(soil.names == "fhminsat")], overwrite = TRUE)
- }
-
- # reduction factor of decomposition rate of organic residues when mineral N is limiting
- if ("Nlim_reductionOMdecomp" %in% soil.names) {
- SticsRFiles::set_param_xml(gen_file, "fredkN", soil_params[which(soil.names == "Nlim_reductionOMdecomp")], overwrite = TRUE)
- }
+ # Creating a dataframe of parameter names and their values for feeding into SticsRFiles::set_param_xml.
+ # Note that the parameters in this data frame are either hardcoded for now or otherwise require special treatment.
+ gen_df <- data.frame(codeinitprec = ifelse(length(usmdirs) > 1, 1, 2)) # reset initial conditions in chained simulations
- # reduction factor of decomposition rate of microbial biomass when mineral N is limiting
- if ("Nlim_reductionMBdecomp" %in% soil.names) {
- SticsRFiles::set_param_xml(gen_file, "fredlN", soil_params[which(soil.names == "Nlim_reductionMBdecomp")], overwrite = TRUE)
- }
-
- # minimal value for the ratio N/C of the microbial biomass when N limits decomposition
- if ("fNCbiomin" %in% soil.names) {
- SticsRFiles::set_param_xml(gen_file, "fNCbiomin", soil_params[which(soil.names == "fNCbiomin")], overwrite = TRUE)
- }
-
- # additional reduction factor of residues decomposition rate when mineral N is very limited in soil
- if ("fredNsup" %in% soil.names) {
- SticsRFiles::set_param_xml(gen_file, "fredNsup", soil_params[which(soil.names == "fredNsup")], overwrite = TRUE)
- }
-
- # maximum priming ratio (relative to SOM decomposition SD rate)
- if ("Primingmax" %in% soil.names) {
- SticsRFiles::set_param_xml(gen_file, "Primingmax", soil_params[which(soil.names == "Primingmax")], overwrite = TRUE)
- }
-
- ### Nitrification, denitrification and associated N2O emissions
- ### TODO: modify these params
-
- ### Soil hydrology and compaction
-
- # minimal amount of rain required to produce runoff (mm.d-1)
- if ("precmin4runoff" %in% soil.names) {
- SticsRFiles::set_param_xml(gen_file, "pminruis", soil_params[which(soil.names == "precmin4runoff")], overwrite = TRUE)
- }
-
- # soil thermal diffusivity (cm2.s-1)
- if ("soil_thermal_diffusivity" %in% soil.names) {
- SticsRFiles::set_param_xml(gen_file, "diftherm", soil_params[which(soil.names == "soil_thermal_diffusivity")], overwrite = TRUE)
- }
-
- # skipping, bformnappe:
- # coefficient for the water table shape (artificially drained soil)
-
- # drain radius (cm)
- if ("rdrain" %in% soil.names) {
- SticsRFiles::set_param_xml(gen_file, "rdrain", soil_params[which(soil.names == "rdrain")], overwrite = TRUE)
- }
-
- # soil water potential corresponding to wilting point (Mpa)
- if ("SWP_WP" %in% soil.names) {
- SticsRFiles::set_param_xml(gen_file, "psihumin", soil_params[which(soil.names == "SWP_WP")], overwrite = TRUE)
- }
-
- # soil water potential corresponding to field capacity (Mpa)
- if ("SWP_FC" %in% soil.names) {
- SticsRFiles::set_param_xml(gen_file, "psihucc", soil_params[which(soil.names == "SWP_FC")], overwrite = TRUE)
- }
-
- # soil moisture content (fraction of field capacity) above which compaction may occur and delay sowing
- if ("SMC_compaction_delay_sow" %in% pft.names) {
- SticsRFiles::set_param_xml(gen_file, "prophumtasssem", pft.traits[which(pft.names == "SMC_compaction_delay_sow")], overwrite = TRUE)
- }
-
- # soil moisture content (fraction of field capacity) above which compaction may occur and delay harvest
- if ("SMC_compaction_delay_harvest" %in% pft.names) {
- SticsRFiles::set_param_xml(gen_file, "prophumtassrec", pft.traits[which(pft.names == "SMC_compaction_delay_harvest")], overwrite = TRUE)
- }
-
- ### skipping
- ### Soil tillage if soil compaction activated
-
- ### Typology of pebbles fertilisers and residues
- ### should some of these parameters come from event files?
-
- ### codetypeng: Types of mineral fertilisers - 1 atm
- # 1: Ammonium.nitrate
- # 2: Urea.Ammonium.Nitrate.solution
- # 3: Urea
- # 4: Anhydrous.ammonia
- # 5: Ammonium.sulphate
- # 6: Ammonium.phosphate
- # 7: Calcium.nitrate
- # 8: Fixed.efficiency
-
- # each option has 4 params
- # engamm: fraction of ammonium in the N fertilizer
- # orgeng: maximal amount of fertilizer N that can be immobilized in the soil (fraction for type 8)
- # deneng: maximal fraction of the mineral fertilizer that can be denitrified (used if codedenit is not activated)
- # voleng: maximal fraction of mineral fertilizer that can be volatilized
-
- ### codetypres: Type of residues for decomposition parameters - 21 atm
- # 1: Main crop on surface
- # 2: Intermediate crop on surface
- # 3: Manure on surface
- # 4: Green compost on surface
- # 5: Sewage sludge on surface
- # 6: Vinasse on surface
- # 7: Horn on surface
- # 8: Grapevine shoots on surface
- # 9: Others.1 on surface
- # 10: Others.2 on surface
- # 11: Main crop ploughed in
- # 12: Intermediate crop ploughed in
- # 13: Manure ploughed in
- # 14: Green compost ploughed in
- # 15: Sewage sludge ploughed in
- # 16: Vinasse ploughed in
- # 17: Cattle horn ploughed in
- # 18: Grapevine shoots ploughed in
- # 19: Others.1 ploughed in
- # 20: Others.2 ploughed in
- # 21: Dead roots in soil
-
- # each option has 17 params
-
- # fraction of organic residue which is decomposable
- if ("fOR_decomp" %in% pft.names) {
- SticsRFiles::set_param_xml(gen_file, "CroCo", pft.traits[which(pft.names == "fOR_decomp")], overwrite = TRUE)
- }
-
- # parameter of organic residues decomposition: kres=akres+bkres/CsurNres
- if ("ORdecomp_par" %in% pft.names) {
- SticsRFiles::set_param_xml(gen_file, "akres", pft.traits[which(pft.names == "ORdecomp_par")], overwrite = TRUE)
- }
-
- # potential rate of decomposition of organic residues: kres=akres+bkres/CsurNres
- if ("ORdecomp_rate" %in% pft.names) {
- SticsRFiles::set_param_xml(gen_file, "bkres", pft.traits[which(pft.names == "ORdecomp_rate")], overwrite = TRUE)
- }
-
- # parameter determining C/N ratio of biomass during organic residues decomposition: CsurNbio=awb+bwb/CsurNres
- if ("awb" %in% pft.names) {
- SticsRFiles::set_param_xml(gen_file, "awb", pft.traits[which(pft.names == "awb")], overwrite = TRUE)
- }
-
- # parameter determining C/N ratio of biomass during organic residues decomposition: CsurNbio=awb+bwb/CsurNres
- if ("bwb" %in% pft.names) {
- SticsRFiles::set_param_xml(gen_file, "bwb", pft.traits[which(pft.names == "bwb")], overwrite = TRUE)
- }
-
- # minimum ratio C/N of microbial biomass decomposing organic residues
- if ("minC2N_microbialbiomass" %in% pft.names) {
- SticsRFiles::set_param_xml(gen_file, "cwb", pft.traits[which(pft.names == "minC2N_microbialbiomass")], overwrite = TRUE)
- }
-
- # parameter of organic residues humification: hres = 1 - ahres*CsurNres/(bhres+CsurNres)
- if ("ahres" %in% pft.names) {
- SticsRFiles::set_param_xml(gen_file, "ahres", pft.traits[which(pft.names == "ahres")], overwrite = TRUE)
- }
-
- # parameter of organic residues humification: hres = 1 - ahres*CsurNres/(bhres+CsurNres)
- if ("bhres" %in% pft.names) {
- SticsRFiles::set_param_xml(gen_file, "bhres", pft.traits[which(pft.names == "bhres")], overwrite = TRUE)
- }
-
-
- # TODO: we need a soil PFT
-
- # potential decay rate of microbial biomass decomposing organic residues
- if ("microbialbiomass_decay" %in% pft.names) {
- SticsRFiles::set_param_xml(gen_file, "kbio", pft.traits[which(pft.names == "microbialbiomass_decay")], overwrite = TRUE)
- }
-
- # Carbon assimilation yield by the microbial biomass during crop residues decomposition
- if ("microbialbiomass_C_yield" %in% pft.names) {
- SticsRFiles::set_param_xml(gen_file, "yres", pft.traits[which(pft.names == "microbialbiomass_C_yield")], overwrite = TRUE)
- }
-
- # minimum value of C/N ratio of organic residue (g.g-1)
- if ("CNresmin" %in% pft.names) {
- SticsRFiles::set_param_xml(gen_file, "CNresmin", pft.traits[which(pft.names == "CNresmin")], overwrite = TRUE)
- }
+ pft.traits <- unlist(trait.values[[pft]])
+ pft.names <- names(pft.traits)
- # maximum value of C/N ratio of organic residue (g.g-1)
- if ("CNresmax" %in% pft.names) {
- SticsRFiles::set_param_xml(gen_file, "CNresmax", pft.traits[which(pft.names == "CNresmax")], overwrite = TRUE)
+ # Apply changes to those parameters specified by trait.values for this pft.
+ # Currently there is no checking/differentiation between parameters in the plant XML vs. these XMLs; for now, SticsRFiles just throws a warning when a parameter is not in that file.
+ if (!is.null(pft.traits)) {
+ SticsRFiles::set_param_xml(gen_file, param = names(pft.traits), values = as.list(unname(pft.traits)), overwrite = TRUE)
}
-
- # skipping, qmulchruis0:
- # amount of mulch above which runoff is suppressed
-
- # skipping, mouillabilmulch:
- # maximum wettability of crop mulch
-
- # skipping, kcouvmlch:
- # extinction coefficient connecting the soil cover to the amount of plant mulch
- # skipping, albedomulchresidus:
- # albedo of crop mulch
+ # Set the parameters that have been added to gen_df in the param_gen file.
+ SticsRFiles::set_param_xml(gen_file, names(gen_df), gen_df[1, ], overwrite = TRUE)
- # skipping, Qmulchdec:
- # maximal amount of decomposable mulch
-
SticsRFiles::convert_xml2txt(file = gen_file)
this_usm <- grep(names(trait.values)[pft], usmdirs)
@@ -1151,7 +327,7 @@ write.config.STICS <- function(defaults, trait.values, settings, run.id) {
file.copy(file.path(rundir, "tempopar.sti"), file.path(usmdirs[x], "tempopar.sti"), overwrite = TRUE)
})
- ### new formulations
+ ### new formulations
# DO NOTHING ELSE FOR NOW
SticsRFiles::convert_xml2txt(file = newf_file)
@@ -1169,8 +345,7 @@ write.config.STICS <- function(defaults, trait.values, settings, run.id) {
# read in template ini file
ini_xml <- XML::xmlParse(system.file("pecan_ini.xml", package = "PEcAn.STICS"))
for(i in seq_along(usmdirs)){
-
- # doesn't really matter what these are called, they will all be eventually 'ficini.txt'
+
ini_file <- file.path(rundir, paste0(basename(usmdirs[i]), "_ini.xml"))
# write the ini file
@@ -1183,32 +358,29 @@ write.config.STICS <- function(defaults, trait.values, settings, run.id) {
# these may or may not be modified depending on how crop cycles work in STICS
# 'snu' is bare soil
# fine for annual crops but need to change for perennials
- SticsRFiles::set_param_xml(file = ini_file, param = "stade0", values = "snu", select = "plante", select_value = "1", overwrite = TRUE)
+ SticsRFiles::set_param_xml(file = ini_file, param = "stade0", values = "dor", select = "plante", select_value = "1", overwrite = TRUE)
# when snu others are set to 0 by STICS
-
+
}else if(!is.null(settings$run$inputs$poolinitcond)){
ic_path <- settings$run$inputs$poolinitcond$path
ic_nc <- ncdf4::nc_open(ic_path)
# initial leaf area index (m2 m-2)
lai0 <- ncdf4::ncvar_get(ic_nc, "LAI")
- SticsRFiles::set_param_xml(file = ini_file, param = "lai0", values = lai0, select = "plante", select_value = "1", overwrite = TRUE)
# initial aerial biomass (kg m-2 --> t ha-1)
masec0 <- ncdf4::ncvar_get(ic_nc, "AGB")
- SticsRFiles::set_param_xml(file = ini_file, param = "masec0", values = PEcAn.utils::ud_convert(masec0, "kg m-2", "t ha-1"), select = "plante", select_value = "1", overwrite = TRUE)
# initial depth of root apex of the crop (m --> cm)
zrac0 <- ncdf4::ncvar_get(ic_nc, "rooting_depth")
if(zrac0 < 0.2) zrac0 <- 0.2
- SticsRFiles::set_param_xml(file = ini_file, param = "zrac0", values = PEcAn.utils::ud_convert(zrac0, "m", "cm"), select = "plante", select_value = "1", overwrite = TRUE)
# initial grain dry weight - haven't started any simulations from this stage yet
# SticsRFiles::set_param_xml(file = ini_file, param = "magrain0", values = 0, select = "plante", select_value = "1", overwrite = TRUE)
# initial N amount in the plant (kg m-2 --> kg ha-1)
QNplante0 <- ncdf4::ncvar_get(ic_nc, "plant_nitrogen_content")
- SticsRFiles::set_param_xml(file = ini_file, param = "QNplante0", values = PEcAn.utils::ud_convert(QNplante0, "kg m-2", "kg ha-1"), select = "plante", select_value = "1", overwrite = TRUE)
+ QNplante0 <- PEcAn.utils::ud_convert(QNplante0, "kg m-2", "kg ha-1")
# Not anymore
# initial reserve of biomass (kg m-2 --> t ha-1)
@@ -1227,18 +399,21 @@ write.config.STICS <- function(defaults, trait.values, settings, run.id) {
}else if(zrac0 < 0.8){
densinitial[5] <-0 #densinitial layers should not be filled if zrac0 is not there
}
- SticsRFiles::set_param_xml(file = ini_file, param = "densinitial", values = densinitial, select = "plante", select_value = "1", overwrite = TRUE)
# default 'lev'
# SticsRFiles::set_param_xml(file = ini_file, param = "stade0", values = "plt", select = "plante", select_value = "1", overwrite = TRUE)
+ ic_list <- list(lai0 = lai0, masec0 = masec0, zrac0 = zrac0, QNplante0 = QNplante0, densinitial = densinitial)
+
+ SticsRFiles::set_param_xml(file = ini_file, param = names(ic_list), values = ic_list, select = "plante", select_value = "1", overwrite = TRUE)
+
ncdf4::nc_close(ic_nc)
}
SticsRFiles::convert_xml2txt(file = ini_file)
file.rename(file.path(rundir, "ficini.txt"), file.path(usmdirs[i], "ficini.txt"))
}
-
+
############################ Prepare Soils ##################################
@@ -1246,21 +421,9 @@ write.config.STICS <- function(defaults, trait.values, settings, run.id) {
#### THERE IS SOME BUG IN SticsRFiles::convert_xml2txt FOR SOLS.XML
#### I NOW PUT TXT VERSION TO THE MODEL PACKAGE: param.sol
- #### TODO: revise others to have txt templates directly in the package
-
- # # changed from FINERT to finert and moved to the sols.xml
- # # initial fraction of soil organic N inactive for mineralisation (= stable SON/ total SON)
- # if ("FINERT" %in% soil.names) {
- # SticsRFiles::set_param_xml(gen_file, "finert", soil_params[which(soil.names == "FINERT")], overwrite = TRUE)
- # }
-
- sols_file <- file.path(rundir, "param.sol")
-
- # cp template sols file (txt)
- file.copy(system.file("param.sol", package = "PEcAn.STICS"), sols_file)
-
- # check param names
- # sols_vals <- SticsRFiles::get_soil_txt(sols_file)
+ #### sols_file <- file.path(rundir, "param.sol")
+ #### Note: this has changed now; if all is working, these comments can be deleted
+ sols_file <- file.path(rundir, "sols.xml")
site_id <- tryCatch(
as.numeric(settings$run$site$id),
@@ -1274,8 +437,7 @@ write.config.STICS <- function(defaults, trait.values, settings, run.id) {
str_ns <- site_id
}
- # I guess not important what this is called as long as it's consistent in usms
- SticsRFiles::set_soil_txt(file = sols_file, param="typsol", value=paste0("sol", str_ns))
+ soils_df <- data.frame(soil_name = paste0("sol", str_ns))
if(!is.null(settings$run$inputs$poolinitcond)){
ic_path <- settings$run$inputs$poolinitcond$path
@@ -1283,53 +445,60 @@ write.config.STICS <- function(defaults, trait.values, settings, run.id) {
# pH
pH <- ncdf4::ncvar_get(ic_nc, "pH")
- pH <- round(pH[1], digits = 1) # STICS uses 1 pH value
- SticsRFiles::set_soil_txt(file = sols_file, param="pH", value=pH)
-
- sapply(1:5, function(x) SticsRFiles::set_soil_txt(file = sols_file, param="epc", value=20, layer = x))
+ soils_df$pH <- round(pH[1], digits = 1) # STICS uses 1 pH value
+
+ # Thickness of each soil layer. This sets all (five) at 20cm, to set individual ones use epc_1, epc_2, etc.
+ soils_df$epc <- 20
# volume_fraction_of_water_in_soil_at_field_capacity
hccf <- ncdf4::ncvar_get(ic_nc, "volume_fraction_of_water_in_soil_at_field_capacity")
hccf <- round(hccf*100, digits = 2)
- sapply(seq_along(hccf), function(x) SticsRFiles::set_soil_txt(file = sols_file, param="hccf", value=hccf[x], layer = x))
+ names(hccf) <- paste0("HCCF_", c(1:length(hccf)))
+ soils_df <- cbind(soils_df, t(hccf))
# volume_fraction_of_condensed_water_in_soil_at_wilting_point
hminf <- ncdf4::ncvar_get(ic_nc, "volume_fraction_of_condensed_water_in_soil_at_wilting_point")
hminf <- round(hminf*100, digits = 2)
- sapply(seq_along(hminf), function(x) SticsRFiles::set_soil_txt(file = sols_file, param="hminf", value=hminf[x], layer = x))
+ names(hminf) <- paste0("HMINF_", c(1:length(hminf)))
+ soils_df <- cbind(soils_df, t(hminf))
# soil_organic_nitrogen_content
Norg <- ncdf4::ncvar_get(ic_nc, "soil_organic_nitrogen_content")
Norg <- round(Norg[1]*100, digits = 2) # STICS uses 1 Norg value
- SticsRFiles::set_soil_txt(file = sols_file, param="Norg", value=Norg)
-
+ soils_df$norg <- Norg
+
# mass_fraction_of_clay_in_soil
argi <- ncdf4::ncvar_get(ic_nc, "mass_fraction_of_clay_in_soil")
argi <- round(argi[1]*100, digits = 0) # STICS uses 1 argi value
- SticsRFiles::set_soil_txt(file = sols_file, param="argi", value=argi)
+ soils_df$argi <- argi
# soil_density (kg m-3 --> g cm-3)
DAF <- ncdf4::ncvar_get(ic_nc, "soil_density")
DAF <- round(PEcAn.utils::ud_convert(DAF, "kg m-3", "g cm-3"), digits = 1)
- sapply(seq_along(DAF), function(x) SticsRFiles::set_soil_txt(file = sols_file, param="DAF", value=DAF[x], layer = x))
+ names(DAF) <- paste0("DAF_", c(1:length(DAF)))
+ soils_df <- cbind(soils_df, t(DAF))
# c2n_humus
- #CsurNsol0 <- ncdf4::ncvar_get(ic_nc, "c2n_humus")
- #SticsRFiles::set_soil_txt(file = sols_file, param="CsurNsol", value=CsurNsol0)
+ # CsurNsol0 <- ncdf4::ncvar_get(ic_nc, "c2n_humus")
+ # soils_df$CsurNsol0 <- CsurNsol0
- # epd
+ # epd: thickness of mixing cells in each soil layer ( = 2 * dispersion length)
epd <- rep(10, 5)
- sapply(seq_along(epd), function(x) SticsRFiles::set_soil_txt(file = sols_file, param="epd", value=epd[x], layer = x))
+ names(epd) <- paste0("epd_", c(1:length(epd)))
+ soils_df <- cbind(soils_df, t(epd))
ncdf4::nc_close(ic_nc)
}
- file.copy(sols_file, file.path(usmdirs, "param.sol"))
+ SticsRFiles::gen_sols_xml(sols_file, param_df = soils_df, template = system.file("sols.xml", package = "PEcAn.STICS"))
+ SticsRFiles::convert_xml2txt(file = sols_file)
+ file.copy(file.path(rundir, "param.sol"), file.path(usmdirs, "param.sol"))
+
+ # check param values
+ # sols_vals <- SticsRFiles::get_soil_txt(file.path(rundir, "param.sol"), stics_version = SticsRFiles::get_stics_versions_compat()$latest_version)
# DO NOTHING ELSE FOR NOW
-
- # this has some bug for sols.xml
- # SticsRFiles::convert_xml2txt(file = sols_file, javastics = javastics_path)
+
######################### Prepare Weather Station File ###############################
@@ -1355,13 +524,13 @@ write.config.STICS <- function(defaults, trait.values, settings, run.id) {
# DO NOTHING ELSE FOR NOW
# Should these be prepared by met2model.STICS?
-
+
############################## Prepare LAI forcing ####################################
## skipping for now
-
+
############################ Prepare Technical File ##################################
@@ -1382,7 +551,7 @@ write.config.STICS <- function(defaults, trait.values, settings, run.id) {
tec_df$concirr <- 0.11 # concentration of mineral N in irrigation water (kg ha-1 mm-1)
tec_df$ressuite <- 'straw+roots' # type of crop residue
tec_df$h2ograinmax <- 0.32 # maximal water content of fruits at harvest
-
+
# the following formalisms exist in the tec file:
## supply of organic residus
## soil tillage
@@ -1436,10 +605,10 @@ write.config.STICS <- function(defaults, trait.values, settings, run.id) {
"tauxexportfauche",
"restit",
"mscoupemini") # amount of mineral N added by fertiliser application at each cut of a forage crop, kg.ha-1
-
-
+
+
harvest_sub <- events_sub[events_sub$mgmt_operations_event == "harvest",]
-
+
harvest_list <- list()
for(hrow in seq_len(nrow(harvest_sub))){
@@ -1511,7 +680,7 @@ write.config.STICS <- function(defaults, trait.values, settings, run.id) {
# empty
fert_df <- data.frame(jul = NA, val = NA)
-
+
# If given fertilization date is within simulation days
if(as.Date(fert_sub$date[frow]) %in% dseq_sub){
@@ -1531,30 +700,51 @@ write.config.STICS <- function(defaults, trait.values, settings, run.id) {
}
fert_tec <- do.call("cbind", fert_list)
} #fertilizer-if end
-
-
- # DO NOTHING ELSE FOR NOW
- # TODO: ADD OTHER MANAGEMENT
-
- # same usm -> continue columns
- usm_tec_df <- cbind(tec_df, harvest_tec, fert_tec)
-
- usm_tec_df$ratiol <- 0
-
- SticsRFiles::gen_tec_xml(param_df = usm_tec_df,
- file=system.file("pecan_tec.xml", package = "PEcAn.STICS"),
- out_dir = usmdirs[usmi])
-
- # TODO: more than 1 USM, rbind
-
- SticsRFiles::convert_xml2txt(file = file.path(usmdirs[usmi], "tmp_tec.xml"))
-
- } # end-loop over usms
- } # TODO: if no events file is given modify other harvest parameters, e.g. harvest decision
+
+ # DO NOTHING ELSE FOR NOW
+ # TODO: ADD OTHER MANAGEMENT
+
+ # same usm -> continue columns
+ usm_tec_df <- cbind(tec_df, harvest_tec, fert_tec)
+
+ usm_tec_df$ratiol <- 0
+
+ SticsRFiles::gen_tec_xml(param_df = usm_tec_df,
+ file=system.file("pecan_tec.xml", package = "PEcAn.STICS"),
+ out_dir = usmdirs[usmi])
+
+ # TODO: more than 1 USM, rbind
+
+ SticsRFiles::convert_xml2txt(file = file.path(usmdirs[usmi], "tmp_tec.xml"))
+
+
+ } # end-loop over usms
+ } # TODO: if no events file is given, modify other harvest parameters, e.g. harvest decision
+
+ ################################ Prepare Climate file ######################################
+ # symlink climate files
+ met_path <- settings$run$inputs$met$path
+
+ for(usmi in seq_along(usmdirs)){
+
+ usm_years <- c(sapply(strsplit(sub(".*_", "", basename(usmdirs)[usmi]), "-"), function(x) (as.numeric(x))))
+ dseq_sub <- dseq[lubridate::year(dseq) %in% usm_years]
+
+ clim_list <- list() # temporary solution
+ for(clim in seq_along(usm_years)){
+ # currently assuming only first year file has been passed to the settings, modify met2model if changing the logic
+ met_file <- gsub(paste0(lubridate::year(settings$run$start.date), ".climate"), paste0(usm_years[clim], ".climate"), met_path)
+ clim_list[[clim]] <- utils::read.table(met_file)
+ }
+ clim_run <- do.call("rbind", clim_list)
+ utils::write.table(clim_run, file.path(usmdirs[usmi], "climat.txt"), col.names = FALSE, row.names = FALSE)
+
+ }
+
################################ Prepare USM file ######################################
-
+
# loop for each USM
#ncodesuite <- ifelse(length(usmdirs) > 1, 1,0)
@@ -1585,8 +775,8 @@ write.config.STICS <- function(defaults, trait.values, settings, run.id) {
}else{
SticsRFiles::set_usm_txt(usm_file, "codesuite", 1, append = FALSE)
}
-
-
+
+
# number of simulated plants (sole crop=1; intercropping=2)
SticsRFiles::set_usm_txt(usm_file, "nbplantes", 1, append = FALSE) # hardcode for now
@@ -1658,30 +848,10 @@ write.config.STICS <- function(defaults, trait.values, settings, run.id) {
# TODO: more than 1 PFTs
# STICS can run 2 PFTs max: main crop + intercrop
}
-
-
-
- ################################ Prepare Run ######################################
- # symlink climate files
- met_path <- settings$run$inputs$met$path
- for(usmi in seq_along(usmdirs)){
-
- usm_years <- c(sapply(strsplit(sub(".*_", "", basename(usmdirs)[usmi]), "-"), function(x) (as.numeric(x))))
- dseq_sub <- dseq[lubridate::year(dseq) %in% usm_years]
-
- clim_list <- list() # temporary solution
- for(clim in seq_along(usm_years)){
- # currently assuming only first year file has been passed to the settings, modify met2model if changing the logic
- met_file <- gsub(paste0(lubridate::year(settings$run$start.date), ".climate"), paste0(usm_years[clim], ".climate"), met_path)
- clim_list[[clim]] <- utils::read.table(met_file)
- }
- clim_run <- do.call("rbind", clim_list)
- utils::write.table(clim_run, file.path(usmdirs[usmi], "climat.txt"), col.names = FALSE, row.names = FALSE)
-
- }
+ ################################ Prepare Run ######################################
# symlink to binary
file.symlink(stics_path, bindir)
@@ -1699,7 +869,7 @@ write.config.STICS <- function(defaults, trait.values, settings, run.id) {
# cmd_generate <- paste("java -jar", jexe,"--generate-txt", rundir, usm_name)
# cmd_run <- paste("java -jar", jexe,"--run", rundir, usm_name)
-
+
#-----------------------------------------------------------------------
# create launch script (which will create symlink)
if (!is.null(settings$model$jobtemplate) && file.exists(settings$model$jobtemplate)) {
@@ -1752,5 +922,193 @@ write.config.STICS <- function(defaults, trait.values, settings, run.id) {
writeLines(jobsh, con = file.path(settings$rundir, run.id, "job.sh"))
Sys.chmod(file.path(settings$rundir, run.id, "job.sh"))
-
+
} # write.config.STICS
+
+
+# ==================================================================================================#
+#' Function to translate pecan param names and units to stics names and units.
+#' @export
+#' @param trait.values trait.values, list
+#' @return translated list
+#' @author Quentin Bell
+# Based on pecan2lpjguess function by Istem Fer https://github.com/PecanProject/pecan/blob/develop/models/lpjguess/R/write.config.LPJGUESS.R#L229
+pecan2stics <- function(trait.values){
+
+ # TODO: match all STICS and PEcAn names
+ vartable <- dplyr::tribble(
+ ~sticsname, ~pecanname, ~sticsunits, ~pecanunits, ~sticsfile,
+ # Plant and soil related parameters
+ "abscission", "fracLeafFall", NA, NA, "plt.xml",
+ "adens", "adens", NA, NA, "plt.xml",
+ "adil", "adil", NA, NA, "plt.xml",
+ "ahres", "ahres", NA, NA, "param_gen.xml",
+ "akres", "ORdecomp_par", NA, NA, "param_gen.xml",
+ "ampfroid", "vernalization_TAmp", NA, NA, "plt.xml",
+ "awb", "awb", NA, NA, "param_gen.xml",
+ "bdens", "dens_comp", NA, NA, "plt.xml",
+ "bdil", "bdil", NA, NA, "plt.xml",
+ "belong", "belong", NA, NA, "plt.xml",
+ "beta", "maxTPincrease_waterstress", NA, NA, "param_gen.xml",
+ "bhres", "bhres", NA, NA, "param_gen.xml",
+ "bkres", "ORdecomp_rate", NA, NA, "param_gen.xml",
+ "bwb", "bwb", NA, NA, "param_gen.xml",
+ "celong", "celong", NA, NA, "plt.xml",
+ "CNresmax", "CNresmax", NA, NA, "param_gen.xml",
+ "CNresmin", "CNresmin", NA, NA, "param_gen.xml",
+ "coefamflax", "coefamflax", NA, NA, "plt.xml",
+ "coefb", "rad_on_conversion_eff", NA, NA, "param_gen.xml",
+ "coefdrpmat", "coefdrpmat", NA, NA, "plt.xml",
+ "coefflodrp", "coefflodrp", NA, NA, "plt.xml",
+ "coeflaxsen", "coeflaxsen", NA, NA, "plt.xml",
+ "coeflevamf", "coeflevamf", NA, NA, "plt.xml",
+ "coeflevdrp", "coeflevdrp", NA, NA, "plt.xml",
+ "coefmshaut", "biomass2usefulheight", NA, NA, "plt.xml",
+ "coefsenlan", "coefsenlan", NA, NA, "plt.xml",
+ "contrdamax", "db_reduc_rgr_max", NA, NA, "plt.xml",
+ "CroCo", "fOR_decomp", NA, NA, "param_gen.xml",
+ "croirac", "croirac", NA, NA, "plt.xml",
+ "cwb", "minC2N_microbialbiomass", NA, NA, "param_gen.xml",
+ "dacohes", "bd_rootgrowth_reduced", NA, NA, "param_gen.xml",
+ "daseuilbas", "bd_rootgrowth_maximal", NA, NA, "param_gen.xml",
+ "daseuilhaut", "bd_rootgrowth_impossible", NA, NA, "param_gen.xml",
+ "debsenrac", "root_sen_dday", "round", "0", "plt.xml",
+ "difN", "difN_FC", NA, NA, "param_gen.xml",
+ "diftherm", "soil_thermal_diffusivity", NA, NA, "param_gen.xml",
+ "dlaimaxbrut", "lai_max_rate", NA, NA, "plt.xml",
+ "dlaimin", "lai_growth_rate_accelerating", NA, NA, "plt.xml",
+ "draclong", "rootlength_prod_max", NA, NA, "plt.xml",
+ "durvieF", "leaf_lifespan_max", NA, NA, "plt.xml",
+ "durviesupmax", "relative_addlifespan_DT_excessN", NA, NA, "plt.xml",
+ "efcroijuv", "RUE_juv", NA, NA, "plt.xml",
+ "efcroirepro", "RUE_rep", NA, NA, "plt.xml",
+ "efcroiveg", "RUE_veg", NA, NA, "plt.xml",
+ "elmax", "coleoptile_elong_dark_max", NA, NA, "plt.xml",
+ "extin", "extinction_coefficient_diffuse", NA, NA, "plt.xml",
+ "fhminsat", "fhminsat", NA, NA, "param_gen.xml",
+ "FINERT", "FINERT", NA, NA, "sols.xml",
+ "fNCbiomin", "fNCbiomin", NA, NA, "param_gen.xml",
+ "fredkN", "Nlim_reductionOMdecomp", NA, NA, "param_gen.xml",
+ "fredlN", "Nlim_reductionMBdecomp", NA, NA, "param_gen.xml",
+ "fredNsup", "fredNsup", NA, NA, "param_gen.xml",
+ "FTEMh", "T_p1_Hdecomp_rate", NA, NA, "param_gen.xml",
+ "FTEMha", "T_p2_Hdecomp_rate", NA, NA, "param_gen.xml",
+ "FTEMr", "FTEMr", NA, NA, "param_gen.xml",
+ "FTEMra", "FTEMra", NA, NA, "param_gen.xml",
+ "h2ofeuilverte", "water_content_TLP_leaf", NA, NA, "plt.xml",
+ "hautmax", "HTMAX", NA, NA, "plt.xml",
+ "hautbase", "height", NA, NA, "plt.xml",
+ "hminm", "hminm", NA, NA, "param_gen.xml",
+ "hoptm", "hoptm", NA, NA, "param_gen.xml",
+ "INNmin", "INNmin", NA, NA, "plt.xml",
+ "innsen", "innsen", NA, NA, "plt.xml",
+ "innturgmin", "innturgmin", NA, NA, "plt.xml",
+ "julvernal", "vernalization_init", "round", "0", "plt.xml",
+ "jvcmini", "vernalization_days_min", "round", "0", "plt.xml",
+ "kbio", "microbialbiomass_decay", NA, NA, "param_gen.xml",
+ "khaut", "LAI2height", NA, NA, "plt.xml",
+ "Kmabs1", "Kmabs1", NA, NA, "plt.xml",
+ "kmax", "crop_water_max", NA, NA, "plt.xml",
+ "laicomp", "lai_comp", NA, NA, "plt.xml",
+ "longsperac", "SRL", NA, NA, "plt.xml",
+ "lvfront", "rootdens_at_apex", NA, NA, "plt.xml",
+ "lvopt", "lvopt", NA, NA, "param_gen.xml",
+ "masecNmax", "masecNmax", NA, NA, "plt.xml",
+ "maxazorac", "maxazorac", NA, NA, "plt.xml",
+ "minazorac", "minazorac", NA, NA, "plt.xml",
+ "minefnra", "minefnra", NA, NA, "plt.xml",
+ "nlevlim1", "days2reduced_emergence_postgerm", "round", "0", "plt.xml",
+ "nlevlim2", "days2stopped_emergence_postgerm", "round", "0", "plt.xml",
+ "Nmeta", "Nmeta", NA, NA, "plt.xml",
+ "Nreserve", "Nreserve", NA, NA, "plt.xml",
+ "parazofmorte", "parazofmorte", NA, NA, "plt.xml",
+ "pentlaimax", "pentlaimax", NA, NA, "plt.xml",
+ "pHmaxvol", "pHmaxvol", NA, NA, "param_gen.xml",
+ "pHminvol", "pHminvol", NA, NA, "param_gen.xml",
+ "phobase", "phobase", NA, NA, "plt.xml",
+ "phosat", "phosat", NA, NA, "plt.xml",
+ "phyllotherme", "phyllochron", NA, NA, "plt.xml",
+ "plNmin", "plNmin", NA, NA, "param_gen.xml",
+ "pminruis", "precmin4runoff", NA, NA, "param_gen.xml",
+ "Primingmax", "Primingmax", NA, NA, "param_gen.xml",
+ "prophumtassrec", "SMC_compaction_delay_harvest", NA, NA, "param_gen.xml",
+ "prophumtasssem", "SMC_compaction_delay_sow", NA, NA, "param_gen.xml",
+ "proprac", "root2aerial_harvest", NA, NA, "param_gen.xml",
+ "psihucc", "SWP_FC", NA, NA, "param_gen.xml",
+ "psihumin", "SWP_WP", NA, NA, "param_gen.xml",
+ "psisto", "psi_stomata_closure", NA, NA, "plt.xml", # psisto, potential of stomatal closing (absolute value) (bars). note: units in betyDB are m, but Istem's prior is for testing
+ "psiturg", "leaf_psi_tlp", NA, NA, "plt.xml",
+ "QNpltminINN", "QNpltminINN", NA, NA, "param_gen.xml",
+ "rapsenturg", "rapsenturg", NA, NA, "plt.xml",
+ "ratiodurvieI", "early2last_leaflife", NA, NA, "plt.xml",
+ "ratiosen", "senes2total_biomass", NA, NA, "plt.xml",
+ "rayon", "rayon", NA, NA, "plt.xml",
+ "rdrain", "rdrain", NA, NA, "param_gen.xml",
+ "remobres", "remobres", NA, NA, "plt.xml",
+ "sensrsec", "rootsens2drought", NA, NA, "plt.xml",
+ "slamax", "SLAMAX", "cm2 g-1", "m2 kg-1", "plt.xml",
+ "slamin", "SLAMIN", "cm2 g-1", "m2 kg-1", "plt.xml",
+ "stamflax", "cum_thermal_growth", NA, NA, "plt.xml",
+ "stlevamf", "cum_thermal_juvenile", NA, NA, "plt.xml",
+ "stlevdrp", "cum_thermal_filling", NA, NA, "plt.xml",
+ "stpltger", "cum_thermal_germin", NA, NA, "plt.xml",
+ "stressdev", "phasic_delay_max", NA, NA, "plt.xml",
+ "swfacmin", "swfacmin", NA, NA, "plt.xml",
+ "tcmax", "tcmax_growth", NA, NA, "plt.xml",
+ "tcmin", "tcmin_growth", NA, NA, "plt.xml",
+ "tcxstop", "tcmax_foliar_growth", NA, NA, "plt.xml",
+ "tdmax", "tdmax", NA, NA, "plt.xml",
+ "tdmin", "tdmin", NA, NA, "plt.xml",
+ "temax", "temax", NA, NA, "plt.xml",
+ "temin", "temin", NA, NA, "plt.xml",
+ "teopt", "teopt", NA, NA, "plt.xml",
+ "teoptbis", "teoptbis", NA, NA, "plt.xml",
+ "tfroid", "vernalization_TOpt", NA, NA, "plt.xml",
+ "tgmin", "emergence_Tmin", NA, NA, "plt.xml",
+ "tigefeuil", "stem2leaf", NA, NA, "plt.xml",
+ "tmin_mineralisation", "tmin_mineralisation", NA, NA, "param_gen.xml",
+ "TREFh", "T_r_HOMdecomp", NA, NA, "param_gen.xml",
+ "TREFr", "T_r_ORdecomp", NA, NA, "param_gen.xml",
+ "udlaimax", "udlaimax", NA, NA, "plt.xml",
+ "Vabs2", "Nupt_fertloss_halve", NA, NA, "param_gen.xml",
+ "vlaimax", "vlaimax", NA, NA, "plt.xml",
+ "Wh", "Wh", NA, NA, "param_gen.xml",
+ "GMIN1", "GMIN1", NA, NA, "param_gen.xml",
+ "GMIN2", "GMIN2", NA, NA, "param_gen.xml",
+ "GMIN3", "GMIN3", NA, NA, "param_gen.xml",
+ "GMIN4", "GMIN4", NA, NA, "param_gen.xml",
+ "GMIN5", "GMIN5", NA, NA, "param_gen.xml",
+ "GMIN6", "GMIN6", NA, NA, "param_gen.xml",
+ "GMIN7", "GMIN7", NA, NA, "param_gen.xml",
+ "Xorgmax", "maxNimm_mineralfert", NA, NA, "param_gen.xml",
+ "y0msrac", "rootmin_harvest", NA, NA, "param_gen.xml",
+ "yres", "microbialbiomass_C_yield", NA, NA, "param_gen.xml",
+ # Missing PEcAn parameters without corresponding STICS parameters; NOTE(review): verify the trailing comma after the last row above does not error in dplyr::tribble()
+ )
+
+ trait.values <- lapply(trait.values, function(x){
+ names(x) <- vartable$sticsname[match(names(x), vartable$pecanname)]
+ return(x)
+ })
+
+ # TODO : unit conversions?
+ toconvert <- vartable$sticsname[!is.na(vartable$sticsunits)]
+ trait.values <- lapply(trait.values, function(x){
+ canconvert <- toconvert[toconvert %in% names(x)]
+ if(length(canconvert) != 0){
+ for(noc in seq_along(canconvert)){
+ if(vartable$sticsunits[vartable$sticsname == canconvert[noc]] == "round"){
+ x[,names(x) == canconvert[noc]] <- round(x[,names(x) == canconvert[noc]])
+ }else{
+ x[,names(x) == canconvert[noc]] <- PEcAn.utils::ud_convert(x[,names(x) == canconvert[noc]],
+ vartable$pecanunits[vartable$sticsname == canconvert[noc]],
+ vartable$sticsunits[vartable$sticsname == canconvert[noc]])
+ }
+
+ }
+ }
+ return(x)
+ })
+
+ return(trait.values)
+}
diff --git a/models/stics/man/pecan2stics.Rd b/models/stics/man/pecan2stics.Rd
new file mode 100644
index 00000000000..458c0faa2f7
--- /dev/null
+++ b/models/stics/man/pecan2stics.Rd
@@ -0,0 +1,20 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/write.config.STICS.R
+\name{pecan2stics}
+\alias{pecan2stics}
+\title{Function to translate pecan param names and units to stics names and units.}
+\usage{
+pecan2stics(trait.values)
+}
+\arguments{
+\item{trait.values}{trait.values, list}
+}
+\value{
+translated list
+}
+\description{
+Function to translate pecan param names and units to stics names and units.
+}
+\author{
+Quentin Bell
+}
diff --git a/models/template/DESCRIPTION b/models/template/DESCRIPTION
index 19231e43732..c6955b1f58d 100644
--- a/models/template/DESCRIPTION
+++ b/models/template/DESCRIPTION
@@ -1,17 +1,19 @@
Package: PEcAn.ModelName
Type: Package
Title: PEcAn Package for Integration of the ModelName Model
-Version: 1.8.1
+Version: 0.0.1
Authors@R: c(person("Jane", "Doe", role = c("aut", "cre"),
email = "jdoe@illinois.edu"),
person("John", "Doe", role = c("aut")))
Description: This module provides functions to link the (ModelName) to PEcAn.
+URL: https://pecanproject.github.io
+BugReports: https://github.com/PecanProject/pecan/issues
Imports:
PEcAn.DB,
PEcAn.logger,
PEcAn.utils (>= 1.4.8)
Suggests:
- testthat (>= 1.0.2)
+ testthat (>= 3.0)
SystemRequirements: ModelName
OS_type: unix
License: BSD_3_clause + file LICENSE
diff --git a/models/template/NEWS.md b/models/template/NEWS.md
index cce9a7a711d..f821b073a7a 100644
--- a/models/template/NEWS.md
+++ b/models/template/NEWS.md
@@ -1,7 +1,3 @@
-# PEcAn.ModelName 1.8.1
+# PEcAn.ModelName 0.0.1
-## License change
-* PEcAn.ModelName is now distributed under the BSD three-clause license instead of the NCSA Open Source license.
-
-## Added
-* Added a `NEWS.md` file to track changes to the package. Prior to this point changes are tracked in the main CHANGELOG for the PEcAn repository.
+* Initial version of ModelName, with support for...
\ No newline at end of file
diff --git a/modules/allometry/DESCRIPTION b/modules/allometry/DESCRIPTION
index 482721a83dc..7fa1af4f75c 100644
--- a/modules/allometry/DESCRIPTION
+++ b/modules/allometry/DESCRIPTION
@@ -1,12 +1,14 @@
Package: PEcAn.allometry
Type: Package
Title: PEcAn Allometry Functions
-Version: 1.7.4
+Version: 1.7.5
Authors@R: c(person("Mike", "Dietze", role = c("aut", "cre"),
email = "dietze@bu.edu"),
person("Shashank", "Singh", role = c("ctb")),
person("University of Illinois, NCSA", role = c("cph")))
Description: Synthesize allometric equations or fit allometries to data.
+URL: https://pecanproject.github.io
+BugReports: https://github.com/PecanProject/pecan/issues
Imports:
coda (>= 0.18),
grDevices,
@@ -31,3 +33,5 @@ LazyData: FALSE
Encoding: UTF-8
RoxygenNote: 7.3.2
VignetteBuilder: knitr, rmarkdown
+X-schema.org-keywords: allometry, biomass, plant-traits
+
diff --git a/modules/allometry/NEWS.md b/modules/allometry/NEWS.md
index 35332c1a9ca..9aa911fcc1d 100644
--- a/modules/allometry/NEWS.md
+++ b/modules/allometry/NEWS.md
@@ -1,3 +1,9 @@
+# PEcAn.allometry 1.7.5
+
+* Added keywords and bug reporting URL to DESCRIPTION.
+* No code changes in this release.
+
+
# PEcAn.allometry 1.7.4
## License change
diff --git a/modules/assim.batch/DESCRIPTION b/modules/assim.batch/DESCRIPTION
index 25ed2de5366..c0f796a1f88 100644
--- a/modules/assim.batch/DESCRIPTION
+++ b/modules/assim.batch/DESCRIPTION
@@ -1,7 +1,7 @@
Package: PEcAn.assim.batch
Type: Package
Title: PEcAn Functions Used for Ecological Forecasts and Reanalysis
-Version: 1.9.0
+Version: 1.9.1
Authors@R: c(person("Mike", "Dietze", role = c("aut"),
email = "dietze@bu.edu"),
person("Istem", "Fer", role = c("aut", "cre"),
@@ -14,6 +14,8 @@ Description: The Predictive Ecosystem Carbon Analyzer (PEcAn) is a scientific
model parameterization, execution, and analysis. The goal of PECAn is to
streamline the interaction between data and models, and to improve the
efficacy of scientific investigation.
+URL: https://pecanproject.github.io
+BugReports: https://github.com/PecanProject/pecan/issues
VignetteBuilder: knitr, rmarkdown
Imports:
abind,
@@ -59,3 +61,4 @@ LazyData: FALSE
Encoding: UTF-8
RoxygenNote: 7.3.2
Roxygen: list(markdown = TRUE)
+X-schema.org-keywords: data-assimilation, parameter-estimation, Bayesian-inference, ecological-modeling, model-calibration
diff --git a/modules/assim.batch/NEWS.md b/modules/assim.batch/NEWS.md
index 362afeb6d2a..f6cc87e7ca1 100644
--- a/modules/assim.batch/NEWS.md
+++ b/modules/assim.batch/NEWS.md
@@ -1,3 +1,9 @@
+# PEcAn.assim.batch 1.9.1
+
+* Added keywords and bug reporting URL to DESCRIPTION.
+* No code changes in this release.
+
+
# PEcAn.assim.batch 1.9.0
## License change
diff --git a/modules/assim.sequential/DESCRIPTION b/modules/assim.sequential/DESCRIPTION
index 2b470c001b5..50f608a289d 100644
--- a/modules/assim.sequential/DESCRIPTION
+++ b/modules/assim.sequential/DESCRIPTION
@@ -1,7 +1,7 @@
Package: PEcAnAssimSequential
Type: Package
Title: PEcAn Functions Used for Ecological Forecasts and Reanalysis
-Version: 1.9.0
+Version: 1.10.0
Author: Mike Dietze
Maintainer: Mike Dietze
Description: The Predictive Ecosystem Carbon Analyzer (PEcAn) is a scientific
@@ -9,16 +9,18 @@ Description: The Predictive Ecosystem Carbon Analyzer (PEcAn) is a scientific
model parameterization, execution, and analysis. The goal of PECAn is to
streamline the interaction between data and models, and to improve the
efficacy of scientific investigation.
+URL: https://pecanproject.github.io
+BugReports: https://github.com/PecanProject/pecan/issues
+Depends: R (>= 4.1.0)
Imports:
coda,
dplyr,
+ foreach,
furrr,
future,
ggplot2,
lubridate (>= 1.6.0),
- magrittr,
Matrix,
- mvtnorm,
ncdf4,
nimble,
PEcAn.DB,
@@ -28,16 +30,20 @@ Imports:
PEcAn.uncertainty,
PEcAn.workflow,
purrr,
+ reticulate,
rlang,
stringr
Suggests:
+ abind,
corrplot,
+ doSNOW,
exactextractr,
+ mvtnorm,
ggrepel,
- emdbook,
glue,
ggpubr,
gridExtra,
+ itertools,
magic (>= 1.5.0),
methods,
PEcAn.benchmark,
@@ -61,8 +67,10 @@ Suggests:
tidyr,
sp,
utils,
+ xgboost,
XML
License: BSD_3_clause + file LICENSE
Copyright: Authors
Encoding: UTF-8
RoxygenNote: 7.3.2
+X-schema.org-keywords: state-data-assimilation, ensemble-kalman-filter, particle-filter, ecological-forecasting, forecast-verification
diff --git a/modules/assim.sequential/NAMESPACE b/modules/assim.sequential/NAMESPACE
index db21f07876e..90364e9ec3f 100644
--- a/modules/assim.sequential/NAMESPACE
+++ b/modules/assim.sequential/NAMESPACE
@@ -29,6 +29,7 @@ export(assessParams)
export(block_matrix)
export(conj_wt_wishart_sampler)
export(construct_nimble_H)
+export(downscale_qsub_main)
export(dwtmnorm)
export(get_ensemble_weights)
export(hop_test)
@@ -46,6 +47,8 @@ export(post.analysis.multisite.ggplot)
export(postana.bias.plotting.sda)
export(postana.bias.plotting.sda.corr)
export(postana.timeser.plotting.sda)
+export(qsub_sda)
+export(qsub_sda_batch)
export(rescaling_stateVars)
export(rwtmnorm)
export(sample_met)
@@ -53,8 +56,11 @@ export(sampler_toggle)
export(sda.enkf)
export(sda.enkf.multisite)
export(sda.enkf.original)
+export(sda.enkf_local)
+export(sda_assemble)
export(sda_weights_site)
export(simple.local)
+export(stack_covariates_2_geotiff)
export(tobit.model)
export(tobit2space.model)
export(tobit_model_censored)
@@ -63,6 +69,6 @@ import(furrr)
import(lubridate)
import(nimble)
importFrom(dplyr,"%>%")
+importFrom(foreach,"%dopar%")
importFrom(lubridate,"%m+%")
-importFrom(magrittr,"%>%")
importFrom(rlang,.data)
diff --git a/modules/assim.sequential/NEWS.md b/modules/assim.sequential/NEWS.md
index fab179b50be..fddb0352ca0 100644
--- a/modules/assim.sequential/NEWS.md
+++ b/modules/assim.sequential/NEWS.md
@@ -1,3 +1,16 @@
+# PEcAnAssimSequential 1.10.0
+
+## Added
+
+* New function `stack_covariates_2_geotiff` stacks data layers from various GeoTIFFs (with different extents, CRS, and resolutions) to a single map (#3450)
+* New functions `qsub_sda()`, `downscale_qsub_main()`, `qsub_sda()`, `qsub_sda_batch()`, `sda.enkf_local()` for submitting SDA batch jobs by splitting a large number of sites into multiple small groups of sites and spreading their computation across all cores of multiple HPC nodes (#3634, #3450, #3544).
+
+## Changed
+
+* The entire SDA workflow has been overhauled to support parallel computation throughout.
+
+
+
# PEcAnAssimSequential 1.9.0
## Added
@@ -11,6 +24,7 @@
* Documentation improvements
+
# PEcAnAssimSequential 1.8.0
diff --git a/modules/assim.sequential/R/Adjustment.R b/modules/assim.sequential/R/Adjustment.R
index 93c97228c4c..32b03511107 100644
--- a/modules/assim.sequential/R/Adjustment.R
+++ b/modules/assim.sequential/R/Adjustment.R
@@ -15,7 +15,7 @@
##' @export
adj.ens<-function(Pf, X, mu.f, mu.a, Pa){
-
+ X <- as.matrix(X)
S_f <- svd(Pf)
L_f <- S_f$d
V_f <- S_f$v
@@ -25,7 +25,7 @@ adj.ens<-function(Pf, X, mu.f, mu.a, Pa){
for(i in seq_len(nrow(X))){
- Z[i,] <- 1/sqrt(L_f) * t(V_f)%*%(X[i,]-mu.f)
+ Z[i,] <- 1/sqrt(L_f) * t(V_f)%*%matrix(X[i,]-mu.f) %>% t
}
Z[is.na(Z)]<-0
diff --git a/modules/assim.sequential/R/Analysis_sda_block.R b/modules/assim.sequential/R/Analysis_sda_block.R
index 6c37b4f251f..a032ae4be35 100644
--- a/modules/assim.sequential/R/Analysis_sda_block.R
+++ b/modules/assim.sequential/R/Analysis_sda_block.R
@@ -10,7 +10,7 @@
##' @param t time point in format of YYYY-MM-DD.
##' @param nt total length of time steps, corresponding to the `nt` variable in the `sda.enkf.multisite` function.
##' @param MCMC.args arguments for the MCMC sampling, details can be found in the roxygen strucutre for control list in the `sda.enkf.multisite` function.
-##' @param block.list.all.pre pre-existed block.list.all object for passing the aqq and bqq to the current SDA run, the default is NULL. Details can be found in the roxygen structure for `pre_enkf_params` of the `sda.enkf.multisite` function
+##' @param block.list.all.pre pre-existed block.list.all object for passing the aqq and bqq to the current SDA run, the default is NULL. Details can be found in the roxygen structure for `pre_enkf_params` of the `sda.enkf.multisite` function.
##' @details This function will add data and constants into each block that are needed for the MCMC sampling.
##'
##' @description This function provides the block-based MCMC sampling approach.
@@ -18,6 +18,15 @@
##' @return It returns the `build.block.xy` object and the analysis results.
##' @importFrom dplyr %>%
analysis_sda_block <- function (settings, block.list.all, X, obs.mean, obs.cov, t, nt, MCMC.args, block.list.all.pre = NULL) {
+ # grab cores from settings.
+ cores <- as.numeric(settings$state.data.assimilation$batch.settings$general.job$cores)
+ # if we didn't assign number of CPUs in the settings.
+ if (length(cores) == 0 | is.null(cores)) {
+ cores <- parallel::detectCores()
+ }
+ cores <- cores - 1
+ # if we only have one CPU.
+ if (cores < 1) cores <- 1
#convert from vector values to block lists.
if ("try-error" %in% class(try(block.results <- build.block.xy(settings = settings,
block.list.all = block.list.all,
@@ -58,14 +67,25 @@ analysis_sda_block <- function (settings, block.list.all, X, obs.mean, obs.cov,
#parallel for loop over each block.
PEcAn.logger::logger.info(paste0("Running MCMC ", "for ", length(block.list.all[[t]]), " blocks"))
- if ("try-error" %in% class(try(block.list.all[[t]] <- furrr::future_map(block.list.all[[t]], MCMC_block_function, .progress = T)))) {
+ cl <- parallel::makeCluster(as.numeric(cores))
+ doSNOW::registerDoSNOW(cl)
+ l <- NULL
+ if ("try-error" %in% class(try(block.list.all[[t]] <- foreach::foreach(l = block.list.all[[t]],
+ .packages = c("Kendall",
+ "purrr",
+ "nimble",
+ "PEcAnAssimSequential")) %dopar% {MCMC_block_function(l)}))) {
+ parallel::stopCluster(cl)
+ foreach::registerDoSEQ()
PEcAn.logger::logger.severe("Something wrong within the MCMC_block_function function.")
return(0)
}
+ parallel::stopCluster(cl)
+ foreach::registerDoSEQ()
PEcAn.logger::logger.info("Completed!")
#convert from block lists to vector values.
- if ("try-error" %in% class(try(V <- block.2.vector(block.list.all[[t]], X, H)))) {
+ if ("try-error" %in% class(try(V <- block.2.vector(block.list.all[[t]], X, H, settings$state.data.assimilation$adjustment)))) {
PEcAn.logger::logger.severe("Something wrong within the block.2.vector function.")
return(0)
}
@@ -77,7 +97,8 @@ analysis_sda_block <- function (settings, block.list.all, X, obs.mean, obs.cov,
mu.a = V$mu.a,
Pa = V$Pa,
Y = Y,
- R = R))
+ R = R,
+ analysis = V$analysis))
}
##' @title build.block.xy
@@ -94,7 +115,6 @@ analysis_sda_block <- function (settings, block.list.all, X, obs.mean, obs.cov,
##'
##' @description This function split long vector and covariance matrix into blocks corresponding to the localization.
##'
-##' @return It returns the `build.block.xy` object with data and constants filled in.
build.block.xy <- function(settings, block.list.all, X, obs.mean, obs.cov, t) {
#set q.type from settings.
if (settings$state.data.assimilation$q.type == "vector") {
@@ -104,7 +124,7 @@ build.block.xy <- function(settings, block.list.all, X, obs.mean, obs.cov, t) {
}
#grab basic arguments based on X.
site.ids <- unique(attributes(X)$Site)
- var.names <- unique(attributes(X)$dimnames[[2]])
+ var.names <- unique(colnames(X))
mu.f <- colMeans(X)
Pf <- stats::cov(X)
if (length(diag(Pf)[which(diag(Pf)==0)]) > 0) {
@@ -112,15 +132,16 @@ build.block.xy <- function(settings, block.list.all, X, obs.mean, obs.cov, t) {
PEcAn.logger::logger.warn("The zero variances in Pf is being replaced by one fifth of the minimum variance in those matrices respectively.")
}
#distance calculations and localization
- site.locs <- settings$run %>%
- purrr::map('site') %>%
- purrr::map_dfr(~c(.x[['lon']],.x[['lat']]) %>% as.numeric)%>%
- t %>%
- `colnames<-`(c("Lon","Lat")) %>%
- `rownames<-`(site.ids)
- #Finding the distance between the sites
- dis.matrix <- sp::spDists(site.locs, longlat = TRUE)
- if (!is.null(settings$state.data.assimilation$Localization.FUN)) {
+ if (!is.null(settings$state.data.assimilation$Localization.FUN) &&
+ ! as.numeric(settings$state.data.assimilation$scalef) == 0) {
+ site.locs <- settings$run %>%
+ purrr::map('site') %>%
+ purrr::map_dfr(~c(.x[['lon']],.x[['lat']]) %>% as.numeric)%>%
+ t %>%
+ `colnames<-`(c("Lon","Lat")) %>%
+ `rownames<-`(site.ids)
+ #Finding the distance between the sites
+ dis.matrix <- sp::spDists(site.locs, longlat = TRUE)
Localization.FUN <- get(settings$state.data.assimilation$Localization.FUN)
#turn that into a blocked matrix format
blocked.dis <- block_matrix(dis.matrix %>% as.numeric(), rep(length(var.names), length(site.ids)))
@@ -175,7 +196,8 @@ build.block.xy <- function(settings, block.list.all, X, obs.mean, obs.cov, t) {
f.2.y.ind <- obs.mean[[t]] %>%
purrr::map(\(x)which(var.names %in% names(x))) %>%
unlist %>%
- unique
+ unique %>%
+ sort
H <- list(ind = f.2.y.ind %>% purrr::map(function(start){
seq(start, length(site.ids) * length(var.names), length(var.names))
}) %>% unlist() %>% sort)
@@ -214,9 +236,10 @@ build.block.xy <- function(settings, block.list.all, X, obs.mean, obs.cov, t) {
block.list[[i]]$data$r <- diag(1, length(var.names))
block.h <- matrix(1, 1, length(var.names))
} else {
- block.list[[i]]$data$y.censored <- rep(NA, max(obs_per_site))
- block.list[[i]]$data$r <- diag(1, max(obs_per_site))
- block.h <- matrix(1, 1, max(obs_per_site))
+ block.list[[i]]$data$y.censored <- rep(NA, length(f.2.y.ind))
+ block.list[[i]]$data$r <- diag(1, length(f.2.y.ind))
+ block.h <- matrix(NA, 1, length(var.names))
+ block.h[1, f.2.y.ind] <- 1
}
} else {
block.list[[i]]$data$y.censored <- y.censored[y.start:y.end]
@@ -227,7 +250,7 @@ build.block.xy <- function(settings, block.list.all, X, obs.mean, obs.cov, t) {
block.list[[i]]$H <- block.h
block.list[[i]]$constant$H <- which(apply(block.h, 2, sum) == 1)
block.list[[i]]$constant$N <- length(f.start:f.end)
- block.list[[i]]$constant$YN <- length(y.start:y.end)
+ block.list[[i]]$constant$YN <- length(block.list[[i]]$data$y.censored)
block.list[[i]]$constant$q.type <- q.type
}
names(block.list) <- site.ids
@@ -315,6 +338,7 @@ build.block.xy <- function(settings, block.list.all, X, obs.mean, obs.cov, t) {
#if it's Wishart Q, we need to replace any NA Y with corresponding muf, and r with Pf.
#also, if length of observation is 1, the Wishart Q is not suitable for the MCMC.
#we will then need to change the Q type to 3, which is the vector Q.
+ #the wishart-MCMC is still under development, so I commented them out for now.
if (q.type == 4) {
for (i in seq_along(block.list)) {
#check length.
@@ -357,7 +381,7 @@ build.block.xy <- function(settings, block.list.all, X, obs.mean, obs.cov, t) {
##'
##' @return It returns the `block.list` object with initial conditions filled in.
MCMC_Init <- function (block.list, X) {
- var.names <- unique(attributes(X)$dimnames[[2]])
+ var.names <- unique(names(X))
#sample mu.f from X.
sample.mu.f <- colMeans(X)
for (i in seq_along(block.list)) {
@@ -378,7 +402,11 @@ MCMC_Init <- function (block.list, X) {
#if we want the vector q.
if (block.list[[i]]$constant$q.type == 3) {
for (j in seq_along(block.list[[i]]$data$y.censored)) {
- block.list[[i]]$Inits$q <- c(block.list[[i]]$Inits$q, stats::rgamma(1, shape = block.list[[i]]$data$aq[j], rate = block.list[[i]]$data$bq[j]))
+ temp.q <- stats::rgamma(1, shape = block.list[[i]]$data$aq[j], rate = block.list[[i]]$data$bq[j])
+ if (temp.q < 0.001) {
+ temp.q <- 0.001
+ }
+ block.list[[i]]$Inits$q <- c(block.list[[i]]$Inits$q, temp.q)
}
} else if (block.list[[i]]$constant$q.type == 4) {
#if we want the wishart Q.
@@ -401,6 +429,8 @@ MCMC_Init <- function (block.list, X) {
##'
##' @return It returns the `block` object with analysis results filled in.
MCMC_block_function <- function(block) {
+ # disable printing out messages.
+ nimbleOptions(verbose = FALSE, MCMCprogressBar = FALSE, checkNimbleFunction = FALSE, checkDuplicateNodeDefinitions = FALSE)
#build nimble model
#TODO: harmonize the MCMC code between block-based and general analysis functions to reduce the complexity of code.
model_pred <- nimble::nimbleModel(GEF.MultiSite.Nimble,
@@ -430,12 +460,11 @@ MCMC_block_function <- function(block) {
conf$addSampler(target = samplerLists[[X.mod.ind]]$target, type = "ess",
control = list(propCov= block$data$pf, adaptScaleOnly = TRUE,
latents = "X", pfOptimizeNparticles = TRUE))
-
#add toggle Y sampler.
for (i in 1:block$constant$YN) {
conf$addSampler(paste0("y.censored[", i, "]"), 'toggle', control=list(type='RW'))
}
- conf$printSamplers()
+ # conf$printSamplers()
#compile MCMC
Rmcmc <- nimble::buildMCMC(conf)
Cmodel <- nimble::compileNimble(model_pred)
@@ -504,15 +533,21 @@ MCMC_block_function <- function(block) {
mua <- colMeans(dat[, iX])
pa <- stats::cov(dat[, iX])
}
-
- if (length(iX.mod) == 1) {
- mufa <- mean(dat[, iX.mod])
- pfa <- stats::var(dat[, iX.mod])
+ # construct X.all object.
+ # NA only occurs when there is zero observation.
+ if (!any(is.na(block$data$y.censored))) {
+ H <- colSums(block$H)
+ obs.inds <- which(H == 1)
+ non.obs.inds <- which(H == 0)
+ X.all.inds <- H
+ X.all.inds[obs.inds] <- iX
+ X.all.inds[non.obs.inds] <- iX.mod[non.obs.inds]
+ mufa <- colMeans(dat[, X.all.inds])
+ pfa <- stats::cov(dat[, X.all.inds])
} else {
mufa <- colMeans(dat[, iX.mod])
pfa <- stats::cov(dat[, iX.mod])
}
-
#return values.
block$update <- list(aq = aq, bq = bq, mua = mua, pa = pa, mufa = mufa, pfa = pfa)
return(block)
@@ -537,6 +572,14 @@ update_q <- function (block.list.all, t, nt, aqq.Init = NULL, bqq.Init = NULL, M
if (is.null(MCMC_dat)) {
#loop over blocks
if (t == 1) {
+ fresh.run <- TRUE
+ } else if (t > 1 & is.null(block.list.all[[t-1]])) {
+ fresh.run <- TRUE
+ } else {
+ fresh.run <- FALSE
+ }
+ #if t=1 or if it's a fresh run.
+ if (fresh.run) {
for (i in seq_along(block.list)) {
nvar <- length(block.list[[i]]$data$muf)
nobs <- length(block.list[[i]]$data$y.censored)
@@ -562,7 +605,7 @@ update_q <- function (block.list.all, t, nt, aqq.Init = NULL, bqq.Init = NULL, M
block.list[[i]]$data$bq <- block.list[[i]]$bqq[t]
}
}
- } else if (t > 1) {
+ } else {
if (!is.null(block.list.all.pre)) {
block.list.pre <- block.list.all.pre[[t - 1]]
} else {
@@ -609,27 +652,38 @@ update_q <- function (block.list.all, t, nt, aqq.Init = NULL, bqq.Init = NULL, M
##' @param block.list lists of blocks generated by the `build.block.xy` function.
##' @param X A matrix contains ensemble forecasts.
##' @param H H index created by the `construct_nimble_H` function.
+##' @param adjustment logical variable determine if we want to adjust the analysis ensembles based on likelihood.
##'
##' @return It returns a list of analysis results by MCMC sampling.
-block.2.vector <- function (block.list, X, H) {
+block.2.vector <- function (block.list, X, H, adjustment) {
+ # initialize site.ids, mu.a, mu.f, pa, and pf.
site.ids <- attributes(X)$Site
mu.f <- mu.a <- c()
Pf <- Pa <- matrix(0, length(site.ids), length(site.ids))
+ analysis <- X
+ # loop over blocks.
for (L in block.list) {
+ # grab index for the locations within the current block.
ind <- c()
for (id in L$site.ids) {
ind <- c(ind, which(site.ids == id))
}
- #convert mu.f and pf
- mu.a[ind] <- mu.f[ind] <- L$update$mufa
- Pa[ind, ind] <- Pf[ind, ind] <- L$update$pfa
- #convert mu.a and pa
- ind <- intersect(ind, H$H.ind)
- mu.a[ind] <- L$update$mua
- Pa[ind, ind] <- L$update$pa
+ # grab mu.a, mu.f, pa, and pf from the MCMC updates.
+ mu.a[ind] <- L$update$mufa
+ Pa[ind, ind] <- L$update$pfa
+ mu.f[ind] <- L$data$muf
+ Pf[ind, ind] <- L$data$pf
+ # adjustment.
+ if (as.logical(adjustment)) {
+ sample <- as.data.frame(adj.ens(Pf[ind, ind], X[,ind], mu.f[ind], mu.a[ind], Pa[ind, ind]))
+ } else {
+ sample <- as.data.frame(mvtnorm::rmvnorm(nrow(X), L$update$mufa, L$update$pfa, method = "svd"))
+ }
+ analysis[,ind] <- sample
}
return(list(mu.f = mu.f,
Pf = Pf,
mu.a = mu.a,
- Pa = Pa))
+ Pa = Pa,
+ analysis = analysis))
}
\ No newline at end of file
diff --git a/modules/assim.sequential/R/Helper.functions.R b/modules/assim.sequential/R/Helper.functions.R
index 9c3ccfcc31f..56730abbc84 100644
--- a/modules/assim.sequential/R/Helper.functions.R
+++ b/modules/assim.sequential/R/Helper.functions.R
@@ -7,7 +7,7 @@
#' @return A list the same dimension as X, with each column of each dataframe
#' modified by replacing outlier points with the column median
#' @export
-#' @importFrom magrittr %>%
+#' @importFrom dplyr %>%
#'
outlier.detector.boxplot<-function(X) {
X <- X %>%
@@ -86,7 +86,7 @@ SDA_control <-
#' @description This function uses a set of scaling factors defined in the pecan XML to scale a given matrix
#' @return rescaled Matrix
#' @export
-#' @importFrom magrittr %>%
+#' @importFrom dplyr %>%
rescaling_stateVars <- function(settings, X, multiply=TRUE) {
FUN <- ifelse(multiply, .Primitive('*'), .Primitive('/'))
diff --git a/modules/assim.sequential/R/Nimble_codes.R b/modules/assim.sequential/R/Nimble_codes.R
index cc63a7e2968..90ca65c71e7 100644
--- a/modules/assim.sequential/R/Nimble_codes.R
+++ b/modules/assim.sequential/R/Nimble_codes.R
@@ -187,14 +187,12 @@ GEF.MultiSite.Nimble <- nimbleCode({
# Sorting out qs
q[1:YN, 1:YN] ~ dwish(R = aq[1:YN, 1:YN], df = bq) ## aq and bq are estimated over time
}
-
for (i in 1:nH) {
tmpX[i] <- X.mod[H[i]]
Xs[i] <- tmpX[i]
}
## add process error to x model but just for the state variables that we have data and H knows who
X[1:YN] ~ dmnorm(Xs[1:YN], prec = q[1:YN, 1:YN])
-
## Likelihood
y.censored[1:YN] ~ dmnorm(X[1:YN], prec = r[1:YN, 1:YN])
@@ -213,7 +211,7 @@ GEF.MultiSite.Nimble <- nimbleCode({
if (length(H) == 1) {
X[i] ~ dnorm(X.mod[H], sd = 1/sqrt(q[i]))
#likelihood
- y.censored[i] ~ dnorm(X[i], sd = 1/sqrt(r[i]))
+ y.censored[i] ~ dnorm(X[i], sd = 1/sqrt(r[i, i]))
} else {
#sample latent variable X.
X[i] ~ dnorm(X.mod[H[i]], sd = 1/sqrt(q[i]))
diff --git a/modules/assim.sequential/R/Prep_OBS_SDA.R b/modules/assim.sequential/R/Prep_OBS_SDA.R
index ff048b3e0ea..2127b4105f3 100644
--- a/modules/assim.sequential/R/Prep_OBS_SDA.R
+++ b/modules/assim.sequential/R/Prep_OBS_SDA.R
@@ -7,7 +7,7 @@
#'
#' @return mean and covariance of observations
#'
-#' @importFrom magrittr %>%
+#' @importFrom dplyr %>%
#' @export
#'
Prep_OBS_SDA <- function(settings, out_dir, AGB_dir, Search_Window=30){
diff --git a/modules/assim.sequential/R/SDA_parallel_downscale.R b/modules/assim.sequential/R/SDA_parallel_downscale.R
new file mode 100644
index 00000000000..681a3a20941
--- /dev/null
+++ b/modules/assim.sequential/R/SDA_parallel_downscale.R
@@ -0,0 +1,443 @@
+#' @description
+#' This function helps to stack target data layers from various GeoTIFF maps (with different extents, CRS, and resolutions) to a single map.
+#' @title stack_covariates_2_geotiff
+#'
+#' @param outdir character: the output directory where the stacked GeoTIFF file will be generated.
+#' @param year numeric: the year of when the covariates are stacked.
+#' @param base.map.dir character: path to the GeoTIFF file within which the extents and CRS will be used to generate the final map.
+#' @param cov.tif.file.list list: a list contains sub-lists with each including path to the corresponding map and the variables to be extracted (e.g., list(LC = list(dir = "path/to/landcover.tiff", var.name = "LC")).
+#' @param normalize boolean: decide if we want to normalize each data layer, the default is TRUE.
+#' @param cores numeric: how many CPus to be used in the calculation, the default is the total CPU number you have.
+#'
+#' @return path to the exported GeoTIFF file.
+#'
+#' @export
+#'
+#' @author Dongchen Zhang
+#' @importFrom foreach %dopar%
+stack_covariates_2_geotiff <- function(outdir, year, base.map.dir, cov.tif.file.list, normalize = T, cores = parallel::detectCores()) {
+ # create the folder if it doesn't exist.
+ if (!file.exists(outdir)) {
+ dir.create(outdir)
+ }
+ # parallel loop.
+ # register parallel nodes.
+ if (cores > length(cov.tif.file.list)) {
+ cores <- length(cov.tif.file.list)
+ }
+ cl <- parallel::makeCluster(as.numeric(cores))
+ doSNOW::registerDoSNOW(cl)
+ #progress bar.
+ pb <- utils::txtProgressBar(min=1, max=length(cov.tif.file.list), style=3)
+ progress <- function(n) utils::setTxtProgressBar(pb, n)
+ opts <- list(progress=progress)
+ # foreach loop.
+ f <- NULL
+ paths <- foreach::foreach(f = cov.tif.file.list,
+ .packages=c("Kendall", "terra"),
+ .options.snow=opts) %dopar% {
+ # load the base map.
+ base.map <- terra::rast(base.map.dir)
+ # read geotif file.
+ temp.rast <- terra::rast(f$dir)
+ # normalize.
+ if (normalize & !"LC" %in% f$var.name) {
+ nx <- terra::minmax(temp.rast)
+ temp.rast <- (temp.rast - nx[1,]) / (nx[2,] - nx[1,])
+ }
+ # set name to layers if we set it up in advance.
+ # otherwise the original layer name will be used.
+ if (!is.null(f$var.name)) {
+ names(temp.rast) <- f$var.name
+ }
+ # raster operations.
+ if (! terra::crs(base.map) == terra::crs(temp.rast)) {
+ terra::crs(temp.rast) <- terra::crs(base.map)
+ }
+ if (! terra::ext(base.map) == terra::ext(temp.rast)) {
+ temp.rast <- terra::crop(temp.rast, base.map)
+ }
+ if (! all(c(nrow(base.map) == nrow(temp.rast), ncol(base.map) == ncol(temp.rast)))) {
+ temp.rast <- terra::resample(temp.rast, base.map)
+ }
+ # write the raster into disk.
+ file.name <- paste0(f$var.name, collapse = "_")
+ path <- file.path(outdir, paste0(file.name, ".tiff"))
+ terra::writeRaster(temp.rast, path)
+ return(path)
+ } %>% unlist
+ # stop parallel.
+ parallel::stopCluster(cl)
+ foreach::registerDoSEQ()
+ gc()
+ # combine rasters.
+ all.rast <- terra::rast(paths)
+ # write all covariates into disk.
+ terra::writeRaster(all.rast, file.path(outdir, paste0("covariates_", year, ".tiff")), overwrite = T)
+ # remove previous tiff files.
+ unlink(paths)
+ # return results.
+ return(file.path(outdir, paste0("covariates_", year, ".tiff")))
+}
+
+#' @description
+#' convert settings to geospatial points in terra.
+#' @title pecan_settings_2_pts
+#'
+#' @param settings PEcAn settings: either a character that points to the settings or shape file or the actual pecan settings object will be accepted.
+#'
+#' @return terra spatial points object.
+#'
+#' @author Dongchen Zhang
+pecan_settings_2_pts <- function(settings) {
+ if (is.character(settings)) {
+ # if it's shapefile.
+ if (grepl(".shp", settings)) {
+ return(terra::vect(settings))
+ }
+ # read settings.
+ settings <- PEcAn.settings::read.settings(settings)
+ }
+ # grab lat/lon.
+ site.locs <- settings$run %>% purrr::map('site') %>%
+ purrr::map_dfr(~c(.x[['lon']],.x[['lat']]) %>% as.numeric)%>%
+ t %>% `colnames<-`(c("Lon","Lat")) %>% as.data.frame()
+ # convert lat/lon to terra::vect.
+ pts <- terra::vect(site.locs, geom = c("Lon", "Lat"), crs = "EPSG:4326")
+ return(pts)
+}
+
+#' @description
+#' This function helps to build the data frame (pixels by data columns) for only vegetated pixels to improve the efficiency.
+#' Note that the `LC` field using the `MODIS land cover` observations (MCD12Q1.061) must be supplied in the covariates to make this function work.
+#' @title stack_covariates_2_df
+#'
+#' @param rast.dir character: a character that points to the covariates raster file generated by the `stack_covariates_2_geotiff` function.
+#' @param cores numeric: how many CPus to be used in the calculation, the default is the total CPU number you have.
+#'
+#' @return list containing the data frame of covariates for vegetated pixels and the corresponding index of the pixels.
+#'
+#' @author Dongchen Zhang
+#' @importFrom foreach %dopar%
+stack_covariates_2_df <- function(rast.dir, cores = parallel::detectCores()) {
+ # load maps.
+ all.rast <- terra::rast(rast.dir)
+ # parallel loop.
+ layer.names <- names(all.rast)
+ # register parallel nodes.
+ if (cores > length(layer.names)) {
+ cores <- length(layer.names)
+ }
+ cl <- parallel::makeCluster(as.numeric(cores))
+ doSNOW::registerDoSNOW(cl)
+ #progress bar.
+ pb <- utils::txtProgressBar(min=1, max=length(layer.names), style=3)
+ progress <- function(n) utils::setTxtProgressBar(pb, n)
+ opts <- list(progress=progress)
+ # foreach loop.
+ r <- NULL
+ vecs <- foreach::foreach(r = seq_along(layer.names),
+ .packages=c("Kendall", "terra"),
+ .options.snow=opts) %dopar% {
+ all.rast <- terra::rast(rast.dir)
+ temp.vec <- matrix(all.rast[[r]], byrow = T)
+ na.inds <- which(is.na(temp.vec))
+ # if it's LC layer.
+ if ("LC" == names(all.rast)[r]) {
+ non.veg.inds <- which(! temp.vec %in% 1:8)
+ # non.veg.inds <- which(! temp.vec %in% 0:11)
+ na.inds <- unique(c(na.inds, non.veg.inds))
+ }
+ return(list(vec = temp.vec,
+ na.inds = na.inds))
+ }
+ # stop parallel.
+ parallel::stopCluster(cl)
+ foreach::registerDoSEQ()
+ gc()
+ # grab uniqued NA index.
+ na.inds <- vecs %>% purrr::map("na.inds") %>% unlist %>% unique
+ # remove NA from each covariate.
+ cov.vecs <- vecs %>% purrr::map(function(v){
+ return(v$vec[-na.inds])
+ }) %>% dplyr::bind_cols() %>% `colnames<-`(layer.names) %>% as.data.frame()
+ non.na.inds <- seq_along(matrix(all.rast[[1]]))[-na.inds]
+ return(list(df = cov.vecs, non.na.inds = non.na.inds))
+}
+
+#' @description
+#' This function helps to create the training dataset of specific variable type and locations for downscaling.
+#' TODO: Add a ratio argument (training sample size/total sample size) so that we could calculate the out-of-sample accuracy.
+#' @title prepare_train_dat
+#'
+#' @param pts spatialpoints: spatial points returned by `terra::vectors` function.
+#' @param analysis numeric: data frame (rows: ensemble member; columns: site*state_variables) of updated ensemble analysis results from the `sda_enkf` function.
+#' @param covariates.dir character: path to the exported covariates GeoTIFF file.
+#' @param variable character: name of state variable. It should match up with the column names of the analysis data frame.
+#'
+#' @return matrix (num.sites, num.variables * num.ensemble + num.covariates) within which the first sets of columns contain values of state variables for each ensemble member of every site, and the rest columns contain the corresponding covariates.
+#'
+#' @author Dongchen Zhang
+prepare_train_dat <- function(pts, analysis, covariates.dir, variable) {
+ # read covariates.
+ cov.rast <- terra::rast(covariates.dir)
+ # extract covariates by locations.
+ predictors <- as.data.frame(terra::extract(cov.rast, pts, ID = FALSE))
+ covariate_names <- names(predictors)
+ if ("ID" %in% covariate_names) {
+ rm.ind <- which("ID" %in% covariate_names)
+ covariate_names <- covariate_names[-rm.ind]
+ predictors <- predictors[,-rm.ind]
+ }
+ # grab carbon data.
+ var.dat <- analysis[,which(colnames(analysis) == variable)] %>% t %>%
+ as.data.frame() %>% `colnames<-`(paste0("ensemble", seq(nrow(analysis))))
+ # combine carbon and predictor.
+ full_data <- cbind(var.dat, predictors)
+ full_data <- full_data[which(full_data$LC %in% 1:8),]
+ return(full_data)
+}
+
+#' @description
+#' This function helps to train the ML model across ensemble members in parallel.
+#' @title parallel_train
+#'
+#' @param full_data numeric: the matrix generated using the `prepare_train_dat` function.
+#' @param method character: machine learning method (currently support randomForest and xgboost).
+#' @param cores numeric: how many CPUs to be used in the calculation; the default is the total number of CPUs you have.
+#'
+#' @return list of trained models across ensemble members.
+#'
+#' @author Dongchen Zhang
+#' @importFrom foreach %dopar%
+parallel_train <- function(full_data, method = "randomForest", cores = parallel::detectCores()) {
+ # grab ensemble and predictor index.
+ col.names <- colnames(full_data)
+ ensemble.inds <- which(grepl("ensemble", col.names, fixed = TRUE))
+ predictor.inds <- seq_along(col.names)[-ensemble.inds]
+ # parallel train.
+ # register parallel nodes.
+ if (cores > length(ensemble.inds)) {
+ cores <- length(ensemble.inds)
+ }
+ cl <- parallel::makeCluster(as.numeric(cores))
+ doSNOW::registerDoSNOW(cl)
+ #progress bar.
+ pb <- utils::txtProgressBar(min=1, max=length(ensemble.inds), style=3)
+ progress <- function(n) utils::setTxtProgressBar(pb, n)
+ opts <- list(progress=progress)
+ # foreach loop.
+ i <- NULL
+ models <- foreach::foreach(i = ensemble.inds,
+ .packages=c("Kendall", "stats", method),
+ .options.snow=opts) %dopar% {
+ ensemble_col <- col.names[ensemble.inds[i]]
+ predictor_col <- col.names[predictor.inds]
+ # if it's randomForest.
+ if (method == "randomForest") {
+ formula <- stats::as.formula(paste(ensemble_col, "~", paste(predictor_col, collapse = " + ")))
+ model <- randomForest::randomForest(formula,
+ data = full_data,
+ ntree = 1000,
+ na.action = stats::na.omit,
+ keep.forest = TRUE,
+ importance = TRUE)
+ }
+ # if it's xgboost.
+ if (method == "xgboost") {
+ formula <- stats::as.formula(paste0("~ ", paste(predictor_col, collapse = " + "), " - 1"))
+ train.df <- stats::model.matrix(formula, data = full_data)
+ train.df <- xgboost::xgb.DMatrix(data = train.df, label = full_data[[ensemble_col]])
+ model <- xgboost::xgb.train(
+ params = list(
+ objective = "reg:squarederror",
+ eta = 0.3,
+ max_depth = 6,
+ subsample = 1,
+ colsample_bytree = 1
+ ),
+ data = train.df,
+ nrounds = 1000,
+ nthread = 1,
+ verbose = 0
+ )
+ }
+ model
+ }
+ # stop parallel.
+ parallel::stopCluster(cl)
+ foreach::registerDoSEQ()
+ gc()
+ return(models)
+}
+
+#' @description
+#' This function helps to predict the target variable observations based on the covariates.
+#' The prediction is working in parallel across vegetated pixels.
+#' @title parallel_prediction
+#'
+#' @param base.map.dir character: path to the GeoTIFF file within which the extents and CRS will be used to generate the ensemble maps.
+#' @param models list: trained models across ensemble members generated by the `parallel_train` function.
+#' @param cov.vecs numeric: data frame containing covariates across vegetated pixels generated from the `stack_covariates_2_df` function.
+#' @param non.na.inds numeric: the corresponding index of vegetated pixels generated from the `stack_covariates_2_df` function.
+#' @param outdir character: the output directory where the downscaled maps will be stored.
+#' @param name list: containing the time and variable name to create the final GeoTIFF file name.
+#' @param cores numeric: how many CPUs to be used in the calculation; the default is the total number of CPUs you have.
+#'
+#' @return paths to the ensemble downscaled maps.
+#'
+#' @author Dongchen Zhang
+#' @importFrom foreach %dopar%
+parallel_prediction <- function(base.map.dir, models, cov.vecs, non.na.inds, outdir, name, cores = parallel::detectCores()) {
+ # load base map.
+ base.map <- terra::rast(base.map.dir)
+ dims <- dim(base.map)
+ # setup progress bar for ensemble members.
+ pb <- utils::txtProgressBar(min = 0, max = length(models), style = 3)
+ paths <- c()
+ # loop over ensemble members.
+ for (i in seq_along(models)) {
+ # update progress bar.
+ utils::setTxtProgressBar(pb, i)
+ # go to the next if the current file has already been generated.
+ file.name <- paste0(c("ensemble", i, name$time, name$variable), collapse = "_")
+ if (file.exists(file.path(outdir, paste0(file.name, ".tiff")))) {
+ next
+ }
+ # register parallel nodes.
+ cl <- parallel::makeCluster(cores)
+ doSNOW::registerDoSNOW(cl)
+ # foreach parallel.
+ model <- models[[i]]
+ d <- NULL
+ output <- foreach::foreach(d=itertools::isplitRows(cov.vecs, chunks=cores),
+ .packages=c("stats", "randomForest", "xgboost")) %dopar% {
+ stats::predict(model, d)
+ } %>% unlist
+ # export to geotiff map.
+ vec <- rep(NA, dims[1]*dims[2])
+ vec[non.na.inds] <- output
+ map <- terra::rast(matrix(vec, dims[1], dims[2], byrow = T))
+ terra::ext(map) <- terra::ext(base.map)
+ terra::crs(map) <- terra::crs(base.map)
+ terra::writeRaster(map, file.path(outdir, paste0(file.name, ".tiff")))
+ paths <- c(paths, file.path(outdir, paste0(file.name, ".tiff")))
+ # stop parallel.
+ parallel::stopCluster(cl)
+ foreach::registerDoSEQ()
+ gc()
+ }
+ return(paths)
+}
+
+#' @description
+#' This is the main function to execute the machine learning training and prediction.
+#' Note it will be deployed by each node you requested if the qsub feature is enabled below.
+#' @title downscale_main
+#'
+#' @param settings character: physical path that points to the pecan settings XML file.
+#' @param analysis numeric: data frame (rows: ensemble member; columns: site*state_variables) of updated ensemble analysis results from the `sda_enkf` function.
+#' @param covariates.dir character: path to the exported covariates GeoTIFF file.
+#' @param time character: the time tag used to differentiate the outputs from others.
+#' @param variable character: name of state variable. It should match up with the column names of the analysis data frame.
+#' @param outdir character: the output directory where the downscaled maps will be stored.
+#' @param base.map.dir character: path to the GeoTIFF file within which the extents and CRS will be used to generate the ensemble maps.
+#' @param method character: machine learning method, default is randomForest (currently support randomForest and xgboost).
+#' @param cores numeric: how many CPUs to be used in the calculation; the default is the total number of CPUs you have.
+#'
+#' @return paths to the ensemble downscaled maps.
+#'
+#' @author Dongchen Zhang
+downscale_main <- function(settings, analysis, covariates.dir, time, variable, outdir, base.map.dir, method = "randomForest", cores = parallel::detectCores()) {
+ # check packages.
+ if (!method %in% rownames(utils::installed.packages())) {
+ PEcAn.logger::logger.info(paste("The package:", method, "is not installed."))
+ return(0)
+ }
+ if (!"itertools" %in% rownames(utils::installed.packages())) {
+ PEcAn.logger::logger.info("The package: itertools is not installed.")
+ return(0)
+ }
+ if (!"doSNOW" %in% rownames(utils::installed.packages())) {
+ PEcAn.logger::logger.info("The package: doSNOW is not installed.")
+ return(0)
+ }
+ if (!"foreach" %in% rownames(utils::installed.packages())) {
+ PEcAn.logger::logger.info("The package: foreach is not installed.")
+ return(0)
+ }
+ # create folder specific for the time and carbon type.
+ folder.name <- file.path(outdir, paste0(c(variable, time), collapse = "_"))
+ if (!file.exists(folder.name)) {
+ dir.create(folder.name)
+ }
+ # prepare training data.
+ PEcAn.logger::logger.info("Preparing training data.")
+ # convert settings into geospatial points.
+ pts <- pecan_settings_2_pts(settings)
+ full_data <- prepare_train_dat(pts = pts,
+ analysis = analysis,
+ covariates.dir = covariates.dir,
+ variable = variable)
+ # remove NAs from the training data set.
+ full_data <- full_data[stats::complete.cases(full_data),]
+ # convert LC into factor.
+ if ("LC" %in% colnames(full_data)) {
+ full_data[,"LC"] <- factor(full_data[,"LC"])
+ }
+ # parallel train.
+ PEcAn.logger::logger.info("Parallel training.")
+ models <- parallel_train(full_data = full_data, method = method, cores = cores)
+ # save trained models for future analysis.
+ # saveRDS(models, file.path(folder.name, "rf_models.rds"))
+ save(models, file = file.path(folder.name, "ml_models.Rdata"))
+ # convert stacked covariates geotiff file into data frame.
+ PEcAn.logger::logger.info("Converting geotiff to df.")
+ cov.df <- stack_covariates_2_df(rast.dir = covariates.dir, cores = cores)
+ # reconstruct LC because of the computation accuracy.
+ # cov.df$df$LC[which(cov.df$df$LC < 1)] <- 0
+ # convert LC into factor.
+ if ("LC" %in% colnames(cov.df$df)) {
+ cov.df$df[,"LC"] <- factor(cov.df$df[,"LC"])
+ }
+ # format the prediction covariates for xgboost.
+ if (method == "xgboost") {
+ formula <- stats::as.formula(paste0("~ ", paste(colnames(cov.df$df), collapse = " + "), " - 1"))
+ cov.df$df <- stats::model.matrix(formula, data = cov.df$df)
+ }
+ # parallel prediction.
+ PEcAn.logger::logger.info("Parallel prediction.")
+ paths <- parallel_prediction(base.map.dir = base.map.dir,
+ models = models,
+ cov.vecs = cov.df$df,
+ non.na.inds = cov.df$non.na.inds,
+ outdir = folder.name,
+ name = list(time = as.character(time), variable = variable),
+ cores = cores)
+ # calculate mean and std.
+ PEcAn.logger::logger.info("Calculate mean and std.")
+ ras.all <- terra::rast(paths)
+ mean <- terra::app(ras.all, "mean")
+ std <- terra::app(ras.all, "std")
+ # write into geotiff files.
+ image.base.name <- paste0(time, "_", variable, ".tiff")
+ terra::writeRaster(mean, filename = file.path(folder.name, paste0("mean_", image.base.name)))
+ terra::writeRaster(std, filename = file.path(folder.name, paste0("std_", image.base.name)))
+ return(list(ensemble.prediction.files = paths,
+ mean.prediction.file = file.path(folder.name, paste0("mean_", image.base.name)),
+ std.prediction.file = file.path(folder.name, paste0("std_", image.base.name))))
+}
+
+#' @description
+#' This qsub function helps to run the submitted qsub jobs for running the downscale_main function.
+#' @title downscale_qsub_main
+#'
+#' @param folder.path Character: physical path where the job file is located.
+#'
+#' @export
+#' @author Dongchen Zhang
+downscale_qsub_main <- function(folder.path) {
+ dat <- readRDS(file.path(folder.path, "dat.rds"))
+ out <- downscale_main(dat$settings, dat$analysis.yr, dat$covariates.dir, lubridate::year(dat$time), dat$variable, dat$outdir, dat$base.map.dir, dat$method, dat$cores)
+ saveRDS(out, file.path(folder.path, "res.rds"))
+}
\ No newline at end of file
diff --git a/modules/assim.sequential/R/covariates_loader.R b/modules/assim.sequential/R/covariates_loader.R
new file mode 100644
index 00000000000..1fe78f916e0
--- /dev/null
+++ b/modules/assim.sequential/R/covariates_loader.R
@@ -0,0 +1,85 @@
+#' Generate site-year covariates from yearly GeoTIFF stacks (internal)
+#'
+#' Scans `cov_dir` for files like `covariates_YYYY.tiff` (see `file_prefix`), extracts raster
+#' values at the provided site coordinates, and returns a long tibble.
+#'
+#' @param site_coords data.frame with columns: site (chr or coercible), lon (num), lat (num).
+#' @param cov_dir directory containing yearly multi-layer GeoTIFFs.
+#' @param crs CRS string for the input points (default "EPSG:4326").
+#' @param file_prefix character prefix before the 4-digit year (default "covariates_").
+#' Ignored if `file_regex` is provided.
+#' @param file_regex optional full regex to match files (must contain a 4-digit year).
+#'
+#' @return tibble with columns site, year, and per-layer covariates.
+#' @keywords internal
+#' @noRd
+generate_covariates_df <- function(site_coords,
+ cov_dir,
+ crs = "EPSG:4326",
+ file_prefix = "covariates_",
+ file_regex = NULL) {
+ if (!dir.exists(cov_dir)) stop("`cov_dir` does not exist: ", cov_dir)
+
+ # validate site_coords
+ if (!all(c("lon", "lat") %in% names(site_coords))) {
+ stop("`site_coords` must have columns: lon, lat (and ideally site).")
+ }
+ if (!("site" %in% names(site_coords))) site_coords$site <- seq_len(nrow(site_coords))
+ site_coords$site <- as.character(site_coords$site)
+
+ site_coords$lon <- suppressWarnings(as.numeric(site_coords$lon))
+ site_coords$lat <- suppressWarnings(as.numeric(site_coords$lat))
+ if (anyNA(site_coords$lon) || anyNA(site_coords$lat)) {
+ bad <- site_coords$site[is.na(site_coords$lon) | is.na(site_coords$lat)]
+ stop("Found non-numeric lon/lat for sites: ", paste(bad, collapse = ", "))
+ }
+
+ # build points
+ coords_mat <- as.matrix(site_coords[, c("lon", "lat")])
+ pts <- terra::vect(coords_mat, type = "points", crs = crs)
+ pts$site <- site_coords$site
+
+ # discover files/years
+ pattern <- if (is.null(file_regex)) {
+ # escape any regex chars in prefix, then expect YYYY.tiff
+ paste0("^",
+ stringr::str_replace_all(file_prefix, "([\\^$.|?*+()\\[\\]{}])", "\\\\\\1"),
+ "\\d{4}\\.tiff$")
+ } else {
+ file_regex
+ }
+ tif_files <- list.files(cov_dir, pattern = pattern, full.names = TRUE)
+ if (length(tif_files) == 0) {
+ stop("No files matched pattern in: ", cov_dir, " (pattern: ", pattern, ")")
+ }
+
+ years <- as.integer(stringr::str_extract(basename(tif_files), "\\d{4}"))
+ if (any(is.na(years))) {
+ stop("Could not parse years from filenames: ",
+ paste(basename(tif_files)[is.na(years)], collapse = ", "))
+ }
+ ord <- order(years)
+ tif_files <- tif_files[ord]
+ years <- years[ord]
+
+ # per-year extractor
+ extract_year <- function(tif_path, year) {
+ r <- terra::rast(tif_path)
+ vals <- terra::extract(r, pts)
+
+ if ("ID" %in% names(vals)) {
+ vals <- vals[, setdiff(names(vals), "ID"), drop = FALSE]
+ }
+
+ out <- dplyr::as_tibble(vals)
+ if (nrow(out) != nrow(site_coords)) {
+ stop("Row mismatch for year ", year, ": expected ", nrow(site_coords),
+ " but got ", nrow(out), ". Check CRS/coordinates or raster extent.")
+ }
+
+ dplyr::mutate(out, site = site_coords$site, year = as.integer(year)) |>
+ dplyr::select("site", "year", dplyr::everything())
+ }
+
+ purrr::map2_dfr(tif_files, years, extract_year)
+}
diff --git a/modules/assim.sequential/R/debias_py.R b/modules/assim.sequential/R/debias_py.R
new file mode 100644
index 00000000000..af4ba1e84d4
--- /dev/null
+++ b/modules/assim.sequential/R/debias_py.R
@@ -0,0 +1,75 @@
+#' Load debias Python module (internal)
+#'
+#' Locate and import the Python residual-model module used by the SDA debias step.
+#' The search order prefers installed package paths and then falls back to a
+#' developer tree when running from source.
+#'
+#' **Search logic (first match wins):**
+#' 1. Installed package dirs: `system.file("python", pkg)`, `system.file("python_models", pkg)`
+#' 2. Dev fallbacks (from namespace path or `inst/`):
+#' - `/python`, `/python_models`
+#' - `inst/python`, `inst/python_models`
+#'
+#' **Module names recognized:**
+#' - Package directory: `pecan_debias/__init__.py` → imports `"pecan_debias"`
+#' - Single file: `debias.py` → imports `"debias"`
+#'
+#' The imported module is cached across calls. Pass `reload = TRUE` to force
+#' re-import (e.g., after editing the Python code in development).
+#'
+#' @param reload Logical; if `TRUE`, force re-import even if a cached module exists.
+#'
+#' @return A reticulate Python module object. Errors if no suitable path is found
+#' or if the import returns a null Python pointer.
+#'
+#' @keywords internal
+#' @noRd
+.get_debias_mod <- local({
+ mod <- NULL
+ function(reload = FALSE) {
+ if (reload || is.null(mod) || reticulate::py_is_null_xptr(mod)) {
+
+ pkg <- "PEcAnAssimSequential"
+
+ # Installed package roots
+ roots <- Filter(nzchar, c(
+ system.file("python", package = pkg),
+ system.file("python_models", package = pkg)
+ ))
+
+ # Dev fallbacks (if running from source)
+ if (!length(roots)) {
+ ns_path <- tryCatch(getNamespaceInfo(pkg, "path"), error = function(e) NA_character_)
+ roots <- unique(stats::na.omit(c(
+ file.path(ns_path, "python"),
+ file.path(ns_path, "python_models"),
+ normalizePath(file.path("inst", "python"), mustWork = FALSE),
+ normalizePath(file.path("inst", "python_models"), mustWork = FALSE)
+ )))
+ roots <- roots[dir.exists(roots)]
+ }
+
+ if (!length(roots)) {
+ stop("Could not find a python dir (inst/python or inst/python_models) in ", pkg, ".")
+ }
+
+ root <- roots[1]
+ # Prefer package dir; else single file
+ if (dir.exists(file.path(root, "pecan_debias")) &&
+ file.exists(file.path(root, "pecan_debias", "__init__.py"))) {
+ mod_name <- "pecan_debias"
+ } else if (file.exists(file.path(root, "debias.py"))) {
+ mod_name <- "debias"
+ } else {
+ stop("Expected either 'pecan_debias/__init__.py' or 'debias.py' under: ", root)
+ }
+
+ mod <<- reticulate::import_from_path(mod_name, path = root, convert = TRUE)
+ if (reticulate::py_is_null_xptr(mod)) {
+ stop("Import returned a null Python object (py_is_null_xptr == TRUE).")
+ }
+ }
+ mod
+ }
+})
+
diff --git a/modules/assim.sequential/R/sda.enkf_MultiSite.R b/modules/assim.sequential/R/sda.enkf_MultiSite.R
index 55b50248e19..f037fc96bc0 100644
--- a/modules/assim.sequential/R/sda.enkf_MultiSite.R
+++ b/modules/assim.sequential/R/sda.enkf_MultiSite.R
@@ -17,20 +17,24 @@
#' @param Q Process covariance matrix given if there is no data to estimate it.
#' @param restart Used for iterative updating previous forecasts. Default NULL. List object includes file path to previous runs and start date for SDA.
#' @param pre_enkf_params Used for passing pre-existing time-series of process error into the current SDA runs to ignore the impact by the differences between process errors.
-#' @param ensemble.samples Pass ensemble.samples from outside to avoid GitHub check issues.
+#' @param ensemble.samples list of ensemble parameters across PFTs. Default is NULL.
#' @param control List of flags controlling the behavior of the SDA.
-#' `trace` for reporting back the SDA outcomes;
#' `TimeseriesPlot` for post analysis examination;
-#' `debug` decide if we want to pause the code and examining the variables inside the function;
-#' `pause` decide if we want to pause the SDA workflow at current time point t;
-#' `Profiling` decide if we want to export the temporal SDA outputs in CSV file;
#' `OutlierDetection` decide if we want to execute the outlier detection each time after the model forecasting;
-#' `parallel_qsub` decide if we want to execute the `qsub` job submission under parallel mode;
#' `send_email` contains lists for sending email to report the SDA progress;
#' `keepNC` decide if we want to keep the NetCDF files inside the out directory;
#' `forceRun` decide if we want to proceed the Bayesian MCMC sampling without observations;
#' `run_parallel` decide if we want to run the SDA under parallel mode for the `future_map` function;
#' `MCMC.args` include lists for controling the MCMC sampling process (iteration, nchains, burnin, and nthin.).
+#' `merge_nc` determine if we want to merge all netCDF files across sites and ensembles.
+#' If it's set as `TRUE`, we will then combine all netCDF files into the `merged_nc` folder within the `outdir`.
+#' `execution` decides how we want to execute the model,
+#' including `local`, where we execute the model locally;
+#' `qsub`, where we use the traditional `start_model_runs` function for submission;
+#' `qsub_parallel`, where we first combine jobs and submit them into the SCC.
+#' @param debias List: R list containing the covariance directory and the start year.
+#' covariance directory should include GeoTIFF files named by year.
+#' start year is a numeric input which decides when to start the debiasing feature.
#' @param ... Additional arguments, currently ignored
#'
#' @return NONE
@@ -44,19 +48,16 @@ sda.enkf.multisite <- function(settings,
restart = NULL,
pre_enkf_params = NULL,
ensemble.samples = NULL,
- control=list(trace = TRUE,
- TimeseriesPlot = FALSE,
- debug = FALSE,
- pause = FALSE,
- Profiling = FALSE,
+ control=list(TimeseriesPlot = FALSE,
OutlierDetection=FALSE,
- parallel_qsub = TRUE,
send_email = NULL,
keepNC = TRUE,
forceRun = TRUE,
run_parallel = TRUE,
- MCMC.args = NULL),
- ...) {
+ MCMC.args = NULL,
+ merge_nc = TRUE,
+ execution = "local"),
+ debias = list(cov.dir = NULL, start.year = NULL), ...) {
#add if/else for when restart points to folder instead if T/F set restart as T
if(is.list(restart)){
old.dir <- restart$filepath
@@ -65,6 +66,7 @@ sda.enkf.multisite <- function(settings,
}else{
restart_flag = FALSE
}
+ # register parallel nodes.
if(control$run_parallel){
if (future::supportsMulticore()) {
future::plan(future::multicore)
@@ -72,8 +74,6 @@ sda.enkf.multisite <- function(settings,
future::plan(future::multisession)
}
}
- if (control$debug) browser()
- tictoc::tic("Preparation")
###-------------------------------------------------------------------###
### read settings ###
###-------------------------------------------------------------------###
@@ -84,49 +84,24 @@ sda.enkf.multisite <- function(settings,
outdir <- settings$modeloutdir # currently model runs locally, this will change if remote is enabled
rundir <- settings$host$rundir
host <- settings$host
-
forecast.time.step <- settings$state.data.assimilation$forecast.time.step #idea for later generalizing
nens <- as.numeric(settings$ensemble$size)
- processvar <- settings$state.data.assimilation$process.variance
- if(processvar=="TRUE"){
- processvar <- TRUE
- }else{
- processvar <- FALSE
- }
- Localization.FUN <- settings$state.data.assimilation$Localization.FUN # localization function
- scalef <- settings$state.data.assimilation$scalef %>% as.numeric() # scale factor for localization
+ processvar <- settings$state.data.assimilation$process.variance %>% as.logical
var.names <- sapply(settings$state.data.assimilation$state.variable, '[[', "variable.name")
names(var.names) <- NULL
multi.site.flag <- PEcAn.settings::is.MultiSettings(settings)
- readsFF <- NULL # this keeps the forward forecast
-
- is.local <- PEcAn.remote::is.localhost(settings$host)
- #------------------Reading up the MCMC settings
- nitr.GEF <- ifelse(is.null(settings$state.data.assimilation$nitrGEF),
- 5e4,
- settings$state.data.assimilation$nitrGEF %>%
- as.numeric)
- nthin <- ifelse(is.null(settings$state.data.assimilation$nthin),
- 10,
- settings$state.data.assimilation$nthin %>%
- as.numeric)
- nburnin<- ifelse(is.null(settings$state.data.assimilation$nburnin),
- 1e4,
- settings$state.data.assimilation$nburnin %>%
- as.numeric)
- censored.data<-ifelse(is.null(settings$state.data.assimilation$censored.data),
- TRUE,
- settings$state.data.assimilation$censored.data %>%
- as.logical)
+ is.local <- PEcAn.remote::is.localhost(host)
+ # if we want to censor data in the GEF.
+ censored.data<-settings$state.data.assimilation$censored.data
+ if (is.null(censored.data)) censored.data <- TRUE
#--------Initialization
FORECAST <- ANALYSIS <- ens_weights <- list()
enkf.params <- list()
restart.list <- NULL
#create SDA folder to store output
if(!dir.exists(settings$outdir)) dir.create(settings$outdir, showWarnings = FALSE)
-
##### Creating matrices that describe the bounds of the state variables
- ##### interval is remade everytime depending on the data at time t
+ ##### interval is remade every time depending on the data at time t
##### state.interval stays constant and converts new.analysis to be within the correct bounds
interval <- NULL
state.interval <- cbind(as.numeric(lapply(settings$state.data.assimilation$state.variables,'[[','min_value')),
@@ -146,13 +121,10 @@ sda.enkf.multisite <- function(settings,
distances <- sp::spDists(site.locs, longlat = TRUE)
#turn that into a blocked matrix format
blocked.dis <- block_matrix(distances %>% as.numeric(), rep(length(var.names), length(site.ids)))
-
}else{
conf.settings <- list(settings)
site.ids <- as.character(settings$run$site$id)
}
-
-
###-------------------------------------------------------------------###
### check dates before data assimilation ###
###-------------------------------------------------------------------###----
@@ -160,12 +132,10 @@ sda.enkf.multisite <- function(settings,
if (restart_flag) {
start.cut <- lubridate::ymd_hms(start.cut) #start.cut taken from restart list as date to begin runs
Start.year <-lubridate::year(start.cut)
-
}else{
start.cut <- lubridate::ymd_hms(settings$state.data.assimilation$start.date, truncated = 3)
Start.year <- (lubridate::year(settings$state.data.assimilation$start.date))
}
-
End.year <- lubridate::year(settings$state.data.assimilation$end.date) # dates that assimilations will be done for - obs will be subsetted based on this
assim.sda <- Start.year:End.year
obs.mean <- obs.mean[sapply(lubridate::year(names(obs.mean)), function(obs.year) obs.year %in% (assim.sda))] #checks obs.mean dates against assimyear dates
@@ -189,13 +159,8 @@ sda.enkf.multisite <- function(settings,
read_restart_times <- c(lubridate::ymd_hms(start.cut, truncated = 3), obs.times)
nt <- length(obs.times) #sets length of for loop for Forecast/Analysis
if (nt==0) PEcAn.logger::logger.severe('There has to be at least one Obs.')
-
-# Model Specific Setup ----------------------------------------------------
-
+ # Model Specific Setup ----------------------------------------------------
#--get model specific functions
- #my.write_restart <- paste0("PEcAn.", model, "::write_restart.", model)
- #my.read_restart <- paste0("PEcAn.", model, "::read_restart.", model)
- #my.split_inputs <- paste0("PEcAn.", model, "::split_inputs.", model)
do.call("library", list(paste0("PEcAn.", model)))
my.write_restart <- paste0("write_restart.", model)
my.read_restart <- paste0("read_restart.", model)
@@ -206,19 +171,17 @@ sda.enkf.multisite <- function(settings,
register.xml <- system.file(paste0("register.", model, ".xml"), package = paste0("PEcAn.", model))
register <- XML::xmlToList(XML::xmlParse(register.xml))
no_split <- !as.logical(register$exact.dates)
-
if (!exists(my.split_inputs) & !no_split) {
PEcAn.logger::logger.warn(my.split_inputs, "does not exist")
PEcAn.logger::logger.severe("please make sure that the PEcAn interface is loaded for", model)
PEcAn.logger::logger.warn(my.split_inputs, "If your model does not need the split function you can specify that in register.Model.xml in model's inst folder by adding FALSE tag.")
-
}
#split met if model calls for it
#create a folder to store extracted met files
if(!file.exists(paste0(settings$outdir, "/Extracted_met/"))){
dir.create(paste0(settings$outdir, "/Extracted_met/"))
}
-
+ PEcAn.logger::logger.info("Splitting mets!")
conf.settings <-conf.settings %>%
`class<-`(c("list")) %>% #until here, it separates all the settings for all sites that listed in the xml file
furrr::future_map(function(settings) {
@@ -242,13 +205,12 @@ sda.enkf.multisite <- function(settings,
# changing the start and end date which will be used for model2netcdf.model
settings$run$start.date <- lubridate::ymd_hms(settings$state.data.assimilation$start.date, truncated = 3)
settings$run$end.date <- lubridate::ymd_hms(settings$state.data.assimilation$end.date, truncated = 3)
-
}
} else{
inputs.split <- inputs
}
settings
- })
+ }, .progress = F)
conf.settings<- PEcAn.settings::as.MultiSettings(conf.settings)
###-------------------------------------------------------------------###
### If this is a restart - Picking up were we left last time ###
@@ -271,7 +233,7 @@ sda.enkf.multisite <- function(settings,
#sim.time <-2:nt # if It's restart I added +1 from the start to nt (which is the last year of old sim) to make the first sim in restart time t=2
#new.params and params.list are already loaded in the environment only need to grab X
X <-FORECAST[[length(FORECAST)]]
- }else{
+ } else {
PEcAn.logger::logger.info("The SDA output from the older simulation doesn't exist, assuming first SDA run with unconstrainded forecast output")
#loading param info from previous forecast
if(!exists("ensemble.samples") || is.null(ensemble.samples)){
@@ -289,14 +251,26 @@ sda.enkf.multisite <- function(settings,
#out.configs object required to build X and restart.list object required for build X
#TODO: there should be an easier way to do this than to rerun write.ensemble.configs
restart.list <- vector("list", length(conf.settings))
+ # make sure we have the input_design variable before running the write configuration function.
+ if (!exists("input_design")) {
+ PEcAn.logger::logger.info("The input_design is not found for write configuration function call.")
+ return(0)
+ }
out.configs <- conf.settings %>%
`class<-`(c("list")) %>%
furrr::future_map2(restart.list, function(settings, restart.arg) {
# Loading the model package - this is required bc of the furrr
library(paste0("PEcAn.",settings$model$type), character.only = TRUE)
# wrtting configs for each settings - this does not make a difference with the old code
+ # if we don't specify the input_design.
+ if (!exists("input_design")) {
+ input_design <- NULL
+ }
+ # writing configs for each settings object - this does not make a difference with the old code
PEcAn.uncertainty::write.ensemble.configs(
- defaults = settings$pfts,
+ input_design = input_design,
+ ensemble.size = nens,
+ defaults = defaults,
ensemble.samples = ensemble.samples,
settings = settings,
model = settings$model$type,
@@ -316,7 +290,7 @@ sda.enkf.multisite <- function(settings,
var.names = var.names,
my.read_restart = my.read_restart,
restart_flag = restart_flag)
-
+
#let's read the parameters of each site/ens
params.list <- reads %>% purrr::map(~.x %>% purrr::map("params"))
# Now let's read the state variables of site/ens
@@ -343,442 +317,405 @@ sda.enkf.multisite <- function(settings,
# weight matrix
wt.mat <- matrix(NA, nrow = nens, ncol = nt)
# Reading param samples-------------------------------
- #create params object using samples generated from TRAITS functions
- if(restart_flag){
- new.params <- new.params
- }else{
- if(!file.exists(file.path(settings$outdir, "samples.Rdata"))) PEcAn.logger::logger.severe("samples.Rdata cannot be found. Make sure you generate samples by running the get.parameter.samples function before running SDA.")
- #Generate parameter needs to be run before this to generate the samples. This is hopefully done in the main workflow.
- if(is.null(ensemble.samples)){
- load(file.path(settings$outdir, "samples.Rdata"))
- }
- #reformatting params
- new.params <- sda_matchparam(settings, ensemble.samples, site.ids, nens)
- }
-
-
- #TODO: incorporate Phyllis's restart work
- #sample all inputs specified in the settings$ensemble
- #now looking into the xml
- samp <- conf.settings$ensemble$samplingspace
- #finding who has a parent
- parents <- lapply(samp,'[[', 'parent')
- #order parents based on the need of who has to be first
- order <- names(samp)[lapply(parents, function(tr) which(names(samp) %in% tr)) %>% unlist()]
- #new ordered sampling space
- samp.ordered <- samp[c(order, names(samp)[!(names(samp) %in% order)])]
- #performing the sampling
- inputs <- vector("list", length(conf.settings))
- #for the tags specified in the xml, do the sampling for a random site and then replicate the same sample ids for the remaining sites for each ensemble member
- for (i in seq_along(samp.ordered)) {
- random_site <- sample(1:length(conf.settings),1)
- if (is.null(inputs[[random_site]])) {
- inputs[[random_site]] <- list()
- }
- input_tag<-names(samp.ordered)[i]
- #call the function responsible for generating the ensemble for the random site
- inputs[[random_site]][[input_tag]] <- PEcAn.uncertainty::input.ens.gen(settings=conf.settings[[random_site]],
- input=input_tag,
- method=samp.ordered[[i]]$method,
- parent_ids=NULL)
- #replicate the same ids for the remaining sites
- for (s in seq_along(conf.settings)) {
- if (s!= random_site) {
- if (is.null(inputs[[s]])) {
- inputs[[s]] <- list()
- }
- input_path <- conf.settings[[s]]$run$inputs[[tolower(input_tag)]]$path
- inputs[[s]][[input_tag]]$ids<-inputs[[random_site]][[input_tag]]$ids
- inputs[[s]][[input_tag]]$samples<- input_path[inputs[[random_site]][[input_tag]]$ids]
- }
+ #create params object using samples generated from TRAITS functions
+ if(restart_flag){
+ new.params <- new.params
+ } else {
+ if(!file.exists(file.path(settings$outdir, "samples.Rdata"))) PEcAn.logger::logger.severe("samples.Rdata cannot be found. Make sure you generate samples by running the get.parameter.samples function before running SDA.")
+ #Generate parameter needs to be run before this to generate the samples. This is hopefully done in the main workflow.
+ if(is.null(ensemble.samples)){
+ load(file.path(settings$outdir, "samples.Rdata"))
}
+ #reformatting params
+ new.params <- sda_matchparam(settings, ensemble.samples, site.ids, nens)
+ # if it's not a restart run, we will generate the joint input design.
+ # get the joint input design.
+ input_design <- PEcAn.uncertainty::generate_joint_ensemble_design(settings = settings[[1]],
+ ensemble_samples = ensemble.samples,
+ ensemble_size = nens)[[1]]
}
-
-
-
###------------------------------------------------------------------------------------------------###
### loop over time ###
###------------------------------------------------------------------------------------------------###
+ # initialize the lists of covariates for the debias feature.
+ pre.states <- vector("list", length = length(var.names)) %>% purrr::set_names(var.names)
+ # initialize the lists of forecasts for all time points.
+ all.X <- vector("list", length = nt)
for(t in 1:nt){
- obs.t <- as.character(lubridate::date(obs.times[t]))
- obs.year <- lubridate::year(obs.t)
- ###-------------------------------------------------------------------------###
- ### Taking care of Forecast. Splitting / Writting / running / reading back###
- ###-------------------------------------------------------------------------###-----
- #- Check to see if this is the first run or not and what inputs needs to be sent to write.ensemble configs
- if (t>1){
- #for next time step split the met if model requires
- #-Splitting the input for the models that they don't care about the start and end time of simulations and they run as long as their met file.
- inputs.split <- metSplit(conf.settings, inputs, settings, model, no_split = FALSE, obs.times, t, nens, restart_flag = FALSE, my.split_inputs)
-
- #---------------- setting up the restart argument for each site separately and keeping them in a list
- restart.list <-
- furrr::future_pmap(list(out.configs, conf.settings %>% `class<-`(c("list")), params.list, inputs.split),
- function(configs, settings, new.params, inputs) {
- #if the new state for each site only has one row/col.
- #then we need to convert it to matrix to solve the indexing issue.
- new_state_site <- new.state[, which(attr(X, "Site") %in% settings$run$site$id)]
- if(is.vector(new_state_site)){
- new_state_site <- matrix(new_state_site)
- }
- list(
- runid = configs$runs$id,
- start.time = strptime(obs.times[t -1], format = "%Y-%m-%d %H:%M:%S") + lubridate::second(lubridate::hms("00:00:01")),
- stop.time = strptime(obs.times[t], format ="%Y-%m-%d %H:%M:%S"),
- settings = settings,
- new.state = new_state_site,
- new.params = new.params,
- inputs = inputs,
- RENAME = TRUE,
- ensemble.id = settings$ensemble$ensemble.id
- )
- })
- } else { ## t == 1
- restart.list <- vector("list", length(conf.settings))
+ obs.t <- as.character(lubridate::date(obs.times[t]))
+ obs.year <- lubridate::year(obs.t)
+ PEcAn.logger::logger.info(paste("Processing date:", obs.t))
+ ###-------------------------------------------------------------------------###
+ ### Taking care of Forecast. Splitting / Writting / running / reading back###
+ ###-------------------------------------------------------------------------###-----
+ #- Check to see if this is the first run or not and what inputs needs to be sent to write.ensemble configs
+ if (t>1){
+ #for next time step split the met if model requires
+ #-Splitting the input for the models that they don't care about the start and end time of simulations and they run as long as their met file.
+ inputs.split <- metSplit(conf.settings, inputs, settings, model, no_split = FALSE, obs.times, t, nens, restart_flag = FALSE, my.split_inputs)
+ #---------------- setting up the restart argument for each site separately and keeping them in a list
+ restart.list <-
+ furrr::future_pmap(list(out.configs, conf.settings %>% `class<-`(c("list")), params.list, inputs.split),
+ function(configs, settings, new.params, inputs) {
+ #if the new state for each site only has one row/col.
+ #then we need to convert it to matrix to solve the indexing issue.
+ new_state_site <- new.state[, which(attr(X, "Site") %in% settings$run$site$id)]
+ if(is.vector(new_state_site)){
+ new_state_site <- matrix(new_state_site)
+ }
+ list(
+ runid = configs$runs$id,
+ start.time = strptime(obs.times[t -1], format = "%Y-%m-%d %H:%M:%S") + lubridate::second(lubridate::hms("00:00:01")),
+ stop.time = strptime(obs.times[t], format ="%Y-%m-%d %H:%M:%S"),
+ settings = settings,
+ new.state = new_state_site,
+ new.params = new.params,
+ inputs = inputs,
+ RENAME = TRUE,
+ ensemble.id = settings$ensemble$ensemble.id
+ )
+ })
+ } else { ## t == 1
+ restart.list <- vector("list", length(conf.settings))
+ }
+ #add flag for restart t=1 to skip model runs
+ if(restart_flag & t == 1){
+ #for restart when t=1 do not need to do model runs and X should already exist in environment by this point
+ X <- X
+ } else {
+ # writing configs for each setting
+ # here we use the foreach instead of furrr
+ # because for some reason, the furrr has problem returning the sample paths.
+ PEcAn.logger::logger.info("Writing configs!")
+ cl <- parallel::makeCluster(parallel::detectCores() - 1)
+ doSNOW::registerDoSNOW(cl)
+ temp.settings <- NULL
+ restart.arg <- NULL
+ out.configs <- foreach::foreach(temp.settings = as.list(conf.settings),
+ restart.arg = restart.list,
+ .packages = c("Kendall",
+ "purrr",
+ "PEcAn.uncertainty",
+ paste0("PEcAn.", model),
+ "PEcAnAssimSequential")) %dopar% {
+ temp <- PEcAn.uncertainty::write.ensemble.configs(
+ input_design = input_design,
+ ensemble.size = nens,
+ defaults = temp.settings$pfts,
+ ensemble.samples = ensemble.samples,
+ settings = temp.settings,
+ model = temp.settings$model$type,
+ write.to.db = temp.settings$database$bety$write,
+ restart = restart.arg,
+ # samples=inputs,
+ rename = TRUE
+ )
+ return(temp)
+ } %>% stats::setNames(site.ids)
+ parallel::stopCluster(cl)
+ foreach::registerDoSEQ()
+ # update the file paths of different inputs when t = 1.
+ if (t == 1) {
+ inputs <- out.configs %>% purrr::map(~.x$samples)
}
- #add flag for restart t=1 to skip model runs
- if(restart_flag & t == 1){
- #for restart when t=1 do not need to do model runs and X should already exist in environment by this point
- X <- X
- }else{
- if (control$debug) browser()
-
- out.configs <-furrr::future_pmap(list(conf.settings %>% `class<-`(c("list")),restart.list, inputs), function(settings, restart.arg, inputs) {
- # Loading the model package - this is required bc of the furrr
- library(paste0("PEcAn.",settings$model$type), character.only = TRUE)
- # wrtting configs for each settings - this does not make a difference with the old code
- PEcAn.uncertainty::write.ensemble.configs(
- defaults = settings$pfts,
- ensemble.samples = ensemble.samples,
- settings = settings,
- model = settings$model$type,
- write.to.db = settings$database$bety$write,
- restart = restart.arg,
- samples=inputs,
- rename = TRUE
- )
- }) %>%
- stats::setNames(site.ids)
-
- #if it's a rabbitmq job sumbmission, we will first copy and paste the whole run folder within the SDA to the remote host.
- if (!is.null(settings$host$rabbitmq)) {
- settings$host$rabbitmq$prefix <- paste0(obs.year, ".nc")
- cp2cmd <- gsub("@RUNDIR@", settings$host$rundir, settings$host$rabbitmq$cp2cmd)
- try(system(cp2cmd, intern = TRUE))
+ #if it's a rabbitmq job submission, we will first copy and paste the whole run folder within the SDA to the remote host.
+ if (!is.null(settings$host$rabbitmq)) {
+ settings$host$rabbitmq$prefix <- paste0(obs.year, ".nc")
+ cp2cmd <- gsub("@RUNDIR@", rundir, settings$host$rabbitmq$cp2cmd)
+ try(system(cp2cmd, intern = TRUE))
+ }
+ # get ensemble ids for each site.
+ ensemble.ids <- site.ids %>% furrr::future_map(function(i){
+ run.list <- c()
+ for (j in 1:nens) {
+ run.list <- c(run.list, paste0("ENS-", sprintf("%05d", j), "-", i))
}
-
- #I'm rewriting the runs because when I use the parallel approach for writing configs the run.txt will get messed up; because multiple cores want to write on it at the same time.
- runs.tmp <- list.dirs(rundir, full.names = F)
- runs.tmp <- runs.tmp[grepl("ENS-*|[0-9]", runs.tmp)]
- writeLines(runs.tmp[runs.tmp != ''], file.path(rundir, 'runs.txt'))
- paste(file.path(rundir, 'runs.txt')) ## testing
- Sys.sleep(0.01) ## testing
- if(control$parallel_qsub){
- if (is.null(control$jobs.per.file)) {
- PEcAn.remote::qsub_parallel(settings, prefix = paste0(obs.year, ".nc"))
- } else {
- PEcAn.remote::qsub_parallel(settings, files=PEcAn.remote::merge_job_files(settings, control$jobs.per.file), prefix = paste0(obs.year, ".nc"))
- }
- }else{
- PEcAn.workflow::start_model_runs(settings, write=settings$database$bety$write)
+ return(run.list)}, .progress = F) %>% unlist
+ # create folder paths to each ensemble of each site.
+ runs.tmp <- file.path(rundir, ensemble.ids)
+ # start model runs.
+ PEcAn.logger::logger.info("Running models!")
+ # if we want to submit jobs through the combined job file.
+ if(control$execution == "qsub_parallel"){
+ if (is.null(control$jobs.per.file)) {
+ PEcAn.remote::qsub_parallel(settings, prefix = paste0(obs.year, ".nc"))
+ } else {
+ PEcAn.remote::qsub_parallel(settings, files=PEcAn.remote::merge_job_files(settings, control$jobs.per.file), prefix = paste0(obs.year, ".nc"))
}
- #------------- Reading - every iteration and for SDA
- #put building of X into a function that gets called
- max_t <- 0
- while("try-error" %in% class(
- try(reads <- build_X(out.configs = out.configs,
- settings = settings,
- new.params = new.params,
- nens = nens,
- read_restart_times = read_restart_times,
- outdir = outdir,
- t = t,
- var.names = var.names,
- my.read_restart = my.read_restart,
- restart_flag = restart_flag), silent = T))
- ){
- Sys.sleep(10)
- max_t <- max_t + 1
- if(max_t > 3){
- PEcAn.logger::logger.info("Can't find outputed NC file! Please rerun the code!")
- break
- return(0)
- }
- PEcAn.logger::logger.info("Empty folder, try again!")
+ } else if (control$execution == "local") {
+ # if we want to execute jobs locally.
+ job.files <- file.path(runs.tmp, "job.sh")
+ temp <- job.files %>% furrr::future_map(function(f){
+ cmd <- paste0("cd ", dirname(f), ";./job.sh")
+ system(cmd, intern = F, ignore.stdout = T, ignore.stderr = T)
+ }, .progress = F)
+ } else if (control$execution == "qsub") {
+ # if we want to submit jobs through the regular job submission function.
+ PEcAn.workflow::start_model_runs(settings, write=settings$database$bety$write)
+ }
+ # Reading model outputs.
+ PEcAn.logger::logger.info("Reading forecast outputs!")
+ reads <- build_X(out.configs = out.configs,
+ settings = settings,
+ new.params = new.params,
+ nens = nens,
+ read_restart_times = read_restart_times,
+ outdir = outdir,
+ t = t,
+ var.names = var.names,
+ my.read_restart = my.read_restart,
+ restart_flag = restart_flag)
+ #let's read the parameters of each site/ens
+ params.list <- reads %>% purrr::map(~.x %>% purrr::map("params"))
+ # Now let's read the state variables of site/ens
+ #don't need to build X when t=1
+ X <- reads %>% purrr::map(~.x %>% purrr::map_df(~.x[["X"]] %>% t %>% as.data.frame))
+ #replacing crazy outliers before it's too late
+ if (control$OutlierDetection){
+ X <- outlier.detector.boxplot(X)
+ PEcAn.logger::logger.info("Outlier Detection.")
+ }
+ # convert from forecast list to data frame.
+ X <- seq_along(X) %>% furrr::future_map(function(i){
+ temp <- do.call(cbind, X[i])
+ colnames(temp) <- paste0(var.names, ".", i)
+ return(temp)
+ }) %>%
+ dplyr::bind_cols() %>%
+ `colnames<-`(c(rep(var.names, length(X)))) %>%
+ `attr<-`('Site',c(rep(site.ids, each=length(var.names))))
+ } ## end else from restart & t==1
+ all.X[[t]] <- X
+ # start debiasing.
+ debias.out <- NULL
+ if (!is.null(debias$start.year)) {
+ if (obs.year >= debias$start.year) {
+ PEcAn.logger::logger.info("Start debiasing!")
+ debias.out <- sda_bias_correction(site.locs,
+ t, all.X,
+ obs.mean,
+ state.interval,
+ debias$cov.dir,
+ pre.states,
+ .get_debias_mod)
+ X <- debias.out$X
+ pre.states <- debias.out$pre.states
+ }
+ }
+ FORECAST[[obs.t]] <- all.X[[t]] <- X
+ ###-------------------------------------------------------------------###
+ ### preparing OBS ###
+ ###-------------------------------------------------------------------###----
+ #To trigger the analysis function with free run, you need to first specify the control$forceRun as TRUE,
+ #Then specify the settings$state.data.assimilation$scalef as 0, and settings$state.data.assimilation$free.run as TRUE.
+ if (!is.null(obs.mean[[t]][[1]]) | (as.logical(settings$state.data.assimilation$free.run) & control$forceRun)) {
+ #decide if we want to estimate the process variance and choose the according function.
+ if(processvar == FALSE) {
+ an.method <- EnKF
+ } else if (processvar == TRUE && settings$state.data.assimilation$q.type %in% c("SINGLE", "SITE")) {
+ an.method <- GEF.MultiSite
+ }
+ #initialize MCMC arguments.
+ if (is.null(control$MCMC.args)) {
+ MCMC.args <- list(niter = 1e5,
+ nthin = 10,
+ nchain = 1,
+ nburnin = 5e4)
+ } else {
+ MCMC.args <- control$MCMC.args
+ }
+ #decide if we want the block analysis function or multi-site analysis function.
+ if (processvar == TRUE && settings$state.data.assimilation$q.type %in% c("vector", "wishart")) {
+ #initialize block.list.all.
+ if (t == 1 | !exists("block.list.all")) {
+ block.list.all <- obs.mean %>% purrr::map(function(l){NULL})
}
-
- if (control$debug) browser()
- #let's read the parameters of each site/ens
- params.list <- reads %>% purrr::map(~.x %>% purrr::map("params"))
- # Now let's read the state variables of site/ens
- #don't need to build X when t=1
- X <- reads %>% purrr::map(~.x %>% purrr::map_df(~.x[["X"]] %>% t %>% as.data.frame))
-
-
- #replacing crazy outliers before it's too late
- if (control$OutlierDetection){
- X <- outlier.detector.boxplot(X)
- PEcAn.logger::logger.info("Outlier Detection.")
- }
-
- # Now we have a matrix that columns are state variables and rows are ensembles.
- # this matrix looks like this
- # GWBI AbvGrndWood GWBI AbvGrndWood
- #[1,] 3.872521 37.2581 3.872521 37.2581
- # But therer is an attribute called `Site` which tells yout what column is for what site id - check out attr (X,"Site")
- if (multi.site.flag){
- X <- X %>%
- purrr::map_dfc(~.x) %>%
- as.matrix() %>%
- `colnames<-`(c(rep(var.names, length(X)))) %>%
- `attr<-`('Site',c(rep(site.ids, each=length(var.names))))
+ #running analysis function.
+ enkf.params[[obs.t]] <- analysis_sda_block(settings, block.list.all, X, obs.mean, obs.cov, t, nt, MCMC.args, pre_enkf_params)
+ enkf.params[[obs.t]] <- c(enkf.params[[obs.t]], RestartList = list(restart.list %>% stats::setNames(site.ids)))
+ block.list.all <- enkf.params[[obs.t]]$block.list.all
+ #Forecast
+ mu.f <- enkf.params[[obs.t]]$mu.f
+ Pf <- enkf.params[[obs.t]]$Pf
+ #Analysis
+ Pa <- enkf.params[[obs.t]]$Pa
+ mu.a <- enkf.params[[obs.t]]$mu.a
+ } else if (exists("an.method")) {
+ #Making R and Y
+ Obs.cons <- Construct.R(site.ids, var.names, obs.mean[[t]], obs.cov[[t]])
+ Y <- Obs.cons$Y
+ R <- Obs.cons$R
+ if (length(Y) > 1) {
+ PEcAn.logger::logger.info("The zero variances in R and Pf is being replaced by half and one fifth of the minimum variance in those matrices respectively.")
+ diag(R)[which(diag(R)==0)] <- min(diag(R)[which(diag(R) != 0)])/2
}
-
- } ## end else from restart & t==1
- FORECAST[[obs.t]] <- X
-
- ###-------------------------------------------------------------------###
- ### preparing OBS ###
- ###-------------------------------------------------------------------###----
- #To trigger the analysis function with free run, you need to first specify the control$forceRun as TRUE,
- #Then specify the settings$state.data.assimilation$scalef as 0, and settings$state.data.assimilation$free.run as TRUE.
- if (!is.null(obs.mean[[t]][[1]]) | (as.logical(settings$state.data.assimilation$free.run) & control$forceRun)) {
- # TODO: as currently configured, Analysis runs even if all obs are NA,
- # which clearly should be triggering the `else` of this if, but the
- # `else` has not been invoked in a while an may need updating
-
-
- #decide if we want to estimate the process variance and choose the according function.
- if(processvar == FALSE) {
- an.method<-EnKF
- } else if (processvar == TRUE && settings$state.data.assimilation$q.type %in% c("SINGLE", "SITE")) {
- an.method<-GEF.MultiSite
+ # making the mapping operator
+ H <- Construct.H.multisite(site.ids, var.names, obs.mean[[t]])
+ #Pass aqq and bqq.
+ aqq <- NULL
+ bqq <- numeric(nt + 1)
+ Pf <- NULL
+ #if t>1
+ if(is.null(pre_enkf_params) && t>1){
+ aqq <- enkf.params[[t-1]]$aqq
+ bqq <- enkf.params[[t-1]]$bqq
+ X.new<-enkf.params[[t-1]]$X.new
}
-
- #decide if we want the block analysis function or multi-site analysis function.
- if (processvar == TRUE && settings$state.data.assimilation$q.type %in% c("vector", "wishart")) {
- #initialize block.list.all.
- if (t == 1 | !exists("block.list.all")) {
- block.list.all <- obs.mean %>% purrr::map(function(l){NULL})
- }
- #initialize MCMC arguments.
- if (is.null(control$MCMC.args)) {
- MCMC.args <- list(niter = 1e5,
- nthin = 10,
- nchain = 3,
- nburnin = 5e4)
- } else {
- MCMC.args <- control$MCMC.args
- }
- #running analysis function.
- enkf.params[[obs.t]] <- analysis_sda_block(settings, block.list.all, X, obs.mean, obs.cov, t, nt, MCMC.args, pre_enkf_params)
- enkf.params[[obs.t]] <- c(enkf.params[[obs.t]], RestartList = list(restart.list %>% stats::setNames(site.ids)))
- block.list.all <- enkf.params[[obs.t]]$block.list.all
- #Forecast
- mu.f <- enkf.params[[obs.t]]$mu.f
- Pf <- enkf.params[[obs.t]]$Pf
- #Analysis
- Pa <- enkf.params[[obs.t]]$Pa
- mu.a <- enkf.params[[obs.t]]$mu.a
- } else if (exists("an.method")) {
- #Making R and Y
- Obs.cons <- Construct.R(site.ids, var.names, obs.mean[[t]], obs.cov[[t]])
- Y <- Obs.cons$Y
- R <- Obs.cons$R
- if (length(Y) > 1) {
- PEcAn.logger::logger.info("The zero variances in R and Pf is being replaced by half and one fifth of the minimum variance in those matrices respectively.")
- diag(R)[which(diag(R)==0)] <- min(diag(R)[which(diag(R) != 0)])/2
- }
- # making the mapping operator
- H <- Construct.H.multisite(site.ids, var.names, obs.mean[[t]])
- #Pass aqq and bqq.
- aqq <- NULL
- bqq <- numeric(nt + 1)
- Pf <- NULL
- #if t>1
- if(is.null(pre_enkf_params) && t>1){
- aqq <- enkf.params[[t-1]]$aqq
- bqq <- enkf.params[[t-1]]$bqq
- X.new<-enkf.params[[t-1]]$X.new
- }
- if(!is.null(pre_enkf_params) && t>1){
- aqq <- pre_enkf_params[[t-1]]$aqq
- bqq <- pre_enkf_params[[t-1]]$bqq
- X.new<-pre_enkf_params[[t-1]]$X.new
- }
- if(!is.null(pre_enkf_params)){
- Pf <- pre_enkf_params[[t]]$Pf
- }
- recompileTobit = !exists('Cmcmc_tobit2space')
- recompileGEF = !exists('Cmcmc')
- #weight list
- # This reads ensemble weights generated by `get_ensemble_weights` function from assim.sequential package
- weight_list <- list()
- if(!file.exists(file.path(settings$outdir, "ensemble_weights.Rdata"))){
- PEcAn.logger::logger.warn("ensemble_weights.Rdata cannot be found. Make sure you generate samples by running the get.ensemble.weights function before running SDA if you want the ensembles to be weighted.")
- #create null list
- for(tt in 1:length(obs.times)){
- weight_list[[tt]] <- rep(1,nens) #no weights
- }
- } else{
- load(file.path(settings$outdir, "ensemble_weights.Rdata")) ## loads ensemble.samples
- }
- wts <- unlist(weight_list[[t]])
- #-analysis function
- enkf.params[[obs.t]] <- GEF.MultiSite(
- settings,
- FUN = an.method,
- Forecast = list(Q = Q, X = X),
- Observed = list(R = R, Y = Y),
- H = H,
- extraArg = list(
- aqq = aqq,
- bqq = bqq,
- Pf = Pf,
- t = t,
- nitr.GEF = nitr.GEF,
- nthin = nthin,
- nburnin = nburnin,
- censored.data = censored.data,
- recompileGEF = recompileGEF,
- recompileTobit = recompileTobit,
- wts = wts
- ),
- choose = choose,
- nt = nt,
- obs.mean = obs.mean,
- nitr = 100000,
- nburnin = 10000,
- obs.cov = obs.cov,
- site.ids = site.ids,
- blocked.dis = blocked.dis,
- distances = distances
- )
- tictoc::tic(paste0("Preparing for Adjustment for cycle = ", t))
- #Forecast
- mu.f <- enkf.params[[obs.t]]$mu.f
- Pf <- enkf.params[[obs.t]]$Pf
- #Analysis
- Pa <- enkf.params[[obs.t]]$Pa
- mu.a <- enkf.params[[obs.t]]$mu.a
- #extracting extra outputs
- if (control$debug) browser()
- if (processvar) {
- aqq <- enkf.params[[obs.t]]$aqq
- bqq <- enkf.params[[obs.t]]$bqq
- }
- # Adding obs elements to the enkf.params
- #This can later on help with diagnostics
- enkf.params[[obs.t]] <-
- c(
- enkf.params[[obs.t]],
- R = list(R),
- Y = list(Y),
- RestartList = list(restart.list %>% stats::setNames(site.ids))
- )
+ if(!is.null(pre_enkf_params) && t>1){
+ aqq <- pre_enkf_params[[t-1]]$aqq
+ bqq <- pre_enkf_params[[t-1]]$bqq
+ X.new<-pre_enkf_params[[t-1]]$X.new
}
-
- ###-------------------------------------------------------------------###
- ### Trace ###
- ###-------------------------------------------------------------------###----
- #-- writing Trace--------------------
- if(control$trace) {
- PEcAn.logger::logger.warn ("\n --------------------------- ",obs.year," ---------------------------\n")
- PEcAn.logger::logger.warn ("\n --------------Obs mean----------- \n")
- print(enkf.params[[obs.t]]$Y)
- PEcAn.logger::logger.warn ("\n --------------Obs Cov ----------- \n")
- print(enkf.params[[obs.t]]$R)
- PEcAn.logger::logger.warn ("\n --------------Forecast mean ----------- \n")
- print(enkf.params[[obs.t]]$mu.f)
- PEcAn.logger::logger.warn ("\n --------------Forecast Cov ----------- \n")
- print(enkf.params[[obs.t]]$Pf)
- PEcAn.logger::logger.warn ("\n --------------Analysis mean ----------- \n")
- print(t(enkf.params[[obs.t]]$mu.a))
- PEcAn.logger::logger.warn ("\n --------------Analysis Cov ----------- \n")
- print(enkf.params[[obs.t]]$Pa)
- PEcAn.logger::logger.warn ("\n ------------------------------------------------------\n")
+ if(!is.null(pre_enkf_params)){
+ Pf <- pre_enkf_params[[t]]$Pf
}
- if (control$debug) browser()
- if (control$pause) readline(prompt="Press [enter] to continue \n")
- } else {
- ###-------------------------------------------------------------------###
- ### No Observations -- ###----
- ###-----------------------------------------------------------------###
- ### no process variance -- forecast is the same as the analysis ###
- if (processvar==FALSE) {
- mu.a <- mu.f
- Pa <- Pf + Q
- ### yes process variance -- no data
- } else {
- mu.f <- colMeans(X) #mean Forecast - This is used as an initial condition
- mu.a <- mu.f
- if(is.null(Q)){
- q.bar <- diag(ncol(X))
- PEcAn.logger::logger.warn('Process variance not estimated. Analysis has been given uninformative process variance')
+ recompileTobit = !exists('Cmcmc_tobit2space')
+ recompileGEF = !exists('Cmcmc')
+ #weight list
+ # This reads ensemble weights generated by `get_ensemble_weights` function from assim.sequential package
+ weight_list <- list()
+ if(!file.exists(file.path(settings$outdir, "ensemble_weights.Rdata"))){
+ PEcAn.logger::logger.warn("ensemble_weights.Rdata cannot be found. Make sure you generate samples by running the get.ensemble.weights function before running SDA if you want the ensembles to be weighted.")
+ #create null list
+ for(tt in 1:length(obs.times)){
+ weight_list[[tt]] <- rep(1,nens) #no weights
}
- # Pa <- Pf + matrix(solve(q.bar), dim(Pf)[1], dim(Pf)[2])
- #will throw an error when q.bar and Pf are different sizes i.e. when you are running with no obs and do not variance for all state variables
- #Pa <- Pf + solve(q.bar)
- #hack have Pa = Pf for now
- # if(!is.null(pre_enkf_params)){
- # Pf <- pre_enkf_params[[t]]$Pf
- # }else{
- # Pf <- stats::cov(X) # Cov Forecast - This is used as an initial condition
- # }
- Pf <- stats::cov(X)
- Pa <- Pf
+ } else{
+ load(file.path(settings$outdir, "ensemble_weights.Rdata")) ## loads ensemble.samples
+ }
+ wts <- unlist(weight_list[[t]])
+ #-analysis function
+ enkf.params[[obs.t]] <- GEF.MultiSite(
+ settings,
+ FUN = an.method,
+ Forecast = list(Q = Q, X = X),
+ Observed = list(R = R, Y = Y),
+ H = H,
+ extraArg = list(
+ aqq = aqq,
+ bqq = bqq,
+ Pf = Pf,
+ t = t,
+ nitr.GEF = MCMC.args$niter,
+ nthin = MCMC.args$nthin,
+ nburnin = MCMC.args$nburnin,
+ censored.data = censored.data,
+ recompileGEF = recompileGEF,
+ recompileTobit = recompileTobit,
+ wts = wts
+ ),
+ choose = choose,
+ nt = nt,
+ obs.mean = obs.mean,
+ nitr = 100000,
+ nburnin = 10000,
+ obs.cov = obs.cov,
+ site.ids = site.ids,
+ blocked.dis = blocked.dis,
+ distances = distances
+ )
+ tictoc::tic(paste0("Preparing for Adjustment for cycle = ", t))
+ #Forecast
+ mu.f <- enkf.params[[obs.t]]$mu.f
+ Pf <- enkf.params[[obs.t]]$Pf
+ #Analysis
+ Pa <- enkf.params[[obs.t]]$Pa
+ mu.a <- enkf.params[[obs.t]]$mu.a
+ #extracting extra outputs
+ if (processvar) {
+ aqq <- enkf.params[[obs.t]]$aqq
+ bqq <- enkf.params[[obs.t]]$bqq
}
- enkf.params[[obs.t]] <- list(mu.f = mu.f, Pf = Pf, mu.a = mu.a, Pa = Pa)
+ # Adding obs elements to the enkf.params
+ #This can later on help with diagnostics
+ enkf.params[[obs.t]] <-
+ c(
+ enkf.params[[obs.t]],
+ R = list(R),
+ Y = list(Y),
+ RestartList = list(restart.list %>% stats::setNames(site.ids))
+ )
}
-
+ } else {
###-------------------------------------------------------------------###
- ### adjust/update state matrix ###
- ###-------------------------------------------------------------------###----
- tictoc::tic(paste0("Adjustment for cycle = ", t))
+ ### No Observations -- ###----
+ ###-----------------------------------------------------------------###
+ ### no process variance -- forecast is the same as the analysis ###
+ if (processvar==FALSE) {
+ mu.a <- mu.f
+ Pa <- Pf + Q
+ ### yes process variance -- no data
+ } else {
+ mu.f <- colMeans(X) #mean Forecast - This is used as an initial condition
+ mu.a <- mu.f
+ if(is.null(Q)){
+ q.bar <- diag(ncol(X))
+ PEcAn.logger::logger.warn('Process variance not estimated. Analysis has been given uninformative process variance')
+ }
+ # Pa <- Pf + matrix(solve(q.bar), dim(Pf)[1], dim(Pf)[2])
+ #will throw an error when q.bar and Pf are different sizes i.e. when you are running with no obs and do not variance for all state variables
+ #Pa <- Pf + solve(q.bar)
+ #hack have Pa = Pf for now
+ # if(!is.null(pre_enkf_params)){
+ # Pf <- pre_enkf_params[[t]]$Pf
+ # }else{
+ # Pf <- stats::cov(X) # Cov Forecast - This is used as an initial condition
+ # }
+ Pf <- stats::cov(X)
+ Pa <- Pf
+ }
+ enkf.params[[obs.t]] <- list(mu.f = mu.f, Pf = Pf, mu.a = mu.a, Pa = Pa)
+ }
+
+ ###-------------------------------------------------------------------###
+ ### adjust/update state matrix ###
+ ###-------------------------------------------------------------------###----
+ tictoc::tic(paste0("Adjustment for cycle = ", t))
+ # if we don't have the analysis from the analysis function.
+ if (is.null(enkf.params[[obs.t]]$analysis)) {
if(adjustment == TRUE){
analysis <-adj.ens(Pf, X, mu.f, mu.a, Pa)
} else {
analysis <- as.data.frame(mvtnorm::rmvnorm(as.numeric(nrow(X)), mu.a, Pa, method = "svd"))
}
- colnames(analysis) <- colnames(X)
- ##### Mapping analysis vectors to be in bounds of state variables
- for(i in 1:ncol(analysis)){
- int.save <- state.interval[which(colnames(analysis)[i]==var.names),]
- analysis[analysis[,i] < int.save[1],i] <- int.save[1]
- analysis[analysis[,i] > int.save[2],i] <- int.save[2]
- }
- ## in the future will have to be separated from analysis
-
- new.state <- as.data.frame(analysis)
- ANALYSIS[[obs.t]] <- analysis
- ens_weights[[obs.t]] <- PEcAnAssimSequential::sda_weights_site(FORECAST, ANALYSIS, t, as.numeric(settings$ensemble$size))
- ###-------------------------------------------------------------------###
- ### save outputs ###
- ###-------------------------------------------------------------------###----
- Viz.output <- list(settings, obs.mean, obs.cov) #keeping obs data and settings for later visualization in Dashboard
-
- save(site.locs,
- t,
- FORECAST,
- ANALYSIS,
- enkf.params,
- new.state, new.params,params.list, ens_weights,
- out.configs, ensemble.samples, inputs, Viz.output,
- file = file.path(settings$outdir, "sda.output.Rdata"))
-
- tictoc::tic(paste0("Visulization for cycle = ", t))
-
- #writing down the image - either you asked for it or nor :)
- if ((t%%2 == 0 | t == nt) & (control$TimeseriesPlot)){
- if (as.logical(settings$state.data.assimilation$free.run)) {
- SDA_timeseries_plot(ANALYSIS, FORECAST, obs.mean, obs.cov, settings$outdir, by = "var", types = c("FORECAST", "ANALYSIS"))
- } else {
- SDA_timeseries_plot(ANALYSIS, FORECAST, obs.mean, obs.cov, settings$outdir, by = "var", types = c("FORECAST", "ANALYSIS", "OBS"))
- }
- }
- #Saving the profiling result
- if (control$Profiling) alltocs(file.path(settings$outdir,"SDA", "Profiling.csv"))
+ } else {
+ analysis <- enkf.params[[obs.t]]$analysis
+ }
+ colnames(analysis) <- colnames(X)
+ ##### Mapping analysis vectors to be in bounds of state variables
+ for(i in 1:ncol(analysis)){
+ int.save <- state.interval[which(colnames(analysis)[i]==var.names),]
+ analysis[analysis[,i] < int.save[1],i] <- int.save[1]
+ analysis[analysis[,i] > int.save[2],i] <- int.save[2]
+ }
+ ## in the future will have to be separated from analysis
+ new.state <- as.data.frame(analysis)
+ ANALYSIS[[obs.t]] <- analysis
+ ens_weights[[obs.t]] <- PEcAnAssimSequential::sda_weights_site(FORECAST, ANALYSIS, t, as.numeric(settings$ensemble$size))
+ ###-------------------------------------------------------------------###
+ ### save outputs ###
+ ###-------------------------------------------------------------------###----
+ Viz.output <- list(settings, obs.mean, obs.cov) #keeping obs data and settings for later visualization in Dashboard
+ # save SDA outputs.
+ save(site.locs,
+ t,
+ FORECAST,
+ ANALYSIS,
+ enkf.params,
+ new.state, new.params,params.list, ens_weights,
+ out.configs, ensemble.samples, inputs, Viz.output,
+ debias.out,
+ file = file.path(settings$outdir, "sda.output.Rdata"))
+ tictoc::tic(paste0("Visualization for cycle = ", t))
+ # writing down the image - either you asked for it or not :)
+ if ((t%%2 == 0 | t == nt) & (control$TimeseriesPlot)){
+ if (as.logical(settings$state.data.assimilation$free.run)) {
+ SDA_timeseries_plot(ANALYSIS, FORECAST, obs.mean, obs.cov, settings$outdir, by = "var", types = c("FORECAST", "ANALYSIS"))
+ } else {
+ SDA_timeseries_plot(ANALYSIS, FORECAST, obs.mean, obs.cov, settings$outdir, by = "var", types = c("FORECAST", "ANALYSIS", "OBS"))
+ }
+ }
# remove files as SDA runs
if (!(control$keepNC) && t == 1){
unlink(list.files(outdir, "*.nc", recursive = TRUE, full.names = TRUE))
@@ -790,12 +727,23 @@ sda.enkf.multisite <- function(settings,
system2(sendmail, c("-f", paste0("\"", control$send_email$from, "\""), paste0("\"", control$send_email$to, "\""), "<", mailfile))
unlink(mailfile)
}
- gc()
- # useful for debugging to keep .nc files for assimilated years. T = 2, because this loops removes the files that were run when starting the next loop
-# if (keepNC && t == 1){
-# unlink(list.files(outdir, "*.nc", recursive = TRUE, full.names = TRUE))
-# }
- ## MCD: I commented the above "if" out because if you are restarting from a previous forecast, this might delete the files in that earlier folder
+ gc()
} ### end loop over time
-} # sda.enkf
-
\ No newline at end of file
+ # merge NC files.
+ if (control$merge_nc) {
+ nc.folder <- file.path(settings$outdir, "merged_nc")
+ if (file.exists(nc.folder)) unlink(nc.folder)
+ dir.create(nc.folder)
+ temp <- PEcAn.utils::nc_merge_all_sites_by_year(model.outdir = outdir,
+ nc.outdir = nc.folder,
+ ens.num = nens,
+ site.ids = as.numeric(site.ids),
+ start.date = obs.times[1],
+ end.date = obs.times[length(obs.times)],
+ time.step = paste(1, settings$state.data.assimilation$forecast.time.step),
+ cores = parallel::detectCores() - 1)
+ # remove rundir and outdir.
+ unlink(rundir, recursive = T)
+ unlink(outdir, recursive = T)
+ }
+} # sda.enkf
\ No newline at end of file
diff --git a/modules/assim.sequential/R/sda.enkf_parallel.R b/modules/assim.sequential/R/sda.enkf_parallel.R
new file mode 100644
index 00000000000..937989f1a32
--- /dev/null
+++ b/modules/assim.sequential/R/sda.enkf_parallel.R
@@ -0,0 +1,733 @@
+#' @description This function provides complete support for the multi-core and multi-node computation on the general HPC system.
+#' Thus, this script will be more computationally efficient, making it possible to run SDA over thousands of locations.
+#' @title sda.enkf_local
+#' @name sda.enkf_local
+#' @author Dongchen Zhang \email{zhangdc@@bu.edu}
+#'
+#' @param settings PEcAn settings object
+#' @param obs.mean Lists of date times named by time points, which contains lists of sites named by site ids, which contains observation means for each state variables of each site for each time point.
+#' @param obs.cov Lists of date times named by time points, which contains lists of sites named by site ids, which contains observation covariances for all state variables of each site for each time point.
+#' @param Q Process covariance matrix given if there is no data to estimate it.
+#' @param pre_enkf_params Used for passing pre-existing time-series of process error into the current SDA runs to ignore the impact by the differences between process errors.
+#' @param ensemble.samples list of ensemble parameters across PFTs. Default is NULL.
+#' @param outdir physical path to the folder that stores the SDA outputs. Default is NULL.
+#' @param control List of flags controlling the behavior of the SDA.
+#' `TimeseriesPlot` for post analysis examination;
+#' `OutlierDetection` decide if we want to execute the outlier detection each time after the model forecasting;
+#' `send_email` contains lists for sending email to report the SDA progress;
+#' `keepNC` decide if we want to keep the NetCDF files inside the out directory;
+#' `forceRun` decide if we want to proceed the Bayesian MCMC sampling without observations;
+#' `MCMC.args` includes lists for controlling the MCMC sampling process (iteration, nchains, burnin, and nthin).
+#' `merge_nc` determine if we want to merge all netCDF files across sites and ensembles.
+#' If it's set as `TRUE`, we will then combine all netCDF files into the `merged_nc` folder within the `outdir`.
+#' @param debias List: R list containing the covariance directory and the start year.
+#' covariance directory should include GeoTIFF files named by year.
+#' start year is a numeric input which decides when to start the debiasing feature.
+#'
+#' @return NONE
+#' @export
+#'
+sda.enkf_local <- function(settings,
+ obs.mean,
+ obs.cov,
+ Q = NULL,
+ pre_enkf_params = NULL,
+ ensemble.samples = NULL,
+ outdir = NULL,
+ control=list(TimeseriesPlot = FALSE,
+ OutlierDetection = FALSE,
+ send_email = NULL,
+ keepNC = TRUE,
+ forceRun = TRUE,
+ MCMC.args = NULL,
+ merge_nc = TRUE),
+ debias = list(cov.dir = NULL, start.year = NULL)) {
+ # grab cores from settings.
+ cores <- as.numeric(settings$state.data.assimilation$batch.settings$general.job$cores)
+ # if we didn't assign number of CPUs in the settings.
+ if (length(cores) == 0 | is.null(cores)) {
+ cores <- parallel::detectCores()
+ }
+ cores <- cores - 1
+ # if we only have one CPU.
+ if (cores < 1) cores <- 1
+ # initialize parallel.
+ if (future::supportsMulticore()) {
+ future::plan(future::multicore, workers = cores)
+ } else {
+ future::plan(future::multisession, workers = cores)
+ }
+ # Tweak outdir if it's specified from outside.
+ if (!is.null(outdir)) {
+ PEcAn.logger::logger.info(paste0("Replacing model output directories with ", outdir, "."))
+ PEcAn.logger::logger.info("Please note that the workflow will only work locally.")
+ PEcAn.logger::logger.info("Please swap the SDA function to `sda.enkf.multisite` function if you would like to run jobs through remote server.")
+ settings$outdir <- outdir
+ settings$rundir <- file.path(outdir, "run")
+ settings$modeloutdir <- file.path(outdir, "out")
+ settings$host$folder <- file.path(outdir, "out")
+ settings$host$outdir <- file.path(outdir, "out")
+ settings$host$rundir <- file.path(outdir, "run")
+ }
+ ###-------------------------------------------------------------------###
+ ### read settings ###
+ ###-------------------------------------------------------------------###
+ adjustment <- settings$state.data.assimilation$adjustment
+ model <- settings$model$type
+ defaults <- settings$pfts
+ outdir <- settings$modeloutdir # currently model runs locally, this will change if remote is enabled
+ rundir <- settings$host$rundir
+ nens <- as.numeric(settings$ensemble$size)
+ var.names <- sapply(settings$state.data.assimilation$state.variable, '[[', "variable.name")
+ names(var.names) <- NULL
+ #--------Initialization
+ restart.list <- NULL
+ #create SDA folder to store output
+ if(!dir.exists(settings$outdir)) dir.create(settings$outdir, showWarnings = FALSE)
+
+ ##### Creating matrices that describe the bounds of the state variables
+ ##### interval is remade every time depending on the data at time t
+ ##### state.interval stays constant and converts new.analysis to be within the correct bounds
+ interval <- NULL
+ state.interval <- cbind(as.numeric(lapply(settings$state.data.assimilation$state.variables,'[[','min_value')),
+ as.numeric(lapply(settings$state.data.assimilation$state.variables,'[[','max_value')))
+ rownames(state.interval) <- var.names
+ #------------------------------Multi - site specific - settings
+ #Here I'm trying to make a temp config list name and put it into map to iterate
+ conf.settings <- settings
+ site.ids <- conf.settings %>% purrr::map(~.x[['run']] ) %>% purrr::map('site') %>% purrr::map('id') %>% base::unlist() %>% base::as.character()
+ # a matrix ready to be sent to spDistsN1 in sp package - first col is the long second is the lat and row names are the site ids
+ site.locs <- conf.settings %>% purrr::map(~.x[['run']] ) %>%
+ purrr::map('site') %>% purrr::map(function(s){
+ temp <- as.numeric(c(s$lon, s$lat))
+ names(temp) <- c("Lon", "Lat")
+ temp
+ }) %>%
+ dplyr::bind_rows() %>%
+ as.data.frame() %>%
+ `rownames<-`(site.ids)
+ ###-------------------------------------------------------------------###
+ ### check dates before data assimilation ###
+ ###-------------------------------------------------------------------###----
+ #filtering obs data based on years specified in setting > state.data.assimilation
+ start.cut <- lubridate::ymd_hms(settings$state.data.assimilation$start.date, truncated = 3)
+ Start.year <- (lubridate::year(settings$state.data.assimilation$start.date))
+ End.year <- lubridate::year(settings$state.data.assimilation$end.date) # dates that assimilations will be done for - obs will be subsetted based on this
+ assim.sda <- Start.year:End.year
+ obs.mean <- obs.mean[sapply(lubridate::year(names(obs.mean)), function(obs.year) obs.year %in% (assim.sda))] #checks obs.mean dates against assimyear dates
+ obs.cov <- obs.cov[sapply(lubridate::year(names(obs.cov)), function(obs.year) obs.year %in% (assim.sda))] #checks obs.cov dates against assimyear dates
+ #checking that there are dates in obs.mean and adding midnight as the time
+ obs.times <- names(obs.mean)
+ obs.times.POSIX <- lubridate::ymd_hms(obs.times)
+ for (i in seq_along(obs.times)) {
+ if (is.na(obs.times.POSIX[i])) {
+ if (is.na(lubridate::ymd(obs.times[i]))) {
+ PEcAn.logger::logger.warn("Error: no dates associated with observations")
+ } else {
+ ### Data does not have time associated with dates
+ ### Adding 23:59:59 (one second before midnight) assuming next time step starts one second later
+ # PEcAn.logger::logger.warn("Pumpkin Warning: adding one minute before midnight time assumption to dates associated with data")
+ obs.times.POSIX[i] <- lubridate::ymd_hms(paste(obs.times[i], "23:59:59"))
+ }
+ }
+ }
+ obs.times <- obs.times.POSIX
+ read_restart_times <- c(lubridate::ymd_hms(start.cut, truncated = 3), obs.times)
+ nt <- length(obs.times) #sets length of for loop for Forecast/Analysis
+ if (nt==0) PEcAn.logger::logger.severe('There has to be at least one Obs.')
+
+ # Model Specific Setup ----------------------------------------------------
+ #--get model specific functions
+ do.call("library", list(paste0("PEcAn.", model)))
+ my.write_restart <- paste0("write_restart.", model)
+ my.read_restart <- paste0("read_restart.", model)
+ my.split_inputs <- paste0("split_inputs.", model)
+ #- Double checking some of the inputs
+ if (is.null(adjustment)) adjustment <- TRUE
+ # models that don't need split_inputs, check register file for that
+ register.xml <- system.file(paste0("register.", model, ".xml"), package = paste0("PEcAn.", model))
+ register <- XML::xmlToList(XML::xmlParse(register.xml))
+ no_split <- !as.logical(register$exact.dates)
+ if (!exists(my.split_inputs) & !no_split) {
+ PEcAn.logger::logger.warn(my.split_inputs, "does not exist")
+ PEcAn.logger::logger.severe("please make sure that the PEcAn interface is loaded for", model)
+ PEcAn.logger::logger.warn(my.split_inputs, "If your model does not need the split function you can specify that in register.Model.xml in model's inst folder by adding FALSE tag.")
+
+ }
+ #split met if model calls for it
+ #create a folder to store extracted met files
+ if(!file.exists(paste0(settings$outdir, "/Extracted_met/"))){
+ dir.create(paste0(settings$outdir, "/Extracted_met/"))
+ }
+ PEcAn.logger::logger.info("Splitting mets!")
+ conf.settings <-conf.settings %>%
+ `class<-`(c("list")) %>% #until here, it separates all the settings for all sites that listed in the xml file
+ furrr::future_map(function(settings) {
+ library(paste0("PEcAn.",settings$model$type), character.only = TRUE)#solved by including the model in the settings
+ inputs.split <- list()
+ if (!no_split) {
+ for (i in 1:length(settings$run$inputs$met$path)) {
+ #---------------- model specific split inputs
+ ### model specific split inputs
+ settings$run$inputs$met$path[[i]] <- do.call(
+ my.split_inputs,
+ args = list(
+ settings = settings,
+ start.time = lubridate::ymd_hms(settings$run$site$met.start, truncated = 3), # This depends if we are restart or not
+ stop.time = lubridate::ymd_hms(settings$run$site$met.end, truncated = 3),
+ inputs = settings$run$inputs$met$path[[i]],
+ outpath = paste0(paste0(settings$outdir, "/Extracted_met/"), settings$run$site$id),
+ overwrite =F
+ )
+ )
+ # changing the start and end date which will be used for model2netcdf.model
+ settings$run$start.date <- lubridate::ymd_hms(settings$state.data.assimilation$start.date, truncated = 3)
+ settings$run$end.date <- lubridate::ymd_hms(settings$state.data.assimilation$end.date, truncated = 3)
+ }
+ } else{
+ inputs.split <- inputs
+ }
+ settings
+ }, .progress = F)
+ conf.settings<- PEcAn.settings::as.MultiSettings(conf.settings)
+ ###-------------------------------------------------------------------###
+ ### set up for data assimilation ###
+ ###-------------------------------------------------------------------###----
+ # Reading param samples-------------------------------
+ #create params object using samples generated from TRAITS functions
+ if (is.null(ensemble.samples)) {
+ load(file.path(settings$outdir, "samples.Rdata"))
+ }
+ #reformatting params
+ new.params <- sda_matchparam(settings, ensemble.samples, site.ids, nens)
+ # get the joint input design.
+ for (i in seq_along(settings)) {
+ # get the input names that are registered for sampling.
+ names.sampler <- names(settings$ensemble$samplingspace)
+ # get the input names for the current site.
+ names.site.input <- names(settings[[i]]$run$inputs)
+ # remove parameters field from the list.
+ names.sampler <- names.sampler[-which(names.sampler == "parameters")]
+ # find a site that has all registered inputs except for the parameter field.
+ if (all(names.sampler %in% names.site.input)) {
+ input_design <- PEcAn.uncertainty::generate_joint_ensemble_design(settings = settings[[i]],
+ ensemble_samples = ensemble.samples,
+ ensemble_size = nens)[[1]]
+ break
+ }
+ }
+ ###------------------------------------------------------------------------------------------------###
+ ### loop over time ###
+ ###------------------------------------------------------------------------------------------------###
+ # initialize the lists of covariates for the debias feature.
+ pre.states <- vector("list", length = length(var.names)) %>% purrr::set_names(var.names)
+ # initialize the lists of forecasts for all time points.
+ all.X <- vector("list", length = nt)
+ for (t in 1:nt) {
+ # initialize dat for saving memory usage.
+ sda.outputs <- FORECAST <- enkf.params <- ANALYSIS <- ens_weights <- list()
+ obs.t <- as.character(lubridate::date(obs.times[t]))
+ obs.year <- lubridate::year(obs.t)
+ PEcAn.logger::logger.info(paste("Processing date:", obs.t))
+ ###-------------------------------------------------------------------------###
+ ### Taking care of Forecast. Splitting / Writing / running / reading back ###
+ ###-------------------------------------------------------------------------###-----
+ #- Check to see if this is the first run or not and what inputs needs to be sent to write.ensemble configs
+ if (t>1){
+ #for next time step split the met if model requires
+ #-Splitting the input for models that don't care about the simulation start and end times and run for as long as their met file lasts.
+ PEcAn.logger::logger.info("Splitting mets!")
+ inputs.split <-
+ furrr::future_pmap(list(conf.settings %>% `class<-`(c("list")), inputs, model), function(settings, inputs, model) {
+ # Loading the model package - this is required bc of the furrr
+ library(paste0("PEcAn.",model), character.only = TRUE)
+ inputs.split <- inputs
+ if (!no_split) {
+ for (i in seq_len(nens)) {
+ #---------------- model specific split inputs
+ inputs.split$met$samples[i] <- do.call(
+ my.split_inputs,
+ args = list(
+ settings = settings,
+ start.time = (lubridate::ymd_hms(obs.times[t - 1], truncated = 3) + lubridate::second(lubridate::hms("00:00:01"))),
+ stop.time = lubridate::ymd_hms(obs.times[t], truncated = 3),
+ inputs = inputs$met$samples[[i]])
+ )
+ }
+ } else{
+ inputs.split <- inputs
+ }
+ inputs.split
+ })
+ #---------------- setting up the restart argument for each site separately and keeping them in a list
+ PEcAn.logger::logger.info("Collecting restart info!")
+ restart.list <-
+ furrr::future_pmap(list(out.configs, conf.settings %>% `class<-`(c("list")), params.list, inputs.split),
+ function(configs, settings, new.params, inputs) {
+ #if the new state for each site only has one row/col.
+ #then we need to convert it to matrix to solve the indexing issue.
+ new_state_site <- new.state[, which(attr(X, "Site") %in% settings$run$site$id)]
+ if(is.vector(new_state_site)){
+ new_state_site <- matrix(new_state_site)
+ }
+ list(
+ runid = configs$runs$id,
+ start.time = strptime(obs.times[t -1], format = "%Y-%m-%d %H:%M:%S") + lubridate::second(lubridate::hms("00:00:01")),
+ stop.time = strptime(obs.times[t], format ="%Y-%m-%d %H:%M:%S"),
+ settings = settings,
+ new.state = new_state_site,
+ new.params = new.params,
+ inputs = inputs,
+ RENAME = FALSE,
+ ensemble.id = settings$ensemble$ensemble.id
+ )
+ })
+ } else { ## t == 1
+ restart.list <- vector("list", length(conf.settings))
+ }
+ # release memory.
+ gc()
+ # submit jobs for writing configs.
+ # writing configs for each settings
+ PEcAn.logger::logger.info("Writting configs!")
+ # here we use the foreach instead of furrr
+ # because for some reason, the furrr has problem returning the sample paths.
+ cl <- parallel::makeCluster(cores)
+ doSNOW::registerDoSNOW(cl)
+ temp.settings <- NULL
+ restart.arg <- NULL
+ out.configs <- foreach::foreach(temp.settings = as.list(conf.settings),
+ restart.arg = restart.list,
+ .packages = c("Kendall",
+ "purrr",
+ "PEcAn.uncertainty",
+ paste0("PEcAn.", model),
+ "PEcAnAssimSequential")) %dopar% {
+ temp <- PEcAn.uncertainty::write.ensemble.configs(
+ input_design = input_design,
+ ensemble.size = nens,
+ defaults = temp.settings$pfts,
+ ensemble.samples = ensemble.samples,
+ settings = temp.settings,
+ model = temp.settings$model$type,
+ write.to.db = temp.settings$database$bety$write,
+ restart = restart.arg,
+ # samples=inputs,
+ rename = FALSE
+ )
+ return(temp)
+ } %>% stats::setNames(site.ids)
+ parallel::stopCluster(cl)
+ foreach::registerDoSEQ()
+ # update the file paths of different inputs when t = 1.
+ if (t == 1) {
+ inputs <- out.configs %>% purrr::map(~.x$samples)
+ }
+ # collect run info.
+ # get ensemble ids for each site.
+ ensemble.ids <- site.ids %>% furrr::future_map(function(i){
+ run.list <- c()
+ for (j in 1:nens) {
+ run.list <- c(run.list, paste0("ENS-", sprintf("%05d", j), "-", i))
+ }
+ return(run.list)}, .progress = F) %>% unlist
+ runs.tmp <- file.path(rundir, ensemble.ids)
+ # local model executions.
+ PEcAn.logger::logger.info("Running models!")
+ job.files <- file.path(runs.tmp, "job.sh")
+ temp <- job.files %>% furrr::future_map(function(f){
+ cmd <- paste0("cd ", dirname(f), ";./job.sh")
+ system(cmd, intern = F, ignore.stdout = T, ignore.stderr = T)
+ }, .progress = F)
+ # submit jobs for reading sda outputs.
+ PEcAn.logger::logger.info("Reading forecast outputs!")
+ reads <- build_X(out.configs = out.configs,
+ settings = settings,
+ new.params = new.params,
+ nens = nens,
+ read_restart_times = read_restart_times,
+ outdir = outdir,
+ t = t,
+ var.names = var.names,
+ my.read_restart = my.read_restart,
+ restart_flag = FALSE)
+ #let's read the parameters of each site/ens
+ params.list <- reads %>% purrr::map(~.x %>% purrr::map("params"))
+ # add namespace for variables inside the foreach.
+ X <- reads %>% furrr::future_map(function(r){
+ r %>% purrr::map_df(~.x[["X"]] %>% t %>% as.data.frame)
+ })
+ #replacing crazy outliers before it's too late
+ if (control$OutlierDetection){
+ X <- outlier.detector.boxplot(X)
+ PEcAn.logger::logger.info("Outlier Detection.")
+ }
+ # convert from forecast list to data frame.
+ X <- seq_along(X) %>% furrr::future_map(function(i){
+ temp <- do.call(cbind, X[i])
+ colnames(temp) <- paste0(var.names, ".", i)
+ return(temp)
+ }) %>%
+ dplyr::bind_cols() %>%
+ `colnames<-`(c(rep(var.names, length(X)))) %>%
+ `attr<-`('Site',c(rep(site.ids, each=length(var.names))))
+ all.X[[t]] <- X
+ # start debiasing.
+ debias.out <- NULL
+ if (!is.null(debias$start.year)) {
+ if (obs.year >= debias$start.year) {
+ PEcAn.logger::logger.info("Start debiasing!")
+ debias.out <- sda_bias_correction(site.locs,
+ t, all.X,
+ obs.mean,
+ state.interval,
+ debias$cov.dir,
+ pre.states,
+ .get_debias_mod)
+ X <- debias.out$X
+ pre.states <- debias.out$pre.states
+ }
+ }
+ FORECAST[[obs.t]] <- all.X[[t]] <- X
+ gc()
+ ###-------------------------------------------------------------------###
+ ### preparing OBS ###
+ ###-------------------------------------------------------------------###----
+ #To trigger the analysis function with free run, you need to first specify the control$forceRun as TRUE,
+ #Then specify the settings$state.data.assimilation$scalef as 0, and settings$state.data.assimilation$free.run as TRUE.
+ if (!is.null(obs.mean[[t]][[1]]) || (as.logical(settings$state.data.assimilation$free.run) & control$forceRun)) {
+ #decide if we want the block analysis function or multi-site analysis function.
+ #initialize block.list.all.
+ if (t == 1 || !exists("block.list.all")) {
+ block.list.all <- obs.mean %>% purrr::map(function(l){NULL})
+ }
+ #initialize MCMC arguments.
+ if (is.null(control$MCMC.args)) {
+ MCMC.args <- list(niter = 1e5,
+ nthin = 10,
+ nchain = 1,
+ nburnin = 5e4)
+ } else {
+ MCMC.args <- control$MCMC.args
+ }
+ #running analysis function.
+ # forbid submitting jobs to remote.
+ settings$state.data.assimilation$batch.settings$analysis <- NULL
+ enkf.params[[obs.t]] <- analysis_sda_block(settings, block.list.all, X, obs.mean, obs.cov, t, nt, MCMC.args, pre_enkf_params)
+ enkf.params[[obs.t]] <- c(enkf.params[[obs.t]], RestartList = list(restart.list %>% stats::setNames(site.ids)))
+ block.list.all <- enkf.params[[obs.t]]$block.list.all
+ #Forecast
+ mu.f <- enkf.params[[obs.t]]$mu.f
+ Pf <- enkf.params[[obs.t]]$Pf
+ #Analysis
+ Pa <- enkf.params[[obs.t]]$Pa
+ mu.a <- enkf.params[[obs.t]]$mu.a
+ } else {
+ mu.f <- colMeans(X) #mean Forecast - This is used as an initial condition
+ mu.a <- mu.f
+ if(is.null(Q)){
+ q.bar <- diag(ncol(X))
+ PEcAn.logger::logger.warn('Process variance not estimated. Analysis has been given uninformative process variance')
+ }
+ Pf <- stats::cov(X)
+ Pa <- Pf
+ enkf.params[[obs.t]] <- list(mu.f = mu.f, Pf = Pf, mu.a = mu.a, Pa = Pa)
+ }
+ ###-------------------------------------------------------------------###
+ ### adjust/update state matrix ###
+ ###-------------------------------------------------------------------###----
+ # if we don't have the analysis from the analysis function.
+ if (is.null(enkf.params[[obs.t]]$analysis)) {
+ analysis <- as.data.frame(mvtnorm::rmvnorm(as.numeric(nrow(X)), mu.a, Pa, method = "svd"))
+ } else {
+ analysis <- enkf.params[[obs.t]]$analysis
+ }
+ enkf.params[[obs.t]]$analysis <- NULL
+ ##### Mapping analysis vectors to be in bounds of state variables
+ for(i in 1:ncol(analysis)){
+ int.save <- state.interval[which(startsWith(colnames(analysis)[i], var.names)),]
+ analysis[analysis[,i] < int.save[1],i] <- int.save[1]
+ analysis[analysis[,i] > int.save[2],i] <- int.save[2]
+ }
+ ## in the future will have to be separated from analysis
+ new.state <- as.data.frame(analysis)
+ ANALYSIS[[obs.t]] <- analysis
+ ens_weights[[obs.t]] <- PEcAnAssimSequential::sda_weights_site(FORECAST, ANALYSIS, 1, nens)
+ ###-------------------------------------------------------------------###
+ ### save outputs ###
+ ###-------------------------------------------------------------------###----
+ sda.outputs <- list(obs.mean = obs.mean[[t]],
+ obs.cov = obs.cov[[t]],
+ forecast = FORECAST[[obs.t]],
+ analysis = ANALYSIS[[obs.t]],
+ enkf.params = enkf.params[[obs.t]],
+ ens_weights[[obs.t]],
+ params.list = params.list,
+ restart.list = restart.list,
+ debias.out = debias.out)
+
+ # save file to the out folder.
+ save(sda.outputs, file = file.path(settings$outdir, paste0("sda.output", t, ".Rdata")))
+ # remove files as SDA runs
+ if (!(control$keepNC) && t == 1){
+ PEcAn.logger::logger.info("Deleting NC files!")
+ outs.tmp <- file.path(outdir, ensemble.ids)
+ temp <- outs.tmp %>% furrr::future_map(function(f){
+ temp <- list.files(f, "*.nc", full.names = T)
+ unlink(temp)
+ }, .progress = F)
+ }
+ if(!is.null(control$send_email)){
+ sendmail <- Sys.which("sendmail")
+ mailfile <- tempfile("mail")
+ cat(paste0("From: ", control$send_email$from, "\n", "Subject: ", "SDA progress report", "\n", "To: ", control$send_email$to, "\n", "\n", paste("Time point:", obs.times[t], "has been completed!")), file = mailfile)
+ system2(sendmail, c("-f", paste0("\"", control$send_email$from, "\""), paste0("\"", control$send_email$to, "\""), "<", mailfile))
+ unlink(mailfile)
+ }
+ }
+ # assemble results.
+ sda.out.files <- file.path(settings$outdir, paste0("sda.output", 1:nt, ".Rdata"))
+ analysis.all <- forecast.all <- vector("list", nt)
+ for (file in seq_along(sda.out.files)) {
+ res_env <- new.env()
+ load(sda.out.files[file], envir = res_env)
+ analysis.all[[file]] <- res_env$sda.outputs$analysis
+ forecast.all[[file]] <- res_env$sda.outputs$forecast
+ }
+ names(analysis.all) <- as.character(lubridate::date(obs.times))
+ names(forecast.all) <- as.character(lubridate::date(obs.times))
+ save(list = c("analysis.all", "forecast.all"), file = file.path(settings$outdir, "sda.all.forecast.analysis.Rdata"))
+ # merge NC files.
+ if (control$merge_nc) {
+ nc.folder <- file.path(settings$outdir, "merged_nc")
+ if (file.exists(nc.folder)) unlink(nc.folder)
+ dir.create(nc.folder)
+ temp <- PEcAn.utils::nc_merge_all_sites_by_year(model.outdir = outdir,
+ nc.outdir = nc.folder,
+ ens.num = nens,
+ site.ids = as.numeric(site.ids),
+ start.date = obs.times[1],
+ end.date = obs.times[length(obs.times)],
+ time.step = paste(1, settings$state.data.assimilation$forecast.time.step),
+ cores = cores)
+ # remove rundir and outdir.
+ unlink(rundir, recursive = T)
+ unlink(outdir, recursive = T)
+ }
+ # remove met files.
+ unlink(file.path(settings$outdir, "Extracted_met"), recursive = T)
+ gc()
+}
+
+
+##' This function provides means to split large SDA runs into separate `qsub` jobs.
+##' Including job creation, submission, and assembly.
+##' @title qsub_sda
+#' @param settings PEcAn settings object
+#' @param obs.mean Lists of date times named by time points, which contains lists of sites named by site ids, which contains observation means for each state variables of each site for each time point.
+#' @param obs.cov Lists of date times named by time points, which contains lists of sites named by site ids, which contains observation covariances for all state variables of each site for each time point.
+#' @param Q Process covariance matrix given if there is no data to estimate it.
+#' @param pre_enkf_params Used for passing pre-existing time-series of process error into the current SDA runs to ignore the impact by the differences between process errors.
+#' @param ensemble.samples Pass ensemble.samples from outside to avoid GitHub check issues.
+#' @param outdir Physical path to the folder where the SDA outputs will be stored.
+#' The default is NULL, where we will be using outdir from the settings object.
+#' @param control List of flags controlling the behavior of the SDA.
+#' `TimeseriesPlot` for post analysis examination;
+#' `OutlierDetection` decide if we want to execute the outlier detection each time after the model forecasting;
+#' `send_email` contains lists for sending email to report the SDA progress;
+#' `keepNC` decide if we want to keep the NetCDF files inside the out directory;
+#' `forceRun` decide if we want to proceed the Bayesian MCMC sampling without observations;
+#' `MCMC.args` includes lists for controlling the MCMC sampling process (iteration, nchains, burnin, and nthin).
+#' @param block.index list of site ids for each block, default is NULL. This is used when the localization turns on.
+#' Please keep using the default value because the localization feature is still in development.
+#' @param debias List: R list containing the covariance directory and the start year.
+#' covariance directory should include GeoTIFF files named by year.
+#' start year is a numeric input which decides when to start the debiasing feature.
+#' @param prefix character: the desired folder name to store the outputs.
+#'
+#' @author Dongchen Zhang
+#' @return NONE
+#' @export
+qsub_sda <- function(settings,
+ obs.mean,
+ obs.cov,
+ Q,
+ pre_enkf_params,
+ ensemble.samples,
+ outdir,
+ control,
+ block.index = NULL,
+ debias = list(cov.dir = NULL, start.year = NULL),
+ prefix = "batch") {
+ # read from settings.
+ L <- length(settings)
+ # grab info from settings.
+ # if we already specified how many blocks to run.
+ if (!is.null(block.index)) {
+ num.folder <- length(block.index)
+ } else {
+ num.folder <- as.numeric(settings$state.data.assimilation$batch.settings$general.job$folder.num)
+ }
+ cores <- as.numeric(settings$state.data.assimilation$batch.settings$general.job$cores)
+ qsub.cmd <- settings$state.data.assimilation$batch.settings$qsub.cmd
+ # initialize parallel.
+ cl <- parallel::makeCluster(cores)
+ on.exit(parallel::stopCluster(cl), add = TRUE)
+ doSNOW::registerDoSNOW(cl)
+ num.per.folder <- ceiling(L/num.folder)
+ if (is.null(outdir)) {
+ outdir <- settings$outdir
+ }
+ # create folder for storing job outputs.
+ batch.folder <- file.path(outdir, prefix)
+ # delete the whole folder if it's not empty.
+ if (file.exists(batch.folder)){
+ PEcAn.logger::logger.info("Deleting batch folder!")
+ f <- NULL
+ foreach::foreach(f = list.files(batch.folder, full.names = T),
+ .packages=c("Kendall")) %dopar% {
+ temp <- system(paste0("rm -rf ", f))
+ }
+ unlink(batch.folder, recursive = T)
+ }
+ dir.create(batch.folder)
+ # loop over sub-folders.
+ folder.paths <- job.ids <- rep(NA, num.folder)
+ PEcAn.logger::logger.info(paste("Submitting", num.folder, "jobs."))
+ # setup progress bar.
+ pb <- utils::txtProgressBar(min=1, max=num.folder, style=3)
+ on.exit(close(pb), add = TRUE)
+ progress <- function(n) utils::setTxtProgressBar(pb, n)
+ opts <- list(progress=progress)
+ i <- NULL
+ temp <- foreach::foreach(i = 1:num.folder,
+ .packages=c("Kendall", "purrr"),
+ .options.snow=opts) %dopar% {
+ # create folder for each set of job runs.
+ # calculate start and end index for the current folder.
+ head.num <- (i-1)*num.per.folder + 1
+ if (i*num.per.folder > L) {
+ tail.num <- L
+ } else {
+ tail.num <- i*num.per.folder
+ }
+ if (!is.null(block.index)) {
+ block.site.inds <- block.index[[i]]
+ } else {
+ block.site.inds <- head.num:tail.num
+ }
+ # naming and creating folder by the folder index.
+ folder.name <- paste0("Job_", i)
+ folder.path <- file.path(batch.folder, folder.name)
+ folder.paths[i] <- folder.path
+ dir.create(folder.path)
+ # save corresponding block list to the folder.
+ temp.settings <- PEcAn.settings::write.settings(settings[block.site.inds], outputfile = "pecan.xml", outputdir = folder.path)
+ temp.obs.mean <- obs.mean %>% purrr::map(function(obs){
+ obs[block.site.inds]
+ })
+ temp.obs.cov <- obs.cov %>% purrr::map(function(obs){
+ obs[block.site.inds]
+ })
+ configs <- list(setting = temp.settings,
+ obs.mean = temp.obs.mean,
+ obs.cov = temp.obs.cov,
+ Q = Q,
+ pre_enkf_params = pre_enkf_params,
+ ensemble.samples = ensemble.samples,
+ outdir = folder.path, # outdir
+ cores = cores,
+ control = control,
+ debias = debias,
+ site.ids = block.site.inds)
+ saveRDS(configs, file = file.path(folder.path, "configs.rds"))
+ # create job file.
+ jobsh <- c("#!/bin/bash -l",
+ "module load R/4.1.2",
+ "echo \"require (PEcAnAssimSequential)",
+ " require (PEcAn.uncertainty)",
+ " require (foreach)",
+ " qsub_sda_batch('@FOLDER_PATH@')",
+ " \" | R --no-save")
+ jobsh <- gsub("@FOLDER_PATH@", folder.path, jobsh)
+ writeLines(jobsh, con = file.path(folder.path, "job.sh"))
+ # qsub command.
+ qsub <- qsub.cmd
+ if (grepl("NAME", qsub.cmd, fixed = TRUE)) {
+ qsub <- gsub("@NAME@", paste0("Job-", i), qsub)
+ }
+ if (grepl("STDOUT", qsub.cmd, fixed = TRUE)) {
+ qsub <- gsub("@STDOUT@", file.path(folder.path, "stdout.log"), qsub)
+ }
+ if (grepl("STDERR", qsub.cmd, fixed = TRUE)) {
+ qsub <- gsub("@STDERR@", file.path(folder.path, "stderr.log"), qsub)
+ }
+ if (grepl("CORES", qsub.cmd, fixed = TRUE)) {
+ qsub <- gsub("@CORES@", cores, qsub)
+ }
+ qsub <- strsplit(qsub, " (?=([^\"']*\"[^\"']*\")*[^\"']*$)", perl = TRUE)
+ cmd <- qsub[[1]]
+ out <- system2(cmd, file.path(folder.path, "job.sh"), stdout = TRUE, stderr = TRUE)
+ }
+}
+
+##' This function can help to execute sda function.
+##' @title qsub_sda_batch
+##' @param folder.path character: path where the `configs.rds` file is stored.
+##' @author Dongchen Zhang.
+##' @export
+qsub_sda_batch <- function(folder.path) {
+ configs <- readRDS(file.path(folder.path, "configs.rds"))
+ setting <- PEcAn.settings::read.settings(configs$setting)
+ sda.enkf_local(setting,
+ configs$obs.mean,
+ configs$obs.cov,
+ configs$Q,
+ configs$pre_enkf_params,
+ configs$ensemble.samples,
+ configs$outdir,
+ configs$control,
+ configs$debias)
+}
+
+##' This function can help to assemble sda outputs (analysis and forecasts) from each job execution.
+##' @title sda_assemble
+##' @param batch.folder character: path where the SDA batch jobs stored.
+##' @param outdir character: path where we want to store the assembled analysis and forecasts.
+##' @author Dongchen Zhang.
+##' @export
+sda_assemble <- function (batch.folder, outdir) {
+ # find folder paths to each SDA job.
+ folders <- list.files(batch.folder, full.names = T)
+ fail.folders <- folders[!file.exists(file.path(folders, "sda.all.forecast.analysis.Rdata"))]
+ # if we find any failed job.
+ if (length(fail.folders) > 0) {
+ PEcAn.logger::logger.info("Failed jobs found.")
+ PEcAn.logger::logger.info(fail.folders)
+ return(fail.folders)
+ }
+ # assemble SDA forecast and analysis.
+ # order folder names.
+ folder.inds <- folders %>% purrr::map(function(f){
+ as.numeric(strsplit(basename(f), "_")[[1]][2])
+ }) %>% unlist
+ order.folders <- folders[order(folder.inds)]
+ # stack analysis and forecast results.
+ load(file.path(order.folders[1], "sda.all.forecast.analysis.Rdata"))
+ times <- names(analysis.all)
+ Analysis.all <- analysis.all
+ Forecast.all <- forecast.all
+ for (f in order.folders[2:length(order.folders)]) {
+ load(file.path(f, "sda.all.forecast.analysis.Rdata"))
+ for (t in seq_along(analysis.all)) {
+ Analysis.all[[t]] <- cbind(Analysis.all[[t]], analysis.all[[t]])
+ Forecast.all[[t]] <- cbind(Forecast.all[[t]], forecast.all[[t]])
+ }
+ }
+ analysis.all <- Analysis.all
+ forecast.all <- Forecast.all
+ names(analysis.all) <- times
+ names(forecast.all) <- times
+ # save results.
+ save(list = c("analysis.all", "forecast.all"), file = file.path(outdir, "sda.all.forecast.analysis.Rdata"))
+}
\ No newline at end of file
diff --git a/modules/assim.sequential/R/sda_bias_correction.R b/modules/assim.sequential/R/sda_bias_correction.R
new file mode 100644
index 00000000000..a3bc2e37be8
--- /dev/null
+++ b/modules/assim.sequential/R/sda_bias_correction.R
@@ -0,0 +1,158 @@
+#' @description
+#' This function helps to correct the forecasts' biases based on
+#' ML (random forest) training on the previous time point.
+#' @title sda_bias_correction
+#'
+#' @param site.locs data.frame: data.frame that contains longitude and latitude in its first and second column.
+#' @param t numeric: the current number of time points (e.g., t=1 for the beginning time point).
+#' @param all.X list: lists of data frame of model forecast from the beginning to the current time points
+#' that has n (ensemble size) rows and n.var (number of variables) times n.site (number of locations) columns.
+#' (e.g., 100 ensembles, 4 variables, and 8,000 locations will end up with data.frame of 100 rows and 32,000 columns)
+#' @param obs.mean List: lists of date times named by time points, which contains lists of sites named by site ids,
+#' which contains observation means for each state variables of each site for each time point.
+#' @param state.interval matrix: containing the upper and lower boundaries for each state variable.
+#' @param cov.dir character: physical path to the directory contains the time series covariate maps.
+#' @param py.init R function: R function to initialize the python functions. Default is NULL.
+#' the default random forest will be used if `py.init` is NULL.
+#' @param pre.states list: containing previous covariates for each location.
+#'
+#' @return list: the current X after the bias-correction; the ML model for each variable; predicted residuals.
+#'
+#' @author Dongchen Zhang
+#' @importFrom dplyr %>%
+
+sda_bias_correction <- function (site.locs,
+ t, all.X,
+ obs.mean,
+ state.interval,
+ cov.dir,
+ pre.states,
+ py.init = NULL) {
+ # initialize X.
+ X <- all.X[[t]]
+ pre.X <- all.X[[t-1]]
+ pre.pre.X <- all.X[[t-2]]
+ # if we have prescribed python script to use.
+ if (!is.null(py.init)) {
+ # load python functions.
+ py <- py.init()
+ }
+ # grab variable names.
+ var.names <- rownames(state.interval)
+ # create terra spatial points.
+ pts <- terra::vect(cbind(site.locs$Lon, site.locs$Lat), crs = "epsg:4326")
+ # grab the current year.
+ y <- lubridate::year(names(obs.mean))[t]
+ # if we don't have previous extracted information.
+ # grab the covariate file path.
+ cov.file.pre <- list.files(cov.dir, full.names = T)[which(grepl(y-1, list.files(cov.dir)))] # previous covariates.
+ # extract covariates for the previous time point.
+ cov.pre <- terra::extract(x = terra::rast(cov.file.pre), y = pts)[,-1] # remove the first ID column.
+ # factorize land cover band.
+ if ("LC" %in% colnames(cov.pre)) {
+ cov.pre[,"LC"] <- factor(cov.pre[,"LC"])
+ }
+ # loop over variables.
+ # initialize model list for each variable.
+ models <- res.vars <- vector("list", length = length(var.names)) %>% purrr::set_names(var.names)
+ for (v in var.names) {
+ message(paste("processing", v))
+ # train residuals on the previous time point.
+ # grab column index for the current variable.
+ inds <- which(grepl(v, colnames(pre.X)))
+ # grab observations for the current variable.
+ obs.v <- obs.mean[[t-1]] %>% purrr::map(function(obs){
+ if (is.null(obs[[v]])) {
+ return(NA)
+ } else {
+ return(obs[[v]])
+ }
+ }) %>% unlist
+ # calculate residuals for the previous time point.
+ res.pre <- colMeans(pre.X[,inds]) - obs.v
+ # grab observations for the current variable at the time point before the previous one (t-2).
+ obs.v.pre <- obs.mean[[t-2]] %>% purrr::map(function(obs){
+ if (is.null(obs[[v]])) {
+ return(NA)
+ } else {
+ return(obs[[v]])
+ }
+ }) %>% unlist
+ # calculate residuals for the time point before the previous one (t-2).
+ res.pre.pre <- colMeans(pre.pre.X[,inds]) - obs.v.pre
+ # prepare training data set.
+ ml.df <- cbind(cov.pre, res.pre.pre, colMeans(pre.X)[inds], res.pre)
+ colnames(ml.df)[length(ml.df)-1] <- "raw_dat" # rename the column name.
+ colnames(ml.df)[length(ml.df)-2] <- "res_lag" # rename the column name.
+ ml.df <- rbind(pre.states[[v]], ml.df) # grab previous covariates.
+ ml.df <- ml.df[which(stats::complete.cases(ml.df)),]
+ pre.states[[v]] <- ml.df # store the historical covariates for future use.
+ # prepare predicting covariates.
+ # extract covariates for the current time point.
+ cov.file <- list.files(cov.dir, full.names = T)[which(grepl(y, list.files(cov.dir)))] # current covariates.
+ cov.current <- terra::extract(x = terra::rast(cov.file), y = pts)[,-1] # remove the first ID column.
+ # factorize land cover band.
+ if ("LC" %in% colnames(cov.current)) {
+ cov.current[,"LC"] <- factor(cov.current[,"LC"])
+ }
+ cov.df <- cbind(cov.current, res.pre, colMeans(X)[inds])
+ complete.inds <- which(stats::complete.cases(cov.df))
+ cov.df <- cov.df[complete.inds,]
+ colnames(cov.df)[length(cov.df)] <- "raw_dat"
+ colnames(cov.df)[length(cov.df)-1] <- "res_lag"
+ cov.names <- colnames(cov.df) # grab band names for the covariate map.
+ if (nrow(ml.df) == 0) next # jump to the next loop if we have zero records.
+ if (is.null(py.init)) {
+ # random forest training.
+ formula <- stats::as.formula(paste("res.pre", "~", paste(cov.names, collapse = " + ")))
+ model <- randomForest::randomForest(formula,
+ data = ml.df,
+ ntree = 1000,
+ na.action = stats::na.omit,
+ keep.forest = TRUE,
+ importance = TRUE)
+ var.imp <- randomForest::importance(model)
+ models[[v]] <- var.imp # store the variable importance.
+ # predict residuals for the current time point.
+ res.current <- stats::predict(model, cov.df)
+ } else {
+ # using functions from the python script.
+ # training.
+ fi_ret <- py$train_full_model(
+ name = as.character(v), # current variable name.
+ X = as.matrix(ml.df[,-length(ml.df)]), # covariates + previous forecast means.
+ y = as.numeric(ml.df[["res.pre"]]), # residuals.
+ feature_names = colnames(ml.df[,-length(ml.df)])
+ )
+ # predicting.
+ res.current <- py$predict_residual(as.character(v), as.matrix(cov.df))
+ # store model outputs.
+ # weights.
+ w_now <- try(py$get_model_weights(as.character(v)), silent = TRUE)
+ w_now <- min(max(as.numeric(w_now), 0), 1)
+ w_named <- c(KNN = w_now, TREE = 1 - w_now)
+ # var importance.
+ fi_ret <- tryCatch(reticulate::py_to_r(fi_ret), error = function(e) fi_ret)
+ fn <- as.character(unlist(fi_ret[["names"]], use.names = FALSE))
+ fv <- as.numeric(unlist(fi_ret[["importances"]], use.names = FALSE)) %>% purrr::set_names(fn)
+ models[[v]] <- list(weights = w_named, var.imp = fv) # store the variable importance.
+ }
+ # assign NAs to places with no observations in the previous time point.
+ res <- rep(NA, length(obs.v)) %>% purrr::set_names(unique(attributes(X)$Site))
+ res[complete.inds] <- res.current
+ res[which(is.na(obs.v))] <- NA
+ res.vars[[v]] <- res
+ # correct the current forecasts.
+ for (i in seq_along(inds)) {
+ if (is.na(res[i])) next
+ X[,inds[i]] <- X[,inds[i]] - res[i]
+ }
+ }
+ # map forecasts towards the prescribed variable boundaries.
+ for(i in 1:ncol(X)){
+ int.save <- state.interval[which(startsWith(colnames(X)[i], var.names)),]
+ X[X[,i] < int.save[1],i] <- int.save[1]
+ X[X[,i] > int.save[2],i] <- int.save[2]
+ }
+ return(list(X = X, models = models, res = res.vars, pre.states = pre.states))
+}
\ No newline at end of file
diff --git a/modules/assim.sequential/R/sda_weights_site.R b/modules/assim.sequential/R/sda_weights_site.R
index d3938695b5e..cce8ac8b5d8 100644
--- a/modules/assim.sequential/R/sda_weights_site.R
+++ b/modules/assim.sequential/R/sda_weights_site.R
@@ -14,46 +14,40 @@ sda_weights_site <- function(FORECAST, ANALYSIS, t, ens){
#of the original code "Weights_Site.R" written by Hamzed.
#read site ids from forecast results.
site.ids <- attr(FORECAST[[1]],'Site') %>% unique()
+ Year.applid.weight <- names(FORECAST)[t]
#calculate weights for each ensemble member of each site at time point t.
- Weights.new <- purrr::pmap(list(ANALYSIS[t],
- FORECAST[t],
- names(FORECAST)[t]),
- function(ANALYSIS.r, FORECAST.r, Year.applid.weight) {
- #loop over each site
- site.ids %>%
- future_map_dfr(function(one.site){
- #match site id
- site.ind <- which( attr(FORECAST[[1]],'Site') %in% one.site)
- #match date
- ind <- which( names(FORECAST) %in% Year.applid.weight)
-
- #if we only have single variable.
- if(length(site.ind) == 1){
- #calculate analysis mean value
- mu.a <- mean(ANALYSIS.r[,site.ind])
- #calculate analysis variance
- Pa <- stats::sd(ANALYSIS.r[,site.ind])
- #calculate weights
- w <- stats::dnorm(FORECAST.r[,site.ind], mu.a, Pa, TRUE)
- }else{
- #calculate analysis mean value
- mu.a <- apply(ANALYSIS.r[,site.ind],2 ,mean)
- #calculate analysis covariance matrix
- Pa <- stats::cov(ANALYSIS.r[,site.ind])
- #calculate weights
- w <- emdbook::dmvnorm(FORECAST.r[,site.ind], mu.a, Pa, TRUE)
- }
-
- #return outputs
- data.frame(
- ens = 1:ens,
- raw_weight=w,
- Site= one.site,
- Relative_weight=abs(w)/sum(abs(w)),
- Year=lubridate::year(Year.applid.weight)
- )
- }, .progress = TRUE)
- })
+ Weights.new <- site.ids %>%
+ future_map_dfr(function(one.site){
+ #match site id
+ site.ind <- which(attr(FORECAST[[1]],'Site') %in% one.site)
+ #match date
+ ind <- which(names(FORECAST) %in% Year.applid.weight)
+
+ #if we only have single variable.
+ if(length(site.ind) == 1){
+ #calculate analysis mean value
+ mu.a <- mean(ANALYSIS[[t]][,site.ind])
+ #calculate analysis variance
+ Pa <- stats::sd(ANALYSIS[[t]][,site.ind])
+ #calculate weights
+ w <- stats::dnorm(FORECAST[[t]][,site.ind], mu.a, Pa, TRUE)
+ }else{
+ #calculate analysis mean value
+ mu.a <- apply(ANALYSIS[[t]][,site.ind],2 ,mean)
+ #calculate analysis covariance matrix
+ Pa <- stats::cov(ANALYSIS[[t]][,site.ind])
+ #calculate weights
+ w <- mvtnorm::dmvnorm(x = FORECAST[[t]][,site.ind], mean = mu.a, sigma = Pa, log = TRUE)
+ }
+ #return outputs
+ data.frame(
+ ens = 1:ens,
+ raw_weight=w,
+ Site= one.site,
+ Relative_weight=abs(w)/sum(abs(w)),
+ Year=lubridate::year(Year.applid.weight)
+ )
+ }, .progress = F)
Weights.new
}
\ No newline at end of file
diff --git a/modules/assim.sequential/inst/anchor/NA_Site_Selection.Rmd b/modules/assim.sequential/inst/anchor/NA_Site_Selection.Rmd
new file mode 100644
index 00000000000..06b6a98ed6a
--- /dev/null
+++ b/modules/assim.sequential/inst/anchor/NA_Site_Selection.Rmd
@@ -0,0 +1,1257 @@
+---
+title: "Downscale_anchorSites"
+author: "Dongchen Zhang"
+date: '2024-06-21'
+output: html_document
+---
+# The workflow for preprocessing raster datasets is as follows: 1. read raster; 2. reproject raster to WGS84; 3. crop the raster to the extent of North America; 4. resample raster based on the 1-km base map (a reprojected and cropped forest age map).
+
+# read base map.
+
+```{r}
+library(purrr)
+base.map <- terra::rast("/projectnb/dietzelab/dongchen/anchorSites/downscale/base_map.tiff")
+extent <- terra::ext(base.map)
+xres <- terra::xres(base.map)
+yres <- terra::yres(base.map)
+dim <- dim(base.map)
+```
+
+# TWI map.
+
+```{r}
+TWI <- terra::rast("/projectnb/dietzelab/dongchen/anchorSites/downscale/TWI/TWI_resample.tiff")
+```
+
+# forest age map.
+
+```{r}
+# read tiff.
+forest_age <- terra::rast("/projectnb/dietzelab/dongchen/anchorSites/downscale/forest_age/forest_age_2010_TC000_crop.tiff")
+```
+
+# GEDI density.
+
+```{r}
+doi <- "10.3334/ORNLDAAC/2299"
+metadata <- PEcAn.data.remote::NASA_DAAC_download(ul_lat = extent[4],
+ ul_lon = extent[1],
+ lr_lat = extent[3],
+ lr_lon = extent[2],
+ from = "2019-04-18",
+ to = "2023-03-16",
+ just_path = F,
+ outdir = "/projectnb/dietzelab/dongchen/anchorSites/downscale/GEDI_Density/GEDI_4B",
+ doi = doi,
+ ncore = parallel::detectCores()-1,
+ netrc_file = "/projectnb/dietzelab/dongchen/anchorSites/downscale/MODIS_LC/y2001/hdf_data/netrc")
+
+gedi_density <- terra::rast("/projectnb/dietzelab/dongchen/anchorSites/downscale/GEDI_Density/GEDI_4B/GEDI04_B_MW019MW223_02_002_02_R01000M_NS.tif")
+# project image.
+sr <- '+proj=longlat +datum=WGS84 +no_defs +ellps=WGS84 +towgs84=0,0,0'
+tempRast <- terra::project(gedi_density, sr)
+# crop image.
+tempRast <- terra::crop(tempRast, base.map)
+# resample image
+tempRast <- terra::resample(tempRast, base.map, method="mode")
+# write to disk.
+terra::writeRaster(tempRast, file="/projectnb/dietzelab/dongchen/anchorSites/downscale/GEDI_Density/gedi_density_reproj_crop_resample.tif")
+```
+
+# MODIS land cover.
+
+```{r}
+library(lubridate)
+doi <- "10.5067/MODIS/MCD12Q1.061"
+dates <- seq(as.Date("2000-01-01"), as.Date("2012-01-01"), by="1 year")
+for (i in seq_along(dates)) {
+ from <- dates[i]
+ to <- dates[i] %m+% years(1)
+ outdir <- file.path("/projectnb/dietzelab/dongchen/anchorSites/downscale/MODIS_LC", paste0("y", lubridate::year(from)))
+ if (file.exists(outdir)) {
+ next
+ } else {
+ dir.create(outdir)
+ }
+ # download files.
+ hdf.folder <- file.path(outdir, "hdf_data")
+ dir.create(hdf.folder)
+ metadata <- PEcAn.data.remote::NASA_DAAC_download(ul_lat = extent[4],
+ ul_lon = extent[1],
+ lr_lat = extent[3],
+ lr_lon = extent[2],
+ from = from,
+ to = to,
+ just_path = F,
+ outdir = hdf.folder,
+ doi = doi,
+ ncore = parallel::detectCores()-1,
+ netrc_file = "/projectnb/dietzelab/dongchen/anchorSites/downscale/MODIS_LC/y2001/hdf_data/netrc")
+ # convert from hdf to tiff.
+ tiff.folder <- file.path(outdir, "tiff_data")
+ dir.create(tiff.folder)
+ for (j in seq_along(metadata$path)) {
+ temp.img <- terra::rast(metadata$path[j])
+ terra::writeRaster(temp.img, file.path(tiff.folder, paste0("img", j, ".tiff")))
+ if (j%%10 == 0) print(j/length(metadata$path))
+ }
+ zip(file.path(outdir, "tiff_data.zip"), tiff.folder)
+ print(paste("i =", i))
+}
+
+# crop and upscale images.
+folders <- list.files("/projectnb/dietzelab/dongchen/anchorSites/downscale/MODIS_LC/", full.names = T)
+for (i in seq_along(folders)) {
+ if (file.exists(file.path(folders[i], "merged_reproj_crop_resample.tif"))) {
+ next
+ }
+ tempRast <- terra::rast(file.path(folders[i], "merged_reproj.tif"))
+ # crop image.
+ if (file.exists(file.path(folders[i], "merged_reproj_crop.tif"))) {
+ tempRast <- terra::rast(file.path(folders[i], "merged_reproj_crop.tif"))
+ } else {
+ tempRast <- terra::crop(tempRast, base.map)
+ terra::writeRaster(tempRast, file=file.path(folders[i], "merged_reproj_crop.tif"))
+ }
+
+ # resample image.
+ tempRast <- terra::resample(tempRast, base.map, method="mode")
+ terra::writeRaster(tempRast, file=file.path(folders[i], "merged_reproj_crop_resample.tif"))
+ print(basename(folders[i]))
+}
+
+# calculate transition matrix.
+library(purrr)
+if (future::supportsMulticore()) {
+ future::plan(future::multicore)
+} else {
+ future::plan(future::multisession)
+}
+2000:2011 %>% furrr::future_map(function(year){
+ lc_type <- c("water_bodies",
+ "Evergreen_Needleleaf_Trees",
+ "Evergreen_Broadleaf_Trees",
+ "Deciduous_Needleleaf_Trees",
+ "Deciduous_Broadleaf_Trees",
+ "Shrub",
+ "Grass",
+ "Cereal_Croplands",
+ "Broadleaf_Croplands",
+ "Urban",
+ "Snow",
+ "Non_Vegetated_Lands")
+ lc_pre <- terra::rast(file.path("/projectnb/dietzelab/dongchen/anchorSites/downscale/MODIS_LC", paste0("y", year), "merged_reproj_crop_resample.tif"))
+ lc_after <- terra::rast(file.path("/projectnb/dietzelab/dongchen/anchorSites/downscale/MODIS_LC", paste0("y", year+1), "merged_reproj_crop_resample.tif"))
+ lc_pre_vector <- matrix(lc_pre[[5]], byrow = T)
+ lc_after_vector <- matrix(lc_after[[5]], byrow = T)
+ trans.mat <- matrix(0, 12, 12)
+ pb <- utils::txtProgressBar(min = 0, max = length(lc_pre_vector), style = 3)
+ for (i in seq_along(lc_pre_vector)) {
+ trans.mat[lc_pre_vector[i]+1, lc_after_vector[i]+1] <- trans.mat[lc_pre_vector[i]+1, lc_after_vector[i]+1] + 1
+ utils::setTxtProgressBar(pb, i)
+ }
+ trans.mat <- trans.mat %>% `rownames<-`(lc_type) %>% `colnames<-`(lc_type)
+ save(trans.mat, file=file.path("/projectnb/dietzelab/dongchen/anchorSites/downscale/MODIS_LC/outputs", paste0(year, "_transMat.Rdata")))
+}, .progress = T)
+
+# filter land cover time-series.
+# loop over years.
+# read tiff file.
+forest_type <- c(1:4)
+grass_type <- c(5:8)
+non_veg_type <- c(0, 9, 10, 11)
+ts_lc <- c()
+for (y in 2000:2012) {
+ # load image.
+ lc_tif <- terra::rast(file.path("/projectnb/dietzelab/dongchen/anchorSites/downscale/MODIS_LC", paste0("y", y), "merged_reproj_crop_resample.tif"))
+ lc_matrix <- matrix(lc_tif[[5]], byrow = T)
+ # reclassify.
+ lc_matrix[which(lc_matrix %in% forest_type)] <- 1
+ lc_matrix[which(lc_matrix %in% grass_type)] <- 2
+ lc_matrix[which(lc_matrix %in% non_veg_type)] <- 3
+ # combine image.
+ ts_lc <- cbind(ts_lc, lc_matrix)
+ print(y)
+}
+ind <- which(lc_matrix == 1)[5000]
+vec <- ts_lc[ind,]
+filter.lc.ts(vec)
+# function for filtering time series.
+filter.lc.ts <- function(vec, window.L = 3) {
+ L <- length(vec)
+ window <- c()
+ edge.case <- FALSE
+ #
+ if(length(unique(vec)) == 1) {
+ return(c(unique(vec), unique(vec), length(vec)))
+ }
+ for (i in L:1) {
+ # push item into the window.
+ window <- c(window, vec[i])
+ # print(window)
+ # if window has not reached its size.
+ if (length(window) < window.L) {
+ next
+ }
+ # window operation.
+ uni.val <- unique(window)
+ # if there is no change.
+ if (length(uni.val) == 1) {
+
+ } else if (length(uni.val) > 1) {
+ # check if head == tail.
+ if (head(window, 1) == tail(window, 1)) {
+
+ } else {
+ window.ind <- window.L - tail(which(window == head(uni.val, 1)), 1) + 1
+ return(c(tail(rev(uni.val), 2), L - i + 1 - window.ind + 1))
+ }
+ }
+ # remove the last item from the window.
+ window <- tail(window, -1)
+ }
+}
+
+# loop over NA.
+res <- matrix(NA, dim(ts_lc)[1], 4) %>% `colnames<-`(c("from", "to", "years", "type"))
+pb <- utils::txtProgressBar(min=1, max=dim(ts_lc)[1], style=3)
+for (i in 1:dim(ts_lc)[1]) {
+ res[i, 1:3] <- filter.lc.ts(ts_lc[i,])
+ # grab change patterns.
+ if (all(res[i, 1:2] == c(1, 2))) {
+ res[i, 4] <- 1
+ } else if (all(res[i, 1:2] == c(1, 3))) {
+ res[i, 4] <- 2
+ } else if (all(res[i, 1:2] == c(2, 3))) {
+ res[i, 4] <- 3
+ } else if (all(res[i, 1:2] == c(2, 1))) {
+ res[i, 4] <- 4
+ } else if (all(res[i, 1:2] == c(3, 1))) {
+ res[i, 4] <- 5
+ } else if (all(res[i, 1:2] == c(3, 2))) {
+ res[i, 4] <- 6
+ }
+ utils::setTxtProgressBar(pb, i)
+}
+
+# load res object and create multi-layer raster file.
+load("/projectnb/dietzelab/dongchen/anchorSites/downscale/MODIS_LC/outputs/disturbance.Rdata")
+base_map <- terra::rast("/projectnb/dietzelab/dongchen/anchorSites/downscale/MODIS_LC/y2000/merged_reproj_crop_resample.tif")
+base_crs <- terra::crs(base_map)
+base_ext <- terra::ext(base_map)
+disturb_array <- array(NA, c(9360, 19080, 4))
+for (i in 1:4) {
+ disturb_array[, , i] <- matrix(res[, i], 9360, 19080, byrow = T)
+}
+disturb_rast <- terra::rast(disturb_array)
+names(disturb_rast) <- c("from", "to", "years", "type")
+terra::ext(disturb_rast) <- base_ext
+terra::crs(disturb_rast) <- base_crs
+terra::writeRaster(disturb_rast, file="/projectnb/dietzelab/dongchen/anchorSites/downscale/MODIS_LC/outputs/disturb.tif")
+```
+
+# agb 2010 map.
+
+```{r}
+agb <- terra::rast("/projectnb/dietzelab/dongchen/anchorSites/IC/aboveground_biomass_carbon_2010.tif")
+agb_crop <- terra::crop(agb, base.map)
+agb_resample <- terra::resample(agb_crop, base.map)
+terra::writeRaster(agb_resample, file="/projectnb/dietzelab/dongchen/anchorSites/downscale/AGB/agb.tif")
+```
+
+# handling background mask (non-vegetation pixels)
+
+```{r}
+# load disturbance data.
+load("/projectnb/dietzelab/dongchen/anchorSites/downscale/MODIS_LC/outputs/disturbance.Rdata")
+# load forest age data.
+forest_age <- matrix(terra::rast("/projectnb/dietzelab/dongchen/anchorSites/downscale/forest_age/forest_age_2010_TC000_crop.tiff"), byrow = T)
+# load last year MODIS LC map.
+LC <- matrix(terra::rast("/projectnb/dietzelab/dongchen/anchorSites/downscale/MODIS_LC/y2012/merged_reproj_crop_resample.tif")[[5]], byrow = T)
+# calculate mean age for different LC types.
+mean_age <- c()
+for (i in 1:8) {
+ mean_age <- c(mean_age, mean(forest_age[which(LC == i)], na.rm = T))
+}
+# fill forest age: disturbed pixels get years since disturbance; undisturbed vegetated pixels with missing age get the mean age of their land-cover type.
+for (i in seq_along(forest_age)) {
+ # if it's disturbed vegetation.
+ if (res[i, "years"] < 13) {
+ forest_age[i] <- res[i, "years"]
+ next
+ }
+ # no record for the forest age.
+ if (is.na(forest_age[i])) {
+ # if it is non vegetation.
+ if (res[i, "to"] == 3) {
+ # forest_age[i] <- 0
+ next
+ } else {
+ # if it's non-disturbed vegetation.
+ forest_age[i] <- mean_age[LC[i]]
+ }
+ }
+}
+# convert to raster file.
+base_map <- terra::rast("/projectnb/dietzelab/dongchen/anchorSites/downscale/MODIS_LC/y2000/merged_reproj_crop_resample.tif")
+base_crs <- terra::crs(base_map)
+base_ext <- terra::ext(base_map)
+forest_age <- terra::rast(matrix(forest_age, 9360, 19080, byrow = T))
+terra::ext(forest_age) <- base_ext
+terra::crs(forest_age) <- base_crs
+terra::writeRaster(forest_age, file="/projectnb/dietzelab/dongchen/anchorSites/downscale/MODIS_LC/outputs/age.tif")
+```
+
+# FIA plots.
+
+```{r}
+tree_table <- read.csv("/projectnb/dietzelab/dongchen/anchorSites/downscale/FIA/data/ENTIRE_SITETREE.csv")
+tree_table <- tree_table[which(!is.na(tree_table$DIA)),]
+soil_table <- read.csv("/projectnb/dietzelab/dongchen/anchorSites/downscale/FIA/data/ENTIRE_SOILS_LAB.csv")
+soil_table <- soil_table[which(!is.na(soil_table$BULK_DENSITY) & !is.na(soil_table$C_ORG_PCT)),]
+plot_table <- read.csv("/projectnb/dietzelab/dongchen/anchorSites/downscale/FIA/data/ENTIRE_PLOTGEOM.csv")
+plot_table <- plot_table[which(plot_table$INVYR >= 2000),]
+
+uniq_plot <- unique(c(tree_table$PLT_CN, soil_table$PLT_CN))
+inter_plot <- intersect(tree_table$PLT_CN, soil_table$PLT_CN)
+
+plot_lat <- plot_table$LAT[which(plot_table$CN %in% uniq_plot)]
+plot_lon <- plot_table$LON[which(plot_table$CN %in% uniq_plot)]
+plotCN <- plot_table$CN[which(plot_table$CN %in% uniq_plot)]
+
+pts <- data.frame(lat = plot_lat, lon = plot_lon)
+sp::coordinates(pts) <- ~lon+lat
+sp::proj4string(pts) <- terra::crs("+init=epsg:4326") # set it to lat-long
+pts <- terra::vect(pts)
+
+base.map <- terra::rast("/projectnb/dietzelab/dongchen/anchorSites/downscale/base_map.tiff")
+result <- terra::rasterize(pts, base.map, fun="length")
+names(result) <- "FIA_dens"
+terra::writeRaster(result, "/projectnb/dietzelab/dongchen/anchorSites/downscale/FIA/num_plts_per_pixel.tif")
+fia.vec <- terra::as.points(result)
+terra::writeVector(fia.vec, file="/projectnb/dietzelab/dongchen/anchorSites/downscale/FIA/fia_vec.shp")
+```
+
+```{r}
+lat.fia <- lat[unlist(points)[which(ecoClim[unlist(points),"fia"] > 0)]]
+lon.fia <- lon[unlist(points)[which(ecoClim[unlist(points),"fia"] > 0)]]
+inds <- c()
+for (i in seq_along(lat.fia)) {
+ inds <- c(inds, which.min(abs(lat.fia[i]-plot_lat)+abs(lon.fia[i]-plot_lon)))
+}
+CN <- plotCN[inds]
+write.table(CN, file="/projectnb/dietzelab/dongchen/anchorSites/downscale/FIA/selected_fia_cn.txt", row.names = F, col.names = F)
+```
+
+# summarize all layers.
+
+```{r}
+# all raster
+all.rast <- terra::rast(c("/projectnb/dietzelab/dongchen/anchorSites/downscale/MODIS_LC/outputs/age.tif",
+ "/projectnb/dietzelab/dongchen/anchorSites/downscale/AGB/agb.tif",
+ "/projectnb/dietzelab/dongchen/anchorSites/downscale/FIA/num_plts_per_pixel.tif",
+ "/projectnb/dietzelab/dongchen/anchorSites/downscale/GEDI_Density/gedi_density_reproj_crop_resample.tif",
+ "/projectnb/dietzelab/dongchen/anchorSites/downscale/TWI/TWI_resample.tiff"))
+d <- dim(all.rast)
+base.ext <- terra::ext(all.rast)
+base.crs <- terra::crs(all.rast)
+# preprocess covariates.
+for (i in 1:5) {
+ temp <- matrix(all.rast[[i]], byrow = T)
+ # rescale data into 0-1 scale.
+ if (i == 4){# convert GEDI density into log scale.
+ temp <- log(temp)
+ }
+ max <- max(temp, na.rm = T)
+ min <- min(temp, na.rm = T)
+ temp <- (temp - min)/(max - min)
+ # data with NAs will be assigned as 0.
+ if (i > 1) {
+ temp[which(is.na(temp))] <- 0
+ }
+ # recover raster.
+ tempRast <- terra::rast(matrix(temp, d[1], d[2], byrow = T))
+ terra::ext(tempRast) <- base.ext
+ terra::crs(tempRast) <- base.crs
+ all.rast[[i]] <- tempRast
+ print(i)
+}
+names(all.rast) <- c("year_since_disturb", "agb", "fia", "gedi", "twi")
+terra::writeRaster(all.rast, file="/projectnb/dietzelab/dongchen/anchorSites/downscale/all_rast.tif", overwrite = TRUE)
+```
+
+# Merge previous data layers.
+
+```{r}
+# load data.
+all.rast <- terra::rast("/projectnb/dietzelab/dongchen/anchorSites/downscale/all_rast.tif")
+d <- dim(all.rast)
+# grab crs and ext.
+base.crs <- terra::crs(all.rast)
+base.ext <- terra::ext(all.rast)
+# load previous data layers.
+# For this section. Please refer to `pecan/modules/assim.sequential/inst/covariates` script.
+# here I will just load the TIFF file generated using this script.
+# met.
+met <- terra::rast("/projectnb/dietzelab/dongchen/anchorSites/downscale/met.tif")
+# preprocessing (crs, ext).
+met <- terra::crop(met, all.rast)
+met <- terra::resample(met, all.rast)
+terra::ext(met) <- base.ext
+# normalize.
+nx <- terra::minmax(met)
+met <- (met - nx[1,]) / (nx[2,] - nx[1,])
+names(met) <- c("tavg", "srad", "prec", "vapr")
+# soil properties.
+SoilGrids <- terra::rast("/projectnb/dietzelab/dongchen/anchorSites/downscale/SoilGrids.tif")
+# preprocessing (crs, ext).
+SoilGrids <- terra::crop(SoilGrids, all.rast)
+SoilGrids <- terra::resample(SoilGrids, all.rast)
+terra::ext(SoilGrids) <- base.ext
+# normalize.
+nx <- terra::minmax(SoilGrids)
+SoilGrids <- (SoilGrids - nx[1,]) / (nx[2,] - nx[1,])
+names(SoilGrids) <- c("PH", "N", "SOC", "Sand")
+# Lat/lon.
+latlon <- terra::xyFromCell(all.rast[[1]], which(!is.na(all.rast[[1]][]))) %>%
+ data.frame %>%
+ `colnames<-`(c("lon", "lat"))
+lat <- lon <- matrix(all.rast[[1]], byrow = T)
+lon[which(!is.na(lon))] <- latlon[,1]
+lat[which(!is.na(lat))] <- latlon[,2]
+latlon <- c(terra::rast(matrix(lat, d[1], d[2], byrow = T)),
+ terra::rast(matrix(lon, d[1], d[2], byrow = T)))
+terra::crs(latlon) <- terra::crs(all.rast)
+terra::ext(latlon) <- terra::ext(all.rast)
+# normalize
+nx <- terra::minmax(latlon)
+latlon <- (latlon - nx[1,]) / (nx[2,] - nx[1,])
+names(latlon) <- c("lat", "lon")
+# sp::coordinates(latlon) <- ~lon+lat
+# sp::proj4string(latlon) <- terra::crs("+init=epsg:4326")
+# latlon <- terra::vect(latlon)
+# test <- terra::extract(all.rast[[1]], latlon)
+# test1 <- matrix(all.rast[[1]], byrow = T)
+# merge.
+all.data.layers <- c(all.rast[[1]], all.rast[[2]], all.rast[[3]], all.rast[[4]], all.rast[[5]],
+ met[[1]], met[[2]], met[[3]], met[[4]],
+ SoilGrids[[1]], SoilGrids[[2]], SoilGrids[[3]], SoilGrids[[4]],
+ latlon[[1]], latlon[[2]])
+terra::writeRaster(all.data.layers, file="/projectnb/dietzelab/dongchen/anchorSites/downscale/all_data_layers.tif", overwrite=T)
+```
+# determine cluster sizes.
+
+```{r}
+library(pathviewr)
+load("/projectnb/dietzelab/dongchen/anchorSites/downscale/clusts.Rdata")
+sizes <- c()
+for (i in seq_along(tot.clust)) {
+ inds <- which(names(tot.clust[[i]]) == "tot.withinss")
+ # find elbow location.
+ df <- data.frame(x = 1:20, y = unlist(tot.clust[[i]][inds]))
+ size <- find_curve_elbow(df)
+ sizes <- c(sizes, size)
+}
+```
+
+# NLCD Vs MODIS.
+
+```{r}
+# load land cover map at 2012.
+base.map <- terra::rast("/projectnb/dietzelab/dongchen/anchorSites/downscale/MODIS_LC/y2012/merged_reproj_crop_resample.tif")
+LC <- matrix(base.map[[5]], byrow = T)
+nlcd <- terra::rast("/projectnb/dietzelab/dongchen/anchorSites/downscale/nlcd_resample_clip.tif")
+nlcd_mat <- matrix(nlcd, byrow = T)
+non.na.nlcd.inds <- which(!is.na(nlcd_mat))
+evergreen.inds.nlcd <- which(nlcd_mat==42)
+
+# Lat/lon.
+latlon <- terra::xyFromCell(base.map[[5]], which(!is.na(base.map[[5]][]))) %>%
+ data.frame %>%
+ `colnames<-`(c("lon", "lat"))
+lat <- lon <- matrix(base.map[[5]], byrow = T)
+lon[which(!is.na(lon))] <- latlon[,1]
+lat[which(!is.na(lat))] <- latlon[,2]
+
+# reclassify modis lc.
+LC[which(LC == 6 & nlcd_mat == 42 & lon <= -100)] <- 1 # grass to evergreen needle leaf forest.
+LC[which(LC == 4 & nlcd_mat == 42)] <- 1 # deciduous to evergreen needle leaf forest.
+
+# write into new GeoTIFF file.
+LC.rast <- terra::rast(matrix(LC, dim(base.map)[1], dim(base.map)[2], byrow = T))
+terra::crs(LC.rast) <- terra::crs(base.map)
+terra::ext(LC.rast) <- terra::ext(base.map)
+terra::writeRaster(LC.rast, file="/projectnb/dietzelab/dongchen/anchorSites/downscale/MODIS_NLCD_LC.tif", overwrite=TRUE)
+```
+
+# load all data layers.
+
+```{r}
+library(dplyr)
+library(xts)
+library(PEcAn.all)
+library(purrr)
+library(furrr)
+library(lubridate)
+library(nimble)
+library(ncdf4)
+library(PEcAnAssimSequential)
+library(dplyr)
+library(sp)
+library(raster)
+library(zoo)
+library(ggplot2)
+library(mnormt)
+library(sjmisc)
+library(stringr)
+library(doParallel)
+library(doSNOW)
+library(Kendall)
+library(lgarch)
+library(parallel)
+setwd("/projectnb/dietzelab/dongchen/anchorSites/")
+# load all data layers.
+all.data.layers <- terra::rast("/projectnb/dietzelab/dongchen/anchorSites/downscale/all_data_layers.tif")
+# load land cover at 2012.
+base.map <- terra::rast("/projectnb/dietzelab/dongchen/anchorSites/downscale/MODIS_NLCD_LC.tif")
+# handle data.
+if (!file.exists("/projectnb/dietzelab/dongchen/anchorSites/downscale/all_data_layers.Rdata")) {
+ ###################################################################################### merge previous sites.
+ # anchor sites.
+ load("/projectnb/dietzelab/dongchen/anchorSites/site_info.Rdata")
+ site.locs.anchor <- cbind(site_info[[2]], site_info[[3]]) %>% `colnames<-`(c("lat", "lon"))
+ site.name.anchor <- site_info$site_name
+
+ # 500 CONUS Sites.
+ settings.500 <- PEcAn.settings::read.settings("/projectnb/dietzelab/hamzed/SDA/ProductionRun/500Sites/SDA/Utility/SDA_500.xml")
+ site.locs.500 <- settings.500$run %>%
+ map("site") %>%
+ map_dfr(~c(.x[['lon']],.x[['lat']]) %>% as.numeric) %>%
+ t %>%
+ `colnames<-`(c("Lon","Lat"))
+ site.names.500 <- settings.500$run %>%
+ map("site") %>%
+ map_dfr(~c(.x[['name']])) %>% unlist
+
+ # Hawaii points.
+ Hawaii.pts <- data.frame(lon = c(-159.49, -155.69, -155.728, -156.458),
+ lat = c(22.05, 19.93, 19.774, 20.825)) %>% as.matrix
+ Hawaii.names <- rep("Hawaii", 4)
+
+ # FIA Tree core sites (26 valid sites).
+ tree.core.locs <- readxl::read_xlsx("/projectnb/dietzelab/dongchen/anchorSites/downscale/extra_site/MissingTreeCoreSites.xlsx")
+ tree.core.names <- paste0("FIA_Tree_Core_CN_", tree.core.locs$CN)
+ tree.core.locs <- tree.core.locs[,2:3]
+
+ # FIA Soil Core Alaska (100 valid sites).
+ soil.core.alaska.locs <- read.table("/projectnb/dietzelab/dongchen/anchorSites/downscale/extra_site/100_suggested_soil_locations_alaska.txt", header = T, sep = ",")
+ soil.core.alaska.names <- rep("soil_core_alaska", dim(soil.core.alaska.locs)[1])
+
+ # Ameriflux site (190 valid sites).
+ ameriflux.locs <- read.csv("/projectnb/dietzelab/dongchen/anchorSites/downscale/extra_site/Unmatched_Sites.csv")
+ ameriflux.names <- rep("ameriflux", dim(ameriflux.locs)[1])
+
+ # combine two settings.
+ site.locs.lat <- c(site.locs.anchor[,1], site.locs.500[,2], Hawaii.pts[,2], tree.core.locs$LAT, soil.core.alaska.locs$LAT, ameriflux.locs$Latitude..degrees.)
+ site.locs.lon <- c(site.locs.anchor[,2], site.locs.500[,1], Hawaii.pts[,1], tree.core.locs$LON, soil.core.alaska.locs$LON, ameriflux.locs$Longitude..degrees.)
+ site.names <- c(site.name.anchor, site.names.500, Hawaii.names, tree.core.names, soil.core.alaska.names, ameriflux.names)
+ site.locs <- cbind(site.locs.lat, site.locs.lon) %>% `colnames<-`(c("lat", "lon"))
+
+ # generate anchor points vector.
+ pts <- data.frame(lat = site.locs[,1], lon = site.locs[,2])
+ sp::coordinates(pts) <- ~lon+lat
+ sp::proj4string(pts) <- terra::crs(base.map)
+ pts <- terra::vect(pts)
+ anchorRast <- terra::rasterize(pts, base.map) # rasterize anchor sites points.
+ # anchorMat <- matrix(anchorRast, byrow = T)
+
+ # Lat/lon.
+ latlon <- terra::xyFromCell(base.map, which(!is.na(base.map[]))) %>%
+ data.frame %>%
+ `colnames<-`(c("lon", "lat"))
+ lat <- lon <- matrix(base.map, byrow = T)
+ lon[which(!is.na(lon))] <- latlon[,1]
+ lat[which(!is.na(lat))] <- latlon[,2]
+ # combine eco-climatic properties.
+ ecoClim <- cbind(matrix(all.data.layers[[1]], byrow = T),
+ matrix(all.data.layers[[2]], byrow = T),
+ matrix(all.data.layers[[3]], byrow = T),
+ matrix(all.data.layers[[4]], byrow = T),
+ matrix(all.data.layers[[5]], byrow = T),
+ matrix(all.data.layers[[6]], byrow = T),
+ matrix(all.data.layers[[7]], byrow = T),
+ matrix(all.data.layers[[8]], byrow = T),
+ matrix(all.data.layers[[9]], byrow = T),
+ matrix(all.data.layers[[10]], byrow = T),
+ matrix(all.data.layers[[11]], byrow = T),
+ matrix(all.data.layers[[12]], byrow = T),
+ matrix(all.data.layers[[13]], byrow = T),
+ matrix(all.data.layers[[14]], byrow = T),
+ matrix(all.data.layers[[15]], byrow = T),
+ matrix(base.map, byrow = T),
+ lat,
+ lon) %>% as.data.frame
+ colnames(ecoClim) <- c(names(all.data.layers), "LC", "latitude", "longitude")
+ na.inds <- c()
+ for (i in 1:ncol(ecoClim)) {
+ na.inds <- unique(c(na.inds, which(is.na(ecoClim[,i]))))
+ print(i)
+ }
+ na.inds <- unique(c(na.inds, which(ecoClim[,"LC"] %in% c(0, 9, 10, 11)))) # non-vegetated pixels.
+ # non.na.inds <- which(complete.cases(ecoClim[,1:11]))
+ L <- dim(ecoClim)[1]
+ non.na.inds <- which(! 1:L %in% na.inds)
+ # add anchor sites to the data layers.
+ # add anchor sites to non-na-inds.
+ # non.na.inds <- sort(unique(c(non.na.inds, which(!is.na(anchorMat)))))
+ ecoClim <- ecoClim[non.na.inds,]
+ anchorMat <- matrix(anchorRast, byrow = T)[non.na.inds] # convert raster to vector
+ anchorMat[which(is.na(anchorMat))] <- 0
+ anchorMat[which(anchorMat>0)] <- 1:length(which(anchorMat>0))
+
+ # find duplicated sites.
+ anchorMat.full <- rep(NA, L)
+ anchorMat.full[non.na.inds] <- anchorMat
+ anchor.Rast <- terra::rast(matrix(anchorMat.full, dim(base.map)[1], dim(base.map)[2], byrow = T))
+ terra::crs(anchor.Rast) <- terra::crs(base.map)
+ terra::ext(anchor.Rast) <- terra::ext(base.map)
+ anchor.extract <- terra::extract(anchor.Rast, pts)
+
+ # remove NA and duplicated sites.
+ anchor.na.inds <- which(is.na(anchor.extract$lyr.1))
+ duplicated.inds <- which(duplicated(anchor.extract$lyr.1))
+ anchor.remove.inds <- unique(c(anchor.na.inds, duplicated.inds))
+ site.names <- site.names[-anchor.remove.inds]
+ site.locs <- site.locs[-anchor.remove.inds,]
+ ecoClim$anchor <- anchorMat
+
+ # save file.
+ save(list = c("ecoClim", "non.na.inds", "L", "site.names", "site.locs"),
+ file="/projectnb/dietzelab/dongchen/anchorSites/downscale/all_data_layers.Rdata")
+} else {
+ load("/projectnb/dietzelab/dongchen/anchorSites/downscale/all_data_layers.Rdata")
+}
+```
+
+# calculating weights.
+
+```{r}
+load("/projectnb/dietzelab/dongchen/anchorSites/downscale/all_data_layers.Rdata")
+# determine cluster sizes.
+library(pathviewr)
+load("/projectnb/dietzelab/dongchen/anchorSites/downscale/clusts.Rdata")
+sizes <- c()
+for (i in seq_along(tot.clust)) {
+ inds <- which(names(tot.clust[[i]]) == "tot.withinss")
+ # find elbow location.
+ df <- data.frame(x = 1:20, y = unlist(tot.clust[[i]][inds]))
+ size <- find_curve_elbow(df)
+ sizes <- c(sizes, size)
+}
+# function for smooth division.
+smooth.div <- function(tot, v) {
+ vec <- c()
+ ceil <- TRUE
+ remain <- tot
+
+ for (i in seq_along(v)) {
+ if (i == length(v)) {
+ vec <- c(vec, remain)
+ break
+ }
+ if (ceil) {
+ temp <- ceiling(v[i])
+ } else {
+ temp <- floor(v[i])
+ }
+ ceil <- !ceil
+ remain <- remain - temp
+ vec <- c(vec, temp)
+ }
+ vec
+}
+# calculate latitude threshold by GEDI density.
+lat.threshold <- max(ecoClim$latitude[which(ecoClim$gedi>0)])
+# calculating weights by FIA and GEDI density layers.
+# FIA points that mistakenly fall within non-forest areas should be removed.
+ecoClim$fia[which(! ecoClim$LC %in% c(1:4))] <- 0
+# base weights of 1.
+weights <- rep(1, dim(ecoClim)[1])
+# FIA weights.
+weights[which(ecoClim$fia > 0)] <- 500 # FIA
+# GEDI weights.
+weights <- weights + 50 * ecoClim$gedi # GEDI density
+# # weights above GEDI boundary.
+# weights[which(ecoClim$latitude > lat.threshold)] <-
+# weights[which(ecoClim$latitude > lat.threshold)] +
+# 100*median(ecoClim$gedi[which(ecoClim$gedi>0)])
+#remove anchor sites from weights.
+weights[which(ecoClim$anchor > 0)] <- 0 # remove anchor sites.
+# conditional on LC.
+LC.inds <- vector("list", 8)
+for (i in seq_along(LC.inds)) {
+ LC.inds[[i]] <- which(ecoClim$LC == (i))
+}
+# calculate number of points for each land cover class.
+pre.selected.LC.num <- c()
+for (i in 1:8) {
+ pre.selected.LC.num <- c(pre.selected.LC.num, length(which(ecoClim$LC == i & ecoClim$anchor > 0)))
+}
+# proportion.
+proportion <- LC.inds %>% purrr::map(length) %>% unlist
+proportion <- proportion/sum(proportion)
+tot.points.num <- smooth.div(8000, (8000 - 200*8) * proportion + 200) - pre.selected.LC.num # each class has 200 base sample
+# loop over land cover classes.
+sub.sampling.num <- 6e4
+# tot.sites.num <- sites.num.remain <- 6400
+L.eco <- dim(ecoClim)[1]
+clusters <- points <- tot.clust <- vector("list", length = 8)
+for (i in seq_along(unique(ecoClim$LC))) {
+ # total pixels.
+ tot.inds <- which(ecoClim$LC == i)
+ # anchor.
+ anchor.inds <- which(ecoClim$LC == i & ecoClim$anchor > 0)
+ # north points.
+ north.tot.inds <- which(ecoClim$LC == i & ecoClim$latitude > lat.threshold)
+ # sampling.
+ if (sub.sampling.num > length(tot.inds)) {
+ north.sub.sample.size <- floor(length(tot.inds) * length(north.tot.inds)/length(tot.inds))
+ } else {
+ north.sub.sample.size <- floor(sub.sampling.num * length(north.tot.inds)/length(tot.inds))
+ }
+ north.sample.inds <- sample(x = north.tot.inds, size = north.sub.sample.size,
+ replace = F, prob = weights[north.tot.inds])
+ # south points.
+ south.tot.inds <- which(ecoClim$LC == i & ecoClim$latitude <= lat.threshold)
+ # sampling.
+ if (sub.sampling.num > length(tot.inds)) {
+ south.sub.sample.size <- floor(length(tot.inds) * length(south.tot.inds)/length(tot.inds))
+ } else {
+ south.sub.sample.size <- floor(sub.sampling.num * length(south.tot.inds)/length(tot.inds))
+ }
+ south.sample.inds <- sample(x = south.tot.inds, size = south.sub.sample.size,
+ replace = F, prob = weights[south.tot.inds])
+ # combine north and south.
+ sub.sampling.inds <- c(north.sample.inds,
+ south.sample.inds,
+ anchor.inds)
+ # k-means clustering.
+ k <- factoextra::hkmeans(ecoClim[sub.sampling.inds, c(1, 2, 5:15)],
+ sizes[i], hc.metric = "euclidean", iter.max = 50)
+ # loop over clusters.
+ point <- cluster <- c()
+ cluster.points.num <- smooth.div(tot.points.num[i], rep(tot.points.num[i]/sizes[i], sizes[i]))
+ for (j in 1:sizes[i]) {
+ # total cluster points.
+ cluster.tot.inds <- sub.sampling.inds[which(k$cluster == j)]
+ # north points.
+ cluster.north.inds <- cluster.tot.inds[which(ecoClim$latitude[cluster.tot.inds] > lat.threshold)]
+ # calculate size.
+ cluster.north.sample.size <- floor(cluster.points.num[j] * length(cluster.north.inds) / length(cluster.tot.inds))
+ # if cluster size is close to the total size it will be assigned as total size.
+ if ((cluster.points.num[j] - cluster.north.sample.size) == 1) {
+ cluster.north.sample.size <- cluster.points.num[j]
+ }
+ # sampling.
+ if (cluster.north.sample.size > 0) {
+ cluster.north.sample <- sample(x = cluster.north.inds, size = cluster.north.sample.size,
+ replace = F, prob = weights[cluster.north.inds])
+ } else {
+ cluster.north.sample <- c()
+ }
+ # south points.
+ cluster.south.inds <- cluster.tot.inds[which(ecoClim$latitude[cluster.tot.inds] <= lat.threshold)]
+ # calculate size.
+ cluster.south.sample.size <- cluster.points.num[j] - cluster.north.sample.size
+ # sampling.
+ if (cluster.south.sample.size > 0) {
+ cluster.south.sample <- sample(x = cluster.south.inds, size = cluster.south.sample.size,
+ replace = F, prob = weights[cluster.south.inds])
+ } else {
+ cluster.south.sample <- c()
+ }
+ # summarize.
+ point <- c(point, cluster.north.sample, cluster.south.sample)
+ cluster <- c(cluster, rep(j, cluster.points.num[j]))
+ }
+ # add anchor sites.
+ point <- c(point, anchor.inds)
+ cluster <- c(cluster, k$cluster[which(sub.sampling.inds %in% anchor.inds)])
+ # check GEDI and FIA.
+ plot(density(ecoClim$gedi[tot.inds[which(ecoClim$latitude[tot.inds] <= lat.threshold)]]))
+ lines(density(ecoClim$gedi[point[which(ecoClim$latitude[point] <= lat.threshold & ecoClim$anchor[point]==0)]]), col=2)
+ print(length(which(ecoClim$fia[point] > 0)))
+ # load to lists.
+ points[[i]] <- point
+ clusters[[i]] <- cluster
+ tot.clust[[i]] <- list(k_means = k, inds = sub.sampling.inds)
+ # print progress.
+ print(i)
+}
+# check GEDI and FIA.
+plot(density(ecoClim$gedi[which(ecoClim$latitude <= lat.threshold)]),
+ xlab = "Normalized GEDI Density at Log Scale",
+ ylab = "Density",
+ main = "",
+ col = 1,
+ lwd = 2,
+ ylim = c(0, 4.5))
+lines(density(ecoClim$gedi[unlist(points)[which(ecoClim$latitude[unlist(points)] <= lat.threshold & ecoClim$anchor[unlist(points)]==0)]]), col=2, lwd = 2)
+legend("topright", lty = 1, lwd = 2, col = c(1, 2), legend = c("Before sampling", "After Sampling"))
+length(which(ecoClim$fia[unlist(points)] > 0))
+# save files.
+save(list = c("points", "clusters"), file = "/projectnb/dietzelab/dongchen/anchorSites/downscale/final_cluster.Rdata")
+save(tot.clust, file = "/projectnb/dietzelab/dongchen/anchorSites/downscale/cluster_by_class.Rdata")
+```
+
+```{r}
+load("/projectnb/dietzelab/dongchen/anchorSites/downscale/final_cluster.Rdata")
+all.data.layers <- terra::rast("/projectnb/dietzelab/dongchen/anchorSites/downscale/all_data_layers.tif")
+d <- dim(all.data.layers)
+vec1 <- vec2 <- vec3 <- vec4 <- vec5 <- names <- rep(NA, d[1]*d[2])
+pre.selected.inds <- which(ecoClim$anchor > 0)
+
+names[non.na.inds[unlist(points)]] <- 0
+names[non.na.inds[pre.selected.inds]] <- seq_along(site.names)
+
+vec1[non.na.inds[unlist(points)]] <- unlist(clusters)
+# vec1[non.na.inds[pre.selected.inds]] <- 0
+vec2[non.na.inds[unlist(points)]] <- ecoClim$LC[unlist(points)]
+vec3[non.na.inds[unlist(points)]] <- 0
+vec3[non.na.inds[pre.selected.inds]] <- 1
+# only for anchor sites.
+vec4[non.na.inds[pre.selected.inds]] <- ecoClim$anchor[pre.selected.inds] #1
+vec5[non.na.inds[pre.selected.inds]] <- ecoClim$LC[pre.selected.inds]
+# convert vector to raster.
+cluster.rast <- c(terra::rast(matrix(vec1, d[1], d[2], byrow = T)),
+ terra::rast(matrix(vec2, d[1], d[2], byrow = T)),
+ terra::rast(matrix(names, d[1], d[2], byrow = T)))
+terra::ext(cluster.rast) <- terra::ext(all.data.layers)
+terra::crs(cluster.rast) <- terra::crs(all.data.layers)
+names(cluster.rast) <- c("cluster", "landcover", "site_order")
+# terra::writeRaster(cluster.rast, file="/projectnb/dietzelab/dongchen/anchorSites/downscale/final_clust.tif", overwrite=TRUE)
+# cluster.rast <- terra::rast("/projectnb/dietzelab/dongchen/anchorSites/downscale/final_clust.tif")
+clust_vec <- terra::as.points(cluster.rast)
+terra::writeVector(clust_vec, "/projectnb/dietzelab/dongchen/anchorSites/downscale/clust_points.shp", overwrite=TRUE)
+
+pts <- sf::st_read("/projectnb/dietzelab/dongchen/anchorSites/downscale/clust_points.shp")
+pts.order <- pts$site_order
+site_names <- rep("weighted_sample", dim(pts)[1])
+site_names[which(pts.order>0)] <- site.names
+pts$site_names <- site_names
+sf::st_write(pts, "/projectnb/dietzelab/dongchen/anchorSites/downscale/clust_points_by_name.shp")
+# write anchor shape file.
+anchor.rast <- c(terra::rast(matrix(vec4, d[1], d[2], byrow = T)),
+ terra::rast(matrix(vec5, d[1], d[2], byrow = T)))
+terra::ext(anchor.rast) <- terra::ext(all.data.layers)
+terra::crs(anchor.rast) <- terra::crs(all.data.layers)
+names(anchor.rast) <- c("is_anchor", "landcover")
+anchor_vec <- terra::as.points(anchor.rast)
+terra::writeVector(anchor_vec, "/projectnb/dietzelab/dongchen/anchorSites/downscale/anchor_points.shp", overwrite=TRUE)
+
+```
+
+# calculate proportions.
+
+```{r}
+max(ecoClim[which(ecoClim[,"gedi"]>0),"latitude"])
+```
+
+```{r}
+library(purrr)
+# conditional on LC.
+LC.inds <- vector("list", 8)
+for (i in seq_along(LC.inds)) {
+ LC.inds[[i]] <- which(LC == (i))
+}
+# proportion.
+proportion <- LC.inds %>% purrr::map(length) %>% unlist
+proportion <- proportion/sum(proportion)
+# sub-sampling.
+# calculate weights based on GEDI density and FIA density.
+FIA <- matrix(all.data.layers[[3]], byrow = T)
+FIA[which(FIA > 0)] <- 50
+# assign zero to FIA weights when pixel in non-forest area.
+FIA[which(! LC %in% c(1,2,3,4))] <- 0
+
+# YSD <- matrix(all.data.layers[[1]], byrow = T)
+GEDI <- matrix(all.data.layers[[4]], byrow = T) * 3
+weights <- 1 + FIA + GEDI
+non_na_inds <- which(!is.na(weights) & complete.cases(ecoClim[,1:11]))
+# add an extra weight of 2 to pixels above the GEDI coverage boundary (51.6 deg N).
+latlon <- terra::xyFromCell(all.data.layers[[1]], which(!is.na(all.data.layers[[1]][]))) %>%
+ data.frame %>%
+ `colnames<-`(c("lon", "lat"))
+lat <- lon <- matrix(all.data.layers[[1]], byrow = T)
+lon[which(!is.na(lon))] <- latlon[,1]
+lat[which(!is.na(lat))] <- latlon[,2]
+non.GEDI.inds <- non_na_inds[which(lat[non_na_inds] > 51.6)]
+weights[non.GEDI.inds] <- weights[non.GEDI.inds] + 2
+
+# filter out pre-selected sites.
+pre.selected.inds <- pre.selected.inds[-which(!pre.selected.inds %in% non_na_inds)]
+pre.selected.LC <- LC[pre.selected.inds]
+
+weights[pre.selected.inds] <- 0 # remove previous selected sites.
+```
+
+# new k-means clustering
+
+```{r}
+# function for smooth division.
+smooth.div <- function(tot, v) {
+ vec <- c()
+ ceil <- TRUE
+ remain <- tot
+
+ for (i in seq_along(v)) {
+ if (i == length(v)) {
+ vec <- c(vec, remain)
+ break
+ }
+ if (ceil) {
+ temp <- ceiling(v[i])
+ } else {
+ temp <- floor(v[i])
+ }
+ ceil <- !ceil
+ remain <- remain - temp
+ vec <- c(vec, temp)
+ }
+ vec
+}
+# calculate number of points for each land cover class.
+pre.selected.LC.num <- c()
+for (i in 1:8) {
+ pre.selected.LC.num <- c(pre.selected.LC.num, length(which(pre.selected.LC == i)))
+}
+sample.size <- 6e4
+cluster.size <- 20
+# tot.points.num <- 5e3
+tot.points.num <- smooth.div(8000, (8000 - 200*8) * proportion + 200) - pre.selected.LC.num # each class has 200 base sample size.
+
+# for loop over land cover classes.
+points <- clusters <- vector("list", length(LC.inds))
+tot.clust <- vector("list", length(LC.inds))
+for (i in seq_along(LC.inds)) {
+ # calculate total points number based on proportion.
+ # points.num <- ceiling(tot.points.num * proportion[i])
+ points.num <- tot.points.num[i]
+ print(points.num)
+ # sample by weights.
+ inds <- LC.inds[[i]][which(LC.inds[[i]] %in% non_na_inds)] # per cluster non NA inds.
+ if (sample.size > length(inds)) {
+ sample.inds <- inds
+ } else {
+ sample.inds <- sample(x = inds, size = sample.size, replace = F, prob = weights[inds])
+ }
+ # merge with previous sites.
+ sample.inds <- unique(c(sample.inds, pre.selected.inds[which(pre.selected.LC == i)]))
+ ecoClim.sample <- ecoClim[sample.inds,]
+ # k-means.
+ # lc.clust <- c()
+ # for (ksize in 1:20) {
+ # k <- factoextra::hkmeans(ecoClim.sample, ksize, hc.metric = "euclidean", iter.max = 50)
+ # lc.clust <- c(lc.clust, k)
+ # }
+ # tot.clust[[i]] <- lc.clust
+ k <- factoextra::hkmeans(ecoClim.sample, sizes[i], hc.metric = "euclidean", iter.max = 50)
+ cluster.points.nums <- smooth.div(points.num, rep(points.num/sizes[i], sizes[i]))
+ LC.points <- LC.clusters <- c()
+ for (j in 1:sizes[i]) {
+ cluster.inds <- sample.inds[which(k$cluster == j)]
+ # calculate number of points for this cluster.
+ # cluster.proportion <- length(cluster.inds)/sample.size
+ # cluster.points.num <- ceiling(points.num * cluster.proportion)
+ cluster.points.num <- cluster.points.nums[j]
+ # sampling based on weights.
+ if (length(which(weights[cluster.inds] > 0)) < cluster.points.num) {
+ cluster.points <- sample(x = cluster.inds, size = cluster.points.num, replace = F)
+ } else {
+ cluster.points <- sample(x = cluster.inds, size = cluster.points.num, replace = F, prob = weights[cluster.inds])
+ }
+ LC.points <- c(LC.points, cluster.points)
+ LC.clusters <- c(LC.clusters, rep(j, cluster.points.num))
+ }
+ # add previous selected sites.
+ LC.points <- c(LC.points, pre.selected.inds[which(pre.selected.LC == i)])
+ # LC.clusters <- c(LC.clusters, rep(0, length(pre.selected.inds[which(pre.selected.LC == i)])))
+ LC.clusters <- c(LC.clusters, k$cluster[which(sample.inds %in% pre.selected.inds[which(pre.selected.LC == i)])])
+
+ points[[i]] <- LC.points
+ clusters[[i]] <- LC.clusters
+ tot.clust[[i]] <- list(k_means = k, inds = sample.inds)
+ print(i)
+}
+save(list = c("points", "clusters"), file = "/projectnb/dietzelab/dongchen/anchorSites/downscale/final_cluster.Rdata")
+save(tot.clust, file = "/projectnb/dietzelab/dongchen/anchorSites/downscale/cluster_by_class.Rdata")
+# convert points to raster.
+d <- dim(all.data.layers)
+vec1 <- vec2 <- vec3 <- vec4 <- vec5 <- rep(NA, d[1]*d[2])
+vec1[unlist(points)] <- unlist(clusters)
+vec1[pre.selected.inds] <- 0
+vec2[unlist(points)] <- LC[unlist(points)]
+vec3[unlist(points)] <- 0
+vec3[pre.selected.inds] <- 1
+# only for anchor sites.
+vec4[pre.selected.inds] <- 1
+vec5[pre.selected.inds] <- LC[pre.selected.inds]
+# convert vector to raster.
+cluster.rast <- c(terra::rast(matrix(vec1, d[1], d[2], byrow = T)),
+ terra::rast(matrix(vec2, d[1], d[2], byrow = T)),
+ terra::rast(matrix(vec3, d[1], d[2], byrow = T)))
+terra::ext(cluster.rast) <- terra::ext(all.data.layers)
+terra::crs(cluster.rast) <- terra::crs(all.data.layers)
+names(cluster.rast) <- c("cluster", "landcover", "pre-selected")
+terra::writeRaster(cluster.rast, file="/projectnb/dietzelab/dongchen/anchorSites/downscale/final_clust.tif", overwrite=TRUE)
+
+cluster.rast <- terra::rast("/projectnb/dietzelab/dongchen/anchorSites/downscale/final_clust.tif")
+clust_vec <- terra::as.points(cluster.rast)
+terra::writeVector(clust_vec, "/projectnb/dietzelab/dongchen/anchorSites/downscale/clust_points.shp", overwrite=TRUE)
+
+# write anchor shape file.
+anchor.rast <- c(terra::rast(matrix(vec4, d[1], d[2], byrow = T)),
+ terra::rast(matrix(vec5, d[1], d[2], byrow = T)))
+terra::ext(anchor.rast) <- terra::ext(all.data.layers)
+terra::crs(anchor.rast) <- terra::crs(all.data.layers)
+names(anchor.rast) <- c("is_anchor", "landcover")
+anchor_vec <- terra::as.points(anchor.rast)
+terra::writeVector(anchor_vec, "/projectnb/dietzelab/dongchen/anchorSites/downscale/anchor_points.shp", overwrite=TRUE)
+
+FIA.selected <- FIA[unlist(points)]
+length(which(FIA.selected>0))
+length(which(FIA[points[[8]]]>0))
+# GEDI summary.
+gedi.mat <- log(matrix(terra::rast("/projectnb/dietzelab/dongchen/anchorSites/downscale/GEDI_Density/gedi_density_reproj_crop_resample.tif"), byrow = T))
+length(which(gedi.mat[unlist(points)] > 0))
+
+ind <- unlist(points)[which(! unlist(points) %in% pre.selected.inds)]
+inds <- ind[which(lat[ind] < 51.6)]
+inds1 <- non_na_inds[which(lat[non_na_inds] < 51.6)]
+
+plot(density(na.omit(gedi.mat[inds1])), col = 1, lwd = 2, ylim = c(0, 0.55))
+lines(density(na.omit(gedi.mat[inds])), col = 2, lwd = 2)
+legend("topleft", col=c(1, 2), lty=1, lwd=2, legend=c("GEDI density", "GEDI density after weighted sampling"))
+
+plot(density(na.omit(weights[non_na_inds])), col = 1, lwd = 2)
+lines(density(na.omit(weights[unlist(points)])), col = 2, lwd = 2)
+legend("topright", col=c(1, 2), lty=1, lwd=2, legend=c("Weights density", "Weights density after weighted sampling"))
+
+weights.non.weighted <- weights[inds1]
+weights.weighted <- weights[inds]
+
+plot(density(weights.non.weighted))
+lines(density(weights.weighted), col=2)
+
+plot(sort(weights[non_na_inds]))
+```
+
+# cluster visualizations.
+
+```{r}
+library(ggplot2)
+# load datasets.
+load("/projectnb/dietzelab/dongchen/anchorSites/downscale/final_cluster.Rdata")
+load("/projectnb/dietzelab/dongchen/anchorSites/downscale/cluster_by_class.Rdata")
+all.data.layers <- terra::rast("/projectnb/dietzelab/dongchen/anchorSites/downscale/all_data_layers.tif")
+lc_type <- c("Evergreen_Needleleaf_Trees",
+ "Evergreen_Broadleaf_Trees",
+ "Deciduous_Needleleaf_Trees",
+ "Deciduous_Broadleaf_Trees",
+ "Shrub",
+ "Grass",
+ "Cereal_Croplands",
+ "Broadleaf_Croplands")
+# initialize variables.
+k.plots <- k.pca <- vector("list", length = length(tot.clust))
+pca.loading <- c()
+d <- dim(all.data.layers)
+cent.locs <- vector("list", length = length(tot.clust))
+for (i in seq_along(tot.clust)) {
+ # progress.
+ print(i)
+ # grab ecoClim for the current land cover class.
+ df <- ecoClim[tot.clust[[i]]$inds, c(1,2, 5:15)]
+ # df$group <- tot.clust[[i]]$k_means$cluster
+ # pca_res <- vegan::rda(df[,-14])
+ # biplot(pca_res)
+ # k-means distance visualization.
+ # k.plots[[i]] <- factoextra::fviz_cluster(tot.clust[[i]]$k_means, df)
+ # PCA visualization.
+ pca_res <- prcomp(as.matrix(df), center = TRUE, scale. = TRUE)
+ summ <- summary(pca_res)
+ variance.explained <- summ$importance[2,]
+ pca.loading <- rbind(pca.loading, t(pca_res$rotation[,1:2]))
+ plot_data <- cbind(as.data.frame(pca_res$x[, 1:2]), cluster = as.character(tot.clust[[i]]$k_means$cluster))
+ # calculate center points.
+ cent.loc <- data.frame()
+ for (j in seq_along(unique(tot.clust[[i]]$k_means$cluster))) {
+ cent.loc <- rbind(cent.loc, list(PC1 = median(plot_data[which(plot_data$cluster == j),1]),
+ PC2 = median(plot_data[which(plot_data$cluster == j),2])))
+ }
+ cent.loc$cluster <- as.character(unique(tot.clust[[i]]$k_means$cluster))
+ cent.locs[[i]] <- cent.loc
+ k.pca[[i]] <- ggplot() +
+ geom_point(data = plot_data, mapping = aes(x = PC1, y = PC2, colour = cluster)) +
+ geom_point(data = cent.loc, mapping = aes(x = PC1, y = PC2, fill = cluster),
+ shape = 23, size = 5, colour = "black") +
+ labs(x = paste0("PC1 (", variance.explained[1]*100, "%)"),
+ y = paste0("PC2 (", variance.explained[2]*100, "%)"))
+ # split shape file by land cover classes.
+ vec <- rep(NA, d[1]*d[2])
+ vec[non.na.inds[points[[i]]]] <- clusters[[i]]
+ # convert vector to raster.
+ cluster.rast <- terra::rast(matrix(vec, d[1], d[2], byrow = T))
+ terra::ext(cluster.rast) <- terra::ext(all.data.layers)
+ terra::crs(cluster.rast) <- terra::crs(all.data.layers)
+ names(cluster.rast) <- c("cluster")
+ clust_vec <- terra::as.points(cluster.rast)
+ terra::writeVector(clust_vec, file.path("/projectnb/dietzelab/dongchen/anchorSites/downscale/cluster_vis/", paste0(lc_type[i], ".shp")), overwrite = TRUE)
+}
+colnames(pca.loading) <- names(all.data.layers)[c(1:2, 5:15)]
+rownames <- c()
+for (i in seq_along(lc_type)) {
+ rownames <- c(rownames, paste0("PC1", lc_type[i]), paste0("PC2", lc_type[i]))
+}
+row.names(pca.loading) <- rownames
+write.csv(pca.loading, file="/projectnb/dietzelab/dongchen/anchorSites/downscale/pca_loadings.csv")
+# # plot cluster sizes.
+# load("/projectnb/dietzelab/dongchen/anchorSites/downscale/clusts.Rdata")
+# for (i in seq_along(tot.clust)) {
+# inds <- which(names(tot.clust[[i]]) == "tot.withinss")
+# plot(unlist(tot.clust[[i]][inds]), xlab="cluster size", ylab="tot.within.ss")
+# abline(v=sizes[i], col=2, lwd=2)
+# }
+```
+
+# some comparisons.
+
+```{r}
+# weights.
+plot(density(weights[non_na_inds]), col=1, main = "Weights Comparison", xlab="Weights", ylab="Density")
+lines(density(weights[unlist(points)]), col=2)
+legend("topright", lty=1, col=c(1, 2), legend=c("background density", "sampled density"))
+
+# FIA density.
+vec <- matrix(all.data.layers[[3]], byrow = T)
+plot(density(vec[non_na_inds]), col=1, main = "FIA Comparison", xlab="Weights", ylab="Density")
+lines(density(vec[unlist(points)]), col=2)
+legend("topright", lty=1, col=c(1, 2), legend=c("background density", "sampled density"))
+
+# # time since disturbance.
+# vec <- matrix(all.data.layers[[1]], byrow = T)
+# plot(density(vec[non_na_inds]), col=1, main = "Time-since-disturbance Comparison", xlab="Weights", ylab="Density")
+# lines(density(vec[unlist(points)]), col=2)
+# legend("topright", lty=1, col=c(1, 2), legend=c("background density", "sampled density"))
+
+# GEDI density.
+vec <- matrix(all.data.layers[[4]], byrow = T)
+plot(density(vec[non_na_inds]), col=1, main = "GEDI Comparison", xlab="Weights", ylab="Density")
+lines(density(vec[unlist(points)]), col=2)
+legend("topright", lty=1, col=c(1, 2), legend=c("background density", "sampled density"))
+```
+
+# k-means clustering.
+
+```{r}
+# load data.
+all.rast <- terra::rast("/projectnb/dietzelab/dongchen/anchorSites/downscale/all_rast.tif")
+base.crs <- terra::crs(all.rast)
+base.ext <- terra::ext(all.rast)
+# resample to 25km.
+all.rast <- terra::aggregate(all.rast, 30, fun = mean, na.rm=TRUE)
+d <- dim(all.rast)
+# convert raster to data frame.
+df <- data.frame(matrix(all.rast[[1]], byrow = T))
+for (i in 2:5) {
+ temp <- matrix(all.rast[[i]], byrow = T)
+ df <- cbind(df, temp)
+}
+# record NA index for later recovering process.
+non_na_inds <- which(!is.na(df[,1]))
+# remove NAs.
+df <- df[complete.cases(df),]
+colnames(df) <- c("stand_time", "AGB", "FIA", "GEDI", "TWI")
+# # calculate total within sum square to determine best cluster size.
+# totss <- c()
+# for (k in 1:20) {
+# temp <- factoextra::hkmeans(df, k, hc.metric = "euclidean", iter.max = 50)
+# totss <- c(totss, temp$tot.withinss)
+# print(k)
+# }
+# plot(totss, type="l")
+# points(1:20, totss, pch=18)
+# save(totss, file = "/projectnb/dietzelab/dongchen/anchorSites/downscale/totss.Rdata")
+# k-means clustering with proper size.
+k <- factoextra::hkmeans(df, 6, hc.metric = "euclidean", iter.max = 50)
+p <- factoextra::fviz_cluster(k, df); p # plotting.
+# recover to raster.
+real.vec <- rep(NA, d[1] * d[2])
+real.vec[non_na_inds] <- k$cluster
+clust.rast <- terra::rast(matrix(real.vec, d[1], d[2], byrow = T))
+terra::ext(clust.rast) <- base.ext
+terra::crs(clust.rast) <- base.crs
+terra::writeRaster(clust.rast, file="/projectnb/dietzelab/dongchen/anchorSites/downscale/cluster.tif"); terra::plot(clust.rast)
+```
+
+# select points (excluding anchor sites).
+
+```{r}
+library(purrr)
+# load cluster raster file.
+clust.rast <- terra::rast("/projectnb/dietzelab/dongchen/anchorSites/downscale/cluster.tif")
+d <- dim(clust.rast)
+# load pecan settings for previous anchor sites.
+settings <- PEcAn.settings::read.settings("/projectnb/dietzelab/dongchen/anchorSites/SDA/pecan.xml")
+# convert settings to lat/lon.
+site.ids <- settings$run %>%
+ purrr::map('site') %>%
+ purrr::map('id') %>%
+ base::unlist() %>%
+ base::as.character()
+site.locs <- settings$run %>%
+ map("site") %>%
+ map_dfr(~c(.x[['lon']],.x[['lat']]) %>% as.numeric) %>%
+ t %>%
+ `colnames<-`(c("Lon","Lat")) %>%
+ `rownames<-`(site.ids)
+# generate anchor points vector.
+pts <- data.frame(lat = site.locs[,2], lon = site.locs[,1])
+sp::coordinates(pts) <- ~lon+lat
+sp::proj4string(pts) <- terra::crs("+init=epsg:4326")
+pts <- terra::vect(pts)
+# convert raster to vector for convenience.
+anchorRast <- terra::rasterize(pts, clust.rast, fun="length") # rasterize anchor sites points.
+anchorMat <- matrix(anchorRast, byrow = T) # convert raster to vector.
+clustMat <- matrix(clust.rast, byrow = T) # convert cluster raster to vector.
+# grab index for different clusters.
+# initialize lists of index for different clusters and selected points at different clusters.
+clusterInds <- inds <- vector("list", length = length(unique(clustMat))-1)
+for (i in seq_along(clusterInds)) {
+ clusterInds[[i]] <- which(clustMat == i)
+}
+# grab the rest points.
+# define total points number for each cluster.
+numPerClust <- 1000
+for (i in seq_along(clusterInds)) {
+ # grab index for non-anchor points.
+ non.anchor.inds <- clusterInds[[i]][which(is.na(anchorMat[clusterInds[[i]]]))]
+ # calculate the number for rest points selection.
+ num.sites <- numPerClust - (length(clusterInds[[i]]) - length(non.anchor.inds))
+ # sample from non-anchor points.
+ inds[[i]] <- sample(non.anchor.inds, num.sites)
+}
+# combine index for selected points.
+totInds <- sort(do.call("c", inds))
+# fill in the full-length vector.
+totIndsVec <- rep(NA, length(anchorMat))
+# tag selected points as 1.
+totIndsVec[totInds] <- 1
+# convert vector to raster.
+totIndsRast <- terra::rast(matrix(totIndsVec, d[1], d[2], byrow = T))
+terra::ext(totIndsRast) <- terra::ext(clust.rast)
+terra::crs(totIndsRast) <- terra::crs(clust.rast)
+terra::plot(totIndsRast) # verify.
+# grab coordinates of selected points.
+tot.lat.lon <- terra::xyFromCell(totIndsRast, which(totIndsRast[] == 1))
+```
\ No newline at end of file
diff --git a/modules/assim.sequential/inst/anchor/NA_downscale_script.R b/modules/assim.sequential/inst/anchor/NA_downscale_script.R
new file mode 100644
index 00000000000..a066bebbedf
--- /dev/null
+++ b/modules/assim.sequential/inst/anchor/NA_downscale_script.R
@@ -0,0 +1,280 @@
+library(purrr)
+library(foreach)
+library(PEcAnAssimSequential)
+setwd("/projectnb/dietzelab/dongchen/anchorSites/NA_runs/")
+# prepare stand age time-series.
+modis.lc.folder <- "/projectnb/dietzelab/dongchen/anchorSites/NA_runs/MODIS_LC/LC"
+stand.age.out.folder <- "/projectnb/dietzelab/dongchen/anchorSites/NA_runs/MODIS_LC/stand_age"
+# filter land cover time-series.
+# loop over years.
+# read tiff file.
+forest_type <- c(1:4)
+grass_type <- c(5:8)
+non_veg_type <- c(0, 9, 10, 11)
+base.map <- terra::rast("/projectnb/dietzelab/dongchen/anchorSites/downscale/base_map.tiff")
+base_crs <- terra::crs(base.map)
+base_ext <- terra::ext(base.map)
+# load forest age data.
+forest_age <- matrix(terra::rast("/projectnb/dietzelab/dongchen/anchorSites/downscale/forest_age/forest_age_2010_TC000_crop.tiff"), byrow = T)
+# calculate mean age for different LC types.
+LC <- matrix(terra::rast(file.path(modis.lc.folder, paste0(2010, ".tif")))[[5]], byrow = T)
+mean_age <- c()
+for (i in 1:8) {
+ mean_age <- c(mean_age, mean(forest_age[which(LC == i)], na.rm = T))
+}
+# function for filtering time series.
+filter.lc.ts <- function(vec, window.L = 3) {
+ L <- length(vec)
+ window <- c()
+ edge.case <- FALSE
+ #
+ if(length(unique(vec)) == 1) {
+ return(c(unique(vec), unique(vec), length(vec)))
+ }
+ for (i in L:1) {
+ # push item into the window.
+ window <- c(window, vec[i])
+ # print(window)
+ # if window has not reached its size.
+ if (length(window) < window.L) {
+ next
+ }
+ # window operation.
+ uni.val <- unique(window)
+ # if there is no change.
+ if (length(uni.val) == 1) {
+
+ } else if (length(uni.val) > 1) {
+ # check if head == tail.
+ if (head(window, 1) == tail(window, 1)) {
+
+ } else {
+ window.ind <- window.L - tail(which(window == head(uni.val, 1)), 1) + 1
+ return(c(tail(rev(uni.val), 2), L - i + 1 - window.ind + 1))
+ }
+ }
+ # remove the last item from the window.
+ window <- tail(window, -1)
+ }
+  # if there is no disturbance after all.
+ return(c(vec[length(vec)], vec[length(vec)], length(vec)))
+}
+
+# store MODIS land cover time-series into matrix.
+ts_lc <- c()
+for (end.year in 2012:2023) {
+ print(end.year)
+ if (end.year == 2012) {
+ start.year <- 2001
+ } else {
+ start.year <- end.year
+ }
+ # load last year MODIS LC map.
+ LC <- matrix(terra::rast(file.path(modis.lc.folder, paste0(end.year, ".tif")))[[5]], byrow = T)
+ # store MODIS land cover time-series into matrix.
+ # ts_lc <- c()
+ for (y in start.year:end.year) {
+ # load image.
+ lc_tif <- terra::rast(file.path(modis.lc.folder, paste0(y, ".tif")))
+ lc_matrix <- matrix(lc_tif[[5]], byrow = T)
+ # reclassify.
+ lc_matrix[which(lc_matrix %in% forest_type)] <- 1
+ lc_matrix[which(lc_matrix %in% grass_type)] <- 2
+ lc_matrix[which(lc_matrix %in% non_veg_type)] <- 3
+ # combine image.
+ ts_lc <- cbind(ts_lc, lc_matrix)
+ # print(y)
+ }
+  # loop over the North America (NA) pixels in parallel chunks (presumably; confirm "NA" meaning).
+ split_data.matrix <- function(matrix, chunk.size=100) {
+ ncols <- dim(matrix)[2]
+ nchunks <- (ncols-1) %/% chunk.size + 1
+ split.data <- list()
+ min <- 1
+ for (i in seq_len(nchunks)) {
+ if (i == nchunks-1) { #make last two chunks of equal size
+ left <- ncols-(i-1)*chunk.size
+ max <- min+round(left/2)-1
+ } else {
+ max <- min(i*chunk.size, ncols)
+ }
+ split.data[[i]] <- t(matrix[,min:max,drop=FALSE])
+ min <- max+1 #for next chunk
+ }
+ return(split.data)
+ }
+ mat.lists <- split_data.matrix(t(ts_lc), floor(dim(ts_lc)[1]/parallel::detectCores()))
+ # register parallel nodes.
+ cl <- parallel::makeCluster(parallel::detectCores())
+ doSNOW::registerDoSNOW(cl)
+ res <- foreach::foreach(d = mat.lists, .packages=c("purrr")) %dopar% {
+ temp.res <- matrix(NA, dim(d)[1], 4) %>% `colnames<-`(c("from", "to", "years", "type"))
+ pb <- utils::txtProgressBar(min=1, max=dim(d)[1], style=3)
+ for (i in 1:dim(d)[1]) {
+ if (any(is.na(d[i,]))) next
+ temp.res[i, 1:3] <- filter.lc.ts(d[i,])
+ # grab change patterns.
+ if (all(temp.res[i, 1:2] == c(1, 2))) {
+ temp.res[i, 4] <- 1
+ } else if (all(temp.res[i, 1:2] == c(1, 3))) {
+ temp.res[i, 4] <- 2
+ } else if (all(temp.res[i, 1:2] == c(2, 3))) {
+ temp.res[i, 4] <- 3
+ } else if (all(temp.res[i, 1:2] == c(2, 1))) {
+ temp.res[i, 4] <- 4
+ } else if (all(temp.res[i, 1:2] == c(3, 1))) {
+ temp.res[i, 4] <- 5
+ } else if (all(temp.res[i, 1:2] == c(3, 2))) {
+ temp.res[i, 4] <- 6
+ }
+ utils::setTxtProgressBar(pb, i)
+ }
+ return(temp.res)
+ }
+ res <- do.call(rbind, res)
+  # NOTE(review): original comment was truncated — apparently any forest pixel tagged as grassland should be replaced; confirm intent.
+ # load forest age data.
+ forest_age <- matrix(terra::rast("/projectnb/dietzelab/dongchen/anchorSites/downscale/forest_age/forest_age_2010_TC000_crop.tiff"), byrow = T)
+ forest_age <- cbind(forest_age, res, LC) %>% `colnames<-`(c("forest_age", "from", "to", "years", "type", "LC"))
+ forest_age <- split_data.matrix(t(forest_age), floor(dim(forest_age)[1]/parallel::detectCores()))
+ forest_age <- foreach::foreach(d = forest_age, .packages=c("purrr")) %dopar% {
+ for (i in 1:dim(d)[1]) {
+      # if it's disturbed vegetation.
+ if (is.na(d[i, "years"])) next
+ if (d[i, "years"] < (end.year - 2000)) {
+ d[i, "forest_age"] <- d[i, "years"]
+ next
+ }
+ # no record for the forest age.
+ if (is.na(d[i, "forest_age"])) {
+ # if it is non vegetation.
+ if (d[i, "to"] == 3) {
+ # forest_age[i] <- 0
+ next
+ } else {
+ # if it's non-disturbed vegetation.
+ d[i, "forest_age"] <- mean_age[d[i, "LC"]]
+ }
+ }
+ }
+ return(d)
+ }
+ forest_age <- do.call(rbind, forest_age)
+ # stop parallel.
+ parallel::stopCluster(cl)
+ foreach::registerDoSEQ()
+ # write to raster.
+ forest_age <- terra::rast(matrix(forest_age[,"forest_age"], 9360, 19080, byrow = T))
+ terra::ext(forest_age) <- base_ext
+ terra::crs(forest_age) <- base_crs
+ names(forest_age) <- "year_since_disturb"
+ terra::writeRaster(forest_age, file=file.path(stand.age.out.folder, paste0(end.year, "_stand_age.tif")))
+ gc()
+}
+# average ERA5 to climatic covariates.
+outdir <- "/projectnb/dietzelab/dongchen/anchorSites/NA_runs/GridMET"
+in.path <- "/projectnb/dietzelab/dongchen/anchorSites/ERA5/"
+start.dates <- c("2012-01-01", "2012-07-16", "2013-07-16",
+ "2014-07-16", "2015-07-16", "2016-07-16",
+ "2017-07-16", "2018-07-16", "2019-07-16",
+ "2020-07-16", "2021-07-16", "2022-07-16",
+ "2023-07-16")
+end.dates <- c("2012-07-15", "2013-07-15", "2014-07-15",
+ "2015-07-15", "2016-07-15", "2017-07-15",
+ "2018-07-15", "2019-07-15", "2020-07-15",
+ "2021-07-15", "2022-07-15", "2023-07-15",
+ "2024-07-15")
+# parallel average ERA5 into covariates.
+future::plan(future::multisession, workers = 5, gc = T)
+paths <- start.dates %>% furrr::future_map2(end.dates, function(d1, d2){
+ PEcAn.data.atmosphere::Average_ERA5_2_GeoTIFF(d1, d2, in.path, outdir)
+}, .progress = T) %>% unlist
+# create covariates time series.
+for (y in 2012:2024) {
+ print(y)
+ if (y == 2024) {
+ y.lc <- 2023
+ } else {
+ y.lc <- y
+ }
+ # LC <- file.path("/projectnb/dietzelab/dongchen/anchorSites/NA_runs/MODIS_LC/LC", paste0(y.lc, ".tif"))
+ LC <- "/projectnb/dietzelab/dongchen/anchorSites/downscale/MODIS_NLCD_LC.tif"
+ stand.age <- file.path("/projectnb/dietzelab/dongchen/anchorSites/NA_runs/MODIS_LC/stand_age", paste0(y.lc, "_stand_age.tif"))
+ met <- list.files("/projectnb/dietzelab/dongchen/anchorSites/NA_runs/GridMET", full.names = T)
+ met <- met[which(grepl(y, met))]
+ # setup covariates paths and variable names.
+ cov.tif.file.list <- list(LC = list(dir = LC,
+ var.name = "LC"),
+ year_since_disturb = list(dir = stand.age,
+ var.name = "year_since_disturb"),
+ agb = list(dir = "/projectnb/dietzelab/dongchen/anchorSites/downscale/AGB/agb.tif",
+ var.name = "agb"),
+ twi = list(dir = "/projectnb/dietzelab/dongchen/anchorSites/downscale/TWI/TWI_resample.tiff",
+ var.name = "twi"),
+ met = list(dir = met,
+ var.name = c("temp", "prec", "srad", "vapr")),
+ soil = list(dir = "/projectnb/dietzelab/dongchen/anchorSites/downscale/SoilGrids.tif",
+ var.name = c("PH", "N", "SOC", "Sand")))
+ covariates.dir <- stack_covariates_2_geotiff(outdir = "/projectnb/dietzelab/dongchen/anchorSites/NA_runs/SDA_50ens_2025_4_4/covariates",
+ year = y,
+ base.map.dir = "/projectnb/dietzelab/dongchen/anchorSites/downscale/base_map.tiff",
+ cov.tif.file.list = cov.tif.file.list,
+ normalize = T,
+ cores = parallel::detectCores())
+}
+
+# setup parallel downscaling.
+method <- "randomForest" #xgboost; randomForest;
+base.map.dir <- "/projectnb/dietzelab/dongchen/anchorSites/downscale/MODIS_NLCD_LC.tif"
+load("/projectnb/dietzelab/dongchen/anchorSites/NA_runs/SDA_8k_site/sda.all.forecast.analysis.Rdata")
+variables <- c("AbvGrndWood", "LAI", "SoilMoistFrac", "TotSoilCarb")
+settings <- "/projectnb/dietzelab/dongchen/anchorSites/NA_runs/SDA_8k_site/shapefile/pts.shp"
+outdir <- "/projectnb/dietzelab/dongchen/anchorSites/NA_runs/SDA_8k_site/"
+cores <- 28
+date <- seq(as.Date("2012-07-15"), as.Date("2024-07-15"), "1 year")
+# loop over years.
+for (i in seq_along(date)) {
+ print(i)
+ # Assemble covariates.
+ covariates.dir <- file.path("/projectnb/dietzelab/dongchen/anchorSites/NA_runs/covariates_lc_ts/covariates_with_LAI", paste0("covariates_", lubridate::year(date[i]), ".tiff"))
+ # grab analysis.
+ analysis.yr <- analysis.all[[i]]
+ time <- date[i]
+ # loop over carbon types.
+ for (j in seq_along(variables)) {
+ # setup folder.
+ variable <- variables[j]
+ folder.path <- file.path(file.path(outdir, "downscale_maps_analysis_lc_ts_noGEDI_rf"), paste0(variables[j], "_", date[i]))
+ dir.create(folder.path)
+ saveRDS(list(settings = settings,
+ analysis.yr = analysis.yr,
+ covariates.dir = covariates.dir,
+ time = time,
+ variable = variable,
+ folder.path = folder.path,
+ base.map.dir = base.map.dir,
+ method = method,
+ cores = cores,
+ outdir = file.path(outdir, "downscale_maps_analysis_lc_ts_noGEDI_rf")),
+ file = file.path(folder.path, "dat.rds"))
+ # prepare for qsub.
+ jobsh <- c("#!/bin/bash -l",
+ "module load R/4.1.2",
+ "echo \"require (PEcAnAssimSequential)",
+ " require (foreach)",
+ " require (purrr)",
+ " downscale_qsub_main('@FOLDER_PATH@')",
+ " \" | R --no-save")
+ jobsh <- gsub("@FOLDER_PATH@", folder.path, jobsh)
+ writeLines(jobsh, con = file.path(folder.path, "job.sh"))
+ # qsub command.
+ qsub <- "qsub -l h_rt=24:00:00 -l mem_per_core=4G -l buyin -pe omp @CORES@ -V -N @NAME@ -o @STDOUT@ -e @STDERR@ -S /bin/bash"
+ qsub <- gsub("@CORES@", cores, qsub)
+ qsub <- gsub("@NAME@", paste0("ds_", i, "_", j), qsub)
+ qsub <- gsub("@STDOUT@", file.path(folder.path, "stdout.log"), qsub)
+ qsub <- gsub("@STDERR@", file.path(folder.path, "stderr.log"), qsub)
+ qsub <- strsplit(qsub, " (?=([^\"']*\"[^\"']*\")*[^\"']*$)", perl = TRUE)
+ cmd <- qsub[[1]]
+ out <- system2(cmd, file.path(folder.path, "job.sh"), stdout = TRUE, stderr = TRUE)
+ }
+}
diff --git a/modules/assim.sequential/inst/anchor/SDA_NA_runner.R b/modules/assim.sequential/inst/anchor/SDA_NA_runner.R
new file mode 100644
index 00000000000..e4d1cd959b6
--- /dev/null
+++ b/modules/assim.sequential/inst/anchor/SDA_NA_runner.R
@@ -0,0 +1,149 @@
+# loading libraries.
+library(dplyr)
+library(xts)
+library(PEcAn.all)
+library(purrr)
+library(furrr)
+library(lubridate)
+library(nimble)
+library(ncdf4)
+library(PEcAnAssimSequential)
+library(dplyr)
+library(sp)
+library(raster)
+library(zoo)
+library(ggplot2)
+library(mnormt)
+library(sjmisc)
+library(stringr)
+library(doParallel)
+library(doSNOW)
+library(Kendall)
+library(lgarch)
+library(parallel)
+library(foreach)
+library(terra)
+setwd("/projectnb/dietzelab/dongchen/anchorSites/NA_runs/SDA_8k_site/")
+
+# read settings xml file.
+settings_dir <- "/projectnb/dietzelab/dongchen/anchorSites/NA_runs/SDA_8k_site/pecan.xml"
+settings <- PEcAn.settings::read.settings(settings_dir)
+
+# update settings with the actual PFTs.
+settings <- PEcAn.settings::prepare.settings(settings)
+
+# setup the batch job settings.
+general.job <- list(cores = 28, folder.num = 35)
+batch.settings = structure(list(
+ general.job = general.job,
+ qsub.cmd = "qsub -l h_rt=24:00:00 -l mem_per_core=4G -l buyin -pe omp @CORES@ -V -N @NAME@ -o @STDOUT@ -e @STDERR@ -S /bin/bash"
+))
+settings$state.data.assimilation$batch.settings <- batch.settings
+
+# alter the ensemble size.
+settings$ensemble$size <- 100
+
+# load observations.
+load("/projectnb/dietzelab/dongchen/anchorSites/NA_runs/SDA_8k_site/observation/Rdata/obs.mean.Rdata")
+load("/projectnb/dietzelab/dongchen/anchorSites/NA_runs/SDA_8k_site/observation/Rdata/obs.cov.Rdata")
+
+# replace zero observations and variances with small numbers.
+for (i in 1:length(obs.mean)) {
+ if(is.null(obs.mean[[i]][[1]])){
+ next
+ }
+ for (j in 1:length(obs.mean[[i]])) {
+ if (length(obs.mean[[i]][[j]])==0) {
+ next
+ }
+ obs.mean[[i]][[j]][which(obs.mean[[i]][[j]]==0)] <- 0.01
+ if(length(obs.cov[[i]][[j]]) > 1){
+ diag(obs.cov[[i]][[j]])[which(diag(obs.cov[[i]][[j]]<=0.1))] <- 0.1
+ }else{
+ if(obs.cov[[i]][[j]] <= 0.1){
+ obs.cov[[i]][[j]] <- 0.1
+ }
+ }
+ }
+}
+
+# load PFT parameter file.
+load(file.path(settings$outdir, "samples.Rdata"))
+
+# execute the SDA.
+PEcAnAssimSequential::qsub_sda(settings = settings,
+ obs.mean = obs.mean,
+ obs.cov = obs.cov,
+ Q = NULL,
+ pre_enkf_params = NULL,
+ ensemble.samples = ensemble.samples,
+ outdir = NULL,
+ control = list(TimeseriesPlot = FALSE,
+ OutlierDetection=FALSE,
+ send_email = NULL,
+ keepNC = FALSE,
+ forceRun = TRUE,
+ MCMC.args = NULL,
+ merge_nc = TRUE),
+ block.index = NULL,
+ debias = list(cov.dir = "/projectnb/dietzelab/dongchen/anchorSites/NA_runs/covariates_lc_ts/covariates_nolatlon/",
+ start.year = 2014), prefix = "batch_Oct_26")
+ # debias = list(cov.dir = NULL, start.year = NULL))
+
+# export sda output.
+PEcAnAssimSequential::sda_assemble("/projectnb/dietzelab/dongchen/anchorSites/NA_runs/SDA_8k_site/batch_Oct_25/",
+ "/projectnb/dietzelab/dongchen/anchorSites/NA_runs/SDA_8k_site")
+
+# merge all netcdf files into single files across time steps.
+batch.folder <- file.path(settings$outdir, "batch_Oct_25")
+job.folders <- paste0("Job_", 1:as.numeric(settings$state.data.assimilation$batch.settings$general.job$folder.num))
+time.points <- seq(as.Date(names(obs.mean)[1]),
+ as.Date(names(obs.mean)[length(obs.mean)]),
+ paste(1, settings$state.data.assimilation$forecast.time.step)) %>% lubridate::year()
+nc.outdir <- "/projectnb/dietzelab/dongchen/anchorSites/NA_runs/SDA_8k_site/merged_nc"
+
+# check which folder has not yet merged the nc files.
+nc.files <- file.path(batch.folder, job.folders, "merged_nc", paste0(2024, ".nc"))
+inds <- which(!file.exists(nc.files))
+for (i in seq_along(inds)) {
+ print(which(inds[i]==inds)/length(inds))
+ configs <- readRDS(file.path(batch.folder, job.folders[inds[i]], "configs.rds"))
+ nc.folder <- file.path(batch.folder, job.folders[inds[i]], "merged_nc")
+ if (file.exists(nc.folder)) unlink(nc.folder, recursive = T)
+ dir.create(nc.folder)
+ temp <- PEcAn.utils::nc_merge_all_sites_by_year(model.outdir = file.path(batch.folder, job.folders[inds[i]], "out"),
+ nc.outdir = nc.folder,
+ ens.num = settings$ensemble$size,
+ site.ids = as.numeric(configs$site.ids),
+ start.date = names(obs.mean)[1],
+ end.date = names(obs.mean)[length(obs.mean)],
+ time.step = paste(1, settings$state.data.assimilation$forecast.time.step),
+ cores = parallel::detectCores() - 1)
+
+ # remove rundir and outdir.
+ unlink(file.path(batch.folder, job.folders[inds[i]], "run"), recursive = T)
+ unlink(file.path(batch.folder, job.folders[inds[i]], "out"), recursive = T)
+}
+
+# function for merging nc files.
+merge_multi_nc_files <- function (nc.files, nc.outdir, cores) {
+ # detect if we are merging the same file.
+ fname <- unique(basename(nc.files))
+ if (length(fname) != 1) {
+ PEcAn.logger::logger.info("Files are not in the same name. Please check it!")
+ return(0)
+ }
+ nc.out <- file.path(nc.outdir, fname)
+ # merge across sites using CDO command.
+ cmd <- "cdo -P @CORES@ collgrid @NC_FILES@ @NC.OUTDIR@"
+ cmd <- gsub("@CORES@", cores, cmd)
+ cmd <- gsub("@NC_FILES@", paste(nc.files, collapse = " "), cmd)
+ cmd <- gsub("@NC.OUTDIR@", nc.out, cmd)
+ out <- system(cmd, intern = TRUE, ignore.stdout = TRUE, ignore.stderr = TRUE)
+}
+# loop over time steps.
+for (t in time.points) {
+ nc.files <- file.path(batch.folder, job.folders, "merged_nc", paste0(t, ".nc"))
+ # print(length(which(file.exists(nc.files))))
+ merge_multi_nc_files(nc.files, nc.outdir, parallel::detectCores() - 1)
+}
\ No newline at end of file
diff --git a/modules/assim.sequential/inst/debias_step.R b/modules/assim.sequential/inst/debias_step.R
new file mode 100644
index 00000000000..1e5c3faedc6
--- /dev/null
+++ b/modules/assim.sequential/inst/debias_step.R
@@ -0,0 +1,697 @@
+#' Debias preprocessing utilities (internal)
+#'
+#' A small, **pure** helper module that prepares inputs for the residual
+#' debiasing step used by `sda.enkf.multisite()`. These functions **do not**
+#' read from or mutate `settings`; they operate only on the objects passed in.
+#'
+#' The module provides:
+#' 1. A stable name map between observation variable names and state names.
+#' 2. Site filtering helpers (toggle-able):
+#' - drop sites with **incomplete** covariates for the current year;
+#' - drop sites that become **inconsistent** in their observed variables over time
+#' (e.g., a site reported AGB in 2012 but is missing AGB in 2013).
+#' 3. Covariate extraction for a specific date/year, aligned to the **column order**
+#' of the state matrix `X` (one row per column of `X`).
+#' 4. Observation-vector builder aligned to `X`’s columns for a given time index.
+#' 5. Diagnostics utilities (per-column pre/post/obs comparison and per-variable RMSE).
+#' 6. A one-step debias application that:
+#' - learns residuals at t from data up to t–1,
+#' - mean-shifts the ensemble at t,
+#' - returns metrics and learner weights for logging.
+#'
+#' @section Conventions:
+#' - **Columns of `X`** correspond to site–variable pairs in the vectors `site_index`
+#' and `col_vars`. All alignment is performed using these two vectors.
+#' - **Time indexing `t`** follows the SDA driver (`t` is the current step, `t-1` is
+#' the most-recent completed step with observations to train on).
+#' - **Covariates** are provided in a *long* data frame with columns `site`, `year`,
+#' and one column per covariate layer.
+#'
+#' @keywords internal
+#' @name debias_helpers
+#' @noRd
+NULL
+
+# ------------------------------------------------------------------------------
+# (1) Name mapping
+# ------------------------------------------------------------------------------
+
+#' Map observation names -> state names
+#'
+#' This mapping is applied whenever observations are merged into the state layout.
+#' If your upstream naming changes, **edit here** to keep the rest of the code stable.
+#'
+#' @rdname debias_helpers
+#' @keywords internal
+debias_name_map <- c(
+ AGB = "AbvGrndWood",
+ LAI = "LAI",
+ SMP = "SoilMoistFrac",
+ SoilC = "TotSoilCarb"
+)
+
+# ------------------------------------------------------------------------------
+# (2) Site filtering utilities
+# ------------------------------------------------------------------------------
+
+#' Build tidy rows for learner weights (internal)
+#'
+#' Returns a data.frame with columns: time, var, learner, weight.
+#' If `w_named` has no names, they are auto-labeled as learner_1..k.
+#'
+#' @keywords internal
+debias_weights_rows <- function(time_label, var, w_named) {
+ if (is.null(names(w_named)) || any(!nzchar(names(w_named)))) {
+ names(w_named) <- paste0("learner_", seq_along(w_named))
+ }
+ data.frame(
+ time = rep(as.character(time_label), length(w_named)),
+ var = rep(as.character(var), length(w_named)),
+ learner = names(w_named),
+ weight = as.numeric(w_named),
+ stringsAsFactors = FALSE
+ )
+}
+
+#' Sites with complete covariates in a given year
+#'
+#' Returns the subset of `candidate_sites` whose covariate row at `year` has **no NA**
+#' in any covariate feature column.
+#'
+#' @param covariates_df A long data frame with columns `site`, `year`, and one column
+#' per covariate feature (e.g., climate, soils, topography layers).
+#' @param year Integer year to check (extracted via `lubridate::year()` elsewhere).
+#' @param candidate_sites Character vector of site ids to intersect with.
+#'
+#' @return Character vector of sites that have no missing covariates in `year`.
+#'
+#' @examples
+#' \dontrun{
+#' ok <- debias_sites_with_complete_covariates_year(cov_df, 2012, sites)
+#' }
+#'
+#' @rdname debias_helpers
+#' @keywords internal
+debias_sites_with_complete_covariates_year <- function(covariates_df, year, candidate_sites) {
+ df_year <- covariates_df[
+ covariates_df$year == as.integer(year) & covariates_df$site %in% candidate_sites,
+ , drop = FALSE
+ ]
+ if (nrow(df_year) == 0) return(character(0))
+
+ cov_cols <- setdiff(names(df_year), c("site", "year"))
+ if (length(cov_cols) == 0) return(character(0))
+
+ ok_mask <- rowSums(is.na(df_year[, cov_cols, drop = FALSE])) == 0L
+ df_year$site[ok_mask]
+}
+
+#' Sites that become inconsistent in observed variables over time
+#'
+#' A site is flagged **inconsistent at `t_idx`** if it is missing *any* variable at time
+#' `t_idx` that it had reported in **any earlier time** (1..`t_idx-1`). This prevents
+#' training/evaluation on sites that drop variables mid-series.
+#'
+#' @param obs.mean A list indexed by time (`[[t]]`), each entry a named list by `site`
+#' containing named numeric vectors of observed variables for that site/time.
+#' Names are observation names and will be remapped via `name_map`.
+#' @param t_idx Integer time index to assess (1-based, consistent with SDA loop).
+#' @param name_map Optional named character vector mapping observation names to state names.
+#'
+#' @return Character vector of **site ids** that are inconsistent at `t_idx`.
+#'
+#' @details
+#' This function only inspects **presence/absence** of variables, not their values.
+#'
+#' @rdname debias_helpers
+#' @keywords internal
+debias_sites_inconsistent_obs <- function(obs.mean, t_idx, name_map = debias_name_map) {
+ if (t_idx <= 1L) return(character(0))
+
+ observed_vars_at <- function(tt, site_id) {
+ om <- obs.mean[[tt]][[as.character(site_id)]]
+ if (is.null(om)) return(character(0))
+ vn <- names(om)
+ if (!is.null(name_map)) {
+ keep <- vn %in% names(name_map)
+ if (any(keep)) vn[keep] <- unname(name_map[vn[keep]])
+ }
+ vn
+ }
+
+ all_sites <- unique(unlist(lapply(obs.mean[seq_len(t_idx)], function(om_t) names(om_t))), use.names = FALSE)
+ all_sites <- as.character(all_sites)
+
+ inconsistent <- character(0)
+ for (s in all_sites) {
+ prev_union <- unique(unlist(lapply(seq_len(t_idx - 1L), observed_vars_at, site_id = s), use.names = FALSE))
+ if (length(prev_union) == 0) next
+ cur_vars <- observed_vars_at(t_idx, s)
+ if (length(setdiff(prev_union, cur_vars)) > 0) {
+ inconsistent <- c(inconsistent, s)
+ }
+ }
+ unique(inconsistent)
+}
+
+# ------------------------------------------------------------------------------
+# (3) Covariate accessors aligned to time and X’s layout
+# ------------------------------------------------------------------------------
+
+#' Covariates for a date, with optional site filtering
+#'
+#' Filters sites for the year of `obs_date` according to:
+#' - `drop_incomplete_covariates`: remove sites with any NA in covariate features.
+#' - `enforce_consistent_obs`: remove sites that became inconsistent up to `t_idx`.
+#'
+#' Returns a data frame for that **year × eligible sites**, sorted by `site`, with
+#' attributes listing which sites were dropped (useful for logging).
+#'
+#' @param covariates_df Long data frame with columns `site`, `year`, and feature columns.
+#' @param obs_date Date–time corresponding to the **previous or current** SDA step.
+#' Only the **year** is used.
+#' @param site_index Character or numeric vector giving the **site id per column of X**.
+#' @param obs.mean Observation structure (see `debias_sites_inconsistent_obs()`).
+#' @param t_idx Integer time index used to evaluate consistency up to t or t–1.
+#' @param drop_incomplete_covariates Logical. If `TRUE`, drop sites with any missing
+#' covariate this year; if `FALSE`, keep all sites present in `covariates_df` for that year.
+#' @param enforce_consistent_obs Logical. If `TRUE`, drop sites that became inconsistent
+#' in observations up to `t_idx`. Requires `obs.mean` and `t_idx`.
+#'
+#' @return A data frame with columns `site`, `year`, and covariate features for **eligible sites**.
+#' Attributes:
+#' - `dropped_missing_covariates`: sites removed for missing covariates,
+#' - `dropped_inconsistent_obs`: sites removed for observation inconsistency (if enforced).
+#'
+#' @title Covariates for a date, with optional site filtering
+#' @name debias_get_covariates_for_date
+#' @rdname debias_get_covariates_for_date
+#' @keywords internal
+debias_get_covariates_for_date <- function(covariates_df,
+ obs_date,
+ site_index,
+ obs.mean,
+ t_idx,
+ drop_incomplete_covariates = TRUE,
+ enforce_consistent_obs = TRUE) {
+ if (is.null(covariates_df)) {
+ stop("covariates_df is NULL. Provide columns: site, year, .")
+ }
+ yr <- lubridate::year(obs_date)
+ sites_used <- unique(as.character(site_index))
+
+ # (1) Filter by complete covariates (optional)
+ if (isTRUE(drop_incomplete_covariates)) {
+ complete_sites <- debias_sites_with_complete_covariates_year(covariates_df, yr, sites_used)
+ } else {
+ complete_sites <- intersect(
+ sites_used,
+ as.character(covariates_df$site[covariates_df$year == as.integer(yr)])
+ )
+ }
+
+ # (2) Optionally enforce observation consistency up to t_idx
+ if (isTRUE(enforce_consistent_obs)) {
+ if (is.null(obs.mean) || is.null(t_idx)) {
+ stop("obs.mean and t_idx must be provided when enforce_consistent_obs = TRUE.")
+ }
+ inconsistent_sites <- debias_sites_inconsistent_obs(obs.mean, t_idx, name_map = debias_name_map)
+ eligible_sites <- setdiff(complete_sites, inconsistent_sites)
+ } else {
+ eligible_sites <- complete_sites
+ }
+
+ if (length(eligible_sites) == 0) {
+ return(dplyr::tibble(site = character(0), year = integer(0)))
+ }
+
+ df_year <- covariates_df[
+ covariates_df$year == as.integer(yr) & covariates_df$site %in% eligible_sites,
+ , drop = FALSE
+ ]
+ df_year <- df_year[order(df_year$site), , drop = FALSE]
+
+ # Annotate drops for diagnostics/logging
+ attr(df_year, "dropped_missing_covariates") <- setdiff(sites_used, complete_sites)
+ if (isTRUE(enforce_consistent_obs)) {
+ attr(df_year, "dropped_inconsistent_obs") <- intersect(sites_used, debias_sites_inconsistent_obs(obs.mean, t_idx))
+ }
+
+ df_year
+}
+
+#' Expand per-site covariates to **row-per-column** alignment
+#'
+#' Converts the per-site covariate data into a matrix aligned with the **columns of `X`**.
+#' For any column whose site was filtered out, the function inserts a row of `NA`
+#' features to preserve shape and ordering.
+#'
+#' @param covariates_df See `debias_get_covariates_for_date()`.
+#' @param obs_date Date for which to fetch covariates (year is used).
+#' @param site_index Site id per column of `X`.
+#' @param obs.mean Observation structure used if enforcing consistency.
+#' @param t_idx Time index associated with `obs_date`.
+#' @param drop_incomplete_covariates,enforce_consistent_obs See above.
+#'
+#' @return A numeric matrix with **nrow = length(site_index)** and one column per
+#' covariate feature. Rows align 1:1 with the columns of `X`.
+#'
+#' @title Expand per-site covariates to row-per-column alignment
+#' @name debias_cov_by_columns
+#' @rdname debias_cov_by_columns
+#' @keywords internal
+debias_cov_by_columns <- function(covariates_df,
+ obs_date,
+ site_index,
+ obs.mean,
+ t_idx,
+ drop_incomplete_covariates = TRUE,
+ enforce_consistent_obs = TRUE) {
+ df_year <- debias_get_covariates_for_date(
+ covariates_df = covariates_df,
+ obs_date = obs_date,
+ site_index = site_index,
+ obs.mean = obs.mean,
+ t_idx = t_idx,
+ drop_incomplete_covariates = drop_incomplete_covariates,
+ enforce_consistent_obs = enforce_consistent_obs
+ )
+
+ if (nrow(df_year) == 0) {
+ # Preserve outer shape; no features to return.
+ return(matrix(numeric(0), nrow = length(site_index), ncol = 0))
+ }
+
+ feat_cols <- setdiff(names(df_year), c("site", "year"))
+ idx <- match(as.character(site_index), df_year$site)
+
+ # Filler row of NA to keep alignment when a site is missing
+ na_row <- as.list(rep(NA_real_, length(feat_cols)))
+ names(na_row) <- feat_cols
+ filler <- dplyr::as_tibble(na_row)
+
+ rows <- lapply(seq_along(idx), function(i) {
+ j <- idx[i]
+ if (is.na(j)) filler else df_year[j, feat_cols, drop = FALSE]
+ })
+ out <- dplyr::bind_rows(rows)
+
+ as.matrix(out)
+}
+
+# ------------------------------------------------------------------------------
+# (4) Observation vector aligned to X’s columns
+# ------------------------------------------------------------------------------
+
+#' Observation vector for time `t_idx`, aligned to `X` columns
+#'
+#' Builds a vector with one entry per **column of `X`**, using the `(site, var)` layout
+#' defined by `site_index` and `col_vars`. Observation names in `obs.mean` are
+#' remapped through `name_map`.
+#'
+#' @param t_idx Time index (1-based) to pull observations from `obs.mean`.
+#' @param site_index Site id per column of `X`.
+#' @param col_vars Variable name (state-space name) per column of `X`.
+#' @param obs.mean Observation structure (list by time → list by site → named numeric).
+#' @param name_map Optional map from observation names → state names.
+#'
+#' @return Numeric vector of length `length(col_vars)` with `NA` where not observed.
+#'
+#' @rdname debias_helpers
+#' @keywords internal
+debias_obs_vec_for_time <- function(t_idx, site_index, col_vars, obs.mean, name_map = debias_name_map) {
+ om <- obs.mean[[t_idx]]
+ out <- rep(NA_real_, length(col_vars))
+
+ for (s in unique(site_index)) {
+ vals <- om[[as.character(s)]]
+ if (is.null(vals)) next
+
+ # Normalize names into state-space naming
+ if (!is.null(name_map)) {
+ keep <- names(vals) %in% names(name_map)
+ if (any(keep)) names(vals)[keep] <- unname(name_map[names(vals)[keep]])
+ }
+
+ v_here <- unique(col_vars[site_index == s])
+ vnames <- intersect(names(vals), v_here)
+ for (v in vnames) {
+ idx <- which(site_index == s & col_vars == v)
+ if (length(idx)) out[idx] <- as.numeric(vals[[v]][1])
+ }
+ }
+ out
+}
+
+# ------------------------------------------------------------------------------
+# (5) Diagnostics helpers
+# ------------------------------------------------------------------------------
+
+#' Column-wise comparison table (pre/post/obs)
+#'
+#' @param site_index Site id per column of `X`.
+#' @param col_vars Variable name per column of `X`.
+#' @param pre_mean Vector of pre-debias column means at time `t`.
+#' @param post_mean Vector of post-debias column means at time `t`.
+#' @param obs_vec Observation vector for time `t` (aligned).
+#'
+#' @return Data frame with columns: `site`, `var`, `pre`, `post`, `obs`
+#' sorted by (`var`, `site`).
+#'
+#' @rdname debias_helpers
+#' @keywords internal
+debias_build_comp_df <- function(site_index, col_vars, pre_mean, post_mean, obs_vec) {
+ df <- data.frame(
+ site = site_index,
+ var = col_vars,
+ pre = as.numeric(pre_mean),
+ post = as.numeric(post_mean),
+ obs = as.numeric(obs_vec),
+ stringsAsFactors = FALSE
+ )
+ df[order(df$var, df$site), ]
+}
+
+#' RMSE by variable (pre/post vs obs)
+#'
+#' Computes RMSE for each state variable, separately for the pre- and post-debias
+#' column means against observations. NAs are ignored per variable.
+#'
+#' @param comp_df Output of `debias_build_comp_df()`.
+#'
+#' @return Data frame with columns: `var`, `rmse_pre`, `rmse_post`.
+#'
+#' @rdname debias_helpers
+#' @keywords internal
+debias_rmse_by_var <- function(comp_df) {
+  # Root-mean-square error; NA pairs are dropped before averaging.
+  rmse <- function(a, b) sqrt(mean((a - b)^2, na.rm = TRUE))
+  # One row per variable: RMSE of the pre- and post-debias means vs observations.
+  pieces <- lapply(split(comp_df, comp_df$var), function(d) {
+    data.frame(
+      var = d$var[1],
+      rmse_pre = rmse(d$pre, d$obs),
+      rmse_post = rmse(d$post, d$obs),
+      stringsAsFactors = FALSE
+    )
+  })
+  do.call(rbind, pieces)
+}
+
+# ------------------------------------------------------------------------------
+# (6) Per-step debias application (uses site filtering + covariates)
+# ------------------------------------------------------------------------------
+
+#' Apply residual debiasing for a single SDA time step
+#'
+#' At time `t`, this function:
+#' 1. Builds a training set from **t–1**: residuals `y = obs_prev - raw_prev` and features
+#' `[covariates_prev, raw_prev]` for each variable.
+#' 2. Trains/updates the Python-side learner (`py$train_full_model`).
+#' 3. Predicts residuals at **t** using `[covariates_t, raw_mean_t]`.
+#' 4. Mean-shifts the ensemble `X` by adding predicted residuals to `raw_mean_t`.
+#' 5. Computes per-variable metrics (RMSE, MAE, bias, R²) pre vs post.
+#'
+#' @param t Integer current time index (t > 1 required to train from t–1).
+#' @param obs.t Character or time label for logging (e.g., ISO date string for `t`).
+#' @param X Numeric matrix of the **current** ensemble at time `t` (members × columns).
+#' @param raw_prev Numeric vector of the **raw** column mean at `t-1`.
+#' @param raw_mean_t Numeric vector of the **raw** column mean at `t`.
+#' @param site_index Vector of site ids per column of `X`.
+#' @param col_vars Vector of variable names per column of `X` (state-space names).
+#' @param obs.times Datetime vector indexed by `t` (length ≥ `t`) for covariate year lookup.
+#' @param obs.mean Observation structure (time → site → named numeric vector).
+#' @param covariates_df Long data frame with columns `site`, `year`, and feature columns.
+#' @param py Python bridge object exposing:
+#' - `train_full_model(name, X, y)`,
+#' - `predict_residual(name, X)`,
+#' - `get_model_weights(name)`,
+#' - `has_model(name)` (logical).
+#' @param train_buf R environment used to accumulate training rows per variable across steps.
+#' @param name_map Optional map from observation names → state names.
+#' @param drop_incomplete_covariates Logical; if `TRUE`, drop sites with missing covariates (per year).
+#' @param enforce_consistent_obs Logical; if `TRUE`, drop sites inconsistent up to relevant `t_idx`.
+#' @param require_obs_at_t_for_predict Logical; if `TRUE`, only predict residuals for columns with
+#'   **observations present at t** (useful for constrained comparisons).
+#' @param state.interval Matrix/data.frame of per-variable bounds; either rownames = variable with columns `min`,`max`,
+#'   or a data frame with a `variable` column plus `min`/`max`. Used to clip post-debias values.
+#' @param clip_lower_bound Numeric; minimum floor applied to lower bounds (default 0.01).
+#'
+#' @return A list with:
+#' \describe{
+#' \item{X}{The **mean-shifted** ensemble matrix at time `t` (same dims as input `X`).}
+#' \item{weights_entry}{Optional named list of learner weights by variable (if provided by `py`).}
+#' \item{weights_df_rows}{A tidy data frame of weights emitted this step (time, var, learner, weight).}
+#' \item{diag}{A list with:
+#' \itemize{
+#' \item `comp`: per-column comparison table (`pre`, `post`, `obs`) for diagnostics.
+#' \item `rmse`: per-variable metrics (RMSE/MAE/bias/R²) **pre vs post**.
+#' }
+#' }
+#' \item{rmse_rows}{A tidy slice of metrics with the `time` column attached for easy logging.}
+#' }
+#'
+#' @note
+#' - If covariates are missing (no feature columns) for either `t-1` or `t`, the function
+#' returns `X` unchanged and emits NA metrics (shape-preserving behavior).
+#' - Predicted residuals that are non-finite are coerced to 0 to avoid contaminating `X`.
+#' - The **mean-shift** keeps the ensemble spread intact: we subtract the raw mean and
+#' add the corrected mean (`raw_mean_t + predicted_residual`).
+#'
+#' @title Apply residual debiasing for a single SDA time step
+#' @name sda_apply_debias_step
+#' @rdname sda_apply_debias_step
+#' @keywords internal
+sda_apply_debias_step <- function(
+    t, obs.t, X, raw_prev, raw_mean_t,
+    site_index, col_vars,
+    obs.times, obs.mean,
+    covariates_df, py, train_buf,
+    name_map = debias_name_map,
+    drop_incomplete_covariates = TRUE,
+    enforce_consistent_obs = TRUE,
+    require_obs_at_t_for_predict = FALSE,
+    state.interval = NULL, # per-variable bounds; NULL => only clip_lower_bound floor applies
+    clip_lower_bound = 0.01
+) {
+  # Early return when we cannot train from t-1 or covariates are absent
+  if (t <= 1 || is.null(covariates_df)) {
+    return(list(
+      X = X,
+      weights_entry = NULL,
+      weights_df_rows = utils::head(data.frame(time=character(), var=character(), learner=character(), weight=numeric()), 0),
+      diag = list(
+        comp = debias_build_comp_df(site_index, col_vars, raw_mean_t, raw_mean_t, rep(NA_real_, length(col_vars))),
+        rmse = data.frame(
+          var = unique(col_vars),
+          rmse_pre = NA_real_, rmse_post = NA_real_,
+          mae_pre = NA_real_, mae_post = NA_real_,
+          bias_pre = NA_real_, bias_post = NA_real_,
+          r2_pre = NA_real_, r2_post = NA_real_
+        )
+      ),
+      rmse_rows = utils::head(data.frame(
+        time=character(), var=character(),
+        rmse_pre=numeric(), rmse_post=numeric(),
+        mae_pre=numeric(), mae_post=numeric(),
+        bias_pre=numeric(), bias_post=numeric(),
+        r2_pre=numeric(), r2_post=numeric()
+      ), 0)
+    ))
+  }
+
+  # Build obs/covariates for training (t-1) and prediction (t)
+  obs_prev_vec <- debias_obs_vec_for_time(t - 1, site_index, col_vars, obs.mean, name_map)
+
+  cov_prev_mat <- debias_cov_by_columns(
+    covariates_df = covariates_df, obs_date = obs.times[t - 1],
+    site_index = site_index, obs.mean = obs.mean, t_idx = t - 1,
+    drop_incomplete_covariates = drop_incomplete_covariates,
+    enforce_consistent_obs = enforce_consistent_obs
+  )
+  cov_t_mat <- debias_cov_by_columns(
+    covariates_df = covariates_df, obs_date = obs.times[t],
+    site_index = site_index, obs.mean = obs.mean, t_idx = t,
+    drop_incomplete_covariates = drop_incomplete_covariates,
+    enforce_consistent_obs = enforce_consistent_obs
+  )
+
+  # If no feature columns, skip debias but keep outputs well-formed
+  if (ncol(cov_prev_mat) == 0 || ncol(cov_t_mat) == 0) {
+    return(list(
+      X = X,
+      weights_entry = NULL,
+      weights_df_rows = utils::head(data.frame(time=character(), var=character(), learner=character(), weight=numeric()), 0),
+      diag = list(
+        comp = debias_build_comp_df(site_index, col_vars, raw_mean_t, raw_mean_t, rep(NA_real_, length(col_vars))),
+        rmse = data.frame(
+          var = unique(col_vars),
+          rmse_pre = NA_real_, rmse_post = NA_real_,
+          mae_pre = NA_real_, mae_post = NA_real_,
+          bias_pre = NA_real_, bias_post = NA_real_,
+          r2_pre = NA_real_, r2_post = NA_real_
+        )
+      ),
+      rmse_rows = utils::head(data.frame(
+        time=character(), var=character(),
+        rmse_pre=numeric(), rmse_post=numeric(),
+        mae_pre=numeric(), mae_post=numeric(),
+        bias_pre=numeric(), bias_post=numeric(),
+        r2_pre=numeric(), r2_post=numeric()
+      ), 0)
+    ))
+  }
+
+  pred_resid <- numeric(ncol(X))
+  vars <- unique(col_vars)
+  weights_entry <- list()
+  weights_df_rows <- utils::head(
+    data.frame(time=character(), var=character(), learner=character(), weight=numeric(), stringsAsFactors = FALSE), 0
+  )
+  feature_rows <- utils::head(
+    data.frame(time=character(), var=character(), feature=character(), importance=numeric(),
+               stringsAsFactors = FALSE), 0
+  )
+
+
+  # Optionally restrict predictions to positions with obs at t (diagnostic mode)
+  obs_t_avail <- if (require_obs_at_t_for_predict) {
+    !is.na(debias_obs_vec_for_time(t, site_index, col_vars, obs.mean, name_map))
+  } else rep(TRUE, length(col_vars))
+
+  # Train per variable on t-1 residuals; predict at t
+  for (v in vars) {
+    cols_v <- which(col_vars == v)
+    y_v_all <- obs_prev_vec[cols_v] - as.numeric(raw_prev[cols_v]) # residuals at t-1
+    Xprev_all <- cbind(cov_prev_mat[cols_v, , drop = FALSE],
+                       raw = as.numeric(raw_prev[cols_v]))
+    mask <- !is.na(y_v_all) & stats::complete.cases(Xprev_all)
+
+    # NOTE(review): a fi_logged flag was assigned here but never read; removed as dead code.
+
+    if (any(mask)) {
+      # Accumulate per-variable training buffer
+      rec <- if (exists(v, train_buf, inherits = FALSE)) get(v, train_buf) else list(X = NULL, y = NULL)
+      rec$X <- rbind(rec$X, Xprev_all[mask, , drop = FALSE])
+      rec$y <- c(rec$y, y_v_all[mask])
+      assign(v, rec, train_buf)
+      fi_ret <- py$train_full_model(
+        name = as.character(v),
+        X = as.matrix(rec$X),
+        y = as.numeric(rec$y),
+        feature_names = colnames(rec$X)
+      )
+
+      # Works whether convert=TRUE or FALSE:
+      if (!is.null(fi_ret)) {
+        fi_ret <- tryCatch(reticulate::py_to_r(fi_ret), error = function(e) fi_ret)
+        fn <- as.character(unlist(fi_ret[["names"]], use.names = FALSE))
+        fv <- as.numeric(unlist(fi_ret[["importances"]], use.names = FALSE))
+        if (length(fn) == length(fv) && length(fn) > 0) {
+          feature_rows <- rbind(
+            feature_rows,
+            data.frame(
+              time = rep(obs.t, length(fn)),
+              var = rep(as.character(v), length(fn)),
+              feature = fn,
+              importance = fv,
+              stringsAsFactors = FALSE
+            )
+          )
+        }
+      }
+
+      # Optional: collect mixing weight for diagnostics (e.g., KNN vs TREE)
+      w_now <- try(py$get_model_weights(as.character(v)), silent = TRUE)
+      if (!inherits(w_now, "try-error") && !is.null(w_now) && is.finite(w_now)) {
+        w_now <- min(max(as.numeric(w_now), 0), 1)
+        w_named <- c(KNN = w_now, TREE = 1 - w_now)
+        weights_entry[[as.character(v)]] <- w_named
+        weights_df_rows <- rbind(weights_df_rows, debias_weights_rows(obs.t, as.character(v), w_named))
+      }
+    }
+
+
+    # Predict at t for available positions
+    if (py$has_model(as.character(v))) {
+      Xt_v <- cbind(cov_t_mat[cols_v, , drop = FALSE],
+                    raw = as.numeric(raw_mean_t[cols_v]))
+      ok <- stats::complete.cases(Xt_v) & obs_t_avail[cols_v]
+      if (any(ok)) {
+        preds <- py$predict_residual(as.character(v), Xt_v[ok, , drop = FALSE])
+        pred_resid[cols_v[ok]] <- as.numeric(preds)
+      }
+    }
+  }
+
+  # Defensive: replace any non-finite predicted residual with 0
+  pred_resid[!is.finite(pred_resid)] <- 0
+
+  # Compute corrected mean and diagnostics
+  pre_mean <- raw_mean_t
+  post_mean <- raw_mean_t + pred_resid
+  obs_t_vec <- debias_obs_vec_for_time(t, site_index, col_vars, obs.mean, name_map)
+
+  comp_df <- debias_build_comp_df(site_index, col_vars, pre_mean, post_mean, obs_t_vec)
+
+  metric_one <- function(pred, obs) {
+    ok <- is.finite(pred) & is.finite(obs)
+    if (!any(ok)) return(c(rmse=NA_real_, mae=NA_real_, bias=NA_real_, r2=NA_real_))
+    e <- pred[ok] - obs[ok]
+    rmse <- sqrt(mean(e^2)); mae <- mean(abs(e)); bias <- mean(e)
+    sst <- sum((obs[ok] - mean(obs[ok]))^2)
+    r2 <- if (sst > 0 && sum(ok) >= 2) 1 - sum(e^2) / sst else NA_real_
+    c(rmse=rmse, mae=mae, bias=bias, r2=r2)
+  }
+
+  metrics_by_var <- do.call(
+    rbind,
+    lapply(split(comp_df, comp_df$var), function(d) {
+      m_pre <- metric_one(d$pre, d$obs)
+      m_post <- metric_one(d$post, d$obs)
+      data.frame(
+        var = d$var[1],
+        rmse_pre = m_pre["rmse"], rmse_post = m_post["rmse"],
+        mae_pre = m_pre["mae"], mae_post = m_post["mae"],
+        bias_pre = m_pre["bias"], bias_post = m_post["bias"],
+        r2_pre = m_pre["r2"], r2_post = m_post["r2"],
+        stringsAsFactors = FALSE
+      )
+    })
+  )
+  diag_metrics <- metrics_by_var
+  metrics_by_var$time <- obs.t
+  rmse_rows <- metrics_by_var[, c("time","var","rmse_pre","rmse_post","mae_pre","mae_post","bias_pre","bias_post","r2_pre","r2_post")]
+
+  # Mean-shift ensemble: preserve spread, adjust mean
+  offsets <- sweep(X, 2, raw_mean_t, FUN = "-")
+  corrected <- post_mean
+  X_new <- sweep(offsets, 2, corrected, FUN = "+")
+  .get_interval <- function(v) {
+    if (!is.null(rownames(state.interval)) && v %in% rownames(state.interval)) {
+      as.numeric(state.interval[v, , drop = TRUE])
+    } else if ("variable" %in% colnames(state.interval)) {
+      hit <- which(state.interval[["variable"]] == v)
+      if (length(hit) == 1L) {
+        as.numeric(state.interval[hit, setdiff(colnames(state.interval), "variable"), drop = TRUE])
+      } else c(NA_real_, NA_real_)
+    } else c(NA_real_, NA_real_)
+  }
+
+  for (j in seq_len(ncol(X_new))) {
+    v <- as.character(col_vars[j])
+    iv <- .get_interval(v) # c(min, max)
+    lb <- iv[1]; ub <- iv[2]
+    lb <- if (is.finite(lb)) max(lb, clip_lower_bound) else clip_lower_bound
+    ub <- if (is.finite(ub)) ub else Inf
+    X_new[, j] <- pmin(pmax(X_new[, j], lb), ub)
+  }
+  list(
+    X = X_new,
+    weights_entry = if (length(weights_entry)) weights_entry else NULL,
+    weights_df_rows = weights_df_rows,
+    diag = list(comp = comp_df, rmse = diag_metrics),
+    rmse_rows = rmse_rows,
+    feature_rows = feature_rows
+  )
+}
diff --git a/modules/assim.sequential/inst/python/pecan_debias/__init__.py b/modules/assim.sequential/inst/python/pecan_debias/__init__.py
new file mode 100644
index 00000000000..01009f3eda9
--- /dev/null
+++ b/modules/assim.sequential/inst/python/pecan_debias/__init__.py
@@ -0,0 +1 @@
+from .debias import train_full_model, predict_residual, get_model_weights, has_model
diff --git a/modules/assim.sequential/inst/python/pecan_debias/debias.py b/modules/assim.sequential/inst/python/pecan_debias/debias.py
new file mode 100644
index 00000000000..287ddeef07c
--- /dev/null
+++ b/modules/assim.sequential/inst/python/pecan_debias/debias.py
@@ -0,0 +1,109 @@
+# debias.py
+import numpy as np
+from sklearn.model_selection import GridSearchCV
+from sklearn.neighbors import KNeighborsRegressor
+from sklearn.ensemble import ExtraTreesRegressor
+from sklearn.pipeline import make_pipeline
+from sklearn.preprocessing import StandardScaler
+from sklearn.metrics import mean_squared_error
+
+_models = {} # name -> dict(knn, tree, w, feat_names, feat_importances)
+
+def _fit_knn(X, y):
+ pipe = make_pipeline(StandardScaler(), KNeighborsRegressor())
+ grid = GridSearchCV(
+ pipe,
+ {'kneighborsregressor__n_neighbors': list(range(1, 31))},
+ cv=max(2, min(5, len(y))),
+ scoring='neg_root_mean_squared_error',
+ n_jobs=-1
+ )
+ grid.fit(X, y)
+ k = grid.best_params_['kneighborsregressor__n_neighbors']
+ knn = make_pipeline(StandardScaler(), KNeighborsRegressor(n_neighbors=k))
+ knn.fit(X, y)
+ return knn
+
+def _fit_extratrees(X, y):
+ param_grid = {
+ 'n_estimators': [200, 400, 800],
+ 'max_depth': [None, 20, 40],
+ 'max_features': ['sqrt', 'log2', None],
+ 'min_samples_leaf': [1, 2, 4],
+ 'min_samples_split': [2, 5, 10],
+ }
+ base = ExtraTreesRegressor(random_state=42, n_jobs=-1)
+ grid = GridSearchCV(
+ base, param_grid,
+ cv=max(2, min(5, len(y))),
+ scoring='neg_root_mean_squared_error',
+ n_jobs=-1
+ )
+ grid.fit(X, y)
+ tree = grid.best_estimator_
+ tree.fit(X, y)
+ return tree
+
+def _fit_one(X, y):
+    knn = _fit_knn(X, y)
+    tree = _fit_extratrees(X, y)
+    # blend weight w: grid over in-sample RMSE (kept for speed; NOTE(review): favors the overfitting tree — consider a holdout)
+    knn_pred = knn.predict(X)
+    tree_pred = tree.predict(X)
+    weights = np.linspace(0, 1, 101)
+    best_w, best_rmse = 0.5, np.inf
+    for w in weights:
+        rmse = np.sqrt(mean_squared_error(y, w*knn_pred + (1-w)*tree_pred))
+        if rmse < best_rmse:
+            best_rmse, best_w = rmse, w
+    return knn, tree, float(best_w)
+
+def train_full_model(name, X, y, feature_names=None):
+    name = str(name)
+    X = np.asarray(X); y = np.asarray(y)
+    knn, tree, w = _fit_one(X, y)
+    # w is the KNN share of the KNN/tree blend chosen by _fit_one.
+    # stash explainability from the tree
+    if feature_names is None:
+        feature_names = [f"X{j}" for j in range(X.shape[1])]
+    feat_importances = getattr(tree, "feature_importances_", None)
+    if feat_importances is None:
+        feat_importances = np.zeros(X.shape[1], dtype=float)
+    # Cache both learners plus blend weight so predict_residual()/get_* can reuse them.
+    _models[name] = {
+        "knn": knn,
+        "tree": tree,
+        "w": float(w),
+        "feat_names": list(map(str, feature_names)),
+        "feat_importances": np.asarray(feat_importances, dtype=float).tolist()
+    }
+    # Return importances as plain lists so the R (reticulate) side converts cleanly.
+    return {
+        "names": list(map(str, feature_names)),
+        "importances": np.asarray(feat_importances, dtype=float).tolist()
+    }
+
+def has_model(name):
+ return str(name) in _models
+
+def get_model_weights(name):
+ m = _models.get(str(name))
+ return None if m is None else float(m["w"])
+
+def get_feature_importance(name):
+ m = _models.get(str(name))
+ if m is None:
+ return None
+ return {
+ "names": m["feat_names"],
+ "importances": m["feat_importances"]
+ }
+
+def predict_residual(name, X):
+ name = str(name)
+ X = np.asarray(X)
+ m = _models.get(name)
+ if m is None:
+ return np.zeros(X.shape[0])
+ knn, tree, w = m["knn"], m["tree"], m["w"]
+ return w * knn.predict(X) + (1 - w) * tree.predict(X)
diff --git a/modules/assim.sequential/man/analysis_sda_block.Rd b/modules/assim.sequential/man/analysis_sda_block.Rd
index 4be6f3ba235..c431aa1aa92 100644
--- a/modules/assim.sequential/man/analysis_sda_block.Rd
+++ b/modules/assim.sequential/man/analysis_sda_block.Rd
@@ -33,7 +33,7 @@ analysis_sda_block(
\item{MCMC.args}{arguments for the MCMC sampling, details can be found in the roxygen strucutre for control list in the `sda.enkf.multisite` function.}
-\item{block.list.all.pre}{pre-existed block.list.all object for passing the aqq and bqq to the current SDA run, the default is NULL. Details can be found in the roxygen structure for `pre_enkf_params` of the `sda.enkf.multisite` function}
+\item{block.list.all.pre}{pre-existed block.list.all object for passing the aqq and bqq to the current SDA run, the default is NULL. Details can be found in the roxygen structure for `pre_enkf_params` of the `sda.enkf.multisite` function.}
}
\value{
It returns the `build.block.xy` object and the analysis results.
diff --git a/modules/assim.sequential/man/block.2.vector.Rd b/modules/assim.sequential/man/block.2.vector.Rd
index cf9b1687396..04597227a39 100644
--- a/modules/assim.sequential/man/block.2.vector.Rd
+++ b/modules/assim.sequential/man/block.2.vector.Rd
@@ -4,7 +4,7 @@
\alias{block.2.vector}
\title{block.2.vector}
\usage{
-block.2.vector(block.list, X, H)
+block.2.vector(block.list, X, H, adjustment)
}
\arguments{
\item{block.list}{lists of blocks generated by the `build.block.xy` function.}
@@ -12,6 +12,8 @@ block.2.vector(block.list, X, H)
\item{X}{A matrix contains ensemble forecasts.}
\item{H}{H index created by the `construct_nimble_H` function.}
+
+\item{adjustment}{logical variable determine if we want to adjust the analysis ensembles based on likelihood.}
}
\value{
It returns a list of analysis results by MCMC sampling.
diff --git a/modules/assim.sequential/man/build.block.xy.Rd b/modules/assim.sequential/man/build.block.xy.Rd
index 261301277d8..1c5d4d08566 100644
--- a/modules/assim.sequential/man/build.block.xy.Rd
+++ b/modules/assim.sequential/man/build.block.xy.Rd
@@ -19,9 +19,6 @@ build.block.xy(settings, block.list.all, X, obs.mean, obs.cov, t)
\item{t}{time point.}
}
-\value{
-It returns the `build.block.xy` object with data and constants filled in.
-}
\description{
This function split long vector and covariance matrix into blocks corresponding to the localization.
}
diff --git a/modules/assim.sequential/man/downscale_main.Rd b/modules/assim.sequential/man/downscale_main.Rd
new file mode 100644
index 00000000000..3ac2ae3ae91
--- /dev/null
+++ b/modules/assim.sequential/man/downscale_main.Rd
@@ -0,0 +1,47 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/SDA_parallel_downscale.R
+\name{downscale_main}
+\alias{downscale_main}
+\title{downscale_main}
+\usage{
+downscale_main(
+ settings,
+ analysis,
+ covariates.dir,
+ time,
+ variable,
+ outdir,
+ base.map.dir,
+ method = "randomForest",
+ cores = parallel::detectCores()
+)
+}
+\arguments{
+\item{settings}{character: physical path that points to the pecan settings XML file.}
+
+\item{analysis}{numeric: data frame (rows: ensemble member; columns: site*state_variables) of updated ensemble analysis results from the `sda_enkf` function.}
+
+\item{covariates.dir}{character: path to the exported covariates GeoTIFF file.}
+
+\item{time}{character: the time tag used to differentiate the outputs from others.}
+
+\item{variable}{character: name of state variable. It should match up with the column names of the analysis data frame.}
+
+\item{outdir}{character: the output directory where the downscaled maps will be stored.}
+
+\item{base.map.dir}{character: path to the GeoTIFF file within which the extents and CRS will be used to generate the ensemble maps.}
+
+\item{method}{character: machine learning method, default is randomForest (currently support randomForest and xgboost).}
+
+\item{cores}{numeric: how many CPUs to be used in the calculation, the default is the total CPU number you have.}
+}
+\value{
+paths to the ensemble downscaled maps.
+}
+\description{
+This is the main function to execute the machine learning training and prediction.
+Note it will be deployed by each node you requested if the qsub feature is enabled below.
+}
+\author{
+Dongchen Zhang
+}
diff --git a/modules/assim.sequential/man/downscale_qsub_main.Rd b/modules/assim.sequential/man/downscale_qsub_main.Rd
new file mode 100644
index 00000000000..b968c38e71e
--- /dev/null
+++ b/modules/assim.sequential/man/downscale_qsub_main.Rd
@@ -0,0 +1,17 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/SDA_parallel_downscale.R
+\name{downscale_qsub_main}
+\alias{downscale_qsub_main}
+\title{downscale_qsub_main}
+\usage{
+downscale_qsub_main(folder.path)
+}
+\arguments{
+\item{folder.path}{Character: physical path to which the job file is located.}
+}
+\description{
+This qsub function helps to run the submitted qsub jobs for running the downscale_main function.
+}
+\author{
+Dongchen Zhang
+}
diff --git a/modules/assim.sequential/man/parallel_prediction.Rd b/modules/assim.sequential/man/parallel_prediction.Rd
new file mode 100644
index 00000000000..51aeb9072b8
--- /dev/null
+++ b/modules/assim.sequential/man/parallel_prediction.Rd
@@ -0,0 +1,41 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/SDA_parallel_downscale.R
+\name{parallel_prediction}
+\alias{parallel_prediction}
+\title{parallel_prediction}
+\usage{
+parallel_prediction(
+ base.map.dir,
+ models,
+ cov.vecs,
+ non.na.inds,
+ outdir,
+ name,
+ cores = parallel::detectCores()
+)
+}
+\arguments{
+\item{base.map.dir}{character: path to the GeoTIFF file within which the extents and CRS will be used to generate the ensemble maps.}
+
+\item{models}{list: trained models across ensemble members generated by the `parallel_train` function.}
+
+\item{cov.vecs}{numeric: data frame containing covariates across vegetated pixels generated from the `stack_covariates_2_df` function.}
+
+\item{non.na.inds}{numeric: the corresponding index of vegetated pixels generated from the `stack_covariates_2_df` function.}
+
+\item{outdir}{character: the output directory where the downscaled maps will be stored.}
+
+\item{name}{list: containing the time and variable name to create the final GeoTIFF file name.}
+
+\item{cores}{numeric: how many CPUs to be used in the calculation, the default is the total CPU number you have.}
+}
+\value{
+paths to the ensemble downscaled maps.
+}
+\description{
+This function helps to predict the target variable observations based on the covariates.
+The prediction is working in parallel across vegetated pixels.
+}
+\author{
+Dongchen Zhang
+}
diff --git a/modules/assim.sequential/man/parallel_train.Rd b/modules/assim.sequential/man/parallel_train.Rd
new file mode 100644
index 00000000000..e4e833e99e0
--- /dev/null
+++ b/modules/assim.sequential/man/parallel_train.Rd
@@ -0,0 +1,28 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/SDA_parallel_downscale.R
+\name{parallel_train}
+\alias{parallel_train}
+\title{parallel_train}
+\usage{
+parallel_train(
+ full_data,
+ method = "randomForest",
+ cores = parallel::detectCores()
+)
+}
+\arguments{
+\item{full_data}{numeric: the matrix generated using the `prepare_train_dat` function.}
+
+\item{method}{character: machine learning method (currently support randomForest and xgboost).}
+
+\item{cores}{numeric: how many CPUs to be used in the calculation, the default is the total CPU number you have.}
+}
+\value{
+list of trained models across ensemble members.
+}
+\description{
+This function helps to train the ML model across ensemble members in parallel.
+}
+\author{
+Dongchen Zhang
+}
diff --git a/modules/assim.sequential/man/pecan_settings_2_pts.Rd b/modules/assim.sequential/man/pecan_settings_2_pts.Rd
new file mode 100644
index 00000000000..1acbac65946
--- /dev/null
+++ b/modules/assim.sequential/man/pecan_settings_2_pts.Rd
@@ -0,0 +1,20 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/SDA_parallel_downscale.R
+\name{pecan_settings_2_pts}
+\alias{pecan_settings_2_pts}
+\title{pecan_settings_2_pts}
+\usage{
+pecan_settings_2_pts(settings)
+}
+\arguments{
+\item{settings}{PEcAn settings: either a character that points to the settings or shape file or the actual pecan settings object will be accepted.}
+}
+\value{
+terra spatial points object.
+}
+\description{
+convert settings to geospatial points in terra.
+}
+\author{
+Dongchen Zhang
+}
diff --git a/modules/assim.sequential/man/prepare_train_dat.Rd b/modules/assim.sequential/man/prepare_train_dat.Rd
new file mode 100644
index 00000000000..4dcde31dd5e
--- /dev/null
+++ b/modules/assim.sequential/man/prepare_train_dat.Rd
@@ -0,0 +1,27 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/SDA_parallel_downscale.R
+\name{prepare_train_dat}
+\alias{prepare_train_dat}
+\title{prepare_train_dat}
+\usage{
+prepare_train_dat(pts, analysis, covariates.dir, variable)
+}
+\arguments{
+\item{pts}{spatialpoints: spatial points returned by `terra::vect` function.}
+
+\item{analysis}{numeric: data frame (rows: ensemble member; columns: site*state_variables) of updated ensemble analysis results from the `sda_enkf` function.}
+
+\item{covariates.dir}{character: path to the exported covariates GeoTIFF file.}
+
+\item{variable}{character: name of state variable. It should match up with the column names of the analysis data frame.}
+}
+\value{
+matrix (num.sites, num.variables * num.ensemble + num.covariates) within which the first sets of columns contain values of state variables for each ensemble member of every site, and the rest columns contain the corresponding covariates.
+}
+\description{
+This function helps to create the training dataset of specific variable type and locations for downscaling.
+TODO: Add a ratio argument (training sample size/total sample size) so that we could calculate the out-of-sample accuracy.
+}
+\author{
+Dongchen Zhang
+}
diff --git a/modules/assim.sequential/man/qsub_sda.Rd b/modules/assim.sequential/man/qsub_sda.Rd
new file mode 100644
index 00000000000..f69b3270cbf
--- /dev/null
+++ b/modules/assim.sequential/man/qsub_sda.Rd
@@ -0,0 +1,63 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/sda.enkf_parallel.R
+\name{qsub_sda}
+\alias{qsub_sda}
+\title{qsub_sda}
+\usage{
+qsub_sda(
+ settings,
+ obs.mean,
+ obs.cov,
+ Q,
+ pre_enkf_params,
+ ensemble.samples,
+ outdir,
+ control,
+ block.index = NULL,
+ debias = list(cov.dir = NULL, start.year = NULL),
+ prefix = "batch"
+)
+}
+\arguments{
+\item{settings}{PEcAn settings object}
+
+\item{obs.mean}{Lists of date times named by time points, which contains lists of sites named by site ids, which contains observation means for each state variables of each site for each time point.}
+
+\item{obs.cov}{Lists of date times named by time points, which contains lists of sites named by site ids, which contains observation covariances for all state variables of each site for each time point.}
+
+\item{Q}{Process covariance matrix given if there is no data to estimate it.}
+
+\item{pre_enkf_params}{Used for passing pre-existing time-series of process error into the current SDA runs to ignore the impact by the differences between process errors.}
+
+\item{ensemble.samples}{Pass ensemble.samples from outside to avoid GitHub check issues.}
+
+\item{outdir}{Physical path to the folder where the SDA outputs will be stored.
+The default is NULL, where we will be using outdir from the settings object.}
+
+\item{control}{List of flags controlling the behavior of the SDA.
+`TimeseriesPlot` for post analysis examination;
+`OutlierDetection` decide if we want to execute the outlier detection each time after the model forecasting;
+`send_email` contains lists for sending email to report the SDA progress;
+`keepNC` decide if we want to keep the NetCDF files inside the out directory;
+`forceRun` decide if we want to proceed the Bayesian MCMC sampling without observations;
+`MCMC.args` include lists for controlling the MCMC sampling process (iteration, nchains, burnin, and nthin.).}
+
+\item{block.index}{list of site ids for each block, default is NULL. This is used when the localization turns on.
+Please keep using the default value because the localization feature is still in development.}
+
+\item{debias}{List: R list containing the covariance directory and the start year.
+covariance directory should include GeoTIFF files named by year.
+start year is numeric input which decide when to start the debiasing feature.}
+
+\item{prefix}{character: the desired folder name to store the outputs.}
+}
+\value{
+NONE
+}
+\description{
+This function provides means to split large SDA runs into separate `qsub` jobs.
+Including job creation, submission, and assemble.
+}
+\author{
+Dongchen Zhang
+}
diff --git a/modules/assim.sequential/man/qsub_sda_batch.Rd b/modules/assim.sequential/man/qsub_sda_batch.Rd
new file mode 100644
index 00000000000..abac7861614
--- /dev/null
+++ b/modules/assim.sequential/man/qsub_sda_batch.Rd
@@ -0,0 +1,17 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/sda.enkf_parallel.R
+\name{qsub_sda_batch}
+\alias{qsub_sda_batch}
+\title{qsub_sda_batch}
+\usage{
+qsub_sda_batch(folder.path)
+}
+\arguments{
+\item{folder.path}{character: path where the `configs.rds` file is stored.}
+}
+\description{
+This function can help to execute sda function.
+}
+\author{
+Dongchen Zhang.
+}
diff --git a/modules/assim.sequential/man/sda.enkf.multisite.Rd b/modules/assim.sequential/man/sda.enkf.multisite.Rd
index 81b79f1c1a1..e24a8b816cd 100644
--- a/modules/assim.sequential/man/sda.enkf.multisite.Rd
+++ b/modules/assim.sequential/man/sda.enkf.multisite.Rd
@@ -12,9 +12,10 @@ sda.enkf.multisite(
restart = NULL,
pre_enkf_params = NULL,
ensemble.samples = NULL,
- control = list(trace = TRUE, TimeseriesPlot = FALSE, debug = FALSE, pause = FALSE,
- Profiling = FALSE, OutlierDetection = FALSE, parallel_qsub = TRUE, send_email = NULL,
- keepNC = TRUE, forceRun = TRUE, run_parallel = TRUE, MCMC.args = NULL),
+ control = list(TimeseriesPlot = FALSE, OutlierDetection = FALSE, send_email = NULL,
+ keepNC = TRUE, forceRun = TRUE, run_parallel = TRUE, MCMC.args = NULL, merge_nc =
+ TRUE, execution = "local"),
+ debias = list(cov.dir = NULL, start.year = NULL),
...
)
}
@@ -31,21 +32,26 @@ sda.enkf.multisite(
\item{pre_enkf_params}{Used for passing pre-existing time-series of process error into the current SDA runs to ignore the impact by the differences between process errors.}
-\item{ensemble.samples}{Pass ensemble.samples from outside to avoid GitHub check issues.}
+\item{ensemble.samples}{list of ensemble parameters across PFTs. Default is NULL.}
\item{control}{List of flags controlling the behavior of the SDA.
-`trace` for reporting back the SDA outcomes;
`TimeseriesPlot` for post analysis examination;
-`debug` decide if we want to pause the code and examining the variables inside the function;
-`pause` decide if we want to pause the SDA workflow at current time point t;
-`Profiling` decide if we want to export the temporal SDA outputs in CSV file;
`OutlierDetection` decide if we want to execute the outlier detection each time after the model forecasting;
-`parallel_qsub` decide if we want to execute the `qsub` job submission under parallel mode;
`send_email` contains lists for sending email to report the SDA progress;
`keepNC` decide if we want to keep the NetCDF files inside the out directory;
`forceRun` decide if we want to proceed the Bayesian MCMC sampling without observations;
`run_parallel` decide if we want to run the SDA under parallel mode for the `future_map` function;
-`MCMC.args` include lists for controling the MCMC sampling process (iteration, nchains, burnin, and nthin.).}
+`MCMC.args` includes lists for controlling the MCMC sampling process (iteration, nchains, burnin, and nthin).
+`merge_nc` determines if we want to merge all netCDF files across sites and ensembles.
+If it's set as `TRUE`, we will then combine all netCDF files into the `merged_nc` folder within the `outdir`.
+`execution` decides the way we want to execute the model,
+including `local`, where we execute the model locally;
+`qsub`, where we use the traditional `start_model_runs` function for submission;
+`qsub_parallel`, where we first combine jobs and submit them into the SCC.}
+
+\item{debias}{List: R list containing the covariance directory and the start year.
+covariance directory should include GeoTIFF files named by year.
+start year is a numeric input which decides when to start the debiasing feature.}
\item{...}{Additional arguments, currently ignored}
}
diff --git a/modules/assim.sequential/man/sda.enkf_local.Rd b/modules/assim.sequential/man/sda.enkf_local.Rd
new file mode 100644
index 00000000000..f804c7d1382
--- /dev/null
+++ b/modules/assim.sequential/man/sda.enkf_local.Rd
@@ -0,0 +1,58 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/sda.enkf_parallel.R
+\name{sda.enkf_local}
+\alias{sda.enkf_local}
+\title{sda.enkf_local}
+\usage{
+sda.enkf_local(
+ settings,
+ obs.mean,
+ obs.cov,
+ Q = NULL,
+ pre_enkf_params = NULL,
+ ensemble.samples = NULL,
+ outdir = NULL,
+ control = list(TimeseriesPlot = FALSE, OutlierDetection = FALSE, send_email = NULL,
+ keepNC = TRUE, forceRun = TRUE, MCMC.args = NULL, merge_nc = TRUE),
+ debias = list(cov.dir = NULL, start.year = NULL)
+)
+}
+\arguments{
+\item{settings}{PEcAn settings object}
+
+\item{obs.mean}{Lists of date times named by time points, which contains lists of sites named by site ids, which contains observation means for each state variables of each site for each time point.}
+
+\item{obs.cov}{Lists of date times named by time points, which contains lists of sites named by site ids, which contains observation covariances for all state variables of each site for each time point.}
+
+\item{Q}{Process covariance matrix given if there is no data to estimate it.}
+
+\item{pre_enkf_params}{Used for passing pre-existing time-series of process error into the current SDA runs to ignore the impact by the differences between process errors.}
+
+\item{ensemble.samples}{list of ensemble parameters across PFTs. Default is NULL.}
+
+\item{outdir}{physical path to the folder that stores the SDA outputs. Default is NULL.}
+
+\item{control}{List of flags controlling the behavior of the SDA.
+`TimeseriesPlot` for post analysis examination;
+`OutlierDetection` decide if we want to execute the outlier detection each time after the model forecasting;
+`send_email` contains lists for sending email to report the SDA progress;
+`keepNC` decide if we want to keep the NetCDF files inside the out directory;
+`forceRun` decide if we want to proceed the Bayesian MCMC sampling without observations;
+`MCMC.args` includes lists for controlling the MCMC sampling process (iteration, nchains, burnin, and nthin).
+`merge_nc` determines if we want to merge all netCDF files across sites and ensembles.
+If it's set as `TRUE`, we will then combine all netCDF files into the `merged_nc` folder within the `outdir`.}
+
+\item{debias}{List: R list containing the covariance directory and the start year.
+covariance directory should include GeoTIFF files named by year.
+start year is a numeric input which decides when to start the debiasing feature.}
+}
+\value{
+NONE
+}
+\description{
+This function provides complete support for the multi-core and multi-node computation on the general HPC system.
+Thus, this script will be more computationally efficient, making it possible to run SDA over thousands of locations.
+}
+\author{
+Dongchen Zhang \email{zhangdc@bu.edu}
+}
diff --git a/modules/assim.sequential/man/sda_assemble.Rd b/modules/assim.sequential/man/sda_assemble.Rd
new file mode 100644
index 00000000000..1e94c4203de
--- /dev/null
+++ b/modules/assim.sequential/man/sda_assemble.Rd
@@ -0,0 +1,19 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/sda.enkf_parallel.R
+\name{sda_assemble}
+\alias{sda_assemble}
+\title{sda_assemble}
+\usage{
+sda_assemble(batch.folder, outdir)
+}
+\arguments{
+\item{batch.folder}{character: path where the SDA batch jobs are stored.}
+
+\item{outdir}{character: path where we want to store the assembled analysis and forecasts.}
+}
+\description{
+This function helps to assemble SDA outputs (analysis and forecasts) from each job execution.
+}
+\author{
+Dongchen Zhang.
+}
diff --git a/modules/assim.sequential/man/sda_bias_correction.Rd b/modules/assim.sequential/man/sda_bias_correction.Rd
new file mode 100644
index 00000000000..bd6a2cb2898
--- /dev/null
+++ b/modules/assim.sequential/man/sda_bias_correction.Rd
@@ -0,0 +1,48 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/sda_bias_correction.R
+\name{sda_bias_correction}
+\alias{sda_bias_correction}
+\title{sda_bias_correction}
+\usage{
+sda_bias_correction(
+ site.locs,
+ t,
+ all.X,
+ obs.mean,
+ state.interval,
+ cov.dir,
+ pre.states,
+ py.init = NULL
+)
+}
+\arguments{
+\item{site.locs}{data.frame: data.frame that contains longitude and latitude in its first and second column.}
+
+\item{t}{numeric: the current number of time points (e.g., t=1 for the beginning time point).}
+
+\item{all.X}{list: lists of data frame of model forecast from the beginning to the current time points
+that has n (ensemble size) rows and n.var (number of variables) times n.site (number of locations) columns.
+(e.g., 100 ensembles, 4 variables, and 8,000 locations will end up with data.frame of 100 rows and 32,000 columns)}
+
+\item{obs.mean}{List: lists of date times named by time points, which contains lists of sites named by site ids,
+which contains observation means for each state variables of each site for each time point.}
+
+\item{state.interval}{matrix: containing the upper and lower boundaries for each state variable.}
+
+\item{cov.dir}{character: physical path to the directory contains the time series covariate maps.}
+
+\item{pre.states}{list: containing previous covariates for each location.}
+
+\item{py.init}{R function: R function to initialize the python functions. Default is NULL.
+the default random forest will be used if `py.init` is NULL.}
+}
+\value{
+list: the current X after the bias-correction; the ML model for each variable; predicted residuals.
+}
+\description{
+This function helps to correct the forecasts' biases based on
+ML (random forest) training on the previous time point.
+}
+\author{
+Dongchen Zhang
+}
diff --git a/modules/assim.sequential/man/stack_covariates_2_df.Rd b/modules/assim.sequential/man/stack_covariates_2_df.Rd
new file mode 100644
index 00000000000..edb3011aa83
--- /dev/null
+++ b/modules/assim.sequential/man/stack_covariates_2_df.Rd
@@ -0,0 +1,23 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/SDA_parallel_downscale.R
+\name{stack_covariates_2_df}
+\alias{stack_covariates_2_df}
+\title{stack_covariates_2_df}
+\usage{
+stack_covariates_2_df(rast.dir, cores = parallel::detectCores())
+}
+\arguments{
+\item{rast.dir}{character: a character that points to the covariates raster file generated by the `stack_covariates_2_geotiff` function.}
+
+\item{cores}{numeric: how many CPUs to be used in the calculation, the default is the total CPU number you have.}
+}
+\value{
+list containing the data frame of covariates for vegetated pixels and the corresponding index of the pixels.
+}
+\description{
+This function helps to build the data frame (pixels by data columns) for only vegetated pixels to improve the efficiency.
+Note that the `LC` field using the `MODIS land cover` observations (MCD12Q1.061) must be supplied in the covariates to make this function work.
+}
+\author{
+Dongchen Zhang
+}
diff --git a/modules/assim.sequential/man/stack_covariates_2_geotiff.Rd b/modules/assim.sequential/man/stack_covariates_2_geotiff.Rd
new file mode 100644
index 00000000000..ad74d3c6fdb
--- /dev/null
+++ b/modules/assim.sequential/man/stack_covariates_2_geotiff.Rd
@@ -0,0 +1,37 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/SDA_parallel_downscale.R
+\name{stack_covariates_2_geotiff}
+\alias{stack_covariates_2_geotiff}
+\title{stack_covariates_2_geotiff}
+\usage{
+stack_covariates_2_geotiff(
+ outdir,
+ year,
+ base.map.dir,
+ cov.tif.file.list,
+ normalize = T,
+ cores = parallel::detectCores()
+)
+}
+\arguments{
+\item{outdir}{character: the output directory where the stacked GeoTIFF file will be generated.}
+
+\item{year}{numeric: the year when the covariates are stacked.}
+
+\item{base.map.dir}{character: path to the GeoTIFF file within which the extents and CRS will be used to generate the final map.}
+
+\item{cov.tif.file.list}{list: a list contains sub-lists with each including path to the corresponding map and the variables to be extracted (e.g., list(LC = list(dir = "path/to/landcover.tiff", var.name = "LC")).}
+
+\item{normalize}{boolean: decide if we want to normalize each data layer, the default is TRUE.}
+
+\item{cores}{numeric: how many CPUs to be used in the calculation, the default is the total CPU number you have.}
+}
+\value{
+path to the exported GeoTIFF file.
+}
+\description{
+This function helps to stack target data layers from various GeoTIFF maps (with different extents, CRS, and resolutions) to a single map.
+}
+\author{
+Dongchen Zhang
+}
diff --git a/modules/benchmark/DESCRIPTION b/modules/benchmark/DESCRIPTION
index e0e95bd13f4..f636fc9e25d 100644
--- a/modules/benchmark/DESCRIPTION
+++ b/modules/benchmark/DESCRIPTION
@@ -1,7 +1,7 @@
Package: PEcAn.benchmark
Type: Package
Title: PEcAn Functions Used for Benchmarking
-Version: 1.7.4
+Version: 1.7.5
Authors@R: c(person("Mike", "Dietze", role = c("aut", "cre"),
email = "dietze@bu.edu"),
person("Betsy", "Cowdery", role = c("aut"),
@@ -21,6 +21,8 @@ Description: The Predictive Ecosystem Carbon Analyzer (PEcAn) is a scientific
efficacy of scientific investigation. The PEcAn.benchmark package provides
utilities for comparing models and data, including a suite of statistical
metrics and plots.
+URL: https://pecanproject.github.io
+BugReports: https://github.com/PecanProject/pecan/issues
Imports:
dplyr,
ggplot2,
@@ -51,3 +53,4 @@ LazyLoad: yes
LazyData: FALSE
Encoding: UTF-8
RoxygenNote: 7.3.2
+X-schema.org-keywords: model-evaluation, model-data-comparison, PEcAn
diff --git a/modules/benchmark/NEWS.md b/modules/benchmark/NEWS.md
index 98bfd7c3de6..ed0dcbcaee1 100644
--- a/modules/benchmark/NEWS.md
+++ b/modules/benchmark/NEWS.md
@@ -1,3 +1,9 @@
+# PEcAn.benchmark 1.7.5
+
+* Added keywords and bug reporting URL to DESCRIPTION.
+* No code changes in this release.
+
+
# PEcAn.benchmark 1.7.4
* Removed Browndog support
diff --git a/modules/data.atmosphere/.Rbuildignore b/modules/data.atmosphere/.Rbuildignore
index 4952b99da1f..c63223f185f 100644
--- a/modules/data.atmosphere/.Rbuildignore
+++ b/modules/data.atmosphere/.Rbuildignore
@@ -1,3 +1,4 @@
^.*\.Rproj$
^\.Rproj\.user$
^docs$
+.*venv/
diff --git a/modules/data.atmosphere/DESCRIPTION b/modules/data.atmosphere/DESCRIPTION
index 36de48f9ae3..b3283ef3844 100644
--- a/modules/data.atmosphere/DESCRIPTION
+++ b/modules/data.atmosphere/DESCRIPTION
@@ -1,7 +1,7 @@
Package: PEcAn.data.atmosphere
Type: Package
Title: PEcAn Functions Used for Managing Climate Driver Data
-Version: 1.9.0
+Version: 1.9.1
Authors@R: c(person("Mike", "Dietze", role = c("aut"),
email = "dietze@bu.edu"),
person("David", "LeBauer", role = c("aut", "cre"),
@@ -12,6 +12,9 @@ Authors@R: c(person("Mike", "Dietze", role = c("aut"),
email = "kooper@illinois.edu"),
person("Deepak", "Jaiswal", role = c("aut"),
email = "djaiswal@djaiswal.edu"),
+ person("Chris", "Black", role = c("ctb"),
+ email = "chris@ckblack.org",
+ comment = c(ORCID="https://orcid.org/0000-0001-8382-298X")),
person("University of Illinois, NCSA", role = c("cph")))
Description: The Predictive Ecosystem Carbon Analyzer (PEcAn) is a scientific
workflow management tool that is designed to simplify the management of
@@ -19,6 +22,9 @@ Description: The Predictive Ecosystem Carbon Analyzer (PEcAn) is a scientific
package converts climate driver data into a standard format for models
integrated into PEcAn. As a standalone package, it provides an interface to
access diverse climate data sets.
+URL: https://pecanproject.github.io
+BugReports: https://github.com/PecanProject/pecan/issues
+Depends: R (>= 4.1.0)
Imports:
abind (>= 1.4.5),
amerifluxr,
@@ -26,8 +32,8 @@ Imports:
arrow,
curl,
dplyr (>= 0.8.1),
+ foreach,
geonames (> 0.998),
- ggplot2,
glue,
httr,
jsonlite,
@@ -61,10 +67,12 @@ Imports:
zoo
Suggests:
doParallel,
- foreach,
+ ecmwfr (>= 2.0.0),
+ doSNOW,
furrr,
future,
getPass,
+ ggplot2,
knitr,
mockery,
parallel,
@@ -86,3 +94,4 @@ LazyData: FALSE
VignetteBuilder: knitr, rmarkdown
Encoding: UTF-8
RoxygenNote: 7.3.2
+X-schema.org-keywords: climate-data, data-standardization
diff --git a/modules/data.atmosphere/NAMESPACE b/modules/data.atmosphere/NAMESPACE
index ebe22618974..5f282f7dfe4 100644
--- a/modules/data.atmosphere/NAMESPACE
+++ b/modules/data.atmosphere/NAMESPACE
@@ -4,6 +4,8 @@ export(.download.raw.met.module)
export(.extract.nc.module)
export(.met2model.module)
export(AirDens)
+export(AmeriFlux_met_ensemble)
+export(Average_ERA5_2_GeoTIFF)
export(ERA5_met_process)
export(align.met)
export(build_cf_variables_table_url)
@@ -93,6 +95,7 @@ export(process_gridded_noaa_download)
export(qair2rh)
export(read.register)
export(rh2qair)
+export(sat_vapor_pressure)
export(save.betas)
export(save.model)
export(site.lst)
@@ -103,13 +106,14 @@ export(split_wind)
export(subdaily_pred)
export(sw2par)
export(sw2ppfd)
+export(t2es)
export(temporal.downscale.functions)
export(temporal_downscale_half_hour)
export(upscale_met)
export(wide2long)
export(write_noaa_gefs_netcdf)
importFrom(dplyr,"%>%")
-importFrom(purrr,"%>%")
+importFrom(foreach,"%dopar%")
importFrom(rlang,.data)
importFrom(rlang,.env)
importFrom(sf,st_crs)
diff --git a/modules/data.atmosphere/NEWS.md b/modules/data.atmosphere/NEWS.md
index 5568f3864c4..4c4ac24eef7 100644
--- a/modules/data.atmosphere/NEWS.md
+++ b/modules/data.atmosphere/NEWS.md
@@ -1,3 +1,25 @@
+# PEcAn.data.atmosphere 1.9.1
+
+## Added
+* New function `sat_vapor_pressure()` computes saturation vapor pressure from temperature (#3597).
+* New function `AmeriFlux_met_ensemble()` generates weather ensembles from Ameriflux data with ERA5 fallback for missing radiation and soil moisture (#3586).
+* `ERA5_met_process()` gains option `n_cores` to process ensemble data efficiently in parallel (#3563).
+* `download.ERA5_cds` gains options `time`, `dataset`, `product_type`, all defaulting to the values previously hard-coded (#3547).
+
+## Fixed
+* Updated `download.NOAA_GEFS` to work with the current (v12.3) release of GEFS (#3349).
+
+## Changed
+* Dependency `ggplot2` is now suggested rather than required. It is used in two vignettes and for optional diagnostic plots from `debias_met_regression`.
+* `download.ERA5_cds` now downloads NetCDF directly (replacing internal conversion from grib) using the R package ecmwfr (replacing python dependency on cdsapi via reticulate) (#3547).
+* `download.ERA5_cds()` now requires a valid Copernicus CDS API key, replacing the previous `.netrc` authentication. See the [ecmwfr package documentation](https://bluegreen-labs.github.io/ecmwfr/) for details.
+* `extract.nc.ERA5()` and `met2CF.ERA5` now support both ensemble and single-member data products (#3584).
+* added support for soil temperature, relative humidity, soil moisture, and PPFD downscaling to `met_temporal_downscale.Gaussian_ensemble` (#3586).
+* Removed `sitename` and `username` from the formal arguments of `download.NOAA_GEFS`.
+ Before they were silently ignored, now they're treated as part of `...` (which is also ignored!) (#3349).
+
+
+
# PEcAn.data.atmosphere 1.9.0
## Fixed
diff --git a/modules/data.atmosphere/R/Ameriflux_met_ensemble.R b/modules/data.atmosphere/R/Ameriflux_met_ensemble.R
new file mode 100644
index 00000000000..4554071fa26
--- /dev/null
+++ b/modules/data.atmosphere/R/Ameriflux_met_ensemble.R
@@ -0,0 +1,488 @@
+#' Generate AmeriFlux meteorological ensembles
+#'
+#' Downloads AmeriFlux data, applies ERA5 fallback for missing radiation and soil moisture,
+#' performs gap-filling, and generates ensembles.
+#' This function provides a complete pipeline from raw AmeriFlux data to CF-compliant
+#' ensemble meteorological files.
+#'
+#' @param site_id character. AmeriFlux site identifier (e.g., "US-Ha1")
+#' The 'SITE_ID' field in \href{http://ameriflux.lbl.gov/sites/site-list-and-pages/}{list of Ameriflux sites}
+#' @param start_date character or date. Start date in "YYYY-MM-DD" format
+#' @param end_date character or date. End date in "YYYY-MM-DD" format
+#' @param outfolder character. Output directory path for ensemble files
+#' @param ameriflux_username character. AmeriFlux username for data access.
+#' @param ameriflux_useremail character. Email address for AmeriFlux authentication (must contain "@")
+#' @param overwrite logical. Whether to overwrite existing files. Default: FALSE
+#' @param verbose logical. Whether to print detailed logs. Default: FALSE
+#' @param format data frame or List. format specifications for CF conversion. Default: NULL
+#' The AmerifluxLBL format is Bety record 5000000002.
+#' which could be returned from PEcAn.DB::query.format.vars(format.id=5000000002, bety = con)
+#' @param n_ens integer. Number of ensemble members to generate. Default: 10
+#' @param w_len integer. Window length in days. Default: 20
+#' @param era5_user character. CDS user ID (UID) from your CDS profile. Required for authentication.
+#' @param era5_key character. CDS API key from your CDS profile. Required for authentication.
+#' @param threshold numeric. Coverage threshold (0-1) for triggering ERA5 fallback. Default: 0.5
+#' @param dirs named list. Optional configuration for existing data directory paths. Default: NULL
+#' @param ... additional arguments passed to download.AmerifluxLBL
+#' @return A data frame with the paths to the generated ensemble files and their metadata.
+#' @examples
+#' \dontrun{
+#' result <- AmeriFlux_met_process(
+#' site_id = "US-Ha1",
+#' start_date = "2010-01-01",
+#' end_date = "2010-12-31",
+#' outfolder = "/path/to/output",
+#' ameriflux_username = "your_username",
+#' ameriflux_useremail = "your.email@domain.com",
+#' format = format,
+#' era5_user = "your_cds_user",
+#' era5_key = "your_cds_api_key",
+#' n_ens = 5,
+#' verbose = TRUE
+#' )
+#' }
+#'
+#' @author Akash
+#' @export
+
+AmeriFlux_met_ensemble <- function(site_id,
+ start_date,
+ end_date,
+ outfolder,
+ ameriflux_username = "pecan",
+ ameriflux_useremail = "@",
+ overwrite = FALSE,
+ verbose = FALSE,
+ format = NULL,
+ n_ens = 10,
+ w_len = 30,
+ era5_user = NULL,
+ era5_key = NULL,
+ threshold = 0.5,
+ dirs = NULL,
+ ...) {
+
+ # input validation
+ if (!grepl("@", ameriflux_useremail)) {
+ PEcAn.logger::logger.severe("ameriflux_useremail must contain '@' for AmeriFlux authentication")
+ }
+
+ if (!dir.exists(outfolder)) {
+ dir.create(outfolder, showWarnings = FALSE, recursive = TRUE)
+ }
+ # setup directory structure
+ default_paths <- list(
+ amf_downloads = file.path(outfolder, "amf_downloads"),
+ amf_extracted = file.path(outfolder, "amf_extracted"),
+ amf_cf = file.path(outfolder, "amf_cf"),
+ amf_gapfilled = file.path(outfolder, "amf_gapfilled"),
+ era5_downloads = file.path(outfolder, "era5_downloads"),
+ era5_cf = file.path(outfolder, "era5_cf"),
+ ensembles = file.path(outfolder, "ensembles")
+ )
+
+ if (!is.null(dirs)) {
+ dirs <- utils::modifyList(default_paths, dirs)
+ } else {
+ dirs <- default_paths
+ }
+
+ sapply(dirs, function(x) {
+ if (!is.null(x) && !dir.exists(x)) {
+ dir.create(x, showWarnings = FALSE, recursive = TRUE)
+ }
+ })
+
+ tryCatch({
+ # check for existing AmeriFlux data using site_id pattern
+ amf_pattern <- paste0("^AMF_", site_id, "_.*\\.csv$")
+ amf_files <- list.files(dirs$amf_downloads, pattern = amf_pattern, full.names = TRUE)
+
+ if (!overwrite && length(amf_files) > 0) {
+ if(verbose) {
+ PEcAn.logger::logger.info(paste("Found existing AmeriFlux file:", basename(amf_files[1]), "- using existing data"))
+ }
+ csv_file <- amf_files[1]
+ } else {
+ if(verbose) {
+ PEcAn.logger::logger.info(paste("Downloading AmeriFlux data for site", site_id))
+ }
+ download_results <-
+ PEcAn.data.atmosphere::download.AmerifluxLBL(
+ sitename = site_id,
+ outfolder = dirs$amf_downloads,
+ start_date = start_date,
+ end_date = end_date,
+ ameriflux_username = ameriflux_username,
+ ameriflux_useremail = ameriflux_useremail,
+ overwrite = overwrite,
+ verbose = verbose,
+ ...
+ )
+ csv_file <- download_results$file
+ }
+
+ # extract state variables
+ if(verbose) {
+ PEcAn.logger::logger.info("Extracting state variables")
+ }
+ flux_data <- utils::read.csv(
+ csv_file,
+ skip = 2,
+ na.strings = c("-9999", "NA"),
+ stringsAsFactors = FALSE
+ )
+
+ # variable patterns
+ input_names <- list(
+ datetime = c("^TIMESTAMP_START$", "^TIMESTAMP_END$"),
+ air_temp = c("^TA_", "^T_SONIC$"),
+ soil_temp = c("^TS_"),
+ soil_moisture = c("^SWC_"),
+ humidity = c("^RH_"),
+ pressure = c("^PA$"),
+ wind = c("^WS$", "^WD$"),
+ precip = c("^P$"),
+ radiation = c("^SW_IN", "^Rg", "^PPFD_IN", "^PAR")
+ )
+ selected_cols <- unique(unlist(sapply(input_names, function(p) {
+ unlist(sapply(p, function(x) grep(x, names(flux_data), value = TRUE)))
+ })))
+ state_vars <- flux_data[, selected_cols, drop = FALSE]
+ extracted_file <- file.path(dirs$amf_extracted, paste0(site_id, "_state_drivers.csv"))
+ utils::write.csv(
+ state_vars,
+ extracted_file,
+ row.names = FALSE,
+ na = "NA"
+ )
+
+ # prepare CF conversion
+ site_info <- amerifluxr::amf_site_info()
+ format$lat <- site_info$LOCATION_LAT[site_info$SITE_ID == site_id]
+ format$lon <- site_info$LOCATION_LONG[site_info$SITE_ID == site_id]
+ format$skip <- 0 # No header lines in extracted ameriflux csv
+
+ # convert to CF format
+ if(verbose) {
+ PEcAn.logger::logger.info("Converting to CF format")
+ }
+ cf_results <-
+ PEcAn.data.atmosphere::met2CF.AmerifluxLBL(
+ in.path = dirs$amf_extracted,
+ in.prefix = tools::file_path_sans_ext(basename(extracted_file)),
+ outfolder = dirs$amf_cf,
+ start_date = start_date,
+ end_date = end_date,
+ format = format,
+ overwrite = overwrite
+ )
+
+ # ERA5 fallback
+ if (verbose) PEcAn.logger::logger.info("Checking data coverage for ERA5 fallback")
+ # check coverage of radiation variables (PAR and Rg) needed for metgapfill
+ nc <- ncdf4::nc_open(cf_results$file)
+ time_dim <- ncdf4::ncvar_get(nc, "time")
+ n_total <- length(time_dim)
+ # check shortwave radiation(Rg) coverage
+ has_rg <- "surface_downwelling_shortwave_flux_in_air" %in% names(nc$var)
+ rg_coverage <- 0
+ if (has_rg) {
+ rg_data <- ncdf4::ncvar_get(nc, "surface_downwelling_shortwave_flux_in_air")
+ rg_coverage <- sum(!is.na(rg_data)) / n_total
+ }
+ # check PAR coverage
+ has_par <- "surface_downwelling_photosynthetic_photon_flux_in_air" %in% names(nc$var)
+ par_coverage <- 0
+ if (has_par) {
+ par_data <- ncdf4::ncvar_get(nc, "surface_downwelling_photosynthetic_photon_flux_in_air")
+ par_coverage <- sum(!is.na(par_data)) / n_total
+ }
+ # check soil moisture coverage since this is not filled by metgapfill
+ has_swc <- "volume_fraction_of_condensed_water_in_soil" %in% names(nc$var)
+ swc_coverage <- 0
+ if (has_swc) {
+ swc_data <- ncdf4::ncvar_get(nc, "volume_fraction_of_condensed_water_in_soil")
+ swc_coverage <- sum(!is.na(swc_data)) / n_total
+ }
+ ncdf4::nc_close(nc)
+
+ if(verbose) {
+ PEcAn.logger::logger.info(paste("Shortwave radiation (Rg) coverage:", round(rg_coverage * 100, 1), "%"))
+ PEcAn.logger::logger.info(paste("PAR coverage:", round(par_coverage * 100, 1), "%"))
+ PEcAn.logger::logger.info(paste("Soil moisture coverage:", round(swc_coverage * 100, 1), "%"))
+ }
+
+ fill_vars <- c()
+ # if BOTH PAR and Rg have insufficient coverage
+ if ((!has_rg || rg_coverage < threshold) &&
+ (!has_par || par_coverage < threshold)) {
+ fill_vars <- c(fill_vars, "surface_solar_radiation_downwards")
+ if(verbose) {
+ PEcAn.logger::logger.info("Adding shortwave radiation to ERA5 fallback (insufficient PAR and Rg coverage)")
+ }
+ }
+ # if variable exists but has ANY missing values
+ if (has_swc && swc_coverage < 1.0) {
+ fill_vars <- c(fill_vars, "volumetric_soil_water_layer_1")
+ if(verbose) {
+ PEcAn.logger::logger.info("Adding soil moisture to ERA5 fallback (missing data detected)")
+ }
+ }
+ if (length(fill_vars) > 0) {
+ start_year <- lubridate::year(as.Date(start_date))
+ end_year <- lubridate::year(as.Date(end_date))
+ req_years <- start_year:end_year
+
+ # find existing ERA5 files
+ era5_files <- list.files(dirs$era5_downloads, pattern = "^ERA5_\\d{4}\\.nc$", full.names = TRUE)
+ exist_years <- as.numeric(gsub(".*ERA5_(\\d{4})\\.nc", "\\1", basename(era5_files)))
+
+ # check which years need download
+ dl_years <- c()
+ if (overwrite) {
+ dl_years <- req_years
+ } else {
+ dl_years <- req_years[!req_years %in% exist_years]
+ era5_var_map <- list(
+ "surface_solar_radiation_downwards" = "ssrd",
+ "volumetric_soil_water_layer_1" = "swvl1"
+ )
+ for (f in era5_files) {
+ year <- as.numeric(gsub(".*ERA5_(\\d{4})\\.nc", "\\1", basename(f)))
+ if (year %in% req_years) {
+ tryCatch({
+ nc <- ncdf4::nc_open(f)
+ avail_vars <- names(nc$var)
+ ncdf4::nc_close(nc)
+ req_vars <- sapply(fill_vars, function(v) era5_var_map[[v]])
+ miss_vars <- req_vars[!req_vars %in% avail_vars]
+ if (length(miss_vars) > 0) {
+ dl_years <- c(dl_years, year)
+ if(verbose) {
+ PEcAn.logger::logger.info(paste("ERA5", year, "missing vars:", paste(miss_vars, collapse=", ")))
+ }
+ }
+ }, error = function(e) {
+ dl_years <<- c(dl_years, year)
+ if(verbose) PEcAn.logger::logger.warn(paste("Cannot read ERA5", year, "- redownloading"))
+ })
+ }
+ }
+ }
+ dl_years <- unique(dl_years)
+ if (length(dl_years) == 0) {
+ if(verbose) PEcAn.logger::logger.info("All ERA5 files exist with required variables")
+ } else {
+ if(verbose) {
+ PEcAn.logger::logger.info(paste("Downloading ERA5 for years:", paste(sort(dl_years), collapse=", ")))
+ }
+
+ dl_start_date <- paste0(min(dl_years), "-01-01")
+ dl_end_date <- paste0(max(dl_years), "-12-31")
+ lat <- format$lat
+ lon <- format$lon
+
+ era5_files <-
+ PEcAn.data.atmosphere::download.ERA5_cds(
+ outfolder = dirs$era5_downloads,
+ start_date = dl_start_date,
+ end_date = dl_end_date,
+ extent = c(lon - 0.375, lon + 0.375, lat - 0.375, lat + 0.375), # 3*3 grid
+ variables = fill_vars,
+ product_type = "reanalysis",
+ user = era5_user,
+ key = era5_key
+ )
+ }
+ if(verbose) {
+ PEcAn.logger::logger.info("Processing ERA5 data to CF format")
+ }
+ era5_cf_dirs <-
+ PEcAn.data.atmosphere::extract.nc.ERA5(
+ slat = format$lat,
+ slon = format$lon,
+ in.path = dirs$era5_downloads,
+ start_date = start_date,
+ end_date = end_date,
+ outfolder = dirs$era5_cf,
+ in.prefix = "ERA5_",
+ newsite = site_id,
+ overwrite = TRUE,
+ verbose = verbose
+ )
+ # merge ERA5 data with AmeriFlux CF file
+ if(verbose) {
+ PEcAn.logger::logger.info("Merging ERA5 data with AmeriFlux data")
+ }
+ era5_cf_file <- list.files(era5_cf_dirs[[1]], pattern = "\\.nc$", full.names = TRUE)[1]
+ # variable mapping from ERA5 to CF names
+ era5_map <- list(
+ "surface_solar_radiation_downwards" = "surface_downwelling_shortwave_flux_in_air",
+ "volumetric_soil_water_layer_1" = "volume_fraction_of_condensed_water_in_soil"
+ )
+
+ nc_amf <- ncdf4::nc_open(cf_results$file, write = TRUE)
+ nc_era5 <- ncdf4::nc_open(era5_cf_file)
+ tryCatch({
+ amf_time <- ncdf4::ncvar_get(nc_amf, "time")
+ era5_time <- ncdf4::ncvar_get(nc_era5, "time")
+ # convert AmeriFlux time (days since 1700-01-01) to seconds since 1970-01-01
+ amf_time_sec <- as.numeric(as.POSIXct(amf_time * 86400, origin = "1700-01-01", tz = "UTC"))
+ for (era5_var in fill_vars) {
+ cf_var <- era5_map[[era5_var]]
+ if (!cf_var %in% names(nc_era5$var)) {
+ if(verbose) PEcAn.logger::logger.warn(paste("ERA5 variable not found:", cf_var))
+ next
+ }
+ era5_data <- ncdf4::ncvar_get(nc_era5, cf_var)
+ if (cf_var %in% names(nc_amf$var)) {
+ amf_data <- ncdf4::ncvar_get(nc_amf, cf_var)
+
+ na_idx <- which(is.na(amf_data))
+ if (length(na_idx) > 0) {
+ era5_interp <- stats::approx(era5_time, era5_data,
+ xout = amf_time_sec[na_idx],
+ rule = 2)$y
+
+ # fill missing values with interpolated ERA5 data
+ amf_data[na_idx] <- era5_interp
+ ncdf4::ncvar_put(nc_amf, cf_var, amf_data)
+
+ if(verbose) {
+ filled_count <- length(na_idx)
+ PEcAn.logger::logger.info(paste("Filled", filled_count, "missing values for", cf_var, "using ERA5 data"))
+ PEcAn.logger::logger.info(paste("Interpolated range:", paste(range(era5_interp, na.rm=TRUE), collapse=" to ")))
+ }
+ }
+ } else {
+ if(verbose) PEcAn.logger::logger.info(paste("Adding new variable from ERA5:", cf_var))
+ lat_dim <- nc_amf$dim$latitude
+ lon_dim <- nc_amf$dim$longitude
+ time_dim <- nc_amf$dim$time
+ var_units <- nc_era5$var[[cf_var]]$units
+ new_var <- ncdf4::ncvar_def(name = cf_var, units = var_units,
+ dim = list(lon_dim, lat_dim, time_dim),
+ missval = -999)
+
+ nc_amf <- ncdf4::ncvar_add(nc_amf, new_var)
+ era5_interp <- stats::approx(era5_time, era5_data,
+ xout = amf_time_sec,
+ rule = 2)$y
+ ncdf4::ncvar_put(nc_amf, cf_var, era5_interp)
+
+ if(verbose) {
+ PEcAn.logger::logger.info(paste("Added complete", cf_var, "variable from ERA5 data"))
+ PEcAn.logger::logger.info(paste("Added data range:", paste(range(era5_interp, na.rm=TRUE), collapse=" to ")))
+ }
+ }
+ }
+ }, finally = {
+ ncdf4::nc_close(nc_amf)
+ ncdf4::nc_close(nc_era5)
+ })
+ }
+
+ # gap filling
+ if(verbose) {
+ PEcAn.logger::logger.info("Running gap filling")
+ }
+ gapfill_results <-
+ PEcAn.data.atmosphere::metgapfill(
+ in.path = dirs$amf_cf,
+ in.prefix = sub("\\.\\d+$", "", tools::file_path_sans_ext(basename(cf_results$file))),
+ outfolder = dirs$amf_gapfilled,
+ start_date = start_date,
+ end_date = end_date,
+ overwrite = overwrite
+ )
+
+ tryCatch({
+ # remove extra variables from gapfilled file that are not in CF file
+ gapfill_file <- gapfill_results$file
+ nc_cf <- ncdf4::nc_open(cf_results$file)
+ cf_vars <- names(nc_cf$var)
+ ncdf4::nc_close(nc_cf)
+ nc_gap <- ncdf4::nc_open(gapfill_file)
+ gap_vars <- names(nc_gap$var)
+ extra_vars <- setdiff(gap_vars, cf_vars)
+ ncdf4::nc_close(nc_gap)
+
+ if (length(extra_vars) > 0) {
+ if (verbose) {
+ PEcAn.logger::logger.info(paste("removing variables from gapfill file:",
+ paste(extra_vars, collapse = ", ")))
+ }
+ temp_file <- tempfile(tmpdir = dirs$amf_gapfilled, fileext = ".nc")
+ nc_in <- ncdf4::nc_open(gapfill_file)
+ nc_out <- ncdf4::nc_create(
+ temp_file,
+ vars = nc_in$var[setdiff(names(nc_in$var), extra_vars)],
+ force_v4 = TRUE
+ )
+ global_atts <- ncdf4::ncatt_get(nc_in, 0)
+ for (att in names(global_atts)) {
+ ncdf4::ncatt_put(nc_out, 0, att, global_atts[[att]])
+ }
+ for (dim in names(nc_in$dim)) {
+ if (!dim %in% names(nc_out$dim)) {
+ ncdf4::ncvar_add(nc_out, nc_in$dim[[dim]])
+ }
+ }
+ for (v in names(nc_out$var)) {
+ data <- ncdf4::ncvar_get(nc_in, v)
+ ncdf4::ncvar_put(nc_out, v, data)
+ var_atts <- ncdf4::ncatt_get(nc_in, v)
+ for (att in names(var_atts)) {
+ ncdf4::ncatt_put(nc_out, v, att, var_atts[[att]])
+ }
+ }
+ ncdf4::nc_close(nc_in)
+ ncdf4::nc_close(nc_out)
+ file.remove(gapfill_file)
+ file.rename(temp_file, gapfill_file)
+ }
+ }, error = function(e) {
+ if (file.exists(temp_file)) file.remove(temp_file)
+ PEcAn.logger::logger.severe("variable filtering failed:", e$message)
+ })
+
+ # generate ensembles
+ if(verbose) {
+ PEcAn.logger::logger.info(paste("Generating", n_ens, "ensemble members"))
+ }
+ ensemble_results <-
+ PEcAn.data.atmosphere::met_temporal_downscale.Gaussian_ensemble(
+ in.path = dirs$amf_gapfilled,
+ in.prefix = sub("\\.\\d+$", "", tools::file_path_sans_ext(basename(gapfill_results$file))),
+ outfolder = dirs$ensembles,
+ input_met = gapfill_file,
+ train_met = gapfill_file,
+ overwrite = overwrite,
+ verbose = verbose,
+ n_ens = n_ens,
+ w_len = w_len,
+ force_v4 = TRUE
+ )
+
+ # return ensemble paths with metadata
+ results <- do.call(rbind, lapply(seq_along(ensemble_results), function(e) {
+ data.frame(
+ file = ensemble_results[[e]]$file,
+ host = rep(PEcAn.remote::fqdn(), 1),
+ mimetype = "application/x-netcdf",
+ formatname = "CF Meteorology",
+ startdate = format(as.Date(start_date), "%Y-01-01 00:00:00"),
+ enddate = format(as.Date(end_date), "%Y-12-31 23:59:59"),
+ dbfile.name = paste0(site_id, ".AmeriFlux.ens", e),
+ stringsAsFactors = FALSE
+ )
+ }))
+ if(verbose) PEcAn.logger::logger.info("Processing complete")
+ return(results)
+
+ }, error = function(e) {
+ PEcAn.logger::logger.severe("Processing failed: ", e$message)
+ return(NULL)
+ })
+}
\ No newline at end of file
diff --git a/modules/data.atmosphere/R/ERA5_download.R b/modules/data.atmosphere/R/ERA5_download.R
index 9cbb990de71..0014b17e7ad 100644
--- a/modules/data.atmosphere/R/ERA5_download.R
+++ b/modules/data.atmosphere/R/ERA5_download.R
@@ -1,133 +1,149 @@
+#' Download ERA5 Climate Data from the Copernicus CDS API
+#'
#' @description
-#' This function helps to download the yearly ERA5 data based on the prescribed features using the CDS API.
-#' @title ERA5_cds_annual_download
-#'
-#' @param outfolder Character: physical path where the ERA5 data are stored.
+#' Download ERA5 climate data from the Copernicus Climate Data Store (CDS) API as NetCDF files, year by year, according to user-specified parameters.
+#' The function saves one NetCDF file per year in the specified output directory.
+#'
+#' @details
+#' This function requires a valid CDS API key and the \code{ecmwfr} package for accessing the Copernicus Climate Data Store.
+#' To get a Copernicus CDS API key, register at \url{https://cds.climate.copernicus.eu/profile}.
+#' You must provide both \code{user} (UID) and \code{key} parameters from your CDS profile.
+#'
+#' You can check the "CC-BY" license under the \href{https://cds.climate.copernicus.eu/profile?tab=licences}{'licences' tab of your profile page}.
+#' @param outfolder Character. Directory where downloaded NetCDF files will be saved.
#' @param start_date character: the start date of the data to be downloaded. Format is YYYY-MM-DD (will only use the year part of the date)
#' @param end_date character: the end date of the data to be downloaded. Format is YYYY-MM-DD (will only use the year part of the date)
-#' @param extent numeric: a vector of numbers contains the bounding box (formatted as xmin, xmax, ymin, ymax) to be downloaded.
+#' @param extent numeric: a vector of numbers contains the bounding box (formatted as xmin, xmax, ymin, ymax) (longitude and latitude in degrees).
#' @param variables character: a vector contains variables to be downloaded (e.g., c("2m_temperature","surface_pressure")).
-#' @param auto.create.key Boolean: decide if we want to generate the CDS RC file if it doesn't exist, the default is TRUE.
+#' @param time Character vector or NULL. Hours of the day to download (e.g., c("00:00", "12:00")). Defaults to NULL, which downloads all hours.
+#' @param dataset Character. Name of the CDS dataset to use (default: "reanalysis-era5-single-levels").
+#' @param product_type Character. Product type to request from CDS (default: "ensemble_members").
+#' @param user Character. CDS user ID (UID) from your CDS profile. Required for authentication.
+#' @param key Character. CDS API key from your CDS profile. Required for authentication.
#' @param timeout numeric: the maximum time (in seconds) allowed to download the data. The default is 36000 seconds.
#'
-#' @return A vector containing file paths to the downloaded files.
+#' @return
+#' A list where each element is a list containing:
+#' \item{file}{File path to the downloaded NetCDF file.}
+#' \item{host}{Host name where the file was downloaded.}
+#' \item{startdate}{Start date and time of the data in the file.}
+#' \item{enddate}{End date and time of the data in the file.}
+#' \item{mimetype}{MIME type of the file ("application/x-netcdf").}
+#' \item{formatname}{Format name ("ERA5_year.nc").}
+#'
+#' @examples
+#' \dontrun{
+#' # Download ERA5 reanalysis data for 2020
+#' output_dir <- withr::local_tempdir()
+#' era5_files <- download.ERA5_cds(
+#' outfolder = output_dir,
+#' start_date = "2020-01-01",
+#' end_date = "2020-06-30",
+#' extent = c(-72.2215, -72.1215, 42.4878, 42.5878),
+#' variables = c("2m_temperature", "surface_pressure"),
+#' user = "your_cds_user_id",
+#' key = "your_cds_api_key",
+#' product_type = "reanalysis"
+#' )
+#'
+#' # Download ensemble data for specified hours only
+#' era5_files <- download.ERA5_cds(
+#' outfolder = output_dir,
+#' start_date = "2020-01-01",
+#' end_date = "2020-12-31",
+#' extent = c(-83.05, -82.95, 42.95, 43.05),
+#' variables = "surface_solar_radiation_downwards",
+#' user = "your_cds_user_id",
+#' key = "your_cds_api_key",
+#' time = c("00:00", "12:00")
+#' )
+#' }
#' @export
#'
-#' @importFrom purrr %>%
-#' @author Dongchen Zhang
-download.ERA5_cds <- function(outfolder, start_date, end_date, extent, variables, auto.create.key = T, timeout = 36000) {
- # check shell environments.
- if ("try-error" %in% class(try(system("grib_to_netcdf"), silent = T))) {
- PEcAn.logger::logger.info("The grib_to_netcdf function is not detected in shell command.")
- return(NA)
+#' @author Dongchen Zhang, Akash
+
+download.ERA5_cds <- function(outfolder, start_date, end_date,
+ extent, variables, user, key, time = NULL,
+ dataset = "reanalysis-era5-single-levels",
+ product_type = "ensemble_members",
+ timeout = 36000) {
+
+ # check for required package
+ if (!requireNamespace("ecmwfr", quietly = TRUE)) {
+ PEcAn.logger::logger.severe(
+ "Package 'ecmwfr' is required for ERA5 downloads. ",
+ "Install with: install.packages('ecmwfr'). ",
+ "Get CDS credentials from: https://cds.climate.copernicus.eu/profile"
+ )
}
+
+ if (!dir.exists(outfolder)) dir.create(outfolder, recursive = TRUE)
+
# setup timeout for download.
options(timeout=timeout)
# convert arguments to CDS API specific arguments.
years <- sort(unique(lubridate::year(seq(lubridate::date(start_date), lubridate::date(end_date), "1 year"))))
- months <- sort(unique(lubridate::month(seq(lubridate::date(start_date), lubridate::date(end_date), "1 month")))) %>%
+ months <- sort(unique(lubridate::month(seq(lubridate::date(start_date), lubridate::date(end_date), "1 month")))) |>
purrr::map(function(d)sprintf("%02d", d))
- days <- sort(unique(lubridate::day(seq(lubridate::date(start_date), lubridate::date(end_date), "1 day")))) %>%
+ days <- sort(unique(lubridate::day(seq(lubridate::date(start_date), lubridate::date(end_date), "1 day")))) |>
purrr::map(function(d)sprintf("%02d", d))
- times <- list('00:00','03:00','06:00',
- '09:00','12:00','15:00',
- '18:00','21:00')
- area <- paste(c(extent[4], extent[1], extent[3], extent[2]), collapse = "/")
- variables <- as.list(variables)
- #load cdsapi from python environment.
- tryCatch({
- cdsapi <- reticulate::import("cdsapi")
- }, error = function(e) {
- PEcAn.logger::logger.severe(
- "Failed to load `cdsapi` Python library. ",
- "Please make sure it is installed to a location accessible to `reticulate`.",
- "You should be able to install it with the following command: ",
- "`pip install --user cdsapi`.",
- "The following error was thrown by `reticulate::import(\"cdsapi\")`: ",
- conditionMessage(e)
- )
- })
- #define function for building credential file.
- #maybe as a helper function.
- getnetrc <- function (dl_dir) {
- netrc <- file.path(dl_dir, ".cdsapirc")
- if (file.exists(netrc) == FALSE ||
- any(grepl("https://cds.climate.copernicus.eu/api/v2",
- readLines(netrc))) == FALSE) {
- netrc_conn <- file(netrc)
- writeLines(c(
- sprintf(
- "url: %s",
- getPass::getPass(msg = "Enter URL from the following link \n (https://cds.climate.copernicus.eu/api-how-to#install-the-cds-api-key):")
- ),
- sprintf(
- "key: %s",
- getPass::getPass(msg = "Enter KEY from the following link \n (https://cds.climate.copernicus.eu/api-how-to#install-the-cds-api-key):")
- )
- ),
- netrc_conn)
- close(netrc_conn)
- message(
- "A netrc file with your CDS Login credentials was stored in the output directory "
- )
- }
- return(netrc)
+
+ # handle time argument: download all 24 hours if time is NULL
+ if (is.null(time)) {
+ times <- sprintf("%02d:00", 0:23)
+ } else {
+ times <- time
}
- #check if the token exists for the cdsapi.
- if (!file.exists(file.path(Sys.getenv("HOME"), ".cdsapirc")) & auto.create.key) {
- if ("try-error" %in% class(try(find.package("getPass")))) {
- PEcAn.logger::logger.info("The getPass pacakge is not installed for creating the API key.")
- return(NA)
- } else {
- getnetrc(Sys.getenv("HOME"))
- }
- } else if (!file.exists(file.path(Sys.getenv("HOME"), ".cdsapirc")) & !auto.create.key) {
+
+ # Format area for CDS API (North, West, South, East)
+ area <- round(c(extent[4], extent[1], extent[3], extent[2]), 2)
+ variables <- as.list(variables)
+
+ # Set CDS credentials
+ if (is.null(user) || is.null(key)) {
PEcAn.logger::logger.severe(
- "Please create a `${HOME}/.cdsapirc` file as described here:",
- "https://cds.climate.copernicus.eu/api-how-to#install-the-cds-api-key ."
+ "CDS 'user' and 'key' must be provided. ",
+ "Get them from: https://cds.climate.copernicus.eu/profile"
)
}
- #grab the client object.
- tryCatch({
- c <- cdsapi$Client()
- }, error = function(e) {
- PEcAn.logger::logger.severe(
- "The following error was thrown by `cdsapi$Client()`: ",
- conditionMessage(e)
- )
- })
+ ecmwfr::wf_set_key(user = user, key = key)
+
# loop over years.
nc.paths <- c()
for (y in years) {
- fname <- file.path(outfolder, paste0("ERA5_", y, ".grib"))
- # start retrieving data.
- # you need to have an account for downloaing the files
- # Read the documantion for how to setup your account and settings before trying this
- # https://confluence.ecmwf.int/display/CKB/How+to+download+ERA5#HowtodownloadERA5-3-DownloadERA5datathroughtheCDSAPI
- c$retrieve(
- 'reanalysis-era5-single-levels',
- list(
- 'product_type' = 'ensemble_members',
- 'data_format' = 'grib',
- "download_format" = "unarchived",
- 'day' = days,
- 'time' = times,
- 'month' = months,
- 'year' = as.character(y),
- "area" = area,
- 'variable' = variables
- ),
- fname
+ fname <- file.path(outfolder, paste0("ERA5_", y, ".nc"))
+
+ request <- list(
+ dataset_short_name = dataset,
+ product_type = list(product_type),
+ data_format = 'netcdf',
+ download_format = "unarchived",
+ day = days,
+ time = times,
+ month = months,
+ year = list(as.character(y)),
+ area = area,
+ variable = variables,
+ target = basename(fname)
)
- # convert grib to nc file.
- nc.path <- gsub(".grib", ".nc", fname, fixed = T)
- cmd <- paste("grib_to_netcdf", fname, "-o", nc.path)
- out <- system(cmd, intern = F, ignore.stdout = T, ignore.stderr = T)
- # store the path.
- nc.paths <- c(nc.paths, nc.path)
- # remove previous grib file.
- unlink(fname)
+
+ # Submit request using ecmwfr
+ tryCatch({
+ ecmwfr::wf_request(
+ request = request,
+ user = user,
+ path = outfolder,
+ time_out = timeout
+ )
+ nc.paths <- c(nc.paths, fname)
+ }, error = function(e) {
+ PEcAn.logger::logger.error(
+ "Failed to download data for year ", y, ": ",
+ conditionMessage(e)
+ )
+ })
}
+
# construct results to meet the requirements of pecan.met workflow.
results <- vector("list", length = length(years))
for (i in seq_along(results)) {
diff --git a/modules/data.atmosphere/R/ERA5_met_process.R b/modules/data.atmosphere/R/ERA5_met_process.R
index 84f76b42ed3..34671f72b18 100644
--- a/modules/data.atmosphere/R/ERA5_met_process.R
+++ b/modules/data.atmosphere/R/ERA5_met_process.R
@@ -5,22 +5,18 @@
#' @param out.path output path
#' @param write.db if write into Bety database
#' @param write if write the settings into pecan.xml file in the outdir of settings.
+#' @param ncores numeric: the number of CPUs for the parallel compute. Default is 1.
#'
#' @return if write.db is True then return input IDs with physical paths; if write.db is False then return just physical paths of extracted ERA5 clim files.
#' @export
#'
#' @author Dongchen Zhang
#' @importFrom dplyr %>%
-#'
-ERA5_met_process <- function(settings, in.path, out.path, write.db=FALSE, write = TRUE){
- #Initialize the multicore computation.
- if (future::supportsMulticore()) {
- future::plan(future::multicore)
- } else {
- future::plan(future::multisession)
- }
-
+#' @importFrom foreach %dopar%
+ERA5_met_process <- function(settings, in.path, out.path, write.db=FALSE, write = TRUE, ncores = 1){
#getting site info
+ start_date <- settings$state.data.assimilation$start.date
+ end_date <- settings$state.data.assimilation$end.date
#grab the site info from Bety DB if we can't get the site info directly from the settings object.
if ("try-error" %in% class(try(site_info <- settings %>%
purrr::map(~.x[['run']] ) %>%
@@ -29,7 +25,7 @@ ERA5_met_process <- function(settings, in.path, out.path, write.db=FALSE, write
#conversion from string to number
site.list$lat <- as.numeric(site.list$lat)
site.list$lon <- as.numeric(site.list$lon)
- list(site_id=site.list$id, lat=site.list$lat, lon=site.list$lon, site_name=site.list$name)
+ list(site.id=site.list$id, lat=site.list$lat, lon=site.list$lon, site_name=site.list$name)
})%>%
dplyr::bind_rows() %>%
as.list()))) {
@@ -39,7 +35,6 @@ ERA5_met_process <- function(settings, in.path, out.path, write.db=FALSE, write
obs <- settings[[i]]$run$site$id
observations <- c(observations,obs)
}
-
#query site info
bety <- dplyr::src_postgres(dbname = settings$database$bety$dbname,
host = settings$database$bety$host,
@@ -54,7 +49,6 @@ ERA5_met_process <- function(settings, in.path, out.path, write.db=FALSE, write
site_info <- list(site_id=qry_results$id, site_name=qry_results$sitename, lat=qry_results$lat,
lon=qry_results$lon, time_zone=qry_results$time_zone)
}
-
#initialize db query elements
if(write.db){
mimetype <- "application/x-netcdf"
@@ -62,7 +56,6 @@ ERA5_met_process <- function(settings, in.path, out.path, write.db=FALSE, write
hostname <- PEcAn.remote::fqdn()
# find mimetype, if it does not exist, it will create one
mimetypeid <- PEcAn.DB::get.id("mimetypes", "type_string", mimetype, con, create = TRUE)
-
# find appropriate format, create if it does not exist
formatid <- PEcAn.DB::get.id(
table = "formats",
@@ -72,77 +65,53 @@ ERA5_met_process <- function(settings, in.path, out.path, write.db=FALSE, write
create = TRUE,
dates = TRUE
)
-
# setup parent part of query if specified
parent <- ""
-
#initialize Input_IDs object when looping over each site
Input_IDs <- list()
}
-
- #restructure the site_info into list.
- site_info$start_date <- start_date <- rep(settings$state.data.assimilation$start.date, length(settings))
- site_info$end_date <- end_date <- rep(settings$state.data.assimilation$end.date, length(settings))
- site_info$out.path <- rep(out.path, length(settings))
- site_info$in.path <- rep(in.path, length(settings))
- site_info$model.type <- rep(settings$model$type, length(settings))
- new.site.info <- split(as.data.frame(site_info), seq(nrow(as.data.frame(site_info))))
-
- #Extract ERA5 for each site.
+ # Extract ERA5 nc files.
PEcAn.logger::logger.info("Started extracting ERA5 data!\n")
- Clim_paths <- furrr::future_map(new.site.info, function(site){
- #check if sub-folder exists, if doesn't then create a new folder specific for each site
- site_outFolder <- paste0(site$out.path,'/', site$site.id)
- #check if folder already exists, if it does, then jump to the next loop
- if(!file.exists(site_outFolder)){
- dir.create(site_outFolder)
- }else{
- #grab physical paths of existing ERA5 files
- #need to be generalized when more models come in.
- clim.paths <- list(in.path=list.files(path=site_outFolder, pattern = '*.clim', full.names = T))
- names(clim.paths) <- site$site.id
- return(clim.paths)
- }
-
- #extract ERA5.nc files
- PEcAn.data.atmosphere::extract.nc.ERA5(slat = site$lat,
- slon = site$lon,
- in.path = site$in.path,
- start_date = site$start_date,
- end_date = site$end_date,
- outfolder = site_outFolder,
- in.prefix = 'ERA5_',
- newsite = as.character(site$site.id))
-
- #starting working on met2model.model function over each ensemble
- #setting up met2model function depending on model name from settings
- met2model_method <- do.call("::", list(paste0("PEcAn.", site$model.type), paste0("met2model.", site$model.type)))
- #grab the rbind.xts function
- rbind.xts <- do.call("::", list("xts", "rbind.xts"))
- #find every path associated with each ensemble member
- ens_nc <- list.files(path = site_outFolder, full.names = T)
- #loop over each ensemble member
- for (i in 1:length(ens_nc)) {
- nc_path <- ens_nc[i]
-
- #find a proper in prefix for each ensemble member
- ens_num <- strsplit(basename(nc_path),"_")[[1]][3]
- in_prefix <- paste0("ERA5.", ens_num)
-
- #preparing for the met2model.SIPNET function
- met2model_method(in.path = nc_path,
- in.prefix = in_prefix,
- outfolder = site_outFolder,
- start_date = site$start_date,
- end_date = site$end_date)
- }
- # grab physical paths of ERA5 files
- clim.paths <- list(in.path=list.files(path=site_outFolder, pattern = '*.clim', full.names = T))
- names(clim.paths) <- site$site.id
- return(clim.paths)
- }, .progress = TRUE)
+ final.nc.files <- extract.nc.ERA5(site_info$lat,
+ site_info$lon,
+ in.path,
+ start_date,
+ end_date,
+ out.path,
+ "ERA5_",
+ site_info$site.id,
+ ncores)
+ #Writing CLIM files for each site.
+ PEcAn.logger::logger.info("Writing CLIM files!\n")
+ # initialize parallel.
+ cl <- parallel::makeCluster(ncores)
+ on.exit(parallel::stopCluster(cl), add = TRUE)
+ doSNOW::registerDoSNOW(cl)
+ # setup progress bar.
+ pb <- utils::txtProgressBar(min=1, max=length(final.nc.files), style=3)
+ on.exit(close(pb), add = TRUE)
+ progress <- function(n) utils::setTxtProgressBar(pb, n)
+ opts <- list(progress=progress)
+ # grab specific model function.
+ met2model_method <- do.call("::", list(paste0("PEcAn.", settings$model$type), paste0("met2model.", settings$model$type)))
+ pack.name <- paste0("PEcAn.", settings$model$type)
+ ens.folders <- NULL
+ Clim_paths <-
+ foreach::foreach(ens.folders = final.nc.files,
+ .packages=c("Kendall", pack.name),
+ .options.snow=opts) %dopar% {
+ ensemble.clim.files <- c()
+ for (ens in seq_along(ens.folders)) {
+ out <- met2model_method(in.path = ens.folders[ens],
+ in.prefix = paste0("ERA5.", ens),
+ outfolder = ens.folders[ens],
+ start_date = start_date,
+ end_date = end_date)
+ ensemble.clim.files <- c(ensemble.clim.files, out$file)
+ }
+ ensemble.clim.files
+ }
PEcAn.logger::logger.info("\nFinished!")
-
#write the paths into settings.
if (write) {
#write paths into settings.
@@ -156,12 +125,10 @@ ERA5_met_process <- function(settings, in.path, out.path, write.db=FALSE, write
settings[[i]]$state.data.assimilation$end.date
settings[[i]]$run$inputs$met$path <- as.list(unlist(Clim_paths[[i]])) %>% purrr::set_names(rep("path", length(Clim_paths[[i]])))
}
-
#write settings into xml file.
PEcAn.logger::logger.info(paste0("Write updated pecan.xml file into: ", file.path(settings$outdir, "pecan.xml")))
PEcAn.settings::write.settings(settings, outputfile = "pecan.xml")
}
-
#write into bety
if(write.db){
PEcAn.logger::logger.info("Write into database!")
diff --git a/modules/data.atmosphere/R/GEFS_helper_functions.R b/modules/data.atmosphere/R/GEFS_helper_functions.R
index 754580ae0da..ee6af1064c2 100644
--- a/modules/data.atmosphere/R/GEFS_helper_functions.R
+++ b/modules/data.atmosphere/R/GEFS_helper_functions.R
@@ -7,103 +7,21 @@
#' @param model_name_raw model name for directory creation
#' @param end_hr end hr to determine how many hours to download
#' @param output_directory output directory
-#'
+#'
#' @export
#'
#' @return NA
#'
noaa_grid_download <- function(lat_list, lon_list, forecast_time, forecast_date, model_name_raw, output_directory, end_hr) {
-
-
- download_grid <- function(ens_index, location, directory, hours_char, cycle, base_filename1, vars,working_directory){
- #for(j in 1:31){
- if(ens_index == 1){
- base_filename2 <- paste0("gec00",".t",cycle,"z.pgrb2a.0p50.f")
- curr_hours <- hours_char[hours <= 384]
- }else{
- if((ens_index-1) < 10){
- ens_name <- paste0("0",ens_index - 1)
- }else{
- ens_name <- as.character(ens_index -1)
- }
- base_filename2 <- paste0("gep",ens_name,".t",cycle,"z.pgrb2a.0p50.f")
- curr_hours <- hours_char
- }
-
-
- for(i in 1:length(curr_hours)){
- file_name <- paste0(base_filename2, curr_hours[i])
-
- destfile <- paste0(working_directory,"/", file_name,".grib")
-
- if(file.exists(destfile)){
-
- fsz <- file.info(destfile)$size
- gribf <- file(destfile, "rb")
- fsz4 <- fsz-4
- seek(gribf,where = fsz4,origin = "start")
- last4 <- readBin(gribf,"raw",4)
- if(as.integer(last4[1])==55 & as.integer(last4[2])==55 & as.integer(last4[3])==55 & as.integer(last4[4])==55) {
- download_file <- FALSE
- } else {
- download_file <- TRUE
- }
- close(gribf)
-
- }else{
- download_file <- TRUE
- }
-
- if(download_file){
-
- out <- tryCatch(utils::download.file(paste0(base_filename1, file_name, vars, location, directory),
- destfile = destfile, quiet = TRUE),
- error = function(e){
- warning(paste(e$message, "skipping", file_name),
- call. = FALSE)
- return(NA)
- },
- finally = NULL)
-
- if(is.na(out)) next
- }
- }
- }
-
model_dir <- file.path(output_directory, model_name_raw)
+ #Availability: most recent 4 days
curr_time <- lubridate::with_tz(Sys.time(), tzone = "UTC")
curr_date <- lubridate::as_date(curr_time)
-
- noaa_page <- readLines('https://nomads.ncep.noaa.gov/pub/data/nccf/com/gens/prod/')
-
- potential_dates <- NULL
- for(i in 1:length(noaa_page)){
- if(stringr::str_detect(noaa_page[i], ">gefs.")){
- end <- stringr::str_locate(noaa_page[i], ">gefs.")[2]
- dates <- stringr::str_sub(noaa_page[i], start = end+1, end = end+8)
- potential_dates <- c(potential_dates, dates)
- }
- }
-
-
- last_cycle_page <- readLines(paste0('https://nomads.ncep.noaa.gov/pub/data/nccf/com/gens/prod/gefs.', dplyr::last(potential_dates)))
-
- potential_cycle <- NULL
- for(i in 1:length(last_cycle_page)){
- if(stringr::str_detect(last_cycle_page[i], 'href=\"')){
- end <- stringr::str_locate(last_cycle_page[i], 'href=\"')[2]
- cycles <- stringr::str_sub(last_cycle_page[i], start = end+1, end = end+2)
- if(cycles %in% c("00","06", "12", "18")){
- potential_cycle <- c(potential_cycle, cycles)
- }
- }
- }
-
- potential_dates <- lubridate::as_date(potential_dates)
-
- potential_dates = potential_dates[which(potential_dates == forecast_date)]
+ potential_dates <- curr_date - lubridate::days(3:0)
+
+ potential_dates <- potential_dates[which(potential_dates == forecast_date)]
if(length(potential_dates) == 0){PEcAn.logger::logger.error("Forecast Date not available")}
@@ -118,7 +36,10 @@ noaa_grid_download <- function(lat_list, lon_list, forecast_time, forecast_date,
floor(min(lat_list)))
base_filename1 <- "https://nomads.ncep.noaa.gov/cgi-bin/filter_gefs_atmos_0p50a.pl?file="
- vars <- "&lev_10_m_above_ground=on&lev_2_m_above_ground=on&lev_surface=on&lev_entire_atmosphere=on&var_APCP=on&var_DLWRF=on&var_DSWRF=on&var_PRES=on&var_RH=on&var_TMP=on&var_UGRD=on&var_VGRD=on&var_TCDC=on"
+ vars <- paste0(
+ "&lev_10_m_above_ground=on&lev_2_m_above_ground=on&lev_surface=on&lev_entire_atmosphere=on",
+ "&var_APCP=on&var_DLWRF=on&var_DSWRF=on&var_PRES=on&var_RH=on&var_TMP=on",
+ "&var_UGRD=on&var_VGRD=on&var_TCDC=on")
for(i in 1:length(potential_dates)){
@@ -138,37 +59,29 @@ noaa_grid_download <- function(lat_list, lon_list, forecast_time, forecast_date,
new_download <- TRUE
- if(new_download){
-
+ if (new_download) {
print(paste("Downloading", forecast_date, cycle))
-
- if(cycle == "00"){
- hours <- c(seq(0, 240, 3),seq(246, 384, 6))
- hours <- hours[hours<=end_hr]
- }else{
- hours <- c(seq(0, 240, 3),seq(246, min(end_hr, 840) , 6))
+
+ if (cycle == "00") { # 35 days of forecast
+ hours <- c(seq(0, 240, by = 3), seq(246, 840, by = 6))
+ } else { # 16 days of forecast
+ hours <- c(seq(0, 240, by = 3), seq(246, 384, by = 6))
}
- hours_char <- hours
- hours_char[which(hours < 100)] <- paste0("0",hours[which(hours < 100)])
- hours_char[which(hours < 10)] <- paste0("0",hours_char[which(hours < 10)])
- curr_year <- lubridate::year(forecast_date)
- curr_month <- lubridate::month(forecast_date)
- if(curr_month < 10) curr_month <- paste0("0",curr_month)
- curr_day <- lubridate::day(forecast_date)
- if(curr_day < 10) curr_day <- paste0("0",curr_day)
- curr_date <- paste0(curr_year,curr_month,curr_day)
- directory <- paste0("&dir=%2Fgefs.",curr_date,"%2F",cycle,"%2Fatmos%2Fpgrb2ap5")
-
+ hours <- hours[hours <= end_hr]
+ hours_char <- sprintf("%03i", hours)
+ curr_date <- format(forecast_date, "%Y%m%d")
+ directory <- paste0("&dir=%2Fgefs.", curr_date, "%2F", cycle, "%2Fatmos%2Fpgrb2ap5")
+
ens_index <- 1:31
parallel::mclapply(X = ens_index,
FUN = download_grid,
- location,
- directory,
- hours_char,
- cycle,
- base_filename1,
- vars,
+ location = location,
+ directory = directory,
+ hours_char = hours_char,
+ cycle = cycle,
+ base_filename1 = base_filename1,
+ vars = vars,
working_directory = model_date_hour_dir,
mc.cores = 1)
}else{
@@ -177,6 +90,66 @@ noaa_grid_download <- function(lat_list, lon_list, forecast_time, forecast_date,
}
}
}
+
+
+#' Download all requested timepoints of one GEFS ensemble member
+#'
+#' @param ens_index ensemble member as integer (1-31)
+#' @param location bounding box portion of query,
+#' as a single URL-escaped string
+#' @param directory server path portion of query,
+#' as a single URL-escaped string
+#' (not to be confused with local output dir -- that's `working_directory`)
+#' @param hours_char timepoints to retrieve,
+#' as zero-padded strings e.g. `c("000", "384", "840")`
+#' @param cycle forecast hour to use ("00", "06", "12", or "18")
+#' @param base_filename1 URL onto which to append query string components
+#' @param vars variable listing component of query,
+#' as a single URL-escaped string
+#' @param working_directory path on local disk to write output
+#'
+#' @return NULL
+#'
+download_grid <- function(ens_index, location, directory, hours_char, cycle,
+ base_filename1, vars, working_directory) {
+ member_type <- if (ens_index == 1) "gec" else "gep" # "_c_ontrol", "_p_erturbed"
+ ens_idxname <- stringr::str_pad(ens_index - 1, width = 2, pad = "0")
+ base_filename2 <- paste0(member_type, ens_idxname, ".t", cycle, "z.pgrb2a.0p50.f")
+ for (hr in hours_char) {
+ file_name <- paste0(base_filename2, hr)
+ destfile <- paste0(working_directory, "/", file_name, ".grib")
+
+ download_file <- TRUE
+ if (file.exists(destfile)) {
+ fsz <- file.info(destfile)$size
+ gribf <- file(destfile, "rb")
+ seek(gribf, where = fsz - 4, origin = "start")
+ last4 <- readBin(gribf, "raw", 4)
+ close(gribf)
+ if (all(as.integer(last4) == 55)) {
+ download_file <- FALSE
+ }
+ }
+
+ if (download_file) {
+ tryCatch(
+ download_file_shim(
+ paste0(base_filename1, file_name, vars, location, directory),
+ destfile = destfile,
+ quiet = TRUE
+ ),
+ error = function(e) {
+ warning(paste(e$message, "skipping", file_name),
+ call. = FALSE)
+ return(NA)
+ },
+ finally = NULL
+ )
+ }
+ }
+}
+
+
#' Extract and temporally downscale points from downloaded grid files
#'
#' @param lat_list lat for site
@@ -222,23 +195,13 @@ process_gridded_noaa_download <- function(lat_list,
dlwrfsfc <- array(NA, dim = c(site_length, length(hours_char)))
dswrfsfc <- array(NA, dim = c(site_length, length(hours_char)))
- if(ens_index == 1){
- base_filename2 <- paste0("gec00",".t",cycle,"z.pgrb2a.0p50.f")
- }else{
- if(ens_index-1 < 10){
- ens_name <- paste0("0",ens_index-1)
- }else{
- ens_name <- as.character(ens_index-1)
- }
- base_filename2 <- paste0("gep",ens_name,".t",cycle,"z.pgrb2a.0p50.f")
- }
+ member_type <- if (ens_index == 1) { "gec" } else { "gep" } # "_c_ontrol", "_p_erturbed"
+ ens_idxname <- stringr::str_pad(ens_index - 1, width = 2, pad = "0")
+ base_filename2 <- paste0(member_type,ens_idxname,".t",cycle,"z.pgrb2a.0p50.f")
lats <- round(lat_list/.5)*.5
lons <- round(lon_list/.5)*.5
- if(lons < 0){
- lons <- 360 + lons
- }
curr_hours <- hours_char
for(hr in 1:length(curr_hours)){
@@ -263,8 +226,13 @@ process_gridded_noaa_download <- function(lat_list,
vgrd10m[s, hr] <- grib_data_df$`10[m] HTGL=Specified height level above ground; v-component of wind [m/s]`[index]
if(curr_hours[hr] != "000"){
- apcpsfc[s, hr] <- grib_data_df$`SFC=Ground or water surface; 03 hr Total precipitation [kg/(m^2)]`[index]
- tcdcclm[s, hr] <- grib_data_df$`RESERVED(10) (Reserved); Total cloud cover [%]`[index]
+ # total precip alternates being named as 3 or 6 hr total
+ # TODO: not sure if the contents actually differ or if this is a labeling bug in the grib files
+ precip_hr <- if ((as.numeric(curr_hours[hr]) %% 2) == 1) { "03" } else { "06" }
+ precip_name <- paste("SFC=Ground or water surface;", precip_hr, "hr Total precipitation [kg/(m^2)]")
+ apcpsfc[s, hr] <- grib_data_df[[precip_name]][index]
+
+ tcdcclm[s, hr] <- grib_data_df$`EATM=Entire Atmosphere; Total cloud cover [%]`[index]
dswrfsfc[s, hr] <- grib_data_df$`SFC=Ground or water surface; Downward Short-Wave Rad. Flux [W/(m^2)]`[index]
dlwrfsfc[s, hr] <- grib_data_df$`SFC=Ground or water surface; Downward Long-Wave Rad. Flux [W/(m^2)]`[index]
}
@@ -301,17 +269,15 @@ process_gridded_noaa_download <- function(lat_list,
- cycle <-forecast_time
+ cycle <- forecast_time
curr_forecast_time <- forecast_date + lubridate::hours(cycle)
- if(cycle < 10) cycle <- paste0("0",cycle)
- if(cycle == "00"){
- hours <- c(seq(0, 240, 3),seq(246, 840 , 6))
- }else{
- hours <- c(seq(0, 240, 3),seq(246, 384 , 6))
+ cycle <- stringr::str_pad(cycle, width = 2, pad = "0")
+ if (cycle == "00") {
+ hours <- c(seq(0, 240, 3),seq(246, 840, 6))
+ } else {
+ hours <- c(seq(0, 240, 3),seq(246, 384, 6))
}
- hours_char <- hours
- hours_char[which(hours < 100)] <- paste0("0",hours[which(hours < 100)])
- hours_char[which(hours < 10)] <- paste0("0",hours_char[which(hours < 10)])
+ hours_char <- stringr::str_pad(hours, width = 3, pad = "0") # 3->"003", 384->"384"
raw_files <- list.files(file.path(model_name_raw_dir,forecast_date,cycle))
hours_present <- as.numeric(stringr::str_sub(raw_files, start = 25, end = 27))
@@ -341,19 +307,21 @@ process_gridded_noaa_download <- function(lat_list,
FUN = extract_sites,
hours_char = hours_char,
hours = hours,
- cycle,
- site_id,
- lat_list,
- lon_list,
+ cycle = cycle,
+ site_id = site_id,
+ lat_list = lat_list,
+ lon_list = lon_list,
working_directory = file.path(model_name_raw_dir,forecast_date,cycle),
mc.cores = 1)
- forecast_times <- lubridate::as_datetime(forecast_date) + lubridate::hours(as.numeric(cycle)) + lubridate::hours(as.numeric(hours_char))
+ forecast_times <- lubridate::as_datetime(forecast_date) +
+ lubridate::hours(as.numeric(cycle)) +
+ lubridate::hours(as.numeric(hours_char))
- #Convert negetive longitudes to degrees east
+ #Convert negative longitudes to degrees east
if(lon_list < 0){
lon_east <- 360 + lon_list
}else{
@@ -417,25 +385,28 @@ process_gridded_noaa_download <- function(lat_list,
noaa_data$air_temperature$value <- noaa_data$air_temperature$value + 273.15
- specific_humidity[which(!is.na(noaa_data$relative_humidity$value))] <- PEcAn.data.atmosphere::rh2qair(rh = noaa_data$relative_humidity$value[which(!is.na(noaa_data$relative_humidity$value))],
- T = noaa_data$air_temperature$value[which(!is.na(noaa_data$relative_humidity$value))],
- press = noaa_data$air_pressure$value[which(!is.na(noaa_data$relative_humidity$value))])
-
-
+ specific_humidity[which(!is.na(noaa_data$relative_humidity$value))] <- PEcAn.data.atmosphere::rh2qair(
+ rh = noaa_data$relative_humidity$value[which(!is.na(noaa_data$relative_humidity$value))],
+ T = noaa_data$air_temperature$value[which(!is.na(noaa_data$relative_humidity$value))],
+ press = noaa_data$air_pressure$value[which(!is.na(noaa_data$relative_humidity$value))]
+ )
+
+
#Calculate wind speed from east and north components
wind_speed <- sqrt(noaa_data$eastward_wind$value^2 + noaa_data$northward_wind$value^2)
- forecast_noaa <- tibble::tibble(time = noaa_data$air_temperature$forecast.date,
- NOAA.member = noaa_data$air_temperature$ensembles,
- air_temperature = noaa_data$air_temperature$value,
- air_pressure= noaa_data$air_pressure$value,
- relative_humidity = noaa_data$relative_humidity$value,
- surface_downwelling_longwave_flux_in_air = noaa_data$surface_downwelling_longwave_flux_in_air$value,
- surface_downwelling_shortwave_flux_in_air = noaa_data$surface_downwelling_shortwave_flux_in_air$value,
- precipitation_flux = noaa_data$precipitation_flux$value,
- specific_humidity = specific_humidity,
- cloud_area_fraction = noaa_data$cloud_area_fraction$value,
- wind_speed = wind_speed)
+ forecast_noaa <- tibble::tibble(
+ time = noaa_data$air_temperature$forecast.date,
+ NOAA.member = noaa_data$air_temperature$ensembles,
+ air_temperature = noaa_data$air_temperature$value,
+ air_pressure= noaa_data$air_pressure$value,
+ relative_humidity = noaa_data$relative_humidity$value,
+ surface_downwelling_longwave_flux_in_air = noaa_data$surface_downwelling_longwave_flux_in_air$value,
+ surface_downwelling_shortwave_flux_in_air = noaa_data$surface_downwelling_shortwave_flux_in_air$value,
+ precipitation_flux = noaa_data$precipitation_flux$value,
+ specific_humidity = specific_humidity,
+ cloud_area_fraction = noaa_data$cloud_area_fraction$value,
+ wind_speed = wind_speed)
forecast_noaa$cloud_area_fraction <- forecast_noaa$cloud_area_fraction / 100 #Convert from % to proportion
@@ -455,14 +426,10 @@ process_gridded_noaa_download <- function(lat_list,
for (ens in 1:31) { # i is the ensemble number
#Turn the ensemble number into a string
- if(ens-1< 10){
- ens_name <- paste0("0",ens-1)
- }else{
- ens_name <- ens - 1
- }
+ ens_name <- stringr::str_pad(ens - 1, width = 2, pad = "0")
forecast_noaa_ens <- forecast_noaa %>%
- dplyr::filter(NOAA.member == ens) %>%
+ dplyr::filter(.data$NOAA.member == ens) %>%
dplyr::filter(!is.na(.data$air_temperature))
end_date <- forecast_noaa_ens %>%
@@ -525,6 +492,15 @@ process_gridded_noaa_download <- function(lat_list,
return(results_list)
} #process_gridded_noaa_download
+
+
+
+
+
+
+
+
+
#' @title Downscale NOAA GEFS from 6hr to 1hr
#' @return None
#'
@@ -645,6 +621,14 @@ temporal_downscale <- function(input_file, output_file, overwrite = TRUE, hr = 1
+
+
+
+
+
+
+
+
##' @title Write NOAA GEFS netCDF
##' @name write_noaa_gefs_netcdf
##' @param df data frame of meterological variables to be written to netcdf. Columns
@@ -711,4 +695,4 @@ write_noaa_gefs_netcdf <- function(df, ens = NA, lat, lon, cf_units, output_file
ncdf4::nc_close(nc_flptr) #Write to the disk/storage
}
-}
\ No newline at end of file
+}
diff --git a/modules/data.atmosphere/R/average_ERA5.R b/modules/data.atmosphere/R/average_ERA5.R
new file mode 100644
index 00000000000..2917078e86a
--- /dev/null
+++ b/modules/data.atmosphere/R/average_ERA5.R
@@ -0,0 +1,73 @@
+#' @description
+#' Averages ERA5 data over the period between the start and end dates and exports the result as a GeoTIFF file.
+#' @title Average_ERA5_2_GeoTIFF
+#'
+#' @param start.date character: start of the period to average over (e.g., 2012-01-01).
+#' @param end.date character: end of the period to average over (e.g., 2021-12-31).
+#' @param in.path character: the directory where your ERA5 data is stored (files should be named ERA5_YEAR.nc).
+#' @param outdir character: the output directory where the averaged GeoTIFF file will be generated.
+#'
+#' @return character: path to the exported GeoTIFF file.
+#'
+#' @export
+#' @author Dongchen Zhang
+Average_ERA5_2_GeoTIFF <- function (start.date, end.date, in.path, outdir) {
+ # create dates.
+ years <- sort(unique(lubridate::year(start.date):lubridate::year(end.date)))
+ # initialize final outcomes.
+ temp.all <- precip.all <- srd.all <- dewpoint.all <- c()
+ # loop over years.
+ for (i in seq_along(years)) {
+ # open ERA5 nc file as geotiff format for referencing crs and ext.
+ ERA5.tiff <- terra::rast(file.path(in.path, paste0("ERA5_", years[i], ".nc")))
+ # open ERA5 nc file.
+ met.nc <- ncdf4::nc_open(file.path(in.path, paste0("ERA5_", years[i], ".nc")))
+ # find index for the date.
+ times <- as.POSIXct(met.nc$dim$time$vals*3600, origin="1900-01-01 00:00:00", tz = "UTC")
+ time.inds <- which(lubridate::date(times) >= start.date & lubridate::date(times) <= end.date)
+ # extract temperature.
+ PEcAn.logger::logger.info("entering temperature.")
+ temp.all <- abind::abind(temp.all, apply(ncdf4::ncvar_get(met.nc, "t2m")[,,,time.inds], c(1,2,4), mean), along = 3)
+ # extract precipitation.
+ PEcAn.logger::logger.info("entering precipitation.")
+ precip.all <- abind::abind(precip.all, apply(ncdf4::ncvar_get(met.nc, "tp")[,,,time.inds], c(1,2,4), mean), along = 3)
+ # extract shortwave solar radiation.
+ PEcAn.logger::logger.info("entering solar radiation.")
+ srd.all <- abind::abind(srd.all, apply(ncdf4::ncvar_get(met.nc, "ssrd")[,,,time.inds], c(1,2,4), mean), along = 3)
+ # extract dewpoint.
+ PEcAn.logger::logger.info("entering dewpoint.")
+ dewpoint.all <- abind::abind(dewpoint.all, apply(ncdf4::ncvar_get(met.nc, "d2m")[,,,time.inds], c(1,2,4), mean), along = 3)
+ # close the NC connection.
+ ncdf4::nc_close(met.nc)
+ }
+ # aggregate across time.
+ # temperature.
+ temp <- apply(temp.all, c(1, 2), mean)
+ temp <- PEcAn.utils::ud_convert(temp, "K", "degC")
+ # precipitation.
+ precip <- apply(precip.all, c(1, 2), mean)
+ # solar radiation.
+ srd <- apply(srd.all, c(1, 2), mean)
+ # dewpoint.
+ dewpoint <- apply(dewpoint.all, c(1, 2), mean)
+ dewpoint <- PEcAn.utils::ud_convert(dewpoint, "K", "degC")
+ # convert dew point to relative humidity.
+ beta <- (112 - (0.1 * temp) + dewpoint) / (112 + (0.9 * temp))
+ relative.humidity <- beta ^ 8
+ VPD <- get.vpd(100*relative.humidity, temp)
+ # combine together.
+ PEcAn.logger::logger.info("Aggregate maps.")
+ met.rast <- c(terra::rast(matrix(temp, nrow = dim(temp)[2], ncol = dim(temp)[1], byrow = T)),
+ terra::rast(matrix(precip, nrow = dim(precip)[2], ncol = dim(precip)[1], byrow = T)),
+ terra::rast(matrix(srd, nrow = dim(srd)[2], ncol = dim(srd)[1], byrow = T)),
+ terra::rast(matrix(VPD, nrow = dim(VPD)[2], ncol = dim(VPD)[1], byrow = T)))
+ # adjust crs and extents.
+ terra::crs(met.rast) <- terra::crs(ERA5.tiff)
+ terra::ext(met.rast) <- terra::ext(ERA5.tiff)
+ names(met.rast) <- c("temp", "prec", "srad", "vapr")
+ # write into geotiff file.
+ terra::writeRaster(met.rast, file.path(outdir, paste0("ERA5_met_", lubridate::year(end.date), ".tiff")))
+ # end.
+ gc()
+ return(file.path(outdir, paste0("ERA5_met_", lubridate::year(end.date), ".tiff")))
+}
\ No newline at end of file
diff --git a/modules/data.atmosphere/R/debias_met_regression.R b/modules/data.atmosphere/R/debias_met_regression.R
index bae4dd6f036..ff91b77d5d2 100644
--- a/modules/data.atmosphere/R/debias_met_regression.R
+++ b/modules/data.atmosphere/R/debias_met_regression.R
@@ -1012,7 +1012,14 @@ debias.met.regression <- function(train.data, source.data, n.ens, vars.debias=NU
# -------------
# Save some diagnostic graphs if useful
# -------------
- if(save.diagnostics==TRUE){
+ if (save.diagnostics == TRUE) {
+ if (!requireNamespace("ggplot2", quietly = TRUE)) {
+ stop(
+ "Package `ggplot2` not found but needed for `save.diagnostics` ",
+ "option of `debias_met_regression()`"
+ )
+ }
+
dir.create(path.diagnostics, recursive=T, showWarnings=F)
dat.pred <- source.data$time
diff --git a/modules/data.atmosphere/R/download.NOAA_GEFS.R b/modules/data.atmosphere/R/download.NOAA_GEFS.R
index e68bc7d166a..9ae0e6d0ded 100644
--- a/modules/data.atmosphere/R/download.NOAA_GEFS.R
+++ b/modules/data.atmosphere/R/download.NOAA_GEFS.R
@@ -1,80 +1,84 @@
##' @title Download NOAA GEFS Weather Data
-##'
+##'
##' @section Information on Units:
-##' Information on NOAA weather units can be found below. Note that the temperature is measured in degrees C,
+##' Information on NOAA weather units can be found below. Note that the temperature is measured in degrees C,
##' but is converted at the station and downloaded in Kelvin.
##' @references https://www.ncdc.noaa.gov/crn/measurements.html
-##'
+##'
##' @section NOAA_GEFS General Information:
-##' This function downloads NOAA GEFS weather data. GEFS is an ensemble of 21 different weather forecast models.
-##' A 16 day forecast is avaliable every 6 hours. Each forecast includes information on a total of 8 variables.
-##' These are transformed from the NOAA standard to the internal PEcAn
-##' standard.
-##'
-##' @section Data Avaliability:
-##' NOAA GEFS weather data is avaliable on a rolling 12 day basis; dates provided in "start_date" must be within this range. The end date can be any point after
-##' that, but if the end date is beyond 16 days, only 16 days worth of forecast are recorded. Times are rounded down to the previous 6 hour forecast. NOAA
-##' GEFS weather data isn't always posted immediately, and to compensate, this function adjusts requests made in the last two hours
-##' back two hours (approximately the amount of time it takes to post the data) to make sure the most current forecast is used.
-##'
+##' This function downloads NOAA GEFS weather data. GEFS is an ensemble of 31 different weather forecast models.
+##' A 16 day forecast is available every 6 hours and a 35 day forecast is available every 24 hours.
+##' Both are at 3-hour frequency for the first 10 days of the forecast and 6-hour frequency beyond that.
+##' Each forecast includes information on a total of 8 variables.
+##' These are transformed from the NOAA standard to the internal PEcAn standard.
+##'
+##' @section Data Availability:
+##' NOAA GEFS weather data is available on a rolling 4 day basis.
+##' Dates provided in "start_date" must be within this range.
+##' The end date can be any point after that, but if the end date is beyond 16 days
+##' (35 days for the midnight UTC forecast), only 16 (35) days worth of forecast are retrieved.
+##' Times are rounded down to the previous 6 hour forecast.
+##'
+##' NOAA GEFS weather data isn't always posted immediately. Each 16-day forecast takes
+##' approximately three hours to run, and the once-a-day forecasts for days 17-35 are
+##' posted much later (up to 21 hours) than the forecasts for days 0 to 16.
+##' See the [GEFS v12 release announcement](https://www.weather.gov/media/notification/pdf2/scn20-75gefs_v12_changes.pdf)
+##' for details.
+##'
##' @section Data Save Format:
-##' Data is saved in the netcdf format to the specified directory. File names reflect the precision of the data to the given range of days.
+##' Data is saved in the netcdf format to the specified directory.
+##' File names reflect the precision of the data to the given range of days.
##' NOAA.GEFS.willow creek.3.2018-06-08T06:00.2018-06-24T06:00.nc specifies the forecast, using ensemble number 3 at willow creek on
##' June 6th, 2018 at 6:00 a.m. to June 24th, 2018 at 6:00 a.m.
-##'
+##'
##' @return A list of data frames is returned containing information about the data file that can be used to locate it later. Each
##' data frame contains information about one file.
##'
-##' @param outfolder Directory where results should be written
-##' @param start_date, Range of dates/times to be downloaded (default assumed to be time that function is run)
-##' @param end_date, end date for range of dates to be downloaded (default 16 days from start_date)
+##' @param site_id The unique ID given to each site. This is used as part of the file name.
##' @param lat.in site latitude in decimal degrees
##' @param lon.in site longitude in decimal degrees
-##' @param site_id The unique ID given to each site. This is used as part of the file name.
-##' @param sitename Site name
-##' @param username username from pecan workflow
+##' @param outfolder Directory where results should be written
+##' @param start_date Range of dates/times to be downloaded (default assumed to be time that function is run)
+##' @param end_date end date for range of dates to be downloaded (default 16 days from start_date)
+##' @param downscale logical, assumed True. Indicates whether data should be downscaled to hourly
##' @param overwrite logical. Download a fresh version even if a local file with the same name already exists?
-##' @param downscale logical, assumed True. Indicated whether data should be downscaled to hourly
-##' @param ... Additional optional parameters
+##' @param ... Additional optional parameters, currently ignored
##'
##' @export
-##'
-##' @examples
+##'
+##' @examples
##' \dontrun{
-##' download.NOAA_GEFS(outfolder="~/Working/results",
-##' lat.in= 45.805925,
-##' lon.in = -90.07961,
+##' download.NOAA_GEFS(outfolder="~/Working/results",
+##' lat.in= 45.805925,
+##' lon.in = -90.07961,
##' site_id = 676)
##' }
-##'
-##' @author Quinn Thomas, modified by K Zarada
-##'
+##'
+##' @author Quinn Thomas, modified by K Zarada
+##'
download.NOAA_GEFS <- function(site_id,
- sitename = NULL,
- username = 'pecan',
lat.in,
lon.in,
outfolder,
- start_date= Sys.Date(),
+ start_date = Sys.Date(),
end_date = start_date + lubridate::days(16),
downscale = TRUE,
overwrite = FALSE,
- ...){
-
- forecast_date = as.Date(start_date)
- forecast_time = (lubridate::hour(start_date) %/% 6)*6
-
- end_hr = (as.numeric(difftime(end_date, start_date, units = 'hours')) %/% 6)*6
-
+ ...) {
+
+ forecast_date <- as.Date(start_date)
+ forecast_time <- (lubridate::hour(start_date) %/% 6) * 6
+ end_hr <- (as.numeric(difftime(end_date, start_date, units = "hours")) %/% 6) * 6
+
model_name <- "NOAAGEFS_6hr"
- model_name_ds <-"NOAAGEFS_1hr" #Downscaled NOAA GEFS
+ model_name_ds <- "NOAAGEFS_1hr" #Downscaled NOAA GEFS
model_name_raw <- "NOAAGEFS_raw"
-
+
PEcAn.logger::logger.info(paste0("Downloading GEFS for site ", site_id, " for ", start_date))
-
+
PEcAn.logger::logger.info(paste0("Overwrite existing files: ", overwrite))
-
-
+
+
noaa_grid_download(lat_list = lat.in,
lon_list = lon.in,
end_hr = end_hr,
@@ -82,7 +86,7 @@ download.NOAA_GEFS <- function(site_id,
forecast_date = forecast_date,
model_name_raw = model_name_raw,
output_directory = outfolder)
-
+
results <- process_gridded_noaa_download(lat_list = lat.in,
lon_list = lon.in,
site_id = site_id,
diff --git a/modules/data.atmosphere/R/extract_ERA5.R b/modules/data.atmosphere/R/extract_ERA5.R
index fe5e5b7b5ab..b2a1035b606 100644
--- a/modules/data.atmosphere/R/extract_ERA5.R
+++ b/modules/data.atmosphere/R/extract_ERA5.R
@@ -1,16 +1,18 @@
#' ERA5_extract
#'
-#' @param slat latitude
-#' @param slon longitude
-#' @param in.path path to the directory containing the file to be inserted
-#' @param start_date start date
-#' @param end_date end date
-#' @param outfolder Path to directory where nc files need to be saved.
-#' @param in.prefix initial portion of the filename that does not vary by date.
+#' @param slat numeric: vector of latitudes.
+#' @param slon numeric: vector of longitudes.
+#' @param in.path character: path to the directory containing the file to be inserted
+#' @param start_date character: start date (in YYYY-MM-DD format).
+#' @param end_date character: end date (in YYYY-MM-DD format).
+#' @param outfolder character: Path to directory where nc files need to be saved.
+#' @param in.prefix character: initial portion of the filename that does not vary by date.
#' Does not include directory; specify that as part of in.path.
-#' @param newsite site name.
-#' @param vars variables to be extracted. If NULL all the variables will be
-#' returned.
+#' @param newsite character: vector of site names.
+#' Its length should match that of slat and slon.
+#' @param ncores numeric: the number of CPUs for the parallel compute. Default is 1.
+#' @param vars character: names of variables to be extracted. If NULL all the variables will be
+#' returned. Default is NULL.
#' @param overwrite Logical if files needs to be overwritten.
#' @param verbose Decide if we want to stop printing info.
#' @param ... other inputs.
@@ -21,146 +23,269 @@
#' @export
#' @examples
#' \dontrun{
-#' point.data <- ERA5_extract(sslat=40, slon=-120, years=c(1990:1995), vars=NULL)
-#'
-# point.data %>%
-#' purrr::map(~xts::apply.daily(.x, mean))
+#' point.data <- extract.nc.ERA5(
+#' slat = 43.25,
+#' slon = -83.25,
+#' in.path = "path/to/era5/files",
+#' start_date = "1990-01-01",
+#' end_date = "1995-12-31",
+#' outfolder = "path/to/output",
+#' in.prefix = "ERA5_",
+#' newsite = "my_site",
+#' vars = NULL,
+#' overwrite = FALSE,
+#' verbose = TRUE
+#' )
#'
#' }
+#' @author Dongchen Zhang, Akash
+#' @importFrom dplyr %>%
+#' @importFrom foreach %dopar%
+
extract.nc.ERA5 <-
- function(slat ,
- slon ,
- in.path ,
+ function(slat,
+ slon,
+ in.path,
start_date,
- end_date ,
+ end_date,
outfolder,
in.prefix,
newsite,
+ ncores = 1,
vars = NULL,
overwrite = FALSE,
verbose = FALSE,
...) {
- # library(xts)
- # Distributing the job between whatever core is available.
years <- seq(lubridate::year(start_date),
lubridate::year(end_date),
1
)
- ensemblesN <- seq(1, 10)
-
+ sample_file <- file.path(in.path, paste0(in.prefix, years[1], ".nc"))
+ if (!file.exists(sample_file)) {
+ PEcAn.logger::logger.severe(paste0("ERA5 input file not found: ", sample_file,
+ ". please check the input path and file prefix."))
+ }
- tryCatch({
- #for each ensemble
- one.year.out <- years %>%
- purrr::map(function(year) {
-
- # for each year
- point.data <- ensemblesN %>%
- purrr::map(function(ens) {
-
-
- ncfile <- file.path(in.path, paste0(in.prefix, year, ".nc"))
-
- #printing out initial information.
- if (verbose) {
- PEcAn.logger::logger.info(paste0("Trying to open :", ncfile, " "))
-
- if (!file.exists(ncfile))
- PEcAn.logger::logger.severe("The nc file was not found.")
-
- #msg
- PEcAn.logger::logger.info(paste0(year, " is being processed ", "for ensemble #", ens, " "))
- }
-
- #open the file
- nc_data <- ncdf4::nc_open(ncfile)
- # time stamp
-
- t <- ncdf4::ncvar_get(nc_data, "time")
- tunits <- ncdf4::ncatt_get(nc_data, 'time')
- tustr <- strsplit(tunits$units, " ")
- timestamp <-
- as.POSIXct(t * 3600, tz = "UTC", origin = tustr[[1]][3])
- try(ncdf4::nc_close(nc_data))
-
-
- # set the vars
- if (is.null(vars))
- vars <- names(nc_data$var)
- # for the variables extract the data
- all.data.point <- vars %>%
- purrr::set_names(vars) %>%
- purrr::map_dfc(function(vname) {
- if (verbose) {
- PEcAn.logger::logger.info(paste0(" \t ",vname, "is being extracted ! "))
- }
-
- brick.tmp <-
- raster::brick(ncfile, varname = vname, level = ens)
- nn <-
- raster::extract(brick.tmp,
- sp::SpatialPoints(cbind(slon, slat)),
- method = 'simple')
- if (verbose) {
- if (!is.numeric(nn)) {
- PEcAn.logger::logger.severe(paste0(
- "Expected raster object to be numeric, but it has type `",
- paste0(typeof(nn), collapse = " "),
- "`"
- ))
- }
- }
-
- # replacing the missing/filled values with NA
- nn[nn == nc_data$var[[vname]]$missval] <- NA
- # send out the extracted var as a new col
- t(nn)
-
- })
-
- #close the connection
-
- # send out as xts object
- xts::xts(all.data.point, order.by = timestamp)
- }) %>%
- stats::setNames(paste0("ERA_ensemble_", ensemblesN))
-
- #Merge mean and the speard
- return(point.data)
-
- }) %>%
- stats::setNames(years)
-
-
- # The order of one.year.out is year and then Ens - Mainly because of the spead / I wanted to touch each file just once.
- # This now changes the order to ens - year
- point.data <- ensemblesN %>%
- purrr::map(function(Ensn) {
- rbind.xts <- do.call("::", list("xts", "rbind.xts"))
- one.year.out %>%
- purrr::map( ~ .x [[Ensn]]) %>%
- do.call("rbind.xts", .)
+ # Determine data type (ensemble vs reanalysis)
+ nc_test <- ncdf4::nc_open(sample_file)
+ # initialize variables
+ is_ensemble <- FALSE
+ ens_size <- 1
+ if ("number" %in% names(nc_test$dim)) {
+ is_ensemble <- TRUE
+ ens_size <- nc_test$dim$number$len
+ if (verbose) PEcAn.logger::logger.info(paste0("detected new ERA5 format with ", ens_size, " ensemble members"))
+ } else if (any(sapply(nc_test$var, function(v) v$ndims == 4))) {
+ is_ensemble <- TRUE
+ # new ERA5 ens format [longitude, latitude, valid_time/time, number]
+ var_4d <- names(nc_test$var)[sapply(nc_test$var, function(v) v$ndims == 4)][1]
+ ens_size <- nc_test$var[[var_4d]]$size[4]
+ if (verbose) PEcAn.logger::logger.info(paste0("detected new ERA5 format with ", ens_size, " ensemble members"))
+ } else {
+ # old ERA5 ens format [longitude, latitude, time*ens]
+ var_3d <- names(nc_test$var)[sapply(nc_test$var, function(v) v$ndims == 3)][1]
+ if (!is.na(var_3d) && !is.null(var_3d)) {
+ tryCatch({
+ # Check if time dimension exists
+ if ("time" %in% names(nc_test$dim) && !is.null(nc_test$dim$time$len)) {
+ test_brick <- raster::brick(sample_file, varname = var_3d)
+ total_layers <- raster::nlayers(test_brick)
+ time_size <- nc_test$dim$time$len
+
+ if (!is.na(total_layers) && !is.na(time_size) &&
+ total_layers > time_size && total_layers %% time_size == 0) {
+ is_ensemble <- TRUE
+ ens_size <- total_layers / time_size
+ if (verbose) PEcAn.logger::logger.info(paste0("detected old ERA5 format with ", ens_size, " ensemble members"))
+ }
+ }
+ }, error = function(e) {
+ if (verbose) PEcAn.logger::logger.debug(paste("Error during format detection:", e$message))
})
+ }
+ }
+ if (!is_ensemble && verbose) {
+ PEcAn.logger::logger.info("processing ERA5 reanalysis data")
+ }
+
+ ensemblesN <- if (is_ensemble) seq(1, ens_size) else 1
+ ncdf4::nc_close(nc_test)
+
+ # initialize parallel.
+ cl <- parallel::makeCluster(ncores)
+ on.exit(parallel::stopCluster(cl), add = TRUE)
+ doSNOW::registerDoSNOW(cl)
+
+ # initialize progress bar.
+ pb <- utils::txtProgressBar(min=0, max=length(slat), style=3)
+ on.exit(close(pb), add = TRUE)
+ progress <- function(n) utils::setTxtProgressBar(pb, n)
+ opts <- list(progress=progress)
+ # Distributing the job between whatever core is available.
+ final.nc.files <- vector("list", length = length(years))
+ for (i in seq_along(years)) {
+ # report progress.
+ PEcAn.logger::logger.info(paste0("\nProcessing year ", years[i], ".\n"))
+ year <- years[i]
+ year_start <- if (year == lubridate::year(start_date)) {
+ start_date
+ } else {
+ paste0(year, "-01-01")
+ }
+ year_end <- if (year == lubridate::year(end_date)) {
+ end_date
+ } else {
+ paste0(year, "-12-31")
+ }
+ ncfile <- file.path(in.path, paste0(in.prefix, year, ".nc"))
+ # open the file
+ nc_data <- ncdf4::nc_open(ncfile)
+ time_var <- if ("time" %in% names(nc_data$var)) "time" else "valid_time"
+ t <- ncdf4::ncvar_get(nc_data, time_var)
+ tunits <- ncdf4::ncatt_get(nc_data, time_var)
+ tustr <- strsplit(tunits$units, " since ")
+ # handle different time units: 'time' uses hours, 'valid_time' uses seconds
+ if (time_var == "time") {
+ # traditional format: "hours since YYYY-MM-DD HH:MM:SS"
+ timestamp <- as.POSIXct(t * 3600, tz = "UTC", origin = tustr[[1]][2])
+ } else {
+ # new format: "seconds since YYYY-MM-DD HH:MM:SS" (typically 1970-01-01)
+ timestamp <- as.POSIXct(t, tz = "UTC", origin = tustr[[1]][2])
+ }
- # Calling the met2CF inside extract bc in met process met2CF comes before extract !
- out <-met2CF.ERA5(
- slat,
- slon,
- start_date,
- end_date,
- sitename=newsite,
- outfolder,
- point.data,
- overwrite = FALSE,
- verbose = verbose
- )
- return(out)
+ # set the vars - filter for valid variables
+ if (is.null(vars)) {
+ all_vars <- names(nc_data$var)
+ if (is_ensemble) {
+ # for ensemble data, keep variables with 4 dimensions (lon, lat, time, ensemble)
+ vars <- all_vars[sapply(all_vars, function(v) {
+ var_info <- nc_data$var[[v]]
+ var_info$ndims == 4 &&
+ var_info$prec %in% c("float", "double", "integer") &&
+ !v %in% c("expver")
+ })]
+ } else {
+ # for reanalysis data, keep variables with 3 dimensions (lon, lat, time)
+ vars <- all_vars[sapply(all_vars, function(v) {
+ var_info <- nc_data$var[[v]]
+ var_info$ndims == 3 &&
+ var_info$prec %in% c("float", "double", "integer") &&
+ !v %in% c("longitude", "latitude", "time", "valid_time")
+ })]
+ }
+ if (verbose && length(vars) < length(all_vars)) {
+ skipped <- setdiff(all_vars, vars)
+ PEcAn.logger::logger.info(paste0("Processing variables: ", paste(vars, collapse=", ")))
+ PEcAn.logger::logger.info(paste0("Skipped metadata variables: ", paste(skipped, collapse=", ")))
+ }
+ }
+ ncdf4::nc_close(nc_data)
- }, error = function(e) {
- PEcAn.logger::logger.severe(paste0(conditionMessage(e)))
- })
-
- }
\ No newline at end of file
+ # for the variables extract the data
+ if (verbose) {
+ PEcAn.logger::logger.info("Extracting NC file.\n")
+ }
+ vname <- NULL
+ all.data.point <-
+ foreach::foreach(vname = vars,
+ .packages=c("Kendall", "ncdf4")) %dopar% {
+ nc_data <- ncdf4::nc_open(ncfile)
+ on.exit(ncdf4::nc_close(nc_data), add = TRUE)
+ ens.out <- vector("list", length = length(ensemblesN))
+ for (ens in ensemblesN) {
+ if (is_ensemble) {
+ var_data <- ncdf4::ncvar_get(nc_data, vname)
+ # NEW FORMAT- [longitude, latitude, valid_time, number]
+ var_subset <- if (nc_data$var[[vname]]$ndims == 4) {
+ var_data[, , , ens]
+ } else {
+ # OLD FORMAT - [longitude, latitude, time*ens]
+ # time slice boundaries for specific ensemble member
+ time_steps_per_ens <- dim(var_data)[3] / ens_size
+ start_idx <- (ens - 1) * time_steps_per_ens + 1
+ end_idx <- ens * time_steps_per_ens
+ var_data[, , start_idx:end_idx]
+ }
+ } else {
+ # Direct brick creation for reanalysis
+ var_subset <- NULL
+ }
+
+ brick.tmp <- if (is.null(var_subset)) {
+ raster::brick(ncfile, varname = vname)
+ } else {
+ raster::brick(var_subset,
+ xmn = min(nc_data$dim$longitude$vals),
+ xmx = max(nc_data$dim$longitude$vals),
+ ymn = min(nc_data$dim$latitude$vals),
+ ymx = max(nc_data$dim$latitude$vals),
+ crs = "+proj=longlat +datum=WGS84")
+ }
+ raster::setZ(brick.tmp, timestamp)
+ nn <-
+ raster::extract(brick.tmp,
+ sp::SpatialPoints(cbind(slon, slat)),
+ method = 'simple')
+ # replacing the missing/filled values with NA
+ nn[nn == nc_data$var[[vname]]$missval] <- NA
+ # send out the extracted var as a new col
+ ens.out[[ens]] <- t(nn)
+ }
+ ens.out
+ } %>%
+ purrr::set_names(vars)
+ # progress bar.
+ # TODO wrap into a large matrix (2928*8000*10 rows and 8 columns), then split it across the foreach workers.
+ if (verbose) {
+ PEcAn.logger::logger.info("Converting multi-site time series to by-site data frames.\n")
+ }
+ pb <- utils::txtProgressBar(min = 0, max = length(slat), style = 3)
+ all.site.data.point <- vector("list", length = length(slat))
+ for (s.ind in seq_along(all.site.data.point)) {
+ pbi <- s.ind
+ utils::setTxtProgressBar(pb, pbi)
+ all.site.data.point[[s.ind]] <- ensemblesN %>%
+ purrr::map(function(ens) {
+ s.all.data <- vars %>%
+ purrr::set_names(vars) %>%
+ purrr::map_dfc(function(vname){
+ all.data.point[[vname]][[ens]][,s.ind]
+ })
+ s.all.data <- xts::xts(s.all.data, order.by = timestamp)
+ s.all.data
+ })
+ }
+ # Write into NC files.
+ if (verbose) {
+ PEcAn.logger::logger.info("Writing NC files.\n")
+ }
+ data.point <- NULL
+ final.nc.files[[i]] <-
+ foreach::foreach(data.point = all.site.data.point,
+ s.ind = seq_along(slat),
+ .packages=c("Kendall", "ncdf4", "PEcAn.data.atmosphere", "purrr", "xts", "lubridate"),
+ .options.snow=opts,
+ .export = c("met2CF.ERA5")) %dopar% {
+ # Calling the met2CF inside extract bc in met process met2CF comes before extract !
+ out <- met2CF.ERA5(
+ slat[s.ind],
+ slon[s.ind],
+ year_start,
+ year_end,
+ sitename=newsite[s.ind],
+ outfolder,
+ data.point,
+ overwrite = FALSE,
+ verbose = verbose,
+ ens_size = ens_size
+ )
+ out %>% purrr::map(~.x[['file']]) %>% unlist
+ }
+ }
+ # we only need the by-site ensemble folders for the met2model function.
+ final.nc.files <- final.nc.files[[1]] %>% purrr::map(dirname)
+ return(final.nc.files)
+}
\ No newline at end of file
diff --git a/modules/data.atmosphere/R/half_hour_downscale.R b/modules/data.atmosphere/R/half_hour_downscale.R
index bb14748412a..9b2efbb08be 100644
--- a/modules/data.atmosphere/R/half_hour_downscale.R
+++ b/modules/data.atmosphere/R/half_hour_downscale.R
@@ -223,8 +223,16 @@ downscale_ShortWave_to_half_hrly <- function(df,lat, lon, hr = 0.5){
for (k in 1:nrow(data.hrly)) {
if(is.na(data.hrly$surface_downwelling_shortwave_flux_in_air[k])){
- SWflux <- as.matrix(subset(df, .data$day == data.hrly$day[k] & .data$hour == data.hrly$hour[k], data.hrly$surface_downwelling_shortwave_flux_in_air[k]))
- data.hrly$surface_downwelling_shortwave_flux_in_air[k] <- ifelse(data.hrly$rpotHM[k] > 0, as.numeric(SWflux[1])*(data.hrly$rpotH[k]/data.hrly$rpotHM[k]),0)
+ SWflux <- as.matrix(
+ df$surface_downwelling_shortwave_flux_in_air[
+ df$day == data.hrly$day[k] & df$hour == data.hrly$hour[k]
+ ]
+ )
+ data.hrly$surface_downwelling_shortwave_flux_in_air[k] <- ifelse(
+ data.hrly$rpotHM[k] > 0,
+ as.numeric(SWflux[1]) * (data.hrly$rpotH[k] / data.hrly$rpotHM[k]),
+ 0
+ )
}
}
@@ -287,8 +295,6 @@ downscale_repeat_6hr_to_half_hrly <- function(df, varName, hr = 0.5){
for(k in 1:dim(df)[1]){
if (is.na(df$lead_var[k])) {
df$lead_var[k] <- df$lead_var[k-1]
- }else{
- df$lead_var[k] <- df$lead_var[k]
}
}
diff --git a/modules/data.atmosphere/R/load.cfmet.R b/modules/data.atmosphere/R/load.cfmet.R
index f8a9e0cee56..bf1b1af8153 100644
--- a/modules/data.atmosphere/R/load.cfmet.R
+++ b/modules/data.atmosphere/R/load.cfmet.R
@@ -51,11 +51,19 @@ load.cfmet <- function(met.nc, lat, lon, start.date, end.date) {
all.dates <- data.frame(index = seq_along(time.idx), date = date)
- if (start.date + lubridate::days(1) < min(all.dates$date)) {
- PEcAn.logger::logger.severe("run start date", start.date, "before met data starts", min(all.dates$date))
+ delta <- stats::median(diff(all.dates$date), na.rm = TRUE)
+ if (is.na(delta)) {
+ # probably only happens with a one-line met file
+ # fall back to requiring exact match
+ delta <- 0
}
- if (end.date > max(all.dates$date)) {
- PEcAn.logger::logger.severe("run end date", end.date, "after met data ends", max(all.dates$date))
+
+ if (start.date < (min(all.dates$date) - delta)) {
+ PEcAn.logger::logger.severe("run start date", start.date, "before met data starts", min(all.dates$date))
+ }
+
+ if (end.date > (max(all.dates$date) + delta)) {
+ PEcAn.logger::logger.severe("run end date", end.date, "after met data ends", max(all.dates$date))
}
run.dates <- all.dates %>%
diff --git a/modules/data.atmosphere/R/met.process.R b/modules/data.atmosphere/R/met.process.R
index 25602246be5..8a6870f1178 100644
--- a/modules/data.atmosphere/R/met.process.R
+++ b/modules/data.atmosphere/R/met.process.R
@@ -139,15 +139,34 @@ met.process <- function(site, input_met, start_date, end_date, model,
if(is.null(model)){
stage$model <- FALSE
}
-
-
+
+
# setup site database number, lat, lon and name and copy for format.vars if new input
- latlon <- PEcAn.DB::query.site(site$id, con = con)[c("lat", "lon")]
- new.site <- data.frame(id = as.numeric(site$id),
- lat = latlon$lat,
- lon = latlon$lon)
- str_ns <- paste0(new.site$id %/% 1e+09, "-", new.site$id %% 1e+09)
-
+ # TODO why are this and the original site object passed around together?
+ # Could we mutate `site` instead of assigning `new.site`?
+ new.site <- list(
+ id = site$id,
+ lat = site$lat,
+ lon = site$lon
+ )
+ if (is.null(new.site$id)) {
+ PEcAn.logger::logger.info(
+ "no site ID provided. Generating one from lat and lon"
+ )
+ new.site$id <- paste0("lat", new.site$lat, "_lon", new.site$lon)
+ }
+ if (is.null(site$lat) || is.null(new.site$lon)) {
+ latlon <- PEcAn.DB::query.site(site$id, con = con)[c("lat", "lon")]
+ new.site$lat <- latlon$lat
+ new.site$lon <- latlon$lon
+ }
+ if (is.numeric(new.site$id) && new.site$id > 1e9) {
+ # Assume this is a BETY id, format as [server id]-[record number]
+ str_ns <- paste0(new.site$id %/% 1e+09, "-", new.site$id %% 1e+09)
+ } else {
+ str_ns <- as.character(new.site$id)
+ }
+
if (is.null(format.vars$lat)) {
format.vars$lat <- new.site$lat
}
diff --git a/modules/data.atmosphere/R/met2cf.ERA5.R b/modules/data.atmosphere/R/met2cf.ERA5.R
index 2fb21bcd682..1b80e75515b 100644
--- a/modules/data.atmosphere/R/met2cf.ERA5.R
+++ b/modules/data.atmosphere/R/met2cf.ERA5.R
@@ -6,97 +6,135 @@
#' @param end_date end date
#' @param sitename The name of the site used for making the identifier.
#' @param outfolder Path to directory where nc files need to be saved.
-#' @param out.xts Output of the extract.nc.ERA5 function which is a list of time series of met variables for each ensemble member.
+#' @param out.xts Output of the extract.nc.ERA5 function which is a list of time series of met variables for each ensemble member
+#' or single reanalysis dataset.
#' @param overwrite Logical if files needs to be overwritten.
#' @param verbose Logical flag defining if ouput of function be extra verbose.
-
+#' @param ens_size Number of ensemble members to process. Default is 1.
#'
#' @return list of dataframes
+#' @importFrom rlang .data
#' @export
-#'
+#' @author Hamze Dokohaki, Akash
met2CF.ERA5<- function(lat,
- long,
- start_date,
- end_date,
- sitename,
- outfolder,
- out.xts,
- overwrite = FALSE,
- verbose = TRUE) {
-
+ long,
+ start_date,
+ end_date,
+ sitename,
+ outfolder,
+ out.xts,
+ overwrite = FALSE,
+ verbose = TRUE,
+ ens_size = 1) {
+
years <- seq(lubridate::year(start_date),
lubridate::year(end_date),
1
)
- ensemblesN <- seq(1, 10)
-
+ ensemblesN <- seq(1, ens_size)
+
start_date <- paste0(lubridate::year(start_date),"-01-01") %>% as.Date()
end_date <- paste0(lubridate::year(end_date),"-12-31") %>% as.Date()
- # adding RH and converting rain
-
+
+ era5_tbl <- pecan_standard_met_table %>%
+ dplyr::filter(!is.na(.data$era5) & nzchar(.data$era5))
+ era5_to_cf <- stats::setNames(era5_tbl$cf_standard_name, era5_tbl$era5)
+ cf_units_map <- stats::setNames(era5_tbl$units, era5_tbl$cf_standard_name)
+
out.new <- ensemblesN %>%
purrr::map(function(ensi) {
tryCatch({
-
ens <- out.xts[[ensi]]
- # Solar radation conversions
- #https://confluence.ecmwf.int/pages/viewpage.action?pageId=104241513
- #For ERA5 daily ensemble data, the accumulation period is 3 hours. Hence to convert to W/m2:
- ens[, "ssrd"] <- ens[, "ssrd"] / (3 * 3600)
- ens[, "strd"] <- ens[, "strd"] / (3 * 3600)
- #precipitation it's originaly in meters. Meters times the density will give us the kg/m2
- ens[, "tp"] <-
- ens[, "tp"] * 1000 / 3 # divided by 3 because we have 3 hours data
- ens[, "tp"] <-
- PEcAn.utils::ud_convert(as.numeric(ens[, "tp"]), "kg m-2 hr-1", "kg m-2 s-1") #There are 21600 seconds in 6 hours
- #RH
- #Adopted from weathermetrics/R/moisture_conversions.R
- t <-
- PEcAn.utils::ud_convert(ens[, "t2m"] %>% as.numeric(), "K", "degC")
- dewpoint <-
- PEcAn.utils::ud_convert(ens[, "d2m"] %>% as.numeric(), "K", "degC")
- beta <- (112 - (0.1 * t) + dewpoint) / (112 + (0.9 * t))
- relative.humidity <- beta ^ 8
- #specific humidity
- specific_humidity <-
- PEcAn.data.atmosphere::rh2qair(relative.humidity,
- ens[, "t2m"] %>% as.numeric(),
- ens[, "sp"] %>% as.numeric()) # Pressure in Pa
+ if (is.null(ens) || nrow(ens) == 0) {
+ PEcAn.logger::logger.warn(paste("Empty ensemble", ensi))
+ return(NULL)
+ }
+
+ available_vars <- colnames(ens)
+ native_vars <- intersect(names(era5_to_cf), available_vars)
+ if (!length(native_vars)) {
+ PEcAn.logger::logger.warn("No mappable ERA5 vars in ensemble member.")
+ return(NULL)
+ }
+ # detect timestep dynamically
+ time_diffs <- diff(as.numeric(zoo::index(ens)))
+ if (length(time_diffs) > 0) {
+ timestep_seconds <- as.numeric(stats::median(time_diffs))
+ } else {
+ timestep_seconds <- 3 * 3600 # fallback to 3-hourly
+ if (verbose) PEcAn.logger::logger.info("Only one timestamp found. Defaulting to 3-hour timestep for conversion.")
+ }
+ timestep_hours <- timestep_seconds / 3600
+ # solar radiation conversions - J/m2 to W/m2
+ if ("ssrd" %in% available_vars) {
+ ens[, "ssrd"] <- as.numeric(ens[, "ssrd"]) / timestep_seconds
+ }
+ if ("strd" %in% available_vars) {
+ ens[, "strd"] <- as.numeric(ens[, "strd"]) / timestep_seconds
+ }
+ # precipitation - m to kg/m2/s
+ if ("tp" %in% available_vars) {
+ ens[, "tp"] <- (as.numeric(ens[, "tp"]) * 1000) / timestep_seconds
+ }
+
+ # relative and specific humidity (only if all required vars present)
+ specific_humidity <- NULL
+ if (all(c("t2m", "d2m", "sp") %in% available_vars)) {
+ # Vectorized RH via Magnus formula over water (Kelvin inputs)
+ T_k <- as.numeric(ens[, "t2m"]) # K
+ Td_k <- as.numeric(ens[, "d2m"]) # K
+ T_c <- T_k - 273.15
+ Td_c <- Td_k - 273.15
+ es <- 6.112 * exp((17.62 * T_c) / (243.12 + T_c)) # hPa
+ e <- 6.112 * exp((17.62 * Td_c) / (243.12 + Td_c)) # hPa
+ rh_prop <- pmin(pmax(e / es, 0), 1) # [0,1]
+
+ specific_humidity <- PEcAn.data.atmosphere::rh2qair(rh_prop, T_k, as.numeric(ens[, "sp"]))
+ }
+
+ # select available ERA5 variables and convert to CF naming
+ available_era5_vars <- intersect(names(era5_to_cf), available_vars)
+ ens_cf <- ens[, available_era5_vars, drop = FALSE]
+ colnames(ens_cf) <- era5_to_cf[available_era5_vars]
+ if (!is.null(specific_humidity)) {
+ specific_humidity_xts <- xts::xts(specific_humidity, order.by = zoo::index(ens))
+ colnames(specific_humidity_xts) <- "specific_humidity"
+ ens_cf <- xts::merge.xts(ens_cf, specific_humidity_xts)
+ }
+ # attach timestep as attribute for downstream use
+ attr(ens_cf, "timestep_hours") <- timestep_hours
+ return(ens_cf)
+
},
error = function(e) {
PEcAn.logger::logger.severe("Something went wrong during the unit conversion in met2cf ERA5.",
conditionMessage(e))
})
-
- #adding humidity
- xts::merge.xts(ens[, -c(3)], (specific_humidity)) %>%
- `colnames<-`(
- c(
- "air_temperature",
- "air_pressure",
- "precipitation_flux",
- "eastward_wind",
- "northward_wind",
- "surface_downwelling_shortwave_flux_in_air",
- "surface_downwelling_longwave_flux_in_air",
- "specific_humidity"
- )
- )
-
})
-
- #These are the cf standard names
- cf_var_names = colnames(out.new[[1]])
- cf_var_units = c("K", "Pa", "kg m-2 s-1", "m s-1", "m s-1", "W m-2", "W m-2", "1") #Negative numbers indicate negative exponents
+ # filter out NULL results from failed ensembles
+ out.new <- out.new[!sapply(out.new, is.null)]
+ if (length(out.new) == 0) {
+ PEcAn.logger::logger.severe("No valid ensembles processed")
+ return(NULL)
+ }
+
+ cf_var_names <- colnames(out.new[[1]])
+ cf_var_units <- purrr::map_chr(cf_var_names, function(nm) {
+ u <- unname(cf_units_map[nm])
+ if (length(u) == 0 || is.na(u)) {
+ if (identical(nm, "specific_humidity")) return("1") # unitless (mass ratio)
+ return(NA_character_)
+ }
+ as.character(u)
+ })
+ names(cf_var_units) <- cf_var_names
-
results_list <- ensemblesN %>%
purrr::map(function(i) {
-
start_date <- min(zoo::index(out.new[[i]]))
end_date <- max(zoo::index(out.new[[i]]))
# Create a data frame with information about the file. This data frame's format is an internal PEcAn standard, and is stored in the BETY database to
@@ -117,17 +155,13 @@ met2CF.ERA5<- function(lat,
stringsAsFactors = FALSE
)
- # i is the ensemble number
- #Generating a unique identifier string that characterizes a particular data set.
- identifier <- paste("ERA5", sitename, i, sep = "_")
-
- identifier.file <- paste("ERA5",
- i,
- lubridate::year(start_date),
- sep = ".")
-
+ if (ens_size > 1) {
+ identifier <- paste("ERA5", sitename, i, sep = "_")
+ } else {
+ identifier <- paste("ERA5", sitename, "Mean", sep = "_")
+ }
+ identifier.file <- paste("ERA5", i, lubridate::year(start_date), sep = ".")
ensemble_folder <- file.path(outfolder, identifier)
-
#Each file will go in its own folder.
if (!dir.exists(ensemble_folder)) {
dir.create(ensemble_folder,
@@ -142,25 +176,20 @@ met2CF.ERA5<- function(lat,
years %>%
purrr::map(function(year) {
- #
- identifier.file <- paste("ERA5",
- i,
- year,
- sep = ".")
+ identifier.file <- paste("ERA5", i, year, sep = ".")
flname <-file.path(ensemble_folder, paste(identifier.file, "nc", sep = "."))
# Spliting it for this year
data.for.this.year.ens <- out.new[[i]]
data.for.this.year.ens <- data.for.this.year.ens[year %>% as.character]
+ if (nrow(data.for.this.year.ens) == 0) return(NULL)
- #Each ensemble gets its own file.
- time_dim = ncdf4::ncdim_def(
+ time_vals <- as.numeric(zoo::index(data.for.this.year.ens))
+ time_dim <- ncdf4::ncdim_def(
name = "time",
- paste(units = "hours since", format(start_date, "%Y-%m-%dT%H:%M")),
- seq(0, (length(zoo::index(
- data.for.this.year.ens
- )) * 3) - 1 , length.out = length(zoo::index(data.for.this.year.ens))),
+ units = "seconds since 1970-01-01 00:00:00",
+ vals = time_vals,
create_dimvar = TRUE
)
lat_dim = ncdf4::ncdim_def("latitude", "degree_north", lat, create_dimvar = TRUE)
@@ -170,9 +199,6 @@ met2CF.ERA5<- function(lat,
nc_var_list <- purrr::map2(cf_var_names,
cf_var_units,
~ ncdf4::ncvar_def(.x, .y, list(time_dim, lat_dim, lon_dim), missval = NA_real_))
-
- #results$dbfile.name <- flname
-
if (!file.exists(flname) || overwrite) {
tryCatch({
@@ -200,12 +226,9 @@ met2CF.ERA5<- function(lat,
" already exists. It was not overwritten."
))
}
-
-
})
-
return(results)
})
#For each ensemble
return(results_list )
-}
+}
\ No newline at end of file
diff --git a/modules/data.atmosphere/R/met_temporal_downscale.Gaussian_ensemble.R b/modules/data.atmosphere/R/met_temporal_downscale.Gaussian_ensemble.R
index 375c44e1857..c73b7479370 100644
--- a/modules/data.atmosphere/R/met_temporal_downscale.Gaussian_ensemble.R
+++ b/modules/data.atmosphere/R/met_temporal_downscale.Gaussian_ensemble.R
@@ -23,24 +23,43 @@ substrRight <- function(x, n) {
##' @param w_len - numeric value that is the window length in days
##' @param utc_diff - numeric value in HOURS that is local standard time difference from UTC time. CST is -6
##' @param ... further arguments, currently ignored
-##' @author James Simkins
+##' @author James Simkins, Akash
met_temporal_downscale.Gaussian_ensemble <- function(in.path, in.prefix, outfolder,
input_met, train_met, overwrite = FALSE, verbose = FALSE,
swdn_method = "sine", n_ens = 10, w_len = 20, utc_diff = -6, ... ) {
-
+
sub_str <- substrRight(input_met, 7)
year <- substr(sub_str, 1, 4)
year <- as.numeric(year)
eph_year <- year
source_name <- substr(input_met, 1, nchar(input_met) - 8)
- # Variable names
- var <- data.frame(CF.name <- c("air_temperature", "air_temperature_max", "air_temperature_min",
- "surface_downwelling_longwave_flux_in_air", "air_pressure", "surface_downwelling_shortwave_flux_in_air",
- "eastward_wind", "northward_wind", "specific_humidity", "precipitation_flux"),
- units <- c("Kelvin", "Kelvin", "Kelvin", "W/m2", "Pascal", "W/m2", "m/s",
- "m/s", "g/g", "kg/m2/s"))
+
+ # Get meteorological variables from PEcAn's met-specific standard table
+ processed_vars <- c(
+ "air_temperature",
+ "air_temperature_max",
+ "air_temperature_min",
+ "surface_downwelling_longwave_flux_in_air",
+ "air_pressure",
+ "surface_downwelling_shortwave_flux_in_air",
+ "eastward_wind",
+ "northward_wind",
+ "specific_humidity",
+ "precipitation_flux",
+ "soil_temperature",
+ "relative_humidity",
+ "volume_fraction_of_condensed_water_in_soil",
+ "surface_downwelling_photosynthetic_photon_flux_in_air"
+ )
+
+ # Filter pecan_standard_met_table for only variables processed by your function
+ var <- pecan_standard_met_table %>%
+ dplyr::filter(.data$cf_standard_name %in% processed_vars) %>%
+ dplyr::select(CF.name = .data$cf_standard_name,
+ .data$units)
+
# Reading in the training data
train <- list()
tem <- ncdf4::nc_open(train_met)
@@ -100,6 +119,55 @@ met_temporal_downscale.Gaussian_ensemble <- function(in.path, in.prefix, outfold
sp <- 365
}
+ # Estimate soil thermal parameters amplitude damping factor and phase lag hours
+ # to characterize soil-air thermal coupling
+ if (!all(is.na(train$soil_temperature)) && !all(is.na(train$air_temperature))) {
+ # Calculate daily temperature ranges and parameters
+ nsteps <- ceiling(length(train$air_temperature) / (24/reso))
+ idx <- rep(seq_len(nsteps), each = 24/reso, length.out = length(train$air_temperature))
+ air_range <- tapply(train$air_temperature, idx, function(x) diff(range(x, na.rm=TRUE)))
+ soil_range <- tapply(train$soil_temperature, idx, function(x) diff(range(x, na.rm=TRUE)))
+ # The 0.5 K threshold filters out sensor noise and ensures only days with
+ # good diurnal temperature cycles are included in calculations.
+ valid_days <- which(!is.na(air_range) & !is.na(soil_range) & air_range > 0.5 & soil_range > 0)
+
+ # Damping factor calculation
+ if (length(valid_days) >= 10) {
+ damping_factor <- stats::median(soil_range[valid_days] / air_range[valid_days], na.rm = TRUE)
+ # soil temperature amplitude is always reduced compared to air temperature (lower bound 0.3)
+ # but never exceeds it (upper bound 1.0) at shallow depths.
+ damping_factor <- min(max(damping_factor, 0.3), 1.0)
+ } else {
+ damping_factor <- NA
+ }
+
+ # Phase lag calculation
+ air_detrend <- train$air_temperature - mean(train$air_temperature, na.rm = TRUE)
+ soil_detrend <- train$soil_temperature - mean(train$soil_temperature, na.rm = TRUE)
+ valid_idx <- !is.na(air_detrend) & !is.na(soil_detrend)
+
+ # 48 hours represents 2 complete diurnal cycles, which is the minimum for reliable
+ # cross-correlation analysis in temperature time series with 24-hour periodicity
+ if (sum(valid_idx) >= 48) {
+ air_clean <- air_detrend[valid_idx]
+ soil_clean <- soil_detrend[valid_idx]
+ max_lag <- min(48/reso, length(air_clean)/4)
+ if (max_lag >= 1) {
+ ccf_res <- stats::ccf(air_clean, soil_clean, lag.max = max_lag, plot = FALSE)
+ lag_hr <- abs(ccf_res$lag[which.max(ccf_res$acf)]) * reso
+ # at depths of 5, 10, 20 and 30 cm the delay is about 1, 2, 4 and 8 h, respectively
+ phase_lag_hr <- min(max(lag_hr,0),8)
+ } else {
+ phase_lag_hr <- NA
+ }
+ } else {
+ phase_lag_hr <- NA
+ }
+ } else {
+ damping_factor <- NA
+ phase_lag_hr <- NA
+ }
+
# Now we start a for loop for the ensemble members and begin downscaling. A
# random normal distribution is used to downscale as so;
# (mean <- value of source data) (sd <- +/- window_days of train data at the
@@ -141,14 +209,110 @@ met_temporal_downscale.Gaussian_ensemble <- function(in.path, in.prefix, outfold
len_diff <- reso_len - length(tem.met)
tem.met <- append(tem.met,values = rep(NA,len_diff))
} else {
- for (x in seq(from=0, to=reso_len, by=div)){
- tem.met[x] <- sourtemp[x / div]
- }
+ for (x in seq(from=0, to=reso_len, by=div)){
+ tem.met[x] <- sourtemp[x / div]
+ }
}
spline.temp = zoo::na.spline(tem.met)
df[1:reso_len, "air_temperature"] <- spline.temp
+ # air temperature max and min downscaling with Gaussian ensemble
+ if (all(is.na(source$air_temperature_max)) || all(is.na(source$air_temperature_min))) {
+ daily_temps <- split(df$air_temperature, ceiling(seq_along(df$air_temperature)/(24/reso)))
+ daily_max <- sapply(daily_temps, max)
+ daily_min <- sapply(daily_temps, min)
+
+ df$air_temperature_max <- rep(daily_max, each = 24/reso)
+ df$air_temperature_min <- rep(daily_min, each = 24/reso)
+ } else {
+ temp_max <- vector()
+ temp_min <- vector()
+
+ for (x in seq_along(source$air_temperature_max)) {
+ lowday <- (x - w_len) * div
+ highday <- (x + w_len) * div
+ if (lowday < 0) {
+ lowday <- 0
+ }
+ if (highday > reso_len) {
+ highday <- reso_len
+ }
+
+ if (!is.na(source$air_temperature_max[x])) {
+ dwnsc_max <- vector()
+ for (n in seq_len(div)) {
+ dwnsc_max[n] <- stats::rnorm(1,
+ mean = source$air_temperature_max[x],
+ sd = stats::sd(train$air_temperature_max[lowday:highday], na.rm = TRUE))
+ }
+ temp_max <- append(temp_max, dwnsc_max)
+ }
+
+ if (!is.na(source$air_temperature_min[x])) {
+ dwnsc_min <- vector()
+ for (n in seq_len(div)) {
+ dwnsc_min[n] <- stats::rnorm(1,
+ mean = source$air_temperature_min[x],
+ sd = stats::sd(train$air_temperature_min[lowday:highday], na.rm = TRUE))
+ }
+ temp_min <- append(temp_min, dwnsc_min)
+ }
+ }
+ df$air_temperature_max <- temp_max[1:reso_len]
+ df$air_temperature_min <- temp_min[1:reso_len]
+ }
+ df$air_temperature_max <- pmax(df$air_temperature_max, df$air_temperature, na.rm = TRUE)
+ df$air_temperature_min <- pmin(df$air_temperature_min, df$air_temperature, na.rm = TRUE)
+
+ # soil temperature
+ soursoiltemp <- source$soil_temperature
+ if (!all(is.na(soursoiltemp))) {
+ soil.met <- vector()
+
+ # handles the common case where source data is daily but target is sub-daily
+ if (length(soursoiltemp) <= 366) {
+ for (i in seq_along(soursoiltemp)) {
+ soil.met <- append(soil.met, rep(soursoiltemp[i], div))
+ }
+ } else {
+ # source data is sub-daily; use direct sampling
+ for (x in seq(from=0, to=reso_len, by=div)) {
+ soil.met[x] <- soursoiltemp[x / div]
+ }
+ }
+
+ # phase lag from hours to time steps based on target resolution
+ lag_step <- round(phase_lag_hr / reso)
+ # baseline temperature values for soil-air coupling
+ tair_mean <- mean(df$air_temperature, na.rm = TRUE)
+ soil_base <- mean(soursoiltemp, na.rm = TRUE)
+ if (lag_step > 0 && lag_step < length(df$air_temperature)) {
+ # soil temperature responds to air temperature with a delay (typically 1-4 hours)
+ # pad beginning with mean value to maintain series length
+ tair_lag <- c(rep(tair_mean, lag_step),
+ df$air_temperature[1:(length(df$air_temperature) - lag_step)])
+ } else {
+ # this occurs when lag is too large (>data length) or non-positive
+ # still reasonable due to damping factor
+ tair_lag <- df$air_temperature
+ }
+ soil_proc <- soil_base +
+ damping_factor * (tair_lag - tair_mean)
+
+ if (!all(is.na(train$soil_temperature))) {
+ soil_residual_sd <- stats::sd(train$soil_temperature - train$air_temperature, na.rm = TRUE)
+ if (!is.na(soil_residual_sd) && soil_residual_sd > 0) {
+ soil_noise <- stats::rnorm(length(soil_proc), 0, soil_residual_sd * 0.5)
+ soil_proc <- soil_proc + soil_noise
+ }
+ }
+
+ df[1:reso_len, "soil_temperature"] <- soil_proc[1:reso_len]
+ } else {
+ df[1:reso_len, "soil_temperature"] <- rep(NA, reso_len)
+ }
+
# after this maybe we can run it through the random norm to add variation
# but not sure how models will react
@@ -156,7 +320,7 @@ met_temporal_downscale.Gaussian_ensemble <- function(in.path, in.prefix, outfold
# a total possible amount of precip. It randomly distributes the values of
# precipitation
rand_vect_cont <- function(N, M, sd = 1) {
- vec <- truncnorm::rtruncnorm(N, a = 0, b = Inf,M/N, sd)
+ vec <- truncnorm::rtruncnorm(N, a = 0, b = Inf, M/N, sd)
vec/sum(vec) * M
}
precip <- vector()
@@ -177,9 +341,8 @@ met_temporal_downscale.Gaussian_ensemble <- function(in.path, in.prefix, outfold
}
df$precipitation_flux <- precip
- # Specific Humidity, eastward wind and northward wind
wnd <- c("specific_humidity", "eastward_wind", "northward_wind", "surface_downwelling_longwave_flux_in_air",
- "air_pressure")
+ "air_pressure", "relative_humidity", "volume_fraction_of_condensed_water_in_soil")
for (u in wnd) {
train_vec <- vector()
a <- as.numeric(train[[u]])
@@ -199,7 +362,55 @@ met_temporal_downscale.Gaussian_ensemble <- function(in.path, in.prefix, outfold
}
dwnsc_day <- vector()
for (n in seq_len(div)) {
- dwnsc_day[n] <- stats::rnorm(1, mean = sour[x], sd = stats::sd(a[lowday:highday]))
+ if (u == "volume_fraction_of_condensed_water_in_soil") {
+ idx <- (x - 1) * div + n
+ current_precip <- ifelse(idx <= length(precip) && idx >= 1,
+ precip[idx], 0)
+ if (is.na(current_precip)) current_precip <- 0
+ antecedent_sm <- ifelse(idx > 1,
+ sour[max(1, x-1)],
+ sour[x])
+ base_sd <- stats::sd(a[lowday:highday], na.rm = TRUE)
+ sm_cv <- stats::sd(sour, na.rm = TRUE) / mean(sour, na.rm = TRUE)
+ if (!is.na(sm_cv) && is.finite(sm_cv)) {
+ sm_fc <- stats::quantile(sour, 0.75, na.rm = TRUE)
+ moisture_stress <- abs(antecedent_sm - sm_fc) / sm_fc
+ uncertainty_factor <- 1.0 + sm_cv * moisture_stress
+ if (current_precip > 0) {
+ uncertainty_factor <- uncertainty_factor * 1.2 # 20% increase during precipitation
+ }
+ } else {
+ uncertainty_factor <- 1.0
+ }
+ uncertainty_factor <- pmax(0.7, pmin(uncertainty_factor, 1.8))
+ sd_adj <- base_sd * uncertainty_factor
+ dwnsc_day[n] <- truncnorm::rtruncnorm(1, a = 0, b = 1, mean = sour[x], sd = sd_adj)
+ } else if (u == "relative_humidity") {
+ base_sd <- stats::sd(a[lowday:highday], na.rm = TRUE)
+ if (is.na(base_sd) || base_sd <= 0) {
+ base_sd <- 5.0
+ }
+ temp_idx <- (x - 1) * div + n
+ if (temp_idx > 0 && temp_idx <= length(df$air_temperature) && x <= length(source$air_temperature)) {
+ current_temp_c <- PEcAn.utils::ud_convert(df$air_temperature[temp_idx], "kelvin", "celsius")
+ source_temp_c <- PEcAn.utils::ud_convert(source$air_temperature[x], "kelvin", "celsius")
+
+ if (current_temp_c > -40 && current_temp_c < 50 && source_temp_c > -40 && source_temp_c < 50) {
+ # magnus formula for saturation vapor pressure (kPa)
+ es_current <- PEcAn.data.atmosphere::t2es(current_temp_c, method = "Magnus")
+ es_source <- PEcAn.data.atmosphere::t2es(source_temp_c, method = "Magnus")
+ saturation_ratio <- es_source / es_current
+ adjusted_rh <- sour[x] * saturation_ratio
+ } else {
+ adjusted_rh <- sour[x]
+ }
+ } else {
+ adjusted_rh <- sour[x]
+ }
+ dwnsc_day[n] <- truncnorm::rtruncnorm(1, a = 0, b = 100, mean = adjusted_rh, sd = base_sd)
+ } else {
+ dwnsc_day[n] <- stats::rnorm(1, mean = sour[x], sd = stats::sd(a[lowday:highday], na.rm = TRUE))
+ }
}
train_vec <- append(train_vec, dwnsc_day)
}
@@ -209,6 +420,13 @@ met_temporal_downscale.Gaussian_ensemble <- function(in.path, in.prefix, outfold
df$specific_humidity[df$specific_humidity < 0] <- 0
+ if ("volume_fraction_of_condensed_water_in_soil" %in% names(df)) {
+ df$volume_fraction_of_condensed_water_in_soil[
+ df$volume_fraction_of_condensed_water_in_soil < 0] <- 0
+ df$volume_fraction_of_condensed_water_in_soil[
+ df$volume_fraction_of_condensed_water_in_soil > 1] <- 1
+ }
+
# Downwelling shortwave radiation flux Ephemeris is a function to calculate
# sunrise/sunset times and daylength for SW calculations in sine swdn_method
ephemeris <- function(lat, lon, date, span = 1, tz = "UTC") {
@@ -257,10 +475,11 @@ met_temporal_downscale.Gaussian_ensemble <- function(in.path, in.prefix, outfold
# swdn = 0 without sunlight
srs <- eph$sunrise
- hr <- substr(srs[i], 1, 2)
- hr <- as.numeric(hr)
+ srs_hr <- floor(srs[i] / 100) # extract hours (works for both 430 -> 4 and 1215 -> 12)
+ srs_min <- (srs[i] %% 100) / 60 # convert minutes to fraction (30 -> 0.5)
# utc_diff must be used so we can begin the sine wave at local sunrise
- hr <- hr + utc_diff
+ hr <- srs_hr + srs_min + utc_diff
+ hr <- max(0, min(23, hr))
l <- vector()
for (n in seq_len(hr)) {
@@ -269,7 +488,7 @@ met_temporal_downscale.Gaussian_ensemble <- function(in.path, in.prefix, outfold
for (n in seq_along(wav)) {
l[n + hr] <- wav[n]
}
- for (n in seq_len(24 - (length(wav) + hr))) {
+ for (n in seq_len(floor(24 - (length(wav) + hr)))) {
l[n + hr + length(wav)] <- 0
}
@@ -307,8 +526,8 @@ met_temporal_downscale.Gaussian_ensemble <- function(in.path, in.prefix, outfold
as.POSIXct(paste0(eph_year, "-12-31 18:00:00"),tz="UTC"),
by = inter)
days.doy <- as.numeric(format(days,"%j"))
- days.hour <- lubridate::hour(days) + lubridate::minute(days) / 60 + lubridate::second(days) / 3600
- cosZ <- PEcAn.data.atmosphere::cos_solar_zenith_angle(days.doy, lat_train, lon_train, inter, days.hour)
+ days.hour <- as.numeric(lubridate::hour(days) + lubridate::minute(days) / 60 + lubridate::second(days) / 3600)
+ cosZ <- PEcAn.data.atmosphere::cos_solar_zenith_angle(days.doy, lat_train, lon_train, reso*3600, days.hour)
I <- 1000 * cosZ
m <- vector()
for (i in seq_len(12)) {
@@ -327,13 +546,19 @@ met_temporal_downscale.Gaussian_ensemble <- function(in.path, in.prefix, outfold
C <- 0.7
hdry <- vector()
for (i in seq_along(precip)) {
- if (precip[i] > 0) {
- p <- 0.65
- }
- if (precip[i] == 0) {
- p <- 2
+ if (i <= length(df$air_temperature_max) &&
+ i <= length(df$air_temperature_min) &&
+ i <= length(bmlist)) {
+ p <- ifelse(precip[i] > 0, 0.65, 2)
+ if (!is.na(df$air_temperature_max[i]) && !is.na(df$air_temperature_min[i])) {
+ temp_range <- df$air_temperature_max[i] - df$air_temperature_min[i]
+ hdry[i] <- A * p * (1 - exp(-1 * bmlist[i] * (temp_range^C)))
+ } else {
+ hdry[i] <- 0
+ }
+ } else {
+ hdry[i] <- 0
}
- hdry[i] <- A * p * (1 - exp(-1 * bmlist[i] * ((temp_max[i] - temp_min[i])^C)))
}
hdry[hdry < 0] <- 0
swflux <- hdry * I
@@ -342,10 +567,70 @@ met_temporal_downscale.Gaussian_ensemble <- function(in.path, in.prefix, outfold
# Waichler method is the only method with ensembles for downwelling shortwave flux
df$surface_downwelling_shortwave_flux_in_air <- swflux
- # Will need to change below if we figure out how to downscale this
- df$air_temperature_max <- rep(NA, reso_len)
- df$air_temperature_min <- rep(NA, reso_len)
+ # PPFD downscaling
+ ppfd_source <- source$surface_downwelling_photosynthetic_photon_flux_in_air
+ if (all(is.na(ppfd_source))) {
+ ppfd_flux <- PEcAn.data.atmosphere::sw2ppfd(swflux) / 1000 # Convert umol to mol
+ ppfd_flux[ppfd_flux < 0] <- 0
+ df$surface_downwelling_photosynthetic_photon_flux_in_air <- ppfd_flux
+ } else {
+ inter <- paste0(reso, " hour")
+ days <- seq(as.POSIXct(paste0(year, "-01-01 00:00:00"), tz = "UTC"),
+ length.out = reso_len, by = inter)
+ days.doy <- as.numeric(format(days, "%j"))
+ days.hour <- as.numeric(format(days, "%H")) + as.numeric(format(days, "%M"))/60
+ cosZ <- PEcAn.data.atmosphere::cos_solar_zenith_angle(days.doy, lat_train, lon_train,
+ reso*3600, days.hour)
+ is_daylight <- cosZ > 0
+
+ train_ppfd <- train$surface_downwelling_photosynthetic_photon_flux_in_air
+ if (all(is.na(train_ppfd)) && !all(is.na(train$surface_downwelling_shortwave_flux_in_air))) {
+ train_ppfd <- PEcAn.data.atmosphere::sw2ppfd(train$surface_downwelling_shortwave_flux_in_air) / 1000 # Convert to mol
+ }
+
+ train_vec <- vector()
+ for (x in seq_along(ppfd_source)) {
+ lowday <- (x - w_len) * div
+ highday <- (x + w_len) * div
+ if (lowday < 0) {
+ lowday <- 0
+ }
+ if (highday > reso_len) {
+ highday <- reso_len
+ }
+ if (length(train_ppfd) >= highday && !all(is.na(train_ppfd[lowday:highday]))) {
+ base_sd <- stats::sd(train_ppfd[lowday:highday], na.rm = TRUE)
+ } else {
+ base_sd <- NA
+ }
+ if (is.na(base_sd) || base_sd < 1e-6) {
+ cv_ppfd <- stats::sd(ppfd_source, na.rm = TRUE) / mean(ppfd_source, na.rm = TRUE)
+ if (!is.na(cv_ppfd) && is.finite(cv_ppfd)) {
+ base_sd <- cv_ppfd * abs(ppfd_source[x])
+ } else {
+ base_sd <- 0.15 * abs(ppfd_source[x])
+ }
+ }
+
+ dwnsc_day <- vector()
+ for (n in seq_len(div)) {
+ idx <- (x - 1) * div + n
+ if (idx > 0 && idx <= length(is_daylight) && is_daylight[idx]) {
+ dwnsc_day[n] <- max(0, stats::rnorm(1, mean = ppfd_source[x], sd = base_sd)) # daytime using gaussian downscaling
+ } else {
+ dwnsc_day[n] <- 0 # nighttime ppfd must be zero
+ }
+ }
+ train_vec <- c(train_vec, dwnsc_day)
+ }
+ df$surface_downwelling_photosynthetic_photon_flux_in_air <- train_vec[1:reso_len]
+ }
+ df$surface_downwelling_photosynthetic_photon_flux_in_air[
+ df$surface_downwelling_photosynthetic_photon_flux_in_air < 0] <- 0
+ # maximum PPFD is ~0.0025 mol m-2 s-1 (2500 umol m-2 s-1) under full sunlight
+ df$surface_downwelling_photosynthetic_photon_flux_in_air[
+ df$surface_downwelling_photosynthetic_photon_flux_in_air > 0.0025] <- 0.0025
# Putting all the variables together in a data frame
downscaled.met <- data.frame(df)
@@ -366,13 +651,14 @@ met_temporal_downscale.Gaussian_ensemble <- function(in.path, in.prefix, outfold
rows <- 1
dir.create(outfolder, showWarnings = FALSE, recursive = TRUE)
-
- loc.file <- file.path(outfolder, paste0(source_name, ".dwnsc.gauss.ens",
+ source_name <- basename(input_met) # extracts "US-Ha1.2004.nc" from full path
+ loc.file <- file.path(outfolder, paste0(tools::file_path_sans_ext(source_name), ".dwnsc.gauss.ens",
e, ".", year, ".nc"))
- loc <- ncdf4::nc_create(filename = loc.file, vars = train.list, verbose = verbose)
+ loc <- ncdf4::nc_create(filename = loc.file, vars = train.list, force_v4 = TRUE, verbose = verbose)
for (j in seq_along(var$CF.name)) {
- ncdf4::ncvar_put(nc = loc, varid = as.character(var$CF.name[j]), vals = downscaled.met[[j]])
+ var_name <- as.character(var$CF.name[j])
+ ncdf4::ncvar_put(nc = loc, varid = var_name, vals = downscaled.met[[var_name]])
}
ncdf4::nc_close(loc)
diff --git a/modules/data.atmosphere/R/metutils.R b/modules/data.atmosphere/R/metutils.R
index c5662e2db66..55d77a02af0 100644
--- a/modules/data.atmosphere/R/metutils.R
+++ b/modules/data.atmosphere/R/metutils.R
@@ -69,32 +69,111 @@ get.vpd <- function(rh, temp) {
## calculate saturation vapor pressure
es <- get.es(temp)
## calculate vapor pressure deficit
- return(((100 - rh)/100) * es)
+ return(((100 - rh) / 100) * es)
} # get.vpd
-##' Calculate saturation vapor pressure
-##'
-##' @title get es
-##' @param temp temperature in degrees C
-##' @return saturation vapor pressure in mb
-##' @export
-##' @author David LeBauer
-##' @examples
-##' temp <- -30:30
-##' plot(temp, get.es(temp))
-get.es <- function(temp) {
- return(6.11 * exp((2500000/461) * (1/273 - 1/(273 + temp))))
-} # get.es
+#' Saturation vapor pressure (t2es)
+#'
+#' Compute saturation vapor pressure from temperature using one of the
+#' following methods:
+#' - (Default) Clausius–Clapeyron (FAO-56 style) — Recommended for most applications.
+#' Commonly used approximation for terrestrial ecosystem models, consistent with Penman-Monteith
+#' and FAO-56 (Allen et al, 1998).
+#' - Magnus — More accurate in the range −40 to +50 C. Coefficients as in Alduchov & Eskridge (1996).
+#' - Goff–Gratch - Highest accuracy; use when following WMO-style recommendations. Goff–Gratch 1946; WMO, 2014.
+#'
+#' Each method uses different units internally, users can specify units
+#' for both inputs and outputs, with defaults "degC" and "kPa", respectively.
+#'
+#' @param temp numeric vector of temperatures
+#' @param method one of "Magnus","ClausiusClapeyron" (default), or "GoffGratch".
+#' See details for references.
+#' @param temp_units input temperature units ("degC","K","degF"), default "degC"
+#' @param out_units output pressure units ("kPa","hPa","Pa","mb"), default "kPa"
+#' @return numeric vector in `out_units`
+#' @aliases t2es
+#'
+#' @references
+#' Alduchov, O. A., & Eskridge, R. E. (1996). Improved Magnus Form Approximation of Saturation Vapor Pressure. J. Appl. Meteor., 35(4), 601–609. doi:10.1175/1520-0450(1996)035<0601:IMFAOS>2.0.CO;2
+#'
+#' Allen, R. G., Pereira, L. S., Raes, D., & Smith, M. (1998). **Crop evapotranspiration – Guidelines for computing crop water requirements.** FAO Irrigation and Drainage Paper 56.
+#'
+#' Goff, J. A., & Gratch, S. (1946). Low-pressure properties of water from −160 to 212F. Trans. ASHVE, 52, 95–122.
+#'
+#' WMO (2014) Guide to Instruments and Methods of Observation (WMO-No. 8), ch. 4.
+#' @md
+#' @author David LeBauer
+#' @examples
+#' # Calculate saturation vapor pressure at 20°C
+#' sat_vapor_pressure(20)
+#' t2es(20)
+#'
+#' # Using different methods
+#' t2es(c(10, 20, 30), method = "Magnus")
+#' t2es(283.15, temp_units = "K", method = "GoffGratch")
+#'
+#' # Different output units
+#' t2es(20, out_units = "hPa")
+#' @export
+sat_vapor_pressure <- function(
+ temp,
+ temp_units = "degC",
+ out_units = "kPa",
+ method = c("ClausiusClapeyron", "Magnus", "GoffGratch")) {
+ method <- match.arg(method)
+ # normalize common alias
+ if (tolower(out_units) == "mb") out_units <- "hPa"
-## TODO: merge SatVapPress with get.es; add option to choose method
-SatVapPres <- function(T) {
- # /estimates saturation vapor pressure (kPa) Goff-Gratch 1946 /input: T = absolute temperature
- T_st <- 373.15 ##steam temperature (K)
- e_st <- 1013.25 ##/saturation vapor pressure at steam temp (hPa)
- return(0.1 * exp(-7.90298 * (T_st/T - 1) + 5.02808 * log(T_st/T) - 1.3816e-07 * (10^(11.344 * (1 - T/T_st)) -
- 1) + 0.0081328 * (10^(-3.49149 * (T_st/T - 1)) - 1) + log(e_st)))
-} # SatVapPres
+ if (method == "Magnus") {
+ # canonical temp: degC; canonical pressure: kPa
+ Tc <- PEcAn.utils::ud_convert(temp, temp_units, "degC")
+ es_kPa <- 0.61078 * exp((17.27 * Tc) / (Tc + 237.3))
+ return(PEcAn.utils::ud_convert(es_kPa, "kPa", out_units))
+ }
+ if (method == "ClausiusClapeyron") {
+ # canonical temp: degC; canonical pressure: hPa
+ Tc <- PEcAn.utils::ud_convert(temp, temp_units, "degC")
+ L <- 2.5e6 # J kg^-1
+ Rv <- 461 # J kg^-1 K^-1
+ es_hPa <- 6.11 * exp((L / Rv) * (1 / 273 - 1 / (273 + Tc)))
+ return(PEcAn.utils::ud_convert(es_hPa, "hPa", out_units))
+ }
+
+ if (method == "GoffGratch") {
+ # canonical temp: K; canonical pressure: hPa
+ Tk <- PEcAn.utils::ud_convert(temp, temp_units, "K")
+ Tst <- 373.15 # K
+ est <- 1013.246 # hPa at steam point
+ lg10 <- function(z) log10(z)
+ log10_es <- -7.90298 * (Tst / Tk - 1) +
+ 5.02808 * lg10(Tst / Tk) -
+ 1.3816e-7 * (10^(11.344 * (1 - Tk / Tst)) - 1) +
+ 8.1328e-3 * (10^(-3.49149 * (Tst / Tk - 1)) - 1) +
+ lg10(est)
+ es_hPa <- 10^log10_es
+ return(PEcAn.utils::ud_convert(es_hPa, "hPa", out_units))
+ }
+}
+
+# ---- Aliases for backward-compatibility ----
+
+#' @rdname sat_vapor_pressure
+#' @export
+get.es <- function(temp) {
+ sat_vapor_pressure(
+ temp = temp,
+ method = "ClausiusClapeyron",
+ temp_units = "degC",
+ out_units = "hPa"
+ )
+}
+
+#' @rdname sat_vapor_pressure
+#' @export
+t2es <- function(temp, temp_units = "degC", out_units = "kPa", method = "ClausiusClapeyron") {
+ sat_vapor_pressure(temp = temp, temp_units = temp_units, out_units = out_units, method = method)
+}
##' Calculate RH from temperature and dewpoint
##'
@@ -201,7 +280,7 @@ sw2ppfd <- function(sw) {
##' Campbell and Norman (1998). Introduction to Environmental Biophysics. pg 151 'the energy content of solar radiation in the PAR waveband is 2.35 x 10^5 J/mol'
##' See also the chapter radiation basics (10)
##' Here the input is the total solar radiation so to obtain in the PAR spectrum need to multiply by 0.486
-##' This last value 0.486 is based on the approximation that PAR is 0.45-0.50 of the total radiation
+##' This is based on the approximation that PAR is 0.45-0.50 of the total radiation
##' This means that 1e6 / (2.35e6) * 0.486 = 2.07
##' 1e6 converts from mol to mu mol
##' 1/3600 divides the values in hours to seconds
diff --git a/modules/data.atmosphere/R/pecan_standard_met_table.R b/modules/data.atmosphere/R/pecan_standard_met_table.R
index 557e1322959..cb29ff0e71c 100644
--- a/modules/data.atmosphere/R/pecan_standard_met_table.R
+++ b/modules/data.atmosphere/R/pecan_standard_met_table.R
@@ -2,23 +2,25 @@
#'
#' @export
pecan_standard_met_table <- tibble::tribble(
- ~`cf_standard_name` , ~units , ~is_required, ~bety , ~isimip , ~cruncep , ~narr , ~ameriflux ,
- "air_temperature" , "K" , TRUE, "airT" , "tasAdjust" , "tair" , "air" , "TA (C)" ,
- "air_temperature_max" , "K" , FALSE, NA , "tasmaxAdjust" , NA , "tmax" , NA ,
- "air_temperature_min" , "K" , FALSE, NA , "tasminAdjust" , NA , "tmin" , NA ,
- "air_pressure" , "Pa" , TRUE, "air_pressure" , NA , NA , NA , "PRESS (KPa)" ,
- "mole_fraction_of_carbon_dioxide_in_air" , "mol/mol" , FALSE, NA , NA , NA , NA , "CO2" ,
- "moisture_content_of_soil_layer" , "kg m-2" , FALSE, NA , NA , NA , NA , NA ,
- "soil_temperature" , "K" , FALSE, "soilT" , NA , NA , NA , "TS1 *(NOT DONE)*" ,
- "relative_humidity" , "%" , FALSE, "relative_humidity" , "rhurs" , NA , "rhum" , "RH" ,
- "specific_humidity" , "1" , TRUE, "specific_humidity" , NA , "qair" , "shum" , "CALC(RH)" ,
- "water_vapor_saturation_deficit" , "Pa" , FALSE, "VPD" , NA , NA , NA , "VPD *(NOT DONE)*" ,
- "surface_downwelling_longwave_flux_in_air" , "W m-2" , TRUE, "same" , "rldsAdjust" , "lwdown" , "dlwrf" , "Rgl" ,
- "surface_downwelling_shortwave_flux_in_air" , "W m-2" , TRUE, "solar_radiation" , "rsdsAdjust" , "swdown" , "dswrf" , "Rg" ,
- "surface_downwelling_photosynthetic_photon_flux_in_air" , "mol m-2 s-1" , FALSE, "PAR" , NA , NA , NA , "PAR *(NOT DONE)*" ,
- "precipitation_flux" , "kg m-2 s-1" , TRUE, "cccc" , "prAdjust" , "rain" , "acpc" , "PREC (mm/s)" ,
- "wind_to_direction" , "degrees" , FALSE, "wind_direction" , NA , NA , NA , "WD" ,
- "wind_speed" , "m/s" , FALSE, "Wspd" , NA , NA , NA , "WS" ,
- "eastward_wind" , "m/s" , TRUE, "eastward_wind" , NA , NA , NA , "CALC(WS+WD)" ,
- "northward_wind" , "m/s" , TRUE, "northward_wind" , NA , NA , NA , "CALC(WS+WD)"
+ ~`cf_standard_name` , ~units , ~is_required, ~bety , ~isimip , ~cruncep , ~narr , ~ameriflux , ~era5 ,
+ "air_temperature" , "K" , TRUE, "airT" , "tasAdjust" , "tair" , "air" , "TA (C)" , "t2m" ,
+ "air_temperature_max" , "K" , FALSE, NA , "tasmaxAdjust" , NA , "tmax" , NA , NA ,
+ "air_temperature_min" , "K" , FALSE, NA , "tasminAdjust" , NA , "tmin" , NA , NA ,
+ "air_pressure" , "Pa" , TRUE, "air_pressure" , NA , NA , NA , "PRESS (KPa)" , "sp" ,
+ "dew_point_temperature" , "K" , FALSE, NA , NA , NA , NA , NA , "d2m" ,
+ "mole_fraction_of_carbon_dioxide_in_air" , "1" , FALSE, NA , NA , NA , NA , "CO2" , NA ,
+ "moisture_content_of_soil_layer" , "kg m-2" , FALSE, NA , NA , NA , NA , NA , NA ,
+ "soil_temperature" , "K" , FALSE, "soilT" , NA , NA , NA , "TS1 *(NOT DONE)*" , NA ,
+ "relative_humidity" , "%" , FALSE, "relative_humidity" , "rhurs" , NA , "rhum" , "RH" , NA ,
+ "specific_humidity" , "1" , TRUE, "specific_humidity" , NA , "qair" , "shum" , "CALC(RH)" , NA ,
+ "water_vapor_saturation_deficit" , "Pa" , FALSE, "VPD" , NA , NA , NA , "VPD *(NOT DONE)*" , NA ,
+ "surface_downwelling_longwave_flux_in_air" , "W m-2" , TRUE, "same" , "rldsAdjust" , "lwdown" , "dlwrf" , "Rgl" , "strd",
+ "surface_downwelling_shortwave_flux_in_air" , "W m-2" , TRUE, "solar_radiation" , "rsdsAdjust" , "swdown" , "dswrf" , "Rg" , "ssrd",
+ "surface_downwelling_photosynthetic_photon_flux_in_air" , "mol m-2 s-1" , FALSE, "PAR" , NA , NA , NA , "PAR *(NOT DONE)*" , NA ,
+ "precipitation_flux" , "kg m-2 s-1" , TRUE, "cccc" , "prAdjust" , "rain" , "acpc" , "PREC (mm/s)" , "tp" ,
+ "wind_to_direction" , "degrees" , FALSE, "wind_direction" , NA , NA , NA , "WD" , NA ,
+ "wind_speed" , "m s-1" , FALSE, "Wspd" , NA , NA , NA , "WS" , NA ,
+ "eastward_wind" , "m s-1" , TRUE, "eastward_wind" , NA , NA , NA , "CALC(WS+WD)" , "u10" ,
+ "northward_wind" , "m s-1" , TRUE, "northward_wind" , NA , NA , NA , "CALC(WS+WD)" , "v10" ,
+ "volume_fraction_of_condensed_water_in_soil" , "1" , FALSE, "soilM" , NA , NA , NA , "SWC_1" , "swvl1"
)
diff --git a/modules/data.atmosphere/R/zzz.R b/modules/data.atmosphere/R/zzz.R
new file mode 100644
index 00000000000..c3c733e65be
--- /dev/null
+++ b/modules/data.atmosphere/R/zzz.R
@@ -0,0 +1,6 @@
+
+# A testing shim:
+# Identical to utils::download.file during normal operation,
+# but lets us replace it under test with stubs that don't use the network.
+# See ?testthat::with_mocked_bindings for details
+download_file_shim <- function(...) utils::download.file(...)
diff --git a/modules/data.atmosphere/README.md b/modules/data.atmosphere/README.md
index cb59cf9df5c..5daff41a180 100644
--- a/modules/data.atmosphere/README.md
+++ b/modules/data.atmosphere/README.md
@@ -26,13 +26,13 @@ The PEcAn.data.atmosphere package is 'standalone'.
## Documentation
-* [Package Documentation](https://pecanproject.github.io/pecan//modules/data.atmosphere/inst/web/index.html)
+* [Package Documentation](https://pecanproject.github.io/package-documentation/develop/data.atmosphere/index.html)
* Vignettes
## PEcAn variable names
-For the most updated list, see https://pecanproject.github.io/pecan-documentation/latest/time-standard.html#input-standards
+For the most updated list, see https://pecanproject.github.io/pecan-documentation/develop/input-standards.html#input-standards
General Note: dates in the database should be datatime (preferably with timezone), and datetime passed around in PEcAn should be of type POSIXlt.
diff --git a/modules/data.atmosphere/man/AmeriFlux_met_ensemble.Rd b/modules/data.atmosphere/man/AmeriFlux_met_ensemble.Rd
new file mode 100644
index 00000000000..d37098fadb1
--- /dev/null
+++ b/modules/data.atmosphere/man/AmeriFlux_met_ensemble.Rd
@@ -0,0 +1,91 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/Ameriflux_met_ensemble.R
+\name{AmeriFlux_met_ensemble}
+\alias{AmeriFlux_met_ensemble}
+\title{Generate AmeriFlux meteorological ensembles}
+\usage{
+AmeriFlux_met_ensemble(
+ site_id,
+ start_date,
+ end_date,
+ outfolder,
+ ameriflux_username = "pecan",
+ ameriflux_useremail = "@",
+ overwrite = FALSE,
+ verbose = FALSE,
+ format = NULL,
+ n_ens = 10,
+ w_len = 30,
+ era5_user = NULL,
+ era5_key = NULL,
+ threshold = 0.5,
+ dirs = NULL,
+ ...
+)
+}
+\arguments{
+\item{site_id}{character. AmeriFlux site identifier (e.g., "US-Ha1")
+The 'SITE_ID' field in \href{http://ameriflux.lbl.gov/sites/site-list-and-pages/}{list of Ameriflux sites}}
+
+\item{start_date}{character or date. Start date in "YYYY-MM-DD" format}
+
+\item{end_date}{character or date. End date in "YYYY-MM-DD" format}
+
+\item{outfolder}{character. Output directory path for ensemble files}
+
+\item{ameriflux_username}{character. AmeriFlux username for data access.}
+
+\item{ameriflux_useremail}{character. Email address for AmeriFlux authentication (must contain "@")}
+
+\item{overwrite}{logical. Whether to overwrite existing files. Default: FALSE}
+
+\item{verbose}{logical. Whether to print detailed logs. Default: FALSE}
+
+\item{format}{data frame or list. Format specifications for CF conversion. Default: NULL
+The AmerifluxLBL format is Bety record 5000000002,
+which could be returned from PEcAn.DB::query.format.vars(format.id=5000000002, bety = con)}
+
+\item{n_ens}{integer. Number of ensemble members to generate. Default: 10}
+
+\item{w_len}{integer. Window length in days. Default: 30}
+
+\item{era5_user}{character. CDS user ID (UID) from your CDS profile. Required for authentication.}
+
+\item{era5_key}{character. CDS API key from your CDS profile. Required for authentication.}
+
+\item{threshold}{numeric. Coverage threshold (0-1) for triggering ERA5 fallback. Default: 0.5}
+
+\item{dirs}{named list. Optional configuration for existing data directory paths. Default: NULL}
+
+\item{...}{additional arguments passed to download.AmerifluxLBL}
+}
+\value{
+A data frame with the paths to the generated ensemble files and their metadata.
+}
+\description{
+Downloads AmeriFlux data, applies ERA5 fallback for missing radiation and soil moisture,
+performs gap-filling, and generates ensembles.
+This function provides a complete pipeline from raw AmeriFlux data to CF-compliant
+ensemble meteorological files.
+}
+\examples{
+\dontrun{
+result <- AmeriFlux_met_ensemble(
+ site_id = "US-Ha1",
+ start_date = "2010-01-01",
+ end_date = "2010-12-31",
+ outfolder = "/path/to/output",
+ ameriflux_username = "your_username",
+ ameriflux_useremail = "your.email@domain.com",
+ format = format,
+ era5_user = "your_cds_user",
+ era5_key = "your_cds_api_key",
+ n_ens = 5,
+ verbose = TRUE
+)
+}
+
+}
+\author{
+Akash
+}
diff --git a/modules/data.atmosphere/man/Average_ERA5_2_GeoTIFF.Rd b/modules/data.atmosphere/man/Average_ERA5_2_GeoTIFF.Rd
new file mode 100644
index 00000000000..32f34b1f59d
--- /dev/null
+++ b/modules/data.atmosphere/man/Average_ERA5_2_GeoTIFF.Rd
@@ -0,0 +1,26 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/average_ERA5.R
+\name{Average_ERA5_2_GeoTIFF}
+\alias{Average_ERA5_2_GeoTIFF}
+\title{Average_ERA5_2_GeoTIFF}
+\usage{
+Average_ERA5_2_GeoTIFF(start.date, end.date, in.path, outdir)
+}
+\arguments{
+\item{start.date}{character: start point of when to average the data (e.g., 2012-01-01).}
+
+\item{end.date}{character: end point of when to average the data (e.g., 2021-12-31).}
+
+\item{in.path}{character: the directory where your ERA5 data are stored (they should be named ERA5_YEAR.nc).}
+
+\item{outdir}{character: the output directory where the averaged GeoTIFF file will be generated.}
+}
+\value{
+character: path to the exported GeoTIFF file.
+}
+\description{
+This function helps to average the ERA5 data based on the start and end dates, and convert it to the GeoTIFF file.
+}
+\author{
+Dongchen Zhang
+}
diff --git a/modules/data.atmosphere/man/ERA5_met_process.Rd b/modules/data.atmosphere/man/ERA5_met_process.Rd
index 29710098dd0..328c615affe 100644
--- a/modules/data.atmosphere/man/ERA5_met_process.Rd
+++ b/modules/data.atmosphere/man/ERA5_met_process.Rd
@@ -4,7 +4,14 @@
\alias{ERA5_met_process}
\title{Met Processes for ERA5 data}
\usage{
-ERA5_met_process(settings, in.path, out.path, write.db = FALSE, write = TRUE)
+ERA5_met_process(
+ settings,
+ in.path,
+ out.path,
+ write.db = FALSE,
+ write = TRUE,
+ ncores = 1
+)
}
\arguments{
\item{settings}{a multi-settings object}
@@ -16,6 +23,8 @@ ERA5_met_process(settings, in.path, out.path, write.db = FALSE, write = TRUE)
\item{write.db}{if write into Bety database}
\item{write}{if write the settings into pecan.xml file in the outdir of settings.}
+
+\item{ncores}{numeric: the number of CPUs for the parallel compute. Default is 1.}
}
\value{
if write.db is True then return input IDs with physical paths; if write.db is False then return just physical paths of extracted ERA5 clim files.
diff --git a/modules/data.atmosphere/man/download.ERA5_cds.Rd b/modules/data.atmosphere/man/download.ERA5_cds.Rd
index 5c8d1d460e7..53219173029 100644
--- a/modules/data.atmosphere/man/download.ERA5_cds.Rd
+++ b/modules/data.atmosphere/man/download.ERA5_cds.Rd
@@ -2,7 +2,7 @@
% Please edit documentation in R/ERA5_download.R
\name{download.ERA5_cds}
\alias{download.ERA5_cds}
-\title{ERA5_cds_annual_download}
+\title{Download ERA5 Climate Data from the Copernicus CDS API}
\usage{
download.ERA5_cds(
outfolder,
@@ -10,31 +10,85 @@ download.ERA5_cds(
end_date,
extent,
variables,
- auto.create.key = T,
+ user,
+ key,
+ time = NULL,
+ dataset = "reanalysis-era5-single-levels",
+ product_type = "ensemble_members",
timeout = 36000
)
}
\arguments{
-\item{outfolder}{Character: physical path where the ERA5 data are stored.}
+\item{outfolder}{Character. Directory where downloaded NetCDF files will be saved.}
\item{start_date}{character: the start date of the data to be downloaded. Format is YYYY-MM-DD (will only use the year part of the date)}
\item{end_date}{character: the end date of the data to be downloaded. Format is YYYY-MM-DD (will only use the year part of the date)}
-\item{extent}{numeric: a vector of numbers contains the bounding box (formatted as xmin, xmax, ymin, ymax) to be downloaded.}
+\item{extent}{numeric: a vector of numbers contains the bounding box (formatted as xmin, xmax, ymin, ymax) (longitude and latitude in degrees).}
\item{variables}{character: a vector contains variables to be downloaded (e.g., c("2m_temperature","surface_pressure")).}
-\item{auto.create.key}{Boolean: decide if we want to generate the CDS RC file if it doesn't exist, the default is TRUE.}
+\item{user}{Character. CDS user ID (UID) from your CDS profile. Required for authentication.}
+
+\item{key}{Character. CDS API key from your CDS profile. Required for authentication.}
+
+\item{time}{Character vector or NULL. Hours of the day to download (e.g., c("00:00", "12:00")). Defaults to NULL to download all hours.}
+
+\item{dataset}{Character. Name of the CDS dataset to use (default: "reanalysis-era5-single-levels").}
+
+\item{product_type}{Character. Product type to request from CDS (default: "ensemble_members").}
\item{timeout}{numeric: the maximum time (in seconds) allowed to download the data. The default is 36000 seconds.}
}
\value{
-A vector containing file paths to the downloaded files.
+A list where each element is a list containing:
+ \item{file}{File path to the downloaded NetCDF file.}
+ \item{host}{Host name where the file was downloaded.}
+ \item{startdate}{Start date and time of the data in the file.}
+ \item{enddate}{End date and time of the data in the file.}
+ \item{mimetype}{MIME type of the file ("application/x-netcdf").}
+ \item{formatname}{Format name ("ERA5_year.nc").}
}
\description{
-This function helps to download the yearly ERA5 data based on the prescribed features using the CDS API.
+Download ERA5 climate data from the Copernicus Climate Data Store (CDS) API as NetCDF files, year by year, according to user-specified parameters.
+The function saves one NetCDF file per year in the specified output directory.
+}
+\details{
+This function requires a valid CDS API key and the \code{ecmwfr} package for accessing the Copernicus Climate Data Store.
+To get a Copernicus CDS API key, register at \url{https://cds.climate.copernicus.eu/profile}.
+You must provide both \code{user} (UID) and \code{key} parameters from your CDS profile.
+
+You can check the "CC-BY" license under the \href{https://cds.climate.copernicus.eu/profile?tab=licences}{'licences' tab of your profile page}.
+}
+\examples{
+\dontrun{
+# Download ERA5 reanalysis data for 2020
+output_dir <- withr::local_tempdir()
+era5_files <- download.ERA5_cds(
+ outfolder = output_dir,
+ start_date = "2020-01-01",
+ end_date = "2020-06-30",
+ extent = c(-72.2215, -72.1215, 42.4878, 42.5878),
+ variables = c("2m_temperature", "surface_pressure"),
+ user = "your_cds_user_id",
+ key = "your_cds_api_key",
+ product_type = "reanalysis"
+)
+
+# Download ensemble data for specified hours only
+era5_files <- download.ERA5_cds(
+ outfolder = output_dir,
+ start_date = "2020-01-01",
+ end_date = "2020-12-31",
+ extent = c(-83.05, -82.95, 42.95, 43.05),
+ variables = "surface_solar_radiation_downwards",
+ user = "your_cds_user_id",
+ key = "your_cds_api_key",
+ time = c("00:00", "12:00")
+)
+}
}
\author{
-Dongchen Zhang
+Dongchen Zhang, Akash
}
diff --git a/modules/data.atmosphere/man/download.NOAA_GEFS.Rd b/modules/data.atmosphere/man/download.NOAA_GEFS.Rd
index 05aa332be43..d5270f16302 100644
--- a/modules/data.atmosphere/man/download.NOAA_GEFS.Rd
+++ b/modules/data.atmosphere/man/download.NOAA_GEFS.Rd
@@ -6,8 +6,6 @@
\usage{
download.NOAA_GEFS(
site_id,
- sitename = NULL,
- username = "pecan",
lat.in,
lon.in,
outfolder,
@@ -21,25 +19,21 @@ download.NOAA_GEFS(
\arguments{
\item{site_id}{The unique ID given to each site. This is used as part of the file name.}
-\item{sitename}{Site name}
-
-\item{username}{username from pecan workflow}
-
\item{lat.in}{site latitude in decimal degrees}
\item{lon.in}{site longitude in decimal degrees}
\item{outfolder}{Directory where results should be written}
-\item{start_date, }{Range of dates/times to be downloaded (default assumed to be time that function is run)}
+\item{start_date}{Range of dates/times to be downloaded (default assumed to be time that function is run)}
-\item{end_date, }{end date for range of dates to be downloaded (default 16 days from start_date)}
+\item{end_date}{end date for range of dates to be downloaded (default 16 days from start_date)}
-\item{downscale}{logical, assumed True. Indicated whether data should be downscaled to hourly}
+\item{downscale}{logical, assumed True. Indicates whether data should be downscaled to hourly}
\item{overwrite}{logical. Download a fresh version even if a local file with the same name already exists?}
-\item{...}{Additional optional parameters}
+\item{...}{Additional optional parameters, currently ignored}
}
\value{
A list of data frames is returned containing information about the data file that can be used to locate it later. Each
@@ -50,38 +44,47 @@ Download NOAA GEFS Weather Data
}
\section{Information on Units}{
-Information on NOAA weather units can be found below. Note that the temperature is measured in degrees C,
+Information on NOAA weather units can be found below. Note that the temperature is measured in degrees C,
but is converted at the station and downloaded in Kelvin.
}
\section{NOAA_GEFS General Information}{
-This function downloads NOAA GEFS weather data. GEFS is an ensemble of 21 different weather forecast models.
-A 16 day forecast is avaliable every 6 hours. Each forecast includes information on a total of 8 variables.
-These are transformed from the NOAA standard to the internal PEcAn
-standard.
+This function downloads NOAA GEFS weather data. GEFS is an ensemble of 31 different weather forecast models.
+A 16 day forecast is available every 6 hours and a 35 day forecast is available every 24 hours.
+Both are at 3-hour frequency for the first 10 days of the forecast and 6-hour frequency beyond that.
+Each forecast includes information on a total of 8 variables.
+These are transformed from the NOAA standard to the internal PEcAn standard.
}
-\section{Data Avaliability}{
+\section{Data Availability}{
+
+NOAA GEFS weather data is available on a rolling 4 day basis.
+Dates provided in "start_date" must be within this range.
+The end date can be any point after that, but if the end date is beyond 16 days
+(35 days for the midnight UTC forecast), only 16 (35) days worth of forecast are retrieved.
+Times are rounded down to the previous 6 hour forecast.
-NOAA GEFS weather data is avaliable on a rolling 12 day basis; dates provided in "start_date" must be within this range. The end date can be any point after
-that, but if the end date is beyond 16 days, only 16 days worth of forecast are recorded. Times are rounded down to the previous 6 hour forecast. NOAA
-GEFS weather data isn't always posted immediately, and to compensate, this function adjusts requests made in the last two hours
-back two hours (approximately the amount of time it takes to post the data) to make sure the most current forecast is used.
+NOAA GEFS weather data isn't always posted immediately. Each 16-day forecast takes
+approximately three hours to run, and the once-a-day forecasts for days 17-35 are
+posted much later (up to 21 hours) than the forecasts for days 0 to 16.
+See the [GEFS v12 release announcement](https://www.weather.gov/media/notification/pdf2/scn20-75gefs_v12_changes.pdf)
+for details.
}
\section{Data Save Format}{
-Data is saved in the netcdf format to the specified directory. File names reflect the precision of the data to the given range of days.
+Data is saved in the netcdf format to the specified directory.
+ File names reflect the precision of the data to the given range of days.
NOAA.GEFS.willow creek.3.2018-06-08T06:00.2018-06-24T06:00.nc specifies the forecast, using ensemble number 3 at willow creek on
June 6th, 2018 at 6:00 a.m. to June 24th, 2018 at 6:00 a.m.
}
\examples{
\dontrun{
- download.NOAA_GEFS(outfolder="~/Working/results",
- lat.in= 45.805925,
- lon.in = -90.07961,
+ download.NOAA_GEFS(outfolder="~/Working/results",
+ lat.in= 45.805925,
+ lon.in = -90.07961,
site_id = 676)
}
diff --git a/modules/data.atmosphere/man/download_grid.Rd b/modules/data.atmosphere/man/download_grid.Rd
new file mode 100644
index 00000000000..8b938c88de6
--- /dev/null
+++ b/modules/data.atmosphere/man/download_grid.Rd
@@ -0,0 +1,42 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/GEFS_helper_functions.R
+\name{download_grid}
+\alias{download_grid}
+\title{Download all requested timepoints of one GEFS ensemble member}
+\usage{
+download_grid(
+ ens_index,
+ location,
+ directory,
+ hours_char,
+ cycle,
+ base_filename1,
+ vars,
+ working_directory
+)
+}
+\arguments{
+\item{ens_index}{ensemble member as integer (1-31)}
+
+\item{location}{bounding box portion of query,
+as a single URL-escaped string}
+
+\item{directory}{server path portion of query,
+as a single URL-escaped string
+(not to be confused with local output dir -- that's `working_directory`)}
+
+\item{hours_char}{timepoints to retrieve,
+as zero-padded strings e.g. `c("000", "384", "840")`}
+
+\item{cycle}{forecast hour to use ("00", "06", "12", or "18")}
+
+\item{base_filename1}{URL onto which to append query string components}
+
+\item{vars}{variable listing component of query,
+as a single URL-escaped string}
+
+\item{working_directory}{path on local disk to write output}
+}
+\description{
+Download all requested timepoints of one GEFS ensemble member
+}
diff --git a/modules/data.atmosphere/man/extract.nc.ERA5.Rd b/modules/data.atmosphere/man/extract.nc.ERA5.Rd
index d377f131bd4..92cf2782fb9 100644
--- a/modules/data.atmosphere/man/extract.nc.ERA5.Rd
+++ b/modules/data.atmosphere/man/extract.nc.ERA5.Rd
@@ -13,6 +13,7 @@ extract.nc.ERA5(
outfolder,
in.prefix,
newsite,
+ ncores = 1,
vars = NULL,
overwrite = FALSE,
verbose = FALSE,
@@ -20,25 +21,28 @@ extract.nc.ERA5(
)
}
\arguments{
-\item{slat}{latitude}
+\item{slat}{numeric: vector of latitudes.}
-\item{slon}{longitude}
+\item{slon}{numeric: vector of longitudes.}
-\item{in.path}{path to the directory containing the file to be inserted}
+\item{in.path}{character: path to the directory containing the file to be inserted}
-\item{start_date}{start date}
+\item{start_date}{character: start date (in YYYY-MM-DD format).}
-\item{end_date}{end date}
+\item{end_date}{character: end date (in YYYY-MM-DD format).}
-\item{outfolder}{Path to directory where nc files need to be saved.}
+\item{outfolder}{character: Path to directory where nc files need to be saved.}
-\item{in.prefix}{initial portion of the filename that does not vary by date.
+\item{in.prefix}{character: initial portion of the filename that does not vary by date.
Does not include directory; specify that as part of in.path.}
-\item{newsite}{site name.}
+\item{newsite}{character: vector of site names.
+The length should match that of slat and slon.}
-\item{vars}{variables to be extracted. If NULL all the variables will be
-returned.}
+\item{ncores}{numeric: the number of CPUs for the parallel compute. Default is 1.}
+
+\item{vars}{character: names of variables to be extracted. If NULL all the variables will be
+returned. Default is NULL.}
\item{overwrite}{Logical if files needs to be overwritten.}
@@ -58,9 +62,22 @@ For the list of variables check out the documentation at \url{
}
\examples{
\dontrun{
-point.data <- ERA5_extract(sslat=40, slon=-120, years=c(1990:1995), vars=NULL)
-
- purrr::map(~xts::apply.daily(.x, mean))
+point.data <- extract.nc.ERA5(
+ slat = 43.25,
+ slon = -83.25,
+ in.path = "path/to/era5/files",
+ start_date = "1990-01-01",
+ end_date = "1995-12-31",
+ outfolder = "path/to/output",
+ in.prefix = "ERA5_",
+ newsite = "my_site",
+ vars = NULL,
+ overwrite = FALSE,
+ verbose = TRUE
+)
}
}
+\author{
+Dongchen Zhang, Akash
+}
diff --git a/modules/data.atmosphere/man/get.es.Rd b/modules/data.atmosphere/man/get.es.Rd
deleted file mode 100644
index a0f7f621b4a..00000000000
--- a/modules/data.atmosphere/man/get.es.Rd
+++ /dev/null
@@ -1,24 +0,0 @@
-% Generated by roxygen2: do not edit by hand
-% Please edit documentation in R/metutils.R
-\name{get.es}
-\alias{get.es}
-\title{get es}
-\usage{
-get.es(temp)
-}
-\arguments{
-\item{temp}{temperature in degrees C}
-}
-\value{
-saturation vapor pressure in mb
-}
-\description{
-Calculate saturation vapor pressure
-}
-\examples{
-temp <- -30:30
-plot(temp, get.es(temp))
-}
-\author{
-David LeBauer
-}
diff --git a/modules/data.atmosphere/man/met2CF.ERA5.Rd b/modules/data.atmosphere/man/met2CF.ERA5.Rd
index afaf311117b..2b07239079b 100644
--- a/modules/data.atmosphere/man/met2CF.ERA5.Rd
+++ b/modules/data.atmosphere/man/met2CF.ERA5.Rd
@@ -13,7 +13,8 @@ met2CF.ERA5(
outfolder,
out.xts,
overwrite = FALSE,
- verbose = TRUE
+ verbose = TRUE,
+ ens_size = 1
)
}
\arguments{
@@ -29,11 +30,14 @@ met2CF.ERA5(
\item{outfolder}{Path to directory where nc files need to be saved.}
-\item{out.xts}{Output of the extract.nc.ERA5 function which is a list of time series of met variables for each ensemble member.}
+\item{out.xts}{Output of the extract.nc.ERA5 function which is a list of time series of met variables for each ensemble member
+or single reanalysis dataset.}
\item{overwrite}{Logical if files needs to be overwritten.}
\item{verbose}{Logical flag defining if ouput of function be extra verbose.}
+
+\item{ens_size}{Number of ensemble members to process. Default is 1.}
}
\value{
list of dataframes
@@ -41,3 +45,6 @@ list of dataframes
\description{
met2cf.ERA5
}
+\author{
+Hamze Dokohaki, Akash
+}
diff --git a/modules/data.atmosphere/man/met_temporal_downscale.Gaussian_ensemble.Rd b/modules/data.atmosphere/man/met_temporal_downscale.Gaussian_ensemble.Rd
index 253cc6dc550..2753a7cae31 100644
--- a/modules/data.atmosphere/man/met_temporal_downscale.Gaussian_ensemble.Rd
+++ b/modules/data.atmosphere/man/met_temporal_downscale.Gaussian_ensemble.Rd
@@ -51,5 +51,5 @@ takes source data and a training dataset from the same site and temporally
downscales the source dataset to the resolution of the training dataset based on statistics of the training dataset.
}
\author{
-James Simkins
+James Simkins, Akash
}
diff --git a/modules/data.atmosphere/man/pecan_standard_met_table.Rd b/modules/data.atmosphere/man/pecan_standard_met_table.Rd
index 16bfadce2d9..11bacb8ffba 100644
--- a/modules/data.atmosphere/man/pecan_standard_met_table.Rd
+++ b/modules/data.atmosphere/man/pecan_standard_met_table.Rd
@@ -5,7 +5,7 @@
\alias{pecan_standard_met_table}
\title{Conversion table for PEcAn standard meteorology}
\format{
-An object of class \code{tbl_df} (inherits from \code{tbl}, \code{data.frame}) with 18 rows and 8 columns.
+An object of class \code{tbl_df} (inherits from \code{tbl}, \code{data.frame}) with 20 rows and 9 columns.
}
\usage{
pecan_standard_met_table
diff --git a/modules/data.atmosphere/man/sat_vapor_pressure.Rd b/modules/data.atmosphere/man/sat_vapor_pressure.Rd
new file mode 100644
index 00000000000..6cd41b5b70c
--- /dev/null
+++ b/modules/data.atmosphere/man/sat_vapor_pressure.Rd
@@ -0,0 +1,76 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/metutils.R
+\name{sat_vapor_pressure}
+\alias{sat_vapor_pressure}
+\alias{t2es}
+\alias{get.es}
+\title{Saturation vapor pressure (t2es)}
+\usage{
+sat_vapor_pressure(
+ temp,
+ temp_units = "degC",
+ out_units = "kPa",
+ method = c("ClausiusClapeyron", "Magnus", "GoffGratch")
+)
+
+get.es(temp)
+
+t2es(
+ temp,
+ temp_units = "degC",
+ out_units = "kPa",
+ method = "ClausiusClapeyron"
+)
+}
+\arguments{
+\item{temp}{numeric vector of temperatures}
+
+\item{temp_units}{input temperature units ("degC","K","degF"), default "degC"}
+
+\item{out_units}{output pressure units ("kPa","hPa","Pa","mb"), default "kPa"}
+
+\item{method}{one of "Magnus","ClausiusClapeyron" (default), or "GoffGratch".
+See details for references.}
+}
+\value{
+numeric vector in \code{out_units}
+}
+\description{
+Compute saturation vapor pressure from temperature using one of the
+following methods:
+\itemize{
+\item (Default) Clausius–Clapeyron (FAO-56 style) — Recommended for most applications.
+Commonly used approximation for terrestrial ecosystem models, consistent with Penman-Monteith
+and FAO-56 (Allen et al, 1998).
+\item Magnus — More accurate in the range −40 to +50 C. Coefficients as in Alduchov & Eskridge (1996).
+\item Goff–Gratch - Highest accuracy; use when following WMO-style recommendations. Goff–Gratch 1946; WMO, 2014.
+}
+}
+\details{
+Each method uses different units internally, users can specify units
+for both inputs and outputs, with defaults "degC" and "kPa", respectively.
+}
+\examples{
+# Calculate saturation vapor pressure at 20°C
+sat_vapor_pressure(20)
+t2es(20)
+
+# Using different methods
+t2es(c(10, 20, 30), method = "Magnus")
+t2es(283.15, temp_units = "K", method = "GoffGratch")
+
+# Different output units
+t2es(20, out_units = "hPa")
+}
+\references{
+Alduchov, O. A., & Eskridge, R. E. (1996). Improved Magnus Form Approximation of Saturation Vapor Pressure. J. Appl. Meteor., 35(4), 601–609. doi:10.1175/1520-0450(1996)035<0601:IMFAOS>2.0.CO;2
+
+Allen, R. G., Pereira, L. S., Raes, D., & Smith, M. (1998). \strong{Crop evapotranspiration – Guidelines for computing crop water requirements.} FAO Irrigation and Drainage Paper 56.
+
+Goff, J. A., & Gratch, S. (1946). Low-pressure properties of water from −160 to 212F. Trans. ASHVE, 52, 95–122.
+
+WMO (2014) Guide to Instruments and Methods of Observation (WMO-No. 8), ch. 4.
+}
+\author{
+David LeBauer
+}
diff --git a/modules/data.atmosphere/man/solarMJ2ppfd.Rd b/modules/data.atmosphere/man/solarMJ2ppfd.Rd
index 4a498a15bd8..b0ae03fcd62 100644
--- a/modules/data.atmosphere/man/solarMJ2ppfd.Rd
+++ b/modules/data.atmosphere/man/solarMJ2ppfd.Rd
@@ -24,7 +24,7 @@ The above conversion is based on the following reasoning
Campbell and Norman (1998). Introduction to Environmental Biophysics. pg 151 'the energy content of solar radiation in the PAR waveband is 2.35 x 10^5 J/mol'
See also the chapter radiation basics (10)
Here the input is the total solar radiation so to obtain in the PAR spectrum need to multiply by 0.486
-This last value 0.486 is based on the approximation that PAR is 0.45-0.50 of the total radiation
+This is based on the approximation that PAR is 0.45-0.50 of the total radiation
This means that 1e6 / (2.35e6) * 0.486 = 2.07
1e6 converts from mol to mu mol
1/3600 divides the values in hours to seconds
diff --git a/modules/data.atmosphere/tests/testthat/helper.R b/modules/data.atmosphere/tests/testthat/helper.R
index deba7cbdd0c..a4c15a3b316 100644
--- a/modules/data.atmosphere/tests/testthat/helper.R
+++ b/modules/data.atmosphere/tests/testthat/helper.R
@@ -34,3 +34,43 @@ expect_log <- function(object, regexp, ...){
invisible(val)
}
+
+
+#' Expectation: Does this directory contain all listed files?
+#' @param object directory to look in
+#' @param files character vector of filenames expected
+#' @param others_ok logical: allow files not listed in `files`?
+#' @param ... passed on to list.files
+expect_files <- function(object, files, others_ok = TRUE, ...) {
+ act <- quasi_label(rlang::enquo(object), arg = "object")
+
+ files_present <- list.files(path = act$val, ...)
+ files_found <- files %in% files_present
+ others_found <- !(files_present %in% files)
+
+
+ if (all(files_found) && (others_ok || !any(others_found))) {
+ succeed()
+ return(invisible(act$val))
+ }
+
+ msg <- ""
+ if (!all(files_found)) {
+ msg <- sprintf(
+ "%s does not contain files(s) %s.",
+ act$lab,
+ paste(files[!files_found], collapse = ", ")
+ )
+ }
+ if (!others_ok && any(others_found)) {
+ msg <- sprintf(
+ "%s %s contains unexpected files(s) %s.",
+ msg,
+ act$lab,
+ paste(files_present[others_found], collapse = ", ")
+ )
+ }
+ fail(msg)
+
+ invisible(act$val)
+}
diff --git a/modules/data.atmosphere/tests/testthat/test-GEFS_helper_functions.R b/modules/data.atmosphere/tests/testthat/test-GEFS_helper_functions.R
new file mode 100644
index 00000000000..e033f0213d8
--- /dev/null
+++ b/modules/data.atmosphere/tests/testthat/test-GEFS_helper_functions.R
@@ -0,0 +1,131 @@
+test_that("noaa_grid_download end times", {
+ local_edition(3)
+ local_mocked_bindings(
+ download_file_shim = function(...) {
+ dl_call_count <<- dl_call_count + 1
+ }
+ )
+ out <- withr::local_tempdir()
+
+ # wrapper for end-time tests
+ dl_hrs <- function(end_hr, start_time = 0) {
+ noaa_grid_download(
+ lat_list = 0,
+ lon_list = 0,
+ forecast_time = start_time,
+ forecast_date = Sys.Date(),
+ model_name_raw = "geftest_raw",
+ output_directory = out,
+ end_hr = end_hr
+ )
+ }
+
+ # Two timepoints (t0, 3-hr forecast)
+ # Expect calls for 31 ensemble members per time
+ dl_call_count <- 0
+ dl_hrs(3)
+ expect_equal(dl_call_count, 62)
+ expect_files(out, "geftest_raw")
+
+ # full 35 days
+ # (t0 + 10 days 3-hrly + 25 days 6-hrly = 181 timepoints)
+ dl_call_count <- 0
+ dl_hrs(840)
+ expect_equal(dl_call_count, 31 * 181)
+
+ # more than 35 days requested -> only 35 returned
+ dl_call_count <- 0
+ dl_hrs(1200)
+ expect_equal(dl_call_count, 31 * 181)
+
+ # nonzero forecast time -> 16 days available
+ dl_call_count <- 0
+ dl_hrs(1200, 12)
+ expect_equal(dl_call_count, 31 * 105)
+})
+
+
+test_that("noaa_grid_download bounding box", {
+ local_edition(3)
+ local_mocked_bindings(
+ download_file_shim = function(...) {
+ args <- list(...)
+ called_urls <<- append(called_urls, args[[1]])
+ return(0)
+ }
+ )
+ out <- withr::local_tempdir()
+
+ dl_loc <- function(lats = 40, lons = -88) {
+ noaa_grid_download(
+ lat_list = lats,
+ lon_list = lons,
+ forecast_time = 0,
+ forecast_date = Sys.Date(),
+ model_name_raw = "geftest_raw",
+ output_directory = out,
+ end_hr = 0
+ )
+ }
+
+ # one location
+ called_urls <- c()
+ dl_loc()
+ expect_match(
+ called_urls,
+ "leftlon=-88&rightlon=-88&toplat=40&bottomlat=40",
+ fixed = TRUE,
+ all = TRUE
+ )
+
+ # multiple locations
+ called_urls <- c()
+ dl_loc(c(35.5, 40, 42.9), c(-88, -89.5, -87))
+ expect_match(
+ called_urls,
+ "leftlon=-90&rightlon=-87&toplat=43&bottomlat=35",
+ fixed = TRUE,
+ all = TRUE
+ )
+})
+
+
+# TODO now that `download_grid` is a separate function,
+# it would be cleaner to test skipping in download_grid directly
+# and focus for noaa_grid_download on whether it correctly constructs/passes
+# each chunk of the query string.
+test_that("noaa_grid_download skips on error", {
+ local_edition(3)
+ local_mocked_bindings(
+ download_file_shim = function(...) stop("NOPE!")
+ )
+ out <- withr::local_tempdir()
+
+ noaa_grid_download(
+ lat_list = 0,
+ lon_list = 0,
+ forecast_time = 0,
+ forecast_date = Sys.Date(),
+ model_name_raw = "geftest_raw",
+ output_directory = out,
+ end_hr = 3
+ ) |>
+ expect_warning("NOPE! skipping gec00.*f000") |>
+ expect_warning("NOPE! skipping gep30.*f003") |>
+ suppressWarnings() # no need to check all 62 warnings
+})
+
+
+# test_that("download_grid", {
+# TODO
+# })
+
+
+# test_that("process_gridded_noaa_download", {
+# TODO
+# })
+
+
+# test_that("write_noaa_gefs_netcdf", {
+# TODO
+# })
diff --git a/modules/data.atmosphere/tests/testthat/test-download.NOAA_GEFS.R b/modules/data.atmosphere/tests/testthat/test-download.NOAA_GEFS.R
new file mode 100644
index 00000000000..1ce0f0991f9
--- /dev/null
+++ b/modules/data.atmosphere/tests/testthat/test-download.NOAA_GEFS.R
@@ -0,0 +1,90 @@
+# Verify that helpers are called with appropriate arguments
+# (We test the actual download and conversion in the helper test file)
+test_that("GEFS interface, helpers mocked out", {
+ local_edition(3)
+ out <- withr::local_tempdir()
+ start_date <- Sys.Date() - lubridate::days(1)
+
+ # Two full days
+ with_mocked_bindings(
+ {
+ res <- download.NOAA_GEFS(
+ site_id = "test_site",
+ lat.in = 40,
+ lon.in = -88,
+ start_date = start_date,
+ end_date = start_date + lubridate::days(2),
+ outfolder = out
+ )
+ },
+ noaa_grid_download = function(...) {
+ args <- list(...)
+ expect_equal(args$forecast_date, start_date)
+ expect_equal(args$forecast_time, 0)
+ expect_equal(args$end_hr, 48)
+
+ NULL
+ },
+ process_gridded_noaa_download = function(...) NULL
+ )
+
+
+ # Nonzero start hour
+ with_mocked_bindings(
+ {
+ res <- download.NOAA_GEFS(
+ site_id = "test_site",
+ lat.in = 32,
+ lon.in = -115,
+ start_date = start_date + lubridate::hours(8),
+ end_date = start_date + lubridate::hours(22),
+ outfolder = out
+ )
+ },
+ noaa_grid_download = function(...) {
+ args <- list(...)
+ expect_equal(args$forecast_date, start_date)
+ expect_equal(args$forecast_time, 6)
+ expect_equal(args$end_hr, 12)
+
+ NULL
+ },
+ process_gridded_noaa_download = function(...) NULL
+ )
+})
+
+
+# Verify responses from live GEFS server
+# (This downloads >200 grib files. They're small, but it still takes time)
+test_that("GEFS live server (slow!)", {
+ skip_on_ci()
+ out <- withr::local_tempdir()
+ start_date <- Sys.Date() - lubridate::days(1)
+ end_date <- start_date + lubridate::hours(18)
+
+ res <- download.NOAA_GEFS(
+ site_id = "test_site",
+ lat.in = 40,
+ lon.in = -88,
+ start_date = start_date,
+ end_date = end_date,
+ outfolder = out
+ )
+
+ expect_files(
+ file.path(out, "NOAAGEFS_raw", start_date, "00"),
+ c("gec00.t00z.pgrb2a.0p50.f000.grib", "gep30.t00z.pgrb2a.0p50.f018.grib")
+ )
+
+ ensemble_members <- sprintf("%02i", 0:30)
+ outnames <- paste(
+ "NOAA_GEFS_test_site", ensemble_members,
+ format(start_date, "%Y-%m-%dT%H:%M"), format(end_date, "%Y-%m-%dT%H:%M"),
+ sep = "_"
+ )
+ expect_files(
+ out,
+ paste0(outnames, "/", outnames, ".nc"),
+ recursive = TRUE
+ )
+})
diff --git a/modules/data.atmosphere/tests/testthat/test.download.ERA5_cds.R b/modules/data.atmosphere/tests/testthat/test.download.ERA5_cds.R
new file mode 100644
index 00000000000..50c90c91e54
--- /dev/null
+++ b/modules/data.atmosphere/tests/testthat/test.download.ERA5_cds.R
@@ -0,0 +1,145 @@
+test_that("download.ERA5_cds parameter validation and core functionality", {
+ outdir <- withr::local_tempdir()
+
+ # Mock only ecmwfr and logger dependencies
+ local_mocked_bindings(
+ wf_set_key = function(user, key) TRUE,
+ wf_request = function(request, user, path, time_out) {
+ # Validate request structure
+ year_str <- sub(".*_(\\d{4})\\.nc$", "\\1", request$target)
+ expect_equal(request$year, list(as.character(year_str)))
+ expect_equal(request$area, c(42.59, -72.22, 42.49, -72.12))
+ expect_equal(request$variable, as.list(c("2m_temperature", "surface_pressure")))
+ # Create mock file
+ target_file <- file.path(path, request$target)
+ writeLines("mock netcdf", target_file)
+ return(target_file)
+ },
+ .package = "ecmwfr"
+ )
+
+ local_mocked_bindings(
+ logger.severe = function(...) stop(paste(...)),
+ .package = "PEcAn.logger"
+ )
+
+ # Test parameter validation (missing user)
+ expect_error(
+ download.ERA5_cds(outdir, "2020-01-01", "2020-12-31",
+ c(-72.22, -72.12, 42.49, 42.59), "2m_temperature",
+ user = NULL, key = "key"),
+ "CDS 'user' and 'key' must be provided"
+ )
+
+ # Test successful download
+ result <- download.ERA5_cds(
+ outfolder = outdir,
+ start_date = "2020-01-01",
+ end_date = "2021-12-31",
+ extent = c(-72.22, -72.12, 42.49, 42.59),
+ variables = c("2m_temperature", "surface_pressure"),
+ user = "test_user",
+ key = "test_key"
+ )
+
+ files <- sapply(result, `[[`, "file")
+ expect_length(result, 2)
+ expect_true(all(file.exists(files)))
+ expect_true(all(grepl("ERA5_202[01]\\.nc$", files)))
+ expect_equal(unique(sapply(result, `[[`, "mimetype")), "application/x-netcdf")
+ expect_equal(unique(sapply(result, `[[`, "formatname")), "ERA5_year.nc")
+})
+
+test_that("download.ERA5_cds handles time parameter and dataset options", {
+ outdir <- withr::local_tempdir()
+
+ local_mocked_bindings(
+ wf_set_key = function(user, key) TRUE,
+ wf_request = function(request, user, path, time_out) {
+ if (!is.null(attr(request, "test_time"))) {
+ expect_equal(request$time, c("00:00", "12:00"))
+ } else {
+ expect_true(length(request$time) %in% c(2, 24))
+ }
+ target_file <- file.path(path, request$target)
+ writeLines("mock netcdf", target_file)
+ return(target_file)
+ },
+ .package = "ecmwfr"
+ )
+
+ local_mocked_bindings(
+ logger.severe = function(...) stop(paste(...)),
+ .package = "PEcAn.logger"
+ )
+
+ # Test NULL time (default all hours)
+ result1 <- download.ERA5_cds(outdir, "2020-01-01", "2020-12-31",
+ c(-72, -71, 42, 43), "2m_temperature",
+ time = NULL, user = "test", key = "test")
+
+ # Test custom time
+ result2 <- download.ERA5_cds(outdir, "2020-01-01", "2020-12-31",
+ c(-72, -71, 42, 43), "2m_temperature",
+ time = c("00:00", "12:00"), user = "test", key = "test")
+ attr(result2, "test_time") <- TRUE
+
+ expect_length(result1, 1)
+ expect_length(result2, 1)
+})
+
+test_that("download.ERA5_cds error handling and ecmwfr dependency", {
+ skip("Cannot reliably mock requireNamespace in base R; use a package-level wrapper for robust tests.")
+ # The following test would check for error handling if ecmwfr is missing,
+ # but is skipped due to R's locked base functions.
+ # If you refactor the code to use a wrapper, update this test to match.
+ outdir <- withr::local_tempdir()
+
+ local_mocked_bindings(
+ logger.info = function(...) NULL,
+ logger.severe = function(...) stop(paste(...)),
+ .package = "PEcAn.logger"
+ )
+
+ # Simulate missing ecmwfr package by wrapping the function call
+ expect_error(
+ download.ERA5_cds(outdir, "2020-01-01", "2020-12-31",
+ c(-72, -71, 42, 43), "2m_temperature",
+ user = "test", key = "test"),
+ "ecmwfr package required"
+ )
+
+ # Mock partial download failure
+ local_mocked_bindings(
+ wf_set_key = function(user, key) TRUE,
+ wf_request = function(request, user, path, time_out) {
+ year_str <- sub(".*_(\\d{4})\\.nc$", "\\1", request$target)
+ if (year_str == "2020") {
+ target_file <- file.path(path, request$target)
+ writeLines("mock netcdf", target_file)
+ return(target_file)
+ } else {
+ PEcAn.logger::logger.severe("Download failed")
+ }
+ },
+ .package = "ecmwfr"
+ )
+ local_mocked_bindings(
+ logger.error = function(...) message("ERROR: ", paste(...)),
+ .package = "PEcAn.logger"
+ )
+
+ # Should continue despite failures and log error
+ expect_message(
+ result <- download.ERA5_cds(outdir, "2020-01-01", "2021-12-31",
+ c(-72, -71, 42, 43), "2m_temperature",
+ user = "test", key = "test"),
+ "ERROR.*Failed to download.*2021"
+ )
+
+ files <- sapply(result, `[[`, "file")
+ expect_length(result, 2)
+ # Case: Only the first file should exist after a partial failure.
+ # If the function changes to return only successful downloads, update this test.
+ expect_true(file.exists(files[1]))
+})
diff --git a/modules/data.atmosphere/tests/testthat/test.load.cfmet.R b/modules/data.atmosphere/tests/testthat/test.load.cfmet.R
index b0df59ff85b..436c71f5559 100644
--- a/modules/data.atmosphere/tests/testthat/test.load.cfmet.R
+++ b/modules/data.atmosphere/tests/testthat/test.load.cfmet.R
@@ -49,7 +49,7 @@ test_that("load.cfmet throws error if start/end date out of range",{
start.date = "1950-12-31", end.date = "1951-12-31"),
"run start date .* before met data starts")
expect_error(load.cfmet(met.nc = daily.nc, lat = 39, lon = -88,
- start.date = "1951-01-02", end.date = "1952-01-01"),
+ start.date = "1951-01-02", end.date = "1952-01-15"),
"run end date .* after met data ends")
})
diff --git a/modules/data.atmosphere/tests/testthat/test.met_temporal_downscale.Gaussian_ensemble.R b/modules/data.atmosphere/tests/testthat/test.met_temporal_downscale.Gaussian_ensemble.R
new file mode 100644
index 00000000000..f3a6376320f
--- /dev/null
+++ b/modules/data.atmosphere/tests/testthat/test.met_temporal_downscale.Gaussian_ensemble.R
@@ -0,0 +1,204 @@
+context("met_temporal_downscale.Gaussian_ensemble")
+
+setup_test_files <- function() {
+ # minimal test netCDF files for input and training data
+ input_met_file <- tempfile(pattern = "input_", fileext = ".2020.nc")
+ train_met_file <- tempfile(pattern = "train_", fileext = ".nc")
+ outfolder <- withr::local_tempdir()
+
+ list(
+ input_met = input_met_file,
+ train_met = train_met_file,
+ outfolder = outfolder
+ )
+}
+
+test_that("Gaussian ensemble function basic functionality", {
+ skip_if_not(require(ncdf4), "ncdf4 package not available")
+ skip_if_not(require(PEcAn.data.atmosphere), "PEcAn.data.atmosphere package not available")
+
+ # This test would require actual test data files
+ # For now, testing the structure and parameter validation
+
+ test_files <- setup_test_files()
+ # Test parameter validation
+ suppressWarnings(
+ expect_error(
+ met_temporal_downscale.Gaussian_ensemble(
+ in.path = "",
+ in.prefix = "",
+ outfolder = test_files$outfolder,
+ input_met = "nonexistent.nc",
+ train_met = "nonexistent.nc"
+ ),
+ "Error in nc_open trying to open file"
+ )
+ )
+})
+
+test_that("Ensemble generation produces correct number of outputs", {
+ skip("Requires test data files")
+
+ test_files <- setup_test_files()
+ n_ens <- 5
+
+ # Mock the function call (would need actual data)
+ # results <- met_temporal_downscale.Gaussian_ensemble(
+ # in.path = dirname(test_files$input_met),
+ # in.prefix = "",
+ # outfolder = test_files$outfolder,
+ # input_met = test_files$input_met,
+ # train_met = test_files$train_met,
+ # n_ens = n_ens
+ # )
+
+ # expect_equal(length(results), n_ens)
+ # expect_true(all(sapply(results, function(x) file.exists(x$file))))
+})
+
+test_that("Temperature downscaling maintains physical constraints", {
+ # Mock data representing daily temperature values
+ daily_temp <- c(298.15, 300.15, 295.15) # K
+ daily_temp_max <- c(305.15, 308.15, 302.15)
+ daily_temp_min <- c(290.15, 292.15, 288.15)
+
+ # Test that max >= mean >= min relationships are preserved
+ # This would test the logic:
+ # df$air_temperature_max <- pmax(df$air_temperature_max, df$air_temperature, na.rm = TRUE)
+ # df$air_temperature_min <- pmin(df$air_temperature_min, df$air_temperature, na.rm = TRUE)
+
+ temp_max <- pmax(daily_temp_max, daily_temp, na.rm = TRUE)
+ temp_min <- pmin(daily_temp_min, daily_temp, na.rm = TRUE)
+
+ expect_true(all(temp_max >= daily_temp))
+ expect_true(all(temp_min <= daily_temp))
+ expect_true(all(temp_max >= temp_min))
+})
+
+test_that("Precipitation downscaling preserves mass conservation", {
+ # Test the precipitation redistribution logic
+
+ # Mock daily precipitation values (kg m-2 s-1)
+ daily_precip <- c(0, 0.000005787, 0, 0.00001157) # kg m-2 s-1
+ div <- 4 # creates 6-hourly output from daily input (24h/4 = 6h intervals)
+
+ # Test that total precipitation is conserved when redistributed
+ # This tests the rand_vect_cont function logic
+
+ # Simple test of mass conservation principle
+ total_input <- sum(daily_precip)
+ redistributed <- numeric(length(daily_precip) * div)
+ for (i in seq_along(daily_precip)) {
+ start_idx <- (i-1) * div + 1
+ end_idx <- i * div
+ # uniform redistribution for testing
+ redistributed[start_idx:end_idx] <- daily_precip[i] / div
+ }
+
+ total_output <- sum(redistributed)
+ expect_equal(total_input, total_output, tolerance = 1e-10)
+})
+
+test_that("Shortwave radiation methods produce valid outputs", {
+ # Mock data
+ daily_sw <- c(200, 250, 180) # W m-2
+ lat <- 40.0
+ lon <- -88.0
+ year <- 2020
+
+ # Test sine method constraints
+ # SW radiation should be >= 0 and follow diurnal pattern
+ # This would test: swflux[swflux < 0] <- 0
+
+ sw_with_neg <- c(-10, 100, 200, -5, 150)
+ sw_valid <- pmax(sw_with_neg, 0)
+
+ expect_true(all(sw_valid >= 0))
+ expect_equal(sw_valid, c(0, 100, 200, 0, 150))
+})
+
+test_that("Soil moisture uncertainty calculations", {
+ # Mock sw data
+ soil_moisture <- c(0.2, 0.35, 0.45, 0.25, 0.15)
+ # Calculate cv
+ sm_cv <- stats::sd(soil_moisture, na.rm = TRUE) / mean(soil_moisture, na.rm = TRUE)
+ # Calculate field capacity (75th percentile)
+ sm_fc <- stats::quantile(soil_moisture, 0.75, na.rm = TRUE)
+ # Test moisture stress calculation for different values
+ test_sm <- c(0.1, 0.25, 0.35, 0.45)
+ moisture_stress <- abs(test_sm - sm_fc) / sm_fc
+ uncertainty_factor <- 1.0 + sm_cv * moisture_stress
+ uncertainty_with_precip <- uncertainty_factor * 1.2
+ uncertainty_bound <- pmax(0.7, pmin(uncertainty_with_precip, 1.8))
+
+ expect_true(all(uncertainty_factor >= 1.0))
+ expect_true(all(uncertainty_with_precip >= uncertainty_factor))
+ expect_true(all(uncertainty_bound >= 0.7))
+ expect_true(all(uncertainty_bound <= 1.8))
+})
+
+test_that("Relative humidity temperature adjustment", {
+ # Test RH adjustment for temperature changes
+
+ # Mock data
+ source_temp_k <- 293.15 # 20C
+ current_temp_k <- 298.15 # 25C
+ source_rh <- 70 # 70%
+
+ # Convert to Celsius for saturation vapor pressure calculation
+ source_temp_c <- source_temp_k - 273.15
+ current_temp_c <- current_temp_k - 273.15
+ expect_true(current_temp_c > -40 && current_temp_c < 50)
+ expect_true(source_temp_c > -40 && source_temp_c < 50)
+
+ # (Warmer air can hold more moisture, so RH should generally decrease)
+ # Magnus formula constants
+ es_source <- 0.61078 * exp((17.27 * source_temp_c) / (source_temp_c + 237.3))
+ es_current <- 0.61078 * exp((17.27 * current_temp_c) / (current_temp_c + 237.3))
+
+ saturation_ratio <- es_source / es_current
+ adjusted_rh <- source_rh * saturation_ratio
+ # For warming (current > source), adjusted RH should be lower
+ expect_true(adjusted_rh < source_rh)
+ expect_true(adjusted_rh > 0 && adjusted_rh <= 100)
+})
+
+test_that("PPFD calculations respect daylight constraints", {
+ # Test PPFD (photosynthetic photon flux density)
+ # Mock daylight conditions
+ is_daylight <- c(FALSE, FALSE, TRUE, TRUE, TRUE, FALSE, FALSE)
+ ppfd_values <- c(0, 0, 0.0005, 0.0008, 0.0006, 0, 0) # in mol m-2 s-1
+
+ # Test that nighttime PPFD is zero
+ nighttime_ppfd <- ppfd_values[!is_daylight]
+ expect_true(all(nighttime_ppfd == 0))
+ # Test PPFD bounds (0 to 0.0025 mol m-2 s-1 under full sunlight)
+ ppfd_test <- c(-0.00005, 0.0001, 0.0015, 0.0030, 0.0025)
+ ppfd_bound <- pmax(0, pmin(ppfd_test, 0.0025))
+
+ expect_equal(ppfd_bound, c(0, 0.0001, 0.0015, 0.0025, 0.0025))
+})
+
+
+test_that("Wind speed and other variables handle missing data correctly", {
+ source_data <- c(2.5, NA, 3.2, 1.8, NA)
+ expect_false(all(is.na(source_data)))
+ expect_true(all(is.na(c(NA, NA, NA))))
+
+ # Test that missing values are handled in sd calculations
+ sd_with_na <- sd(source_data, na.rm = TRUE)
+ expect_true(!is.na(sd_with_na))
+ expect_true(is.finite(sd_with_na))
+})
+
+test_that("Leap year handling works correctly", {
+ leap_year <- 2020
+ non_leap_year <- 2021
+ expect_true(lubridate::leap_year(leap_year))
+ expect_false(lubridate::leap_year(non_leap_year))
+ # Test day count logic
+ leap_days <- ifelse(lubridate::leap_year(leap_year), 366, 365)
+ non_leap_days <- ifelse(lubridate::leap_year(non_leap_year), 366, 365)
+ expect_equal(leap_days, 366)
+ expect_equal(non_leap_days, 365)
+})
\ No newline at end of file
diff --git a/modules/data.atmosphere/tests/testthat/test.metutils.R b/modules/data.atmosphere/tests/testthat/test.metutils.R
index 2e56e57e8db..5ac4606bdb4 100644
--- a/modules/data.atmosphere/tests/testthat/test.metutils.R
+++ b/modules/data.atmosphere/tests/testthat/test.metutils.R
@@ -31,4 +31,52 @@ test_that("get.rh RH from dewpoint",{
expect_equal(getrhtest(25, 10), 38.82, tolerance = 0.2)
expect_equal(getrhtest(0, -5), 69, tolerance = 0.2)
})
+
+test_that("different methods of sat_vapor_pressure work correctly", {
+ expect_equal(
+ sat_vapor_pressure(c(-10, 10), method = "Magnus"),
+ c(0.286, 1.228),
+ tolerance = 0.001
+ )
+ expect_equal(
+ sat_vapor_pressure(c(-10, 10), method = "ClausiusClapeyron"),
+ c(0.287, 1.233),
+ tolerance = 0.001
+ )
+ expect_equal(
+ sat_vapor_pressure(c(-10, 10), method = "GoffGratch"),
+ c(0.286, 1.227),
+ tolerance = 0.001
+ )
+})
+
+test_that("sat_vapor_pressure works with different units", {
+ expect_equal(
+ sat_vapor_pressure(283.15,
+ method = "GoffGratch",
+ temp_units = "K",
+ out_units = "mb"
+ ),
+ 12.27,
+ tolerance = 0.01
+ )
+ expect_equal(
+ sat_vapor_pressure(283.15,
+ method = "ClausiusClapeyron",
+ temp_units = "K",
+ out_units = "kPa"
+ ),
+ 1.227,
+ tolerance = 0.01
+ )
+ expect_equal(
+ sat_vapor_pressure(283.15,
+ method = "Magnus",
+ temp_units = "K",
+ out_units = "Pa"
+ ),
+ 1227,
+ tolerance = 1
+ )
+})
\ No newline at end of file
diff --git a/modules/data.atmosphere/vignettes/ameriflux_demo.Rmd b/modules/data.atmosphere/vignettes/ameriflux_demo.Rmd
index 344e1c27cfb..4d3d3b957ec 100644
--- a/modules/data.atmosphere/vignettes/ameriflux_demo.Rmd
+++ b/modules/data.atmosphere/vignettes/ameriflux_demo.Rmd
@@ -11,9 +11,10 @@ vignette: >
# Overview
-This is a demonstration of the PEcAn utilities for downloading met data, converting it to the PEcAn-CF format (which is based on the Climate Forecasting conventions and similar to MsTMIP). These variables are defined in the [PEcAn documentation](https://pecanproject.github.io/pecan-documentation/latest/met-data.html).
+This is a demonstration of the PEcAn utilities for downloading met data, converting it to the PEcAn-CF format (which is based on the Climate Forecasting conventions and similar to MsTMIP). These variables are described in the [PEcAn met data documentation](https://pecanproject.github.io/pecan-documentation/develop/input-standards.html#meteorology-standards).
+
+We’ll download 12 years of met data from the [Bondville Ameriflux site](https://ameriflux.lbl.gov/sites/siteinfo/US-Bo1), which has a `SITE_ID` of `US-Bo1`.
-In this example we will download 12 years of met data from the [Bondville Ameriflux site](http://ameriflux.ornl.gov/fullsiteinfo.php?sid=44). It has an Ameriflux `SITE_ID` of `US-Bo1`
The PEcAn.data.atmosphere source code is in [`modules/data.atmosphere`](https://github.com/PecanProject/pecan/tree/main/modules/data.atmosphere) and the documentation can be found with either `package?PEcAn.data.atmosphere` or in the [data.atmosphere package documentation](https://pecanproject.github.io/pecan//modules/data.atmosphere/inst/web/index.html).
@@ -21,8 +22,6 @@ The PEcAn.data.atmosphere source code is in [`modules/data.atmosphere`](https://
```{r}
library(knitr)
-library(ggplot2)
-# library(ggthemes)
library(PEcAn.data.atmosphere)
```
@@ -73,7 +72,8 @@ bondville.cfmet <- load.cfmet(bondville.nc, lat = 40.0061988830566, lon = -88.29
```
-```{r, eval=FALSE}
+```{r, error=TRUE, eval=FALSE}
+library("ggplot2", logical.return = TRUE) || stop("Skipping this chunk because ggplot2 not found")
theme_set(theme_tufte())
p1 <- ggplot() + geom_line(data = bondville.cfmet, aes(x = date, y = surface_downwelling_shortwave_flux_in_air)) + ylab(paste(bondville.nc$var$surface_downwelling_shortwave_flux_in_air$longname, bondville.nc$var$surface_downwelling_shortwave_flux_in_air$units))
diff --git a/modules/data.atmosphere/vignettes/compare_narr_cruncep_met.Rmd b/modules/data.atmosphere/vignettes/compare_narr_cruncep_met.Rmd
index efaeffb6f12..6e8d4385df5 100644
--- a/modules/data.atmosphere/vignettes/compare_narr_cruncep_met.Rmd
+++ b/modules/data.atmosphere/vignettes/compare_narr_cruncep_met.Rmd
@@ -43,7 +43,7 @@ TODO: clean up figure titles, labels, write explanations
```{r loading-libraries, eval=FALSE}
library(PEcAn.data.atmosphere)
# library(data.table)
-library(ggplot2)
+library(ggplot2, logical.return = TRUE) || stop("this vignette requires ggplot2")
theme_set(theme_bw())
data(narr_cruncep_ebifarm)
diff --git a/modules/data.land/.Rbuildignore b/modules/data.land/.Rbuildignore
index 6ea8afa18e8..edd7e377255 100644
--- a/modules/data.land/.Rbuildignore
+++ b/modules/data.land/.Rbuildignore
@@ -1,3 +1,4 @@
contrib
data-raw
^docs$
+.*venv/
diff --git a/modules/data.land/DESCRIPTION b/modules/data.land/DESCRIPTION
index 700be8b56a7..69155d6054c 100644
--- a/modules/data.land/DESCRIPTION
+++ b/modules/data.land/DESCRIPTION
@@ -1,7 +1,7 @@
Package: PEcAn.data.land
Type: Package
Title: PEcAn Functions Used for Ecological Forecasts and Reanalysis
-Version: 1.8.1
+Version: 1.9.0
Authors@R: c(person("Mike", "Dietze", role = c("aut", "cre"),
email = "dietze@bu.edu"),
person("David", "LeBauer", role = c("aut"),
@@ -21,7 +21,9 @@ Description: The Predictive Ecosystem Carbon Analyzer (PEcAn) is a scientific
model parameterization, execution, and analysis. The goal of PECAn is to
streamline the interaction between data and models, and to improve the
efficacy of scientific investigation.
-Depends: R (>= 3.5.0)
+URL: https://pecanproject.github.io
+BugReports: https://github.com/PecanProject/pecan/issues
+Depends: R (>= 4.1.0)
Imports:
coda,
curl,
@@ -40,7 +42,6 @@ Imports:
ncdf4 (>= 1.15),
neonUtilities,
neonstore,
- swfscMisc,
PEcAn.benchmark,
PEcAn.DB,
PEcAn.logger,
@@ -58,17 +59,21 @@ Imports:
tidyr,
tidyselect,
traits,
- XML (>= 3.98-1.4)
+ XML (>= 3.98-1.4)
Suggests:
dataone,
datapack,
+ jsonlite,
+ jsonvalidate,
getPass,
glue,
PEcAn.settings,
redland,
raster,
reticulate,
- testthat (>= 1.0.2)
+ testthat (>= 3.1.0),
+ withr,
+ MASS
Remotes:
github::ropensci/traits
License: BSD_3_clause + file LICENSE
@@ -76,3 +81,4 @@ Copyright: Authors
LazyData: true
Encoding: UTF-8
RoxygenNote: 7.3.2
+X-schema.org-keywords: soil-data, vegetation-data, land-cover
diff --git a/modules/data.land/NAMESPACE b/modules/data.land/NAMESPACE
index 9f1343e0d1c..84e1d16b14d 100644
--- a/modules/data.land/NAMESPACE
+++ b/modules/data.land/NAMESPACE
@@ -10,6 +10,7 @@ export(Read.IC.info.BADM)
export(Read_Tucson)
export(Soilgrids_SoilC_prep)
export(buildJAGSdata_InventoryRings)
+export(clip_and_save_raster_file)
export(cohort2pool)
export(dataone_download)
export(download.SM_CDS)
@@ -27,6 +28,7 @@ export(format_identifier)
export(from.Tag)
export(from.TreeCode)
export(gSSURGO.Query)
+export(generate_soilgrids_ensemble)
export(get.attributes)
export(get.soil)
export(get_resource_map)
@@ -34,31 +36,37 @@ export(get_veg_module)
export(ic_process)
export(id_resolveable)
export(load_veg)
+export(look_up_fertilizer_components)
export(matchInventoryRings)
export(match_pft)
export(match_species_id)
export(mpot2smoist)
export(netcdf.writer.BADM)
+export(om2soc)
export(parse.MatrixNames)
export(partition_roots)
export(plot2AGB)
export(pool_ic_list2netcdf)
export(pool_ic_netcdf2list)
export(prepare_pools)
+export(preprocess_soilgrids_data)
export(put_veg_module)
export(sample_ic)
export(sclass)
export(shp2kml)
+export(soc2ocs)
export(soil.units)
export(soil2netcdf)
export(soil_params)
export(soil_params_ensemble_soilgrids)
export(soil_process)
+export(soilgrids_ic_process)
export(soilgrids_soilC_extract)
export(soilgrids_texture_extraction)
export(subset_layer)
export(to.Tag)
export(to.TreeCode)
+export(validate_events_json)
export(write_ic)
export(write_veg)
importFrom(dplyr,"%>%")
diff --git a/modules/data.land/NEWS.md b/modules/data.land/NEWS.md
index 629f275f22b..b3d6c68de44 100644
--- a/modules/data.land/NEWS.md
+++ b/modules/data.land/NEWS.md
@@ -1,3 +1,29 @@
+# PEcAn.data.land 1.9.0
+
+## Added
+
+* New function `soilgrids_ic_process()`, with helpers `preprocess_soilgrids_data()` and `generate_soilgrids_ensemble()`, generates soil carbon initial conditions from SoilGrids 250m data (#3508).
+* New function `clip_and_save_raster_file()` subsets rasters to match a polygon of interest (#3537).
+* New function `look_up_fertilizer_components()` contains typical carbon and nitrogen composition of common fertilizer types (#3559).
+* New PEcAn standard for `events.json` files. These contain information about management events (planting, harvest, irrigation, etc). The standard is defined in `inst/events_schema_v0.1.0.json` and event files can be validated against the schema with new function `validate_events_json()` (#3623, #3521).
+
+## Changed
+
+* `Read.IC.info.BADM` now processes both single-site and multi-site settings, and uses more carbon pools (`ROOT_BIOMASS`, `AG_BIOMASS`, `SOIL_STOCK`, `LIT_BIOMASS`) if they are present (#3536).
+* Package `swfscMisc` is no longer imported; it was formerly used in `extract_NEON_veg()` to compute distances and has been replaced by use of `terra::distance()` (#3552).
+* `extract_soil_gssurgo()` now supports spatial grid sampling using new arguments `grid_size` and `grid_spacing`. Previously available argument `radius` has been removed (#3534).
+* `extract_soil_gssurgo()` now reports an estimate of soil organic carbon stocks (#3534).
+
+## Removed
+
+* Removed unused parameter `machine` from `put_veg_module()` (#3575).
+
+## Fixed
+
+* Fixed an invalid external pointer error in `soilgrids_soilC_extract()` (#3506).
+
+
+
# PEcAn.data.land 1.8.1
* Dependency `datapack` is now optional. It is only used by `dataone_download()` (#3373).
@@ -22,6 +48,7 @@
* Removed dependency on `PEcAn.data.atmosphere`, notably by retrieving site latitude and longitude directly from `PEcAn.DB::query.site` instead of custom lookups (#3300, Abhinav Pandey).
+
# PEcAn.data.land 1.7.1
* All changes in 1.7.1 and earlier were recorded in a single file for all of the PEcAn packages; please see
diff --git a/modules/data.land/R/IC_BADM_Utilities.R b/modules/data.land/R/IC_BADM_Utilities.R
index 7c9ec829d1d..0eb82ed8f16 100644
--- a/modules/data.land/R/IC_BADM_Utilities.R
+++ b/modules/data.land/R/IC_BADM_Utilities.R
@@ -31,7 +31,7 @@ Read.IC.info.BADM <-function(lat, long){
biomass.df <- U.S.SB %>%
dplyr::filter(
.data$NA_L2CODE == Code_Level,
- grepl("ROOT_|AG_BIOMASS|SOIL_STOCK|SOIL_CHEM", .data$VARIABLE)
+ grepl("ROOT_|AG_BIOMASS|SOIL_STOCK|LIT_BIOMASS", .data$VARIABLE)
) %>%
dplyr::select("SITE_ID", "GROUP_ID", "VARIABLE_GROUP", "VARIABLE", "DATAVALUE")
@@ -43,7 +43,7 @@ Read.IC.info.BADM <-function(lat, long){
biomass.df <- U.S.SB %>%
dplyr::filter(
.data$NA_L1CODE == Code_Level,
- grepl("ROOT_|AG_BIOMASS|SOIL_STOCK|SOIL_CHEM", .data$VARIABLE)
+ grepl("ROOT_|AG_BIOMASS|SOIL_STOCK|LIT_BIOMASS", .data$VARIABLE)
) %>%
dplyr::select("SITE_ID", "GROUP_ID", "VARIABLE_GROUP", "VARIABLE", "DATAVALUE")
}
@@ -53,7 +53,7 @@ Read.IC.info.BADM <-function(lat, long){
if (nrow(biomass.df) < 3) {
Code_Level <- "ALL"
biomass.df <- U.S.SB %>%
- dplyr::filter(grepl("ROOT_|AG_BIOMASS|SOIL_STOCK|SOIL_CHEM", .data$VARIABLE)) %>%
+ dplyr::filter(grepl("ROOT_|AG_BIOMASS|SOIL_STOCK|LIT_BIOMASS", .data$VARIABLE)) %>%
dplyr::select("SITE_ID", "GROUP_ID", "VARIABLE_GROUP", "VARIABLE", "DATAVALUE")
}
@@ -69,18 +69,16 @@ Read.IC.info.BADM <-function(lat, long){
SoilIni <- NA
litterIni <- NA
Rootini <- NA
- litterIni <- NA
Date.in <- NA
Organ.in <- NA
# find what type of entry it is - biomass/soil or litter
if (nrow(Gdf) > 0) {
type <-
sapply(c(
- "*LIT",
"*SOIL",
- "*_BIOMASS",
+ "*_LIT_BIOMASS",
"*_ROOT_BIOMASS",
- "*_LIT_BIOMASS"
+ "*_BIOMASS"
),
grepl,
Gdf[1, 3])
@@ -100,9 +98,8 @@ Read.IC.info.BADM <-function(lat, long){
#Converting DM to C content
#Variations and determinants of carbon content in plants:a global synthesis - https://www.biogeosciences.net/15/693/2018/bg-15-693-2018.pdf
- if (length(unit.in) > 0)
- if (unit.in =="kgDM m-2") cov.factor <- cov.factor *0.48
-
+ if (length(unit.in) > 0 && unit.in == "kgDM m-2") cov.factor <- cov.factor *0.48
+
unit.ready <- ifelse(unit.in == "gC m-2",
"g/m^2",
ifelse(unit.in == "kgDM m-2", "kg/m^2",
@@ -132,7 +129,7 @@ Read.IC.info.BADM <-function(lat, long){
as.numeric()*cov.factor, unit.ready, "kg/m^2")#"AG_BIOMASS_CROP","AG_BIOMASS_SHRUB","AG_BIOMASS_TREE","AG_BIOMASS_OTHER"
} else if (type == "*SOIL") {
- val <- Gdf %>%
+ val <- Gdf %>%
dplyr::filter(grepl("SOIL_STOCK_C_ORG", .data$VARIABLE)) %>%
dplyr::pull(.data$DATAVALUE) %>%
as.numeric()
@@ -142,8 +139,7 @@ Read.IC.info.BADM <-function(lat, long){
} else if (type == "*_LIT_BIOMASS") {
litterIni <-
- PEcAn.utils::ud_convert(Gdf$DATAVALUE[1] %>%
- as.numeric()*cov.factor, unit.ready, "kg/m^2")
+ PEcAn.utils::ud_convert(suppressWarnings(as.numeric(Gdf$DATAVALUE[1]))*cov.factor, unit.ready, "kg/m^2")
} else if (type == "*_ROOT_BIOMASS") {
Rootini <-
@@ -156,21 +152,19 @@ Read.IC.info.BADM <-function(lat, long){
Site = Gdf$SITE_ID %>% unique(),
Var = Gdf$VARIABLE[1],
Date = Date.in,
- # Organ = Organ.in,
+ Organ = Organ.in,
AGB = PlantWoodIni,
soil_organic_carbon_content = SoilIni,
- litter_carbon_content = litterIni
+ litter_carbon_content = litterIni,
+ root_carbon_content = Rootini
)
)
})
-
#cleaning
-ind <- apply(entries[,5:7], 1, function(x) all(is.na(x)))
+ind <- apply(entries[,5:8], 1, function(x) all(is.na(x)))
entries <- entries[-which(ind),]
-
-
return(entries)
}
@@ -219,8 +213,8 @@ netcdf.writer.BADM <- function(lat, long, siteid, outdir, ens){
input$dims <- dims
input$vals <- variables
-
- return(pool_ic_list2netcdf(
+ if(!dir.exists(outdir)) dir.create(outdir, recursive = TRUE)
+ return(PEcAn.data.land::pool_ic_list2netcdf(
input = input,
outdir = outdir,
siteid = siteid,
@@ -240,6 +234,12 @@ netcdf.writer.BADM <- function(lat, long, siteid, outdir, ens){
#' @export
#'
BADM_IC_process <- function(settings, dir, overwrite=TRUE){
+
+ # check if this is a single-site or multi-site configuration
+ if ("run" %in% names(settings)) {
+ settings <- list(settings)
+ }
+
# create site info.
new.site <-
settings %>%
@@ -253,16 +253,33 @@ BADM_IC_process <- function(settings, dir, overwrite=TRUE){
})%>%
dplyr::bind_rows() %>%
as.list()
-
- out.ense <- seq_len(settings$ensemble$size) %>%
- purrr::map(~ netcdf.writer.BADM(new.site$lat,
- new.site$lon,
- new.site$id,
- outdir=dir,
- ens=.x))
- out.ense <- out.ense %>%
- stats::setNames(rep("path", length(out.ense)))
+ # process each site configuration
+ out.ense <- list()
+
+ for (i in seq_along(settings)) {
+ site.settings <- settings[[i]]
+ ens.size <- ens.size <- max(1, site.settings$ensemble$size %||% 1)
+
+ # get site info for this specific site
+ site.info <- list(
+ id = new.site$id[i],
+ lat = new.site$lat[i],
+ lon = new.site$lon[i]
+ )
+
+ site.outputs <- seq_len(ens.size) %>%
+ purrr::map(~ netcdf.writer.BADM(site.info$lat,
+ site.info$lon,
+ site.info$id,
+ outdir=dir,
+ ens=.x))
+
+ site.outputs <- site.outputs %>%
+ stats::setNames(rep("path", length(site.outputs)))
+
+ out.ense <- c(out.ense, site.outputs)
+ }
return(out.ense)
}
diff --git a/modules/data.land/R/IC_SOILGRID_Utilities.R b/modules/data.land/R/IC_SOILGRID_Utilities.R
new file mode 100644
index 00000000000..ce8f7a9218b
--- /dev/null
+++ b/modules/data.land/R/IC_SOILGRID_Utilities.R
@@ -0,0 +1,355 @@
+#' SoilGrids Initial Conditions (IC) Utilities
+#' @description Functions for generating soil carbon IC files from SoilGrids250m data
+#' @details This module provides functions for extracting, processing, and generating
+#' ensemble members for soil carbon initial conditions using SoilGrids data.
+#' All soil carbon values are in kg/m2.
+#'
+#' Process SoilGrids data for initial conditions
+#'
+#' @param settings PEcAn settings list containing site information
+#' @param dir Output directory for IC files
+#' @param depth Numeric vector of depth values in meters. Can be single value
+#' or multiple values c(0.3, 2.0). Default: c(0.3, 2.0)
+#' @param overwrite Overwrite existing files? (Default: FALSE)
+#' @param verbose Print detailed progress information? (Default: FALSE)
+#'
+#' @return List of paths to generated IC files, organized by site ID
+#' @export
+#'
+#' @examples
+#' \dontrun{
+#' # Process both depths (default)
+#' settings <- PEcAn.settings::read.settings("pecan.xml")
+#' output_dir <- withr::local_tempdir()
+#' ic_files <- soilgrids_ic_process(settings, dir = output_dir)
+#'
+#' # Process only 30cm depth
+#' ic_files <- soilgrids_ic_process(settings, dir = output_dir, depth = 0.3)
+#' }
+#'
+#' @author Akash
+#'
+soilgrids_ic_process <- function(settings, dir, depth = c(0.3, 2.0), overwrite = FALSE, verbose = FALSE) {
+ start_time <- proc.time()
+
+ valid_depths <- c(0.3, 2.0)
+ if (!all(depth %in% valid_depths)) {
+ PEcAn.logger::logger.severe(sprintf("Invalid depth values. Must be from: %s",
+ paste(valid_depths, collapse = ", ")))
+ }
+ depth <- sort(unique(depth))
+ depth_layers <- sapply(depth, function(d) if (d == 0.3) "0-30cm" else "0-200cm")
+
+ if (verbose) {
+ PEcAn.logger::logger.info(sprintf("Processing soil carbon data for depths: %s",
+ paste(paste0(depth, "m (", depth_layers, ")"), collapse = ", ")))
+ }
+
+ site_info <- settings$run$site
+ if (is.list(site_info) && !is.null(site_info$id)) {
+ site_info <- list(site_info)
+ }
+ site_info <- site_info |>
+ purrr::map(function(site) {
+ site$lat <- as.numeric(site$lat)
+ site$lon <- as.numeric(site$lon)
+ data.frame(
+ site_id = site$id,
+ lat = site$lat,
+ lon = site$lon,
+ site_name = site$name,
+ str_id = as.character(site$id),
+ stringsAsFactors = FALSE
+ )
+ }) |>
+ dplyr::bind_rows()
+ n_sites <- nrow(site_info)
+ if (n_sites == 0) {
+ PEcAn.logger::logger.severe("No sites found in the provided input")
+ }
+
+ size <- ifelse(is.null(settings$ensemble$size), 1, settings$ensemble$size)
+
+ if (!dir.exists(dir)) {
+ dir.create(dir, recursive = TRUE)
+ }
+
+ data_dir <- file.path(dir, "SoilGrids_data")
+ if (!dir.exists(data_dir)) {
+ dir.create(data_dir, recursive = TRUE)
+ }
+
+ # Check for cached data
+ soilc_csv_path <- file.path(data_dir, "soilgrids_soilC_data.csv")
+ if (file.exists(soilc_csv_path) && !overwrite) {
+ soil_data <- utils::read.csv(soilc_csv_path, check.names = FALSE)
+ } else {
+ soil_data <- PEcAn.data.land::soilgrids_soilC_extract(
+ site_info = site_info,
+ outdir = data_dir,
+ verbose = verbose
+ )
+ # Save the extracted data for future use
+ utils::write.csv(soil_data, soilc_csv_path, row.names = FALSE)
+ }
+
+ # Validate soil carbon data units through range check for selected depths
+ for (i in seq_along(depth_layers)) {
+ depth_col <- paste0("Total_soilC_", depth_layers[i])
+ if (any(soil_data[[depth_col]] > 150, na.rm = TRUE)) {
+ PEcAn.logger::logger.warn(sprintf("Some soil carbon values exceed 150 kg/m2 for %s, values may be in wrong units",
+ depth_layers[i]))
+ }
+ }
+
+ processed_data <- preprocess_soilgrids_data(soil_data, depth_layers, verbose)
+
+ if (nrow(processed_data$data) == 0) {
+ PEcAn.logger::logger.severe("No valid sites remain after preprocessing")
+ }
+
+ ens_files <- list()
+
+ for (s in 1:nrow(processed_data$data)) {
+ site_data <- processed_data$data[s, ]
+
+ site_idx <- which(site_info$site_id == site_data$Site_ID)
+ if (length(site_idx) == 0) {
+ PEcAn.logger::logger.warn(sprintf("Site %s not found in site_info", site_data$Site_ID))
+ next
+ }
+ current_site <- site_info[site_idx, ]
+
+ # Create output directory for this site
+ site_folder <- file.path(dir, paste0("SoilGrids_site_", current_site$str_id))
+ if (!dir.exists(site_folder)) {
+ dir.create(site_folder, recursive = TRUE)
+ }
+
+ # Check for existing files
+ existing_files <- list.files(site_folder, "*.nc$", full.names = TRUE)
+ if (length(existing_files) > 0 && !overwrite) {
+ ens_files[[current_site$str_id]] <- existing_files
+ next
+ }
+
+ # Generate ensemble members for each requested depth
+ ens_data <- list()
+ for (i in seq_along(depth_layers)) {
+ ens_data[[i]] <- generate_soilgrids_ensemble(
+ processed_data = processed_data,
+ site_id = current_site$site_id,
+ size = size,
+ depth_layer = depth_layers[i],
+ verbose = verbose
+ )
+ }
+
+ site_files <- list()
+
+ # Write each ensemble member to NetCDF files
+ for (ens in seq_len(size)) {
+ soil_c_values <- numeric(length(depth_layers))
+ for (i in seq_along(depth_layers)) {
+ soil_c_values[i] <- ens_data[[i]][ens]
+ }
+
+ ens_input <- list(
+ dims = list(
+ lat = current_site$lat,
+ lon = current_site$lon,
+ time = 1,
+ depth = depth
+ ),
+ vals = list(
+ soil_organic_carbon_content = soil_c_values
+ )
+ )
+ result <- PEcAn.data.land::pool_ic_list2netcdf(
+ input = ens_input,
+ outdir = site_folder,
+ siteid = current_site$site_id,
+ ens = ens
+ )
+
+ site_files[[ens]] <- result$file
+ }
+
+ ens_files[[current_site$str_id]] <- site_files
+ }
+
+ if (verbose) {
+ end_time <- proc.time()
+ elapsed_time <- end_time - start_time
+ PEcAn.logger::logger.info(sprintf("IC generation completed for %d site(s) in %.2f seconds",
+ n_sites, elapsed_time[3]))
+ }
+
+ return(ens_files)
+}
+
+#' Preprocess SoilGrids data for ensemble generation
+#'
+#' @param soil_data Dataframe with SoilGrids soil carbon data
+#' @param depth_layers Character vector of depth layers to process (e.g., c("0-30cm", "0-200cm"))
+#' @param verbose Logical, print detailed progress information
+#'
+#' @return List containing processed data and CV distributions for requested depths
+#' @export
+preprocess_soilgrids_data <- function(soil_data, depth_layers, verbose = FALSE) {
+ if (!requireNamespace("MASS", quietly = TRUE)) {
+ PEcAn.logger::logger.severe("MASS package required for SoilGrids ensemble generation")
+ }
+ if (verbose) {
+ PEcAn.logger::logger.info(sprintf("Preprocessing soil carbon data for depths: %s",
+ paste(depth_layers, collapse = ", ")))
+ }
+
+ mean_cols <- paste0("Total_soilC_", depth_layers)
+ std_cols <- paste0("Std_soilC_", depth_layers)
+
+ complete_sites <- rep(TRUE, nrow(soil_data))
+ for (col in mean_cols) {
+ complete_sites <- complete_sites & !is.na(soil_data[[col]]) & soil_data[[col]] > 0
+ }
+
+ if (!any(complete_sites)) {
+ PEcAn.logger::logger.severe(sprintf("No sites with complete data for all requested depth intervals: %s",
+ paste(depth_layers, collapse = ", ")))
+ }
+
+ processed <- soil_data[complete_sites, ]
+
+ if (verbose) {
+ removed_count <- nrow(soil_data) - nrow(processed)
+ PEcAn.logger::logger.info(sprintf("Removed %d site(s) with incomplete data. Processing %d sites for depths: %s",
+ removed_count, nrow(processed), paste(depth_layers, collapse = ", ")))
+ }
+
+ # Calculate CV distributions for each requested depth
+ cv_distributions <- list()
+ for (i in seq_along(depth_layers)) {
+ mean_col <- mean_cols[i]
+ std_col <- std_cols[i]
+
+ valid_cv <- processed[[mean_col]] > 0 &
+ !is.na(processed[[std_col]]) &
+ processed[[std_col]] > 0
+
+ if (sum(valid_cv) < 5) {
+ cv_distributions[[depth_layers[i]]] <- list(type = "none")
+ } else {
+ cv_values <- processed[[std_col]][valid_cv] / processed[[mean_col]][valid_cv]
+ cv_valid <- cv_values[cv_values > 0 & is.finite(cv_values)]
+
+ if (length(cv_valid) < 5) {
+ cv_distributions[[depth_layers[i]]] <- list(type = "none")
+ } else {
+ gamma_fit <- try(MASS::fitdistr(cv_valid, "gamma"), silent = TRUE)
+ if (!inherits(gamma_fit, "try-error")) {
+ cv_distributions[[depth_layers[i]]] <- list(
+ type = "gamma",
+ shape = gamma_fit$estimate["shape"],
+ rate = gamma_fit$estimate["rate"]
+ )
+ } else {
+ cv_distributions[[depth_layers[i]]] <- list(
+ type = "empirical",
+ values = cv_valid
+ )
+ }
+ }
+ }
+ }
+
+ return(list(
+ data = processed,
+ cv_distributions = cv_distributions,
+ depth_layers = depth_layers
+ ))
+}
+
+#' Generate soil carbon ensemble members for specific depth
+#'
+#' @description Generates ensemble members for soil carbon at specified depth layer.
+#' Uses site-specific uncertainty when available; otherwise integrates over coefficient of
+#' variation distributions fit to population data. Samples are drawn from gamma distributions
+#' to ensure positive, right-skewed values appropriate for soil carbon estimates.
+#'
+#' @param processed_data Output from preprocess_soilgrids_data()
+#' @param site_id Target site ID
+#' @param size Number of ensemble members to generate
+#' @param depth_layer Depth layer ("0-30cm" or "0-200cm")
+#' @param verbose Logical, print detailed progress information
+#'
+#' @return Numeric vector of soil carbon values including uncertainty, length equal to size.
+#' @export
+generate_soilgrids_ensemble <- function(processed_data, site_id, size, depth_layer, verbose = FALSE) {
+ if (verbose) {
+ PEcAn.logger::logger.info(sprintf("Generating %d ensemble members for site %s (%s)",size, site_id, depth_layer))
+ }
+
+ site_row <- which(processed_data$data$Site_ID == site_id)
+ if (length(site_row) == 0) {
+ PEcAn.logger::logger.severe(sprintf("Site %s not found in processed data", site_id))
+ }
+ mean_col <- paste0("Total_soilC_", depth_layer)
+ std_col <- paste0("Std_soilC_", depth_layer)
+
+ mean_c <- processed_data$data[[mean_col]][site_row]
+ original_sd <- processed_data$data[[std_col]][site_row]
+ cv_dist <- processed_data$cv_distributions[[depth_layer]]
+
+ if (is.na(mean_c) || mean_c <= 0) {
+ PEcAn.logger::logger.severe(sprintf("Invalid mean soil carbon value for site %s (%s)",
+ site_id, depth_layer))
+ }
+
+ # Use site-specific uncertainty
+ if (!is.na(original_sd) && original_sd > 0) {
+ shape <- (mean_c^2) / (original_sd^2)
+ rate <- mean_c / (original_sd^2)
+ if (is.finite(shape) && is.finite(rate) && shape > 0 && rate > 0) {
+ soil_c_values <- pmax(stats::rgamma(size, shape, rate), 0)
+ } else {
+ PEcAn.logger::logger.severe("Cannot generate an ensemble, invalid gamma params")
+ }
+ } else if (cv_dist$type != "none") {
+ # Integrate over uncertainty using CV distribution
+ if (cv_dist$type == "gamma") {
+ cv_samples <- stats::rgamma(size, cv_dist$shape, cv_dist$rate)
+ } else {
+ cv_samples <- sample(cv_dist$values, size, replace = TRUE)
+ }
+
+ sd_values <- mean_c * cv_samples
+ valid <- !is.na(sd_values) & sd_values > 0
+
+ if (any(valid)) {
+ soil_c_values <- numeric(size) # pre-allocate since we're doing partial assignment
+ shape_vec <- (mean_c^2) / (sd_values[valid]^2)
+ rate_vec <- mean_c / (sd_values[valid]^2)
+
+ if (any(!is.finite(shape_vec)) || any(!is.finite(rate_vec)) || any(shape_vec <= 0) || any(rate_vec <= 0)) {
+ PEcAn.logger::logger.severe("Cannot generate an ensemble, invalid gamma params")
+ }
+
+ soil_c_values[valid] <- pmax(stats::rgamma(sum(valid), shape_vec, rate_vec), 0)
+ soil_c_values[!valid] <- NA
+ } else {
+ PEcAn.logger::logger.severe(sprintf("No valid sd_values to generate ensemble for site %s (%s)",
+ site_id, depth_layer))
+ }
+ } else {
+ PEcAn.logger::logger.severe(sprintf("No uncertainty information available for ensemble generation at site %s (%s)",
+ site_id, depth_layer))
+ }
+
+ if (verbose) {
+ PEcAn.logger::logger.debug(sprintf("Generated ensemble for site %s (%s): mean=%.2f, sd=%.2f",
+ site_id, depth_layer, mean(soil_c_values), stats::sd(soil_c_values)
+ ))
+ }
+
+ return(soil_c_values)
+}
\ No newline at end of file
diff --git a/modules/data.land/R/clip_and_save_raster_file.R b/modules/data.land/R/clip_and_save_raster_file.R
new file mode 100644
index 00000000000..9a9f4c6542c
--- /dev/null
+++ b/modules/data.land/R/clip_and_save_raster_file.R
@@ -0,0 +1,58 @@
+#' Clip and Save a Raster File
+#'
+#' Clips a raster to a polygon bounding box, optionally masks to polygon, and saves the
+#' output in the same format as the input.
+#'
+#' @param input_path Character. Path to the input raster file.
+#' @param polygon An object or file coercible to a `SpatVector` by `terra::vect()`
+#' (e.g., an `sf` object, a `SpatVector`, or a file path to a vector dataset)
+#' used for clipping and masking. Must have a valid CRS.
+#' @param out_path Character. Path to save the processed raster.
+#' @param mask Logical: Should pixels outside the polygon but inside its bounding box
+#' be masked out (TRUE) or included (FALSE)?
+#' @param overwrite Logical: Replace output file if it already exists?
+#' @return Invisibly, the clipped `SpatRaster` object. The raster is also saved to `out_path`.
+#' @export
+#' @author David LeBauer
+clip_and_save_raster_file <- function(input_path, polygon, out_path, mask = TRUE, overwrite = TRUE) {
+
+ # Check that input and output files have same extension
+ # This function is not designed to convert between raster formats
+ if (tools::file_ext(input_path) != tools::file_ext(out_path)) {
+ PEcAn.logger::logger.error("Input and output files must have the same extension.")
+ }
+
+ rast_in <- terra::rast(input_path)
+
+ # Coerce to SpatVector if not already
+ if (inherits(polygon, "SpatVector")) { # NB passing a SpatVector to terra::vect() fails
+ poly_sv <- polygon
+ } else {
+ poly_sv <- terra::vect(polygon)
+ }
+
+ if (terra::crs(poly_sv) == "") {
+ PEcAn.logger::logger.error("Input polygon must have CRS defined.")
+ }
+
+ # Reproject polygon to raster CRS if different
+ if (!terra::same.crs(poly_sv, rast_in)) {
+ poly_sv <- terra::project(poly_sv, terra::crs(rast_in))
+ }
+
+ rast_crop <- terra::crop(rast_in, poly_sv)
+
+ if (mask) {
+ rast_to_write <- terra::mask(rast_crop, poly_sv)
+ } else {
+ rast_to_write <- rast_crop
+ }
+
+ terra::writeRaster(
+ rast_to_write,
+ filename = out_path,
+ overwrite = overwrite
+ )
+
+ invisible(rast_to_write)
+}
diff --git a/modules/data.land/R/data.R b/modules/data.land/R/data.R
index 78fc30551e4..7867aeb8bed 100644
--- a/modules/data.land/R/data.R
+++ b/modules/data.land/R/data.R
@@ -78,3 +78,35 @@
#' contains an identical texture.csv, also with no obvious source label.
#' See also comments in soil_utils.R
"soil_class"
+
+#' Fertilizer Nutrient Composition Table
+#'
+#' A dataset of fertilizer and organic matter addition types
+#' and their nitrogen and carbon composition, based on the SWAT model's
+#' `fertilizer.frt` table and DayCent model defaults for organic matter
+#' C:N ratio parameters.
+#'
+#' @format A tibble with one row per fertilizer type and the following columns:
+#' \describe{
+#' \item{name}{\code{character}. Short identifier from SWAT (e.g., \code{"urea"}, \code{"manure"}).}
+#' \item{description}{\code{character}. Longer description of the fertilizer or manure type.}
+#' \item{fraction_mineral_n}{\code{numeric}. Fraction of total nitrogen in mineral form.}
+#' \item{fraction_nh3_n}{\code{numeric}. Fraction of fertilizer by mass that is ammonium-n (NH\eqn{_3}-N).}
+#' \item{fraction_no3_n}{\code{numeric}. Fraction of fertilizer by mass that is nitrate-N (NO\eqn{_3}-N).
+#' Computed as \code{fraction_mineral_n - fraction_nh3_n}.}
+#' \item{fraction_organic_n}{\code{numeric}. Fraction of organic matter that is nitrogen.}
+#' \item{fraction_c}{\code{numeric}. Fraction of mass that is carbon.}
+#' \item{cn_ratio}{\code{numeric}. Carbon-to-nitrogen ratio for organic matter.
+#' Assigned based on DayCent organic matter parameterizations.}
+#' }
+#'
+#' @details
+#' This table is based on SWAT model's \code{fertilizer.frt} file, and uses
+#' C:N ratios (\code{cn_ratio}) from DayCent model default parameter files.
+#' \code{fraction_nh3_n} and \code{fraction_no3_n} represent the fraction of
+#' fertilizer by mass that is ammonium-N and nitrate-N, respectively. This is different from
+#' the SWAT model's definition of \code{fraction_nh3_n} as a fraction of the total mineral N.
+#'
+#' @source https://github.com/swat-model/swatplus
+#' @source DayCent model default parameter file: `omad.100` obtained from the Soil Carbon Solutions Center, https://www.soilcarbonsolutionscenter.com
+"fertilizer_composition_data"
\ No newline at end of file
diff --git a/modules/data.land/R/extract_NEON_veg.R b/modules/data.land/R/extract_NEON_veg.R
index c66f1b6ddfd..3c1d9100754 100644
--- a/modules/data.land/R/extract_NEON_veg.R
+++ b/modules/data.land/R/extract_NEON_veg.R
@@ -50,7 +50,9 @@ extract_NEON_veg <- function(lon, lat, start_date, end_date, store_dir, neonsite
neonsites <- neonstore::neon_sites(api = "https://data.neonscience.org/api/v0", .token = Sys.getenv("NEON_TOKEN"))
}
neonsites <- dplyr::select(neonsites, "siteCode", "siteLatitude", "siteLongitude") #select for relevant columns
- betyneondist <- swfscMisc::distance(lat1 = lat, lon1 = lon, lat2 = neonsites$siteLatitude, lon2 = neonsites$siteLongitude)
+ pt1 <- terra::vect(matrix(c(lon1 = lon, lat1 = lat) , ncol = 2), type = "points", crs = "EPSG:4326")
+ pt2 <- terra::vect(matrix(c(lon2 = neonsites$siteLongitude, lat2 = neonsites$siteLatitude) , ncol = 2), type = "points", crs = "EPSG:4326")
+ betyneondist <- terra::distance(pt1, pt2)
mindist <- min(betyneondist)
distloc <- match(mindist, betyneondist)
lat <- neonsites$siteLatitude[distloc]
diff --git a/modules/data.land/R/extract_soil_nc.R b/modules/data.land/R/extract_soil_nc.R
index da3a836b3fb..a44c001cf23 100644
--- a/modules/data.land/R/extract_soil_nc.R
+++ b/modules/data.land/R/extract_soil_nc.R
@@ -1,16 +1,25 @@
#' Extract soil data from gssurgo
+#' @details This function takes a single lat/lon point and creates a spatial grid
+#' around it for sampling soil variability. The grid_size parameter determines
+#' how many grid points (grid_size x grid_size) are created around the center point.
#'
#' @param outdir Output directory for writing down the netcdf file
-#' @param lat Latitude
-#' @param lon Longitude
+#' @param lat Latitude of center point (single numeric value)
+#' @param lon Longitude of center point (single numeric value)
#' @param size Ensemble size
-#' @param radius radius in meters is used to take soil type samples around the site
-#' @param depths Standard set of soil depths in m to create the ensemble of soil profiles with.
+#' @param grid_size Size of the spatial sampling grid around the center point (default: 3)
+#' @param grid_spacing Spacing between grid cells in meters (default: 100)
+#' @param depths Standard set of soil depths in m to create the ensemble of soil profiles with.
#'
#' @return It returns the address for the generated soil netcdf file
#'
-#' @importFrom rlang .data
+#' @section Current Limitations:
+#' - MUKEY frequency weighting treats occurrence counts as proportional to area coverage
+#' - This approximation may introduce geometric bias for irregular polygon data
+#' - Buffer radius is set to grid_spacing/2 to reduce overlapping queries, but may still miss coverage
+#' - True area-weighted aggregation using polygon geometries is planned (see issue #3609)
#'
+#' @importFrom rlang .data
#' @examples
#' \dontrun{
#' outdir <- "~/paleon/envTest"
@@ -18,77 +27,198 @@
#' lon <- -80
#' PEcAn.data.land::extract_soil_gssurgo(outdir, lat, lon)
#' }
-#' @author Hamze Dokoohaki
+#' @author Hamze Dokoohaki, Akash
#' @export
-#'
-extract_soil_gssurgo<-function(outdir, lat, lon, size=1, radius=500, depths=c(0.15,0.30,0.60)){
+#'
+extract_soil_gssurgo <- function(outdir, lat, lon, size=1, grid_size=3, grid_spacing=100, depths=c(0.15,0.30,0.60)){
# I keep all the ensembles here
all.soil.ens <-list()
+
+ # Grid-based spatial sampling around the center point (via WFS queries)
+ # This creates a grid_size x grid_size sampling grid centered on lat/lon
+ proj_crs <- sf::st_crs("+proj=aea +lat_1=29.5 +lat_2=45.5 +lat_0=23 +lon_0=-96 +x_0=0 +y_0=0 +ellps=GRS80 +datum=NAD83 +units=m +no_defs")
+ wgs84_crs <- sf::st_crs(4326)
+
+ # Convert single center lat/lon to projected coordinates
+ point_sf <- sf::st_sfc(sf::st_point(c(lon, lat)), crs = wgs84_crs)
+ point_proj <- sf::st_transform(point_sf, proj_crs)
+ coords_proj <- sf::st_coordinates(point_proj)
+
+ # Define grid extent
+ half_extent <- (grid_size - 1) / 2 * grid_spacing
+ xmin <- coords_proj[1] - half_extent
+ xmax <- coords_proj[1] + half_extent
+ ymin <- coords_proj[2] - half_extent
+ ymax <- coords_proj[2] + half_extent
- # I ask the gSSURGO to find all the mukeys (loosely can be thought of soil type) within 500m of my site location.
- # Basically I think of this as me going around and taking soil samples within 500m of my site.
- #https://sdmdataaccess.nrcs.usda.gov/SpatialFilterHelp.htm
- mu.Path <- paste0(
- "https://sdmdataaccess.nrcs.usda.gov/Spatial/SDMWGS84Geographic.wfs?",
- "SERVICE=WFS",
- "&VERSION=1.1.0",
- "&REQUEST=GetFeature&TYPENAME=MapunitPoly",
- "&FILTER=",
- "",
- "",
- "Geometry",
- "",
- "", lon, ",", lat, "",
- "",
- "", radius, "",
- "",
- "",
- "&OUTPUTFORMAT=XMLMukeyList"
+ # Create raster template
+ raster_template <- terra::rast(
+ xmin = xmin, xmax = xmax, ymin = ymin, ymax = ymax,
+ resolution = grid_spacing, crs = proj_crs$wkt
)
+ grid_coords <- terra::crds(raster_template)
- xmll <- curl::curl_download(
- mu.Path,
- ssl.verifyhost = FALSE,
- ssl.verifypeer = FALSE)
-
- mukey_str <- XML::xpathApply(
- doc = XML::xmlParse(xmll),
- path = "//MapUnitKeyList",
- fun = XML::xmlValue)
- mukeys <- strsplit(mukey_str, ",")[[1]]
-
- if (length(mukeys) == 0) {
- PEcAn.logger::logger.error("No mapunit keys were found for this site.")
+ # Transform grid coordinates back to WGS84 for gSSURGO queries
+ grid_sf <- sf::st_as_sf(data.frame(x = grid_coords[, 1], y = grid_coords[, 2]),
+ coords = c("x", "y"), crs = proj_crs)
+ grid_wgs84 <- sf::st_transform(grid_sf, wgs84_crs)
+ grid_coords_wgs84 <- sf::st_coordinates(grid_wgs84)
+
+ # Query gSSURGO for each grid point to capture spatial variability
+ buffer_radius <- grid_spacing / 2
+ PEcAn.logger::logger.warn(
+ "Buffer radius set to grid_spacing/2 to avoid overlap",
+ "results may be biased due to lack of area weighting and incomplete spatial coverage."
+ )
+ mukeys_all <- c()
+ for (i in seq_len(nrow(grid_coords_wgs84))) {
+ # Extract coordinates for this grid point (not user input)
+ this_lon <- grid_coords_wgs84[i, 1]
+ this_lat <- grid_coords_wgs84[i, 2]
+
+ # I ask the gSSURGO to find all the mukeys (loosely can be thought of soil type) within grid_spacing distance of each grid point location.
+ # Basically I think of this as me going around and taking soil samples at each grid point.
+ #https://sdmdataaccess.nrcs.usda.gov/SpatialFilterHelp.htm
+ mu.Path <- paste0(
+ "https://sdmdataaccess.nrcs.usda.gov/Spatial/SDMWGS84Geographic.wfs?",
+ "SERVICE=WFS",
+ "&VERSION=1.1.0",
+ "&REQUEST=GetFeature&TYPENAME=MapunitPoly",
+ "&FILTER=",
+ "",
+ "",
+ "Geometry",
+ "",
+ "", this_lon, ",", this_lat, "",
+ "",
+ "", buffer_radius, "",
+ "",
+ "",
+ "&OUTPUTFORMAT=XMLMukeyList"
+ )
+
+ # XML handling with temp file
+ temp_file <- tempfile(fileext = ".xml")
+ xmll <- curl::curl_download(
+ mu.Path,
+ destfile = temp_file,
+ handle = curl::new_handle(ssl_verifypeer = FALSE, ssl_verifyhost = FALSE)
+ )
+
+ # mukey extraction with error recovery
+ mukey_str <- tryCatch({
+ xml_doc <- XML::xmlParse(temp_file)
+ mapunit_nodes <- XML::getNodeSet(xml_doc, "//MapUnitKeyList")
+
+ if (length(mapunit_nodes) > 0) {
+ mukey_data <- XML::xmlValue(mapunit_nodes[[1]])
+ if (!is.null(mukey_data) && nchar(trimws(mukey_data)) > 0) {
+ mukey_data
+ } else {
+ PEcAn.logger::logger.debug(paste("Empty MapUnitKeyList for coordinates",
+ this_lat, ",", this_lon))
+ NULL
+ }
+ } else {
+ PEcAn.logger::logger.debug(paste("No MapUnitKeyList found for coordinates",
+ this_lat, ",", this_lon, "skipping grid point"))
+ NULL
+ }
+ }, error = function(e) {
+ PEcAn.logger::logger.warn(paste("Failed to parse gSSURGO response for coordinates",
+ this_lat, ",", this_lon, ":", e$message))
+ NULL
+ })
+ if (file.exists(temp_file)) unlink(temp_file)
+ if (is.null(mukey_str)) next
+
+ mukeys <- strsplit(mukey_str, ",")[[1]]
+ if (length(mukeys) == 0) next
+
+ mukeys_all <- c(mukeys_all, mukeys)
}
+ # mukey occurrences across all grid points
+ mukey_counts <- table(mukeys_all)
+ # Get unique mukeys from all grid points
+ mukeys_all <- unique(mukeys_all)
+ if (length(mukeys_all) == 0) {
+ PEcAn.logger::logger.severe("No mapunit keys were found for this site.")
+ return(NULL)
+ }
+
# calling the query function sending the mapunit keys
soilprop <- gSSURGO.Query(
- mukeys,
+ mukeys_all,
c("chorizon.sandtotal_r",
"chorizon.silttotal_r",
"chorizon.claytotal_r",
- "chorizon.hzdept_r"))
-
- soilprop.new <- soilprop %>%
+ "chorizon.hzdept_r",
+ "chorizon.hzdepb_r",
+ "chorizon.om_r",
+ "chorizon.dbthirdbar_r", # bulk density at 1/3 bar (field capacity); the standard field-capacity bulk density measurement
+ "chfrags.fragvol_r",
+ "component.comppct_r"))
+
+ # Two-step aggregation:
+ # (1) Sum fragments within horizons, (2) Component area-weighting by mapunit
+ soilprop.weighted <- soilprop %>%
+ dplyr::group_by(.data$cokey, .data$hzdept_r, .data$hzdepb_r) %>%
+ # Each horizon may have multiple rows from different fragment size classes
+ # Sum fragments across size classes and remove duplicate horizon data
+ dplyr::mutate(fragvol_r = min(sum(.data$fragvol_r, na.rm = TRUE), 100)) %>%
+ dplyr::distinct() %>% # Remove duplicate rows created by multiple fragment size classes
+ dplyr::ungroup() %>%
+ # Component area-weighted aggregation by mapunit and horizon depth
+ dplyr::group_by(.data$mukey, .data$hzdept_r, .data$hzdepb_r) %>%
+ dplyr::summarise(
+ sandtotal_r = stats::weighted.mean(.data$sandtotal_r, .data$comppct_r, na.rm = TRUE),
+ silttotal_r = stats::weighted.mean(.data$silttotal_r, .data$comppct_r, na.rm = TRUE),
+ claytotal_r = stats::weighted.mean(.data$claytotal_r, .data$comppct_r, na.rm = TRUE),
+ om_r = stats::weighted.mean(.data$om_r, .data$comppct_r, na.rm = TRUE),
+ dbthirdbar_r = stats::weighted.mean(.data$dbthirdbar_r, .data$comppct_r, na.rm = TRUE),
+ fragvol_r = stats::weighted.mean(.data$fragvol_r, .data$comppct_r, na.rm = TRUE),
+ .groups = "drop"
+ )
+
+ soilprop.new <- soilprop.weighted %>%
dplyr::arrange(.data$hzdept_r) %>%
dplyr::select(
- fraction_of_sand_in_soil = "sandtotal_r",
- fraction_of_silt_in_soil = "silttotal_r",
- fraction_of_clay_in_soil = "claytotal_r",
- soil_depth = "hzdept_r",
+ fraction_of_sand_in_soil = "sandtotal_r", # %
+ fraction_of_silt_in_soil = "silttotal_r", # %
+ fraction_of_clay_in_soil = "claytotal_r", # %
+ soil_depth = "hzdept_r", # cm
+ soil_depth_bottom = "hzdepb_r", # cm
+ organic_matter_pct = "om_r", # %
+ bulk_density = "dbthirdbar_r", # g/cm3
+ coarse_fragment_pct = "fragvol_r", # %
mukey = "mukey") %>%
- dplyr::mutate(dplyr::across(
- c(dplyr::starts_with("fraction_of"),
- "soil_depth"),
- function(x) x / 100))
-
- soilprop.new <- soilprop.new[ stats::complete.cases(soilprop.new) , ]
+ dplyr::mutate(
+ dplyr::across(c(dplyr::starts_with("fraction_of"), "coarse_fragment_pct"),
+ ~ . / 100),
+ horizon_thickness_cm = .data$soil_depth_bottom - .data$soil_depth,
+ soil_organic_carbon_stock = PEcAn.data.land::soc2ocs(
+ soc_percent = PEcAn.data.land::om2soc(.data$organic_matter_pct),
+ bulk_density = .data$bulk_density,
+ thickness = .data$horizon_thickness_cm,
+ coarse_fraction = .data$coarse_fragment_pct
+ )
+ ) %>%
+ dplyr::filter(stats::complete.cases(.))
+ if(nrow(soilprop.new) == 0) {
+ PEcAn.logger::logger.error("No valid soil properties after filtering")
+ return(NULL)
+ }
+ if(!dir.exists(outdir)) dir.create(outdir, recursive = TRUE)
+
#converting it to list
- soil.data.gssurgo <- names(soilprop.new)[1:4] %>%
- purrr::map(function(var) {
- soilprop.new[, var]
- }) %>%
- stats::setNames(names(soilprop.new)[1:4])
+ soil.data.gssurgo <- list(
+ fraction_of_sand_in_soil = soilprop.new$fraction_of_sand_in_soil,
+ fraction_of_silt_in_soil = soilprop.new$fraction_of_silt_in_soil,
+ fraction_of_clay_in_soil = soilprop.new$fraction_of_clay_in_soil,
+ soil_depth = soilprop.new$soil_depth,
+ soil_organic_carbon_stock = soilprop.new$soil_organic_carbon_stock
+ )
#This ensures that I have at least one soil ensemble in case the modeling part failed
all.soil.ens <-c(all.soil.ens, list(soil.data.gssurgo))
@@ -98,13 +228,21 @@ extract_soil_gssurgo<-function(outdir, lat, lon, size=1, radius=500, depths=c(0.
tryCatch({
# find the soil depth levels based on the depth argument
# if soil profile is deeper than what is specified in the argument then I go as deep as the soil profile.
- if (max(soilprop.new$soil_depth) > max(depths)) depths <- sort (c(depths, max(max(soilprop.new$soil_depth))))
-
+ if (max(soilprop.new$soil_depth) > max(depths)) {
+ depths <- sort(c(depths, max(soilprop.new$soil_depth)))
+ }
depth.levs<-findInterval(soilprop.new$soil_depth, depths)
depth.levs[depth.levs==0] <-1
depth.levs[depth.levs>length(depths)] <-length(depths)
- soilprop.new.grouped<-soilprop.new %>%
+ # Remove any NA depth levels
+ valid_indices <- !is.na(depth.levs)
+ if(sum(!valid_indices) > 0) {
+ soilprop.new <- soilprop.new[valid_indices, ]
+ depth.levs <- depth.levs[valid_indices]
+ }
+
+ soilprop.new.grouped<-soilprop.new %>%
dplyr::mutate(DepthL=depths[depth.levs])
# let's fit dirichlet for each depth level separately
@@ -113,61 +251,89 @@ extract_soil_gssurgo<-function(outdir, lat, lon, size=1, radius=500, depths=c(0.
purrr::map_df(function(DepthL.Data){
tryCatch({
# I model the soil properties for this depth
- dir.model <-DepthL.Data[,c(1:3)]%>%
+ dir.model <-DepthL.Data[,c(1:3)] %>%
as.matrix() %>%
sirt::dirichlet.mle(.)
# Monte Carlo sampling based on my dirichlet model
alpha <- dir.model$alpha
alpha <- matrix(alpha, nrow= size, ncol=length(alpha), byrow=TRUE )
simulated.soil <- sirt::dirichlet.simul(alpha)
- # # using the simulated sand/silt/clay to generate soil ensemble
+ # Validate SOC data before processing
+ if (any(is.na(DepthL.Data$soil_organic_carbon_stock))) {
+ PEcAn.logger::logger.warn("Found NA values in soil_organic_carbon_stock data. Removing incomplete records.")
+ DepthL.Data <- DepthL.Data[!is.na(DepthL.Data$soil_organic_carbon_stock), ]
+ }
+ if (nrow(DepthL.Data) == 0) {
+ PEcAn.logger::logger.warn("No valid SOC data after removing NAs")
+ return(NULL)
+ }
+ # Simulate SOC uncertainty using Gamma distribution
+ soc_mean <- mean(DepthL.Data$soil_organic_carbon_stock, na.rm = TRUE)
+ soc_sd <- stats::sd(DepthL.Data$soil_organic_carbon_stock, na.rm = TRUE)
+
+ # Handle edge cases for SOC simulation
+ if (nrow(DepthL.Data) == 1) {
+ simulated_soc <- rep(NA_real_, size)
+ } else if (is.na(soc_sd) || soc_sd == 0) {
+ simulated_soc <- rep(NA_real_, size)
+ } else {
+ shape <- (soc_mean^2) / (soc_sd^2)
+ rate <- soc_mean / (soc_sd^2)
+ simulated_soc <- stats::rgamma(size, shape=shape, rate=rate)
+ }
+
simulated.soil<-simulated.soil %>%
as.data.frame %>%
- dplyr::mutate(DepthL=rep(DepthL.Data[1,6], size),
- mukey=rep(DepthL.Data[1,5], size)) %>%
+ dplyr::mutate(DepthL=rep(DepthL.Data$DepthL[1], size),
+ mukey=rep(DepthL.Data$mukey[1], size),
+ soil_organic_carbon_stock = simulated_soc) %>%
`colnames<-`(c("fraction_of_sand_in_soil",
"fraction_of_silt_in_soil",
"fraction_of_clay_in_soil",
"soil_depth",
- "mukey"))
+ "mukey",
+ "soil_organic_carbon_stock"))
simulated.soil
},
error = function(e) {
PEcAn.logger::logger.warn(conditionMessage(e))
return(NULL)
})
-
})
# estimating the proportion of areas for those mukeys which are modeled
- mukey_area <- mukey_area %>%
- dplyr::filter(mukeys %in% simulated.soil.props$mukey) %>%
- dplyr::mutate(Area=.data$Area/sum(.data$Area))
-
+
+ # defining mukey_area
+ mukey_area <- data.frame(
+ mukey = names(mukey_counts),
+ Area = as.numeric(mukey_counts) / sum(mukey_counts)
+ ) %>%
+ dplyr::filter(.data$mukey %in% unique(simulated.soil.props$mukey)) %>%
+ dplyr::mutate(Area = .data$Area / sum(.data$Area, na.rm = TRUE))
#--- Mixing the depths
soil.profiles<-simulated.soil.props %>%
- split(.$mukey)%>%
+ split(.$mukey) %>%
purrr::map(function(soiltype.sim){
- sizein <- (mukey_area$Area[ mukey_area$mukey == soiltype.sim$mukey %>% unique()])*size
+ sizein <- mukey_area$Area[mukey_area$mukey == unique(soiltype.sim$mukey)] * size
1:ceiling(sizein) %>%
purrr::map(function(x){
soiltype.sim %>%
- split(.$soil_depth)%>%
+ split(.$soil_depth) %>%
purrr::map_dfr(~.x[x,])
})
}) %>%
purrr::flatten()
-
#- add them to the list of all the ensembles ready to be converted to .nc file
all.soil.ens<-soil.profiles %>%
purrr::map(function(SEns){
+ SEns <- SEns[, names(SEns) != "mukey"]
names(SEns) %>%
purrr::map(function(var){
- SEns[,var]
- })%>%
+ as.numeric(unlist(SEns[, var]))
+ }) %>%
stats::setNames(names(SEns))
- })%>%
+ }) %>%
c(all.soil.ens,.)
},
@@ -187,26 +353,24 @@ extract_soil_gssurgo<-function(outdir, lat, lon, size=1, radius=500, depths=c(0.
new.file <- file.path(outdir, paste0(prefix, ".nc"))
#sending it to the func where some new params will be added and then it will be written down as nc file.
suppressWarnings({
- soil2netcdf(all.soil.ens[[i]][1:4], new.file)
+ PEcAn.data.land::soil2netcdf(all.soil.ens[[i]], new.file)
})
-
new.file
},
error = function(e) {
PEcAn.logger::logger.warn(conditionMessage(e))
return(NULL)
})
-
})
# removing the nulls or the ones that throw exception in the above trycatch
out.ense<- out.ense %>%
purrr::discard(is.null)
- out.ense<-out.ense%>%
+ out.ense<-out.ense %>%
stats::setNames(rep("path", length(out.ense)))
return(out.ense)
-}
+}
@@ -310,7 +474,7 @@ extract_soil_nc <- function(in.file,outdir,lat,lon){
new.file <- file.path(outdir,paste0(prefix,".nc"))
## Calculate soil parameters and export to netcdf
- soil2netcdf(soil.data,new.file)
+ PEcAn.data.land::soil2netcdf(soil.data,new.file)
return(new.file)
@@ -348,7 +512,11 @@ extract_soil_nc <- function(in.file,outdir,lat,lon){
#' * `soil_thermal_conductivity_at_saturation`
#' * `soil_thermal_capacity`
#' * `soil_albedo`
-
+#' * `slpotwp`
+#' * `slpotcp`
+#' * `slcpd`
+#' * `slden`
+#' * `soil_organic_carbon_stock`
#'
#' @param varname character vector. See details
#'
@@ -383,9 +551,13 @@ soil.units <- function(varname = NA){
"soil_thermal_conductivity","W m-1 K-1",
"soil_thermal_conductivity_at_saturation","W m-1 K-1",
"soil_thermal_capacity","J kg-1 K-1",
- "soil_albedo","1"
-
- ),
+ "soil_albedo","1",
+ "slpotwp","m",
+ "slpotcp","m",
+ "slcpd","J m-3 K-1",
+ "slden","kg m-3",
+ "soil_organic_carbon_stock","kg m-2"
+ ),
ncol=2,byrow = TRUE))
colnames(variables) <- c('var','unit')
@@ -402,4 +574,4 @@ soil.units <- function(varname = NA){
return(unit)
}
-}
+}
\ No newline at end of file
diff --git a/modules/data.land/R/gSSURGO_Query.R b/modules/data.land/R/gSSURGO_Query.R
index eaf78b94c39..c7e8b5c2984 100644
--- a/modules/data.land/R/gSSURGO_Query.R
+++ b/modules/data.land/R/gSSURGO_Query.R
@@ -4,15 +4,42 @@
#' @param mukeys map unit key from gssurgo
#' @param fields a character vector of the fields to be extracted. See details and the default argument to find out how to define fields.
#'
-#' @return a dataframe with soil properties. Units can be looked up from database documentation
+#' @return a dataframe with soil properties.
#'
+#' @md
#' @details
-#' Full documention of available tables and their relationships can be found here \url{www.sdmdataaccess.nrcs.usda.gov/QueryHelp.aspx}
-#' There have been occasions where NRCS made some minor changes to the structure of the API which this code is where those changes need
-#' to be implemneted here.
-#' Fields need to be defined with their associate tables. For example, sandtotal is a field in chorizon table which needs to be defined as chorizon.sandotal_(r/l/h), where
-#' r stands for the representative value, l stands for low and h stands for high. At the moment fields from mapunit, component, muaggatt, and chorizon tables can be extracted.
+#' This function queries the NRCS gSSURGO database using map unit keys (mukeys).
#'
+#' * **Available tables**: `mapunit`, `component`, `muaggatt`, `chorizon`, and `chfrags`.
+#' * **Field definitions**: Fields must be specified with their associated table name.
+#' For example, total sand content is stored in the `chorizon` table and must be
+#' requested as `chorizon.sandtotal_(r|l|h)`, where:
+#' - `r` = representative value
+#' - `l` = low value
+#' - `h` = high value
+#'
+#' **Commonly queried fields and units** (see NRCS gSSURGO ["Tables and Columns Report"](https://www.nrcs.usda.gov/sites/default/files/2022-08/SSURGO-Metadata-Tables-and-Columns-Report.pdf)
+#' for full list):
+#'
+#' | Field | Description | Units |
+#' |------------------------|-------------------------------------------|--------------|
+#' | `chorizon.cec7_r` | Cation exchange capacity at pH 7 | cmol(+)/kg |
+#' | `chorizon.sandtotal_r` | Total sand (<2 mm fraction) | % |
+#' | `chorizon.silttotal_r` | Total silt (<2 mm fraction) | % |
+#' | `chorizon.claytotal_r` | Total clay (<0.002 mm fraction) | % |
+#' | `chorizon.om_r` | Organic matter (<2 mm soil) | % |
+#' | `chorizon.hzdept_r` | Horizon top depth | cm |
+#' | `chfrags.fragvol_r` | Rock fragments | % (by volume)|
+#' | `chorizon.dbthirdbar_r`| Bulk density at field capacity | g/cm³ |
+#' | `chorizon.ph1to1h2o_r` | Soil pH (1:1 H2O) | pH (unitless)|
+#' | `chorizon.cokey` | Component key (identifier) | — |
+#' | `chorizon.chkey` | Horizon key (identifier) | — |
+#'
+#' **API stability:** The NRCS occasionally modifies the API schema. If queries fail,
+#' adjustments may be required here to align with the updated structure.
+#'
+#' Full documentation of available tables and their relationships is provided in the
+#' \href{https://sdmdataaccess.nrcs.usda.gov/QueryHelp.aspx}{gSSURGO documentation}.
#' @examples
#' \dontrun{
#' PEcAn.data.land::gSSURGO.Query(
@@ -24,8 +51,8 @@
#' "chorizon.dbovendry_r","chorizon.ph1to1h2o_r",
#' "chorizon.cokey","chorizon.chkey"))
#' }
+#' @author Hamze Dokohaki, Akash
#' @export
-#'
gSSURGO.Query <- function(mukeys,
fields = c("chorizon.sandtotal_r",
"chorizon.silttotal_r",
@@ -43,12 +70,12 @@ gSSURGO.Query <- function(mukeys,
SELECT ',
- paste(fixed_fields, collapse = ", "),
- paste(qry_fields, collapse = ", "),
+ paste(c(fixed_fields, qry_fields), collapse = ", "),
' from mapunit
join muaggatt on mapunit.mukey=muaggatt.mukey
join component on mapunit.mukey=component.mukey
join chorizon on component.cokey=chorizon.cokey
+ left join chfrags on chorizon.chkey=chfrags.chkey
where mapunit.mukey in (', paste(mukeys,collapse = ", "),');
diff --git a/modules/data.land/R/ic_process.R b/modules/data.land/R/ic_process.R
index 099875bb15d..77e0da02ab7 100644
--- a/modules/data.land/R/ic_process.R
+++ b/modules/data.land/R/ic_process.R
@@ -13,15 +13,16 @@ ic_process <- function(settings, input, dir, overwrite = FALSE){
#--------------------------------------------------------------------------------------------------#
# Extract info from settings and setup
- site <- settings$run$site
- model <- list()
- model$type <- settings$model$type
- model$id <- settings$model$id
- host <- settings$host
- dbparms <- settings$database
+ site <- settings$run$site
+ model <- list(
+ type = settings$model$type,
+ id = settings$model$id
+ )
+ host <- settings$host
+ dbparms <- settings$database
# Handle IC Workflow locally
- if(host$name != "localhost"){
+ if (host$name != "localhost") {
host$name <- "localhost"
dir <- settings$database$dbfiles
}
@@ -48,17 +49,27 @@ ic_process <- function(settings, input, dir, overwrite = FALSE){
# set up bety connection
con <- PEcAn.DB::db.open(dbparms$bety)
on.exit(PEcAn.DB::db.close(con), add = TRUE)
-
+
#grab site lat and lon info
- latlon <- PEcAn.DB::query.site(site$id, con = con)[c("lat", "lon")]
- # setup site database number, lat, lon and name and copy for format.vars if new input
- new.site <- data.frame(id = as.numeric(site$id),
- lat = latlon$lat,
- lon = latlon$lon)
- new.site$name <- settings$run$site$name
+ # check if site metadata is available in the settings$run$site
+ if (isTRUE(nzchar(site$lat)) && isTRUE(nzchar(site$lon))) {
+ # if lat and lon are available, use them directly
+ latlon <- data.frame(lat = site$lat, lon = site$lon)
+ } else {
+ # otherwise, query the site information from the database
+ latlon <- PEcAn.DB::query.site(site$id, con = con)[c("lat", "lon")]
+ }
- if (isTRUE(new.site$id > 1e9)) {
+ # setup site database number, lat, lon and name and copy for format.vars if new input
+ new.site <- list(
+ id = site$id,
+ lat = latlon$lat,
+ lon = latlon$lon,
+ name = site$name
+ )
+
+ if (is.numeric(new.site$id) && isTRUE(new.site$id > 1e9)) {
# Assume this is a BETYdb id, condense for readability
str_ns <- paste0(new.site$id %/% 1e+09, "-", new.site$id %% 1e+09)
} else {
@@ -103,14 +114,18 @@ ic_process <- function(settings, input, dir, overwrite = FALSE){
# end_date = as.Date("2021-09-01")
#Note the start and end dates for ICs are not the same as those for the forecast runs
#please check out NEON products DP1.10098.001 for your desired site to check data availability before setting start and end dates
- }else{
-
+ }else if(!is.null(input$startdate) && !is.null(input$enddate)){
+ start_date <- as.Date(input$startdate)
+ end_date <- as.Date(input$enddate)
+ } else{
+
query <- paste0("SELECT * FROM inputs where id = ", input$id)
input_file <- PEcAn.DB::db.query(query, con = con)
start_date <- input_file$start_date
end_date <- input_file$end_date
}
+
# set up host information
if (host$name == "localhost") {
machine.host <- PEcAn.remote::fqdn()
@@ -120,9 +135,11 @@ ic_process <- function(settings, input, dir, overwrite = FALSE){
machine <- PEcAn.DB::db.query(paste0("SELECT * from machines where hostname = '", machine.host, "'"), con)
# retrieve model type info
- if(is.null(model)){
- modeltype_id <- PEcAn.DB::db.query(paste0("SELECT modeltype_id FROM models where id = '", settings$model$id, "'"), con)[[1]]
- model <- PEcAn.DB::db.query(paste0("SELECT name FROM modeltypes where id = '", modeltype_id, "'"), con)[[1]]
+ if(isTRUE(nzchar(settings$model$name))){
+ model$name <- settings$model$name
+ } else {
+ modeltype_id <- PEcAn.DB::db.query(paste0("SELECT modeltype_id FROM models where id = '", model$id, "'"), con)[[1]]
+ model$name <- PEcAn.DB::db.query(paste0("SELECT name FROM modeltypes where id = '", modeltype_id, "'"), con)[[1]]
}
@@ -196,8 +213,7 @@ ic_process <- function(settings, input, dir, overwrite = FALSE){
outfolder = outfolder,
n.ensemble = i,
dir = dir,
- machine = machine,
- model = model,
+ model = model$name,
start_date = start_date,
end_date = end_date,
new_site = new.site,
diff --git a/modules/data.land/R/look_up_fertilizer_components.R b/modules/data.land/R/look_up_fertilizer_components.R
new file mode 100644
index 00000000000..7e35d4ffd79
--- /dev/null
+++ b/modules/data.land/R/look_up_fertilizer_components.R
@@ -0,0 +1,113 @@
+#' Calculate the Nitrogen and Carbon Content of a Fertilizer Application
+#'
+#' This function calculates the different forms of nitrogen (NO3-N, NH4-N, organic N) and organic carbon (C_org) in a fertilizer application.
+#' It determines the fertilizer's nitrogen and carbon content from one of three sources: a lookup table based on
+#' the SWAT model's [`fertilizer.frt`](https://github.com/swat-model/swatplus/blob/main/data/Osu_1hru/fertilizer.frt)
+#' file, the fertilizer's NN-PP-KK grade string (e.g. "45-5-10"), or user-specified
+#' fractions of organic nitrogen and carbon.
+#'
+#' Consistent with assumptions in DayCent, DSSAT, and other models, urea is treated as NH3 because the
+#' transformation typically occurs within a day.
+#'
+#' @param type Character string specifying the type of fertilizer. Valid values include NN-PP-KK format (e.g., "45-5-10") as well
+#' as enumerated types including: "urea", "ammonium_nitrate", "compost", "manure", "dairy_fr", "beef_fr".
+#' See notes for full list of valid types.
+#' @param amount Numeric value specifying the amount of fertilizer applied in kg/ha.
+#' @param fraction_organic_n Optional numeric value specifying the fraction of the organic matter that is nitrogen.
+#' Used to define organic matter additions if not provided in the dataset.
+#' @param fraction_organic_c Optional numeric value specifying the fraction of the organic matter that is carbon.
+#' Used to define organic matter additions if not provided in the dataset.
+#'
+#' @md
+#' @note The following is a list of valid fertilizer names:
+#' - Mineral fertilizers: ammonium_nitrate, anhydrous_ammonia, urea
+#' - Fresh manures: manure, beef_fr, broil_fr, dairy_fr, duck_fr, goat_fr, horse_fr,
+#' layer_fr, sheep_fr, swine_fr, trkey_fr, veal_fr
+#' - Compost: org_compost
+#'
+#' @return A list containing:
+#' - `type`: The type of fertilizer used.
+#' - `NO3_N`: The amount of nitrate nitrogen (NO3-N) in kg/ha.
+#' - `NH4_N`: The amount of ammonium nitrogen (NH4-N) in kg/ha.
+#' - `N_org`: The amount of organic nitrogen in kg/ha.
+#' - `C_org`: The amount of organic carbon in kg/ha.
+#'
+#' @examples
+#' # View all available fertilizer types
+#' unique(PEcAn.data.land::fertilizer_composition_data$name)
+#'
+#' # Calculate components for different fertilizer types
+#' look_up_fertilizer_components("urea", 200)
+#' look_up_fertilizer_components("45-00-00", 200)
+#' look_up_fertilizer_components("org_compost", 1000)
+#' look_up_fertilizer_components("dairy_fr", 500)
+#' look_up_fertilizer_components("manure", 1000, fraction_organic_n = 0.02, fraction_organic_c = 0.08)
+#'
+#' @export
+look_up_fertilizer_components <- function(
+ type,
+ amount,
+ fraction_organic_n = NULL,
+ fraction_organic_c = NULL) {
+ # Validate input for organic fertilizers
+ if (!is.null(fraction_organic_n) || !is.null(fraction_organic_c)) {
+ if (is.null(fraction_organic_n) || is.null(fraction_organic_c)) {
+ PEcAn.logger::logger.severe("Both fraction_organic_n and fraction_organic_c must be provided if either is specified.")
+ # could also make an assumption, but that seems error prone
+ }
+ }
+
+ # If user provided organic matter fractions, use those regardless of whether they are in the database
+ if (!is.null(fraction_organic_n) && !is.null(fraction_organic_c)) {
+ return(list(
+ type = type,
+ NO3_N = 0,
+ NH4_N = 0,
+ N_org = round(amount * fraction_organic_n),
+ C_org = round(amount * fraction_organic_c)
+ ))
+ }
+
+  # Check whether the fertilizer type is given in NN-PP-KK grade format (e.g., 45-5-10)
+ if (stringr::str_detect(type, "^\\d{1,2}-\\d{1,2}-\\d{1,2}$")) {
+    # Prefer the database entry if this grade string is also a known name; otherwise parse the N percentage from the string
+ if (type %in% PEcAn.data.land::fertilizer_composition_data$name) {
+ fraction_no3_n <- PEcAn.data.land::fertilizer_composition_data |>
+ dplyr::filter(.data$name == type) |>
+ dplyr::pull(.data$fraction_no3_n)
+ } else {
+ fraction_no3_n <- stringr::str_split(type, "-", simplify = TRUE)[1] |>
+ as.numeric() / 100 # convert % to fraction (0-1)
+ }
+    # Assume all nitrogen is NO3-N, following the assumptions used in the SWAT fertilizer dataset
+ return(list(
+ type = type,
+ NO3_N = round(amount * fraction_no3_n),
+ NH4_N = 0,
+ N_org = 0,
+ C_org = 0
+ ))
+ }
+
+ # Handle the case where the fertilizer type is in the database
+ if (type %in% PEcAn.data.land::fertilizer_composition_data$name) {
+ # Calculate the components directly in the data frame
+ fertilizer_info <- PEcAn.data.land::fertilizer_composition_data |>
+ dplyr::filter(.data$name == type) |>
+ dplyr::mutate(
+ NO3_N = round(amount * .data$fraction_no3_n),
+ NH4_N = round(amount * .data$fraction_nh3_n),
+ N_org = round(amount * .data$fraction_organic_n),
+ C_org = round(amount * .data$fraction_c)
+ )
+
+ res <- fertilizer_info |>
+ dplyr::select("name", "NO3_N", "NH4_N", "N_org", "C_org") |>
+ dplyr::rename(type = "name") |>
+ as.list()
+ return(res)
+ } else {
+ PEcAn.logger::logger.error(paste("Fertilizer type", type, "not found in the database."))
+ return(NULL)
+ }
+}
diff --git a/modules/data.land/R/put_veg_module.R b/modules/data.land/R/put_veg_module.R
index f4fd515d2b2..b2c2ea920f3 100644
--- a/modules/data.land/R/put_veg_module.R
+++ b/modules/data.land/R/put_veg_module.R
@@ -7,7 +7,6 @@
##' @param outfolder path to where the processed files will be written
##' @param n.ensemble integer, ensemble member number
##' @param dir dir path to dbfiles on local machine
-##' @param machine data frame, DB info regarding localhost machine id/hostname etc.
##' @param model model name, e.g. "ED2"
##' @param start_date date in "YYYY-MM-DD" format, in case of source==FIA it's the settings$run$start.date, otherwise start_date of the IC file in DB
##' @param end_date date in "YYYY-MM-DD" format, in case of source==FIA it's the settings$run$end.date, otherwise end_date of the IC file in DB
@@ -21,7 +20,7 @@
put_veg_module <- function(getveg.id, dbparms,
input_veg, pfts,
outfolder, n.ensemble,
- dir, machine, model,
+ dir, model,
start_date, end_date,
new_site,
host, overwrite){
diff --git a/modules/data.land/R/soil2netcdf.R b/modules/data.land/R/soil2netcdf.R
index 8f3afb1affc..5e329c9c247 100644
--- a/modules/data.land/R/soil2netcdf.R
+++ b/modules/data.land/R/soil2netcdf.R
@@ -56,7 +56,7 @@ soil2netcdf <- function(soil.data, new.file){
ncvar <- list()
good_vars <- 0
for(n in seq_along(soil.data)){
- if(all(is.null(soil.data[[n]])) | all(is.na(soil.data[[n]]))) next
+ if(all(is.null(soil.data[[n]])) || all(is.na(soil.data[[n]]))) next
varname <- names(soil.data)[n]
if(length(soil.data[[n]])>1){
## if vector, save by depth
@@ -81,7 +81,7 @@ soil2netcdf <- function(soil.data, new.file){
## add data
for (i in seq_along(ncvar)) {
- if(is.null(soil.data[[i]])|is.na(soil.data[[i]])) next
+ if(is.null(soil.data[[i]]) || all(is.na(soil.data[[i]]))) next
ncdf4::ncvar_put(nc, ncvar[[i]], soil.data[[i]])
}
diff --git a/modules/data.land/R/soil_params_ensemble.R b/modules/data.land/R/soil_params_ensemble.R
index db425798029..dfae3aca705 100644
--- a/modules/data.land/R/soil_params_ensemble.R
+++ b/modules/data.land/R/soil_params_ensemble.R
@@ -159,9 +159,13 @@ soil_params_ensemble_soilgrids <- function(settings,sand,clay,silt,outdir,write_
PATH <- foreach::foreach(i = seq_along(dat), .packages = c("Kendall", "purrr", "PEcAn.data.land"), .options.snow=opts) %dopar% {
samples_ens <- list()
paths <- c()
- siteid <- as.numeric(unique(dat[[i]]$siteid))
+ siteid <- unique(dat[[i]]$siteid)
soil_depth <- unique(dat[[i]]$soil_depth)
- str_ns <- paste0(siteid %/% 1e+09, "-", siteid %% 1e+09)
+ if (is.numeric(siteid) && siteid > 1e9) {
+ str_ns <- paste0(siteid %/% 1e+09, "-", siteid %% 1e+09)
+ } else {
+ str_ns <- as.character(siteid)
+ }
temp_outdir <- file.path(outdir, siteid)
dir.create(temp_outdir)
# Estimate Dirichlet parameters for each depth at each site
diff --git a/modules/data.land/R/soil_process.R b/modules/data.land/R/soil_process.R
index 6cf29ba1a5c..515a2aadcc4 100644
--- a/modules/data.land/R/soil_process.R
+++ b/modules/data.land/R/soil_process.R
@@ -31,12 +31,14 @@ soil_process <- function(settings, input, dbfiles, overwrite = FALSE,run.local=T
con <- PEcAn.DB::db.open(dbparms$bety)
on.exit(PEcAn.DB::db.close(con), add = TRUE)
# get site info
- latlon <- PEcAn.DB::query.site(site$id, con = con)[c("lat", "lon")]
- new.site <- data.frame(id = as.numeric(site$id),
- lat = latlon$lat,
- lon = latlon$lon)
+ if (isTRUE(nzchar(site$lat)) && isTRUE(nzchar(site$lon))) {
+ latlon <- data.frame(lat = site$lat, lon = site$lon)
+ } else {
+ latlon <- PEcAn.DB::query.site(site$id, con = con)[c("lat", "lon")]
+ }
+ new.site <- list(id = site$id, lat = latlon$lat, lon = latlon$lon)
- if (isTRUE(new.site$id > 1e9)) {
+ if (is.numeric(new.site$id) && isTRUE(new.site$id > 1e9)) {
# Assume this is a BETYdb id, condense for readability
str_ns <- paste0(new.site$id %/% 1e+09, "-", new.site$id %% 1e+09)
} else {
@@ -45,7 +47,7 @@ soil_process <- function(settings, input, dbfiles, overwrite = FALSE,run.local=T
outfolder <- file.path(dbfiles, paste0(input$source, "_site_", str_ns))
- if(!dir.exists(outfolder)) dir.create(outfolder)
+ if (!dir.exists(outfolder)) dir.create(outfolder)
#--------------------------------------------------------------------------------------------------#
# if we are reading from gSSURGO
if (input$source=="gSSURGO"){
@@ -56,9 +58,20 @@ soil_process <- function(settings, input, dbfiles, overwrite = FALSE,run.local=T
names(newfile) <- rep("path", length(newfile))
if(length(newfile)==0){
- radiusL <- ifelse(is.null(settings$run$input$soil$radius), 500, as.numeric(settings$run$input$soil$radius))
-
- newfile<-extract_soil_gssurgo(outfolder, lat = latlon$lat, lon=latlon$lon, radius = radiusL)
+ radius <- ifelse(is.null(settings$run$input$soil$radius), 100,
+ as.numeric(settings$run$input$soil$radius))
+ grid_size <- max(3, ifelse(is.null(settings$run$input$soil$grid_size), 3,
+ as.numeric(settings$run$input$soil$grid_size)))
+
+ grid_extent <- radius * sqrt(pi)
+ grid_spacing <- grid_extent / (grid_size - 1)
+ newfile <- extract_soil_gssurgo(
+ outfolder,
+ lat = latlon$lat,
+ lon = latlon$lon,
+ grid_size = grid_size,
+ grid_spacing = grid_spacing
+ )
# register files in DB
for(i in 1:length(newfile)){
diff --git a/modules/data.land/R/soil_utils.R b/modules/data.land/R/soil_utils.R
index 131852e002d..610dcef973c 100644
--- a/modules/data.land/R/soil_utils.R
+++ b/modules/data.land/R/soil_utils.R
@@ -342,4 +342,47 @@ mpot2smoist <- function(mpot,soil_water_potential_at_saturation,soil_hydraulic_b
smfrac = ( mpot / soil_water_potential_at_saturation) ^ (-1. / soil_hydraulic_b)
smoist = smfrac * volume_fraction_of_water_in_soil_at_saturation
return(smoist)
-}#end function
\ No newline at end of file
+}#end function
+
+#' Convert soil organic carbon concentration to organic carbon stock
+#'
+#' @param soc_percent soil organic carbon concentration (percent, 0-100)
+#' @param bulk_density bulk density (g/cm3)
+#' @param thickness layer thickness (cm)
+#' @param coarse_fraction coarse fragment volume fraction (0-1, default = 0)
+#' @return organic carbon stock (kg/m2)
+#' @export
+#' @author Akash
+#' @examples
+#' soc2ocs(2.5, 1.3, 30, 0.15)
+#'
+#' # Multiple soil layers
+#' soc_pct <- c(3.2, 2.1, 1.8)
+#' bd_g_cm3 <- c(1.2, 1.4, 1.5)
+#' thickness_cm <- c(15, 15, 30)
+#' coarse_fraction <- c(0.10, 0.20, 0.25)
+#' soc2ocs(soc_pct, bd_g_cm3, thickness_cm, coarse_fraction)
+#'
+soc2ocs <- function(soc_percent, bulk_density, thickness, coarse_fraction = 0) {
+ # Convert inputs to standard units for calculation
+ soc_frac <- soc_percent / 100
+ bd_kg_m3 <- PEcAn.utils::ud_convert(bulk_density, "g cm-3", "kg m-3")
+ thick_m <- PEcAn.utils::ud_convert(thickness, "cm", "m")
+
+ # organic carbon stock: SOC × BD × thickness × (1 - coarse_fraction)
+ ocs_kg_m2 <- soc_frac * bd_kg_m3 * thick_m * (1 - coarse_fraction)
+ return(ocs_kg_m2)
+} # soc2ocs
+
+#' Convert organic matter to soil organic carbon
+#'
+#' @description Converts organic matter content to soil organic carbon using the Van Bemmelen factor (1.724).
+#'
+#' @param om_percent organic matter percentage (0-100)
+#' @return soil organic carbon percentage (0-100)
+#' @export
+#' @author Akash
+#'
+om2soc <- function(om_percent) {
+ return(om_percent / 1.724)
+} # om2soc
diff --git a/modules/data.land/R/soilgrids_soc_extraction.R b/modules/data.land/R/soilgrids_soc_extraction.R
index 81a5995fc75..ceab4486a7c 100644
--- a/modules/data.land/R/soilgrids_soc_extraction.R
+++ b/modules/data.land/R/soilgrids_soc_extraction.R
@@ -66,6 +66,18 @@ soilgrids_soilC_extract <- function (site_info, outdir=NULL, verbose=TRUE) {
# prepare site info for extraction
internal_site_info <- site_info[, c("site_id", "site_name", "lat", "lon")]
+
+ # Early return if no valid sites (after processing internal_site_info)
+ if (nrow(internal_site_info) == 0) {
+ if (verbose) {
+ PEcAn.logger::logger.severe(
+ "No valid sites remaining after NA check. ",
+ "All sites had missing SoilGrids data for the first depth layer."
+ )
+ }
+ return(NULL)
+ }
+
#create a variable to store mean and quantile of organic carbon density (ocd) for each soil depth
ocdquant <- matrix(NA, nrow = 6, ncol = length(internal_site_info$lon) * 4) #row represents soil depth, col represents mean, 5%, 50% and 95%-quantile of ocd for all sites
lonlat <- cbind(internal_site_info$lon, internal_site_info$lat)
@@ -78,17 +90,27 @@ soilgrids_soilC_extract <- function (site_info, outdir=NULL, verbose=TRUE) {
p <- terra::vect(lonlat, crs = "+proj=longlat +datum=WGS84") # Users need to provide lon/lat
newcrs <- "+proj=igh +datum=WGS84 +no_defs +towgs84=0,0,0"
p_reproj <- terra::project(p, newcrs) # Transform the point vector to data with Homolosine projection
+
+ # Extract coordinates for safe parallel transfer
+ p_coords <- terra::crds(p_reproj)
+
data_tag <- c("_mean.vrt", "_Q0.05.vrt", "_Q0.5.vrt", "_Q0.95.vrt")
name_tag <- expand.grid(depths, data_tag, stringsAsFactors = F)#find the combinations between data and depth tags.
L <- split(as.data.frame(name_tag), seq(nrow(as.data.frame(name_tag))))#convert tags into lists.
get_layer <- function(l) {
ocd_url <- paste0(base_data_url, l[[1]], l[[2]])
- ocd_map <- terra::extract(terra::rast(ocd_url), p_reproj)
- unlist(ocd_map[, -1]) / 10
+ tryCatch({
+ # Create temporary vector inside worker
+ p_temp <- terra::vect(p_coords, crs = newcrs)
+ vals <- terra::extract(terra::rast(ocd_url), p_temp)
+ unlist(vals[, -1]) / 10
+ }, error = function(e) {
+ rep(NA, nrow(p_coords))
+ })
}
- ocd_real <- try(furrr::future_map(L, get_layer, .progress = TRUE))
+ ocd_real <- try(furrr::future_map(L, get_layer, .options = furrr::furrr_options(seed = TRUE), .progress = TRUE))
if ("try-error" %in% class(ocd_real)) {
ocd_real <- vector("list", length = length(L))
pb <- utils::txtProgressBar(min = 0, max = length(L), style = 3)
@@ -116,6 +138,19 @@ soilgrids_soilC_extract <- function (site_info, outdir=NULL, verbose=TRUE) {
ocd_df$Value<-as.numeric(ocd_df$Value)
f1<-factor(ocd_df$Siteid,levels=unique(ocd_df$Siteid))
f2<-factor(ocd_df$Depth,levels=unique(ocd_df$Depth))
+
+ # Skip if not enough quantiles (before gamma fitting)
+ if (length(unique(ocd_df$Quantile)) < 2) {
+ if (verbose) {
+ PEcAn.logger::logger.warn(
+ "Insufficient quantiles (", length(unique(ocd_df$Quantile)), ") ",
+ "available for gamma distribution fitting at some sites. ",
+ "Require at least 2 different quantiles to fit parameters."
+ )
+ }
+ return(NULL)
+ }
+
#split data by groups of sites and soil depth, while keeping the original order of each group
dat <- split(ocd_df, list(f1, f2))
@@ -132,22 +167,29 @@ soilgrids_soilC_extract <- function (site_info, outdir=NULL, verbose=TRUE) {
}
fitQ <- function(x) {
- val = x$Value
- stat = as.character(x$Quantile)
- theta = c(10, 10)
- fit <-
- list(Gamma = stats::optim(theta, cgamma, val = val, stat = stat))
- SS <- sapply(fit, function(f) {
- f$value
- })
- par <- sapply(fit, function(f) {
- f$par
- })
- return(list(par = par, SS = SS))
+ val <- x$Value
+ stat <- as.character(x$Quantile)
+ # Skip fitting if all values are NA or not numeric
+ if (all(is.na(val)) || length(val) == 0) {
+ return(list(par = c(NA, NA), SS = NA))
+ }
+ theta <- c(10, 10)
+ fit <- tryCatch(
+ stats::optim(theta, cgamma, val = val, stat = stat),
+ error = function(e) NULL
+ )
+ if (is.null(fit)) {
+ return(list(par = c(NA, NA), SS = NA))
+ }
+ return(list(par = fit$par, SS = fit$value))
}
score <- suppressWarnings(lapply(dat, fitQ))
bestPar <- sapply(score, function(f) { f$par })
+ # Ensure bestPar is a 2-row matrix even when invalid sites are present
+ if (is.null(dim(bestPar)) || nrow(bestPar) != 2) {
+ bestPar <- matrix(bestPar, nrow = 2, byrow = TRUE)
+ }
mean <- bestPar[1,] / bestPar[2,]
std <- sqrt(bestPar[1,] / bestPar[2,] ^ 2)
mean_site <- matrix(mean, length(internal_site_info$lon), 6)
@@ -184,11 +226,17 @@ soilgrids_soilC_extract <- function (site_info, outdir=NULL, verbose=TRUE) {
rownames(soilgrids_soilC_data) <- NULL
if (!is.null(outdir)) {
- PEcAn.logger::logger.info(paste0("Storing results in: ",file.path(outdir,"soilgrids_soilC_data.csv")))
- utils::write.csv(soilgrids_soilC_data,file=file.path(outdir,"soilgrids_soilC_data.csv"),row.names = FALSE)
- }
- else {
- PEcAn.logger::logger.error("No output directory found.")
+ # Ensure the directory exists; create if not
+ if (!dir.exists(outdir)) {
+ dir.create(outdir, recursive = TRUE)
+ PEcAn.logger::logger.info(paste0("Created output directory: ", outdir))
+ }
+ PEcAn.logger::logger.info(paste0("Storing results in: ", file.path(outdir, "soilgrids_soilC_data.csv")))
+ utils::write.csv(soilgrids_soilC_data,
+ file = file.path(outdir, "soilgrids_soilC_data.csv"),
+ row.names = FALSE)
+ } else {
+ PEcAn.logger::logger.warn("No output directory found. Results are only returned to R environment.")
}
# return the results to the terminal as well
return(soilgrids_soilC_data)
diff --git a/modules/data.land/R/validate_events.R b/modules/data.land/R/validate_events.R
new file mode 100644
index 00000000000..95ebd486960
--- /dev/null
+++ b/modules/data.land/R/validate_events.R
@@ -0,0 +1,54 @@
+#' Validate PEcAn events JSON against schema v0.1.0
+#'
+#' Validates a PEcAn events JSON file (single-site object or an array of site
+#' objects) against the bundled JSON Schema (draft 2020-12) using the AJV
+#' engine.
+#'
+#' - Logs an error and returns FALSE if the JSON file does not exist or does
+#' not conform to the schema.
+#' - Logs a warning and returns NA if the optional package `jsonvalidate` is
+#'   not installed, so calling code can proceed without a hard dependency.
+#'
+#' @param events_json character. Path to the JSON file to validate.
+#' @param verbose logical. When `TRUE`, include detailed AJV messages on error.
+#'
+#' @return Logical TRUE if valid, FALSE if invalid.
+#' NA if validator unavailable.
+#'
+#' @author David LeBauer
+#'
+#' @examples
+#' # validate_events_json(system.file("events_fixtures/events_site1.json",
+#' # package = "PEcAn.data.land"))
+#'
+#' @export
+validate_events_json <- function(events_json, verbose = TRUE) {
+ if (!file.exists(events_json)) {
+ PEcAn.logger::logger.error(glue::glue("events_json file does not exist: {events_json}"))
+ return(FALSE)
+ }
+
+ if (!requireNamespace("jsonvalidate", quietly = TRUE)) {
+ PEcAn.logger::logger.warn("Skipping events schema validation: package 'jsonvalidate' not installed.")
+ return(NA)
+ }
+
+ schema <- system.file("events_schema_v0.1.0.json", package = "PEcAn.data.land", mustWork = TRUE)
+ ok <- jsonvalidate::json_validate(events_json, schema = schema, engine = "ajv", verbose = verbose, error = FALSE)
+ if (isTRUE(ok)) {
+ PEcAn.logger::logger.info(glue::glue("events_json file is valid: {events_json}"))
+ return(TRUE)
+ }
+
+ errs <- attr(ok, "errors")
+ detail <- if (is.null(errs)) {
+ ""
+ } else {
+ paste(sprintf(
+ "%s: %s",
+ ifelse(nzchar(errs$instancePath), errs$instancePath, ""), errs$message
+ ), collapse = "; ")
+ }
+ PEcAn.logger::logger.error(glue::glue("events_json does not conform to schema: {events_json}; {detail}"))
+ FALSE
+}
diff --git a/modules/data.land/data-raw/create_fertilizer_data.R b/modules/data.land/data-raw/create_fertilizer_data.R
new file mode 100644
index 00000000000..7a9a165b56b
--- /dev/null
+++ b/modules/data.land/data-raw/create_fertilizer_data.R
@@ -0,0 +1,71 @@
+# Define DayCent default C:N ratios for manures
+daycent_default_cn <- tibble::tribble(
+ ~swat_name, ~cn_ratio, ~daycent_name, ~daycent_description, ~notes,
+ "dairy_fr", 12.60, "DARY", "LRR dairy solid Inventory managed manure", NA,
+ "beef_fr", 12.60, "BEEF", "LRR beef solid Inventory managed manure", NA,
+ "veal_fr", 12.60, "BEEF", "LRR beef solid Inventory managed manure", "no separate parameter in DayCent, assigned as beef",
+ "swine_fr", 13.45, "SWIN", "LRR swine solid dry Inventory managed manure", NA,
+ "sheep_fr", 11.36, "SHEP", "LRR sheep solid Inventory managed manure", NA,
+ "goat_fr", 11.36, "SHEP", "LRR sheep solid Inventory managed manure", "no separate parameter in DayCent, assigned as sheep",
+ "horse_fr", 30.72, "HORS", "LRR horse solid Inventory managed manure", NA,
+ "layer_fr", 17.4, "POUL", "LRR poultry solid Inventory managed manure", "used for all poultry: layer, broiler, turkey, duck",
+ "broil_fr", 17.4, "POUL", "LRR poultry solid Inventory managed manure", "used for all poultry: layer, broiler, turkey, duck",
+ "trkey_fr", 17.4, "POUL", "LRR poultry solid Inventory managed manure", "used for all poultry: layer, broiler, turkey, duck",
+ "duck_fr", 17.4, "POUL", "LRR poultry solid Inventory managed manure", "used for all poultry: layer, broiler, turkey, duck",
+ "ceap_p_n", 11.98, "BEEF/SHEP", "Composite: beef + sheep", "composite, mean of beef and sheep",
+ "ceap_p_p", 11.98, "BEEF/SHEP", "Composite: beef + sheep", "composite, mean of beef and sheep",
+ "ceap_h_n", 12.60, "DARY", "LRR dairy solid Inventory managed manure", "composite, assigned as dairy",
+ "ceap_h_p", 12.60, "DARY", "LRR dairy solid Inventory managed manure", "composite, assigned as dairy"
+)
+
+convert_swat_fert_table_to_pkg_df <- function() {
+ fertilizer.frt <- "https://raw.githubusercontent.com/swat-model/swatplus/refs/heads/main/data/Osu_1hru/fertilizer.frt"
+ readr::read_table(
+ file = fertilizer.frt,
+ skip = 1,
+ col_types = readr::cols(.default = readr::col_character())
+ ) |>
+ dplyr::filter(
+ !name %in% c("elem_n", "elem_p", "p") |
+ !stringr::str_starts(name, "ceap")
+ ) |>
+ dplyr::mutate(dplyr::across(c(min_n, min_p, org_n, org_p, nh3_n), as.numeric)) |>
+ dplyr::rename(
+ fraction_mineral_n = min_n,
+ fraction_organic_n = org_n,
+ fraction_nh3_n = nh3_n
+ ) |>
+ dplyr::left_join(daycent_default_cn, by = c("name" = "swat_name")) |>
+ dplyr::mutate(
+ description = ifelse(name == "org_compost", "OrganicCompost", description),
+ name = ifelse(name == "anh_nh3", "anhydrous_ammonia", name),
+      # SWAT's fertilizer.frt defines fraction_nh3_n as a fraction of the total mineral N.
+      # That convention is unintuitive, so here fraction_nh3_n is redefined
+      # relative to total fertilizer mass.
+ fraction_nh3_n = fraction_mineral_n * fraction_nh3_n,
+ fraction_no3_n = fraction_mineral_n - fraction_nh3_n,
+ cn_ratio = dplyr::case_when(
+ !is.na(cn_ratio) ~ cn_ratio,
+ fraction_organic_n > 0 ~ NA_real_,
+ TRUE ~ 0 # if no organic N, C:N defined as 0
+ ),
+ fraction_c = ifelse(!is.na(cn_ratio) & fraction_organic_n > 0,
+ cn_ratio * fraction_organic_n,
+ 0)
+ ) |>
+ dplyr::select(name, description, fraction_mineral_n, fraction_nh3_n,
+ fraction_no3_n, fraction_organic_n, fraction_c, cn_ratio)
+}
+
+custom_fertilizers <- tibble::tribble(
+ ~name, ~description, ~fraction_mineral_n, ~fraction_nh3_n, ~fraction_no3_n, ~fraction_organic_n, ~fraction_c, ~cn_ratio,
+ "manure", "Generic mixed animal manure", 0.0138, 0.0137, 0.0001, 0.02, 0.24, 12,
+ "ammonium_nitrate", "Ammonium nitrate", 0.33, 0.17, 0.16, 0.0, 0.0, NA
+)
+fertilizer_composition_data <- dplyr::bind_rows(
+ convert_swat_fert_table_to_pkg_df(),
+ custom_fertilizers
+)
+
+usethis::use_data(fertilizer_composition_data, overwrite = TRUE)
+
diff --git a/modules/data.land/data/fertilizer_composition_data.rda b/modules/data.land/data/fertilizer_composition_data.rda
new file mode 100644
index 00000000000..5ee02bbb545
Binary files /dev/null and b/modules/data.land/data/fertilizer_composition_data.rda differ
diff --git a/modules/data.land/inst/events_fixtures/events_site1.json b/modules/data.land/inst/events_fixtures/events_site1.json
new file mode 100644
index 00000000000..19605953a19
--- /dev/null
+++ b/modules/data.land/inst/events_fixtures/events_site1.json
@@ -0,0 +1,44 @@
+[
+ {
+ "pecan_events_version": "0.1.0",
+ "site_id": "EX1",
+ "events": [
+ {
+ "event_type": "tillage",
+ "date": "2022-02-04",
+ "tillage_eff_0to1": 0.2
+ },
+ {
+ "event_type": "tillage",
+ "date": "2022-02-09",
+ "tillage_eff_0to1": 0.1
+ },
+ {
+ "event_type": "irrigation",
+ "date": "2022-02-09",
+ "amount_mm": 50,
+ "method": "soil"
+ },
+ {
+ "event_type": "fertilization",
+ "date": "2022-02-09",
+ "org_n_kg_m2": 0,
+ "org_c_kg_m2": 0,
+ "nh4_n_kg_m2": 0.01
+ },
+ {
+ "event_type": "planting",
+ "date": "2022-02-19",
+ "leaf_c_kg_m2": 0.01
+ },
+ {
+ "event_type": "harvest",
+ "date": "2022-09-07",
+ "frac_above_removed_0to1": 0.1,
+ "frac_below_removed_0to1": 0.0,
+ "frac_above_to_litter_0to1": 0.0,
+ "frac_below_to_litter_0to1": 0.0
+ }
+ ]
+ }
+]
diff --git a/modules/data.land/inst/events_fixtures/events_site1_site2.json b/modules/data.land/inst/events_fixtures/events_site1_site2.json
new file mode 100644
index 00000000000..84a2ee195d6
--- /dev/null
+++ b/modules/data.land/inst/events_fixtures/events_site1_site2.json
@@ -0,0 +1,40 @@
+[
+ {
+ "pecan_events_version": "0.1.0",
+ "site_id": "S1",
+ "pft": "PFT",
+ "events": [
+ {
+ "event_type": "tillage",
+ "date": "2022-01-15",
+ "tillage_eff_0to1": 0.1
+ },
+ {
+ "event_type": "harvest",
+ "date": "2022-09-01",
+ "frac_above_removed_0to1": 0.2,
+ "frac_below_removed_0to1": 0.0,
+ "frac_above_to_litter_0to1": 0.0,
+ "frac_below_to_litter_0to1": 0.0
+ }
+ ]
+ },
+ {
+ "pecan_events_version": "0.1.0",
+ "site_id": "S2",
+ "pft": "PFT",
+ "events": [
+ {
+ "event_type": "planting",
+ "date": "2022-03-01",
+ "leaf_c_kg_m2": 0.01
+ },
+ {
+ "event_type": "irrigation",
+ "date": "2022-03-10",
+ "amount_mm": 10,
+ "method": "soil"
+ }
+ ]
+ }
+]
diff --git a/modules/data.land/inst/events_schema_v0.1.0.json b/modules/data.land/inst/events_schema_v0.1.0.json
new file mode 100644
index 00000000000..caee982ef5a
--- /dev/null
+++ b/modules/data.land/inst/events_schema_v0.1.0.json
@@ -0,0 +1,82 @@
+{
+ "$schema": "https://json-schema.org/draft/2020-12/schema",
+ "$id": "https://pecanproject.org/schema/events-mvp-0-1-0.json",
+ "oneOf": [
+ { "$ref": "#/$defs/site" },
+ { "type": "array", "items": { "$ref": "#/$defs/site" } }
+ ],
+ "$defs": {
+ "site": {
+ "type": "object",
+ "required": ["pecan_events_version", "site_id", "events"],
+ "properties": {
+ "pecan_events_version": { "type": "string", "const": "0.1.0" },
+ "site_id": { "type": "string", "minLength": 1 },
+ "pft": { "type": "string" },
+ "ensemble_id": { "type": ["string", "null"], "minLength": 1 },
+ "geometry_uri": { "type": ["string", "null"], "format": "uri" },
+ "provenance": { "type": "object", "additionalProperties": true },
+ "events": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "required": ["event_type", "date"],
+ "properties": {
+ "event_type": {
+ "type": "string",
+ "enum": ["planting", "harvest", "irrigation", "fertilization", "tillage"]
+ },
+ "date": { "type": "string", "pattern": "^\\d{4}-\\d{2}-\\d{2}$" },
+ "fraction_area": { "type": "number", "minimum": 0, "maximum": 1, "default": 1.0 },
+ "source": { "type": "string" },
+
+ "leaf_c_kg_m2": { "type": "number", "minimum": 0 },
+ "wood_c_kg_m2": { "type": "number", "minimum": 0 },
+ "fine_root_c_kg_m2": { "type": "number", "minimum": 0 },
+ "coarse_root_c_kg_m2": { "type": "number", "minimum": 0 },
+ "cultivar": { "type": "string" },
+ "crop_code": { "type": "string" },
+ "crop_display": { "type": "string" },
+
+ "frac_above_removed_0to1": { "type": "number", "minimum": 0, "maximum": 1 },
+ "frac_below_removed_0to1": { "type": "number", "minimum": 0, "maximum": 1 },
+ "frac_above_to_litter_0to1": { "type": "number", "minimum": 0, "maximum": 1 },
+ "frac_below_to_litter_0to1": { "type": "number", "minimum": 0, "maximum": 1 },
+
+ "amount_mm": { "type": "number", "minimum": 0 },
+ "method": { "type": "string", "enum": ["soil", "canopy", "flood"] },
+ "immed_evap_frac_0to1": { "type": "number", "minimum": 0, "maximum": 1 },
+
+ "org_c_kg_m2": { "type": "number", "minimum": 0 },
+ "org_n_kg_m2": { "type": "number", "minimum": 0 },
+ "nh4_n_kg_m2": { "type": "number", "minimum": 0 },
+ "no3_n_kg_m2": { "type": "number", "minimum": 0 },
+
+ "tillage_eff_0to1": { "type": "number", "minimum": 0 },
+ "intensity_category": { "type": "string" },
+ "depth_m": { "type": "number", "minimum": 0 }
+ },
+ "allOf": [
+ { "if": { "properties": { "event_type": { "const": "planting" } } },
+ "then": { "required": ["leaf_c_kg_m2"] } },
+ { "if": { "properties": { "event_type": { "const": "harvest" } } },
+ "then": { "required": ["frac_above_removed_0to1"] } },
+ { "if": { "properties": { "event_type": { "const": "irrigation" } } },
+ "then": { "required": ["amount_mm", "method"] } },
+ { "if": { "properties": { "event_type": { "const": "fertilization" } } },
+ "then": { "anyOf": [
+ { "required": ["org_c_kg_m2"] },
+ { "required": ["nh4_n_kg_m2"] },
+ { "required": ["no3_n_kg_m2"] }
+ ] } },
+ { "if": { "properties": { "event_type": { "const": "tillage" } } },
+ "then": { "required": ["tillage_eff_0to1"] } }
+ ],
+ "additionalProperties": true
+ }
+ }
+ },
+ "additionalProperties": false
+ }
+ }
+}
diff --git a/modules/data.land/inst/generate_events.R b/modules/data.land/inst/generate_events.R
new file mode 100755
index 00000000000..f44f148821f
--- /dev/null
+++ b/modules/data.land/inst/generate_events.R
@@ -0,0 +1,258 @@
+#!/usr/bin/env Rscript
+
+# --- Profiling Start ---
+# Rprof("profiling.out")
+# --- End Profiling Start ---
+
+# Minimal MVP: build mvp_events.json from ca_field_attributes.csv
+# - Input: data/ca_field_attributes.csv (columns: site_id, year, pft, ...)
+# - Output: data/mvp_events.json following data/pecan_events_schema_v0.1.0.json
+# - Events (minimal):
+# * planting: annual crops -> every site-year; woody perennials -> first observed year only
+# * harvest: all site-years
+# Each event includes only the schema-required fields per event_type.
+
+# --- Config ---
+data_dir <- "/projectnb2/dietzelab/ccmmf/data"
+field_attr_csv <- file.path(data_dir, "ca_field_attributes.csv")
+sample_output_json <- file.path(data_dir, "events/mvp_events.json")
+output_json <- file.path(data_dir, "events/events.json")
+
+# if TRUE, only generate for design points
+# TODO: generate full set for all sites to use in site selection and downscaling
+DESIGN_POINTS <- TRUE
+
+PRODUCTION <- FALSE # set TRUE for all sites, not needed if DESIGN_POINTS is TRUE
+if (PRODUCTION) {
+ stop("This could be very slow; consider profiling and writing to db or arrow etc")
+}
+set.seed(123)
+
+ca_field_attributes <- vroom::vroom(
+field_attr_csv,
+ show_col_types = FALSE
+)
+
+if (DESIGN_POINTS) {
+ # design_points <- readr::read_csv("https://raw.githubusercontent.com/ccmmf/workflows/refs/heads/main/data/design_points.csv")
+ # d <- update_design_point_site_ids(design_points, ca_field_attributes)
+ # readr::write_csv(d, file.path(data_dir, "design_points.csv"))
+ # readr::write_csv(d, "~/downscaling/data/design_points.csv")
+ # design_points <- readr::read_csv(file.path(data_dir, "design_points.csv"))
+ # use the one under version control
+ design_points <- readr::read_csv("~/downscaling/data/design_points.csv")
+ ca_field_attributes <- ca_field_attributes |>
+ dplyr::filter(site_id %in% design_points$site_id)
+} else if (!PRODUCTION) {
+ ca_field_attributes <- ca_field_attributes |>
+ dplyr::slice_sample(n = 1000)
+}
+
+ca_fields <- ca_field_attributes |>
+ dplyr::select(site_id, pft, crop) |>
+ dplyr::distinct() |>
+ tidyr::crossing(year = 2016:2024) |>
+ dplyr::group_by(site_id) |>
+ dplyr::mutate(first_year = min(year)) |>
+ dplyr::ungroup()
+
+# Planting (annuals)
+planting_annual <- ca_fields |>
+ dplyr::filter(pft == "annual crop") |>
+ dplyr::transmute(
+ event_type = "planting",
+ date = paste0(year, "-03-15"),
+ site_id = site_id,
+ # required for planting
+ leaf_c_kg_m2 = 0.05,
+ crop = crop
+ )
+
+# Planting (woody): first year
+planting_woody <- ca_fields |>
+ dplyr::filter(pft == "woody perennial crop") |>
+ dplyr::filter(year == first_year) |>
+ dplyr::transmute(
+ event_type = "planting",
+ date = paste0(year, "-03-15"),
+ site_id = site_id,
+ leaf_c_kg_m2 = 0.2,
+ crop = crop
+ )
+
+# Fertilization
+fertilization <- ca_fields |>
+ dplyr::transmute(
+ event_type = "fertilization",
+ date = paste0(year, "-02-11"),
+ site_id = site_id,
+ org_n_kg_m2 = 0.0,
+ org_c_kg_m2 = 0.0,
+ nh4_n_kg_m2 = 0.02,
+ no3_n_kg_m2 = 0.03
+ )
+
+# Organic Matter Addition
+organic_matter_addition <- ca_fields |>
+ dplyr::transmute(
+ event_type = "fertilization",
+ date = paste0(year, "-03-11"),
+ site_id = site_id,
+ org_n_kg_m2 = 0.05,
+ org_c_kg_m2 = 0.5,
+ nh4_n_kg_m2 = 0.0,
+ no3_n_kg_m2 = 0.0
+ )
+
+# Harvest
+harvest <- ca_fields |>
+ dplyr::transmute(
+ event_type = "harvest",
+ date = paste0(year, "-10-15"),
+ site_id = site_id,
+ frac_above_removed_0to1 = 0.10,
+ frac_below_removed_0to1 = 0.0,
+ frac_above_to_litter_0to1 = 0.0,
+ frac_below_to_litter_0to1 = 0.0,
+ crop = crop
+ )
+
+# Pruning (woody)
+pruning <- ca_fields |>
+ dplyr::filter(pft == "woody perennial crop") |>
+ dplyr::mutate(offset = year - first_year) |>
+ dplyr::filter(offset %% 4 == 1) |>
+ dplyr::transmute(
+ event_type = "harvest",
+ date = paste0(year, "-12-15"),
+ site_id = site_id,
+ frac_above_removed_0to1 = 0.30,
+ frac_below_removed_0to1 = 0.0,
+ frac_above_to_litter_0to1 = 0.0,
+ frac_below_to_litter_0to1 = 0.0,
+ crop = crop
+ )
+
+# Tillage
+tillage <- ca_fields |>
+ dplyr::filter(pft == "annual crop") |>
+ tidyr::crossing(till_suffix = c("-03-01", "-11-01")) |>
+ dplyr::transmute(
+ event_type = "tillage",
+ date = paste0(year, till_suffix),
+ site_id = site_id,
+ tillage_eff_0to1 = 0.10
+ )
+
+# Irrigation (both pfts): 3 per month for all months
+# TODO: Should annual crops skip irrigation during fallow season?
+months <- sprintf("%02d", 1:12)
+days <- c("05", "15", "25")
+
+irrigation <- ca_fields |>
+ tidyr::crossing(month = months, day = days) |>
+ dplyr::transmute(
+ event_type = "irrigation",
+ date = paste0(year, "-", month, "-", day),
+ site_id = site_id,
+ amount_mm = 40,
+ method = "soil"
+ )
+
+# Combine and order by site/date
+events_all <- dplyr::bind_rows(
+ planting_annual, planting_woody,
+ harvest, pruning,
+ tillage, irrigation,
+ fertilization, organic_matter_addition
+) |>
+ dplyr::arrange(site_id, date)
+
+# --- Build site objects per schema ------------------------------------------
+# Helper: drop NULL/NA fields from a named list
+compact_list <- function(x) {
+ Filter(function(v) !(is.null(v) || (length(v) == 1 && is.atomic(v) && is.na(v))), x)
+}
+
+sites <- unique(events_all$site_id)
+
+site_objs <- purrr::map(sites, function(sid) {
+ evs_df <- events_all |>
+ dplyr::filter(site_id == sid) |>
+ dplyr::arrange(date)
+
+ # Only include required fields for each event type
+ evs_list <- purrr::pmap(
+ evs_df,
+ function(event_type, date, site_id, leaf_c_kg_m2 = NA_real_, frac_above_removed_0to1 = NA_real_,
+ frac_below_removed_0to1 = NA_real_, frac_above_to_litter_0to1 = NA_real_,
+ frac_below_to_litter_0to1 = NA_real_, amount_mm = NA_real_, method = NA_character_,
+ tillage_eff_0to1 = NA_real_, org_c_kg_m2 = NA_real_, org_n_kg_m2 = NA_real_,
+ nh4_n_kg_m2 = NA_real_, no3_n_kg_m2 = NA_real_,
+ crop = NA_character_, ...) {
+ base <- list(event_type = event_type, date = date)
+
+ # Add required fields per event type
+ if (event_type == "planting" && !is.na(leaf_c_kg_m2)) {
+ base$leaf_c_kg_m2 <- leaf_c_kg_m2
+ if (!is.na(crop)) base$crop <- crop
+ }
+ if (event_type == "harvest" && !is.na(frac_above_removed_0to1)) {
+ base$frac_above_removed_0to1 <- frac_above_removed_0to1
+ if (!is.na(frac_below_removed_0to1)) base$frac_below_removed_0to1 <- frac_below_removed_0to1
+ if (!is.na(frac_above_to_litter_0to1)) base$frac_above_to_litter_0to1 <- frac_above_to_litter_0to1
+ if (!is.na(frac_below_to_litter_0to1)) base$frac_below_to_litter_0to1 <- frac_below_to_litter_0to1
+ if (!is.na(crop)) base$crop <- crop
+ }
+ if (event_type == "irrigation" && !is.na(amount_mm) && !is.na(method)) {
+ base$amount_mm <- amount_mm
+ base$method <- method
+ }
+ if (event_type == "tillage" && !is.na(tillage_eff_0to1)) {
+ base$tillage_eff_0to1 <- tillage_eff_0to1
+ }
+ if (event_type == "fertilization" && !is.na(org_c_kg_m2)) {
+ base$org_c_kg_m2 <- org_c_kg_m2
+ if (!is.na(org_n_kg_m2)) base$org_n_kg_m2 <- org_n_kg_m2
+ }
+
+ compact_list(base)
+ }
+ )
+ list(
+ pecan_events_version = "0.1.0",
+ site_id = sid,
+ events = evs_list
+ )
+})
+
+# TODO add PEcAn Schema info
+
+# Validate JSON given schema
+# schema <- "data/pecan_events_schema_v0.1.0.json"
+# validator <- jsonvalidate::json_validator(schema)
+# json_txt_temp <- jsonlite::toJSON(site_objs, auto_unbox = TRUE)
+# if (!validator(json_txt_temp)) {
+# stop("JSON does not match schema")
+# }
+
+# --- Write JSON --------------------------------------------------------------
+
+# Complete
+jsonlite::write_json(site_objs, path = output_json, pretty = FALSE, auto_unbox = TRUE)
+# Single site example
+jsonlite::write_json(site_objs[1:3], path = gsub(".json", "_3sites.json", output_json), pretty = TRUE, auto_unbox = TRUE)
+# When dealing with full dataset, may need to write to more performant files
+# #Sample
+# jsonlite::write_json(site_objs[1:100], path = sample_output_json, pretty = TRUE, auto_unbox = TRUE)
+
+# # Complete - compressed
+output_json_gz <- paste0(output_json, ".gz")
+gz_con <- gzfile(output_json_gz, "w")
+jsonlite::write_json(site_objs, path = gz_con, pretty = FALSE, auto_unbox = TRUE)
+close(gz_con)
+
+# --- Profiling End ---
+# Rprof(NULL)
+# summaryRprof("profiling.out")
+# --- End Profiling End ---
diff --git a/modules/data.land/man/clip_and_save_raster_file.Rd b/modules/data.land/man/clip_and_save_raster_file.Rd
new file mode 100644
index 00000000000..8058fcb5096
--- /dev/null
+++ b/modules/data.land/man/clip_and_save_raster_file.Rd
@@ -0,0 +1,38 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/clip_and_save_raster_file.R
+\name{clip_and_save_raster_file}
+\alias{clip_and_save_raster_file}
+\title{Clip and Save a Raster File}
+\usage{
+clip_and_save_raster_file(
+ input_path,
+ polygon,
+ out_path,
+ mask = TRUE,
+ overwrite = TRUE
+)
+}
+\arguments{
+\item{input_path}{Character. Path to the input raster file.}
+
+\item{polygon}{An object or file coercible to a `SpatVector` by `terra::vect()`
+(e.g., an `sf` object, a `SpatVector`, or a file path to a vector dataset).
+used for clipping and masking. Must have a valid CRS.}
+
+\item{out_path}{Character. Path to save the processed raster.}
+
+\item{mask}{Logical: Should pixels outside the polygon but inside its bounding box
+be masked out (TRUE) or included (FALSE)?}
+
+\item{overwrite}{Logical: Replace output file if it already exists?}
+}
+\value{
+Invisibly, the clipped `SpatRaster` object. The raster is also saved to `out_path`.
+}
+\description{
+Clips a raster to a polygon bounding box, optionally masks to polygon, and saves the
+output in the same format as the input.
+}
+\author{
+David LeBauer
+}
diff --git a/modules/data.land/man/extract_soil_gssurgo.Rd b/modules/data.land/man/extract_soil_gssurgo.Rd
index d8231132824..4f696305016 100644
--- a/modules/data.land/man/extract_soil_gssurgo.Rd
+++ b/modules/data.land/man/extract_soil_gssurgo.Rd
@@ -9,20 +9,23 @@ extract_soil_gssurgo(
lat,
lon,
size = 1,
- radius = 500,
+ grid_size = 3,
+ grid_spacing = 100,
depths = c(0.15, 0.3, 0.6)
)
}
\arguments{
\item{outdir}{Output directory for writing down the netcdf file}
-\item{lat}{Latitude}
+\item{lat}{Latitude of center point (single numeric value)}
-\item{lon}{Longitude}
+\item{lon}{Longitude of center point (single numeric value)}
\item{size}{Ensemble size}
-\item{radius}{radius in meters is used to take soil type samples around the site}
+\item{grid_size}{Size of the spatial sampling grid around the center point (default: 3)}
+
+\item{grid_spacing}{Spacing between grid cells in meters (default: 100)}
\item{depths}{Standard set of soil depths in m to create the ensemble of soil profiles with.}
}
@@ -32,6 +35,19 @@ It returns the address for the generated soil netcdf file
\description{
Extract soil data from gssurgo
}
+\details{
+This function takes a single lat/lon point and creates a spatial grid
+around it for sampling soil variability. The grid_size parameter determines
+how many grid points (grid_size x grid_size) are created around the center point.
+}
+\section{Current Limitations}{
+
+- MUKEY frequency weighting treats occurrence counts as proportional to area coverage
+- This approximation may introduce geometric bias for irregular polygon data
+- Buffer radius is set to grid_spacing/2 to reduce overlapping queries, but may still miss coverage
+- True area-weighted aggregation using polygon geometries is planned (see issue #3609)
+}
+
\examples{
\dontrun{
outdir <- "~/paleon/envTest"
@@ -41,5 +57,5 @@ Extract soil data from gssurgo
}
}
\author{
-Hamze Dokoohaki
+Hamze Dokoohaki, Akash
}
diff --git a/modules/data.land/man/fertilizer_composition_data.Rd b/modules/data.land/man/fertilizer_composition_data.Rd
new file mode 100644
index 00000000000..4a7c41a23b2
--- /dev/null
+++ b/modules/data.land/man/fertilizer_composition_data.Rd
@@ -0,0 +1,43 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/data.R
+\docType{data}
+\name{fertilizer_composition_data}
+\alias{fertilizer_composition_data}
+\title{Fertilizer Nutrient Composition Table}
+\format{
+A tibble with one row per fertilizer type and the following columns:
+\describe{
+ \item{name}{\code{character}. Short identifier from SWAT (e.g., \code{"urea"}, \code{"manure"}).}
+ \item{description}{\code{character}. Longer description of the fertilizer or manure type.}
+ \item{fraction_mineral_n}{\code{numeric}. Fraction of total nitrogen in mineral form.}
+  \item{fraction_nh3_n}{\code{numeric}. Fraction of fertilizer by mass that is ammonium-N (NH\eqn{_3}-N).}
+ \item{fraction_no3_n}{\code{numeric}. Fraction of fertilizer by mass that is nitrate-N (NO\eqn{_3}-N).
+ Computed as \code{fraction_mineral_n - fraction_nh3_n}.}
+ \item{fraction_organic_n}{\code{numeric}. Fraction of organic matter that is nitrogen.}
+ \item{fraction_c}{\code{numeric}. Fraction of mass that is carbon.}
+ \item{cn_ratio}{\code{numeric}. Carbon-to-nitrogen ratio for organic matter.
+    Assigned based on DayCent organic matter parameterizations.}
+}
+}
+\source{
+https://github.com/swat-model/swatplus
+
+DayCent model default parameter file: `omad.100` obtained from the Soil Carbon Solutions Center, https://www.soilcarbonsolutionscenter.com
+}
+\usage{
+fertilizer_composition_data
+}
+\description{
+A dataset of fertilizer and organic matter addition types
+and their nitrogen and carbon composition, based on the SWAT model's
+`fertilizer.frt` table and DayCent model defaults for organic matter
+C:N ratio parameters.
+}
+\details{
+This table is based on SWAT model's \code{fertilizer.frt} file, and uses
+C:N ratios (\code{cn_ratio}) from DayCent model default parameter files.
+\code{fraction_nh3_n} and \code{fraction_no3_n} represent the fraction of
+fertilizer by mass that is ammonium-N and nitrate-N, respectively. This is different from
+the SWAT model's definition of \code{fraction_nh3_n} as a fraction of the total mineral N.
+}
+\keyword{datasets}
diff --git a/modules/data.land/man/gSSURGO.Query.Rd b/modules/data.land/man/gSSURGO.Query.Rd
index 27a7a4d2cb2..51d6def7dce 100644
--- a/modules/data.land/man/gSSURGO.Query.Rd
+++ b/modules/data.land/man/gSSURGO.Query.Rd
@@ -15,17 +15,47 @@ gSSURGO.Query(
\item{fields}{a character vector of the fields to be extracted. See details and the default argument to find out how to define fields.}
}
\value{
-a dataframe with soil properties. Units can be looked up from database documentation
+a dataframe with soil properties.
}
\description{
This function queries the gSSURGO database for a series of map unit keys
}
\details{
-Full documention of available tables and their relationships can be found here \url{www.sdmdataaccess.nrcs.usda.gov/QueryHelp.aspx}
-There have been occasions where NRCS made some minor changes to the structure of the API which this code is where those changes need
-to be implemneted here.
-Fields need to be defined with their associate tables. For example, sandtotal is a field in chorizon table which needs to be defined as chorizon.sandotal_(r/l/h), where
-r stands for the representative value, l stands for low and h stands for high. At the moment fields from mapunit, component, muaggatt, and chorizon tables can be extracted.
+This function queries the NRCS gSSURGO database using map unit keys (mukeys).
+\itemize{
+\item \strong{Available tables}: \code{mapunit}, \code{component}, \code{muaggatt}, \code{chorizon}, and \code{chfrags}.
+\item \strong{Field definitions}: Fields must be specified with their associated table name.
+For example, total sand content is stored in the \code{chorizon} table and must be
+requested as \code{chorizon.sandtotal_(r|l|h)}, where:
+\itemize{
+\item \code{r} = representative value
+\item \code{l} = low value
+\item \code{h} = high value
+}
+}
+
+\strong{Commonly queried fields and units} (see NRCS gSSURGO \href{https://www.nrcs.usda.gov/sites/default/files/2022-08/SSURGO-Metadata-Tables-and-Columns-Report.pdf}{"Tables and Columns Report"}
+for full list):\tabular{lll}{
+ Field \tab Description \tab Units \cr
+ \code{chorizon.cec7_r} \tab Cation exchange capacity at pH 7 \tab cmol(+)/kg \cr
+ \code{chorizon.sandtotal_r} \tab Total sand (<2 mm fraction) \tab \% \cr
+ \code{chorizon.silttotal_r} \tab Total silt (<2 mm fraction) \tab \% \cr
+ \code{chorizon.claytotal_r} \tab Total clay (<0.002 mm fraction) \tab \% \cr
+ \code{chorizon.om_r} \tab Organic matter (<2 mm soil) \tab \% \cr
+ \code{chorizon.hzdept_r} \tab Horizon top depth \tab cm \cr
+ \code{chfrags.fragvol_r} \tab Rock fragments \tab \% (by volume) \cr
+ \code{chorizon.dbthirdbar_r} \tab Bulk density at field capacity \tab g/cm³ \cr
+ \code{chorizon.ph1to1h2o_r} \tab Soil pH (1:1 H2O) \tab pH (unitless) \cr
+ \code{chorizon.cokey} \tab Component key (identifier) \tab — \cr
+ \code{chorizon.chkey} \tab Horizon key (identifier) \tab — \cr
+}
+
+
+\strong{API stability:} The NRCS occasionally modifies the API schema. If queries fail,
+adjustments may be required here to align with the updated structure.
+
+Full documentation of available tables and their relationships is provided in the
+\href{https://sdmdataaccess.nrcs.usda.gov/QueryHelp.aspx}{gSSURGO documentation}.
}
\examples{
\dontrun{
@@ -39,3 +69,6 @@ r stands for the representative value, l stands for low and h stands for high. A
"chorizon.cokey","chorizon.chkey"))
}
}
+\author{
+Hamze Dokoohaki, Akash
+}
diff --git a/modules/data.land/man/generate_soilgrids_ensemble.Rd b/modules/data.land/man/generate_soilgrids_ensemble.Rd
new file mode 100644
index 00000000000..eed9c4c3515
--- /dev/null
+++ b/modules/data.land/man/generate_soilgrids_ensemble.Rd
@@ -0,0 +1,34 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/IC_SOILGRID_Utilities.R
+\name{generate_soilgrids_ensemble}
+\alias{generate_soilgrids_ensemble}
+\title{Generate soil carbon ensemble members for specific depth}
+\usage{
+generate_soilgrids_ensemble(
+ processed_data,
+ site_id,
+ size,
+ depth_layer,
+ verbose = FALSE
+)
+}
+\arguments{
+\item{processed_data}{Output from preprocess_soilgrids_data()}
+
+\item{site_id}{Target site ID}
+
+\item{size}{Number of ensemble members to generate}
+
+\item{depth_layer}{Depth layer ("0-30cm" or "0-200cm")}
+
+\item{verbose}{Logical, print detailed progress information}
+}
+\value{
+Numeric vector of soil carbon values including uncertainty, length equal to size.
+}
+\description{
+Generates ensemble members for soil carbon at specified depth layer.
+Uses site-specific uncertainty when available; otherwise integrates over coefficient of
+variation distributions fit to population data. Samples are drawn from gamma distributions
+to ensure positive, right-skewed values appropriate for soil carbon estimates.
+}
diff --git a/modules/data.land/man/look_up_fertilizer_components.Rd b/modules/data.land/man/look_up_fertilizer_components.Rd
new file mode 100644
index 00000000000..a2d337a95d9
--- /dev/null
+++ b/modules/data.land/man/look_up_fertilizer_components.Rd
@@ -0,0 +1,68 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/look_up_fertilizer_components.R
+\name{look_up_fertilizer_components}
+\alias{look_up_fertilizer_components}
+\title{Calculate the Nitrogen and Carbon Content of a Fertilizer Application}
+\usage{
+look_up_fertilizer_components(
+ type,
+ amount,
+ fraction_organic_n = NULL,
+ fraction_organic_c = NULL
+)
+}
+\arguments{
+\item{type}{Character string specifying the type of fertilizer. Valid values include NN-PP-KK format (e.g., "45-5-10") as well
+as enumerated types including: "urea", "ammonium_nitrate", "compost", "manure", "dairy_fr", "beef_fr".
+See notes for full list of valid types.}
+
+\item{amount}{Numeric value specifying the amount of fertilizer applied in kg/ha.}
+
+\item{fraction_organic_n}{Optional numeric value specifying the fraction of the organic matter that is nitrogen.
+Used to define organic matter additions if not provided in the dataset.}
+
+\item{fraction_organic_c}{Optional numeric value specifying the fraction of the organic matter that is carbon.
+Used to define organic matter additions if not provided in the dataset.}
+}
+\value{
+A list containing:
+\itemize{
+\item \code{type}: The type of fertilizer used.
+\item \code{NO3_N}: The amount of nitrate nitrogen (NO3-N) in kg/ha.
+\item \code{NH4_N}: The amount of ammonium nitrogen (NH4-N) in kg/ha.
+\item \code{N_org}: The amount of organic nitrogen in kg/ha.
+\item \code{C_org}: The amount of organic carbon in kg/ha.
+}
+}
+\description{
+This function calculates the different forms of nitrogen (NO3-N, NH4-N, organic N) and organic carbon (C_org) in a fertilizer application.
+It can determine fertilizer nitrogen and carbon content using either a lookup table based on
+the SWAT model's \href{https://github.com/swat-model/swatplus/blob/main/data/Osu_1hru/fertilizer.frt}{\code{fertilizer.frt}}
+file, determine the fertilizer's nutrient content based on NN-PP-KK format, or use user-specified
+fractions of organic nitrogen and carbon.
+}
+\details{
+Consistent with assumptions in DayCent, DSSAT, and other models, urea is treated as NH3 because the
+transformation typically occurs within a day.
+}
+\note{
+The following is a list of valid fertilizer names:
+\itemize{
+\item Mineral fertilizers: ammonium_nitrate, anhydrous_ammonia, urea
+\item Fresh manures: manure, beef_fr, broil_fr, dairy_fr, duck_fr, goat_fr, horse_fr,
+layer_fr, sheep_fr, swine_fr, trkey_fr, veal_fr
+\item Compost: org_compost
+}
+}
+\examples{
+# View all available fertilizer types
+unique(PEcAn.data.land::fertilizer_composition_data$name)
+
+# Calculate components for different fertilizer types
+look_up_fertilizer_components("urea", 200)
+look_up_fertilizer_components("45-00-00", 200)
+look_up_fertilizer_components("org_compost", 1000)
+look_up_fertilizer_components("dairy_fr", 500)
+look_up_fertilizer_components("manure", 1000, fraction_organic_n = 0.02, fraction_organic_c = 0.08)
+
+}
diff --git a/modules/data.land/man/om2soc.Rd b/modules/data.land/man/om2soc.Rd
new file mode 100644
index 00000000000..115c6c491f2
--- /dev/null
+++ b/modules/data.land/man/om2soc.Rd
@@ -0,0 +1,20 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/soil_utils.R
+\name{om2soc}
+\alias{om2soc}
+\title{Convert organic matter to soil organic carbon}
+\usage{
+om2soc(om_percent)
+}
+\arguments{
+\item{om_percent}{organic matter percentage (0-100)}
+}
+\value{
+soil organic carbon percentage (0-100)
+}
+\description{
+Converts organic matter content to soil organic carbon using the Van Bemmelen factor (1.724).
+}
+\author{
+Akash
+}
diff --git a/modules/data.land/man/preprocess_soilgrids_data.Rd b/modules/data.land/man/preprocess_soilgrids_data.Rd
new file mode 100644
index 00000000000..9bb1b7c520a
--- /dev/null
+++ b/modules/data.land/man/preprocess_soilgrids_data.Rd
@@ -0,0 +1,21 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/IC_SOILGRID_Utilities.R
+\name{preprocess_soilgrids_data}
+\alias{preprocess_soilgrids_data}
+\title{Preprocess SoilGrids data for ensemble generation}
+\usage{
+preprocess_soilgrids_data(soil_data, depth_layers, verbose = FALSE)
+}
+\arguments{
+\item{soil_data}{Dataframe with SoilGrids soil carbon data}
+
+\item{depth_layers}{Character vector of depth layers to process (e.g., c("0-30cm", "0-200cm"))}
+
+\item{verbose}{Logical, print detailed progress information}
+}
+\value{
+List containing processed data and CV distributions for requested depths
+}
+\description{
+Preprocess SoilGrids data for ensemble generation
+}
diff --git a/modules/data.land/man/put_veg_module.Rd b/modules/data.land/man/put_veg_module.Rd
index ac03cab6162..4c76f7a0c2d 100644
--- a/modules/data.land/man/put_veg_module.Rd
+++ b/modules/data.land/man/put_veg_module.Rd
@@ -12,7 +12,6 @@ put_veg_module(
outfolder,
n.ensemble,
dir,
- machine,
model,
start_date,
end_date,
@@ -36,8 +35,6 @@ put_veg_module(
\item{dir}{dir path to dbfiles on local machine}
-\item{machine}{data frame, DB info regarding localhost machine id/hostname etc.}
-
\item{model}{model name, e.g. "ED2"}
\item{start_date}{date in "YYYY-MM-DD" format, in case of source==FIA it's the settings$run$start.date, otherwise start_date of the IC file in DB}
diff --git a/modules/data.land/man/soc2ocs.Rd b/modules/data.land/man/soc2ocs.Rd
new file mode 100644
index 00000000000..fa44936abd9
--- /dev/null
+++ b/modules/data.land/man/soc2ocs.Rd
@@ -0,0 +1,36 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/soil_utils.R
+\name{soc2ocs}
+\alias{soc2ocs}
+\title{Convert soil organic carbon concentration to organic carbon stock}
+\usage{
+soc2ocs(soc_percent, bulk_density, thickness, coarse_fraction = 0)
+}
+\arguments{
+\item{soc_percent}{soil organic carbon concentration (percent, 0-100)}
+
+\item{bulk_density}{bulk density (g/cm3)}
+
+\item{thickness}{layer thickness (cm)}
+
+\item{coarse_fraction}{coarse fragment volume fraction (0-1, default = 0)}
+}
+\value{
+organic carbon stock (kg/m2)
+}
+\description{
+Convert soil organic carbon concentration to organic carbon stock
+}
+\examples{
+soc2ocs(2.5, 1.3, 30, 0.15)
+
+# Multiple soil layers
+soc_pct <- c(3.2, 2.1, 1.8)
+bd_g_cm3 <- c(1.2, 1.4, 1.5)
+thickness_cm <- c(15, 15, 30)
+coarse_fraction <- c(0.10, 0.20, 0.25)
+soc2ocs(soc_pct, bd_g_cm3, thickness_cm, coarse_fraction)
+}
+\author{
+Akash
+}
diff --git a/modules/data.land/man/soil.units.Rd b/modules/data.land/man/soil.units.Rd
index ca37d134142..3d53e4be8d8 100644
--- a/modules/data.land/man/soil.units.Rd
+++ b/modules/data.land/man/soil.units.Rd
@@ -44,6 +44,11 @@ Supported variables are:
\item \code{soil_thermal_conductivity_at_saturation}
\item \code{soil_thermal_capacity}
\item \code{soil_albedo}
+\item \code{slpotwp}
+\item \code{slpotcp}
+\item \code{slcpd}
+\item \code{slden}
+\item \code{soil_organic_carbon_stock}
}
}
\examples{
diff --git a/modules/data.land/man/soilgrids_ic_process.Rd b/modules/data.land/man/soilgrids_ic_process.Rd
new file mode 100644
index 00000000000..b45813c7e06
--- /dev/null
+++ b/modules/data.land/man/soilgrids_ic_process.Rd
@@ -0,0 +1,54 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/IC_SOILGRID_Utilities.R
+\name{soilgrids_ic_process}
+\alias{soilgrids_ic_process}
+\title{SoilGrids Initial Conditions (IC) Utilities}
+\usage{
+soilgrids_ic_process(
+ settings,
+ dir,
+ depth = c(0.3, 2),
+ overwrite = FALSE,
+ verbose = FALSE
+)
+}
+\arguments{
+\item{settings}{PEcAn settings list containing site information}
+
+\item{dir}{Output directory for IC files}
+
+\item{depth}{Numeric vector of depth values in meters. Can be single value
+or multiple values c(0.3, 2.0). Default: c(0.3, 2.0)}
+
+\item{overwrite}{Overwrite existing files? (Default: FALSE)}
+
+\item{verbose}{Print detailed progress information? (Default: FALSE)}
+}
+\value{
+List of paths to generated IC files, organized by site ID
+}
+\description{
+Functions for generating soil carbon IC files from SoilGrids250m data
+}
+\details{
+This module provides functions for extracting, processing, and generating
+ ensemble members for soil carbon initial conditions using SoilGrids data.
+ All soil carbon values are in kg/m2.
+
+Process SoilGrids data for initial conditions
+}
+\examples{
+\dontrun{
+# Process both depths (default)
+settings <- PEcAn.settings::read.settings("pecan.xml")
+output_dir <- withr::local_tempdir()
+ic_files <- soilgrids_ic_process(settings, dir = output_dir)
+
+# Process only 30cm depth
+ic_files <- soilgrids_ic_process(settings, dir = output_dir, depth = 0.3)
+}
+
+}
+\author{
+Akash
+}
diff --git a/modules/data.land/man/validate_events_json.Rd b/modules/data.land/man/validate_events_json.Rd
new file mode 100644
index 00000000000..bab649c3a3c
--- /dev/null
+++ b/modules/data.land/man/validate_events_json.Rd
@@ -0,0 +1,36 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/validate_events.R
+\name{validate_events_json}
+\alias{validate_events_json}
+\title{Validate PEcAn events JSON against schema v0.1.0}
+\usage{
+validate_events_json(events_json, verbose = TRUE)
+}
+\arguments{
+\item{events_json}{character. Path to the JSON file to validate.}
+
+\item{verbose}{logical. When `TRUE`, include detailed AJV messages on error.}
+}
+\value{
+Logical TRUE if valid, FALSE if invalid.
+NA if validator unavailable.
+}
+\description{
+Validates a PEcAn events JSON file (single-site object or an array of site
+objects) against the bundled JSON Schema (draft 2020-12) using the AJV
+engine.
+}
+\details{
+- Logs an error and returns FALSE if the JSON file does not exist or does
+ not conform to the schema.
+- Logs a warning and returns TRUE if the optional package `jsonvalidate` is
+ not installed, so calling code can proceed without a hard dependency.
+}
+\examples{
+# validate_events_json(system.file("events_fixtures/events_site1.json",
+# package = "PEcAn.data.land"))
+
+}
+\author{
+David LeBauer
+}
diff --git a/modules/data.land/tests/Rcheck_reference.log b/modules/data.land/tests/Rcheck_reference.log
index 3974222b7e8..1430528137e 100644
--- a/modules/data.land/tests/Rcheck_reference.log
+++ b/modules/data.land/tests/Rcheck_reference.log
@@ -13,7 +13,7 @@
* package encoding: UTF-8
* checking package namespace information ... OK
* checking package dependencies ... NOTE
-Imports includes 36 non-default packages.
+Imports includes 35 non-default packages.
Importing from so many packages makes the package vulnerable to any of
them becoming unavailable. Move as many as possible to Suggests and
use conditionally.
diff --git a/modules/data.land/tests/testthat/test-IC_BADM_Utilities.R b/modules/data.land/tests/testthat/test-IC_BADM_Utilities.R
new file mode 100644
index 00000000000..d089c16e08b
--- /dev/null
+++ b/modules/data.land/tests/testthat/test-IC_BADM_Utilities.R
@@ -0,0 +1,97 @@
+test_that("Read.IC.info.BADM returns expected structure and pools for a site", {
+ lat <- 42.5378
+ lon <- -72.1715
+ siteid <- "US-Ha1"
+ result <- Read.IC.info.BADM(lat, lon)
+ expect_s3_class(result, "data.frame")
+ expect_true(all(c("Site", "Var", "Date", "Organ", "AGB",
+ "soil_organic_carbon_content", "litter_carbon_content", "root_carbon_content") %in% names(result)))
+ expect_true(any(!is.na(result$AGB)) || any(!is.na(result$soil_organic_carbon_content)) ||
+ any(!is.na(result$litter_carbon_content)) || any(!is.na(result$root_carbon_content)))
+ expect_true(siteid %in% result$Site)
+})
+
+test_that("Read.IC.info.BADM falls back to L1 and ALL if no L2 data", {
+ invalid_lat <- 0
+ invalid_lon <- 0
+ result <- Read.IC.info.BADM(invalid_lat, invalid_lon)
+ expect_s3_class(result, "data.frame")
+ expect_true(nrow(result) >= 0)
+})
+
+
+test_that("EPA_ecoregion_finder returns valid L1 and L2 codes", {
+ eco <- EPA_ecoregion_finder(42.5378, -72.1715)
+ expect_s3_class(eco, "data.frame")
+ expect_true(all(c("L1", "L2") %in% names(eco)))
+ expect_true(nrow(eco) == 1)
+ expect_true(!is.na(eco$L1) && !is.na(eco$L2))
+})
+
+test_that("netcdf.writer.BADM creates a NetCDF file and returns its path", {
+ outdir <- tempdir()
+ file_path <- netcdf.writer.BADM(42.5378, -72.1715, "US-Ha1", outdir, ens = 1)
+ expect_true(file.exists(file_path))
+ expect_true(grepl("\\.nc$", file_path))
+})
+
+test_that("netcdf.writer.BADM creates output directory if missing", {
+ tmp_outdir <- file.path(tempdir(), "badm_ic_test")
+ if (dir.exists(tmp_outdir)) unlink(tmp_outdir, recursive = TRUE)
+ file_path <- netcdf.writer.BADM(42.5378, -72.1715, "US-Ha1", tmp_outdir, ens = 2)
+ expect_true(file.exists(file_path))
+ expect_true(dir.exists(tmp_outdir))
+})
+
+test_that("BADM_IC_process generates correct number of ensemble files for single-site", {
+ settings <- list(
+ run = list(site = list(id = "US-Ha1", lat = 42.5378, lon = -72.1715)),
+ ensemble = list(size = 3)
+ )
+ out_files <- BADM_IC_process(settings, dir = tempdir(), overwrite = TRUE)
+ expect_length(out_files, 3)
+ expect_true(all(file.exists(unlist(out_files))))
+})
+
+test_that("BADM_IC_process generates correct number of ensemble files for multi-site", {
+ settings <- list(
+ list(
+ run = list(site = list(id = "US-Ha1", lat = 42.5378, lon = -72.1715)),
+ ensemble = list(size = 2)
+ ),
+ list(
+ run = list(site = list(id = "US-WCr", lat = 45.805925, lon = -90.07961)),
+ ensemble = list(size = 3)
+ )
+ )
+ out_files <- BADM_IC_process(settings, dir = tempdir(), overwrite = TRUE)
+ expect_length(out_files, 5)
+ expect_true(all(file.exists(unlist(out_files))))
+})
+
+test_that("BADM_IC_process handles missing or malformed settings gracefully", {
+ settings <- list(
+ list(
+ run = list(site = list(id = "US-Ha1", lat = NA, lon = -72.1715)),
+ ensemble = list(size = 1)
+ )
+ )
+ expect_error(BADM_IC_process(settings, dir = tempdir(), overwrite = TRUE))
+})
+
+test_that("BADM_IC_process handles missing ensemble size with fallback", {
+ settings <- list(
+ run = list(site = list(id = "US-Ha1", lat = 42.5378, lon = -72.1715)),
+ ensemble = list(size = 0)
+ )
+ out_files <- BADM_IC_process(settings, dir = tempdir(), overwrite = TRUE)
+ expect_length(out_files, 1)
+})
+
+test_that("Read.IC.info.BADM returns empty dataframe for invalid coordinates", {
+ expect_error(Read.IC.info.BADM(999, 999))
+})
+
+test_that("EPA_ecoregion_finder handles invalid coordinates gracefully", {
+ expect_error(EPA_ecoregion_finder(999, 999))
+})
\ No newline at end of file
diff --git a/modules/data.land/tests/testthat/test-clip_and_save_raster_file.R b/modules/data.land/tests/testthat/test-clip_and_save_raster_file.R
new file mode 100644
index 00000000000..7dbc0f59a18
--- /dev/null
+++ b/modules/data.land/tests/testthat/test-clip_and_save_raster_file.R
@@ -0,0 +1,73 @@
+# helper to create a small test raster
+make_raster <- function(outfile, crs = "EPSG:4326") {
+ r <- terra::rast(matrix(1:16, 4, 4),
+ extent = terra::ext(0, 4, 0, 4),
+ crs = crs
+ )
+ terra::writeRaster(r, outfile, filetype = "GTiff", overwrite = TRUE)
+ return(outfile)
+}
+
+test_that("clip & mask works: output clipped to polygon bbox and masked", {
+ in_r <- withr::local_tempfile(fileext = ".tif")
+ out_f <- withr::local_tempfile(fileext = ".tif")
+
+ make_raster(outfile = in_r)
+
+ poly <- terra::as.polygons(
+ terra::ext(1, 3, 1, 3),
+ crs = "EPSG:4326"
+ )
+
+ clip_and_save_raster_file(input_path = in_r, polygon = poly, out_path = out_f, mask = TRUE)
+
+ expect_true(file.exists(out_f))
+
+ r_out <- terra::rast(out_f)
+ expect_equal(terra::ext(r_out), terra::ext(sf::st_bbox(poly)))
+
+ inside_vals <- terra::values(terra::mask(r_out, poly, inverse = FALSE))
+ expect_true(all(!is.na(inside_vals)))
+
+ outside_vals <- terra::values(terra::mask(r_out, poly, inverse = TRUE))
+ expect_true(all(is.na(outside_vals)))
+})
+
+test_that("clip without mask retains all values within bbox", {
+ in_r <- withr::local_tempfile(fileext = ".tif")
+ make_raster(outfile = in_r)
+
+ poly <- sf::st_as_sf(
+ sf::st_as_sfc(
+ sf::st_bbox(c(xmin = 1, ymin = 1, xmax = 3, ymax = 3), crs = sf::st_crs(4326))
+ )
+ )
+ out_f <- withr::local_tempfile(fileext = ".tif")
+
+ clip_and_save_raster_file(in_r, poly, out_f, mask = FALSE)
+ r_out <- terra::rast(out_f)
+ expect_false(any(is.na(terra::values(r_out))))
+})
+
+test_that("preserves CRS and filetype", {
+ in_r_path <- withr::local_tempfile(fileext = ".tif")
+ make_raster(outfile = in_r_path, crs = "EPSG:3857")
+
+ spatvect_raster <- terra::rast(in_r_path)
+
+ poly <- sf::st_as_sf(
+ sf::st_as_sfc(
+ sf::st_bbox(c(xmin = 1, ymin = 1, xmax = 3, ymax = 3), crs = sf::st_crs(3857))
+ )
+ )
+ out_f_path <- withr::local_tempfile(fileext = ".tif")
+
+ clip_and_save_raster_file(input_path = in_r_path, polygon = poly, out_path = out_f_path)
+ r_out <- terra::rast(out_f_path)
+
+ expect_equal(
+ tools::file_ext(terra::sources(r_out)[1]),
+ tools::file_ext(terra::sources(spatvect_raster)[1])
+ )
+ expect_true(terra::same.crs(r_out, spatvect_raster))
+})
diff --git a/modules/data.land/tests/testthat/test-extract_soil_nc.R b/modules/data.land/tests/testthat/test-extract_soil_nc.R
new file mode 100644
index 00000000000..9574ddd952c
--- /dev/null
+++ b/modules/data.land/tests/testthat/test-extract_soil_nc.R
@@ -0,0 +1,105 @@
+context("extract_soil_gssurgo")
+
+test_that("extract_soil_gssurgo returns valid NetCDF files for valid US coordinates", {
+ skip_on_cran()
+ skip_on_ci()
+ lat <- 40.1164
+ lon <- -88.2434
+ tmp_outdir <- withr::local_tempdir("gssurgo_test_")
+
+ res <- extract_soil_gssurgo(
+ outdir = tmp_outdir,
+ lat = lat,
+ lon = lon,
+ size = 2,
+ grid_size = 3,
+ grid_spacing = 100,
+ depths = c(0.15, 0.30)
+ )
+
+ expect_false(is.null(res))
+
+ expect_type(res, "list")
+ expect_gt(length(res), 1)
+ expect_true(all(names(res) == "path"))
+
+ # Validate files exist
+ file_paths <- unlist(res)
+ expect_true(all(file.exists(file_paths)))
+
+ # Validate NetCDF content
+ if (requireNamespace("ncdf4", quietly = TRUE)) {
+ expected_vars <- c("fraction_of_sand_in_soil", "fraction_of_silt_in_soil",
+ "fraction_of_clay_in_soil", "soil_organic_carbon_stock")
+
+ # Skip first ensemble member (first ensemble member always uses the reported values without sampling)
+  # and test a subsequent member instead, since later members are simulated with uncertainty
+ nc <- ncdf4::nc_open(file_paths[2])
+ on.exit(ncdf4::nc_close(nc), add = TRUE)
+
+ # Check required variables exist
+ for (var in expected_vars) {
+ expect_true(var %in% names(nc$var))
+ }
+ # Validate data quality
+ sand <- ncdf4::ncvar_get(nc, "fraction_of_sand_in_soil")
+ silt <- ncdf4::ncvar_get(nc, "fraction_of_silt_in_soil")
+ clay <- ncdf4::ncvar_get(nc, "fraction_of_clay_in_soil")
+ soc <- ncdf4::ncvar_get(nc, "soil_organic_carbon_stock")
+
+ expect_true(all(is.finite(sand)))
+ expect_true(all(is.finite(soc)))
+ expect_true(all(sand >= 0 & sand <= 1))
+ expect_true(all(silt >= 0 & silt <= 1))
+ expect_true(all(clay >= 0 & clay <= 1))
+ expect_true(all(soc >= 0))
+
+ # Soil texture fractions should sum to ~1
+ texture_sum <- sand + silt + clay
+ expect_true(all(abs(texture_sum - 1) < 0.01))
+ }
+})
+
+test_that("extract_soil_gssurgo performance is reasonable", {
+ skip_on_cran()
+ skip_on_ci()
+ tmp_outdir <- withr::local_tempdir("gssurgo_test_")
+
+ start_time <- Sys.time()
+ res <- extract_soil_gssurgo(
+ outdir = tmp_outdir,
+ lat = 40.1164,
+ lon = -88.2434,
+ size = 1,
+ grid_size = 3,
+ grid_spacing = 100,
+ depths = c(0.15)
+ )
+ end_time <- Sys.time()
+ exec_time <- as.numeric(difftime(end_time, start_time, units = "secs"))
+ expect_lt(exec_time, 40)
+})
+
+test_that("extract_soil_gssurgo handles ensemble generation", {
+ skip_on_cran()
+ skip_on_ci()
+ tmp_outdir <- withr::local_tempdir("gssurgo_test_")
+
+ res <- extract_soil_gssurgo(
+ outdir = tmp_outdir,
+ lat = 40.1164,
+ lon = -88.2434,
+ size = 3,
+ grid_size = 3,
+ grid_spacing = 100,
+ depths = c(0.15, 0.30)
+ )
+
+ expect_false(is.null(res))
+
+ expect_type(res, "list")
+ expect_equal(length(res), 4)
+
+ file_paths <- unlist(res)
+ expect_true(all(file.exists(file_paths)))
+})
\ No newline at end of file
diff --git a/modules/data.land/tests/testthat/test-match_species_id.R b/modules/data.land/tests/testthat/test-match_species_id.R
index ae3ba6dafb3..53285ff6763 100644
--- a/modules/data.land/tests/testthat/test-match_species_id.R
+++ b/modules/data.land/tests/testthat/test-match_species_id.R
@@ -22,8 +22,14 @@ test_that("Species matching works", {
user = "bety",
password = "bety",
host = "localhost",
- driver = "Postgres")
- con <- PEcAn.DB::db.open(db_params)
+ driver = "Postgres"
+ )
+ con <- tryCatch(
+ PEcAn.DB::db.open(db_params),
+ error = function(e) NULL
+ )
+
+ skip_if(is.null(con), "No database connection available for species matching tests.")
test_merge(c('ACRU', 'TSCA'), 'usda', con)
test_merge(c(316L, 261L), 'fia', con)
@@ -31,11 +37,13 @@ test_that("Species matching works", {
test_table <- data.frame(
bety_species_id = c(30L, 1419L),
- input_code = c('AceRub', 'TsuCan'))
+ input_code = c('AceRub', 'TsuCan')
+ )
test_merge(
input_codes = test_table$input_code,
format_name = 'custom',
bety = con,
- translation_table = test_table)
+ translation_table = test_table
+ )
})
diff --git a/modules/data.land/tests/testthat/test-validate_events_json.R b/modules/data.land/tests/testthat/test-validate_events_json.R
new file mode 100644
index 00000000000..9522aa29101
--- /dev/null
+++ b/modules/data.land/tests/testthat/test-validate_events_json.R
@@ -0,0 +1,32 @@
+context("validate_events_json")
+
+testthat::test_that("validate_events_json validates good fixtures", {
+ f1 <- system.file("events_fixtures/events_site1.json", package = "PEcAn.data.land", mustWork = TRUE)
+ f2 <- system.file("events_fixtures/events_site1_site2.json", package = "PEcAn.data.land", mustWork = TRUE)
+ testthat::expect_true(PEcAn.data.land::validate_events_json(f1))
+ testthat::expect_true(PEcAn.data.land::validate_events_json(f2))
+})
+
+testthat::test_that("validate_events_json returns FALSE on invalid JSON", {
+ bad <- withr::local_tempfile(fileext = ".json")
+ # Missing required field: events
+ jsonlite::write_json(list(pecan_events_version = "0.1.0", site_id = "X"), bad, auto_unbox = TRUE)
+ testthat::expect_false(PEcAn.data.land::validate_events_json(bad))
+})
+
+testthat::test_that("validate_events_json returns NA if jsonvalidate is unavailable", {
+ f1 <- system.file("events_fixtures/events_site1.json", package = "PEcAn.data.land", mustWork = TRUE)
+ # Use testthat mocking to simulate missing jsonvalidate pkg by overriding base::requireNamespace
+ testthat::with_mocked_bindings(
+ requireNamespace = function(pkg, quietly = TRUE) {
+ if (identical(pkg, "jsonvalidate")) {
+ return(FALSE)
+ }
+ base::requireNamespace(pkg, quietly = quietly)
+ },
+ {
+ testthat::expect_true(is.na(PEcAn.data.land::validate_events_json(f1)))
+ },
+ .package = "base"
+ )
+})
diff --git a/modules/data.land/tests/testthat/test.look_up_fertilizer_components.R b/modules/data.land/tests/testthat/test.look_up_fertilizer_components.R
new file mode 100644
index 00000000000..1e3f3b3a403
--- /dev/null
+++ b/modules/data.land/tests/testthat/test.look_up_fertilizer_components.R
@@ -0,0 +1,84 @@
+test_that("N application rate from pre-defined fertilizer types works as expected", {
+ # Test for urea - check actual values from fertilizer_composition_data
+ result <- look_up_fertilizer_components("urea", 100)
+ expect_equal(result,
+ list(type = "urea",
+ NO3_N = 0, NH4_N = 46,
+ N_org = 0, C_org = 0)
+ )
+ # Test for anhydrous ammonia instead of ammonium nitrate for a clearer test
+ result <- look_up_fertilizer_components("anhydrous_ammonia", 100)
+ expect_equal(result,
+ list(type = "anhydrous_ammonia",
+ NO3_N = 0, NH4_N = 82,
+ N_org = 0, C_org = 0)
+ )
+})
+
+test_that("N fertilizer calculation from NN-PP-KK format works as expected", {
+ ## 200kg/ha of 45-00-00 --> 90kg/ha NO3-N
+ ## Because function assumes all nitrogen is in the form of NO3-N
+ result <- look_up_fertilizer_components(type = "45-00-00", amount = 200)
+ expect_equal(
+ result,
+ list(
+ type = "45-00-00",
+ NO3_N = 90,
+ NH4_N = 0,
+ N_org = 0,
+ C_org = 0
+ )
+ )
+})
+
+ test_that("User specified NN-PP-KK format works", {
+ # not realistic value, just testing one that is not in the database
+ result <- look_up_fertilizer_components(type = "01-00-00", amount = 100)
+ expect_equal(
+ result,
+ list(
+ type = "01-00-00",
+ NO3_N = 1,
+ NH4_N = 0,
+ N_org = 0,
+ C_org = 0
+ )
+ )
+})
+
+test_that("Create fertilizer based on specified components", {
+ result <- look_up_fertilizer_components(
+ type = "custom_organic",
+ amount = 1000,
+ fraction_organic_n = 0.02,
+ fraction_organic_c = 0.08)
+ expect_equal(result,
+ list(type = "custom_organic",
+ NO3_N = 0,
+ NH4_N = 0,
+ N_org = 20,
+ C_org = 80)
+ )
+})
+
+test_that("Look up dairy fresh manure from database", {
+ result <- look_up_fertilizer_components("dairy_fr", 1000)
+ expect_equal(result,
+ list(type = "dairy_fr",
+ NO3_N = 0, NH4_N = 7,
+ N_org = 31, C_org = 391)
+ )
+})
+
+test_that("Invalid fertilizer type returns NULL", {
+ # It generates PEcAn.logger::logger.severe, returns NULL
+ # Temporarily disable logging to avoid cluttering test output
+ level <- PEcAn.logger::logger.getLevel()
+ PEcAn.logger::logger.setLevel("OFF")
+
+ expect_null(
+ look_up_fertilizer_components("invalid_type", 1000)
+ )
+
+ PEcAn.logger::logger.setLevel(level)
+})
diff --git a/modules/data.mining/DESCRIPTION b/modules/data.mining/DESCRIPTION
index d67e19f81d9..56d3df01469 100644
--- a/modules/data.mining/DESCRIPTION
+++ b/modules/data.mining/DESCRIPTION
@@ -2,7 +2,9 @@ Package: PEcAn.data.mining
Type: Package
Title: PEcAn Functions Used for Exploring Model Residuals and Structures
Description: (Temporary description) PEcAn functions used for exploring model residuals and structures.
-Version: 1.7.4
+URL: https://pecanproject.github.io
+BugReports: https://github.com/PecanProject/pecan/issues
+Version: 1.7.5
Authors@R: c(person("Mike", "Dietze", role = c("aut", "cre"),
email = "dietze@bu.edu"),
person("University of Illinois, NCSA", role = c("cph")))
diff --git a/modules/data.mining/NEWS.md b/modules/data.mining/NEWS.md
index 7989a94ff9c..0c1419c2fe5 100644
--- a/modules/data.mining/NEWS.md
+++ b/modules/data.mining/NEWS.md
@@ -1,3 +1,11 @@
+# PEcAn.data.mining 1.7.5
+
+* Added keywords and bug reporting URL to DESCRIPTION.
+* Stored some additional scripts in `inst/` for future development.
+* This release contains no changes to exported code.
+
+
+
# PEcAn.data.mining 1.7.4
## License change
diff --git a/modules/data.mining/inst/NEON_soils/soil_resp_markdown.Rmd b/modules/data.mining/inst/NEON_soils/soil_resp_markdown.Rmd
new file mode 100644
index 00000000000..79afaf78838
--- /dev/null
+++ b/modules/data.mining/inst/NEON_soils/soil_resp_markdown.Rmd
@@ -0,0 +1,721 @@
+---
+title: "Soil Resp"
+output: html_document
+date: "2025-04-22"
+---
+
+Libraries
+```{r setup, include=FALSE}
+library(DBI)
+library(RSQLite)
+library(dplyr)
+library(geosphere)
+library(sp)
+library(ggplot2)
+library(sf)
+library(maps)
+library(terra)
+library(knitr)
+library(tidyr)
+library(neonUtilities)
+library(swfscMisc)
+library(maps)
+library(tidyverse)
+library(neonSoilFlux)
+library(parallel)
+library(ncdf4)
+library(data.table)
+library(purrr)
+library(data.table)
+library(parallel)
+library(lubridate)
+library(terra)
+library(httr)
+library(sp)
+library(neonSoilFlux)
+library(stringr)
+library(scales)
+```
+
+Get NEON soil respiration data
+```{r}
+# get a list of NEON sites and their coordinates
+Neonplots <- read.csv("/usr4/ugrad/chaney/R/3_25/Neon_sites_terrestrial.csv")
+
+# Add a column for site numbers in Site_Info (site_info has coordinates of each location in df)
+Site_Info <- readRDS("/usr4/ugrad/chaney/R/3_25/site.locs.rds")
+
+Site_Info$Site_Number <- seq(1, nrow(Site_Info))
+
+# Extract latitude and longitude from Neonplots and Site_Info tables
+neon_coords <- Neonplots[ ,2:3] %>%
+ select(Latitude = field_latitude, Longitude = field_longitude)
+Site_Info_coords <- Site_Info %>%
+ select(Site_Number, lat, lon)
+
+# Put Site_Info latitude/longitude in a matrix
+Site_Coordinates <- as.matrix(Site_Info[, c("lat", "lon")])
+
+# Same for Neonplots
+Neonplots_Coordinates <- as.matrix(Neonplots[, 2:3])
+
+# make list to store matched list
+matched <- data.frame(id = numeric(nrow(Neonplots)), # id in forecast
+ site_id = numeric(nrow(Neonplots))) # NEON id
+
+# Loop through each row in Neonplots and calculate each plot's closest site number
+for (i in 1:nrow(Neonplots)) {
+
+ # Extract the coordinates for the current row in Neonplots
+ plot_coords <- Neonplots_Coordinates[i, ]
+
+  # Compute distances from this plot to all site coordinates (longlat = TRUE returns km)
+ distances <- spDistsN1(Site_Coordinates, plot_coords, longlat = TRUE)
+
+ # Find the index of the minimum distance for closest site
+ closest_site_index <- which.min(distances)
+
+ # Assign the corresponding Site_Number from Site_Info to the current row in Neonplots
+ matched$id[i] <- Site_Info$Site_Number[closest_site_index]
+ matched$site_id[i] <- Neonplots$field_site_id[i]
+}
+
+### Get the model output (0715, 16 outputs) (Yang's code)
+years <- 2000:2025
+Nensem <- 1:25
+
+# Initialize the output list
+
+sr_list <- list()
+
+# Process Ensemble Function
+process_ensemble <- function(ens) {
+ ens_id <- sprintf("%05d", ens)
+ sr_ens <- list()
+
+ # loop through all the sites
+ for (i in seq_len(nrow(matched))) {
+ site_id <- matched$site_id[i]
+ ID <- matched$id[i]
+
+ for (year in years) {
+ file_path <- paste0(
+ "/projectnb/dietzelab/dongchen/anchorSites/NA_runs/SDA_25ens_2025_4_19/out/ENS-",
+ ens_id, "-", ID, "/", year, ".nc"
+ )
+
+ # open NetCDF files
+ nc_data <- tryCatch({
+ nc_open(file_path)
+ }, error = function(e) {
+ return(NULL)
+ })
+
+ # file open
+ if (!is.null(nc_data)) {
+
+ if ("time" %in% names(nc_data$dim)) {
+ time_values <- ncvar_get(nc_data, "time")
+ time_units <- ncatt_get(nc_data, "time", "units")$value
+
+ # Analyze the model start date
+ start_datetime <- as.POSIXct(
+ sub("days since ", "", time_units),
+ format = "%Y-%m-%d %H:%M:%S", tz = "UTC"
+ )
+
+ # Calculate the time series
+ time_datetimes <- as.Date(start_datetime) + (time_values - 1)
+
+ # Extract Soil Respiration data
+ sr <- tryCatch({
+ ncvar_get(nc_data, "SoilResp")
+ }, error = function(e) {
+ return(NULL)
+ })
+
+
+ if (!is.null(sr)) {
+
+ sr[sr == -999] <- NA
+
+ # create dataframe
+ sr_df <- data.table(
+ Time = time_datetimes,
+ SR = sr,
+ Ensemble = ens,
+ Site_ID = site_id
+ )
+
+ # save the results
+ sr_ens[[length(sr_ens) + 1]] <- sr_df
+ }
+ }
+
+ # Close NetCDF File
+ nc_close(nc_data)
+ }
+ }
+ }
+
+ # Combine the results
+ sr_result <- if (length(sr_ens) > 0) rbindlist(sr_ens, use.names = TRUE, fill = TRUE) else data.table()
+
+ return(list(sr = sr_result))
+}
+
+
+# Apply parallel calculation
+num_cores <- detectCores() - 1
+results <- mclapply(Nensem, process_ensemble, mc.cores = num_cores)
+
+# Combine results from all ensembles
+sr_output <- rbindlist(lapply(results, `[[`, "sr"))
+
+# Calculate ensemble mean
+sr_mean <- sr_output[, .(SR = mean(SR, na.rm = TRUE)), by = .(Time, Site_ID)]
+
+# Merge SR mean value
+daily_mean_data <- as.data.table(sr_mean)
+
+# Format Time to character for processing
+daily_mean_data[, Time := as.character(Time)]
+
+# Calculate daily SR (time resolution is 1 day)
+final_daily_sr_data <- daily_mean_data[, .(
+ Final_SR = mean(SR, na.rm = TRUE)), by = .(Time, Site_ID)]
+
+```
+
+Get John's NEON soil respiration data
+```{r}
+# function to get soil fluxes for specified years for 1 site
+soil_flux <- function(site_number, flux_type){
+ summary_table <- data.frame(Time = character(5114),
+ Site_id = character(5114),
+ sr_neon = numeric(5114))
+
+ day_count <- 1
+ temp <- 0
+ na_count <- 0
+ tot <- 0
+
+ years = 2012:2025
+ site = matched$site_id[site_number]
+
+ for (year in years){
+ for (i in 01:12){
+ if (i < 10){
+ file_name <- paste0("/projectnb/dietzelab/jzobitz/02-NEON-sites/flux-results/out-flux-", site, "-",year,"-0",i,".Rda")
+ }else{
+ file_name <- paste0("/projectnb/dietzelab/jzobitz/02-NEON-sites/flux-results/out-flux-", site, "-",year,"-",i,".Rda")
+ }
+ if (file.exists(file_name)== TRUE){
+ load(file_name)
+
+ # sort by date so I can average over all horizontal positions
+ out_fluxes <- out_fluxes[order(out_fluxes$startDateTime),]
+
+          # Average over each day (240 records/day; 30-min increments, presumably across horizontal positions — TODO confirm)
+          for (j in 1:(nrow(out_fluxes)/240)){
+            if (j == 1){
+              start <- 1
+              end <- 240
+            } else{
+              start <- (j-1)*240 + 1  # +1: previous form re-used record 240 of the prior day and dropped the chunk's last record
+              end <- j*240
+            }
+ for (k in start:end){
+ temp <- out_fluxes[[3]][[k]]$flux[flux_type]
+ if (is.na(temp)){
+ na_count <- na_count + 1}
+ else{
+ tot <- tot + temp
+ }
+ }
+            summary_table$sr_neon[day_count] <- tot / (end - start + 1 - na_count)  # mean of non-NA fluxes; mean() had no arguments and always errored
+            summary_table$Site_id[day_count] <- site
+            if (start == 1){
+              summary_table$Time[day_count] <- str_sub(out_fluxes$startDateTime[start], end=-10)  # keep date part only (was end=-1, inconsistent with the else branch)
+            } else{
+              summary_table$Time[day_count] <- str_sub(out_fluxes$startDateTime[start], end=-10)  # index the chunk start, not the stale inner-loop variable k
+ }
+ day_count <- day_count + 1
+ na_count <- 0
+ tot <- 0
+ }
+ }
+ }
+ }
+
+return(list(sr = summary_table))
+}
+r <- soil_flux(1,4)
+
+# Apply parallel calculation for each site
+n = 1:nrow(matched)
+num_cores <- detectCores() - 1
+
+# Use Marshall method at top of soil (000)
+results_j <- mclapply(n, soil_flux, mc.cores = num_cores, flux_type = 4)
+# Combine results from all ensembles
+sr_marshall <- rbindlist(lapply(results_j, `[[`, "sr"))
+# delete rows with all zeros
+sr_marshall_filtered <- filter(sr_marshall, sr_neon != 0)
+
+# Use Millington-Quirk method at top of soil (000)
+results_j_2 <- mclapply(n, soil_flux, mc.cores = num_cores, flux_type = 8)
+# Combine results from all ensembles
+sr_m_q <- rbindlist(lapply(results_j_2, `[[`, "sr"))
+# delete rows where the soil respiration is 0
+sr_m_q_filtered <-filter(sr_m_q, sr_neon != 0)
+
+
+# combine the two methods
+daily_mean_data_neon <- merge(sr_marshall_filtered, sr_m_q_filtered, by = c("Time", "Site_id"))
+
+# rename rows
+names(daily_mean_data_neon)[names(daily_mean_data_neon) == "sr_neon.x"] <- "Marshall_SR"
+names(daily_mean_data_neon)[names(daily_mean_data_neon) == "sr_neon.y"] <- "Mill_Quirk_SR"
+names(daily_mean_data_neon)[names(daily_mean_data_neon) == "Site_id"] <- "Site_ID"
+
+
+# combine data frame into final_daily_sr_data
+daily_sr <- merge(final_daily_sr_data, daily_mean_data_neon, by = c("Time","Site_ID"))
+names(daily_sr)[names(daily_sr) == "Final_SR"] <- "Forecast_SR"
+
+# convert Time to date class
+daily_sr$Time <- as.Date(daily_sr$Time)
+
+# convert NEON data to kg/(m^2 s) (currently in umol/(m^2 s)): umol -> mol -> g C (x12.011) -> kg
+daily_sr$Marshall_SR = daily_sr$Marshall_SR*10^-6*12.011*10^-3
+daily_sr$Mill_Quirk_SR = daily_sr$Mill_Quirk_SR*10^-6*12.011*10^-3
+
+```
+
+Find error
+```{r}
+# calculate error
+daily_sr$Marsh_error <- daily_sr$Forecast_SR - daily_sr$Marshall_SR
+daily_sr$MQ_error <- daily_sr$Forecast_SR - daily_sr$Mill_Quirk_SR
+
+# keep a permanent daily_sr
+daily_sr_perm <- daily_sr
+
+# make table for summary statistics
+summary_stat <- data.frame(Site_ID = character(length(unique(daily_sr$Site_ID))),
+ Marsh_corr_coef = numeric(length(unique(daily_sr$Site_ID))),
+ MQ_corr_coef = numeric(length(unique(daily_sr$Site_ID))),
+ Marsh_rms_error = numeric(length(unique(daily_sr$Site_ID))),
+ MQ_rms_error = numeric(length(unique(daily_sr$Site_ID))))
+
+
+# do summary stats
+sites <- unique(daily_sr$Site_ID)
+
+for (i in 1:length(sites)){
+ temp_data <- filter(daily_sr, Site_ID == sites[i])
+ # remove na values
+ temp_data <- na.omit(temp_data)
+
+ # get rid of first row (so i can remove infinite)
+ temp_data <- subset(temp_data, select = -c(Site_ID, Time))
+
+ # remove inf values
+ temp_data <- temp_data[apply(temp_data, 1, function(row) all(is.finite(row))), ]
+
+ # add site name
+ summary_stat$Site_ID[i] <- sites[i]
+
+ # find correlation coeffs
+ summary_stat$Marsh_corr_coef[i] <- cor(temp_data$Marshall_SR, temp_data$Forecast_SR)
+ summary_stat$MQ_corr_coef[i] <- cor(temp_data$Mill_Quirk_SR, temp_data$Forecast_SR)
+
+ # find RMS errors
+ summary_stat$Marsh_rms_error[i] <- sqrt(mean((temp_data$Marshall_SR - temp_data$Forecast_SR)^2))
+ summary_stat$MQ_rms_error[i] <- sqrt(mean((temp_data$Mill_Quirk_SR - temp_data$Forecast_SR)^2))
+}
+
+```
+
+Graph Time Series
+```{r}
+
+for (i in 1:length(sites)){
+ temp_data <- filter(daily_sr, Site_ID == sites[i])
+
+
+print(ggplot(temp_data) +
+  geom_line(aes(x = Time, y = Marshall_SR, group = 1), linewidth = 1, color = "orange") +
+  geom_line(aes(x = Time, y = Forecast_SR, group = 1), linewidth = 1, color = "lightblue") +
+  labs(title = sites[i], x = "date", y = "Soil Respiration (kg/(m^2 s))"))  # label had an unbalanced parenthesis
+}
+
+```
+
+
+Predicted vs. Observed plots
+```{r}
+# make predicted- observed plots
+for (i in 1:length(sites)){
+  temp_data <- filter(daily_sr, Site_ID == sites[i])
+
+  # daily_sr has no *_umol columns; plot the converted kg/(m^2 s) columns that exist
+  print(ggplot(temp_data, aes(x = Forecast_SR, y = Marshall_SR)) + geom_point() +
+    geom_smooth(method = "lm", se = FALSE) +
+    labs(title = sites[i], x = "Predicted (kg/m^2*s)", y = "Observed (kg/m^2*s)"))
+}
+```
+
+
+Cross-correlation coefficients graphs
+```{r}
+
+# List of sites with a noticeable lag
+lag_sites <- c("MLBS", "HARV","SERC","UNDE","ABBY", "GRSM","WREF","SJER")
+
+peak <- numeric(length(lag_sites))
+count = 1
+
+for (site in lag_sites){
+ temp_data <- filter(daily_sr, Site_ID == site)
+
+ # remove na values
+ temp_data <- na.omit(temp_data)
+
+ name <- temp_data$Site_ID[1]
+ # get rid of first row (so i can remove infinite)
+ temp_data <- subset(temp_data, select = -c(Site_ID, Time))
+
+ # remove inf values
+ temp_data <- temp_data[apply(temp_data, 1, function(row) all(is.finite(row))), ]
+
+ ccf_data <- ccf(temp_data$Marshall_SR, temp_data$Forecast_SR, lag = 91, plot = TRUE, ylim = range(-1,1))
+
+ title(name)
+
+ peak[count] <- ccf_data$lag[which.max(ccf_data$acf)]
+ count = count + 1
+}
+
+```
+
+
+Soil Moisture Fraction from Model
+```{r}
+
+# Initialize the output list
+sm_list <- list()
+
+# Process Ensemble Function
+
+process_ensemble_sm <- function(ens) {
+ ens_id <- sprintf("%05d", ens)
+ sm_ens <- list()
+
+ # go through all the sites
+ for (i in seq_len(nrow(matched))) {
+ site_id <- matched$site_id[i]
+ ID <- matched$id[i]
+
+ # go through the years
+ for (year in years) {
+ file_path <- paste0(
+ "/projectnb/dietzelab/dongchen/anchorSites/NA_runs/SDA_25ens_2024_11_25/run/ENS-",
+ ens_id, "-", ID, "/", year, ".nc"
+ )
+
+ # check if the file exists
+
+ # open NetCDF files
+ nc_data <- tryCatch({
+ nc_open(file_path)
+ }, error = function(e) {
+ return(NULL)
+ })
+
+ # file open
+ if (!is.null(nc_data)) {
+ if ("time" %in% names(nc_data$dim)) {
+ time_values <- ncvar_get(nc_data, "time")
+ time_units <- ncatt_get(nc_data, "time", "units")$value
+
+ # Analyze the model start date
+ start_datetime <- as.POSIXct(
+ sub("days since ", "", time_units),
+ format = "%Y-%m-%d %H:%M:%S", tz = "UTC"
+ )
+
+ # Calculate the time series
+ time_datetimes <- as.Date(start_datetime) + (time_values - 1)
+
+        # get Soil Moisture Fraction data
+ sm <- tryCatch({
+ ncvar_get(nc_data, "SoilMoistFrac")
+ }, error = function(e) {
+ return(NULL)
+ })
+
+
+ if (!is.null(sm)) {
+ # replace -999 to NA
+ sm[sm == -999] <- NA
+
+ # create dataframe
+ sm_df <- data.table(
+ Time = time_datetimes,
+ SM = sm,
+ Ensemble = ens,
+ Site_ID = site_id
+ )
+
+ # save the results
+ sm_ens[[length(sm_ens) + 1]] <- sm_df
+ }
+ }
+
+ # Close NetCDF File
+ nc_close(nc_data)
+ }
+ }
+ }
+
+ # Combine the results
+ sm_result <- if (length(sm_ens) > 0) rbindlist(sm_ens, use.names = TRUE, fill = TRUE) else data.table()
+
+ return(list(sm = sm_result))
+}
+
+Nensem <- 1:100
+# Apply parallel calculation
+num_cores <- detectCores() - 1
+
+sm_model_results <- mclapply(Nensem, process_ensemble_sm, mc.cores = num_cores)
+
+# Combine Results from all ensembles
+sm_output <- rbindlist(lapply(sm_model_results, '[[', "sm"))
+
+# Calculate ensemble mean
+sm_mean <- sm_output[, .(sm = mean(SM, na.rm = TRUE)), by = .(Time, Site_ID)]  # source column is SM (upper case, created in sm_df)
+
+# Soil-moisture ensemble means at the model output resolution (not yet daily)
+daily_sm_data <- as.data.table(sm_mean)
+
+# Format Time to character for processing
+daily_sm_data[, Time := as.character(Time)]
+
+# Calculate daily soil moisture (time resolution is 1 day)
+final_daily_sm_data <- daily_sm_data[, .(
+ Soil_moist_model = mean(sm, na.rm = TRUE)), by = .(Time, Site_ID)]
+
+```
+
+
+Soil temperature data from Model
+```{r}
+process_ensemble_st <- function(ens) {
+ ens_id <- sprintf("%05d", ens)
+ sm_ens <- list()
+ c = 1
+
+ # create a data frame
+ st_df <- data.table(
+ Time = character(13*365*nrow(matched)),
+ ST = numeric(13*365*nrow(matched)),
+ Site_ID = character(13*365*nrow(matched))
+ )
+
+ # go through all the sites
+ for (i in seq_len(nrow(matched))) {
+
+
+ site_id <- matched$site_id[i]
+ ID <- matched$id[i]
+
+ # get file path
+
+ file_path <- paste0(
+ "/projectnb/dietzelab/dongchen/anchorSites/NA_runs/SDA_25ens_2024_11_25/run/ENS-",
+ ens_id, "-", ID, "/README.txt")
+ if (file.exists(file_path)== TRUE){
+ # read the text file in
+ temp <- readLines(file_path)
+
+ # get the met data path
+ met_data_path <- substr(temp[11],17,119)
+
+ # change path of readme file so that both 2021 are 2024
+ met_data_path <- str_replace(met_data_path,"2021","2024")
+ met_data_path <- str_replace(met_data_path,"2021","2024")
+
+ # open the new file
+ if (file.exists(met_data_path)== TRUE){
+ temp_table <- read.table(met_data_path)
+ temp_table_yr <- temp_table$V2
+
+ for (year in 2012:2024){
+ temp_table_yr <- filter(temp_table, V2 == year)
+ org_date = paste0(year - 1, "-12-31")
+ for (day in 1:366){
+          temp_table_yr_day <- filter(temp_table_yr, V3 == day)  # filter within this year's subset, not the whole table (was averaging the same day-of-year across every year)
+ st_df$Site_ID[c] = site_id
+ st_df$ST[c] = mean(temp_table_yr_day$V7, na.rm = TRUE)
+
+ # need to convert date into "year-mo-day format"
+ date <- as.Date(day, origin = org_date)
+ st_df$Time[c] <- format(date, "%Y-%m-%d")
+ c = c+1
+ }
+ }
+ }
+ }
+ }
+ return(list(st = st_df))
+}
+
+results <- process_ensemble_st(1)
+# apply parallel computation
+
+results_temp <- mclapply(Nensem, process_ensemble_st, mc.cores = num_cores)
+
+st_output <- rbindlist(lapply(results_temp, `[[`, "st"))
+
+soil_temperature <- filter(st_output, ST != 0)  # column is ST (as created in st_df); 'st' does not exist
+
+
+```
+
+
+Get Temp and Soil Moisture from John's NEON data
+```{r}
+
+soil_temp_and_moist <- function(site_number){
+ summary_table <- data.frame(Site_ID = character(5114),
+ Time = character(5114),
+ Soil_temp_neon = numeric(5114),
+ Soil_moist_neon = numeric(5114))
+
+ years = 2012:2025
+ site = matched$site_id[site_number]
+ day_count <- 1
+
+ for (year in years){
+ for (i in 01:12){
+ if (i < 10){
+ file_name <- paste0("/projectnb/dietzelab/jzobitz/02-NEON-sites/env-data/env-meas-", site, "-",year,"-0",i,".Rda")
+ }else{
+ file_name <- paste0("/projectnb/dietzelab/jzobitz/02-NEON-sites/env-data/env-meas-", site, "-",year,"-",i,".Rda")
+ }
+ if (file.exists(file_name)== TRUE){
+ load(file_name)
+
+ # get just 501 data
+ site_data_temp <- filter(site_data[[2]][[3]], verticalPosition == "501")
+ site_data_moist <- filter(site_data[[2]][[2]], verticalPosition == "501")
+
+ # sort by date
+ site_data_temp <- site_data_temp[order(site_data_temp$startDateTime),]
+ site_data_moist <- site_data_moist[order(site_data_moist$startDateTime),]
+
+ for (j in 1:(nrow(site_data_temp)/240)){
+ if (j == 1){
+ start <- 1
+ end <- 240
+ } else{
+ start <- (j-1)*240
+ end <- j*240
+ }
+          summary_table$Soil_temp_neon[day_count] <- mean(site_data_temp$soilTempMean[start:end], na.rm = TRUE)  # use the declared column name
+          summary_table$Soil_moist_neon[day_count] <- mean(site_data_moist$VSWCMean[start:end], na.rm = TRUE)
+          summary_table$Site_ID[day_count] <- site  # was Site_id; declared column Site_ID stayed empty, so the later merge by Site_ID matched nothing
+          if (start == 1){
+            summary_table$Time[day_count] <- str_sub(site_data_temp$startDateTime[start], end=-10)  # keep date part only (was end=-1)
+          } else{
+            summary_table$Time[day_count] <- str_sub(site_data_temp$startDateTime[start], end=-10)
+          }
+          day_count = day_count + 1
+        }
+
+      }
+    }
+  }
+
+return(list(stm = summary_table))
+}
+
+# Apply parallel calculation for each site
+n = 1:nrow(matched)
+num_cores <- detectCores() - 1
+
+# soil_temp results
+results_tempandmoist <- mclapply(n, soil_temp_and_moist, mc.cores = num_cores)
+
+# Combine results from all ensembles
+soil_tm <- rbindlist(lapply(results_tempandmoist, `[[`, "stm"))
+
+# Delete all rows with just zeros
+soil_tm <- filter(soil_tm, Soil_temp_neon != 0)
+
+# temp is in C, soil moisture is a ratio
+
+```
+Combine the soil moisture data
+```{r}
+soil_moist_table <- merge(final_daily_sm_data, soil_tm, by = c("Time","Site_ID"))
+```
+
+Graph Time series
+```{r}
+
+for (i in 1:length(sites)){
+ temp_data <- filter(soil_moist_table, Site_ID == sites[i])
+
+
+print(ggplot(temp_data) +
+ geom_line(aes(x = Time, y = Soil_moist_neon, group = 1), linewidth = 1, color = "orange") +
+ geom_line(aes(x = Time, y = Soil_moist_model,group = 1), linewidth = 1, color = "lightblue") +
+ labs(title = sites[i], x = "date", y = "Soil Moisture Fraction"))
+}
+
+```
+
+
+Cross-correlation coefficients
+```{r}
+lag <- data.frame(lag_sites = character(8),
+ half_lag = numeric(8),
+ year_lag = numeric(8))
+
+lag$lag_sites <- c("MLBS", "HARV","SERC","UNDE","ABBY", "GRSM","WREF","SJER")
+
+peak <- numeric(length(lag$lag_sites))
+count = 1
+
+for (site in lag$lag_sites){
+ temp_data <- filter(soil_moist_table, Site_ID == site)
+
+ # remove na values
+ temp_data <- na.omit(temp_data)
+
+ name <- temp_data$Site_ID[1]
+ # get rid of first row (so i can remove infinite)
+ temp_data <- subset(temp_data, select = -c(Site_ID, Time))
+
+ # remove inf values
+ temp_data <- temp_data[apply(temp_data, 1, function(row) all(is.finite(row))), ]
+
+ ccf_data <- ccf(temp_data$Soil_moist_neon, temp_data$Soil_moist_model, lag = 183, plot = TRUE, ylim = range(-1,1))
+
+ #title(name)
+
+  lag$half_lag[count] <- ccf_data$lag[which.max(ccf_data$acf)]
+  count <- count + 1  # was missing: without it only lag$half_lag[1] was ever written
+}
+```
+
+
+To do:
+- find function for soil moisture and respiration data, and for soil temperature and respiration
+
+
+
diff --git a/modules/data.mining/inst/NEON_soils/soilcarbon_validation.Rmd b/modules/data.mining/inst/NEON_soils/soilcarbon_validation.Rmd
new file mode 100644
index 00000000000..4967a58f3db
--- /dev/null
+++ b/modules/data.mining/inst/NEON_soils/soilcarbon_validation.Rmd
@@ -0,0 +1,623 @@
+---
+title: "Soil Carbon data validation"
+date: "2025-01-27"
+output: html_document
+---
+
+```{r setup, include=FALSE}
+# Load libraries, WD, cran repository
+library(DBI)
+library(RSQLite)
+library(dplyr)
+library(geosphere)
+library(sp)
+library(ggplot2)
+library(rnaturalearth)
+library(sf)
+library(maps)
+library(scoringRules)
+library(ncf)
+library(terra)
+library(knitr)
+library(gstat)
+library(tidyr)
+library(neonstore)
+library(neonUtilities)
+library(swfscMisc)
+library(PEcAn.utils)
+library(maps)
+
+
+# load functions from other script
+source("extract_neon_sc_func.R")
+
+setwd("/Users/chane/OneDrive/Documents/ef lab/R files")
+options(repos = c(CRAN = "https://cran.rstudio.com/"))
+```
+
+
+Get soil carbon data from SDA forecast (copying Cami's code)
+```{r, echo = FALSE}
+
+# Load data
+SIPNET <- load("sda.all.forecast.analysis.Rdata")
+
+# Get site coordinates
+Site_Info <- readRDS("site.locs.rds")
+
+# Rename lists to years
+names(analysis.all) <- 2012:2021
+year <- 2012:2021
+
+# Names of variables
+variables <- c("AbvGrndWood", "LAI", "SoilMoistFrac", "TotSoilCarb")
+
+# Loop through each year list in analysis.all
+for (year in names(analysis.all)) {
+
+ # Get the current data frame for the year
+ year_df <- analysis.all[[year]]
+
+ # Initialize a vector for new column names
+ new_colnames <- character(ncol(year_df))
+
+ # Loop over each site and assign variable names
+ for (site in 1:6400) {
+ # Calculate the starting column index for each site's variables
+ start_col <- (site - 1) * 4 + 1
+
+ # Assign the names for the 4 variable columns for this site
+ new_colnames[start_col:(start_col + 3)] <- paste0(variables, "_Site", site)
+ }
+
+ # Apply the new column names to the data frame
+ colnames(year_df) <- new_colnames
+ analysis.all[[year]] <- year_df # Update the list with modified data frame
+}
+
+
+# Load list of neon sites with their latitudes and longitudes
+Neonplots <- read.csv("Neon_sites_terrestrial.csv")
+
+# Add a column for site numbers in Site_Info (site_info has coordinates of each location in df)
+Site_Info$Site_Number <- seq(1, nrow(Site_Info))
+
+ # Extract latitude and longitude from Neonplots and Site_Info tables
+neon_coords <- Neonplots[ ,2:3] %>%
+ select(Latitude = field_latitude, Longitude = field_longitude)
+Site_Info_coords <- Site_Info %>%
+ select(Site_Number, lat, lon)
+
+ # Put Site_Info latitude/longitude in a matrix
+Site_Coordinates <- as.matrix(Site_Info[, c("lat", "lon")])
+
+ # Same for Neonplots
+Neonplots_Coordinates <- as.matrix(Neonplots[, 2:3])
+
+ # Create a new column in Neonplots to store the closest Site_Number
+Neonplots$Closest_Site <- NA
+
+# Loop through each row in Neonplots and calculate each plot's closest site number
+for (i in 1:nrow(Neonplots)) {
+ # Extract the coordinates for the current row in Neonplots
+ plot_coords <- Neonplots_Coordinates[i, ]
+
+ # Compute distances from this plot to all site coordinates (longlat means its in km)
+ distances <- spDistsN1(Site_Coordinates, plot_coords, longlat = TRUE)
+
+ # Find the index of the minimum distance for closest site
+ closest_site_index <- which.min(distances)
+
+ # Assign the corresponding Site_Number from Site_Info to the current row in Neonplots
+ Neonplots$Closest_Site[i] <- Site_Info$Site_Number[closest_site_index]}
+
+```
+
+Filtering analysis data with relevant sites
+```{r, echo=FALSE}
+## Filter analysis.all to only include relevant sites ##
+
+# Get the list of sites to keep from Neonplots
+sites_to_keep <- Neonplots$Closest_Site
+
+# Loop through each data frame in the analysis.all list
+analysis.fia = list()
+for (year in names(analysis.all)) {
+ # Get the current data frame
+ df <- analysis.all[[year]]
+
+ # Create a pattern to match columns that include sites in sites_to_keep
+ pattern <- paste0("_Site(", paste(sites_to_keep, collapse = "|"), ")$")
+
+ # Keep only columns that match the pattern
+ df_filtered <- df[, grep(pattern, colnames(df))]
+
+ # Replace the original data frame in the list with the filtered one
+ analysis.fia[[year]] <- df_filtered}
+
+```
+
+Now extract the sites from Neonplots
+```{r, echo = FALSE}
+## Extract the unique site-year combinations from Neonplots ##
+valid_sites <- unique(Neonplots[, "Closest_Site"])
+
+# Loop over each year list in analysis.all
+for (year in names(analysis.fia)) {
+
+ # Extract the current data frame for the year
+ year_df <- analysis.fia[[year]]
+
+ # Identify the year as a numeric value
+ current_year <- as.numeric(substr(year, 1, 4))
+
+ # Identify columns with site numbers that match valid_sites
+ matching_columns <- grep(paste0("_Site(", paste(valid_sites, collapse = "|"), ")$"),
+ colnames(year_df), value = TRUE)
+
+ # Subset the data frame to only include matching columns
+ analysis.fia[[year]] <- year_df[, matching_columns, drop = FALSE]
+}
+```
+
+Extract soil carbon data - mean and standard deviation of each year
+```{r, echo = FALSE}
+# Initialize a list to store summary tables for each year
+summary_list <- list()
+
+# Loop through each year's data frame in analysis.all
+names(analysis.fia) = names(analysis.all)
+for (year in names(analysis.all)) {
+
+ # Extract the year from the full date format
+ year_only <- sub("^(\\d{4}).*", "\\1", year)
+
+ # Get the current data frame
+ df <- analysis.fia[[year]]
+
+ # Create a pattern to match columns that include sites
+ site_numbers <- unique(gsub(".*_Site(\\d+)", "\\1", colnames(df)))
+
+ # Initialize a data frame to hold summary statistics for this year
+ summary_table <- data.frame(Site_Number = numeric(0),
+ Year = character(0),
+ SC_Mean_Mod = numeric(0),
+ SC_SD_Mod = numeric(0),
+ stringsAsFactors = FALSE)
+
+ # Loop through each site number
+ for (site in site_numbers) {
+ # Create a vector for soil carbon data
+ tsc_col <- paste0("TotSoilCarb_Site", site)
+
+ # Check if columns exist in the data frame
+ if (tsc_col %in% colnames(df)) {
+ soilC = df[[tsc_col]]
+
+ # Calculate mean and SD for each variable
+ mean_soil = mean(soilC, na.rm = TRUE) # Average of soil C
+ sd_soil = sd(soilC, na.rm = TRUE) # Standard deviation of soil C
+
+ # Add to summary table
+ summary_table <- rbind(summary_table,
+ data.frame(Site_Number = as.numeric(site),
+ Year = year_only,
+ SC_Mean_Mod = mean_soil,
+ SC_SD_Mod = sd_soil))
+ }
+ }
+ # Store the summary table for this year in the list
+ summary_list[[year]] <- summary_table
+}
+
+# Combine all the summary tables into one data frame
+final_summary_table <- do.call(rbind, summary_list)
+```
+
+Graph the soil carbon year means in a histogram
+```{r}
+hist(final_summary_table[["SC_Mean_Mod"]], main = "Histogram of Soil Carbon Means across NEON Sites and Year", col = "lightblue", breaks = 10, border = "black", freq = FALSE)
+```
+
+
+Loading soil carbon data from NEON (Alexi's code)
+```{r, echo = FALSE}
+
+# load copy of Alexi's code
+source("extract_neon_sc_func.R")
+
+# Set start and end to NA to get all years
+start_date = NA
+end_date = NA
+store_dir <- "/Users/chane/OneDrive/Documents/ef_lab/R_files" # storing to folder I am in
+
+# create summary table for loading data
+summary_table_NEON_data <- data.frame(sitename = character(240),
+ year = character(240),
+ mean_organicC = numeric(240),
+ sd_organicC = numeric(240),
+ mean_bulkD = numeric(240),
+ sd_bulkD = numeric(240),
+ mean_frac30 = numeric(240),
+ sd_frac30 = numeric(240),
+ mean_soilC= numeric(240),
+ sd_soilC = numeric(240))
+
+# create temp data
+temp_table <- data.frame(sitename = character(240),
+ year = character(240),
+ mean_organicC = numeric(240),
+ sd_organicC = numeric(240),
+ mean_bulkD = numeric(240),
+ sd_bulkD = numeric(240),
+ mean_frac30 = numeric(240),
+ sd_frac30 = numeric(240),
+ mean_soilC= numeric(240),
+ sd_soilC = numeric(240))
+
+# assign first data to the table
+sitename <- Neonplots["field_site_id"][1,1]
+summary_table_NEON_data <-extract_NEON_veg(sitename, start_date, end_date, store_dir, neonsites = NULL)
+
+# loop through and mean and sd of each data point from each year at all of the neonplot sites
+for (i in 2:47){
+ sitename <- Neonplots["field_site_id"][i,1]
+ temp_table <- extract_NEON_veg(sitename, start_date, end_date, store_dir, neonsites = NULL)
+ summary_table_NEON_data <- rbind(summary_table_NEON_data, temp_table)
+}
+
+temp_summary_NEON <- summary_table_NEON_data
+
+```
+
+Add model to summary table
+```{r}
+# create summary_table as copy of neon table
+summary_table <- temp_summary_NEON
+
+# remove NAs
+summary_table <- drop_na(summary_table)
+
+# add row for sitename
+summary_table$Site_Number <- numeric(length(summary_table$sitename))
+
+
+# loop through summary table and Neon plots site and when equal and add closest site
+for (i in 1:length(summary_table$year)){
+ for (j in 1:length(Neonplots$field_site_id)){
+ if (Neonplots$field_site_id[j] == summary_table$sitename[i]){
+ summary_table$Site_Number[i] <- Neonplots$Closest_Site[j]
+ }
+ }
+}
+
+# sort by site number
+summary_table <- summary_table[order(summary_table$Site_Number),]
+
+# sort based on year (like final_summary_table)
+summary_table <- summary_table[order(summary_table$year),]
+
+# add rows for forecast to the summary_table_NEON_data
+summary_table$SC_mean_forecast = numeric(nrow(summary_table));
+summary_table$SC_sd_forecast = numeric(nrow(summary_table))
+
+c = 1 # counter
+
+# add forecast data, leave zeros where no forecast data
+for (i in 1:length(final_summary_table$Year)){
+ for (j in 1:nrow(summary_table)){
+ if (summary_table$year[j] < 2022){
+ if (final_summary_table$Year[i] == summary_table$year[j] & final_summary_table$Site_Number[i] == summary_table$Site_Number[j]){
+          summary_table$SC_mean_forecast[j] = final_summary_table$SC_Mean_Mod[i]  # write into the matched row j; the counter c misaligned values with rows
+          summary_table$SC_sd_forecast[j] = final_summary_table$SC_SD_Mod[i]
+          c <- c + 1  # retained only as a count of matches
+ }
+ }
+ }
+}
+
+
+```
+
+Analysis!
+```{r, echo = FALSE}
+# Plot summary table
+
+summary_table_2021 = summary_table[1:77, 1:12]
+ggplot(summary_table_2021, aes(x=SC_mean_forecast, y=mean_soilC)) + geom_point(size = 2)
+
+```
+
+Map of RMS error
+```{r}
+# find the error
+summary_table$error_soilC = numeric(nrow(summary_table))
+
+for (i in 1:nrow(summary_table)){
+ # model - actual
+ summary_table$error_soilC[i] = (summary_table$SC_mean_forecast[i] - summary_table$mean_soilC[i])
+}
+
+# find rms error for each site
+
+# make dataframe to store name and rms error of each
+error_df <- data.frame(sitename = character(46),
+ rms_error = numeric(46))
+
+error_df$sitename <- unique(summary_table$sitename) # make list of names
+
+# create temp vector for before averages
+vec <- numeric(1)
+c = 1 # counter
+
+for (i in 1:nrow(error_df)){
+ for (j in 1:nrow(summary_table)){
+ if(error_df$sitename[i] == summary_table$sitename[j]){
+ vec[c] = (summary_table$error_soilC[j])^2
+ c <- c + 1
+ }
+ }
+ error_df$rms_error[i] = sqrt(sum(vec)/(c-1))
+ c <- 1
+ rm(vec)
+ vec <- numeric(1)
+}
+
+# sort alphabetically
+error_df <- error_df[order(error_df$sitename),]
+
+# add lat and long columns
+error_df$site_lat <- numeric(nrow(error_df))
+error_df$site_long <- numeric(nrow(error_df))
+
+# add lat and long
+for (i in 1:nrow(error_df)){
+ for (j in 1:nrow(Neonplots)){
+ if (error_df$sitename[i] == Neonplots$field_site_id[j]){
+ error_df$site_lat[i] <- Neonplots$field_latitude[j]
+ error_df$site_long[i] <- Neonplots$field_longitude[j]
+ }
+ }
+}
+
+# get map of usa
+USA = map_data("world") %>% filter(region == "USA")
+
+# make bubble map with size and color depending on RMS error
+ggplot()+
+ geom_polygon(data = USA, aes(x= long, y = lat, group = group), fill="forestgreen", alpha = 0.3, main = "RMS error of each site") + geom_point(data = error_df, aes(x= site_long, y = site_lat, size = rms_error, color = rms_error)) + geom_point(alpha = 0.7) + xlim(-170, -50) + scale_size_continuous(range = c(1,10)) + theme_void() + labs(title = "RMS error of forecast for NEON sites") + theme(plot.title = element_text(hjust = 0.5),)
+
+# find max errors
+error_df <- error_df[order(error_df$rms_error),]
+
+```
+
+Find rate of change of soil carbon for each
+```{r, echo = FALSE}
+
+# find rate of change of NEON data
+# sort by location
+summary_table <- summary_table[order(summary_table$sitename),]
+
+error_df <- error_df[order(error_df$sitename),]
+
+# create places for rate of change
+error_df$roc_soilC <- numeric(46)
+error_df$roc_forecast <- numeric(46)
+error_df$roc_percent_error <- numeric(46)
+error_df$roc_error <- numeric(46)
+
+c = 1
+for (i in 1:nrow(summary_table)){
+ if (i == 1){
+ if( summary_table$sitename[1] != summary_table$sitename[2]){
+ error_df$roc_soilC[1] <- NaN
+ error_df$roc_forecast[1] <- NaN
+ error_df$roc_percent_error[1] <- NaN
+ error_df$roc_error <- NaN
+ }
+ }
+ if (i>1){
+ if (summary_table$sitename[i] != summary_table$sitename[i - 1]){
+ temp_name = summary_table$sitename[i]
+ j <- i
+ while (summary_table$sitename[j] == temp_name){
+ j <- j+1
+ if (j == 108){
+ break
+ }
+ }
+ c <- c+1
+ error_df$roc_soilC[c] <- ((summary_table$mean_soilC[j-1] - summary_table$mean_soilC[i])/(as.numeric(summary_table$year[j-1]) - as.numeric(summary_table$year[i])))
+ error_df$roc_forecast[c] <- ((summary_table$SC_mean_forecast[j-1] - summary_table$SC_mean_forecast[i])/(as.numeric(summary_table$year[j-1]) - as.numeric(summary_table$year[i])))
+ error_df$roc_error[c] <- (error_df$roc_soilC[c] - error_df$roc_forecast[c])
+ error_df$roc_percent_error[c] <- abs((error_df$roc_soilC[c] - error_df$roc_forecast[c])/error_df$roc_forecast[c])*100
+ }
+ }
+}
+
+# Bubble map with size and color depending on the rate-of-change residual (roc_error)
+ggplot()+
+ geom_polygon(data = USA, aes(x= long, y = lat, group = group), fill="forestgreen", alpha = 0.3) + geom_point(data = error_df, aes(x= site_long, y = site_lat, size = roc_error, color = roc_error)) + geom_point(alpha = 0.7) + xlim(-170, -50) + scale_size_continuous(range = c(1,10)) + theme_void() + labs(title = "Rate of Change Residual") + theme(plot.title = element_text(hjust = 0.5),)
+
+ggplot()+
+ geom_polygon(data = USA, aes(x= long, y = lat, group = group), fill="forestgreen", alpha = 0.3) + geom_point(data = error_df, aes(x= site_long, y = site_lat, size = roc_percent_error, color = roc_percent_error)) + geom_point(alpha = 0.7) + xlim(-170, -50) + scale_size_continuous(range = c(1,10)) + theme_void() + labs(title="Rate of Change Percent Error") + theme(plot.title = element_text(hjust = 0.5),)
+
+error_df <- error_df[order(error_df$roc_percent_error),]
+
+error_df_roc <- drop_na(error_df)
+```
+
+Residual map of rate of change (copied from Cami's code)
+```{r, echo = FALSE}
+
+# define colors based on sign
+error_df$Color <- with(error_df,
+ ifelse(roc_error > 0, "lightsalmon",
+ ifelse(roc_error < 0, "orangered2", "grey")))
+
+
+USA = map_data("world") %>% filter(region == "USA")
+us_map <- st_as_sf(USA,
+ coords = c("long", "lat"),
+ crs = 4326) # CRS 4326 is WGS84 (latitude/longitude)
+
+
+# Make the plot
+ggplot(data = us_map) +
+ geom_sf(fill = "lightgrey") +
+ geom_point(data = error_df,
+ aes(x = site_long, y = site_lat, color = Color, size = abs(roc_error)),
+ alpha = 0.7) +
+ scale_color_manual(
+ name = "Residuals",
+ values = c(
+ "lightsalmon" = "lightsalmon",
+ "orangered2" = "orangered2"
+ ),
+ labels = c(
+ "lightsalmon" = "Rate of change (+)",
+ "orangered2" = "Rate of change (-)"
+ )
+ ) +
+ scale_size(range = c(1, 10), name = "Residual Size") +
+ coord_sf(xlim = c(-160, -60), ylim = c(25, 70)) +
+  labs(title = "Soil Carbon Rate of Change Residuals",
+       x = "Longitude", y = "Latitude") +
+ theme_minimal() +
+ theme(
+ plot.title = element_text(hjust = 0.5),
+ )
+
+
+```
+
+spatial variograms (from cami's code)
+```{r, echo = FALSE}
+
+proj4string(error_df) <- CRS("+proj=longlat +datum=WGS84")
+
+variogram_model_rmse <- variogram(rms_error ~ 1, data = error_df)
+
+ggplot(data = variogram_model_rmse, aes(x = dist, y = gamma)) +
+ geom_point() +
+ geom_line() +
+ labs(title = "Variogram of RMS Error",
+ x = "Distance",
+ y = "Semivariance (γ)") +
+ theme_minimal() +
+ theme(
+ plot.title = element_text(hjust = 0.5, margin = margin(b = 20)),
+ axis.title.x = element_text(margin = margin(t = 10)),
+ axis.title.y = element_text(margin = margin(r = 10)))
+
+
+```
+
+```{r, echo = FALSE}
+# coordinates(error_df_roc) <- ~ site_long + site_lat
+variogram_model_roc <- variogram(roc_error ~ 1, data = error_df_roc)
+
+ggplot(data = variogram_model_roc, aes(x = dist, y = gamma)) +
+ geom_point() +
+ geom_line() +
+ labs(title = "Variogram of Rate of Change residual",
+ x = "Distance",
+ y = "Semivariance (γ)") +
+ theme_minimal() +
+ theme(
+ plot.title = element_text(hjust = 0.5, margin = margin(b = 20)),
+ axis.title.x = element_text(margin = margin(t = 10)),
+ axis.title.y = element_text(margin = margin(r = 10)))
+
+# NOTE(review): removed stray incomplete plot call (`ggplot(data = us_map) + ggplot`),
+# which raised an error when the chunk was evaluated.
+```
+
+Compare error to 13 other factors (from Dongchen's folder on github)
+```{r, echo = FALSE}
+
+# get data on US 13 factors
+dat = terra::rast("all_data_layers.tif")
+
+# add columns to error_df
+error_df$ysd <- numeric(46)
+error_df$abg<- numeric(46)
+error_df$fia <- numeric(46)
+error_df$gedi<- numeric(46)
+error_df$twi<- numeric(46)
+error_df$tavg <- numeric(46)
+error_df$srad <- numeric(46)
+error_df$prec <- numeric(46)
+error_df$vapr <- numeric(46)
+error_df$ph <- numeric(46)
+error_df$n <- numeric(46)
+error_df$soc <- numeric(46)
+error_df$sand <- numeric(46)
+
+pt = numeric(1)
+
+# extract data for each from each Neon site coordinate
+for (i in 1:nrow(error_df)){
+ pt = terra::extract(dat,data.frame(lon=error_df$site_long[i], lat = error_df$site_lat[i]))
+ error_df$ysd[i] = pt$year_since_disturb
+ error_df$abg[i] <- pt$agb
+ error_df$fia[i] <- pt$fia
+ error_df$gedi[i] <- pt$gedi
+ error_df$twi[i] <- pt$twi
+ error_df$tavg[i] <- pt$tavg
+ error_df$srad[i] <- pt$srad
+ error_df$prec[i] <- pt$prec
+ error_df$vapr[i] <- pt$vapr
+ error_df$ph[i] <- pt$PH
+ error_df$n[i] <- pt$N
+ error_df$soc[i] <- pt$SOC
+ error_df$sand[i] <- pt$Sand
+}
+
+# use lm to fit a linear model to the data
+fit_ysd = lm(error_df$ysd ~ error_df$rms_error)
+fit_abg = lm(error_df$abg ~ error_df$rms_error)
+fit_fia = lm(error_df$fia ~ error_df$rms_error)
+fit_gedi = lm(error_df$gedi ~ error_df$rms_error)
+fit_twi = lm(error_df$twi ~ error_df$rms_error)
+fit_tavg = lm(error_df$tavg ~ error_df$rms_error)
+fit_srad = lm(error_df$srad ~ error_df$rms_error)
+fit_prec = lm(error_df$prec~ error_df$rms_error)
+fit_vapr = lm(error_df$vapr ~ error_df$rms_error)
+fit_ph = lm(error_df$ph ~ error_df$rms_error)
+fit_n = lm(error_df$n ~ error_df$rms_error)
+fit_soc = lm(error_df$soc ~ error_df$rms_error)
+fit_sand = lm(error_df$sand ~ error_df$rms_error)
+
+
+# make data frame
+summary_factors <- data.frame(factor_name= character(13),
+ r_squared = numeric(13),
+ slope = numeric (13),
+ p_value = numeric(13))
+
+for (i in 23:35){
+ # fit a line
+ error_df$temp = error_df[[i]]
+ fit = lm(error_df$temp ~ error_df$rms_error)
+
+ # graph
+
+ print(ggplot(error_df, aes(x = temp, y = rms_error)) + geom_point() + geom_smooth(method = "lm", se = FALSE) + xlab(colnames(error_df[(i-13)])))
+
+ # add r squared, slope, p value
+ summary_factors$factor_name[(i-22)] <- colnames(error_df[(i-13)])
+ summary_factors$slope[(i-22)] <- coef(fit)[2]
+ f <- summary(fit)$fstatistic
+ summary_factors$p_value[(i-22)] <- pf(f[1], f[2], f[3], lower.tail=F)
+ summary_factors$r_squared[(i-22)] <- summary(fit)$adj.r.squared
+}
+```
+
+
+
+
+
+
+
diff --git a/modules/data.remote/DESCRIPTION b/modules/data.remote/DESCRIPTION
index bff565d6969..b3cf0319406 100644
--- a/modules/data.remote/DESCRIPTION
+++ b/modules/data.remote/DESCRIPTION
@@ -1,7 +1,7 @@
Package: PEcAn.data.remote
Type: Package
Title: PEcAn Functions Used for Extracting Remote Sensing Data
-Version: 1.9.0
+Version: 1.9.1
Authors@R: c(person("Mike", "Dietze", role = c("aut"),
email = "dietze@bu.edu"),
person("Bailey", "Morrison", role = c("aut", "cre"),
@@ -10,6 +10,8 @@ Authors@R: c(person("Mike", "Dietze", role = c("aut"),
Author: Mike Dietze, Bailey Morrison
Maintainer: Bailey Morrison
Description: PEcAn module for processing remote data. Python module requirements: requests, json, re, ast, panads, sys. If any of these modules are missing, install using pip install .
+URL: https://pecanproject.github.io
+BugReports: https://github.com/PecanProject/pecan/issues
Imports:
curl,
DBI,
@@ -45,6 +47,8 @@ Suggests:
raster,
reshape,
sf,
+ stats,
+ stringr,
testthat (>= 1.0.2),
tibble,
utils
@@ -54,3 +58,4 @@ LazyLoad: yes
LazyData: FALSE
Encoding: UTF-8
RoxygenNote: 7.3.2
+X-schema.org-keywords: remote-sensing, MODIS, Landsat, earth-observation
diff --git a/modules/data.remote/NAMESPACE b/modules/data.remote/NAMESPACE
index 65e1f2976eb..911acfa091f 100644
--- a/modules/data.remote/NAMESPACE
+++ b/modules/data.remote/NAMESPACE
@@ -17,10 +17,14 @@ export(SMAP_SMP_prep)
export(call_MODIS)
export(download.LandTrendr.AGB)
export(download.NLCD)
-export(download.thredds.AGB)
+export(download_thredds)
export(extract.LandTrendr.AGB)
export(extract_NLCD)
export(extract_phenology_MODIS)
+export(extract_thredds_nc)
+export(gdal_conversion)
+export(get_site_info)
+export(merge_image_tiles)
export(remote_process)
importFrom(foreach,"%do%")
importFrom(foreach,"%dopar%")
diff --git a/modules/data.remote/NEWS.md b/modules/data.remote/NEWS.md
index 0a1602b0147..ad97a8edd82 100644
--- a/modules/data.remote/NEWS.md
+++ b/modules/data.remote/NEWS.md
@@ -1,3 +1,25 @@
+# PEcAn.data.remote 1.9.1
+
+## Added
+
+* New function `extract_thredds_nc` collects data from a Thredds URL into a single dataframe (#2458).
+* New function `get_site_info` looks up in the BETY database all sites found in a settings object (#2458).
+* New function `merge_image_tiles` stitches multiple hdf or tif files into a larger spatial image (#3573, #3617).
+* New function `gdal_conversion` provides an R interface to format conversion utilities provided by the GDAL library (#3585, #3588).
+
+
+## Changed
+
+* `MODIS_LAI_prep` gains two arguments (#3565):
+ - `skip_download` (with default FALSE) to work offline from an existing file named "LAI.csv".
+ - `boundary` (with default NULL, ie no effect) to set upper and lower quantiles for trimming LAI data
+* `download.thredds.AGB` renamed to `download_thredds` (#2458).
+* `GEDI_AGB_prep` argument `credential.folder` (with default "~") renamed to `credential_path` (with default "~/.netrc") (#3540).
+* `GEDI_AGB_prep` will use existing local files if present rather than re-download them (#3572).
+* All `*DAAC` functions now take argument `credential_path`, which should be set to the path to a valid `.netrc` file (#3572).
+
+
+
# PEcAn.data.remote 1.9.0
* Refactored GEDI, LAI, and SMAP workflows for more efficient parallel processing
diff --git a/modules/data.remote/R/GEDI_AGB_prep.R b/modules/data.remote/R/GEDI_AGB_prep.R
index f7cb2c1400f..2efc99ba348 100644
--- a/modules/data.remote/R/GEDI_AGB_prep.R
+++ b/modules/data.remote/R/GEDI_AGB_prep.R
@@ -14,7 +14,7 @@
#' @param prerun Character: series of pre-launch shell command before running the shell job (default is NULL).
#' @param num.folder Numeric: the number of batch folders to be created when submitting jobs to the queue.
#' @param cores Numeric: numbers of core to be used for the parallel computation. The default is the maximum current CPU number.
-#' @param credential.folder Character: the physical path to the folder that contains the credential file (.nasadaacapirc).
+#' @param credential_path Character: the physical path to the credential file (.netrc).
#'
#' @return A data frame containing AGB and sd for each site and each time step.
#' @export
@@ -50,7 +50,7 @@ GEDI_AGB_prep <- function(site_info,
prerun = NULL,
num.folder = NULL,
cores = parallel::detectCores(),
- credential.folder = "~") {
+ credential_path = "~/.netrc") {
# convert list to vector.
if (is.list(bbox)) {
bbox <- as.numeric(unlist(bbox))
@@ -70,7 +70,7 @@ GEDI_AGB_prep <- function(site_info,
dir.create(outdir)
}
# detect if we generate the NASA DAAC credential file.
- if (!file.exists(file.path(credential.folder, ".nasadaacapirc"))) {
+ if (!file.exists(file.path(credential_path))) {
PEcAn.logger::logger.info("There is no credential file for NASA DAAC server.")
PEcAn.logger::logger.info("Please create the .nasadaacapirc file within the credential folder.")
PEcAn.logger::logger.info("The first and second lines of the file are the username and password.")
@@ -89,63 +89,70 @@ GEDI_AGB_prep <- function(site_info,
AGB_Output$site_id <- site_info$site_id
# loop over each time point.
for (i in seq_along(time_points)) {
- # create start and end dates.
- start_date <- seq(time_points[i], length.out = 2, by = paste0("-", search_window))[2]
- end_date <- seq(time_points[i], length.out = 2, by = search_window)[2]
- # create the download folder for downloaded GEDI tiles.
- download.path <- file.path(outdir, "download")
- if (!dir.exists(download.path)) {
- dir.create(download.path)
+ # if we have pre-existing output.
+ if (file.exists(file.path(outdir, paste0("agb_", time_points[i], ".rds")))) {
+ agb <- readRDS(file.path(outdir, paste0("agb_", time_points[i], ".rds")))
} else {
+ # create start and end dates.
+ start_date <- seq(time_points[i], length.out = 2, by = paste0("-", search_window))[2]
+ end_date <- seq(time_points[i], length.out = 2, by = search_window)[2]
+ # create the download folder for downloaded GEDI tiles.
+ download.path <- file.path(outdir, "download")
+ if (!dir.exists(download.path)) {
+ dir.create(download.path)
+ } else {
+ # delete previous downloaded files.
+ unlink(download.path, recursive = T)
+ dir.create(download.path)
+ }
+ # download GEDI tiles.
+ files <- NASA_DAAC_download(ul_lat = bbox[4],
+ ul_lon = bbox[1],
+ lr_lat = bbox[3],
+ lr_lon = bbox[2],
+ ncore = cores,
+ from = start_date,
+ to = end_date,
+ data_version = "V2_1",
+ outdir = download.path,
+ doi = "10.3334/ORNLDAAC/2056",
+ just_path = F,
+ credential_path = credential_path)
+ # if we want to submit jobs to the queue.
+ if (batch) {
+ if (is.null(num.folder)) {
+ PEcAn.logger::logger.info("Please provide the number of batch folders if you want to submit jobs to the queue!")
+ return(NULL)
+ }
+ which.point.in.which.file <- GEDI_L4A_Finder_batch(files = files,
+ outdir = outdir,
+ site_info = site_info,
+ num.folder = as.numeric(num.folder),
+ buffer = as.numeric(buffer),
+ cores = as.numeric(cores),
+ prerun = prerun)
+ agb <- GEDI_L4A_2_mean_var.batch(site_info = site_info,
+ outdir = outdir,
+ which.point.in.which.file = which.point.in.which.file,
+ num.folder = as.numeric(num.folder),
+ buffer = as.numeric(buffer),
+ cores = as.numeric(cores),
+ prerun = prerun)
+ } else {
+ # if we want to run the job locally.
+ which.point.in.which.file <- GEDI_L4A_Finder_batch(files = files,
+ site_info = site_info,
+ buffer = as.numeric(buffer),
+ cores = as.numeric(cores))
+ agb <- GEDI_L4A_2_mean_var.batch(site_info = site_info,
+ which.point.in.which.file = which.point.in.which.file,
+ buffer = as.numeric(buffer),
+ cores = as.numeric(cores))
+ }
+ saveRDS(agb, file = file.path(outdir, paste0("agb_", time_points[i], ".rds")))
# delete previous downloaded files.
unlink(download.path, recursive = T)
- dir.create(download.path)
- }
- # download GEDI tiles.
- files <- NASA_DAAC_download(ul_lat = bbox[4],
- ul_lon = bbox[1],
- lr_lat = bbox[3],
- lr_lon = bbox[2],
- ncore = cores,
- from = start_date,
- to = end_date,
- outdir = download.path,
- doi = "10.3334/ORNLDAAC/2056",
- just_path = F,
- credential.folder = credential.folder)
- # if we want to submit jobs to the queue.
- if (batch) {
- if (is.null(num.folder)) {
- PEcAn.logger::logger.info("Please provide the number of batch folders if you want to submit jobs to the queue!")
- return(NULL)
- }
- which.point.in.which.file <- GEDI_L4A_Finder_batch(files = files,
- outdir = outdir,
- site_info = site_info,
- num.folder = as.numeric(num.folder),
- buffer = as.numeric(buffer),
- cores = as.numeric(cores),
- prerun = prerun)
- agb <- GEDI_L4A_2_mean_var.batch(site_info = site_info,
- outdir = outdir,
- which.point.in.which.file = which.point.in.which.file,
- num.folder = as.numeric(num.folder),
- buffer = as.numeric(buffer),
- cores = as.numeric(cores),
- prerun = prerun)
- } else {
- # if we want to run the job locally.
- which.point.in.which.file <- GEDI_L4A_Finder_batch(files = files,
- site_info = site_info,
- buffer = as.numeric(buffer),
- cores = as.numeric(cores))
- agb <- GEDI_L4A_2_mean_var.batch(site_info = site_info,
- which.point.in.which.file = which.point.in.which.file,
- buffer = as.numeric(buffer),
- cores = as.numeric(cores))
}
- # delete previous downloaded files.
- unlink(download.path, recursive = T)
# loop over sites.
for (j in seq_len(nrow(agb))) {
# skip NA observations.
diff --git a/modules/data.remote/R/MODIS_LAI_prep.R b/modules/data.remote/R/MODIS_LAI_prep.R
index 9f18b5ffb60..d3f0d247563 100644
--- a/modules/data.remote/R/MODIS_LAI_prep.R
+++ b/modules/data.remote/R/MODIS_LAI_prep.R
@@ -6,7 +6,7 @@
#' @param search_window numeric: search window for locate available LAI values.
#' @param export_csv boolean: decide if we want to export the CSV file.
#' @param sd_threshold numeric or character: for filtering out any estimations with unrealistic high standard error, default is 20. The QC check will be skipped if it's set as NULL.
-#' @param skip.download boolean: determine if we want to use existing LAI.csv file and skip the MODIS LAI download part.
+#' @param skip_download boolean: determine if we want to use existing LAI.csv file and skip the MODIS LAI download part.
#' @param boundary numeric vector or list: the upper and lower quantiles for filtering out noisy LAI values (e.g., c(0.05, 0.95) or list(0.05, 0.95)). The default is NULL.
#'
#' @return A data frame containing LAI and sd for each site and each time step.
@@ -14,7 +14,7 @@
#'
#' @author Dongchen Zhang
#' @importFrom magrittr %>%
-MODIS_LAI_prep <- function(site_info, time_points, outdir = NULL, search_window = 30, export_csv = FALSE, sd_threshold = 20, skip.download = TRUE, boundary = NULL){
+MODIS_LAI_prep <- function(site_info, time_points, outdir = NULL, search_window = 30, export_csv = FALSE, sd_threshold = 20, skip_download = FALSE, boundary = NULL){
# unlist boundary if it's passing from the assembler function.
if (is.list(boundary)) {
boundary <- as.numeric(unlist(boundary))
@@ -56,8 +56,9 @@ MODIS_LAI_prep <- function(site_info, time_points, outdir = NULL, search_window
if (!is.null(boundary)) {
Previous_CSV <- MODIS_LAI_ts_filter(Previous_CSV, boundary = boundary)
}
- LAI_Output <- matrix(NA, length(site_info$site_id), 2*length(time_points)+1) %>%
- `colnames<-`(c("site_id", paste0(time_points, "_LAI"), paste0(time_points, "_SD"))) %>% as.data.frame()#we need: site_id, LAI, std, target time point.
+ LAI_Output <- matrix(NA, length(site_info$site_id), 2*length(time_points)+1) %>%
+ `colnames<-`(c("site_id", paste0(time_points, "_LAI"), paste0(time_points, "_SD"))) %>%
+ as.data.frame()#we need: site_id, LAI, std, target time point.
LAI_Output$site_id <- site_info$site_id
#Calculate LAI for each time step and site.
#loop over time and site
@@ -81,14 +82,15 @@ MODIS_LAI_prep <- function(site_info, time_points, outdir = NULL, search_window
LAI_Output[, paste0(t, "_SD")] <- LAI.list[[i]][,paste0(t, "_SD")]
}
}else{#we don't have any previous downloaded CSV file.
- LAI_Output <- matrix(NA, length(site_info$site_id), 2*length(time_points)+1) %>%
- `colnames<-`(c("site_id", paste0(time_points, "_LAI"), paste0(time_points, "_SD"))) %>% as.data.frame()#we need: site_id, LAI, std, target time point.
+ LAI_Output <- matrix(NA, length(site_info$site_id), 2*length(time_points)+1) %>%
+ `colnames<-`(c("site_id", paste0(time_points, "_LAI"), paste0(time_points, "_SD"))) %>%
+ as.data.frame()#we need: site_id, LAI, std, target time point.
LAI_Output$site_id <- site_info$site_id
}
#only Site that has NA for any time points need to be downloaded.
new_site_info <- site_info %>% purrr::map(function(x)x[!stats::complete.cases(LAI_Output)])
#TODO: only download data for specific date when we have missing data.
- if(length(new_site_info$site_id) != 0 && !skip.download){
+ if(length(new_site_info$site_id) != 0 && !skip_download){
product <- "MCD15A3H"
PEcAn.logger::logger.info("Extracting LAI mean products!")
lai_mean <- split(as.data.frame(new_site_info), seq(nrow(as.data.frame(new_site_info)))) %>%
@@ -244,13 +246,14 @@ MODIS_LAI_ts_filter <- function(lai.csv, boundary = c(0.05, 0.95)) {
#' @param to character: the end time for searching the MODIS products.
#' @param download.outdir character: Where the MODIS tiles will be stored.
#' @param csv.outdir character: Where the final CSV file will be stored.
+#' @param credential_path Character: physical path to the credential file (.netrc file).
#'
#' @return A data frame containing LAI and sd for each site and each time step.
#' @export
#'
#' @author Dongchen Zhang
#' @importFrom magrittr %>%
-Prep.MODIS.CSV.from.DAAC <- function(site_info, extent, from, to, download.outdir, csv.outdir) {
+Prep.MODIS.CSV.from.DAAC <- function(site_info, extent, from, to, download.outdir, csv.outdir, credential_path) {
# load previous CSV file.
if (file.exists(file.path(csv.outdir, "LAI.csv"))) {
previous.csv <- utils::read.csv(file.path(csv.outdir, "LAI.csv"),
@@ -263,7 +266,7 @@ Prep.MODIS.CSV.from.DAAC <- function(site_info, extent, from, to, download.outdi
doParallel::registerDoParallel(cl)
# reproject site locations to MODIS projection.
site.ids <- site_info$site_id
- site.locs <- cbind(site_info$lon, site_info$lon) %>%
+ site.locs <- cbind(site_info$lon, site_info$lat) %>%
`colnames<-`(c("lon","lat")) %>%
`rownames<-`(site.ids)
# create vector based on coordinates and MODIS projection.
@@ -273,18 +276,17 @@ Prep.MODIS.CSV.from.DAAC <- function(site_info, extent, from, to, download.outdi
pts.reproj <- sp::spTransform(pts, "+proj=sinu +a=6371007.181 +b=6371007.181 +units=m")
coords.reproj <- sp::coordinates(pts.reproj) %>% `colnames<-`(c("x", "y"))
# download data.
- metadata <- NASA_DAAC_download(ul_lat = extent[4],
- ul_lon = extent[1],
- lr_lat = extent[3],
- lr_lon = extent[2],
- from = from,
- to = to,
- just_path = F,
- outdir = download.outdir,
- doi = "10.5067/MODIS/MCD15A3H.061",
- ncore = parallel::detectCores()-1)
- # grab file paths for downloaded hdf files.
- modis.out <- list.files(download.outdir, full.names = T, pattern = "*.hdf")
+ modis.out <- NASA_DAAC_download(ul_lat = extent[4],
+ ul_lon = extent[1],
+ lr_lat = extent[3],
+ lr_lon = extent[2],
+ from = from,
+ to = to,
+ just_path = F,
+ outdir = download.outdir,
+ doi = "10.5067/MODIS/MCD15A3H.061",
+ ncore = parallel::detectCores()-1,
+ credential_path = credential_path)
# grab id for each file.
ids <- basename(modis.out)
# split string.
diff --git a/modules/data.remote/R/NASA_DAAC_download.R b/modules/data.remote/R/NASA_DAAC_download.R
index d193e2d86bd..b86d2533952 100644
--- a/modules/data.remote/R/NASA_DAAC_download.R
+++ b/modules/data.remote/R/NASA_DAAC_download.R
@@ -11,9 +11,9 @@
#' "yyyy-mm-dd".
#' @param outdir Character: path of the directory in which to save the
#' downloaded files. Default is the current work directory(getwd()).
-#' @param band Character: the band name of data to be requested.
-#' @param credential.folder Character: physical path to the folder that contains
-#' the credential file. The default is NULL.
+#' @param band Character: the band name (or vector of band names) of data to be requested. Default is NULL.
+#' @param data_version Character: the version (typically starts with V) of data to be requested. Default is NULL.
+#' @param credential_path Character: physical path to the credential file (.netrc file). The default is NULL.
#' @param doi Character: data DOI on the NASA DAAC server, it can be obtained
#' directly from the NASA ORNL DAAC data portal (e.g., GEDI L4A through
#' https://daac.ornl.gov/cgi-bin/dsviewer.pl?ds_id=2056).
@@ -24,6 +24,7 @@
#'
#' @examples
#' \dontrun{
+#' # SHIFT Hyper-spectral data.
#' ul_lat <- 35
#' ul_lon <- -121
#' lr_lat <- 33
@@ -31,7 +32,6 @@
#' from <- "2022-02-23"
#' to <- "2022-05-30"
#' doi <- "10.3334/ORNLDAAC/2183"
-#' outdir <- "/projectnb/dietzelab/dongchen/SHIFT/test_download"
#' paths <- NASA_DAAC_download(ul_lat = ul_lat,
#' ul_lon = ul_lon,
#' lr_lat = lr_lat,
@@ -40,6 +40,103 @@
#' to = to,
#' doi = doi,
#' just_path = T)
+#' # GEDI level 4A data.
+#' ul_lat <- 85
+#' ul_lon <- -179
+#' lr_lat <- 7
+#' lr_lon <- -20
+#' from <- "2020-01-01"
+#' to <- "2020-12-31"
+#' doi <- "10.3334/ORNLDAAC/2056"
+#' paths <- NASA_DAAC_download(ul_lat = ul_lat,
+#' ul_lon = ul_lon,
+#' lr_lat = lr_lat,
+#' lr_lon = lr_lon,
+#' from = from,
+#' to = to,
+#' data_version = "V2_1",
+#' doi = doi,
+#' just_path = T)
+#' # MODIS LAI data.
+#' ul_lat <- 85
+#' ul_lon <- -179
+#' lr_lat <- 7
+#' lr_lon <- -20
+#' from <- "2020-01-01"
+#' to <- "2020-01-31"
+#' doi <- "10.5067/MODIS/MCD15A3H.061"
+#' paths <- NASA_DAAC_download(ul_lat = ul_lat,
+#' ul_lon = ul_lon,
+#' lr_lat = lr_lat,
+#' lr_lon = lr_lon,
+#' from = from,
+#' to = to,
+#' doi = doi,
+#' just_path = T)
+#' # SMAP Soil Moisture data.
+#' ul_lat <- 85
+#' ul_lon <- -179
+#' lr_lat <- 7
+#' lr_lon <- -20
+#' from <- "2020-01-01"
+#' to <- "2020-01-31"
+#' doi <- "10.5067/02LGW4DGJYRX"
+#' paths <- NASA_DAAC_download(ul_lat = ul_lat,
+#' ul_lon = ul_lon,
+#' lr_lat = lr_lat,
+#' lr_lon = lr_lon,
+#' from = from,
+#' to = to,
+#' doi = doi,
+#' just_path = T)
+#' # GLANCE Phenology and LC data.
+#' ul_lat <- 85
+#' ul_lon <- -179
+#' lr_lat <- 7
+#' lr_lon <- -20
+#' from <- "2019-01-01"
+#' to <- "2019-12-31"
+#' doi <- "10.5067/MEaSUREs/GLanCE/GLanCE30.001"
+#' paths <- NASA_DAAC_download(ul_lat = ul_lat,
+#' ul_lon = ul_lon,
+#' lr_lat = lr_lat,
+#' lr_lon = lr_lon,
+#' from = from,
+#' to = to,
+#' doi = doi,
+#' just_path = T)
+#' # HLS reflectance data.
+#' ul_lat <- 35
+#' ul_lon <- -121
+#' lr_lat <- 33
+#' lr_lon <- -117
+#' from <- "2022-02-23"
+#' to <- "2022-05-30"
+#' doi <- "10.5067/HLS/HLSS30.002"
+#' paths <- NASA_DAAC_download(ul_lat = ul_lat,
+#' ul_lon = ul_lon,
+#' lr_lat = lr_lat,
+#' lr_lon = lr_lon,
+#' from = from,
+#' to = to,
+#' doi = doi,
+#' just_path = T)
+#' # HLS Phenology data.
+#' ul_lat <- 35
+#' ul_lon <- -121
+#' lr_lat <- 33
+#' lr_lon <- -117
+#' from <- "2019-01-01"
+#' to <- "2019-12-31"
+#' doi <- "10.5067/Community/MuSLI/MSLSP30NA.011"
+#' paths <- NASA_DAAC_download(ul_lat = ul_lat,
+#' ul_lon = ul_lon,
+#' lr_lat = lr_lat,
+#' lr_lon = lr_lon,
+#' from = from,
+#' to = to,
+#' doi = doi,
+#' just_path = T)
#' }
#' @author Dongchen Zhang
#' @importFrom foreach %dopar%
@@ -52,16 +149,24 @@ NASA_DAAC_download <- function(ul_lat,
to,
outdir = getwd(),
band = NULL,
- credential.folder = NULL,
+ data_version = NULL,
+ credential_path = NULL,
doi,
just_path = FALSE) {
# Determine if we have enough inputs.
if (is.null(outdir) & !just_path) {
- PEcAn.logger::logger.info("Please provide outdir if you want to download the file.")
- return(0)
+ message("Please provide outdir if you want to download the file.")
+ return(NA)
}
# setup DAAC Credentials.
- DAAC_Set_Credential(folder.path = credential.folder)
+ # detect if we need the credential or not.
+ if (!just_path & is.null(credential_path)) {
+ PEcAn.logger::logger.info("Please provide the physical path to the credential file!")
+ return(NA)
+ }
+ if (!just_path) {
+ netrc <- getnetrc(credential_path)
+ }
# setup arguments for URL.
daterange <- c(from, to)
# grab provider and concept id from CMR based on DOI.
@@ -89,36 +194,42 @@ NASA_DAAC_download <- function(ul_lat,
granules <- result$feed$entry
if (length(granules) == 0)
break
- # if it's GLANCE product.
- # GLANCE product has special data archive.
- if (doi == "10.5067/MEaSUREs/GLanCE/GLanCE30.001") {
- granules_href <- c(granules_href, sapply(granules, function(x) {
- links <- c()
- for (j in seq_along(x$links)) {
- links <- c(links, x$links[[j]]$href)
- }
- return(links)
- }))
- } else {
- granules_href <- c(granules_href, sapply(granules, function(x) x$links[[1]]$href))
- }
+ # grab raw URLs from the records.
+ granules_href <- c(granules_href, sapply(granules, function(x) {sapply(x$links,function(y) y$href)}))
# grab specific band.
if (!is.null(band)) {
- granules_href <- granules_href[which(grepl(band, granules_href, fixed = T))]
+ granules_href <- granules_href[which(grepl(band, basename(granules_href), fixed = T))]
+ }
+ # grab specific data version
+ if (!is.null(data_version)) {
+ granules_href <- granules_href[which(grepl(data_version, granules_href, fixed = T))]
}
page <- page + 1
}
}
+ # if no files are found.
+ if (is.null(granules_href)) {
+ PEcAn.logger::logger.info("No files found. Please check the spatial and temporal search window.")
+ return(NA)
+ }
+ # remove non-target files (e.g. s3)
+ granules_href <- granules_href[which(grepl("https*", granules_href))]
# remove duplicated files.
inds <- which(duplicated(basename(granules_href)))
if (length(inds) > 0) {
granules_href <- granules_href[-inds]
}
# remove non-image files.
- inds <- which(grepl(".h5", basename(granules_href)) |
- grepl(".tif", basename(granules_href)) |
- grepl(".hdf", basename(granules_href)))
+ inds <- which(stringr::str_ends(basename(granules_href), ".h5") |
+ stringr::str_ends(basename(granules_href), ".tif") |
+ stringr::str_ends(basename(granules_href), ".hdf") |
+ stringr::str_ends(basename(granules_href), ".nc"))
granules_href <- granules_href[inds]
+ # remove URLs that have more than one dots in the basename.
+ inds <- which(nchar(gsub("[^.]", "", basename(granules_href))) > 1)
+ if (length(inds) > 0) {
+ granules_href <- granules_href[-inds]
+ }
# detect existing files if we want to download the files.
if (!just_path) {
same.file.inds <- which(basename(granules_href) %in% list.files(outdir))
@@ -133,7 +244,7 @@ NASA_DAAC_download <- function(ul_lat,
if (!just_path) {
# check if the doSNOW package is available.
if ("try-error" %in% class(try(find.package("doSNOW")))) {
- PEcAn.logger::logger.info("The doSNOW package is not installed.")
+ message("The doSNOW package is not installed.")
return(NA)
}
# printing out parallel environment.
@@ -158,19 +269,19 @@ NASA_DAAC_download <- function(ul_lat,
# if there is a problem in downloading file.
while ("try-error" %in% class(try(
response <-
- httr::GET(
- granules_href[i],
- httr::write_disk(file.path(outdir, basename(granules_href)[i]), overwrite = T),
- httr::authenticate(user = Sys.getenv("ed_un"),
- password = Sys.getenv("ed_pw"))
- )
+ httr::GET(
+ granules_href[i],
+ httr::write_disk(file.path(outdir, basename(granules_href)[i]), overwrite = T),
+ httr::config(netrc = TRUE, netrc_file = netrc),
+ httr::set_cookies("LC" = "cookies")
+ )
))){
response <-
httr::GET(
granules_href[i],
httr::write_disk(file.path(outdir, basename(granules_href)[i]), overwrite = T),
- httr::authenticate(user = Sys.getenv("ed_un"),
- password = Sys.getenv("ed_pw"))
+ httr::config(netrc = TRUE, netrc_file = netrc),
+ httr::set_cookies("LC" = "cookies")
)
}
# Check if we can successfully open the downloaded file.
@@ -178,7 +289,7 @@ NASA_DAAC_download <- function(ul_lat,
if (grepl(pattern = ".h5", x = basename(granules_href)[i], fixed = T)) {
# check if the hdf5r package exists.
if ("try-error" %in% class(try(find.package("hdf5r")))) {
- PEcAn.logger::logger.info("The hdf5r package is not installed.")
+ message("The hdf5r package is not installed.")
return(NA)
}
while ("try-error" %in% class(try(hdf5r::H5File$new(file.path(outdir, basename(granules_href)[i]), mode = "r"), silent = T))) {
@@ -186,21 +297,22 @@ NASA_DAAC_download <- function(ul_lat,
httr::GET(
granules_href[i],
httr::write_disk(file.path(outdir, basename(granules_href)[i]), overwrite = T),
- httr::authenticate(user = Sys.getenv("ed_un"),
- password = Sys.getenv("ed_pw"))
+ httr::config(netrc = TRUE, netrc_file = netrc),
+ httr::set_cookies("LC" = "cookies")
)
}
# if it's HDF4 or regular GeoTIFF file.
} else if (grepl(pattern = ".tif", x = basename(granules_href)[i], fixed = T) |
grepl(pattern = ".tiff", x = basename(granules_href)[i], fixed = T) |
- grepl(pattern = ".hdf", x = basename(granules_href)[i], fixed = T)) {
+ grepl(pattern = ".hdf", x = basename(granules_href)[i], fixed = T) |
+ grepl(pattern = ".nc", x = basename(granules_href)[i], fixed = T)) {
while ("try-error" %in% class(try(terra::rast(file.path(outdir, basename(granules_href)[i])), silent = T))) {
response <-
httr::GET(
granules_href[i],
httr::write_disk(file.path(outdir, basename(granules_href)[i]), overwrite = T),
- httr::authenticate(user = Sys.getenv("ed_un"),
- password = Sys.getenv("ed_pw"))
+ httr::config(netrc = TRUE, netrc_file = netrc),
+ httr::set_cookies("LC" = "cookies")
)
}
}
@@ -215,8 +327,8 @@ NASA_DAAC_download <- function(ul_lat,
httr::GET(
granules_href[i],
httr::write_disk(file.path(outdir, basename(granules_href)[i]), overwrite = T),
- httr::authenticate(user = Sys.getenv("ed_un"),
- password = Sys.getenv("ed_pw"))
+ httr::config(netrc = TRUE, netrc_file = netrc),
+ httr::set_cookies("LC" = "cookies")
)
}
}
@@ -298,30 +410,33 @@ NASA_CMR_finder <- function(doi) {
return(as.list(data.frame(cbind(provider, concept_id))))
}
-#' Set NASA DAAC credentials to the current environment.
+#' Set NASA DAAC credentials to the .netrc file.
#'
-#' @param replace Boolean: determine if we want to replace the current credentials from the environment. The default is FALSE.
-#' @param folder.path Character: physical path to the folder that contains the credential file. The default is NULL.
+#' @param dl_path Character: physical path to the .netrc credential file.
#'
#' @author Dongchen Zhang
-DAAC_Set_Credential <- function(replace = FALSE, folder.path = NULL) {
- if (replace) {
- PEcAn.logger::logger.info("Replace previous stored NASA DAAC credentials.")
- }
- # if we have the credential file.
- if (!is.null(folder.path)) {
- if (file.exists(file.path(folder.path, ".nasadaacapirc"))) {
- key <- readLines(file.path(folder.path, ".nasadaacapirc"))
- Sys.setenv(ed_un = key[1], ed_pw = key[2])
- }
- }
- # otherwise we will type the credentials manually.
- if (replace | nchar(Sys.getenv("ed_un")) == 0 | nchar(Sys.getenv("ed_un")) == 0) {
- Sys.setenv(ed_un = sprintf(
- getPass::getPass(msg = "Enter NASA Earthdata Login Username \n (or create an account at urs.earthdata.nasa.gov) :")
- ),
- ed_pw = sprintf(
- getPass::getPass(msg = "Enter NASA Earthdata Login Password:")
- ))
+getnetrc <- function (dl_path) {
+ netrc <- path.expand(dl_path)
+ if (file.exists(netrc) == FALSE ||
+ any(grepl("urs.earthdata.nasa.gov",
+ readLines(netrc))) == FALSE) {
+ netrc_conn <- file(netrc, open = "at")
+ writeLines(c(
+ "machine urs.earthdata.nasa.gov",
+ sprintf(
+ "login %s",
+ getPass::getPass(msg = "Enter NASA Earthdata Login Username \n (or create an account at urs.earthdata.nasa.gov) :")
+ ),
+ sprintf(
+ "password %s",
+ getPass::getPass(msg = "Enter NASA Earthdata Login Password:")
+ )
+ ),
+ netrc_conn)
+ close(netrc_conn)
+ message(
+ "A netrc file with your Earthdata Login credentials was stored in the output directory "
+ )
}
+ return(netrc)
}
\ No newline at end of file
diff --git a/modules/data.remote/R/SMAP_SMP_prep.R b/modules/data.remote/R/SMAP_SMP_prep.R
index 0d1b3b9ee58..0c184a489b0 100644
--- a/modules/data.remote/R/SMAP_SMP_prep.R
+++ b/modules/data.remote/R/SMAP_SMP_prep.R
@@ -72,7 +72,7 @@ SMAP_SMP_prep <- function(site_info, start_date, end_date, time_points,
time_points <- time_points[which(lubridate::year(time_points)>=2015)] #filter out any time points that are before 2015
#initialize SMAP_Output
SMAP_Output <- matrix(NA, length(site_info$site_id), 2*length(time_points)+1) %>%
- `colnames<-`(c("site_id", paste0(time_points, "_SoilMoist"), paste0(time_points, "_SD"))) %>% as.data.frame()#we need: site_id, LAI, std, target time point.
+ `colnames<-`(c("site_id", paste0(time_points, "_SoilMoistFrac"), paste0(time_points, "_SD"))) %>% as.data.frame()#we need: site_id, SoilMoistFrac, std, target time point.
SMAP_Output$site_id <- site_info$site_id
#Calculate SMAP for each time step and site.
#loop over time and site
@@ -88,15 +88,15 @@ SMAP_SMP_prep <- function(site_info, start_date, end_date, time_points,
out.t <- rbind(out.t, list(mean = NA, sd = NA))
}
}
- out.t %>% purrr::set_names(c(paste0(t, "_SoilMoist"), paste0(t, "_SD")))
+ out.t %>% purrr::set_names(c(paste0(t, "_SoilMoistFrac"), paste0(t, "_SD")))
}, .progress = T)
for (i in seq_along(time_points)) {
t <- time_points[i]#otherwise the t will be number instead of date.
- SMAP_Output[, paste0(t, "_SoilMoist")] <- SMAP.list[[i]][,paste0(t, "_SoilMoist")]
+ SMAP_Output[, paste0(t, "_SoilMoistFrac")] <- SMAP.list[[i]][,paste0(t, "_SoilMoistFrac")]
SMAP_Output[, paste0(t, "_SD")] <- SMAP.list[[i]][,paste0(t, "_SD")]
}
PEcAn.logger::logger.info("SMAP SMP Prep Completed!")
- list(SMP_Output = SMAP_Output, time_points = time_points, var = "SoilMoist")
+ list(SMP_Output = SMAP_Output, time_points = time_points, var = "SoilMoistFrac")
}
#' Prepare SMAP soil moisture profile (SMP) data from the NASA DAAC server for the SDA workflow.
@@ -108,13 +108,14 @@ SMAP_SMP_prep <- function(site_info, start_date, end_date, time_points,
#' @param to character: the end time for searching the MODIS products.
#' @param download.outdir character: Where the MODIS tiles will be stored.
#' @param csv.outdir character: Where the final CSV file will be stored.
+#' @param credential_path Character: physical path to the credential file (.netrc file).
#'
#' @return A data frame containing SMP and sd for each site and each time step.
#' @export
#'
#' @author Dongchen Zhang
#' @importFrom magrittr %>%
-Prep.SMAP.CSV.from.DAAC <- function(site_info, extent, from, to, download.outdir, csv.outdir) {
+Prep.SMAP.CSV.from.DAAC <- function(site_info, extent, from, to, download.outdir, csv.outdir, credential_path) {
# SMAP CRS, EPSG:6933.
smap.crs <- "+proj=cea +lat_ts=30 +lon_0=0 +x_0=0 +y_0=0 +datum=WGS84 +units=m +no_defs"
# load previous CSV file.
@@ -143,10 +144,10 @@ Prep.SMAP.CSV.from.DAAC <- function(site_info, extent, from, to, download.outdir
to = to,
just_path = F,
outdir = download.outdir,
- doi = "10.5067/LWJ6TF5SZRG3",
- ncore = parallel::detectCores()-1)
- smap.out <- metadata$path
- file <- smap.out[1] # select the first file.
+ doi = "10.5067/02LGW4DGJYRX",
+ ncore = parallel::detectCores()-1,
+ credential_path = credential_path)
+ file <- metadata[1] # select the first file.
# grab smap extents, it's from the ArcGIS report using the SMAP H5 file.
smap.ext <- c(-17363027.292480, 17367529.945160, -7319045.227051, 7310037.171387) %>% terra::ext()
# convert h5 file to raster.
diff --git a/modules/data.remote/R/download.thredds.AVHRR.R b/modules/data.remote/R/download.thredds.AVHRR.R
new file mode 100755
index 00000000000..ffaef305661
--- /dev/null
+++ b/modules/data.remote/R/download.thredds.AVHRR.R
@@ -0,0 +1,202 @@
+##' download_thredds_AVHRR
+##'
+##' @param outdir file location to place output
+##' @param site_info list of information with the site_id, site_name, lat, lon, and time_zone.
+##' Derived from BETY using a PEcAn .xml settings file with site information.
+##' Can use the get_site_info function to generate this list.
+##' @param dates vector of start and end date for dataset as YYYYmmdd, YYYY-mm-dd, YYYYjjj, or date object.
+##' @param varid character vector of shorthand variable name. i.e. LAI
+##' @param dir_url catalog url of data from ncei.noaa.gov/thredds website
+##' @param data_url opendap url of data from ncei.noaa.gov/thredds website
+##' @param run_parallel Logical. Download and extract files in parallel?
+##'
+##' @return data.frame summarizing the results of the function call
+##'
+##' @examples
+##' \dontrun{
+##' results <- download_thredds(
+##' site_info = site_info,
+##' dates = c("19950201", "19961215"),
+##' varid = "LAI",
+##' dir_url = "https://www.ncei.noaa.gov/thredds/catalog/cdr/lai/files",
+##' data_url = "https://www.ncei.noaa.gov/thredds/dodsC/cdr/lai/files",
+##' run_parallel = FALSE,
+##' outdir = NULL)
+##' }
+##' @importFrom foreach %do% %dopar%
+##' @noRd
+##' @author Bailey Morrison
+##'
+download_thredds_AVHRR <- function(site_info, dates, varid, dir_url, data_url,
+ run_parallel = FALSE, outdir = NULL) {
+ # until the issues with parallel runs are fixed.
+ run_parallel <- FALSE
+
+ #### check that dates are within the date range of the dataset
+
+ # first make sure dates are in date format. Correct if not.
+ if (!(lubridate::is.Date(dates))) {
+ if (!(is.character(dates))) {
+ dates <- as.character(dates)
+ }
+ if (length(grep(dates, pattern = "-")) > 0) {
+ dates <- c(as.Date(dates[1], "%Y-%m-%d"), as.Date(dates[2], "%Y-%m-%d"))
+ } else {
+ dates <- c(as.Date(dates[1], "%Y%m%d"), as.Date(dates[2], "%Y%m%d"))
+ }
+ # Julian date (YYYYjjj)
+ if (any(nchar(dates) == 7)) {
+ dates <- c(as.Date(dates[1], "%Y%j"), as.Date(dates[2], "%Y%j"))
+ }
+ }
+
+ date_range <- unique(lubridate::year(seq(dates[1], dates[2], by = "1 year")))
+
+ output <- data.frame()
+ if (!(is.null(dir_url))) {
+ for (i in seq_along(date_range)) {
+ result <- readLines(
+ paste(dir_url, date_range[i], "/catalog.html", sep = "/")
+ )
+ files <- XML::getHTMLLinks(result)
+
+ index_dates <- regexpr(pattern = "_[0-9]{8}_", files)
+ files <- files[-(which(index_dates < 0))]
+ index_dates <- index_dates[which(index_dates > 0)]
+
+ dates_avail <- as.Date(
+ substr(files, index_dates + 1, index_dates + 8),
+ "%Y%m%d"
+ )
+
+ if (!(is.null(data_url))) {
+ urls <- sort(paste(
+ data_url,
+ substr(dates_avail, 1, 4),
+ "/",
+ basename(files),
+ sep = ""
+ ))
+
+ if (run_parallel) {
+ # ncores <- parallel::detectCores(all.tests = FALSE, logical = TRUE)
+ # This is a failsafe for computers with low numbers of CPUS
+ # to reduce risk of blowing RAM.
+ # if (ncores >= 3)
+ # {
+ # # failsafe in case someone has a computer with 2-4 nodes.
+ # ncores <- ncores-2
+ # }
+ # # THREDDS has a 10 job limit. Will fail if you try to download more
+ # than 10 values at a time
+ # if (ncores >= 10)
+ # {
+ # ncores <- 9 # went 1 less because it still fails sometimes
+ # }
+ # cl <- parallel::makeCluster(ncores, outfile="")
+ # doParallel::registerDoParallel(cl)
+ # out <- foreach(i = urls, .combine = rbind) %dopar%
+ # extract_thredds_nc_AVHRR(site_info = site_info, url = i)
+ # parallel::stopCluster(cl)
+ } else {
+ # start_time <- Sys.time()
+ j <- NULL # avoids R pkg checks "no visible binding" complaint below
+ out <- foreach::foreach(j = urls, .combine = rbind) %do%
+ extract_thredds_nc_AVHRR(site_info, url = j)
+ # end_time <- Sys.time()
+ # end_time - start_time
+ }
+ output <- rbind(output, out)
+
+ if (!(is.null(outdir))) {
+ # this will need to be changed in the future if users want to be able
+ # to save data they haven't already extracted at different sites/dates
+ utils::write.csv(
+ out,
+ file = paste(outdir, "/THREDDS_", varid, "_",
+ dates[1], "-", dates[2], ".csv",
+ sep = "")
+ )
+ }
+ }
+ }
+ }
+ return(output)
+}
+
+
+##' extract_thredds_nc_AVHRR
+##'
+##' @param site_info list containing site_id, site_name, lat, lon, time_zone.
+##' Derived from BETY using a PEcAn .xml settings file with site information.
+##' Can use the get_site_info function to generate this list.
+##' @param url a THREDDS url of a .nc file to extract data from.
+##' @param varid character vector of shorthand variable name. i.e. LAI
+##'
+##'
+##' @return a dataframe with the values for each date/site combination
+##' from a THREDDS file
+##'
+##' @examples
+##' \dontrun{
+##' thredds_url = paste0( # breaking up long URL for readability
+##' "https://www.ncei.noaa.gov/thredds/dodsC/cdr/lai/files/1995/",
+##' "AVHRR-Land_v005_AVH15C1_NOAA-14_19950201_c20180831220722.nc")
+##' output <- extract_thredds_nc_AVHRR(
+##' site_info = site_info,
+##' url = thredds_url)
+##' }
+##' @noRd
+##' @author Bailey Morrison
+##'
+extract_thredds_nc_AVHRR <- function(site_info, url, varid) {
+ index <- regexpr(pattern = "_[0-9]{8}_", url)
+ date <- as.Date(substr(url, index + 1, index + 8), "%Y%m%d")
+
+ mylats <- site_info$lat
+ mylons <- site_info$lon
+ sites <- site_info$site_id
+
+ # open netcdf file and get the correct variable name
+ # based on varid parameter + var names of netcdf
+ data <- ncdf4::nc_open(url)
+ vars <- names(data$var)
+ var <- vars[grep(vars, pattern = varid, ignore.case = TRUE)]
+
+ # get list of all xy coordinates in netcdf
+ lats <- ncdf4::ncvar_get(data, "latitude")
+ lons <- ncdf4::ncvar_get(data, "longitude")
+
+ # find the cell that site coordinates are located in
+ i <- NULL # avoids R pkg checks "no visible binding" complaint below
+ dist_y <- foreach::foreach(i = mylats, .combine = cbind) %do%
+ sqrt((lats - i)^2)
+ dist_x <- foreach::foreach(i = mylons, .combine = cbind) %do%
+ sqrt((lons - i)^2)
+ y <- foreach::foreach(i = 1:ncol(dist_y), .combine = cbind) %do%
+ which(dist_y[, i] == min(dist_y[, i]), arr.ind = TRUE)
+ x <- foreach::foreach(i = 1:ncol(dist_x), .combine = cbind) %do%
+ which(dist_x[, i] == min(dist_x[, i]), arr.ind = TRUE)
+
+ scale <- data$var[[var]]$scaleFact
+
+ d <- as.vector(
+ foreach::foreach(i = seq_along(x), .combine = rbind) %do%
+ ncdf4::ncvar_get(data, var, start = c(x[i], y[i], 1), count = c(1, 1, 1))
+ )
+
+ info <- as.data.frame(
+ cbind(sites, mylons, mylats, as.character(rep(date, length(mylats))), d),
+ stringsAsFactors = FALSE
+ )
+ names(info) <- c("site_id", "lon", "lat", "date", "value")
+
+ na <- which(is.na(info$value))
+ if (length(na) != length(info$site_id) || length(na) != 0) {
+ info <- info[-na, ]
+ } else {
+ info <- info
+ }
+
+ return(info)
+}
diff --git a/modules/data.remote/R/download.thredds.AVHRR.monthAGG.R b/modules/data.remote/R/download.thredds.AVHRR.monthAGG.R
new file mode 100755
index 00000000000..370b6d0e514
--- /dev/null
+++ b/modules/data.remote/R/download.thredds.AVHRR.monthAGG.R
@@ -0,0 +1,139 @@
+##' download_thredds_AVHRR_monthly
+##'
+##' @param outdir file location to place output
+##' @param site_info list containing site_id, site_name, lat, lon, time_zone.
+##' Derived from BETY using a PEcAn .xml settings file with site information.
+##' Can use the get_site_info function to generate this list.
+##' @param dates vector of start and end date for dataset as YYYYmmdd,
+##' YYYY-mm-dd, YYYYjjj, or date object.
+##' @param varid character vector of shorthand variable name. i.e. LAI
+##' @param dir_url catalog url of data from ncei.noaa.gov/thredds website
+##' @param data_url opendap url of data from ncei.noaa.gov/thredds website
+##' @param run_parallel Logical. Download and extract files in parallel?
+##'
+##' @return data.frame summarizing the results of the function call
+##'
+##' @examples
+##' \dontrun{
+##' results <- download_thredds(
+##' site_info = site_info,
+##' years = c("2000", "2003"),
+##' months = c(6,7,8),
+##' varid = "LAI",
+##' dir_url = "https://www.ncei.noaa.gov/thredds/catalog/cdr/lai/files",
+##' data_url = "https://www.ncei.noaa.gov/thredds/dodsC/cdr/lai/files",
+##' run_parallel = FALSE,
+##' outdir = NULL)
+##' }
+##' @importFrom foreach %do% %dopar%
+##' @noRd
+##' @author Bailey Morrison
+##'
+download_thredds_AVHRR_monthly <- function(site_info,
+ years,
+ months,
+ varid,
+ dir_url,
+ data_url,
+ run_parallel = FALSE,
+ outdir = NULL) {
+ # until the issues with parallel runs are fixed.
+ run_parallel <- FALSE
+
+
+ # assumes there is a max of 31 possible days in a month.
+ # This covers leap years!
+ years_range <- sort(rep(seq(years[1], years[2]), 31))
+
+ if (!(is.null(dir_url))) {
+ output <- data.frame()
+
+ for (i in seq_along(unique(years_range))) {
+ result <- readLines(
+ paste(dir_url, unique(years_range)[i], "/catalog.html", sep = "/")
+ )
+ files <- XML::getHTMLLinks(result)
+
+ index_dates <- regexpr(
+ pattern = paste0(
+ "_[0-9]{4}0[", months[1], "-", months[length(months)], "]{1}[0-9]{2}_"
+ ),
+ files
+ )
+ files <- files[-(which(index_dates < 0))]
+ index_dates <- index_dates[which(index_dates > 0)]
+
+ dates_avail <- as.Date(substr(files, index_dates + 1, index_dates + 8),
+ "%Y%m%d")
+
+ if (!(is.null(data_url))) {
+ urls <- sort(
+ paste(data_url, substr(dates_avail, 1, 4), "/", basename(files),
+ sep = "")
+ )
+
+ if (run_parallel) {
+ # ncores <- parallel::detectCores(all.tests = FALSE, logical = TRUE)
+ # This is a failsafe for computers with low numbers of CPUS to reduce
+ # risk of blowing RAM.
+ # if (ncores >= 3)
+ # {
+ # # failsafe in case someone has a computer with 2-4 nodes.
+ # ncores <- ncores-2
+ # }
+ # # THREDDS has a 10 job limit. Will fail if you try to download more
+ # than 10 values at a time
+ # if (ncores >= 10)
+ # {
+ # ncores <- 9 # went 1 less because it still fails sometimes
+ # }
+ # cl <- parallel::makeCluster(ncores, outfile="")
+ # doParallel::registerDoParallel(cl)
+ # out <- foreach(i = urls, .combine = rbind) %dopar%
+ # extract_thredds_nc_AVHRR(site_info = site_info, url = i,
+ # varid = varid)
+ # parallel::stopCluster(cl)
+ } else {
+ i <- NULL # avoids R pkg checks "no visible binding" complaint below
+ out <- foreach::foreach(i = urls, .combine = rbind) %do%
+ extract_thredds_nc_AVHRR(site_info, url = i, varid = varid)
+
+ # get max LAI for each site instead of all days with missing NA filler
+ test <- foreach::foreach(i = unique(out$site_id),
+ .combine = rbind) %do%
+ max_lai(x = out, site = i)
+ test$date <- lubridate::year(test$date)
+
+ output <- rbind(output, test)
+ }
+ }
+ }
+
+ # if (!(is.null(outdir))) {
+ # # this will need to be changed in the future if users want to be able to
+ # # save data they haven't already extracted at different sites/dates.
+ # utils::write.csv(
+ # output,
+ # file = paste(outdir, "/THREDDS_", varid, "_",
+ # years[1], "-", years[2], "_",months[1], "-",
+ # months[length(months)], ".csv",
+ # sep = "")
+ # )
+ # }
+ return(output)
+ }
+}
+
+
+
+
+max_lai <- function(x, site) {
+ site_info_max <- as.data.frame(
+ x[x$site_id == site, ][1, 1:4],
+ stringsAsFactors = FALSE
+ )
+ site_info_max$max <- as.numeric(
+ max(x[x$site_id == site, ]$value, na.rm = TRUE)
+ )
+ return(site_info_max)
+}
diff --git a/modules/data.remote/R/download.thredds.R b/modules/data.remote/R/download.thredds.R
index d4a5ca4c728..5248e7bb0aa 100755
--- a/modules/data.remote/R/download.thredds.R
+++ b/modules/data.remote/R/download.thredds.R
@@ -1,102 +1,275 @@
-#
-##' @title download.thredds.AGB
-##' @name download.thredds.AGB
-##'
-##' @param outdir Where to place output
-##' @param site_ids What locations to download data at?
-##' @param run_parallel Logical. Download and extract files in parallel?
-##' @param ncores Optional. If run_parallel=TRUE how many cores to use? If left as NULL will select max number -1
-##'
-##' @return data.frame summarize the results of the function call
-##'
+##' get_site_info
+##'
+##' @param settings a PEcAn settings object
+##'
+##'
+##' @return a list of site information derived from BETY using a pecan .xml
+##' settings file with site_id, site_name, lat, lon, and time_zone.
+##'
##' @examples
##' \dontrun{
-##' outdir <- "~/scratch/abg_data/"
-
-##' results <- PEcAn.data.remote::download.thredds.AGB(outdir=outdir,
-##' site_ids = c(676, 678, 679, 755, 767, 1000000030, 1000000145, 1000025731),
+##' settings <- PEcAn.settings::read.settings("/path/to/pecan.xml")
+##' site_info <- get_site_info(settings)
+##' results <- download_thredds(
+##' site_info = site_info,
+##' dates = c("19950201", "19961215"),
+##' varid = "LAI",
+##' dir_url = "https://www.ncei.noaa.gov/thredds/catalog/cdr/lai/files",
+##' data_url = "https://www.ncei.noaa.gov/thredds/dodsC/cdr/lai/files",
+##' run_parallel = TRUE,
+##' outdir = NULL)
##' run_parallel = TRUE, ncores = 8)
##' }
##' @export
##' @author Bailey Morrison
##'
-download.thredds.AGB <- function(outdir = NULL, site_ids, run_parallel = FALSE,
- ncores = NULL) {
-
-
+get_site_info <- function(settings) {
+ observation <- c()
+ for (i in seq_along(settings$run)) {
+ command <- paste0("settings$run$settings.", i, "$site$id")
+ obs <- eval(parse(text = command))
+ observation <- c(observation, obs)
+ }
+
+
+ PEcAn.logger::logger.info(
+ "**** Extracting LandTrendr AGB data for model sites ****"
+ )
con <- PEcAn.DB::db.open(
- list(user='bety', password='bety', host='localhost',
- dbname='bety', driver='PostgreSQL',write=TRUE))
- site_ID <- as.character(site_ids)
- suppressWarnings(site_qry <- glue::glue_sql("SELECT *, ST_X(ST_CENTROID(geometry)) AS lon,
- ST_Y(ST_CENTROID(geometry)) AS lat FROM sites WHERE id IN ({ids*})",
- ids = site_ID, .con = con))
- suppressWarnings(qry_results <- DBI::dbSendQuery(con,site_qry))
+ list(
+ user = "bety", password = "bety", host = "localhost",
+ dbname = "bety", driver = "PostgreSQL", write = TRUE
+ )
+ )
+ site_ID <- observation
+ suppressWarnings(
+ site_qry <- glue::glue_sql(
+ "SELECT *, ST_X(ST_CENTROID(geometry)) AS lon,
+ ST_Y(ST_CENTROID(geometry)) AS lat FROM sites WHERE id IN ({ids*})",
+ ids = site_ID,
+ .con = con
+ )
+ )
+ suppressWarnings(qry_results <- DBI::dbSendQuery(con, site_qry))
suppressWarnings(qry_results <- DBI::dbFetch(qry_results))
- site_info <- list(site_id=qry_results$id, site_name=qry_results$sitename, lat=qry_results$lat,
- lon=qry_results$lon, time_zone=qry_results$time_zone)
-
- mylat = site_info$lat
- mylon = site_info$lon
-
- # site specific URL for dataset --> these will be made to work for all THREDDS datasets in the future, but for now, just testing with
- # this one dataset. This specific dataset only has 1 year (2005), so no temporal looping for now.
- obs_file = "https://thredds.daac.ornl.gov/thredds/dodsC/ornldaac/1221/agb_5k.nc4"
- obs_err = "https://thredds.daac.ornl.gov/thredds/dodsC/ornldaac/1221/agb_SE_5k.nc4"
- files = c(obs_file, obs_err)
-
- # function to extract ncdf data from lat and lon values for value + SE URLs
- get_data = function(i)
- {
- data = ncdf4::nc_open(files[1])
- agb_lats = ncdf4::ncvar_get(data, "latitude")
- agb_lons = ncdf4::ncvar_get(data, "longitude")
-
- agb_x = which(abs(agb_lons- mylon[i]) == min(abs(agb_lons - mylon[i])))
- agb_y = which(abs(agb_lats- mylat[i]) == min(abs(agb_lats - mylat[i])))
-
- start = c(agb_x, agb_y)
- count = c(1,1)
- d = ncdf4::ncvar_get(ncdf4::nc_open(files[1]), "abvgrndbiomass", start=start, count = count)
- if (is.na(d)) d <- NA
- sd = ncdf4::ncvar_get(ncdf4::nc_open(files[2]), "agbSE", start=start, count = count)
- if (is.na(sd)) sd <- NA
- date = "2005"
- site = site_ID[i]
- output = as.data.frame(cbind(d, sd, date, site))
- names(output) = c("value", "sd", "date", "siteID")
-
- # option to save output dataset to directory for user.
- if (!(is.null(outdir)))
- {
- utils::write.csv(output, file = paste0(outdir, "THREDDS_", sub("^([^.]*).*", "\\1",basename(files[1])), "_site_", site, ".csv"), row.names = FALSE)
+ site_info <- list(
+ site_id = qry_results$id,
+ site_name = qry_results$sitename,
+ lat = qry_results$lat,
+ lon = qry_results$lon,
+ time_zone = qry_results$time_zone
+ )
+ return(site_info)
+}
+
+
+##' download.thredds
+##'
+##'
+##' @param outdir file location to place output
+##' @param site_info list containing site_id, site_name, lat, lon, time_zone.
+##' Derived from BETY using a PEcAn .xml settings file with site information.
+##' Can use the get_site_info function to generate this list.
+##' @param dates vector of start and end date for dataset as YYYYmmdd,
+##' YYYY-mm-dd, YYYYjjj, or date object.
+##' @param varid character vector of shorthand variable name. i.e. LAI
+##' @param dir_url catalog url of data from ncei.noaa.gov/thredds website
+##' @param data_url opendap url of data from ncei.noaa.gov/thredds website
+##' @param run_parallel Logical. Download and extract files in parallel?
+##'
+##' @return data.frame summarizing the results of the function call
+##'
+##' @examples
+##' \dontrun{
+##' results <- download_thredds(
+##' site_info = site_info,
+##' dates = c("19950201", "19961215"),
+##' varid = "LAI",
+##' dir_url = "https://www.ncei.noaa.gov/thredds/catalog/cdr/lai/files",
+##' data_url = "https://www.ncei.noaa.gov/thredds/dodsC/cdr/lai/files",
+##' run_parallel = TRUE,
+##' outdir = NULL)
+##' }
+##'
+##' @export
+##' @author Bailey Morrison
+##'
+download_thredds <- function(site_info,
+ dates,
+ varid,
+ dir_url,
+ data_url,
+ run_parallel = FALSE,
+ outdir = NULL) {
+ # until the issues with parallel runs are fixed.
+ run_parallel <- FALSE
+
+ #### check that dates are within the date range of the dataset
+
+ # first make sure dates are in date format. Correct if not.
+ if (!(lubridate::is.Date(dates))) {
+ if (!(is.character(dates))) {
+ dates <- as.character(dates)
+ }
+ if (length(grep(dates, pattern = "-")) > 0) {
+ dates <- c(as.Date(dates[1], "%Y-%m-%d"), as.Date(dates[2], "%Y-%m-%d"))
+ } else {
+ dates <- c(as.Date(dates[1], "%Y%m%d"), as.Date(dates[2], "%Y%m%d"))
+ }
+ # Julian date (YYYYjjj)
+ if (nchar(dates) == 7) {
+ dates <- c(as.Date(dates[1], "%Y%j"), as.Date(dates[2], "%Y%j"))
}
-
- return(output)
}
-
- ## setup parallel
- if (run_parallel) {
- if (!is.null(ncores)) {
- ncores <- ncores
+
+ if (!(is.null(dir_url))) {
+ # https://www.ncei.noaa.gov/thredds/catalog/cdr/lai/files/1981/catalog.html
+ # -> link for directory files, not downloads
+ result <- readLines(paste(dir_url, "catalog.html", sep = "/"))
+ files <- XML::getHTMLLinks(result)
+
+ date_year_range <- unique(lubridate::year(dates))
+ if (all((!(substr(files, 1, 4) %in% date_year_range)))) {
+ # give warning that dates aren't available
+ print("something")
+ }
+ }
+
+ # get list of catalog file links to determine actual dates that can be
+ # downloaded with in user range
+ links <- vector()
+ for (i in seq_along(date_year_range)) {
+ links[i] <- readLines(
+ paste(dir_url, date_year_range[i], "catalog.html", sep = "/"))
+ }
+
+ # get list of all dates available from year range provided
+ files <- foreach::foreach(i = seq_along(links), .combine = c) %do%
+ XML::getHTMLLinks(links[i])
+
+ # remove files with no dates and get list of dates available.
+ index_dates <- regexpr(pattern = "[0-9]{8}", files)
+ files <- files[-(which(index_dates < 0))]
+ index_dates <- index_dates[which(index_dates > 0)]
+
+ # get list of files that fall within the specific date range user asks for
+ # (Ymd, not Y)
+ dates_avail <- as.Date(substr(files, index_dates, index_dates + 7), "%Y%m%d")
+ date_range <- seq(dates[1], dates[2], by = "day")
+ get_dates <- date_range[which(date_range %in% dates_avail)]
+
+ # only keep files that are within the true yyyymmdd date range user requested
+ files <- files[foreach::foreach(i = seq_along(get_dates), .combine = c) %do%
+ grep(files, pattern = format(get_dates[i], "%Y%m%d"))]
+ filenames <- basename(files)
+
+ # user must supply data_URL or the netcdf files cannot be downloaded through
+ # thredds. If the user has supplied no data_url, the job will fail;
+ # warn in that case.
+ if (!(is.null(data_url))) {
+ # https://www.ncei.noaa.gov/thredds/dodsC/cdr/lai/files/1981/AVHRR-Land_v005_AVH15C1_NOAA-07_19810624_c20181025194251.nc.html
+ # this is what a link looks like to download threeds data.
+ urls <- sort(
+ paste(data_url, substr(dates_avail, 1, 4), filenames, sep = "/")
+ )
+
+ # parallel seems to have a problem right now with > 500 urls.
+ if (run_parallel) {
+ ncores <- parallel::detectCores(all.tests = FALSE, logical = TRUE)
+ # This is a failsafe for computers with low numbers of CPUS to reduce
+ # risk of blowing RAM.
+ if (ncores >= 3) {
+ # failsafe in case someone has a computer with 2-4 nodes.
+ ncores <- ncores - 2
+ }
+ # THREDDS has a 10 job limit. Will fail if you try to download more than
+ # 10 values at a time
+ if (ncores >= 10) {
+ ncores <- 9 # went 1 less becasue it still fails sometimes
+ }
+ cl <- parallel::makeCluster(ncores, outfile = "")
+ doParallel::registerDoParallel(cl)
+ output <- foreach::foreach(i = urls, .combine = rbind) %dopar%
+ extract_thredds_nc(site_info = site_info, url = i, varid = varid)
+ parallel::stopCluster(cl)
} else {
- ncores <- parallel::detectCores() -1
+ output <- foreach::foreach(i = urls, .combine = rbind) %do%
+ extract_thredds_nc(site_info, url = i, varid = varid)
}
-
- PEcAn.logger::logger.info(paste0("Running in parallel with: ", ncores))
- cl = parallel::makeCluster(ncores)
- doParallel::registerDoParallel(cl)
- data = foreach::foreach(i = seq_along(mylat), .combine = rbind) %dopar% get_data(i)
- parallel::stopCluster(cl)
-
- } else {
- # setup sequential run
- data = data.frame()
- for (i in seq_along(mylat))
- {
- data = rbind(data, get_data(i))
+
+ if (!(is.null(outdir))) {
+ # this will need to be changed in the future if users want to be able to
+ # save data they haven't already extracted at different sites/dates.
+ utils::write.csv(
+ output,
+ file = paste(outdir, "/THREDDS_", varid, "_",
+ dates[1], "-", dates[2], ".csv",
+ sep = "")
+ )
}
+
+ return(output)
}
-
- return(data)
+}
+
+##' extract_thredds_nc
+##'
+##' @param site_info list containing site_id, site_name, lat, lon, time_zone.
+##' Derived from BETY using a PEcAn .xml settings file with site information.
+##' Can use the get_site_info function to generate this list.
+##' @param url a THREDDS url of a .nc file to extract data from.
+##' @param varid character vector of shorthand variable name. i.e. LAI
+##'
+##' @return a dataframe with the values for each date/site combination
+##' from a THREDDS file
+##'
+##' @examples
+##' \dontrun{
+##' thredds_url = paste0( # breaking up long URL for readability
+##' "https://www.ncei.noaa.gov/thredds/dodsC/cdr/lai/files/1995/",
+##' "AVHRR-Land_v005_AVH15C1_NOAA-14_19950201_c20180831220722.nc")
+##' output <- extract_thredds_nc(
+##' site_info = site_info,
+##' url = thredds_url,
+##' varid = "LAI")
+##' }
+##' @export
+##' @author Bailey Morrison
+##'
+extract_thredds_nc <- function(site_info, url, varid) {
+ mylats <- site_info$lat
+ mylons <- site_info$lon
+ sites <- site_info$site_id
+
+ # open netcdf file and get the correct variable name based on varid parameter
+ # + var names of netcdf
+ data <- ncdf4::nc_open(url)
+ vars <- names(data$var)
+ var <- vars[grep(vars, pattern = varid, ignore.case = TRUE)]
+
+ # get list of all xy coordinates in netcdf
+ lats <- ncdf4::ncvar_get(data, "latitude")
+ lons <- ncdf4::ncvar_get(data, "longitude")
+
+ # find the cell that site coordinates are located in
+ i <- NULL # avoids R pkg checks "no visible binding" complaint below
+ dist_y <- foreach::foreach(i = mylats, .combine = cbind) %do%
+ sqrt((lats - i)^2)
+ dist_x <- foreach::foreach(i = mylons, .combine = cbind) %do%
+ sqrt((lons - i)^2)
+ y <- foreach::foreach(i = seq_len(ncol(dist_y)), .combine = c) %do%
+ which(dist_y[, i] == min(dist_y[, i]), arr.ind = TRUE)
+ x <- foreach::foreach(i = seq_len(ncol(dist_x)), .combine = c) %do%
+ which(dist_x[, i] == min(dist_x[, i]), arr.ind = TRUE)
+
+ scale <- data$var[[var]]$scaleFact
+
+ d <- as.vector(foreach::foreach(i = seq_along(x), .combine = rbind) %do%
+ ncdf4::ncvar_get(data, var, start = c(x[i], y[i], 1), count = c(1, 1, 1)))
+
+ info <- as.data.frame(cbind(sites, mylons, mylats, d),
+ stringsAsFactors = FALSE)
+ names(info) <- c("site_id", "lon", "lat", "value")
+
+ return(info)
}
diff --git a/modules/data.remote/R/extract_phenology_MODIS.R b/modules/data.remote/R/extract_phenology_MODIS.R
index abe897edbe7..f993a572159 100644
--- a/modules/data.remote/R/extract_phenology_MODIS.R
+++ b/modules/data.remote/R/extract_phenology_MODIS.R
@@ -84,8 +84,8 @@ extract_phenology_MODIS<- function(site_info,start_date,end_date,outdir,run_para
leafphdata$leafonday[leafphdata$leafon_qa==3]<-NA #exclude the data when QA is poor
leafphdata$leafoffday[leafphdata$leafoff_qa==3]<-NA
leafphdata$leafonday[leafphdata$leafonday>leafphdata$leafoffday]<-NA #exclude the data when leaf-on date is larger than leaf-off date
- leafphdata$leafonday<-lubridate::yday(as.Date(leafphdata$leafonday),origin="1970-01-01") #convert the dates to Day-of-Year format
- leafphdata$leafoffday<-lubridate::yday(as.Date(leafphdata$leafoffday),origin="1970-01-01")
+ leafphdata$leafonday<-lubridate::yday(as.Date(leafphdata$leafonday,origin="1970-01-01")) #convert the dates to Day-of-Year format
+ leafphdata$leafoffday<-lubridate::yday(as.Date(leafphdata$leafoffday,origin="1970-01-01"))
leafphdata$year<-lubridate::year(leafphdata$year)
leafphdata$site_id<-as.character(leafphdata$site_id)
diff --git a/modules/data.remote/R/merge_image_tiles.R b/modules/data.remote/R/merge_image_tiles.R
new file mode 100644
index 00000000000..5e06cc6343d
--- /dev/null
+++ b/modules/data.remote/R/merge_image_tiles.R
@@ -0,0 +1,258 @@
+#' Merge image tiles to a single image (currently support hdf and tif image format).
+#' @details
+#' Please make sure all image tiles are stored in the `folder.path`.
+#' Please refer to the gdalwarp manual for more details
+#' https://gdal.org/en/stable/programs/gdalwarp.html
+#'
+#' @param in.path character: physical path to the folder that contains all the original image tiles.
+#' @param out.path character: physical path to the folder that contains converted and merged images.
+#' @param band.name character: band name of the image. Default is NULL.
+#' @param just.band.name logical: if we just want the band names of the image file. Default is TRUE.
+#' @param keep.files logical: if we want to keep the image tiles at the end.
+#' @param skip.conversion logical: if we want to ignore the image conversion.
+#' Note that this is an experimental feature, which only works when images are all in the GeoTIFF format.
+#' @param image.settings list: settings used during exporting merged image.
+#' Such as image coordinate system (crs), dimension, extents (ext), and average function (fun).
+#' @param computation list: settings used for configuring computation.
+#' Such as maximum memory per CPU (GDAL_CACHEMAX), percentage of total memory (wm),
+#' number of CPUs (NUM_THREADS), compress method (COMPRESS).
+#'
+#' @return character: file path to the merged GeoTIFF file.
+#' @export
+#'
+#' @author Dongchen Zhang
+#' @importFrom purrr %>%
+merge_image_tiles <- function(in.path,
+ out.path = NULL,
+ band.name = NULL,
+ just.band.name = TRUE,
+ keep.files = FALSE,
+ skip.conversion = FALSE,
+ image.settings = list(crs = "EPSG:4326",
+ dimension = NULL,
+ ext = NULL,
+ fun = NULL),
+ computation = list(GDAL_CACHEMAX = 1000,
+ wm = "80%",
+ NUM_THREADS = parallel::detectCores() - 1,
+ COMPRESS = "DEFLATE")) {
+ # print out computation allocation.
+ PEcAn.logger::logger.info(paste0("Using ", computation$wm, " memory."))
+ PEcAn.logger::logger.info(paste0("Using ", computation$NUM_THREADS, " CPUs."))
+ PEcAn.logger::logger.info(paste0("Using ", computation$COMPRESS, " compression mode."))
+ # Detect if we have the gdalwarp module installed.
+ # check shell environments.
+ if (suppressWarnings(system2("which", "gdalwarp", stdout = FALSE)) != 0) {
+ PEcAn.logger::logger.info("The gdalwarp function is not detected in shell command.")
+ return(NA)
+ }
+ # grab file paths.
+ file.paths <- list.files(in.path, full.names = T)
+ # if we only want to know the exact band names from the image files.
+ if (just.band.name) {
+ # here we are assuming all image tiles share the same band names.
+ band.names <- gdal_conversion(file.paths[1], just_band_name = just.band.name)
+ return(band.names)
+ }
+ # Image conversion.
+ if (is.null(out.path)) {
+ PEcAn.logger::logger.info("Please provide the output directory to store the converted/mosaic image tiles.")
+ return(0)
+ }
+ # if we want to ignore the image conversion.
+ if (skip.conversion) {
+ # if we have any file that has format other than .tif or .tiff.
+ if (!all(grepl("tif", unique(tools::file_ext(file.paths)), fixed = TRUE))) {
+ PEcAn.logger::logger.info("Can't ignore the image conversion. Please make sure all images are in the .tif or .tiff format and try again!")
+ return(0)
+ } else {
+ # the input files will become converted files.
+ converted.file.paths <- file.paths
+ # band name should be replaced too.
+ band.name <- "all_bands"
+ }
+ } else {
+ converted.file.paths <- file.paths %>%
+ purrr::map2(seq_along(file.paths), function(f, tile.id) {
+ gdal_conversion(in_path = f,
+ outfolder = out.path,
+ band_name = band.name,
+ tile_id = tile.id,
+ just_band_name = just.band.name)
+ }) %>% unlist
+ }
+ # write job.sh script.
+ # insert image settings.
+ gdal.cmd <- "gdalwarp"
+ # output coordinate system.
+ if (!is.null(image.settings$crs)) {
+ gdal.cmd <- paste(gdal.cmd, "-t_srs", image.settings$crs)
+ }
+ # output image dimension (=resolution).
+ if (!is.null(image.settings$dimension)) {
+ gdal.cmd <- paste(gdal.cmd, "-ts", paste(image.settings$dimension, collapse = " "))
+ }
+ # output image extents (in xmin, ymin, xmax, ymax order).
+ if (!is.null(image.settings$ext)) {
+ gdal.cmd <- paste(gdal.cmd, "-te", paste(image.settings$ext[c(1, 3, 2, 4)], collapse = " "))
+ }
+ # average function used to upscale image.
+ if (!is.null(image.settings$fun)) {
+ gdal.cmd <- paste(gdal.cmd, "-r", image.settings$fun)
+ }
+ # insert computation settings.
+ if (any(!is.null(unlist(computation)))) {
+ gdal.cmd <- paste(gdal.cmd, "--config")
+ }
+ # memory usage per CPU.
+ if (!is.null(computation$GDAL_CACHEMAX)) {
+ gdal.cmd <- paste(gdal.cmd, "GDAL_CACHEMAX", computation$GDAL_CACHEMAX)
+ }
+ # total memory usage.
+ if (!is.null(computation$wm)) {
+ gdal.cmd <- paste(gdal.cmd, "-wm", computation$wm)
+ }
+ # how many CPUs will be used.
+ if (!is.null(computation$NUM_THREADS)) {
+ gdal.cmd <- paste(gdal.cmd, paste0("-multi -wo NUM_THREADS=", computation$NUM_THREADS))
+ }
+ # image compress method.
+ if (!is.null(computation$COMPRESS)) {
+ gdal.cmd <- paste(gdal.cmd, paste0("-co COMPRESS=", computation$COMPRESS))
+ }
+ gdal.cmd <- paste(gdal.cmd, "-co BIGTIFF=YES -co TILED=TRUE @VRT@ @FINALTIFF@")
+ cmd <- c("#!/bin/bash -l",
+ "module load gdal",
+ "gdalbuildvrt @VRT@ @TIF@",
+ gdal.cmd)
+ cmd <- gsub("@VRT@", file.path(out.path, "index.vrt"), cmd)
+ # if we ignore the conversion, the file should be in the original path.
+ if (skip.conversion) {
+ cmd <- gsub("@TIF@", file.path(in.path, "*.tif"), cmd)
+ } else {
+ cmd <- gsub("@TIF@", file.path(out.path, "*.tif"), cmd)
+ }
+ cmd <- gsub("@FINALTIFF@", file.path(out.path, paste0(band.name, ".tif")), cmd)
+ writeLines(cmd, con = file.path(out.path, "job.sh"))
+ # grant permissions on the job file.
+ cmd <- "chmod 744 @JOBFILE@"
+ cmd <- gsub("@JOBFILE@", file.path(out.path, "job.sh"), cmd)
+ out <- system(cmd, intern = TRUE)
+ # enter the folder and run the job file.
+ cmd <- 'cd \"@JOBPATH@\";./job.sh'
+ cmd <- gsub(pattern = "@JOBPATH@", replacement = out.path, x = cmd)
+ out <- system(cmd, intern = TRUE)
+ # remove files.
+ if (!keep.files) {
+ unlink(list.files(out.path, full.names = T)[which(!grepl(paste0(band.name, ".tif"), list.files(out.path)))], recursive = T)
+ }
+ return(file.path(out.path, paste0(band.name, ".tif")))
+}
+
+#' @description This function provides a tool for remote sensing image conversion using the GDAL utility.
+#' @details
+#' Please note that this function only supports conversions for one band of one image.
+#' If you want to convert multiple images or bands, make sure to loop over these targets.
+#' Currently tested with H5, NetCDF, HDF4, and GeoTIFF formats.
+#' This function should be ready for any GDAL-supported image format.
+#'
+#' @title gdal_conversion
+#' @param in_path character: physical path to the image file.
+#' @param outfolder character: physical path to the folder where you want to export the converted image. Default is NULL.
+#' @param band_name character: band name of the image. Default is NULL.
+#' @param tile_id character/numeric: id for differentiating converted image tiles.
+#' @param just_band_name logical: if we just want the band names of the image file. Default is TRUE.
+#' @param target_format character: target image format. Default is .tif.
+#' @export
+#'
+#' @author Dongchen Zhang
+#' @examples
+#' \dontrun{
+#' in_path <- "/projectnb/dietzelab/malmborg/CARB/HLS_data/MSLSP_10SDH_2016.nc"
+#' outfolder <- "/projectnb/dietzelab/dongchen/anchorSites/NA_runs/MODIS_Phenology"
+#' band_name <- "NumCycles"
+#' # try grab all available bands from the target file.
+#' band_names <-
+#' gdal_conversion(in_path = in_path,
+#' outfolder = outfolder,
+#' band_name = NULL,
+#' just_band_name = T)
+#' # try convert the first band of the available band names to GeoTIFF file.
+#' f <-
+#' gdal_conversion(in_path = in_path,
+#' outfolder = outfolder,
+#' band_name = band_names[1],
+#' just_band_name = F,
+#' target_format = ".tif")
+#' }
+gdal_conversion <- function(in_path, outfolder = NULL, band_name = NULL, tile_id = NULL, just_band_name = TRUE, target_format = ".tif") {
+ # grab subdataset paths.
+ sds <- get_subdatasets(in_path)
+ # grab band names.
+ band_names <- sds %>% purrr::map(function(s){
+ str <- strsplit(s, split = ":", fixed = T)[[1]]
+ return(str[length(str)])
+ }) %>% unlist
+ # return band names.
+ if (just_band_name) {
+ return(band_names)
+ }
+ # conversion.
+ # checks.
+ if (!just_band_name) {
+ if (is.null(band_name)) {
+ PEcAn.logger::logger.info("Please provide band name if you want to do the conversion!")
+ return(0)
+ }
+ if (is.null(outfolder)) {
+ PEcAn.logger::logger.info("Please provide out directory path if you want to do the conversion!")
+ return(0)
+ }
+ }
+ # create target output file name.
+ origin_file_name <- basename(in_path)
+ if (!is.null(tile_id)) {
+ target_file_name <- paste0(strsplit(origin_file_name, split = ".", fixed = T)[[1]][1], "_", band_name, "_", tile_id, target_format)
+ } else {
+ target_file_name <- paste0(strsplit(origin_file_name, split = ".", fixed = T)[[1]][1], "_", band_name, target_format)
+ }
+ # conversion.
+ band.ind <- which(band_names == band_name)
+ out <- gdal_translate(sds[band.ind], file.path(outfolder, target_file_name))
+ return(file.path(outfolder, target_file_name))
+}
+
+#' @description This function provides a tool for reading band names of a remote sensing image.
+#'
+#' @title get_subdatasets.
+#' @param in_path character: physical path to the image file.
+#'
+#' @author Dongchen Zhang
+get_subdatasets <- function(in_path) {
+ image.info <- terra::describe(in_path)
+ image_rawnames <- image.info[grep(utils::glob2rx("*SUBDATASET*NAME*"), image.info)]
+ sds <- sapply(X = seq(length(image_rawnames)),
+ FUN = function(X) {
+ split1 <- strsplit(image_rawnames[X], "=")
+ return(gsub("\"", "", split1[[1]][2]))
+ })
+ return(sds)
+}
+
+#' @description This function provides a tool for executing gdal_translate.
+#'
+#' @title gdal_translate
+#' @param from character: subdataset name.
+#' Generated from the `get_subdatasets` function.
+#' @param to character: physical path to the output file.
+#' @author Dongchen Zhang
+gdal_translate <- function (from, to) {
+ # grab gdal installation path.
+ if ("try-error" %in% class(try(gdal_path <- system("which gdal_translate", intern = TRUE)))) {
+ PEcAn.logger::logger.info("Please make sure the gdal_translate module is installed correctly!")
+ return(0)
+ }
+ # create cmd.
+ cmd <- paste(paste('"',gdal_path,'"',sep=""), paste('"',from,'"',sep=""), paste('"',to,'"',sep=""))
+ out <- system(cmd, intern = T)
+}
\ No newline at end of file
diff --git a/modules/data.remote/inst/LandIQ_new_shapefile_intake_function.R b/modules/data.remote/inst/LandIQ_new_shapefile_intake_function.R
new file mode 100644
index 00000000000..688baa77aed
--- /dev/null
+++ b/modules/data.remote/inst/LandIQ_new_shapefile_intake_function.R
@@ -0,0 +1,125 @@
+# Script for joining new years' data to existing shapefiles
+# First function shapefile_grab is just for pulling shapefile into R before processing
+# Second function get_CARB_data is for processing shapefile data so it can be joined to PEcAn-ready dataframe
+
+####---- Libraries ----####
+install.packages("librarian")
+library(librarian)
+#remotes::install_github("rspatial/terra")
+librarian::shelf(terra, sf, tidyverse, stringr)
+
+
+####---- Function for loading shapefiles into R: ----####
+#' @param base_dir = folder where data are located (character)
+#' @param year = year data were collected (numeric)
+#' @param sf = TRUE -> open as simple feature or FALSE -> open as spatvector (TRUE/FALSE)
+shapefile_grab <- function(base_dir, year, sf) {
+ filelist <- list.files(base_dir, pattern = as.character(year)) # open files for specified year
+ shpfile <- list.files(paste0(base_dir, "/", filelist), pattern = ".shp") # shapefiles
+ file <- paste0(base_dir, "/", filelist, "/", shpfile[1]) # make full filepath
+ if (sf == TRUE){
+ st_read(file) # if TRUE - read in as sf (using sf library)
+ } else {
+ vect(file) # if FALSE - read in as spatvector (using terra library)
+ }
+}
+
+### Wrapper for CARB shapefile dataframe maker functions:
+#' @param crops = crop sf object (sf object, shapefile)
+#' @param crs = desired crs (numeric, I have been using 3857)
+#' @param year = year of data (numeric; YYYY)
+get_CARB_data <- function(crops, crs, year){
+ # transform crs, make valid, and find centroids:
+ centr <- function(shp, crs, year){
+ crops <- st_transform(shp, crs) # change crs
+ crops <- st_make_valid(crops) # make valid
+ cents <- st_centroid(crops) # find centroid
+ cents <- st_transform(cents, crs) # make same projection
+ centpts <- st_coordinates(cents) # get coordinates from centroids
+ crops$centx <- centpts[,'X'] # add columns for coordinates to shapefile
+ crops$centy <- centpts[,'Y']
+ crops$year <- year # add a column for data year
+ crops <- st_zm(crops, drop = T, what = "ZM") # correct the extra Z dimension in geometry
+ return(crops)
+ }
+ crop_new <- centr(crops, crs, year)
+
+ # grab columns for data frame conversion:
+ col_grab <- function(shp){
+ id <- grep("^Unique", names(shp)) # UniqueID from LandIQ
+ year <- grep("year", names(shp)) # year of data
+ lon <- grep("^centx", names(shp)) # centroid lon
+ lat <- grep("^centy", names(shp)) # centroid lat
+ mult <- grep("^MULT", names(shp)) # multiuse code
+ class <- grep("^CLASS", names(shp)) # class code
+ sub <- grep("^SUB", names(shp)) # subclass number
+ spec <- grep("^SPEC", names(shp)) # special condition code
+ sen <- grep("^SEN", names(shp)) # senescing crop
+ emer <- grep("^EMER", names(shp)) # emerging crop
+ irst <- grep("PA", names(shp)) # irrigation status (from IRR_TYP#PA)
+ irty <- grep("PB", names(shp)) # irrigation type (from IRR_TYP#PB)
+ pcnt <- grep("^PCN", names(shp)) # percent cover
+ adoy <- grep("^ADOY", names(shp)) # adjusted day of year for crops
+ yr_pl <- grep("^YR", names(shp)) # year planted
+ hy_reg <- grep("^HYD", names(shp)) # hydro region
+ reg <- grep("^REG", names(shp)) # region
+ cty <- grep("COUNTY", names(shp)) # county
+
+ # combine
+ cols <- c(id, year, lon, lat,
+ mult, class, sub, spec, sen, emer,
+ irst, irty, pcnt, adoy, yr_pl,
+ hy_reg, reg, cty)
+ crops <- shp[,cols]
+ return(crops)
+ }
+ crop_grab <- col_grab(crop_new)
+
+ # build data frame from shapefile object:
+ crop_clean <- function(shp){
+ df <- st_drop_geometry(shp)
+
+    # get rid of asterisk values, replace 00 percent codes with 100, make number cols numeric:
+ df_clean <- df %>%
+ mutate(across(everything(), na_if, "**")) %>%
+ mutate(across(everything(), na_if, "*")) %>%
+ mutate(across(everything(), ~replace(., . == "00", "100"))) %>%
+ mutate(across(everything(), as.character))
+
+ # make longer:
+ # prepare columns for sorting into seasons:
+ renamer <- function(df){
+ numb <- str_extract(names(df), "[0-9]")
+ char <- str_extract_all(names(df), "[:alpha:]+", simplify = TRUE)
+ ch <- vector()
+ for (i in 1:nrow(char)){
+ ch[i] <- str_c(char[i,], collapse = "")
+ }
+ newnames <- str_remove(paste(numb, ch, sep = ""), "NA")
+ colnames(df) <- newnames
+ return(df)
+ }
+ df_cl <- renamer(df_clean)
+
+ # pivoting:
+ df_piv <- df_cl %>%
+ pivot_longer(-c(grep("^[a-zA-Z]", names(df_cl))), names_to = c("name"), values_to = "value") %>%
+ mutate(type = str_extract(name, "[A-Z]+")) %>%
+ mutate(season = str_extract(name, "[0-9]")) %>%
+ select(-name) %>% group_by(UniqueID) %>%
+ pivot_wider(names_from = type,
+ values_from = value) %>% ungroup() %>%
+ mutate(across(c("UniqueID", "year", "SUBCLASS", "PCNT", "ADOY", "season"), as.numeric))
+ }
+ crops_out <- crop_clean(crop_grab)
+ return(crops_out)
+}
+
+# join output to combined dataframe (previous year data) with rbind
+
+# example with 2023 provisional data:
+#setwd("/projectnb/dietzelab/malmborg/CARB")
+crs = 3857
+year = 2023
+crops <- shapefile_grab("LandIQ_shps/", year, sf = TRUE)
+new_crops <- get_CARB_data(crops, crs, year)
diff --git a/modules/data.remote/inst/Python/CCMMF_Irrigation_API.py b/modules/data.remote/inst/Python/CCMMF_Irrigation_API.py
new file mode 100644
index 00000000000..5c7d0c16b86
--- /dev/null
+++ b/modules/data.remote/inst/Python/CCMMF_Irrigation_API.py
@@ -0,0 +1,157 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Created on Thu Mar 6 13:59:07 2025
+
+@author: katherineanne
+"""
+# %% Import modules
+
+import requests
+import numpy as np
+from netCDF4 import Dataset, num2date
+import matplotlib.pyplot as plt
+import pandas as pd
+import os
+from datetime import datetime, date, timedelta
+import pyarrow as pa
+import pyarrow.parquet as pq
+import pyarrow.dataset as ds
+import CCMMF_Irrigation_DataDownload
+import CCMMF_Irrigation_CalcVis
+import CCMMF_Irrigation_Events
+
+
+# %% Define multi use variables
+
+# Define years to look at
+years = list(range(2016, 2026))
+
+# Define main folder
+main_folder = '/projectnb/dietzelab/ccmmf/management/irrigation/'
+
+# Define folder name for csv files
+csv_folder = main_folder + 'WaterBalanceCSV/'
+
+# Define the name of the parquet filename
+pq_filename = main_folder + 'CCMMF_Irrigation_Parquet'
+
+# %% Loading data
+
+# Read in parquet file
+# Load the full dataset
+dataset = ds.dataset(pq_filename, format="parquet", partitioning = 'hive')
+table = dataset.to_table()
+parquet_df = table.to_pandas()
+days_to_download = 0
+
+# Group by the location column and convert to dictionary
+data_dict = {location: location_df for location, location_df in parquet_df.groupby("location")}
+
+# %% Check current date with most current downloaded data
+
+# Delete the current CHIRPS file for this year
+# This will ensure we read in the new data for the current date
+# We only do this if the data is not up to date
+cur_year = datetime.now().year
+today = datetime.now().date()
+chirps_filename = f'{main_folder}chirps-v2.0.{cur_year}.days_p05.nc'
+
+if os.path.exists(chirps_filename):
+ with Dataset(chirps_filename, 'r') as nc:
+
+ time_var = nc.variables['time']
+ dates = num2date(time_var[:], units=time_var.units)
+ most_recent = max(dates)
+ most_recent_date = date(most_recent.year, most_recent.month, most_recent.day)
+
+ if most_recent_date != today:
+ print('Deleted')
+ days_to_download = (today - most_recent_date).days
+ os.remove(chirps_filename)
+
+# %% Define locations
+
+# Read in all lat lons
+df_lat_lon = pd.read_csv(f'{main_folder}design_points.csv')
+
+# Handle duplicates
+df_lat_lon = df_lat_lon.drop_duplicates()
+
+# %% Iterate through locations and download data for each
+
+for row_number in range(35):
+
+ # Load location data
+ latitude = df_lat_lon['lat'].iloc[row_number]
+ longitude = df_lat_lon['lon'].iloc[row_number]
+ location = df_lat_lon['id'].iloc[row_number]
+
+ # Create CSV filename
+ csv_filename = f'{csv_folder}CCMMR_Water_Balance_{latitude}_{longitude}.csv'
+
+ if location in data_dict:
+
+ df = data_dict[location]
+
+ # If we have not downloaded data for today yet...
+ if days_to_download != 0:
+ # Download new data
+ start_date = today - timedelta(days=days_to_download)
+ new_df = CCMMF_Irrigation_DataDownload.new_data_entry_API(latitude, longitude,
+ [start_date.year, cur_year],
+ csv_folder, start_date, today)
+
+ # Concatenate with already saved data
+ old_df = data_dict[location]
+ df = pd.concat([new_df, old_df], ignore_index=True)
+ df = df.sort_values(by='time')
+ data_dict[location] = df
+
+ # Save data
+ df.to_csv(csv_filename, index=False)
+
+ # Check that all years have been read in
+ df['time'] = pd.to_datetime(df['time'])
+ df_years = df['time'].dt.year.unique().tolist()
+
+ if set(df_years) != set(years):
+
+ # Years in what years we want but not in saved data
+ # Does not care if there are values in saved data that are not in wanted years
+ not_saved_years = set(years) - set(df_years)
+ not_saved_years = list(not_saved_years)
+
+ # Download data and calculate for new years
+ new_df = CCMMF_Irrigation_DataDownload.new_data_entry_API(latitude, longitude,
+ not_saved_years, csv_folder)
+
+ # Concatenate with already saved data
+ old_df = data_dict[location]
+ df = pd.concat([new_df, old_df], ignore_index=True)
+ df = df.sort_values(by='time')
+ data_dict[location] = df
+
+ # Save data
+ df.to_csv(csv_filename, index=False)
+
+ # The location is not in the saved dictionary
+ else:
+ # Download and calculate if it doesn't exist
+ df = CCMMF_Irrigation_DataDownload.new_data_entry_API(latitude, longitude,
+ years, csv_folder)
+ data_dict[location] = df
+
+ # Save data
+ df.to_csv(csv_filename, index=False)
+
+# %% Create Event Files
+
+CCMMF_Irrigation_Events.file_creation(data_dict)
+
+# %% Write to parquet
+
+for location, loc_df in data_dict.items():
+ loc_df['location'] = location
+ table = pa.Table.from_pandas(loc_df)
+ pq.write_to_dataset(table, root_path = pq_filename, partition_cols = ['location', 'year'])
\ No newline at end of file
diff --git a/modules/data.remote/inst/Python/CCMMF_Irrigation_CalcVis.py b/modules/data.remote/inst/Python/CCMMF_Irrigation_CalcVis.py
new file mode 100644
index 00000000000..ece689b9160
--- /dev/null
+++ b/modules/data.remote/inst/Python/CCMMF_Irrigation_CalcVis.py
@@ -0,0 +1,134 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Created on Wed Apr 23 14:46:51 2025
+
+@author: krein21
+"""
+# %% Import modules
+
+import numpy as np
+import matplotlib.pyplot as plt
+import pandas as pd
+
+# %% Turn raw data into usable data
+
+def water_balance(df_water_balance, LAT, LON):
+ print(f'{LAT} {LON}')
+
+ # Handle NAs
+ df_water_balance['et'] = df_water_balance['et'].fillna(0)
+ df_water_balance['precip'] = df_water_balance['precip'].fillna(0)
+
+ # Constants
+ WHC = 500 # units: mm
+ W_min = 0.15 * WHC
+ field_capacity = WHC/2
+
+ # Water Balance Equation
+ df_water_balance['W_t'] = field_capacity
+
+ for row_number in range(1,len(df_water_balance)):
+
+ # Pull all data
+ W_tminusone = df_water_balance['W_t'].iloc[row_number - 1]
+ precip = df_water_balance['precip'].iloc[row_number]
+ et = df_water_balance['et'].iloc[row_number]
+
+ # Calculate initial W_t
+ # W_t = W_t-1 + P_t - ET_t
+ W_t_initial = W_tminusone + precip - et
+
+ # Calculate irrigation
+ # Irr_t = max(Wmin - W_t, 0)
+ irr = max(W_min - W_t_initial, 0)
+
+ # Calculate runoff
+ # Qt = max(Wt - WHC, 0)
+ runoff = max(W_t_initial - WHC, 0)
+
+ # Calculate final W_t
+ # W_t = W_t-1 + P_t + Irr_t - ET_t - Q_t
+ W_t = W_tminusone + precip + irr - et - runoff
+
+ # Add values to dataframe
+ df_water_balance.loc[row_number, 'W_t'] = W_t
+ df_water_balance.loc[row_number, 'irr'] = irr
+ df_water_balance.loc[row_number, 'runoff'] = runoff
+
+ # Add year, day and week values
+ df_water_balance['time'] = pd.to_datetime(df_water_balance['time'])
+ df_water_balance['year'] = df_water_balance['time'].dt.year
+ df_water_balance['week'] = df_water_balance['time'].dt.isocalendar().week
+ df_water_balance['day_of_year'] = df_water_balance['time'].dt.dayofyear
+
+ return df_water_balance
+
+
+# %% Time Series
+
+def timeseries_graphs_API(df_water_balance, LAT, LON, YEAR):
+
+ # Slicing warning if not copied
+ df_water_balance = df_water_balance.copy()
+
+ # Create cumulative sum columns
+ df_water_balance['et_cumsum'] = df_water_balance['et'].cumsum()
+ df_water_balance['precip_cumsum'] = df_water_balance['precip'].cumsum()
+ df_water_balance['irr_cumsum'] = df_water_balance['irr'].cumsum()
+
+ # Ensure time is dates
+ df_water_balance['time'] = pd.to_datetime(df_water_balance['time'])
+
+ # Plot time series
+ plt.figure(figsize=(10, 5))
+ plt.plot(df_water_balance['time'], df_water_balance['et_cumsum'], linestyle = 'dotted', lw = 2.5, label = 'Evapotranspiration')
+ plt.plot(df_water_balance['time'], df_water_balance['precip_cumsum'], linestyle = 'dashed', lw = 2.5, label = 'Precipitation')
+ plt.plot(df_water_balance['time'], df_water_balance['irr_cumsum'], linestyle = 'dashdot', lw = 2.5, label = 'Irrigation')
+ plt.plot(df_water_balance['time'], df_water_balance['runoff'], linestyle = 'solid', lw = 2.5, label = 'Runoff')
+
+ plt.xlabel('Date')
+ plt.ylabel('Cumulative Sum of Evapotransipiration, \nPrecipitation, and Irrigation (mm)')
+ plt.suptitle('Evapotransipiration and Precipitation Time Series in Central Valley CA')
+ plt.title(f'(Lat: {LAT}, Lon: {LON})')
+ plt.legend()
+ plt.grid()
+
+ # Save plot
+ filename = f'/projectnb/dietzelab/ccmmf/management/irrigation/TimeseriesPNG/CCMMR_et_precip_irr_cumsum_{YEAR}_{LAT}_{LON}.png'
+ plt.savefig(filename)
+
+ plt.show()
+
+def timeseries_graphs_GEE(df_water_balance, LAT, LON, YEAR):
+
+ # Slicing warning if not copied
+ df_water_balance = df_water_balance.copy()
+
+ # Create cumulative sum columns
+ df_water_balance['et_cumsum'] = df_water_balance['et'].cumsum()
+ df_water_balance['precip_cumsum'] = df_water_balance['precip'].cumsum()
+ df_water_balance['irr_cumsum'] = df_water_balance['irr'].cumsum()
+
+ # Ensure time is dates
+ df_water_balance['time'] = pd.to_datetime(df_water_balance['time'])
+
+ # Plot time series
+ plt.figure(figsize=(10, 5))
+ plt.plot(df_water_balance['time'], df_water_balance['et_cumsum'], linestyle = 'dotted', lw = 2.5, label = 'Evapotranspiration')
+ plt.plot(df_water_balance['time'], df_water_balance['precip_cumsum'], linestyle = 'dashed', lw = 2.5, label = 'Precipitation')
+ plt.plot(df_water_balance['time'], df_water_balance['irr_cumsum'], linestyle = 'dashdot', lw = 2.5, label = 'Irrigation')
+ plt.plot(df_water_balance['time'], df_water_balance['runoff'], linestyle = 'solid', lw = 2.5, label = 'Runoff')
+
+ plt.xlabel('Date')
+ plt.ylabel('Monthly Cumulative Sum of Evapotransipiration, \nPrecipitation, and Irrigation (mm)')
+ plt.suptitle('Evapotransipiration and Precipitation Time Series in Central Valley CA')
+ plt.title(f'(Lat: {LAT}, Lon: {LON})')
+ plt.legend()
+ plt.grid()
+
+ # Save plot
+ filename = f'/projectnb/dietzelab/ccmmf/management/irrigation/TimeseriesPNG_GEE/CCMMR_GEE_cumsum_{YEAR}_{LAT}_{LON}.png'
+ plt.savefig(filename)
+
+ plt.show()
\ No newline at end of file
diff --git a/modules/data.remote/inst/Python/CCMMF_Irrigation_DataDownload.py b/modules/data.remote/inst/Python/CCMMF_Irrigation_DataDownload.py
new file mode 100644
index 00000000000..f51ecca83aa
--- /dev/null
+++ b/modules/data.remote/inst/Python/CCMMF_Irrigation_DataDownload.py
@@ -0,0 +1,229 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Created on Wed Apr 23 14:42:41 2025
+
+@author: krein21
+"""
+# %% Import modules
+
+import requests
+import numpy as np
+import pandas as pd
+from netCDF4 import Dataset, num2date
+import CCMMF_Irrigation_CalcVis
+import os
+import ee
+
+ee.Initialize()
+
+
+# %% Download GEE OPEN ET Data
+
+def GEEOpenET(START_DATE, END_DATE, LAT, LON):
+
+ # Access OpenET dataset
+ collection = ee.ImageCollection("OpenET/ENSEMBLE/CONUS/GRIDMET/MONTHLY/v2_0") \
+ .filterDate(START_DATE, END_DATE) \
+ .filterBounds(ee.Geometry.Point([LON, LAT]))
+
+ # Extract et time series
+ def extract_et(img):
+ date = img.date().format()
+ et = img.reduceRegion(ee.Reducer.first(), ee.Geometry.Point([LON, LAT]), 1000).get('et_ensemble_mad')
+ return ee.Feature(None, {'time': date, 'et': et})
+
+ et_series = collection.map(extract_et)
+
+ # Convert data to df
+ et_series = et_series.getInfo() # Convert from ee.List to Python list
+ et_series = et_series['features'] # Select just the features dictionary
+ open_et_df = pd.DataFrame(et_series) # Turn dictionary into dataframe
+ open_et_df = open_et_df['properties'].apply(pd.Series) # Select properties and turn dictionary into dataframe
+ open_et_df['time'] = pd.to_datetime(open_et_df['time'])
+
+ return open_et_df
+
+# %% Request OPEN ET Data (from website)
+
+def OpenETData(START_DATE, END_DATE, LAT, LON):
+
+ # Set directory
+ working_dir = '/projectnb/dietzelab/ccmmf/management/irrigation/'
+ os.chdir(working_dir)
+
+ # Read in API Key
+ with open('OpenETAPIKey.txt', 'r') as file:
+ api_key = file.readline()
+
+ header = {"Authorization": api_key}
+
+ # endpoint arguments
+ args = {
+ "date_range": [START_DATE, END_DATE],
+ "interval": "daily",
+ "geometry": [LON,LAT],
+ "model": "Ensemble",
+ "variable": "ET",
+ "reference_et": "gridMET",
+ "units": "mm",
+ "file_format": "JSON"
+ }
+
+ # query the api
+ resp = requests.post(
+ headers=header,
+ json=args,
+ url="https://openet-api.org/raster/timeseries/point"
+ )
+
+ # Parse the JSON response
+ et_data = resp.json()
+
+ open_et_df = pd.DataFrame(et_data)
+ open_et_df['time'] = pd.to_datetime(open_et_df['time'])
+
+ return open_et_df
+
+# %% Download CHIRPS Data
+
+def CHIRPSData(YEAR, LAT, LON):
+
+ # Set URL and file name
+ url = f'https://data.chc.ucsb.edu/products/CHIRPS-2.0/global_daily/netcdf/p05/chirps-v2.0.{YEAR}.days_p05.nc'
+ destfile = f'/projectnb/dietzelab/ccmmf/management/irrigation/chirps-v2.0.{YEAR}.days_p05.nc'
+
+ # Check if the file already exists before downloading
+ if not os.path.exists(destfile):
+ print(f"{destfile} not found. Downloading now...")
+ response = requests.get(url, timeout=600)
+
+ with open(destfile, 'wb') as f:
+ f.write(response.content)
+
+ # Open the NetCDF file
+ nc_data = Dataset(destfile, 'r')
+
+ # Print metadata for precipitation
+ #precip_variable = nc_data.variables['precip']
+ #print(precip_variable)
+
+ # Extract coordinate and time variables
+ lon = nc_data.variables['longitude'][:]
+ lat = nc_data.variables['latitude'][:]
+ time = nc_data.variables['time']
+
+ # Find the nearest lat/lon index
+ lon_idx = np.abs(lon - LON).argmin()
+ lat_idx = np.abs(lat - LAT).argmin()
+
+ # Extract the data just for that lat lon
+ precip_data = nc_data.variables['precip'][:, lat_idx, lon_idx]
+
+ # Convert time to standard datetime
+ dates = num2date(time[:], units=time.units, calendar=time.calendar)
+ dates = [pd.Timestamp(date.isoformat()) for date in dates]
+
+ # Close the NetCDF file when done
+ nc_data.close()
+
+ # Clean data
+ precip_data = precip_data.filled(np.nan)
+ precip_data_df = pd.DataFrame({
+ 'time': dates,
+ 'precip': precip_data
+ })
+
+ return precip_data_df
+
+# %% Calculate and visualize new data for the API downloaded data
+
+def new_data_entry_API(LAT, LON, years, csv_folder, START_DATE = None, END_DATE = None):
+ print(f'{LAT} {LON} {years}')
+
+ # Define start and end date
+ if START_DATE == None or END_DATE == None:
+ START_DATE = f'{years[0]}-01-01'
+ END_DATE = f'{years[-1]}-12-31'
+
+ # Download open et data
+ et_df = OpenETData(START_DATE, END_DATE, LAT, LON)
+
+ # Download CHIRPS data year by year and concatenate
+ precip_data = pd.DataFrame()
+ for year in years:
+ precip_data_year = CHIRPSData(year, LAT, LON)
+ precip_data = pd.concat([precip_data, precip_data_year], ignore_index=True)
+
+ # Organize and water balance
+ df_water_balance = et_df
+ df_water_balance['precip'] = precip_data['precip']
+ df_water_balance = CCMMF_Irrigation_CalcVis.water_balance(df_water_balance, LAT, LON)
+
+ # Graph
+ df_water_balance['time'] = pd.to_datetime(df_water_balance['time'])
+ for year in years:
+ CCMMF_Irrigation_CalcVis.timeseries_graphs_API(df_water_balance[df_water_balance['time'].dt.year == year], LAT, LON, year)
+
+ # Save to csv to ensure data is stored
+ filename = f'{csv_folder}CCMMR_Water_Balance_{LAT}_{LON}.csv'
+ df_water_balance.to_csv(filename, index=False)
+ return df_water_balance
+
+# %% Calculate and visualize new data for the Google Earth Engine downloaded data
+
+def new_data_entry_GEE(LAT, LON, years, csv_folder, START_DATE = None, END_DATE = None):
+ print(f'{LAT} {LON} {years}')
+
+ # Define start and end date
+ if START_DATE == None or END_DATE == None:
+ START_DATE = f'{years[0]}-01-01'
+ END_DATE = f'{years[-1]}-12-31'
+
+ # Download open et data
+ et_df = GEEOpenET(START_DATE, END_DATE, LAT, LON)
+
+ # Download CHIRPS data year by year and concatenate
+ precip_data = pd.DataFrame()
+ for year in years:
+ precip_data_year = CHIRPSData(year, LAT, LON)
+ precip_data = pd.concat([precip_data, precip_data_year], ignore_index=True)
+
+ # Interpolate et data to daily
+ # Find average daily et for each month
+ et_df['time'] = pd.to_datetime(et_df['time'])
+ et_df['days_in_month'] = et_df['time'].dt.days_in_month
+ et_df['avg_et'] = et_df['et'] / et_df['days_in_month']
+ et_df.set_index('time', inplace = True)
+
+ # Expand average to daily dataframe
+ end_of_month = et_df.index.max() + pd.offsets.MonthEnd(0) # extend end to the end of the last month
+ daily_index = pd.date_range(start = et_df.index.min(), end = end_of_month, freq = 'D') # find all days in range
+ daily_et_df = et_df.reindex(daily_index) # Expand dataframe to include all days
+
+ daily_et_df['avg_et'] = daily_et_df['avg_et'].ffill() # Fill in all missing values with the starting value
+ #daily_et_df['avg_et'] = daily_et_df['avg_et'].interpolate(method='time') # linear interpolation
+ daily_et_df = daily_et_df[['avg_et']] # select just the avegarged data
+ daily_et_df = daily_et_df.rename(columns={'avg_et': 'et'})
+
+ # Merge precip and et data
+ precip_data['time'] = pd.to_datetime(precip_data['time'])
+ precip_data.set_index('time', inplace = True)
+ df_water_balance = daily_et_df.join(precip_data, how='inner') # merge with et data (only keeping values from both)
+ df_water_balance = df_water_balance.reset_index().rename(columns={'index': 'time'}) # reset index so theirs a time column back
+
+    # Organize and water balance
+ df_water_balance = CCMMF_Irrigation_CalcVis.water_balance(df_water_balance, LAT, LON)
+
+ # Graph
+ df_water_balance['time'] = pd.to_datetime(df_water_balance['time'])
+ years = df_water_balance['time'].dt.year.unique()
+ years.sort()
+
+ for year in years:
+ CCMMF_Irrigation_CalcVis.timeseries_graphs_GEE(df_water_balance[df_water_balance['time'].dt.year == year], LAT, LON, year)
+
+ # Save to csv to ensure data is stored
+ filename = f'{csv_folder}CCMMR_Water_Balance_{LAT}_{LON}_GEE.csv'
+ df_water_balance.to_csv(filename, index=False)
+ return df_water_balance
\ No newline at end of file
diff --git a/modules/data.remote/inst/Python/CCMMF_Irrigation_Events.py b/modules/data.remote/inst/Python/CCMMF_Irrigation_Events.py
new file mode 100644
index 00000000000..c46378d7d20
--- /dev/null
+++ b/modules/data.remote/inst/Python/CCMMF_Irrigation_Events.py
@@ -0,0 +1,61 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Created on Thu Apr 24 12:15:39 2025
+
+@author: krein21
+
+Columns:
+ - loc: spatial location index (starts at 0)
+ - year: year of start of this timestep
+ - day: day of start of this timestep (1 - 366)
+ - event_type: type of event
+ - amount_added (cm/day)
+ - type: (0 = canopy, 1 = soil, 2 = flood)
+
+"""
+
+# %% Import modules
+
+import pandas as pd
+
+
+# %% Create event file
+
+def file_creation(data_dict):
+
+ # Create an event file for each location
+ for key, df in data_dict.items():
+
+ # Add columns
+ df['event_type'] = 'irrig'
+ df['loc'] = 0
+ df['type'] = 1
+
+ # Calculate new units for irrigation
+ df['irr'] = df['irr'] * 0.1
+
+ # Aggregate by week
+ # Sum irrigation
+ eventfile_df = df.groupby(['year', 'week'], as_index = False).agg({
+ 'loc': 'first',
+ 'year': 'first',
+ 'day_of_year': 'first',
+ 'event_type': 'first',
+ 'irr': 'sum',
+ 'type': 'first'
+ })
+
+ # Remove week column
+ eventfile_df = eventfile_df.drop('week', axis = 1)
+
+ # Drop all 0 irrigation rows
+ eventfile_df = eventfile_df[eventfile_df['irr'] != 0]
+
+ # Write to file(s)
+ folder_name = '/projectnb/dietzelab/ccmmf/management/irrigation/CCMMF_Irrigation_EventFiles/'
+ filename = f'{folder_name}irrigation_eventfile_{key}.txt'
+ eventfile_df.to_csv(filename, sep = ' ', index = False, header = False)
+
+
+
diff --git a/modules/data.remote/inst/Python/CCMMF_Irrigation_GEE.py b/modules/data.remote/inst/Python/CCMMF_Irrigation_GEE.py
new file mode 100644
index 00000000000..6d664d50f3c
--- /dev/null
+++ b/modules/data.remote/inst/Python/CCMMF_Irrigation_GEE.py
@@ -0,0 +1,159 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Created on Thu Mar 6 13:59:07 2025
+
+@author: katherineanne
+"""
+# %% Import modules
+
+import requests
+import numpy as np
+from netCDF4 import Dataset, num2date
+import matplotlib.pyplot as plt
+import pandas as pd
+import os
+from datetime import datetime, date, timedelta
+import pyarrow as pa
+import pyarrow.parquet as pq
+import pyarrow.dataset as ds
+import ee
+import CCMMF_Irrigation_DataDownload
+import CCMMF_Irrigation_CalcVis
+import CCMMF_Irrigation_Events
+
+ee.Initialize()
+
+# %% Define multi use variables
+
+# Define years to look at
+years = list(range(2016, 2026))
+
+# Define main folder
+main_folder = '/projectnb/dietzelab/ccmmf/management/irrigation/'
+
+# Define folder name for csv files
+csv_folder = main_folder + 'WaterBalanceCSV_GEE/'
+
+# Define the name of the parquet filename
+pq_filename = main_folder + 'CCMMF_Irrigation_Parquet_GEE'
+
+# %% Loading data
+
+# Read in parquet file
+# Load the full dataset
+dataset = ds.dataset(pq_filename, format="parquet", partitioning = 'hive')
+table = dataset.to_table()
+parquet_df = table.to_pandas()
+days_to_download = 0
+
+# Group by the location column and convert to dictionary
+data_dict = {location: location_df for location, location_df in parquet_df.groupby("location")}
+
+# %% Check current date with most current downloaded data
+
+# Delete the current CHIRPS file for this year
+# This will ensure we read in the new data for the current date
+# We only do this if the data is not up to date
+cur_year = datetime.now().year
+today = datetime.now().date()
+chirps_filename = f'{main_folder}chirps-v2.0.{cur_year}.days_p05.nc'
+
+if os.path.exists(chirps_filename):
+ with Dataset(chirps_filename, 'r') as nc:
+
+ time_var = nc.variables['time']
+ dates = num2date(time_var[:], units=time_var.units)
+ most_recent = max(dates)
+ most_recent_date = date(most_recent.year, most_recent.month, most_recent.day)
+
+ if most_recent_date != today:
+ print('Deleted')
+ days_to_download = (today - most_recent_date).days
+ os.remove(chirps_filename)
+
+# %% Define locations
+
+# Read in all lat lons
+df_lat_lon = pd.read_csv(f'{main_folder}design_points.csv')
+
+# Handle duplicates
+df_lat_lon = df_lat_lon.drop_duplicates()
+
+# %% Iterate through locations and download data for each
+
+for row_number in range(5):
+
+ # Load location data
+ latitude = df_lat_lon['lat'].iloc[row_number]
+ longitude = df_lat_lon['lon'].iloc[row_number]
+ location = df_lat_lon['id'].iloc[row_number]
+
+ # Create CSV name
+ csv_filename = f'{csv_folder}CCMMR_Water_Balance_{latitude}_{longitude}_GEE.csv'
+
+ if location in data_dict:
+
+ df = data_dict[location]
+
+ # If we have not downloaded data for today yet...
+ if days_to_download != 0:
+ # Download new data
+ start_date = today - timedelta(days=days_to_download)
+ new_df = CCMMF_Irrigation_DataDownload.new_data_entry_GEE(latitude, longitude,
+ [start_date.year, cur_year],
+ csv_folder, start_date, today)
+
+ # Concatenate with already saved data
+ old_df = data_dict[location]
+ df = pd.concat([new_df, old_df], ignore_index=True)
+ df = df.sort_values(by='time')
+ data_dict[location] = df
+
+ # Save data
+ df.to_csv(csv_filename, index=False)
+
+ # Check that all years have been read in
+ df['time'] = pd.to_datetime(df['time'])
+ df_years = df['time'].dt.year.unique().tolist()
+
+ if set(df_years) != set(years):
+
+ # Years in what years we want but not in saved data
+ # Does not care if there are values in saved data that are not in wanted years
+ not_saved_years = set(years) - set(df_years)
+ not_saved_years = list(not_saved_years)
+
+ # Download data and calculate for new years
+ new_df = CCMMF_Irrigation_DataDownload.new_data_entry_GEE(latitude, longitude,
+ not_saved_years, csv_folder)
+
+ # Concatenate with already saved data
+ old_df = data_dict[location]
+ df = pd.concat([new_df, old_df], ignore_index=True)
+ df = df.sort_values(by='time')
+ data_dict[location] = df
+
+ # Save data
+ df.to_csv(csv_filename, index=False)
+
+ # The location is not in the saved dictionary
+ else:
+ # Download and calculate if it doesn't exist
+ df = CCMMF_Irrigation_DataDownload.new_data_entry_GEE(latitude, longitude,
+ years, csv_folder)
+ data_dict[location] = df
+
+ # Save data
+ df.to_csv(csv_filename, index=False)
+
+# %% Create Event Files
+
+#CCMMF_Irrigation_Events.file_creation(data_dict)
+
+# %% Write to parquet
+
+for location, loc_df in data_dict.items():
+ loc_df['location'] = location
+ table = pa.Table.from_pandas(loc_df)
+ pq.write_to_dataset(table, root_path = pq_filename, partition_cols = ['location', 'year'])
\ No newline at end of file
diff --git a/modules/data.remote/inst/Python/CCMMF_Irrigation_GEEvAPI.py b/modules/data.remote/inst/Python/CCMMF_Irrigation_GEEvAPI.py
new file mode 100644
index 00000000000..819a56ae104
--- /dev/null
+++ b/modules/data.remote/inst/Python/CCMMF_Irrigation_GEEvAPI.py
@@ -0,0 +1,213 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Created on Thu May 15 21:48:42 2025
+
+@author: krein21
+"""
+
+# %% Import modules
+
+import requests
+import numpy as np
+from netCDF4 import Dataset, num2date
+import matplotlib.pyplot as plt
+import pandas as pd
+import os
+from datetime import datetime, date, timedelta
+import pyarrow as pa
+import pyarrow.parquet as pq
+import pyarrow.dataset as ds
+from sklearn.metrics import mean_squared_error, r2_score
+import seaborn as sns
+
+# %% Define multi use variables
+
+# Define years to look at
+years = list(range(2016, 2026))
+
+# Define main folder
+main_folder = '/projectnb/dietzelab/ccmmf/management/irrigation/'
+
+# Define the name of the parquet filename for API
+API_pq_filename = main_folder + 'CCMMF_Irrigation_Parquet'
+
+# Define the name of the parquet filename for GEE
+GEE_pq_filename = main_folder + 'CCMMF_Irrigation_Parquet_GEE'
+
+# %% Loading data
+
+# GEE Data Download
+
+# Read in parquet file
+# Load the full dataset
+GEE_dataset = ds.dataset(GEE_pq_filename, format="parquet", partitioning = 'hive')
+GEE_table = GEE_dataset.to_table()
+GEE_parquet_df = GEE_table.to_pandas()
+
+# Group by the location column and convert to dictionary
+GEE_data_dict = {location: location_df for location, location_df in GEE_parquet_df.groupby("location")}
+
+# API Data Download
+
+# Read in parquet file
+# Load the full dataset
+API_dataset = ds.dataset(API_pq_filename, format="parquet", partitioning = 'hive')
+API_table = API_dataset.to_table()
+API_parquet_df = API_table.to_pandas()
+
+# Group by the location column and convert to dictionary
+API_data_dict = {location: location_df for location, location_df in API_parquet_df.groupby("location")}
+
+# %% Merge API and GEE data
+
+merged_data_dict = {}
+
+for key in GEE_data_dict.keys():
+
+ if key in API_data_dict:
+ print(key)
+
+ # Select both dataframes
+ df_gee = GEE_data_dict[key].copy()
+ df_api = API_data_dict[key].copy()
+
+ # Merge dataframes
+ # Use time as the connecting feature
+ # Only save data if both dataframes have it
+ # Specify suffixes
+ merged_df = pd.merge(df_gee, df_api, on = 'time', how = 'inner', suffixes = ('_GEE', '_API'))
+
+ # Add to dictionary
+ merged_data_dict[key] = merged_df
+
+
+# %% Aggregate weekly
+
+# Create weekly irrigation, precipitation, et data_dict
+merged_data_dict_weekly = {}
+
+for key, df in merged_data_dict.items():
+
+ # Calculate new units for irrigation et and precip(cm)
+ df['irr_GEE'] = df['irr_GEE'] * 0.1
+ df['irr_API'] = df['irr_API'] * 0.1
+ df['et_GEE'] = df['et_GEE'] * 0.1
+ df['et_API'] = df['et_API'] * 0.1
+ df['precip_GEE'] = df['precip_GEE'] * 0.1
+
+ # Add changed units to data dict
+ #merged_data_dict[key] = df
+
+ # Aggregate by week
+ # Sum irrigation
+ weekly_df = df.groupby(['year_GEE', 'week_GEE'], as_index = False).agg({
+ 'time': 'first',
+ 'et_GEE': 'sum',
+ 'precip_GEE': 'sum',
+ 'irr_GEE': 'sum',
+ 'et_API': 'sum',
+ 'precip_API': 'sum',
+ 'irr_API': 'sum'
+ })
+
+ # Remove week column
+ weekly_df = weekly_df.drop('week_GEE', axis = 1)
+
+ # Add to weekly data_dict
+ merged_data_dict_weekly[key] = weekly_df
+
+# %% Predicted Observed Plots
+
+# One of all locations
+
+# Flatten all data into one dataframe
+flattened_df = pd.concat(
+ [df.assign(id = key) for key, df in merged_data_dict_weekly.items()],
+ ignore_index = True
+)
+
+# Clean data
+flattened_df = flattened_df.dropna(subset=['irr_API', 'irr_GEE'])
+
+# X - weekly irrigation from API (sum weekly)
+# Y - weekly irrigation from GEE (sum weekly)
+# Delineate location by color
+sns.scatterplot(data = flattened_df, x = 'irr_API', y = 'irr_GEE', hue = 'id',
+ s = 10, legend = False)
+
+# 1:1 line
+min_val = 0
+max_val = max(max(flattened_df['irr_API']), max(flattened_df['irr_GEE']))
+plt.plot([min_val, max_val], [min_val, max_val], 'r--', label='1:1 line')
+
+
+# RMSE/R^2
+rmse = np.sqrt(mean_squared_error(flattened_df['irr_API'], flattened_df['irr_GEE']))
+r2 = r2_score(flattened_df['irr_API'], flattened_df['irr_GEE'])
+
+# Labels
+plt.xlabel('Daily Sampled Evapotranspiration Data')
+plt.ylabel('Monthly Sampled Evapotranspiration Data')
+plt.suptitle('Impact of Interpolation on Irrigation (cm) Calculation')
+plt.title(f'RMSE = {rmse:.2f} $R^2$ = {r2:.2f}')
+plt.grid(True)
+plt.tight_layout()
+plt.show()
+
+# One for each location
+for key, df in merged_data_dict_weekly.items():
+
+ # Clean data
+ df = df.dropna(subset=['irr_API', 'irr_GEE'])
+
+ # Scatterplot
+ sns.scatterplot(data = df, x = 'irr_API', y = 'irr_GEE', s = 10, legend = False)
+
+ # 1:1 line
+ min_val = 0
+ max_val = max(max(df['irr_API']), max(df['irr_GEE']))
+ plt.plot([min_val, max_val], [min_val, max_val], 'r--', label='1:1 line')
+
+
+ # RMSE/R^2
+ rmse = np.sqrt(mean_squared_error(df['irr_API'], df['irr_GEE']))
+ r2 = r2_score(df['irr_API'], df['irr_GEE'])
+
+ # Labels
+ plt.xlabel('Daily Sampled Evapotranspiration Data')
+ plt.ylabel('Monthly Sampled Evapotranspiration Data')
+ plt.suptitle(f'Impact of Interpolation on Irrigation (cm) Calculation for {key}')
+ plt.title(f'RMSE = {rmse:.2f} $R^2$ = {r2:.2f}')
+ plt.grid(True)
+ plt.tight_layout()
+ plt.show()
+
+# %% Time Series Plots
+
+for key, df in merged_data_dict_weekly.items():
+
+ # Sort by time
+ df = df.sort_values(by='time')
+
+ # Create cumulative sum columns
+ df['irr_API_cumsum'] = df['irr_API'].cumsum()
+ df['irr_GEE_cumsum'] = df['irr_GEE'].cumsum()
+ df['precip_GEE_cumsum'] = df['precip_GEE'].cumsum()
+ df['et_API_cumsum'] = df['et_API'].cumsum()
+ df['et_GEE_cumsum'] = df['et_GEE'].cumsum()
+
+ # Plot time series
+ plt.figure(figsize=(10, 5))
+ plt.plot(df['time'], df['irr_API_cumsum'], linestyle = 'dotted', lw = 2.5, color = 'royalblue', label = 'API Irrigation')
+ plt.plot(df['time'], df['irr_GEE_cumsum'], linestyle = 'dotted', lw = 2.5, color = 'yellowgreen', label = 'GEE Irrigation')
+ plt.plot(df['time'], df['precip_GEE_cumsum'], linestyle = 'solid', lw = 2.5, color = 'mediumpurple', label = 'Precipitation')
+ plt.plot(df['time'], df['et_API_cumsum'], linestyle = 'solid', lw = 2.5, color = 'royalblue', label = 'API Evapotranspiration')
+ plt.plot(df['time'], df['et_GEE_cumsum'], linestyle = 'solid', lw = 2.5, color = 'yellowgreen', label = 'GEE Evapotranspiration')
+
+ plt.xlabel('Date')
+ plt.ylabel('Cumulative Sum of Evapotransipiration, \nPrecipitation, and Irrigation (cm)')
+ plt.title(f'Timeseries Impact of Interpolation on Irrigation Calculation for {key}')
+ plt.legend()
+ plt.grid()
+ plt.show()
diff --git a/modules/data.remote/inst/Python/README.txt b/modules/data.remote/inst/Python/README.txt
new file mode 100644
index 00000000000..6af839449e4
--- /dev/null
+++ b/modules/data.remote/inst/Python/README.txt
@@ -0,0 +1,170 @@
+This document walks through the code written by Katherine Rein during the
+Spring 2025 semester for the CCMMF project. This code works on downloading and
+manipulating evapotranspiration data and precipitation data for different sites
+in California.
+
+Data Sources:
+- Evapotranspiration: OpenET
+ - https://openet.gitbook.io/docs
+ - https://developers.google.com/earth-engine/datasets/catalog/OpenET_ENSEMBLE_CONUS_GRIDMET_MONTHLY_v2_0
+- Precipitation: CHIRPS
+ - https://data.chc.ucsb.edu/products/CHIRPS-2.0/
+
+Main Storage Folder: /projectnb/dietzelab/ccmmf/management/irrigation
+Github Code Storage Folder: /projectnb/dietzelab/ccmmf/management/irrigation/
+ pecan/modules/data.remote/inst/Python
+
+How to use SCC:
+- When creating desktop ensure -> Extra qsub options: -l buyin
+- Once desktop loads:
+ - Open Terminal
+ - Type: module load miniconda
+ - Create or Load environment
+ - Load: conda activate ccmmf_env
+ - Create (all on one line): conda create -n ccmmf_env python jupyter
+ spyder xarray requests numpy netcdf4 matplotlib pandas pyarrow earthengine-api
+ scikit-learn seaborn
+ - To open spyder: spyder &
+ - This may take a second to run. Be patient it will open eventually.
+
+Google Earth Engine Account:
+- Contact Brian Anderson (andyson@bu.edu) for a new Google Earth Engine project
+- Go to https://code.earthengine.google.com/
+- Click on your profile picture in the top right corner
+- Select Project Info
+- Under Cloud Project you will find the Cloud Project ID (ex. ee-krein21-s25)
+ - Save this value for later
+- Find the manage cloud project link and click on it
+- Under Project Info on the left hand side select Go to Project Settings
+- Select IAM on the left hand side
+- Select Grant Access
+- Add openet@googlegroups.com as a viewer (under basic)
+- Now open up a terminal window and navigate to the irrigation folder
+- Run the following command in terminal: earthengine authenticate --auth_mode=notebook
+- Paste the link it gives you into a browser and log into your Google account
+that is linked to the Google Earth Engine project
+- Paste the token back into the terminal window
+
+OpenET Account:
+- Click log in/sign up for an account at https://etdata.org/
+- Use the same account as you used for your Google Earth Engine project
+- Once account has been created, add in the saved Cloud Project ID into the
+Cloud Project ID field at the bottom of profile settings
+
+Organization:
+- Python Files
+ - CCMMF_Irrigation_API: This file is the main file that runs the data downloading
+ and other data manipulation for using the OpenET API. It loads in the previously
+ downloaded data and decides which data it needs to download.
+  - CCMMF_Irrigation_DataDownload: This file contains the different download
+  functions for each data type. It also contains the function that compiles
+  all of the download functions to download new data for a new location/years.
+ - CCMMF_Irrigation_CalcVis: This file contains the functions used to clean
+ and visualize the raw data.
+ - CCMMF_Irrigation_Events: This file contains the function that turns a
+ dictionary of dataframes into txt files for each location in the dictionary.
+ It both selects columns and sets constants for other columns. It also aggregates
+ the data by week.
+ - CCMMF_Irrigation_GEE: This is the same as CCMMF_Irrigation_API except it
+ grabs the OpenET data from Google Earth Engine. It also does not create any
+ irrigation event files.
+ - CCMMF_Irrigation_GEEvAPI: This script is completely independent of all other
+  workflows. This reads in all saved data from both the Google Earth Engine
+ downloads and the API downloads. It then creates graphs and summary statistics
+ to help us identify if we can use Google Earth Engine monthly data instead of
+ the daily data from the API.
+- Folders
+ - WaterBalanceCSV: This is where all of the csv files for each location get
+ saved. This is a back up way to save all of the data and also makes it easier
+ to quickly view data per location. Each file is labeled with the corresponding
+ lat and long coordinate. The folder name is defined in the "Define multi use
+ variables" section of CCMMF_Irrigation_API.
+ - WaterBalanceCSV_GEE: This is the same as the regular WaterBalanceCSV but
+ simply for the et data downloaded from Google Earth Engine.
+ - TimeseriesPNG: This is where the timeseries graphs for each location and
+ each year are saved. There is no variable name for this folder it is simply
+ included in this string f'TimeseriesPNG/CCMMR_et_precip_irr_cumsum_{YEAR}_{LAT}_{LON}.png'
+ in the timeseries_graphs function in CCMMF_Irrigation_CalcVis.
+  - TimeseriesPNG_GEE: This is the same as the regular TimeseriesPNG but simply
+  for the et data downloaded from Google Earth Engine. The file naming format
+  is the same as for TimeseriesPNG.
+ - CCMMF_Irrigation_Parquet: This folder is a directory for all of the parquet
+ files. It is written in a way that Python and R can then tile the data by
+ both location and year. This folder name is also defined in the "Define multi
+ use variables" section of CCMMF_Irrigation_API.
+ - CCMMF_Irrigation_Parquet_GEE: This is the same as the regular CCMMF_Irrigation_Parquet
+ but simply for the et data downloaded from Google Earth Engine.
+ - CCMMF_Irrigation_EventFiles: This holds all of the event txt files for each
+ location. The column names are in the header of CCMMF_Irrigation_Events. The
+ naming format for the files is irrigation_eventfile_{location_id}.txt.
+ - pecan: This folder contains the entire pecan repo from Github. The only portion
+ of this that is needed is the Python code files which can be found in Python_Code.
+ - Python_Code: This is a symlink to the folder within pecan that holds all
+ of the .py files. This is so that you can add the files to the pecan Github
+ repo.
+- Other
+ - chirps-v2.0.{year}.days_p05.nc: These are the files that contain the downloaded
+ CHIRPS data on a daily scale for the whole world. They are downloaded from the
+ web and then read in for each location and year. They are quite large and
+ take a while to download so if your code isn't running quickly that may be
+ why (if given new years).
+  - design_points.csv: This is the initial locations dataframe that we started
+ with. To scale this program up, simply change the csv that is being read in.
+ Currently the column headers are id, lat, and lon. Keeping these the same
+ will be easiest.
+
+Workflow:
+This workflow is the same for both the OpenET API scripts and the Google Earth
+Engine Scripts.
+
+- Data is read in from parquet file
+- Calculate how old the data is (and how much new data needs to be read in)
+ - If data is old, then delete the most recent CHIRPS file because we want
+ to read in new data
+- Read in location data (lat, lon, location_id)
+- Iterate through the location data and download new data
+- Check if the location id is in the parquet file we downloaded
+ - If yes: check that our data is currently up to date (download/organize
+ new dates if needed)
+ - Also check that the years sequence is the same from what has been
+ downloaded to what we defined as the years we want to look at (This
+ really only catches any years that are new at the front)
+ - If no: download/organize for predefined year span
+- Write irrigation txt files for each location
+- Write the data that has been downloaded and organized to the parquet file
+
+Functions (by files):
+- CCMMF_Irrigation_DataDownload
+ - GEEOpenET: This function downloads data from Google Earth Engine and turns
+ it into a dataframe with evapotranspiration data and the date.
+ - OpenETData: This function downloads data using the OpenET API and turns
+ it into a dataframe with evapotranspiration data and the date.
+ - CHIRPSData: This function downloads the .nc file from the CHIRPS website
+ and then reads in the values for the closest latitude longitude values. It
+ then returns the data as a dataframe.
+ - new_data_entry_API: This function calls on other functions to download and
+ organize the years and location that was passed to it.
+- CCMMF_Irrigation_CalcVis
+ - water_balance: This function takes the raw data for each location and calculates
+ the water balance equation for each time step. It also calculates the different
+ time columns (week, year, day of year).
+ - timeseries_graphs: This takes a dataframe and saves/prints a cumulative
+ sum graph for evapotranspiration, irrigation, and precipitation. There is
+ also a runoff curve that is not a cumulative sum.
+- CCMMF_Irrigation_Events
+ - file_creation: This function takes in a dictionary of dataframes. It then
+  iterates over each location in the dictionary and selects/calculates the
+ expected columns for the txt file. It also aggregates this data by week.
+
+Next Steps:
+- Figure out what is wrong with time series and predicted observed irrigation plots
+  - Why do the cumulative and monthly evapotranspiration values not match?
+- Missing/mislabeled weekly data in irrigation files
+- Site specific water holding capacity and crop specific rooting depth
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/modules/data.remote/inst/Python/__pycache__/CCMMF_Irrigation_CalcVis.cpython-312.pyc b/modules/data.remote/inst/Python/__pycache__/CCMMF_Irrigation_CalcVis.cpython-312.pyc
new file mode 100644
index 00000000000..8052ac0e291
Binary files /dev/null and b/modules/data.remote/inst/Python/__pycache__/CCMMF_Irrigation_CalcVis.cpython-312.pyc differ
diff --git a/modules/data.remote/inst/Python/__pycache__/CCMMF_Irrigation_DataDownload.cpython-312.pyc b/modules/data.remote/inst/Python/__pycache__/CCMMF_Irrigation_DataDownload.cpython-312.pyc
new file mode 100644
index 00000000000..cfb4ca2428d
Binary files /dev/null and b/modules/data.remote/inst/Python/__pycache__/CCMMF_Irrigation_DataDownload.cpython-312.pyc differ
diff --git a/modules/data.remote/inst/Python/__pycache__/CCMMF_Irrigation_Events.cpython-312.pyc b/modules/data.remote/inst/Python/__pycache__/CCMMF_Irrigation_Events.cpython-312.pyc
new file mode 100644
index 00000000000..2a2466f7080
Binary files /dev/null and b/modules/data.remote/inst/Python/__pycache__/CCMMF_Irrigation_Events.cpython-312.pyc differ
diff --git a/modules/data.remote/inst/download_updated.R b/modules/data.remote/inst/download_updated.R
new file mode 100644
index 00000000000..5075e8dc98f
--- /dev/null
+++ b/modules/data.remote/inst/download_updated.R
@@ -0,0 +1,34 @@
+#DAAC_Set_Credential(replace = TRUE)
+
+# California bounding box is:
+# up_lat <- 42.0095082699265845
+# up_lon <- -124.4820168611238245
+# low_lat <- 32.5288367369123748
+# low_lon <- -114.1312224747231312
+
+ul_lat <- 42.0095082699265845 # y = 4651894 in crs
+ul_lon <- -124.4820168611238245 # x = 377279.7 in crs
+lr_lat <- 32.5288367369123748 # y = 3633946 in crs
+lr_lon <- -114.1312224747231312 # x = 1334269 in crs
+
+from <- "2019-01-01"
+to <- "2019-12-31"
+doi <- "10.5067/HLS/HLSS30.002"
+outdir <- "//projectnb/dietzelab/XinyuanJi/State_of_California_HLSS/2019_Fmask"
+# SWIR - Landsat (B6&7), Sentinel (B11&12)
+band <- "Fmask"
+credential.folder <- "~/projectnb/XinyuanJi"
+paths <- NASA_DAAC_download(ul_lat = ul_lat,
+ ul_lon = ul_lon,
+ lr_lat = lr_lat,
+ lr_lon = lr_lon,
+ ncore = 16,
+ from = from,
+ to = to,
+ outdir = outdir,
+ band = band,
+ credential.folder = credential.folder,
+ doi = doi,
+ just_path = F)
+
+provider_conceptID <- NASA_CMR_finder("10.5067/HLS/HLSS30.002")
diff --git a/modules/data.remote/inst/hlsp-scene-explorer.qmd b/modules/data.remote/inst/hlsp-scene-explorer.qmd
new file mode 100644
index 00000000000..19d224920ea
--- /dev/null
+++ b/modules/data.remote/inst/hlsp-scene-explorer.qmd
@@ -0,0 +1,384 @@
+---
+title: "HLSP Scene Explorer"
+format: html
+editor: visual
+execute:
+ echo: true
+ message: false
+ warning: false
+---
+
+## 1. Setup Environment
+
+```{r}
+librarian::shelf(terra, ncdf4, stringr, ggplot2, maps, readr, DT, knitr, sf,
+ leaflet, dplyr, tidyr)
+
+terraOptions(threads = 16)
+
+# Define HLSP data directory
+hls_dir <- "/projectnb/dietzelab/malmborg/CARB/HLS_data/"
+hls_files <- list.files(hls_dir, pattern = "MSLSP.*\\.nc$", full.names = TRUE)
+
+```
+
+## 2. Extract Variables + Scaling Info
+
+```{r}
+# Extract years and tile IDs
+years_available <- sort(unique(stringr::str_extract(hls_files, "(?<=_)\\d{4}(?=\\.nc$)")))
+
+cat("Available years:", years_available, "\n")
+```
+
+```{r}
+tile_ids <- sort(unique(stringr::str_extract(basename(hls_files), "(?<=MSLSP_)\\w{5}(?=_)")))
+
+cat(length(hls_files), "available tile IDs:", tile_ids)
+# The HLS box we have contains 110 images, only 70 are within California
+# '10SDH 10SDJ 10SEF 10SEG 10SEH 10SEJ 10SFE 10SFF
+# 10SFG 10SFH 10SFJ 10SGD 10SGE 10SGF 10SGG 10SGH
+# 10SGJ 10TCK 10TCL 10TCM 10TDK 10TDL 10TDM 10TEK
+# 10TEL 10TEM 10TFK 10TFL 10TFM 10TGK 10TGL 10TGM
+# 11SKA 11SKB 11SKC 11SKD 11SKT 11SKU 11SKV 11SLA
+# 11SLB 11SLC 11SLT 11SLU 11SLV 11SMA 11SMB 11SMR
+# 11SMS 11SMT 11SMU 11SMV 11SNA 11SNS 11SNT 11SNU
+# 11SNV 11SPS 11SPT 11SPU 11SPV 11SQS 11SQT 11SQU
+# 11SQV 11TKE 11TKF 11TKG 12STC 12STD'
+```
+
+```{r}
+# ---- 1. Load Centroids from GeoJSON ----
+sf_points <- st_read("/projectnb/dietzelab/skanee/ccmmf-phenology/MSLSP_tileCentroids.geojson")
+
+# ---- 2. Load Grid from GeoJSON ----
+grid <- st_read("/projectnb/dietzelab/skanee/ccmmf-phenology/MSLSP_tileGrid.geojson")
+
+# ---- 3. Leaflet Map ----
+leaflet() %>%
+ addProviderTiles("Esri.WorldImagery") %>%
+
+ # Add tile grid outlines
+ addPolylines(data = grid, color = "red", weight = 1, opacity = 0.8) %>%
+
+ # Add centroid circles
+ addCircleMarkers(
+ data = sf_points,
+ radius = 3,
+ color = "white",
+ stroke = FALSE,
+ fillOpacity = 0.7
+ ) %>%
+
+ # Add centroid labels
+ addLabelOnlyMarkers(
+ data = sf_points,
+ label = ~tileID,
+ labelOptions = labelOptions(
+ noHide = TRUE,
+ direction = "top",
+ textOnly = TRUE,
+ textsize = "14px",
+ style = list(
+ "font-weight" = "bold",
+ "color" = "white",
+ "text-shadow" = "0px 0px 5px black",
+ "padding" = "2px"
+ )
+ )
+ )
+```
+
+```{r}
+# Open one sample file
+sample_file <- hls_files[1]
+nc <- ncdf4::nc_open(sample_file)
+
+# Extract variable names and scale info
+var_names <- names(nc$var)
+cat("Available variables:\n")
+
+# Extract long names
+for (v in var_names) {
+ longname <- ncatt_get(nc, v, "long_name")$value
+ cat(v, "→", longname, "\n")
+}
+ncdf4::nc_close(nc)
+```
+
+## 3. Load Tile and Cropland Shapefiles
+
+```{r}
+# Set target tile and year
+target_vars <- c("NumCycles", "OGI", "50PCGI", "Peak", "50PCGD", "OGMn",
+ "EVImax", "EVIamp", "EVIarea",
+ "OGI_2", "50PCGI_2","Peak_2", "50PCGD_2","OGMn_2",
+ "EVImax_2", "EVIamp_2", "EVIarea_2")
+
+target_tile <- "10SGD"
+target_year <- "2018"
+
+# Match raster file
+selected_file <- hls_files[
+ stringr::str_detect(hls_files, target_tile) &
+ stringr::str_detect(hls_files, target_year)
+][1]
+
+# Load + reproject raster
+r <- terra::rast(selected_file, subds = target_vars)
+r <- terra::project(r, "EPSG:4326")
+
+cat("Loaded:", basename(selected_file), "\n")
+cat("Loaded layers:", names(r), "\n")
+```
+
+```{r}
+# --- Set paths and year ---
+landiq_dir <- "/projectnb/dietzelab/ccmmf/LandIQ_data"
+pft_table <- read_csv(file.path(landiq_dir, "CARB_PFTs_table.csv"))
+
+# --- Load the LandIQ shapefile with geometry ---
+shp_path <- paste0(
+ landiq_dir, "/LandIQ_shapefiles/i15_Crop_Mapping_",
+ target_year, "_SHP/i15_Crop_Mapping_",
+ target_year, ".shp"
+)
+plots_sf <- sf::st_read(shp_path, quiet = TRUE)
+
+# --- Load crops_all, filter for target year and add PFT column ---
+load(file.path(landiq_dir, "crops_all_years.RData")) # provides crops_all
+crops_2018 <- crops_all %>% filter(year == target_year) %>% left_join(pft_table
+ %>% select(CLASS = crop_type, SUBCLASS = crop_code,
+ crop_desc, pft_group), by = c("CLASS", "SUBCLASS"))
+
+# --- Ensure ID types match ---
+crops_2018$UniqueID <- as.character(crops_2018$UniqueID)
+plots_sf$UniqueID <- as.character(plots_sf$UniqueID)
+
+# --- Join geometry to long-format attribute table ---
+crops_sf <- crops_2018 %>%
+ left_join(plots_sf %>% select(UniqueID, geometry = geometry), by = "UniqueID") %>%
+ sf::st_as_sf() # ensures the result is an sf object
+
+# Result: long-format sf object with geometry per season
+print(crops_sf)
+```
+
+```{r}
+# Convert sf to terra
+crops_vect <- terra::vect(crops_sf)
+
+# Reproject to match raster CRS if needed
+crops_vect <- terra::project(crops_vect, terra::crs(r))
+
+# Crop to tile extent
+plots_tile <- terra::crop(crops_vect, r)
+
+# Print result
+cat("\nNumber of cropland polygons in", target_tile, ":", nrow(plots_tile), "\n")
+cat("\nAvailable LandIQ Metadata:\n")
+head(plots_tile)
+```
+
+## **Identification & Location**
+
+| Column | Description |
+|----|----|
+| `UniqueID` | Unique polygon identifier for the cropland parcel |
+| `year` | **Water year** associated with this record (e.g., 2018 means Oct 2017–Sep 2018) |
+| `centx` | X-coordinate (longitude) of the polygon centroid |
+| `centy` | Y-coordinate (latitude) of the polygon centroid |
+
+------------------------------------------------------------------------
+
+### **Administrative / Regional**
+
+| Column | Description |
+|----|----|
+| `REGION` | Broad geographic region (e.g., for water agency or policy zone) |
+| `COUNTY` | California county name where the polygon is located |
+| `HYDRORGN` | Hydrologic region (used for water planning) |
+
+------------------------------------------------------------------------
+
+### **Cropping Information**
+
+| Column | Description |
+|----|----|
+| `season` | Season number (1–4) for this crop instance (based on peak NDVI) |
+| `CLASS` | Primary crop classification code (e.g., P = processing tomato) |
+| `SUBCLASS` | Subtype or secondary classification, often numeric |
+| `SPECOND` | Special condition flag: `"Y"` = yes, `"F"` = fallow, `NA` = none |
+| `crop_desc` | Human-readable crop name or description (derived from CLASS) |
+| `pft_group` | Plant Functional Type group (e.g., for ecological modeling) |
+
+------------------------------------------------------------------------
+
+### **Temporal Information**
+
+| Column | Description |
+|----|----|
+| `ADOY` | Adjusted DOY — day of planting relative to water year (Oct–Sep), can be negative |
+| `DOY` | **Calendar year DOY** — converted from `ADOY`, represents true planting date (1–365) |
+| `YRPLANTED` | Calendar year planting occurred (used when multiple years are covered) |
+
+------------------------------------------------------------------------
+
+### **Other Attributes**
+
+| Column | Description |
+|----|----|
+| `MULTIUSE` | Flag for mixed-use land (e.g., pasture + crop) or multi-crop parcels |
+| `PCNT` | Percent of the field occupied by this crop (in multicrop settings) |
+
+## 4. Static Plot
+
+```{r}
+plot(r[["OGI"]], main = paste("Planting DOY -", target_tile, target_year))
+plot(plots_tile, add = TRUE, border = "red", lwd = 0.5)
+```
+
+```{r}
+plot(r[["OGMn"]], main = paste("Harvesting DOY -", target_tile, target_year))
+plot(plots_tile, add = TRUE, border = "red", lwd = 0.5)
+```
+
+## 5. Interactive Map (Leaflet)
+
+```{r}
+# Downsample and prepare color palettes for each variable
+r_down_list <- lapply(target_vars, function(var) {
+ r_down <- terra::aggregate(r[[var]], fact = 4)
+ terra::minmax(r_down)
+ list(
+ raster = r_down,
+ palette = colorNumeric("viridis", domain = values(r_down), na.color = "transparent")
+ )
+})
+names(r_down_list) <- target_vars
+
+# Start leaflet map with satellite basemap
+m <- leaflet() |>
+ addProviderTiles("Esri.WorldImagery", group = "Satellite") |>
+ addProviderTiles("CartoDB.Positron", group = "Basemap") # Optional: switchable basemap
+
+# Add each raster layer
+for (var in target_vars) {
+ m <- m |>
+ addRasterImage(
+ r_down_list[[var]]$raster,
+ colors = r_down_list[[var]]$palette,
+ opacity = 0.7,
+ group = var
+ )
+}
+
+# Add cropland polygons
+m <- m |>
+ addPolygons(data = plots_tile, color = "red", weight = 0.7, group = "Cropland") |>
+ addLayersControl(
+ baseGroups = c("Satellite", "Basemap"),
+ overlayGroups = c(target_vars, "Cropland"),
+ options = layersControlOptions(collapsed = FALSE)
+ ) |>
+ addMiniMap(toggleDisplay = TRUE)
+
+m
+
+```
+
+## 8. Extract Events for One Polygon
+
+```{r}
+# Convert to data.frame to inspect values
+plots_df <- as.data.frame(plots_tile)
+
+# Remove any stale HLSP columns if present
+hlsp_raw_vars <- c("OGI", "50PCGI", "Peak", "50PCGD", "OGMn",
+ "EVImax", "EVIamp", "EVIarea",
+ "OGI_2", "50PCGI_2", "Peak_2", "50PCGD_2", "OGMn_2",
+ "EVImax_2", "EVIamp_2", "EVIarea_2")
+plots_df <- plots_df[, !(names(plots_df) %in% hlsp_raw_vars)]
+
+# Extract mean values for all HLSP layers
+hlsp_means <- terra::extract(r, plots_tile, fun = mean, na.rm = TRUE)
+hlsp_means$ID <- NULL
+names(hlsp_means) <- paste0(names(hlsp_means), "_mean")
+
+# Extract standard deviation values for all HLSP layers
+hlsp_sds <- terra::extract(r, plots_tile, fun = sd, na.rm = TRUE)
+hlsp_sds$ID <- NULL
+names(hlsp_sds) <- paste0(names(hlsp_sds), "_sd")
+
+# Merge both mean and sd columns into plots_df
+plots_df <- cbind(plots_df, hlsp_means, hlsp_sds)
+
+# Rescale vegetation index values
+scale_vars <- list(
+ EVImax = 0.0001,
+ EVIamp = 0.0001,
+ EVIarea = 0.01,
+ EVImax_2 = 0.0001,
+ EVIamp_2 = 0.0001,
+ EVIarea_2 = 0.01
+)
+
+# Apply scaling to both mean and sd columns
+for (var in names(scale_vars)) {
+ sf <- scale_vars[[var]]
+ mean_col <- paste0(var, "_mean")
+ sd_col <- paste0(var, "_sd")
+
+ if (mean_col %in% names(plots_df)) plots_df[[mean_col]] <- plots_df[[mean_col]] * sf
+ if (sd_col %in% names(plots_df)) plots_df[[sd_col]] <- plots_df[[sd_col]] * sf
+}
+
+# (Optional) Drop any columns where all values are NA
+na_cols <- sapply(plots_df, function(col) all(is.na(col)))
+plots_df <- plots_df[, !(names(plots_df) %in% names(na_cols[na_cols]))]
+```
+
+```{r}
+head(plots_df)
+```
+
+```{r}
+doy_to_iso <- function(doy, year) {
+ # For DOY < 1 → previous year
+ # For DOY > 365 → next year
+ base_date <- as.Date(paste0(year, "-01-01"))
+ as.Date(doy - 1, origin = base_date)
+}
+
+date_vars <- c("OGI_mean", "50PCGI_mean", "Peak_mean", "50PCGD_mean", "OGMn_mean",
+ "OGI_2_mean", "50PCGI_2_mean", "Peak_2_mean", "50PCGD_2_mean", "OGMn_2_mean")
+
+# Loop through each variable and add an ISO version
+for (var in date_vars) {
+ new_col <- paste0(var, "_date")
+ plots_df[[new_col]] <- doy_to_iso(plots_df[[var]], plots_df$year)
+}
+
+# 1. Create ADOY_ISO as Date (set NA for ADOY == 0 or NA)
+plots_df$ADOY_ISO <- doy_to_iso(
+ ifelse(plots_df$ADOY == 0 | is.na(plots_df$ADOY), NA, plots_df$ADOY),
+ plots_df$year
+)
+
+# 2. Insert ADOY_ISO right after ADOY (without duplication)
+adoy_index <- which(names(plots_df) == "ADOY")
+col_names <- names(plots_df)
+
+# Reorder: insert ADOY_ISO after ADOY
+new_order <- append(
+ col_names[1:adoy_index],
+ c("ADOY_ISO", col_names[(adoy_index + 1):(ncol(plots_df) - 1)])
+)
+
+# Apply reordered column list
+plots_df <- plots_df[, new_order]
+```
+
+```{r}
+head(plots_df,400)
+```
diff --git a/modules/data.remote/inst/landiq-file-explorer.qmd b/modules/data.remote/inst/landiq-file-explorer.qmd
new file mode 100644
index 00000000000..d3d57ba2d29
--- /dev/null
+++ b/modules/data.remote/inst/landiq-file-explorer.qmd
@@ -0,0 +1,246 @@
+---
+title: "LandIQ File Explorer"
+format: html
+editor: visual
+execute:
+ echo: true
+ message: false
+ warning: false
+---
+
+## Context and Purpose
+
+This script supports a broader workflow to estimate crop phenology using HLS data. While the \`hlsp-scene-explorer.qmd\` script focuses on loading HLS tiles and extracting phenology metrics (e.g. EVImax, OGI, OGMn), this script analyzes the LandIQ cropland dataset to:
+
+\- Understand crop types, cropping intensity (single, double, etc.), and seasonal timing.
+
+\- Clarify the use of adjusted day-of-year (ADOY) and seasonal labels.
+
+\- Build a 2018-only \*\*mastersheet\*\* for integration with phenology metrics.
+
+## 1. Setup
+
+```{r}
+librarian::shelf(dplyr, tidyr, readr, stringr, ggplot2, terra, sf, DT, knitr)
+landiq_dir <- "/projectnb/dietzelab/ccmmf/LandIQ_data"
+```
+
+## 2. Load Data
+
+### 2.1 Load Long-Format Crop Metadata (2018 only)
+
+```{r}
+load(file.path(landiq_dir, "crops_all_years.RData")) # provides `crops_all`
+crops_2018 <- crops_all %>% filter(year == 2018)
+```
+
+### 2.2 Load Crop Type Descriptions (PFTs)
+
+```{r}
+pft_table <- read_csv(file.path(landiq_dir, "CARB_PFTs_table.csv"))
+
+# Join with 2018 crops for description
+crops_2018_joined <- crops_2018 %>%
+ left_join(pft_table %>% select(CLASS = crop_type, SUBCLASS = crop_code, crop_desc, pft_group),
+ by = c("CLASS", "SUBCLASS"))
+```
+
+## 3. Explore Cropping Intensity and Seasonal Use
+
+```{r}
+# Define labels again for safety
+multiuse_labels <- c(
+ S = "Single Crop",
+ D = "Double Crop",
+ T = "Triple Crop",
+ Q = "Quadruple Crop",
+ I = "Intercropped",
+ M = "Mixed Use"
+)
+
+plot_data <- crops_2018_joined %>%
+ filter(MULTIUSE %in% names(multiuse_labels)) %>%
+ filter(!is.na(CLASS), PCNT > 0) %>% # 👈 Keep only actual planted crops
+ select(season, MULTIUSE) %>%
+ ungroup() %>%
+ count(season, MULTIUSE) %>%
+ mutate(
+ MULTIUSE = factor(MULTIUSE, levels = names(multiuse_labels), labels = multiuse_labels),
+ season = factor(season, levels = 1:4)
+ )
+```
+
+### 3.1 Stacked Crop Record by Season
+
+```{r}
+ggplot(plot_data, aes(x = season, y = n, fill = MULTIUSE)) +
+ geom_col(position = "stack", color = "black") +
+ scale_fill_brewer(palette = "Set2") +
+ theme_minimal() +
+ labs(
+ title = "Stacked Crop Records by Season (2018)",
+ x = "Season",
+ y = "Number of Crop Records",
+ fill = "Cropping Type"
+ )
+```
+
+### 3.2 Crop Records by Season Faceted by Cropping Type
+
+```{r}
+ggplot(plot_data, aes(x = season, y = n, fill = MULTIUSE)) +
+ geom_col(color = "black") +
+ scale_fill_brewer(palette = "Set2") +
+ facet_wrap(~ MULTIUSE, scales = "free_y") +
+ theme_minimal() +
+ labs(
+ title = "Crop Records by Season Faceted by Cropping Type (2018)",
+ x = "Season",
+ y = "Number of Crop Records"
+ )
+```
+
+### 3.3 Summary Table
+
+```{r}
+# Pivot summary table (wide format)
+summary_table <- plot_data %>%
+ arrange(season, desc(n)) %>%
+ pivot_wider(
+ names_from = season,
+ values_from = n,
+ values_fill = 0
+ ) %>%
+ rename(`Cropping Type` = MULTIUSE)
+
+# Add row totals
+summary_table_totals <- summary_table %>%
+ mutate(Total = `1` + `2` + `3` + `4`)
+
+# Add grand total row
+grand_total <- summary_table_totals %>%
+ summarise(across(where(is.numeric), sum)) %>%
+ mutate(`Cropping Type` = "Total") %>%
+ select(`Cropping Type`, everything())
+
+# Combine
+summary_table_final <- bind_rows(summary_table_totals, grand_total)
+
+# Display DT table without dropdown
+datatable(
+ summary_table_final,
+ caption = "Crop Record Counts by Season and Cropping Type (2018)",
+ options = list(
+ dom = 't', # Only show the table (no search box etc)
+ ordering = FALSE,
+ autoWidth = TRUE
+ )
+) %>%
+ formatCurrency(columns = colnames(summary_table_final)[-1], currency = "", digits = 0)
+```
+
+## 4. ADOY for Crop Timing
+
+#### Understanding ADOY and Seasonal Mapping
+
+Below is a reference for how LandIQ's crop seasons map to ADOY (Adjusted Day of Year) within the [water year calendar]{.underline}:
+
+| **Season** | **Typical Timing** | **ADOY Range** | **Notes** |
+|----|----|----|----|
+| 1 | October–December (previous CY) | `-92 to -1` | Fall crops starting water year (e.g., Oct–Dec 2017 for 2018) |
+| 2 | January–May | `1 to ~150` | Main winter/spring growing season |
+| 3 | May–August | `~151 to ~240` | Typical summer crops |
+| 4 | August–September | `~241 to ~275+` | Late or intercropped season |
+
+```{r}
+crops_2018_joined %>%
+ filter(!is.na(ADOY), PCNT > 0) %>%
+ ggplot(aes(x = ADOY)) +
+ geom_histogram(binwidth = 10, fill = "#66c2a5", color = "black") +
+ theme_minimal() +
+ labs(
+ title = "Distribution of Adjusted DOY (Peak NDVI) for Crop Records (2018)",
+ x = "Adjusted Day of Year (ADOY)",
+ y = "Count of Crop Records"
+ )
+```
+
+## 5. Crop Types
+
+### 5.1 View Unique Values by Column Name
+
+```{r}
+get_unique_values <- function(data, multiuse_codes, column_name) {
+ data %>%
+ ungroup() %>%
+ filter(MULTIUSE %in% multiuse_codes, !is.na(.data[[column_name]]), PCNT > 0) %>%
+ distinct(.data[[column_name]]) %>%
+ arrange(.data[[column_name]]) %>%
+ pull()
+}
+```
+
+```{r}
+# All crop types (crop descriptions)
+get_unique_values(crops_2018_joined, unique(crops_2018_joined$MULTIUSE), "crop_desc")
+
+```
+
+Note that all crop types coded Q (Quadruple) also appear among those coded T (Triple)
+
+```{r}
+get_unique_values(crops_2018_joined, c("T", "Q"), "crop_desc") # Triple or Quadruple
+```
+
+```{r}
+get_unique_values(crops_2018_joined, c("M", "I"), "crop_desc") # Mixed or Intercropped
+```
+
+```{r}
+# PFTs for triple + quadruple crop fields
+get_unique_values(crops_2018_joined, unique(crops_2018_joined$MULTIUSE), "pft_group")
+```
+
+### 5.2 Table of Crops by Season
+
+Note discrepancies in rows 2, 4, 10 (examples)
+
+```{r}
+# Base cleaned data
+crop_summary <- crops_2018_joined %>%
+ filter(!is.na(crop_desc), PCNT > 0) %>%
+ distinct(crop_desc, MULTIUSE, season, pft_group)
+
+# Wide format with season indicators
+crop_summary_wide <- crop_summary %>%
+ mutate(present = 1) %>%
+ pivot_wider(
+ names_from = season,
+ values_from = present,
+ values_fill = 0,
+ names_prefix = ""
+ )
+
+# Collapse MULTIUSE and finalize
+final_crop_table <- crop_summary_wide %>%
+ group_by(crop_desc, pft_group) %>%
+ summarise(
+ MULTIUSE = paste(sort(unique(MULTIUSE)), collapse = ", "),
+ `1` = max(`1`, na.rm = TRUE),
+ `2` = max(`2`, na.rm = TRUE),
+ `3` = max(`3`, na.rm = TRUE),
+ `4` = max(`4`, na.rm = TRUE),
+ .groups = "drop"
+ ) %>%
+ arrange(crop_desc)
+
+DT::datatable(
+ final_crop_table,
+ caption = "All Crop Types by PFT, MULTIUSE and Seasonal Presence (2018)",
+ options = list(
+ pageLength = 10,
+ autoWidth = TRUE,
+ scrollX = TRUE
+ )
+)
+```
diff --git a/modules/data.remote/inst/ndti_evi_timeseries_example.R b/modules/data.remote/inst/ndti_evi_timeseries_example.R
new file mode 100644
index 00000000000..092296b89b6
--- /dev/null
+++ b/modules/data.remote/inst/ndti_evi_timeseries_example.R
@@ -0,0 +1,481 @@
+# --- STEP 1. Setup ---
+# Core libraries
+library(zoo)
+librarian::shelf(
+readr, dplyr, stringr, tidyr, purrr, ggplot2,
+leaflet, sf, terra, progressr, lubridate
+)
+
+# parallel & progress
+terraOptions(threads = 16)
+handlers("txtprogressbar")
+
+
+
+# --- STEP 2. Load Anchor CSV & Pick a Site ---
+# Read and clean the CSV
+path <- "/projectnb/dietzelab/XinyuanJi/anchor_sites_locations.csv"
+raw <- read_lines(path)
+cleaned <- str_replace_all(raw,
+ ",c\\(([0-9\\.-]+), *([0-9\\.-]+)\\),",
+ ",\"c(\\1,\\2)\","
+)
+anchors <- read_csv(paste(cleaned, collapse = "\n"), show_col_types = FALSE) %>%
+select(id, site_name,
+ upper_left_lon, upper_left_lat,
+ lower_right_lon, lower_right_lat) %>%
+mutate(across(starts_with(c("upper_left","lower_right")), as.numeric),
+ center_lon = (upper_left_lon + lower_right_lon)/2,
+ center_lat = (upper_left_lat + lower_right_lat)/2) %>%
+filter(!is.na(center_lon), !is.na(center_lat))
+
+
+
+# ── CHUNK 1 : read tillage sheet, keep Treatment column ─────────────────────
+path <- "/projectnb/dietzelab/XinyuanJi/till_treatment_polygons.csv"
+# path <- "/projectnb/dietzelab/XinyuanJi/anchor_sites_locations.csv"
+
+raw <- readr::read_lines(path)
+cleaned <- stringr::str_replace_all(
+raw,
+",c\\(([0-9\\.-]+), *([0-9\\.-]+)\\),",
+",\"c(\\1,\\2)\"," # quote the “c(x,y)” blobs (same fix as before)
+)
+
+anchors <- readr::read_csv(paste(cleaned, collapse = "\n"),
+ show_col_types = FALSE) %>%
+
+# filter anchors to years 2018 and 2019 (NOTE(review): `Year == c(2018, 2019)` recycles the vector; `Year %in% c(2018, 2019)` is likely intended)
+dplyr::filter(Year == c(2018, 2019)) %>%
+
+
+
+# ── 1 standardise column names so downstream code is unchanged
+dplyr::rename(
+ id = SampleID, # ← tillage file’s ID
+ site_name = ProjectName.x # ← adjust if your column is ProjectName
+) %>%
+
+# ── 2 include Treatment_Control
+dplyr::select(
+ id, site_name, Treatment_Control,
+ upper_left_lon, upper_left_lat,
+ lower_right_lon, lower_right_lat
+) %>%
+
+dplyr::mutate(
+ dplyr::across(
+ dplyr::starts_with(c("upper_left", "lower_right")),
+ as.numeric
+ ),
+ center_lon = (upper_left_lon + lower_right_lon) / 2,
+ center_lat = (upper_left_lat + lower_right_lat) / 2
+) %>%
+dplyr::filter(!is.na(center_lon), !is.na(center_lat))
+
+
+
+# Make the whole code a function
+process_ndti_for_site <- function(selected_anchor) {
+ site_id <- selected_anchor$id
+ site_name <- selected_anchor$site_name
+
+
+
+ # --- STEP 3. Visualize All Anchor Sites ---
+ # Please refer to Step 3 in:
+ # //projectnb/dietzelab/XinyuanJi/ndti-evi-timeseries(Sarah)-L+S.qmd
+
+
+
+ # --- STEP 4. Build Exact tile-polygon ROI ---
+ # A) Build a closed polygon in WGS84 from the CSV bbox
+ corner_df <- data.frame(
+ lon = c(
+ selected_anchor$upper_left_lon,
+ selected_anchor$upper_left_lon,
+ selected_anchor$lower_right_lon,
+ selected_anchor$lower_right_lon,
+ selected_anchor$upper_left_lon # close ring
+ ),
+ lat = c(
+ selected_anchor$upper_left_lat,
+ selected_anchor$lower_right_lat,
+ selected_anchor$lower_right_lat,
+ selected_anchor$upper_left_lat,
+ selected_anchor$upper_left_lat # close ring
+ )
+ )
+
+ # B) Make an sf and then a terra SpatVector
+ bbox_sf <- st_sfc(st_polygon(list(as.matrix(corner_df))), crs = 4326)
+ roi_vect_wgs <- terra::vect(bbox_sf)
+
+
+
+ # --- STEP 5. Determine Which HLS Tile Covers This Site ---
+ ## Determine which HLS tile covers this site ────────────────────────────
+ ## (robust: uses ROI polygon → largest-overlap → nearest fallback)
+
+ # ── Load the MS-LSP grid (already in WGS-84 / EPSG:4326) ───────────────────
+ grid <- st_read(
+ "/projectnb/dietzelab/skanee/ccmmf-phenology/MSLSP_tileGrid.geojson",
+ quiet = TRUE
+ )
+
+ # ── Build the anchor-site polygon in the grid CRS (4326) -------------------
+ bbox_anchor <- st_transform(bbox_sf, st_crs(grid)) # bbox_sf from Step 5 later
+
+ # ── 1) tiles whose polygons intersect the ROI -----------------------------
+ hits <- st_intersects(bbox_anchor, grid) |> unlist()
+
+ if (length(hits) == 1) {
+
+ tileID <- grid$tileID[hits] # exactly one tile
+
+ } else if (length(hits) > 1) {
+
+ # 2) choose the one with the largest overlapping area
+ overlap_areas <- st_area(st_intersection(grid[hits, ], bbox_anchor))
+ tileID <- grid$tileID[hits[which.max(overlap_areas)]]
+
+ } else {
+
+ # 3) ROI outside every grid polygon – pick the nearest grid feature
+ idx <- st_nearest_feature(st_centroid(bbox_anchor), grid)
+ tileID <- grid$tileID[idx]
+ }
+
+ site_id <- selected_anchor$id
+ site_name <- selected_anchor$site_name
+ crop_type <- site_name # keep your original variable
+
+ cat("Selected site:", site_id, "-", site_name, "\n")
+ cat("Using HLS tile:", tileID, "\n")
+
+
+
+ # --- STEP 6. List & Filter HLS NetCDFs for That Tile ---
+ hls_dir <- "/projectnb/dietzelab/malmborg/CARB/HLS_data"
+ hls_files <- list.files(hls_dir, pattern = "MSLSP.*\\.nc$", full.names = TRUE)
+
+ # Filter by tileID in filename (fast, no raster I/O)
+ hls_tile_files <- hls_files[str_detect(basename(hls_files), fixed(tileID))]
+
+ cat("Found", length(hls_tile_files), "HLS files for tile", tileID, "\n")
+
+
+
+ # --- STEP 7. Precompute Projected ROI Extent ---
+ # Load one file to get its CRS
+ metrics <- c("50PCGI","50PCGD")#,"OGMn", "OGI_2", "Peak_2", "OGMn_2")
+ template_r <- rast(hls_tile_files[1], subds = metrics)
+ roi_proj <- project(roi_vect_wgs, crs(template_r))
+ roi_ext <- ext(roi_proj)
+
+
+
+ # --- STEP 8. Extract phenology metrics ---
+ extract_one <- function(nc) {
+ yr <- as.integer(str_extract(basename(nc), "\\d{4}"))
+ r <- try(rast(nc, subds = metrics), silent = TRUE)
+ if (inherits(r, "try-error")) {
+ message("Skipping load: ", basename(nc))
+ return(tibble(year = yr, OGI = NA, Peak = NA, OGMn = NA))
+ }
+ cr <- try(crop(r, roi_ext), silent = TRUE)
+ if (inherits(cr, "try-error")) {
+ message(" Crop failed for ", basename(nc))
+ return(tibble(year = yr, OGI = NA, Peak = NA, OGMn = NA))
+ }
+ ms <- mask(cr, roi_proj)
+ vals <- global(ms, mean, na.rm = TRUE)
+ out <- as_tibble(t(vals)) %>% setNames(names(ms))
+ out %>% mutate(year = yr, .before = 1)
+ }
+
+ # Sequential extraction with a text progress bar
+ ts_list <- vector("list", length(hls_tile_files))
+ pb <- txtProgressBar(0, length(hls_tile_files), style = 3)
+ for (i in seq_along(hls_tile_files)) {
+ ts_list[[i]] <- extract_one(hls_tile_files[i])
+ setTxtProgressBar(pb, i)
+ }
+ close(pb)
+
+ ts_raw <- bind_rows(ts_list) %>% arrange(year)
+
+
+
+ # --- STEP 9. Pivot long & plot phenology ---
+ ts_long <- ts_raw %>%
+ pivot_longer(
+ cols = all_of(metrics),
+ names_to = "metric",
+ values_to = "doy"
+ ) %>%
+ # ensure doy is integer, then add to the first of each year
+ mutate(
+ doy = as.integer(doy),
+ date = as.Date(paste0(year, "-01-01")) + (doy - 1)
+ )
+
+
+
+ ## --- 10. Extract Mean NDTI over same tile polygon ---
+
+ # .tif directory
+ tif_dir <- c("/projectnb/dietzelab/XinyuanJi/State_of_California_HLSL/2018",
+ "/projectnb/dietzelab/XinyuanJi/State_of_California_HLSL/2019",
+ "/projectnb/dietzelab/XinyuanJi/State_of_California_HLSS/2018",
+ "/projectnb/dietzelab/XinyuanJi/State_of_California_HLSS/2019"
+ )
+ # tif_dir <- "/projectnb/dietzelab/XinyuanJi/anchor_sites/anchor_site_1"
+
+ # .Fmask directory
+ fmask_folder <- c("~/projectnb/XinyuanJi/State_of_California_HLSL/2018_Fmask",
+ "~/projectnb/XinyuanJi/State_of_California_HLSL/2019_Fmask",
+ "~/projectnb/XinyuanJi/State_of_California_HLSS/2018_Fmask",
+ "~/projectnb/XinyuanJi/State_of_California_HLSS/2019_Fmask"
+ )
+
+ # read both Landsat & Sentinel
+ ref_file <- list.files(
+ tif_dir,
+ pattern = "B(06|07|11|12).*\\.tif$",
+ full.names = TRUE
+ )[1]
+
+ # setup the ROI
+ r0 <- rast(list.files(tif_dir, pattern="B11.*\\.tif$", full.names=TRUE)[1])
+ roi_ndti <- project(roi_vect_wgs, crs(r0)) |> ext()
+
+ # --------------------------------------------------------------------------
+ # 1) KEEP ONLY THIS SITE’S TILE (e.g. T10SGF), *not* every B06 in 2018
+ # --------------------------------------------------------------------------
+  # find relevant files
+ b11_all <- list.files(
+ tif_dir,
+ pattern = paste0("T", tileID, ".*B11.*\\.tif$"),
+ full.names = TRUE
+ )
+ # ...file cleaning, etc (copy your existing cleaning/overlap code here) ...
+
+ # extracting dates from file name
+ b11_dates <- as.Date(stringr::str_extract(b11_all, "\\d{7}"), "%Y%j")
+
+ # Date filtering & NA-removal
+ keep_idx <- !is.na(b11_dates) &
+ b11_dates >= as.Date("2018-01-01") &
+ b11_dates <= as.Date("2019-12-31")
+
+ b11_sub <- b11_all[keep_idx]
+ b11_dates <- b11_dates[keep_idx]
+ # ...date filtering etc as before...
+
+ # Pre-allocating Results Vectors
+ n_sen <- length(b11_sub)
+ dates_sen <- as.Date(rep(NA, n_sen))
+ ndvals_sen <- numeric(n_sen)
+
+ # for-loop processing Sentinel-2 data
+ pb <- txtProgressBar(0, n_sen, style = 3)
+ for (i in seq_along(b11_sub)) {
+ b11 <- b11_sub[i]
+ b12 <- sub("B11", "B12", b11)
+ if (!file.exists(b12)) { setTxtProgressBar(pb, i); next }
+ scene <- basename(b11)
+ scene_year <- stringr::str_extract(scene, "\\d{4}")
+ fmask_dir <- file.path("/projectnb/dietzelab/XinyuanJi/State_of_California_HLSS", paste0(scene_year, "_Fmask"))
+ fmask_file <- file.path(fmask_dir, sub("B11.tif$", "Fmask.tif", scene))
+
+ # Skip if missing
+ if (!file.exists(b12) || !file.exists(fmask_file)) {
+ setTxtProgressBar(pb, i)
+ next
+ }
+
+ r11 <- crop(rast(b11), roi_ndti)
+ r12 <- crop(rast(b12), roi_ndti)
+ fmask <- crop(rast(fmask_file), roi_ndti)
+
+ # calculate NDTI
+ ndti <- (r11 - r12) / (r11 + r12)
+
+ # cloud/shadow masking
+ # 1 = cloud; 3 = cloud shadow; 4 = snow
+ badmask <- (bitwAnd(values(fmask), bitwShiftL(1, 1)) != 0) |
+ (bitwAnd(values(fmask), bitwShiftL(1, 3)) != 0) |
+ (bitwAnd(values(fmask), bitwShiftL(1, 4)) != 0)
+ ndti[badmask] <- NA
+
+ # calculate the mean of all pixels in the raster
+ nd_mean <- global(ndti, fun = mean, na.rm = TRUE)[1, 1]
+
+ # storing the result
+ if (is.na(nd_mean)) { setTxtProgressBar(pb, i); next }
+ ndvals_sen[i] <- nd_mean
+ dates_sen[i] <- b11_dates[i]
+ setTxtProgressBar(pb, i)
+ }
+ close(pb)
+
+ ndti_sent <- tibble(
+ date = dates_sen,
+ mean_ndti = ndvals_sen
+ ) %>%
+ filter(!is.na(date) & !is.na(mean_ndti)) %>% # remove invalid rows
+ arrange(date) # sort the final result by date
+
+ # --------------------------------------------------------------------------
+ # LANDSAT: B06/B07; a copy of the code above but for Band 6 & 7
+ # --------------------------------------------------------------------------
+ b06_all <- list.files(
+ tif_dir,
+ pattern = paste0("T", tileID, ".*B06.*\\.tif$"),
+ full.names = TRUE
+ )
+ # ...copy file cleaning, overlap check code from above, but with B06 instead of B11 ...
+
+ b06_dates <- as.Date(stringr::str_extract(b06_all, "\\d{7}"), "%Y%j")
+ keep_idx <- !is.na(b06_dates) &
+ b06_dates >= as.Date("2018-01-01") &
+ b06_dates <= as.Date("2019-12-31")
+ b06_sub <- b06_all[keep_idx]
+ b06_dates <- b06_dates[keep_idx]
+ # ...date filtering...
+
+ n_lan <- length(b06_sub)
+ dates_lan <- as.Date(rep(NA, n_lan))
+ ndvals_lan <- numeric(n_lan)
+
+ pb <- txtProgressBar(0, n_lan, style = 3)
+ for (i in seq_along(b06_sub)) {
+ b06 <- b06_sub[i]
+ b07 <- sub("B06", "B07", b06)
+ if (!file.exists(b07)) { setTxtProgressBar(pb, i); next }
+
+ scene <- basename(b06)
+ scene_year <- stringr::str_extract(scene, "\\d{4}")
+ fmask_dir <- file.path("/projectnb/dietzelab/XinyuanJi/State_of_California_HLSL", paste0(scene_year, "_Fmask"))
+ fmask_file <- file.path(fmask_dir, sub("B06.tif$", "Fmask.tif", scene))
+
+ if (!file.exists(fmask_file)) {
+ setTxtProgressBar(pb, i)
+ next
+ }
+
+ r6 <- crop(rast(b06), roi_ndti)
+ r7 <- crop(rast(b07), roi_ndti)
+ fmask <- crop(rast(fmask_file), roi_ndti)
+
+ # calculate NDTI
+ ndti <- (r6 - r7) / (r6 + r7)
+
+ badmask <- (bitwAnd(values(fmask), bitwShiftL(1, 1)) != 0) |
+ (bitwAnd(values(fmask), bitwShiftL(1, 3)) != 0) |
+ (bitwAnd(values(fmask), bitwShiftL(1, 4)) != 0)
+ ndti[badmask] <- NA
+
+ nd_mean <- global(ndti, fun = mean, na.rm = TRUE)[1, 1]
+
+
+ if (is.na(nd_mean)) { setTxtProgressBar(pb, i); next }
+ ndvals_lan[i] <- nd_mean
+ dates_lan[i] <- b06_dates[i]
+ setTxtProgressBar(pb, i)
+ }
+ close(pb)
+
+ ndti_land <- tibble(
+ date = dates_lan,
+ mean_ndti = ndvals_lan
+ ) %>%
+ filter(!is.na(date) & !is.na(mean_ndti)) %>%
+ arrange(date)
+
+ # --------------------------------------------------------------------------
+ # COMBINE AND (OPTIONALLY) INDICATE SENSOR
+ # --------------------------------------------------------------------------
+ ndti_df <- bind_rows(ndti_land, ndti_sent)
+
+
+
+ # fill the gap
+ ndti_df$mean_ndti_filled <- na.approx(ndti_df$mean_ndti, x = ndti_df$date, na.rm = FALSE)
+ # smoothing with a Moving Average
+ w <- 4
+ k <- rep(1/w, w)
+ ndti_df$smoothed <- as.numeric(stats::filter(ndti_df$mean_ndti_filled, k, sides = 2))
+
+ # find annual minimum
+ ndti_smooth <- ndti_df[!is.na(ndti_df$smoothed), ]
+ min_row <- ndti_smooth[which.min(ndti_smooth$smoothed), ]
+ min_val <- min_row$smoothed
+ min_date <- min_row$date
+
+  # find the maximum before the minimum occurred
+ before_min <- ndti_smooth[ndti_smooth$date < min_date, ]
+
+ if (nrow(before_min) > 0 && !all(is.na(before_min$smoothed))) {
+ max_before_min_row <- before_min[which.max(before_min$smoothed), ]
+ max_before_min <- max_before_min_row$smoothed
+ max_before_min_date<- max_before_min_row$date
+ } else {
+ max_before_min <- NA
+ max_before_min_date<- NA
+ }
+
+ # calculate the Percentage Change (aka "drop")
+ ratio <- if (!is.na(max_before_min) && max_before_min != 0) {
+ (max_before_min - min_val) / max_before_min
+ } else NA
+
+ # Keep any extra info, e.g. treatment
+ tibble(
+ id = selected_anchor$id,
+ site_name = selected_anchor$site_name,
+ Treatment_Control = selected_anchor$Treatment_Control,
+ min_ndti_smooth = min_val,
+ min_ndti_date = min_date,
+ max_before_min = max_before_min,
+ max_before_min_date = max_before_min_date,
+ ndti_smooth_drop = ratio
+ )
+}
+
+
+
+# summary
+summary_table <- anchors %>%
+ split(.$id) %>%
+ map_dfr(process_ndti_for_site)
+
+# save the result
+# write.csv(summary_table, "/projectnb/dietzelab/XinyuanJi/ndti_all_sites_summary.csv", row.names = FALSE)
+
+
+
+# ndti_df$mean_ndti_filled <- na.approx(ndti_df$mean_ndti, x = ndti_df$date, na.rm = FALSE)
+# w <- 4
+# k <- rep(1/w, w)
+# ndti_df$smoothed <- as.numeric(stats::filter(ndti_df$mean_ndti_filled, k, sides = 2))
+#
+# ndti_smooth <- ndti_df[!is.na(ndti_df$smoothed), ]
+# min_row <- ndti_smooth[which.min(ndti_smooth$smoothed), ]
+# min_val <- min_row$smoothed
+# min_date <- min_row$date
+# before_min <- ndti_smooth[ndti_smooth$date < min_date, ]
+#
+# if (nrow(before_min) > 0 && !all(is.na(before_min$smoothed))) {
+# max_before_min_row <- before_min[which.max(before_min$smoothed), ]
+# max_before_min <- max_before_min_row$smoothed
+# max_before_min_date<- max_before_min_row$date
+# } else {
+# max_before_min <- NA
+# max_before_min_date<- NA
+# }
+# ratio <- if (!is.na(max_before_min) && max_before_min != 0) {
+# (max_before_min - min_val) / max_before_min
+# } else NA
+
diff --git a/modules/data.remote/man/DAAC_Set_Credential.Rd b/modules/data.remote/man/DAAC_Set_Credential.Rd
deleted file mode 100644
index fd4f566c2cf..00000000000
--- a/modules/data.remote/man/DAAC_Set_Credential.Rd
+++ /dev/null
@@ -1,19 +0,0 @@
-% Generated by roxygen2: do not edit by hand
-% Please edit documentation in R/NASA_DAAC_download.R
-\name{DAAC_Set_Credential}
-\alias{DAAC_Set_Credential}
-\title{Set NASA DAAC credentials to the current environment.}
-\usage{
-DAAC_Set_Credential(replace = FALSE, folder.path = NULL)
-}
-\arguments{
-\item{replace}{Boolean: determine if we want to replace the current credentials from the environment. The default is FALSE.}
-
-\item{folder.path}{Character: physical path to the folder that contains the credential file. The default is NULL.}
-}
-\description{
-Set NASA DAAC credentials to the current environment.
-}
-\author{
-Dongchen Zhang
-}
diff --git a/modules/data.remote/man/GEDI_AGB_prep.Rd b/modules/data.remote/man/GEDI_AGB_prep.Rd
index dac18b276fa..195c8353ef5 100644
--- a/modules/data.remote/man/GEDI_AGB_prep.Rd
+++ b/modules/data.remote/man/GEDI_AGB_prep.Rd
@@ -15,7 +15,7 @@ GEDI_AGB_prep(
prerun = NULL,
num.folder = NULL,
cores = parallel::detectCores(),
- credential.folder = "~"
+ credential_path = "~/.netrc"
)
}
\arguments{
@@ -39,7 +39,7 @@ GEDI_AGB_prep(
\item{cores}{Numeric: numbers of core to be used for the parallel computation. The default is the maximum current CPU number.}
-\item{credential.folder}{Character: the physical path to the folder that contains the credential file (.nasadaacapirc).}
+\item{credential_path}{Character: the physical path to the credential file. (.netrc).}
}
\value{
A data frame containing AGB and sd for each site and each time step.
diff --git a/modules/data.remote/man/MODIS_LAI_prep.Rd b/modules/data.remote/man/MODIS_LAI_prep.Rd
index 25140470b08..1f9f8e4869a 100644
--- a/modules/data.remote/man/MODIS_LAI_prep.Rd
+++ b/modules/data.remote/man/MODIS_LAI_prep.Rd
@@ -11,7 +11,7 @@ MODIS_LAI_prep(
search_window = 30,
export_csv = FALSE,
sd_threshold = 20,
- skip.download = TRUE,
+ skip_download = FALSE,
boundary = NULL
)
}
@@ -28,7 +28,7 @@ MODIS_LAI_prep(
\item{sd_threshold}{numeric or character: for filtering out any estimations with unrealistic high standard error, default is 20. The QC check will be skipped if it's set as NULL.}
-\item{skip.download}{boolean: determine if we want to use existing LAI.csv file and skip the MODIS LAI download part.}
+\item{skip_download}{boolean: determine if we want to use existing LAI.csv file and skip the MODIS LAI download part.}
\item{boundary}{numeric vector or list: the upper and lower quantiles for filtering out noisy LAI values (e.g., c(0.05, 0.95) or list(0.05, 0.95)). The default is NULL.}
}
diff --git a/modules/data.remote/man/NASA_DAAC_download.Rd b/modules/data.remote/man/NASA_DAAC_download.Rd
index a097ecbcc7f..6ec292a3e9d 100644
--- a/modules/data.remote/man/NASA_DAAC_download.Rd
+++ b/modules/data.remote/man/NASA_DAAC_download.Rd
@@ -14,7 +14,8 @@ NASA_DAAC_download(
to,
outdir = getwd(),
band = NULL,
- credential.folder = NULL,
+ data_version = NULL,
+ credential_path = NULL,
doi,
just_path = FALSE
)
@@ -39,10 +40,11 @@ NASA_DAAC_download(
\item{outdir}{Character: path of the directory in which to save the
downloaded files. Default is the current work directory(getwd()).}
-\item{band}{Character: the band name of data to be requested.}
+\item{band}{Character: the band name (or vector of band names) of data to be requested. Default is NULL.}
-\item{credential.folder}{Character: physical path to the folder that contains
-the credential file. The default is NULL.}
+\item{data_version}{Character: the version (typically starts with V) of data to be requested. Default is NULL.}
+
+\item{credential_path}{Character: physical path to the credential file (.netrc file). The default NULL.}
\item{doi}{Character: data DOI on the NASA DAAC server, it can be obtained
directly from the NASA ORNL DAAC data portal (e.g., GEDI L4A through
@@ -58,6 +60,7 @@ Parallel download data from the NASA ORNL DAAC server given period, spatial boun
}
\examples{
\dontrun{
+# SHIFT Hyper-spectral data.
ul_lat <- 35
ul_lon <- -121
lr_lat <- 33
@@ -65,7 +68,6 @@ lr_lon <- -117
from <- "2022-02-23"
to <- "2022-05-30"
doi <- "10.3334/ORNLDAAC/2183"
-outdir <- "/projectnb/dietzelab/dongchen/SHIFT/test_download"
paths <- NASA_DAAC_download(ul_lat = ul_lat,
ul_lon = ul_lon,
lr_lat = lr_lat,
@@ -74,6 +76,103 @@ paths <- NASA_DAAC_download(ul_lat = ul_lat,
to = to,
doi = doi,
just_path = T)
+# GEDI level 4A data.
+ul_lat <- 85
+ul_lon <- -179
+lr_lat <- 7
+lr_lon <- -20
+from <- "2020-01-01"
+to <- "2020-12-31"
+doi <- "10.3334/ORNLDAAC/2056"
+paths <- NASA_DAAC_download(ul_lat = ul_lat,
+ ul_lon = ul_lon,
+ lr_lat = lr_lat,
+ lr_lon = lr_lon,
+ from = from,
+ to = to,
+ data_version = "V2_1",
+ doi = doi,
+ just_path = T)
+# MODIS LAI data.
+ul_lat <- 85
+ul_lon <- -179
+lr_lat <- 7
+lr_lon <- -20
+from <- "2020-01-01"
+to <- "2020-01-31"
+doi <- "10.5067/MODIS/MCD15A3H.061"
+paths <- NASA_DAAC_download(ul_lat = ul_lat,
+ ul_lon = ul_lon,
+ lr_lat = lr_lat,
+ lr_lon = lr_lon,
+ from = from,
+ to = to,
+ doi = doi,
+ just_path = T)
+# SMAP Soil Moisture data.
+ul_lat <- 85
+ul_lon <- -179
+lr_lat <- 7
+lr_lon <- -20
+from <- "2020-01-01"
+to <- "2020-01-31"
+doi <- "10.5067/02LGW4DGJYRX"
+paths <- NASA_DAAC_download(ul_lat = ul_lat,
+ ul_lon = ul_lon,
+ lr_lat = lr_lat,
+ lr_lon = lr_lon,
+ from = from,
+ to = to,
+ doi = doi,
+ just_path = T)
+# GLANCE Phenology and LC data.
+ul_lat <- 85
+ul_lon <- -179
+lr_lat <- 7
+lr_lon <- -20
+from <- "2019-01-01"
+to <- "2019-12-31"
+doi <- "10.5067/MEaSUREs/GLanCE/GLanCE30.001"
+paths <- NASA_DAAC_download(ul_lat = ul_lat,
+ ul_lon = ul_lon,
+ lr_lat = lr_lat,
+ lr_lon = lr_lon,
+ from = from,
+ to = to,
+ doi = doi,
+ just_path = T)
+# HLS reflectance data.
+ul_lat <- 35
+ul_lon <- -121
+lr_lat <- 33
+lr_lon <- -117
+from <- "2022-02-23"
+to <- "2022-05-30"
+doi <- "10.5067/HLS/HLSS30.002"
+paths <- NASA_DAAC_download(ul_lat = ul_lat,
+ ul_lon = ul_lon,
+ lr_lat = lr_lat,
+ lr_lon = lr_lon,
+ from = from,
+ to = to,
+ doi = doi,
+ just_path = T)
+ ul_lat <- 35
+# HLS Phenology data.
+ul_lon <- -121
+lr_lat <- 33
+lr_lon <- -117
+from <- "2019-01-01"
+to <- "2019-12-31"
+doi <- "10.5067/Community/MuSLI/MSLSP30NA.011"
+paths <- NASA_DAAC_download(ul_lat = ul_lat,
+ ul_lon = ul_lon,
+ lr_lat = lr_lat,
+ lr_lon = lr_lon,
+ from = from,
+ to = to,
+ doi = doi,
+ just_path = T)
}
}
\author{
diff --git a/modules/data.remote/man/Prep.MODIS.CSV.from.DAAC.Rd b/modules/data.remote/man/Prep.MODIS.CSV.from.DAAC.Rd
index 7aba4579b64..82833a69771 100644
--- a/modules/data.remote/man/Prep.MODIS.CSV.from.DAAC.Rd
+++ b/modules/data.remote/man/Prep.MODIS.CSV.from.DAAC.Rd
@@ -10,7 +10,8 @@ Prep.MODIS.CSV.from.DAAC(
from,
to,
download.outdir,
- csv.outdir
+ csv.outdir,
+ credential_path
)
}
\arguments{
@@ -25,6 +26,8 @@ Prep.MODIS.CSV.from.DAAC(
\item{download.outdir}{character: Where the MODIS tiles will be stored.}
\item{csv.outdir}{character: Where the final CSV file will be stored.}
+
+\item{credential_path}{Character: physical path to the credential file (.netrc file).}
}
\value{
A data frame containing LAI and sd for each site and each time step.
diff --git a/modules/data.remote/man/Prep.SMAP.CSV.from.DAAC.Rd b/modules/data.remote/man/Prep.SMAP.CSV.from.DAAC.Rd
index 352b2616c6f..238e6ac2d97 100644
--- a/modules/data.remote/man/Prep.SMAP.CSV.from.DAAC.Rd
+++ b/modules/data.remote/man/Prep.SMAP.CSV.from.DAAC.Rd
@@ -11,7 +11,8 @@ Prep.SMAP.CSV.from.DAAC(
from,
to,
download.outdir,
- csv.outdir
+ csv.outdir,
+ credential_path
)
}
\arguments{
@@ -26,6 +27,8 @@ Prep.SMAP.CSV.from.DAAC(
\item{download.outdir}{character: Where the MODIS tiles will be stored.}
\item{csv.outdir}{character: Where the final CSV file will be stored.}
+
+\item{credential_path}{Character: physical path to the credential file (.netrc file).}
}
\value{
A data frame containing SMP and sd for each site and each time step.
diff --git a/modules/data.remote/man/download.thredds.AGB.Rd b/modules/data.remote/man/download.thredds.AGB.Rd
deleted file mode 100644
index 79efcce9998..00000000000
--- a/modules/data.remote/man/download.thredds.AGB.Rd
+++ /dev/null
@@ -1,39 +0,0 @@
-% Generated by roxygen2: do not edit by hand
-% Please edit documentation in R/download.thredds.R
-\name{download.thredds.AGB}
-\alias{download.thredds.AGB}
-\title{download.thredds.AGB}
-\usage{
-download.thredds.AGB(
- outdir = NULL,
- site_ids,
- run_parallel = FALSE,
- ncores = NULL
-)
-}
-\arguments{
-\item{outdir}{Where to place output}
-
-\item{site_ids}{What locations to download data at?}
-
-\item{run_parallel}{Logical. Download and extract files in parallel?}
-
-\item{ncores}{Optional. If run_parallel=TRUE how many cores to use? If left as NULL will select max number -1}
-}
-\value{
-data.frame summarize the results of the function call
-}
-\description{
-download.thredds.AGB
-}
-\examples{
-\dontrun{
-outdir <- "~/scratch/abg_data/"
-results <- PEcAn.data.remote::download.thredds.AGB(outdir=outdir,
- site_ids = c(676, 678, 679, 755, 767, 1000000030, 1000000145, 1000025731),
- run_parallel = TRUE, ncores = 8)
-}
-}
-\author{
-Bailey Morrison
-}
diff --git a/modules/data.remote/man/download_thredds.Rd b/modules/data.remote/man/download_thredds.Rd
new file mode 100644
index 00000000000..dedb54392d4
--- /dev/null
+++ b/modules/data.remote/man/download_thredds.Rd
@@ -0,0 +1,56 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/download.thredds.R
+\name{download_thredds}
+\alias{download_thredds}
+\title{download.thredds}
+\usage{
+download_thredds(
+ site_info,
+ dates,
+ varid,
+ dir_url,
+ data_url,
+ run_parallel = FALSE,
+ outdir = NULL
+)
+}
+\arguments{
+\item{site_info}{list containing site_id, site_name, lat, lon, time_zone.
+Derived from BETY using a PEcAn .xml settings file with site information.
+Can use the get_site_info function to generate this list.}
+
+\item{dates}{vector of start and end date for dataset as YYYYmmdd,
+YYYY-mm-dd, YYYYjjj, or date object.}
+
+\item{varid}{character vector of shorthand variable name. i.e. LAI}
+
+\item{dir_url}{catalog url of data from ncei.noaa.gov/thredds website}
+
+\item{data_url}{opendap url of data from ncei.noaa.gov/thredds website}
+
+\item{run_parallel}{Logical. Download and extract files in parallel?}
+
+\item{outdir}{file location to place output}
+}
+\value{
+data.frame summarize the results of the function call
+}
+\description{
+download.thredds
+}
+\examples{
+\dontrun{
+results <- download_thredds(
+ site_info = site_info,
+ dates = c("19950201", "19961215"),
+ varid = "LAI",
+ dir_url = "https://www.ncei.noaa.gov/thredds/catalog/cdr/lai/files",
+ data_url = "https://www.ncei.noaa.gov/thredds/dodsC/cdr/lai/files",
+ run_parallel = TRUE,
+ outdir = NULL)
+}
+
+}
+\author{
+Bailey Morrison
+}
diff --git a/modules/data.remote/man/extract_thredds_nc.Rd b/modules/data.remote/man/extract_thredds_nc.Rd
new file mode 100644
index 00000000000..7173e853276
--- /dev/null
+++ b/modules/data.remote/man/extract_thredds_nc.Rd
@@ -0,0 +1,38 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/download.thredds.R
+\name{extract_thredds_nc}
+\alias{extract_thredds_nc}
+\title{extract_thredds_nc}
+\usage{
+extract_thredds_nc(site_info, url, varid)
+}
+\arguments{
+\item{site_info}{list containing site_id, site_name, lat, lon, time_zone.
+Derived from BETY using a PEcAn .xml settings file with site information.
+Can use the get_site_info function to generate this list.}
+
+\item{url}{a THREDDS url of a .nc file to extract data from.}
+
+\item{varid}{character vector of shorthand variable name. i.e. LAI}
+}
+\value{
+a dataframe with the values for each date/site combination
+ from a THREDDS file
+}
+\description{
+extract_thredds_nc
+}
+\examples{
+\dontrun{
+thredds_url = paste0( # breaking up long URL for readability
+ "https://www.ncei.noaa.gov/thredds/dodsC/cdr/lai/files/1995/",
+ "AVHRR-Land_v005_AVH15C1_NOAA-14_19950201_c20180831220722.nc")
+output <- extract_thredds_nc(
+ site_info = site_info,
+ url = thredds_url,
+ varid = "LAI")
+}
+}
+\author{
+Bailey Morrison
+}
diff --git a/modules/data.remote/man/gdal_conversion.Rd b/modules/data.remote/man/gdal_conversion.Rd
new file mode 100644
index 00000000000..2dcfd3596ea
--- /dev/null
+++ b/modules/data.remote/man/gdal_conversion.Rd
@@ -0,0 +1,60 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/merge_image_tiles.R
+\name{gdal_conversion}
+\alias{gdal_conversion}
+\title{gdal_conversion}
+\usage{
+gdal_conversion(
+ in_path,
+ outfolder = NULL,
+ band_name = NULL,
+ tile_id = NULL,
+ just_band_name = TRUE,
+ target_format = ".tif"
+)
+}
+\arguments{
+\item{in_path}{character: physical path to the image file.}
+
+\item{outfolder}{character: physical path to the folder where you want to export the converted image. Default is NULL.}
+
+\item{band_name}{character: band name of the image. Default is NULL.}
+
+\item{tile_id}{character/numeric: id to differentiate converted image tiles.}
+
+\item{just_band_name}{logical: if we just want the band names of the image file. Default is TRUE.}
+
+\item{target_format}{character: target image format. Default is .tif.}
+}
+\description{
+This function provides a tool for remote sensing image conversion using the GDAL utility.
+}
+\details{
+Please note that, this function only supports conversions for one band of one image.
+If you want to convert multiple images or bands, make sure to loop over these targets.
+Currently tested with H5, NetCDF, HDF4, and GeoTIFF formats.
+This function should work with any GDAL-supported image format.
+}
+\examples{
+\dontrun{
+in_path <- "/projectnb/dietzelab/malmborg/CARB/HLS_data/MSLSP_10SDH_2016.nc"
+outfolder <- "/projectnb/dietzelab/dongchen/anchorSites/NA_runs/MODIS_Phenology"
+band_name <- "NumCycles"
+# try grab all available bands from the target file.
+band_names <-
+ gdal_conversion(in_path = in_path,
+ outfolder = outfolder,
+ band_name = NULL,
+ just_band_name = T)
+# try convert the first band of the available band names to GeoTIFF file.
+f <-
+ gdal_conversion(in_path = in_path,
+ outfolder = outfolder,
+ band_name = band_names[1],
+ just_band_name = F,
+ target_format = ".tif")
+}
+}
+\author{
+Dongchen Zhang
+}
diff --git a/modules/data.remote/man/gdal_translate.Rd b/modules/data.remote/man/gdal_translate.Rd
new file mode 100644
index 00000000000..b799b847303
--- /dev/null
+++ b/modules/data.remote/man/gdal_translate.Rd
@@ -0,0 +1,20 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/merge_image_tiles.R
+\name{gdal_translate}
+\alias{gdal_translate}
+\title{gdal_translate}
+\usage{
+gdal_translate(from, to)
+}
+\arguments{
+\item{from}{character: subdataset name.
+Generated from the `get_subdatasets` function.}
+
+\item{to}{character: physical path to the output file.}
+}
+\description{
+This function provides a wrapper for the gdal_translate execution.
+}
+\author{
+Dongchen Zhang
+}
diff --git a/modules/data.remote/man/get_site_info.Rd b/modules/data.remote/man/get_site_info.Rd
new file mode 100644
index 00000000000..8e8cb38f47d
--- /dev/null
+++ b/modules/data.remote/man/get_site_info.Rd
@@ -0,0 +1,35 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/download.thredds.R
+\name{get_site_info}
+\alias{get_site_info}
+\title{get_site_info}
+\usage{
+get_site_info(settings)
+}
+\arguments{
+\item{settings}{a PEcAn settings object}
+}
+\value{
+a list of site information derived from BETY using a pecan .xml
+  settings file with site_id, site_name, lat, lon, and time_zone.
+}
+\description{
+get_site_info
+}
+\examples{
+\dontrun{
+settings <- PEcAn.settings::read.settings("/path/to/pecan.xml")
+site_info <- get_site_info(settings)
+results <- download_thredds(
+  site_info = site_info,
+  dates = c("19950201", "19961215"),
+  varid = "LAI",
+  dir_url = "https://www.ncei.noaa.gov/thredds/catalog/cdr/lai/files",
+  data_url = "https://www.ncei.noaa.gov/thredds/dodsC/cdr/lai/files",
+  run_parallel = TRUE,
+  outdir = NULL)
+}
+}
+\author{
+Bailey Morrison
+}
diff --git a/modules/data.remote/man/get_subdatasets.Rd b/modules/data.remote/man/get_subdatasets.Rd
new file mode 100644
index 00000000000..d8cc94ca956
--- /dev/null
+++ b/modules/data.remote/man/get_subdatasets.Rd
@@ -0,0 +1,17 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/merge_image_tiles.R
+\name{get_subdatasets}
+\alias{get_subdatasets}
+\title{get_subdatasets.}
+\usage{
+get_subdatasets(in_path)
+}
+\arguments{
+\item{in_path}{character: physical path to the image file.}
+}
+\description{
+This function provides a tool for reading the band names of a remote sensing image.
+}
+\author{
+Dongchen Zhang
+}
diff --git a/modules/data.remote/man/getnetrc.Rd b/modules/data.remote/man/getnetrc.Rd
new file mode 100644
index 00000000000..420cdf42f7b
--- /dev/null
+++ b/modules/data.remote/man/getnetrc.Rd
@@ -0,0 +1,17 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/NASA_DAAC_download.R
+\name{getnetrc}
+\alias{getnetrc}
+\title{Set NASA DAAC credentials to the .netrc file.}
+\usage{
+getnetrc(dl_path)
+}
+\arguments{
+\item{dl_path}{Character: physical path to the .netrc credential file.}
+}
+\description{
+Set NASA DAAC credentials to the .netrc file.
+}
+\author{
+Dongchen Zhang
+}
diff --git a/modules/data.remote/man/merge_image_tiles.Rd b/modules/data.remote/man/merge_image_tiles.Rd
new file mode 100644
index 00000000000..90a3cc0a1c5
--- /dev/null
+++ b/modules/data.remote/man/merge_image_tiles.Rd
@@ -0,0 +1,53 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/merge_image_tiles.R
+\name{merge_image_tiles}
+\alias{merge_image_tiles}
+\title{Merge image tiles to a single image (currently support hdf and tif image format).}
+\usage{
+merge_image_tiles(
+ in.path,
+ out.path = NULL,
+ band.name = NULL,
+ just.band.name = TRUE,
+ keep.files = FALSE,
+ skip.conversion = FALSE,
+ image.settings = list(crs = "EPSG:4326", dimension = NULL, ext = NULL, fun = NULL),
+ computation = list(GDAL_CACHEMAX = 1000, wm = "80\%", NUM_THREADS =
+ parallel::detectCores() - 1, COMPRESS = "DEFLATE")
+)
+}
+\arguments{
+\item{in.path}{character: physical path to the folder that contains all the original image tiles.}
+
+\item{out.path}{character: physical path to the folder that contains converted and merged images.}
+
+\item{band.name}{character: band name of the image. Default is NULL.}
+
+\item{just.band.name}{logical: if we just want the band names of the image file. Default is TRUE.}
+
+\item{keep.files}{logical: if we want to keep the image tiles at the end.}
+
+\item{skip.conversion}{logical: if we want to ignore the image conversion.
+Note that this is an experimental feature, which only works when images are all in the GeoTIFF format.
+
+\item{image.settings}{list: settings used during exporting merged image.
+Such as image coordinate system (crs), dimension, extents (ext), and average function (fun).}
+
+\item{computation}{list: settings used for configuring computation.
+Such as maximum memory per CPU (GDAL_CACHEMAX), percentage of total memory (wm),
+number of CPUs (NUM_THREADS), compress method (COMPRESS).}
+}
+\value{
+character: file path to the merged GeoTIFF file.
+}
+\description{
+Merge image tiles to a single image (currently support hdf and tif image format).
+}
+\details{
+Please make sure all image tiles are stored in the `folder.path`.
+Please refer to the gdalwarp manual for more details
+https://gdal.org/en/stable/programs/gdalwarp.html
+}
+\author{
+Dongchen Zhang
+}
diff --git a/modules/emulator/DESCRIPTION b/modules/emulator/DESCRIPTION
index 4cf1d892091..0a07df67371 100644
--- a/modules/emulator/DESCRIPTION
+++ b/modules/emulator/DESCRIPTION
@@ -1,7 +1,7 @@
Package: PEcAn.emulator
Type: Package
Title: Gausian Process Emulator
-Version: 1.8.1
+Version: 1.8.2
Authors@R: c(person("Mike", "Dietze", role = c("aut", "cre"),
email = "dietze@bu.edu"),
person("University of Illinois, NCSA", role = c("cph")))
@@ -13,6 +13,9 @@ Imports:
Description: Implementation of a Gaussian Process model (both likelihood and
bayesian approaches) for kriging and model emulation. Includes functions
for sampling design and prediction.
+URL: https://pecanproject.github.io
+BugReports: https://github.com/PecanProject/pecan/issues
License: BSD_3_clause + file LICENSE
Encoding: UTF-8
RoxygenNote: 7.3.2
+X-schema.org-keywords: emulator, gaussian-process
diff --git a/modules/emulator/NEWS.md b/modules/emulator/NEWS.md
index e8653523974..0fd4703c018 100644
--- a/modules/emulator/NEWS.md
+++ b/modules/emulator/NEWS.md
@@ -1,3 +1,9 @@
+# PEcAn.emulator 1.8.2
+
+* Added keywords and bug reporting URL to DESCRIPTION. No other files changed.
+
+
+
# PEcAn.emulator 1.8.1
## License change
diff --git a/modules/emulator/man/PEcAn.emulator-package.Rd b/modules/emulator/man/PEcAn.emulator-package.Rd
index ba35157964c..7e30243d008 100644
--- a/modules/emulator/man/PEcAn.emulator-package.Rd
+++ b/modules/emulator/man/PEcAn.emulator-package.Rd
@@ -9,6 +9,14 @@ Supports both likelihood and bayesian approaches for kriging and model
emulation. Includes functions for sampling design and prediction.}
\description{
Implementation of a Gaussian Process model (both likelihood and bayesian approaches) for kriging and model emulation. Includes functions for sampling design and prediction.
+}
+\seealso{
+Useful links:
+\itemize{
+ \item \url{https://pecanproject.github.io}
+ \item Report bugs at \url{https://github.com/PecanProject/pecan/issues}
+}
+
}
\author{
\strong{Maintainer}: Mike Dietze \email{dietze@bu.edu}
diff --git a/modules/meta.analysis/DESCRIPTION b/modules/meta.analysis/DESCRIPTION
index dec56063417..9f03d837886 100644
--- a/modules/meta.analysis/DESCRIPTION
+++ b/modules/meta.analysis/DESCRIPTION
@@ -1,7 +1,7 @@
Package: PEcAn.MA
Type: Package
Title: PEcAn Functions Used for Meta-Analysis
-Version: 1.7.4
+Version: 1.7.5
Authors@R: c(person("Mike", "Dietze", role = c("aut"),
email = "dietze@bu.edu"),
person("David", "LeBauer", role = c("aut", "cre"),
@@ -24,6 +24,8 @@ Description: The Predictive Ecosystem Carbon Analyzer (PEcAn) is a scientific
streamline the interaction between data and models, and to improve the
efficacy of scientific investigation. The PEcAn.MA package contains
the functions used in the Bayesian meta-analysis of trait data.
+URL: https://pecanproject.github.io
+BugReports: https://github.com/PecanProject/pecan/issues
Imports:
coda (>= 0.18),
lattice,
@@ -48,3 +50,4 @@ LazyData: FALSE
Encoding: UTF-8
RoxygenNote: 7.3.2
Roxygen: list(markdown = TRUE)
+X-schema.org-keywords: parameter-estimation, model-calibration
diff --git a/modules/meta.analysis/NEWS.md b/modules/meta.analysis/NEWS.md
index 98752177b41..564362593e2 100644
--- a/modules/meta.analysis/NEWS.md
+++ b/modules/meta.analysis/NEWS.md
@@ -1,9 +1,17 @@
+# PEcAn.MA 1.7.5
+
+* Added bug reporting URL and keywords to DESCRIPTION.
+* Minor internal code cleanup with no user-visible changes.
+
+
+
# PEcAn.MA 1.7.4
## License change
* PEcAn.MA is now distributed under the BSD three-clause license instead of the NCSA Open Source license.
+
# PEcAn.MA 1.7.1
* All changes in 1.7.1 and earlier were recorded in a single file for all of the PEcAn packages; please see https://github.com/PecanProject/pecan/blob/v1.7.1/CHANGELOG.md for details.
diff --git a/modules/meta.analysis/R/run.meta.analysis.R b/modules/meta.analysis/R/run.meta.analysis.R
index 360d3453a0a..b1580e0779e 100644
--- a/modules/meta.analysis/R/run.meta.analysis.R
+++ b/modules/meta.analysis/R/run.meta.analysis.R
@@ -208,22 +208,26 @@ runModule.run.meta.analysis <- function(settings) {
PEcAn.logger::logger.info(paste0("Running meta-analysis on all PFTs listed by any Settings object in the list: ",
paste(pft.names, collapse = ", ")))
- iterations <- settings$meta.analysis$iter
- random <- settings$meta.analysis$random.effects$on
- use_ghs <- settings$meta.analysis$random.effects$use_ghs
- threshold <- settings$meta.analysis$threshold
- dbfiles <- settings$database$dbfiles
- database <- settings$database$bety
- run.meta.analysis(pfts, iterations, random, threshold, dbfiles, database, use_ghs)
+ run.meta.analysis(
+ pfts,
+ settings$meta.analysis$iter,
+ settings$meta.analysis$random.effects$on,
+ settings$meta.analysis$threshold,
+ settings$database$dbfiles,
+ settings$database$bety,
+ settings$meta.analysis$random.effects$use_ghs
+ )
} else if (PEcAn.settings::is.Settings(settings)) {
- pfts <- settings$pfts
- iterations <- settings$meta.analysis$iter
- random <- settings$meta.analysis$random.effects$on
- use_ghs <- settings$meta.analysis$random.effects$use_ghs
- threshold <- settings$meta.analysis$threshold
- dbfiles <- settings$database$dbfiles
- database <- settings$database$bety
- run.meta.analysis(pfts, iterations, random, threshold, dbfiles, database, use_ghs, update = settings$meta.analysis$update)
+ run.meta.analysis(
+ settings$pfts,
+ settings$meta.analysis$iter,
+ settings$meta.analysis$random.effects$on,
+ settings$meta.analysis$threshold,
+ settings$database$dbfiles,
+ settings$database$bety,
+ settings$meta.analysis$random.effects$use_ghs,
+ update = settings$meta.analysis$update
+ )
} else {
stop("runModule.run.meta.analysis only works with Settings or MultiSettings")
}
diff --git a/modules/meta.analysis/inst/magic/extract_rows_traitID.R b/modules/meta.analysis/inst/magic/extract_rows_traitID.R
new file mode 100644
index 00000000000..564c9b33464
--- /dev/null
+++ b/modules/meta.analysis/inst/magic/extract_rows_traitID.R
@@ -0,0 +1,30 @@
+library(readxl)
+library(dplyr)
+
+extract_rows_traitID <- function(excel_path, sheet_name = 1, trait_ID) {
+
+ #reads the excel sheet
+ data <- readxl::read_excel(excel_path, sheet = sheet_name, col_types = NULL)
+
+ #throws an error if TraitID is not within the sheet
+ if (!"TraitID" %in% colnames(data)) {
+ stop("Column TraitID not found within the excel sheet")
+ }
+
+ # finds the rows with the desired TraitID
+ matched_rows <- which(data$TraitID == trait_ID)
+
+ if (length(matched_rows) > 0) {
+ filtered_rows <- data[matched_rows, ]
+ } else {
+ filtered_rows <- data.frame()
+ message("No rows found with TraitID of ", trait_ID)
+ }
+
+
+  # return the rows matching the requested trait ID (empty data frame if none)
+
+ return (filtered_rows)
+}
+
+
diff --git a/modules/meta.analysis/inst/magic/get_stats.R b/modules/meta.analysis/inst/magic/get_stats.R
new file mode 100644
index 00000000000..071ce63f754
--- /dev/null
+++ b/modules/meta.analysis/inst/magic/get_stats.R
@@ -0,0 +1,26 @@
+library(dplyr)
+
+get_stats <- function(data, value_column, group_column, species_column = "SpeciesName", species_name) {
+
+ # Filter by species
+ data_filtered <- data %>%
+ filter(.data[[species_column]] == species_name)
+
+ # Convert value column to numeric
+ data_filtered[[value_column]] <- as.numeric(data_filtered[[value_column]])
+
+ # Summarize statistics
+ summary_df <- data_filtered %>%
+ group_by(across(all_of(group_column))) %>%
+ summarise(
+ mean_value = mean(.data[[value_column]], na.rm = TRUE),
+ sd = sd(.data[[value_column]], na.rm = TRUE),
+ n = sum(!is.na(.data[[value_column]])),
+ .groups = "drop"
+ ) %>%
+ rename(TraitID = 1)
+
+ return(summary_df)
+}
+
+
diff --git a/modules/meta.analysis/inst/magic/initialize_planting.R b/modules/meta.analysis/inst/magic/initialize_planting.R
new file mode 100644
index 00000000000..badc6e9a613
--- /dev/null
+++ b/modules/meta.analysis/inst/magic/initialize_planting.R
@@ -0,0 +1,24 @@
+library(dplyr)
+
+initialize_planting <- function(species_name) {
+
+ species_stats <- data.frame(
+ get_stats(planting_df, "OrigValueStr", "TraitID", "SpeciesName", species_name)
+ )
+ return(species_stats)
+
+ #calculate for each trait
+
+ #3441 leafC
+
+ #128 wood/stemC
+
+ #3450 rootC
+
+ #2005 fine-rootC
+
+ #1534 coarse-rootC
+
+ #output a table with the information
+
+}
\ No newline at end of file
diff --git a/modules/meta.analysis/inst/magic/main.R b/modules/meta.analysis/inst/magic/main.R
new file mode 100644
index 00000000000..489ac7e48c1
--- /dev/null
+++ b/modules/meta.analysis/inst/magic/main.R
@@ -0,0 +1,18 @@
+#this will be the main file where the entire program will be run
+library(readxl)
+library(dplyr)
+
+source("extract_rows_traitID.R")
+
+main <- function(files, trait_IDs, sheet_name = 1) {
+ combined_data <- data.frame()
+
+ for (file in files) {
+ for (id in trait_IDs) {
+ temp <- extract_rows_traitID(file, sheet_name = 1, trait_ID = id)
+ combined_data <- bind_rows(combined_data, temp)
+ }
+ }
+ return (combined_data)
+}
+
diff --git a/modules/meta.analysis/inst/magic/run_program.R b/modules/meta.analysis/inst/magic/run_program.R
new file mode 100644
index 00000000000..34dc3462a7f
--- /dev/null
+++ b/modules/meta.analysis/inst/magic/run_program.R
@@ -0,0 +1,35 @@
+#this will run the entire program
+
+#example run: test_df1 <- run_program(time = "planting", species = "Rubus idaeus")
+
+library(readxl)
+library(dplyr)
+library(taxize)
+
+source("get_stats.R")
+
+run_program <- function(time, species) {
+ #time is harvest/planting
+ #pool is part of plant
+ #species is species
+
+  temp_plant_df <- get_stats(master_data, value_column = "OrigValueStr", group_column = "TraitID", species_column = "SpeciesName", species_name = species)
+
+ #need to implement if NA, return species with like traits
+
+ if (time == "planting") {
+ temp_plant_df <- temp_plant_df[temp_plant_df$TraitID %in% c("3441", "128", "2005", "1534"),]
+ }
+ else if (time == "harvest") {
+ temp_plant_df <- temp_plant_df[temp_plant_df$TraitID %in% c("3962", "470"),]
+
+ }
+
+ if (nrow(temp_plant_df) == 0) {
+ return(paste(species, "has nothing"))
+ }
+
+ return (temp_plant_df)
+
+}
+
diff --git a/modules/photosynthesis/DESCRIPTION b/modules/photosynthesis/DESCRIPTION
index 9ceeae6886c..3a2fd98dd2a 100644
--- a/modules/photosynthesis/DESCRIPTION
+++ b/modules/photosynthesis/DESCRIPTION
@@ -1,7 +1,7 @@
Package: PEcAn.photosynthesis
Type: Package
Title: PEcAn functions used for leaf-level photosynthesis calculations
-Version: 1.7.4
+Version: 1.7.5
Authors@R: c(person("Mike", "Dietze", role = c("aut", "cre"),
email = "dietze@bu.edu"),
person("Xiaohui", "Feng", role = c("aut"),
@@ -18,6 +18,8 @@ Description: The Predictive Ecosystem Carbon Analyzer (PEcAn) is a scientific
efficacy of scientific investigation. The PEcAn.photosynthesis package
contains functions used in the Hierarchical Bayesian calibration of the
Farquhar et al 1980 model.
+URL: https://pecanproject.github.io
+BugReports: https://github.com/PecanProject/pecan/issues
Depends:
rjags
Imports:
@@ -37,3 +39,4 @@ LazyData: FALSE
Encoding: UTF-8
RoxygenNote: 7.3.2
VignetteBuilder: knitr, rmarkdown
+X-schema.org-keywords: photosynthesis, PEcAn, Bayesian calibration, JAGS
diff --git a/modules/photosynthesis/NEWS.md b/modules/photosynthesis/NEWS.md
index 827899769d5..1d024bbf3a1 100644
--- a/modules/photosynthesis/NEWS.md
+++ b/modules/photosynthesis/NEWS.md
@@ -1,3 +1,9 @@
+# PEcAn.photosynthesis 1.7.5
+
+* Added bug reporting URL and keywords to DESCRIPTION. No code changes in this version.
+
+
+
# PEcAn.photosynthesis 1.7.4
## License change
@@ -6,6 +12,8 @@
## Fixed
* Vignette compilation now works
+
+
# PEcAn.photosynthesis 1.7.3
## Fixed
@@ -13,6 +21,8 @@
* Cleaned up many check warnings (@moki1202, #2821)
* Fixed vignette compilation
+
+
# PEcAn.photosynthesis 1.7.1
* All changes in 1.7.1 and earlier were recorded in a single file for all of the PEcAn packages; please see
diff --git a/modules/priors/DESCRIPTION b/modules/priors/DESCRIPTION
index 1c0c11e1691..ee64ce673eb 100644
--- a/modules/priors/DESCRIPTION
+++ b/modules/priors/DESCRIPTION
@@ -1,11 +1,13 @@
Package: PEcAn.priors
Type: Package
Title: PEcAn Functions Used to Estimate Priors from Data
-Version: 1.7.4
+Version: 1.7.5
Authors@R: c(person("David", "LeBauer", role = c("aut", "cre"),
email = "dlebauer@email.arizona.edu"),
person("University of Illinois, NCSA", role = c("cph")))
Description: Functions to estimate priors from data.
+URL: https://pecanproject.github.io
+BugReports: https://github.com/PecanProject/pecan/issues
License: BSD_3_clause + file LICENSE
Copyright: Authors
LazyLoad: yes
@@ -26,3 +28,4 @@ Suggests:
Encoding: UTF-8
VignetteBuilder: knitr, rmarkdown
RoxygenNote: 7.3.2
+X-schema.org-keywords: Bayesian-inference, statistical-validation
diff --git a/modules/priors/NEWS.md b/modules/priors/NEWS.md
index c6771095e80..52d9ed431a9 100644
--- a/modules/priors/NEWS.md
+++ b/modules/priors/NEWS.md
@@ -1,3 +1,10 @@
+# PEcAn.priors 1.7.5
+
+* Added bug reporting URL and keywords to DESCRIPTION.
+* Updated tests for compatibility with testthat 3.3.0
+
+
+
# PEcAn.priors 1.7.4
## License change
@@ -6,6 +13,8 @@
## Fixed
* Cleaned up many `R CMD check` messsages
+
+
# PEcAn.priors 1.7.1
* All changes in 1.7.1 and earlier were recorded in a single file for all of the PEcAn packages; please see
diff --git a/modules/priors/tests/testthat/test.priors.R b/modules/priors/tests/testthat/test.priors.R
index 495e8447935..1c57c4daf37 100644
--- a/modules/priors/tests/testthat/test.priors.R
+++ b/modules/priors/tests/testthat/test.priors.R
@@ -1,45 +1,43 @@
-test_that("pr.dens works",{
+test_that("pr.dens works", {
## pr.dens()
- expect_that(nrow(pr.dens('norm', 0, 1, n=10, alpha=0.5)),
- equals(1))
- expect_that(nrow(pr.dens('norm', 0, 10, n=10, alpha=0.5)),
- equals(1)) # function should set n=1 when alpha = 0.5
- expect_that(nrow(pr.dens('norm', 0, 10, n=10, alpha=0.4)),
- equals(10))
- expect_that(sum(pr.dens('norm', 0, 10, n=10, alpha=0.4)$x),
- equals(0))
+ expect_equal(nrow(pr.dens("norm", 0, 1, n = 10, alpha = 0.5)),
+ 1)
+ expect_equal(nrow(pr.dens("norm", 0, 10, n = 10, alpha = 0.5)),
+ 1) # function should set n=1 when alpha = 0.5
+ expect_equal(nrow(pr.dens("norm", 0, 10, n = 10, alpha = 0.4)),
+ 10)
+ expect_equal(sum(pr.dens("norm", 0, 10, n = 10, alpha = 0.4)$x),
+ 0)
})
-test_that("pr.samp works",{
- ## pr.samp()
- expect_that(length(pr.samp('norm', 0, 1, 2)),
- equals(2))
- expect_that(pr.samp('norm', 0, 1, 1) < 100,
- is_true())
+test_that("pr.samp works", {
+ ## pr.samp()
+ expect_length(pr.samp("norm", 0, 1, 2),
+ 2)
+ expect_lt(pr.samp("norm", 0, 1, 1),
+ 100)
})
-
test_that("create.density.df works on both stated distribution and samples", {
- prior.df <- create.density.df(distribution = list('norm', 0, 1), n = 1000)
- samp.df <- create.density.df(samps = stats::qnorm(1:100/101), n = 1000)
+ prior.df <- create.density.df(distribution = list("norm", 0, 1), n = 1000)
+ samp.df <- create.density.df(samps = stats::qnorm(1:100 / 101), n = 1000)
expect_equal(colnames(prior.df), colnames(samp.df))
expect_equal(dim(prior.df), dim(samp.df))
expect_equal(colnames(prior.df), c("x", "y"))
expect_equal(nrow(prior.df), 1000)
})
-
test_that("get.quantiles.from.density works", {
- samp.df <- create.density.df(samps = stats::qnorm(1:100/101), n = 1000)
+ samp.df <- create.density.df(samps = stats::qnorm(1:100 / 101), n = 1000)
test.q <- get.quantiles.from.density(samp.df, quantiles = c(0.25, 0.5, 0.75))
expect_is(test.q, "data.frame")
expect_equal(signif(test.q$x, 3), c(-0.711, -0.00337, 0.705))
expect_equal(signif(test.q$y, 3), c(0.304, 0.381, 0.305))
- expect_equal(dim(test.q), c(3,2))
+ expect_equal(dim(test.q), c(3, 2))
})
-test_that("plot_prior.density returns ggplot object",{
- expect_is(plot_prior.density(pr.dens('norm', 0, 1)), "ggplot")
+test_that("plot_prior.density returns ggplot object", {
+ expect_is(plot_prior.density(pr.dens("norm", 0, 1)), "ggplot")
})
diff --git a/modules/rtm/DESCRIPTION b/modules/rtm/DESCRIPTION
index 8becb9bedb8..9db680c8968 100644
--- a/modules/rtm/DESCRIPTION
+++ b/modules/rtm/DESCRIPTION
@@ -1,7 +1,7 @@
Package: PEcAnRTM
Type: Package
Title: PEcAn Functions Used for Radiative Transfer Modeling
-Version: 1.9.0
+Version: 1.9.1
Authors@R: c(person("Mike", "Dietze", role = c("aut"),
email = "dietze@bu.edu"),
person("Shawn", "Serbin", role = c("aut"),
@@ -13,6 +13,8 @@ Description: Functions for performing forward runs and inversions of radiative
transfer models (RTMs). Inversions can be performed using maximum
likelihood, or more complex hierarchical Bayesian methods.
Underlying numerical analyses are optimized for speed using Fortran code.
+URL: https://pecanproject.github.io
+BugReports: https://github.com/PecanProject/pecan/issues
Depends: R (>= 2.10)
Imports:
BayesianTools,
@@ -41,3 +43,4 @@ Encoding: UTF-8
VignetteBuilder: knitr, rmarkdown
Roxygen: list(markdown = TRUE)
RoxygenNote: 7.3.2
+X-schema.org-keywords: radiative-transfer-modeling, Bayesian-inversion, PEcAn
diff --git a/modules/rtm/NEWS.md b/modules/rtm/NEWS.md
index 379cf95da60..7a52dbef5dc 100644
--- a/modules/rtm/NEWS.md
+++ b/modules/rtm/NEWS.md
@@ -1,3 +1,9 @@
+# PEcAnRTM 1.9.1
+
+* Added keywords and bug reporting URL to DESCRIPTION. No code changes in this version.
+
+
+
# PEcAnRTM 1.7.4
## License change
diff --git a/modules/uncertainty/DESCRIPTION b/modules/uncertainty/DESCRIPTION
index d0f4511ada8..6a32d353c6d 100644
--- a/modules/uncertainty/DESCRIPTION
+++ b/modules/uncertainty/DESCRIPTION
@@ -1,7 +1,7 @@
Package: PEcAn.uncertainty
Type: Package
Title: PEcAn Functions Used for Propagating and Partitioning Uncertainties in Ecological Forecasts and Reanalysis
-Version: 1.8.1
+Version: 1.9.0
Authors@R: c(person("Mike", "Dietze", role = c("aut"),
email = "dietze@bu.edu"),
person("David", "LeBauer", role = c("aut", "cre"),
@@ -26,6 +26,8 @@ Description: The Predictive Ecosystem Carbon Analyzer
PECAn is to streamline the interaction between data and
models, and to improve the efficacy of scientific
investigation.
+URL: https://pecanproject.github.io
+BugReports: https://github.com/PecanProject/pecan/issues
Imports:
dplyr,
ggplot2,
@@ -39,12 +41,15 @@ Imports:
plyr (>= 1.8.4),
purrr,
randtoolbox,
- rlang
+ rlang,
+ sensitivity
Suggests:
testthat (>= 1.0.2),
+ mockery
License: BSD_3_clause + file LICENSE
Copyright: Authors
LazyLoad: yes
LazyData: FALSE
Encoding: UTF-8
RoxygenNote: 7.3.2
+X-schema.org-keywords: ecological-forecasting, sensitivity-analysis, PEcAn, uncertainty-propagation
diff --git a/modules/uncertainty/LICENSE b/modules/uncertainty/LICENSE
index 5a9e44128f1..f019813a5c5 100644
--- a/modules/uncertainty/LICENSE
+++ b/modules/uncertainty/LICENSE
@@ -1,34 +1,3 @@
-## This is the master copy of the PEcAn License
-
-University of Illinois/NCSA Open Source License
-
-Copyright (c) 2012, University of Illinois, NCSA. All rights reserved.
-
-PEcAn project
-www.pecanproject.org
-
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of this software and associated documentation files (the
-"Software"), to deal with the Software without restriction, including
-without limitation the rights to use, copy, modify, merge, publish,
-distribute, sublicense, and/or sell copies of the Software, and to
-permit persons to whom the Software is furnished to do so, subject to
-the following conditions:
-
-- Redistributions of source code must retain the above copyright
- notice, this list of conditions and the following disclaimers.
-- Redistributions in binary form must reproduce the above copyright
- notice, this list of conditions and the following disclaimers in the
- documentation and/or other materials provided with the distribution.
-- Neither the names of University of Illinois, NCSA, nor the names
- of its contributors may be used to endorse or promote products
- derived from this Software without specific prior written permission.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-IN NO EVENT SHALL THE CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR
-ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
-CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
-WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS WITH THE SOFTWARE.
-
+YEAR: 2025
+COPYRIGHT HOLDER: PEcAn Project
+ORGANIZATION: PEcAn Project, authors affiliations
diff --git a/modules/uncertainty/NAMESPACE b/modules/uncertainty/NAMESPACE
index c96e652f9f2..0afe45c1822 100644
--- a/modules/uncertainty/NAMESPACE
+++ b/modules/uncertainty/NAMESPACE
@@ -1,8 +1,10 @@
# Generated by roxygen2: do not edit by hand
+export(compute_sobol_indices)
export(ensemble.filename)
export(ensemble.ts)
export(flux.uncertainty)
+export(generate_joint_ensemble_design)
export(get.change)
export(get.coef.var)
export(get.elasticity)
@@ -32,5 +34,5 @@ export(spline.truncate)
export(write.ensemble.configs)
export(write.sa.configs)
importFrom(dplyr,"%>%")
-importFrom(purrr,`%||%`)
+importFrom(rlang,"%||%")
importFrom(rlang,.data)
diff --git a/modules/uncertainty/NEWS.md b/modules/uncertainty/NEWS.md
index 2bc3716c64a..566c867dfe9 100644
--- a/modules/uncertainty/NEWS.md
+++ b/modules/uncertainty/NEWS.md
@@ -1,13 +1,42 @@
+# PEcAn.uncertainty 1.9.0
+
+## License change
+
+PEcAn.uncertainty is now distributed under the BSD 3-clause license instead of the NCSA Open Source license.
+
+
+## Added
+
+* Added a shared `input_design` matrix, generated via the new function `generate_joint_ensemble_design()`, which keeps parameter draws and sampled inputs aligned across `run.write.configs()`, `write.ensemble.configs()`, and `write.sa.configs()`. (@blesson-07, #3535, #3677).
+* New function `compute_sobol_indices` for use in global sensitivity analyses (@blesson-07, #3612).
+
+
+## Changed
+
+* Breaking: `write.ensemble.configs` gains new mandatory arguments `input_design` and `ensemble.size`, and removes argument `samples`. Note that `input_design` and `ensemble.size` are added to the _beginning_ of the argument list, so calls passing `defaults` unnamed as the first arg will break (#3612, #3634).
+* Breaking: `input.ens.gen` gains new mandatory argument `ensemble_size`, added between the existing `settings` and `input` args.
+* Plotting sensitivity now makes less noise in the console and once again produces a one-page PDF as intended (#3560).
+* `write.ensemble.configs` and `write.sa.configs` now generate an ensemble id if one is not provided in a DB-free run (#3654). Runs with DB continue to always generate a new id. Note that multi-site runs with no id provided will now get a separate ensemble ID (and thus generate separate analyses) for each site.
+* Documented that `runModule.run.sensitivity.analysis` does not yet work with multisite settings. This will be fixed in a future release.
+* The default sampling method of `get.ensemble.samples` has changed from "uniform" to "random" (#3535).
+
+
+
+
# PEcAn.uncertainty 1.8.1
* `write.ensemble.configs` now respects argument `write.to.db` when settings$database$bety$write is NULL
(but if settings$database$bety$write is set, it still overrides `write.to.db`).
* Roxygen fixes
+
+
# PEcAn.uncertainty 1.8.0
- Added an optional `pfts` argument to `run.sensitivity.analysis()` so that sensitivity analysis and variance decomposition can be run on a subset of PFTs defined in `settings` if desired (#3155).
+
+
# PEcAn.uncertainty 1.7.2
* Added a `NEWS.md` file to track changes to the package.
diff --git a/modules/uncertainty/R/compute_sobol_indices.R b/modules/uncertainty/R/compute_sobol_indices.R
new file mode 100644
index 00000000000..d565a771f63
--- /dev/null
+++ b/modules/uncertainty/R/compute_sobol_indices.R
@@ -0,0 +1,45 @@
+#' Compute Sobol indices from a finished PEcAn run
+#'
+#' Loads model outputs from a Sobol ensemble, calculates summary
+#' statistics for a chosen variable, feeds them to \code{sensitivity::tell()},
+#' and returns the updated Sobol object.
+#'
+#' @param outdir PEcAn run output directory that contains runs.txt
+#' @param sobol_obj object produced by PEcAn.uncertainty::generate_joint_ensemble_design()
+#' @param var Variable name to summarise (default "GPP").
+#' @param stat_fun Summary statistic function applied to \code{var} (default \code{mean}).
+#'
+#' @return the updated \code{sobol_obj}, returned invisibly
+#'
+#' @export
+compute_sobol_indices <- function(outdir,
+ sobol_obj,
+ var = "GPP",
+ stat_fun = mean) {
+
+
+
+ runs_file <- file.path(outdir, "runs.txt")
+ if (!file.exists(runs_file)) {
+ PEcAn.logger::logger.error("runs.txt not found in ", outdir)
+ }
+ run_ids <- readLines(runs_file)
+
+
+
+ # Load outputs and compute response vector y
+ y <- vapply(run_ids, function(rid) {
+ fpath <- file.path(outdir, rid)
+ out <- PEcAn.utils::read.output(runid = rid, outdir = fpath)
+ if (!is.list(out) || !var %in% names(out)) {
+ PEcAn.logger::logger.error("Variable '", var, "' missing in output for run ", rid)
+ }
+ stat_fun(out[[var]], na.rm = TRUE)
+ }, numeric(1))
+
+ # Compute Sobol indices
+ sobol_obj <-sensitivity::tell(sobol_obj, y)
+
+ # Return the updated object
+ return(invisible(sobol_obj))
+}
diff --git a/modules/uncertainty/R/ensemble.R b/modules/uncertainty/R/ensemble.R
index f0c0ca17021..6863135ea32 100644
--- a/modules/uncertainty/R/ensemble.R
+++ b/modules/uncertainty/R/ensemble.R
@@ -1,12 +1,3 @@
-#-------------------------------------------------------------------------------
-# Copyright (c) 2012 University of Illinois, NCSA.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the
-# University of Illinois/NCSA Open Source License
-# which accompanies this distribution, and is available at
-# http://opensource.ncsa.illinois.edu/license.html
-#-------------------------------------------------------------------------------
-
##' Reads output from model ensemble
##'
##' Reads output for an ensemble of length specified by \code{ensemble.size} and bounded by \code{start.year}
@@ -79,12 +70,13 @@ read.ensemble.output <- function(ensemble.size, pecandir, outdir, start.year, en
##' @return matrix of (quasi-)random samples from trait distributions
##' @export
##' @author David LeBauer, Istem Fer
-get.ensemble.samples <- function(ensemble.size, pft.samples, env.samples,
- method = "uniform", param.names = NULL, ...) {
-
- if (is.null(method)) {
- PEcAn.logger::logger.info("No sampling method supplied, defaulting to uniform random sampling")
- method <- "uniform"
+get.ensemble.samples <- function( ensemble.size, pft.samples, env.samples,
+ method = "random", param.names = NULL, ...) {
+
+ # Define supported methods
+ supported_methods <- c("random", "uniform", "halton", "sobol", "lhc")
+ if (!method %in% supported_methods) {
+ stop("Invalid sampling method")
}
## force as numeric for compatibility with Fortran code in halton()
@@ -139,48 +131,59 @@ get.ensemble.samples <- function(ensemble.size, pft.samples, env.samples,
ensemble.samples <- list()
-
+ sampled.indices <- list()
col.i <- 0
for (pft.i in seq(pft.samples)) {
ensemble.samples[[pft.i]] <- matrix(nrow = ensemble.size, ncol = length(pft.samples[[pft.i]]))
+ sampled.indices[[pft.i]] <- matrix(nrow = ensemble.size, ncol = length(pft.samples[[pft.i]]))
# meaning we want to keep MCMC samples together
if(length(pft.samples[[pft.i]])>0 & !is.null(param.names)){
if (method == "halton") {
- same.i <- round(randtoolbox::halton(ensemble.size) * length(pft.samples[[pft.i]][[1]]))
+ same.i <- floor(randtoolbox::halton(ensemble.size) * length(pft.samples[[pft.i]][[1]]))+1
} else if (method == "sobol") {
- same.i <- round(randtoolbox::sobol(ensemble.size, scrambling = 3) * length(pft.samples[[pft.i]][[1]]))
+ same.i <- floor(randtoolbox::sobol(ensemble.size, scrambling = 3) * length(pft.samples[[pft.i]][[1]]))+1
} else if (method == "torus") {
- same.i <- round(randtoolbox::torus(ensemble.size) * length(pft.samples[[pft.i]][[1]]))
+ same.i <- floor(randtoolbox::torus(ensemble.size) * length(pft.samples[[pft.i]][[1]]))+1
} else if (method == "lhc") {
- same.i <- round(c(PEcAn.emulator::lhc(t(matrix(0:1, ncol = 1, nrow = 2)), ensemble.size) * length(pft.samples[[pft.i]][[1]])))
+ same.i <- floor(c(PEcAn.emulator::lhc(t(matrix(0:1, ncol = 1, nrow = 2)), ensemble.size) * length(pft.samples[[pft.i]][[1]])))+1
} else if (method == "uniform") {
same.i <- sample.int(length(pft.samples[[pft.i]][[1]]), ensemble.size)
- } else {
- PEcAn.logger::logger.info("Method ", method, " has not been implemented yet, using uniform random sampling")
- # uniform random
- same.i <- sample.int(length(pft.samples[[pft.i]][[1]]), ensemble.size)
+ } else if (method == "random") {
+ PEcAn.logger::logger.info("Using random row sampling for MCMC draws")
+ same.i <- sample(nrow(pft.samples[[pft.i]][[1]]), ensemble.size, replace = TRUE)
+ }
+ else {
+ PEcAn.logger::logger.error("Sampling method %s is not recognized", method)
+
}
}
for (trait.i in seq(pft.samples[[pft.i]])) {
col.i <- col.i + 1
- if(names(pft.samples[[pft.i]])[trait.i] %in% param.names[[pft.i]]){ # keeping samples
- ensemble.samples[[pft.i]][, trait.i] <- pft.samples[[pft.i]][[trait.i]][same.i]
- }else{
+ if (names(pft.samples[[pft.i]])[trait.i] %in% param.names[[pft.i]]) {
+ ensemble.samples[[pft.i]][, trait.i] <- pft.samples[[pft.i]][[trait.i]][same.i]
+ sampled.indices[[pft.i]][, trait.i] <- same.i
+ }else{
+ # Extract original trait values
+ trait.values <- pft.samples[[pft.i]][[trait.i]]
+ sampled.values <- stats::quantile(trait.values, random.samples[, col.i])
+
ensemble.samples[[pft.i]][, trait.i] <- stats::quantile(pft.samples[[pft.i]][[trait.i]],
random.samples[, col.i])
- }
- } # end trait
- ensemble.samples[[pft.i]] <- as.data.frame(ensemble.samples[[pft.i]])
- colnames(ensemble.samples[[pft.i]]) <- names(pft.samples[[pft.i]])
- } #end pft
- names(ensemble.samples) <- names(pft.samples)
- ans <- ensemble.samples
+ sampled.indices[[pft.i]][, trait.i] <- sapply(sampled.values, function(val) {which.min(abs(trait.values - val)) })
+ }
+ }
+ ensemble.samples[[pft.i]] <- as.data.frame(ensemble.samples[[pft.i]])
+ colnames(ensemble.samples[[pft.i]]) <- names(pft.samples[[pft.i]])
+
+ } #end pft
+ names(ensemble.samples) <- names(pft.samples)
+ ans <- ensemble.samples
}
- return(ans)
+ return(list(ans,sampled.indices))
} # get.ensemble.samples
@@ -190,6 +193,12 @@ get.ensemble.samples <- function(ensemble.size, pft.samples, env.samples,
##' Given a pft.xml object, a list of lists as supplied by get.sa.samples,
##' a name to distinguish the output files, and the directory to place the files.
##'
+##' @param input_design design matrix describing sampled inputs (see
+##' `run.write.configs()`). Columns named after `settings$run$inputs` tags give
+##' 1-based indices into each input's `path` list and rows follow run order.
+##' Requires `nrow(input_design) >= ensemble.size`;
+##' extra rows are ignored.
+##' @param ensemble.size size of ensemble
##' @param defaults pft
##' @param ensemble.samples list of lists supplied by \link{get.ensemble.samples}
##' @param settings list of PEcAn settings
@@ -197,22 +206,40 @@ get.ensemble.samples <- function(ensemble.size, pft.samples, env.samples,
##' @param clean remove old output first?
##' @param write.to.db logical: Record this run in BETY?
##' @param restart In case this is a continuation of an old simulation. restart needs to be a list with name tags of runid, inputs, new.params (parameters), new.state (initial condition), ensemble.id (ensemble id), start.time and stop.time.See Details.
-##' @param samples Sampled inputs such as met and parameter files
##' @param rename Decide if we want to rename previous output files, for example convert from sipnet.out to sipnet.2020-07-16.out.
##'
##' @return list, containing $runs = data frame of runids, $ensemble.id = the ensemble ID for these runs and $samples with ids and samples used for each tag. Also writes sensitivity analysis configuration files as a side effect
##' @details The restart functionality is developed using model specific functions by calling write_restart.modelname function. First, you need to make sure that this function is already exist for your desired model.See here \url{https://pecanproject.github.io/pecan-documentation/latest/pecan-models.html}
##' new state is a dataframe with a different column for each state variable. The number of the rows in this dataframe needs to be the same as the ensemble size.
-##' State variables that you can use for setting up the intial conditions differs for different models. You may check the documentation of the write_restart.modelname your model.
+##' The state variables that you can use for setting up initial conditions are model specific. Check the documentation of the write_restart. function for the model you are using.
##' The units for the state variables need to be in the PEcAn standard units which can be found in \link{standard_vars}.
##' new.params also has similar structure to ensemble.samples which is sent as an argument.
##'
##' @importFrom dplyr %>%
-##' @importFrom rlang .data
+##' @importFrom rlang .data %||%
##' @export
##' @author David LeBauer, Carl Davidson, Hamze Dokoohaki
-write.ensemble.configs <- function(defaults, ensemble.samples, settings, model,
- clean = FALSE, write.to.db = TRUE, restart = NULL, samples = NULL, rename = FALSE) {
+
+write.ensemble.configs <- function(input_design , ensemble.size, defaults, ensemble.samples, settings, model,
+ clean = FALSE, write.to.db = TRUE, restart = NULL, rename = FALSE) {
+
+ # Check for required paths
+ for (input_tag in names(settings$run$inputs)) {
+ input <- settings$run$inputs[[input_tag]]
+ input_paths <- input$path
+ if (is.null(input_paths) || length(input_paths) == 0) {
+ PEcAn.logger::logger.error("Input", sQuote(input_tag), "has no paths specified")
+ }
+ # Check for unsampled multi-path inputs
+ if (length(input_paths) > 1 &&
+ !(input_tag %in% names(settings$ensemble$samplingspace))) {
+ PEcAn.logger::logger.error(
+ "Input", sQuote(input_tag), "has", length(input_paths),
+ "paths but no sampling method.",
+ "Add for this input in pecan.xml"
+ )
+ }
+ }
con <- NULL
my.write.config <- paste("write.config.", model, sep = "")
@@ -244,14 +271,14 @@ write.ensemble.configs <- function(defaults, ensemble.samples, settings, model,
# Get the workflow id
- if (!is.null(settings$workflow$id)) {
- workflow.id <- settings$workflow$id
- } else {
- workflow.id <- -1
- }
+ # if workflow$id is null, set to -1
+ # to avoid collision w/ database ids
+ workflow.id <- settings$workflow$id %||% -1
+
#------------------------------------------------- if this is a new fresh run------------------
if (is.null(restart)){
# create an ensemble id
+ # Note: this ignores any existing settings$ensemble$id
if (!is.null(con) && write.to.db) {
# write ensemble first
ensemble.id <- PEcAn.DB::db.query(paste0(
@@ -265,7 +292,10 @@ write.ensemble.configs <- function(defaults, ensemble.samples, settings, model,
"values (", pft$posteriorid, ", ", ensemble.id, ")"), con = con)
}
} else {
- ensemble.id <- NA
+ # Use existing id if provided, or an arbitrary unique value if not
+ # Note: Since write.ensemble.configs is called separately for each site,
+ # a multisite run with no ID provided gives each site its own ensemble id!
+ ensemble.id <- settings$ensemble$id %||% rlang::hash(settings)
}
#-------------------------generating met/param/soil/veg/... for all ensembles----
if (!is.null(con)){
@@ -283,30 +313,23 @@ write.ensemble.configs <- function(defaults, ensemble.samples, settings, model,
}
#now looking into the xml
samp <- settings$ensemble$samplingspace
- #finding who has a parent
- parents <- lapply(samp,'[[', 'parent')
- #order parents based on the need of who has to be first
- order <- names(samp)[lapply(parents, function(tr) which(names(samp) %in% tr)) %>% unlist()]
- #new ordered sampling space
- samp.ordered <- samp[c(order, names(samp)[!(names(samp) %in% order)])]
- if(is.null(samples)){
- #performing the sampling
- samples<-list()
- # For the tags specified in the xml I do the sampling
- for(i in seq_along(samp.ordered)){
- myparent<-samp.ordered[[i]]$parent # do I have a parent ?
- #call the function responsible for generating the ensemble
- samples[[names(samp.ordered[i])]] <- input.ens.gen(settings=settings,
- input=names(samp.ordered)[i],
- method=samp.ordered[[i]]$method,
- parent_ids=if( !is.null(myparent)) samples[[myparent]]) # if I have parent then give me their ids - this is where the ordering matters making sure the parent is done before it's asked
- }
- }
-
+ #performing the sampling
+ samples <- list()
+ input_tags <- names(settings$run$inputs)
+ for (input_tag in input_tags) {
+ if (input_tag %in% colnames(input_design)) {
+ input_paths <- settings$run$inputs[[input_tag]]$path
+ input_indices <- input_design[[input_tag]]
+
+ samples[[input_tag]] <- list(
+ samples = lapply(input_indices, function(idx) input_paths[[idx]])
+ )
+ }
+ }
# if there is a tag required by the model but it is not specified in the xml then I replicate n times the first element
required_tags%>%
purrr::walk(function(r_tag){
- if (is.null(samples[[r_tag]]) & r_tag!="parameters") samples[[r_tag]]$samples <<- rep(settings$run$inputs[[tolower(r_tag)]]$path[1], settings$ensemble$size)
+ if (is.null(samples[[r_tag]]) & r_tag!="parameters") samples[[r_tag]]$samples <<- rep(settings$run$inputs[[tolower(r_tag)]]$path[1], ensemble.size)
})
# Reading the site.pft specific tags from xml
@@ -334,7 +357,7 @@ write.ensemble.configs <- function(defaults, ensemble.samples, settings, model,
}
# if no ensemble piece was in the xml I replicate n times the first element in params
- if ( is.null(samp$parameters) ) samples$parameters$samples <- ensemble.samples %>% purrr::map(~.x[rep(1, settings$ensemble$size) , ])
+ if ( is.null(samp$parameters) ) samples$parameters$samples <- ensemble.samples %>% purrr::map(~.x[rep(1, ensemble.size) , ])
# This where we handle the parameters - ensemble.samples is already generated in run.write.config and it's sent to this function as arg -
if ( is.null(samples$parameters$samples) ) samples$parameters$samples <- ensemble.samples
#------------------------End of generating ensembles-----------------------------------
@@ -344,7 +367,7 @@ write.ensemble.configs <- function(defaults, ensemble.samples, settings, model,
# write configuration for each run of the ensemble
runs <- data.frame()
- for (i in seq_len(settings$ensemble$size)) {
+ for (i in seq_len(ensemble.size)) {
if (!is.null(con) && write.to.db) {
paramlist <- paste("ensemble=", i, sep = "")
# inserting this into the table and getting an id back
@@ -400,7 +423,7 @@ write.ensemble.configs <- function(defaults, ensemble.samples, settings, model,
cat("runtype : ensemble\n",
"workflow id : ", format(workflow.id, scientific = FALSE), "\n",
"ensemble id : ", format(ensemble.id, scientific = FALSE), "\n",
- "run : ", i, "/", settings$ensemble$size, "\n",
+ "run : ", i, "/", ensemble.size, "\n",
"run id : ", format(run.id, scientific = FALSE), "\n",
"pft names : ", as.character(lapply(settings$pfts, function(x) x[["name"]])), "\n",
"model : ", model, "\n",
@@ -414,7 +437,26 @@ write.ensemble.configs <- function(defaults, ensemble.samples, settings, model,
"rundir : ", file.path(settings$host$rundir, run.id), "\n",
"outdir : ", file.path(settings$host$outdir, run.id), "\n",
file = file.path(settings$rundir, run.id, "README.txt"))
+
+
+ #changing the structure of input tag to what the models are expecting
+ for (input_i in seq_along(settings$run$inputs)) {
+ input_tag <- names(settings$run$inputs)[[input_i]]
+ input <- settings$run$inputs[[input_tag]]
+
+
+ if (!input_tag %in% names(samples)) {
+ # Use first path (already validated as single path)
+ settings$run$inputs[[input_tag]]$path <- unlist(input$path[1])}
+ else {
+ # Use sampled path
+ settings$run$inputs[[input_tag]]$path <- samples[[input_tag]][["samples"]][[i]]
+ }
+
+}
+
+
do.call(my.write.config, args = list( defaults = defaults,
trait.values = lapply(samples$parameters$samples, function(x, n) { x[n, , drop=FALSE] }, n=i), # this is the params
@@ -459,9 +501,11 @@ write.ensemble.configs <- function(defaults, ensemble.samples, settings, model,
}
# stop and start time are required by bc we are wrtting them down into job.sh
- for (i in seq_len(settings$ensemble$size)) {
+ for (i in seq_len(ensemble.size)) {
input_list <- list()
for (input_tag in names(inputs)) {
+ # if it's the parameter list, skip.
+ if (input_tag == "parameters") next
if (!is.null(inputs[[input_tag]]$samples[[i]]))
input_list[[input_tag]] <- list(path = inputs[[input_tag]]$samples[[i]])
}
@@ -496,6 +540,7 @@ write.ensemble.configs <- function(defaults, ensemble.samples, settings, model,
#' @param input name of input to sample, e.g. "met", "veg", "pss"
#' @param method Method for sampling - For now looping or sampling with replacement is implemented
#' @param parent_ids This is basically the order of the paths that the parent is sampled.See Details.
+#' @param ensemble_size size of ensemble
#'
#' @return For a given input/tag in the pecan xml and a method, this function returns a list with $id showing the order of sampling and $samples with samples of that input.
#' @details If for example met was a parent and it's sampling method resulted in choosing the first, third and fourth samples, these are the ids that need to be sent as
@@ -505,7 +550,7 @@ write.ensemble.configs <- function(defaults, ensemble.samples, settings, model,
#' @examples
#' \dontrun{input.ens.gen(settings,"met","sampling")}
#'
-input.ens.gen <- function(settings, input, method = "sampling", parent_ids = NULL) {
+input.ens.gen <- function(settings, ensemble_size, input, method = "sampling", parent_ids = NULL) {
#-- reading the dots and exposing them to the inside of the function
samples <- list()
@@ -528,15 +573,16 @@ input.ens.gen <- function(settings, input, method = "sampling", parent_ids = NUL
} else if (tolower(method) == "sampling") {
samples$ids <- sample(
seq_along(input_path),
- settings$ensemble$size,
+ ensemble_size,
replace = TRUE)
} else if (tolower(method) == "looping") {
samples$ids <- rep_len(
seq_along(input_path),
- length.out = settings$ensemble$size)
+ length.out = ensemble_size )
}
#using the sample ids
samples$samples <- input_path[samples$ids]
return(samples)
}
+
diff --git a/modules/uncertainty/R/flux_uncertainty.R b/modules/uncertainty/R/flux_uncertainty.R
index 6902cbe902a..ef18e1ad31d 100644
--- a/modules/uncertainty/R/flux_uncertainty.R
+++ b/modules/uncertainty/R/flux_uncertainty.R
@@ -1,13 +1,3 @@
-#-------------------------------------------------------------------------------
-# Copyright (c) 2012 University of Illinois, NCSA.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the
-# University of Illinois/NCSA Open Source License
-# which accompanies this distribution, and is available at
-# http://opensource.ncsa.illinois.edu/license.html
-#-------------------------------------------------------------------------------
-
-#--------------------------------------------------------------------------------------------------#
#' Read Ameriflux L2 Data
#'
#' @param file.name path to file
diff --git a/modules/uncertainty/R/generate_joint_ensemble_design.R b/modules/uncertainty/R/generate_joint_ensemble_design.R
new file mode 100644
index 00000000000..02c9289871f
--- /dev/null
+++ b/modules/uncertainty/R/generate_joint_ensemble_design.R
@@ -0,0 +1,86 @@
+#' Generate joint ensemble design for parameter sampling
+#' Creates a joint ensemble design that maintains parameter correlations across
+#' all sites in a multi-site run. This function generates sample indices that
+#' are shared across sites to ensure consistent parameter sampling.
+#'
+#' @param settings A PEcAn settings object containing ensemble configuration
+#' @param ensemble_size Integer specifying the number of ensemble members
+#' Since the `input_design` will only be generated once for the entire model run,
+#' the only situation, where we might want to recycle the existing `ensemble_samples`,
+#' is when we split and submit the larger SDA runs (e.g., 8,000 sites) into
+#' smaller SDA experiments (e.g., 100 sites per job), where we want to keep using
+#' the same parameters rather than creating new parameters for each job.
+#' @param sobol Logical; if TRUE, build a Sobol design using \code{sensitivity::soboljansen()}
+#' @return A list containing ensemble samples and indices
+#' If `sobol = TRUE`, the list will be a `sensitivity::soboljansen()`
+#' result and will contain the components documented therein.
+#' @export
+
+generate_joint_ensemble_design <- function(settings,
+ ensemble_size,
+ sobol = FALSE) {
+ if (sobol) {
+ ensemble_size <- as.numeric(ensemble_size) * 2
+ }
+ ens.sample.method <- settings$ensemble$samplingspace$parameters$method
+ design_list <- list()
+ sampled_inputs <- list()
+ posterior.files <- settings$pfts %>%
+ purrr::map_chr("posterior.files", .default = NA_character_)
+ samp <- settings$ensemble$samplingspace
+ parents <- lapply(samp, "[[", "parent")
+ order <- names(samp)[
+ lapply(parents, function(tr) which(names(samp) %in% tr)) %>%
+ unlist()
+ ]
+ samp.ordered <- samp[c(order, names(samp)[!(names(samp) %in% order)])]
+
+ # loop over inputs.
+ for (i in seq_along(samp.ordered)) {
+ input_tag <- names(samp.ordered)[i]
+ parent_name <- samp.ordered[[i]]$parent
+
+ parent_ids <- if (!is.null(parent_name)) {
+ sampled_inputs[[parent_name]]
+ } else {
+ NULL
+ }
+
+ input_result <- PEcAn.uncertainty::input.ens.gen(
+ settings = settings,
+ ensemble_size = ensemble_size,
+ input = input_tag,
+ method = samp.ordered[[i]]$method,
+ parent_ids = parent_ids
+ )
+
+ sampled_inputs[[input_tag]] <- input_result$ids
+ design_list[[input_tag]] <- input_result$ids
+ }
+ # Sample parameters if we don't have it.
+ if (!file.exists(file.path(settings$outdir, "samples.Rdata"))) {
+ PEcAn.uncertainty::get.parameter.samples(
+ settings,
+ ensemble.size = ensemble_size,
+ posterior.files,
+ ens.sample.method)
+ }
+ # Here we assumed the length of parameters is identical to the ensemble size.
+ # TODO: detect if they are identical. If not, we will need to resample the
+ # parameters with replacement.
+ design_list[["param"]] <- seq_len(ensemble_size)
+ design_matrix <- data.frame(design_list)
+
+ if (sobol) {
+ half <- floor(ensemble_size / 2)
+ X1 <- design_matrix[1:half, ]
+ X2 <- design_matrix[(half + 1):ensemble_size, ]
+ sobol_obj <- sensitivity::soboljansen(model = NULL, X1 = X1, X2 = X2)
+ return(sobol_obj)
+ }
+ # This ensures that regardless of whether the sobol or non-sobol version is called
+ # that the output is a list that includes the design as X. In the sobol version the
+ # list includes additional info beyond just X that's required by the function that
+ # does the sobol index calculations, but not required to do the runs themselves.
+ return(list(X = design_matrix))
+}
\ No newline at end of file
diff --git a/modules/uncertainty/R/get.analysis.filenames.r b/modules/uncertainty/R/get.analysis.filenames.r
index 8b19c0ded28..f68e22d88bb 100644
--- a/modules/uncertainty/R/get.analysis.filenames.r
+++ b/modules/uncertainty/R/get.analysis.filenames.r
@@ -1,31 +1,31 @@
-##' Generate ensemble filenames
-##'
-##' Generates a vector of filenames to be used for PEcAn ensemble output files.
-##' All paths start from directory `settings$outdir`,
-##' which will be created if it does not exist.
-##'
-##' Typically used by passing only a settings object,
-##' but all values can be overridden for manual use.
-##'
-##' If only a single variable or a subset of years are needed,
-##' the generated filename will identify these in the form
-## `prefix.ensemble_id.variable.startyear.endyear.suffix`
-##' If all vars and years are included, set `all.yr.var` to TRUE
-##' to get a filename of the form `prefix.ensemble_id.suffix`.
-##' All elements are recycled vectorwise.
-##' @param settings list of PEcAn settings.
-##' @param prefix string to appear at the beginning of the filename
-##' @param suffix file extension: string to appear at the end of the filename
-##' @param all.var.yr logical: does ensemble include all vars and years?
-##' If FALSE, filename will include years and vars
-##' @param ensemble.id ensemble ID(s)
-##' @param variable variable(s) included in the ensemble.
-##' @param start.year,end.year first and last year simulated.
-##'
-##' @return a vector of filenames, each in the form
-##' `[settings$outdir]/[prefix].[ensemble.ID].[variable].[start.year].[end.year][suffix]`.
-##' @export
-##' @author Ryan Kelly
+#' Generate ensemble filenames
+#'
+#' Generates a vector of filenames to be used for PEcAn ensemble output files.
+#' All paths start from directory `settings$outdir`,
+#' which will be created if it does not exist.
+#'
+#' Typically used by passing only a settings object,
+#' but all values can be overridden for manual use.
+#'
+#' If only a single variable or a subset of years are needed,
+#' the generated filename will identify these in the form
+# `prefix.ensemble_id.variable.startyear.endyear.suffix`
+#' If all vars and years are included, set `all.yr.var` to TRUE
+#' to get a filename of the form `prefix.ensemble_id.suffix`.
+#' All elements are recycled vectorwise.
+#' @param settings list of PEcAn settings.
+#' @param prefix string to appear at the beginning of the filename
+#' @param suffix file extension: string to appear at the end of the filename
+#' @param all.var.yr logical: does ensemble include all vars and years?
+#' If FALSE, filename will include years and vars
+#' @param ensemble.id ensemble ID(s)
+#' @param variable variable(s) included in the ensemble.
+#' @param start.year,end.year first and last year simulated.
+#'
+#' @return a vector of filenames, each in the form
+#' `[settings$outdir]/[prefix].[ensemble.ID].[variable].[start.year].[end.year][suffix]`.
+#' @export
+#' @author Ryan Kelly
ensemble.filename <- function(settings, prefix = "ensemble.samples", suffix = "Rdata",
all.var.yr = TRUE, ensemble.id = settings$ensemble$ensemble.id,
variable = settings$ensemble$variable,
@@ -57,18 +57,16 @@ ensemble.filename <- function(settings, prefix = "ensemble.samples", suffix = "R
} # ensemble.filename
-##' Generate sensitivity analysis filenames
-##'
-##' @name sensitivity.filename
-##' @title Generate sensitivity analysis filenames
-##' @inheritParams ensemble.filename
-##' @param pft name of PFT used for analysis. If NULL, assumes all
-##' PFTs in run are used and does not add them to the filename
-##' @return a filename
-##' @export
-##'
-##' @details Generally uses values in settings, but can be overwritten for manual uses
-##' @author Ryan Kelly
+#' Generate sensitivity analysis filenames
+#'
+#' @inheritParams ensemble.filename
+#' @param pft name of PFT used for analysis. If NULL, assumes all
+#' PFTs in run are used and does not add them to the filename
+#' @return a filename
+#' @export
+#'
+#' @details Generally uses values in settings, but can be overwritten for manual uses
+#' @author Ryan Kelly
sensitivity.filename <- function(settings,
prefix = "sensitivity.samples", suffix = "Rdata",
all.var.yr = TRUE,
@@ -78,7 +76,7 @@ sensitivity.filename <- function(settings,
start.year = settings$sensitivity.analysis$start.year,
end.year = settings$sensitivity.analysis$end.year) {
- if(is.null(ensemble.id) || is.na(ensemble.id)) {
+ if (is.null(ensemble.id) || is.na(ensemble.id)) {
# This shouldn't generally arise, as run.write.configs() appends ensemble.id to settings. However,it will come up if running run.write.configs(..., write=F), because then no ensemble ID is created in the database. A simple workflow will still work in that case, but provenance will be lost if multiple ensembles are run.
ensemble.id <- "NOENSEMBLEID"
}
@@ -101,14 +99,16 @@ sensitivity.filename <- function(settings,
ind <- which(sapply(settings$pfts, function(x) x$name) == pft)
if (length(ind) == 0) {
## no match
- PEcAn.logger::logger.warn("sensitivity.filename: unmatched PFT = ", pft, " not among ",
- sapply(settings$pfts, function(x) x$name))
+ PEcAn.logger::logger.warn(
+ "sensitivity.filename: unmatched PFT = ", pft, " not among ",
+ sapply(settings$pfts, function(x) x$name))
sensitivity.dir <- file.path(settings$outdir, "pfts", pft)
} else {
if (length(ind) > 1) {
## multiple matches
- PEcAn.logger::logger.warn("sensitivity.filename: multiple matchs of PFT = ", pft,
- " among ", sapply(settings$pfts, function(x) x$name), " USING")
+ PEcAn.logger::logger.warn(
+ "sensitivity.filename: multiple matchs of PFT = ", pft,
+ " among ", sapply(settings$pfts, function(x) x$name), " USING")
ind <- ind[1]
}
if (is.null(settings$pfts[[ind]]$outdir) || is.na(settings$pfts[[ind]]$outdir)) {
@@ -136,7 +136,9 @@ sensitivity.filename <- function(settings,
paste(prefix, ensemble.id, suffix, sep = "."))
} else {
sensitivity.file <- file.path(sensitivity.dir,
- paste(prefix, ensemble.id, variable, start.year, end.year, suffix, sep = "."))
+ paste(prefix, ensemble.id, variable,
+ start.year, end.year, suffix,
+ sep = "."))
}
return(sensitivity.file)
diff --git a/modules/uncertainty/R/get.parameter.samples.R b/modules/uncertainty/R/get.parameter.samples.R
index cb05b39ece3..3e4d5ef367d 100644
--- a/modules/uncertainty/R/get.parameter.samples.R
+++ b/modules/uncertainty/R/get.parameter.samples.R
@@ -1,23 +1,27 @@
-
-##' Convert priors / MCMC samples to chains that can be sampled for model parameters
-##'
-##' @param settings PEcAn settings object
-##' @param posterior.files list of filenames to read from
-##' @param ens.sample.method one of "halton", "sobol", "torus", "lhc", "uniform"
-##' @export
-##'
-##' @author David LeBauer, Shawn Serbin, Istem Fer
-#' @importFrom purrr `%||%`
-### Identify PFTs in the input settings.xml file
-get.parameter.samples <- function(settings,
- posterior.files = rep(NA, length(settings$pfts)),
+#' Convert priors / MCMC samples to chains that can be sampled
+#' for model parameters
+#'
+#' @param settings PEcAn settings object
+#' @param ensemble.size number of runs in model ensemble
+#' @param posterior.files list of filenames to read from
+#' @param ens.sample.method one of "halton", "sobol", "torus", "lhc", "uniform"
+#' @export
+#'
+#' @author David LeBauer, Shawn Serbin, Istem Fer
+#' @importFrom rlang %||%
+get.parameter.samples <- function(settings,
+ ensemble.size = 1,
+ posterior.files = rep(NA, length(settings$pfts)),
ens.sample.method = "uniform") {
- pfts <- settings$pfts
+ ### Identify PFTs in the input settings.xml file
+ pfts <- settings$pfts
pft.names <- list()
- outdirs <- list()
+ outdirs <- list()
if (length(pfts) != length(posterior.files)) {
- PEcAn.logger::logger.error("settings$pfts and posterior.files should be the same length")
+ PEcAn.logger::logger.error(
+ "settings$pfts and posterior.files should be the same length"
+ )
}
## Open database connection
@@ -25,36 +29,44 @@ get.parameter.samples <- function(settings,
on.exit(try(PEcAn.DB::db.close(con), silent = TRUE), add = TRUE)
# If we fail to connect to DB then we set to NULL
- if (inherits(con, "try-error")) {
+ if (inherits(con, "try-error")) {
con <- NULL
- PEcAn.logger::logger.warn("We were not able to successfully establish a connection with Bety ")
+ PEcAn.logger::logger.warn(
+ "We were not able to successfully establish a connection with Bety "
+ )
}
-
+
for (i.pft in seq_along(pfts)) {
# If no name given, use string "NULL" to warn user
pft.names[i.pft] <- settings$pfts[[i.pft]]$name %||% "NULL"
-
+
### Get output directory info
- if(!is.null(settings$pfts[[i.pft]]$outdir)){
+ if (!is.null(settings$pfts[[i.pft]]$outdir)) {
outdirs[i.pft] <- settings$pfts[[i.pft]]$outdir
- } else {
- outdirs[i.pft] <- unique(PEcAn.DB::dbfile.check(type = "Posterior",container.id = settings$pfts[[i.pft]]$posteriorid,con=con)$file_path)
+ } else {
+ outdirs[i.pft] <- unique(
+ PEcAn.DB::dbfile.check(
+ type = "Posterior",
+ container.id = settings$pfts[[i.pft]]$posteriorid,
+ con = con
+ )$file_path
+ )
}
-
- } ### End of for loop to extract pft names
-
+ } ### End of for loop to extract pft names
+
PEcAn.logger::logger.info("Selected PFT(s): ", pft.names)
-
+
## Generate empty list arrays for output.
- trait.samples <- sa.samples <- ensemble.samples <- env.samples <- runs.samples <- param.names <- list()
-
- # flag determining whether samples are independent (e.g. when params fitted individually)
+ trait.samples <- sa.samples <- ensemble.samples <- env.samples <- list()
+ runs.samples <- param.names <- list()
+
+ # flag determining whether samples are independent
+ # (e.g. when params fitted individually)
independent <- TRUE
-
+
## Load PFT priors and posteriors
for (i in seq_along(pft.names)) {
-
- distns = new.env()
+ distns <- new.env()
## Load posteriors
if (!is.na(posterior.files[i])) {
@@ -64,7 +76,8 @@ get.parameter.samples <- function(settings,
distns$prior.distns <- distns$post.distns
}
} else {
- # Default to most recent posterior in the workflow, or the prior if there is none
+ # Default to most recent posterior in the workflow,
+ # or the prior if there is none
fname <- file.path(outdirs[i], "post.distns.Rdata")
if (file.exists(fname)) {
load(fname, envir = distns)
@@ -73,141 +86,182 @@ get.parameter.samples <- function(settings,
load(file.path(outdirs[i], "prior.distns.Rdata"), envir = distns)
}
}
-
+
### Load trait mcmc data (if exists, either from MA or PDA)
if (!is.null(settings$pfts[[i]]$posteriorid) && !is.null(con)) {
# first check if there are any files associated with posterior ids
files <- PEcAn.DB::dbfile.check("Posterior",
- settings$pfts[[i]]$posteriorid,
- con, settings$host$name, return.all = TRUE)
- tid <- grep("trait.mcmc.*Rdata", files$file_name)
+ settings$pfts[[i]]$posteriorid,
+ con, settings$host$name,
+ return.all = TRUE
+ )
+ tid <- grep("trait.mcmc.*Rdata", files$file_name)
if (length(tid) > 0) {
trait.mcmc.file <- file.path(files$file_path[tid], files$file_name[tid])
ma.results <- TRUE
load(trait.mcmc.file, envir = distns)
- # PDA samples are fitted together, to preserve correlations downstream let workflow know they should go together
- if(grepl("mcmc.pda", trait.mcmc.file)) independent <- FALSE
+ # PDA samples are fitted together, to preserve correlations downstream
+ # let workflow know they should go together
+ if (grepl("mcmc.pda", trait.mcmc.file)) independent <- FALSE
# NOTE: Global MA samples will also be together, right?
-
-
- }else{
- PEcAn.logger::logger.info("No trait.mcmc file is associated with this posterior ID.")
+ } else {
+ PEcAn.logger::logger.info(
+ "No trait.mcmc file is associated with this posterior ID."
+ )
ma.results <- FALSE
}
- }else if ("trait.mcmc.Rdata" %in% dir(unlist(outdirs[i]))) {
- PEcAn.logger::logger.info("Defaulting to trait.mcmc file in the pft directory.")
+ } else if ("trait.mcmc.Rdata" %in% dir(unlist(outdirs[i]))) {
+ PEcAn.logger::logger.info(
+ "Defaulting to trait.mcmc file in the pft directory."
+ )
ma.results <- TRUE
load(file.path(outdirs[i], "trait.mcmc.Rdata"), envir = distns)
} else {
ma.results <- FALSE
}
-
+
pft.name <- unlist(pft.names[i])
-
+
### When no ma for a trait, sample from prior
### Trim all chains to shortest mcmc chain, else 20000 samples
- if(!is.null(distns$prior.distns)){
+ if (!is.null(distns$prior.distns)) {
priors <- rownames(distns$prior.distns)
} else {
priors <- NULL
- }
+ }
if (!is.null(distns$trait.mcmc)) {
param.names[[i]] <- names(distns$trait.mcmc)
names(param.names)[i] <- pft.name
-
- samples.num <- min(sapply(distns$trait.mcmc, function(x) nrow(as.matrix(x))))
-
+
+ samples.num <- min(
+ sapply(distns$trait.mcmc, function(x) nrow(as.matrix(x)))
+ )
+
## report which traits use MA results, which use priors
if (length(param.names[[i]]) > 0) {
- PEcAn.logger::logger.info("PFT", pft.names[i], "has MCMC samples for:\n",
- paste0(param.names[[i]], collapse = "\n "))
+ PEcAn.logger::logger.info(
+ "PFT", pft.names[i], "has MCMC samples for:\n",
+ paste0(param.names[[i]], collapse = "\n ")
+ )
}
if (!all(priors %in% param.names[[i]])) {
- PEcAn.logger::logger.info("PFT", pft.names[i], "will use prior distributions for:\n",
- paste0(priors[!priors %in% param.names[[i]]], collapse = "\n "))
+ PEcAn.logger::logger.info(
+ "PFT", pft.names[i], "will use prior distributions for:\n",
+ paste0(priors[!priors %in% param.names[[i]]], collapse = "\n ")
+ )
}
} else {
param.names[[i]] <- list()
samples.num <- 20000
PEcAn.logger::logger.info("No MCMC results for PFT", pft.names[i])
- PEcAn.logger::logger.info("PFT", pft.names[i], "will use prior distributions for",
- priors)
+ PEcAn.logger::logger.info(
+ "PFT", pft.names[i], "will use prior distributions for",
+ priors
+ )
}
- if(is.null(priors)) priors = param.names[[i]]
-
+ if (is.null(priors)) priors <- param.names[[i]]
+
PEcAn.logger::logger.info("using ", samples.num, "samples per trait")
if (ens.sample.method == "halton") {
q_samples <- randtoolbox::halton(n = samples.num, dim = length(priors))
} else if (ens.sample.method == "sobol") {
- q_samples <- randtoolbox::sobol(n = samples.num, dim = length(priors), scrambling = 3)
+ q_samples <- randtoolbox::sobol(
+ n = samples.num,
+ dim = length(priors),
+ scrambling = 3
+ )
} else if (ens.sample.method == "torus") {
q_samples <- randtoolbox::torus(n = samples.num, dim = length(priors))
} else if (ens.sample.method == "lhc") {
- q_samples <- PEcAn.emulator::lhc(t(matrix(0:1, ncol = length(priors), nrow = 2)), samples.num)
+ q_samples <- PEcAn.emulator::lhc(
+ t(matrix(0:1, ncol = length(priors), nrow = 2)),
+ samples.num
+ )
} else if (ens.sample.method == "uniform") {
- q_samples <- matrix(stats::runif(samples.num * length(priors)),
- samples.num,
- length(priors))
+ q_samples <- matrix(
+ stats::runif(samples.num * length(priors)),
+ samples.num,
+ length(priors)
+ )
} else {
- PEcAn.logger::logger.info("Method ", ens.sample.method, " has not been implemented yet, using uniform random sampling")
+ PEcAn.logger::logger.info(
+ "Method ", ens.sample.method,
+ " has not been implemented yet, using uniform random sampling"
+ )
# uniform random
- q_samples <- matrix(stats::runif(samples.num * length(priors)),
- samples.num,
- length(priors))
+ q_samples <- matrix(
+ stats::runif(samples.num * length(priors)),
+ samples.num,
+ length(priors)
+ )
}
for (prior in priors) {
if (prior %in% param.names[[i]]) {
samples <- distns$trait.mcmc[[prior]] %>%
- purrr::map(~ .x[,'beta.o']) %>%
+ purrr::map(~ .x[, "beta.o"]) %>%
unlist() %>%
as.matrix()
} else {
- samples <- PEcAn.priors::get.sample(distns$prior.distns[prior, ], samples.num, q_samples[ , priors==prior])
+ samples <- PEcAn.priors::get.sample(
+ distns$prior.distns[prior, ],
+ samples.num,
+ q_samples[, priors == prior]
+ )
}
trait.samples[[pft.name]][[prior]] <- samples
}
- } ### End for loop
-
+ } ### End for loop
+
# if samples are independent, set param.names to NULL
- # this is important for downstream, when param.names is not NULL MCMC will be sampled accordingly
- if(independent){
+ # this is important for downstream, when param.names is not NULL
+ # MCMC will be sampled accordingly
+ if (independent) {
param.names <- NULL
}
-
+
if ("sensitivity.analysis" %in% names(settings)) {
-
- ### Get info on the quantiles to be run in the sensitivity analysis (if requested)
- quantiles <- PEcAn.utils::get.quantiles(settings$sensitivity.analysis$quantiles)
+ ### Get info on the quantiles to be run in the sensitivity analysis
+ ### (if requested)
+ quantiles <- PEcAn.utils::get.quantiles(
+ settings$sensitivity.analysis$quantiles
+ )
### Get info on the years to run the sensitivity analysis (if requested)
- sa.years <- data.frame(sa.start = settings$sensitivity.analysis$start.year,
- sa.end = settings$sensitivity.analysis$end.year)
-
- PEcAn.logger::logger.info("\n Selected Quantiles: ", PEcAn.utils::vecpaste(round(quantiles, 3)))
-
+ sa.years <- data.frame(
+ sa.start = settings$sensitivity.analysis$start.year,
+ sa.end = settings$sensitivity.analysis$end.year
+ )
+
+ PEcAn.logger::logger.info(
+ "\n Selected Quantiles: ",
+ PEcAn.utils::vecpaste(round(quantiles, 3))
+ )
+
### Generate list of sample quantiles for SA run
- sa.samples <- PEcAn.utils::get.sa.sample.list(pft = trait.samples, env = env.samples,
- quantiles = quantiles)
+ sa.samples <- PEcAn.utils::get.sa.sample.list(
+ pft = trait.samples, env = env.samples,
+ quantiles = quantiles
+ )
}
if ("ensemble" %in% names(settings)) {
- #if it's not there it's one probably
- if (is.null(settings$ensemble$size)) settings$ensemble$size <- 1
- if (settings$ensemble$size == 1) {
+ if (ensemble.size == 1) {
## run at median if only one run in ensemble
ensemble.samples <- PEcAn.utils::get.sa.sample.list(
pft = trait.samples,
env = env.samples,
quantiles = 0.5
)
- } else if (settings$ensemble$size > 1) {
-
+ } else if (ensemble.size > 1) {
## subset the trait.samples to ensemble size using Halton sequence
- ensemble.samples <- get.ensemble.samples(settings$ensemble$size, trait.samples,
- env.samples, ens.sample.method, param.names)
+ ensemble.samples <- get.ensemble.samples(
+ ensemble.size, trait.samples,
+ env.samples, ens.sample.method, param.names
+ )
}
}
- save(ensemble.samples, trait.samples, sa.samples, runs.samples, env.samples,
- file = file.path(settings$outdir, "samples.Rdata"))
+ save(ensemble.samples, trait.samples, sa.samples, runs.samples, env.samples,
+ file = file.path(settings$outdir, "samples.Rdata")
+ )
} # get.parameter.samples
diff --git a/modules/uncertainty/R/get.results.R b/modules/uncertainty/R/get.results.R
index 89024960336..9625f803189 100644
--- a/modules/uncertainty/R/get.results.R
+++ b/modules/uncertainty/R/get.results.R
@@ -1,12 +1,3 @@
-#-------------------------------------------------------------------------------
-# Copyright (c) 2012 University of Illinois, NCSA.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the
-# University of Illinois/NCSA Open Source License
-# which accompanies this distribution, and is available at
-# http://opensource.ncsa.illinois.edu/license.html
-#-------------------------------------------------------------------------------
-
##' Reads model output and runs sensitivity and ensemble analyses
##'
##' Output is placed in model output directory (settings$outdir).
diff --git a/modules/uncertainty/R/plots.R b/modules/uncertainty/R/plots.R
index c39435efb5a..1cc9fa81418 100644
--- a/modules/uncertainty/R/plots.R
+++ b/modules/uncertainty/R/plots.R
@@ -1,12 +1,3 @@
-#-------------------------------------------------------------------------------
-# Copyright (c) 2012 University of Illinois, NCSA.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the
-# University of Illinois/NCSA Open Source License
-# which accompanies this distribution, and is available at
-# http://opensource.ncsa.illinois.edu/license.html
-#-------------------------------------------------------------------------------
-
##--------------------------------------------------------------------------------------------------#
##' Variance Decomposition Plots
##'
diff --git a/modules/uncertainty/R/run.ensemble.analysis.R b/modules/uncertainty/R/run.ensemble.analysis.R
index 9447b59a21c..2f9bbeb2b99 100644
--- a/modules/uncertainty/R/run.ensemble.analysis.R
+++ b/modules/uncertainty/R/run.ensemble.analysis.R
@@ -1,12 +1,3 @@
-#-------------------------------------------------------------------------------
-# Copyright (c) 2012 University of Illinois, NCSA.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the
-# University of Illinois/NCSA Open Source License
-# which accompanies this distribution, and is available at
-# http://opensource.ncsa.illinois.edu/license.html
-#-------------------------------------------------------------------------------
-
#' run ensemble.analysis
#'
#' @param settings PEcAn settings object
diff --git a/modules/uncertainty/R/run.sensitivity.analysis.R b/modules/uncertainty/R/run.sensitivity.analysis.R
index ec7b58a89a0..f6172c1aca6 100644
--- a/modules/uncertainty/R/run.sensitivity.analysis.R
+++ b/modules/uncertainty/R/run.sensitivity.analysis.R
@@ -1,217 +1,257 @@
-#-------------------------------------------------------------------------------
-# Copyright (c) 2012 University of Illinois, NCSA.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the
-# University of Illinois/NCSA Open Source License
-# which accompanies this distribution, and is available at
-# http://opensource.ncsa.illinois.edu/license.html
-#-------------------------------------------------------------------------------
-#--------------------------------------------------------------------------------------------------#
-##' run sensitivity.analysis
-##'
-##' Runs the sensitivity analysis module on a finished run
-##'
-##' @return nothing, saves \code{sensitivity.results} as
-##' sensitivity.results.Rdata, sensitivity plots as sensitivityanalysis.pdf,
-##' and variance decomposition 'popsicle plot' as variancedecomposition.pdf a
-##' side effect (OPTIONAL)
-##'
-##' @param settings a PEcAn settings object
-##' @param plot logical. Option to generate sensitivity analysis and variance
-##' decomposition plots (plot=TRUE) or to turn these plots off (plot=FALSE).
-##' @param ensemble.id ensemble ID
-##' @param variable which varibable(s) to do sensitivity analysis for. Defaults
-##' to all specified in `settings`
-##' @param start.year defaults to what is specified in `settings`
-##' @param end.year defaults to what is specified in `settings`
-##' @param pfts a vector of PFT names found in `settings` to run sensitivity
-##' analysis on
-##' @param ... currently unused
-##'
-##'
-##' @export
-##' @author David LeBauer, Shawn Serbin, Ryan Kelly
-##' @examples
-##' \dontrun{
-##' library(PEcAn.settings)
-##' library(PEcAn.uncertainty)
-##' settings <- read.settings("path/to/pecan.xml")
-##' run.sensitivity.analysis(settings)
-##' }
-##'
-run.sensitivity.analysis <-
- function(settings,
- plot = TRUE,
- ensemble.id = NULL,
- variable = NULL,
- start.year = NULL,
- end.year = NULL,
- pfts = NULL,
- ...) {
-
- if ('sensitivity.analysis' %in% names(settings)) {
- # Set variable and years. Use args first, then settings, then defaults/error
- if(is.null(start.year)) {
- start.year <- settings$sensitivity.analysis$start.year
+#' run sensitivity.analysis
+#'
+#' Runs the sensitivity analysis module on a finished run
+#'
+#' @return nothing, saves \code{sensitivity.results} as
+#' sensitivity.results.Rdata, sensitivity plots as sensitivityanalysis.pdf,
+#' and variance decomposition 'popsicle plot' as variancedecomposition.pdf a
+#' side effect (OPTIONAL)
+#'
+#' @param settings a PEcAn settings object
+#' @param plot logical. Option to generate sensitivity analysis and variance
+#' decomposition plots (plot=TRUE) or to turn these plots off (plot=FALSE).
+#' @param ensemble.id ensemble ID
+#' @param variable which variable(s) to do sensitivity analysis for. Defaults
+#' to all specified in `settings`
+#' @param start.year defaults to what is specified in `settings`
+#' @param end.year defaults to what is specified in `settings`
+#' @param pfts a vector of PFT names found in `settings` to run sensitivity
+#' analysis on
+#' @param ... currently unused
+#'
+#'
+#' @export
+#' @author David LeBauer, Shawn Serbin, Ryan Kelly
+#' @examples
+#' \dontrun{
+#' library(PEcAn.settings)
+#' library(PEcAn.uncertainty)
+#' settings <- read.settings("path/to/pecan.xml")
+#' run.sensitivity.analysis(settings)
+#' }
+#'
+run.sensitivity.analysis <- function(settings,
+ plot = TRUE,
+ ensemble.id = NULL,
+ variable = NULL,
+ start.year = NULL,
+ end.year = NULL,
+ pfts = NULL,
+ ...) {
+
+ if (!"sensitivity.analysis" %in% names(settings)) {
+ # nothing to do
+ return()
+ }
+ # Set variable and years. Use args first, then settings, then defaults/error
+ if (is.null(start.year)) {
+ start.year <- settings$sensitivity.analysis$start.year
+ }
+ if (is.null(end.year)) {
+ end.year <- settings$sensitivity.analysis$end.year
+ }
+ if (is.null(start.year) || is.null(end.year)) {
+ PEcAn.logger::logger.severe("No years given for sensitivity analysis!")
+ }
+ if (is.null(variable)) {
+ if ("variable" %in% names(settings$sensitivity.analysis)) {
+ var <- which(names(settings$sensitivity.analysis) == "variable")
+ for (i in seq_along(var)) {
+ variable[i] <- settings$sensitivity.analysis[[var[i]]]
+ }
}
- if(is.null(end.year)) {
- end.year <- settings$sensitivity.analysis$end.year
+ }
+ if (is.null(variable)) {
+ PEcAn.logger::logger.severe("No variables for sensitivity analysis!")
+ }
+ if (is.null(pfts)) {
+    # extract just pft names
+ pfts <- purrr::map_chr(settings$pfts, "name")
+ if (!is.null(settings$run$site$site.pft)) {
+ pfts <- pfts[pfts %in% settings$run$site$site.pft]
}
- if(is.null(start.year) | is.null(end.year)) {
- PEcAn.logger::logger.severe("No years given for sensitivity analysis!")
+ } else {
+ # validate pfts argument
+ if (!is.character(pfts)) {
+ PEcAn.logger::logger.severe("Please supply a character vector for `pfts`")
}
- if (is.null(variable)) {
- if ("variable" %in% names(settings$sensitivity.analysis)) {
- var <- which(names(settings$sensitivity.analysis) == "variable")
- for (i in seq_along(var)) {
- variable[i] <- settings$sensitivity.analysis[[var[i]]]
- }
- }
+ if (!pfts %in% purrr::map_chr(settings$pfts, "name")) {
+ PEcAn.logger::logger.severe("`pfts` must be a subset of the PFTs defined in `settings`")
}
- if(is.null(variable)) {
- PEcAn.logger::logger.severe("No variables for sensitivity analysis!")
+ }
+
+ variables <- variable
+ for (variable in variables) {
+ PEcAn.logger::logger.warn("Currently performing sensitivity analysis on variable ", variable)
+
+ ### Load samples
+ # Have to load samples.Rdata for the traits.
+ # But can overwrite the run ids if an SA ensemble id provided.
+ # samples.Rdata always has only the most recent ensembles for both ensemble
+ # and sensitivity runs.
+ fname <- file.path(settings$outdir, "samples.Rdata")
+ if (!file.exists(fname)) {
+ PEcAn.logger::logger.severe("No samples.Rdata file found!")
}
- if(is.null(pfts)) {
- #extract just pft names
- pfts <- purrr::map_chr(settings$pfts, "name")
- } else {
- # validate pfts argument
- if(!is.character(pfts)) {
- PEcAn.logger::logger.severe("Please supply a character vector for `pfts`")
- }
- if(!pfts %in% purrr::map_chr(settings$pfts, "name")) {
- PEcAn.logger::logger.severe("`pfts` must be a subset of the PFTs defined in `settings`")
- }
+ samples <- new.env()
+ load(fname, envir = samples)
+
+ # Ensemble ID is expected to be specified in function args or settings.
+ # If none there, create one specific to this site.
+ ensemble.id <- ensemble.id %||%
+ settings$sensitivity.analysis$ensemble.id %||%
+ rlang::hash(settings)
+ fname <- sensitivity.filename(settings, "sensitivity.samples", "Rdata",
+ ensemble.id = ensemble.id,
+ all.var.yr = TRUE)
+ if (file.exists(fname)) {
+ load(fname, envir = samples)
}
- variables <- variable
- if(length(variables) >= 1) {
- for(variable in variables){
- PEcAn.logger::logger.warn(paste0("Currently performing sensitivity analysis on variable ", variable))
-
- ### Load samples
- # Have to load samples.Rdata for the traits. But can overwrite the run ids if a sensitivity analysis ensemble id provided. samples.Rdata always has only the most recent ensembles for both ensemble and sensitivity runs.
- fname <- file.path(settings$outdir, 'samples.Rdata')
- if(!file.exists(fname)) PEcAn.logger::logger.severe("No samples.Rdata file found!")
- samples <- new.env()
- load(fname, envir = samples)
-
- # Can specify ensemble ids manually. If not, look in settings. If none there, will use the most recent, which was loaded with samples.Rdata
- if(!is.null(ensemble.id)) {
- fname <- sensitivity.filename(settings, "sensitivity.samples", "Rdata",
- ensemble.id=ensemble.id, all.var.yr=TRUE)
- } else if(!is.null(settings$sensitivity.analysis$ensemble.id)) {
- ensemble.id <- settings$sensitivity.analysis$ensemble.id
- fname <- sensitivity.filename(settings, "sensitivity.samples", "Rdata",
- ensemble.id=ensemble.id, all.var.yr=TRUE)
- } else {
- ensemble.id <- NULL
- }
- if(file.exists(fname)) {
- load(fname, envir = samples)
+ # For backwards compatibility, define some variables if not just loaded
+ if (is.null(samples$pft.names)) {
+ samples$pft.names <- names(samples$trait.samples)
+ }
+ if (is.null(samples$trait.names)) {
+ samples$trait.names <- lapply(samples$trait.samples, names)
+ }
+ if (is.null(samples$sa.run.ids)) {
+ samples$sa.run.ids <- samples$runs.samples$sa
+ }
+
+ ### Load parsed model results
+ variables <- PEcAn.utils::convert.expr(variable)
+ variable.fn <- variables$variable.drv
+
+ fname <- sensitivity.filename(
+ settings, "sensitivity.output", "Rdata",
+ all.var.yr = FALSE,
+ ensemble.id = ensemble.id,
+ variable = variable.fn,
+ start.year = start.year,
+ end.year = end.year
+ )
+ sens_out <- new.env()
+ load(fname, envir = sens_out)
+
+ ### Generate SA output and diagnostic plots
+ sensitivity.results <- list()
+
+ for (pft in settings$pfts) {
+ if (!pft$name %in% pfts) {
+ next
+ }
+ traits <- samples$trait.names[[pft$name]]
+ quantiles.str <- rownames(samples$sa.samples[[pft$name]])
+ quantiles.str <- quantiles.str[which(quantiles.str != "50")]
+ quantiles <- as.numeric(quantiles.str) / 100
+
+ C.units <- grepl(
+ "^Celsius$",
+ PEcAn.utils::trait.lookup(traits)$units,
+ ignore.case = TRUE
+ )
+ if (any(C.units)) {
+ for (x in which(C.units)) {
+ samples$trait.samples[[pft$name]][[x]] <- PEcAn.utils::ud_convert(
+ samples$trait.samples[[pft$name]][[x]],
+ "degC",
+ "K"
+ )
}
-
- # For backwards compatibility, define some variables if not just loaded
- if(is.null(samples$pft.names)) samples$pft.names <- names(samples$trait.samples)
- if(is.null(samples$trait.names)) samples$trait.names <- lapply(samples$trait.samples, names)
- if(is.null(samples$sa.run.ids)) samples$sa.run.ids <- samples$runs.samples$sa
-
- ### Load parsed model results
- variables <- PEcAn.utils::convert.expr(variable)
- variable.fn <- variables$variable.drv
-
+ }
+
+ ## only perform sensitivity analysis on traits where no more than 2 results are missing
+ good.saruns <- sapply(
+ sens_out$sensitivity.output[[pft$name]],
+ function(x) sum(is.na(x)) <= 2
+ )
+ if (!all(good.saruns)) { # if any bad saruns, reduce list of traits and print warning
+ bad.saruns <- !good.saruns
+ warning(paste(
+ "missing >2 runs for", PEcAn.utils::vecpaste(traits[bad.saruns]),
+ "\n sensitivity analysis or variance decomposition will be performed on these trait(s)",
+ "\n it is likely that the runs did not complete, this should be fixed !!!!!!"
+ ))
+ }
+
+ ### Gather SA results
+ sensitivity.results[[pft$name]] <- sensitivity.analysis(
+ trait.samples = samples$trait.samples[[pft$name]][traits],
+ sa.samples = samples$sa.samples[[pft$name]][, traits, drop = FALSE],
+ sa.output = sens_out$sensitivity.output[[pft$name]][, traits, drop = FALSE],
+ outdir = pft$outdir
+ )
+
+ ### Send diagnostic output to the console
+ print(sensitivity.results[[pft$name]]$variance.decomposition.output)
+ print(sens_out$sensitivity.output[[pft$name]])
+
+ ### Plotting - Optional
+ if (plot) {
fname <- sensitivity.filename(
- settings, "sensitivity.output", "Rdata", all.var.yr = FALSE,
+ settings, "sensitivity.analysis", "pdf",
+ all.var.yr = FALSE, pft = pft$name,
ensemble.id = ensemble.id, variable = variable.fn,
- start.year = start.year, end.year = end.year)
- sens_out <- new.env()
- load(fname, envir = sens_out)
-
- ### Generate SA output and diagnostic plots
- sensitivity.results <- list()
-
- for (pft in settings$pfts) {
- if (pft$name %in% pfts) {
- traits <- samples$trait.names[[pft$name]]
- quantiles.str <- rownames(samples$sa.samples[[pft$name]])
- quantiles.str <- quantiles.str[which(quantiles.str != '50')]
- quantiles <- as.numeric(quantiles.str)/100
-
- C.units <- grepl('^Celsius$', PEcAn.utils::trait.lookup(traits)$units, ignore.case = TRUE)
- if(any(C.units)){
- for(x in which(C.units)) {
- samples$trait.samples[[pft$name]][[x]] <- PEcAn.utils::ud_convert(
- samples$trait.samples[[pft$name]][[x]], "degC", "K")
- }
- }
-
- ## only perform sensitivity analysis on traits where no more than 2 results are missing
- good.saruns <- sapply(sens_out$sensitivity.output[[pft$name]], function(x) sum(is.na(x)) <=2)
- if(!all(good.saruns)) { # if any bad saruns, reduce list of traits and print warning
- bad.saruns <- !good.saruns
- warning(paste('missing >2 runs for', PEcAn.utils::vecpaste(traits[bad.saruns]),
- '\n sensitivity analysis or variance decomposition will be performed on these trait(s)',
- '\n it is likely that the runs did not complete, this should be fixed !!!!!!'))
- }
-
- ### Gather SA results
- sensitivity.results[[pft$name]] <- sensitivity.analysis(
- trait.samples = samples$trait.samples[[pft$name]][traits],
- sa.samples = samples$sa.samples[[pft$name]][ ,traits, drop=FALSE],
- sa.output = sens_out$sensitivity.output[[pft$name]][ ,traits, drop=FALSE],
- outdir = pft$outdir)
-
- ### Send diagnostic output to the console
- print(sensitivity.results[[pft$name]]$variance.decomposition.output)
- print(sens_out$sensitivity.output[[pft$name]])
-
- ### Plotting - Optional
- if(plot){
- fname <- sensitivity.filename(
- settings, "sensitivity.analysis", "pdf",
- all.var.yr=FALSE, pft=pft$name, ensemble.id=ensemble.id, variable=variable.fn,
- start.year=start.year, end.year=end.year)
-
- ### Generate SA diagnostic plots
- sensitivity.plots <- plot_sensitivities(
- sensitivity.results[[pft$name]]$sensitivity.output, linesize = 1, dotsize = 3)
-
- grDevices::pdf(fname, height = 12, width = 9)
- ## arrange plots http://stackoverflow.com/q/10706753/199217
- ncol <- floor(sqrt(length(sensitivity.plots)))
- print(do.call(gridExtra::grid.arrange, c(sensitivity.plots, ncol=ncol)))
- print(sensitivity.plots) # old method. depreciated.
- grDevices::dev.off()
-
- ### Generate VD diagnostic plots
- vd.plots <- plot_variance_decomposition(sensitivity.results[[pft$name]]$variance.decomposition.output)
- #variance.scale = log, variance.prefix='Log')
- fname <- sensitivity.filename(settings, "variance.decomposition", "pdf",
- all.var.yr=FALSE, pft=pft$name, ensemble.id=ensemble.id, variable=variable.fn,
- start.year=start.year, end.year=end.year)
-
- grDevices::pdf(fname, width = 11, height = 8)
- do.call(gridExtra::grid.arrange, c(vd.plots, ncol = 4))
- grDevices::dev.off()
- }
-
- } ## end if sensitivity analysis
-
- fname <- sensitivity.filename(settings, "sensitivity.results", "Rdata",
- all.var.yr=FALSE, pft=NULL, ensemble.id=ensemble.id, variable=variable.fn,
- start.year=start.year, end.year=end.year)
-
- save(sensitivity.results, file = fname)
- }
+ start.year = start.year, end.year = end.year
+ )
+
+ ### Generate SA diagnostic plots
+ sensitivity.plots <- plot_sensitivities(
+ sensitivity.results[[pft$name]]$sensitivity.output,
+ linesize = 1,
+ dotsize = 3
+ )
+
+ grDevices::pdf(fname, height = 12, width = 9)
+ ## arrange plots http://stackoverflow.com/q/10706753/199217
+ ncol <- floor(sqrt(length(sensitivity.plots)))
+ do.call(gridExtra::grid.arrange, c(sensitivity.plots, ncol = ncol))
+ grDevices::dev.off()
+
+ ### Generate VD diagnostic plots
+ vd.plots <- plot_variance_decomposition(
+ sensitivity.results[[pft$name]]$variance.decomposition.output
+ )
+ #variance.scale = log, variance.prefix='Log')
+ fname <- sensitivity.filename(settings, "variance.decomposition", "pdf",
+ all.var.yr = FALSE,
+ pft = pft$name,
+ ensemble.id = ensemble.id,
+ variable = variable.fn,
+ start.year = start.year,
+ end.year = end.year)
+
+ grDevices::pdf(fname, width = 11, height = 8)
+ do.call(gridExtra::grid.arrange, c(vd.plots, ncol = 4))
+ grDevices::dev.off()
}
}
+ fname <- sensitivity.filename(settings, "sensitivity.results", "Rdata",
+ all.var.yr = FALSE,
+ pft = NULL,
+ ensemble.id = ensemble.id,
+ variable = variable.fn,
+ start.year = start.year,
+ end.year = end.year)
+ save(sensitivity.results, file = fname)
}
}
-#==================================================================================================#
-##' @export
+
+
+
+#' Run sensitivity analysis for every Settings in a MultiSettings
+#'
+#' Caution: Not yet working for multisite settings.
+#' It will _run_ the analysis for all sites,
+#' but each site will overwrite the result from the previous one.
+#'
+#' @param settings PEcAn settings object
+#' @param ... additional arguments passed on to `run.sensitivity.analysis`
+#' @export
runModule.run.sensitivity.analysis <- function(settings, ...) {
- if(PEcAn.settings::is.MultiSettings(settings)) {
+ if (PEcAn.settings::is.MultiSettings(settings)) {
return(PEcAn.settings::papply(settings, runModule.run.sensitivity.analysis, ...))
} else if (PEcAn.settings::is.Settings(settings)) {
run.sensitivity.analysis(settings, ...)
diff --git a/modules/uncertainty/R/sensitivity.R b/modules/uncertainty/R/sensitivity.R
index 805da534db8..fcbe8188c2a 100644
--- a/modules/uncertainty/R/sensitivity.R
+++ b/modules/uncertainty/R/sensitivity.R
@@ -1,37 +1,27 @@
-#-------------------------------------------------------------------------------
-# Copyright (c) 2012 University of Illinois, NCSA.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the
-# University of Illinois/NCSA Open Source License
-# which accompanies this distribution, and is available at
-# http://opensource.ncsa.illinois.edu/license.html
-#-------------------------------------------------------------------------------
-
-##' Reads output of sensitivity analysis runs
-##'
-##'
-##' @title Read Sensitivity Analysis output
-##' @return dataframe with one col per quantile analysed and one row per trait,
-##' each cell is a list of AGB over time
-##' @param traits model parameters included in the sensitivity analysis
-##' @param quantiles quantiles selected for sensitivity analysis
-##' @param pecandir specifies where pecan writes its configuration files
-##' @param outdir directory with model output to use in sensitivity analysis
-##' @param pft.name name of PFT used in sensitivity analysis (Optional)
-##' @param start.year first year to include in sensitivity analysis
-##' @param end.year last year to include in sensitivity analysis
-##' @param variable variables to be read from model output
-##' @param per.pft flag to determine whether we want SA on pft-specific variables
-##' @param sa.run.ids list of run ids to read.
-##' If NULL, will look in `pecandir` for a file named `samples.Rdata`
-##' and read from that
-##' @export
-##' @author Ryan Kelly, David LeBauer, Rob Kooper, Mike Dietze, Istem Fer
-#--------------------------------------------------------------------------------------------------#
-read.sa.output <- function(traits, quantiles, pecandir, outdir, pft.name = "",
- start.year, end.year, variable, sa.run.ids = NULL, per.pft = FALSE) {
-
-
+#' Reads output of sensitivity analysis runs
+#'
+#'
+#' @return dataframe with one col per quantile analysed and one row per trait,
+#' each cell is a list of AGB over time
+#' @param traits model parameters included in the sensitivity analysis
+#' @param quantiles quantiles selected for sensitivity analysis
+#' @param pecandir specifies where pecan writes its configuration files
+#' @param outdir directory with model output to use in sensitivity analysis
+#' @param pft.name name of PFT used in sensitivity analysis (Optional)
+#' @param start.year first year to include in sensitivity analysis
+#' @param end.year last year to include in sensitivity analysis
+#' @param variable variables to be read from model output
+#' @param per.pft flag to determine whether we want SA on pft-specific variables
+#' @param sa.run.ids list of run ids to read.
+#' If NULL, will look in `pecandir` for a file named `samples.Rdata`
+#' and read from that
+#' @export
+#' @author Ryan Kelly, David LeBauer, Rob Kooper, Mike Dietze, Istem Fer
+read.sa.output <- function(traits, quantiles, pecandir, outdir, pft.name = "",
+ start.year, end.year, variable, sa.run.ids = NULL,
+ per.pft = FALSE) {
+
+
if (is.null(sa.run.ids)) {
samples.file <- file.path(pecandir, "samples.Rdata")
if (file.exists(samples.file)) {
@@ -39,25 +29,28 @@ read.sa.output <- function(traits, quantiles, pecandir, outdir, pft.name = "",
load(samples.file, envir = samples)
sa.run.ids <- samples$runs.samples$sa
} else {
- PEcAn.logger::logger.error(samples.file, "not found, this file is required by the read.sa.output function")
+ PEcAn.logger::logger.error(
+ samples.file, "not found,",
+ "this file is required by the read.sa.output function"
+ )
}
}
-
+
sa.output <- matrix(nrow = length(quantiles),
ncol = length(traits),
dimnames = list(quantiles, traits))
-
+
expr <- variable$expression
variables <- variable$variables
-
- for(trait in traits){
- for(quantile in quantiles){
+
+ for (trait in traits) {
+ for (quantile in quantiles) {
run.id <- sa.run.ids[[pft.name]][quantile, trait]
-
- for(var in seq_along(variables)){
+
+ for (var in seq_along(variables)) {
# if SA is requested on a variable available per pft, pass pft.name to read.output
# so that it only returns values for that pft
- pass_pft <- switch(per.pft + 1, NULL, pft.name)
+ pass_pft <- switch(per.pft + 1, NULL, pft.name)
out.tmp <- PEcAn.utils::read.output(
runid = run.id,
outdir = file.path(outdir, run.id),
@@ -66,11 +59,11 @@ read.sa.output <- function(traits, quantiles, pecandir, outdir, pft.name = "",
pft.name = pass_pft)
assign(variables[var], out.tmp[[variables[var]]])
}
-
+
# derivation
out <- eval(parse(text = expr))
-
- sa.output[quantile, trait] <- mean(out, na.rm=TRUE)
+
+ sa.output[quantile, trait] <- mean(out, na.rm = TRUE)
} ## end loop over quantiles
PEcAn.logger::logger.info("reading sensitivity analysis output for model run at ", quantiles, "quantiles of trait", trait)
@@ -80,26 +73,30 @@ read.sa.output <- function(traits, quantiles, pecandir, outdir, pft.name = "",
} # read.sa.output
-##' Write sensitivity analysis config files
-##'
-##' Writes config files for use in sensitivity analysis.
-##'
-##' @param defaults named list with default parameter values
-##' @param quantile.samples list of lists supplied by \link{get.sa.samples}
-##' @param settings list of settings
-##' @param model name of model to be run
-##' @param clean logical: Delete any existing contents of the directory specified by \code{settings$rundir} before writing to it?
-##' @param write.to.db logical: Record this run to BETY? If TRUE, uses connection settings specified in \code{settings$database}
-##'
-##' @return list, containing $runs = data frame of runids, and $ensemble.id = the ensemble ID for these runs. Also writes sensitivity analysis configuration files as a side effect
-##' @export
-##' @author David LeBauer, Carl Davidson
+#' Write sensitivity analysis config files
+#'
+#' Writes config files for use in sensitivity analysis.
+#'
+#' @param defaults named list with default parameter values
+#' @param quantile.samples list of lists supplied by \link{get.sa.samples}
+#' @param settings list of settings
+#' @param model name of model to be run
+#' @param clean logical: Delete any existing contents of the directory specified
+#' by \code{settings$rundir} before writing to it?
+#' @param write.to.db logical: Record this run to BETY? If TRUE, uses connection
+#' settings specified in \code{settings$database}
+#'
+#' @return list, containing $runs = data frame of runids,
+#' and $ensemble.id = the ensemble ID for these runs.
+#' Also writes sensitivity analysis configuration files as a side effect
+#' @export
+#' @author David LeBauer, Carl Davidson
write.sa.configs <- function(defaults, quantile.samples, settings, model,
clean = FALSE, write.to.db = TRUE) {
scipen <- getOption("scipen")
options(scipen = 12)
my.write.config <- paste("write.config.", model, sep = "")
-
+
if (write.to.db) {
con <- try(PEcAn.DB::db.open(settings$database$bety), silent = TRUE)
if (inherits(con, "try-error")) {
@@ -110,85 +107,105 @@ write.sa.configs <- function(defaults, quantile.samples, settings, model,
} else {
con <- NULL
}
-
+
# Get the workflow id
if ("workflow" %in% names(settings)) {
workflow.id <- settings$workflow$id
} else {
workflow.id <- -1
}
-
+
# find all inputs that have an id
inputs <- names(settings$run$inputs)
inputs <- inputs[grepl(".id$", inputs)]
-
+
runs <- data.frame()
-
+
# Reading the site.pft specific tags from xml
site.pfts.vec <- as.character(unlist(settings$run$site$site.pft))
-
- if(!is.null(site.pfts.vec)){
+
+ if (!is.null(site.pfts.vec)) {
# find the name of pfts defined in the body of pecan.xml
- defined.pfts <- as.character(unlist(purrr::map(settings$pfts, 'name')))
- # subset ensemble samples based on the pfts that are specified in the site and they are also sampled from.
- if (length(which(site.pfts.vec %in% defined.pfts)) > 0 )
- quantile.samples <- quantile.samples [site.pfts.vec[ which(site.pfts.vec %in% defined.pfts) ]]
+ defined.pfts <- as.character(unlist(purrr::map(settings$pfts, "name")))
+ # subset ensemble samples based on the pfts that are specified in the site
+ # and they are also sampled from.
+ if (length(which(site.pfts.vec %in% defined.pfts)) > 0) {
+ quantile.samples <- quantile.samples [site.pfts.vec[which(site.pfts.vec %in% defined.pfts)]]
+ }
# warn if there is a pft specified in the site but it's not defined in the pecan xml.
- if (length(which(!(site.pfts.vec %in% defined.pfts)))>0)
- PEcAn.logger::logger.warn(paste0("The following pfts are specified for the siteid ", settings$run$site$id ," but they are not defined as a pft in pecan.xml:",
- site.pfts.vec[which(!(site.pfts.vec %in% defined.pfts))]))
+ if (length(which(!(site.pfts.vec %in% defined.pfts))) > 0) {
+ PEcAn.logger::logger.warn(
+ "The following pfts are specified for the siteid ",
+ settings$run$site$id,
+ " but they are not defined as a pft in pecan.xml:",
+ site.pfts.vec[which(!(site.pfts.vec %in% defined.pfts))]
+ )
+ }
}
-
-
+
+
## write median run
MEDIAN <- "50"
median.samples <- list()
for (i in seq_along(quantile.samples)) {
- median.samples[[i]] <- quantile.samples[[i]][MEDIAN, , drop=FALSE]
+ median.samples[[i]] <- quantile.samples[[i]][MEDIAN, , drop = FALSE]
}
names(median.samples) <- names(quantile.samples)
if (!is.null(con)) {
+ # Note: ignores any existing run or ensemble ids in settings
ensemble.id <- PEcAn.DB::db.query(paste0(
"INSERT INTO ensembles (runtype, workflow_id) ",
"VALUES ('sensitivity analysis', ", format(workflow.id, scientific = FALSE), ") ",
- "RETURNING id"), con = con)[['id']]
-
- paramlist <- paste0("quantile=MEDIAN,trait=all,pft=",
- paste(lapply(settings$pfts, function(x) x[["name"]]), sep = ","))
- run.id <- PEcAn.DB::db.query(paste0("INSERT INTO runs ",
+ "RETURNING id"), con = con)[["id"]]
+
+ paramlist <- paste0(
+ "quantile=MEDIAN,trait=all,pft=",
+ paste(lapply(settings$pfts, function(x) x[["name"]]), sep = ",")
+ )
+ run.id <- PEcAn.DB::db.query(paste0(
+ "INSERT INTO runs ",
"(model_id, site_id, start_time, finish_time, outdir, ensemble_id, parameter_list) ",
- "values ('",
- settings$model$id, "', '",
- settings$run$site$id, "', '",
- settings$run$start.date, "', '",
- settings$run$end.date, "', '",
- settings$run$outdir, "', ",
- ensemble.id, ", '",
+ "values ('",
+ settings$model$id, "', '",
+ settings$run$site$id, "', '",
+ settings$run$start.date, "', '",
+ settings$run$end.date, "', '",
+ settings$run$outdir, "', ",
+ ensemble.id, ", '",
paramlist, "') ",
- "RETURNING id"), con = con)[['id']]
-
+ "RETURNING id"), con = con)[["id"]]
+
# associate posteriors with ensembles
for (pft in defaults) {
- PEcAn.DB::db.query(paste0(
- "INSERT INTO posteriors_ensembles (posterior_id, ensemble_id) ",
- "values (", pft$posteriorid, ", ", ensemble.id, ")"), con = con)
+ PEcAn.DB::db.query(
+ paste0(
+ "INSERT INTO posteriors_ensembles (posterior_id, ensemble_id) ",
+ "values (", pft$posteriorid, ", ", ensemble.id, ")"),
+ con = con
+ )
}
-
+
# associate inputs with runs
if (!is.null(inputs)) {
for (x in inputs) {
- PEcAn.DB::db.query(paste0(
- "INSERT INTO inputs_runs (input_id, run_id) ",
- "values (", settings$run$inputs[[x]], ", ", run.id, ")"), con = con)
+ PEcAn.DB::db.query(
+ paste0(
+ "INSERT INTO inputs_runs (input_id, run_id) ",
+ "values (", settings$run$inputs[[x]], ", ", run.id, ")"),
+ con = con
+ )
}
}
} else {
- run.id <- PEcAn.utils::get.run.id("SA", "median")
- ensemble.id <- NA
+ run.id <- PEcAn.utils::get.run.id("SA", "median", site.id = settings$run$site$id)
+ # Use SA ensemble id if provided, or an arbitrary unique value if not
+ # Note: Since write.sa.configs is called separately for each site,
+ # a multisite run with no ID provided gives each site its own ensemble id!
+ ensemble.id <- settings$sensitivity.analysis$ensemble.id %||% rlang::hash(settings)
}
medianrun <- run.id
-
+
# create folders (cleaning up old ones if needed)
if (clean) {
unlink(file.path(settings$rundir, run.id))
@@ -196,47 +213,56 @@ write.sa.configs <- function(defaults, quantile.samples, settings, model,
}
dir.create(file.path(settings$rundir, run.id), recursive = TRUE)
dir.create(file.path(settings$modeloutdir, run.id), recursive = TRUE)
-
+
# write run information to disk TODO need to print list of pft names and trait
# names
- cat("runtype : sensitivity analysis\n",
- "workflow id : ", workflow.id, "\n",
+ cat("runtype : sensitivity analysis\n",
+ "workflow id : ", workflow.id, "\n",
"ensemble id : ", ensemble.id, "\n",
- "pft name : ALL PFT", "\n",
- "quantile : MEDIAN\n",
- "trait : ALL TRAIT", "\n",
+ "pft name : ALL PFT", "\n",
+ "quantile : MEDIAN\n",
+ "trait : ALL TRAIT", "\n",
"run id : ", run.id, "\n",
- "model : ", model, "\n",
+ "model : ", model, "\n",
"model id : ", settings$model$id, "\n",
- "site : ", settings$run$site$name, "\n",
- "site id : ", settings$run$site$id, "\n",
+ "site : ", settings$run$site$name, "\n",
+ "site id : ", settings$run$site$id, "\n",
"met data : ", settings$run$site$met, "\n",
- "start date : ", settings$run$start.date, "\n",
- "end date : ", settings$run$end.date, "\n",
- "hostname : ", settings$host$name, "\n",
- "rundir : ", file.path(settings$host$rundir, run.id), "\n",
- "outdir : ", file.path(settings$host$outdir, run.id), "\n",
- file = file.path(settings$rundir, run.id, "README.txt"),
+ "start date : ", settings$run$start.date, "\n",
+ "end date : ", settings$run$end.date, "\n",
+ "hostname : ", settings$host$name, "\n",
+ "rundir : ", file.path(settings$host$rundir, run.id), "\n",
+ "outdir : ", file.path(settings$host$outdir, run.id), "\n",
+ file = file.path(settings$rundir, run.id, "README.txt"),
sep = "")
-
-
- # I check to make sure the path under the met is a list. if it's specified what met needs to be used in 'met.id' under sensitivity analysis of pecan xml we used that otherwise, I use the first met.
- if (is.list(settings$run$inputs$met$path)){
- # This checks for met.id tag in the settings under sensitivity analysis - if it's not there it creates it. Then it's gonna use what it created.
- if (is.null(settings$sensitivity.analysis$met.id)) settings$sensitivity.analysis$met.id <- 1
-
+
+
+ # I check to make sure the path under the met is a list.
+ # if it's specified what met needs to be used in 'met.id' under sensitivity
+ # analysis of pecan xml we used that otherwise, I use the first met.
+ if (is.list(settings$run$inputs$met$path)) {
+ # This checks for met.id tag in the settings under sensitivity analysis -
+ # if it's not there it creates it. Then it's gonna use what it created.
+ if (is.null(settings$sensitivity.analysis$met.id)) {
+ settings$sensitivity.analysis$met.id <- 1
+ }
settings$run$inputs$met$path <- settings$run$inputs$met$path[[settings$sensitivity.analysis$met.id]]
-
+
}
-
-
+
+
# write configuration
- do.call(my.write.config, args = list(defaults = defaults,
- trait.values = median.samples,
+ do.call(my.write.config, args = list(defaults = defaults,
+ trait.values = median.samples,
settings = settings,
run.id = run.id))
- cat(run.id, file = file.path(settings$rundir, "runs.txt"), sep = "\n", append = TRUE)
-
+ cat(
+ run.id,
+ file = file.path(settings$rundir, "runs.txt"),
+ sep = "\n",
+ append = TRUE
+ )
+
## loop over pfts
runs <- list()
for (i in seq_along(names(quantile.samples))) {
@@ -244,45 +270,63 @@ write.sa.configs <- function(defaults, quantile.samples, settings, model,
if (pftname == "env") {
next
}
-
+
traits <- colnames(quantile.samples[[i]])
quantiles.str <- rownames(quantile.samples[[i]])
-
+
runs[[pftname]] <- data.frame()
-
+
## loop over variables
for (trait in traits) {
for (quantile.str in quantiles.str) {
- if (quantile.str != MEDIAN) {
+ if (quantile.str == MEDIAN) {
+ runs[[pftname]][MEDIAN, trait] <- medianrun
+ } else {
quantile <- as.numeric(quantile.str) / 100
trait.samples <- median.samples
- trait.samples[[i]][trait] <- quantile.samples[[i]][quantile.str, trait, drop=FALSE]
-
+ trait.samples[[i]][trait] <- quantile.samples[[i]][quantile.str, trait, drop = FALSE]
+
if (!is.null(con)) {
paramlist <- paste0("quantile=", quantile.str, ",trait=", trait, ",pft=", pftname)
- insert_result <- PEcAn.DB::db.query(paste0("INSERT INTO runs (model_id, site_id, start_time, finish_time, outdir, ensemble_id, parameter_list) values ('",
- settings$model$id, "', '",
- settings$run$site$id, "', '",
- settings$run$start.date, "', '",
- settings$run$end.date, "', '",
- settings$run$outdir, "', ",
- ensemble.id, ", '",
- paramlist, "') RETURNING id"), con = con)
+ insert_result <- PEcAn.DB::db.query(
+ paste0(
+ "INSERT INTO runs (",
+ "model_id, site_id, start_time, finish_time, outdir,",
+ " ensemble_id, parameter_list) ",
+ "values ('",
+ settings$model$id, "', '",
+ settings$run$site$id, "', '",
+ settings$run$start.date, "', '",
+ settings$run$end.date, "', '",
+ settings$run$outdir, "', ",
+ ensemble.id, ", '",
+ paramlist,
+ "') ",
+ "RETURNING id"),
+ con = con
+ )
run.id <- insert_result[["id"]]
-
+
# associate posteriors with ensembles
for (pft in defaults) {
- PEcAn.DB::db.query(paste0("INSERT INTO posteriors_ensembles (posterior_id, ensemble_id) values (",
- pft$posteriorid, ", ",
- ensemble.id, ");"), con = con)
+ PEcAn.DB::db.query(
+ paste0(
+ "INSERT INTO posteriors_ensembles (posterior_id, ensemble_id)",
+ "values (", pft$posteriorid, ", ", ensemble.id, ");"
+ ),
+ con = con
+ )
}
-
+
# associate inputs with runs
if (!is.null(inputs)) {
for (x in inputs) {
- PEcAn.DB::db.query(paste0("INSERT INTO inputs_runs (input_id, run_id) ",
- "values (", settings$run$inputs[[x]], ", ", run.id, ");"),
- con = con)
+ PEcAn.DB::db.query(
+ paste0(
+ "INSERT INTO inputs_runs (input_id, run_id) ",
+ "values (", settings$run$inputs[[x]], ", ", run.id, ");"),
+ con = con
+ )
}
}
} else {
@@ -290,10 +334,12 @@ write.sa.configs <- function(defaults, quantile.samples, settings, model,
run.type = "SA",
index = round(quantile, 3),
trait = trait,
- pft.name = names(trait.samples)[i])
+ pft.name = names(trait.samples)[i],
+ site.id = settings$run$site$id
+ )
}
runs[[pftname]][quantile.str, trait] <- run.id
-
+
# create folders (cleaning up old ones if needed)
if (clean) {
unlink(file.path(settings$rundir, run.id))
@@ -301,38 +347,40 @@ write.sa.configs <- function(defaults, quantile.samples, settings, model,
}
dir.create(file.path(settings$rundir, run.id), recursive = TRUE)
dir.create(file.path(settings$modeloutdir, run.id), recursive = TRUE)
-
+
# write run information to disk
- cat("runtype : sensitivity analysis\n",
- "workflow id : ", workflow.id, "\n",
- "ensemble id : ", ensemble.id, "\n",
- "pft name : ", names(trait.samples)[i], "\n",
- "quantile : ", quantile.str, "\n",
- "trait : ", trait, "\n",
- "run id : ", run.id, "\n",
- "model : ", model, "\n",
- "model id : ", settings$model$id, "\n",
- "site : ", settings$run$site$name, "\n",
- "site id : ", settings$run$site$id, "\n",
- "met data : ", settings$run$site$met, "\n",
- "start date : ", settings$run$start.date, "\n",
+ cat("runtype : sensitivity analysis\n",
+ "workflow id : ", workflow.id, "\n",
+ "ensemble id : ", ensemble.id, "\n",
+ "pft name : ", names(trait.samples)[i], "\n",
+ "quantile : ", quantile.str, "\n",
+ "trait : ", trait, "\n",
+ "run id : ", run.id, "\n",
+ "model : ", model, "\n",
+ "model id : ", settings$model$id, "\n",
+ "site : ", settings$run$site$name, "\n",
+ "site id : ", settings$run$site$id, "\n",
+ "met data : ", settings$run$site$met, "\n",
+ "start date : ", settings$run$start.date, "\n",
"end date : ", settings$run$end.date, "\n",
- "hostname : ", settings$host$name, "\n",
- "rundir : ", file.path(settings$host$rundir, run.id), "\n",
- "outdir : ", file.path(settings$host$outdir, run.id), "\n",
- file = file.path(settings$rundir, run.id, "README.txt"),
+ "hostname : ", settings$host$name, "\n",
+ "rundir : ", file.path(settings$host$rundir, run.id), "\n",
+ "outdir : ", file.path(settings$host$outdir, run.id), "\n",
+ file = file.path(settings$rundir, run.id, "README.txt"),
sep = "")
-
+
# write configuration
do.call(my.write.config, args = list(defaults = defaults,
- trait.values = trait.samples,
+ trait.values = trait.samples,
settings = settings,
run.id))
- cat(run.id, file = file.path(settings$rundir, "runs.txt"), sep = "\n",
- append = TRUE)
- } else {
- runs[[pftname]][MEDIAN, trait] <- medianrun
+ cat(
+ run.id,
+ file = file.path(settings$rundir, "runs.txt"),
+ sep = "\n",
+ append = TRUE
+ )
}
}
}
diff --git a/modules/uncertainty/R/sensitivity.analysis.R b/modules/uncertainty/R/sensitivity.analysis.R
index 69f162d970f..52aa9f0662c 100644
--- a/modules/uncertainty/R/sensitivity.analysis.R
+++ b/modules/uncertainty/R/sensitivity.analysis.R
@@ -1,50 +1,61 @@
-#-------------------------------------------------------------------------------
-# Copyright (c) 2012 University of Illinois, NCSA. All rights reserved. This
-# program and the accompanying materials are made available under the terms of
-# the University of Illinois/NCSA Open Source License which accompanies this
-# distribution, and is available at
-# http://opensource.ncsa.illinois.edu/license.html
-#-------------------------------------------------------------------------------
-
-#--------------------------------------------------------------------------------------------------#
-##' Spline estimate of univariate relationship between parameter value and model output
-##'
-##' Creates a spline function using the splinefun function that estimates univariate response of parameter input to model output
-##'
-##' @param quantiles.input passed to `x` argument of `stats::splinefun`
-##' @param quantiles.output passed to `y` argument of `stats::splinefun`
-##' @export
-##' @return function
+#' Spline estimate of univariate relationship between parameter value and model output
+#'
+#' Creates a spline function using the splinefun function that estimates
+#' univariate response of parameter input to model output
+#'
+#' @param quantiles.input passed to `x` argument of `stats::splinefun`
+#' @param quantiles.output passed to `y` argument of `stats::splinefun`
+#' @export
+#' @return function
sa.splinefun <- function(quantiles.input, quantiles.output) {
- return(stats::splinefun(x = quantiles.input, y = quantiles.output, method = "monoH.FC"))
+ stats::splinefun(
+ x = quantiles.input,
+ y = quantiles.output,
+ method = "monoH.FC"
+ )
} # sa.splinefun
#--------------------------------------------------------------------------------------------------#
-##' Calculates the standard deviation of the variance estimate
-##'
-##' Uses the equation \eqn{\sigma^4\left(\frac{2}{n-1}+\frac{\kappa}{n}\right)}{\sigma^4 (2/(n-1) + \kappa/n)}
-##' @param x sample
-##' @return estimate of standard deviation of the sample variance
-##' @export
-##' @author David LeBauer
-##' @references Mood, Graybill, Boes 1974 'Introduction to the Theory of Statistics' 3rd ed. p 229; Casella and Berger 'Statistical Inference' p 364 ex. 7.45; 'Reference for Var(s^2)' CrossValidated \url{http://stats.stackexchange.com/q/29905/1381}, 'Calculating required sample size, precision of variance estimate' CrossValidated \url{http://stats.stackexchange.com/q/7004/1381}, 'Variance of Sample Variance?' Mathematics - Stack Exchange \url{http://math.stackexchange.com/q/72975/3733}
+#' Calculates the standard deviation of the variance estimate
+#'
+#' Uses the equation
+#' \eqn{
+#' \sigma^4\left(\frac{2}{n-1}+\frac{\kappa}{n}\right)}{\sigma^4 (2/(n-1) +
+#' \kappa/n)
+#' }
+#' @param x sample
+#' @return estimate of standard deviation of the sample variance
+#' @export
+#' @author David LeBauer
+#' @references Mood, Graybill, Boes 1974 'Introduction to the Theory of Statistics' 3rd ed. p 229;
+#' Casella and Berger 'Statistical Inference' p 364 ex. 7.45;
+#' 'Reference for Var(s^2)' CrossValidated
+#' \url{http://stats.stackexchange.com/q/29905/1381},
+#' 'Calculating required sample size, precision of variance estimate'
+#' CrossValidated \url{http://stats.stackexchange.com/q/7004/1381},
+#' 'Variance of Sample Variance?' Mathematics - Stack Exchange
+#' \url{http://math.stackexchange.com/q/72975/3733}
sd.var <- function(x) {
- return(stats::var(x, na.rm = TRUE)^2 * (2 / (sum(!is.na(x)) - 1) + kurtosis(x) / sum(!is.na(x))))
+ stats::var(x, na.rm = TRUE)^2 *
+ (2 / (sum(!is.na(x)) - 1) + kurtosis(x) / sum(!is.na(x)))
} # sd.var
#--------------------------------------------------------------------------------------------------#
-##' Calculates the excess kurtosis of a vector
-##'
-##' Note that this calculates the 'excess kurtosis', which is defined as kurtosis - 3.
-##' This statistic is used in the calculation of the standard deviation of sample variance
-##' in the function \code{\link{sd.var}}.
-##'
-##' @param x vector of values
-##' @return numeric value of kurtosis
-##' @author David LeBauer
-##' @references NIST/SEMATECH e-Handbook of Statistical Methods, \url{http://www.itl.nist.gov/div898/handbook/eda/section3/eda35b.htm}, 2011-06-20.
+#' Calculates the excess kurtosis of a vector
+#'
+#' Note that this calculates the 'excess kurtosis',
+#' which is defined as kurtosis - 3.
+#' This statistic is used in the calculation of the standard deviation of
+#' sample variance in the function \code{\link{sd.var}}.
+#'
+#' @param x vector of values
+#' @return numeric value of kurtosis
+#' @author David LeBauer
+#' @references NIST/SEMATECH e-Handbook of Statistical Methods,
+#' \url{http://www.itl.nist.gov/div898/handbook/eda/section3/eda35b.htm},
+#' 2011-06-20.
kurtosis <- function(x) {
kappa <- sum((x - mean(x, na.rm = TRUE))^4) /
((sum(!is.na(x)) - 1) * stats::sd(x, na.rm = TRUE)^4) - 3
@@ -54,16 +65,16 @@ kurtosis <- function(x) {
#--------------------------------------------------------------------------------------------------#
-##' Calculate the sensitivity of a function at the median
-##'
-##' This function evaluates the sensitivity of a model to a parameter.
-##' This is done by evaluating the first derivative of the univariate spline estimate
-##' of the model response at the parameter median.
-##'
-##' @param trait.samples parameter values to evaluate at their median
-##' @param sa.splinefun fitted spline function. Must take two arguments.
-##' @export
-##' @return numeric estimate of model sensitivity to parameter
+#' Calculate the sensitivity of a function at the median
+#'
+#' This function evaluates the sensitivity of a model to a parameter.
+#' This is done by evaluating the first derivative of the univariate spline
+#' estimate of the model response at the parameter median.
+#'
+#' @param trait.samples parameter values to evaluate at their median
+#' @param sa.splinefun fitted spline function. Must take two arguments.
+#' @export
+#' @return numeric estimate of model sensitivity to parameter
get.sensitivity <- function(trait.samples, sa.splinefun) {
sensitivity <- sa.splinefun(stats::median(trait.samples), 1)
return(sensitivity)
@@ -71,58 +82,68 @@ get.sensitivity <- function(trait.samples, sa.splinefun) {
#--------------------------------------------------------------------------------------------------#
-##' Get coefficient of variance
-##'
-##' Given a set of numbers (a numeric vector), this returns the set's coefficient of variance.
-##'
-##' @param set numeric vector of trait values
-##' @export
-##' @return coeficient of variance
+#' Get coefficient of variance
+#'
+#' Given a set of numbers (a numeric vector), this returns the set's
+#' coefficient of variance.
+#'
+#' @param set numeric vector of trait values
+#' @export
+#' @return coefficient of variance
get.coef.var <- function(set) {
return(sqrt(stats::var(set)) / stats::median(set))
} # get.coef.var
#--------------------------------------------------------------------------------------------------#
-##' Generic function for the elasticity
-##'
-##' Given the sensitivity, samples, and outputs for a single trait, return elasticity
-##'
-##' @param sensitivity univariate sensitivity of model to a parameter, can be calculated by \code{\link{get.sensitivity}}
-##' @param samples samples from trait distribution
-##' @param outputs model output from ensemble runs
-##' @export
-##' @return elasticity = normalized sensitivity
+#' Generic function for the elasticity
+#'
+#' Given the sensitivity, samples, and outputs for a single trait,
+#' return elasticity
+#'
+#' @param sensitivity univariate sensitivity of model to a parameter,
+#' can be calculated by \code{\link{get.sensitivity}}
+#' @param samples samples from trait distribution
+#' @param outputs model output from ensemble runs
+#' @export
+#' @return elasticity = normalized sensitivity
get.elasticity <- function(sensitivity, samples, outputs) {
return(sensitivity / (stats::median(outputs) / stats::median(samples)))
} # get.elasticity
#--------------------------------------------------------------------------------------------------#
-##' Performs univariate sensitivity analysis and variance decomposition
-##'
-##' This function estimates the univariate responses of a model to a parameter for a set of traits, calculates the model sensitivity at the median,
-##' and performs a variance decomposition. This function results in a set of sensitivity plots (one per variable) and plot_variance_decomposition.
-##'
-##' @param trait.samples list of vectors, one per trait, representing samples of the trait value, with length equal to the mcmc chain length.
-##' Samples are taken from either the prior distribution or meta-analysis results
-##' @param sa.samples data.frame with one column per trait and one row for the set of quantiles used in sensitivity analysis.
-## Each cell contains the value of the trait at the given quantile.
-##' @param sa.output list of data.frames, similar to sa.samples, except cells contain the results of a model run
-##' with that trait x quantile combination and all other traits held at their median value
-##' @param outdir directory to which plots are written
-##' @return results of sensitivity analysis
-##' @export
-##' @author David LeBauer
-##' @examples
-##' \dontrun{
-##' sensitivity.analysis(
-##' trait.samples[[pft$name]],
-##' sa.samples[[pft$name]],
-##' sa.agb[[pft$name]],
-##' pft$outdir
-##' )
-##' }
+#' Performs univariate sensitivity analysis and variance decomposition
+#'
+#' This function estimates the univariate responses of a model to a parameter
+#' for a set of traits, calculates the model sensitivity at the median,
+#' and performs a variance decomposition. This function results in a set of
+#' sensitivity plots (one per variable) and plot_variance_decomposition.
+#'
+#' @param trait.samples list of vectors, one per trait, representing samples of
+#' the trait value, with length equal to the mcmc chain length.
+#' Samples are taken from either the prior distribution or meta-analysis
+#' results
+#' @param sa.samples data.frame with one column per trait and one row for the
+#' set of quantiles used in sensitivity analysis.
+#' Each cell contains the value of the trait at the given quantile.
+#' @param sa.output list of data.frames, similar to sa.samples,
+#' except cells contain the results of a model run
+#' with that trait x quantile combination and all other traits held at
+#' their median value
+#' @param outdir directory to which plots are written
+#' @return results of sensitivity analysis
+#' @export
+#' @author David LeBauer
+#' @examples
+#' \dontrun{
+#' sensitivity.analysis(
+#' trait.samples[[pft$name]],
+#' sa.samples[[pft$name]],
+#' sa.agb[[pft$name]],
+#' pft$outdir
+#' )
+#' }
sensitivity.analysis <- function(trait.samples, sa.samples, sa.output, outdir) {
traits <- names(trait.samples)
sa.splines <- sapply(
@@ -170,25 +191,25 @@ sensitivity.analysis <- function(trait.samples, sa.samples, sa.output, outdir) {
} # sensitivity.analysis
-##' Truncate spline at zero if...
-##'
-##' Truncate spline at zero if P[x<0] < pnorm(-3)
-##' pnorm(-3) chosen as default value for min quantile
-##' because this is the default low end of range for the
-##' sensitivity analysis.
-##' This parameter could be determined based on minimum value in
-##' settings$sensitivity.analysis$quantiles
-##'
-##' @param x vector
-##' @param min.quantile threshold quantile for testing lower bound on variable
-##' @return either x or a vector with values < 0 converted to zero
-##' @author David LeBauer
-##' @export
-##' @examples
-##' set.seed(0)
-##' x <- c(rgamma(998,1,1), rnorm(10))
-##' min(x) # -0.5238
-##' min(PEcAn.uncertainty::spline.truncate(x))
+#' Truncate spline at zero if...
+#'
+#' Truncate spline at zero if P[x<0] < pnorm(-3)
+#' pnorm(-3) chosen as default value for min quantile
+#' because this is the default low end of range for the
+#' sensitivity analysis.
+#' This parameter could be determined based on minimum value in
+#' settings$sensitivity.analysis$quantiles
+#'
+#' @param x vector
+#' @param min.quantile threshold quantile for testing lower bound on variable
+#' @return either x or a vector with values < 0 converted to zero
+#' @author David LeBauer
+#' @export
+#' @examples
+#' set.seed(0)
+#' x <- c(rgamma(998,1,1), rnorm(10))
+#' min(x) # -0.5238
+#' min(PEcAn.uncertainty::spline.truncate(x))
spline.truncate <- function(x, min.quantile = stats::pnorm(-3)) {
if (stats::quantile(x, min.quantile) > 0) {
x <- PEcAn.utils::zero.truncate(x)
diff --git a/modules/uncertainty/R/variance.R b/modules/uncertainty/R/variance.R
index ee3d3120379..6a8a78a67c9 100644
--- a/modules/uncertainty/R/variance.R
+++ b/modules/uncertainty/R/variance.R
@@ -1,11 +1,3 @@
-#-------------------------------------------------------------------------------
-# Copyright (c) 2012 University of Illinois, NCSA.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the
-# University of Illinois/NCSA Open Source License
-# which accompanies this distribution, and is available at
-# http://opensource.ncsa.illinois.edu/license.html
-#-------------------------------------------------------------------------------
##' Variance and SD(variance)
##'
##' calculates variance and sd of variance using the \code{var} from base R and \code{\link{sd.var}} from PEcAn.
diff --git a/modules/uncertainty/inst/sobol/sobol_analysis.R b/modules/uncertainty/inst/sobol/sobol_analysis.R
new file mode 100644
index 00000000000..e0851c6011f
--- /dev/null
+++ b/modules/uncertainty/inst/sobol/sobol_analysis.R
@@ -0,0 +1,27 @@
+
+# R/run_sobol_analysis.R
+
+
+
+settings <- PEcAn.settings::read.settings("/projectnb/dietzelab/bthomas/pecan_runs/sipnet_test/pecan_updated.xml")
+ensemble_size = settings$ensemble$size
+if (PEcAn.settings::is.MultiSettings(settings)){
+ sobol_obj <- PEcAn.uncertainty::generate_joint_ensemble_design(settings = settings[1], ensemble_size = ensemble_size, sobol = TRUE)
+}else{
+ sobol_obj <- PEcAn.uncertainty::generate_joint_ensemble_design(settings = settings, ensemble_size = ensemble_size, sobol = TRUE) }
+
+PEcAn.workflow::runModule.run.write.configs(settings,input_design = sobol_obj$X )
+
+
+PEcAn.workflow::runModule_start_model_runs(settings, stop.on.error = TRUE)  # NOTE(review): `stop_on_error` was never defined; defaulting to TRUE — confirm intent
+
+
+
+sobol_results <- PEcAn.uncertainty::compute_sobol_indices(outdir = settings$outdir,
+ sobol_obj = sobol_obj,
+ var = "GPP")
+
+
+
+
+
diff --git a/modules/uncertainty/man/compute_sobol_indices.Rd b/modules/uncertainty/man/compute_sobol_indices.Rd
new file mode 100644
index 00000000000..4ebf55ece50
--- /dev/null
+++ b/modules/uncertainty/man/compute_sobol_indices.Rd
@@ -0,0 +1,26 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/compute_sobol_indices.R
+\name{compute_sobol_indices}
+\alias{compute_sobol_indices}
+\title{Compute Sobol indices from a finished PEcAn run}
+\usage{
+compute_sobol_indices(outdir, sobol_obj, var = "GPP", stat_fun = mean)
+}
+\arguments{
+\item{outdir}{PEcAn run output directory that contains runs.txt}
+
+\item{sobol_obj}{object produced by PEcAn.uncertainty::generate_joint_ensemble_design()}
+
+\item{var}{Variable name to summarise (default "GPP").}
+
+\item{stat_fun}{Summary statistic applied to \code{var}; default \code{mean}.}
+}
+\value{
+the input \code{sobol_obj}, updated with the computed
+Sobol indices.
+}
+\description{
+Loads model outputs from a Sobol ensemble, calculates summary
+statistics for a chosen variable, feeds them to \code{sensitivity::tell()},
+and returns the updated Sobol object.
+}
diff --git a/modules/uncertainty/man/generate_joint_ensemble_design.Rd b/modules/uncertainty/man/generate_joint_ensemble_design.Rd
new file mode 100644
index 00000000000..717c96103fe
--- /dev/null
+++ b/modules/uncertainty/man/generate_joint_ensemble_design.Rd
@@ -0,0 +1,34 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/generate_joint_ensemble_design.R
+\name{generate_joint_ensemble_design}
+\alias{generate_joint_ensemble_design}
+\title{Generate joint ensemble design for parameter sampling
+Creates a joint ensemble design that maintains parameter correlations across
+all sites in a multi-site run. This function generates sample indices that
+are shared across sites to ensure consistent parameter sampling.}
+\usage{
+generate_joint_ensemble_design(settings, ensemble_size, sobol = FALSE)
+}
+\arguments{
+\item{settings}{A PEcAn settings object containing ensemble configuration}
+
+\item{ensemble_size}{Integer specifying the number of ensemble members
+Since the `input_design` will only be generated once for the entire model run,
+the only situation, where we might want to recycle the existing `ensemble_samples`,
+is when we split and submit the larger SDA runs (e.g., 8,000 sites) into
+smaller SDA experiments (e.g., 100 sites per job), where we want to keep using
+the same parameters rather than creating new parameters for each job.}
+
+\item{sobol}{logical; if \code{TRUE}, generate a Sobol design via \code{sensitivity::soboljansen()}}
+}
+\value{
+A list containing ensemble samples and indices
+ If `sobol = TRUE`, the list will be a `sensitivity::soboljansen()`
+ result and will contain the components documented therein.
+}
+\description{
+Generate joint ensemble design for parameter sampling
+Creates a joint ensemble design that maintains parameter correlations across
+all sites in a multi-site run. This function generates sample indices that
+are shared across sites to ensure consistent parameter sampling.
+}
diff --git a/modules/uncertainty/man/get.coef.var.Rd b/modules/uncertainty/man/get.coef.var.Rd
index d595c5b8422..e84a6f2317d 100644
--- a/modules/uncertainty/man/get.coef.var.Rd
+++ b/modules/uncertainty/man/get.coef.var.Rd
@@ -13,5 +13,6 @@ get.coef.var(set)
coeficient of variance
}
\description{
-Given a set of numbers (a numeric vector), this returns the set's coefficient of variance.
+Given a set of numbers (a numeric vector), this returns the set's
+ coefficient of variance.
}
diff --git a/modules/uncertainty/man/get.elasticity.Rd b/modules/uncertainty/man/get.elasticity.Rd
index 502a29a5245..1ab4adb0e44 100644
--- a/modules/uncertainty/man/get.elasticity.Rd
+++ b/modules/uncertainty/man/get.elasticity.Rd
@@ -7,7 +7,8 @@
get.elasticity(sensitivity, samples, outputs)
}
\arguments{
-\item{sensitivity}{univariate sensitivity of model to a parameter, can be calculated by \code{\link{get.sensitivity}}}
+\item{sensitivity}{univariate sensitivity of model to a parameter,
+can be calculated by \code{\link{get.sensitivity}}}
\item{samples}{samples from trait distribution}
@@ -17,5 +18,6 @@ get.elasticity(sensitivity, samples, outputs)
elasticity = normalized sensitivity
}
\description{
-Given the sensitivity, samples, and outputs for a single trait, return elasticity
+Given the sensitivity, samples, and outputs for a single trait,
+ return elasticity
}
diff --git a/modules/uncertainty/man/get.ensemble.samples.Rd b/modules/uncertainty/man/get.ensemble.samples.Rd
index ac981dcb82e..c72f2e5e620 100644
--- a/modules/uncertainty/man/get.ensemble.samples.Rd
+++ b/modules/uncertainty/man/get.ensemble.samples.Rd
@@ -8,7 +8,7 @@ get.ensemble.samples(
ensemble.size,
pft.samples,
env.samples,
- method = "uniform",
+ method = "random",
param.names = NULL,
...
)
diff --git a/modules/uncertainty/man/get.parameter.samples.Rd b/modules/uncertainty/man/get.parameter.samples.Rd
index 44e3619a45a..7a9780eef35 100644
--- a/modules/uncertainty/man/get.parameter.samples.Rd
+++ b/modules/uncertainty/man/get.parameter.samples.Rd
@@ -2,10 +2,12 @@
% Please edit documentation in R/get.parameter.samples.R
\name{get.parameter.samples}
\alias{get.parameter.samples}
-\title{Convert priors / MCMC samples to chains that can be sampled for model parameters}
+\title{Convert priors / MCMC samples to chains that can be sampled
+ for model parameters}
\usage{
get.parameter.samples(
settings,
+ ensemble.size = 1,
posterior.files = rep(NA, length(settings$pfts)),
ens.sample.method = "uniform"
)
@@ -13,12 +15,15 @@ get.parameter.samples(
\arguments{
\item{settings}{PEcAn settings object}
+\item{ensemble.size}{number of runs in model ensemble}
+
\item{posterior.files}{list of filenames to read from}
\item{ens.sample.method}{one of "halton", "sobol", "torus", "lhc", "uniform"}
}
\description{
-Convert priors / MCMC samples to chains that can be sampled for model parameters
+Convert priors / MCMC samples to chains that can be sampled
+ for model parameters
}
\author{
David LeBauer, Shawn Serbin, Istem Fer
diff --git a/modules/uncertainty/man/get.sensitivity.Rd b/modules/uncertainty/man/get.sensitivity.Rd
index 0763edc5706..d18b7b9254b 100644
--- a/modules/uncertainty/man/get.sensitivity.Rd
+++ b/modules/uncertainty/man/get.sensitivity.Rd
@@ -16,6 +16,6 @@ numeric estimate of model sensitivity to parameter
}
\description{
This function evaluates the sensitivity of a model to a parameter.
-This is done by evaluating the first derivative of the univariate spline estimate
-of the model response at the parameter median.
+This is done by evaluating the first derivative of the univariate spline
+estimate of the model response at the parameter median.
}
diff --git a/modules/uncertainty/man/input.ens.gen.Rd b/modules/uncertainty/man/input.ens.gen.Rd
index 13ecf373805..1db269ac903 100644
--- a/modules/uncertainty/man/input.ens.gen.Rd
+++ b/modules/uncertainty/man/input.ens.gen.Rd
@@ -4,11 +4,19 @@
\alias{input.ens.gen}
\title{Function for generating samples based on sampling method, parent or etc}
\usage{
-input.ens.gen(settings, input, method = "sampling", parent_ids = NULL)
+input.ens.gen(
+ settings,
+ ensemble_size,
+ input,
+ method = "sampling",
+ parent_ids = NULL
+)
}
\arguments{
\item{settings}{list of PEcAn settings}
+\item{ensemble_size}{size of ensemble}
+
\item{input}{name of input to sample, e.g. "met", "veg", "pss"}
\item{method}{Method for sampling - For now looping or sampling with replacement is implemented}
diff --git a/modules/uncertainty/man/kurtosis.Rd b/modules/uncertainty/man/kurtosis.Rd
index ae913824d7f..e0241e36ec7 100644
--- a/modules/uncertainty/man/kurtosis.Rd
+++ b/modules/uncertainty/man/kurtosis.Rd
@@ -13,12 +13,15 @@ kurtosis(x)
numeric value of kurtosis
}
\description{
-Note that this calculates the 'excess kurtosis', which is defined as kurtosis - 3.
-This statistic is used in the calculation of the standard deviation of sample variance
-in the function \code{\link{sd.var}}.
+Note that this calculates the 'excess kurtosis',
+ which is defined as kurtosis - 3.
+This statistic is used in the calculation of the standard deviation of
+ sample variance in the function \code{\link{sd.var}}.
}
\references{
-NIST/SEMATECH e-Handbook of Statistical Methods, \url{http://www.itl.nist.gov/div898/handbook/eda/section3/eda35b.htm}, 2011-06-20.
+NIST/SEMATECH e-Handbook of Statistical Methods,
+ \url{http://www.itl.nist.gov/div898/handbook/eda/section3/eda35b.htm},
+ 2011-06-20.
}
\author{
David LeBauer
diff --git a/modules/uncertainty/man/read.sa.output.Rd b/modules/uncertainty/man/read.sa.output.Rd
index a6c252ce23a..07c5fae42e0 100644
--- a/modules/uncertainty/man/read.sa.output.Rd
+++ b/modules/uncertainty/man/read.sa.output.Rd
@@ -2,7 +2,7 @@
% Please edit documentation in R/sensitivity.R
\name{read.sa.output}
\alias{read.sa.output}
-\title{Read Sensitivity Analysis output}
+\title{Reads output of sensitivity analysis runs}
\usage{
read.sa.output(
traits,
diff --git a/modules/uncertainty/man/run.sensitivity.analysis.Rd b/modules/uncertainty/man/run.sensitivity.analysis.Rd
index e4f7f8dc99e..c094aff52bc 100644
--- a/modules/uncertainty/man/run.sensitivity.analysis.Rd
+++ b/modules/uncertainty/man/run.sensitivity.analysis.Rd
@@ -23,7 +23,7 @@ decomposition plots (plot=TRUE) or to turn these plots off (plot=FALSE).}
\item{ensemble.id}{ensemble ID}
-\item{variable}{which varibable(s) to do sensitivity analysis for. Defaults
+\item{variable}{which variable(s) to do sensitivity analysis for. Defaults
to all specified in `settings`}
\item{start.year}{defaults to what is specified in `settings`}
diff --git a/modules/uncertainty/man/runModule.run.sensitivity.analysis.Rd b/modules/uncertainty/man/runModule.run.sensitivity.analysis.Rd
new file mode 100644
index 00000000000..2f7a30cd5f7
--- /dev/null
+++ b/modules/uncertainty/man/runModule.run.sensitivity.analysis.Rd
@@ -0,0 +1,18 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/run.sensitivity.analysis.R
+\name{runModule.run.sensitivity.analysis}
+\alias{runModule.run.sensitivity.analysis}
+\title{Run sensitivity analysis for every Settings in a MultiSettings}
+\usage{
+runModule.run.sensitivity.analysis(settings, ...)
+}
+\arguments{
+\item{settings}{PEcAn settings object}
+
+\item{...}{additional arguments passed on to `run.sensitivity.analysis`}
+}
+\description{
+Caution: Not yet working for multisite settings.
+ It will _run_ the analysis for all sites,
+ but each site will overwrite the result from the previous one.
+}
diff --git a/modules/uncertainty/man/sa.splinefun.Rd b/modules/uncertainty/man/sa.splinefun.Rd
index 5e9b788ec11..5dfe96c6f97 100644
--- a/modules/uncertainty/man/sa.splinefun.Rd
+++ b/modules/uncertainty/man/sa.splinefun.Rd
@@ -15,5 +15,6 @@ sa.splinefun(quantiles.input, quantiles.output)
function
}
\description{
-Creates a spline function using the splinefun function that estimates univariate response of parameter input to model output
+Creates a spline function using the splinefun function that estimates
+univariate response of parameter input to model output
}
diff --git a/modules/uncertainty/man/sd.var.Rd b/modules/uncertainty/man/sd.var.Rd
index a1f3845d1c6..44e894d6a7f 100644
--- a/modules/uncertainty/man/sd.var.Rd
+++ b/modules/uncertainty/man/sd.var.Rd
@@ -13,10 +13,21 @@ sd.var(x)
estimate of standard deviation of the sample variance
}
\description{
-Uses the equation \eqn{\sigma^4\left(\frac{2}{n-1}+\frac{\kappa}{n}\right)}{\sigma^4 (2/(n-1) + \kappa/n)}
+Uses the equation
+\eqn{
+ \sigma^4\left(\frac{2}{n-1}+\frac{\kappa}{n}\right)}{\sigma^4 (2/(n-1) +
+ \kappa/n)
+}
}
\references{
-Mood, Graybill, Boes 1974 'Introduction to the Theory of Statistics' 3rd ed. p 229; Casella and Berger 'Statistical Inference' p 364 ex. 7.45; 'Reference for Var(s^2)' CrossValidated \url{http://stats.stackexchange.com/q/29905/1381}, 'Calculating required sample size, precision of variance estimate' CrossValidated \url{http://stats.stackexchange.com/q/7004/1381}, 'Variance of Sample Variance?' Mathematics - Stack Exchange \url{http://math.stackexchange.com/q/72975/3733}
+Mood, Graybill, Boes 1974 'Introduction to the Theory of Statistics' 3rd ed. p 229;
+ Casella and Berger 'Statistical Inference' p 364 ex. 7.45;
+ 'Reference for Var(s^2)' CrossValidated
+ \url{http://stats.stackexchange.com/q/29905/1381},
+ 'Calculating required sample size, precision of variance estimate'
+ CrossValidated \url{http://stats.stackexchange.com/q/7004/1381},
+ 'Variance of Sample Variance?' Mathematics - Stack Exchange
+ \url{http://math.stackexchange.com/q/72975/3733}
}
\author{
David LeBauer
diff --git a/modules/uncertainty/man/sensitivity.analysis.Rd b/modules/uncertainty/man/sensitivity.analysis.Rd
index 563ff528ceb..4cd5983949e 100644
--- a/modules/uncertainty/man/sensitivity.analysis.Rd
+++ b/modules/uncertainty/man/sensitivity.analysis.Rd
@@ -7,13 +7,18 @@
sensitivity.analysis(trait.samples, sa.samples, sa.output, outdir)
}
\arguments{
-\item{trait.samples}{list of vectors, one per trait, representing samples of the trait value, with length equal to the mcmc chain length.
-Samples are taken from either the prior distribution or meta-analysis results}
+\item{trait.samples}{list of vectors, one per trait, representing samples of
+the trait value, with length equal to the mcmc chain length.
+Samples are taken from either the prior distribution or meta-analysis
+results}
-\item{sa.samples}{data.frame with one column per trait and one row for the set of quantiles used in sensitivity analysis.}
+\item{sa.samples}{data.frame with one column per trait and one row for the
+set of quantiles used in sensitivity analysis.}
-\item{sa.output}{list of data.frames, similar to sa.samples, except cells contain the results of a model run
-with that trait x quantile combination and all other traits held at their median value}
+\item{sa.output}{list of data.frames, similar to sa.samples,
+except cells contain the results of a model run
+with that trait x quantile combination and all other traits held at
+their median value}
\item{outdir}{directory to which plots are written}
}
@@ -21,8 +26,10 @@ with that trait x quantile combination and all other traits held at their median
results of sensitivity analysis
}
\description{
-This function estimates the univariate responses of a model to a parameter for a set of traits, calculates the model sensitivity at the median,
- and performs a variance decomposition. This function results in a set of sensitivity plots (one per variable) and plot_variance_decomposition.
+This function estimates the univariate responses of a model to a parameter
+ for a set of traits, calculates the model sensitivity at the median,
+ and performs a variance decomposition. This function results in a set of
+ sensitivity plots (one per variable) and plot_variance_decomposition.
}
\examples{
\dontrun{
diff --git a/modules/uncertainty/man/write.ensemble.configs.Rd b/modules/uncertainty/man/write.ensemble.configs.Rd
index 95e4b797f15..34decd212ce 100644
--- a/modules/uncertainty/man/write.ensemble.configs.Rd
+++ b/modules/uncertainty/man/write.ensemble.configs.Rd
@@ -5,6 +5,8 @@
\title{Write ensemble config files}
\usage{
write.ensemble.configs(
+ input_design,
+ ensemble.size,
defaults,
ensemble.samples,
settings,
@@ -12,11 +14,18 @@ write.ensemble.configs(
clean = FALSE,
write.to.db = TRUE,
restart = NULL,
- samples = NULL,
rename = FALSE
)
}
\arguments{
+\item{input_design}{design matrix describing sampled inputs (see
+`run.write.configs()`). Columns named after `settings$run$inputs` tags give
+1-based indices into each input's `path` list and rows follow run order.
+Requires `nrow(input_design) >= ensemble.size`;
+extra rows are ignored.}
+
+\item{ensemble.size}{size of ensemble}
+
\item{defaults}{pft}
\item{ensemble.samples}{list of lists supplied by \link{get.ensemble.samples}}
@@ -31,8 +40,6 @@ write.ensemble.configs(
\item{restart}{In case this is a continuation of an old simulation. restart needs to be a list with name tags of runid, inputs, new.params (parameters), new.state (initial condition), ensemble.id (ensemble id), start.time and stop.time.See Details.}
-\item{samples}{Sampled inputs such as met and parameter files}
-
\item{rename}{Decide if we want to rename previous output files, for example convert from sipnet.out to sipnet.2020-07-16.out.}
}
\value{
@@ -46,7 +53,7 @@ a name to distinguish the output files, and the directory to place the files.
\details{
The restart functionality is developed using model specific functions by calling write_restart.modelname function. First, you need to make sure that this function is already exist for your desired model.See here \url{https://pecanproject.github.io/pecan-documentation/latest/pecan-models.html}
new state is a dataframe with a different column for each state variable. The number of the rows in this dataframe needs to be the same as the ensemble size.
-State variables that you can use for setting up the intial conditions differs for different models. You may check the documentation of the write_restart.modelname your model.
+The state variables that you can use for setting up initial conditions are model specific. Check the documentation of the write_restart. function for the model you are using.
The units for the state variables need to be in the PEcAn standard units which can be found in \link{standard_vars}.
new.params also has similar structure to ensemble.samples which is sent as an argument.
}
diff --git a/modules/uncertainty/man/write.sa.configs.Rd b/modules/uncertainty/man/write.sa.configs.Rd
index 281894d39d7..545970713c7 100644
--- a/modules/uncertainty/man/write.sa.configs.Rd
+++ b/modules/uncertainty/man/write.sa.configs.Rd
@@ -22,12 +22,16 @@ write.sa.configs(
\item{model}{name of model to be run}
-\item{clean}{logical: Delete any existing contents of the directory specified by \code{settings$rundir} before writing to it?}
+\item{clean}{logical: Delete any existing contents of the directory specified
+by \code{settings$rundir} before writing to it?}
-\item{write.to.db}{logical: Record this run to BETY? If TRUE, uses connection settings specified in \code{settings$database}}
+\item{write.to.db}{logical: Record this run to BETY? If TRUE, uses connection
+settings specified in \code{settings$database}}
}
\value{
-list, containing $runs = data frame of runids, and $ensemble.id = the ensemble ID for these runs. Also writes sensitivity analysis configuration files as a side effect
+list, containing $runs = data frame of runids,
+ and $ensemble.id = the ensemble ID for these runs.
+ Also writes sensitivity analysis configuration files as a side effect
}
\description{
Writes config files for use in sensitivity analysis.
diff --git a/modules/uncertainty/tests/Rcheck_reference.log b/modules/uncertainty/tests/Rcheck_reference.log
index 83272ecfb54..85c2d2f6649 100644
--- a/modules/uncertainty/tests/Rcheck_reference.log
+++ b/modules/uncertainty/tests/Rcheck_reference.log
@@ -49,7 +49,6 @@ License stub is invalid DCF.
* checking for missing documentation entries ... WARNING
Undocumented code objects:
‘runModule.run.ensemble.analysis’
- ‘runModule.run.sensitivity.analysis’
Undocumented data sets:
‘ensemble.output’ ‘sensitivity.output’ ‘ensemble.samples’
‘sa.samples’ ‘settings’ ‘trait.samples’
@@ -57,42 +56,7 @@ All user-level objects in a package should have documentation entries.
See chapter ‘Writing R documentation files’ in the ‘Writing R
Extensions’ manual.
* checking for code/documentation mismatches ... OK
-* checking Rd \usage sections ... WARNING
-Undocumented arguments in documentation object 'ensemble.ts'
- ‘ensemble.ts’ ‘observations’ ‘window’ ‘...’
-
-Undocumented arguments in documentation object 'flux.uncertainty'
- ‘...’
-
-Undocumented arguments in documentation object 'get.change'
- ‘measurement’
-
-Undocumented arguments in documentation object 'get.parameter.samples'
- ‘settings’ ‘posterior.files’ ‘ens.sample.method’
-
-Undocumented arguments in documentation object 'input.ens.gen'
- ‘input’
-
-Documented arguments not in \usage in documentation object 'plot_sensitivities':
- ‘sensitivity.results’
-
-Undocumented arguments in documentation object 'plot_sensitivity'
- ‘linesize’ ‘dotsize’
-
-Undocumented arguments in documentation object 'read.ameriflux.L2'
- ‘file.name’ ‘year’
-
-Undocumented arguments in documentation object 'read.ensemble.ts'
- ‘settings’ ‘ensemble.id’ ‘variable’ ‘start.year’ ‘end.year’
-
-Undocumented arguments in documentation object 'run.ensemble.analysis'
- ‘settings’ ‘ensemble.id’ ‘variable’ ‘start.year’ ‘end.year’ ‘...’
-
-Functions with \usage entries need to have the appropriate \alias
-entries, and all their arguments documented.
-The \usage entries must correspond to syntactically valid R code.
-See chapter ‘Writing R documentation files’ in the ‘Writing R
-Extensions’ manual.
+* checking Rd \usage sections ... OK
* checking Rd contents ... OK
* checking for unstated dependencies in examples ... OK
* checking contents of ‘data’ directory ... OK
diff --git a/modules/uncertainty/tests/testthat.R b/modules/uncertainty/tests/testthat.R
index 09d9953c3c2..c74882655e6 100644
--- a/modules/uncertainty/tests/testthat.R
+++ b/modules/uncertainty/tests/testthat.R
@@ -1,11 +1,3 @@
-#-------------------------------------------------------------------------------
-# Copyright (c) 2012 University of Illinois, NCSA.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the
-# University of Illinois/NCSA Open Source License
-# which accompanies this distribution, and is available at
-# http://opensource.ncsa.illinois.edu/license.html
-#-------------------------------------------------------------------------------
library(testthat)
library(PEcAn.utils)
diff --git a/modules/uncertainty/tests/testthat/test_ensemble.R b/modules/uncertainty/tests/testthat/test_ensemble.R
new file mode 100644
index 00000000000..8197c66d29a
--- /dev/null
+++ b/modules/uncertainty/tests/testthat/test_ensemble.R
@@ -0,0 +1,141 @@
+context("input validation for write.ensemble.configs")
+library(testthat)
+
+# Mock a model write.configs function to avoid model-specific errors
+write.config.SIPNET <- function(...) TRUE  # NOTE(review): renamed from write.configs.SIPNET to match PEcAn's write.config.MODEL convention (see stub below)
+
+# Helper: make input with correct structure
+make_input_sets <- function(paths) {
+ lapply(paths, function(p) list(path = p))
+}
+
+# Helper: make ensemble.samples with the correct structure
+make_samples <- function(samples) {
+  lapply(samples, function(s) s)  # NOTE(review): body referenced undefined `paths` (copy-paste from make_input_sets); confirm intended structure
+}
+
+# 1. One input, no samples → should pass
+test_that("1 input, no samples: passes", {
+ settings <- list(run = list(inputs = list(input = list(path = "IC1"))))
+ ensemble.samples <- NULL
+ defaults <- list()
+
+ expect_silent(write.ensemble.configs(
+ defaults = defaults,
+ ensemble.samples = ensemble.samples,
+ settings = settings,
+ model = "SIPNET",
+ write.to.db = FALSE
+ ))
+})
+
+
+
+test_that("no input error", {
+ settings <- list(run = list(inputs = list(input = NULL)))
+ ensemble.samples <- NULL
+ defaults <- list()
+
+ # Capture logger message
+ expect_silent(write.ensemble.configs(
+ defaults = defaults,
+ ensemble.samples = ensemble.samples,
+ settings = settings,
+ model = "SIPNET",
+ write.to.db = FALSE
+ ))
+})
+
+
+
+
+
+
+
+
+test_that("multiple inputs and multiple samples", {
+ # Mock the SIPNET config writer
+ mockery::stub(write.ensemble.configs, "write.config.SIPNET", function(...) TRUE)
+
+ # Create temp directories
+ temp_rundir <- tempfile()
+ temp_modeloutdir <- tempfile()
+ dir.create(temp_rundir)
+ dir.create(temp_modeloutdir)
+ on.exit({
+ unlink(temp_rundir, recursive = TRUE)
+ unlink(temp_modeloutdir, recursive = TRUE)
+ }, add = TRUE)
+
+ # Complete settings
+ settings <- list(
+ run = list(
+ inputs = list(input = list(path = "IC1")),
+ site = list(id = 1, name = "Test Site"),
+ start.date = "2000-01-01",
+ end.date = "2000-12-31",
+ outdir = temp_modeloutdir
+ ),
+ ensemble = list(size = 5),
+ database = NULL,
+ rundir = temp_rundir,
+ modeloutdir = temp_modeloutdir,
+ host = list(
+ rundir = temp_rundir,
+ outdir = temp_modeloutdir
+ ),
+ model = list(id = "SIPNET", type = "SIPNET"),
+ pfts = list(
+ list(name = "temperate",
+ constants = list(1),
+ posteriorid = 1)
+ )
+ )
+
+ # Sample parameters
+ ensemble.samples <- list(
+ temperate = data.frame(
+ SLA = c(15.2, 16.8, 14.7, 18.1, 17.5),
+ Vm0 = c(45.0, 50.3, 47.8, 49.1, 51.0)
+ )
+ )
+
+ # Default PFT settings
+ defaults <- list(
+ list(
+ name = "temperate",
+ constants = list(1),
+ posteriorid = 1
+ )
+ )
+
+ input_design <- data.frame(
+ met = c(1,7,3,5,6),
+ poolinitcond = c(43,48,4,21,39),
+ param = 1:5 # Or your actual param column
+ )
+
+ # Run test - should create directories and configs
+ result <- expect_silent(
+ write.ensemble.configs(
+ defaults = defaults,
+ ensemble.size = 5,
+ ensemble.samples = ensemble.samples,
+ settings = settings,
+ model = "SIPNET",
+ write.to.db = FALSE,
+ input_design = input_design
+ )
+ )
+
+ # Verify outputs
+ expect_type(result, "list")
+ expect_named(result, c("runs", "ensemble.id", "samples"))
+ expect_equal(nrow(result$runs), settings$ensemble$size)
+})
+
+
+
+
+
+
diff --git a/scripts/build_pkgdown.R b/scripts/build_pkgdown.R
index 3286796a6c1..587acf20ab7 100644
--- a/scripts/build_pkgdown.R
+++ b/scripts/build_pkgdown.R
@@ -1,15 +1,17 @@
#!/usr/bin/env Rscript
-
# Build pkgdown documentation for PEcAn packages
library(pkgdown)
-
+library(yaml)
+library(desc)
args <- commandArgs(trailingOnly = TRUE)
if (length(args) == 0) {
stop("No package names provided. Please pass package names as arguments.")
}
-
packages <- args
output_dir <- "_pkgdown_docs"
+if (!dir.exists(output_dir)) {
+ dir.create(output_dir, recursive = TRUE)
+}
if (requireNamespace("PEcAn.logger", quietly = TRUE)) {
logger <- PEcAn.logger::logger.info
@@ -21,6 +23,8 @@ if (requireNamespace("PEcAn.logger", quietly = TRUE)) {
logger("Building pkgdown docs for:", paste(packages, collapse = ", "))
+# Define branch variable once for all packages
+branch <- Sys.getenv("PECAN_GIT_BRANCH", unset = "develop")
for (pkg in packages) {
logger("Building pkgdown site for:", pkg)
current_wd <- getwd()
@@ -29,7 +33,23 @@ for (pkg in packages) {
stop(paste("Package directory does not exist:", pkg))
}
setwd(pkg)
- pkgdown::build_site()
+ pkgdown::build_site(
+ pkg = ".",
+ override = list(
+ repo = list(
+ url = list(
+ source = paste0("https://github.com/PecanProject/pecan/blob/",
+ branch, "/", pkg, "/")
+ )
+ ),
+ template = list(
+ bootstrap = 5,
+ includes = list(
+ before_navbar = "← Up"
+ )
+ )
+ )
+ )
setwd(current_wd)
source_docs <- file.path(pkg, "docs")
if (!dir.exists(source_docs)) {
@@ -68,7 +88,7 @@ before_text <- c(
'',
'PEcAn package documentation
',
'Function documentation and articles for each PEcAn package,',
- ' generated from the package source using {pkgdown}.
',
+ ' generated from the package source using pkgdown.
',
'',
''
)
diff --git a/scripts/ensure_version_bump.sh b/scripts/ensure_version_bump.sh
new file mode 100755
index 00000000000..7b85903e2b5
--- /dev/null
+++ b/scripts/ensure_version_bump.sh
@@ -0,0 +1,56 @@
+#!/bin/bash
+
+set -e
+
+# Check for PEcAn packages that need version increments
+#
+# Sample usage: ./ensure_version_bump.sh v1.9.0
+#
+# Enforces a change in version number for every PEcAn package that has changed
+# since the last tagged release.
+# Output: silent if all changed packages have new versions,
+# else prints a list of packages that need updating and gives instructions
+# to update them.
+# Exit status: 0 if all packages OK, 1 if updates needed
+#
+# Note that it only enforces that the version _differs_ from the release:
+# * It does not catch version decreases. Don't do that!
+# * You can decide what version to bump to.
+# The output recommends adding '.9000' to the existing version (the "dev mode"
+# convention), but this is not enforced and it is OK to have multiple updates
+# per release cycle. In particular, in the runup to a PEcAn release this
+# allows bumping package versions one at a time as we complete their
+# prerelease chores (changelog curation, semver check, etc) rather than
+# needing to bump all packages in a single commit.
+
+# Git tag (or hash or other ref) to compare against.
+# If not given as an argument, defaults to the most recently created tag.
+git_ref=${1:-$(git tag --sort=-creatordate | head -n1)}
+
+# Directory inside which to check every R package.
+# If not given as an argument, defaults to the current working directory.
+check_dir=${2:-.}
+
+
+pkg_changed() {
+ ! git diff --quiet "$git_ref" -- "${1}"
+}
+version_changed() {
+ git diff -U0 "$git_ref" -- "${1}"/DESCRIPTION | grep -q '^+Version: '
+}
+
+
+pkgdirs=$(find "$check_dir" -name DESCRIPTION | xargs dirname | sort)
+
+while IFS= read -r pkg; do
+ if pkg_changed "$pkg" && ! version_changed "$pkg"; then
+    unbumped="$unbumped $pkg"
+ fi
+done <<< "$pkgdirs"
+
+if [[ -n "$unbumped" ]]; then
+ echo "These packages have changed since $git_ref and need a version increment:"
+ echo " $unbumped"
+ echo "Edit the DESCRIPTION file(s) to add '.9000' to the 'Version:' field."
+ exit 1
+fi
diff --git a/scripts/get_orphaned_functions.py b/scripts/get_orphaned_functions.py
index 69133e2ea1a..2724b067687 100644
--- a/scripts/get_orphaned_functions.py
+++ b/scripts/get_orphaned_functions.py
@@ -31,7 +31,7 @@
ENDC = "\033[0m"
# Modules to ignore while checking for orphaned functions
-MODULES_TO_IGNORE = ["DART", "data.mining"]
+MODULES_TO_IGNORE = ["data.mining"]
# Function to get a list of all module names in the 'modules' directory, excluding ignored modules and hidden files/directories
diff --git a/shiny/dbsync/DESCRIPTION b/shiny/dbsync/DESCRIPTION
index 9cb073d559a..58c5181c022 100644
--- a/shiny/dbsync/DESCRIPTION
+++ b/shiny/dbsync/DESCRIPTION
@@ -1,5 +1,6 @@
Type: Shiny
Title: Database Sync Vis
+Version: 0.0.2
License: BSD_3_clause + file LICENSE
Author: Rob Kooper
Tags: PEcAn
diff --git a/shiny/dbsync/app.R b/shiny/dbsync/app.R
index da4f3065247..1557d3562e6 100644
--- a/shiny/dbsync/app.R
+++ b/shiny/dbsync/app.R
@@ -34,25 +34,26 @@ ignored_servers <- c()
# given a IP address lookup geo spatital info
# uses a cache to prevent to many requests (1000 per day)
get_geoip <- function(ip) {
- if (length(geoip) == 0 && file.exists("geoip.json")) {
+ if (length(geoip) == 0 && file.exists(geocache)) {
geoip <<- jsonlite::read_json(geocache, simplifyVector = TRUE)
}
if (! ip %in% geoip$ip) {
print(paste("CACHE MISS", ip))
res <- curl::curl_fetch_memory(paste0("http://free.ipwhois.io/json/", ip))
- if (res$status -- 200) {
+ if (res$status == 200) {
geoloc <- jsonlite::parse_json(rawToChar(res$content))
geoloc[lengths(geoloc) == 0] <- NA
- geoloc <- type.convert(geoloc, as.is = TRUE)
+ geoloc <- type.convert(geoloc, as.is = TRUE)[
+ c("ip", "latitude", "longitude", "city", "region", "country", "org")]
} else {
- geoloc <- list(ip=ip, lat=0, lon=0, city="?", countr="?")
+ geoloc <- list(ip=ip, latitude=0, longitude=0, city="?", region="?", country="?", org="?")
}
if (length(geoip) == 0) {
geoip <<- as.data.frame(geoloc)
} else {
- geoip <<- rbind(geoip, as.data.frame(geoloc))
+ geoip <<- merge(geoip, as.data.frame(geoloc), all = TRUE)
}
- jsonlite::write_json(geoip, geocache)
+ jsonlite::write_json(geoip, geocache, pretty = 2)
}
}
@@ -95,10 +96,10 @@ get_servers <- function() {
locations <- geoip %>%
dplyr::filter(ip %in% servers$ip) %>%
dplyr::arrange(ip) %>%
- dplyr::select("city", "country", "latitude", "longitude")
+ dplyr::select("ip", "city", "country", "latitude", "longitude")
# combine tables
- servers <- cbind(servers, locations)
+ servers <- merge(servers, locations, by = "ip")
# add columns for all sync_ids
servers[, paste0("server_", servers$sync_host_id)] <- NA
@@ -283,7 +284,7 @@ server <- function(input, output, session) {
# create a map of all servers that have a sync_host_id and sync_url
output$map <- renderLeaflet({
leaflet(values$servers) %>%
- addProviderTiles(providers$Stamen.TonerLite,
+ addProviderTiles(providers$Stadia.StamenTonerLite,
options = providerTileOptions(noWrap = TRUE)
) %>%
addMarkers(~longitude, ~latitude,
diff --git a/shiny/dbsync/geoip.json b/shiny/dbsync/geoip.json
index 3632da7f652..65e2a017de0 100644
--- a/shiny/dbsync/geoip.json
+++ b/shiny/dbsync/geoip.json
@@ -1 +1,145 @@
-[{"ip":"128.174.124.54","success":true,"type":"IPv4","continent":"North America","country":"United States","country_code":"US","country_flag":"https://cdn.ipwhois.io/flags/us.svg","country_capital":"Washington","country_phone":1,"country_neighbours":"CA,MX,CU","region":"Illinois","city":"Urbana","latitude":40.1106,"longitude":-88.2073,"asn":"AS38","org":"University of Illinois","isp":"University of Illinois","timezone":"America/Chicago","timezone_name":"Central Standard Time","timezone_dstOffset":0,"timezone_gmtOffset":-21600,"timezone_gmt":"GMT -6:00","currency":"US Dollar","currency_code":"USD","currency_symbol":"$","currency_rates":1,"currency_plural":"US dollars","completed_requests":92},{"ip":"128.197.168.114","success":true,"type":"IPv4","continent":"North America","country":"United States","country_code":"US","country_flag":"https://cdn.ipwhois.io/flags/us.svg","country_capital":"Washington","country_phone":1,"country_neighbours":"CA,MX,CU","region":"Massachusetts","city":"Boston","latitude":42.3601,"longitude":-71.0589,"asn":"AS111","org":"Boston University","isp":"Boston University","timezone":"America/New_York","timezone_name":"Eastern Standard Time","timezone_dstOffset":0,"timezone_gmtOffset":-18000,"timezone_gmt":"GMT -5:00","currency":"US Dollar","currency_code":"USD","currency_symbol":"$","currency_rates":1,"currency_plural":"US dollars","completed_requests":92},{"ip":"130.199.3.21","success":true,"type":"IPv4","continent":"North America","country":"United States","country_code":"US","country_flag":"https://cdn.ipwhois.io/flags/us.svg","country_capital":"Washington","country_phone":1,"country_neighbours":"CA,MX,CU","region":"New York","city":"Bellport","latitude":40.757,"longitude":-72.9393,"asn":"AS43","org":"Brookhaven National Laboratory","isp":"Brookhaven National Laboratory","timezone":"America/New_York","timezone_name":"Eastern Standard Time","timezone_dstOffset":0,"timezone_gmtOffset":-18000,"timezone_gmt":"GMT -5:00","currency":"US 
Dollar","currency_code":"USD","currency_symbol":"$","currency_rates":1,"currency_plural":"US dollars","completed_requests":92},{"ip":"144.92.131.21","success":true,"type":"IPv4","continent":"North America","country":"United States","country_code":"US","country_flag":"https://cdn.ipwhois.io/flags/us.svg","country_capital":"Washington","country_phone":1,"country_neighbours":"CA,MX,CU","region":"Wisconsin","city":"Madison","latitude":43.0731,"longitude":-89.4012,"asn":"AS59","org":"University of Wisconsin Madison","isp":"University of Wisconsin Madison","timezone":"America/Chicago","timezone_name":"Central Standard Time","timezone_dstOffset":0,"timezone_gmtOffset":-21600,"timezone_gmt":"GMT -6:00","currency":"US Dollar","currency_code":"USD","currency_symbol":"$","currency_rates":1,"currency_plural":"US dollars","completed_requests":92},{"ip":"141.142.227.158","success":true,"type":"IPv4","continent":"North America","country":"United States","country_code":"US","country_flag":"https://cdn.ipwhois.io/flags/us.svg","country_capital":"Washington","country_phone":1,"country_neighbours":"CA,MX,CU","region":"Illinois","city":"Urbana","latitude":40.1106,"longitude":-88.2073,"asn":"AS1224","org":"National Center Supercomputing Applications","isp":"National Center for Supercomputing Applications","timezone":"America/Chicago","timezone_name":"Central Standard Time","timezone_dstOffset":0,"timezone_gmtOffset":-21600,"timezone_gmt":"GMT -6:00","currency":"US Dollar","currency_code":"USD","currency_symbol":"$","currency_rates":1,"currency_plural":"US dollars","completed_requests":92},{"ip":"128.174.124.40","success":true,"type":"IPv4","continent":"North America","country":"United States","country_code":"US","country_flag":"https://cdn.ipwhois.io/flags/us.svg","country_capital":"Washington","country_phone":1,"country_neighbours":"CA,MX,CU","region":"Illinois","city":"Urbana","latitude":40.1106,"longitude":-88.2073,"asn":"AS38","org":"University of Illinois","isp":"University of 
Illinois","timezone":"America/Chicago","timezone_name":"Central Standard Time","timezone_dstOffset":0,"timezone_gmtOffset":-21600,"timezone_gmt":"GMT -6:00","currency":"US Dollar","currency_code":"USD","currency_symbol":"$","currency_rates":1,"currency_plural":"US dollars","completed_requests":92},{"ip":"128.196.65.37","success":true,"type":"IPv4","continent":"North America","country":"United States","country_code":"US","country_flag":"https://cdn.ipwhois.io/flags/us.svg","country_capital":"Washington","country_phone":1,"country_neighbours":"CA,MX,CU","region":"Arizona","city":"Tucson","latitude":32.2217,"longitude":-110.9265,"asn":"AS1706","org":"University of Arizona","isp":"University of Arizona","timezone":"America/Phoenix","timezone_name":"Mountain Standard Time","timezone_dstOffset":0,"timezone_gmtOffset":-25200,"timezone_gmt":"GMT -7:00","currency":"US Dollar","currency_code":"USD","currency_symbol":"$","currency_rates":1,"currency_plural":"US dollars","completed_requests":92},{"ip":"193.166.223.38","success":true,"type":"IPv4","continent":"Europe","country":"Finland","country_code":"FI","country_flag":"https://cdn.ipwhois.io/flags/fi.svg","country_capital":"Helsinki","country_phone":358,"country_neighbours":"NO,RU,SE","region":"Uusimaa","city":"Helsinki","latitude":60.1699,"longitude":24.9384,"asn":"AS1741","org":"FUNET","isp":"CSC - Tieteen tietotekniikan keskus Oy","timezone":"Europe/Helsinki","timezone_name":"Eastern European Standard Time","timezone_dstOffset":0,"timezone_gmtOffset":7200,"timezone_gmt":"GMT +2:00","currency":"Euro","currency_code":"EUR","currency_symbol":"€","currency_rates":0.9195,"currency_plural":"euros","completed_requests":92,"continent_code":"EU"},{"ip":"128.210.26.15","success":true,"type":"IPv4","continent":"North America","country":"United 
States","country_code":"US","country_flag":"https://cdn.ipwhois.io/flags/us.svg","country_capital":"Washington","country_phone":1,"country_neighbours":"CA,MX,CU","region":"Indiana","city":"West Lafayette","latitude":40.4259,"longitude":-86.9081,"asn":"AS17","org":"Purdue University","isp":"Purdue University","timezone":"America/New_York","timezone_name":"Eastern Standard Time","timezone_dstOffset":0,"timezone_gmtOffset":-18000,"timezone_gmt":"GMT -5:00","currency":"US Dollar","currency_code":"USD","currency_symbol":"$","currency_rates":1,"currency_plural":"US dollars","completed_requests":100},{"ip":"141.142.227.159","success":true,"type":"IPv4","continent":"North America","country":"United States","country_code":"US","country_flag":"https://cdn.ipwhois.io/flags/us.svg","country_capital":"Washington","country_phone":1,"country_neighbours":"CA,MX,CU","region":"Illinois","city":"Urbana","latitude":40.1106,"longitude":-88.2073,"asn":"AS1224","org":"National Center Supercomputing Applications","isp":"National Center for Supercomputing Applications","timezone":"America/Chicago","timezone_name":"Central Standard Time","timezone_dstOffset":0,"timezone_gmtOffset":-21600,"timezone_gmt":"GMT -6:00","currency":"US Dollar","currency_code":"USD","currency_symbol":"$","currency_rates":1,"currency_plural":"US dollars","completed_requests":100},{"ip":"130.127.204.30","success":true,"type":"IPv4","continent":"North America","country":"United States","country_code":"US","country_flag":"https://cdn.ipwhois.io/flags/us.svg","country_capital":"Washington","country_phone":1,"country_neighbours":"CA,MX,CU","region":"South Carolina","city":"Clemson","latitude":34.6834,"longitude":-82.8374,"asn":"AS12148","org":"Clemson University","isp":"Clemson University","timezone":"America/New_York","timezone_name":"Eastern Standard Time","timezone_dstOffset":0,"timezone_gmtOffset":-18000,"timezone_gmt":"GMT -5:00","currency":"US 
Dollar","currency_code":"USD","currency_symbol":"$","currency_rates":1,"currency_plural":"US dollars","completed_requests":103},{"ip":"131.243.130.42","success":true,"type":"IPv4","continent":"North America","country":"United States","country_code":"US","country_flag":"https://cdn.ipwhois.io/flags/us.svg","country_capital":"Washington","country_phone":1,"country_neighbours":"CA,MX,CU","region":"California","city":"Berkeley","latitude":37.8716,"longitude":-122.2727,"asn":"AS16","org":"Lawrence Berkeley National Laboratory","isp":"Lawrence Berkeley National Laboratory","timezone":"America/Los_Angeles","timezone_name":"Pacific Standard Time","timezone_dstOffset":0,"timezone_gmtOffset":-28800,"timezone_gmt":"GMT -8:00","currency":"US Dollar","currency_code":"USD","currency_symbol":"$","currency_rates":1,"currency_plural":"US dollars","completed_requests":103},{"ip":"128.46.104.5","success":true,"type":"IPv4","continent":"North America","country":"United States","country_code":"US","country_flag":"https://cdn.ipwhois.io/flags/us.svg","country_capital":"Washington","country_phone":1,"country_neighbours":"CA,MX,CU","region":"Indiana","city":"West Lafayette","latitude":40.4259,"longitude":-86.9081,"asn":"AS17","org":"Purdue University","isp":"Purdue University","timezone":"America/New_York","timezone_name":"Eastern Standard Time","timezone_dstOffset":0,"timezone_gmtOffset":-18000,"timezone_gmt":"GMT -5:00","currency":"US Dollar","currency_code":"USD","currency_symbol":"$","currency_rates":1,"currency_plural":"US dollars","completed_requests":105},{"ip":"54.85.105.29","success":true,"type":"IPv4","continent":"North America","country":"United States","country_code":"US","country_flag":"https://cdn.ipwhois.io/flags/us.svg","country_capital":"Washington","country_phone":1,"country_neighbours":"CA,MX,CU","region":"Virginia","city":"Ashburn","latitude":39.0438,"longitude":-77.4874,"asn":"AS14618","org":"Amazon.com, Inc.","isp":"Amazon.com, 
Inc.","timezone":"America/New_York","timezone_name":"Eastern Standard Time","timezone_dstOffset":0,"timezone_gmtOffset":-18000,"timezone_gmt":"GMT -5:00","currency":"US Dollar","currency_code":"USD","currency_symbol":"$","currency_rates":1,"currency_plural":"US dollars","completed_requests":105}]
+[
+ {
+ "ip": "128.174.124.40",
+ "country": "United States",
+ "city": "Urbana",
+ "latitude": 40.1106,
+ "longitude": -88.2073,
+ "region": "Illinois",
+ "org": "University of Illinois"
+ },
+ {
+ "ip": "128.174.124.54",
+ "country": "United States",
+ "city": "Urbana",
+ "latitude": 40.1106,
+ "longitude": -88.2073,
+ "region": "Illinois",
+ "org": "University of Illinois"
+ },
+ {
+ "ip": "128.196.65.37",
+ "country": "United States",
+ "city": "Tucson",
+ "latitude": 32.2217,
+ "longitude": -110.9265,
+ "region": "Arizona",
+ "org": "University of Arizona"
+ },
+ {
+ "ip": "128.197.168.114",
+ "country": "United States",
+ "city": "Boston",
+ "latitude": 42.3601,
+ "longitude": -71.0589,
+ "region": "Massachusetts",
+ "org": "Boston University"
+ },
+ {
+ "ip": "128.197.23.76",
+ "country": "United States",
+ "city": "Boston",
+ "latitude": 42.3601,
+ "longitude": -71.0589
+ },
+ {
+ "ip": "128.210.26.15",
+ "country": "United States",
+ "city": "West Lafayette",
+ "latitude": 40.4259,
+ "longitude": -86.9081,
+ "region": "Indiana",
+ "org": "Purdue University"
+ },
+ {
+ "ip": "128.46.104.5",
+ "country": "United States",
+ "city": "West Lafayette",
+ "latitude": 40.4259,
+ "longitude": -86.9081,
+ "region": "Indiana",
+ "org": "Purdue University"
+ },
+ {
+ "ip": "130.127.204.30",
+ "country": "United States",
+ "city": "Clemson",
+ "latitude": 34.6834,
+ "longitude": -82.8374,
+ "region": "South Carolina",
+ "org": "Clemson University"
+ },
+ {
+ "ip": "130.199.3.21",
+ "country": "United States",
+ "city": "Bellport",
+ "latitude": 40.757,
+ "longitude": -72.9393,
+ "region": "New York",
+ "org": "Brookhaven National Laboratory"
+ },
+ {
+ "ip": "131.243.130.42",
+ "country": "United States",
+ "city": "Berkeley",
+ "latitude": 37.8716,
+ "longitude": -122.2727,
+ "region": "California",
+ "org": "Lawrence Berkeley National Laboratory"
+ },
+ {
+ "ip": "141.142.218.10",
+ "country": "United States",
+ "city": "Champaign",
+ "latitude": 40.1164,
+ "longitude": -88.2434,
+ "region": "Illinois",
+ "org": "University of Illinois"
+ },
+ {
+ "ip": "141.142.227.158",
+ "country": "United States",
+ "city": "Urbana",
+ "latitude": 40.1106,
+ "longitude": -88.2073,
+ "region": "Illinois",
+ "org": "National Center Supercomputing Applications"
+ },
+ {
+ "ip": "141.142.227.159",
+ "country": "United States",
+ "city": "Urbana",
+ "latitude": 40.1106,
+ "longitude": -88.2073,
+ "region": "Illinois",
+ "org": "National Center Supercomputing Applications"
+ },
+ {
+ "ip": "144.92.131.21",
+ "country": "United States",
+ "city": "Madison",
+ "latitude": 43.0731,
+ "longitude": -89.4012,
+ "region": "Wisconsin",
+ "org": "University of Wisconsin Madison"
+ },
+ {
+ "ip": "193.166.223.38",
+ "country": "Finland",
+ "city": "Helsinki",
+ "latitude": 60.1699,
+ "longitude": 24.9384,
+ "region": "Uusimaa",
+ "org": "FUNET"
+ },
+ {
+ "ip": "54.85.105.29",
+ "country": "United States",
+ "city": "Ashburn",
+ "latitude": 39.0438,
+ "longitude": -77.4874,
+ "region": "Virginia",
+ "org": "Amazon.com, Inc."
+ }
+]
+
diff --git a/web/04-runpecan.php b/web/04-runpecan.php
index 039ac52654e..24694681c89 100644
--- a/web/04-runpecan.php
+++ b/web/04-runpecan.php
@@ -479,6 +479,10 @@
fwrite($fh, " {$url}" . PHP_EOL);
fwrite($fh, " " . PHP_EOL);
}
+fwrite($fh, " " . PHP_EOL);
+fwrite($fh, " " . PHP_EOL);
+fwrite($fh, " paired" . PHP_EOL);
+fwrite($fh, " " . PHP_EOL);
fwrite($fh, "" . PHP_EOL);
fclose($fh);
diff --git a/web/common.php b/web/common.php
index 56fab9b0ef6..64ed304d1e6 100644
--- a/web/common.php
+++ b/web/common.php
@@ -14,7 +14,7 @@ function get_footer() {
Terrestrial Ecosystems, Department of Energy (ARPA-E #DE-AR0000594 and #DE-AR0000598),
Department of Defense, the Arizona Experiment Station, the Energy Biosciences Institute,
and an Amazon AWS in Education Grant.
- PEcAn Version 1.9.0";
+ PEcAn Version 1.10.0";
}
function whoami() {
diff --git a/web/workflow.R b/web/workflow.R
index ea5ae7d9510..c0cf7bcbb0f 100755
--- a/web/workflow.R
+++ b/web/workflow.R
@@ -134,6 +134,19 @@ if (PEcAn.utils::status.check("MODEL") == 0) {
PEcAn.utils::status.end()
}
+# Save text summaries of the variables in each *.nc output?
+# Valid write modes are
+# - "paired": save `*.nc.var` beside each `*.nc`
+# - "collected": save one `nc_vars.txt` in the outdir,
+# containing all vars that appear in any nc in the directory
+if (!is.null(settings$nc_varfile_mode)) {
+ PEcAn.utils::nc_write_varfiles(
+ nc_dir = settings$outdir,
+    write_mode = settings$nc_varfile_mode
+ )
+}
+
+
# Get results of model runs
if (PEcAn.utils::status.check("OUTPUT") == 0) {
PEcAn.utils::status.start("OUTPUT")