From 138c03cc091606f569e6aaf2c0e73471453f119d Mon Sep 17 00:00:00 2001 From: Martin Kilbinger Date: Wed, 12 Feb 2025 08:18:19 +0100 Subject: [PATCH 1/6] added (back) files to run make cat with sm=0 --- example/cfis/config_make_cat_psfex_nosm.ini | 76 ++++++++++++ scripts/sh/curl_canfar_local.sh | 52 ++++---- scripts/sh/functions.sh | 23 ++-- scripts/sh/init_run_exclusive_canfar.sh | 111 +++++++----------- scripts/sh/job_sp_canfar.bash | 39 +++--- .../modules/make_cat_package/make_cat.py | 67 ++++++----- shapepipe/modules/make_cat_runner.py | 37 ++++-- 7 files changed, 244 insertions(+), 161 deletions(-) create mode 100644 example/cfis/config_make_cat_psfex_nosm.ini diff --git a/example/cfis/config_make_cat_psfex_nosm.ini b/example/cfis/config_make_cat_psfex_nosm.ini new file mode 100644 index 00000000..1983f91c --- /dev/null +++ b/example/cfis/config_make_cat_psfex_nosm.ini @@ -0,0 +1,76 @@ +# ShapePipe post-run configuration file: create final catalogs, with +# no spread model on input + + +## Default ShapePipe options +[DEFAULT] + +# verbose mode (optional), default: True, print messages on terminal +VERBOSE = True + +# Name of run (optional) default: shapepipe_run +RUN_NAME = run_sp_Mc + +# Add date and time to RUN_NAME, optional, default: True +; RUN_DATETIME = False + + +## ShapePipe execution options +[EXECUTION] + +# Module name, single string or comma-separated list of valid module runner names +MODULE = make_cat_runner + +# Parallel processing mode, SMP or MPI +MODE = SMP + + +## ShapePipe file handling options +[FILE] + +# Log file master name, optional, default: shapepipe +LOG_NAME = log_sp + +# Runner log file name, optional, default: shapepipe_runs +RUN_LOG_NAME = log_run_sp + +# Input directory, containing input files, single string or list of names with length matching FILE_PATTERN +INPUT_DIR = . 
+ +# Output directory +OUTPUT_DIR = ./output + + +## ShapePipe job handling options +[JOB] + +# Batch size of parallel processing (optional), default is 1, i.e. run all jobs in serial +SMP_BATCH_SIZE = 8 + +# Timeout value (optional), default is None, i.e. no timeout limit applied +TIMEOUT = 96:00:00 + + +## Module options + +[MAKE_CAT_RUNNER] + +# Input directory, containing input files, single string or list of names with length matching FILE_PATTERN +INPUT_DIR = run_sp_tile_Sx:sextractor_runner, last:psfex_interp_runner, last:merge_sep_cats_runner + +# Input file pattern(s), list of strings with length matching number of expected input file types +# Cannot contain wild cards +FILE_PATTERN = sexcat, galaxy_psf, ngmix + +# FILE_EXT (optional) list of string extensions to identify input files +FILE_EXT = .fits, .sqlite, .fits + +# Numbering convention, string that exemplifies a numbering pattern. +# Matches input single exposures (with 'p' removed) +# Needs to be given in this section, will be updated in module +# sections below +NUMBERING_SCHEME = -000-000 + +SM_DO_CLASSIFICATION = False + +SHAPE_MEASUREMENT_TYPE = ngmix diff --git a/scripts/sh/curl_canfar_local.sh b/scripts/sh/curl_canfar_local.sh index d712fb8b..9722eddb 100755 --- a/scripts/sh/curl_canfar_local.sh +++ b/scripts/sh/curl_canfar_local.sh @@ -19,7 +19,6 @@ N_SMP=1 fix=0 version="1.1" cmd_remote="$HOME/shapepipe/scripts/sh/init_run_exclusive_canfar.sh" -batch=30 batch_max=200 dry_run=0 mh_local=0 @@ -27,8 +26,7 @@ sp_local=0 test_only=0 debug_out="-1" scratch="-1" - -script_version=1.1 +sm=1 pat="- " @@ -47,18 +45,18 @@ usage="Usage: $(basename "$0") -j JOB -[e ID |-f file_IDs] -k KIND [OPTIONS] \tmerged header file local (MH=0) or global (MH=1); default is $mh_local\n -s, --sp_local SP\n \tsplit local run local (SP=1) or global (SP=0); default is SP=$sp_local\n + --sm SM\n + \tWith (SM=1; default) or without (SM=0) spread model input\n -N, --N_SMP N_SMOp\n \tnumber of jobs (SMP mode only), 
default=$N_SMP\n -F, --fix FIX\n - \tfix missing data (re-download tile, unzip) for FIX=1; default is $fix\n + \tfix missing data (re-download tile, unzip) for FIX=1; default is $fix\ -V, --version\n \tversion of docker image, default='$version'\n -C, --command_remote\n \tremote command to run on canfar, default='$cmd_remote'\n -S, --scratch\n \tprocessing scratch directory, default is None ($scratch)\n - -B, --batch\n - \tbatch size = size of subsamples if number of jobs > batch_max\n -b, --batch_max\n \tmaximum batch size = number of jobs run simultaneously, default=$batch_max\n --debug_out PATH\n @@ -98,6 +96,10 @@ while [ $# -gt 0 ]; do sp_local="$2" shift ;; + --sm) + sm="$2" + shift + ;; -e|--exclusive) ID="$2" shift @@ -118,14 +120,6 @@ while [ $# -gt 0 ]; do scratch="$2" shift ;; - -V|--version) - version="$2" - shift - ;; - -B|--batch) - batch="$2" - shift - ;; -b|--batch_max) batch_max="$2" shift @@ -175,16 +169,15 @@ if [ "$dry_run" != 0 ] && [ "$dry_run" != 1 ] && [ "$dry_run" != 2 ]; then fi if [ "$debug_out" != "-1" ]; then - echo "${pat}Starting $(basename "$0") $test_arg" >> $debug_out + echo "${pat}Starting $(basename "$0")" >> $debug_out echo "${pat}curl ID=$ID" >> $debug_out echo ${pat}`date` >> $debug_out fi -#. /opt/conda/etc/profile.d/conda.sh -source activate shapepipe +. /opt/conda/etc/profile.d/conda.sh +conda activate shapepipe if [ "$debug_out" != "-1" ]; then echo "${pat}conda prefix = ${CONDA_PREFIX}" >> $debug_out - echo "${pat}script version = ${script_version}" >> $debug_out fi # command line arguments for remote script: @@ -201,10 +194,11 @@ function submit_batch() { for ID in `cat $path`; do IDt=`echo $ID | tr "." 
"-"` my_name="SP-${patch}-J${job}-${IDt}" - call_curl $my_name $job $psf $ID $N_SMP $dry_run $dir $mh_local $sp_local $debug_out $fix $scratch $test_arg + call_curl $my_name $job $psf $ID $N_SMP $dry_run $dir $mh_local $sp_local $sm $debug_out $fix $scratch $test_arg done } +batch=50 if [ "$batch" -ge "$batch_max" ]; then ((batch=batch_max/2)) echo "Reducing batch size to $batch" @@ -226,7 +220,7 @@ if [ "$dry_run" == 2 ]; then for ID in `cat $file_IDs`; do IDt=`echo $ID | tr "." "-"` my_name="SP-${patch}-J${job}-${IDt}" - call_curl $my_name $job $psf $ID $N_SMP $dry_run $dir $mh_local $sp_local $debug_out $fix $scratch $test_arg + call_curl $my_name $job $psf $ID $N_SMP $dry_run $dir $mh_local $sp_local $sm $debug_out $fix $scratch $test_arg done else @@ -234,7 +228,7 @@ if [ "$dry_run" == 2 ]; then # Submit image (dry run = 2) IDt=`echo $ID | tr "." "-"` my_name="SP-${patch}-J${job}-${IDt}" - call_curl $my_name $job $psf $ID $N_SMP $dry_run $dir $mh_local $sp_local $debug_out $fix $scratch $test_arg + call_curl $my_name $job $psf $ID $N_SMP $dry_run $dir $mh_local $sp_local $sm $debug_out $fix $scratch $test_arg fi @@ -256,20 +250,20 @@ else echo "Split '$file_IDs' into $n_split batches of size $batch" count=1 - n_running=`stats_jobs_canfar.sh` + n_queued=`stats_jobs_canfar.sh -w all` for batch in $prefix*; do - echo "Number of running jobs = $n_running" + echo "Number of queued jobs = $n_queued" echo "Submitting batch $batch ($count/$n_split)" echo -ne "\033]0;curl patch=$patch job=$job $count/$n_split\007" submit_batch $batch ((count=count+1)) - n_running=`stats_jobs_canfar.sh` + n_queued=`stats_jobs_canfar.sh -w all` - while [ "$n_running" -gt "$n_thresh" ]; do - echo "Wait for #jobs = $n_running jobs to go < $n_thresh ..." + while [ "$n_queued" -gt "$n_thresh" ]; do + echo "Wait for #jobs = $n_queued jobs to go < $n_thresh ..." 
sleep $sleep - n_running=`stats_jobs_canfar.sh` + n_queued=`stats_jobs_canfar.sh -w all` done done @@ -287,7 +281,7 @@ else # Submit image IDt=`echo $ID | tr "." "-"` my_name="SP-${patch}-J${job}-${IDt}" - call_curl $my_name $job $psf $ID $N_SMP $dry_run $dir $mh_local $sp_local $debug_out $fix $scratch $test_arg + call_curl $my_name $job $psf $ID $N_SMP $dry_run $dir $mh_local $sp_local $sm $debug_out $fix $scratch $test_arg fi @@ -296,5 +290,5 @@ fi echo "Done $(basename "$0")" if [ "$debug_out" != "-1" ]; then - echo "${pat}End $(basename "$0")" $test_arg >> $debug_out + echo "${pat}End $(basename "$0")" >> $debug_out fi diff --git a/scripts/sh/functions.sh b/scripts/sh/functions.sh index e06df3cc..324864f2 100644 --- a/scripts/sh/functions.sh +++ b/scripts/sh/functions.sh @@ -1,4 +1,4 @@ - # Global variables +# Global variables SSL=~/.ssl/cadcproxy.pem SESSION=https://ws-uv.canfar.net/skaha/v0/session IMAGE=images.canfar.net/unions/shapepipe @@ -26,16 +26,16 @@ function call_curl() { my_dir=$7 my_mh_local=$8 my_sp_local=$9 - my_debug_out=${10} - my_fix=${11} - my_scratch=${12} - my_test_arg=${13} + my_sm=${10} + my_debug_out=${11} + my_fix=${12} + my_scratch=${13} + my_test_arg=${14} - my_arg="-j $my_job -p $my_psf -e $my_ID -N $my_N_SMP -n $my_dry_run -d $my_dir -m $my_mh_local -s $my_sp_local --debug_out $my_debug_out -F $my_fix -S $my_scratch $my_test_arg" + my_arg="-j $my_job -p $my_psf -e $my_ID -N $my_N_SMP -n $my_dry_run -d $my_dir -m $my_mh_local -s $my_sp_local --sm $my_sm --debug_out $my_debug_out -F $my_fix -S $my_scratch $my_test_arg" if [ "$my_dry_run" == "0" ]; then - - my_session=`curl -E $SSL "$SESSION?$RESOURCES" -d "image=$IMAGE:$version" -d "name=${my_name}" -d "cmd=$cmd_remote" --data-urlencode "args=${my_arg[@]}"` + my_session=`curl -E $SSL "$SESSION?$RESOURCES" -d "image=$IMAGE:$version" -d "name=${my_name}" -d "cmd=$cmd_remote" --data-urlencode "args=${my_arg[@]}" &> /dev/null` fi cmd=("curl" "-E" "$SSL" "$SESSION?$RESOURCES" "-d" 
"image=$IMAGE:$version" "-d" "name=${my_name}" "-d" "cmd=$cmd_remote" "--data-urlencode" "args=\"${my_arg}\"") @@ -44,7 +44,7 @@ function call_curl() { echo "${pat}call_curl $my_name $my_arg" >> $my_debug_out echo "${pat}Running ${cmd[@]} (dry_run=$my_dry_run)" >> $my_debug_out fi - echo "${cmd[@]} (dry_run=$my_dry_run)" + #echo "${cmd[@]} (dry_run=$my_dry_run)" # Running $cmd does not work due to unknown problems with passing of args @@ -79,7 +79,7 @@ function command () { res=$? if [ "$debug_out" != "-1" ]; then - echo "${pat}exit code = $res" >> $debug_out + echo "${pat}result=$res" >> $debug_out fi if [ $VERBOSE == 1 ]; then @@ -89,6 +89,9 @@ function command () { echo -e "${RED}error, return value = $res${NC}" if [ $STOP == 1 ]; then echo "${RED}exiting $(basename "$0")', error in command '$cmd'${NC}" + if [ "$debug_out" != "-1" ]; then + echo "${pat}${RED}exiting $(basename "$0")', error in command '$cmd'${NC}" >> $debug_out + fi exit $res else echo "${RED}continuing '$(basename "$0")', error in command '$cmd'${NC}" diff --git a/scripts/sh/init_run_exclusive_canfar.sh b/scripts/sh/init_run_exclusive_canfar.sh index ccd02a1d..815dc585 100755 --- a/scripts/sh/init_run_exclusive_canfar.sh +++ b/scripts/sh/init_run_exclusive_canfar.sh @@ -13,6 +13,7 @@ debug_out=-1 scratch=-1 fix=0 test_only=0 +sm=1 # mh_local is 0 (1) if merge_header_runner is run on all exposures, # which is standard so far (run on exposures of given tile only; new) @@ -39,6 +40,8 @@ usage="Usage: $(basename "$0") -j JOB -e ID -k KIND [OPTIONS] \tmerge header file local (MH=1) or global (MH=0); default is $mh_local\n -s, --sp_local SP\n \tsplit local run local (SP=1) or global (SP=r0wwdefault is $sp_local\n + --sm SM\n + \tWith (SM=1; default) or without (SM=0) spread model input\n -N, --N_SMP N_SMOp\n \tnumber of jobs (SMP mode only), default from original config files\n -d, --directory\n @@ -88,6 +91,10 @@ while [ $# -gt 0 ]; do sp_local="$2" shift ;; + --sm) + sm="$2" + shift + ;; 
-N|--N_SMP) N_SMP="$2" shift @@ -139,6 +146,7 @@ function message() { # Init message +message "test=$test_only" $debug_out -1 if [ "$test_only" == "1" ]; then msg="init_run_exclusive.py script test mode, exiting." ex=0 @@ -202,19 +210,10 @@ else message "not running in dry run mode" $debug_out -1 fi -source activate shapepipe -if [ "$debug_out" != "-1" ]; then - echo "${pat}conda prefix = ${CONDA_PREFIX}" >> $debug_out -fi - CONDA_PREFIX=$HOME/.conda/envs/shapepipe PATH=$PATH:$CONDA_PREFIX/bin -message "conda prefix = ${CONDA_PREFIX}" $debug_out -1 -message "HOME = ${HOME}" $debug_out -1 -message "path = ${PATH}" $debug_out -1 cd $dir -message "pwd=$pwd" $debug_out -1 if [ ! -d ${kind}_runs ]; then command "mkdir ${kind}_runs" $dry_run @@ -358,7 +357,7 @@ if [ $do_job != 0 ] && [ "$sp_local" == "1" ]; then # run local Sp if not done already; works only with mh_local=1; this step needs to be done # before following mh_local=1 steps message "run local sp" $debug_out -1 - command "rm -rf run_sp_GitFeGie*/get_images_runner_run_2" $dry_run + #command "rm -rf run_sp_GitFeGie*/get_images_runner_run_2" $dry_run command "rm -rf run_sp_Gie*" $dry_run command "rm -rf run_sp_exp_Sp*" $dry_run @@ -411,10 +410,6 @@ if [ $do_job != 0 ] && [ "$sp_local" == "1" ]; then fi if [ "$kind" == "tile" ] && [ "$sp_local" == "1" ]; then - # Link to exposure outputs (runs 2 and 32) - - # Remove previous split exp dir -rm -r run_sp_exp_Sp_shdu cd ../../.. 
command "link_to_exp_for_tile.py -t $ID -i tile_runs -I exp_runs -s $sp_local" $dry_run cd tile_runs/$ID @@ -439,6 +434,7 @@ else message "ID needs to be given (option -e) for mh_local" $debug_out 6 fi + # Check and remove symbolic (global) mh file link if [ -L log_exp_headers.sqlite ]; then # Local Mh and symlink -> remove previous link to # (potentially incomplete) global mh file @@ -448,6 +444,17 @@ else message "no mh link found" $debug_out -1 fi + # Check size of existing header file + if [ -e log_exp_headers.sqlite ]; then + size=$(stat -c %s log_exp_headers.sqlite) + if (( size > 15000 )); then + message "Found valid local mh file, continuing" $debug_out -1 + else + message "Existing local mh file looks invalid, deleting" $debug_out -1 + rm -f log_exp_headers.sqlite + fi + fi + if [ ! -e log_exp_headers.sqlite ]; then message "Creating local mh file" $debug_out -1 @@ -478,58 +485,23 @@ fi # Update links to exposure run directories, which were created in job 32 (( do_job = $job & 64 )) if [[ $do_job != 0 ]]; then - - # MKDEBUG NEW (04/01/2025): this script was already run earlier with -s 1 - #cd ../../.. - #command "link_to_exp_for_tile.py -t $ID -i tile_runs -I exp_runs -s $sp_local" $dry_run - #cd ${kind}_runs/$ID/output - - # Remove previous runs of this job - rm -rf run_sp_tile_PsViSmVi* -fi - -(( do_job = $job & 128 )) -if [[ $do_job != 0 ]]; then - - echo - - cat_ngmix="run_sp_tile_ngmix_Ng1u/ngmix_runner/output/ngmix-*.fits" - dir_ngmix_prev="run_sp_tile_ngmix_Ng1u_prev/ngmix_runner/output" - cat_ngmix_prev="$dir_ngmix_prev/ngmix-*.fits" - - # Check whether ngmix output exists - if [ -e $cat_ngmix ]; then - message "ngmix output catalogue exists" $debug_out -1 - - # Check whether previous ngmix directory and output cat exist - exists="1" - if [ ! -d $dir_ngmix_prev ]; then - exists="0" - elif [ ! 
-e "$cat_ngmix_prev" ]; then - exists="1" - fi - if [ "$exists" == "0" ]; then - message "Moving to previous batch-save dir (does not exist yet)" $debug_out -1 - command "mkdir -p $dir_ngmix_prev" $dry_run - command "mv $cat_ngmix $dir_ngmix_prev" $dry_run - else - # Compare file sizes - size_cat_ngmix=$(stat -c%s $cat_ngmix) - size_cat_ngmix_prev=$(stat -c%s $cat_ngmix_prev) - if [ "$size_cat_ngmix" -gt "$size_cat_ngmix_prev" ]; then - message "Moving to batch-save dir, overwriting smaller batch-save cat" $debug_out -1 - command "mv $cat_ngmix $dir_ngmix_prev" $dry_run - else - message "Previous batch-save dir not smaller, removing ngmix output" $debug_out -1 - command "rm $cat_ngmix" $dry_run - fi - fi - else - # Whether or not previous ngmix exists, job_sp_canfar will handle it - message "No ngmix output exists, continuing..." $debug_out -1 - fi - - echo + if [ "$kind" == "tile" ]; then + cd ../../.. + command "link_to_exp_for_tile.py -t $ID -i tile_runs -I exp_runs -s $sp_local" $dry_run + cd ${kind}_runs/$ID/output + + # Remove duplicate job-16 runs (tile detection) + # New (P8) commented + #n_16=`ls -rt1d run_sp_tile_Sx_* | wc -l` + #if [ "$n_16" != "1" ]; then + #n_remove="$(($n_16-1))" + #echo "removing $n_remove duplicate old job-16 runs" + #command "rm -rf `ls -rt1d run_sp_tile_Sx_* | head -$n_remove`" $dry_run + #fi + + # Remove previous runs of this job + rm -rf run_sp_tile_PsViSmVi* + fi fi (( do_job = $job & 256 )) @@ -537,6 +509,7 @@ if [[ $do_job != 0 ]]; then # Remove previous runs of this job rm -rf run_sp_Ms_20??_* + fi (( do_job = $job & 512 )) @@ -572,11 +545,13 @@ if [ "$scratch" != "-1" ]; then command "cd $scratch/${kind}_runs/$ID" $dry_run fi -command "job_sp_canfar.bash -p psfex -j $job -e $ID --n_smp $N_SMP --nsh_jobs $N_SMP --debug_out $debug_out " $dry_run +command "job_sp_canfar.bash -p psfex -j $job -e $ID --n_smp $N_SMP --nsh_jobs $N_SMP --debug_out $debug_out --sm $sm " $dry_run if [ "$scratch" != "-1" ]; then cd ../.. 
- if [ "$job" == "32" ]; then + if [ "$job" == "16" ]; then + command "mv ${kind}_runs/$ID/output/run_sp_Sx_* $dir/${kind}_runs/$ID/output" $dry_run + elif [ "$job" == "32" ]; then command "mv ${kind}_runs/$ID/output/run_sp_exp_SxSe* $dir/${kind}_runs/$ID/output" $dry_run elif [ "$job" == "64" ]; then command "mv ${kind}_runs/$ID/output/run_sp_tile_PsViSm** $dir/${kind}_runs/$ID/output" $dry_run diff --git a/scripts/sh/job_sp_canfar.bash b/scripts/sh/job_sp_canfar.bash index d6a62f31..54036c93 100755 --- a/scripts/sh/job_sp_canfar.bash +++ b/scripts/sh/job_sp_canfar.bash @@ -22,6 +22,7 @@ results='cosmostat/kilbinger/results_v2' n_smp=-1 nsh_jobs=8 debug_out=-1 +sm=1 pat="--- " @@ -48,6 +49,8 @@ usage="Usage: $(basename "$0") [OPTIONS] [TILE_ID] -s, --star_cat_for_mask\n \tcatalogue for masking bright stars, allowed are 'onthefly', 'save',\n \tdefault is '${star_cat_for_mask}'\n + --sm SM\n + \tWith (SM=1; default) or without (SM=0) spread model input\n -e, --exclusive ID\n \texclusive input filer number string ID (default: None)\n -o, --output_dir\n @@ -96,6 +99,10 @@ while [ $# -gt 0 ]; do star_cat_for_mask="$2" shift ;; + --sm) + sm="$2" + shift + ;; -e|--exclusive) exclusive="$2" shift @@ -141,6 +148,7 @@ if [ "$debug_out" != "-1" ]; then echo "${pat}Starting $(basename "$0")" >> $debug_out fi +CONDA_PREFIX=/arc/home/kilbinger/.conda/envs/shapepipe PATH=$PATH:$CONDA_PREFIX/bin # For tar archives. 
TODO: Should be unique to each job @@ -196,8 +204,7 @@ function command () { echo "${pat}pwd = `pwd`" >> $debug_out echo "${pat}SP_RUN = $SP_RUN" >> $debug_out echo "${pat}SP_CONFIG = $SP_CONFIG" >> $debug_out - echo "${pat}pwd = `pwd`" >> $debug_out - fi + fi if [ $# == 2 ]; then if [ $VERBOSE == 1 ]; then @@ -214,7 +221,7 @@ function command () { echo "$str: running '$cmd $4 \"$5 $6\"'" fi if [ "$debug_out" != "-1" ]; then - echo "${pat}Running[2] $cmd $4 \"$5 $6\"" >> $debug_out + echo "${pat}Running $cmd $4 \"$5 $6\"" >> $debug_out fi $cmd $4 "$5 $6" @@ -228,7 +235,7 @@ function command () { fi if [ $VERBOSE == 1 ]; then - if [ "$res" == "0" ]; then + if [ $res == 0 ]; then echo -e "${GREEN}success, return value = $res${NC}" else echo -e "${RED}error, return value = $res${NC}" @@ -332,11 +339,6 @@ function update_config() { ### Start ### -command_sp "source activate shapepipe" "Activate conda shapepipe env" -if [ "$debug_out" != "-1" ]; then - echo "${pat}conda prefix = ${CONDA_PREFIX}" >> $debug_out -fi -CONDA_PREFIX=/arc/home/kilbinger/.conda/envs/shapepipe echo "Start processing" # Create input and output directories @@ -460,7 +462,6 @@ if [[ $do_job != 0 ]]; then fi ## Process tiles up to shape measurement -## MKDEBUG new 14/01/25: remove spread_model_runner (( do_job = $job & 64 )) if [[ $do_job != 0 ]]; then @@ -468,8 +469,8 @@ if [[ $do_job != 0 ]]; then letter=${psf:0:1} Letter=${letter^} command_cfg_shapepipe \ - "config_tile_${Letter}iViVi_canfar.ini" \ - "Run shapepipe (tile PsfInterp=$Letter}: up to ngmix" \ + "config_tile_${Letter}iViSmVi_canfar.ini" \ + "Run shapepipe (tile PsfInterp=$Letter}: up to ngmix+galsim)" \ $n_smp \ $exclusive @@ -540,12 +541,11 @@ if [[ $do_job != 0 ]]; then perl -ane \ 's/(N_SPLIT_MAX =) X/$1 '$nsh_jobs'/; print' \ > $SP_CONFIG_MOD/config_merge_sep_cats.ini - + ### Merge separated shapes catalogues command_sp \ "shapepipe_run -c $SP_CONFIG_MOD/config_merge_sep_cats.ini" \ "Run shapepipe (tile: merge sep cats)" \ - 
$exclusive \ "$VERBOSE" \ "$ID" fi @@ -553,9 +553,16 @@ fi (( do_job = $job & 512 )) if [[ $do_job != 0 ]]; then + # spread_model suffix for config file with or without SM input + if [ "$sm" == "0" ]; then + suff_sm="_nosm" + else + suff_sm="" + fi + ### Merge all relevant information into final catalogue command_cfg_shapepipe \ - "config_make_cat_$psf.ini" \ + "config_make_cat_$psf${suff_sm}.ini" \ "Run shapepipe (tile: create final cat $psf)" \ $n_smp \ $exclusive @@ -575,5 +582,5 @@ if [[ $do_job != 0 ]]; then fi if [ "$debug_out" != "-1" ]; then - echo "${pat}End $(basename "$0") ID=$exclusive" >> $debug_out + echo "${pat}End $(basename "$0") ID=$exclusive success" >> $debug_out fi diff --git a/shapepipe/modules/make_cat_package/make_cat.py b/shapepipe/modules/make_cat_package/make_cat.py index 91a96836..c7aa1e51 100644 --- a/shapepipe/modules/make_cat_package/make_cat.py +++ b/shapepipe/modules/make_cat_package/make_cat.py @@ -84,6 +84,11 @@ def save_sextractor_data(final_cat_file, sexcat_path, remove_vignet=True): remove_vignet : bool If ``True`` will not save the ``VIGNET`` field into the final catalogue + Returns + ------- + int + Number of objects saved + """ sexcat_file = file_io.FITSCatalogue(sexcat_path, SEx_catalogue=True) sexcat_file.open() @@ -109,6 +114,8 @@ def save_sextractor_data(final_cat_file, sexcat_path, remove_vignet=True): sexcat_file.close() + return cat_size + def save_sm_data( final_cat_file, @@ -116,6 +123,7 @@ def save_sm_data( do_classif=True, star_thresh=0.003, gal_thresh=0.01, + n_obj=-1, ): r"""Save Spread-Model Data. @@ -126,7 +134,8 @@ def save_sm_data( final_cat_file : file_io.FITSCatalogue Final catalogue sexcat_sm_path : str - Path to spread-model catalogue to save. + Path to spread-model catalogue to save. 
If ``None``, spread_model is + set to 99 do_classif : bool If ``True`` objects will be classified into stars, galaxies, and other, using the classifier @@ -137,17 +146,27 @@ def save_sm_data( gal_thresh : float Threshold for galaxy selection; object is classified as galaxy if :math:`{\rm class} >` ``gal_thresh`` + nobj : int, optional + Number of objects, only used if sexcat_sm_path is ``None`` """ final_cat_file.open() - sexcat_sm_file = file_io.FITSCatalogue(sexcat_sm_path, SEx_catalogue=True) - sexcat_sm_file.open() + if sexcat_sm_path is not None: + sexcat_sm_file = file_io.FITSCatalogue( + sexcat_sm_path, + SEx_catalogue=True, + ) + sexcat_sm_file.open() + + sm = np.copy(sexcat_sm_file.get_data()["SPREAD_MODEL"]) + sm_err = np.copy(sexcat_sm_file.get_data()["SPREADERR_MODEL"]) - sm = np.copy(sexcat_sm_file.get_data()["SPREAD_MODEL"]) - sm_err = np.copy(sexcat_sm_file.get_data()["SPREADERR_MODEL"]) + sexcat_sm_file.close() - sexcat_sm_file.close() + else: + sm = np.ones(n_obj) * 99 + sm_err = np.ones(n_obj) * 99 final_cat_file.add_col("SPREAD_MODEL", sm) final_cat_file.add_col("SPREADERR_MODEL", sm_err) @@ -175,10 +194,9 @@ class SaveCatalogue: """ - def __init__(self, final_cat_file, w_log): + def __init__(self, final_cat_file): self.final_cat_file = final_cat_file - self._w_log = w_log def process( self, @@ -281,8 +299,6 @@ def _save_ngmix_data(self, ngmix_cat_path, moments=False): ngmix_n_epoch = ngmix_cat_file.get_data()["n_epoch_model"] ngmix_mom_fail = ngmix_cat_file.get_data()["moments_fail"] - n_obj = len(self._obj_id) - self._w_log.info(f"writing ngmix info for {n_obj} objects") if moments: m = "m" else: @@ -291,8 +307,8 @@ def _save_ngmix_data(self, ngmix_cat_path, moments=False): ngmix_mcal_flags = ngmix_cat_file.get_data()["mcal_flags"] ngmix_id = ngmix_cat_file.get_data()["id"] - self._add2dict("NGMIX_N_EPOCH", np.zeros(n_obj)) - self._add2dict("NGMIX_MOM_FAIL", np.zeros(n_obj)) + self._add2dict("NGMIX_N_EPOCH", np.zeros(len(self._obj_id))) + 
self._add2dict("NGMIX_MOM_FAIL", np.zeros(len(self._obj_id))) prefix = f"NGMIX{m}" @@ -305,27 +321,26 @@ def _save_ngmix_data(self, ngmix_cat_path, moments=False): f"{prefix}_FLAGS_", f"{prefix}_T_PSFo_", ): - self._update_dict(key_str, np.zeros(n_obj)) + self._update_dict(key_str, np.zeros(len(self._obj_id))) for key_str in (f"NGMIX{m}_FLUX_ERR_", f"NGMIX{m}_MAG_ERR_"): - self._update_dict(key_str, np.ones(n_obj) * -1) + self._update_dict(key_str, np.ones(len(self._obj_id)) * -1) for key_str in ( f"NGMIX{m}_ELL_", f"NGMIX{m}_ELL_ERR_", f"NGMIX{m}_ELL_PSFo_", ): - self._update_dict(key_str, np.ones((n_obj, 2)) * -10.0) + self._update_dict(key_str, np.ones((len(self._obj_id), 2)) * -10.0) self._update_dict( f"NGMIX{m}_T_ERR_", - np.ones(n_obj) * 1e30, + np.ones(len(self._obj_id)) * 1e30, ) - self._add2dict(f"NGMIX{m}_MCAL_FLAGS", np.zeros(n_obj)) + self._add2dict(f"NGMIX{m}_MCAL_FLAGS", np.zeros(len(self._obj_id))) for idx, _ in enumerate(self._obj_id): for key in self._key_ends: x = self._output_dict[f"NGMIX{m}_ELL_{key}"][idx] if np.all(x != np.array([-10.0, -10.0])): - #print(x) - pass + print(x) for idx, id_tmp in enumerate(self._obj_id): ind = np.where(id_tmp == ngmix_id)[0] @@ -413,7 +428,6 @@ def _save_galsim_shapes(self, galsim_cat_path): self._key_ends = galsim_cat_file.get_ext_name()[1:] galsim_id = galsim_cat_file.get_data()["id"] - n_obj = len(self._obj_id) for key_str in ( "GALSIM_GAL_SIGMA_", @@ -421,7 +435,7 @@ def _save_galsim_shapes(self, galsim_cat_path): "GALSIM_FLUX_", "GALSIM_MAG_", ): - self._update_dict(key_str, np.zeros(n_obj)) + self._update_dict(key_str, np.zeros(len(self._obj_id))) for key_str in ("GALSIM_FLUX_ERR_", "GALSIM_MAG_ERR_", "GALSIM_RES_"): self._update_dict(key_str, np.ones(len(self._obj_id)) * -1) for key_str in ( @@ -430,10 +444,10 @@ def _save_galsim_shapes(self, galsim_cat_path): "GALSIM_GAL_ELL_UNCORR_", "GALSIM_PSF_ELL_", ): - self._update_dict(key_str, np.ones((n_obj, 2)) * -10.0) + self._update_dict(key_str, 
np.ones((len(self._obj_id), 2)) * -10.0) self._update_dict( "GALSIM_FLAGS_", - np.ones(n_obj, dtype="int16"), + np.ones(len(self._obj_id), dtype="int16"), ) for idx, id_tmp in enumerate(self._obj_id): @@ -524,23 +538,22 @@ def _save_psf_data(self, galaxy_psf_path): galaxy_psf_cat = SqliteDict(galaxy_psf_path) max_epoch = np.max(self.final_cat_file.get_data()["N_EPOCH"]) + 1 - n_obj = len(self._obj_id) self._output_dict = { - f"PSF_ELL_{idx + 1}": np.ones((n_obj, 2)) * -10.0 + f"PSF_ELL_{idx + 1}": np.ones((len(self._obj_id), 2)) * -10.0 for idx in range(max_epoch) } self._output_dict = { **self._output_dict, **{ - f"PSF_FWHM_{idx + 1}": np.zeros(n_obj) + f"PSF_FWHM_{idx + 1}": np.zeros(len(self._obj_id)) for idx in range(max_epoch) }, } self._output_dict = { **self._output_dict, **{ - f"PSF_FLAG_{idx + 1}": np.ones(n_obj, dtype="int16") + f"PSF_FLAG_{idx + 1}": np.ones(len(self._obj_id), dtype="int16") for idx in range(max_epoch) }, } diff --git a/shapepipe/modules/make_cat_runner.py b/shapepipe/modules/make_cat_runner.py index 82a20c83..17ead7f0 100644 --- a/shapepipe/modules/make_cat_runner.py +++ b/shapepipe/modules/make_cat_runner.py @@ -2,7 +2,7 @@ Module runner for ``make_cat``. 
-:Author: Axel Guinot +:Author: Axel Guinot, Martin Kilbinger """ @@ -37,14 +37,25 @@ def make_cat_runner( ): """Define The Make Catalogue Runner.""" # Set input file paths - ( - tile_sexcat_path, - sexcat_sm_path, - galaxy_psf_path, - shape1_cat_path, - ) = input_file_list[0:4] - if len(input_file_list) == 5: - shape2_cat_path = input_file_list[4] + if len(input_file_list) == 3: + # No spread model input + ( + tile_sexcat_path, + galaxy_psf_path, + shape1_cat_path, + ) = input_file_list + sexcat_sm_path = None + else: + # With spread model input + ( + tile_sexcat_path, + sexcat_sm_path, + galaxy_psf_path, + shape1_cat_path, + ) = input_file_list[0:4] + if len(input_file_list) == 5: + # With second shape catalogue input + shape2_cat_path = input_file_list[4] # Fetch classification options do_classif = config.getboolean( @@ -83,20 +94,24 @@ def make_cat_runner( # Save SExtractor data w_log.info("Save SExtractor data") - make_cat.save_sextractor_data(final_cat_file, tile_sexcat_path) + n_obj = make_cat.save_sextractor_data(final_cat_file, tile_sexcat_path) # Save spread-model data w_log.info("Save spread-model data") + if sexcat_sm_path is None: + w_log.info("No sm cat input, setting spread model to 99") + n_obj make_cat.save_sm_data( final_cat_file, sexcat_sm_path, do_classif, star_thresh, gal_thresh, + n_obj=n_obj ) # Save shape data - sc_inst = make_cat.SaveCatalogue(final_cat_file, w_log) + sc_inst = make_cat.SaveCatalogue(final_cat_file) w_log.info("Save shape measurement data") for shape_type in shape_type_list: w_log.info(f"Save {shape_type.lower()} data") From f87a73604eee33504831a0e54f4bebf6a3df1b45 Mon Sep 17 00:00:00 2001 From: Martin Kilbinger Date: Wed, 12 Feb 2025 08:28:07 +0100 Subject: [PATCH 2/6] Update curl_canfar_local.sh restored some v1.4 additions --- scripts/sh/curl_canfar_local.sh | 21 +++++++++++++++------ 1 file changed, 15 insertions(+), 6 deletions(-) diff --git a/scripts/sh/curl_canfar_local.sh b/scripts/sh/curl_canfar_local.sh index 
9722eddb..ada5dc4f 100755 --- a/scripts/sh/curl_canfar_local.sh +++ b/scripts/sh/curl_canfar_local.sh @@ -19,6 +19,7 @@ N_SMP=1 fix=0 version="1.1" cmd_remote="$HOME/shapepipe/scripts/sh/init_run_exclusive_canfar.sh" +batch=30 batch_max=200 dry_run=0 mh_local=0 @@ -50,7 +51,7 @@ usage="Usage: $(basename "$0") -j JOB -[e ID |-f file_IDs] -k KIND [OPTIONS] -N, --N_SMP N_SMOp\n \tnumber of jobs (SMP mode only), default=$N_SMP\n -F, --fix FIX\n - \tfix missing data (re-download tile, unzip) for FIX=1; default is $fix\ + \tfix missing data (re-download tile, unzip) for FIX=1; default is $fix\n -V, --version\n \tversion of docker image, default='$version'\n -C, --command_remote\n @@ -120,6 +121,14 @@ while [ $# -gt 0 ]; do scratch="$2" shift ;; + -V|--version) + version="$2" + shift + ;; + -B|--batch) + batch="$2" + shift + ;; -b|--batch_max) batch_max="$2" shift @@ -169,15 +178,15 @@ if [ "$dry_run" != 0 ] && [ "$dry_run" != 1 ] && [ "$dry_run" != 2 ]; then fi if [ "$debug_out" != "-1" ]; then - echo "${pat}Starting $(basename "$0")" >> $debug_out + echo "${pat}Starting $(basename "$0") $test_arg" >> $debug_out echo "${pat}curl ID=$ID" >> $debug_out echo ${pat}`date` >> $debug_out fi -. /opt/conda/etc/profile.d/conda.sh -conda activate shapepipe +source activate shapepipe if [ "$debug_out" != "-1" ]; then echo "${pat}conda prefix = ${CONDA_PREFIX}" >> $debug_out + echo "${pat}script version = ${script_version}" >> $debug_out fi # command line arguments for remote script: @@ -194,7 +203,7 @@ function submit_batch() { for ID in `cat $path`; do IDt=`echo $ID | tr "." 
"-"` my_name="SP-${patch}-J${job}-${IDt}" - call_curl $my_name $job $psf $ID $N_SMP $dry_run $dir $mh_local $sp_local $sm $debug_out $fix $scratch $test_arg + call_curl $my_name $job $psf $ID $N_SMP $dry_run $dir $mh_local $sp_local $sm $debug_out $fix $scratch $test_arg done } @@ -290,5 +299,5 @@ fi echo "Done $(basename "$0")" if [ "$debug_out" != "-1" ]; then - echo "${pat}End $(basename "$0")" >> $debug_out + echo "${pat}End $(basename "$0") $test_arg" >> $debug_out fi From 674216f5ea3448c57299df30a4da55188352fd2b Mon Sep 17 00:00:00 2001 From: Martin Kilbinger Date: Wed, 12 Feb 2025 08:31:13 +0100 Subject: [PATCH 3/6] Update functions.sh brought back additions from v1.4 --- scripts/sh/functions.sh | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/scripts/sh/functions.sh b/scripts/sh/functions.sh index 324864f2..02f6544b 100644 --- a/scripts/sh/functions.sh +++ b/scripts/sh/functions.sh @@ -35,7 +35,7 @@ function call_curl() { my_arg="-j $my_job -p $my_psf -e $my_ID -N $my_N_SMP -n $my_dry_run -d $my_dir -m $my_mh_local -s $my_sp_local --sm $my_sm --debug_out $my_debug_out -F $my_fix -S $my_scratch $my_test_arg" if [ "$my_dry_run" == "0" ]; then - my_session=`curl -E $SSL "$SESSION?$RESOURCES" -d "image=$IMAGE:$version" -d "name=${my_name}" -d "cmd=$cmd_remote" --data-urlencode "args=${my_arg[@]}" &> /dev/null` + my_session=`curl -E $SSL "$SESSION?$RESOURCES" -d "image=$IMAGE:$version" -d "name=${my_name}" -d "cmd=$cmd_remote" --data-urlencode "args=${my_arg[@]}"` fi cmd=("curl" "-E" "$SSL" "$SESSION?$RESOURCES" "-d" "image=$IMAGE:$version" "-d" "name=${my_name}" "-d" "cmd=$cmd_remote" "--data-urlencode" "args=\"${my_arg}\"") @@ -44,7 +44,7 @@ function call_curl() { echo "${pat}call_curl $my_name $my_arg" >> $my_debug_out echo "${pat}Running ${cmd[@]} (dry_run=$my_dry_run)" >> $my_debug_out fi - #echo "${cmd[@]} (dry_run=$my_dry_run)" + echo "${cmd[@]} (dry_run=$my_dry_run)" # Running $cmd does not work due to unknown problems with 
passing of args @@ -79,7 +79,7 @@ function command () { res=$? if [ "$debug_out" != "-1" ]; then - echo "${pat}result=$res" >> $debug_out + echo "${pat}exit code=$res" >> $debug_out fi if [ $VERBOSE == 1 ]; then @@ -92,6 +92,9 @@ function command () { if [ "$debug_out" != "-1" ]; then echo "${pat}${RED}exiting $(basename "$0")', error in command '$cmd'${NC}" >> $debug_out fi + if [ "$debug_out" != "-1" ]; then + echo "${pat}${RED}exiting $(basename "$0")', error in command '$cmd'${NC}" >> $debug_out + fi exit $res else echo "${RED}continuing '$(basename "$0")', error in command '$cmd'${NC}" From 4c038f7329452a22fb9770e412d1298ee775765e Mon Sep 17 00:00:00 2001 From: Martin Kilbinger Date: Wed, 12 Feb 2025 08:38:52 +0100 Subject: [PATCH 4/6] Update init_run_exclusive_canfar.sh added back removed parts (128 prev) --- scripts/sh/init_run_exclusive_canfar.sh | 44 +++++++++++++++++++++++++ 1 file changed, 44 insertions(+) diff --git a/scripts/sh/init_run_exclusive_canfar.sh b/scripts/sh/init_run_exclusive_canfar.sh index 815dc585..67f62797 100755 --- a/scripts/sh/init_run_exclusive_canfar.sh +++ b/scripts/sh/init_run_exclusive_canfar.sh @@ -504,6 +504,50 @@ if [[ $do_job != 0 ]]; then fi fi +(( do_job = $job & 128 )) +if [[ $do_job != 0 ]]; then + + echo + + cat_ngmix="run_sp_tile_ngmix_Ng1u/ngmix_runner/output/ngmix-*.fits" + dir_ngmix_prev="run_sp_tile_ngmix_Ng1u_prev/ngmix_runner/output" + cat_ngmix_prev="$dir_ngmix_prev/ngmix-*.fits" + + # Check whether ngmix output exists + if [ -e $cat_ngmix ]; then + message "ngmix output catalogue exists" $debug_out -1 + + # Check whether previous ngmix directory and output cat exist + exists="1" + if [ ! -d $dir_ngmix_prev ]; then + exists="0" + elif [ ! 
-e "$cat_ngmix_prev" ]; then
+    exists="0"
+  fi
+  if [ "$exists" == "0" ]; then
+    message "Moving to previous batch-save dir (does not exist yet)" $debug_out -1
+    command "mkdir -p $dir_ngmix_prev" $dry_run
+    command "mv $cat_ngmix $dir_ngmix_prev" $dry_run
+  else
+    # Compare file sizes
+    size_cat_ngmix=$(stat -c%s $cat_ngmix)
+    size_cat_ngmix_prev=$(stat -c%s $cat_ngmix_prev)
+    if [ "$size_cat_ngmix" -gt "$size_cat_ngmix_prev" ]; then
+      message "Moving to batch-save dir, overwriting smaller batch-save cat" $debug_out -1
+      command "mv $cat_ngmix $dir_ngmix_prev" $dry_run
+    else
+      message "Previous batch-save dir not smaller, removing ngmix output" $debug_out -1
+      command "rm $cat_ngmix" $dry_run
+    fi
+  fi
+  else
+    # Whether or not previous ngmix exists, job_sp_canfar will handle it
+    message "No ngmix output exists, continuing..." $debug_out -1
+  fi
+
+  echo
+fi
+
 (( do_job = $job & 256 ))
 if [[ $do_job != 0 ]]; then

From a719a4450f54dd490b052280d708eba098af754b Mon Sep 17 00:00:00 2001
From: Martin Kilbinger
Date: Wed, 12 Feb 2025 08:47:48 +0100
Subject: [PATCH 5/6] Update make_cat.py

added (back) n_obj, w_log
---
 .../modules/make_cat_package/make_cat.py | 33 +++++++++++--------
 1 file changed, 19 insertions(+), 14 deletions(-)

diff --git a/shapepipe/modules/make_cat_package/make_cat.py b/shapepipe/modules/make_cat_package/make_cat.py
index c7aa1e51..5e4e7591 100644
--- a/shapepipe/modules/make_cat_package/make_cat.py
+++ b/shapepipe/modules/make_cat_package/make_cat.py
@@ -194,9 +194,10 @@ class SaveCatalogue:

     """

-    def __init__(self, final_cat_file):
+    def __init__(self, final_cat_file, w_log):

         self.final_cat_file = final_cat_file
+        self._w_log = w_log

     def process(
         self,
@@ -280,7 +281,7 @@ def _add2dict(self, key, value, index=None):
         else:
             self._output_dict[key] = value

-    def _save_ngmix_data(self, ngmix_cat_path, moments=False):
+    def _save_ngmix_data(self, ngmix_cat_path, moments=False, w_log=None):
         """Save NGMIX Data.

         Save the NGMIX catalogue into the final one. 
@@ -299,6 +300,9 @@ def _save_ngmix_data(self, ngmix_cat_path, moments=False):
         ngmix_n_epoch = ngmix_cat_file.get_data()["n_epoch_model"]
         ngmix_mom_fail = ngmix_cat_file.get_data()["moments_fail"]

+        n_obj = len(self._obj_id)
+        self._w_log.info(f"writing ngmix info for {n_obj} objects")
+
         if moments:
             m = "m"
         else:
@@ -307,8 +311,8 @@ def _save_ngmix_data(self, ngmix_cat_path, moments=False):
         ngmix_mcal_flags = ngmix_cat_file.get_data()["mcal_flags"]
         ngmix_id = ngmix_cat_file.get_data()["id"]

-        self._add2dict("NGMIX_N_EPOCH", np.zeros(len(self._obj_id)))
-        self._add2dict("NGMIX_MOM_FAIL", np.zeros(len(self._obj_id)))
+        self._add2dict("NGMIX_N_EPOCH", np.zeros(n_obj))
+        self._add2dict("NGMIX_MOM_FAIL", np.zeros(n_obj))

         prefix = f"NGMIX{m}"

@@ -321,20 +325,20 @@ def _save_ngmix_data(self, ngmix_cat_path, moments=False):
             f"{prefix}_FLAGS_",
             f"{prefix}_T_PSFo_",
         ):
-            self._update_dict(key_str, np.zeros(len(self._obj_id)))
+            self._update_dict(key_str, np.zeros(n_obj))
         for key_str in (f"NGMIX{m}_FLUX_ERR_", f"NGMIX{m}_MAG_ERR_"):
-            self._update_dict(key_str, np.ones(len(self._obj_id)) * -1)
+            self._update_dict(key_str, np.ones(n_obj) * -1)
         for key_str in (
             f"NGMIX{m}_ELL_",
             f"NGMIX{m}_ELL_ERR_",
             f"NGMIX{m}_ELL_PSFo_",
         ):
-            self._update_dict(key_str, np.ones((len(self._obj_id), 2)) * -10.0)
+            self._update_dict(key_str, np.ones((n_obj, 2)) * -10.0)
         self._update_dict(
             f"NGMIX{m}_T_ERR_",
             np.ones(len(self._obj_id)) * 1e30,
         )
-        self._add2dict(f"NGMIX{m}_MCAL_FLAGS", np.zeros(len(self._obj_id)))
+        self._add2dict(f"NGMIX{m}_MCAL_FLAGS", np.zeros(n_obj))

         for idx, _ in enumerate(self._obj_id):
             for key in self._key_ends:
@@ -428,6 +432,7 @@ def _save_galsim_shapes(self, galsim_cat_path):
         self._key_ends = galsim_cat_file.get_ext_name()[1:]

         galsim_id = galsim_cat_file.get_data()["id"]
+        n_obj = len(self._obj_id)

         for key_str in (
             "GALSIM_GAL_SIGMA_",
             "GALSIM_GAL_ELL_UNCORR_",
             "GALSIM_FLUX_",
             "GALSIM_MAG_",
         ):
-            self._update_dict(key_str, 
np.zeros(len(self._obj_id)))
+            self._update_dict(key_str, np.zeros(n_obj))
         for key_str in ("GALSIM_FLUX_ERR_", "GALSIM_MAG_ERR_", "GALSIM_RES_"):
-            self._update_dict(key_str, np.ones(len(self._obj_id)) * -1)
+            self._update_dict(key_str, np.ones(n_obj) * -1)
         for key_str in (
             "GALSIM_GAL_ELL_",
             "GALSIM_GAL_ELL_ERR_",
             "GALSIM_GAL_ELL_UNCORR_",
             "GALSIM_PSF_ELL_",
         ):
-            self._update_dict(key_str, np.ones((len(self._obj_id), 2)) * -10.0)
+            self._update_dict(key_str, np.ones((n_obj, 2)) * -10.0)
         self._update_dict(
             "GALSIM_FLAGS_",
-            np.ones(len(self._obj_id), dtype="int16"),
+            np.ones(n_obj, dtype="int16"),
         )

         for idx, id_tmp in enumerate(self._obj_id):
@@ -540,13 +545,14 @@ def _save_psf_data(self, galaxy_psf_path):

         max_epoch = np.max(self.final_cat_file.get_data()["N_EPOCH"]) + 1
+        n_obj = len(self._obj_id)

         self._output_dict = {
-            f"PSF_ELL_{idx + 1}": np.ones((len(self._obj_id), 2)) * -10.0
+            f"PSF_ELL_{idx + 1}": np.ones((n_obj, 2)) * -10.0
             for idx in range(max_epoch)
         }
         self._output_dict = {
             **self._output_dict,
             **{
-                f"PSF_FWHM_{idx + 1}": np.zeros(len(self._obj_id))
+                f"PSF_FWHM_{idx + 1}": np.zeros(n_obj)
                 for idx in range(max_epoch)
             },
         }

From 18eb3630c7cb73b0e5a6cba257de3ea14663e44c Mon Sep 17 00:00:00 2001
From: Martin Kilbinger
Date: Wed, 12 Feb 2025 08:49:22 +0100
Subject: [PATCH 6/6] Update make_cat_runner.py

added back w_log
---
 shapepipe/modules/make_cat_runner.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/shapepipe/modules/make_cat_runner.py b/shapepipe/modules/make_cat_runner.py
index 17ead7f0..7eb231e9 100644
--- a/shapepipe/modules/make_cat_runner.py
+++ b/shapepipe/modules/make_cat_runner.py
@@ -111,7 +111,7 @@ def make_cat_runner(
     )

     # Save shape data
-    sc_inst = make_cat.SaveCatalogue(final_cat_file)
+    sc_inst = make_cat.SaveCatalogue(final_cat_file, w_log)
     w_log.info("Save shape measurement data")
     for shape_type in shape_type_list:
         w_log.info(f"Save {shape_type.lower()} data")