#!/bin/bash
# Bash example for processing an AHI granule to generate a variety of product outputs
#####################################################################
# Setting environment variables configures the algorithm
#####################################################################
export ENV_SATELLITE=Himawari8
export ENV_L1B_FILE=AIT_TEST_DATA/AHI/L1B/HS_H08_20190711_0300_B14_FLDK_R20_S0110.DAT

# Choose which algorithms output NetCDF product files.
export ENV_OUTPUT_LIST="SAT_AHI"
# Examples of other outputs: CLOUD_HEIGHT_EN,CLOUD_DCOMP_EN,CLOUD_LAYERS_EN,AEROSOL_AOD_EN,AVIATION_VOL_ASH_EN,AEROSOL_ADP_EN
echo "run to generate: $ENV_OUTPUT_LIST"

# Define the input list and the AIT file timestamp for the input files.
export ENV_INPUT_LIST=""
#export ENV_AIT_FILE_TIME_STAMP="{year=2019;month=03;day=27;hour=15;min=30;sec=21}"
# Alternate: -PROD_FILE - filename of an input algorithm NetCDF file, e.g. CLOUD_MASK_EN_PROD_FILE=cloud_mask.nc

# Number of column and row segments; deviation from these numbers is only experimentally supported.
export ENV_NUM_COL_SEG=6
export ENV_NUM_ROW_SEG=4

# Set the snow mask algorithm that takes precedence.
# Choices: SNOW_MASK_NWP, SNOW_MASK_IMS_SSMI
export ENV_SNOW_MASK_COMMON_ALG="SNOW_MASK_IMS_SSMI"

# Export environment variables for the output and input directories.
export ENV_OUTPUT_DIRECTORY="./Output/"
export ENV_INPUT_DIRECTORY="./Input/"

# Standard ancillary and LUT data environment variables
export ENV_ALGORITHM_ANCILLARY_DIR="AIT_TEST_DATA/algorithm_ancillary/"
export ENV_FRAMEWORK_ANCILLARY_DIR="AIT_TEST_DATA/framework_ancillary/"
export ENV_OISST_DAILY_DIR="AIT_TEST_DATA/AHI/oisst_daily/"
#export ENV_NWP_RAP_DIR="AIT_TEST_DATA/ABI/nwp_rap/"
export ENV_SNOW_MAP_DIR="AIT_TEST_DATA/AHI/snow_map/"              # IMS_SSMI ancillary directory
export ENV_NWP_GFS_GRIB2_DIR="AIT_TEST_DATA/AHI/gfs_grib2_0.5deg/" # a backwards-compatible option
#export ENV_NWP_GFS_GRIB2_FILE=gfs_grib2_0.5deg/@YYYY@/@MM@/@DD@/gfs_4_@YYYY@@MM@@DD@_@AN@00_0@FF@.grb2 # an option to specify a file and directory pattern for GFS

##############################################################
# Call a sequential run:
##############################################################
# Path to the configuration XML
CFG="./Config/Projects/AHIMA/Config.xml"
CMD="./algorithms.exe"
#$CMD $CFG

##############################################################
# Executing in parallel is as simple as:
##############################################################
# defining the maximum number of processes to run in parallel,
MAX_JOBS=4
# defining the total number of segments to iterate over,
TOTAL_SEG=$(($ENV_NUM_COL_SEG * $ENV_NUM_ROW_SEG))
# and defining a unique job ID:
JOB_ID="framework_job_$(date +%Y%m%d%H%M%S_%N)_$$"

# Step 1:
$CMD $CFG -j $JOB_ID -m pre &> "pre_log.txt"
if [ $? -ne 0 ]; then echo "Failed, aborting."; exit 1; fi

# Step 2 (iterate over each segment, running a few segments at a time):
# This can be done with a scheduling tool or with a command such as this one
# (xargs exits non-zero if any of the segment commands fails):
seq 1 $TOTAL_SEG | xargs -P $MAX_JOBS -I{} sh -c "$CMD $CFG -j $JOB_ID -s {} > seg_{}_log.txt 2>&1"
if [ $? -ne 0 ]; then echo "Failed, aborting."; exit 1; fi
# You do not need to understand that command in detail: it is simply a loop that runs
# `$CMD $CFG -j $JOB_ID -s $segment_number` for each segment, with at most MAX_JOBS
# processes running at once. A commented-out plain-loop equivalent is sketched at the
# end of this script.

# Step 3:
$CMD $CFG -j $JOB_ID -m post &> "post_log.txt"
if [ $? -ne 0 ]; then echo "Failed, aborting."; exit 1; fi

#
# Note that you will want to capture the output of each of these executions separately,
# as is done above with the per-step and per-segment log files.
##############################################################
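
##############################################################
# For reference: a plain-loop equivalent of the xargs command in Step 2
##############################################################
# This is only an illustrative sketch, not part of the verified example above. It assumes
# bash 4.3 or newer (for `wait -n`) and reuses the CMD, CFG, JOB_ID, TOTAL_SEG and
# MAX_JOBS variables defined above. Uncomment it to use it in place of the xargs line.
#FAILED=0
#for segment_number in $(seq 1 $TOTAL_SEG); do
#    # Throttle: once MAX_JOBS segments are running, wait for one of them to finish.
#    while [ "$(jobs -rp | wc -l)" -ge "$MAX_JOBS" ]; do
#        wait -n || FAILED=1
#    done
#    $CMD $CFG -j $JOB_ID -s $segment_number &> "seg_${segment_number}_log.txt" &
#done
## Wait for the remaining segments and record any failures.
#for pid in $(jobs -rp); do
#    wait "$pid" || FAILED=1
#done
#if [ $FAILED -ne 0 ]; then echo "Failed, aborting."; exit 1; fi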