TNO Intern

Skip to content
Commits on Source (6)
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# Fortran objects
*.mod
*.o
*.x
......@@ -244,9 +244,13 @@ Use pandas concat to combine dataframes.
py/cso_pal.py
Replaced `where` constructs by loops after memory errors in output filling.
oper/src/cso_ncfile
oper/src/cso_ncfile.F90
Check on `pix_all` value to know if pixels are assigned to any of the sub-domains.
src/tutorial_oper_S5p.F90
Migration from Copernicus SciHub to DataSpace.
bin/dhusget.sh
doc/source/*
py/*
config/*
......@@ -15,6 +15,9 @@ History
2023-08, Arjo Segers
Define 'CSO_PREFIX' environment variable for use in rcfile settings.
2023-11, Arjo Segers
Reformatted using 'black'.
"""
......@@ -30,7 +33,7 @@ import logging
# prefix of CSO installation:
prefix = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), os.pardir))
# extend path:
sys.path.insert( 0, os.path.join(prefix,'py') )
sys.path.insert(0, os.path.join(prefix, "py"))
# tools:
import utopya
......@@ -41,41 +44,41 @@ import utopya
# -------------------------------------------------
# set environment:
os.environ['CSO_PREFIX'] = prefix
os.environ["CSO_PREFIX"] = prefix
# init script:
utos = utopya.UtopyaRunScriptRc()
# setup standard command line arguments, and also enable rcfile arguments:
utos.ArgumentsSetup( description='CAMS Satellite Operator', rcbase='cso' )
utos.ArgumentsSetup(description="CAMS Satellite Operator", rcbase="cso")
# evaluate known arguments, store the other ;
# might show help text and exit:
args, xargs = utos.ArgumentsParse()
# start, shout info:
logging.info( '' )
logging.info( '** CSO - CAMS Satellite Operator **' )
logging.info( '' )
logging.info( 'installation prefix : %s' % prefix )
logging.info( 'input rcfile : %s' % args.rcfile )
logging.info( 'settings base : %s' % args.rcbase )
logging.info( '' )
logging.info(f"")
logging.info(f"** CSO - CAMS Satellite Operator **")
logging.info(f"")
logging.info(f"installation prefix : {prefix}")
logging.info(f"input rcfile : {args.rcfile}")
logging.info(f"settings base : {args.rcbase}")
logging.info(f"")
# info ...
logging.info( 'start job tree ...' )
logging.info(f"start job tree ...")
# import class as defined in settings by 'cso.class' key;
# should be (derived from) 'utopya.JobStep' :
jbs_cls = utos.ImportClass( 'class' )
jbs_cls = utos.ImportClass("class")
# init job step object, use the 'rcbase' as name:
jbs = jbs_cls(args.rcbase, args.rcfile)
# start first:
jbs.Start(single=args.single)
# info:
logging.info( '' )
logging.info( '** end **' )
logging.info( '' )
logging.info(f"")
logging.info(f"** end **")
logging.info(f"")
# -------------------------------------------------
......
#!/bin/bash
#------------------------------------------------------------------------------------------- #
# Demo script illustrating some examples using the OData interface #
# of the Data Hub Service (DHuS) #
#-------------------------------------------------------------------------------------------
# CHANGE LOG
# v0.3.1:
# - usage switch fixed
# - usage text updated to include the download of Sentinel 2 products
# - introduction of parallel download with check of the server error messages (option -n)
# - insertion of MD5 check
#
# Serco SpA 2015
# CHANGE LOG
# v0.3.2:
# - fixed "-f" option
# - upgraded "-f"
# - added the following options: -s, -e, -S, -E, -F
#
# CHANGE LOG
# v0.3.3:
# - added the following options: -O, -L, -R, -r
#
# CHANGE LOG
# v0.3.4:
# - added the following options: -m, -i, -l, -P, -q, -C, -N, -D
#
# Serco SpA 2015
#
#CHANGE LOG
#v0.3.5:
# -changed "==" with "=" in the MD5 check section
#
#CHANGE LOG
#v0.3.6
# - added Sentinel-5 P references
# - changed "==" with "=" in the MD5 check section [line 818]
# - added CheckSum Echo
#
#CHANGE LOG
#v0.3.7
# - fix download of ".NetCDF" file
# #
#CHANGE LOG
#v0.3.8
# - added management of offline products download
#
#CHANGES FOR CSO
# - Removed options for username and password for safety,
# these should be specified in a ~/.netrc file
# - Do not break on existing lock file, since it is often present
# after a failed download.
#
#------------------------------------------------------------------------------------------- #
## echo:
#set -x
# Script version; working directory and pid file used for the single-instance lock.
export VERSION=0.3.8
WD=$HOME/dhusget_tmp
PIDFILE=$WD/pid
# Create the working directory on first run.
test -d $WD || mkdir -p $WD
#-
# Terminal escape sequences for bold/normal text in the help output.
bold=$(tput bold)
normal=$(tput sgr0)
# Basename of this script (used in the usage banner).
print_script=`echo "$0" | rev | cut -d'/' -f1 | rev`
# print_usage [script-path]
#
# Print the full command-line help for dhusget and terminate with a
# non-zero status.  The argument is only used to show the script
# basename in the USAGE line.
# Fixes vs. original: several typos in the help text corrected, and the
# OPTIONS heading now resets bold (the missing ${normal} left the rest
# of the help rendered in bold).
function print_usage
{
print_script=`echo "$1" | rev | cut -d'/' -f1 | rev`
echo " "
echo "${bold}NAME${normal}"
echo " "
echo " DHuSget $VERSION - The non interactive Sentinels product retriever from the Sentinels Data Hubs"
echo " "
echo "${bold}USAGE${normal}"
echo " "
echo " $print_script [LOGIN OPTIONS]... [SEARCH QUERY OPTIONS]... [SEARCH RESULT OPTIONS]... [DOWNLOAD OPTIONS]... "
echo " "
echo "${bold}DESCRIPTION${normal}"
echo " "
echo " This script allows to get products from Sentinels Data Hubs executing query with different filter. The products can be visualized on shell and saved in list file"
echo " or downloaded in a zip file."
echo " Recommendation: If this script is run as a cronjob, to avoid traffic load, please do not schedule it exactly at on-the-clock hours (e.g 6:00, 5:00)."
echo " "
echo "${bold}OPTIONS${normal}"
echo " "
echo " ${bold}LOGIN OPTIONS:${normal}"
echo " "
echo " -d <DHuS URL> : specify the URL of the Data Hub Service;"
# echo " -u <username> : data hub username;"
# echo " -p <password> : data hub password (note: if not provided by command line it is read by stdin);"
echo " "
echo " "
echo " ${bold}SEARCH QUERY OPTIONS:${normal}"
echo " "
echo " -m <mission name> : Sentinel mission name. Possible options are: Sentinel-1, Sentinel-2, Sentinel-3, Sentinel-5 P);"
echo ""
echo " -i <instrument name> : instrument name. Possible options are: SAR, MSI, OLCI, SLSTR, SRAL, TROPOMI);"
echo ""
echo " -t <time in hours> : search for products ingested in the last <time in hours> (integer) from the time of"
echo " execution of the script."
echo " (e.g. '-t 24' to search for products ingested in the last 24 Hours);"
echo ""
echo " -s <ingestion_date_FROM> : Search for products ingested ${bold}after${normal} the date and time specified by <ingestion_date_FROM>."
echo " The date format is ISO 8601, YYYY-MM-DDThh:mm:ss.cccZ (e.g. -s 2016-10-02T06:00:00.000Z);"
echo ""
echo " -e <ingestion_date_TO> : Search for products ingested ${bold}before${normal} the date specified by <ingestion_date_TO>."
echo " The date format is ISO 8601, YYYY-MM-DDThh:mm:ss.cccZ (e.g. -e 2016-10-10T12:00:00.000Z);"
echo ""
echo " -S <sensing_date_FROM> : Search for products with sensing date ${bold}greater than${normal} the date and time specified by <sensing_date_FROM>."
echo " The date format is ISO 8601, YYYY-MM-DDThh:mm:ss.cccZ (e.g. -S 2016-10-02T06:00:00.000Z);"
echo ""
echo " -E <sensing_date_TO> : Search for products with sensing date ${bold}less than${normal} the date and time specified by <sensing_date_TO>."
echo " The date format is ISO 8601, YYYY-MM-DDThh:mm:ss.cccZ (e.g. -E 2016-10-10T12:00:00.000Z);"
echo ""
echo " -f <ingestion_date_file> : Search for products ingested after the date and time provided through an input file. This option overrides option -s"
echo " The date format shall be ISO 8601 (YYYY-MM-DDThh:mm:ss.cccZ)."
echo " <ingestion_date_file> is automatically updated at the end of the script execution"
echo " with the ingestion date of the last successfully downloaded product;"
echo " "
echo " -c <lon1,lat1:lon2,lat2> : Search for products intersecting a rectangular Area of Interest (or Bounding Box)"
echo " by providing the geographical coordinates of two opposite vertices. "
echo " Coordinates need to be provided in Decimal Degrees and with the following syntax:"
echo " "
echo " - ${bold}lon1,lat1:lon2,lat2${normal}"
echo " "
echo " where lon1 and lat1 are respectively the longitude and latitude of the first vertex and"
echo " lon2 and lat2 the longitude and latitude of the second vertex."
echo " (e.g. '-c -4.530,29.850:26.750,46.800' is a bounding box enclosing the Mediterranean Sea);"
echo " "
echo " -T <product type> : Search products according to the specified product type."
echo " Sentinel-1 possible options are: SLC, GRD, OCN and RAW. "
echo " Sentinel-2 possible option is: S2MSI1C ;"
echo " Sentinel-5 P possible options are: RA, IR, O3, NO2, CO, CLOUD, AER and NP. "
echo " "
echo " -F <free OpenSearch query> : free text OpenSearch query. The query must be written enclosed by single apexes '<query>'. "
echo " (e.g. -F 'platformname:Sentinel-1 AND producttype:SLC'). "
echo " (e.g. -F 'platformname:Sentinel-5 P AND producttype:RA'). "
echo " Note: the free text OpenSearch query is in ${bold}AND${normal} with the other possible specified search options."
echo " "
echo " "
echo " ${bold}SEARCH RESULT OPTIONS:${normal}"
echo " "
echo " -l <results> : maximum number of results per page [1,2,3,4,..]; default value = 25"
echo " "
echo " -P <page> : page number [1,2,3,4,..]; default value = 1"
echo " "
echo " -q <XMLfile> : write the OpenSearch query results in a specified XML file. Default file is './OSquery-result.xml'"
echo " "
echo " -C <CSVfile> : write the list of product results in a specified CSV file. Default file is './products-list.csv'"
echo " "
echo " "
echo " ${bold}DOWNLOAD OPTIONS:${normal}"
echo " "
echo " -o <download> : THIS OPTION IS MANDATORY FOR DOWNLOADING. Accepted values for <download> are:"
echo " - ${bold}product${normal} : download the Product ZIP files (manifest file included)"
echo " - ${bold}manifest${normal} : download only the manifest files"
echo " - ${bold}all${normal} : download both the Product ZIP files and the manifest files, and"
echo " provide them in separate folders."
echo ""
echo " By default the Product ZIP files are stored in ./product"
echo " unless differently specified by option ${bold}-O${normal}."
echo ""
echo " By default the manifest files are stored in ./manifest ;"
echo " "
echo " "
echo " -O <folder> : save the Product ZIP files in a specified folder. "
echo " "
echo " -N <1...n> : set number of wget download retries. Default value is 5. Fatal errors like 'connection refused'"
echo " or 'not found' (404), are not retried;"
echo " "
echo " -R <file> : write in <file> the list of products that have failed the MD5 integrity check."
echo " By default the list is written in ./failed_MD5_check_list.txt ;"
echo " The format of the output file is compatible with option ${bold}-r${normal} ;"
echo " "
echo " -D : if specified, remove the products that have failed the MD5 integrity check from disk."
echo " By default products are not removed;"
echo " "
echo " -r <file> : download the products listed in an input <file> written according to the following format:"
echo " - One product per line."
echo " - <space><one character><space><UUID><space><one character><space><filename>."
echo " Examples:"
echo " ' x 67c7491a-d98a-4eeb-9ca0-8952514c7e1e x S1A_EW_GRDM_1SSH_20160411T113221_20160411T113257_010773_010179_7BE0'"
echo " ' 0 67c7491a-d98a-4eeb-9ca0-8952514c7e1e 0 S1A_EW_GRDM_1SSH_20160411T113221_20160411T113257_010773_010179_7BE0'"
echo " "
echo " -L <lock folder> : by default only one instance of dhusget can be executed at a time. This is ensured by the creation"
echo " of a temporary lock folder $HOME/dhusget_tmp/lock which is removed at the end of each run."
echo " For running more than one dhusget instance at a time is sufficient to assign different lock folders"
echo " using the -L option (e.g. '-L foldername') to each dhusget instance;"
echo " "
echo " -n <1...n> : number of concurrent downloads (either products or manifest files). Default value is 2; this value"
echo " doesn't override the quota limit set on the server side for the user"
echo " "
echo " -w <1...n> : minutes to wait until retrying the download of an offline product. Default value is 10;"
echo " "
echo " "
echo " -W <1...n> : number of download retries for offline products. Default value is 40."
echo " "
echo " "
echo " 'wget' is necessary to run the dhusget"
echo " "
exit -1
}
# Emit the tool name with its version string, then stop the script.
function print_version
{
    printf '%s\n' "dhusget $VERSION"
    exit -1
}
#------------------------------------------------------------------------------
#--- added methods and variables for offline product download management BEGIN
#------------------------------------------------------------------------------
# Files used to queue products the hub reports as "offline" (long-term
# archive) so they can be retried later, plus the retry counter.
export OFFLINE_PRODUCT_LIST=offline_product_list.txt
export TMP_OFFLINE_PRODUCT_LIST=offline_product_list.csv
export OFFLINE_RETRY_ATTEMPTS_COUNTER=0
#AJS: commented, this function does not exist ...
#export -f add_offline_product_to_list
# download_product <list-file> <threads>
#
# Download the products listed in <list-file> (xargs consumes 4 whitespace-
# separated fields per product; field 2 is the UUID, field 4 the product
# name) with <threads> parallel wget jobs.  Products whose name contains
# "S5P" are saved as ".nc", all others as ".zip".  Each completed download
# is MD5-checked against the checksum published by the hub; failures are
# appended to .failed.control.now.txt and, when $save_products_failed is
# set (option -D), the local file is removed.  Downloads answered with
# 403/202/503 are treated as offline products and queued in
# ${TMP_OFFLINE_PRODUCT_LIST} for a later retry.
#
# NOTE: the body handed to `sh -c` is one large single-quoted string; the
# '\'' sequences close and reopen the quote to embed literal single quotes.
# Do not add or move lines inside the quoted region without re-checking
# the quoting.
function download_product
{
cat $1 | xargs -n 4 -P $2 sh -c ' while : ; do
echo "Downloading product ${3} from link ${DHUS_DEST}/odata/v1/Products('\''"$1"'\'')/\$value";
# ---added control on S5P products
prod5P=${3}
mission5P="S5P"
if echo $prod5P | grep -q "$mission5P"; then
${WC} ${AUTH} ${TRIES} --progress=dot -e dotbytes=10M -c --output-file=./logs/log.${3}.log -O $output_folder/${3}".nc" "${DHUS_DEST}/odata/v1/Products('\''"$1"'\'')/\$value";
else
${WC} ${AUTH} ${TRIES} --progress=dot -e dotbytes=10M -c --output-file=./logs/log.${3}.log -O $output_folder/${3}".zip" "${DHUS_DEST}/odata/v1/Products('\''"$1"'\'')/\$value";
fi
test=$?;
# --- check HTTP response status in case of offline products download
http_response=`cat ./logs/log.${3}.log| grep '\''403 Forbidden\|202 Accepted\|503 Service Unavailable'\''`;
echo "http_response is $http_response";
if [ ! -z "$http_response" ]; then
echo "The requested product is offline" ;
echo "Adding product ${3} in the offline product download queue" ;
echo "${2},${1},${3}" | tee -a "${TMP_OFFLINE_PRODUCT_LIST}" ;
else
if [ $test -eq 0 ]; then
echo "Product ${3} successfully downloaded at " `tail -2 ./logs/log.${3}.log | head -1 | awk -F"(" '\''{print $2}'\'' | awk -F")" '\''{print $1}'\''`;
remoteMD5=$( ${WC} -qO- ${AUTH} ${TRIES} -c "${DHUS_DEST}/odata/v1/Products('\''"$1"'\'')/Checksum/Value/$value" | awk -F">" '\''{print $3}'\'' | awk -F"<" '\''{print $1}'\'');
if echo $prod5P | grep -q "$mission5P"; then
localMD5=$( openssl md5 $output_folder/${3}".nc" | awk '\''{print $2}'\'');
else
localMD5=$( openssl md5 $output_folder/${3}".zip" | awk '\''{print $2}'\'');
fi
localMD5Uppercase=$(echo "$localMD5" | tr '\''[:lower:]'\'' '\''[:upper:]'\'');
#localMD5Uppercase=1;
if [ "$remoteMD5" = "$localMD5Uppercase" ]; then
echo "Product ${3} successfully MD5 checked";
echo "Remote MD5: $remoteMD5"
echo "Local MD5: $localMD5Uppercase"
else
echo "Checksum for product ${3} failed";
echo "${0} ${1} ${2} ${3}" >> .failed.control.now.txt;
if [ ! -z $save_products_failed ];then
rm $output_folder/${3}".zip"
fi
fi;
else
echo "${0} ${1} ${2} ${3}" >> .failed.control.now.txt;
if [ ! -z $save_products_failed ];then
rm $output_folder/${3}".zip"
fi
fi;
fi;
break;
done '
}
#------------------------------------------------------------------------------
#--- added methods and variables for offline product download management END
#------------------------------------------------------------------------------
#----------------------
#--- Load input parameter
# Default endpoint and query window defaults; the open window
# [1970-01-01, NOW] matches everything unless narrowed by options.
# NOTE(review): "INGEGESTION" is a historical misspelling of "INGESTION";
# it is kept because the name is used consistently throughout the script.
export DHUS_DEST="https://scihub.copernicus.eu/dhus"
#export USERNAME=""
#export PASSWORD=""
export TIME_SUBQUERY=""
export PRODUCT_TYPE=""
export INGEGESTION_TIME_FROM="1970-01-01T00:00:00.000Z"
export INGEGESTION_TIME_TO="NOW"
export SENSING_TIME_FROM="1970-01-01T00:00:00.000Z"
export SENSING_TIME_TO="NOW"
unset TIMEFILE
# Parse command-line options.  The leading ':' in the optstring puts
# getopts in silent error-reporting mode.  -u/-p are deliberately
# rejected (CSO change): credentials must come from ~/.netrc.
while getopts ":d:u:p:l:P:q:C:m:i:t:s:e:S:E:f:c:T:o:V:h:F:R:D:r:O:N:L:n:w:W:" opt; do
case $opt in
d)
export DHUS_DEST="$OPTARG"
export DHUS_DEST_IS_SET='OK'
;;
u)
#export USERNAME="$OPTARG"
echo "ERROR - do not specify '-u <user>', use ~/.netrc to store login info."
exit 1
;;
p)
#export PASSWORD="$OPTARG"
echo "ERROR - do not specify '-p <password>', use ~/.netrc to store login info."
exit 1
;;
l)
export ROWS="$OPTARG"
;;
P)
export PAGE="$OPTARG"
;;
q)
export NAMEFILERESULTS="$OPTARG"
;;
C)
export PRODUCTLIST="$OPTARG"
;;
m)
export MISSION="$OPTARG"
;;
i)
export INSTRUMENT="$OPTARG"
;;
t)
# Relative ingestion window: last $TIME hours.
export TIME="$OPTARG"
export INGEGESTION_TIME_FROM="NOW-${TIME}HOURS"
export isselected_filtertime_lasthours='OK'
;;
s)
export TIME="$OPTARG"
export INGEGESTION_TIME_FROM="$OPTARG"
export isselected_filtertime_ingestion_date='OK'
;;
e)
export TIME="$OPTARG"
export INGEGESTION_TIME_TO="$OPTARG"
export isselected_filtertime_ingestion_date='OK'
;;
S)
export SENSING_TIME="$OPTARG"
export SENSING_TIME_FROM="$OPTARG"
;;
E)
export SENSING_TIME="$OPTARG"
export SENSING_TIME_TO="$OPTARG"
;;
f)
# Read the ingestion start date from a file (overrides -s); an empty
# or missing file falls back to the epoch.
export TIMEFILE="$OPTARG"
if [ -s $TIMEFILE ]; then
export INGEGESTION_TIME_FROM="`cat $TIMEFILE`"
else
export INGEGESTION_TIME_FROM="1970-01-01T00:00:00.000Z"
fi
;;
c)
# Bounding box: split "lon1,lat1:lon2,lat2" into x1,y1 / x2,y2.
ROW=$OPTARG
FIRST=`echo "$ROW" | awk -F\: '{print \$1}' `
SECOND=`echo "$ROW" | awk -F\: '{print \$2}' `
#--
export x1=`echo ${FIRST}|awk -F, '{print $1}'`
export y1=`echo ${FIRST}|awk -F, '{print $2}'`
export x2=`echo ${SECOND}|awk -F, '{print $1}'`
export y2=`echo ${SECOND}|awk -F, '{print $2}'`
;;
T)
export PRODUCT_TYPE="$OPTARG"
;;
o)
export TO_DOWNLOAD="$OPTARG"
;;
V)
print_version $0
;;
h)
print_usage $0
;;
F)
FREE_SUBQUERY_CHECK="OK"
FREE_SUBQUERY="$OPTARG"
;;
R)
export FAILED="$OPTARG"
export check_save_failed='OK'
;;
D)
export save_products_failed='OK'
;;
r)
export FAILED_retry="$OPTARG"
export check_retry='OK'
;;
O)
export output_folder="$OPTARG"
export OUTPUT_FOLDER_IS_SET='OK'
;;
N)
export number_tries="$OPTARG"
;;
L)
export lock_file="$OPTARG"
export LOCK_FILE_IS_SET='OK'
;;
n)
export THREAD_NUMBER="$OPTARG"
export THREAD_NUMBER_IS_SET='OK'
;;
w)
# Wait time between offline-product retries, in minutes (sleep suffix "m").
export OFFLINE_WAIT="${OPTARG}m"
;;
W)
export OFFLINE_RETRY_ATTEMPTS="$OPTARG"
;;
esac
done
echo ""
echo "================================================================================================================"
echo ""
echo "dhusget version: $VERSION"
echo ""
echo "USAGE: $print_script [LOGIN OPTIONS]... [SEARCH QUERY OPTIONS]... [SEARCH RESULT OPTIONS]... [DOWNLOAD OPTIONS]... "
echo ""
echo "Type '$print_script -help' for usage information"
echo ""
echo "================================================================================================================"
ISSELECTEDEXIT=false;
trap ISSELECTEDEXIT=true INT;
# Single-instance lock: creating the lock directory is atomic, so a
# second instance fails the mkdir.  CSO change: on failure only warn,
# do not exit, since a stale lock is often left by a failed download.
if [ -z $lock_file ];then
export lock_file="$WD/lock"
fi
mkdir $lock_file
if [ ! $? == 0 ]; then
echo -e "Error! An instance of \"dhusget\" retriever is running !\n Pid is: "`cat ${PIDFILE}` "if it isn't running delete the lockdir ${lock_file}"
# exit
else
echo $$ > $PIDFILE
fi
# Always remove the lock directory when the script terminates.
trap "rm -fr ${lock_file}" EXIT
export TIME_SUBQUERY="ingestiondate:[$INGEGESTION_TIME_FROM TO $INGEGESTION_TIME_TO] "
export SENSING_SUBQUERY="beginPosition:[$SENSING_TIME_FROM TO $SENSING_TIME_TO] "
# Fallback defaults for options not given on the command line.
if [ -z $THREAD_NUMBER ];then
export THREAD_NUMBER="2"
fi
if [ -z $output_folder ];then
export output_folder="PRODUCT"
fi
# Base wget command used for every HTTP request in this script.
export WC="wget --no-check-certificate"
echo "LOGIN"
printf "\n"
if [ -z $DHUS_DEST_IS_SET ];then
echo "'-d option' not specified. "
echo "Default Data Hub Service URL is: "
else
echo "Specified Data Hub Service URL is:"
fi
echo $DHUS_DEST
printf "\n"
# Credential prompting disabled (CSO change): AUTH stays unset and wget
# picks up login information from ~/.netrc instead.
#if [ ! -z $USERNAME ] && [ -z $PASSWORD ];then
#echo "You have inserted only USERNAME"
#echo ""
#fi
#
#if [ -z $USERNAME ] && [ ! -z $PASSWORD ];then
#echo "You have inserted only PASSWORD"
#echo ""
#fi
#
#if [ -z $USERNAME ];then
#	read -p "Enter username: " VAL
#	printf "\n"
#	export USERNAME=${VAL}
#fi
#
#if [ -z $PASSWORD ];then
#	read -s -p "Enter password: " VAL
#	printf "\n\n"
#	export PASSWORD=${VAL}
#fi
#
#export AUTH="--user=${USERNAME} --password=${PASSWORD}"
# Number of wget retries per request (option -N, default 5).
if [ -z $number_tries ];then
export TRIES="--tries=5"
else
export TRIES="--tries=${number_tries}"
fi
mkdir -p './logs/'
# Option -r: re-download the products listed in a previous failure file.
# The quoted script handed to `sh -c` mirrors download_product but always
# stores ".zip" files; see download_product for the '\'' quoting scheme.
if [ ! -z $check_retry ] && [ -s $FAILED_retry ]; then
cp $FAILED_retry .failed.control.retry.now.txt
export INPUT_FILE=.failed.control.retry.now.txt
mkdir -p $output_folder
if [ -f .failed.control.now.txt ]; then
rm .failed.control.now.txt
fi
cat ${INPUT_FILE} | xargs -n 4 -P ${THREAD_NUMBER} sh -c ' while : ; do
echo "Downloading product ${3} from link ${DHUS_DEST}/odata/v1/Products('\''"$1"'\'')/\$value";
${WC} ${AUTH} ${TRIES} --progress=dot -e dotbytes=10M -c --output-file=./logs/log.${3}.log -O $output_folder/${3}".zip" "${DHUS_DEST}/odata/v1/Products('\''"$1"'\'')/\$value";
test=$?;
if [ $test -eq 0 ]; then
echo "Product ${3} successfully downloaded at " `tail -2 ./logs/log.${3}.log | head -1 | awk -F"(" '\''{print $2}'\'' | awk -F")" '\''{print $1}'\''`;
remoteMD5=$( ${WC} -qO- ${AUTH} ${TRIES} -c "${DHUS_DEST}/odata/v1/Products('\''"$1"'\'')/Checksum/Value/$value" | awk -F">" '\''{print $3}'\'' | awk -F"<" '\''{print $1}'\'');
localMD5=$( openssl md5 $output_folder/${3}".zip" | awk '\''{print $2}'\'');
localMD5Uppercase=$(echo "$localMD5" | tr '\''[:lower:]'\'' '\''[:upper:]'\'');
echo "Remote MD5: $remoteMD5"
echo "Local MD5: $localMD5Uppercase"
if [ "$remoteMD5" = "$localMD5Uppercase" ]; then
echo "Product ${3} successfully MD5 checked";
else
echo "Checksum for product ${3} failed";
echo "${0} ${1} ${2} ${3}" >> .failed.control.now.txt;
if [ ! -z $save_products_failed ];then
rm $output_folder/${3}".zip"
fi
fi;
else
echo "${0} ${1} ${2} ${3}" >> .failed.control.now.txt;
if [ ! -z $save_products_failed ];then
rm $output_folder/${3}".zip"
fi
fi;
break;
done '
rm .failed.control.retry.now.txt
fi
#----- Options value check
# Report back every search/result option (or its default) to the user.
# This section only prints; the one behavioral effect is setting
# QUERY_STATEMENT="*" when no search option at all was given.
echo "================================================================================================================"
echo ""
echo "SEARCH QUERY OPTIONS"
if [ -z $TIME ] && [ -z $TIMEFILE ] && [ -z $ROW ] && [ -z $PRODUCT_TYPE ] && [ -z $FREE_SUBQUERY_CHECK ] && [ -z $SENSING_TIME ] && [ -z $MISSION ] && [ -z $INSTRUMENT ];
then
echo ""
echo "No Search Options specified. Default query is q='*'."
echo ""
export QUERY_STATEMENT="*"
else
echo ""
fi
if [ -z $MISSION ]; then
echo "'-m option' not specified. Search is performed on all available sentinel missions."
echo ""
else
echo "'-m option' mission is set to $MISSION. "
echo ""
fi
if [ -z $INSTRUMENT ]; then
echo "'-i option' not specified. Search is performed on all available instruments."
echo ""
else
echo "'-i option' instrument is set to $INSTRUMENT. "
echo ""
fi
if [ -z $TIME ]; then
echo " Ingestion date options not specified ('-t', '-s', '-e'). "
echo ""
else
if [ ! -z $isselected_filtertime_ingestion_date ] && [ ! -z $isselected_filtertime_lasthours ]; then
if [ -z $TIMEFILE ];then
echo "'-s option' and '-e option' are set to $INGEGESTION_TIME_FROM and $INGEGESTION_TIME_TO. Search is performed for all products ingested in the period [$INGEGESTION_TIME_FROM,$INGEGESTION_TIME_TO]. "
echo ""
else
echo "'-f option' is specified. Search is performed for all products ingested in the period [$INGEGESTION_TIME_FROM,$INGEGESTION_TIME_TO]. "
echo ""
fi
else
if [ ! -z $isselected_filtertime_lasthours ]; then
if [ -z $TIMEFILE ];then
echo "'-t option' is set to $TIME. Search is performed for all products ingested in the last $TIME hours. "
echo ""
fi
else
echo "'-s option' and '-e option' are set to $INGEGESTION_TIME_FROM and $INGEGESTION_TIME_TO. Search is performed for all products ingested in the period [$INGEGESTION_TIME_FROM,$INGEGESTION_TIME_TO]. "
echo ""
fi
fi
fi
if [ -z $SENSING_TIME ]; then
echo " Sensing date options not specified ('-S', '-E')."
echo ""
else
echo "'-S option' and '-E option' are set to $SENSING_TIME_FROM and $SENSING_TIME_TO. Search for all products having sensing date included in [$SENSING_TIME_FROM,$SENSING_TIME_TO]. "
echo ""
fi
if [ ! -z $TIMEFILE ] && [ -z $isselected_filtertime_ingestion_date ]; then
echo "'-f option' is set to $TIMEFILE. The ingestion date provided through $TIMEFILE is used to search all products ingested in the period [DATEINGESTION,NOW]. The file is updated with the ingestion date of the last available product found. "
echo ""
fi
if [ -z $ROW ]; then
echo "'-c option' not specified. No specified Area of Interest. Search is performed on the whole globe."
echo ""
else
echo "'-c option' is set to $x1,$y1:$x2,$y2. Search is performed on an Area of interest defined as a bounding box delimited by the two opposite vertices P1=[lon1=$x1,lat1=$y1] and P2=[lon2=$x2,lat2=$y2]. "
echo ""
fi
if [ -z $PRODUCT_TYPE ]; then
echo "'-T option' not specified. Search is performed on all available product types. "
echo ""
else
echo "'-T option' product type is set to $PRODUCT_TYPE. "
echo ""
fi
if [ ! -z $FREE_SUBQUERY_CHECK ]; then
echo "'-F option' is set to $FREE_SUBQUERY. This OpenSearch query will be in AND with other search options. "
echo ""
fi
echo "SEARCH RESULT OPTIONS"
echo ""
if [ -z $ROWS ]; then
echo "'-l option' not specified. Default Maximum Number of Results per page is 25. "
echo ""
else
echo "'-l option' is set to $ROWS. The number of results per page is $ROWS. "
echo ""
fi
if [ -z $PAGE ]; then
echo "'-P option' not specified. Default Page Number is 1. "
echo ""
else
echo "'-P option' is set to $PAGE. The page visualized is $PAGE. "
echo ""
fi
if [ -z $NAMEFILERESULTS ]; then
echo "'-q option' not specified. OpenSearch results are stored by default in ./OSquery-result.xml. "
echo ""
else
echo "'-q option' is set to $NAMEFILERESULTS. OpenSearch results are stored in $NAMEFILERESULTS. "
echo ""
fi
if [ -z $PRODUCTLIST ]; then
echo "'-C option' not specified. List of results are stored by default in the CSV file ./products-list.csv. "
echo ""
else
echo "'-C option' is set to $PRODUCTLIST. List of results are stored in the specified CSV file $PRODUCTLIST. "
echo ""
fi
# Determine whether any download will happen at all: either -o was given
# or -r requested a retry of previously failed downloads.
if [ ! -z $TO_DOWNLOAD ] || [ ! -z $check_retry ];then
CHECK_VAR=true;
else
CHECK_VAR=false;
fi
echo "DOWNLOAD OPTIONS"
echo ""
if [ $CHECK_VAR == false ];then
echo "No download options specified. No files will be downloaded. "
echo ""
fi
if [ ! -z $TO_DOWNLOAD ]; then
# BUGFIX: the original tests read `[ $TO_DOWNLOAD=="product" ]` — without
# spaces the test is a single non-empty word that is always true, so the
# "product" message was printed for every '-o' value.  Spaced (and quoted)
# comparisons restore the intended three-way report.
# (Also fixed the "downloded" typo in the 'all' branch message.)
if [ "$TO_DOWNLOAD" == "product" ]; then
echo "'-o option' is set to $TO_DOWNLOAD. Downloads are active. By default product downloads are stored in ./PRODUCT unless differently specified by the '-O option'. "
echo ""
else
if [ "$TO_DOWNLOAD" == "manifest" ]; then
echo "'-o option' is set to $TO_DOWNLOAD. Only manifest files are downloaded. Manifest files are stored in ./manifest. "
echo ""
else
echo "'-o option' is set to $TO_DOWNLOAD. Downloads are active. Products and manifest files are downloaded separately. "
echo ""
fi
fi
fi
# Report the remaining download-related options and apply the defaults
# for the offline-product retry loop (-w wait time, -W attempt count).
if [[ $CHECK_VAR == true && ! -z $OUTPUT_FOLDER_IS_SET ]]; then
echo "'-O option' is set to $output_folder. Product downloads are stored in ./$output_folder. "
echo ""
fi
if [[ $CHECK_VAR == true && -z $number_tries ]]; then
echo "'-N option' not specified. By default the number of wget download retries is 5. "
echo ""
else
if [[ $CHECK_VAR == true && ! -z $number_tries ]]; then
echo "'-N option' is set to $number_tries. The number of wget download retries is $number_tries. "
echo ""
fi
fi
if [[ $CHECK_VAR == true && -z $check_save_failed ]]; then
echo "'-R option' not specified. By default the list of products failing the MD5 integrity check is saved in ./failed_MD5_check_list.txt. "
echo ""
else
if [[ $CHECK_VAR == true && ! -z $check_save_failed ]]; then
echo "'-R option' is set to $FAILED. The list of products failing the MD5 integrity check is saved in $FAILED. "
echo ""
fi
fi
if [[ $CHECK_VAR == true && ! -z $save_products_failed ]]; then
echo "'-D option' is active. Products that have failed the MD5 integrity check are deleted from the local disks. "
echo ""
fi
if [[ $CHECK_VAR == true && ! -z $check_retry ]]; then
echo "'-r option' is set to $FAILED_retry. It retries the download of the products listed in $FAILED_retry. "
echo ""
fi
if [ ! -z $LOCK_FILE_IS_SET ]; then
echo "'-L option' is set to $lock_file. This instance of dhusget can be executed in parallel to other instances. "
echo ""
fi
if [[ $CHECK_VAR == true && -z $THREAD_NUMBER_IS_SET ]]; then
echo "'-n option' not specified. By default the number of concurrent downloads (either products or manifest files) is 2. "
echo ""
else
if [[ $CHECK_VAR == true && ! -z $THREAD_NUMBER_IS_SET ]]; then
echo "'-n option' is set to $THREAD_NUMBER. The number of concurrent downloads (either products or manifest files) is set to $THREAD_NUMBER. Attention, this value doesn't override the quota limit set on the server side for the user. "
echo ""
fi
fi
if [[ $CHECK_VAR == true && -z $OFFLINE_WAIT ]]; then
export OFFLINE_WAIT="10m"
fi
if [[ $CHECK_VAR == true && -z $OFFLINE_RETRY_ATTEMPTS ]]; then
export OFFLINE_RETRY_ATTEMPTS="40"
fi
if [ ! -z $MISSION ];then
if [ ! -z $QUERY_STATEMENT_CHECK ]; then
export QUERY_STATEMENT="$QUERY_STATEMENT AND "
fi
export QUERY_STATEMENT="$QUERY_STATEMENT platformname:$MISSION"
QUERY_STATEMENT_CHECK='OK'
fi
if [ ! -z $INSTRUMENT ];then
if [ ! -z $QUERY_STATEMENT_CHECK ]; then
export QUERY_STATEMENT="$QUERY_STATEMENT AND "
fi
export QUERY_STATEMENT="$QUERY_STATEMENT instrumentshortname:$INSTRUMENT"
QUERY_STATEMENT_CHECK='OK'
fi
if [ ! -z $PRODUCT_TYPE ];then
if [ ! -z $QUERY_STATEMENT_CHECK ]; then
export QUERY_STATEMENT="$QUERY_STATEMENT AND "
fi
export QUERY_STATEMENT="$QUERY_STATEMENT producttype:$PRODUCT_TYPE"
QUERY_STATEMENT_CHECK='OK'
fi
if [ ! -z $TIME ];then
if [ ! -z $QUERY_STATEMENT_CHECK ]; then
export QUERY_STATEMENT="$QUERY_STATEMENT AND "
fi
export QUERY_STATEMENT="$QUERY_STATEMENT ${TIME_SUBQUERY}"
QUERY_STATEMENT_CHECK='OK'
fi
if [ ! -z $SENSING_TIME ];then
if [ ! -z $QUERY_STATEMENT_CHECK ]; then
export QUERY_STATEMENT="$QUERY_STATEMENT AND "
fi
export QUERY_STATEMENT="$QUERY_STATEMENT ${SENSING_SUBQUERY}"
QUERY_STATEMENT_CHECK='OK'
fi
if [ ! -z $TIMEFILE ];then
if [ ! -z $QUERY_STATEMENT_CHECK ]; then
export QUERY_STATEMENT="$QUERY_STATEMENT AND "
fi
export QUERY_STATEMENT="$QUERY_STATEMENT ${TIME_SUBQUERY}"
QUERY_STATEMENT_CHECK='OK'
fi
if [ ! -z $FREE_SUBQUERY_CHECK ];then
if [ ! -z $QUERY_STATEMENT_CHECK ]; then
export QUERY_STATEMENT="$QUERY_STATEMENT AND "
fi
export QUERY_STATEMENT="$QUERY_STATEMENT $FREE_SUBQUERY"
QUERY_STATEMENT_CHECK='OK'
fi
#---- Prepare query polygon statement
if [ ! -z $x1 ];then
if [[ ! -z $QUERY_STATEMENT ]]; then
export QUERY_STATEMENT="$QUERY_STATEMENT AND "
fi
export GEO_SUBQUERY=`LC_NUMERIC=en_US.UTF-8; printf "( footprint:\"Intersects(POLYGON((%.13f %.13f,%.13f %.13f,%.13f %.13f,%.13f %.13f,%.13f %.13f )))\")" $x1 $y1 $x2 $y1 $x2 $y2 $x1 $y2 $x1 $y1 `
export QUERY_STATEMENT=${QUERY_STATEMENT}" ${GEO_SUBQUERY}"
else
export GEO_SUBQUERY=""
fi
#- ... append on query (without repl
# Default paging of the OpenSearch request: 25 results per page, first page.
if [ -z $ROWS ];then
export ROWS=25
fi
if [ -z $PAGE ];then
export PAGE=1
fi
# Convert the 1-based page number into the 0-based result offset:
# START = (PAGE-1) * ROWS.
START=$((PAGE-1))
START=$((START*ROWS))
export QUERY_STATEMENT="${DHUS_DEST}/search?q="${QUERY_STATEMENT}"&rows="${ROWS}"&start="${START}""
echo "HTTP request done: "$QUERY_STATEMENT""
echo ""
#--- Execute query statement
# Default name for the XML file receiving the OpenSearch response.
if [ -z $NAMEFILERESULTS ];then
export NAMEFILERESULTS="OSquery-result.xml"
fi
/bin/rm -f $NAMEFILERESULTS
# ${WC} is the wget command line, ${AUTH} the credential options, ${TRIES} the
# retry option; '-c' continues a partial download into ${NAMEFILERESULTS}.
${WC} ${AUTH} ${TRIES} -c -O "${NAMEFILERESULTS}" "${QUERY_STATEMENT}"
LASTDATE=`date -u +%Y-%m-%dT%H:%M:%S.%NZ`
sleep 5
# Scrape product UUIDs, alternative links and titles out of the response XML;
# 'tail -n +2' skips the feed-level <id>/<title> element, 'cat -n' prefixes a
# running index, the trailing sed strips a trailing '/' from each link.
cat $PWD/"${NAMEFILERESULTS}" | grep '<id>' | tail -n +2 | cut -f2 -d'>' | cut -f1 -d'<' | cat -n > .product_id_list
cat $PWD/"${NAMEFILERESULTS}" | grep '<link rel="alternative" href=' | cut -f4 -d'"' | cat -n | sed 's/\/$//'> .product_link_list
cat $PWD/"${NAMEFILERESULTS}" | grep '<title>' | tail -n +2 | cut -f2 -d'>' | cut -f1 -d'<' | cat -n > .product_title_list
# If a time file is requested, store the ingestion date of the newest product
# (plus one millisecond) so a subsequent run can continue from that point.
if [ ! -z $TIMEFILE ];then
if [ `cat "${NAMEFILERESULTS}" | grep '="ingestiondate"' | head -n 1 | cut -f2 -d'>' | cut -f1 -d'<' | wc -l` -ne 0 ];
then
lastdate=`cat $PWD/"${NAMEFILERESULTS}" | grep '="ingestiondate"' | head -n 1 | cut -f2 -d'>' | cut -f1 -d'<'`;
years=`echo $lastdate | tr "T" '\n'|head -n 1`;
hours=`echo $lastdate | tr "T" '\n'|tail -n 1`;
echo `date +%Y-%m-%d --date="$years"`"T"`date +%T.%NZ -u --date="$hours + 0.001 seconds"`> $TIMEFILE
fi
fi
# Interleave the id and title lists into the human-readable 'product_list'
# (two lines per product: "idx uuid" then "idx title").
paste -d\\n .product_id_list .product_title_list | sed 's/[",:]/ /g' > product_list
# Combine titles and links; the last sed restores the "https://" prefix that
# the character substitution just before it broke apart.
cat .product_title_list .product_link_list | sort -k1n,1 -k2r,2 | sed 's/[",:]/ /g' | sed 's/https \/\//https:\/\//' > .product_list_withlink
rm -f .product_id_list .product_link_list .product_title_list .product_ingestion_time_list
echo ""
# Print the <subtitle> summary line and take its 11th blank-separated field as
# the total product count (presumably "... of NPRODUCT total results" — the
# exact subtitle wording is server-defined; verify against the portal).
cat "${NAMEFILERESULTS}" | grep '<subtitle>' | cut -f2 -d'>' | cut -f1 -d'<' | cat -n
NPRODUCT=`cat "${NAMEFILERESULTS}" | grep '<subtitle>' | cut -f2 -d'>' | cut -f1 -d'<' | cat -n | cut -f11 -d' '`;
echo ""
# No products found: leave with error status.
if [ "${NPRODUCT}" == "0" ]; then exit 1; fi
cat .product_list_withlink
# Build the final CSV ("<product name>,<download link>" per line); default
# output name is 'products-list.csv'.
if [ -z $PRODUCTLIST ];then
export PRODUCTLIST="products-list.csv"
fi
cp .product_list_withlink $PRODUCTLIST
cat $PRODUCTLIST | cut -f2 -d$'\t' > .products-list-tmp.csv
cat .products-list-tmp.csv | grep -v 'https' > .list_name_products.csv
cat .products-list-tmp.csv | grep 'https' > .list_link_to_products.csv
paste -d',' .list_name_products.csv .list_link_to_products.csv > $PRODUCTLIST
rm .product_list_withlink .products-list-tmp.csv .list_name_products.csv .list_link_to_products.csv
export rv=0
# Download the 'manifest.safe' of every listed product through the OData
# interface when the user asked for 'manifest' or 'all'.
if [ "${TO_DOWNLOAD}" == "manifest" -o "${TO_DOWNLOAD}" == "all" ]; then
export INPUT_FILE=product_list
if [ ! -f ${INPUT_FILE} ]; then
echo "Error: Input file ${INPUT_FILE} not present "
exit
fi
mkdir -p MANIFEST/
# xargs consumes 4 whitespace-separated tokens per invocation (the two
# "idx uuid" / "idx title" lines written per product in 'product_list') and
# runs up to ${THREAD_NUMBER} shells in parallel.  Inside the sh -c script the
# first token lands in $0, so $1 is the product UUID and $3 the product name.
# Each shell retries its wget until it succeeds: `[[ $test -ne 0 ]] || break`
# breaks out of the while only on exit status 0 (it loops forever while the
# download keeps failing).
cat ${INPUT_FILE} | xargs -n 4 -P ${THREAD_NUMBER} sh -c 'while : ; do
echo "Downloading manifest ${3} from link ${DHUS_DEST}/odata/v1/Products('\''"$1"'\'')/Nodes('\''"$3".SAFE'\'')/Nodes('\'manifest.safe\'')/\$value";
${WC} ${AUTH} ${TRIES} --progress=dot -e dotbytes=10M -c --output-file=./logs/log.${3}.log -O ./MANIFEST/manifest.safe-${3} "${DHUS_DEST}/odata/v1/Products('\''"$1"'\'')/Nodes('\''"$3".SAFE'\'')/Nodes('\'manifest.safe\'')/\$value" ;
test=$?;
if [ $test -eq 0 ]; then
echo "Manifest ${3} successfully downloaded at " `tail -2 ./logs/log.${3}.log | head -1 | awk -F"(" '\''{print $2}'\'' | awk -F")" '\''{print $1}'\''`;
fi;
[[ $test -ne 0 ]] || break;
done '
fi
if [ "${TO_DOWNLOAD}" == "product" -o "${TO_DOWNLOAD}" == "all" ];then
export INPUT_FILE=product_list
mkdir -p $output_folder
#Xargs works here as a thread pool, it launches a download for each thread (P 2), each single thread checks
#if the download is completed successfully.
#The condition "[[ $? -ne 0 ]] || break" checks the first operand; if it is satisfied the break is skipped, whereas if it fails
#(download completed successfully, $?=0) the break in the OR is executed, exiting from the initial "while".
#At this point the current thread is released and another one is launched.
if [ -f .failed.control.now.txt ]; then
rm .failed.control.now.txt
fi
# Call method for product download, if the download option is selected
# remove the Temporary offline product download list before starting the current download
if [ -f ${TMP_OFFLINE_PRODUCT_LIST} ]; then
rm ${TMP_OFFLINE_PRODUCT_LIST}
fi
download_product ${INPUT_FILE} ${THREAD_NUMBER}
#echo "OFFLINE_RETRY_ATTEMPTS is ${OFFLINE_RETRY_ATTEMPTS}"
# Check if there are products offline scheduled for download and if the max number of download attempts has been reached
# (presumably download_product recreates ${TMP_OFFLINE_PRODUCT_LIST} when it
# encounters offline products — confirm in the function definition).
while [[ -f ${TMP_OFFLINE_PRODUCT_LIST} ]] && [[ ${OFFLINE_RETRY_ATTEMPTS_COUNTER} -lt ${OFFLINE_RETRY_ATTEMPTS} ]]; do
offline_products=`cat ${TMP_OFFLINE_PRODUCT_LIST} | wc -l`
echo "Found ${offline_products} offline products. The download of these products will be tried again in ${OFFLINE_WAIT:: -1} minutes"
#remove old list
if [ -f ${OFFLINE_PRODUCT_LIST} ]; then
rm ${OFFLINE_PRODUCT_LIST}
fi
#Create new input list for offline products
# Each CSV row "idx,uuid,name" is expanded into two lines ("idx uuid" and
# "idx name"), the same two-line-per-product layout used by 'product_list'.
cat "${TMP_OFFLINE_PRODUCT_LIST}" | while IFS=, read idx uuid name
do
echo "$idx $uuid" | tee -a ${OFFLINE_PRODUCT_LIST}
echo "$idx $name" | tee -a ${OFFLINE_PRODUCT_LIST}
# do something
done
rm ${TMP_OFFLINE_PRODUCT_LIST}
#echo "sleep time is ${OFFLINE_WAIT}"
# wait for a configurable number of minutes
sleep ${OFFLINE_WAIT}
# increase the number of offline products download attempts
OFFLINE_RETRY_ATTEMPTS_COUNTER=$((OFFLINE_RETRY_ATTEMPTS_COUNTER+1))
# try again to download the offline products
echo "Attempt #${OFFLINE_RETRY_ATTEMPTS_COUNTER} to get offline products..."
download_product ${OFFLINE_PRODUCT_LIST} ${THREAD_NUMBER}
done
# NOTE(review): when the retry limit is reached, ${TMP_OFFLINE_PRODUCT_LIST}
# was deleted on the last loop iteration unless the final download_product
# recreated it, so this `cat` can fail with "No such file" — confirm.
if [[ ${OFFLINE_RETRY_ATTEMPTS_COUNTER} -ge ${OFFLINE_RETRY_ATTEMPTS} ]]; then
offline_products=`cat ${TMP_OFFLINE_PRODUCT_LIST} | wc -l`
echo "Reached the maximum number of attempts to retry the offline products download".
echo "Products not downloaded are ${offline_products} ".
fi
fi
# Report the outcome of the per-product MD5 integrity checks.
# If $check_save_failed is set, the list of failed downloads is moved to the
# user-chosen $FAILED file; otherwise it goes to 'failed_MD5_check_list.txt'.
# When no failure list exists and checking was enabled, a success message is
# printed.
#
# Bug fixed: the success condition used `[ ! ISSELECTEDEXIT ]`, which negates
# the non-empty literal string "ISSELECTEDEXIT" (a `$` was missing) and is
# therefore ALWAYS false — the success message could never print.  The intent
# is presumably "ISSELECTEDEXIT not set", i.e. `[ -z "${ISSELECTEDEXIT}" ]`.
# The redundant `[ ! -f .failed.control.now.txt ]` re-test is dropped: we are
# already in the else-branch of exactly that test.  Variable expansions are
# quoted so unset values do not break the `[` syntax.
if [ ! -z "${check_save_failed}" ]; then
	if [ -f .failed.control.now.txt ]; then
		mv .failed.control.now.txt "${FAILED}"
	else
		if [ "${CHECK_VAR}" == true ] && [ -z "${ISSELECTEDEXIT}" ]; then
			echo "All downloaded products have successfully passed MD5 integrity check"
		fi
	fi
else
	if [ -f .failed.control.now.txt ]; then
		mv .failed.control.now.txt failed_MD5_check_list.txt
	else
		if [ "${CHECK_VAR}" == true ] && [ -z "${ISSELECTEDEXIT}" ]; then
			echo "All downloaded products have successfully passed MD5 integrity check"
		fi
	fi
fi
echo 'the end'
......@@ -2,7 +2,7 @@
!!!
!!! CSO - CAMS Satellite Operator
!!!
!!! Settings for S5p/Glyoxal processing.
!!! Settings for S5p/CHOCHO (glyoxal) processing.
!!!
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
......@@ -38,7 +38,7 @@ cso.s5p.chocho.inquire-table-glyretro.timerange.end : ${my.full-timera
! output table, time templates are filled with date of today;
! base path is used for searching input files:
cso.s5p.chocho.inquire-table-glyretro.file : ${my.observations}/GlyRetro/Copernicus_S5p_GLYOX_2022-09-22.csv
cso.s5p.chocho.inquire-table-glyretro.file : ${my.work}/Copernicus-inquire/Copernicus_S5p_CHOCHO_glyretro__2022-09-22.csv
! filename filters relative to listing file that should be scanned for orbit files;
! names could include time templates ;
......@@ -72,7 +72,7 @@ cso.s5p.chocho.inquire-table-pal.producttype : L2__CHOCHO
cso.s5p.chocho.inquire-table-pal.area : ${my.region.west},${my.region.south}:${my.region.east},${my.region.north}
! output table, date of today:
cso.s5p.chocho.inquire-table-pal.output.file : ${my.work}/Copernicus-inquire/Copernicus_S5p_CHOCHO_pal_%Y-%m-%d.csv
cso.s5p.chocho.inquire-table-pal.output.file : ${my.work}/Copernicus-inquire/Copernicus_S5p_CHOCHO_pal__%Y-%m-%d.csv
......@@ -86,12 +86,12 @@ cso.s5p.chocho.inquire-plot.renew : True
! listing files, here from two inquires:
cso.s5p.chocho.inquire-plot.file : ${cso.s5p.chocho.inquire-table-glyretro.file} ; \
${cso.s5p.chocho.inquire-table-pal.output.file}
!!~ specify dates ("yyyy-mm-dd") to use historic tables,
!! default is table of today:
!cso.s5p.chocho.inquire-plot.filedate : 2022-01-28 ; 2023-08-08
!~ specify dates ("yyyy-mm-dd") to use historic tables,
! default is table of today:
cso.s5p.chocho.inquire-plot.filedate : 2022-09-22 ;
! annote:
cso.s5p.chocho.inquire-plot.title : S5p/chochoal %Y-%m-%d
cso.s5p.chocho.inquire-plot.title : S5p/CHOCHO %Y-%m-%d
! output figure, date of today:
cso.s5p.chocho.inquire-plot.output.file : ${my.work}/Copernicus-inquire/Copernicus_S5p_CHOCHO__%Y-%m-%d.png
......@@ -119,8 +119,8 @@ cso.s5p.chocho.convert.timerange.end : ${my.timerange.end}
! listing file or ';'-seperated list of listing files:
cso.s5p.chocho.convert.inquire.file : ${cso.s5p.chocho.inquire-plot.file}
!!~ historic inquire ...
!cso.s5p.chocho.convert.inquire.filedate : ${cso.s5p.chocho.inquire-plot.filedate}
!~ historic inquire ...
cso.s5p.chocho.convert.inquire.filedate : ${cso.s5p.chocho.inquire-plot.filedate}
!! testing: produced only selected files ...
!!~ these files have "no-data" in "PRODUCT/delta_time", while qa_value is 1.0 ...
......@@ -646,9 +646,9 @@ cso.s5p.chocho.gridded-catalogue.input.file : ${my.gridded.dir}/%Y/
!cso.s5p.chocho.gridded-catalogue.input.file : ${my.gridded.dir}/S5p_CHOCHO_%Y%m%d_aver_gridded.nc
! target files, time tempates and variable name are replaced:
cso.s5p.chocho.gridded-catalogue.output.file : ${my.gridded-catalogue.output.dir}/%Y/%m/%d/S5p_CHOCHO_%Y%m%d_%H%M_gridded_%{var}.png
cso.s5p.chocho.gridded-catalogue.output.file : ${my.chocho.gridded-catalogue.output.dir}/%Y/%m/%d/S5p_CHOCHO_%Y%m%d_%H%M_gridded_%{var}.png
!!~ idem for daily average:
!cso.s5p.chocho.gridded-catalogue.output.file : ${my.gridded-catalogue.output.dir}/%Y/%m/%d/S5p_CHOCHO_%Y%m%d_aver_gridded_%{var}.png
!cso.s5p.chocho.gridded-catalogue.output.file : ${my.chocho.gridded-catalogue.output.dir}/%Y/%m/%d/S5p_CHOCHO_%Y%m%d_aver_gridded_%{var}.png
! figure size (inches), default is (8,6):
cso.s5p.chocho.gridded-catalogue.figsize : (6,6)
......@@ -673,7 +673,7 @@ cso.s5p.chocho.gridded-catalogue.var.yr.vmax : None
!-----------------------------------------------------------
! target location:
cso.s5p.chocho.gridded-catalogue-index.outdir : ${my.gridded-catalogue.output.dir}
cso.s5p.chocho.gridded-catalogue-index.outdir : ${my.chocho.gridded-catalogue.output.dir}
! title:
cso.s5p.chocho.gridded-catalogue-index.header : CSO catalogue
......
......@@ -23,42 +23,39 @@
!-----------------------------------------------------------------------
! inquire SciHub portal
! inquire DataSpace
!-----------------------------------------------------------------------
! Obtain names of all available S5p files.
! Stored as csv with processing type, processor version, filenames, etc.
! inquire full time range:
cso.s5p.co.inquire-table-scihub.timerange.start : ${my.full-timerange.start}
cso.s5p.co.inquire-table-scihub.timerange.end : ${my.full-timerange.end}
! full time range:
cso.s5p.co.inquire-table-dataspace.timerange.start : ${my.full-timerange.start}
cso.s5p.co.inquire-table-dataspace.timerange.end : ${my.full-timerange.end}
!
! server url ;
! provide login/password in ~/.netrc:
!
! machine s5phub.copernicus.eu login s5pguest password s5pguest
!
cso.s5p.co.inquire-table-scihub.url : https://s5phub.copernicus.eu/dhus
! API url:
cso.s5p.co.inquire-table-dataspace.url : https://finder.creodias.eu/resto/api
! collection name:
cso.s5p.co.inquire-table-dataspace.collection : Sentinel5P
! product type, always 10 characters!
! L2__CO___
! L2__CO____
! ...
cso.s5p.co.inquire-table-dataspace.producttype : L2__CO____
! target area;
!!~ empty for no limitation:
!cso.s5p.co.inquire-table-scihub.area :
!~ domain specified as: west,south:east,north
cso.s5p.co.inquire-table-scihub.area : ${my.region.west},${my.region.south}:${my.region.east},${my.region.north}
!cso.s5p.co.inquire-table-dataspace.area :
!~ domain specified as: west,south,east,north
cso.s5p.co.inquire-table-dataspace.area : ${my.region.west},${my.region.south},${my.region.east},${my.region.north}
! search query, obtained from interactive download:
!
! platformname : Sentinel-5
! producttype : L2__CO___ | L2__CO____ (always 10 characters!)
! processinglevel : L2
!
cso.s5p.co.inquire-table-scihub.query : platformname:Sentinel-5 AND \
producttype:L2__CO____ AND \
processinglevel:L2
! template for download url given "{product_id}":
cso.s5p.co.inquire-table-dataspace.download_url : https://zipper.dataspace.copernicus.eu/odata/v1/Products({product_id})/$value
! output table, date of today:
cso.s5p.co.inquire-table-scihub.output.file : ${my.work}/Copernicus-inquire/Copernicus_S5p_CO_scihub__%Y-%m-%d.csv
cso.s5p.co.inquire-table-dataspace.output.file : ${my.work}/Copernicus-inquire/Copernicus_S5p_CO_dataspace__%Y-%m-%d.csv
!-----------------------------------------------------------
......@@ -69,7 +66,7 @@ cso.s5p.co.inquire-table-scihub.output.file : ${my.work}/Copernicus-
cso.s5p.co.inquire-plot.renew : True
! listing files:
cso.s5p.co.inquire-plot.file : ${cso.s5p.co.inquire-table-scihub.output.file}
cso.s5p.co.inquire-plot.file : ${cso.s5p.co.inquire-table-dataspace.output.file}
!!~ specify dates ("yyyy-mm-dd") to use historic tables,
!! default is table of today:
!cso.s5p.co.inquire-plot.filedate : 2023-08-07
......@@ -78,12 +75,12 @@ cso.s5p.co.inquire-plot.file : ${cso.s5p.co.inquire-table-s
cso.s5p.co.inquire-plot.title : S5p/CO %Y-%m-%d
! output figure, date of today:
cso.s5p.co.inquire-plot.output.file : ${my.work}/Copernicus-inquire/Copernicus_S5p_CO_scihub__%Y-%m-%d.png
cso.s5p.co.inquire-plot.output.file : ${my.work}/Copernicus-inquire/Copernicus_S5p_CO_dataspace__%Y-%m-%d.png
!======================================================================
!===
!=== convert
!=== convert (and download)
!===
!======================================================================
......@@ -102,8 +99,8 @@ cso.s5p.co.convert.timerange.end : ${my.timerange.end}
!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
! listing of available source files,
! created by 'inquire-scihub' job:
cso.s5p.co.convert.inquire.file : ${my.work}/Copernicus-inquire/Copernicus_S5p_CO_scihub__%Y-%m-%d.csv
! created by 'inquire-dataspace' job:
cso.s5p.co.convert.inquire.file : ${my.work}/Copernicus-inquire/Copernicus_S5p_CO_dataspace__%Y-%m-%d.csv
!!~ historic inquire ...
!cso.s5p.co.convert.inquire.filedate : ${cso.s5p.co.inquire-plot.filedate}
......
......@@ -23,42 +23,39 @@
!-----------------------------------------------------------------------
! inquire SciHub portal
! inquire DataSpace
!-----------------------------------------------------------------------
! Obtain names of all available S5p files.
! Stored as csv with processing type, processor version, filenames, etc.
! inquire full time range:
cso.s5p.hcho.inquire-table-scihub.timerange.start : ${my.full-timerange.start}
cso.s5p.hcho.inquire-table-scihub.timerange.end : ${my.full-timerange.end}
! full time range:
cso.s5p.hcho.inquire-table-dataspace.timerange.start : ${my.full-timerange.start}
cso.s5p.hcho.inquire-table-dataspace.timerange.end : ${my.full-timerange.end}
!
! server url ;
! provide login/password in ~/.netrc:
!
! machine s5phub.copernicus.eu login s5pguest password s5pguest
!
cso.s5p.hcho.inquire-table-scihub.url : https://s5phub.copernicus.eu/dhus
! API url:
cso.s5p.hcho.inquire-table-dataspace.url : https://finder.creodias.eu/resto/api
! collection name:
cso.s5p.hcho.inquire-table-dataspace.collection : Sentinel5P
! product type, always 10 characters!
! L2__HCHO__
! L2__CO____
! ...
cso.s5p.hcho.inquire-table-dataspace.producttype : L2__HCHO__
! target area;
!!~ empty for no limitation:
!cso.s5p.hcho.inquire-table-scihub.area :
!~ domain specified as: west,south:east,north
cso.s5p.hcho.inquire-table-scihub.area : ${my.region.west},${my.region.south}:${my.region.east},${my.region.north}
!cso.s5p.hcho.inquire-table-dataspace.area :
!~ domain specified as: west,south,east,north
cso.s5p.hcho.inquire-table-dataspace.area : ${my.region.west},${my.region.south},${my.region.east},${my.region.north}
! search query, obtained from interactive download:
!
! platformname : Sentinel-5
! producttype : L2__HCHO__ | L2__CO____ (always 10 characters!)
! processinglevel : L2
!
cso.s5p.hcho.inquire-table-scihub.query : platformname:Sentinel-5 AND \
producttype:L2__HCHO__ AND \
processinglevel:L2
! template for download url given "{product_id}":
cso.s5p.hcho.inquire-table-dataspace.download_url : https://zipper.dataspace.copernicus.eu/odata/v1/Products({product_id})/$value
! output table, date of today:
cso.s5p.hcho.inquire-table-scihub.output.file : ${my.work}/Copernicus-inquire/Copernicus_S5p_HCHO_scihub__%Y-%m-%d.csv
cso.s5p.hcho.inquire-table-dataspace.output.file : ${my.work}/Copernicus-inquire/Copernicus_S5p_HCHO_dataspace__%Y-%m-%d.csv
!-----------------------------------------------------------
......@@ -66,24 +63,24 @@ cso.s5p.hcho.inquire-table-scihub.output.file : ${my.work}/Copernicu
!-----------------------------------------------------------
! renew existing plots?
cso.s5p.hcho.inquire-scihub-plot.renew : True
cso.s5p.hcho.inquire-plot.renew : True
! listing files:
cso.s5p.hcho.inquire-scihub-plot.file : ${cso.s5p.hcho.inquire-table-scihub.output.file}
cso.s5p.hcho.inquire-plot.file : ${cso.s5p.hcho.inquire-table-dataspace.output.file}
!!~ specify dates ("yyyy-mm-dd") to use historic tables,
!! default is table of today:
!cso.s5p.hcho.inquire-scihub-plot.filedate : 2023-08-07
!cso.s5p.hcho.inquire-plot.filedate : 2023-08-07
! annote:
cso.s5p.hcho.inquire-plot.title : S5p/HCHO %Y-%m-%d
! output figure, date of today:
cso.s5p.hcho.inquire-scihub-plot.output.file : ${my.work}/Copernicus-inquire/Copernicus_S5p_HCHO_scihub__%Y-%m-%d.png
cso.s5p.hcho.inquire-plot.output.file : ${my.work}/Copernicus-inquire/Copernicus_S5p_HCHO_dataspace__%Y-%m-%d.png
!======================================================================
!===
!=== convert
!=== convert (and download)
!===
!======================================================================
......@@ -102,10 +99,10 @@ cso.s5p.hcho.convert.timerange.end : ${my.timerange.end}
!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
! listing of available source files,
! created by 'inquire-scihub' job:
cso.s5p.hcho.convert.inquire.file : ${my.work}/Copernicus-inquire/Copernicus_S5p_HCHO_scihub__%Y-%m-%d.csv
! created by 'inquire-dataspace' job:
cso.s5p.hcho.convert.inquire.file : ${my.work}/Copernicus-inquire/Copernicus_S5p_HCHO_dataspace__%Y-%m-%d.csv
!!~ historic inquire ...
!cso.s5p.hcho.convert.inquire.filedate : ${cso.s5p.hcho.inquire-scihub-plot.filedate}
!cso.s5p.hcho.convert.inquire.filedate : ${cso.s5p.hcho.inquire-plot.filedate}
! selection keyword:
my.s5p.hcho.selection : C03
......@@ -473,8 +470,8 @@ cso.s5p.hcho.catalogue.domain : ${my.region.west} ${my.reg
cso.s5p.hcho.catalogue.figsize : ${my.region.figsize}
! renew existing files?
cso.s5p.hcho.catalogue.renew : False
!cso.s5p.hcho.catalogue.renew : True
!cso.s5p.hcho.catalogue.renew : False
cso.s5p.hcho.catalogue.renew : True
! variables to be plotted:
cso.s5p.hcho.catalogue.vars : vcd vcd_errvar qa_value cloud_fraction
......@@ -628,6 +625,7 @@ cso.s5p.hcho.gridded.filter.quality.units : 1
#error unsupported my.s5p.hcho.gridded-selection "${my.s5p.hcho.gridded-selection}"
#endif
!~
! target file, might contain templates:
......@@ -658,11 +656,11 @@ cso.s5p.hcho.gridded-catalogue.timerange.end : ${my.timerange.end}
cso.s5p.hcho.gridded-catalogue.timerange.step : hour
! renew existing files?
!cso.s5p.hcho.gridded-catalogue.renew : True
cso.s5p.hcho.gridded-catalogue.renew : False
cso.s5p.hcho.gridded-catalogue.renew : True
!cso.s5p.hcho.gridded-catalogue.renew : False
! target directory for catalogue:
my.gridded-catalogue.output.dir : ${my.gridded.dir}/catalogue
my.hcho.gridded-catalogue.output.dir : ${my.gridded.dir}/catalogue
! input files:
cso.s5p.hcho.gridded-catalogue.input.file : ${my.gridded.dir}/%Y/%m/S5p_HCHO_%Y%m%d_%H%M_gridded.nc
......@@ -670,9 +668,9 @@ cso.s5p.hcho.gridded-catalogue.input.file : ${my.gridded.dir}/%Y/%m
!cso.s5p.hcho.gridded-catalogue.input.file : ${my.gridded.dir}/S5p_HCHO_%Y%m%d_aver_gridded.nc
! target files, time tempates and variable name are replaced:
cso.s5p.hcho.gridded-catalogue.output.file : ${my.gridded-catalogue.output.dir}/%Y/%m/%d/S5p_HCHO_%Y%m%d_%H%M_gridded_%{var}.png
cso.s5p.hcho.gridded-catalogue.output.file : ${my.hcho.gridded-catalogue.output.dir}/%Y/%m/%d/S5p_HCHO_%Y%m%d_%H%M_gridded_%{var}.png
!!~ idem for daily average:
!cso.s5p.hcho.gridded-catalogue.output.file : ${my.gridded-catalogue.output.dir}/%Y/%m/%d/S5p_HCHO_%Y%m%d_aver_gridded_%{var}.png
!cso.s5p.hcho.gridded-catalogue.output.file : ${my.hcho.gridded-catalogue.output.dir}/%Y/%m/%d/S5p_HCHO_%Y%m%d_aver_gridded_%{var}.png
! figure size (inches), default is (8,6):
cso.s5p.hcho.gridded-catalogue.figsize : (6,6)
......@@ -686,7 +684,6 @@ cso.s5p.hcho.gridded-catalogue.var.yr.source : yr
cso.s5p.hcho.gridded-catalogue.var.yr.units : umol/m2
! style:
cso.s5p.hcho.gridded-catalogue.var.yr.vmin : 0.0
!gcso.s5p.hcho.gridded-catalogue.var.yr.vmax : 100.0
cso.s5p.hcho.gridded-catalogue.var.yr.vmax : None
......@@ -697,7 +694,7 @@ cso.s5p.hcho.gridded-catalogue.var.yr.vmax : None
!-----------------------------------------------------------
! target location:
cso.s5p.hcho.gridded-catalogue-index.outdir : ${my.gridded-catalogue.output.dir}
cso.s5p.hcho.gridded-catalogue-index.outdir : ${my.hcho.gridded-catalogue.output.dir}
! title:
cso.s5p.hcho.gridded-catalogue-index.header : CSO catalogue
......
......@@ -23,42 +23,39 @@
!-----------------------------------------------------------------------
! inquire SciHub portal
! inquire DataSpace
!-----------------------------------------------------------------------
! Obtain names of all available S5p files.
! Stored as csv with processing type, processor version, filenames, etc.
! inquire full time range:
cso.s5p.no2.inquire-table-scihub.timerange.start : ${my.full-timerange.start}
cso.s5p.no2.inquire-table-scihub.timerange.end : ${my.full-timerange.end}
! full time range:
cso.s5p.no2.inquire-table-dataspace.timerange.start : ${my.full-timerange.start}
cso.s5p.no2.inquire-table-dataspace.timerange.end : ${my.full-timerange.end}
!
! server url ;
! provide login/password in ~/.netrc:
!
! machine s5phub.copernicus.eu login s5pguest password s5pguest
!
cso.s5p.no2.inquire-table-scihub.url : https://s5phub.copernicus.eu/dhus
! API url:
cso.s5p.no2.inquire-table-dataspace.url : https://finder.creodias.eu/resto/api
! collection name:
cso.s5p.no2.inquire-table-dataspace.collection : Sentinel5P
! product type, always 10 characters!
! L2__NO2___
! L2__CO____
! ...
cso.s5p.no2.inquire-table-dataspace.producttype : L2__NO2___
! target area;
!!~ empty for no limitation:
!cso.s5p.no2.inquire-table-scihub.area :
!~ domain specified as: west,south:east,north
cso.s5p.no2.inquire-table-scihub.area : ${my.region.west},${my.region.south}:${my.region.east},${my.region.north}
!cso.s5p.no2.inquire-table-dataspace.area :
!~ domain specified as: west,south,east,north
cso.s5p.no2.inquire-table-dataspace.area : ${my.region.west},${my.region.south},${my.region.east},${my.region.north}
! search query, obtained from interactive download:
!
! platformname : Sentinel-5
! producttype : L2__NO2___ | L2__CO____ (always 10 characters!)
! processinglevel : L2
!
cso.s5p.no2.inquire-table-scihub.query : platformname:Sentinel-5 AND \
producttype:L2__NO2___ AND \
processinglevel:L2
! template for download url given "{product_id}":
cso.s5p.no2.inquire-table-dataspace.download_url : https://zipper.dataspace.copernicus.eu/odata/v1/Products({product_id})/$value
! output table, date of today:
cso.s5p.no2.inquire-table-scihub.output.file : ${my.work}/Copernicus-inquire/Copernicus_S5p_NO2_scihub__%Y-%m-%d.csv
cso.s5p.no2.inquire-table-dataspace.output.file : ${my.work}/Copernicus-inquire/Copernicus_S5p_NO2_dataspace__%Y-%m-%d.csv
!!-----------------------------------------------------------------------
......@@ -96,7 +93,7 @@ cso.s5p.no2.inquire-table-scihub.output.file : ${my.work}/Copernicus
cso.s5p.no2.inquire-plot.renew : True
! listing files:
cso.s5p.no2.inquire-plot.file : ${cso.s5p.no2.inquire-table-scihub.output.file}
cso.s5p.no2.inquire-plot.file : ${cso.s5p.no2.inquire-table-dataspace.output.file}
!!~ specify dates ("yyyy-mm-dd") to use historic tables,
!! default is table of today:
!cso.s5p.no2.inquire-plot.filedate : 2023-08-07
......@@ -105,7 +102,7 @@ cso.s5p.no2.inquire-plot.file : ${cso.s5p.no2.inquire-table
cso.s5p.no2.inquire-plot.title : S5p/NO2 %Y-%m-%d
! output figure, date of today:
cso.s5p.no2.inquire-plot.output.file : ${my.work}/Copernicus-inquire/Copernicus_S5p_NO2_scihub__%Y-%m-%d.png
cso.s5p.no2.inquire-plot.output.file : ${my.work}/Copernicus-inquire/Copernicus_S5p_NO2_dataspace__%Y-%m-%d.png
!======================================================================
......@@ -129,8 +126,8 @@ cso.s5p.no2.convert.timerange.end : ${my.timerange.end}
!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
! listing of available source files,
! created by 'inquire-scihub' job:
cso.s5p.no2.convert.inquire.file : ${my.work}/Copernicus-inquire/Copernicus_S5p_NO2_scihub__%Y-%m-%d.csv
! created by 'inquire-dataspace' job:
cso.s5p.no2.convert.inquire.file : ${my.work}/Copernicus-inquire/Copernicus_S5p_NO2_dataspace__%Y-%m-%d.csv
!!~ historic inquire ...
!cso.s5p.no2.convert.inquire.filedate : ${cso.s5p.no2.inquire-plot.filedate}
......@@ -717,9 +714,9 @@ cso.s5p.no2.gridded-catalogue.input.file : ${my.gridded.dir}/%Y/%m/
!cso.s5p.no2.gridded-catalogue.input.file : ${my.gridded.dir}/S5p_NO2_%Y%m%d_aver_gridded.nc
! target files, time tempates and variable name are replaced:
cso.s5p.no2.gridded-catalogue.output.file : ${my.gridded-catalogue.output.dir}/%Y/%m/%d/S5p_NO2_%Y%m%d_%H%M_gridded_%{var}.png
cso.s5p.no2.gridded-catalogue.output.file : ${my.no2.gridded-catalogue.output.dir}/%Y/%m/%d/S5p_NO2_%Y%m%d_%H%M_gridded_%{var}.png
!!~ idem for daily average:
!cso.s5p.no2.gridded-catalogue.output.file : ${my.gridded-catalogue.output.dir}/%Y/%m/%d/S5p_NO2_%Y%m%d_aver_gridded_%{var}.png
!cso.s5p.no2.gridded-catalogue.output.file : ${my.no2.gridded-catalogue.output.dir}/%Y/%m/%d/S5p_NO2_%Y%m%d_aver_gridded_%{var}.png
! figure size (inches), default is (8,6):
cso.s5p.no2.gridded-catalogue.figsize : (6,6)
......@@ -743,7 +740,7 @@ cso.s5p.no2.gridded-catalogue.var.yr.vmax : 100.0
!-----------------------------------------------------------
! target location:
cso.s5p.no2.gridded-catalogue-index.outdir : ${my.gridded-catalogue.output.dir}
cso.s5p.no2.gridded-catalogue-index.outdir : ${my.no2.gridded-catalogue.output.dir}
! title:
cso.s5p.no2.gridded-catalogue-index.header : CSO catalogue
......
......@@ -23,42 +23,39 @@
!-----------------------------------------------------------------------
! inquire SciHub portal
! inquire DataSpace
!-----------------------------------------------------------------------
! Obtain names of all available S5p files.
! Stored as csv with processing type, processor version, filenames, etc.
! inquire full time range:
cso.s5p.so2.inquire-table-scihub.timerange.start : ${my.full-timerange.start}
cso.s5p.so2.inquire-table-scihub.timerange.end : ${my.full-timerange.end}
! full time range:
cso.s5p.so2.inquire-table-dataspace.timerange.start : ${my.full-timerange.start}
cso.s5p.so2.inquire-table-dataspace.timerange.end : ${my.full-timerange.end}
!
! server url ;
! provide login/password in ~/.netrc:
!
! machine s5phub.copernicus.eu login s5pguest password s5pguest
!
cso.s5p.so2.inquire-table-scihub.url : https://s5phub.copernicus.eu/dhus
! API url:
cso.s5p.so2.inquire-table-dataspace.url : https://finder.creodias.eu/resto/api
! collection name:
cso.s5p.so2.inquire-table-dataspace.collection : Sentinel5P
! product type, always 10 characters!
! L2__SO2___
! L2__CO____
! ...
cso.s5p.so2.inquire-table-dataspace.producttype : L2__SO2___
! target area;
!!~ empty for no limitation:
!cso.s5p.so2.inquire-table-scihub.area :
!~ domain specified as: west,south:east,north
cso.s5p.so2.inquire-table-scihub.area : ${my.region.west},${my.region.south}:${my.region.east},${my.region.north}
!cso.s5p.so2.inquire-table-dataspace.area :
!~ domain specified as: west,south,east,north
cso.s5p.so2.inquire-table-dataspace.area : ${my.region.west},${my.region.south},${my.region.east},${my.region.north}
! search query, obtained from interactive download:
!
! platformname : Sentinel-5
! producttype : L2__SO2___ | L2__CO____ (always 10 characters!)
! processinglevel : L2
!
cso.s5p.so2.inquire-table-scihub.query : platformname:Sentinel-5 AND \
producttype:L2__SO2___ AND \
processinglevel:L2
! template for download url given "{product_id}":
cso.s5p.so2.inquire-table-dataspace.download_url : https://zipper.dataspace.copernicus.eu/odata/v1/Products({product_id})/$value
! output table, date of today:
cso.s5p.so2.inquire-table-scihub.output.file : ${my.work}/Copernicus-inquire/Copernicus_S5p_SO2_scihub__%Y-%m-%d.csv
cso.s5p.so2.inquire-table-dataspace.output.file : ${my.work}/Copernicus-inquire/Copernicus_S5p_SO2_dataspace__%Y-%m-%d.csv
!-----------------------------------------------------------
......@@ -69,7 +66,7 @@ cso.s5p.so2.inquire-table-scihub.output.file : ${my.work}/Copernicus
cso.s5p.so2.inquire-plot.renew : True
! listing files:
cso.s5p.so2.inquire-plot.file : ${cso.s5p.so2.inquire-table-scihub.output.file}
cso.s5p.so2.inquire-plot.file : ${cso.s5p.so2.inquire-table-dataspace.output.file}
!!~ specify dates ("yyyy-mm-dd") to use historic tables,
!! default is table of today:
!cso.s5p.so2.inquire-plot.filedate : 2023-08-07
......@@ -78,7 +75,7 @@ cso.s5p.so2.inquire-plot.file : ${cso.s5p.so2.inquire-table
cso.s5p.so2.inquire-plot.title : S5p/SO2 %Y-%m-%d
! output figure, date of today:
cso.s5p.so2.inquire-plot.output.file : ${my.work}/Copernicus-inquire/Copernicus_S5p_SO2_scihub__%Y-%m-%d.png
cso.s5p.so2.inquire-plot.output.file : ${my.work}/Copernicus-inquire/Copernicus_S5p_SO2_dataspace__%Y-%m-%d.png
!======================================================================
......@@ -102,8 +99,8 @@ cso.s5p.so2.convert.timerange.end : ${my.timerange.end}
!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
! listing of available source files,
! created by 'inquire-scihub' job:
cso.s5p.so2.convert.inquire.file : ${my.work}/Copernicus-inquire/Copernicus_S5p_SO2_scihub__%Y-%m-%d.csv
! created by 'inquire-dataspace' job:
cso.s5p.so2.convert.inquire.file : ${my.work}/Copernicus-inquire/Copernicus_S5p_SO2_dataspace__%Y-%m-%d.csv
!!~ historic inquire ...
!cso.s5p.so2.convert.inquire.filedate : ${cso.s5p.so2.inquire-plot.filedate}
......@@ -503,6 +500,7 @@ cso.s5p.so2.catalogue-index.date.orbit.var.img : %{date[0:4]}/%{dat
cso.s5p.so2.catalogue-index.date.orbit.var.kwargs : height=300
!======================================================================
!===
!=== gridded orbits
......@@ -580,6 +578,7 @@ cso.s5p.so2.gridded.filter.quality.units : 1
#error unsupported my.s5p.so2.gridded-selection "${my.s5p.so2.gridded-selection}"
#endif
!~
! target file, might contain templates:
......@@ -610,8 +609,8 @@ cso.s5p.so2.gridded-catalogue.timerange.end : ${my.timerange.end}
cso.s5p.so2.gridded-catalogue.timerange.step : hour
! renew existing files?
!cso.s5p.so2.gridded-catalogue.renew : True
cso.s5p.so2.gridded-catalogue.renew : False
cso.s5p.so2.gridded-catalogue.renew : True
!cso.s5p.so2.gridded-catalogue.renew : False
! target directory for catalogue:
my.so2.gridded-catalogue.output.dir : ${my.gridded.dir}/catalogue
......@@ -622,9 +621,9 @@ cso.s5p.so2.gridded-catalogue.input.file : ${my.gridded.dir}/%Y/%m/
!cso.s5p.so2.gridded-catalogue.input.file : ${my.gridded.dir}/S5p_SO2_%Y%m%d_aver_gridded.nc
! target files, time tempates and variable name are replaced:
cso.s5p.so2.gridded-catalogue.output.file : ${my.gridded-catalogue.output.dir}/%Y/%m/%d/S5p_SO2_%Y%m%d_%H%M_gridded_%{var}.png
cso.s5p.so2.gridded-catalogue.output.file : ${my.so2.gridded-catalogue.output.dir}/%Y/%m/%d/S5p_SO2_%Y%m%d_%H%M_gridded_%{var}.png
!!~ idem for daily average:
!cso.s5p.so2.gridded-catalogue.output.file : ${my.gridded-catalogue.output.dir}/%Y/%m/%d/S5p_SO2_%Y%m%d_aver_gridded_%{var}.png
!cso.s5p.so2.gridded-catalogue.output.file : ${my.so2.gridded-catalogue.output.dir}/%Y/%m/%d/S5p_SO2_%Y%m%d_aver_gridded_%{var}.png
! figure size (inches), default is (8,6):
cso.s5p.so2.gridded-catalogue.figsize : (6,6)
......@@ -648,7 +647,7 @@ cso.s5p.so2.gridded-catalogue.var.yr.vmax : 100.0
!-----------------------------------------------------------
! target location:
cso.s5p.so2.gridded-catalogue-index.outdir : ${my.gridded-catalogue.output.dir}
cso.s5p.so2.gridded-catalogue-index.outdir : ${my.so2.gridded-catalogue.output.dir}
! title:
cso.s5p.so2.gridded-catalogue-index.header : CSO catalogue
......
......@@ -98,9 +98,9 @@ my.attr.institution : CSO
my.attr.email : Your.Name@cso.org
! base location for work directories:
my.work : /Scratch/${USER}/CSO-work
my.work : /Scratch/${USER}/CSO-Copernicus
! storage for downloaded observations, used for glyoxal:
! storage for downloaded observations:
my.observations : /Scratch/${USER}/observations
!..............................
......@@ -113,11 +113,26 @@ my.attr.institution : MET Norway
my.attr.email : arjos@met.no
! base location for work directories:
my.work : ${WORK}/projects/CSO
my.work : ${WORK}/projects/CSO-Copernicus
! storage for downloaded observations, used for glyoxal:
! storage for downloaded observations:
my.observations : ${WORK}/../observations
!..............................
#elif "${USER}" == "segersaj"
!..............................
! Attributes written to output files.
my.attr.author : Arjo Segers
my.attr.institution : TNO
my.attr.email : Arjo.Segers@tno.nl
! base location for work directories:
my.work : ${SCRATCH}/CSO-Copernicus
! storage for downloaded observations:
my.observations : ${SCRATCH}/observations
!..............................
#else
#error unsupported USER "${USER}"
......
......@@ -54,7 +54,6 @@ cso.s5p.TRACER.elements : inquire \
convert listing \
catalogue \
gridded gridded-catalogue
!
!~ preprocessor steps one by one ...
!cso.s5p.TRACER.elements : inquire
!cso.s5p.TRACER.elements : convert
......@@ -74,7 +73,7 @@ cso.s5p.TRACER.elements : inquire \
cso.s5p.TRACER.inquire.class : utopya.UtopyaJobStep
! two or more tasks:
#if "TRACER" in ["no2","so2","hcho","co"]
cso.s5p.TRACER.inquire.tasks : table-scihub plot
cso.s5p.TRACER.inquire.tasks : table-dataspace plot
#elif "TRACER" in ["so2-cobra"]
cso.s5p.TRACER.inquire.tasks : table-pal plot
#elif "TRACER" in ["chocho"]
......@@ -82,16 +81,16 @@ cso.s5p.TRACER.inquire.tasks : table-glyretro table-pal pl
#else
#error unsupported tracer "TRACER"
#endif
!~ inquire files available on SciHub:
cso.s5p.TRACER.inquire.table-scihub.class : cso.CSO_SciHub_Inquire
cso.s5p.TRACER.inquire.table-scihub.args : '${PWD}/config/Copernicus/cso-s5p-TRACER.rc', \
rcbase='cso.s5p.TRACER.inquire-table-scihub'
!~ inquire files available on DataSpace:
cso.s5p.TRACER.inquire.table-dataspace.class : cso.CSO_DataSpace_Inquire
cso.s5p.TRACER.inquire.table-dataspace.args : '${PWD}/config/Copernicus/cso-s5p-TRACER.rc', \
rcbase='cso.s5p.TRACER.inquire-table-dataspace'
!~ inquire files available on PAL:
cso.s5p.TRACER.inquire.table-pal.class : cso.CSO_PAL_Inquire
cso.s5p.TRACER.inquire.table-pal.args : '${PWD}/config/Copernicus/cso-s5p-TRACER.rc', \
rcbase='cso.s5p.TRACER.inquire-table-pal'
!~ inquire files downloaded from GlyRetro:
cso.s5p.TRACER.inquire.table-glyretro.class : cso.CSO_SciHub_Listing
cso.s5p.TRACER.inquire.table-glyretro.class : cso.CSO_S5p_Download_Listing
cso.s5p.TRACER.inquire.table-glyretro.args : '${PWD}/config/Copernicus/cso-s5p-TRACER.rc', \
rcbase='cso.s5p.TRACER.inquire-table-glyretro'
!~ create plot of available versions:
......
......@@ -21,11 +21,11 @@ cso.tutorial.class : utopya.UtopyaJobTree
! list of sub-elements:
cso.tutorial.elements : inquire \
convert listing
convert listing \
catalogue \
gridded gridded-catalogue \
sim-catalogue \
sim-gridded sim-gridded-catalogue
gridded gridded-catalogue
! sim-catalogue \
! sim-gridded sim-gridded-catalogue
! ... preprocessor steps one by one:
!cso.tutorial.elements : inquire
......@@ -47,12 +47,12 @@ cso.tutorial.elements : inquire \
! single step:
cso.tutorial.inquire.class : utopya.UtopyaJobStep
! two tasks:
cso.tutorial.inquire.tasks : table-scihub plot
!~ inquire available files:
cso.tutorial.inquire.table-scihub.class : cso.CSO_SciHub_Inquire
cso.tutorial.inquire.table-scihub.args : '${__filename__}', \
rcbase='cso.tutorial.inquire-table-scihub'
!~ create plot of available versions:
cso.tutorial.inquire.tasks : table-dataspace plot
!~ task: inquire available files:
cso.tutorial.inquire.table-dataspace.class : cso.CSO_DataSpace_Inquire
cso.tutorial.inquire.table-dataspace.args : '${__filename__}', \
rcbase='cso.tutorial.inquire-table-dataspace'
!~ task: create plot of available versions:
cso.tutorial.inquire.plot.class : cso.CSO_Inquire_Plot
cso.tutorial.inquire.plot.args : '${__filename__}', \
rcbase='cso.tutorial.inquire-plot'
......@@ -222,8 +222,8 @@ my.attr.institution : CSO
my.attr.email : Your.Name@cso.org
! base location for work directories:
!my.work : /Scratch/${USER}/CSO-tutorial
my.work : ${WORK}/projects/CSO-tutorial
!my.work : /work/${USER}/CSO-Tutorial
my.work : /Scratch/${USER}/CSO-Tutorial
!----------------------------------------------------------
......@@ -254,35 +254,32 @@ my.work : ${WORK}/projects/CSO-tutorial
! Stored as csv with processing type, processor version, filenames, etc.
! full time range:
cso.tutorial.inquire-table-scihub.timerange.start : 2018-01-01 00:00:00
cso.tutorial.inquire-table-scihub.timerange.end : 2024-01-01 00:00:00
cso.tutorial.inquire-table-dataspace.timerange.start : 2018-01-01 00:00:00
cso.tutorial.inquire-table-dataspace.timerange.end : 2024-01-01 00:00:00
!
! server url ;
! provide login/password in ~/.netrc:
!
! machine s5phub.copernicus.eu login s5pguest password s5pguest
!
cso.tutorial.inquire-table-scihub.url : https://s5phub.copernicus.eu/dhus
! API url:
cso.tutorial.inquire-table-dataspace.url : https://finder.creodias.eu/resto/api
! collection name:
cso.tutorial.inquire-table-dataspace.collection : Sentinel5P
! product type, always 10 characters!
! L2__NO2___
! L2__CO____
! ...
cso.tutorial.inquire-table-dataspace.producttype : L2__NO2___
! target area;
!!~ empty for no limitation:
!cso.tutorial.inquire-table-scihub.area :
!~ domain specified as: west,south:east,north
cso.tutorial.inquire-table-scihub.area : ${my.region.west},${my.region.south}:${my.region.east},${my.region.north}
!cso.tutorial.inquire-table-dataspace.area :
!~ domain specified as: west,south,east,north
cso.tutorial.inquire-table-dataspace.area : ${my.region.west},${my.region.south},${my.region.east},${my.region.north}
! search query, obtained from interactive download:
!
! platformname : Sentinel-5
! producttype : L2__NO2___ | L2__CO____ (always 10 characters!)
! processinglevel : L2
!
cso.tutorial.inquire-table-scihub.query : platformname:Sentinel-5 AND \
producttype:L2__NO2___ AND \
processinglevel:L2
! template for download url given "{product_id}":
cso.tutorial.inquire-table-dataspace.download_url : https://zipper.dataspace.copernicus.eu/odata/v1/Products({product_id})/$value
! output table, date of today:
cso.tutorial.inquire-table-scihub.output.file : ${my.work}/Copernicus/Copernicus_S5p_NO2_scihub_%Y-%m-%d.csv
cso.tutorial.inquire-table-dataspace.output.file : ${my.work}/Copernicus/Copernicus_S5p_NO2_dataspace__%Y-%m-%d.csv
! * create plot
......@@ -291,7 +288,7 @@ cso.tutorial.inquire-table-scihub.output.file : ${my.work}/Copernicu
cso.tutorial.inquire-plot.renew : True
! listing files:
cso.tutorial.inquire-plot.file : ${cso.tutorial.inquire-table-scihub.output.file}
cso.tutorial.inquire-plot.file : ${cso.tutorial.inquire-table-dataspace.output.file}
!!~ specify dates ("yyyy-mm-dd") to use historic tables,
!! default is table of today:
!cso.tutorial.inquire-plot.filedate : 2022-01-28
......@@ -300,7 +297,7 @@ cso.tutorial.inquire-plot.file : ${cso.tutorial.inquire-tab
cso.tutorial.inquire-plot.title : S5p/NO2 %Y-%m-%d
! output figure, date of today:
cso.tutorial.inquire-plot.output.file : ${my.work}/Copernicus/Copernicus_S5p_NO2_${my.region}__%Y-%m-%d.png
cso.tutorial.inquire-plot.output.file : ${my.work}/Copernicus/Copernicus_S5p_NO2_dataspace__%Y-%m-%d.png
!======================================================================
......@@ -324,12 +321,9 @@ cso.tutorial.convert.timerange.end : ${my.timerange.end}
!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
! listing of available source files, created by 'inquire' job:
cso.tutorial.convert.inquire.file : ${my.work}/Copernicus/Copernicus_S5p_NO2_scihub_%Y-%m-%d.csv
!!~ historic inquire ...
!cso.tutorial.convert.inquire.filedate : 2022-02-02
! selection keyword:
my.s5p.no2.selection : C03
cso.tutorial.convert.inquire.file : ${my.work}/Copernicus/Copernicus_S5p_NO2_dataspace__%Y-%m-%d.csv
!~ historic inquire ...
cso.tutorial.convert.inquire.filedate : 2023-11-06
! Provide a ';'-separated list of expressions to decide if a particular orbit file should be processed.
! If more than one file is available for a particular orbit (from "OFFL" and "RPRO" processing),
......@@ -339,12 +333,8 @@ my.s5p.no2.selection : C03
! (%{collection} == '03') and (%{processing} == 'RPRO') ; \
! (%{collection} == '03') and (%{processing} == 'OFFL')
!
#if "${my.s5p.no2.selection}" == "C03"
cso.tutorial.convert.selection : (%{collection} == '03') and (%{processing} == 'RPRO') ; \
(%{collection} == '03') and (%{processing} == 'OFFL')
#else
#error unsupported my.s5p.no2.selection "${my.s5p.no2.selection}"
#endif
! input directory;
! files are searched here or downloaded to if not present yet;
......
......@@ -22,7 +22,7 @@ copyright = '2020-2023, Arjo Segers'
author = 'Arjo Segers'
# The full version, including alpha/beta/rc tags
release = 'v2.4'
release = 'v2.5'
# -- General configuration ---------------------------------------------------
......
......@@ -53,7 +53,7 @@ The source tree of the documentation files is:
* `pymod-cso_colocate.rst <../../source/pymod-cso_colocate.rst>`_
* `pymod-cso_plot.rst <../../source/pymod-cso_plot.rst>`_
* `pymod-cso_s5p.rst <../../source/pymod-cso_s5p.rst>`_
* `pymod-cso_scihub.rst <../../source/pymod-cso_scihub.rst>`_
* `pymod-cso_dataspace.rst <../../source/pymod-cso_dataspace.rst>`_
* `pymods.rst <../../source/pymods.rst>`_
* `documentation.rst <../../source/documentation.rst>`_
......@@ -173,7 +173,7 @@ Example taken from introduction of the 'cso_s5p' module.
.. toctree::
:maxdepth: 1
pymod-cso_scihub
pymod-cso_dataspace
pymod-cso_s5p <---
:
......