#!/bin/bash
#
# wget_modis_AV_v2.2.1.sh
#
################################################################################
# WisconsinView bash script for harvesting MODIS products from the
# Direct Readout facility at the Space Science & Engineering Center
# Madison, Wisconsin. This script is intended to be modified
# for use by AmericaView member states who wish to download daily
# MODIS products to their local machine. This script requires an operating
# system that supports the bash shell scripting environment such as Linux.
# I developed it on a RHEL AS4 operating system and have not tested it
# elsewhere.
#
# You are free to use and modify this script as necessary for your purposes.
# Please acknowledge on your website "MODIS imagery courtesy of WisconsinView
# and the Space Science & Engineering Center at University of Wisconsin-Madison"
# if you choose to use the images or this script. Thanks!
#
# Version 1.0.0 2008_11_19 by Sam Batzli: connect and download with ftp.
# Version 1.1.0 2009_02_06 changed name of script from "terra_get_all.sh" to
#                          "modis_get_all_AV.sh"
#                          added Aqua download to happen during the same
#                          connection: now gets 30 files a day
#                          added directory structure for target to sort by year
#                          and day-of-year .../yyyy/yyyyddd
#                          added version number to this document's file name
# Version 2.0.0 2009_02_07 changed from ftp to wget because of issues running
#                          under cron. Now also seems faster.
# Version 2.0.5 2009_02_07 added a soft link command (ln -s) for a
#                          "latest" directory at the top level
# Version 2.1.0 2009_02_08 added a meta file generator to capture variables in
#                          php format for each day for web parsing
# Version 2.2.0 2009_02_10 file name changed to "wget_modis_AV_v2.2.0"
# Version 2.2.1 2009_03_04 clean-up for distribution to AV users.
#
# I run my harvester on a cron that looks like this. I set the shell because
# the default is /bin/sh and I want bash, but this may not be necessary.
# The cron is basic and runs every 10 min each hour from 11am to 5pm.
# It sends messages and errors to trash. I created it under my login with
# the command: crontab -e (and the usual vi editing keystrokes, "i" to insert,
# ":wq" to write and quit).
#
# SHELL=/bin/bash
# */10 11,12,13,14,15,16,17 * * * /home/batzli/wget_modis_AV_v2.2.1.sh > /dev/null 2>&1
#
# [future enhancement ideas: filter by file existence and size (if .tif
# exists and < xxbytes, don't get any products)]
# [wget a file list or similar for comparison]
# [if error, exit gracefully and send errors to an error file]
# [address timezone issues/options]
#
################################################################################
# Define variables. Change as needed: ##########################################

# For now sensor will always be "modis" so don't change this one.
sensor="modis"

# We collect imagery from both Terra (late morning) and Aqua (mid afternoon)
# whenever possible. This script collects both sets of files by running wget
# multiple times.
platform1="terra"
platform2="aqua"

# If you are downloading individual files rather than all 15 for each platform,
# you need this sensor code to construct the file name. It is also useful
# for metadata production and referencing files by name later.
platform_code1="t1"
platform_code2="a1"

# This grabs your local computer's current date as day-of-year (yyddd or
# yyyyddd). This is needed for the local directory and to construct an
# individual file name. Sometimes it is helpful to specify a previous day.
# For example, "yesterday" can be defined like this:
#   "$(date +%y%j -d "-1 day")"
# so we use an "offset" variable to back-cast as needed. Usually this is set
# to zero unless you need to back fill. Only works up to "7".
offset="0"

# (yyddd) used for specific file names both in download and metadata.
fileday="$(date +%y%j -d "-$offset day")"

# (ddd) day of year, used in combination with year for directory construction.
yday="$(date +%j -d "-${offset:-0} day")"

# (yyyy) used in metadata generation and directory construction.
# NOTE: the offset is applied here too, so that back-filling across a year
# boundary lands in the correct year directory (previously %Y ignored the
# offset and used today's year).
year="$(date +%Y -d "-${offset:-0} day")"

# (Month full name -- use %b for abbreviation) used in metadata generator.
# Offset applied so back-filled days report the correct month.
month="$(date +%B -d "-${offset:-0} day")"

# (Day full name -- use %a for abbreviation) used in metadata generator.
dayname="$(date +%A -d "-${offset:-0} day")"

# (dd) day of month used for metadata generator.
day="$(date +%d -d "-${offset:-0} day")"

# (ZZZ) timezone of the local computer.
timezone="$(date +%Z)"

# (12hr time) time of latest download attempt.
# Helpful for troubleshooting your cron.
updatetime="$(date +%r)"

# This grabs the local computer's current date as yyyy_mm_dd_ddd and is needed
# to construct the path for all files. Sometimes it is helpful to get a
# previous day's image. For example, yesterday's image path can be defined
# like this: "$(date +%Y_%m_%d_%j -d "-1 day")", so we use the $offset
# variable for consistency and metadata production. ${offset:-0} defaults to
# zero if offset was never set, so the date calls cannot fail on an empty
# expansion.
date_path="$(date +%Y_%m_%d_%j -d "-${offset:-0} day")"

# Define the State product group (e.g. AV=AmericaView, WI=Wisconsin). Use the
# standard 2-letter abbreviation for the state. This is needed for individual
# file downloads and bulk downloading. You can browse the ftp directory
# manually to verify the state abbreviation.
state="AV_IA"

# If you want all 30 products (TIFFs and JPEGs) for the current state, use
# wildcard notation, or specify a specific product name (suffix only --
# everything after the "state" variable).
product1="*$state*"    # all standard AVstate products
product2="*$state*"    # all standard AVstate products
#product3="*143.250m.tif"    # example of single product
#product4="*$state*.tif"     # example of all .tif files

################################################################################
# Define file names and paths for the local files.
# Modify to match the target base and receiving directory on your machine.
# If the directory already exists it is left untouched; nothing is
# overwritten.
local_dirbase="/home/batzli/modis"

# Build .../yyyy/yyyyddd under the base directory. mkdir -p creates any
# missing parents, succeeds quietly when the directory already exists, and
# never overwrites existing content. Paths are quoted so the script is safe
# even if the base directory is changed to one containing spaces.
local_dir="$local_dirbase/$year/$year$yday"
mkdir -p -- "$local_dir"

# Define location of data to download #########################################
host_url="ftp://ftp.ssec.wisc.edu"
remote_dir1="/pub/eosdb/$platform1/$sensor/$date_path/subsets/"
remote_dir2="/pub/eosdb/$platform2/$sensor/$date_path/subsets/"

# Modify and un-comment only if getting a single file #########################
# Define URL information for wget. For example: t1.08345.AV_WI.143.250m.tif
# WARNING: I have not tested this -- SAB 2009_02_09
#filename="$platform_code.$fileday.$state.$product3"
#wget -N -q "$host_url$remote_dir1$filename"

# Run wget for all stateview products. ########################################
# The -N flag sets a time stamp so that only newer files overwrite older
# files; -q runs quietly. The downloads run only if we actually reached the
# target directory -- an unchecked cd would otherwise dump the files into
# whatever the current working directory happens to be (e.g. under cron).
if cd "$local_dir"; then
  wget -N -q "$host_url$remote_dir1$product1"
  wget -N -q "$host_url$remote_dir2$product2"
else
  echo "ERROR: cannot cd to $local_dir -- skipping downloads" >&2
fi

###############################################################################
# OPTIONAL: Make link from the latest download to a directory called "latest"
# (for daily quick look in web page). Remove old link. Not sure this command
# is necessary but I was getting redundant linking without it.
#rm $local_dirbase/latest
# if the link already exists, this command fails and is skipped.
#ln -s $local_dir $local_dirbase/latest

# OPTIONAL: make metadata file that can be used as an "include" for webpages.
# NOTE(review): the echo bodies appear to have been stripped in transit
# (presumably they once emitted PHP/HTML markup using the variables defined
# above); the empty echo lines are preserved as-is. Restore the original
# content before enabling the call at the bottom of the script.
gen_meta() {
  echo ""
  echo ""
  echo ""
  echo ""
  echo ""
  echo ""
  echo ""
  echo ""
  echo ""
  echo ""
  echo ""
  echo ""
  echo ""
  echo ""
}

# Un-comment the next line to run the metadata function. WARNING: if the file
# already exists, this command OVERWRITES the existing meta.php file.
#gen_meta > "$local_dir/meta.php"