diff --git a/scripts/download_db.sh b/scripts/download_db.sh
index fbf594f..d50681c 100755
--- a/scripts/download_db.sh
+++ b/scripts/download_db.sh
@@ -30,18 +30,50 @@ cd "$DATA_DIR"
 
 # get date as YYYY-MM-DD
 get_date() {
+  local offset=${1:-0}
   # macOS
   if [[ "$OSTYPE" == "darwin"* ]]; then
-    date -v -"$1"d '+%Y-%m-%d'
+    date -u -v -"$offset"d '+%Y-%m-%d'
   # linux
   else
-    date -d "-$1 days" '+%Y-%m-%d'
+    date -u -d "-$offset days" '+%Y-%m-%d'
   fi
 }
 
+# most recent UTC date+hour as YYYY-MM-DD_HH00, offset back by N hours.
+# Deriving date and hour from the same offset keeps the pair consistent
+# when the offset crosses midnight (e.g. 00:30 UTC with offset 2 -> yesterday_2200).
+get_stamp() {
+  local offset=${1:-0}
+  # macOS
+  if [[ "$OSTYPE" == "darwin"* ]]; then
+    date -u -v -"$offset"H '+%Y-%m-%d_%H00'
+  # linux
+  else
+    date -u -d "-$offset hours" '+%Y-%m-%d_%H00'
+  fi
+}
+
+# Look for the most recent archive node DB dump from the last 3 hours
+for i in $(seq 0 2); do
+  STAMP=$(get_stamp "$i")
+  FILE="${NETWORK}-archive-dump-${STAMP}.sql.tar.gz"
+  URL="${BASE_URL}/${FILE}"
+
+  echo "Attempting to download archive node DB dump from: $URL"
+
+  # -f fails on HTTP errors; also abort if the body is an XML error page
+  if curl -f -# -O "$URL" && ! grep -q "<Error>" "$FILE"; then
+    tar -xf "$FILE"
+    mv "${FILE%.tar.gz}" "$PG_DUMP"
+    rm "$FILE"
+    echo "Downloaded and extracted to $DATA_DIR/$PG_DUMP"
+    exit 0
+  fi
+done
+
-# look for most recent db dump up to 10 days old
-for i in $(seq 0 9); do
+# If not found, try the last 3 days at 00:00
+for i in $(seq 0 2); do
   DATE=$(get_date "$i")
   FILE="${NETWORK}-archive-dump-${DATE}_0000.sql.tar.gz"
   URL="${BASE_URL}/${FILE}"
 
@@ -49,7 +81,7 @@ for i in $(seq 0 9); do
   echo "Attempting to download archive node DB dump from: $URL"
 
   # abort download if the file is an XML error page
-  if curl -sf -O "$URL" && ! grep -q "<Error>" "$FILE"; then
+  if curl -f -# -O "$URL" && ! grep -q "<Error>" "$FILE"; then
     tar -xf "$FILE"
     mv "${FILE%.tar.gz}" "$PG_DUMP"
     rm "$FILE"
@@ -58,5 +90,5 @@ for i in $(seq 0 9); do
   fi
 done
 
-echo "No valid dump found for network=$NETWORK in the last 10 days"
+echo "No valid dump found for network=$NETWORK in the last 3 days"
 exit 1