Empty file added COMMAND.back
Empty file.
30 changes: 30 additions & 0 deletions RELEASES.back
@@ -0,0 +1,30 @@
***************************************************************************************************************
* *
* *
* *
* Input file for the Lagrangian particle dispersion model FLEXPART *
* Please select your options *
* *
* *
* *
***************************************************************************************************************
&RELEASES_CTRL
NSPEC = 1, ! Total number of species
SPECNUM_REL= 16, ! Species numbers in directory SPECIES
/
&RELEASE
IDATE1 = start_date,
ITIME1 = start_time,
IDATE2 = end_date,
ITIME2 = end_time,
LON1 = lon_1,
LON2 = 30.10,
LAT1 = lat_1,
LAT2 = 50.90,
Z1 = 0.00,
Comment on lines +1 to +24
Owner Author: should be removed, all .back files and csv_parser
Z2 = 200.00,
ZKIND = 1,
MASS = 1.00e-04,
PARTS = 10000,
COMMENT = release_comment,
/
46 changes: 46 additions & 0 deletions csv_parser.py
@@ -0,0 +1,46 @@
import csv
import logging
import os

logging.basicConfig(filename="main.log", level=logging.INFO,
                    format="%(asctime)s %(message)s")

path_to_data_files = '/data/calculations/'
basename = os.path.basename(os.getcwd())

with open('measurem.csv', newline='') as csvfile:
    csv_reader = csv.reader(csvfile, delimiter=';')
    csv_header = next(csv_reader)  # skip the header row
    for row in csv_reader:
        calc_folder_id = basename + '_' + row[1]
        path_to_calc_dir = path_to_data_files + calc_folder_id
        lat = row[5]
        lon = row[6]
        start_date = row[7].replace('.', '')  # strip separators: IDATE1 expects a digits-only date
        start_time = row[8].replace(':', '')  # strip separators: ITIME1 expects a digits-only time
        end_date = row[9].replace('.', '')
        end_time = row[10].replace(':', '')
        comment = "RELEASE " + row[1]
        print('comment', comment)
        print('calc_folder_id', calc_folder_id)
        print('path_to_calc_dir', path_to_calc_dir)

        if not os.path.exists(path_to_calc_dir):
            os.makedirs(path_to_calc_dir)
            logging.info('Folder ' + calc_folder_id + ' created')
        logging.info('Parsing ' + comment + ' file')

        # Read in the default RELEASES template
        with open('/data/opt/RELEASES.back', 'r') as file:
            filedata = file.read()

        # Replace each placeholder with the value from the current CSV row
        filedata = filedata.replace('start_date', start_date)
        filedata = filedata.replace('start_time', start_time)
        filedata = filedata.replace('end_date', end_date)
        filedata = filedata.replace('end_time', end_time)
        filedata = filedata.replace('lat_1', lat)
        filedata = filedata.replace('lon_1', lon)
        filedata = filedata.replace('release_comment', comment)

        # Write the filled-in RELEASES file into the calculation directory
        with open(path_to_calc_dir + '/' + 'RELEASES', 'w') as file:
            file.write(filedata)
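The chain of replace() calls above can also be driven from one mapping, which keeps the placeholder names next to their CSV columns. A sketch of that refactor, assuming the placeholder tokens stay exactly as in RELEASES.back; the helper name is hypothetical and not part of the PR:

# Hypothetical refactor sketch, not part of the PR: one dict maps each
# placeholder token in RELEASES.back to the value taken from the CSV row.
def fill_releases_template(template_text, row):
    substitutions = {
        'start_date': row[7].replace('.', ''),
        'start_time': row[8].replace(':', ''),
        'end_date': row[9].replace('.', ''),
        'end_time': row[10].replace(':', ''),
        'lat_1': row[5],
        'lon_1': row[6],
        'release_comment': 'RELEASE ' + row[1],
    }
    for placeholder, value in substitutions.items():
        template_text = template_text.replace(placeholder, value)
    return template_text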
81 changes: 81 additions & 0 deletions parse_csv_files.py
@@ -0,0 +1,81 @@
'''
description: Download weather forecasts from GFS for flexpart
license: APACHE 2.0
author: Synkevych Roman, ([email protected])

Example forecast file:
https://www.ncei.noaa.gov/data/global-forecast-system/access/grid-003-1.0-degree/forecast/202205/20220511/gfs_3_20220511_0000_000.grb2

Example analysis file:
//www.ncei.noaa.gov/data/global-forecast-system/access/grid-003-1.0-degree/analysis/202004/20200418/gfs_3_20200418_0000_000.grb2
'''

from pygfs.utils import *
import shutil
import os
import requests

path_to_csv_file = ''
path_to_default_files = ''

number_of_calculations = 0
calc_folder_name = ''

# Create symlinks for simflex and flexpart

# Provide start_date and end_date for the forecast download and create AVAILABLE;
# this script should check whether a file is already available before downloading it

# Create a series_id.log that saves info about each calculation

class gfs:
    site_url = "https://www.ncei.noaa.gov/data/global-forecast-system/access/"

    def download(self, date, fcsthours, resolution):
        self.date = date
        self.fcsthours = fcsthours
        self.resolution = resolution
        self.check_fcsthours()
        self.build_filelist()
        self.download_files()

    def check_fcsthours(self):
        # Minimal guard (this method was referenced but not defined in the draft):
        # forecast hours must be a non-empty sequence of non-negative integers.
        if not self.fcsthours or any(int(h) < 0 for h in self.fcsthours):
            raise ValueError('fcsthours must be a non-empty list of non-negative hours')

    def build_filelist(self):
        self.filelist = [self.get_filename(fcsthour)
                         for fcsthour in self.fcsthours]

    def get_filename(self, fcsthour):
        # Build the rda.ucar.edu ds084.1 path, e.g. gfs.0p25.2022051100.f003.grib2
        int_, dec = (str(float(self.resolution))).split('.')
        yr = str(self.date.year).zfill(4)
        mnth = str(self.date.month).zfill(2)
        day = str(self.date.day).zfill(2)
        baseurl = 'https://rda.ucar.edu/data/ds084.1/'
        fpath = os.path.join(yr, yr + mnth + day, 'gfs.' +
                             int_ + 'p' + dec.zfill(2) + '.' +
                             yr + mnth + day + '00.f' +
                             str(fcsthour).zfill(3) + '.grib2')
        return os.path.join(baseurl, fpath)

    def download_files(self):
        '''
        Download the actual files
        '''
        [self.download_file(fl) for fl in self.filelist]

    def download_file(self, url):
        '''
        Stream download url to localfile
        '''
        local_filename = url.split('/')[-1]
        # No session attribute is set up anywhere in this draft, so use a plain
        # requests call and stream the response body straight to disk.
        with requests.get(url, stream=True) as r:
            with open(local_filename, 'wb') as f:
                shutil.copyfileobj(r.raw, f)
            # check_gribfile(local_filename)
        return local_filename

def parse_releases():
    # csv file name
    # row line
    # releases template file name and location
    pass  # not implemented yet in this draft
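A minimal usage sketch for the gfs class above; the date comes from the example URL in the module docstring, while the forecast hours and resolution are illustrative values, not something the PR specifies:

from datetime import date

downloader = gfs()
# Fetch the 3- and 6-hour forecasts of the 2022-05-11 00 UTC run (0.25-degree grid, ds084.1 naming)
downloader.download(date(2022, 5, 11), fcsthours=[3, 6], resolution=0.25)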
31 changes: 31 additions & 0 deletions template/1.txt
@@ -0,0 +1,31 @@
id_calc use_no id_measu id_station station country s_lat s_lng id_nuclide name_nuclide date_start time_start date_end time_end val sigma_or_ldl backgr
1 1 1 1 node_291_137 Ukraine 50.85 30.05 3 Ru-106 2020-04-18 05:00:00 2020-04-18 05:00:10 0.00524513 8.3922E-4 6.0E-6
1 1 3 1 node_291_138 Ukraine 50.85 30.05 3 Ru-106 2020-04-18 07:00:00 2020-04-18 07:00:10 0.01818158 0.00290905 6.0E-6
1 1 4 1 node_291_138 Ukraine 50.85 30.05 3 Ru-106 2020-04-18 08:00:00 2020-04-18 08:00:10 0.03814321 0.00610291 6.0E-6
1 1 5 1 node_291_138 Ukraine 50.85 30.05 3 Ru-106 2020-04-18 09:00:00 2020-04-18 09:00:10 0.04771746 0.00763479 6.0E-6
1 1 6 1 node_291_138 Ukraine 50.85 30.05 3 Ru-106 2020-04-18 10:00:00 2020-04-18 10:00:10 0.03077517 0.00492403 6.0E-6
1 1 7 1 node_291_138 Ukraine 50.85 30.05 3 Ru-106 2020-04-18 11:00:00 2020-04-18 11:00:10 0.00891589 0.00142654 6.0E-6
1 1 8 1 node_291_138 Ukraine 50.85 30.05 3 Ru-106 2020-04-18 12:00:00 2020-04-18 12:00:10 0.00124652 1.9944E-4 6.0E-6
1 1 9 1 node_291_138 Ukraine 50.85 30.05 3 Ru-106 2020-04-18 13:00:00 2020-04-18 13:00:10 1.3097E-4 2.1E-5 6.0E-6
1 1 10 1 node_291_138 Ukraine 50.85 30.05 3 Ru-106 2020-04-18 14:00:00 2020-04-18 14:00:10 9.6E-7 1.5E-7 6.0E-6
1 1 11 1 node_291_138 Ukraine 50.85 30.05 3 Ru-106 2020-04-18 15:00:00 2020-04-18 15:00:10 0.0 0.0 6.0E-6
1 1 12 1 node_291_138 Ukraine 50.85 30.05 3 Ru-106 2020-04-18 16:00:00 2020-04-18 16:00:10 1.1426E-4 1.83E-5 6.0E-6
1 1 13 1 node_291_138 Ukraine 50.85 30.05 3 Ru-106 2020-04-18 17:00:00 2020-04-18 17:00:10 0.00414009 6.6241E-4 6.0E-6
1 1 14 1 node_291_138 Ukraine 50.85 30.05 3 Ru-106 2020-04-18 18:00:00 2020-04-18 18:00:10 0.02838107 0.00454097 6.0E-6
1 1 15 1 node_291_138 Ukraine 50.85 30.05 3 Ru-106 2020-04-18 19:00:00 2020-04-18 19:00:10 0.0516992 0.00827187 6.0E-6
1 1 16 1 node_291_138 Ukraine 50.85 30.05 3 Ru-106 2020-04-18 20:00:00 2020-04-18 20:00:10 0.03917277 0.00626764 6.0E-6
1 1 17 1 node_291_138 Ukraine 50.85 30.05 3 Ru-106 2020-04-18 21:00:00 2020-04-18 21:00:10 0.01503785 0.00240606 6.0E-6
1 1 18 1 node_291_138 Ukraine 50.85 30.05 3 Ru-106 2020-04-18 22:00:00 2020-04-18 22:00:10 0.00435961 6.9754E-4 6.0E-6
1 1 19 1 node_291_138 Ukraine 50.85 30.05 3 Ru-106 2020-04-18 23:00:00 2020-04-18 23:00:10 0.0026768 4.2829E-4 6.0E-6
1 1 20 1 node_291_138 Ukraine 50.85 30.05 3 Ru-106 2020-04-19 00:00:00 2020-04-19 00:00:10 0.01201856 0.00192297 6.0E-6
1 1 21 1 node_291_138 Ukraine 50.85 30.05 3 Ru-106 2020-04-19 01:00:00 2020-04-19 01:00:10 0.0566713 0.00906741 6.0E-6
1 1 22 1 node_291_138 Ukraine 50.85 30.05 3 Ru-106 2020-04-19 02:00:00 2020-04-19 02:00:10 0.07849871 0.01255979 6.0E-6
1 1 23 1 node_291_138 Ukraine 50.85 30.05 3 Ru-106 2020-04-19 03:00:00 2020-04-19 03:00:10 0.06059366 0.00969499 6.0E-6
1 1 24 1 node_291_138 Ukraine 50.85 30.05 3 Ru-106 2020-04-19 04:00:00 2020-04-19 04:00:10 0.05526348 0.00884216 6.0E-6
1 1 25 1 node_291_138 Ukraine 50.85 30.05 3 Ru-106 2020-04-19 05:00:00 2020-04-19 05:00:10 0.02873416 0.00459747 6.0E-6
1 1 26 1 node_291_138 Ukraine 50.85 30.05 3 Ru-106 2020-04-19 06:00:00 2020-04-19 06:00:10 0.00174476 2.7916E-4 6.0E-6
1 1 27 1 node_291_138 Ukraine 50.85 30.05 3 Ru-106 2020-04-19 07:00:00 2020-04-19 07:00:10 0.0 1.0E-6 6.0E-6
1 1 28 1 node_291_138 Ukraine 50.85 30.05 3 Ru-106 2020-04-19 08:00:00 2020-04-19 08:00:10 0.0 1.0E-6 6.0E-6
1 1 29 1 node_291_138 Ukraine 50.85 30.05 3 Ru-106 2020-04-19 09:00:00 2020-04-19 09:00:10 0.0 1.0E-6 6.0E-6
1 1 30 1 node_291_138 Ukraine 50.85 30.05 3 Ru-106 2020-04-19 11:00:00 2020-04-19 11:00:10 0.0 1.0E-6 6.0E-6
1 1 31 1 node_291_138 Ukraine 50.85 30.05 3 Ru-106 2020-04-19 12:00:00 2020-04-19 12:00:10 0.0 1.0E-6 6.0E-6
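The rows above are whitespace-separated with a single header line; a minimal sketch for loading them for inspection (pandas is an assumption here, the PR itself only uses the csv module):

import pandas as pd

# Whitespace-delimited table with the header shown above
measurements = pd.read_csv('template/1.txt', sep=r'\s+')
print(measurements[['date_start', 'time_start', 'val', 'sigma_or_ldl']].head())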
17 changes: 17 additions & 0 deletions template/1.xml
@@ -0,0 +1,17 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<journal>
<id_calc>1</id_calc>
<id_series>1</id_series>
<previous_calc>0</previous_calc>
<se_lat>37.85</se_lat>
<se_lon>0.05</se_lon>
<nx>400.0</nx>
<ny>322.0</ny>
<dlat>0.1</dlat>
<dlon>0.1</dlon>
<imin>2020-04-18 00:00:00</imin>
<imax>2020-04-21 00:00:00</imax>
<minheight>0</minheight>
<maxheight>34</maxheight>
<calc_type>1</calc_type>
</journal>
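A minimal sketch of reading the journal parameters above with the standard library; the element names come from the XML, but how the file is actually consumed downstream is an assumption:

import xml.etree.ElementTree as ET

journal = ET.parse('template/1.xml').getroot()
domain = {
    'se_lat': float(journal.findtext('se_lat')),
    'se_lon': float(journal.findtext('se_lon')),
    'nx': float(journal.findtext('nx')),
    'ny': float(journal.findtext('ny')),
    'dlat': float(journal.findtext('dlat')),
    'dlon': float(journal.findtext('dlon')),
}
time_window = (journal.findtext('imin'), journal.findtext('imax'))
print(domain, time_window)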
25 changes: 25 additions & 0 deletions template/calc_releases.sh
@@ -0,0 +1,25 @@
#!/bin/bash

sed_length=15
startline=15
path=$(pwd)
for i in $(seq 1 31); do
  # reset RELEASES to the beginning of the template file
  cp options/RELEASES.tmp options/RELEASES
  start=$(( startline * i ))
  if [ "$i" -eq 2 ] ; then
    start=$((start+1))
  elif (( i > 2 )) ; then
    start=$((start+i-1))
  fi
  end=$(( start + sed_length ))
  # append one &RELEASE block from RELEASES.back to RELEASES
  sed -n "${start},${end}p" options/RELEASES.back >> options/RELEASES
  ./FLEXPART >> allout.txt 2>&1
  mv output/grid_time_20200421000000.nc output/grid_time_20200421000000_$i.nc
  if [ "$i" -eq 1 ] ; then
    echo "#id_obs; path_to_file; pointspec_ind;" >> table_srs_paths.txt
  else
    echo "$i; $(pwd)/output/grid_time_20200421000000_$i.nc; 1" >> table_srs_paths.txt
  fi
done
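The start/end arithmetic in the loop selects 16-line &RELEASE blocks, line 15 through line 30 for the first one, matching the RELEASES.back layout earlier in this diff. A small Python check, a hypothetical helper rather than part of the PR, that reproduces the same ranges:

SED_LENGTH = 15   # matches sed_length in the script: lines added after the start line
STARTLINE = 15    # line where the first &RELEASE block starts in the RELEASES.back template above

def release_block_range(i):
    """Return the (start, end) line pair selected by calc_releases.sh for release i."""
    start = STARTLINE * i
    if i == 2:
        start += 1
    elif i > 2:
        start += i - 1
    return start, start + SED_LENGTH

# The arithmetic is equivalent to a plain 16-line stride starting at line 15.
assert all(release_block_range(i) == (STARTLINE + 16 * (i - 1),
                                      STARTLINE + 16 * (i - 1) + SED_LENGTH)
           for i in range(1, 32))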
88 changes: 88 additions & 0 deletions template/download_grib.py
@@ -0,0 +1,88 @@
from datetime import datetime
from datetime import timedelta, date
import os, urllib.request

# Two months of data are roughly 44 GB; weigh calculation speed against the time needed to download it

HHMMSS = ['030000', '060000', '090000', '120000',
'150000', '180000', '210000', '000000']
FILE_HOURS = ['0000', '0000', '0600', '0600', '1200', '1200', '1800', '1800']
FILE_SUFFIX = ['003', '006']
NCEI_URL = "https://www.ncei.noaa.gov/data/global-forecast-system/access/historical/"
FILE_PREFIX = "gfs_4_"
DOMAIN = NCEI_URL + "forecast/grid-004-0.5-degree/"
basename = os.getcwd()
DATA_FOLDER = '/data/grib_data/'
available_template_header = """XXXXXX EMPTY LINES XXXXXXXXX
XXXXXX EMPTY LINES XXXXXXXX
YYYYMMDD HHMMSS name of the file(up to 80 characters)
"""

def write_to_file(file_name, contents, mode='w'):
    with open(basename + '/' + file_name, mode) as file:
        file.write(contents)

def create_folder(directory=None):
    if not os.path.exists(directory):
        os.makedirs(directory)

def parse_available_file(date=None, file_name=None):
    if date is not None and file_name is not None:
        available_template_body = """{yyyymmdd} {hhmmss} {file_name} ON DISC
""".format(yyyymmdd=date.strftime('%Y%m%d'),
           hhmmss=date.strftime('%H%M%S'),
           file_name=file_name)
        write_to_file('AVAILABLE', available_template_body, 'a')

def download_grib(date_start=None, date_end=None):
    if date_start is not None and date_end is not None:
        # round the start down so its hour falls on a 3-hour boundary
        start_date = date_start - timedelta(hours=date_start.hour % 3)
        end_date = date_end

        # round the end up so the last dataset still covers the requested window
        if end_date.hour % 3 == 1:
            end_date = date_end + timedelta(hours=2)
        elif end_date.hour % 3 == 2:
            end_date = date_end + timedelta(hours=1)
        else:
            end_date = date_end + timedelta(hours=3)

        days, seconds = (
            end_date - start_date).days, (end_date - start_date).seconds
        hours = (days * 24 + seconds / 3600) // 8
        print('amount of datasets: ', hours)
        if hours <= 0:
            print('Error, invalid START or END date')
            return

        create_folder(DATA_FOLDER)
        write_to_file('AVAILABLE', available_template_header)

        end_forecast_date = start_forecast_date = start_date
        while end_forecast_date < end_date:
            forecast_suffix = ''
            if end_forecast_date.hour % 6 == 0:
                # start_forecast_date is the model run (cycle) the forecast file belongs to
                start_forecast_date = end_forecast_date - timedelta(hours=6)
                forecast_suffix = FILE_SUFFIX[1]
            elif end_forecast_date.hour % 6 == 3:
                start_forecast_date = end_forecast_date - timedelta(hours=3)
                forecast_suffix = FILE_SUFFIX[0]
            file_name = FILE_PREFIX + \
                start_forecast_date.strftime('%Y%m%d_%H%M_') + forecast_suffix + ".grb2"
            path_to_file = os.path.join(DATA_FOLDER, file_name)
            print('file_name', file_name)
            # skip the download if the file is already on disk
            if os.path.isfile(path_to_file):
                print("File", file_name, "exists.")
                parse_available_file(end_forecast_date, file_name)
                end_forecast_date = start_forecast_date = end_forecast_date + timedelta(hours=3)
                continue
            else:
                URL = DOMAIN + start_forecast_date.strftime('%Y%m/%Y%m%d/') + file_name
                urllib.request.urlretrieve(URL, path_to_file)
                parse_available_file(end_forecast_date, file_name)
                end_forecast_date = start_forecast_date = end_forecast_date + \
                    timedelta(hours=3)
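A minimal invocation sketch for download_grib, using the calculation window that appears in template/1.xml (imin/imax); the call itself is illustrative and not part of the PR:

from datetime import datetime

# Hypothetical driver: fetch 3-hourly GRIB files covering 2020-04-18 00:00 to 2020-04-21 00:00 UTC
download_grib(datetime(2020, 4, 18, 0, 0), datetime(2020, 4, 21, 0, 0))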