
Commit

snowman2 committed Jun 2, 2016
2 parents d69c955 + e252705 commit 7e24599
Showing 4 changed files with 63 additions and 57 deletions.
17 changes: 9 additions & 8 deletions imports/ftp_ecmwf_download.py
@@ -186,12 +186,13 @@ def remove_old_ftp_downloads(folder):
     date_now = datetime.datetime.utcnow()
     all_paths = glob(os.path.join(folder,'Runoff*netcdf*'))
     for path in all_paths:
-        date_file = datetime.datetime.strptime(os.path.basename(path).split('.')[1],'%Y%m%d')
-        if date_now - date_file > datetime.timedelta(1):
-            if os.path.isdir(path):
-                rmtree(path)
-            else:
-                os.remove(path)
+        date_file = datetime.datetime.strptime(os.path.basename(path).split('.')[1],'%Y%m%d')
+        if os.path.isdir(path):
+            rmtree(path)
+        else:
+            os.remove(path)
+        if date_now - date_file < datetime.timedelta(1):
+            os.mkdir(path)

 def download_all_ftp(download_dir, file_match, ftp_host, ftp_login,
                      ftp_passwd, ftp_directory, max_wait=60):
@@ -238,7 +239,7 @@ def download_all_ftp(download_dir, file_match, ftp_host, ftp_login,
             print dst_filename + ' already exists. Skipping download ...'
         #extract from tar.gz
         if unzip_file:
-            print "Extracting: " + dst_filename
+            print "Extracting: " + dst_filename
             ExtractNested(local_path, True)
         #add successfully downloaded file to list
         all_files_downloaded.append(local_dir)
@@ -273,4 +274,4 @@ def download_all_ftp(download_dir, file_match, ftp_host, ftp_login,
         raise Exception(ftp_exception)
 
     print "All downloads completed!"
-    return all_files_downloaded
+    return all_files_downloaded
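
The net effect of the first hunk is a behavioral change: remove_old_ftp_downloads previously deleted only downloads older than one day, while the merged version clears every matched 'Runoff*netcdf*' path and then recreates an empty placeholder directory for any download less than a day old, presumably so the "already exists" check above skips re-fetching still-current runs. A minimal standalone sketch of the new logic, assuming a name like "Runoff.20160602.netcdf.tar.gz" (the exact naming convention is an assumption):

import datetime
import os
from glob import glob
from shutil import rmtree

def remove_old_ftp_downloads(folder):
    # Sketch of the post-merge behavior: always clear each matched
    # download, then leave an empty placeholder directory for runs
    # less than one day old.
    date_now = datetime.datetime.utcnow()
    for path in glob(os.path.join(folder, 'Runoff*netcdf*')):
        # Assumed name format: "Runoff.<YYYYMMDD>...netcdf...", so the
        # second dot-separated field is the run date.
        date_file = datetime.datetime.strptime(
            os.path.basename(path).split('.')[1], '%Y%m%d')
        if os.path.isdir(path):
            rmtree(path)
        else:
            os.remove(path)
        if date_now - date_file < datetime.timedelta(1):
            os.mkdir(path)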
98 changes: 52 additions & 46 deletions imports/generate_warning_points.py
@@ -30,7 +30,7 @@ def calc_daily_peak(daily_time_index_array, idx, qout_arr, size_time):
         return qout_arr[time_index_start]
     return 0
 
-def generate_warning_points(ecmwf_prediction_folder, return_period_file, out_directory, threshold=1):
+def generate_warning_points(ecmwf_prediction_folder, return_period_file, out_directory, threshold):
"""
Create warning points from return periods and ECMWD prediction data
@@ -133,6 +133,13 @@ def generate_warning_points(ecmwf_prediction_folder, return_period_file, out_dir
         return_period_20 = return_period_20_data[return_period_comid_index]
         return_period_10 = return_period_10_data[return_period_comid_index]
         return_period_2 = return_period_2_data[return_period_comid_index]
+
+        #create graduated thresholds if needed
+        if return_period_20 < threshold:
+            return_period_20 = threshold*10
+            return_period_10 = threshold*5
+            return_period_2 = threshold
+
         #get mean
         mean_data_first = np.mean(all_data_first, axis=0)
         mean_data_second = np.mean(all_data_second, axis=0)
@@ -149,58 +156,57 @@ def generate_warning_points(ecmwf_prediction_folder, return_period_file, out_dir
         mean_plus_std_series = mean_series + std_dev
         for idx, daily_time_index in enumerate(daily_time_index_array):
             daily_mean_peak = calc_daily_peak(daily_time_index_array, idx, mean_series, size_time)
-            if daily_mean_peak > threshold:
-                if daily_mean_peak > return_period_20:
-                    return_20_points.append({ "lat" : return_period_lat_data[return_period_comid_index],
-                                              "lon" : return_period_lon_data[return_period_comid_index],
-                                              "size": 1,
-                                              "mean_peak": float("{0:.2f}".format(daily_mean_peak)),
-                                              "peak_date": time_array[daily_time_index].strftime("%Y-%m-%d"),
-                                              })
-                elif daily_mean_peak > return_period_10:
-                    return_10_points.append({ "lat" : return_period_lat_data[return_period_comid_index],
-                                              "lon" : return_period_lon_data[return_period_comid_index],
-                                              "size": 1,
-                                              "mean_peak": float("{0:.2f}".format(daily_mean_peak)),
-                                              "peak_date": time_array[daily_time_index].strftime("%Y-%m-%d"),
-                                              })
-                elif daily_mean_peak > return_period_2:
-                    return_2_points.append({ "lat" : return_period_lat_data[return_period_comid_index],
-                                             "lon" : return_period_lon_data[return_period_comid_index],
-                                             "size": 1,
-                                             "mean_peak": float("{0:.2f}".format(daily_mean_peak)),
-                                             "peak_date": time_array[daily_time_index].strftime("%Y-%m-%d"),
-                                             })
+            if daily_mean_peak > return_period_20:
+                return_20_points.append({ "lat" : return_period_lat_data[return_period_comid_index],
+                                          "lon" : return_period_lon_data[return_period_comid_index],
+                                          "size": 1,
+                                          "mean_peak": float("{0:.2f}".format(daily_mean_peak)),
+                                          "peak_date": time_array[daily_time_index].strftime("%Y-%m-%d"),
+                                          })
+            elif daily_mean_peak > return_period_10:
+                return_10_points.append({ "lat" : return_period_lat_data[return_period_comid_index],
+                                          "lon" : return_period_lon_data[return_period_comid_index],
+                                          "size": 1,
+                                          "mean_peak": float("{0:.2f}".format(daily_mean_peak)),
+                                          "peak_date": time_array[daily_time_index].strftime("%Y-%m-%d"),
+                                          })
+            elif daily_mean_peak > return_period_2:
+                return_2_points.append({ "lat" : return_period_lat_data[return_period_comid_index],
+                                         "lon" : return_period_lon_data[return_period_comid_index],
+                                         "size": 1,
+                                         "mean_peak": float("{0:.2f}".format(daily_mean_peak)),
+                                         "peak_date": time_array[daily_time_index].strftime("%Y-%m-%d"),
+                                         })

             daily_mean_plus_std_peak = min(calc_daily_peak(daily_time_index_array, idx, mean_plus_std_series, size_time),
                                            calc_daily_peak(daily_time_index_array, idx, max_series, size_time))
-            if daily_mean_plus_std_peak > threshold:
-                if daily_mean_plus_std_peak > return_period_20:
-                    return_20_points.append({ "lat" : return_period_lat_data[return_period_comid_index],
-                                              "lon" : return_period_lon_data[return_period_comid_index],
-                                              "size": 0,
-                                              "mean_plus_std_peak": float("{0:.2f}".format(daily_mean_plus_std_peak)),
-                                              "peak_date": time_array[daily_time_index].strftime("%Y-%m-%d"),
-                                              })
-                elif daily_mean_plus_std_peak > return_period_10:
-                    return_10_points.append({ "lat" : return_period_lat_data[return_period_comid_index],
-                                              "lon" : return_period_lon_data[return_period_comid_index],
-                                              "size": 0,
-                                              "mean_plus_std_peak": float("{0:.2f}".format(daily_mean_plus_std_peak)),
-                                              "peak_date": time_array[daily_time_index].strftime("%Y-%m-%d"),
-                                              })
-                elif daily_mean_plus_std_peak > return_period_2:
-                    return_2_points.append({ "lat" : return_period_lat_data[return_period_comid_index],
-                                             "lon" : return_period_lon_data[return_period_comid_index],
-                                             "size": 0,
-                                             "mean_plus_std_peak": float("{0:.2f}".format(daily_mean_plus_std_peak)),
-                                             "peak_date": time_array[daily_time_index].strftime("%Y-%m-%d"),
-                                             })

+            if daily_mean_plus_std_peak > return_period_20:
+                return_20_points.append({ "lat" : return_period_lat_data[return_period_comid_index],
+                                          "lon" : return_period_lon_data[return_period_comid_index],
+                                          "size": 0,
+                                          "mean_plus_std_peak": float("{0:.2f}".format(daily_mean_plus_std_peak)),
+                                          "peak_date": time_array[daily_time_index].strftime("%Y-%m-%d"),
+                                          })
+            elif daily_mean_plus_std_peak > return_period_10:
+                return_10_points.append({ "lat" : return_period_lat_data[return_period_comid_index],
+                                          "lon" : return_period_lon_data[return_period_comid_index],
+                                          "size": 0,
+                                          "mean_plus_std_peak": float("{0:.2f}".format(daily_mean_plus_std_peak)),
+                                          "peak_date": time_array[daily_time_index].strftime("%Y-%m-%d"),
+                                          })
+            elif daily_mean_plus_std_peak > return_period_2:
+                return_2_points.append({ "lat" : return_period_lat_data[return_period_comid_index],
+                                         "lon" : return_period_lon_data[return_period_comid_index],
+                                         "size": 0,
+                                         "mean_plus_std_peak": float("{0:.2f}".format(daily_mean_plus_std_peak)),
+                                         "peak_date": time_array[daily_time_index].strftime("%Y-%m-%d"),
+                                         })
+
print("Writing Output ...")
with open(os.path.join(out_directory, "return_20_points.txt"), 'wb') as outfile:
outfile.write(dumps(return_20_points))
with open(os.path.join(out_directory, "return_10_points.txt"), 'wb') as outfile:
outfile.write(dumps(return_10_points))
with open(os.path.join(out_directory, "return_2_points.txt"), 'wb') as outfile:
outfile.write(dumps(return_2_points))
outfile.write(dumps(return_2_points))
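
Both warning loops in this file drop the old outer `if ... > threshold` guard; the graduated-threshold block added at line 133 now substitutes tiers of threshold*10, threshold*5, and threshold whenever a stream's 20-year return period falls below the user-supplied threshold, and each daily peak is compared directly against the (possibly substituted) tiers. A sketch of the resulting classification with hypothetical flow values; classify_peak is an illustrative helper, not a function in this module:

def classify_peak(daily_peak, return_period_20, return_period_10,
                  return_period_2, threshold):
    # Graduated tiers replace unrealistically small return periods.
    if return_period_20 < threshold:
        return_period_20 = threshold * 10
        return_period_10 = threshold * 5
        return_period_2 = threshold
    if daily_peak > return_period_20:
        return 20
    elif daily_peak > return_period_10:
        return 10
    elif daily_peak > return_period_2:
        return 2
    return None  # below every warning tier

# Hypothetical values: with a threshold of 10 and a 20-year return
# period of 0.5, the tiers become 100 / 50 / 10, so a peak of 60
# lands in the 10-year warning list.
print(classify_peak(60.0, 0.5, 0.3, 0.1, 10.0))  # -> 10

Note also that the mean-plus-one-standard-deviation peak is capped at the ensemble maximum via min(...), so an inflated standard deviation cannot raise a warning above anything the ensemble actually predicted.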
3 changes: 1 addition & 2 deletions rapid_process.py
@@ -354,8 +354,7 @@ def run_ecmwf_rapid_process(rapid_executable_location, #path to RAPID executable
         except Exception, ex:
             print ex
             pass
-
-
+
         #run autoroute process if added
         if autoroute_executable_location and autoroute_io_files_location:
             #run autoroute on all of the watersheds
2 changes: 1 addition & 1 deletion rapid_process.sh
@@ -1,2 +1,2 @@
 #!/bin/sh
-/usr/lib/tethys/bin/python /home/alan/work/scripts/spt_ecmwf_autorapid_process/run.py 1> /home/alan/work/logs/rapid_$(date +%y%m%d%H%M%S).log 2>&1
+/usr/lib/tethys/bin/python /home/alan/work/scripts/spt_ecmwf_autorapid_process/run.py 1> /home/alan/work/logs/rapid_$(date +%y%m%d%H%M%S).log 2>&1
