Skip to content

Commit

Permalink
This push constitutes all changes made to create v1.0.19.
Browse files Browse the repository at this point in the history
  • Loading branch information
Gutenson committed May 3, 2021
1 parent 6468cfb commit 4301ab6
Show file tree
Hide file tree
Showing 9 changed files with 228 additions and 156 deletions.
16 changes: 8 additions & 8 deletions arc/__init__.py
Original file line number Diff line number Diff line change
@@ -1,28 +1,28 @@
# This software was developed by United States Army Corps of Engineers (USACE)
# employees in the course of their official duties. USACE used copyrighted,
# open source code to develop this software, as such this software
# open source code to develop this software, as such this software
# (per 17 USC § 101) is considered "joint work." Pursuant to 17 USC § 105,
# portions of the software developed by USACE employees in the course of their
# official duties are not subject to copyright protection and are in the public
# domain.
#
#
# USACE assumes no responsibility whatsoever for the use of this software by
# other parties, and makes no guarantees, expressed or implied, about its
# quality, reliability, or any other characteristic.
#
# quality, reliability, or any other characteristic.
#
# The software is provided "as is," without warranty of any kind, express or
# implied, including but not limited to the warranties of merchantability,
# fitness for a particular purpose, and noninfringement. In no event shall the
# authors or U.S. Government be liable for any claim, damages or other
# liability, whether in an action of contract, tort or otherwise, arising from,
# out of or in connection with the software or the use or other dealings in the
# software.
#
#
# Public domain portions of this software can be redistributed and/or modified
# freely, provided that any derivative works bear some notice that they are
# derived from it, and any modified versions bear some notice that they have
# been modified.
#
# been modified.
#
# Copyrighted portions of the software are annotated within the source code.
# Open Source Licenses, included in the source code, apply to the applicable
# copyrighted portions. Copyrighted portions of the software are not in the
Expand All @@ -40,7 +40,7 @@



#import arc.ula_window as ula_window
import arc.ula_window as ula_window
import arc.get_all as get_all
import arc.ant_GUI as ant_GUI
import arc.shortcut as shortcut
13 changes: 10 additions & 3 deletions arc/ant_GUI.py
Original file line number Diff line number Diff line change
Expand Up @@ -731,8 +731,15 @@ def calculate_and_graph(self):
elif current_style == 'Switch to Unique Dates': #Means it is currently on CSV
self.get_inputs_csv()
except Exception:
self.L.Wrap(traceback.format_exc())
print('The APT cannot complete this analysis.\n')
print('The following error occurred. Please close the APT and reboot.\n')
# self.L.Wrap(traceback.format_exc())
raise

# python = sys.executable
# os.execl(python, python, * sys.argv)
# self.master.mainloop()

# End of calculate_and_graph method


Expand Down Expand Up @@ -1184,9 +1191,9 @@ def calculate_or_add_batch(self, batch, params):
forecast_enabled = True
# Import anteProcess
try:
from . import anteProcess
except Exception:
import anteProcess
except Exception:
from . import anteProcess
# Set data_variable specific variables
if radio == 'Rain':
if self.rain_instance is None:
Expand Down
91 changes: 28 additions & 63 deletions arc/anteProcess.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,7 @@
import matplotlib.ticker as ticker
from matplotlib import rcParams
import pylab

# Stop annoying urllib3 errors for EPQS tests
# import logging
# logging.getLogger("urllib3").setLevel(logging.ERROR)
Expand Down Expand Up @@ -866,8 +867,7 @@ def getStations(self):
self.wimp_scraper.get_season(lat=float(self.site_lat),
lon=float(self.site_long),
month=int(self.dates.observation_month),
output_folder=None,
# output_folder=self.folderPath,
output_folder=self.folderPath,
watershed_analysis=self.watershed_analysis)
del palmer_value, palmer_class, palmer_color
# Query all Elevations
Expand All @@ -881,8 +881,11 @@ def getStations(self):

# find the primary station after multiprocessing finishes
need_primary = True
primary_station = self.getBest(need_primary=need_primary)
print(primary_station.location)
try:
primary_station = self.getBest(need_primary=need_primary)
print(primary_station.location)
except:
self.log.Wrap("No suitable primary station locations were found by the APT...")
secondary_stations_sorted_list = []
if primary_station is not None:
# Note that the primary station has been found
Expand Down Expand Up @@ -1067,37 +1070,23 @@ def createFinalDF(self):
num_rows_antecedent = missing_before_antecedent-missing_after_antecedent
num_rows = num_rows_normal + num_rows_antecedent
if num_rows > 0:
if n == 1: # n=1 should be the primary station in the station list
num_stations_used += 1
# BUILD STATIONS TABLE
vals = []
vals.append(station.name)
vals.append(station.location)
vals.append(station.elevation)
vals.append("{0}*".format(station.distance))
vals.append("{0}*".format(station.elevDiff))
vals.append("{0}*".format(station.weightedDiff))
vals.append(num_rows_normal)
vals.append(num_rows_antecedent)
station_table_values.append(vals)
else:
num_stations_used += 1
# BUILD STATIONS TABLE
vals = []
vals.append(station.name)
vals.append(station.location)
vals.append(station.elevation)
vals.append(station.distance)
vals.append(station.elevDiff)
vals.append(station.weightedDiff)
vals.append(num_rows_normal)
vals.append(num_rows_antecedent)
station_table_values.append(vals)
num_stations_used += 1
# BUILD STATIONS TABLE
vals = []
vals.append(station.name)
vals.append(station.location)
vals.append(station.elevation)
vals.append(station.distance)
vals.append(station.elevDiff)
vals.append(station.weightedDiff)
vals.append(num_rows_normal)
vals.append(num_rows_antecedent)
station_table_values.append(vals)
# SAVE RESULTS TO CSV IN OUTPUT DIRECTORY
if self.save_folder is not None:
# Generate output
try:
station_csv_name = '{}_{}.csv'.format(best_station.name,self.dates.observation_date).replace('/','_') # Slashes keep getting added to file names somehow, causing failures here
station_csv_name = '{}_{}.csv'.format(station.name,self.dates.observation_date).replace('/','_') # Slashes keep getting added to file names somehow, causing failures here
station_csv_path = os.path.join(self.stationFolderPath, station_csv_name)
if os.path.isfile(station_csv_path) is False:
self.log.Wrap('Saving station data to CSV in output folder...')
Expand Down Expand Up @@ -1502,6 +1491,7 @@ def createFinalDF(self):

# Get WebWIMP Wet/Dry Season Determination
if self.data_type == 'PRCP':
# Querying WebWIMP to collect Wet / Dry season info...'
try:
# Querying WebWIMP to collect Wet / Dry season info...'
wet_dry_season_result = self.wimp_scraper.get_season(lat=float(self.site_lat),
Expand Down Expand Up @@ -1552,15 +1542,12 @@ def createFinalDF(self):
# Make graph tic marks face outward
rcParams['xtick.direction'] = 'out'
rcParams['ytick.direction'] = 'out'

# Construct Figure
plt.ion() # MAKES PLOT.SHOW() NON-BLOCKING
fig = plt.figure(figsize=(17, 11))
fig.set_facecolor('0.77')
fig.set_dpi(140)
# add a footer to the station table to describe the asterisk.
footer_text = '*This station is considered the primary station and these values are generated in relation to the location of interest.'
fig.text(0.51,0.31, footer_text, fontsize=10, color="black")
# fig.text(0.51,0.31, fontsize=10, color="black")
if self.data_type == 'PRCP':
# if num_stations_used < 14:
ax1 = plt.subplot2grid((9, 10), (0, 0), colspan=10, rowspan=6)
Expand Down Expand Up @@ -1906,38 +1893,16 @@ def createFinalDF(self):
# None,
# SAVE_FOLDER,
# False]
INPUT_LIST = ['PRCP',
'38.5',
'-121.5',
2018,
INPUT_LIST = [['PRCP',
'36.98',
'-110.084',
2021,
4,
10,
15,
None,
None,
SAVE_FOLDER,
False]
INPUT_LIST = [
['PRCP', '38.5', '-121.5', 1935, 5, 15, None, None, SAVE_FOLDER, False],
['PRCP', '38.5', '-121.5', 1940, 2, 29, None, None, SAVE_FOLDER, False],
['PRCP', '38.5', '-121.5', 1941, 2, 28, None, None, SAVE_FOLDER, False],
['PRCP', '38.5', '-121.5', 1942, 12, 7, None, None, SAVE_FOLDER, False],
['PRCP', '38.5', '-121.5', 1943, 6, 12, None, None, SAVE_FOLDER, False],
['PRCP', '38.5', '-121.5', 1944, 7, 19, None, None, SAVE_FOLDER, False],
['PRCP', '38.5', '-121.5', 1945, 8, 21, None, None, SAVE_FOLDER, False],
['PRCP', '38.5', '-121.5', 1950, 3, 15, None, None, SAVE_FOLDER, False],
['PRCP', '38.5', '-121.5', 1951, 6, 16, None, None, SAVE_FOLDER, False],
['PRCP', '38.5', '-121.5', 1952, 7, 4, None, None, SAVE_FOLDER, False],
['PRCP', '38.5', '-121.5', 1965, 1, 1, None, None, SAVE_FOLDER, False],
['PRCP', '38.5', '-121.5', 1971, 5, 28, None, None, SAVE_FOLDER, False],
['PRCP', '38.5', '-121.5', 1973, 7, 4, None, None, SAVE_FOLDER, False],
['PRCP', '38.5', '-121.5', 1978, 11, 21, None, None, SAVE_FOLDER, False],
['PRCP', '38.5', '-121.5', 1981, 12, 2, None, None, SAVE_FOLDER, False],
['PRCP', '38.5', '-121.5', 1984, 4, 24, None, None, SAVE_FOLDER, False],
['PRCP', '38.5', '-121.5', 1985, 9, 13, None, None, SAVE_FOLDER, False],
['PRCP', '38.5', '-121.5', 1989, 5, 18, None, None, SAVE_FOLDER, False],
['PRCP', '38.5', '-121.5', 1998, 12, 1, None, None, SAVE_FOLDER, False],
['PRCP', '38.5', '-121.5', 2020, 6, 20, None, None, SAVE_FOLDER, False],
]
False]]
for i in INPUT_LIST:
INSTANCE.setInputs(i, watershed_analysis=False, all_sampling_coordinates=None)
input('Stall for debugging. Press enter or click X to close')
32 changes: 15 additions & 17 deletions arc/custom_watershed_query.py
Original file line number Diff line number Diff line change
@@ -1,28 +1,28 @@
# This software was developed by United States Army Corps of Engineers (USACE)
# employees in the course of their official duties. USACE used copyrighted,
# open source code to develop this software, as such this software
# open source code to develop this software, as such this software
# (per 17 USC § 101) is considered "joint work." Pursuant to 17 USC § 105,
# portions of the software developed by USACE employees in the course of their
# official duties are not subject to copyright protection and are in the public
# domain.
#
#
# USACE assumes no responsibility whatsoever for the use of this software by
# other parties, and makes no guarantees, expressed or implied, about its
# quality, reliability, or any other characteristic.
#
# quality, reliability, or any other characteristic.
#
# The software is provided "as is," without warranty of any kind, express or
# implied, including but not limited to the warranties of merchantability,
# fitness for a particular purpose, and noninfringement. In no event shall the
# authors or U.S. Government be liable for any claim, damages or other
# liability, whether in an action of contract, tort or otherwise, arising from,
# out of or in connection with the software or the use or other dealings in the
# software.
#
#
# Public domain portions of this software can be redistributed and/or modified
# freely, provided that any derivative works bear some notice that they are
# derived from it, and any modified versions bear some notice that they have
# been modified.
#
# been modified.
#
# Copyrighted portions of the software are annotated within the source code.
# Open Source Licenses, included in the source code, apply to the applicable
# copyrighted portions. Copyrighted portions of the software are not in the
Expand Down Expand Up @@ -93,7 +93,7 @@ def findHorizontalUnits(csString):
def shapefile_sample(lat, lon, shapefile):
"""
Identify the HUC of a given huc_digits in which the supplied coordinates lie.
If selected, generate random sampling points (# and minimum spacing determined by HUC Digits)
If selected, generate random sampling points (# and minimum spacing determined by HUC Digits)
"""
# Shapefile Query Adapted from
# https://stackoverflow.com/questions/7861196/check-if-a-geopoint-with-latitude-and-longitude-is-within-a-shapefile/13433127#13433127
Expand Down Expand Up @@ -145,7 +145,7 @@ def shapefile_sample(lat, lon, shapefile):

# Set up a spatial filter such that the only features we see when we
# loop through "lyr_in" are those which overlap the point defined above
log.Wrap(' -Filtering HUC8 features by spatial overlap with selected coordinates...')
log.Wrap(' -Filtering HUC8 features by spatial overlap with selected coordinates...')
lyr_in.SetSpatialFilter(pt)
# Loop through the overlapped features and display the field of interest
for feat_in in lyr_in:
Expand All @@ -158,14 +158,12 @@ def shapefile_sample(lat, lon, shapefile):
if not horizontal_units.lower() in supported_units:
# Transform geometry to Albers
selected_feature_geometry.Transform(transform_source_to_albers)
transform_back = transform_albers_to_wgs
# Update horizontal units
geo_ref = selected_feature_geometry.GetSpatialReference()
horizontal_units = findHorizontalUnits(str(geo_ref))
if horizontal_units.lower() in supported_units:
# Calculate Area
selected_huc_area = selected_feature_geometry.GetArea()
transform_back = rtran
# Convert Area to Square Miles
if horizontal_units.lower() in ['meter', 'meters']:
huc_square_miles = selected_huc_area / 2590000
Expand Down Expand Up @@ -206,7 +204,7 @@ def shapefile_sample(lat, lon, shapefile):
# Announce protocol commencement
log.Wrap('')
log.Wrap('Generating potential sampling points and testing the above conditions...')

# Add initially selected coordinates as the first sampling point
previously_selected_points.append(pt)
coordinates_within_polygon.append([lat, lon])
Expand All @@ -228,7 +226,7 @@ def shapefile_sample(lat, lon, shapefile):
test_x_round = round(test_x, 6)
test_y_round = round(test_y, 6)
if points_tested_since_last_success > 3000:
if num_points < 997:
if num_points < 997:
log.Wrap('Sampling complete (3000 consecutive points tested since the last suitable one was found).')
break
else:
Expand Down Expand Up @@ -261,7 +259,10 @@ def shapefile_sample(lat, lon, shapefile):
num_points -= 1
points_selected += 1
previously_selected_points.append(test_pt)
[wgs_lon, wgs_lat, z] = transform_back.TransformPoint(test_x, test_y)
if not horizontal_units.lower() in supported_units:
[wgs_lon, wgs_lat, z] = transform_albers_to_wgs.TransformPoint(test_x, test_y)
else:
[wgs_lon, wgs_lat, z] = rtran.TransformPoint(test_x, test_y)
wgs_lat = round(wgs_lat, 6)
wgs_lon = round(wgs_lon, 6)
coordinates_within_polygon.append([wgs_lat, wgs_lon])
Expand Down Expand Up @@ -289,6 +290,3 @@ def shapefile_sample(lat, lon, shapefile):
# print(point)
duration = time.clock() - start_time
print('DevOnly: Processing took {} seconds'.format(duration))



Loading

0 comments on commit 4301ab6

Please sign in to comment.