Skip to content

Commit

Permalink
Added final revision 8, program to update tables and cool download bar
Browse files Browse the repository at this point in the history
  • Loading branch information
OneStone2 committed Jul 28, 2016
1 parent 295898c commit bf9c347
Show file tree
Hide file tree
Showing 5 changed files with 143 additions and 69 deletions.
7 changes: 7 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -28,8 +28,15 @@ python run.py [--online] state
```
state code is a 2-letter code for the state (e.g. ME)

Use state=US if you want to run the model for all contiguous states.

--online is to download the required files on the fly.

```
python update.py [--online] state
```
Run this program after changing the read.py file.

# Obtaining the data

Sometimes, the connection to the FIA database is unstable.
Expand Down
27 changes: 15 additions & 12 deletions analyze.R
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,7 @@ registerDoMC(core = 7)
analyze_r05 <- function(state, human) {
if (human) {
x <- read.csv(paste("data/", state, "_2a.csv", sep=''), as.is=T)
}
else {
} else {
x <- read.csv(paste("data/", state, "_2b.csv", sep=''), as.is=T)
}
r1 <- x[["carb"]]
Expand Down Expand Up @@ -43,8 +42,7 @@ analyze_r05 <- function(state, human) {
analyze_r06 <- function(state, human) {
if (human) {
x <- read.csv(paste("data/", state, "_2a.csv", sep=''), as.is=T)
}
else {
} else {
x <- read.csv(paste("data/", state, "_2b.csv", sep=''), as.is=T)
}
r1 <- x[["carb"]]
Expand Down Expand Up @@ -81,10 +79,11 @@ analyze_r06 <- function(state, human) {
analyze_r07 <- function(state, human) {
if (human) {
x <- read.csv(paste("data/", state, "_2a.csv", sep=''), as.is=T)
}
else {
} else {
x <- read.csv(paste("data/", state, "_2b.csv", sep=''), as.is=T)
}
x['human_p'] <- x['human_p'] + x['human_f']
x['human_f'] <- pmin(1, x['human_f'])
r1 <- x[["carb"]]
r2 <- x[["py"]]
r3 <- x[["human_p"]]
Expand Down Expand Up @@ -123,27 +122,31 @@ analyze_r07 <- function(state, human) {
}

analyze_r08 <- function(state, human) {
state <- 'ME'
human <- F
if (human) {
x <- read.csv(paste("data/", state, "_2a.csv", sep=''), as.is=T)
}
else {
} else {
x <- read.csv(paste("data/", state, "_2b.csv", sep=''), as.is=T)
}
r1 <- x[["carb"]]
r2 <- x[["py"]]
r3 <- x[["human_p"]]
r4 <- x[["human_n"]]
r5 <- x[["human_f"]]
x <- x[-c(nrow(x)),]
r1 <- r1[-c(1)]
r2 <- r2[-c(1)]
r3 <- r3[-c(1)]
r4 <- r4[-c(1)]
r5 <- r5[-c(1)]
x["post_py"] <- r2
x["growth"] <- (x["carb"]-r1)/(x["py"]-x["post_py"])
x["human_p"] <- r3
x["human_n"] <- r4
x["human_f"] <- r5
x <- x[x[,"py"] %/% 10000 == x[,"post_py"] %/% 10000,]
x <- x[, names(x) %in% grep("(growth)|(py)|(post_py)|(lat)|(lon)|(carb)|(iv[0123456789.]+)|(human_[np])|(elevation)", colnames(x), value=T)]
x <- x[, names(x) %in% grep("(growth)|(py)|(post_py)|(lat)|(lon)|(carb)|(iv[0123456789.]+)|(human_[npf])", colnames(x), value=T)]
row.names(x) <- 1:nrow(x)

X <- x[, !names(x) %in% c("growth","py","post_py")]
Expand All @@ -168,11 +171,11 @@ analyze_r08 <- function(state, human) {

state <- 'ME'
#Change state for whatever you want
N_REP <- 5
N_REP <- 3
print("Including human interaction:")
sum <- 0
for (i in 1:N_REP) {
sum <- sum + analyze_r05(state=state, human=T)
sum <- sum + analyze_r05(state=state, human=T)
}
print(paste('[5]', sum / N_REP))
sum <- 0
Expand Down Expand Up @@ -210,4 +213,4 @@ sum <- 0
for (i in 1:N_REP) {
sum <- sum + analyze_r08(state=state, human=F)
}
print(paste('[8]', sum / N_REP))
print(paste('[8]', sum / N_REP))
60 changes: 50 additions & 10 deletions read.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,35 @@
import bisect
import urllib2
import os
import sys

def chunk_report(bytes_so_far, chunk_size, total_size):
    """
    Print an in-place progress line for a file download.

    bytes_so_far -- number of bytes read so far
    chunk_size   -- bytes requested per read (unused; kept for the hook signature)
    total_size   -- expected total byte count, taken from Content-Length
    """
    # Guard against a zero Content-Length so an empty file never divides by zero.
    if total_size > 0:
        percent = round(float(bytes_so_far) / total_size * 100, 2)
    else:
        percent = 100.0
    # '\r' rewinds to the start of the line so the bar updates in place.
    sys.stdout.write("Downloaded %d of %d bytes (%0.2f%%)\r" %
                     (bytes_so_far, total_size, percent))

    # Terminate the progress line once the download is complete.
    if bytes_so_far >= total_size:
        sys.stdout.write('\n')

def chunk_read(response, chunk_size=8192, report_hook=None):
    """
    Read an HTTP response body in chunks and return it as one string.

    response    -- urllib2 response object (must expose read() and
                   info().getheader('Content-Length'))
    chunk_size  -- number of bytes requested per read() call
    report_hook -- optional callable(bytes_so_far, chunk_size, total_size)
                   invoked after every non-empty chunk, e.g. chunk_report
    """
    # NOTE(review): assumes the server always sends Content-Length --
    # getheader() returning None would raise here; confirm for FIA downloads.
    total_size = int(response.info().getheader('Content-Length').strip())
    bytes_so_far = 0
    chunks = []

    while True:
        chunk = response.read(chunk_size)
        if not chunk:
            break
        bytes_so_far += len(chunk)
        # append() keeps whole chunks; the original `data += chunk` extended
        # the list one character at a time, which is needlessly slow.
        chunks.append(chunk)
        if report_hook:
            report_hook(bytes_so_far, chunk_size, total_size)

    return "".join(chunks)

def check(row):
"""
Expand Down Expand Up @@ -60,18 +89,24 @@ def check_n(row):

def check_p(row):
    """
    Checks for recent planting in a plot

    row -- condition record exposing TRTCD1..TRTCD3 treatment-code columns.
    Returns 1 if any treatment code equals 30.0, otherwise 0.
    """
    # NOTE(review): this diff hunk contained both the pre- and post-commit
    # lines; the 50.0 checks were split out into check_f in this commit, so
    # only code 30 belongs here -- confirm against the committed file.
    if row['TRTCD1'] == 30.0:
        return 1
    if row['TRTCD2'] == 30.0:
        return 1
    if row['TRTCD3'] == 30.0:
        return 1
    return 0

def check_f(row):
    """
    Checks for positive human intervention in a plot

    row -- condition record exposing TRTCD1..TRTCD3 treatment-code columns.
    Returns 1 if any treatment code equals 50.0, otherwise 0.
    """
    # Scan the three treatment columns in order rather than spelling out
    # one if-statement per column.
    for column in ('TRTCD1', 'TRTCD2', 'TRTCD3'):
        if row[column] == 50.0:
            return 1
    return 0
Expand All @@ -94,6 +129,7 @@ def __init__(self, trees, plot, dstrb, py):
self.lat = plot['LAT']
self.human_n = check_n(dstrb)
self.human_p = check_p(dstrb)
self.human_f = check_f(dstrb)

def calc_iv(self):
"""
Expand Down Expand Up @@ -142,7 +178,8 @@ def plot_stats(self):
'lon': self.lon,
'lat': self.lat,
'human_p': self.human_p,
'human_n': self.human_n
'human_n': self.human_n,
'human_f': self.human_f
}
stats.update(self.calc_iv())
return stats
Expand All @@ -165,19 +202,22 @@ def parse(state, online=True):
PLOT_WEB = "http://apps.fs.fed.us/fiadb-downloads/CSV/"+state+"_PLOT.csv"
DSTRB_WEB = "http://apps.fs.fed.us/fiadb-downloads/CSV/"+state+"_COND.csv"
response = urllib2.urlopen(TREES_WEB)
csv = response.read()
print TREES_WEB
csv = chunk_read(response, report_hook=chunk_report)
f = open('temp', 'w')
f.write(csv)
f.close()
trees_df = pd.read_csv('temp', usecols=TREES_COLS)
response = urllib2.urlopen(PLOT_WEB)
csv = response.read()
print PLOT_WEB
csv = chunk_read(response, report_hook=chunk_report)
f = open('temp', 'w')
f.write(csv)
f.close()
plot_df = pd.read_csv('temp', usecols=PLOT_COLS)
response = urllib2.urlopen(DSTRB_WEB)
csv = response.read()
print DSTRB_WEB
csv = chunk_read(response, report_hook=chunk_report)
f = open('temp', 'w')
f.write(csv)
f.close()
Expand Down Expand Up @@ -253,7 +293,7 @@ def clean(state, b):
cur_np = 1
prev_id = data_points.loc[data_points.index[0], 'py'] // 10000
for i, row in data_points.iterrows():
if (prev_id != row['py'] // 10000) or (row['human_n'] == 1) or (row['human_p'] == 1):
if (prev_id != row['py'] // 10000) or (row['human_n'] == 1) or (row['human_p'] == 1) or (row['human_f'] == 1):
cur_np += 1
prev_id = row['py'] // 10000
data_points.loc[i, 'py'] = int(cur_np * 10000 + row['py'] % 10000)
Expand Down
97 changes: 50 additions & 47 deletions run.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,51 +9,54 @@
argparser.add_argument("state", help="2-letter code for the US state")
argparser.add_argument("--online", dest='online', const=True, default=False, action='store_const', help="Use FIA website")
args = argparser.parse_args()

if not (
os.path.isfile('data/'+args.state+'_2a.csv')
or os.path.isfile('data/'+args.state+'_2a.csv')
):
if not os.path.isfile('data/'+args.state+'_1.csv'):
if args.online:
plots = read.parse(args.state, online=True)
else:
plots = read.parse(args.state, online=False)
read.cluster_prep_file(plots, args.state)
read.clean(args.state, b=True)
args.state = [args.state]
if args.state == ['US']:
args.state = ['AL', 'AZ', 'AR', 'CA', 'CO', 'CT', 'DE', 'DC', 'FL', 'GA', 'ID', 'IL', 'IA', 'KS', 'KY', 'LA', 'ME', 'MD', 'MA', 'MI', 'MN', 'MS', 'MO', 'MT', 'NE', 'NV', 'NH', 'NJ', 'NM', 'NY', 'NC', 'ND', 'OH', 'OK', 'OR', 'PA', 'RI', 'SC', 'SD', 'TN', 'TX', 'UT', 'VT', 'VA', 'WA', 'WI', 'WY']
for state in args.state:
if not (
os.path.isfile('data/'+state+'_2a.csv')
or os.path.isfile('data/'+state+'_2a.csv')
):
if not os.path.isfile('data/'+state+'_1.csv'):
if args.online:
plots = read.parse(state, online=True)
else:
plots = read.parse(state, online=False)
read.cluster_prep_file(plots, state)
read.clean(args.state, b=True)

N_REP = 1
print 'Including human interaction:'
sum = 0
for i in np.arange(N_REP):
sum += analyze.analyze_r01(args.state, human=True, time=True)
print '[1]', sum / N_REP
sum = 0
for i in np.arange(N_REP):
sum += analyze.analyze_r02(args.state, human=True, time=True)
print '[2]', sum / N_REP
sum = 0
for i in np.arange(N_REP):
sum += analyze.analyze_r03(args.state, human=True, time=True)
print '[3]', sum / N_REP
sum = 0
for i in np.arange(N_REP):
sum += analyze.analyze_r04(args.state, human=True, time=True)
print '[4]', sum / N_REP
print 'Excluding human interaction:'
sum = 0
for i in np.arange(N_REP):
sum += analyze.analyze_r01(args.state, human=False, time=True)
print '[1]', sum / N_REP
sum = 0
for i in np.arange(N_REP):
sum += analyze.analyze_r02(args.state, human=False, time=True)
print '[2]', sum / N_REP
sum = 0
for i in np.arange(N_REP):
sum += analyze.analyze_r03(args.state, human=False, time=True)
print '[3]', sum / N_REP
sum = 0
for i in np.arange(N_REP):
sum += analyze.analyze_r04(args.state, human=False, time=True)
print '[4]', sum / N_REP
N_REP = 30
print 'Including human interaction:'
sum = 0
for i in np.arange(N_REP):
sum += analyze.analyze_r01(state, human=True, time=True)
print '[1]', sum / N_REP
sum = 0
for i in np.arange(N_REP):
sum += analyze.analyze_r02(state, human=True, time=True)
print '[2]', sum / N_REP
sum = 0
for i in np.arange(N_REP):
sum += analyze.analyze_r03(state, human=True, time=True)
print '[3]', sum / N_REP
sum = 0
for i in np.arange(N_REP):
sum += analyze.analyze_r04(state, human=True, time=True)
print '[4]', sum / N_REP
print 'Excluding human interaction:'
sum = 0
for i in np.arange(N_REP):
sum += analyze.analyze_r01(state, human=False, time=True)
print '[1]', sum / N_REP
sum = 0
for i in np.arange(N_REP):
sum += analyze.analyze_r02(state, human=False, time=True)
print '[2]', sum / N_REP
sum = 0
for i in np.arange(N_REP):
sum += analyze.analyze_r03(state, human=False, time=True)
print '[3]', sum / N_REP
sum = 0
for i in np.arange(N_REP):
sum += analyze.analyze_r04(state, human=False, time=True)
print '[4]', sum / N_REP
21 changes: 21 additions & 0 deletions update.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
import argparse
import os.path
import read
import analyze
import numpy as np

# Rebuild the cleaned per-state data files from the raw FIA tables.
# The README says to run this after changing read.py, so the cached CSVs
# reflect the new parsing logic.
# NOTE(review): os.path, analyze and numpy appear unused in this file --
# confirm before removing; they may be kept to mirror run.py's imports.
if __name__ == '__main__':
    argparser = argparse.ArgumentParser()
    # 2-letter state code, or 'US' to process every listed state.
    argparser.add_argument("state", help="2-letter code for the US state, or 'US' for all states")
    # --online downloads the FIA CSV tables instead of using local copies.
    argparser.add_argument("--online", dest='online', const=True, default=False, action='store_const', help="Use FIA website")
    args = argparser.parse_args()
    # Normalise to a list so the single-state and all-states cases share one loop.
    args.state = [args.state]
    if args.state == ['US']:
        # Expansion of 'US' (includes DC; AK and HI are not in the list).
        args.state = ['AL', 'AZ', 'AR', 'CA', 'CO', 'CT', 'DE', 'DC', 'FL', 'GA', 'ID', 'IL', 'IA', 'KS', 'KY', 'LA', 'ME', 'MD', 'MA', 'MI', 'MN', 'MS', 'MO', 'MT', 'NE', 'NV', 'NH', 'NJ', 'NM', 'NY', 'NC', 'ND', 'OH', 'OK', 'OR', 'PA', 'RI', 'SC', 'SD', 'TN', 'TX', 'UT', 'VT', 'VA', 'WA', 'WI', 'WY']
    for state in args.state:
        # Parse the raw tables (downloading them first when --online is set).
        if args.online:
            plots = read.parse(state, online=True)
        else:
            plots = read.parse(state, online=False)
        # Write the clustered plot file, then the cleaned output CSVs.
        read.cluster_prep_file(plots, state)
        read.clean(state, b=True)

0 comments on commit bf9c347

Please sign in to comment.