<!DOCTYPE html>
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta charset="utf-8" />
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<meta name="generator" content="pandoc" />
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta name="author" content="Brian Stock" />
<title>Step 1: Process West Coast Groundfish Trawl Survey Data</title>
<style type="text/css">code{white-space: pre;}</style>
<style type="text/css">
div.sourceCode { overflow-x: auto; }
table.sourceCode, tr.sourceCode, td.lineNumbers, td.sourceCode {
margin: 0; padding: 0; vertical-align: baseline; border: none; }
table.sourceCode { width: 100%; line-height: 100%; }
td.lineNumbers { text-align: right; padding-right: 4px; padding-left: 4px; color: #aaaaaa; border-right: 1px solid #aaaaaa; }
td.sourceCode { padding-left: 5px; }
code > span.kw { color: #007020; font-weight: bold; } /* Keyword */
code > span.dt { color: #902000; } /* DataType */
code > span.dv { color: #40a070; } /* DecVal */
code > span.bn { color: #40a070; } /* BaseN */
code > span.fl { color: #40a070; } /* Float */
code > span.ch { color: #4070a0; } /* Char */
code > span.st { color: #4070a0; } /* String */
code > span.co { color: #60a0b0; font-style: italic; } /* Comment */
code > span.ot { color: #007020; } /* Other */
code > span.al { color: #ff0000; font-weight: bold; } /* Alert */
code > span.fu { color: #06287e; } /* Function */
code > span.er { color: #ff0000; font-weight: bold; } /* Error */
code > span.wa { color: #60a0b0; font-weight: bold; font-style: italic; } /* Warning */
code > span.cn { color: #880000; } /* Constant */
code > span.sc { color: #4070a0; } /* SpecialChar */
code > span.vs { color: #4070a0; } /* VerbatimString */
code > span.ss { color: #bb6688; } /* SpecialString */
code > span.im { } /* Import */
code > span.va { color: #19177c; } /* Variable */
code > span.cf { color: #007020; font-weight: bold; } /* ControlFlow */
code > span.op { color: #666666; } /* Operator */
code > span.bu { } /* BuiltIn */
code > span.ex { } /* Extension */
code > span.pp { color: #bc7a00; } /* Preprocessor */
code > span.at { color: #7d9029; } /* Attribute */
code > span.do { color: #ba2121; font-style: italic; } /* Documentation */
code > span.an { color: #60a0b0; font-weight: bold; font-style: italic; } /* Annotation */
code > span.cv { color: #60a0b0; font-weight: bold; font-style: italic; } /* CommentVar */
code > span.in { color: #60a0b0; font-weight: bold; font-style: italic; } /* Information */
</style>
<link href="data:text/css;charset=utf-8,body%20%7B%0Abackground%2Dcolor%3A%20%23fff%3B%0Amargin%3A%201em%20auto%3B%0Amax%2Dwidth%3A%20700px%3B%0Aoverflow%3A%20visible%3B%0Apadding%2Dleft%3A%202em%3B%0Apadding%2Dright%3A%202em%3B%0Afont%2Dfamily%3A%20%22Open%20Sans%22%2C%20%22Helvetica%20Neue%22%2C%20Helvetica%2C%20Arial%2C%20sans%2Dserif%3B%0Afont%2Dsize%3A%2014px%3B%0Aline%2Dheight%3A%201%2E35%3B%0A%7D%0A%23header%20%7B%0Atext%2Dalign%3A%20center%3B%0A%7D%0A%23TOC%20%7B%0Aclear%3A%20both%3B%0Amargin%3A%200%200%2010px%2010px%3B%0Apadding%3A%204px%3B%0Awidth%3A%20400px%3B%0Aborder%3A%201px%20solid%20%23CCCCCC%3B%0Aborder%2Dradius%3A%205px%3B%0Abackground%2Dcolor%3A%20%23f6f6f6%3B%0Afont%2Dsize%3A%2013px%3B%0Aline%2Dheight%3A%201%2E3%3B%0A%7D%0A%23TOC%20%2Etoctitle%20%7B%0Afont%2Dweight%3A%20bold%3B%0Afont%2Dsize%3A%2015px%3B%0Amargin%2Dleft%3A%205px%3B%0A%7D%0A%23TOC%20ul%20%7B%0Apadding%2Dleft%3A%2040px%3B%0Amargin%2Dleft%3A%20%2D1%2E5em%3B%0Amargin%2Dtop%3A%205px%3B%0Amargin%2Dbottom%3A%205px%3B%0A%7D%0A%23TOC%20ul%20ul%20%7B%0Amargin%2Dleft%3A%20%2D2em%3B%0A%7D%0A%23TOC%20li%20%7B%0Aline%2Dheight%3A%2016px%3B%0A%7D%0Atable%20%7B%0Amargin%3A%201em%20auto%3B%0Aborder%2Dwidth%3A%201px%3B%0Aborder%2Dcolor%3A%20%23DDDDDD%3B%0Aborder%2Dstyle%3A%20outset%3B%0Aborder%2Dcollapse%3A%20collapse%3B%0A%7D%0Atable%20th%20%7B%0Aborder%2Dwidth%3A%202px%3B%0Apadding%3A%205px%3B%0Aborder%2Dstyle%3A%20inset%3B%0A%7D%0Atable%20td%20%7B%0Aborder%2Dwidth%3A%201px%3B%0Aborder%2Dstyle%3A%20inset%3B%0Aline%2Dheight%3A%2018px%3B%0Apadding%3A%205px%205px%3B%0A%7D%0Atable%2C%20table%20th%2C%20table%20td%20%7B%0Aborder%2Dleft%2Dstyle%3A%20none%3B%0Aborder%2Dright%2Dstyle%3A%20none%3B%0A%7D%0Atable%20thead%2C%20table%20tr%2Eeven%20%7B%0Abackground%2Dcolor%3A%20%23f7f7f7%3B%0A%7D%0Ap%20%7B%0Amargin%3A%200%2E5em%200%3B%0A%7D%0Ablockquote%20%7B%0Abackground%2Dcolor%3A%20%23f6f6f6%3B%0Apadding%3A%200%2E25em%200%2E75em%3B%0A%7D%0Ahr%20%7B%0Aborder%2Dstyle%3A%20solid%3B%0Aborder%3A%20none%3B%0Aborder%2Dtop%3A%201px%20solid%20%23777%3B%0Amargin%3A%2028px%200%3B%0A%7D%0Adl%20%7B%0Amargin%2Dleft%3A%200%3B%0A%7D%0Adl%20dd%20%7B%0Amargin%2Dbottom%3A%2013px%3B%0Amargin%2Dleft%3A%2013px%3B%0A%7D%0Adl%20dt%20%7B%0Afont%2Dweight%3A%20bold%3B%0A%7D%0Aul%20%7B%0Amargin%2Dtop%3A%200%3B%0A%7D%0Aul%20li%20%7B%0Alist%2Dstyle%3A%20circle%20outside%3B%0A%7D%0Aul%20ul%20%7B%0Amargin%2Dbottom%3A%200%3B%0A%7D%0Apre%2C%20code%20%7B%0Abackground%2Dcolor%3A%20%23f7f7f7%3B%0Aborder%2Dradius%3A%203px%3B%0Acolor%3A%20%23333%3B%0Awhite%2Dspace%3A%20pre%2Dwrap%3B%20%0A%7D%0Apre%20%7B%0Aborder%2Dradius%3A%203px%3B%0Amargin%3A%205px%200px%2010px%200px%3B%0Apadding%3A%2010px%3B%0A%7D%0Apre%3Anot%28%5Bclass%5D%29%20%7B%0Abackground%2Dcolor%3A%20%23f7f7f7%3B%0A%7D%0Acode%20%7B%0Afont%2Dfamily%3A%20Consolas%2C%20Monaco%2C%20%27Courier%20New%27%2C%20monospace%3B%0Afont%2Dsize%3A%2085%25%3B%0A%7D%0Ap%20%3E%20code%2C%20li%20%3E%20code%20%7B%0Apadding%3A%202px%200px%3B%0A%7D%0Adiv%2Efigure%20%7B%0Atext%2Dalign%3A%20center%3B%0A%7D%0Aimg%20%7B%0Abackground%2Dcolor%3A%20%23FFFFFF%3B%0Apadding%3A%202px%3B%0Aborder%3A%201px%20solid%20%23DDDDDD%3B%0Aborder%2Dradius%3A%203px%3B%0Aborder%3A%201px%20solid%20%23CCCCCC%3B%0Amargin%3A%200%205px%3B%0A%7D%0Ah1%20%7B%0Amargin%2Dtop%3A%200%3B%0Afont%2Dsize%3A%2035px%3B%0Aline%2Dheight%3A%2040px%3B%0A%7D%0Ah2%20%7B%0Aborder%2Dbottom%3A%204px%20solid%20%23f7f7f7%3B%0Apadding%2Dtop%3A%2010px%3B%0Apadding%2Dbottom%3A%202px%3B%0Afont%2Dsize%3A%20145%25%3B%0A%7D%0Ah3%20%7B%0Aborder%2Dbottom%3A%202px%20solid%20%23f7f7f7%3B%0Apadding
%2Dtop%3A%2010px%3B%0Afont%2Dsize%3A%20120%25%3B%0A%7D%0Ah4%20%7B%0Aborder%2Dbottom%3A%201px%20solid%20%23f7f7f7%3B%0Amargin%2Dleft%3A%208px%3B%0Afont%2Dsize%3A%20105%25%3B%0A%7D%0Ah5%2C%20h6%20%7B%0Aborder%2Dbottom%3A%201px%20solid%20%23ccc%3B%0Afont%2Dsize%3A%20105%25%3B%0A%7D%0Aa%20%7B%0Acolor%3A%20%230033dd%3B%0Atext%2Ddecoration%3A%20none%3B%0A%7D%0Aa%3Ahover%20%7B%0Acolor%3A%20%236666ff%3B%20%7D%0Aa%3Avisited%20%7B%0Acolor%3A%20%23800080%3B%20%7D%0Aa%3Avisited%3Ahover%20%7B%0Acolor%3A%20%23BB00BB%3B%20%7D%0Aa%5Bhref%5E%3D%22http%3A%22%5D%20%7B%0Atext%2Ddecoration%3A%20underline%3B%20%7D%0Aa%5Bhref%5E%3D%22https%3A%22%5D%20%7B%0Atext%2Ddecoration%3A%20underline%3B%20%7D%0A%0Acode%20%3E%20span%2Ekw%20%7B%20color%3A%20%23555%3B%20font%2Dweight%3A%20bold%3B%20%7D%20%0Acode%20%3E%20span%2Edt%20%7B%20color%3A%20%23902000%3B%20%7D%20%0Acode%20%3E%20span%2Edv%20%7B%20color%3A%20%2340a070%3B%20%7D%20%0Acode%20%3E%20span%2Ebn%20%7B%20color%3A%20%23d14%3B%20%7D%20%0Acode%20%3E%20span%2Efl%20%7B%20color%3A%20%23d14%3B%20%7D%20%0Acode%20%3E%20span%2Ech%20%7B%20color%3A%20%23d14%3B%20%7D%20%0Acode%20%3E%20span%2Est%20%7B%20color%3A%20%23d14%3B%20%7D%20%0Acode%20%3E%20span%2Eco%20%7B%20color%3A%20%23888888%3B%20font%2Dstyle%3A%20italic%3B%20%7D%20%0Acode%20%3E%20span%2Eot%20%7B%20color%3A%20%23007020%3B%20%7D%20%0Acode%20%3E%20span%2Eal%20%7B%20color%3A%20%23ff0000%3B%20font%2Dweight%3A%20bold%3B%20%7D%20%0Acode%20%3E%20span%2Efu%20%7B%20color%3A%20%23900%3B%20font%2Dweight%3A%20bold%3B%20%7D%20%20code%20%3E%20span%2Eer%20%7B%20color%3A%20%23a61717%3B%20background%2Dcolor%3A%20%23e3d2d2%3B%20%7D%20%0A" rel="stylesheet" type="text/css" />
</head>
<body>
<h1 class="title toc-ignore">Step 1: Process West Coast Groundfish Trawl Survey Data</h1>
<h4 class="author"><em>Brian Stock</em></h4>
<h4 class="date"><em>July 3, 2018</em></h4>
<p>This vignette demonstrates how we ran the spatiotemporal models in:</p>
<blockquote>
<p>Stock BC, Ward EJ, Eguchi T, Jannot JE, Thorson JT, Feist BE, and Semmens BX. “Comparing predictions of fisheries bycatch using multiple spatiotemporal species distribution model frameworks.”</p>
</blockquote>
<p>If you are not interested in the data processing, you can skip ahead to <a href="https://rawgit.com/brianstock/spatial-bycatch/master/2_run_models.html">2_run_models</a>, which uses the saved output of this script (<code>wcann_processed.RData</code>) to run the spatial models.</p>
<div id="download-the-data" class="section level3">
<h3>Download the data</h3>
<p>Because the fisheries observer datasets we used are confidential (<a href="https://www.nwfsc.noaa.gov/research/divisions/fram/observation/data_collection/manuals/2017%20WCGOP%20Training%20Manual%20Final%20website%20copy.pdf">WCGOP</a>, <a href="http://www.nmfs.noaa.gov/pr/interactions/fkwtrt/meeting1/handouts/observer_manual.pdf">HILL</a>), here we perform the same analyses using the publicly available <a href="https://www.nwfsc.noaa.gov/research/divisions/fram/groundfish/bottom_trawl.cfm">West Coast Groundfish Trawl Survey</a>.</p>
<p>Download the data from <a href="https://www.nwfsc.noaa.gov/data/map">FRAM</a>:</p>
<ol style="list-style-type: decimal">
<li>Search for “darkblotched rockfish”, “yelloweye rockfish”, and “Pacific halibut”</li>
<li>Start date: 1/1/2003, End date: 12/31/2012</li>
<li>Layers → Trawl Survey → Click “CSV” next to <em>Catch</em> and <em>Haul Characteristics</em> to download two files:</li>
</ol>
<ul>
<li><code>wcann_catch_fram.csv</code>: the species caught in each haul and the catch weight of each</li>
<li><code>wcann_haul_fram.csv</code>: haul characteristics (time, latitude, and longitude at the start/end of each haul, depth, etc.)</li>
</ul>
</div>
<div id="load-data-into-r" class="section level3">
<h3>Load data into R</h3>
<div class="sourceCode"><pre class="sourceCode r"><code class="sourceCode r"><span class="co"># set working directory to workshop folder with data files</span>
<span class="kw">setwd</span>(<span class="st">"/home/brian/Documents/Bycatch/WCGOP/data/"</span>)
<span class="co"># load haul dataset</span>
HAUL <-<span class="st"> </span><span class="kw">read.csv</span>(<span class="st">"wcann_haul_fram.csv"</span>,<span class="dt">header=</span><span class="ot">TRUE</span>)
<span class="co"># load catch dataset</span>
CATCH <-<span class="st"> </span><span class="kw">read.csv</span>(<span class="st">"wcann_catch_fram.csv"</span>,<span class="dt">header=</span><span class="ot">TRUE</span>)</code></pre></div>
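<p>Before going further, it is worth checking that the columns used below are present in both files (a minimal sanity check; the exact set of columns delivered by FRAM may change over time):</p>
<div class="sourceCode"><pre class="sourceCode r"><code class="sourceCode r"># columns this script relies on; stop early if any are missing
haul_cols <- c("trawl_id","performance","date_yyyymmdd","latitude_dd",
               "longitude_dd","depth_hi_prec_m","vertebrate_weight_kg")
catch_cols <- c("trawl_id","common_name","total_catch_wt_kg")
stopifnot(all(haul_cols %in% names(HAUL)), all(catch_cols %in% names(CATCH)))</code></pre></div>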
</div>
<div id="combine-haul-and-catch" class="section level3">
<h3>Combine <code>HAUL</code> and <code>CATCH</code></h3>
<p>We want a data frame <code>dat</code> where each row is a unique haul, with the following columns:</p>
<ul>
<li>HAUL_ID: HAUL$trawl_id</li>
<li>YEAR: year of the haul, calculated from DATE</li>
<li>DATE: HAUL$date_yyyymmdd</li>
<li>LAT: latitude, HAUL$latitude_hi_prec_dd</li>
<li>LON: longitude, HAUL$longitude_hi_prec_dd</li>
<li>DEPTH: depth (in m), HAUL$depth_hi_prec_m</li>
<li>TOTAL: total vertebrate catch in kg (HAUL$vertebrate_weight_kg)</li>
<li>DBRK: darkblotched rockfish catch in kg (CATCH$total_catch_wt_kg for “Sebastes crameri”)</li>
<li>PHLB: Pacific halibut catch in kg (CATCH$total_catch_wt_kg for “Hippoglossus stenolepis”)</li>
<li>YEYE: yelloweye rockfish catch in kg (CATCH$total_catch_wt_kg for “Sebastes ruberrimus”)</li>
</ul>
<div class="sourceCode"><pre class="sourceCode r"><code class="sourceCode r"><span class="co"># Delete "unsatisfactory" hauls</span>
HAUL <-<span class="st"> </span><span class="kw">subset</span>(HAUL, performance<span class="op">==</span><span class="st">"Satisfactory"</span>)
<span class="co"># Create empty data frame where each row will be a unique haul</span>
cols <-<span class="st"> </span><span class="kw">c</span>(<span class="st">"HAUL_ID"</span>,<span class="st">"YEAR"</span>,<span class="st">"DATE"</span>,<span class="st">"LAT"</span>,<span class="st">"LON"</span>,<span class="st">"DEPTH"</span>,<span class="st">"TOTAL"</span>,<span class="st">"DBRK"</span>,<span class="st">"PHLB"</span>,<span class="st">"YEYE"</span>)
hauls <-<span class="st"> </span><span class="kw">unique</span>(HAUL<span class="op">$</span>trawl_id)
n.hauls <-<span class="st"> </span><span class="kw">length</span>(hauls)
dat <-<span class="st"> </span><span class="kw">matrix</span>(<span class="ot">NA</span>, <span class="dt">nrow=</span>n.hauls, <span class="dt">ncol=</span><span class="kw">length</span>(cols))
dat <-<span class="st"> </span><span class="kw">as.data.frame</span>(dat)
<span class="kw">names</span>(dat) <-<span class="st"> </span>cols
<span class="kw">head</span>(dat)</code></pre></div>
<div class="sourceCode"><pre class="sourceCode r"><code class="sourceCode r"><span class="co"># Fill in columns from HAUL</span>
dat<span class="op">$</span>HAUL_ID <-<span class="st"> </span>HAUL<span class="op">$</span>trawl_id
dat<span class="op">$</span>LAT <-<span class="st"> </span>HAUL<span class="op">$</span>latitude_dd
dat<span class="op">$</span>LON <-<span class="st"> </span>HAUL<span class="op">$</span>longitude_dd
dat<span class="op">$</span>DEPTH <-<span class="st"> </span>HAUL<span class="op">$</span>depth_hi_prec_m
dat<span class="op">$</span>DATE <-<span class="st"> </span><span class="kw">as.Date</span>(<span class="kw">as.character</span>(HAUL<span class="op">$</span>date_yyyymmdd),<span class="dt">format =</span> <span class="st">"%Y%m%d"</span>)
dat<span class="op">$</span>YEAR <-<span class="st"> </span><span class="kw">as.numeric</span>(<span class="kw">format</span>(dat<span class="op">$</span>DATE,<span class="st">"%Y"</span>))
dat<span class="op">$</span>TOTAL <-<span class="st"> </span>HAUL<span class="op">$</span>vertebrate_weight_kg
dat<span class="op">$</span>TOTAL[<span class="kw">which</span>(<span class="kw">is.na</span>(dat<span class="op">$</span>TOTAL))] <-<span class="st"> </span><span class="dv">0</span> <span class="co"># replace NA with 0</span></code></pre></div>
<div class="sourceCode"><pre class="sourceCode r"><code class="sourceCode r"><span class="co"># Add catch of each species by haul (takes a couple min)</span>
<span class="kw">library</span>(dplyr)
dat<span class="op">$</span>YEYE <-<span class="st"> </span>dat<span class="op">$</span>DBRK <-<span class="st"> </span>dat<span class="op">$</span>PHLB <-<span class="st"> </span><span class="dv">0</span>
<span class="cf">for</span>(i <span class="cf">in</span> <span class="dv">1</span><span class="op">:</span>n.hauls){
<span class="co"># get all species caught in the ith haul</span>
cur_haul <-<span class="st"> </span><span class="kw">filter</span>(CATCH, trawl_id<span class="op">==</span>dat<span class="op">$</span>HAUL_ID[i])
<span class="cf">if</span>(<span class="st">"yelloweye rockfish"</span> <span class="op">%in%</span><span class="st"> </span>cur_haul<span class="op">$</span>common_name) dat<span class="op">$</span>YEYE[i] <-<span class="st"> </span><span class="kw">as.numeric</span>(dplyr<span class="op">::</span><span class="kw">filter</span>(cur_haul, common_name<span class="op">==</span><span class="st">"yelloweye rockfish"</span>) <span class="op">%>%</span><span class="st"> </span>dplyr<span class="op">::</span><span class="kw">select</span>(total_catch_wt_kg))
<span class="cf">if</span>(<span class="st">"darkblotched rockfish"</span> <span class="op">%in%</span><span class="st"> </span>cur_haul<span class="op">$</span>common_name) dat<span class="op">$</span>DBRK[i] <-<span class="st"> </span><span class="kw">as.numeric</span>(dplyr<span class="op">::</span><span class="kw">filter</span>(cur_haul,common_name<span class="op">==</span><span class="st">"darkblotched rockfish"</span>) <span class="op">%>%</span><span class="st"> </span>dplyr<span class="op">::</span><span class="kw">select</span>(total_catch_wt_kg))
<span class="cf">if</span>(<span class="st">"Pacific halibut"</span> <span class="op">%in%</span><span class="st"> </span>cur_haul<span class="op">$</span>common_name) dat<span class="op">$</span>PHLB[i] <-<span class="st"> </span><span class="kw">as.numeric</span>(dplyr<span class="op">::</span><span class="kw">filter</span>(cur_haul,common_name<span class="op">==</span><span class="st">"Pacific halibut"</span>) <span class="op">%>%</span><span class="st"> </span>dplyr<span class="op">::</span><span class="kw">select</span>(total_catch_wt_kg))
}
<span class="co"># Order by date</span>
dat <-<span class="st"> </span>dat[<span class="kw">order</span>(dat<span class="op">$</span>DATE),]
<span class="co"># Add DAY covariate: day of the year</span>
dat<span class="op">$</span>DAY <-<span class="st"> </span><span class="kw">as.numeric</span>(dat<span class="op">$</span>DATE <span class="op">-</span><span class="st"> </span><span class="kw">as.Date</span>(<span class="kw">paste0</span>(dat<span class="op">$</span>YEAR,<span class="st">"-01-01"</span>)))
<span class="co"># Add binomial catch columns</span>
dat<span class="op">$</span>DBRK_<span class="dv">01</span> <-<span class="st"> </span>dat<span class="op">$</span>PHLB_<span class="dv">01</span> <-<span class="st"> </span>dat<span class="op">$</span>YEYE_<span class="dv">01</span> <-<span class="st"> </span><span class="dv">0</span>
dat<span class="op">$</span>DBRK_<span class="dv">01</span>[<span class="kw">which</span>(dat<span class="op">$</span>DBRK<span class="op">></span><span class="dv">0</span>)] <-<span class="st"> </span><span class="dv">1</span>
dat<span class="op">$</span>PHLB_<span class="dv">01</span>[<span class="kw">which</span>(dat<span class="op">$</span>PHLB<span class="op">></span><span class="dv">0</span>)] <-<span class="st"> </span><span class="dv">1</span>
dat<span class="op">$</span>YEYE_<span class="dv">01</span>[<span class="kw">which</span>(dat<span class="op">$</span>YEYE<span class="op">></span><span class="dv">0</span>)] <-<span class="st"> </span><span class="dv">1</span></code></pre></div>
</div>
<div id="add-sst-covariate" class="section level3">
<h3>Add SST covariate</h3>
<ul>
<li><code>SST</code>: daily sea surface temperature anomalies (in °C)</li>
</ul>
<p>Download daily sea surface temperature anomalies (.nc files) for 2003-2012 (the years spanned by the survey hauls) from: <a href="https://www.esrl.noaa.gov/psd/data/gridded/data.noaa.oisst.v2.highres.html" class="uri">https://www.esrl.noaa.gov/psd/data/gridded/data.noaa.oisst.v2.highres.html</a></p>
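<p>For convenience, the yearly files can be fetched in a loop. The base URL below is an assumption (the OISST archive has moved between NOAA servers over the years), so verify it against the page above before running:</p>
<div class="sourceCode"><pre class="sourceCode r"><code class="sourceCode r"># Bulk-download the daily SST anomaly files; the file names follow the
# sst.day.anom.YYYY.v2.nc pattern expected by get_SST() below.
# NOTE: base_url is an assumption -- check the NOAA PSL page for the current location.
base_url <- "https://downloads.psl.noaa.gov/Datasets/noaa.oisst.v2.highres"
dest_dir <- "/home/brian/Documents/Bycatch/WCGOP/data"  # edit to your data folder
for (yr in 2003:2012) {
  f <- paste0("sst.day.anom.", yr, ".v2.nc")
  download.file(file.path(base_url, f), destfile = file.path(dest_dir, f), mode = "wb")
}</code></pre></div>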
<div class="sourceCode"><pre class="sourceCode r"><code class="sourceCode r"><span class="co"># function to get SST daily anomaly at the DATE/LON/LAT for each haul</span>
<span class="co"># uses bilinear interpolation from nearest gridpoints</span>
<span class="kw">library</span>(ncdf4)
get_SST <-<span class="st"> </span><span class="cf">function</span>(dat){
<span class="cf">for</span>(i <span class="cf">in</span> <span class="dv">1</span><span class="op">:</span><span class="kw">dim</span>(dat)[<span class="dv">1</span>]){ <span class="co"># for each row i</span>
this.yr =<span class="st"> </span>dat<span class="op">$</span>YEAR[i]
nc =<span class="st"> </span><span class="kw">nc_open</span>(<span class="kw">paste</span>(<span class="st">"/home/brian/Documents/Bycatch/WCGOP/data/sst.day.anom."</span>,this.yr,<span class="st">".v2.nc"</span>,<span class="dt">sep=</span><span class="st">""</span>)) <span class="co"># you will need to edit to where you saved the .nc files</span>
ncdates =<span class="st"> </span>nc<span class="op">$</span>dim<span class="op">$</span>time<span class="op">$</span>vals <span class="co"># gets vector of dates of the current year</span>
ncdates =<span class="st"> </span><span class="kw">as.Date</span>(ncdates,<span class="dt">origin =</span> <span class="st">'1800-1-1'</span>) <span class="co"># formats date vector</span>
date1a =<span class="st"> </span><span class="kw">which.min</span>(<span class="kw">abs</span>(dat<span class="op">$</span>DATE[i] <span class="op">-</span><span class="st"> </span>ncdates)) <span class="co"># index of the haul date within the year, i.e. day of year (e.g. Jan 4 = 4, Feb 1 = 32)</span>
all.lat =<span class="st"> </span>nc<span class="op">$</span>dim<span class="op">$</span>lat<span class="op">$</span>vals
lat1a =<span class="st"> </span><span class="kw">which.min</span>(<span class="kw">abs</span>(dat<span class="op">$</span>LAT[i] <span class="op">-</span><span class="st"> </span>all.lat)) <span class="co"># index of haul's LAT</span>
all.lon =<span class="st"> </span>nc<span class="op">$</span>dim<span class="op">$</span>lon<span class="op">$</span>vals
lon1a =<span class="st"> </span><span class="kw">which.min</span>(<span class="kw">abs</span>(((<span class="dv">180</span><span class="op">+</span>dat<span class="op">$</span>LON[i])<span class="op">+</span><span class="dv">180</span>) <span class="op">-</span><span class="st"> </span>all.lon)) <span class="co"># index of haul's LON</span>
this.lon =<span class="st"> </span><span class="dv">360</span><span class="op">+</span>dat<span class="op">$</span>LON[i] <span class="co"># haul LONG</span>
this.lat =<span class="st"> </span>dat<span class="op">$</span>LAT[i] <span class="co"># haul LAT</span>
lat.hi =<span class="st"> </span><span class="kw">which</span>(all.lat <span class="op">></span><span class="st"> </span>dat<span class="op">$</span>LAT[i])[<span class="dv">1</span>] <span class="co"># index of LAT *just above* haul LAT</span>
lat.lo =<span class="st"> </span>lat.hi <span class="op">-</span><span class="st"> </span><span class="dv">1</span> <span class="co"># index of LAT *just below* haul LAT</span>
lon.hi =<span class="st"> </span><span class="kw">which</span>(all.lon <span class="op">></span><span class="st"> </span>(<span class="dv">360</span><span class="op">+</span>dat<span class="op">$</span>LON[i]))[<span class="dv">1</span>] <span class="co"># index of LONG *just above* haul LON</span>
lon.lo =<span class="st"> </span>lon.hi <span class="op">-</span><span class="st"> </span><span class="dv">1</span> <span class="co"># index of LON *just below* haul LONG</span>
<span class="co"># get the SST anomolies from the ncdf object</span>
<span class="co"># start = X,Y,time (anom object is 3-D)</span>
<span class="co"># count = how many points to read in each dim</span>
<span class="co"># sstfield grabs the SST anomolies for all lat/lon points on the date of the haul</span>
sstfield =<span class="st"> </span><span class="kw">ncvar_get</span>(nc, <span class="st">"anom"</span>, <span class="dt">start=</span><span class="kw">c</span>(<span class="dv">1</span>,<span class="dv">1</span>,date1a), <span class="dt">count=</span><span class="kw">c</span>(<span class="kw">length</span>(all.lon),<span class="kw">length</span>(all.lat),<span class="dv">1</span>))
sst00 =<span class="st"> </span>sstfield[lon.lo,lat.lo]
sst01 =<span class="st"> </span>sstfield[lon.lo,lat.hi]
sst10 =<span class="st"> </span>sstfield[lon.hi,lat.lo]
sst11 =<span class="st"> </span>sstfield[lon.hi,lat.hi]
<span class="cf">if</span>(<span class="kw">is.na</span>(sst00)) sst00 =<span class="st"> </span>sst10
<span class="cf">if</span>(<span class="kw">is.na</span>(sst10)) sst10 =<span class="st"> </span>sst00
<span class="cf">if</span>(<span class="kw">is.na</span>(sst01)) sst01 =<span class="st"> </span>sst11
<span class="cf">if</span>(<span class="kw">is.na</span>(sst11)) sst11 =<span class="st"> </span>sst01
<span class="co"># This math makes sense if you draw it out </span>
<span class="co"># We first do linear interpolation in the x-direction. This yields</span>
fR1 =<span class="st"> </span>(all.lon[lon.hi]<span class="op">-</span>this.lon)<span class="op">/</span>(all.lon[lon.hi]<span class="op">-</span>all.lon[lon.lo])<span class="op">*</span>sst00 <span class="op">+</span><span class="st"> </span>(this.lon<span class="op">-</span>all.lon[lon.lo])<span class="op">/</span>(all.lon[lon.hi]<span class="op">-</span>all.lon[lon.lo])<span class="op">*</span>sst10
fR2 =<span class="st"> </span>(all.lon[lon.hi]<span class="op">-</span>this.lon)<span class="op">/</span>(all.lon[lon.hi]<span class="op">-</span>all.lon[lon.lo])<span class="op">*</span>sst01 <span class="op">+</span><span class="st"> </span>(this.lon<span class="op">-</span>all.lon[lon.lo])<span class="op">/</span>(all.lon[lon.hi]<span class="op">-</span>all.lon[lon.lo])<span class="op">*</span>sst11
<span class="co"># Next do interpolation of these values in Y-direction. This yields, </span>
sst.interp =<span class="st"> </span>(all.lat[lat.hi]<span class="op">-</span>this.lat)<span class="op">/</span>(all.lat[lat.hi]<span class="op">-</span>all.lat[lat.lo])<span class="op">*</span>fR1 <span class="op">+</span><span class="st"> </span>(this.lat<span class="op">-</span>all.lat[lat.lo])<span class="op">/</span>(all.lat[lat.hi]<span class="op">-</span>all.lat[lat.lo])<span class="op">*</span>fR2
<span class="kw">print</span>(<span class="kw">paste</span>(i,sst.interp,<span class="dt">sep=</span><span class="st">" "</span>))
dat<span class="op">$</span>SST[i] =<span class="st"> </span>sst.interp
<span class="kw">nc_close</span>(nc)
} <span class="co"># end for loop over haul points</span>
<span class="kw">return</span>(dat)
} <span class="co"># end function get_SST</span>
dat<span class="op">$</span>SST =<span class="st"> </span><span class="dv">0</span>
dat <-<span class="st"> </span><span class="kw">get_SST</span>(dat) <span class="co"># takes about 5 min to do all 7,240 locations</span>
<span class="co"># delete records where SST is NA</span>
dat <-<span class="st"> </span>dat[<span class="op">-</span><span class="kw">which</span>(<span class="kw">is.na</span>(dat<span class="op">$</span>SST)),]</code></pre></div>
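<p>For reference, the two interpolation steps inside <code>get_SST</code> are the standard bilinear formula. Writing \(x\) for the haul longitude (on the 0&ndash;360 scale), \(y\) for the haul latitude, \(x_1 \le x \le x_2\) and \(y_1 \le y \le y_2\) for the surrounding grid lines, and \(s_{00}, s_{10}, s_{01}, s_{11}\) for the anomalies at the four surrounding gridpoints (<code>sst00</code>, <code>sst10</code>, <code>sst01</code>, <code>sst11</code>):</p>
<p>\[
f_{R1} = \frac{x_2 - x}{x_2 - x_1}\, s_{00} + \frac{x - x_1}{x_2 - x_1}\, s_{10}, \qquad
f_{R2} = \frac{x_2 - x}{x_2 - x_1}\, s_{01} + \frac{x - x_1}{x_2 - x_1}\, s_{11},
\]</p>
<p>\[
\mathrm{SST}_{\mathrm{interp}} = \frac{y_2 - y}{y_2 - y_1}\, f_{R1} + \frac{y - y_1}{y_2 - y_1}\, f_{R2}.
\]</p>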
</div>
<div id="add-inrca-covariate" class="section level3">
<h3>Add inRCA covariate</h3>
<ul>
<li><code>inRCA</code>: was the haul in/near a Rockfish Conservation Area? 0/1</li>
</ul>
<p><em>Note:</em> The Rockfish Conservation Area (RCA) boundaries vary by month, year, latitude, and depth. We have compiled these historical boundaries in <code>rca_boundaries.csv</code>. For more details, see the <a href="http://www.westcoast.fisheries.noaa.gov/fisheries/management/groundfish_closures/rockfish_areas.html">RCA webpage</a>.</p>
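<p>The code below relies on only a few columns of <code>rca_boundaries.csv</code>: <code>Year</code>, <code>Lat.low</code> (the southern limit of each latitude band), and one column per month (<code>Jan</code> through <code>Dec</code>) giving the closed depth range in fathoms as a <code>"low-high"</code> string. A hypothetical one-row illustration of that layout (the values shown here are made up):</p>
<div class="sourceCode"><pre class="sourceCode r"><code class="sourceCode r"># Illustration only -- the real file has one row per latitude band per year,
# and the closed depth range differs across months, years, and latitude bands.
rca_example <- data.frame(Year = 2003, Lat.low = 40.16667)
rca_example[month.abb] <- "100-250"   # e.g. closed between 100 and 250 fathoms
rca_example</code></pre></div>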
<div class="sourceCode"><pre class="sourceCode r"><code class="sourceCode r"><span class="kw">library</span>(tidyr)
<span class="co"># Get historical RCA boundary limits</span>
rca <-<span class="st"> </span><span class="kw">read.csv</span>(<span class="st">"/home/brian/Documents/Bycatch/WCGOP/data/rca_boundaries.csv"</span>,<span class="dt">header=</span><span class="ot">TRUE</span>)
<span class="co"># Get latitude bins -- different for each year</span>
years <-<span class="st"> </span><span class="kw">sort</span>(<span class="kw">as.numeric</span>(<span class="kw">levels</span>(<span class="kw">as.factor</span>(rca<span class="op">$</span>Year))),<span class="dt">decreasing=</span><span class="ot">TRUE</span>)
get_n_bins <-<span class="st"> </span><span class="cf">function</span>(yr) {a <-<span class="st"> </span>rca <span class="op">%>%</span><span class="st"> </span>dplyr<span class="op">::</span><span class="kw">filter</span>(Year<span class="op">==</span>yr) <span class="op">%>%</span><span class="st"> </span>dplyr<span class="op">::</span><span class="kw">select</span>(Lat.low) <span class="op">%>%</span><span class="st"> </span>dim; <span class="kw">return</span>(a[<span class="dv">1</span>])}
n.bins <-<span class="st"> </span><span class="kw">sapply</span>(years,get_n_bins)
LAT.bins <-<span class="st"> </span><span class="ot">NULL</span>
<span class="cf">for</span>(yr <span class="cf">in</span> <span class="dv">1</span><span class="op">:</span><span class="kw">length</span>(n.bins)){ LAT.bins <-<span class="st"> </span><span class="kw">c</span>(LAT.bins,n.bins[yr]<span class="op">:</span><span class="dv">1</span>) }
rca.new <-<span class="st"> </span>rca <span class="op">%>%</span><span class="st"> </span><span class="kw">mutate</span>(<span class="dt">LAT.bin=</span>LAT.bins) <span class="op">%>%</span><span class="st"> </span><span class="kw">gather</span>(Month,Close,Jan<span class="op">:</span>Dec)
close.lohi <-<span class="st"> </span><span class="kw">matrix</span>(<span class="kw">as.numeric</span>(<span class="kw">unlist</span>(<span class="kw">strsplit</span>(rca.new<span class="op">$</span>Close,<span class="st">"-"</span>))), <span class="dt">ncol=</span><span class="dv">2</span>, <span class="dt">byrow=</span><span class="ot">TRUE</span>)
rca.new <-<span class="st"> </span>rca.new <span class="op">%>%</span><span class="st"> </span><span class="kw">mutate</span>(<span class="dt">close.low=</span>close.lohi[,<span class="dv">1</span>],<span class="dt">close.high=</span>close.lohi[,<span class="dv">2</span>])
<span class="co"># RCA boundaries are defined by depth bins, in fathoms</span>
<span class="co"># Get depth bins for survey haul locations</span>
dat<span class="op">$</span>fath <-<span class="st"> </span>dat<span class="op">$</span>DEPTH<span class="op">*</span><span class="fl">0.546806649</span> <span class="co"># get depth in fathoms, 0.546806649 fathoms/m</span>
fathom.categories <-<span class="st"> </span><span class="kw">c</span>(<span class="st">"0-50"</span>,<span class="st">"50-60"</span>,<span class="st">"60-75"</span>,<span class="st">"75-100"</span>,<span class="st">"100-150"</span>,<span class="st">"150-200"</span>,<span class="st">"200-250"</span>,<span class="st">"250+"</span>) <span class="co"># fathom bins used to define RCAs</span>
dat<span class="op">$</span>fath_categ <-<span class="st"> </span><span class="kw">cut</span>(dat<span class="op">$</span>fath, <span class="dt">breaks=</span><span class="kw">c</span>(<span class="dv">0</span>,<span class="dv">50</span>,<span class="dv">60</span>,<span class="dv">75</span>,<span class="dv">100</span>,<span class="dv">150</span>,<span class="dv">200</span>,<span class="dv">250</span>,<span class="dv">1000</span>), <span class="dt">labels=</span>fathom.categories) <span class="co"># calculate fathom bins for haul locations</span>
dat<span class="op">$</span>id <-<span class="st"> </span><span class="dv">1</span><span class="op">:</span><span class="kw">dim</span>(dat)[<span class="dv">1</span>]
dat<span class="op">$</span>MONTH <-<span class="st"> </span><span class="kw">format</span>(<span class="kw">as.Date</span>(dat<span class="op">$</span>DATE),<span class="st">"%b"</span>)
<span class="co"># Don't need to check inRCA for depths >250 fm or in 2002</span>
checkRCA <-<span class="st"> </span>dplyr<span class="op">::</span><span class="kw">filter</span>(dat, fath_categ<span class="op">!=</span><span class="st">"250+"</span>) <span class="co"># only could be in an RCA if depth < 250 fm</span>
checkRCA <-<span class="st"> </span>dplyr<span class="op">::</span><span class="kw">filter</span>(checkRCA, YEAR<span class="op">!=</span><span class="dv">2002</span>) <span class="co"># no RCA closures in 2002</span>
<span class="co"># Construct inRCA covariate by matching haul year/month/lat/depth to RCA limits</span>
<span class="co"># takes about 1 min to do all locations</span>
dat<span class="op">$</span>inRCA <-<span class="st"> </span><span class="dv">0</span> <span class="co"># add "inRCA" covariate (0 if not, 1 if yes)</span>
dat<span class="op">$</span>bin <-<span class="st"> </span><span class="dv">0</span>
<span class="cf">for</span>(j <span class="cf">in</span> <span class="dv">1</span><span class="op">:</span><span class="kw">nrow</span>(checkRCA)){
i <-<span class="st"> </span>checkRCA<span class="op">$</span>id[j]
breaks <-<span class="st"> </span><span class="kw">c</span>(<span class="dv">55</span>,rca <span class="op">%>%</span><span class="st"> </span>dplyr<span class="op">::</span><span class="kw">filter</span>(Year<span class="op">==</span>dat<span class="op">$</span>YEAR[i]) <span class="op">%>%</span><span class="st"> </span>dplyr<span class="op">::</span><span class="kw">select</span>(Lat.low) <span class="op">%>%</span><span class="st"> </span>unlist)
dat<span class="op">$</span>bin[i] <-<span class="st"> </span><span class="kw">cut</span>(dat<span class="op">$</span>LAT[i],<span class="dt">breaks=</span>breaks,<span class="dt">labels=</span><span class="dv">1</span><span class="op">:</span>(<span class="kw">length</span>(breaks)<span class="op">-</span><span class="dv">1</span>))
low <-<span class="st"> </span>rca.new <span class="op">%>%</span><span class="st"> </span>dplyr<span class="op">::</span><span class="kw">filter</span>(Year<span class="op">==</span>dat<span class="op">$</span>YEAR[i],Month<span class="op">==</span>dat<span class="op">$</span>MONTH[i],LAT.bin<span class="op">==</span>dat<span class="op">$</span>bin[i]) <span class="op">%>%</span><span class="st"> </span>dplyr<span class="op">::</span><span class="kw">select</span>(close.low)
high <-<span class="st"> </span>rca.new <span class="op">%>%</span><span class="st"> </span>dplyr<span class="op">::</span><span class="kw">filter</span>(Year<span class="op">==</span>dat<span class="op">$</span>YEAR[i],Month<span class="op">==</span>dat<span class="op">$</span>MONTH[i],LAT.bin<span class="op">==</span>dat<span class="op">$</span>bin[i]) <span class="op">%>%</span><span class="st"> </span>dplyr<span class="op">::</span><span class="kw">select</span>(close.high)
<span class="cf">if</span>(<span class="kw">abs</span>(dat<span class="op">$</span>fath[i]) <span class="op"><</span><span class="st"> </span>high <span class="op">&</span><span class="st"> </span><span class="kw">abs</span>(dat<span class="op">$</span>fath[i]) <span class="op">></span><span class="st"> </span>low) dat<span class="op">$</span>inRCA[i] =<span class="st"> </span><span class="dv">1</span>
}</code></pre></div>
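<p>A quick cross-tabulation is a useful sanity check here: because only hauls shallower than 250 fm were checked, the <code>250+</code> depth bin should contain no hauls with <code>inRCA == 1</code>:</p>
<div class="sourceCode"><pre class="sourceCode r"><code class="sourceCode r"># hauls inside an RCA by depth bin; the 250+ row should be all zeros for inRCA = 1
with(dat, table(fath_categ, inRCA))</code></pre></div>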
</div>
<div id="standardize-covariates" class="section level3">
<h3>Standardize covariates</h3>
<p>Now we transform, center (subtract the mean), and create quadratic covariates. The original untransformed, uncentered covariates are in ALL CAPS (e.g. <code>DEPTH</code>); the transformed, centered covariates ready to use in the models are in lower case (e.g. <code>logDEPTH</code>, <code>logDEPTH2</code>).</p>
<div class="sourceCode"><pre class="sourceCode r"><code class="sourceCode r"><span class="co"># Log transform covariates on large scales</span>
dat<span class="op">$</span>logDEPTH <-<span class="st"> </span><span class="kw">log</span>(dat<span class="op">$</span>DEPTH)
<span class="co"># Center/de-mean each covariate</span>
dat<span class="op">$</span>sst <-<span class="st"> </span>dat<span class="op">$</span>SST
demean <-<span class="st"> </span><span class="cf">function</span>(vec){ <span class="kw">return</span>(vec <span class="op">-</span><span class="st"> </span><span class="kw">mean</span>(vec))}
dat[,<span class="kw">c</span>(<span class="st">"DAY"</span>,<span class="st">"logDEPTH"</span>,<span class="st">"sst"</span>)] <-<span class="st"> </span><span class="kw">apply</span>(dat[,<span class="kw">c</span>(<span class="st">"DAY"</span>,<span class="st">"logDEPTH"</span>,<span class="st">"sst"</span>)],<span class="dv">2</span>,demean)
<span class="co"># Create squared covariates</span>
dat<span class="op">$</span>sst2 <-<span class="st"> </span>dat<span class="op">$</span>sst<span class="op">^</span><span class="dv">2</span>
dat<span class="op">$</span>logDEPTH2 <-<span class="st"> </span>dat<span class="op">$</span>logDEPTH<span class="op">^</span><span class="dv">2</span>
<span class="co"># Turn categorical variables into factors</span>
dat<span class="op">$</span>YEAR <-<span class="st"> </span><span class="kw">as.factor</span>(dat<span class="op">$</span>YEAR)
dat<span class="op">$</span>DBRK_<span class="dv">01</span> <-<span class="st"> </span><span class="kw">as.factor</span>(dat<span class="op">$</span>DBRK_<span class="dv">01</span>)
dat<span class="op">$</span>PHLB_<span class="dv">01</span> <-<span class="st"> </span><span class="kw">as.factor</span>(dat<span class="op">$</span>PHLB_<span class="dv">01</span>)
dat<span class="op">$</span>YEYE_<span class="dv">01</span> <-<span class="st"> </span><span class="kw">as.factor</span>(dat<span class="op">$</span>YEYE_<span class="dv">01</span>)
dat<span class="op">$</span>inRCA <-<span class="st"> </span><span class="kw">as.factor</span>(dat<span class="op">$</span>inRCA)</code></pre></div>
</div>
<div id="data-are-ready-to-fit" class="section level3">
<h3>Data are ready to fit</h3>
<div class="sourceCode"><pre class="sourceCode r"><code class="sourceCode r"><span class="kw">save</span>(dat, <span class="dt">file=</span><span class="st">"/home/brian/Documents/Bycatch/WCGOP/data/wcann_processed.RData"</span>)
<span class="kw">head</span>(dat)</code></pre></div>
<pre><code>## HAUL_ID YEAR DATE LAT LON DEPTH TOTAL DBRK
## 359 2.00303e+11 2003 2003-06-24 46.09611 -124.7761 564.9317 123.176 0.00
## 6671 2.00303e+11 2003 2003-06-24 46.02472 -124.7389 310.0056 304.980 1.81
## 6925 2.00303e+11 2003 2003-06-24 46.15667 -124.5156 140.7280 501.270 1.35
## 6926 2.00303e+11 2003 2003-06-24 46.50389 -124.7325 606.3237 98.910 0.00
## 7216 2.00303e+11 2003 2003-06-24 46.75500 -124.5428 107.4615 714.960 0.00
## 182 2.00303e+11 2003 2003-06-25 47.60194 -124.8156 106.0853 573.350 0.10
## PHLB YEYE DAY YEYE_01 PHLB_01 DBRK_01 SST fath
## 359 0 0 -43.1275 0 0 0 -0.01410765 308.90841
## 6671 0 0 -43.1275 0 0 1 0.04703642 169.51312
## 6925 18 0 -43.1275 0 1 1 -0.06884963 76.95101
## 6926 0 0 -43.1275 0 0 0 -0.38681021 331.54183
## 7216 0 0 -43.1275 0 0 0 -0.61593420 58.76066
## 182 0 0 -42.1275 0 0 1 -1.53736698 58.00815
## fath_categ id MONTH inRCA bin logDEPTH sst sst2
## 359 250+ 1 Jun 0 0 0.7985194 0.8565284 0.73364093
## 6671 150-200 2 Jun 1 4 0.1984049 0.9176725 0.84212280
## 6925 75-100 3 Jun 1 4 -0.5913565 0.8017864 0.64286150
## 6926 250+ 4 Jun 0 0 0.8692286 0.4838259 0.23408747
## 7216 50-60 5 Jun 1 4 -0.8610528 0.2547019 0.06487305
## 182 50-60 6 Jun 1 4 -0.8739419 -0.6667309 0.44453010
## logDEPTH2
## 359 0.63763326
## 6671 0.03936452
## 6925 0.34970247
## 6926 0.75555833
## 7216 0.74141188
## 182 0.76377450</code></pre>
</div>
<!-- dynamically load mathjax for compatibility with self-contained -->
<script>
(function () {
var script = document.createElement("script");
script.type = "text/javascript";
script.src = "https://mathjax.rstudio.com/latest/MathJax.js?config=TeX-AMS-MML_HTMLorMML";
document.getElementsByTagName("head")[0].appendChild(script);
})();
</script>
</body>
</html>