-
Notifications
You must be signed in to change notification settings - Fork 3
/
Copy pathcatch per trip fluke bsbs scup 2022 new.do
1748 lines (1239 loc) · 41.5 KB
/
catch per trip fluke bsbs scup 2022 new.do
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
/* This is a file that produces a dataset that contains #of fish encountered per trip.
This is a port of Scott's "domain_catch_frequencies_gom_cod_wave_2013.sas"
This is a template program for estimating catch frequencies
using the MRIP public-use datasets.
The program is setup to use information in the trip_yyyyw
dataset to define custom domains. The catch frequencies are
estimated within the domains by merging the trip information
onto the catch_yyyyw datasets.
Required input datasets:
trip_yyyyw
catch_yyyyw
yyyy = year
w = wave
*/
qui{
* --- Setup: reset Stata state and point at the MRIP data directory --------
clear
clear matrix
clear mata
set maxvar 100000
version 12.1
/* General strategy
COMPUTE totals and std deviations catch
*/
clear
cd "C:\Users\andrew.carr-harris\Desktop\MRIP_data"
clear
mata: mata clear
* tl1 holds the stacked trip records, cl1 the stacked catch records
tempfile tl1 cl1
* $triplist / $catchlist are globals listing the trip_yyyyw / catch_yyyyw
* datasets to append -- presumably set by a caller script; TODO confirm
dsconcat $triplist
sort year strat_id psu_id id_code
*replace id_code=ID_CODE if id_code=="" & ID_CODE!=""
* drop placeholder intercept ids and records with no stratum
drop if strmatch(id_code, "*xx*")==1
drop if strat_id==""
duplicates drop
save `tl1'
clear
dsconcat $catchlist
sort year strat_id psu_id id_code
* normalize species names: lowercase, strip spaces ("SUMMER FLOUNDER" -> "summerflounder")
replace common=subinstr(lower(common)," ","",.)
save `cl1'
use `tl1'
* one trip row can match many catch rows (one per species caught on the trip)
merge 1:m year strat_id psu_id id_code using `cl1', keep(1 3) nogenerate
/* THIS IS THE END OF THE DATA MERGING CODE */
*replace mode_fx=MODE_FX if mode_fx=="" & MODE_FX!=""
*replace area_x=AREA_X if area_x=="" & AREA_X!=""
/* ensure only relevant states */
keep if inlist(st,25, 44, 9, 36, 34, 51, 10, 24, 37)
/*This is the "full" mrip data */
tempfile tc1
save `tc1'
* --- State, mode, and domain recodes --------------------------------------
* two-digit string version of the state FIPS code
gen st2 = string(st,"%02.0f")
* map state FIPS codes to postal abbreviations
gen state="MA" if st==25
replace state="MD" if st==24
replace state="RI" if st==44
replace state="CT" if st==9
replace state="NY" if st==36
replace state="NJ" if st==34
replace state="DE" if st==10
replace state="VA" if st==51
replace state="NC" if st==37
/*
gen mode1="sh" if inlist(mode_fx, "1", "2", "3")
replace mode1="bt" if inlist(mode_fx, "4", "5", "6", "7")
*keep if mode1=="bt"
*/
* collapse fishing modes: shore (sh), for-hire (fh), private boat (pr)
* NOTE(review): mode_fx=="6" is never assigned, so those rows get mode1==""
* and a malformed my_dom_id_string below -- confirm this is intentional
gen mode1="sh" if inlist(mode_fx, "1", "2", "3")
replace mode1="fh" if inlist(mode_fx, "4", "5")
replace mode1="pr" if inlist(mode_fx, "7")
/* classify trips that I care about into the things I care about (caught or targeted sf/bsb) and things I don't care about "ZZ" */
replace prim1_common=subinstr(lower(prim1_common)," ","",.)
* BUG FIX: this line previously read subinstr(lower(prim1_common)...), which
* re-normalized prim1_common and left prim2_common un-normalized
replace prim2_common=subinstr(lower(prim2_common)," ","",.)
* common_dom=="SF" marks trips that caught OR primarily targeted any of the
* three species of interest; everything else stays in the "ZZ" domain
gen common_dom="ZZ"
replace common_dom="SF" if inlist(common, "summerflounder")
replace common_dom="SF" if inlist(common, "blackseabass")
replace common_dom="SF" if inlist(common, "scup")
replace common_dom="SF" if inlist(prim1_common, "summerflounder")
replace common_dom="SF" if inlist(prim1_common, "blackseabass")
replace common_dom="SF" if inlist(prim1_common, "scup")
tostring wave, gen(wv2)
tostring year, gen(yr2)
*gen my_dom_id_string=state+"_"+common_dom+"_"+yr2
*gen my_dom_id_string=state+"_"+common_dom
*gen my_dom_id_string=state+"_"+month1+"_"+mode1+"_"+common_dom
*gen my_dom_id_string=state+"_"+wv2+"_"+mode1+"_"+common_dom
* estimation domain: state x wave x mode x domain-of-interest
gen my_dom_id_string=state+"_"+wv2+"_"+mode1+"_"+common_dom
/* we need to retain 1 observation for each strat_id, psu_id, and id_code. */
/* NOTE(review): the comment below is carried over from the GoM cod/haddock
template; here "my_common" is summer flounder / black sea bass / scup and
the domain marker is "SF" rather than "_ATLCO". */
/* A. Trip (Targeted or Caught) (Cod or Haddock) then it should be marked in the domain "_ATLCO"
1. Caught my_common. We retain tot_cat
2. Did not catch my_common. We set tot_cat=0
B. Trip did not (Target or Caught) (Cod or Haddock) then it is marked in the the domain "ZZZZZ"
1. Caught my_common. This is impossible.
2. Did not catch my_common. We set tot_cat=0
To do this:
1. We set tot_cat, landing, claim, harvest, and release to zero for all instances of common~="my_common"
2. We set a variable "no_dup"=0 if the record is "my_common" catch and no_dup=1 otherwise.
3. We sort on year, strat_id, psu_id, id_code, "no_dup", and "my_dom_id_string".
For records with duplicate year, strat_id, psu_id, and id_codes, the first entry will be "my_common catch" if it exists. These will all be have sp_dom "ATLCO." If there is no my_common catch, but the
trip targeted (cod or haddock) or caught cod, the secondary sorting on "my_dom_id_string" ensures the trip is properly classified as an (A2 from above).
4. After sorting, we generate a count variable (count_obs1 from 1....n) and we keep only the "first" observations within each "year, strat_id, psu_id, and id_codes" group.
*/
* For each catch measure, compute the trip-level (strat_id psu_id id_code)
* total per species and carry it on every row of the trip; the result is the
* sum over that trip's catch rows (zero-filled later where missing).
local vars claim harvest release landing tot_cat
foreach v of local vars{
gen sf_`v'=`v' if common=="summerflounder"
egen sum_sf_`v'=sum(sf_`v'), by(strat_id psu_id id_code )
drop sf_`v'
rename sum_sf_`v' sf_`v'
gen bsb_`v'=`v' if common=="blackseabass"
egen sum_bsb_`v'=sum(bsb_`v'), by(strat_id psu_id id_code )
drop bsb_`v'
rename sum_bsb_`v' bsb_`v'
gen scup_`v'=`v' if common=="scup"
egen sum_scup_`v'=sum(scup_`v'), by(strat_id psu_id id_code )
drop scup_`v'
rename sum_scup_`v' scup_`v'
}
/*
1 Set tot_cat, landing, claim, harvest, and release to zero for all instances of common~="my_common"
2. We set a variable "no_dup"=0 if the record is "$my_common" catch and no_dup=1 otherwise.*/
gen no_dup=0
replace no_dup=1 if !inlist(common, "summerflounder", "blackseabass", "scup")
/*
replace no_dup=1 if strmatch(common, "summerflounder")==0
replace no_dup=1 if strmatch(common, "blackseabass")==0
replace no_dup=1 if strmatch(common, "scup")==0
*/
* sort so a species-of-interest catch row (no_dup==0) comes first within each
* trip, then keep only that first row per trip
bysort year strat_id psu_id id_code (my_dom_id_string no_dup): gen count_obs1=_n
keep if count_obs1==1 // This keeps only one record for trips with catch of multiple species. We have already computed catch of the species of interest above and saved these in a trip-row
order strat_id psu_id id_code no_dup my_dom_id_string count_obs1 common
keep if common_dom=="SF"
keep var_id strat_id psu_id id_code common sp_code claim harvest release landing ///
tot_cat wp_catch leader cntrbtrs wp_int state mode1 state wv2 ///
sf_claim bsb_claim scup_claim sf_harvest bsb_harvest scup_harvest sf_release bsb_release scup_release sf_landing bsb_landing scup_landing ///
bsb_tot_cat sf_tot_cat scup_tot_cat claim_unadj harvest_unadj release_unadj year month common_dom my_dom_id_string
/*
*original unadjusted catch datasets
local vars sf bsb scup
foreach v of local vars{
svyset psu_id [pweight= wp_int], strata(strat_id) singleunit(certainty)
svy:total `v'_claim
mat list r(table), format(%12.0gc)
svy:total `v'_harvest
mat list r(table), format(%12.0gc)
svy:total `v'_landing
mat list r(table), format(%12.0gc)
svy:total `v'_release
mat list r(table), format(%12.0gc)
svy:total `v'_tot_cat
mat list r(table), format(%12.0gc)
}
*/
/***********************************************************************
Following code defines new catch count fields to handle grouped catches
************************************************************************/
* The numbered comments below are the SAS steps this section ports.
*1)
*proc sort data=mycatch;
* by strat_id psu_id leader id_code;
*run;
sort strat_id psu_id leader id_code
*2) data mycatch;
* set mycatch;
* by strat_id psu_id leader id_code;
* group_order + 1;
* if first.leader then group_order=1;
*run;
bysort strat_id psu_id leader id_code: gen group_order= _n
*3) data mycatch;
* set mycatch;
* if claim=. then claim=0;
* if harvest=. then harvest=0;
* if landing=. then landing=0;
* new_claim1 = claim/cntrbtrs;
*new_release = release;
* new_harvest = harvest;
*run;
* zero-fill the per-species trip totals computed above
local vars claim harvest release landing
foreach v of local vars{
replace sf_`v'=0 if sf_`v'==.
replace bsb_`v'=0 if bsb_`v'==.
replace scup_`v'=0 if scup_`v'==.
}
*As per John Foster's recommendation, we need to divide claim by total contributors 2) harvest and release by counts of id_code within leader
*First, sum the catch within leader:
local vars claim harvest release landing
foreach v of local vars{
egen sum_sf_`v'=sum(sf_`v'), by(strat_id psu_id leader)
egen sum_bsb_`v'=sum(bsb_`v'), by(strat_id psu_id leader)
egen sum_scup_`v'=sum(scup_`v'), by(strat_id psu_id leader)
}
*Now get counts of id_codes within leader:
gen tab=1
egen count_id_codes=sum(tab), by(strat_id psu_id leader)
drop tab
egen max_cntrbtrs=max(cntrbtrs), by(strat_id psu_id leader)
*Now divide claim by cntrbtrs and the other catch variables by count_id_codes:
* NOTE(review): the active code divides claim by count_id_codes, not cntrbtrs
* as the comment above says (the cntrbtrs version is commented out) -- confirm
local vars sf bsb scup
foreach v of local vars{
*gen new_claim1_`v'=sum_`v'_claim/cntrbtrs
gen new_claim1_`v'=sum_`v'_claim/count_id_codes
gen new_harvest1_`v'=sum_`v'_harvest/count_id_codes
gen new_release1_`v'=sum_`v'_release/count_id_codes
mvencode new_claim1_`v' new_harvest1_`v' new_release1_`v', mv(0) override
}
*Now multiply wp_int by count of id_code to get new wp_catch:
gen new_wp_int=wp_int*count_id_codes
*we ultimately keep only the first observation within leader, so mark these rows:
bysort strat_id psu_id leader: gen first=1 if _n==1
*generate total catch for the species of interest:
local vars sf bsb scup
foreach v of local vars{
gen tot_cat_`v'=new_claim1_`v'+new_harvest1_`v'+new_release1_`v'
gen landing_`v'=new_claim1_`v'+new_harvest1_`v'
gen release_`v'=new_release1_`v'
}
*keep if first==1
}
*compute # days in each month; I will merge this file to the catch draw file to ensure each day
*contains 100 catch draws across 100 iterations
preserve
clear
* build a daily calendar covering all of 2022
set obs 2
gen day = cond(_n==1, td(1jan2022), td(31dec2022))
format day %td
tsset day
tsfill, full
gen day_i=_n
gen month=month(day)
* number of calendar days in each month
bysort month: gen ndays=_N
* MRIP wave: two consecutive months per wave (Jan/Feb -> "1", ..., Nov/Dec -> "6")
gen wv2=string(ceil(month/2))
* collapse to one row per month and write the lookup file
keep month ndays wv2
duplicates drop
*tempfile ndays
*save `ndays', replace
save "ndays.dta", replace
restore
/*
There are discrepancies between total MRIP catch and simulated MRIP catch due to how we account for MRIP grouped harvest.
In some MRIP intercepts, the total number of contributors to harvest is higher than the number of id_codes (interviewees) on a given trip. This means
the interview did not collect release information for all anglers on the trip. Because we are interested in estimated catch-per-angler, not catch-per-trip
(the utility model is at the individual level), we divide harvest (claim) by the number of contributors and release by total interviewees. This leads
to the "new" total survey-weighted harvest (and thus total catch) for the trip being lower than the "old" survey-weighted estimate of total catch.
To account for this discrepancy, I estimate total catch per domain using both methods. Where the % difference is more than 5%, I will reallocate
the difference across all trips that already caught that species of fish. Because the difference leads to decimal values, which we then round, I first round
and again recalculate the difference. If the difference is again greater than 5%, I randomly select a number of trips that caught fish and reduce their catch by
X fish until the difference is below 5%.
*/
* numeric version of the domain identifier
encode my_dom_id_string, gen(my_dom_id)
* placeholders for the per-domain discrepancy flags and fish-count differences,
* filled in by the domain comparison loop below (total catch, then harvest)
foreach sp in scup bsb sf {
	gen `sp'_flag=.
	gen `sp'_diff=.
}
foreach sp in scup bsb sf {
	gen `sp'_harv_flag=.
	gen `sp'_harv_diff=.
}
* round the per-angler catch variables to whole fish
foreach sp in sf bsb scup {
	replace tot_cat_`sp'=round(tot_cat_`sp')
}
foreach sp in sf bsb scup {
	replace landing_`sp'=round(landing_`sp')
}
foreach sp in sf bsb scup {
	replace release_`sp'=round(release_`sp')
}
/*
svyset psu_id [pweight= wp_int] , strata(strat_id) singleunit(certainty)
svy: total sf_landing if state=="NJ" & mode1=="fh"
svyset psu_id [pweight= new_wp_int], strata(strat_id) singleunit(certainty)
svy: total landing_sf if first==1 & state=="NJ" & mode1=="fh"
svyset psu_id [pweight= wp_int] , strata(strat_id) singleunit(certainty)
svy: total bsb_landing if state=="NJ" & mode1=="fh"
svyset psu_id [pweight= new_wp_int], strata(strat_id) singleunit(certainty)
svy: total landing_bsb if first==1 & state=="NJ" & mode1=="fh"
svyset psu_id [pweight= wp_int] , strata(strat_id) singleunit(certainty)
svy: total bsb_tot_cat if state=="NJ" & mode1=="fh"
svyset psu_id [pweight= new_wp_int], strata(strat_id) singleunit(certainty)
svy: total tot_cat_bsb if first==1 & state=="NJ" & mode1=="fh"
svyset psu_id [pweight= wp_int] , strata(strat_id) singleunit(certainty)
svy: total sf_tot_cat if state=="NJ" & mode1=="fh"
svyset psu_id [pweight= new_wp_int], strata(strat_id) singleunit(certainty)
svy: total tot_cat_sf if first==1 & state=="NJ" & mode1=="fh"
*/
replace new_wp_int=round(new_wp_int)
levelsof my_dom_id_string if common_dom=="SF", local(doms)
* For every domain and each species, compare the survey-weighted total under
* the original weights/catch (wp_int with `sp'_tot_cat / `sp'_landing) against
* the total under the re-allocated per-angler catch with re-scaled weights
* (new_wp_int with tot_cat_`sp' / landing_`sp', first row per leader group).
* When the relative gap exceeds 2.5% (NOTE: the prose comment above says 5%,
* but the threshold used is .025), flag the domain and store the fish-count
* difference so it can be re-allocated across trips further below.
* (This collapses six copy-pasted species sections into one loop; the command
* sequence per domain is unchanged, only display-only di/return list dropped.)
foreach d of local doms {
	foreach sp in scup bsb sf {
		*total catch
		svyset psu_id [pweight= wp_int] , strata(strat_id) singleunit(certainty)
		svy: total `sp'_tot_cat if my_dom_id_string=="`d'"
		local base = r(table)[1,1]
		svyset psu_id [pweight= new_wp_int], strata(strat_id) singleunit(certainty)
		svy: total tot_cat_`sp' if first==1 & my_dom_id_string=="`d'"
		local new = r(table)[1,1]
		local scalar= abs(1-`base'/`new')
		local diff_n_fish= `base'-`new'
		replace `sp'_flag=1 if my_dom_id_string=="`d'" & `scalar'>.025 & `scalar' !=.
		replace `sp'_diff = `diff_n_fish' if my_dom_id_string=="`d'" & `scalar'>.025 & `scalar' !=.
		*harvest
		svyset psu_id [pweight= wp_int] , strata(strat_id) singleunit(certainty)
		svy: total `sp'_landing if my_dom_id_string=="`d'"
		local base = r(table)[1,1]
		svyset psu_id [pweight= new_wp_int], strata(strat_id) singleunit(certainty)
		svy: total landing_`sp' if first==1 & my_dom_id_string=="`d'"
		local new = r(table)[1,1]
		local scalar= abs(1-`base'/`new')
		local diff_n_fish= `base'-`new'
		replace `sp'_harv_flag=1 if my_dom_id_string=="`d'" & `scalar'>.025 & `scalar' !=.
		replace `sp'_harv_diff = `diff_n_fish' if my_dom_id_string=="`d'" & `scalar'>.025 & `scalar' !=.
	}
}
/*
svyset psu_id [pweight= wp_int] , strata(strat_id) singleunit(certainty)
svy: total sf_landing if state=="NJ" & mode1=="fh"
svyset psu_id [pweight= new_wp_int], strata(strat_id) singleunit(certainty)
svy: total landing_sf if first==1 & state=="NJ" & mode1=="fh"
svyset psu_id [pweight= wp_int] , strata(strat_id) singleunit(certainty)
svy: total sf_tot_cat if state=="NJ" & mode1=="fh"
svyset psu_id [pweight= new_wp_int], strata(strat_id) singleunit(certainty)
svy: total tot_cat_sf if first==1 & state=="NJ" & mode1=="fh"
svyset psu_id [pweight= new_wp_int], strata(strat_id) singleunit(certainty)
svy: total landing_bsb if first==1 & state=="NJ" & mode1=="fh"
svyset psu_id [pweight= wp_int] , strata(strat_id) singleunit(certainty)
svy: total bsb_landing if state=="NJ" & mode1=="fh" & wv2=="3"
svyset psu_id [pweight= new_wp_int], strata(strat_id) singleunit(certainty)
svy: total landing_bsb if first==1 & state=="NJ" & mode1=="fh" & wv2=="3"
svyset psu_id [pweight= wp_int] , strata(strat_id) singleunit(certainty)
svy: total sf_tot_cat if state=="NJ" & mode1=="fh"
svyset psu_id [pweight= new_wp_int], strata(strat_id) singleunit(certainty)
svy: total tot_cat_sf if first==1 & state=="NJ" & mode1=="fh"
*/
destring month, replace
* keep one row per leader group (the per-angler catch rows built above)
keep if first==1
*Build a data set of trip catches.
*The simulation model will draw 50 trips * 30 catch draws per state-mode-day.
*Catch per trip is estimated at the state-wave-mode level.
*I will create 10,000 draws of catch per trip for each state-mode-month combination.
*Then, in the calibration model, we will randomly select a set of 50 * 30 catch draws for each state-mode-day
*The following code tries to account for rounding error.
* base0 = the trip-level catch data; re-loaded once per domain below
tempfile base0
save `base0', replace
* accumulator for the per-domain-month tempfiles created below
global catchez
* Helper: re-allocate the flagged rounding/grouping difference for one catch
* variable within the current (domain x month) cell of trip-replicated rows.
* `diffvar' = per-row difference variable (e.g. scup_diff); `catchvar' = the
* catch variable to adjust (e.g. tot_cat_scup). When the (constant within the
* cell) diff is positive, add one fish to that many randomly sampled rows;
* when negative, remove one fish from randomly sampled rows with positive
* catch. The `catchvar'_new merge variable is left in the data, matching the
* original copy-pasted code this helper replaces.
capture program drop adjust_catch
program define adjust_catch
	args diffvar catchvar
	qui su `diffvar'
	* r(N)==0 means no rows were flagged for this cell -- nothing to adjust
	if r(N)!=0 {
		local nsamp = r(mean)
		if `nsamp'>=0 {
			preserve
			sample `nsamp', count
			gen `catchvar'_new=`catchvar'+1
			keep id `catchvar'_new
			tempfile adj
			save `adj', replace
			restore
			merge 1:1 id using `adj'
			replace `catchvar' = `catchvar'_new if _merge==3
			drop _merge
		}
		else {
			preserve
			* only rows that already caught fish can give one back
			keep if `catchvar'>0
			local nsamp_neg=abs(`nsamp')
			sample `nsamp_neg', count
			gen `catchvar'_new=`catchvar'-1
			keep id `catchvar'_new
			tempfile adj
			save `adj', replace
			restore
			merge 1:1 id using `adj'
			replace `catchvar' = `catchvar'_new if _merge==3
			drop _merge
		}
	}
end
levelsof my_dom_id_string if common_dom=="SF" /*& state="NJ"*/, local(doms)
foreach d of local doms{
	u `base0', clear
	keep if my_dom_id_string=="`d'"
	keep my_dom_id_string state mode1 month wv2 tot_cat_sf tot_cat_bsb tot_cat_scup new_wp_int common_dom *_diff landing_scup landing_bsb landing_sf release_scup release_bsb release_sf
	* replace the intercept month with BOTH months of the wave: duplicate every
	* row, then assign wave w's rows to months 2w-1 and 2w
	drop month
	expand 2, gen(dup)
	gen month = 2*real(wv2) - 1 + dup
	drop dup
	* each wave-level difference is split evenly across its two months
	foreach sp in scup bsb sf {
		replace `sp'_diff=round(`sp'_diff/2)
		replace `sp'_harv_diff=round(`sp'_harv_diff/2)
	}
	*merge m:1 month wv2 using "ndays.dta", keep(3) nogen
	tempfile base
	save `base', replace
	levelsof month , local(mnths)
	foreach m of local mnths{
		u `base', clear
		keep if month==`m'
		* one row per estimated trip: replicate each intercept by its weight
		expand new_wp_int
		gen id=_n
		* re-allocate the flagged differences, species by species, total catch
		* then harvest (same order as the original copy-pasted sections)
		adjust_catch scup_diff tot_cat_scup
		adjust_catch scup_harv_diff landing_scup
		adjust_catch bsb_diff tot_cat_bsb
		adjust_catch bsb_harv_diff landing_bsb
		adjust_catch sf_diff tot_cat_sf
		adjust_catch sf_harv_diff landing_sf
		* draw exactly 20,000 catch records per domain-month, expanding first
		* when the cell is too small (original if/else both sampled 20,000)
		count
		local num=`r(N)'
		if `num'<=20000{
			local expand = round(20000/`num')+2
			expand `expand'
		}
		sample 20000, count
		tempfile catchez`d'`m'
		save `catchez`d'`m'', replace
		global catchez "$catchez "`catchez`d'`m''" "
	}
}
* stack all per-domain-month draw files into the final catch-draw dataset
dsconcat $catchez
drop my_dom_id_string common_dom
**Estimate mean catch and harvest per trip
*gen scup_rel_check = tot_cat_scup-landing_scup
*browse tot_cat_scup landing_scup scup_rel_check release_scup if scup_rel_check!=0
*drop new_wp