|
|
* Main dispatcher for -reghdfe-.
* Tries each internal subcommand in turn (via cap syntax, which fails silently
* when the arguments do not match) and falls through to a fresh estimation.
program define reghdfe

	* Undocumented subcommand: recover and store the fixed-effect estimates
	* (alphas) from the most recent estimation
	cap syntax, store_alphas
	if (!c(rc)) {
		Store_Alphas
		exit
	}

	* Easter egg
	cap syntax, shrug
	if (!c(rc)) {
		di as text _n `" {browse "https://www.theawl.com/2014/05/the-life-and-times-of-%C2%AF_%E3%83%84_%C2%AF/":¯\_(ツ)_/¯}"'
		exit
	}

	* Internal subcommand: this Stata instance is a parallel worker process
	cap syntax, worker [*]
	if (!c(rc)) {
		ParallelWorker, `options'
		exit
	}

	* Compatibility mode: run a historical version when version() is given
	cap syntax anything(everything) [fw aw pw/], [*] VERSION(integer) [noWARN]
	if !c(rc) {
		* Only versions 3 and 5 are shipped alongside this file
		_assert inlist(`version', 3, 5)
		if ("`warn'" != "nowarn") di as error "(running historical version of reghdfe: `version')"
		if ("`weight'"!="") local weightexp [`weight'=`exp']
		if (`version' == 3) {
			reghdfe3 `anything' `weightexp', `options'
		}
		else {
			reghdfe5 `anything' `weightexp', `options'
		}
		exit
	}

	* Replay: redisplay previous estimates when called without a varlist
	if replay() {
		Replay `0'
		exit
	}

	* Normal estimation path: clean leftovers from prior runs, verify the
	* ftools dependency, estimate, then clean up again. Note that Estimate
	* may set -keep_mata- in this frame via c_local, which controls whether
	* the second Cleanup keeps the Mata HDFE object alive.
	loc keep_mata 0
	Cleanup 0 `keep_mata'
	qui which ftools
	ms_get_version ftools, min_version("2.46.0")
	cap noi Estimate `0'
	Cleanup `c(rc)' `keep_mata'
end
|
|
|
|
|
| program Cleanup
|
|
|
| args rc keep_mata
|
|
|
|
|
| loc cleanup_folder = !`keep_mata' & ("$LAST_PARALLEL_DIR"!="")
|
| if (`cleanup_folder') cap mata: unlink_folder(HDFE.parallel_dir, 0)
|
| global LAST_PARALLEL_DIR
|
| global pids
|
|
|
|
|
| if (!`keep_mata') cap mata: mata drop HDFE
|
| cap mata: mata drop hdfe_*
|
| cap drop __temp_reghdfe_resid__
|
|
|
| if (`rc') exit `rc'
|
| end
|
|
|
|
|
program Replay, rclass
	* Redisplay the results of a previous -reghdfe- estimation.
	* Options: display options (*), plus switches to suppress the header,
	* the coefficient table, and/or the footnote.
	syntax [, * noHEADer noTABLE noFOOTnote]

	* Refuse to replay results from a different estimation command
	if (`"`e(cmd)'"' != "reghdfe") error 301
	_get_diopts options, `options'

	* Each no* switch leaves its local empty when the part should be shown
	loc show_header = ("`header'" == "")
	loc show_table = ("`table'" == "")

	if (`show_header') reghdfe_header
	if (`show_header' & `show_table') di ""
	if (`show_table') _coef_table, `options'
	return add
	if ("`footnote'" == "") ReghdfeFootnote
end
|
|
|
|
|
program Estimate, eclass
* Core estimation routine.
* Parses and validates all options, configures the Mata FixedEffects object
* (HDFE), partials out the fixed effects (optionally in parallel), solves the
* OLS regression, and posts results to e(). Communicates -keep_mata- back to
* the caller via c_local when stopping early or when keepmata is requested.

	* ------------------------------------------------------------------
	* Option parsing (capital letters mark the minimal abbreviation)
	* ------------------------------------------------------------------
	#delimit ;
	syntax varlist(fv ts numeric) [if] [in] [fw aw pw/] [ ,

		/* Fixed effects to absorb */
		Absorb(string)

		/* Group/individual fixed effects */
		Group_id(varname numeric)
		Individual_id(varname numeric)
		AGgregation(string)

		/* Variance-covariance estimation and residuals */
		VCE(string) CLuster(string)
		RESiduals(name) RESiduals2

		/* Degrees-of-freedom adjustments */
		DOFadjustments(string)
		GROUPVar(name)

		/* Optimization: solver choice and convergence */
		TEChnique(string)
		TOLerance(real 1e-8)
		ITERATE(real 16000)

		/* MAP solver: transform and acceleration */
		TRAnsform(string)
		ACCELeration(string)

		/* LSMR/LSQR preconditioner */
		PREConditioner(string)

		PRUNE

		/* Memory/speed trade-offs */
		NOSAMPle
		COMPACT
		POOLsize(integer 10)

		/* Parallelization */
		PARallel(string asis)

		/* Reporting switches */
		noHEader noTABle noFOOTnote

		Verbose(integer 0) noWARN
		TIMEit

		KEEPSINgletons

		/* Undocumented / programmer options */
		noPARTIALout
		varlist_is_touse
		noREGress
		KEEPMATA
		FASTREGress

		noCONstant
		noAbsorb2

	] [*]
	;
	#delimit cr

	* ------------------------------------------------------------------
	* Basic setup and flag normalization
	* ------------------------------------------------------------------
	loc timeit = ("`timeit'"!="")
	if (`timeit') timer on 20

	if (`verbose' >= 2) di _n `"{txt}{bf:[CMD]} {inp}reghdfe `0'"'

	* Remove temporary variables left by previous (failed) runs
	cap drop __hdfe*

	if (`verbose' > 0) di as text "{title:Parsing and validating options:}" _n

	* Split display options from the remaining passthrough options
	_get_diopts diopts options, `options'

	* Convert string switches into 0/1 flags
	loc drop_singletons = ("`keepsingletons'" == "")
	loc compact = ("`compact'" != "")
	loc has_standard_fe = (`"`absorb'"' != "")
	loc report_constant = "`constant'" != "noconstant"
	loc has_teams = (`"`group_id'"' != "")
	loc has_individual_fe = (`"`individual_id'"' != "")

	loc stop_before_partial_out = ("`partialout'" == "nopartialout")
	loc stop_before_regression = ("`regress'" == "noregress")
	loc fast_regression = ("`fastregress'" == "fastregress")

	if (`has_individual_fe') _assert `has_teams', msg("cannot set the individual() identifiers without the group() identifiers") rc(198)

	* Defaults: MAP solver unless individual FEs are present (then LSMR)
	if ("`technique'" == "") loc technique = cond("`individual_id'"=="", "map", "lsmr")
	if ("`transform'" == "") loc transform "symmetric_kaczmarz"
	if ("`acceleration'" == "") loc acceleration "conjugate_gradient"
	if ("`preconditioner'" == "") loc preconditioner "block_diagonal"
	if (`poolsize' == 0) loc poolsize = .

	* Warn about the statistical consequences of keeping singletons
	if (`verbose'>-1 & "`keepsingletons'"!="" & "`warn'" != "nowarn") {
		loc url "http://scorreia.com/reghdfe/nested_within_cluster.pdf"
		loc msg "WARNING: Singleton observations not dropped; statistical significance is biased"
		di as error `"`msg' {browse "`url'":(link)}"'
	}

	* cluster() is a convenience alias for vce(cluster ...)
	if ("`cluster'"!="") {
		_assert ("`vce'"==""), msg("only one of cluster() and vce() can be specified") rc(198)
		loc vce cluster `cluster'
	}

	* Normalize the individual-FE aggregation function (mean or sum)
	if ("`aggregation'" == "") loc aggregation mean
	if ("`aggregation'" == "average" | "`aggregation'" == "avg") loc aggregation mean
	_assert inlist("`aggregation'", "mean", "sum")
	loc function_individual "`aggregation'"

	* ------------------------------------------------------------------
	* Varlist and VCE parsing
	* ------------------------------------------------------------------
	if (`verbose' > 0) di as text "# Parsing varlist: {res}`varlist'" _c
	ms_parse_varlist `varlist'
	if (`verbose' > 0) return list
	loc depvar `r(depvar)'
	loc indepvars `r(indepvars)'
	loc fe_format "`r(fe_format)'"
	loc basevars `r(basevars)'

	* Unabbreviate the weight variable (exactly one variable allowed)
	if ("`weight'"!="") unab exp : `exp', min(1) max(1)

	if (`verbose' > 0) di as text _n "# Parsing vce({res}`vce'{txt})" _c
	ms_parse_vce, vce(`vce') weighttype(`weight')
	if (`verbose' > 0) sreturn list
	loc vcetype `s(vcetype)'
	loc clustervars `s(clustervars)'
	loc base_clustervars `s(base_clustervars)'
	loc num_clusters = `s(num_clusters)'
	confirm variable `base_clustervars', exact

	* ------------------------------------------------------------------
	* Build the estimation sample marker (touse)
	* ------------------------------------------------------------------
	if (`stop_before_partial_out' & "`varlist_is_touse'" != "") {
		* Programmer shortcut: the "varlist" is actually an existing touse
		loc touse `varlist'
		loc varlist
		markout `touse' `base_clustervars' `group_id' `individual_id'
	}
	else {
		loc varlist `depvar' `indepvars' `base_clustervars' `group_id' `individual_id'
		marksample touse, strok
		la var `touse' "[touse]"
	}

	if (`stop_before_partial_out') loc varlist

	* ------------------------------------------------------------------
	* Validate solver options (accepting unambiguous prefixes)
	* ------------------------------------------------------------------
	loc valid_techniques map cg lsmr lsqr
	_assert (`: list technique in valid_techniques'), msg("invalid technique: `technique'")

	loc transform = lower("`transform'")
	loc valid_transforms cimmino kaczmarz symmetric_kaczmarz rand_kaczmarz
	foreach x of local valid_transforms {
		if (strpos("`x'", "`transform'")==1) loc transform `x'
	}
	_assert (`: list transform in valid_transforms'), msg("invalid transform: `transform'")

	loc acceleration = lower("`acceleration'")
	if ("`acceleration'"=="cg") loc acceleration conjugate_gradient
	if ("`acceleration'"=="sd") loc acceleration steepest_descent
	if ("`acceleration'"=="off") loc acceleration none
	loc valid_accelerations conjugate_gradient steepest_descent aitken none
	foreach x of local valid_accelerations {
		if (strpos("`x'", "`acceleration'")==1) loc acceleration `x'
	}
	_assert (`: list acceleration in valid_accelerations'), msg("invalid acceleration: `acceleration'")

	loc preconditioner = lower("`preconditioner'")
	if ("`preconditioner'"=="off") loc preconditioner none
	loc valid_preconditioners none diagonal block_diagonal
	foreach x of local valid_preconditioners {
		if (strpos("`x'", "`preconditioner'")==1) loc preconditioner `x'
	}
	_assert (`: list preconditioner in valid_preconditioners'), msg("invalid preconditioner: `preconditioner'")

	* ------------------------------------------------------------------
	* DoF adjustments, residuals, and parallel options
	* ------------------------------------------------------------------
	if (`verbose' > 0) di as text _n `"# Parsing dof({res}`dofadjustments'{txt})"' _c
	ParseDOF, `dofadjustments'
	loc dofadjustments `s(dofadjustments)'
	if (`verbose' > 0) sreturn list

	* residuals2 (bare switch) stores into the fixed name _reghdfe_resid
	opts_exclusive "`residuals' `residuals2'" residuals
	if ("`residuals2'" != "") {
		cap drop _reghdfe_resid
		loc residuals _reghdfe_resid
	}
	else if ("`residuals'"!="") {
		conf new var `residuals'
	}

	if (`"`parallel'"' != "") {
		if (`verbose' > 0) di as text _n `"# Parsing parallel options: {inp}`parallel'"' _c
		ParseParallel `parallel'
		if (`verbose' > 0) sreturn list
		loc parallel_maxproc `s(parallel_maxproc)'
		loc parallel_dir `"`s(parallel_dir)'"'
		loc parallel_force `s(parallel_force)'
		loc parallel_opts `"`s(parallel_opts)'"'
	}
	else {
		loc parallel_maxproc 0
		loc parallel_force 0
	}

	* ------------------------------------------------------------------
	* Group/individual FE validation
	* ------------------------------------------------------------------
	if (`has_teams') {
		tempvar indiv_tousevar
		ValidateGroups `basevars' `base_clustervars' `exp', group_id(`group_id') touse(`touse') indivtouse(`indiv_tousevar') individual(`individual_id')
		* NOTE(review): -weight_type- is only defined further below, so it is
		* empty here and this fweight check can never trigger; presumably
		* "`weight'"=="fweight" was intended — verify
		_assert ("`weight_type'"=="fweight") + ("`indiv_tousevar'" != "") < 2, msg("fweights are incompatible with individual ids as there cannot be two observations for a given group-individual touple")
	}

	* ------------------------------------------------------------------
	* Create and configure the Mata FixedEffects object
	* ------------------------------------------------------------------
	mata: HDFE = FixedEffects()

	if (`verbose' > 0) di as text _n `"# Passing main options to Mata"' _n

	* String-valued fields, copied one by one into HDFE
	loc absvars `"`absorb'"'
	loc tousevar `"`touse'"'
	loc weight_type `"`weight'"'
	loc weight_var `"`exp'"'
	loc optim_options absvars tousevar weight_type weight_var technique transform acceleration preconditioner parallel_dir parallel_opts
	if (`has_teams') loc optim_options `optim_options' group_id individual_id indiv_tousevar function_individual
	foreach opt of local optim_options {
		if (`verbose' > 0) di as text `" - HDFE.`opt' = {res}`"``opt''"' "'
		mata: HDFE.`opt' = `"``opt''"'
	}

	* Numeric-valued fields (note: passed unquoted)
	loc maxiter = `iterate'
	loc optim_options drop_singletons tolerance maxiter compact poolsize verbose parallel_maxproc parallel_force timeit
	foreach opt of local optim_options {
		if (`verbose' > 0) di as text `" - HDFE.`opt' = {res}``opt''"'
		mata: HDFE.`opt' = ``opt''
	}

	if (`verbose' > 0) di as text _n `"# Parsing absorb({res}`absorb'{txt}) and initializing FixedEffects() object"'
	if (`timeit') timer on 21
	mata: HDFE.init()
	if (`timeit') timer off 21

	* Any leftover options are treated as undocumented HDFE fields
	mata: add_undocumented_options("HDFE", `"`options'"', `verbose')

	* compact: preserve the dataset and keep only the needed variables to
	* reduce memory usage; tsset info is saved so it can be restored
	if (`compact') {
		loc panelvar "`_dta[_TSpanel]'"
		loc timevar "`_dta[_TStvar]'"

		cap conf var `panelvar', exact
		if (c(rc)) loc panelvar
		mata: HDFE.panelvar = "`panelvar'"

		cap conf var `timevar', exact
		if (c(rc)) loc timevar
		mata: HDFE.timevar = "`timevar'"

		if (`verbose' > 0) di as text "## Preserving dataset"
		preserve
		novarabbrev keep `basevars' `base_clustervars' `panelvar' `timevar' `touse'
	}

	* VCE settings
	mata: HDFE.vcetype = "`vcetype'"
	mata: HDFE.num_clusters = `num_clusters'
	mata: HDFE.clustervars = tokens("`clustervars'")
	mata: HDFE.base_clustervars = tokens("`base_clustervars'")

	* Degrees-of-freedom estimation
	if (`timeit') timer on 22
	mata: estimate_dof(HDFE, tokens("`dofadjustments'"), "`groupvar'")
	if (`timeit') timer off 22

	* Early exit requested via nopartialout (programmer option)
	if (`stop_before_partial_out') {
		if (`verbose' > 0) di as text "{title:Stopping reghdfe without partialling out}" _n
		c_local keep_mata 1
		exit
	}

	if (`verbose' > 0) di as text "{title:Working on varlist: partialling out and regression}" _n

	* Expand factor/time-series operators into concrete variables
	if (`verbose' > 0) di as text "# Parsing and expanding indepvars: {res}`indepvars'" _c
	if (`timeit') timer on 23
	ms_expand_varlist `indepvars' if `touse'
	if (`timeit') timer off 23
	if (`verbose' > 0) return list
	loc indepvars "`r(varlist)'"
	loc fullindepvars "`r(fullvarlist)'"
	loc fullindepvars_bn "`r(fullvarlist_bn)'"
	loc not_omitted "`r(not_omitted)'"

	* ------------------------------------------------------------------
	* Partial out the fixed effects
	* ------------------------------------------------------------------
	if (`timeit') timer on 24
	mata: HDFE.partial_out("`depvar' `indepvars'", 1, 1)
	if (`timeit') timer off 24

	* Delegate part of the partialling-out to worker processes if requested
	if (`parallel_maxproc' > 0) {
		if (`timeit') timer on 27
		ParallelBoss
		if (`timeit') timer off 27
	}

	* Record variable names and solver settings on the solution object
	mata: HDFE.solution.depvar = "`depvar'"
	mata: HDFE.solution.indepvars = tokens("`indepvars'")
	mata: HDFE.solution.fullindepvars = tokens("`fullindepvars'")
	mata: HDFE.solution.fullindepvars_bn = tokens("`fullindepvars_bn'")
	mata: HDFE.solution.indepvar_status = !strtoreal(tokens("1 `not_omitted'"))
	mata: HDFE.solution.collinear_tol = min(( 1e-6 , HDFE.tolerance / 10))
	mata: HDFE.solution.check_collinear_with_fe(`verbose')
	mata: HDFE.solution.fast_regression = `fast_regression'

	* Early exit requested via noregress (programmer option)
	if (`stop_before_regression') {
		if (`verbose' > 0) di as text "{title:Stopping reghdfe without running regression}" _n
		c_local keep_mata 1
		exit
	}

	* ------------------------------------------------------------------
	* Solve the regression
	* ------------------------------------------------------------------
	mata: HDFE.solution.report_constant = HDFE.has_intercept & `report_constant'
	* keepmata: snapshot data/tss before the solver consumes them
	if ("`keepmata'" != "") mata: hdfe_data = HDFE.solution.data
	if ("`keepmata'" != "") mata: hdfe_tss = HDFE.solution.tss
	if (`timeit') timer on 25
	mata: reghdfe_solve_ols(HDFE, HDFE.solution, "vce_small")
	if (`timeit') timer off 25
	mata: HDFE.solution.cmdline = HDFE.solution.cmd + " " + st_local("0")

	* Undo the compact-mode preserve before posting results
	if (`compact') {
		if (`verbose' > 0) di as text "## Restoring dataset"
		restore
	}

	if (`verbose' > 0) di as text "{title:Posting results to e() and displaying them}" _n

	* Expand b/V back to the full (base-level-included) variable list
	tempname b V
	mata: HDFE.solution.expand_results("`b'", "`V'", HDFE.verbose)

	loc store_sample = ("`nosample'"=="")
	EreturnPost `touse' `b' `V' `store_sample'

	* ------------------------------------------------------------------
	* Residuals and fixed-effect (alpha) storage
	* ------------------------------------------------------------------
	* If alphas must be saved, residuals are needed even if not requested;
	* use a temporary name that Store_Alphas/Cleanup will drop later
	mata: st_local("save_any_fe", strofreal(HDFE.save_any_fe))
	if ("`residuals'" == "" & `save_any_fe') loc residuals "__temp_reghdfe_resid__"
	if ("`residuals'" != "") {
		if (`verbose' > 0) di as text "# Storing residuals in {res}`residuals'{txt}" _n
		mata: HDFE.save_variable("`residuals'", HDFE.solution.resid, "Residuals")
		mata: HDFE.solution.residuals_varname = "`residuals'"
	}

	* Post remaining e() results and footnote information
	mata: HDFE.solution.post()
	mata: HDFE.post_footnote()

	* Store the alphas (no-op unless absorb() requested savefe)
	if (`timeit') timer on 28
	reghdfe, store_alphas
	if (`timeit') timer off 28

	* Display the results table
	Replay, `diopts' `header' `table' `footnote'

	* keepmata: restore the snapshotted data and tell the caller to keep HDFE
	if ("`keepmata'" != "") mata: swap(HDFE.solution.data, hdfe_data)
	if ("`keepmata'" != "") mata: swap(HDFE.solution.tss, hdfe_tss)
	if ("`keepmata'" != "") c_local keep_mata 1

	* Timing report (slots 20-29 top-level; 40-49 set inside partial_out)
	if (`timeit') {
		timer off 20
		ViewTimer, title("Top-level") percent range(20 29) legend(21 "HDFE" 22 "DoF" 23 "Expand factors/Lags" 24 "Partial out" 25 "Solve OLS" 27 "Parallel Boss" 28 "Store alphas")
		ViewTimer, title("Partial-out") percent range(40 49) legend(41 "Load data" 42 "Standardize/etc" 46 "MAP/LSMR" 47 "Data assign" 49 "Parallel Save")
	}
end
|
|
|
|
|
program define ParseDOF, sclass
	* Parse the dofadjustments() suboptions into a normalized keyword list,
	* returned in s(dofadjustments).
	syntax, [ALL NONE] [FIRSTpair PAIRwise] [CLusters] [CONTinuous]

	* An empty option list means "apply every adjustment"
	if ("`none'`firstpair'`pairwise'`clusters'`continuous'" == "") local all "all"

	* all/none cannot be combined with any specific adjustment
	opts_exclusive "`all' `none' `firstpair' `pairwise'" dofadjustments
	opts_exclusive "`all' `none' `clusters'" dofadjustments
	opts_exclusive "`all' `none' `continuous'" dofadjustments

	* -all- expands to the full set; otherwise keep exactly what was asked
	* (note: -none- leaves the list empty)
	if ("`all'" != "") {
		local opts pairwise clusters continuous
	}
	else {
		local opts `pairwise' `firstpair' `clusters' `continuous'
	}
	sreturn loc dofadjustments "`opts'"
end
|
|
|
|
|
program define ValidateGroups, sortpreserve
	* Validate the group/individual identifiers used for group-level FEs:
	* 1) every variable in -varlist- must be constant within each group;
	* 2) (group, individual) pairs must uniquely identify observations;
	* 3) rebuild -touse- so it marks one observation per group, storing the
	*    original sample marker in -indivtouse-.
	* BUGFIX: the body previously referenced the undefined local
	* -individual_id-; the syntax option stores its value in -individual-,
	* so the sort ignored the individual id and the uniqueness check at the
	* bottom could never run.
	syntax varlist, Group_id(varname numeric) TOUSE(name) [INDIVIDUAL(string) INDIVTOUSE(name)]

	* Sort so -by- prefixes below can operate within sample x group (x indiv)
	sort `touse' `group_id' `individual'

	* Group-level variables (regressors, cluster vars, weights) must not vary
	* within a group
	foreach var of local varlist {
		loc msg "variable `var' is not constant within `group_id'"
		by `touse' `group_id': _assert2 `var' == `var'[1] if `touse', msg("`msg'") rc(498)
	}

	* With an individual id, each (group, individual) pair must appear once
	if ("`individual'" != "") {
		loc msg "identifiers for group (`group_id') and individual (`individual') do not uniquely identify the observations"
		by `touse' `group_id' `individual': _assert2 _n == 1 if `touse', msg("`msg'") rc(459)
	}

	* Keep the full per-observation marker as -indivtouse- and shrink -touse-
	* to one marked observation per group
	rename `touse' `indivtouse'
	by `indivtouse' `group_id': gen byte `touse' = (_n == 1) & (`indivtouse')
	la var `touse' "[touse]"
	la var `indivtouse' "[touse_individual]"
end
|
|
|
|
|
program EreturnPost, eclass
	* Post the coefficient vector and VCE matrix to e().
	* Positional args: touse (sample marker), b (row vector), V (matrix),
	* store_sample (0/1: whether to mark e(sample)).
	ereturn clear
	args touse b V store_sample
	* Variable names come from the Mata solution object, not from args
	mata: st_local("depvar", HDFE.solution.depvar)
	mata: st_local("indepvars", invtokens(HDFE.solution.fullindepvars))
	if (`store_sample') loc esample "esample(`touse')"
	if ("`indepvars'" != "") {
		matrix colnames `b' = `indepvars'
		matrix colnames `V' = `indepvars'
		matrix rownames `V' = `indepvars'
		* Flag omitted (collinear) coefficients with the o. operator
		_ms_findomitted `b' `V'
		ereturn post `b' `V', `esample' buildfvinfo depname(`depvar')
	}
	else {
		* Degenerate case: no regressors survived (everything absorbed)
		ereturn post, `esample' buildfvinfo depname(`depvar')
	}
end
|
|
|
|
|
program define UpdateTouseWithTag
	* Clear -touse- for every group where no observation is tagged.
	* Currently disabled: the leading -assert 0- aborts on any call,
	* presumably kept for reference — confirm before re-enabling.
	assert 0
	args touse tag group
	tempvar touse_update
	* BUGFIX: was by(`group_id'), which referenced an undefined local;
	* the positional argument is named -group-
	gegen byte `touse_update' = max(`tag'), by(`group')

	qui replace `touse' = 0 if `touse_update' == 0
end
|
|
|
|
|
program Store_Alphas, eclass
	* After estimation, recover the fixed-effect estimates (alphas) as the
	* part of the depvar not explained by xb or the residual, and hand them
	* to Mata for storage. No-op unless HDFE.save_any_fe is set.
	mata: st_local("save_any_fe", strofreal(HDFE.save_any_fe))
	assert inlist(`save_any_fe', 0, 1)
	if (`save_any_fe') {
		* Requires a posted estimation with depvar and residuals available
		_assert e(depvar) != "", msg("e(depvar) is empty")
		_assert e(resid) != "", msg("e(resid) is empty")

		fvrevar `e(depvar)', list
		confirm numeric var `e(resid)', exact
		tempvar d
		* d = fitted values xb; fall back to the constant (or 0) when the
		* model had no estimable coefficients
		if (e(rank)) {
			qui _predict double `d' if e(sample), xb
		}
		else if (e(report_constant)) {
			gen double `d' = _b[_cons] if e(sample)
		}
		else {
			gen double `d' = 0 if e(sample)
		}
		* alphas (sum of FEs) = y - xb - resid
		qui replace `d' = `e(depvar)' - `d' - `e(resid)' if e(sample)

		mata: HDFE.store_alphas("`d'")
		drop `d'

		* If the residual was only a temporary helper, drop it and clear
		* e(resid) so users do not reference a dropped variable
		cap drop __temp_reghdfe_resid__
		if (!c(rc)) ereturn local resid
	}
end
|
|
|
|
|
program define ViewTimer, rclass
	* Display Stata timer results for a range of timer slots as a table.
	* range(a b): first/last timer slot; legend(): pairs of "slot label";
	* percent: treat the FIRST slot in the range as the total time and add a
	* % column plus a remainder row. Returns the table in r(timer).
	syntax, range(numlist min=2 max=2 integer >0 <=100 ascending) LEGend(string asis) [Title(string)] [percent]
	loc show_percent = ("`percent'" != "")

	* Default label for each slot is just its number
	forval i = 1/100 {
		loc msg`i' "`i':"
	}
	* Override labels from the legend() pairs (slot number, then label)
	while (`"`legend'"' != "") {
		gettoken key legend : legend
		gettoken val legend : legend
		loc msg`key' `"`val'"'
	}

	qui timer list
	gettoken start end : range
	loc index 0

	* With -percent-, the first slot holds total time (averaged over runs)
	* and the per-step slots begin at start+1
	if (`show_percent') {
		loc total_time = r(t`start') / r(nt`start')
		loc ++start
	}

	* Collect only the timers that actually ran (nonmissing)
	forval i = `start'/`end' {
		loc t = r(t`i')
		if (!mi(`t')) {
			loc ++index
			loc t`index' = r(t`i')
			loc rownames `"`rownames' "`msg`i''" "'
		}
	}
	loc num_rows `index'

	tempname timer

	if (`show_percent') {
		* Two columns (time, % of total) plus a remainder row for the time
		* not accounted for by any step
		loc sum_time 0
		matrix `timer' = J(`num_rows'+1, 2, .)
		forval i = 1/`num_rows' {
			loc sum_time = `sum_time' + `t`i''
			matrix `timer'[`i', 1] = `t`i''
			matrix `timer'[`i', 2] = 100 * `t`i'' / `total_time'
		}
		matrix `timer'[`num_rows'+1, 1] = `total_time' - `sum_time'
		matrix `timer'[`num_rows'+1, 2] = 100 * (`total_time' - `sum_time') / `total_time'
		matrix rownames `timer' = `rownames' "(Remainder)"
		matrix colnames `timer' = "Time" "(% Total)"

		di as text _n `"{bf: Timer results:} `title'"'
		* rspec needs one "&" (no line) per data row after the first
		loc spaces = (`: rowsof `timer'' - 1) * "&"
		matlist `timer', noblank cspec(& %25s | %10.2fc | %9.1f &) rspec(||`spaces'|) rowtitle(Step)
	}
	else {
		* Single column of raw times
		matrix `timer' = J(`num_rows', 1, .)
		forval i = 1/`num_rows' {
			matrix `timer'[`i', 1] = `t`i''
		}
		matrix rownames `timer' = `rownames'
		matrix colnames `timer' = "Time"

		di as text _n `"{bf: Timer results:} `title'"'
		loc spaces = (`: rowsof `timer'' - 1) * "&"
		matlist `timer', noblank cspec(& %25s | %10.2fc &) rspec(||`spaces'|) rowtitle(Step)
	}

	return matrix timer = `timer'
end
|
|
|
program _assert2, byable(onecall)
	* Like -assert-, but with a custom error message and return code, and
	* usable under the -by- prefix (byable(onecall) supplies `_byvars').
	* Options: msg() text to display on failure; rc() return code to exit
	* with (defaults to the code raised by -assert- itself).
	syntax anything(everything) [ , msg(str) rc(str) ]

	* Run the assertion, under by: when a by-prefix was supplied
	if _by() {
		capture by `_byvars': assert `anything', fast
	}
	else {
		capture assert `anything', fast
	}

	local rcc = _rc
	if (`rcc' == 0) exit

	* Failure path: show the custom message (or a default one), then exit
	* with the requested return code (or the one from -assert-)
	if (`"`msg'"' == "") local msg `"assert failed: `anything'"'
	dis as err `"`msg'"'
	if ("`rc'" == "") local rc `rcc'
	exit `rc'
end
|
|
|
|
|
program define ReghdfeFootnote
	* Thin wrapper around the external -reghdfe_footnote- command, which
	* prints the absorbed-FE summary table below the coefficient table
	reghdfe_footnote
end
|
|
|
|
|
program define ParseParallel, sclass
	* Parse the parallel() option: number of worker processes plus advanced
	* suboptions. Returns s(parallel_maxproc), s(parallel_force),
	* s(parallel_dir), and the reassembled option string s(parallel_opts).
	* BUGFIX: added the missing "///" line continuations (a multi-line
	* -syntax- statement does not parse under #delimit cr without them) and
	* closed the unbalanced parenthesis in the parallel(1) message.
	syntax anything(name=maxprocesses id="number of worker processes"), ///
		[MAXprocesses2(integer 0) ID(integer 0) TMP_path(string) FORCE] [*]

	_assert inrange(`maxprocesses', 0, 1000)
	* One worker is pure overhead versus running serially, unless forced
	if (`maxprocesses' == 1) & ("`force'" == "") {
		di as text "(ignoring parallel(1) as it's slower than parallel(0))"
		loc maxprocesses 0
	}

	* Build a quasi-unique caller id when none was given, mixing clock,
	* dataset size, and a random draw
	if (`id' <= 0) {
		loc seed_time = mod(clock("$S_DATE $S_TIME", "DMYhms")/1000, 24*3600)
		loc seed_data = c(N) * c(k)
		loc seed_rand = runiformint(1, 1e9-1)
		* NOTE(review): the draw is taken twice, so the first value is
		* discarded; presumably intentional (burn a draw) — verify
		loc seed_rand = runiformint(1, 1e9-1)
		loc id = mod((`seed_time' + `seed_data' + `seed_rand'), 1e9-1)
		assert inrange(`id', 1, 1e9-1)
	}

	if ("`tmp_path'" == "") loc tmp_path = c(tmpdir)

	* Ensure the temp path ends with a directory separator
	loc last_char = substr("`tmp_path'", strlen("`tmp_path'"), 1)
	if (!inlist("`last_char'", "/", "\")) loc tmp_path = "`tmp_path'`c(dirsep)'"
	loc padded_caller_id = string(`id', "%09.0f")
	loc parallel_dir = "`tmp_path'PARALLEL_`padded_caller_id'"

	* Reassemble the full option string passed on to parallel_map
	loc options `"maxproc(`maxprocesses') id(`id') tmp_path("`tmp_path'") `options' `force'"'

	loc force_settings = ("`force'" != "")
	sreturn clear
	sreturn loc parallel_force `force_settings'
	sreturn loc parallel_maxproc `maxprocesses'
	sreturn loc parallel_dir `"`parallel_dir'"'
	sreturn loc parallel_opts `"`options'"'

	* Re-parse the assembled string to validate it before returning
	loc 0 `", `options'"'
	syntax, [MAXprocesses(integer 0) COREs_per_process(integer 0) FORCE ID(integer 0) ///
		METHOD(string) STATA_path(string) TMP_path(string) PRograms(string) Verbose]
end
|
|
|
|
|
program define ParallelBoss
	* Coordinator side of the parallel partial-out: launches worker Stata
	* processes via -parallel_map- and merges their results back into HDFE.

	* Pull run parameters from the Mata HDFE object into locals
	mata: st_local("n", strofreal(HDFE.parallel_numproc))
	mata: st_local("opts", HDFE.parallel_opts)
	mata: st_local("parallel_dir", HDFE.parallel_dir)
	mata: st_local("verbose", strofreal(HDFE.verbose))

	if (`verbose' <= 0) loc logtable "nologtable"
	if (`verbose' > 0) loc verbose_string "verbose"
	if (`verbose' > 0) di as text "{title:Starting parallel processes:}" _n

	* Each worker re-enters reghdfe with the -worker- subcommand; the task
	* number arrives in the worker via the global $task_id set by parallel_map
	loc cmd `"parallel_map, val(1/`n') `verbose_string' `logtable' `opts': reghdfe, worker parallel_path("`parallel_dir'")"'
	if (`verbose' > 0) di as text `"command: {inp}`cmd'"'
	`cmd'

	* Combine the partial results written by the workers
	mata: parallel_combine(HDFE)
end
|
|
|
|
|
program ParallelWorker
	* Entry point for a worker process spawned by ParallelBoss.
	* Loads the serialized HDFE object (data0.tmp) plus this worker's slice
	* of variables (data$task_id.tmp) and partials out its share.
	syntax, parallel_path(string)
	_assert "${task_id}" != "", msg("global -task_id- cannot be missing")
	* BUGFIX: the message said "between 1 and 100" although the check
	* accepts up to 1000
	_assert (${task_id} == int(${task_id})) & inrange(${task_id}, 1, 1000), msg("global -task_id- must be an integer between 1 and 1000")

	loc hdfe_object "`parallel_path'`c(dirsep)'data0.tmp"
	loc vars_object "`parallel_path'`c(dirsep)'data${task_id}.tmp"
	conf file "`hdfe_object'"
	conf file "`vars_object'"

	di as text "Files to load: `hdfe_object'"
	di as text "Files to load: `vars_object'"

	* Partial out this worker's variables and write results back to disk
	mata: worker_partial_out("`hdfe_object'", "`vars_object'")
	di as text "exiting worker thread"
end
|
|
|
|
|
| include "reghdfe.mata", adopath
|
|
|
| exit
|
|
|