*! version 1.0.1 27Sep2016
*! version 1.0.2 05Jun2017 major changes in the code and how the whole routine works, added exponential and parameters options
*! authors: Gabriele Rovigatti, University of Chicago Booth, Chicago, IL & EIEF, Rome, Italy. mailto: gabriele.rovigatti@gmail.com
*! Vincenzo Mollisi, Bolzano University, Bolzano, Italy & Tor Vergata University, Rome, Italy. mailto: vincenzo.mollisi@gmail.com
/***************************************************************************
** Stata program for prodest Postestimation
**
** Programmed by: Gabriele Rovigatti
**************************************************************************/
cap program drop prodest_p
program define prodest_p, sortpreserve eclass
version 10.0
syntax [anything] [if] [in] [, ///
RESIDuals ///
EXPonential ///
PARameters ///
]
marksample touse // this is not e(sample)
tempvar esample
qui gen byte `esample' = e(sample)
loc varlist `anything'
loc mod = "`e(PFtype)'"
/* check for options in launching command */
if ( "`residuals'" == "" & "`exponential'" == "" & "`parameters'" == ""){
di as error "You must specify RESIDuals, EXPonential or PARameters"
exit 198
}
/* check for correct usage of options */
if ( ("`residuals'" != "" | "`exponential'" != "") & "`parameters'" != ""){
di as error "the 'parameters' option cannot be used with other options"
exit 198
}
if "`mod'" == "Cobb-Douglas"{ /* PART I: COBB-DOUGLAS */
if ("`residuals'" != "" | "`exponential'" != "") {
tempname beta
mat `beta' = e(b)
tempvar rhs
mat score double `rhs' = `beta'
loc lhs `e(depvar)'
if "`exponential'" != ""{
qui gen `varlist' = exp(`lhs' - `rhs') `if'
}
else{
qui gen `varlist' = `lhs' - `rhs' `if'
}
}
else{ /* the 'parameters' option with a Cobb-Douglas PF displays the coefficient table */
_coef_table, level($S_level)
}
}
else { /* PART II: TRANSLOG */
loc free = "`e(free)'"
loc state = "`e(state)'"
loc controls = "`e(controls)'"
loc transvars `free' `state' `controls'
loc translogNum: word count `transvars'
tempname beta
mat `beta' = e(b) // extract the estimated betas
loc n = 1 // regenerate the variables used in the routine in order to fit the values
foreach x of local transvars{
tempvar var_`n' betavar_`n'
qui g `betavar_`n'' = `beta'[1,`n'] * `x'
qui g `var_`n'' = `x'
loc fit `fit' -`betavar_`n''
loc ++n
}
forv i = 1/`translogNum'{
forv j = `i'/`translogNum'{ /* `i' */
tempvar var_`i'`j' betavar_`i'`j'
cap g `betavar_`i'`j'' = `beta'[1,`n'] * (`var_`i'' * `var_`j'')
cap g `var_`i'`j'' = (`var_`i'' * `var_`j'')
loc ++n
}
}
if "`exponential'" != "" {
qui g `varlist' = exp(`e(depvar)' `fit') `if' // here generate the predicted residuals -- exponential
}
else if "`residuals'" != ""{
qui g `varlist' = `e(depvar)' `fit' `if' // here generate the predicted residuals
}
else{ /* in case of 'parameters' option */
loc freenum: word count `free'
loc statenum: word count `state'
loc totnum: word count `free' `state'
forv i = 1/`totnum'{
forv j = 1/`totnum'{
if `i' != `j'{ /* generate the cross variables part only */
cap confirm variable `betavar_`i'`j''
if !_rc{
loc remainder `remainder' + (`betavar_`i'`j''/`var_`i'')
}
}
}
tempvar betafit_`i' // the parameter for translog is defined as beta_Wtranslog = beta_w + 2*beta_ww * W + beta_wx * X
qui gen `betafit_`i'' = `beta'[1,`i'] + 2*(`betavar_`i'`i''/`var_`i'') `remainder' // here we use the previously generated variables and weight them by the ith variable
qui su `betafit_`i'', meanonly
loc beta_`i': di %6.3f `r(mean)'
loc remainder ""
}
di _n _n
di as text "{hline 75}"
di as text "Translog elasticity estimates" _continue
di _col(49) "prodest postestimation"
di as text "{hline 75}"
di as text "Elasticity Parameter" _continue
di _col(49) "Value"
di as text "{hline 75}"
loc i = 1
foreach var of varlist `free' `state'{
di as text "beta_`var'" _continue
di _col(49) "`beta_`i''"
loc ++i
}
di as text "{hline 75}"
}
}
end
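/*
Usage sketch (illustrative; the -prodest- estimation call is abbreviated because
its options are not shown in this file): the program above is dispatched by
-predict- after a prodest fit.

    . prodest ...                       // fit the production function first
    . predict omega_ln, residuals       // log productivity (residual)
    . predict omega_lvl, exponential    // exp() of the residual
    . predict, parameters               // elasticity/coefficient table, no new variable
*/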
/**
Using the REDCap API with Stata
Luke Stevens, Murdoch Childrens Research Institute
04-Jan-2019
10. Read_Token_From_File.do
Do not save API tokens in scripts that may get seen by others.
Your script should read in your token from a private location.
*/
version 12
set more off
clear
* specify a text file where the token is stored as the only text on the first line
local tokenfile "C:\Users\my.username\Documents\ThisProject\API_Token\token.txt"
* read in your token from the token file specified above
import delimited "`tokenfile'", varnames(nonames)
local token=v1 in 1
local tmpcsvfile "temp.csv"
shell curl ///
--output `tmpcsvfile' ///
--form token=`token' ///
--form content=record ///
--form format=csv ///
--form type=flat ///
"https://redcap.mcri.edu.au/api/"
import delimited `tmpcsvfile', clear
rm `tmpcsvfile'
br
/*******************************************************************************
Download a dataset using the REDCap API
Download metadata for the variables and apply labels etc.
Note the following field types cannot have their values labelled because the label information is not included in the data dictionary:
- redcap_data_access_group
- text fields utilising the Bioportal ontology lookup
- dynamic sql
Luke Stevens 06-Mar-2020
*******************************************************************************/
cap program drop get_redcap_metadata
program get_redcap_metadata
version 16
args apiurl apitoken
if !regexm("`apiurl'", "^https://.+/api/$") {
display "API url appears invalid (`apiurl')"
exit
}
if !(regexm("`apitoken'", "^[A-F0-9]+$") & strlen("`apitoken'")==32) {
display "API token appears invalid (`apitoken')"
exit
}
* download metadata for dataset variables
quietly {
frame create redcapmetadata
frame change redcapmetadata
local tempcsv="adotempcsv.csv"
local content="metadata"
local outformat="csv"
local data "token=`apitoken'&content=`content'&format=`outformat'"
noisily display "Downloading REDCap data dictionary..."
shell curl --output "`tempcsv'" --data "`data'" "`apiurl'"
capture frame drop redcapmetadata
frame redcapmetadata: import delimited `tempcsv', bindquotes(strict) maxquotedrows(unlimited) stringcols(_all)
erase `tempcsv'
capture confirm variable field_name
if _rc!=0 | _N==0 {
display "Could not read data dictionary"
exit
}
* remove line breaks and other tricky characters from field labels
replace field_label = subinstr(field_label, char(10), " ", .) // LF
replace field_label = subinstr(field_label, char(13), " ", .) // CR
replace field_label = subinstr(field_label, char(34), "'", .) // "
replace select_choices_or_calculations = subinstr(select_choices_or_calculations, char(34), "'", .) // "
* apply metadata
frame change default
* define standard labels (end with two underscores to help avoid conflict with any existing)
label define yesno__ 1 "Yes" 0 "No"
label define truefalse__ 1 "True" 0 "False"
label define checkbox__ 1 "Checked" 0 "Unchecked"
label define complete__ 0 "Incomplete" 1 "Unverified" 2 "Complete"
foreach v of varlist _all {
frame change redcapmetadata
if regexm("`v'", "^.+___.+$") {
* checkbox column e.g. cb___opt1
local ddvar = substr("`v'", 1, strpos("`v'", "___")-1) // e.g. cb
local cbval = substr("`v'", strpos("`v'", "___")+3, .) // e.g. opt1
}
else {
local ddvar = "`v'"
local cbval = ""
}
count if field_name=="`ddvar'"
if r(N)>0 {
* v is present in data dictionary -> read the metadata
preserve
drop if field_name!="`ddvar'"
local lbl=field_label
local frm=form_name
local ftype=field_type
local valtype=text_validation_type_or_show_sli
local enum=select_choices_or_calculations
frame change default
noisily display "Variable `v': `frm' `ftype' `valtype'"
if "`ftype'"=="text" & substr("`valtype'", 1, 5)=="date_" {
* convert date (date_*) fields from string to Stata dates
tostring `v', replace
gen _temp_ = date(`v',"YMD"), after(`v')
drop `v'
rename _temp_ `v'
format `v' %dM_d,_CY
}
else if "`ftype'"=="text" & substr("`valtype'", 1, 9)=="datetime_" {
* convert datetime (datetime_*) fields from string to Stata datetimes
tostring `v', replace
gen double _temp_ = Clock(`v',"YMDhms"), after(`v')
drop `v'
rename _temp_ `v'
format `v' %tCMonth_dd,_CCYY_HH:MM:SS
}
else if "`ftype'"=="dropdown" | "`ftype'"=="radio" | "`ftype'"=="checkbox" {
* parse out choices and make label values
local lblvals ""
local skiplabel 0
tokenize "`enum'", parse("|")
while "`*'" != "" {
if "`1'"!="|" {
local choice "`1'"
local choiceval = substr("`choice'", 1, strpos("`choice'", ",")-1)
local choicelbl = strtrim(substr("`choice'", strpos("`choice'", ",")+1, .))
if "`ftype'"=="checkbox" {
if "`choiceval'"=="`cbval'"{
label values `v' checkbox__
local lbl "`lbl' `choicelbl'"
}
}
else {
if (regexm("`choiceval'", "^[0-9/-]+$")) {
label define `v'_ `choiceval' "`choicelbl'", add
}
else {
local skiplabel 1
noisily display "-Cannot label non-integer value `choiceval'"
}
}
}
macro shift // shift tokens `2'->`1', `3'->`2' etc.
}
if "`ftype'"=="checkbox" {
label values `v' checkbox__
}
else {
if (!`skiplabel') {
label values `v' `v'_
}
}
}
else if "`ftype'"=="yesno" {
label values `v' yesno__
}
else if "`ftype'"=="truefalse" {
label values `v' truefalse__
}
* add full label as note in case length >80 hence truncated
note `v': "`lbl'"
frame change redcapmetadata
restore
frame change default
}
else {
frame change default
if substr("`v'", -9, .)=="_complete" {
label values `v' complete__
}
noisily display "Variable `v': not in data dictionary"
local lbl "`ddvar'"
}
label variable `v' "`lbl'"
}
frame drop redcapmetadata
}
end
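/*
Usage sketch (illustrative; the URL, token, and file name below are placeholders):
load an exported record set into the default frame, then apply the data
dictionary metadata with the program above.

    local apiurl "https://redcap.mcri.edu.au/api/"
    local apitoken "0123456789ABCDEF0123456789ABCDEF"
    import delimited exported_data.csv, clear
    get_redcap_metadata "`apiurl'" "`apitoken'"
*/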
/*******************************************************************************
Example of API File Export and Import
Luke Stevens
22-Jan-2021
*******************************************************************************/
version 16
clear
set more off
import delimited tokens.txt
local apisource=url in 1
local apidest=url in 2
local tokensource=token in 1
local tokendest=token in 2
local fieldsource=field_name in 1
local fielddest=field_name in 2
clear
di "'`apisource''"
di "'`apidest''"
di "'`tokensource''"
di "'`tokendest''"
tempname tmpname
local tempexportfile="`tmpname'.csv"
* download records with files from source project
shell curl ///
--output `tempexportfile' ///
--form token=`tokensource' ///
--form content=record ///
--form format=csv ///
--form type=flat ///
--form filterLogic="[`fieldsource']<>''" ///
`apisource'
import delimited `tempexportfile', clear
rm `tempexportfile'
* loop through obs and upload file to destination
local N = _N
local thisrec
local thisfile
forvalues i = 1/`N' {
local thisrec=record_id[`i']
local thisfile=`fieldsource'[`i']
local destrec=`thisrec'
if "`thisfile'"!="" {
* download file from source record
di "Record `thisrec': downloading file `thisfile'"
shell curl ///
--output "`thisfile'" ///
--form token=`tokensource' ///
--form content=file ///
--form action=export ///
--form record="`thisrec'" ///
--form field=`fieldsource' ///
--form returnFormat=json ///
`apisource'
* upload file to destination record
di "Uploading file to destination record `destrec'"
shell curl ///
--form token=`tokendest' ///
--form content=file ///
--form action=import ///
--form record="`destrec'" ///
--form field=`fielddest' ///
--form filename="`thisfile'" ///
--form file=@"`thisfile'" ///
`apidest'
rm `thisfile'
}
}
/**
Using the REDCap API with Stata
Luke Stevens, Murdoch Childrens Research Institute
20-Jun-2017
1. Simple_Export.do
A basic API call to download all data from your REDCap project to a CSV file.
*/
version 12
set more off
clear
local token "<insert your token here>"
local outfile "exported_data.csv"
shell c:\curl\curl.exe ///
--output `outfile' ///
--form token=`token' ///
--form content=record ///
--form format=csv ///
--form type=flat ///
"https://redcap.mcri.edu.au/api/"
import delimited `outfile'
br
/**
Using the REDCap API with Stata
Luke Stevens, Murdoch Childrens Research Institute
20-Jun-2017
2. Export_Fields_Instruments.do
Specify specific fields and instruments to download.
--form fields=varname // include varname in downloaded dataset
--form fields[]=varname1 // include varname1 ...
--form fields[]=varname2 // ...and varname2 in downloaded dataset
--form forms=formname // include all variables of formname in downloaded dataset
--form forms[]=formname1 // include all variables of formname1 ...
--form forms[]=formname2 // ...and all from formname2 in downloaded dataset
*/
version 12
set more off
clear
local token "<insert your token here>"
local outfile "exported_data.csv"
shell c:\curl\curl.exe ///
--output `outfile' ///
--form token=`token' ///
--form content=record ///
--form format=csv ///
--form type=flat ///
--form fields[]=record_id ///
--form fields[]=gender ///
--form forms=randomisation ///
"https://redcap.mcri.edu.au/api/"
import delimited `outfile'
br
/**
Using the REDCap API with Stata
Luke Stevens, Murdoch Childrens Research Institute
20-Jun-2017
3. Export_Filter.do
Specify an expression that will filter the records downloaded.
--form filterLogic="<expr>" // expression in REDCap syntax
*/
version 12
set more off
clear
local token "<insert your token here>"
local outfile "exported_data.csv"
shell c:\curl\curl.exe ///
--output `outfile' ///
--form token=`token' ///
--form content=record ///
--form format=csv ///
--form type=flat ///
--form filterLogic="[enrolment_arm_1][gender]='1'" ///
"https://redcap.mcri.edu.au/api/"
import delimited `outfile'
br
/**
Note this is equivalent to downloading all followed by
keep if redcap_event_name=="enrolment_arm_1" & gender==1
*/
/**
Using the REDCap API with Stata
Luke Stevens, Murdoch Childrens Research Institute
20-Jun-2017
4. Export_Report.do
Specify a REDCap report to download.
--form content=report
--form report_id=<id> // numeric report id from right-hand column of report list
--form longitudinal_reports=1 // can be used with "longitudinal reports" too (invalid variable names)
*/
version 12
set more off
clear
local token "<insert your token here>"
local report_id=<report id here>
local outfile "exported_data.csv"
shell c:\curl\curl.exe ///
--output `outfile' ///
--form token=`token' ///
--form content=report ///
--form format=csv ///
--form type=flat ///
--form report_id=`report_id' ///
"https://redcap.mcri.edu.au/api/"
import delimited `outfile'
br
/**
Using the REDCap API with Stata
Luke Stevens, Murdoch Childrens Research Institute
20-Jun-2017
5. Export_Metadata.do
Illustrating metadata (data dictionary) download - not just records.
--form fields=varname // download varname metadata (only)
--form fields[]=varname1 // download varname1 ...
--form fields[]=varname2 // ...and varname2 metadata
--form forms=formname // download all variables of formname
--form forms[]=formname1 // download all variables of formname1 ...
--form forms[]=formname2 // ...and all from formname2
*/
version 12
set more off
clear
local token "<insert your token here>"
local outfile "exported_metadata.csv"
shell c:\curl\curl.exe ///
--output `outfile' ///
--form token=`token' ///
--form content=metadata ///
--form format=csv ///
"https://redcap.mcri.edu.au/api/"
import delimited `outfile'
br
/**
Using the REDCap API with Stata
Luke Stevens, Murdoch Childrens Research Institute
20-Jun-2017
6. Import_Records.do
Illustrating data import - API is not just for export.
(This example toggles the gender of the first record: 1 <-> 2.)
Ensure that your user role has "API Import" permission.
*/
version 12
set more off
clear
local token "<insert your token here>"
local outfile "exported_data.csv"
local fileforimport "data_for_import.csv"
shell c:\curl\curl.exe --output `outfile' --form token=`token' --form content=record --form format=csv --form type=flat --form fields[]=record_id --form fields[]=gender "https://redcap.mcri.edu.au/api/"
import delimited `outfile'
keep if _n==1
replace gender=1+(!(gender-1)) // toggle gender 1 <-> 2
export delimited using `fileforimport', nolabel replace
local cmd="c:\curl\curl.exe" ///
+ " --form token=`token'" ///
+ " --form content=record" ///
+ " --form format=csv" ///
+ " --form type=flat" ///
+ " --form data="+char(34)+"<`fileforimport'"+char(34) /// The < is critical! It causes curl to read the contents of the file, not just send the file name.
+ " https://redcap.mcri.edu.au/api/"
shell `cmd'
/**
Using the REDCap API with Stata
Luke Stevens, Murdoch Childrens Research Institute
20-Jun-2017
7. Build_Command_String.do
An example of building the curl command programmatically.
- Timestamp in downloaded file name
- Iterate a supplied list of fields
- Iterate a supplied list of events
- Iterate a supplied list of instruments
*/
version 12
set more off
clear
local token="<insert your token here>"
local content="record"
local format="csv"
local type="flat"
local data "token=`token'&content=`content'&format=`format'&type=`type'"
local curlpath="c:\curl\curl.exe"
local apiurl="https://redcap.mcri.edu.au/api/"
local dttm : display %td_CY-N-D date("$S_DATE", "DMY") "_$S_TIME"
local outfile = "redcap_api_export_"+trim(subinstr(subinstr("`dttm'","-","",.),":","",.))+".csv"
// Request selected fields
local param "fields"
local i 0
foreach listentry in ///
record_id /// // include record id field if you're not downloading the first form
dob ///
gender ///
{
local data "`data'&`param'[`i']=`listentry'"
local i=`i'+1
}
// Request selected events
local param "events"
local i=0
foreach listentry in ///
enrolment_arm_1 ///
randomisation_arm_1 ///
3month_questionnai_arm_1 ///
6month_visit_arm_1 ///
9month_questionnai_arm_1 ///
12month_visit_arm_1 ///
{
local data "`data'&`param'[`i']=`listentry'"
local i=`i'+1
}
// Request selected instruments
local param "forms"
local i=0
foreach listentry in ///
visit ///
questionnaire ///
{
local data "`data'&`param'[`i']=`listentry'"
local i=`i'+1
}
shell curl --output "`outfile'" --data "`data'" "`apiurl'"
import delimited using `outfile', varnames(1) clear
br
/**
Using the REDCap API with Stata
Luke Stevens, Murdoch Childrens Research Institute
20-Jun-2017
8. Debugging.do
How to troubleshoot your do-file and API calls:
- Badly formed curl command -> display and copy to command prompt
- API returns error -> review contents of outfile
*/
version 12
set more off
clear
local token "<insert your token here>"
local outfile "test_download.csv"
local cmd="c:\curl\curl.exe" ///
+ " --output `outfile'" ///
+ " --form token=`token'" ///
+ " --form content=record" ///
+ " --form format=csv" ///
+ " --form type=flat" /// note no space between this and url -> "no url"
+ "https://redcap.mcri.edu.au/api/"
display "`cmd'" // copy -> open command prompt -> right-click paste -> run
shell `cmd'
import delimited `outfile'
br
/**
Using the REDCap API with Stata
Luke Stevens, Murdoch Childrens Research Institute
10-Apr-2018
9. Delete_Records.do
An example of deleting multiple records.
Nb. The user must have the "Delete Records" permission enabled.
*/
version 12
set more off
clear
local curlpath="c:\curl\curl.exe"
local apiurl="https://redcap.mcri.edu.au/api/"
local tempfile "tempdata.csv"
local token "<insert your token here>"
*_______________________________________________________________________________
* first generate and import a few records that we can afterwards delete
** find the name of the record_id field
local cmd="`curlpath'" ///
+ " --form token=`token'" ///
+ " --form content=metadata" ///
+ " --form format=csv" ///
+ " --output `tempfile'" ///
+ " `apiurl'"
shell `cmd'
import delimited `tempfile'
local pkfield=field_name in 1
clear
** generate some test records and import
gen str11 `pkfield'=""
local recidstem="DELETETEST"
local numrecs=4
set obs `numrecs'
forvalues i=1/`numrecs' {
replace `pkfield'="`recidstem'"+string(`i') in `i'
}
export delimited using `tempfile', nolabel replace
local cmd="`curlpath'" ///
+ " --form token=`token'" ///
+ " --form content=record" ///
+ " --form format=csv" ///
+ " --form type=flat" ///
+ " --form data='<`tempfile'' `apiurl'"
shell `cmd'
rm `tempfile'
*_______________________________________________________________________________
* now build the delete command
local recordlist
forvalues i=1/`numrecs' {
local recordlist="`recordlist' --form records[]=`recidstem'"+string(`i')
}
local cmd="`curlpath'" ///
+ " --form token=`token'" ///
+ " --form content=record" ///
+ " --form action=delete" ///
+ " `recordlist' " ///
+ " `apiurl'"
shell `cmd'
* now check your project's Logging page...
*_______________________________________________________________________________
*! version 1.1
*! helpful if you want to put variable names in matrix row/colnames
program abbrev_all
version 11.0 //just a guess here
syntax , str_list(string asis) out_loc(string) [length(int 32)]
forval i=1/`:word count `str_list''{
local part = abbrev("`: word `i' of `str_list''",`length')
if `i'==1 local abbreved `""`part'""'
if `i'> 1 local abbreved `"`abbreved' "`part'""'
}
c_local `out_loc' `"`abbreved'"'
end
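/*
Usage sketch (illustrative variable and matrix names): abbreviate long names to
12 characters and use the result, returned in the caller's local, as matrix row names.

    abbrev_all , str_list(`"household_income_baseline household_income_followup"') ///
        out_loc(rnames) length(12)
    matrix rownames results = `rnames'
*/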
*! v1.0 Brian Quistorff <bquistorff@gmail.com>
*! Adds a fake coefficient to e(b)/e(V). Useful for adding new rows to a table using esttab/estout
*Requires: erepost
program add_fake_coeff_to_e, eclass
version 11.0
*Just a guess at the version
args cname cval
tempname eb eb2 eV eV2
mat `eb' = e(b)
local eb_names : colnames `eb'
mat `eb2' = `eb', `cval'
matrix colnames `eb2' = `eb_names' `cname'
*Need the dimensions of V to match b
mat `eV' = e(V)
local num_eb : word count `eb_names'
mat `eV2' = I(`=`num_eb'+1')
mat `eV2'[1,1] = `eV'
matrix colnames `eV2' = `eb_names' `cname'
matrix rownames `eV2' = `eb_names' `cname'
erepost b=`eb2' V=`eV2'
end
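/*
Usage sketch (illustrative model and value; requires erepost and estout from SSC):
append a placeholder coefficient named "controls" so esttab can show it as an
extra row.

    sysuse auto, clear
    regress price mpg weight
    add_fake_coeff_to_e controls 1
    esttab, se
*/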
*! version 1.1.1 13feb2015 - use net_install to allow relative local paths. Brian Quistorff <bquistorff@gmail.com>
*! version 1.1.0 09jun2011
program adoupdate, rclass
version 9
syntax [anything(name=pkglist id="package list")] [, ///
ALL ///
DIR(string) ///
SSConly ///
UPDATE ///
VERBOSE ///
]
local upd = cond("`update'"=="", 0, 1)
local bos = cond("`ssconly'"=="", 0, 1)
local notall = cond("`all'"=="", 1, 0)
local noisily = cond("`verbose'"=="", 0, 1)
set more off
mata: pkg_chk_and_update("`dir'", "`pkglist'", ///
`upd', `bos', `notall', `noisily')
return local pkglist "`pkglist'"
end
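/*
Usage sketch (options as declared in the syntax above): check SSC-installed
packages for pending updates, then install them.

    . adoupdate, ssconly
    . adoupdate, ssconly update
*/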
/* ==================================================================== */
version 9
local CMDNAME adoupdate
local DEFAULT_DIR "PLUS"
local SJupfile `""http://www.stata-journal.com/software/filelist.php""'
local SJbasesrc `""http://www.stata-journal.com/software/""'
local STBupfile `""http://www.stata.com/stb/filelist.php""'
local STBbasesrc `""http://www.stata.com/stb/""'
local SSCupfile `""http://repec.org/docs/sscDD.php""'
local NEGRC_NORESPONSE (-677)
local NEGRC_NOHOST (-631)
local NEGRC_NOFILE (-601)
local boolean real scalar
local instpkg string colvector
local instlist pointer(`instpkg') colvector
local EL_relno 1
local EL_upsrc 2
local EL_uppkgname 3
local EL_FIRST 4
local updlist pointer(string colvector) colvector
local L_SJ 1
local L_STB 2
local L_SSC 3
local UPDLIST_DIM 3
local pkgcode real scalar
local PKG_OKAY 0
local PKG_NOLONGER 1
local PKG_NORESPONSE 2
local PKG_CANNOTOPEN 3
local PKG_UNKNOWN 4
local PKG_UPDATED 5
local pkgtype real scalar
local PT_INSTALLED 1
local PT_NET 2
mata:
/* -------------------------------------------------------------------- */
void pkg_chk_and_update(
string scalar dir, string scalar pkglist,
`boolean' update, `boolean' ssconly, `boolean' certainonly,
`boolean' noisily)
{
`instlist' trk
`updlist' upd
string scalar click
/* ------------------------------------------------------------ */
openingmsg()
upd = J(`UPDLIST_DIM', 1, NULL)
/* ------------------------------------------------------------ */
find_and_uninst_dups(dir, 1)
/* ------------------------------------------------------------ */
trk = read_statatrk(dir)
if (ssconly) trk = extract_ssc(trk)
if (pkglist!="") trk = extract_matches(trk, pkglist)
/* ------------------------------------------------------------ */
if (rows(trk)==0) {
printf("{txt}")
if ("pkglist"!="") {
printf(`"(no packages match "%s")\n"', pkglist)
}
else if (ssconly) {
printf("(no packages installed from SSC\n")
}
else {
printf("(no user-written packages installed)\n")
}
st_local("pkglist", "")
return
}
/* ------------------------------------------------------------ */
printf("\n")
printf("{txt}Checking status of ")
printf(pkglist=="" ? "installed packages...\n":
"specified packages...\n")
trk = extract_update_exists(trk, upd, certainonly)
/* ------------------------------------------------------------ */
printf("\n")
if (rows(trk)==0) {
printf("{txt}(no packages require updating)\n")
st_local("pkglist", "")
return
}
printf(update ? "{txt}Packages to be updated are...\n" :
"{txt}Packages that need to be updated are...\n"
)
printf("\n")
list_packages(trk)
set_local_pkglist(trk)
displayflush()
/* ------------------------------------------------------------ */
printf("\n")
if (!update) {
click = rebuildcmdwupdate(dir, pkglist, ssconly, certainonly)
printf("{txt}Recommendation: type\n")
printf(`" {stata `"%s"':%s}\n"', click, click)
return
}
/* ------------------------------------------------------------ */
printf("{txt}Installing updates...\n")
printf("\n")
update_packages(dir, trk, noisily)
/* ------------------------------------------------------------ */
printf("\n")
printf("{txt}Cleaning up...")
displayflush()
find_and_uninst_dups(dir, 0)
printf("{txt} Done\n")
displayflush()
}
void openingmsg()
{
printf("{txt}{p 0 7 2}\n")
printf("(note: {cmd:`CMDNAME'} updates user-written files;\n")
printf("type -{cmd:update}- to check for updates to official Stata)\n")
printf("{p_end}\n")
}
string scalar rebuildcmdwupdate(
string scalar dir, string scalar pkglist,
`boolean' ssconly, `boolean' certainonly)
{
string scalar cmd
cmd = "`CMDNAME'"
if (strtrim(pkglist)!="") cmd = cmd + " " + strtrim(pkglist)
cmd = cmd + ","
if (dir!="") cmd = cmd + " dir(" + dir + ")"
if (ssconly) cmd = cmd + " ssconly"
if (!certainonly) cmd = cmd + " all"
return(cmd + " update")
}
/* -------------------------------------------------------------------- */
`instlist' extract_ssc(`instlist' trk)
{
real scalar i
`instlist' b
pragma unset b
for (i=1; i<=rows(trk); i++) {
if (isssc(srcof(*trk[i]))) b = b \ trk[i]
}
return(b)
}
`instlist' extract_matches(`instlist' trk, string scalar pkglist)
{
real scalar i, j
`instlist' in
string rowvector pat
pragma unset in
pat = tokens(pkglist)
for (i=1; i<=rows(trk); i++) {
for (j=1; j<=cols(pat); j++) {
if (strmatch(pkgnameof(*trk[i]), pat[j])) {
in = in \ trk[i]
break
}
}
}
return(in)
}
/* -------------------------------------------------------------------- */
void update_packages(string scalar dir, `instlist' trk,
`boolean' noisily)
{
real scalar i
string scalar names
names = ""
for (i=1; i<=rows(trk); i++) {
if (netinstall(dir, *trk[i], noisily)) {
names = names + " " + pkgnameof(*trk[i])
}
}
st_local("pkgname", strtrim(names))
}
`boolean' netinstall(string scalar dir, `instpkg' pkg, `boolean' noisily)
{
string scalar uppkgname, pkgname, upsrc, cmd, cmd0
real scalar rc, relno
pkgname = pkgnameof(pkg)
uppkgname = uppkgnameof(pkg)
upsrc = upsrcof(pkg)
relno = relnoof(pkg)
printf("{txt}")
printf(relno<10 ? "   " : (relno<100 ? "  " : " "))
if (pkgname == uppkgname) {
printf("[%g] {res:%s}\n", relno, pkgname)
}
else {
printf("[%g] {res:%s} using {res:%s}\n",
relno, pkgname, uppkgname)
}
displayflush()
cmd0 = sprintf("net set ado %s", dir=="" ? "`DEFAULT_DIR'" : dir)
if (isssc(upsrc)) {
cmd = sprintf("ssc install %s, replace", uppkgname)
}
else {
cmd = sprintf("net_install %s, from(%s) replace force",
uppkgname, upsrc)
}
if (noisily) {
printf("\n")
printf("{txt}{hline}\n")
printf("{txt}-> . %s\n", cmd0)
if ((rc = _stata(cmd0))) exit(rc)
printf("{txt}-> . %s\n", cmd)
rc = _stata(cmd)
printf("{txt}{hline}\n")
}
else {
if ((rc = _stata(cmd0, 1))) {
if (rc==1) exit(1)
(void) _stata(cmd0)
exit(rc)
/*NOTREACHED*/
}
rc = _stata(cmd, 1)
}
if (rc) {
if (rc==1) exit(1)
printf("{p 8 8 2}\n")
printf("{txt}{res:%s} not updated; return code was %g{break}\n",
pkgname, rc)
printf("Try updating {res:%s} again later,\n", pkgname)
printf("type -{cmd:`CMDNAME' %s, update}-.\n", pkgname)
printf("If that still does not work, type\n")
printf("-{cmd:`CMDNAME' %s, update verbose}-.\n", pkgname)
printf("\n")
return(0)
}
return(1)
}
void set_local_pkglist(`instlist' trk)
{
real scalar i
string scalar list
if (rows(trk)==0) {
st_local("pkglist", "")
return
}
list = pkgnameof(*trk[1])
for (i=2; i<=rows(trk); i++) list = list + " " + pkgnameof(*trk[i])
st_local("pkglist", list)
}
void list_packages(`instlist' trk)
{
real scalar i, n, relno
string scalar pkgname, pad
for (i=1; i<=rows(trk); i++) {
pkgname = pkgnameof(*trk[i])
if ((n = 12 - strlen(pkgname)) > 0) {
pad = "{bind:" + n*" " + "}"
}
else pad = ""
relno = relnoof(*trk[i])
if (relno<10) printf("{p 4 24 2}\n")
else if (relno<100) printf("{p 3 24 2}\n")
else printf("{p 2 24 2}\n")
printf("[%g] {res:%s}%s -- %s\n", relno, pkgname, pad,
titleof(*trk[i]))
printf("{p_end}\n")
}
}
`instlist' extract_update_exists(`instlist' trk, `updlist' upd,
`boolean' certainonly)
{
real scalar i
`pkgcode' status
`instlist' res
pragma unset res
status = J(rows(trk), 1, .)
for (i=1; i<=rows(trk); i++) {
status = pkg_update_status_noisily(*trk[i], upd)
if (status==`PKG_UPDATED' |
(certainonly==0 & status==`PKG_UNKNOWN')) {
res = res \ trk[i]
}
}
return(res)
}
`pkgcode' pkg_update_status_noisily(`instpkg' pkg, `updlist' upd)
{
real scalar relno
`pkgcode' res
printf("\n")
relno = relnoof(pkg)
if (relno<10) printf("{p 4 8 2}\n")
else if (relno<100) printf("{p 3 8 2}\n")
else printf("{p 2 8 2}\n")
printf("{txt}[%g] {res:%s} at %s:{break}\n",
relno, pkgnameof(pkg), srcof(pkg))
displayflush()
res = pkg_update_status(pkg, upd)
if (res==`PKG_OKAY') {
printf("installed package is up to date\n")
}
else if (res==`PKG_CANNOTOPEN') {
printf(
"server not responding or package is no longer available\n")
}
else if (res==`PKG_NOLONGER') {
printf("package no longer available\n")
}
else if (res==`PKG_NORESPONSE') {
printf("server not responding\n")
}
else if (res==`PKG_UPDATED') {
printf("{res:package has been updated on server}\n")
}
else {
printf(
"cannot tell because distribution dates not provided by source\n")
}
printf("{p_end}\n")
displayflush()
return(res)
}
`pkgcode' pkg_update_status(`instpkg' pkg, `updlist' upd)
{
string scalar src
src = srcof(pkg)
if (isssc(src)) return(pkg_update_status_ssc(pkg, upd))
if (isstatajournal(src)) return(pkg_update_status_sj( pkg, upd))
if (isstb(src)) return(pkg_update_status_stb(pkg, upd))
return(pkg_update_status_default(pkg))
}
`boolean' isstb(string scalar s)
{
string scalar sl
sl = strlower(s)
if (pstrcmp("http://www.stata.com/stb/", sl)) return(1)
return(0)
}
`boolean' isstatajournal(string scalar s)
{
string scalar sl
sl = strlower(s)
if (pstrcmp("http://www.stata-journal.com/software/", sl)) return(1)
if (pstrcmp("http://www.statajournal.com/software/", sl)) return(1)
if (pstrcmp("http://www.stata-journal.org/software/", sl)) return(1)
if (pstrcmp("http://www.statajournal.org/software/", sl)) return(1)
return(0)
}
`boolean' isssc(string scalar s)
{
string scalar sl
sl = strlower(s)
if (pstrcmp("http://fmwww.bc.edu/repec/", sl)) return(1)
return(0)
}
`boolean' pstrcmp(string scalar substr, string scalar fullstr)
{
return( (substr == substr(fullstr, 1, strlen(substr))) )
}
`pkgcode' pkg_update_status_ssc(`instpkg' pkg, `updlist' upd)
{
real scalar dinstalled, dnet
string scalar strdate
if (upd[`L_SSC']==NULL) {
upd[`L_SSC'] = &(loadsscupfile())
/*
testing of bsearch:
printf("\n")
i = rows(*upd[`L_SSC'])
i
(*upd[`L_SSC'])[1]
(*upd[`L_SSC'])[i]
bsearch_ssclist("_gclsort", *upd[`L_SSC'], 0)
bsearch_ssclist("ztg", *upd[`L_SSC'], 0)
*/
}
strdate = search_ssclist(pkgnameof(pkg), *upd[`L_SSC'])
if (strdate=="") return(`PKG_NOLONGER')
dinstalled = distributiondateof(pkg, `PT_INSTALLED')
dnet = edateofstr(strdate)
if (dinstalled<. & dnet<.) {
if (dinstalled<dnet) {
set_upinfoof(pkg, srcof(pkg), pkgnameof(pkg))
return(`PKG_UPDATED')
}
return(`PKG_OKAY')
}
if (dnet<. & dinstalled>=.) {
set_upinfoof(pkg, srcof(pkg), pkgnameof(pkg))
return(`PKG_UPDATED')
}
return(`PKG_UNKNOWN')
}
string scalar search_ssclist(string scalar pkgname, string colvector ssclist)
{
real scalar i
if (i = bsearch_ssclist(strlower(pkgname), ssclist, 0)) {
return(tokens(ssclist[i])[2])
}
return("")
}
/*
i = bsearch_ssclist(name, ssclist, i0)
Look for name in strictly ascending ssclist[] between i0 and
end-of-list. Specify i0=0 to search entire list.
*/
real scalar bsearch_ssclist(string scalar name,
string vector ssclist,
real scalar j0)
{
real scalar n, jl, ju, jm
jl = j0
ju = (n=length(ssclist)) + 1
while (ju-jl>1) {
jm = trunc((ju+jl)/2)
if ( name > (tokens(ssclist[jm])[1]) ) jl = jm
else ju = jm
}
if (jl<n) {
ju = jl
if ( name == (tokens(ssclist[++ju])[1]) ) return(ju)
}
return(0)
}
`pkgcode' pkg_update_status_default(`instpkg' pkg)
{
string colvector net
string scalar ffn
real scalar dinstalled, dnet
real scalar negrc
ffn = srcffnof(pkg)
pragma unset net
if ((negrc=read_pkg_file(ffn, net))) {
if (negrc==`NEGRC_NORESPONSE') return(`PKG_NORESPONSE')
if (negrc==`NEGRC_NOHOST') return(`PKG_NORESPONSE')
if (negrc==`NEGRC_NOFILE') return(`PKG_NOLONGER')
return(`PKG_CANNOTOPEN')
}
dinstalled = distributiondateof(pkg, `PT_INSTALLED')
dnet = distributiondateof(net, `PT_NET')
if (dinstalled<. & dnet<.) {
if (dinstalled<dnet) {
set_upinfoof(pkg, srcof(pkg),pkgnameof(pkg))
return(`PKG_UPDATED')
}
return(`PKG_OKAY')
}
if (dnet<. & dinstalled>=.) {
set_upinfoof(pkg, srcof(pkg), pkgnameof(pkg))
return(`PKG_UPDATED')
}
return(`PKG_UNKNOWN')
}
`pkgcode' pkg_update_status_stb(`instpkg' pkg, `updlist' upd)
{
real scalar idx
string scalar pkgname
if (pkg_update_status_sj(pkg, upd)==`PKG_UPDATED') {
return(`PKG_UPDATED')
}
pkgname = pkgnameof(pkg)
if (upd[`L_STB']==NULL) upd[`L_STB'] = &loadupfile(`STBupfile')
if (idx = findbestinlist(pkgname, *upd[`L_STB'])) {
set_upinfo_fromupd(pkg, upd, `L_STB', idx, `STBbasesrc')
return(`PKG_UPDATED')
}
return(`PKG_OKAY')
}
`pkgcode' pkg_update_status_sj(`instpkg' pkg, `updlist' upd)
{
real scalar idx
string scalar pkgname
pkgname = pkgnameof(pkg)
if (upd[`L_SJ']==NULL) upd[`L_SJ'] = &loadupfile(`SJupfile')
if (idx = findbestinlist(pkgname, *upd[`L_SJ'])) {
set_upinfo_fromupd(pkg, upd, `L_SJ', idx, `SJbasesrc')
return(`PKG_UPDATED')
}
return(`PKG_OKAY')
}
void set_upinfo_fromupd(`instpkg' pkg,
`updlist' upd, real scalar l, real scalar i,
string scalar basesrc)
{
string rowvector toks
toks = tokens((*upd[l])[i])
set_upinfoof(pkg, pathjoin(basesrc,toks[1]), toks[2])
}
string colvector loadupfile(string scalar ffn)
{
real scalar fh
string scalar line
string colvector res
fh = fopen(ffn, "r")
pragma unset res
while ((line=fget(fh))!=J(0,0,"")) {
if (strpos(line, "_")) res = res \ line
}
fclose(fh)
return(res)
}
string colvector loadsscupfile()
{
real scalar fh
string scalar line
string colvector res
fh = fopen(`SSCupfile', "r")
pragma unset res
while ((line=fget(fh))!=J(0,0,"")) {
if (line!="") {
res = res \ strlower(subinstr(line,":","",1))
}
}
fclose(fh)
_sort(res, 1)
return(res)
}
real scalar findbestinlist(string scalar name, string colvector list)
{
string scalar curlhs, newlhs, newname
real scalar currhs, newrhs
real scalar i, bestidx, bestrhs
pragma unset newlhs
pragma unset newrhs
pragma unset curlhs
pragma unset currhs
splitname(name, curlhs, currhs)
bestrhs = currhs
bestidx = 0
for (i=1; i<=rows(list); i++) {
newname = tokens(list[i])[2]
splitname(newname, newlhs, newrhs)
if (newlhs == curlhs) {
if (newrhs>bestrhs) {
bestrhs = newrhs
bestidx = i
}
}
}
return(bestidx)
}
void splitname(string scalar name, lhs, rhs)
{
real scalar i
if (i = strpos(name, "_")) {
lhs = substr(name, 1, i-1)
rhs = strtoreal(substr(name, i+1, .))
if (rhs<.) return
}
lhs = name
rhs = -1
}
string scalar basepkgname(string scalar name)
{
real scalar i
i = strpos(name, "_")
return(i ? substr(name, 1, i-1) : name)
}
/* -------------------------------------------------------------------- */
void find_and_uninst_dups(string scalar dir, `boolean' noisily)
{
while (1) {
if (find_and_uninst_1dup_std(dir, noisily)==0) break
}
while (1) {
if (find_and_uninst_1dup_sjstb(dir, noisily)==0) break
}
while (1) {
if (find_and_uninst_1dup_ssc(dir, noisily)==0) break
}
}
`boolean' find_and_uninst_1dup_std(string scalar dir, `boolean' noisily)
{
`instlist' trk
string colvector lcffn
string scalar lcpkgname
real scalar i, j
trk = read_statatrk(dir)
lcffn = J(rows(trk), 1, "")
for (i=1; i<=rows(trk); i++) lcffn[i] = strlower(srcffnof(*trk[i]))
for (i=1; i<rows(lcffn) /*sic*/; i++) {
for (j=i+1; j<=rows(lcffn); j++) {
if (lcffn[i]==lcffn[j]) {
rmdup(dir, trk, i, j, noisily)
return(1)
}
}
}
return(0)
}
`boolean' find_and_uninst_1dup_ssc(string scalar dir, `boolean' noisily)
{
`instlist' trk
real colvector isssc
string colvector lcpkgname
real scalar i, j
trk = read_statatrk(dir)
isssc = J(rows(trk), 1, .)
lcpkgname = J(rows(trk), 1, "")
for (i=1; i<=rows(trk); i++) {
isssc[i] = isssc(srcof(*trk[i]))
lcpkgname[i] = strlower(pkgnameof(*trk[i]))
}
for (i=1; i<rows(trk) /*sic*/; i++) {
if (isssc[i]) {
for (j=i+1; j<=rows(trk); j++) {
if (isssc[j]) {
if (lcpkgname[i]==lcpkgname[j]) {
rmdup(dir, trk, i, j,
noisily)
return(1)
}
}
}
}
}
return(0)
}
`boolean' find_and_uninst_1dup_sjstb(string scalar dir, `boolean' noisily)
{
`instlist' trk
string colvector lhs
real colvector rhs
string scalar pkgname, curlhs, src
`boolean' touse, hassjstb
real scalar i, j, currhs, torm
trk = read_statatrk(dir)
lhs = J(rows(trk), 1, "")
rhs = J(rows(trk), 1, .)
hassjstb = 0
for (i=1; i<=rows(trk); i++) {
src = srcof(*trk[i])
touse = 0
if (isstatajournal(src)) touse = 1
else if (isstb(src)) touse = 1
if (touse) {
hassjstb = 1
pkgname = pkgnameof(*trk[i])
pragma unset curlhs
pragma unset currhs
splitname(pkgname, curlhs, currhs)
lhs[i] = curlhs
rhs[i] = currhs
}
}
if (!hassjstb) return(0)
for (i=1; i<rows(trk) /*sic*/; i++) {
if (rhs[i]<.) {
for (j=i+1; j<=rows(trk); j++) {
if (rhs[j]<.) {
if (lhs[i]==lhs[j]) {
torm = (rhs[i]>rhs[j] ? j:i)
netrm(dir, *trk[torm], noisily)
return(1)
}
}
}
}
}
return(0)
}
void rmdup(string scalar dir, `instlist' trk, real scalar i,
real scalar j, `boolean' noisily)
{
real scalar idate, jdate, torm
idate = installdateof(*trk[i])
jdate = installdateof(*trk[j])
if (idate>jdate) torm = j
else if (idate<jdate) torm = i
else {
if (uniqidof(*trk[i])>uniqidof(*trk[j])) torm = j
else torm = i
}
netrm(dir, *trk[torm], noisily)
}
void netrm(string scalar dir, `instpkg' pkg, `boolean' noisily)
{
string scalar cmd
if (noisily) {
printf("{p}\n")
printf(
"{txt}(note: package {res:%s} was installed more than once;\n",
pkgnameof(pkg))
printf("older copy removed)\n")
printf("{p_end}\n")
}
cmd = sprintf("quietly ado uninstall [%g]", relnoof(pkg))
if (dir!="") cmd = cmd + ", from("+dir+")"
stata(cmd)
}
/* -------------------------------------------------------------------- */
`instlist' read_statatrk(string scalar dir)
{
real scalar fh, i
string scalar element
`instlist' res
res = J(0,1,NULL)
if ((fh = _fopen(ffn_of_statatrk(dir), "r")) < 0) {
errprintf(
"directory %s does not have user-installed files\n", dir)
exit(601)
}
i = 0
while((element=read_statatrk_element(fh, ++i))!=J(0,1,"")) {
res = res \ &acopy(element)
}
fclose(fh)
return(res)
}
string scalar ffn_of_statatrk(string scalar dir)
{
string scalar basedir
basedir = pathsubsysdir(dir=="" ? "`DEFAULT_DIR'" : dir)
return(pathjoin(basedir, "stata.trk"))
}
transmorphic matrix acopy(transmorphic matrix x)
{
transmorphic matrix copy
copy = x
return(copy)
}
void read_statatrk_skiphdr(real scalar fh)
{
string scalar line
real scalar pos
pos = ftell(fh)
while ((line=fget(fh))!=J(0,0,"")) {
if (!(substr(line,1,1)=="*" | strtrim(line)=="")) {
fseek(fh, pos, -1)
return
}
pos = ftell(fh)
}
}
`instpkg' read_statatrk_element(real scalar fh, real scalar i)
{
`instpkg' res
string scalar line
res = J(`EL_FIRST', 1, "")
res[`EL_relno'] = sprintf("%g", i)
read_statatrk_skiphdr(fh)
if ((line = fget(fh))==J(0,0,"")) return(J(0,1,""))
if (substr(line, 1, 1)!="S") {
errprintf("stata.trk file invalid header\n")
exit(610)
}
res[`EL_FIRST'] = line
while ((line=fget(fh))!=J(0,0,"")) {
if (line!="" & substr(line,1,1)!="*") {
res = res \ strrtrim(line)
if (substr(line,1,1)=="e") return(res)
}
}
errprintf("stata.trk file invalid contents\n")
exit(610)
/*NOTREACHED*/
}
/* -------------------------------------------------------------------- */
real scalar read_pkg_file(string scalar ffn, string colvector res)
{
real scalar fh
if ((fh = _fopen(ffn, "r")) < 0) {
res = J(0, 1, "")
return(fh)
}
res = read_pkg_element(fh)
fclose(fh)
return(0)
}
string colvector read_pkg_element(real scalar fh)
{
string colvector res
string scalar line
pragma unset res
while ((line=fget(fh))!=J(0,0,"")) {
if (line!="" & substr(line,1,1)!="*") {
res = res \ strrtrim(line)
}
}
return(res)
}
/* -------------------------------------------------------------------- */
string scalar srcof(`instpkg' pkg)
{
return(subelementof(pkg, "S"))
}
string scalar titleof(`instpkg' pkg)
{
string colvector res
res = subelementof(pkg, "d")
return(rows(res)==0 ? "" : res[1])
}
string scalar upsrcof(`instpkg' pkg)
{
return(pkg[`EL_upsrc'])
}
string scalar srcffnof(`instpkg' pkg)
{
string scalar src, fn
src = srcof(pkg)
fn = subelementof(pkg, "N")
if (rows(src)!=1 | rows(fn)!=1) return("")
return(pathjoin(src, fn))
}
string scalar pkgnameof(`instpkg' pkg)
{
string colvector res
real scalar len
res = subelementof(pkg, "N")
if (rows(res)!=1) return("")
len = strlen(res)
if (substr(res, len-3, .) == ".pkg") return(substr(res, 1, len-4))
return(res)
}
string scalar uppkgnameof(`instpkg' pkg)
{
return(pkg[`EL_uppkgname'])
}
real scalar uniqidof(`instpkg' pkg)
{
string colvector res
real scalar uid
res = subelementof(pkg, "U")
if (rows(res)==1) {
uid = strtoreal(res)
if (uid<.) return(uid)
}
pkg_corrupt()
/*NOTREACHED*/
}
real scalar relnoof(`instpkg' pkg)
{
real scalar rel
if ((rel = strtoreal(pkg[`EL_relno'])) < .) return(rel)
pkg_corrupt()
/*NOTREACHED*/
}
real scalar installdateof(`instpkg' pkg)
{
string colvector res
real scalar edate
res = subelementof(pkg, "D")
if (rows(res)==1) {
edate = date(res, "dmy")
if (edate<.) return(edate)
}
pkg_corrupt()
/*NOTREACHED*/
}
real scalar distributiondateof(`instpkg' pkg, `pkgtype' ptype)
{
real scalar i
string colvector d
real scalar e, emax
d = distributiondatesof(pkg, ptype)
emax = 0
for (i=1; i<=rows(d); i++) {
e = edateofstr(d[i])
if (e>emax & e<.) emax = e
}
return(emax ? emax : .)
}
string colvector distributiondatesof(`instpkg' pkg, `pkgtype' ptype)
{
real scalar i
string colvector d
string colvector res
d = (ptype==`PT_INSTALLED' ?
subelementof(pkg, "d") :
subelementnet(pkg, "d")
)
pragma unset res
for (i=1; i<=rows(d); i++) {
/* ----+----1----+--- */
/* Distribution-Date: */
if (strlower(substr(d[i], 1, 18))=="distribution-date:") {
res = res \ strlower(strtrim(substr(d[i], 19, .)))
}
}
return(res)
}
string colvector subelementof(`instpkg' pkg, string scalar ltr)
{
real scalar i
string colvector res
pragma unset res
for (i=`EL_FIRST'; i<=rows(pkg); i++) {
if (substr(pkg[i],1,1)==ltr) {
res = res \ strtrim(substr(pkg[i],2,.))
}
}
return(res)
}
string colvector subelementnet(string colvector gel, string scalar ltr)
{
real scalar i
string colvector res
pragma unset res
for (i=1; i<=rows(gel); i++) {
if (substr(gel[i],1,1)==ltr) {
res = res \ strtrim(substr(gel[i],2,.))
}
}
return(res)
}
void set_upinfoof(`instpkg' pkg, string scalar src, string scalar pkgname)
{
pkg[`EL_upsrc'] = src
pkg[`EL_uppkgname'] = pkgname
}
void pkg_corrupt()
{
errprintf("stata.trk file invalid or corrupted\n")
errprintf("no action taken\n")
exit(610)
/*NOTREACHED*/
}
/* -------------------------------------------------------------------- */
real scalar edateofstr(string scalar s)
{
real scalar res
real scalar yr, mo, da
if (strlen(s)==8) {
yr = strtoreal(substr(s, 1, 4))
mo = strtoreal(substr(s, 5, 2))
da = strtoreal(substr(s, 7, 2))
if (yr<. & mo<. & da<.) {
res = mdy(mo, da, yr)
if (res!=.) return(res)
}
}
return(date(s, "dmy"))
}
real scalar docmd(string scalar rhs)
{
real scalar res
stata("scalar DoCmdStringScalar = " + rhs)
res = st_numscalar("DoCmdStringScalar")
stata("scalar drop DoCmdStringScalar")
return(res)
}
/*
real scalar mdy(real mo, real da, real yr)
{
return(docmd(sprintf("mdy(%g, %g, %g)", mo, da, yr)))
}
real scalar date(string scalar str, string scalar pattern)
{
return(docmd(sprintf(`"date("%s", "%s")"', str, pattern)))
}
*/
/* -------------------------------------------------------------------- */
end
*! Version 1.0 Brian Quistorff <bquistorff@gmail.com>
*! Simplifies quick asserts
program assert_count
version 11.0
*Just a guess at the version
syntax [if], rn(string) [message(string)]
qui count `if'
assert_msg `r(N)'`rn', message(`message')
end
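/*
Usage sketch (illustrative condition and message): assert that auto.dta has
exactly 74 observations with non-missing price; rn() is appended to r(N), so
"==74" tests equality and ">0" would test for at least one match.

    sysuse auto, clear
    assert_count if !missing(price), rn(==74) message(unexpected number of price obs)
*/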
*! Version 1.0 Brian Quistorff <bquistorff@gmail.com>
*! An assert with a message when false
*! Similar to -_assert- except have a pause
program assert_msg
version 11.0
*Just a guess at the version
syntax anything [, message(string)]
cap assert `anything', fast
if _rc==1 { //Break key
error 1
}
if _rc!=0 {
di "(`message') [`anything']!=0."
pause
error _rc
}
end
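/*
Usage sketch (illustrative condition and message): fail, with a message and a
pause for inspection, if any price is negative.

    sysuse auto, clear
    assert_msg price>=0, message(price should never be negative)
*/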
*! version 1.2 Brian Quistorff <bquistorff@gmail.com>
program assign_treatment
version 11.0 //Just a guess
syntax varlist, generate(namelist max=1) num_treatments(int) [handle_misfit(string)]
if "`handle_misfit'"=="" local handle_misfit = "obalance"
* Error checking
_assert inlist("`handle_misfit'","obalance","full", "full_obalance", "reduction", "missing"), rc(197) ///
msg("Error: handle_misfit() option must be one of: full, reduction, missing, full_obalance, obalance (or empty).")
_assert `num_treatments'> 0 & `num_treatments'<=`=_N', rc(197) ///
msg("Error: num_treatments must be greater than 0 and not greater than _N.")
if "`handle_misfit'"=="reduction" & `: word count `varlist''<2 local handle_misfit = "obalance"
* First-pass randomization. Implicitly does "obalance" and determines misfits.
tempvar cell_id cell_position rand misfit
gen `rand' = runiform()
egen `cell_id' = group(`varlist')
sort `cell_id', stable
by `cell_id' : gen `cell_position' = `rand'[1]
sort `cell_position' `rand'
by `cell_position': gen `misfit' = (_n > floor(_N/`num_treatments')*`num_treatments')
gen int `generate' = mod(_n-1, `num_treatments')+1
if inlist("`handle_misfit'","full","full_obalance","reduction"){
*separate out the misfits
tempfile nonmisfits
preserve
qui drop if `misfit'
qui save `nonmisfits'
restore
qui keep if `misfit'
if "`handle_misfit'"=="reduction"{
tempvar prev_cell_internal_rank prev_cell_id bigger_cell_id prev_cell_rand
by `cell_position': gen int `prev_cell_internal_rank' = _n
local varsleft = "`varlist'"
gen int `prev_cell_id' = `cell_id'
sort `prev_cell_id' `prev_cell_internal_rank' //no change, just update varname in sortorder
while `:word count `varsleft''>1{
gettoken var varsleft : varsleft
cap drop `bigger_cell_id' `prev_cell_rand'
egen int `bigger_cell_id' = group(`varsleft')
by `prev_cell_id' : gen `prev_cell_rand' = `rand'[1]
sort `bigger_cell_id' `prev_cell_rand' `prev_cell_internal_rank'
qui by `bigger_cell_id': replace `prev_cell_internal_rank' = _n
qui replace `prev_cell_id' = `bigger_cell_id'
sort `prev_cell_id' `prev_cell_internal_rank' //no change, just update varname in sortorder
}
qui replace `generate' = mod(_n-1, `num_treatments')+1
}
if "`handle_misfit'"=="full"{
*Assign again but with fake observations to "complete" the misfit groups.
qui set obs `=_N+`num_treatments''
tempvar rank
qui by `cell_position': gen `rank' = _n
fillin `cell_id' `rank'
qui replace `rand' = runiform() //re-fill
sort `cell_id' `rand'
qui replace `generate' = mod(_n-1, `num_treatments')+1
qui drop if _fillin | `cell_id'==.
drop _fillin
}
if "`handle_misfit'"=="full_obalance"{
mata: set_full_obalance(`num_treatments', "`cell_id'", "`generate'")
}
append using `nonmisfits', nolabel nonotes
}
if "`handle_misfit'"=="missing"{
qui replace `generate'=. if `misfit'
}
qui compress `generate'
end
mata:
void set_full_obalance(real scalar T, string scalar cell_id_var, string scalar output_var){
cell_ids = st_data(.,cell_id_var)
N = rows(cell_ids)
ids = 1::N
obs = (ids,cell_ids)
ret = full_obalance(T, obs)
if(missing(ret)) _error(197, "Error: Something went wrong. Not possible to assign misfits to treatments without overlap.")
ts = mod(ids :-1, T) :+ 1
data = sort((obs, ts), 1)
st_store(.,output_var, data[,3])
}
//This function uses the order to implicitly record the treatment
//obs = (id, cell_id)
//This is a simple constraint-satisfaction solver
real matrix full_obalance(real scalar T, real matrix obs_left, | real matrix obs_fixed, real matrix cell_id_freq){
if(rows(obs_left)==0) return(J(0,2,.))
if(obs_fixed==J(0, 0, .)) obs_fixed = J(0,2,.)
if(cell_id_freq==J(0,0,.)){
cids = obs_fixed[,2] \ obs_left[,2]
cid_max = max(cids)
cell_id_freq = J(cid_max,1,0)
for(i=1; i<=rows(cids); i++){
cell_id_freq[cids[i]] = cell_id_freq[cids[i]]+1
}
}
new_rank = rows(obs_fixed)+1
new_t = mod(new_rank-1,T)+1
obs_available = obs_left
for(i=new_t; i<new_rank; i=i+T){
obs_available = select(obs_available,obs_available[,2]:!=obs_fixed[i,2])
}
n_avail = rows(obs_available)
//pick order to try for next ones
//obs_to_try = jumble(obs_available) //simple/naive method
//the above method will sometimes take quite a while to find a solution
// so instead try first the cell_ids that are hardest to fit (the ones with the most misfits)
//can tune this using the w parameter (w=0 is no weighting)
rand = runiform(n_avail,1)
w=1
for(i=1; i<=n_avail; i++){
rand[i,1] = rand[i,1]*(1+w*cell_id_freq[obs_available[i,2]])
}
perm = order(rand,-1)
obs_to_try = obs_available[perm,]
for(i=1; i<=n_avail; i++){
ob_to_try = obs_to_try[i,]
ret = full_obalance(T, select(obs_left,obs_left[,1]:!=ob_to_try[1,1]), obs_fixed \ ob_to_try, cell_id_freq)
if(!missing(ret)) return(ob_to_try \ ret)
}
return(.)
}
end
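/*
Usage sketch (illustrative stratification variables): assign each observation to
one of three arms, stratifying on rep78 x foreign cells; misfits are handled by
the default "obalance" rule.

    sysuse auto, clear
    drop if missing(rep78)
    set seed 12345
    assign_treatment rep78 foreign, generate(treat) num_treatments(3)
    tab treat
*/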
*! version 1.1 Brian Quistorff April 2015
*! version 1.0.1 (chardel) by NJC 1 April 2000
*! Blanks out all chars for everything in the namelist passed in.
*! Along with reshape, destring also sometimes adds a var[destring] char
program def bchardel
version 10.0
syntax namelist
foreach name in `namelist'{
local chnames : char `name'[]
foreach chname in `chnames'{
char `name'[`chname'] /* blank it out */
}
}
end
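/*
Usage sketch (illustrative variable): clear the characteristics that destring
attaches to a variable.

    sysuse auto, clear
    tostring price, replace
    destring price, replace
    bchardel price
*/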
*! version 1.0
program bcount
version 11 //guess
syntax [if] [in], local(string)
count `if' `in'
c_local `local' `r(N)'
end
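/*
Usage sketch (illustrative condition and local name): count foreign cars and
leave the result in the caller's local rather than only in r(N).

    sysuse auto, clear
    bcount if foreign, local(nforeign)
    di "`nforeign' foreign cars"
*/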
*! v0.1 Brian Quistorff <bquistorff@gmail.com>
*! allows things like -drop emp*, except(emp)-
program bdrop
version 11
syntax varlist [, except(string)]
unab varlist_full : `varlist'
local to_drop : list varlist_full - except
drop `to_drop'
end
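/*
Usage sketch (illustrative variables): drop every variable starting with "head"
except headroom itself.

    sysuse auto, clear
    gen headroom_sq = headroom^2
    bdrop head*, except(headroom)
*/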
*! v0.1 Brian Quistorff <bquistorff@gmail.com>
*! A merge pass-through function with additions:
*! 1) Allows merging when key variables are named differently. (using_match_vars())
*! 2) Shows the full match stats (even when using keep())
*! 3) Maintains sort (using ", stable")
*! 4) return in r() the tab of _merge
program bmerge, rclass
version 12 //use version 12 instead of 11 because of better -rename-
* This parsing was ripped from -merge-
gettoken mtype 0 : 0, parse(" ,")
gettoken token : 0, parse(" ,")
if ("`token'"=="_n") {
gettoken token 0 : 0, parse(" ,")
}
else{
loc token ""
}
* add here using_match_vars()
syntax [varlist(default=none)] using/ [, using_match_vars(string) ///
ASSERT(string) GENerate(name) FORCE KEEP(string) KEEPUSing(string) ///
noLabel NOGENerate noNOTEs REPLACE noREPort SORTED UPDATE]
*weird, apparently "nogenerate" and "generate" get set (unlike normal no-option locs)
qui describe, varlist
local svars `r(sortlist)'
local matchvars "`varlist'"
if "`using_match_vars'"!=""{
rename (`varlist') (`using_match_vars')
local matchvars "`using_match_vars'"
}
local gen_var _merge
if "`nogenerate'"!="nogenerate" & "`generate'"!="" local gen_var `generate'
merge `mtype' `token' `matchvars' using "`using'", assert(`assert') generate(`gen_var') ///
`force' keepusing(`keepusing') `label' `notes' `replace' `report' `sorted' `update'
if "`using_match_vars'"!="" rename (`using_match_vars') (`varlist')
loc keep_keyword_order = "master using match match_update match_conflict"
*Output return values
* First zero-out
foreach ktype in `keep_keyword_order'{
return scalar `ktype' = 0
}
tempname cell_mat row_mat
qui tab `gen_var', matcell(`cell_mat') matrow(`row_mat')
forval row_i =1/`=rowsof(`row_mat')'{
local ktype_ind = `row_mat'[`row_i',1]
local ktype : word `ktype_ind' of `keep_keyword_order'
local ktype_count = `cell_mat'[`row_i',1]
return scalar `ktype' = `ktype_count'
}
if "`keep'"!=""{
foreach ktype in `keep' {
if "`ktype'"=="master" loc keep_list "`keep_list',1"
if "`ktype'"=="using" loc keep_list "`keep_list',2"
if "`ktype'"=="match" loc keep_list "`keep_list',3"
if "`ktype'"=="match_update" loc keep_list "`keep_list',4"
if "`ktype'"=="match_conflict" loc keep_list "`keep_list',5"
}
keep if inlist(`gen_var' `keep_list')
}
if "`nogenerate'"=="nogenerate" drop `gen_var'
if "`svars'"!="" sort `svars', stable
end
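/*
Usage sketch (built from auto.dta; variable and tempfile names are illustrative):
merge on a key that is named differently in the using data, keep matches only,
and read the match counts back from r().

    sysuse auto, clear
    preserve
    keep make foreign
    rename make car_name
    tempfile using_ds
    save `using_ds'
    restore
    keep make price
    bmerge 1:1 make using "`using_ds'", using_match_vars(car_name) keep(match) nogenerate
    return list
*/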
*! Given a set of predictors, and treatment time:
*! -drops units without complete info
*! -will limit sample size if testing
*! -GE:
*! --will identify donors via GE concerns
*! --will compare prediction errors with and without GE-contaminated units removed
*! -Main estimation (including placebo units)
*! -will check the fit for a t-1 placebo test
* -TODO: re-estimate without the top match
* -TODO: Should I generalize the transformation graphs (dmln->ln, ga->ln)?
* -TODO: Error gracefully when there was an optimization error when estimating on a keeper
program broad_eval_synth_model, rclass
version 11.0 //just a guess here
syntax varname, tr_unit_codes(numlist) tr_unit_titles(string) precise_tyear(int) perms(int) ///
[predictors(string) cust_predictors_note(string) ///
ge_mode(int 0) ge_custom_cmd(string) ///
predictors_plac_remove(string) predictors_plac_add_early(string) predictors_plac_add_late(string) ///
placebo_unit_codes(numlist) placebo_unit_titles(string) limit_donors(string) base_suff(string) ///
savepermweights checkhull plot_pe pred_suff(string) obs_weight_char_ds(string) ///
checklastperiod compare_prederrors full_xlabels(string) tc_gph_opts(string) output_mean_post_RMSPE_donors ///
noplot_tc_ci nooutput_wmat nooutput_X0_X1 nooutput_vmat nooutput_pvals plot_tc ///
redo_without_topmatch donor_limit_for_match_cmd(string) connect_ci_to_pre_t ///
nooutput_diffs nooutput_graph_data]
local depvar "`varlist'"
qui tsset, noquery
local pvar = "`r(panelvar)'"
local tvar = "`r(timevar)'"
* Check inputs
local num_tr_units : word count `tr_unit_codes'
local num_placebo_units : word count `placebo_unit_codes'
di "start run_synths with: depvar=`depvar', perms=`perms', ge_mode=`ge_mode'"
qui do code/synth_consts.do
local keeper_codes = "`tr_unit_codes' `placebo_unit_codes'"
local num_keepers = `num_tr_units' + `num_placebo_units'
local keeper_codes_commas = subinstr(trim("`keeper_codes'"), " ", ", ", .)
local tr_unit_codes_commas = subinstr(trim("`tr_unit_codes'"), " ", ", ", .)
*Get the full list of years
local random_unit : word 1 of `keeper_codes'
if `num_keepers'==0{
* Likely called from cross-validation, so pick a donor at random
local random_unit = `pvar'[1]
}
qui levelsof `tvar' if `pvar'==`random_unit' & `depvar'!=., local(years_tot)
local num_years : word count `years_tot'
*Get the last pret year
forval per=1/`num_years'{
local thisyear : word `per' of `years_tot'
if `thisyear'>`precise_tyear'{
local tper = `per'
continue, break
}
}
local last_pre_year : word `=`tper'-1' of `years_tot'
local first_treatmentyear : word `tper' of `years_tot'
if "`predictors'"==""{
local pred_suff = "full"
*Generate standard (lagged response) variable predictors
forval per=1/`=`tper'-1'{
local thisyear : word `per' of `years_tot'
local predictors = "`predictors' `depvar'(`thisyear')"
}
local note "Predictors: all pre-treatment response variables."
}
else {
if "`cust_predictors_note'"!=""{
local note "Predictors: `cust_predictors_note'."
}
else{
local note "Predictors: `predictors'."
}
}
return local note "`note'"
if `ge_mode'!=${GE_mode_nothing}{
local ge_suff = "_ge`ge_mode'"
}
local file_suff "`base_suff'`depvar'_`pred_suff'`ge_suff'${extra_f_suff}"
return local file_suff "`file_suff'"
cap erase "${dir_base}/data/estimates/todrop_`file_suff'.dta"
*Drop incomplete obs
tempvar complete_case
complete_units `predictors', generate(`complete_case')
di "Dropping incomplete cases"
drop if !`complete_case'
drop `complete_case'
*XXX note in dropfile
if "`limit_donors'"!= ""{
tempvar keeper1 keeper2
qui count if `tvar'==`last_pre_year'
gen `keeper1' = runiform()*r(N)
bys `pvar': gen `keeper2' = `keeper1'[1]
di "Dropping because limiting donors"
keep if (`keeper2' < `limit_donors') | inlist(`pvar', `keeper_codes_commas')
drop `keeper1' `keeper2'
*XXX note in dropfile
}
qui count if `tvar'==`last_pre_year'
di "Running estimation with `r(N)' units"
local predictors_plac_base : list predictors - predictors_plac_remove
local early_predictors : list predictors_plac_base | predictors_plac_add_early
local late_predictors : list predictors_plac_base | predictors_plac_add_late
if "`compare_prederrors'"!="" { //pre-drop
compare_pred_errors , depvar(`depvar') tr_unit_codes(`tr_unit_codes') ///
file_suff(`file_suff') perms(`perms') ge_suff(_predrop) ///
early_predictors(`early_predictors') late_predictors(`late_predictors') last_pre_year(`last_pre_year') ///
precise_tyear(`precise_tyear') first_treatmentyear(`first_treatmentyear') tper(`tper') ///
donor_limit_for_match_cmd(`donor_limit_for_match_cmd')
}
*Drop the units that, through general equilibrium, might be affected by the real treatment
*Nothing if `ge_mode'==${GE_mode_nothing}
if `ge_mode'==${GE_mode_custom_cmd}{
di "Dropping due to custom drop command: `ge_custom_cmd'"
`ge_custom_cmd' , treatment_units(`tr_unit_codes')
}
if `ge_mode'==${GE_mode_trim_early_placebo}{
if "`compare_prederrors'"==""{
local skip_gen_early_placebo "skip_gen_early_placebo"
}
identify_donors_placebo , depvar(`depvar') tr_unit_codes(`tr_unit_codes') predictors(`predictors') ///
file_suff(`file_suff') last_pre_year(`last_pre_year') ///
perms(`perms') `skip_gen_early_placebo' early_predictors(`early_predictors') tper(`tper') ///
precise_tyear(`precise_tyear') first_treatmentyear(`first_treatmentyear') keepunits(`placebo_unit_codes') ///
onlyonce donor_limit_for_match_cmd(`donor_limit_for_match_cmd')
}
*Main estimation
di "Main estimation"
eval_synth_model, depvar(`depvar') predictors("`predictors'") ///
perms(`perms') tc_gph_opts(`tc_gph_opts') ttime(`precise_tyear') ///
file_suff("`file_suff'") `savepermweights' `plot_tc_ci' full_xlabels(`full_xlabels') ///
tr_unit_codes(`tr_unit_codes') tr_unit_titles(`"`tr_unit_titles'"') ///
notes("`note'") placebo_unit_codes(`placebo_unit_codes') placebo_unit_titles(`placebo_unit_titles') ///
obs_weight_char_ds(`obs_weight_char_ds') connect_treat ${do_nest} `plot_pe' ///
`checkhull' `output_wmat' `output_vmat' `output_pvals' `output_mean_post_RMSPE_donors' ///
`output_X0_X1' donor_limit_for_match_cmd(`donor_limit_for_match_cmd') `connect_ci_to_pre_t' ///
`plot_tc' `output_diffs' `output_graph_data'
tempname cis
return local eval_noerr_codes "`r(eval_noerr_codes)'"
if `perms'!=0{
return scalar main_ci_num = `r(ci_num)'
return scalar num_perm_act = `r(num_perm_act)'
foreach unit in `r(eval_noerr_codes)' {
return scalar perc_perms_match_better`unit' = `r(perc_perms_match_better`unit')'
mat `cis' = r(cis`unit')
return matrix cis`unit' = `cis'
}
}
tempname tc_outcome
foreach unit in `r(eval_noerr_codes)' {
mat `tc_outcome' = r(tc_outcome`unit')
return matrix tc_outcome`unit' = `tc_outcome'
}
if "`output_mean_post_RMSPE_donors'"!=""{
tempname mean_post_RMSPE_donors
scalar `mean_post_RMSPE_donors' = r(mean_post_RMSPE_donors)
return scalar mean_post_RMSPE_donors = `mean_post_RMSPE_donors'
}
*Add in nice details to todrop file
tempfile initdata
qui save `initdata'
use "${dir_base}/data/estimates/todrop_`file_suff'.dta", clear
rename `pvar' set
if "`obs_weight_char_ds'"!=""{
merge 1:1 set using "`obs_weight_char_ds'", keep(match) nogenerate
}
qui do code/synth_consts.do
label values reason dr_reasons
qui save12 "${dir_base}/data/estimates/todrop_`file_suff'.dta", replace
use `initdata', clear
*See the p-values testing in the final pre-treatment periods
if "`checklastperiod'"!=""{
di "Checking fit on last pre-treatment period"
eval_synth_model, depvar(`depvar') tr_unit_codes(`tr_unit_codes') predictors(`early_predictors') ///
justall(1) noplot_tc_ci nooutput_vmat nooutput_wmat nooutput_permn nooutput_graph_data nooutput_X0_X1 ${do_nest} ///
file_suff(`file_suff'_checklast) ttime(`last_pre_year') end(`last_pre_year') perms(`perms') ///
donor_limit_for_match_cmd(`donor_limit_for_match_cmd')
}
end
|
*! Builds the matrices needed by the graphing programs
*! Works if the unit is one of the donors a permutation estimation was done on.
program build_graphing_mats, rclass
version 11.0 //just a guess here
syntax anything, depvar(string) startper(int) perms_file(string)
qui tsset, noquery
local pvar = "`r(panelvar)'"
tempname roBr y_diffs_t tc_mat CI_mat y_diff
mkmat `depvar' if `pvar'==`anything', matrix(`roBr')
mat `roBr' = `roBr'[`startper'...,1]
tempfile initdata
qui save `initdata'
use "`perms_file'", clear
keep if unit_type==${Unit_type_donor}
drop unit_type
mkmat PE* if `pvar'==`anything', mat(`y_diffs_t')
use `initdata', clear
mat `y_diff' = `y_diffs_t''
mat `tc_mat' = (`roBr', `roBr'-`y_diff')
return matrix tc_outcome = `tc_mat'
return matrix y_diff = `y_diff'
end
|
*! version 0.1
*! Brian Quistorff
*! Usage:
*! change_line using table.tex, ln(10) insert("blah")
*! change_line using table.tex, ln(10) delete
*! change_line using table.tex, ln(10) replace("blah")
program change_line
version 11 //a guess
syntax using/, ln(int) [insert(string) delete replace(string)]
tempfile newfile
tempname fh fh_new
file open `fh' using `"`using'"', read text
file open `fh_new' using `newfile', write text replace
file read `fh' line
local linenum = 0
while r(eof)==0 {
local linenum = `linenum' + 1
if `ln'==`linenum'{
if `"`insert'"'!=""{
file write `fh_new' `"`insert'"' _newline
file write `fh_new' `"`line'"' _newline
}
else{
if "`replace'"!="" file write `fh_new' `"`replace'"' _newline
}
}
else{
file write `fh_new' `"`line'"' _newline
}
file read `fh' line
}
file close `fh'
file close `fh_new'
copy `newfile' `using', replace
end
|
*! Purpose: Right now this just checks whether the treated unit is inside the upper/lower envelope of the donor pool
*! Also graphs the raw data a bit
* To do: maybe eventually use the Gary King program
program check_in_convex_hull
version 11.0 //just a guess here
syntax varname, first_pre(int) last_pre(int) trunit(int) file_suff(string) ///
[gph_tvar(string) main_label(string) end(string) xlabels(string) tper_spec(string) ]
qui tsset, noquery
local tvar = "`r(timevar)'"
local pvar = "`r(panelvar)'"
if "`gph_tvar'"==""{
local gph_tvar "`tvar'"
}
if "`main_label'"==""{
local main_label "Main"
}
/*if "`xlabels'"==""{
foreach labl in `xlabels'{
if `labl'<`tper_spec' {
local xlabels_short "`xlabels_short' `labl'"
}
}
}*/
if "`xlabels'"!=""{
local xla "xlabel(`xlabels')"
}
qui levelsof `gph_tvar' if `pvar'==`trunit', local(tvals)
summ `gph_tvar' if `pvar'==`trunit', meanonly
local tvar_min = r(min)
local gph_min_t : word `=`first_pre'-`tvar_min'+1' of `tvals'
local gph_max_t : word `=`last_pre'-`tvar_min'+1' of `tvals'
di "Checking if we're in the convex hull"
forval tval =`first_pre'/ `last_pre'{
qui summ `varlist' if `pvar'==`trunit' & `tvar'==`tval'
local tr_lvl = r(mean)
qui summ `varlist' if `pvar'!=`trunit' & `tvar'==`tval'
local dp_min = r(min)
local dp_max = r(max)
local inrange = (`tr_lvl' >= `dp_min' & `tr_lvl' <= `dp_max')
autofmt, input(`tr_lvl' `dp_min' `dp_max') dec(3)
di "Inrange=`inrange'. Depvar=`varlist', tvar=`tval'. Treatment Level=`r(output1)'." ///
"Donor pool range=[`r(output2)', `r(output3)']"
}
local grph_pre_cmds = ""
qui levelsof `pvar' if `tvar'==`first_pre', local(units)
local nunits : word count `units'
foreach unit in `units' {
local grph_pre_cmds "`grph_pre_cmds' (line `varlist' `gph_tvar' if `pvar'==`unit' & `gph_tvar'<=`gph_max_t' & `gph_tvar'>=`gph_min_t', lcolor(gs10) lwidth(medthin) lpattern(solid) )"
}
twoway `grph_pre_cmds' (connected `varlist' `gph_tvar' if `pvar'==`trunit' & `gph_tvar'<=`gph_max_t' & `gph_tvar'>=`gph_min_t', lpattern(solid) lwidth(thick) mcolor(black) msymbol(S)), ///
legend(order(`=`nunits'+1' "`main_label'" 1 "Donors")) name(`=strtoname("pre_trends_`file_suff'",1)', replace) ///
/*ylabel(minmax)*/ xlabel(minmax /*xlabels_short*/) title("Pre-treatment trends")
qui save_fig "pre-trends_`file_suff'"
if "`end'"!=""{
local gph_end_t : word `=`end'-`tvar_min'+1' of `tvals'
foreach unit in `units' {
local grph_total_cmds "`grph_total_cmds' (line `varlist' `gph_tvar' if `pvar'==`unit' & `gph_tvar'<=`gph_end_t' & `gph_tvar'>=`gph_min_t', lcolor(gs10) lwidth(medthin) lpattern(solid) )"
}
twoway `grph_total_cmds' (connected `varlist' `gph_tvar' if `pvar'==`trunit' & `gph_tvar'<=`gph_end_t' & `gph_tvar'>=`gph_min_t', lpattern(solid) lwidth(thick) mcolor(black) msymbol(S)), ///
xline(`tper_spec', lpattern(shortdash)) ///
legend(order(`=`nunits'+1' "`main_label'" 1 "Donors")) name(`=strtoname("all_trends_`file_suff'",1)', replace) ///
/*ylabel(minmax)*/ `xla' title("Raw Trends")
qui save_fig "all-trends_`file_suff'"
}
end
|
*! v0.3 Brian Quistorff <bquistorff@gmail.com>
*! Clears more than -clear all-.
*! It can't clear the locals from the calling context, so you still might want to -mac drop _all-
*! (but that is unnecessary if used at the top of a do-file that is only -run- (not -include-d), since it then has its own local context anyway)
program clear_all
version 12 // is a guess
syntax [, reset_ADO closeallmatafiles closealllogs]
clear all //also clears graphs and sersets
cap restore, not
profiler off
if "`closealllogs'"!="" log close _all
*These are independent commands also. Embed for portability
if "`reset_ADO'"!="" global S_ADO `"BASE;SITE;.;PERSONAL;PLUS;OLDPLACE"'
*Normal open files closed by clear all
if "`close_mata_files'"!=""{
*From http://www.stata.com/statalist/archive/2006-10/msg00794.html
forvalues i=0(1)50 {
capture mata: fclose(`i')
}
}
mac drop _all //has to be after use the options. effectively just clears globals
end
|
*! version 1.0 Brian Quistorff
* Reshape leaves a bunch of chars (dataset characteristics) around so that it can be undone.
* In interactive mode this is nice, but otherwise they are messy.
program clear_reshape_chars
version 11.0 //just a guess here
char _dta[ReS_str]
char _dta[ReS_j]
char _dta[ReS_ver]
char _dta[ReS_i]
char _dta[ReS_Xij]
local poss_chars : char _dta[]
foreach poss_char of local poss_chars{
if regexm("`poss_char'","__Xij") char _dta[`poss_char']
}
end
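/*
 * Example (a minimal sketch; inc, id, and year are hypothetical variable names):
reshape long inc, i(id) j(year)
clear_reshape_chars   // drop the ReS_* undo metadata before saving/sharing the file
*/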
|
*! Version 1.0 Brian Quistorff <bquistorff@gmail.com>
*! Closes all files that were open in mata
*! -clear all- doesn't close mata open files!
* From http://www.stata.com/statalist/archive/2006-10/msg00794.html
program closeallmatafiles
version 9
forvalues i=0(1)50 {
capture mata: fclose(`i')
}
end
|
*! Version 1.0 Brian Quistorff <bquistorff@gmail.com>
*! This will collapse the dataset and preserve the variable and value labels.
*! The syntax for using this is just like with the collapse command.
*! There is one additional optional option: omitstatfromvarlabel. If you add this option to the command (collapseandpreserve ... , by(...) omitstatfromvarlabel)
*! then it will not show the statistic (i.e. (first), (mean), (last), etc.) in the variable label
* From: http://shafiquejamal.blogspot.com/2012/11/stata-tip-collapse-dataset-while.html
* Written by Shafique Jamal (shafique.jamal@gmail.com).
program define collapseandpreserve
version 11.0 //just a guess here
syntax anything(id="variable and values" name=arguments equalok) [fweight aweight pweight iweight], by(string asis) [cw fast Omitstatfromvarlabel]
version 9.1
// save all the value labels
tempfile tf
qui label save using `"`tf'"', replace
// get the list of variables to be collapse, and keep track of the value label - variable correspondence
tempname precollapse_listofvars postcollapse_listofvars listofvaluelabels valuelabelname stat oldvarname newvarname
local `stat' "(mean)"
foreach a of local arguments {
*di `"word: `a'"'
if (regexm(`"`a'"',"^\(.*\)$")) { // if there is something like (first), (mean), etc.
local `stat' = `"`a'"'
}
else { // This is a variable. Store the associated variable label and value label name
// What if there is an = in the term? then need two list of variables: a precollapse list and a postcollapse list
if (regexm(`"`a'"',"^(.*)=(.*)$")) {
local `oldvarname' = regexs(2)
local `newvarname' = regexs(1)
// di "Regex match! oldvarname: ``oldvarname''. newvarname: ``newvarname''"
}
else {
local `oldvarname' `"`a'"'
local `newvarname' `"`a'"'
// di "NO regex match! oldvarname: ``oldvarname''. newvarname: ``newvarname''"
}
local `precollapse_listofvars' `"``precollapse_listofvars'' ``oldvarname''"'
local `postcollapse_listofvars' `"``postcollapse_listofvars'' ``newvarname''"'
local `valuelabelname' : value label ``oldvarname''
tempname vl_``newvarname''
local `vl_``newvarname''' : variable label ``oldvarname''
if (`"``vl_``newvarname''''"' == `""') {
local `vl_``newvarname''' `"``newvarname''"'
}
*di `"omitstatfromvarlabel = `omitstatfromvarlabel'"'
if (`"`omitstatfromvarlabel'"'==`""') {
local `vl_``newvarname''' `"``stat'' ``vl_``newvarname''''"'
*di "not omitting"
}
else {
local `vl_``newvarname''' `"``vl_``newvarname''''"'
*di "omitting"
}
if (`"``valuelabelname''"' == `""') { // variable has no value label
local `listofvaluelabels' `"``listofvaluelabels'' ."'
}
else {
local `listofvaluelabels' `"``listofvaluelabels'' ``valuelabelname''"'
}
}
}
*di "`weight'`exp'"
collapse `arguments' [`weight'`exp'], by(`by') `cw' `fast'
// macro list
// retrieve the valuelabels
qui do `"`tf'"'
// reapply the variable labels and the value labels
tempname count
local `count' = 0
*di "------------------------------------------------"
foreach var of local `postcollapse_listofvars' {
*di `"var: `var'"'
*di `"its variable label: ``vl_`var'''"'
// reapply the variable labels
local `count' = ``count'' + 1
label var `var' `"``vl_`var'''"'
// reapply the value labels
local `valuelabelname' : word ``count'' of ``listofvaluelabels''
if (`"``valuelabelname''"' != `"."') {
label values `var' ``valuelabelname''
}
}
end
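/*
 * Example (sketch, using the shipped auto dataset):
sysuse auto, clear
collapseandpreserve (mean) price weight (max) mpg, by(foreign) omitstatfromvarlabel
 * price, weight, and mpg keep their original variable labels (and value labels,
 * where defined) rather than getting collapse's default "(stat) varname" labels.
*/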
|
*! Compares the prediction errors from the normal setup with a placebo test one period before
program compare_pred_errors
version 11.0 //just a guess here
syntax , depvar(string) tr_unit_codes(string) file_suff(string) perms(int) ge_suff(string) ///
early_predictors(string) late_predictors(string) precise_tyear(int) ///
first_treatmentyear(int) last_pre_year(int) tper(int) [plac_late_start(string) ///
donor_limit_for_match_cmd(string)]
local placebo_tper = `tper'-1
di "Comparing predictive errors"
*Do a one decade earlier placebo (1 post)
eval_synth_model, depvar(`depvar') tr_unit_codes(`tr_unit_codes') predictors(`early_predictors') ///
justall(1) noplot_tc_ci nooutput_vmat nooutput_wmat nooutput_permn nooutput_pvals nooutput_X0_X1 nooutput_graph_data ${do_nest} ///
file_suff(`file_suff'_pret_sub`ge_suff') ttime(`last_pre_year') end(`last_pre_year') perms(`perms') ///
donor_limit_for_match_cmd(`donor_limit_for_match_cmd')
*Do the normal (1 post) estimation but with the late predictors
eval_synth_model, depvar(`depvar') tr_unit_codes(`tr_unit_codes') predictors(`late_predictors') ///
justall(1) noplot_tc_ci nooutput_vmat nooutput_wmat nooutput_permn nooutput_pvals nooutput_X0_X1 nooutput_graph_data ${do_nest} ///
file_suff(`file_suff'_pret_sub2`ge_suff') ttime(`precise_tyear') end(`first_treatmentyear') ///
perms(`perms') start(`plac_late_start') donor_limit_for_match_cmd(`donor_limit_for_match_cmd')
tempfile initdata
qui save `initdata'
*Get rid of the treatment
foreach subtype in sub sub2 {
use "${dir_base}/data/estimates/gen_perm_br_`file_suff'_pret_`subtype'`ge_suff'.dta", clear
qui keep if unit_type==${Unit_type_donor}
qui drop unit_type
qui save12 "${dir_base}/data/estimates/gen_perm_br_`file_suff'_pret_`subtype'`ge_suff'.dta", replace
}
*Graph them
use "${dir_base}/data/estimates/gen_perm_br_`file_suff'_pret_sub`ge_suff'.dta", clear
keep PE`placebo_tper' codigo
rename PE`placebo_tper' earlyPE
merge 1:1 codigo using "${dir_base}/data/estimates/gen_perm_br_`file_suff'_pret_sub2`ge_suff'.dta", ///
keep(match) keepusing(PE`tper') nogenerate noreport
rename PE`tper' latePE
local longtext "Kernel density of prediction errors from separate synthetic controls, each using six pre-treatment periods of population."
wrap_text , unwrappedtext("`longtext'")
local wrapped `"`s(wrappedtext)'"'
twoway (kdensity earlyPE /*if abs(earlyPE)<0.5*/) (kdensity latePE /*if abs(latePE)<0.6*/), ///
xtitle("Prediction Errors") title("Density") legend(order(1 "Treatment=1950" 2 "Treatment=1960")) ///
name(`=strtoname("PE_normal_v_plac_`file_suff'`ge_suff'",1)', replace) ///
note(`wrapped')
qui save_fig "pred_errors_normal_earlyplacebo_`file_suff'`ge_suff'"
drop latePE
rename earlyPE PE8
gen byte early = 1
append using "${dir_base}/data/estimates/gen_perm_br_`file_suff'_pret_sub2`ge_suff'.dta", keep(PE8 codigo)
replace early = 0 if early==.
rename PE8 PE
di "The combined p-value is for the null of equality (so high p-value means can't reject that the same)"
ksmirnov PE, by(early)
local p_cor : display %5.3f `r(p_cor)'
writeout_txt `p_cor' "KS_pval_compre_pred_errors_`file_suff'`ge_suff'"
use `initdata', clear
end
|
*! Marks observations that are complete (non-missing) on all listed variables
*! Can handle normal variables and time-subscripted variables (e.g. pop(1980))
program complete_units
version 11.0 //just a guess here
syntax anything, generate(string)
cap tsset, noquery
if _rc==0 {
local pvar = "`r(panelvar)'"
local tvar = "`r(timevar)'"
}
generate byte `generate'=1
foreach var in `anything'{
if regexm("`var'", "(.+)\(([0-9]+)\)")!=0{
local had_subscripted_var 1
assert_msg "`tvar'"!="", message("Subscripting variable but not tsset.")
local var = regexs(1)
local timepart = regexs(2)
qui replace `generate'=0 if `var'>=. & `tvar'==`timepart'
}
else{
qui replace `generate'=0 if `var'>=.
}
}
if "`had_subscripted_var'"!="" {
tempvar todrop
bys `pvar': egen `todrop' = min(`generate')
qui replace `generate' = `todrop'
}
end
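/*
 * Example (a sketch; assumes a tsset panel and that pop and gdppc exist -- the
 * variable names here are hypothetical):
tsset codigo year
complete_units pop gdppc(1980), generate(complete)
keep if complete
 * Because a time-subscripted variable is present, the flag is collapsed to the
 * unit level: a unit is incomplete if pop is ever missing or gdppc is missing in 1980.
*/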
|
*! version 0.1 Brian Quistorff
*! Cross-fitting a model to produce honest predictions/residuals and fit statistics:
*! crossfit newvar [, k(int 5) by(varname) residuals outcome(varname) nodots] : est_cmd
*! est_cmd needs to fit the basic "syntax" format (we sneak in a new 'if' clause)
*! Simple usage: crossfit price_hat_oos, k(5) outcome(price): reg price mpg
*! This will generate out-of-sample predictions price_hat_oos and provide fit metrics: R2, MSE, MAE
program crossfit, eclass
version 12.0 //guess
* I could've allowed est_cmd to be unrestricted, but then I would've had to preserve, filter, restore a bunch and
* this would also kill [in] as the sample changes.
*TODO: Could keep track of the individual estimations (and e(sample)?) and then allow predict on a potentially new/modified sample.
* This could switch to -crossfit- and then -predict- steps.
_on_colon_parse `0'
loc 0 `s(before)'
loc est_cmd_all `s(after)'
syntax anything(name=newvar) [, k(int 5) by(varname) residuals outcome(varname) nodots]
loc 0 `est_cmd_all'
syntax anything(equalok name=est_cmd) [if/] [in] [fw aw pw iw/] [, *]
if "`if'"!="" loc if_and "& `if'"
*get groups
if "`by'"=="" {
tempvar rand by
gen `rand' = runiform()
gen int `by' = floor(`rand'*`k') +1
}
else {
_assert "`k'"!="", msg("k or by() required")
summ `by', meanonly
loc k `r(max)'
}
*sort real from temp vars
if "`residuals'"!="" {
_assert "`outcome'"!="", msg("-, outcome()- required with -, residuals-")
loc res `newvar'
tempvar pred
}
else {
loc pred `newvar'
if "`outcome'"!="" tempvar res
}
if "`weight'"!="" loc weight_str [`weight'=`exp']
tempvar i_pred
qui gen `pred'=.
if "`dots'"!="nodots" _dots 0, title(Folds) reps(`k')
forv i=1/`k' {
qui `est_cmd' if `by'!=`i' `if_and' `in' `weight_str', `options'
qui predict `i_pred', xb
qui replace `pred'=`i_pred' if `by'==`i' `if_and'
drop `i_pred'
if "`dots'"!="nodots" _dots `i' 0
}
if "`dots'"!="nodots" di _n
if "`res'"!="" {
ereturn clear
qui corr `outcome' `pred'
ereturn scalar r2 = r(rho)^2
gen `res' = `outcome' - `pred'
tempvar res2 abs_res
gen `res2' = `res'^2
summ `res2', meanonly
ereturn scalar mse = `r(mean)'
gen `abs_res' = abs(`res')
summ `abs_res', meanonly
ereturn scalar mae = `r(mean)'
}
end
/*
* Tests
sysuse auto
crossfit price_hat_oos, k(5): reg price mpg
*/
|
*! version 1.1
*! For numerical replication one needs to list the operating system, application version, and processor type
*! Essentially a different take on -about- (but I don't care about memory, license, copyright)
* Ref: http://www.stata.com/support/faqs/windows/results-in-different-versions/
* Note that log open/close timestamps don't happen for the batch-mode logs
* Environment variables should be noted as well.
* Some are consequential so list them in $envvars_show and the others in $envvars_hide
program display_run_specs
version 11.0 //Just a guess at the version
stata_flavor
di _skip(17) as text "Flavor = " as result "`r(product_name)'"
qui update
di _skip(15) as text "Revision = " as result %tddd_Mon_CCYY r(inst_exe)
*query compilenumber //This executable should be identified completely from other info
local c_opts_str os osdtl machine_type byteorder hostname pwd
*local c_opts_str = `c_opts_str' sysdir_stata //are you really playing with this given the version?
*S_ADO should be standardized per-project
*local c_opts_str = `c_opts_str' adopath sysdir_base sysdir_site sysdir_plus sysdir_personal sysdir_oldplace
foreach c_opt of local c_opts_str {
local skip = 23 - (length("`c_opt'")+3)
di _skip(`skip') as text "c(`c_opt') = " as result `""`c(`c_opt')'""'
}
local c_opts_num stata_version processors
foreach c_opt of local c_opts_num {
local skip = 23 - (length("`c_opt'")+3)
di _skip(`skip') as text "c(`c_opt') = " as result "`c(`c_opt')'"
}
foreach vname in $envvars_show {
di `"env `vname': `: environment `vname''"'
}
foreach vname in $envvars_hide {
di `"LOGREMOVE env `vname': `: environment `vname''"'
}
end
*Get the real flavor (not c(flavor)!)
*The cl_ext aren't correct for Windows
program stata_flavor, rclass
if "`c(flavor)'"=="Small"{
return local flavor = "Small"
return local product_name = "Small Stata"
*return local cl_ext = "-sm"
}
else{
if c(SE)==0{
return local flavor = "IC"
return local product_name = "Stata/IC"
*return local cl_ext = ""
}
else{
if c(MP)==0{
return local flavor = "SE"
return local product_name = "Stata/SE"
*return local cl_ext = "-se"
}
else{
return local flavor = "MP"
return local product_name = "Stata/MP"
*return local cl_ext = "-mp"
}
}
}
end
|
*! v0.3
*! like -drop if- but appends the if condition to the # dropped msg
*! Helpful if used inside a loop or program where you wouldn't see the command echoed.
program drop_if, rclass
version 10
*version is a guess
syntax anything(equalok everything)
qui count if `anything'
local num=r(N)
qui drop if `anything'
qui count
local num2=r(N)
di `"(`num' observations deleted: `anything'. Leaving `num2')"'
return scalar n_dropped = `num'
end
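/*
 * Example (sketch, using the shipped auto dataset, where rep78 has 5 missing values):
sysuse auto, clear
drop_if missing(rep78)
 * displays: (5 observations deleted: missing(rep78). Leaving 69)
*/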
|
*! v0.3 Brian Quistorff <bquistorff@gmail.com>
*! Shows the version of the Stata dataset whose filename is passed in.
program ds_version, rclass
version 11.0 //just a guess here
args fname
tempname fhandle v
file open `fhandle' using "`fname'", read binary
file read `fhandle' %1s firstbytechar
if "`firstbytechar'"=="<"{
*In the future there will be more versions, so have to read ahead
di "Minimum Version 117."
di "This utility doesn't not have a full XML parser."
di "I will attempt to get the version number from where"
di "it would be stored if Stata saved this file."
file seek `fhandle' 28
file read `fhandle' %3s ver_str
scalar `v'=`ver_str'
di "Found version " `v'
}
else {
mata: st_numscalar("v", ascii("`firstbytechar'"))
di "Version " `v'
}
file close `fhandle'
return scalar version = `v'
end
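/*
 * Example (a sketch; the file path is hypothetical):
ds_version "data/raw/mydata.dta"
di r(version)   // e.g. 115 for a Stata 12 .dta, 117 for Stata 13
*/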
|
*! version 1.2 Brian Quistorff <bquistorff@gmail.com>
*! Passthrough command allowing one to use the -ereturn- cmds easily.
* Usage: ereturn_do local l1 yes
* Usage: ereturn_do matrix y = y, copy
/* Notes for "post": 1) you need colnames(V)=rownames(V)=colnames(b)
2) the supplied matrices are moved, not copied
*Example:
. mat b = (1,2)
. mat V = (1, 0 \ 0, 1)
. mat rownames V = `: colnames V'
. ereturn_do post b V
*/
* For -eststo- need to specify b
* For -est store- need: b and the macro 'cmd'
program ereturn_do, eclass
version 11.0
* Version requirement is conservative.
syntax anything(equalok everything) [, *]
if "`options'"!="" loc options `", `options'"'
ereturn `anything' `options'
end
|
*! version 1.1 Brian Quistorff <bquistorff@gmail.com>
*! Escapes LaTeX meta-characters
*! Watch the backslashes as Stata is a bit uncommon in how it deals with them.
*! For the caret, it requires the textcomp package
* http://stackoverflow.com/questions/2627135/how-do-i-sanitize-latex-input
mata:
/*
* Do simultaneous character replacement
*/
string scalar simultaneous_char_replace(string scalar input, string rowvector tomatch_chars, string rowvector toreplace_strs){
version 12
string scalar output
output = ""
J = cols(tomatch_chars)
for(i=1; i<=strlen(input); i++){
letter = substr(input,i,1)
for(j=1; j<=J; j++){
if(letter == tomatch_chars[j]){
output = output+toreplace_strs[j]
break
}
}
if(j==J+1){
output = output+letter
}
}
return(output)
}
end
program define escape_latex
version 11.0
*Just a guess at the version
syntax anything(equalok everything name=input) , local(string) [disable_curly]
if substr(`"`input'"',1,1)==`"""' local input `input'
* These replacements use each other's characters so have to do simultanous replacement (not sequential)
mata: st_local("input", simultaneous_char_replace(`"`input'"', ("\","{","}"), ("\textbackslash{}","\{","\}")))
local input = subinstr(`"`input'"',"$", "\$", .)
local input = subinstr(`"`input'"',"&", "\&", .)
local input = subinstr(`"`input'"',"#", "\#", .)
local input = subinstr(`"`input'"',"^", "\textasciicircum{}", .)
local input = subinstr(`"`input'"',"_", "\_", .)
local input = subinstr(`"`input'"',"~", "\textasciitilde{}", .)
local input = subinstr(`"`input'"',"%", "\%", .)
*Some for OT1 encoding (but you're not using that, right!)
local input = subinstr(`"`input'"',"<", "\textless{}", .)
local input = subinstr(`"`input'"',">", "\textgreater{}", .)
local input = subinstr(`"`input'"',"|", "\textbar{}", .)
if "`disable_curly'"!=""{
local input = subinstr(`"`input'"',`"""', "\textquotedbl{}", .)
local input = subinstr(`"`input'"',"'", "\textquotesingle{}", .)
local input = subinstr(`"`input'"',"|", "\textasciigrave{}", .)
}
c_local `local' `"`input'"'
end
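/*
 * Example (a sketch):
escape_latex "50% profit & a_subscript #1", local(esc)
di `"`esc'"'
 * -> 50\% profit \& a\_subscript \#1
*/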
|
program escape_latex_file
version 11 //just a guess
syntax, txt_infile(string) tex_outfile(string)
tempname out_handle in_handle
file open `out_handle' using "`tex_outfile'", write text replace
file open `in_handle' using "`txt_infile'" , read text
file read `in_handle' line
while r(eof)==0 {
escape_latex "`line'", local(line_out)
if "`notfirst'"=="1" file write `out_handle' _n(2)
file write `out_handle' "`line_out'"
local notfirst = "1"
file read `in_handle' line
}
file close `out_handle'
file close `in_handle'
end
|
*! eval_synth_model: For given predictors, treatment time, donors, treated units, and placebo units.
*! Computes the model, does RI, produces graphs & tables, converts to delta_t=1
*
*Required globals: dir_base
* Needs the data to be in a strongly balanced panel
program eval_synth_model, rclass
version 11.0 //just a guess here
syntax , depvar(varname) predictors(string) ttime(int) perms(int) ///
[file_suff(string) tr_unit_codes(numlist integer) tr_unit_titles(string) tc_gph_opts(string) notes(string) ///
obs_weight_char_ds(string) justall(int 0) alpha(string) full_xlabels(string) donor_limit_for_match_cmd(string) ///
end(string) start(string) nested placebo_unit_codes(numlist) placebo_unit_titles(string) ///
savepermweights checkhull connect_treat plot_pe plot_tc output_mean_post_RMSPE_donors ///
noplot_tc_ci nooutput_vmat nooutput_wmat nooutput_pvals nooutput_permn ///
nooutput_diffs nooutput_X0_X1 nooutput_graph_data connect_ci_to_pre_t]
*Check arguments
local num_placebo_units : word count `placebo_unit_codes'
assert_msg (`num_placebo_units'==`: word count `placebo_unit_titles''), message("Placebo unit codes/titles lengths differ")
qui do code/synth_consts.do
tempfile initdatafile
qui save "`initdatafile'", replace
di "start eval_model. Depvar=`depvar'"
if "`file_suff'"==""{
local file_suff = "`depvar'"
}
if `"`tr_unit_titles'"'==""{
local tr_unit_titles = "`tr_unit_codes'"
}
qui tsset, noquery
local pvar = "`r(panelvar)'"
local tvar = "`r(timevar)'"
local orig_tvar = "`r(timevar)'"
local num_trunits : word count `tr_unit_codes'
if "`output_diffs'"=="nooutput_diffs"{
tempfile diffsfile
}
else {
local diffsfile "${dir_base}/data/estimates/gen_perm_br_`file_suff'.dta"
}
local random_unit : word 1 of `tr_unit_codes'
if `num_trunits'==0{
* Likely called from cross-validation, so pick a donor at random
local random_unit = `pvar'[1]
}
qui summ `tvar' if `pvar'==`random_unit' & `depvar'!=.
if "`end'"=="" {
local end = r(max)
}
if "`start'"=="" {
local start = r(min)
}
*Remove units that have missing data
*Do I need to recode the time var as periods?
if r(N)<r(max)-r(min)+1{
sort `pvar' `tvar'
qui levelsof `tvar' if `pvar'==`random_unit', local(times)
local num_times : word count `times'
tempname per_year
egen period = group(`tvar')
qui tsset `pvar' period
local tvar = "period"
local endtime = `end'
local starttime = `start'
forval per=`num_times'(-1)1{
local time : word `per' of `times'
local vallabelstr = `"`per' "`time'" `vallabelstr'"'
if `time'>`endtime'{
continue
}
if `time'==`endtime'{
local end = `per'
}
if `time'==`starttime'{
local start = `per'
}
if `time'<`ttime'{
local pre_tvar_labels = "`time' `pre_tvar_labels'"
}
else{
local post_tvar_labels = "`time' `post_tvar_labels'"
local tper = `per' //catch the last one
}
local predictors : subinstr local predictors "(`time')" "(`per')", all
}
label define `per_year' `vallabelstr'
label values period `per_year'
local tperlabel = `ttime'
}
else {
local tper = `ttime'
forval per=`start'/`=`ttime'-1'{
local pre_tvar_labels = "`pre_tvar_labels' `per'"
}
forval per=`ttime'/`end'{
local post_tvar_labels = "`post_tvar_labels' `per'"
}
local tperlabel = `ttime'
}
local last_pre = `tper'-1
**mac dir
local ytitle : variable label `depvar'
tempname Vdiag weights weights_unr x_bal pred_mat y_diff_perm X1 X0
if "`donor_limit_for_match_cmd'"!=""{
local donor_limit_for_match_cmd "`donor_limit_for_match_cmd' \`trun'"
}
forval i = 1/`num_trunits'{
*Initial
if `i'==1{
cap postclose postdiff
matrix_post_lines , matrix(`y_diff_perm') varstub(PE) varnumstart(`start') varnumend(`end')
local ps_posting "`s(ps_posting)'"
qui postfile postdiff `s(ps_init)' int `pvar' byte unit_type using "`diffsfile'", replace
}
local title : word `i' of `tr_unit_titles'
local trun : word `i' of `tr_unit_codes'
local file_ind = `i'-1
tempname tc_outcome`trun' y_diff`trun'
tempfile intm1
qui save `intm1'
foreach trun2 in `tr_unit_codes'{
if `trun2'!=`trun'{
qui drop if `pvar'==`trun2'
}
}
`donor_limit_for_match_cmd'
if "`checkhull'"!=""{
if "`orig_tvar'"!="`tvar"{
local gph_tvar_opt "gph_tvar(`orig_tvar')"
}
check_in_convex_hull `depvar', first_pre(`start') last_pre(`last_pre') ///
end(`end') trunit(`trun') file_suff(t`file_ind'_`file_suff') ///
`gph_tvar_opt' xlabels(`full_xlabels') main_label(`title') tper_spec(`tperlabel')
}
if "`nested'"!=""{
di "Starting synth estimation on treated=`trun'"
}
*Note : trperiod is first year under treatment; capture so an optimization error on a keeper is recorded below instead of aborting
cap qui synth `depvar' `predictors', trunit(`trun') `nested' ///
mspeperiod(`start'(1)`last_pre') resultsperiod(`tper'(1)`end') trperiod(`tper')
if "`nested'"!=""{
di "Finished synth estimation on treated=`trun'"
}
qui use `intm1', clear
if _rc ==1 {
error 1
}
if _rc !=0{
local tr_err_codes "`tr_err_codes' `trun'"
local tr_err_titles `"`tr_err_titles' `title'"'
continue
}
mat `Vdiag' = (vecdiag(e(V_matrix)))'
mat `weights' = e(W_weights)
mat `weights_unr' = e(W_weights_unr)
mat `tc_outcome`trun'' = e(Zbal) \ e(Ybal)
mat `x_bal' = e(X_balance)
mat `X0' = e(X0_normalized)
mat `X1' = e(X1_normalized)
mat `y_diff`trun'' = (`tc_outcome`trun''[1...,1]-`tc_outcome`trun''[1...,2])
mat `y_diff_perm' = `y_diff`trun''
post postdiff `ps_posting' (`trun') (${Unit_type_treated})
if "`output_vmat'"!="nooutput_vmat"{
mat `pred_mat' = `Vdiag', `x_bal'
matrix colnames `pred_mat' = weight treated control
output_pred_mat , mattype("v-weights") year_replace_period_list("`pre_tvar_labels'") ///
file_suff("t`file_ind'_`file_suff'") mat(`pred_mat')
}
*The rounded weights are good for display
if "`output_wmat'"!="nooutput_wmat"{
output_unit_matches , numb(10) file_suff(t`file_ind'_`file_suff') match_file(`obs_weight_char_ds') ///
weights_unr(`weights_unr') weights(`weights')
}
if "`plot_tc'"!=""{
graph_tc , start(`start') xlabels(`full_xlabels') file_suff("t`file_ind'_`file_suff'") title("`title'") ///
notes("`notes'") tper_spec(`tperlabel') ytitle("`ytitle'") tc_outcome(`tc_outcome`trun'') ///
tval_labels("`pre_tvar_labels' `post_tvar_labels'") tc_gph_opts(`tc_gph_opts') main_label(`title')
}
if "`output_X0_X1'"!="nooutput_X0_X1"{
forval j=0/1{
mat `X`j'' = `X`j'''
local cnames : colnames `X`j''
local cnames = subinstr("`cnames'",")","",.)
local cnames = subinstr("`cnames'","(","__",.)
mat colnames `X`j'' = `cnames'
matsave `X`j'', replace path("${dir_base}/data/generated/synth_mats/X`j'_t`file_ind'_`file_suff'.dta")
}
}
*Final
if `i'==`num_trunits'{
postclose postdiff
}
}
local tr_noerr_codes : list tr_unit_codes - tr_err_codes
local tr_noerr_titles : list tr_unit_titles - tr_err_titles
local num_trunits_noerr : word count `tr_noerr_codes'
discard //good measure
if `perms'==0 {
qui use "`initdatafile'", clear
exit 0
}
tempfile just_donors_file permsdiff intm2 intm3
qui save `intm2'
*Limit to just potential donors
foreach trun in `tr_unit_codes'{
qui drop if `pvar'==`trun'
}
qui save "`just_donors_file'", replace
qui count if `tvar'==`start'
local nunits = r(N)
tempname donor_order donors_touse y_diffs y_diffs_t p_vals outmat
tempvar rand
gen `rand' = uniform()
if `num_placebo_units'>0 {
local placebo_unit_codes_commas = subinstr(trim("`placebo_unit_codes'"), " ", ", ", .)
qui replace `rand'=0 if inlist(`pvar',`placebo_unit_codes_commas')
}
sort `rand'
mkmat `pvar' if `tvar'==`start', matrix(`donor_order')
use `intm2', clear
if `perms'==-1 | `perms'>`nunits'{
local perms = `nunits'
}
mat `donors_touse' = `donor_order'[1..`perms',1]
*The predictors str makes the whole string too long for parallel to process so pass as global
global fargs "predictors(`predictors')"
if "`savepermweights'"!=""{
local permweightsfile "${dir_base}/data/estimates/weights/permweights_`file_suff'.dta"
cap erase "`permweightsfile'"
}
* Run permutation estimations
qui save `intm3'
if "${numclusters}"=="" | "${numclusters}"=="1"{
mata: donors_touse = st_matrix("`donors_touse'")
gen_perm_donors , donor_mat(donors_touse) outfile("`permsdiff'") ///
start(`start') tper(`tper') end(`end') depvar(`depvar') `nested' ///
infile("`just_donors_file'") permweightsfile("`permweightsfile'") donor_limit_for_match_cmd(`donor_limit_for_match_cmd')
}
else{
parallel_justout_helper , donor_mat(`donors_touse') cmd_base(gen_perm_donors) outfile("`permsdiff'") ///
permweightsfile(`permweightsfile') ///
cmd_options(`"start(`start') tper(`tper') end(`end') depvar(`depvar') infile("`just_donors_file'") `nested' donor_limit_for_match_cmd(`donor_limit_for_match_cmd')"')
}
use "`permsdiff'", clear
sort `pvar'
* Filter out the optimization errors
qui keep if PE`start'==.
keep `pvar'
gen reason = ${Synth_opt_error}
cap append using "${dir_base}/data/estimates/todrop_`file_suff'.dta"
qui save12 "${dir_base}/data/estimates/todrop_`file_suff'.dta", replace
use "`permsdiff'", clear
sort `pvar'
qui drop if PE`start'==.
local perms_actual = _N
mkmat PE*, mat(`y_diffs_t')
mat `y_diffs' = `y_diffs_t''
if `num_trunits'>=1{
append using "`diffsfile'"
}
qui save12 "`diffsfile'", replace
if "`permweightsfile'"!=""{
use "`permweightsfile'", clear
qui save12 "`permweightsfile'", replace
}
use `intm3', clear
local pre_len =`tper'-`start'
if "`output_mean_post_RMSPE_donors'"!=""{
tempname mean_post_RMSPE_donors
mata: mean_post_RMSPEs("`y_diffs'", `pre_len')
scalar `mean_post_RMSPE_donors' = r(mean_post_RMSPEs)
return scalar mean_post_RMSPE_donors = `mean_post_RMSPE_donors'
}
if "`tperlabel'"=="" {
if "`post_tvar_labels'"==""{
local tperlabel = `tper'
}
else {
local tperlabel : word 1 of `post_tvar_labels'
}
}
forval i = 1/`num_placebo_units' {
local placebo_unit_code : word `i' of `placebo_unit_codes'
tempname tc_outcome`placebo_unit_code' y_diff`placebo_unit_code'
cap build_graphing_mats `placebo_unit_code', depvar(`depvar') startper(`start') perms_file("`diffsfile'")
if _rc !=0{
local placebo_err_codes "`placebo_err_codes' `placebo_unit_code'"
local placebo_err_titles `"`placebo_err_titles' `: word `i' of `placebo_unit_titles''"'
continue
}
matrix `tc_outcome`placebo_unit_code'' = r(tc_outcome)
matrix `y_diff`placebo_unit_code'' = r(y_diff)
}
local placebo_noerr_codes : list placebo_unit_codes - placebo_err_codes
local placebo_noerr_titles : list placebo_unit_titles - placebo_err_titles
local eval_unit_codes =trim("`tr_noerr_codes' `placebo_noerr_codes'")
local eval_unit_titles =trim(`"`tr_noerr_titles' `placebo_noerr_titles'"')
local num_eval_units : word count `eval_unit_codes'
* Outputs involving permutations
forval i = 1/`num_eval_units'{
local eval_unit : word `i' of `eval_unit_codes'
local title : word `i' of `eval_unit_titles'
tempname CIs`eval_unit'
local file_ind = `i'-1
if "`plot_pe'"!=""{
graph_PEs, start(`start') file_suff("t`file_ind'_`file_suff'") main_label(`title') ///
title("`title'") notes("`notes'") tper_spec(`tperlabel') ytitle("`ytitle'") xlabels(`full_xlabels') ///
tval_labels("`pre_tvar_labels' `post_tvar_labels'") y_diff(`y_diff`eval_unit'') y_diffs(`y_diffs')
}
mata: eval_placebo("`y_diff`eval_unit''", "`y_diffs'", `pre_len', `justall', "`post_tvar_labels'")
local perc_perms_match_better`eval_unit' : display %2.0f 100*r(howgood_match)
mat `p_vals' = r(p_vals)
di "good_match (`title'): `perc_perms_match_better`eval_unit''% of the permutation tests had lower pre-treatment RMSPEs."
if "`output_pvals'"!="nooutput_pvals"{
di "P-values (permutation, `title')"
output_pval_table , matrix(`p_vals') file_base("p-vals_t`file_ind'_`file_suff'") ///
note("P-values from `perms_actual' permutation tests. `perc_perms_match_better`eval_unit''\% of the permutation tests had lower pre-treatment RMSPEs.")
}
mata: makeCIs(`pre_len', "`y_diff`eval_unit''", "`y_diffs'", "`tc_outcome`eval_unit''", `alpha')
mat `CIs`eval_unit'' = r(CIs)
local ci_num : display %2.0f 100*(1-r(CI_pval))
return scalar ci_num = `ci_num'
if "`plot_tc_ci'"!="noplot_tc_ci"{
graph_tc_ci , file_suff("t`file_ind'_`file_suff'") tc_gph_opts(`tc_gph_opts') title("`title'") ///
notes("`notes'") num_reps(`perms_actual') tc_outcome(`tc_outcome`eval_unit'') cis(`CIs`eval_unit'') ///
perc_perms_match_better(`perc_perms_match_better`eval_unit'') `connect_treat' ci_num(`ci_num') ///
tper_spec(`tperlabel') ytitle("`ytitle'") tval_labels("`pre_tvar_labels' `post_tvar_labels'") ///
xlabels(`full_xlabels') start(`start') main_label(`title') `connect_ci_to_pre_t'
}
if "`output_graph_data'"!="nooutput_graph_data"{
mat `outmat' = `tc_outcome`eval_unit'', `CIs`eval_unit''
mat colnames `outmat' = Treatment Control CI_low CI_high
matsave `outmat', replace path("${dir_base}/data/estimates/graph_data_`file_suff'_u`eval_unit'.dta")
}
}
if "`output_permn'"!="nooutput_permn"{
qui writeout_txt `perms_actual' "num_perm_`file_suff'"
}
*Remember can exit in the middle if no perms
qui use "`initdatafile'", clear
return scalar num_perm_act = `perms_actual'
return local eval_noerr_codes "`eval_unit_codes'"
foreach eval_unit in `eval_unit_codes'{
return scalar perc_perms_match_better`eval_unit' = `perc_perms_match_better`eval_unit''
return matrix tc_outcome`eval_unit' = `tc_outcome`eval_unit''
return matrix cis`eval_unit' = `CIs`eval_unit''
}
end
|
*Version 0.1 Brian Quistorff <bquistorff@gmail.com>
* Description: Returns the first line that matches the regular expression (or 0)
program find_in_file
version 12 //guess
syntax using/, regexp(string) local(string) [start_at_ln(int 0)]
tempname fh
local linenum = 0
local line_on = 0
file open `fh' using `"`using'"', read
file read `fh' line
while r(eof)==0 {
local linenum = `linenum' + 1
if `linenum'>=`start_at_ln'{
if regexm(`"`macval(line)'"',`"`regexp'"'){
local line_on = `linenum'
continue, break
}
}
file read `fh' line
}
file close `fh'
c_local `local' `line_on'
end
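/*
 * Example (a sketch; notes.txt is a hypothetical file):
find_in_file using "notes.txt", regexp("^TODO") local(first_todo_ln)
di `first_todo_ln'   // 0 if no line matched
*/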
|
*Version 0.1 Brian Quistorff <bquistorff@gmail.com>
* Description: Similar to -sample- but for data in a file (useful for big datasets).
program define fsample
version 11.0 //Just a guess at the version
args f gsize ssize
qui describe using "`f'", short
local fsize = r(N)
local ncompletegroups `=floor(`fsize'/`gsize')'
tempfile acc_sample
clear
save `acc_sample', emptyok
forvalues groupnum = 1/`ncompletegroups' {
local startobs `=(`groupnum'-1)*`gsize'+1'
local endobs `=`startobs'+`gsize'-1'
use in `startobs'/`endobs' using "`f'", clear
qui sample `ssize', count
append using `acc_sample', nolabel
qui save `acc_sample', replace
}
local final_ssize `=round(`ssize'/`gsize'*mod(`fsize',`gsize'))'
if `final_ssize'>0 {
local startobs `=`ncompletegroups'*`gsize'+1'
local endobs `fsize'
use in `startobs'/`endobs' using "`f'", clear
qui sample `final_ssize', count
append using `acc_sample', nolabel
*save `acc_sample', replace
}
end
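/*
 * Example (a sketch; bigfile.dta is hypothetical):
 * Read bigfile.dta in blocks of 100,000 observations, keep a random 1,000 from
 * each block, and leave the accumulated sample in memory.
fsample "bigfile.dta" 100000 1000
*/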
|
*! gen_perm_donors: Runs permutation estimations on donors
*! Needs the dataset to have delta_t=1 and to contain only donors
*
* Required globals: fargs, dir_base
* Suboptions: fargs = predictors(string)
program gen_perm_donors
version 11.0 //just a guess here
syntax , donor_mat(string) depvar(string) infile(string) ///
start(int) tper(int) end(int) outfile(string) ///
[nested permweightsfile(string) logfile(string) donor_limit_for_match_cmd(string)]
local 0 ", ${fargs}"
syntax , predictors(string)
if "`logfile'"!=""{
log using "`logfile'${pll_instance}.log", replace name(gen_perm_donors) //not using log_open.
}
*This stuff doesn't get copied over
if "${pll_instance}"!=""{
qui include proj_prefs.do
}
if "${testing}"=="1"{
*mat dir
*mac dir
*local set_trace "set trace on"
*local unset_trace "set trace off"
}
`set_trace'
di "Generating permuation results."
if "`donor_limit_for_match_cmd'"!=""{
local donor_limit_for_match_cmd "`donor_limit_for_match_cmd' \`curr_tru'"
}
tempname mydonorsmat tc_outcome y_diff_perm weights_unr
mata: st_matrix("`mydonorsmat'", `donor_mat'${pll_instance})
use "`infile'" , clear
qui tsset, noquery
local tvar = "`r(timevar)'"
local pvar = "`r(panelvar)'"
matrix_post_lines , matrix(`y_diff_perm') varstub(PE) varnumstart(`start') varnumend(`end')
local ps_init "`s(ps_init)'"
local ps_posting "`s(ps_posting)'"
cap postclose postperm
qui postfile postperm `ps_init' int `pvar' byte unit_type using "`outfile'${pll_instance}", replace
local last_pre_per = `tper'-1
local pre_len = `tper'-`start'
local reps = rowsof(`mydonorsmat')
forvalues i = 1/`reps'{
print_dots `i' `reps'
local curr_tru = `mydonorsmat'[`i', 1]
use "`infile'" , clear
`donor_limit_for_match_cmd'
cap synth `depvar' `predictors', trunit(`curr_tru') mspeperiod(`start'(1)`last_pre_per') ///
resultsperiod(`tper'(1)`end') `nested' trperiod(`tper') skipchecks
if _rc ==1 {
error 1
}
if _rc != 0{
mat `tc_outcome' = J(`=`end'-`start'+1',2,.)
}
else {
mat `tc_outcome' = e(Zbal) \ e(Ybal)
}
mat `y_diff_perm' = (`tc_outcome'[1...,1]-`tc_outcome'[1...,2])
post postperm `ps_posting' (`curr_tru') (${Unit_type_donor})
* Check for quad-programming error
if `tc_outcome'[1,2]==. {
continue
}
if "`permweightsfile'"!=""{
mat `weights_unr' = e(W_weights_unr)
drop _all
qui svmat `weights_unr', names(w)
rename (w*) (counit weight)
qui compress counit
gen int trunit = `curr_tru'
cap append using "`permweightsfile'${pll_instance}"
qui save "`permweightsfile'${pll_instance}", replace
}
}
postclose postperm
`unset_trace'
cap log close gen_perm_donors
end
|
*! version 1.0 Brian Quistorff <bquistorff@gmail.com>
*! Generates a unique id for each pair (like egen ... group())
*! but makes sure they don't have leading 0s
program gen_robust_id
version 11.0
*Just a guess at the version
syntax varlist, generate(string)
tempvar tv
egen `tv' = group(`varlist')
summ `tv', meanonly
local nitems = `r(max)'
local ndigits = floor(log10(`nitems')) + 1
local base = 10^(`ndigits'-1)
if `nitems'+`base'>=10^`ndigits'{
local base = 10^`ndigits'
}
gen long `generate' = `tv'+`base'
compress `generate'
end
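/*
 * Example (a sketch; state and county are hypothetical variables). With 150 distinct
 * groups: ndigits = floor(log10(150))+1 = 3 and base = 100, so the ids run 101-250;
 * with 950 groups the base bumps up to 1000 and the ids run 1001-1950. Either way all
 * ids have the same width and no leading zeros (safe as string/text merge keys).
gen_robust_id state county, generate(pair_id)
*/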
|
*! v1.3 Brian Quistorff <bquistorff@gmail.com>
*! If you like to store your config values in a csv file
*! (with headers "key" and "value") then this can retrieve those
*! Will default to local of the name `key'
*! if testing=1 will check for key-testing first
*! binding quotes will be removed (Stata style)
*! if you want to encode:"yes no","wow wow"
*! Then you should write:`""yes no","wow wow""'
program get_config_value
version 11.0 //just a guess here
syntax namelist(max=1 name=key) [, local(string) global(string) filepath(string) default(string)]
if "`filepath'"=="" local filepath "${main_root}code/config.project.csv"
local key_orig `key'
preserve
*qui insheet using "`filepath'", comma names clear
*import is better with handling double-quotes
qui import delimited "`filepath'", varnames(1) stripquote(no) clear
qui count if key=="testing"
if r(N)>0{
gen byte is_testing = (key=="testing")
sort is_testing
local t = value[_N]
if "`t'"=="1"{
qui count if key=="`key'-testing"
if r(N)>0 loc key "`key'-testing"
}
}
qui keep if key=="`key'"
if _N>0{
local val = value[1]
* If double quotes are just binding, then remove (like you need to enclose a ",")
* Don't use -import delimited, stripquotes(default)- because that converts other quotes
if (length(`"`val'"')>1 & `:word count `val''==1) local val `val'
di `"get_config_value: `key'=`val'"'
}
else{
local val `"`default'"'
di `"get_config_value (default): `key'=`val'"'
}
restore
if "`global'"!="" global `global' `"`val'"'
else{
if "`local'"=="" local local `key_orig'
c_local `local' `"`val'"'
}
end
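/*
 * Example (a sketch): if code/config.project.csv contains the rows
 * key,value
 * testing,0
 * perms,500
 * then
get_config_value perms, default(100)
 * leaves local perms = 500 in the caller (and 100 if the key were absent).
*/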
|
*! version 1.0
*! Retrieves the key
program get_key, sclass
version 11 //guess
loc key : char _dta[key]
di "key: `key'"
sreturn local key "`key'"
end
|
* Will store into locals the return values from a command (some commands should be 1-liners!)
* Ex:
* get_returns pvar=r(panelvar) tvar=r(timevar) : tsset, noquery
program get_returns
gettoken my_opts 0: 0, parse(":")
gettoken colon their_cmd: 0, parse(":")
`their_cmd'
foreach my_opt in `my_opts'{
if regexm("`my_opt'","(.+)=(.+\(.+\))"){
c_local `=regexs(1)' = "``=regexs(2)''"
}
}
end
|
*! v1.1 bquistorff@gmail.com
*! converts a *.gph file to three possible derivative files
program gph2fmt
version 12.0 //just a guess
syntax anything(everything name=gph_file), [plain_file(string) titleless_file(string) bare_file(string)]
tempname toexport
graph use `gph_file', name(`toexport')
if "`plain_file'"!="" graph export "`plain_file'", replace
gr_edit .title.draw_view.setstyle, style(no)
if "`titleless_file'"!="" graph export "`titleless_file'", replace
gr_edit .note.draw_view.setstyle, style(no)
if "`bare_file'"!="" graph export "`bare_file'", replace
graph drop `toexport'
end
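/*
 * Example (a sketch; file names are hypothetical):
gph2fmt "figs/tc_main.gph", plain_file("figs/tc_main.png") ///
	titleless_file("figs/tc_main_notitle.png") bare_file("figs/tc_main_bare.png")
*/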
|
*! Graphs the prediction errors
*! Required globals: dir_base
program graph_PEs
version 11.0 //just a guess here
syntax , start(int) file_suff(string) title(string) notes(string) ///
tper_spec(int) y_diff(string) y_diffs(string) ///
[ytitle(string) tval_labels(string) xlabels(string) main_label(string)]
tempname y_diff_all
tempfile initdata
qui save `initdata'
drop _all
if "`main_label'"==""{
local main_label "Main"
}
mat `y_diff_all' = `y_diff', `y_diffs'
local ncols = colsof(`y_diff_all')
qui svmat `y_diff_all', names(D)
qui gen year = _n+`start'-1
label variable year "Year"
if trim("`tval_labels'")!=""{
local num_years : word count `tval_labels'
forval i=1/`num_years'{
local year : word `i' of `tval_labels'
local recodestr = "`recodestr' (`i'=`year')"
}
qui recode year `recodestr'
}
wrap_text , unwrappedtext("`notes'") width(90)
local wrapped `"`s(wrappedtext)'"'
local grph_perm_cmds = ""
forval d=2/`ncols'{
local grph_perm_cmds "`grph_perm_cmds' (line D`d' year, lcolor(gs10) lwidth(medthin) lpattern(solid) )"
}
twoway `grph_perm_cmds' (connected D1 year, lpattern(solid) lwidth(thick) msymbol(S) mcolor(black)), ///
xline(`tper_spec', lpattern(shortdash)) legend(order(`ncols' "`main_label'" 1 "Permutations")) ///
ytitle("Pred Errors: `ytitle'") ylabel(minmax) xlabels(`xlabels') title("`title'") ///
note(`wrapped') name(`=strtoname("PEs_`file_suff'",1)', replace)
qui save_fig "PEs_`file_suff'"
use `initdata', clear
end
|
*! Graphs the treatment and control
*! Required globals: dir_base
* To do: Need to convert the xlab code to like it is in graph_tc_ci
program graph_tc
version 11.0 //just a guess here
syntax , start(int) file_suff(string) title(string) notes(string) ///
tper_spec(int) ytitle(string) tc_outcome(string) ///
[tval_labels(string) tc_gph_opts(string) xlabels(string) main_label(string)]
tempfile initdata
qui save `initdata'
drop _all
qui svmat `tc_outcome', names(N)
rename (N*) (Treated Synthetic)
if "`main_label'"==""{
local main_label "Main"
}
qui gen year = _n+`start'-1
label variable year "Year"
if "`tval_labels'"!=""{
local num_years : word count `tval_labels'
forval i=1/`num_years'{
local year : word `i' of `tval_labels'
local recodestr = "`recodestr' (`i'=`year')"
}
qui recode year `recodestr'
}
wrap_text , unwrappedtext("`notes'") width(90)
local wrapped `"`s(wrappedtext)'"'
twoway (line Treated year) (line Synthetic year, lpattern(longdash)), ///
xline(`tper_spec', lpattern(shortdash)) `tc_gph_opts' ytitle("`ytitle'") ylabel(minmax) ///
title("`title'") note(`wrapped') legend(order(1 "`main_label'" 2 "Control (Synth)")) ///
name(`=strtoname("TC_`file_suff'",1)', replace) xlabels(`xlabels')
qui save_fig "TC_`file_suff'"
use `initdata', clear
end
|
*! Graphs Treatment and Control with Confidence Intervals
*! Required globals: dir_base
program graph_tc_ci
version 11.0 //just a guess here
syntax , file_suff(string) title(string) ///
tper_spec(int) num_reps(int) tc_outcome(string) cis(string) ///
[start(int 1) ci_num(string) tval_labels(string) tc_gph_opts(string) ///
graph_logs xlabels(string) connect_treat logs ytitle(string) notes(string) ///
perc_perms_match_better(string) ylabel_scale(string) main_label(string) ///
connect_ci_to_pre_t extra_cmd(string) extra_names(string) extra_legend(string) ///
control_str(string)]
tempname all_to_graph
tempfile initdata
qui save `initdata'
drop _all
mat `all_to_graph' = `tc_outcome', `cis'
qui svmat `all_to_graph', names(N)
rename (N*) (Treated Synthetic LowCI HighCI `extra_names')
if "`connect_ci_to_pre_t'"!=""{
qui count if LowCI==.
local last_per_pre_t = r(N)
qui replace LowCI = Synthetic in `last_per_pre_t'
qui replace HighCI = Synthetic in `last_per_pre_t'
}
if ("`main_label'"=="") local main_label "Main"
if ("`control_str'"=="") local control_str "Control (Synth)"
if ("`control_str'"!="Omit") local control_opt `"3 "`control_str'""'
qui gen year = _n+`start'-1
label variable year "Year"
if trim("`tval_labels'")!=""{
local num_years : word count `tval_labels'
forval i=1/`num_years'{
local year : word `i' of `tval_labels'
local recodestr = "`recodestr' (`i'=`year')"
}
qui recode year `recodestr'
}
if ("`ci_num'"!="") local ci_string = "`ci_num'% CIs"
else local ci_string "CIs"
if `num_reps'!=0{
local notes "`notes' Confidence intervals for control from `num_reps' permutation tests."
}
if "`perc_perms_match_better'"!=""{
local notes "`notes' `perc_perms_match_better'% of the permutation tests had lower pre-treatment RMSPEs."
}
local treat_type "line"
if "`connect_treat'"!="" {
local treat_type "connected"
}
if "`graph_logs'"!="" {
log_axis_ticks , vars(LowCI HighCI Treated Synthetic) label_scale(`ylabel_scale')
local yaxislogopt = `"yscale(log) ymtick(`s(minor_ticks)') ylabel(`s(major_ticks)', angle(horizontal))"'
}
wrap_text , unwrappedtext("`notes'") width(90)
local wrapped `"`s(wrappedtext)'"'
twoway (rarea LowCI HighCI year, color(gs12) fcolor(gs12)) ///
(`treat_type' Treated year, lpattern(solid)) ///
(line Synthetic year, lpattern(dash)) ///
`extra_cmd', ///
title("`title'") ytitle("`ytitle'") /* ylabel(minmax)*/ ///
xline(`tper_spec', lpattern(shortdash)) `tc_gph_opts' xlabels(`xlabels') ///
legend(order(2 "`main_label'" `control_opt' 1 "`ci_string'" `extra_legend') cols(3)) ///
note(`wrapped') `yaxislogopt' name(`=strtoname("TC_CIs_`file_suff'",1)', replace)
qui save_fig "TC_CIs_`file_suff'"
use `initdata', clear
end
|
*! Identifies uncontaminated donors with a temporal placebo test
program identify_donors_placebo
version 11.0 //just a guess here
syntax , depvar(varname) predictors(string) tr_unit_codes(numlist integer) ///
early_predictors(string) precise_tyear(int) last_pre_year(int) tper(int) ///
file_suff(string) perms(int) first_treatmentyear(int) ///
[skip_gen_early_placebo keepunits(numlist integer) width(int 5) onlyonce ///
donor_limit_for_match_cmd(string)]
di "Starting to determine other treated units by comparing early placebo to normal. Width=`width'"
qui do code/synth_consts.do
*pause
*Do the early placebo to get a good distribution
if "`skip_gen_early_placebo'"!="" {
di "Doing an early placebo (pre-drop)"
eval_synth_model, depvar(`depvar') tr_unit_codes(`tr_unit_codes') predictors(`early_predictors') ///
justall(1) noplot_tc_ci nooutput_vmat nooutput_wmat nooutput_permn nooutput_pvals nooutput_X0_X1 nooutput_graph_data ${do_nest} ///
file_suff(`file_suff'_pret_sub_predrop) ttime(`last_pre_year') end(`last_pre_year') perms(`perms') ///
donor_limit_for_match_cmd(`donor_limit_for_match_cmd')
}
count if codigo==codigo[1]
local nyears = r(N)
tempfile initdata
qui save `initdata'
use "${dir_base}/data/estimates/gen_perm_br_`file_suff'_pret_sub_predrop.dta", clear
cap keep if unit_type==${Unit_type_donor}
cap drop unit_type
local placebo_tper = `tper'-1
qui summ PE`placebo_tper', detail
local p_low = "`r(p`width')'"
local p_high = "`r(p`=100-`width'')'"
di "Removing MCAs with deviations larger less than `p_low' and larger than `p_high'"
use `initdata', clear
*Now repeatedly do synth (just 1 post-t year) but clearing away any donors that are above
*might throw out some donors, but that's OK.
*Hopefully I get to a stable set
local last_N = "."
local trim_i = 1
local keepers_commaed = subinstr(trim("`tr_unit_codes' `keepunits'"), " ", ", ", .)
while _N!=`last_N' {
local last_N = _N
di "Starting trim iteration `trim_i'. We have `=`last_N'/`nyears'' units (`last_N' obs) left"
eval_synth_model, depvar(`depvar') predictors(`predictors') perms(`perms') ttime(`precise_tyear') ///
file_suff("`file_suff'_trial`trim_i'") tr_unit_codes(`tr_unit_codes') ${do_nest} ///
noplot_tc_ci nooutput_vmat nooutput_wmat nooutput_permn nooutput_X0_X1 nooutput_pvals nooutput_graph_data ///
end(`first_treatmentyear') donor_limit_for_match_cmd(`donor_limit_for_match_cmd')
*Update the todrop file
tempfile inner
qui save `inner'
use "${dir_base}/data/estimates/gen_perm_br_`file_suff'_trial`trim_i'.dta", replace
keep if unit_type==${Unit_type_donor}
drop unit_type
qui keep if (PE`tper'<`p_low' | PE`tper'>`p_high')
gen reason = ${Synth_PE_low}
replace reason = ${Synth_PE_high} if (PE`tper'>`p_high')
keep codigo reason
cap append using "${dir_base}/data/estimates/todrop_`file_suff'.dta"
qui save "${dir_base}/data/estimates/todrop_`file_suff'.dta", replace
use `inner', clear
*Remove the contaminated ones
merge m:1 codigo using "${dir_base}/data/estimates/todrop_`file_suff'.dta", keep(master match) keepusing(codigo) noreport
di "Dropping for a loop of -identify_donors_placebo-"
drop if _merge==3 & !inlist(codigo, `keepers_commaed')
drop _merge
local trim_i = `trim_i'+1
if "`onlyonce'"!=""{
continue, break
}
}
di "Finished determining other treated units"
end
|
*! v0.2 Brian Quistorff <bquistorff@gmail.com>
*! A replacement for -tempfile- that provides more options for non-local outputs
*! 1) Creates auto-named globally-scoped files in tmpdir (ie persistent files the user is in charge of deleting)
*! 2) Can assign the file names to global macros
* Main usage: a script that uses tempfiles has an error and it is hard to recover because the tempfiles were removed at the end.
* Solution:
* 1) replace -tempfile- with -interimfile- so that the file will stick around after end
* 2) You can recover the locals that pointed to the files with -interimfile, recover_locals- to investigate
* 3) Once done, do -interimfile, rm_instance_interims- to remove the files
* rm_all_tempfiles is helpful if Stata crashed and left files around (doesn't remove the mini-temp do files created).
* Assumes the c(tmpdir) doesn't change between assignments and rm_*_interims or recover_locals
* Wish this could also be a pass-through to -tempfile- but they'd be auto-deleted at the end of this program
* This should work on unix (though not tested thoroughly) but I'm not sure of the tempfile naming conventions on other platforms
program interimfile
version 11.0 //just a guess here
syntax [namelist] [, globals rm_instance_interims rm_all_interims rm_all_tempfiles recover_locals]
local deleting "`rm_instance_interims'`rm_all_interims'`rm_all_tempfiles'"
if "`namelist'"!="" & "`deleting'"!=""{
di as error "Can't create files and delete existing ones at the same time"
error 1
}
if "${INTERIMFILE_INST_ID}"=="" interimfile_INST_ID INTERIMFILE_INST_ID
local win = ("`c(os)'"=="Windows")
local i_pre "S"
local tmproot ="`c(tmpdir)'"+cond(`win',"","/")
if "`namelist'"!=""{
local macro_assign = cond("`globals'"!="","global","c_local")
if "${INTERIMFILE_FILE_NUM}"==""{
interimfile_delete, win(`win') i_pre(`i_pre') tmproot(`tmproot') rm_instance_interims
global INTERIMFILE_FILE_NUM = -1
}
local namelist_num : list sizeof namelist
forval i=1/`namelist_num'{
global INTERIMFILE_FILE_NUM = ${INTERIMFILE_FILE_NUM}+1
local id_str = string(${INTERIMFILE_FILE_NUM},"%06.0f")
local name : word `i' of `namelist'
local fname ="`i_pre'"+cond(`win',"ST_${INTERIMFILE_INST_ID}`id_str'.tmp","St${INTERIMFILE_INST_ID}.`id_str'")
`macro_assign' `name' "`tmproot'`fname'"
}
if "`globals'"=="" global INTERIMFILE_INST_locs "${INTERIMFILE_INST_locs} `namelist'"
}
if "`deleting'"!=""{
interimfile_delete, win(`win') i_pre(`i_pre') tmproot(`tmproot') `rm_instance_interims' `rm_all_interims' `rm_all_tempfiles'
}
if "`recover_locals'"!=""{
forval i=1/`:word count $INTERIMFILE_INST_locs'{
local lname : word `i' of $INTERIMFILE_INST_locs
local id_str = string(`=`i'-1',"%06.0f")
local fname ="`tmproot'`i_pre'"+cond(`win',"ST_${INTERIMFILE_INST_ID}`id_str'.tmp","St${INTERIMFILE_INST_ID}.`id_str'")
c_local `lname' `fname'
}
}
end
program interimfile_delete
syntax, win(string) i_pre(string) tmproot(string) [rm_instance_interims rm_all_interims rm_all_tempfiles]
local f_prefix = cond(`win',"ST_","St")
local any_inst = cond(`win',"??","?????")
local any_seqno = cond(`win',"??????.tmp",".??????")
if "`rm_instance_interims'"!="" local pattern = "`i_pre'`f_prefix'${INTERIMFILE_INST_ID}`any_seqno'"
if "`rm_all_interims'" !="" local pattern = "`i_pre'`f_prefix'`any_inst'`any_seqno'"
if "`rm_all_tempfiles'" !="" local pattern = "`f_prefix'`any_inst'`any_seqno'"
local files_to_delete : dir "`tmproot'" files "`pattern'", respectcase
foreach file_to_delete of local files_to_delete{
rm "`tmproot'`file_to_delete'"
}
if "`rm_instance_interims'`rm_all_interims'"!=""{
macro drop INTERIMFILE_INST_locs INTERIMFILE_FILE_NUM
}
end
program interimfile_INST_ID
args gname
*Mostly from http://www.stata.com/statalist/archive/2007-08/msg01124.html
tempfile tfullfile
*I think it only goes 0-9a-w but just in case.
local matc = regexm("`tfullfile'",cond("`c(os)'"=="Windows","ST_([a-z0-9][a-z0-9])([a-z0-9]+)\.tmp$","St([a-z0-9]+)\.([a-z0-9]+)$"))
if `matc'!=1 error 1
global `gname' `=regexs(1)'
end
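* Usage sketch (illustrative comment, not part of the original file): replace -tempfile-
* with -interimfile- so the file survives an error, then clean up by hand.
*   interimfile results                  // local `results' now points to a persistent temp file
*   save "`results'"
*   // ... more work that might fail ...
*   interimfile, recover_locals          // later in the same session, restore the local(s) to inspect the file
*   interimfile, rm_instance_interims    // delete this instance's interim files when done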
|
*! version 0.1 Brian Quistorff <bquistorff@gmail.com>
* Description: Generates strings corresponding to
* ISO 8601 date and date-time formats. These may
* be useful for generating log-file filenames.
* Returns:
* s(iso8601_d) : 2000-12-25
* s(iso8601_dt) : 2000-12-25T13:01:01
* s(iso8601_dt_file): 2000-12-25T13-01-01
* s(unix_ts) : 977749261 (seconds since the 1970 epoch)
*
* Author: Brian Quistorff (bquistorff@gmail.com)
program define iso8601_strs, sclass
version 12
local curr_date = "`c(current_date)'"
local curr_time = "`c(current_time)'"
local unix_ts : display %12.0g clock("`curr_date' `curr_time'", "DMY hms" )/1000 - clock("1 Jan 1970", "DMY" )/1000
local date : display %tdCCYY-NN-DD date("`curr_date'", "DMY" )
local curr_t_str = subinstr("`curr_time'",":","-",.)
sreturn local iso8601_d "`date'"
sreturn local iso8601_dt = "`date'T`curr_time'"
sreturn local iso8601_dt_file = "`date'T`curr_t_str'"
sreturn local unix_ts = trim("`unix_ts'")
end
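* Usage sketch (illustrative comment; the log-file name is an assumption):
*   iso8601_strs
*   log using "run_`s(iso8601_dt_file)'.log", text replace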
|
*! v0.2 Brian Quistorff <bquistorff@gmail.com>
*! reports whether the string is an absolute path (or URL): returns r(is_abs_path) and, optionally, a caller local
program is_abs_path, rclass
version 11.0 //just a guess here
syntax anything(everything name=place) [, local(string)]
local place `place' //strip any enclosing quotes
*Absolute if it is a URL, starts with "/", or starts with a drive letter ("X:")
local rv = (substr("`place'",1,5)=="http:") | (substr("`place'",1,6)=="https:") | (substr("`place'",1,1)=="/") | (substr("`place'",2,1)==":")
return scalar is_abs_path = `rv'
if "`local'"!="" c_local `local' `rv'
end
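* Usage sketch (illustrative comment; paths are assumptions):
*   is_abs_path "data/raw.dta"
*   di r(is_abs_path)                     // 0 -- relative path
*   is_abs_path "C:/project/raw.dta", local(abs)
*   di `abs'                              // 1 -- absolute path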
|
*! ivreg2out 1.0
*BQ: 2013-09-24 removed the N_Unique ereturn
* (had a problem with my setup. I think my est's didn't have e(r))
* Combines the two stage estimates into a single estimate
* Originally from roywada@hotmail.com http://www.stata.com/statalist/archive/2009-09/msg00043.html
prog define ivreg2out, eclass
version 8.0
qui {
args one two
local name1=subinstr("`one'","_ivreg2_","",1)
local name2=subinstr("`two'","_ivreg2_","",1)
est restore `one'
tempname b1 V1 b2 V2 v1 v2 b v V
mat `b1'=e(b)
mat `V1'=e(V)
matrix coleq `b1' = `name1'
matrix coleq `V1' = `name1'
local r2_first=e(r2)
est restore `two'
mat `b2'=e(b)
mat `V2'=e(V)
matrix coleq `b2' = `name2'
matrix coleq `V2' = `name2'
mat `b'=`b1',`b2'
mat `v1'=vecdiag(`V1')
mat `v2'=vecdiag(`V2')
mat `v'=`v1',`v2'
mat `V'=diag(`v')
local r2_second=e(r2)
local N=e(N)
local widstat=e(widstat)
local N_unique=e(r)
eret post `b' `V'
eret scalar N=`N'
eret scalar r2_1=`r2_first'
eret scalar r2_2=`r2_second'
eret scalar widstat=`widstat'
* eret scalar N_unique=`N_unique'
eret loc cmd="ivreg2out"
eret loc eqnames= "`name1' `name2'"
eret loc depvar= "`name1' `name2'"
}
end
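* Usage sketch (illustrative comment; variable and estimate names are assumptions, and it
* presumes -ivreg2-'s savefirst option stored the first stage as _ivreg2_x):
*   ivreg2 y (x = z1 z2), savefirst
*   est store _ivreg2_main
*   ivreg2out _ivreg2_x _ivreg2_main
*   est table, b se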
|
*! 1.2 Brian Quistorff <bquistorff@gmail.com>
* 26-09-2013 modification of -latabstat- (see SSC) to remove the Source footnote and make a smaller tex fragment.
program define latabstat_simple, rclass byable(recall) sort
version 6
syntax varlist(numeric) [if] [in] [aw fw] [ , /*
*/ BY(varname) CASEwise Columns(str) Format Format2(str) /*
*/ LAbelwidth(int 16) LOngstub Missing /*
*/ SAME SAVE noSEP Statistics(str) STATS(str) noTotal /*
*/ tf(string) Replace APPend TX(string) CAPtion(string) /*
*/ CLabel(string) HWidth(string)]
if "`tx'" ~=""{
if "`tx'"=="0" {
local wide="\linewidth"
}
else {
local wide="`tx'cm"
}
}
if "`hwidth'" ~=""{
local hwide="`hwidth'"
local fhwide=`hwidth'+2
}
else {
local hwide="8"
local fhwide="10"
}
tempname hh
if "`casewise'" != "" {
local same same
}
if `"`stats'"' != "" {
if `"`statistics'"' != "" {
di in red /*
*/ "may not specify both statistics() and stats() options"
exit 198
}
local statist `"`stats'"'
local stats
}
if "`total'" != "" & "`by'" == "" {
di in gr "nothing to display
exit 0
}
if "`format'" != "" & `"`format2'"' != "" {
di in re "may not specify both format and format()"
exit 198
}
if `"`format2'"' != "" {
capt local tmp : display `format2' 1
if _rc {
di in re `"invalid %format in format() : `format2'"'
exit 120
}
}
if `"`columns'"' == "" {
local incol "variables"
}
else if `"`columns'"' == substr("variables",1,length(`"`columns'"')) {
local incol "variables"
}
else if `"`columns'"' == substr("statistics",1,length(`"`columns'"')) {
local incol "statistics"
}
else {
di in red `"column(`columns') invalid -- specify "' /*
*/ "column(variables) or column(statistics)"
exit 198
}
if "`longstub'" != "" | "`by'" == "" {
local descr descr
}
* sample selection
marksample touse, novar
if "`same'" != "" {
markout `touse' `varlist'
}
if "`by'" != "" & "`missing'" == "" {
markout `touse' `by' , strok
}
qui count if `touse'
local ntouse = r(N)
if `ntouse' == 0 {
error 2000
}
if `"`weight'"' != "" {
local wght `"[`weight'`exp']"'
}
* varlist -> var1, var2, ... variables
* fmt1, fmt2, ... display formats
tokenize "`varlist'"
local nvars : word count `varlist'
local i 1
while `i' <= `nvars' {
local var`i' ``i''
if "`format'" != "" {
local fmt`i' : format ``i''
}
else if `"`format2'"' != "" {
local fmt`i' `format2'
}
else local fmt`i' %9.0g
local i = `i' + 1
}
if `nvars' == 1 & `"`columns'"' == "" {
local incol statistics
}
* Statistics
Stats `statistics'
local stats `r(names)'
local expr `r(expr)'
local summopt `r(summopt)'
local nstats : word count `stats'
tokenize `expr'
local i 1
while `i' <= `nstats' {
local expr`i' ``i''
local i = `i' + 1
}
tokenize `stats'
local i 1
while `i' <= `nstats' {
local name`i' ``i''
local names "`names' ``i''"
if `i' < `nstats' { local names "`names'," }
local i = `i' + 1
}
if "`sep'" == "" & ( (`nstats' > 1 & "`incol'" == "variables") /*
*/ |(`nvars' > 1 & "`incol'" == "statistics")) {
local sepline yes
}
local matsize : set matsize
local matreq = max(`nstats',`nvars')
if `matsize' < `matreq' {
di in re "set matsize to at least `matreq' (see help matsize for details)"
exit 908
}
* compute the statistics
* ----------------------
if "`by'" != "" {
* conditional statistics are saved in matrices Stat1, Stat2, etc
* the data are sorted on by groups, putting unused obs last
* be careful not to change the sort order
* note that touse is coded -1/0 rather than 1/0!
qui replace `touse' = - `touse'
sort `touse' `by'
local bytype : type `by'
local by2 0
local iby 1
while `by2' < `ntouse' {
tempname Stat`iby'
mat `Stat`iby'' = J(`nstats',`nvars',0)
mat colnames `Stat`iby'' = `varlist'
mat rownames `Stat`iby'' = `stats'
* range `iby1'/`iby2' refer to obs in the current by-group
local by1 = `by2' + 1
qui count if (`by'==`by'[`by1']) & (`touse')
local by2 = `by1' + r(N) - 1
* loop over all variables
local i 1
while `i' <= `nvars' {
qui summ `var`i'' in `by1'/`by2' `wght', `summopt'
local is 1
while `is' <= `nstats' {
* set matrix[is,i] with mv-handling
SetMat `Stat`iby''[`is',`i'] `expr`is''
local is = `is' + 1
}
local i = `i' + 1
}
* save label for groups in lab1, lab2 etc
if substr("`bytype'",1,3) != "str" {
local iby1 = `by'[`by1']
local lab`iby' : label (`by') `iby1'
}
else local lab`iby' = `by'[`by1']
local iby = `iby' + 1
}
local nby = `iby' - 1
/* wwg did not like this. Fine.
if `nby' == 1 {
di in gr "(`by' is constant on selected sample)"
local by
local nby 0
}
*/
}
else local nby 0
if "`total'" == "" {
* unconditional (Total) statistics are stored in Stat`nby+1'
local iby = `nby'+1
tempname Stat`iby'
mat `Stat`iby'' = J(`nstats',`nvars',0)
mat colnames `Stat`iby'' = `varlist'
mat rownames `Stat`iby'' = `stats'
local i 1
while `i' <= `nvars' {
qui summ `var`i'' if `touse' `wght' , `summopt'
local is 1
while `is' <= `nstats' {
* set matrix[is,i] with mv-handling
SetMat `Stat`iby''[`is',`i'] `expr`is''
local is = `is' + 1
}
local i = `i' + 1
}
local lab`iby' "Total"
}
* display results
* ---------------
di
* di in gr "\begin{table}[htbp]\centering"
if "`tx'"~=""{
di in gr "\newcolumntype{Y}{>{\raggedleft\arraybackslash}X}"
di in gr "\parbox{`wide'} {"
}
* constants for displaying results
local labw = min(max(`labelwidth',8),60)
* note changing 32 to 60 determines greatest width for table stub
if "`by'" != "" {
if substr("`bytype'",1,3) != "str" {
local lv : value label `by'
if "`lv'" != "" {
local lg : label (`by') maxlength
local wby = min(`labw',`lg')
}
else local wby 8
}
else {
local wby = min(real(substr("`bytype'",4,.)),`labw')
local bytype str
}
local wby = max(length("`by'"), `wby')
}
else local wby 8
local lleft = (1 + `wby')*("`by'"!="") + 9*("`descr'"!="")
local cbar = `lleft' + 1
local lsize : set display linesize
* number of non-label elements in the row of a block
local neblock = int((`lsize' - `cbar')/10)
* number of blocks if stats horizontal
local nsblock = 1 + int((`nstats'-1)/`neblock')
* number of blocks if variables horizontal
local nvblock = 1 + int((`nvars'-1)/`neblock')
* left align by-label if also descr
if "`descr'" != "" & "`by'" != "" {
local aby "-"
}
if "`incol'" == "statistics" {
* display the results: horizontal = statistics (block wise)
* ---------------------------------------------------------
* header
* di in gr "\caption{\label{`clabel'} "
* di in gr _c "\textbf{`caption'} }"
* loop over all nsblock blocks of statistics
local isblock 1
local is2 0
while `isblock' <= `nsblock' {
* is1..is2 are indices of statistics in a block
local is1 = `is2' + 1
local is2 = min(`nstats', `is1'+`neblock'-1)
if "`tx'"~="" {
di in gr "}"
di in gr _c "\begin{tabularx} {`wide'} {@{} l"
}
else {
di in gr _c "\begin{tabular} {@{} l"
}
local p=1
while `p'<= `nstats' {
if "`tx'"~="" {
di in gr _c " Y"
}
else {
di in gr _c " r"
}
local p=`p'+1
} // end while loop
di in gr _c " @{}} \\\ \hline"
di
di in gr _c "\textbf{"
* display header
if "`by'" != "" { di in gr %`aby'`wby's "`by'" " " _c }
if "`descr'" != "" { di in gr "variable " _c }
di in gr "} & \textbf{" _c
local is `is1'
while `is' <= `is2' {
di in gr %10s "`name`is''" _c
if `is' < `is2'{
* < ensures last column does not end with column separator
di in gr "} & \textbf{" _c
}
local is = `is' + 1
}
di in gr "} \\\"
di in gr "\\hline"
* loop over the categories of -by- (1..nby) and -total- (nby+1)
local nbyt = `nby' + ("`total'"=="")
local iby 1
while `iby' <= `nbyt'{
local i 1
while `i' <= `nvars' {
if "`by'" != "" {
if `i' == 1 {
local lab = substr(`"`lab`iby''"', 1, `wby')
di in ye %`aby'`wby's `"`lab'"' " " _c
}
else di in ye _skip(`wby') " " _c
}
if "`descr'" != "" {
di in ye %`fhwide's abbrev("`var`i''", `hwide') " " _c
}
local is `is1'
while `is' <= `is2' {
GetMat s : `fmt`i'' `Stat`iby''[`is',`i']
di in ye " & " _c
di in ye %10s "`s'" _c
local is = `is' + 1
}
di in ye " \\\"
local i = `i' + 1
}
local iby = `iby' + 1
if ("`sepline'"!="") | (`iby'>`nbyt') | /*
*/ ((`iby'==`nbyt') & ("`total'"=="")) {
}
}
di in gr "\hline"
* di in gr _c "\multicolumn{" (`nstats'+1) "}{@{}l}{"
* di in gr "\footnotesize{\emph{Source:} $S_FN}}"
if "`tx'"~=""{
di in gr "\end{tabularx}"
}
else {
di in gr "\end{tabular}"
}
* di in gr "\end{table}"
local isblock = `isblock' + 1
if `isblock' <= `nsblock' {
display
}
} /* isblock */
}
else {
* display the results: horizontal = variables (block wise)
* --------------------------------------------------------
* header
* di in gr "\caption{\label{`clabel'} "
* di in gr _c "\textbf{`caption'} }"
if "`tx'"~="" {
di in gr "}"
di in gr _c "\begin{tabularx} {`wide'} {@{} l"
}
else {
di in gr _c "\begin{tabular} {@{} l"
}
local p=1
while `p'<= `nvars' {
if "`tx'"~="" {
di in gr _c " Y"
}
else {
di in gr _c " r"
}
local p=`p'+1
} // end while loop
di in gr _c " @{}} \\\ \hline"
di
* loop over all nvblock blocks of variables
local iblock 1
local i2 0
while `iblock' <= `nvblock' {
* i1..i2 are indices of variables in a block
local i1 = `i2' + 1
local i2 = min(`nvars', `i1'+`neblock'-1)
di in gr _c "\textbf{"
* display header
if "`by'" != "" { di in gr %`aby'`wby's "`by'" " " _c }
if "`descr'" != "" { di in gr " stats " _c }
di in gr "} & \textbf{" _c
local i `i1'
while `i' <= `i2' {
di in gr %`fhwide's abbrev("`var`i''",`hwide') _c
if `i' < `i2'{
di in gr "} & \textbf{" _c
}
local i = `i' + 1
}
di in gr "} \\\"
di in gr "\\hline"
* loop over the categories of -by- (1..nby) and -total- (nby+1)
local nbyt = `nby' + ("`total'"=="")
local iby 1
while `iby' <= `nbyt'{
local is 1
while `is' <= `nstats' {
if "`by'" != "" {
if `is' == 1 {
local lab = substr(`"`lab`iby''"', 1, `wby')
di in ye %`aby'`wby's `"`lab'"' " " _c
}
else di in ye _skip(`wby') " " _c
}
if "`descr'" != "" {
di in ye %8s "`name`is''" " " _c
}
local i `i1'
while `i' <= `i2' {
GetMat s : `fmt`i'' `Stat`iby''[`is',`i']
di in ye " & " _c
di in ye %10s "`s'" _c
local i = `i' + 1
}
di in ye " \\\"
local is = `is' + 1
}
local iby = `iby' + 1
if ("`sepline'"!="") | (`iby'>`nbyt') | /*
*/ ((`iby'==`nbyt') & ("`total'"=="")) {
}
}
di in gr "\hline"
* di in gr "\multicolumn{" (`i2'+1) "}{@{}l}{"
* di in gr "\footnotesize{\emph{Source:} $S_FN}}"
if "`tx'"~=""{
di in gr "\end{tabularx}"
}
else {
di in gr "\end{tabular}"
}
* di in gr "\end{table}"
local iblock = `iblock' + 1
if `iblock' <= `nvblock' {
display
}
} /* iblock */
}
* save results (mainly for certification)
* ---------------------------------------
if "`save'" != "" {
if "`total'" == "" {
local iby = `nby'+1
return matrix StatTot `Stat`iby''
}
if "`by'" == "" { exit }
local iby 1
while `iby' <= `nby' {
return matrix Stat`iby' `Stat`iby''
return local name`iby' `"`lab`iby''"'
local iby = `iby' + 1
}
}
* send to file if requested
* --------------------------
if "`tf'" ~="" {
if "`replace'" == "replace" {local opt "replace"}
if "`append'" == "append" {local opt "append"}
file open `hh' using"`tf'.tex", write `opt'
file write `hh' _n
* send to file the results
* ------------------------
file write `hh' _n
* file write `hh' "\begin{table}[htbp]\centering" _n
if "`tx'"~=""{
file write `hh' "\newcolumntype{Y}{>{\raggedleft\arraybackslash}X}" _n
file write `hh' "\parbox{`wide'} {" _n
}
* constants for displaying results
local labw = min(max(`labelwidth',8),60)
* note changing 32 to 60 determines greatest width for row labels
if "`by'" != "" {
if substr("`bytype'",1,3) != "str" {
local lv : value label `by'
if "`lv'" != "" {
local lg : label (`by') maxlength
local wby = min(`labw',`lg')
}
else local wby 8
}
else {
local wby = min(real(substr("`bytype'",4,.)),`labw')
local bytype str
}
local wby = max(length("`by'"), `wby')
}
else local wby 8
local lleft = (1 + `wby')*("`by'"!="") + 9*("`descr'"!="")
local cbar = `lleft' + 1
local lsize : set display linesize
* number of non-label elements in the row of a block
local neblock = int((`lsize' - `cbar')/10)
* number of blocks if stats horizontal
local nsblock = 1 + int((`nstats'-1)/`neblock')
* number of blocks if variables horizontal
local nvblock = 1 + int((`nvars'-1)/`neblock')
* left align by-label if also descr
if "`descr'" != "" & "`by'" != "" {
local aby "-"
}
if "`incol'" == "statistics" {
* send to file the results: horizontal = statistics (block wise)
* --------------------------------------------------------------
* header
* file write `hh' "\caption{\label{`clabel'} " _n
* file write `hh' "\textbf{`caption'} }"
* loop over all nsblock blocks of statistics
local isblock 1
local is2 0
while `isblock' <= `nsblock' {
* is1..is2 are indices of statistics in a block
local is1 = `is2' + 1
local is2 = min(`nstats', `is1'+`neblock'-1)
if "`tx'"~="" {
file write `hh' "}" _n
file write `hh' "\begin{tabularx} {`wide'} {@{} l"
}
else {
file write `hh' "\begin{tabular} {@{} l"
}
local p=1
while `p'<= `nstats' {
if "`tx'"~="" {
file write `hh' " Y"
}
else {
file write `hh' " r"
}
local p=`p'+1
} // end while loop
file write `hh' " @{}} \\\ \hline"
file write `hh' _n
file write `hh' "\textbf{"
* display header
if "`by'" != "" { file write `hh' %`aby'`wby's "`by'" " " }
if "`descr'" != "" { file write `hh' "variable " }
file write `hh' "} & \textbf{"
local is `is1'
while `is' <= `is2' {
file write `hh' %10s "`name`is''"
if `is' < `is2'{
* < ensures last column does not end with column separator
file write `hh' "} & \textbf{"
}
local is = `is' + 1
}
file write `hh' "} \\\" _n
file write `hh' "\\hline" _n
* loop over the categories of -by- (1..nby) and -total- (nby+1)
local nbyt = `nby' + ("`total'"=="")
local iby 1
while `iby' <= `nbyt'{
local i 1
while `i' <= `nvars' {
if "`by'" != "" {
if `i' == 1 {
local lab = substr(`"`lab`iby''"', 1, `wby')
file write `hh' %`aby'`wby's `"`lab'"' " "
}
else file write `hh' _skip(`wby') " "
}
if "`descr'" != "" {
file write `hh' %`fhwide's (abbrev("`var`i''",`hwide')) " "
}
local is `is1'
while `is' <= `is2' {
GetMat s : `fmt`i'' `Stat`iby''[`is',`i']
file write `hh' " & "
file write `hh' %10s "`s'"
local is = `is' + 1
}
file write `hh' " \\\" _n
local i = `i' + 1
}
local iby = `iby' + 1
if ("`sepline'"!="") | (`iby'>`nbyt') | /*
*/ ((`iby'==`nbyt') & ("`total'"=="")) {
}
}
file write `hh' "\hline" _n
* file write `hh' "\multicolumn{" (`nstats'+1) "}{@{}l}{"
* file write `hh' "\footnotesize{\emph{Source:} $S_FN}}" _n
if "`tx'"~=""{
file write `hh' "\end{tabularx}" _n
}
else {
file write `hh' "\end{tabular}" _n
}
* file write `hh' "\end{table}" _n
file write `hh' _n
file write `hh' _n
di
di in white "The table has been written to the file:`tf'.tex"
file write `hh' _n
file close `hh'
local isblock = `isblock' + 1
if `isblock' <= `nsblock' {
file write `hh' _n
}
} /* isblock */
}
else {
* send to file the results: horizontal = variables (block wise)
* -------------------------------------------------------------
* header
* file write `hh' "\caption{\label{`clabel'} " _n
* file write `hh' "\textbf{`caption'} }"
if "`tx'"~="" {
file write `hh' "}" _n
file write `hh' "\begin{tabularx} {`wide'} {@{} l"
}
else {
file write `hh' "\begin{tabular} {@{} l"
}
local p=1
while `p'<= `nvars' {
if "`tx'"~="" {
file write `hh' " Y"
}
else {
file write `hh' " r"
}
local p=`p'+1
} // end while loop
file write `hh' " @{}} \\\ \hline"
di
* loop over all nvblock blocks of variables
local iblock 1
local i2 0
while `iblock' <= `nvblock' {
* i1..i2 are indices of variables in a block
local i1 = `i2' + 1
local i2 = min(`nvars', `i1'+`neblock'-1)
file write `hh' "\textbf{"
* display header
if "`by'" != "" { file write `hh' %`aby'`wby's "`by'" " " }
if "`descr'" != "" { file write `hh' " stats " }
file write `hh' "} & \textbf{"
local i `i1'
while `i' <= `i2' {
file write `hh' %`fhwide's (abbrev("`var`i''",`hwide'))
if `i' < `i2'{
file write `hh' "} & \textbf{"
}
local i = `i' + 1
}
file write `hh' "} \\\" _n
file write `hh' "\\hline" _n
* loop over the categories of -by- (1..nby) and -total- (nby+1)
local nbyt = `nby' + ("`total'"=="")
local iby 1
while `iby' <= `nbyt'{
local is 1
while `is' <= `nstats' {
if "`by'" != "" {
if `is' == 1 {
local lab = substr(`"`lab`iby''"', 1, `wby')
file write `hh' %`aby'`wby's `"`lab'"' " "
}
else file write `hh' _skip(`wby') " "
}
if "`descr'" != "" {
file write `hh' %8s "`name`is''" " "
}
local i `i1'
while `i' <= `i2' {
GetMat s : `fmt`i'' `Stat`iby''[`is',`i']
file write `hh' " & "
file write `hh' %10s "`s'"
local i = `i' + 1
}
file write `hh' " \\\" _n
local is = `is' + 1
}
local iby = `iby' + 1
if ("`sepline'"!="") | (`iby'>`nbyt') | /*
*/ ((`iby'==`nbyt') & ("`total'"=="")) {
}
}
file write `hh' "\hline" _n
* file write `hh' "\multicolumn{" (`i2'+1) "}{@{}l}{"
* file write `hh' "\footnotesize{\emph{Source:} $S_FN}}" _n
if "`tx'"~=""{
file write `hh' "\end{tabularx}" _n
}
else {
file write `hh' "\end{tabular}" _n
}
* file write `hh' "\end{table}"_n
file write `hh' _n
file write `hh' _n
di
di in white "The table has been written to the file:`tf'.tex"
file write `hh' _n
file close `hh'
local iblock = `iblock' + 1
if `iblock' <= `nvblock' {
display
}
} /* iblock */
}
} // end send to file
end
* As a work around that matrices can't have missing values,
* mv's are coded as the magic number 1e+300
program define SetMat
matrix `1' = cond(`2' != ., `2', 1e+300)
end
program define GetMat
args rslt colon fmt exp
local s : display `fmt' =cond(`exp' != 1e+300, `exp', .)
c_local `rslt' `s'
end
/* Stats str
processes the contents() option. It returns in
r(names) -- names of statistics, separated by blanks
r(expr) -- r() expressions for statistics, separated by blanks
r(summopt) -- option for summarize command (meanonly, detail)
*/
program define Stats, rclass
if `"`0'"' == "" {
local 0 "mean"
}
* ensure that order of requested statistics is preserved
* invoke syntax for each word in input
local class 0
tokenize `0'
while "`1'" != "" {
local 0 = lower(`", `1'"')
syntax [, n MEan sd SUm COunt MIn MAx Range SKewness Kurtosis /*
*/ SDMean p1 p5 p10 p25 p50 p75 p90 p95 p99 iqr q MEDian ]
if "`median'" != "" {
local p50 p50
}
if "`count'" != "" {
local n n
}
* class 1 : available via -summarize, meanonly-
* summarize.r(N) returns #obs (note capitalization)
if "`n'" != "" {
local n N
}
local s "`n'`min'`mean'`max'`sum'"
if "`s'" != "" {
local names "`names' `s'"
local expr "`expr' r(`s')"
local class = max(`class',1)
}
if "`range'" != "" {
local names "`names' range"
local expr "`expr' r(max)-r(min)"
local class = max(`class',1)
}
* class 2 : available via -summarize-
if "`sd'" != "" {
local names "`names' sd"
local expr "`expr' r(sd)"
local class = max(`class',2)
}
if "`sdmean'" != "" {
local names "`names' sd(mean)"
local expr "`expr' r(sd)/sqrt(r(N))"
local class = max(`class',2)
}
* class 3 : available via -detail-
local s "`skewness'`kurtosis'`p1'`p5'`p10'`p25'`p50'`p75'`p90'`p95'`p99'"
if "`s'" != "" {
local names "`names' `s'"
local expr "`expr' r(`s')"
local class = max(`class',3)
}
if "`iqr'" != "" {
local names "`names' iqr"
local expr "`expr' r(p75)-r(p25)"
local class = max(`class',3)
}
if "`q'" != "" {
local names "`names' p25 p50 p75"
local expr "`expr' r(p25) r(p50) r(p75)"
local class = max(`class',3)
}
mac shift
}
return local names `names'
return local expr `expr'
if `class' == 1 {
return local summopt "meanonly"
}
else if `class' == 3 {
return local summopt "detail"
}
end
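* Usage sketch (illustrative comment; the output file name is an assumption):
*   sysuse auto, clear
*   latabstat_simple price mpg weight, by(foreign) statistics(mean sd) tf(sumstats) replace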
|
*! version 0.1 Brian Quistorff
*! Wraps the anything argument (lines separated by \\, e.g. line1\\line2) in a LaTeX multi-line cell
*There are several ways
*1) Using packages
*1a) \usepackage{makecell}
*1b) \usepackage{pbox} (have to specify a max width) (better than parbox)
*1c) minipage (have to specify a width)
*1d) shortstack
* 2) Plain Latex
* 2a) insert a 1 column table
* 2b) You can make vbox of hboxes (but have to parse and insert separately the lines)
*
* Refs:
* http://tex.stackexchange.com/questions/2441/
* http://tex.stackexchange.com/questions/38924/
program latex_multiline_cell
version 11.0 //just a guess here
syntax anything(equalok everything), loc_out(string)
*local out `"\begin{tabular}[x]{@{}c@{}}`anything'\end{tabular}"' //t=vcentering (t,b,c); hcenter: l@ or r@
*local out `"\pbox{\textwidth}{`anything'}"'
local out `"\makecell{`anything'}"'
c_local `loc_out' `"`out'"'
end
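* Usage sketch (illustrative comment):
*   latex_multiline_cell Long header\\broken over two lines, loc_out(cell)
*   di `"`cell'"'      // \makecell{Long header\\broken over two lines}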
|
*! version 1.0 Brian Quistorff bquistorff@gmail.com
*! Makes a multiline label for a variable to be used in a tex table.
*! Usage: latex_multiline_var_label , lines("line 1" "line 2")
program latex_multiline_var_label
version 10.0 //guess
syntax , lines(string asis) [local(string) var2label(string) noindent nomultirow n_rows(string)]
if "`indent'"!="noindent" loc indent_str " \enskip{}"
local n_lines : word count `lines'
forval i=1/`n_lines'{
local line : word `i' of `lines'
if `i'>1 local inside `"`inside'\\ `indent_str'"'
local inside `"`inside'`line'"'
}
latex_multiline_cell `inside', loc_out(inside_broken)
if "`n_rows'"=="" loc n_rows `n_lines'
local full `"\multirow{`n_rows'}{*}{`inside_broken'}"'
if "`var2label'"!="" label variable `var2label' `"`full'"'
if "`local'" !="" c_local `local' `"`full'"'
end
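* Usage sketch (illustrative comment):
*   latex_multiline_var_label , lines("GDP per capita" "(2010 USD)") local(lab)
*   di `"`lab'"'       // \multirow{2}{*}{\makecell{...}}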
|
*! version 1.0 Brian Quistorff <bquistorff@gmail.com>
*! Creates axis ticks (major & minor) for log scales
*! (Stata's auto ticks are bad)
program log_axis_ticks, sclass
version 11.0
*Just a guess at the version
syntax , [range(numlist) vars(varlist numeric) label_scale(string)]
* Get the range
if "`range'"!=""{
local min : word 1 of `range'
local max : word 2 of `range'
}
else {
local min = .
local max = .
foreach v in `vars'{
summ `v', meanonly
if `min'==. | r(min)<`min'{
local min = r(min)
}
if `max'==. | r(max)>`max'{
local max = r(max)
}
}
}
local logdiff = log10(`max'/`min')
if `logdiff'<1{
* If the range is small, just put down 4 markers at equal proportions.
* Not sure what would be better here, but there probably is something
local maj_factor = (`max'/`min')^(1/3)
local major_lst "`min' `=`min'*`maj_factor'' `=`min'*`maj_factor'^2' `max'"
local num_min_small 12
local min_factor = (`max'/`min')^(1/`num_min_small')
local min_next = `min'
forval i=1/`num_min_small'{
local minor_lst "`minor_lst' `min_next'"
local min_next = `min_next'*`min_factor'
}
}
else {
local min_mag_not_bigger = 10^floor(log10(`min'))
local min_mag_not_smaller = 10^ceil(log10(`min'))
local fdigit = substr("`min'",1,1)
* If intermediate do the 2,5,10 scale
if `logdiff'<2 {
local multi_lst = "2 2.5 2"
if (`fdigit'==5 & `min'>5*`min_mag_not_bigger') | `fdigit'>5 | `min'==`min_mag_not_smaller' {
local major_lst = `min_mag_not_smaller'
local multi_ind = 1
}
else {
if (`fdigit'==2 & `min'>2*`min_mag_not_bigger') | `fdigit'>2 {
local major_lst = 5* `min_mag_not_bigger'
local multi_ind = 3
}
else {
local major_lst = 2*`min_mag_not_bigger'
local multi_ind = 2
}
}
local last = `major_lst'
while 1{
local next = `last'*`: word `multi_ind' of `multi_lst''
if `next'>`max'{
continue, break
}
local major_lst "`major_lst' `next'"
local last = `next'
local multi_ind = mod(`multi_ind',3)+1
}
}
*If really big just go up by equal orders of magnitude
else {
if `logdiff'<5 {
local step = 1
}
else {
local step = floor(`logdiff'/3)
}
local major_lst = `min_mag_not_smaller'
local last = `major_lst'
while 1{
local next = `last'*(10^`step')
if `next'>`max'{
continue, break
}
local major_lst "`major_lst' `next'"
local last = `next'
}
if `step'>1{
local min_next = `min_mag_not_bigger'
while 1{
local minor_lst "`minor_lst' `min_next'"
local min_next = 10*`min_next'
if `min_next'>= `max'{
continue, break
}
}
}
}
if "`minor_lst'"==""{
local minor_lst = `fdigit'*`min_mag_not_bigger'
local increment = `min_mag_not_bigger'
local mlast = `minor_lst'
while 1{
local mnext = `mlast' + `increment'
local minor_lst "`minor_lst' `mnext'"
if `mnext'>=`max'{
continue, break
}
local mlast = `mnext'
if `mlast' == 10^ceil(log10(`mlast')){
local increment = 10*`increment'
}
}
}
}
if "`label_scale'"!="" {
local major_lst_orig `"`major_lst'"'
local major_lst ""
foreach mtick in `major_lst_orig'{
local major_lst `"`major_lst' `mtick' "`=`mtick'/`label_scale''""'
}
sreturn local major_lst_orig = "`major_lst_orig'"
}
sreturn local major_ticks = `"`major_lst'"'
sreturn local minor_ticks = "`minor_lst'"
end
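* Usage sketch (illustrative comment):
*   sysuse auto, clear
*   log_axis_ticks, vars(price)
*   twoway scatter price mpg, yscale(log) ylabel(`s(major_ticks)') ymtick(`s(minor_ticks)')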
|
*! version 0.1
*! Puts commas between the words of a list (e.g., so it can be used in an -inlist()- expression)
*! This uses word-based parsing, which works even when the words are quoted strings
*! (where a simple " "->"," substitution won't work)
program make_for_inlist
version 11.0
*Just a guess at the version
syntax anything(everything), local(string)
local nwords : word count `anything'
local ret `""`: word 1 of `anything''""'
if `nwords'>1 {
forval i=2/`nwords' {
local ret `"`ret',"`:word `i' of `anything''""'
}
}
c_local `local' `"`ret'"'
end
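* Usage sketch (illustrative comment):
*   sysuse auto, clear
*   make_for_inlist "Buick Century" "Honda Civic", local(cars)
*   list make price if inlist(make, `cars')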
|
*! Version 1.2
*! Originally from: version 1.1.9 02feb2005 by Marc-Andreas Muendler: muendler@ucsd.edu
program define matload_simple
version 11.0 //just a guess here
syntax anything [, Path(string) ROWname(string) OVERwrite]
global err_mssg = ""
local rc 0
local matname= subinstr("`anything'",",","",1)
local currN = _N
local file = "`path'"
confirm file "`file'"
tempname chk
capture local `chk' = colsof(`matname')
if _rc==0 & "`overwrite'"=="" {
disp as err "no; matrix " in yellow "`matname'" in red " would be lost"
exit 4
}
local saved 0
if `currN'>0 {
tempfile tmp
quietly save `tmp'
local saved 1
drop _all
}
capture use "`file'", clear
if "`rowname'"=="" {
capture confirm variable _rowname
if _rc~=0 {
local rc = _rc
global err_mssg = "_rowname not found"
}
if _N==0 {
local rc = _rc
global err_mssg = "Data set empty or not Stata format"
}
}
else {
capture confirm variable `rowname'
if _rc~=0 {
local rc = _rc
global err_mssg = "`rowname' not found"
}
else {
rename `rowname' _rowname
}
if _N==0 {
local rc = _rc
global err_mssg = "Data set empty or not Stata format"
}
}
capture confirm new variable `matname'
if _rc~=0 {
local rc = _rc
global err_mssg = "matrix `matname' contains variable `matname'"
}
if _rc==0 {
capture {
local j 1
while `j' <= _N {
local rownm`j'=_rowname[`j']
local j = `j'+1
}
drop _rowname
}
if _rc~=0 & "${err_mssg}" == "" {
local rc = _rc
global err_mssg = "error (before mkmat was applied)"
}
capture mkmat _all, matrix(`matname')
if _rc~=0 & "${err_mssg}" == "" {
local rc = _rc
global err_mssg = "error (as mkmat was applied)"
}
capture {
local j 1
while `j' <= _N {
matname `matname' `rownm`j'', rows(`j') explicit
local j = `j'+1
}
local cnam : colfullnames `matname'
tokenize "`cnam'"
local j 1
while `j' <= colsof(`matname') {
local `j'=subinword("``j''","__cons","_cons",1)
local `j'=subinword("``j''","__b","_b",1)
local `j'=subinword("``j''","__coef","_coef",1)
matname `matname' ``j'' , columns(`j') explicit
local j=`j'+1
}
drop _all
}
}
if _rc~=0 & "${err_mssg}" == "" {
local rc = _rc
global err_mssg = "error (after mkmat was applied)"
}
if _rc==0 & `rc'==0 {
disp in green "matrix " in yellow "`matname'" in green " loaded"
}
if `saved' {
use `tmp', clear
disp in green "data in memory restored"
}
if `rc'~=0 {
disp as err "${err_mssg}"
error `rc'
}
global err_mssg = ""
exit `rc'
end
|
*! version 1.0 Brian Quistorff <bquistorff@gmail.com>
*! Sorts a Stata matrix by a column. Fixes problem in -matsort- where row labels with spaces are mangled
*! make sortcol negative if you want descending order
program matrixsort
version 11.0
*Just a guess at the version
args matname sortcol
mata: sort_st_matrix("`matname'", `sortcol')
end
mata:
void sort_st_matrix(string scalar matname, real scalar sortcol){
orig_mat = st_matrix(matname)
perm = order(orig_mat, sortcol)
sort_mat = orig_mat[perm,]
row_l = st_matrixrowstripe(matname)
sort_row_l = row_l[perm,]
st_replacematrix(matname, sort_mat)
st_matrixrowstripe(matname, sort_row_l)
}
end
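* Usage sketch (illustrative comment): sort descending on column 1, keeping row labels intact.
*   matrix A = (1, 5 \ 3, 2 \ 2, 9)
*   matrix rownames A = "unit a" "unit b" "unit c"
*   matrixsort A -1
*   matrix list A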
|
*! Version 1.0 Brian Quistorff <bquistorff@gmail.com>
*! Creates the post init and post string lines that will come from a matrix
program matrix_post_lines, sclass
version 12
syntax , matrix(string) varstub(string) varnumend(int) [varnumstart(int 1)]
*Create the post strings (faster in mata and nicer in traced-log)
local num_per =`varnumend'-`varnumstart'+1
mata: line_init = invtokens(J(1,`num_per', " float `varstub'") + strofreal(`varnumstart'..`varnumend'))
mata: line_post = invtokens(J(1,`num_per', " (`matrix'[") + strofreal(1..`num_per') + J(1,`num_per',",1])"))
*^^^ Does the same as the below.
/*forval i = `varnumstart'/`varnumend' {
local ps_init "`ps_init' float PE`i'"
local ps_posting "`ps_posting' (`matrix'[`=`i'-`varnumstart'+1',1])"
}*/
mata: st_local("ps_init", line_init)
mata: st_local("ps_posting", line_post)
sreturn local ps_init = "`ps_init'"
sreturn local ps_posting = "`ps_posting'"
end
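* Usage sketch (illustrative comment; matrix, stub, and file names are assumptions):
*   matrix PEs = (0.1 \ 0.4 \ 0.2)
*   matrix_post_lines, matrix(PEs) varstub(PE) varnumend(3)
*   tempname ph
*   postfile `ph' `s(ps_init)' using results, replace
*   post `ph' `s(ps_posting)'
*   postclose `ph'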
|
*! Version 1.2 Brian Quistorff
*! Originally from: version 1.1.7 24oct2004 by Marc-Andreas Muendler: muendler@ucsd.edu
*! BQ: Make saving automatic (no dropall), remove manual -more-s,
*! put the full filename in path (not just dir) so can save to a name that is not the matrix name
*! Requires save12
program define matsave_simple
version 7
args matname
syntax newvarname [, REPLACE Path(string) Type(string)]
local matname= subinstr("`matname'",",","",1)
if lower("`type'")~="" & lower("`type'")~="byte" & lower("`type'")~="int" & lower("`type'")~="long" & lower("`type'")~="float" & lower("`type'")~="double" {
local type = "float"
}
tempname tst
local tst = colsof(`matname')
local currN = _N
local file = "`path'"
local saved 0
if `currN'>0 {
local dropall = ""
tempfile tmp
quietly save `tmp'
local saved 1
drop _all
}
local chgflg =0
local cnam : colfullnames `matname'
tokenize "`cnam'"
local i 1
while `i' <= colsof(`matname') {
local `i' = subinstr("``i''",":","_",.)
if "``i''" == "_cons" | "``i''" == "_b" | "``i''" == "_coef" {
local chgflg 1
local `i' = "_" + "``i''"
}
matname `matname' :``i'', columns(`i') explicit
local i = `i' + 1
}
local dosv=1
qui svmat `type' `matname', names(col)
local i 1
if `chgflg' {
while `i' <= colsof(`matname') {
matname `matname' ``i'', columns(`i') explicit
local i = `i' +1
}
}
local rnam : rowfullnames `matname'
tokenize "`rnam'"
local maxlen= 0
local j 1
while `j' <= rowsof(`matname') {
if length("``j''") > `maxlen' {local maxlen = length("``j''")}
local j=`j'+1
}
if `maxlen' >80 {local maxlen = 80}
quietly gen str`maxlen' _rowname=""
local j 1
while `j' <= rowsof(`matname') {
quietly replace _rowname = "``j''" in `j'
local j=`j'+1
}
order _rowname
qui save12 "`file'", `replace'
if `saved' {
use `tmp', clear
}
end
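* Usage sketch (illustrative comment; the file path is an assumption):
*   matrix M = (1, 2 \ 3, 4)
*   matrix rownames M = r1 r2
*   matsave_simple M, path("M.dta") replace
*   matrix drop M
*   matload_simple M, path("M.dta")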
|
*! v0.1 Brian Quistorff <bquistorff@gmail.com>
*! pass-through for -net describe- that allows local relative path
program net_describe
version 11.0 //just a guess here
syntax namelist(name=pkgname max=1) [, from(string)]
is_abs_path "`from'"
if !r(is_abs_path) {
local from `"`c(pwd)'/`from'"'
}
net describe `pkgname', from(`from')
end
|
*! v0.1 Brian Quistorff <bquistorff@gmail.com>
*! pass-through for -net from- that allows local relative path
program net_from
version 11.0 //just a guess here
args place
is_abs_path "`place'"
if !r(is_abs_path) {
local place `"`c(pwd)'/`place'"'
}
net from `place'
end
|
*! v0.1 Brian Quistorff <bquistorff@gmail.com>
*! pass-through for -net get- that allows local relative path
program net_get
version 11.0 //just a guess here
syntax namelist(name=pkgname max=1) [, all replace force from(string)]
is_abs_path "`from'"
if !r(is_abs_path) {
local from `"`c(pwd)'/`from'"'
}
net get `pkgname', `all' `replace' `force' from(`from')
end
|
*! v0.1 Brian Quistorff <bquistorff@gmail.com>
*! pass-through for -net install- that allows local relative path
program net_install
version 11.0 //just a guess here
syntax namelist(name=pkgname max=1) [, all replace force from(string)]
is_abs_path "`from'", local(iap)
if !`iap' {
local from `"`c(pwd)'/`from'"'
}
net install `pkgname', `all' `replace' `force' from(`from')
end
|
*! Version 1.0 Brian Quistorff <bquistorff@gmail.com>
*! Replaces -profiler report- and creates a dta as output (rather than a text file)
*! Requires: save_cmd_output.ado
program nice_profile_report
version 12
syntax , outfile(string)
tempfile textoutput
save_cmd_output, outfile("`textoutput'") command(profiler report)
tempfile initdata
qui save `initdata'
qui {
import delimited "`textoutput'", clear
destring count, replace
gen frac_time = time/time[_N]
save12 "`outfile'", replace
}
use `initdata', clear
end
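* Usage sketch (illustrative comment; assumes Stata's -profiler- command has been
* collecting data, and the do-file and output paths are assumptions):
*   profiler on
*   do "main_analysis.do"
*   profiler off
*   nice_profile_report, outfile("profile_report.dta")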
|
*! Outputs (in dta and tex formats) a matrix of some per-predictor quantity (e.g., predictor weights)
* Required globals: dir_base
program output_pred_mat
version 11.0 //just a guess here
syntax , file_suff(string) mat(string) mattype(string) [year_replace_period_list(string) nosort]
*Output to a dta file
tempname matw
mat `matw' = `mat'[1...,1]
matsave `matw', replace path("${dir_base}/data/estimates/weights/`mattype'_`file_suff'.dta")
* replace (period) with (year) if necessary
if "`year_replace_period_list'"!="" {
local rnames : rownames `mat'
local year_num : word count `year_replace_period_list'
forval i = 1 / `year_num' {
local year : word `i' of `year_replace_period_list'
local rnames : subinstr local rnames "(`i')" "(`year')", all
}
abbrev_all , str_list(`rnames') out_loc(rnames_new)
mat rownames `mat' = `rnames_new'
}
*replace var name with var label
local rnames : rownames `mat'
local rnames_orig "`rnames'"
foreach rname in `rnames_orig'{
local vname "`rname'"
local year_lab ""
local paren_ind = strpos("`rname'","(")
if `paren_ind'>0{
local vname =substr("`rname'",1,`=`paren_ind'-1')
local year_lab =substr("`rname'",`paren_ind',.)
}
local vname_lab : variable label `vname'
if "`vname_lab'"!=""{
local rnames : subinstr local rnames "`rname'" `""`vname_lab'`year_lab'""', all
}
}
abbrev_all , str_list(`rnames') out_loc(rnames_new)
mat rownames `mat' = `rnames_new'
if "`sort'"!="nosort"{
matrixsort `mat' -1
}
qui frmttable using "${dir_base}/tab/tex/`mattype'_`file_suff'.tex", replace statmat(`mat') tex fragment sdec(3)
end
|
*! Outputs a tex p-value table
program output_pval_table
version 11.0 //just a guess here
syntax , note(string) file_base(string) matrix(string)
local orig_linesize = "`c(linesize)'"
set linesize 160
frmttable using "${dir_base}/tab/tex/`file_base'_temp.tex", ///
replace statmat(`matrix') tex fragment note("`note'")
set linesize `orig_linesize'
qui filefilter "${dir_base}/tab/tex/`file_base'_temp.tex" ///
"${dir_base}/tab/tex/`file_base'_temp2.tex" , ///
from("& 0.00") to("& \BStextless{}0.01") replace
local finalfile "${dir_base}/tab/tex/`file_base'.tex"
qui filefilter "${dir_base}/tab/tex/`file_base'_temp2.tex" ///
"`finalfile'" , ///
from("smallskip} ") to("smallskip}Null distribution ") replace
qui erase "${dir_base}/tab/tex/`file_base'_temp.tex"
qui erase "${dir_base}/tab/tex/`file_base'_temp2.tex"
end
|
*! Outputs matrix (in dta and tex) of unit level matches
*! Requires globals: dir_base
program output_unit_matches
version 11.0 //just a guess here
syntax , numb(int) file_suff(string) weights_unr(string) weights(string) [match_file(string)]
tempfile initdata
qui save `initdata'
qui drop _all
qui svmat `weights_unr', names(W)
rename (W*) (codigo weight)
gsort -weight
qui save12 "${dir_base}/data/estimates/weights/weights_`file_suff'.dta", replace
qui drop _all
qui svmat `weights', names(W)
rename (W*) (codigo weight)
gsort -weight
qui keep in 1/`=min(`numb',_N)'
qui keep if weight >= 0.01
rename codigo set
if "`match_file'"!=""{
qui merge 1:1 set using "`match_file'", keep(match) nogenerate
}
gsort -weight
di "Top matches (`file_suff') :"
list
drop set
tempname top_matches
qui ds *
local allvars "`r(varlist)'"
local notcolvar "Name"
local colvars : list allvars - notcolvar
if "`colvars'"=="`allvars'"{
mkmat `colvars', mat(`top_matches')
}
else {
mkmat `colvars', mat(`top_matches') rownames(Name)
}
qui frmttable using "${dir_base}/tab/tex/top_matches_`file_suff'_temp1.tex", replace ///
statmat(`top_matches') tex fragment nodisplay coljust(lcr)
qui filefilter "${dir_base}/tab/tex/top_matches_`file_suff'_temp1.tex" ///
"${dir_base}/tab/tex/top_matches_`file_suff'_temp2.tex" , from(_) to(" ") replace
qui filefilter "${dir_base}/tab/tex/top_matches_`file_suff'_temp2.tex" ///
"${dir_base}/tab/tex/top_matches_`file_suff'.tex" , from(".00\BS\BS") to("\BS\BS") replace
qui erase "${dir_base}/tab/tex/top_matches_`file_suff'_temp1.tex"
qui erase "${dir_base}/tab/tex/top_matches_`file_suff'_temp2.tex"
use `initdata', clear
end
|
*! v1.0.8 Brian Quistorff <bquistorff@gmail.com>
*! A modified version of -outtable- v1.0.7 (see SSC) to remove the outer \table output so that it can be included in LyX
program define outtable_simple,rclass
version 8.0
syntax using/, mat(string) [Replace APPend noBOX Center ASIS CAPtion(string) Format(string) noROWlab longtable clabel(string) label]
tempname hh dd ddd
local formatn: word count `format'
local nr=rowsof(`mat')
local nc=colsof(`mat')
if "`clabel'"=="" {
local labelc "clabel"
}
else {
local labelc "`clabel'"
}
if "`replace'" == "replace" local opt "replace"
if "`append'" == "append" local opt "append"
file open `hh' using "`using'.tex", write `opt'
file write `hh' "% matrix: `mat' file: `using'.tex $S_DATE $S_TIME" _n
* add h to prefer here
local nc1 = `nc'-1
if "`box'" ~= "nobox" {
local vb "|"
local hl "\hline"
}
else {
local hg "\hline"
}
local align "l"
if "`center'" == "center" local align "c"
local l "`vb'l"
forv i=1/`nc' {
local l "`l'`vb'`align'"
}
local symm 0
if (issym(`mat')) {
local symm 1
}
local rnames : rownames(`mat')
local cnames : colnames(`mat')
local l "`l'`vb'"
if "`longtable'"=="" {
*file write `hh' "\begin{table}[htbp]" _n
if "`caption'" ~= "" {
file write `hh' "\caption{\label{`labelc'} `caption'}\centering\medskip" _n
}
file write `hh' "\begin{tabular}{`l'}" "`hl' `hg' `hg'" _n
forv i=1/`nc' {
local cn : word `i' of `cnames'
local cnw = cond("`asis'"=="asis","`cn'",subinstr("`cn'","_"," ",.))
if `i'==1 & "`rowlab'" == "norowlab" {
file write `hh' " \multicolumn{1}{c}{ `cnw' } "
}
else {
file write `hh' " & `cnw' "
}
}
file write `hh' " \" "\ `hl' `hg' " _n
}
if "`longtable'"!="" {
file write `hh' "\begin{center}" _n
file write `hh' "\begin{longtable}{`l'}" _n
if "`caption'" != "" {
file write `hh' "\caption{\label{`labelc'} `caption'}\\\" _n
}
forv i=1/`nc' {
local cn : word `i' of `cnames'
local cnw = cond("`asis'"=="asis","`cn'",subinstr("`cn'","_"," ",.))
local nc2 = `nc'+1
if `i'==1 & "`rowlab'" == "norowlab" {
local mainheader1 " \multicolumn{1}{c}{ `cnw' } "
local nc2 = `nc'
}
else {
local mainheader1 " \multicolumn{1}{c}{Variable Names} "
local mainheader2 "`mainheader2' & `cnw' "
}
local mainheader "`mainheader1' `mainheader2'"
}
file write `hh' "\hline " _n
file write `hh' "\hline " _n
file write `hh' "`mainheader' \\\" _n
file write `hh' "\hline " _n
file write `hh' " \endfirsthead" _n
file write `hh' "\multicolumn{`nc2'}{l}{\emph{... table \thetable{} continued}} \\\" _n
file write `hh' "\hline \hline " _n
file write `hh' "`mainheader' \\\" _n
file write `hh' "\hline" _n
file write `hh' "\endhead" _n
file write `hh' "\hline" _n
file write `hh' "\multicolumn{`nc2'}{r}{\emph{Continued on next page...}}\\\" _n
file write `hh' "\endfoot" _n
file write `hh' "\endlastfoot" _n
}
local jlim `nc1'
local klim `nc'
forv i=1/`nr' {
local rn : word `i' of `rnames'
if "`label'"!="" & "`rn'" != "r1" {
local rn : variable label `rn'
}
local rnw = cond("`asis'"=="asis","`rn'",subinstr("`rn'","_"," ",.))
if "`rowlab'" ~= "norowlab" file write `hh' "`rnw' & "
if `symm'==1 {
local jlim = `i'-1
local klim = `i'
}
forv j=1/`jlim' {
local fmt
if "`format'"!="" {
if `formatn'>1 local fmt: word `j' of `format'
else local fmt "`format'"
}
file write `hh' `fmt' (`mat'[`i',`j']) " & "
}
if "`format'"!="" {
if `formatn'>1 local fmt: word `nc' of `format'
else local fmt "`format'"
}
file write `hh' `fmt' (`mat'[`i',`klim'])
file write `hh' " \" "\ `hl' " _n
}
*if "`key'" ~= "" {
* file write `hh' "\label{`key'}" _n
* }
if "`longtable'"=="" {
file write `hh' "`hg' `hg' \end{tabular}" _n
*file write `hh' "\end{table}" _n
file close `hh'
}
if "`longtable'"!="" {
file write `hh' "`hg' `hg' \end{longtable} " _n
file write `hh' "\end{center}" _n
file close `hh'
}
end
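* Usage sketch (illustrative comment; matrix contents and file path are assumptions):
*   matrix T = (1.50, 0.25 \ 2.75, 0.40)
*   matrix rownames T = alpha beta
*   matrix colnames T = est se
*   outtable_simple using "results_tab", mat(T) replace format(%9.2f) caption(Main results)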
|
*! cleans up from old parallel and sets the number of clusters (has default for automatic #)
*! Globals required: numclusters
program parallel_clean_setclusters
version 11.0 //just a guess here
syntax [anything] [, noclean]
if "`anything'"==""{
if "${doparallel}"!="1" | "${numclusters}"=="1"{
global numclusters 1
exit 0
}
if "${numclusters}"==""{
global numclusters = ${defnumclusters}
}
}
else{
global numclusters "`anything'"
}
* For now only one parallel instance per FS in interactive mode, so default is clean
if "`clean'"!="noclean" & "`c(mode)'"==""{
cap parallel clean , all force
if _rc != 0{
closeallmatafiles
parallel clean , all force
}
}
parallel setclusters ${numclusters}, force
end
|
*! Does: parses up uneven tasks, takes care of the seed, appends datasets, cleans up temps
*! Assumed globals: numclusters, dir_base
*! If it doesn't work, run:
*! parallel clean, all force
*! Right now if something goes wrong, tempfiles are left around in temp folder (slightly bad).
program parallel_justout_helper
version 12.0 //just a guess here
*set trace on
syntax , donor_mat(string) cmd_base(string) cmd_options(string) outfile(string) [permweightsfile(string)]
tempfile initdatafile
qui save "`initdatafile'", replace
*-parallel cmd- changes the seed
local rng_state_init = "`c(seed)'"
tempname donor_mat_left
mat `donor_mat_left' = `donor_mat'
local reps_left = rowsof(`donor_mat')
*Rounds of parallel computation
while `reps_left'>0 {
*Determine reps for this round
local reps = `reps_left'
if "${max_rep_per_cl}"!=""{
local max_reps_per_round = ${max_rep_per_cl}*${numclusters}
if `reps_left'>`max_reps_per_round' {
local reps = `max_reps_per_round'
}
}
*Setup the matrices
if `reps'<${numclusters} {
forval i=1/`reps'{
mata: donor_mat`i' = st_matrix("`donor_mat_left'")[`i',1]
}
}
else {
local normal_reps = floor(`reps'/${numclusters})
forval i=1/${numclusters}{
local start_ind = (`i'-1)*`normal_reps'+1
local end_ind = `i'*`normal_reps'
if `i'== ${numclusters} {
local end_ind = `reps'
}
mata: donor_mat`i' = st_matrix("`donor_mat_left'")[`start_ind'..`end_ind',1]
}
}
if `reps'<`reps_left' {
mat `donor_mat_left' = `donor_mat_left'[`=`reps'+1'...,1]
}
local reps_left = `reps_left'-`reps'
*Setup for the main run
if `reps'<${numclusters} {
local oldnumcl = ${numclusters}
parallel_clean_setclusters `reps', noclean
}
forval i=1/${numclusters}{
*cap erase "`outfile'`i'" //post replaces
cap erase "`permweightsfile'`i'"
}
parallel, mata nodata keep: `cmd_base' , `cmd_options' donor_mat(donor_mat) ///
outfile("`outfile'") permweightsfile("`permweightsfile'") ///
logfile("${dir_base}/log/`cmd_base'_run${extra_f_suff}")
assert_msg r(pll_errs)==0
global pid `r(pll_id)'
*Aggregate the estimates
qui drop _all
forval i = 1/${numclusters} {
append using "`outfile'`i'"
}
*assert_msg _N>0
cap append using "`outfile'"
qui save "`outfile'", replace
if "`permweightsfile'"!= ""{
qui drop _all
forval i = 1/${numclusters} {
append using "`permweightsfile'`i'"
}
cap append using "`permweightsfile'"
qui save "`permweightsfile'", replace
}
*Erase temp files after the data appending so that if one process fails, can debug
forval i = 1/${numclusters} {
erase "`outfile'`i'"
erase "${dir_base}/log/`cmd_base'_run${extra_f_suff}`i'.log"
cap erase "`permweightsfile'`i'"
}
*Now restore the normal numclusters
if "`oldnumcl'"!="" {
parallel_clean_setclusters `oldnumcl', noclean
}
*With repeated calls was getting an error (select needs a vector in parallel_clean)
*Unless I take care of the temp files using -parallel clean-.
cap parallel clean, event(${pid}) force
* Got an "unlink(): 3621 attempt to write read-only file \n parallel_clean(): - function returned error
* So trying this
if _rc!=0{
closeallmatafiles
di as error "-parallel clean- gave an error, waiting and trying again"
sleep 1000
cap parallel clean, event(${pid}) force
}
}
set seed `rng_state_init'
qui use "`initdatafile'", clear
*di "Done with parallel_justout_helper"
end
|
*! v1.0 Brian Quistorff <bquistorff@gmail.com>
*! Post your own matrices to e(b) and e(V)
prog define post_eb_eV, eclass
version 8.0
args beta vari
eret post `beta' `vari'
eret loc cmd="post_eb_eV"
end
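* Usage sketch (illustrative comment; coefficient names/values are assumptions):
*   matrix b = (0.5, 1.2)
*   matrix V = (0.01, 0 \ 0, 0.04)
*   matrix colnames b = x1 x2
*   matrix rownames V = x1 x2
*   matrix colnames V = x1 x2
*   post_eb_eV b V
*   ereturn display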
|
*! Version 1.1
*! A variant of _dots (simple progress bar) with time estimates
*! If you don't want to keep track of curr (e.g. in a foreach loop),
*! then just pass in a single parameter: the end value.
/* To make deterministic output
s/^After .+/-normalized-/g
s/^(\.[^0-9]*)[0-9]+(s elapsed\.)/\1-normalized-\2/g
*/
program print_dots
version 12
args curr end
if `c(noisily)'==0 exit 0 //only have one timer going at a time.
local timernum 13
if "$PRINTDOTS_WIDTH"=="" local width 50
else local width = clip(${PRINTDOTS_WIDTH},1,50)
*See if passed in both
if "`end'"==""{
local end `curr'
if "$PRINTDOTS_CURR"=="" global PRINTDOTS_CURR 0
global PRINTDOTS_CURR = $PRINTDOTS_CURR+1
local curr $PRINTDOTS_CURR
}
if `curr'==1 {
timer off `timernum'
timer clear `timernum'
timer on `timernum'
exit 0
}
local start_point = min(5, `end')
if `curr'<`start_point' {
timer off `timernum'
qui timer list `timernum'
local used `r(t`timernum')'
timer on `timernum'
if `used'>60 {
local remaining = `used'*(`end'/`curr'-1)
format_time `= round(`remaining')', local(remaining_toprint)
format_time `= round(`used')', local(used_toprint)
display "After `=`curr'-1': `used_toprint' elapsed, `remaining_toprint' est. remaining"
}
exit 0
}
if `curr'==`start_point' {
timer off `timernum'
qui timer list `timernum'
local used `r(t`timernum')'
timer on `timernum'
local remaining = `used'*(`end'/`curr'-1)
format_time `= round(`remaining')', local(remaining_toprint)
format_time `= round(`used')', local(used_toprint)
display "After `=`curr'-1': `used_toprint' elapsed, `remaining_toprint' est. remaining"
if `end'<`width'{
di "|" _column(`end') "|" _continue
}
else{
local full_header "----+--- 1 ---+--- 2 ---+--- 3 ---+--- 4 ---+--- 5"
local header = substr("`full_header'",1,`width')
di "`header'" _continue
}
di " Total: `end'"
forval i=1/`start_point'{
di "." _continue
}
exit 0
}
if (mod(`curr', `width')==0 | `curr'==`end'){
timer off `timernum'
qui timer list `timernum'
local used `r(t`timernum')'
format_time `= round(`used')', local(used_toprint)
if `end'>`curr'{
timer on `timernum'
local remaining = `used'*(`end'/`curr'-1)
format_time `= round(`remaining')', local(remaining_toprint)
display ". `used_toprint' elapsed. `remaining_toprint' remaining"
}
else{
di "| `used_toprint' elapsed. "
}
}
else{
di "." _continue
}
end
program format_time
syntax anything(name=time), local(string)
local suff "s"
if `time'>100{
local time=`time'/60
local suff "m"
if `time'>100{
local time = `time'/60
local suff "h"
if `time'>36{
local time = `time'/24
local suff "d"
}
}
}
local str =string(`time', "%9.2f")
c_local `local' `str'`suff'
end
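* Usage sketch (illustrative comment): progress bar with elapsed/remaining time estimates.
*   local reps 200
*   forval i = 1/`reps' {
*       print_dots `i' `reps'
*       sleep 10
*   }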
|
*! version 1.0 Brian Quistorff <bquistorff@gmail.com>
*! Stata bindings for R's ranger package for random forests. Fits the model and generates predictions (either standard or out-of-bag)
*! ranger varlist(fv) [if] [pw/], [predict(string) predict_oob(string) num_trees(int 500)]
* predict_oob will do out-of-bag for estimation sample (if)
* predict will do predictions for whole sample
program ranger, eclass
version 12.0 //guess
*TODO: Allow different sample for predict then fit
*TODO: Could additionally make predictions for obs missing values in variables specified in estimation but not used in any tree of the forest. (though ensure that ranger doesn't error out for that.)
syntax varlist(fv) [if/] [aw pw/], [predict(string) predict_oob(string) num_trees(int 500) respect_unordered_factors(string) seed(string) debug importance(string)]
if "`respect_unordered_factors'"=="" loc respect_unordered_factors "order"
if "`importance'"!="" {
loc importance_opt `", importance="`importance'" "'
loc importance_code `"var_imp = t(as.matrix(rf_fit[["variable.importance"]]));"'
}
if "`seed'"=="" loc seed "NULL"
if "`c(mode)'"=="batch" & "`c(os)'"=="Windows" {
loc shell shell(bshell cmd /c)
}
tempvar id_varname
gettoken outcome Xs : varlist
*Main difference with regression is need to work around fv
foreach token of local Xs {
loc v = subinstr("`token'","i.","",1)
if "`v'"!="`token'" loc as_f `as_f' `v'
loc ctrl_vars `ctrl_vars' `v'
}
loc est_vars `outcome' `ctrl_vars' `exp'
gen long `id_varname'=_n
preserve
if "`if'"!="" {
tempvar if_est
gen byte `if_est' = `if'
loc est_logic `"& df[["`if_est'"]]"'
}
keep `id_varname' `est_vars' `if_est'
if "`exp'"!="" {
loc w_opt `", case.weights=df[cc `est_logic',"`exp'"]"'
loc weight_str "[`weight'=`exp']"
}
if "`predict'`predict_oob'"!="" {
tempfile pred_file
loc pred_file = subinstr("`pred_file'", "\", "/", .)
loc pred_code `"df_save = df[cc,c("`id_varname'"), drop=FALSE]; "'
loc pred_vars ", df_save"
loc pred_code2 `"save.dta13(df_save, "`pred_file'");"'
}
if "`predict'"!="" {
loc pred_code `"`pred_code' df_save[["`predict'"]]=predict(rf_fit, df[cc,])[["predictions"]]; "'
}
if "`predict_oob'"!="" {
loc pred_code `"`pred_code' df_est[["`predict_oob'"]]=rf_fit[["predictions"]]; df_save=merge(df_save, df_est[,c("`id_varname'", "`predict_oob'")], all=TRUE); "'
}
*rcall_check rpart>=4.0, rversion(3.5.0)
*save _st.data.dta, replace
rcall vanilla `shell' `debug': suppressWarnings(suppressPackageStartupMessages(library(ranger))); df <- st.data(); for(vname in strsplit("`as_f'", " ")[[1]]){ if(!is.factor(df[[vname]])) df[[vname]] = as.factor(df[[vname]]);}; cc = complete.cases(df); df_est=df[cc `est_logic',]; form=as.formula(paste0("`outcome' ~", gsub(" ", " + ", "`ctrl_vars'"))); rf_fit=ranger(form, df_est, num.trees=`num_trees', respect.unordered.factors="`respect_unordered_factors'", seed=`seed' `w_opt' `importance_opt'); `pred_code' `pred_code2' `importance_code' rm(cc, df, rf_fit, form, df_est `pred_vars');
if "`importance'"!="" {
tempname vi
mat `vi' = r(var_imp)
ereturn post `vi'
ereturn local depvar = "`outcome'"
ereturn local cmd = "ranger"
}
restore
if "`predict'`predict_oob'"!="" {
qui merge 1:1 `id_varname' using `pred_file', keep(master match) nogenerate
sort `id_varname'
}
if "`importance'"!="" & "`predict'"!="" {
tempvar n_miss
egen `n_miss' = rowmiss(`est_vars')
*loc if_standalone = cond("`if'"=="", "", "if `if'")
loc if_part = cond("`if'"=="", "", "`if' &")
qui count if `if_part' `n_miss'==0
loc e_N = r(N)
ereturn scalar N = `e_N'
qui corr `outcome' `predict' if `if_part' `n_miss'==0 `weight_str'
ereturn scalar r2 = r(rho)*r(rho)
drop `n_miss'
}
drop `id_varname'
end
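* Usage sketch (illustrative comment; assumes R, the -rcall- package, and R's ranger
* package are installed; variable names come from -sysuse auto-):
*   sysuse auto, clear
*   ranger price mpg weight i.rep78, predict_oob(price_oob) num_trees(300)
*   corr price price_oob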
|
*! version 1.0 Brian Quistorff <bquistorff@gmail.com>
*! restores the adopath global S_ADO to the Stata default (e.g., after clearing all macros)
program reset_ADO
version 12
global S_ADO `"BASE;SITE;.;PERSONAL;PLUS;OLDPLACE"'
end
|
*! version 1.1 Brian Quistorff <bquistorff@gmail.com>
*! A simple way to advance the Stata RNG state by a fixed amount
*! Will also fill a variable with a sequence of such states
*! (the first obs gets the current state; the user can capture the state after the call to continue the sequence).
program rng_advance
version 12
syntax anything [, var(string) amount(string)]
if "`anything'"=="replace" rng_advance_replace, var(`var')
if "`anything'"=="step" rng_advance_step, amount(`amount')
end
mata:
void m_rng_advance_replace(string scalar varn){
N= st_nobs()
seed_nums = runiform(N,1)*c("maxlong")
seed_sts = J(N,1,"")
final_rng_st = c("seed")
for(i=1; i<=N; i++){
rseed(seed_nums[i,1])
seed_sts[i,1] = rseed()
}
st_sstore(.,varn,seed_sts)
rseed(final_rng_st)
}
end
program rng_advance_replace
version 12
syntax , var(string)
replace `var'="`c(seed)'" if _n==1 //just to widen the string.
mata: m_rng_advance_replace("`var'")
end
* Deprecated. Takes time and requires user to know
* the amount to advance by for each loop (which may not be known ahead of time)
program rng_advance_replace_inline
version 12
syntax , var(string) amount(int)
forvalues i=1/`=_N' {
replace `var'="`c(seed)'" in `i'
rng_advance_step, amount(`amount')
}
end
program rng_advance_step
version 12
syntax , amount(int)
tempname tmp_mat
while `amount'>`c(matsize)' {
*mata's runiform is way faster than matuniform
qui mata: runiform(`c(matsize)',1)
local amount = `amount' - `c(matsize)'
}
qui mata: runiform(`amount',1)
end
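* Usage sketch (illustrative comment; variable name is an assumption):
*   sysuse auto, clear
*   set seed 12345
*   rng_advance step, amount(1000)        // jump the RNG state ahead
*   gen seed_state = ""
*   rng_advance replace, var(seed_state)  // one stored RNG state per observation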
|
program run_pywin32_hidden
* Deal with a cmd quirk: if the command has the form cmd /c "..." and the quoted
* part itself contains embedded quotes, wrap the whole thing in an extra pair of quotes
loc 0 = strtrim(`"`0'"')
loc 0_len = strlen(`"`0'"')
loc 0_rest = substr(`"`0'"', 9, `=`0_len'-9')
if substr(`"`0'"', 1, 8)==`"cmd /c ""' & substr(`"`0'"', `0_len', 1)==`"""' & strpos(`"`0_rest'"',`"""')>0 {
loc 0 `"cmd /c ""`0_rest'"""'
}
qui findfile run_pywin32_hidden.py
python script "`r(fn)'", args(`"`0'"')
end
|
*! version 0.0.8 Brian Quistorff <bquistorff@gmail.com>
*! Can try to save in either dataset version 114 or 115 which
*! is readable by Stata v11-13
* version map: Stata (dataset): v11 (114) v12(115) v13 (117) v14 (118).
* Stata can always read earlier dataset formats. Additionally, v11
* can read v115 datasets as the 114 and 115 are almost the same (just business dates).
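* Illustrative usage (a sketch; the filename is hypothetical):
*   save12 "output/results.dta", replace compress datasig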
program save12
syntax anything [, replace datasig compress]
if "`compress'"!="" compress
if "`datasig'"!="" {
datasig set, reset
*remove dates so dta file is the same across runs (normalized)
* but this does break -datasig report- though not -datasig confirm-
char _dta[datasignature_dt]
}
cap unab temp: _*
if `:list sizeof temp'>0 di "Warning: Saving with temporary (_*) vars"
if `c(stata_version)'>=13{
if `c(stata_version)'>=15 di "save12 untested for Stata v>=15"
if `c(stata_version)'>=14 local v_opt "version(12)"
saveold `anything', `replace' `v_opt'
}
else {
if `c(stata_version)'<11 di "save12 untested for Stata v<11"
save `anything', `replace'
}
end
|
*! version 0.1 Brian Quistorff <bquistorff@gmail.com>
*! converts a file using save12. Can prefix a command (that produces it).
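* Illustrative usage (a sketch; the filename and the producing command are hypothetical):
*   save12_convert results.dta, replace
*   save12_convert results.dta, replace : do make_results.do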
program save12_convert
if regexm(`"`0'"',":") {
gettoken 0 colon_command : 0, parse(":")
gettoken colon command : colon_command, parse(":")
`command'
}
syntax anything [, replace *]
preserve
use `anything', clear
save12 `anything', replace `options'
restore
end
|
*! version 0.0.1 Brian Quistorff <bquistorff@gmail.com>
*! Some helper utilities when saving so that common saving tasks can be on one line
*! Also warns if saving tempvars. Be careful with these. If you open a dta file with
*! tempvars and Stata's internal temp counter is different (e.g. open in a fresh session) there may be problems.
*! Requires: -saver-
* For example:
*//Session1
* sysuse auto, clear
* tempvar t
* gen `t' = 1
* save temp.dta, replace
* //session2
* use temp.dta, clear
* recode foreign (1=0)
program saver2
syntax anything(name=filename) [, noDATAsig noCOMPress *]
local filename `filename' //remove quotes if any
if "`compress'"!="nocompress" compress
if "`datasig'"!="nodatasig" datasig set, reset
cap unab temp: _*
if `:list sizeof temp'>0 di "Warning: Saving with temporary (_*) vars"
saver "`filename'", `options'
end |
*! Version 1.0 Brian Quistorff <bquistorff@gmail.com>
*! Saves the output from a shell command
*TODO: Anyway to fix-up line wrapping?
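* Illustrative usage (a sketch; the output file and logged command are hypothetical):
*   save_cmd_output, outfile("session_info.txt") command("about")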
program save_cmd_output
version 12
syntax, outfile(string) command(string)
*First get the raw log output
*Set linesize to max so little wrapping
local orig_linesize = `c(linesize)'
local orig_trace "`c(trace)'"
set linesize 255 //max
set trace off
tempfile firstout
*Can't quiet the below line
log using "`firstout'", replace text name(profile)
`command'
log close profile
set linesize `orig_linesize'
set trace `orig_trace'
*Now strip the bad top and bottom
tempname infh outfh
file open `infh' using "`firstout'", read text
qui file open `outfh' using "`outfile'", write text replace
*Skip first five lines
forval i=1/5{
file read `infh' line
}
file read `infh' line
while r(eof)==0 {
if `"`line'"'==" name: profile"{
continue, break
}
file write `outfh' "`line'" _n
file read `infh' line
}
file close `infh'
end
|
*! v1.1 bquistorff@gmail.com
*! saving text versions of title & notes (including wrapping for gph output) as well as the gph_file.
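* Illustrative usage (a sketch; the file names and the graph command are hypothetical):
*   save_fig, gph_file(fig1.gph) title_file(fig1_title.txt) caption_file(fig1_note.txt): ///
*       twoway scatter price mpg, title("Price vs. mileage") note("Source: auto.dta")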
program save_fig
version 12.0 //just a guess
*Strip off and deal with my suboptions
gettoken 0 remainder : 0, parse(":")
syntax , gph_file(string) [title_file(string) caption_file(string) caption_tex_file(string) width(string)]
gettoken colon 0 : remainder, parse(":")
/* If we had to load from an already-written file (but then can't unwrap the caption well)
tempname toexport
graph use "`gph_file'", name(`toexport')
qui graph describe
local 0 `"`r(command)'"'
* Remember to -graph drop `toexport'-
*/
syntax anything(equalok everything name=gph_cmd) [, title(string) note(string asis) *]
tempname fhandle
if "`title_file'"!="" & length(`"`title'"')>0{
file open `fhandle' using "`title_file'", write text replace
file write `fhandle' "`title'"
file close `fhandle'
}
if "`caption_tex_file'"!="" & "`caption_file'"=="" tempfile caption_file
if "`caption_file'"!="" & length(`"`note'"')>0{
file open `fhandle' using "`caption_file'", write text replace
if substr(`"`note'"',1,1)==`"""' | substr(`"`note'"',1,2)==`" ""'{
local w_count : list sizeof note
if `w_count'==1 file write `fhandle' `note'
else{
forval i=1/`w_count'{
local line : word `i' of `note'
if `i'>1 file write `fhandle' _n
file write `fhandle' "`line'"
}
}
}
else{
file write `fhandle' `"`note'"'
}
file close `fhandle'
if "`caption_tex_file'"!="" escape_latex_file, txt_infile("`caption_file'") tex_outfile("`caption_tex_file'")
}
if "`width'"!=""{
cap which wrap_text
if _rc==0 wrap_text , unwrappedtext(`note') wrapped_out_loc(note) width(`width')
}
`gph_cmd', `options' title("`title'") note(`note') saving("`gph_file'", replace)
end
|
*! v1.2 Brian Quistorff <bquistorff@gmail.com>
*! Does tasks you normally want to do when you establish a key
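* Illustrative usage (a sketch; the key variables are hypothetical):
*   set_key country year, sort order xtset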
program set_key
version 11 //guess
syntax varlist [, sort order xtset tsset]
isid `varlist'
char _dta[key] `varlist'
if "`xtset'"!="" xtset `varlist'
if "`tsset'"!="" tsset `varlist'
if "`sort'"!="" sort `varlist'
if "`order'"!="" order `varlist', first
end
|
*! Version 1.2 Brian Quistorff <bquistorff@gmail.com>
*! Latex output of summary stats
**************************************
*Edited slightly by Brian Quistorff 11-09-2013
* -allows filenames with spaces
* -allows dropping the \begin{table} \end{table} lines (better for customization)
*This is sutex.ado beta version
*04 Sep 2001
*Questions, comments and bug reports :
*terracol@univ-paris1.fr
*************************************
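* Illustrative usage (a sketch; the variables and output file are hypothetical):
*   sysuse auto, clear
*   sutex_env price mpg weight, labels minmax nobs digits(2) file(sumstats.tex) replace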
prog define sutex_env, byable(recall,noheader)
version 7.0
syntax [varlist] [if] [in] [aweight fweight], [DIGits(integer 3)] [LABels] [PAR] [NOBS] [MINmax] [NA(string)] [TITle(string)] [KEY(string)] [PLacement(string)] [LONGtable] [NOCHECK] [NOTABLEENV] [FILE(string)] [APPEND] [REPLAce]
********************
* Verifying syntax
********************
capture confirm variable `varlist'
if _rc==7 {
di as error "no variables found" exit
}
if "`file'"=="" & ("`append'"!="" | "`replace'"!="") {
di as error "append and replace are only usable in conjonction with file, options ignored"
}
if `digits'<0 | `digits' >20 {
di as error "DIGits must be between 0 and 20"
exit
}
tempvar touse
mark `touse' `if' `in'
if _by() {
qui replace `touse'=0 if `_byindex'!=_byindex()
}
tempname fich
**********************
*setting file extension
**********************
if "`file'"!="" {
tokenize "`file'", parse(.)
if "`3'"=="" {
local file="`1'.tex"
}
}
if _byindex()>1 {
local replace=""
local append="append"
}
if "`file'"=="" {
local type="di "
}
if "`file'"!=""{
local type="file write `fich'"
local nline="_n"
}
if "`file'"!="" {
file open `fich' using "`file'" ,write `append' `replace' text
}
************************
*Table heads
************************
local nm_vr="Variable"
local nm_me="Mean"
local nm_sd="Std. Dev."
local nm_mn="Min."
local nm_mx="Max."
local headlong="... table \thetable{} continued"
local footlong="Continued on next page..."
*********************************
if "`placement'"=="" {
local placement="htbp"
}
if _by() {
local by=_byindex()
}
local z="`na'"
if "`na'"!="" {
local na2="na(`z')"
}
if "`varlist'"=="" {
unab varlist : _all
}
if "`title'"=="" {
local title="Summary statistics"
}
if "`key'"=="" {
local key="sumstat"
}
local title="`title' `by'"
if _by()!=0 {
local key="`key'`by'"
}
*************************
*checking number of obs
************************
local v=2
tokenize "`varlist'"
qui su `1'
local q1=r(N)
mac shift
while "`1'" !="" {
qui su `1'
local q`v'=r(N)
if `q`v''!=`q1' {local nobs="nobs"}
local v=`v'+1
mac shift
}
***********************
* Number of digits
***********************
local nbdec="0."
local i=1
while `i'<=`digits'-1 {
local nbdec="`nbdec'0"
local i=`i'+1
}
if `digits'==0 {
local nbdec="1"
}
if `digits'>0 {
local nbdec="`nbdec'1"
}
********************
* setting columns
********************
if "`minmax'"!="" {
local a1=" c c"
}
if "`minmax'"!="" {
local a2=" & \textbf{`nm_mn'} & \textbf{`nm_mx'}"
}
local a3=2
if "`minmax'"!="" {
local a3=`a3'+2
}
if "`nobs'"!="" {
local a3=`a3'+1
}
local a6=`a3'+1
if "`nobs'"!="" {
local a4=" c"
}
if "`nobs'"!="" {
local a5=" & \textbf{N}"
}
if "`par'"!="" {
local op="("
}
if "`par'"!="" {
local fp=")"
}
if "`file'"=="" {
`type' "%------- Begin LaTeX code -------%"_newline
}
if "`file'"!="" {
`type' ""_n
}
******************
* "regular" table
******************
if "`longtable'"==""{
if "`notableenv'"=="" {
`type' "\begin{table}[`placement']\centering \caption{`title'\label{`key'}}"`nline'
}
`type' "\begin{tabular}{l c c `a1' `a4'}\hline\hline"`nline'
`type' "\multicolumn{1}{c}{\textbf{`nm_vr'}} & \textbf{`nm_me'}" _newline " & \textbf{`op'`nm_sd'`fp'}`a2' `a5'\\\ \hline"`nline'
}
*******************
*longtable
******************
if "`longtable'"!="" {
`type' `nline'"\begin{center}"_newline "\begin{longtable}{l c c `a1' `a4'}"`nline'
`type' "\caption{`title'\label{`key'}}\\\"_newline"\hline\hline\multicolumn{1}{c}{\textbf{`nm_vr'}}"_newline" &\textbf{`nm_me'}"_newline" & \textbf{`op'`nm_sd'`fp'}`a2' `a5' \\\ \hline"`nline'
`type' "\endfirsthead"`nline'
`type' "\multicolumn{`a6'}{l}{\emph{`headlong'}}"_newline"\\\ \hline\hline\multicolumn{1}{c}{\textbf{`nm_vr'}}"_newline" & \textbf{`nm_me'}"_newline" & \textbf{`op'`nm_sd'`fp'}`a2' `a5' \\\ \hline"`nline'
`type' "\endhead"`nline'
`type' "\hline"`nline'
`type' "\multicolumn{`a6'}{r}{\emph{`footlong'}}\\\"`nline'
`type' "\endfoot"`nline'
`type' "\endlastfoot"`nline'
}
tokenize "`varlist'"
local l=0
while "`1'" !="" {
local l=`l'+1
mac shift
}
local i=1
while `i'<=`l' {
if "`par'"!="" {
local op="("
}
if "`par'"!="" {
local fp=")"
}
tokenize "`varlist'"
local nom="``i''"
qui su `nom' if `touse' [`weight' `exp']
if "`labels'"!="" {
local lab : variable label ``i''
if "`lab'"!="" {
local nom="\`lab'"
}
}
***************************
*LaTeX special characters
**************************
if "`nocheck'"=="" {
latres ,name(`nom')
local nom="$nom"
}
****************************
local mean=round(r(mean), `nbdec')
local sd=round(sqrt(r(Var)), `nbdec')
if substr("`mean'",1,1)=="." {
local mean="0`mean'"
}
if substr("`mean'",1,2)=="-." {
local pb=substr("`mean'",3,.)
local mean="-0.`pb'"
}
if substr("`sd'",1,1)=="." {
local sd="0`sd'"
}
parse "`mean'", parse(.)
local mean="$_1"+"$_2"+substr("$_3",1,`digits')
parse "`sd'", parse(.)
local sd="$_1"+"$_2"+substr("$_3",1,`digits')
local N`i'=r(N)
if `N`i''==0 {
local mean="`na'"
local sd="`na'"
local op=""
local fp=""
}
if `N`i''==1 {
local sd="`na'"
local op=""
local fp=""
}
local min=round( r(min), `nbdec')
if substr("`min'",1,1)=="." {
local min="0`min'"
}
if substr("`min'",1,2)=="-." {
local pb=substr("`min'",3,.)
local min="-0.`pb'"
}
parse "`min'", parse(.)
local min="$_1"+"$_2"+substr("$_3",1,`digits')
local max=round( r(max), `nbdec')
if substr("`max'",1,1)=="." {
local max="0`max'"
}
if substr("`max'",1,2)=="-." {
local pb=substr("`max'",3,.)
local max="-0.`pb'"
}
parse "`max'", parse(.)
local max="$_1"+"$_2"+substr("$_3",1,`digits')
if `N`i''==0 {
local min="`na'"
local max="`na'"
}
if "`minmax'"!="" {
local extr="& `min' & `max'"
}
if "`nobs'"!="" {
local taille=" & `N`i''"
}
local ligne="\`nom' & `mean' & `op'`sd'`fp' `extr' `taille'"
**************************
* Displaying table lines
**************************
`type' "`ligne'\\\"`nline'
local i=`i'+1
}
if "`nobs'"!="" {
`type' "\hline"
}
local N=r(N)
if "`nobs'"=="" {
`type' "\multicolumn{1}{c}{N} & \multicolumn{`a3'}{c}{`N'}\""\\" " \hline"
}
if "`longtable'"==""{
`type' "\end{tabular}"
if "`notableenv'"==""{
`type' _newline "\end{table}"
}
}
if "`longtable'"!=""{
`type' "\end{longtable}"_newline "\end{center}" }
if "`file'"!="" {
`type' ""_n
}
if "`file'"=="" {
`type' "%------- End LaTeX code -------%"`nline'
}
if "`file'"!="" {
file close `fich'
}
macro drop ligne*
macro drop nom
if "`file'"!="" {
di `"file {view "`file'"} saved"'
}
end
***************************************************
*LaTeX special characters search and replace routine
***************************************************
cap prog drop latres
program define latres
version 7.0
syntax ,name(string) [sortie(string) nom]
if "`sortie'"=="" {
local sortie="nom"
}
local cr1="_"
local crc1="\_"
local cr2="\"
local crc2="$\backslash$ "
local cr3="$"
local crc3="\symbol{36}"
local cr4="{"
local crc4="\{"
local cr5="}"
local crc5="\}"
local cr6="%"
local crc6="\%"
local cr7="#"
local crc7="\#"
local cr8="&"
local crc8="\&"
local cr9="~"
local crc9="\~{}"
local cr10="^"
local crc10="\^{}"
local cr11="<"
local crc11="$<$ "
local cr12=">"
local crc12="$>$ "
local nom="`name'"
local t=length("`nom'")
local rg=1
local mot2=""
while `rg'<=`t' {
local let`rg'=substr("`nom'",`rg',1)
local num=1
while `num'<=12 {
if "`let`rg''"=="`cr`num''" {
local let`rg'="`crc`num''"
}
local num=`num'+1
}
if "`let`rg''"=="" {
local mot2="`mot2'"+" "
}
else if "`let`rg''"!="" {
local mot2="`mot2'"+"`let`rg''"
}
local rg=`rg'+1
}
global `sortie'="`mot2'"
end
|
*! version 0.0.7 Jens Hainmueller 01/26/2014
*! version 0.0.7-bq Brian Quistorff 2014-02
* -output the unrounded weights plus some other convenience vars.
* -Don't leave mats lying around
* -make a bit faster by speeding up averaging, removing some checks, and not calling tsset redundantly
* -allow for spread optimization
* -Fix problem where, if all donors with positive weights have the same value for a predictor, it would error
* To do:
* -If unitsnames included then it loops through all levels of pvar. make that faster.
* -Allow option for computing spread opt first (with reg V as fall-back)
* -Could also resolve indeterminacy by assigning weight to fewest units.
* Thought I could just set H*=-1, but opt notes that "choldc failed" so seems like it's not convex
*
* Note: Confusingly, this code uses Z for pre-treatment dependent var and Y for all dependent vars
* while the 2010 JASA paper uses Z for non-outcome predictors.
* Note: normalize just scales (linearly, not affine) so that the std dev is 1.
* If you do spread, you probably should put all y in predictors
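* Illustrative usage (a sketch; the dataset, variables, units, and periods are hypothetical):
*   tsset state year
*   synth2 cigsale retprice lnincome cigsale(1980) cigsale(1988), ///
*       trunit(3) trperiod(1989) xperiod(1980(1)1988) figure keep(synth_results) replace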
program synth2 , eclass
version 9.2
preserve
/* check if data is tsset with panel and time var */
tsset, noquery
local tvar `r(timevar)'
local pvar "`r(panelvar)'"
_assert "`tvar'"!="", msg("panel unit variable missing please use -tsset panelvar timevar-") rc(198)
_assert "`pvar'"!= "", msg("panel time variable missing please use -tsset panelvar timevar-") rc(198)
/* obtain settings */
syntax anything , ///
TRUnit(numlist min=1 max=1 int sort) ///
TRPeriod(numlist min=1 max=1 int sort) ///
[ COUnit(numlist min=2 int sort) ///
counit_ind(varlist max=1 string) ///
xperiod(numlist min=1 >=0 int sort) ///
mspeperiod(numlist min=1 >=0 int sort) ///
resultsperiod(numlist min=1 >=0 int sort) ///
unitnames(varlist max=1 string) ///
FIGure ///
Keep(string) ///
REPlace ///
customV(numlist) ///
margin(real 0.005) ///
maxiter(integer 1000) ///
sigf(integer 12) ///
bound(integer 10) ///
nested ///
allopt ///
skipchecks ///
spread spread_limit(real 0) ///
* ///
]
/* Define tempvars and separate depvar and predictors */
tempvar subsample
tempname Xco Xcotemp Xtr Xtrtemp Zco Ztr Yco Ytr Yco_post Ytr_post Xco_nodep Xtr_nodep Xtr_norm Xco_norm
*From pr_loqo.h
local OPTIMAL_SOLUTION 1
local CHOLDC_FAILED 20
/* Check User Inputs ************************* */
qui levelsof `pvar',local(levp)
_assert `: list trunit in levp' != 0, msg("treated unit not found in panelvar - check tr()") rc(198)
/* if the panel vars has labels grab it */
local clab: value label `pvar'
/* if unitname specified, grab the label here */
if "`unitnames'" != "" {
capture confirm string var `unitnames'
_assert !_rc, msg("`unitnames' does not exist as a (string) variable in dataset") rc(198)
/* check if it has a value for all units */
tempvar pcheck1 pcheck2
qui egen `pcheck1' = max(`pvar') , by(`unitnames')
qui egen `pcheck2' = min(`pvar') , by(`unitnames')
qui count if `pcheck1'!=`pcheck2'
_assert r(N)==0, msg("`unitnames' varies within units of `pvar' - revise unitnames variable ") rc(198)
local clab "`pvar'"
tempvar index
gen `index' = _n
/* now label the pvar accordingly */
foreach i in `levp' {
qui su `index' if `pvar' == `i', meanonly
local label = `unitnames'[`r(max)']
local value = `pvar'[`r(max)']
qui label define `clab' `value' `"`label'"', modify
}
label value `pvar' `clab'
}
if "`clab'" != "" {
local tlab: label `clab' `trunit' , strict
}
/* Produce initial output **************************** */
di as txt "{hline}" _newline as res "Synthetic Control Method for Comparative Case Studies"
di as txt "{hline}" _newline(2) as res "First Step: Data Setup" _newline as txt "{hline}"
/* Build pre-treatment period */
qui levelsof `tvar', local(levt)
loc checkinput: list trperiod in levt
_assert `checkinput'!=0, msg("period of treatment is not found in timevar - check trperiod()") rc(198)
/* by default, the minimum of the time var up to the intervention (exclusive) is the pre-treatment period */
qui levelsof `tvar' if `tvar' < `trperiod' , local(preperiod)
qui levelsof `tvar' if `tvar' >=`trperiod' , local(postperiod)
/* now if not supplied fill in xperiod (time period over which all predictors are averaged) */
if "`xperiod'" == "" {
numlist "`preperiod'" , min(1) integer sort
local xperiod "`r(numlist)'"
}
else {
loc checkinput: list xperiod in levt
_assert `checkinput'!=0, msg("at least one time period specified in xperiod() not found in timevar") rc(198)
}
/* now if not supplied fill in mspeperiod (time period over which all loss is minimized are averaged) */
if "`mspeperiod'" == "" {
numlist "`preperiod'" , min(1) integer sort
local mspeperiod "`r(numlist)'"
}
else {
_assert `: list mspeperiod in levt'!=0, msg("at least one time period specified in mspeperiod() not found in timevar") rc(198)
}
if "`resultsperiod'" == "" {
numlist "`levt'" , min(1) integer sort
local resultsperiod "`r(numlist)'"
local mspeperiod_post : list postperiod & resultsperiod
}
else {
_assert `: list resultsperiod in levt'!=0, msg("at least one time period specified in resultsperiod() not found in timevar") rc(198)
local mspeperiod_post "`postperiod'"
}
/* get depvars */
_assert "`anything'"!="", msg("not a single variable specified. please supply at least a response variable") rc(198)
gettoken dvar anything: anything
capture confirm numeric var `dvar'
_assert !_rc, msg("`dvar' does not exist as a (numeric) variable in dataset") rc(198)
/* Get treated dep matrices ************************************************* */
agdvar `Ztr' , cvar(`dvar') timeno(`mspeperiod') unitno(`trunit') sub(`subsample') ///
tlabel("pre-intervention MSPE period - check mspeperiod()") ///
ulabel("treated unit") trorco("treated") pvar(`pvar') tvar(`tvar') `skipchecks'
agdvar `Ytr' , cvar(`dvar') timeno(`resultsperiod') unitno(`trunit') sub(`subsample') ///
tlabel("results period - check resultsperiod()") ///
ulabel("treated unit") trorco("treated") pvar(`pvar') tvar(`tvar') `skipchecks'
agdvar `Ytr_post' , cvar(`dvar') timeno(`mspeperiod_post') unitno(`trunit') sub(`subsample') ///
tlabel("post period - check resultsperiod()") ///
ulabel("treated unit") trorco("treated") pvar(`pvar') tvar(`tvar') `skipchecks'
local displ_amount 2 //2=FLOOD, 1=STATUS, 0=QUIET
*Get control unit markers
_assert "`counit'"=="" | "`counit_ind'"=="", msg("Can't specify both counit_ind and counit") rc(198)
if "`counit_ind'"==""{
tempvar counit_ind counit_ind_orig
if "`counit'"==""{
local counit : subinstr local levp "`trunit'" " ", all word
gen byte `counit_ind' = (`pvar'!=`trunit')
}
else{
_assert `: list counit in levp'!=0, msg("at least one control unit not found in panelvar - check co()") rc(198)
qui gen byte `counit_ind' = 0
foreach cux of numlist `counit' {
qui replace `counit_ind'=1 if `pvar'==`cux'
}
}
gen byte `counit_ind_orig' = `counit_ind'
}
else{
local counit_ind_orig `counit_ind'
tempvar counit_ind
gen byte `counit_ind' = `counit_ind_orig'
qui levelsof `pvar' if `counit_ind',local(counit)
}
local counit_orig `counit'
_assert `: list trunit in counit'!=1, msg("treated unit appears among control units - check co() and tr()") rc(198)
local predictors_orig `anything'
local predictors `predictors_orig'
tempname mat_0
tempvar counitno wsolout wsolout_unr
while(1){ //optimize passes
* Construct info for control units and predictors and then optimize
* If optimization needs to remove some, redo the pass
if "`clab'" != "" {
local colabels ""
foreach i in `counit' {
local label : label `clab' `i'
local colabels `"`colabels', `label'"'
}
local colabels : list clean colabels
local colabels : subinstr local colabels "," ""
local colabels : list clean colabels
}
/* Create X matrices */
local trno : list sizeof trunit
local cono : list sizeof counit
/* for now we assume that the user used blanks only to separate variables */
/* thus we have p predictors */
/* *************************** */
/* begin variable construction */
cap mat drop `Xtr' `Xco' `Xco_nodep' `Xtr_nodep'
local predictors_left `predictors'
local predictors_used ""
local predictor_num = 0
local customV_used ""
while "`predictors_left'" != "" {
gettoken p predictors_left: predictors_left , bind
local predictor_num = `predictor_num'+1
/* check if there is a paranthesis in token */
local whereq = strpos("`p'", "(")
if `whereq' == 0 { /* just a varname */
capture confirm numeric var `p'
_assert !_rc, msg("`p' does not exist as a (numeric) variable in dataset") rc(198)
local var "`p'"
local xtime_orig ""
local xtime "`xperiod'"
/* set empty label for regular time period */
local xtimelab ""
}
else { /* token is varname plus time, so try to disentangle the two */
/* get var */
local var = substr("`p'",1,`whereq'-1)
qui capture confirm numeric var `var'
_assert !_rc, msg("`var' does not exist as a (numeric) variable in dataset") rc(198)
/* get time token */
local xtime = substr("`p'",`whereq'+1,.)
local xtime_orig = "(`xtime'"
/* save time token to use for label */
local xtimelab `xtime'
local xtimelab : subinstr local xtimelab " " "", all
/* now check whether there is a second parenthesis */
local wherep = strpos("`xtime'", "(")
/* if no, delete a potential & and done */
if `wherep' == 0 {
local xtime : subinstr local xtime "&" " ", all
local xtime : subinstr local xtime ")" " ", all
} /* if yes, this is a numlist, so we remove both parentheses but put the first one back in */
else {
local xtime : subinstr local xtime ")" " ", all
local xtime : subinstr local xtime " " ")"
}
numlist "`xtime'" , min(1) integer sort
local xtime "`r(numlist)'"
_assert (`: list xtime in levt'!=0), msg("for predictor `var' some specified periods are not found in panel timevar") rc(198)
}
/* now go an do averaging over xtime period for variable var */
local num_xtime : word count `xtime'
/* Controls *************************** */
if `num_xtime'==1 {
*If no variation in donor then skip
summ `var' if `counit_ind' & `tvar'==`xtime', meanonly
if r(min)==r(max){
continue, break
}
mkmat `var' if `counit_ind' & `tvar'==`xtime', matrix(`Xcotemp') rownames(`pvar')
mkmat `var' if `pvar'==`trunit' & `tvar'==`xtime', matrix(`Xtrtemp') rownames(`pvar')
}
else {
/* Define Subsample (just control units and periods from xtime() ) */
qui reducesample , tno("`xtime'") genname(`subsample') tvar(`tvar') pvar(`pvar') u_ind(`counit_ind')
if "`skipchecks'"==""{
missingchecker , tno("`xtime'") cvar("`var'") sub("`subsample'") ///
ulabel("control units") checkno(`cono') tilab("`xtimelab'") tvar(`tvar')
}
cap noisily agmat `Xcotemp' , cvar(`var') sub(`subsample') ulabel("control units") ///
checkno(`cono') tilab("`xtimelab'") pvar(`pvar') stopifsame
if _rc==2{
qui drop `subsample'
continue, break
}
qui drop `subsample'
/* Now treated ***************************** */
/* Define subsample just treated unit and xtime() periods */
qui reducesample , tno("`xtime'") uno("`trunit'") genname(`subsample') tvar(`tvar') pvar(`pvar')
if "`skipchecks'"==""{
missingchecker , tno("`xtime'") cvar("`var'") sub("`subsample'") ///
ulabel("treated unit") checkno(`trno') tilab("`xtimelab'") tvar(`tvar')
}
agmat `Xtrtemp' , cvar(`var') sub(`subsample') ulabel("treated unit") checkno(`trno') tilab("`xtimelab'") pvar(`pvar')
qui drop `subsample'
}
local predictors_used "`predictors_used' `var'`xtime_orig'"
/* finally name matrices and done */
if "`xtimelab'" == "" {
mat coln `Xcotemp' = "`var'"
mat coln `Xtrtemp' = "`var'"
}
else {
mat coln `Xcotemp' = "`var'(`xtimelab'"
mat coln `Xtrtemp' = "`var'(`xtimelab'"
}
mat `Xtr' = nullmat(`Xtr'),`Xtrtemp'
mat `Xco' = nullmat(`Xco'),`Xcotemp'
if "`var'"!="`dvar'"{
mat `Xtr_nodep' = nullmat(`Xtr_nodep'),`Xtrtemp'
mat `Xco_nodep' = nullmat(`Xco_nodep'),`Xcotemp'
}
if "`customV'"!=""{
local this_weight : word `predictor_num' of `customV'
local customV_used "`customV_used' `this_weight'"
}
} /* close while loop through predictor string; variable construction is done */
local predictors_dropped : list predictors - predictors_used
if "`predictors_dropped'"!="" di "Dropping predictors with no donor variation: `predictors_dropped'"
local predictors `predictors_used' //for next time
/* Get control dep matrix for controls ************************************************* */
agdvar `Yco' , cvar(`dvar') timeno(`resultsperiod') unitno(`counit') sub(`subsample') ///
tlabel("results period - check resultsperiod()") ///
ulabel("control units") trorco("control") pvar(`pvar') tvar(`tvar') unit_ind(`counit_ind') `skipchecks'
agdvar `Yco_post' , cvar(`dvar') timeno(`mspeperiod_post') unitno(`counit') sub(`subsample') ///
tlabel("post period - check resultsperiod()") ///
ulabel("control units") trorco("control") pvar(`pvar') tvar(`tvar') unit_ind(`counit_ind') `skipchecks'
agdvar `Zco' , cvar(`dvar') timeno(`mspeperiod') unitno(`counit') sub(`subsample') ///
tlabel("pre-intervention MSPE period - check mspeperiod()") ///
ulabel("control units") trorco("control") pvar(`pvar') tvar(`tvar') unit_ind(`counit_ind') `skipchecks'
/* rownames for final X matrixes */
mat rown `Xco' = `counit'
mat rown `Xtr' = `trunit'
/* transpose for optimization */
mat `Xtr' = (`Xtr')'
mat `Xco' = (`Xco')'
cap confirm matrix `Xco_nodep'
if !_rc {
mat `Xtr_nodep' = (`Xtr_nodep')'
mat `Xco_nodep' = (`Xco_nodep')'
}
di as txt "{hline}" _newline "Data Setup successful" _newline "{hline}"
if "`clab'" != "" {
di "{txt}{p 16 28 0} Treated Unit: {res}`tlab' {p_end}"
*di "{txt}{p 15 30 0} Control Units: {res}`colabels' {p_end}" //can be really long
}
else {
di "{txt}{p 16 28 0} Treated Unit: {res}`trunit' {p_end}"
*di "{txt}{p 15 30 0} Control Units: {res}`counit' {p_end}" //can be really long
}
di as txt "{hline}"
di "{txt}{p 10 30 0} Dependent Variable: {res}`dvar' {p_end}"
di "{txt}{p 2 30 0} MSPE minimized for periods: {res}`mspeperiod'{p_end}"
di "{txt}{p 0 30 0} Results obtained for periods: {res}`resultsperiod'{p_end}"
di as txt "{hline}"
local prednames : rownames `Xco'
di "{txt}{p 18 30 0} Predictors:{res} `prednames'{p_end}"
di as txt "{hline}"
di "{txt}{p 0 30 0} Unless period is specified {p_end}"
di "{txt}{p 0 30 0} predictors are averaged over: {res}`xperiod'{p_end}"
/* now go to optimization */
/* ***************************************************************************** */
di as txt "{hline}" _newline(2) as res "Second Step: Run Optimization" _newline as txt "{hline}"
/* Dataprep finished. Starting optimisation */
tempname sval V
/* normalize the vars */
mata: normalize("`Xtr'","`Xco'")
mat `Xtr_norm' = xtrmat
mat `Xco_norm' = xcomat
mat rowname `Xtr_norm' = `: rownames `Xtr''
mat colname `Xtr_norm' = `: colnames `Xtr''
mat rowname `Xco_norm' = `: rownames `Xco''
mat colname `Xco_norm' = `: colnames `Xco''
/* Set up V matrix */
if "`customV_used'" == "" {
/* go get Regression based V weights */
mata: regsval("`Xtr_norm'","`Xco_norm'","`Ztr'","`Zco'")
mat `V' = vmat
}
else {
di as txt "Using user supplied custom V-weights" _newline "{hline}"
local checkinput : list sizeof customV_used
_assert `checkinput'==rowsof(`Xtr_norm'), msg("wrong number of custom V weights; please specify one V-weight for each predictor") rc(198)
mat input `sval' = (`customV_used')
mata: normweights("`sval'")
mat `V' = matout
}
/* now go into optimization */
/* now if the user wants the full nested method, go and get Vstar via nested method */
if "`nested'" == "nested" {
di "{txt}{p 0 30 0} Nested optimization requested {p_end}"
/* parse the ml optimization options */
/* retrieve optimization options for ml */
mlopts std , `options'
/* if no technique is specified insert our default */
if "`s(technique)'" == "" {
local technique "tech(nr dfp bfgs)"
local std : list std | technique
}
/* /* if no iterations are specified insert our default */
local std : subinstr local std "iterate" "iterate", count(local isinornot)
if `isinornot' == 0 {
local iterate " iterate(100)"
local std : list std | iterate
} */
/* check whether the user has specified any of the nrtol options */
/* 1. check if shownrtolerance is used */
local std : subinstr local std "shownrtolerance" "shownrtolerance", count(local shownrtoluser)
_assert `shownrtoluser'==0, msg("maximize option shownrtolerance cannot be used with synth") rc(198)
/* 2. check if an own nrtolerance level is specified */
local std : subinstr local std "nrtolerance(" "nrtolerance(", count(local nrtoluser)
/* 3. check if nonrtolerance is specified */
local std : subinstr local std "nonrtolerance" "nonrtolerance", count(local nonrtoluser)
/* delete difficult if specified*/
local std : subinstr local std "difficult" " ", all
/* define input matrices for ml optimization as globals so that the loss function can find them */
/* maybe there is a better way to do this */
global Xco_norm : tempvar
global Xtr_norm : tempvar
global Zco : tempvar
global Ztr : tempvar
mat $Xco_norm = `Xco_norm'
mat $Xtr_norm = `Xtr_norm'
mat $Zco = `Zco'
mat $Ztr = `Ztr'
/* set up the likelihood model for optimization */
/* since we optimize on matrices, we need to trick */
/* ml and first simulate a dataset with correct dimensions */
qui drop _all
qui matrix pred = matuniform(rowsof(`V'),rowsof(`V'))
/* now create k artificial vars named pred1, pred2,... */
qui svmat pred
/* get regression based V or user defined V as initial values */
tempname bini
mat `bini' = vecdiag(`V')
/* Run optimization */
tempname lossreg svalreg
di "{txt}{p 0 30 0} Starting nested optimization module {p_end}"
qui wrapml , lstd(`std') lbini("`bini'") lpred("pred*") lnrtoluser(`nrtoluser') lnonrtoluser(`nonrtoluser') lsearch("off")
di "{txt}{p 0 30 0} Optimization done {p_end}"
scalar define `lossreg' = e(lossend)
mat `sval' = e(sval)
/* Now if allopt is specified then rerun optimization using ml search svals, and equal weights */
if "`allopt'" == "allopt" {
di "{txt}{p 0 30 0} Allopt requested. This may take a while{p_end}"
/* **** */
/* optimize with the search way of doing initial values */
tempname losssearch svalsearch
di "{txt}{p 0 30 0} Restarting nested optimization module (search method) {p_end}"
qui wrapml , lstd(`std') lbini("`bini'") lpred("pred*") lnrtoluser(`nrtoluser') lnonrtoluser(`nonrtoluser') lsearch("on")
di "{txt}{p 0 30 0} done{p_end}"
scalar define `losssearch' = e(lossend)
mat `svalsearch' = e(sval)
/* **** */
/* optimize with equal weights way of doing initial values */
/* get equal weights */
mat `bini' = vecdiag(I(rowsof(`V')))
/* run opt */
tempname lossequal svalequal
di "{txt}{p 0 30 0} Restarting nested optimization module (equal method) {p_end}"
qui wrapml , lstd(`std') lbini("`bini'") lpred("pred*") lnrtoluser(`nrtoluser') lnonrtoluser(`nonrtoluser') lsearch("off")
di "done"
scalar define `lossequal' = e(lossend)
mat `svalequal' = e(sval)
/* **** */
/* Done with allopts optimization */
/* now decide which loss is lowest: first reg vs equal, then the minimum of those vs search */
if( `lossreg' < `lossequal' ) {
mat `sval' = `svalequal'
qui scalar define `lossreg' = `lossequal'
}
if( `lossreg' < `losssearch' ) {
mat `sval' = `svalsearch'
}
}
/* now get Vstar vector, normalize once again and create final diag Vstar */
mata: getabs("`sval'")
mat `sval' = matout
mat `V' = diag(`sval')
}
/* now go get W, conditional on V (could be Vstar, regression V, or customV) */
/* Set up quadratic programming */
tempname H c A l u wsol wsol_unr b
mat `b' = 1
mat `H' = (`Xco_norm')' * `V' * `Xco_norm'
mat `c' = (-1 * ((`Xtr_norm')' * `V' * `Xco_norm'))'
assert `cono'==rowsof(`c')
mat `A' = J(1,`cono',1)
mat `l' = J(`cono',1,0)
mat `u' = J(`cono',1,1)
matrix `wsol' = J(`cono',1,.)
/* do quadratic programming step */
cap plugin call synth2opt , `c' `H' `A' `b' `l' `u' `bound' `margin' `maxiter' `sigf' `wsol' `displ_amount' 0 _ret_code
if _rc>0 exit _rc
*check if close enough to do new solve
if "`spread'"!="" & `ret_code'!=`CHOLDC_FAILED'{
tempname Ztr_norm Zco_norm Xtr_nodep_norm Xco_nodep_norm spread_diff spread_diff_m_mat
mata: normalize("`Ztr'","`Zco'")
mat `Ztr_norm' = xtrmat
mat `Zco_norm' = xcomat
mat `A' = `Zco_norm'
mat `b' = `Ztr_norm'
cap confirm matrix `Xco_nodep'
if !_rc {
mata: normalize("`Xtr_nodep'","`Xco_nodep'")
mat `Xtr_nodep_norm' = xtrmat
mat `Xco_nodep_norm' = xcomat
mat `A' = `A' \ `Xco_nodep_norm'
mat `b' = `b' \ `Xtr_nodep_norm'
}
local m_n = rowsof(`A')
mat `spread_diff' = `A'*`wsol'-`b'
//don't divide by b in the next line because already scaled so std-dev=1
mata: st_matrix("`spread_diff'",abs(st_matrix("`spread_diff'")))
mat `spread_diff_m_mat' = `spread_diff'*J(1,`m_n',1)/`m_n'
local spread_diff_m = `spread_diff_m_mat'[1,1]
if `spread_limit'<=0 local spread_limit = 0.01
di as txt "Mean difference between unit and SC for dependent variable and other predictors (for normalized data): `spread_diff_m'"
if `spread_diff_m'<`spread_limit' {
di as txt "Discrepancy less than limit (`spread_limit'). Attempting to resolve indeterminacy by maximum spread"
tempname wsol_second wsol_first
mat `wsol_second' = J(`cono',1,.)
local re_displ_amount 2
local restart 0
mat `c' = J(`cono',1,0)
mat `H' = I(`cono')
mat `A' = J(1,`cono',1) \ `A'
mat `b' = 1 \ `b'
cap plugin call synth2opt , `c' `H' `A' `b' `l' `u' `bound' `margin' `maxiter' `sigf' `wsol_second' `re_displ_amount' `restart' _ret_code
if _rc>0 exit _rc
if `ret_code'==`OPTIMAL_SOLUTION'{
mat `wsol_first' = `wsol'
mat `wsol' = `wsol_second'
di "Successfully optimized to maximize spread"
}
else{
di "Unsuccessful at optimizing to maximize spread (`ret_code'). Using previous results."
}
}
}
/* round */
mat `wsol_unr' = `wsol'
mata: roundmat("`wsol'")
mat `wsol' = matout
/* organize W matrix for display */
mat input `counitno' = (`counit')
mat `counitno' = (`counitno')'
mat `wsolout' = `counitno' , `wsol'
mat `wsolout_unr' = `counitno' , `wsol_unr'
if(`ret_code'!=`CHOLDC_FAILED' | "`re_displ_amount'"!="") continue, break
di "Optimization dropped a variable. Restarting with donors with correct value. That var will get dropped."
*save the unit #s of those that have 0 weight
mata: wsolout = st_matrix("`wsolout'")
mata: wsolout_0 = select(wsolout,wsolout[,2]:==0)
mata: x = invtokens(strofreal(wsolout_0[,1]'))
tempname w0
mata: st_matrix("`w0'", wsolout_0)
mata: st_local("co_to_remove", x)
foreach cux of local co_to_remove{
qui replace `counit_ind'=0 if `pvar'==`cux'
}
local counit : list counit - co_to_remove
mat `mat_0' = nullmat(`mat_0') \ `w0'
}
tempname wsol_final
mat `wsol_final' = `wsol'
cap confirm matrix `mat_0'
if !_rc{ //append the 0 matrix to the weights, and resort.
mat `wsolout' = `wsolout' \ `mat_0'
mat `wsolout_unr' = `wsolout_unr' \ `mat_0'
mata: st_matrix("`wsolout'",sort(st_matrix("`wsolout'"),1))
mata: st_matrix("`wsolout_unr'",sort(st_matrix("`wsolout_unr'"),1))
mat `wsol' = `wsolout'[1...,2]
mat `wsol_unr' = `wsolout_unr'[1...,2]
}
mat colname `wsolout' = "_Co_Number" "_W_Weight"
mat colname `wsolout_unr' = "_Co_Number" "_W_Weight"
qui svmat `wsolout' , names(col)
tempname Xbal Zbal Ybal loss loss_post Xsynth Ysynth Zsynth Ysynth_post gap gap_post gap_pre
/* Compute loss and transform to RMSPE */
mat `Zsynth' = `Zco' * `wsol_final'
mat `Zbal' = `Ztr' , `Zsynth'
mat colname `Zbal' = "Treated" "Synthetic"
mat `gap_pre' = `Ztr' - `Zsynth'
mat `loss' = (`gap_pre')' * ( `gap_pre' )
mat `loss' = `loss' / rowsof(`Ztr')
mata: roottaker("`loss'")
mat rowname `loss' = "RMSPE"
/* *************************************** */
/* Organize output */
di as txt "{hline}" _newline as res "Optimization done" _newline as txt "{hline}"
di as res _newline "Third Step: Obtain Results" _newline as txt "{hline}"
di as res "Loss: Root Mean Squared Prediction Error"
matlist `loss' , tw(8) names(rows) underscore lines(rows) border(rows)
di as txt "{hline}" _newline as res "Unit Weights:"
/* Display either with or without colum names *********** */
label var _Co_Number "Co_No"
label values _Co_Number `clab'
label var _W_Weight "Unit_Weight"
tabdisp _Co_Number if _Co_Number~=. ,c(_W_Weight)
/* Display X Balance */
mat `Xsynth' = `Xco' * `wsol_final'
mat `Xbal' = `Xtr' , `Xsynth'
mat colname `Xbal' = "Treated" "Synthetic"
di as txt "{hline}" _newline as res "Predictor Balance:"
matlist `Xbal' , tw(30) border(rows)
di as txt "{hline}"
/*compute outcome trajectory output */
mat `Ysynth' = `Yco' * `wsol_final'
mat `Ybal' = `Ytr' , `Ysynth'
mat colname `Ybal' = "Treated" "Synthetic"
mat `gap' = `Ytr' - `Ysynth'
*Just the post period
mat `Ysynth_post' = `Yco_post'*`wsol_final'
mat `gap_post' = `Ytr_post' -`Ysynth_post'
mat `loss_post' = (`gap_post')' * ( `gap_post' )
mat `loss_post' = `loss_post' / rowsof(`gap_post')
mata: roottaker("`loss_post'")
mat rowname `loss_post' = "RMSPE"
/* if user wants plot or save */
if "`keep'" != "" | "`figure'" != "" {
/* create vars for plotting */
qui svmat double `Ytr' , names(_Ytreated)
qui svmat double `Ysynth' , names(_Ysynthetic)
qui svmat double `gap' , names(_gap)
/* time variable for plotting */
tempname timetemp
mat input `timetemp' = (`resultsperiod')
mat `timetemp' = (`timetemp')'
qui svmat double `timetemp' , names(_time)
/* rename cosmetics */
qui rename _Ytreated1 _Y_treated
qui rename _Ysynthetic1 _Y_synthetic
qui rename _gap1 _gap
qui rename _time1 _time
if "`clab'" != "" {
qui label var _Y_treated "`tlab'"
qui label var _Y_synthetic "synthetic `tlab'"
}
else {
qui label var _Y_treated "treated unit"
qui label var _Y_synthetic "synthetic control unit"
qui label var _gap "gap in outcomes: treated minus synthetic"
}
}
/* Results Dataset */
if "`keep'" != "" {
qui keep _Co_Number _W_Weight _Y_treated _Y_synthetic _time
qui drop if _Co_Number ==. & _Y_treated==.
if "`replace'" != "" {
qui save `keep' , `replace'
}
else {
qui save `keep'
}
}
/* Plot */
if "`figure'" == "figure" {
twoway (line _Y_treated _time, lcolor(black)) (line _Y_synthetic _time, lpattern(dash) lcolor(black)), ytitle("`dvar'") xtitle("`tvar'") xline(`trperiod', lpattern(shortdash) lcolor(black))
}
/* Return results */
qui ereturn clear
ereturn mat Y_treated `Ytr'
ereturn mat Y_synthetic `Ysynth'
if "`clab'" != "" {
local colabels : subinstr local colabels " " "", all
local colabels : subinstr local colabels "," " ", all
local colabels : list clean colabels
mat rowname `wsolout' = `colabels'
}
else {
mat rowname `wsolout' = `counit'
}
ereturn mat W_weights `wsolout'
ereturn mat W_weights_unr `wsolout_unr'
mat rowname `V' = `prednames'
mat colname `V' = `prednames'
ereturn mat V_matrix `V'
ereturn scalar RMSPE_pre = `loss'[1,1]
ereturn scalar RMSPE_post = `loss_post'[1,1]
ereturn mat RMSPE `loss'
ereturn mat X_balance `Xbal'
ereturn mat Ybal `Ybal'
ereturn mat Zbal `Zbal'
/* drop global macros */
macro drop Xco_norm Xtr_norm Zco Ztr
mat drop xcomat xtrmat vmat fmat matout
cap mat drop emat
ereturn mat X1 `Xtr'
ereturn mat X0 `Xco'
ereturn mat X1_normalized `Xtr_norm'
ereturn mat X0_normalized `Xco_norm'
*ereturn mat Z1 `Ztr'
*ereturn mat Z0 `Zco'
if "`spread'"!=""{
ereturn scalar spread_diff_m = `spread_diff_m'
cap confirm matrix `wsol_first'
if !_rc{
ereturn mat W_weights_first `wsol_first'
ereturn scalar spread_opt_succ = 1
}
else ereturn scalar spread_opt_succ = 0
}
end
/* Subroutines */
/* subroutine reducesample: creates subsample marker for specified periods and units */
* can specify u_ind variable instead of walking through uno
program reducesample , rclass
version 9.2
syntax , tno(numlist >=0 integer) genname(string) tvar(string) pvar(string) [uno(numlist integer) u_ind(string)]
local tx: subinstr local tno " " ",", all
/*local ux: subinstr local uno " " ",", all
qui gen `genname' = ( inlist(`tvar',`tx') & inlist(`pvar', `ux')) */
if "`u_ind'"==""{
qui gen `genname' = 0
foreach cux of numlist `uno' {
qui replace `genname'=1 if inlist(`tvar',`tx') & `pvar'==`cux'
}
}
else{
qui gen `genname' = inlist(`tvar',`tx') & `u_ind'
}
end
/* subroutine missingchecker: goes through matrix, checks missing obs and gives informative error */
program missingchecker , rclass
version 9.2
syntax , tno(numlist >=0 integer) cvar(string) sub(string) tvar(string) [checkno(string) ulabel(string) tilab(string) ]
foreach tum of local tno {
tempvar misscheck
qui gen `misscheck' = missing(`cvar') if `tvar' == `tum' & `sub' == 1
qui count if `misscheck' > 0 & `misscheck' !=.
if `r(N)' > 0 {
di as input "`cvar'(`ulabel'): for `r(N)' out of `checkno' units missing obs for predictor `cvar'(`tilab' in period `tum' - ignored for averaging"
}
qui drop `misscheck'
}
end
/* subroutine agmat: aggregate x-values over time, checks missing, and returns predictor matrix */
program agmat
version 9.2
syntax name(name=finalmat) , cvar(string) sub(string) ulabel(string) checkno(string) pvar(string) ///
[ tilab(string) stopifsame]
/*OLD way
qui tabstat `cvar' if `sub' == 1 , by(`pvar') s("mean") nototal save
qui gettabstatmat `finalmat'*/
preserve
collapse (mean) `cvar' if `sub' == 1, by(`pvar') fast
*XXX do I care about losing the labels?
summ `cvar', meanonly
if ("`stopifsame'"!="") & (r(min)==r(max)){
exit 2
}
mkmat `cvar', matrix(`finalmat')
if matmissing(`finalmat') {
qui local checkdimis : display `checkdimis'
di as err "`ulabel': for at least one unit predictor `cvar'(`tilab' is missing for ALL periods specified"
exit 198
}
end
/* subroutine agdvar: aggregates values of the outcome variable over time and returns in transposed form */
/* has a trorco flag for treated or controls, since different aggregation is used */
program agdvar
version 9.2
syntax name(name=outmat) , cvar(string) timeno(numlist >=0 integer) ///
unitno(numlist integer) sub(string) tlabel(string) ///
ulabel(string) trorco(string) pvar(string) tvar(string) [unit_ind(string) skipchecks]
/* reduce sample */
qui reducesample , tno("`timeno'") uno("`unitno'") genname(`sub') tvar(`tvar') pvar(`pvar') u_ind(`unit_ind')
local tino : list sizeof timeno
local cono : list sizeof unitno
if "`skipchecks'"=="" {
foreach tum of local timeno {
qui sum `cvar' if `tvar' == `tum' & `sub' == 1 , meanonly
tempname checkdimis checkdimshould
qui scalar define `checkdimis' = `r(N)'
qui scalar define `checkdimshould' = `cono'
qui scalar define `checkdimis' = `checkdimshould' - `checkdimis'
if `checkdimis' != 0 {
qui local checkdimis : display `checkdimis'
di as err "`ulabel': for `checkdimis' out of `cono' units outcome variable `cvar' is missing in `tum' `tlabel'"
error 198
}
}
}
/* aggregate for controls */
if "`trorco'" == "control" {
qui mata: switchmat("`pvar'","`cvar'", "`sub'")
mat `outmat' = fmat
}
else {
/* and for treated */
qui mkmat `cvar' if `sub' == 1 , matrix(`outmat')
}
_assert !matmissing("`outmat'"), msg("`ulabel': outcome variable missing for `tlabel'") rc(198)
mat coln `outmat' = `unitno'
mat rown `outmat' = `timeno'
qui drop `sub'
end
/* subroutine to run ml robustly, both with and without -difficult-, and with or without nrtol */
program wrapml , eclass
version 9.2
syntax , lstd(string) lbini(string) lpred(string) lnrtoluser(numlist) lnonrtoluser(numlist) lsearch(string)
/* add search if specified */
if "`lsearch'" == "on" {
local lsearch "search(quietly)"
local lstd : list lstd | lsearch
}
di "started wrapml"
di "Std is: `lstd'"
/* in any case we run twice once with and once without difficult specified */
/* if the user specified any of the nrtol or nonrtol settings, give exactly what was requested */
tempname loss1 sval1 loss2 sval2
if `lnrtoluser' > 0 | `lnonrtoluser' > 0 {
di "user did specify nrtol setting"
di "starting 1. attempt without difficult"
qui ml model d0 synth2_ll (xb: = `lpred', noconstant), ///
crittype(double) `lstd' maximize init(`lbini', copy) nowarning
mat `sval1' = e(b)
qui scalar define `loss1' = e(ll)
di "done, loss is:"
display `loss1'
di "starting 2. attempt with difficult"
/* now rerun with difficult */
qui ml model d0 synth2_ll (xb: = `lpred', noconstant), ///
crittype(double) `lstd' maximize init(`lbini', copy) nowarning difficult
mat `sval2' = e(b)
qui scalar define `loss2' = e(ll)
di "done, loss is:"
display `loss2'
}
else {
/* if he did not, try first with nrtol then without */
di "user did not specify nrtol settings"
di "starting 1. attempt with nrtol and without difficult"
qui capture ml model d0 synth2_ll (xb: = `lpred', noconstant), ///
crittype(double) `lstd' maximize init(`lbini', copy) nowarning
di "done"
if _rc { /* if it breaks down, fall back to nonrtolerance */
di "optimization crashed. trying again with nonrtol and without difficult"
qui ml model d0 synth2_ll (xb: = `lpred', noconstant), ///
crittype(double) `lstd' maximize init(`lbini', copy) nowarning nonrtolerance
mat `sval1' = e(b)
qui scalar define `loss1' = e(ll)
di "done, loss is:"
display `loss1'
}
else { /* if it does not break down, store and go on */
mat `sval1' = e(b)
qui scalar define `loss1' = e(ll)
di "optimization successful. loss is:"
display `loss1'
}
/* now rerun with difficult */
di "starting 2. attempt with nrtol and with difficult"
qui capture ml model d0 synth2_ll (xb: = `lpred', noconstant), ///
crittype(double) `lstd' maximize init(`lbini', copy) nowarning difficult
if _rc { /* if it breaks down, fall back to nonrtolerance */
di "optimization crashed. trying again with nonrtol and with difficult"
qui ml model d0 synth2_ll (xb: = `lpred', noconstant), ///
crittype(double) `lstd' maximize init(`lbini', copy) nowarning nonrtolerance difficult
mat `sval2' = e(b)
qui scalar define `loss2' = e(ll)
}
else {
mat `sval2' = e(b)
qui scalar define `loss2' = e(ll)
di "done, loss is:"
display `loss2'
}
}
di "end wrapml: results obtained"
di "loss1:"
display `loss1'
di "loss2:"
display `loss2'
di "and svals1 and 2"
/* now make a decision which reg based loss is lowest */
tempname sval lossend
if `loss1' < `loss2' {
mat `sval' = `sval2'
qui scalar define `lossend' = `loss2'
}
else {
mat `sval' = `sval1'
qui scalar define `lossend' = `loss1'
}
/* return loss and svals */
ereturn scalar lossend = `lossend'
ereturn matrix sval = `sval'
end
/* subroutine quadratic programming (C++ plugin) */
program synth2opt, plugin
|
/* subroutine lossfunction: loss function for nested optimization */
program synth2_ll
version 9.2
args todo b lnf
tempname loss bb VV H c A l u wsol
tempvar loss_var loss_final
*matrix list `b'
/* get abs constrained weights and create V */
mata: getabs("`b'")
mat `bb' = matout
mat `VV' = diag(`bb')
/* Set up quadratic programming */
mat `H' = ($Xco)' * `VV' * $Xco
mat `c' = (-1 * (($Xtr)' * `VV' * $Xco))'
mat `A' = J(1,rowsof(`c'),1)
mat `l' = J(rowsof(`c'),1,0)
mat `u' = J(rowsof(`c'),1,1)
/* Initialize read out matrix */
matrix `wsol' = `l'
/* do quadratic programming step */
plugin call synth2opt , `c' `H' `A' $bslack `l' `u' $bd $marg $maxit $sig `wsol'
/* Compute loss */
mat `loss' = ($Ztr - $Zco * `wsol')' * ( $Ztr - $Zco * `wsol')
mat colnames `loss' = `loss_var'
qui svmat double `loss' ,names(col)
qui gen double `loss_final' = -1 * `loss_var'
qui mlsum `lnf' = `loss_final' if `loss_var' ~=.
* sum `loss_final'
qui drop `loss_final' `loss_var'
end
/* subroutine quadratic programming (C++ plugin) */
program synth2opt, plugin
|
*GE_mode
global GE_mode_nothing 0
global GE_mode_custom_cmd 1
global GE_mode_trim_early_placebo 2
*Drop reason
global Synth_PE_high 2
global Synth_PE_low 3
global Synth_opt_error 4
*Unit types
global Unit_type_treated 1
global Unit_type_donor 2
label define unit_type ${Unit_type_treated} "Treated" ${Unit_type_donor} "Donor", replace
|
* Builds an mlib from mata files
* Call this with a single argument that is a list (wrap all in double quotes).
* The first element is the mlib; the rest are the mata files.
* (Has to be one list otherwise when calling -$STATABATCH do cli_build_mlib.do l/lp.mlib a/a.mata-
* the logfile will be a.log rather than cli_build_mlib.log on Windows (because of the "/"))
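* Illustrative invocation (a sketch; $STATABATCH and the file paths are taken from the comment above, purely for illustration):
*   $STATABATCH do cli_build_mlib.do "l/lp.mlib a/a.mata"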
local mlib : word 1 of `1'
local mata_files : list 1 - mlib
_getfilename `mlib'
local mlib_name `r(filename)'
local mlib_path = substr("`mlib'",1,length("`mlib'")-length("`mlib_name'"))
local mlib_base = subinstr("`mlib_name'", ".mlib","",.)
mata: mata clear
foreach mata_file in `mata_files'{
do "`mata_file'"
}
mac dir
mata: mata mlib create `mlib_base', replace dir("`mlib_path'")
mata: mata mlib add `mlib_base' *()
|
sysdir set PERSONAL "."
sysdir set PLUS "." //some commands think this has to be in S_ADO
global S_ADO "PERSONAL;BASE"
net set ado PERSONAL
mata: mata mlib index
|
*! version 1.1.0 05oct1999 Jeroen Weesie/ICS (STB-53: dm75) (modified)
*http://www.stata.com/statalist/archive/2007-03/msg00584.html
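* Illustrative usage (a sketch using the shipped auto dataset):
*   sysuse auto, clear
*   tabl foreign rep78, width(30)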
program define tabl
version 6.0
syntax varlist [if ] [in] [, Width(int 40)]
tokenize `varlist'
while "`1'" != "" {
Tabl `1' `if' `in', width(`width')
mac shift
}
end
/* Tabl varname [if] [in], width(#)
displays a one-var tabulate with varlabel and value labels,
wrapping if their length exceeds width
*/
program define Tabl
syntax varname [if] [in], Width(int)
marksample touse, novarlist
local lab : value label `varlist'
if "`lab'" == "" {
* no value labels
tab `varlist' if `touse', miss
exit
}
tempname freq code
qui tab `varlist' if `touse', matcell(`freq') matrow(`code')
if r(N) == 0 {
exit
}
local N = r(N)
local vlab : var label `varlist'
if `"`vlab'"' == "" {
local vlab `varlist'
}
else local vlab `varlist' (`vlab')
* determine max length of varlabel and value labels
local len = length(`"`vlab'"')
local i 1
while `i' <= rowsof(`freq') {
local ci = `code'[`i',1]
local li : label (`varlist') `ci'
local len = max(`len', length(`"`li'"'))
local i = `i'+1
}
if `len' > `width' {
local len = `width'
}
local col1 = `len' + 3
di
local vlab1 : piece 1 `len' of `"`vlab'"'
local vlab2 : piece 2 `len' of `"`vlab'"'
local i 2
while `"`vlab2'"' ~= "" {
di in gr `"`vlab1'"'
local vlab1 `vlab2'
local i = `i'+1
local vlab2 : piece `i' `len' of `"`vlab'"'
}
di in gr "`vlab1'" _col(`col1') " code | freq "
di in gr _dup(`col1') "-" "--------+--------"
local i 1
while `i' <= rowsof(`freq') {
local ci = `code'[`i',1]
local li : label (`varlist') `ci'
local pli : piece 1 `len' of `"`li'"'
di in gr %`len's `"`pli'"' _col(`col1') %6.0f `ci' /*
*/ " |" in ye %7.0f `freq'[`i',1]
* display the rest of the value label
local j 2
local pli : piece `j' `len' of `"`li'"'
while `"`pli'"' != "" {
di in gr %`len's `" `pli'"' _col(`col1') " |"
local j = `j'+1
local pli : piece `j' `len' of `"`li'"'
}
local i = `i'+1
}
qui count if (`varlist'==.) & (`touse'==1)
if r(N) > 0 {
di in gr _dup(`col1') "-" "--------+--------"
di in gr %`len's "<missing value>" _col(`col1') " . |" /*
*/ in ye %7.0f r(N)
}
di in gr _dup(`col1') "-" "--------+--------"
di in gr _col(`col1') " Total |" in ye %7.0f = `N'+r(N)
end
|
*! version 1.0 Brian Quistorff <bquistorff@gmail.com>
*! Makes compressed scales (like log scales) when both positive and negative numbers exist.
*! It makes a linear patch in the middle.
*! Usage:
*! trilog , source(orig_var) generate(new_var)
*! trilog , labels(-100 -10 -1 0 1 10 100)
*! local new_labels `"`r(retlabel)'"'
*! twoway (line new_var x), ylabels(`new_labels')
program trilog, rclass
version 11.0
*Just a guess at the version
syntax , [source(string) generate(string) labels(string)]
if "`source'"!=""{
generate `generate' = `source'/exp(1)
qui replace `generate' = ln( `source') if `source'> exp(1)
qui replace `generate' = -ln(-1*`source') if `source'<-1*exp(1)
}
if "`labels'"!=""{
foreach label in `labels'{
local translabel = `label'/exp(1)
if `label'> exp(1) local translabel = ln( `label')
if `label'<-1*exp(1) local translabel = -ln(-1*`label')
local retlabel `"`retlabel' `translabel' "`label'""'
}
return local retlabel `"`retlabel'"'
}
end
|
log using "net_test.log", replace
local dirs :dir .. dirs ?
foreach dir in `dirs'{
local pkgs : dir "../`dir'/" files "*.pkg"
foreach pkg in `pkgs'{
local pkg_name = substr("`pkg'",1, length("`pkg'")-4)
net describe `pkg_name', from (https://raw.github.com/bquistorff/Stata-modules/master/`dir'/)
}
}
log close
|
sysdir set PLUS "`c(pwd)'/../"
sysdir set PERSONAL "`c(pwd)'/../"
log using testall.log, name(testall) replace
local dirs : dir . dirs *
foreach dir in `dirs'{
cd `dir'
do test.do
cd ..
}
do net_tests.do
log close testall
|
*Returns the maximum (and mean) difference across treatment groups in the number of observations per cell
* can omit cell_var (the first arg will then be treat_var)
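* Illustrative usage (a sketch; the variable names are hypothetical, and cells are assumed numbered 1..max):
*   cell_count_diff_per_t strata_cell treatment
*   return list   // r(max) and r(mean)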
program cell_count_diff_per_t, rclass
args cell_var treat_var
if "`treat_var'"==""{
local treat_var = "`cell_var'"
tempvar cell_var
gen byte `cell_var' = 1
}
summ `cell_var', meanonly
local cell_var_max = r(max)
preserve
contract `treat_var' `cell_var'
local currmax = 0
local currsum = 0
qui drop if mi(`treat_var')
forval g=1/`cell_var_max'{
summ _freq if `cell_var'==`g', meanonly
local currdiff = cond(mi(r(max))& mi(r(min)),0,r(max) - r(min))
if `currdiff'>`currmax' local currmax = `currdiff'
local currsum = `currsum' + `currdiff'
}
restore
return scalar max = `currmax'
return scalar mean = `currsum'/`cell_var_max'
end
|