Last updated on 2026-03-30 07:53:23 CEST.
| Flavor | Version | Tinstall | Tcheck | Ttotal | Status | Flags |
|---|---|---|---|---|---|---|
| r-devel-linux-x86_64-debian-clang | 0.1.1 | 29.66 | 676.63 | 706.29 | OK | |
| r-devel-linux-x86_64-debian-gcc | 0.1.1 | 15.94 | 537.23 | 553.17 | OK | |
| r-devel-linux-x86_64-fedora-clang | 0.1.1 | 42.00 | 722.04 | 764.04 | ERROR | |
| r-devel-linux-x86_64-fedora-gcc | 0.1.1 | 47.00 | 1193.40 | 1240.40 | OK | |
| r-devel-macos-arm64 | 0.1.1 | 6.00 | 136.00 | 142.00 | OK | |
| r-devel-windows-x86_64 | 0.1.1 | 29.00 | 565.00 | 594.00 | OK | |
| r-patched-linux-x86_64 | 0.1.1 | 23.46 | 681.91 | 705.37 | OK | |
| r-release-linux-x86_64 | 0.1.1 | 23.50 | 684.43 | 707.93 | OK | |
| r-release-macos-arm64 | 0.1.1 | | | | OK | |
| r-release-macos-x86_64 | 0.1.1 | 15.00 | 434.00 | 449.00 | OK | |
| r-release-windows-x86_64 | 0.1.1 | 27.00 | 556.00 | 583.00 | OK | |
| r-oldrel-macos-arm64 | 0.1.1 | | | | NOTE | |
| r-oldrel-macos-x86_64 | 0.1.1 | 16.00 | 456.00 | 472.00 | NOTE | |
| r-oldrel-windows-x86_64 | 0.1.1 | 38.00 | 710.00 | 748.00 | NOTE | |
Version: 0.1.1
Check: re-building of vignette outputs
Result: ERROR
Error(s) in re-building vignettes:
--- re-building ‘cram_bandit.Rmd’ using rmarkdown
--- finished re-building ‘cram_bandit.Rmd’
--- re-building ‘cram_bandit_helpers.Rmd’ using rmarkdown
--- finished re-building ‘cram_bandit_helpers.Rmd’
--- re-building ‘cram_bandit_simulation.Rmd’ using rmarkdown
--- finished re-building ‘cram_bandit_simulation.Rmd’
--- re-building ‘cram_ml.Rmd’ using rmarkdown
--- finished re-building ‘cram_ml.Rmd’
--- re-building ‘cram_policy_part_1.Rmd’ using rmarkdown
--- finished re-building ‘cram_policy_part_1.Rmd’
--- re-building ‘cram_policy_part_2.Rmd’ using rmarkdown
--- finished re-building ‘cram_policy_part_2.Rmd’
--- re-building ‘cram_policy_simulation.Rmd’ using rmarkdown
*** caught segfault ***
address 0x7fa58bdf69c8, cause 'memory not mapped'
Traceback:
1: (function (train_matrix, outcome_index, sample_weight_index, use_sample_weights, mtry, num_trees, min_node_size, sample_fraction, honesty, honesty_fraction, honesty_prune_leaves, ci_group_size, alpha, imbalance_penalty, clusters, samples_per_cluster, compute_oob_predictions, num_threads, seed, legacy_seed, verbose) { .Call("_grf_regression_train", PACKAGE = "grf", train_matrix, outcome_index, sample_weight_index, use_sample_weights, mtry, num_trees, min_node_size, sample_fraction, honesty, honesty_fraction, honesty_prune_leaves, ci_group_size, alpha, imbalance_penalty, clusters, samples_per_cluster, compute_oob_predictions, num_threads, seed, legacy_seed, verbose)})(outcome_index = 3L, sample_weight_index = 4, use_sample_weights = FALSE, train_matrix = c(0, 1, 0, 0, 1, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 1, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 1, 0, 1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 3, 2, 2, 4, 5, 5, 5, 3, 3, 4, 3, 2, 4, 3, 4, 3, 1, 5, 3, 3, 1, 2, 5, 2, 3, 5, 2, 1, 2, 3, 1, 4, 3, 3, 5, 5, 1, 4, 5, 3, 3, 2, 5, 1, 5, 1, 2, 3, 1, 5, 4, 3, 1, 2, 3, 3, 2, 5, 1, 4, 2, 4, 5, 5, 2, 4, 1, 3, 3, 1, 1, 2, 2, 3, 3, 5, 2, 1, 5, 2, 4, 2, 1, 5, 1, 1, 4, 2, 3, 1, 1, 5, 3, 2, 4, 3, 3, 1, 5, 5, 2, 4, 1, 1, 1, 4, 1, 4, 3, 4, 
2, 4, 4, 5, 3, 5, 4, 5, 4, 3, 2, 3, 1, 1, 2, 4, 1, 1, 1, 3, 2, 3, 5, 2, 5, 4, 4, 3, 1, 4, 5, 3, 5, 4, 4, 3, 3, 5, 5, 5, 2, 1, 5, 5, 3, 1, 3, 1, 3, 5, 5, 2, 1, 5, 5, 5, 4, 1, 2, 5, 3, 3, 2, 3, 4, 2, 4, 3, 3, 2, 5, 3, 5, 5, 4, 3, 4, 3, 1, 5, 3, 3, 1, 5, 5, 2, 2, 1, 1, 5, 2, 4, 3, 1, 5, 5, 5, 1, 5, 2, 3, 1, 5, 5, 2, 1, 4, 4, 3, 3, 3, 4, 1, 3, 3, 5, 3, 5, 5, 2, 5, 2, 4, 1, 2, 3, 4, 2, 4, 3, 3, 3, 4, 3, 2, 5, 3, 2, 2, 3, 5, 1, 3, 1, 5, 5, 2, 3, 3, 5, 5, 4, 3, 3, 3, 5, 5, 5, 1, 4, 1, 5, 1, 2, 4, 2, 4, 3, 2, 4, 4, 2, 3, 5, 3, 5, 2, 3, 2, 3, 5, 5, 5, 1, 2, 2, 4, 2, 3, 2, -1.28703047603518, -0.280395335170247, -1.06332613397119, -0.530906522170303, -0.41433994791886, 0.54319405923209, -0.594617267459511, -1.00837660827701, 0.636569674033849, 0.0377883991710788, 1.31241297643351, 0.976973386685621, -0.372438756103829, -1.60153617357459, -1.04917700666607, 3.2410399349424, -1.25127136162494, -0.895363357977542, 0.636569674033849, -1.60153617357459, -1.4617555849959, -0.57438868976327, 0.76904224100091, -0.70459646368007, 0.0945835281735714, 0.332202578950118, 0.119245236427584, -1.4617555849959, -1.2847157223178, -1.28703047603518, -1.26015524475811, -1.31080153332797, 0.436523478910183, -1.00837660827701, 0.310480749443137, 0.54319405923209, -1.57214415914549, -0.262197489402468, 0.54319405923209, 0.0945835281735714, -0.236279568941097, -1.2847157223178, -0.0903195939658516, 0.298227591540715, -0.0903195939658516, -0.458365332711106, -0.611165916680421, 0.368964527385086, -1.26015524475811, -0.182925388372727, -0.0540281250854405, -1.60153617357459, -1.51466765378175, 0.119245236427584, 2.12845189901618, 0.214445309581601, 0.516862044313609, 0.332202578950118, 1.26318517608949, -0.324685911490835, -0.349650387953555, 0.243687429599092, -0.895363357977542, -0.865512862653374, 0.787738847475178, -0.372438756103829, -1.57214415914549, -1.00837660827701, 2.12845189901618, 1.26318517608949, -1.23627311888329, 0.787738847475178, -0.349650387953555, 0.32430434416138, 
3.2410399349424, 0.214538826629216, 0.707588353835588, -0.499292017172261, -0.0903195939658516, -0.70459646368007, -1.04917700666607, -0.349650387953555, 1.65090746733669, 1.67569693240319, 1.95529396549246, 1.10984813892972, -1.31080153332797, -0.280395335170247, -1.00837660827701, -1.57214415914549, -0.781536487054751, 0.0652930335253153, 0.418982404924464, -0.349650387953555, 1.10992028971364, 0.368964527385086, -0.236279568941097, -1.4617555849959, -0.992507150392037, 0.76904224100091, -0.280395335170247, 0.243687429599092, 1.95529396549246, 0.0847372921971965, -0.499292017172261, -0.197175894348552, -1.57214415914549, 1.05271146557933, 0.687916772975828, 0.0597499373846007, -0.611165916680421, -0.0540281250854405, -0.372438756103829, 1.23247587848534, 2.12845189901618, -0.865512862653374, -0.530906522170303, -0.895363357977542, 1.99721338474797, 1.31241297643351, -0.374580857767014, -0.236279568941097, 1.26318517608949, -1.01559257860354, -0.374580857767014, -0.372438756103829, -0.441163216905286, 1.95529396549246, 0.0847372921971965, 0.32430434416138, -0.119452606630659, 0.88465049897692, -0.594617267459511, -0.611165916680421, -0.41433994791886, 1.99721338474797, 1.99721338474797, -1.28703047603518, -1.01559257860354, 2.19881034888372, 0.617985817166529, 3.2410399349424, -0.182925388372727, 0.0597499373846007, 1.05271146557933, -1.60153617357459, 1.31241297643351, -0.573973479297987, 0.310480749443137, -0.992507150392037, -1.06332613397119, -0.458365332711106, -0.51606383094478, 0.332202578950118, -1.28703047603518, -0.499292017172261, -1.60153617357459, -1.4617555849959, 0.0945835281735714, -0.594617267459511, 1.23247587848534, -0.611165916680421, -0.499292017172261, -0.483780625708744, -0.895363357977542, 2.10010894052567, -0.262197489402468, -1.26015524475811, -0.280395335170247, 0.332202578950118, 0.636569674033849, -0.236279568941097, 0.707588353835588, -1.28703047603518, 1.05271146557933, -0.119452606630659, -0.324685911490835, 0.418982404924464, 
0.32430434416138, -0.476246894615578, 1.23247587848534, -0.738527704739573, 0.214538826629216, 0.76904224100091, -0.262197489402468, -1.00837660827701, 0.243687429599092, 0.88465049897692, 0.701784335374711, -0.034067253738464, 0.88465049897692, 0.0945835281735714, 1.65090746733669, 0.310480749443137, -0.182925388372727, -0.71721816157401, 0.787738847475178, -1.4617555849959, 1.65090746733669, 0.332202578950118, 0.516862044313609, -0.741336096272828, -0.788602837850243, -1.51466765378175, -0.0903195939658516, 1.67569693240319, -1.31701613230524, -0.441163216905286, -0.992507150392037, -0.349650387953555, -1.28703047603518, -0.265145056696353, 1.23247587848534, -0.483780625708744, 0.119245236427584, 0.298227591540715, -0.372438756103829, 0.600708823672418, 0.0945835281735714, 0.214445309581601, -0.236279568941097, -0.197175894348552, 1.95529396549246, 0.418982404924464, 0.214445309581601, -0.594617267459511, -0.788602837850243, -0.182925388372727, -0.363657297095253, 0.707588353835588, -0.992507150392037, -0.723065969939874, 2.19881034888372, 1.65090746733669, 0.516862044313609, 2.12845189901618, 0.243687429599092, -0.57438868976327, 0.754053785184521, -1.18548008459731, 0.436523478910183, 0.418982404924464, 1.99721338474797, 0.636569674033849, -0.215380507641693, 0.214538826629216, -1.18548008459731, 0.56298953322048, -0.71721816157401, -0.788602837850243, -0.41433994791886, -0.441163216905286, 0.418982404924464, 0.298227591540715, 0.76904224100091, 2.10010894052567, -0.611165916680421, 3.2410399349424, 0.32430434416138, -0.0903195939658516, -0.992507150392037, -1.04917700666607, 0.368964527385086, 3.2410399349424, 0.0945835281735714, -0.51606383094478, -0.034067253738464, -0.483780625708744, -0.458365332711106, -0.0540281250854405, -0.499292017172261, -0.51606383094478, 1.10984813892972, -0.119452606630659, -0.530906522170303, -0.70459646368007, -1.31080153332797, 1.31241297643351, 0.976973386685621, -0.530906522170303, -0.0540281250854405, -0.723065969939874, 
0.418982404924464, -0.034067253738464, 0.436523478910183, -0.51606383094478, -0.280395335170247, 0.88465049897692, -0.723065969939874, 0.436523478910183, 2.10010894052567, -0.992507150392037, -0.865512862653374, -1.25127136162494, -0.57438868976327, -0.476246894615578, -0.197175894348552, 0.707588353835588, 0.687916772975828, -0.280395335170247, 1.0793502446225, -0.807425372083713, -0.675429294433325, 0.0778488776875013, 0.302014293100801, 2.907155432613, 0.235102422578912, -0.294262932775825, -0.856909910290467, -0.0281862015349582, -0.555407523216953, -0.192452740465306, -0.129731965781434, -1.37901996967488, -0.557695772464678, 0.176495983326576, 1.43943114819245, -1.47139655320819, 1.23649309689803, -2.9419823732506, -0.0210923254962712, -0.336171599004456, -0.604012570765805, 1.4626353787996, -0.80151784589277, -0.334686893420814, 2.25023344027751, 0.165798856163565, 0.812095188602958, -0.652996697179975, 0.845217419118375, -0.719096113051879, 0.51108523759244, 0.343932007092971, -0.255247117916601, -2.43160944028893, -0.397483312736214, -1.15618693969407, 0.647415741989621, -0.164128886580336, -1.8556731107073, -1.14200704317064, 0.721250958019734, 0.667191301653508, 0.834794975718168, -1.95508411115506, -0.75431633950442, 2.69122212898041, 0.659944427136795, 0.0614694508975588, -2.09202942685262, -1.74524920183974, 2.3447618431492, 0.774830200925732, -0.672613771148493, -0.583486288176347, 0.805618816172016, -1.57094426371528, 0.382468899042099, -0.513943899173967, 0.980766523313298, -0.29374665204501, 1.1176772949158, -1.20905695371922, 1.28291266867267, -2.10613086538395, 0.482063644339884, -0.729418024377206, 1.52699168928426, 0.327349747637467, 0.179555394414968, -0.453752584919967, 1.21066109856515, -0.219142409044835, -0.705423322213934, -1.89932798326328, 0.498027933922633, -0.363598625026532, 0.598864405114883, 0.130870713419253, -2.51918676611661, -0.291046175078048, 0.578397476240921, -0.84671523912058, -0.234887891925256, 0.0551264965370913, 
-0.365133173315097, -0.874997602631346, -0.787945743088567, -1.00074810975902, 0.891955894058927, 0.264926895822003, 0.445591672102194, 1.23521630704007, -0.709321973485272, 1.64371836897973, -0.363165839824306, 1.06281763854744, 1.64054083080228, -0.0260348443913187, 2.21205012132364, -1.75052398316875, 0.544453433443979, -0.496430647415581, -0.656852066157813, -0.651781954989303, 0.942166146478243, -0.623253997322409, -0.296820737834909, 1.08532680118658, -0.358810754075613, -1.35958549698966, -0.592294505276967, 0.546825629048136, 0.831332570790525, -0.0543891788998037, -1.54399417586589, -1.04585727075305, -1.52459461855862, -1.17824475827843, 0.451560348999358, -0.147291217898845, 1.67664020765241, 1.4357243662162, -0.587662362990722, -0.716939093537825, -1.1660025194875, -0.144016351527263, -0.855314003414081, -0.540141718145293, -0.389361522863483, 0.383143203862102, 0.503532258044294, 0.173882819375975, 0.451480495524697, -2.77051132483852, -1.69868123107127, -1.09371178698431, 0.575382640026715, 0.337442714748719, -1.40764602302186, 1.32749939534985, 2.09725183226329, 0.70743368417554, 1.0347500997819, -1.35052082894787, -0.875490541083933, -1.25680718377597, -0.670696558662698, -3.54553942668715, -0.496479112681606, -1.54209727643676, -2.49062960086617, -0.476113468702838, 2.11402956729802, 0.997913117165253, -0.35868340513966, -1.04577629347268, -0.932462869042157, 1.2288189178366, 0.101005239496454, 0.746112124239256, 2.53203551619102, 0.814495272050397, 2.40270167951243, 1.24894732071314, 0.472837453945226, 1.29627094476308, 0.592437657180438, 2.19567934987648, -0.831864926399042, 0.781769611565873, -1.64390247412825, -0.369604553423437, -0.360820732343141, 0.175804222874855, 0.0639644440505875, 0.451132682585146, 0.339690089071918, 0.48207711421286, -1.28692953066192, -3.30746786087678, 0.128616325393531, -0.0264734111593823, -0.780911830494759, -0.956567509224197, -0.448461328515932, -0.35119137042556, -1.48739132325075, 0.660120642136449, 
0.629116666391144, -0.162575626339956, 1.46655525660769, 2.81277881863909, -0.0485469751165366, 0.944702033169487, 0.907361207601, 1.5306532874506, 0.977999387600918, -0.180871245132944, 0.272786287235605, -0.0115852843038398, 0.528228443051922, 1.16794743156737, 1.19347695510408, 0.884558670184623, -0.345310153107876, -0.111285287227562, -1.19030902629077, -0.223073344004581, -0.632733445820258, 0.134351503088681, 0.854991730951167, 0.88463504303066, -1.00172509573693, 0.823105269525756, -2.37232518648289, -1.1014758800273, -0.956984808730151, -0.638248570399217, 0.695261141479704, 0.146811272742742, -0.127174777865302, -1.22043437481134, 0.248953571333043, 2.47320224016077, 0.152033813336257, -0.375387309354338, 1.23076822409262, -1.40846623197918, -0.477054883747726, 0.419509304670655, -0.454321914836378, 1.10821972212305, 0.852151602668729, -0.494424550943353, 1.74451922640261, 0.26377021191008, -1.04624502067665, -0.596220736438388, 0.391910737612717, -0.813150922607861, -0.994105423466496, 1.73571541235549, -0.633656634162124, 1.38820301740402, 2.19420304735806, -0.620844234965996, 0.295579297369821, 0.052140928993251, 0.178056306099694, 1.33559171693097, 0.30115772954781, 1.22868464393732, 0.867614441367613, -0.980412685235036, -2.29149284538732, 1.9649849849111, 0.921776208956185, 1.32479542873807, -0.143455072595653, 1.83738854391794, 0.213578245912108, -0.721877431949363, -0.229407708013512, -1.03345171862408, -1.51204544863694, 1.05530008373746, 0.134833676626422, -0.654877153049159, 0.142574836786277, -2.61872277686514, -0.121779728042941, 1.1880783833751, 0.598083762065646, 0.48801244367152, -0.114775264232662, -0.14269070196127, -0.183705466274104, -0.366713648179442, 0.829302698044069, 2.13292349046699, -0.618083443860038, -0.284644580662871, -0.740495671246079, 0.474904452983999, -0.48477729534222, 0.767778021360685, -1.10235728010711, 0.238035514275665, 0.41811656235061, -2.21365931326139, -0.493859453213328, 1.32716567050129, -0.083577670239988, 
1.97280958480762, -0.370729069943725, 0.40481137917452, -0.279492027262978, 1.13434321910827), num_trees = 50, clusters = numeric(0), samples_per_cluster = 0, sample_fraction = 0.5, mtry = 3, min_node_size = 5, honesty = TRUE, honesty_fraction = 0.5, honesty_prune_leaves = TRUE, alpha = 0.05, imbalance_penalty = 0, ci_group_size = 1, compute_oob_predictions = TRUE, num_threads = 0, seed = 606721005.217474, legacy_seed = FALSE, verbose = FALSE)
2: do.call(what, args, quote, envir)
3: do.call.rcpp(regression_train, c(data, args))
4: (function (X, Y, num.trees = 2000, sample.weights = NULL, clusters = NULL, equalize.cluster.weights = FALSE, sample.fraction = 0.5, mtry = min(ceiling(sqrt(ncol(X)) + 20), ncol(X)), min.node.size = 5, honesty = TRUE, honesty.fraction = 0.5, honesty.prune.leaves = TRUE, alpha = 0.05, imbalance.penalty = 0, ci.group.size = 2, tune.parameters = "none", tune.num.trees = 50, tune.num.reps = 100, tune.num.draws = 1000, compute.oob.predictions = TRUE, num.threads = NULL, seed = runif(1, 0, .Machine$integer.max)) { has.missing.values <- validate_X(X, allow.na = TRUE) validate_sample_weights(sample.weights, X) Y <- validate_observations(Y, X) clusters <- validate_clusters(clusters, X) samples.per.cluster <- validate_equalize_cluster_weights(equalize.cluster.weights, clusters, sample.weights) num.threads <- validate_num_threads(num.threads) all.tunable.params <- c("sample.fraction", "mtry", "min.node.size", "honesty.fraction", "honesty.prune.leaves", "alpha", "imbalance.penalty") default.parameters <- list(sample.fraction = 0.5, mtry = min(ceiling(sqrt(ncol(X)) + 20), ncol(X)), min.node.size = 5, honesty.fraction = 0.5, honesty.prune.leaves = TRUE, alpha = 0.05, imbalance.penalty = 0) data <- create_train_matrices(X, outcome = Y, sample.weights = sample.weights) args <- list(num.trees = num.trees, clusters = clusters, samples.per.cluster = samples.per.cluster, sample.fraction = sample.fraction, mtry = mtry, min.node.size = min.node.size, honesty = honesty, honesty.fraction = honesty.fraction, honesty.prune.leaves = honesty.prune.leaves, alpha = alpha, imbalance.penalty = imbalance.penalty, ci.group.size = ci.group.size, compute.oob.predictions = compute.oob.predictions, num.threads = num.threads, seed = seed, legacy.seed = get_legacy_seed(), verbose = get_verbose()) tuning.output <- NULL if (!identical(tune.parameters, "none")) { if (identical(tune.parameters, "all")) { tune.parameters <- all.tunable.params } else { tune.parameters <- unique(match.arg(tune.parameters, 
all.tunable.params, several.ok = TRUE)) } if (!honesty) { tune.parameters <- tune.parameters[!grepl("honesty", tune.parameters)] } tune.parameters.defaults <- default.parameters[tune.parameters] tuning.output <- tune_forest(data = data, nrow.X = nrow(X), ncol.X = ncol(X), args = args, tune.parameters = tune.parameters, tune.parameters.defaults = tune.parameters.defaults, tune.num.trees = tune.num.trees, tune.num.reps = tune.num.reps, tune.num.draws = tune.num.draws, train = regression_train) args <- utils::modifyList(args, as.list(tuning.output[["params"]])) } forest <- do.call.rcpp(regression_train, c(data, args)) class(forest) <- c("regression_forest", "grf") forest[["seed"]] <- seed forest[["num.threads"]] <- num.threads forest[["ci.group.size"]] <- ci.group.size forest[["X.orig"]] <- X forest[["Y.orig"]] <- Y forest[["sample.weights"]] <- sample.weights forest[["clusters"]] <- clusters forest[["equalize.cluster.weights"]] <- equalize.cluster.weights forest[["tunable.params"]] <- args[all.tunable.params] forest[["tuning.output"]] <- tuning.output forest[["has.missing.values"]] <- has.missing.values forest})(Y = c(1.0793502446225, -0.807425372083713, -0.675429294433325, 0.0778488776875013, 0.302014293100801, 2.907155432613, 0.235102422578912, -0.294262932775825, -0.856909910290467, -0.0281862015349582, -0.555407523216953, -0.192452740465306, -0.129731965781434, -1.37901996967488, -0.557695772464678, 0.176495983326576, 1.43943114819245, -1.47139655320819, 1.23649309689803, -2.9419823732506, -0.0210923254962712, -0.336171599004456, -0.604012570765805, 1.4626353787996, -0.80151784589277, -0.334686893420814, 2.25023344027751, 0.165798856163565, 0.812095188602958, -0.652996697179975, 0.845217419118375, -0.719096113051879, 0.51108523759244, 0.343932007092971, -0.255247117916601, -2.43160944028893, -0.397483312736214, -1.15618693969407, 0.647415741989621, -0.164128886580336, -1.8556731107073, -1.14200704317064, 0.721250958019734, 0.667191301653508, 0.834794975718168, 
-1.95508411115506, -0.75431633950442, 2.69122212898041, 0.659944427136795, 0.0614694508975588, -2.09202942685262, -1.74524920183974, 2.3447618431492, 0.774830200925732, -0.672613771148493, -0.583486288176347, 0.805618816172016, -1.57094426371528, 0.382468899042099, -0.513943899173967, 0.980766523313298, -0.29374665204501, 1.1176772949158, -1.20905695371922, 1.28291266867267, -2.10613086538395, 0.482063644339884, -0.729418024377206, 1.52699168928426, 0.327349747637467, 0.179555394414968, -0.453752584919967, 1.21066109856515, -0.219142409044835, -0.705423322213934, -1.89932798326328, 0.498027933922633, -0.363598625026532, 0.598864405114883, 0.130870713419253, -2.51918676611661, -0.291046175078048, 0.578397476240921, -0.84671523912058, -0.234887891925256, 0.0551264965370913, -0.365133173315097, -0.874997602631346, -0.787945743088567, -1.00074810975902, 0.891955894058927, 0.264926895822003, 0.445591672102194, 1.23521630704007, -0.709321973485272, 1.64371836897973, -0.363165839824306, 1.06281763854744, 1.64054083080228, -0.0260348443913187, 2.21205012132364, -1.75052398316875, 0.544453433443979, -0.496430647415581, -0.656852066157813, -0.651781954989303, 0.942166146478243, -0.623253997322409, -0.296820737834909, 1.08532680118658, -0.358810754075613, -1.35958549698966, -0.592294505276967, 0.546825629048136, 0.831332570790525, -0.0543891788998037, -1.54399417586589, -1.04585727075305, -1.52459461855862, -1.17824475827843, 0.451560348999358, -0.147291217898845, 1.67664020765241, 1.4357243662162, -0.587662362990722, -0.716939093537825, -1.1660025194875, -0.144016351527263, -0.855314003414081, -0.540141718145293, -0.389361522863483, 0.383143203862102, 0.503532258044294, 0.173882819375975, 0.451480495524697, -2.77051132483852, -1.69868123107127, -1.09371178698431, 0.575382640026715, 0.337442714748719, -1.40764602302186, 1.32749939534985, 2.09725183226329, 0.70743368417554, 1.0347500997819, -1.35052082894787, -0.875490541083933, -1.25680718377597, -0.670696558662698, 
-3.54553942668715, -0.496479112681606, -1.54209727643676, -2.49062960086617, -0.476113468702838, 2.11402956729802, 0.997913117165253, -0.35868340513966, -1.04577629347268, -0.932462869042157, 1.2288189178366, 0.101005239496454, 0.746112124239256, 2.53203551619102, 0.814495272050397, 2.40270167951243, 1.24894732071314, 0.472837453945226, 1.29627094476308, 0.592437657180438, 2.19567934987648, -0.831864926399042, 0.781769611565873, -1.64390247412825, -0.369604553423437, -0.360820732343141, 0.175804222874855, 0.0639644440505875, 0.451132682585146, 0.339690089071918, 0.48207711421286, -1.28692953066192, -3.30746786087678, 0.128616325393531, -0.0264734111593823, -0.780911830494759, -0.956567509224197, -0.448461328515932, -0.35119137042556, -1.48739132325075, 0.660120642136449, 0.629116666391144, -0.162575626339956, 1.46655525660769, 2.81277881863909, -0.0485469751165366, 0.944702033169487, 0.907361207601, 1.5306532874506, 0.977999387600918, -0.180871245132944, 0.272786287235605, -0.0115852843038398, 0.528228443051922, 1.16794743156737, 1.19347695510408, 0.884558670184623, -0.345310153107876, -0.111285287227562, -1.19030902629077, -0.223073344004581, -0.632733445820258, 0.134351503088681, 0.854991730951167, 0.88463504303066, -1.00172509573693, 0.823105269525756, -2.37232518648289, -1.1014758800273, -0.956984808730151, -0.638248570399217, 0.695261141479704, 0.146811272742742, -0.127174777865302, -1.22043437481134, 0.248953571333043, 2.47320224016077, 0.152033813336257, -0.375387309354338, 1.23076822409262, -1.40846623197918, -0.477054883747726, 0.419509304670655, -0.454321914836378, 1.10821972212305, 0.852151602668729, -0.494424550943353, 1.74451922640261, 0.26377021191008, -1.04624502067665, -0.596220736438388, 0.391910737612717, -0.813150922607861, -0.994105423466496, 1.73571541235549, -0.633656634162124, 1.38820301740402, 2.19420304735806, -0.620844234965996, 0.295579297369821, 0.052140928993251, 0.178056306099694, 1.33559171693097, 0.30115772954781, 1.22868464393732, 
0.867614441367613, -0.980412685235036, -2.29149284538732, 1.9649849849111, 0.921776208956185, 1.32479542873807, -0.143455072595653, 1.83738854391794, 0.213578245912108, -0.721877431949363, -0.229407708013512, -1.03345171862408, -1.51204544863694, 1.05530008373746, 0.134833676626422, -0.654877153049159, 0.142574836786277, -2.61872277686514, -0.121779728042941, 1.1880783833751, 0.598083762065646, 0.48801244367152, -0.114775264232662, -0.14269070196127, -0.183705466274104, -0.366713648179442, 0.829302698044069, 2.13292349046699, -0.618083443860038, -0.284644580662871, -0.740495671246079, 0.474904452983999, -0.48477729534222, 0.767778021360685, -1.10235728010711, 0.238035514275665, 0.41811656235061, -2.21365931326139, -0.493859453213328, 1.32716567050129, -0.083577670239988, 1.97280958480762, -0.370729069943725, 0.40481137917452, -0.279492027262978, 1.13434321910827), X = c(0, 1, 0, 0, 1, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 1, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 1, 0, 1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 3, 2, 2, 4, 5, 5, 5, 3, 3, 4, 3, 2, 4, 3, 4, 3, 1, 5, 3, 3, 1, 2, 5, 2, 3, 5, 2, 1, 2, 3, 1, 4, 3, 3, 5, 5, 1, 4, 5, 3, 3, 2, 5, 1, 5, 1, 2, 3, 1, 5, 4, 3, 1, 2, 3, 3, 2, 5, 1, 4, 2, 4, 5, 5, 2, 4, 1, 3, 3, 1, 1, 2, 
2, 3, 3, 5, 2, 1, 5, 2, 4, 2, 1, 5, 1, 1, 4, 2, 3, 1, 1, 5, 3, 2, 4, 3, 3, 1, 5, 5, 2, 4, 1, 1, 1, 4, 1, 4, 3, 4, 2, 4, 4, 5, 3, 5, 4, 5, 4, 3, 2, 3, 1, 1, 2, 4, 1, 1, 1, 3, 2, 3, 5, 2, 5, 4, 4, 3, 1, 4, 5, 3, 5, 4, 4, 3, 3, 5, 5, 5, 2, 1, 5, 5, 3, 1, 3, 1, 3, 5, 5, 2, 1, 5, 5, 5, 4, 1, 2, 5, 3, 3, 2, 3, 4, 2, 4, 3, 3, 2, 5, 3, 5, 5, 4, 3, 4, 3, 1, 5, 3, 3, 1, 5, 5, 2, 2, 1, 1, 5, 2, 4, 3, 1, 5, 5, 5, 1, 5, 2, 3, 1, 5, 5, 2, 1, 4, 4, 3, 3, 3, 4, 1, 3, 3, 5, 3, 5, 5, 2, 5, 2, 4, 1, 2, 3, 4, 2, 4, 3, 3, 3, 4, 3, 2, 5, 3, 2, 2, 3, 5, 1, 3, 1, 5, 5, 2, 3, 3, 5, 5, 4, 3, 3, 3, 5, 5, 5, 1, 4, 1, 5, 1, 2, 4, 2, 4, 3, 2, 4, 4, 2, 3, 5, 3, 5, 2, 3, 2, 3, 5, 5, 5, 1, 2, 2, 4, 2, 3, 2, -1.28703047603518, -0.280395335170247, -1.06332613397119, -0.530906522170303, -0.41433994791886, 0.54319405923209, -0.594617267459511, -1.00837660827701, 0.636569674033849, 0.0377883991710788, 1.31241297643351, 0.976973386685621, -0.372438756103829, -1.60153617357459, -1.04917700666607, 3.2410399349424, -1.25127136162494, -0.895363357977542, 0.636569674033849, -1.60153617357459, -1.4617555849959, -0.57438868976327, 0.76904224100091, -0.70459646368007, 0.0945835281735714, 0.332202578950118, 0.119245236427584, -1.4617555849959, -1.2847157223178, -1.28703047603518, -1.26015524475811, -1.31080153332797, 0.436523478910183, -1.00837660827701, 0.310480749443137, 0.54319405923209, -1.57214415914549, -0.262197489402468, 0.54319405923209, 0.0945835281735714, -0.236279568941097, -1.2847157223178, -0.0903195939658516, 0.298227591540715, -0.0903195939658516, -0.458365332711106, -0.611165916680421, 0.368964527385086, -1.26015524475811, -0.182925388372727, -0.0540281250854405, -1.60153617357459, -1.51466765378175, 0.119245236427584, 2.12845189901618, 0.214445309581601, 0.516862044313609, 0.332202578950118, 1.26318517608949, -0.324685911490835, -0.349650387953555, 0.243687429599092, -0.895363357977542, -0.865512862653374, 0.787738847475178, -0.372438756103829, -1.57214415914549, -1.00837660827701, 
2.12845189901618, 1.26318517608949, -1.23627311888329, 0.787738847475178, -0.349650387953555, 0.32430434416138, 3.2410399349424, 0.214538826629216, 0.707588353835588, -0.499292017172261, -0.0903195939658516, -0.70459646368007, -1.04917700666607, -0.349650387953555, 1.65090746733669, 1.67569693240319, 1.95529396549246, 1.10984813892972, -1.31080153332797, -0.280395335170247, -1.00837660827701, -1.57214415914549, -0.781536487054751, 0.0652930335253153, 0.418982404924464, -0.349650387953555, 1.10992028971364, 0.368964527385086, -0.236279568941097, -1.4617555849959, -0.992507150392037, 0.76904224100091, -0.280395335170247, 0.243687429599092, 1.95529396549246, 0.0847372921971965, -0.499292017172261, -0.197175894348552, -1.57214415914549, 1.05271146557933, 0.687916772975828, 0.0597499373846007, -0.611165916680421, -0.0540281250854405, -0.372438756103829, 1.23247587848534, 2.12845189901618, -0.865512862653374, -0.530906522170303, -0.895363357977542, 1.99721338474797, 1.31241297643351, -0.374580857767014, -0.236279568941097, 1.26318517608949, -1.01559257860354, -0.374580857767014, -0.372438756103829, -0.441163216905286, 1.95529396549246, 0.0847372921971965, 0.32430434416138, -0.119452606630659, 0.88465049897692, -0.594617267459511, -0.611165916680421, -0.41433994791886, 1.99721338474797, 1.99721338474797, -1.28703047603518, -1.01559257860354, 2.19881034888372, 0.617985817166529, 3.2410399349424, -0.182925388372727, 0.0597499373846007, 1.05271146557933, -1.60153617357459, 1.31241297643351, -0.573973479297987, 0.310480749443137, -0.992507150392037, -1.06332613397119, -0.458365332711106, -0.51606383094478, 0.332202578950118, -1.28703047603518, -0.499292017172261, -1.60153617357459, -1.4617555849959, 0.0945835281735714, -0.594617267459511, 1.23247587848534, -0.611165916680421, -0.499292017172261, -0.483780625708744, -0.895363357977542, 2.10010894052567, -0.262197489402468, -1.26015524475811, -0.280395335170247, 0.332202578950118, 0.636569674033849, -0.236279568941097, 
0.707588353835588, -1.28703047603518, 1.05271146557933, -0.119452606630659, -0.324685911490835, 0.418982404924464, 0.32430434416138, -0.476246894615578, 1.23247587848534, -0.738527704739573, 0.214538826629216, 0.76904224100091, -0.262197489402468, -1.00837660827701, 0.243687429599092, 0.88465049897692, 0.701784335374711, -0.034067253738464, 0.88465049897692, 0.0945835281735714, 1.65090746733669, 0.310480749443137, -0.182925388372727, -0.71721816157401, 0.787738847475178, -1.4617555849959, 1.65090746733669, 0.332202578950118, 0.516862044313609, -0.741336096272828, -0.788602837850243, -1.51466765378175, -0.0903195939658516, 1.67569693240319, -1.31701613230524, -0.441163216905286, -0.992507150392037, -0.349650387953555, -1.28703047603518, -0.265145056696353, 1.23247587848534, -0.483780625708744, 0.119245236427584, 0.298227591540715, -0.372438756103829, 0.600708823672418, 0.0945835281735714, 0.214445309581601, -0.236279568941097, -0.197175894348552, 1.95529396549246, 0.418982404924464, 0.214445309581601, -0.594617267459511, -0.788602837850243, -0.182925388372727, -0.363657297095253, 0.707588353835588, -0.992507150392037, -0.723065969939874, 2.19881034888372, 1.65090746733669, 0.516862044313609, 2.12845189901618, 0.243687429599092, -0.57438868976327, 0.754053785184521, -1.18548008459731, 0.436523478910183, 0.418982404924464, 1.99721338474797, 0.636569674033849, -0.215380507641693, 0.214538826629216, -1.18548008459731, 0.56298953322048, -0.71721816157401, -0.788602837850243, -0.41433994791886, -0.441163216905286, 0.418982404924464, 0.298227591540715, 0.76904224100091, 2.10010894052567, -0.611165916680421, 3.2410399349424, 0.32430434416138, -0.0903195939658516, -0.992507150392037, -1.04917700666607, 0.368964527385086, 3.2410399349424, 0.0945835281735714, -0.51606383094478, -0.034067253738464, -0.483780625708744, -0.458365332711106, -0.0540281250854405, -0.499292017172261, -0.51606383094478, 1.10984813892972, -0.119452606630659, -0.530906522170303, -0.70459646368007, 
-1.31080153332797, 1.31241297643351, 0.976973386685621, -0.530906522170303, -0.0540281250854405, -0.723065969939874, 0.418982404924464, -0.034067253738464, 0.436523478910183, -0.51606383094478, -0.280395335170247, 0.88465049897692, -0.723065969939874, 0.436523478910183, 2.10010894052567, -0.992507150392037, -0.865512862653374, -1.25127136162494, -0.57438868976327, -0.476246894615578, -0.197175894348552, 0.707588353835588, 0.687916772975828, -0.280395335170247), num.trees = 50, sample.weights = NULL, clusters = numeric(0), equalize.cluster.weights = FALSE, sample.fraction = 0.5, mtry = 3, min.node.size = 5, honesty = TRUE, honesty.fraction = 0.5, honesty.prune.leaves = TRUE, alpha = 0.05, imbalance.penalty = 0, ci.group.size = 1, tune.parameters = "none", num.threads = 0, seed = 606721005.217474)
5: do.call(regression_forest, c(Y = list(Y), args.orthog))
6: (function (X, Y, W, Y.hat = NULL, W.hat = NULL, num.trees = 2000, sample.weights = NULL, clusters = NULL, equalize.cluster.weights = FALSE, sample.fraction = 0.5, mtry = min(ceiling(sqrt(ncol(X)) + 20), ncol(X)), min.node.size = 5, honesty = TRUE, honesty.fraction = 0.5, honesty.prune.leaves = TRUE, alpha = 0.05, imbalance.penalty = 0, stabilize.splits = TRUE, ci.group.size = 2, tune.parameters = "none", tune.num.trees = 200, tune.num.reps = 50, tune.num.draws = 1000, compute.oob.predictions = TRUE, num.threads = NULL, seed = runif(1, 0, .Machine$integer.max)) { has.missing.values <- validate_X(X, allow.na = TRUE) validate_sample_weights(sample.weights, X) Y <- validate_observations(Y, X) W <- validate_observations(W, X) clusters <- validate_clusters(clusters, X) samples.per.cluster <- validate_equalize_cluster_weights(equalize.cluster.weights, clusters, sample.weights) num.threads <- validate_num_threads(num.threads) all.tunable.params <- c("sample.fraction", "mtry", "min.node.size", "honesty.fraction", "honesty.prune.leaves", "alpha", "imbalance.penalty") default.parameters <- list(sample.fraction = 0.5, mtry = min(ceiling(sqrt(ncol(X)) + 20), ncol(X)), min.node.size = 5, honesty.fraction = 0.5, honesty.prune.leaves = TRUE, alpha = 0.05, imbalance.penalty = 0) args.orthog <- list(X = X, num.trees = max(50, num.trees/4), sample.weights = sample.weights, clusters = clusters, equalize.cluster.weights = equalize.cluster.weights, sample.fraction = sample.fraction, mtry = mtry, min.node.size = 5, honesty = TRUE, honesty.fraction = 0.5, honesty.prune.leaves = honesty.prune.leaves, alpha = alpha, imbalance.penalty = imbalance.penalty, ci.group.size = 1, tune.parameters = tune.parameters, num.threads = num.threads, seed = seed) if (is.null(Y.hat)) { forest.Y <- do.call(regression_forest, c(Y = list(Y), args.orthog)) Y.hat <- predict(forest.Y)$predictions } else if (length(Y.hat) == 1) { Y.hat <- rep(Y.hat, nrow(X)) } else if (length(Y.hat) != nrow(X)) { stop("Y.hat has 
incorrect length.") } if (is.null(W.hat)) { forest.W <- do.call(regression_forest, c(Y = list(W), args.orthog)) W.hat <- predict(forest.W)$predictions } else if (length(W.hat) == 1) { W.hat <- rep(W.hat, nrow(X)) } else if (length(W.hat) != nrow(X)) { stop("W.hat has incorrect length.") } Y.centered <- Y - Y.hat W.centered <- W - W.hat data <- create_train_matrices(X, outcome = Y.centered, treatment = W.centered, sample.weights = sample.weights) args <- list(num.trees = num.trees, clusters = clusters, samples.per.cluster = samples.per.cluster, sample.fraction = sample.fraction, mtry = mtry, min.node.size = min.node.size, honesty = honesty, honesty.fraction = honesty.fraction, honesty.prune.leaves = honesty.prune.leaves, alpha = alpha, imbalance.penalty = imbalance.penalty, stabilize.splits = stabilize.splits, ci.group.size = ci.group.size, compute.oob.predictions = compute.oob.predictions, num.threads = num.threads, seed = seed, reduced.form.weight = 0, legacy.seed = get_legacy_seed(), verbose = get_verbose()) tuning.output <- NULL if (!identical(tune.parameters, "none")) { if (identical(tune.parameters, "all")) { tune.parameters <- all.tunable.params } else { tune.parameters <- unique(match.arg(tune.parameters, all.tunable.params, several.ok = TRUE)) } if (!honesty) { tune.parameters <- tune.parameters[!grepl("honesty", tune.parameters)] } tune.parameters.defaults <- default.parameters[tune.parameters] tuning.output <- tune_forest(data = data, nrow.X = nrow(X), ncol.X = ncol(X), args = args, tune.parameters = tune.parameters, tune.parameters.defaults = tune.parameters.defaults, tune.num.trees = tune.num.trees, tune.num.reps = tune.num.reps, tune.num.draws = tune.num.draws, train = causal_train) args <- utils::modifyList(args, as.list(tuning.output[["params"]])) } forest <- do.call.rcpp(causal_train, c(data, args)) class(forest) <- c("causal_forest", "grf") forest[["seed"]] <- seed forest[["num.threads"]] <- num.threads forest[["ci.group.size"]] <- ci.group.size 
forest[["X.orig"]] <- X forest[["Y.orig"]] <- Y forest[["W.orig"]] <- W forest[["Y.hat"]] <- Y.hat forest[["W.hat"]] <- W.hat forest[["clusters"]] <- clusters forest[["equalize.cluster.weights"]] <- equalize.cluster.weights forest[["sample.weights"]] <- sample.weights forest[["tunable.params"]] <- args[all.tunable.params] forest[["tuning.output"]] <- tuning.output forest[["has.missing.values"]] <- has.missing.values forest})(X = c(0, 1, 0, 0, 1, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 1, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 1, 0, 1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 3, 2, 2, 4, 5, 5, 5, 3, 3, 4, 3, 2, 4, 3, 4, 3, 1, 5, 3, 3, 1, 2, 5, 2, 3, 5, 2, 1, 2, 3, 1, 4, 3, 3, 5, 5, 1, 4, 5, 3, 3, 2, 5, 1, 5, 1, 2, 3, 1, 5, 4, 3, 1, 2, 3, 3, 2, 5, 1, 4, 2, 4, 5, 5, 2, 4, 1, 3, 3, 1, 1, 2, 2, 3, 3, 5, 2, 1, 5, 2, 4, 2, 1, 5, 1, 1, 4, 2, 3, 1, 1, 5, 3, 2, 4, 3, 3, 1, 5, 5, 2, 4, 1, 1, 1, 4, 1, 4, 3, 4, 2, 4, 4, 5, 3, 5, 4, 5, 4, 3, 2, 3, 1, 1, 2, 4, 1, 1, 1, 3, 2, 3, 5, 2, 5, 4, 4, 3, 1, 4, 5, 3, 5, 4, 4, 3, 3, 5, 5, 5, 2, 1, 5, 5, 3, 1, 3, 1, 3, 5, 5, 2, 1, 5, 5, 5, 4, 1, 2, 5, 3, 3, 2, 3, 4, 2, 4, 3, 3, 2, 5, 3, 5, 5, 4, 3, 4, 3, 1, 5, 3, 3, 1, 5, 5, 2, 2, 1, 1, 5, 2, 4, 3, 1, 5, 5, 5, 1, 5, 2, 3, 1, 5, 5, 2, 1, 4, 4, 3, 3, 3, 
4, 1, 3, 3, 5, 3, 5, 5, 2, 5, 2, 4, 1, 2, 3, 4, 2, 4, 3, 3, 3, 4, 3, 2, 5, 3, 2, 2, 3, 5, 1, 3, 1, 5, 5, 2, 3, 3, 5, 5, 4, 3, 3, 3, 5, 5, 5, 1, 4, 1, 5, 1, 2, 4, 2, 4, 3, 2, 4, 4, 2, 3, 5, 3, 5, 2, 3, 2, 3, 5, 5, 5, 1, 2, 2, 4, 2, 3, 2, -1.28703047603518, -0.280395335170247, -1.06332613397119, -0.530906522170303, -0.41433994791886, 0.54319405923209, -0.594617267459511, -1.00837660827701, 0.636569674033849, 0.0377883991710788, 1.31241297643351, 0.976973386685621, -0.372438756103829, -1.60153617357459, -1.04917700666607, 3.2410399349424, -1.25127136162494, -0.895363357977542, 0.636569674033849, -1.60153617357459, -1.4617555849959, -0.57438868976327, 0.76904224100091, -0.70459646368007, 0.0945835281735714, 0.332202578950118, 0.119245236427584, -1.4617555849959, -1.2847157223178, -1.28703047603518, -1.26015524475811, -1.31080153332797, 0.436523478910183, -1.00837660827701, 0.310480749443137, 0.54319405923209, -1.57214415914549, -0.262197489402468, 0.54319405923209, 0.0945835281735714, -0.236279568941097, -1.2847157223178, -0.0903195939658516, 0.298227591540715, -0.0903195939658516, -0.458365332711106, -0.611165916680421, 0.368964527385086, -1.26015524475811, -0.182925388372727, -0.0540281250854405, -1.60153617357459, -1.51466765378175, 0.119245236427584, 2.12845189901618, 0.214445309581601, 0.516862044313609, 0.332202578950118, 1.26318517608949, -0.324685911490835, -0.349650387953555, 0.243687429599092, -0.895363357977542, -0.865512862653374, 0.787738847475178, -0.372438756103829, -1.57214415914549, -1.00837660827701, 2.12845189901618, 1.26318517608949, -1.23627311888329, 0.787738847475178, -0.349650387953555, 0.32430434416138, 3.2410399349424, 0.214538826629216, 0.707588353835588, -0.499292017172261, -0.0903195939658516, -0.70459646368007, -1.04917700666607, -0.349650387953555, 1.65090746733669, 1.67569693240319, 1.95529396549246, 1.10984813892972, -1.31080153332797, -0.280395335170247, -1.00837660827701, -1.57214415914549, -0.781536487054751, 0.0652930335253153, 
0.418982404924464, -0.349650387953555, 1.10992028971364, 0.368964527385086, -0.236279568941097, -1.4617555849959, -0.992507150392037, 0.76904224100091, -0.280395335170247, 0.243687429599092, 1.95529396549246, 0.0847372921971965, -0.499292017172261, -0.197175894348552, -1.57214415914549, 1.05271146557933, 0.687916772975828, 0.0597499373846007, -0.611165916680421, -0.0540281250854405, -0.372438756103829, 1.23247587848534, 2.12845189901618, -0.865512862653374, -0.530906522170303, -0.895363357977542, 1.99721338474797, 1.31241297643351, -0.374580857767014, -0.236279568941097, 1.26318517608949, -1.01559257860354, -0.374580857767014, -0.372438756103829, -0.441163216905286, 1.95529396549246, 0.0847372921971965, 0.32430434416138, -0.119452606630659, 0.88465049897692, -0.594617267459511, -0.611165916680421, -0.41433994791886, 1.99721338474797, 1.99721338474797, -1.28703047603518, -1.01559257860354, 2.19881034888372, 0.617985817166529, 3.2410399349424, -0.182925388372727, 0.0597499373846007, 1.05271146557933, -1.60153617357459, 1.31241297643351, -0.573973479297987, 0.310480749443137, -0.992507150392037, -1.06332613397119, -0.458365332711106, -0.51606383094478, 0.332202578950118, -1.28703047603518, -0.499292017172261, -1.60153617357459, -1.4617555849959, 0.0945835281735714, -0.594617267459511, 1.23247587848534, -0.611165916680421, -0.499292017172261, -0.483780625708744, -0.895363357977542, 2.10010894052567, -0.262197489402468, -1.26015524475811, -0.280395335170247, 0.332202578950118, 0.636569674033849, -0.236279568941097, 0.707588353835588, -1.28703047603518, 1.05271146557933, -0.119452606630659, -0.324685911490835, 0.418982404924464, 0.32430434416138, -0.476246894615578, 1.23247587848534, -0.738527704739573, 0.214538826629216, 0.76904224100091, -0.262197489402468, -1.00837660827701, 0.243687429599092, 0.88465049897692, 0.701784335374711, -0.034067253738464, 0.88465049897692, 0.0945835281735714, 1.65090746733669, 0.310480749443137, -0.182925388372727, -0.71721816157401, 
0.787738847475178, -1.4617555849959, 1.65090746733669, 0.332202578950118, 0.516862044313609, -0.741336096272828, -0.788602837850243, -1.51466765378175, -0.0903195939658516, 1.67569693240319, -1.31701613230524, -0.441163216905286, -0.992507150392037, -0.349650387953555, -1.28703047603518, -0.265145056696353, 1.23247587848534, -0.483780625708744, 0.119245236427584, 0.298227591540715, -0.372438756103829, 0.600708823672418, 0.0945835281735714, 0.214445309581601, -0.236279568941097, -0.197175894348552, 1.95529396549246, 0.418982404924464, 0.214445309581601, -0.594617267459511, -0.788602837850243, -0.182925388372727, -0.363657297095253, 0.707588353835588, -0.992507150392037, -0.723065969939874, 2.19881034888372, 1.65090746733669, 0.516862044313609, 2.12845189901618, 0.243687429599092, -0.57438868976327, 0.754053785184521, -1.18548008459731, 0.436523478910183, 0.418982404924464, 1.99721338474797, 0.636569674033849, -0.215380507641693, 0.214538826629216, -1.18548008459731, 0.56298953322048, -0.71721816157401, -0.788602837850243, -0.41433994791886, -0.441163216905286, 0.418982404924464, 0.298227591540715, 0.76904224100091, 2.10010894052567, -0.611165916680421, 3.2410399349424, 0.32430434416138, -0.0903195939658516, -0.992507150392037, -1.04917700666607, 0.368964527385086, 3.2410399349424, 0.0945835281735714, -0.51606383094478, -0.034067253738464, -0.483780625708744, -0.458365332711106, -0.0540281250854405, -0.499292017172261, -0.51606383094478, 1.10984813892972, -0.119452606630659, -0.530906522170303, -0.70459646368007, -1.31080153332797, 1.31241297643351, 0.976973386685621, -0.530906522170303, -0.0540281250854405, -0.723065969939874, 0.418982404924464, -0.034067253738464, 0.436523478910183, -0.51606383094478, -0.280395335170247, 0.88465049897692, -0.723065969939874, 0.436523478910183, 2.10010894052567, -0.992507150392037, -0.865512862653374, -1.25127136162494, -0.57438868976327, -0.476246894615578, -0.197175894348552, 0.707588353835588, 0.687916772975828, 
-0.280395335170247), Y = c(1.0793502446225, -0.807425372083713, -0.675429294433325, 0.0778488776875013, 0.302014293100801, 2.907155432613, 0.235102422578912, -0.294262932775825, -0.856909910290467, -0.0281862015349582, -0.555407523216953, -0.192452740465306, -0.129731965781434, -1.37901996967488, -0.557695772464678, 0.176495983326576, 1.43943114819245, -1.47139655320819, 1.23649309689803, -2.9419823732506, -0.0210923254962712, -0.336171599004456, -0.604012570765805, 1.4626353787996, -0.80151784589277, -0.334686893420814, 2.25023344027751, 0.165798856163565, 0.812095188602958, -0.652996697179975, 0.845217419118375, -0.719096113051879, 0.51108523759244, 0.343932007092971, -0.255247117916601, -2.43160944028893, -0.397483312736214, -1.15618693969407, 0.647415741989621, -0.164128886580336, -1.8556731107073, -1.14200704317064, 0.721250958019734, 0.667191301653508, 0.834794975718168, -1.95508411115506, -0.75431633950442, 2.69122212898041, 0.659944427136795, 0.0614694508975588, -2.09202942685262, -1.74524920183974, 2.3447618431492, 0.774830200925732, -0.672613771148493, -0.583486288176347, 0.805618816172016, -1.57094426371528, 0.382468899042099, -0.513943899173967, 0.980766523313298, -0.29374665204501, 1.1176772949158, -1.20905695371922, 1.28291266867267, -2.10613086538395, 0.482063644339884, -0.729418024377206, 1.52699168928426, 0.327349747637467, 0.179555394414968, -0.453752584919967, 1.21066109856515, -0.219142409044835, -0.705423322213934, -1.89932798326328, 0.498027933922633, -0.363598625026532, 0.598864405114883, 0.130870713419253, -2.51918676611661, -0.291046175078048, 0.578397476240921, -0.84671523912058, -0.234887891925256, 0.0551264965370913, -0.365133173315097, -0.874997602631346, -0.787945743088567, -1.00074810975902, 0.891955894058927, 0.264926895822003, 0.445591672102194, 1.23521630704007, -0.709321973485272, 1.64371836897973, -0.363165839824306, 1.06281763854744, 1.64054083080228, -0.0260348443913187, 2.21205012132364, -1.75052398316875, 0.544453433443979, 
-0.496430647415581, -0.656852066157813, -0.651781954989303, 0.942166146478243, -0.623253997322409, -0.296820737834909, 1.08532680118658, -0.358810754075613, -1.35958549698966, -0.592294505276967, 0.546825629048136, 0.831332570790525, -0.0543891788998037, -1.54399417586589, -1.04585727075305, -1.52459461855862, -1.17824475827843, 0.451560348999358, -0.147291217898845, 1.67664020765241, 1.4357243662162, -0.587662362990722, -0.716939093537825, -1.1660025194875, -0.144016351527263, -0.855314003414081, -0.540141718145293, -0.389361522863483, 0.383143203862102, 0.503532258044294, 0.173882819375975, 0.451480495524697, -2.77051132483852, -1.69868123107127, -1.09371178698431, 0.575382640026715, 0.337442714748719, -1.40764602302186, 1.32749939534985, 2.09725183226329, 0.70743368417554, 1.0347500997819, -1.35052082894787, -0.875490541083933, -1.25680718377597, -0.670696558662698, -3.54553942668715, -0.496479112681606, -1.54209727643676, -2.49062960086617, -0.476113468702838, 2.11402956729802, 0.997913117165253, -0.35868340513966, -1.04577629347268, -0.932462869042157, 1.2288189178366, 0.101005239496454, 0.746112124239256, 2.53203551619102, 0.814495272050397, 2.40270167951243, 1.24894732071314, 0.472837453945226, 1.29627094476308, 0.592437657180438, 2.19567934987648, -0.831864926399042, 0.781769611565873, -1.64390247412825, -0.369604553423437, -0.360820732343141, 0.175804222874855, 0.0639644440505875, 0.451132682585146, 0.339690089071918, 0.48207711421286, -1.28692953066192, -3.30746786087678, 0.128616325393531, -0.0264734111593823, -0.780911830494759, -0.956567509224197, -0.448461328515932, -0.35119137042556, -1.48739132325075, 0.660120642136449, 0.629116666391144, -0.162575626339956, 1.46655525660769, 2.81277881863909, -0.0485469751165366, 0.944702033169487, 0.907361207601, 1.5306532874506, 0.977999387600918, -0.180871245132944, 0.272786287235605, -0.0115852843038398, 0.528228443051922, 1.16794743156737, 1.19347695510408, 0.884558670184623, -0.345310153107876, 
-0.111285287227562, -1.19030902629077, -0.223073344004581, -0.632733445820258, 0.134351503088681, 0.854991730951167, 0.88463504303066, -1.00172509573693, 0.823105269525756, -2.37232518648289, -1.1014758800273, -0.956984808730151, -0.638248570399217, 0.695261141479704, 0.146811272742742, -0.127174777865302, -1.22043437481134, 0.248953571333043, 2.47320224016077, 0.152033813336257, -0.375387309354338, 1.23076822409262, -1.40846623197918, -0.477054883747726, 0.419509304670655, -0.454321914836378, 1.10821972212305, 0.852151602668729, -0.494424550943353, 1.74451922640261, 0.26377021191008, -1.04624502067665, -0.596220736438388, 0.391910737612717, -0.813150922607861, -0.994105423466496, 1.73571541235549, -0.633656634162124, 1.38820301740402, 2.19420304735806, -0.620844234965996, 0.295579297369821, 0.052140928993251, 0.178056306099694, 1.33559171693097, 0.30115772954781, 1.22868464393732, 0.867614441367613, -0.980412685235036, -2.29149284538732, 1.9649849849111, 0.921776208956185, 1.32479542873807, -0.143455072595653, 1.83738854391794, 0.213578245912108, -0.721877431949363, -0.229407708013512, -1.03345171862408, -1.51204544863694, 1.05530008373746, 0.134833676626422, -0.654877153049159, 0.142574836786277, -2.61872277686514, -0.121779728042941, 1.1880783833751, 0.598083762065646, 0.48801244367152, -0.114775264232662, -0.14269070196127, -0.183705466274104, -0.366713648179442, 0.829302698044069, 2.13292349046699, -0.618083443860038, -0.284644580662871, -0.740495671246079, 0.474904452983999, -0.48477729534222, 0.767778021360685, -1.10235728010711, 0.238035514275665, 0.41811656235061, -2.21365931326139, -0.493859453213328, 1.32716567050129, -0.083577670239988, 1.97280958480762, -0.370729069943725, 0.40481137917452, -0.279492027262978, 1.13434321910827), W = c(0, 0, 1, 0, 1, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 1, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 
1, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 1, 1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 1, 1, 0, 1, 0, 1, 1, 0, 1, 0, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0, 1, 1, 0, 1, 1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1, 1, 1, 0, 0, 0, 1, 1, 0), num.trees = 100)
7: do.call(model, c(list(X = X, Y = Y, W = D), model_params))
8: fit_model(model, X_subset, Y_subset, D_subset, model_type, learner_type, model_params, propensity)
9: `[.data.table`(cumulative_data_dt, , { X_subset <- as.matrix(X_cumul[[1]]) D_subset <- as.numeric(D_cumul[[1]]) Y_subset <- as.numeric(Y_cumul[[1]]) if (!(is.null(model_type))) { trained_model <- fit_model(model, X_subset, Y_subset, D_subset, model_type, learner_type, model_params, propensity) learned_policy <- model_predict(trained_model, X, D, model_type, learner_type, model_params) } else { trained_model <- custom_fit(X_subset, Y_subset, D_subset) learned_policy <- custom_predict(trained_model, X, D) } final_model <- if (t == nb_batch) trained_model else NULL .(learned_policy = list(learned_policy), final_model = list(final_model))}, by = t)
10: cumulative_data_dt[, { X_subset <- as.matrix(X_cumul[[1]]) D_subset <- as.numeric(D_cumul[[1]]) Y_subset <- as.numeric(Y_cumul[[1]]) if (!(is.null(model_type))) { trained_model <- fit_model(model, X_subset, Y_subset, D_subset, model_type, learner_type, model_params, propensity) learned_policy <- model_predict(trained_model, X, D, model_type, learner_type, model_params) } else { trained_model <- custom_fit(X_subset, Y_subset, D_subset) learned_policy <- custom_predict(trained_model, X, D) } final_model <- if (t == nb_batch) trained_model else NULL .(learned_policy = list(learned_policy), final_model = list(final_model))}, by = t]
11: cram_learning(X_matrix, D_slice, Y_slice, batch, model_type = model_type, learner_type = learner_type, baseline_policy = baseline_policy, parallelize_batch = parallelize_batch, model_params = model_params, custom_fit = custom_fit, custom_predict = custom_predict)
12: `[.data.table`(big_X, , { X_matrix <- as.matrix(.SD[, !c("Y", "D"), with = FALSE]) D_slice <- D Y_slice <- Y learning_result <- cram_learning(X_matrix, D_slice, Y_slice, batch, model_type = model_type, learner_type = learner_type, baseline_policy = baseline_policy, parallelize_batch = parallelize_batch, model_params = model_params, custom_fit = custom_fit, custom_predict = custom_predict) policies <- learning_result$policies batch_indices <- learning_result$batch_indices final_policy_model <- learning_result$final_policy_model nb_batch <- length(batch_indices) delta_estimate <- cram_estimator(X_matrix, Y_slice, D_slice, policies, batch_indices, propensity = propensity) policy_value_estimate <- cram_policy_value_estimator(X_matrix, Y_slice, D_slice, policies, batch_indices, propensity = propensity) X_pred <- as.matrix(new_big_X[, !c("sim_id"), with = FALSE]) pred_policies_sim_truth <- model_predict(final_policy_model, X_pred, new_D, model_type, learner_type, model_params) expected_length <- nb_simulations_truth * sample_size if (length(pred_policies_sim_truth) != expected_length) { message("Length mismatch: pred_policies_sim_truth has length ", length(pred_policies_sim_truth), " but expected ", expected_length) } D_1 <- rep(1, nrow(new_big_X)) D_0 <- rep(0, nrow(new_big_X)) Y_1 <- new_big_X[, .(Y = dgp_Y(D_1[.I], .SD)), by = sim_id][, Y] Y_0 <- new_big_X[, .(Y = dgp_Y(D_0[.I], .SD)), by = sim_id][, Y] true_policy_value <- mean(Y_1 * pred_policies_sim_truth + Y_0 * (1 - pred_policies_sim_truth)) baseline_policy_vec <- rep(unlist(baseline_policy), times = nb_simulations_truth) true_delta <- mean((Y_1 - Y_0) * (pred_policies_sim_truth - baseline_policy_vec)) final_policy <- policies[, nb_batch + 1] proportion_treated <- mean(final_policy) delta_asymptotic_variance <- cram_variance_estimator(X_matrix, Y_slice, D_slice, policies, batch_indices, propensity = propensity) delta_asymptotic_sd <- sqrt(delta_asymptotic_variance) delta_standard_error <- delta_asymptotic_sd 
delta_ci_lower <- delta_estimate - z_value * delta_standard_error delta_ci_upper <- delta_estimate + z_value * delta_standard_error delta_confidence_interval <- c(delta_ci_lower, delta_ci_upper) policy_value_asymptotic_variance <- cram_variance_estimator_policy_value(X_matrix, Y_slice, D_slice, policies, batch_indices, propensity = propensity) policy_value_asymptotic_sd <- sqrt(policy_value_asymptotic_variance) policy_value_standard_error <- policy_value_asymptotic_sd policy_value_ci_lower <- policy_value_estimate - z_value * policy_value_standard_error policy_value_ci_upper <- policy_value_estimate + z_value * policy_value_standard_error policy_value_confidence_interval <- c(policy_value_ci_lower, policy_value_ci_upper) .(proportion_treated = proportion_treated, delta_estimate = delta_estimate, delta_asymptotic_variance = delta_asymptotic_variance, delta_standard_error = delta_standard_error, delta_ci_lower = delta_ci_lower, delta_ci_upper = delta_ci_upper, policy_value_estimate = policy_value_estimate, policy_value_asymptotic_variance = policy_value_asymptotic_variance, policy_value_standard_error = policy_value_standard_error, policy_value_ci_lower = policy_value_ci_lower, policy_value_ci_upper = policy_value_ci_upper, true_delta = true_delta, true_policy_value = true_policy_value)}, by = sim_id)
13: big_X[, { X_matrix <- as.matrix(.SD[, !c("Y", "D"), with = FALSE]) D_slice <- D Y_slice <- Y learning_result <- cram_learning(X_matrix, D_slice, Y_slice, batch, model_type = model_type, learner_type = learner_type, baseline_policy = baseline_policy, parallelize_batch = parallelize_batch, model_params = model_params, custom_fit = custom_fit, custom_predict = custom_predict) policies <- learning_result$policies batch_indices <- learning_result$batch_indices final_policy_model <- learning_result$final_policy_model nb_batch <- length(batch_indices) delta_estimate <- cram_estimator(X_matrix, Y_slice, D_slice, policies, batch_indices, propensity = propensity) policy_value_estimate <- cram_policy_value_estimator(X_matrix, Y_slice, D_slice, policies, batch_indices, propensity = propensity) X_pred <- as.matrix(new_big_X[, !c("sim_id"), with = FALSE]) pred_policies_sim_truth <- model_predict(final_policy_model, X_pred, new_D, model_type, learner_type, model_params) expected_length <- nb_simulations_truth * sample_size if (length(pred_policies_sim_truth) != expected_length) { message("Length mismatch: pred_policies_sim_truth has length ", length(pred_policies_sim_truth), " but expected ", expected_length) } D_1 <- rep(1, nrow(new_big_X)) D_0 <- rep(0, nrow(new_big_X)) Y_1 <- new_big_X[, .(Y = dgp_Y(D_1[.I], .SD)), by = sim_id][, Y] Y_0 <- new_big_X[, .(Y = dgp_Y(D_0[.I], .SD)), by = sim_id][, Y] true_policy_value <- mean(Y_1 * pred_policies_sim_truth + Y_0 * (1 - pred_policies_sim_truth)) baseline_policy_vec <- rep(unlist(baseline_policy), times = nb_simulations_truth) true_delta <- mean((Y_1 - Y_0) * (pred_policies_sim_truth - baseline_policy_vec)) final_policy <- policies[, nb_batch + 1] proportion_treated <- mean(final_policy) delta_asymptotic_variance <- cram_variance_estimator(X_matrix, Y_slice, D_slice, policies, batch_indices, propensity = propensity) delta_asymptotic_sd <- sqrt(delta_asymptotic_variance) delta_standard_error <- delta_asymptotic_sd delta_ci_lower 
<- delta_estimate - z_value * delta_standard_error delta_ci_upper <- delta_estimate + z_value * delta_standard_error delta_confidence_interval <- c(delta_ci_lower, delta_ci_upper) policy_value_asymptotic_variance <- cram_variance_estimator_policy_value(X_matrix, Y_slice, D_slice, policies, batch_indices, propensity = propensity) policy_value_asymptotic_sd <- sqrt(policy_value_asymptotic_variance) policy_value_standard_error <- policy_value_asymptotic_sd policy_value_ci_lower <- policy_value_estimate - z_value * policy_value_standard_error policy_value_ci_upper <- policy_value_estimate + z_value * policy_value_standard_error policy_value_confidence_interval <- c(policy_value_ci_lower, policy_value_ci_upper) .(proportion_treated = proportion_treated, delta_estimate = delta_estimate, delta_asymptotic_variance = delta_asymptotic_variance, delta_standard_error = delta_standard_error, delta_ci_lower = delta_ci_lower, delta_ci_upper = delta_ci_upper, policy_value_estimate = policy_value_estimate, policy_value_asymptotic_variance = policy_value_asymptotic_variance, policy_value_standard_error = policy_value_standard_error, policy_value_ci_lower = policy_value_ci_lower, policy_value_ci_upper = policy_value_ci_upper, true_delta = true_delta, true_policy_value = true_policy_value)}, by = sim_id]
14: cram_simulation(X = X_data, dgp_D = dgp_D, dgp_Y = dgp_Y, batch = batch, nb_simulations = nb_simulations, nb_simulations_truth = nb_simulations_truth, sample_size = 500)
15: eval(expr, envir)
16: eval(expr, envir)
17: withVisible(eval(expr, envir))
18: withCallingHandlers(code, error = function (e) rlang::entrace(e), message = function (cnd) { watcher$capture_plot_and_output() if (on_message$capture) { watcher$push(cnd) } if (on_message$silence) { invokeRestart("muffleMessage") }}, warning = function (cnd) { if (getOption("warn") >= 2 || getOption("warn") < 0) { return() } watcher$capture_plot_and_output() if (on_warning$capture) { cnd <- sanitize_call(cnd) watcher$push(cnd) } if (on_warning$silence) { invokeRestart("muffleWarning") }}, error = function (cnd) { watcher$capture_plot_and_output() cnd <- sanitize_call(cnd) watcher$push(cnd) switch(on_error, continue = invokeRestart("eval_continue"), stop = invokeRestart("eval_stop"), error = NULL)})
19: eval(call)
20: eval(call)
21: with_handlers({ for (expr in tle$exprs) { ev <- withVisible(eval(expr, envir)) watcher$capture_plot_and_output() watcher$print_value(ev$value, ev$visible, envir) } TRUE}, handlers)
22: doWithOneRestart(return(expr), restart)
23: withOneRestart(expr, restarts[[1L]])
24: withRestartList(expr, restarts[-nr])
25: doWithOneRestart(return(expr), restart)
26: withOneRestart(withRestartList(expr, restarts[-nr]), restarts[[nr]])
27: withRestartList(expr, restarts)
28: withRestarts(with_handlers({ for (expr in tle$exprs) { ev <- withVisible(eval(expr, envir)) watcher$capture_plot_and_output() watcher$print_value(ev$value, ev$visible, envir) } TRUE}, handlers), eval_continue = function() TRUE, eval_stop = function() FALSE)
29: evaluate::evaluate(...)
30: evaluate(code, envir = env, new_device = FALSE, keep_warning = if (is.numeric(options$warning)) TRUE else options$warning, keep_message = if (is.numeric(options$message)) TRUE else options$message, stop_on_error = if (is.numeric(options$error)) options$error else { if (options$error && options$include) 0L else 2L }, output_handler = knit_handlers(options$render, options))
31: in_dir(input_dir(), expr)
32: in_input_dir(evaluate(code, envir = env, new_device = FALSE, keep_warning = if (is.numeric(options$warning)) TRUE else options$warning, keep_message = if (is.numeric(options$message)) TRUE else options$message, stop_on_error = if (is.numeric(options$error)) options$error else { if (options$error && options$include) 0L else 2L }, output_handler = knit_handlers(options$render, options)))
33: eng_r(options)
34: block_exec(params)
35: call_block(x)
36: process_group(group)
37: withCallingHandlers(if (tangle) process_tangle(group) else process_group(group), error = function(e) { if (progress && is.function(pb$interrupt)) pb$interrupt() if (is_R_CMD_build() || is_R_CMD_check()) error <<- format(e) })
38: with_options(withCallingHandlers(if (tangle) process_tangle(group) else process_group(group), error = function(e) { if (progress && is.function(pb$interrupt)) pb$interrupt() if (is_R_CMD_build() || is_R_CMD_check()) error <<- format(e) }), list(rlang_trace_top_env = knit_global()))
39: xfun:::handle_error(with_options(withCallingHandlers(if (tangle) process_tangle(group) else process_group(group), error = function(e) { if (progress && is.function(pb$interrupt)) pb$interrupt() if (is_R_CMD_build() || is_R_CMD_check()) error <<- format(e) }), list(rlang_trace_top_env = knit_global())), function(loc) { setwd(wd) write_utf8(res, output %n% stdout()) paste0("\nQuitting from ", loc, if (!is.null(error)) paste0("\n", rule(), error, "\n", rule()))}, if (labels[i] != "") sprintf(" [%s]", labels[i]), get_loc)
40: process_file(text, output)
41: knitr::knit(knit_input, knit_output, envir = envir, quiet = quiet)
42: rmarkdown::render(file, encoding = encoding, quiet = quiet, envir = globalenv(), output_dir = getwd(), ...)
43: vweave_rmarkdown(...)
44: engine$weave(file, quiet = quiet, encoding = enc)
45: doTryCatch(return(expr), name, parentenv, handler)
46: tryCatchOne(expr, names, parentenv, handlers[[1L]])
47: tryCatchList(expr, classes, parentenv, handlers)
48: tryCatch({ engine$weave(file, quiet = quiet, encoding = enc) setwd(startdir) output <- find_vignette_product(name, by = "weave", engine = engine) if (!have.makefile && vignette_is_tex(output)) { texi2pdf(file = output, clean = FALSE, quiet = quiet) output <- find_vignette_product(name, by = "texi2pdf", engine = engine) }}, error = function(e) { OK <<- FALSE message(gettextf("Error: processing vignette '%s' failed with diagnostics:\n%s", file, conditionMessage(e)))})
49: tools:::.buildOneVignette("cram_policy_simulation.Rmd", "/data/gannet/ripley/R/packages/tests-clang/cramR.Rcheck/vign_test/cramR", TRUE, FALSE, "cram_policy_simulation", "UTF-8", "/tmp/RtmpIMd40o/working_dir/RtmpH6CAVm/file193edd4f498f7e.rds")
An irrecoverable exception occurred. R is aborting now ...
--- re-building ‘quickstart.Rmd’ using rmarkdown
--- finished re-building ‘quickstart.Rmd’
SUMMARY: processing the following file failed:
‘cram_policy_simulation.Rmd’
Error: Vignette re-building failed.
Execution halted
Flavor: r-devel-linux-x86_64-fedora-clang
Version: 0.1.1
Check: installed package size
Result: NOTE
installed size is 5.2Mb
sub-directories of 1Mb or more:
doc 2.2Mb
help 2.4Mb
Flavors: r-oldrel-macos-arm64, r-oldrel-macos-x86_64, r-oldrel-windows-x86_64