Extracts the prognostic_info
list element from an rctglm_prog
object. See
'Value' at rctglm_with_prognosticscore for more details.
Arguments
- x
an object of class
rctglm_prog
(returned by rctglm_with_prognosticscore)
Value
a list with the structure of prognostic_info as described
in the
Value
section of rctglm_with_prognosticscore.
See also
The function rctglm_with_prognosticscore(), which produces
the rctglm_prog objects that this
accessor works on.
Examples
# Generate some data
# NOTE(review): no set.seed() here, so the random data (and the printed
# output shown below) is not reproducible across runs — consider adding one.
# Simulation parameters: sample size and true coefficients
n <- 100
b0 <- 1
b1 <- 1.5
b2 <- 2
# Single covariate, uniform on [-2, 2]
W1 <- runif(n, min = -2, max = 2)
# Probability of being randomised to the exposure arm
exposure_prob <- .5
# Trial data: outcome depends non-linearly on W1 and on the exposure A
dat_treat <- glm_data(
Y ~ b0+b1*abs(sin(W1))+b2*A,
W1 = W1,
A = rbinom(n, 1, exposure_prob)
)
# Historical (untreated) data used to train the prognostic model
dat_notreat <- glm_data(
Y ~ b0+b1*abs(sin(W1)),
W1 = W1
)
# Candidate learner(s) for the prognostic score: a MARS model specified
# with parsnip and fit via the "earth" engine
learners <- list(
mars = list(
model = parsnip::set_engine(
parsnip::mars(
mode = "regression", prod_degree = 3
),
"earth"
)
)
)
# Fit the GLM on the trial data with a prognostic score estimated from
# the historical data; the estimand is the average treatment effect (ATE)
ate <- rctglm_with_prognosticscore(
formula = Y ~ .,
exposure_indicator = A,
exposure_prob = exposure_prob,
data = dat_treat,
family = gaussian(),
estimand_fun = "ate",
data_hist = dat_notreat,
learners = learners)
#>
#> ── Fitting prognostic model ──
#>
#> ℹ Created formula for fitting prognostic model as: Y ~ .
#> ℹ Fitting learners
#> • mod_mars
#> i No tuning parameters. `fit_resamples()` will be attempted
#> i 1 of 1 resampling: mod_mars
#> ✔ 1 of 1 resampling: mod_mars (132ms)
#> ℹ Model with lowest RMSE: mod_mars
#> ℹ Investigate trained learners and fitted model in `prognostic_info` list element
#>
#> ── Symbolic differentiation of estimand function ──
#>
#> ℹ Symbolically deriving partial derivative of the function 'psi1 - psi0' with respect to 'psi0' as: '-1'.
#> • Alternatively, specify the derivative through the argument
#> `estimand_fun_deriv0`
#> ℹ Symbolically deriving partial derivative of the function 'psi1 - psi0' with respect to 'psi1' as: '1'.
#> • Alternatively, specify the derivative through the argument
#> `estimand_fun_deriv1`
# Extract the prognostic_info list element from the fitted rctglm_prog object
prog(ate)
#> $formula
#> Y ~ .
#> <environment: 0x55f085805680>
#>
#> $model_fit
#> ══ Workflow [trained] ══════════════════════════════════════════════════════════
#> Preprocessor: Formula
#> Model: mars()
#>
#> ── Preprocessor ────────────────────────────────────────────────────────────────
#> Y ~ .
#>
#> ── Model ───────────────────────────────────────────────────────────────────────
#> Selected 3 of 8 terms, and 1 of 1 predictors
#> Termination condition: RSq changed by less than 0.001 at 8 terms
#> Importance: W1
#> Number of terms at each degree of interaction: 1 2 (additive model)
#> GCV 1.026029 RSS 90.65992 GRSq 0.1592515 RSq 0.242031
#>
#> $learners
#> $learners$mars
#> $learners$mars$model
#> MARS Model Specification (regression)
#>
#> Main Arguments:
#> prod_degree = 3
#>
#> Computational engine: earth
#>
#>
#>
#>
#> $cv_folds
#> [1] 5
#>
#> $data
#> Y W1
#> 1 1.080226520 0.054207169
#> 2 0.073091604 -0.148972606
#> 3 3.463991365 -1.553011931
#> 4 2.095647164 0.477838799
#> 5 1.548042412 0.782236281
#> 6 1.086007249 -0.732574321
#> 7 2.881073546 1.516755318
#> 8 2.591274753 -0.381372944
#> 9 1.900611511 0.539134447
#> 10 3.276913163 1.180297946
#> 11 1.439599522 0.835140067
#> 12 2.548099724 -1.760414447
#> 13 4.755088192 -1.321984500
#> 14 1.255413214 0.148989349
#> 15 1.626508656 0.482557669
#> 16 2.328459792 -0.572686500
#> 17 2.610934937 0.145828577
#> 18 2.325281944 -0.205931618
#> 19 2.499078380 -1.301826689
#> 20 2.467808292 0.633152368
#> 21 1.831622059 1.886640927
#> 22 1.785702057 -0.881517597
#> 23 0.325903648 0.522782093
#> 24 0.312511589 -0.306786704
#> 25 3.222912589 -0.689656083
#> 26 1.338352104 -0.424981941
#> 27 2.666737727 0.527979673
#> 28 3.180142896 0.117435311
#> 29 2.718007789 1.373388373
#> 30 1.365376059 0.558169925
#> 31 0.198253454 1.970081693
#> 32 1.989264654 0.835948863
#> 33 2.560533844 1.013648397
#> 34 0.720241235 0.061265321
#> 35 3.286797130 0.455773049
#> 36 2.775621326 -1.785018779
#> 37 -0.242777378 -0.215847690
#> 38 1.250574461 -1.412297177
#> 39 -0.008274275 -0.243686806
#> 40 2.516194692 1.919667291
#> 41 2.074303752 -1.381230622
#> 42 1.061268332 0.280097803
#> 43 1.380980681 -0.097808911
#> 44 4.263829485 1.481619725
#> 45 2.340778266 -0.629327535
#> 46 3.078616618 0.672877684
#> 47 0.963857061 -1.011671729
#> 48 1.168146476 0.833760550
#> 49 2.223499098 1.073705474
#> 50 1.103263367 0.965277493
#> 51 0.998553330 -1.817015395
#> 52 0.720258064 1.240615459
#> 53 0.509434600 0.523799929
#> 54 0.723838361 0.548520023
#> 55 1.344597724 -0.822208690
#> 56 0.534112449 0.079563512
#> 57 3.032321333 1.384982192
#> 58 2.375438852 1.193408270
#> 59 2.300375597 -1.369564363
#> 60 2.939245436 -0.443691538
#> 61 -0.146371012 -0.187557026
#> 62 4.118867764 -1.142904804
#> 63 -0.083051780 0.759210528
#> 64 2.013660862 0.002638961
#> 65 2.630543689 1.980248458
#> 66 2.121195516 -1.529962860
#> 67 2.811296125 1.857370362
#> 68 0.341607582 0.565919823
#> 69 3.403676539 -1.564187960
#> 70 1.545973392 -0.834586513
#> 71 0.875589490 -0.103746382
#> 72 3.440435040 1.023317943
#> 73 2.372142479 -1.483048798
#> 74 0.547463922 -0.097032987
#> 75 1.806742066 1.181269797
#> 76 2.618199762 -1.534684835
#> 77 2.519479860 0.893202768
#> 78 1.135825470 -0.190888042
#> 79 2.697688442 -0.981938925
#> 80 2.032643713 -1.042243558
#> 81 1.807305933 0.095488627
#> 82 4.448819143 -1.788098332
#> 83 1.892900503 -1.417561601
#> 84 2.451688328 0.759996420
#> 85 2.746144453 1.353978418
#> 86 0.498685704 1.612627343
#> 87 2.899097178 -1.646279992
#> 88 3.829616389 -1.787805090
#> 89 3.402060386 -1.478330033
#> 90 1.860861057 -1.577669651
#> 91 1.331898944 0.363677772
#> 92 2.559098593 1.772761269
#> 93 3.567000753 -1.388085473
#> 94 1.612616554 1.579245754
#> 95 3.871561423 -1.446541916
#> 96 1.738874250 -0.331721296
#> 97 2.022907515 -1.641866679
#> 98 2.779670580 -0.954397969
#> 99 1.966136555 -0.561635143
#> 100 1.616736057 0.620070576
#>