
Extracts the prognostic_info list element from an rctglm_prog object. See the 'Value' section of rctglm_with_prognosticscore() for more details.

Usage

prog(x)

# S3 method for class 'rctglm_prog'
prog(x)

Arguments

x

An object of class rctglm_prog (returned by rctglm_with_prognosticscore).

Value

A list with the structure of the prognostic_info element as described in the 'Value' section of rctglm_with_prognosticscore().
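
As the example output below shows, this list contains elements such as formula, model_fit, learners, cv_folds and data, which can be accessed with standard list subsetting. A minimal sketch, assuming an object fit returned by rctglm_with_prognosticscore() (the name fit is illustrative; see the Examples for a full fit):

prog_info <- prog(fit)
names(prog_info)    # e.g. "formula", "model_fit", "learners", "cv_folds", "data"
prog_info$cv_folds  # number of cross-validation folds used when fitting the learners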

See also

The function rctglm_with_prognosticscore(), which creates the rctglm_prog objects this method works on.

Examples

# Generate some data
n <- 100
b0 <- 1
b1 <- 1.5
b2 <- 2
W1 <- runif(n, min = -2, max = 2)
exposure_prob <- .5

# Simulated trial data with a treatment indicator A
dat_treat <- glm_data(
  Y ~ b0 + b1 * abs(sin(W1)) + b2 * A,
  W1 = W1,
  A = rbinom(n, 1, exposure_prob)
)

# Simulated historical data without treatment
dat_notreat <- glm_data(
  Y ~ b0 + b1 * abs(sin(W1)),
  W1 = W1
)

# Specify learner(s) for fitting the prognostic model
learners <- list(
  mars = list(
    model = parsnip::set_engine(
      parsnip::mars(
        mode = "regression", prod_degree = 3
      ),
      "earth"
    )
  )
)
# Estimate the average treatment effect (ATE) with prognostic covariate adjustment
ate <- rctglm_with_prognosticscore(
  formula = Y ~ .,
  exposure_indicator = A,
  exposure_prob = exposure_prob,
  data = dat_treat,
  family = gaussian(),
  estimand_fun = "ate",
  data_hist = dat_notreat,
  learners = learners)
#> 
#> ── Fitting prognostic model ──
#> 
#>  Created formula for fitting prognostic model as: Y ~ .
#>  Fitting learners
#> • mod_mars
#> i No tuning parameters. `fit_resamples()` will be attempted
#> i 1 of 1 resampling: mod_mars
#>  1 of 1 resampling: mod_mars (128ms)
#>  Model with lowest RMSE: mod_mars
#>  Investigate trained learners and fitted model in `prognostic_info` list element
#> 
#> ── Symbolic differentiation of estimand function ──
#> 
#>  Symbolically deriving partial derivative of the function 'psi1 - psi0' with respect to 'psi0' as: '-1'.
#> • Alternatively, specify the derivative through the argument
#> `estimand_fun_deriv0`
#>  Symbolically deriving partial derivative of the function 'psi1 - psi0' with respect to 'psi1' as: '1'.
#> • Alternatively, specify the derivative through the argument
#> `estimand_fun_deriv1`

# Extract the prognostic_info list element
prog(ate)
#> $formula
#> Y ~ .
#> <environment: 0x559defd19040>
#> 
#> $model_fit
#> ══ Workflow [trained] ══════════════════════════════════════════════════════════
#> Preprocessor: Formula
#> Model: mars()
#> 
#> ── Preprocessor ────────────────────────────────────────────────────────────────
#> Y ~ .
#> 
#> ── Model ───────────────────────────────────────────────────────────────────────
#> Selected 3 of 7 terms, and 1 of 1 predictors
#> Termination condition: RSq changed by less than 0.001 at 7 terms
#> Importance: W1
#> Number of terms at each degree of interaction: 1 2 (additive model)
#> GCV 0.9169908    RSS 81.02531    GRSq 0.07979358    RSq 0.1703965
#> 
#> $learners
#> $learners$mars
#> $learners$mars$model
#> MARS Model Specification (regression)
#> 
#> Main Arguments:
#>   prod_degree = 3
#> 
#> Computational engine: earth 
#> 
#> 
#> 
#> 
#> $cv_folds
#> [1] 5
#> 
#> $data
#>               Y           W1
#> 1    2.72131967  1.384982192
#> 2    1.96536934  1.193408270
#> 3    0.28597650 -1.369564363
#> 4    1.52027982 -0.443691538
#> 5    1.56707286 -0.187557026
#> 6    1.99316477 -1.142904804
#> 7    3.65908575  0.759210528
#> 8    1.31386678  0.002638961
#> 9    0.81196794  1.980248458
#> 10   1.26812618 -1.529962860
#> 11   1.06862939  1.857370362
#> 12   1.91084450  0.565919823
#> 13   2.10114176 -1.564187960
#> 14   1.75812226 -0.834586513
#> 15   1.38984169 -0.103746382
#> 16   4.05054962  1.023317943
#> 17   2.95210537 -1.483048798
#> 18   2.28907930 -0.097032987
#> 19   1.07991083  1.181269797
#> 20   1.55647248 -1.534684835
#> 21   2.07366418  0.893202768
#> 22   0.15454935 -0.190888042
#> 23   0.79115571 -0.981938925
#> 24   0.59658917 -1.042243558
#> 25  -0.09781132  0.095488627
#> 26   1.40642497 -1.788098332
#> 27   1.72804520 -1.417561601
#> 28   1.44827133  0.759996420
#> 29   3.02302248  1.353978418
#> 30   2.47968123  1.612627343
#> 31   2.32637267 -1.646279992
#> 32   3.76014950 -1.787805090
#> 33   1.06753206 -1.478330033
#> 34   4.25406830 -1.577669651
#> 35  -0.58200448  0.363677772
#> 36   3.47921389  1.772761269
#> 37   2.72956722 -1.388085473
#> 38   2.12239233  1.579245754
#> 39   2.86090478 -1.446541916
#> 40   0.02582534 -0.331721296
#> 41   3.39992264 -1.641866679
#> 42   1.65839378 -0.954397969
#> 43   1.51910524 -0.561635143
#> 44   3.03131420  0.620070576
#> 45   2.37780501 -1.582823326
#> 46   1.89846981  1.640790832
#> 47   1.91595912 -1.505982758
#> 48   1.20912163 -0.059998618
#> 49   1.79745562 -0.302318255
#> 50   1.95474197  0.826663015
#> 51   2.49580175 -0.771183933
#> 52   2.18475463  1.836358662
#> 53   2.65592965 -0.722274201
#> 54   3.71480819 -0.508814248
#> 55   1.87943689  1.367007247
#> 56   2.37425858 -0.690988204
#> 57   2.78085001  1.594292774
#> 58  -0.09784400  0.645300754
#> 59   2.60088513  0.924540984
#> 60   3.86364762 -1.609956257
#> 61   2.87412768 -0.699418604
#> 62   1.63140152  1.010322550
#> 63   1.87219306  0.797926514
#> 64   2.12459863  0.761499645
#> 65   2.87909681  0.552423899
#> 66   1.60015317  1.700073148
#> 67   2.90624178  0.356230548
#> 68   2.74988455  1.545411798
#> 69   1.98788870  1.798755240
#> 70   2.10797146  0.377036167
#> 71   2.57142560 -1.211364707
#> 72   1.42425606 -0.469872492
#> 73   3.00885879 -1.856295628
#> 74   1.78750964  1.667663256
#> 75   1.56814662 -0.538129904
#> 76   0.90807385 -1.032231587
#> 77   3.83350260 -1.934395369
#> 78   3.06762363  1.874492300
#> 79   1.73673745 -0.213957776
#> 80   2.77273994  1.857513856
#> 81   2.52606720 -0.419701044
#> 82   1.84679252 -1.649829350
#> 83   1.14039857  0.503861475
#> 84   2.82844152 -0.494098693
#> 85   1.85712427  0.809858762
#> 86   1.84675947 -1.383845829
#> 87   3.12141511 -0.023786677
#> 88   1.94054367 -0.919254839
#> 89   3.75327233  1.394406566
#> 90   1.24555988 -0.454942441
#> 91   3.01590511 -1.840850134
#> 92   2.91345858  1.736370123
#> 93   3.03651709 -1.526609730
#> 94   1.34966077  0.573839966
#> 95   3.71272288 -1.616502794
#> 96   1.70989426 -0.031969011
#> 97   1.14458593 -0.794610540
#> 98   1.68390987 -1.345750707
#> 99   3.38403512 -1.398603517
#> 100  3.27314061 -1.677519421
#>
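
The trained workflow stored in the model_fit element can be reused directly, for instance to compute the prognostic score on new observations. A minimal sketch, assuming model_fit is a trained workflows object as printed above; the data frame new_obs and its values are illustrative:

prog_info <- prog(ate)

# Illustrative new data containing the covariate used by the prognostic model
new_obs <- data.frame(W1 = c(-1, 0, 1))

# Predict the prognostic score with the trained workflow
predict(prog_info$model_fit, new_data = new_obs)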