Group sequential design power using weighted log rank test under non-proportional hazards
Source: R/gs_power_wlr.R, gs_power_wlr.Rd
Group sequential design power using weighted log rank test under non-proportional hazards
Usage
gs_power_wlr(
enroll_rate = define_enroll_rate(duration = c(2, 2, 10), rate = c(3, 6, 9)),
fail_rate = tibble(stratum = "All", duration = c(3, 100), fail_rate = log(2)/c(9, 18),
hr = c(0.9, 0.6), dropout_rate = rep(0.001, 2)),
event = c(30, 40, 50),
analysis_time = NULL,
binding = FALSE,
upper = gs_spending_bound,
lower = gs_spending_bound,
upar = list(sf = gsDesign::sfLDOF, total_spend = 0.025),
lpar = list(sf = gsDesign::sfLDOF, total_spend = NULL),
test_upper = TRUE,
test_lower = TRUE,
ratio = 1,
weight = wlr_weight_fh,
info_scale = c("h0_h1_info", "h0_info", "h1_info"),
approx = "asymptotic",
r = 18,
tol = 1e-06,
interval = c(0.01, 1000),
integer = FALSE
)
Arguments
- enroll_rate
Enrollment rates.
- fail_rate
Failure and dropout rates.
- event
Targeted number of events at each analysis.
- analysis_time
Minimum time of analysis.
- binding
Indicator of whether futility bound is binding; the default of FALSE is recommended.
- upper
Function to compute upper bound.
- lower
Function to compute lower bound.
- upar
Parameters passed to upper.
- lpar
Parameters passed to lower.
- test_upper
Indicator of which analyses should include an upper (efficacy) bound; a single value of TRUE (default) indicates all analyses; otherwise, a logical vector of the same length as info should indicate which analyses will have an efficacy bound.
- test_lower
Indicator of which analyses should include a lower bound; a single value of TRUE (default) indicates all analyses; a single value of FALSE indicates no lower bound; otherwise, a logical vector of the same length as info should indicate which analyses will have a lower bound.
Experimental:Control randomization ratio (not yet implemented).
- weight
Weight of weighted log rank test:
"1" = unweighted.
"n" = Gehan-Breslow.
"sqrtN" = Tarone-Ware.
"FH_p[a]_q[b]" = Fleming-Harrington with p = a and q = b.
A sketch of passing a custom weight function follows this argument list.
- info_scale
Information scale for calculation. Options are:
"h0_h1_info" (default): variance under both null and alternative hypotheses is used.
"h0_info": variance under null hypothesis is used.
"h1_info": variance under alternative hypothesis is used.
- approx
Approximate estimation method for Z statistics:
"event_driven" = only works under the proportional hazards model with the log rank test.
"asymptotic" (default).
- r
Integer value controlling grid for numerical integration as in Jennison and Turnbull (2000); default is 18, range is 1 to 80. Larger values provide a larger number of grid points and greater accuracy. Normally, r will not be changed by the user.
- tol
Tolerance parameter for boundary convergence (on Z-scale).
- interval
An interval presumed to include the time at which the expected event count equals the targeted number of events.
- integer
Logical value indicating whether it is an integer design (i.e., integer sample size and event counts) or not. This argument is commonly used when creating an integer design via to_integer().
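A minimal sketch, not part of the original documentation: it uses the same enrollment and failure rates as the Examples below and shows one way to pass a custom weight (a Fleming-Harrington FH(0, 0.5) weight built by wrapping wlr_weight_fh()) together with a test_lower vector that drops the futility bound at the final analysis. The rho/gamma values and the dropped final futility test are illustrative assumptions, not defaults.
library(gsDesign2)
gs_power_wlr(
  enroll_rate = define_enroll_rate(duration = 12, rate = 500 / 12),
  fail_rate = define_fail_rate(
    duration = c(4, 100),
    fail_rate = log(2) / 15, # median survival 15 months
    hr = c(1, .6),
    dropout_rate = 0.001
  ),
  event = c(30, 40, 50),
  upper = gs_spending_bound,
  upar = list(sf = gsDesign::sfLDOF, total_spend = 0.025),
  lower = gs_spending_bound,
  lpar = list(sf = gsDesign::sfLDOF, total_spend = 0.2),
  test_lower = c(TRUE, TRUE, FALSE), # no futility test at the final analysis
  weight = function(x, arm0, arm1) wlr_weight_fh(x, arm0, arm1, rho = 0, gamma = 0.5)
)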
Examples
library(gsDesign)
library(gsDesign2)
# set enrollment rates
enroll_rate <- define_enroll_rate(duration = 12, rate = 500 / 12)
# set failure rates
fail_rate <- define_fail_rate(
duration = c(4, 100),
fail_rate = log(2) / 15, # median survival 15 months
hr = c(1, .6),
dropout_rate = 0.001
)
# set the targeted number of events and analysis time
target_events <- c(30, 40, 50)
target_analysisTime <- c(10, 24, 30)
# Example 1 ----
# \donttest{
# fixed bounds and calculate the power for targeted number of events
gs_power_wlr(
enroll_rate = enroll_rate,
fail_rate = fail_rate,
event = target_events,
analysis_time = NULL,
upper = gs_b,
upar = gsDesign(
k = length(target_events),
test.type = 1,
n.I = target_events,
maxn.IPlan = max(target_events),
sfu = sfLDOF,
sfupar = NULL
)$upper$bound,
lower = gs_b,
lpar = c(qnorm(.1), rep(-Inf, 2))
)
#> $input
#> $input$enroll_rate
#> # A tibble: 1 × 3
#> stratum duration rate
#> <chr> <dbl> <dbl>
#> 1 All 12 41.7
#>
#> $input$fail_rate
#> # A tibble: 2 × 5
#> stratum duration fail_rate dropout_rate hr
#> <chr> <dbl> <dbl> <dbl> <dbl>
#> 1 All 4 0.0462 0.001 1
#> 2 All 100 0.0462 0.001 0.6
#>
#> $input$event
#> [1] 30 40 50
#>
#> $input$analysis_time
#> NULL
#>
#> $input$binding
#> [1] FALSE
#>
#> $input$ratio
#> [1] 1
#>
#> $input$upper
#> function (par = NULL, k = NULL, ...)
#> {
#> if (is.null(k)) {
#> return(par)
#> }
#> else {
#> return(par[k])
#> }
#> }
#> <bytecode: 0x563f666ad068>
#> <environment: namespace:gsDesign2>
#>
#> $input$upar
#> [1] 2.668630 2.288719 2.030702
#>
#> $input$test_upper
#> [1] TRUE
#>
#> $input$lower
#> function (par = NULL, k = NULL, ...)
#> {
#> if (is.null(k)) {
#> return(par)
#> }
#> else {
#> return(par[k])
#> }
#> }
#> <bytecode: 0x563f666ad068>
#> <environment: namespace:gsDesign2>
#>
#> $input$lpar
#> [1] -1.281552 -Inf -Inf
#>
#> $input$test_lower
#> [1] TRUE
#>
#> $input$weight
#> function (x, arm0, arm1, rho = 0, gamma = 0, tau = NULL)
#> {
#> n <- arm0$size + arm1$size
#> p1 <- arm1$size/n
#> p0 <- 1 - p1
#> if (!is.null(tau)) {
#> if (tau > 0) {
#> x <- pmin(x, tau)
#> }
#> }
#> esurv <- p0 * npsurvSS::psurv(x, arm0) + p1 * npsurvSS::psurv(x,
#> arm1)
#> (1 - esurv)^rho * esurv^gamma
#> }
#> <bytecode: 0x563f66845720>
#> <environment: namespace:gsDesign2>
#>
#> $input$info_scale
#> [1] "h0_h1_info"
#>
#> $input$approx
#> [1] "asymptotic"
#>
#> $input$r
#> [1] 18
#>
#> $input$tol
#> [1] 1e-06
#>
#>
#> $enroll_rate
#> # A tibble: 1 × 3
#> stratum duration rate
#> <chr> <dbl> <dbl>
#> 1 All 12 41.7
#>
#> $fail_rate
#> # A tibble: 2 × 5
#> stratum duration fail_rate dropout_rate hr
#> <chr> <dbl> <dbl> <dbl> <dbl>
#> 1 All 4 0.0462 0.001 1
#> 2 All 100 0.0462 0.001 0.6
#>
#> $bounds
#> # A tibble: 4 × 7
#> analysis bound probability probability0 z `~hr at bound` `nominal p`
#> <int> <chr> <dbl> <dbl> <dbl> <dbl> <dbl>
#> 1 1 upper 0.00470 0.00381 2.67 0.377 0.00381
#> 2 1 lower 0.0881 0.100 -1.28 1.60 0.9
#> 3 2 upper 0.0182 0.0127 2.29 0.485 0.0110
#> 4 3 upper 0.0439 0.0268 2.03 0.563 0.0211
#>
#> $analysis
#> analysis time n event ahr theta info info0
#> 1 1 5.893949 245.5812 29.99999 0.9636346 0.03704308 3.683799 3.684201
#> 2 2 6.900922 287.5384 40.00003 0.9373448 0.06470405 5.749119 5.750793
#> 3 3 7.808453 325.3522 50.00000 0.9155821 0.08819527 8.132495 8.136743
#> info_frac info_frac0
#> 1 0.4529728 0.4527857
#> 2 0.7069318 0.7067685
#> 3 1.0000000 1.0000000
#>
#> attr(,"class")
#> [1] "non_binding" "wlr" "gs_design" "list"
# }
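Because gs_b() simply returns the supplied parameter vector (or its k-th element), the upar built with gsDesign() above is just the vector of fixed efficacy Z-bounds printed under $input$upar. A minimal sketch, equivalent up to the rounding of those printed values, passes the Z-values directly (reusing the objects defined at the top of the Examples):
gs_power_wlr(
  enroll_rate = enroll_rate,
  fail_rate = fail_rate,
  event = target_events,
  analysis_time = NULL,
  upper = gs_b,
  upar = c(2.668630, 2.288719, 2.030702), # fixed efficacy Z-bounds, as printed above
  lower = gs_b,
  lpar = c(qnorm(.1), rep(-Inf, 2))
)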
# Example 2 ----
# fixed bounds and calculate the power for targeted analysis time
# \donttest{
gs_power_wlr(
enroll_rate = enroll_rate,
fail_rate = fail_rate,
event = NULL,
analysis_time = target_analysisTime,
upper = gs_b,
upar = gsDesign(
k = length(target_events),
test.type = 1,
n.I = target_events,
maxn.IPlan = max(target_events),
sfu = sfLDOF,
sfupar = NULL
)$upper$bound,
lower = gs_b,
lpar = c(qnorm(.1), rep(-Inf, 2))
)
#> $input
#> $input$enroll_rate
#> # A tibble: 1 × 3
#> stratum duration rate
#> <chr> <dbl> <dbl>
#> 1 All 12 41.7
#>
#> $input$fail_rate
#> # A tibble: 2 × 5
#> stratum duration fail_rate dropout_rate hr
#> <chr> <dbl> <dbl> <dbl> <dbl>
#> 1 All 4 0.0462 0.001 1
#> 2 All 100 0.0462 0.001 0.6
#>
#> $input$event
#> NULL
#>
#> $input$analysis_time
#> [1] 10 24 30
#>
#> $input$binding
#> [1] FALSE
#>
#> $input$ratio
#> [1] 1
#>
#> $input$upper
#> function (par = NULL, k = NULL, ...)
#> {
#> if (is.null(k)) {
#> return(par)
#> }
#> else {
#> return(par[k])
#> }
#> }
#> <bytecode: 0x563f666ad068>
#> <environment: namespace:gsDesign2>
#>
#> $input$upar
#> [1] 2.668630 2.288719 2.030702
#>
#> $input$test_upper
#> [1] TRUE
#>
#> $input$lower
#> function (par = NULL, k = NULL, ...)
#> {
#> if (is.null(k)) {
#> return(par)
#> }
#> else {
#> return(par[k])
#> }
#> }
#> <bytecode: 0x563f666ad068>
#> <environment: namespace:gsDesign2>
#>
#> $input$lpar
#> [1] -1.281552 -Inf -Inf
#>
#> $input$test_lower
#> [1] TRUE
#>
#> $input$weight
#> function (x, arm0, arm1, rho = 0, gamma = 0, tau = NULL)
#> {
#> n <- arm0$size + arm1$size
#> p1 <- arm1$size/n
#> p0 <- 1 - p1
#> if (!is.null(tau)) {
#> if (tau > 0) {
#> x <- pmin(x, tau)
#> }
#> }
#> esurv <- p0 * npsurvSS::psurv(x, arm0) + p1 * npsurvSS::psurv(x,
#> arm1)
#> (1 - esurv)^rho * esurv^gamma
#> }
#> <bytecode: 0x563f66845720>
#> <environment: namespace:gsDesign2>
#>
#> $input$info_scale
#> [1] "h0_h1_info"
#>
#> $input$approx
#> [1] "asymptotic"
#>
#> $input$r
#> [1] 18
#>
#> $input$tol
#> [1] 1e-06
#>
#>
#> $enroll_rate
#> # A tibble: 1 × 3
#> stratum duration rate
#> <chr> <dbl> <dbl>
#> 1 All 12 41.7
#>
#> $fail_rate
#> # A tibble: 2 × 5
#> stratum duration fail_rate dropout_rate hr
#> <chr> <dbl> <dbl> <dbl> <dbl>
#> 1 All 4 0.0462 0.001 1
#> 2 All 100 0.0462 0.001 0.6
#>
#> $bounds
#> # A tibble: 4 × 7
#> analysis bound probability probability0 z `~hr at bound` `nominal p`
#> <int> <chr> <dbl> <dbl> <dbl> <dbl> <dbl>
#> 1 1 upper 0.0172 0.00381 2.67 0.546 0.00381
#> 2 1 lower 0.0335 0.100 -1.28 1.34 0.9
#> 3 2 upper 0.622 0.0141 2.29 0.747 0.0110
#> 4 3 upper 0.842 0.0263 2.03 0.789 0.0211
#>
#> $analysis
#> analysis time n event ahr theta info info0
#> 1 1 10 416.6667 77.80361 0.8720599 0.1368971 16.20843 16.22923
#> 2 2 24 500.0000 246.28341 0.7164215 0.3334865 61.35217 62.08666
#> 3 3 30 500.0000 293.69568 0.6955693 0.3630247 72.91885 74.25144
#> info_frac info_frac0
#> 1 0.2222803 0.2185712
#> 2 0.8413760 0.8361677
#> 3 1.0000000 1.0000000
#>
#> attr(,"class")
#> [1] "non_binding" "wlr" "gs_design" "list"
# }
# Example 3 ----
# fixed bounds and calculate the power for targeted analysis time & number of events
# \donttest{
gs_power_wlr(
enroll_rate = enroll_rate,
fail_rate = fail_rate,
event = target_events,
analysis_time = target_analysisTime,
upper = gs_b,
upar = gsDesign(
k = length(target_events),
test.type = 1,
n.I = target_events,
maxn.IPlan = max(target_events),
sfu = sfLDOF,
sfupar = NULL
)$upper$bound,
lower = gs_b,
lpar = c(qnorm(.1), rep(-Inf, 2))
)
#> $input
#> $input$enroll_rate
#> # A tibble: 1 × 3
#> stratum duration rate
#> <chr> <dbl> <dbl>
#> 1 All 12 41.7
#>
#> $input$fail_rate
#> # A tibble: 2 × 5
#> stratum duration fail_rate dropout_rate hr
#> <chr> <dbl> <dbl> <dbl> <dbl>
#> 1 All 4 0.0462 0.001 1
#> 2 All 100 0.0462 0.001 0.6
#>
#> $input$event
#> [1] 30 40 50
#>
#> $input$analysis_time
#> [1] 10 24 30
#>
#> $input$binding
#> [1] FALSE
#>
#> $input$ratio
#> [1] 1
#>
#> $input$upper
#> function (par = NULL, k = NULL, ...)
#> {
#> if (is.null(k)) {
#> return(par)
#> }
#> else {
#> return(par[k])
#> }
#> }
#> <bytecode: 0x563f666ad068>
#> <environment: namespace:gsDesign2>
#>
#> $input$upar
#> [1] 2.668630 2.288719 2.030702
#>
#> $input$test_upper
#> [1] TRUE
#>
#> $input$lower
#> function (par = NULL, k = NULL, ...)
#> {
#> if (is.null(k)) {
#> return(par)
#> }
#> else {
#> return(par[k])
#> }
#> }
#> <bytecode: 0x563f666ad068>
#> <environment: namespace:gsDesign2>
#>
#> $input$lpar
#> [1] -1.281552 -Inf -Inf
#>
#> $input$test_lower
#> [1] TRUE
#>
#> $input$weight
#> function (x, arm0, arm1, rho = 0, gamma = 0, tau = NULL)
#> {
#> n <- arm0$size + arm1$size
#> p1 <- arm1$size/n
#> p0 <- 1 - p1
#> if (!is.null(tau)) {
#> if (tau > 0) {
#> x <- pmin(x, tau)
#> }
#> }
#> esurv <- p0 * npsurvSS::psurv(x, arm0) + p1 * npsurvSS::psurv(x,
#> arm1)
#> (1 - esurv)^rho * esurv^gamma
#> }
#> <bytecode: 0x563f66845720>
#> <environment: namespace:gsDesign2>
#>
#> $input$info_scale
#> [1] "h0_h1_info"
#>
#> $input$approx
#> [1] "asymptotic"
#>
#> $input$r
#> [1] 18
#>
#> $input$tol
#> [1] 1e-06
#>
#>
#> $enroll_rate
#> # A tibble: 1 × 3
#> stratum duration rate
#> <chr> <dbl> <dbl>
#> 1 All 12 41.7
#>
#> $fail_rate
#> # A tibble: 2 × 5
#> stratum duration fail_rate dropout_rate hr
#> <chr> <dbl> <dbl> <dbl> <dbl>
#> 1 All 4 0.0462 0.001 1
#> 2 All 100 0.0462 0.001 0.6
#>
#> $bounds
#> # A tibble: 4 × 7
#> analysis bound probability probability0 z `~hr at bound` `nominal p`
#> <int> <chr> <dbl> <dbl> <dbl> <dbl> <dbl>
#> 1 1 upper 0.0172 0.00381 2.67 0.546 0.00381
#> 2 1 lower 0.0335 0.100 -1.28 1.34 0.9
#> 3 2 upper 0.622 0.0141 2.29 0.747 0.0110
#> 4 3 upper 0.842 0.0263 2.03 0.789 0.0211
#>
#> $analysis
#> analysis time n event ahr theta info info0
#> 1 1 10 416.6667 77.80361 0.8720599 0.1368971 16.20843 16.22923
#> 2 2 24 500.0000 246.28341 0.7164215 0.3334865 61.35217 62.08666
#> 3 3 30 500.0000 293.69568 0.6955693 0.3630247 72.91885 74.25144
#> info_frac info_frac0
#> 1 0.2222803 0.2185712
#> 2 0.8413760 0.8361677
#> 3 1.0000000 1.0000000
#>
#> attr(,"class")
#> [1] "non_binding" "wlr" "gs_design" "list"
# }
# Example 4 ----
# spending bounds and calculate the power for targeted number of events
# \donttest{
gs_power_wlr(
enroll_rate = enroll_rate,
fail_rate = fail_rate,
event = target_events,
analysis_time = NULL,
upper = gs_spending_bound,
upar = list(sf = gsDesign::sfLDOF, total_spend = 0.025),
lower = gs_spending_bound,
lpar = list(sf = gsDesign::sfLDOF, total_spend = 0.2)
)
#> $input
#> $input$enroll_rate
#> # A tibble: 1 × 3
#> stratum duration rate
#> <chr> <dbl> <dbl>
#> 1 All 12 41.7
#>
#> $input$fail_rate
#> # A tibble: 2 × 5
#> stratum duration fail_rate dropout_rate hr
#> <chr> <dbl> <dbl> <dbl> <dbl>
#> 1 All 4 0.0462 0.001 1
#> 2 All 100 0.0462 0.001 0.6
#>
#> $input$event
#> [1] 30 40 50
#>
#> $input$analysis_time
#> NULL
#>
#> $input$binding
#> [1] FALSE
#>
#> $input$ratio
#> [1] 1
#>
#> $input$upper
#> function (k = 1, par = list(sf = gsDesign::sfLDOF, total_spend = 0.025,
#> param = NULL, timing = NULL, max_info = NULL), hgm1 = NULL,
#> theta = 0.1, info = 1:3, efficacy = TRUE, test_bound = TRUE,
#> r = 18, tol = 1e-06)
#> {
#> if (length(test_bound) == 1 && k > 1) {
#> test_bound <- rep(test_bound, k)
#> }
#> if (!is.null(par$timing)) {
#> timing <- par$timing
#> }
#> else {
#> if (is.null(par$max_info)) {
#> timing <- info/max(info)
#> }
#> else {
#> timing <- info/par$max_info
#> }
#> }
#> spend <- par$sf(alpha = par$total_spend, t = timing, param = par$param)$spend
#> old_spend <- 0
#> for (i in 1:k) {
#> if (test_bound[i]) {
#> xx <- spend[i] - old_spend
#> old_spend <- spend[i]
#> spend[i] <- xx
#> }
#> else {
#> spend[i] <- 0
#> }
#> }
#> spend <- spend[k]
#> if (!efficacy) {
#> if (spend <= 0) {
#> return(-Inf)
#> }
#> if (length(theta) == 1)
#> theta <- rep(theta, length(info))
#> a <- qnorm(spend) + sqrt(info[k]) * theta[k]
#> if (k == 1) {
#> return(a)
#> }
#> mu <- theta[k] * sqrt(info[k])
#> extreme_low <- mu - 3 - 4 * log(r)
#> extreme_high <- mu + 3 + 4 * log(r)
#> adelta <- 1
#> j <- 0
#> while (abs(adelta) > tol) {
#> hg <- hupdate(theta = theta[k], info = info[k], a = -Inf,
#> b = a, thetam1 = theta[k - 1], im1 = info[k -
#> 1], gm1 = hgm1, r = r)
#> i <- length(hg$h)
#> pik <- sum(hg$h)
#> adelta <- spend - pik
#> dplo <- hg$h[i]/hg$w[i]
#> if (adelta > dplo) {
#> adelta <- 1
#> }
#> else if (adelta < -dplo) {
#> adelta <- -1
#> }
#> else {
#> adelta <- adelta/dplo
#> }
#> a <- a + adelta
#> if (a > extreme_high) {
#> a <- extreme_high
#> }
#> else if (a < extreme_low) {
#> a <- extreme_low
#> }
#> if (abs(adelta) < tol) {
#> return(a)
#> }
#> j <- j + 1
#> if (j > 20) {
#> stop(paste("gs_spending_bound(): bound_update did not converge for lower bound calculation, analysis",
#> k, " !"))
#> }
#> }
#> }
#> else {
#> if (spend <= 0) {
#> return(Inf)
#> }
#> if (length(theta) == 1)
#> theta <- rep(theta, length(info))
#> b <- qnorm(spend, lower.tail = FALSE)
#> if (k == 1) {
#> return(b)
#> }
#> mu <- theta[k] * sqrt(info[k])
#> extreme_low <- mu - 3 - 4 * log(r)
#> extreme_high <- mu + 3 + 4 * log(r)
#> bdelta <- 1
#> j <- 1
#> while (abs(bdelta) > tol) {
#> hg <- hupdate(theta = 0, info = info[k], a = b, b = Inf,
#> thetam1 = 0, im1 = info[k - 1], gm1 = hgm1, r = r)
#> pik <- sum(hg$h)
#> bdelta <- spend - pik
#> dpikdb <- hg$h[1]/hg$w[1]
#> if (bdelta > dpikdb) {
#> bdelta <- 1
#> }
#> else if (bdelta < -dpikdb) {
#> bdelta <- -1
#> }
#> else {
#> bdelta <- bdelta/dpikdb
#> }
#> b <- b - bdelta
#> if (b > extreme_high) {
#> b <- extreme_high
#> }
#> else if (b < extreme_low) {
#> b <- extreme_low
#> }
#> if (abs(bdelta) < tol) {
#> return(b)
#> }
#> j <- j + 1
#> if (j > 20) {
#> stop(paste("gs_spending_bound(): bound_update did not converge for lower bound calculation, analysis",
#> k, " !"))
#> }
#> }
#> }
#> }
#> <bytecode: 0x563f666b1ed8>
#> <environment: namespace:gsDesign2>
#>
#> $input$upar
#> $input$upar$sf
#> function (alpha, t, param = NULL)
#> {
#> checkScalar(alpha, "numeric", c(0, Inf), c(FALSE, FALSE))
#> checkVector(t, "numeric", c(0, Inf), c(TRUE, FALSE))
#> if (is.null(param) || param < 0.005 || param > 20)
#> param <- 1
#> checkScalar(param, "numeric", c(0.005, 20), c(TRUE, TRUE))
#> t[t > 1] <- 1
#> if (param == 1) {
#> rho <- 1
#> txt <- "Lan-DeMets O'Brien-Fleming approximation"
#> parname <- "none"
#> }
#> else {
#> rho <- param
#> txt <- "Generalized Lan-DeMets O'Brien-Fleming"
#> parname <- "rho"
#> }
#> z <- -qnorm(alpha/2)
#> x <- list(name = txt, param = param, parname = parname, sf = sfLDOF,
#> spend = 2 * (1 - pnorm(z/t^(rho/2))), bound = NULL, prob = NULL)
#> class(x) <- "spendfn"
#> x
#> }
#> <bytecode: 0x563f63ede910>
#> <environment: namespace:gsDesign>
#>
#> $input$upar$total_spend
#> [1] 0.025
#>
#>
#> $input$test_upper
#> [1] TRUE
#>
#> $input$lower
#> function (k = 1, par = list(sf = gsDesign::sfLDOF, total_spend = 0.025,
#> param = NULL, timing = NULL, max_info = NULL), hgm1 = NULL,
#> theta = 0.1, info = 1:3, efficacy = TRUE, test_bound = TRUE,
#> r = 18, tol = 1e-06)
#> {
#> if (length(test_bound) == 1 && k > 1) {
#> test_bound <- rep(test_bound, k)
#> }
#> if (!is.null(par$timing)) {
#> timing <- par$timing
#> }
#> else {
#> if (is.null(par$max_info)) {
#> timing <- info/max(info)
#> }
#> else {
#> timing <- info/par$max_info
#> }
#> }
#> spend <- par$sf(alpha = par$total_spend, t = timing, param = par$param)$spend
#> old_spend <- 0
#> for (i in 1:k) {
#> if (test_bound[i]) {
#> xx <- spend[i] - old_spend
#> old_spend <- spend[i]
#> spend[i] <- xx
#> }
#> else {
#> spend[i] <- 0
#> }
#> }
#> spend <- spend[k]
#> if (!efficacy) {
#> if (spend <= 0) {
#> return(-Inf)
#> }
#> if (length(theta) == 1)
#> theta <- rep(theta, length(info))
#> a <- qnorm(spend) + sqrt(info[k]) * theta[k]
#> if (k == 1) {
#> return(a)
#> }
#> mu <- theta[k] * sqrt(info[k])
#> extreme_low <- mu - 3 - 4 * log(r)
#> extreme_high <- mu + 3 + 4 * log(r)
#> adelta <- 1
#> j <- 0
#> while (abs(adelta) > tol) {
#> hg <- hupdate(theta = theta[k], info = info[k], a = -Inf,
#> b = a, thetam1 = theta[k - 1], im1 = info[k -
#> 1], gm1 = hgm1, r = r)
#> i <- length(hg$h)
#> pik <- sum(hg$h)
#> adelta <- spend - pik
#> dplo <- hg$h[i]/hg$w[i]
#> if (adelta > dplo) {
#> adelta <- 1
#> }
#> else if (adelta < -dplo) {
#> adelta <- -1
#> }
#> else {
#> adelta <- adelta/dplo
#> }
#> a <- a + adelta
#> if (a > extreme_high) {
#> a <- extreme_high
#> }
#> else if (a < extreme_low) {
#> a <- extreme_low
#> }
#> if (abs(adelta) < tol) {
#> return(a)
#> }
#> j <- j + 1
#> if (j > 20) {
#> stop(paste("gs_spending_bound(): bound_update did not converge for lower bound calculation, analysis",
#> k, " !"))
#> }
#> }
#> }
#> else {
#> if (spend <= 0) {
#> return(Inf)
#> }
#> if (length(theta) == 1)
#> theta <- rep(theta, length(info))
#> b <- qnorm(spend, lower.tail = FALSE)
#> if (k == 1) {
#> return(b)
#> }
#> mu <- theta[k] * sqrt(info[k])
#> extreme_low <- mu - 3 - 4 * log(r)
#> extreme_high <- mu + 3 + 4 * log(r)
#> bdelta <- 1
#> j <- 1
#> while (abs(bdelta) > tol) {
#> hg <- hupdate(theta = 0, info = info[k], a = b, b = Inf,
#> thetam1 = 0, im1 = info[k - 1], gm1 = hgm1, r = r)
#> pik <- sum(hg$h)
#> bdelta <- spend - pik
#> dpikdb <- hg$h[1]/hg$w[1]
#> if (bdelta > dpikdb) {
#> bdelta <- 1
#> }
#> else if (bdelta < -dpikdb) {
#> bdelta <- -1
#> }
#> else {
#> bdelta <- bdelta/dpikdb
#> }
#> b <- b - bdelta
#> if (b > extreme_high) {
#> b <- extreme_high
#> }
#> else if (b < extreme_low) {
#> b <- extreme_low
#> }
#> if (abs(bdelta) < tol) {
#> return(b)
#> }
#> j <- j + 1
#> if (j > 20) {
#> stop(paste("gs_spending_bound(): bound_update did not converge for lower bound calculation, analysis",
#> k, " !"))
#> }
#> }
#> }
#> }
#> <bytecode: 0x563f666b1ed8>
#> <environment: namespace:gsDesign2>
#>
#> $input$lpar
#> $input$lpar$sf
#> function (alpha, t, param = NULL)
#> {
#> checkScalar(alpha, "numeric", c(0, Inf), c(FALSE, FALSE))
#> checkVector(t, "numeric", c(0, Inf), c(TRUE, FALSE))
#> if (is.null(param) || param < 0.005 || param > 20)
#> param <- 1
#> checkScalar(param, "numeric", c(0.005, 20), c(TRUE, TRUE))
#> t[t > 1] <- 1
#> if (param == 1) {
#> rho <- 1
#> txt <- "Lan-DeMets O'Brien-Fleming approximation"
#> parname <- "none"
#> }
#> else {
#> rho <- param
#> txt <- "Generalized Lan-DeMets O'Brien-Fleming"
#> parname <- "rho"
#> }
#> z <- -qnorm(alpha/2)
#> x <- list(name = txt, param = param, parname = parname, sf = sfLDOF,
#> spend = 2 * (1 - pnorm(z/t^(rho/2))), bound = NULL, prob = NULL)
#> class(x) <- "spendfn"
#> x
#> }
#> <bytecode: 0x563f63ede910>
#> <environment: namespace:gsDesign>
#>
#> $input$lpar$total_spend
#> [1] 0.2
#>
#>
#> $input$test_lower
#> [1] TRUE
#>
#> $input$weight
#> function (x, arm0, arm1, rho = 0, gamma = 0, tau = NULL)
#> {
#> n <- arm0$size + arm1$size
#> p1 <- arm1$size/n
#> p0 <- 1 - p1
#> if (!is.null(tau)) {
#> if (tau > 0) {
#> x <- pmin(x, tau)
#> }
#> }
#> esurv <- p0 * npsurvSS::psurv(x, arm0) + p1 * npsurvSS::psurv(x,
#> arm1)
#> (1 - esurv)^rho * esurv^gamma
#> }
#> <bytecode: 0x563f66845720>
#> <environment: namespace:gsDesign2>
#>
#> $input$info_scale
#> [1] "h0_h1_info"
#>
#> $input$approx
#> [1] "asymptotic"
#>
#> $input$r
#> [1] 18
#>
#> $input$tol
#> [1] 1e-06
#>
#>
#> $enroll_rate
#> # A tibble: 1 × 3
#> stratum duration rate
#> <chr> <dbl> <dbl>
#> 1 All 12 41.7
#>
#> $fail_rate
#> # A tibble: 2 × 5
#> stratum duration fail_rate dropout_rate hr
#> <chr> <dbl> <dbl> <dbl> <dbl>
#> 1 All 4 0.0462 0.001 1
#> 2 All 100 0.0462 0.001 0.6
#>
#> $bounds
#> # A tibble: 6 × 7
#> analysis bound probability probability0 z `~hr at bound` `nominal p`
#> <int> <chr> <dbl> <dbl> <dbl> <dbl> <dbl>
#> 1 1 upper 0.00110 0.000865 3.13 0.319 0.000865
#> 2 1 lower 0.0569 0.0655 -1.51 1.74 0.935
#> 3 2 upper 0.0115 0.00767 2.44 0.463 0.00739
#> 4 2 lower 0.127 0.159 -1.06 1.40 0.857
#> 5 3 upper 0.0427 0.0250 2.00 0.568 0.0226
#> 6 3 lower 0.200 0.266 -0.738 1.23 0.770
#>
#> $analysis
#> analysis time n event ahr theta info info0
#> 1 1 5.893949 245.5812 29.99999 0.9636346 0.03704308 3.683799 3.684201
#> 2 2 6.900922 287.5384 40.00003 0.9373448 0.06470405 5.749119 5.750793
#> 3 3 7.808453 325.3522 50.00000 0.9155821 0.08819527 8.132495 8.136743
#> info_frac info_frac0
#> 1 0.4529728 0.4527857
#> 2 0.7069318 0.7067685
#> 3 1.0000000 1.0000000
#>
#> attr(,"class")
#> [1] "non_binding" "wlr" "gs_design" "list"
# }
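A brief sketch, not part of the original examples: assigning the Example 4 call to an object and reading power off the $bounds element shown above, i.e., the cumulative probability of crossing the efficacy bound at the final analysis under the alternative.
x <- gs_power_wlr(
  enroll_rate = enroll_rate,
  fail_rate = fail_rate,
  event = target_events,
  upper = gs_spending_bound,
  upar = list(sf = gsDesign::sfLDOF, total_spend = 0.025),
  lower = gs_spending_bound,
  lpar = list(sf = gsDesign::sfLDOF, total_spend = 0.2)
)
# Cumulative efficacy-bound crossing probability at the last analysis
# (the `probability` column of $bounds) under the alternative hypothesis.
subset(x$bounds, bound == "upper" & analysis == max(analysis))$probability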
# Example 5 ----
# spending bounds and calculate the power for targeted analysis time
# \donttest{
gs_power_wlr(
enroll_rate = enroll_rate,
fail_rate = fail_rate,
event = NULL,
analysis_time = target_analysisTime,
upper = gs_spending_bound,
upar = list(sf = gsDesign::sfLDOF, total_spend = 0.025),
lower = gs_spending_bound,
lpar = list(sf = gsDesign::sfLDOF, total_spend = 0.2)
)
#> $input
#> $input$enroll_rate
#> # A tibble: 1 × 3
#> stratum duration rate
#> <chr> <dbl> <dbl>
#> 1 All 12 41.7
#>
#> $input$fail_rate
#> # A tibble: 2 × 5
#> stratum duration fail_rate dropout_rate hr
#> <chr> <dbl> <dbl> <dbl> <dbl>
#> 1 All 4 0.0462 0.001 1
#> 2 All 100 0.0462 0.001 0.6
#>
#> $input$event
#> NULL
#>
#> $input$analysis_time
#> [1] 10 24 30
#>
#> $input$binding
#> [1] FALSE
#>
#> $input$ratio
#> [1] 1
#>
#> $input$upper
#> function (k = 1, par = list(sf = gsDesign::sfLDOF, total_spend = 0.025,
#> param = NULL, timing = NULL, max_info = NULL), hgm1 = NULL,
#> theta = 0.1, info = 1:3, efficacy = TRUE, test_bound = TRUE,
#> r = 18, tol = 1e-06)
#> {
#> if (length(test_bound) == 1 && k > 1) {
#> test_bound <- rep(test_bound, k)
#> }
#> if (!is.null(par$timing)) {
#> timing <- par$timing
#> }
#> else {
#> if (is.null(par$max_info)) {
#> timing <- info/max(info)
#> }
#> else {
#> timing <- info/par$max_info
#> }
#> }
#> spend <- par$sf(alpha = par$total_spend, t = timing, param = par$param)$spend
#> old_spend <- 0
#> for (i in 1:k) {
#> if (test_bound[i]) {
#> xx <- spend[i] - old_spend
#> old_spend <- spend[i]
#> spend[i] <- xx
#> }
#> else {
#> spend[i] <- 0
#> }
#> }
#> spend <- spend[k]
#> if (!efficacy) {
#> if (spend <= 0) {
#> return(-Inf)
#> }
#> if (length(theta) == 1)
#> theta <- rep(theta, length(info))
#> a <- qnorm(spend) + sqrt(info[k]) * theta[k]
#> if (k == 1) {
#> return(a)
#> }
#> mu <- theta[k] * sqrt(info[k])
#> extreme_low <- mu - 3 - 4 * log(r)
#> extreme_high <- mu + 3 + 4 * log(r)
#> adelta <- 1
#> j <- 0
#> while (abs(adelta) > tol) {
#> hg <- hupdate(theta = theta[k], info = info[k], a = -Inf,
#> b = a, thetam1 = theta[k - 1], im1 = info[k -
#> 1], gm1 = hgm1, r = r)
#> i <- length(hg$h)
#> pik <- sum(hg$h)
#> adelta <- spend - pik
#> dplo <- hg$h[i]/hg$w[i]
#> if (adelta > dplo) {
#> adelta <- 1
#> }
#> else if (adelta < -dplo) {
#> adelta <- -1
#> }
#> else {
#> adelta <- adelta/dplo
#> }
#> a <- a + adelta
#> if (a > extreme_high) {
#> a <- extreme_high
#> }
#> else if (a < extreme_low) {
#> a <- extreme_low
#> }
#> if (abs(adelta) < tol) {
#> return(a)
#> }
#> j <- j + 1
#> if (j > 20) {
#> stop(paste("gs_spending_bound(): bound_update did not converge for lower bound calculation, analysis",
#> k, " !"))
#> }
#> }
#> }
#> else {
#> if (spend <= 0) {
#> return(Inf)
#> }
#> if (length(theta) == 1)
#> theta <- rep(theta, length(info))
#> b <- qnorm(spend, lower.tail = FALSE)
#> if (k == 1) {
#> return(b)
#> }
#> mu <- theta[k] * sqrt(info[k])
#> extreme_low <- mu - 3 - 4 * log(r)
#> extreme_high <- mu + 3 + 4 * log(r)
#> bdelta <- 1
#> j <- 1
#> while (abs(bdelta) > tol) {
#> hg <- hupdate(theta = 0, info = info[k], a = b, b = Inf,
#> thetam1 = 0, im1 = info[k - 1], gm1 = hgm1, r = r)
#> pik <- sum(hg$h)
#> bdelta <- spend - pik
#> dpikdb <- hg$h[1]/hg$w[1]
#> if (bdelta > dpikdb) {
#> bdelta <- 1
#> }
#> else if (bdelta < -dpikdb) {
#> bdelta <- -1
#> }
#> else {
#> bdelta <- bdelta/dpikdb
#> }
#> b <- b - bdelta
#> if (b > extreme_high) {
#> b <- extreme_high
#> }
#> else if (b < extreme_low) {
#> b <- extreme_low
#> }
#> if (abs(bdelta) < tol) {
#> return(b)
#> }
#> j <- j + 1
#> if (j > 20) {
#> stop(paste("gs_spending_bound(): bound_update did not converge for lower bound calculation, analysis",
#> k, " !"))
#> }
#> }
#> }
#> }
#> <bytecode: 0x563f666b1ed8>
#> <environment: namespace:gsDesign2>
#>
#> $input$upar
#> $input$upar$sf
#> function (alpha, t, param = NULL)
#> {
#> checkScalar(alpha, "numeric", c(0, Inf), c(FALSE, FALSE))
#> checkVector(t, "numeric", c(0, Inf), c(TRUE, FALSE))
#> if (is.null(param) || param < 0.005 || param > 20)
#> param <- 1
#> checkScalar(param, "numeric", c(0.005, 20), c(TRUE, TRUE))
#> t[t > 1] <- 1
#> if (param == 1) {
#> rho <- 1
#> txt <- "Lan-DeMets O'Brien-Fleming approximation"
#> parname <- "none"
#> }
#> else {
#> rho <- param
#> txt <- "Generalized Lan-DeMets O'Brien-Fleming"
#> parname <- "rho"
#> }
#> z <- -qnorm(alpha/2)
#> x <- list(name = txt, param = param, parname = parname, sf = sfLDOF,
#> spend = 2 * (1 - pnorm(z/t^(rho/2))), bound = NULL, prob = NULL)
#> class(x) <- "spendfn"
#> x
#> }
#> <bytecode: 0x563f63ede910>
#> <environment: namespace:gsDesign>
#>
#> $input$upar$total_spend
#> [1] 0.025
#>
#>
#> $input$test_upper
#> [1] TRUE
#>
#> $input$lower
#> function (k = 1, par = list(sf = gsDesign::sfLDOF, total_spend = 0.025,
#> param = NULL, timing = NULL, max_info = NULL), hgm1 = NULL,
#> theta = 0.1, info = 1:3, efficacy = TRUE, test_bound = TRUE,
#> r = 18, tol = 1e-06)
#> {
#> if (length(test_bound) == 1 && k > 1) {
#> test_bound <- rep(test_bound, k)
#> }
#> if (!is.null(par$timing)) {
#> timing <- par$timing
#> }
#> else {
#> if (is.null(par$max_info)) {
#> timing <- info/max(info)
#> }
#> else {
#> timing <- info/par$max_info
#> }
#> }
#> spend <- par$sf(alpha = par$total_spend, t = timing, param = par$param)$spend
#> old_spend <- 0
#> for (i in 1:k) {
#> if (test_bound[i]) {
#> xx <- spend[i] - old_spend
#> old_spend <- spend[i]
#> spend[i] <- xx
#> }
#> else {
#> spend[i] <- 0
#> }
#> }
#> spend <- spend[k]
#> if (!efficacy) {
#> if (spend <= 0) {
#> return(-Inf)
#> }
#> if (length(theta) == 1)
#> theta <- rep(theta, length(info))
#> a <- qnorm(spend) + sqrt(info[k]) * theta[k]
#> if (k == 1) {
#> return(a)
#> }
#> mu <- theta[k] * sqrt(info[k])
#> extreme_low <- mu - 3 - 4 * log(r)
#> extreme_high <- mu + 3 + 4 * log(r)
#> adelta <- 1
#> j <- 0
#> while (abs(adelta) > tol) {
#> hg <- hupdate(theta = theta[k], info = info[k], a = -Inf,
#> b = a, thetam1 = theta[k - 1], im1 = info[k -
#> 1], gm1 = hgm1, r = r)
#> i <- length(hg$h)
#> pik <- sum(hg$h)
#> adelta <- spend - pik
#> dplo <- hg$h[i]/hg$w[i]
#> if (adelta > dplo) {
#> adelta <- 1
#> }
#> else if (adelta < -dplo) {
#> adelta <- -1
#> }
#> else {
#> adelta <- adelta/dplo
#> }
#> a <- a + adelta
#> if (a > extreme_high) {
#> a <- extreme_high
#> }
#> else if (a < extreme_low) {
#> a <- extreme_low
#> }
#> if (abs(adelta) < tol) {
#> return(a)
#> }
#> j <- j + 1
#> if (j > 20) {
#> stop(paste("gs_spending_bound(): bound_update did not converge for lower bound calculation, analysis",
#> k, " !"))
#> }
#> }
#> }
#> else {
#> if (spend <= 0) {
#> return(Inf)
#> }
#> if (length(theta) == 1)
#> theta <- rep(theta, length(info))
#> b <- qnorm(spend, lower.tail = FALSE)
#> if (k == 1) {
#> return(b)
#> }
#> mu <- theta[k] * sqrt(info[k])
#> extreme_low <- mu - 3 - 4 * log(r)
#> extreme_high <- mu + 3 + 4 * log(r)
#> bdelta <- 1
#> j <- 1
#> while (abs(bdelta) > tol) {
#> hg <- hupdate(theta = 0, info = info[k], a = b, b = Inf,
#> thetam1 = 0, im1 = info[k - 1], gm1 = hgm1, r = r)
#> pik <- sum(hg$h)
#> bdelta <- spend - pik
#> dpikdb <- hg$h[1]/hg$w[1]
#> if (bdelta > dpikdb) {
#> bdelta <- 1
#> }
#> else if (bdelta < -dpikdb) {
#> bdelta <- -1
#> }
#> else {
#> bdelta <- bdelta/dpikdb
#> }
#> b <- b - bdelta
#> if (b > extreme_high) {
#> b <- extreme_high
#> }
#> else if (b < extreme_low) {
#> b <- extreme_low
#> }
#> if (abs(bdelta) < tol) {
#> return(b)
#> }
#> j <- j + 1
#> if (j > 20) {
#> stop(paste("gs_spending_bound(): bound_update did not converge for lower bound calculation, analysis",
#> k, " !"))
#> }
#> }
#> }
#> }
#> <bytecode: 0x563f666b1ed8>
#> <environment: namespace:gsDesign2>
#>
#> $input$lpar
#> $input$lpar$sf
#> function (alpha, t, param = NULL)
#> {
#> checkScalar(alpha, "numeric", c(0, Inf), c(FALSE, FALSE))
#> checkVector(t, "numeric", c(0, Inf), c(TRUE, FALSE))
#> if (is.null(param) || param < 0.005 || param > 20)
#> param <- 1
#> checkScalar(param, "numeric", c(0.005, 20), c(TRUE, TRUE))
#> t[t > 1] <- 1
#> if (param == 1) {
#> rho <- 1
#> txt <- "Lan-DeMets O'Brien-Fleming approximation"
#> parname <- "none"
#> }
#> else {
#> rho <- param
#> txt <- "Generalized Lan-DeMets O'Brien-Fleming"
#> parname <- "rho"
#> }
#> z <- -qnorm(alpha/2)
#> x <- list(name = txt, param = param, parname = parname, sf = sfLDOF,
#> spend = 2 * (1 - pnorm(z/t^(rho/2))), bound = NULL, prob = NULL)
#> class(x) <- "spendfn"
#> x
#> }
#> <bytecode: 0x563f63ede910>
#> <environment: namespace:gsDesign>
#>
#> $input$lpar$total_spend
#> [1] 0.2
#>
#>
#> $input$test_lower
#> [1] TRUE
#>
#> $input$weight
#> function (x, arm0, arm1, rho = 0, gamma = 0, tau = NULL)
#> {
#> n <- arm0$size + arm1$size
#> p1 <- arm1$size/n
#> p0 <- 1 - p1
#> if (!is.null(tau)) {
#> if (tau > 0) {
#> x <- pmin(x, tau)
#> }
#> }
#> esurv <- p0 * npsurvSS::psurv(x, arm0) + p1 * npsurvSS::psurv(x,
#> arm1)
#> (1 - esurv)^rho * esurv^gamma
#> }
#> <bytecode: 0x563f66845720>
#> <environment: namespace:gsDesign2>
#>
#> $input$info_scale
#> [1] "h0_h1_info"
#>
#> $input$approx
#> [1] "asymptotic"
#>
#> $input$r
#> [1] 18
#>
#> $input$tol
#> [1] 1e-06
#>
#>
#> $enroll_rate
#> # A tibble: 1 × 3
#> stratum duration rate
#> <chr> <dbl> <dbl>
#> 1 All 12 41.7
#>
#> $fail_rate
#> # A tibble: 2 × 5
#> stratum duration fail_rate dropout_rate hr
#> <chr> <dbl> <dbl> <dbl> <dbl>
#> 1 All 4 0.0462 0.001 1
#> 2 All 100 0.0462 0.001 0.6
#>
#> $bounds
#> # A tibble: 6 × 7
#> analysis bound probability probability0 z `~hr at bound` `nominal p`
#> <int> <chr> <dbl> <dbl> <dbl> <dbl> <dbl>
#> 1 1 upper 0.0000207 0.00000163 4.65 0.348 0.00000163
#> 2 1 lower 0.00659 0.0269 -1.93 1.55 0.973
#> 3 2 upper 0.663 0.0142 2.19 0.756 0.0142
#> 4 2 lower 0.162 0.947 1.62 0.814 0.0527
#> 5 3 upper 0.811 0.0225 2.04 0.789 0.0209
#> 6 3 lower 0.200 0.980 2.13 0.780 0.0165
#>
#> $analysis
#> analysis time n event ahr theta info info0
#> 1 1 10 416.6667 77.80361 0.8720599 0.1368971 16.20843 16.22923
#> 2 2 24 500.0000 246.28341 0.7164215 0.3334865 61.35217 62.08666
#> 3 3 30 500.0000 293.69568 0.6955693 0.3630247 72.91885 74.25144
#> info_frac info_frac0
#> 1 0.2222803 0.2185712
#> 2 0.8413760 0.8361677
#> 3 1.0000000 1.0000000
#>
#> attr(,"class")
#> [1] "non_binding" "wlr" "gs_design" "list"
# }
# Example 6 ----
# spending bounds and calculate the power for targeted analysis time & number of events
# \donttest{
gs_power_wlr(
enroll_rate = enroll_rate,
fail_rate = fail_rate,
event = target_events,
analysis_time = target_analysisTime,
upper = gs_spending_bound,
upar = list(sf = gsDesign::sfLDOF, total_spend = 0.025),
lower = gs_spending_bound,
lpar = list(sf = gsDesign::sfLDOF, total_spend = 0.2)
)
#> $input
#> $input$enroll_rate
#> # A tibble: 1 × 3
#> stratum duration rate
#> <chr> <dbl> <dbl>
#> 1 All 12 41.7
#>
#> $input$fail_rate
#> # A tibble: 2 × 5
#> stratum duration fail_rate dropout_rate hr
#> <chr> <dbl> <dbl> <dbl> <dbl>
#> 1 All 4 0.0462 0.001 1
#> 2 All 100 0.0462 0.001 0.6
#>
#> $input$event
#> [1] 30 40 50
#>
#> $input$analysis_time
#> [1] 10 24 30
#>
#> $input$binding
#> [1] FALSE
#>
#> $input$ratio
#> [1] 1
#>
#> $input$upper
#> function (k = 1, par = list(sf = gsDesign::sfLDOF, total_spend = 0.025,
#> param = NULL, timing = NULL, max_info = NULL), hgm1 = NULL,
#> theta = 0.1, info = 1:3, efficacy = TRUE, test_bound = TRUE,
#> r = 18, tol = 1e-06)
#> {
#> if (length(test_bound) == 1 && k > 1) {
#> test_bound <- rep(test_bound, k)
#> }
#> if (!is.null(par$timing)) {
#> timing <- par$timing
#> }
#> else {
#> if (is.null(par$max_info)) {
#> timing <- info/max(info)
#> }
#> else {
#> timing <- info/par$max_info
#> }
#> }
#> spend <- par$sf(alpha = par$total_spend, t = timing, param = par$param)$spend
#> old_spend <- 0
#> for (i in 1:k) {
#> if (test_bound[i]) {
#> xx <- spend[i] - old_spend
#> old_spend <- spend[i]
#> spend[i] <- xx
#> }
#> else {
#> spend[i] <- 0
#> }
#> }
#> spend <- spend[k]
#> if (!efficacy) {
#> if (spend <= 0) {
#> return(-Inf)
#> }
#> if (length(theta) == 1)
#> theta <- rep(theta, length(info))
#> a <- qnorm(spend) + sqrt(info[k]) * theta[k]
#> if (k == 1) {
#> return(a)
#> }
#> mu <- theta[k] * sqrt(info[k])
#> extreme_low <- mu - 3 - 4 * log(r)
#> extreme_high <- mu + 3 + 4 * log(r)
#> adelta <- 1
#> j <- 0
#> while (abs(adelta) > tol) {
#> hg <- hupdate(theta = theta[k], info = info[k], a = -Inf,
#> b = a, thetam1 = theta[k - 1], im1 = info[k -
#> 1], gm1 = hgm1, r = r)
#> i <- length(hg$h)
#> pik <- sum(hg$h)
#> adelta <- spend - pik
#> dplo <- hg$h[i]/hg$w[i]
#> if (adelta > dplo) {
#> adelta <- 1
#> }
#> else if (adelta < -dplo) {
#> adelta <- -1
#> }
#> else {
#> adelta <- adelta/dplo
#> }
#> a <- a + adelta
#> if (a > extreme_high) {
#> a <- extreme_high
#> }
#> else if (a < extreme_low) {
#> a <- extreme_low
#> }
#> if (abs(adelta) < tol) {
#> return(a)
#> }
#> j <- j + 1
#> if (j > 20) {
#> stop(paste("gs_spending_bound(): bound_update did not converge for lower bound calculation, analysis",
#> k, " !"))
#> }
#> }
#> }
#> else {
#> if (spend <= 0) {
#> return(Inf)
#> }
#> if (length(theta) == 1)
#> theta <- rep(theta, length(info))
#> b <- qnorm(spend, lower.tail = FALSE)
#> if (k == 1) {
#> return(b)
#> }
#> mu <- theta[k] * sqrt(info[k])
#> extreme_low <- mu - 3 - 4 * log(r)
#> extreme_high <- mu + 3 + 4 * log(r)
#> bdelta <- 1
#> j <- 1
#> while (abs(bdelta) > tol) {
#> hg <- hupdate(theta = 0, info = info[k], a = b, b = Inf,
#> thetam1 = 0, im1 = info[k - 1], gm1 = hgm1, r = r)
#> pik <- sum(hg$h)
#> bdelta <- spend - pik
#> dpikdb <- hg$h[1]/hg$w[1]
#> if (bdelta > dpikdb) {
#> bdelta <- 1
#> }
#> else if (bdelta < -dpikdb) {
#> bdelta <- -1
#> }
#> else {
#> bdelta <- bdelta/dpikdb
#> }
#> b <- b - bdelta
#> if (b > extreme_high) {
#> b <- extreme_high
#> }
#> else if (b < extreme_low) {
#> b <- extreme_low
#> }
#> if (abs(bdelta) < tol) {
#> return(b)
#> }
#> j <- j + 1
#> if (j > 20) {
#> stop(paste("gs_spending_bound(): bound_update did not converge for lower bound calculation, analysis",
#> k, " !"))
#> }
#> }
#> }
#> }
#> <bytecode: 0x563f666b1ed8>
#> <environment: namespace:gsDesign2>
#>
#> $input$upar
#> $input$upar$sf
#> function (alpha, t, param = NULL)
#> {
#> checkScalar(alpha, "numeric", c(0, Inf), c(FALSE, FALSE))
#> checkVector(t, "numeric", c(0, Inf), c(TRUE, FALSE))
#> if (is.null(param) || param < 0.005 || param > 20)
#> param <- 1
#> checkScalar(param, "numeric", c(0.005, 20), c(TRUE, TRUE))
#> t[t > 1] <- 1
#> if (param == 1) {
#> rho <- 1
#> txt <- "Lan-DeMets O'Brien-Fleming approximation"
#> parname <- "none"
#> }
#> else {
#> rho <- param
#> txt <- "Generalized Lan-DeMets O'Brien-Fleming"
#> parname <- "rho"
#> }
#> z <- -qnorm(alpha/2)
#> x <- list(name = txt, param = param, parname = parname, sf = sfLDOF,
#> spend = 2 * (1 - pnorm(z/t^(rho/2))), bound = NULL, prob = NULL)
#> class(x) <- "spendfn"
#> x
#> }
#> <bytecode: 0x563f63ede910>
#> <environment: namespace:gsDesign>
#>
#> $input$upar$total_spend
#> [1] 0.025
#>
#>
#> $input$test_upper
#> [1] TRUE
#>
#> $input$lower
#> function (k = 1, par = list(sf = gsDesign::sfLDOF, total_spend = 0.025,
#> param = NULL, timing = NULL, max_info = NULL), hgm1 = NULL,
#> theta = 0.1, info = 1:3, efficacy = TRUE, test_bound = TRUE,
#> r = 18, tol = 1e-06)
#> {
#> if (length(test_bound) == 1 && k > 1) {
#> test_bound <- rep(test_bound, k)
#> }
#> if (!is.null(par$timing)) {
#> timing <- par$timing
#> }
#> else {
#> if (is.null(par$max_info)) {
#> timing <- info/max(info)
#> }
#> else {
#> timing <- info/par$max_info
#> }
#> }
#> spend <- par$sf(alpha = par$total_spend, t = timing, param = par$param)$spend
#> old_spend <- 0
#> for (i in 1:k) {
#> if (test_bound[i]) {
#> xx <- spend[i] - old_spend
#> old_spend <- spend[i]
#> spend[i] <- xx
#> }
#> else {
#> spend[i] <- 0
#> }
#> }
#> spend <- spend[k]
#> if (!efficacy) {
#> if (spend <= 0) {
#> return(-Inf)
#> }
#> if (length(theta) == 1)
#> theta <- rep(theta, length(info))
#> a <- qnorm(spend) + sqrt(info[k]) * theta[k]
#> if (k == 1) {
#> return(a)
#> }
#> mu <- theta[k] * sqrt(info[k])
#> extreme_low <- mu - 3 - 4 * log(r)
#> extreme_high <- mu + 3 + 4 * log(r)
#> adelta <- 1
#> j <- 0
#> while (abs(adelta) > tol) {
#> hg <- hupdate(theta = theta[k], info = info[k], a = -Inf,
#> b = a, thetam1 = theta[k - 1], im1 = info[k -
#> 1], gm1 = hgm1, r = r)
#> i <- length(hg$h)
#> pik <- sum(hg$h)
#> adelta <- spend - pik
#> dplo <- hg$h[i]/hg$w[i]
#> if (adelta > dplo) {
#> adelta <- 1
#> }
#> else if (adelta < -dplo) {
#> adelta <- -1
#> }
#> else {
#> adelta <- adelta/dplo
#> }
#> a <- a + adelta
#> if (a > extreme_high) {
#> a <- extreme_high
#> }
#> else if (a < extreme_low) {
#> a <- extreme_low
#> }
#> if (abs(adelta) < tol) {
#> return(a)
#> }
#> j <- j + 1
#> if (j > 20) {
#> stop(paste("gs_spending_bound(): bound_update did not converge for lower bound calculation, analysis",
#> k, " !"))
#> }
#> }
#> }
#> else {
#> if (spend <= 0) {
#> return(Inf)
#> }
#> if (length(theta) == 1)
#> theta <- rep(theta, length(info))
#> b <- qnorm(spend, lower.tail = FALSE)
#> if (k == 1) {
#> return(b)
#> }
#> mu <- theta[k] * sqrt(info[k])
#> extreme_low <- mu - 3 - 4 * log(r)
#> extreme_high <- mu + 3 + 4 * log(r)
#> bdelta <- 1
#> j <- 1
#> while (abs(bdelta) > tol) {
#> hg <- hupdate(theta = 0, info = info[k], a = b, b = Inf,
#> thetam1 = 0, im1 = info[k - 1], gm1 = hgm1, r = r)
#> pik <- sum(hg$h)
#> bdelta <- spend - pik
#> dpikdb <- hg$h[1]/hg$w[1]
#> if (bdelta > dpikdb) {
#> bdelta <- 1
#> }
#> else if (bdelta < -dpikdb) {
#> bdelta <- -1
#> }
#> else {
#> bdelta <- bdelta/dpikdb
#> }
#> b <- b - bdelta
#> if (b > extreme_high) {
#> b <- extreme_high
#> }
#> else if (b < extreme_low) {
#> b <- extreme_low
#> }
#> if (abs(bdelta) < tol) {
#> return(b)
#> }
#> j <- j + 1
#> if (j > 20) {
#> stop(paste("gs_spending_bound(): bound_update did not converge for lower bound calculation, analysis",
#> k, " !"))
#> }
#> }
#> }
#> }
#> <bytecode: 0x563f666b1ed8>
#> <environment: namespace:gsDesign2>
#>
#> $input$lpar
#> $input$lpar$sf
#> function (alpha, t, param = NULL)
#> {
#> checkScalar(alpha, "numeric", c(0, Inf), c(FALSE, FALSE))
#> checkVector(t, "numeric", c(0, Inf), c(TRUE, FALSE))
#> if (is.null(param) || param < 0.005 || param > 20)
#> param <- 1
#> checkScalar(param, "numeric", c(0.005, 20), c(TRUE, TRUE))
#> t[t > 1] <- 1
#> if (param == 1) {
#> rho <- 1
#> txt <- "Lan-DeMets O'Brien-Fleming approximation"
#> parname <- "none"
#> }
#> else {
#> rho <- param
#> txt <- "Generalized Lan-DeMets O'Brien-Fleming"
#> parname <- "rho"
#> }
#> z <- -qnorm(alpha/2)
#> x <- list(name = txt, param = param, parname = parname, sf = sfLDOF,
#> spend = 2 * (1 - pnorm(z/t^(rho/2))), bound = NULL, prob = NULL)
#> class(x) <- "spendfn"
#> x
#> }
#> <bytecode: 0x563f63ede910>
#> <environment: namespace:gsDesign>
#>
#> $input$lpar$total_spend
#> [1] 0.2
#>
#>
#> $input$test_lower
#> [1] TRUE
#>
#> $input$weight
#> function (x, arm0, arm1, rho = 0, gamma = 0, tau = NULL)
#> {
#> n <- arm0$size + arm1$size
#> p1 <- arm1$size/n
#> p0 <- 1 - p1
#> if (!is.null(tau)) {
#> if (tau > 0) {
#> x <- pmin(x, tau)
#> }
#> }
#> esurv <- p0 * npsurvSS::psurv(x, arm0) + p1 * npsurvSS::psurv(x,
#> arm1)
#> (1 - esurv)^rho * esurv^gamma
#> }
#> <bytecode: 0x563f66845720>
#> <environment: namespace:gsDesign2>
#>
#> $input$info_scale
#> [1] "h0_h1_info"
#>
#> $input$approx
#> [1] "asymptotic"
#>
#> $input$r
#> [1] 18
#>
#> $input$tol
#> [1] 1e-06
#>
#>
#> $enroll_rate
#> # A tibble: 1 × 3
#> stratum duration rate
#> <chr> <dbl> <dbl>
#> 1 All 12 41.7
#>
#> $fail_rate
#> # A tibble: 2 × 5
#> stratum duration fail_rate dropout_rate hr
#> <chr> <dbl> <dbl> <dbl> <dbl>
#> 1 All 4 0.0462 0.001 1
#> 2 All 100 0.0462 0.001 0.6
#>
#> $bounds
#> # A tibble: 6 × 7
#> analysis bound probability probability0 z `~hr at bound` `nominal p`
#> <int> <chr> <dbl> <dbl> <dbl> <dbl> <dbl>
#> 1 1 upper 0.0000207 0.00000163 4.65 0.348 0.00000163
#> 2 1 lower 0.00659 0.0269 -1.93 1.55 0.973
#> 3 2 upper 0.663 0.0142 2.19 0.756 0.0142
#> 4 2 lower 0.162 0.947 1.62 0.814 0.0527
#> 5 3 upper 0.811 0.0225 2.04 0.789 0.0209
#> 6 3 lower 0.200 0.980 2.13 0.780 0.0165
#>
#> $analysis
#> analysis time n event ahr theta info info0
#> 1 1 10 416.6667 77.80361 0.8720599 0.1368971 16.20843 16.22923
#> 2 2 24 500.0000 246.28341 0.7164215 0.3334865 61.35217 62.08666
#> 3 3 30 500.0000 293.69568 0.6955693 0.3630247 72.91885 74.25144
#> info_frac info_frac0
#> 1 0.2222803 0.2185712
#> 2 0.8413760 0.8361677
#> 3 1.0000000 1.0000000
#>
#> attr(,"class")
#> [1] "non_binding" "wlr" "gs_design" "list"
# }