############################################################################################
# R code to implement the statistical methods described in
# Sensitivity Analysis of Per-Protocol Time-to-Event Treatment Efficacy in Randomized Trials
# by Peter B. Gilbert, Bryan E. Shepherd, and Michael G. Hudgens.
#
# This code uses a "vaccine efficacy" contrast function for the causal estimands,
# h(x,y) = 1 - (1-x)/(1-y) [one minus the relative cumulative failure rate], such that
#
# SCE^{APP}(t) = h(S^{APP}_1(t),S^{APP}_0(t)) = 1 - [1-S^{APP}_1(t)] / [1-S^{APP}_0(t)]
# SCE^{ASA1}(t) = h(S^{ASA1}_1(t),S^{ASA1}_0(t)) = 1 - [1-S^{ASA1}_1(t)]/ [1-S^{ASA1}_0(t)]
# SCE^{PP1}(t) = h(S^{PP1}_1(t),S^{PP1}_0(t)) = 1 - [1-S^{PP1}_1(t)] / [1-S^{PP1}_0(t)]
#
# Code and documentation finalized in April of 2012
#
# The code uses the sensitivityPStrat package (author Bryan Shepherd) in R available on CRAN.
# This file is posted at Bryan Shepherd's website
# http://biostat.mc.vanderbilt.edu/CodeSensitivityPerProtocol
############################################################################################
############################################################################################
# Read in the data-set for analysis. To illustrate the code, a data-set is used that is
# of the same structure as the actual RV144 Thai trial data used in the article. Due to
# confidentiality agreements, the real data-set could not be made available.
# The settings for this illustrative analysis exactly match those in the article.
#
# This code outputs ignorance intervals and 95% estimated uncertainty intervals,
# for the nonparametric bounds and semiparametric sensitivity analysis approaches.
# This output is produced for each of the three causal estimands under each of the
# four assumption sets A, B, C, D. To obtain estimated uncertainty intervals at
# a different confidence level under the semiparametric approach, the input "ci=0.95"
# in the function sensitivitySGD.R from the sensitivityPStrat package should be changed.
# This adjustment can be used to obtain p-values of the hypothesis testing procedures
# based on the estimated uncertainty intervals.
# Start from a clean workspace.
# NOTE(review): rm(list=ls()) is only appropriate because this is a standalone
# batch script; it will wipe any interactive session it is sourced into.
rm(list=ls())
data<-read.table("MockRV144data_Jan2011.dat",sep=" ")
#data<-read.table("RV144data.Feb.2011.dat",sep=" ")
# Structure of the data-set (using the notation in the article; all subjects in the
# intention-to-treat analysis should be included):
# column 1 = subject identifier number
# column 2 = treatment assignment Z (1 = active; 0 = control/placebo)
# column 3 = per-protocol indicator PP (1 = observed to be per-protocol; 0 = otherwise)
# column 4 = failure indicator delta (1 = observed to experience the failure event; 0 = otherwise)
# column 5 = X, the minimum of the failure time T and the right-censoring time C, with time origin randomization
id1<-data$V1 ## Subject identifier
z1<-data$V2 ## Treatment assignment
pp1<-data$V3 ## PP indicator
d1<-data$V4 ## Failure indicator
# NOTE(review): days are converted to years with 365 here, but tau0 and
# time.point below use 365.25 -- confirm this mismatch is intended.
y1<-data$V5/365 ## Time from randomization until failure or right-censoring (may be date of last contact)
## (converted to years)
# A few failure times y1 have missing values; eliminate these.
# Compute the completeness mask once instead of re-evaluating !is.na(y1)
# for every vector.
complete<-!is.na(y1)
id<-id1[complete]
z<-z1[complete]
pp<-pp1[complete]
d<-d1[complete]
y<-y1[complete]
#########################################################################################
# Input values specified by the analyst:
#
# tau0: The fixed time-point after randomization
# that indicates when full adherence status has been measured.
#
# For the RV144 trial tau0 is defined as the right edge of the Month 24 visit window,
# 24 weeks + 3 weeks, which equals 189 days.
# (Stored in years on the 365.25 days/year scale.)
tau0<- 189.0/365.25
# t.bar: The increment in the failure time for defining the selection odds ratio
# sensitivity parameters, which is used for the plausible-range sensitivity analyses.
#
# For the RV144 example we use year as the time-scale, and set t.bar to be a 1-year increment.
t.bar <- 1
# B: The selection odds ratio parameters are specified to vary between 1/B and B, for B a
# fixed user-specified constant > 1.
#
# For the RV144 example we use 1.5 (selection odds ratio ranging from 0.667 to 1.50)
B <- 1.5
# conflevel: The confidence level of outputted confidence intervals and estimated uncertainty
# intervals (EUIs). We use 95% confidence.
conflevel <- 0.95
# time.point: The fixed time-point t after randomization at which the SCE^#(t;\gamma_0) estimands
# are evaluated. For the RV144 example we evaluate SCE^#(t;\gamma_0) at 39 months
# (168 weeks, expressed in years).
time.point<-168*7/365.25
# Nboot: The number of bootstrap iterations used to construct confidence intervals and estimated
# uncertainty intervals (EUIs). At least 1000 iterations is recommended.
Nboot<-1000
#############################################################
# Load the needed packages
library(survival)
library(sensitivityPStrat)
#############################################################
# Carry out the analysis
# Derived quantities used throughout the analysis.
gam <- 1-conflevel   # tail mass used for the one-sided bootstrap quantiles
# Subjects whose follow-up ended at or before tau0 cannot be per-protocol:
# full adherence status is only measured at tau0.
pp<-ifelse(y<=tau0&pp==1,0,pp)
# Grid of log selection odds ratio sensitivity parameters spanning OR=1/B to OR=B.
# NOTE(review): with B=1.5 this seq() stops at about 0.394 and never reaches
# +log(B)=0.405; kept as-is to reproduce the article's figures -- confirm intended.
betas<- seq(-log(B),log(B), by=.1) ## Create a grid of sensitivity parameters that spans OR=1/B to OR=B
p.pp0<-mean(pp[z==0])   # observed per-protocol proportion, placebo arm
p.pp1<-mean(pp[z==1])   # observed per-protocol proportion, vaccine arm
# Kaplan-Meier summaries among observed per-protocol subjects, by arm.
survfit0<-summary(survfit(Surv(y[z==0&pp==1],d[z==0&pp==1])~1))
survfit1<-summary(survfit(Surv(y[z==1&pp==1],d[z==1&pp==1])~1))
Fpp1.time.point<-1-min(survfit1$surv[survfit1$time<=time.point])
Fpp0.time.point<-1-min(survfit0$surv[survfit0$time<=time.point])
# ITT Kaplan-Meier survival at tau0, by arm. Each survfit summary is computed
# once (the original evaluated the same survfit call twice per quantity).
sf.itt0<-summary(survfit(Surv(y[z==0],d[z==0])~1))
S0.tau0<-min(sf.itt0$surv[sf.itt0$time<=tau0])
sf.itt1<-summary(survfit(Surv(y[z==1],d[z==1])~1))
S1.tau0<-min(sf.itt1$surv[sf.itt1$time<=tau0])
###################################################################
#### Plausible Range Semiparametric Analyses
b.low<- -log(B)/t.bar # Selection odds ratios from 1/B to B
b.plaus<-c(b.low,-b.low)
# Minimum plausible pi under assumption sets A, B, C (in that order).
min.pi.plaus<-c(p.pp1*p.pp0,S0.tau0+p.pp1-1,S0.tau0+p.pp1-S1.tau0)
### To determine the max and min plausible, we only need to put in the min plausible pi, as done here.
################################################################
#### APP Estimands
# Semiparametric plausible-range analysis for the APP estimand under
# assumption sets A, B, C: sensitivitySGD is run over the two plausible
# extremes of (beta0, beta1) and the three minimum plausible pi values in
# min.pi.plaus. custom.FUN is the VE contrast h(F1,F0) = 1 - F1(t)/F0(t).
ans<-sensitivitySGD(z=z,s=pp,d=d,y=y,beta0=b.plaus,beta1=b.plaus,Pi=min.pi.plaus,tau=4,time.points=time.point,
selection=1,trigger=1,groupings=c(0,1),ci.method="bootstrap",na.rm=TRUE,N.boot=Nboot,
custom.FUN=function(Fas0,Fas1,time.points,...) {1-Fas1(time.points)/Fas0(time.points)})
# Extract the ignorance-interval endpoints. Indexing appears to be
# [beta0-index, beta1-index, Pi-index, time-index] for $result, with two more
# indices (confidence limit, statistic) for $result.ci -- confirm against the
# sensitivityPStrat documentation before modifying.
plaus.range.APP.A.est<-c(ans$result[2,1,1,1],ans$result[1,2,1,1])
plaus.range.APP.B.est<-c(ans$result[2,1,2,1],ans$result[1,2,2,1])
plaus.range.APP.C.est<-c(ans$result[2,1,3,1],ans$result[1,2,3,1])
plaus.range.APP.A.cb<-c(ans$result.ci[2,1,1,1,1,1],ans$result.ci[1,2,1,1,2,1])
plaus.range.APP.B.cb<-c(ans$result.ci[2,1,2,1,1,1],ans$result.ci[1,2,2,1,2,1])
plaus.range.APP.C.cb<-c(ans$result.ci[2,1,3,1,1,1],ans$result.ci[1,2,3,1,2,1])
##### APP under Assumption Set D
### Equal adherence is not plausible
# Display the observed per-arm per-protocol proportions and test whether
# adherence is independent of treatment assignment.
p.pp0
p.pp1
chisq.test(table(pp,z))
### Thus, we force the estimates of p.pp0 and p.pp1 to be the same. This is the constrained MLE.
# VE estimate for the APP estimand under Assumption Set D (equal adherence).
# Because observed adherence differs by arm (see chisq.test above), p.pp0 and
# p.pp1 are forced equal (the constrained MLE), under which the plausible range
# collapses to the single point estimate 1 - Fpp1(t)/Fpp0(t); it is returned
# twice (lower, upper) so the result has the same shape as the other ranges.
# The remaining arguments are retained for call compatibility and are never
# evaluated (R's lazy evaluation means undefined arguments are harmless).
# NOTE(review): the original source was corrupted here ("if (p.pp0\n=p.pp1)"
# is not valid R); since the conditional added nothing under the constrained
# MLE, the estimate is now computed unconditionally.
APP.D.function<-function(z,pp,d,y,b.plaus,tau,time.point,p.pp0,p.pp1,Fpp0.time.point,Fpp1.time.point) {
  ve.est<-rep(1-Fpp1.time.point/Fpp0.time.point,2)
  return(ve.est)
}
# Assumption-Set-D plausible range: degenerate at the constrained-MLE estimate.
plaus.range.APP.D.est<-APP.D.function(z,pp,d,y,b.plaus,tau,time.point,p.pp0,p.pp1,Fpp0.time.point,Fpp1.time.point)
# Nonparametric bootstrap of that estimate.
set.seed(300)
ve.ests.boot.APP.D<-matrix(NA,Nboot,2)
for (b in seq_len(Nboot)) {
  print(b)
  idx<-sample(seq_along(y),length(y),replace=TRUE)
  z.b<-z[idx]
  pp.b<-pp[idx]
  d.b<-d[idx]
  y.b<-y[idx]
  p0.b<-mean(pp.b[z.b==0])
  p1.b<-mean(pp.b[z.b==1])
  sf0<-summary(survfit(Surv(y.b[z.b==0&pp.b==1],d.b[z.b==0&pp.b==1])~1))
  sf1<-summary(survfit(Surv(y.b[z.b==1&pp.b==1],d.b[z.b==1&pp.b==1])~1))
  F1.b<-1-min(sf1$surv[sf1$time<=time.point])
  F0.b<-1-min(sf0$surv[sf0$time<=time.point])
  ve.ests.boot.APP.D[b,]<-APP.D.function(z.b,pp.b,d.b,y.b,b.plaus,tau,time.point,p0.b,p1.b,F0.b,F1.b)
}
# One-sided bootstrap quantiles form the estimated uncertainty interval.
plaus.range.APP.D.cb<-c(quantile(ve.ests.boot.APP.D[,1],gam),quantile(ve.ests.boot.APP.D[,2],1-gam))
#### Non-parametric bounds
# Nonparametric bounds for VE^APP(t) under Assumption Set A.
# Inputs: per-arm observed per-protocol proportions and Kaplan-Meier failure
# probabilities at the evaluation time among observed per-protocol subjects.
# Returns c(lower, upper); when the minimum plausible pi is zero the bounds
# are uninformative, (-Inf, 1].
ve.bounds.APP.A<-function(p.pp0,p.pp1,Fpp0.time.point,Fpp1.time.point) {
  min.pi<-max(c(0,p.pp0+p.pp1-1))
  if (min.pi==0) {
    # Uninformative bounds
    out<-c(-Inf,1)
  } else {
    # Extreme basic-stratum failure probabilities consistent with mixing
    # proportion min.pi/p in each arm.
    F0.hi<-min(c(Fpp0.time.point/(min.pi/p.pp0),1))
    F1.hi<-min(c(Fpp1.time.point/(min.pi/p.pp1),1))
    F0.lo<-max(c(Fpp0.time.point/(min.pi/p.pp0)+1- p.pp0/min.pi,0))
    F1.lo<-max(c(Fpp1.time.point/(min.pi/p.pp1)+1- p.pp1/min.pi,0))
    out<-c(1-F1.hi/F0.lo,1-F1.lo/F0.hi)
  }
  return(out)
}
# Nonparametric bounds for VE^APP(t) under Assumption Set B.
# Identical to the set-A bounds except the minimum plausible pi uses the
# placebo ITT survival at tau0 (S0.tau0) instead of p.pp0.
# Returns c(lower, upper); (-Inf, 1] when the minimum plausible pi is zero.
ve.bounds.APP.B<-function(p.pp0,p.pp1,Fpp0.time.point,Fpp1.time.point,S0.tau0) {
  min.pi<-max(c(0,S0.tau0+p.pp1-1))
  if (min.pi==0) {
    # Uninformative bounds
    out<-c(-Inf,1)
  } else {
    F0.hi<-min(c(Fpp0.time.point/(min.pi/p.pp0),1))
    F1.hi<-min(c(Fpp1.time.point/(min.pi/p.pp1),1))
    F0.lo<-max(c(Fpp0.time.point/(min.pi/p.pp0)+1- p.pp0/min.pi,0))
    F1.lo<-max(c(Fpp1.time.point/(min.pi/p.pp1)+1- p.pp1/min.pi,0))
    out<-c(1-F1.hi/F0.lo,1-F1.lo/F0.hi)
  }
  return(out)
}
# Nonparametric bounds for VE^APP(t) under Assumption Set C.
# As for sets A/B, but the minimum plausible pi is
# S0.tau0 + p.pp1 - S1.tau0 (both arms' ITT survival at tau0 enter).
# Returns c(lower, upper); (-Inf, 1] when the minimum plausible pi is zero.
ve.bounds.APP.C<-function(p.pp0,p.pp1,Fpp0.time.point,Fpp1.time.point,S0.tau0,S1.tau0) {
  min.pi<-max(c(0,S0.tau0+p.pp1-S1.tau0))
  if (min.pi==0) {
    # Uninformative bounds
    out<-c(-Inf,1)
  } else {
    F0.hi<-min(c(Fpp0.time.point/(min.pi/p.pp0),1))
    F1.hi<-min(c(Fpp1.time.point/(min.pi/p.pp1),1))
    F0.lo<-max(c(Fpp0.time.point/(min.pi/p.pp0)+1- p.pp0/min.pi,0))
    F1.lo<-max(c(Fpp1.time.point/(min.pi/p.pp1)+1- p.pp1/min.pi,0))
    out<-c(1-F1.hi/F0.lo,1-F1.lo/F0.hi)
  }
  return(out)
}
# Nonparametric bounds for VE^APP(t) under Assumption Set D.
# NOTE(review): THIS REGION OF THE FILE IS CORRUPTED AND WILL NOT PARSE.
# The line "if (min.pi>0 & min.pi0,1,ifelse(..." is not valid R; it appears
# that a large chunk of the original source was lost here -- the remainder of
# this function (its third branch and return) plus the code that builds the
# objects used by the figures below (ve, ve.lower, ve.upper, reject.ve, phis,
# pis, and bounds.APP.B/C/D.est/cb), leaving the tail of a later triple loop
# fused onto this conditional. Recover the missing section from the original
# distributed source before running this script.
ve.bounds.APP.D<-function(p.pp0,p.pp1,Fpp0.time.point,Fpp1.time.point) {
min.pi<-p.pp0
if (min.pi==0) {
# print("Uninformative bounds: VE bounded by -Inf and 1")
ve.lower<- -Inf
ve.upper<- 1
}
if (min.pi>0 & min.pi>=p.pp1) {
ve.lower<-1-Fpp1.time.point/Fpp0.time.point
ve.upper<-ve.lower
}
if (min.pi>0 & min.pi0,1,ifelse(ve.upper[i,j,k]<0,-1,0))
}
}
}
# Figure 2 in the article:
# NOTE(review): these four figures depend on objects (reject.ve, ve, phis,
# pis, ve.lower, ve.upper) that are built in the corrupted/missing section
# above -- they will fail until that section is restored. Shaded image cells
# encode the EUI-based test: -1 / 0 / +1 mapped to three gray levels.
postscript("ve-app-a.eps",height=4,width=6)#,horizontal=FALSE)
opar <- par(mfrow=c(2,3), oma=c(4,3,3,3),mar=c(3.25,3.25,1.5,2.5))
#for (i in c(8,1,2,4,5,6,7)) {
for (i in c(1,4,5,6,7)) {
#for(i in seq(length=dim(reject.ve)[3])) {
image(betas,betas, reject.ve[,,i], breaks=c(-1.5,-0.5,0.5,1.5),
col=c(gray(.8),gray(1),gray(.6)),xlab="",ylab="", axes=FALSE)
# Contour levels are tuned per panel so labels stay legible.
if (i==1 | i==4) {
contour(betas,betas,ve[,,i],add=T,lty=3,levels=c((0:20)/10-1),labcex=0.7, axes=FALSE) }
if (i==5) {
contour(betas,betas,ve[,,i],add=T,lty=3,levels=c(.15,.2,.25),labcex=0.7, axes=FALSE) }
if (i==6 | i==7) {
contour(betas,betas,ve[,,i],add=T,lty=3,levels=c(.18,.2,.22),labcex=0.7, axes=FALSE) }
# mtext(side=1, line=2.4, expression(OR[0]==exp(beta[0])),cex=.8)
# mtext(side=2, line=2.4, expression(OR[1]==exp(beta[1])),cex=.8)
mtext("OR1",side=2, line=2.4)
if (i==6 | i==7) { mtext("OR0",side=1, line=3) }
mtext(side=3, line=0.5, bquote(phi[APP]==.(format(phis[i], digits=2))),cex=1.2)
axis(side=1,at=log(seq(1/B,B,len=4)),label=round(seq(1/B,B,len=4),2),cex.axis=1)
axis(side=2,at=log(seq(1/B,B,len=4)),label=round(seq(1/B,B,len=4),2),cex.axis=1)
box()
}
# Final panel: VE curve with pointwise bootstrap limits at phi = 1.
plot(rep(betas,2),c(ve.lower[,1,length(pis)],ve.upper[,1,length(pis)]),type="n",axes=FALSE,xlab="",ylab="VEAPP(39)",cex.lab=1.0)
lines(betas,ve[,1,length(pis)])
lines(betas,ve.lower[,1,length(pis)],lwd=.5,col=gray(.5))
lines(betas,ve.upper[,1,length(pis)],lwd=.5,col=gray(.5))
abline(h=0,lty=3,lwd=.5)
#mtext(side=1, line=2.4, expression(OR[0]==exp(beta[0])),cex=.8)
mtext("OR0",side=1, line=3)
axis(side=1,at=log(seq(1/B,B,len=4)),label=round(seq(1/B,B,len=4),2),cex.axis=1)
axis(side=2,at=c(-0.2,0,0.2,0.4),labels=c("-.2","0",".2",".4"),cex.axis=1.0)
mtext(side=3, line=0.5, bquote(phi[APP]==1),cex=1.2)
box()
par(opar)
dev.off()
# Presentation version of the same panels.
postscript("ve-app-a-pres.eps",height=5,width=10,horizontal=FALSE)
opar <- par(mfrow=c(2,3), mar=c(4.25,4.25,1.25,0.5))
for (i in c(8,1,4,5,6,7)) {
#for(i in seq(length=dim(reject.ve)[3])) {
image(betas,betas, reject.ve[,,i], breaks=c(-1.5,-0.5,0.5,1.5),
col=c(gray(.8),gray(1),gray(.6)),xlab="",ylab="", axes=FALSE)
contour(betas,betas,ve[,,i],add=T,lty=3,levels=c((0:20)/10-1),labcex=0.8, axes=FALSE)
mtext(side=1, line=2.4, expression(OR[0]==exp(beta[0])),cex=.8)
mtext(side=2, line=2.4, expression(OR[1]==exp(beta[1])),cex=.8)
mtext(side=3, line=0, bquote(phi==.(format(phis[i], digits=2))),cex=.8)
axis(side=1,at=log(seq(1/B,B,len=4)),label=round(seq(1/B,B,len=4),2),cex.axis=1)
axis(side=2,at=log(seq(1/B,B,len=4)),label=round(seq(1/B,B,len=4),2),cex.axis=1)
box()
}
par(opar)
dev.off()
# Single-panel phi = 1 figure.
postscript("ve-app-phi-1.eps",width=7,height=6)
par(mar=c(4,4,1,1),cex.axis=1.3,cex.lab=1.3,cex.main=1.3)
#plot(rep(betas,2),c(ve.lower[,1,length(pis)],ve.upper[,1,length(pis)]),type="n",axes=FALSE,xlab="",ylab="VE(t=168 wks)")
plot(rep(betas,2),c(ve.lower[,1,length(pis)],ve.upper[,1,length(pis)]),type="n",axes=FALSE,xlab="",ylab="VEAPP(39)")
lines(betas,ve[,1,length(pis)])
lines(betas,ve.lower[,1,length(pis)],lwd=.5,col=gray(.5))
lines(betas,ve.upper[,1,length(pis)],lwd=.5,col=gray(.5))
abline(h=0,lty=3,lwd=.5)
mtext(side=1, line=2.4, expression(OR[0]==exp(beta[0])),cex=1.2)
axis(side=1,at=log(seq(1/B,B,len=4)),label=round(seq(1/B,B,len=4),2),cex.axis=1)
axis(2)
mtext(side=3, line=0, bquote(phi==1),cex=1.2)
box()
dev.off()
# Panels for assumption sets A, B, C (phis indices 8:10).
postscript("ve-app-abc.eps",height=3.5,width=10)
opar <- par(mfrow=c(1,3), mar=c(4.25,4.25,2.5,0.5))
for (i in c(8,9,10)) {
#for(i in seq(length=dim(reject.ve)[3])) {
image(betas,betas, reject.ve[,,i], breaks=c(-1.5,-0.5,0.5,1.5),
col=c(gray(.8),gray(1),gray(.6)),xlab="",ylab="", axes=FALSE)
contour(betas,betas,ve[,,i],add=T,lty=3,levels=c((0:20)/10-1),labcex=0.8, axes=FALSE)
mtext(side=1, line=2.4, expression(OR[0]==exp(beta[0])),cex=.8)
mtext(side=2, line=2.4, expression(OR[1]==exp(beta[1])),cex=.8)
mtext(side=3, line=0, bquote(phi==.(format(phis[i], digits=4))),cex=1)
axis(side=1,at=log(seq(1/B,B,len=4)),label=round(seq(1/B,B,len=4),2),cex.axis=1)
axis(side=2,at=log(seq(1/B,B,len=4)),label=round(seq(1/B,B,len=4),2),cex.axis=1)
box()
}
par(opar)
dev.off()
#############################################
######## ASA1 Estimands
### Assumption Set A:
# ASA1 under Assumption Set A: distributions needed for the bounds and the
# sensitivity analysis. Each survfit summary is computed once (the original
# evaluated the same survfit call twice per quantity).
# Placebo arm restricted to subjects still under observation after tau0.
sf.c.post<-summary(survfit(Surv(y[z==0&y>tau0],d[z==0&y>tau0])~1))
t0<-sf.c.post$time
F0.tau0<-1-sf.c.post$surv
# Vaccine arm restricted to observed per-protocol subjects.
sf.v.pp<-summary(survfit(Surv(y[z==1&pp==1],d[z==1&pp==1])~1))
t1<-sf.v.pp$time
F1.pp<-1-sf.v.pp$surv
F0.tau0.time.point<-max(F0.tau0[t0<=time.point])
##### Nonparametric bounds
# bounds for pi_ASA1 are max(0,S0.tau0+p.pp1-1) and min(p.pp1,S0.tau0)
# Nonparametric bounds for VE^ASA1(t) under Assumption Set A.
# S0.tau0: placebo ITT survival at tau0; p.pp1: vaccine per-protocol
# proportion; the F arguments are the failure probabilities at the evaluation
# time for the placebo post-tau0 survivors and the vaccine per-protocol group.
# Returns c(lower, upper); (-Inf, 1] when the minimum plausible pi is zero.
ve.bounds.ASA1.A<-function(S0.tau0,p.pp1,F0.tau0.time.point,Fpp1.time.point) {
  min.pi<-max(c(0,S0.tau0+p.pp1-1))
  if (min.pi==0) {
    # Uninformative bounds
    out<-c(-Inf,1)
  } else {
    F0.hi<-min(c(F0.tau0.time.point/(min.pi/S0.tau0),1))
    F1.hi<-min(c(Fpp1.time.point/(min.pi/p.pp1),1))
    F0.lo<-max(c(F0.tau0.time.point/(min.pi/S0.tau0)+1- S0.tau0/min.pi,0))
    F1.lo<-max(c(Fpp1.time.point/(min.pi/p.pp1)+1- p.pp1/min.pi,0))
    out<-c(1-F1.hi/F0.lo,1-F1.lo/F0.hi)
  }
  return(out)
}
ve.bounds.asa1.a<-ve.bounds.ASA1.A(S0.tau0,p.pp1,F0.tau0.time.point,Fpp1.time.point)
#################### Sensitivity Analysis
# The package sensitivityPStrat cannot be used for the placebo arm, so a new function is used.
# Selection weight w(y) = 1/(1 + exp(-(alpha + beta*y))): an inverse-logit in
# (alpha + beta.y), used to tilt the mixture distribution.
.calc.w <- function(alpha, beta.y)
  1/(1 + exp(-alpha - beta.y))
# Objective whose minimizer alpha satisfies sum(w(alpha,beta.y)*dF) = C
# (its derivative in alpha is sum(w*dF) - C).
.alpha.est <- function(alpha, beta.y, dF, C)
  sum(log(1 + exp(alpha + beta.y)) * dF) - alpha*C
# Solve for alpha by one-dimensional minimization over `interval`; warn if the
# solution lands within 10 units of either endpoint (likely overflow/invalid).
.calc.alphahat <- function(beta.y, dF, C, interval) {
  fit <- optimize(f=.alpha.est, interval=interval, beta.y=beta.y, dF=dF, C=C)
  alphahat <- fit$minimum
  near.edge <- alphahat > max(interval) - 10 || alphahat < min(interval) + 10
  if (near.edge) {
    warning("optimize overflow alphahat value invalid, alphahat = ", alphahat)
  }
  alphahat
}
# Estimate the always-selected-stratum failure probability at `time` under
# log selection odds ratio `beta`, given event times t, cdf values F, stratum
# probability Pi, and observed-group probability p. The time effect is capped
# at tau, and tau is appended as the final mass point (with dF closing to 1).
estimate.Fas<-function(beta,t,tau,Pi,p,F,time) {
  tt<-c(t,tau)
  beta.y<-ifelse(tt>tau,beta*tau,beta*tt)
  dF<-diff(c(0,F,1))    # point masses of the (augmented) cdf
  alphahat<-.calc.alphahat(beta.y, dF, C=Pi/p, interval=c(-100,100))
  w<-.calc.w(alphahat, beta.y)
  # Weighted failure probability at `time`, rescaled from the observed group
  # (probability p) to the stratum (probability Pi).
  sum((w*dF)[tt<=time]) * p/Pi
}
# Lower and upper plausible values of pi_ASA1 (both are close in RV144).
pis<-c(S0.tau0+p.pp1-1,p.pp1)
phis<-pis/p.pp1
# psi on the log-odds scale; a boundary pi produces NaN, mapped to -Inf.
psis<-log(pis*(1-S0.tau0-p.pp1+pis)/((S0.tau0-pis)*(p.pp1-pis)))
psis[is.na(psis)] <- -Inf
# Stratum failure probabilities on the (beta, pi) grid, by arm.
F0.asa1<-F1.asa1<-matrix(NA,nrow=length(betas),ncol=length(pis))
for (b.i in seq_along(betas)) {
  for (p.j in seq_along(pis)) {
    F0.asa1[b.i,p.j]<-estimate.Fas(beta=betas[b.i],t=t0,tau=4,Pi=pis[p.j],p=S0.tau0,F=F0.tau0,time=time.point)
    F1.asa1[b.i,p.j]<-estimate.Fas(beta=betas[b.i],t=t1,tau=4,Pi=pis[p.j],p=p.pp1,F=F1.pp,time=time.point)
  }
}
# VE surface over (beta0, beta1, pi).
ve.asa1.a<-array(NA,dim=c(length(betas),length(betas),length(pis)))
for (b0 in seq_along(betas)){
  for (b1 in seq_along(betas)) {
    for (p.k in seq_along(pis)) {
      ve.asa1.a[b0,b1,p.k]<-1-F1.asa1[b1,p.k]/F0.asa1[b0,p.k]
    }
  }
}
timer1<-date()   # wall-clock start; printed after the loop for a rough runtime
set.seed(300)
# Bootstrap the ASA1 bounds, the full (beta0, beta1, pi) VE surface, and the
# plausible-range corners.
ve.bounds.asa1.a.boot<-matrix(NA,Nboot,2)
ve.asa1.a.boot<-array(NA,dim=c(Nboot,length(betas),length(betas),length(pis)))
ve.asa1.a.plaus.boot<-array(NA,dim=c(Nboot,length(b.plaus),length(b.plaus)))
for (ii in 1:Nboot) {
  print(ii)
  boot<-sample(1:length(y),length(y),replace=TRUE)
  z.boot<-z[boot]
  pp.boot<-pp[boot]
  d.boot<-d[boot]
  y.boot<-y[boot]
  ## BUG FIX: the original computed S0.tau0.boot from
  ## survfit(Surv(y.boot[...], d[z.boot==0])) -- the UNRESAMPLED failure
  ## indicators subset by the resampled z -- instead of d.boot[z.boot==0].
  ## Each survfit summary is also computed once rather than twice.
  sf0.itt.boot<-summary(survfit(Surv(y.boot[z.boot==0],d.boot[z.boot==0])~1))
  S0.tau0.boot<-min(sf0.itt.boot$surv[sf0.itt.boot$time<=tau0])
  sf0.post.boot<-summary(survfit(Surv(y.boot[z.boot==0&y.boot>tau0],d.boot[z.boot==0&y.boot>tau0])~1))
  t0.boot<-sf0.post.boot$time
  F0.tau0.boot<-1-sf0.post.boot$surv
  sf1.pp.boot<-summary(survfit(Surv(y.boot[z.boot==1&pp.boot==1],d.boot[z.boot==1&pp.boot==1])~1))
  t1.boot<-sf1.pp.boot$time
  F1.pp.boot<-1-sf1.pp.boot$surv
  p.pp1.boot<-mean(pp.boot[z.boot==1])
  F0.tau0.boot.time.point<-max(F0.tau0.boot[t0.boot<=time.point])
  Fpp1.boot.time.point<-max(F1.pp.boot[t1.boot<=time.point])
  ve.bounds.asa1.a.boot[ii,]<-ve.bounds.ASA1.A(S0.tau0.boot,p.pp1.boot,F0.tau0.boot.time.point,Fpp1.boot.time.point)
  pis.boot<-c(S0.tau0.boot+p.pp1.boot-1,p.pp1.boot)
  F0.asa1.boot<-F1.asa1.boot<-matrix(NA,nrow=length(betas),ncol=length(pis))
  for (i in 1:length(betas)) {
    for (j in 1:length(pis)) {
      F0.asa1.boot[i,j]<-estimate.Fas(beta=betas[i],t=t0.boot,tau=4,Pi=pis.boot[j],p=S0.tau0.boot,F=F0.tau0.boot,time=time.point)
      F1.asa1.boot[i,j]<-estimate.Fas(beta=betas[i],t=t1.boot,tau=4,Pi=pis.boot[j],p=p.pp1.boot,F=F1.pp.boot,time=time.point)
    }
  }
  for (i in 1:length(betas)){
    for (j in 1:length(betas)) {
      for (k in 1:length(pis)) {
        ve.asa1.a.boot[ii,i,j,k]<-1-F1.asa1.boot[j,k]/F0.asa1.boot[i,k]
      }
    }
  }
  ## Preallocate (the original grew these from NULL).
  F0.asa1.plaus.boot<-F1.asa1.plaus.boot<-numeric(length(b.plaus))
  for (i in 1:length(b.plaus)) {
    F0.asa1.plaus.boot[i]<-estimate.Fas(beta=b.plaus[i],t=t0.boot,tau=4,Pi=pis.boot[1],p=S0.tau0.boot,F=F0.tau0.boot,time=time.point)
    F1.asa1.plaus.boot[i]<-estimate.Fas(beta=b.plaus[i],t=t1.boot,tau=4,Pi=pis.boot[1],p=p.pp1.boot,F=F1.pp.boot,time=time.point)
  }
  for (i in 1:length(b.plaus)) {
    for (j in 1:length(b.plaus)) {
      ve.asa1.a.plaus.boot[ii,i,j]<-1-F1.asa1.plaus.boot[j]/F0.asa1.plaus.boot[i]
    }
  }
}
# Pointwise bootstrap EUI limits and the corresponding test indicator for each
# (beta0, beta1, pi): +1 if the whole interval is above 0, -1 if below, else 0.
ve.asa1.lower<-ve.asa1.upper<-reject.ve.asa1.a<-array(NA,dim=c(length(betas),length(betas),length(pis)))
for (b0 in seq_along(betas)){
  for (b1 in seq_along(betas)) {
    for (p.k in seq_along(pis)) {
      draws<-ve.asa1.a.boot[,b0,b1,p.k]
      ve.asa1.lower[b0,b1,p.k]<-quantile(draws,gam)
      ve.asa1.upper[b0,b1,p.k]<-quantile(draws,1-gam)
      reject.ve.asa1.a[b0,b1,p.k]<-ifelse(ve.asa1.lower[b0,b1,p.k]>0,1,ifelse(ve.asa1.upper[b0,b1,p.k]<0,-1,0))
    }
  }
}
# Print end time alongside the recorded start time (rough runtime check).
date()
timer1
# Display the point estimate of the set-A bounds.
ve.bounds.asa1.a
# One-sided bootstrap quantiles form the bounds' EUI.
ve.bounds.asa1.a.cb<-c(quantile(ve.bounds.asa1.a.boot[,1],gam),quantile(ve.bounds.asa1.a.boot[,2],1-gam))
# Stratum failure probabilities at the two plausible beta extremes (pi fixed
# at its lower plausible value pis[1]).
F0.asa1.plaus<-c(estimate.Fas(beta=b.plaus[1],t=t0,tau=4,Pi=pis[1],p=S0.tau0,F=F0.tau0,time=time.point),
estimate.Fas(beta=b.plaus[2],t=t0,tau=4,Pi=pis[1],p=S0.tau0,F=F0.tau0,time=time.point))
F1.asa1.plaus<-c(estimate.Fas(beta=b.plaus[1],t=t1,tau=4,Pi=pis[1],p=p.pp1,F=F1.pp,time=time.point),
estimate.Fas(beta=b.plaus[2],t=t1,tau=4,Pi=pis[1],p=p.pp1,F=F1.pp,time=time.point))
# Plausible range: cross the opposite beta extremes to get the widest interval.
plaus.range.ASA1.A.est<-1-c(F1.asa1.plaus[1]/F0.asa1.plaus[2], F1.asa1.plaus[2]/F0.asa1.plaus[1])
plaus.range.ASA1.A.cb<-c(quantile(ve.asa1.a.plaus.boot[,2,1],gam),quantile(ve.asa1.a.plaus.boot[,1,2],1-gam))
##### Figures
##### Figures
# Ignorance intervals (thick) and EUIs (thin) for VE^ASA1(39) by assumption set.
# NOTE(review): bounds.APP.B.est and bounds.APP.C.est used below are not
# defined anywhere in the visible portion of this file -- presumably they come
# from the corrupted/missing Assumption-Set-D section above; confirm before
# running. Also note the arrows() call for set A is unconditional even though
# its comment says "If lower bound is infinity".
postscript("ve-asa1-bounds.eps",width=7,height=6)
par(oma=c(5,3,3,3),mar=c(4,4,2,1),cex.axis=1.3,cex.lab=1.3)
xA<-.2
xB<-.4
xC<-.6
xD<-.8
shift<-.03
narrow<-.1
plot(c(0+narrow,1-narrow),c(-1,1),type="n",ylab="VEASA1(39)",xlab="Assumption set",axes=FALSE)
axis(2)
box()
abline(h=0,lty=2)
lines(c(xA,xA)+shift,plaus.range.ASA1.A.est,lwd=4)
lines(c(xA,xA)+shift,plaus.range.ASA1.A.cb,lwd=1)
#lines(c(xA,xA)-shift,ve.bounds.asa1.a,lwd=4)
if (ve.bounds.asa1.a.cb[1]==-Inf) {
lines(c(xA,xA)-shift,c(ve.bounds.asa1.a.cb[2],-1),lwd=1)
}
lines(c(xA,xA)-shift,ve.bounds.asa1.a.cb,lwd=1)
arrows(x0=xA-shift,y0=ve.bounds.asa1.a[2],x1=xA-shift,y1=-1,lwd=4) ### If lower bound is infinity
lines(c(xB,xB)+shift,plaus.range.APP.B.est,lwd=4)
lines(c(xB,xB)+shift,plaus.range.APP.B.cb,lwd=1)
arrows(x0=xB-shift,y0=bounds.APP.B.est[2],x1=xB-shift,y1=-1,lwd=4)
arrows(x0=xB-shift,y0=1,x1=xB-shift,y1=-1,lwd=1)
lines(c(xC,xC)+shift,plaus.range.APP.C.est,lwd=4)
lines(c(xC,xC)+shift,plaus.range.APP.C.cb,lwd=1)
arrows(x0=xC-shift,y0=bounds.APP.C.est[2],x1=xC-shift,y1=-1,lwd=4)
arrows(x0=xC-shift,y0=1,x1=xC-shift,y1=-1,lwd=1)
lines(c(xD,xD)+shift,plaus.range.APP.D.est,lwd=4)
lines(c(xD,xD)+shift,plaus.range.APP.D.cb,lwd=1)
lines(c(xD,xD)-shift,bounds.APP.D.est,lwd=4)
lines(c(xD,xD)-shift,bounds.APP.D.cb,lwd=1)
mtext("A",side=1,line=1,at=xA)
mtext("B",side=1,line=1,at=xB)
mtext("C",side=1,line=1,at=xC)
mtext("D",side=1,line=1,at=xD)
dev.off()
# Figure 3 in the article:
# Sensitivity-analysis panels for VE^ASA1 under set A over the pi grid,
# plus a final panel of the VE curve at phi = 1 with bootstrap limits.
postscript("ve-asa1-a.eps",height=3,width=5.5)
opar <- par(mfrow=c(1,3), oma=c(4,3,3,3),mar=c(2,3,1,1.8),cex.axis=0.85)#,cex.axis=1.01,cex.main=1.01)
for(i in seq(length=dim(ve.asa1.a)[3])) {
image(betas,betas, reject.ve.asa1.a[,,i], breaks=c(-1.5,-0.5,0.5,1.5),
col=c(gray(.8),gray(1),gray(.6)),xlab="",ylab="", axes=FALSE)
contour(betas,betas,ve.asa1.a[,,i],add=T,lty=3,levels=c((0:20)/10-1),labcex=0.7, axes=FALSE)
# mtext(side=1, line=2.4, expression(OR[0]==exp(beta[0])),cex=1)
# mtext(side=2, line=2.4, expression(OR[1]==exp(beta[1])),cex=1)
mtext("OR0",side=1, line=2.4, cex=1.1)
mtext("OR1",side=2, line=2.4, cex=1.1)
mtext(side=3, line=0.5, bquote(phi[ASA1]==.(format(phis[i], digits=4))),cex=1.01)
axis(side=1,at=log(seq(1/B,B,len=4)),label=round(seq(1/B,B,len=4),2),cex.axis=0.79)
axis(side=2,at=log(seq(1/B,B,len=4)),label=round(seq(1/B,B,len=4),2),cex.axis=0.79)
box()
}
plot(rep(betas,2),c(ve.asa1.lower[,1,length(pis)],ve.asa1.upper[,1,length(pis)]),type="n",axes=FALSE,xlab="",ylab="")
lines(betas,ve.asa1.a[,1,length(pis)])
lines(betas,ve.asa1.lower[,1,length(pis)],lwd=.5,col=gray(.5))
lines(betas,ve.asa1.upper[,1,length(pis)],lwd=.5,col=gray(.5))
abline(h=0,lty=3,lwd=.5)
#mtext(side=1, line=2.4, expression(OR[0]==exp(beta[0])),cex=1)
mtext("OR0",side=1, line=2.4, cex=1.1)
mtext("VEASA1",side=2, line=2.4, cex=1.1)
axis(side=1,at=log(seq(1/B,B,len=4)),label=round(seq(1/B,B,len=4),2),cex.axis=0.79)
axis(side=2,at=log(seq(1/B,B,len=4)),label=round(seq(1/B,B,len=4),2),cex.axis=0.79)
mtext(side=3, line=0.5, bquote(phi[ASA1]==1),cex=1.01)
box()
par(opar)
dev.off()
### Assumption Set B, C, and D: Results are the same as the APP estimates.
#######################################################################################
####### PP1 Estimands
#######################################################################################
#########################################################
######## PP1 Estimands under assumption set A
#### Estimation of S_0^PP(t) is done using sensitivityPStrat
#### by forcing selection to be 100% for all in the placebo arm.
# PP1 under Assumption Set A: estimate S_0^PP(t) with sensitivitySGL by
# declaring every placebo subject "selected" (selection is 100% in arm 0),
# then applying the selection model only on the vaccine side.
pp.artificial<-ifelse(z==0,1,pp)
system.time({
ans.pp.A<-sensitivitySGL(z=z,s=pp.artificial,d=d,y=y,beta=betas,tau=4,time.points=time.point,selection=1,trigger=1,
groupings=c(0,1),empty.principal.stratum=c(0,1),ci.method="bootstrap",N.boot=Nboot,na.rm=TRUE,
custom.FUN=function(Fas0,Fas1,time.points,...){1-Fas1(time.points)/Fas0(time.points)})
})
# Point estimates over the beta grid and pointwise bootstrap limits.
# (result.ci indexing follows sensitivityPStrat conventions -- confirm against
# the package documentation before modifying.)
ve.pp.A<-ans.pp.A$result
ve.lower.pp.A<-ans.pp.A$result.ci[,1,1,1]
ve.upper.pp.A<-ans.pp.A$result.ci[,1,2,1]
# Single-panel figure of VE^PP1(39) versus the selection odds ratio.
postscript("ve-pp1-a.eps",height=5,width=5,horizontal=FALSE)
par(mar=c(5,4,1,1),cex.axis=1.3,cex.lab=1.3)
plot(rep(betas,2),c(ve.lower.pp.A,ve.upper.pp.A),type="n",axes=FALSE,xlab="",ylab="VEPP1(39)")
lines(betas,ve.pp.A)
lines(betas,ve.lower.pp.A,lwd=.5,col=gray(.5))
lines(betas,ve.upper.pp.A,lwd=.5,col=gray(.5))
abline(h=0,lty=3,lwd=.5)
mtext(side=1, line=2.4, expression(OR[0]==exp(beta[0]^{''})),cex=.8)
axis(side=1,at=log(seq(1/B,B,len=4)),label=round(seq(1/B,B,len=4),2),cex.axis=1)
axis(2)
box()
dev.off()
### Nonparametric bounds
# Nonparametric bounds for VE^PP1(t) under Assumption Set A.
# F.0: placebo ITT Kaplan-Meier failure probability by time.point;
# F.1pp: vaccine per-protocol failure probability by time.point.
# Each survfit summary is computed once (the original evaluated the same
# survfit call twice per quantity). Returns c(lower, upper).
ve.bounds.PP.A<-function(z,y,d,pp,time.point) {
  sf0<-summary(survfit(Surv(y[z==0],d[z==0])~1))
  F.0<-1-min(sf0$surv[sf0$time<=time.point])
  sf1pp<-summary(survfit(Surv(y[z==1&pp==1],d[z==1&pp==1])~1))
  F.1pp<-1-min(sf1pp$surv[sf1pp$time<=time.point])
  p.pp1<-mean(pp[z==1])
  # Extreme placebo per-protocol failure probabilities given mixing
  # proportion p.pp1.
  F0max<-min(c(F.0/p.pp1,1))
  F0min<-max(c(F.0/p.pp1 + 1-1/p.pp1, 0))
  ve.lower<-1-F.1pp/F0min
  ve.upper<-1-F.1pp/F0max
  return(c(ve.lower,ve.upper))
}
# Point estimate and bootstrap of the set-A PP1 bounds.
ve.pp.a.bounds<-ve.bounds.PP.A(z,y,d,pp,time.point)
ve.pp.a.bounds.boot<-matrix(NA,Nboot,2)
for (b in seq_len(Nboot)) {
  print(b)
  idx<-sample(seq_along(y),length(y),replace=TRUE)
  ve.pp.a.bounds.boot[b,]<-ve.bounds.PP.A(z[idx],y[idx],d[idx],pp[idx],time.point)
}
# Conservative EUI: min of the lower-quantile columns, max of the upper.
ve.pp.a.bounds.cb<-c(min(apply(ve.pp.a.bounds.boot,2,quantile,gam)),max(apply(ve.pp.a.bounds.boot,2,quantile,1-gam)))
## Check the result: checks out
# Sanity check: beta = +/-20 should approximately recover the nonparametric
# bounds computed above.
ans.pp.A.extreme<-sensitivitySGL(z=z,s=pp.artificial,d=d,y=y,beta=c(-20,20),tau=4,time.points=time.point,selection=1,trigger=1,
groupings=c(0,1),empty.principal.stratum=c(0,1),ci.method="bootstrap",N.boot=Nboot,na.rm=TRUE,
custom.FUN=function(Fas0,Fas1,time.points,...){1-Fas1(time.points)/Fas0(time.points)})
ans.pp.A.extreme$result
ans.pp.A.extreme$result.ci
# Plausible range for set A at the two plausible beta extremes.
ans.pp.A.plaus<-sensitivitySGL(z=z,s=pp.artificial,d=d,y=y,beta=b.plaus,tau=4,time.points=time.point,selection=1,trigger=1,
groupings=c(0,1),empty.principal.stratum=c(0,1),ci.method="bootstrap",N.boot=Nboot,na.rm=TRUE,
custom.FUN=function(Fas0,Fas1,time.points,...){1-Fas1(time.points)/Fas0(time.points)})
ve.pp.a.plaus<-ans.pp.A.plaus$result
ve.pp.a.plaus.cb<-c(min(ans.pp.A.plaus$result.ci),max(ans.pp.A.plaus$result.ci))
###############################################################
#### PP1 Estimands under assumption set B.
# Probability of the "harmed-or-per-protocol" placebo mixture H0: early (pre-
# tau0) failure probability plus the per-protocol proportion.
p.H0<- 1-S0.tau0 + p.pp0
# Placebo ITT Kaplan-Meier summary.
stuff<-summary(survfit(Surv(y[z==0],d[z==0])~1))
F0<-1-stuff$surv
stuff$time
# Placebo per-protocol Kaplan-Meier summary.
stuff.pp<-summary(survfit(Surv(y[z==0&pp==1],d[z==0&pp==1])~1))
F0.pp<-1-stuff.pp$surv
stuff.pp$time
#### Check the result
# Fpp1.time.point
# temp<-summary(survfit(Surv(y[z==1&pp==1],d[z==1&pp==1])~1))
# 1-min(temp$surv[junk$time<=time.point])
# Splice the ITT distribution before tau0 with the rescaled per-protocol
# distribution after tau0, then normalize by p.H0. Note the final "/p.H0"
# divides the entire c(...) vector.
t.H0<-c(stuff$time[stuff$time<=tau0],stuff.pp$time[stuff.pp$time>tau0])
F.H0<-c(F0[stuff$time<=tau0],(1-S0.tau0 + F0.pp[stuff.pp$time>tau0]*p.pp0))/p.H0
# Sensitivity estimates over the beta grid for set B.
ve.pp.b.est<-NULL
for (i in 1:length(betas)) {
F0.pp1<-estimate.Fas(beta=betas[i],t=t.H0,tau=4,Pi=p.pp1,p=p.H0,F=F.H0,time=time.point)
ve.pp.b.est[i]<-1-Fpp1.time.point/F0.pp1
}
# Plausible range at the two beta extremes.
ve.pp.b.plaus<-1-Fpp1.time.point/
c(estimate.Fas(beta=b.plaus[1],t=t.H0,tau=4,Pi=p.pp1,p=p.H0,F=F.H0,time=time.point),
estimate.Fas(beta=b.plaus[2],t=t.H0,tau=4,Pi=p.pp1,p=p.H0,F=F.H0,time=time.point))
#### PP1 Nonparametric bounds under assumption set B.
# Nonparametric bounds for VE^PP1(t) under Assumption Set B.
# p.pp1 plays the role of pi; p.H0/F.H0/t.H0 describe the placebo mixture
# distribution H0; Fpp1.time.point is the vaccine per-protocol failure
# probability at the evaluation time.
# Returns c(lower, upper); (-Inf, 1] when pi is zero.
ve.bounds.PP.B<-function(p.pp1,p.H0,F.H0,t.H0,time.point,Fpp1.time.point) {
  min.pi<-p.pp1
  if (min.pi==0) {
    # Uninformative bounds
    out<-c(-Inf,1)
  } else {
    # H0 failure probability by the evaluation time, then its extremes given
    # mixing proportion min.pi/p.H0.
    F.H0.time.point<-max(F.H0[t.H0<=time.point])
    F0.hi<-min(c(F.H0.time.point/(min.pi/p.H0),1))
    F0.lo<-max(c(F.H0.time.point/(min.pi/p.H0)+1- p.H0/min.pi,0))
    out<-c(1-Fpp1.time.point/F0.lo,1-Fpp1.time.point/F0.hi)
  }
  return(out)
}
# Point estimate of the set-B PP1 bounds.
ve.pp.b.bounds<-ve.bounds.PP.B(p.pp1,p.H0,F.H0,t.H0,time.point,Fpp1.time.point)
###### Checking.... correct.
# Cross-check: beta = +/-20 in estimate.Fas should reproduce the bounds.
ve.bounds.PP.B(p.pp1,p.H0,F.H0,t.H0,time.point,Fpp1.time.point)
1-Fpp1.time.point/c(estimate.Fas(beta=20,t=t.H0,tau=4,Pi=p.pp1,p=p.H0,F=F.H0,time=time.point),
estimate.Fas(beta=-20,t=t.H0,tau=4,Pi=p.pp1,p=p.H0,F=F.H0,time=time.point))
# Bootstrap EUIs for the PP1 estimands: set-B sensitivity curve, plausible
# range, and nonparametric bounds, plus (re-run) set-A bounds.
ve.pp.b.est.boot<-matrix(NA,Nboot,length(betas))
ve.pp.b.plaus.boot<-ve.pp.b.bounds.boot<-ve.pp.a.bounds.boot<-matrix(NA,Nboot,2)
for (ii in 1:Nboot) {
  print(ii)
  boot<-sample(1:length(y),length(y),replace=TRUE)
  z.boot<-z[boot]
  pp.boot<-pp[boot]
  d.boot<-d[boot]
  y.boot<-y[boot]
  ## BUG FIX: the original computed S0.tau0.boot with d[z.boot==0] -- the
  ## UNRESAMPLED failure indicators subset by the resampled z -- instead of
  ## d.boot[z.boot==0]. Each survfit summary is also computed once.
  sf0.itt.boot<-summary(survfit(Surv(y.boot[z.boot==0],d.boot[z.boot==0])~1))
  S0.tau0.boot<-min(sf0.itt.boot$surv[sf0.itt.boot$time<=tau0])
  # Vaccine per-protocol summary (also supplies Fpp1 at the evaluation time).
  sf1.pp.boot<-summary(survfit(Surv(y.boot[z.boot==1&pp.boot==1],d.boot[z.boot==1&pp.boot==1])~1))
  t1.boot<-sf1.pp.boot$time
  F1.pp.boot<-1-sf1.pp.boot$surv
  p.pp0.boot<-mean(pp.boot[z.boot==0])
  p.pp1.boot<-mean(pp.boot[z.boot==1])
  p.H0.boot<- 1-S0.tau0.boot + p.pp0.boot
  F0.boot<-1-sf0.itt.boot$surv
  sf0.pp.boot<-summary(survfit(Surv(y.boot[z.boot==0&pp.boot==1],d.boot[z.boot==0&pp.boot==1])~1))
  F0.pp.boot<-1-sf0.pp.boot$surv
  Fpp1.time.point.boot<-1-min(sf1.pp.boot$surv[sf1.pp.boot$time<=time.point])
  # Spliced mixture distribution H0, as in the point-estimate code above.
  t.H0.boot<-c(sf0.itt.boot$time[sf0.itt.boot$time<=tau0],sf0.pp.boot$time[sf0.pp.boot$time>tau0])
  F.H0.boot<-c(F0.boot[sf0.itt.boot$time<=tau0],(1-S0.tau0.boot + F0.pp.boot[sf0.pp.boot$time>tau0]*p.pp0.boot))/p.H0.boot
  F0.pp1.boot<-numeric(length(betas))   # preallocated (original grew from NULL)
  for (i in 1:length(betas)) {
    F0.pp1.boot[i]<-estimate.Fas(beta=betas[i],t=t.H0.boot,tau=4,Pi=p.pp1.boot,p=p.H0.boot,F=F.H0.boot,time=time.point)
    ve.pp.b.est.boot[ii,i]<-1-Fpp1.time.point.boot/F0.pp1.boot[i]
  }
  ve.pp.b.plaus.boot[ii,]<-1-Fpp1.time.point.boot/
  c(estimate.Fas(beta=b.plaus[1],t=t.H0.boot,tau=4,Pi=p.pp1.boot,p=p.H0.boot,F=F.H0.boot,time=time.point),
  estimate.Fas(beta=b.plaus[2],t=t.H0.boot,tau=4,Pi=p.pp1.boot,p=p.H0.boot,F=F.H0.boot,time=time.point))
  ve.pp.b.bounds.boot[ii,]<-ve.bounds.PP.B(p.pp1.boot,p.H0.boot,F.H0.boot,t.H0.boot,time.point,Fpp1.time.point.boot)
  ve.pp.a.bounds.boot[ii,]<-ve.bounds.PP.A(z.boot,y.boot,d.boot,pp.boot,time.point)
}
# Pointwise EUI limits for the set-B sensitivity curve, and conservative EUIs
# for the plausible ranges and bounds.
ve.pp.b.lower<-apply(ve.pp.b.est.boot,2,quantile,gam)
ve.pp.b.upper<-apply(ve.pp.b.est.boot,2,quantile,1-gam)
ve.pp.b.plaus.cb<-c(min(apply(ve.pp.b.plaus.boot,2,quantile,gam)),max(apply(ve.pp.b.plaus.boot,2,quantile,1-gam)))
ve.pp.a.bounds.cb<-c(min(apply(ve.pp.a.bounds.boot,2,quantile,gam)),max(apply(ve.pp.a.bounds.boot,2,quantile,1-gam)))
ve.pp.b.bounds.cb<-c(min(apply(ve.pp.b.bounds.boot,2,quantile,gam)),max(apply(ve.pp.b.bounds.boot,2,quantile,1-gam)))
##### Figures
# Figure: PP1 ignorance intervals (thick) and 95% EUIs (thin) by assumption
# set, with downward arrows when a lower bound is -Inf.
postscript("ve-pp1-bounds.eps",width=7,height=6)
par(mar=c(5,4,1,1),cex.axis=1.3,cex.lab=1.3)
xA<-.2
xB<-.4
xC<-.6
xD<-.8
shift<-.03
narrow<-.1
plot(c(0+narrow,1-narrow),c(-1,1),type="n",ylab="VEPP1(39)",xlab="Assumption set",axes=FALSE)
axis(2)
box()
abline(h=0,lty=2)
lines(c(xA,xA)+shift,ve.pp.a.plaus,lwd=4)
lines(c(xA,xA)+shift,ve.pp.a.plaus.cb,lwd=1)
lines(c(xA,xA)-shift,ve.pp.a.bounds,lwd=4)
lines(c(xA,xA)-shift,ve.pp.a.bounds.cb,lwd=1)
if (ve.pp.a.bounds[1]==-Inf) {
arrows(x0=xA-shift,y0=ve.pp.a.bounds[2],x1=xA-shift,y1=-1,lwd=4) ### If lower bound is infinity
}
if (ve.pp.a.bounds.cb[1]==-Inf) {
lines(c(xA,xA)-shift,c(ve.pp.a.bounds.cb[2],-1),lwd=1)
}
lines(c(xB,xB)+shift,ve.pp.b.plaus,lwd=4)
lines(c(xB,xB)+shift,ve.pp.b.plaus.cb,lwd=1)
lines(c(xB,xB)-shift,ve.pp.b.bounds,lwd=4)
lines(c(xB,xB)-shift,ve.pp.b.bounds.cb,lwd=1)
if (ve.pp.b.bounds[1]==-Inf) {
arrows(x0=xB-shift,y0=ve.pp.b.bounds[2],x1=xB-shift,y1=-1,lwd=4)
}
# BUG FIX: this guard (and the set-C one below) tested ve.pp.a.bounds.cb,
# a copy-paste from the set-A panel; it must test the set-B bounds CI it draws.
if (ve.pp.b.bounds.cb[1]==-Inf) {
lines(c(xB,xB)-shift,c(ve.pp.b.bounds.cb[2],-1),lwd=1)
}
lines(c(xC,xC)+shift,ve.pp.b.plaus,lwd=4)
lines(c(xC,xC)+shift,ve.pp.b.plaus.cb,lwd=1)
lines(c(xC,xC)-shift,ve.pp.b.bounds,lwd=4)
lines(c(xC,xC)-shift,ve.pp.b.bounds.cb,lwd=1)
if (ve.pp.b.bounds[1]==-Inf) {
arrows(x0=xC-shift,y0=ve.pp.b.bounds[2],x1=xC-shift,y1=-1,lwd=4)
}
if (ve.pp.b.bounds.cb[1]==-Inf) {
lines(c(xC,xC)-shift,c(ve.pp.b.bounds.cb[2],-1),lwd=1)
}
# NOTE(review): bounds.APP.D.est / bounds.APP.D.cb are not defined in the
# visible portion of this file (see the corrupted Assumption-Set-D section
# above); presumably they come from the missing code -- confirm before running.
lines(c(xD,xD)+shift,plaus.range.APP.D.est,lwd=4)
lines(c(xD,xD)+shift,plaus.range.APP.D.cb,lwd=1)
lines(c(xD,xD)-shift,bounds.APP.D.est,lwd=4)
lines(c(xD,xD)-shift,bounds.APP.D.cb,lwd=1)
mtext("A",side=1,line=1,at=xA)
mtext("B",side=1,line=1,at=xB)
mtext("C",side=1,line=1,at=xC)
mtext("D",side=1,line=1,at=xD)
dev.off()
########## Figure for PP1 sensitivity analysis under assumption sets A and B
# Figure 4 in the article:
# Figure 4 in the article: PP1 sensitivity-analysis curves with pointwise
# bootstrap limits, under assumption set A (left) and B/C (right). A common
# y-range is used across the two panels via pmin/pmax of the limits.
postscript("ve-pp1-ab.eps",height=3,width=5,horizontal=FALSE)
par(mfrow=c(1,2),mar=c(4,4,2,1),cex.axis=0.75,cex.lab=1.01)
plot(rep(betas,2),c(pmin(ve.lower.pp.A,ve.pp.b.lower),pmax(ve.upper.pp.A,ve.pp.b.upper)),type="n",axes=FALSE,xlab="",ylab="VEPP1(39)")
lines(betas,ve.pp.A)
lines(betas,ve.lower.pp.A,lwd=.5,col=gray(.5))
lines(betas,ve.upper.pp.A,lwd=.5,col=gray(.5))
abline(h=0,lty=3,lwd=.5)
#mtext(side=1, line=2.4, expression(OR[0]==exp(beta[0])),cex=1)
mtext("OR0",side=1, line=2.4, cex=1)
mtext("Assumption Set A", side=3, line=.5)
axis(side=1,at=log(seq(1/B,B,len=4)),label=round(seq(1/B,B,len=4),2),cex.axis=0.75)
axis(2,at=c(-0.3,0,.3,.6,.9),labels=c("-0.3","0","0.3","0.6","0.9"),cex.axis=0.75)
box()
plot(rep(betas,2),c(pmin(ve.lower.pp.A,ve.pp.b.lower),pmax(ve.upper.pp.A,ve.pp.b.upper)),type="n",axes=FALSE,xlab="",ylab="VEPP1(39)")
lines(betas,ve.pp.b.est)
lines(betas,ve.pp.b.lower,lwd=.5,col=gray(.5))
lines(betas,ve.pp.b.upper,lwd=.5,col=gray(.5))
abline(h=0,lty=3,lwd=.5)
#mtext(side=1, line=2.4, expression(OR[0]==exp(beta[0])),cex=1)
mtext("OR0",side=1, line=2.4, cex=1)
mtext("Assumption Set B or C", side=3, line=.5)
axis(side=1,at=log(seq(1/B,B,len=4)),label=round(seq(1/B,B,len=4),2),cex.axis=0.75)
axis(2,at=c(-0.3,0,.3,.6,.9),labels=c("-0.3","0","0.3","0.6","0.9"),cex.axis=0.75)
box()
dev.off()
################################################################################
# Estimate the non-causal vaccine efficacy parameter SCE^PPobs(time.point):
# Kaplan-Meier cumulative failure rates by arm among the observed per-protocol
# subset (pp1 == 1), contrasted as VE = 1 - F1(t)/F0(t), with a nonparametric
# bootstrap percentile interval at overall level 1 - gam.
time <- y1[pp1==1]    # follow-up times in the per-protocol subset
Rx <- z[pp1==1]+1     # treatment arm recoded to 1 (z==0) / 2 (z==1)
delta <- d1[pp1==1]   # event indicators
survfit0b<-summary(survfit(Surv(time[Rx==1],delta[Rx==1])~1))
survfit1b<-summary(survfit(Surv(time[Rx==2],delta[Rx==2])~1))
# Cumulative failure probability by time.point in each arm: 1 - S(t) at the
# last event time <= time.point.
# NOTE(review): if an arm has no events by time.point, min() over an empty
# vector returns Inf (with a warning); assumes events occur before time.point.
Fpp1b.time.point<-1-min(survfit1b$surv[survfit1b$time<=time.point])
Fpp0b.time.point<-1-min(survfit0b$surv[survfit0b$time<=time.point])
vePPobs<-(1-Fpp1b.time.point/Fpp0b.time.point)*100  # VE expressed in percent
# Bootstrap the VE estimate. NOTE(review): subjects are resampled pooled
# across arms, so bootstrap arm sizes vary — presumably intentional; confirm.
ve.ests.boot.PPobs<-rep(NA_real_,Nboot)
for (ii in seq_len(Nboot)) {
boot<-sample(seq_along(time),length(time),replace=TRUE)
z.boot<-Rx[boot]-1
d.boot<-delta[boot]
y.boot<-time[boot]
survfit0b<-summary(survfit(Surv(y.boot[z.boot==0],d.boot[z.boot==0])~1))
survfit1b<-summary(survfit(Surv(y.boot[z.boot==1],d.boot[z.boot==1])~1))
Fpp1b.time.point<-1-min(survfit1b$surv[survfit1b$time<=time.point])
Fpp0b.time.point<-1-min(survfit0b$surv[survfit0b$time<=time.point])
ve.ests.boot.PPobs[ii]<-(1-Fpp1b.time.point/Fpp0b.time.point)*100
}
# Percentile bootstrap confidence bounds (gam is the two-sided error rate).
confbounds.PPobs<-c(quantile(ve.ests.boot.PPobs,gam/2),quantile(ve.ests.boot.PPobs,1-gam/2))
# Figure 1 in the article: three panels (APP, ASA1, PP1) of estimated VE by
# assumption set A-D. This is panel (a), VE^APP(39).
postscript("figure1.eps",height=3,width=5.5)
#par(mfrow=c(1,3),oma=c(5,2,2,2),mar=c(3,2,3,2),cex.axis=1.1,cex.lab=1.1,cex.main=1.1)
opar <- par(mfrow=c(1,3), oma=c(4,3,3,3),mar=c(2,3,1,1.8))#,cex.axis=1.01,cex.main=1.01)
# x-positions for the four assumption sets and horizontal offset between the
# nonparametric bounds (left of x) and plausible range (right of x).
xA<-.2
xB<-.4
xC<-.6
xD<-.8
shift<-.03
narrow<-.1
#plot(c(0+narrow,1-narrow),c(-1,1),type="n",ylab="Estimated Vaccine Efficacy for those who are Always Per Protocol",xlab="",axes=FALSE)
plot(c(0+narrow,1-narrow),c(-1,1),type="n",ylab="VEAPP(39)",xlab="",axes=FALSE)
axis(side=2,at=c(-1.0,-0.5,0.0,0.5,1.0),labels=c(-1.0,-0.5,0.0,0.5,1.0))
box()
abline(h=0,lty=2)  # reference line at VE = 0
# Set A: intervals are unbounded below, so they are drawn as arrows reaching
# the plot floor (-1) rather than as finite segments.
#lines(c(xA,xA)+shift,plaus.range.APP.A.est,lwd=2.4)
#lines(c(xA,xA)+shift,plaus.range.APP.A.cb,lwd=1)
arrows(x0=xA+shift,y0=plaus.range.APP.A.est[2],x1=xA+shift,y1=-1,lwd=2.4)
arrows(x0=xA+shift,y0=plaus.range.APP.A.cb[2],x1=xA+shift,y1=-1,lwd=1)
#lines(c(xA,xA)-shift,bounds.APP.A.est,lwd=2.4)
#lines(c(xA,xA)-shift,bounds.APP.A.cb,lwd=1)
arrows(x0=xA-shift,y0=1,x1=xA-shift,y1=-1,lwd=2.4)  # bounds span the full range
# Set B: plausible range finite; bounds unbounded below (arrows to -1).
lines(c(xB,xB)+shift,plaus.range.APP.B.est,lwd=2.4)
lines(c(xB,xB)+shift,plaus.range.APP.B.cb,lwd=1)
arrows(x0=xB-shift,y0=bounds.APP.B.est[2],x1=xB-shift,y1=-1,lwd=2.4)
arrows(x0=xB-shift,y0=1,x1=xB-shift,y1=-1,lwd=1)
# Set C: same drawing scheme as set B.
lines(c(xC,xC)+shift,plaus.range.APP.C.est,lwd=2.4)
lines(c(xC,xC)+shift,plaus.range.APP.C.cb,lwd=1)
arrows(x0=xC-shift,y0=bounds.APP.C.est[2],x1=xC-shift,y1=-1,lwd=2.4)
arrows(x0=xC-shift,y0=1,x1=xC-shift,y1=-1,lwd=1)
# Set D: all intervals finite.
lines(c(xD,xD)+shift,plaus.range.APP.D.est,lwd=2.4)
lines(c(xD,xD)+shift,plaus.range.APP.D.cb,lwd=1)
lines(c(xD,xD)-shift,bounds.APP.D.est,lwd=2.4)
lines(c(xD,xD)-shift,bounds.APP.D.cb,lwd=1)
mtext("A",side=1,line=1,at=xA)
mtext("B",side=1,line=1,at=xB)
mtext("C",side=1,line=1,at=xC)
mtext("D",side=1,line=1,at=xD)
title("(a)")
mtext(text="Assumption Set",side=1,line=3)
# Figure 1, panel (b): VE^ASA1(39) by assumption set.
xA<-.2
xB<-.4
xC<-.6
xD<-.8
shift<-.03
narrow<-.1
#plot(c(0+narrow,1-narrow),c(-1,1),type="n",ylab="Estimated Vaccine Efficacy for those who are Always Per Protocol",xlab="",axes=FALSE)
plot(c(0+narrow,1-narrow),c(-1,1),type="n",ylab="VEASA1(39)",xlab="",axes=FALSE)
axis(side=2,at=c(-1.0,-0.5,0.0,0.5,1.0),labels=c(-1.0,-0.5,0.0,0.5,1.0))
box()
abline(h=0,lty=2)  # reference line at VE = 0
# Set A: plausible range unbounded below (arrows to the plot floor).
#lines(c(xA,xA)+shift,plaus.range.ASA1.A.est,lwd=2.4)
#lines(c(xA,xA)+shift,plaus.range.ASA1.A.cb,lwd=1)
arrows(x0=xA+shift,y0=plaus.range.ASA1.A.est[2],x1=xA+shift,y1=-1,lwd=2.4)
arrows(x0=xA+shift,y0=plaus.range.ASA1.A.cb[2],x1=xA+shift,y1=-1,lwd=1)
#lines(c(xA,xA)-shift,ve.bounds.asa1.a,lwd=2.4)
# If the confidence band's lower bound is -Inf, draw it down to -1 instead.
if (ve.bounds.asa1.a.cb[1]==-Inf) {
lines(c(xA,xA)-shift,c(ve.bounds.asa1.a.cb[2],-1),lwd=1)
}
lines(c(xA,xA)-shift,ve.bounds.asa1.a.cb,lwd=1)
arrows(x0=xA-shift,y0=ve.bounds.asa1.a[2],x1=xA-shift,y1=-1,lwd=2.4) ### If lower bound is -infinity
# Sets B-D: NOTE(review): this ASA1 panel draws the APP objects
# (plaus.range.APP.*, bounds.APP.*); presumably the ASA1 and APP results
# coincide under assumption sets B, C, and D — confirm upstream.
lines(c(xB,xB)+shift,plaus.range.APP.B.est,lwd=2.4)
lines(c(xB,xB)+shift,plaus.range.APP.B.cb,lwd=1)
arrows(x0=xB-shift,y0=bounds.APP.B.est[2],x1=xB-shift,y1=-1,lwd=2.4)
arrows(x0=xB-shift,y0=1,x1=xB-shift,y1=-1,lwd=1)
lines(c(xC,xC)+shift,plaus.range.APP.C.est,lwd=2.4)
lines(c(xC,xC)+shift,plaus.range.APP.C.cb,lwd=1)
arrows(x0=xC-shift,y0=bounds.APP.C.est[2],x1=xC-shift,y1=-1,lwd=2.4)
arrows(x0=xC-shift,y0=1,x1=xC-shift,y1=-1,lwd=1)
lines(c(xD,xD)+shift,plaus.range.APP.D.est,lwd=2.4)
lines(c(xD,xD)+shift,plaus.range.APP.D.cb,lwd=1)
lines(c(xD,xD)-shift,bounds.APP.D.est,lwd=2.4)
lines(c(xD,xD)-shift,bounds.APP.D.cb,lwd=1)
mtext("A",side=1,line=1,at=xA)
mtext("B",side=1,line=1,at=xB)
mtext("C",side=1,line=1,at=xC)
mtext("D",side=1,line=1,at=xD)
title("(b)")
mtext(text="Assumption Set",side=1,line=3)
# Figure 1, panel (c): VE^PP1(39) by assumption set.
xA<-.2
xB<-.4
xC<-.6
xD<-.8
shift<-.03
narrow<-.1
#plot(c(0+narrow,1-narrow),c(-1,1),type="n",ylab="Estimated Vaccine Efficacy for those who are Always Per Protocol",xlab="",axes=FALSE)
plot(c(0+narrow,1-narrow),c(-1,1),type="n",ylab="VEPP1(39)",xlab="",axes=FALSE)
axis(side=2,at=c(-1.0,-0.5,0.0,0.5,1.0),labels=c(-1.0,-0.5,0.0,0.5,1.0))
box()
abline(h=0,lty=2)  # reference line at VE = 0
# Set A: plausible range unbounded below, drawn as arrows to the plot floor.
#lines(c(xA,xA)+shift,ve.pp.a.plaus,lwd=2.4)
#lines(c(xA,xA)+shift,ve.pp.a.plaus.cb,lwd=1)
arrows(x0=xA+shift,y0=ve.pp.a.plaus[1,1],x1=xA+shift,y1=-1,lwd=2.4)
arrows(x0=xA+shift,y0=ve.pp.a.plaus.cb[2],x1=xA+shift,y1=-1,lwd=1)
#lines(c(xA,xA)-shift,bounds.APP.A.est,lwd=2.4)
lines(c(xA,xA)-shift,ve.pp.a.bounds,lwd=2.4)
lines(c(xA,xA)-shift,ve.pp.a.bounds.cb,lwd=1)
# If a lower bound is -Inf, redraw as an arrow/line to the plot floor (-1).
if (ve.pp.a.bounds[1]==-Inf) {
arrows(x0=xA-shift,y0=ve.pp.a.bounds[2],x1=xA-shift,y1=-1,lwd=2.4) ### If lower bound is -infinity
}
if (ve.pp.a.bounds.cb[1]==-Inf) {
lines(c(xA,xA)-shift,c(ve.pp.a.bounds.cb[2],-1),lwd=1)
}
# Set B.
lines(c(xB,xB)+shift,ve.pp.b.plaus,lwd=2.4)
lines(c(xB,xB)+shift,ve.pp.b.plaus.cb,lwd=1)
lines(c(xB,xB)-shift,ve.pp.b.bounds,lwd=2.4)
lines(c(xB,xB)-shift,ve.pp.b.bounds.cb,lwd=1)
if (ve.pp.b.bounds[1]==-Inf) {
arrows(x0=xB-shift,y0=ve.pp.b.bounds[2],x1=xB-shift,y1=-1,lwd=2.4)
}
# BUG FIX: the condition previously tested ve.pp.a.bounds.cb (set A) while
# drawing the set-B band; it must test the set-B object that is drawn.
if (ve.pp.b.bounds.cb[1]==-Inf) {
lines(c(xB,xB)-shift,c(ve.pp.b.bounds.cb[2],-1),lwd=1)
}
# Set C: set-B objects reused (PP1 results coincide under sets B and C,
# per the "Assumption Set B or C" caption elsewhere in this file).
lines(c(xC,xC)+shift,ve.pp.b.plaus,lwd=2.4)
lines(c(xC,xC)+shift,ve.pp.b.plaus.cb,lwd=1)
lines(c(xC,xC)-shift,ve.pp.b.bounds,lwd=2.4)
lines(c(xC,xC)-shift,ve.pp.b.bounds.cb,lwd=1)
if (ve.pp.b.bounds[1]==-Inf) {
arrows(x0=xC-shift,y0=ve.pp.b.bounds[2],x1=xC-shift,y1=-1,lwd=2.4)
}
# BUG FIX: same copy-paste error as above (tested ve.pp.a.bounds.cb).
if (ve.pp.b.bounds.cb[1]==-Inf) {
lines(c(xC,xC)-shift,c(ve.pp.b.bounds.cb[2],-1),lwd=1)
}
# Set D: APP objects reused — NOTE(review): presumably PP1 = APP under set D;
# confirm against the upstream computation.
lines(c(xD,xD)+shift,plaus.range.APP.D.est,lwd=2.4)
lines(c(xD,xD)+shift,plaus.range.APP.D.cb,lwd=1)
lines(c(xD,xD)-shift,bounds.APP.D.est,lwd=2.4)
lines(c(xD,xD)-shift,bounds.APP.D.cb,lwd=1)
mtext("A",side=1,line=1,at=xA)
mtext("B",side=1,line=1,at=xB)
mtext("C",side=1,line=1,at=xC)
mtext("D",side=1,line=1,at=xD)
title("(c)")
mtext(text="Assumption Set",side=1,line=3)
# The block below is a retained (disabled) fourth panel (d) that would have
# shown the observed per-protocol VE with bootstrap confidence bounds.
#plot(c(0+narrow,1-narrow),c(-1,1),type="n",ylab="VE",xlab="Assumption set",axes=FALSE)
#axis(2)
#box()
#abline(h=0,lty=2)
#
#
#lines(c(xA,xA)+shift,c(vePPobs,vePPobs)/100,lwd=2.4)
## Analytic confidence intervals:
##lines(c(xA,xA)+shift,c(ve[72],ve[72])/100,lwd=2.4)
##lines(c(xA,xA)+shift,c(lowint[72],upint[72])/100,lwd=1)
## Bootstrap confidence intervals
#lines(c(xA,xA)+shift,c(confbounds.PPobs[1],confbounds.PPobs[2])/100,lwd=1)
#
## Repeat the vertical line for consistency on the plot:
#lines(c(xA,xA)-shift,c(vePPobs,vePPobs)/100,lwd=2.4)
#lines(c(xA,xA)-shift,c(confbounds.PPobs[1],confbounds.PPobs[2])/100,lwd=1)
#
#title("(d)")
#
#mtext("A",side=1,line=1,at=xA)
dev.off()  # close the figure1.eps device
# Old way- mfrow=c(3,1) right formatting:
#postscript("figure1.eps",width=4,height=5.5)
#par(mfrow=c(3,1),oma=c(5,2,2,2),mar=c(2,2,2,2),cex.axis=0.9,cex.lab=0.7,cex.main=1.1)
#
#
#xA<-.2
#xB<-.4
#xC<-.6
#xD<-.8
#shift<-.03
#narrow<-.1
#
#plot(c(0+narrow,1-narrow),c(-1,1),type="n",ylab="Estimated Vaccine Efficacy for those who are Always Per Protocol",xlab="",axes=FALSE)
#plot(c(0+narrow,1-narrow),c(-1,1),type="n",ylab="VEAPP(39)",xlab="",axes=FALSE)
#axis(side=2,at=c(-1.0,-0.5,0.0,0.5,1.0),labels=c(-1.0,-0.5,0.0,0.5,1.0))
#box()
#abline(h=0,lty=2)
#
##lines(c(xA,xA)+shift,plaus.range.APP.A.est,lwd=3)
##lines(c(xA,xA)+shift,plaus.range.APP.A.cb,lwd=1)
#arrows(x0=xA+shift,y0=plaus.range.APP.A.est[2],x1=xA+shift,y1=-1,lwd=3)
#arrows(x0=xA+shift,y0=plaus.range.APP.A.cb[2],x1=xA+shift,y1=-1,lwd=1)
##lines(c(xA,xA)-shift,bounds.APP.A.est,lwd=3)
##lines(c(xA,xA)-shift,bounds.APP.A.cb,lwd=1)
#arrows(x0=xA-shift,y0=1,x1=xA-shift,y1=-1,lwd=3)
#
#lines(c(xB,xB)+shift,plaus.range.APP.B.est,lwd=3)
#lines(c(xB,xB)+shift,plaus.range.APP.B.cb,lwd=1)
#arrows(x0=xB-shift,y0=bounds.APP.B.est[2],x1=xB-shift,y1=-1,lwd=3)
#arrows(x0=xB-shift,y0=1,x1=xB-shift,y1=-1,lwd=1)
#
#lines(c(xC,xC)+shift,plaus.range.APP.C.est,lwd=3)
#lines(c(xC,xC)+shift,plaus.range.APP.C.cb,lwd=1)
#arrows(x0=xC-shift,y0=bounds.APP.C.est[2],x1=xC-shift,y1=-1,lwd=3)
#arrows(x0=xC-shift,y0=1,x1=xC-shift,y1=-1,lwd=1)
#
#lines(c(xD,xD)+shift,plaus.range.APP.D.est,lwd=3)
#lines(c(xD,xD)+shift,plaus.range.APP.D.cb,lwd=1)
#lines(c(xD,xD)-shift,bounds.APP.D.est,lwd=3)
#lines(c(xD,xD)-shift,bounds.APP.D.cb,lwd=1)
#
#mtext("A",side=1,line=1,at=xA)
#mtext("B",side=1,line=1,at=xB)
#mtext("C",side=1,line=1,at=xC)
#mtext("D",side=1,line=1,at=xD)
#
#title("(a)")
#
#xA<-.2
#xB<-.4
#xC<-.6
#xD<-.8
#shift<-.03
#narrow<-.1
#
##plot(c(0+narrow,1-narrow),c(-1,1),type="n",ylab="Estimated Vaccine Efficacy for those who are Always Per Protocol",xlab="",axes=FALSE)
#plot(c(0+narrow,1-narrow),c(-1,1),type="n",ylab="VEASA1(39)",xlab="",axes=FALSE)
#axis(side=2,at=c(-1.0,-0.5,0.0,0.5,1.0),labels=c(-1.0,-0.5,0.0,0.5,1.0))
#box()
#abline(h=0,lty=2)
#
##lines(c(xA,xA)+shift,plaus.range.ASA1.A.est,lwd=3)
##lines(c(xA,xA)+shift,plaus.range.ASA1.A.cb,lwd=1)
#arrows(x0=xA+shift,y0=plaus.range.ASA1.A.est[2],x1=xA+shift,y1=-1,lwd=3)
#arrows(x0=xA+shift,y0=plaus.range.ASA1.A.cb[2],x1=xA+shift,y1=-1,lwd=1)
##lines(c(xA,xA)-shift,ve.bounds.asa1.a,lwd=3)
#if (ve.bounds.asa1.a.cb[1]==-Inf) {
# lines(c(xA,xA)-shift,c(ve.bounds.asa1.a.cb[2],-1),lwd=1)
#}
#lines(c(xA,xA)-shift,ve.bounds.asa1.a.cb,lwd=1)
#arrows(x0=xA-shift,y0=ve.bounds.asa1.a[2],x1=xA-shift,y1=-1,lwd=3) ### If lower bound is infinity
#
#lines(c(xB,xB)+shift,plaus.range.APP.B.est,lwd=3)
#lines(c(xB,xB)+shift,plaus.range.APP.B.cb,lwd=1)
#arrows(x0=xB-shift,y0=bounds.APP.B.est[2],x1=xB-shift,y1=-1,lwd=3)
#arrows(x0=xB-shift,y0=1,x1=xB-shift,y1=-1,lwd=1)
#
#lines(c(xC,xC)+shift,plaus.range.APP.C.est,lwd=3)
#lines(c(xC,xC)+shift,plaus.range.APP.C.cb,lwd=1)
#arrows(x0=xC-shift,y0=bounds.APP.C.est[2],x1=xC-shift,y1=-1,lwd=3)
#arrows(x0=xC-shift,y0=1,x1=xC-shift,y1=-1,lwd=1)
#
#lines(c(xD,xD)+shift,plaus.range.APP.D.est,lwd=3)
#lines(c(xD,xD)+shift,plaus.range.APP.D.cb,lwd=1)
#lines(c(xD,xD)-shift,bounds.APP.D.est,lwd=3)
#lines(c(xD,xD)-shift,bounds.APP.D.cb,lwd=1)
#
#mtext("A",side=1,line=1,at=xA)
#mtext("B",side=1,line=1,at=xB)
#mtext("C",side=1,line=1,at=xC)
#mtext("D",side=1,line=1,at=xD)
#
#title("(b)")
#
#xA<-.2
#xB<-.4
#xC<-.6
#xD<-.8
#shift<-.03
#narrow<-.1
#
##plot(c(0+narrow,1-narrow),c(-1,1),type="n",ylab="Estimated Vaccine Efficacy for those who are Always Per Protocol",xlab="",axes=FALSE)
#plot(c(0+narrow,1-narrow),c(-1,1),type="n",ylab="VEPP1(39)",xlab="",axes=FALSE)
#axis(side=2,at=c(-1.0,-0.5,0.0,0.5,1.0),labels=c(-1.0,-0.5,0.0,0.5,1.0))
#box()
#abline(h=0,lty=2)
#
##lines(c(xA,xA)+shift,ve.pp.a.plaus,lwd=3)
##lines(c(xA,xA)+shift,ve.pp.a.plaus.cb,lwd=1)
#arrows(x0=xA+shift,y0=ve.pp.a.plaus[1,1],x1=xA+shift,y1=-1,lwd=3)
#arrows(x0=xA+shift,y0=ve.pp.a.plaus.cb[2],x1=xA+shift,y1=-1,lwd=1)
##lines(c(xA,xA)-shift,bounds.APP.A.est,lwd=3)
#
#lines(c(xA,xA)-shift,ve.pp.a.bounds,lwd=3)
#lines(c(xA,xA)-shift,ve.pp.a.bounds.cb,lwd=1)
#if (ve.pp.a.bounds[1]==-Inf) {
# arrows(x0=xA-shift,y0=ve.pp.a.bounds[2],x1=xA-shift,y1=-1,lwd=3) ### If lower bound is infinity
#}
#if (ve.pp.a.bounds.cb[1]==-Inf) {
# lines(c(xA,xA)-shift,c(ve.pp.a.bounds.cb[2],-1),lwd=1)
#}
#
#lines(c(xB,xB)+shift,ve.pp.b.plaus,lwd=3)
#lines(c(xB,xB)+shift,ve.pp.b.plaus.cb,lwd=1)
#lines(c(xB,xB)-shift,ve.pp.b.bounds,lwd=3)
#lines(c(xB,xB)-shift,ve.pp.b.bounds.cb,lwd=1)
#if (ve.pp.b.bounds[1]==-Inf) {
# arrows(x0=xB-shift,y0=ve.pp.b.bounds[2],x1=xB-shift,y1=-1,lwd=3)
#}
#if (ve.pp.a.bounds.cb[1]==-Inf) {
# lines(c(xB,xB)-shift,c(ve.pp.b.bounds.cb[2],-1),lwd=1)
#}
#
#lines(c(xC,xC)+shift,ve.pp.b.plaus,lwd=3)
#lines(c(xC,xC)+shift,ve.pp.b.plaus.cb,lwd=1)
#lines(c(xC,xC)-shift,ve.pp.b.bounds,lwd=3)
#lines(c(xC,xC)-shift,ve.pp.b.bounds.cb,lwd=1)
#if (ve.pp.b.bounds[1]==-Inf) {
# arrows(x0=xC-shift,y0=ve.pp.b.bounds[2],x1=xC-shift,y1=-1,lwd=3)
#}
#if (ve.pp.a.bounds.cb[1]==-Inf) {
# lines(c(xC,xC)-shift,c(ve.pp.b.bounds.cb[2],-1),lwd=1)
#}
#
#lines(c(xD,xD)+shift,plaus.range.APP.D.est,lwd=3)
#lines(c(xD,xD)+shift,plaus.range.APP.D.cb,lwd=1)
#lines(c(xD,xD)-shift,bounds.APP.D.est,lwd=3)
#lines(c(xD,xD)-shift,bounds.APP.D.cb,lwd=1)
#
#mtext("A",side=1,line=1,at=xA)
#mtext("B",side=1,line=1,at=xB)
#mtext("C",side=1,line=1,at=xC)
#mtext("D",side=1,line=1,at=xD)
#
#title("(c)")
#
#mtext(text="Assumption Set",side=1,line=4)
##plot(c(0+narrow,1-narrow),c(-1,1),type="n",ylab="VE",xlab="Assumption set",axes=FALSE)
##axis(2)
##box()
##abline(h=0,lty=2)
##
##
##lines(c(xA,xA)+shift,c(vePPobs,vePPobs)/100,lwd=3)
### Analytic confidence intervals:
###lines(c(xA,xA)+shift,c(ve[72],ve[72])/100,lwd=3)
###lines(c(xA,xA)+shift,c(lowint[72],upint[72])/100,lwd=1)
### Bootstrap confidence intervals
##lines(c(xA,xA)+shift,c(confbounds.PPobs[1],confbounds.PPobs[2])/100,lwd=1)
##
### Repeat the vertical line for consistency on the plot:
##lines(c(xA,xA)-shift,c(vePPobs,vePPobs)/100,lwd=3)
##lines(c(xA,xA)-shift,c(confbounds.PPobs[1],confbounds.PPobs[2])/100,lwd=1)
##
##title("(d)")
##
##mtext("A",side=1,line=1,at=xA)
#dev.off()
##