# install.packages("quantmod") # uncomment on first run
library(quantmod)
library(xts)
library(zoo)
start<-as.Date("2018-01-01")
end<-as.Date("2019-02-19")
getSymbols("JPM",src="yahoo",from=start,to=end)
JPM
jpm<-JPM$JPM.Adjusted
head(jpm)
# Daily simple returns (P_t - P_{t-1})/P_{t-1}. Under xts, lag(x,k=1) returns
# the previous observation (the zoo convention uses k=-1 for the same thing).
jpm.sr<-diff(jpm)/lag(jpm,k=1)
jpm.sr<-jpm.sr[-1] # drop the leading NA created by diff()
jpm.sr
library(psych)
describe(jpm.sr)
summary(jpm.sr)
sd(jpm.sr,na.rm=T)
str(jpm.sr)
plot(jpm.sr)
#Basic Historical Simulation
Pt<-1000000
loss<- -Pt*jpm.sr # daily loss in money terms; positive values are losses
head(loss)
v95<-quantile(loss,0.95)
v95
t.loss<-loss[loss>v95]
ES95<-mean(t.loss)
ES95
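#Sanity check: ES is the mean of the losses beyond VaR, so it can never be
#smaller than the VaR at the same confidence level.
stopifnot(ES95 >= v95)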
#Bootstrapping
# install.packages("boot") # boot ships with R; uncomment if missing
library(boot)
var95.boot <- function(data,index){
  var.boot <- quantile(data[index],0.95)
  return(var.boot)
}
es95.1 <- function(data,index){
  boot.loss <- data[index]
  boot.var <- quantile(boot.loss, 0.95)
  es.loss <- boot.loss[boot.loss>boot.var]
  boot.es <- mean(es.loss)
  return(boot.es)
}
b.v<-boot(loss,var95.boot,R=1000)
b.v
plot(b.v)
boot.ci(b.v,type="basic")
b.es<-boot(loss,es95.1,R=1000)
b.es
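#For symmetry with the VaR bootstrap above, the same diagnostics apply to ES:
plot(b.es)
boot.ci(b.es,type="basic")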
#Age-weighted historical simulation (AW-HS)
HS_aw <- function(jpm.sr, P = 1000000, lam = 0.98, ci = 0.95){
  alpha <- 1-ci
  n <- length(jpm.sr)
  # age = n for the oldest observation, 1 for the most recent
  df <- data.frame(age = seq(n,1), jpm.sr, loss = -P*jpm.sr)
  # Boudoukh-Richardson-Whitelaw age weights: w_i = (1-lam)*lam^(i-1)/(1-lam^n)
  df$w <- with(df,((1-lam)*lam^(age-1))/(1-lam^n))
  df <- df[order(df$loss, decreasing = TRUE),]
  df$cumw <- with(df, cumsum(w))
  # VaR: the loss whose cumulative weight comes closest to alpha
  VaR <- df[which.min(abs(df$cumw - alpha)),'loss']
  # ES: weighted average of the losses beyond VaR, with weights renormalised
  df2 <- df[df$loss > VaR,]
  df2$nw <- with(df2, w/sum(w))
  ES <- with(df2, sum(nw * loss))
  res.vec <- c(VaR = VaR, ES = ES)
  names(res.vec) <- paste0(names(res.vec),100*ci)
  return(res.vec)
}
HS_aw(as.vector(jpm.sr))
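#As lam -> 1 the age weights flatten towards 1/n, so AW-HS should approach
#the equally weighted HS figures above; a quick check with lam close to 1:
HS_aw(as.vector(jpm.sr), lam = 0.999)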
#Hull-White volatility-adjusted historical simulation
library(fGarch)
jpm.dm<-jpm.sr-mean(jpm.sr)
gf<-garchFit(~garch(1,1),data=as.numeric(jpm.dm),trace=FALSE) # GARCH(1,1) on the demeaned returns
vol<-volatility(gf)
vol.p<-vol[length(vol)] # most recent fitted volatility
st<-vol.p/vol # Hull-White scaling factors sigma_T/sigma_t
jpm.adj<-jpm.dm*st # rescale each demeaned return to current volatility
jpm.adj<-jpm.adj+mean(jpm.sr)
jpm.adj
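#Quick diagnostic: the fitted conditional volatility and the Hull-White
#scaling factors; the final observation has st = 1 by construction.
plot(vol, type = "l")
plot(st, type = "l")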
Pt<-1000000
loss1<- -Pt*jpm.adj
v95.2<-quantile(loss1,0.95) # use the volatility-adjusted losses, not the raw ones
v95.2
t2.loss<-loss1[loss1>v95.2]
ES95.2<-mean(t2.loss)
ES95.2
library(boot)
b.v.2<-boot(loss1,var95.boot,R=1000)
b.v.2
plot(b.v.2)
boot.ci(b.v.2,type = "basic")
b.es.2<-boot(loss1,es95.1,R=1000) # es95.1 defined above; a new name keeps the earlier b.es intact
b.es.2
plot(b.es.2)
boot.ci(b.es.2,type = "basic")
#a normal distribution without volatility adjustment
library(ghyp)
mu<-mean(jpm.sr)
mu
sigma<-sd(jpm.sr)
sigma
g<-gauss(mu,sigma)
g
mean(g)
vcov(g)
plot(g,type="l")
qghyp(0.95,g)
Pt<-1000000
g.l<-transform(g,multiplier=-Pt)
g.l
plot(g.l,type='l')
ESghyp(0.95,g.l,distr="loss")
qghyp(0.95,g.l)
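#Cross-check (a sketch using the closed-form normal formulas): the loss is
#N(-Pt*mu, (Pt*sigma)^2), so these should reproduce the qghyp() and ESghyp()
#figures above.
-Pt*mu + Pt*sigma*qnorm(0.95) # 95% VaR
-Pt*mu + Pt*sigma*dnorm(qnorm(0.95))/0.05 # 95% ES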
# a normal distribution with volatility adjustment
mu.adj<-mean(jpm.adj)
mu.adj
sigma.adj<-sd(jpm.adj)
sigma.adj
g.adj<-gauss(mu.adj,sigma.adj)
g.adj
mean(g.adj)
vcov(g.adj)
plot(g.adj,type="l")
qghyp(0.95,g.adj)
Pt<-1000000
g.adj.l<-transform(g.adj,multiplier=-Pt)
g.adj.l
plot(g.adj.l,type='l')
ESghyp(0.95,g.adj.l,distr = "loss")
qghyp(0.95,g.adj.l)
#an appropriate distribution without volatility adjustment
stepAIC.ghyp(jpm.sr) # compare generalised hyperbolic subfamilies by AIC
gf.hyp<-fit.hypuv(jpm.sr,symmetric = TRUE) # new name so the garchFit object gf is not overwritten
gf.hyp
plot(gf.hyp)
hist(gf.hyp)
qqghyp(gf.hyp,gaussian = FALSE)
s1<-rghyp(10000,gf.hyp)
ks.test(s1,as.numeric(jpm.sr))
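#Caveat: the KS p-value is only indicative here, since the distribution's
#parameters were estimated from the same data being tested.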
port1<-1 # per unit of investment; multiply the risk measures by the position value
loss.distribution<-transform(gf.hyp,multiplier=-port1)
loss.distribution
ESghyp(0.95,loss.distribution,distr = "loss")
qghyp(0.95,loss.distribution)
# an appropriate distribution with volatility adjustment
stepAIC.ghyp(jpm.adj)
tf<-fit.NIGuv(jpm.adj,symmetric = TRUE)
tf
plot(tf)
hist(tf)
qqghyp(tf,gaussian = FALSE)
s2<-rghyp(10000,tf)
ks.test(s2,as.numeric(jpm.adj))
Pt<-1 # per unit of investment, as above
loss.distribution.adj<-transform(tf,multiplier=-Pt)
loss.distribution.adj
ESghyp(0.95,loss.distribution.adj,distr = "loss")
qghyp(0.95,loss.distribution.adj)
##two assets
library(quantmod)
library(xts)
library(zoo)
start<-as.Date("2018-01-01")
end<-as.Date("2019-02-19")
getSymbols(c("JPM","AAPL"),src ="yahoo",from=start,to=end)
all<-cbind(JPM[,"JPM.Adjusted"],AAPL[,"AAPL.Adjusted"])
head(all)
##calculate returns
all.ret<-diff(all)/lag(all,k=1) # xts convention: k=1 is the previous observation
all.ret<-all.ret[-1] # drop the leading NA row
head(all.ret)
summary(all.ret)
plot(all.ret,col=c("blue","red"),legend.loc="top")
library(fBasics)
basicStats(all.ret)
cor(all.ret,use = "complete.obs")
all.ret.df<-as.data.frame(all.ret)
boxplot(all.ret.df)
all.ret
##basic historical simulation
P<-2000000
w<-c(0.5,0.5)
Pw<- -P*w
loss.all<-rowSums(t(Pw*t(all.ret))) # daily portfolio loss: money weights times returns, summed over assets
var953<-quantile(loss.all,0.95)
var953
t.loss<-loss.all[loss.all>var953]
ES953<-mean(t.loss)
ES953
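#Equivalent formulation as a matrix product of returns with money weights;
#a quick check that it matches the rowSums construction above.
loss.chk<-as.numeric(coredata(all.ret) %*% Pw)
stopifnot(isTRUE(all.equal(loss.chk, unname(loss.all))))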
##bootstrapping
library(boot)
var.boot <- function(data, index){
  q95 <- quantile(data[index], 0.95)
  return(q95)
}
b.var.all1<- boot(loss.all, var.boot, R=1000)
b.var.all1
plot(b.var.all1)
boot.ci(b.var.all1,type="basic")
es.boot <- function(data,index){
  loss.samp <- data[index]
  var <- quantile(loss.samp,0.95)
  es <- mean(loss.samp[loss.samp>var])
  return(es)
}
b.es.all1 <- boot(loss.all,es.boot,R=1000)
b.es.all1
plot(b.es.all1)
boot.ci(b.es.all1,type="basic")
##Age-weighted historical simulation
HS_aw <- function(r, loss, lam = 0.98, ci = 0.95){
  alpha <- 1-ci
  n <- NROW(r) # number of days; length() on a two-column xts would return n*2
  df <- data.frame(age = seq(n,1), as.data.frame(r), loss = as.numeric(loss))
  df$w <- with(df,((1-lam)*lam^(age-1))/(1-lam^n))
  df <- df[order(df$loss, decreasing = TRUE),]
  df$cumw <- with(df, cumsum(w))
  VaR <- df[which.min(abs(df$cumw - alpha)),'loss']
  df2 <- df[df$loss > VaR,]
  df2$nw <- with(df2, w/sum(w))
  ES <- with(df2, sum(nw * loss))
  res.vec <- c(VaR = VaR, ES = ES)
  names(res.vec) <- paste0(names(res.vec),100*ci)
  return(res.vec)
}
HS_aw(all.ret,loss.all)
##Hull-White historical simulation
aapl<-AAPL$AAPL.Adjusted
head(aapl)
aapl.sr<-diff(aapl)/lag(aapl,k=1) # k=1 for the previous observation, as above
aapl.sr<-aapl.sr[-1] # drop the leading NA
aapl.sr
library(fGarch)
aapl.sr.dm<-aapl.sr-mean(aapl.sr)
gf.aapl<-garchFit(~garch(1,1),data=as.numeric(aapl.sr.dm),trace=FALSE)
vol.aapl<-volatility(gf.aapl)
vol.p.aapl<-vol.aapl[length(vol.aapl)]
st.aapl<-vol.p.aapl/vol.aapl
aapl.adj<-aapl.sr.dm*st.aapl
aapl.adj<-aapl.adj+mean(aapl.sr)
aapl.jpm.adj<-cbind(aapl.adj,jpm.adj)
##calculate losses and risk measures
P<-2000000
w<-c(0.5,0.5)
Pw<- -P*w
Loss.adj<-rowSums(t(Pw*t(aapl.jpm.adj)))
Var2.all<-quantile(Loss.adj,0.95)
Var2.all
t.Loss.adj<-Loss.adj[Loss.adj>Var2.all]
ES3.all<-mean(t.Loss.adj)
ES3.all
##bootstrapping
b.var.all2<- boot(Loss.adj,var.boot, R=1000) # new names so the unadjusted results above are kept
b.var.all2
plot(b.var.all2)
boot.ci(b.var.all2,type="basic")
b.es.all2 <- boot(Loss.adj,es.boot,R=1000)
b.es.all2
plot(b.es.all2)
boot.ci(b.es.all2,type="basic")
##Parametric, using the Normal distribution without volatility adjustment
Mean<-colMeans(all.ret)
Mean
Cov.all<- cov(all.ret)
Cov.all
G<-gauss(mu=Mean,sigma=Cov.all)
G
P<-2000000
wa<-0.5
wb<-0.5
mult<- -P*c(wa,wb)
mult
G.l<-transform(G,multiplier=mult)
G.l
ESghyp(0.95,G.l,distr = "loss")
qghyp(0.95,G.l)
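#Cross-check: under this model the portfolio loss is univariate normal with
#mean sum(mult*Mean) and variance t(mult) %*% Cov.all %*% mult, so the
#closed-form quantile should match the qghyp() figure above.
mu.L<-sum(mult*Mean)
sd.L<-sqrt(as.numeric(t(mult) %*% Cov.all %*% mult))
mu.L + sd.L*qnorm(0.95) # 95% VaR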
##Parametric, using the Normal distribution with volatility adjustment
library(ghyp)
Mean.adj<-colMeans(aapl.jpm.adj)
Mean.adj
Cov.all.adj<-cov(aapl.jpm.adj)
Cov.all.adj
G.adj<-gauss(mu=Mean.adj,sigma=Cov.all.adj)
G.adj
P<-2000000
wa<-0.5
wb<-0.5
mult<- -P*c(wa,wb)
mult
G.l.adj<-transform(G.adj,multiplier=mult)
G.l.adj
ESghyp(0.95,G.l.adj,distr = "loss")
qghyp(0.95,G.l.adj)
##Parametric, using an appropriate distribution without volatility adjustment
stepAIC.ghyp(all.ret)
mf<-fit.NIGmv(all.ret)
mf
#select the margin to plot
plot(mf[1],type="l")
plot(mf[2],type="l")
hist(mf[1])
qqghyp(mf[1])
hist(mf[2])
qqghyp(mf[2])
#bivariate plot
pairs(mf)
#use the Peacock test (a bivariate Kolmogorov-Smirnov test) to check the fit
library(Peacock.test)
S1<-rghyp(10000,mf)
peacock2(all.ret,S1)
#The model is adequate (at the 5% significance level) if the test statistic is less than 1.83, for an effective sample size greater than 50.
#calculate effective sample size
n1 <- dim(all.ret)[1]
n2 <- dim(S1)[1]
eff <- (n1*n2)/(n1+n2)
eff
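#With roughly 280 daily observations and 10000 simulated points, eff is about
#273, comfortably above the 50 needed for the 1.83 criterion to apply.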
#risk measures
P1 <- 2 # presumably the 2000000 position expressed in millions, so the risk measures come out in millions
wa <- 0.5
wb <- 0.5
mult <- -P1*c(wa,wb)
ld.all<-transform(mf,multiplier=mult)
ld.all
ESghyp(0.95,ld.all,distr = "loss")
qghyp(0.95,ld.all)
##Parametric, using an appropriate distribution with volatility adjustment
stepAIC.ghyp(aapl.jpm.adj)
mf.adj<-fit.tmv(aapl.jpm.adj)
mf.adj
#select the margin to plot
plot(mf.adj[1],type="l")
plot(mf.adj[2],type="l")
hist(mf.adj[1])
qqghyp(mf.adj[1])
hist(mf.adj[2])
qqghyp(mf.adj[2])
#bivariate plot
pairs(mf.adj)
#use the Peacock test to check the fit
library(Peacock.test)
S2<-rghyp(10000,mf.adj)
peacock2(aapl.jpm.adj,S2)
#The model is adequate (at the 5% significance level) if the test statistic is less than 1.83, for an effective sample size greater than 50.
#calculate effective sample size
n1.adj <- dim(aapl.jpm.adj)[1]
n2.adj <- dim(S2)[1]
eff.adj <- (n1.adj*n2.adj)/(n1.adj+n2.adj)
eff.adj
#risk measures
P1 <- 2 # in millions, as above
wa <- 0.5
wb <- 0.5
mult <- -P1*c(wa,wb)
ld.all.adj<-transform(mf.adj,multiplier=mult)
ld.all.adj
ESghyp(0.95,ld.all.adj,distr = "loss")
qghyp(0.95,ld.all.adj)
###(c) find ES in the two-asset case, using basic HS, allowing the weights to vary
#applied to the Hull-White volatility-adjusted returns
hsj <- function(wa, P, rets){
  w <- c(wa, 1-wa) # wa is the weight in the first column of rets (AAPL here)
  Pw <- -P*w
  loss <- rowSums(t(Pw * t(rets)))
  var <- quantile(loss, 0.95)
  es <- mean(loss[loss>var])
  return(es)
}
#we want ES for weights from 0-1 at intervals of 0.01
weights<- seq(0, 1, by = 0.01)
ES.weights <- sapply(weights, hsj, P = 2000000, rets = aapl.jpm.adj)
ES.weights
plot(weights,ES.weights, type = 'l')
#find the minimum ES (risk exposure)
which.min(ES.weights) # index into the weights grid, not the weight itself
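#Report the minimising weight and its ES; wa is the weight in the first
#column of aapl.jpm.adj (AAPL), with 1-wa in JPM.
weights[which.min(ES.weights)]
min(ES.weights)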