install.packages("TSA") library(TSA) # Google Stock Data (August, 2004 to May 2012) from Yahoo finance Website. google <- ts(read.table(file = "google.txt"), start=c(2004,8), freq=12 ) plot(google,ylab="Google Stock",xlab="Time",type="o") # Straight line model fit with output with you dataset fit <- lm(google~time(google)) summary(fit) abline(fit) # Do Residuals from straight line model fit plot(resid(fit),ylab="Residuals",xlab="Year",type="o") # Make Histogram and qq plot hist(rstudent(fit),main="Histogram of standardized residuals",xlab="Standardized residuals") qqnorm(rstudent(fit),main="QQ plot of standardized residuals") # Calculate sample ACF for standardised residuals acf(rstudent(fit),main="Sample ACF for standardized residuals") # Autoregressive process of order p=1, AR(1) summary(google.arma10 <- arma(google, order = c(1, 0))) plot(google.arma10) r<-diff(google) summary(r.arma10 <- arma(r, order = c(1, 0))) plot(r.arma10) # Autoregressive process of order p=2, AR(2) summary(google.arma20 <- arma(google, order = c(2, 0))) plot(google.arma20) r<-diff(google) summary(r.arma20 <- arma(r, order = c(2, 0))) plot(r.arma20) # Moving average process of order q=1, MA(1) summary(google.arma01 <- arma(google, order = c(0, 1))) plot(google.arma01) r<-diff(google) summary(r.arma01 <- arma(r, order = c(0, 1))) plot(r.arma01) # Moving average process of order q=2, MA(2) summary(google.arma02 <- arma(google, order = c(0, 2))) plot(google.arma02) r<-diff(google) summary(r.arma02 <- arma(r, order = c(0, 2))) plot(r.arma02) # Autoregressive Moving average process of orders p=1 and q=1. ARMA(1,1) summary(google.arma11 <- arma(google, order = c(1, 1))) plot(google.arma11) r<-diff(google) summary(r.arma11 <- arma(r, order = c(1, 1))) plot(r.arma11) # MA(1) Model by MLE Method google.ma1.fit = arima(google,order=c(0,0,1),method='ML') # AR(1) Model by MLE Method google.ar1.fit = arima(google,order=c(1,0,0),method='ML') # ARMA(1,1) Model by MLE Method google.arma11.fit = arima(google,order=c(1,0,1),method='ML') # ARIMA(1,1,1) Model by MLE Method google.arima111.fit = arima(google,order=c(1,1,1),method='ML') google.ma1.fit google.ar1.fit google.arma11.fit google.arima111.fit # By comparing the values of AIC, I chose ARIMA(1,1,1) out of 4 models for Google Data because the value of AIC by ARIMA(1,1,1) is the smallest one. 
# Model diagnostics via residual analysis
par(mfrow = c(2, 2))
plot(rstandard(google.arima111.fit), xlab = "Time",
     ylab = "Google standardized residuals", type = "o")
abline(h = 0)
hist(rstandard(google.arima111.fit), xlab = "Standardized residuals", main = "Google")
qqnorm(rstandard(google.arima111.fit), main = "Google")
qqline(rstandard(google.arima111.fit))

# Shapiro-Wilk and runs tests
shapiro.test(rstandard(google.arima111.fit))
runs(rstandard(google.arima111.fit))

# Plot and calculate the sample ACF of the residuals
acf(residuals(google.arima111.fit), main = "Sample ACF for ARIMA(1,1,1) residuals")
acf(residuals(google.arima111.fit), plot = FALSE, lag.max = 10)

# Ljung-Box test for model adequacy; 'tsdiag' produces the same diagnostics graphically
# (fitdf = p + q = 2 for the ARIMA(1,1,1) model)
Box.test(residuals(google.arima111.fit), lag = 10, type = "Ljung-Box", fitdf = 2)
tsdiag(google.arima111.fit, gof = 20, omit.initial = FALSE)

# Obtain MMSE forecasts
google.arima111.predict <- predict(google.arima111.fit, n.ahead = 24)
google.arima111.predict
google.arima111.predict$pred
google.arima111.predict$se

# Plot the MMSE forecasts
plot(google.arima111.fit, n.ahead = 20, col = "red", type = "b", pch = 16,
     ylab = "Google", xlab = "Year")

# Predictions for June 2012 through May 2014
year.month <- c(2012.416, 2012.500, 2012.583, 2012.666, 2012.750, 2012.833,
                2012.916, 2013.000, 2013.083, 2013.166, 2013.250, 2013.333,
                2013.416, 2013.500, 2013.583, 2013.666, 2013.750, 2013.833,
                2013.916, 2014.000, 2014.083, 2014.166, 2014.250, 2014.333)
lower.pi <- google.arima111.predict$pred - qnorm(0.975, 0, 1) * google.arima111.predict$se
upper.pi <- google.arima111.predict$pred + qnorm(0.975, 0, 1) * google.arima111.predict$se
data.frame(Month = year.month, lower.pi, upper.pi)

# Note: pch=16 produces a small filled circle (the MMSE prediction)
plot(google.arima111.fit, n.ahead = 24, col = "red", type = "b", pch = 16,
     n1 = c(2004, 8), ylab = "Google", xlab = "Year")
# Horizontal line at the ML estimate of the overall mean
# (only drawn when the model has an intercept; arima() drops the intercept when d >= 1)
abline(h = coef(google.arima111.fit)[names(coef(google.arima111.fit)) == "intercept"])
# Add 95% prediction interval lines to the plot (darker than the default)
lines(y = lower.pi, x = year.month, lwd = 2, col = "blue", lty = "dashed")
lines(y = upper.pi, x = year.month, lwd = 2, col = "blue", lty = "dashed")

install.packages("fGarch")
library(fGarch)

# Download daily adjusted closing prices for Google (only if Yahoo is reachable);
# get.hist.quote() comes from the tseries package loaded above
con <- url("http://quote.yahoo.com")
if (!inherits(try(open(con), silent = TRUE), "try-error")) {
  close(con)
  google <- get.hist.quote(instrument = "GOOG", start = "2004-08-19", quote = "AdjClose")
  plot(google, main = "Google")
}

# Daily log returns (in percent) and summary statistics
r.google <- diff(log(google)) * 100
plot(r.google)
abline(h = 0)
title(main = "Time plot of daily log returns of Google stock")
mean(r.google)
var(r.google)
skewness(r.google)
kurtosis(r.google)
summary(r.google)

# Autoregressive conditional heteroscedasticity (ARCH)
# Test for the presence of ARCH effects; the null hypothesis is no ARCH effect.
# We use the McLeod-Li test.
McLeod.Li.test(y = r.google)

# Engle's ARCH LM test: regress the squared series on its own lags;
# under H0 (no ARCH), n * R^2 is approximately chi-squared with 'lags' degrees of freedom
archlmtest <- function(x, lags, demean = FALSE) {
  x <- as.vector(x)
  if (demean) x <- scale(x, center = TRUE, scale = FALSE)
  lags <- lags + 1
  mat <- embed(x^2, lags)
  arch.lm <- summary(lm(mat[, 1] ~ mat[, -1]))
  STATISTIC <- arch.lm$r.squared * length(resid(arch.lm))
  names(STATISTIC) <- "Chi-squared"
  PARAMETER <- lags - 1
  names(PARAMETER) <- "df"
  PVAL <- 1 - pchisq(STATISTIC, df = PARAMETER)
  METHOD <- "ARCH LM-test"
  result <- list(statistic = STATISTIC, parameter = PARAMETER, p.value = PVAL,
                 method = METHOD, data.name = deparse(substitute(x)))
  class(result) <- "htest"
  return(result)
}
archlmtest(x = r.google, 1)
archlmtest(x = r.google, 2)
archlmtest(x = r.google, 12)
archlmtest(x = r.google, 24)

# Box-Pierce and Ljung-Box test statistics for the null hypothesis of
# independence in a given time series
Box.test(r.google, lag = 1)
Box.test(r.google, lag = 1, type = "Ljung")

# AR(1)-GARCH(1,1) for Google
google.m1 <- garchFit(~ arma(1, 0) + garch(1, 1), data = r.google, dist = "std")
summary(google.m1)
res <- residuals(google.m1)                          # residuals
stdres <- residuals(google.m1, standardize = TRUE)   # standardized residuals
par(mfrow = c(2, 1))
hist(res, main = "Histogram of Residuals")
hist(stdres, main = "Histogram of Standardized Residuals")
predict(google.m1, n.ahead = 30, plot = FALSE)
predict(google.m1, n.ahead = 30, plot = TRUE)

# Fitted values and residuals from the AR(1)-GARCH(1,1) fit
# (extracted from google.m1 rather than refitting the same model)
google.fit <- google.m1@fitted
google.fit
google.res <- google.m1@residuals
google.res
x <- cbind(google.fit, google.res)
plot(x)
par(mfrow = c(2, 3))
plot(google.fit)
acf(google.fit)
pacf(google.fit)
plot(google.res)
acf(google.res)
pacf(google.res)
write.table(x, file = "googlefitres.csv", sep = ",", col.names = NA)
read.table("googlefitres.csv", header = TRUE, sep = ",", row.names = 1)

# MA(1)-GARCH(1,1) for Google
google.m2 <- garchFit(~ arma(0, 1) + garch(1, 1), data = r.google, dist = "std")
summary(google.m2)

# GARCH(1,1) for Google
google.m3 <- garchFit(~ arma(0, 0) + garch(1, 1), data = r.google, dist = "std")
summary(google.m3)

## Volatility
# Standard deviation scale:
vol.sd <- volatility(google.m1, type = "sigma")
head(vol.sd)
tail(vol.sd)
page(vol.sd)
class(vol.sd)
# Variance scale:
vol.h <- volatility(google.m1, type = "h")
head(vol.h)
tail(vol.h)
page(vol.h)
class(vol.h)

library(rugarch)
library(car)
spec <- ugarchspec()
fit <- ugarchfit(data = as.numeric(r.google), spec = spec)
fit
coef(fit)

# Nelson's EGARCH model
egarch11.spec <- ugarchspec(variance.model = list(model = "eGARCH", garchOrder = c(1, 1)),
                            mean.model = list(armaOrder = c(0, 0), include.mean = TRUE),
                            distribution.model = "std", fixed.pars = list())
MSFT.egarch11.fit <- ugarchfit(egarch11.spec, as.numeric(r.google))
MSFT.egarch11.fit

# APARCH model
aparch11.1.spec <- ugarchspec(variance.model = list(model = "apARCH", garchOrder = c(1, 1)),
                              mean.model = list(armaOrder = c(0, 0), include.mean = TRUE),
                              distribution.model = "std", fixed.pars = list())
MSFT.aparch11.1.fit <- ugarchfit(aparch11.1.spec, as.numeric(r.google))
MSFT.aparch11.1.fit

# GJR-GARCH model
gjrgarch11.spec <- ugarchspec(variance.model = list(model = "gjrGARCH", garchOrder = c(1, 1)),
                              mean.model = list(armaOrder = c(0, 0), include.mean = TRUE),
                              distribution.model = "std", fixed.pars = list())
MSFT.gjrgarch11.fit <- ugarchfit(gjrgarch11.spec, as.numeric(r.google))
MSFT.gjrgarch11.fit
stdres2 <- residuals(MSFT.gjrgarch11.fit, standardize = TRUE)
hist(stdres2)
summary(stdres2)

# IGARCH model
igarch11.spec <- ugarchspec(variance.model = list(model = "iGARCH", garchOrder = c(1, 1)),
                            mean.model = list(armaOrder = c(0, 0), include.mean = TRUE),
                            distribution.model = "std", fixed.pars = list())
MSFT.igarch11.fit <- ugarchfit(igarch11.spec, as.numeric(r.google))
MSFT.igarch11.fit

# Standard GARCH (sGARCH) model
sgarch11.spec <- ugarchspec(variance.model = list(model = "sGARCH", garchOrder = c(1, 1)),
                            mean.model = list(armaOrder = c(0, 0), include.mean = TRUE),
                            distribution.model = "std", fixed.pars = list())
MSFT.sgarch11.fit <- ugarchfit(sgarch11.spec, as.numeric(r.google))
MSFT.sgarch11.fit

# TGARCH model (fGARCH family, submodel = "TGARCH")
tgarch11.spec <- ugarchspec(variance.model = list(model = "fGARCH", garchOrder = c(1, 1),
                                                  submodel = "TGARCH"),
                            mean.model = list(armaOrder = c(0, 0), include.mean = TRUE,
                                              archm = FALSE, archpow = 1, arfima = FALSE,
                                              external.regressors = NULL, archex = FALSE),
                            distribution.model = "std", start.pars = list(), fixed.pars = list())
MSFT.tgarch11.fit <- ugarchfit(tgarch11.spec, as.numeric(r.google))
MSFT.tgarch11.fit
stdres3 <- residuals(MSFT.tgarch11.fit, standardize = TRUE)
hist(stdres3)

# NGARCH model (fGARCH family, submodel = "NGARCH")
ngarch11.spec <- ugarchspec(variance.model = list(model = "fGARCH", garchOrder = c(1, 1),
                                                  submodel = "NGARCH"),
                            mean.model = list(armaOrder = c(1, 1), include.mean = TRUE,
                                              archm = FALSE, archpow = 1, arfima = FALSE,
                                              external.regressors = NULL, archex = FALSE),
                            distribution.model = "std", start.pars = list(), fixed.pars = list())
MSFT.ngarch11.fit <- ugarchfit(ngarch11.spec, as.numeric(r.google))
MSFT.ngarch11.fit

# NAGARCH model (fGARCH family, submodel = "NAGARCH")
nagarch11.spec <- ugarchspec(variance.model = list(model = "fGARCH", garchOrder = c(1, 1),
                                                   submodel = "NAGARCH"),
                             mean.model = list(armaOrder = c(1, 1), include.mean = TRUE,
                                               archm = FALSE, archpow = 1, arfima = FALSE,
                                               external.regressors = NULL, archex = FALSE),
                             distribution.model = "std", start.pars = list(), fixed.pars = list())
MSFT.nagarch11.fit <- ugarchfit(nagarch11.spec, as.numeric(r.google))
MSFT.nagarch11.fit

# AVGARCH model (fGARCH family, submodel = "AVGARCH")
avgarch11.spec <- ugarchspec(variance.model = list(model = "fGARCH", garchOrder = c(1, 1),
                                                   submodel = "AVGARCH"),
                             mean.model = list(armaOrder = c(1, 1), include.mean = TRUE,
                                               archm = FALSE, archpow = 1, arfima = FALSE,
                                               external.regressors = NULL, archex = FALSE),
                             distribution.model = "std", start.pars = list(), fixed.pars = list())
MSFT.avgarch11.fit <- ugarchfit(avgarch11.spec, as.numeric(r.google))
MSFT.avgarch11.fit

# ALLGARCH model (fGARCH family, submodel = "ALLGARCH")
allgarch11.spec <- ugarchspec(variance.model = list(model = "fGARCH", garchOrder = c(1, 1),
                                                    submodel = "ALLGARCH"),
                              mean.model = list(armaOrder = c(1, 1), include.mean = TRUE,
                                                archm = FALSE, archpow = 1, arfima = FALSE,
                                                external.regressors = NULL, archex = FALSE),
                              distribution.model = "std", start.pars = list(), fixed.pars = list())
MSFT.allgarch11.fit <- ugarchfit(allgarch11.spec, as.numeric(r.google))
MSFT.allgarch11.fit

# fGARCH family with submodel = "GJRGARCH"
fgjrgarch11.spec <- ugarchspec(variance.model = list(model = "fGARCH", garchOrder = c(1, 1),
                                                     submodel = "GJRGARCH"),
                               mean.model = list(armaOrder = c(0, 0), include.mean = TRUE,
                                                 archm = FALSE, archpow = 1, arfima = FALSE,
                                                 external.regressors = NULL, archex = FALSE),
                               distribution.model = "std", start.pars = list(), fixed.pars = list())
MSFT.fgjrgarch11.fit <- ugarchfit(fgjrgarch11.spec, as.numeric(r.google))
MSFT.fgjrgarch11.fit

# fGARCH family with submodel = "GARCH"
fggarch11.spec <- ugarchspec(variance.model = list(model = "fGARCH", garchOrder = c(1, 1),
                                                   submodel = "GARCH"),
                             mean.model = list(armaOrder = c(0, 0), include.mean = TRUE,
                                               archm = FALSE, archpow = 1, arfima = FALSE,
                                               external.regressors = NULL, archex = FALSE),
                             distribution.model = "std", start.pars = list(), fixed.pars = list())
MSFT.fggarch11.fit <- ugarchfit(fggarch11.spec, as.numeric(r.google))
MSFT.fggarch11.fit

# Fit structural time series models
data <- read.csv(file = "jnj.csv", header = TRUE)
jnj <- ts(data[, 7], start = c(1970, 1), freq = 12)
plot(log(jnj), ylab = "Adjusted Close", main = "Johnson & Johnson")

## Fit a 'basic structural model' (BSM) to the data
(fit <- StructTS(log(jnj), type = "BSM"))
# Note that the measurement uncertainty is small compared with the model
# uncertainties (i.e. those of the level, slope and seasonal components).

## Some diagnostics
plot(cbind(fitted(fit), resids = resid(fit)), main = "Johnson & Johnson")
tsdiag(fit)
stdres <- residuals(fit, standardize = TRUE)
hist(stdres)
k <- 4               # number of estimated variance parameters in the BSM
L <- fit$loglik
aic <- 2 * k - 2 * L
aic

# Local level model, fit to the same log series so the AIC values are comparable
fit <- StructTS(log(jnj), type = "level")
tsdiag(fit)
summary(fit)
k <- 2               # number of estimated variance parameters
L <- fit$loglik
aic <- 2 * k - 2 * L
aic

# Local linear trend model, again on the log scale
fit <- StructTS(log(jnj), type = "trend")
tsdiag(fit)
summary(fit)
k <- 3               # number of estimated variance parameters
L <- fit$loglik
aic <- 2 * k - 2 * L
aic
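# A minimal forecasting sketch for the structural model (an illustration, not part
# of the original analysis): refit the BSM on log(jnj) so the object is unambiguous,
# then produce 12-month-ahead forecasts with approximate 95% intervals.
bsm.fit <- StructTS(log(jnj), type = "BSM")
bsm.pred <- predict(bsm.fit, n.ahead = 12)     # list with components $pred and $se
ts.plot(log(jnj), bsm.pred$pred,
        bsm.pred$pred + 1.96 * bsm.pred$se,
        bsm.pred$pred - 1.96 * bsm.pred$se,
        col = c(1, 2, 4, 4), lty = c(1, 1, 2, 2),
        ylab = "log Adjusted Close",
        main = "Johnson & Johnson: BSM forecasts with 95% intervals")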