Financial Statistics and Econometrics: Homework 3 by Python

1.第1问和第2问

Program
# -*- coding: utf-8 -*-
"""
Created on Tue Nov  6 12:08:07 2018

@author: Wengsway

"""

import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from scipy import stats
import statsmodels.api as sm
import warnings

# 1: Load the return series and plot its fluctuation around the mean.
# The CSV has no header row: column 0 is the date, column 1 the return r.
df = pd.read_csv("000002.csv", header=None, names=['date', 'r'])
plt.plot(df['r'], label='r')
# Draw the mean line over the actual index range instead of the hard-coded
# endpoint 835 (the 835 observations occupy indices 0..834).
plt.hlines(np.mean(df['r']), 0, len(df) - 1, colors='r', linestyles="--", label='mean_r')
plt.legend(loc='best')
plt.title('Fluctuation Map')
plt.show()

# 2: Estimate the series with an ARMA model
# 2.1: Augmented Dickey-Fuller test for stationarity of r
adf_result = sm.tsa.stattools.adfuller(df['r'])
stat_labels = ['Test Statistic', 'p-value',
               '#Lags Used', 'Number of observations Used']
# Keep only the first four return values (statistic, p-value, lags, nobs).
dfoutput = pd.Series(adf_result[:4], index=stat_labels)
print(dfoutput)
print('从 ADF 检验结果可知该时间序列是平稳的')

# 2.2: Determine the order of the AR model
# 2.2.1: Choose p from the partial autocorrelation function
train = df['r'][:-10]  # split the data into a training set and a test set
test = df['r'][-10:]
lag_pacf = sm.tsa.stattools.pacf(train, nlags=20, method='ols')
lags = np.linspace(1, 20, 20)
plt.plot(lags, lag_pacf[1:], linestyle='-', marker='o', mec='r')
# Approximate 95% confidence band for the PACF estimates.
conf_band = 2 / np.sqrt(len(df))
plt.axhline(y=-conf_band, linestyle='--', color='gray')
plt.axhline(y=conf_band, linestyle='--', color='gray')
plt.title('Partial Autocorrelation of r')
plt.xlabel('Lag')
plt.show()
# 2.2.2: Fit the AR(11) model and overlay its fitted values on the data
ar_order = (11, 0)  # pure AR: MA order fixed at 0
results_AR = sm.tsa.ARMA(train, ar_order).fit()
plt.plot(train, 'b', label='r')
plt.plot(results_AR.fittedvalues, 'r', label='AR model')
plt.legend(loc='best')
plt.title('The AR Model')
plt.show()
# 2.2.3: Compare information criteria across AR orders 1..7
aicList, bicList, hqicList = [], [], []
for order_p in range(1, 8):  # start from order 1
    # ARMA with the MA order fixed at 0, i.e. a pure AR(order_p) model.
    fitted = sm.tsa.ARMA(train, (order_p, 0)).fit()
    aicList.append(fitted.aic)
    bicList.append(fitted.bic)
    hqicList.append(fitted.hqic)
plt.plot(aicList, 'ro--', label='aic value')
plt.plot(bicList, 'bo--', label='bic value')
plt.plot(hqicList, 'ko--', label='hqic value')
plt.legend(loc='best')
plt.show()
# 2.2.4: Diagnostic checking of the AR model
resid_series = results_AR.fittedvalues - train  # residual (error) series
# Autocorrelations of the residuals with Ljung-Box Q statistics and p-values.
acf, q, p = sm.tsa.acf(resid_series, nlags=10, qstat=True)
stats_table = np.c_[range(1, 11), acf[1:], q, p]
output_AR = pd.DataFrame(stats_table,
                         columns=['lag', "ACF", "Q", "P-value"]).set_index('lag')
print(output_AR)
print('从白噪声检验结果可以看出误差项接近于白噪声序列')
# Pseudo adjusted R^2: share of variance explained by the fitted values.
adjR_AR = 1 - resid_series.var() / train.var()
print('AR 模型的调整R^2为', adjR_AR)
# 2.2.5: Out-of-sample prediction with the AR model.
# Forecast exactly the test-set range; the start/end indices are derived from
# the data (len(train)=825, len(df)-1=834 here) instead of being hard-coded,
# so the code keeps working if the sample size changes.
predicts_AR = results_AR.predict(len(train), len(df) - 1, dynamic=True)
compare_AR = pd.DataFrame()
compare_AR['original'] = test
compare_AR['predict_AR'] = predicts_AR
print('AR Predict results:', '\n', compare_AR)
plt.plot(compare_AR['original'], label='Original')
plt.plot(compare_AR['predict_AR'], label='AR_Predict')
plt.title('AR Model prediction results')
plt.legend(loc='best')
plt.show()

# 2.3: Determine the order of the MA model
# 2.3.1: Choose q from the autocorrelation function
lag_acf = sm.tsa.stattools.acf(train, nlags=20)
lags = np.linspace(1, 20, 20)
plt.plot(lags, lag_acf[1:], linestyle='-', marker='o', mec='r')
# Approximate 95% confidence band for the ACF estimates.
conf_band = 2 / np.sqrt(len(df))
plt.axhline(y=-conf_band, linestyle='--', color='gray')
plt.axhline(y=conf_band, linestyle='--', color='gray')
plt.title('Autocorrelation of r')
plt.xlabel('Lag')
plt.show()
# Fit the MA(11) model and overlay its fitted values on the data.
results_MA = sm.tsa.ARMA(train, (0, 11)).fit()
# Plot the training series (not the whole df) with label 'r', consistent with
# the AR-model plot above; the old `df['r']` / 'changepct' pair was a leftover
# from another script and made the two model plots incomparable.
plt.plot(train, 'b', label='r')
plt.plot(results_MA.fittedvalues, 'r', label='MA model')
plt.title('The MA Model')
plt.legend(loc='best')
plt.show()
# 2.3.2: Out-of-sample prediction with the MA model.
# Forecast the test-set range; indices are derived from the data
# (len(train)=825, len(df)-1=834 here) rather than hard-coded.
predicts_MA = results_MA.predict(len(train), len(df) - 1, dynamic=True)
compare_MA = pd.DataFrame()
compare_MA['original'] = test
compare_MA['predict_MA'] = predicts_MA
print('MA Predict results:', '\n', compare_MA)
plt.plot(compare_MA['original'], label='Original')
plt.plot(compare_MA['predict_MA'], label='MA_Predict')
plt.title('MA Model prediction results')
plt.legend(loc='best')
plt.show()
# 2.3.3: Goodness-of-fit check for the MA model
resid_MA = results_MA.fittedvalues - train  # residual (error) series
# Pseudo adjusted R^2: share of variance explained by the fitted values.
adjR_MA = 1 - resid_MA.var() / train.var()
print('MA 模型的调整R^2为', adjR_MA)
# 2.4: ARMA model — select (p, q) under each information criterion

warnings.filterwarnings("ignore")

def _select_order(ic):
    # Grid-search (p, q) up to (5, 5) and return the ic-minimizing pair.
    search = sm.tsa.arma_order_select_ic(train, max_ar=5, max_ma=5, ic=ic)
    return search[ic + '_min_order']

AIC_output = _select_order('aic')
print('以 AIC 最小原则选取的 p 和 q:', AIC_output)
BIC_output = _select_order('bic')
print('以 BIC 最小原则选取的 p 和 q:', BIC_output)
HQIC_output = _select_order('hqic')
print('以 HQIC 最小原则选取的 p 和 q:', HQIC_output)
# Suppose we follow the AIC criterion: fit ARMA(3, 2).
results_ARMA_1 = sm.tsa.ARMA(train, (3, 2)).fit()
print(results_ARMA_1.summary())
# Forecast the test-set range; indices derived from the data instead of the
# hard-coded 825/834 so a different sample size does not break the code.
predicts_ARMA_1 = results_ARMA_1.predict(len(train), len(df) - 1, dynamic=True)
compare_ARMA_1 = pd.DataFrame()
compare_ARMA_1['original'] = test
compare_ARMA_1['predict_ARMA_1'] = predicts_ARMA_1
print('ARMA(3,2) Predict results:', '\n', compare_ARMA_1)
plt.plot(compare_ARMA_1['original'], label='Original')
plt.plot(compare_ARMA_1['predict_ARMA_1'], label='ARMA_Predict_1')
plt.title('ARMA(3,2) Model prediction results')
plt.legend(loc='best')
plt.show()
# Goodness-of-fit check for the ARMA(3,2) model
delta_ARMA_1 = results_ARMA_1.fittedvalues - train  # residual series
adjR_ARMA_1 = 1 - delta_ARMA_1.var() / train.var()
print('ARMA(3,2)模型的调整R^2为', adjR_ARMA_1)
Answer

Test Statistic                 -29.465798
p-value                          0.000000
#Lags Used                       0.000000
Number of observations Used    834.000000
dtype: float64
从 ADF 检验结果可知该时间序列是平稳的

           ACF         Q   P-value
lag                               
1.0  -0.000676  0.000378  0.984489
2.0  -0.002286  0.004709  0.997648
3.0   0.003245  0.013448  0.999587
4.0  -0.002859  0.020240  0.999949
5.0  -0.004877  0.040027  0.999983
6.0   0.002683  0.046024  0.999998
7.0  -0.000434  0.046181  1.000000
8.0  -0.001756  0.048756  1.000000
9.0   0.001566  0.050808  1.000000
10.0 -0.002783  0.057293  1.000000

从白噪声检验结果可以看出误差项接近于白噪声序列

AR 模型的调整R^2为 0.021315709346917178
AR Predict results: 
      original  predict_AR
825 -0.002321   -0.000411
826 -0.042345    0.010983
827  0.010690   -0.002776
828 -0.007692    0.004617
829  0.001938    0.005875
830 -0.057060    0.004500
831 -0.031282    0.007006
832  0.026998    0.008854
833 -0.011856    0.003430
834  0.071987    0.004828

MA Predict results: 
      original  predict_MA
825 -0.002321    0.001475
826 -0.042345    0.012787
827  0.010690    0.000178
828 -0.007692    0.006093
829  0.001938    0.006318
830 -0.057060    0.006718
831 -0.031282    0.007489
832  0.026998    0.009936
833 -0.011856    0.003360
834  0.071987    0.005222

MA 模型的调整R^2为 0.020163754775557208
以 AIC 最小原则选取的 p 和 q: (3, 2)
以 BIC 最小原则选取的 p 和 q: (0, 0)
以 HQIC 最小原则选取的 p 和 q: (0, 0)
                              ARMA Model Results                              
==============================================================================
Dep. Variable:                      r   No. Observations:                  825
Model:                     ARMA(3, 2)   Log Likelihood                1189.512
Method:                       css-mle   S.D. of innovations              0.057
Date:                Wed, 07 Nov 2018   AIC                          -2365.023
Time:                        16:02:57   BIC                          -2332.015
Sample:                             0   HQIC                         -2352.361

==============================================================================
                 coef    std err          z      P>|z|      [0.025      0.975]
------------------------------------------------------------------------------
const          0.0052      0.002      2.658      0.008       0.001       0.009
ar.L1.r        0.6920      0.035     19.513      0.000       0.622       0.761
ar.L2.r       -0.9454      0.029    -33.123      0.000      -1.001      -0.889
ar.L3.r       -0.0435      0.035     -1.235      0.217      -0.113       0.026
ma.L1.r       -0.7193      0.007   -104.178      0.000      -0.733      -0.706
ma.L2.r        0.9942      0.009    115.048      0.000       0.977       1.011
                                    Roots                                    
=============================================================================
                  Real          Imaginary           Modulus         Frequency
-----------------------------------------------------------------------------
AR.1            0.3765           -0.9384j            1.0111           -0.1893
AR.2            0.3765           +0.9384j            1.0111            0.1893
AR.3          -22.4756           -0.0000j           22.4756           -0.5000
MA.1            0.3617           -0.9354j            1.0029           -0.1913
MA.2            0.3617           +0.9354j            1.0029            0.1913
-----------------------------------------------------------------------------
ARMA(3,2) Predict results: 
      original  predict_ARMA_1
825 -0.002321        0.003338
826 -0.042345        0.006862
827  0.010690        0.006350
828 -0.007692        0.004505
829  0.001938        0.003559
830 -0.057060        0.004671
831 -0.031282        0.006415
832  0.026998        0.006612
833 -0.011856        0.005051
834  0.071987        0.003708
ARMA(3,2)模型的调整R^2为 0.019816685557134783

Program
# The AR(3) coefficient turned out insignificant, so try ARMA(2, 2) instead.
results_ARMA_2 = sm.tsa.ARMA(train, (2, 2)).fit()
print(results_ARMA_2.summary())
# Forecast the test-set range; indices derived from the data instead of the
# hard-coded 825/834 so a different sample size does not break the code.
predicts_ARMA_2 = results_ARMA_2.predict(len(train), len(df) - 1, dynamic=True)
compare_ARMA_2 = pd.DataFrame()
compare_ARMA_2['original'] = test
compare_ARMA_2['predict_ARMA_2'] = predicts_ARMA_2
print('ARMA(2,2) Predict results:', '\n', compare_ARMA_2)
plt.plot(compare_ARMA_2['original'], label='Original')
plt.plot(compare_ARMA_2['predict_ARMA_2'], label='ARMA_Predict_2')
plt.title('ARMA(2,2) Model prediction results')
plt.legend(loc='best')
plt.show()
# Goodness-of-fit check for the ARMA(2,2) model
delta_ARMA_2 = results_ARMA_2.fittedvalues - train  # residual series
adjR_ARMA_2 = 1 - delta_ARMA_2.var() / train.var()
print('ARMA(2,2)模型的调整R^2为', adjR_ARMA_2)
Answer
ARMA Model Results                              
==============================================================================
Dep. Variable:                      r   No. Observations:                  825
Model:                     ARMA(2, 2)   Log Likelihood                1201.528
Method:                       css-mle   S.D. of innovations              0.057
Date:                Wed, 07 Nov 2018   AIC                          -2391.057
Time:                        16:10:57   BIC                          -2362.692
Sample:                             0   HQIC                         -2380.182

==============================================================================
                 coef    std err          z      P>|z|      [0.025      0.975]
------------------------------------------------------------------------------
const          0.0051      0.002      2.546      0.011       0.001       0.009
ar.L1.r        0.4282      0.010     43.140      0.000       0.409       0.448
ar.L2.r       -0.9812      0.008   -124.697      0.000      -0.997      -0.966
ma.L1.r       -0.4336      0.004   -100.860      0.000      -0.442      -0.425
ma.L2.r        1.0000      0.007    148.386      0.000       0.987       1.013
                                    Roots                                    
=============================================================================
                  Real          Imaginary           Modulus         Frequency
-----------------------------------------------------------------------------
AR.1            0.2182           -0.9857j            1.0095           -0.2153
AR.2            0.2182           +0.9857j            1.0095            0.2153
MA.1            0.2168           -0.9762j            1.0000           -0.2152
MA.2            0.2168           +0.9762j            1.0000            0.2152
-----------------------------------------------------------------------------
ARMA(2,2) Predict results: 
      original  predict_ARMA_2
825 -0.002321        0.009458
826 -0.042345        0.008430
827  0.010690        0.002235
828 -0.007692        0.000592
829  0.001938        0.005966
830 -0.057060        0.009880
831 -0.031282        0.006283
832  0.026998        0.000902
833 -0.011856        0.002128
834  0.071987        0.007932
ARMA(2,2)模型的调整R^2为 0.006762936649020834

ARMA(2,2)模型公式:
r_t=0.0051+0.4282r_{t-1}-0.9812r_{t-2}-0.4336\varepsilon_{t-1}+\varepsilon_{t-2}+\varepsilon_t\\z=(2.546)\quad(43.140)\quad(-124.697)\quad(-100.860)\quad(148.386)

2.第3问至第7问

Program
# -*- coding: utf-8 -*-
"""
Created on Wed Nov  7 09:33:24 2018

@author: Wengsway

"""

import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import statsmodels.api as sm
import arch

# Reload the return series (no header row: date + return r).
df = pd.read_csv("000002.csv", header=None, names=['date', 'r'])
# 3: Does the series exhibit ARCH effects?
# 3.1: Build the AR(0) model from part 2
holdout = 10
train = df['r'][:-holdout]  # training set
test = df['r'][-holdout:]   # test set
# Fit a constant-mean (AR(0)) model and inspect residuals and squared residuals.
results_AR = sm.tsa.ARMA(train, (0, 0)).fit()
delta_AR = train - results_AR.fittedvalues  # residuals
delta_AR2 = delta_AR ** 2                   # squared residuals
panels = [(211, delta_AR, 'delta_AR'), (212, delta_AR2, 'delta_AR^2')]
for position, series, series_label in panels:
    plt.subplot(position)
    plt.plot(series, label=series_label)
    plt.legend(loc='best')
plt.show()
# 3.2: Ljung-Box test on the squared residuals for ARCH effects
acf, q, p = sm.tsa.acf(delta_AR2, nlags=15, qstat=True)
stats_table = np.c_[range(1, 16), acf[1:], q, p]
output = pd.DataFrame(stats_table,
                      columns=['lag', 'AC', 'Q', 'P-Value']).set_index('lag')
print(output)
print('该数列具有ARCH效应')

# 4: Determine the ARCH lag order and estimate an ARCH model
# 4.1: Choose the lag order from the PACF of the squared residuals
lag_delta_AR2_pacf = sm.tsa.stattools.pacf(delta_AR2, nlags=20)
lags = np.linspace(1, 20, 20)
plt.plot(lags, lag_delta_AR2_pacf[1:], linestyle='-', marker='o', mec='r')
# Approximate 95% confidence band for the PACF estimates.
band = 2 / np.sqrt(len(train))
plt.axhline(y=-band, linestyle='--', color='gray')
plt.axhline(y=band, linestyle='--', color='gray')
plt.title('Partial Autocorrelation of delta_AR^2')
plt.xlabel('Lag')
plt.show()
# 4.2: Estimate an ARCH(2) model on the training data
results_ARCH = arch.arch_model(train, vol='ARCH', p=2).fit()
print(results_ARCH.summary())
resid2 = results_ARCH.resid ** 2  # squared residuals of the ARCH fit
plt.plot(resid2, label='ARCH_resid^2')
plt.title('ARCH_resid^2')
plt.show()
# AC and PAC plots of the squared ARCH residuals
band = 2 / np.sqrt(len(train))
plot_specs = [
    (sm.tsa.stattools.acf(resid2, nlags=20), 'Autocorrelation of resid2'),
    (sm.tsa.stattools.pacf(resid2, nlags=20), 'Partial Autocorrelation of resid2'),
]
for values, plot_title in plot_specs:
    plt.plot(np.linspace(1, 20, 20), values[1:],
             linestyle='-', marker='o', mec='r')
    plt.axhline(y=-band, linestyle='--', color='gray')
    plt.axhline(y=band, linestyle='--', color='gray')
    plt.title(plot_title)
    plt.xlabel('Lag')
    plt.show()
# 4.3: Forecasting with the ARCH model.
# The original version of this section was disabled because it failed:
# arch's forecast() returns an ARCHModelForecast object, not a Series, so it
# cannot be assigned directly into a DataFrame column.  Extract the mean
# forecasts from the `.mean` DataFrame instead — the last row holds the
# h.1..h.10 step-ahead forecasts made at the end of the training sample.
# NOTE(review): for a constant-mean model these are all equal to mu; the
# volatility forecasts live in forecast_ARCH.variance — confirm which the
# assignment actually asks for.
forecast_ARCH = results_ARCH.forecast(horizon=10)
predicts_ARCH = pd.Series(forecast_ARCH.mean.iloc[-1].values, index=test.index)
compare_ARCH = pd.DataFrame()
compare_ARCH['original'] = test
compare_ARCH['predict_ARCH'] = predicts_ARCH
print('ARCH Predict results:', '\n', compare_ARCH)
plt.plot(compare_ARCH['original'], label='Original')
plt.plot(compare_ARCH['predict_ARCH'], label='ARCH_Predict')
plt.title('ARCH Model prediction results')
plt.legend(loc='best')
plt.show()

# 5: Estimate a GARCH(1,1) model
results_GARCH = arch.arch_model(train, vol='GARCH', p=1, o=0, q=1).fit()
print(results_GARCH.summary())
results_GARCH.hedgehog_plot()
plt.show()
garch_resid2 = np.square(results_GARCH.resid)  # squared residuals
plt.plot(garch_resid2, label='GARCH_resid^2')
plt.title('GARCH_resid^2')
plt.show()

# 6: Estimate a TGARCH (GJR-GARCH) model — the o=1 term adds the
# asymmetric (leverage) effect.
results_TGARCH = arch.arch_model(train, p=1, o=1, q=1).fit()
print(results_TGARCH.summary())
results_TGARCH.hedgehog_plot()
plt.show()
tgarch_resid2 = np.square(results_TGARCH.resid)  # squared residuals
plt.plot(tgarch_resid2, label='TGARCH_resid^2')
plt.title('TGARCH_resid^2')
plt.show()
Answer

            AC           Q       P-Value
lag                                     
1.0   0.102316    8.668003  3.238458e-03
2.0   0.196592   40.708061  1.446628e-09
3.0   0.168014   64.138495  7.666933e-14
4.0   0.105382   73.367460  4.411766e-15
5.0   0.101014   81.857440  3.428514e-16
6.0   0.022819   82.291207  1.200634e-15
7.0   0.116310   93.574563  2.278271e-17
8.0   0.057463   96.332047  2.394152e-17
9.0   0.077603  101.367302  8.321210e-18
10.0  0.066512  105.070758  5.241533e-18
11.0  0.062238  108.317518  3.969318e-18
12.0  0.098957  116.535396  3.025848e-19
13.0  0.098623  124.707967  2.354695e-20
14.0  0.040859  126.112457  3.969334e-20
15.0  0.074878  130.835052  1.459064e-20
该数列具有ARCH效应

Iteration:      1,   Func. Count:      6,   Neg. LLF: -1204.1577533945233
Iteration:      2,   Func. Count:     18,   Neg. LLF: -1204.3967008380887
Iteration:      3,   Func. Count:     26,   Neg. LLF: -1204.5198231895197
Iteration:      4,   Func. Count:     35,   Neg. LLF: -1204.519930870478
Iteration:      5,   Func. Count:     45,   Neg. LLF: -1204.5199440576448
Iteration:      6,   Func. Count:     53,   Neg. LLF: -1204.5211810050207
Iteration:      7,   Func. Count:     59,   Neg. LLF: -1204.5211831940467
Optimization terminated successfully.    (Exit mode 0)
            Current function value: -1204.521183194039
            Iterations: 7
            Function evaluations: 59
            Gradient evaluations: 7
                      Constant Mean - ARCH Model Results                      
==============================================================================
Dep. Variable:                      r   R-squared:                      -0.001
Mean Model:             Constant Mean   Adj. R-squared:                 -0.001
Vol Model:                       ARCH   Log-Likelihood:                1204.52
Distribution:                  Normal   AIC:                          -2401.04
Method:            Maximum Likelihood   BIC:                          -2382.18
                                        No. Observations:                  825
Date:                Wed, Nov 07 2018   Df Residuals:                      821
Time:                        16:15:56   Df Model:                            4
                                 Mean Model                                 
============================================================================
                 coef    std err          t      P>|t|      95.0% Conf. Int.
----------------------------------------------------------------------------
mu         3.8316e-03  1.934e-03      1.981  4.757e-02 [4.103e-05,7.622e-03]
                              Volatility Model                              
============================================================================
                 coef    std err          t      P>|t|      95.0% Conf. Int.
----------------------------------------------------------------------------
omega      2.4454e-03  3.178e-04      7.695  1.420e-14 [1.823e-03,3.068e-03]
alpha[1]       0.1227  5.547e-02      2.213  2.692e-02   [1.402e-02,  0.231]
alpha[2]       0.1443  5.028e-02      2.870  4.099e-03   [4.577e-02,  0.243]
============================================================================

Covariance estimator: robust

尝试了好多阶的ARCH模型,一直无法修正ARCH效应,但ARCH(2)系数均显著。此处以均值方程为常数,ARCH(2) 为结果。

ARCH(2)模型的公式如下:
r_t=0.0038+\widehat u_t\\t=(1.981)\\\sigma_t^2=0.0024+0.1227\widehat u_{t-1}^2+0.1443\widehat u_{t-2}^2\\t=(7.695)\qquad(2.213)\qquad(2.870)

Iteration:      1,   Func. Count:      6,   Neg. LLF: -1229.5751372250834
Iteration:      2,   Func. Count:     18,   Neg. LLF: -1230.006965151989
Iteration:      3,   Func. Count:     29,   Neg. LLF: -1230.0332704520888
Iteration:      4,   Func. Count:     36,   Neg. LLF: -1230.3681848149456
Iteration:      5,   Func. Count:     45,   Neg. LLF: -1230.3709021072323
Iteration:      6,   Func. Count:     52,   Neg. LLF: -1230.5382952399789
Iteration:      7,   Func. Count:     60,   Neg. LLF: -1230.5654441845859
Iteration:      8,   Func. Count:     67,   Neg. LLF: -1230.663627391191
Iteration:      9,   Func. Count:     74,   Neg. LLF: -1230.7278889989088
Iteration:     10,   Func. Count:     81,   Neg. LLF: -1230.7373775394299
Iteration:     11,   Func. Count:     88,   Neg. LLF: -1230.7376365561888
Iteration:     12,   Func. Count:     95,   Neg. LLF: -1230.7376481096876
Optimization terminated successfully.    (Exit mode 0)
            Current function value: -1230.7376481031679
            Iterations: 12
            Function evaluations: 106
            Gradient evaluations: 12
                     Constant Mean - GARCH Model Results                      
==============================================================================
Dep. Variable:                      r   R-squared:                      -0.001
Mean Model:             Constant Mean   Adj. R-squared:                 -0.001
Vol Model:                      GARCH   Log-Likelihood:                1230.74
Distribution:                  Normal   AIC:                          -2453.48
Method:            Maximum Likelihood   BIC:                          -2434.61
                                        No. Observations:                  825
Date:                Wed, Nov 07 2018   Df Residuals:                      821
Time:                        16:15:56   Df Model:                            4
                                 Mean Model                                 
============================================================================
                 coef    std err          t      P>|t|      95.0% Conf. Int.
----------------------------------------------------------------------------
mu         3.6032e-03  1.827e-03      1.973  4.854e-02 [2.306e-05,7.183e-03]
                              Volatility Model                              
============================================================================
                 coef    std err          t      P>|t|      95.0% Conf. Int.
----------------------------------------------------------------------------
omega      4.2152e-05  1.351e-05      3.121  1.802e-03 [1.568e-05,6.862e-05]
alpha[1]       0.0360  1.259e-02      2.863  4.201e-03 [1.137e-02,6.073e-02]
beta[1]        0.9517  1.522e-02     62.542      0.000     [  0.922,  0.982]
============================================================================

Covariance estimator: robust

从上图可以看出,GARCH模型的方差还原的效果还可以。

GARCH(1,1)模型的公式如下:
r_t=0.0036+\widehat u_t\\t=(1.973)\\\sigma_t^2=0.0000422+0.0360\widehat u_{t-1}^2+0.9517\sigma_{t-1}^2\\t=(3.121)\qquad(2.863)\qquad(62.542)

Iteration:      1,   Func. Count:      7,   Neg. LLF: -1227.7407305056854
Iteration:      2,   Func. Count:     20,   Neg. LLF: -1228.263982437867
Iteration:      3,   Func. Count:     30,   Neg. LLF: -1228.413993877993
Iteration:      4,   Func. Count:     41,   Neg. LLF: -1228.4180131947087
Iteration:      5,   Func. Count:     54,   Neg. LLF: -1228.5023007917416
Iteration:      6,   Func. Count:     63,   Neg. LLF: -1228.7323280890687
Iteration:      7,   Func. Count:     76,   Neg. LLF: -1228.7326268528036
Iteration:      8,   Func. Count:     89,   Neg. LLF: -1228.732632102148
Iteration:      9,   Func. Count:    101,   Neg. LLF: -1228.732751211943
Iteration:     10,   Func. Count:    113,   Neg. LLF: -1229.334557375506
Iteration:     11,   Func. Count:    125,   Neg. LLF: -1229.359889432208
Iteration:     12,   Func. Count:    135,   Neg. LLF: -1229.6628944968922
Iteration:     13,   Func. Count:    144,   Neg. LLF: -1229.7292233241137
Iteration:     14,   Func. Count:    151,   Neg. LLF: -1233.5840199489767
Iteration:     15,   Func. Count:    160,   Neg. LLF: -1235.1398071417655
Iteration:     16,   Func. Count:    169,   Neg. LLF: -1235.1998797103008
Iteration:     17,   Func. Count:    177,   Neg. LLF: -1235.6319255746955
Iteration:     18,   Func. Count:    184,   Neg. LLF: -1236.0054400949457
Iteration:     19,   Func. Count:    191,   Neg. LLF: -1236.1198537612077
Iteration:     20,   Func. Count:    198,   Neg. LLF: -1236.1411202229565
Iteration:     21,   Func. Count:    205,   Neg. LLF: -1236.1437339615147
Iteration:     22,   Func. Count:    212,   Neg. LLF: -1236.1438561196142
Iteration:     23,   Func. Count:    219,   Neg. LLF: -1236.1438829462534
Optimization terminated successfully.    (Exit mode 0)
            Current function value: -1236.143882945769
            Iterations: 26
            Function evaluations: 219
            Gradient evaluations: 23
                   Constant Mean - GJR-GARCH Model Results                    
==============================================================================
Dep. Variable:                      r   R-squared:                      -0.000
Mean Model:             Constant Mean   Adj. R-squared:                 -0.000
Vol Model:                  GJR-GARCH   Log-Likelihood:                1236.14
Distribution:                  Normal   AIC:                          -2462.29
Method:            Maximum Likelihood   BIC:                          -2438.71
                                        No. Observations:                  825
Date:                Wed, Nov 07 2018   Df Residuals:                      820
Time:                        16:15:57   Df Model:                            5
                                 Mean Model                                 
============================================================================
                 coef    std err          t      P>|t|      95.0% Conf. Int.
----------------------------------------------------------------------------
mu         4.6352e-03  2.097e-03      2.210  2.710e-02 [5.246e-04,8.746e-03]
                               Volatility Model                              
=============================================================================
                 coef    std err          t      P>|t|       95.0% Conf. Int.
-----------------------------------------------------------------------------
omega      3.2223e-05  5.937e-06      5.428  5.703e-08  [2.059e-05,4.386e-05]
alpha[1]       0.0422  1.046e-02      4.034  5.488e-05  [2.168e-02,6.267e-02]
gamma[1]      -0.0322  2.400e-02     -1.758  7.881e-02 [-8.921e-02,4.855e-03]
beta[1]        0.9679  1.568e-02     61.748      0.000      [  0.937,  0.999]
=============================================================================

Covariance estimator: robust

TGARCH(1,1)模型的公式如下:
r_t=0.0046+\widehat u_t\\t=(2.210)\\\sigma_t^2=0.0000322+0.0422\widehat u_{t-1}^2-0.0322\widehat u_{t-1}^2d_{t-1}+0.9679\sigma_{t-1}^2\\t=(5.428)\qquad(4.034)\qquad(-1.758)\qquad(61.748)
对模型的评价:在上述模型中,非对称效应项的系数 \gamma 显著不等于零,说明 r_t的波动具有非对称效应。因为 \gamma=-0.0322,小于零,说明“好消息”能比“坏消息”产生更大的波动。当出现“好消息”时,u_{t-1}>0 ,则d_{t-1}=0 ,所以该冲击会给 r_t 带来一个\alpha_1=0.0422 倍的冲击,而出现“坏消息”时,u_{t-1}<0 ,则d_{t-1}=1 ,则“坏消息”会给 r_t 带来一个 \alpha_1+\gamma=0.01 倍的冲击。

预测收益率:我将数据划分为训练集和测试集,根据训练集的数据进行建模然后取测试集中的数据进行检验测试的效果。

相关推荐

发表评论

电子邮件地址不会被公开。 必填项已用*标注

微信扫一扫

微信扫一扫,分享到朋友圈

Financial Statistics and Econometrics: Homework 3 by Python
返回顶部

显示

忘记密码?

显示

显示

获取验证码

Close