Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- # -*- coding: utf-8 -*-
- """
- Created on Wed Jan 15 23:52:05 2020
- @author: M. Ihsanul Qamil
- """
- import csv
- import pandas as pd
- import numpy as np
- #import math
- import matplotlib.pyplot as plt
- import random as Rand
- from pandas import DataFrame
- from sklearn.model_selection import train_test_split
- import pdb
- import time
# CPU-time baseline used by the timing prints throughout the script.
nstart=time.process_time()
# pdb.set_trace()
# import IPython as IP
# Daily stock data; later code reads the 'Close', 'Tanggal' and 'Index' columns.
data = pd.read_csv("TLKM.csv")
def Distancetrain(d3, d2, d1):
    """Pairwise squared Euclidean distances between training samples.

    Parameters
    ----------
    d3, d2, d1 : pd.Series
        The three lag features (Day_3, Day_2, Day_1) of the training set,
        all of the same length n.

    Returns
    -------
    pd.DataFrame
        Symmetric (n x n) matrix whose cell [j, i] is
        (d3_i-d3_j)**2 + (d2_i-d2_j)**2 + (d1_i-d1_j)**2.
    """
    start = time.process_time()
    n = len(d3.index)
    # Vectorized replacement of the original O(n^2) Python loop over .iloc:
    # broadcasting (v[:, None] - v[None, :]) builds each pairwise difference
    # matrix in a single NumPy operation per feature.
    total = np.zeros((n, n), dtype=float)
    for feature in (d3, d2, d1):
        v = feature.to_numpy(dtype=float)
        total += (v[:, None] - v[None, :]) ** 2
    distancedata = pd.DataFrame(total)
    print("distance train")
    # Timing is now local to the call instead of relying on the module-level
    # 'nstart' global (the original printed elapsed time since script start).
    print(time.process_time() - start)
    return distancedata
def Distancetest(d3train, d2train, d1train, d3test, d2test, d1test):
    """Squared Euclidean distances between every test and train sample.

    Parameters
    ----------
    d3train, d2train, d1train : pd.Series
        Lag features of the training set (length n_train).
    d3test, d2test, d1test : pd.Series
        Lag features of the test set (length n_test).

    Returns
    -------
    pd.DataFrame
        (n_test x n_train) matrix; cell [j, i] is the squared distance
        between test sample j and train sample i, matching the original
        loop's orientation (column i = train index, row j = test index).
    """
    start = time.process_time()
    total = np.zeros((len(d3test.index), len(d3train.index)), dtype=float)
    # One broadcasted subtraction per feature replaces the nested .iloc loop.
    for train_f, test_f in ((d3train, d3test), (d2train, d2test), (d1train, d1test)):
        tr = train_f.to_numpy(dtype=float)
        te = test_f.to_numpy(dtype=float)
        total += (te[:, None] - tr[None, :]) ** 2
    distancedata = pd.DataFrame(total)
    print("distance test")
    # Per-call timing instead of the original module-global 'nstart' offset.
    print(time.process_time() - start)
    return distancedata
def Hessian(dfdistance, sigma, lamda):
    """RBF kernel (Hessian) matrix with lambda^2 added to every entry.

    hes[i, j] = exp(-dist[i, j] / (2*sigma**2)) + lamda**2

    Parameters
    ----------
    dfdistance : pd.DataFrame
        Squared-distance matrix from Distancetrain/Distancetest.
    sigma : float
        RBF kernel width.
    lamda : float
        Regularization term; its square is added elementwise.

    Returns
    -------
    pd.DataFrame with the same shape as dfdistance and default 0..n labels.
    """
    start = time.process_time()
    sig2 = 2 * (sigma ** 2)
    lam2 = lamda ** 2
    # Single vectorized expression replaces the per-cell loop whose
    # repeated np.vstack made the original quadratic in allocations.
    dfhessian = pd.DataFrame(np.exp(-dfdistance.to_numpy(dtype=float) / sig2) + lam2)
    print("hessian")
    # Per-call timing instead of the original module-global 'nstart' offset.
    print(time.process_time() - start)
    return dfhessian
def Seqlearn(y, dfhessian, gamma, eps, c, itermaxsvr):
    """Sequential-learning training of SVR Lagrange multipliers.

    Iteratively updates alpha (a) and alpha* (a_s) until both update
    steps fall below eps in absolute value, or itermaxsvr is reached.

    Parameters: y is the (normalized) training target Series, dfhessian
    the training kernel/Hessian DataFrame, gamma the learning rate, eps
    the convergence threshold (also the epsilon-tube width in the update
    formula), c the box constraint, itermaxsvr the iteration cap.
    Returns (la, la_s): one-row containers holding the final alpha and
    alpha* values (indexed as la[0][k]).
    """
    d=len(dfhessian.index)
    # Histories: row i holds the multiplier values entering iteration i.
    a = [[0] * d]
    a_s = [[0] * d]
    la = [[0] * d]      # last alpha row (returned)
    la_s = [[0] * d]    # last alpha* row (returned)
    E = np.array([], dtype=np.float64).reshape(0,d)   # error history, one row per iteration
    Etemp = [[0] * d]
    da_s = np.array([], dtype=np.float64).reshape(0,d)  # history of alpha* steps
    da = np.array([], dtype=np.float64).reshape(0,d)    # history of alpha steps
    dat_s = [[0] * d]   # current-iteration alpha* step (reused buffer)
    dat = [[0] * d]     # current-iteration alpha step (reused buffer)
    tempas = [[0] * d]  # updated alpha* values (reused buffer)
    tempa = [[0] * d]   # updated alpha values (reused buffer)
    for i in range(itermaxsvr):
        # E_j = y_j - sum_k (alpha*_k - alpha_k) * H[j][k]
        for j in range(d):
            Rijhelp=0
            for k in range(d):
                Rijhelp = Rijhelp + ((a_s[i][k] - a[i][k])*(dfhessian.iloc[j][k]))
            Etemp[0][j]= y.iloc[j] - Rijhelp
        E=np.vstack([E, Etemp])
        # Steps are clipped so the updated multipliers stay in [0, c].
        for l in range(d):
            dat_s[0][l]=min(max(gamma*(E[i][l] - eps), -1*(a_s[i][l])), (c - a_s[i][l]))
            dat[0][l]=min(max(gamma*(-(E[i][l]) - eps), -1*(a[i][l])), (c - a[i][l]))
            tempas[0][l]= a_s[i][l] + dat_s[0][l]
            tempa[0][l]= a[i][l] + dat[0][l]
        da_s=np.vstack([da_s, dat_s])
        da=np.vstack([da, dat])
        # vstack copies the buffers, so reusing tempa/tempas next iteration is safe.
        a=np.vstack([a, tempa])
        a_s=np.vstack([a_s, tempas])
        la=tempa
        la_s=tempas
        # Stop when (|da|<eps and |da*|<eps) or the iteration cap is hit.
        dat_abs=max([abs(xdat) for xdat in dat[0]])
        dat_s_abs=max([abs(xdats) for xdats in dat_s[0]])
        print(dat_abs)
        print(dat_s_abs)
        if (dat_abs < eps) and (dat_s_abs < eps):
            print(time.process_time()-nstart)
            break
    print(time.process_time()-nstart)
    return la, la_s
def Predictf(a, a_s, dfhessian):
    """Predict targets: for each row j, sum_k (alpha*_k - alpha_k) * H[j][k].

    Parameters
    ----------
    a, a_s : one-row containers (la, la_s from Seqlearn)
        Final alpha and alpha* multipliers, accessed as a[0][k].
    dfhessian : pd.DataFrame
        Test kernel matrix, (n_test x n_train).

    Returns
    -------
    np.ndarray of shape (n_test, 1).  The original returned a bare scalar
    when dfhessian had exactly one row; a column vector is now returned
    consistently for any row count.
    """
    start = time.process_time()
    # Matrix-vector product replaces the original double Python loop.
    weights = np.asarray(a_s[0], dtype=float) - np.asarray(a[0], dtype=float)
    dataxm = dfhessian.to_numpy(dtype=float) @ weights
    # Preserve the (n, 1) column shape that np.vstack produced before.
    dataxm = dataxm.reshape(-1, 1)
    print("predict")
    # Per-call timing instead of the original module-global 'nstart' offset.
    print(time.process_time() - start)
    return dataxm
def Normalization(datain, closemax, closemin):
    """Min-max scale *datain* into [0, 1] using the given close-price range."""
    span = closemax - closemin
    return (datain - closemin) / span
def SVRf(df, closemax, closemin, c, lamda, eps, sigma, gamma, itermaxsvr):
    """Full SVR pipeline: normalize, split, train, predict, evaluate.

    Normalizes the lag features and target with the global close range,
    splits 90/10 chronologically (shuffle=False), trains multipliers with
    Seqlearn, predicts the test set, de-normalizes, and reports MAPE plus
    the fitness value 1/(1+MAPE).

    Returns (fitness, mape, hasilpre) where hasilpre is a DataFrame with
    test dates ('Tanggal') and still-normalized predictions ('Close').
    """
    # Min-max normalize features and target into [0, 1].
    result = df.assign(Day_3 = Normalization(df.Day_3, closemax, closemin), Day_2=Normalization(df.Day_2, closemax, closemin), Day_1=Normalization(df.Day_1, closemax, closemin), Actual=Normalization(df.Actual, closemax, closemin))
    # shuffle=False keeps chronological order: first 90% train, last 10% test.
    X_train, X_test, y_train, y_test, d3_train, d3_test, d2_train, d2_test, d1_train, d1_test, date_train, date_test = train_test_split(result['Index'], result['Actual'], result['Day_3'], result['Day_2'], result['Day_1'], result['Date'], train_size=0.9, test_size=0.1, shuffle=False)
    distancetrain=Distancetrain(d3_train, d2_train, d1_train)
    mhessian=Hessian(distancetrain, sigma, lamda)
    # a, a_s: final alpha / alpha* Lagrange multipliers.
    a, a_s = Seqlearn(y_train, mhessian, gamma, eps, c, itermaxsvr)
    distancetest=Distancetest(d3_train, d2_train, d1_train, d3_test, d2_test, d1_test)
    testhessian=Hessian(distancetest, sigma, lamda)
    predict = Predictf(a, a_s, testhessian)
    hasilpre=pd.DataFrame()
    tgltest = date_test
    tgltest.reset_index(drop=True, inplace=True)
    hasilpre['Tanggal'] = tgltest
    hasilpre['Close'] = predict
    # De-normalize predictions back to the price scale.
    deresult = hasilpre.assign(Close=(hasilpre.Close * (closemax - closemin) + closemin))
    n=len(y_test)
    # De-normalize the actual test targets the same way.
    aktualtest = (y_test * (closemax - closemin)) + closemin
    aktualtest.reset_index(inplace=True, drop=True)
    dpredict = pd.Series(deresult['Close'], index=deresult.index)
    # MAPE = (1/n) * sum(|actual - predicted| / actual)
    hasil = aktualtest - dpredict
    hasil1 = (hasil / aktualtest).abs()
    suma = hasil1.sum()
    mape = (1/n) * suma
    print("MAPE")
    print(mape)
    fitness = 1/(1+mape)
    print(fitness)
    return fitness, mape, hasilpre
# --- Build the supervised dataset: predict a day's close from the 3 previous closes ---
Closemax=data['Close'].max()
Closemin=data['Close'].min()
print(Closemax)
print(Closemin)
# Lag construction: slices drop 3/2/1 trailing rows (and leading rows for the
# shifted series), then the indices are shifted back so that row t aligns
# Day_3=close[t], Day_2=close[t+1], Day_1=close[t+2], Actual=close[t+3].
day3 = data['Close'][0:((-1)-2)]
day2 = data['Close'][1:((-1)-1)]
day2.index = day2.index - 1
day1 = data['Close'][2:((-1)-0)]
day1.index = day1.index - 2
dayact = data['Close'][3:]
dayact.index = dayact.index - 3
dateact = data['Tanggal'][3:]
dateact.index = dateact.index - 3
# 'Date' is the date of the target (Actual) day.
mydata = pd.DataFrame({'Index':data['Index'][0:((-1)-2)], 'Date':dateact, 'Day_3':day3, 'Day_2':day2, 'Day_1':day1, 'Actual':dayact})
print("data proses",time.process_time()-nstart)
# SVR hyper-parameters (fixed, presumably hand-tuned — no search is done here).
Lamda=0.09
C=200
Eps=0.0013
Sigma=0.11
Gamma=0.004
Itermaxsvr=1000
SVRf(mydata, Closemax, Closemin, C, Lamda, Eps, Sigma, Gamma, Itermaxsvr)
# Total CPU time for the whole run.
nstop=time.process_time()
print(nstop-nstart)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement