import psycopg2
import matplotlib.pyplot as plt
import numpy as np
from sklearn.svm import SVR
from sklearn.decomposition import PCA

def normalize(v):
    # Scale a vector to unit L2 norm; return it unchanged if it is all zeros.
    norm = np.linalg.norm(v)
    if norm == 0:
        return v
    return v / norm

def getMuestra(n):
    # Fetch one raw sample for the given soil-type id and return its 8 channels,
    # each trimmed and scaled to unit norm.
    conn = psycopg2.connect(host="127.0.0.1", database="TPAS_FIX", user="postgres", password="pass")
    cur = conn.cursor()
    cur.execute("SELECT * FROM muestravraw WHERE idmuestratiposuelo = %s", (n,))
    print("The number of raw samples:", cur.rowcount)
    if cur.rowcount < 1:
        return 0
    row = cur.fetchone()
    m = {}
    for i in range(8):
        var = row[i + 2]
        var = var[510:]          # discard the first 510 points of the raw signal
        m[i] = normalize(var)
    return m

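# Example usage of getMuestra (not in the original paste; the sample id 120 is
# taken from the "#120" notes next to the queries and is only illustrative):
#sample = getMuestra(120)
#if sample:
#    plt.plot(sample[0])   # plot the first normalized channel
#    plt.show()
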
def getAllMuestras():
    # Fetch every raw sample and return {sample_id: {channel_index: normalized band vector}}.
    conn = psycopg2.connect(host="127.0.0.1", database="TPAS_FIX", user="postgres", password="pass")
    cur = conn.cursor()
    cur.execute("SELECT * FROM muestravraw")
    print("The number of raw samples:", cur.rowcount)
    bd = {}
    while True:
        row = cur.fetchone()
        if row is None:
            break
        m = {}
        for j in range(8):
            var = row[j + 2]
            var = var[510:]          # discard the first 510 points of the raw signal
            var1 = var[35:70]        # keep two sub-bands of what remains
            var2 = var[110:170]
            var1.extend(var2)
            m[j] = normalize(var1)
        bd[str(row[0])] = m
    return bd

def getValues(muestra):
    # Return the measured value of element 21 from the chemical analysis of the
    # given sample, or None if no such row exists.
    conn = psycopg2.connect(host="127.0.0.1", database="TPAS_FIX", user="postgres", password="pass")
    cur = conn.cursor()
    cur.execute("SELECT * FROM analisisquimico WHERE idmuestratiposuelo = %s", (muestra,))
    row = cur.fetchone()
    idQ = row[0]
    # The original query concatenated the id directly onto "and", producing invalid SQL
    # ("... = 5and idelemento = 21"); the parameterized form below fixes that.
    cur.execute("SELECT * FROM analisisquimicoelemento WHERE idanalisisquimico = %s AND idelemento = 21", (idQ,))
    row = cur.fetchone()
    return row[2] if row is not None else None

dic = getAllMuestras()
l = []
X = []
y = []
for key in dic:
    X.append(np.multiply(dic[key][0], 100000))   # feature vector: channel 0 only, rescaled
    l.append(key)
    y.append(getValues(key))                     # target: element 21 value for this sample

# Quick visual check of a few feature vectors.
plt.plot(X[30])
plt.plot(X[10])
plt.plot(X[100])
plt.show()

print(len(X))
print(len(y))
X.pop(46)   # drop the sample at index 46
y.pop(46)
pca = PCA(n_components=5)
X = pca.fit(X).transform(X)   # reduce each feature vector to 5 principal components

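# Optional check (not in the original paste): report how much of the variance
# the 5 retained principal components explain.
print("PCA explained variance ratio:", pca.explained_variance_ratio_.sum())
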
k = 80                      # first k samples for training, the rest for testing
y = np.multiply(y, 1)       # coerce the target list into a NumPy array
X_train = X[:k]
y_train = y[:k]
X_test = X[k:110]
y_test = y[k:110]

X1 = np.asarray(X_train)
y1 = np.asarray(y_train)
y_test = np.asarray(y_test)
t = np.linspace(1.0, k, num=k)   # sample-index axis for the training plot
#svr_rbf = SVR(kernel='rbf', C=1e9, gamma=0.00001)
svr_rbf = SVR(kernel='poly', C=1e3, degree=5)
#svr_rbf = SVR(kernel='linear', C=1e3)

# Fit on the training split and compare the fitted values against the training targets.
y_rbf = svr_rbf.fit(X1, y1).predict(X1)
plt.scatter(y1, t, color='darkorange', label='data')
plt.scatter(y_rbf, t, color='navy', label='sim')
plt.legend()
plt.show()
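
# Optional diagnostics (not in the original paste): summarize the training fit with
# sklearn's standard regression metrics instead of only eyeballing the scatter plot.
from sklearn.metrics import mean_absolute_error, r2_score
print("train MAE:", mean_absolute_error(y1, y_rbf))
print("train R^2:", r2_score(y1, y_rbf))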

# Evaluate on the held-out test split.
y_sim = svr_rbf.predict(X_test)
y_ori = y_test
t_comp = np.linspace(1, len(y_ori), len(y_ori))
plt.scatter(y_sim, t_comp, label="SIMULATED DATA")
plt.scatter(y_ori, t_comp, label="DATA")
plt.legend()
plt.show()

# Same test-set comparison, with the colour scheme used for the training plot.
t2 = np.linspace(1, len(y_test), len(y_test))
y_sim = svr_rbf.predict(X_test)
plt.scatter(y_sim, t2, color='darkorange', label='sim')
plt.scatter(y_test, t2, color='navy', label='data')
plt.legend()
plt.show()

# Percentage error of each test prediction.
# (The original compared y_test[i] against y_train[i], which mixes up the two splits;
#  the prediction y_sim[i] is what should be compared against y_test[i].)
dif = []
for i in range(len(y_test)):
    d = abs(y_test[i] - y_sim[i])
    res = d * 100.0 / y_test[i]
    dif.append(res)

plt.scatter(t2, dif)
plt.show()
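
# Optional summary (not in the original paste): average and worst-case percentage
# error over the test split.
print("mean % error:", np.mean(dif))
print("max % error:", np.max(dif))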

#plt.scatter(err, t, label='percentage error')
#plt.show()