Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
#!/usr/bin/env python
- #
- # VStock daemon - v.1.0
- # voku.xyz
- import mysql.connector
- import datetime
- from datetime import timedelta
- import re
- import os
- import quandl
- import sys
- import string
- import time
- import random
# UBER
# Shared MySQL connection + cursor used by every function in this module.
# NOTE(review): credentials are hard-coded in plaintext (root, no database
# selected) -- move user/password into environment variables or a config file
# kept out of version control.
conn = mysql.connector.connect(user='root',host='localhost',password='fuckpasswords')
cursor = conn.cursor()
# keys is never written in this file -- presumably filled elsewhere; verify.
keys = []
# Default database holding user/cache/portfolio metadata.
miscdb = 'vsmisc'
# NOTE(review): the Quandl API key is likewise committed in plaintext.
quandl.ApiConfig.api_key = 'vr8w7D5UrxJXGmb_7ZQi'
# Absolute directory containing this script.
path = os.path.dirname(os.path.realpath(__file__))
# Initialize the database structure
def initDBStructure():
    """Create the VStock databases and the metadata tables.

    Databases created:
      vsmisc  -> user data, cache index, API key storage, portfolios
      vscache -> cache store
      vslog   -> read/write logging
      (vstock, the stock-data database, is assumed to exist already --
       its CREATE was deliberately left commented out below.)

    Tables created inside `vsmisc`:
      vusers   -> user storage
      pf       -> portfolios
      cache    -> quick cache index, truncated each day
      apistore -> API key retrieval, account info and more

    Returns:
        True once every statement has been executed and committed.
    """
    tables = [
        'CREATE TABLE `vusers`(id INT NOT NULL AUTO_INCREMENT, uid TEXT NOT NULL, email TEXT NOT NULL, `hash` TEXT NOT NULL, useragent TEXT NOT NULL, ip TEXT NOT NULL, PRIMARY KEY(id))',
        'CREATE TABLE `pf` (id INT NOT NULL AUTO_INCREMENT, pid TEXT NOT NULL, `creator` TEXT NOT NULL, `views` INT NOT NULL, `stocks` TEXT NOT NULL, `rank` INT, `created` INT NOT NULL, PRIMARY KEY(id))',
        'CREATE TABLE `cache` (id INT NOT NULL AUTO_INCREMENT, cid TEXT NOT NULL, `description` TEXT NOT NULL, `uid` TEXT NOT NULL, `created` INT NOT NULL, `expiration` INT NOT NULL, PRIMARY KEY(id))',
        'CREATE TABLE `apistore`(id INT NOT NULL AUTO_INCREMENT, `uid` TEXT NOT NULL, `aid` TEXT NOT NULL, `public` TEXT NOT NULL, `private` TEXT NOT NULL, PRIMARY KEY(id))'
    ]
    dbs = [
        #'CREATE DATABASE `vstock`',
        'CREATE DATABASE `vsmisc`',
        'CREATE DATABASE `vscache`',
        'CREATE DATABASE `vslog`'
    ]
    # Create the databases. r'' raw string fixes the invalid \w escape
    # sequence the original non-raw pattern produced.
    for stmt in dbs:
        print('Creating Database %s' % re.findall(r'`\w+`', stmt)[0].replace('`', ''))
        cursor.execute(stmt)
        conn.commit()
    # The metadata tables live in `vsmisc` (the original comment wrongly
    # said vstock).
    cursor.execute('USE `vsmisc`')
    conn.commit()
    for stmt in tables:
        print('Initializing table: %s' % re.findall(r'`\w+`', stmt)[0].replace('`', ''))
        cursor.execute(stmt)
        conn.commit()
    return True
# Purge the cache server
def purgeCache():
    """Wipe every cached table by dropping and recreating `vscache`."""
    for statement in ('DROP DATABASE `vscache`;', 'CREATE DATABASE `vscache`'):
        cursor.execute(statement)
    conn.commit()
# Search data in the cache database by the description / user
def searchCache(query, userdata):
    """Look up a cache id in vsmisc.cache.

    Searches by description first; if nothing matches, falls back to
    searching by user id.

    Args:
        query:    description text to match against `cache.description`.
        userdata: user id to match against `cache.uid` on fallback.

    Returns:
        The first matching `cid` row, or False when neither search matches.
    """
    cursor.execute('USE `vsmisc`')
    # Parameterized queries: the original interpolated user-supplied text
    # straight into the SQL string (injection risk).
    cursor.execute("SELECT `cid` FROM `cache` WHERE description = %s", (query,))
    row = cursor.fetchone()
    if row is not None:
        return row
    # BUG FIX: the original (a) interpolated `query` into the uid search
    # instead of `userdata`, and (b) only attempted the uid fallback from
    # inside the description loop, i.e. never when the first search came
    # back empty.
    cursor.execute("SELECT `cid` FROM `cache` WHERE uid = %s", (userdata,))
    row = cursor.fetchone()
    if row is not None:
        return row
    return False
# Chronologically order data then cache.
def chrono(ticker):
    """Read every record of `ticker` from vstock, ordered by date (newest first).

    Args:
        ticker: stock symbol; doubles as the table name in the `vstock` db.

    Returns:
        A list of four parallel, chronologically ordered lists:
        [opens, closes, lows, highs].
    """
    sopen = []
    sclose = []
    shigh = []
    slow = []
    cursor.execute('USE `vstock`')
    # BUG FIX: the original wrote ORDER BY 'date' -- a constant string
    # literal, which MySQL treats as ordering by a constant, i.e. no
    # ordering at all. The column must be a backtick-quoted identifier.
    # NOTE(review): table names cannot be parameterized; `ticker` must be
    # validated upstream (e.g. against symbols.vk) before reaching here.
    sortQuery = "SELECT low, high, open, close FROM `%s` ORDER BY `date` DESC" % (ticker,)
    cursor.execute(sortQuery)
    for (low, high, stock_open, close) in cursor:
        sopen.append(stock_open)
        sclose.append(close)
        slow.append(low)
        shigh.append(high)
    return [sopen, sclose, slow, shigh]
# Create a 'symbols.vk' file with all the stock symbols currently in the database from a delta file
def mkSymbols(delta):
    """Write 'symbols.vk': one line per unique symbol found in a delta CSV.

    The symbol is the first comma-separated field of each line of `delta`;
    first-seen order is preserved and each new symbol is echoed to stdout.

    Args:
        delta: path to the delta CSV file. If it does not exist, the
               existing symbols.vk (if any) is left untouched.
    """
    if not os.path.isfile(delta):
        return
    symb = []
    seen = set()  # O(1) membership test; the original list scan was O(n^2)
    with open(delta) as f:
        for line in f:
            sym = line.split(',')[0]
            if sym not in seen:
                print(sym)
                seen.add(sym)
                symb.append(sym)
    # BUG FIX: the original opened in append mode, so every run duplicated
    # the full symbol list and inflated nSymbols(); rewrite the file instead.
    with open('symbols.vk', 'w') as f:
        for sym in symb:
            f.write(sym + '\n')
# get the number of indices
# we recalculate each time because the compute cost is less than the risk of having an inaccurate ticker count
def nSymbols():
    """Count the ticker symbols (lines) in 'symbols.vk'.

    Returns:
        The line count (0 for an empty file), or -1 if the file is missing.
    """
    if not os.path.isfile('symbols.vk'):
        return -1
    # BUG FIX: the original returned `i + 1` with `i` bound only inside the
    # loop, so an empty file raised NameError; count explicitly instead.
    count = 0
    with open('symbols.vk') as f:
        for count, _ in enumerate(f, 1):
            pass
    return count
# Main Simulation function.
#
# ** Note - stock data must be chronologically organized, which chrono() does.
def sim(uid, ticker, priceObj):
    """Compute per-period percentage changes for a ticker and cache them.

    Args:
        uid:      user id recorded in the cache index.
        ticker:   stock symbol whose history is loaded via chrono().
        priceObj: 4-element sequence:
                    [0] value            : int
                    [1] start_comparison : date
                    [2] end_comparison   : date
                    [3] frequency        : 'd' | 'w' | 'm' | 'y' | 'D'

    Returns:
        avg -- four parallel lists (open, close, low, high) of
        percentage changes relative to the previous record, each with a
        final averaged entry appended.

    Side effects: creates a random 12-char table in `vscache` holding the
    series, and indexes it in vsmisc.cache with a 5-day expiration.
    """
    # TODO(review): these are read but not yet used -- the date range and
    # frequency filtering is presumably still to be implemented; confirm.
    val = priceObj[0]
    scomp = priceObj[1]
    ecomp = priceObj[2]
    freq = priceObj[3]

    # load the chronologically ordered stock data: [opens, closes, lows, highs]
    obj = chrono(ticker)
    compare = [obj[0][0], obj[1][0], obj[2][0], obj[3][0]]
    perc = [0, 0, 0, 0]
    avg = [[], [], [], []]

    # random 12-character id naming the cache table
    cacheID = ''.join(random.choice('0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ') for _ in range(12))
    timestamp = time.time()
    expTime = datetime.datetime.fromtimestamp(timestamp) + datetime.timedelta(days=5)

    # Percentage change of each record relative to the previous one,
    # per category.
    for (a, b, c, d) in zip(obj[0], obj[1], obj[2], obj[3]):
        perc[0] = (compare[0] - a) / compare[0]
        perc[1] = (compare[1] - b) / compare[1]
        perc[2] = (compare[2] - c) / compare[2]
        perc[3] = (compare[3] - d) / compare[3]
        # BUG FIX: the original wrote `for d in range(4): avg[i].append(perc[i])`,
        # which shadowed the zip variable d (so compare[3] was then set to the
        # int 3) and, with i stuck at 0, appended only perc[0] four times.
        for j in range(4):
            avg[j].append(perc[j])
        compare[0], compare[1], compare[2], compare[3] = a, b, c, d

    cursor.execute('USE `vscache`')
    # Table names cannot be parameterized; cacheID is generated locally from
    # a fixed alphabet, so interpolation is safe here.
    cacheCreateQuery = "CREATE TABLE `%s`(`id` INT NOT NULL AUTO_INCREMENT, `avg` FLOAT NOT NULL, PRIMARY KEY(id))" % (cacheID,)
    print(cacheCreateQuery)
    cursor.execute(cacheCreateQuery)

    # Append the final average to each series (behavior kept from the
    # original: the mean of the last low/high percentages for all four).
    for j in range(4):
        avg[j].append((perc[2] + perc[3]) / 2)

    # Cache each series. BUG FIX: removed the leftover debug block
    # (print/sys.exit) that aborted the process here, and the broken
    # `print('...') % (cacheID)` which applied % to print's None return.
    for series in avg:
        print('Cache Insert --> %s' % (cacheID,))
        cursor.execute("INSERT INTO `%s` (id, avg) VALUES (NULL, %%s)" % (cacheID,), (str(series),))
    conn.commit()

    # Finally index the cache entry. BUG FIX: the original inserted into a
    # nonexistent table named ticker+'_'+timestamp (a str+float TypeError
    # anyway); the schema's index table is vsmisc.cache, and string values
    # are now bound as parameters instead of unquoted interpolation.
    cursor.execute('USE `vsmisc`')
    cursor.execute(
        "INSERT INTO `cache` (`id`, `cid`, `description`, `uid`, `created`, `expiration`) "
        "VALUES (NULL, %s, %s, %s, %s, %s)",
        (cacheID, 'AVG_ASC', uid, timestamp, expTime.timestamp()),
    )
    conn.commit()
    return avg
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement