Advertisement
Guest User

Untitled

a guest
Feb 21st, 2020
144
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 7.51 KB | None | 0 0
  1. # return int(s[::-1].encode('hex'), 16) if s else 0
  2. f.write("\n")
  3. f.close()
  4.  
  5. # Use a low-round version of randmemohash
  6. for _ in range(CACHE_ROUNDS):
  7. for i in range(n):
  8. v = o[i][0] % n
  9. lala =o[(i-1+n) % n]
  10. o[i] = sha3_512(map(xor, o[(i-1+n) % n], o[v]))
  11. col1item = map(hex, o[i])
  12. g.write(str(col1item[1]))
  13. g.write("\n")
  14. return o
  15.  
  16. ########### Make a fake light cache with all 0's
  17. ##########def mkfakecache(cache_size, seed):
  18. ##########n = cache_size // HASH_BYTES
  19.  
  20. ########### Sequentially produce the initial dataset
  21. ###########o = [sha3_512(seed)]
  22. ##########o = [0]
  23. ##########for i in range(1, n):
  24. ##########o.append(0)
  25. ##########return o
  26.  
  27. FNV_PRIME = 0x01000193
  28.  
  29. def fnv(v1, v2):
  30. return ((v1 * FNV_PRIME) ^ v2) % 2**32
  31.  
  32. def calc_dataset_item(cache, i):
  33. n = len(cache)
  34. r = HASH_BYTES // WORD_BYTES
  35. # initialize the mix
  36. mix = copy.copy(cache[i % n])
  37. mix[0] ^= i
  38. mix = sha3_512(mix)
  39. # fnv it with a lot of random cache nodes based on i (256 times)
  40. for j in range(DATASET_PARENTS):
  41. cache_index = fnv(i ^ j, mix[j % r])
  42. lala123 = cache[cache_index % n]
  43. mix = map(fnv, mix, cache[cache_index % n])
  44. return sha3_512(mix)
  45.  
  46. # Pad dataset with all zeros
  47. def calc_fake_dataset_item(cache, i):
  48. #return an array of 16 four-byte numbers, 16 * 4 * 8 = 512
  49. #In the original dataset, you'd get the hash digest of a sha3_512
  50. return [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
  51.  
  52. #Create entire DAG
  53. def calc_dataset(full_size, cache):
  54. print("Create Full DAG")
  55. return [calc_dataset_item(cache, i) for i in range(full_size // HASH_BYTES)]
  56.  
  57. def calc_fake_dataset(full_size, cache):
  58. return [calc_fake_dataset_item(cache, i) for i in range(full_size // HASH_BYTES)]
  59.  
  60. #Hashimoto is the "search function"
  61. def hashimoto(header, nonce, full_size, dataset_lookup):
  62. n = full_size / HASH_BYTES
  63. w = MIX_BYTES // WORD_BYTES
  64. mixhashes = MIX_BYTES / HASH_BYTES
  65. # combine header+nonce into a 64 byte seed
  66. nonce = encode_int(nonce)
  67. #Before we start mix, gotta do a round of Keccak first
  68. s = sha3_512(header + nonce[::-1])
  69. # start the mix with replicated s
  70. mix = []
  71. for _ in range(MIX_BYTES / HASH_BYTES):
  72. mix.extend(s)
  73. print(mix)
  74. print(binascii.hexlify(serialize_hash(mix)))
  75. # mix in random dataset nodes
  76. for i in range(ACCESSES):
  77. p = fnv(i ^ s[0], mix[i % w]) % (n // mixhashes) * mixhashes
  78. newdata = []
  79. for j in range(MIX_BYTES / HASH_BYTES):
  80. newdata.extend(dataset_lookup(p + j))
  81. print("type mix is" , type(mix))
  82. print("type newdata is" , type(newdata))
  83. print("ACCESS is",i)
  84. mix = map(fnv, mix, newdata)
  85. # compress mix
  86. cmix = []
  87. for i in range(0, len(mix), 4):
  88. print("cmix is ", cmix)
  89. cmix.append(fnv(fnv(fnv(mix[i], mix[i+1]), mix[i+2]), mix[i+3]))
  90. print("cmix is ", cmix)
  91. print("cmix is ", cmix)
  92. return {
  93. "mix digest": serialize_hash(cmix),
  94. "result": serialize_hash(sha3_256(s+cmix))
  95. }
  96.  
  97.  
#Mix only algo - for Avnet reference
def mix_only(s, full_size, dataset_lookup):
    """Hardware-reference variant of hashimoto that runs only the mix stage.

    NOTE(review): the incoming ``s`` parameter is immediately overwritten with
    a fixed all-zero seed (see the "Avnet" comment below) — the argument is
    effectively ignored as written.

    NOTE(review): this function assumes Python 2 semantics — ``serialize_hash``
    is expected to return a ``str`` so that ``ord(s[0])`` / ``ord(mix[...])``
    work, and ``map`` is expected to return a list. Confirm before running
    elsewhere; after the first access round ``mix`` holds ints, so the
    ``ord(...)`` calls look like they would fail on iteration 2 — verify.

    Returns the compressed mix (cmix) as a list of 32-bit words.
    """
    n = full_size / HASH_BYTES
    w = MIX_BYTES // WORD_BYTES
    mixhashes = MIX_BYTES / HASH_BYTES
    # combine header+nonce into a 64 byte seed
    # Avnet : comment below to parse s from object calls
    s = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
    print(s)
    s = sha3_512(s)
    print(s)
    s = serialize_hash(s)
    print("Initial s data in HEX:")
    print(binascii.hexlify(s))


    # start the mix with replicated s
    mix = []
    for _ in range(MIX_BYTES / HASH_BYTES):
        mix.extend(s)
    print("type mix is" , type(mix))
    # mix in random dataset nodes
    for i in range(ACCESSES):
        # pick a pseudo-random page-aligned dataset index for this access
        p = fnv(i ^ ord(s[0]), ord(mix[i % w])) % (n // mixhashes) * mixhashes
        newdata = []
        print("newdata is",newdata)
        for j in range(MIX_BYTES / HASH_BYTES):
            newdata.extend(dataset_lookup(p + j))
        print("mixmap is ")
        print("type mix is" , type(mix))
        print("type newdata is" , type(newdata))
        print("ACCESS is",i)
        mix = map(fnv, mix, newdata)
    # compress mix: fold each group of 4 words into 1 with nested FNV
    cmix = []
    for i in range(0, len(mix), 4):
        print("cmix start append")
        cmix.append(fnv(fnv(fnv(mix[i], mix[i+1]), mix[i+2]), mix[i+3]))
    print("returning cmix")
    return cmix
  138.  
  139. def hashimoto_light(full_size, cache, header, nonce):
  140. return hashimoto(header, nonce, full_size, lambda x: calc_dataset_item(cache, x))
  141.  
  142. def hashimoto_full(full_size, dataset, header, nonce):
  143. return hashimoto(header, nonce, full_size, lambda x: dataset[x])
  144.  
  145. def get_seedhash(block):
  146. s = '\x00' * 32
  147. for i in range(block.number // EPOCH_LENGTH):
  148. s = serialize_hash(sha3_256(s))
  149. return s
  150.  
  151. def mine(full_size, dataset, header, difficulty):
  152. print("Target: {}".format(float(2**256 // difficulty)))
  153. target = zpad(encode_int(2**256 // difficulty), 64) # [::-1]
  154. print("Target after padding: {}".format(float(decode_int(target))))
  155. from random import randint
  156. nonce = randint(0, 2**64)
  157. print("Nonce: {}".format(nonce))
  158. print(float(decode_int(hashimoto_full(full_size, dataset, header, nonce)['result'])))
  159. steps = 0
  160. while decode_int(hashimoto_full(full_size, dataset, header, nonce)['result']) > decode_int(target):
  161. nonce = (nonce + 1) % 2**64
  162. steps += 1
  163. print("Nonce: {}".format(nonce))
  164. print(float(decode_int(hashimoto_full(full_size, dataset, header, nonce)['result'])))
  165. print("Steps required: {}".format(steps))
  166. return nonce
  167.  
  168.  
class Block(object):
    """Minimal stand-in for a chain block; only the height is needed here."""
    def __init__(self, number):
        # block height, used to derive cache/dataset sizes and the seed hash
        self.number = number
  172.  
  173.  
def main():
    """Demo driver: build the light cache and full DAG for a block, sanity-check
    the hash helpers, then mine a nonce at a fixed low difficulty.

    NOTE(review): this script assumes Python 2 — ``str(bytearray(...))``,
    hashing of ``""``, and ``binascii.hexlify`` on str all rely on py2
    bytes/str equivalence; confirm before porting.
    """
    # 548-byte zeroed header buffer (508 + 8*5)
    header_size = 508 + 8 * 5

    block = Block(30010)
    #block = Block(7749409)
    header = str(bytearray(header_size))
    difficulty = 0x4000

    #fnvresult = fnv(0x12345678,0xffffffff)
    #print(hex(fnvresult))

    cache_size = get_cache_size(block.number)
    print("Cache size is", cache_size)
    full_size = get_full_size(block.number)
    print("Full size is", full_size)
    seedhash = get_seedhash(block)
    seedhash_hex = binascii.hexlify(seedhash)
    #print("Seedhash ASCII is", seedhash)

    # sanity-check keccak_256 + deserialize on the empty string
    asdf = sha3.keccak_256("")
    asdf = asdf.hexdigest()
    print(asdf)
    asdf3 = deserialize_hash(asdf)
    print(asdf3)

    # sanity-check sha3_512 round-trip through serialize/deserialize
    asdf = sha3_512("")
    print(asdf)
    asdf2 = serialize_hash(asdf)
    print(binascii.hexlify(asdf2))
    asdf3 = deserialize_hash(asdf2)
    print(asdf3)

    print("Seedhash HEX is", seedhash_hex)
    lenofseed = len(seedhash)  # NOTE(review): unused local
    print("Length of seedhash is {} bytes".format(len(seedhash)))
    print("Prepare light cache...")

    #Make fake cache below
    #cache = mkfakecache(cache_size, seedhash)
    cache = mkcache(cache_size, seedhash)
    print("Length of cache is", len(cache))
    #print("Prepare zero-padded full dataset...")
    #dataset = calc_fake_dataset(full_size, cache)
    dataset = calc_dataset(full_size, cache)
    print("Dataset to be used for MIX")
    #s1 = [0,0,0,0,0,0,0,0,0,0,0,0,0]
    #mixoutput = mix_only( s1 , full_size, lambda x: dataset[x])
    #print("mix output is " , mixoutput)
    print("Mine a block")
    mine(full_size, dataset, header, difficulty)
  224.  
  225.  
# Script entry point: run the demo flow only when executed directly.
if __name__ == "__main__":
    main()
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement