Advertisement
Guest User

scrypt gpu core

a guest
Mar 26th, 2013
201
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 22.62 KB | None | 0 0
  1. #define rotl(x,y) rotate(x,y)
  2. #define Ch(x, y, z) ((x & (y ^ z)) ^ z)
  3. #define Maj(x, y, z) ((x & (y | z)) | (y & z))
  4. #define LOOKUP_GAP 2
  5.  
/* Byte-swap each 32-bit lane of a uint4 (big-endian <-> little-endian).
 * Two masked rotates reverse the byte order: rotating the even bytes
 * (0x00FF00FF mask) left by 24 and the odd bytes (0xFF00FF00 mask) left
 * by 8 turns bytes ABCD into DCBA in every lane. */
uint4 EndianSwap4(uint4 n)
{
return rotl(n&0x00FF00FF,24U)|rotl(n&0xFF00FF00,8U);
}
  10.  
  11. #define Tr2(x) (rotl(x, 30U) ^ rotl(x, 19U) ^ rotl(x, 10U))
  12. #define Tr1(x) (rotl(x, 26U) ^ rotl(x, 21U) ^ rotl(x, 7U))
  13. #define Wr2(x) (rotl(x, 25U) ^ rotl(x, 14U) ^ (x>>3U))
  14. #define Wr1(x) (rotl(x, 15U) ^ rotl(x, 13U) ^ (x>>10U))
  15.  
  16. #define RND(a, b, c, d, e, f, g, h, k) \
  17. h += Tr1(e) + Ch(e, f, g) + k; \
  18. d += h; \
  19. h += Tr2(a) + Maj(a, b, c);
  20.  
/* One SHA-256 compression-function application over a single 64-byte block.
 * state0 holds working variables a..d, state1 holds e..h (one uint per lane).
 * block0..block3 are the 16 big-endian message words. The 64 rounds are fully
 * unrolled; the message schedule W[16..63] is computed in place in W[0..3],
 * reusing each slot once its round has consumed it. Statement order is
 * load-bearing — do not reorder. */
void SHA256(uint4*restrict state0,uint4*restrict state1, const uint4 block0, const uint4 block1, const uint4 block2, const uint4 block3)
{
uint4 S0 = *state0;
uint4 S1 = *state1;

/* Alias the eight working variables onto the two vectors. */
#define A S0.x
#define B S0.y
#define C S0.z
#define D S0.w
#define E S1.x
#define F S1.y
#define G S1.z
#define H S1.w

uint4 W[4];

/* Rounds 0-15: schedule is just the message words; constants are K[0..15]. */
W[ 0].x = block0.x;
RND(A,B,C,D,E,F,G,H, W[0].x+0x428a2f98U);
W[ 0].y = block0.y;
RND(H,A,B,C,D,E,F,G, W[0].y+0x71374491U);
W[ 0].z = block0.z;
RND(G,H,A,B,C,D,E,F, W[0].z+0xb5c0fbcfU);
W[ 0].w = block0.w;
RND(F,G,H,A,B,C,D,E, W[0].w+0xe9b5dba5U);

W[ 1].x = block1.x;
RND(E,F,G,H,A,B,C,D, W[1].x+0x3956c25bU);
W[ 1].y = block1.y;
RND(D,E,F,G,H,A,B,C, W[1].y+0x59f111f1U);
W[ 1].z = block1.z;
RND(C,D,E,F,G,H,A,B, W[1].z+0x923f82a4U);
W[ 1].w = block1.w;
RND(B,C,D,E,F,G,H,A, W[1].w+0xab1c5ed5U);

W[ 2].x = block2.x;
RND(A,B,C,D,E,F,G,H, W[2].x+0xd807aa98U);
W[ 2].y = block2.y;
RND(H,A,B,C,D,E,F,G, W[2].y+0x12835b01U);
W[ 2].z = block2.z;
RND(G,H,A,B,C,D,E,F, W[2].z+0x243185beU);
W[ 2].w = block2.w;
RND(F,G,H,A,B,C,D,E, W[2].w+0x550c7dc3U);

W[ 3].x = block3.x;
RND(E,F,G,H,A,B,C,D, W[3].x+0x72be5d74U);
W[ 3].y = block3.y;
RND(D,E,F,G,H,A,B,C, W[3].y+0x80deb1feU);
W[ 3].z = block3.z;
RND(C,D,E,F,G,H,A,B, W[3].z+0x9bdc06a7U);
W[ 3].w = block3.w;
RND(B,C,D,E,F,G,H,A, W[3].w+0xc19bf174U);

/* Rounds 16-31: W[t] = sigma1(W[t-2]) + W[t-7] + sigma0(W[t-15]) + W[t-16],
 * computed in place over the 16-slot ring buffer W[0..3].{x,y,z,w}. */
W[ 0].x += Wr1(W[ 3].z) + W[ 2].y + Wr2(W[ 0].y);
RND(A,B,C,D,E,F,G,H, W[0].x+0xe49b69c1U);

W[ 0].y += Wr1(W[ 3].w) + W[ 2].z + Wr2(W[ 0].z);
RND(H,A,B,C,D,E,F,G, W[0].y+0xefbe4786U);

W[ 0].z += Wr1(W[ 0].x) + W[ 2].w + Wr2(W[ 0].w);
RND(G,H,A,B,C,D,E,F, W[0].z+0x0fc19dc6U);

W[ 0].w += Wr1(W[ 0].y) + W[ 3].x + Wr2(W[ 1].x);
RND(F,G,H,A,B,C,D,E, W[0].w+0x240ca1ccU);

W[ 1].x += Wr1(W[ 0].z) + W[ 3].y + Wr2(W[ 1].y);
RND(E,F,G,H,A,B,C,D, W[1].x+0x2de92c6fU);

W[ 1].y += Wr1(W[ 0].w) + W[ 3].z + Wr2(W[ 1].z);
RND(D,E,F,G,H,A,B,C, W[1].y+0x4a7484aaU);

W[ 1].z += Wr1(W[ 1].x) + W[ 3].w + Wr2(W[ 1].w);
RND(C,D,E,F,G,H,A,B, W[1].z+0x5cb0a9dcU);

W[ 1].w += Wr1(W[ 1].y) + W[ 0].x + Wr2(W[ 2].x);
RND(B,C,D,E,F,G,H,A, W[1].w+0x76f988daU);

W[ 2].x += Wr1(W[ 1].z) + W[ 0].y + Wr2(W[ 2].y);
RND(A,B,C,D,E,F,G,H, W[2].x+0x983e5152U);

W[ 2].y += Wr1(W[ 1].w) + W[ 0].z + Wr2(W[ 2].z);
RND(H,A,B,C,D,E,F,G, W[2].y+0xa831c66dU);

W[ 2].z += Wr1(W[ 2].x) + W[ 0].w + Wr2(W[ 2].w);
RND(G,H,A,B,C,D,E,F, W[2].z+0xb00327c8U);

W[ 2].w += Wr1(W[ 2].y) + W[ 1].x + Wr2(W[ 3].x);
RND(F,G,H,A,B,C,D,E, W[2].w+0xbf597fc7U);

W[ 3].x += Wr1(W[ 2].z) + W[ 1].y + Wr2(W[ 3].y);
RND(E,F,G,H,A,B,C,D, W[3].x+0xc6e00bf3U);

W[ 3].y += Wr1(W[ 2].w) + W[ 1].z + Wr2(W[ 3].z);
RND(D,E,F,G,H,A,B,C, W[3].y+0xd5a79147U);

W[ 3].z += Wr1(W[ 3].x) + W[ 1].w + Wr2(W[ 3].w);
RND(C,D,E,F,G,H,A,B, W[3].z+0x06ca6351U);

W[ 3].w += Wr1(W[ 3].y) + W[ 2].x + Wr2(W[ 0].x);
RND(B,C,D,E,F,G,H,A, W[3].w+0x14292967U);

/* Rounds 32-47. */
W[ 0].x += Wr1(W[ 3].z) + W[ 2].y + Wr2(W[ 0].y);
RND(A,B,C,D,E,F,G,H, W[0].x+0x27b70a85U);

W[ 0].y += Wr1(W[ 3].w) + W[ 2].z + Wr2(W[ 0].z);
RND(H,A,B,C,D,E,F,G, W[0].y+0x2e1b2138U);

W[ 0].z += Wr1(W[ 0].x) + W[ 2].w + Wr2(W[ 0].w);
RND(G,H,A,B,C,D,E,F, W[0].z+0x4d2c6dfcU);

W[ 0].w += Wr1(W[ 0].y) + W[ 3].x + Wr2(W[ 1].x);
RND(F,G,H,A,B,C,D,E, W[0].w+0x53380d13U);

W[ 1].x += Wr1(W[ 0].z) + W[ 3].y + Wr2(W[ 1].y);
RND(E,F,G,H,A,B,C,D, W[1].x+0x650a7354U);

W[ 1].y += Wr1(W[ 0].w) + W[ 3].z + Wr2(W[ 1].z);
RND(D,E,F,G,H,A,B,C, W[1].y+0x766a0abbU);

W[ 1].z += Wr1(W[ 1].x) + W[ 3].w + Wr2(W[ 1].w);
RND(C,D,E,F,G,H,A,B, W[1].z+0x81c2c92eU);

W[ 1].w += Wr1(W[ 1].y) + W[ 0].x + Wr2(W[ 2].x);
RND(B,C,D,E,F,G,H,A, W[1].w+0x92722c85U);

W[ 2].x += Wr1(W[ 1].z) + W[ 0].y + Wr2(W[ 2].y);
RND(A,B,C,D,E,F,G,H, W[2].x+0xa2bfe8a1U);

W[ 2].y += Wr1(W[ 1].w) + W[ 0].z + Wr2(W[ 2].z);
RND(H,A,B,C,D,E,F,G, W[2].y+0xa81a664bU);

W[ 2].z += Wr1(W[ 2].x) + W[ 0].w + Wr2(W[ 2].w);
RND(G,H,A,B,C,D,E,F, W[2].z+0xc24b8b70U);

W[ 2].w += Wr1(W[ 2].y) + W[ 1].x + Wr2(W[ 3].x);
RND(F,G,H,A,B,C,D,E, W[2].w+0xc76c51a3U);

W[ 3].x += Wr1(W[ 2].z) + W[ 1].y + Wr2(W[ 3].y);
RND(E,F,G,H,A,B,C,D, W[3].x+0xd192e819U);

W[ 3].y += Wr1(W[ 2].w) + W[ 1].z + Wr2(W[ 3].z);
RND(D,E,F,G,H,A,B,C, W[3].y+0xd6990624U);

W[ 3].z += Wr1(W[ 3].x) + W[ 1].w + Wr2(W[ 3].w);
RND(C,D,E,F,G,H,A,B, W[3].z+0xf40e3585U);

W[ 3].w += Wr1(W[ 3].y) + W[ 2].x + Wr2(W[ 0].x);
RND(B,C,D,E,F,G,H,A, W[3].w+0x106aa070U);

/* Rounds 48-63. */
W[ 0].x += Wr1(W[ 3].z) + W[ 2].y + Wr2(W[ 0].y);
RND(A,B,C,D,E,F,G,H, W[0].x+0x19a4c116U);

W[ 0].y += Wr1(W[ 3].w) + W[ 2].z + Wr2(W[ 0].z);
RND(H,A,B,C,D,E,F,G, W[0].y+0x1e376c08U);

W[ 0].z += Wr1(W[ 0].x) + W[ 2].w + Wr2(W[ 0].w);
RND(G,H,A,B,C,D,E,F, W[0].z+0x2748774cU);

W[ 0].w += Wr1(W[ 0].y) + W[ 3].x + Wr2(W[ 1].x);
RND(F,G,H,A,B,C,D,E, W[0].w+0x34b0bcb5U);

W[ 1].x += Wr1(W[ 0].z) + W[ 3].y + Wr2(W[ 1].y);
RND(E,F,G,H,A,B,C,D, W[1].x+0x391c0cb3U);

W[ 1].y += Wr1(W[ 0].w) + W[ 3].z + Wr2(W[ 1].z);
RND(D,E,F,G,H,A,B,C, W[1].y+0x4ed8aa4aU);

W[ 1].z += Wr1(W[ 1].x) + W[ 3].w + Wr2(W[ 1].w);
RND(C,D,E,F,G,H,A,B, W[1].z+0x5b9cca4fU);

W[ 1].w += Wr1(W[ 1].y) + W[ 0].x + Wr2(W[ 2].x);
RND(B,C,D,E,F,G,H,A, W[1].w+0x682e6ff3U);

W[ 2].x += Wr1(W[ 1].z) + W[ 0].y + Wr2(W[ 2].y);
RND(A,B,C,D,E,F,G,H, W[2].x+0x748f82eeU);

W[ 2].y += Wr1(W[ 1].w) + W[ 0].z + Wr2(W[ 2].z);
RND(H,A,B,C,D,E,F,G, W[2].y+0x78a5636fU);

W[ 2].z += Wr1(W[ 2].x) + W[ 0].w + Wr2(W[ 2].w);
RND(G,H,A,B,C,D,E,F, W[2].z+0x84c87814U);

W[ 2].w += Wr1(W[ 2].y) + W[ 1].x + Wr2(W[ 3].x);
RND(F,G,H,A,B,C,D,E, W[2].w+0x8cc70208U);

W[ 3].x += Wr1(W[ 2].z) + W[ 1].y + Wr2(W[ 3].y);
RND(E,F,G,H,A,B,C,D, W[3].x+0x90befffaU);

W[ 3].y += Wr1(W[ 2].w) + W[ 1].z + Wr2(W[ 3].z);
RND(D,E,F,G,H,A,B,C, W[3].y+0xa4506cebU);

W[ 3].z += Wr1(W[ 3].x) + W[ 1].w + Wr2(W[ 3].w);
RND(C,D,E,F,G,H,A,B, W[3].z+0xbef9a3f7U);

W[ 3].w += Wr1(W[ 3].y) + W[ 2].x + Wr2(W[ 0].x);
RND(B,C,D,E,F,G,H,A, W[3].w+0xc67178f2U);

#undef A
#undef B
#undef C
#undef D
#undef E
#undef F
#undef G
#undef H

/* Davies-Meyer feed-forward: add the compressed block into the chaining value. */
*state0 += S0;
*state1 += S1;
}
  229.  
/* SHA-256 compression of one 64-byte block starting from the standard IV,
 * with the first four rounds constant-folded: since a..h begin at the fixed
 * initial hash value, rounds 0-3 reduce to the precomputed mixed constants
 * below (0x98c7e2a2 etc.). Writes the resulting state (IV already added via
 * the feed-forward at the end) into *state0 / *state1 — the incoming values
 * of *state0/*state1 are ignored. Used for the fixed first block of the
 * HMAC inner/outer pads. NOTE(review): the folded constants are presumably
 * derived offline from the IV and K[0..3]; verify against FIPS 180-4 if
 * touching them. */
void SHA256_fresh(uint4*restrict state0,uint4*restrict state1, const uint4 block0, const uint4 block1, const uint4 block2, const uint4 block3)
{
#define A (*state0).x
#define B (*state0).y
#define C (*state0).z
#define D (*state0).w
#define E (*state1).x
#define F (*state1).y
#define G (*state1).z
#define H (*state1).w

uint4 W[4];

/* Rounds 0-3, specialized for the known IV. */
W[ 0].x = block0.x;
D=0x98c7e2a2U+W[0].x;
H=0xfc08884dU+W[0].x;

W[ 0].y = block0.y;
C=0xcd2a11aeU+Tr1(D)+Ch(D,0x510e527fU,0x9b05688cU)+W[0].y;
G=0xC3910C8EU+C+Tr2(H)+Ch(H,0xfb6feee7U,0x2a01a605U);

W[ 0].z = block0.z;
B=0x0c2e12e0U+Tr1(C)+Ch(C,D,0x510e527fU)+W[0].z;
F=0x4498517BU+B+Tr2(G)+Maj(G,H,0x6a09e667U);

W[ 0].w = block0.w;
A=0xa4ce148bU+Tr1(B)+Ch(B,C,D)+W[0].w;
E=0x95F61999U+A+Tr2(F)+Maj(F,G,H);

/* Rounds 4-15: standard rounds over the remaining message words. */
W[ 1].x = block1.x;
RND(E,F,G,H,A,B,C,D, W[1].x+0x3956c25bU);
W[ 1].y = block1.y;
RND(D,E,F,G,H,A,B,C, W[1].y+0x59f111f1U);
W[ 1].z = block1.z;
RND(C,D,E,F,G,H,A,B, W[1].z+0x923f82a4U);
W[ 1].w = block1.w;
RND(B,C,D,E,F,G,H,A, W[1].w+0xab1c5ed5U);

W[ 2].x = block2.x;
RND(A,B,C,D,E,F,G,H, W[2].x+0xd807aa98U);
W[ 2].y = block2.y;
RND(H,A,B,C,D,E,F,G, W[2].y+0x12835b01U);
W[ 2].z = block2.z;
RND(G,H,A,B,C,D,E,F, W[2].z+0x243185beU);
W[ 2].w = block2.w;
RND(F,G,H,A,B,C,D,E, W[2].w+0x550c7dc3U);

W[ 3].x = block3.x;
RND(E,F,G,H,A,B,C,D, W[3].x+0x72be5d74U);
W[ 3].y = block3.y;
RND(D,E,F,G,H,A,B,C, W[3].y+0x80deb1feU);
W[ 3].z = block3.z;
RND(C,D,E,F,G,H,A,B, W[3].z+0x9bdc06a7U);
W[ 3].w = block3.w;
RND(B,C,D,E,F,G,H,A, W[3].w+0xc19bf174U);

/* Rounds 16-63: in-place message schedule, same structure as SHA256(). */
W[ 0].x += Wr1(W[ 3].z) + W[ 2].y + Wr2(W[ 0].y);
RND(A,B,C,D,E,F,G,H, W[0].x+0xe49b69c1U);

W[ 0].y += Wr1(W[ 3].w) + W[ 2].z + Wr2(W[ 0].z);
RND(H,A,B,C,D,E,F,G, W[0].y+0xefbe4786U);

W[ 0].z += Wr1(W[ 0].x) + W[ 2].w + Wr2(W[ 0].w);
RND(G,H,A,B,C,D,E,F, W[0].z+0x0fc19dc6U);

W[ 0].w += Wr1(W[ 0].y) + W[ 3].x + Wr2(W[ 1].x);
RND(F,G,H,A,B,C,D,E, W[0].w+0x240ca1ccU);

W[ 1].x += Wr1(W[ 0].z) + W[ 3].y + Wr2(W[ 1].y);
RND(E,F,G,H,A,B,C,D, W[1].x+0x2de92c6fU);

W[ 1].y += Wr1(W[ 0].w) + W[ 3].z + Wr2(W[ 1].z);
RND(D,E,F,G,H,A,B,C, W[1].y+0x4a7484aaU);

W[ 1].z += Wr1(W[ 1].x) + W[ 3].w + Wr2(W[ 1].w);
RND(C,D,E,F,G,H,A,B, W[1].z+0x5cb0a9dcU);

W[ 1].w += Wr1(W[ 1].y) + W[ 0].x + Wr2(W[ 2].x);
RND(B,C,D,E,F,G,H,A, W[1].w+0x76f988daU);

W[ 2].x += Wr1(W[ 1].z) + W[ 0].y + Wr2(W[ 2].y);
RND(A,B,C,D,E,F,G,H, W[2].x+0x983e5152U);

W[ 2].y += Wr1(W[ 1].w) + W[ 0].z + Wr2(W[ 2].z);
RND(H,A,B,C,D,E,F,G, W[2].y+0xa831c66dU);

W[ 2].z += Wr1(W[ 2].x) + W[ 0].w + Wr2(W[ 2].w);
RND(G,H,A,B,C,D,E,F, W[2].z+0xb00327c8U);

W[ 2].w += Wr1(W[ 2].y) + W[ 1].x + Wr2(W[ 3].x);
RND(F,G,H,A,B,C,D,E, W[2].w+0xbf597fc7U);

W[ 3].x += Wr1(W[ 2].z) + W[ 1].y + Wr2(W[ 3].y);
RND(E,F,G,H,A,B,C,D, W[3].x+0xc6e00bf3U);

W[ 3].y += Wr1(W[ 2].w) + W[ 1].z + Wr2(W[ 3].z);
RND(D,E,F,G,H,A,B,C, W[3].y+0xd5a79147U);

W[ 3].z += Wr1(W[ 3].x) + W[ 1].w + Wr2(W[ 3].w);
RND(C,D,E,F,G,H,A,B, W[3].z+0x06ca6351U);

W[ 3].w += Wr1(W[ 3].y) + W[ 2].x + Wr2(W[ 0].x);
RND(B,C,D,E,F,G,H,A, W[3].w+0x14292967U);

W[ 0].x += Wr1(W[ 3].z) + W[ 2].y + Wr2(W[ 0].y);
RND(A,B,C,D,E,F,G,H, W[0].x+0x27b70a85U);

W[ 0].y += Wr1(W[ 3].w) + W[ 2].z + Wr2(W[ 0].z);
RND(H,A,B,C,D,E,F,G, W[0].y+0x2e1b2138U);

W[ 0].z += Wr1(W[ 0].x) + W[ 2].w + Wr2(W[ 0].w);
RND(G,H,A,B,C,D,E,F, W[0].z+0x4d2c6dfcU);

W[ 0].w += Wr1(W[ 0].y) + W[ 3].x + Wr2(W[ 1].x);
RND(F,G,H,A,B,C,D,E, W[0].w+0x53380d13U);

W[ 1].x += Wr1(W[ 0].z) + W[ 3].y + Wr2(W[ 1].y);
RND(E,F,G,H,A,B,C,D, W[1].x+0x650a7354U);

W[ 1].y += Wr1(W[ 0].w) + W[ 3].z + Wr2(W[ 1].z);
RND(D,E,F,G,H,A,B,C, W[1].y+0x766a0abbU);

W[ 1].z += Wr1(W[ 1].x) + W[ 3].w + Wr2(W[ 1].w);
RND(C,D,E,F,G,H,A,B, W[1].z+0x81c2c92eU);

W[ 1].w += Wr1(W[ 1].y) + W[ 0].x + Wr2(W[ 2].x);
RND(B,C,D,E,F,G,H,A, W[1].w+0x92722c85U);

W[ 2].x += Wr1(W[ 1].z) + W[ 0].y + Wr2(W[ 2].y);
RND(A,B,C,D,E,F,G,H, W[2].x+0xa2bfe8a1U);

W[ 2].y += Wr1(W[ 1].w) + W[ 0].z + Wr2(W[ 2].z);
RND(H,A,B,C,D,E,F,G, W[2].y+0xa81a664bU);

W[ 2].z += Wr1(W[ 2].x) + W[ 0].w + Wr2(W[ 2].w);
RND(G,H,A,B,C,D,E,F, W[2].z+0xc24b8b70U);

W[ 2].w += Wr1(W[ 2].y) + W[ 1].x + Wr2(W[ 3].x);
RND(F,G,H,A,B,C,D,E, W[2].w+0xc76c51a3U);

W[ 3].x += Wr1(W[ 2].z) + W[ 1].y + Wr2(W[ 3].y);
RND(E,F,G,H,A,B,C,D, W[3].x+0xd192e819U);

W[ 3].y += Wr1(W[ 2].w) + W[ 1].z + Wr2(W[ 3].z);
RND(D,E,F,G,H,A,B,C, W[3].y+0xd6990624U);

W[ 3].z += Wr1(W[ 3].x) + W[ 1].w + Wr2(W[ 3].w);
RND(C,D,E,F,G,H,A,B, W[3].z+0xf40e3585U);

W[ 3].w += Wr1(W[ 3].y) + W[ 2].x + Wr2(W[ 0].x);
RND(B,C,D,E,F,G,H,A, W[3].w+0x106aa070U);

W[ 0].x += Wr1(W[ 3].z) + W[ 2].y + Wr2(W[ 0].y);
RND(A,B,C,D,E,F,G,H, W[0].x+0x19a4c116U);

W[ 0].y += Wr1(W[ 3].w) + W[ 2].z + Wr2(W[ 0].z);
RND(H,A,B,C,D,E,F,G, W[0].y+0x1e376c08U);

W[ 0].z += Wr1(W[ 0].x) + W[ 2].w + Wr2(W[ 0].w);
RND(G,H,A,B,C,D,E,F, W[0].z+0x2748774cU);

W[ 0].w += Wr1(W[ 0].y) + W[ 3].x + Wr2(W[ 1].x);
RND(F,G,H,A,B,C,D,E, W[0].w+0x34b0bcb5U);

W[ 1].x += Wr1(W[ 0].z) + W[ 3].y + Wr2(W[ 1].y);
RND(E,F,G,H,A,B,C,D, W[1].x+0x391c0cb3U);

W[ 1].y += Wr1(W[ 0].w) + W[ 3].z + Wr2(W[ 1].z);
RND(D,E,F,G,H,A,B,C, W[1].y+0x4ed8aa4aU);

W[ 1].z += Wr1(W[ 1].x) + W[ 3].w + Wr2(W[ 1].w);
RND(C,D,E,F,G,H,A,B, W[1].z+0x5b9cca4fU);

W[ 1].w += Wr1(W[ 1].y) + W[ 0].x + Wr2(W[ 2].x);
RND(B,C,D,E,F,G,H,A, W[1].w+0x682e6ff3U);

W[ 2].x += Wr1(W[ 1].z) + W[ 0].y + Wr2(W[ 2].y);
RND(A,B,C,D,E,F,G,H, W[2].x+0x748f82eeU);

W[ 2].y += Wr1(W[ 1].w) + W[ 0].z + Wr2(W[ 2].z);
RND(H,A,B,C,D,E,F,G, W[2].y+0x78a5636fU);

W[ 2].z += Wr1(W[ 2].x) + W[ 0].w + Wr2(W[ 2].w);
RND(G,H,A,B,C,D,E,F, W[2].z+0x84c87814U);

W[ 2].w += Wr1(W[ 2].y) + W[ 1].x + Wr2(W[ 3].x);
RND(F,G,H,A,B,C,D,E, W[2].w+0x8cc70208U);

W[ 3].x += Wr1(W[ 2].z) + W[ 1].y + Wr2(W[ 3].y);
RND(E,F,G,H,A,B,C,D, W[3].x+0x90befffaU);

W[ 3].y += Wr1(W[ 2].w) + W[ 1].z + Wr2(W[ 3].z);
RND(D,E,F,G,H,A,B,C, W[3].y+0xa4506cebU);

W[ 3].z += Wr1(W[ 3].x) + W[ 1].w + Wr2(W[ 3].w);
RND(C,D,E,F,G,H,A,B, W[3].z+0xbef9a3f7U);

W[ 3].w += Wr1(W[ 3].y) + W[ 2].x + Wr2(W[ 0].x);
RND(B,C,D,E,F,G,H,A, W[3].w+0xc67178f2U);

#undef A
#undef B
#undef C
#undef D
#undef E
#undef F
#undef G
#undef H

/* Feed-forward: add the standard SHA-256 initial hash value H(0). */
*state0 += (uint4)(0x6A09E667U,0xBB67AE85U,0x3C6EF372U,0xA54FF53AU);
*state1 += (uint4)(0x510E527FU,0x9B05688CU,0x1F83D9ABU,0x5BE0CD19U);
}
  442.  
/* Precomputed per-round addends (message-schedule word W[t] folded together
 * with round constant K[t]) for the fixed 64-byte padding block hashed by
 * SHA256_fixed(). Because that block's contents never vary, the whole
 * schedule can be baked in here. NOTE(review): values were presumably
 * generated offline; they are NOT the raw SHA-256 K constants, so do not
 * "correct" them against FIPS 180-4. */
__constant uint fixedW[64] =
{
0x428a2f99,0xf1374491,0xb5c0fbcf,0xe9b5dba5,0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5,
0xd807aa98,0x12835b01,0x243185be,0x550c7dc3,0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf794,
0xf59b89c2,0x73924787,0x23c6886e,0xa42ca65c,0x15ed3627,0x4d6edcbf,0xe28217fc,0xef02488f,
0xb707775c,0x0468c23f,0xe7e72b4c,0x49e1f1a2,0x4b99c816,0x926d1570,0xaa0fc072,0xadb36e2c,
0xad87a3ea,0xbcb1d3a3,0x7b993186,0x562b9420,0xbff3ca0c,0xda4b0c23,0x6cd8711a,0x8f337caa,
0xc91b1417,0xc359dce1,0xa83253a7,0x3b13c12d,0x9d3d725d,0xd9031a84,0xb1a03340,0x16f58012,
0xe64fb6a2,0xe84d923a,0xe93a5730,0x09837686,0x078ff753,0x29833341,0xd5de0b7e,0x6948ccf4,
0xe0a1adbe,0x7c728e11,0x511c78e4,0x315b45bd,0xfca71413,0xea28f96a,0x79703128,0x4e1ef848,
};
  454.  
/* SHA-256 compression of the fixed padding block: identical round structure
 * to SHA256(), but the per-round addend W[t]+K[t] comes entirely from the
 * precomputed fixedW[] table, so no message schedule is computed at runtime.
 * Updates *state0 (a..d) and *state1 (e..h) in place with the usual
 * Davies-Meyer feed-forward. */
void SHA256_fixed(uint4*restrict state0,uint4*restrict state1)
{
uint4 S0 = *state0;
uint4 S1 = *state1;

/* Alias the eight working variables onto the two vectors. */
#define A S0.x
#define B S0.y
#define C S0.z
#define D S0.w
#define E S1.x
#define F S1.y
#define G S1.z
#define H S1.w

/* 64 fully-unrolled rounds; argument rotation replaces register shuffling. */
RND(A,B,C,D,E,F,G,H, fixedW[0]);
RND(H,A,B,C,D,E,F,G, fixedW[1]);
RND(G,H,A,B,C,D,E,F, fixedW[2]);
RND(F,G,H,A,B,C,D,E, fixedW[3]);
RND(E,F,G,H,A,B,C,D, fixedW[4]);
RND(D,E,F,G,H,A,B,C, fixedW[5]);
RND(C,D,E,F,G,H,A,B, fixedW[6]);
RND(B,C,D,E,F,G,H,A, fixedW[7]);
RND(A,B,C,D,E,F,G,H, fixedW[8]);
RND(H,A,B,C,D,E,F,G, fixedW[9]);
RND(G,H,A,B,C,D,E,F, fixedW[10]);
RND(F,G,H,A,B,C,D,E, fixedW[11]);
RND(E,F,G,H,A,B,C,D, fixedW[12]);
RND(D,E,F,G,H,A,B,C, fixedW[13]);
RND(C,D,E,F,G,H,A,B, fixedW[14]);
RND(B,C,D,E,F,G,H,A, fixedW[15]);
RND(A,B,C,D,E,F,G,H, fixedW[16]);
RND(H,A,B,C,D,E,F,G, fixedW[17]);
RND(G,H,A,B,C,D,E,F, fixedW[18]);
RND(F,G,H,A,B,C,D,E, fixedW[19]);
RND(E,F,G,H,A,B,C,D, fixedW[20]);
RND(D,E,F,G,H,A,B,C, fixedW[21]);
RND(C,D,E,F,G,H,A,B, fixedW[22]);
RND(B,C,D,E,F,G,H,A, fixedW[23]);
RND(A,B,C,D,E,F,G,H, fixedW[24]);
RND(H,A,B,C,D,E,F,G, fixedW[25]);
RND(G,H,A,B,C,D,E,F, fixedW[26]);
RND(F,G,H,A,B,C,D,E, fixedW[27]);
RND(E,F,G,H,A,B,C,D, fixedW[28]);
RND(D,E,F,G,H,A,B,C, fixedW[29]);
RND(C,D,E,F,G,H,A,B, fixedW[30]);
RND(B,C,D,E,F,G,H,A, fixedW[31]);
RND(A,B,C,D,E,F,G,H, fixedW[32]);
RND(H,A,B,C,D,E,F,G, fixedW[33]);
RND(G,H,A,B,C,D,E,F, fixedW[34]);
RND(F,G,H,A,B,C,D,E, fixedW[35]);
RND(E,F,G,H,A,B,C,D, fixedW[36]);
RND(D,E,F,G,H,A,B,C, fixedW[37]);
RND(C,D,E,F,G,H,A,B, fixedW[38]);
RND(B,C,D,E,F,G,H,A, fixedW[39]);
RND(A,B,C,D,E,F,G,H, fixedW[40]);
RND(H,A,B,C,D,E,F,G, fixedW[41]);
RND(G,H,A,B,C,D,E,F, fixedW[42]);
RND(F,G,H,A,B,C,D,E, fixedW[43]);
RND(E,F,G,H,A,B,C,D, fixedW[44]);
RND(D,E,F,G,H,A,B,C, fixedW[45]);
RND(C,D,E,F,G,H,A,B, fixedW[46]);
RND(B,C,D,E,F,G,H,A, fixedW[47]);
RND(A,B,C,D,E,F,G,H, fixedW[48]);
RND(H,A,B,C,D,E,F,G, fixedW[49]);
RND(G,H,A,B,C,D,E,F, fixedW[50]);
RND(F,G,H,A,B,C,D,E, fixedW[51]);
RND(E,F,G,H,A,B,C,D, fixedW[52]);
RND(D,E,F,G,H,A,B,C, fixedW[53]);
RND(C,D,E,F,G,H,A,B, fixedW[54]);
RND(B,C,D,E,F,G,H,A, fixedW[55]);
RND(A,B,C,D,E,F,G,H, fixedW[56]);
RND(H,A,B,C,D,E,F,G, fixedW[57]);
RND(G,H,A,B,C,D,E,F, fixedW[58]);
RND(F,G,H,A,B,C,D,E, fixedW[59]);
RND(E,F,G,H,A,B,C,D, fixedW[60]);
RND(D,E,F,G,H,A,B,C, fixedW[61]);
RND(C,D,E,F,G,H,A,B, fixedW[62]);
RND(B,C,D,E,F,G,H,A, fixedW[63]);

#undef A
#undef B
#undef C
#undef D
#undef E
#undef F
#undef G
#undef H
/* Davies-Meyer feed-forward. */
*state0 += S0;
*state1 += S1;
}
  545.  
/* Convert a 128-byte scrypt block from linear layout into the diagonal
 * column-shuffled layout used by the vectorized salsa() below, and
 * byte-swap every word. Each half (B[0..3], B[4..7]) is permuted so that
 * the four salsa "columns" land in the four uint4 lanes. unshittify() is
 * the exact inverse. */
void shittify(uint4 B[8])
{
uint4 tmp[4];
/* First half: gather the diagonals of B[0..3]. */
tmp[0] = (uint4)(B[1].x,B[2].y,B[3].z,B[0].w);
tmp[1] = (uint4)(B[2].x,B[3].y,B[0].z,B[1].w);
tmp[2] = (uint4)(B[3].x,B[0].y,B[1].z,B[2].w);
tmp[3] = (uint4)(B[0].x,B[1].y,B[2].z,B[3].w);

#pragma unroll
for(uint i=0; i<4; ++i)
B[i] = EndianSwap4(tmp[i]);

/* Second half: same permutation on B[4..7]. */
tmp[0] = (uint4)(B[5].x,B[6].y,B[7].z,B[4].w);
tmp[1] = (uint4)(B[6].x,B[7].y,B[4].z,B[5].w);
tmp[2] = (uint4)(B[7].x,B[4].y,B[5].z,B[6].w);
tmp[3] = (uint4)(B[4].x,B[5].y,B[6].z,B[7].w);

#pragma unroll
for(uint i=0; i<4; ++i)
B[i+4] = EndianSwap4(tmp[i]);
}
  567.  
/* Inverse of shittify(): scatter the diagonal column-shuffled layout back
 * to linear order and byte-swap every word, restoring the original
 * 128-byte block layout. */
void unshittify(uint4 B[8])
{
uint4 tmp[4];
/* First half: inverse permutation of B[0..3]. */
tmp[0] = (uint4)(B[3].x,B[2].y,B[1].z,B[0].w);
tmp[1] = (uint4)(B[0].x,B[3].y,B[2].z,B[1].w);
tmp[2] = (uint4)(B[1].x,B[0].y,B[3].z,B[2].w);
tmp[3] = (uint4)(B[2].x,B[1].y,B[0].z,B[3].w);

#pragma unroll
for(uint i=0; i<4; ++i)
B[i] = EndianSwap4(tmp[i]);

/* Second half: inverse permutation of B[4..7]. */
tmp[0] = (uint4)(B[7].x,B[6].y,B[5].z,B[4].w);
tmp[1] = (uint4)(B[4].x,B[7].y,B[6].z,B[5].w);
tmp[2] = (uint4)(B[5].x,B[4].y,B[7].z,B[6].w);
tmp[3] = (uint4)(B[6].x,B[5].y,B[4].z,B[7].w);

#pragma unroll
for(uint i=0; i<4; ++i)
B[i+4] = EndianSwap4(tmp[i]);
}
  589.  
/* scrypt BlockMix step using the Salsa20/8 core, vectorized: each uint4
 * holds one salsa row across the shuffled layout, so the rotate/add/xor
 * operations act on four state words at once. Performs
 *   B[0..3] = Salsa20/8(B[0..3] ^ B[4..7])
 *   B[4..7] = Salsa20/8(B[4..7] ^ B[0..3])
 * Each loop of 4 iterations runs 4 double-rounds (column round + row round
 * via the .wxyz/.zwxy swizzles) = 8 salsa rounds; the B[i]+=w[i] adds are
 * the Salsa20 feed-forward. */
void salsa(uint4 B[8])
{
uint4 w[4];

/* First half: w = B[0..3] ^ B[4..7] (also stored back into B[0..3]). */
#pragma unroll
for(uint i=0; i<4; ++i)
w[i] = (B[i]^=B[i+4]);

#pragma unroll
for(uint i=0; i<4; ++i)
{
/* Column round. */
w[0] ^= rotl(w[3]     +w[2]     , 7U);
w[1] ^= rotl(w[0]     +w[3]     , 9U);
w[2] ^= rotl(w[1]     +w[0]     ,13U);
w[3] ^= rotl(w[2]     +w[1]     ,18U);
/* Row round (lanes realigned by swizzling). */
w[2] ^= rotl(w[3].wxyz+w[0].zwxy, 7U);
w[1] ^= rotl(w[2].wxyz+w[3].zwxy, 9U);
w[0] ^= rotl(w[1].wxyz+w[2].zwxy,13U);
w[3] ^= rotl(w[0].wxyz+w[1].zwxy,18U);
}

/* Feed-forward into B[0..3]; second half: w = B[4..7] ^ new B[0..3]. */
#pragma unroll
for(uint i=0; i<4; ++i)
w[i] = (B[i+4]^=(B[i]+=w[i]));

#pragma unroll
for(uint i=0; i<4; ++i)
{
/* Column round. */
w[0] ^= rotl(w[3]     +w[2]     , 7U);
w[1] ^= rotl(w[0]     +w[3]     , 9U);
w[2] ^= rotl(w[1]     +w[0]     ,13U);
w[3] ^= rotl(w[2]     +w[1]     ,18U);
/* Row round. */
w[2] ^= rotl(w[3].wxyz+w[0].zwxy, 7U);
w[1] ^= rotl(w[2].wxyz+w[3].zwxy, 9U);
w[0] ^= rotl(w[1].wxyz+w[2].zwxy,13U);
w[3] ^= rotl(w[0].wxyz+w[1].zwxy,18U);
}

/* Feed-forward into B[4..7]. */
#pragma unroll
for(uint i=0; i<4; ++i)
B[i+4] += w[i];
}
  632.  
  633. #define Coord(x,y,z) x+y*(x ## SIZE)+z*(y ## SIZE)*(x ## SIZE)
  634. #define CO Coord(z,x,y)
  635.  
/* scrypt ROMix with N=1024, r=1: fill a global-memory scratchpad with the
 * salsa iterates of X, then perform 1024 data-dependent reads mixing the
 * scratchpad back into X. With LOOKUP_GAP only every GAP-th entry is
 * stored; skipped entries are recomputed with extra salsa() calls.
 * `lookup` is shared by all threads; entries are interleaved by thread id
 * (see CO) for coalescing. X is converted to/from the shuffled layout at
 * entry/exit. */
void scrypt_core(uint4 X[8], __global uint4*restrict lookup)
{
shittify(X);
const uint zSIZE = 8;
/* Number of stored scratchpad rows (rounded up when GAP doesn't divide 1024). */
const uint ySIZE = (1024/LOOKUP_GAP+(1024%LOOKUP_GAP>0));
/* Must match the host-side dispatch width — TODO confirm against host code. */
#define CONCURRENT_THREADS 6144
const uint xSIZE = CONCURRENT_THREADS;
uint x = get_global_id(0)%xSIZE;

/* Phase 1: store X, advance by LOOKUP_GAP salsa steps, repeat. */
for(uint y=0; y<1024/LOOKUP_GAP; ++y)
{
#pragma unroll
for(uint z=0; z<zSIZE; ++z)
lookup[CO] = X[z];
for(uint i=0; i<LOOKUP_GAP; ++i)
salsa(X);
}
/* Tail row only needed when LOOKUP_GAP doesn't divide 1024 evenly. */
#if (LOOKUP_GAP != 2) && (LOOKUP_GAP != 4) && (LOOKUP_GAP != 8)
{
uint y = (1024/LOOKUP_GAP);
#pragma unroll
for(uint z=0; z<zSIZE; ++z)
lookup[CO] = X[z];
for(uint i=0; i<1024%LOOKUP_GAP; ++i)
salsa(X);
}
#endif
/* Phase 2: 1024 data-dependent lookups; j = integerify(X) mod 1024. */
for (uint i=0; i<1024; ++i)
{
uint4 V[8];
uint j = X[7].x & 0x3FF;
uint y = (j/LOOKUP_GAP);
#pragma unroll
for(uint z=0; z<zSIZE; ++z)
V[z] = lookup[CO];
#if (LOOKUP_GAP == 2)
/* Stored every 2nd entry: recompute at most one salsa step. */
if (j&1)
salsa(V);
#else
uint val = j%LOOKUP_GAP;
for (uint z=0; z<val; ++z)
salsa(V);
#endif

#pragma unroll
for(uint z=0; z<zSIZE; ++z)
X[z] ^= V[z];
salsa(X);
}
unshittify(X);
}
  687.  
  688. __attribute__((reqd_work_group_size(WORKSIZE, 1, 1)))
  689. __kernel void search(__global uint4*restrict input, __global uint*restrict output, __global uint4*restrict padcache, uint4 pad0, uint4 pad1)
  690. {
  691. uint4 X[8];
  692. uint4 tstate0, tstate1, ostate0, ostate1, tmp0, tmp1;
  693. uint4 data = (uint4)(input[4].x,input[4].y,input[4].z,get_global_id(0));
  694.  
  695. SHA256(&pad0,&pad1, data, (uint4)(0x80000000U,0,0,0), (uint4)(0,0,0,0), (uint4)(0,0,0,0x280));
  696. SHA256_fresh(&ostate0,&ostate1, pad0^0x5C5C5C5CU, pad1^0x5C5C5C5CU, 0x5C5C5C5CU, 0x5C5C5C5CU);
  697. SHA256_fresh(&tstate0,&tstate1, pad0^0x36363636U, pad1^0x36363636U, 0x36363636U, 0x36363636U);
  698.  
  699. tmp0 = tstate0;
  700. tmp1 = tstate1;
  701. SHA256(&tstate0, &tstate1, input[0],input[1],input[2],input[3]);
  702.  
  703. #pragma unroll
  704. for (uint i=0; i<4; i++)
  705. {
  706. pad0 = tstate0;
  707. pad1 = tstate1;
  708. X[i*2 ] = ostate0;
  709. X[i*2+1] = ostate1;
  710.  
  711. SHA256(&pad0,&pad1, data, (uint4)(i+1,0x80000000U,0,0), (uint4)(0,0,0,0), (uint4)(0,0,0,0x4a0U));
  712. SHA256(X+i*2,X+i*2+1, pad0, pad1, (uint4)(0x80000000U, 0U, 0U, 0U), (uint4)(0U, 0U, 0U, 0x300U));
  713. }
  714. scrypt_core(X,padcache);
  715. SHA256(&tmp0,&tmp1, X[0], X[1], X[2], X[3]);
  716. SHA256(&tmp0,&tmp1, X[4], X[5], X[6], X[7]);
  717. SHA256_fixed(&tmp0,&tmp1);
  718. SHA256(&ostate0,&ostate1, tmp0, tmp1, (uint4)(0x80000000U, 0U, 0U, 0U), (uint4)(0U, 0U, 0U, 0x300U));
  719.  
  720. if ((ostate1.w&0xFFFF) == 0)
  721. output[get_global_id(0)&255] = get_global_id(0);
  722. }
  723.  
  724. /*-
  725. * Copyright 2009 Colin Percival, 2011 ArtForz, 2011 pooler, 2012 mtrlt
  726. * All rights reserved.
  727. *
  728. * Redistribution and use in source and binary forms, with or without
  729. * modification, are permitted provided that the following conditions
  730. * are met:
  731. * 1. Redistributions of source code must retain the above copyright
  732. * notice, this list of conditions and the following disclaimer.
  733. * 2. Redistributions in binary form must reproduce the above copyright
  734. * notice, this list of conditions and the following disclaimer in the
  735. * documentation and/or other materials provided with the distribution.
  736. *
  737. * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
  738. * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
  739. * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
  740. * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
  741. * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
  742. * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
  743. * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
  744. * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
  745. * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
  746. * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
  747. * SUCH DAMAGE.
  748. *
  749. * This file was originally written by Colin Percival as part of the Tarsnap
  750. * online backup system.
  751. */
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement