  1. shader: MESA_SHADER_KERNEL
  2. local-size: 0, 0, 0 (variable)
  3. shared-size: 1
  4. inputs: 3
  5. outputs: 0
  6. uniforms: 0
  7. shared: 0
  8. decl_var shader_in INTERP_MODE_NONE uint64_t @0 (0.x, 0, 0)
  9. decl_var shader_in INTERP_MODE_NONE struct @1 (1, 32, 0)
  10. decl_var shader_in INTERP_MODE_NONE struct @2 (2, 64, 0)
  11. decl_function main (0 params)
  12.  
  13. impl main {
  14. decl_var INTERP_MODE_NONE struct copy_in
  15. decl_var INTERP_MODE_NONE struct copy_in@3
  16. decl_var INTERP_MODE_NONE struct @4
  17. decl_var INTERP_MODE_NONE struct @5
  18. decl_var INTERP_MODE_NONE struct @6
  19. decl_var INTERP_MODE_NONE struct @7
  20. decl_var INTERP_MODE_NONE struct @8
  21. decl_var INTERP_MODE_NONE struct @9
  22. decl_var INTERP_MODE_NONE struct @10
  23. decl_var INTERP_MODE_NONE uint phi
  24. decl_var INTERP_MODE_NONE uint64_t return_tmp
  25. decl_var INTERP_MODE_NONE bool cont
  26. decl_var INTERP_MODE_NONE uint phi@11
  27. decl_var INTERP_MODE_NONE bool phi@12
  28. decl_var INTERP_MODE_NONE struct @13
  29. decl_var INTERP_MODE_NONE uint64_t return_tmp@14
  30. decl_var INTERP_MODE_NONE uint64_t return_tmp@15
  31. decl_var INTERP_MODE_NONE struct @16
  32. decl_var INTERP_MODE_NONE struct @17
  33. decl_var INTERP_MODE_NONE struct @18
  34. decl_var INTERP_MODE_NONE uint64_t return_tmp@19
  35. decl_var INTERP_MODE_NONE uint64_t return_tmp@20
  36. decl_var INTERP_MODE_NONE uint64_t return_tmp@21
  37. decl_var INTERP_MODE_NONE uint64_t return_tmp@22
  38. decl_var INTERP_MODE_NONE uint phi@23
  39. decl_var INTERP_MODE_NONE uint64_t phi@24
  40. decl_var INTERP_MODE_NONE uint64_t return_tmp@25
  41. decl_var INTERP_MODE_NONE uint64_t return_tmp@26
  42. decl_var INTERP_MODE_NONE uint64_t return_tmp@27
  43. decl_var INTERP_MODE_NONE bool cont@28
  44. decl_var INTERP_MODE_NONE uint64_t phi@29
  45. decl_var INTERP_MODE_NONE uint phi@30
  46. decl_var INTERP_MODE_NONE bool phi@31
  47. decl_var INTERP_MODE_NONE uint64_t phi@32
  48. decl_var INTERP_MODE_NONE uint64_t return_tmp@33
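  /* block_0 below loads the three kernel inputs (load_input at byte offsets 0, 8 and 16) and byte-copies them into the function_temp structs declared above. */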
  49. block block_0:
  50. /* preds: */
  51. vec1 64 ssa_0 = undefined
  52. vec1 64 ssa_1 = load_const (0x 0 /* 0.000000 */)
  53. vec1 64 ssa_2 = intrinsic load_input (ssa_1) (0, 0) /* base=0 */ /* component=0 */
  54. vec1 64 ssa_3 = load_const (0x 8 /* 0.000000 */)
  55. vec1 64 ssa_4 = deref_var &copy_in (function_temp struct)
  56. vec1 64 ssa_5 = load_const (0x 0 /* 0.000000 */)
  57. vec1 64 ssa_6 = iadd ssa_3, ssa_5
  58. vec1 64 ssa_7 = deref_struct &ssa_4->field0 (function_temp struct) /* &copy_in.field0 */
  59. vec1 64 ssa_8 = load_const (0x 0 /* 0.000000 */)
  60. vec1 64 ssa_9 = iadd ssa_6, ssa_8
  61. vec1 64 ssa_10 = deref_struct &ssa_7->field0 (function_temp uint64_t[1]) /* &copy_in.field0.field0 */
  62. vec1 64 ssa_11 = load_const (0x 0 /* 0.000000 */)
  63. vec1 64 ssa_12 = load_const (0x 8 /* 0.000000 */)
  64. vec1 64 ssa_13 = imul ssa_11, ssa_12
  65. vec1 64 ssa_14 = iadd ssa_9, ssa_13
  66. vec1 64 ssa_15 = deref_array &(*ssa_10)[0] (function_temp uint64_t) /* &copy_in.field0.field0[0] */
  67. vec1 64 ssa_16 = intrinsic load_input (ssa_14) (0, 0) /* base=0 */ /* component=0 */
  68. intrinsic store_deref (ssa_15, ssa_16) (1, 0) /* wrmask=x */ /* access=0 */
  69. vec1 64 ssa_17 = load_const (0x 10 /* 0.000000 */)
  70. vec1 64 ssa_18 = deref_var &copy_in@3 (function_temp struct)
  71. vec1 64 ssa_19 = load_const (0x 0 /* 0.000000 */)
  72. vec1 64 ssa_20 = iadd ssa_17, ssa_19
  73. vec1 64 ssa_21 = deref_struct &ssa_18->field0 (function_temp struct) /* &copy_in@3.field0 */
  74. vec1 64 ssa_22 = load_const (0x 0 /* 0.000000 */)
  75. vec1 64 ssa_23 = iadd ssa_20, ssa_22
  76. vec1 64 ssa_24 = deref_struct &ssa_21->field0 (function_temp uint64_t[1]) /* &copy_in@3.field0.field0 */
  77. vec1 64 ssa_25 = load_const (0x 8 /* 0.000000 */)
  78. vec1 64 ssa_26 = imul ssa_11, ssa_25
  79. vec1 64 ssa_27 = iadd ssa_23, ssa_26
  80. vec1 64 ssa_28 = deref_array &(*ssa_24)[0] (function_temp uint64_t) /* &copy_in@3.field0.field0[0] */
  81. vec1 64 ssa_29 = intrinsic load_input (ssa_27) (0, 0) /* base=0 */ /* component=0 */
  82. intrinsic store_deref (ssa_28, ssa_29) (1, 0) /* wrmask=x */ /* access=0 */
  83. vec1 1 ssa_30 = load_const (true)
  84. vec1 1 ssa_31 = load_const (false)
  85. vec1 32 ssa_32 = undefined
  86. vec1 32 ssa_33 = load_const (0x00000000 /* 0.000000 */)
  87. vec1 32 ssa_34 = load_const (0x00000001 /* 0.000000 */)
  88. vec1 64 ssa_35 = deref_var &@6 (function_temp struct)
  89. vec1 64 ssa_36 = deref_struct &ssa_35->field0 (function_temp struct) /* &@6.field0 */
  90. vec1 64 ssa_37 = deref_var &@7 (function_temp struct)
  91. vec1 64 ssa_38 = deref_cast (uint8_t *)ssa_37 (function_temp uint8_t)
  92. vec1 64 ssa_39 = deref_cast (uint8_t *)ssa_4 (function_temp uint8_t)
  93. vec1 64 ssa_40 = deref_cast (uint8_t[8] *)ssa_39 (function_temp uint8_t[8])
  94. vec1 64 ssa_41 = deref_cast (uint8_t[8] *)ssa_38 (function_temp uint8_t[8])
  95. vec1 64 ssa_42 = deref_array &(*ssa_40)[0] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_39)[0] */
  96. vec1 64 ssa_43 = deref_array &(*ssa_41)[0] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_38)[0] */
  97. vec1 8 ssa_44 = intrinsic load_deref (ssa_42) (0) /* access=0 */
  98. intrinsic store_deref (ssa_43, ssa_44) (1, 0) /* wrmask=x */ /* access=0 */
  99. vec1 64 ssa_45 = load_const (0x 1 /* 0.000000 */)
  100. vec1 64 ssa_46 = deref_array &(*ssa_40)[1] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_39)[1] */
  101. vec1 64 ssa_47 = deref_array &(*ssa_41)[1] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_38)[1] */
  102. vec1 8 ssa_48 = intrinsic load_deref (ssa_46) (0) /* access=0 */
  103. intrinsic store_deref (ssa_47, ssa_48) (1, 0) /* wrmask=x */ /* access=0 */
  104. vec1 64 ssa_49 = load_const (0x 2 /* 0.000000 */)
  105. vec1 64 ssa_50 = deref_array &(*ssa_40)[2] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_39)[2] */
  106. vec1 64 ssa_51 = deref_array &(*ssa_41)[2] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_38)[2] */
  107. vec1 8 ssa_52 = intrinsic load_deref (ssa_50) (0) /* access=0 */
  108. intrinsic store_deref (ssa_51, ssa_52) (1, 0) /* wrmask=x */ /* access=0 */
  109. vec1 64 ssa_53 = load_const (0x 3 /* 0.000000 */)
  110. vec1 64 ssa_54 = deref_array &(*ssa_40)[3] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_39)[3] */
  111. vec1 64 ssa_55 = deref_array &(*ssa_41)[3] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_38)[3] */
  112. vec1 8 ssa_56 = intrinsic load_deref (ssa_54) (0) /* access=0 */
  113. intrinsic store_deref (ssa_55, ssa_56) (1, 0) /* wrmask=x */ /* access=0 */
  114. vec1 64 ssa_57 = load_const (0x 4 /* 0.000000 */)
  115. vec1 64 ssa_58 = deref_array &(*ssa_40)[4] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_39)[4] */
  116. vec1 64 ssa_59 = deref_array &(*ssa_41)[4] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_38)[4] */
  117. vec1 8 ssa_60 = intrinsic load_deref (ssa_58) (0) /* access=0 */
  118. intrinsic store_deref (ssa_59, ssa_60) (1, 0) /* wrmask=x */ /* access=0 */
  119. vec1 64 ssa_61 = load_const (0x 5 /* 0.000000 */)
  120. vec1 64 ssa_62 = deref_array &(*ssa_40)[5] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_39)[5] */
  121. vec1 64 ssa_63 = deref_array &(*ssa_41)[5] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_38)[5] */
  122. vec1 8 ssa_64 = intrinsic load_deref (ssa_62) (0) /* access=0 */
  123. intrinsic store_deref (ssa_63, ssa_64) (1, 0) /* wrmask=x */ /* access=0 */
  124. vec1 64 ssa_65 = load_const (0x 6 /* 0.000000 */)
  125. vec1 64 ssa_66 = deref_array &(*ssa_40)[6] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_39)[6] */
  126. vec1 64 ssa_67 = deref_array &(*ssa_41)[6] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_38)[6] */
  127. vec1 8 ssa_68 = intrinsic load_deref (ssa_66) (0) /* access=0 */
  128. intrinsic store_deref (ssa_67, ssa_68) (1, 0) /* wrmask=x */ /* access=0 */
  129. vec1 64 ssa_69 = load_const (0x 7 /* 0.000000 */)
  130. vec1 64 ssa_70 = deref_array &(*ssa_40)[7] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_39)[7] */
  131. vec1 64 ssa_71 = deref_array &(*ssa_41)[7] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_38)[7] */
  132. vec1 8 ssa_72 = intrinsic load_deref (ssa_70) (0) /* access=0 */
  133. intrinsic store_deref (ssa_71, ssa_72) (1, 0) /* wrmask=x */ /* access=0 */
  134. vec1 64 ssa_73 = deref_var &@8 (function_temp struct)
  135. vec1 64 ssa_74 = deref_cast (uint8_t *)ssa_73 (function_temp uint8_t)
  136. vec1 64 ssa_75 = deref_cast (uint8_t *)ssa_18 (function_temp uint8_t)
  137. vec1 64 ssa_76 = deref_cast (uint8_t[8] *)ssa_75 (function_temp uint8_t[8])
  138. vec1 64 ssa_77 = deref_cast (uint8_t[8] *)ssa_74 (function_temp uint8_t[8])
  139. vec1 64 ssa_78 = deref_array &(*ssa_76)[0] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_75)[0] */
  140. vec1 64 ssa_79 = deref_array &(*ssa_77)[0] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_74)[0] */
  141. vec1 8 ssa_80 = intrinsic load_deref (ssa_78) (0) /* access=0 */
  142. intrinsic store_deref (ssa_79, ssa_80) (1, 0) /* wrmask=x */ /* access=0 */
  143. vec1 64 ssa_81 = deref_array &(*ssa_76)[1] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_75)[1] */
  144. vec1 64 ssa_82 = deref_array &(*ssa_77)[1] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_74)[1] */
  145. vec1 8 ssa_83 = intrinsic load_deref (ssa_81) (0) /* access=0 */
  146. intrinsic store_deref (ssa_82, ssa_83) (1, 0) /* wrmask=x */ /* access=0 */
  147. vec1 64 ssa_84 = deref_array &(*ssa_76)[2] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_75)[2] */
  148. vec1 64 ssa_85 = deref_array &(*ssa_77)[2] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_74)[2] */
  149. vec1 8 ssa_86 = intrinsic load_deref (ssa_84) (0) /* access=0 */
  150. intrinsic store_deref (ssa_85, ssa_86) (1, 0) /* wrmask=x */ /* access=0 */
  151. vec1 64 ssa_87 = deref_array &(*ssa_76)[3] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_75)[3] */
  152. vec1 64 ssa_88 = deref_array &(*ssa_77)[3] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_74)[3] */
  153. vec1 8 ssa_89 = intrinsic load_deref (ssa_87) (0) /* access=0 */
  154. intrinsic store_deref (ssa_88, ssa_89) (1, 0) /* wrmask=x */ /* access=0 */
  155. vec1 64 ssa_90 = deref_array &(*ssa_76)[4] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_75)[4] */
  156. vec1 64 ssa_91 = deref_array &(*ssa_77)[4] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_74)[4] */
  157. vec1 8 ssa_92 = intrinsic load_deref (ssa_90) (0) /* access=0 */
  158. intrinsic store_deref (ssa_91, ssa_92) (1, 0) /* wrmask=x */ /* access=0 */
  159. vec1 64 ssa_93 = deref_array &(*ssa_76)[5] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_75)[5] */
  160. vec1 64 ssa_94 = deref_array &(*ssa_77)[5] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_74)[5] */
  161. vec1 8 ssa_95 = intrinsic load_deref (ssa_93) (0) /* access=0 */
  162. intrinsic store_deref (ssa_94, ssa_95) (1, 0) /* wrmask=x */ /* access=0 */
  163. vec1 64 ssa_96 = deref_array &(*ssa_76)[6] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_75)[6] */
  164. vec1 64 ssa_97 = deref_array &(*ssa_77)[6] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_74)[6] */
  165. vec1 8 ssa_98 = intrinsic load_deref (ssa_96) (0) /* access=0 */
  166. intrinsic store_deref (ssa_97, ssa_98) (1, 0) /* wrmask=x */ /* access=0 */
  167. vec1 64 ssa_99 = deref_array &(*ssa_76)[7] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_75)[7] */
  168. vec1 64 ssa_100 = deref_array &(*ssa_77)[7] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_74)[7] */
  169. vec1 8 ssa_101 = intrinsic load_deref (ssa_99) (0) /* access=0 */
  170. intrinsic store_deref (ssa_100, ssa_101) (1, 0) /* wrmask=x */ /* access=0 */
  171. vec1 64 ssa_102 = deref_var &@4 (function_temp struct)
  172. vec1 64 ssa_103 = deref_cast (uint8_t *)ssa_102 (function_temp uint8_t)
  173. vec1 64 ssa_104 = deref_cast (uint8_t[8] *)ssa_103 (function_temp uint8_t[8])
  174. vec1 64 ssa_105 = deref_array &(*ssa_104)[0] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_103)[0] */
  175. vec1 8 ssa_106 = intrinsic load_deref (ssa_79) (0) /* access=0 */
  176. intrinsic store_deref (ssa_105, ssa_106) (1, 0) /* wrmask=x */ /* access=0 */
  177. vec1 64 ssa_107 = deref_array &(*ssa_104)[1] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_103)[1] */
  178. vec1 8 ssa_108 = intrinsic load_deref (ssa_82) (0) /* access=0 */
  179. intrinsic store_deref (ssa_107, ssa_108) (1, 0) /* wrmask=x */ /* access=0 */
  180. vec1 64 ssa_109 = deref_array &(*ssa_104)[2] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_103)[2] */
  181. vec1 8 ssa_110 = intrinsic load_deref (ssa_85) (0) /* access=0 */
  182. intrinsic store_deref (ssa_109, ssa_110) (1, 0) /* wrmask=x */ /* access=0 */
  183. vec1 64 ssa_111 = deref_array &(*ssa_104)[3] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_103)[3] */
  184. vec1 8 ssa_112 = intrinsic load_deref (ssa_88) (0) /* access=0 */
  185. intrinsic store_deref (ssa_111, ssa_112) (1, 0) /* wrmask=x */ /* access=0 */
  186. vec1 64 ssa_113 = deref_array &(*ssa_104)[4] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_103)[4] */
  187. vec1 8 ssa_114 = intrinsic load_deref (ssa_91) (0) /* access=0 */
  188. intrinsic store_deref (ssa_113, ssa_114) (1, 0) /* wrmask=x */ /* access=0 */
  189. vec1 64 ssa_115 = deref_array &(*ssa_104)[5] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_103)[5] */
  190. vec1 8 ssa_116 = intrinsic load_deref (ssa_94) (0) /* access=0 */
  191. intrinsic store_deref (ssa_115, ssa_116) (1, 0) /* wrmask=x */ /* access=0 */
  192. vec1 64 ssa_117 = deref_array &(*ssa_104)[6] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_103)[6] */
  193. vec1 8 ssa_118 = intrinsic load_deref (ssa_97) (0) /* access=0 */
  194. intrinsic store_deref (ssa_117, ssa_118) (1, 0) /* wrmask=x */ /* access=0 */
  195. vec1 64 ssa_119 = deref_array &(*ssa_104)[7] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_103)[7] */
  196. vec1 8 ssa_120 = intrinsic load_deref (ssa_100) (0) /* access=0 */
  197. intrinsic store_deref (ssa_119, ssa_120) (1, 0) /* wrmask=x */ /* access=0 */
  198. vec1 64 ssa_121 = deref_var &@5 (function_temp struct)
  199. vec1 64 ssa_122 = deref_cast (uint8_t *)ssa_121 (function_temp uint8_t)
  200. vec1 64 ssa_123 = deref_cast (uint8_t[8] *)ssa_122 (function_temp uint8_t[8])
  201. vec1 64 ssa_124 = deref_array &(*ssa_123)[0] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_122)[0] */
  202. vec1 8 ssa_125 = intrinsic load_deref (ssa_43) (0) /* access=0 */
  203. intrinsic store_deref (ssa_124, ssa_125) (1, 0) /* wrmask=x */ /* access=0 */
  204. vec1 64 ssa_126 = deref_array &(*ssa_123)[1] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_122)[1] */
  205. vec1 8 ssa_127 = intrinsic load_deref (ssa_47) (0) /* access=0 */
  206. intrinsic store_deref (ssa_126, ssa_127) (1, 0) /* wrmask=x */ /* access=0 */
  207. vec1 64 ssa_128 = deref_array &(*ssa_123)[2] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_122)[2] */
  208. vec1 8 ssa_129 = intrinsic load_deref (ssa_51) (0) /* access=0 */
  209. intrinsic store_deref (ssa_128, ssa_129) (1, 0) /* wrmask=x */ /* access=0 */
  210. vec1 64 ssa_130 = deref_array &(*ssa_123)[3] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_122)[3] */
  211. vec1 8 ssa_131 = intrinsic load_deref (ssa_55) (0) /* access=0 */
  212. intrinsic store_deref (ssa_130, ssa_131) (1, 0) /* wrmask=x */ /* access=0 */
  213. vec1 64 ssa_132 = deref_array &(*ssa_123)[4] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_122)[4] */
  214. vec1 8 ssa_133 = intrinsic load_deref (ssa_59) (0) /* access=0 */
  215. intrinsic store_deref (ssa_132, ssa_133) (1, 0) /* wrmask=x */ /* access=0 */
  216. vec1 64 ssa_134 = deref_array &(*ssa_123)[5] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_122)[5] */
  217. vec1 8 ssa_135 = intrinsic load_deref (ssa_63) (0) /* access=0 */
  218. intrinsic store_deref (ssa_134, ssa_135) (1, 0) /* wrmask=x */ /* access=0 */
  219. vec1 64 ssa_136 = deref_array &(*ssa_123)[6] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_122)[6] */
  220. vec1 8 ssa_137 = intrinsic load_deref (ssa_67) (0) /* access=0 */
  221. intrinsic store_deref (ssa_136, ssa_137) (1, 0) /* wrmask=x */ /* access=0 */
  222. vec1 64 ssa_138 = deref_array &(*ssa_123)[7] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_122)[7] */
  223. vec1 8 ssa_139 = intrinsic load_deref (ssa_71) (0) /* access=0 */
  224. intrinsic store_deref (ssa_138, ssa_139) (1, 0) /* wrmask=x */ /* access=0 */
  225. vec1 64 ssa_140 = deref_struct &ssa_36->field0 (function_temp struct) /* &@6.field0.field0 */
  226. vec1 64 ssa_141 = deref_struct &ssa_140->field1 (function_temp uint64_t) /* &@6.field0.field0.field1 */
  227. intrinsic store_deref (ssa_141, ssa_2) (1, 0) /* wrmask=x */ /* access=0 */
  228. vec1 64 ssa_142 = deref_struct &ssa_140->field2 (function_temp struct) /* &@6.field0.field0.field2 */
  229. vec1 64 ssa_143 = deref_cast (uint8_t *)ssa_142 (function_temp uint8_t)
  230. vec1 64 ssa_144 = deref_cast (uint8_t[8] *)ssa_143 (function_temp uint8_t[8])
  231. vec1 64 ssa_145 = deref_array &(*ssa_144)[0] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_143)[0] */
  232. vec1 8 ssa_146 = intrinsic load_deref (ssa_124) (0) /* access=0 */
  233. intrinsic store_deref (ssa_145, ssa_146) (1, 0) /* wrmask=x */ /* access=0 */
  234. vec1 64 ssa_147 = deref_array &(*ssa_144)[1] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_143)[1] */
  235. vec1 8 ssa_148 = intrinsic load_deref (ssa_126) (0) /* access=0 */
  236. intrinsic store_deref (ssa_147, ssa_148) (1, 0) /* wrmask=x */ /* access=0 */
  237. vec1 64 ssa_149 = deref_array &(*ssa_144)[2] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_143)[2] */
  238. vec1 8 ssa_150 = intrinsic load_deref (ssa_128) (0) /* access=0 */
  239. intrinsic store_deref (ssa_149, ssa_150) (1, 0) /* wrmask=x */ /* access=0 */
  240. vec1 64 ssa_151 = deref_array &(*ssa_144)[3] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_143)[3] */
  241. vec1 8 ssa_152 = intrinsic load_deref (ssa_130) (0) /* access=0 */
  242. intrinsic store_deref (ssa_151, ssa_152) (1, 0) /* wrmask=x */ /* access=0 */
  243. vec1 64 ssa_153 = deref_array &(*ssa_144)[4] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_143)[4] */
  244. vec1 8 ssa_154 = intrinsic load_deref (ssa_132) (0) /* access=0 */
  245. intrinsic store_deref (ssa_153, ssa_154) (1, 0) /* wrmask=x */ /* access=0 */
  246. vec1 64 ssa_155 = deref_array &(*ssa_144)[5] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_143)[5] */
  247. vec1 8 ssa_156 = intrinsic load_deref (ssa_134) (0) /* access=0 */
  248. intrinsic store_deref (ssa_155, ssa_156) (1, 0) /* wrmask=x */ /* access=0 */
  249. vec1 64 ssa_157 = deref_array &(*ssa_144)[6] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_143)[6] */
  250. vec1 8 ssa_158 = intrinsic load_deref (ssa_136) (0) /* access=0 */
  251. intrinsic store_deref (ssa_157, ssa_158) (1, 0) /* wrmask=x */ /* access=0 */
  252. vec1 64 ssa_159 = deref_array &(*ssa_144)[7] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_143)[7] */
  253. vec1 8 ssa_160 = intrinsic load_deref (ssa_138) (0) /* access=0 */
  254. intrinsic store_deref (ssa_159, ssa_160) (1, 0) /* wrmask=x */ /* access=0 */
  255. vec1 64 ssa_161 = deref_struct &ssa_140->field3 (function_temp struct) /* &@6.field0.field0.field3 */
  256. vec1 64 ssa_162 = deref_cast (uint8_t *)ssa_161 (function_temp uint8_t)
  257. vec1 64 ssa_163 = deref_cast (uint8_t[8] *)ssa_162 (function_temp uint8_t[8])
  258. vec1 64 ssa_164 = deref_array &(*ssa_163)[0] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_162)[0] */
  259. vec1 8 ssa_165 = intrinsic load_deref (ssa_105) (0) /* access=0 */
  260. intrinsic store_deref (ssa_164, ssa_165) (1, 0) /* wrmask=x */ /* access=0 */
  261. vec1 64 ssa_166 = deref_array &(*ssa_163)[1] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_162)[1] */
  262. vec1 8 ssa_167 = intrinsic load_deref (ssa_107) (0) /* access=0 */
  263. intrinsic store_deref (ssa_166, ssa_167) (1, 0) /* wrmask=x */ /* access=0 */
  264. vec1 64 ssa_168 = deref_array &(*ssa_163)[2] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_162)[2] */
  265. vec1 8 ssa_169 = intrinsic load_deref (ssa_109) (0) /* access=0 */
  266. intrinsic store_deref (ssa_168, ssa_169) (1, 0) /* wrmask=x */ /* access=0 */
  267. vec1 64 ssa_170 = deref_array &(*ssa_163)[3] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_162)[3] */
  268. vec1 8 ssa_171 = intrinsic load_deref (ssa_111) (0) /* access=0 */
  269. intrinsic store_deref (ssa_170, ssa_171) (1, 0) /* wrmask=x */ /* access=0 */
  270. vec1 64 ssa_172 = deref_array &(*ssa_163)[4] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_162)[4] */
  271. vec1 8 ssa_173 = intrinsic load_deref (ssa_113) (0) /* access=0 */
  272. intrinsic store_deref (ssa_172, ssa_173) (1, 0) /* wrmask=x */ /* access=0 */
  273. vec1 64 ssa_174 = deref_array &(*ssa_163)[5] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_162)[5] */
  274. vec1 8 ssa_175 = intrinsic load_deref (ssa_115) (0) /* access=0 */
  275. intrinsic store_deref (ssa_174, ssa_175) (1, 0) /* wrmask=x */ /* access=0 */
  276. vec1 64 ssa_176 = deref_array &(*ssa_163)[6] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_162)[6] */
  277. vec1 8 ssa_177 = intrinsic load_deref (ssa_117) (0) /* access=0 */
  278. intrinsic store_deref (ssa_176, ssa_177) (1, 0) /* wrmask=x */ /* access=0 */
  279. vec1 64 ssa_178 = deref_array &(*ssa_163)[7] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_162)[7] */
  280. vec1 8 ssa_179 = intrinsic load_deref (ssa_119) (0) /* access=0 */
  281. intrinsic store_deref (ssa_178, ssa_179) (1, 0) /* wrmask=x */ /* access=0 */
  282. vec1 64 ssa_180 = deref_var &@9 (function_temp struct)
  283. vec1 64 ssa_181 = deref_cast (struct *)ssa_180 (function_temp struct)
  284. vec1 64 ssa_182 = deref_cast (struct *)ssa_181 (function_temp struct)
  285. vec1 64 ssa_183 = deref_struct &ssa_182->field0 (function_temp uint64_t[1]) /* &((struct *)ssa_181)->field0 */
  286. vec1 64 ssa_184 = deref_array &(*ssa_183)[0] (function_temp uint64_t) /* &((struct *)ssa_181)->field0[0] */
  287. intrinsic store_deref (ssa_184, ssa_11) (1, 0) /* wrmask=x */ /* access=0 */
  288. vec1 64 ssa_185 = deref_var &phi (function_temp uint)
  289. intrinsic store_deref (ssa_185, ssa_33) (1, 0) /* wrmask=x */ /* access=0 */
  290. vec1 64 ssa_186 = deref_var &cont (function_temp bool)
  291. intrinsic store_deref (ssa_186, ssa_31) (1, 0) /* wrmask=x */ /* access=0 */
  292. /* succs: block_1 */
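  /* The loop below appears to compute the global invocation ID per dimension (work_group_id * local_group_size + local_invocation_id, see block_9) and store each component into @9.field0[]. */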
  293. loop {
  294. block block_1:
  295. /* preds: block_0 block_8 block_9 block_10 */
  296. vec1 1 ssa_187 = intrinsic load_deref (ssa_186) (0) /* access=0 */
  297. /* succs: block_2 block_6 */
  298. if ssa_187 {
  299. block block_2:
  300. /* preds: block_1 */
  301. vec1 64 ssa_188 = deref_var &phi@11 (function_temp uint)
  302. vec1 32 ssa_189 = intrinsic load_deref (ssa_188) (0) /* access=0 */
  303. vec1 64 ssa_190 = deref_var &phi@12 (function_temp bool)
  304. vec1 1 ssa_191 = intrinsic load_deref (ssa_190) (0) /* access=0 */
  305. intrinsic store_deref (ssa_185, ssa_189) (1, 0) /* wrmask=x */ /* access=0 */
  306. /* succs: block_3 block_4 */
  307. if ssa_191 {
  308. block block_3:
  309. /* preds: block_2 */
  310. break
  311. /* succs: block_11 */
  312. } else {
  313. block block_4:
  314. /* preds: block_2 */
  315. /* succs: block_5 */
  316. }
  317. block block_5:
  318. /* preds: block_4 */
  319. /* succs: block_7 */
  320. } else {
  321. block block_6:
  322. /* preds: block_1 */
  323. /* succs: block_7 */
  324. }
  325. block block_7:
  326. /* preds: block_5 block_6 */
  327. intrinsic store_deref (ssa_186, ssa_30) (1, 0) /* wrmask=x */ /* access=0 */
  328. vec1 32 ssa_192 = intrinsic load_deref (ssa_185) (0) /* access=0 */
  329. vec1 1 ssa_193 = ilt ssa_192, ssa_34
  330. vec1 1 ssa_194 = inot ssa_193
  331. vec1 64 ssa_195 = deref_var &phi@12 (function_temp bool)
  332. intrinsic store_deref (ssa_195, ssa_30) (1, 0) /* wrmask=x */ /* access=0 */
  333. vec1 64 ssa_196 = deref_var &phi@11 (function_temp uint)
  334. intrinsic store_deref (ssa_196, ssa_32) (1, 0) /* wrmask=x */ /* access=0 */
  335. /* succs: block_8 block_9 */
  336. if ssa_194 {
  337. block block_8:
  338. /* preds: block_7 */
  339. continue
  340. /* succs: block_1 */
  341. } else {
  342. block block_9:
  343. /* preds: block_7 */
  344. vec3 32 ssa_197 = intrinsic load_local_group_size () ()
  345. vec3 64 ssa_198 = u2u64 ssa_197
  346. vec3 32 ssa_199 = intrinsic load_work_group_id () ()
  347. vec3 64 ssa_200 = u2u64 ssa_199
  348. vec3 32 ssa_201 = intrinsic load_local_invocation_id () ()
  349. vec3 64 ssa_202 = u2u64 ssa_201
  350. vec3 64 ssa_203 = imul ssa_200, ssa_198
  351. vec3 64 ssa_204 = iadd ssa_203, ssa_202
  352. vec1 1 ssa_205 = ieq ssa_192, ssa_34
  353. vec1 64 ssa_206 = bcsel ssa_205, ssa_204.y, ssa_204.x
  354. vec1 32 ssa_207 = load_const (0x00000002 /* 0.000000 */)
  355. vec1 1 ssa_208 = ieq ssa_192, ssa_207
  356. vec1 64 ssa_209 = bcsel ssa_208, ssa_204.z, ssa_206
  357. vec1 64 ssa_210 = deref_cast (struct *)ssa_180 (function_temp struct)
  358. vec1 64 ssa_211 = deref_var &return_tmp (function_temp uint64_t)
  359. vec1 64 ssa_212 = deref_struct &ssa_210->field0 (function_temp uint64_t[1]) /* &((struct *)ssa_180)->field0 */
  360. vec1 64 ssa_213 = i2i64 ssa_192
  361. vec1 64 ssa_214 = deref_array &(*ssa_212)[ssa_213] (function_temp uint64_t) /* &((struct *)ssa_180)->field0[ssa_213] */
  362. intrinsic store_deref (ssa_211, ssa_214) (1, 0) /* wrmask=x */ /* access=0 */
  363. vec1 64 ssa_215 = intrinsic load_deref (ssa_211) (0) /* access=0 */
  364. vec1 64 ssa_216 = deref_cast (uint64_t *)ssa_215 (function_temp uint64_t)
  365. intrinsic store_deref (ssa_216, ssa_209) (1, 0) /* wrmask=x */ /* access=0 */
  366. vec1 32 ssa_217 = iadd ssa_192, ssa_34
  367. intrinsic store_deref (ssa_195, ssa_31) (1, 0) /* wrmask=x */ /* access=0 */
  368. intrinsic store_deref (ssa_196, ssa_217) (1, 0) /* wrmask=x */ /* access=0 */
  369. continue
  370. /* succs: block_1 */
  371. }
  372. block block_10:
  373. /* preds: */
  374. /* succs: block_1 */
  375. }
  376. block block_11:
  377. /* preds: block_3 */
  378. vec1 64 ssa_218 = deref_var &@10 (function_temp struct)
  379. vec1 64 ssa_219 = deref_cast (uint8_t *)ssa_218 (function_temp uint8_t)
  380. vec1 64 ssa_220 = deref_cast (uint8_t *)ssa_180 (function_temp uint8_t)
  381. vec1 64 ssa_221 = deref_cast (uint8_t[8] *)ssa_220 (function_temp uint8_t[8])
  382. vec1 64 ssa_222 = deref_cast (uint8_t[8] *)ssa_219 (function_temp uint8_t[8])
  383. vec1 64 ssa_223 = deref_array &(*ssa_221)[0] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_220)[0] */
  384. vec1 64 ssa_224 = deref_array &(*ssa_222)[0] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_219)[0] */
  385. vec1 8 ssa_225 = intrinsic load_deref (ssa_223) (0) /* access=0 */
  386. intrinsic store_deref (ssa_224, ssa_225) (1, 0) /* wrmask=x */ /* access=0 */
  387. vec1 64 ssa_226 = deref_array &(*ssa_221)[1] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_220)[1] */
  388. vec1 64 ssa_227 = deref_array &(*ssa_222)[1] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_219)[1] */
  389. vec1 8 ssa_228 = intrinsic load_deref (ssa_226) (0) /* access=0 */
  390. intrinsic store_deref (ssa_227, ssa_228) (1, 0) /* wrmask=x */ /* access=0 */
  391. vec1 64 ssa_229 = deref_array &(*ssa_221)[2] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_220)[2] */
  392. vec1 64 ssa_230 = deref_array &(*ssa_222)[2] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_219)[2] */
  393. vec1 8 ssa_231 = intrinsic load_deref (ssa_229) (0) /* access=0 */
  394. intrinsic store_deref (ssa_230, ssa_231) (1, 0) /* wrmask=x */ /* access=0 */
  395. vec1 64 ssa_232 = deref_array &(*ssa_221)[3] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_220)[3] */
  396. vec1 64 ssa_233 = deref_array &(*ssa_222)[3] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_219)[3] */
  397. vec1 8 ssa_234 = intrinsic load_deref (ssa_232) (0) /* access=0 */
  398. intrinsic store_deref (ssa_233, ssa_234) (1, 0) /* wrmask=x */ /* access=0 */
  399. vec1 64 ssa_235 = deref_array &(*ssa_221)[4] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_220)[4] */
  400. vec1 64 ssa_236 = deref_array &(*ssa_222)[4] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_219)[4] */
  401. vec1 8 ssa_237 = intrinsic load_deref (ssa_235) (0) /* access=0 */
  402. intrinsic store_deref (ssa_236, ssa_237) (1, 0) /* wrmask=x */ /* access=0 */
  403. vec1 64 ssa_238 = deref_array &(*ssa_221)[5] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_220)[5] */
  404. vec1 64 ssa_239 = deref_array &(*ssa_222)[5] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_219)[5] */
  405. vec1 8 ssa_240 = intrinsic load_deref (ssa_238) (0) /* access=0 */
  406. intrinsic store_deref (ssa_239, ssa_240) (1, 0) /* wrmask=x */ /* access=0 */
  407. vec1 64 ssa_241 = deref_array &(*ssa_221)[6] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_220)[6] */
  408. vec1 64 ssa_242 = deref_array &(*ssa_222)[6] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_219)[6] */
  409. vec1 8 ssa_243 = intrinsic load_deref (ssa_241) (0) /* access=0 */
  410. intrinsic store_deref (ssa_242, ssa_243) (1, 0) /* wrmask=x */ /* access=0 */
  411. vec1 64 ssa_244 = deref_array &(*ssa_221)[7] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_220)[7] */
  412. vec1 64 ssa_245 = deref_array &(*ssa_222)[7] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_219)[7] */
  413. vec1 8 ssa_246 = intrinsic load_deref (ssa_244) (0) /* access=0 */
  414. intrinsic store_deref (ssa_245, ssa_246) (1, 0) /* wrmask=x */ /* access=0 */
  415. vec1 64 ssa_247 = deref_cast (struct *)ssa_35 (function_temp struct)
  416. vec1 64 ssa_248 = deref_cast (struct *)ssa_218 (function_temp struct)
  417. vec1 64 ssa_249 = deref_cast (struct *)ssa_248 (function_temp struct)
  418. vec1 64 ssa_250 = deref_var &return_tmp@14 (function_temp uint64_t)
  419. vec1 64 ssa_251 = deref_struct &ssa_249->field0 (function_temp uint64_t[1]) /* &((struct *)ssa_248)->field0 */
  420. vec1 64 ssa_252 = i2i64 ssa_33
  421. vec1 64 ssa_253 = deref_array &(*ssa_251)[ssa_252] (function_temp uint64_t) /* &((struct *)ssa_248)->field0[ssa_252] */
  422. vec1 64 ssa_254 = intrinsic load_deref (ssa_253) (0) /* access=0 */
  423. intrinsic store_deref (ssa_250, ssa_254) (1, 0) /* wrmask=x */ /* access=0 */
  424. vec1 64 ssa_255 = intrinsic load_deref (ssa_250) (0) /* access=0 */
  425. vec1 32 ssa_256 = u2u32 ssa_255
  426. vec1 64 ssa_257 = deref_struct &ssa_247->field0 (function_temp struct) /* &((struct *)ssa_35)->field0 */
  427. vec1 64 ssa_258 = deref_cast (struct *)ssa_257 (function_temp struct)
  428. vec1 64 ssa_259 = deref_var &@13 (function_temp struct)
  429. vec1 64 ssa_260 = deref_cast (uint8_t *)ssa_259 (function_temp uint8_t)
  430. vec1 64 ssa_261 = deref_cast (uint8_t *)ssa_248 (function_temp uint8_t)
  431. vec1 64 ssa_262 = deref_cast (uint8_t[8] *)ssa_261 (function_temp uint8_t[8])
  432. vec1 64 ssa_263 = deref_cast (uint8_t[8] *)ssa_260 (function_temp uint8_t[8])
  433. vec1 64 ssa_264 = deref_array &(*ssa_262)[0] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_261)[0] */
  434. vec1 64 ssa_265 = deref_array &(*ssa_263)[0] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_260)[0] */
  435. vec1 8 ssa_266 = intrinsic load_deref (ssa_264) (0) /* access=0 */
  436. intrinsic store_deref (ssa_265, ssa_266) (1, 0) /* wrmask=x */ /* access=0 */
  437. vec1 64 ssa_267 = deref_array &(*ssa_262)[1] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_261)[1] */
  438. vec1 64 ssa_268 = deref_array &(*ssa_263)[1] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_260)[1] */
  439. vec1 8 ssa_269 = intrinsic load_deref (ssa_267) (0) /* access=0 */
  440. intrinsic store_deref (ssa_268, ssa_269) (1, 0) /* wrmask=x */ /* access=0 */
  441. vec1 64 ssa_270 = deref_array &(*ssa_262)[2] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_261)[2] */
  442. vec1 64 ssa_271 = deref_array &(*ssa_263)[2] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_260)[2] */
  443. vec1 8 ssa_272 = intrinsic load_deref (ssa_270) (0) /* access=0 */
  444. intrinsic store_deref (ssa_271, ssa_272) (1, 0) /* wrmask=x */ /* access=0 */
  445. vec1 64 ssa_273 = deref_array &(*ssa_262)[3] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_261)[3] */
  446. vec1 64 ssa_274 = deref_array &(*ssa_263)[3] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_260)[3] */
  447. vec1 8 ssa_275 = intrinsic load_deref (ssa_273) (0) /* access=0 */
  448. intrinsic store_deref (ssa_274, ssa_275) (1, 0) /* wrmask=x */ /* access=0 */
  449. vec1 64 ssa_276 = deref_array &(*ssa_262)[4] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_261)[4] */
  450. vec1 64 ssa_277 = deref_array &(*ssa_263)[4] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_260)[4] */
  451. vec1 8 ssa_278 = intrinsic load_deref (ssa_276) (0) /* access=0 */
  452. intrinsic store_deref (ssa_277, ssa_278) (1, 0) /* wrmask=x */ /* access=0 */
  453. vec1 64 ssa_279 = deref_array &(*ssa_262)[5] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_261)[5] */
  454. vec1 64 ssa_280 = deref_array &(*ssa_263)[5] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_260)[5] */
  455. vec1 8 ssa_281 = intrinsic load_deref (ssa_279) (0) /* access=0 */
  456. intrinsic store_deref (ssa_280, ssa_281) (1, 0) /* wrmask=x */ /* access=0 */
  457. vec1 64 ssa_282 = deref_array &(*ssa_262)[6] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_261)[6] */
  458. vec1 64 ssa_283 = deref_array &(*ssa_263)[6] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_260)[6] */
  459. vec1 8 ssa_284 = intrinsic load_deref (ssa_282) (0) /* access=0 */
  460. intrinsic store_deref (ssa_283, ssa_284) (1, 0) /* wrmask=x */ /* access=0 */
  461. vec1 64 ssa_285 = deref_array &(*ssa_262)[7] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_261)[7] */
  462. vec1 64 ssa_286 = deref_array &(*ssa_263)[7] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_260)[7] */
  463. vec1 8 ssa_287 = intrinsic load_deref (ssa_285) (0) /* access=0 */
  464. intrinsic store_deref (ssa_286, ssa_287) (1, 0) /* wrmask=x */ /* access=0 */
  465. vec1 64 ssa_288 = deref_var &return_tmp@15 (function_temp uint64_t)
  466. vec1 64 ssa_289 = deref_cast (struct *)ssa_259 (function_temp struct)
  467. vec1 64 ssa_290 = deref_var &@16 (function_temp struct)
  468. vec1 64 ssa_291 = deref_cast (struct *)ssa_290 (function_temp struct)
  469. vec1 64 ssa_292 = deref_var &return_tmp@21 (function_temp uint64_t)
  470. vec1 64 ssa_293 = deref_cast (struct *)ssa_258 (function_temp struct)
  471. intrinsic store_deref (ssa_292, ssa_293) (1, 0) /* wrmask=x */ /* access=0 */
  472. vec1 64 ssa_294 = intrinsic load_deref (ssa_292) (0) /* access=0 */
  473. vec1 64 ssa_295 = deref_cast (struct *)ssa_294 (function_temp struct)
  474. vec1 64 ssa_296 = deref_struct &ssa_295->field2 (function_temp struct) /* &((struct *)ssa_294)->field2 */
  475. vec1 64 ssa_297 = deref_cast (uint8_t *)ssa_291 (function_temp uint8_t)
  476. vec1 64 ssa_298 = deref_cast (uint8_t *)ssa_296 (function_temp uint8_t)
  477. vec1 64 ssa_299 = deref_cast (uint8_t[8] *)ssa_298 (function_temp uint8_t[8])
  478. vec1 64 ssa_300 = deref_cast (uint8_t[8] *)ssa_297 (function_temp uint8_t[8])
  479. vec1 64 ssa_301 = deref_array &(*ssa_299)[0] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_298)[0] */
  480. vec1 64 ssa_302 = deref_array &(*ssa_300)[0] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_297)[0] */
  481. vec1 8 ssa_303 = intrinsic load_deref (ssa_301) (0) /* access=0 */
  482. intrinsic store_deref (ssa_302, ssa_303) (1, 0) /* wrmask=x */ /* access=0 */
  483. vec1 64 ssa_304 = deref_array &(*ssa_299)[1] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_298)[1] */
  484. vec1 64 ssa_305 = deref_array &(*ssa_300)[1] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_297)[1] */
  485. vec1 8 ssa_306 = intrinsic load_deref (ssa_304) (0) /* access=0 */
  486. intrinsic store_deref (ssa_305, ssa_306) (1, 0) /* wrmask=x */ /* access=0 */
  487. vec1 64 ssa_307 = deref_array &(*ssa_299)[2] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_298)[2] */
  488. vec1 64 ssa_308 = deref_array &(*ssa_300)[2] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_297)[2] */
  489. vec1 8 ssa_309 = intrinsic load_deref (ssa_307) (0) /* access=0 */
  490. intrinsic store_deref (ssa_308, ssa_309) (1, 0) /* wrmask=x */ /* access=0 */
  491. vec1 64 ssa_310 = deref_array &(*ssa_299)[3] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_298)[3] */
  492. vec1 64 ssa_311 = deref_array &(*ssa_300)[3] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_297)[3] */
  493. vec1 8 ssa_312 = intrinsic load_deref (ssa_310) (0) /* access=0 */
  494. intrinsic store_deref (ssa_311, ssa_312) (1, 0) /* wrmask=x */ /* access=0 */
  495. vec1 64 ssa_313 = deref_array &(*ssa_299)[4] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_298)[4] */
  496. vec1 64 ssa_314 = deref_array &(*ssa_300)[4] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_297)[4] */
  497. vec1 8 ssa_315 = intrinsic load_deref (ssa_313) (0) /* access=0 */
  498. intrinsic store_deref (ssa_314, ssa_315) (1, 0) /* wrmask=x */ /* access=0 */
  499. vec1 64 ssa_316 = deref_array &(*ssa_299)[5] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_298)[5] */
  500. vec1 64 ssa_317 = deref_array &(*ssa_300)[5] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_297)[5] */
  501. vec1 8 ssa_318 = intrinsic load_deref (ssa_316) (0) /* access=0 */
  502. intrinsic store_deref (ssa_317, ssa_318) (1, 0) /* wrmask=x */ /* access=0 */
  503. vec1 64 ssa_319 = deref_array &(*ssa_299)[6] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_298)[6] */
  504. vec1 64 ssa_320 = deref_array &(*ssa_300)[6] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_297)[6] */
  505. vec1 8 ssa_321 = intrinsic load_deref (ssa_319) (0) /* access=0 */
  506. intrinsic store_deref (ssa_320, ssa_321) (1, 0) /* wrmask=x */ /* access=0 */
  507. vec1 64 ssa_322 = deref_array &(*ssa_299)[7] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_298)[7] */
  508. vec1 64 ssa_323 = deref_array &(*ssa_300)[7] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_297)[7] */
  509. vec1 8 ssa_324 = intrinsic load_deref (ssa_322) (0) /* access=0 */
  510. intrinsic store_deref (ssa_323, ssa_324) (1, 0) /* wrmask=x */ /* access=0 */
  511. vec1 64 ssa_325 = deref_var &@17 (function_temp struct)
  512. vec1 64 ssa_326 = deref_cast (uint8_t *)ssa_325 (function_temp uint8_t)
  513. vec1 64 ssa_327 = deref_cast (uint8_t *)ssa_289 (function_temp uint8_t)
  514. vec1 64 ssa_328 = deref_cast (uint8_t[8] *)ssa_327 (function_temp uint8_t[8])
  515. vec1 64 ssa_329 = deref_cast (uint8_t[8] *)ssa_326 (function_temp uint8_t[8])
  516. vec1 64 ssa_330 = deref_array &(*ssa_328)[0] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_327)[0] */
  517. vec1 64 ssa_331 = deref_array &(*ssa_329)[0] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_326)[0] */
  518. vec1 8 ssa_332 = intrinsic load_deref (ssa_330) (0) /* access=0 */
  519. intrinsic store_deref (ssa_331, ssa_332) (1, 0) /* wrmask=x */ /* access=0 */
  520. vec1 64 ssa_333 = deref_array &(*ssa_328)[1] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_327)[1] */
  521. vec1 64 ssa_334 = deref_array &(*ssa_329)[1] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_326)[1] */
  522. vec1 8 ssa_335 = intrinsic load_deref (ssa_333) (0) /* access=0 */
  523. intrinsic store_deref (ssa_334, ssa_335) (1, 0) /* wrmask=x */ /* access=0 */
  524. vec1 64 ssa_336 = deref_array &(*ssa_328)[2] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_327)[2] */
  525. vec1 64 ssa_337 = deref_array &(*ssa_329)[2] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_326)[2] */
  526. vec1 8 ssa_338 = intrinsic load_deref (ssa_336) (0) /* access=0 */
  527. intrinsic store_deref (ssa_337, ssa_338) (1, 0) /* wrmask=x */ /* access=0 */
  528. vec1 64 ssa_339 = deref_array &(*ssa_328)[3] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_327)[3] */
  529. vec1 64 ssa_340 = deref_array &(*ssa_329)[3] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_326)[3] */
  530. vec1 8 ssa_341 = intrinsic load_deref (ssa_339) (0) /* access=0 */
  531. intrinsic store_deref (ssa_340, ssa_341) (1, 0) /* wrmask=x */ /* access=0 */
  532. vec1 64 ssa_342 = deref_array &(*ssa_328)[4] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_327)[4] */
  533. vec1 64 ssa_343 = deref_array &(*ssa_329)[4] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_326)[4] */
  534. vec1 8 ssa_344 = intrinsic load_deref (ssa_342) (0) /* access=0 */
  535. intrinsic store_deref (ssa_343, ssa_344) (1, 0) /* wrmask=x */ /* access=0 */
  536. vec1 64 ssa_345 = deref_array &(*ssa_328)[5] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_327)[5] */
  537. vec1 64 ssa_346 = deref_array &(*ssa_329)[5] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_326)[5] */
  538. vec1 8 ssa_347 = intrinsic load_deref (ssa_345) (0) /* access=0 */
  539. intrinsic store_deref (ssa_346, ssa_347) (1, 0) /* wrmask=x */ /* access=0 */
  540. vec1 64 ssa_348 = deref_array &(*ssa_328)[6] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_327)[6] */
  541. vec1 64 ssa_349 = deref_array &(*ssa_329)[6] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_326)[6] */
  542. vec1 8 ssa_350 = intrinsic load_deref (ssa_348) (0) /* access=0 */
  543. intrinsic store_deref (ssa_349, ssa_350) (1, 0) /* wrmask=x */ /* access=0 */
  544. vec1 64 ssa_351 = deref_array &(*ssa_328)[7] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_327)[7] */
  545. vec1 64 ssa_352 = deref_array &(*ssa_329)[7] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_326)[7] */
  546. vec1 8 ssa_353 = intrinsic load_deref (ssa_351) (0) /* access=0 */
  547. intrinsic store_deref (ssa_352, ssa_353) (1, 0) /* wrmask=x */ /* access=0 */
  548. vec1 64 ssa_354 = deref_var &@18 (function_temp struct)
  549. vec1 64 ssa_355 = deref_cast (struct *)ssa_354 (function_temp struct)
  550. vec1 64 ssa_356 = deref_var &return_tmp@22 (function_temp uint64_t)
  551. intrinsic store_deref (ssa_356, ssa_293) (1, 0) /* wrmask=x */ /* access=0 */
  552. vec1 64 ssa_357 = intrinsic load_deref (ssa_356) (0) /* access=0 */
  553. vec1 64 ssa_358 = deref_cast (struct *)ssa_357 (function_temp struct)
  554. vec1 64 ssa_359 = deref_struct &ssa_358->field3 (function_temp struct) /* &((struct *)ssa_357)->field3 */
  555. vec1 64 ssa_360 = deref_cast (uint8_t *)ssa_355 (function_temp uint8_t)
  556. vec1 64 ssa_361 = deref_cast (uint8_t *)ssa_359 (function_temp uint8_t)
  557. vec1 64 ssa_362 = deref_cast (uint8_t[8] *)ssa_361 (function_temp uint8_t[8])
  558. vec1 64 ssa_363 = deref_cast (uint8_t[8] *)ssa_360 (function_temp uint8_t[8])
  559. vec1 64 ssa_364 = deref_array &(*ssa_362)[0] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_361)[0] */
  560. vec1 64 ssa_365 = deref_array &(*ssa_363)[0] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_360)[0] */
  561. vec1 8 ssa_366 = intrinsic load_deref (ssa_364) (0) /* access=0 */
  562. intrinsic store_deref (ssa_365, ssa_366) (1, 0) /* wrmask=x */ /* access=0 */
  563. vec1 64 ssa_367 = deref_array &(*ssa_362)[1] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_361)[1] */
  564. vec1 64 ssa_368 = deref_array &(*ssa_363)[1] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_360)[1] */
  565. vec1 8 ssa_369 = intrinsic load_deref (ssa_367) (0) /* access=0 */
  566. intrinsic store_deref (ssa_368, ssa_369) (1, 0) /* wrmask=x */ /* access=0 */
  567. vec1 64 ssa_370 = deref_array &(*ssa_362)[2] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_361)[2] */
  568. vec1 64 ssa_371 = deref_array &(*ssa_363)[2] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_360)[2] */
  569. vec1 8 ssa_372 = intrinsic load_deref (ssa_370) (0) /* access=0 */
  570. intrinsic store_deref (ssa_371, ssa_372) (1, 0) /* wrmask=x */ /* access=0 */
  571. vec1 64 ssa_373 = deref_array &(*ssa_362)[3] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_361)[3] */
  572. vec1 64 ssa_374 = deref_array &(*ssa_363)[3] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_360)[3] */
  573. vec1 8 ssa_375 = intrinsic load_deref (ssa_373) (0) /* access=0 */
  574. intrinsic store_deref (ssa_374, ssa_375) (1, 0) /* wrmask=x */ /* access=0 */
  575. vec1 64 ssa_376 = deref_array &(*ssa_362)[4] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_361)[4] */
  576. vec1 64 ssa_377 = deref_array &(*ssa_363)[4] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_360)[4] */
  577. vec1 8 ssa_378 = intrinsic load_deref (ssa_376) (0) /* access=0 */
  578. intrinsic store_deref (ssa_377, ssa_378) (1, 0) /* wrmask=x */ /* access=0 */
  579. vec1 64 ssa_379 = deref_array &(*ssa_362)[5] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_361)[5] */
  580. vec1 64 ssa_380 = deref_array &(*ssa_363)[5] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_360)[5] */
  581. vec1 8 ssa_381 = intrinsic load_deref (ssa_379) (0) /* access=0 */
  582. intrinsic store_deref (ssa_380, ssa_381) (1, 0) /* wrmask=x */ /* access=0 */
  583. vec1 64 ssa_382 = deref_array &(*ssa_362)[6] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_361)[6] */
  584. vec1 64 ssa_383 = deref_array &(*ssa_363)[6] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_360)[6] */
  585. vec1 8 ssa_384 = intrinsic load_deref (ssa_382) (0) /* access=0 */
  586. intrinsic store_deref (ssa_383, ssa_384) (1, 0) /* wrmask=x */ /* access=0 */
  587. vec1 64 ssa_385 = deref_array &(*ssa_362)[7] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_361)[7] */
  588. vec1 64 ssa_386 = deref_array &(*ssa_363)[7] (function_temp uint8_t) /* &(*(uint8_t[8] *)ssa_360)[7] */
  589. vec1 8 ssa_387 = intrinsic load_deref (ssa_385) (0) /* access=0 */
  590. intrinsic store_deref (ssa_386, ssa_387) (1, 0) /* wrmask=x */ /* access=0 */
  591. vec1 64 ssa_388 = deref_var &return_tmp@19 (function_temp uint64_t)
  592. vec1 64 ssa_389 = undefined
  593. vec1 32 ssa_390 = undefined
  594. vec1 64 ssa_391 = undefined
  595. vec1 64 ssa_392 = deref_cast (struct *)ssa_325 (function_temp struct)
  596. vec1 64 ssa_393 = deref_var &phi@24 (function_temp uint64_t)
  597. intrinsic store_deref (ssa_393, ssa_11) (1, 0) /* wrmask=x */ /* access=0 */
  598. vec1 64 ssa_394 = deref_var &phi@23 (function_temp uint)
  599. intrinsic store_deref (ssa_394, ssa_33) (1, 0) /* wrmask=x */ /* access=0 */
  600. vec1 64 ssa_395 = deref_var &cont@28 (function_temp bool)
  601. intrinsic store_deref (ssa_395, ssa_31) (1, 0) /* wrmask=x */ /* access=0 */
  602. /* succs: block_12 */
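  /* The second loop appears to fold the per-dimension IDs from @9/@17 together with the values copied from the other two inputs into a single linear index (block_20: acc = acc * a + b + id). */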
  603. loop {
  604. block block_12:
  605. /* preds: block_11 block_19 block_20 block_21 */
  606. vec1 64 ssa_396 = phi block_11: ssa_389, block_19: ssa_406, block_20: ssa_406, block_21: ssa_0
  607. vec1 1 ssa_397 = intrinsic load_deref (ssa_395) (0) /* access=0 */
  608. /* succs: block_13 block_17 */
  609. if ssa_397 {
  610. block block_13:
  611. /* preds: block_12 */
  612. vec1 64 ssa_398 = deref_var &phi@29 (function_temp uint64_t)
  613. vec1 64 ssa_399 = intrinsic load_deref (ssa_398) (0) /* access=0 */
  614. vec1 64 ssa_400 = deref_var &phi@30 (function_temp uint)
  615. vec1 32 ssa_401 = intrinsic load_deref (ssa_400) (0) /* access=0 */
  616. vec1 64 ssa_402 = deref_var &phi@31 (function_temp bool)
  617. vec1 1 ssa_403 = intrinsic load_deref (ssa_402) (0) /* access=0 */
  618. vec1 64 ssa_404 = deref_var &phi@32 (function_temp uint64_t)
  619. intrinsic store_deref (ssa_404, ssa_396) (1, 0) /* wrmask=x */ /* access=0 */
  620. intrinsic store_deref (ssa_393, ssa_399) (1, 0) /* wrmask=x */ /* access=0 */
  621. intrinsic store_deref (ssa_394, ssa_401) (1, 0) /* wrmask=x */ /* access=0 */
  622. /* succs: block_14 block_15 */
  623. if ssa_403 {
  624. block block_14:
  625. /* preds: block_13 */
  626. break
  627. /* succs: block_22 */
  628. } else {
  629. block block_15:
  630. /* preds: block_13 */
  631. /* succs: block_16 */
  632. }
  633. block block_16:
  634. /* preds: block_15 */
  635. /* succs: block_18 */
  636. } else {
  637. block block_17:
  638. /* preds: block_12 */
  639. /* succs: block_18 */
  640. }
  641. block block_18:
  642. /* preds: block_16 block_17 */
  643. intrinsic store_deref (ssa_395, ssa_30) (1, 0) /* wrmask=x */ /* access=0 */
  644. vec1 32 ssa_405 = intrinsic load_deref (ssa_394) (0) /* access=0 */
  645. vec1 64 ssa_406 = intrinsic load_deref (ssa_393) (0) /* access=0 */
  646. vec1 1 ssa_407 = ilt ssa_405, ssa_34
  647. vec1 1 ssa_408 = inot ssa_407
  648. vec1 64 ssa_409 = deref_var &phi@31 (function_temp bool)
  649. intrinsic store_deref (ssa_409, ssa_30) (1, 0) /* wrmask=x */ /* access=0 */
  650. vec1 64 ssa_410 = deref_var &phi@30 (function_temp uint)
  651. intrinsic store_deref (ssa_410, ssa_390) (1, 0) /* wrmask=x */ /* access=0 */
  652. vec1 64 ssa_411 = deref_var &phi@29 (function_temp uint64_t)
  653. intrinsic store_deref (ssa_411, ssa_391) (1, 0) /* wrmask=x */ /* access=0 */
  654. /* succs: block_19 block_20 */
  655. if ssa_408 {
  656. block block_19:
  657. /* preds: block_18 */
  658. continue
  659. /* succs: block_12 */
  660. } else {
  661. block block_20:
  662. /* preds: block_18 */
  663. vec1 64 ssa_412 = deref_cast (struct *)ssa_291 (function_temp struct)
  664. vec1 64 ssa_413 = deref_var &return_tmp@25 (function_temp uint64_t)
  665. vec1 64 ssa_414 = deref_struct &ssa_412->field0 (function_temp uint64_t[1]) /* &((struct *)ssa_291)->field0 */
  666. vec1 64 ssa_415 = i2i64 ssa_405
  667. vec1 64 ssa_416 = deref_array &(*ssa_414)[ssa_415] (function_temp uint64_t) /* &((struct *)ssa_291)->field0[ssa_415] */
  668. intrinsic store_deref (ssa_413, ssa_416) (1, 0) /* wrmask=x */ /* access=0 */
  669. vec1 64 ssa_417 = intrinsic load_deref (ssa_413) (0) /* access=0 */
  670. vec1 64 ssa_418 = deref_cast (uint64_t *)ssa_417 (function_temp uint64_t)
  671. vec1 64 ssa_419 = intrinsic load_deref (ssa_418) (0) /* access=0 */
  672. vec1 64 ssa_420 = imul ssa_406, ssa_419
  673. vec1 64 ssa_421 = deref_cast (struct *)ssa_355 (function_temp struct)
  674. vec1 64 ssa_422 = deref_var &return_tmp@26 (function_temp uint64_t)
  675. vec1 64 ssa_423 = deref_struct &ssa_421->field0 (function_temp uint64_t[1]) /* &((struct *)ssa_355)->field0 */
  676. vec1 64 ssa_424 = deref_array &(*ssa_423)[ssa_415] (function_temp uint64_t) /* &((struct *)ssa_355)->field0[ssa_415] */
  677. intrinsic store_deref (ssa_422, ssa_424) (1, 0) /* wrmask=x */ /* access=0 */
  678. vec1 64 ssa_425 = intrinsic load_deref (ssa_422) (0) /* access=0 */
  679. vec1 64 ssa_426 = deref_cast (uint64_t *)ssa_425 (function_temp uint64_t)
  680. vec1 64 ssa_427 = intrinsic load_deref (ssa_426) (0) /* access=0 */
  681. vec1 64 ssa_428 = iadd ssa_420, ssa_427
  682. vec1 64 ssa_429 = deref_cast (struct *)ssa_392 (function_temp struct)
  683. vec1 64 ssa_430 = deref_var &return_tmp@27 (function_temp uint64_t)
  684. vec1 64 ssa_431 = deref_struct &ssa_429->field0 (function_temp uint64_t[1]) /* &((struct *)ssa_392)->field0 */
  685. vec1 64 ssa_432 = deref_array &(*ssa_431)[ssa_415] (function_temp uint64_t) /* &((struct *)ssa_392)->field0[ssa_415] */
  686. intrinsic store_deref (ssa_430, ssa_432) (1, 0) /* wrmask=x */ /* access=0 */
  687. vec1 64 ssa_433 = intrinsic load_deref (ssa_430) (0) /* access=0 */
  688. vec1 64 ssa_434 = deref_cast (uint64_t *)ssa_433 (function_temp uint64_t)
  689. vec1 64 ssa_435 = intrinsic load_deref (ssa_434) (0) /* access=0 */
  690. vec1 64 ssa_436 = iadd ssa_428, ssa_435
  691. vec1 32 ssa_437 = iadd ssa_405, ssa_34
  692. intrinsic store_deref (ssa_409, ssa_31) (1, 0) /* wrmask=x */ /* access=0 */
  693. intrinsic store_deref (ssa_410, ssa_437) (1, 0) /* wrmask=x */ /* access=0 */
  694. intrinsic store_deref (ssa_411, ssa_436) (1, 0) /* wrmask=x */ /* access=0 */
  695. continue
  696. /* succs: block_12 */
  697. }
  698. block block_21:
  699. /* preds: */
  700. /* succs: block_12 */
  701. }
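  /* block_22 computes the destination address as the pointer held in field1 plus 4 * the index produced by the loop above, then writes the 32-bit value with store_global. */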
  702. block block_22:
  703. /* preds: block_14 */
  704. vec1 64 ssa_438 = intrinsic load_deref (ssa_404) (0) /* access=0 */
  705. intrinsic store_deref (ssa_388, ssa_438) (1, 0) /* wrmask=x */ /* access=0 */
  706. vec1 64 ssa_439 = intrinsic load_deref (ssa_388) (0) /* access=0 */
  707. vec1 64 ssa_440 = deref_var &return_tmp@20 (function_temp uint64_t)
  708. vec1 64 ssa_441 = deref_var &return_tmp@33 (function_temp uint64_t)
  709. intrinsic store_deref (ssa_441, ssa_293) (1, 0) /* wrmask=x */ /* access=0 */
  710. vec1 64 ssa_442 = intrinsic load_deref (ssa_441) (0) /* access=0 */
  711. vec1 64 ssa_443 = deref_cast (struct *)ssa_442 (function_temp struct)
  712. vec1 64 ssa_444 = deref_struct &ssa_443->field1 (function_temp uint64_t) /* &((struct *)ssa_442)->field1 */
  713. vec1 64 ssa_445 = intrinsic load_deref (ssa_444) (0) /* access=0 */
  714. vec1 64 ssa_446 = load_const (0x 4 /* 0.000000 */)
  715. vec1 64 ssa_447 = imul ssa_439, ssa_446
  716. vec1 64 ssa_448 = iadd ssa_445, ssa_447
  717. intrinsic store_deref (ssa_440, ssa_448) (1, 0) /* wrmask=x */ /* access=0 */
  718. vec1 64 ssa_449 = intrinsic load_deref (ssa_440) (0) /* access=0 */
  719. intrinsic store_deref (ssa_288, ssa_449) (1, 0) /* wrmask=x */ /* access=0 */
  720. vec1 64 ssa_450 = intrinsic load_deref (ssa_288) (0) /* access=0 */
  721. intrinsic store_global (ssa_256, ssa_450) (1, 0, 4, 0) /* wrmask=x */ /* access=0 */ /* align_mul=4 */ /* align_offset=0 */
  722. /* succs: block_23 */
  723. block block_23:
  724. }
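
For orientation, the control flow above is consistent with a very small OpenCL kernel: one global pointer argument, a per-dimension global-ID computation, a linearized index, and a single 32-bit store_global at base + 4 * index. A minimal OpenCL C sketch that would plausibly produce a dump of this shape (the kernel name, argument name and exact source are assumptions, not recovered from the dump):

__kernel void write_ids(__global uint *out)
{
    /* get_global_id(0) corresponds to the work_group_id * local_group_size
     * + local_invocation_id computation in the first loop of the dump. */
    size_t id = get_global_id(0);

    /* The final store_global writes a 32-bit value at base + 4 * index,
     * matching the imul/iadd address math in block_22. */
    out[id] = (uint)id;
}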