Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- # ------------------------------------------------------------------
- # float stb_perlin_noise3(float x, float y, float z,
- #                         int x_wrap, int y_wrap, int z_wrap)
- # GCC 4.8.1 (MinGW-w64) output, Microsoft x64 ABI with SEH unwind
- # data.  Inputs: xmm0-xmm2 = x,y,z; r9d = x_wrap; y_wrap/z_wrap are
- # the 5th/6th args, read from the caller's stack at 288/296(%rsp)
- # after the prologue.  Result returned in xmm0.
- # NOTE(review): every line in this file carries a leading "- " from
- # the Pastebin scrape it was copied out of; strip the "^- " prefix
- # before feeding the file to GAS, or it will not assemble.
- # ------------------------------------------------------------------
- .file "test.cpp"
- .text
- .p2align 4,,15
- .globl stb_perlin_noise3
- .def stb_perlin_noise3; .scl 2; .type 32; .endef
- .seh_proc stb_perlin_noise3
- stb_perlin_noise3:
- .LFB37:
- # Prologue: save integer callee-saved regs, then xmm6-xmm15 (all
- # callee-saved under the Microsoft x64 ABI) into the 200-byte frame.
- pushq %r13
- .seh_pushreg %r13
- pushq %r12
- .seh_pushreg %r12
- pushq %rbp
- .seh_pushreg %rbp
- pushq %rdi
- .seh_pushreg %rdi
- pushq %rsi
- .seh_pushreg %rsi
- pushq %rbx
- .seh_pushreg %rbx
- subq $200, %rsp
- .seh_stackalloc 200
- movaps %xmm6, 32(%rsp)
- .seh_savexmm %xmm6, 32
- movaps %xmm7, 48(%rsp)
- .seh_savexmm %xmm7, 48
- movaps %xmm8, 64(%rsp)
- .seh_savexmm %xmm8, 64
- movaps %xmm9, 80(%rsp)
- .seh_savexmm %xmm9, 80
- movaps %xmm10, 96(%rsp)
- .seh_savexmm %xmm10, 96
- movaps %xmm11, 112(%rsp)
- .seh_savexmm %xmm11, 112
- movaps %xmm12, 128(%rsp)
- .seh_savexmm %xmm12, 128
- movaps %xmm13, 144(%rsp)
- .seh_savexmm %xmm13, 144
- movaps %xmm14, 160(%rsp)
- .seh_savexmm %xmm14, 160
- movaps %xmm15, 176(%rsp)
- .seh_savexmm %xmm15, 176
- .seh_endprologue
- # Wrap masks: each axis mask is (wrap - 1) truncated to a byte,
- # i.e. power-of-two wrap periods up to 256 (bp/r13d/r12d below).
- movl 288(%rsp), %eax
- subl $1, %r9d
- movaps %xmm0, %xmm7
- movzbl %r9b, %ebp
- movaps %xmm1, %xmm8
- movaps %xmm2, %xmm6
- unpcklps %xmm7, %xmm7
- cvtps2pd %xmm7, %xmm0
- unpcklps %xmm8, %xmm8
- unpcklps %xmm6, %xmm6
- leal -1(%rax), %r13d
- movl 296(%rsp), %eax
- movzbl %r13b, %r13d
- leal -1(%rax), %r12d
- # Lattice coordinates: this variant calls libm floor() once per
- # axis (each input widened to double first); ebx/esi/eax get the
- # integer cell coords, xmm7/xmm8/xmm6 keep the fractional parts.
- call floor
- movzbl %r12b, %r12d
- cvttsd2si %xmm0, %ebx
- cvtps2pd %xmm8, %xmm0
- call floor
- cvttsd2si %xmm0, %esi
- cvtps2pd %xmm6, %xmm0
- call floor
- movss .LC1(%rip), %xmm14
- cvttsd2si %xmm0, %eax
- movl %ebx, %ecx
- movss .LC2(%rip), %xmm13
- andl %ebp, %ecx
- cvtsi2ss %ebx, %xmm0
- leal 1(%rsi), %edx
- movslq %ecx, %rcx
- movl %esi, %edi
- andl %r13d, %edi
- andl %r13d, %edx
- cvtsi2ss %eax, %xmm1
- leal 1(%rax), %r8d
- movl %eax, %r10d
- # Hash the 8 cell corners through stb__perlin_randtab (rax holds the
- # table base); r0/r1 etc. are chained lookups randtab[randtab[i]+j].
- leaq _ZL19stb__perlin_randtab(%rip), %rax
- andl %r12d, %r10d
- andl %r12d, %r8d
- movl (%rax,%rcx,4), %r11d
- leal 1(%rbx), %ecx
- subss %xmm0, %xmm7
- andl %ebp, %ecx
- cvtsi2ss %esi, %xmm0
- movslq %ecx, %rcx
- subss %xmm1, %xmm6
- movl (%rax,%rcx,4), %ecx
- leal (%r11,%rdi), %r9d
- addl %edx, %r11d
- movslq %r9d, %r9
- subss %xmm0, %xmm8
- movss .LC0(%rip), %xmm0
- movl (%rax,%r9,4), %ebx
- movaps %xmm6, %xmm3
- movslq %r11d, %r11
- addl %ecx, %edx
- addl %ecx, %edi
- movl (%rax,%r11,4), %r11d
- movslq %edx, %rdx
- movaps %xmm8, %xmm15
- movaps %xmm6, %xmm2
- movl (%rax,%rdx,4), %esi
- # Gradient selection: (hash & 63) indexes the 64-byte `indices`
- # table, whose 0..11 result (scaled by 16) picks one of the 12
- # gradient vectors in `basis`.
- leaq _ZZL16stb__perlin_gradifffE7indices(%rip), %rcx
- movaps %xmm6, %xmm12
- leal (%rbx,%r10), %edx
- addl %r8d, %ebx
- movslq %edi, %rdi
- movslq %edx, %rdx
- movslq %ebx, %rbx
- movl (%rax,%rdi,4), %edi
- movl (%rax,%rdx,4), %edx
- # Fade weights: xmm3/xmm4/xmm0 become t*t*t*(t*(t*6-15)+10) for
- # z, y and x respectively (.LC0 = 6.0f, .LC1 = 15.0f, .LC2 = 10.0f).
- mulss %xmm0, %xmm3
- movaps %xmm8, %xmm4
- mulss %xmm0, %xmm4
- mulss %xmm7, %xmm0
- andl $63, %edx
- subss %xmm14, %xmm3
- movzbl (%rcx,%rdx), %r9d
- leaq _ZZL16stb__perlin_gradifffE5basis(%rip), %rdx
- subss %xmm14, %xmm4
- subss %xmm14, %xmm0
- movaps 160(%rsp), %xmm14
- mulss %xmm6, %xmm3
- mulss %xmm8, %xmm4
- mulss %xmm7, %xmm0
- salq $4, %r9
- addq %rdx, %r9
- # Dot products of each corner's gradient with the local offset
- # vector; callee-saved xmm restores are interleaved with the math.
- movss (%r9), %xmm5
- addss %xmm13, %xmm3
- movss 4(%r9), %xmm1
- addss %xmm13, %xmm4
- mulss %xmm7, %xmm5
- addss %xmm13, %xmm0
- movaps 144(%rsp), %xmm13
- mulss %xmm8, %xmm1
- mulss %xmm6, %xmm3
- mulss %xmm8, %xmm4
- mulss %xmm7, %xmm0
- addss %xmm1, %xmm5
- movss 8(%r9), %xmm1
- movl (%rax,%rbx,4), %r9d
- mulss %xmm6, %xmm3
- mulss %xmm6, %xmm1
- mulss %xmm8, %xmm4
- mulss %xmm7, %xmm0
- mulss %xmm6, %xmm3
- andl $63, %r9d
- movzbl (%rcx,%r9), %ebx
- leal (%r11,%r10), %r9d
- addss %xmm1, %xmm5
- movslq %r9d, %r9
- movss .LC3(%rip), %xmm1
- addl %r8d, %r11d
- movl (%rax,%r9,4), %r9d
- mulss %xmm8, %xmm4
- movslq %r11d, %r11
- subss %xmm1, %xmm15
- mulss %xmm7, %xmm0
- subss %xmm1, %xmm12
- salq $4, %rbx
- addq %rdx, %rbx
- andl $63, %r9d
- movzbl (%rcx,%r9), %r9d
- salq $4, %r9
- addq %rdx, %r9
- movss (%r9), %xmm10
- movss 4(%r9), %xmm6
- mulss %xmm7, %xmm10
- mulss %xmm15, %xmm6
- addss %xmm6, %xmm10
- movss 8(%r9), %xmm6
- movl (%rax,%r11,4), %r9d
- leal (%rdi,%r10), %r11d
- addl %r8d, %edi
- mulss %xmm2, %xmm6
- movslq %r11d, %r11
- movslq %edi, %rdi
- movl (%rax,%r11,4), %r11d
- andl $63, %r9d
- addss %xmm6, %xmm10
- movaps %xmm7, %xmm6
- movzbl (%rcx,%r9), %r9d
- andl $63, %r11d
- subss %xmm1, %xmm6
- movzbl (%rcx,%r11), %r11d
- movaps %xmm6, %xmm1
- salq $4, %r9
- addq %rdx, %r9
- salq $4, %r11
- addq %rdx, %r11
- movss (%r11), %xmm11
- mulss %xmm6, %xmm11
- movss 4(%r11), %xmm6
- mulss %xmm8, %xmm6
- addss %xmm6, %xmm11
- movss 8(%r11), %xmm6
- movl (%rax,%rdi,4), %r11d
- mulss %xmm2, %xmm6
- andl $63, %r11d
- movzbl (%rcx,%r11), %r11d
- addss %xmm6, %xmm11
- salq $4, %r11
- addq %rdx, %r11
- addl %esi, %r10d
- addl %esi, %r8d
- movslq %r10d, %r10
- movslq %r8d, %r8
- movl (%rax,%r10,4), %r10d
- movl (%rax,%r8,4), %eax
- andl $63, %r10d
- movzbl (%rcx,%r10), %r10d
- andl $63, %eax
- movzbl (%rcx,%rax), %eax
- salq $4, %r10
- addq %rdx, %r10
- salq $4, %rax
- movss (%r10), %xmm9
- addq %rax, %rdx
- movss 4(%r10), %xmm6
- mulss %xmm1, %xmm9
- mulss %xmm15, %xmm6
- mulss 8(%r10), %xmm2
- addss %xmm6, %xmm9
- movss (%rbx), %xmm6
- mulss %xmm7, %xmm6
- addss %xmm2, %xmm9
- movss 4(%rbx), %xmm2
- mulss %xmm8, %xmm2
- mulss 4(%r11), %xmm8
- addss %xmm2, %xmm6
- movss 8(%rbx), %xmm2
- mulss %xmm12, %xmm2
- addss %xmm2, %xmm6
- movss 8(%r11), %xmm2
- mulss %xmm12, %xmm2
- # Trilinear interpolation: lerp the 8 corner dot products along z
- # (xmm3), then y (xmm4), then x (xmm0 fade weight), a + t*(b-a).
- subss %xmm5, %xmm6
- mulss %xmm3, %xmm6
- addss %xmm5, %xmm6
- movss (%r11), %xmm5
- mulss %xmm1, %xmm5
- mulss (%rdx), %xmm1
- addss %xmm8, %xmm5
- movss 4(%r9), %xmm8
- mulss %xmm15, %xmm8
- addss %xmm2, %xmm5
- movss (%r9), %xmm2
- mulss 4(%rdx), %xmm15
- mulss %xmm7, %xmm2
- movaps 48(%rsp), %xmm7
- subss %xmm11, %xmm5
- addss %xmm15, %xmm1
- movaps 176(%rsp), %xmm15
- addss %xmm8, %xmm2
- movss 8(%r9), %xmm8
- mulss %xmm3, %xmm5
- mulss %xmm12, %xmm8
- mulss 8(%rdx), %xmm12
- addss %xmm11, %xmm5
- movaps 112(%rsp), %xmm11
- addss %xmm8, %xmm2
- movaps 64(%rsp), %xmm8
- addss %xmm12, %xmm1
- movaps 128(%rsp), %xmm12
- subss %xmm10, %xmm2
- subss %xmm9, %xmm1
- mulss %xmm3, %xmm2
- mulss %xmm3, %xmm1
- addss %xmm10, %xmm2
- movaps 96(%rsp), %xmm10
- addss %xmm9, %xmm1
- movaps 80(%rsp), %xmm9
- subss %xmm6, %xmm2
- subss %xmm5, %xmm1
- mulss %xmm4, %xmm2
- mulss %xmm4, %xmm1
- addss %xmm6, %xmm2
- movaps 32(%rsp), %xmm6
- addss %xmm5, %xmm1
- subss %xmm2, %xmm1
- mulss %xmm1, %xmm0
- addss %xmm2, %xmm0
- # Epilogue: xmm6-xmm15 were already restored above; result in xmm0.
- addq $200, %rsp
- popq %rbx
- popq %rsi
- popq %rdi
- popq %rbp
- popq %r12
- popq %r13
- ret
- .seh_endproc
- # ------------------------------------------------------------------
- # float stb_perlin_noise3_no_floor(float x, float y, float z,
- #                                  int x_wrap, int y_wrap, int z_wrap)
- # Same algorithm as stb_perlin_noise3 above, but instead of calling
- # libm floor() it inlines the floor as truncate-then-fix-up:
- # cvttss2si truncates toward zero, and `ucomiss` + `seta` subtracts
- # 1 when the value is negative (0 > x).  NOTE(review): this matches
- # floor() except for exact negative integers — confirm against the
- # C source's intent.  Strip the "^- " paste prefix before assembling.
- # ------------------------------------------------------------------
- .p2align 4,,15
- .globl _Z26stb_perlin_noise3_no_floorfffiii
- .def _Z26stb_perlin_noise3_no_floorfffiii; .scl 2; .type 32; .endef
- .seh_proc _Z26stb_perlin_noise3_no_floorfffiii
- _Z26stb_perlin_noise3_no_floorfffiii:
- .LFB38:
- # Prologue: fewer integer saves than the floor() variant (leaf
- # function — no calls out), but all of xmm6-xmm15 still spilled.
- pushq %rbp
- .seh_pushreg %rbp
- pushq %rdi
- .seh_pushreg %rdi
- pushq %rsi
- .seh_pushreg %rsi
- pushq %rbx
- .seh_pushreg %rbx
- subq $168, %rsp
- .seh_stackalloc 168
- movaps %xmm6, (%rsp)
- .seh_savexmm %xmm6, 0
- movaps %xmm7, 16(%rsp)
- .seh_savexmm %xmm7, 16
- movaps %xmm8, 32(%rsp)
- .seh_savexmm %xmm8, 32
- movaps %xmm9, 48(%rsp)
- .seh_savexmm %xmm9, 48
- movaps %xmm10, 64(%rsp)
- .seh_savexmm %xmm10, 64
- movaps %xmm11, 80(%rsp)
- .seh_savexmm %xmm11, 80
- movaps %xmm12, 96(%rsp)
- .seh_savexmm %xmm12, 96
- movaps %xmm13, 112(%rsp)
- .seh_savexmm %xmm13, 112
- movaps %xmm14, 128(%rsp)
- .seh_savexmm %xmm14, 128
- movaps %xmm15, 144(%rsp)
- .seh_savexmm %xmm15, 144
- .seh_endprologue
- # Inlined floor per axis: truncate, then subtract (0.0 > v) ? 1 : 0.
- # Wrap masks = (wrap-1) & 255, as in the floor() variant.
- xorps %xmm3, %xmm3
- movss .LC1(%rip), %xmm14
- movss .LC2(%rip), %xmm13
- movl 240(%rsp), %eax
- subl $1, %r9d
- cvttss2si %xmm0, %edx
- cvttss2si %xmm1, %ecx
- movaps %xmm0, %xmm7
- movzbl %r9b, %r9d
- leal -1(%rax), %r8d
- movl 248(%rsp), %eax
- movzbl %r8b, %r8d
- movl %r8d, %esi
- leal -1(%rax), %ebx
- xorl %eax, %eax
- ucomiss %xmm0, %xmm3
- movzbl %bl, %ebx
- seta %al
- subl %eax, %edx
- xorl %eax, %eax
- ucomiss %xmm1, %xmm3
- seta %al
- xorl %r10d, %r10d
- subl %eax, %ecx
- ucomiss %xmm2, %xmm3
- cvtsi2ss %ecx, %xmm0
- leal 1(%rcx), %r11d
- cvttss2si %xmm2, %eax
- seta %r10b
- andl %ecx, %esi
- movl %r9d, %ecx
- andl %edx, %ecx
- cvtsi2ss %edx, %xmm3
- andl %r8d, %r11d
- subss %xmm0, %xmm1
- movss .LC0(%rip), %xmm0
- movslq %ecx, %rcx
- subl %r10d, %eax
- movl %ebx, %r10d
- addl $1, %edx
- leal 1(%rax), %r8d
- andl %eax, %r10d
- andl %r9d, %edx
- movaps %xmm1, %xmm4
- movaps %xmm1, %xmm8
- movslq %edx, %rdx
- # Fade weight for y computed up front: t*t*t*(t*(t*6-15)+10).
- mulss %xmm0, %xmm4
- andl %ebx, %r8d
- movaps %xmm8, %xmm15
- subss %xmm3, %xmm7
- subss %xmm14, %xmm4
- mulss %xmm1, %xmm4
- addss %xmm13, %xmm4
- mulss %xmm1, %xmm4
- mulss %xmm1, %xmm4
- mulss %xmm1, %xmm4
- cvtsi2ss %eax, %xmm1
- # Corner hashing through stb__perlin_randtab, as in the other variant.
- leaq _ZL19stb__perlin_randtab(%rip), %rax
- movl (%rax,%rcx,4), %ecx
- movl (%rax,%rdx,4), %edx
- subss %xmm1, %xmm2
- leal (%rcx,%rsi), %r9d
- addl %r11d, %ecx
- movslq %r9d, %r9
- addl %edx, %esi
- addl %r11d, %edx
- movl (%rax,%r9,4), %ebx
- movslq %esi, %rsi
- movslq %edx, %rdx
- movl (%rax,%rsi,4), %edi
- movslq %ecx, %rcx
- movaps %xmm2, %xmm12
- movl (%rax,%rdx,4), %esi
- movaps %xmm2, %xmm3
- movl (%rax,%rcx,4), %ebp
- # (hash & 63) -> indices table -> one of 12 basis gradient vectors.
- leaq _ZZL16stb__perlin_gradifffE7indices(%rip), %rcx
- leal (%rbx,%r10), %edx
- addl %r8d, %ebx
- leal (%rdi,%r10), %r11d
- movslq %edx, %rdx
- movslq %ebx, %rbx
- movl (%rax,%rdx,4), %edx
- movslq %r11d, %r11
- addl %r8d, %edi
- movl (%rax,%r11,4), %r11d
- mulss %xmm0, %xmm3
- movslq %edi, %rdi
- mulss %xmm7, %xmm0
- andl $63, %edx
- movzbl (%rcx,%rdx), %r9d
- leaq _ZZL16stb__perlin_gradifffE5basis(%rip), %rdx
- andl $63, %r11d
- movzbl (%rcx,%r11), %r11d
- subss %xmm14, %xmm3
- subss %xmm14, %xmm0
- movaps 128(%rsp), %xmm14
- mulss %xmm2, %xmm3
- salq $4, %r9
- mulss %xmm7, %xmm0
- addq %rdx, %r9
- salq $4, %r11
- movss (%r9), %xmm5
- addq %rdx, %r11
- movss 4(%r9), %xmm1
- mulss %xmm7, %xmm5
- movss (%r11), %xmm11
- addss %xmm13, %xmm3
- mulss %xmm8, %xmm1
- addss %xmm13, %xmm0
- movaps 112(%rsp), %xmm13
- mulss %xmm2, %xmm3
- mulss %xmm7, %xmm0
- addss %xmm1, %xmm5
- movss 8(%r9), %xmm1
- movl (%rax,%rbx,4), %r9d
- mulss %xmm2, %xmm1
- mulss %xmm2, %xmm3
- mulss %xmm7, %xmm0
- andl $63, %r9d
- movzbl (%rcx,%r9), %ebx
- leal 0(%rbp,%r10), %r9d
- addss %xmm1, %xmm5
- movslq %r9d, %r9
- movss .LC3(%rip), %xmm1
- addl %r8d, %ebp
- movl (%rax,%r9,4), %r9d
- movslq %ebp, %rbp
- addl %esi, %r10d
- subss %xmm1, %xmm15
- mulss %xmm2, %xmm3
- movslq %r10d, %r10
- subss %xmm1, %xmm12
- movl (%rax,%r10,4), %r10d
- salq $4, %rbx
- mulss %xmm7, %xmm0
- addq %rdx, %rbx
- andl $63, %r9d
- movzbl (%rcx,%r9), %r9d
- salq $4, %r9
- addq %rdx, %r9
- # Per-corner gradient dot products (offset vector components are
- # xmm7/xmm8-derived x/y and xmm2/xmm12 z, each minus 1.0 as needed).
- movss (%r9), %xmm10
- movss 4(%r9), %xmm6
- mulss %xmm7, %xmm10
- mulss %xmm15, %xmm6
- addss %xmm6, %xmm10
- movss 8(%r9), %xmm6
- movl (%rax,%rbp,4), %r9d
- mulss %xmm2, %xmm6
- andl $63, %r9d
- addss %xmm6, %xmm10
- movaps %xmm7, %xmm6
- movzbl (%rcx,%r9), %r9d
- subss %xmm1, %xmm6
- mulss %xmm6, %xmm11
- movaps %xmm6, %xmm1
- movss 4(%r11), %xmm6
- salq $4, %r9
- mulss %xmm8, %xmm6
- addq %rdx, %r9
- addss %xmm6, %xmm11
- movss 8(%r11), %xmm6
- movl (%rax,%rdi,4), %r11d
- mulss %xmm2, %xmm6
- andl $63, %r11d
- movzbl (%rcx,%r11), %r11d
- addss %xmm6, %xmm11
- salq $4, %r11
- addq %rdx, %r11
- andl $63, %r10d
- addl %esi, %r8d
- movzbl (%rcx,%r10), %r10d
- movslq %r8d, %r8
- movl (%rax,%r8,4), %eax
- salq $4, %r10
- addq %rdx, %r10
- andl $63, %eax
- movss (%r10), %xmm9
- movss 4(%r10), %xmm6
- mulss %xmm1, %xmm9
- movzbl (%rcx,%rax), %eax
- mulss %xmm15, %xmm6
- mulss 8(%r10), %xmm2
- salq $4, %rax
- addss %xmm6, %xmm9
- movss (%rbx), %xmm6
- addq %rax, %rdx
- mulss %xmm7, %xmm6
- addss %xmm2, %xmm9
- movss 4(%rbx), %xmm2
- mulss %xmm8, %xmm2
- mulss 4(%r11), %xmm8
- addss %xmm2, %xmm6
- movss 8(%rbx), %xmm2
- mulss %xmm12, %xmm2
- addss %xmm2, %xmm6
- movss 8(%r11), %xmm2
- mulss %xmm12, %xmm2
- # Trilinear blend of the 8 dot products (z via xmm3, y via xmm4,
- # x via xmm0); callee-saved xmm restores interleaved throughout.
- subss %xmm5, %xmm6
- mulss %xmm3, %xmm6
- addss %xmm5, %xmm6
- movss (%r11), %xmm5
- mulss %xmm1, %xmm5
- mulss (%rdx), %xmm1
- addss %xmm8, %xmm5
- movss 4(%r9), %xmm8
- mulss %xmm15, %xmm8
- addss %xmm2, %xmm5
- movss (%r9), %xmm2
- mulss 4(%rdx), %xmm15
- mulss %xmm7, %xmm2
- movaps 16(%rsp), %xmm7
- subss %xmm11, %xmm5
- addss %xmm15, %xmm1
- movaps 144(%rsp), %xmm15
- addss %xmm8, %xmm2
- movss 8(%r9), %xmm8
- mulss %xmm3, %xmm5
- mulss %xmm12, %xmm8
- mulss 8(%rdx), %xmm12
- addss %xmm11, %xmm5
- movaps 80(%rsp), %xmm11
- addss %xmm8, %xmm2
- movaps 32(%rsp), %xmm8
- addss %xmm12, %xmm1
- movaps 96(%rsp), %xmm12
- subss %xmm10, %xmm2
- subss %xmm9, %xmm1
- mulss %xmm3, %xmm2
- mulss %xmm3, %xmm1
- addss %xmm10, %xmm2
- movaps 64(%rsp), %xmm10
- addss %xmm9, %xmm1
- movaps 48(%rsp), %xmm9
- subss %xmm6, %xmm2
- subss %xmm5, %xmm1
- mulss %xmm4, %xmm2
- mulss %xmm4, %xmm1
- addss %xmm6, %xmm2
- movaps (%rsp), %xmm6
- addss %xmm5, %xmm1
- subss %xmm2, %xmm1
- mulss %xmm1, %xmm0
- addss %xmm2, %xmm0
- # Epilogue; result in xmm0.
- addq $168, %rsp
- popq %rbx
- popq %rsi
- popq %rdi
- popq %rbp
- ret
- .seh_endproc
- # ------------------------------------------------------------------
- # int printf(const char *fmt, ...)  — C++-linkage shim that forwards
- # to __mingw_vprintf.  COMDAT (.linkonce discard) so duplicate
- # instantiations across TUs are folded.
- # Win64 variadic trick: spill the register args rdx/r8/r9 into the
- # caller-provided home area at 72/80/88(%rsp) so they sit contiguous
- # with any stack-passed varargs, then pass &72(%rsp) as the va_list
- # (here stashed through the 40(%rsp) slot before the call).
- # rcx (the format string) is forwarded untouched as arg 1.
- # ------------------------------------------------------------------
- .section .text$_Z6printfPKcz,"x"
- .linkonce discard
- .p2align 4,,15
- .globl _Z6printfPKcz
- .def _Z6printfPKcz; .scl 2; .type 32; .endef
- .seh_proc _Z6printfPKcz
- _Z6printfPKcz:
- .LFB46:
- subq $56, %rsp
- .seh_stackalloc 56
- .seh_endprologue
- movq %rdx, 72(%rsp)
- leaq 72(%rsp), %rdx
- movq %r8, 80(%rsp)
- movq %r9, 88(%rsp)
- movq %rdx, 40(%rsp)
- call __mingw_vprintf
- addq $56, %rsp
- ret
- .seh_endproc
- # ------------------------------------------------------------------
- # int main(void) — benchmark harness.
- # Times 1024 passes of filling `tex` (4096 bytes = a 32x32 float
- # tile; inner loop writes 128 bytes = 32 floats per row) with
- # inlined 3-D Perlin noise, once using the floor()-calling variant
- # and once using the truncate+fixup variant, then prints ns/call
- # via the printf shim above.  clock() ticks are converted with
- # .LC10 (1e9) and .LC11 (a 2^30 divisor — presumably call count
- # times CLOCKS_PER_SEC; confirm against the C source).
- # Register roles held across the loops (set below): rbx = randtab,
- # rdi = indices table, rsi = basis gradient table.
- # ------------------------------------------------------------------
- .def __main; .scl 2; .type 32; .endef
- .section .rdata,"dr"
- .align 8
- .LC12:
- .ascii "stb_perlin_noise3: %.1f ns/call\12\0"
- .align 8
- .LC13:
- .ascii "stb_perlin_noise3_no_floor: %.1f ns/call\12\0"
- .section .text.startup,"x"
- .p2align 4,,15
- .globl main
- .def main; .scl 2; .type 32; .endef
- .seh_proc main
- main:
- .LFB96:
- pushq %r15
- .seh_pushreg %r15
- pushq %r14
- .seh_pushreg %r14
- pushq %r13
- .seh_pushreg %r13
- pushq %r12
- .seh_pushreg %r12
- pushq %rbp
- .seh_pushreg %rbp
- pushq %rdi
- .seh_pushreg %rdi
- pushq %rsi
- .seh_pushreg %rsi
- pushq %rbx
- .seh_pushreg %rbx
- subq $216, %rsp
- .seh_stackalloc 216
- movaps %xmm6, 48(%rsp)
- .seh_savexmm %xmm6, 48
- movaps %xmm7, 64(%rsp)
- .seh_savexmm %xmm7, 64
- movaps %xmm8, 80(%rsp)
- .seh_savexmm %xmm8, 80
- movaps %xmm9, 96(%rsp)
- .seh_savexmm %xmm9, 96
- movaps %xmm10, 112(%rsp)
- .seh_savexmm %xmm10, 112
- movaps %xmm11, 128(%rsp)
- .seh_savexmm %xmm11, 128
- movaps %xmm12, 144(%rsp)
- .seh_savexmm %xmm12, 144
- movaps %xmm13, 160(%rsp)
- .seh_savexmm %xmm13, 160
- movaps %xmm14, 176(%rsp)
- .seh_savexmm %xmm14, 176
- movaps %xmm15, 192(%rsp)
- .seh_savexmm %xmm15, 192
- .seh_endprologue
- # Pin table base pointers for the inlined noise loops.
- leaq _ZL19stb__perlin_randtab(%rip), %rbx
- xorl %r15d, %r15d
- leaq _ZZL16stb__perlin_gradifffE7indices(%rip), %rdi
- leaq _ZZL16stb__perlin_gradifffE5basis(%rip), %rsi
- call __main
- # ---- Benchmark 1: inlined stb_perlin_noise3 (libm floor) ----
- call clock
- movss .LC7(%rip), %xmm6
- movl $1024, 44(%rsp)
- cvtsi2sd %eax, %xmm7
- movss .LC3(%rip), %xmm8
- movss .LC8(%rip), %xmm10
- movss .LC9(%rip), %xmm9
- movsd %xmm7, 32(%rsp)
- .L5:
- # Outer pass: 40(%rsp) is the row coordinate, starting at 1/64
- # (0x3c800000) and stepping by 1/32 (.LC9) per 32-float row.
- leaq tex(%rip), %r13
- movl $0x3c800000, 40(%rsp)
- xorpd %xmm0, %xmm0
- .p2align 4,,10
- .L10:
- # Row setup: floor the row coordinate, build the per-row fade
- # weight xmm13 = t^3*(t*(6t-15)+10) and the 5-bit lattice masks.
- cvttsd2si %xmm0, %eax
- movss 40(%rsp), %xmm11
- xorl %r14d, %r14d
- movss .LC0(%rip), %xmm13
- movss .LC6(%rip), %xmm12
- cvtsi2ss %eax, %xmm0
- leal 1(%rax), %r12d
- movl %eax, %ebp
- andl $31, %ebp
- andl $31, %r12d
- subss %xmm0, %xmm11
- movd %r15, %xmm0
- mulss %xmm11, %xmm13
- movaps %xmm11, %xmm7
- subss %xmm8, %xmm7
- subss .LC1(%rip), %xmm13
- mulss %xmm11, %xmm13
- addss .LC2(%rip), %xmm13
- mulss %xmm11, %xmm13
- mulss %xmm11, %xmm13
- mulss %xmm11, %xmm13
- jmp .L8
- .p2align 4,,10
- .L6:
- # Column coordinate floor via libm floor() (this is the variant
- # being timed); xmm12 advances by 1/32 per column.
- unpcklps %xmm12, %xmm12
- cvtps2pd %xmm12, %xmm0
- call floor
- .L8:
- # Inner loop body: hash the 8 cell corners (randtab via rbx),
- # select gradients (indices via rdi, basis via rsi), form the
- # dot products and trilinearly blend — one output float per pass.
- cvttsd2si %xmm0, %eax
- movaps %xmm12, %xmm4
- addss %xmm9, %xmm12
- movl %eax, %edx
- cvtsi2ss %eax, %xmm0
- addl $1, %eax
- andl $31, %edx
- andl $31, %eax
- movl (%rbx,%rdx,4), %edx
- movl (%rbx,%rax,4), %eax
- subss %xmm0, %xmm4
- leal (%rdx,%rbp), %ecx
- addl %r12d, %edx
- movslq %ecx, %rcx
- movslq %edx, %rdx
- movl (%rbx,%rcx,4), %r8d
- leal (%rax,%rbp), %ecx
- addl %r12d, %eax
- cltq
- movl (%rbx,%rdx,4), %edx
- movslq %ecx, %rcx
- movl (%rbx,%rax,4), %r9d
- movl (%rbx,%rcx,4), %ecx
- movslq %r8d, %rax
- addl $1, %r8d
- movl (%rbx,%rax,4), %eax
- movslq %r8d, %r8
- andl $63, %eax
- movzbl (%rdi,%rax), %eax
- salq $4, %rax
- addq %rsi, %rax
- movss (%rax), %xmm3
- movss 4(%rax), %xmm0
- mulss %xmm4, %xmm3
- mulss %xmm11, %xmm0
- addss %xmm0, %xmm3
- movss 8(%rax), %xmm0
- movl (%rbx,%r8,4), %eax
- mulss %xmm6, %xmm0
- andl $63, %eax
- movzbl (%rdi,%rax), %r8d
- movslq %edx, %rax
- addss %xmm0, %xmm3
- movl (%rbx,%rax,4), %eax
- addl $1, %edx
- movslq %edx, %rdx
- salq $4, %r8
- andl $63, %eax
- addq %rsi, %r8
- movzbl (%rdi,%rax), %eax
- movss 4(%r8), %xmm15
- mulss %xmm11, %xmm15
- salq $4, %rax
- addq %rsi, %rax
- movss (%rax), %xmm14
- movss 4(%rax), %xmm0
- mulss %xmm4, %xmm14
- mulss %xmm7, %xmm0
- addss %xmm0, %xmm14
- movss 8(%rax), %xmm0
- movl (%rbx,%rdx,4), %eax
- mulss %xmm6, %xmm0
- andl $63, %eax
- movzbl (%rdi,%rax), %edx
- movslq %ecx, %rax
- addss %xmm0, %xmm14
- movl (%rbx,%rax,4), %eax
- movaps %xmm4, %xmm0
- addl $1, %ecx
- subss %xmm8, %xmm0
- movslq %ecx, %rcx
- salq $4, %rdx
- andl $63, %eax
- addq %rsi, %rdx
- movzbl (%rdi,%rax), %eax
- salq $4, %rax
- addq %rsi, %rax
- movss (%rax), %xmm1
- movss 4(%rax), %xmm2
- mulss %xmm0, %xmm1
- mulss %xmm11, %xmm2
- addss %xmm2, %xmm1
- movss 8(%rax), %xmm2
- movl (%rbx,%rcx,4), %eax
- mulss %xmm6, %xmm2
- andl $63, %eax
- movzbl (%rdi,%rax), %ecx
- movslq %r9d, %rax
- addss %xmm2, %xmm1
- movl (%rbx,%rax,4), %eax
- addl $1, %r9d
- movslq %r9d, %r9
- salq $4, %rcx
- andl $63, %eax
- addq %rsi, %rcx
- movzbl (%rdi,%rax), %eax
- salq $4, %rax
- addq %rsi, %rax
- movss (%rax), %xmm5
- movss 4(%rax), %xmm2
- mulss %xmm0, %xmm5
- mulss %xmm7, %xmm2
- addss %xmm2, %xmm5
- movss 8(%rax), %xmm2
- movl (%rbx,%r9,4), %eax
- mulss %xmm6, %xmm2
- andl $63, %eax
- addss %xmm2, %xmm5
- movss (%r8), %xmm2
- movzbl (%rdi,%rax), %eax
- mulss %xmm4, %xmm2
- salq $4, %rax
- addss %xmm15, %xmm2
- movss 8(%r8), %xmm15
- addq %rsi, %rax
- mulss %xmm10, %xmm15
- addss %xmm15, %xmm2
- movss 4(%rcx), %xmm15
- mulss %xmm11, %xmm15
- subss %xmm3, %xmm2
- mulss %xmm6, %xmm2
- addss %xmm3, %xmm2
- movss (%rcx), %xmm3
- mulss %xmm0, %xmm3
- mulss (%rax), %xmm0
- addss %xmm15, %xmm3
- movss 8(%rcx), %xmm15
- mulss %xmm10, %xmm15
- addss %xmm15, %xmm3
- movss 4(%rdx), %xmm15
- mulss %xmm7, %xmm15
- subss %xmm1, %xmm3
- mulss %xmm6, %xmm3
- addss %xmm1, %xmm3
- movss (%rdx), %xmm1
- mulss %xmm4, %xmm1
- addss %xmm15, %xmm1
- movss 8(%rdx), %xmm15
- mulss %xmm10, %xmm15
- addss %xmm15, %xmm1
- subss %xmm14, %xmm1
- mulss %xmm6, %xmm1
- addss %xmm14, %xmm1
- subss %xmm2, %xmm1
- mulss %xmm13, %xmm1
- addss %xmm2, %xmm1
- # Per-column fade weight xmm2 (same 6t^5-15t^4+10t^3 polynomial).
- movss .LC0(%rip), %xmm2
- mulss %xmm4, %xmm2
- subss .LC1(%rip), %xmm2
- mulss %xmm4, %xmm2
- addss .LC2(%rip), %xmm2
- mulss %xmm4, %xmm2
- mulss %xmm4, %xmm2
- mulss %xmm4, %xmm2
- movss 4(%rax), %xmm4
- mulss %xmm7, %xmm4
- addss %xmm4, %xmm0
- movss 8(%rax), %xmm4
- mulss %xmm10, %xmm4
- addss %xmm4, %xmm0
- subss %xmm5, %xmm0
- mulss %xmm6, %xmm0
- addss %xmm5, %xmm0
- subss %xmm3, %xmm0
- mulss %xmm13, %xmm0
- addss %xmm3, %xmm0
- subss %xmm1, %xmm0
- mulss %xmm0, %xmm2
- addss %xmm1, %xmm2
- # Store one texel; r14 walks 0..124 in 4-byte steps (32 floats/row).
- movss %xmm2, 0(%r13,%r14)
- addq $4, %r14
- cmpq $128, %r14
- jne .L6
- # End of row: advance row coordinate by 1/32, stop at tex+4096.
- movss 40(%rsp), %xmm7
- subq $-128, %r13
- leaq 4096+tex(%rip), %rax
- addss %xmm9, %xmm7
- cmpq %rax, %r13
- movss %xmm7, 40(%rsp)
- je .L7
- unpcklps %xmm7, %xmm7
- cvtps2pd %xmm7, %xmm0
- call floor
- jmp .L10
- .L7:
- subl $1, 44(%rsp)
- jne .L5
- .p2align 4,,6
- # Report: elapsed clocks -> ns/call; the double result is passed in
- # xmm1 AND duplicated into rdx, as Win64 varargs require.
- call clock
- leaq .LC12(%rip), %rcx
- cvtsi2sd %eax, %xmm0
- subsd 32(%rsp), %xmm0
- mulsd .LC10(%rip), %xmm0
- divsd .LC11(%rip), %xmm0
- movapd %xmm0, %xmm1
- movd %xmm0, %rdx
- call _Z6printfPKcz
- # ---- Benchmark 2: inlined no_floor variant (truncate + seta) ----
- call clock
- movl $1024, %ecx
- movl $0x3c800000, %r11d
- cvtsi2sd %eax, %xmm7
- movl %r11d, %r15d
- movl %ecx, %r14d
- movsd %xmm7, 32(%rsp)
- .L11:
- leaq tex(%rip), %r13
- movl %r11d, 40(%rsp)
- .p2align 4,,10
- .L16:
- # Row setup with inlined floor (truncate, subtract 1 if negative).
- movss 40(%rsp), %xmm11
- xorps %xmm7, %xmm7
- xorl %edx, %edx
- movss .LC0(%rip), %xmm12
- ucomiss %xmm11, %xmm7
- movd %r15d, %xmm7
- cvttss2si %xmm11, %eax
- seta %dl
- subl %edx, %eax
- cvtsi2ss %eax, %xmm0
- leal 1(%rax), %ebp
- movl %eax, %r12d
- andl $31, %r12d
- andl $31, %ebp
- xorl %eax, %eax
- subss %xmm0, %xmm11
- mulss %xmm11, %xmm12
- movaps %xmm11, %xmm13
- subss %xmm8, %xmm13
- subss .LC1(%rip), %xmm12
- mulss %xmm11, %xmm12
- addss .LC2(%rip), %xmm12
- mulss %xmm11, %xmm12
- mulss %xmm11, %xmm12
- mulss %xmm11, %xmm12
- .p2align 4,,10
- .L14:
- # Inner loop: same corner-hash / gradient / trilinear-blend body
- # as .L8 above, but the column floor is inlined too (no calls).
- cvttss2si %xmm7, %edx
- xorps %xmm5, %xmm5
- xorl %ecx, %ecx
- ucomiss %xmm7, %xmm5
- movaps %xmm7, %xmm4
- addss %xmm9, %xmm7
- seta %cl
- subl %ecx, %edx
- movl %edx, %ecx
- cvtsi2ss %edx, %xmm0
- addl $1, %edx
- andl $31, %ecx
- andl $31, %edx
- movl (%rbx,%rcx,4), %ecx
- movl (%rbx,%rdx,4), %edx
- subss %xmm0, %xmm4
- leal (%rcx,%r12), %r8d
- addl %ebp, %ecx
- leal (%rdx,%r12), %r9d
- movslq %r8d, %r8
- addl %ebp, %edx
- movl (%rbx,%r8,4), %r8d
- movslq %edx, %rdx
- movslq %ecx, %rcx
- movl (%rbx,%rdx,4), %r10d
- movslq %r9d, %r9
- movl (%rbx,%rcx,4), %ecx
- movl (%rbx,%r9,4), %r9d
- movslq %r8d, %rdx
- addl $1, %r8d
- movl (%rbx,%rdx,4), %edx
- movslq %r8d, %r8
- andl $63, %edx
- movzbl (%rdi,%rdx), %edx
- salq $4, %rdx
- addq %rsi, %rdx
- movss (%rdx), %xmm3
- movss 4(%rdx), %xmm0
- mulss %xmm4, %xmm3
- mulss %xmm11, %xmm0
- addss %xmm0, %xmm3
- movss 8(%rdx), %xmm0
- movl (%rbx,%r8,4), %edx
- mulss %xmm6, %xmm0
- andl $63, %edx
- movzbl (%rdi,%rdx), %r8d
- movslq %ecx, %rdx
- addss %xmm0, %xmm3
- movl (%rbx,%rdx,4), %edx
- addl $1, %ecx
- movslq %ecx, %rcx
- salq $4, %r8
- andl $63, %edx
- addq %rsi, %r8
- movzbl (%rdi,%rdx), %edx
- movss 4(%r8), %xmm15
- mulss %xmm11, %xmm15
- salq $4, %rdx
- addq %rsi, %rdx
- movss (%rdx), %xmm14
- movss 4(%rdx), %xmm0
- mulss %xmm4, %xmm14
- mulss %xmm13, %xmm0
- addss %xmm0, %xmm14
- movss 8(%rdx), %xmm0
- movl (%rbx,%rcx,4), %edx
- movslq %r9d, %rcx
- addl $1, %r9d
- movl (%rbx,%rcx,4), %ecx
- mulss %xmm6, %xmm0
- movslq %r9d, %r9
- andl $63, %edx
- andl $63, %ecx
- addss %xmm0, %xmm14
- movaps %xmm4, %xmm0
- movzbl (%rdi,%rcx), %ecx
- subss %xmm8, %xmm0
- movzbl (%rdi,%rdx), %edx
- salq $4, %rcx
- addq %rsi, %rcx
- salq $4, %rdx
- movss (%rcx), %xmm1
- addq %rsi, %rdx
- movss 4(%rcx), %xmm2
- mulss %xmm0, %xmm1
- mulss %xmm11, %xmm2
- addss %xmm2, %xmm1
- movss 8(%rcx), %xmm2
- movl (%rbx,%r9,4), %ecx
- movslq %r10d, %r9
- addl $1, %r10d
- movl (%rbx,%r9,4), %r9d
- mulss %xmm6, %xmm2
- movslq %r10d, %r10
- andl $63, %ecx
- andl $63, %r9d
- addss %xmm2, %xmm1
- movzbl (%rdi,%rcx), %ecx
- movzbl (%rdi,%r9), %r9d
- salq $4, %rcx
- salq $4, %r9
- addq %rsi, %rcx
- addq %rsi, %r9
- movss (%r9), %xmm5
- movss 4(%r9), %xmm2
- mulss %xmm0, %xmm5
- mulss %xmm13, %xmm2
- addss %xmm2, %xmm5
- movss 8(%r9), %xmm2
- movl (%rbx,%r10,4), %r9d
- mulss %xmm6, %xmm2
- andl $63, %r9d
- addss %xmm2, %xmm5
- movss (%r8), %xmm2
- movzbl (%rdi,%r9), %r9d
- mulss %xmm4, %xmm2
- salq $4, %r9
- addss %xmm15, %xmm2
- movss 8(%r8), %xmm15
- addq %rsi, %r9
- mulss %xmm10, %xmm15
- addss %xmm15, %xmm2
- movss 4(%rcx), %xmm15
- mulss %xmm11, %xmm15
- subss %xmm3, %xmm2
- mulss %xmm6, %xmm2
- addss %xmm3, %xmm2
- movss (%rcx), %xmm3
- mulss %xmm0, %xmm3
- mulss (%r9), %xmm0
- addss %xmm15, %xmm3
- movss 8(%rcx), %xmm15
- mulss %xmm10, %xmm15
- addss %xmm15, %xmm3
- movss 4(%rdx), %xmm15
- mulss %xmm13, %xmm15
- subss %xmm1, %xmm3
- mulss %xmm6, %xmm3
- addss %xmm1, %xmm3
- movss (%rdx), %xmm1
- mulss %xmm4, %xmm1
- addss %xmm15, %xmm1
- movss 8(%rdx), %xmm15
- mulss %xmm10, %xmm15
- addss %xmm15, %xmm1
- subss %xmm14, %xmm1
- mulss %xmm6, %xmm1
- addss %xmm14, %xmm1
- subss %xmm2, %xmm1
- mulss %xmm12, %xmm1
- addss %xmm2, %xmm1
- movss .LC0(%rip), %xmm2
- mulss %xmm4, %xmm2
- subss .LC1(%rip), %xmm2
- mulss %xmm4, %xmm2
- addss .LC2(%rip), %xmm2
- mulss %xmm4, %xmm2
- mulss %xmm4, %xmm2
- mulss %xmm4, %xmm2
- movss 4(%r9), %xmm4
- mulss %xmm13, %xmm4
- addss %xmm4, %xmm0
- movss 8(%r9), %xmm4
- mulss %xmm10, %xmm4
- addss %xmm4, %xmm0
- subss %xmm5, %xmm0
- mulss %xmm6, %xmm0
- addss %xmm5, %xmm0
- subss %xmm3, %xmm0
- mulss %xmm12, %xmm0
- addss %xmm3, %xmm0
- subss %xmm1, %xmm0
- mulss %xmm0, %xmm2
- addss %xmm1, %xmm2
- movss %xmm2, 0(%r13,%rax)
- addq $4, %rax
- cmpq $128, %rax
- jne .L14
- movss 40(%rsp), %xmm7
- subq $-128, %r13
- leaq 4096+tex(%rip), %rax
- addss %xmm9, %xmm7
- cmpq %r13, %rax
- movss %xmm7, 40(%rsp)
- jne .L16
- subl $1, %r14d
- jne .L11
- # Report timing for the no_floor variant, restore xmm6-15, return 0.
- call clock
- leaq .LC13(%rip), %rcx
- cvtsi2sd %eax, %xmm0
- subsd 32(%rsp), %xmm0
- mulsd .LC10(%rip), %xmm0
- divsd .LC11(%rip), %xmm0
- movapd %xmm0, %xmm1
- movd %xmm0, %rdx
- call _Z6printfPKcz
- nop
- movaps 48(%rsp), %xmm6
- xorl %eax, %eax
- movaps 64(%rsp), %xmm7
- movaps 80(%rsp), %xmm8
- movaps 96(%rsp), %xmm9
- movaps 112(%rsp), %xmm10
- movaps 128(%rsp), %xmm11
- movaps 144(%rsp), %xmm12
- movaps 160(%rsp), %xmm13
- movaps 176(%rsp), %xmm14
- movaps 192(%rsp), %xmm15
- addq $216, %rsp
- popq %rbx
- popq %rsi
- popq %rdi
- popq %rbp
- popq %r12
- popq %r13
- popq %r14
- popq %r15
- ret
- .seh_endproc
- # stb__perlin_grad's `basis` table: 12 gradient vectors, 16 bytes
- # each (x, y, z as floats plus 4 bytes of pad — hence the salq $4
- # indexing in the code above).  1065353216 = 1.0f bit pattern,
- # -1082130432 = -1.0f, 0 = 0.0f.  NOTE(review): this lives in
- # writable .data although it is never written — presumably the C
- # source lacked `const`.
- .data
- .align 32
- _ZZL16stb__perlin_gradifffE5basis:
- .long 1065353216
- .long 1065353216
- .long 0
- .space 4
- .long -1082130432
- .long 1065353216
- .long 0
- .space 4
- .long 1065353216
- .long -1082130432
- .long 0
- .space 4
- .long -1082130432
- .long -1082130432
- .long 0
- .space 4
- .long 1065353216
- .long 0
- .long 1065353216
- .space 4
- .long -1082130432
- .long 0
- .long 1065353216
- .space 4
- .long 1065353216
- .long 0
- .long -1082130432
- .space 4
- .long -1082130432
- .long 0
- .long -1082130432
- .space 4
- .long 0
- .long 1065353216
- .long 1065353216
- .space 4
- .long 0
- .long -1082130432
- .long 1065353216
- .space 4
- .long 0
- .long 1065353216
- .long -1082130432
- .space 4
- .long 0
- .long -1082130432
- .long -1082130432
- .space 4
- # stb__perlin_grad's `indices` table: 64 bytes, indexed by hash & 63,
- # mapping each hash to one of the 12 basis gradients above.  The
- # second 16-byte row (0,9,1,11 run) is the deliberately irregular
- # group; the rest cycles 0..11.
- .section .rdata,"dr"
- .align 32
- _ZZL16stb__perlin_gradifffE7indices:
- .byte 0
- .byte 1
- .byte 2
- .byte 3
- .byte 4
- .byte 5
- .byte 6
- .byte 7
- .byte 8
- .byte 9
- .byte 10
- .byte 11
- .byte 0
- .byte 9
- .byte 1
- .byte 11
- .byte 0
- .byte 1
- .byte 2
- .byte 3
- .byte 4
- .byte 5
- .byte 6
- .byte 7
- .byte 8
- .byte 9
- .byte 10
- .byte 11
- .byte 0
- .byte 1
- .byte 2
- .byte 3
- .byte 4
- .byte 5
- .byte 6
- .byte 7
- .byte 8
- .byte 9
- .byte 10
- .byte 11
- .byte 0
- .byte 1
- .byte 2
- .byte 3
- .byte 4
- .byte 5
- .byte 6
- .byte 7
- .byte 8
- .byte 9
- .byte 10
- .byte 11
- .byte 0
- .byte 1
- .byte 2
- .byte 3
- .byte 4
- .byte 5
- .byte 6
- .byte 7
- .byte 8
- .byte 9
- .byte 10
- .byte 11
- # Benchmark output buffer: 4096 zero-initialized bytes (32x32
- # floats), filled by both timing loops in main.
- .globl tex
- .bss
- .align 32
- tex:
- .space 4096
- # stb__perlin_randtab: a 256-entry byte-valued permutation table
- # stored as 512 dwords — the 256 values are duplicated so that
- # randtab[randtab[i] + j] (i,j <= 255) never needs re-masking.
- .section .rdata,"dr"
- .align 32
- _ZL19stb__perlin_randtab:
- .long 23
- .long 125
- .long 161
- .long 52
- .long 103
- .long 117
- .long 70
- .long 37
- .long 247
- .long 101
- .long 203
- .long 169
- .long 124
- .long 126
- .long 44
- .long 123
- .long 152
- .long 238
- .long 145
- .long 45
- .long 171
- .long 114
- .long 253
- .long 10
- .long 192
- .long 136
- .long 4
- .long 157
- .long 249
- .long 30
- .long 35
- .long 72
- .long 175
- .long 63
- .long 77
- .long 90
- .long 181
- .long 16
- .long 96
- .long 111
- .long 133
- .long 104
- .long 75
- .long 162
- .long 93
- .long 56
- .long 66
- .long 240
- .long 8
- .long 50
- .long 84
- .long 229
- .long 49
- .long 210
- .long 173
- .long 239
- .long 141
- .long 1
- .long 87
- .long 18
- .long 2
- .long 198
- .long 143
- .long 57
- .long 225
- .long 160
- .long 58
- .long 217
- .long 168
- .long 206
- .long 245
- .long 204
- .long 199
- .long 6
- .long 73
- .long 60
- .long 20
- .long 230
- .long 211
- .long 233
- .long 94
- .long 200
- .long 88
- .long 9
- .long 74
- .long 155
- .long 33
- .long 15
- .long 219
- .long 130
- .long 226
- .long 202
- .long 83
- .long 236
- .long 42
- .long 172
- .long 165
- .long 218
- .long 55
- .long 222
- .long 46
- .long 107
- .long 98
- .long 154
- .long 109
- .long 67
- .long 196
- .long 178
- .long 127
- .long 158
- .long 13
- .long 243
- .long 65
- .long 79
- .long 166
- .long 248
- .long 25
- .long 224
- .long 115
- .long 80
- .long 68
- .long 51
- .long 184
- .long 128
- .long 232
- .long 208
- .long 151
- .long 122
- .long 26
- .long 212
- .long 105
- .long 43
- .long 179
- .long 213
- .long 235
- .long 148
- .long 146
- .long 89
- .long 14
- .long 195
- .long 28
- .long 78
- .long 112
- .long 76
- .long 250
- .long 47
- .long 24
- .long 251
- .long 140
- .long 108
- .long 186
- .long 190
- .long 228
- .long 170
- .long 183
- .long 139
- .long 39
- .long 188
- .long 244
- .long 246
- .long 132
- .long 48
- .long 119
- .long 144
- .long 180
- .long 138
- .long 134
- .long 193
- .long 82
- .long 182
- .long 120
- .long 121
- .long 86
- .long 220
- .long 209
- .long 3
- .long 91
- .long 241
- .long 149
- .long 85
- .long 205
- .long 150
- .long 113
- .long 216
- .long 31
- .long 100
- .long 41
- .long 164
- .long 177
- .long 214
- .long 153
- .long 231
- .long 38
- .long 71
- .long 185
- .long 174
- .long 97
- .long 201
- .long 29
- .long 95
- .long 7
- .long 92
- .long 54
- .long 254
- .long 191
- .long 118
- .long 34
- .long 221
- .long 131
- .long 11
- .long 163
- .long 99
- .long 234
- .long 81
- .long 227
- .long 147
- .long 156
- .long 176
- .long 17
- .long 142
- .long 69
- .long 12
- .long 110
- .long 62
- .long 27
- .long 255
- .long 0
- .long 194
- .long 59
- .long 116
- .long 242
- .long 252
- .long 19
- .long 21
- .long 187
- .long 53
- .long 207
- .long 129
- .long 64
- .long 135
- .long 61
- .long 40
- .long 167
- .long 237
- .long 102
- .long 223
- .long 106
- .long 159
- .long 197
- .long 189
- .long 215
- .long 137
- .long 36
- .long 32
- .long 22
- .long 5
- # --- second copy of the same 256 entries (see note above) ---
- .long 23
- .long 125
- .long 161
- .long 52
- .long 103
- .long 117
- .long 70
- .long 37
- .long 247
- .long 101
- .long 203
- .long 169
- .long 124
- .long 126
- .long 44
- .long 123
- .long 152
- .long 238
- .long 145
- .long 45
- .long 171
- .long 114
- .long 253
- .long 10
- .long 192
- .long 136
- .long 4
- .long 157
- .long 249
- .long 30
- .long 35
- .long 72
- .long 175
- .long 63
- .long 77
- .long 90
- .long 181
- .long 16
- .long 96
- .long 111
- .long 133
- .long 104
- .long 75
- .long 162
- .long 93
- .long 56
- .long 66
- .long 240
- .long 8
- .long 50
- .long 84
- .long 229
- .long 49
- .long 210
- .long 173
- .long 239
- .long 141
- .long 1
- .long 87
- .long 18
- .long 2
- .long 198
- .long 143
- .long 57
- .long 225
- .long 160
- .long 58
- .long 217
- .long 168
- .long 206
- .long 245
- .long 204
- .long 199
- .long 6
- .long 73
- .long 60
- .long 20
- .long 230
- .long 211
- .long 233
- .long 94
- .long 200
- .long 88
- .long 9
- .long 74
- .long 155
- .long 33
- .long 15
- .long 219
- .long 130
- .long 226
- .long 202
- .long 83
- .long 236
- .long 42
- .long 172
- .long 165
- .long 218
- .long 55
- .long 222
- .long 46
- .long 107
- .long 98
- .long 154
- .long 109
- .long 67
- .long 196
- .long 178
- .long 127
- .long 158
- .long 13
- .long 243
- .long 65
- .long 79
- .long 166
- .long 248
- .long 25
- .long 224
- .long 115
- .long 80
- .long 68
- .long 51
- .long 184
- .long 128
- .long 232
- .long 208
- .long 151
- .long 122
- .long 26
- .long 212
- .long 105
- .long 43
- .long 179
- .long 213
- .long 235
- .long 148
- .long 146
- .long 89
- .long 14
- .long 195
- .long 28
- .long 78
- .long 112
- .long 76
- .long 250
- .long 47
- .long 24
- .long 251
- .long 140
- .long 108
- .long 186
- .long 190
- .long 228
- .long 170
- .long 183
- .long 139
- .long 39
- .long 188
- .long 244
- .long 246
- .long 132
- .long 48
- .long 119
- .long 144
- .long 180
- .long 138
- .long 134
- .long 193
- .long 82
- .long 182
- .long 120
- .long 121
- .long 86
- .long 220
- .long 209
- .long 3
- .long 91
- .long 241
- .long 149
- .long 85
- .long 205
- .long 150
- .long 113
- .long 216
- .long 31
- .long 100
- .long 41
- .long 164
- .long 177
- .long 214
- .long 153
- .long 231
- .long 38
- .long 71
- .long 185
- .long 174
- .long 97
- .long 201
- .long 29
- .long 95
- .long 7
- .long 92
- .long 54
- .long 254
- .long 191
- .long 118
- .long 34
- .long 221
- .long 131
- .long 11
- .long 163
- .long 99
- .long 234
- .long 81
- .long 227
- .long 147
- .long 156
- .long 176
- .long 17
- .long 142
- .long 69
- .long 12
- .long 110
- .long 62
- .long 27
- .long 255
- .long 0
- .long 194
- .long 59
- .long 116
- .long 242
- .long 252
- .long 19
- .long 21
- .long 187
- .long 53
- .long 207
- .long 129
- .long 64
- .long 135
- .long 61
- .long 40
- .long 167
- .long 237
- .long 102
- .long 223
- .long 106
- .long 159
- .long 197
- .long 189
- .long 215
- .long 137
- .long 36
- .long 32
- .long 22
- .long 5
- # Floating-point literal pool (bit patterns):
- # .LC0 = 6.0f, .LC1 = 15.0f, .LC2 = 10.0f  — fade 6t^5-15t^4+10t^3
- # .LC3 = 1.0f
- # .LC6 = 0x3C800000 = 1/64 (initial sample coordinate in main)
- # .LC7 = 0.5f, .LC8 = -0.5f (z-axis offsets used by main's loops)
- # .LC9 = 0x3D000000 = 1/32 (per-texel coordinate step)
- # .LC10 = double 1.0e9 (seconds -> nanoseconds)
- # .LC11 = double 2^30 (timing divisor — presumably total calls x
- #         CLOCKS_PER_SEC; confirm against the C source)
- .align 4
- .LC0:
- .long 1086324736
- .align 4
- .LC1:
- .long 1097859072
- .align 4
- .LC2:
- .long 1092616192
- .align 4
- .LC3:
- .long 1065353216
- .align 4
- .LC6:
- .long 1015021568
- .align 4
- .LC7:
- .long 1056964608
- .align 4
- .LC8:
- .long -1090519040
- .align 4
- .LC9:
- .long 1023410176
- .align 8
- .LC10:
- .long 0
- .long 1104006501
- .align 8
- .LC11:
- .long 0
- .long 1104101376
- .ident "GCC: (rev5, Built by MinGW-W64 project) 4.8.1"
- # External symbol declarations for the COFF symbol table.
- .def floor; .scl 2; .type 32; .endef
- .def __mingw_vprintf; .scl 2; .type 32; .endef
- .def clock; .scl 2; .type 32; .endef
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement