# test.cpp — GCC 4.8.1 (MinGW-w64, rev5) -O2 assembly listing.
# stb_perlin_noise3 benchmark: reference floor() version vs. a
# "no_floor" variant, timed with clock() over a 32x32 texture.
- .file "test.cpp"
- .text
- .p2align 4,,15
- .globl stb_perlin_noise3
- .def stb_perlin_noise3; .scl 2; .type 32; .endef
- .seh_proc stb_perlin_noise3
- stb_perlin_noise3:
- .LFB25:
- pushq %rdi
- .seh_pushreg %rdi
- pushq %rsi
- .seh_pushreg %rsi
- pushq %rbx
- .seh_pushreg %rbx
- subq $160, %rsp
- .seh_stackalloc 160
- movaps %xmm6, (%rsp)
- .seh_savexmm %xmm6, 0
- movaps %xmm7, 16(%rsp)
- .seh_savexmm %xmm7, 16
- movaps %xmm8, 32(%rsp)
- .seh_savexmm %xmm8, 32
- movaps %xmm9, 48(%rsp)
- .seh_savexmm %xmm9, 48
- movaps %xmm10, 64(%rsp)
- .seh_savexmm %xmm10, 64
- movaps %xmm11, 80(%rsp)
- .seh_savexmm %xmm11, 80
- movaps %xmm12, 96(%rsp)
- .seh_savexmm %xmm12, 96
- movaps %xmm13, 112(%rsp)
- .seh_savexmm %xmm13, 112
- movaps %xmm14, 128(%rsp)
- .seh_savexmm %xmm14, 128
- movaps %xmm15, 144(%rsp)
- .seh_savexmm %xmm15, 144
- .seh_endprologue
- movsd .LC1(%rip), %xmm3
- movsd .LC0(%rip), %xmm7
- movapd %xmm3, %xmm4
- movl 224(%rsp), %eax
- movaps %xmm0, %xmm5
- subl $1, %r9d
- unpcklps %xmm5, %xmm5
- cvtps2pd %xmm5, %xmm6
- andpd %xmm6, %xmm4
- ucomisd %xmm4, %xmm7
- movzbl %r9b, %r9d
- leal -1(%rax), %r8d
- movl 232(%rsp), %eax
- movzbl %r8b, %r8d
- leal -1(%rax), %ebx
- movzbl %bl, %ebx
- jbe .L2
- cvttsd2siq %xmm6, %rax
- cvtsi2sdq %rax, %xmm4
- movapd %xmm4, %xmm0
- cmpnlesd %xmm6, %xmm0
- movsd .LC2(%rip), %xmm6
- andpd %xmm6, %xmm0
- movapd %xmm4, %xmm6
- subsd %xmm0, %xmm6
- .L2:
- cvttsd2si %xmm6, %ecx
- movapd %xmm3, %xmm4
- unpcklps %xmm1, %xmm1
- cvtps2pd %xmm1, %xmm6
- andpd %xmm6, %xmm4
- ucomisd %xmm4, %xmm7
- jbe .L3
- cvttsd2siq %xmm6, %rax
- cvtsi2sdq %rax, %xmm4
- movapd %xmm4, %xmm0
- cmpnlesd %xmm6, %xmm0
- movsd .LC2(%rip), %xmm6
- andpd %xmm6, %xmm0
- movapd %xmm4, %xmm6
- subsd %xmm0, %xmm6
- .L3:
- unpcklps %xmm2, %xmm2
- cvtps2pd %xmm2, %xmm4
- andpd %xmm4, %xmm3
- ucomisd %xmm3, %xmm7
- cvttsd2si %xmm6, %r11d
- jbe .L4
- cvttsd2siq %xmm4, %rax
- cvtsi2sdq %rax, %xmm3
- movapd %xmm3, %xmm0
- cmpnlesd %xmm4, %xmm0
- movsd .LC2(%rip), %xmm4
- andpd %xmm4, %xmm0
- subsd %xmm0, %xmm3
- movapd %xmm3, %xmm4
- .L4:
- cvtsi2ss %ecx, %xmm0
- leal 1(%r11), %edx
- movl %r11d, %esi
- movss .LC3(%rip), %xmm12
- andl %r8d, %esi
- andl %r8d, %edx
- movss .LC4(%rip), %xmm11
- movss .LC5(%rip), %xmm10
- subss %xmm0, %xmm5
- cvtsi2ss %r11d, %xmm0
- movl %ecx, %r11d
- addl $1, %ecx
- cvttsd2si %xmm4, %eax
- andl %r9d, %r11d
- movss .LC6(%rip), %xmm4
- movslq %r11d, %r11
- andl %r9d, %ecx
- movaps %xmm5, %xmm15
- movslq %ecx, %rcx
- subss %xmm4, %xmm15
- subss %xmm0, %xmm1
- leal 1(%rax), %r8d
- movl %eax, %r10d
- andl %ebx, %r10d
- andl %ebx, %r8d
- movaps %xmm1, %xmm7
- movaps %xmm1, %xmm0
- mulss %xmm1, %xmm7
- mulss %xmm1, %xmm7
- mulss %xmm12, %xmm1
- addss %xmm11, %xmm1
- mulss %xmm0, %xmm1
- addss %xmm10, %xmm1
- mulss %xmm1, %xmm7
- cvtsi2ss %eax, %xmm1
- leaq _ZL19stb__perlin_randtab(%rip), %rax
- movl (%rax,%r11,4), %r11d
- movl (%rax,%rcx,4), %ecx
- subss %xmm1, %xmm2
- leal (%r11,%rsi), %r9d
- addl %edx, %r11d
- movslq %r9d, %r9
- addl %ecx, %esi
- addl %ecx, %edx
- movl (%rax,%r9,4), %ebx
- movaps %xmm2, %xmm1
- movslq %esi, %rsi
- mulss %xmm12, %xmm1
- movslq %edx, %rdx
- movl (%rax,%rsi,4), %edi
- movl (%rax,%rdx,4), %esi
- movaps %xmm2, %xmm6
- movslq %r11d, %r11
- mulss %xmm2, %xmm6
- movl (%rax,%r11,4), %r11d
- movaps %xmm2, %xmm14
- leal (%rbx,%r10), %edx
- subss %xmm4, %xmm14
- addl %r8d, %ebx
- movslq %edx, %rdx
- addss %xmm11, %xmm1
- movslq %ebx, %rbx
- movl (%rax,%rdx,4), %edx
- leaq _ZZL16stb__perlin_gradifffE7indices(%rip), %rcx
- mulss %xmm2, %xmm6
- mulss %xmm2, %xmm1
- andl $63, %edx
- movzbl (%rcx,%rdx), %r9d
- leaq _ZZL16stb__perlin_gradifffE5basis(%rip), %rdx
- addss %xmm10, %xmm1
- mulss %xmm1, %xmm6
- salq $4, %r9
- addq %rdx, %r9
- movss 4(%r9), %xmm3
- movss (%r9), %xmm1
- mulss %xmm0, %xmm3
- mulss %xmm5, %xmm1
- addss %xmm1, %xmm3
- movss 8(%r9), %xmm1
- movl (%rax,%rbx,4), %r9d
- mulss %xmm2, %xmm1
- andl $63, %r9d
- movzbl (%rcx,%r9), %ebx
- leal (%r11,%r10), %r9d
- addl %r8d, %r11d
- movslq %r9d, %r9
- addss %xmm1, %xmm3
- movslq %r11d, %r11
- movl (%rax,%r9,4), %r9d
- movaps %xmm0, %xmm1
- subss %xmm4, %xmm1
- salq $4, %rbx
- addq %rdx, %rbx
- andl $63, %r9d
- movzbl (%rcx,%r9), %r9d
- salq $4, %r9
- addq %rdx, %r9
- movss 8(%r9), %xmm9
- movss (%r9), %xmm8
- mulss %xmm2, %xmm9
- mulss %xmm5, %xmm8
- addss %xmm8, %xmm9
- movss 4(%r9), %xmm8
- movl (%rax,%r11,4), %r9d
- leal (%rdi,%r10), %r11d
- addl %r8d, %edi
- movslq %r11d, %r11
- addl %esi, %r10d
- movslq %edi, %rdi
- movl (%rax,%r11,4), %r11d
- mulss %xmm1, %xmm8
- movslq %r10d, %r10
- movl (%rax,%r10,4), %r10d
- andl $63, %r9d
- movzbl (%rcx,%r9), %r9d
- andl $63, %r11d
- addss %xmm8, %xmm9
- movzbl (%rcx,%r11), %r11d
- andl $63, %r10d
- movzbl (%rcx,%r10), %r10d
- salq $4, %r9
- addq %rdx, %r9
- salq $4, %r11
- addq %rdx, %r11
- movss 8(%r11), %xmm13
- movss 4(%r11), %xmm4
- mulss %xmm2, %xmm13
- mulss %xmm0, %xmm4
- addss %xmm4, %xmm13
- movss (%r11), %xmm4
- movl (%rax,%rdi,4), %r11d
- mulss %xmm15, %xmm4
- andl $63, %r11d
- movzbl (%rcx,%r11), %r11d
- addss %xmm4, %xmm13
- salq $4, %r11
- addq %rdx, %r11
- salq $4, %r10
- addl %esi, %r8d
- addq %rdx, %r10
- movslq %r8d, %r8
- movss (%r10), %xmm8
- movss 4(%r10), %xmm4
- mulss %xmm15, %xmm8
- movl (%rax,%r8,4), %eax
- mulss %xmm1, %xmm4
- mulss 8(%r10), %xmm2
- andl $63, %eax
- addss %xmm4, %xmm8
- movss 4(%rbx), %xmm4
- movzbl (%rcx,%rax), %eax
- mulss %xmm0, %xmm4
- addss %xmm2, %xmm8
- movss (%rbx), %xmm2
- mulss 4(%r11), %xmm0
- mulss %xmm5, %xmm2
- salq $4, %rax
- addq %rax, %rdx
- addss %xmm2, %xmm4
- movss 8(%rbx), %xmm2
- mulss %xmm14, %xmm2
- addss %xmm2, %xmm4
- movss 8(%r11), %xmm2
- mulss %xmm14, %xmm2
- subss %xmm3, %xmm4
- mulss %xmm6, %xmm4
- addss %xmm3, %xmm4
- movss (%r11), %xmm3
- mulss %xmm15, %xmm3
- mulss (%rdx), %xmm15
- addss %xmm2, %xmm3
- movss 4(%r9), %xmm2
- mulss %xmm1, %xmm2
- addss %xmm0, %xmm3
- movss 8(%r9), %xmm0
- mulss 4(%rdx), %xmm1
- mulss %xmm14, %xmm0
- mulss 8(%rdx), %xmm14
- subss %xmm13, %xmm3
- addss %xmm15, %xmm1
- movaps 144(%rsp), %xmm15
- addss %xmm0, %xmm2
- movss (%r9), %xmm0
- mulss %xmm6, %xmm3
- mulss %xmm5, %xmm0
- addss %xmm14, %xmm1
- movaps 128(%rsp), %xmm14
- addss %xmm13, %xmm3
- movaps 112(%rsp), %xmm13
- subss %xmm8, %xmm1
- addss %xmm0, %xmm2
- movaps %xmm5, %xmm0
- mulss %xmm5, %xmm0
- mulss %xmm6, %xmm1
- subss %xmm9, %xmm2
- mulss %xmm5, %xmm0
- mulss %xmm6, %xmm2
- addss %xmm8, %xmm1
- movaps (%rsp), %xmm6
- movaps 32(%rsp), %xmm8
- subss %xmm3, %xmm1
- addss %xmm9, %xmm2
- movaps 48(%rsp), %xmm9
- mulss %xmm7, %xmm1
- subss %xmm4, %xmm2
- mulss %xmm7, %xmm2
- addss %xmm3, %xmm1
- movaps 16(%rsp), %xmm7
- addss %xmm4, %xmm2
- movaps %xmm12, %xmm4
- movaps 96(%rsp), %xmm12
- mulss %xmm5, %xmm4
- subss %xmm2, %xmm1
- addss %xmm11, %xmm4
- movaps 80(%rsp), %xmm11
- mulss %xmm5, %xmm4
- addss %xmm10, %xmm4
- movaps 64(%rsp), %xmm10
- mulss %xmm4, %xmm0
- mulss %xmm1, %xmm0
- addss %xmm2, %xmm0
- addq $160, %rsp
- popq %rbx
- popq %rsi
- popq %rdi
- ret
- .seh_endproc
- .p2align 4,,15
- .globl _Z26stb_perlin_noise3_no_floorfffiii
- .def _Z26stb_perlin_noise3_no_floorfffiii; .scl 2; .type 32; .endef
- .seh_proc _Z26stb_perlin_noise3_no_floorfffiii
- _Z26stb_perlin_noise3_no_floorfffiii:
- .LFB26:
- pushq %rbp
- .seh_pushreg %rbp
- pushq %rdi
- .seh_pushreg %rdi
- pushq %rsi
- .seh_pushreg %rsi
- pushq %rbx
- .seh_pushreg %rbx
- subq $168, %rsp
- .seh_stackalloc 168
- movaps %xmm6, (%rsp)
- .seh_savexmm %xmm6, 0
- movaps %xmm7, 16(%rsp)
- .seh_savexmm %xmm7, 16
- movaps %xmm8, 32(%rsp)
- .seh_savexmm %xmm8, 32
- movaps %xmm9, 48(%rsp)
- .seh_savexmm %xmm9, 48
- movaps %xmm10, 64(%rsp)
- .seh_savexmm %xmm10, 64
- movaps %xmm11, 80(%rsp)
- .seh_savexmm %xmm11, 80
- movaps %xmm12, 96(%rsp)
- .seh_savexmm %xmm12, 96
- movaps %xmm13, 112(%rsp)
- .seh_savexmm %xmm13, 112
- movaps %xmm14, 128(%rsp)
- .seh_savexmm %xmm14, 128
- movaps %xmm15, 144(%rsp)
- .seh_savexmm %xmm15, 144
- .seh_endprologue
- movss .LC6(%rip), %xmm4
- movss .LC3(%rip), %xmm12
- movss .LC4(%rip), %xmm11
- movss .LC5(%rip), %xmm10
- movl 240(%rsp), %eax
- movaps %xmm0, %xmm5
- subl $1, %r9d
- cvttss2si %xmm0, %edx
- xorps %xmm0, %xmm0
- movzbl %r9b, %r9d
- cvttss2si %xmm1, %ecx
- leal -1(%rax), %r8d
- movl 248(%rsp), %eax
- movzbl %r8b, %r8d
- movl %r8d, %esi
- leal -1(%rax), %ebx
- xorl %eax, %eax
- comiss %xmm5, %xmm0
- movzbl %bl, %ebx
- seta %al
- subl %eax, %edx
- xorl %eax, %eax
- comiss %xmm1, %xmm0
- seta %al
- xorl %r10d, %r10d
- subl %eax, %ecx
- comiss %xmm2, %xmm0
- cvtsi2ss %edx, %xmm0
- leal 1(%rcx), %r11d
- cvttss2si %xmm2, %eax
- seta %r10b
- andl %ecx, %esi
- andl %r8d, %r11d
- subss %xmm0, %xmm5
- cvtsi2ss %ecx, %xmm0
- movl %r9d, %ecx
- subl %r10d, %eax
- leal 1(%rax), %r8d
- andl %edx, %ecx
- movl %ebx, %r10d
- andl %eax, %r10d
- movslq %ecx, %rcx
- addl $1, %edx
- andl %r9d, %edx
- andl %ebx, %r8d
- movaps %xmm5, %xmm14
- subss %xmm0, %xmm1
- movslq %edx, %rdx
- subss %xmm4, %xmm14
- movaps %xmm1, %xmm7
- movaps %xmm1, %xmm0
- mulss %xmm1, %xmm7
- mulss %xmm1, %xmm7
- mulss %xmm12, %xmm1
- addss %xmm11, %xmm1
- mulss %xmm0, %xmm1
- addss %xmm10, %xmm1
- mulss %xmm1, %xmm7
- cvtsi2ss %eax, %xmm1
- leaq _ZL19stb__perlin_randtab(%rip), %rax
- movl (%rax,%rcx,4), %ecx
- movl (%rax,%rdx,4), %edx
- subss %xmm1, %xmm2
- leal (%rcx,%rsi), %r9d
- addl %r11d, %ecx
- movslq %r9d, %r9
- addl %edx, %esi
- movaps %xmm2, %xmm1
- movl (%rax,%r9,4), %ebx
- addl %r11d, %edx
- movslq %esi, %rsi
- mulss %xmm12, %xmm1
- movslq %edx, %rdx
- movl (%rax,%rsi,4), %edi
- movl (%rax,%rdx,4), %esi
- movaps %xmm2, %xmm6
- movslq %ecx, %rcx
- mulss %xmm2, %xmm6
- movl (%rax,%rcx,4), %ebp
- movaps %xmm2, %xmm15
- leal (%rbx,%r10), %edx
- subss %xmm4, %xmm15
- addl %r8d, %ebx
- movslq %edx, %rdx
- addss %xmm11, %xmm1
- movslq %ebx, %rbx
- movl (%rax,%rdx,4), %edx
- leaq _ZZL16stb__perlin_gradifffE7indices(%rip), %rcx
- mulss %xmm2, %xmm6
- mulss %xmm2, %xmm1
- leal (%rdi,%r10), %r11d
- addl %r8d, %edi
- andl $63, %edx
- movslq %r11d, %r11
- movslq %edi, %rdi
- movzbl (%rcx,%rdx), %r9d
- leaq _ZZL16stb__perlin_gradifffE5basis(%rip), %rdx
- addss %xmm10, %xmm1
- movl (%rax,%r11,4), %r11d
- mulss %xmm1, %xmm6
- salq $4, %r9
- addq %rdx, %r9
- andl $63, %r11d
- movss 4(%r9), %xmm3
- movss 8(%r9), %xmm1
- mulss %xmm0, %xmm3
- movzbl (%rcx,%r11), %r11d
- mulss %xmm2, %xmm1
- salq $4, %r11
- addss %xmm1, %xmm3
- movss (%r9), %xmm1
- addq %rdx, %r11
- movl (%rax,%rbx,4), %r9d
- mulss %xmm5, %xmm1
- movss 4(%r11), %xmm13
- mulss %xmm0, %xmm13
- andl $63, %r9d
- movzbl (%rcx,%r9), %ebx
- leal 0(%rbp,%r10), %r9d
- addss %xmm1, %xmm3
- movslq %r9d, %r9
- movaps %xmm0, %xmm1
- addl %r8d, %ebp
- movl (%rax,%r9,4), %r9d
- subss %xmm4, %xmm1
- movslq %ebp, %rbp
- movss 8(%r11), %xmm4
- addl %esi, %r10d
- movslq %r10d, %r10
- salq $4, %rbx
- mulss %xmm2, %xmm4
- movl (%rax,%r10,4), %r10d
- addq %rdx, %rbx
- andl $63, %r9d
- movzbl (%rcx,%r9), %r9d
- addss %xmm4, %xmm13
- movss (%r11), %xmm4
- movl (%rax,%rdi,4), %r11d
- mulss %xmm14, %xmm4
- salq $4, %r9
- addq %rdx, %r9
- movss (%r9), %xmm9
- movss 4(%r9), %xmm8
- andl $63, %r11d
- mulss %xmm5, %xmm9
- movzbl (%rcx,%r11), %r11d
- addss %xmm4, %xmm13
- mulss %xmm1, %xmm8
- movss 8(%rbx), %xmm4
- mulss %xmm15, %xmm4
- salq $4, %r11
- addss %xmm8, %xmm9
- movss 8(%r9), %xmm8
- addq %rdx, %r11
- movl (%rax,%rbp,4), %r9d
- mulss %xmm2, %xmm8
- andl $63, %r9d
- movzbl (%rcx,%r9), %r9d
- addss %xmm8, %xmm9
- salq $4, %r9
- addq %rdx, %r9
- andl $63, %r10d
- addl %esi, %r8d
- movzbl (%rcx,%r10), %r10d
- movslq %r8d, %r8
- movl (%rax,%r8,4), %eax
- salq $4, %r10
- addq %rdx, %r10
- andl $63, %eax
- movss 4(%r10), %xmm8
- mulss 8(%r10), %xmm2
- mulss %xmm1, %xmm8
- movzbl (%rcx,%rax), %eax
- addss %xmm2, %xmm8
- movss (%r10), %xmm2
- salq $4, %rax
- addq %rax, %rdx
- mulss %xmm14, %xmm2
- addss %xmm2, %xmm8
- movss 4(%rbx), %xmm2
- mulss %xmm0, %xmm2
- mulss 4(%r11), %xmm0
- addss %xmm2, %xmm4
- movss (%rbx), %xmm2
- mulss %xmm5, %xmm2
- addss %xmm2, %xmm4
- movss (%r9), %xmm2
- mulss %xmm5, %xmm2
- subss %xmm3, %xmm4
- mulss %xmm6, %xmm4
- addss %xmm3, %xmm4
- movss 8(%r11), %xmm3
- mulss %xmm15, %xmm3
- addss %xmm0, %xmm3
- movss (%r11), %xmm0
- mulss %xmm14, %xmm0
- mulss (%rdx), %xmm14
- addss %xmm0, %xmm3
- movss 4(%r9), %xmm0
- mulss %xmm1, %xmm0
- mulss 4(%rdx), %xmm1
- subss %xmm13, %xmm3
- addss %xmm0, %xmm2
- movss 8(%r9), %xmm0
- mulss %xmm6, %xmm3
- mulss %xmm15, %xmm0
- mulss 8(%rdx), %xmm15
- addss %xmm13, %xmm3
- movaps 112(%rsp), %xmm13
- addss %xmm0, %xmm2
- movaps %xmm5, %xmm0
- addss %xmm15, %xmm1
- mulss %xmm5, %xmm0
- movaps 144(%rsp), %xmm15
- subss %xmm9, %xmm2
- addss %xmm14, %xmm1
- movaps 128(%rsp), %xmm14
- mulss %xmm5, %xmm0
- mulss %xmm6, %xmm2
- subss %xmm8, %xmm1
- addss %xmm9, %xmm2
- mulss %xmm6, %xmm1
- movaps 48(%rsp), %xmm9
- movaps (%rsp), %xmm6
- subss %xmm4, %xmm2
- addss %xmm8, %xmm1
- movaps 32(%rsp), %xmm8
- mulss %xmm7, %xmm2
- subss %xmm3, %xmm1
- addss %xmm4, %xmm2
- mulss %xmm7, %xmm1
- movaps %xmm12, %xmm4
- mulss %xmm5, %xmm4
- movaps 16(%rsp), %xmm7
- movaps 96(%rsp), %xmm12
- addss %xmm3, %xmm1
- addss %xmm11, %xmm4
- movaps 80(%rsp), %xmm11
- subss %xmm2, %xmm1
- mulss %xmm5, %xmm4
- addss %xmm10, %xmm4
- movaps 64(%rsp), %xmm10
- mulss %xmm4, %xmm0
- mulss %xmm1, %xmm0
- addss %xmm2, %xmm0
- addq $168, %rsp
- popq %rbx
- popq %rsi
- popq %rdi
- popq %rbp
- ret
- .seh_endproc
- .section .text$_Z6printfPKcz,"x"
- .linkonce discard
- .p2align 4,,15
- .globl _Z6printfPKcz
- .def _Z6printfPKcz; .scl 2; .type 32; .endef
- .seh_proc _Z6printfPKcz
- _Z6printfPKcz:
- .LFB34:
- subq $56, %rsp
- .seh_stackalloc 56
- .seh_endprologue
- movq %rdx, 72(%rsp)
- leaq 72(%rsp), %rdx
- movq %r8, 80(%rsp)
- movq %r9, 88(%rsp)
- movq %rdx, 40(%rsp)
- call __mingw_vprintf
- addq $56, %rsp
- ret
- .seh_endproc
- .def __main; .scl 2; .type 32; .endef
- .section .rdata,"dr"
- .align 8
- .LC12:
- .ascii "stb_perlin_noise3: %.1f ns/call\12\0"
- .align 8
- .LC13:
- .ascii "stb_perlin_noise3_no_floor: %.1f ns/call\12\0"
- .section .text.startup,"x"
- .p2align 4,,15
- .globl main
- .def main; .scl 2; .type 32; .endef
- .seh_proc main
- main:
- .LFB84:
- pushq %r12
- .seh_pushreg %r12
- pushq %rbp
- .seh_pushreg %rbp
- pushq %rdi
- .seh_pushreg %rdi
- pushq %rsi
- .seh_pushreg %rsi
- pushq %rbx
- .seh_pushreg %rbx
- subq $176, %rsp
- .seh_stackalloc 176
- movaps %xmm6, 48(%rsp)
- .seh_savexmm %xmm6, 48
- movaps %xmm7, 64(%rsp)
- .seh_savexmm %xmm7, 64
- movaps %xmm8, 80(%rsp)
- .seh_savexmm %xmm8, 80
- movaps %xmm9, 96(%rsp)
- .seh_savexmm %xmm9, 96
- movaps %xmm10, 112(%rsp)
- .seh_savexmm %xmm10, 112
- movaps %xmm11, 128(%rsp)
- .seh_savexmm %xmm11, 128
- movaps %xmm12, 144(%rsp)
- .seh_savexmm %xmm12, 144
- movaps %xmm13, 160(%rsp)
- .seh_savexmm %xmm13, 160
- .seh_endprologue
- leaq 4096+tex(%rip), %rbp
- movl $1024, %r12d
- call __main
- call clock
- movss .LC10(%rip), %xmm7
- cvtsi2sd %eax, %xmm12
- movss .LC8(%rip), %xmm11
- movss .LC9(%rip), %xmm9
- movaps %xmm11, %xmm13
- movaps %xmm11, %xmm10
- .L9:
- leaq 4096+tex(%rip), %rdi
- movaps %xmm13, %xmm8
- leaq tex(%rip), %rsi
- .p2align 4,,10
- .L14:
- xorl %ebx, %ebx
- movaps %xmm10, %xmm6
- .p2align 4,,10
- .L12:
- movaps %xmm6, %xmm0
- movl $32, 40(%rsp)
- movaps %xmm9, %xmm2
- movl $32, 32(%rsp)
- movaps %xmm8, %xmm1
- addss %xmm7, %xmm6
- movl $32, %r9d
- call stb_perlin_noise3
- movss %xmm0, (%rsi,%rbx)
- addq $4, %rbx
- cmpq $128, %rbx
- jne .L12
- subq $-128, %rsi
- addss %xmm7, %xmm8
- cmpq %rbp, %rsi
- jne .L14
- subl $1, %r12d
- jne .L9
- call clock
- leaq .LC12(%rip), %rcx
- movaps %xmm11, %xmm10
- cvtsi2sd %eax, %xmm0
- movl $1024, %ebp
- movsd .LC11(%rip), %xmm13
- subsd %xmm12, %xmm0
- mulsd %xmm13, %xmm0
- movapd %xmm0, %xmm1
- movd %xmm0, %rdx
- call _Z6printfPKcz
- call clock
- movss .LC9(%rip), %xmm9
- cvtsi2sd %eax, %xmm12
- .L15:
- leaq tex(%rip), %rsi
- movaps %xmm11, %xmm8
- .p2align 4,,10
- .L20:
- xorl %ebx, %ebx
- movaps %xmm10, %xmm6
- .p2align 4,,10
- .L18:
- movaps %xmm6, %xmm0
- movl $32, 40(%rsp)
- movaps %xmm9, %xmm2
- movl $32, 32(%rsp)
- movaps %xmm8, %xmm1
- addss %xmm7, %xmm6
- movl $32, %r9d
- call _Z26stb_perlin_noise3_no_floorfffiii
- movss %xmm0, (%rsi,%rbx)
- addq $4, %rbx
- cmpq $128, %rbx
- jne .L18
- subq $-128, %rsi
- addss %xmm7, %xmm8
- cmpq %rsi, %rdi
- jne .L20
- subl $1, %ebp
- jne .L15
- call clock
- leaq .LC13(%rip), %rcx
- cvtsi2sd %eax, %xmm0
- subsd %xmm12, %xmm0
- mulsd %xmm13, %xmm0
- movapd %xmm0, %xmm1
- movd %xmm0, %rdx
- call _Z6printfPKcz
- nop
- movaps 48(%rsp), %xmm6
- xorl %eax, %eax
- movaps 64(%rsp), %xmm7
- movaps 80(%rsp), %xmm8
- movaps 96(%rsp), %xmm9
- movaps 112(%rsp), %xmm10
- movaps 128(%rsp), %xmm11
- movaps 144(%rsp), %xmm12
- movaps 160(%rsp), %xmm13
- addq $176, %rsp
- popq %rbx
- popq %rsi
- popq %rdi
- popq %rbp
- popq %r12
- ret
- .seh_endproc
- .data
- .align 32
- _ZZL16stb__perlin_gradifffE5basis:
- .long 1065353216
- .long 1065353216
- .long 0
- .space 4
- .long -1082130432
- .long 1065353216
- .long 0
- .space 4
- .long 1065353216
- .long -1082130432
- .long 0
- .space 4
- .long -1082130432
- .long -1082130432
- .long 0
- .space 4
- .long 1065353216
- .long 0
- .long 1065353216
- .space 4
- .long -1082130432
- .long 0
- .long 1065353216
- .space 4
- .long 1065353216
- .long 0
- .long -1082130432
- .space 4
- .long -1082130432
- .long 0
- .long -1082130432
- .space 4
- .long 0
- .long 1065353216
- .long 1065353216
- .space 4
- .long 0
- .long -1082130432
- .long 1065353216
- .space 4
- .long 0
- .long 1065353216
- .long -1082130432
- .space 4
- .long 0
- .long -1082130432
- .long -1082130432
- .space 4
- .section .rdata,"dr"
- .align 32
- _ZZL16stb__perlin_gradifffE7indices:
- .byte 0
- .byte 1
- .byte 2
- .byte 3
- .byte 4
- .byte 5
- .byte 6
- .byte 7
- .byte 8
- .byte 9
- .byte 10
- .byte 11
- .byte 0
- .byte 9
- .byte 1
- .byte 11
- .byte 0
- .byte 1
- .byte 2
- .byte 3
- .byte 4
- .byte 5
- .byte 6
- .byte 7
- .byte 8
- .byte 9
- .byte 10
- .byte 11
- .byte 0
- .byte 1
- .byte 2
- .byte 3
- .byte 4
- .byte 5
- .byte 6
- .byte 7
- .byte 8
- .byte 9
- .byte 10
- .byte 11
- .byte 0
- .byte 1
- .byte 2
- .byte 3
- .byte 4
- .byte 5
- .byte 6
- .byte 7
- .byte 8
- .byte 9
- .byte 10
- .byte 11
- .byte 0
- .byte 1
- .byte 2
- .byte 3
- .byte 4
- .byte 5
- .byte 6
- .byte 7
- .byte 8
- .byte 9
- .byte 10
- .byte 11
- .globl tex
- .bss
- .align 32
- tex:
- .space 4096
- .section .rdata,"dr"
- .align 32
- _ZL19stb__perlin_randtab:
- .long 23
- .long 125
- .long 161
- .long 52
- .long 103
- .long 117
- .long 70
- .long 37
- .long 247
- .long 101
- .long 203
- .long 169
- .long 124
- .long 126
- .long 44
- .long 123
- .long 152
- .long 238
- .long 145
- .long 45
- .long 171
- .long 114
- .long 253
- .long 10
- .long 192
- .long 136
- .long 4
- .long 157
- .long 249
- .long 30
- .long 35
- .long 72
- .long 175
- .long 63
- .long 77
- .long 90
- .long 181
- .long 16
- .long 96
- .long 111
- .long 133
- .long 104
- .long 75
- .long 162
- .long 93
- .long 56
- .long 66
- .long 240
- .long 8
- .long 50
- .long 84
- .long 229
- .long 49
- .long 210
- .long 173
- .long 239
- .long 141
- .long 1
- .long 87
- .long 18
- .long 2
- .long 198
- .long 143
- .long 57
- .long 225
- .long 160
- .long 58
- .long 217
- .long 168
- .long 206
- .long 245
- .long 204
- .long 199
- .long 6
- .long 73
- .long 60
- .long 20
- .long 230
- .long 211
- .long 233
- .long 94
- .long 200
- .long 88
- .long 9
- .long 74
- .long 155
- .long 33
- .long 15
- .long 219
- .long 130
- .long 226
- .long 202
- .long 83
- .long 236
- .long 42
- .long 172
- .long 165
- .long 218
- .long 55
- .long 222
- .long 46
- .long 107
- .long 98
- .long 154
- .long 109
- .long 67
- .long 196
- .long 178
- .long 127
- .long 158
- .long 13
- .long 243
- .long 65
- .long 79
- .long 166
- .long 248
- .long 25
- .long 224
- .long 115
- .long 80
- .long 68
- .long 51
- .long 184
- .long 128
- .long 232
- .long 208
- .long 151
- .long 122
- .long 26
- .long 212
- .long 105
- .long 43
- .long 179
- .long 213
- .long 235
- .long 148
- .long 146
- .long 89
- .long 14
- .long 195
- .long 28
- .long 78
- .long 112
- .long 76
- .long 250
- .long 47
- .long 24
- .long 251
- .long 140
- .long 108
- .long 186
- .long 190
- .long 228
- .long 170
- .long 183
- .long 139
- .long 39
- .long 188
- .long 244
- .long 246
- .long 132
- .long 48
- .long 119
- .long 144
- .long 180
- .long 138
- .long 134
- .long 193
- .long 82
- .long 182
- .long 120
- .long 121
- .long 86
- .long 220
- .long 209
- .long 3
- .long 91
- .long 241
- .long 149
- .long 85
- .long 205
- .long 150
- .long 113
- .long 216
- .long 31
- .long 100
- .long 41
- .long 164
- .long 177
- .long 214
- .long 153
- .long 231
- .long 38
- .long 71
- .long 185
- .long 174
- .long 97
- .long 201
- .long 29
- .long 95
- .long 7
- .long 92
- .long 54
- .long 254
- .long 191
- .long 118
- .long 34
- .long 221
- .long 131
- .long 11
- .long 163
- .long 99
- .long 234
- .long 81
- .long 227
- .long 147
- .long 156
- .long 176
- .long 17
- .long 142
- .long 69
- .long 12
- .long 110
- .long 62
- .long 27
- .long 255
- .long 0
- .long 194
- .long 59
- .long 116
- .long 242
- .long 252
- .long 19
- .long 21
- .long 187
- .long 53
- .long 207
- .long 129
- .long 64
- .long 135
- .long 61
- .long 40
- .long 167
- .long 237
- .long 102
- .long 223
- .long 106
- .long 159
- .long 197
- .long 189
- .long 215
- .long 137
- .long 36
- .long 32
- .long 22
- .long 5
- .long 23
- .long 125
- .long 161
- .long 52
- .long 103
- .long 117
- .long 70
- .long 37
- .long 247
- .long 101
- .long 203
- .long 169
- .long 124
- .long 126
- .long 44
- .long 123
- .long 152
- .long 238
- .long 145
- .long 45
- .long 171
- .long 114
- .long 253
- .long 10
- .long 192
- .long 136
- .long 4
- .long 157
- .long 249
- .long 30
- .long 35
- .long 72
- .long 175
- .long 63
- .long 77
- .long 90
- .long 181
- .long 16
- .long 96
- .long 111
- .long 133
- .long 104
- .long 75
- .long 162
- .long 93
- .long 56
- .long 66
- .long 240
- .long 8
- .long 50
- .long 84
- .long 229
- .long 49
- .long 210
- .long 173
- .long 239
- .long 141
- .long 1
- .long 87
- .long 18
- .long 2
- .long 198
- .long 143
- .long 57
- .long 225
- .long 160
- .long 58
- .long 217
- .long 168
- .long 206
- .long 245
- .long 204
- .long 199
- .long 6
- .long 73
- .long 60
- .long 20
- .long 230
- .long 211
- .long 233
- .long 94
- .long 200
- .long 88
- .long 9
- .long 74
- .long 155
- .long 33
- .long 15
- .long 219
- .long 130
- .long 226
- .long 202
- .long 83
- .long 236
- .long 42
- .long 172
- .long 165
- .long 218
- .long 55
- .long 222
- .long 46
- .long 107
- .long 98
- .long 154
- .long 109
- .long 67
- .long 196
- .long 178
- .long 127
- .long 158
- .long 13
- .long 243
- .long 65
- .long 79
- .long 166
- .long 248
- .long 25
- .long 224
- .long 115
- .long 80
- .long 68
- .long 51
- .long 184
- .long 128
- .long 232
- .long 208
- .long 151
- .long 122
- .long 26
- .long 212
- .long 105
- .long 43
- .long 179
- .long 213
- .long 235
- .long 148
- .long 146
- .long 89
- .long 14
- .long 195
- .long 28
- .long 78
- .long 112
- .long 76
- .long 250
- .long 47
- .long 24
- .long 251
- .long 140
- .long 108
- .long 186
- .long 190
- .long 228
- .long 170
- .long 183
- .long 139
- .long 39
- .long 188
- .long 244
- .long 246
- .long 132
- .long 48
- .long 119
- .long 144
- .long 180
- .long 138
- .long 134
- .long 193
- .long 82
- .long 182
- .long 120
- .long 121
- .long 86
- .long 220
- .long 209
- .long 3
- .long 91
- .long 241
- .long 149
- .long 85
- .long 205
- .long 150
- .long 113
- .long 216
- .long 31
- .long 100
- .long 41
- .long 164
- .long 177
- .long 214
- .long 153
- .long 231
- .long 38
- .long 71
- .long 185
- .long 174
- .long 97
- .long 201
- .long 29
- .long 95
- .long 7
- .long 92
- .long 54
- .long 254
- .long 191
- .long 118
- .long 34
- .long 221
- .long 131
- .long 11
- .long 163
- .long 99
- .long 234
- .long 81
- .long 227
- .long 147
- .long 156
- .long 176
- .long 17
- .long 142
- .long 69
- .long 12
- .long 110
- .long 62
- .long 27
- .long 255
- .long 0
- .long 194
- .long 59
- .long 116
- .long 242
- .long 252
- .long 19
- .long 21
- .long 187
- .long 53
- .long 207
- .long 129
- .long 64
- .long 135
- .long 61
- .long 40
- .long 167
- .long 237
- .long 102
- .long 223
- .long 106
- .long 159
- .long 197
- .long 189
- .long 215
- .long 137
- .long 36
- .long 32
- .long 22
- .long 5
- .align 8
- .LC0:
- .long 0
- .long 1127219200
- .align 16
- .LC1:
- .long -1
- .long 2147483647
- .long 0
- .long 0
- .align 8
- .LC2:
- .long 0
- .long 1072693248
- .align 4
- .LC3:
- .long 1086324736
- .align 4
- .LC4:
- .long -1049624576
- .align 4
- .LC5:
- .long 1092616192
- .align 4
- .LC6:
- .long 1065353216
- .align 4
- .LC8:
- .long 1015021568
- .align 4
- .LC9:
- .long 1056964608
- .align 4
- .LC10:
- .long 1023410176
- .align 8
- .LC11:
- .long 0
- .long 1072596096
- .ident "GCC: (rev5, Built by MinGW-W64 project) 4.8.1"
- .def __mingw_vprintf; .scl 2; .type 32; .endef
- .def clock; .scl 2; .type 32; .endef
# end of GCC-generated listing