        .globl sha256_transform
        .type sha256_transform, @function
sha256_transform:
.LFB29:
        .cfi_startproc
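        # Prologue: save the callee-saved registers and reserve a 328-byte
        # frame for the 64-word message schedule (at 48(%rsp)), the saved
        # context pointer and the stack-protector canary.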
        pushq %r15
        .cfi_def_cfa_offset 16
        .cfi_offset 15, -16
        pushq %r14
        .cfi_def_cfa_offset 24
        .cfi_offset 14, -24
        pushq %r13
        .cfi_def_cfa_offset 32
        .cfi_offset 13, -32
        movl $18, %r13d
        pushq %r12
        .cfi_def_cfa_offset 40
        .cfi_offset 12, -40
        pushq %rbp
        .cfi_def_cfa_offset 48
        .cfi_offset 6, -48
        pushq %rbx
        .cfi_def_cfa_offset 56
        .cfi_offset 3, -56
        subq $328, %rsp
        .cfi_def_cfa_offset 384
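        # Byte-swap setup: save the first argument (%rdi, presumably the
        # SHA256 context) at 8(%rsp), load the 64-byte input block from the
        # second argument (%rsi) and convert each 32-bit word from big-endian
        # to host order with SSE mask/shift/pack operations; the converted
        # words m[0..15] land at 48(%rsp).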
        movdqa .LC0(%rip), %xmm3
        movdqu (%rsi), %xmm4
        leaq 48(%rsp), %r14
        movq %rdi, 8(%rsp)
        movdqu 16(%rsi), %xmm1
        movdqa %xmm3, %xmm0
        movdqa %xmm3, %xmm6
        movdqa %xmm3, %xmm7
        movdqu 32(%rsi), %xmm5
        movdqa %xmm3, %xmm8
        movdqa %xmm3, %xmm9
        movdqa %xmm3, %xmm10
        pand %xmm1, %xmm6
        pand %xmm4, %xmm0
        movdqu 48(%rsi), %xmm2
        packuswb %xmm6, %xmm0
        pand %xmm5, %xmm7
        psrlw $8, %xmm1
        pand %xmm0, %xmm9
        movdqa %xmm3, %xmm11
        pand %xmm2, %xmm8
        psrlw $8, %xmm4
        packuswb %xmm8, %xmm7
        pand %xmm7, %xmm10
        psrlw $8, %xmm2
        packuswb %xmm1, %xmm4
        packuswb %xmm10, %xmm9
        pand %xmm4, %xmm11
        psrlw $8, %xmm7
        leaq 12(%r14), %r15
        pmovzxbw %xmm9, %xmm12
        psrlw $8, %xmm5
        packuswb %xmm2, %xmm5
        pand %xmm5, %xmm3
        psrlw $8, %xmm0
        packuswb %xmm3, %xmm11
        packuswb %xmm7, %xmm0
        pmovzxbw %xmm11, %xmm14
        pmovzxbw %xmm0, %xmm1
        pmovzxwd %xmm12, %xmm3
        psrlw $8, %xmm5
        pmovzxwd %xmm1, %xmm6
        psrldq $8, %xmm9
        psrldq $8, %xmm11
        pmovzxbw %xmm9, %xmm13
        psrlw $8, %xmm4
        pmovzxbw %xmm11, %xmm15
        packuswb %xmm5, %xmm4
        psrldq $8, %xmm0
        pmovzxbw %xmm4, %xmm2
        pmovzxwd %xmm14, %xmm5
        pmovzxbw %xmm0, %xmm0
        psrldq $8, %xmm14
        pmovzxwd %xmm2, %xmm7
        psrldq $8, %xmm12
        pmovzxwd %xmm14, %xmm8
        pmovzxwd %xmm13, %xmm14
        psrldq $8, %xmm1
        pmovzxwd %xmm12, %xmm9
        pmovzxwd %xmm15, %xmm12
        psrldq $8, %xmm4
        pmovzxwd %xmm1, %xmm10
        pmovzxwd %xmm0, %xmm1
        pmovzxbw %xmm4, %xmm4
        psrldq $8, %xmm2
        psrldq $8, %xmm15
        pmovzxwd %xmm2, %xmm11
        pmovzxwd %xmm4, %xmm2
        psrldq $8, %xmm13
        pmovzxwd %xmm15, %xmm15
        psrldq $8, %xmm0
        pmovzxwd %xmm13, %xmm13
        pmovzxwd %xmm0, %xmm0
        psrldq $8, %xmm4
        pslld $16, %xmm15
        pmovzxwd %xmm4, %xmm4
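        # Stack-protector canary: copy %fs:40 to 312(%rsp); it is checked
        # again just before the function returns.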
        movq %fs:40, %rax
        movq %rax, 312(%rsp)
        xorl %eax, %eax
        pslld $24, %xmm13
        pslld $8, %xmm0
        por %xmm13, %xmm15
        por %xmm4, %xmm0
        por %xmm15, %xmm0
        movaps %xmm0, 96(%rsp)
        movl 104(%rsp), %eax
        pslld $16, %xmm5
        movl 108(%rsp), %r9d
        pslld $24, %xmm3
        pslld $8, %xmm6
        por %xmm3, %xmm5
        pslld $16, %xmm8
        por %xmm7, %xmm6
        por %xmm6, %xmm5
        movl %eax, %ecx
        pslld $24, %xmm9
        movaps %xmm5, 48(%rsp)
        movl %eax, %r8d
        roll $15, %ecx
        pslld $8, %xmm1
        por %xmm9, %xmm8
        roll $13, %r8d
        movl %r9d, %ebp
        pslld $8, %xmm10
        por %xmm2, %xmm1
        xorl %r8d, %ecx
        movl %r9d, %r10d
        por %xmm11, %xmm10
        pslld $16, %xmm12
        por %xmm10, %xmm8
        roll $15, %ebp
        pslld $24, %xmm14
        movaps %xmm8, 64(%rsp)
        roll $13, %r10d
        shrl $10, %eax
        por %xmm14, %xmm12
        por %xmm1, %xmm12
        movaps %xmm12, 80(%rsp)
        movl 4(%r14), %edx
        xorl %r10d, %ebp
        shrl $10, %r9d
        xorl %eax, %ecx
        movd %xmm5, %esi
        xorl %r9d, %ebp
        addl 40(%r14), %ebp
        addl %esi, %ecx
        addl 36(%r14), %ecx
        movl %edx, %r8d
        movl %edx, %ebx
        roll $14, %ebx
        rorl $7, %r8d
        movl %edx, %edi
        xorl %ebx, %r8d
        movl 8(%r14), %ebx
        shrl $3, %edi
        addl %ebp, %edx
        xorl %edi, %r8d
        addl %ecx, %r8d
        movl %r8d, 64(%r14)
        movl %ebx, %eax
        movl %ebx, %r11d
        movl %ebx, %r12d
        roll $14, %r11d
        rorl $7, %eax
        xorl %r11d, %eax
        shrl $3, %r12d
        movl %ebx, %r11d
        xorl %r12d, %eax
        addl %edx, %eax
        movl %eax, 68(%r14)
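        # Message-schedule expansion: m[16] and m[17] were computed above;
        # the unrolled loop below produces m[18..61], four words per iteration,
        # as m[i] = sig1(m[i-2]) + m[i-7] + sig0(m[i-15]) + m[i-16], with
        # sig0(x) = ror7 ^ ror18 ^ shr3 and sig1(x) = ror17 ^ ror19 ^ shr10
        # (the roll $14/$15/$13 forms are the equivalent left rotates).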
        .p2align 4,,10
        .p2align 3
.L2:
        movl (%r15), %r9d
        movl %r8d, %edx
        movl %r8d, %esi
        shrl $10, %r8d
        roll $13, %esi
        roll $15, %edx
        movl 32(%r15), %edi
        movl %eax, %r10d
        xorl %esi, %edx
        movl 4(%r15), %r12d
        roll $15, %r10d
        xorl %edx, %r8d
        leal (%r8,%r11), %ecx
        movl %r9d, %ebx
        movl %r9d, %r8d
        roll $14, %r8d
        rorl $7, %ebx
        movl %r9d, %ebp
        xorl %r8d, %ebx
        shrl $3, %ebp
        addl %ecx, %edi
        xorl %ebp, %ebx
        movl %eax, %r11d
        shrl $10, %eax
        addl %edi, %ebx
        roll $13, %r11d
        movl %r12d, %esi
        movl 8(%r15), %edi
        xorl %r11d, %r10d
        movl %ebx, %r8d
        movl %ebx, %ebp
        movl %ebx, 60(%r15)
        xorl %r10d, %eax
        roll $15, %r8d
        addl 36(%r15), %eax
        movl %r12d, %edx
        roll $13, %ebp
        movl %r12d, %ecx
        movl 12(%r15), %r11d
        shrl $10, %ebx
        xorl %ebp, %r8d
        roll $14, %edx
        xorl %ebx, %r8d
        rorl $7, %esi
        movl 40(%r15), %ebx
        movl %edi, %r10d
        xorl %edx, %esi
        shrl $3, %ecx
        addl %r12d, %r8d
        leal (%rax,%r9), %r9d
        xorl %ecx, %esi
        movl %edi, %r12d
        addl %r9d, %esi
        movl %r11d, %edx
        movl %r11d, %ecx
        addl %r8d, %ebx
        movl %esi, %eax
        movl %esi, %r9d
        movl %esi, 64(%r15)
        movl %edi, %r8d
        rorl $7, %r8d
        roll $14, %r12d
        addl $4, %r13d
        roll $15, %eax
        roll $13, %r9d
        xorl %r12d, %r8d
        shrl $10, %esi
        xorl %r9d, %eax
        rorl $7, %edx
        xorl %esi, %eax
        addl 44(%r15), %eax
        movl %r11d, %esi
        shrl $3, %r10d
        roll $14, %esi
        xorl %r10d, %r8d
        xorl %esi, %edx
        shrl $3, %ecx
        addl %ebx, %r8d
        xorl %ecx, %edx
        movl %r8d, 68(%r15)
        addq $16, %r15
        addl %edi, %eax
        addl %edx, %eax
        movl %eax, 56(%r15)
        cmpl $62, %r13d
        jne .L2
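        # Tail of the schedule expansion: compute the remaining words
        # (m[62], m[63]) one at a time until the index reaches 64.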
        .p2align 4,,10
        .p2align 3
.L3:
        leal -2(%r13), %r15d
        movl %r13d, %r12d
        movl 48(%rsp,%r15,4), %edi
        leal -15(%r13), %ebp
        movl 48(%rsp,%rbp,4), %ebx
        leal -7(%r13), %r11d
        leal -16(%r13), %eax
        addl $1, %r13d
        movl 48(%rsp,%rax,4), %r9d
        movl %edi, %r10d
        movl %edi, %r8d
        addl 48(%rsp,%r11,4), %r9d
        shrl $10, %edi
        movl %ebx, %edx
        movl %ebx, %esi
        roll $15, %r10d
        roll $13, %r8d
        rorl $7, %edx
        xorl %r8d, %r10d
        roll $14, %esi
        xorl %edi, %r10d
        xorl %esi, %edx
        shrl $3, %ebx
        addl %r9d, %r10d
        xorl %ebx, %edx
        addl %r10d, %edx
        movl %edx, 48(%rsp,%r12,4)
        cmpl $64, %r13d
        jne .L3
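        # Schedule complete: reload the ctx pointer, load the eight state
        # words from ctx->state (bytes 80..111 of the context) into registers
        # and scratch slots, point %r15 at the round-constant table k, and
        # jump into the compression loop at .L4.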
        movq 8(%rsp), %rcx
        leaq k(%rip), %r15
        xorl %ebx, %ebx
        movl 88(%rcx), %r9d
        movl 92(%rcx), %edi
        movl 108(%rcx), %edx
        movl 80(%rcx), %r11d
        movl 84(%rcx), %r12d
        movl %r9d, %r8d
        movl %r9d, 24(%rsp)
        movl 96(%rcx), %r10d
        movl %edi, 28(%rsp)
        movl 100(%rcx), %r13d
        movl %edx, 44(%rsp)
        movl %edx, %r9d
        movl 104(%rcx), %esi
        movl %r11d, 16(%rsp)
        movl %r12d, 20(%rsp)
        movl %r10d, 32(%rsp)
        movl %r13d, 36(%rsp)
        movl %esi, 40(%rsp)
        movl %edi, 4(%rsp)
        jmp .L4
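        # Compression loop, unrolled three rounds per pass (.L6 does two,
        # .L4 one; %rbx walks m[] and k[] in 12-byte steps up to 252):
        #   t1 = h + Sigma1(e) + Ch(e,f,g) + k[i] + m[i]
        #   t2 = Sigma0(a) + Maj(a,b,c)
        # with Sigma1(x) = ror6 ^ ror11 ^ ror25 and Sigma0(x) = ror2 ^ ror13 ^ ror22
        # (roll $7 and roll $10 are the equivalent left rotates).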
        .p2align 4,,10
        .p2align 3
.L6:
        movl %ebp, %edx
        movl %ebp, %r9d
        movl %ebp, %ecx
        movl %r11d, 4(%rsp)
        rorl $11, %r9d
        rorl $6, %edx
        xorl %r9d, %edx
        movl 4(%r14,%rbx), %r9d
        roll $7, %ecx
        addl 4(%r15,%rbx), %r9d
        xorl %ecx, %edx
        movl %ebp, %ecx
        notl %ecx
        andl %r13d, %ecx
        addl %r9d, %edx
        movl %ebp, %r9d
        andl %r10d, %r9d
        xorl %r9d, %ecx
        movl %eax, %r9d
        addl %edx, %ecx
        movl %eax, %edx
        rorl $2, %r9d
        addl %ecx, %esi
        rorl $13, %edx
        movl %eax, %ecx
        xorl %edx, %r9d
        roll $10, %ecx
        movl %eax, %edx
        xorl %ecx, %r9d
        movl %eax, %ecx
        andl %r11d, %edx
        andl %r12d, %ecx
        addl %esi, %r8d
        xorl %edx, %ecx
        xorl %ecx, %edi
        addl %r9d, %edi
        movl %r8d, %r9d
        leal (%rsi,%rdi), %ecx
        movl %r8d, %esi
        movl %r8d, %edi
        rorl $11, %edi
        rorl $6, %esi
        xorl %edi, %esi
        movl 8(%r14,%rbx), %edi
        roll $7, %r9d
        addl 8(%r15,%rbx), %edi
        xorl %r9d, %esi
        movl %r8d, %r9d
        andl %ebp, %r9d
        addl %edi, %esi
        movl %r8d, %edi
        notl %edi
        andl %r10d, %edi
        xorl %r9d, %edi
        movl %ecx, %r9d
        addq $12, %rbx
        addl %esi, %edi
        movl %ecx, %esi
        roll $10, %r9d
        addl %r13d, %edi
        movl %ecx, %r13d
        rorl $13, %esi
        rorl $2, %r13d
        xorl %esi, %r13d
        movl %ebp, %esi
        xorl %r13d, %r9d
        movl %r11d, %r13d
        xorl %eax, %r13d
        andl %ecx, %r13d
        xorl %r13d, %edx
        movl %r8d, %r13d
        movl %eax, %r8d
        addl %r9d, %edx
        movl %r10d, %r9d
        leal (%rdi,%rdx), %r11d
        leal (%rdi,%r12), %r10d
        movl %ecx, %r12d
.L4:
        movl %r10d, %eax
        movl %r10d, %ecx
        movl (%r14,%rbx), %edx
        movl %r10d, %edi
        addl (%r15,%rbx), %edx
        rorl $11, %ecx
        rorl $6, %eax
        movl %r10d, %ebp
        xorl %ecx, %eax
        roll $7, %edi
        xorl %edi, %eax
        notl %ebp
        movl %r11d, %ecx
        andl %esi, %ebp
        movl %r11d, %edi
        rorl $2, %ecx
        addl %eax, %edx
        movl %r10d, %eax
        rorl $13, %edi
        andl %r13d, %eax
        xorl %edi, %ecx
        movl %r11d, %edi
        xorl %eax, %ebp
        movl %r11d, %eax
        andl %r12d, %edi
        addl %edx, %ebp
        andl %r8d, %eax
        movl %r11d, %edx
        addl %ebp, %r9d
        movl %r12d, %ebp
        xorl %edi, %eax
        andl %r8d, %ebp
        roll $10, %edx
        xorl %ebp, %eax
        movl 4(%rsp), %ebp
        xorl %edx, %ecx
        addl %ecx, %eax
        addl %r9d, %eax
        addl %r9d, %ebp
        cmpq $252, %rbx
        jne .L6
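        # All 64 rounds done: pack the eight working variables into XMM
        # registers, add them to the previous ctx->state values saved on the
        # stack, store the updated state back into the context (offsets 80
        # and 96), then verify the stack canary before restoring registers.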
        movd %eax, %xmm7
        movq 8(%rsp), %r14
        movd %r12d, %xmm5
        movd %r13d, %xmm9
        movq 312(%rsp), %rax
        xorq %fs:40, %rax
        pinsrd $1, %r11d, %xmm7
        movdqa %xmm7, %xmm1
        movd %ebp, %xmm7
        pinsrd $1, %r8d, %xmm5
        movd 24(%rsp), %xmm6
        punpcklqdq %xmm5, %xmm1
        pinsrd $1, 28(%rsp), %xmm6
        movd 16(%rsp), %xmm8
        pinsrd $1, %r10d, %xmm7
        pinsrd $1, 20(%rsp), %xmm8
        punpcklqdq %xmm6, %xmm8
        paddd %xmm1, %xmm8
        movdqa %xmm7, %xmm1
        pinsrd $1, %esi, %xmm9
        movd 40(%rsp), %xmm11
        punpcklqdq %xmm9, %xmm1
        movd 32(%rsp), %xmm12
        pinsrd $1, 44(%rsp), %xmm11
        pinsrd $1, 36(%rsp), %xmm12
        punpcklqdq %xmm11, %xmm12
        paddd %xmm1, %xmm12
        movups %xmm8, 80(%r14)
        movups %xmm12, 96(%r14)
        jne .L15
        addq $328, %rsp
        .cfi_remember_state
        .cfi_def_cfa_offset 56
        popq %rbx
        .cfi_def_cfa_offset 48
        popq %rbp
        .cfi_def_cfa_offset 40
        popq %r12
        .cfi_def_cfa_offset 32
        popq %r13
        .cfi_def_cfa_offset 24
        popq %r14
        .cfi_def_cfa_offset 16
        popq %r15
        .cfi_def_cfa_offset 8
        ret
.L15:
        .cfi_restore_state
        call __stack_chk_fail@PLT
        .cfi_endproc
.LFE29:
        .size sha256_transform, .-sha256_transform
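For reference, the listing above is consistent with a GCC-compiled scalar SHA-256 compression step; the SSE sequence at the top would then be the auto-vectorised big-endian byte swap of the input block, while the rounds themselves stay scalar. Below is a minimal C sketch of such a transform. It is an assumption, not the original source: the SHA256_CTX layout (data[64], datalen, bitlen, state[8], which puts state[] at byte offset 80 as used above) and the external k[] round-constant table are inferred from the offsets in the assembly.

/* Minimal sketch of a scalar SHA-256 transform matching the listing above.
 * Struct layout and k[] are assumptions inferred from the assembly offsets. */
#include <stdint.h>
#include <stddef.h>

typedef struct {
    uint8_t  data[64];   /* current input block                   */
    uint32_t datalen;    /* bytes buffered in data[]              */
    uint64_t bitlen;     /* total message length in bits          */
    uint32_t state[8];   /* hash state a..h (byte offset 80)      */
} SHA256_CTX;

extern const uint32_t k[64];          /* SHA-256 round constants (k(%rip)) */

#define ROTR(x, n)  (((x) >> (n)) | ((x) << (32 - (n))))
#define CH(x, y, z)  (((x) & (y)) ^ (~(x) & (z)))
#define MAJ(x, y, z) (((x) & (y)) ^ ((x) & (z)) ^ ((y) & (z)))
#define EP0(x)  (ROTR(x, 2) ^ ROTR(x, 13) ^ ROTR(x, 22))
#define EP1(x)  (ROTR(x, 6) ^ ROTR(x, 11) ^ ROTR(x, 25))
#define SIG0(x) (ROTR(x, 7) ^ ROTR(x, 18) ^ ((x) >> 3))
#define SIG1(x) (ROTR(x, 17) ^ ROTR(x, 19) ^ ((x) >> 10))

void sha256_transform(SHA256_CTX *ctx, const uint8_t data[64])
{
    uint32_t a, b, c, d, e, f, g, h, t1, t2, m[64];
    size_t i, j;

    /* Byte-swap the block into 16 words (vectorised with SSE in the listing). */
    for (i = 0, j = 0; i < 16; ++i, j += 4)
        m[i] = ((uint32_t)data[j] << 24) | ((uint32_t)data[j + 1] << 16) |
               ((uint32_t)data[j + 2] << 8) | (uint32_t)data[j + 3];

    /* Message-schedule expansion (the .L2/.L3 loops). */
    for (; i < 64; ++i)
        m[i] = SIG1(m[i - 2]) + m[i - 7] + SIG0(m[i - 15]) + m[i - 16];

    a = ctx->state[0]; b = ctx->state[1]; c = ctx->state[2]; d = ctx->state[3];
    e = ctx->state[4]; f = ctx->state[5]; g = ctx->state[6]; h = ctx->state[7];

    /* 64 compression rounds (the .L4/.L6 loop, unrolled three per pass). */
    for (i = 0; i < 64; ++i) {
        t1 = h + EP1(e) + CH(e, f, g) + k[i] + m[i];
        t2 = EP0(a) + MAJ(a, b, c);
        h = g; g = f; f = e; e = d + t1;
        d = c; c = b; b = a; a = t1 + t2;
    }

    /* Fold the working variables back into the state (the paddd/movups tail). */
    ctx->state[0] += a; ctx->state[1] += b; ctx->state[2] += c; ctx->state[3] += d;
    ctx->state[4] += e; ctx->state[5] += f; ctx->state[6] += g; ctx->state[7] += h;
}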