name
stringlengths
1
473k
code
stringlengths
7
647k
asm
stringlengths
4
3.39M
file
stringlengths
8
196
ncnn::convolution_pack8_avx(ncnn::Mat const&, ncnn::Mat&, ncnn::Mat const&, ncnn::Mat const&, int, int, int, int, int, int, int, ncnn::Mat const&, ncnn::Option const&)
// Generic (im2col-free) convolution for pack8 layout on AVX: every "pixel" of
// both the input and output blobs holds 8 interleaved channel floats, and each
// kernel tap carries an 8x8 (in-lane x out-lane) weight tile, i.e. 64 floats
// per tap (see kptr += 64 below).
//
// Parameters:
//   bottom_blob        - input feature map, pack8 (8 floats per element)
//   top_blob           - pre-allocated output feature map, pack8
//   weight_data_packed - weights repacked so channel(p) streams 64 floats per
//                        (input-channel, tap) pair in the exact order consumed
//                        by the inner loop
//   bias_data          - optional per-output-channel bias, 8 floats per channel
//                        (empty Mat means no bias)
//   kernel_w/h, dilation_w/h, stride_w/h - usual convolution geometry
//   activation_type, activation_params   - forwarded to activation_avx
//   opt                - runtime options; only num_threads is read here
static void convolution_pack8_avx(const Mat& bottom_blob, Mat& top_blob, const Mat& weight_data_packed, const Mat& bias_data, int kernel_w, int kernel_h, int dilation_w, int dilation_h, int stride_w, int stride_h, int activation_type, const Mat& activation_params, const Option& opt)
{
    int w = bottom_blob.w;
    int channels = bottom_blob.c;
    int outw = top_blob.w;
    int outh = top_blob.h;
    int outch = top_blob.c;

    // Number of taps in one kernel window.
    const int maxk = kernel_w * kernel_h;

    // kernel offsets: space_ofs[k] is the element offset (in pack8 elements,
    // i.e. groups of 8 floats) of tap k relative to the window's top-left
    // element inside one input row-major plane. `gap` jumps from the end of
    // one dilated kernel row to the start of the next.
    std::vector<int> _space_ofs(maxk);
    int* space_ofs = &_space_ofs[0];
    {
        int p1 = 0;
        int p2 = 0;
        int gap = w * dilation_h - kernel_w * dilation_w;
        for (int i = 0; i < kernel_h; i++)
        {
            for (int j = 0; j < kernel_w; j++)
            {
                space_ofs[p1] = p2;
                p1++;
                p2 += dilation_w;
            }
            p2 += gap;
        }
    }

    // NOTE(review): relies on ncnn's Mat -> const float* conversion; presumably
    // yields nullptr for an empty Mat, which the null check below depends on —
    // confirm against Mat's conversion operator.
    const float* bias_data_ptr = bias_data;

    // One output channel per OpenMP task; all state below is per-iteration.
    #pragma omp parallel for num_threads(opt.num_threads)
    for (int p = 0; p < outch; p++)
    {
        float* outptr = top_blob.channel(p);

        for (int i = 0; i < outh; i++)
        {
            for (int j = 0; j < outw; j++)
            {
                // 8-lane accumulator for the 8 output channels of this element.
                __m256 _sum = _mm256_setzero_ps();

                if (bias_data_ptr)
                {
                    // Bias may not be 32-byte aligned -> unaligned load.
                    _sum = _mm256_loadu_ps(bias_data_ptr + p * 8);
                }

                // Weights for output-channel group p, streamed sequentially.
                const float* kptr = weight_data_packed.channel(p);

                // channels: accumulate every input-channel group.
                for (int q = 0; q < channels; q++)
                {
                    const Mat m = bottom_blob.channel(q);
                    // Top-left element of the receptive field for output (i, j);
                    // * 8 converts element index to float index (pack8).
                    const float* sptr = m.row(i * stride_h) + j * stride_w * 8;

                    for (int k = 0; k < maxk; k++)
                    {
                        // Input element for tap k: 8 input-lane values.
                        const float* slptr = sptr + space_ofs[k] * 8;

                        // Broadcast each of the 8 input lanes across a ymm...
                        __m256 _val0 = _mm256_broadcast_ss(slptr);
                        __m256 _val1 = _mm256_broadcast_ss(slptr + 1);
                        __m256 _val2 = _mm256_broadcast_ss(slptr + 2);
                        __m256 _val3 = _mm256_broadcast_ss(slptr + 3);
                        __m256 _val4 = _mm256_broadcast_ss(slptr + 4);
                        __m256 _val5 = _mm256_broadcast_ss(slptr + 5);
                        __m256 _val6 = _mm256_broadcast_ss(slptr + 6);
                        __m256 _val7 = _mm256_broadcast_ss(slptr + 7);

                        // ...and FMA it against that lane's 8 output weights.
                        // Aligned loads: assumes weight_data_packed rows are
                        // 32-byte aligned — TODO confirm (ncnn allocator).
                        // _mm256_comp_fmadd_ps is presumably a project
                        // FMA-or-mul+add compatibility wrapper.
                        __m256 _w0 = _mm256_load_ps(kptr);
                        __m256 _w1 = _mm256_load_ps(kptr + 8);
                        _sum = _mm256_comp_fmadd_ps(_val0, _w0, _sum);
                        _sum = _mm256_comp_fmadd_ps(_val1, _w1, _sum);
                        __m256 _w2 = _mm256_load_ps(kptr + 16);
                        __m256 _w3 = _mm256_load_ps(kptr + 24);
                        _sum = _mm256_comp_fmadd_ps(_val2, _w2, _sum);
                        _sum = _mm256_comp_fmadd_ps(_val3, _w3, _sum);
                        __m256 _w4 = _mm256_load_ps(kptr + 32);
                        __m256 _w5 = _mm256_load_ps(kptr + 40);
                        _sum = _mm256_comp_fmadd_ps(_val4, _w4, _sum);
                        _sum = _mm256_comp_fmadd_ps(_val5, _w5, _sum);
                        __m256 _w6 = _mm256_load_ps(kptr + 48);
                        __m256 _w7 = _mm256_load_ps(kptr + 56);
                        _sum = _mm256_comp_fmadd_ps(_val6, _w6, _sum);
                        _sum = _mm256_comp_fmadd_ps(_val7, _w7, _sum);

                        // 8 lanes x 8 weights consumed for this tap.
                        kptr += 64;
                    }
                }

                // Apply the configured activation to the 8 output lanes.
                _sum = activation_avx(_sum, activation_type, activation_params);

                // Aligned store: assumes top_blob rows are 32-byte aligned —
                // TODO confirm (ncnn allocator).
                _mm256_store_ps(outptr + j * 8, _sum);
            }

            outptr += outw * 8;
        }
    }
}
pushq %rbp movq %rsp, %rbp andq $-0x20, %rsp subq $0x5580, %rsp # imm = 0x5580 movq 0x40(%rbp), %rax movq 0x38(%rbp), %rax movl 0x30(%rbp), %eax movl 0x28(%rbp), %eax movl 0x20(%rbp), %eax movl 0x18(%rbp), %eax movl 0x10(%rbp), %eax movq %rdi, 0x908(%rsp) movq %rsi, 0x900(%rsp) movq %rdx, 0x8f8(%rsp) movq %rcx, 0x8f0(%rsp) movl %r8d, 0x8ec(%rsp) movl %r9d, 0x8e8(%rsp) movq 0x908(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x8e4(%rsp) movq 0x908(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0x8e0(%rsp) movq 0x900(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x8dc(%rsp) movq 0x900(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0x8d8(%rsp) movq 0x900(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0x8d4(%rsp) movl 0x8ec(%rsp), %eax movl 0x8e8(%rsp), %ecx imull %ecx, %eax movl %eax, 0x8d0(%rsp) movslq 0x8d0(%rsp), %rax movq %rax, 0x510(%rsp) leaq 0x8b7(%rsp), %rdi movq %rdi, 0x518(%rsp) callq 0x99670 movq 0x510(%rsp), %rsi movq 0x518(%rsp), %rdx leaq 0x8b8(%rsp), %rdi callq 0xa5960 jmp 0x508e2c leaq 0x8b7(%rsp), %rdi callq 0x99e50 leaq 0x8b8(%rsp), %rdi xorl %eax, %eax movl %eax, %esi callq 0x98a00 movq %rax, 0x898(%rsp) movl $0x0, 0x894(%rsp) movl $0x0, 0x890(%rsp) movl 0x8e4(%rsp), %eax imull 0x18(%rbp), %eax movl 0x8ec(%rsp), %ecx imull 0x10(%rbp), %ecx subl %ecx, %eax movl %eax, 0x88c(%rsp) movl $0x0, 0x888(%rsp) movl 0x888(%rsp), %eax cmpl 0x8e8(%rsp), %eax jge 0x508f61 movl $0x0, 0x884(%rsp) movl 0x884(%rsp), %eax cmpl 0x8ec(%rsp), %eax jge 0x508f36 movl 0x890(%rsp), %edx movq 0x898(%rsp), %rax movslq 0x894(%rsp), %rcx movl %edx, (%rax,%rcx,4) movl 0x894(%rsp), %eax addl $0x1, %eax movl %eax, 0x894(%rsp) movl 0x10(%rbp), %eax addl 0x890(%rsp), %eax movl %eax, 0x890(%rsp) movl 0x884(%rsp), %eax addl $0x1, %eax movl %eax, 0x884(%rsp) jmp 0x508eb1 movq %rax, %rcx movl %edx, %eax movq %rcx, 0x8a8(%rsp) movl %eax, 0x8a4(%rsp) leaq 0x8b7(%rsp), %rdi callq 0x99e50 jmp 0x50e7a6 movl 0x88c(%rsp), %eax addl 0x890(%rsp), %eax movl %eax, 0x890(%rsp) movl 0x888(%rsp), %eax addl 
$0x1, %eax movl %eax, 0x888(%rsp) jmp 0x508e92 movq 0x8f0(%rsp), %rax movq %rax, 0x9e0(%rsp) movq 0x9e0(%rsp), %rax movq (%rax), %rax movq %rax, 0x508(%rsp) movq 0x508(%rsp), %rax movq %rax, 0x878(%rsp) movl $0x0, 0x874(%rsp) movl 0x874(%rsp), %eax cmpl 0x8d4(%rsp), %eax jge 0x50e791 movq 0x900(%rsp), %rcx movl 0x874(%rsp), %eax leaq 0x820(%rsp), %rdx movq %rdx, 0x9b0(%rsp) movq %rcx, 0x9a8(%rsp) movl %eax, 0x9a4(%rsp) movq 0x9a8(%rsp), %rax movq %rax, 0x4f8(%rsp) movb $0x0, 0x9a3(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x9a4(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x820(%rsp), %r10 movq %r10, 0x5538(%rsp) movl %r9d, 0x5534(%rsp) movl %r8d, 0x5530(%rsp) movl %edi, 0x552c(%rsp) movq %rsi, 0x5520(%rsp) movq %rdx, 0x5518(%rsp) movl %ecx, 0x5514(%rsp) movq %rax, 0x5508(%rsp) movq 0x5538(%rsp), %rcx movq %rcx, 0x500(%rsp) movq 0x5520(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x5518(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x5514(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x5508(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x5534(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x5530(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x552c(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x5548(%rsp) movl $0x10, 0x5544(%rsp) movq 0x5548(%rsp), %rax movslq 0x5544(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x5544(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rcx movq 0x500(%rsp), %rax movq %rcx, 0x40(%rax) movq 0x4f8(%rsp), %rax movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x848(%rsp) cmpl $0x4, 0x28(%rax) jne 0x509177 movq 0x4f8(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 
0x860(%rsp) movb $0x1, 0x9a3(%rsp) testb $0x1, 0x9a3(%rsp) jne 0x5092b6 leaq 0x820(%rsp), %rax movq %rax, 0x9b8(%rsp) movq 0x9b8(%rsp), %rax movq %rax, 0x9c8(%rsp) movq 0x9c8(%rsp), %rax movq %rax, 0x4f0(%rsp) cmpq $0x0, 0x8(%rax) je 0x50925b movq 0x4f0(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x9c4(%rsp) # imm = 0xFFFFFFFF movl 0x9c4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x9c0(%rsp) cmpl $0x1, 0x9c0(%rsp) jne 0x50925b movq 0x4f0(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x509229 movq 0x4f0(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x509227 jmp 0x509259 movq 0x4f0(%rsp), %rax movq (%rax), %rax movq %rax, 0xa68(%rsp) cmpq $0x0, 0xa68(%rsp) je 0x509257 movq 0xa68(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x509259 jmp 0x50925b movq 0x4f0(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x5092b6 movq %rax, %rdi callq 0x5fc90 jmp 0x5092b8 leaq 0x820(%rsp), %rax movq %rax, 0x9d0(%rsp) movq 0x9d0(%rsp), %rax movq (%rax), %rax movq %rax, 0x4e8(%rsp) leaq 0x820(%rsp), %rax movq %rax, 0x910(%rsp) movq 0x910(%rsp), %rax movq %rax, 0x998(%rsp) movq 0x998(%rsp), %rax movq %rax, 0x4e0(%rsp) cmpq $0x0, 0x8(%rax) je 0x5093a9 movq 0x4e0(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x994(%rsp) # imm = 0xFFFFFFFF movl 0x994(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x990(%rsp) cmpl $0x1, 0x990(%rsp) jne 0x5093a9 movq 0x4e0(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x509377 movq 0x4e0(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x509375 jmp 0x5093a7 movq 0x4e0(%rsp), %rax movq (%rax), %rax movq %rax, 0xa70(%rsp) cmpq $0x0, 0xa70(%rsp) je 0x5093a5 movq 0xa70(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x5093a7 jmp 0x5093a9 movq 0x4e0(%rsp), 
%rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x509404 movq %rax, %rdi callq 0x5fc90 movq 0x4e8(%rsp), %rax movq %rax, 0x868(%rsp) movl $0x0, 0x81c(%rsp) movl 0x81c(%rsp), %eax cmpl 0x8d8(%rsp), %eax jge 0x50e779 movl $0x0, 0x818(%rsp) movl 0x818(%rsp), %eax cmpl 0x8dc(%rsp), %eax jge 0x50e73f vxorps %xmm0, %xmm0, %xmm0 vmovaps %ymm0, 0x1320(%rsp) vmovaps 0x1320(%rsp), %ymm0 vmovaps %ymm0, 0x7e0(%rsp) cmpq $0x0, 0x878(%rsp) je 0x5094b5 movq 0x878(%rsp), %rax movl 0x874(%rsp), %ecx shll $0x3, %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0xb18(%rsp) movq 0xb18(%rsp), %rax vmovups (%rax), %ymm0 vmovaps %ymm0, 0x7e0(%rsp) movq 0x8f8(%rsp), %rcx movl 0x874(%rsp), %eax leaq 0x790(%rsp), %rdx movq %rdx, 0xa38(%rsp) movq %rcx, 0xa30(%rsp) movl %eax, 0xa2c(%rsp) movq 0xa30(%rsp), %rax movq %rax, 0x4d0(%rsp) movb $0x0, 0xa2b(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0xa2c(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x790(%rsp), %r10 movq %r10, 0x54c8(%rsp) movl %r9d, 0x54c4(%rsp) movl %r8d, 0x54c0(%rsp) movl %edi, 0x54bc(%rsp) movq %rsi, 0x54b0(%rsp) movq %rdx, 0x54a8(%rsp) movl %ecx, 0x54a4(%rsp) movq %rax, 0x5498(%rsp) movq 0x54c8(%rsp), %rcx movq %rcx, 0x4d8(%rsp) movq 0x54b0(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x54a8(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x54a4(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x5498(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x54c4(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x54c0(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x54bc(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax 
imulq 0x10(%rcx), %rax movq %rax, 0x5568(%rsp) movl $0x10, 0x5564(%rsp) movq 0x5568(%rsp), %rax movslq 0x5564(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x5564(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rcx movq 0x4d8(%rsp), %rax movq %rcx, 0x40(%rax) movq 0x4d0(%rsp), %rax movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x7b8(%rsp) cmpl $0x4, 0x28(%rax) jne 0x509679 movq 0x4d0(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x7d0(%rsp) movb $0x1, 0xa2b(%rsp) testb $0x1, 0xa2b(%rsp) jne 0x5097b8 leaq 0x790(%rsp), %rax movq %rax, 0xa40(%rsp) movq 0xa40(%rsp), %rax movq %rax, 0xa50(%rsp) movq 0xa50(%rsp), %rax movq %rax, 0x4c8(%rsp) cmpq $0x0, 0x8(%rax) je 0x50975d movq 0x4c8(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xa4c(%rsp) # imm = 0xFFFFFFFF movl 0xa4c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xa48(%rsp) cmpl $0x1, 0xa48(%rsp) jne 0x50975d movq 0x4c8(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x50972b movq 0x4c8(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x509729 jmp 0x50975b movq 0x4c8(%rsp), %rax movq (%rax), %rax movq %rax, 0xa58(%rsp) cmpq $0x0, 0xa58(%rsp) je 0x509759 movq 0xa58(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x50975b jmp 0x50975d movq 0x4c8(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x5097b8 movq %rax, %rdi callq 0x5fc90 jmp 0x5097ba leaq 0x790(%rsp), %rax movq %rax, 0x9d8(%rsp) movq 0x9d8(%rsp), %rax movq (%rax), %rax movq %rax, 0x4c0(%rsp) leaq 0x790(%rsp), %rax movq %rax, 0x920(%rsp) movq 0x920(%rsp), %rax movq %rax, 0x978(%rsp) movq 0x978(%rsp), %rax movq %rax, 0x4b8(%rsp) cmpq $0x0, 0x8(%rax) je 0x5098ab movq 0x4b8(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 
0x974(%rsp) # imm = 0xFFFFFFFF movl 0x974(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x970(%rsp) cmpl $0x1, 0x970(%rsp) jne 0x5098ab movq 0x4b8(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x509879 movq 0x4b8(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x509877 jmp 0x5098a9 movq 0x4b8(%rsp), %rax movq (%rax), %rax movq %rax, 0xa80(%rsp) cmpq $0x0, 0xa80(%rsp) je 0x5098a7 movq 0xa80(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x5098a9 jmp 0x5098ab movq 0x4b8(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x509906 movq %rax, %rdi callq 0x5fc90 movq 0x4c0(%rsp), %rax movq %rax, 0x7d8(%rsp) movl $0x0, 0x78c(%rsp) movl 0x78c(%rsp), %eax cmpl 0x8e0(%rsp), %eax jge 0x50a758 movq 0x908(%rsp), %rcx movl 0x78c(%rsp), %eax leaq 0x740(%rsp), %rdx movq %rdx, 0xa08(%rsp) movq %rcx, 0xa00(%rsp) movl %eax, 0x9fc(%rsp) movq 0xa00(%rsp), %rax movq %rax, 0x4a8(%rsp) movb $0x0, 0x9fb(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x9fc(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x740(%rsp), %r10 movq %r10, 0x5500(%rsp) movl %r9d, 0x54fc(%rsp) movl %r8d, 0x54f8(%rsp) movl %edi, 0x54f4(%rsp) movq %rsi, 0x54e8(%rsp) movq %rdx, 0x54e0(%rsp) movl %ecx, 0x54dc(%rsp) movq %rax, 0x54d0(%rsp) movq 0x5500(%rsp), %rcx movq %rcx, 0x4b0(%rsp) movq 0x54e8(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x54e0(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x54dc(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x54d0(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x54fc(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x54f8(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 
0x54f4(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x5558(%rsp) movl $0x10, 0x5554(%rsp) movq 0x5558(%rsp), %rax movslq 0x5554(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x5554(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rcx movq 0x4b0(%rsp), %rax movq %rcx, 0x40(%rax) movq 0x4a8(%rsp), %rax movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x768(%rsp) cmpl $0x4, 0x28(%rax) jne 0x509af9 movq 0x4a8(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x780(%rsp) movb $0x1, 0x9fb(%rsp) testb $0x1, 0x9fb(%rsp) jne 0x509c38 leaq 0x740(%rsp), %rax movq %rax, 0xa10(%rsp) movq 0xa10(%rsp), %rax movq %rax, 0xa20(%rsp) movq 0xa20(%rsp), %rax movq %rax, 0x4a0(%rsp) cmpq $0x0, 0x8(%rax) je 0x509bdd movq 0x4a0(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xa1c(%rsp) # imm = 0xFFFFFFFF movl 0xa1c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xa18(%rsp) cmpl $0x1, 0xa18(%rsp) jne 0x509bdd movq 0x4a0(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x509bab movq 0x4a0(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x509ba9 jmp 0x509bdb movq 0x4a0(%rsp), %rax movq (%rax), %rax movq %rax, 0xa60(%rsp) cmpq $0x0, 0xa60(%rsp) je 0x509bd9 movq 0xa60(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x509bdb jmp 0x509bdd movq 0x4a0(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x509c38 movq %rax, %rdi callq 0x5fc90 jmp 0x509c3a movl 0x81c(%rsp), %eax imull 0x28(%rbp), %eax leaq 0x740(%rsp), %rcx movq %rcx, 0x9f0(%rsp) movl %eax, 0x9ec(%rsp) movq 0x9f0(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0x9ec(%rsp), %rsi imulq %rsi, %rcx imulq 
0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x498(%rsp) movq 0x498(%rsp), %rax movl 0x818(%rsp), %ecx imull 0x20(%rbp), %ecx shll $0x3, %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x738(%rsp) movl $0x0, 0x734(%rsp) movl 0x734(%rsp), %eax cmpl 0x8d0(%rsp), %eax jge 0x50a617 movq 0x738(%rsp), %rax movq 0x898(%rsp), %rcx movslq 0x734(%rsp), %rdx movl (%rcx,%rdx,4), %ecx shll $0x3, %ecx movslq %ecx, %rcx leaq (%rax,%rcx,4), %rax movq %rax, 0x728(%rsp) movq 0x728(%rsp), %rax movq %rax, 0xd18(%rsp) movq 0xd18(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0xd14(%rsp) vbroadcastss 0xd14(%rsp), %ymm0 vmovaps %ymm0, 0xce0(%rsp) vmovaps 0xce0(%rsp), %ymm0 vmovaps %ymm0, 0x700(%rsp) movq 0x728(%rsp), %rax addq $0x4, %rax movq %rax, 0xcd8(%rsp) movq 0xcd8(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0xcd4(%rsp) vbroadcastss 0xcd4(%rsp), %ymm0 vmovaps %ymm0, 0xca0(%rsp) vmovaps 0xca0(%rsp), %ymm0 vmovaps %ymm0, 0x6e0(%rsp) movq 0x728(%rsp), %rax addq $0x8, %rax movq %rax, 0xc98(%rsp) movq 0xc98(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0xc94(%rsp) vbroadcastss 0xc94(%rsp), %ymm0 vmovaps %ymm0, 0xc60(%rsp) vmovaps 0xc60(%rsp), %ymm0 vmovaps %ymm0, 0x6c0(%rsp) movq 0x728(%rsp), %rax addq $0xc, %rax movq %rax, 0xc58(%rsp) movq 0xc58(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0xc54(%rsp) vbroadcastss 0xc54(%rsp), %ymm0 vmovaps %ymm0, 0xc20(%rsp) vmovaps 0xc20(%rsp), %ymm0 vmovaps %ymm0, 0x6a0(%rsp) movq 0x728(%rsp), %rax addq $0x10, %rax movq %rax, 0xc18(%rsp) movq 0xc18(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0xc14(%rsp) vbroadcastss 0xc14(%rsp), %ymm0 vmovaps %ymm0, 0xbe0(%rsp) vmovaps 0xbe0(%rsp), %ymm0 vmovaps %ymm0, 0x680(%rsp) movq 0x728(%rsp), %rax addq $0x14, %rax movq %rax, 0xbd8(%rsp) movq 0xbd8(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0xbd4(%rsp) vbroadcastss 0xbd4(%rsp), %ymm0 vmovaps %ymm0, 0xba0(%rsp) vmovaps 0xba0(%rsp), %ymm0 vmovaps %ymm0, 0x660(%rsp) movq 0x728(%rsp), %rax addq $0x18, %rax movq %rax, 0xb98(%rsp) movq 
0xb98(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0xb94(%rsp) vbroadcastss 0xb94(%rsp), %ymm0 vmovaps %ymm0, 0xb60(%rsp) vmovaps 0xb60(%rsp), %ymm0 vmovaps %ymm0, 0x640(%rsp) movq 0x728(%rsp), %rax addq $0x1c, %rax movq %rax, 0xb58(%rsp) movq 0xb58(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0xb54(%rsp) vbroadcastss 0xb54(%rsp), %ymm0 vmovaps %ymm0, 0xb20(%rsp) vmovaps 0xb20(%rsp), %ymm0 vmovaps %ymm0, 0x620(%rsp) movq 0x7d8(%rsp), %rax movq %rax, 0xad8(%rsp) movq 0xad8(%rsp), %rax vmovaps (%rax), %ymm0 vmovaps %ymm0, 0x600(%rsp) movq 0x7d8(%rsp), %rax addq $0x20, %rax movq %rax, 0xad0(%rsp) movq 0xad0(%rsp), %rax vmovaps (%rax), %ymm0 vmovaps %ymm0, 0x5e0(%rsp) vmovaps 0x700(%rsp), %ymm2 vmovaps 0x600(%rsp), %ymm1 vmovaps 0x7e0(%rsp), %ymm0 vmovaps %ymm2, 0x1000(%rsp) vmovaps %ymm1, 0xfe0(%rsp) vmovaps %ymm0, 0xfc0(%rsp) vmovaps 0x1000(%rsp), %ymm2 vmovaps 0xfe0(%rsp), %ymm1 vmovaps 0xfc0(%rsp), %ymm0 vmovaps %ymm2, 0x1060(%rsp) vmovaps %ymm1, 0x1040(%rsp) vmovaps %ymm0, 0x1020(%rsp) vmovaps 0x1060(%rsp), %ymm1 vmovaps 0x1040(%rsp), %ymm0 vmovaps 0x1020(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x460(%rsp) vmovaps 0x460(%rsp), %ymm0 vmovaps %ymm0, 0x7e0(%rsp) vmovaps 0x6e0(%rsp), %ymm2 vmovaps 0x5e0(%rsp), %ymm1 vmovaps 0x7e0(%rsp), %ymm0 vmovaps %ymm2, 0xfa0(%rsp) vmovaps %ymm1, 0xf80(%rsp) vmovaps %ymm0, 0xf60(%rsp) vmovaps 0xfa0(%rsp), %ymm2 vmovaps 0xf80(%rsp), %ymm1 vmovaps 0xf60(%rsp), %ymm0 vmovaps %ymm2, 0x10c0(%rsp) vmovaps %ymm1, 0x10a0(%rsp) vmovaps %ymm0, 0x1080(%rsp) vmovaps 0x10c0(%rsp), %ymm1 vmovaps 0x10a0(%rsp), %ymm0 vmovaps 0x1080(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x440(%rsp) vmovaps 0x440(%rsp), %ymm0 vmovaps %ymm0, 0x7e0(%rsp) movq 0x7d8(%rsp), %rax addq $0x40, %rax movq %rax, 0xac8(%rsp) movq 0xac8(%rsp), %rax vmovaps (%rax), %ymm0 vmovaps %ymm0, 0x5c0(%rsp) movq 0x7d8(%rsp), %rax addq $0x60, %rax movq %rax, 0xac0(%rsp) movq 
0xac0(%rsp), %rax vmovaps (%rax), %ymm0 vmovaps %ymm0, 0x5a0(%rsp) vmovaps 0x6c0(%rsp), %ymm2 vmovaps 0x5c0(%rsp), %ymm1 vmovaps 0x7e0(%rsp), %ymm0 vmovaps %ymm2, 0xf40(%rsp) vmovaps %ymm1, 0xf20(%rsp) vmovaps %ymm0, 0xf00(%rsp) vmovaps 0xf40(%rsp), %ymm2 vmovaps 0xf20(%rsp), %ymm1 vmovaps 0xf00(%rsp), %ymm0 vmovaps %ymm2, 0x1120(%rsp) vmovaps %ymm1, 0x1100(%rsp) vmovaps %ymm0, 0x10e0(%rsp) vmovaps 0x1120(%rsp), %ymm1 vmovaps 0x1100(%rsp), %ymm0 vmovaps 0x10e0(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x420(%rsp) vmovaps 0x420(%rsp), %ymm0 vmovaps %ymm0, 0x7e0(%rsp) vmovaps 0x6a0(%rsp), %ymm2 vmovaps 0x5a0(%rsp), %ymm1 vmovaps 0x7e0(%rsp), %ymm0 vmovaps %ymm2, 0xee0(%rsp) vmovaps %ymm1, 0xec0(%rsp) vmovaps %ymm0, 0xea0(%rsp) vmovaps 0xee0(%rsp), %ymm2 vmovaps 0xec0(%rsp), %ymm1 vmovaps 0xea0(%rsp), %ymm0 vmovaps %ymm2, 0x1180(%rsp) vmovaps %ymm1, 0x1160(%rsp) vmovaps %ymm0, 0x1140(%rsp) vmovaps 0x1180(%rsp), %ymm1 vmovaps 0x1160(%rsp), %ymm0 vmovaps 0x1140(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x400(%rsp) vmovaps 0x400(%rsp), %ymm0 vmovaps %ymm0, 0x7e0(%rsp) movq 0x7d8(%rsp), %rax addq $0x80, %rax movq %rax, 0xab8(%rsp) movq 0xab8(%rsp), %rax vmovaps (%rax), %ymm0 vmovaps %ymm0, 0x580(%rsp) movq 0x7d8(%rsp), %rax addq $0xa0, %rax movq %rax, 0xab0(%rsp) movq 0xab0(%rsp), %rax vmovaps (%rax), %ymm0 vmovaps %ymm0, 0x560(%rsp) vmovaps 0x680(%rsp), %ymm2 vmovaps 0x580(%rsp), %ymm1 vmovaps 0x7e0(%rsp), %ymm0 vmovaps %ymm2, 0xe80(%rsp) vmovaps %ymm1, 0xe60(%rsp) vmovaps %ymm0, 0xe40(%rsp) vmovaps 0xe80(%rsp), %ymm2 vmovaps 0xe60(%rsp), %ymm1 vmovaps 0xe40(%rsp), %ymm0 vmovaps %ymm2, 0x11e0(%rsp) vmovaps %ymm1, 0x11c0(%rsp) vmovaps %ymm0, 0x11a0(%rsp) vmovaps 0x11e0(%rsp), %ymm1 vmovaps 0x11c0(%rsp), %ymm0 vmovaps 0x11a0(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x3e0(%rsp) vmovaps 0x3e0(%rsp), %ymm0 vmovaps %ymm0, 0x7e0(%rsp) 
vmovaps 0x660(%rsp), %ymm2 vmovaps 0x560(%rsp), %ymm1 vmovaps 0x7e0(%rsp), %ymm0 vmovaps %ymm2, 0xe20(%rsp) vmovaps %ymm1, 0xe00(%rsp) vmovaps %ymm0, 0xde0(%rsp) vmovaps 0xe20(%rsp), %ymm2 vmovaps 0xe00(%rsp), %ymm1 vmovaps 0xde0(%rsp), %ymm0 vmovaps %ymm2, 0x1240(%rsp) vmovaps %ymm1, 0x1220(%rsp) vmovaps %ymm0, 0x1200(%rsp) vmovaps 0x1240(%rsp), %ymm1 vmovaps 0x1220(%rsp), %ymm0 vmovaps 0x1200(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x3c0(%rsp) vmovaps 0x3c0(%rsp), %ymm0 vmovaps %ymm0, 0x7e0(%rsp) movq 0x7d8(%rsp), %rax addq $0xc0, %rax movq %rax, 0xaa8(%rsp) movq 0xaa8(%rsp), %rax vmovaps (%rax), %ymm0 vmovaps %ymm0, 0x540(%rsp) movq 0x7d8(%rsp), %rax addq $0xe0, %rax movq %rax, 0xaa0(%rsp) movq 0xaa0(%rsp), %rax vmovaps (%rax), %ymm0 vmovaps %ymm0, 0x520(%rsp) vmovaps 0x640(%rsp), %ymm2 vmovaps 0x540(%rsp), %ymm1 vmovaps 0x7e0(%rsp), %ymm0 vmovaps %ymm2, 0xdc0(%rsp) vmovaps %ymm1, 0xda0(%rsp) vmovaps %ymm0, 0xd80(%rsp) vmovaps 0xdc0(%rsp), %ymm2 vmovaps 0xda0(%rsp), %ymm1 vmovaps 0xd80(%rsp), %ymm0 vmovaps %ymm2, 0x12a0(%rsp) vmovaps %ymm1, 0x1280(%rsp) vmovaps %ymm0, 0x1260(%rsp) vmovaps 0x12a0(%rsp), %ymm1 vmovaps 0x1280(%rsp), %ymm0 vmovaps 0x1260(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x3a0(%rsp) vmovaps 0x3a0(%rsp), %ymm0 vmovaps %ymm0, 0x7e0(%rsp) vmovaps 0x620(%rsp), %ymm2 vmovaps 0x520(%rsp), %ymm1 vmovaps 0x7e0(%rsp), %ymm0 vmovaps %ymm2, 0xd60(%rsp) vmovaps %ymm1, 0xd40(%rsp) vmovaps %ymm0, 0xd20(%rsp) vmovaps 0xd60(%rsp), %ymm2 vmovaps 0xd40(%rsp), %ymm1 vmovaps 0xd20(%rsp), %ymm0 vmovaps %ymm2, 0x1300(%rsp) vmovaps %ymm1, 0x12e0(%rsp) vmovaps %ymm0, 0x12c0(%rsp) vmovaps 0x1300(%rsp), %ymm1 vmovaps 0x12e0(%rsp), %ymm0 vmovaps 0x12c0(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x380(%rsp) vmovaps 0x380(%rsp), %ymm0 vmovaps %ymm0, 0x7e0(%rsp) movq 0x7d8(%rsp), %rax addq $0x100, %rax # imm = 0x100 movq %rax, 
0x7d8(%rsp) movl 0x734(%rsp), %eax addl $0x1, %eax movl %eax, 0x734(%rsp) jmp 0x509cba leaq 0x740(%rsp), %rax movq %rax, 0x930(%rsp) movq 0x930(%rsp), %rax movq %rax, 0x958(%rsp) movq 0x958(%rsp), %rax movq %rax, 0x378(%rsp) cmpq $0x0, 0x8(%rax) je 0x50a6e5 movq 0x378(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x954(%rsp) # imm = 0xFFFFFFFF movl 0x954(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x950(%rsp) cmpl $0x1, 0x950(%rsp) jne 0x50a6e5 movq 0x378(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x50a6b3 movq 0x378(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x50a6b1 jmp 0x50a6e3 movq 0x378(%rsp), %rax movq (%rax), %rax movq %rax, 0xa90(%rsp) cmpq $0x0, 0xa90(%rsp) je 0x50a6e1 movq 0xa90(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x50a6e3 jmp 0x50a6e5 movq 0x378(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x50a740 movq %rax, %rdi callq 0x5fc90 jmp 0x50a742 movl 0x78c(%rsp), %eax addl $0x1, %eax movl %eax, 0x78c(%rsp) jmp 0x509921 vmovaps 0x7e0(%rsp), %ymm0 movl 0x30(%rbp), %ecx movq 0x38(%rbp), %rax vmovaps %ymm0, 0x13e0(%rsp) movl %ecx, 0x13dc(%rsp) movq %rax, 0x13d0(%rsp) movl 0x13dc(%rsp), %eax decl %eax movl %eax, %ecx movq %rcx, 0x370(%rsp) subl $0x5, %eax ja 0x50e6b0 movq 0x370(%rsp), %rax leaq 0x18fba6d(%rip), %rcx # 0x1e06218 movslq (%rcx,%rax,4), %rax addq %rcx, %rax jmpq *%rax vmovaps 0x13e0(%rsp), %ymm1 vxorps %xmm0, %xmm0, %xmm0 vmovaps %ymm0, 0x1580(%rsp) vmovaps 0x1580(%rsp), %ymm0 vmovaps %ymm1, 0x15c0(%rsp) vmovaps %ymm0, 0x15a0(%rsp) vmovaps 0x15c0(%rsp), %ymm0 vmovaps 0x15a0(%rsp), %ymm1 vmaxps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x1400(%rsp) jmp 0x50e6c2 vmovaps 0x13e0(%rsp), %ymm1 movq 0x13d0(%rsp), %rax movq %rax, 0x1468(%rsp) movq $0x0, 0x1460(%rsp) movq 0x1468(%rsp), %rax movq (%rax), 
%rax movq 0x1460(%rsp), %rcx vmovss (%rax,%rcx,4), %xmm0 vmovaps %ymm1, 0x1680(%rsp) vmovss %xmm0, 0x167c(%rsp) vxorps %xmm0, %xmm0, %xmm0 vmovaps %ymm0, 0x17a0(%rsp) vmovaps 0x17a0(%rsp), %ymm2 vmovaps 0x1680(%rsp), %ymm1 vmovaps %ymm2, 0x17e0(%rsp) vmovaps %ymm1, 0x17c0(%rsp) vmovaps 0x17e0(%rsp), %ymm1 vmovaps 0x17c0(%rsp), %ymm2 vmaxps %ymm2, %ymm1, %ymm1 vmovaps %ymm1, 0x1640(%rsp) vmovaps %ymm0, 0x1780(%rsp) vmovaps 0x1780(%rsp), %ymm1 vmovaps 0x1680(%rsp), %ymm0 vmovaps %ymm1, 0x1820(%rsp) vmovaps %ymm0, 0x1800(%rsp) vmovaps 0x1820(%rsp), %ymm0 vmovaps 0x1800(%rsp), %ymm1 vminps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x1620(%rsp) vmovaps 0x1640(%rsp), %ymm1 vmovss 0x167c(%rsp), %xmm0 vmovss %xmm0, 0x16bc(%rsp) vmovss 0x16bc(%rsp), %xmm0 vmovss %xmm0, 0x33c(%rsp) vmovss %xmm0, 0x177c(%rsp) vmovss %xmm0, 0x1778(%rsp) vmovss %xmm0, 0x1774(%rsp) vmovss %xmm0, 0x1770(%rsp) vmovss %xmm0, 0x176c(%rsp) vmovss %xmm0, 0x1768(%rsp) vmovss %xmm0, 0x1764(%rsp) vmovss %xmm0, 0x1760(%rsp) vmovss 0x1774(%rsp), %xmm2 vmovss 0x1770(%rsp), %xmm0 vinsertps $0x10, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm2[0],xmm0[2,3] vmovss 0x1778(%rsp), %xmm2 vinsertps $0x20, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm2[0],xmm0[3] vmovss 0x177c(%rsp), %xmm2 vinsertps $0x30, %xmm2, %xmm0, %xmm2 # xmm2 = xmm0[0,1,2],xmm2[0] vmovss 0x1764(%rsp), %xmm3 vmovss 0x1760(%rsp), %xmm0 vinsertps $0x10, %xmm3, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm3[0],xmm0[2,3] vmovss 0x1768(%rsp), %xmm3 vinsertps $0x20, %xmm3, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm3[0],xmm0[3] vmovss 0x176c(%rsp), %xmm3 vinsertps $0x30, %xmm3, %xmm0, %xmm3 # xmm3 = xmm0[0,1,2],xmm3[0] vmovaps %xmm3, %xmm0 vinsertf128 $0x1, %xmm2, %ymm0, %ymm0 vmovaps %ymm0, 0x1740(%rsp) vmovaps 0x1740(%rsp), %ymm2 vmovaps 0x1620(%rsp), %ymm0 vmovaps %ymm2, 0x1720(%rsp) vmovaps %ymm0, 0x1700(%rsp) vmovaps 0x1720(%rsp), %ymm0 vmulps 0x1700(%rsp), %ymm0, %ymm0 vmovaps %ymm1, 0x16e0(%rsp) vmovaps %ymm0, 0x16c0(%rsp) vmovaps 0x16e0(%rsp), %ymm0 vaddps 0x16c0(%rsp), 
%ymm0, %ymm0 vmovaps %ymm0, 0x340(%rsp) vmovaps 0x340(%rsp), %ymm0 vmovaps %ymm0, 0x1400(%rsp) jmp 0x50e6c2 movq 0x13d0(%rsp), %rax movq %rax, 0x1458(%rsp) movq $0x0, 0x1450(%rsp) movq 0x1458(%rsp), %rax movq (%rax), %rax movq 0x1450(%rsp), %rcx vmovss (%rax,%rcx,4), %xmm0 vmovss %xmm0, 0x147c(%rsp) vmovss 0x147c(%rsp), %xmm0 vmovss %xmm0, 0x334(%rsp) vmovss %xmm0, 0x14bc(%rsp) vmovss %xmm0, 0x14b8(%rsp) vmovss %xmm0, 0x14b4(%rsp) vmovss %xmm0, 0x14b0(%rsp) vmovss %xmm0, 0x14ac(%rsp) vmovss %xmm0, 0x14a8(%rsp) vmovss %xmm0, 0x14a4(%rsp) vmovss %xmm0, 0x14a0(%rsp) vmovss 0x14a4(%rsp), %xmm1 vmovss 0x14a0(%rsp), %xmm0 vinsertps $0x10, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[2,3] vmovss 0x14a8(%rsp), %xmm1 vinsertps $0x20, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm1[0],xmm0[3] vmovss 0x14ac(%rsp), %xmm1 vinsertps $0x30, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1,2],xmm1[0] vmovss 0x14b4(%rsp), %xmm2 vmovss 0x14b0(%rsp), %xmm1 vinsertps $0x10, %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm2[0],xmm1[2,3] vmovss 0x14b8(%rsp), %xmm2 vinsertps $0x20, %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0,1],xmm2[0],xmm1[3] vmovss 0x14bc(%rsp), %xmm2 vinsertps $0x30, %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0,1,2],xmm2[0] vmovaps %xmm1, 0x1490(%rsp) vmovaps %xmm0, 0x1480(%rsp) vmovaps 0x1480(%rsp), %ymm0 vmovaps %ymm0, 0x13a0(%rsp) movq 0x13d0(%rsp), %rax movq %rax, 0x1448(%rsp) movq $0x1, 0x1440(%rsp) movq 0x1448(%rsp), %rax movq (%rax), %rax movq 0x1440(%rsp), %rcx vmovss (%rax,%rcx,4), %xmm0 vmovss %xmm0, 0x1478(%rsp) vmovss 0x1478(%rsp), %xmm0 vmovss %xmm0, 0x338(%rsp) vmovss %xmm0, 0x14fc(%rsp) vmovss %xmm0, 0x14f8(%rsp) vmovss %xmm0, 0x14f4(%rsp) vmovss %xmm0, 0x14f0(%rsp) vmovss %xmm0, 0x14ec(%rsp) vmovss %xmm0, 0x14e8(%rsp) vmovss %xmm0, 0x14e4(%rsp) vmovss %xmm0, 0x14e0(%rsp) vmovss 0x14f4(%rsp), %xmm1 vmovss 0x14f0(%rsp), %xmm0 vinsertps $0x10, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[2,3] vmovss 0x14f8(%rsp), %xmm1 vinsertps $0x20, %xmm1, %xmm0, %xmm0 # xmm0 = 
xmm0[0,1],xmm1[0],xmm0[3] vmovss 0x14fc(%rsp), %xmm1 vinsertps $0x30, %xmm1, %xmm0, %xmm1 # xmm1 = xmm0[0,1,2],xmm1[0] vmovss 0x14e4(%rsp), %xmm2 vmovss 0x14e0(%rsp), %xmm0 vinsertps $0x10, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm2[0],xmm0[2,3] vmovss 0x14e8(%rsp), %xmm2 vinsertps $0x20, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm2[0],xmm0[3] vmovss 0x14ec(%rsp), %xmm2 vinsertps $0x30, %xmm2, %xmm0, %xmm2 # xmm2 = xmm0[0,1,2],xmm2[0] vmovaps %xmm2, %xmm0 vinsertf128 $0x1, %xmm1, %ymm0, %ymm0 vmovaps %ymm0, 0x14c0(%rsp) vmovaps 0x14c0(%rsp), %ymm0 vmovaps %ymm0, 0x1380(%rsp) vmovaps 0x13e0(%rsp), %ymm1 vmovaps 0x13a0(%rsp), %ymm0 vmovaps %ymm1, 0x1600(%rsp) vmovaps %ymm0, 0x15e0(%rsp) vmovaps 0x1600(%rsp), %ymm0 vmovaps 0x15e0(%rsp), %ymm1 vmaxps %ymm1, %ymm0, %ymm1 vmovaps 0x1380(%rsp), %ymm0 vmovaps %ymm1, 0x1860(%rsp) vmovaps %ymm0, 0x1840(%rsp) vmovaps 0x1860(%rsp), %ymm0 vmovaps 0x1840(%rsp), %ymm1 vminps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x1400(%rsp) jmp 0x50e6c2 vmovaps 0x13e0(%rsp), %ymm0 vmovaps %ymm0, 0x18a0(%rsp) movl $0x3f800000, 0x18dc(%rsp) # imm = 0x3F800000 vmovss 0x18dc(%rsp), %xmm0 vmovss %xmm0, 0x2bc(%rsp) vmovss %xmm0, 0x199c(%rsp) vmovss %xmm0, 0x1998(%rsp) vmovss %xmm0, 0x1994(%rsp) vmovss %xmm0, 0x1990(%rsp) vmovss %xmm0, 0x198c(%rsp) vmovss %xmm0, 0x1988(%rsp) vmovss %xmm0, 0x1984(%rsp) vmovss %xmm0, 0x1980(%rsp) vmovss 0x1984(%rsp), %xmm1 vmovss 0x1980(%rsp), %xmm0 vinsertps $0x10, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[2,3] vmovss 0x1988(%rsp), %xmm1 vinsertps $0x20, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm1[0],xmm0[3] vmovss 0x198c(%rsp), %xmm1 vinsertps $0x30, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1,2],xmm1[0] vmovss 0x1994(%rsp), %xmm2 vmovss 0x1990(%rsp), %xmm1 vinsertps $0x10, %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm2[0],xmm1[2,3] vmovss 0x1998(%rsp), %xmm2 vinsertps $0x20, %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0,1],xmm2[0],xmm1[3] vmovss 0x199c(%rsp), %xmm2 vinsertps $0x30, %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0,1,2],xmm2[0] 
vmovaps %xmm1, 0x1970(%rsp) vmovaps %xmm0, 0x1960(%rsp) vmovaps 0x1960(%rsp), %ymm0 vmovaps %ymm0, 0x1880(%rsp) vmovaps 0x1880(%rsp), %ymm0 vmovaps %ymm0, 0x2c0(%rsp) vmovaps %ymm0, 0x2e0(%rsp) vxorps %xmm0, %xmm0, %xmm0 vmovaps %ymm0, 0x19a0(%rsp) vmovaps 0x19a0(%rsp), %ymm2 vmovaps 0x18a0(%rsp), %ymm1 vmovaps %ymm2, 0x1900(%rsp) vmovaps %ymm1, 0x18e0(%rsp) vmovaps 0x1900(%rsp), %ymm1 vmovaps 0x18e0(%rsp), %ymm2 vsubps %ymm2, %ymm1, %ymm1 vmovaps %ymm1, 0x2640(%rsp) vmovaps %ymm0, 0x2ca0(%rsp) vmovaps 0x2ca0(%rsp), %ymm0 vmovaps %ymm0, 0x2620(%rsp) vmovaps 0x18f8cc1(%rip), %ymm0 # 0x1e03b80 vmovaps %ymm0, 0x25c0(%rsp) vmovaps 0x2640(%rsp), %ymm0 vmovaps %ymm0, 0x2d20(%rsp) vmovaps 0x18f8cbe(%rip), %ymm0 # 0x1e03ba0 vmovaps %ymm0, 0x2d00(%rsp) vmovaps 0x2d20(%rsp), %ymm0 vmovaps 0x2d00(%rsp), %ymm1 vminps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x2640(%rsp) vmovaps 0x2640(%rsp), %ymm0 vmovaps %ymm0, 0x2ce0(%rsp) vmovaps 0x18f8c9c(%rip), %ymm0 # 0x1e03bc0 vmovaps %ymm0, 0x2cc0(%rsp) vmovaps 0x2ce0(%rsp), %ymm0 vmovaps 0x2cc0(%rsp), %ymm1 vmaxps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x2640(%rsp) vmovaps 0x2640(%rsp), %ymm0 vmovaps %ymm0, 0x28e0(%rsp) vmovaps 0x18f8c7a(%rip), %ymm0 # 0x1e03be0 vmovaps %ymm0, 0x28c0(%rsp) vmovaps 0x18f8c89(%rip), %ymm0 # 0x1e03c00 vmovaps %ymm0, 0x28a0(%rsp) vmovaps 0x28e0(%rsp), %ymm2 vmovaps 0x28c0(%rsp), %ymm1 vmovaps 0x28a0(%rsp), %ymm0 vmovaps %ymm2, 0x2940(%rsp) vmovaps %ymm1, 0x2920(%rsp) vmovaps %ymm0, 0x2900(%rsp) vmovaps 0x2940(%rsp), %ymm1 vmovaps 0x2920(%rsp), %ymm0 vmovaps 0x2900(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x2600(%rsp) vmovaps 0x2600(%rsp), %ymm0 vroundps $0x1, %ymm0, %ymm0 vmovaps %ymm0, 0x2620(%rsp) vmovaps 0x2620(%rsp), %ymm1 vmovaps 0x2600(%rsp), %ymm0 vcmpltps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x25a0(%rsp) vmovaps 0x25a0(%rsp), %ymm1 vmovaps 0x25c0(%rsp), %ymm0 vmovaps %ymm1, 0x2d60(%rsp) vmovaps %ymm0, 0x2d40(%rsp) vmovaps 0x2d60(%rsp), %ymm0 vmovaps 
0x2d40(%rsp), %ymm1 vandps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x25a0(%rsp) vmovaps 0x2620(%rsp), %ymm1 vmovaps 0x25a0(%rsp), %ymm0 vmovaps %ymm1, 0x2bc0(%rsp) vmovaps %ymm0, 0x2ba0(%rsp) vmovaps 0x2bc0(%rsp), %ymm0 vmovaps 0x2ba0(%rsp), %ymm1 vsubps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x2600(%rsp) vmovaps 0x2600(%rsp), %ymm1 vmovaps 0x2640(%rsp), %ymm0 vmovaps %ymm1, 0x2e60(%rsp) vmovaps 0x18f8b60(%rip), %ymm1 # 0x1e03c20 vmovaps %ymm1, 0x2e40(%rsp) vmovaps %ymm0, 0x2e20(%rsp) vmovaps 0x2e60(%rsp), %ymm2 vmovaps 0x2e40(%rsp), %ymm1 vmovaps 0x2e20(%rsp), %ymm0 vmovaps %ymm2, 0x3440(%rsp) vmovaps %ymm1, 0x3420(%rsp) vmovaps %ymm0, 0x3400(%rsp) vmovaps 0x3440(%rsp), %ymm0 vmovaps 0x18fbaa7(%rip), %ymm1 # 0x1e06bc0 vxorps %ymm1, %ymm0, %ymm1 vmovaps 0x3420(%rsp), %ymm0 vmovaps 0x3400(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x300(%rsp) vmovaps 0x300(%rsp), %ymm0 vmovaps %ymm0, 0x2640(%rsp) vmovaps 0x2600(%rsp), %ymm1 vmovaps 0x2640(%rsp), %ymm0 vmovaps %ymm1, 0x2e00(%rsp) vmovaps 0x18f8ace(%rip), %ymm1 # 0x1e03c40 vmovaps %ymm1, 0x2de0(%rsp) vmovaps %ymm0, 0x2dc0(%rsp) vmovaps 0x2e00(%rsp), %ymm2 vmovaps 0x2de0(%rsp), %ymm1 vmovaps 0x2dc0(%rsp), %ymm0 vmovaps %ymm2, 0x34a0(%rsp) vmovaps %ymm1, 0x3480(%rsp) vmovaps %ymm0, 0x3460(%rsp) vmovaps 0x34a0(%rsp), %ymm0 vmovaps 0x18fb9f5(%rip), %ymm1 # 0x1e06bc0 vxorps %ymm1, %ymm0, %ymm1 vmovaps 0x3480(%rsp), %ymm0 vmovaps 0x3460(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x280(%rsp) vmovaps 0x280(%rsp), %ymm0 vmovaps %ymm0, 0x2640(%rsp) vmovaps 0x2640(%rsp), %ymm0 vmovaps %ymm0, 0x2c80(%rsp) vmovaps %ymm0, 0x2c60(%rsp) vmovaps 0x2c80(%rsp), %ymm0 vmovaps 0x2c60(%rsp), %ymm1 vmulps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x2620(%rsp) vmovaps 0x18f8a1d(%rip), %ymm0 # 0x1e03c60 vmovaps %ymm0, 0x2580(%rsp) vmovaps 0x2580(%rsp), %ymm1 vmovaps 0x2640(%rsp), %ymm0 vmovaps %ymm1, 0x2880(%rsp) vmovaps %ymm0, 0x2860(%rsp) vmovaps 0x18f8a08(%rip), 
%ymm0 # 0x1e03c80 vmovaps %ymm0, 0x2840(%rsp) vmovaps 0x2880(%rsp), %ymm2 vmovaps 0x2860(%rsp), %ymm1 vmovaps 0x2840(%rsp), %ymm0 vmovaps %ymm2, 0x29a0(%rsp) vmovaps %ymm1, 0x2980(%rsp) vmovaps %ymm0, 0x2960(%rsp) vmovaps 0x29a0(%rsp), %ymm1 vmovaps 0x2980(%rsp), %ymm0 vmovaps 0x2960(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x2580(%rsp) vmovaps 0x2580(%rsp), %ymm1 vmovaps 0x2640(%rsp), %ymm0 vmovaps %ymm1, 0x2820(%rsp) vmovaps %ymm0, 0x2800(%rsp) vmovaps 0x18f8994(%rip), %ymm0 # 0x1e03ca0 vmovaps %ymm0, 0x27e0(%rsp) vmovaps 0x2820(%rsp), %ymm2 vmovaps 0x2800(%rsp), %ymm1 vmovaps 0x27e0(%rsp), %ymm0 vmovaps %ymm2, 0x2a00(%rsp) vmovaps %ymm1, 0x29e0(%rsp) vmovaps %ymm0, 0x29c0(%rsp) vmovaps 0x2a00(%rsp), %ymm1 vmovaps 0x29e0(%rsp), %ymm0 vmovaps 0x29c0(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x2580(%rsp) vmovaps 0x2580(%rsp), %ymm1 vmovaps 0x2640(%rsp), %ymm0 vmovaps %ymm1, 0x27c0(%rsp) vmovaps %ymm0, 0x27a0(%rsp) vmovaps 0x18f8920(%rip), %ymm0 # 0x1e03cc0 vmovaps %ymm0, 0x2780(%rsp) vmovaps 0x27c0(%rsp), %ymm2 vmovaps 0x27a0(%rsp), %ymm1 vmovaps 0x2780(%rsp), %ymm0 vmovaps %ymm2, 0x2a60(%rsp) vmovaps %ymm1, 0x2a40(%rsp) vmovaps %ymm0, 0x2a20(%rsp) vmovaps 0x2a60(%rsp), %ymm1 vmovaps 0x2a40(%rsp), %ymm0 vmovaps 0x2a20(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x2580(%rsp) vmovaps 0x2580(%rsp), %ymm1 vmovaps 0x2640(%rsp), %ymm0 vmovaps %ymm1, 0x2760(%rsp) vmovaps %ymm0, 0x2740(%rsp) vmovaps 0x18f88ac(%rip), %ymm0 # 0x1e03ce0 vmovaps %ymm0, 0x2720(%rsp) vmovaps 0x2760(%rsp), %ymm2 vmovaps 0x2740(%rsp), %ymm1 vmovaps 0x2720(%rsp), %ymm0 vmovaps %ymm2, 0x2ac0(%rsp) vmovaps %ymm1, 0x2aa0(%rsp) vmovaps %ymm0, 0x2a80(%rsp) vmovaps 0x2ac0(%rsp), %ymm1 vmovaps 0x2aa0(%rsp), %ymm0 vmovaps 0x2a80(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x2580(%rsp) vmovaps 0x2580(%rsp), %ymm1 vmovaps 
0x2640(%rsp), %ymm0 vmovaps %ymm1, 0x2700(%rsp) vmovaps %ymm0, 0x26e0(%rsp) vmovaps 0x18f8738(%rip), %ymm0 # 0x1e03c00 vmovaps %ymm0, 0x26c0(%rsp) vmovaps 0x2700(%rsp), %ymm2 vmovaps 0x26e0(%rsp), %ymm1 vmovaps 0x26c0(%rsp), %ymm0 vmovaps %ymm2, 0x2b20(%rsp) vmovaps %ymm1, 0x2b00(%rsp) vmovaps %ymm0, 0x2ae0(%rsp) vmovaps 0x2b20(%rsp), %ymm1 vmovaps 0x2b00(%rsp), %ymm0 vmovaps 0x2ae0(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x2580(%rsp) vmovaps 0x2580(%rsp), %ymm2 vmovaps 0x2620(%rsp), %ymm1 vmovaps 0x2640(%rsp), %ymm0 vmovaps %ymm2, 0x26a0(%rsp) vmovaps %ymm1, 0x2680(%rsp) vmovaps %ymm0, 0x2660(%rsp) vmovaps 0x26a0(%rsp), %ymm2 vmovaps 0x2680(%rsp), %ymm1 vmovaps 0x2660(%rsp), %ymm0 vmovaps %ymm2, 0x2b80(%rsp) vmovaps %ymm1, 0x2b60(%rsp) vmovaps %ymm0, 0x2b40(%rsp) vmovaps 0x2b80(%rsp), %ymm1 vmovaps 0x2b60(%rsp), %ymm0 vmovaps 0x2b40(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x2580(%rsp) vmovaps 0x2580(%rsp), %ymm1 vmovaps 0x25c0(%rsp), %ymm0 vmovaps %ymm1, 0x2c00(%rsp) vmovaps %ymm0, 0x2be0(%rsp) vmovaps 0x2c00(%rsp), %ymm0 vmovaps 0x2be0(%rsp), %ymm1 vaddps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x2580(%rsp) vmovaps 0x2600(%rsp), %ymm0 vmovaps %ymm0, 0x2f40(%rsp) vcvttps2dq 0x2f40(%rsp), %ymm0 vmovaps %ymm0, 0x25e0(%rsp) vmovaps 0x25e0(%rsp), %ymm0 vmovaps %ymm0, 0x3060(%rsp) vmovaps 0x18f86ba(%rip), %ymm0 # 0x1e03d00 vmovaps %ymm0, 0x3040(%rsp) vmovdqa 0x3060(%rsp), %ymm0 vmovdqa %ymm0, 0x2fc0(%rsp) vmovdqa 0x2fc0(%rsp), %xmm0 vmovdqa %xmm0, 0x3030(%rsp) vmovdqa 0x2fd0(%rsp), %xmm0 vmovdqa %xmm0, 0x3020(%rsp) vmovdqa 0x3040(%rsp), %ymm0 vmovdqa %ymm0, 0x2fa0(%rsp) vmovdqa 0x2fa0(%rsp), %xmm0 vmovdqa %xmm0, 0x3010(%rsp) vmovdqa 0x2fb0(%rsp), %xmm0 vmovdqa %xmm0, 0x3000(%rsp) vmovdqa 0x3030(%rsp), %xmm1 vmovdqa 0x3010(%rsp), %xmm0 vmovdqa %xmm1, 0x3530(%rsp) vmovdqa %xmm0, 0x3520(%rsp) vmovdqa 0x3530(%rsp), %xmm0 vmovdqa 0x3520(%rsp), %xmm1 vpaddd %xmm1, %xmm0, %xmm0 
vmovdqa %xmm0, 0x3030(%rsp) vmovdqa 0x3020(%rsp), %xmm1 vmovdqa 0x3000(%rsp), %xmm0 vmovdqa %xmm1, 0x3510(%rsp) vmovdqa %xmm0, 0x3500(%rsp) vmovdqa 0x3510(%rsp), %xmm0 vmovdqa 0x3500(%rsp), %xmm1 vpaddd %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0x3020(%rsp) vmovdqa 0x3030(%rsp), %xmm0 vmovdqa %xmm0, 0x2f80(%rsp) vmovdqa 0x3020(%rsp), %xmm0 vmovdqa %xmm0, 0x2f90(%rsp) vmovdqa 0x2f80(%rsp), %ymm0 vmovdqa %ymm0, 0x2fe0(%rsp) vmovdqa 0x2fe0(%rsp), %ymm0 vmovaps %ymm0, 0x260(%rsp) vmovaps 0x260(%rsp), %ymm0 vmovdqa %ymm0, 0x25e0(%rsp) vmovdqa 0x25e0(%rsp), %ymm0 vmovdqa %ymm0, 0x3220(%rsp) movl $0x17, 0x321c(%rsp) vmovdqa 0x3220(%rsp), %ymm0 vmovdqa %ymm0, 0x31a0(%rsp) vmovdqa 0x31a0(%rsp), %xmm0 vmovdqa %xmm0, 0x3200(%rsp) vmovdqa 0x31b0(%rsp), %xmm0 vmovdqa %xmm0, 0x31f0(%rsp) vmovdqa 0x3200(%rsp), %xmm0 movl 0x321c(%rsp), %eax vmovdqa %xmm0, 0x35b0(%rsp) movl %eax, 0x35ac(%rsp) vmovdqa 0x35b0(%rsp), %xmm0 movl 0x35ac(%rsp), %eax vmovd %eax, %xmm1 vpslld %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0x3200(%rsp) vmovdqa 0x31f0(%rsp), %xmm0 movl 0x321c(%rsp), %eax vmovdqa %xmm0, 0x3590(%rsp) movl %eax, 0x358c(%rsp) vmovdqa 0x3590(%rsp), %xmm0 movl 0x358c(%rsp), %eax vmovd %eax, %xmm1 vpslld %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0x31f0(%rsp) vmovdqa 0x3200(%rsp), %xmm0 vmovdqa %xmm0, 0x3180(%rsp) vmovdqa 0x31f0(%rsp), %xmm0 vmovdqa %xmm0, 0x3190(%rsp) vmovdqa 0x3180(%rsp), %ymm0 vmovdqa %ymm0, 0x31c0(%rsp) vmovdqa 0x31c0(%rsp), %ymm0 vmovaps %ymm0, 0x240(%rsp) vmovaps 0x240(%rsp), %ymm0 vmovdqa %ymm0, 0x25e0(%rsp) vmovdqa 0x25e0(%rsp), %ymm0 vmovdqa %ymm0, 0x3300(%rsp) vmovdqa 0x3300(%rsp), %ymm0 vmovaps %ymm0, 0x2560(%rsp) vmovaps 0x2580(%rsp), %ymm1 vmovaps 0x2560(%rsp), %ymm0 vmovaps %ymm1, 0x2c40(%rsp) vmovaps %ymm0, 0x2c20(%rsp) vmovaps 0x2c40(%rsp), %ymm0 vmulps 0x2c20(%rsp), %ymm0, %ymm0 vmovaps %ymm0, 0x2580(%rsp) vmovaps 0x2580(%rsp), %ymm0 vmovaps %ymm0, 0x220(%rsp) vmovaps 0x2e0(%rsp), %ymm1 vmovaps 0x220(%rsp), %ymm0 vmovaps 0x2c0(%rsp), %ymm2 vmovaps %ymm2, 0x1940(%rsp) vmovaps 
%ymm0, 0x1920(%rsp) vmovaps 0x1940(%rsp), %ymm0 vaddps 0x1920(%rsp), %ymm0, %ymm0 vmovaps %ymm1, 0x1d60(%rsp) vmovaps %ymm0, 0x1d40(%rsp) vmovaps 0x1d60(%rsp), %ymm0 vdivps 0x1d40(%rsp), %ymm0, %ymm0 vmovaps %ymm0, 0x200(%rsp) vmovaps 0x200(%rsp), %ymm0 vmovaps %ymm0, 0x1400(%rsp) jmp 0x50e6c2 vmovaps 0x13e0(%rsp), %ymm0 vmovaps %ymm0, 0x19c0(%rsp) vmovaps 0x19c0(%rsp), %ymm0 vmovaps %ymm0, 0x1c0(%rsp) vmovaps %ymm0, 0x1e60(%rsp) vxorps %xmm0, %xmm0, %xmm0 vmovaps %ymm0, 0x24c0(%rsp) vmovaps 0x24c0(%rsp), %ymm0 vmovaps %ymm0, 0x1e40(%rsp) vmovaps 0x18f816a(%rip), %ymm0 # 0x1e03b80 vmovaps %ymm0, 0x1de0(%rsp) vmovaps 0x1e60(%rsp), %ymm0 vmovaps %ymm0, 0x2540(%rsp) vmovaps 0x18f8167(%rip), %ymm0 # 0x1e03ba0 vmovaps %ymm0, 0x2520(%rsp) vmovaps 0x2540(%rsp), %ymm0 vmovaps 0x2520(%rsp), %ymm1 vminps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x1e60(%rsp) vmovaps 0x1e60(%rsp), %ymm0 vmovaps %ymm0, 0x2500(%rsp) vmovaps 0x18f8145(%rip), %ymm0 # 0x1e03bc0 vmovaps %ymm0, 0x24e0(%rsp) vmovaps 0x2500(%rsp), %ymm0 vmovaps 0x24e0(%rsp), %ymm1 vmaxps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x1e60(%rsp) vmovaps 0x1e60(%rsp), %ymm0 vmovaps %ymm0, 0x2100(%rsp) vmovaps 0x18f8123(%rip), %ymm0 # 0x1e03be0 vmovaps %ymm0, 0x20e0(%rsp) vmovaps 0x18f8132(%rip), %ymm0 # 0x1e03c00 vmovaps %ymm0, 0x20c0(%rsp) vmovaps 0x2100(%rsp), %ymm2 vmovaps 0x20e0(%rsp), %ymm1 vmovaps 0x20c0(%rsp), %ymm0 vmovaps %ymm2, 0x2160(%rsp) vmovaps %ymm1, 0x2140(%rsp) vmovaps %ymm0, 0x2120(%rsp) vmovaps 0x2160(%rsp), %ymm1 vmovaps 0x2140(%rsp), %ymm0 vmovaps 0x2120(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x1e20(%rsp) vmovaps 0x1e20(%rsp), %ymm0 vroundps $0x1, %ymm0, %ymm0 vmovaps %ymm0, 0x1e40(%rsp) vmovaps 0x1e40(%rsp), %ymm1 vmovaps 0x1e20(%rsp), %ymm0 vcmpltps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x1dc0(%rsp) vmovaps 0x1dc0(%rsp), %ymm1 vmovaps 0x1de0(%rsp), %ymm0 vmovaps %ymm1, 0x2da0(%rsp) vmovaps %ymm0, 0x2d80(%rsp) vmovaps 0x2da0(%rsp), %ymm0 vmovaps 0x2d80(%rsp), %ymm1 
vandps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x1dc0(%rsp) vmovaps 0x1e40(%rsp), %ymm1 vmovaps 0x1dc0(%rsp), %ymm0 vmovaps %ymm1, 0x23e0(%rsp) vmovaps %ymm0, 0x23c0(%rsp) vmovaps 0x23e0(%rsp), %ymm0 vmovaps 0x23c0(%rsp), %ymm1 vsubps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x1e20(%rsp) vmovaps 0x1e20(%rsp), %ymm1 vmovaps 0x1e60(%rsp), %ymm0 vmovaps %ymm1, 0x2f20(%rsp) vmovaps 0x18f8009(%rip), %ymm1 # 0x1e03c20 vmovaps %ymm1, 0x2f00(%rsp) vmovaps %ymm0, 0x2ee0(%rsp) vmovaps 0x2f20(%rsp), %ymm2 vmovaps 0x2f00(%rsp), %ymm1 vmovaps 0x2ee0(%rsp), %ymm0 vmovaps %ymm2, 0x3380(%rsp) vmovaps %ymm1, 0x3360(%rsp) vmovaps %ymm0, 0x3340(%rsp) vmovaps 0x3380(%rsp), %ymm0 vmovaps 0x18faf50(%rip), %ymm1 # 0x1e06bc0 vxorps %ymm1, %ymm0, %ymm1 vmovaps 0x3360(%rsp), %ymm0 vmovaps 0x3340(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x1e0(%rsp) vmovaps 0x1e0(%rsp), %ymm0 vmovaps %ymm0, 0x1e60(%rsp) vmovaps 0x1e20(%rsp), %ymm1 vmovaps 0x1e60(%rsp), %ymm0 vmovaps %ymm1, 0x2ec0(%rsp) vmovaps 0x18f7f77(%rip), %ymm1 # 0x1e03c40 vmovaps %ymm1, 0x2ea0(%rsp) vmovaps %ymm0, 0x2e80(%rsp) vmovaps 0x2ec0(%rsp), %ymm2 vmovaps 0x2ea0(%rsp), %ymm1 vmovaps 0x2e80(%rsp), %ymm0 vmovaps %ymm2, 0x33e0(%rsp) vmovaps %ymm1, 0x33c0(%rsp) vmovaps %ymm0, 0x33a0(%rsp) vmovaps 0x33e0(%rsp), %ymm0 vmovaps 0x18fae9e(%rip), %ymm1 # 0x1e06bc0 vxorps %ymm1, %ymm0, %ymm1 vmovaps 0x33c0(%rsp), %ymm0 vmovaps 0x33a0(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x1a0(%rsp) vmovaps 0x1a0(%rsp), %ymm0 vmovaps %ymm0, 0x1e60(%rsp) vmovaps 0x1e60(%rsp), %ymm0 vmovaps %ymm0, 0x24a0(%rsp) vmovaps %ymm0, 0x2480(%rsp) vmovaps 0x24a0(%rsp), %ymm0 vmovaps 0x2480(%rsp), %ymm1 vmulps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x1e40(%rsp) vmovaps 0x18f7ec6(%rip), %ymm0 # 0x1e03c60 vmovaps %ymm0, 0x1da0(%rsp) vmovaps 0x1da0(%rsp), %ymm1 vmovaps 0x1e60(%rsp), %ymm0 vmovaps %ymm1, 0x20a0(%rsp) vmovaps %ymm0, 0x2080(%rsp) vmovaps 0x18f7eb1(%rip), %ymm0 # 0x1e03c80 
vmovaps %ymm0, 0x2060(%rsp) vmovaps 0x20a0(%rsp), %ymm2 vmovaps 0x2080(%rsp), %ymm1 vmovaps 0x2060(%rsp), %ymm0 vmovaps %ymm2, 0x21c0(%rsp) vmovaps %ymm1, 0x21a0(%rsp) vmovaps %ymm0, 0x2180(%rsp) vmovaps 0x21c0(%rsp), %ymm1 vmovaps 0x21a0(%rsp), %ymm0 vmovaps 0x2180(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x1da0(%rsp) vmovaps 0x1da0(%rsp), %ymm1 vmovaps 0x1e60(%rsp), %ymm0 vmovaps %ymm1, 0x2040(%rsp) vmovaps %ymm0, 0x2020(%rsp) vmovaps 0x18f7e3d(%rip), %ymm0 # 0x1e03ca0 vmovaps %ymm0, 0x2000(%rsp) vmovaps 0x2040(%rsp), %ymm2 vmovaps 0x2020(%rsp), %ymm1 vmovaps 0x2000(%rsp), %ymm0 vmovaps %ymm2, 0x2220(%rsp) vmovaps %ymm1, 0x2200(%rsp) vmovaps %ymm0, 0x21e0(%rsp) vmovaps 0x2220(%rsp), %ymm1 vmovaps 0x2200(%rsp), %ymm0 vmovaps 0x21e0(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x1da0(%rsp) vmovaps 0x1da0(%rsp), %ymm1 vmovaps 0x1e60(%rsp), %ymm0 vmovaps %ymm1, 0x1fe0(%rsp) vmovaps %ymm0, 0x1fc0(%rsp) vmovaps 0x18f7dc9(%rip), %ymm0 # 0x1e03cc0 vmovaps %ymm0, 0x1fa0(%rsp) vmovaps 0x1fe0(%rsp), %ymm2 vmovaps 0x1fc0(%rsp), %ymm1 vmovaps 0x1fa0(%rsp), %ymm0 vmovaps %ymm2, 0x2280(%rsp) vmovaps %ymm1, 0x2260(%rsp) vmovaps %ymm0, 0x2240(%rsp) vmovaps 0x2280(%rsp), %ymm1 vmovaps 0x2260(%rsp), %ymm0 vmovaps 0x2240(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x1da0(%rsp) vmovaps 0x1da0(%rsp), %ymm1 vmovaps 0x1e60(%rsp), %ymm0 vmovaps %ymm1, 0x1f80(%rsp) vmovaps %ymm0, 0x1f60(%rsp) vmovaps 0x18f7d55(%rip), %ymm0 # 0x1e03ce0 vmovaps %ymm0, 0x1f40(%rsp) vmovaps 0x1f80(%rsp), %ymm2 vmovaps 0x1f60(%rsp), %ymm1 vmovaps 0x1f40(%rsp), %ymm0 vmovaps %ymm2, 0x22e0(%rsp) vmovaps %ymm1, 0x22c0(%rsp) vmovaps %ymm0, 0x22a0(%rsp) vmovaps 0x22e0(%rsp), %ymm1 vmovaps 0x22c0(%rsp), %ymm0 vmovaps 0x22a0(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x1da0(%rsp) vmovaps 0x1da0(%rsp), %ymm1 vmovaps 0x1e60(%rsp), 
%ymm0 vmovaps %ymm1, 0x1f20(%rsp) vmovaps %ymm0, 0x1f00(%rsp) vmovaps 0x18f7be1(%rip), %ymm0 # 0x1e03c00 vmovaps %ymm0, 0x1ee0(%rsp) vmovaps 0x1f20(%rsp), %ymm2 vmovaps 0x1f00(%rsp), %ymm1 vmovaps 0x1ee0(%rsp), %ymm0 vmovaps %ymm2, 0x2340(%rsp) vmovaps %ymm1, 0x2320(%rsp) vmovaps %ymm0, 0x2300(%rsp) vmovaps 0x2340(%rsp), %ymm1 vmovaps 0x2320(%rsp), %ymm0 vmovaps 0x2300(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x1da0(%rsp) vmovaps 0x1da0(%rsp), %ymm2 vmovaps 0x1e40(%rsp), %ymm1 vmovaps 0x1e60(%rsp), %ymm0 vmovaps %ymm2, 0x1ec0(%rsp) vmovaps %ymm1, 0x1ea0(%rsp) vmovaps %ymm0, 0x1e80(%rsp) vmovaps 0x1ec0(%rsp), %ymm2 vmovaps 0x1ea0(%rsp), %ymm1 vmovaps 0x1e80(%rsp), %ymm0 vmovaps %ymm2, 0x23a0(%rsp) vmovaps %ymm1, 0x2380(%rsp) vmovaps %ymm0, 0x2360(%rsp) vmovaps 0x23a0(%rsp), %ymm1 vmovaps 0x2380(%rsp), %ymm0 vmovaps 0x2360(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x1da0(%rsp) vmovaps 0x1da0(%rsp), %ymm1 vmovaps 0x1de0(%rsp), %ymm0 vmovaps %ymm1, 0x2420(%rsp) vmovaps %ymm0, 0x2400(%rsp) vmovaps 0x2420(%rsp), %ymm0 vmovaps 0x2400(%rsp), %ymm1 vaddps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x1da0(%rsp) vmovaps 0x1e20(%rsp), %ymm0 vmovaps %ymm0, 0x2f60(%rsp) vcvttps2dq 0x2f60(%rsp), %ymm0 vmovaps %ymm0, 0x1e00(%rsp) vmovaps 0x1e00(%rsp), %ymm0 vmovaps %ymm0, 0x3160(%rsp) vmovaps 0x18f7b63(%rip), %ymm0 # 0x1e03d00 vmovaps %ymm0, 0x3140(%rsp) vmovdqa 0x3160(%rsp), %ymm0 vmovdqa %ymm0, 0x30c0(%rsp) vmovdqa 0x30c0(%rsp), %xmm0 vmovdqa %xmm0, 0x3130(%rsp) vmovdqa 0x30d0(%rsp), %xmm0 vmovdqa %xmm0, 0x3120(%rsp) vmovdqa 0x3140(%rsp), %ymm0 vmovdqa %ymm0, 0x30a0(%rsp) vmovdqa 0x30a0(%rsp), %xmm0 vmovdqa %xmm0, 0x3110(%rsp) vmovdqa 0x30b0(%rsp), %xmm0 vmovdqa %xmm0, 0x3100(%rsp) vmovdqa 0x3130(%rsp), %xmm1 vmovdqa 0x3110(%rsp), %xmm0 vmovdqa %xmm1, 0x34f0(%rsp) vmovdqa %xmm0, 0x34e0(%rsp) vmovdqa 0x34f0(%rsp), %xmm0 vmovdqa 0x34e0(%rsp), %xmm1 vpaddd %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 
0x3130(%rsp) vmovdqa 0x3120(%rsp), %xmm1 vmovdqa 0x3100(%rsp), %xmm0 vmovdqa %xmm1, 0x34d0(%rsp) vmovdqa %xmm0, 0x34c0(%rsp) vmovdqa 0x34d0(%rsp), %xmm0 vmovdqa 0x34c0(%rsp), %xmm1 vpaddd %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0x3120(%rsp) vmovdqa 0x3130(%rsp), %xmm0 vmovdqa %xmm0, 0x3080(%rsp) vmovdqa 0x3120(%rsp), %xmm0 vmovdqa %xmm0, 0x3090(%rsp) vmovdqa 0x3080(%rsp), %ymm0 vmovdqa %ymm0, 0x30e0(%rsp) vmovdqa 0x30e0(%rsp), %ymm0 vmovaps %ymm0, 0x180(%rsp) vmovaps 0x180(%rsp), %ymm0 vmovdqa %ymm0, 0x1e00(%rsp) vmovdqa 0x1e00(%rsp), %ymm0 vmovdqa %ymm0, 0x32e0(%rsp) movl $0x17, 0x32dc(%rsp) vmovdqa 0x32e0(%rsp), %ymm0 vmovdqa %ymm0, 0x3260(%rsp) vmovdqa 0x3260(%rsp), %xmm0 vmovdqa %xmm0, 0x32c0(%rsp) vmovdqa 0x3270(%rsp), %xmm0 vmovdqa %xmm0, 0x32b0(%rsp) vmovdqa 0x32c0(%rsp), %xmm0 movl 0x32dc(%rsp), %eax vmovdqa %xmm0, 0x3570(%rsp) movl %eax, 0x356c(%rsp) vmovdqa 0x3570(%rsp), %xmm0 movl 0x356c(%rsp), %eax vmovd %eax, %xmm1 vpslld %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0x32c0(%rsp) vmovdqa 0x32b0(%rsp), %xmm0 movl 0x32dc(%rsp), %eax vmovdqa %xmm0, 0x3550(%rsp) movl %eax, 0x354c(%rsp) vmovdqa 0x3550(%rsp), %xmm0 movl 0x354c(%rsp), %eax vmovd %eax, %xmm1 vpslld %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0x32b0(%rsp) vmovdqa 0x32c0(%rsp), %xmm0 vmovdqa %xmm0, 0x3240(%rsp) vmovdqa 0x32b0(%rsp), %xmm0 vmovdqa %xmm0, 0x3250(%rsp) vmovdqa 0x3240(%rsp), %ymm0 vmovdqa %ymm0, 0x3280(%rsp) vmovdqa 0x3280(%rsp), %ymm0 vmovaps %ymm0, 0x160(%rsp) vmovaps 0x160(%rsp), %ymm0 vmovdqa %ymm0, 0x1e00(%rsp) vmovdqa 0x1e00(%rsp), %ymm0 vmovdqa %ymm0, 0x3320(%rsp) vmovdqa 0x3320(%rsp), %ymm0 vmovaps %ymm0, 0x1d80(%rsp) vmovaps 0x1da0(%rsp), %ymm1 vmovaps 0x1d80(%rsp), %ymm0 vmovaps %ymm1, 0x2460(%rsp) vmovaps %ymm0, 0x2440(%rsp) vmovaps 0x2460(%rsp), %ymm0 vmulps 0x2440(%rsp), %ymm0, %ymm0 vmovaps %ymm0, 0x1da0(%rsp) vmovaps 0x1da0(%rsp), %ymm0 vmovaps %ymm0, 0x140(%rsp) vmovaps 0x140(%rsp), %ymm1 movl $0x3f800000, 0x19fc(%rsp) # imm = 0x3F800000 vmovss 0x19fc(%rsp), %xmm0 vmovss %xmm0, 0x11c(%rsp) 
vmovss %xmm0, 0x1abc(%rsp) vmovss %xmm0, 0x1ab8(%rsp) vmovss %xmm0, 0x1ab4(%rsp) vmovss %xmm0, 0x1ab0(%rsp) vmovss %xmm0, 0x1aac(%rsp) vmovss %xmm0, 0x1aa8(%rsp) vmovss %xmm0, 0x1aa4(%rsp) vmovss %xmm0, 0x1aa0(%rsp) vmovss 0x1aa4(%rsp), %xmm2 vmovss 0x1aa0(%rsp), %xmm0 vinsertps $0x10, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm2[0],xmm0[2,3] vmovss 0x1aa8(%rsp), %xmm2 vinsertps $0x20, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm2[0],xmm0[3] vmovss 0x1aac(%rsp), %xmm2 vinsertps $0x30, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0,1,2],xmm2[0] vmovss 0x1ab4(%rsp), %xmm3 vmovss 0x1ab0(%rsp), %xmm2 vinsertps $0x10, %xmm3, %xmm2, %xmm2 # xmm2 = xmm2[0],xmm3[0],xmm2[2,3] vmovss 0x1ab8(%rsp), %xmm3 vinsertps $0x20, %xmm3, %xmm2, %xmm2 # xmm2 = xmm2[0,1],xmm3[0],xmm2[3] vmovss 0x1abc(%rsp), %xmm3 vinsertps $0x30, %xmm3, %xmm2, %xmm2 # xmm2 = xmm2[0,1,2],xmm3[0] vmovaps %xmm2, 0x1a90(%rsp) vmovaps %xmm0, 0x1a80(%rsp) vmovaps 0x1a80(%rsp), %ymm0 vmovaps %ymm1, 0x1a20(%rsp) vmovaps %ymm0, 0x1a00(%rsp) vmovaps 0x1a20(%rsp), %ymm0 vmovaps 0x1a00(%rsp), %ymm1 vaddps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x45a0(%rsp) vmovaps 0x18f75bc(%rip), %ymm0 # 0x1e03b80 vmovaps %ymm0, 0x4560(%rsp) vmovaps 0x45a0(%rsp), %ymm0 vxorps %xmm1, %xmm1, %xmm1 vmovaps %ymm1, 0x4f40(%rsp) vmovaps 0x4f40(%rsp), %ymm1 vcmpleps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x4540(%rsp) vmovaps 0x45a0(%rsp), %ymm0 vmovaps %ymm0, 0x4f80(%rsp) vmovaps 0x18f770c(%rip), %ymm0 # 0x1e03d20 vmovaps %ymm0, 0x4f60(%rsp) vmovaps 0x4f80(%rsp), %ymm0 vmovaps 0x4f60(%rsp), %ymm1 vmaxps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x45a0(%rsp) vmovaps 0x45a0(%rsp), %ymm0 vmovaps %ymm0, 0x5240(%rsp) vmovaps 0x5240(%rsp), %ymm0 vmovdqa %ymm0, 0x5220(%rsp) movl $0x17, 0x521c(%rsp) vmovdqa 0x5220(%rsp), %ymm0 vmovdqa %ymm0, 0x51a0(%rsp) vmovdqa 0x51a0(%rsp), %xmm0 vmovdqa %xmm0, 0x5200(%rsp) vmovdqa 0x51b0(%rsp), %xmm0 vmovdqa %xmm0, 0x51f0(%rsp) vmovdqa 0x5200(%rsp), %xmm0 movl 0x521c(%rsp), %eax vmovdqa %xmm0, 0x5440(%rsp) movl %eax, 0x543c(%rsp) vmovdqa 
0x5440(%rsp), %xmm0 movl 0x543c(%rsp), %eax vmovd %eax, %xmm1 vpsrld %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0x5200(%rsp) vmovdqa 0x51f0(%rsp), %xmm0 movl 0x521c(%rsp), %eax vmovdqa %xmm0, 0x5420(%rsp) movl %eax, 0x541c(%rsp) vmovdqa 0x5420(%rsp), %xmm0 movl 0x541c(%rsp), %eax vmovd %eax, %xmm1 vpsrld %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0x51f0(%rsp) vmovdqa 0x5200(%rsp), %xmm0 vmovdqa %xmm0, 0x5180(%rsp) vmovdqa 0x51f0(%rsp), %xmm0 vmovdqa %xmm0, 0x5190(%rsp) vmovdqa 0x5180(%rsp), %ymm0 vmovdqa %ymm0, 0x51c0(%rsp) vmovdqa 0x51c0(%rsp), %ymm0 vmovaps %ymm0, 0x120(%rsp) vmovaps 0x120(%rsp), %ymm0 vmovaps %ymm0, 0x4580(%rsp) vmovaps 0x45a0(%rsp), %ymm0 vmovaps %ymm0, 0x5040(%rsp) vmovaps 0x18f75a9(%rip), %ymm0 # 0x1e03d40 vmovaps %ymm0, 0x5020(%rsp) vmovaps 0x5040(%rsp), %ymm0 vmovaps 0x5020(%rsp), %ymm1 vandps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x45a0(%rsp) vmovaps 0x45a0(%rsp), %ymm0 vmovaps %ymm0, 0x52c0(%rsp) vmovaps 0x18f7427(%rip), %ymm0 # 0x1e03c00 vmovaps %ymm0, 0x52a0(%rsp) vmovaps 0x52c0(%rsp), %ymm0 vmovaps 0x52a0(%rsp), %ymm1 vorps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x45a0(%rsp) vmovaps 0x4580(%rsp), %ymm0 vmovaps %ymm0, 0x53c0(%rsp) vmovaps 0x18f74e5(%rip), %ymm0 # 0x1e03d00 vmovaps %ymm0, 0x53a0(%rsp) vmovdqa 0x53c0(%rsp), %ymm0 vmovdqa %ymm0, 0x5320(%rsp) vmovdqa 0x5320(%rsp), %xmm0 vmovdqa %xmm0, 0x5390(%rsp) vmovdqa 0x5330(%rsp), %xmm0 vmovdqa %xmm0, 0x5380(%rsp) vmovdqa 0x53a0(%rsp), %ymm0 vmovdqa %ymm0, 0x5300(%rsp) vmovdqa 0x5300(%rsp), %xmm0 vmovdqa %xmm0, 0x5370(%rsp) vmovdqa 0x5310(%rsp), %xmm0 vmovdqa %xmm0, 0x5360(%rsp) vmovdqa 0x5390(%rsp), %xmm1 vmovdqa 0x5370(%rsp), %xmm0 vmovdqa %xmm1, 0x5480(%rsp) vmovdqa %xmm0, 0x5470(%rsp) vmovdqa 0x5480(%rsp), %xmm0 vmovdqa 0x5470(%rsp), %xmm1 vpsubd %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0x5390(%rsp) vmovdqa 0x5380(%rsp), %xmm1 vmovdqa 0x5360(%rsp), %xmm0 vmovdqa %xmm1, 0x5460(%rsp) vmovdqa %xmm0, 0x5450(%rsp) vmovdqa 0x5460(%rsp), %xmm0 vmovdqa 0x5450(%rsp), %xmm1 vpsubd %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 
0x5380(%rsp) vmovdqa 0x5390(%rsp), %xmm0 vmovdqa %xmm0, 0x52e0(%rsp) vmovdqa 0x5380(%rsp), %xmm0 vmovdqa %xmm0, 0x52f0(%rsp) vmovdqa 0x52e0(%rsp), %ymm0 vmovdqa %ymm0, 0x5340(%rsp) vmovdqa 0x5340(%rsp), %ymm0 vmovaps %ymm0, 0xe0(%rsp) vmovaps 0xe0(%rsp), %ymm0 vmovaps %ymm0, 0x4580(%rsp) vmovaps 0x4580(%rsp), %ymm0 vmovaps %ymm0, 0x53e0(%rsp) vcvtdq2ps 0x53e0(%rsp), %ymm0 vmovaps %ymm0, 0x4520(%rsp) vmovaps 0x4520(%rsp), %ymm1 vmovaps 0x4560(%rsp), %ymm0 vmovaps %ymm1, 0x4e60(%rsp) vmovaps %ymm0, 0x4e40(%rsp) vmovaps 0x4e60(%rsp), %ymm0 vmovaps 0x4e40(%rsp), %ymm1 vaddps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x4520(%rsp) vmovaps 0x45a0(%rsp), %ymm0 vmovaps 0x18f7378(%rip), %ymm1 # 0x1e03d60 vcmpltps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x4500(%rsp) vmovaps 0x45a0(%rsp), %ymm1 vmovaps 0x4500(%rsp), %ymm0 vmovaps %ymm1, 0x5000(%rsp) vmovaps %ymm0, 0x4fe0(%rsp) vmovaps 0x5000(%rsp), %ymm0 vmovaps 0x4fe0(%rsp), %ymm1 vandps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x44e0(%rsp) vmovaps 0x45a0(%rsp), %ymm1 vmovaps 0x4560(%rsp), %ymm0 vmovaps %ymm1, 0x4da0(%rsp) vmovaps %ymm0, 0x4d80(%rsp) vmovaps 0x4da0(%rsp), %ymm0 vmovaps 0x4d80(%rsp), %ymm1 vsubps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x45a0(%rsp) vmovaps 0x4520(%rsp), %ymm1 vmovaps 0x4560(%rsp), %ymm2 vmovaps 0x4500(%rsp), %ymm0 vmovaps %ymm2, 0x4fc0(%rsp) vmovaps %ymm0, 0x4fa0(%rsp) vmovaps 0x4fc0(%rsp), %ymm0 vmovaps 0x4fa0(%rsp), %ymm2 vandps %ymm2, %ymm0, %ymm0 vmovaps %ymm1, 0x4d60(%rsp) vmovaps %ymm0, 0x4d40(%rsp) vmovaps 0x4d60(%rsp), %ymm0 vmovaps 0x4d40(%rsp), %ymm1 vsubps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x4520(%rsp) vmovaps 0x45a0(%rsp), %ymm1 vmovaps 0x44e0(%rsp), %ymm0 vmovaps %ymm1, 0x4e20(%rsp) vmovaps %ymm0, 0x4e00(%rsp) vmovaps 0x4e20(%rsp), %ymm0 vmovaps 0x4e00(%rsp), %ymm1 vaddps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x45a0(%rsp) vmovaps 0x45a0(%rsp), %ymm0 vmovaps %ymm0, 0x4f20(%rsp) vmovaps %ymm0, 0x4f00(%rsp) vmovaps 0x4f20(%rsp), %ymm0 vmovaps 0x4f00(%rsp), %ymm1 vmulps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 
0x44c0(%rsp) vmovaps 0x18f720b(%rip), %ymm0 # 0x1e03d80 vmovaps %ymm0, 0x44a0(%rsp) vmovaps 0x44a0(%rsp), %ymm1 vmovaps 0x45a0(%rsp), %ymm0 vmovaps %ymm1, 0x4960(%rsp) vmovaps %ymm0, 0x4940(%rsp) vmovaps 0x18f71f6(%rip), %ymm0 # 0x1e03da0 vmovaps %ymm0, 0x4920(%rsp) vmovaps 0x4960(%rsp), %ymm2 vmovaps 0x4940(%rsp), %ymm1 vmovaps 0x4920(%rsp), %ymm0 vmovaps %ymm2, 0x49c0(%rsp) vmovaps %ymm1, 0x49a0(%rsp) vmovaps %ymm0, 0x4980(%rsp) vmovaps 0x49c0(%rsp), %ymm1 vmovaps 0x49a0(%rsp), %ymm0 vmovaps 0x4980(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x44a0(%rsp) vmovaps 0x44a0(%rsp), %ymm1 vmovaps 0x45a0(%rsp), %ymm0 vmovaps %ymm1, 0x4900(%rsp) vmovaps %ymm0, 0x48e0(%rsp) vmovaps 0x18f7182(%rip), %ymm0 # 0x1e03dc0 vmovaps %ymm0, 0x48c0(%rsp) vmovaps 0x4900(%rsp), %ymm2 vmovaps 0x48e0(%rsp), %ymm1 vmovaps 0x48c0(%rsp), %ymm0 vmovaps %ymm2, 0x4a20(%rsp) vmovaps %ymm1, 0x4a00(%rsp) vmovaps %ymm0, 0x49e0(%rsp) vmovaps 0x4a20(%rsp), %ymm1 vmovaps 0x4a00(%rsp), %ymm0 vmovaps 0x49e0(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x44a0(%rsp) vmovaps 0x44a0(%rsp), %ymm1 vmovaps 0x45a0(%rsp), %ymm0 vmovaps %ymm1, 0x48a0(%rsp) vmovaps %ymm0, 0x4880(%rsp) vmovaps 0x18f710e(%rip), %ymm0 # 0x1e03de0 vmovaps %ymm0, 0x4860(%rsp) vmovaps 0x48a0(%rsp), %ymm2 vmovaps 0x4880(%rsp), %ymm1 vmovaps 0x4860(%rsp), %ymm0 vmovaps %ymm2, 0x4a80(%rsp) vmovaps %ymm1, 0x4a60(%rsp) vmovaps %ymm0, 0x4a40(%rsp) vmovaps 0x4a80(%rsp), %ymm1 vmovaps 0x4a60(%rsp), %ymm0 vmovaps 0x4a40(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x44a0(%rsp) vmovaps 0x44a0(%rsp), %ymm1 vmovaps 0x45a0(%rsp), %ymm0 vmovaps %ymm1, 0x4840(%rsp) vmovaps %ymm0, 0x4820(%rsp) vmovaps 0x18f709a(%rip), %ymm0 # 0x1e03e00 vmovaps %ymm0, 0x4800(%rsp) vmovaps 0x4840(%rsp), %ymm2 vmovaps 0x4820(%rsp), %ymm1 vmovaps 0x4800(%rsp), %ymm0 vmovaps %ymm2, 0x4ae0(%rsp) vmovaps %ymm1, 0x4ac0(%rsp) vmovaps %ymm0, 
0x4aa0(%rsp) vmovaps 0x4ae0(%rsp), %ymm1 vmovaps 0x4ac0(%rsp), %ymm0 vmovaps 0x4aa0(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x44a0(%rsp) vmovaps 0x44a0(%rsp), %ymm1 vmovaps 0x45a0(%rsp), %ymm0 vmovaps %ymm1, 0x47e0(%rsp) vmovaps %ymm0, 0x47c0(%rsp) vmovaps 0x18f7026(%rip), %ymm0 # 0x1e03e20 vmovaps %ymm0, 0x47a0(%rsp) vmovaps 0x47e0(%rsp), %ymm2 vmovaps 0x47c0(%rsp), %ymm1 vmovaps 0x47a0(%rsp), %ymm0 vmovaps %ymm2, 0x4b40(%rsp) vmovaps %ymm1, 0x4b20(%rsp) vmovaps %ymm0, 0x4b00(%rsp) vmovaps 0x4b40(%rsp), %ymm1 vmovaps 0x4b20(%rsp), %ymm0 vmovaps 0x4b00(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x44a0(%rsp) vmovaps 0x44a0(%rsp), %ymm1 vmovaps 0x45a0(%rsp), %ymm0 vmovaps %ymm1, 0x4780(%rsp) vmovaps %ymm0, 0x4760(%rsp) vmovaps 0x18f6fb2(%rip), %ymm0 # 0x1e03e40 vmovaps %ymm0, 0x4740(%rsp) vmovaps 0x4780(%rsp), %ymm2 vmovaps 0x4760(%rsp), %ymm1 vmovaps 0x4740(%rsp), %ymm0 vmovaps %ymm2, 0x4ba0(%rsp) vmovaps %ymm1, 0x4b80(%rsp) vmovaps %ymm0, 0x4b60(%rsp) vmovaps 0x4ba0(%rsp), %ymm1 vmovaps 0x4b80(%rsp), %ymm0 vmovaps 0x4b60(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x44a0(%rsp) vmovaps 0x44a0(%rsp), %ymm1 vmovaps 0x45a0(%rsp), %ymm0 vmovaps %ymm1, 0x4720(%rsp) vmovaps %ymm0, 0x4700(%rsp) vmovaps 0x18f6f3e(%rip), %ymm0 # 0x1e03e60 vmovaps %ymm0, 0x46e0(%rsp) vmovaps 0x4720(%rsp), %ymm2 vmovaps 0x4700(%rsp), %ymm1 vmovaps 0x46e0(%rsp), %ymm0 vmovaps %ymm2, 0x4c00(%rsp) vmovaps %ymm1, 0x4be0(%rsp) vmovaps %ymm0, 0x4bc0(%rsp) vmovaps 0x4c00(%rsp), %ymm1 vmovaps 0x4be0(%rsp), %ymm0 vmovaps 0x4bc0(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x44a0(%rsp) vmovaps 0x44a0(%rsp), %ymm1 vmovaps 0x45a0(%rsp), %ymm0 vmovaps %ymm1, 0x46c0(%rsp) vmovaps %ymm0, 0x46a0(%rsp) vmovaps 0x18f6eca(%rip), %ymm0 # 0x1e03e80 vmovaps %ymm0, 0x4680(%rsp) vmovaps 0x46c0(%rsp), %ymm2 vmovaps 0x46a0(%rsp), 
%ymm1 vmovaps 0x4680(%rsp), %ymm0 vmovaps %ymm2, 0x4c60(%rsp) vmovaps %ymm1, 0x4c40(%rsp) vmovaps %ymm0, 0x4c20(%rsp) vmovaps 0x4c60(%rsp), %ymm1 vmovaps 0x4c40(%rsp), %ymm0 vmovaps 0x4c20(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x44a0(%rsp) vmovaps 0x44a0(%rsp), %ymm1 vmovaps 0x45a0(%rsp), %ymm0 vmovaps %ymm1, 0x4ee0(%rsp) vmovaps %ymm0, 0x4ec0(%rsp) vmovaps 0x4ee0(%rsp), %ymm0 vmovaps 0x4ec0(%rsp), %ymm1 vmulps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x44a0(%rsp) vmovaps 0x44a0(%rsp), %ymm1 vmovaps 0x44c0(%rsp), %ymm0 vmovaps %ymm1, 0x4ea0(%rsp) vmovaps %ymm0, 0x4e80(%rsp) vmovaps 0x4ea0(%rsp), %ymm0 vmovaps 0x4e80(%rsp), %ymm1 vmulps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x44a0(%rsp) vmovaps 0x4520(%rsp), %ymm1 vmovaps 0x44a0(%rsp), %ymm0 vmovaps %ymm1, 0x4660(%rsp) vmovaps 0x18f6b79(%rip), %ymm1 # 0x1e03c40 vmovaps %ymm1, 0x4640(%rsp) vmovaps %ymm0, 0x4620(%rsp) vmovaps 0x4660(%rsp), %ymm2 vmovaps 0x4640(%rsp), %ymm1 vmovaps 0x4620(%rsp), %ymm0 vmovaps %ymm2, 0x4cc0(%rsp) vmovaps %ymm1, 0x4ca0(%rsp) vmovaps %ymm0, 0x4c80(%rsp) vmovaps 0x4cc0(%rsp), %ymm1 vmovaps 0x4ca0(%rsp), %ymm0 vmovaps 0x4c80(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x44a0(%rsp) vmovaps 0x44c0(%rsp), %ymm1 vmovaps 0x44a0(%rsp), %ymm0 vmovaps %ymm1, 0x50a0(%rsp) vmovaps 0x18f6aa5(%rip), %ymm1 # 0x1e03c00 vmovaps %ymm1, 0x5080(%rsp) vmovaps %ymm0, 0x5060(%rsp) vmovaps 0x50a0(%rsp), %ymm2 vmovaps 0x5080(%rsp), %ymm1 vmovaps 0x5060(%rsp), %ymm0 vmovaps %ymm2, 0x5100(%rsp) vmovaps %ymm1, 0x50e0(%rsp) vmovaps %ymm0, 0x50c0(%rsp) vmovaps 0x5100(%rsp), %ymm1 vmovaps 0x50e0(%rsp), %ymm0 vmovaps 0x50c0(%rsp), %ymm2 vfnmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = -(ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x44a0(%rsp) vmovaps 0x45a0(%rsp), %ymm1 vmovaps 0x44a0(%rsp), %ymm0 vmovaps %ymm1, 0x4de0(%rsp) vmovaps %ymm0, 0x4dc0(%rsp) vmovaps 0x4de0(%rsp), %ymm0 vmovaps 0x4dc0(%rsp), %ymm1 vaddps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 
0x45a0(%rsp) vmovaps 0x4520(%rsp), %ymm1 vmovaps 0x45a0(%rsp), %ymm0 vmovaps %ymm1, 0x4600(%rsp) vmovaps 0x18f69ee(%rip), %ymm1 # 0x1e03c20 vmovaps %ymm1, 0x45e0(%rsp) vmovaps %ymm0, 0x45c0(%rsp) vmovaps 0x4600(%rsp), %ymm2 vmovaps 0x45e0(%rsp), %ymm1 vmovaps 0x45c0(%rsp), %ymm0 vmovaps %ymm2, 0x4d20(%rsp) vmovaps %ymm1, 0x4d00(%rsp) vmovaps %ymm0, 0x4ce0(%rsp) vmovaps 0x4d20(%rsp), %ymm1 vmovaps 0x4d00(%rsp), %ymm0 vmovaps 0x4ce0(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x45a0(%rsp) vmovaps 0x45a0(%rsp), %ymm1 vmovaps 0x4540(%rsp), %ymm0 vmovaps %ymm1, 0x5280(%rsp) vmovaps %ymm0, 0x5260(%rsp) vmovaps 0x5280(%rsp), %ymm0 vmovaps 0x5260(%rsp), %ymm1 vorps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x44a0(%rsp) vmovaps 0x44a0(%rsp), %ymm0 vmovaps %ymm0, 0xc0(%rsp) vmovaps 0xc0(%rsp), %ymm0 vmovaps %ymm0, 0x3600(%rsp) movl $0x3f800000, 0x363c(%rsp) # imm = 0x3F800000 vmovss 0x363c(%rsp), %xmm0 vmovss %xmm0, 0x94(%rsp) vmovss %xmm0, 0x36bc(%rsp) vmovss %xmm0, 0x36b8(%rsp) vmovss %xmm0, 0x36b4(%rsp) vmovss %xmm0, 0x36b0(%rsp) vmovss %xmm0, 0x36ac(%rsp) vmovss %xmm0, 0x36a8(%rsp) vmovss %xmm0, 0x36a4(%rsp) vmovss %xmm0, 0x36a0(%rsp) vmovss 0x36a4(%rsp), %xmm1 vmovss 0x36a0(%rsp), %xmm0 vinsertps $0x10, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[2,3] vmovss 0x36a8(%rsp), %xmm1 vinsertps $0x20, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm1[0],xmm0[3] vmovss 0x36ac(%rsp), %xmm1 vinsertps $0x30, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1,2],xmm1[0] vmovss 0x36b4(%rsp), %xmm2 vmovss 0x36b0(%rsp), %xmm1 vinsertps $0x10, %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm2[0],xmm1[2,3] vmovss 0x36b8(%rsp), %xmm2 vinsertps $0x20, %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0,1],xmm2[0],xmm1[3] vmovss 0x36bc(%rsp), %xmm2 vinsertps $0x30, %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0,1,2],xmm2[0] vmovaps %xmm1, 0x3690(%rsp) vmovaps %xmm0, 0x3680(%rsp) vmovaps 0x3680(%rsp), %ymm0 vmovaps %ymm0, 0x35e0(%rsp) movl $0x40000000, 0x3638(%rsp) # imm = 0x40000000 vmovss 
0x3638(%rsp), %xmm0 vmovss %xmm0, 0x98(%rsp) vmovss %xmm0, 0x36fc(%rsp) vmovss %xmm0, 0x36f8(%rsp) vmovss %xmm0, 0x36f4(%rsp) vmovss %xmm0, 0x36f0(%rsp) vmovss %xmm0, 0x36ec(%rsp) vmovss %xmm0, 0x36e8(%rsp) vmovss %xmm0, 0x36e4(%rsp) vmovss %xmm0, 0x36e0(%rsp) vmovss 0x36e4(%rsp), %xmm1 vmovss 0x36e0(%rsp), %xmm0 vinsertps $0x10, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[2,3] vmovss 0x36e8(%rsp), %xmm1 vinsertps $0x20, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm1[0],xmm0[3] vmovss 0x36ec(%rsp), %xmm1 vinsertps $0x30, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1,2],xmm1[0] vmovss 0x36f4(%rsp), %xmm2 vmovss 0x36f0(%rsp), %xmm1 vinsertps $0x10, %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm2[0],xmm1[2,3] vmovss 0x36f8(%rsp), %xmm2 vinsertps $0x20, %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0,1],xmm2[0],xmm1[3] vmovss 0x36fc(%rsp), %xmm2 vinsertps $0x30, %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0,1,2],xmm2[0] vmovaps %xmm1, 0x36d0(%rsp) vmovaps %xmm0, 0x36c0(%rsp) vmovaps 0x36c0(%rsp), %ymm0 vmovaps %ymm0, 0x35c0(%rsp) vmovaps 0x3600(%rsp), %ymm1 vmovaps 0x35c0(%rsp), %ymm0 vmovaps %ymm1, 0x3660(%rsp) vmovaps %ymm0, 0x3640(%rsp) vmovaps 0x3660(%rsp), %ymm0 vmovaps 0x3640(%rsp), %ymm1 vmulps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x3720(%rsp) movl $0x3f800000, 0x375c(%rsp) # imm = 0x3F800000 vmovss 0x375c(%rsp), %xmm0 vmovss %xmm0, 0x9c(%rsp) vmovss %xmm0, 0x381c(%rsp) vmovss %xmm0, 0x3818(%rsp) vmovss %xmm0, 0x3814(%rsp) vmovss %xmm0, 0x3810(%rsp) vmovss %xmm0, 0x380c(%rsp) vmovss %xmm0, 0x3808(%rsp) vmovss %xmm0, 0x3804(%rsp) vmovss %xmm0, 0x3800(%rsp) vmovss 0x3804(%rsp), %xmm1 vmovss 0x3800(%rsp), %xmm0 vinsertps $0x10, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[2,3] vmovss 0x3808(%rsp), %xmm1 vinsertps $0x20, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm1[0],xmm0[3] vmovss 0x380c(%rsp), %xmm1 vinsertps $0x30, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1,2],xmm1[0] vmovss 0x3814(%rsp), %xmm2 vmovss 0x3810(%rsp), %xmm1 vinsertps $0x10, %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm2[0],xmm1[2,3] 
vmovss 0x3818(%rsp), %xmm2 vinsertps $0x20, %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0,1],xmm2[0],xmm1[3] vmovss 0x381c(%rsp), %xmm2 vinsertps $0x30, %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0,1,2],xmm2[0] vmovaps %xmm1, 0x37f0(%rsp) vmovaps %xmm0, 0x37e0(%rsp) vmovaps 0x37e0(%rsp), %ymm0 vmovaps %ymm0, 0x3700(%rsp) vmovaps 0x3700(%rsp), %ymm1 vxorps %xmm0, %xmm0, %xmm0 vmovaps %ymm0, 0x3820(%rsp) vmovaps 0x3820(%rsp), %ymm3 vmovaps 0x3720(%rsp), %ymm2 vmovaps %ymm3, 0x3780(%rsp) vmovaps %ymm2, 0x3760(%rsp) vmovaps 0x3780(%rsp), %ymm2 vmovaps 0x3760(%rsp), %ymm3 vsubps %ymm3, %ymm2, %ymm2 vmovaps %ymm2, 0x3960(%rsp) vmovaps %ymm0, 0x3fc0(%rsp) vmovaps 0x3fc0(%rsp), %ymm0 vmovaps %ymm0, 0x3940(%rsp) vmovaps 0x18f64d8(%rip), %ymm0 # 0x1e03b80 vmovaps %ymm0, 0x38e0(%rsp) vmovaps 0x3960(%rsp), %ymm0 vmovaps %ymm0, 0x4040(%rsp) vmovaps 0x18f64d5(%rip), %ymm0 # 0x1e03ba0 vmovaps %ymm0, 0x4020(%rsp) vmovaps 0x4040(%rsp), %ymm0 vmovaps 0x4020(%rsp), %ymm2 vminps %ymm2, %ymm0, %ymm0 vmovaps %ymm0, 0x3960(%rsp) vmovaps 0x3960(%rsp), %ymm0 vmovaps %ymm0, 0x4000(%rsp) vmovaps 0x18f64b3(%rip), %ymm0 # 0x1e03bc0 vmovaps %ymm0, 0x3fe0(%rsp) vmovaps 0x4000(%rsp), %ymm0 vmovaps 0x3fe0(%rsp), %ymm2 vmaxps %ymm2, %ymm0, %ymm0 vmovaps %ymm0, 0x3960(%rsp) vmovaps 0x3960(%rsp), %ymm0 vmovaps %ymm0, 0x3c00(%rsp) vmovaps 0x18f6491(%rip), %ymm0 # 0x1e03be0 vmovaps %ymm0, 0x3be0(%rsp) vmovaps 0x18f64a0(%rip), %ymm0 # 0x1e03c00 vmovaps %ymm0, 0x3bc0(%rsp) vmovaps 0x3c00(%rsp), %ymm4 vmovaps 0x3be0(%rsp), %ymm3 vmovaps 0x3bc0(%rsp), %ymm2 vmovaps %ymm4, 0x3c60(%rsp) vmovaps %ymm3, 0x3c40(%rsp) vmovaps %ymm2, 0x3c20(%rsp) vmovaps 0x3c60(%rsp), %ymm3 vmovaps 0x3c40(%rsp), %ymm2 vmovaps 0x3c20(%rsp), %ymm4 vfmadd213ps %ymm4, %ymm3, %ymm2 # ymm2 = (ymm3 * ymm2) + ymm4 vmovaps %ymm2, 0x3920(%rsp) vmovaps 0x3920(%rsp), %ymm2 vroundps $0x1, %ymm2, %ymm2 vmovaps %ymm2, 0x3940(%rsp) vmovaps 0x3940(%rsp), %ymm3 vmovaps 0x3920(%rsp), %ymm2 vcmpltps %ymm3, %ymm2, %ymm2 vmovaps %ymm2, 0x38c0(%rsp) vmovaps 0x38c0(%rsp), 
%ymm3 vmovaps 0x38e0(%rsp), %ymm2 vmovaps %ymm3, 0x4080(%rsp) vmovaps %ymm2, 0x4060(%rsp) vmovaps 0x4080(%rsp), %ymm2 vmovaps 0x4060(%rsp), %ymm3 vandps %ymm3, %ymm2, %ymm2 vmovaps %ymm2, 0x38c0(%rsp) vmovaps 0x3940(%rsp), %ymm3 vmovaps 0x38c0(%rsp), %ymm2 vmovaps %ymm3, 0x3ee0(%rsp) vmovaps %ymm2, 0x3ec0(%rsp) vmovaps 0x3ee0(%rsp), %ymm2 vmovaps 0x3ec0(%rsp), %ymm3 vsubps %ymm3, %ymm2, %ymm2 vmovaps %ymm2, 0x3920(%rsp) vmovaps 0x3920(%rsp), %ymm3 vmovaps 0x3960(%rsp), %ymm2 vmovaps %ymm3, 0x4140(%rsp) vmovaps 0x18f6377(%rip), %ymm3 # 0x1e03c20 vmovaps %ymm3, 0x4120(%rsp) vmovaps %ymm2, 0x4100(%rsp) vmovaps 0x4140(%rsp), %ymm4 vmovaps 0x4120(%rsp), %ymm3 vmovaps 0x4100(%rsp), %ymm2 vmovaps %ymm4, 0x43a0(%rsp) vmovaps %ymm3, 0x4380(%rsp) vmovaps %ymm2, 0x4360(%rsp) vmovaps 0x43a0(%rsp), %ymm3 vmovaps 0x4380(%rsp), %ymm2 vmovaps 0x4360(%rsp), %ymm4 vfnmadd213ps %ymm4, %ymm3, %ymm2 # ymm2 = -(ymm3 * ymm2) + ymm4 vmovaps %ymm2, 0x3960(%rsp) vmovaps 0x3920(%rsp), %ymm3 vmovaps 0x3960(%rsp), %ymm2 vmovaps %ymm3, 0x40e0(%rsp) vmovaps 0x18f6303(%rip), %ymm3 # 0x1e03c40 vmovaps %ymm3, 0x40c0(%rsp) vmovaps %ymm2, 0x40a0(%rsp) vmovaps 0x40e0(%rsp), %ymm4 vmovaps 0x40c0(%rsp), %ymm3 vmovaps 0x40a0(%rsp), %ymm2 vmovaps %ymm4, 0x4400(%rsp) vmovaps %ymm3, 0x43e0(%rsp) vmovaps %ymm2, 0x43c0(%rsp) vmovaps 0x4400(%rsp), %ymm3 vmovaps 0x43e0(%rsp), %ymm2 vmovaps 0x43c0(%rsp), %ymm4 vfnmadd213ps %ymm4, %ymm3, %ymm2 # ymm2 = -(ymm3 * ymm2) + ymm4 vmovaps %ymm2, 0x3960(%rsp) vmovaps 0x3960(%rsp), %ymm2 vmovaps %ymm2, 0x3fa0(%rsp) vmovaps %ymm2, 0x3f80(%rsp) vmovaps 0x3fa0(%rsp), %ymm2 vmovaps 0x3f80(%rsp), %ymm3 vmulps %ymm3, %ymm2, %ymm2 vmovaps %ymm2, 0x3940(%rsp) vmovaps 0x18f6270(%rip), %ymm2 # 0x1e03c60 vmovaps %ymm2, 0x38a0(%rsp) vmovaps 0x38a0(%rsp), %ymm3 vmovaps 0x3960(%rsp), %ymm2 vmovaps %ymm3, 0x3ba0(%rsp) vmovaps %ymm2, 0x3b80(%rsp) vmovaps 0x18f625b(%rip), %ymm2 # 0x1e03c80 vmovaps %ymm2, 0x3b60(%rsp) vmovaps 0x3ba0(%rsp), %ymm4 vmovaps 0x3b80(%rsp), %ymm3 vmovaps 
0x3b60(%rsp), %ymm2 vmovaps %ymm4, 0x3cc0(%rsp) vmovaps %ymm3, 0x3ca0(%rsp) vmovaps %ymm2, 0x3c80(%rsp) vmovaps 0x3cc0(%rsp), %ymm3 vmovaps 0x3ca0(%rsp), %ymm2 vmovaps 0x3c80(%rsp), %ymm4 vfmadd213ps %ymm4, %ymm3, %ymm2 # ymm2 = (ymm3 * ymm2) + ymm4 vmovaps %ymm2, 0x38a0(%rsp) vmovaps 0x38a0(%rsp), %ymm3 vmovaps 0x3960(%rsp), %ymm2 vmovaps %ymm3, 0x3b40(%rsp) vmovaps %ymm2, 0x3b20(%rsp) vmovaps 0x18f61e7(%rip), %ymm2 # 0x1e03ca0 vmovaps %ymm2, 0x3b00(%rsp) vmovaps 0x3b40(%rsp), %ymm4 vmovaps 0x3b20(%rsp), %ymm3 vmovaps 0x3b00(%rsp), %ymm2 vmovaps %ymm4, 0x3d20(%rsp) vmovaps %ymm3, 0x3d00(%rsp) vmovaps %ymm2, 0x3ce0(%rsp) vmovaps 0x3d20(%rsp), %ymm3 vmovaps 0x3d00(%rsp), %ymm2 vmovaps 0x3ce0(%rsp), %ymm4 vfmadd213ps %ymm4, %ymm3, %ymm2 # ymm2 = (ymm3 * ymm2) + ymm4 vmovaps %ymm2, 0x38a0(%rsp) vmovaps 0x38a0(%rsp), %ymm3 vmovaps 0x3960(%rsp), %ymm2 vmovaps %ymm3, 0x3ae0(%rsp) vmovaps %ymm2, 0x3ac0(%rsp) vmovaps 0x18f6173(%rip), %ymm2 # 0x1e03cc0 vmovaps %ymm2, 0x3aa0(%rsp) vmovaps 0x3ae0(%rsp), %ymm4 vmovaps 0x3ac0(%rsp), %ymm3 vmovaps 0x3aa0(%rsp), %ymm2 vmovaps %ymm4, 0x3d80(%rsp) vmovaps %ymm3, 0x3d60(%rsp) vmovaps %ymm2, 0x3d40(%rsp) vmovaps 0x3d80(%rsp), %ymm3 vmovaps 0x3d60(%rsp), %ymm2 vmovaps 0x3d40(%rsp), %ymm4 vfmadd213ps %ymm4, %ymm3, %ymm2 # ymm2 = (ymm3 * ymm2) + ymm4 vmovaps %ymm2, 0x38a0(%rsp) vmovaps 0x38a0(%rsp), %ymm3 vmovaps 0x3960(%rsp), %ymm2 vmovaps %ymm3, 0x3a80(%rsp) vmovaps %ymm2, 0x3a60(%rsp) vmovaps 0x18f60ff(%rip), %ymm2 # 0x1e03ce0 vmovaps %ymm2, 0x3a40(%rsp) vmovaps 0x3a80(%rsp), %ymm4 vmovaps 0x3a60(%rsp), %ymm3 vmovaps 0x3a40(%rsp), %ymm2 vmovaps %ymm4, 0x3de0(%rsp) vmovaps %ymm3, 0x3dc0(%rsp) vmovaps %ymm2, 0x3da0(%rsp) vmovaps 0x3de0(%rsp), %ymm3 vmovaps 0x3dc0(%rsp), %ymm2 vmovaps 0x3da0(%rsp), %ymm4 vfmadd213ps %ymm4, %ymm3, %ymm2 # ymm2 = (ymm3 * ymm2) + ymm4 vmovaps %ymm2, 0x38a0(%rsp) vmovaps 0x38a0(%rsp), %ymm3 vmovaps 0x3960(%rsp), %ymm2 vmovaps %ymm3, 0x3a20(%rsp) vmovaps %ymm2, 0x3a00(%rsp) vmovaps %ymm0, 0x39e0(%rsp) 
vmovaps 0x3a20(%rsp), %ymm3 vmovaps 0x3a00(%rsp), %ymm2 vmovaps 0x39e0(%rsp), %ymm0 vmovaps %ymm3, 0x3e40(%rsp) vmovaps %ymm2, 0x3e20(%rsp) vmovaps %ymm0, 0x3e00(%rsp) vmovaps 0x3e40(%rsp), %ymm2 vmovaps 0x3e20(%rsp), %ymm0 vmovaps 0x3e00(%rsp), %ymm3 vfmadd213ps %ymm3, %ymm2, %ymm0 # ymm0 = (ymm2 * ymm0) + ymm3 vmovaps %ymm0, 0x38a0(%rsp) vmovaps 0x38a0(%rsp), %ymm3 vmovaps 0x3940(%rsp), %ymm2 vmovaps 0x3960(%rsp), %ymm0 vmovaps %ymm3, 0x39c0(%rsp) vmovaps %ymm2, 0x39a0(%rsp) vmovaps %ymm0, 0x3980(%rsp) vmovaps 0x39c0(%rsp), %ymm3 vmovaps 0x39a0(%rsp), %ymm2 vmovaps 0x3980(%rsp), %ymm0 vmovaps %ymm3, 0x3ea0(%rsp) vmovaps %ymm2, 0x3e80(%rsp) vmovaps %ymm0, 0x3e60(%rsp) vmovaps 0x3ea0(%rsp), %ymm2 vmovaps 0x3e80(%rsp), %ymm0 vmovaps 0x3e60(%rsp), %ymm3 vfmadd213ps %ymm3, %ymm2, %ymm0 # ymm0 = (ymm2 * ymm0) + ymm3 vmovaps %ymm0, 0x38a0(%rsp) vmovaps 0x38a0(%rsp), %ymm2 vmovaps 0x38e0(%rsp), %ymm0 vmovaps %ymm2, 0x3f20(%rsp) vmovaps %ymm0, 0x3f00(%rsp) vmovaps 0x3f20(%rsp), %ymm0 vmovaps 0x3f00(%rsp), %ymm2 vaddps %ymm2, %ymm0, %ymm0 vmovaps %ymm0, 0x38a0(%rsp) vmovaps 0x3920(%rsp), %ymm0 vmovaps %ymm0, 0x4160(%rsp) vcvttps2dq 0x4160(%rsp), %ymm0 vmovaps %ymm0, 0x3900(%rsp) vmovaps 0x3900(%rsp), %ymm0 vmovaps %ymm0, 0x4260(%rsp) vmovaps 0x18f5f15(%rip), %ymm0 # 0x1e03d00 vmovaps %ymm0, 0x4240(%rsp) vmovaps 0x4260(%rsp), %ymm0 vmovaps %ymm0, 0x41c0(%rsp) vmovdqa 0x41c0(%rsp), %xmm0 vmovdqa %xmm0, 0x4230(%rsp) vmovdqa 0x41d0(%rsp), %xmm0 vmovdqa %xmm0, 0x4220(%rsp) vmovaps 0x4240(%rsp), %ymm0 vmovaps %ymm0, 0x41a0(%rsp) vmovdqa 0x41a0(%rsp), %xmm0 vmovdqa %xmm0, 0x4210(%rsp) vmovdqa 0x41b0(%rsp), %xmm0 vmovdqa %xmm0, 0x4200(%rsp) vmovdqa 0x4230(%rsp), %xmm2 vmovdqa 0x4210(%rsp), %xmm0 vmovdqa %xmm2, 0x4450(%rsp) vmovdqa %xmm0, 0x4440(%rsp) vmovdqa 0x4450(%rsp), %xmm0 vmovdqa 0x4440(%rsp), %xmm2 vpaddd %xmm2, %xmm0, %xmm0 vmovdqa %xmm0, 0x4230(%rsp) vmovdqa 0x4220(%rsp), %xmm2 vmovdqa 0x4200(%rsp), %xmm0 vmovdqa %xmm2, 0x4430(%rsp) vmovdqa %xmm0, 0x4420(%rsp) vmovdqa 
0x4430(%rsp), %xmm0 vmovdqa 0x4420(%rsp), %xmm2 vpaddd %xmm2, %xmm0, %xmm0 vmovdqa %xmm0, 0x4220(%rsp) vmovdqa 0x4230(%rsp), %xmm0 vmovdqa %xmm0, 0x4180(%rsp) vmovdqa 0x4220(%rsp), %xmm0 vmovdqa %xmm0, 0x4190(%rsp) vmovaps 0x4180(%rsp), %ymm0 vmovaps %ymm0, 0x41e0(%rsp) vmovaps 0x41e0(%rsp), %ymm0 vmovaps %ymm0, 0x3900(%rsp) vmovaps 0x3900(%rsp), %ymm0 vmovaps %ymm0, 0x4320(%rsp) movl $0x17, 0x431c(%rsp) vmovaps 0x4320(%rsp), %ymm0 vmovaps %ymm0, 0x42a0(%rsp) vmovdqa 0x42a0(%rsp), %xmm0 vmovdqa %xmm0, 0x4300(%rsp) vmovdqa 0x42b0(%rsp), %xmm0 vmovdqa %xmm0, 0x42f0(%rsp) vmovdqa 0x4300(%rsp), %xmm0 movl 0x431c(%rsp), %eax vmovdqa %xmm0, 0x4490(%rsp) movl %eax, 0x448c(%rsp) vmovdqa 0x4490(%rsp), %xmm0 vmovd 0x448c(%rsp), %xmm2 vpslld %xmm2, %xmm0, %xmm0 vmovdqa %xmm0, 0x4300(%rsp) vmovdqa 0x42f0(%rsp), %xmm0 movl 0x431c(%rsp), %eax vmovdqa %xmm0, 0x4470(%rsp) movl %eax, 0x446c(%rsp) vmovdqa 0x4470(%rsp), %xmm0 vmovd 0x446c(%rsp), %xmm2 vpslld %xmm2, %xmm0, %xmm0 vmovdqa %xmm0, 0x42f0(%rsp) vmovdqa 0x4300(%rsp), %xmm0 vmovdqa %xmm0, 0x4280(%rsp) vmovdqa 0x42f0(%rsp), %xmm0 vmovdqa %xmm0, 0x4290(%rsp) vmovaps 0x4280(%rsp), %ymm0 vmovaps %ymm0, 0x42c0(%rsp) vmovaps 0x42c0(%rsp), %ymm0 vmovaps %ymm0, 0x3900(%rsp) vmovaps 0x3900(%rsp), %ymm0 vmovaps %ymm0, 0x4340(%rsp) vmovaps 0x4340(%rsp), %ymm0 vmovaps %ymm0, 0x3880(%rsp) vmovaps 0x38a0(%rsp), %ymm2 vmovaps 0x3880(%rsp), %ymm0 vmovaps %ymm2, 0x3f60(%rsp) vmovaps %ymm0, 0x3f40(%rsp) vmovaps 0x3f60(%rsp), %ymm0 vmovaps 0x3f40(%rsp), %ymm2 vmulps %ymm2, %ymm0, %ymm0 vmovaps %ymm0, 0x38a0(%rsp) vmovaps 0x38a0(%rsp), %ymm0 vmovaps %ymm1, 0x37c0(%rsp) vmovaps %ymm0, 0x37a0(%rsp) vmovaps 0x37c0(%rsp), %ymm0 vmovaps 0x37a0(%rsp), %ymm2 vaddps %ymm2, %ymm0, %ymm0 vmovaps %ymm1, 0x3860(%rsp) vmovaps %ymm0, 0x3840(%rsp) vmovaps 0x3860(%rsp), %ymm4 vmovaps 0x3840(%rsp), %ymm3 vrcpps %ymm3, %ymm0 vmulps %ymm0, %ymm4, %ymm1 vmovaps %ymm1, %ymm2 vfmsub213ps %ymm4, %ymm3, %ymm2 # ymm2 = (ymm3 * ymm2) - ymm4 vfnmadd213ps %ymm1, %ymm0, 
%ymm2 # ymm2 = -(ymm0 * ymm2) + ymm1 vmovaps 0x35c0(%rsp), %ymm1 vmovaps 0x35e0(%rsp), %ymm0 vmovaps %ymm2, 0x5160(%rsp) vmovaps %ymm1, 0x5140(%rsp) vmovaps %ymm0, 0x5120(%rsp) vmovaps 0x5160(%rsp), %ymm1 vmovaps 0x5140(%rsp), %ymm0 vmovaps 0x5120(%rsp), %ymm2 vmovaps 0x18f8a57(%rip), %ymm3 # 0x1e06bc0 vxorps %ymm3, %ymm2, %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0xa0(%rsp) vmovaps 0xa0(%rsp), %ymm0 vmovaps 0x1c0(%rsp), %ymm1 vmovaps %ymm1, 0x1a60(%rsp) vmovaps %ymm0, 0x1a40(%rsp) vmovaps 0x1a60(%rsp), %ymm0 vmulps 0x1a40(%rsp), %ymm0, %ymm0 vmovaps %ymm0, 0x60(%rsp) vmovaps 0x60(%rsp), %ymm0 vmovaps %ymm0, 0x1400(%rsp) jmp 0x50e6c2 movq 0x13d0(%rsp), %rax movq %rax, 0x1438(%rsp) movq $0x0, 0x1430(%rsp) movq 0x1438(%rsp), %rax movq (%rax), %rax movq 0x1430(%rsp), %rcx vmovss (%rax,%rcx,4), %xmm0 vmovss %xmm0, 0x1474(%rsp) vmovss 0x1474(%rsp), %xmm0 vmovss %xmm0, 0x34(%rsp) vmovss %xmm0, 0x153c(%rsp) vmovss %xmm0, 0x1538(%rsp) vmovss %xmm0, 0x1534(%rsp) vmovss %xmm0, 0x1530(%rsp) vmovss %xmm0, 0x152c(%rsp) vmovss %xmm0, 0x1528(%rsp) vmovss %xmm0, 0x1524(%rsp) vmovss %xmm0, 0x1520(%rsp) vmovss 0x1524(%rsp), %xmm1 vmovss 0x1520(%rsp), %xmm0 vinsertps $0x10, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[2,3] vmovss 0x1528(%rsp), %xmm1 vinsertps $0x20, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm1[0],xmm0[3] vmovss 0x152c(%rsp), %xmm1 vinsertps $0x30, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1,2],xmm1[0] vmovss 0x1534(%rsp), %xmm2 vmovss 0x1530(%rsp), %xmm1 vinsertps $0x10, %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm2[0],xmm1[2,3] vmovss 0x1538(%rsp), %xmm2 vinsertps $0x20, %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0,1],xmm2[0],xmm1[3] vmovss 0x153c(%rsp), %xmm2 vinsertps $0x30, %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0,1,2],xmm2[0] vmovaps %xmm1, 0x1510(%rsp) vmovaps %xmm0, 0x1500(%rsp) vmovaps 0x1500(%rsp), %ymm0 vmovaps %ymm0, 0x1360(%rsp) movq 0x13d0(%rsp), %rax movq %rax, 0x1428(%rsp) movq $0x1, 0x1420(%rsp) movq 0x1428(%rsp), %rax movq 
(%rax), %rax movq 0x1420(%rsp), %rcx vmovss (%rax,%rcx,4), %xmm0 vmovss %xmm0, 0x1470(%rsp) vmovss 0x1470(%rsp), %xmm0 vmovss %xmm0, 0x38(%rsp) vmovss %xmm0, 0x157c(%rsp) vmovss %xmm0, 0x1578(%rsp) vmovss %xmm0, 0x1574(%rsp) vmovss %xmm0, 0x1570(%rsp) vmovss %xmm0, 0x156c(%rsp) vmovss %xmm0, 0x1568(%rsp) vmovss %xmm0, 0x1564(%rsp) vmovss %xmm0, 0x1560(%rsp) vmovss 0x1564(%rsp), %xmm1 vmovss 0x1560(%rsp), %xmm0 vinsertps $0x10, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[2,3] vmovss 0x1568(%rsp), %xmm1 vinsertps $0x20, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm1[0],xmm0[3] vmovss 0x156c(%rsp), %xmm1 vinsertps $0x30, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1,2],xmm1[0] vmovss 0x1574(%rsp), %xmm2 vmovss 0x1570(%rsp), %xmm1 vinsertps $0x10, %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm2[0],xmm1[2,3] vmovss 0x1578(%rsp), %xmm2 vinsertps $0x20, %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0,1],xmm2[0],xmm1[3] vmovss 0x157c(%rsp), %xmm2 vinsertps $0x30, %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0,1,2],xmm2[0] vmovaps %xmm1, 0x1550(%rsp) vmovaps %xmm0, 0x1540(%rsp) vmovaps 0x1540(%rsp), %ymm0 vmovaps %ymm0, 0x1340(%rsp) vmovaps 0x13e0(%rsp), %ymm2 vmovaps 0x1360(%rsp), %ymm1 vmovaps 0x1340(%rsp), %ymm0 vmovaps %ymm2, 0x1b20(%rsp) vmovaps %ymm1, 0x1b00(%rsp) vmovaps %ymm0, 0x1ae0(%rsp) movl $0x3f800000, 0x1c1c(%rsp) # imm = 0x3F800000 vmovss 0x1c1c(%rsp), %xmm0 vmovss %xmm0, 0x3c(%rsp) vmovss %xmm0, 0x1c9c(%rsp) vmovss %xmm0, 0x1c98(%rsp) vmovss %xmm0, 0x1c94(%rsp) vmovss %xmm0, 0x1c90(%rsp) vmovss %xmm0, 0x1c8c(%rsp) vmovss %xmm0, 0x1c88(%rsp) vmovss %xmm0, 0x1c84(%rsp) vmovss %xmm0, 0x1c80(%rsp) vmovss 0x1c84(%rsp), %xmm1 vmovss 0x1c80(%rsp), %xmm0 vinsertps $0x10, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[2,3] vmovss 0x1c88(%rsp), %xmm1 vinsertps $0x20, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm1[0],xmm0[3] vmovss 0x1c8c(%rsp), %xmm1 vinsertps $0x30, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1,2],xmm1[0] vmovss 0x1c94(%rsp), %xmm2 vmovss 0x1c90(%rsp), %xmm1 vinsertps $0x10, %xmm2, %xmm1, 
%xmm1 # xmm1 = xmm1[0],xmm2[0],xmm1[2,3] vmovss 0x1c98(%rsp), %xmm2 vinsertps $0x20, %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0,1],xmm2[0],xmm1[3] vmovss 0x1c9c(%rsp), %xmm2 vinsertps $0x30, %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0,1,2],xmm2[0] vmovaps %xmm1, 0x1c70(%rsp) vmovaps %xmm0, 0x1c60(%rsp) vmovaps 0x1c60(%rsp), %ymm0 vmovaps %ymm0, 0x1ac0(%rsp) vmovaps 0x1b20(%rsp), %ymm2 vmovaps 0x1b00(%rsp), %ymm1 vmovaps 0x1ae0(%rsp), %ymm0 vmovaps %ymm2, 0x1b80(%rsp) vmovaps %ymm1, 0x1b60(%rsp) vmovaps %ymm0, 0x1b40(%rsp) vmovaps 0x1b80(%rsp), %ymm2 vmovaps 0x1b60(%rsp), %ymm1 vmovaps 0x1b40(%rsp), %ymm0 vmovaps %ymm2, 0x1be0(%rsp) vmovaps %ymm1, 0x1bc0(%rsp) vmovaps %ymm0, 0x1ba0(%rsp) vmovaps 0x1be0(%rsp), %ymm1 vmovaps 0x1bc0(%rsp), %ymm0 vmovaps 0x1ba0(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x1ae0(%rsp) vmovaps 0x1ae0(%rsp), %ymm1 vxorps %xmm0, %xmm0, %xmm0 vmovaps %ymm0, 0x1ca0(%rsp) vmovaps 0x1ca0(%rsp), %ymm0 vmovaps %ymm1, 0x1ce0(%rsp) vmovaps %ymm0, 0x1cc0(%rsp) vmovaps 0x1ce0(%rsp), %ymm0 vmovaps 0x1cc0(%rsp), %ymm1 vmaxps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x1ae0(%rsp) vmovaps 0x1ae0(%rsp), %ymm1 vmovaps 0x1ac0(%rsp), %ymm0 vmovaps %ymm1, 0x1d20(%rsp) vmovaps %ymm0, 0x1d00(%rsp) vmovaps 0x1d20(%rsp), %ymm0 vmovaps 0x1d00(%rsp), %ymm1 vminps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x1ae0(%rsp) vmovaps 0x1ae0(%rsp), %ymm1 vmovaps 0x1b20(%rsp), %ymm0 vmovaps %ymm1, 0x1c40(%rsp) vmovaps %ymm0, 0x1c20(%rsp) vmovaps 0x1c40(%rsp), %ymm0 vmulps 0x1c20(%rsp), %ymm0, %ymm0 vmovaps %ymm0, 0x40(%rsp) vmovaps 0x40(%rsp), %ymm0 vmovaps %ymm0, 0x1400(%rsp) jmp 0x50e6c2 vmovaps 0x13e0(%rsp), %ymm0 vmovaps %ymm0, 0x1400(%rsp) vmovaps 0x1400(%rsp), %ymm0 vmovaps %ymm0, (%rsp) vmovaps (%rsp), %ymm0 vmovaps %ymm0, 0x7e0(%rsp) movq 0x868(%rsp), %rax movl 0x818(%rsp), %ecx shll $0x3, %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax vmovaps 0x7e0(%rsp), %ymm0 movq %rax, 0xb10(%rsp) vmovaps %ymm0, 0xae0(%rsp) vmovaps 0xae0(%rsp), %ymm0 movq 
0xb10(%rsp), %rax vmovaps %ymm0, (%rax) movl 0x818(%rsp), %eax addl $0x1, %eax movl %eax, 0x818(%rsp) jmp 0x50943e movl 0x8dc(%rsp), %ecx shll $0x3, %ecx movq 0x868(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x868(%rsp) movl 0x81c(%rsp), %eax addl $0x1, %eax movl %eax, 0x81c(%rsp) jmp 0x50941f jmp 0x50e77b movl 0x874(%rsp), %eax addl $0x1, %eax movl %eax, 0x874(%rsp) jmp 0x508f9f leaq 0x8b8(%rsp), %rdi vzeroupper callq 0x998a0 movq %rbp, %rsp popq %rbp retq movq 0x8a8(%rsp), %rdi callq 0x5e3b0 nopw %cs:(%rax,%rax)
/ysh329[P]ncnn/src/layer/x86/convolution_pack8.h
ncnn::convolution_im2col_sgemm_pack4to8_avx(ncnn::Mat const&, ncnn::Mat&, ncnn::Mat const&, ncnn::Mat const&, int, int, int, int, int, int, ncnn::Option const&)
static void convolution_im2col_sgemm_pack4to8_avx(const Mat& bottom_blob, Mat& top_blob, const Mat& kernel, const Mat& _bias, int kernel_w, int kernel_h, int dilation_w, int dilation_h, int stride_w, int stride_h, const Option& opt) { int w = bottom_blob.w; int inch = bottom_blob.c; int outw = top_blob.w; int outh = top_blob.h; const int size = outw * outh; const int maxk = kernel_w * kernel_h; // im2col Mat bottom_im2col(size, maxk, inch, 16u, 4, opt.workspace_allocator); { const int gap = (w * stride_h - outw * stride_w) * 4; #pragma omp parallel for num_threads(opt.num_threads) for (int p = 0; p < inch; p++) { const Mat img = bottom_blob.channel(p); float* ptr = bottom_im2col.channel(p); for (int u = 0; u < kernel_h; u++) { for (int v = 0; v < kernel_w; v++) { const float* sptr = img.row(dilation_h * u) + dilation_w * v * 4; for (int i = 0; i < outh; i++) { int j = 0; for (; j < outw; j++) { __m128 _val = _mm_load_ps(sptr); _mm_store_ps(ptr, _val); sptr += stride_w * 4; ptr += 4; } sptr += gap; } } } } } im2col_sgemm_pack4to8_avx(bottom_im2col, top_blob, kernel, _bias, opt); }
subq $0x408, %rsp # imm = 0x408 movq 0x430(%rsp), %rax movl 0x428(%rsp), %eax movl 0x420(%rsp), %eax movl 0x418(%rsp), %eax movl 0x410(%rsp), %eax movq %rdi, 0x1d0(%rsp) movq %rsi, 0x1c8(%rsp) movq %rdx, 0x1c0(%rsp) movq %rcx, 0x1b8(%rsp) movl %r8d, 0x1b4(%rsp) movl %r9d, 0x1b0(%rsp) movq 0x1d0(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x1ac(%rsp) movq 0x1d0(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0x1a8(%rsp) movq 0x1c8(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x1a4(%rsp) movq 0x1c8(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0x1a0(%rsp) movl 0x1a4(%rsp), %eax imull 0x1a0(%rsp), %eax movl %eax, 0x19c(%rsp) movl 0x1b4(%rsp), %eax imull 0x1b0(%rsp), %eax movl %eax, 0x198(%rsp) movl 0x19c(%rsp), %esi movl 0x198(%rsp), %edx movl 0x1a8(%rsp), %ecx movq 0x430(%rsp), %rax movq 0x10(%rax), %rax leaq 0x150(%rsp), %rdi movq %rdi, 0x348(%rsp) movl %esi, 0x344(%rsp) movl %edx, 0x340(%rsp) movl %ecx, 0x33c(%rsp) movq $0x10, 0x330(%rsp) movl $0x4, 0x32c(%rsp) movq %rax, 0x320(%rsp) movq 0x348(%rsp), %rdi movq %rdi, 0x68(%rsp) movq $0x0, (%rdi) movq $0x0, 0x8(%rdi) movq $0x0, 0x10(%rdi) movl $0x0, 0x18(%rdi) movq $0x0, 0x20(%rdi) movl $0x0, 0x28(%rdi) movl $0x0, 0x2c(%rdi) movl $0x0, 0x30(%rdi) movl $0x0, 0x34(%rdi) movl $0x0, 0x38(%rdi) movq $0x0, 0x40(%rdi) movl 0x344(%rsp), %esi movl 0x340(%rsp), %edx movl 0x33c(%rsp), %ecx movq 0x330(%rsp), %r8 movl 0x32c(%rsp), %r9d movq 0x320(%rsp), %rax movq %rax, (%rsp) callq 0x62060 movl 0x1ac(%rsp), %eax imull 0x428(%rsp), %eax movl 0x1a4(%rsp), %ecx imull 0x420(%rsp), %ecx subl %ecx, %eax shll $0x2, %eax movl %eax, 0x14c(%rsp) movl $0x0, 0x148(%rsp) movl 0x148(%rsp), %eax cmpl 0x1a8(%rsp), %eax jge 0x53d8db movq 0x1d0(%rsp), %rcx movl 0x148(%rsp), %eax leaq 0x100(%rsp), %rdx movq %rdx, 0x2c0(%rsp) movq %rcx, 0x2b8(%rsp) movl %eax, 0x2b4(%rsp) movq 0x2b8(%rsp), %rax movq %rax, 0x58(%rsp) movb $0x0, 0x2b3(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 
0x2b4(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x100(%rsp), %r10 movq %r10, 0x3a8(%rsp) movl %r9d, 0x3a4(%rsp) movl %r8d, 0x3a0(%rsp) movl %edi, 0x39c(%rsp) movq %rsi, 0x390(%rsp) movq %rdx, 0x388(%rsp) movl %ecx, 0x384(%rsp) movq %rax, 0x378(%rsp) movq 0x3a8(%rsp), %rcx movq %rcx, 0x60(%rsp) movq 0x390(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x388(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x384(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x378(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x3a4(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x3a0(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x39c(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x400(%rsp) movl $0x10, 0x3fc(%rsp) movq 0x400(%rsp), %rax movslq 0x3fc(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x3fc(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rcx movq 0x60(%rsp), %rax movq %rcx, 0x40(%rax) movq 0x58(%rsp), %rax movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x128(%rsp) cmpl $0x4, 0x28(%rax) jne 0x53d056 movq 0x58(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x140(%rsp) movb $0x1, 0x2b3(%rsp) testb $0x1, 0x2b3(%rsp) jne 0x53d17d leaq 0x100(%rsp), %rax movq %rax, 0x2c8(%rsp) movq 0x2c8(%rsp), %rax movq %rax, 0x2d8(%rsp) movq 0x2d8(%rsp), %rax movq %rax, 0x50(%rsp) cmpq $0x0, 0x8(%rax) je 0x53d125 movq 0x50(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x2d4(%rsp) # imm = 0xFFFFFFFF movl 0x2d4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x2d0(%rsp) cmpl $0x1, 0x2d0(%rsp) jne 0x53d125 movq 0x50(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x53d0f9 movq 0x50(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x53d0f7 jmp 0x53d123 
movq 0x50(%rsp), %rax movq (%rax), %rax movq %rax, 0x2e0(%rsp) cmpq $0x0, 0x2e0(%rsp) je 0x53d121 movq 0x2e0(%rsp), %rdi callq 0x5e480 jmp 0x53d123 jmp 0x53d125 movq 0x50(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x53d17d movq %rax, %rdi callq 0x5fc90 jmp 0x53d17f movl 0x148(%rsp), %eax leaq 0xa0(%rsp), %rcx movq %rcx, 0x278(%rsp) leaq 0x150(%rsp), %rcx movq %rcx, 0x270(%rsp) movl %eax, 0x26c(%rsp) movq 0x270(%rsp), %rax movq %rax, 0x40(%rsp) movb $0x0, 0x26b(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x26c(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0xa0(%rsp), %r10 movq %r10, 0x3e0(%rsp) movl %r9d, 0x3dc(%rsp) movl %r8d, 0x3d8(%rsp) movl %edi, 0x3d4(%rsp) movq %rsi, 0x3c8(%rsp) movq %rdx, 0x3c0(%rsp) movl %ecx, 0x3bc(%rsp) movq %rax, 0x3b0(%rsp) movq 0x3e0(%rsp), %rcx movq %rcx, 0x48(%rsp) movq 0x3c8(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x3c0(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x3bc(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x3b0(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x3dc(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x3d8(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x3d4(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x3f0(%rsp) movl $0x10, 0x3ec(%rsp) movq 0x3f0(%rsp), %rax movslq 0x3ec(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x3ec(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rcx movq 0x48(%rsp), %rax movq %rcx, 0x40(%rax) movq 0x40(%rsp), %rax movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 
0xc8(%rsp) cmpl $0x4, 0x28(%rax) jne 0x53d334 movq 0x40(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0xe0(%rsp) movb $0x1, 0x26b(%rsp) testb $0x1, 0x26b(%rsp) jne 0x53d45b leaq 0xa0(%rsp), %rax movq %rax, 0x280(%rsp) movq 0x280(%rsp), %rax movq %rax, 0x290(%rsp) movq 0x290(%rsp), %rax movq %rax, 0x38(%rsp) cmpq $0x0, 0x8(%rax) je 0x53d403 movq 0x38(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x28c(%rsp) # imm = 0xFFFFFFFF movl 0x28c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x288(%rsp) cmpl $0x1, 0x288(%rsp) jne 0x53d403 movq 0x38(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x53d3d7 movq 0x38(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x53d3d5 jmp 0x53d401 movq 0x38(%rsp), %rax movq (%rax), %rax movq %rax, 0x2e8(%rsp) cmpq $0x0, 0x2e8(%rsp) je 0x53d3ff movq 0x2e8(%rsp), %rdi callq 0x5e480 jmp 0x53d401 jmp 0x53d403 movq 0x38(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x53d45b movq %rax, %rdi callq 0x5fc90 jmp 0x53d45d leaq 0xa0(%rsp), %rax movq %rax, 0x298(%rsp) movq 0x298(%rsp), %rax movq (%rax), %rax movq %rax, 0x30(%rsp) leaq 0xa0(%rsp), %rax movq %rax, 0x1d8(%rsp) movq 0x1d8(%rsp), %rax movq %rax, 0x260(%rsp) movq 0x260(%rsp), %rax movq %rax, 0x28(%rsp) cmpq $0x0, 0x8(%rax) je 0x53d536 movq 0x28(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x25c(%rsp) # imm = 0xFFFFFFFF movl 0x25c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x258(%rsp) cmpl $0x1, 0x258(%rsp) jne 0x53d536 movq 0x28(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x53d50a movq 0x28(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x53d508 jmp 0x53d534 movq 0x28(%rsp), %rax movq (%rax), %rax movq %rax, 0x2f0(%rsp) cmpq $0x0, 0x2f0(%rsp) je 
0x53d532 movq 0x2f0(%rsp), %rdi callq 0x5e480 jmp 0x53d534 jmp 0x53d536 movq 0x28(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x53d58e movq %rax, %rdi callq 0x5fc90 movq 0x30(%rsp), %rax movq %rax, 0xe8(%rsp) movl $0x0, 0x9c(%rsp) movl 0x9c(%rsp), %eax cmpl 0x1b0(%rsp), %eax jge 0x53d7b2 movl $0x0, 0x98(%rsp) movl 0x98(%rsp), %eax cmpl 0x1b4(%rsp), %eax jge 0x53d79a movl 0x418(%rsp), %eax imull 0x9c(%rsp), %eax leaq 0x100(%rsp), %rcx movq %rcx, 0x2a8(%rsp) movl %eax, 0x2a4(%rsp) movq 0x2a8(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0x2a4(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x20(%rsp) movq 0x20(%rsp), %rax movl 0x410(%rsp), %ecx imull 0x98(%rsp), %ecx shll $0x2, %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x90(%rsp) movl $0x0, 0x8c(%rsp) movl 0x8c(%rsp), %eax cmpl 0x1a0(%rsp), %eax jge 0x53d782 movl $0x0, 0x88(%rsp) movl 0x88(%rsp), %eax cmpl 0x1a4(%rsp), %eax jge 0x53d74b movq 0x90(%rsp), %rax movq %rax, 0x370(%rsp) movq 0x370(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x70(%rsp) movq 0xe8(%rsp), %rax vmovaps 0x70(%rsp), %xmm0 movq %rax, 0x368(%rsp) vmovaps %xmm0, 0x350(%rsp) vmovaps 0x350(%rsp), %xmm0 movq 0x368(%rsp), %rax vmovaps %xmm0, (%rax) movl 0x420(%rsp), %ecx shll $0x2, %ecx movq 0x90(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x90(%rsp) movq 0xe8(%rsp), %rax addq $0x10, %rax movq %rax, 0xe8(%rsp) movl 0x88(%rsp), %eax addl $0x1, %eax movl %eax, 0x88(%rsp) jmp 0x53d67a movq %rax, %rcx movl %edx, %eax movq %rcx, 0xf8(%rsp) movl %eax, 0xf4(%rsp) jmp 0x53da23 movl 0x14c(%rsp), %ecx movq 0x90(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x90(%rsp) movl 0x8c(%rsp), %eax addl $0x1, %eax movl %eax, 0x8c(%rsp) jmp 0x53d65b jmp 0x53d784 
movl 0x98(%rsp), %eax addl $0x1, %eax movl %eax, 0x98(%rsp) jmp 0x53d5c5 jmp 0x53d79c movl 0x9c(%rsp), %eax addl $0x1, %eax movl %eax, 0x9c(%rsp) jmp 0x53d5a6 leaq 0x100(%rsp), %rax movq %rax, 0x1e8(%rsp) movq 0x1e8(%rsp), %rax movq %rax, 0x240(%rsp) movq 0x240(%rsp), %rax movq %rax, 0x18(%rsp) cmpq $0x0, 0x8(%rax) je 0x53d86b movq 0x18(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x23c(%rsp) # imm = 0xFFFFFFFF movl 0x23c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x238(%rsp) cmpl $0x1, 0x238(%rsp) jne 0x53d86b movq 0x18(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x53d83f movq 0x18(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x53d83d jmp 0x53d869 movq 0x18(%rsp), %rax movq (%rax), %rax movq %rax, 0x300(%rsp) cmpq $0x0, 0x300(%rsp) je 0x53d867 movq 0x300(%rsp), %rdi callq 0x5e480 jmp 0x53d869 jmp 0x53d86b movq 0x18(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x53d8c3 movq %rax, %rdi callq 0x5fc90 jmp 0x53d8c5 movl 0x148(%rsp), %eax addl $0x1, %eax movl %eax, 0x148(%rsp) jmp 0x53ce8d movq 0x1c8(%rsp), %rsi movq 0x1c0(%rsp), %rdx movq 0x1b8(%rsp), %rcx movq 0x430(%rsp), %r8 leaq 0x150(%rsp), %rdi callq 0x5f68e0 jmp 0x53d90a leaq 0x150(%rsp), %rax movq %rax, 0x1f8(%rsp) movq 0x1f8(%rsp), %rax movq %rax, 0x220(%rsp) movq 0x220(%rsp), %rax movq %rax, 0x10(%rsp) cmpq $0x0, 0x8(%rax) je 0x53d9c3 movq 0x10(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x21c(%rsp) # imm = 0xFFFFFFFF movl 0x21c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x218(%rsp) cmpl $0x1, 0x218(%rsp) jne 0x53d9c3 movq 0x10(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x53d997 movq 0x10(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x53d995 jmp 0x53d9c1 movq 0x10(%rsp), %rax movq (%rax), %rax 
movq %rax, 0x310(%rsp) cmpq $0x0, 0x310(%rsp) je 0x53d9bf movq 0x310(%rsp), %rdi callq 0x5e480 jmp 0x53d9c1 jmp 0x53d9c3 movq 0x10(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x53da1b movq %rax, %rdi callq 0x5fc90 addq $0x408, %rsp # imm = 0x408 retq leaq 0x150(%rsp), %rax movq %rax, 0x200(%rsp) movq 0x200(%rsp), %rax movq %rax, 0x210(%rsp) movq 0x210(%rsp), %rax movq %rax, 0x8(%rsp) cmpq $0x0, 0x8(%rax) je 0x53dadc movq 0x8(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x20c(%rsp) # imm = 0xFFFFFFFF movl 0x20c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x208(%rsp) cmpl $0x1, 0x208(%rsp) jne 0x53dadc movq 0x8(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x53dab0 movq 0x8(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x53daae jmp 0x53dada movq 0x8(%rsp), %rax movq (%rax), %rax movq %rax, 0x318(%rsp) cmpq $0x0, 0x318(%rsp) je 0x53dad8 movq 0x318(%rsp), %rdi callq 0x5e480 jmp 0x53dada jmp 0x53dadc movq 0x8(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x53db34 movq %rax, %rdi callq 0x5fc90 jmp 0x53db36 movq 0xf8(%rsp), %rdi callq 0x5e3b0 nopw %cs:(%rax,%rax)
/ysh329[P]ncnn/src/layer/x86/convolution_sgemm_pack4to8.h
ncnn::conv1x1s2_sgemm_pack8to4_avx(ncnn::Mat const&, ncnn::Mat&, ncnn::Mat const&, ncnn::Mat const&, ncnn::Option const&)
static void conv1x1s2_sgemm_pack8to4_avx(const Mat& bottom_blob, Mat& top_blob, const Mat& kernel, const Mat& _bias, const Option& opt) { int w = bottom_blob.w; int channels = bottom_blob.c; size_t elemsize = bottom_blob.elemsize; int elempack = bottom_blob.elempack; int outw = top_blob.w; int outh = top_blob.h; const int tailstep = (w - 2 * outw + w) * 8; Mat bottom_blob_shrinked; bottom_blob_shrinked.create(outw, outh, channels, elemsize, elempack, opt.workspace_allocator); #pragma omp parallel for num_threads(opt.num_threads) for (int p = 0; p < channels; p++) { const float* r0 = bottom_blob.channel(p); float* outptr = bottom_blob_shrinked.channel(p); for (int i = 0; i < outh; i++) { int j = 0; for (; j < outw; j++) { __m256 _v = _mm256_load_ps(r0); _mm256_store_ps(outptr, _v); r0 += 16; outptr += 8; } r0 += tailstep; } } conv1x1s1_sgemm_pack8to4_avx(bottom_blob_shrinked, top_blob, kernel, _bias, opt); }
pushq %rbp movq %rsp, %rbp andq $-0x20, %rsp subq $0x420, %rsp # imm = 0x420 movq %rdi, 0x1e8(%rsp) movq %rsi, 0x1e0(%rsp) movq %rdx, 0x1d8(%rsp) movq %rcx, 0x1d0(%rsp) movq %r8, 0x1c8(%rsp) movq 0x1e8(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x1c4(%rsp) movq 0x1e8(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0x1c0(%rsp) movq 0x1e8(%rsp), %rax movq 0x10(%rax), %rax movq %rax, 0x1b8(%rsp) movq 0x1e8(%rsp), %rax movl 0x18(%rax), %eax movl %eax, 0x1b4(%rsp) movq 0x1e0(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x1b0(%rsp) movq 0x1e0(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0x1ac(%rsp) movl 0x1c4(%rsp), %ecx movl 0x1b0(%rsp), %edx addl %edx, %edx movl %ecx, %eax subl %edx, %eax addl %ecx, %eax shll $0x3, %eax movl %eax, 0x1a8(%rsp) leaq 0x160(%rsp), %rdi movq %rdi, 0x1f0(%rsp) movq 0x1f0(%rsp), %rax movq %rax, 0x78(%rsp) movq $0x0, (%rax) movq $0x0, 0x8(%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movq $0x0, 0x20(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movl 0x1b0(%rsp), %esi movl 0x1ac(%rsp), %edx movl 0x1c0(%rsp), %ecx movq 0x1b8(%rsp), %r8 movl 0x1b4(%rsp), %r9d movq 0x1c8(%rsp), %rax movq 0x10(%rax), %r10 movq %rsp, %rax movq %r10, (%rax) callq 0x62060 jmp 0x550ea1 movl $0x0, 0x150(%rsp) movl 0x150(%rsp), %eax cmpl 0x1c0(%rsp), %eax jge 0x551850 movq 0x1e8(%rsp), %rcx movl 0x150(%rsp), %eax leaq 0x100(%rsp), %rdx movq %rdx, 0x2d8(%rsp) movq %rcx, 0x2d0(%rsp) movl %eax, 0x2cc(%rsp) movq 0x2d0(%rsp), %rax movq %rax, 0x68(%rsp) movb $0x0, 0x2cb(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x2cc(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x100(%rsp), %r10 movq %r10, 0x3b0(%rsp) movl %r9d, 0x3ac(%rsp) movl %r8d, 0x3a8(%rsp) movl %edi, 0x3a4(%rsp) movq %rsi, 0x398(%rsp) movq %rdx, 0x390(%rsp) 
movl %ecx, 0x38c(%rsp) movq %rax, 0x380(%rsp) movq 0x3b0(%rsp), %rcx movq %rcx, 0x70(%rsp) movq 0x398(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x390(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x38c(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x380(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x3ac(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x3a8(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x3a4(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x408(%rsp) movl $0x10, 0x404(%rsp) movq 0x408(%rsp), %rax movslq 0x404(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x404(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rcx movq 0x70(%rsp), %rax movq %rcx, 0x40(%rax) movq 0x68(%rsp), %rax movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x128(%rsp) cmpl $0x4, 0x28(%rax) jne 0x551075 movq 0x68(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x140(%rsp) movb $0x1, 0x2cb(%rsp) testb $0x1, 0x2cb(%rsp) jne 0x5511a2 leaq 0x100(%rsp), %rax movq %rax, 0x2e0(%rsp) movq 0x2e0(%rsp), %rax movq %rax, 0x2f0(%rsp) movq 0x2f0(%rsp), %rax movq %rax, 0x60(%rsp) cmpq $0x0, 0x8(%rax) je 0x55114a movq 0x60(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x2ec(%rsp) # imm = 0xFFFFFFFF movl 0x2ec(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x2e8(%rsp) cmpl $0x1, 0x2e8(%rsp) jne 0x55114a movq 0x60(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x55111b movq 0x60(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x551119 jmp 0x551148 movq 0x60(%rsp), %rax movq (%rax), %rax movq %rax, 0x2f8(%rsp) cmpq $0x0, 0x2f8(%rsp) je 0x551146 movq 0x2f8(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x551148 jmp 0x55114a movq 0x60(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 
0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x5511a2 movq %rax, %rdi callq 0x5fc90 jmp 0x5511a4 leaq 0x100(%rsp), %rax movq %rax, 0x2c0(%rsp) movq 0x2c0(%rsp), %rax movq (%rax), %rax movq %rax, 0x58(%rsp) leaq 0x100(%rsp), %rax movq %rax, 0x1f8(%rsp) movq 0x1f8(%rsp), %rax movq %rax, 0x280(%rsp) movq 0x280(%rsp), %rax movq %rax, 0x50(%rsp) cmpq $0x0, 0x8(%rax) je 0x551283 movq 0x50(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x27c(%rsp) # imm = 0xFFFFFFFF movl 0x27c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x278(%rsp) cmpl $0x1, 0x278(%rsp) jne 0x551283 movq 0x50(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x551254 movq 0x50(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x551252 jmp 0x551281 movq 0x50(%rsp), %rax movq (%rax), %rax movq %rax, 0x308(%rsp) cmpq $0x0, 0x308(%rsp) je 0x55127f movq 0x308(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x551281 jmp 0x551283 movq 0x50(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x5512db movq %rax, %rdi callq 0x5fc90 movq 0x58(%rsp), %rax movq %rax, 0x148(%rsp) movl 0x150(%rsp), %eax leaq 0xb0(%rsp), %rcx movq %rcx, 0x298(%rsp) leaq 0x160(%rsp), %rcx movq %rcx, 0x290(%rsp) movl %eax, 0x28c(%rsp) movq 0x290(%rsp), %rax movq %rax, 0x40(%rsp) movb $0x0, 0x28b(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x28c(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0xb0(%rsp), %r10 movq %r10, 0x3e8(%rsp) movl %r9d, 0x3e4(%rsp) movl %r8d, 0x3e0(%rsp) movl %edi, 0x3dc(%rsp) movq %rsi, 0x3d0(%rsp) movq %rdx, 0x3c8(%rsp) movl %ecx, 0x3c4(%rsp) 
movq %rax, 0x3b8(%rsp) movq 0x3e8(%rsp), %rcx movq %rcx, 0x48(%rsp) movq 0x3d0(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x3c8(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x3c4(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x3b8(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x3e4(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x3e0(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x3dc(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x3f8(%rsp) movl $0x10, 0x3f4(%rsp) movq 0x3f8(%rsp), %rax movslq 0x3f4(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x3f4(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rcx movq 0x48(%rsp), %rax movq %rcx, 0x40(%rax) movq 0x40(%rsp), %rax movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0xd8(%rsp) cmpl $0x4, 0x28(%rax) jne 0x55149d movq 0x40(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0xf0(%rsp) movb $0x1, 0x28b(%rsp) testb $0x1, 0x28b(%rsp) jne 0x5515ca leaq 0xb0(%rsp), %rax movq %rax, 0x2a0(%rsp) movq 0x2a0(%rsp), %rax movq %rax, 0x2b0(%rsp) movq 0x2b0(%rsp), %rax movq %rax, 0x38(%rsp) cmpq $0x0, 0x8(%rax) je 0x551572 movq 0x38(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x2ac(%rsp) # imm = 0xFFFFFFFF movl 0x2ac(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x2a8(%rsp) cmpl $0x1, 0x2a8(%rsp) jne 0x551572 movq 0x38(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x551543 movq 0x38(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x551541 jmp 0x551570 movq 0x38(%rsp), %rax movq (%rax), %rax movq %rax, 0x300(%rsp) cmpq $0x0, 0x300(%rsp) je 0x55156e movq 0x300(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x551570 jmp 0x551572 movq 0x38(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 
0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x5515ca movq %rax, %rdi callq 0x5fc90 jmp 0x5515cc leaq 0xb0(%rsp), %rax movq %rax, 0x2b8(%rsp) movq 0x2b8(%rsp), %rax movq (%rax), %rax movq %rax, 0x30(%rsp) leaq 0xb0(%rsp), %rax movq %rax, 0x208(%rsp) movq 0x208(%rsp), %rax movq %rax, 0x260(%rsp) movq 0x260(%rsp), %rax movq %rax, 0x28(%rsp) cmpq $0x0, 0x8(%rax) je 0x5516ab movq 0x28(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x25c(%rsp) # imm = 0xFFFFFFFF movl 0x25c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x258(%rsp) cmpl $0x1, 0x258(%rsp) jne 0x5516ab movq 0x28(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x55167c movq 0x28(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x55167a jmp 0x5516a9 movq 0x28(%rsp), %rax movq (%rax), %rax movq %rax, 0x318(%rsp) cmpq $0x0, 0x318(%rsp) je 0x5516a7 movq 0x318(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x5516a9 jmp 0x5516ab movq 0x28(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x551703 movq %rax, %rdi callq 0x5fc90 movq 0x30(%rsp), %rax movq %rax, 0xf8(%rsp) movl $0x0, 0xac(%rsp) movl 0xac(%rsp), %eax cmpl 0x1ac(%rsp), %eax jge 0x551838 movl $0x0, 0xa8(%rsp) movl 0xa8(%rsp), %eax cmpl 0x1b0(%rsp), %eax jge 0x551801 movq 0x148(%rsp), %rax movq %rax, 0x338(%rsp) movq 0x338(%rsp), %rax vmovaps (%rax), %ymm0 vmovaps %ymm0, 0x80(%rsp) movq 0xf8(%rsp), %rax vmovaps 0x80(%rsp), %ymm0 movq %rax, 0x378(%rsp) vmovaps %ymm0, 0x340(%rsp) vmovaps 0x340(%rsp), %ymm0 movq 0x378(%rsp), %rax vmovaps %ymm0, (%rax) movq 0x148(%rsp), %rax addq $0x40, %rax movq %rax, 0x148(%rsp) movq 0xf8(%rsp), %rax addq $0x20, %rax movq %rax, 0xf8(%rsp) movl 0xa8(%rsp), %eax addl $0x1, %eax movl %eax, 0xa8(%rsp) jmp 0x55173a movq %rax, %rcx movl 
%edx, %eax movq %rcx, 0x158(%rsp) movl %eax, 0x154(%rsp) jmp 0x551998 movl 0x1a8(%rsp), %ecx movq 0x148(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x148(%rsp) movl 0xac(%rsp), %eax addl $0x1, %eax movl %eax, 0xac(%rsp) jmp 0x55171b jmp 0x55183a movl 0x150(%rsp), %eax addl $0x1, %eax movl %eax, 0x150(%rsp) jmp 0x550eac movq 0x1e0(%rsp), %rsi movq 0x1d8(%rsp), %rdx movq 0x1d0(%rsp), %rcx movq 0x1c8(%rsp), %r8 leaq 0x160(%rsp), %rdi vzeroupper callq 0x550930 jmp 0x551882 leaq 0x160(%rsp), %rax movq %rax, 0x218(%rsp) movq 0x218(%rsp), %rax movq %rax, 0x240(%rsp) movq 0x240(%rsp), %rax movq %rax, 0x20(%rsp) cmpq $0x0, 0x8(%rax) je 0x55193b movq 0x20(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x23c(%rsp) # imm = 0xFFFFFFFF movl 0x23c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x238(%rsp) cmpl $0x1, 0x238(%rsp) jne 0x55193b movq 0x20(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x55190f movq 0x20(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x55190d jmp 0x551939 movq 0x20(%rsp), %rax movq (%rax), %rax movq %rax, 0x328(%rsp) cmpq $0x0, 0x328(%rsp) je 0x551937 movq 0x328(%rsp), %rdi callq 0x5e480 jmp 0x551939 jmp 0x55193b movq 0x20(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x551993 movq %rax, %rdi callq 0x5fc90 movq %rbp, %rsp popq %rbp retq leaq 0x160(%rsp), %rax movq %rax, 0x220(%rsp) movq 0x220(%rsp), %rax movq %rax, 0x230(%rsp) movq 0x230(%rsp), %rax movq %rax, 0x18(%rsp) cmpq $0x0, 0x8(%rax) je 0x551a51 movq 0x18(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x22c(%rsp) # imm = 0xFFFFFFFF movl 0x22c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x228(%rsp) cmpl $0x1, 0x228(%rsp) jne 0x551a51 movq 0x18(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x551a25 movq 0x18(%rsp), %rax movq (%rax), 
%rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x551a23 jmp 0x551a4f movq 0x18(%rsp), %rax movq (%rax), %rax movq %rax, 0x330(%rsp) cmpq $0x0, 0x330(%rsp) je 0x551a4d movq 0x330(%rsp), %rdi callq 0x5e480 jmp 0x551a4f jmp 0x551a51 movq 0x18(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x551aa9 movq %rax, %rdi callq 0x5fc90 jmp 0x551aab movq 0x158(%rsp), %rdi callq 0x5e3b0 nopl (%rax,%rax)
/ysh329[P]ncnn/src/layer/x86/convolution_1x1_pack8to4.h
ncnn::convolution_pack8to4_avx(ncnn::Mat const&, ncnn::Mat&, ncnn::Mat const&, ncnn::Mat const&, int, int, int, int, int, int, int, ncnn::Mat const&, ncnn::Option const&)
static void convolution_pack8to4_avx(const Mat& bottom_blob, Mat& top_blob, const Mat& weight_data_packed, const Mat& bias_data, int kernel_w, int kernel_h, int dilation_w, int dilation_h, int stride_w, int stride_h, int activation_type, const Mat& activation_params, const Option& opt) { int w = bottom_blob.w; int channels = bottom_blob.c; int outw = top_blob.w; int outh = top_blob.h; int outch = top_blob.c; const int maxk = kernel_w * kernel_h; // kernel offsets std::vector<int> _space_ofs(maxk); int* space_ofs = &_space_ofs[0]; { int p1 = 0; int p2 = 0; int gap = w * dilation_h - kernel_w * dilation_w; for (int i = 0; i < kernel_h; i++) { for (int j = 0; j < kernel_w; j++) { space_ofs[p1] = p2; p1++; p2 += dilation_w; } p2 += gap; } } const float* bias_data_ptr = bias_data; #pragma omp parallel for num_threads(opt.num_threads) for (int p = 0; p < outch; p++) { float* outptr = top_blob.channel(p); for (int i = 0; i < outh; i++) { for (int j = 0; j < outw; j++) { __m128 _sum = _mm_setzero_ps(); if (bias_data_ptr) { _sum = _mm_loadu_ps(bias_data_ptr + p * 4); } const float* kptr = weight_data_packed.channel(p); // channels for (int q = 0; q < channels; q++) { const Mat m = bottom_blob.channel(q); const float* sptr = m.row(i * stride_h) + j * stride_w * 8; for (int k = 0; k < maxk; k++) { const float* slptr = sptr + space_ofs[k] * 8; __m128 _val0 = _mm_broadcast_ss(slptr); __m128 _val1 = _mm_broadcast_ss(slptr + 1); __m128 _val2 = _mm_broadcast_ss(slptr + 2); __m128 _val3 = _mm_broadcast_ss(slptr + 3); __m128 _val4 = _mm_broadcast_ss(slptr + 4); __m128 _val5 = _mm_broadcast_ss(slptr + 5); __m128 _val6 = _mm_broadcast_ss(slptr + 6); __m128 _val7 = _mm_broadcast_ss(slptr + 7); __m128 _w0 = _mm_load_ps(kptr); __m128 _w1 = _mm_load_ps(kptr + 4); _sum = _mm_comp_fmadd_ps(_val0, _w0, _sum); _sum = _mm_comp_fmadd_ps(_val1, _w1, _sum); __m128 _w2 = _mm_load_ps(kptr + 8); __m128 _w3 = _mm_load_ps(kptr + 12); _sum = _mm_comp_fmadd_ps(_val2, _w2, _sum); _sum = 
_mm_comp_fmadd_ps(_val3, _w3, _sum); __m128 _w4 = _mm_load_ps(kptr + 16); __m128 _w5 = _mm_load_ps(kptr + 20); _sum = _mm_comp_fmadd_ps(_val4, _w4, _sum); _sum = _mm_comp_fmadd_ps(_val5, _w5, _sum); __m128 _w6 = _mm_load_ps(kptr + 24); __m128 _w7 = _mm_load_ps(kptr + 28); _sum = _mm_comp_fmadd_ps(_val6, _w6, _sum); _sum = _mm_comp_fmadd_ps(_val7, _w7, _sum); kptr += 32; } } _sum = activation_sse(_sum, activation_type, activation_params); _mm_storeu_ps(outptr + j * 4, _sum); } outptr += outw * 4; } } }
subq $0x29b8, %rsp # imm = 0x29B8 movq 0x29f0(%rsp), %rax movq 0x29e8(%rsp), %rax movl 0x29e0(%rsp), %eax movl 0x29d8(%rsp), %eax movl 0x29d0(%rsp), %eax movl 0x29c8(%rsp), %eax movl 0x29c0(%rsp), %eax movq %rdi, 0x560(%rsp) movq %rsi, 0x558(%rsp) movq %rdx, 0x550(%rsp) movq %rcx, 0x548(%rsp) movl %r8d, 0x544(%rsp) movl %r9d, 0x540(%rsp) movq 0x560(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x53c(%rsp) movq 0x560(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0x538(%rsp) movq 0x558(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x534(%rsp) movq 0x558(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0x530(%rsp) movq 0x558(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0x52c(%rsp) movl 0x544(%rsp), %eax movl 0x540(%rsp), %ecx imull %ecx, %eax movl %eax, 0x528(%rsp) movslq 0x528(%rsp), %rax movq %rax, 0x290(%rsp) leaq 0x50f(%rsp), %rdi movq %rdi, 0x298(%rsp) callq 0x99670 movq 0x290(%rsp), %rsi movq 0x298(%rsp), %rdx leaq 0x510(%rsp), %rdi callq 0xa5960 jmp 0x552a90 leaq 0x50f(%rsp), %rdi callq 0x99e50 leaq 0x510(%rsp), %rdi xorl %eax, %eax movl %eax, %esi callq 0x98a00 movq %rax, 0x4f0(%rsp) movl $0x0, 0x4ec(%rsp) movl $0x0, 0x4e8(%rsp) movl 0x53c(%rsp), %eax imull 0x29c8(%rsp), %eax movl 0x544(%rsp), %ecx imull 0x29c0(%rsp), %ecx subl %ecx, %eax movl %eax, 0x4e4(%rsp) movl $0x0, 0x4e0(%rsp) movl 0x4e0(%rsp), %eax cmpl 0x540(%rsp), %eax jge 0x552bd1 movl $0x0, 0x4dc(%rsp) movl 0x4dc(%rsp), %eax cmpl 0x544(%rsp), %eax jge 0x552ba6 movl 0x4e8(%rsp), %edx movq 0x4f0(%rsp), %rax movslq 0x4ec(%rsp), %rcx movl %edx, (%rax,%rcx,4) movl 0x4ec(%rsp), %eax addl $0x1, %eax movl %eax, 0x4ec(%rsp) movl 0x29c0(%rsp), %eax addl 0x4e8(%rsp), %eax movl %eax, 0x4e8(%rsp) movl 0x4dc(%rsp), %eax addl $0x1, %eax movl %eax, 0x4dc(%rsp) jmp 0x552b1d movq %rax, %rcx movl %edx, %eax movq %rcx, 0x500(%rsp) movl %eax, 0x4fc(%rsp) leaq 0x50f(%rsp), %rdi callq 0x99e50 jmp 0x557481 movl 0x4e4(%rsp), %eax addl 0x4e8(%rsp), %eax movl %eax, 0x4e8(%rsp) movl 0x4e0(%rsp), %eax addl $0x1, %eax movl %eax, 
0x4e0(%rsp) jmp 0x552afe movq 0x548(%rsp), %rax movq %rax, 0x638(%rsp) movq 0x638(%rsp), %rax movq (%rax), %rax movq %rax, 0x288(%rsp) movq 0x288(%rsp), %rax movq %rax, 0x4d0(%rsp) movl $0x0, 0x4cc(%rsp) movl 0x4cc(%rsp), %eax cmpl 0x52c(%rsp), %eax jge 0x55746c movq 0x558(%rsp), %rcx movl 0x4cc(%rsp), %eax leaq 0x478(%rsp), %rdx movq %rdx, 0x608(%rsp) movq %rcx, 0x600(%rsp) movl %eax, 0x5fc(%rsp) movq 0x600(%rsp), %rax movq %rax, 0x278(%rsp) movb $0x0, 0x5fb(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x5fc(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x478(%rsp), %r10 movq %r10, 0x2980(%rsp) movl %r9d, 0x297c(%rsp) movl %r8d, 0x2978(%rsp) movl %edi, 0x2974(%rsp) movq %rsi, 0x2968(%rsp) movq %rdx, 0x2960(%rsp) movl %ecx, 0x295c(%rsp) movq %rax, 0x2950(%rsp) movq 0x2980(%rsp), %rcx movq %rcx, 0x280(%rsp) movq 0x2968(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x2960(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x295c(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x2950(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x297c(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x2978(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x2974(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x2990(%rsp) movl $0x10, 0x298c(%rsp) movq 0x2990(%rsp), %rax movslq 0x298c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x298c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rcx movq 0x280(%rsp), %rax movq %rcx, 0x40(%rax) movq 0x278(%rsp), %rax movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x4a0(%rsp) cmpl $0x4, 0x28(%rax) jne 0x552de7 movq 0x278(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x4b8(%rsp) movb $0x1, 
0x5fb(%rsp) testb $0x1, 0x5fb(%rsp) jne 0x552f20 leaq 0x478(%rsp), %rax movq %rax, 0x610(%rsp) movq 0x610(%rsp), %rax movq %rax, 0x620(%rsp) movq 0x620(%rsp), %rax movq %rax, 0x270(%rsp) cmpq $0x0, 0x8(%rax) je 0x552ec5 movq 0x270(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x61c(%rsp) # imm = 0xFFFFFFFF movl 0x61c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x618(%rsp) cmpl $0x1, 0x618(%rsp) jne 0x552ec5 movq 0x270(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x552e96 movq 0x270(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x552e94 jmp 0x552ec3 movq 0x270(%rsp), %rax movq (%rax), %rax movq %rax, 0x6c0(%rsp) cmpq $0x0, 0x6c0(%rsp) je 0x552ec1 movq 0x6c0(%rsp), %rdi callq 0x5e480 jmp 0x552ec3 jmp 0x552ec5 movq 0x270(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x552f20 movq %rax, %rdi callq 0x5fc90 jmp 0x552f22 leaq 0x478(%rsp), %rax movq %rax, 0x628(%rsp) movq 0x628(%rsp), %rax movq (%rax), %rax movq %rax, 0x268(%rsp) leaq 0x478(%rsp), %rax movq %rax, 0x568(%rsp) movq 0x568(%rsp), %rax movq %rax, 0x5f0(%rsp) movq 0x5f0(%rsp), %rax movq %rax, 0x260(%rsp) cmpq $0x0, 0x8(%rax) je 0x55300d movq 0x260(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x5ec(%rsp) # imm = 0xFFFFFFFF movl 0x5ec(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x5e8(%rsp) cmpl $0x1, 0x5e8(%rsp) jne 0x55300d movq 0x260(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x552fde movq 0x260(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x552fdc jmp 0x55300b movq 0x260(%rsp), %rax movq (%rax), %rax movq %rax, 0x6c8(%rsp) cmpq $0x0, 0x6c8(%rsp) je 0x553009 movq 0x6c8(%rsp), %rdi callq 0x5e480 jmp 0x55300b jmp 0x55300d movq 0x260(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) 
movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x553068 movq %rax, %rdi callq 0x5fc90 movq 0x268(%rsp), %rax movq %rax, 0x4c0(%rsp) movl $0x0, 0x474(%rsp) movl 0x474(%rsp), %eax cmpl 0x530(%rsp), %eax jge 0x557454 movl $0x0, 0x470(%rsp) movl 0x470(%rsp), %eax cmpl 0x534(%rsp), %eax jge 0x55741a vxorps %xmm0, %xmm0, %xmm0 vmovaps %xmm0, 0xa40(%rsp) vmovaps 0xa40(%rsp), %xmm0 vmovaps %xmm0, 0x250(%rsp) vmovaps 0x250(%rsp), %xmm0 vmovaps %xmm0, 0x460(%rsp) cmpq $0x0, 0x4d0(%rsp) je 0x55312b movq 0x4d0(%rsp), %rax movl 0x4cc(%rsp), %ecx shll $0x2, %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x6f8(%rsp) movq 0x6f8(%rsp), %rax vmovups (%rax), %xmm0 vmovaps %xmm0, 0x460(%rsp) movq 0x550(%rsp), %rcx movl 0x4cc(%rsp), %eax leaq 0x410(%rsp), %rdx movq %rdx, 0x690(%rsp) movq %rcx, 0x688(%rsp) movl %eax, 0x684(%rsp) movq 0x688(%rsp), %rax movq %rax, 0x240(%rsp) movb $0x0, 0x683(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x684(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x410(%rsp), %r10 movq %r10, 0x2910(%rsp) movl %r9d, 0x290c(%rsp) movl %r8d, 0x2908(%rsp) movl %edi, 0x2904(%rsp) movq %rsi, 0x28f8(%rsp) movq %rdx, 0x28f0(%rsp) movl %ecx, 0x28ec(%rsp) movq %rax, 0x28e0(%rsp) movq 0x2910(%rsp), %rcx movq %rcx, 0x248(%rsp) movq 0x28f8(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x28f0(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x28ec(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x28e0(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x290c(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x2908(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x2904(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), 
%rax movq %rax, 0x29b0(%rsp) movl $0x10, 0x29ac(%rsp) movq 0x29b0(%rsp), %rax movslq 0x29ac(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x29ac(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rcx movq 0x248(%rsp), %rax movq %rcx, 0x40(%rax) movq 0x240(%rsp), %rax movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x438(%rsp) cmpl $0x4, 0x28(%rax) jne 0x5532ef movq 0x240(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x450(%rsp) movb $0x1, 0x683(%rsp) testb $0x1, 0x683(%rsp) jne 0x553428 leaq 0x410(%rsp), %rax movq %rax, 0x698(%rsp) movq 0x698(%rsp), %rax movq %rax, 0x6a8(%rsp) movq 0x6a8(%rsp), %rax movq %rax, 0x238(%rsp) cmpq $0x0, 0x8(%rax) je 0x5533cd movq 0x238(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x6a4(%rsp) # imm = 0xFFFFFFFF movl 0x6a4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x6a0(%rsp) cmpl $0x1, 0x6a0(%rsp) jne 0x5533cd movq 0x238(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x55339e movq 0x238(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x55339c jmp 0x5533cb movq 0x238(%rsp), %rax movq (%rax), %rax movq %rax, 0x6b0(%rsp) cmpq $0x0, 0x6b0(%rsp) je 0x5533c9 movq 0x6b0(%rsp), %rdi callq 0x5e480 jmp 0x5533cb jmp 0x5533cd movq 0x238(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x553428 movq %rax, %rdi callq 0x5fc90 jmp 0x55342a leaq 0x410(%rsp), %rax movq %rax, 0x630(%rsp) movq 0x630(%rsp), %rax movq (%rax), %rax movq %rax, 0x230(%rsp) leaq 0x410(%rsp), %rax movq %rax, 0x578(%rsp) movq 0x578(%rsp), %rax movq %rax, 0x5d0(%rsp) movq 0x5d0(%rsp), %rax movq %rax, 0x228(%rsp) cmpq $0x0, 0x8(%rax) je 0x553515 movq 0x228(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x5cc(%rsp) # imm = 0xFFFFFFFF movl 
0x5cc(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x5c8(%rsp) cmpl $0x1, 0x5c8(%rsp) jne 0x553515 movq 0x228(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x5534e6 movq 0x228(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x5534e4 jmp 0x553513 movq 0x228(%rsp), %rax movq (%rax), %rax movq %rax, 0x6d8(%rsp) cmpq $0x0, 0x6d8(%rsp) je 0x553511 movq 0x6d8(%rsp), %rdi callq 0x5e480 jmp 0x553513 jmp 0x553515 movq 0x228(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x553570 movq %rax, %rdi callq 0x5fc90 movq 0x230(%rsp), %rax movq %rax, 0x458(%rsp) movl $0x0, 0x40c(%rsp) movl 0x40c(%rsp), %eax cmpl 0x538(%rsp), %eax jge 0x554446 movq 0x560(%rsp), %rcx movl 0x40c(%rsp), %eax leaq 0x3c0(%rsp), %rdx movq %rdx, 0x660(%rsp) movq %rcx, 0x658(%rsp) movl %eax, 0x654(%rsp) movq 0x658(%rsp), %rax movq %rax, 0x218(%rsp) movb $0x0, 0x653(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x654(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x3c0(%rsp), %r10 movq %r10, 0x2948(%rsp) movl %r9d, 0x2944(%rsp) movl %r8d, 0x2940(%rsp) movl %edi, 0x293c(%rsp) movq %rsi, 0x2930(%rsp) movq %rdx, 0x2928(%rsp) movl %ecx, 0x2924(%rsp) movq %rax, 0x2918(%rsp) movq 0x2948(%rsp), %rcx movq %rcx, 0x220(%rsp) movq 0x2930(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x2928(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x2924(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x2918(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x2944(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x2940(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x293c(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax 
movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x29a0(%rsp) movl $0x10, 0x299c(%rsp) movq 0x29a0(%rsp), %rax movslq 0x299c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x299c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rcx movq 0x220(%rsp), %rax movq %rcx, 0x40(%rax) movq 0x218(%rsp), %rax movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x3e8(%rsp) cmpl $0x4, 0x28(%rax) jne 0x553763 movq 0x218(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x400(%rsp) movb $0x1, 0x653(%rsp) testb $0x1, 0x653(%rsp) jne 0x55389c leaq 0x3c0(%rsp), %rax movq %rax, 0x668(%rsp) movq 0x668(%rsp), %rax movq %rax, 0x678(%rsp) movq 0x678(%rsp), %rax movq %rax, 0x210(%rsp) cmpq $0x0, 0x8(%rax) je 0x553841 movq 0x210(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x674(%rsp) # imm = 0xFFFFFFFF movl 0x674(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x670(%rsp) cmpl $0x1, 0x670(%rsp) jne 0x553841 movq 0x210(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x553812 movq 0x210(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x553810 jmp 0x55383f movq 0x210(%rsp), %rax movq (%rax), %rax movq %rax, 0x6b8(%rsp) cmpq $0x0, 0x6b8(%rsp) je 0x55383d movq 0x6b8(%rsp), %rdi callq 0x5e480 jmp 0x55383f jmp 0x553841 movq 0x210(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x55389c movq %rax, %rdi callq 0x5fc90 jmp 0x55389e movl 0x474(%rsp), %eax imull 0x29d8(%rsp), %eax leaq 0x3c0(%rsp), %rcx movq %rcx, 0x648(%rsp) movl %eax, 0x644(%rsp) movq 0x648(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0x644(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x208(%rsp) movq 0x208(%rsp), %rax movl 
0x470(%rsp), %ecx imull 0x29d0(%rsp), %ecx shll $0x3, %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x3b8(%rsp) movl $0x0, 0x3b4(%rsp) movl 0x3b4(%rsp), %eax cmpl 0x528(%rsp), %eax jge 0x55430b movq 0x3b8(%rsp), %rax movq 0x4f0(%rsp), %rcx movslq 0x3b4(%rsp), %rdx movl (%rcx,%rdx,4), %ecx shll $0x3, %ecx movslq %ecx, %rcx leaq (%rax,%rcx,4), %rax movq %rax, 0x3a8(%rsp) movq 0x3a8(%rsp), %rax movq %rax, 0xb48(%rsp) movq 0xb48(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0xb44(%rsp) vbroadcastss 0xb44(%rsp), %xmm0 vmovaps %xmm0, 0xb30(%rsp) vmovaps 0xb30(%rsp), %xmm0 vmovaps %xmm0, 0x1f0(%rsp) vmovaps 0x1f0(%rsp), %xmm0 vmovaps %xmm0, 0x390(%rsp) movq 0x3a8(%rsp), %rax addq $0x4, %rax movq %rax, 0xb28(%rsp) movq 0xb28(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0xb24(%rsp) vbroadcastss 0xb24(%rsp), %xmm0 vmovaps %xmm0, 0xb10(%rsp) vmovaps 0xb10(%rsp), %xmm0 vmovaps %xmm0, 0x1e0(%rsp) vmovaps 0x1e0(%rsp), %xmm0 vmovaps %xmm0, 0x380(%rsp) movq 0x3a8(%rsp), %rax addq $0x8, %rax movq %rax, 0xb08(%rsp) movq 0xb08(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0xb04(%rsp) vbroadcastss 0xb04(%rsp), %xmm0 vmovaps %xmm0, 0xaf0(%rsp) vmovaps 0xaf0(%rsp), %xmm0 vmovaps %xmm0, 0x1d0(%rsp) vmovaps 0x1d0(%rsp), %xmm0 vmovaps %xmm0, 0x370(%rsp) movq 0x3a8(%rsp), %rax addq $0xc, %rax movq %rax, 0xae8(%rsp) movq 0xae8(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0xae4(%rsp) vbroadcastss 0xae4(%rsp), %xmm0 vmovaps %xmm0, 0xad0(%rsp) vmovaps 0xad0(%rsp), %xmm0 vmovaps %xmm0, 0x1c0(%rsp) vmovaps 0x1c0(%rsp), %xmm0 vmovaps %xmm0, 0x360(%rsp) movq 0x3a8(%rsp), %rax addq $0x10, %rax movq %rax, 0xac8(%rsp) movq 0xac8(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0xac4(%rsp) vbroadcastss 0xac4(%rsp), %xmm0 vmovaps %xmm0, 0xab0(%rsp) vmovaps 0xab0(%rsp), %xmm0 vmovaps %xmm0, 0x1b0(%rsp) vmovaps 0x1b0(%rsp), %xmm0 vmovaps %xmm0, 0x350(%rsp) movq 0x3a8(%rsp), %rax addq $0x14, %rax movq %rax, 0xaa8(%rsp) movq 0xaa8(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 
0xaa4(%rsp) vbroadcastss 0xaa4(%rsp), %xmm0 vmovaps %xmm0, 0xa90(%rsp) vmovaps 0xa90(%rsp), %xmm0 vmovaps %xmm0, 0x1a0(%rsp) vmovaps 0x1a0(%rsp), %xmm0 vmovaps %xmm0, 0x340(%rsp) movq 0x3a8(%rsp), %rax addq $0x18, %rax movq %rax, 0xa88(%rsp) movq 0xa88(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0xa84(%rsp) vbroadcastss 0xa84(%rsp), %xmm0 vmovaps %xmm0, 0xa70(%rsp) vmovaps 0xa70(%rsp), %xmm0 vmovaps %xmm0, 0x190(%rsp) vmovaps 0x190(%rsp), %xmm0 vmovaps %xmm0, 0x330(%rsp) movq 0x3a8(%rsp), %rax addq $0x1c, %rax movq %rax, 0xa68(%rsp) movq 0xa68(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0xa64(%rsp) vbroadcastss 0xa64(%rsp), %xmm0 vmovaps %xmm0, 0xa50(%rsp) vmovaps 0xa50(%rsp), %xmm0 vmovaps %xmm0, 0x180(%rsp) vmovaps 0x180(%rsp), %xmm0 vmovaps %xmm0, 0x320(%rsp) movq 0x458(%rsp), %rax movq %rax, 0x738(%rsp) movq 0x738(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x310(%rsp) movq 0x458(%rsp), %rax addq $0x10, %rax movq %rax, 0x730(%rsp) movq 0x730(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x300(%rsp) vmovaps 0x390(%rsp), %xmm2 vmovaps 0x310(%rsp), %xmm1 vmovaps 0x460(%rsp), %xmm0 vmovaps %xmm2, 0x8b0(%rsp) vmovaps %xmm1, 0x8a0(%rsp) vmovaps %xmm0, 0x890(%rsp) vmovaps 0x8b0(%rsp), %xmm2 vmovaps 0x8a0(%rsp), %xmm1 vmovaps 0x890(%rsp), %xmm0 vmovaps %xmm2, 0x8e0(%rsp) vmovaps %xmm1, 0x8d0(%rsp) vmovaps %xmm0, 0x8c0(%rsp) vmovaps 0x8e0(%rsp), %xmm1 vmovaps 0x8d0(%rsp), %xmm0 vmovaps 0x8c0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x170(%rsp) vmovaps 0x170(%rsp), %xmm0 vmovaps %xmm0, 0x460(%rsp) vmovaps 0x380(%rsp), %xmm2 vmovaps 0x300(%rsp), %xmm1 vmovaps 0x460(%rsp), %xmm0 vmovaps %xmm2, 0x880(%rsp) vmovaps %xmm1, 0x870(%rsp) vmovaps %xmm0, 0x860(%rsp) vmovaps 0x880(%rsp), %xmm2 vmovaps 0x870(%rsp), %xmm1 vmovaps 0x860(%rsp), %xmm0 vmovaps %xmm2, 0x910(%rsp) vmovaps %xmm1, 0x900(%rsp) vmovaps %xmm0, 0x8f0(%rsp) vmovaps 0x910(%rsp), %xmm1 vmovaps 0x900(%rsp), %xmm0 vmovaps 0x8f0(%rsp), %xmm2 vfmadd213ps 
%xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x160(%rsp) vmovaps 0x160(%rsp), %xmm0 vmovaps %xmm0, 0x460(%rsp) movq 0x458(%rsp), %rax addq $0x20, %rax movq %rax, 0x728(%rsp) movq 0x728(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x2f0(%rsp) movq 0x458(%rsp), %rax addq $0x30, %rax movq %rax, 0x720(%rsp) movq 0x720(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x2e0(%rsp) vmovaps 0x370(%rsp), %xmm2 vmovaps 0x2f0(%rsp), %xmm1 vmovaps 0x460(%rsp), %xmm0 vmovaps %xmm2, 0x850(%rsp) vmovaps %xmm1, 0x840(%rsp) vmovaps %xmm0, 0x830(%rsp) vmovaps 0x850(%rsp), %xmm2 vmovaps 0x840(%rsp), %xmm1 vmovaps 0x830(%rsp), %xmm0 vmovaps %xmm2, 0x940(%rsp) vmovaps %xmm1, 0x930(%rsp) vmovaps %xmm0, 0x920(%rsp) vmovaps 0x940(%rsp), %xmm1 vmovaps 0x930(%rsp), %xmm0 vmovaps 0x920(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x150(%rsp) vmovaps 0x150(%rsp), %xmm0 vmovaps %xmm0, 0x460(%rsp) vmovaps 0x360(%rsp), %xmm2 vmovaps 0x2e0(%rsp), %xmm1 vmovaps 0x460(%rsp), %xmm0 vmovaps %xmm2, 0x820(%rsp) vmovaps %xmm1, 0x810(%rsp) vmovaps %xmm0, 0x800(%rsp) vmovaps 0x820(%rsp), %xmm2 vmovaps 0x810(%rsp), %xmm1 vmovaps 0x800(%rsp), %xmm0 vmovaps %xmm2, 0x970(%rsp) vmovaps %xmm1, 0x960(%rsp) vmovaps %xmm0, 0x950(%rsp) vmovaps 0x970(%rsp), %xmm1 vmovaps 0x960(%rsp), %xmm0 vmovaps 0x950(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x140(%rsp) vmovaps 0x140(%rsp), %xmm0 vmovaps %xmm0, 0x460(%rsp) movq 0x458(%rsp), %rax addq $0x40, %rax movq %rax, 0x718(%rsp) movq 0x718(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x2d0(%rsp) movq 0x458(%rsp), %rax addq $0x50, %rax movq %rax, 0x710(%rsp) movq 0x710(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x2c0(%rsp) vmovaps 0x350(%rsp), %xmm2 vmovaps 0x2d0(%rsp), %xmm1 vmovaps 0x460(%rsp), %xmm0 vmovaps %xmm2, 0x7f0(%rsp) vmovaps %xmm1, 0x7e0(%rsp) vmovaps %xmm0, 0x7d0(%rsp) vmovaps 0x7f0(%rsp), %xmm2 vmovaps 0x7e0(%rsp), %xmm1 vmovaps 
0x7d0(%rsp), %xmm0 vmovaps %xmm2, 0x9a0(%rsp) vmovaps %xmm1, 0x990(%rsp) vmovaps %xmm0, 0x980(%rsp) vmovaps 0x9a0(%rsp), %xmm1 vmovaps 0x990(%rsp), %xmm0 vmovaps 0x980(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x130(%rsp) vmovaps 0x130(%rsp), %xmm0 vmovaps %xmm0, 0x460(%rsp) vmovaps 0x340(%rsp), %xmm2 vmovaps 0x2c0(%rsp), %xmm1 vmovaps 0x460(%rsp), %xmm0 vmovaps %xmm2, 0x7c0(%rsp) vmovaps %xmm1, 0x7b0(%rsp) vmovaps %xmm0, 0x7a0(%rsp) vmovaps 0x7c0(%rsp), %xmm2 vmovaps 0x7b0(%rsp), %xmm1 vmovaps 0x7a0(%rsp), %xmm0 vmovaps %xmm2, 0x9d0(%rsp) vmovaps %xmm1, 0x9c0(%rsp) vmovaps %xmm0, 0x9b0(%rsp) vmovaps 0x9d0(%rsp), %xmm1 vmovaps 0x9c0(%rsp), %xmm0 vmovaps 0x9b0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x120(%rsp) vmovaps 0x120(%rsp), %xmm0 vmovaps %xmm0, 0x460(%rsp) movq 0x458(%rsp), %rax addq $0x60, %rax movq %rax, 0x708(%rsp) movq 0x708(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x2b0(%rsp) movq 0x458(%rsp), %rax addq $0x70, %rax movq %rax, 0x700(%rsp) movq 0x700(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x2a0(%rsp) vmovaps 0x330(%rsp), %xmm2 vmovaps 0x2b0(%rsp), %xmm1 vmovaps 0x460(%rsp), %xmm0 vmovaps %xmm2, 0x790(%rsp) vmovaps %xmm1, 0x780(%rsp) vmovaps %xmm0, 0x770(%rsp) vmovaps 0x790(%rsp), %xmm2 vmovaps 0x780(%rsp), %xmm1 vmovaps 0x770(%rsp), %xmm0 vmovaps %xmm2, 0xa00(%rsp) vmovaps %xmm1, 0x9f0(%rsp) vmovaps %xmm0, 0x9e0(%rsp) vmovaps 0xa00(%rsp), %xmm1 vmovaps 0x9f0(%rsp), %xmm0 vmovaps 0x9e0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x110(%rsp) vmovaps 0x110(%rsp), %xmm0 vmovaps %xmm0, 0x460(%rsp) vmovaps 0x320(%rsp), %xmm2 vmovaps 0x2a0(%rsp), %xmm1 vmovaps 0x460(%rsp), %xmm0 vmovaps %xmm2, 0x760(%rsp) vmovaps %xmm1, 0x750(%rsp) vmovaps %xmm0, 0x740(%rsp) vmovaps 0x760(%rsp), %xmm2 vmovaps 0x750(%rsp), %xmm1 vmovaps 0x740(%rsp), %xmm0 vmovaps %xmm2, 0xa30(%rsp) vmovaps %xmm1, 0xa20(%rsp) vmovaps 
%xmm0, 0xa10(%rsp) vmovaps 0xa30(%rsp), %xmm1 vmovaps 0xa20(%rsp), %xmm0 vmovaps 0xa10(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x100(%rsp) vmovaps 0x100(%rsp), %xmm0 vmovaps %xmm0, 0x460(%rsp) movq 0x458(%rsp), %rax addq $0x80, %rax movq %rax, 0x458(%rsp) movl 0x3b4(%rsp), %eax addl $0x1, %eax movl %eax, 0x3b4(%rsp) jmp 0x553926 leaq 0x3c0(%rsp), %rax movq %rax, 0x588(%rsp) movq 0x588(%rsp), %rax movq %rax, 0x5b0(%rsp) movq 0x5b0(%rsp), %rax movq %rax, 0xf8(%rsp) cmpq $0x0, 0x8(%rax) je 0x5543d3 movq 0xf8(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x5ac(%rsp) # imm = 0xFFFFFFFF movl 0x5ac(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x5a8(%rsp) cmpl $0x1, 0x5a8(%rsp) jne 0x5543d3 movq 0xf8(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x5543a4 movq 0xf8(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x5543a2 jmp 0x5543d1 movq 0xf8(%rsp), %rax movq (%rax), %rax movq %rax, 0x6e8(%rsp) cmpq $0x0, 0x6e8(%rsp) je 0x5543cf movq 0x6e8(%rsp), %rdi callq 0x5e480 jmp 0x5543d1 jmp 0x5543d3 movq 0xf8(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x55442e movq %rax, %rdi callq 0x5fc90 jmp 0x554430 movl 0x40c(%rsp), %eax addl $0x1, %eax movl %eax, 0x40c(%rsp) jmp 0x55358b vmovaps 0x460(%rsp), %xmm0 movl 0x29e0(%rsp), %ecx movq 0x29e8(%rsp), %rax vmovaps %xmm0, 0xba0(%rsp) movl %ecx, 0xb9c(%rsp) movq %rax, 0xb90(%rsp) movl 0xb9c(%rsp), %eax decl %eax movl %eax, %ecx movq %rcx, 0xf0(%rsp) subl $0x5, %eax ja 0x557389 movq 0xf0(%rsp), %rax leaq 0x18b1dd7(%rip), %rcx # 0x1e06278 movslq (%rcx,%rax,4), %rax addq %rcx, %rax jmpq *%rax vmovaps 0xba0(%rsp), %xmm1 vxorps %xmm0, %xmm0, %xmm0 vmovaps %xmm0, 0xc10(%rsp) vmovaps 0xc10(%rsp), %xmm0 vmovaps %xmm1, 0xc50(%rsp) vmovaps %xmm0, 0xc40(%rsp) 
vmovaps 0xc50(%rsp), %xmm0 vmovaps 0xc40(%rsp), %xmm1 vmaxps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0xbb0(%rsp) jmp 0x55739b vmovaps 0xba0(%rsp), %xmm1 movq 0xb90(%rsp), %rax movq %rax, 0xc08(%rsp) movq $0x0, 0xc00(%rsp) movq 0xc08(%rsp), %rax movq (%rax), %rax movq 0xc00(%rsp), %rcx vmovss (%rax,%rcx,4), %xmm0 vmovaps %xmm1, 0xcb0(%rsp) vmovss %xmm0, 0xcac(%rsp) vxorps %xmm0, %xmm0, %xmm0 vmovaps %xmm0, 0xcf0(%rsp) vmovaps 0xcf0(%rsp), %xmm2 vmovaps 0xcb0(%rsp), %xmm1 vmovaps %xmm2, 0xd10(%rsp) vmovaps %xmm1, 0xd00(%rsp) vmovaps 0xd10(%rsp), %xmm1 vmovaps 0xd00(%rsp), %xmm2 vmaxps %xmm2, %xmm1, %xmm1 vmovaps %xmm1, 0xc90(%rsp) vmovaps %xmm0, 0xce0(%rsp) vmovaps 0xce0(%rsp), %xmm1 vmovaps 0xcb0(%rsp), %xmm0 vmovaps %xmm1, 0xdd0(%rsp) vmovaps %xmm0, 0xdc0(%rsp) vmovaps 0xdd0(%rsp), %xmm0 vmovaps 0xdc0(%rsp), %xmm1 vminps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0xc80(%rsp) vmovaps 0xc90(%rsp), %xmm1 vmovss 0xcac(%rsp), %xmm0 vmovss %xmm0, 0xd3c(%rsp) vbroadcastss 0xd3c(%rsp), %xmm0 vmovaps %xmm0, 0xd20(%rsp) vmovaps 0xd20(%rsp), %xmm2 vmovaps 0xc80(%rsp), %xmm0 vmovaps %xmm2, 0x1000(%rsp) vmovaps %xmm0, 0xff0(%rsp) vmovaps 0x1000(%rsp), %xmm0 vmulps 0xff0(%rsp), %xmm0, %xmm0 vmovaps %xmm1, 0xcd0(%rsp) vmovaps %xmm0, 0xcc0(%rsp) vmovaps 0xcd0(%rsp), %xmm0 vaddps 0xcc0(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0xe0(%rsp) vmovaps 0xe0(%rsp), %xmm0 vmovaps %xmm0, 0xbb0(%rsp) jmp 0x55739b movq 0xb90(%rsp), %rax movq %rax, 0xbf8(%rsp) movq $0x0, 0xbf0(%rsp) movq 0xbf8(%rsp), %rax movq (%rax), %rax movq 0xbf0(%rsp), %rcx vmovss (%rax,%rcx,4), %xmm0 vmovss %xmm0, 0xd7c(%rsp) vbroadcastss 0xd7c(%rsp), %xmm0 vmovaps %xmm0, 0xd60(%rsp) vmovaps 0xd60(%rsp), %xmm0 vmovaps %xmm0, 0xb80(%rsp) movq 0xb90(%rsp), %rax movq %rax, 0xbe8(%rsp) movq $0x1, 0xbe0(%rsp) movq 0xbe8(%rsp), %rax movq (%rax), %rax movq 0xbe0(%rsp), %rcx vmovss (%rax,%rcx,4), %xmm0 vmovss %xmm0, 0xd5c(%rsp) vbroadcastss 0xd5c(%rsp), %xmm0 vmovaps %xmm0, 0xd40(%rsp) vmovaps 0xd40(%rsp), %xmm0 vmovaps %xmm0, 0xb70(%rsp) vmovaps 
0xba0(%rsp), %xmm1 vmovaps 0xb80(%rsp), %xmm0 vmovaps %xmm1, 0xc70(%rsp) vmovaps %xmm0, 0xc60(%rsp) vmovaps 0xc70(%rsp), %xmm0 vmovaps 0xc60(%rsp), %xmm1 vmaxps %xmm1, %xmm0, %xmm1 vmovaps 0xb70(%rsp), %xmm0 vmovaps %xmm1, 0xdf0(%rsp) vmovaps %xmm0, 0xde0(%rsp) vmovaps 0xdf0(%rsp), %xmm0 vmovaps 0xde0(%rsp), %xmm1 vminps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0xbb0(%rsp) jmp 0x55739b vmovaps 0xba0(%rsp), %xmm0 vmovaps %xmm0, 0xe10(%rsp) movl $0x3f800000, 0xe6c(%rsp) # imm = 0x3F800000 vbroadcastss 0xe6c(%rsp), %xmm0 vmovaps %xmm0, 0xe50(%rsp) vmovaps 0xe50(%rsp), %xmm0 vmovaps %xmm0, 0xe00(%rsp) vmovaps 0xe00(%rsp), %xmm0 vmovaps %xmm0, 0xb0(%rsp) vmovaps %xmm0, 0xc0(%rsp) vxorps %xmm0, %xmm0, %xmm0 vmovaps %xmm0, 0xe40(%rsp) vmovaps 0xe40(%rsp), %xmm2 vmovaps 0xe10(%rsp), %xmm1 vmovaps %xmm2, 0x1860(%rsp) vmovaps %xmm1, 0x1850(%rsp) vmovaps 0x1860(%rsp), %xmm1 vmovaps 0x1850(%rsp), %xmm2 vsubps %xmm2, %xmm1, %xmm1 vmovaps %xmm1, 0x1490(%rsp) vmovaps %xmm0, 0x1760(%rsp) vmovaps 0x1760(%rsp), %xmm0 vmovaps %xmm0, 0x1480(%rsp) vmovaps 0x18ae211(%rip), %xmm0 # 0x1e02ab0 vmovaps %xmm0, 0x1450(%rsp) vmovaps 0x1490(%rsp), %xmm0 vmovaps %xmm0, 0x17a0(%rsp) vmovaps 0x18ae1fe(%rip), %xmm0 # 0x1e02ac0 vmovaps %xmm0, 0x1790(%rsp) vmovaps 0x17a0(%rsp), %xmm0 vmovaps 0x1790(%rsp), %xmm1 vminps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x1490(%rsp) vmovaps 0x1490(%rsp), %xmm0 vmovaps %xmm0, 0x1780(%rsp) vmovaps 0x18ae1cc(%rip), %xmm0 # 0x1e02ad0 vmovaps %xmm0, 0x1770(%rsp) vmovaps 0x1780(%rsp), %xmm0 vmovaps 0x1770(%rsp), %xmm1 vmaxps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x1490(%rsp) vmovaps 0x1490(%rsp), %xmm0 vmovaps %xmm0, 0x1800(%rsp) vmovaps 0x18ae19a(%rip), %xmm0 # 0x1e02ae0 vmovaps %xmm0, 0x17f0(%rsp) vmovaps 0x1800(%rsp), %xmm0 vmovaps 0x17f0(%rsp), %xmm1 vmulps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x1470(%rsp) vmovaps 0x1470(%rsp), %xmm0 vmovaps %xmm0, 0x1510(%rsp) vmovaps 0x18ae168(%rip), %xmm0 # 0x1e02af0 vmovaps %xmm0, 0x1500(%rsp) vmovaps 0x1510(%rsp), %xmm0 vmovaps 0x1500(%rsp), 
%xmm1 vaddps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x1470(%rsp) vmovaps 0x1470(%rsp), %xmm0 vmovaps %xmm0, 0x1880(%rsp) vcvttps2dq 0x1880(%rsp), %xmm0 vmovdqa %xmm0, 0x1460(%rsp) vmovdqa 0x1460(%rsp), %xmm0 vmovdqa %xmm0, 0x18b0(%rsp) vcvtdq2ps 0x18b0(%rsp), %xmm0 vmovaps %xmm0, 0x1480(%rsp) vmovaps 0x1480(%rsp), %xmm1 vmovaps 0x1470(%rsp), %xmm0 vmovaps %xmm1, 0x18e0(%rsp) vmovaps %xmm0, 0x18d0(%rsp) vmovaps 0x18d0(%rsp), %xmm0 vmovaps 0x18e0(%rsp), %xmm1 vcmpltps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x1440(%rsp) vmovaps 0x1440(%rsp), %xmm1 vmovaps 0x1450(%rsp), %xmm0 vmovaps %xmm1, 0x1920(%rsp) vmovaps %xmm0, 0x1910(%rsp) vmovdqa 0x1920(%rsp), %xmm0 vmovdqa 0x1910(%rsp), %xmm1 vpand %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0x1440(%rsp) vmovaps 0x1480(%rsp), %xmm1 vmovaps 0x1440(%rsp), %xmm0 vmovaps %xmm1, 0x1820(%rsp) vmovaps %xmm0, 0x1810(%rsp) vmovaps 0x1820(%rsp), %xmm0 vmovaps 0x1810(%rsp), %xmm1 vsubps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x1470(%rsp) vmovaps 0x1470(%rsp), %xmm1 vmovaps 0x1490(%rsp), %xmm0 vmovaps %xmm1, 0x19a0(%rsp) vmovaps 0x18ae01b(%rip), %xmm1 # 0x1e02b00 vmovaps %xmm1, 0x1990(%rsp) vmovaps %xmm0, 0x1980(%rsp) vmovaps 0x19a0(%rsp), %xmm2 vmovaps 0x1990(%rsp), %xmm1 vmovaps 0x1980(%rsp), %xmm0 vmovaps %xmm2, 0x1ab0(%rsp) vmovaps %xmm1, 0x1aa0(%rsp) vmovaps %xmm0, 0x1a90(%rsp) vmovaps 0x1ab0(%rsp), %xmm0 vmovdqa 0x18b20a2(%rip), %xmm1 # 0x1e06be0 vpxor %xmm1, %xmm0, %xmm1 vmovaps 0x1aa0(%rsp), %xmm0 vmovaps 0x1a90(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0xd0(%rsp) vmovaps 0xd0(%rsp), %xmm0 vmovaps %xmm0, 0x1490(%rsp) vmovaps 0x1470(%rsp), %xmm1 vmovaps 0x1490(%rsp), %xmm0 vmovaps %xmm1, 0x1970(%rsp) vmovaps 0x18adf79(%rip), %xmm1 # 0x1e02b10 vmovaps %xmm1, 0x1960(%rsp) vmovaps %xmm0, 0x1950(%rsp) vmovaps 0x1970(%rsp), %xmm2 vmovaps 0x1960(%rsp), %xmm1 vmovaps 0x1950(%rsp), %xmm0 vmovaps %xmm2, 0x1ae0(%rsp) vmovaps %xmm1, 0x1ad0(%rsp) vmovaps %xmm0, 0x1ac0(%rsp) vmovaps 0x1ae0(%rsp), %xmm0 vmovdqa 
0x18b1ff0(%rip), %xmm1 # 0x1e06be0 vpxor %xmm1, %xmm0, %xmm1 vmovaps 0x1ad0(%rsp), %xmm0 vmovaps 0x1ac0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0xa0(%rsp) vmovaps 0xa0(%rsp), %xmm0 vmovaps %xmm0, 0x1490(%rsp) vmovaps 0x1490(%rsp), %xmm0 vmovaps %xmm0, 0x17e0(%rsp) vmovaps %xmm0, 0x17d0(%rsp) vmovaps 0x17e0(%rsp), %xmm0 vmovaps 0x17d0(%rsp), %xmm1 vmulps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x1480(%rsp) vmovaps 0x18adeb8(%rip), %xmm0 # 0x1e02b20 vmovaps %xmm0, 0x1430(%rsp) vmovaps 0x1430(%rsp), %xmm1 vmovaps 0x1490(%rsp), %xmm0 vmovaps %xmm1, 0x1630(%rsp) vmovaps %xmm0, 0x1620(%rsp) vmovaps 0x18ade93(%rip), %xmm0 # 0x1e02b30 vmovaps %xmm0, 0x1610(%rsp) vmovaps 0x1630(%rsp), %xmm2 vmovaps 0x1620(%rsp), %xmm1 vmovaps 0x1610(%rsp), %xmm0 vmovaps %xmm2, 0x1660(%rsp) vmovaps %xmm1, 0x1650(%rsp) vmovaps %xmm0, 0x1640(%rsp) vmovaps 0x1660(%rsp), %xmm1 vmovaps 0x1650(%rsp), %xmm0 vmovaps 0x1640(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x1430(%rsp) vmovaps 0x1430(%rsp), %xmm1 vmovaps 0x1490(%rsp), %xmm0 vmovaps %xmm1, 0x1600(%rsp) vmovaps %xmm0, 0x15f0(%rsp) vmovaps 0x18ade0f(%rip), %xmm0 # 0x1e02b40 vmovaps %xmm0, 0x15e0(%rsp) vmovaps 0x1600(%rsp), %xmm2 vmovaps 0x15f0(%rsp), %xmm1 vmovaps 0x15e0(%rsp), %xmm0 vmovaps %xmm2, 0x1690(%rsp) vmovaps %xmm1, 0x1680(%rsp) vmovaps %xmm0, 0x1670(%rsp) vmovaps 0x1690(%rsp), %xmm1 vmovaps 0x1680(%rsp), %xmm0 vmovaps 0x1670(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x1430(%rsp) vmovaps 0x1430(%rsp), %xmm1 vmovaps 0x1490(%rsp), %xmm0 vmovaps %xmm1, 0x15d0(%rsp) vmovaps %xmm0, 0x15c0(%rsp) vmovaps 0x18add8b(%rip), %xmm0 # 0x1e02b50 vmovaps %xmm0, 0x15b0(%rsp) vmovaps 0x15d0(%rsp), %xmm2 vmovaps 0x15c0(%rsp), %xmm1 vmovaps 0x15b0(%rsp), %xmm0 vmovaps %xmm2, 0x16c0(%rsp) vmovaps %xmm1, 0x16b0(%rsp) vmovaps %xmm0, 0x16a0(%rsp) vmovaps 0x16c0(%rsp), %xmm1 vmovaps 0x16b0(%rsp), %xmm0 vmovaps 
0x16a0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x1430(%rsp) vmovaps 0x1430(%rsp), %xmm1 vmovaps 0x1490(%rsp), %xmm0 vmovaps %xmm1, 0x15a0(%rsp) vmovaps %xmm0, 0x1590(%rsp) vmovaps 0x18add07(%rip), %xmm0 # 0x1e02b60 vmovaps %xmm0, 0x1580(%rsp) vmovaps 0x15a0(%rsp), %xmm2 vmovaps 0x1590(%rsp), %xmm1 vmovaps 0x1580(%rsp), %xmm0 vmovaps %xmm2, 0x16f0(%rsp) vmovaps %xmm1, 0x16e0(%rsp) vmovaps %xmm0, 0x16d0(%rsp) vmovaps 0x16f0(%rsp), %xmm1 vmovaps 0x16e0(%rsp), %xmm0 vmovaps 0x16d0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x1430(%rsp) vmovaps 0x1430(%rsp), %xmm1 vmovaps 0x1490(%rsp), %xmm0 vmovaps %xmm1, 0x1570(%rsp) vmovaps %xmm0, 0x1560(%rsp) vmovaps 0x18adc03(%rip), %xmm0 # 0x1e02af0 vmovaps %xmm0, 0x1550(%rsp) vmovaps 0x1570(%rsp), %xmm2 vmovaps 0x1560(%rsp), %xmm1 vmovaps 0x1550(%rsp), %xmm0 vmovaps %xmm2, 0x1720(%rsp) vmovaps %xmm1, 0x1710(%rsp) vmovaps %xmm0, 0x1700(%rsp) vmovaps 0x1720(%rsp), %xmm1 vmovaps 0x1710(%rsp), %xmm0 vmovaps 0x1700(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x1430(%rsp) vmovaps 0x1430(%rsp), %xmm2 vmovaps 0x1480(%rsp), %xmm1 vmovaps 0x1490(%rsp), %xmm0 vmovaps %xmm2, 0x1540(%rsp) vmovaps %xmm1, 0x1530(%rsp) vmovaps %xmm0, 0x1520(%rsp) vmovaps 0x1540(%rsp), %xmm2 vmovaps 0x1530(%rsp), %xmm1 vmovaps 0x1520(%rsp), %xmm0 vmovaps %xmm2, 0x1750(%rsp) vmovaps %xmm1, 0x1740(%rsp) vmovaps %xmm0, 0x1730(%rsp) vmovaps 0x1750(%rsp), %xmm1 vmovaps 0x1740(%rsp), %xmm0 vmovaps 0x1730(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x1430(%rsp) vmovaps 0x1430(%rsp), %xmm1 vmovaps 0x1450(%rsp), %xmm0 vmovaps %xmm1, 0x14f0(%rsp) vmovaps %xmm0, 0x14e0(%rsp) vmovaps 0x14f0(%rsp), %xmm0 vmovaps 0x14e0(%rsp), %xmm1 vaddps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x1430(%rsp) vmovaps 0x1470(%rsp), %xmm0 vmovaps %xmm0, 0x1870(%rsp) vcvttps2dq 0x1870(%rsp), %xmm0 vmovdqa %xmm0, 
0x1460(%rsp) vmovdqa 0x1460(%rsp), %xmm0 vmovdqa %xmm0, 0x14b0(%rsp) vmovdqa 0x18adb05(%rip), %xmm0 # 0x1e02b70 vmovdqa %xmm0, 0x14a0(%rsp) vmovdqa 0x14b0(%rsp), %xmm0 vmovdqa 0x14a0(%rsp), %xmm1 vpaddd %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0x1460(%rsp) vmovdqa 0x1460(%rsp), %xmm0 vmovdqa %xmm0, 0x14d0(%rsp) movl $0x17, 0x14cc(%rsp) vmovdqa 0x14d0(%rsp), %xmm0 movl 0x14cc(%rsp), %eax vmovd %eax, %xmm1 vpslld %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0x1460(%rsp) vmovdqa 0x1460(%rsp), %xmm0 vmovdqa %xmm0, 0x1a10(%rsp) vmovdqa 0x1a10(%rsp), %xmm0 vmovaps %xmm0, 0x1420(%rsp) vmovaps 0x1430(%rsp), %xmm1 vmovaps 0x1420(%rsp), %xmm0 vmovaps %xmm1, 0x17c0(%rsp) vmovaps %xmm0, 0x17b0(%rsp) vmovaps 0x17c0(%rsp), %xmm0 vmulps 0x17b0(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x1430(%rsp) vmovaps 0x1430(%rsp), %xmm0 vmovaps %xmm0, 0x90(%rsp) vmovaps 0xc0(%rsp), %xmm1 vmovaps 0x90(%rsp), %xmm0 vmovaps 0xb0(%rsp), %xmm2 vmovaps %xmm2, 0xe30(%rsp) vmovaps %xmm0, 0xe20(%rsp) vmovaps 0xe30(%rsp), %xmm0 vaddps 0xe20(%rsp), %xmm0, %xmm0 vmovaps %xmm1, 0x1020(%rsp) vmovaps %xmm0, 0x1010(%rsp) vmovaps 0x1020(%rsp), %xmm0 vdivps 0x1010(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x80(%rsp) vmovaps 0x80(%rsp), %xmm0 vmovaps %xmm0, 0xbb0(%rsp) jmp 0x55739b vmovaps 0xba0(%rsp), %xmm0 vmovaps %xmm0, 0xe70(%rsp) vmovaps 0xe70(%rsp), %xmm0 vmovaps %xmm0, 0x60(%rsp) vmovaps %xmm0, 0x10a0(%rsp) vxorps %xmm0, %xmm0, %xmm0 vmovaps %xmm0, 0x1370(%rsp) vmovaps 0x1370(%rsp), %xmm0 vmovaps %xmm0, 0x1090(%rsp) vmovaps 0x18ad896(%rip), %xmm0 # 0x1e02ab0 vmovaps %xmm0, 0x1060(%rsp) vmovaps 0x10a0(%rsp), %xmm0 vmovaps %xmm0, 0x13b0(%rsp) vmovaps 0x18ad883(%rip), %xmm0 # 0x1e02ac0 vmovaps %xmm0, 0x13a0(%rsp) vmovaps 0x13b0(%rsp), %xmm0 vmovaps 0x13a0(%rsp), %xmm1 vminps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x10a0(%rsp) vmovaps 0x10a0(%rsp), %xmm0 vmovaps %xmm0, 0x1390(%rsp) vmovaps 0x18ad851(%rip), %xmm0 # 0x1e02ad0 vmovaps %xmm0, 0x1380(%rsp) vmovaps 0x1390(%rsp), %xmm0 vmovaps 0x1380(%rsp), %xmm1 vmaxps %xmm1, %xmm0, %xmm0 vmovaps 
%xmm0, 0x10a0(%rsp) vmovaps 0x10a0(%rsp), %xmm0 vmovaps %xmm0, 0x1410(%rsp) vmovaps 0x18ad81f(%rip), %xmm0 # 0x1e02ae0 vmovaps %xmm0, 0x1400(%rsp) vmovaps 0x1410(%rsp), %xmm0 vmovaps 0x1400(%rsp), %xmm1 vmulps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x1080(%rsp) vmovaps 0x1080(%rsp), %xmm0 vmovaps %xmm0, 0x1120(%rsp) vmovaps 0x18ad7ed(%rip), %xmm0 # 0x1e02af0 vmovaps %xmm0, 0x1110(%rsp) vmovaps 0x1120(%rsp), %xmm0 vmovaps 0x1110(%rsp), %xmm1 vaddps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x1080(%rsp) vmovaps 0x1080(%rsp), %xmm0 vmovaps %xmm0, 0x18a0(%rsp) vcvttps2dq 0x18a0(%rsp), %xmm0 vmovdqa %xmm0, 0x1070(%rsp) vmovdqa 0x1070(%rsp), %xmm0 vmovdqa %xmm0, 0x18c0(%rsp) vcvtdq2ps 0x18c0(%rsp), %xmm0 vmovaps %xmm0, 0x1090(%rsp) vmovaps 0x1090(%rsp), %xmm1 vmovaps 0x1080(%rsp), %xmm0 vmovaps %xmm1, 0x1900(%rsp) vmovaps %xmm0, 0x18f0(%rsp) vmovaps 0x18f0(%rsp), %xmm0 vmovaps 0x1900(%rsp), %xmm1 vcmpltps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x1050(%rsp) vmovaps 0x1050(%rsp), %xmm1 vmovaps 0x1060(%rsp), %xmm0 vmovaps %xmm1, 0x1940(%rsp) vmovaps %xmm0, 0x1930(%rsp) vmovdqa 0x1940(%rsp), %xmm0 vmovdqa 0x1930(%rsp), %xmm1 vpand %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0x1050(%rsp) vmovaps 0x1090(%rsp), %xmm1 vmovaps 0x1050(%rsp), %xmm0 vmovaps %xmm1, 0x1840(%rsp) vmovaps %xmm0, 0x1830(%rsp) vmovaps 0x1840(%rsp), %xmm0 vmovaps 0x1830(%rsp), %xmm1 vsubps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x1080(%rsp) vmovaps 0x1080(%rsp), %xmm1 vmovaps 0x10a0(%rsp), %xmm0 vmovaps %xmm1, 0x1a00(%rsp) vmovaps 0x18ad6a0(%rip), %xmm1 # 0x1e02b00 vmovaps %xmm1, 0x19f0(%rsp) vmovaps %xmm0, 0x19e0(%rsp) vmovaps 0x1a00(%rsp), %xmm2 vmovaps 0x19f0(%rsp), %xmm1 vmovaps 0x19e0(%rsp), %xmm0 vmovaps %xmm2, 0x1a50(%rsp) vmovaps %xmm1, 0x1a40(%rsp) vmovaps %xmm0, 0x1a30(%rsp) vmovaps 0x1a50(%rsp), %xmm0 vmovdqa 0x18b1727(%rip), %xmm1 # 0x1e06be0 vpxor %xmm1, %xmm0, %xmm1 vmovaps 0x1a40(%rsp), %xmm0 vmovaps 0x1a30(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x70(%rsp) vmovaps 
0x70(%rsp), %xmm0 vmovaps %xmm0, 0x10a0(%rsp) vmovaps 0x1080(%rsp), %xmm1 vmovaps 0x10a0(%rsp), %xmm0 vmovaps %xmm1, 0x19d0(%rsp) vmovaps 0x18ad604(%rip), %xmm1 # 0x1e02b10 vmovaps %xmm1, 0x19c0(%rsp) vmovaps %xmm0, 0x19b0(%rsp) vmovaps 0x19d0(%rsp), %xmm2 vmovaps 0x19c0(%rsp), %xmm1 vmovaps 0x19b0(%rsp), %xmm0 vmovaps %xmm2, 0x1a80(%rsp) vmovaps %xmm1, 0x1a70(%rsp) vmovaps %xmm0, 0x1a60(%rsp) vmovaps 0x1a80(%rsp), %xmm0 vmovdqa 0x18b167b(%rip), %xmm1 # 0x1e06be0 vpxor %xmm1, %xmm0, %xmm1 vmovaps 0x1a70(%rsp), %xmm0 vmovaps 0x1a60(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x50(%rsp) vmovaps 0x50(%rsp), %xmm0 vmovaps %xmm0, 0x10a0(%rsp) vmovaps 0x10a0(%rsp), %xmm0 vmovaps %xmm0, 0x13f0(%rsp) vmovaps %xmm0, 0x13e0(%rsp) vmovaps 0x13f0(%rsp), %xmm0 vmovaps 0x13e0(%rsp), %xmm1 vmulps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x1090(%rsp) vmovaps 0x18ad549(%rip), %xmm0 # 0x1e02b20 vmovaps %xmm0, 0x1040(%rsp) vmovaps 0x1040(%rsp), %xmm1 vmovaps 0x10a0(%rsp), %xmm0 vmovaps %xmm1, 0x1240(%rsp) vmovaps %xmm0, 0x1230(%rsp) vmovaps 0x18ad524(%rip), %xmm0 # 0x1e02b30 vmovaps %xmm0, 0x1220(%rsp) vmovaps 0x1240(%rsp), %xmm2 vmovaps 0x1230(%rsp), %xmm1 vmovaps 0x1220(%rsp), %xmm0 vmovaps %xmm2, 0x1270(%rsp) vmovaps %xmm1, 0x1260(%rsp) vmovaps %xmm0, 0x1250(%rsp) vmovaps 0x1270(%rsp), %xmm1 vmovaps 0x1260(%rsp), %xmm0 vmovaps 0x1250(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x1040(%rsp) vmovaps 0x1040(%rsp), %xmm1 vmovaps 0x10a0(%rsp), %xmm0 vmovaps %xmm1, 0x1210(%rsp) vmovaps %xmm0, 0x1200(%rsp) vmovaps 0x18ad4a0(%rip), %xmm0 # 0x1e02b40 vmovaps %xmm0, 0x11f0(%rsp) vmovaps 0x1210(%rsp), %xmm2 vmovaps 0x1200(%rsp), %xmm1 vmovaps 0x11f0(%rsp), %xmm0 vmovaps %xmm2, 0x12a0(%rsp) vmovaps %xmm1, 0x1290(%rsp) vmovaps %xmm0, 0x1280(%rsp) vmovaps 0x12a0(%rsp), %xmm1 vmovaps 0x1290(%rsp), %xmm0 vmovaps 0x1280(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 
0x1040(%rsp) vmovaps 0x1040(%rsp), %xmm1 vmovaps 0x10a0(%rsp), %xmm0 vmovaps %xmm1, 0x11e0(%rsp) vmovaps %xmm0, 0x11d0(%rsp) vmovaps 0x18ad41c(%rip), %xmm0 # 0x1e02b50 vmovaps %xmm0, 0x11c0(%rsp) vmovaps 0x11e0(%rsp), %xmm2 vmovaps 0x11d0(%rsp), %xmm1 vmovaps 0x11c0(%rsp), %xmm0 vmovaps %xmm2, 0x12d0(%rsp) vmovaps %xmm1, 0x12c0(%rsp) vmovaps %xmm0, 0x12b0(%rsp) vmovaps 0x12d0(%rsp), %xmm1 vmovaps 0x12c0(%rsp), %xmm0 vmovaps 0x12b0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x1040(%rsp) vmovaps 0x1040(%rsp), %xmm1 vmovaps 0x10a0(%rsp), %xmm0 vmovaps %xmm1, 0x11b0(%rsp) vmovaps %xmm0, 0x11a0(%rsp) vmovaps 0x18ad398(%rip), %xmm0 # 0x1e02b60 vmovaps %xmm0, 0x1190(%rsp) vmovaps 0x11b0(%rsp), %xmm2 vmovaps 0x11a0(%rsp), %xmm1 vmovaps 0x1190(%rsp), %xmm0 vmovaps %xmm2, 0x1300(%rsp) vmovaps %xmm1, 0x12f0(%rsp) vmovaps %xmm0, 0x12e0(%rsp) vmovaps 0x1300(%rsp), %xmm1 vmovaps 0x12f0(%rsp), %xmm0 vmovaps 0x12e0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x1040(%rsp) vmovaps 0x1040(%rsp), %xmm1 vmovaps 0x10a0(%rsp), %xmm0 vmovaps %xmm1, 0x1180(%rsp) vmovaps %xmm0, 0x1170(%rsp) vmovaps 0x18ad294(%rip), %xmm0 # 0x1e02af0 vmovaps %xmm0, 0x1160(%rsp) vmovaps 0x1180(%rsp), %xmm2 vmovaps 0x1170(%rsp), %xmm1 vmovaps 0x1160(%rsp), %xmm0 vmovaps %xmm2, 0x1330(%rsp) vmovaps %xmm1, 0x1320(%rsp) vmovaps %xmm0, 0x1310(%rsp) vmovaps 0x1330(%rsp), %xmm1 vmovaps 0x1320(%rsp), %xmm0 vmovaps 0x1310(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x1040(%rsp) vmovaps 0x1040(%rsp), %xmm2 vmovaps 0x1090(%rsp), %xmm1 vmovaps 0x10a0(%rsp), %xmm0 vmovaps %xmm2, 0x1150(%rsp) vmovaps %xmm1, 0x1140(%rsp) vmovaps %xmm0, 0x1130(%rsp) vmovaps 0x1150(%rsp), %xmm2 vmovaps 0x1140(%rsp), %xmm1 vmovaps 0x1130(%rsp), %xmm0 vmovaps %xmm2, 0x1360(%rsp) vmovaps %xmm1, 0x1350(%rsp) vmovaps %xmm0, 0x1340(%rsp) vmovaps 0x1360(%rsp), %xmm1 vmovaps 0x1350(%rsp), %xmm0 vmovaps 
0x1340(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x1040(%rsp) vmovaps 0x1040(%rsp), %xmm1 vmovaps 0x1060(%rsp), %xmm0 vmovaps %xmm1, 0x1100(%rsp) vmovaps %xmm0, 0x10f0(%rsp) vmovaps 0x1100(%rsp), %xmm0 vmovaps 0x10f0(%rsp), %xmm1 vaddps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x1040(%rsp) vmovaps 0x1080(%rsp), %xmm0 vmovaps %xmm0, 0x1890(%rsp) vcvttps2dq 0x1890(%rsp), %xmm0 vmovdqa %xmm0, 0x1070(%rsp) vmovdqa 0x1070(%rsp), %xmm0 vmovdqa %xmm0, 0x10c0(%rsp) vmovdqa 0x18ad196(%rip), %xmm0 # 0x1e02b70 vmovdqa %xmm0, 0x10b0(%rsp) vmovdqa 0x10c0(%rsp), %xmm0 vmovdqa 0x10b0(%rsp), %xmm1 vpaddd %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0x1070(%rsp) vmovdqa 0x1070(%rsp), %xmm0 vmovdqa %xmm0, 0x10e0(%rsp) movl $0x17, 0x10dc(%rsp) vmovdqa 0x10e0(%rsp), %xmm0 movl 0x10dc(%rsp), %eax vmovd %eax, %xmm1 vpslld %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0x1070(%rsp) vmovdqa 0x1070(%rsp), %xmm0 vmovdqa %xmm0, 0x1a20(%rsp) vmovdqa 0x1a20(%rsp), %xmm0 vmovaps %xmm0, 0x1030(%rsp) vmovaps 0x1040(%rsp), %xmm1 vmovaps 0x1030(%rsp), %xmm0 vmovaps %xmm1, 0x13d0(%rsp) vmovaps %xmm0, 0x13c0(%rsp) vmovaps 0x13d0(%rsp), %xmm0 vmulps 0x13c0(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x1040(%rsp) vmovaps 0x1040(%rsp), %xmm0 vmovaps %xmm0, 0x40(%rsp) vmovaps 0x40(%rsp), %xmm1 movl $0x3f800000, 0xebc(%rsp) # imm = 0x3F800000 vbroadcastss 0xebc(%rsp), %xmm0 vmovaps %xmm0, 0xea0(%rsp) vmovaps 0xea0(%rsp), %xmm0 vmovaps %xmm1, 0xe90(%rsp) vmovaps %xmm0, 0xe80(%rsp) vmovaps 0xe90(%rsp), %xmm0 vmovaps 0xe80(%rsp), %xmm1 vaddps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x2240(%rsp) vmovaps 0x18acf98(%rip), %xmm0 # 0x1e02ab0 vmovaps %xmm0, 0x2220(%rsp) vmovaps 0x2240(%rsp), %xmm1 vxorps %xmm0, %xmm0, %xmm0 vmovaps %xmm0, 0x26b0(%rsp) vmovaps 0x26b0(%rsp), %xmm0 vmovaps %xmm1, 0x2860(%rsp) vmovaps %xmm0, 0x2850(%rsp) vmovaps 0x2860(%rsp), %xmm0 vmovaps 0x2850(%rsp), %xmm1 vcmpleps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x2210(%rsp) vmovaps 0x2240(%rsp), %xmm0 vmovaps %xmm0, 0x26d0(%rsp) vmovaps 
0x18acff4(%rip), %xmm0 # 0x1e02b80 vmovaps %xmm0, 0x26c0(%rsp) vmovaps 0x26d0(%rsp), %xmm0 vmovaps 0x26c0(%rsp), %xmm1 vmaxps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x2240(%rsp) vmovaps 0x2240(%rsp), %xmm0 vmovaps %xmm0, 0x2870(%rsp) vmovaps 0x2870(%rsp), %xmm0 vmovaps %xmm0, 0x2260(%rsp) movl $0x17, 0x225c(%rsp) vmovdqa 0x2260(%rsp), %xmm0 vmovd 0x225c(%rsp), %xmm1 vpsrld %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0x2230(%rsp) vmovaps 0x2240(%rsp), %xmm0 vmovaps %xmm0, 0x27e0(%rsp) vmovaps 0x18acf74(%rip), %xmm0 # 0x1e02b90 vmovaps %xmm0, 0x27d0(%rsp) vmovdqa 0x27e0(%rsp), %xmm0 vmovdqa 0x27d0(%rsp), %xmm1 vpand %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0x2240(%rsp) vmovaps 0x2240(%rsp), %xmm0 vmovaps %xmm0, 0x28b0(%rsp) vmovaps 0x18ace92(%rip), %xmm1 # 0x1e02af0 vmovaps %xmm1, 0x28a0(%rsp) vmovdqa 0x28b0(%rsp), %xmm0 vmovdqa 0x28a0(%rsp), %xmm2 vpor %xmm2, %xmm0, %xmm0 vmovdqa %xmm0, 0x2240(%rsp) vmovdqa 0x2230(%rsp), %xmm0 vmovdqa %xmm0, 0x2280(%rsp) vmovdqa 0x18aced0(%rip), %xmm0 # 0x1e02b70 vmovdqa %xmm0, 0x2270(%rsp) vmovdqa 0x2280(%rsp), %xmm0 vmovdqa 0x2270(%rsp), %xmm2 vpsubd %xmm2, %xmm0, %xmm0 vmovdqa %xmm0, 0x2230(%rsp) vmovdqa 0x2230(%rsp), %xmm0 vmovdqa %xmm0, 0x2780(%rsp) vcvtdq2ps 0x2780(%rsp), %xmm0 vmovaps %xmm0, 0x2200(%rsp) vmovaps 0x2200(%rsp), %xmm2 vmovaps 0x2220(%rsp), %xmm0 vmovaps %xmm2, 0x22e0(%rsp) vmovaps %xmm0, 0x22d0(%rsp) vmovaps 0x22e0(%rsp), %xmm0 vmovaps 0x22d0(%rsp), %xmm2 vaddps %xmm2, %xmm0, %xmm0 vmovaps %xmm0, 0x2200(%rsp) vmovaps 0x2240(%rsp), %xmm0 vmovaps %xmm0, 0x28d0(%rsp) vmovaps 0x18ace57(%rip), %xmm0 # 0x1e02ba0 vmovaps %xmm0, 0x28c0(%rsp) vmovaps 0x28d0(%rsp), %xmm0 vmovaps 0x28c0(%rsp), %xmm2 vcmpltps %xmm2, %xmm0, %xmm0 vmovaps %xmm0, 0x21f0(%rsp) vmovaps 0x2240(%rsp), %xmm2 vmovaps 0x21f0(%rsp), %xmm0 vmovaps %xmm2, 0x27c0(%rsp) vmovaps %xmm0, 0x27b0(%rsp) vmovdqa 0x27c0(%rsp), %xmm0 vmovdqa 0x27b0(%rsp), %xmm2 vpand %xmm2, %xmm0, %xmm0 vmovdqa %xmm0, 0x21e0(%rsp) vmovaps 0x2240(%rsp), %xmm2 vmovaps 0x2220(%rsp), %xmm0 vmovaps 
%xmm2, 0x2770(%rsp) vmovaps %xmm0, 0x2760(%rsp) vmovaps 0x2770(%rsp), %xmm0 vmovaps 0x2760(%rsp), %xmm2 vsubps %xmm2, %xmm0, %xmm0 vmovaps %xmm0, 0x2240(%rsp) vmovaps 0x2200(%rsp), %xmm2 vmovaps 0x2220(%rsp), %xmm3 vmovaps 0x21f0(%rsp), %xmm0 vmovaps %xmm3, 0x27a0(%rsp) vmovaps %xmm0, 0x2790(%rsp) vmovdqa 0x27a0(%rsp), %xmm0 vmovdqa 0x2790(%rsp), %xmm3 vpand %xmm3, %xmm0, %xmm0 vmovaps %xmm2, 0x2750(%rsp) vmovdqa %xmm0, 0x2740(%rsp) vmovaps 0x2750(%rsp), %xmm0 vmovaps 0x2740(%rsp), %xmm2 vsubps %xmm2, %xmm0, %xmm0 vmovaps %xmm0, 0x2200(%rsp) vmovaps 0x2240(%rsp), %xmm2 vmovaps 0x21e0(%rsp), %xmm0 vmovaps %xmm2, 0x22c0(%rsp) vmovaps %xmm0, 0x22b0(%rsp) vmovaps 0x22c0(%rsp), %xmm0 vmovaps 0x22b0(%rsp), %xmm2 vaddps %xmm2, %xmm0, %xmm0 vmovaps %xmm0, 0x2240(%rsp) vmovaps 0x2240(%rsp), %xmm0 vmovaps %xmm0, 0x2730(%rsp) vmovaps %xmm0, 0x2720(%rsp) vmovaps 0x2730(%rsp), %xmm0 vmovaps 0x2720(%rsp), %xmm2 vmulps %xmm2, %xmm0, %xmm0 vmovaps %xmm0, 0x21d0(%rsp) vmovaps 0x18accbf(%rip), %xmm0 # 0x1e02bb0 vmovaps %xmm0, 0x21c0(%rsp) vmovaps 0x21c0(%rsp), %xmm2 vmovaps 0x2240(%rsp), %xmm0 vmovaps %xmm2, 0x24c0(%rsp) vmovaps %xmm0, 0x24b0(%rsp) vmovaps 0x18acc9a(%rip), %xmm0 # 0x1e02bc0 vmovaps %xmm0, 0x24a0(%rsp) vmovaps 0x24c0(%rsp), %xmm3 vmovaps 0x24b0(%rsp), %xmm2 vmovaps 0x24a0(%rsp), %xmm0 vmovaps %xmm3, 0x24f0(%rsp) vmovaps %xmm2, 0x24e0(%rsp) vmovaps %xmm0, 0x24d0(%rsp) vmovaps 0x24f0(%rsp), %xmm2 vmovaps 0x24e0(%rsp), %xmm0 vmovaps 0x24d0(%rsp), %xmm3 vfmadd213ps %xmm3, %xmm2, %xmm0 # xmm0 = (xmm2 * xmm0) + xmm3 vmovaps %xmm0, 0x21c0(%rsp) vmovaps 0x21c0(%rsp), %xmm2 vmovaps 0x2240(%rsp), %xmm0 vmovaps %xmm2, 0x2490(%rsp) vmovaps %xmm0, 0x2480(%rsp) vmovaps 0x18acc16(%rip), %xmm0 # 0x1e02bd0 vmovaps %xmm0, 0x2470(%rsp) vmovaps 0x2490(%rsp), %xmm3 vmovaps 0x2480(%rsp), %xmm2 vmovaps 0x2470(%rsp), %xmm0 vmovaps %xmm3, 0x2520(%rsp) vmovaps %xmm2, 0x2510(%rsp) vmovaps %xmm0, 0x2500(%rsp) vmovaps 0x2520(%rsp), %xmm2 vmovaps 0x2510(%rsp), %xmm0 vmovaps 0x2500(%rsp), %xmm3 
vfmadd213ps %xmm3, %xmm2, %xmm0 # xmm0 = (xmm2 * xmm0) + xmm3 vmovaps %xmm0, 0x21c0(%rsp) vmovaps 0x21c0(%rsp), %xmm2 vmovaps 0x2240(%rsp), %xmm0 vmovaps %xmm2, 0x2460(%rsp) vmovaps %xmm0, 0x2450(%rsp) vmovaps 0x18acb92(%rip), %xmm0 # 0x1e02be0 vmovaps %xmm0, 0x2440(%rsp) vmovaps 0x2460(%rsp), %xmm3 vmovaps 0x2450(%rsp), %xmm2 vmovaps 0x2440(%rsp), %xmm0 vmovaps %xmm3, 0x2550(%rsp) vmovaps %xmm2, 0x2540(%rsp) vmovaps %xmm0, 0x2530(%rsp) vmovaps 0x2550(%rsp), %xmm2 vmovaps 0x2540(%rsp), %xmm0 vmovaps 0x2530(%rsp), %xmm3 vfmadd213ps %xmm3, %xmm2, %xmm0 # xmm0 = (xmm2 * xmm0) + xmm3 vmovaps %xmm0, 0x21c0(%rsp) vmovaps 0x21c0(%rsp), %xmm2 vmovaps 0x2240(%rsp), %xmm0 vmovaps %xmm2, 0x2430(%rsp) vmovaps %xmm0, 0x2420(%rsp) vmovaps 0x18acb0e(%rip), %xmm0 # 0x1e02bf0 vmovaps %xmm0, 0x2410(%rsp) vmovaps 0x2430(%rsp), %xmm3 vmovaps 0x2420(%rsp), %xmm2 vmovaps 0x2410(%rsp), %xmm0 vmovaps %xmm3, 0x2580(%rsp) vmovaps %xmm2, 0x2570(%rsp) vmovaps %xmm0, 0x2560(%rsp) vmovaps 0x2580(%rsp), %xmm2 vmovaps 0x2570(%rsp), %xmm0 vmovaps 0x2560(%rsp), %xmm3 vfmadd213ps %xmm3, %xmm2, %xmm0 # xmm0 = (xmm2 * xmm0) + xmm3 vmovaps %xmm0, 0x21c0(%rsp) vmovaps 0x21c0(%rsp), %xmm2 vmovaps 0x2240(%rsp), %xmm0 vmovaps %xmm2, 0x2400(%rsp) vmovaps %xmm0, 0x23f0(%rsp) vmovaps 0x18aca8a(%rip), %xmm0 # 0x1e02c00 vmovaps %xmm0, 0x23e0(%rsp) vmovaps 0x2400(%rsp), %xmm3 vmovaps 0x23f0(%rsp), %xmm2 vmovaps 0x23e0(%rsp), %xmm0 vmovaps %xmm3, 0x25b0(%rsp) vmovaps %xmm2, 0x25a0(%rsp) vmovaps %xmm0, 0x2590(%rsp) vmovaps 0x25b0(%rsp), %xmm2 vmovaps 0x25a0(%rsp), %xmm0 vmovaps 0x2590(%rsp), %xmm3 vfmadd213ps %xmm3, %xmm2, %xmm0 # xmm0 = (xmm2 * xmm0) + xmm3 vmovaps %xmm0, 0x21c0(%rsp) vmovaps 0x21c0(%rsp), %xmm2 vmovaps 0x2240(%rsp), %xmm0 vmovaps %xmm2, 0x23d0(%rsp) vmovaps %xmm0, 0x23c0(%rsp) vmovaps 0x18aca06(%rip), %xmm0 # 0x1e02c10 vmovaps %xmm0, 0x23b0(%rsp) vmovaps 0x23d0(%rsp), %xmm3 vmovaps 0x23c0(%rsp), %xmm2 vmovaps 0x23b0(%rsp), %xmm0 vmovaps %xmm3, 0x25e0(%rsp) vmovaps %xmm2, 0x25d0(%rsp) vmovaps 
%xmm0, 0x25c0(%rsp) vmovaps 0x25e0(%rsp), %xmm2 vmovaps 0x25d0(%rsp), %xmm0 vmovaps 0x25c0(%rsp), %xmm3 vfmadd213ps %xmm3, %xmm2, %xmm0 # xmm0 = (xmm2 * xmm0) + xmm3 vmovaps %xmm0, 0x21c0(%rsp) vmovaps 0x21c0(%rsp), %xmm2 vmovaps 0x2240(%rsp), %xmm0 vmovaps %xmm2, 0x23a0(%rsp) vmovaps %xmm0, 0x2390(%rsp) vmovaps 0x18ac982(%rip), %xmm0 # 0x1e02c20 vmovaps %xmm0, 0x2380(%rsp) vmovaps 0x23a0(%rsp), %xmm3 vmovaps 0x2390(%rsp), %xmm2 vmovaps 0x2380(%rsp), %xmm0 vmovaps %xmm3, 0x2610(%rsp) vmovaps %xmm2, 0x2600(%rsp) vmovaps %xmm0, 0x25f0(%rsp) vmovaps 0x2610(%rsp), %xmm2 vmovaps 0x2600(%rsp), %xmm0 vmovaps 0x25f0(%rsp), %xmm3 vfmadd213ps %xmm3, %xmm2, %xmm0 # xmm0 = (xmm2 * xmm0) + xmm3 vmovaps %xmm0, 0x21c0(%rsp) vmovaps 0x21c0(%rsp), %xmm2 vmovaps 0x2240(%rsp), %xmm0 vmovaps %xmm2, 0x2370(%rsp) vmovaps %xmm0, 0x2360(%rsp) vmovaps 0x18ac8fe(%rip), %xmm0 # 0x1e02c30 vmovaps %xmm0, 0x2350(%rsp) vmovaps 0x2370(%rsp), %xmm3 vmovaps 0x2360(%rsp), %xmm2 vmovaps 0x2350(%rsp), %xmm0 vmovaps %xmm3, 0x2640(%rsp) vmovaps %xmm2, 0x2630(%rsp) vmovaps %xmm0, 0x2620(%rsp) vmovaps 0x2640(%rsp), %xmm2 vmovaps 0x2630(%rsp), %xmm0 vmovaps 0x2620(%rsp), %xmm3 vfmadd213ps %xmm3, %xmm2, %xmm0 # xmm0 = (xmm2 * xmm0) + xmm3 vmovaps %xmm0, 0x21c0(%rsp) vmovaps 0x21c0(%rsp), %xmm2 vmovaps 0x2240(%rsp), %xmm0 vmovaps %xmm2, 0x2710(%rsp) vmovaps %xmm0, 0x2700(%rsp) vmovaps 0x2710(%rsp), %xmm0 vmovaps 0x2700(%rsp), %xmm2 vmulps %xmm2, %xmm0, %xmm0 vmovaps %xmm0, 0x21c0(%rsp) vmovaps 0x21c0(%rsp), %xmm2 vmovaps 0x21d0(%rsp), %xmm0 vmovaps %xmm2, 0x26f0(%rsp) vmovaps %xmm0, 0x26e0(%rsp) vmovaps 0x26f0(%rsp), %xmm0 vmovaps 0x26e0(%rsp), %xmm2 vmulps %xmm2, %xmm0, %xmm0 vmovaps %xmm0, 0x21c0(%rsp) vmovaps 0x2200(%rsp), %xmm2 vmovaps 0x21c0(%rsp), %xmm0 vmovaps %xmm2, 0x2340(%rsp) vmovaps 0x18ac6cd(%rip), %xmm2 # 0x1e02b10 vmovaps %xmm2, 0x2330(%rsp) vmovaps %xmm0, 0x2320(%rsp) vmovaps 0x2340(%rsp), %xmm3 vmovaps 0x2330(%rsp), %xmm2 vmovaps 0x2320(%rsp), %xmm0 vmovaps %xmm3, 0x2670(%rsp) vmovaps %xmm2, 
0x2660(%rsp) vmovaps %xmm0, 0x2650(%rsp) vmovaps 0x2670(%rsp), %xmm2 vmovaps 0x2660(%rsp), %xmm0 vmovaps 0x2650(%rsp), %xmm3 vfmadd213ps %xmm3, %xmm2, %xmm0 # xmm0 = (xmm2 * xmm0) + xmm3 vmovaps %xmm0, 0x21c0(%rsp) vmovaps 0x21d0(%rsp), %xmm2 vmovaps 0x21c0(%rsp), %xmm0 vmovaps %xmm2, 0x2810(%rsp) vmovaps %xmm1, 0x2800(%rsp) vmovaps %xmm0, 0x27f0(%rsp) vmovaps 0x2810(%rsp), %xmm2 vmovaps 0x2800(%rsp), %xmm1 vmovaps 0x27f0(%rsp), %xmm0 vmovaps %xmm2, 0x2840(%rsp) vmovaps %xmm1, 0x2830(%rsp) vmovaps %xmm0, 0x2820(%rsp) vmovaps 0x2840(%rsp), %xmm1 vmovaps 0x2830(%rsp), %xmm0 vmovaps 0x2820(%rsp), %xmm2 vfnmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = -(xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x21c0(%rsp) vmovaps 0x2240(%rsp), %xmm1 vmovaps 0x21c0(%rsp), %xmm0 vmovaps %xmm1, 0x22a0(%rsp) vmovaps %xmm0, 0x2290(%rsp) vmovaps 0x22a0(%rsp), %xmm0 vmovaps 0x2290(%rsp), %xmm1 vaddps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x2240(%rsp) vmovaps 0x2200(%rsp), %xmm1 vmovaps 0x2240(%rsp), %xmm0 vmovaps %xmm1, 0x2310(%rsp) vmovaps 0x18ac55a(%rip), %xmm1 # 0x1e02b00 vmovaps %xmm1, 0x2300(%rsp) vmovaps %xmm0, 0x22f0(%rsp) vmovaps 0x2310(%rsp), %xmm2 vmovaps 0x2300(%rsp), %xmm1 vmovaps 0x22f0(%rsp), %xmm0 vmovaps %xmm2, 0x26a0(%rsp) vmovaps %xmm1, 0x2690(%rsp) vmovaps %xmm0, 0x2680(%rsp) vmovaps 0x26a0(%rsp), %xmm1 vmovaps 0x2690(%rsp), %xmm0 vmovaps 0x2680(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x2240(%rsp) vmovaps 0x2240(%rsp), %xmm1 vmovaps 0x2210(%rsp), %xmm0 vmovaps %xmm1, 0x2890(%rsp) vmovaps %xmm0, 0x2880(%rsp) vmovaps 0x2890(%rsp), %xmm0 vmovaps 0x2880(%rsp), %xmm1 vpor %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x2240(%rsp) vmovaps 0x2240(%rsp), %xmm0 vmovaps %xmm0, 0x30(%rsp) vmovaps 0x30(%rsp), %xmm0 vmovaps %xmm0, 0x1b10(%rsp) movl $0x3f800000, 0x1b5c(%rsp) # imm = 0x3F800000 vbroadcastss 0x1b5c(%rsp), %xmm0 vmovaps %xmm0, 0x1b40(%rsp) vmovaps 0x1b40(%rsp), %xmm0 vmovaps %xmm0, 0x1b00(%rsp) movl $0x40000000, 0x1b3c(%rsp) # imm = 0x40000000 
vbroadcastss 0x1b3c(%rsp), %xmm0 vmovaps %xmm0, 0x1b20(%rsp) vmovaps 0x1b20(%rsp), %xmm0 vmovaps %xmm0, 0x1af0(%rsp) vmovaps 0x1b10(%rsp), %xmm1 vmovaps 0x1af0(%rsp), %xmm0 vmovaps %xmm1, 0x1c00(%rsp) vmovaps %xmm0, 0x1bf0(%rsp) vmovaps 0x1c00(%rsp), %xmm0 vmovaps 0x1bf0(%rsp), %xmm1 vmulps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x1b70(%rsp) movl $0x3f800000, 0x1bcc(%rsp) # imm = 0x3F800000 vbroadcastss 0x1bcc(%rsp), %xmm0 vmovaps %xmm0, 0x1bb0(%rsp) vmovaps 0x1bb0(%rsp), %xmm0 vmovaps %xmm0, 0x1b60(%rsp) vmovaps 0x1b60(%rsp), %xmm2 vmovaps %xmm2, %xmm1 vxorps %xmm0, %xmm0, %xmm0 vmovaps %xmm0, 0x1ba0(%rsp) vmovaps 0x1ba0(%rsp), %xmm4 vmovaps 0x1b70(%rsp), %xmm3 vmovaps %xmm4, 0x2050(%rsp) vmovaps %xmm3, 0x2040(%rsp) vmovaps 0x2050(%rsp), %xmm3 vmovaps 0x2040(%rsp), %xmm4 vsubps %xmm4, %xmm3, %xmm3 vmovaps %xmm3, 0x1ca0(%rsp) vmovaps %xmm0, 0x1f70(%rsp) vmovaps 0x1f70(%rsp), %xmm0 vmovaps %xmm0, 0x1c90(%rsp) vmovaps 0x18ac2e5(%rip), %xmm0 # 0x1e02ab0 vmovaps %xmm0, 0x1c60(%rsp) vmovaps 0x1ca0(%rsp), %xmm0 vmovaps %xmm0, 0x1fb0(%rsp) vmovaps 0x18ac2d2(%rip), %xmm0 # 0x1e02ac0 vmovaps %xmm0, 0x1fa0(%rsp) vmovaps 0x1fb0(%rsp), %xmm0 vmovaps 0x1fa0(%rsp), %xmm3 vminps %xmm3, %xmm0, %xmm0 vmovaps %xmm0, 0x1ca0(%rsp) vmovaps 0x1ca0(%rsp), %xmm0 vmovaps %xmm0, 0x1f90(%rsp) vmovaps 0x18ac2a0(%rip), %xmm0 # 0x1e02ad0 vmovaps %xmm0, 0x1f80(%rsp) vmovaps 0x1f90(%rsp), %xmm0 vmovaps 0x1f80(%rsp), %xmm3 vmaxps %xmm3, %xmm0, %xmm0 vmovaps %xmm0, 0x1ca0(%rsp) vmovaps 0x1ca0(%rsp), %xmm0 vmovaps %xmm0, 0x2010(%rsp) vmovaps 0x18ac26e(%rip), %xmm0 # 0x1e02ae0 vmovaps %xmm0, 0x2000(%rsp) vmovaps 0x2010(%rsp), %xmm0 vmovaps 0x2000(%rsp), %xmm3 vmulps %xmm3, %xmm0, %xmm0 vmovaps %xmm0, 0x1c80(%rsp) vmovaps 0x1c80(%rsp), %xmm0 vmovaps %xmm0, 0x1d20(%rsp) vmovaps 0x18ac23c(%rip), %xmm0 # 0x1e02af0 vmovaps %xmm0, 0x1d10(%rsp) vmovaps 0x1d20(%rsp), %xmm3 vmovaps 0x1d10(%rsp), %xmm4 vaddps %xmm4, %xmm3, %xmm3 vmovaps %xmm3, 0x1c80(%rsp) vmovaps 0x1c80(%rsp), %xmm3 vmovaps %xmm3, 0x2090(%rsp) 
vcvttps2dq 0x2090(%rsp), %xmm3 vmovdqa %xmm3, 0x1c70(%rsp) vmovdqa 0x1c70(%rsp), %xmm3 vmovdqa %xmm3, 0x20a0(%rsp) vcvtdq2ps 0x20a0(%rsp), %xmm3 vmovaps %xmm3, 0x1c90(%rsp) vmovaps 0x1c90(%rsp), %xmm4 vmovaps 0x1c80(%rsp), %xmm3 vmovaps %xmm4, 0x20c0(%rsp) vmovaps %xmm3, 0x20b0(%rsp) vmovaps 0x20b0(%rsp), %xmm3 vmovaps 0x20c0(%rsp), %xmm4 vcmpltps %xmm4, %xmm3, %xmm3 vmovaps %xmm3, 0x1c50(%rsp) vmovaps 0x1c50(%rsp), %xmm4 vmovaps 0x1c60(%rsp), %xmm3 vmovaps %xmm4, 0x20e0(%rsp) vmovaps %xmm3, 0x20d0(%rsp) vmovdqa 0x20e0(%rsp), %xmm3 vmovdqa 0x20d0(%rsp), %xmm4 vpand %xmm4, %xmm3, %xmm3 vmovdqa %xmm3, 0x1c50(%rsp) vmovaps 0x1c90(%rsp), %xmm4 vmovaps 0x1c50(%rsp), %xmm3 vmovaps %xmm4, 0x2030(%rsp) vmovaps %xmm3, 0x2020(%rsp) vmovaps 0x2030(%rsp), %xmm3 vmovaps 0x2020(%rsp), %xmm4 vsubps %xmm4, %xmm3, %xmm3 vmovaps %xmm3, 0x1c80(%rsp) vmovaps 0x1c80(%rsp), %xmm4 vmovaps 0x1ca0(%rsp), %xmm3 vmovaps %xmm4, 0x2140(%rsp) vmovaps 0x18ac0ef(%rip), %xmm4 # 0x1e02b00 vmovaps %xmm4, 0x2130(%rsp) vmovaps %xmm3, 0x2120(%rsp) vmovaps 0x2140(%rsp), %xmm5 vmovaps 0x2130(%rsp), %xmm4 vmovaps 0x2120(%rsp), %xmm3 vmovaps %xmm5, 0x2180(%rsp) vmovaps %xmm4, 0x2170(%rsp) vmovaps %xmm3, 0x2160(%rsp) vmovaps 0x2180(%rsp), %xmm4 vmovaps 0x2170(%rsp), %xmm3 vmovaps 0x2160(%rsp), %xmm5 vfnmadd213ps %xmm5, %xmm4, %xmm3 # xmm3 = -(xmm4 * xmm3) + xmm5 vmovaps %xmm3, 0x1ca0(%rsp) vmovaps 0x1c80(%rsp), %xmm4 vmovaps 0x1ca0(%rsp), %xmm3 vmovaps %xmm4, 0x2110(%rsp) vmovaps 0x18ac06b(%rip), %xmm4 # 0x1e02b10 vmovaps %xmm4, 0x2100(%rsp) vmovaps %xmm3, 0x20f0(%rsp) vmovaps 0x2110(%rsp), %xmm5 vmovaps 0x2100(%rsp), %xmm4 vmovaps 0x20f0(%rsp), %xmm3 vmovaps %xmm5, 0x21b0(%rsp) vmovaps %xmm4, 0x21a0(%rsp) vmovaps %xmm3, 0x2190(%rsp) vmovaps 0x21b0(%rsp), %xmm4 vmovaps 0x21a0(%rsp), %xmm3 vmovaps 0x2190(%rsp), %xmm5 vfnmadd213ps %xmm5, %xmm4, %xmm3 # xmm3 = -(xmm4 * xmm3) + xmm5 vmovaps %xmm3, 0x1ca0(%rsp) vmovaps 0x1ca0(%rsp), %xmm3 vmovaps %xmm3, 0x1ff0(%rsp) vmovaps %xmm3, 0x1fe0(%rsp) vmovaps 
0x1ff0(%rsp), %xmm3 vmovaps 0x1fe0(%rsp), %xmm4 vmulps %xmm4, %xmm3, %xmm3 vmovaps %xmm3, 0x1c90(%rsp) vmovaps 0x18abfc8(%rip), %xmm3 # 0x1e02b20 vmovaps %xmm3, 0x1c40(%rsp) vmovaps 0x1c40(%rsp), %xmm4 vmovaps 0x1ca0(%rsp), %xmm3 vmovaps %xmm4, 0x1e40(%rsp) vmovaps %xmm3, 0x1e30(%rsp) vmovaps 0x18abfa3(%rip), %xmm3 # 0x1e02b30 vmovaps %xmm3, 0x1e20(%rsp) vmovaps 0x1e40(%rsp), %xmm5 vmovaps 0x1e30(%rsp), %xmm4 vmovaps 0x1e20(%rsp), %xmm3 vmovaps %xmm5, 0x1e70(%rsp) vmovaps %xmm4, 0x1e60(%rsp) vmovaps %xmm3, 0x1e50(%rsp) vmovaps 0x1e70(%rsp), %xmm4 vmovaps 0x1e60(%rsp), %xmm3 vmovaps 0x1e50(%rsp), %xmm5 vfmadd213ps %xmm5, %xmm4, %xmm3 # xmm3 = (xmm4 * xmm3) + xmm5 vmovaps %xmm3, 0x1c40(%rsp) vmovaps 0x1c40(%rsp), %xmm4 vmovaps 0x1ca0(%rsp), %xmm3 vmovaps %xmm4, 0x1e10(%rsp) vmovaps %xmm3, 0x1e00(%rsp) vmovaps 0x18abf1f(%rip), %xmm3 # 0x1e02b40 vmovaps %xmm3, 0x1df0(%rsp) vmovaps 0x1e10(%rsp), %xmm5 vmovaps 0x1e00(%rsp), %xmm4 vmovaps 0x1df0(%rsp), %xmm3 vmovaps %xmm5, 0x1ea0(%rsp) vmovaps %xmm4, 0x1e90(%rsp) vmovaps %xmm3, 0x1e80(%rsp) vmovaps 0x1ea0(%rsp), %xmm4 vmovaps 0x1e90(%rsp), %xmm3 vmovaps 0x1e80(%rsp), %xmm5 vfmadd213ps %xmm5, %xmm4, %xmm3 # xmm3 = (xmm4 * xmm3) + xmm5 vmovaps %xmm3, 0x1c40(%rsp) vmovaps 0x1c40(%rsp), %xmm4 vmovaps 0x1ca0(%rsp), %xmm3 vmovaps %xmm4, 0x1de0(%rsp) vmovaps %xmm3, 0x1dd0(%rsp) vmovaps 0x18abe9b(%rip), %xmm3 # 0x1e02b50 vmovaps %xmm3, 0x1dc0(%rsp) vmovaps 0x1de0(%rsp), %xmm5 vmovaps 0x1dd0(%rsp), %xmm4 vmovaps 0x1dc0(%rsp), %xmm3 vmovaps %xmm5, 0x1ed0(%rsp) vmovaps %xmm4, 0x1ec0(%rsp) vmovaps %xmm3, 0x1eb0(%rsp) vmovaps 0x1ed0(%rsp), %xmm4 vmovaps 0x1ec0(%rsp), %xmm3 vmovaps 0x1eb0(%rsp), %xmm5 vfmadd213ps %xmm5, %xmm4, %xmm3 # xmm3 = (xmm4 * xmm3) + xmm5 vmovaps %xmm3, 0x1c40(%rsp) vmovaps 0x1c40(%rsp), %xmm4 vmovaps 0x1ca0(%rsp), %xmm3 vmovaps %xmm4, 0x1db0(%rsp) vmovaps %xmm3, 0x1da0(%rsp) vmovaps 0x18abe17(%rip), %xmm3 # 0x1e02b60 vmovaps %xmm3, 0x1d90(%rsp) vmovaps 0x1db0(%rsp), %xmm5 vmovaps 0x1da0(%rsp), %xmm4 vmovaps 
0x1d90(%rsp), %xmm3 vmovaps %xmm5, 0x1f00(%rsp) vmovaps %xmm4, 0x1ef0(%rsp) vmovaps %xmm3, 0x1ee0(%rsp) vmovaps 0x1f00(%rsp), %xmm4 vmovaps 0x1ef0(%rsp), %xmm3 vmovaps 0x1ee0(%rsp), %xmm5 vfmadd213ps %xmm5, %xmm4, %xmm3 # xmm3 = (xmm4 * xmm3) + xmm5 vmovaps %xmm3, 0x1c40(%rsp) vmovaps 0x1c40(%rsp), %xmm4 vmovaps 0x1ca0(%rsp), %xmm3 vmovaps %xmm4, 0x1d80(%rsp) vmovaps %xmm3, 0x1d70(%rsp) vmovaps %xmm0, 0x1d60(%rsp) vmovaps 0x1d80(%rsp), %xmm4 vmovaps 0x1d70(%rsp), %xmm3 vmovaps 0x1d60(%rsp), %xmm0 vmovaps %xmm4, 0x1f30(%rsp) vmovaps %xmm3, 0x1f20(%rsp) vmovaps %xmm0, 0x1f10(%rsp) vmovaps 0x1f30(%rsp), %xmm3 vmovaps 0x1f20(%rsp), %xmm0 vmovaps 0x1f10(%rsp), %xmm4 vfmadd213ps %xmm4, %xmm3, %xmm0 # xmm0 = (xmm3 * xmm0) + xmm4 vmovaps %xmm0, 0x1c40(%rsp) vmovaps 0x1c40(%rsp), %xmm4 vmovaps 0x1c90(%rsp), %xmm3 vmovaps 0x1ca0(%rsp), %xmm0 vmovaps %xmm4, 0x1d50(%rsp) vmovaps %xmm3, 0x1d40(%rsp) vmovaps %xmm0, 0x1d30(%rsp) vmovaps 0x1d50(%rsp), %xmm4 vmovaps 0x1d40(%rsp), %xmm3 vmovaps 0x1d30(%rsp), %xmm0 vmovaps %xmm4, 0x1f60(%rsp) vmovaps %xmm3, 0x1f50(%rsp) vmovaps %xmm0, 0x1f40(%rsp) vmovaps 0x1f60(%rsp), %xmm3 vmovaps 0x1f50(%rsp), %xmm0 vmovaps 0x1f40(%rsp), %xmm4 vfmadd213ps %xmm4, %xmm3, %xmm0 # xmm0 = (xmm3 * xmm0) + xmm4 vmovaps %xmm0, 0x1c40(%rsp) vmovaps 0x1c40(%rsp), %xmm3 vmovaps 0x1c60(%rsp), %xmm0 vmovaps %xmm3, 0x1d00(%rsp) vmovaps %xmm0, 0x1cf0(%rsp) vmovaps 0x1d00(%rsp), %xmm0 vmovaps 0x1cf0(%rsp), %xmm3 vaddps %xmm3, %xmm0, %xmm0 vmovaps %xmm0, 0x1c40(%rsp) vmovaps 0x1c80(%rsp), %xmm0 vmovaps %xmm0, 0x2080(%rsp) vcvttps2dq 0x2080(%rsp), %xmm0 vmovdqa %xmm0, 0x1c70(%rsp) vmovdqa 0x1c70(%rsp), %xmm0 vmovdqa %xmm0, 0x1cc0(%rsp) vmovdqa 0x18abc1d(%rip), %xmm0 # 0x1e02b70 vmovdqa %xmm0, 0x1cb0(%rsp) vmovdqa 0x1cc0(%rsp), %xmm0 vmovdqa 0x1cb0(%rsp), %xmm3 vpaddd %xmm3, %xmm0, %xmm0 vmovdqa %xmm0, 0x1c70(%rsp) vmovdqa 0x1c70(%rsp), %xmm0 vmovdqa %xmm0, 0x1ce0(%rsp) movl $0x17, 0x1cdc(%rsp) vmovdqa 0x1ce0(%rsp), %xmm0 movl 0x1cdc(%rsp), %eax vmovd %eax, %xmm3 
vpslld %xmm3, %xmm0, %xmm0 vmovdqa %xmm0, 0x1c70(%rsp) vmovdqa 0x1c70(%rsp), %xmm0 vmovdqa %xmm0, 0x2150(%rsp) vmovdqa 0x2150(%rsp), %xmm0 vmovaps %xmm0, 0x1c30(%rsp) vmovaps 0x1c40(%rsp), %xmm3 vmovaps 0x1c30(%rsp), %xmm0 vmovaps %xmm3, 0x1fd0(%rsp) vmovaps %xmm0, 0x1fc0(%rsp) vmovaps 0x1fd0(%rsp), %xmm0 vmulps 0x1fc0(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x1c40(%rsp) vmovaps 0x1c40(%rsp), %xmm0 vmovaps %xmm2, 0x1b90(%rsp) vmovaps %xmm0, 0x1b80(%rsp) vmovaps 0x1b90(%rsp), %xmm0 vaddps 0x1b80(%rsp), %xmm0, %xmm0 vmovaps %xmm1, 0x1c20(%rsp) vmovaps %xmm0, 0x1c10(%rsp) vmovaps 0x1c20(%rsp), %xmm0 vdivps 0x1c10(%rsp), %xmm0, %xmm1 vmovaps 0x1af0(%rsp), %xmm0 vmovaps %xmm1, 0x1be0(%rsp) vmovaps %xmm0, 0x1bd0(%rsp) vmovaps 0x1be0(%rsp), %xmm0 vmulps 0x1bd0(%rsp), %xmm0, %xmm1 vmovaps 0x1b00(%rsp), %xmm0 vmovaps %xmm1, 0x2070(%rsp) vmovaps %xmm0, 0x2060(%rsp) vmovaps 0x2070(%rsp), %xmm0 vsubps 0x2060(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x20(%rsp) vmovaps 0x20(%rsp), %xmm0 vmovaps 0x60(%rsp), %xmm1 vmovaps %xmm1, 0xfe0(%rsp) vmovaps %xmm0, 0xfd0(%rsp) vmovaps 0xfe0(%rsp), %xmm0 vmulps 0xfd0(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x10(%rsp) vmovaps 0x10(%rsp), %xmm0 vmovaps %xmm0, 0xbb0(%rsp) jmp 0x55739b movq 0xb90(%rsp), %rax movq %rax, 0xbd8(%rsp) movq $0x0, 0xbd0(%rsp) movq 0xbd8(%rsp), %rax movq (%rax), %rax movq 0xbd0(%rsp), %rcx vmovss (%rax,%rcx,4), %xmm0 vmovss %xmm0, 0xdbc(%rsp) vbroadcastss 0xdbc(%rsp), %xmm0 vmovaps %xmm0, 0xda0(%rsp) vmovaps 0xda0(%rsp), %xmm0 vmovaps %xmm0, 0xb60(%rsp) movq 0xb90(%rsp), %rax movq %rax, 0xbc8(%rsp) movq $0x1, 0xbc0(%rsp) movq 0xbc8(%rsp), %rax movq (%rax), %rax movq 0xbc0(%rsp), %rcx vmovss (%rax,%rcx,4), %xmm0 vmovss %xmm0, 0xd9c(%rsp) vbroadcastss 0xd9c(%rsp), %xmm0 vmovaps %xmm0, 0xd80(%rsp) vmovaps 0xd80(%rsp), %xmm0 vmovaps %xmm0, 0xb50(%rsp) vmovaps 0xba0(%rsp), %xmm2 vmovaps 0xb60(%rsp), %xmm1 vmovaps 0xb50(%rsp), %xmm0 vmovaps %xmm2, 0xef0(%rsp) vmovaps %xmm1, 0xee0(%rsp) vmovaps %xmm0, 0xed0(%rsp) movl $0x3f800000, 
0xf6c(%rsp) # imm = 0x3F800000 vbroadcastss 0xf6c(%rsp), %xmm0 vmovaps %xmm0, 0xf50(%rsp) vmovaps 0xf50(%rsp), %xmm0 vmovaps %xmm0, 0xec0(%rsp) vmovaps 0xef0(%rsp), %xmm1 vmovaps 0xee0(%rsp), %xmm0 vmovaps %xmm1, 0xfc0(%rsp) vmovaps %xmm0, 0xfb0(%rsp) vmovaps 0xfc0(%rsp), %xmm0 vmovaps 0xfb0(%rsp), %xmm1 vmulps %xmm1, %xmm0, %xmm1 vmovaps 0xed0(%rsp), %xmm0 vmovaps %xmm1, 0xf10(%rsp) vmovaps %xmm0, 0xf00(%rsp) vmovaps 0xf10(%rsp), %xmm0 vmovaps 0xf00(%rsp), %xmm1 vaddps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0xed0(%rsp) vmovaps 0xed0(%rsp), %xmm1 vxorps %xmm0, %xmm0, %xmm0 vmovaps %xmm0, 0xf20(%rsp) vmovaps 0xf20(%rsp), %xmm0 vmovaps %xmm1, 0xf40(%rsp) vmovaps %xmm0, 0xf30(%rsp) vmovaps 0xf40(%rsp), %xmm0 vmovaps 0xf30(%rsp), %xmm1 vmaxps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0xed0(%rsp) vmovaps 0xed0(%rsp), %xmm1 vmovaps 0xec0(%rsp), %xmm0 vmovaps %xmm1, 0xf80(%rsp) vmovaps %xmm0, 0xf70(%rsp) vmovaps 0xf80(%rsp), %xmm0 vmovaps 0xf70(%rsp), %xmm1 vminps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0xed0(%rsp) vmovaps 0xed0(%rsp), %xmm1 vmovaps 0xef0(%rsp), %xmm0 vmovaps %xmm1, 0xfa0(%rsp) vmovaps %xmm0, 0xf90(%rsp) vmovaps 0xfa0(%rsp), %xmm0 vmulps 0xf90(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0xbb0(%rsp) jmp 0x55739b vmovaps 0xba0(%rsp), %xmm0 vmovaps %xmm0, 0xbb0(%rsp) vmovaps 0xbb0(%rsp), %xmm0 vmovaps %xmm0, (%rsp) vmovaps (%rsp), %xmm0 vmovaps %xmm0, 0x460(%rsp) movq 0x4c0(%rsp), %rax movl 0x470(%rsp), %ecx shll $0x2, %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax vmovaps 0x460(%rsp), %xmm0 movq %rax, 0xc38(%rsp) vmovaps %xmm0, 0xc20(%rsp) vmovaps 0xc20(%rsp), %xmm0 movq 0xc38(%rsp), %rax vmovups %xmm0, (%rax) jmp 0x557404 movl 0x470(%rsp), %eax addl $0x1, %eax movl %eax, 0x470(%rsp) jmp 0x5530a2 movl 0x534(%rsp), %ecx shll $0x2, %ecx movq 0x4c0(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x4c0(%rsp) movl 0x474(%rsp), %eax addl $0x1, %eax movl %eax, 0x474(%rsp) jmp 0x553083 jmp 0x557456 movl 0x4cc(%rsp), %eax addl $0x1, %eax movl %eax, 
0x4cc(%rsp) jmp 0x552c0f leaq 0x510(%rsp), %rdi callq 0x998a0 addq $0x29b8, %rsp # imm = 0x29B8 retq movq 0x500(%rsp), %rdi callq 0x5e3b0 nop
/ysh329[P]ncnn/src/layer/x86/convolution_pack8to4.h
ncnn::convolution_pack4_sse(ncnn::Mat const&, ncnn::Mat&, ncnn::Mat const&, ncnn::Mat const&, int, int, int, int, int, int, int, ncnn::Mat const&, ncnn::Option const&)
static void convolution_pack4_sse(const Mat& bottom_blob, Mat& top_blob, const Mat& weight_data_packed, const Mat& bias_data, int kernel_w, int kernel_h, int dilation_w, int dilation_h, int stride_w, int stride_h, int activation_type, const Mat& activation_params, const Option& opt) { int w = bottom_blob.w; int channels = bottom_blob.c; int outw = top_blob.w; int outh = top_blob.h; int outch = top_blob.c; const int maxk = kernel_w * kernel_h; // kernel offsets std::vector<int> _space_ofs(maxk); int* space_ofs = &_space_ofs[0]; { int p1 = 0; int p2 = 0; int gap = w * dilation_h - kernel_w * dilation_w; for (int i = 0; i < kernel_h; i++) { for (int j = 0; j < kernel_w; j++) { space_ofs[p1] = p2; p1++; p2 += dilation_w; } p2 += gap; } } const float* bias_data_ptr = bias_data; #pragma omp parallel for num_threads(opt.num_threads) for (int p = 0; p < outch; p++) { float* outptr = top_blob.channel(p); for (int i = 0; i < outh; i++) { for (int j = 0; j < outw; j++) { __m128 _sum = _mm_setzero_ps(); if (bias_data_ptr) { _sum = _mm_loadu_ps(bias_data_ptr + p * 4); } const float* kptr = weight_data_packed.channel(p); // channels for (int q = 0; q < channels; q++) { const Mat m = bottom_blob.channel(q); const float* sptr = m.row(i * stride_h) + j * stride_w * 4; for (int k = 0; k < maxk; k++) { const float* slptr = sptr + space_ofs[k] * 4; __m128 _val0 = _mm_load1_ps(slptr); __m128 _val1 = _mm_load1_ps(slptr + 1); __m128 _val2 = _mm_load1_ps(slptr + 2); __m128 _val3 = _mm_load1_ps(slptr + 3); __m128 _w0 = _mm_load_ps(kptr); __m128 _w1 = _mm_load_ps(kptr + 4); __m128 _w2 = _mm_load_ps(kptr + 8); __m128 _w3 = _mm_load_ps(kptr + 12); _sum = _mm_comp_fmadd_ps(_val0, _w0, _sum); _sum = _mm_comp_fmadd_ps(_val1, _w1, _sum); _sum = _mm_comp_fmadd_ps(_val2, _w2, _sum); _sum = _mm_comp_fmadd_ps(_val3, _w3, _sum); kptr += 16; } } _sum = activation_sse(_sum, activation_type, activation_params); _mm_storeu_ps(outptr + j * 4, _sum); } outptr += outw * 4; } } }
subq $0x2648, %rsp # imm = 0x2648 movq 0x2680(%rsp), %rax movq 0x2678(%rsp), %rax movl 0x2670(%rsp), %eax movl 0x2668(%rsp), %eax movl 0x2660(%rsp), %eax movl 0x2658(%rsp), %eax movl 0x2650(%rsp), %eax movq %rdi, 0x410(%rsp) movq %rsi, 0x408(%rsp) movq %rdx, 0x400(%rsp) movq %rcx, 0x3f8(%rsp) movl %r8d, 0x3f4(%rsp) movl %r9d, 0x3f0(%rsp) movq 0x410(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x3ec(%rsp) movq 0x410(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0x3e8(%rsp) movq 0x408(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x3e4(%rsp) movq 0x408(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0x3e0(%rsp) movq 0x408(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0x3dc(%rsp) movl 0x3f4(%rsp), %eax movl 0x3f0(%rsp), %ecx imull %ecx, %eax movl %eax, 0x3d8(%rsp) movslq 0x3d8(%rsp), %rax movq %rax, 0x1c0(%rsp) leaq 0x3bf(%rsp), %rdi movq %rdi, 0x1c8(%rsp) callq 0x99670 movq 0x1c0(%rsp), %rsi movq 0x1c8(%rsp), %rdx leaq 0x3c0(%rsp), %rdi callq 0xa5960 jmp 0x55d740 leaq 0x3bf(%rsp), %rdi callq 0x99e50 leaq 0x3c0(%rsp), %rdi xorl %eax, %eax movl %eax, %esi callq 0x98a00 movq %rax, 0x3a0(%rsp) movl $0x0, 0x39c(%rsp) movl $0x0, 0x398(%rsp) movl 0x3ec(%rsp), %eax imull 0x2658(%rsp), %eax movl 0x3f4(%rsp), %ecx imull 0x2650(%rsp), %ecx subl %ecx, %eax movl %eax, 0x394(%rsp) movl $0x0, 0x390(%rsp) movl 0x390(%rsp), %eax cmpl 0x3f0(%rsp), %eax jge 0x55d881 movl $0x0, 0x38c(%rsp) movl 0x38c(%rsp), %eax cmpl 0x3f4(%rsp), %eax jge 0x55d856 movl 0x398(%rsp), %edx movq 0x3a0(%rsp), %rax movslq 0x39c(%rsp), %rcx movl %edx, (%rax,%rcx,4) movl 0x39c(%rsp), %eax addl $0x1, %eax movl %eax, 0x39c(%rsp) movl 0x2650(%rsp), %eax addl 0x398(%rsp), %eax movl %eax, 0x398(%rsp) movl 0x38c(%rsp), %eax addl $0x1, %eax movl %eax, 0x38c(%rsp) jmp 0x55d7cd movq %rax, %rcx movl %edx, %eax movq %rcx, 0x3b0(%rsp) movl %eax, 0x3ac(%rsp) leaq 0x3bf(%rsp), %rdi callq 0x99e50 jmp 0x561c13 movl 0x394(%rsp), %eax addl 0x398(%rsp), %eax movl %eax, 0x398(%rsp) movl 0x390(%rsp), %eax addl $0x1, %eax movl %eax, 
0x390(%rsp) jmp 0x55d7ae movq 0x3f8(%rsp), %rax movq %rax, 0x4e8(%rsp) movq 0x4e8(%rsp), %rax movq (%rax), %rax movq %rax, 0x1b8(%rsp) movq 0x1b8(%rsp), %rax movq %rax, 0x380(%rsp) movl $0x0, 0x37c(%rsp) movl 0x37c(%rsp), %eax cmpl 0x3dc(%rsp), %eax jge 0x561bfe movq 0x408(%rsp), %rcx movl 0x37c(%rsp), %eax leaq 0x328(%rsp), %rdx movq %rdx, 0x4b8(%rsp) movq %rcx, 0x4b0(%rsp) movl %eax, 0x4ac(%rsp) movq 0x4b0(%rsp), %rax movq %rax, 0x1a8(%rsp) movb $0x0, 0x4ab(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x4ac(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x328(%rsp), %r10 movq %r10, 0x2610(%rsp) movl %r9d, 0x260c(%rsp) movl %r8d, 0x2608(%rsp) movl %edi, 0x2604(%rsp) movq %rsi, 0x25f8(%rsp) movq %rdx, 0x25f0(%rsp) movl %ecx, 0x25ec(%rsp) movq %rax, 0x25e0(%rsp) movq 0x2610(%rsp), %rcx movq %rcx, 0x1b0(%rsp) movq 0x25f8(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x25f0(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x25ec(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x25e0(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x260c(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x2608(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x2604(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x2620(%rsp) movl $0x10, 0x261c(%rsp) movq 0x2620(%rsp), %rax movslq 0x261c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x261c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rcx movq 0x1b0(%rsp), %rax movq %rcx, 0x40(%rax) movq 0x1a8(%rsp), %rax movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x350(%rsp) cmpl $0x4, 0x28(%rax) jne 0x55da97 movq 0x1a8(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x368(%rsp) movb $0x1, 
0x4ab(%rsp) testb $0x1, 0x4ab(%rsp) jne 0x55dbd0 leaq 0x328(%rsp), %rax movq %rax, 0x4c0(%rsp) movq 0x4c0(%rsp), %rax movq %rax, 0x4d0(%rsp) movq 0x4d0(%rsp), %rax movq %rax, 0x1a0(%rsp) cmpq $0x0, 0x8(%rax) je 0x55db75 movq 0x1a0(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x4cc(%rsp) # imm = 0xFFFFFFFF movl 0x4cc(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x4c8(%rsp) cmpl $0x1, 0x4c8(%rsp) jne 0x55db75 movq 0x1a0(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x55db46 movq 0x1a0(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x55db44 jmp 0x55db73 movq 0x1a0(%rsp), %rax movq (%rax), %rax movq %rax, 0x570(%rsp) cmpq $0x0, 0x570(%rsp) je 0x55db71 movq 0x570(%rsp), %rdi callq 0x5e480 jmp 0x55db73 jmp 0x55db75 movq 0x1a0(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x55dbd0 movq %rax, %rdi callq 0x5fc90 jmp 0x55dbd2 leaq 0x328(%rsp), %rax movq %rax, 0x4d8(%rsp) movq 0x4d8(%rsp), %rax movq (%rax), %rax movq %rax, 0x198(%rsp) leaq 0x328(%rsp), %rax movq %rax, 0x418(%rsp) movq 0x418(%rsp), %rax movq %rax, 0x4a0(%rsp) movq 0x4a0(%rsp), %rax movq %rax, 0x190(%rsp) cmpq $0x0, 0x8(%rax) je 0x55dcbd movq 0x190(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x49c(%rsp) # imm = 0xFFFFFFFF movl 0x49c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x498(%rsp) cmpl $0x1, 0x498(%rsp) jne 0x55dcbd movq 0x190(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x55dc8e movq 0x190(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x55dc8c jmp 0x55dcbb movq 0x190(%rsp), %rax movq (%rax), %rax movq %rax, 0x578(%rsp) cmpq $0x0, 0x578(%rsp) je 0x55dcb9 movq 0x578(%rsp), %rdi callq 0x5e480 jmp 0x55dcbb jmp 0x55dcbd movq 0x190(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) 
movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x55dd18 movq %rax, %rdi callq 0x5fc90 movq 0x198(%rsp), %rax movq %rax, 0x370(%rsp) movl $0x0, 0x324(%rsp) movl 0x324(%rsp), %eax cmpl 0x3e0(%rsp), %eax jge 0x561be6 movl $0x0, 0x320(%rsp) movl 0x320(%rsp), %eax cmpl 0x3e4(%rsp), %eax jge 0x561bac vxorps %xmm0, %xmm0, %xmm0 vmovaps %xmm0, 0x7d0(%rsp) vmovaps 0x7d0(%rsp), %xmm0 vmovaps %xmm0, 0x310(%rsp) cmpq $0x0, 0x380(%rsp) je 0x55ddc9 movq 0x380(%rsp), %rax movl 0x37c(%rsp), %ecx shll $0x2, %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x5a8(%rsp) movq 0x5a8(%rsp), %rax vmovups (%rax), %xmm0 vmovaps %xmm0, 0x310(%rsp) movq 0x400(%rsp), %rcx movl 0x37c(%rsp), %eax leaq 0x2c0(%rsp), %rdx movq %rdx, 0x540(%rsp) movq %rcx, 0x538(%rsp) movl %eax, 0x534(%rsp) movq 0x538(%rsp), %rax movq %rax, 0x180(%rsp) movb $0x0, 0x533(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x534(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x2c0(%rsp), %r10 movq %r10, 0x25a0(%rsp) movl %r9d, 0x259c(%rsp) movl %r8d, 0x2598(%rsp) movl %edi, 0x2594(%rsp) movq %rsi, 0x2588(%rsp) movq %rdx, 0x2580(%rsp) movl %ecx, 0x257c(%rsp) movq %rax, 0x2570(%rsp) movq 0x25a0(%rsp), %rcx movq %rcx, 0x188(%rsp) movq 0x2588(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x2580(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x257c(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x2570(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x259c(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x2598(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x2594(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x2640(%rsp) movl $0x10, 0x263c(%rsp) 
movq 0x2640(%rsp), %rax movslq 0x263c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x263c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rcx movq 0x188(%rsp), %rax movq %rcx, 0x40(%rax) movq 0x180(%rsp), %rax movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x2e8(%rsp) cmpl $0x4, 0x28(%rax) jne 0x55df8d movq 0x180(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x300(%rsp) movb $0x1, 0x533(%rsp) testb $0x1, 0x533(%rsp) jne 0x55e0c6 leaq 0x2c0(%rsp), %rax movq %rax, 0x548(%rsp) movq 0x548(%rsp), %rax movq %rax, 0x558(%rsp) movq 0x558(%rsp), %rax movq %rax, 0x178(%rsp) cmpq $0x0, 0x8(%rax) je 0x55e06b movq 0x178(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x554(%rsp) # imm = 0xFFFFFFFF movl 0x554(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x550(%rsp) cmpl $0x1, 0x550(%rsp) jne 0x55e06b movq 0x178(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x55e03c movq 0x178(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x55e03a jmp 0x55e069 movq 0x178(%rsp), %rax movq (%rax), %rax movq %rax, 0x560(%rsp) cmpq $0x0, 0x560(%rsp) je 0x55e067 movq 0x560(%rsp), %rdi callq 0x5e480 jmp 0x55e069 jmp 0x55e06b movq 0x178(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x55e0c6 movq %rax, %rdi callq 0x5fc90 jmp 0x55e0c8 leaq 0x2c0(%rsp), %rax movq %rax, 0x4e0(%rsp) movq 0x4e0(%rsp), %rax movq (%rax), %rax movq %rax, 0x170(%rsp) leaq 0x2c0(%rsp), %rax movq %rax, 0x428(%rsp) movq 0x428(%rsp), %rax movq %rax, 0x480(%rsp) movq 0x480(%rsp), %rax movq %rax, 0x168(%rsp) cmpq $0x0, 0x8(%rax) je 0x55e1b3 movq 0x168(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x47c(%rsp) # imm = 0xFFFFFFFF movl 0x47c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 
0x478(%rsp) cmpl $0x1, 0x478(%rsp) jne 0x55e1b3 movq 0x168(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x55e184 movq 0x168(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x55e182 jmp 0x55e1b1 movq 0x168(%rsp), %rax movq (%rax), %rax movq %rax, 0x588(%rsp) cmpq $0x0, 0x588(%rsp) je 0x55e1af movq 0x588(%rsp), %rdi callq 0x5e480 jmp 0x55e1b1 jmp 0x55e1b3 movq 0x168(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x55e20e movq %rax, %rdi callq 0x5fc90 movq 0x170(%rsp), %rax movq %rax, 0x308(%rsp) movl $0x0, 0x2bc(%rsp) movl 0x2bc(%rsp), %eax cmpl 0x3e8(%rsp), %eax jge 0x55ebda movq 0x410(%rsp), %rcx movl 0x2bc(%rsp), %eax leaq 0x270(%rsp), %rdx movq %rdx, 0x510(%rsp) movq %rcx, 0x508(%rsp) movl %eax, 0x504(%rsp) movq 0x508(%rsp), %rax movq %rax, 0x158(%rsp) movb $0x0, 0x503(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x504(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x270(%rsp), %r10 movq %r10, 0x25d8(%rsp) movl %r9d, 0x25d4(%rsp) movl %r8d, 0x25d0(%rsp) movl %edi, 0x25cc(%rsp) movq %rsi, 0x25c0(%rsp) movq %rdx, 0x25b8(%rsp) movl %ecx, 0x25b4(%rsp) movq %rax, 0x25a8(%rsp) movq 0x25d8(%rsp), %rcx movq %rcx, 0x160(%rsp) movq 0x25c0(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x25b8(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x25b4(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x25a8(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x25d4(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x25d0(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x25cc(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 
0x10(%rcx), %rax movq %rax, 0x2630(%rsp) movl $0x10, 0x262c(%rsp) movq 0x2630(%rsp), %rax movslq 0x262c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x262c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rcx movq 0x160(%rsp), %rax movq %rcx, 0x40(%rax) movq 0x158(%rsp), %rax movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x298(%rsp) cmpl $0x4, 0x28(%rax) jne 0x55e401 movq 0x158(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x2b0(%rsp) movb $0x1, 0x503(%rsp) testb $0x1, 0x503(%rsp) jne 0x55e53a leaq 0x270(%rsp), %rax movq %rax, 0x518(%rsp) movq 0x518(%rsp), %rax movq %rax, 0x528(%rsp) movq 0x528(%rsp), %rax movq %rax, 0x150(%rsp) cmpq $0x0, 0x8(%rax) je 0x55e4df movq 0x150(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x524(%rsp) # imm = 0xFFFFFFFF movl 0x524(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x520(%rsp) cmpl $0x1, 0x520(%rsp) jne 0x55e4df movq 0x150(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x55e4b0 movq 0x150(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x55e4ae jmp 0x55e4dd movq 0x150(%rsp), %rax movq (%rax), %rax movq %rax, 0x568(%rsp) cmpq $0x0, 0x568(%rsp) je 0x55e4db movq 0x568(%rsp), %rdi callq 0x5e480 jmp 0x55e4dd jmp 0x55e4df movq 0x150(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x55e53a movq %rax, %rdi callq 0x5fc90 jmp 0x55e53c movl 0x324(%rsp), %eax imull 0x2668(%rsp), %eax leaq 0x270(%rsp), %rcx movq %rcx, 0x4f8(%rsp) movl %eax, 0x4f4(%rsp) movq 0x4f8(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0x4f4(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x148(%rsp) movq 0x148(%rsp), %rax movl 0x320(%rsp), %ecx imull 0x2660(%rsp), %ecx shll $0x2, 
%ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x268(%rsp) movl $0x0, 0x264(%rsp) movl 0x264(%rsp), %eax cmpl 0x3d8(%rsp), %eax jge 0x55ea9f movq 0x268(%rsp), %rax movq 0x3a0(%rsp), %rcx movslq 0x264(%rsp), %rdx movl (%rcx,%rdx,4), %ecx shll $0x2, %ecx movslq %ecx, %rcx leaq (%rax,%rcx,4), %rax movq %rax, 0x258(%rsp) movq 0x258(%rsp), %rax movq %rax, 0x648(%rsp) movq 0x648(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0x644(%rsp) vbroadcastss 0x644(%rsp), %xmm0 vmovaps %xmm0, 0x630(%rsp) vmovaps 0x630(%rsp), %xmm0 vmovaps %xmm0, 0x240(%rsp) movq 0x258(%rsp), %rax addq $0x4, %rax movq %rax, 0x628(%rsp) movq 0x628(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0x624(%rsp) vbroadcastss 0x624(%rsp), %xmm0 vmovaps %xmm0, 0x610(%rsp) vmovaps 0x610(%rsp), %xmm0 vmovaps %xmm0, 0x230(%rsp) movq 0x258(%rsp), %rax addq $0x8, %rax movq %rax, 0x608(%rsp) movq 0x608(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0x604(%rsp) vbroadcastss 0x604(%rsp), %xmm0 vmovaps %xmm0, 0x5f0(%rsp) vmovaps 0x5f0(%rsp), %xmm0 vmovaps %xmm0, 0x220(%rsp) movq 0x258(%rsp), %rax addq $0xc, %rax movq %rax, 0x5e8(%rsp) movq 0x5e8(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0x5e4(%rsp) vbroadcastss 0x5e4(%rsp), %xmm0 vmovaps %xmm0, 0x5d0(%rsp) vmovaps 0x5d0(%rsp), %xmm0 vmovaps %xmm0, 0x210(%rsp) movq 0x308(%rsp), %rax movq %rax, 0x5c8(%rsp) movq 0x5c8(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x200(%rsp) movq 0x308(%rsp), %rax addq $0x10, %rax movq %rax, 0x5c0(%rsp) movq 0x5c0(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x1f0(%rsp) movq 0x308(%rsp), %rax addq $0x20, %rax movq %rax, 0x5b8(%rsp) movq 0x5b8(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x1e0(%rsp) movq 0x308(%rsp), %rax addq $0x30, %rax movq %rax, 0x5b0(%rsp) movq 0x5b0(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x1d0(%rsp) vmovaps 0x240(%rsp), %xmm2 vmovaps 0x200(%rsp), %xmm1 vmovaps 0x310(%rsp), %xmm0 vmovaps %xmm2, 0x700(%rsp) vmovaps %xmm1, 0x6f0(%rsp) vmovaps %xmm0, 0x6e0(%rsp) vmovaps 
0x700(%rsp), %xmm2 vmovaps 0x6f0(%rsp), %xmm1 vmovaps 0x6e0(%rsp), %xmm0 vmovaps %xmm2, 0x730(%rsp) vmovaps %xmm1, 0x720(%rsp) vmovaps %xmm0, 0x710(%rsp) vmovaps 0x730(%rsp), %xmm1 vmovaps 0x720(%rsp), %xmm0 vmovaps 0x710(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x130(%rsp) vmovaps 0x130(%rsp), %xmm0 vmovaps %xmm0, 0x310(%rsp) vmovaps 0x230(%rsp), %xmm2 vmovaps 0x1f0(%rsp), %xmm1 vmovaps 0x310(%rsp), %xmm0 vmovaps %xmm2, 0x6d0(%rsp) vmovaps %xmm1, 0x6c0(%rsp) vmovaps %xmm0, 0x6b0(%rsp) vmovaps 0x6d0(%rsp), %xmm2 vmovaps 0x6c0(%rsp), %xmm1 vmovaps 0x6b0(%rsp), %xmm0 vmovaps %xmm2, 0x760(%rsp) vmovaps %xmm1, 0x750(%rsp) vmovaps %xmm0, 0x740(%rsp) vmovaps 0x760(%rsp), %xmm1 vmovaps 0x750(%rsp), %xmm0 vmovaps 0x740(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x120(%rsp) vmovaps 0x120(%rsp), %xmm0 vmovaps %xmm0, 0x310(%rsp) vmovaps 0x220(%rsp), %xmm2 vmovaps 0x1e0(%rsp), %xmm1 vmovaps 0x310(%rsp), %xmm0 vmovaps %xmm2, 0x6a0(%rsp) vmovaps %xmm1, 0x690(%rsp) vmovaps %xmm0, 0x680(%rsp) vmovaps 0x6a0(%rsp), %xmm2 vmovaps 0x690(%rsp), %xmm1 vmovaps 0x680(%rsp), %xmm0 vmovaps %xmm2, 0x790(%rsp) vmovaps %xmm1, 0x780(%rsp) vmovaps %xmm0, 0x770(%rsp) vmovaps 0x790(%rsp), %xmm1 vmovaps 0x780(%rsp), %xmm0 vmovaps 0x770(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x110(%rsp) vmovaps 0x110(%rsp), %xmm0 vmovaps %xmm0, 0x310(%rsp) vmovaps 0x210(%rsp), %xmm2 vmovaps 0x1d0(%rsp), %xmm1 vmovaps 0x310(%rsp), %xmm0 vmovaps %xmm2, 0x670(%rsp) vmovaps %xmm1, 0x660(%rsp) vmovaps %xmm0, 0x650(%rsp) vmovaps 0x670(%rsp), %xmm2 vmovaps 0x660(%rsp), %xmm1 vmovaps 0x650(%rsp), %xmm0 vmovaps %xmm2, 0x7c0(%rsp) vmovaps %xmm1, 0x7b0(%rsp) vmovaps %xmm0, 0x7a0(%rsp) vmovaps 0x7c0(%rsp), %xmm1 vmovaps 0x7b0(%rsp), %xmm0 vmovaps 0x7a0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x100(%rsp) vmovaps 0x100(%rsp), %xmm0 
vmovaps %xmm0, 0x310(%rsp) movq 0x308(%rsp), %rax addq $0x40, %rax movq %rax, 0x308(%rsp) movl 0x264(%rsp), %eax addl $0x1, %eax movl %eax, 0x264(%rsp) jmp 0x55e5c4 leaq 0x270(%rsp), %rax movq %rax, 0x438(%rsp) movq 0x438(%rsp), %rax movq %rax, 0x460(%rsp) movq 0x460(%rsp), %rax movq %rax, 0xf8(%rsp) cmpq $0x0, 0x8(%rax) je 0x55eb67 movq 0xf8(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x45c(%rsp) # imm = 0xFFFFFFFF movl 0x45c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x458(%rsp) cmpl $0x1, 0x458(%rsp) jne 0x55eb67 movq 0xf8(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x55eb38 movq 0xf8(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x55eb36 jmp 0x55eb65 movq 0xf8(%rsp), %rax movq (%rax), %rax movq %rax, 0x598(%rsp) cmpq $0x0, 0x598(%rsp) je 0x55eb63 movq 0x598(%rsp), %rdi callq 0x5e480 jmp 0x55eb65 jmp 0x55eb67 movq 0xf8(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x55ebc2 movq %rax, %rdi callq 0x5fc90 jmp 0x55ebc4 movl 0x2bc(%rsp), %eax addl $0x1, %eax movl %eax, 0x2bc(%rsp) jmp 0x55e229 vmovaps 0x310(%rsp), %xmm0 movl 0x2670(%rsp), %ecx movq 0x2678(%rsp), %rax vmovaps %xmm0, 0x830(%rsp) movl %ecx, 0x82c(%rsp) movq %rax, 0x820(%rsp) movl 0x82c(%rsp), %eax decl %eax movl %eax, %ecx movq %rcx, 0xf0(%rsp) subl $0x5, %eax ja 0x561b1d movq 0xf0(%rsp), %rax leaq 0x18a765b(%rip), %rcx # 0x1e06290 movslq (%rcx,%rax,4), %rax addq %rcx, %rax jmpq *%rax vmovaps 0x830(%rsp), %xmm1 vxorps %xmm0, %xmm0, %xmm0 vmovaps %xmm0, 0x8a0(%rsp) vmovaps 0x8a0(%rsp), %xmm0 vmovaps %xmm1, 0x8e0(%rsp) vmovaps %xmm0, 0x8d0(%rsp) vmovaps 0x8e0(%rsp), %xmm0 vmovaps 0x8d0(%rsp), %xmm1 vmaxps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x840(%rsp) jmp 0x561b2f vmovaps 0x830(%rsp), %xmm1 movq 0x820(%rsp), %rax movq %rax, 0x898(%rsp) movq $0x0, 0x890(%rsp) 
movq 0x898(%rsp), %rax movq (%rax), %rax movq 0x890(%rsp), %rcx vmovss (%rax,%rcx,4), %xmm0 vmovaps %xmm1, 0x940(%rsp) vmovss %xmm0, 0x93c(%rsp) vxorps %xmm0, %xmm0, %xmm0 vmovaps %xmm0, 0x980(%rsp) vmovaps 0x980(%rsp), %xmm2 vmovaps 0x940(%rsp), %xmm1 vmovaps %xmm2, 0x9a0(%rsp) vmovaps %xmm1, 0x990(%rsp) vmovaps 0x9a0(%rsp), %xmm1 vmovaps 0x990(%rsp), %xmm2 vmaxps %xmm2, %xmm1, %xmm1 vmovaps %xmm1, 0x920(%rsp) vmovaps %xmm0, 0x970(%rsp) vmovaps 0x970(%rsp), %xmm1 vmovaps 0x940(%rsp), %xmm0 vmovaps %xmm1, 0xa60(%rsp) vmovaps %xmm0, 0xa50(%rsp) vmovaps 0xa60(%rsp), %xmm0 vmovaps 0xa50(%rsp), %xmm1 vminps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x910(%rsp) vmovaps 0x920(%rsp), %xmm1 vmovss 0x93c(%rsp), %xmm0 vmovss %xmm0, 0x9cc(%rsp) vbroadcastss 0x9cc(%rsp), %xmm0 vmovaps %xmm0, 0x9b0(%rsp) vmovaps 0x9b0(%rsp), %xmm2 vmovaps 0x910(%rsp), %xmm0 vmovaps %xmm2, 0xc90(%rsp) vmovaps %xmm0, 0xc80(%rsp) vmovaps 0xc90(%rsp), %xmm0 vmulps 0xc80(%rsp), %xmm0, %xmm0 vmovaps %xmm1, 0x960(%rsp) vmovaps %xmm0, 0x950(%rsp) vmovaps 0x960(%rsp), %xmm0 vaddps 0x950(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0xe0(%rsp) vmovaps 0xe0(%rsp), %xmm0 vmovaps %xmm0, 0x840(%rsp) jmp 0x561b2f movq 0x820(%rsp), %rax movq %rax, 0x888(%rsp) movq $0x0, 0x880(%rsp) movq 0x888(%rsp), %rax movq (%rax), %rax movq 0x880(%rsp), %rcx vmovss (%rax,%rcx,4), %xmm0 vmovss %xmm0, 0xa0c(%rsp) vbroadcastss 0xa0c(%rsp), %xmm0 vmovaps %xmm0, 0x9f0(%rsp) vmovaps 0x9f0(%rsp), %xmm0 vmovaps %xmm0, 0x810(%rsp) movq 0x820(%rsp), %rax movq %rax, 0x878(%rsp) movq $0x1, 0x870(%rsp) movq 0x878(%rsp), %rax movq (%rax), %rax movq 0x870(%rsp), %rcx vmovss (%rax,%rcx,4), %xmm0 vmovss %xmm0, 0x9ec(%rsp) vbroadcastss 0x9ec(%rsp), %xmm0 vmovaps %xmm0, 0x9d0(%rsp) vmovaps 0x9d0(%rsp), %xmm0 vmovaps %xmm0, 0x800(%rsp) vmovaps 0x830(%rsp), %xmm1 vmovaps 0x810(%rsp), %xmm0 vmovaps %xmm1, 0x900(%rsp) vmovaps %xmm0, 0x8f0(%rsp) vmovaps 0x900(%rsp), %xmm0 vmovaps 0x8f0(%rsp), %xmm1 vmaxps %xmm1, %xmm0, %xmm1 vmovaps 0x800(%rsp), %xmm0 vmovaps 
%xmm1, 0xa80(%rsp) vmovaps %xmm0, 0xa70(%rsp) vmovaps 0xa80(%rsp), %xmm0 vmovaps 0xa70(%rsp), %xmm1 vminps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x840(%rsp) jmp 0x561b2f vmovaps 0x830(%rsp), %xmm0 vmovaps %xmm0, 0xaa0(%rsp) movl $0x3f800000, 0xafc(%rsp) # imm = 0x3F800000 vbroadcastss 0xafc(%rsp), %xmm0 vmovaps %xmm0, 0xae0(%rsp) vmovaps 0xae0(%rsp), %xmm0 vmovaps %xmm0, 0xa90(%rsp) vmovaps 0xa90(%rsp), %xmm0 vmovaps %xmm0, 0xb0(%rsp) vmovaps %xmm0, 0xc0(%rsp) vxorps %xmm0, %xmm0, %xmm0 vmovaps %xmm0, 0xad0(%rsp) vmovaps 0xad0(%rsp), %xmm2 vmovaps 0xaa0(%rsp), %xmm1 vmovaps %xmm2, 0x14f0(%rsp) vmovaps %xmm1, 0x14e0(%rsp) vmovaps 0x14f0(%rsp), %xmm1 vmovaps 0x14e0(%rsp), %xmm2 vsubps %xmm2, %xmm1, %xmm1 vmovaps %xmm1, 0x1120(%rsp) vmovaps %xmm0, 0x13f0(%rsp) vmovaps 0x13f0(%rsp), %xmm0 vmovaps %xmm0, 0x1110(%rsp) vmovaps 0x18a3a7d(%rip), %xmm0 # 0x1e02ab0 vmovaps %xmm0, 0x10e0(%rsp) vmovaps 0x1120(%rsp), %xmm0 vmovaps %xmm0, 0x1430(%rsp) vmovaps 0x18a3a6a(%rip), %xmm0 # 0x1e02ac0 vmovaps %xmm0, 0x1420(%rsp) vmovaps 0x1430(%rsp), %xmm0 vmovaps 0x1420(%rsp), %xmm1 vminps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x1120(%rsp) vmovaps 0x1120(%rsp), %xmm0 vmovaps %xmm0, 0x1410(%rsp) vmovaps 0x18a3a38(%rip), %xmm0 # 0x1e02ad0 vmovaps %xmm0, 0x1400(%rsp) vmovaps 0x1410(%rsp), %xmm0 vmovaps 0x1400(%rsp), %xmm1 vmaxps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x1120(%rsp) vmovaps 0x1120(%rsp), %xmm0 vmovaps %xmm0, 0x1490(%rsp) vmovaps 0x18a3a06(%rip), %xmm0 # 0x1e02ae0 vmovaps %xmm0, 0x1480(%rsp) vmovaps 0x1490(%rsp), %xmm0 vmovaps 0x1480(%rsp), %xmm1 vmulps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x1100(%rsp) vmovaps 0x1100(%rsp), %xmm0 vmovaps %xmm0, 0x11a0(%rsp) vmovaps 0x18a39d4(%rip), %xmm0 # 0x1e02af0 vmovaps %xmm0, 0x1190(%rsp) vmovaps 0x11a0(%rsp), %xmm0 vmovaps 0x1190(%rsp), %xmm1 vaddps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x1100(%rsp) vmovaps 0x1100(%rsp), %xmm0 vmovaps %xmm0, 0x1510(%rsp) vcvttps2dq 0x1510(%rsp), %xmm0 vmovdqa %xmm0, 0x10f0(%rsp) vmovdqa 0x10f0(%rsp), %xmm0 vmovdqa 
%xmm0, 0x1540(%rsp) vcvtdq2ps 0x1540(%rsp), %xmm0 vmovaps %xmm0, 0x1110(%rsp) vmovaps 0x1110(%rsp), %xmm1 vmovaps 0x1100(%rsp), %xmm0 vmovaps %xmm1, 0x1570(%rsp) vmovaps %xmm0, 0x1560(%rsp) vmovaps 0x1560(%rsp), %xmm0 vmovaps 0x1570(%rsp), %xmm1 vcmpltps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x10d0(%rsp) vmovaps 0x10d0(%rsp), %xmm1 vmovaps 0x10e0(%rsp), %xmm0 vmovaps %xmm1, 0x15b0(%rsp) vmovaps %xmm0, 0x15a0(%rsp) vmovdqa 0x15b0(%rsp), %xmm0 vmovdqa 0x15a0(%rsp), %xmm1 vpand %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0x10d0(%rsp) vmovaps 0x1110(%rsp), %xmm1 vmovaps 0x10d0(%rsp), %xmm0 vmovaps %xmm1, 0x14b0(%rsp) vmovaps %xmm0, 0x14a0(%rsp) vmovaps 0x14b0(%rsp), %xmm0 vmovaps 0x14a0(%rsp), %xmm1 vsubps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x1100(%rsp) vmovaps 0x1100(%rsp), %xmm1 vmovaps 0x1120(%rsp), %xmm0 vmovaps %xmm1, 0x1630(%rsp) vmovaps 0x18a3887(%rip), %xmm1 # 0x1e02b00 vmovaps %xmm1, 0x1620(%rsp) vmovaps %xmm0, 0x1610(%rsp) vmovaps 0x1630(%rsp), %xmm2 vmovaps 0x1620(%rsp), %xmm1 vmovaps 0x1610(%rsp), %xmm0 vmovaps %xmm2, 0x1740(%rsp) vmovaps %xmm1, 0x1730(%rsp) vmovaps %xmm0, 0x1720(%rsp) vmovaps 0x1740(%rsp), %xmm0 vmovdqa 0x18a790e(%rip), %xmm1 # 0x1e06be0 vpxor %xmm1, %xmm0, %xmm1 vmovaps 0x1730(%rsp), %xmm0 vmovaps 0x1720(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0xd0(%rsp) vmovaps 0xd0(%rsp), %xmm0 vmovaps %xmm0, 0x1120(%rsp) vmovaps 0x1100(%rsp), %xmm1 vmovaps 0x1120(%rsp), %xmm0 vmovaps %xmm1, 0x1600(%rsp) vmovaps 0x18a37e5(%rip), %xmm1 # 0x1e02b10 vmovaps %xmm1, 0x15f0(%rsp) vmovaps %xmm0, 0x15e0(%rsp) vmovaps 0x1600(%rsp), %xmm2 vmovaps 0x15f0(%rsp), %xmm1 vmovaps 0x15e0(%rsp), %xmm0 vmovaps %xmm2, 0x1770(%rsp) vmovaps %xmm1, 0x1760(%rsp) vmovaps %xmm0, 0x1750(%rsp) vmovaps 0x1770(%rsp), %xmm0 vmovdqa 0x18a785c(%rip), %xmm1 # 0x1e06be0 vpxor %xmm1, %xmm0, %xmm1 vmovaps 0x1760(%rsp), %xmm0 vmovaps 0x1750(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0xa0(%rsp) vmovaps 
0xa0(%rsp), %xmm0 vmovaps %xmm0, 0x1120(%rsp) vmovaps 0x1120(%rsp), %xmm0 vmovaps %xmm0, 0x1470(%rsp) vmovaps %xmm0, 0x1460(%rsp) vmovaps 0x1470(%rsp), %xmm0 vmovaps 0x1460(%rsp), %xmm1 vmulps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x1110(%rsp) vmovaps 0x18a3724(%rip), %xmm0 # 0x1e02b20 vmovaps %xmm0, 0x10c0(%rsp) vmovaps 0x10c0(%rsp), %xmm1 vmovaps 0x1120(%rsp), %xmm0 vmovaps %xmm1, 0x12c0(%rsp) vmovaps %xmm0, 0x12b0(%rsp) vmovaps 0x18a36ff(%rip), %xmm0 # 0x1e02b30 vmovaps %xmm0, 0x12a0(%rsp) vmovaps 0x12c0(%rsp), %xmm2 vmovaps 0x12b0(%rsp), %xmm1 vmovaps 0x12a0(%rsp), %xmm0 vmovaps %xmm2, 0x12f0(%rsp) vmovaps %xmm1, 0x12e0(%rsp) vmovaps %xmm0, 0x12d0(%rsp) vmovaps 0x12f0(%rsp), %xmm1 vmovaps 0x12e0(%rsp), %xmm0 vmovaps 0x12d0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x10c0(%rsp) vmovaps 0x10c0(%rsp), %xmm1 vmovaps 0x1120(%rsp), %xmm0 vmovaps %xmm1, 0x1290(%rsp) vmovaps %xmm0, 0x1280(%rsp) vmovaps 0x18a367b(%rip), %xmm0 # 0x1e02b40 vmovaps %xmm0, 0x1270(%rsp) vmovaps 0x1290(%rsp), %xmm2 vmovaps 0x1280(%rsp), %xmm1 vmovaps 0x1270(%rsp), %xmm0 vmovaps %xmm2, 0x1320(%rsp) vmovaps %xmm1, 0x1310(%rsp) vmovaps %xmm0, 0x1300(%rsp) vmovaps 0x1320(%rsp), %xmm1 vmovaps 0x1310(%rsp), %xmm0 vmovaps 0x1300(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x10c0(%rsp) vmovaps 0x10c0(%rsp), %xmm1 vmovaps 0x1120(%rsp), %xmm0 vmovaps %xmm1, 0x1260(%rsp) vmovaps %xmm0, 0x1250(%rsp) vmovaps 0x18a35f7(%rip), %xmm0 # 0x1e02b50 vmovaps %xmm0, 0x1240(%rsp) vmovaps 0x1260(%rsp), %xmm2 vmovaps 0x1250(%rsp), %xmm1 vmovaps 0x1240(%rsp), %xmm0 vmovaps %xmm2, 0x1350(%rsp) vmovaps %xmm1, 0x1340(%rsp) vmovaps %xmm0, 0x1330(%rsp) vmovaps 0x1350(%rsp), %xmm1 vmovaps 0x1340(%rsp), %xmm0 vmovaps 0x1330(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x10c0(%rsp) vmovaps 0x10c0(%rsp), %xmm1 vmovaps 0x1120(%rsp), %xmm0 vmovaps %xmm1, 0x1230(%rsp) vmovaps %xmm0, 0x1220(%rsp) 
vmovaps 0x18a3573(%rip), %xmm0 # 0x1e02b60 vmovaps %xmm0, 0x1210(%rsp) vmovaps 0x1230(%rsp), %xmm2 vmovaps 0x1220(%rsp), %xmm1 vmovaps 0x1210(%rsp), %xmm0 vmovaps %xmm2, 0x1380(%rsp) vmovaps %xmm1, 0x1370(%rsp) vmovaps %xmm0, 0x1360(%rsp) vmovaps 0x1380(%rsp), %xmm1 vmovaps 0x1370(%rsp), %xmm0 vmovaps 0x1360(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x10c0(%rsp) vmovaps 0x10c0(%rsp), %xmm1 vmovaps 0x1120(%rsp), %xmm0 vmovaps %xmm1, 0x1200(%rsp) vmovaps %xmm0, 0x11f0(%rsp) vmovaps 0x18a346f(%rip), %xmm0 # 0x1e02af0 vmovaps %xmm0, 0x11e0(%rsp) vmovaps 0x1200(%rsp), %xmm2 vmovaps 0x11f0(%rsp), %xmm1 vmovaps 0x11e0(%rsp), %xmm0 vmovaps %xmm2, 0x13b0(%rsp) vmovaps %xmm1, 0x13a0(%rsp) vmovaps %xmm0, 0x1390(%rsp) vmovaps 0x13b0(%rsp), %xmm1 vmovaps 0x13a0(%rsp), %xmm0 vmovaps 0x1390(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x10c0(%rsp) vmovaps 0x10c0(%rsp), %xmm2 vmovaps 0x1110(%rsp), %xmm1 vmovaps 0x1120(%rsp), %xmm0 vmovaps %xmm2, 0x11d0(%rsp) vmovaps %xmm1, 0x11c0(%rsp) vmovaps %xmm0, 0x11b0(%rsp) vmovaps 0x11d0(%rsp), %xmm2 vmovaps 0x11c0(%rsp), %xmm1 vmovaps 0x11b0(%rsp), %xmm0 vmovaps %xmm2, 0x13e0(%rsp) vmovaps %xmm1, 0x13d0(%rsp) vmovaps %xmm0, 0x13c0(%rsp) vmovaps 0x13e0(%rsp), %xmm1 vmovaps 0x13d0(%rsp), %xmm0 vmovaps 0x13c0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x10c0(%rsp) vmovaps 0x10c0(%rsp), %xmm1 vmovaps 0x10e0(%rsp), %xmm0 vmovaps %xmm1, 0x1180(%rsp) vmovaps %xmm0, 0x1170(%rsp) vmovaps 0x1180(%rsp), %xmm0 vmovaps 0x1170(%rsp), %xmm1 vaddps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x10c0(%rsp) vmovaps 0x1100(%rsp), %xmm0 vmovaps %xmm0, 0x1500(%rsp) vcvttps2dq 0x1500(%rsp), %xmm0 vmovdqa %xmm0, 0x10f0(%rsp) vmovdqa 0x10f0(%rsp), %xmm0 vmovdqa %xmm0, 0x1140(%rsp) vmovdqa 0x18a3371(%rip), %xmm0 # 0x1e02b70 vmovdqa %xmm0, 0x1130(%rsp) vmovdqa 0x1140(%rsp), %xmm0 vmovdqa 0x1130(%rsp), %xmm1 vpaddd %xmm1, %xmm0, %xmm0 
vmovdqa %xmm0, 0x10f0(%rsp) vmovdqa 0x10f0(%rsp), %xmm0 vmovdqa %xmm0, 0x1160(%rsp) movl $0x17, 0x115c(%rsp) vmovdqa 0x1160(%rsp), %xmm0 movl 0x115c(%rsp), %eax vmovd %eax, %xmm1 vpslld %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0x10f0(%rsp) vmovdqa 0x10f0(%rsp), %xmm0 vmovdqa %xmm0, 0x16a0(%rsp) vmovdqa 0x16a0(%rsp), %xmm0 vmovaps %xmm0, 0x10b0(%rsp) vmovaps 0x10c0(%rsp), %xmm1 vmovaps 0x10b0(%rsp), %xmm0 vmovaps %xmm1, 0x1450(%rsp) vmovaps %xmm0, 0x1440(%rsp) vmovaps 0x1450(%rsp), %xmm0 vmulps 0x1440(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x10c0(%rsp) vmovaps 0x10c0(%rsp), %xmm0 vmovaps %xmm0, 0x90(%rsp) vmovaps 0xc0(%rsp), %xmm1 vmovaps 0x90(%rsp), %xmm0 vmovaps 0xb0(%rsp), %xmm2 vmovaps %xmm2, 0xac0(%rsp) vmovaps %xmm0, 0xab0(%rsp) vmovaps 0xac0(%rsp), %xmm0 vaddps 0xab0(%rsp), %xmm0, %xmm0 vmovaps %xmm1, 0xcb0(%rsp) vmovaps %xmm0, 0xca0(%rsp) vmovaps 0xcb0(%rsp), %xmm0 vdivps 0xca0(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x80(%rsp) vmovaps 0x80(%rsp), %xmm0 vmovaps %xmm0, 0x840(%rsp) jmp 0x561b2f vmovaps 0x830(%rsp), %xmm0 vmovaps %xmm0, 0xb00(%rsp) vmovaps 0xb00(%rsp), %xmm0 vmovaps %xmm0, 0x60(%rsp) vmovaps %xmm0, 0xd30(%rsp) vxorps %xmm0, %xmm0, %xmm0 vmovaps %xmm0, 0x1000(%rsp) vmovaps 0x1000(%rsp), %xmm0 vmovaps %xmm0, 0xd20(%rsp) vmovaps 0x18a3102(%rip), %xmm0 # 0x1e02ab0 vmovaps %xmm0, 0xcf0(%rsp) vmovaps 0xd30(%rsp), %xmm0 vmovaps %xmm0, 0x1040(%rsp) vmovaps 0x18a30ef(%rip), %xmm0 # 0x1e02ac0 vmovaps %xmm0, 0x1030(%rsp) vmovaps 0x1040(%rsp), %xmm0 vmovaps 0x1030(%rsp), %xmm1 vminps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0xd30(%rsp) vmovaps 0xd30(%rsp), %xmm0 vmovaps %xmm0, 0x1020(%rsp) vmovaps 0x18a30bd(%rip), %xmm0 # 0x1e02ad0 vmovaps %xmm0, 0x1010(%rsp) vmovaps 0x1020(%rsp), %xmm0 vmovaps 0x1010(%rsp), %xmm1 vmaxps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0xd30(%rsp) vmovaps 0xd30(%rsp), %xmm0 vmovaps %xmm0, 0x10a0(%rsp) vmovaps 0x18a308b(%rip), %xmm0 # 0x1e02ae0 vmovaps %xmm0, 0x1090(%rsp) vmovaps 0x10a0(%rsp), %xmm0 vmovaps 0x1090(%rsp), %xmm1 vmulps %xmm1, %xmm0, %xmm0 
vmovaps %xmm0, 0xd10(%rsp) vmovaps 0xd10(%rsp), %xmm0 vmovaps %xmm0, 0xdb0(%rsp) vmovaps 0x18a3059(%rip), %xmm0 # 0x1e02af0 vmovaps %xmm0, 0xda0(%rsp) vmovaps 0xdb0(%rsp), %xmm0 vmovaps 0xda0(%rsp), %xmm1 vaddps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0xd10(%rsp) vmovaps 0xd10(%rsp), %xmm0 vmovaps %xmm0, 0x1530(%rsp) vcvttps2dq 0x1530(%rsp), %xmm0 vmovdqa %xmm0, 0xd00(%rsp) vmovdqa 0xd00(%rsp), %xmm0 vmovdqa %xmm0, 0x1550(%rsp) vcvtdq2ps 0x1550(%rsp), %xmm0 vmovaps %xmm0, 0xd20(%rsp) vmovaps 0xd20(%rsp), %xmm1 vmovaps 0xd10(%rsp), %xmm0 vmovaps %xmm1, 0x1590(%rsp) vmovaps %xmm0, 0x1580(%rsp) vmovaps 0x1580(%rsp), %xmm0 vmovaps 0x1590(%rsp), %xmm1 vcmpltps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0xce0(%rsp) vmovaps 0xce0(%rsp), %xmm1 vmovaps 0xcf0(%rsp), %xmm0 vmovaps %xmm1, 0x15d0(%rsp) vmovaps %xmm0, 0x15c0(%rsp) vmovdqa 0x15d0(%rsp), %xmm0 vmovdqa 0x15c0(%rsp), %xmm1 vpand %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0xce0(%rsp) vmovaps 0xd20(%rsp), %xmm1 vmovaps 0xce0(%rsp), %xmm0 vmovaps %xmm1, 0x14d0(%rsp) vmovaps %xmm0, 0x14c0(%rsp) vmovaps 0x14d0(%rsp), %xmm0 vmovaps 0x14c0(%rsp), %xmm1 vsubps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0xd10(%rsp) vmovaps 0xd10(%rsp), %xmm1 vmovaps 0xd30(%rsp), %xmm0 vmovaps %xmm1, 0x1690(%rsp) vmovaps 0x18a2f0c(%rip), %xmm1 # 0x1e02b00 vmovaps %xmm1, 0x1680(%rsp) vmovaps %xmm0, 0x1670(%rsp) vmovaps 0x1690(%rsp), %xmm2 vmovaps 0x1680(%rsp), %xmm1 vmovaps 0x1670(%rsp), %xmm0 vmovaps %xmm2, 0x16e0(%rsp) vmovaps %xmm1, 0x16d0(%rsp) vmovaps %xmm0, 0x16c0(%rsp) vmovaps 0x16e0(%rsp), %xmm0 vmovdqa 0x18a6f93(%rip), %xmm1 # 0x1e06be0 vpxor %xmm1, %xmm0, %xmm1 vmovaps 0x16d0(%rsp), %xmm0 vmovaps 0x16c0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x70(%rsp) vmovaps 0x70(%rsp), %xmm0 vmovaps %xmm0, 0xd30(%rsp) vmovaps 0xd10(%rsp), %xmm1 vmovaps 0xd30(%rsp), %xmm0 vmovaps %xmm1, 0x1660(%rsp) vmovaps 0x18a2e70(%rip), %xmm1 # 0x1e02b10 vmovaps %xmm1, 0x1650(%rsp) vmovaps %xmm0, 0x1640(%rsp) vmovaps 0x1660(%rsp), %xmm2 
vmovaps 0x1650(%rsp), %xmm1 vmovaps 0x1640(%rsp), %xmm0 vmovaps %xmm2, 0x1710(%rsp) vmovaps %xmm1, 0x1700(%rsp) vmovaps %xmm0, 0x16f0(%rsp) vmovaps 0x1710(%rsp), %xmm0 vmovdqa 0x18a6ee7(%rip), %xmm1 # 0x1e06be0 vpxor %xmm1, %xmm0, %xmm1 vmovaps 0x1700(%rsp), %xmm0 vmovaps 0x16f0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x50(%rsp) vmovaps 0x50(%rsp), %xmm0 vmovaps %xmm0, 0xd30(%rsp) vmovaps 0xd30(%rsp), %xmm0 vmovaps %xmm0, 0x1080(%rsp) vmovaps %xmm0, 0x1070(%rsp) vmovaps 0x1080(%rsp), %xmm0 vmovaps 0x1070(%rsp), %xmm1 vmulps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0xd20(%rsp) vmovaps 0x18a2db5(%rip), %xmm0 # 0x1e02b20 vmovaps %xmm0, 0xcd0(%rsp) vmovaps 0xcd0(%rsp), %xmm1 vmovaps 0xd30(%rsp), %xmm0 vmovaps %xmm1, 0xed0(%rsp) vmovaps %xmm0, 0xec0(%rsp) vmovaps 0x18a2d90(%rip), %xmm0 # 0x1e02b30 vmovaps %xmm0, 0xeb0(%rsp) vmovaps 0xed0(%rsp), %xmm2 vmovaps 0xec0(%rsp), %xmm1 vmovaps 0xeb0(%rsp), %xmm0 vmovaps %xmm2, 0xf00(%rsp) vmovaps %xmm1, 0xef0(%rsp) vmovaps %xmm0, 0xee0(%rsp) vmovaps 0xf00(%rsp), %xmm1 vmovaps 0xef0(%rsp), %xmm0 vmovaps 0xee0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0xcd0(%rsp) vmovaps 0xcd0(%rsp), %xmm1 vmovaps 0xd30(%rsp), %xmm0 vmovaps %xmm1, 0xea0(%rsp) vmovaps %xmm0, 0xe90(%rsp) vmovaps 0x18a2d0c(%rip), %xmm0 # 0x1e02b40 vmovaps %xmm0, 0xe80(%rsp) vmovaps 0xea0(%rsp), %xmm2 vmovaps 0xe90(%rsp), %xmm1 vmovaps 0xe80(%rsp), %xmm0 vmovaps %xmm2, 0xf30(%rsp) vmovaps %xmm1, 0xf20(%rsp) vmovaps %xmm0, 0xf10(%rsp) vmovaps 0xf30(%rsp), %xmm1 vmovaps 0xf20(%rsp), %xmm0 vmovaps 0xf10(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0xcd0(%rsp) vmovaps 0xcd0(%rsp), %xmm1 vmovaps 0xd30(%rsp), %xmm0 vmovaps %xmm1, 0xe70(%rsp) vmovaps %xmm0, 0xe60(%rsp) vmovaps 0x18a2c88(%rip), %xmm0 # 0x1e02b50 vmovaps %xmm0, 0xe50(%rsp) vmovaps 0xe70(%rsp), %xmm2 vmovaps 0xe60(%rsp), %xmm1 vmovaps 0xe50(%rsp), %xmm0 vmovaps %xmm2, 
0xf60(%rsp) vmovaps %xmm1, 0xf50(%rsp) vmovaps %xmm0, 0xf40(%rsp) vmovaps 0xf60(%rsp), %xmm1 vmovaps 0xf50(%rsp), %xmm0 vmovaps 0xf40(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0xcd0(%rsp) vmovaps 0xcd0(%rsp), %xmm1 vmovaps 0xd30(%rsp), %xmm0 vmovaps %xmm1, 0xe40(%rsp) vmovaps %xmm0, 0xe30(%rsp) vmovaps 0x18a2c04(%rip), %xmm0 # 0x1e02b60 vmovaps %xmm0, 0xe20(%rsp) vmovaps 0xe40(%rsp), %xmm2 vmovaps 0xe30(%rsp), %xmm1 vmovaps 0xe20(%rsp), %xmm0 vmovaps %xmm2, 0xf90(%rsp) vmovaps %xmm1, 0xf80(%rsp) vmovaps %xmm0, 0xf70(%rsp) vmovaps 0xf90(%rsp), %xmm1 vmovaps 0xf80(%rsp), %xmm0 vmovaps 0xf70(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0xcd0(%rsp) vmovaps 0xcd0(%rsp), %xmm1 vmovaps 0xd30(%rsp), %xmm0 vmovaps %xmm1, 0xe10(%rsp) vmovaps %xmm0, 0xe00(%rsp) vmovaps 0x18a2b00(%rip), %xmm0 # 0x1e02af0 vmovaps %xmm0, 0xdf0(%rsp) vmovaps 0xe10(%rsp), %xmm2 vmovaps 0xe00(%rsp), %xmm1 vmovaps 0xdf0(%rsp), %xmm0 vmovaps %xmm2, 0xfc0(%rsp) vmovaps %xmm1, 0xfb0(%rsp) vmovaps %xmm0, 0xfa0(%rsp) vmovaps 0xfc0(%rsp), %xmm1 vmovaps 0xfb0(%rsp), %xmm0 vmovaps 0xfa0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0xcd0(%rsp) vmovaps 0xcd0(%rsp), %xmm2 vmovaps 0xd20(%rsp), %xmm1 vmovaps 0xd30(%rsp), %xmm0 vmovaps %xmm2, 0xde0(%rsp) vmovaps %xmm1, 0xdd0(%rsp) vmovaps %xmm0, 0xdc0(%rsp) vmovaps 0xde0(%rsp), %xmm2 vmovaps 0xdd0(%rsp), %xmm1 vmovaps 0xdc0(%rsp), %xmm0 vmovaps %xmm2, 0xff0(%rsp) vmovaps %xmm1, 0xfe0(%rsp) vmovaps %xmm0, 0xfd0(%rsp) vmovaps 0xff0(%rsp), %xmm1 vmovaps 0xfe0(%rsp), %xmm0 vmovaps 0xfd0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0xcd0(%rsp) vmovaps 0xcd0(%rsp), %xmm1 vmovaps 0xcf0(%rsp), %xmm0 vmovaps %xmm1, 0xd90(%rsp) vmovaps %xmm0, 0xd80(%rsp) vmovaps 0xd90(%rsp), %xmm0 vmovaps 0xd80(%rsp), %xmm1 vaddps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0xcd0(%rsp) vmovaps 0xd10(%rsp), %xmm0 
vmovaps %xmm0, 0x1520(%rsp) vcvttps2dq 0x1520(%rsp), %xmm0 vmovdqa %xmm0, 0xd00(%rsp) vmovdqa 0xd00(%rsp), %xmm0 vmovdqa %xmm0, 0xd50(%rsp) vmovdqa 0x18a2a02(%rip), %xmm0 # 0x1e02b70 vmovdqa %xmm0, 0xd40(%rsp) vmovdqa 0xd50(%rsp), %xmm0 vmovdqa 0xd40(%rsp), %xmm1 vpaddd %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0xd00(%rsp) vmovdqa 0xd00(%rsp), %xmm0 vmovdqa %xmm0, 0xd70(%rsp) movl $0x17, 0xd6c(%rsp) vmovdqa 0xd70(%rsp), %xmm0 movl 0xd6c(%rsp), %eax vmovd %eax, %xmm1 vpslld %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0xd00(%rsp) vmovdqa 0xd00(%rsp), %xmm0 vmovdqa %xmm0, 0x16b0(%rsp) vmovdqa 0x16b0(%rsp), %xmm0 vmovaps %xmm0, 0xcc0(%rsp) vmovaps 0xcd0(%rsp), %xmm1 vmovaps 0xcc0(%rsp), %xmm0 vmovaps %xmm1, 0x1060(%rsp) vmovaps %xmm0, 0x1050(%rsp) vmovaps 0x1060(%rsp), %xmm0 vmulps 0x1050(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0xcd0(%rsp) vmovaps 0xcd0(%rsp), %xmm0 vmovaps %xmm0, 0x40(%rsp) vmovaps 0x40(%rsp), %xmm1 movl $0x3f800000, 0xb4c(%rsp) # imm = 0x3F800000 vbroadcastss 0xb4c(%rsp), %xmm0 vmovaps %xmm0, 0xb30(%rsp) vmovaps 0xb30(%rsp), %xmm0 vmovaps %xmm1, 0xb20(%rsp) vmovaps %xmm0, 0xb10(%rsp) vmovaps 0xb20(%rsp), %xmm0 vmovaps 0xb10(%rsp), %xmm1 vaddps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x1ed0(%rsp) vmovaps 0x18a2804(%rip), %xmm0 # 0x1e02ab0 vmovaps %xmm0, 0x1eb0(%rsp) vmovaps 0x1ed0(%rsp), %xmm1 vxorps %xmm0, %xmm0, %xmm0 vmovaps %xmm0, 0x2340(%rsp) vmovaps 0x2340(%rsp), %xmm0 vmovaps %xmm1, 0x24f0(%rsp) vmovaps %xmm0, 0x24e0(%rsp) vmovaps 0x24f0(%rsp), %xmm0 vmovaps 0x24e0(%rsp), %xmm1 vcmpleps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x1ea0(%rsp) vmovaps 0x1ed0(%rsp), %xmm0 vmovaps %xmm0, 0x2360(%rsp) vmovaps 0x18a2860(%rip), %xmm0 # 0x1e02b80 vmovaps %xmm0, 0x2350(%rsp) vmovaps 0x2360(%rsp), %xmm0 vmovaps 0x2350(%rsp), %xmm1 vmaxps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x1ed0(%rsp) vmovaps 0x1ed0(%rsp), %xmm0 vmovaps %xmm0, 0x2500(%rsp) vmovaps 0x2500(%rsp), %xmm0 vmovaps %xmm0, 0x1ef0(%rsp) movl $0x17, 0x1eec(%rsp) vmovdqa 0x1ef0(%rsp), %xmm0 vmovd 0x1eec(%rsp), %xmm1 vpsrld %xmm1, 
%xmm0, %xmm0 vmovdqa %xmm0, 0x1ec0(%rsp) vmovaps 0x1ed0(%rsp), %xmm0 vmovaps %xmm0, 0x2470(%rsp) vmovaps 0x18a27e0(%rip), %xmm0 # 0x1e02b90 vmovaps %xmm0, 0x2460(%rsp) vmovdqa 0x2470(%rsp), %xmm0 vmovdqa 0x2460(%rsp), %xmm1 vpand %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0x1ed0(%rsp) vmovaps 0x1ed0(%rsp), %xmm0 vmovaps %xmm0, 0x2540(%rsp) vmovaps 0x18a26fe(%rip), %xmm1 # 0x1e02af0 vmovaps %xmm1, 0x2530(%rsp) vmovdqa 0x2540(%rsp), %xmm0 vmovdqa 0x2530(%rsp), %xmm2 vpor %xmm2, %xmm0, %xmm0 vmovdqa %xmm0, 0x1ed0(%rsp) vmovdqa 0x1ec0(%rsp), %xmm0 vmovdqa %xmm0, 0x1f10(%rsp) vmovdqa 0x18a273c(%rip), %xmm0 # 0x1e02b70 vmovdqa %xmm0, 0x1f00(%rsp) vmovdqa 0x1f10(%rsp), %xmm0 vmovdqa 0x1f00(%rsp), %xmm2 vpsubd %xmm2, %xmm0, %xmm0 vmovdqa %xmm0, 0x1ec0(%rsp) vmovdqa 0x1ec0(%rsp), %xmm0 vmovdqa %xmm0, 0x2410(%rsp) vcvtdq2ps 0x2410(%rsp), %xmm0 vmovaps %xmm0, 0x1e90(%rsp) vmovaps 0x1e90(%rsp), %xmm2 vmovaps 0x1eb0(%rsp), %xmm0 vmovaps %xmm2, 0x1f70(%rsp) vmovaps %xmm0, 0x1f60(%rsp) vmovaps 0x1f70(%rsp), %xmm0 vmovaps 0x1f60(%rsp), %xmm2 vaddps %xmm2, %xmm0, %xmm0 vmovaps %xmm0, 0x1e90(%rsp) vmovaps 0x1ed0(%rsp), %xmm0 vmovaps %xmm0, 0x2560(%rsp) vmovaps 0x18a26c3(%rip), %xmm0 # 0x1e02ba0 vmovaps %xmm0, 0x2550(%rsp) vmovaps 0x2560(%rsp), %xmm0 vmovaps 0x2550(%rsp), %xmm2 vcmpltps %xmm2, %xmm0, %xmm0 vmovaps %xmm0, 0x1e80(%rsp) vmovaps 0x1ed0(%rsp), %xmm2 vmovaps 0x1e80(%rsp), %xmm0 vmovaps %xmm2, 0x2450(%rsp) vmovaps %xmm0, 0x2440(%rsp) vmovdqa 0x2450(%rsp), %xmm0 vmovdqa 0x2440(%rsp), %xmm2 vpand %xmm2, %xmm0, %xmm0 vmovdqa %xmm0, 0x1e70(%rsp) vmovaps 0x1ed0(%rsp), %xmm2 vmovaps 0x1eb0(%rsp), %xmm0 vmovaps %xmm2, 0x2400(%rsp) vmovaps %xmm0, 0x23f0(%rsp) vmovaps 0x2400(%rsp), %xmm0 vmovaps 0x23f0(%rsp), %xmm2 vsubps %xmm2, %xmm0, %xmm0 vmovaps %xmm0, 0x1ed0(%rsp) vmovaps 0x1e90(%rsp), %xmm2 vmovaps 0x1eb0(%rsp), %xmm3 vmovaps 0x1e80(%rsp), %xmm0 vmovaps %xmm3, 0x2430(%rsp) vmovaps %xmm0, 0x2420(%rsp) vmovdqa 0x2430(%rsp), %xmm0 vmovdqa 0x2420(%rsp), %xmm3 vpand %xmm3, %xmm0, %xmm0 
vmovaps %xmm2, 0x23e0(%rsp) vmovdqa %xmm0, 0x23d0(%rsp) vmovaps 0x23e0(%rsp), %xmm0 vmovaps 0x23d0(%rsp), %xmm2 vsubps %xmm2, %xmm0, %xmm0 vmovaps %xmm0, 0x1e90(%rsp) vmovaps 0x1ed0(%rsp), %xmm2 vmovaps 0x1e70(%rsp), %xmm0 vmovaps %xmm2, 0x1f50(%rsp) vmovaps %xmm0, 0x1f40(%rsp) vmovaps 0x1f50(%rsp), %xmm0 vmovaps 0x1f40(%rsp), %xmm2 vaddps %xmm2, %xmm0, %xmm0 vmovaps %xmm0, 0x1ed0(%rsp) vmovaps 0x1ed0(%rsp), %xmm0 vmovaps %xmm0, 0x23c0(%rsp) vmovaps %xmm0, 0x23b0(%rsp) vmovaps 0x23c0(%rsp), %xmm0 vmovaps 0x23b0(%rsp), %xmm2 vmulps %xmm2, %xmm0, %xmm0 vmovaps %xmm0, 0x1e60(%rsp) vmovaps 0x18a252b(%rip), %xmm0 # 0x1e02bb0 vmovaps %xmm0, 0x1e50(%rsp) vmovaps 0x1e50(%rsp), %xmm2 vmovaps 0x1ed0(%rsp), %xmm0 vmovaps %xmm2, 0x2150(%rsp) vmovaps %xmm0, 0x2140(%rsp) vmovaps 0x18a2506(%rip), %xmm0 # 0x1e02bc0 vmovaps %xmm0, 0x2130(%rsp) vmovaps 0x2150(%rsp), %xmm3 vmovaps 0x2140(%rsp), %xmm2 vmovaps 0x2130(%rsp), %xmm0 vmovaps %xmm3, 0x2180(%rsp) vmovaps %xmm2, 0x2170(%rsp) vmovaps %xmm0, 0x2160(%rsp) vmovaps 0x2180(%rsp), %xmm2 vmovaps 0x2170(%rsp), %xmm0 vmovaps 0x2160(%rsp), %xmm3 vfmadd213ps %xmm3, %xmm2, %xmm0 # xmm0 = (xmm2 * xmm0) + xmm3 vmovaps %xmm0, 0x1e50(%rsp) vmovaps 0x1e50(%rsp), %xmm2 vmovaps 0x1ed0(%rsp), %xmm0 vmovaps %xmm2, 0x2120(%rsp) vmovaps %xmm0, 0x2110(%rsp) vmovaps 0x18a2482(%rip), %xmm0 # 0x1e02bd0 vmovaps %xmm0, 0x2100(%rsp) vmovaps 0x2120(%rsp), %xmm3 vmovaps 0x2110(%rsp), %xmm2 vmovaps 0x2100(%rsp), %xmm0 vmovaps %xmm3, 0x21b0(%rsp) vmovaps %xmm2, 0x21a0(%rsp) vmovaps %xmm0, 0x2190(%rsp) vmovaps 0x21b0(%rsp), %xmm2 vmovaps 0x21a0(%rsp), %xmm0 vmovaps 0x2190(%rsp), %xmm3 vfmadd213ps %xmm3, %xmm2, %xmm0 # xmm0 = (xmm2 * xmm0) + xmm3 vmovaps %xmm0, 0x1e50(%rsp) vmovaps 0x1e50(%rsp), %xmm2 vmovaps 0x1ed0(%rsp), %xmm0 vmovaps %xmm2, 0x20f0(%rsp) vmovaps %xmm0, 0x20e0(%rsp) vmovaps 0x18a23fe(%rip), %xmm0 # 0x1e02be0 vmovaps %xmm0, 0x20d0(%rsp) vmovaps 0x20f0(%rsp), %xmm3 vmovaps 0x20e0(%rsp), %xmm2 vmovaps 0x20d0(%rsp), %xmm0 vmovaps %xmm3, 
0x21e0(%rsp) vmovaps %xmm2, 0x21d0(%rsp) vmovaps %xmm0, 0x21c0(%rsp) vmovaps 0x21e0(%rsp), %xmm2 vmovaps 0x21d0(%rsp), %xmm0 vmovaps 0x21c0(%rsp), %xmm3 vfmadd213ps %xmm3, %xmm2, %xmm0 # xmm0 = (xmm2 * xmm0) + xmm3 vmovaps %xmm0, 0x1e50(%rsp) vmovaps 0x1e50(%rsp), %xmm2 vmovaps 0x1ed0(%rsp), %xmm0 vmovaps %xmm2, 0x20c0(%rsp) vmovaps %xmm0, 0x20b0(%rsp) vmovaps 0x18a237a(%rip), %xmm0 # 0x1e02bf0 vmovaps %xmm0, 0x20a0(%rsp) vmovaps 0x20c0(%rsp), %xmm3 vmovaps 0x20b0(%rsp), %xmm2 vmovaps 0x20a0(%rsp), %xmm0 vmovaps %xmm3, 0x2210(%rsp) vmovaps %xmm2, 0x2200(%rsp) vmovaps %xmm0, 0x21f0(%rsp) vmovaps 0x2210(%rsp), %xmm2 vmovaps 0x2200(%rsp), %xmm0 vmovaps 0x21f0(%rsp), %xmm3 vfmadd213ps %xmm3, %xmm2, %xmm0 # xmm0 = (xmm2 * xmm0) + xmm3 vmovaps %xmm0, 0x1e50(%rsp) vmovaps 0x1e50(%rsp), %xmm2 vmovaps 0x1ed0(%rsp), %xmm0 vmovaps %xmm2, 0x2090(%rsp) vmovaps %xmm0, 0x2080(%rsp) vmovaps 0x18a22f6(%rip), %xmm0 # 0x1e02c00 vmovaps %xmm0, 0x2070(%rsp) vmovaps 0x2090(%rsp), %xmm3 vmovaps 0x2080(%rsp), %xmm2 vmovaps 0x2070(%rsp), %xmm0 vmovaps %xmm3, 0x2240(%rsp) vmovaps %xmm2, 0x2230(%rsp) vmovaps %xmm0, 0x2220(%rsp) vmovaps 0x2240(%rsp), %xmm2 vmovaps 0x2230(%rsp), %xmm0 vmovaps 0x2220(%rsp), %xmm3 vfmadd213ps %xmm3, %xmm2, %xmm0 # xmm0 = (xmm2 * xmm0) + xmm3 vmovaps %xmm0, 0x1e50(%rsp) vmovaps 0x1e50(%rsp), %xmm2 vmovaps 0x1ed0(%rsp), %xmm0 vmovaps %xmm2, 0x2060(%rsp) vmovaps %xmm0, 0x2050(%rsp) vmovaps 0x18a2272(%rip), %xmm0 # 0x1e02c10 vmovaps %xmm0, 0x2040(%rsp) vmovaps 0x2060(%rsp), %xmm3 vmovaps 0x2050(%rsp), %xmm2 vmovaps 0x2040(%rsp), %xmm0 vmovaps %xmm3, 0x2270(%rsp) vmovaps %xmm2, 0x2260(%rsp) vmovaps %xmm0, 0x2250(%rsp) vmovaps 0x2270(%rsp), %xmm2 vmovaps 0x2260(%rsp), %xmm0 vmovaps 0x2250(%rsp), %xmm3 vfmadd213ps %xmm3, %xmm2, %xmm0 # xmm0 = (xmm2 * xmm0) + xmm3 vmovaps %xmm0, 0x1e50(%rsp) vmovaps 0x1e50(%rsp), %xmm2 vmovaps 0x1ed0(%rsp), %xmm0 vmovaps %xmm2, 0x2030(%rsp) vmovaps %xmm0, 0x2020(%rsp) vmovaps 0x18a21ee(%rip), %xmm0 # 0x1e02c20 vmovaps %xmm0, 
0x2010(%rsp) vmovaps 0x2030(%rsp), %xmm3 vmovaps 0x2020(%rsp), %xmm2 vmovaps 0x2010(%rsp), %xmm0 vmovaps %xmm3, 0x22a0(%rsp) vmovaps %xmm2, 0x2290(%rsp) vmovaps %xmm0, 0x2280(%rsp) vmovaps 0x22a0(%rsp), %xmm2 vmovaps 0x2290(%rsp), %xmm0 vmovaps 0x2280(%rsp), %xmm3 vfmadd213ps %xmm3, %xmm2, %xmm0 # xmm0 = (xmm2 * xmm0) + xmm3 vmovaps %xmm0, 0x1e50(%rsp) vmovaps 0x1e50(%rsp), %xmm2 vmovaps 0x1ed0(%rsp), %xmm0 vmovaps %xmm2, 0x2000(%rsp) vmovaps %xmm0, 0x1ff0(%rsp) vmovaps 0x18a216a(%rip), %xmm0 # 0x1e02c30 vmovaps %xmm0, 0x1fe0(%rsp) vmovaps 0x2000(%rsp), %xmm3 vmovaps 0x1ff0(%rsp), %xmm2 vmovaps 0x1fe0(%rsp), %xmm0 vmovaps %xmm3, 0x22d0(%rsp) vmovaps %xmm2, 0x22c0(%rsp) vmovaps %xmm0, 0x22b0(%rsp) vmovaps 0x22d0(%rsp), %xmm2 vmovaps 0x22c0(%rsp), %xmm0 vmovaps 0x22b0(%rsp), %xmm3 vfmadd213ps %xmm3, %xmm2, %xmm0 # xmm0 = (xmm2 * xmm0) + xmm3 vmovaps %xmm0, 0x1e50(%rsp) vmovaps 0x1e50(%rsp), %xmm2 vmovaps 0x1ed0(%rsp), %xmm0 vmovaps %xmm2, 0x23a0(%rsp) vmovaps %xmm0, 0x2390(%rsp) vmovaps 0x23a0(%rsp), %xmm0 vmovaps 0x2390(%rsp), %xmm2 vmulps %xmm2, %xmm0, %xmm0 vmovaps %xmm0, 0x1e50(%rsp) vmovaps 0x1e50(%rsp), %xmm2 vmovaps 0x1e60(%rsp), %xmm0 vmovaps %xmm2, 0x2380(%rsp) vmovaps %xmm0, 0x2370(%rsp) vmovaps 0x2380(%rsp), %xmm0 vmovaps 0x2370(%rsp), %xmm2 vmulps %xmm2, %xmm0, %xmm0 vmovaps %xmm0, 0x1e50(%rsp) vmovaps 0x1e90(%rsp), %xmm2 vmovaps 0x1e50(%rsp), %xmm0 vmovaps %xmm2, 0x1fd0(%rsp) vmovaps 0x18a1f39(%rip), %xmm2 # 0x1e02b10 vmovaps %xmm2, 0x1fc0(%rsp) vmovaps %xmm0, 0x1fb0(%rsp) vmovaps 0x1fd0(%rsp), %xmm3 vmovaps 0x1fc0(%rsp), %xmm2 vmovaps 0x1fb0(%rsp), %xmm0 vmovaps %xmm3, 0x2300(%rsp) vmovaps %xmm2, 0x22f0(%rsp) vmovaps %xmm0, 0x22e0(%rsp) vmovaps 0x2300(%rsp), %xmm2 vmovaps 0x22f0(%rsp), %xmm0 vmovaps 0x22e0(%rsp), %xmm3 vfmadd213ps %xmm3, %xmm2, %xmm0 # xmm0 = (xmm2 * xmm0) + xmm3 vmovaps %xmm0, 0x1e50(%rsp) vmovaps 0x1e60(%rsp), %xmm2 vmovaps 0x1e50(%rsp), %xmm0 vmovaps %xmm2, 0x24a0(%rsp) vmovaps %xmm1, 0x2490(%rsp) vmovaps %xmm0, 0x2480(%rsp) vmovaps 
0x24a0(%rsp), %xmm2 vmovaps 0x2490(%rsp), %xmm1 vmovaps 0x2480(%rsp), %xmm0 vmovaps %xmm2, 0x24d0(%rsp) vmovaps %xmm1, 0x24c0(%rsp) vmovaps %xmm0, 0x24b0(%rsp) vmovaps 0x24d0(%rsp), %xmm1 vmovaps 0x24c0(%rsp), %xmm0 vmovaps 0x24b0(%rsp), %xmm2 vfnmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = -(xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x1e50(%rsp) vmovaps 0x1ed0(%rsp), %xmm1 vmovaps 0x1e50(%rsp), %xmm0 vmovaps %xmm1, 0x1f30(%rsp) vmovaps %xmm0, 0x1f20(%rsp) vmovaps 0x1f30(%rsp), %xmm0 vmovaps 0x1f20(%rsp), %xmm1 vaddps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x1ed0(%rsp) vmovaps 0x1e90(%rsp), %xmm1 vmovaps 0x1ed0(%rsp), %xmm0 vmovaps %xmm1, 0x1fa0(%rsp) vmovaps 0x18a1dc6(%rip), %xmm1 # 0x1e02b00 vmovaps %xmm1, 0x1f90(%rsp) vmovaps %xmm0, 0x1f80(%rsp) vmovaps 0x1fa0(%rsp), %xmm2 vmovaps 0x1f90(%rsp), %xmm1 vmovaps 0x1f80(%rsp), %xmm0 vmovaps %xmm2, 0x2330(%rsp) vmovaps %xmm1, 0x2320(%rsp) vmovaps %xmm0, 0x2310(%rsp) vmovaps 0x2330(%rsp), %xmm1 vmovaps 0x2320(%rsp), %xmm0 vmovaps 0x2310(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x1ed0(%rsp) vmovaps 0x1ed0(%rsp), %xmm1 vmovaps 0x1ea0(%rsp), %xmm0 vmovaps %xmm1, 0x2520(%rsp) vmovaps %xmm0, 0x2510(%rsp) vmovaps 0x2520(%rsp), %xmm0 vmovaps 0x2510(%rsp), %xmm1 vpor %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x1ed0(%rsp) vmovaps 0x1ed0(%rsp), %xmm0 vmovaps %xmm0, 0x30(%rsp) vmovaps 0x30(%rsp), %xmm0 vmovaps %xmm0, 0x17a0(%rsp) movl $0x3f800000, 0x17ec(%rsp) # imm = 0x3F800000 vbroadcastss 0x17ec(%rsp), %xmm0 vmovaps %xmm0, 0x17d0(%rsp) vmovaps 0x17d0(%rsp), %xmm0 vmovaps %xmm0, 0x1790(%rsp) movl $0x40000000, 0x17cc(%rsp) # imm = 0x40000000 vbroadcastss 0x17cc(%rsp), %xmm0 vmovaps %xmm0, 0x17b0(%rsp) vmovaps 0x17b0(%rsp), %xmm0 vmovaps %xmm0, 0x1780(%rsp) vmovaps 0x17a0(%rsp), %xmm1 vmovaps 0x1780(%rsp), %xmm0 vmovaps %xmm1, 0x1890(%rsp) vmovaps %xmm0, 0x1880(%rsp) vmovaps 0x1890(%rsp), %xmm0 vmovaps 0x1880(%rsp), %xmm1 vmulps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x1800(%rsp) movl $0x3f800000, 
0x185c(%rsp) # imm = 0x3F800000 vbroadcastss 0x185c(%rsp), %xmm0 vmovaps %xmm0, 0x1840(%rsp) vmovaps 0x1840(%rsp), %xmm0 vmovaps %xmm0, 0x17f0(%rsp) vmovaps 0x17f0(%rsp), %xmm2 vmovaps %xmm2, %xmm1 vxorps %xmm0, %xmm0, %xmm0 vmovaps %xmm0, 0x1830(%rsp) vmovaps 0x1830(%rsp), %xmm4 vmovaps 0x1800(%rsp), %xmm3 vmovaps %xmm4, 0x1ce0(%rsp) vmovaps %xmm3, 0x1cd0(%rsp) vmovaps 0x1ce0(%rsp), %xmm3 vmovaps 0x1cd0(%rsp), %xmm4 vsubps %xmm4, %xmm3, %xmm3 vmovaps %xmm3, 0x1930(%rsp) vmovaps %xmm0, 0x1c00(%rsp) vmovaps 0x1c00(%rsp), %xmm0 vmovaps %xmm0, 0x1920(%rsp) vmovaps 0x18a1b51(%rip), %xmm0 # 0x1e02ab0 vmovaps %xmm0, 0x18f0(%rsp) vmovaps 0x1930(%rsp), %xmm0 vmovaps %xmm0, 0x1c40(%rsp) vmovaps 0x18a1b3e(%rip), %xmm0 # 0x1e02ac0 vmovaps %xmm0, 0x1c30(%rsp) vmovaps 0x1c40(%rsp), %xmm0 vmovaps 0x1c30(%rsp), %xmm3 vminps %xmm3, %xmm0, %xmm0 vmovaps %xmm0, 0x1930(%rsp) vmovaps 0x1930(%rsp), %xmm0 vmovaps %xmm0, 0x1c20(%rsp) vmovaps 0x18a1b0c(%rip), %xmm0 # 0x1e02ad0 vmovaps %xmm0, 0x1c10(%rsp) vmovaps 0x1c20(%rsp), %xmm0 vmovaps 0x1c10(%rsp), %xmm3 vmaxps %xmm3, %xmm0, %xmm0 vmovaps %xmm0, 0x1930(%rsp) vmovaps 0x1930(%rsp), %xmm0 vmovaps %xmm0, 0x1ca0(%rsp) vmovaps 0x18a1ada(%rip), %xmm0 # 0x1e02ae0 vmovaps %xmm0, 0x1c90(%rsp) vmovaps 0x1ca0(%rsp), %xmm0 vmovaps 0x1c90(%rsp), %xmm3 vmulps %xmm3, %xmm0, %xmm0 vmovaps %xmm0, 0x1910(%rsp) vmovaps 0x1910(%rsp), %xmm0 vmovaps %xmm0, 0x19b0(%rsp) vmovaps 0x18a1aa8(%rip), %xmm0 # 0x1e02af0 vmovaps %xmm0, 0x19a0(%rsp) vmovaps 0x19b0(%rsp), %xmm3 vmovaps 0x19a0(%rsp), %xmm4 vaddps %xmm4, %xmm3, %xmm3 vmovaps %xmm3, 0x1910(%rsp) vmovaps 0x1910(%rsp), %xmm3 vmovaps %xmm3, 0x1d20(%rsp) vcvttps2dq 0x1d20(%rsp), %xmm3 vmovdqa %xmm3, 0x1900(%rsp) vmovdqa 0x1900(%rsp), %xmm3 vmovdqa %xmm3, 0x1d30(%rsp) vcvtdq2ps 0x1d30(%rsp), %xmm3 vmovaps %xmm3, 0x1920(%rsp) vmovaps 0x1920(%rsp), %xmm4 vmovaps 0x1910(%rsp), %xmm3 vmovaps %xmm4, 0x1d50(%rsp) vmovaps %xmm3, 0x1d40(%rsp) vmovaps 0x1d40(%rsp), %xmm3 vmovaps 0x1d50(%rsp), %xmm4 vcmpltps %xmm4, 
%xmm3, %xmm3 vmovaps %xmm3, 0x18e0(%rsp) vmovaps 0x18e0(%rsp), %xmm4 vmovaps 0x18f0(%rsp), %xmm3 vmovaps %xmm4, 0x1d70(%rsp) vmovaps %xmm3, 0x1d60(%rsp) vmovdqa 0x1d70(%rsp), %xmm3 vmovdqa 0x1d60(%rsp), %xmm4 vpand %xmm4, %xmm3, %xmm3 vmovdqa %xmm3, 0x18e0(%rsp) vmovaps 0x1920(%rsp), %xmm4 vmovaps 0x18e0(%rsp), %xmm3 vmovaps %xmm4, 0x1cc0(%rsp) vmovaps %xmm3, 0x1cb0(%rsp) vmovaps 0x1cc0(%rsp), %xmm3 vmovaps 0x1cb0(%rsp), %xmm4 vsubps %xmm4, %xmm3, %xmm3 vmovaps %xmm3, 0x1910(%rsp) vmovaps 0x1910(%rsp), %xmm4 vmovaps 0x1930(%rsp), %xmm3 vmovaps %xmm4, 0x1dd0(%rsp) vmovaps 0x18a195b(%rip), %xmm4 # 0x1e02b00 vmovaps %xmm4, 0x1dc0(%rsp) vmovaps %xmm3, 0x1db0(%rsp) vmovaps 0x1dd0(%rsp), %xmm5 vmovaps 0x1dc0(%rsp), %xmm4 vmovaps 0x1db0(%rsp), %xmm3 vmovaps %xmm5, 0x1e10(%rsp) vmovaps %xmm4, 0x1e00(%rsp) vmovaps %xmm3, 0x1df0(%rsp) vmovaps 0x1e10(%rsp), %xmm4 vmovaps 0x1e00(%rsp), %xmm3 vmovaps 0x1df0(%rsp), %xmm5 vfnmadd213ps %xmm5, %xmm4, %xmm3 # xmm3 = -(xmm4 * xmm3) + xmm5 vmovaps %xmm3, 0x1930(%rsp) vmovaps 0x1910(%rsp), %xmm4 vmovaps 0x1930(%rsp), %xmm3 vmovaps %xmm4, 0x1da0(%rsp) vmovaps 0x18a18d7(%rip), %xmm4 # 0x1e02b10 vmovaps %xmm4, 0x1d90(%rsp) vmovaps %xmm3, 0x1d80(%rsp) vmovaps 0x1da0(%rsp), %xmm5 vmovaps 0x1d90(%rsp), %xmm4 vmovaps 0x1d80(%rsp), %xmm3 vmovaps %xmm5, 0x1e40(%rsp) vmovaps %xmm4, 0x1e30(%rsp) vmovaps %xmm3, 0x1e20(%rsp) vmovaps 0x1e40(%rsp), %xmm4 vmovaps 0x1e30(%rsp), %xmm3 vmovaps 0x1e20(%rsp), %xmm5 vfnmadd213ps %xmm5, %xmm4, %xmm3 # xmm3 = -(xmm4 * xmm3) + xmm5 vmovaps %xmm3, 0x1930(%rsp) vmovaps 0x1930(%rsp), %xmm3 vmovaps %xmm3, 0x1c80(%rsp) vmovaps %xmm3, 0x1c70(%rsp) vmovaps 0x1c80(%rsp), %xmm3 vmovaps 0x1c70(%rsp), %xmm4 vmulps %xmm4, %xmm3, %xmm3 vmovaps %xmm3, 0x1920(%rsp) vmovaps 0x18a1834(%rip), %xmm3 # 0x1e02b20 vmovaps %xmm3, 0x18d0(%rsp) vmovaps 0x18d0(%rsp), %xmm4 vmovaps 0x1930(%rsp), %xmm3 vmovaps %xmm4, 0x1ad0(%rsp) vmovaps %xmm3, 0x1ac0(%rsp) vmovaps 0x18a180f(%rip), %xmm3 # 0x1e02b30 vmovaps %xmm3, 0x1ab0(%rsp) vmovaps 
0x1ad0(%rsp), %xmm5 vmovaps 0x1ac0(%rsp), %xmm4 vmovaps 0x1ab0(%rsp), %xmm3 vmovaps %xmm5, 0x1b00(%rsp) vmovaps %xmm4, 0x1af0(%rsp) vmovaps %xmm3, 0x1ae0(%rsp) vmovaps 0x1b00(%rsp), %xmm4 vmovaps 0x1af0(%rsp), %xmm3 vmovaps 0x1ae0(%rsp), %xmm5 vfmadd213ps %xmm5, %xmm4, %xmm3 # xmm3 = (xmm4 * xmm3) + xmm5 vmovaps %xmm3, 0x18d0(%rsp) vmovaps 0x18d0(%rsp), %xmm4 vmovaps 0x1930(%rsp), %xmm3 vmovaps %xmm4, 0x1aa0(%rsp) vmovaps %xmm3, 0x1a90(%rsp) vmovaps 0x18a178b(%rip), %xmm3 # 0x1e02b40 vmovaps %xmm3, 0x1a80(%rsp) vmovaps 0x1aa0(%rsp), %xmm5 vmovaps 0x1a90(%rsp), %xmm4 vmovaps 0x1a80(%rsp), %xmm3 vmovaps %xmm5, 0x1b30(%rsp) vmovaps %xmm4, 0x1b20(%rsp) vmovaps %xmm3, 0x1b10(%rsp) vmovaps 0x1b30(%rsp), %xmm4 vmovaps 0x1b20(%rsp), %xmm3 vmovaps 0x1b10(%rsp), %xmm5 vfmadd213ps %xmm5, %xmm4, %xmm3 # xmm3 = (xmm4 * xmm3) + xmm5 vmovaps %xmm3, 0x18d0(%rsp) vmovaps 0x18d0(%rsp), %xmm4 vmovaps 0x1930(%rsp), %xmm3 vmovaps %xmm4, 0x1a70(%rsp) vmovaps %xmm3, 0x1a60(%rsp) vmovaps 0x18a1707(%rip), %xmm3 # 0x1e02b50 vmovaps %xmm3, 0x1a50(%rsp) vmovaps 0x1a70(%rsp), %xmm5 vmovaps 0x1a60(%rsp), %xmm4 vmovaps 0x1a50(%rsp), %xmm3 vmovaps %xmm5, 0x1b60(%rsp) vmovaps %xmm4, 0x1b50(%rsp) vmovaps %xmm3, 0x1b40(%rsp) vmovaps 0x1b60(%rsp), %xmm4 vmovaps 0x1b50(%rsp), %xmm3 vmovaps 0x1b40(%rsp), %xmm5 vfmadd213ps %xmm5, %xmm4, %xmm3 # xmm3 = (xmm4 * xmm3) + xmm5 vmovaps %xmm3, 0x18d0(%rsp) vmovaps 0x18d0(%rsp), %xmm4 vmovaps 0x1930(%rsp), %xmm3 vmovaps %xmm4, 0x1a40(%rsp) vmovaps %xmm3, 0x1a30(%rsp) vmovaps 0x18a1683(%rip), %xmm3 # 0x1e02b60 vmovaps %xmm3, 0x1a20(%rsp) vmovaps 0x1a40(%rsp), %xmm5 vmovaps 0x1a30(%rsp), %xmm4 vmovaps 0x1a20(%rsp), %xmm3 vmovaps %xmm5, 0x1b90(%rsp) vmovaps %xmm4, 0x1b80(%rsp) vmovaps %xmm3, 0x1b70(%rsp) vmovaps 0x1b90(%rsp), %xmm4 vmovaps 0x1b80(%rsp), %xmm3 vmovaps 0x1b70(%rsp), %xmm5 vfmadd213ps %xmm5, %xmm4, %xmm3 # xmm3 = (xmm4 * xmm3) + xmm5 vmovaps %xmm3, 0x18d0(%rsp) vmovaps 0x18d0(%rsp), %xmm4 vmovaps 0x1930(%rsp), %xmm3 vmovaps %xmm4, 0x1a10(%rsp) 
vmovaps %xmm3, 0x1a00(%rsp) vmovaps %xmm0, 0x19f0(%rsp) vmovaps 0x1a10(%rsp), %xmm4 vmovaps 0x1a00(%rsp), %xmm3 vmovaps 0x19f0(%rsp), %xmm0 vmovaps %xmm4, 0x1bc0(%rsp) vmovaps %xmm3, 0x1bb0(%rsp) vmovaps %xmm0, 0x1ba0(%rsp) vmovaps 0x1bc0(%rsp), %xmm3 vmovaps 0x1bb0(%rsp), %xmm0 vmovaps 0x1ba0(%rsp), %xmm4 vfmadd213ps %xmm4, %xmm3, %xmm0 # xmm0 = (xmm3 * xmm0) + xmm4 vmovaps %xmm0, 0x18d0(%rsp) vmovaps 0x18d0(%rsp), %xmm4 vmovaps 0x1920(%rsp), %xmm3 vmovaps 0x1930(%rsp), %xmm0 vmovaps %xmm4, 0x19e0(%rsp) vmovaps %xmm3, 0x19d0(%rsp) vmovaps %xmm0, 0x19c0(%rsp) vmovaps 0x19e0(%rsp), %xmm4 vmovaps 0x19d0(%rsp), %xmm3 vmovaps 0x19c0(%rsp), %xmm0 vmovaps %xmm4, 0x1bf0(%rsp) vmovaps %xmm3, 0x1be0(%rsp) vmovaps %xmm0, 0x1bd0(%rsp) vmovaps 0x1bf0(%rsp), %xmm3 vmovaps 0x1be0(%rsp), %xmm0 vmovaps 0x1bd0(%rsp), %xmm4 vfmadd213ps %xmm4, %xmm3, %xmm0 # xmm0 = (xmm3 * xmm0) + xmm4 vmovaps %xmm0, 0x18d0(%rsp) vmovaps 0x18d0(%rsp), %xmm3 vmovaps 0x18f0(%rsp), %xmm0 vmovaps %xmm3, 0x1990(%rsp) vmovaps %xmm0, 0x1980(%rsp) vmovaps 0x1990(%rsp), %xmm0 vmovaps 0x1980(%rsp), %xmm3 vaddps %xmm3, %xmm0, %xmm0 vmovaps %xmm0, 0x18d0(%rsp) vmovaps 0x1910(%rsp), %xmm0 vmovaps %xmm0, 0x1d10(%rsp) vcvttps2dq 0x1d10(%rsp), %xmm0 vmovdqa %xmm0, 0x1900(%rsp) vmovdqa 0x1900(%rsp), %xmm0 vmovdqa %xmm0, 0x1950(%rsp) vmovdqa 0x18a1489(%rip), %xmm0 # 0x1e02b70 vmovdqa %xmm0, 0x1940(%rsp) vmovdqa 0x1950(%rsp), %xmm0 vmovdqa 0x1940(%rsp), %xmm3 vpaddd %xmm3, %xmm0, %xmm0 vmovdqa %xmm0, 0x1900(%rsp) vmovdqa 0x1900(%rsp), %xmm0 vmovdqa %xmm0, 0x1970(%rsp) movl $0x17, 0x196c(%rsp) vmovdqa 0x1970(%rsp), %xmm0 movl 0x196c(%rsp), %eax vmovd %eax, %xmm3 vpslld %xmm3, %xmm0, %xmm0 vmovdqa %xmm0, 0x1900(%rsp) vmovdqa 0x1900(%rsp), %xmm0 vmovdqa %xmm0, 0x1de0(%rsp) vmovdqa 0x1de0(%rsp), %xmm0 vmovaps %xmm0, 0x18c0(%rsp) vmovaps 0x18d0(%rsp), %xmm3 vmovaps 0x18c0(%rsp), %xmm0 vmovaps %xmm3, 0x1c60(%rsp) vmovaps %xmm0, 0x1c50(%rsp) vmovaps 0x1c60(%rsp), %xmm0 vmulps 0x1c50(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 
0x18d0(%rsp) vmovaps 0x18d0(%rsp), %xmm0 vmovaps %xmm2, 0x1820(%rsp) vmovaps %xmm0, 0x1810(%rsp) vmovaps 0x1820(%rsp), %xmm0 vaddps 0x1810(%rsp), %xmm0, %xmm0 vmovaps %xmm1, 0x18b0(%rsp) vmovaps %xmm0, 0x18a0(%rsp) vmovaps 0x18b0(%rsp), %xmm0 vdivps 0x18a0(%rsp), %xmm0, %xmm1 vmovaps 0x1780(%rsp), %xmm0 vmovaps %xmm1, 0x1870(%rsp) vmovaps %xmm0, 0x1860(%rsp) vmovaps 0x1870(%rsp), %xmm0 vmulps 0x1860(%rsp), %xmm0, %xmm1 vmovaps 0x1790(%rsp), %xmm0 vmovaps %xmm1, 0x1d00(%rsp) vmovaps %xmm0, 0x1cf0(%rsp) vmovaps 0x1d00(%rsp), %xmm0 vsubps 0x1cf0(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x20(%rsp) vmovaps 0x20(%rsp), %xmm0 vmovaps 0x60(%rsp), %xmm1 vmovaps %xmm1, 0xc70(%rsp) vmovaps %xmm0, 0xc60(%rsp) vmovaps 0xc70(%rsp), %xmm0 vmulps 0xc60(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x10(%rsp) vmovaps 0x10(%rsp), %xmm0 vmovaps %xmm0, 0x840(%rsp) jmp 0x561b2f movq 0x820(%rsp), %rax movq %rax, 0x868(%rsp) movq $0x0, 0x860(%rsp) movq 0x868(%rsp), %rax movq (%rax), %rax movq 0x860(%rsp), %rcx vmovss (%rax,%rcx,4), %xmm0 vmovss %xmm0, 0xa4c(%rsp) vbroadcastss 0xa4c(%rsp), %xmm0 vmovaps %xmm0, 0xa30(%rsp) vmovaps 0xa30(%rsp), %xmm0 vmovaps %xmm0, 0x7f0(%rsp) movq 0x820(%rsp), %rax movq %rax, 0x858(%rsp) movq $0x1, 0x850(%rsp) movq 0x858(%rsp), %rax movq (%rax), %rax movq 0x850(%rsp), %rcx vmovss (%rax,%rcx,4), %xmm0 vmovss %xmm0, 0xa2c(%rsp) vbroadcastss 0xa2c(%rsp), %xmm0 vmovaps %xmm0, 0xa10(%rsp) vmovaps 0xa10(%rsp), %xmm0 vmovaps %xmm0, 0x7e0(%rsp) vmovaps 0x830(%rsp), %xmm2 vmovaps 0x7f0(%rsp), %xmm1 vmovaps 0x7e0(%rsp), %xmm0 vmovaps %xmm2, 0xb80(%rsp) vmovaps %xmm1, 0xb70(%rsp) vmovaps %xmm0, 0xb60(%rsp) movl $0x3f800000, 0xbfc(%rsp) # imm = 0x3F800000 vbroadcastss 0xbfc(%rsp), %xmm0 vmovaps %xmm0, 0xbe0(%rsp) vmovaps 0xbe0(%rsp), %xmm0 vmovaps %xmm0, 0xb50(%rsp) vmovaps 0xb80(%rsp), %xmm1 vmovaps 0xb70(%rsp), %xmm0 vmovaps %xmm1, 0xc50(%rsp) vmovaps %xmm0, 0xc40(%rsp) vmovaps 0xc50(%rsp), %xmm0 vmovaps 0xc40(%rsp), %xmm1 vmulps %xmm1, %xmm0, %xmm1 vmovaps 0xb60(%rsp), %xmm0 
vmovaps %xmm1, 0xba0(%rsp) vmovaps %xmm0, 0xb90(%rsp) vmovaps 0xba0(%rsp), %xmm0 vmovaps 0xb90(%rsp), %xmm1 vaddps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0xb60(%rsp) vmovaps 0xb60(%rsp), %xmm1 vxorps %xmm0, %xmm0, %xmm0 vmovaps %xmm0, 0xbb0(%rsp) vmovaps 0xbb0(%rsp), %xmm0 vmovaps %xmm1, 0xbd0(%rsp) vmovaps %xmm0, 0xbc0(%rsp) vmovaps 0xbd0(%rsp), %xmm0 vmovaps 0xbc0(%rsp), %xmm1 vmaxps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0xb60(%rsp) vmovaps 0xb60(%rsp), %xmm1 vmovaps 0xb50(%rsp), %xmm0 vmovaps %xmm1, 0xc10(%rsp) vmovaps %xmm0, 0xc00(%rsp) vmovaps 0xc10(%rsp), %xmm0 vmovaps 0xc00(%rsp), %xmm1 vminps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0xb60(%rsp) vmovaps 0xb60(%rsp), %xmm1 vmovaps 0xb80(%rsp), %xmm0 vmovaps %xmm1, 0xc30(%rsp) vmovaps %xmm0, 0xc20(%rsp) vmovaps 0xc30(%rsp), %xmm0 vmulps 0xc20(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x840(%rsp) jmp 0x561b2f vmovaps 0x830(%rsp), %xmm0 vmovaps %xmm0, 0x840(%rsp) vmovaps 0x840(%rsp), %xmm0 vmovaps %xmm0, (%rsp) vmovaps (%rsp), %xmm0 vmovaps %xmm0, 0x310(%rsp) movq 0x370(%rsp), %rax movl 0x320(%rsp), %ecx shll $0x2, %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax vmovaps 0x310(%rsp), %xmm0 movq %rax, 0x8c8(%rsp) vmovaps %xmm0, 0x8b0(%rsp) vmovaps 0x8b0(%rsp), %xmm0 movq 0x8c8(%rsp), %rax vmovups %xmm0, (%rax) movl 0x320(%rsp), %eax addl $0x1, %eax movl %eax, 0x320(%rsp) jmp 0x55dd52 movl 0x3e4(%rsp), %ecx shll $0x2, %ecx movq 0x370(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x370(%rsp) movl 0x324(%rsp), %eax addl $0x1, %eax movl %eax, 0x324(%rsp) jmp 0x55dd33 jmp 0x561be8 movl 0x37c(%rsp), %eax addl $0x1, %eax movl %eax, 0x37c(%rsp) jmp 0x55d8bf leaq 0x3c0(%rsp), %rdi callq 0x998a0 addq $0x2648, %rsp # imm = 0x2648 retq movq 0x3b0(%rsp), %rdi callq 0x5e3b0
/ysh329[P]ncnn/src/layer/x86/convolution_pack4.h
ncnn::conv1x1s2_sgemm_sse(ncnn::Mat const&, ncnn::Mat&, ncnn::Mat const&, ncnn::Mat const&, ncnn::Option const&)
static void conv1x1s2_sgemm_sse(const Mat& bottom_blob, Mat& top_blob, const Mat& kernel, const Mat& _bias, const Option& opt) { int w = bottom_blob.w; int channels = bottom_blob.c; size_t elemsize = bottom_blob.elemsize; int elempack = bottom_blob.elempack; int outw = top_blob.w; int outh = top_blob.h; const int tailstep = w - 2 * outw + w; Mat bottom_blob_shrinked; bottom_blob_shrinked.create(outw, outh, channels, elemsize, elempack, opt.workspace_allocator); #pragma omp parallel for num_threads(opt.num_threads) for (int p = 0; p < channels; p++) { const float* r0 = bottom_blob.channel(p); float* outptr = bottom_blob_shrinked.channel(p); for (int i = 0; i < outh; i++) { for (int j = 0; j < outw; j++) { outptr[0] = r0[0]; r0 += 2; outptr += 1; } r0 += tailstep; } } conv1x1s1_sgemm_sse(bottom_blob_shrinked, top_blob, kernel, _bias, opt); }
subq $0x398, %rsp # imm = 0x398 movq %rdi, 0x1b8(%rsp) movq %rsi, 0x1b0(%rsp) movq %rdx, 0x1a8(%rsp) movq %rcx, 0x1a0(%rsp) movq %r8, 0x198(%rsp) movq 0x1b8(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x194(%rsp) movq 0x1b8(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0x190(%rsp) movq 0x1b8(%rsp), %rax movq 0x10(%rax), %rax movq %rax, 0x188(%rsp) movq 0x1b8(%rsp), %rax movl 0x18(%rax), %eax movl %eax, 0x184(%rsp) movq 0x1b0(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x180(%rsp) movq 0x1b0(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0x17c(%rsp) movl 0x194(%rsp), %ecx movl 0x180(%rsp), %edx addl %edx, %edx movl %ecx, %eax subl %edx, %eax addl %ecx, %eax movl %eax, 0x178(%rsp) leaq 0x130(%rsp), %rdi movq %rdi, 0x1c0(%rsp) movq 0x1c0(%rsp), %rax movq %rax, 0x70(%rsp) movq $0x0, (%rax) movq $0x0, 0x8(%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movq $0x0, 0x20(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movl 0x180(%rsp), %esi movl 0x17c(%rsp), %edx movl 0x190(%rsp), %ecx movq 0x188(%rsp), %r8 movl 0x184(%rsp), %r9d movq 0x198(%rsp), %rax movq 0x10(%rax), %r10 movq %rsp, %rax movq %r10, (%rax) callq 0x62060 jmp 0x591076 movl $0x0, 0x120(%rsp) movl 0x120(%rsp), %eax cmpl 0x190(%rsp), %eax jge 0x5919aa movq 0x1b8(%rsp), %rcx movl 0x120(%rsp), %eax leaq 0xd0(%rsp), %rdx movq %rdx, 0x2a8(%rsp) movq %rcx, 0x2a0(%rsp) movl %eax, 0x29c(%rsp) movq 0x2a0(%rsp), %rax movq %rax, 0x60(%rsp) movb $0x0, 0x29b(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x29c(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0xd0(%rsp), %r10 movq %r10, 0x338(%rsp) movl %r9d, 0x334(%rsp) movl %r8d, 0x330(%rsp) movl %edi, 0x32c(%rsp) movq %rsi, 0x320(%rsp) movq %rdx, 0x318(%rsp) movl %ecx, 0x314(%rsp) movq %rax, 0x308(%rsp) movq 0x338(%rsp), 
%rcx movq %rcx, 0x68(%rsp) movq 0x320(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x318(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x314(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x308(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x334(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x330(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x32c(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x390(%rsp) movl $0x10, 0x38c(%rsp) movq 0x390(%rsp), %rax movslq 0x38c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x38c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rcx movq 0x68(%rsp), %rax movq %rcx, 0x40(%rax) movq 0x60(%rsp), %rax movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0xf8(%rsp) cmpl $0x4, 0x28(%rax) jne 0x59124a movq 0x60(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x110(%rsp) movb $0x1, 0x29b(%rsp) testb $0x1, 0x29b(%rsp) jne 0x591371 leaq 0xd0(%rsp), %rax movq %rax, 0x2b0(%rsp) movq 0x2b0(%rsp), %rax movq %rax, 0x2c0(%rsp) movq 0x2c0(%rsp), %rax movq %rax, 0x58(%rsp) cmpq $0x0, 0x8(%rax) je 0x591319 movq 0x58(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x2bc(%rsp) # imm = 0xFFFFFFFF movl 0x2bc(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x2b8(%rsp) cmpl $0x1, 0x2b8(%rsp) jne 0x591319 movq 0x58(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x5912ed movq 0x58(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x5912eb jmp 0x591317 movq 0x58(%rsp), %rax movq (%rax), %rax movq %rax, 0x2c8(%rsp) cmpq $0x0, 0x2c8(%rsp) je 0x591315 movq 0x2c8(%rsp), %rdi callq 0x5e480 jmp 0x591317 jmp 0x591319 movq 0x58(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 
0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x591371 movq %rax, %rdi callq 0x5fc90 jmp 0x591373 leaq 0xd0(%rsp), %rax movq %rax, 0x290(%rsp) movq 0x290(%rsp), %rax movq (%rax), %rax movq %rax, 0x50(%rsp) leaq 0xd0(%rsp), %rax movq %rax, 0x1c8(%rsp) movq 0x1c8(%rsp), %rax movq %rax, 0x250(%rsp) movq 0x250(%rsp), %rax movq %rax, 0x48(%rsp) cmpq $0x0, 0x8(%rax) je 0x59144c movq 0x48(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x24c(%rsp) # imm = 0xFFFFFFFF movl 0x24c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x248(%rsp) cmpl $0x1, 0x248(%rsp) jne 0x59144c movq 0x48(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x591420 movq 0x48(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x59141e jmp 0x59144a movq 0x48(%rsp), %rax movq (%rax), %rax movq %rax, 0x2d8(%rsp) cmpq $0x0, 0x2d8(%rsp) je 0x591448 movq 0x2d8(%rsp), %rdi callq 0x5e480 jmp 0x59144a jmp 0x59144c movq 0x48(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x5914a4 movq %rax, %rdi callq 0x5fc90 movq 0x50(%rsp), %rax movq %rax, 0x118(%rsp) movl 0x120(%rsp), %eax leaq 0x80(%rsp), %rcx movq %rcx, 0x268(%rsp) leaq 0x130(%rsp), %rcx movq %rcx, 0x260(%rsp) movl %eax, 0x25c(%rsp) movq 0x260(%rsp), %rax movq %rax, 0x38(%rsp) movb $0x0, 0x25b(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x25c(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x80(%rsp), %r10 movq %r10, 0x370(%rsp) movl %r9d, 0x36c(%rsp) movl %r8d, 0x368(%rsp) movl %edi, 0x364(%rsp) movq %rsi, 0x358(%rsp) movq %rdx, 0x350(%rsp) movl %ecx, 0x34c(%rsp) movq %rax, 0x340(%rsp) movq 0x370(%rsp), %rcx movq %rcx, 0x40(%rsp) movq 0x358(%rsp), %rax movq %rax, (%rcx) movq 
$0x0, 0x8(%rcx) movq 0x350(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x34c(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x340(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x36c(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x368(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x364(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x380(%rsp) movl $0x10, 0x37c(%rsp) movq 0x380(%rsp), %rax movslq 0x37c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x37c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rcx movq 0x40(%rsp), %rax movq %rcx, 0x40(%rax) movq 0x38(%rsp), %rax movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0xa8(%rsp) cmpl $0x4, 0x28(%rax) jne 0x591666 movq 0x38(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0xc0(%rsp) movb $0x1, 0x25b(%rsp) testb $0x1, 0x25b(%rsp) jne 0x59178d leaq 0x80(%rsp), %rax movq %rax, 0x270(%rsp) movq 0x270(%rsp), %rax movq %rax, 0x280(%rsp) movq 0x280(%rsp), %rax movq %rax, 0x30(%rsp) cmpq $0x0, 0x8(%rax) je 0x591735 movq 0x30(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x27c(%rsp) # imm = 0xFFFFFFFF movl 0x27c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x278(%rsp) cmpl $0x1, 0x278(%rsp) jne 0x591735 movq 0x30(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x591709 movq 0x30(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x591707 jmp 0x591733 movq 0x30(%rsp), %rax movq (%rax), %rax movq %rax, 0x2d0(%rsp) cmpq $0x0, 0x2d0(%rsp) je 0x591731 movq 0x2d0(%rsp), %rdi callq 0x5e480 jmp 0x591733 jmp 0x591735 movq 0x30(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x59178d movq %rax, %rdi callq 0x5fc90 
jmp 0x59178f leaq 0x80(%rsp), %rax movq %rax, 0x288(%rsp) movq 0x288(%rsp), %rax movq (%rax), %rax movq %rax, 0x28(%rsp) leaq 0x80(%rsp), %rax movq %rax, 0x1d8(%rsp) movq 0x1d8(%rsp), %rax movq %rax, 0x230(%rsp) movq 0x230(%rsp), %rax movq %rax, 0x20(%rsp) cmpq $0x0, 0x8(%rax) je 0x591868 movq 0x20(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x22c(%rsp) # imm = 0xFFFFFFFF movl 0x22c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x228(%rsp) cmpl $0x1, 0x228(%rsp) jne 0x591868 movq 0x20(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x59183c movq 0x20(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x59183a jmp 0x591866 movq 0x20(%rsp), %rax movq (%rax), %rax movq %rax, 0x2e8(%rsp) cmpq $0x0, 0x2e8(%rsp) je 0x591864 movq 0x2e8(%rsp), %rdi callq 0x5e480 jmp 0x591866 jmp 0x591868 movq 0x20(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x5918c0 movq %rax, %rdi callq 0x5fc90 movq 0x28(%rsp), %rax movq %rax, 0xc8(%rsp) movl $0x0, 0x7c(%rsp) movl 0x7c(%rsp), %eax cmpl 0x17c(%rsp), %eax jge 0x591992 movl $0x0, 0x78(%rsp) movl 0x78(%rsp), %eax cmpl 0x180(%rsp), %eax jge 0x591961 movq 0x118(%rsp), %rax vmovss (%rax), %xmm0 movq 0xc8(%rsp), %rax vmovss %xmm0, (%rax) movq 0x118(%rsp), %rax addq $0x8, %rax movq %rax, 0x118(%rsp) movq 0xc8(%rsp), %rax addq $0x4, %rax movq %rax, 0xc8(%rsp) movl 0x78(%rsp), %eax addl $0x1, %eax movl %eax, 0x78(%rsp) jmp 0x5918ee movq %rax, %rcx movl %edx, %eax movq %rcx, 0x128(%rsp) movl %eax, 0x124(%rsp) jmp 0x591af2 movl 0x178(%rsp), %ecx movq 0x118(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x118(%rsp) movl 0x7c(%rsp), %eax addl $0x1, %eax movl %eax, 0x7c(%rsp) jmp 0x5918d5 jmp 0x591994 movl 0x120(%rsp), %eax addl $0x1, %eax movl %eax, 0x120(%rsp) jmp 0x591081 movq 0x1b0(%rsp), 
%rsi movq 0x1a8(%rsp), %rdx movq 0x1a0(%rsp), %rcx movq 0x198(%rsp), %r8 leaq 0x130(%rsp), %rdi callq 0x590b10 jmp 0x5919d9 leaq 0x130(%rsp), %rax movq %rax, 0x1e8(%rsp) movq 0x1e8(%rsp), %rax movq %rax, 0x210(%rsp) movq 0x210(%rsp), %rax movq %rax, 0x18(%rsp) cmpq $0x0, 0x8(%rax) je 0x591a92 movq 0x18(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x20c(%rsp) # imm = 0xFFFFFFFF movl 0x20c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x208(%rsp) cmpl $0x1, 0x208(%rsp) jne 0x591a92 movq 0x18(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x591a66 movq 0x18(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x591a64 jmp 0x591a90 movq 0x18(%rsp), %rax movq (%rax), %rax movq %rax, 0x2f8(%rsp) cmpq $0x0, 0x2f8(%rsp) je 0x591a8e movq 0x2f8(%rsp), %rdi callq 0x5e480 jmp 0x591a90 jmp 0x591a92 movq 0x18(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x591aea movq %rax, %rdi callq 0x5fc90 addq $0x398, %rsp # imm = 0x398 retq leaq 0x130(%rsp), %rax movq %rax, 0x1f0(%rsp) movq 0x1f0(%rsp), %rax movq %rax, 0x200(%rsp) movq 0x200(%rsp), %rax movq %rax, 0x10(%rsp) cmpq $0x0, 0x8(%rax) je 0x591bab movq 0x10(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x1fc(%rsp) # imm = 0xFFFFFFFF movl 0x1fc(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1f8(%rsp) cmpl $0x1, 0x1f8(%rsp) jne 0x591bab movq 0x10(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x591b7f movq 0x10(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x591b7d jmp 0x591ba9 movq 0x10(%rsp), %rax movq (%rax), %rax movq %rax, 0x300(%rsp) cmpq $0x0, 0x300(%rsp) je 0x591ba7 movq 0x300(%rsp), %rdi callq 0x5e480 jmp 0x591ba9 jmp 0x591bab movq 0x10(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) 
movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x591c03 movq %rax, %rdi callq 0x5fc90 jmp 0x591c05 movq 0x128(%rsp), %rdi callq 0x5e3b0 nopw %cs:(%rax,%rax)
/ysh329[P]ncnn/src/layer/x86/convolution_1x1.h
ncnn::Convolution_x86_fma::forward(std::vector<ncnn::Mat, std::allocator<ncnn::Mat>> const&, std::vector<ncnn::Mat, std::allocator<ncnn::Mat>>&, ncnn::Option const&) const
int Convolution_x86_fma::forward(const std::vector<Mat>& bottom_blobs, std::vector<Mat>& top_blobs, const Option& opt) const { const Mat& bottom_blob = bottom_blobs[0]; const Mat& _weight_data = bottom_blobs[1]; Mat& top_blob = top_blobs[0]; const int _kernel_w = _weight_data.w; const int _kernel_h = _weight_data.h; const int _num_output = _weight_data.c * _weight_data.elempack; Mat weight_data_flattened; flatten(_weight_data, weight_data_flattened, opt); if (weight_data_flattened.empty()) return -100; // weight_data_flattened as pack1 weight_data_flattened.w *= weight_data_flattened.elempack; weight_data_flattened.elemsize /= weight_data_flattened.elempack; weight_data_flattened.elempack = 1; Mat bias_data_flattened; if (bias_term) { const Mat& _bias_data = bottom_blobs[2]; flatten(_bias_data, bias_data_flattened, opt); if (bias_data_flattened.empty()) return -100; // bias_data_flattened as pack1 bias_data_flattened.w *= bias_data_flattened.elempack; bias_data_flattened.elemsize /= bias_data_flattened.elempack; bias_data_flattened.elempack = 1; } ncnn::Layer* op = ncnn::create_layer(ncnn::LayerType::Convolution); ncnn::ParamDict pd; pd.set(0, _num_output); pd.set(1, _kernel_w); pd.set(11, _kernel_h); pd.set(2, dilation_w); pd.set(21, dilation_h); pd.set(3, stride_w); pd.set(31, stride_h); pd.set(4, pad_left); pd.set(15, pad_right); pd.set(14, pad_top); pd.set(16, pad_bottom); pd.set(18, pad_value); pd.set(5, bias_term); pd.set(6, weight_data_flattened.w); pd.set(8, int8_scale_term); pd.set(9, activation_type); pd.set(10, activation_params); op->load_param(pd); ncnn::Mat weights[2]; weights[0] = weight_data_flattened; weights[1] = bias_data_flattened; op->load_model(ncnn::ModelBinFromMatArray(weights)); op->create_pipeline(opt); op->forward(bottom_blob, top_blob, opt); op->destroy_pipeline(opt); delete op; return 0; }
subq $0x448, %rsp # imm = 0x448 movq %rdi, 0x2b0(%rsp) movq %rsi, 0x2a8(%rsp) movq %rdx, 0x2a0(%rsp) movq %rcx, 0x298(%rsp) movq 0x2b0(%rsp), %rax movq %rax, 0xf8(%rsp) movq 0x2a8(%rsp), %rdi xorl %eax, %eax movl %eax, %esi movq %rsi, 0x100(%rsp) callq 0xb5820 movq %rax, 0x290(%rsp) movq 0x2a8(%rsp), %rdi movl $0x1, %esi callq 0xb5820 movq 0x100(%rsp), %rsi movq %rax, 0x288(%rsp) movq 0x2a0(%rsp), %rdi callq 0x98840 movq %rax, 0x280(%rsp) movq 0x288(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x27c(%rsp) movq 0x288(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0x278(%rsp) movq 0x288(%rsp), %rax movl 0x18(%rax), %ecx movl 0x38(%rax), %eax imull %ecx, %eax movl %eax, 0x274(%rsp) leaq 0x228(%rsp), %rsi movq %rsi, 0x2c0(%rsp) movq 0x2c0(%rsp), %rax movq %rax, 0x108(%rsp) movq $0x0, (%rax) movq $0x0, 0x8(%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movq $0x0, 0x20(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq 0x288(%rsp), %rdi movq 0x298(%rsp), %rdx callq 0x69690 jmp 0x595753 leaq 0x228(%rsp), %rax movq %rax, 0x3e8(%rsp) movq 0x3e8(%rsp), %rcx movq %rcx, 0xe8(%rsp) movb $0x1, %al cmpq $0x0, (%rcx) movb %al, 0xf7(%rsp) je 0x5957b4 movq 0xe8(%rsp), %rax movq %rax, 0x438(%rsp) movq 0x438(%rsp), %rcx movq 0x40(%rcx), %rax movslq 0x38(%rcx), %rcx imulq %rcx, %rax cmpq $0x0, %rax sete %al movb %al, 0xf7(%rsp) movb 0xf7(%rsp), %al movb %al, 0xe7(%rsp) movb 0xe7(%rsp), %al testb $0x1, %al jne 0x5957cf jmp 0x595803 movl $0xffffff9c, 0x2bc(%rsp) # imm = 0xFFFFFF9C movl $0x1, 0x218(%rsp) jmp 0x596820 movq %rax, %rcx movl %edx, %eax movq %rcx, 0x220(%rsp) movl %eax, 0x21c(%rsp) jmp 0x596940 movl 0x240(%rsp), %eax imull 0x254(%rsp), %eax movl %eax, 0x254(%rsp) movslq 0x240(%rsp), %rcx movq 0x238(%rsp), %rax xorl %edx, %edx divq %rcx movq %rax, 0x238(%rsp) movl $0x1, 0x240(%rsp) leaq 0x1d0(%rsp), %rax movq %rax, 0x2c8(%rsp) movq 0x2c8(%rsp), %rax movq %rax, 0xd8(%rsp) movq $0x0, 
(%rax) movq $0x0, 0x8(%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movq $0x0, 0x20(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq 0xf8(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx cmpl $0x0, 0x100(%rax,%rcx) je 0x5959f6 movq 0x2a8(%rsp), %rdi movl $0x2, %esi callq 0xb5820 movq %rax, 0x1c8(%rsp) movq 0x1c8(%rsp), %rdi movq 0x298(%rsp), %rdx leaq 0x1d0(%rsp), %rsi callq 0x69690 jmp 0x595908 leaq 0x1d0(%rsp), %rax movq %rax, 0x3e0(%rsp) movq 0x3e0(%rsp), %rcx movq %rcx, 0xc8(%rsp) movb $0x1, %al cmpq $0x0, (%rcx) movb %al, 0xd7(%rsp) je 0x595969 movq 0xc8(%rsp), %rax movq %rax, 0x440(%rsp) movq 0x440(%rsp), %rcx movq 0x40(%rcx), %rax movslq 0x38(%rcx), %rcx imulq %rcx, %rax cmpq $0x0, %rax sete %al movb %al, 0xd7(%rsp) movb 0xd7(%rsp), %al movb %al, 0xc7(%rsp) movb 0xc7(%rsp), %al testb $0x1, %al jne 0x595984 jmp 0x5959b8 movl $0xffffff9c, 0x2bc(%rsp) # imm = 0xFFFFFF9C movl $0x1, 0x218(%rsp) jmp 0x5965f4 movq %rax, %rcx movl %edx, %eax movq %rcx, 0x220(%rsp) movl %eax, 0x21c(%rsp) jmp 0x59670a movl 0x1e8(%rsp), %eax imull 0x1fc(%rsp), %eax movl %eax, 0x1fc(%rsp) movslq 0x1e8(%rsp), %rcx movq 0x1e0(%rsp), %rax xorl %edx, %edx divq %rcx movq %rax, 0x1e0(%rsp) movl $0x1, 0x1e8(%rsp) movl $0x6, %edi callq 0xae160 movq %rax, 0xb8(%rsp) jmp 0x595a0a movq 0xb8(%rsp), %rax movq %rax, 0x1c0(%rsp) leaq 0x1b0(%rsp), %rdi callq 0xa0840 jmp 0x595a29 movl 0x274(%rsp), %edx leaq 0x1b0(%rsp), %rdi xorl %esi, %esi callq 0xa16d0 jmp 0x595a41 movl 0x27c(%rsp), %edx leaq 0x1b0(%rsp), %rdi movl $0x1, %esi callq 0xa16d0 jmp 0x595a5c movl 0x278(%rsp), %edx leaq 0x1b0(%rsp), %rdi movl $0xb, %esi callq 0xa16d0 jmp 0x595a77 movq 0xf8(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx movl 0xdc(%rax,%rcx), %edx leaq 0x1b0(%rsp), %rdi movl $0x2, %esi callq 0xa16d0 jmp 0x595aa1 movq 0xf8(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx movl 0xe0(%rax,%rcx), %edx leaq 0x1b0(%rsp), 
%rdi movl $0x15, %esi callq 0xa16d0 jmp 0x595acb movq 0xf8(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx movl 0xe4(%rax,%rcx), %edx leaq 0x1b0(%rsp), %rdi movl $0x3, %esi callq 0xa16d0 jmp 0x595af5 movq 0xf8(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx movl 0xe8(%rax,%rcx), %edx leaq 0x1b0(%rsp), %rdi movl $0x1f, %esi callq 0xa16d0 jmp 0x595b1f movq 0xf8(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx movl 0xec(%rax,%rcx), %edx leaq 0x1b0(%rsp), %rdi movl $0x4, %esi callq 0xa16d0 jmp 0x595b49 movq 0xf8(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx movl 0xf0(%rax,%rcx), %edx leaq 0x1b0(%rsp), %rdi movl $0xf, %esi callq 0xa16d0 jmp 0x595b73 movq 0xf8(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx movl 0xf4(%rax,%rcx), %edx leaq 0x1b0(%rsp), %rdi movl $0xe, %esi callq 0xa16d0 jmp 0x595b9d movq 0xf8(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx movl 0xf8(%rax,%rcx), %edx leaq 0x1b0(%rsp), %rdi movl $0x10, %esi callq 0xa16d0 jmp 0x595bc7 movq 0xf8(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx vmovss 0xfc(%rax,%rcx), %xmm0 leaq 0x1b0(%rsp), %rdi movl $0x12, %esi callq 0xa1710 jmp 0x595bf3 movq 0xf8(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx movl 0x100(%rax,%rcx), %edx leaq 0x1b0(%rsp), %rdi movl $0x5, %esi callq 0xa16d0 jmp 0x595c1d movl 0x254(%rsp), %edx leaq 0x1b0(%rsp), %rdi movl $0x6, %esi callq 0xa16d0 jmp 0x595c38 movq 0xf8(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx movl 0x108(%rax,%rcx), %edx leaq 0x1b0(%rsp), %rdi movl $0x8, %esi callq 0xa16d0 jmp 0x595c62 movq 0xf8(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx movl 0x10c(%rax,%rcx), %edx leaq 0x1b0(%rsp), %rdi movl $0x9, %esi callq 0xa16d0 jmp 0x595c8c movq 0xf8(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx leaq 0x110(%rax,%rcx), %rdx leaq 0x1b0(%rsp), %rdi movl $0xa, %esi callq 0xa1760 jmp 0x595cb7 movq 0x1c0(%rsp), %rdi movq (%rdi), %rax movq 0x10(%rax), %rax leaq 0x1b0(%rsp), %rsi callq *%rax jmp 0x595cd2 leaq 0x120(%rsp), %rax 
movq %rax, %rcx addq $0x90, %rcx movq %rcx, 0xa8(%rsp) movq %rax, 0xb0(%rsp) movq 0xb0(%rsp), %rax movq %rax, 0x98(%rsp) movq %rax, 0x2d0(%rsp) movq 0x2d0(%rsp), %rax movq %rax, 0xa0(%rsp) movq $0x0, (%rax) movq $0x0, 0x8(%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movq $0x0, 0x20(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq 0xa8(%rsp), %rcx movq 0x98(%rsp), %rax addq $0x48, %rax cmpq %rcx, %rax movq %rax, 0xb0(%rsp) jne 0x595cf4 leaq 0x120(%rsp), %rax movq %rax, 0x340(%rsp) leaq 0x228(%rsp), %rax movq %rax, 0x338(%rsp) movq 0x340(%rsp), %rax movq %rax, 0x90(%rsp) cmpq 0x338(%rsp), %rax jne 0x595de1 movq 0x90(%rsp), %rax movq %rax, 0x348(%rsp) jmp 0x595fd4 movq 0x338(%rsp), %rax cmpq $0x0, 0x8(%rax) je 0x595e19 movq 0x338(%rsp), %rax movq 0x8(%rax), %rcx movl $0x1, 0x334(%rsp) movl 0x334(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x330(%rsp) movq 0x90(%rsp), %rax movq %rax, 0x358(%rsp) movq 0x358(%rsp), %rax movq %rax, 0x88(%rsp) cmpq $0x0, 0x8(%rax) je 0x595ed1 movq 0x88(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x354(%rsp) # imm = 0xFFFFFFFF movl 0x354(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x350(%rsp) cmpl $0x1, 0x350(%rsp) jne 0x595ed1 movq 0x88(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x595ea2 movq 0x88(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x595ea0 jmp 0x595ecf movq 0x88(%rsp), %rax movq (%rax), %rax movq %rax, 0x430(%rsp) cmpq $0x0, 0x430(%rsp) je 0x595ecd movq 0x430(%rsp), %rdi callq 0x5e480 jmp 0x595ecf jmp 0x595ed1 movq 0x88(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) movq 0x90(%rsp), %rax movq 0x338(%rsp), %rcx movq (%rcx), %rcx movq %rcx, (%rax) movq 0x338(%rsp), %rcx movq 0x8(%rcx), 
%rcx movq %rcx, 0x8(%rax) movq 0x338(%rsp), %rcx movq 0x10(%rcx), %rcx movq %rcx, 0x10(%rax) movq 0x338(%rsp), %rcx movl 0x18(%rcx), %ecx movl %ecx, 0x18(%rax) movq 0x338(%rsp), %rcx movq 0x20(%rcx), %rcx movq %rcx, 0x20(%rax) movq 0x338(%rsp), %rcx movl 0x28(%rcx), %ecx movl %ecx, 0x28(%rax) movq 0x338(%rsp), %rcx movl 0x2c(%rcx), %ecx movl %ecx, 0x2c(%rax) movq 0x338(%rsp), %rcx movl 0x30(%rcx), %ecx movl %ecx, 0x30(%rax) movq 0x338(%rsp), %rcx movl 0x34(%rcx), %ecx movl %ecx, 0x34(%rax) movq 0x338(%rsp), %rcx movl 0x38(%rcx), %ecx movl %ecx, 0x38(%rax) movq 0x338(%rsp), %rcx movq 0x40(%rcx), %rcx movq %rcx, 0x40(%rax) movq %rax, 0x348(%rsp) leaq 0x120(%rsp), %rax addq $0x48, %rax movq %rax, 0x320(%rsp) leaq 0x1d0(%rsp), %rax movq %rax, 0x318(%rsp) movq 0x320(%rsp), %rax movq %rax, 0x80(%rsp) cmpq 0x318(%rsp), %rax jne 0x596027 movq 0x80(%rsp), %rax movq %rax, 0x328(%rsp) jmp 0x596208 movq 0x318(%rsp), %rax cmpq $0x0, 0x8(%rax) je 0x59605f movq 0x318(%rsp), %rax movq 0x8(%rax), %rcx movl $0x1, 0x314(%rsp) movl 0x314(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x310(%rsp) movq 0x80(%rsp), %rax movq %rax, 0x368(%rsp) movq 0x368(%rsp), %rax movq %rax, 0x78(%rsp) cmpq $0x0, 0x8(%rax) je 0x596108 movq 0x78(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x364(%rsp) # imm = 0xFFFFFFFF movl 0x364(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x360(%rsp) cmpl $0x1, 0x360(%rsp) jne 0x596108 movq 0x78(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x5960dc movq 0x78(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x5960da jmp 0x596106 movq 0x78(%rsp), %rax movq (%rax), %rax movq %rax, 0x428(%rsp) cmpq $0x0, 0x428(%rsp) je 0x596104 movq 0x428(%rsp), %rdi callq 0x5e480 jmp 0x596106 jmp 0x596108 movq 0x78(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 
0x8(%rax) movq 0x80(%rsp), %rax movq 0x318(%rsp), %rcx movq (%rcx), %rcx movq %rcx, (%rax) movq 0x318(%rsp), %rcx movq 0x8(%rcx), %rcx movq %rcx, 0x8(%rax) movq 0x318(%rsp), %rcx movq 0x10(%rcx), %rcx movq %rcx, 0x10(%rax) movq 0x318(%rsp), %rcx movl 0x18(%rcx), %ecx movl %ecx, 0x18(%rax) movq 0x318(%rsp), %rcx movq 0x20(%rcx), %rcx movq %rcx, 0x20(%rax) movq 0x318(%rsp), %rcx movl 0x28(%rcx), %ecx movl %ecx, 0x28(%rax) movq 0x318(%rsp), %rcx movl 0x2c(%rcx), %ecx movl %ecx, 0x2c(%rax) movq 0x318(%rsp), %rcx movl 0x30(%rcx), %ecx movl %ecx, 0x30(%rax) movq 0x318(%rsp), %rcx movl 0x34(%rcx), %ecx movl %ecx, 0x34(%rax) movq 0x318(%rsp), %rcx movl 0x38(%rcx), %ecx movl %ecx, 0x38(%rax) movq 0x318(%rsp), %rcx movq 0x40(%rcx), %rcx movq %rcx, 0x40(%rax) movq %rax, 0x328(%rsp) movq 0x1c0(%rsp), %rax movq %rax, 0x70(%rsp) leaq 0x110(%rsp), %rdi leaq 0x120(%rsp), %rsi callq 0x89470 jmp 0x59622c movq 0x70(%rsp), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax leaq 0x110(%rsp), %rsi callq *%rax jmp 0x596244 leaq 0x110(%rsp), %rdi callq 0x89520 movq 0x1c0(%rsp), %rdi movq 0x298(%rsp), %rsi movq (%rdi), %rax movq 0x20(%rax), %rax callq *%rax jmp 0x59626c movq 0x1c0(%rsp), %rdi movq 0x290(%rsp), %rsi movq 0x280(%rsp), %rdx movq 0x298(%rsp), %rcx movq (%rdi), %rax movq 0x38(%rax), %rax callq *%rax jmp 0x596297 movq 0x1c0(%rsp), %rdi movq 0x298(%rsp), %rsi movq (%rdi), %rax movq 0x28(%rax), %rax callq *%rax jmp 0x5962b2 movq 0x1c0(%rsp), %rax movq %rax, 0x68(%rsp) cmpq $0x0, %rax je 0x5962d0 movq 0x68(%rsp), %rdi movq (%rdi), %rax callq *0x8(%rax) movl $0x0, 0x2bc(%rsp) movl $0x1, 0x218(%rsp) leaq 0x120(%rsp), %rax movq %rax, 0x58(%rsp) addq $0x90, %rax movq %rax, 0x60(%rsp) jmp 0x596358 movq %rax, %rcx movl %edx, %eax movq %rcx, 0x220(%rsp) movl %eax, 0x21c(%rsp) jmp 0x5965e2 movq %rax, %rcx movl %edx, %eax movq %rcx, 0x220(%rsp) movl %eax, 0x21c(%rsp) jmp 0x596499 movq %rax, %rcx movl %edx, %eax movq %rcx, 0x220(%rsp) movl %eax, 0x21c(%rsp) leaq 0x110(%rsp), %rdi callq 0x89520 jmp 
0x596499 movq 0x60(%rsp), %rax addq $-0x48, %rax movq %rax, 0x48(%rsp) movq %rax, 0x2e0(%rsp) movq 0x2e0(%rsp), %rax movq %rax, 0x3c8(%rsp) movq 0x3c8(%rsp), %rax movq %rax, 0x50(%rsp) cmpq $0x0, 0x8(%rax) je 0x596417 movq 0x50(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x3c4(%rsp) # imm = 0xFFFFFFFF movl 0x3c4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x3c0(%rsp) cmpl $0x1, 0x3c0(%rsp) jne 0x596417 movq 0x50(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x5963eb movq 0x50(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x5963e9 jmp 0x596415 movq 0x50(%rsp), %rax movq (%rax), %rax movq %rax, 0x3f8(%rsp) cmpq $0x0, 0x3f8(%rsp) je 0x596413 movq 0x3f8(%rsp), %rdi callq 0x5e480 jmp 0x596415 jmp 0x596417 movq 0x50(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x59646f movq %rax, %rdi callq 0x5fc90 movq 0x48(%rsp), %rax movq 0x58(%rsp), %rcx cmpq %rcx, %rax movq %rax, 0x60(%rsp) jne 0x596358 leaq 0x1b0(%rsp), %rdi callq 0xa0e10 jmp 0x5965f4 leaq 0x120(%rsp), %rax movq %rax, 0x38(%rsp) addq $0x90, %rax movq %rax, 0x40(%rsp) movq 0x40(%rsp), %rax addq $-0x48, %rax movq %rax, 0x28(%rsp) movq %rax, 0x2e8(%rsp) movq 0x2e8(%rsp), %rax movq %rax, 0x3b8(%rsp) movq 0x3b8(%rsp), %rax movq %rax, 0x30(%rsp) cmpq $0x0, 0x8(%rax) je 0x596570 movq 0x30(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x3b4(%rsp) # imm = 0xFFFFFFFF movl 0x3b4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x3b0(%rsp) cmpl $0x1, 0x3b0(%rsp) jne 0x596570 movq 0x30(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x596544 movq 0x30(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x596542 jmp 0x59656e movq 0x30(%rsp), %rax movq (%rax), %rax movq %rax, 0x400(%rsp) cmpq $0x0, 0x400(%rsp) je 0x59656c movq 
0x400(%rsp), %rdi callq 0x5e480 jmp 0x59656e jmp 0x596570 movq 0x30(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x5965c8 movq %rax, %rdi callq 0x5fc90 movq 0x28(%rsp), %rax movq 0x38(%rsp), %rcx cmpq %rcx, %rax movq %rax, 0x40(%rsp) jne 0x5964b1 jmp 0x5965e2 leaq 0x1b0(%rsp), %rdi callq 0xa0e10 jmp 0x59670a leaq 0x1d0(%rsp), %rax movq %rax, 0x2f0(%rsp) movq 0x2f0(%rsp), %rax movq %rax, 0x3a8(%rsp) movq 0x3a8(%rsp), %rax movq %rax, 0x20(%rsp) cmpq $0x0, 0x8(%rax) je 0x5966ad movq 0x20(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x3a4(%rsp) # imm = 0xFFFFFFFF movl 0x3a4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x3a0(%rsp) cmpl $0x1, 0x3a0(%rsp) jne 0x5966ad movq 0x20(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x596681 movq 0x20(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x59667f jmp 0x5966ab movq 0x20(%rsp), %rax movq (%rax), %rax movq %rax, 0x408(%rsp) cmpq $0x0, 0x408(%rsp) je 0x5966a9 movq 0x408(%rsp), %rdi callq 0x5e480 jmp 0x5966ab jmp 0x5966ad movq 0x20(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x596705 movq %rax, %rdi callq 0x5fc90 jmp 0x596820 leaq 0x1d0(%rsp), %rax movq %rax, 0x2f8(%rsp) movq 0x2f8(%rsp), %rax movq %rax, 0x398(%rsp) movq 0x398(%rsp), %rax movq %rax, 0x18(%rsp) cmpq $0x0, 0x8(%rax) je 0x5967c3 movq 0x18(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x394(%rsp) # imm = 0xFFFFFFFF movl 0x394(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x390(%rsp) cmpl $0x1, 0x390(%rsp) jne 0x5967c3 movq 0x18(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x596797 movq 0x18(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi 
movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x596795 jmp 0x5967c1 movq 0x18(%rsp), %rax movq (%rax), %rax movq %rax, 0x410(%rsp) cmpq $0x0, 0x410(%rsp) je 0x5967bf movq 0x410(%rsp), %rdi callq 0x5e480 jmp 0x5967c1 jmp 0x5967c3 movq 0x18(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x59681b movq %rax, %rdi callq 0x5fc90 jmp 0x596940 leaq 0x228(%rsp), %rax movq %rax, 0x300(%rsp) movq 0x300(%rsp), %rax movq %rax, 0x388(%rsp) movq 0x388(%rsp), %rax movq %rax, 0x10(%rsp) cmpq $0x0, 0x8(%rax) je 0x5968d9 movq 0x10(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x384(%rsp) # imm = 0xFFFFFFFF movl 0x384(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x380(%rsp) cmpl $0x1, 0x380(%rsp) jne 0x5968d9 movq 0x10(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x5968ad movq 0x10(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x5968ab jmp 0x5968d7 movq 0x10(%rsp), %rax movq (%rax), %rax movq %rax, 0x418(%rsp) cmpq $0x0, 0x418(%rsp) je 0x5968d5 movq 0x418(%rsp), %rdi callq 0x5e480 jmp 0x5968d7 jmp 0x5968d9 movq 0x10(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x596931 movq %rax, %rdi callq 0x5fc90 movl 0x2bc(%rsp), %eax addq $0x448, %rsp # imm = 0x448 retq leaq 0x228(%rsp), %rax movq %rax, 0x308(%rsp) movq 0x308(%rsp), %rax movq %rax, 0x378(%rsp) movq 0x378(%rsp), %rax movq %rax, 0x8(%rsp) cmpq $0x0, 0x8(%rax) je 0x5969f9 movq 0x8(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x374(%rsp) # imm = 0xFFFFFFFF movl 0x374(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x370(%rsp) cmpl $0x1, 0x370(%rsp) jne 0x5969f9 movq 0x8(%rsp), %rax cmpq $0x0, 
0x20(%rax) je 0x5969cd movq 0x8(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x5969cb jmp 0x5969f7 movq 0x8(%rsp), %rax movq (%rax), %rax movq %rax, 0x420(%rsp) cmpq $0x0, 0x420(%rsp) je 0x5969f5 movq 0x420(%rsp), %rdi callq 0x5e480 jmp 0x5969f7 jmp 0x5969f9 movq 0x8(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x596a51 movq %rax, %rdi callq 0x5fc90 jmp 0x596a53 movq 0x220(%rsp), %rdi callq 0x5e3b0
/ysh329[P]ncnn/build_O0/src/layer/x86/convolution_x86_fma.cpp
ncnn::conv3x3s1_winograd63_transform_output_sse(ncnn::Mat const&, ncnn::Mat&, ncnn::Mat const&, ncnn::Option const&)
static void conv3x3s1_winograd63_transform_output_sse(const Mat& top_blob_tm, Mat& top_blob, const Mat& bias, const Option& opt) { const int outw = top_blob.w; const int outh = top_blob.h; const int outch = top_blob.c; const int w_tiles = outw / 6; const int h_tiles = outh / 6; const int tiles = w_tiles * h_tiles; const float* biasptr = bias; // const float otm[6][8] = { // {1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 32.0f, 32.0f, 0.0f}, // {0.0f, 1.0f, -1.0f, 2.0f, -2.0f, 16.0f,-16.0f, 0.0f}, // {0.0f, 1.0f, 1.0f, 4.0f, 4.0f, 8.0f, 8.0f, 0.0f}, // {0.0f, 1.0f, -1.0f, 8.0f, -8.0f, 4.0f, -4.0f, 0.0f}, // {0.0f, 1.0f, 1.0f, 16.0f, 16.0f, 2.0f, 2.0f, 0.0f}, // {0.0f, 1.0f, -1.0f, 32.0f, -32.0f, 1.0f, -1.0f, 1.0f} // }; // 0 = r0 + (r1 + r2) + (r3 + r4) + (r5 + r6) * 32 // 1 = (r1 - r2) + (r3 - r4) * 2 + (r5 - r6) * 16 // 2 = (r1 + r2) + (r3 + r4) * 4 + (r5 + r6) * 8 // 3 = (r1 - r2) + (r3 - r4) * 8 + (r5 - r6) * 4 // 4 = (r1 + r2) + (r3 + r4) * 16+ (r5 + r6) * 2 // 5 = r7 + (r1 - r2) + (r3 - r4) * 32+ (r5 - r6) #pragma omp parallel for num_threads(opt.num_threads) for (int p = 0; p < outch; p++) { const Mat out0_tm = top_blob_tm.channel(p); Mat out0 = top_blob.channel(p); const float bias0 = biasptr ? 
biasptr[p] : 0.f; float tmp[6][8]; // tile for (int i = 0; i < h_tiles; i++) { for (int j = 0; j < w_tiles; j++) { const float* output0_tm_0 = (const float*)out0_tm + (i * w_tiles + j) * 1; const float* output0_tm_1 = output0_tm_0 + tiles * 1; const float* output0_tm_2 = output0_tm_0 + tiles * 2; const float* output0_tm_3 = output0_tm_0 + tiles * 3; const float* output0_tm_4 = output0_tm_0 + tiles * 4; const float* output0_tm_5 = output0_tm_0 + tiles * 5; const float* output0_tm_6 = output0_tm_0 + tiles * 6; const float* output0_tm_7 = output0_tm_0 + tiles * 7; // TODO sse optimize for (int m = 0; m < 8; m++) { float tmp024a = output0_tm_1[0] + output0_tm_2[0]; float tmp135a = output0_tm_1[0] - output0_tm_2[0]; float tmp024b = output0_tm_3[0] + output0_tm_4[0]; float tmp135b = output0_tm_3[0] - output0_tm_4[0]; float tmp024c = output0_tm_5[0] + output0_tm_6[0]; float tmp135c = output0_tm_5[0] - output0_tm_6[0]; tmp[0][m] = output0_tm_0[0] + tmp024a + tmp024b + tmp024c * 32; tmp[2][m] = tmp024a + tmp024b * 4 + tmp024c * 8; tmp[4][m] = tmp024a + tmp024b * 16 + tmp024c + tmp024c; tmp[1][m] = tmp135a + tmp135b + tmp135b + tmp135c * 16; tmp[3][m] = tmp135a + tmp135b * 8 + tmp135c * 4; tmp[5][m] = output0_tm_7[0] + tmp135a + tmp135b * 32 + tmp135c; output0_tm_0 += tiles * 8; output0_tm_1 += tiles * 8; output0_tm_2 += tiles * 8; output0_tm_3 += tiles * 8; output0_tm_4 += tiles * 8; output0_tm_5 += tiles * 8; output0_tm_6 += tiles * 8; output0_tm_7 += tiles * 8; } float* output0 = out0.row(i * 6) + j * 6; for (int m = 0; m < 6; m++) { const float* tmp0 = tmp[m]; float tmp024a = tmp0[1] + tmp0[2]; float tmp135a = tmp0[1] - tmp0[2]; float tmp024b = tmp0[3] + tmp0[4]; float tmp135b = tmp0[3] - tmp0[4]; float tmp024c = tmp0[5] + tmp0[6]; float tmp135c = tmp0[5] - tmp0[6]; output0[0] = bias0 + tmp0[0] + tmp024a + tmp024b + tmp024c * 32; output0[2] = bias0 + tmp024a + tmp024b * 4 + tmp024c * 8; output0[4] = bias0 + tmp024a + tmp024b * 16 + tmp024c + tmp024c; output0[1] = bias0 + 
tmp135a + tmp135b + tmp135b + tmp135c * 16; output0[3] = bias0 + tmp135a + tmp135b * 8 + tmp135c * 4; output0[5] = bias0 + tmp0[7] + tmp135a + tmp135b * 32 + tmp135c; output0 += outw; } } } } }
subq $0x438, %rsp # imm = 0x438 movq %rdi, 0x290(%rsp) movq %rsi, 0x288(%rsp) movq %rdx, 0x280(%rsp) movq %rcx, 0x278(%rsp) movq 0x288(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x274(%rsp) movq 0x288(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0x270(%rsp) movq 0x288(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0x26c(%rsp) movl 0x274(%rsp), %eax movl $0x6, %ecx cltd idivl %ecx movl %eax, 0x268(%rsp) movl 0x270(%rsp), %eax movl $0x6, %ecx cltd idivl %ecx movl %eax, 0x264(%rsp) movl 0x268(%rsp), %eax imull 0x264(%rsp), %eax movl %eax, 0x260(%rsp) movq 0x280(%rsp), %rax movq %rax, 0x330(%rsp) movq 0x330(%rsp), %rax movq (%rax), %rax movq %rax, 0x258(%rsp) movl $0x0, 0x254(%rsp) movl 0x254(%rsp), %eax cmpl 0x26c(%rsp), %eax jge 0x6041ee movq 0x290(%rsp), %rcx movl 0x254(%rsp), %eax leaq 0x208(%rsp), %rdx movq %rdx, 0x348(%rsp) movq %rcx, 0x340(%rsp) movl %eax, 0x33c(%rsp) movq 0x340(%rsp), %rax movq %rax, 0x50(%rsp) movb $0x0, 0x33b(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x33c(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x208(%rsp), %r10 movq %r10, 0x3d8(%rsp) movl %r9d, 0x3d4(%rsp) movl %r8d, 0x3d0(%rsp) movl %edi, 0x3cc(%rsp) movq %rsi, 0x3c0(%rsp) movq %rdx, 0x3b8(%rsp) movl %ecx, 0x3b4(%rsp) movq %rax, 0x3a8(%rsp) movq 0x3d8(%rsp), %rcx movq %rcx, 0x48(%rsp) movq 0x3c0(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x3b8(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x3b4(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x3a8(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x3d4(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x3d0(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x3cc(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x430(%rsp) movl $0x10, 0x42c(%rsp) movq 0x430(%rsp), %rax movslq 
0x42c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x42c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x50(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x230(%rsp) cmpl $0x4, 0x28(%rax) jne 0x603351 movq 0x50(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x248(%rsp) movb $0x1, 0x33b(%rsp) testb $0x1, 0x33b(%rsp) jne 0x603478 leaq 0x208(%rsp), %rax movq %rax, 0x350(%rsp) movq 0x350(%rsp), %rax movq %rax, 0x360(%rsp) movq 0x360(%rsp), %rax movq %rax, 0x40(%rsp) cmpq $0x0, 0x8(%rax) je 0x603420 movq 0x40(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x35c(%rsp) # imm = 0xFFFFFFFF movl 0x35c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x358(%rsp) cmpl $0x1, 0x358(%rsp) jne 0x603420 movq 0x40(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x6033f4 movq 0x40(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x6033f2 jmp 0x60341e movq 0x40(%rsp), %rax movq (%rax), %rax movq %rax, 0x368(%rsp) cmpq $0x0, 0x368(%rsp) je 0x60341c movq 0x368(%rsp), %rdi callq 0x5e480 jmp 0x60341e jmp 0x603420 movq 0x40(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x603478 movq %rax, %rdi callq 0x5fc90 movq 0x288(%rsp), %rcx movl 0x254(%rsp), %eax leaq 0x1c0(%rsp), %rdx movq %rdx, 0x308(%rsp) movq %rcx, 0x300(%rsp) movl %eax, 0x2fc(%rsp) movq 0x300(%rsp), %rax movq %rax, 0x30(%rsp) movb $0x0, 0x2fb(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x2fc(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x1c0(%rsp), %r10 movq %r10, 0x410(%rsp) movl 
%r9d, 0x40c(%rsp) movl %r8d, 0x408(%rsp) movl %edi, 0x404(%rsp) movq %rsi, 0x3f8(%rsp) movq %rdx, 0x3f0(%rsp) movl %ecx, 0x3ec(%rsp) movq %rax, 0x3e0(%rsp) movq 0x410(%rsp), %rcx movq %rcx, 0x38(%rsp) movq 0x3f8(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x3f0(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x3ec(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x3e0(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x40c(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x408(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x404(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x420(%rsp) movl $0x10, 0x41c(%rsp) movq 0x420(%rsp), %rax movslq 0x41c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x41c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rcx movq 0x38(%rsp), %rax movq %rcx, 0x40(%rax) movq 0x30(%rsp), %rax movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x1e8(%rsp) cmpl $0x4, 0x28(%rax) jne 0x60362d movq 0x30(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x200(%rsp) movb $0x1, 0x2fb(%rsp) testb $0x1, 0x2fb(%rsp) jne 0x603754 leaq 0x1c0(%rsp), %rax movq %rax, 0x310(%rsp) movq 0x310(%rsp), %rax movq %rax, 0x320(%rsp) movq 0x320(%rsp), %rax movq %rax, 0x28(%rsp) cmpq $0x0, 0x8(%rax) je 0x6036fc movq 0x28(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x31c(%rsp) # imm = 0xFFFFFFFF movl 0x31c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x318(%rsp) cmpl $0x1, 0x318(%rsp) jne 0x6036fc movq 0x28(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x6036d0 movq 0x28(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x6036ce jmp 0x6036fa movq 0x28(%rsp), %rax movq (%rax), %rax movq %rax, 0x370(%rsp) cmpq $0x0, 0x370(%rsp) je 0x6036f8 movq 0x370(%rsp), %rdi callq 0x5e480 jmp 0x6036fa jmp 0x6036fc movq 0x28(%rsp), %rax movq 
$0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x603754 movq %rax, %rdi callq 0x5fc90 jmp 0x603756 cmpq $0x0, 0x258(%rsp) je 0x60377e movq 0x258(%rsp), %rax movslq 0x254(%rsp), %rcx vmovss (%rax,%rcx,4), %xmm0 vmovss %xmm0, 0x24(%rsp) jmp 0x60378a vxorps %xmm0, %xmm0, %xmm0 vmovss %xmm0, 0x24(%rsp) jmp 0x60378a vmovss 0x24(%rsp), %xmm0 vmovss %xmm0, 0x1b0(%rsp) movl $0x0, 0xec(%rsp) movl 0xec(%rsp), %eax cmpl 0x264(%rsp), %eax jge 0x603fb4 movl $0x0, 0xe8(%rsp) movl 0xe8(%rsp), %eax cmpl 0x268(%rsp), %eax jge 0x603f9c leaq 0x208(%rsp), %rax movq %rax, 0x328(%rsp) movq 0x328(%rsp), %rax movq (%rax), %rax movq %rax, 0x18(%rsp) movq 0x18(%rsp), %rax movl 0xec(%rsp), %ecx imull 0x268(%rsp), %ecx addl 0xe8(%rsp), %ecx shll $0x0, %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0xe0(%rsp) movq 0xe0(%rsp), %rax movl 0x260(%rsp), %ecx shll $0x0, %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0xd8(%rsp) movq 0xe0(%rsp), %rax movl 0x260(%rsp), %ecx shll %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0xd0(%rsp) movq 0xe0(%rsp), %rax imull $0x3, 0x260(%rsp), %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0xc8(%rsp) movq 0xe0(%rsp), %rax movl 0x260(%rsp), %ecx shll $0x2, %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0xc0(%rsp) movq 0xe0(%rsp), %rax imull $0x5, 0x260(%rsp), %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0xb8(%rsp) movq 0xe0(%rsp), %rax imull $0x6, 0x260(%rsp), %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0xb0(%rsp) movq 0xe0(%rsp), %rax imull $0x7, 0x260(%rsp), %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0xa8(%rsp) movl $0x0, 0xa4(%rsp) cmpl $0x8, 0xa4(%rsp) jge 0x603cc1 movq 0xd8(%rsp), %rax vmovss (%rax), %xmm0 movq 0xd0(%rsp), %rax vaddss 
(%rax), %xmm0, %xmm0 vmovss %xmm0, 0xa0(%rsp) movq 0xd8(%rsp), %rax vmovss (%rax), %xmm0 movq 0xd0(%rsp), %rax vsubss (%rax), %xmm0, %xmm0 vmovss %xmm0, 0x9c(%rsp) movq 0xc8(%rsp), %rax vmovss (%rax), %xmm0 movq 0xc0(%rsp), %rax vaddss (%rax), %xmm0, %xmm0 vmovss %xmm0, 0x98(%rsp) movq 0xc8(%rsp), %rax vmovss (%rax), %xmm0 movq 0xc0(%rsp), %rax vsubss (%rax), %xmm0, %xmm0 vmovss %xmm0, 0x94(%rsp) movq 0xb8(%rsp), %rax vmovss (%rax), %xmm0 movq 0xb0(%rsp), %rax vaddss (%rax), %xmm0, %xmm0 vmovss %xmm0, 0x90(%rsp) movq 0xb8(%rsp), %rax vmovss (%rax), %xmm0 movq 0xb0(%rsp), %rax vsubss (%rax), %xmm0, %xmm0 vmovss %xmm0, 0x8c(%rsp) movq 0xe0(%rsp), %rax vmovss (%rax), %xmm0 vaddss 0xa0(%rsp), %xmm0, %xmm0 vaddss 0x98(%rsp), %xmm0, %xmm0 vmovss 0x1801079(%rip), %xmm1 # 0x1e04a98 vmulss 0x90(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 movslq 0xa4(%rsp), %rax vmovss %xmm0, 0xf0(%rsp,%rax,4) vmovss 0xa0(%rsp), %xmm0 vmovss 0x180104e(%rip), %xmm1 # 0x1e04a9c vmulss 0x98(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 vmovss 0x180103d(%rip), %xmm1 # 0x1e04aa0 vmulss 0x90(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 movslq 0xa4(%rsp), %rax vmovss %xmm0, 0x130(%rsp,%rax,4) vmovss 0xa0(%rsp), %xmm0 vmovss 0x1801012(%rip), %xmm1 # 0x1e04aa4 vmulss 0x98(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 vaddss 0x90(%rsp), %xmm0, %xmm0 vaddss 0x90(%rsp), %xmm0, %xmm0 movslq 0xa4(%rsp), %rax vmovss %xmm0, 0x170(%rsp,%rax,4) vmovss 0x9c(%rsp), %xmm0 vaddss 0x94(%rsp), %xmm0, %xmm0 vaddss 0x94(%rsp), %xmm0, %xmm0 vmovss 0x1800fbf(%rip), %xmm1 # 0x1e04aa4 vmulss 0x8c(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 movslq 0xa4(%rsp), %rax vmovss %xmm0, 0x110(%rsp,%rax,4) vmovss 0x9c(%rsp), %xmm0 vmovss 0x1800f8c(%rip), %xmm1 # 0x1e04aa0 vmulss 0x94(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 vmovss 0x1800f73(%rip), %xmm1 # 0x1e04a9c vmulss 0x8c(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 movslq 0xa4(%rsp), %rax vmovss %xmm0, 0x150(%rsp,%rax,4) movq 0xa8(%rsp), %rax vmovss (%rax), 
%xmm0 vaddss 0x9c(%rsp), %xmm0, %xmm0 vmovss 0x1800f34(%rip), %xmm1 # 0x1e04a98 vmulss 0x94(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 vaddss 0x8c(%rsp), %xmm0, %xmm0 movslq 0xa4(%rsp), %rax vmovss %xmm0, 0x190(%rsp,%rax,4) movl 0x260(%rsp), %ecx shll $0x3, %ecx movq 0xe0(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0xe0(%rsp) movl 0x260(%rsp), %ecx shll $0x3, %ecx movq 0xd8(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0xd8(%rsp) movl 0x260(%rsp), %ecx shll $0x3, %ecx movq 0xd0(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0xd0(%rsp) movl 0x260(%rsp), %ecx shll $0x3, %ecx movq 0xc8(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0xc8(%rsp) movl 0x260(%rsp), %ecx shll $0x3, %ecx movq 0xc0(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0xc0(%rsp) movl 0x260(%rsp), %ecx shll $0x3, %ecx movq 0xb8(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0xb8(%rsp) movl 0x260(%rsp), %ecx shll $0x3, %ecx movq 0xb0(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0xb0(%rsp) movl 0x260(%rsp), %ecx shll $0x3, %ecx movq 0xa8(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0xa8(%rsp) movl 0xa4(%rsp), %eax addl $0x1, %eax movl %eax, 0xa4(%rsp) jmp 0x603925 imull $0x6, 0xec(%rsp), %eax leaq 0x1c0(%rsp), %rcx movq %rcx, 0x3a0(%rsp) movl %eax, 0x39c(%rsp) movq 0x3a0(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0x39c(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax imull $0x6, 0xe8(%rsp), %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x80(%rsp) movl $0x0, 0x7c(%rsp) cmpl $0x6, 0x7c(%rsp) jge 0x603f84 movslq 0x7c(%rsp), %rcx leaq 0xf0(%rsp), %rax shlq $0x5, %rcx addq %rcx, %rax movq %rax, 0x70(%rsp) movq 0x70(%rsp), %rax vmovss 0x4(%rax), %xmm0 movq 0x70(%rsp), %rax vaddss 0x8(%rax), %xmm0, %xmm0 vmovss %xmm0, 0x6c(%rsp) movq 
0x70(%rsp), %rax vmovss 0x4(%rax), %xmm0 movq 0x70(%rsp), %rax vsubss 0x8(%rax), %xmm0, %xmm0 vmovss %xmm0, 0x68(%rsp) movq 0x70(%rsp), %rax vmovss 0xc(%rax), %xmm0 movq 0x70(%rsp), %rax vaddss 0x10(%rax), %xmm0, %xmm0 vmovss %xmm0, 0x64(%rsp) movq 0x70(%rsp), %rax vmovss 0xc(%rax), %xmm0 movq 0x70(%rsp), %rax vsubss 0x10(%rax), %xmm0, %xmm0 vmovss %xmm0, 0x60(%rsp) movq 0x70(%rsp), %rax vmovss 0x14(%rax), %xmm0 movq 0x70(%rsp), %rax vaddss 0x18(%rax), %xmm0, %xmm0 vmovss %xmm0, 0x5c(%rsp) movq 0x70(%rsp), %rax vmovss 0x14(%rax), %xmm0 movq 0x70(%rsp), %rax vsubss 0x18(%rax), %xmm0, %xmm0 vmovss %xmm0, 0x58(%rsp) vmovss 0x1b0(%rsp), %xmm0 movq 0x70(%rsp), %rax vaddss (%rax), %xmm0, %xmm0 vaddss 0x6c(%rsp), %xmm0, %xmm0 vaddss 0x64(%rsp), %xmm0, %xmm0 vmovss 0x1800c8d(%rip), %xmm1 # 0x1e04a98 vmulss 0x5c(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 movq 0x80(%rsp), %rax vmovss %xmm0, (%rax) vmovss 0x1b0(%rsp), %xmm0 vaddss 0x6c(%rsp), %xmm0, %xmm0 vmovss 0x1800c64(%rip), %xmm1 # 0x1e04a9c vmulss 0x64(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 vmovss 0x1800c56(%rip), %xmm1 # 0x1e04aa0 vmulss 0x5c(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 movq 0x80(%rsp), %rax vmovss %xmm0, 0x8(%rax) vmovss 0x1b0(%rsp), %xmm0 vaddss 0x6c(%rsp), %xmm0, %xmm0 vmovss 0x1800c2c(%rip), %xmm1 # 0x1e04aa4 vmulss 0x64(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 vaddss 0x5c(%rsp), %xmm0, %xmm0 vaddss 0x5c(%rsp), %xmm0, %xmm0 movq 0x80(%rsp), %rax vmovss %xmm0, 0x10(%rax) vmovss 0x1b0(%rsp), %xmm0 vaddss 0x68(%rsp), %xmm0, %xmm0 vaddss 0x60(%rsp), %xmm0, %xmm0 vaddss 0x60(%rsp), %xmm0, %xmm0 vmovss 0x1800be6(%rip), %xmm1 # 0x1e04aa4 vmulss 0x58(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 movq 0x80(%rsp), %rax vmovss %xmm0, 0x4(%rax) vmovss 0x1b0(%rsp), %xmm0 vaddss 0x68(%rsp), %xmm0, %xmm0 vmovss 0x1800bb4(%rip), %xmm1 # 0x1e04aa0 vmulss 0x60(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 vmovss 0x1800b9e(%rip), %xmm1 # 0x1e04a9c vmulss 0x58(%rsp), %xmm1, %xmm1 vaddss %xmm1, 
%xmm0, %xmm0 movq 0x80(%rsp), %rax vmovss %xmm0, 0xc(%rax) vmovss 0x1b0(%rsp), %xmm0 movq 0x70(%rsp), %rax vaddss 0x1c(%rax), %xmm0, %xmm0 vaddss 0x68(%rsp), %xmm0, %xmm0 vmovss 0x1800b62(%rip), %xmm1 # 0x1e04a98 vmulss 0x60(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 vaddss 0x58(%rsp), %xmm0, %xmm0 movq 0x80(%rsp), %rax vmovss %xmm0, 0x14(%rax) movl 0x274(%rsp), %ecx movq 0x80(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x80(%rsp) movl 0x7c(%rsp), %eax addl $0x1, %eax movl %eax, 0x7c(%rsp) jmp 0x603d25 jmp 0x603f86 movl 0xe8(%rsp), %eax addl $0x1, %eax movl %eax, 0xe8(%rsp) jmp 0x6037c3 jmp 0x603f9e movl 0xec(%rsp), %eax addl $0x1, %eax movl %eax, 0xec(%rsp) jmp 0x6037a4 leaq 0x1c0(%rsp), %rax movq %rax, 0x298(%rsp) movq 0x298(%rsp), %rax movq %rax, 0x2f0(%rsp) movq 0x2f0(%rsp), %rax movq %rax, 0x10(%rsp) cmpq $0x0, 0x8(%rax) je 0x60406d movq 0x10(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x2ec(%rsp) # imm = 0xFFFFFFFF movl 0x2ec(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x2e8(%rsp) cmpl $0x1, 0x2e8(%rsp) jne 0x60406d movq 0x10(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x604041 movq 0x10(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x60403f jmp 0x60406b movq 0x10(%rsp), %rax movq (%rax), %rax movq %rax, 0x378(%rsp) cmpq $0x0, 0x378(%rsp) je 0x604069 movq 0x378(%rsp), %rdi callq 0x5e480 jmp 0x60406b jmp 0x60406d movq 0x10(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x6040c5 movq %rax, %rdi callq 0x5fc90 leaq 0x208(%rsp), %rax movq %rax, 0x2a8(%rsp) movq 0x2a8(%rsp), %rax movq %rax, 0x2d0(%rsp) movq 0x2d0(%rsp), %rax movq %rax, 0x8(%rsp) cmpq $0x0, 0x8(%rax) je 0x60417e movq 0x8(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x2cc(%rsp) # imm = 0xFFFFFFFF movl 0x2cc(%rsp), 
%eax lock xaddl %eax, (%rcx) movl %eax, 0x2c8(%rsp) cmpl $0x1, 0x2c8(%rsp) jne 0x60417e movq 0x8(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x604152 movq 0x8(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x604150 jmp 0x60417c movq 0x8(%rsp), %rax movq (%rax), %rax movq %rax, 0x388(%rsp) cmpq $0x0, 0x388(%rsp) je 0x60417a movq 0x388(%rsp), %rdi callq 0x5e480 jmp 0x60417c jmp 0x60417e movq 0x8(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x6041d6 movq %rax, %rdi callq 0x5fc90 jmp 0x6041d8 movl 0x254(%rsp), %eax addl $0x1, %eax movl %eax, 0x254(%rsp) jmp 0x60318d addq $0x438, %rsp # imm = 0x438 retq nopw %cs:(%rax,%rax)
/ysh329[P]ncnn/src/layer/x86/convolution_winograd_transform.h
ncnn::conv3x3s1_winograd63_transform_input_pack4_sse(ncnn::Mat const&, ncnn::Mat&, ncnn::Option const&)
// Winograd F(6x6, 3x3) input transform, 4-element (pack4) SSE layout.
// Splits the padded input into 6x6 output tiles (each needing an 8x8 input
// patch because of the 3x3 kernel overlap) and applies the 8x8 B^T transform
// to rows then columns, writing the 64 transformed coefficients of every tile
// into bottom_blob_tm grouped by coefficient index.
//
// bottom_blob    : input feature map, elempack=4 (4 floats per element)
// bottom_blob_tm : output, per channel holds 64 planes of `tiles` vec4 values
// opt            : controls the OpenMP thread count for the per-channel loop
static void conv3x3s1_winograd63_transform_input_pack4_sse(const Mat& bottom_blob, Mat& bottom_blob_tm, const Option& opt)
{
    const int w = bottom_blob.w;
    const int h = bottom_blob.h;
    const int inch = bottom_blob.c;

    // Each 6x6 output tile consumes an 8x8 input window with stride 6;
    // the caller guarantees w/h are already padded so (dim - 2) divides by 6.
    const int w_tiles = (w - 2) / 6;
    const int h_tiles = (h - 2) / 6;
    const int tiles = w_tiles * h_tiles;

    // const float itm[8][8] = {
    //     {1.0f,  0.0f, -5.25f,  0.00f,  5.25f,  0.00f, -1.0f, 0.0f},
    //
    //     {0.0f,  1.0f,  1.00f, -4.25f, -4.25f,  1.00f,  1.0f, 0.0f},
    //     {0.0f, -1.0f,  1.00f,  4.25f, -4.25f, -1.00f,  1.0f, 0.0f},
    //
    //     {0.0f,  0.5f,  0.25f, -2.50f, -1.25f,  2.00f,  1.0f, 0.0f},
    //     {0.0f, -0.5f,  0.25f,  2.50f, -1.25f, -2.00f,  1.0f, 0.0f},
    //
    //     {0.0f,  2.0f,  4.00f, -2.50f, -5.00f,  0.50f,  1.0f, 0.0f},
    //     {0.0f, -2.0f,  4.00f,  2.50f, -5.00f, -0.50f,  1.0f, 0.0f},
    //
    //     {0.0f, -1.0f,  0.00f,  5.25f,  0.00f, -5.25f,  0.0f, 1.0f}
    // };

    // Factored form of the transform above (shared subexpressions reused):
    // 0 = r00 - r06 + (r04 - r02) * 5.25
    // 7 = r07 - r01 + (r03 - r05) * 5.25

    // 1 = (r02 + r06 - r04 * 4.25) + (r01 - r03 * 4.25 + r05)
    // 2 = (r02 + r06 - r04 * 4.25) - (r01 - r03 * 4.25 + r05)

    // 3 = (r06 + r02 * 0.25 - r04 * 1.25) + (r01 * 0.5 - r03 * 2.5 + r05 * 2)
    // 4 = (r06 + r02 * 0.25 - r04 * 1.25) - (r01 * 0.5 - r03 * 2.5 + r05 * 2)

    // reuse r04 * 1.25
    // reuse r03 * 2.5
    // 5 = (r06 + (r02 - r04 * 1.25) * 4) + (r01 * 2 - r03 * 2.5 + r05 * 0.5)
    // 6 = (r06 + (r02 - r04 * 1.25) * 4) - (r01 * 2 - r03 * 2.5 + r05 * 0.5)

    #pragma omp parallel for num_threads(opt.num_threads)
    for (int q = 0; q < inch; q++)
    {
        const Mat img0 = bottom_blob.channel(q);
        Mat img0_tm = bottom_blob_tm.channel(q);

        // Scratch for one 8x8 patch of vec4 values; 16-byte alignment is
        // required because _mm_load_ps/_mm_store_ps are aligned accesses.
#ifdef _MSC_VER
        __declspec(align(16))
#else
        __attribute__((aligned(16)))
#endif
        float tmp[8][8][4];

        // Transform constants broadcast once per channel, hoisted out of the
        // tile loops.
        __m128 _v5_25 = _mm_set1_ps(5.25f);
        __m128 _vm4_25 = _mm_set1_ps(-4.25f);
        __m128 _vm1_25 = _mm_set1_ps(-1.25f);
        __m128 _v0_25 = _mm_set1_ps(0.25f);
        __m128 _vm2_5 = _mm_set1_ps(-2.5f);
        __m128 _v0_5 = _mm_set1_ps(0.5f);
        __m128 _v2 = _mm_set1_ps(2.f);
        __m128 _v4 = _mm_set1_ps(4.f);

        // tile
        for (int i = 0; i < h_tiles; i++)
        {
            for (int j = 0; j < w_tiles; j++)
            {
                // Top-left of this tile's 8x8 input window (stride 6, pack4).
                const float* r0 = img0.row(i * 6) + (j * 6) * 4;

                // Pass 1: transform each of the 8 rows of the patch, storing
                // results transposed into tmp so pass 2 can read columns as rows.
                for (int m = 0; m < 8; m++)
                {
                    __m128 _r00 = _mm_load_ps(r0);
                    __m128 _r01 = _mm_load_ps(r0 + 4);
                    __m128 _r02 = _mm_load_ps(r0 + 4 * 2);
                    __m128 _r03 = _mm_load_ps(r0 + 4 * 3);
                    __m128 _r04 = _mm_load_ps(r0 + 4 * 4);
                    __m128 _r05 = _mm_load_ps(r0 + 4 * 5);
                    __m128 _r06 = _mm_load_ps(r0 + 4 * 6);
                    __m128 _r07 = _mm_load_ps(r0 + 4 * 7);

                    __m128 _tmp0m = _mm_comp_fmadd_ps(_v5_25, _mm_sub_ps(_r04, _r02), _mm_sub_ps(_r00, _r06));
                    __m128 _tmp7m = _mm_comp_fmadd_ps(_v5_25, _mm_sub_ps(_r03, _r05), _mm_sub_ps(_r07, _r01));
                    _mm_store_ps(tmp[0][m], _tmp0m);
                    _mm_store_ps(tmp[7][m], _tmp7m);

                    // Rows 1/2 share the even/odd halves _tmp12a/_tmp12b.
                    __m128 _tmp12a = _mm_comp_fmadd_ps(_vm4_25, _r04, _mm_add_ps(_r02, _r06));
                    __m128 _tmp12b = _mm_comp_fmadd_ps(_vm4_25, _r03, _mm_add_ps(_r01, _r05));

                    __m128 _tmp1m = _mm_add_ps(_tmp12a, _tmp12b);
                    __m128 _tmp2m = _mm_sub_ps(_tmp12a, _tmp12b);
                    _mm_store_ps(tmp[1][m], _tmp1m);
                    _mm_store_ps(tmp[2][m], _tmp2m);

                    __m128 _tmp34a = _mm_comp_fmadd_ps(_vm1_25, _r04, _mm_comp_fmadd_ps(_v0_25, _r02, _r06));
                    __m128 _tmp34b = _mm_comp_fmadd_ps(_v2, _r05, _mm_comp_fmadd_ps(_vm2_5, _r03, _mm_mul_ps(_r01, _v0_5)));

                    __m128 _tmp3m = _mm_add_ps(_tmp34a, _tmp34b);
                    __m128 _tmp4m = _mm_sub_ps(_tmp34a, _tmp34b);
                    _mm_store_ps(tmp[3][m], _tmp3m);
                    _mm_store_ps(tmp[4][m], _tmp4m);

                    __m128 _tmp56a = _mm_comp_fmadd_ps(_v4, _mm_comp_fmadd_ps(_vm1_25, _r04, _r02), _r06);
                    __m128 _tmp56b = _mm_comp_fmadd_ps(_v0_5, _r05, _mm_comp_fmadd_ps(_vm2_5, _r03, _mm_mul_ps(_r01, _v2)));

                    __m128 _tmp5m = _mm_add_ps(_tmp56a, _tmp56b);
                    __m128 _tmp6m = _mm_sub_ps(_tmp56a, _tmp56b);
                    _mm_store_ps(tmp[5][m], _tmp5m);
                    _mm_store_ps(tmp[6][m], _tmp6m);

                    // Advance one input row (w elements of 4 floats).
                    r0 += w * 4;
                }

                // Output pointers: coefficient k of this tile lives at plane k,
                // planes are `tiles * 4` floats apart.
                float* r0_tm_0 = (float*)img0_tm + (i * w_tiles + j) * 4;
                float* r0_tm_1 = r0_tm_0 + tiles * 4;
                float* r0_tm_2 = r0_tm_0 + tiles * 4 * 2;
                float* r0_tm_3 = r0_tm_0 + tiles * 4 * 3;
                float* r0_tm_4 = r0_tm_0 + tiles * 4 * 4;
                float* r0_tm_5 = r0_tm_0 + tiles * 4 * 5;
                float* r0_tm_6 = r0_tm_0 + tiles * 4 * 6;
                float* r0_tm_7 = r0_tm_0 + tiles * 4 * 7;

                // Pass 2: same transform applied down the columns of tmp
                // (read back as rows thanks to the transposed store above),
                // writing the final 8x8 coefficient block.
                for (int m = 0; m < 8; m++)
                {
                    __m128 _tmp00 = _mm_load_ps(tmp[m][0]);
                    __m128 _tmp01 = _mm_load_ps(tmp[m][1]);
                    __m128 _tmp02 = _mm_load_ps(tmp[m][2]);
                    __m128 _tmp03 = _mm_load_ps(tmp[m][3]);
                    __m128 _tmp04 = _mm_load_ps(tmp[m][4]);
                    __m128 _tmp05 = _mm_load_ps(tmp[m][5]);
                    __m128 _tmp06 = _mm_load_ps(tmp[m][6]);
                    __m128 _tmp07 = _mm_load_ps(tmp[m][7]);

                    __m128 _r0tm0 = _mm_comp_fmadd_ps(_v5_25, _mm_sub_ps(_tmp04, _tmp02), _mm_sub_ps(_tmp00, _tmp06));
                    __m128 _r0tm7 = _mm_comp_fmadd_ps(_v5_25, _mm_sub_ps(_tmp03, _tmp05), _mm_sub_ps(_tmp07, _tmp01));

                    __m128 _tmp12a = _mm_comp_fmadd_ps(_vm4_25, _tmp04, _mm_add_ps(_tmp02, _tmp06));
                    __m128 _tmp12b = _mm_comp_fmadd_ps(_vm4_25, _tmp03, _mm_add_ps(_tmp01, _tmp05));

                    __m128 _r0tm1 = _mm_add_ps(_tmp12a, _tmp12b);
                    __m128 _r0tm2 = _mm_sub_ps(_tmp12a, _tmp12b);

                    __m128 _tmp34a = _mm_comp_fmadd_ps(_vm1_25, _tmp04, _mm_comp_fmadd_ps(_v0_25, _tmp02, _tmp06));
                    __m128 _tmp34b = _mm_comp_fmadd_ps(_v2, _tmp05, _mm_comp_fmadd_ps(_vm2_5, _tmp03, _mm_mul_ps(_tmp01, _v0_5)));

                    __m128 _r0tm3 = _mm_add_ps(_tmp34a, _tmp34b);
                    __m128 _r0tm4 = _mm_sub_ps(_tmp34a, _tmp34b);

                    __m128 _tmp56a = _mm_comp_fmadd_ps(_v4, _mm_comp_fmadd_ps(_vm1_25, _tmp04, _tmp02), _tmp06);
                    __m128 _tmp56b = _mm_comp_fmadd_ps(_v0_5, _tmp05, _mm_comp_fmadd_ps(_vm2_5, _tmp03, _mm_mul_ps(_tmp01, _v2)));

                    __m128 _r0tm5 = _mm_add_ps(_tmp56a, _tmp56b);
                    __m128 _r0tm6 = _mm_sub_ps(_tmp56a, _tmp56b);

                    _mm_store_ps(r0_tm_0, _r0tm0);
                    _mm_store_ps(r0_tm_1, _r0tm1);
                    _mm_store_ps(r0_tm_2, _r0tm2);
                    _mm_store_ps(r0_tm_3, _r0tm3);
                    _mm_store_ps(r0_tm_4, _r0tm4);
                    _mm_store_ps(r0_tm_5, _r0tm5);
                    _mm_store_ps(r0_tm_6, _r0tm6);
                    _mm_store_ps(r0_tm_7, _r0tm7);

                    // Jump to the next group of 8 coefficient planes.
                    r0_tm_0 += tiles * 4 * 8;
                    r0_tm_1 += tiles * 4 * 8;
                    r0_tm_2 += tiles * 4 * 8;
                    r0_tm_3 += tiles * 4 * 8;
                    r0_tm_4 += tiles * 4 * 8;
                    r0_tm_5 += tiles * 4 * 8;
                    r0_tm_6 += tiles * 4 * 8;
                    r0_tm_7 += tiles * 4 * 8;
                }
            }
        }
    }
}
subq $0x1ce8, %rsp # imm = 0x1CE8 movq %rdi, 0xb40(%rsp) movq %rsi, 0xb38(%rsp) movq %rdx, 0xb30(%rsp) movq 0xb40(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0xb2c(%rsp) movq 0xb40(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0xb28(%rsp) movq 0xb40(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0xb24(%rsp) movl 0xb2c(%rsp), %eax subl $0x2, %eax movl $0x6, %ecx cltd idivl %ecx movl %eax, 0xb20(%rsp) movl 0xb28(%rsp), %eax subl $0x2, %eax movl $0x6, %ecx cltd idivl %ecx movl %eax, 0xb1c(%rsp) movl 0xb20(%rsp), %eax imull 0xb1c(%rsp), %eax movl %eax, 0xb18(%rsp) movl $0x0, 0xb14(%rsp) movl 0xb14(%rsp), %eax cmpl 0xb24(%rsp), %eax jge 0x61b43a movq 0xb40(%rsp), %rcx movl 0xb14(%rsp), %eax leaq 0xac8(%rsp), %rdx movq %rdx, 0xc00(%rsp) movq %rcx, 0xbf8(%rsp) movl %eax, 0xbf4(%rsp) movq 0xbf8(%rsp), %rax movq %rax, 0x2b8(%rsp) movb $0x0, 0xbf3(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0xbf4(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0xac8(%rsp), %r10 movq %r10, 0x1c88(%rsp) movl %r9d, 0x1c84(%rsp) movl %r8d, 0x1c80(%rsp) movl %edi, 0x1c7c(%rsp) movq %rsi, 0x1c70(%rsp) movq %rdx, 0x1c68(%rsp) movl %ecx, 0x1c64(%rsp) movq %rax, 0x1c58(%rsp) movq 0x1c88(%rsp), %rcx movq %rcx, 0x2b0(%rsp) movq 0x1c70(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x1c68(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x1c64(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x1c58(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x1c84(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x1c80(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x1c7c(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x1ce0(%rsp) movl $0x10, 0x1cdc(%rsp) movq 0x1ce0(%rsp), %rax movslq 0x1cdc(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 
0x1cdc(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x2b8(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0xaf0(%rsp) cmpl $0x4, 0x28(%rax) jne 0x618b98 movq 0x2b8(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0xb08(%rsp) movb $0x1, 0xbf3(%rsp) testb $0x1, 0xbf3(%rsp) jne 0x618cd1 leaq 0xac8(%rsp), %rax movq %rax, 0xc08(%rsp) movq 0xc08(%rsp), %rax movq %rax, 0xc18(%rsp) movq 0xc18(%rsp), %rax movq %rax, 0x2a8(%rsp) cmpq $0x0, 0x8(%rax) je 0x618c76 movq 0x2a8(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xc14(%rsp) # imm = 0xFFFFFFFF movl 0xc14(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xc10(%rsp) cmpl $0x1, 0xc10(%rsp) jne 0x618c76 movq 0x2a8(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x618c47 movq 0x2a8(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x618c45 jmp 0x618c74 movq 0x2a8(%rsp), %rax movq (%rax), %rax movq %rax, 0xc20(%rsp) cmpq $0x0, 0xc20(%rsp) je 0x618c72 movq 0xc20(%rsp), %rdi callq 0x5e480 jmp 0x618c74 jmp 0x618c76 movq 0x2a8(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x618cd1 movq %rax, %rdi callq 0x5fc90 movq 0xb38(%rsp), %rcx movl 0xb14(%rsp), %eax leaq 0xa80(%rsp), %rdx movq %rdx, 0xbb8(%rsp) movq %rcx, 0xbb0(%rsp) movl %eax, 0xbac(%rsp) movq 0xbb0(%rsp), %rax movq %rax, 0x298(%rsp) movb $0x0, 0xbab(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0xbac(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0xa80(%rsp), %r10 movq %r10, 0x1cc0(%rsp) movl %r9d, 0x1cbc(%rsp) movl %r8d, 0x1cb8(%rsp) movl %edi, 
0x1cb4(%rsp) movq %rsi, 0x1ca8(%rsp) movq %rdx, 0x1ca0(%rsp) movl %ecx, 0x1c9c(%rsp) movq %rax, 0x1c90(%rsp) movq 0x1cc0(%rsp), %rcx movq %rcx, 0x2a0(%rsp) movq 0x1ca8(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x1ca0(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x1c9c(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x1c90(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x1cbc(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x1cb8(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x1cb4(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x1cd0(%rsp) movl $0x10, 0x1ccc(%rsp) movq 0x1cd0(%rsp), %rax movslq 0x1ccc(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x1ccc(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rcx movq 0x2a0(%rsp), %rax movq %rcx, 0x40(%rax) movq 0x298(%rsp), %rax movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0xaa8(%rsp) cmpl $0x4, 0x28(%rax) jne 0x618e95 movq 0x298(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0xac0(%rsp) movb $0x1, 0xbab(%rsp) testb $0x1, 0xbab(%rsp) jne 0x618fce leaq 0xa80(%rsp), %rax movq %rax, 0xbc0(%rsp) movq 0xbc0(%rsp), %rax movq %rax, 0xbd0(%rsp) movq 0xbd0(%rsp), %rax movq %rax, 0x290(%rsp) cmpq $0x0, 0x8(%rax) je 0x618f73 movq 0x290(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xbcc(%rsp) # imm = 0xFFFFFFFF movl 0xbcc(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xbc8(%rsp) cmpl $0x1, 0xbc8(%rsp) jne 0x618f73 movq 0x290(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x618f44 movq 0x290(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x618f42 jmp 0x618f71 movq 0x290(%rsp), %rax movq (%rax), %rax movq %rax, 0xc28(%rsp) cmpq $0x0, 0xc28(%rsp) je 0x618f6f movq 0xc28(%rsp), %rdi callq 0x5e480 jmp 0x618f71 jmp 0x618f73 movq 0x290(%rsp), %rax movq $0x0, (%rax) movq $0x0, 
0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x618fce movq %rax, %rdi callq 0x5fc90 jmp 0x618fd0 movl $0x40a80000, 0x1a0c(%rsp) # imm = 0x40A80000 vbroadcastss 0x1a0c(%rsp), %xmm0 vmovaps %xmm0, 0x19f0(%rsp) vmovaps 0x19f0(%rsp), %xmm0 vmovaps %xmm0, 0x660(%rsp) movl $0xc0880000, 0x19ec(%rsp) # imm = 0xC0880000 vbroadcastss 0x19ec(%rsp), %xmm0 vmovaps %xmm0, 0x19d0(%rsp) vmovaps 0x19d0(%rsp), %xmm0 vmovaps %xmm0, 0x650(%rsp) movl $0xbfa00000, 0x19cc(%rsp) # imm = 0xBFA00000 vbroadcastss 0x19cc(%rsp), %xmm0 vmovaps %xmm0, 0x19b0(%rsp) vmovaps 0x19b0(%rsp), %xmm0 vmovaps %xmm0, 0x640(%rsp) movl $0x3e800000, 0x19ac(%rsp) # imm = 0x3E800000 vbroadcastss 0x19ac(%rsp), %xmm0 vmovaps %xmm0, 0x1990(%rsp) vmovaps 0x1990(%rsp), %xmm0 vmovaps %xmm0, 0x630(%rsp) movl $0xc0200000, 0x198c(%rsp) # imm = 0xC0200000 vbroadcastss 0x198c(%rsp), %xmm0 vmovaps %xmm0, 0x1970(%rsp) vmovaps 0x1970(%rsp), %xmm0 vmovaps %xmm0, 0x620(%rsp) movl $0x3f000000, 0x196c(%rsp) # imm = 0x3F000000 vbroadcastss 0x196c(%rsp), %xmm0 vmovaps %xmm0, 0x1950(%rsp) vmovaps 0x1950(%rsp), %xmm0 vmovaps %xmm0, 0x610(%rsp) movl $0x40000000, 0x194c(%rsp) # imm = 0x40000000 vbroadcastss 0x194c(%rsp), %xmm0 vmovaps %xmm0, 0x1930(%rsp) vmovaps 0x1930(%rsp), %xmm0 vmovaps %xmm0, 0x600(%rsp) movl $0x40800000, 0x192c(%rsp) # imm = 0x40800000 vbroadcastss 0x192c(%rsp), %xmm0 vmovaps %xmm0, 0x1910(%rsp) vmovaps 0x1910(%rsp), %xmm0 vmovaps %xmm0, 0x5f0(%rsp) movl $0x0, 0x5ec(%rsp) movl 0x5ec(%rsp), %eax cmpl 0xb1c(%rsp), %eax jge 0x61b20a movl $0x0, 0x5e8(%rsp) movl 0x5e8(%rsp), %eax cmpl 0xb20(%rsp), %eax jge 0x61b1f2 imull $0x6, 0x5ec(%rsp), %eax leaq 0xac8(%rsp), %rcx movq %rcx, 0xbe8(%rsp) movl %eax, 0xbe4(%rsp) movq 0xbe8(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0xbe4(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 
0x288(%rsp) movq 0x288(%rsp), %rax imull $0x6, 0x5e8(%rsp), %ecx shll $0x2, %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x5e0(%rsp) movl $0x0, 0x5dc(%rsp) cmpl $0x8, 0x5dc(%rsp) jge 0x61a0da movq 0x5e0(%rsp), %rax movq %rax, 0xec8(%rsp) movq 0xec8(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x5c0(%rsp) movq 0x5e0(%rsp), %rax addq $0x10, %rax movq %rax, 0xec0(%rsp) movq 0xec0(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x5b0(%rsp) movq 0x5e0(%rsp), %rax addq $0x20, %rax movq %rax, 0xeb8(%rsp) movq 0xeb8(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x5a0(%rsp) movq 0x5e0(%rsp), %rax addq $0x30, %rax movq %rax, 0xeb0(%rsp) movq 0xeb0(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x590(%rsp) movq 0x5e0(%rsp), %rax addq $0x40, %rax movq %rax, 0xea8(%rsp) movq 0xea8(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x580(%rsp) movq 0x5e0(%rsp), %rax addq $0x50, %rax movq %rax, 0xea0(%rsp) movq 0xea0(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x570(%rsp) movq 0x5e0(%rsp), %rax addq $0x60, %rax movq %rax, 0xe98(%rsp) movq 0xe98(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x560(%rsp) movq 0x5e0(%rsp), %rax addq $0x70, %rax movq %rax, 0xe90(%rsp) movq 0xe90(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x550(%rsp) vmovaps 0x660(%rsp), %xmm2 vmovaps 0x580(%rsp), %xmm1 vmovaps 0x5a0(%rsp), %xmm0 vmovaps %xmm1, 0x1c40(%rsp) vmovaps %xmm0, 0x1c30(%rsp) vmovaps 0x1c40(%rsp), %xmm0 vsubps 0x1c30(%rsp), %xmm0, %xmm1 vmovaps 0x5c0(%rsp), %xmm3 vmovaps 0x560(%rsp), %xmm0 vmovaps %xmm3, 0x1c20(%rsp) vmovaps %xmm0, 0x1c10(%rsp) vmovaps 0x1c20(%rsp), %xmm0 vsubps 0x1c10(%rsp), %xmm0, %xmm0 vmovaps %xmm2, 0x1480(%rsp) vmovaps %xmm1, 0x1470(%rsp) vmovaps %xmm0, 0x1460(%rsp) vmovaps 0x1480(%rsp), %xmm2 vmovaps 0x1470(%rsp), %xmm1 vmovaps 0x1460(%rsp), %xmm0 vmovaps %xmm2, 0x14b0(%rsp) vmovaps %xmm1, 0x14a0(%rsp) vmovaps %xmm0, 0x1490(%rsp) vmovaps 0x14b0(%rsp), %xmm1 vmovaps 0x14a0(%rsp), %xmm0 vmovaps 0x1490(%rsp), %xmm2 vfmadd213ps 
%xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x270(%rsp) vmovaps 0x270(%rsp), %xmm0 vmovaps %xmm0, 0x540(%rsp) vmovaps 0x660(%rsp), %xmm2 vmovaps 0x590(%rsp), %xmm1 vmovaps 0x570(%rsp), %xmm0 vmovaps %xmm1, 0x1c00(%rsp) vmovaps %xmm0, 0x1bf0(%rsp) vmovaps 0x1c00(%rsp), %xmm0 vsubps 0x1bf0(%rsp), %xmm0, %xmm1 vmovaps 0x550(%rsp), %xmm3 vmovaps 0x5b0(%rsp), %xmm0 vmovaps %xmm3, 0x1be0(%rsp) vmovaps %xmm0, 0x1bd0(%rsp) vmovaps 0x1be0(%rsp), %xmm0 vsubps 0x1bd0(%rsp), %xmm0, %xmm0 vmovaps %xmm2, 0x1450(%rsp) vmovaps %xmm1, 0x1440(%rsp) vmovaps %xmm0, 0x1430(%rsp) vmovaps 0x1450(%rsp), %xmm2 vmovaps 0x1440(%rsp), %xmm1 vmovaps 0x1430(%rsp), %xmm0 vmovaps %xmm2, 0x14e0(%rsp) vmovaps %xmm1, 0x14d0(%rsp) vmovaps %xmm0, 0x14c0(%rsp) vmovaps 0x14e0(%rsp), %xmm1 vmovaps 0x14d0(%rsp), %xmm0 vmovaps 0x14c0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x260(%rsp) vmovaps 0x260(%rsp), %xmm0 vmovaps %xmm0, 0x530(%rsp) leaq 0x670(%rsp), %rax movslq 0x5dc(%rsp), %rcx shlq $0x4, %rcx addq %rcx, %rax vmovaps 0x540(%rsp), %xmm0 movq %rax, 0xe48(%rsp) vmovaps %xmm0, 0xe30(%rsp) vmovaps 0xe30(%rsp), %xmm0 movq 0xe48(%rsp), %rax vmovaps %xmm0, (%rax) leaq 0x670(%rsp), %rax addq $0x380, %rax # imm = 0x380 movslq 0x5dc(%rsp), %rcx shlq $0x4, %rcx addq %rcx, %rax vmovaps 0x530(%rsp), %xmm0 movq %rax, 0xe28(%rsp) vmovaps %xmm0, 0xe10(%rsp) vmovaps 0xe10(%rsp), %xmm0 movq 0xe28(%rsp), %rax vmovaps %xmm0, (%rax) vmovaps 0x650(%rsp), %xmm2 vmovaps 0x580(%rsp), %xmm1 vmovaps 0x5a0(%rsp), %xmm3 vmovaps 0x560(%rsp), %xmm0 vmovaps %xmm3, 0x1000(%rsp) vmovaps %xmm0, 0xff0(%rsp) vmovaps 0x1000(%rsp), %xmm0 vaddps 0xff0(%rsp), %xmm0, %xmm0 vmovaps %xmm2, 0x1420(%rsp) vmovaps %xmm1, 0x1410(%rsp) vmovaps %xmm0, 0x1400(%rsp) vmovaps 0x1420(%rsp), %xmm2 vmovaps 0x1410(%rsp), %xmm1 vmovaps 0x1400(%rsp), %xmm0 vmovaps %xmm2, 0x1510(%rsp) vmovaps %xmm1, 0x1500(%rsp) vmovaps %xmm0, 0x14f0(%rsp) vmovaps 0x1510(%rsp), %xmm1 vmovaps 0x1500(%rsp), 
%xmm0 vmovaps 0x14f0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x250(%rsp) vmovaps 0x250(%rsp), %xmm0 vmovaps %xmm0, 0x520(%rsp) vmovaps 0x650(%rsp), %xmm2 vmovaps 0x590(%rsp), %xmm1 vmovaps 0x5b0(%rsp), %xmm3 vmovaps 0x570(%rsp), %xmm0 vmovaps %xmm3, 0xfe0(%rsp) vmovaps %xmm0, 0xfd0(%rsp) vmovaps 0xfe0(%rsp), %xmm0 vaddps 0xfd0(%rsp), %xmm0, %xmm0 vmovaps %xmm2, 0x13f0(%rsp) vmovaps %xmm1, 0x13e0(%rsp) vmovaps %xmm0, 0x13d0(%rsp) vmovaps 0x13f0(%rsp), %xmm2 vmovaps 0x13e0(%rsp), %xmm1 vmovaps 0x13d0(%rsp), %xmm0 vmovaps %xmm2, 0x1540(%rsp) vmovaps %xmm1, 0x1530(%rsp) vmovaps %xmm0, 0x1520(%rsp) vmovaps 0x1540(%rsp), %xmm1 vmovaps 0x1530(%rsp), %xmm0 vmovaps 0x1520(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x240(%rsp) vmovaps 0x240(%rsp), %xmm0 vmovaps %xmm0, 0x510(%rsp) vmovaps 0x520(%rsp), %xmm1 vmovaps 0x510(%rsp), %xmm0 vmovaps %xmm1, 0xfc0(%rsp) vmovaps %xmm0, 0xfb0(%rsp) vmovaps 0xfc0(%rsp), %xmm0 vaddps 0xfb0(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x500(%rsp) vmovaps 0x520(%rsp), %xmm1 vmovaps 0x510(%rsp), %xmm0 vmovaps %xmm1, 0x1bc0(%rsp) vmovaps %xmm0, 0x1bb0(%rsp) vmovaps 0x1bc0(%rsp), %xmm0 vsubps 0x1bb0(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x4f0(%rsp) leaq 0x670(%rsp), %rax addq $0x80, %rax movslq 0x5dc(%rsp), %rcx shlq $0x4, %rcx addq %rcx, %rax vmovaps 0x500(%rsp), %xmm0 movq %rax, 0xe08(%rsp) vmovaps %xmm0, 0xdf0(%rsp) vmovaps 0xdf0(%rsp), %xmm0 movq 0xe08(%rsp), %rax vmovaps %xmm0, (%rax) leaq 0x670(%rsp), %rax addq $0x100, %rax # imm = 0x100 movslq 0x5dc(%rsp), %rcx shlq $0x4, %rcx addq %rcx, %rax vmovaps 0x4f0(%rsp), %xmm0 movq %rax, 0xde8(%rsp) vmovaps %xmm0, 0xdd0(%rsp) vmovaps 0xdd0(%rsp), %xmm0 movq 0xde8(%rsp), %rax vmovaps %xmm0, (%rax) vmovaps 0x640(%rsp), %xmm0 vmovaps %xmm0, 0x210(%rsp) vmovaps 0x580(%rsp), %xmm0 vmovaps %xmm0, 0x220(%rsp) vmovaps 0x630(%rsp), %xmm2 vmovaps 0x5a0(%rsp), %xmm1 vmovaps 0x560(%rsp), %xmm0 vmovaps %xmm2, 0x13c0(%rsp) 
vmovaps %xmm1, 0x13b0(%rsp) vmovaps %xmm0, 0x13a0(%rsp) vmovaps 0x13c0(%rsp), %xmm2 vmovaps 0x13b0(%rsp), %xmm1 vmovaps 0x13a0(%rsp), %xmm0 vmovaps %xmm2, 0x1570(%rsp) vmovaps %xmm1, 0x1560(%rsp) vmovaps %xmm0, 0x1550(%rsp) vmovaps 0x1570(%rsp), %xmm1 vmovaps 0x1560(%rsp), %xmm0 vmovaps 0x1550(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x230(%rsp) vmovaps 0x230(%rsp), %xmm0 vmovaps 0x220(%rsp), %xmm1 vmovaps 0x210(%rsp), %xmm2 vmovaps %xmm2, 0x1390(%rsp) vmovaps %xmm1, 0x1380(%rsp) vmovaps %xmm0, 0x1370(%rsp) vmovaps 0x1390(%rsp), %xmm2 vmovaps 0x1380(%rsp), %xmm1 vmovaps 0x1370(%rsp), %xmm0 vmovaps %xmm2, 0x15a0(%rsp) vmovaps %xmm1, 0x1590(%rsp) vmovaps %xmm0, 0x1580(%rsp) vmovaps 0x15a0(%rsp), %xmm1 vmovaps 0x1590(%rsp), %xmm0 vmovaps 0x1580(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x200(%rsp) vmovaps 0x200(%rsp), %xmm0 vmovaps %xmm0, 0x4e0(%rsp) vmovaps 0x600(%rsp), %xmm0 vmovaps %xmm0, 0x1d0(%rsp) vmovaps 0x570(%rsp), %xmm0 vmovaps %xmm0, 0x1e0(%rsp) vmovaps 0x620(%rsp), %xmm2 vmovaps 0x590(%rsp), %xmm1 vmovaps 0x5b0(%rsp), %xmm3 vmovaps 0x610(%rsp), %xmm0 vmovaps %xmm3, 0x1a80(%rsp) vmovaps %xmm0, 0x1a70(%rsp) vmovaps 0x1a80(%rsp), %xmm0 vmulps 0x1a70(%rsp), %xmm0, %xmm0 vmovaps %xmm2, 0x1360(%rsp) vmovaps %xmm1, 0x1350(%rsp) vmovaps %xmm0, 0x1340(%rsp) vmovaps 0x1360(%rsp), %xmm2 vmovaps 0x1350(%rsp), %xmm1 vmovaps 0x1340(%rsp), %xmm0 vmovaps %xmm2, 0x15d0(%rsp) vmovaps %xmm1, 0x15c0(%rsp) vmovaps %xmm0, 0x15b0(%rsp) vmovaps 0x15d0(%rsp), %xmm1 vmovaps 0x15c0(%rsp), %xmm0 vmovaps 0x15b0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x1f0(%rsp) vmovaps 0x1f0(%rsp), %xmm0 vmovaps 0x1e0(%rsp), %xmm1 vmovaps 0x1d0(%rsp), %xmm2 vmovaps %xmm2, 0x1330(%rsp) vmovaps %xmm1, 0x1320(%rsp) vmovaps %xmm0, 0x1310(%rsp) vmovaps 0x1330(%rsp), %xmm2 vmovaps 0x1320(%rsp), %xmm1 vmovaps 0x1310(%rsp), %xmm0 vmovaps %xmm2, 0x1600(%rsp) 
vmovaps %xmm1, 0x15f0(%rsp) vmovaps %xmm0, 0x15e0(%rsp) vmovaps 0x1600(%rsp), %xmm1 vmovaps 0x15f0(%rsp), %xmm0 vmovaps 0x15e0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x1c0(%rsp) vmovaps 0x1c0(%rsp), %xmm0 vmovaps %xmm0, 0x4d0(%rsp) vmovaps 0x4e0(%rsp), %xmm1 vmovaps 0x4d0(%rsp), %xmm0 vmovaps %xmm1, 0xfa0(%rsp) vmovaps %xmm0, 0xf90(%rsp) vmovaps 0xfa0(%rsp), %xmm0 vaddps 0xf90(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x4c0(%rsp) vmovaps 0x4e0(%rsp), %xmm1 vmovaps 0x4d0(%rsp), %xmm0 vmovaps %xmm1, 0x1ba0(%rsp) vmovaps %xmm0, 0x1b90(%rsp) vmovaps 0x1ba0(%rsp), %xmm0 vsubps 0x1b90(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x4b0(%rsp) leaq 0x670(%rsp), %rax addq $0x180, %rax # imm = 0x180 movslq 0x5dc(%rsp), %rcx shlq $0x4, %rcx addq %rcx, %rax vmovaps 0x4c0(%rsp), %xmm0 movq %rax, 0xdc8(%rsp) vmovaps %xmm0, 0xdb0(%rsp) vmovaps 0xdb0(%rsp), %xmm0 movq 0xdc8(%rsp), %rax vmovaps %xmm0, (%rax) leaq 0x670(%rsp), %rax addq $0x200, %rax # imm = 0x200 movslq 0x5dc(%rsp), %rcx shlq $0x4, %rcx addq %rcx, %rax vmovaps 0x4b0(%rsp), %xmm0 movq %rax, 0xda8(%rsp) vmovaps %xmm0, 0xd90(%rsp) vmovaps 0xd90(%rsp), %xmm0 movq 0xda8(%rsp), %rax vmovaps %xmm0, (%rax) vmovaps 0x5f0(%rsp), %xmm0 vmovaps %xmm0, 0x1a0(%rsp) vmovaps 0x640(%rsp), %xmm2 vmovaps 0x580(%rsp), %xmm1 vmovaps 0x5a0(%rsp), %xmm0 vmovaps %xmm2, 0x1300(%rsp) vmovaps %xmm1, 0x12f0(%rsp) vmovaps %xmm0, 0x12e0(%rsp) vmovaps 0x1300(%rsp), %xmm2 vmovaps 0x12f0(%rsp), %xmm1 vmovaps 0x12e0(%rsp), %xmm0 vmovaps %xmm2, 0x1630(%rsp) vmovaps %xmm1, 0x1620(%rsp) vmovaps %xmm0, 0x1610(%rsp) vmovaps 0x1630(%rsp), %xmm1 vmovaps 0x1620(%rsp), %xmm0 vmovaps 0x1610(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x1b0(%rsp) vmovaps 0x1b0(%rsp), %xmm1 vmovaps 0x1a0(%rsp), %xmm2 vmovaps 0x560(%rsp), %xmm0 vmovaps %xmm2, 0x12d0(%rsp) vmovaps %xmm1, 0x12c0(%rsp) vmovaps %xmm0, 0x12b0(%rsp) vmovaps 0x12d0(%rsp), %xmm2 vmovaps 0x12c0(%rsp), %xmm1 vmovaps 
0x12b0(%rsp), %xmm0 vmovaps %xmm2, 0x1660(%rsp) vmovaps %xmm1, 0x1650(%rsp) vmovaps %xmm0, 0x1640(%rsp) vmovaps 0x1660(%rsp), %xmm1 vmovaps 0x1650(%rsp), %xmm0 vmovaps 0x1640(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x190(%rsp) vmovaps 0x190(%rsp), %xmm0 vmovaps %xmm0, 0x4a0(%rsp) vmovaps 0x610(%rsp), %xmm0 vmovaps %xmm0, 0x160(%rsp) vmovaps 0x570(%rsp), %xmm0 vmovaps %xmm0, 0x170(%rsp) vmovaps 0x620(%rsp), %xmm2 vmovaps 0x590(%rsp), %xmm1 vmovaps 0x5b0(%rsp), %xmm3 vmovaps 0x600(%rsp), %xmm0 vmovaps %xmm3, 0x1a60(%rsp) vmovaps %xmm0, 0x1a50(%rsp) vmovaps 0x1a60(%rsp), %xmm0 vmulps 0x1a50(%rsp), %xmm0, %xmm0 vmovaps %xmm2, 0x12a0(%rsp) vmovaps %xmm1, 0x1290(%rsp) vmovaps %xmm0, 0x1280(%rsp) vmovaps 0x12a0(%rsp), %xmm2 vmovaps 0x1290(%rsp), %xmm1 vmovaps 0x1280(%rsp), %xmm0 vmovaps %xmm2, 0x1690(%rsp) vmovaps %xmm1, 0x1680(%rsp) vmovaps %xmm0, 0x1670(%rsp) vmovaps 0x1690(%rsp), %xmm1 vmovaps 0x1680(%rsp), %xmm0 vmovaps 0x1670(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x180(%rsp) vmovaps 0x180(%rsp), %xmm0 vmovaps 0x170(%rsp), %xmm1 vmovaps 0x160(%rsp), %xmm2 vmovaps %xmm2, 0x1270(%rsp) vmovaps %xmm1, 0x1260(%rsp) vmovaps %xmm0, 0x1250(%rsp) vmovaps 0x1270(%rsp), %xmm2 vmovaps 0x1260(%rsp), %xmm1 vmovaps 0x1250(%rsp), %xmm0 vmovaps %xmm2, 0x16c0(%rsp) vmovaps %xmm1, 0x16b0(%rsp) vmovaps %xmm0, 0x16a0(%rsp) vmovaps 0x16c0(%rsp), %xmm1 vmovaps 0x16b0(%rsp), %xmm0 vmovaps 0x16a0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x150(%rsp) vmovaps 0x150(%rsp), %xmm0 vmovaps %xmm0, 0x490(%rsp) vmovaps 0x4a0(%rsp), %xmm1 vmovaps 0x490(%rsp), %xmm0 vmovaps %xmm1, 0xf80(%rsp) vmovaps %xmm0, 0xf70(%rsp) vmovaps 0xf80(%rsp), %xmm0 vaddps 0xf70(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x480(%rsp) vmovaps 0x4a0(%rsp), %xmm1 vmovaps 0x490(%rsp), %xmm0 vmovaps %xmm1, 0x1b80(%rsp) vmovaps %xmm0, 0x1b70(%rsp) vmovaps 0x1b80(%rsp), %xmm0 vsubps 
0x1b70(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x470(%rsp) leaq 0x670(%rsp), %rax addq $0x280, %rax # imm = 0x280 movslq 0x5dc(%rsp), %rcx shlq $0x4, %rcx addq %rcx, %rax vmovaps 0x480(%rsp), %xmm0 movq %rax, 0xd88(%rsp) vmovaps %xmm0, 0xd70(%rsp) vmovaps 0xd70(%rsp), %xmm0 movq 0xd88(%rsp), %rax vmovaps %xmm0, (%rax) leaq 0x670(%rsp), %rax addq $0x300, %rax # imm = 0x300 movslq 0x5dc(%rsp), %rcx shlq $0x4, %rcx addq %rcx, %rax vmovaps 0x470(%rsp), %xmm0 movq %rax, 0xd68(%rsp) vmovaps %xmm0, 0xd50(%rsp) vmovaps 0xd50(%rsp), %xmm0 movq 0xd68(%rsp), %rax vmovaps %xmm0, (%rax) movl 0xb2c(%rsp), %ecx shll $0x2, %ecx movq 0x5e0(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x5e0(%rsp) movl 0x5dc(%rsp), %eax addl $0x1, %eax movl %eax, 0x5dc(%rsp) jmp 0x619208 leaq 0xa80(%rsp), %rax movq %rax, 0xbd8(%rsp) movq 0xbd8(%rsp), %rax movq (%rax), %rax movq %rax, 0x148(%rsp) movq 0x148(%rsp), %rax movl 0x5ec(%rsp), %ecx imull 0xb20(%rsp), %ecx addl 0x5e8(%rsp), %ecx shll $0x2, %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x468(%rsp) movq 0x468(%rsp), %rax movl 0xb18(%rsp), %ecx shll $0x2, %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x460(%rsp) movq 0x468(%rsp), %rax movl 0xb18(%rsp), %ecx shll $0x2, %ecx shll %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x458(%rsp) movq 0x468(%rsp), %rax movl 0xb18(%rsp), %ecx shll $0x2, %ecx imull $0x3, %ecx, %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x450(%rsp) movq 0x468(%rsp), %rax movl 0xb18(%rsp), %ecx shll $0x2, %ecx shll $0x2, %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x448(%rsp) movq 0x468(%rsp), %rax movl 0xb18(%rsp), %ecx shll $0x2, %ecx imull $0x5, %ecx, %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x440(%rsp) movq 0x468(%rsp), %rax movl 0xb18(%rsp), %ecx shll $0x2, %ecx imull $0x6, %ecx, %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x438(%rsp) movq 
0x468(%rsp), %rax movl 0xb18(%rsp), %ecx shll $0x2, %ecx imull $0x7, %ecx, %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x430(%rsp) movl $0x0, 0x42c(%rsp) cmpl $0x8, 0x42c(%rsp) jge 0x61b1da movslq 0x42c(%rsp), %rcx leaq 0x670(%rsp), %rax shlq $0x7, %rcx addq %rcx, %rax movq %rax, 0xe88(%rsp) movq 0xe88(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x410(%rsp) movslq 0x42c(%rsp), %rcx leaq 0x670(%rsp), %rax shlq $0x7, %rcx addq %rcx, %rax addq $0x10, %rax movq %rax, 0xe80(%rsp) movq 0xe80(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x400(%rsp) movslq 0x42c(%rsp), %rcx leaq 0x670(%rsp), %rax shlq $0x7, %rcx addq %rcx, %rax addq $0x20, %rax movq %rax, 0xe78(%rsp) movq 0xe78(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x3f0(%rsp) movslq 0x42c(%rsp), %rcx leaq 0x670(%rsp), %rax shlq $0x7, %rcx addq %rcx, %rax addq $0x30, %rax movq %rax, 0xe70(%rsp) movq 0xe70(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x3e0(%rsp) movslq 0x42c(%rsp), %rcx leaq 0x670(%rsp), %rax shlq $0x7, %rcx addq %rcx, %rax addq $0x40, %rax movq %rax, 0xe68(%rsp) movq 0xe68(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x3d0(%rsp) movslq 0x42c(%rsp), %rcx leaq 0x670(%rsp), %rax shlq $0x7, %rcx addq %rcx, %rax addq $0x50, %rax movq %rax, 0xe60(%rsp) movq 0xe60(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x3c0(%rsp) movslq 0x42c(%rsp), %rcx leaq 0x670(%rsp), %rax shlq $0x7, %rcx addq %rcx, %rax addq $0x60, %rax movq %rax, 0xe58(%rsp) movq 0xe58(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x3b0(%rsp) movslq 0x42c(%rsp), %rcx leaq 0x670(%rsp), %rax shlq $0x7, %rcx addq %rcx, %rax addq $0x70, %rax movq %rax, 0xe50(%rsp) movq 0xe50(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x3a0(%rsp) vmovaps 0x660(%rsp), %xmm2 vmovaps 0x3d0(%rsp), %xmm1 vmovaps 0x3f0(%rsp), %xmm0 vmovaps %xmm1, 0x1b60(%rsp) vmovaps %xmm0, 0x1b50(%rsp) vmovaps 0x1b60(%rsp), %xmm0 vsubps 0x1b50(%rsp), %xmm0, %xmm1 vmovaps 0x410(%rsp), %xmm3 vmovaps 0x3b0(%rsp), %xmm0 vmovaps %xmm3, 
0x1b40(%rsp) vmovaps %xmm0, 0x1b30(%rsp) vmovaps 0x1b40(%rsp), %xmm0 vsubps 0x1b30(%rsp), %xmm0, %xmm0 vmovaps %xmm2, 0x1240(%rsp) vmovaps %xmm1, 0x1230(%rsp) vmovaps %xmm0, 0x1220(%rsp) vmovaps 0x1240(%rsp), %xmm2 vmovaps 0x1230(%rsp), %xmm1 vmovaps 0x1220(%rsp), %xmm0 vmovaps %xmm2, 0x16f0(%rsp) vmovaps %xmm1, 0x16e0(%rsp) vmovaps %xmm0, 0x16d0(%rsp) vmovaps 0x16f0(%rsp), %xmm1 vmovaps 0x16e0(%rsp), %xmm0 vmovaps 0x16d0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x130(%rsp) vmovaps 0x130(%rsp), %xmm0 vmovaps %xmm0, 0x390(%rsp) vmovaps 0x660(%rsp), %xmm2 vmovaps 0x3e0(%rsp), %xmm1 vmovaps 0x3c0(%rsp), %xmm0 vmovaps %xmm1, 0x1b20(%rsp) vmovaps %xmm0, 0x1b10(%rsp) vmovaps 0x1b20(%rsp), %xmm0 vsubps 0x1b10(%rsp), %xmm0, %xmm1 vmovaps 0x3a0(%rsp), %xmm3 vmovaps 0x400(%rsp), %xmm0 vmovaps %xmm3, 0x1b00(%rsp) vmovaps %xmm0, 0x1af0(%rsp) vmovaps 0x1b00(%rsp), %xmm0 vsubps 0x1af0(%rsp), %xmm0, %xmm0 vmovaps %xmm2, 0x1210(%rsp) vmovaps %xmm1, 0x1200(%rsp) vmovaps %xmm0, 0x11f0(%rsp) vmovaps 0x1210(%rsp), %xmm2 vmovaps 0x1200(%rsp), %xmm1 vmovaps 0x11f0(%rsp), %xmm0 vmovaps %xmm2, 0x1720(%rsp) vmovaps %xmm1, 0x1710(%rsp) vmovaps %xmm0, 0x1700(%rsp) vmovaps 0x1720(%rsp), %xmm1 vmovaps 0x1710(%rsp), %xmm0 vmovaps 0x1700(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x120(%rsp) vmovaps 0x120(%rsp), %xmm0 vmovaps %xmm0, 0x380(%rsp) vmovaps 0x650(%rsp), %xmm2 vmovaps 0x3d0(%rsp), %xmm1 vmovaps 0x3f0(%rsp), %xmm3 vmovaps 0x3b0(%rsp), %xmm0 vmovaps %xmm3, 0xf60(%rsp) vmovaps %xmm0, 0xf50(%rsp) vmovaps 0xf60(%rsp), %xmm0 vaddps 0xf50(%rsp), %xmm0, %xmm0 vmovaps %xmm2, 0x11e0(%rsp) vmovaps %xmm1, 0x11d0(%rsp) vmovaps %xmm0, 0x11c0(%rsp) vmovaps 0x11e0(%rsp), %xmm2 vmovaps 0x11d0(%rsp), %xmm1 vmovaps 0x11c0(%rsp), %xmm0 vmovaps %xmm2, 0x1750(%rsp) vmovaps %xmm1, 0x1740(%rsp) vmovaps %xmm0, 0x1730(%rsp) vmovaps 0x1750(%rsp), %xmm1 vmovaps 0x1740(%rsp), %xmm0 vmovaps 0x1730(%rsp), %xmm2 
vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x110(%rsp) vmovaps 0x110(%rsp), %xmm0 vmovaps %xmm0, 0x370(%rsp) vmovaps 0x650(%rsp), %xmm2 vmovaps 0x3e0(%rsp), %xmm1 vmovaps 0x400(%rsp), %xmm3 vmovaps 0x3c0(%rsp), %xmm0 vmovaps %xmm3, 0xf40(%rsp) vmovaps %xmm0, 0xf30(%rsp) vmovaps 0xf40(%rsp), %xmm0 vaddps 0xf30(%rsp), %xmm0, %xmm0 vmovaps %xmm2, 0x11b0(%rsp) vmovaps %xmm1, 0x11a0(%rsp) vmovaps %xmm0, 0x1190(%rsp) vmovaps 0x11b0(%rsp), %xmm2 vmovaps 0x11a0(%rsp), %xmm1 vmovaps 0x1190(%rsp), %xmm0 vmovaps %xmm2, 0x1780(%rsp) vmovaps %xmm1, 0x1770(%rsp) vmovaps %xmm0, 0x1760(%rsp) vmovaps 0x1780(%rsp), %xmm1 vmovaps 0x1770(%rsp), %xmm0 vmovaps 0x1760(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x100(%rsp) vmovaps 0x100(%rsp), %xmm0 vmovaps %xmm0, 0x360(%rsp) vmovaps 0x370(%rsp), %xmm1 vmovaps 0x360(%rsp), %xmm0 vmovaps %xmm1, 0xf20(%rsp) vmovaps %xmm0, 0xf10(%rsp) vmovaps 0xf20(%rsp), %xmm0 vaddps 0xf10(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x350(%rsp) vmovaps 0x370(%rsp), %xmm1 vmovaps 0x360(%rsp), %xmm0 vmovaps %xmm1, 0x1ae0(%rsp) vmovaps %xmm0, 0x1ad0(%rsp) vmovaps 0x1ae0(%rsp), %xmm0 vsubps 0x1ad0(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x340(%rsp) vmovaps 0x640(%rsp), %xmm0 vmovaps %xmm0, 0xd0(%rsp) vmovaps 0x3d0(%rsp), %xmm0 vmovaps %xmm0, 0xe0(%rsp) vmovaps 0x630(%rsp), %xmm2 vmovaps 0x3f0(%rsp), %xmm1 vmovaps 0x3b0(%rsp), %xmm0 vmovaps %xmm2, 0x1180(%rsp) vmovaps %xmm1, 0x1170(%rsp) vmovaps %xmm0, 0x1160(%rsp) vmovaps 0x1180(%rsp), %xmm2 vmovaps 0x1170(%rsp), %xmm1 vmovaps 0x1160(%rsp), %xmm0 vmovaps %xmm2, 0x17b0(%rsp) vmovaps %xmm1, 0x17a0(%rsp) vmovaps %xmm0, 0x1790(%rsp) vmovaps 0x17b0(%rsp), %xmm1 vmovaps 0x17a0(%rsp), %xmm0 vmovaps 0x1790(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0xf0(%rsp) vmovaps 0xf0(%rsp), %xmm0 vmovaps 0xe0(%rsp), %xmm1 vmovaps 0xd0(%rsp), %xmm2 vmovaps %xmm2, 0x1150(%rsp) vmovaps %xmm1, 0x1140(%rsp) vmovaps %xmm0, 
0x1130(%rsp) vmovaps 0x1150(%rsp), %xmm2 vmovaps 0x1140(%rsp), %xmm1 vmovaps 0x1130(%rsp), %xmm0 vmovaps %xmm2, 0x17e0(%rsp) vmovaps %xmm1, 0x17d0(%rsp) vmovaps %xmm0, 0x17c0(%rsp) vmovaps 0x17e0(%rsp), %xmm1 vmovaps 0x17d0(%rsp), %xmm0 vmovaps 0x17c0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0xc0(%rsp) vmovaps 0xc0(%rsp), %xmm0 vmovaps %xmm0, 0x330(%rsp) vmovaps 0x600(%rsp), %xmm0 vmovaps %xmm0, 0x90(%rsp) vmovaps 0x3c0(%rsp), %xmm0 vmovaps %xmm0, 0xa0(%rsp) vmovaps 0x620(%rsp), %xmm2 vmovaps 0x3e0(%rsp), %xmm1 vmovaps 0x400(%rsp), %xmm3 vmovaps 0x610(%rsp), %xmm0 vmovaps %xmm3, 0x1a40(%rsp) vmovaps %xmm0, 0x1a30(%rsp) vmovaps 0x1a40(%rsp), %xmm0 vmulps 0x1a30(%rsp), %xmm0, %xmm0 vmovaps %xmm2, 0x1120(%rsp) vmovaps %xmm1, 0x1110(%rsp) vmovaps %xmm0, 0x1100(%rsp) vmovaps 0x1120(%rsp), %xmm2 vmovaps 0x1110(%rsp), %xmm1 vmovaps 0x1100(%rsp), %xmm0 vmovaps %xmm2, 0x1810(%rsp) vmovaps %xmm1, 0x1800(%rsp) vmovaps %xmm0, 0x17f0(%rsp) vmovaps 0x1810(%rsp), %xmm1 vmovaps 0x1800(%rsp), %xmm0 vmovaps 0x17f0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0xb0(%rsp) vmovaps 0xb0(%rsp), %xmm0 vmovaps 0xa0(%rsp), %xmm1 vmovaps 0x90(%rsp), %xmm2 vmovaps %xmm2, 0x10f0(%rsp) vmovaps %xmm1, 0x10e0(%rsp) vmovaps %xmm0, 0x10d0(%rsp) vmovaps 0x10f0(%rsp), %xmm2 vmovaps 0x10e0(%rsp), %xmm1 vmovaps 0x10d0(%rsp), %xmm0 vmovaps %xmm2, 0x1840(%rsp) vmovaps %xmm1, 0x1830(%rsp) vmovaps %xmm0, 0x1820(%rsp) vmovaps 0x1840(%rsp), %xmm1 vmovaps 0x1830(%rsp), %xmm0 vmovaps 0x1820(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x80(%rsp) vmovaps 0x80(%rsp), %xmm0 vmovaps %xmm0, 0x320(%rsp) vmovaps 0x330(%rsp), %xmm1 vmovaps 0x320(%rsp), %xmm0 vmovaps %xmm1, 0xf00(%rsp) vmovaps %xmm0, 0xef0(%rsp) vmovaps 0xf00(%rsp), %xmm0 vaddps 0xef0(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x310(%rsp) vmovaps 0x330(%rsp), %xmm1 vmovaps 0x320(%rsp), %xmm0 vmovaps %xmm1, 0x1ac0(%rsp) 
vmovaps %xmm0, 0x1ab0(%rsp) vmovaps 0x1ac0(%rsp), %xmm0 vsubps 0x1ab0(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x300(%rsp) vmovaps 0x5f0(%rsp), %xmm0 vmovaps %xmm0, 0x60(%rsp) vmovaps 0x640(%rsp), %xmm2 vmovaps 0x3d0(%rsp), %xmm1 vmovaps 0x3f0(%rsp), %xmm0 vmovaps %xmm2, 0x10c0(%rsp) vmovaps %xmm1, 0x10b0(%rsp) vmovaps %xmm0, 0x10a0(%rsp) vmovaps 0x10c0(%rsp), %xmm2 vmovaps 0x10b0(%rsp), %xmm1 vmovaps 0x10a0(%rsp), %xmm0 vmovaps %xmm2, 0x1870(%rsp) vmovaps %xmm1, 0x1860(%rsp) vmovaps %xmm0, 0x1850(%rsp) vmovaps 0x1870(%rsp), %xmm1 vmovaps 0x1860(%rsp), %xmm0 vmovaps 0x1850(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x70(%rsp) vmovaps 0x70(%rsp), %xmm1 vmovaps 0x60(%rsp), %xmm2 vmovaps 0x3b0(%rsp), %xmm0 vmovaps %xmm2, 0x1090(%rsp) vmovaps %xmm1, 0x1080(%rsp) vmovaps %xmm0, 0x1070(%rsp) vmovaps 0x1090(%rsp), %xmm2 vmovaps 0x1080(%rsp), %xmm1 vmovaps 0x1070(%rsp), %xmm0 vmovaps %xmm2, 0x18a0(%rsp) vmovaps %xmm1, 0x1890(%rsp) vmovaps %xmm0, 0x1880(%rsp) vmovaps 0x18a0(%rsp), %xmm1 vmovaps 0x1890(%rsp), %xmm0 vmovaps 0x1880(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x50(%rsp) vmovaps 0x50(%rsp), %xmm0 vmovaps %xmm0, 0x2f0(%rsp) vmovaps 0x610(%rsp), %xmm0 vmovaps %xmm0, 0x20(%rsp) vmovaps 0x3c0(%rsp), %xmm0 vmovaps %xmm0, 0x30(%rsp) vmovaps 0x620(%rsp), %xmm2 vmovaps 0x3e0(%rsp), %xmm1 vmovaps 0x400(%rsp), %xmm3 vmovaps 0x600(%rsp), %xmm0 vmovaps %xmm3, 0x1a20(%rsp) vmovaps %xmm0, 0x1a10(%rsp) vmovaps 0x1a20(%rsp), %xmm0 vmulps 0x1a10(%rsp), %xmm0, %xmm0 vmovaps %xmm2, 0x1060(%rsp) vmovaps %xmm1, 0x1050(%rsp) vmovaps %xmm0, 0x1040(%rsp) vmovaps 0x1060(%rsp), %xmm2 vmovaps 0x1050(%rsp), %xmm1 vmovaps 0x1040(%rsp), %xmm0 vmovaps %xmm2, 0x18d0(%rsp) vmovaps %xmm1, 0x18c0(%rsp) vmovaps %xmm0, 0x18b0(%rsp) vmovaps 0x18d0(%rsp), %xmm1 vmovaps 0x18c0(%rsp), %xmm0 vmovaps 0x18b0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x40(%rsp) vmovaps 
0x40(%rsp), %xmm0 vmovaps 0x30(%rsp), %xmm1 vmovaps 0x20(%rsp), %xmm2 vmovaps %xmm2, 0x1030(%rsp) vmovaps %xmm1, 0x1020(%rsp) vmovaps %xmm0, 0x1010(%rsp) vmovaps 0x1030(%rsp), %xmm2 vmovaps 0x1020(%rsp), %xmm1 vmovaps 0x1010(%rsp), %xmm0 vmovaps %xmm2, 0x1900(%rsp) vmovaps %xmm1, 0x18f0(%rsp) vmovaps %xmm0, 0x18e0(%rsp) vmovaps 0x1900(%rsp), %xmm1 vmovaps 0x18f0(%rsp), %xmm0 vmovaps 0x18e0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x10(%rsp) vmovaps 0x10(%rsp), %xmm0 vmovaps %xmm0, 0x2e0(%rsp) vmovaps 0x2f0(%rsp), %xmm1 vmovaps 0x2e0(%rsp), %xmm0 vmovaps %xmm1, 0xee0(%rsp) vmovaps %xmm0, 0xed0(%rsp) vmovaps 0xee0(%rsp), %xmm0 vaddps 0xed0(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x2d0(%rsp) vmovaps 0x2f0(%rsp), %xmm1 vmovaps 0x2e0(%rsp), %xmm0 vmovaps %xmm1, 0x1aa0(%rsp) vmovaps %xmm0, 0x1a90(%rsp) vmovaps 0x1aa0(%rsp), %xmm0 vsubps 0x1a90(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x2c0(%rsp) movq 0x468(%rsp), %rax vmovaps 0x390(%rsp), %xmm0 movq %rax, 0xd48(%rsp) vmovaps %xmm0, 0xd30(%rsp) vmovaps 0xd30(%rsp), %xmm0 movq 0xd48(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x460(%rsp), %rax vmovaps 0x350(%rsp), %xmm0 movq %rax, 0xd28(%rsp) vmovaps %xmm0, 0xd10(%rsp) vmovaps 0xd10(%rsp), %xmm0 movq 0xd28(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x458(%rsp), %rax vmovaps 0x340(%rsp), %xmm0 movq %rax, 0xd08(%rsp) vmovaps %xmm0, 0xcf0(%rsp) vmovaps 0xcf0(%rsp), %xmm0 movq 0xd08(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x450(%rsp), %rax vmovaps 0x310(%rsp), %xmm0 movq %rax, 0xce8(%rsp) vmovaps %xmm0, 0xcd0(%rsp) vmovaps 0xcd0(%rsp), %xmm0 movq 0xce8(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x448(%rsp), %rax vmovaps 0x300(%rsp), %xmm0 movq %rax, 0xcc8(%rsp) vmovaps %xmm0, 0xcb0(%rsp) vmovaps 0xcb0(%rsp), %xmm0 movq 0xcc8(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x440(%rsp), %rax vmovaps 0x2d0(%rsp), %xmm0 movq %rax, 0xca8(%rsp) vmovaps %xmm0, 0xc90(%rsp) vmovaps 0xc90(%rsp), %xmm0 movq 0xca8(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x438(%rsp), %rax 
vmovaps 0x2c0(%rsp), %xmm0 movq %rax, 0xc88(%rsp) vmovaps %xmm0, 0xc70(%rsp) vmovaps 0xc70(%rsp), %xmm0 movq 0xc88(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x430(%rsp), %rax vmovaps 0x380(%rsp), %xmm0 movq %rax, 0xc68(%rsp) vmovaps %xmm0, 0xc50(%rsp) vmovaps 0xc50(%rsp), %xmm0 movq 0xc68(%rsp), %rax vmovaps %xmm0, (%rax) movl 0xb18(%rsp), %ecx shll $0x2, %ecx shll $0x3, %ecx movq 0x468(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x468(%rsp) movl 0xb18(%rsp), %ecx shll $0x2, %ecx shll $0x3, %ecx movq 0x460(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x460(%rsp) movl 0xb18(%rsp), %ecx shll $0x2, %ecx shll $0x3, %ecx movq 0x458(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x458(%rsp) movl 0xb18(%rsp), %ecx shll $0x2, %ecx shll $0x3, %ecx movq 0x450(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x450(%rsp) movl 0xb18(%rsp), %ecx shll $0x2, %ecx shll $0x3, %ecx movq 0x448(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x448(%rsp) movl 0xb18(%rsp), %ecx shll $0x2, %ecx shll $0x3, %ecx movq 0x440(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x440(%rsp) movl 0xb18(%rsp), %ecx shll $0x2, %ecx shll $0x3, %ecx movq 0x438(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x438(%rsp) movl 0xb18(%rsp), %ecx shll $0x2, %ecx shll $0x3, %ecx movq 0x430(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x430(%rsp) movl 0x42c(%rsp), %eax addl $0x1, %eax movl %eax, 0x42c(%rsp) jmp 0x61a248 jmp 0x61b1dc movl 0x5e8(%rsp), %eax addl $0x1, %eax movl %eax, 0x5e8(%rsp) jmp 0x61917a jmp 0x61b1f4 movl 0x5ec(%rsp), %eax addl $0x1, %eax movl %eax, 0x5ec(%rsp) jmp 0x61915b leaq 0xa80(%rsp), %rax movq %rax, 0xb48(%rsp) movq 0xb48(%rsp), %rax movq %rax, 0xba0(%rsp) movq 0xba0(%rsp), %rax movq %rax, 0x8(%rsp) cmpq $0x0, 0x8(%rax) je 0x61b2c3 movq 0x8(%rsp), %rax movq 0x8(%rax), %rcx movl 
$0xffffffff, 0xb9c(%rsp) # imm = 0xFFFFFFFF movl 0xb9c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xb98(%rsp) cmpl $0x1, 0xb98(%rsp) jne 0x61b2c3 movq 0x8(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x61b297 movq 0x8(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x61b295 jmp 0x61b2c1 movq 0x8(%rsp), %rax movq (%rax), %rax movq %rax, 0xc30(%rsp) cmpq $0x0, 0xc30(%rsp) je 0x61b2bf movq 0xc30(%rsp), %rdi callq 0x5e480 jmp 0x61b2c1 jmp 0x61b2c3 movq 0x8(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x61b31b movq %rax, %rdi callq 0x5fc90 leaq 0xac8(%rsp), %rax movq %rax, 0xb58(%rsp) movq 0xb58(%rsp), %rax movq %rax, 0xb80(%rsp) movq 0xb80(%rsp), %rax movq %rax, (%rsp) cmpq $0x0, 0x8(%rax) je 0x61b3cb movq (%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xb7c(%rsp) # imm = 0xFFFFFFFF movl 0xb7c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xb78(%rsp) cmpl $0x1, 0xb78(%rsp) jne 0x61b3cb movq (%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x61b3a0 movq (%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x61b39e jmp 0x61b3c9 movq (%rsp), %rax movq (%rax), %rax movq %rax, 0xc40(%rsp) cmpq $0x0, 0xc40(%rsp) je 0x61b3c7 movq 0xc40(%rsp), %rdi callq 0x5e480 jmp 0x61b3c9 jmp 0x61b3cb movq (%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x61b422 movq %rax, %rdi callq 0x5fc90 jmp 0x61b424 movl 0xb14(%rsp), %eax addl $0x1, %eax movl %eax, 0xb14(%rsp) jmp 0x6189c8 addq $0x1ce8, %rsp # imm = 0x1CE8 retq nopw %cs:(%rax,%rax)
/ysh329[P]ncnn/src/layer/x86/convolution_winograd_transform_pack4.h
ncnn::convolution_winograd_dot_pack4_sse(ncnn::Mat&, int, ncnn::Mat const&, ncnn::Mat&, ncnn::Option const&)
// Winograd "dot" stage for pack4 layout (SSE): multiplies the transformed
// input tiles against the transformed kernel, per transform coefficient.
//
// Parameters:
//   bottom_blob_tm - transformed input, shape (w=tiles, h=batch, c=inch),
//                    4 floats per element; consumed and released here
//                    (reset to an empty Mat once permuted)
//   outch          - number of output channels
//   kernel_tm      - transformed kernel, one channel per output channel,
//                    one row per transform coefficient
//   top_blob_tm    - output, created here as (tiles, batch, outch) pack4
//   opt            - thread count and workspace allocator
//
// Strategy: first permute bottom_blob_tm into bottom_blob_tm2 so that the
// data for a group of 12/8/4/2/1 tiles is contiguous per input channel,
// then run a register-blocked GEMM-like loop over those groups.
static void convolution_winograd_dot_pack4_sse(Mat& bottom_blob_tm, int outch, const Mat& kernel_tm, Mat& top_blob_tm, const Option& opt)
{
    // Mat bottom_blob_tm(tiles, 16/36/64, inch, 16u, 4, opt.workspace_allocator);

    const int tiles = bottom_blob_tm.w;
    const int batch = bottom_blob_tm.h; // number of transform coefficients (16/36/64 per the note above)
    const int inch = bottom_blob_tm.c;

    // permute
    // Row count of bottom_blob_tm2 = number of tile groups after splitting
    // `tiles` greedily into blocks of 12, then 8, 4, 2, 1.
    Mat bottom_blob_tm2;
    if (tiles >= 12)
        bottom_blob_tm2.create(12 * inch, tiles / 12 + (tiles % 12) / 8 + (tiles % 12 % 8) / 4 + (tiles % 12 % 4) / 2 + tiles % 12 % 2, batch, 16u, 4, opt.workspace_allocator);
    else if (tiles >= 8)
        bottom_blob_tm2.create(8 * inch, tiles / 8 + (tiles % 8) / 4 + (tiles % 4) / 2 + tiles % 2, batch, 16u, 4, opt.workspace_allocator);
    else if (tiles >= 4)
        bottom_blob_tm2.create(4 * inch, tiles / 4 + (tiles % 4) / 2 + tiles % 2, batch, 16u, 4, opt.workspace_allocator);
    else if (tiles >= 2)
        bottom_blob_tm2.create(2 * inch, tiles / 2 + tiles % 2, batch, 16u, 4, opt.workspace_allocator);
    else // if (tiles >= 1)
        bottom_blob_tm2.create(1 * inch, tiles, batch, 16u, 4, opt.workspace_allocator);

    #pragma omp parallel for num_threads(opt.num_threads)
    for (int r = 0; r < batch; r++)
    {
        Mat tm2 = bottom_blob_tm2.channel(r);

        // tile
        int i = 0;
        // 12-tile groups: gather 12 tiles x 4 channels and transpose so the
        // dot-product loop can read 12 consecutive scalars per channel lane.
        for (; i + 11 < tiles; i += 12)
        {
            float* tmpptr = tm2.row(i / 12);

            const float* r0 = bottom_blob_tm;
            r0 += (r * tiles + i) * 4; // element (r, i) in pack4 layout

            for (int q = 0; q < inch; q++)
            {
                // transpose 4x12
                __m128 _r0 = _mm_load_ps(r0);
                __m128 _r1 = _mm_load_ps(r0 + 4);
                __m128 _r2 = _mm_load_ps(r0 + 4 * 2);
                __m128 _r3 = _mm_load_ps(r0 + 4 * 3);
                __m128 _r4 = _mm_load_ps(r0 + 4 * 4);
                __m128 _r5 = _mm_load_ps(r0 + 4 * 5);
                __m128 _r6 = _mm_load_ps(r0 + 4 * 6);
                __m128 _r7 = _mm_load_ps(r0 + 4 * 7);
                __m128 _r8 = _mm_load_ps(r0 + 4 * 8);
                __m128 _r9 = _mm_load_ps(r0 + 4 * 9);
                __m128 _ra = _mm_load_ps(r0 + 4 * 10);
                __m128 _rb = _mm_load_ps(r0 + 4 * 11);

                _MM_TRANSPOSE4_PS(_r0, _r1, _r2, _r3);
                _MM_TRANSPOSE4_PS(_r4, _r5, _r6, _r7);
                _MM_TRANSPOSE4_PS(_r8, _r9, _ra, _rb);

                // Store interleaved (_r0,_r4,_r8 / _r1,_r5,_r9 / ...) so each
                // consecutive 12-float run holds one channel lane of all 12 tiles.
                _mm_store_ps(tmpptr, _r0);
                _mm_store_ps(tmpptr + 4, _r4);
                _mm_store_ps(tmpptr + 4 * 2, _r8);
                _mm_store_ps(tmpptr + 4 * 3, _r1);
                _mm_store_ps(tmpptr + 4 * 4, _r5);
                _mm_store_ps(tmpptr + 4 * 5, _r9);
                _mm_store_ps(tmpptr + 4 * 6, _r2);
                _mm_store_ps(tmpptr + 4 * 7, _r6);
                _mm_store_ps(tmpptr + 4 * 8, _ra);
                _mm_store_ps(tmpptr + 4 * 9, _r3);
                _mm_store_ps(tmpptr + 4 * 10, _r7);
                _mm_store_ps(tmpptr + 4 * 11, _rb);

                r0 += bottom_blob_tm.cstep * 4; // next input channel
                tmpptr += 48;                   // 12 tiles * 4 floats
            }
        }
        // 8-tile groups (same scheme, smaller block)
        for (; i + 7 < tiles; i += 8)
        {
            float* tmpptr = tm2.row(i / 12 + (i % 12) / 8); // row index within the greedy 12/8/4/2/1 partition

            const float* r0 = bottom_blob_tm;
            r0 += (r * tiles + i) * 4;

            for (int q = 0; q < inch; q++)
            {
                // transpose 4x8
                __m128 _r0 = _mm_load_ps(r0);
                __m128 _r1 = _mm_load_ps(r0 + 4);
                __m128 _r2 = _mm_load_ps(r0 + 4 * 2);
                __m128 _r3 = _mm_load_ps(r0 + 4 * 3);
                __m128 _r4 = _mm_load_ps(r0 + 4 * 4);
                __m128 _r5 = _mm_load_ps(r0 + 4 * 5);
                __m128 _r6 = _mm_load_ps(r0 + 4 * 6);
                __m128 _r7 = _mm_load_ps(r0 + 4 * 7);

                _MM_TRANSPOSE4_PS(_r0, _r1, _r2, _r3);
                _MM_TRANSPOSE4_PS(_r4, _r5, _r6, _r7);

                _mm_store_ps(tmpptr, _r0);
                _mm_store_ps(tmpptr + 4, _r4);
                _mm_store_ps(tmpptr + 4 * 2, _r1);
                _mm_store_ps(tmpptr + 4 * 3, _r5);
                _mm_store_ps(tmpptr + 4 * 4, _r2);
                _mm_store_ps(tmpptr + 4 * 5, _r6);
                _mm_store_ps(tmpptr + 4 * 6, _r3);
                _mm_store_ps(tmpptr + 4 * 7, _r7);

                r0 += bottom_blob_tm.cstep * 4;
                tmpptr += 32; // 8 tiles * 4 floats
            }
        }
        // 4-tile groups
        for (; i + 3 < tiles; i += 4)
        {
            float* tmpptr = tm2.row(i / 12 + (i % 12) / 8 + (i % 12 % 8) / 4);

            const float* r0 = bottom_blob_tm;
            r0 += (r * tiles + i) * 4;

            for (int q = 0; q < inch; q++)
            {
                // transpose 4x4
                __m128 _r0 = _mm_load_ps(r0);
                __m128 _r1 = _mm_load_ps(r0 + 4);
                __m128 _r2 = _mm_load_ps(r0 + 4 * 2);
                __m128 _r3 = _mm_load_ps(r0 + 4 * 3);

                _MM_TRANSPOSE4_PS(_r0, _r1, _r2, _r3);

                _mm_store_ps(tmpptr, _r0);
                _mm_store_ps(tmpptr + 4, _r1);
                _mm_store_ps(tmpptr + 4 * 2, _r2);
                _mm_store_ps(tmpptr + 4 * 3, _r3);

                r0 += bottom_blob_tm.cstep * 4;
                tmpptr += 16; // 4 tiles * 4 floats
            }
        }
        // 2-tile groups
        for (; i + 1 < tiles; i += 2)
        {
            float* tmpptr = tm2.row(i / 12 + (i % 12) / 8 + (i % 12 % 8) / 4 + (i % 12 % 4) / 2);

            const float* r0 = bottom_blob_tm;
            r0 += (r * tiles + i) * 4;

            for (int q = 0; q < inch; q++)
            {
                // transpose 4x2 (unpacklo/unpackhi are enough for two columns)
                __m128 _r0 = _mm_load_ps(r0);
                __m128 _r1 = _mm_load_ps(r0 + 4);

                __m128 _r01_0 = _mm_unpacklo_ps(_r0, _r1);
                __m128 _r01_1 = _mm_unpackhi_ps(_r0, _r1);

                _mm_store_ps(tmpptr, _r01_0);
                _mm_store_ps(tmpptr + 4, _r01_1);

                r0 += bottom_blob_tm.cstep * 4;
                tmpptr += 8; // 2 tiles * 4 floats
            }
        }
        // leftover single tiles: straight copy, no transpose needed
        for (; i < tiles; i++)
        {
            float* tmpptr = tm2.row(i / 12 + (i % 12) / 8 + (i % 12 % 8) / 4 + (i % 12 % 4) / 2 + i % 12 % 2);

            const float* r0 = bottom_blob_tm;
            r0 += (r * tiles + i) * 4;

            for (int q = 0; q < inch; q++)
            {
                __m128 _val = _mm_load_ps(r0);
                _mm_store_ps(tmpptr, _val);

                r0 += bottom_blob_tm.cstep * 4;
                tmpptr += 4;
            }
        }
    }

    bottom_blob_tm = Mat(); // release the un-permuted input early
    // permute end

    top_blob_tm.create(tiles, batch, outch, 16u, 4, opt.workspace_allocator);

    // Dot product: for each output channel and transform coefficient, accumulate
    // over inch*4 scalar input lanes; each scalar is broadcast and multiplied
    // against a 4-wide kernel vector (pack4 output lanes).
    #pragma omp parallel for num_threads(opt.num_threads)
    for (int p = 0; p < outch; p++)
    {
        float* output0_tm = top_blob_tm.channel(p);

        const Mat kernel0_tm = kernel_tm.channel(p);

        for (int r = 0; r < batch; r++)
        {
            const Mat bb2 = bottom_blob_tm2.channel(r);

            int i = 0;
            // 12 tiles at once: one shared weight vector, 12 accumulators
            for (; i + 11 < tiles; i += 12)
            {
                const float* r0 = bb2.row(i / 12);
                const float* k0 = kernel0_tm.row(r);

                int nn = inch * 4; // inch always > 0

                __m128 _sum0 = _mm_setzero_ps();
                __m128 _sum1 = _mm_setzero_ps();
                __m128 _sum2 = _mm_setzero_ps();
                __m128 _sum3 = _mm_setzero_ps();
                __m128 _sum4 = _mm_setzero_ps();
                __m128 _sum5 = _mm_setzero_ps();
                __m128 _sum6 = _mm_setzero_ps();
                __m128 _sum7 = _mm_setzero_ps();
                __m128 _sum8 = _mm_setzero_ps();
                __m128 _sum9 = _mm_setzero_ps();
                __m128 _suma = _mm_setzero_ps();
                __m128 _sumb = _mm_setzero_ps();

                for (int j = 0; j < nn; j++)
                {
                    __m128 _w0 = _mm_load_ps(k0);
                    __m128 _val0 = _mm_load1_ps(r0);
                    __m128 _val1 = _mm_load1_ps(r0 + 1);
                    __m128 _val2 = _mm_load1_ps(r0 + 2);
                    __m128 _val3 = _mm_load1_ps(r0 + 3);
                    __m128 _val4 = _mm_load1_ps(r0 + 4);
                    __m128 _val5 = _mm_load1_ps(r0 + 5);
                    __m128 _val6 = _mm_load1_ps(r0 + 6);
                    __m128 _val7 = _mm_load1_ps(r0 + 7);
                    __m128 _val8 = _mm_load1_ps(r0 + 8);
                    __m128 _val9 = _mm_load1_ps(r0 + 9);
                    __m128 _vala = _mm_load1_ps(r0 + 10);
                    __m128 _valb = _mm_load1_ps(r0 + 11);
                    _sum0 = _mm_comp_fmadd_ps(_val0, _w0, _sum0);
                    _sum1 = _mm_comp_fmadd_ps(_val1, _w0, _sum1);
                    _sum2 = _mm_comp_fmadd_ps(_val2, _w0, _sum2);
                    _sum3 = _mm_comp_fmadd_ps(_val3, _w0, _sum3);
                    _sum4 = _mm_comp_fmadd_ps(_val4, _w0, _sum4);
                    _sum5 = _mm_comp_fmadd_ps(_val5, _w0, _sum5);
                    _sum6 = _mm_comp_fmadd_ps(_val6, _w0, _sum6);
                    _sum7 = _mm_comp_fmadd_ps(_val7, _w0, _sum7);
                    _sum8 = _mm_comp_fmadd_ps(_val8, _w0, _sum8);
                    _sum9 = _mm_comp_fmadd_ps(_val9, _w0, _sum9);
                    _suma = _mm_comp_fmadd_ps(_vala, _w0, _suma);
                    _sumb = _mm_comp_fmadd_ps(_valb, _w0, _sumb);

                    r0 += 12;
                    k0 += 4;
                }

                _mm_store_ps(output0_tm, _sum0);
                _mm_store_ps(output0_tm + 4, _sum1);
                _mm_store_ps(output0_tm + 4 * 2, _sum2);
                _mm_store_ps(output0_tm + 4 * 3, _sum3);
                _mm_store_ps(output0_tm + 4 * 4, _sum4);
                _mm_store_ps(output0_tm + 4 * 5, _sum5);
                _mm_store_ps(output0_tm + 4 * 6, _sum6);
                _mm_store_ps(output0_tm + 4 * 7, _sum7);
                _mm_store_ps(output0_tm + 4 * 8, _sum8);
                _mm_store_ps(output0_tm + 4 * 9, _sum9);
                _mm_store_ps(output0_tm + 4 * 10, _suma);
                _mm_store_ps(output0_tm + 4 * 11, _sumb);

                output0_tm += 4 * 12;
            }
            // 8 tiles at once
            for (; i + 7 < tiles; i += 8)
            {
                const float* r0 = bb2.row(i / 12 + (i % 12) / 8);
                const float* k0 = kernel0_tm.row(r);

                int nn = inch * 4; // inch always > 0

                __m128 _sum0 = _mm_setzero_ps();
                __m128 _sum1 = _mm_setzero_ps();
                __m128 _sum2 = _mm_setzero_ps();
                __m128 _sum3 = _mm_setzero_ps();
                __m128 _sum4 = _mm_setzero_ps();
                __m128 _sum5 = _mm_setzero_ps();
                __m128 _sum6 = _mm_setzero_ps();
                __m128 _sum7 = _mm_setzero_ps();

                for (int j = 0; j < nn; j++)
                {
                    __m128 _w0 = _mm_load_ps(k0);
                    __m128 _val0 = _mm_load1_ps(r0);
                    __m128 _val1 = _mm_load1_ps(r0 + 1);
                    __m128 _val2 = _mm_load1_ps(r0 + 2);
                    __m128 _val3 = _mm_load1_ps(r0 + 3);
                    __m128 _val4 = _mm_load1_ps(r0 + 4);
                    __m128 _val5 = _mm_load1_ps(r0 + 5);
                    __m128 _val6 = _mm_load1_ps(r0 + 6);
                    __m128 _val7 = _mm_load1_ps(r0 + 7);
                    _sum0 = _mm_comp_fmadd_ps(_val0, _w0, _sum0);
                    _sum1 = _mm_comp_fmadd_ps(_val1, _w0, _sum1);
                    _sum2 = _mm_comp_fmadd_ps(_val2, _w0, _sum2);
                    _sum3 = _mm_comp_fmadd_ps(_val3, _w0, _sum3);
                    _sum4 = _mm_comp_fmadd_ps(_val4, _w0, _sum4);
                    _sum5 = _mm_comp_fmadd_ps(_val5, _w0, _sum5);
                    _sum6 = _mm_comp_fmadd_ps(_val6, _w0, _sum6);
                    _sum7 = _mm_comp_fmadd_ps(_val7, _w0, _sum7);

                    r0 += 8;
                    k0 += 4;
                }

                _mm_store_ps(output0_tm, _sum0);
                _mm_store_ps(output0_tm + 4, _sum1);
                _mm_store_ps(output0_tm + 4 * 2, _sum2);
                _mm_store_ps(output0_tm + 4 * 3, _sum3);
                _mm_store_ps(output0_tm + 4 * 4, _sum4);
                _mm_store_ps(output0_tm + 4 * 5, _sum5);
                _mm_store_ps(output0_tm + 4 * 6, _sum6);
                _mm_store_ps(output0_tm + 4 * 7, _sum7);

                output0_tm += 4 * 8;
            }
            // 4 tiles at once
            for (; i + 3 < tiles; i += 4)
            {
                const float* r0 = bb2.row(i / 12 + (i % 12) / 8 + (i % 12 % 8) / 4);
                const float* k0 = kernel0_tm.row(r);

                int nn = inch * 4; // inch always > 0

                __m128 _sum0 = _mm_setzero_ps();
                __m128 _sum1 = _mm_setzero_ps();
                __m128 _sum2 = _mm_setzero_ps();
                __m128 _sum3 = _mm_setzero_ps();

                for (int j = 0; j < nn; j++)
                {
                    __m128 _w0 = _mm_load_ps(k0);
                    __m128 _val0 = _mm_load1_ps(r0);
                    __m128 _val1 = _mm_load1_ps(r0 + 1);
                    __m128 _val2 = _mm_load1_ps(r0 + 2);
                    __m128 _val3 = _mm_load1_ps(r0 + 3);
                    _sum0 = _mm_comp_fmadd_ps(_val0, _w0, _sum0);
                    _sum1 = _mm_comp_fmadd_ps(_val1, _w0, _sum1);
                    _sum2 = _mm_comp_fmadd_ps(_val2, _w0, _sum2);
                    _sum3 = _mm_comp_fmadd_ps(_val3, _w0, _sum3);

                    r0 += 4;
                    k0 += 4;
                }

                _mm_store_ps(output0_tm, _sum0);
                _mm_store_ps(output0_tm + 4, _sum1);
                _mm_store_ps(output0_tm + 4 * 2, _sum2);
                _mm_store_ps(output0_tm + 4 * 3, _sum3);

                output0_tm += 4 * 4;
            }
            // 2 tiles at once
            for (; i + 1 < tiles; i += 2)
            {
                const float* r0 = bb2.row(i / 12 + (i % 12) / 8 + (i % 12 % 8) / 4 + (i % 12 % 4) / 2);
                const float* k0 = kernel0_tm.row(r);

                int nn = inch * 4; // inch always > 0

                __m128 _sum0 = _mm_setzero_ps();
                __m128 _sum1 = _mm_setzero_ps();

                for (int j = 0; j < nn; j++)
                {
                    __m128 _w0 = _mm_load_ps(k0);
                    __m128 _val0 = _mm_load1_ps(r0);
                    __m128 _val1 = _mm_load1_ps(r0 + 1);
                    _sum0 = _mm_comp_fmadd_ps(_val0, _w0, _sum0);
                    _sum1 = _mm_comp_fmadd_ps(_val1, _w0, _sum1);

                    r0 += 2;
                    k0 += 4;
                }

                _mm_store_ps(output0_tm, _sum0);
                _mm_store_ps(output0_tm + 4, _sum1);

                output0_tm += 4 * 2;
            }
            // leftover single tiles
            for (; i < tiles; i++)
            {
                const float* r0 = bb2.row(i / 12 + (i % 12) / 8 + (i % 12 % 8) / 4 + (i % 12 % 4) / 2 + i % 12 % 2);
                const float* k0 = kernel0_tm.row(r);

                int nn = inch * 4; // inch always > 0

                __m128 _sum = _mm_setzero_ps();

                for (int j = 0; j < nn; j++)
                {
                    __m128 _w0 = _mm_load_ps(k0);
                    __m128 _val0 = _mm_load1_ps(r0);
                    _sum = _mm_comp_fmadd_ps(_val0, _w0, _sum);

                    r0 += 1;
                    k0 += 4;
                }

                _mm_store_ps(output0_tm, _sum);

                output0_tm += 4;
            }
        }
    }
}
subq $0x3058, %rsp # imm = 0x3058 movq %rdi, 0xe30(%rsp) movl %esi, 0xe2c(%rsp) movq %rdx, 0xe20(%rsp) movq %rcx, 0xe18(%rsp) movq %r8, 0xe10(%rsp) movq 0xe30(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0xe0c(%rsp) movq 0xe30(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0xe08(%rsp) movq 0xe30(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0xe04(%rsp) leaq 0xdb8(%rsp), %rax movq %rax, 0xe38(%rsp) movq 0xe38(%rsp), %rax movq %rax, 0x388(%rsp) movq $0x0, (%rax) movq $0x0, 0x8(%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movq $0x0, 0x20(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) cmpl $0xc, 0xe0c(%rsp) jl 0x61b629 movl 0xe04(%rsp), %ecx shll $0x2, %ecx movl %ecx, %eax leal (%rax,%rax,2), %esi movslq 0xe0c(%rsp), %rcx movl %ecx, %eax imulq $0x2aaaaaab, %rcx, %rdx # imm = 0x2AAAAAAB movq %rdx, %rcx shrq $0x3f, %rcx sarq $0x21, %rdx addl %ecx, %edx movl %edx, %edi shll $0x2, %edi movl %edi, %ecx leal (%rcx,%rcx,2), %ecx subl %ecx, %eax movl %eax, 0x384(%rsp) movl %eax, %edi addl $0x7, %edi movl %eax, %ecx addl $0x3, %ecx testl %eax, %eax movl %eax, %r8d cmovsl %edi, %r8d movl %eax, %edi cmovsl %ecx, %edi movl %r8d, %ecx sarl $0x3, %ecx addl %ecx, %edx andl $-0x8, %r8d movl %eax, %ecx subl %r8d, %ecx negl %r8d movl %r8d, %r9d movl %eax, %r8d leal 0x3(%r8,%r9), %r8d testl %ecx, %ecx cmovsl %r8d, %ecx sarl $0x2, %ecx addl %ecx, %edx andl $-0x4, %edi movl %eax, %ecx subl %edi, %ecx movl %ecx, %edi shrl $0x1f, %edi addl %edi, %ecx sarl %ecx addl %ecx, %edx movl %eax, %edi shrl $0x1f, %edi movl %eax, %ecx addl %edi, %ecx andl $-0x2, %ecx subl %ecx, %eax addl %eax, %edx movl 0xe08(%rsp), %ecx movq 0xe10(%rsp), %rax movq 0x10(%rax), %rdi movq %rsp, %rax movq %rdi, (%rax) leaq 0xdb8(%rsp), %rdi movl $0x10, %r8d movl $0x4, %r9d callq 0x62060 jmp 0x61b60b jmp 0x61b81f movq %rax, %rcx movl %edx, %eax movq %rcx, 0xdb0(%rsp) movl %eax, 0xdac(%rsp) jmp 0x621c66 cmpl $0x8, 0xe0c(%rsp) jl 
0x61b6ea movl 0xe04(%rsp), %esi shll $0x3, %esi movl 0xe0c(%rsp), %eax movl %eax, 0x380(%rsp) movl %eax, %edx addl $0x7, %edx movl %eax, %ecx addl $0x3, %ecx testl %eax, %eax movl %eax, %r8d cmovsl %edx, %r8d movl %eax, %edi cmovsl %ecx, %edi movl %r8d, %edx sarl $0x3, %edx andl $-0x8, %r8d movl %eax, %ecx subl %r8d, %ecx negl %r8d movl %r8d, %r9d movl %eax, %r8d leal 0x3(%r8,%r9), %r8d testl %ecx, %ecx cmovsl %r8d, %ecx sarl $0x2, %ecx addl %ecx, %edx andl $-0x4, %edi movl %eax, %ecx subl %edi, %ecx movl %ecx, %edi shrl $0x1f, %edi addl %edi, %ecx sarl %ecx addl %ecx, %edx movl %eax, %edi shrl $0x1f, %edi movl %eax, %ecx addl %edi, %ecx andl $-0x2, %ecx subl %ecx, %eax addl %eax, %edx movl 0xe08(%rsp), %ecx movq 0xe10(%rsp), %rax movq 0x10(%rax), %rdi movq %rsp, %rax movq %rdi, (%rax) leaq 0xdb8(%rsp), %rdi movl $0x10, %r8d movl $0x4, %r9d callq 0x62060 jmp 0x61b6e5 jmp 0x61b81d cmpl $0x4, 0xe0c(%rsp) jl 0x61b771 movl 0xe04(%rsp), %esi shll $0x2, %esi movl 0xe0c(%rsp), %eax movl %eax, %ecx addl $0x3, %ecx testl %eax, %eax movl %eax, %edi cmovsl %ecx, %edi movl %edi, %edx sarl $0x2, %edx andl $-0x4, %edi movl %eax, %ecx subl %edi, %ecx movl %ecx, %edi shrl $0x1f, %edi addl %edi, %ecx sarl %ecx addl %ecx, %edx movl %eax, %edi shrl $0x1f, %edi movl %eax, %ecx addl %edi, %ecx andl $-0x2, %ecx subl %ecx, %eax addl %eax, %edx movl 0xe08(%rsp), %ecx movq 0xe10(%rsp), %rax movq 0x10(%rax), %rdi movq %rsp, %rax movq %rdi, (%rax) leaq 0xdb8(%rsp), %rdi movl $0x10, %r8d movl $0x4, %r9d callq 0x62060 jmp 0x61b76c jmp 0x61b81b cmpl $0x2, 0xe0c(%rsp) jl 0x61b7d5 movl 0xe04(%rsp), %esi addl %esi, %esi movl 0xe0c(%rsp), %eax movl %eax, %ecx shrl $0x1f, %ecx movl %eax, %edx addl %ecx, %edx movl %edx, %ecx andl $-0x2, %ecx subl %ecx, %eax sarl %edx addl %eax, %edx movl 0xe08(%rsp), %ecx movq 0xe10(%rsp), %rax movq 0x10(%rax), %rdi movq %rsp, %rax movq %rdi, (%rax) leaq 0xdb8(%rsp), %rdi movl $0x10, %r8d movl $0x4, %r9d callq 0x62060 jmp 0x61b7d3 jmp 0x61b819 movl 0xe04(%rsp), %esi 
movl 0xe0c(%rsp), %edx movl 0xe08(%rsp), %ecx movq 0xe10(%rsp), %rax movq 0x10(%rax), %rdi movq %rsp, %rax movq %rdi, (%rax) leaq 0xdb8(%rsp), %rdi movl $0x10, %r8d movl $0x4, %r9d callq 0x62060 jmp 0x61b817 jmp 0x61b819 jmp 0x61b81b jmp 0x61b81d jmp 0x61b81f movl $0x0, 0xda8(%rsp) movl 0xda8(%rsp), %eax cmpl 0xe08(%rsp), %eax jge 0x61dca5 movl 0xda8(%rsp), %eax leaq 0xd60(%rsp), %rcx movq %rcx, 0x1008(%rsp) leaq 0xdb8(%rsp), %rcx movq %rcx, 0x1000(%rsp) movl %eax, 0xffc(%rsp) movq 0x1000(%rsp), %rax movq %rax, 0x370(%rsp) movb $0x0, 0xffb(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0xffc(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0xd60(%rsp), %r10 movq %r10, 0x2fa0(%rsp) movl %r9d, 0x2f9c(%rsp) movl %r8d, 0x2f98(%rsp) movl %edi, 0x2f94(%rsp) movq %rsi, 0x2f88(%rsp) movq %rdx, 0x2f80(%rsp) movl %ecx, 0x2f7c(%rsp) movq %rax, 0x2f70(%rsp) movq 0x2fa0(%rsp), %rcx movq %rcx, 0x378(%rsp) movq 0x2f88(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x2f80(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x2f7c(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x2f70(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x2f9c(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x2f98(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x2f94(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x3040(%rsp) movl $0x10, 0x303c(%rsp) movq 0x3040(%rsp), %rax movslq 0x303c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x303c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rcx movq 0x378(%rsp), %rax movq %rcx, 0x40(%rax) movq 0x370(%rsp), %rax movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0xd88(%rsp) cmpl $0x4, 0x28(%rax) jne 0x61ba02 movq 0x370(%rsp), %rcx movslq 0x2c(%rcx), 
%rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0xda0(%rsp) movb $0x1, 0xffb(%rsp) testb $0x1, 0xffb(%rsp) jne 0x61bb3b leaq 0xd60(%rsp), %rax movq %rax, 0x1010(%rsp) movq 0x1010(%rsp), %rax movq %rax, 0x1020(%rsp) movq 0x1020(%rsp), %rax movq %rax, 0x368(%rsp) cmpq $0x0, 0x8(%rax) je 0x61bae0 movq 0x368(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x101c(%rsp) # imm = 0xFFFFFFFF movl 0x101c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1018(%rsp) cmpl $0x1, 0x1018(%rsp) jne 0x61bae0 movq 0x368(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x61bab1 movq 0x368(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x61baaf jmp 0x61bade movq 0x368(%rsp), %rax movq (%rax), %rax movq %rax, 0x1130(%rsp) cmpq $0x0, 0x1130(%rsp) je 0x61badc movq 0x1130(%rsp), %rdi callq 0x5e480 jmp 0x61bade jmp 0x61bae0 movq 0x368(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x61bb3b movq %rax, %rdi callq 0x5fc90 jmp 0x61bb3d movl $0x0, 0xd5c(%rsp) movl 0xd5c(%rsp), %eax addl $0xb, %eax cmpl 0xe0c(%rsp), %eax jge 0x61c7a7 movl 0xd5c(%rsp), %eax movl $0xc, %ecx cltd idivl %ecx leaq 0xd60(%rsp), %rcx movq %rcx, 0x11f8(%rsp) movl %eax, 0x11f4(%rsp) movq 0x11f8(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0x11f4(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0xd50(%rsp) movq 0xe30(%rsp), %rax movq %rax, 0x1120(%rsp) movq 0x1120(%rsp), %rax movq (%rax), %rax movq %rax, 0x360(%rsp) movq 0x360(%rsp), %rax movq %rax, 0xd48(%rsp) movl 0xda8(%rsp), %ecx imull 0xe0c(%rsp), %ecx addl 0xd5c(%rsp), %ecx shll $0x2, %ecx movq 0xd48(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0xd48(%rsp) movl $0x0, 0xd44(%rsp) movl 0xd44(%rsp), %eax cmpl 0xe04(%rsp), %eax jge 0x61c78f movq 0xd48(%rsp), 
%rax movq %rax, 0x19b8(%rsp) movq 0x19b8(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0xd30(%rsp) movq 0xd48(%rsp), %rax addq $0x10, %rax movq %rax, 0x19b0(%rsp) movq 0x19b0(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0xd20(%rsp) movq 0xd48(%rsp), %rax addq $0x20, %rax movq %rax, 0x19a8(%rsp) movq 0x19a8(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0xd10(%rsp) movq 0xd48(%rsp), %rax addq $0x30, %rax movq %rax, 0x19a0(%rsp) movq 0x19a0(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0xd00(%rsp) movq 0xd48(%rsp), %rax addq $0x40, %rax movq %rax, 0x1998(%rsp) movq 0x1998(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0xcf0(%rsp) movq 0xd48(%rsp), %rax addq $0x50, %rax movq %rax, 0x1990(%rsp) movq 0x1990(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0xce0(%rsp) movq 0xd48(%rsp), %rax addq $0x60, %rax movq %rax, 0x1988(%rsp) movq 0x1988(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0xcd0(%rsp) movq 0xd48(%rsp), %rax addq $0x70, %rax movq %rax, 0x1980(%rsp) movq 0x1980(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0xcc0(%rsp) movq 0xd48(%rsp), %rax addq $0x80, %rax movq %rax, 0x1978(%rsp) movq 0x1978(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0xcb0(%rsp) movq 0xd48(%rsp), %rax addq $0x90, %rax movq %rax, 0x1970(%rsp) movq 0x1970(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0xca0(%rsp) movq 0xd48(%rsp), %rax addq $0xa0, %rax movq %rax, 0x1968(%rsp) movq 0x1968(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0xc90(%rsp) movq 0xd48(%rsp), %rax addq $0xb0, %rax movq %rax, 0x1960(%rsp) movq 0x1960(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0xc80(%rsp) vmovaps 0xd30(%rsp), %xmm1 vmovaps 0xd20(%rsp), %xmm0 vmovaps %xmm1, 0x1b50(%rsp) vmovaps %xmm0, 0x1b40(%rsp) vmovaps 0x1b50(%rsp), %xmm0 vmovaps 0x1b40(%rsp), %xmm1 vunpcklps %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1] vmovaps %xmm0, 0xc40(%rsp) vmovaps 0xd10(%rsp), %xmm1 vmovaps 0xd00(%rsp), %xmm0 vmovaps %xmm1, 0x1b30(%rsp) vmovaps %xmm0, 0x1b20(%rsp) vmovaps 
0x1b30(%rsp), %xmm0 vmovaps 0x1b20(%rsp), %xmm1 vunpcklps %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1] vmovaps %xmm0, 0xc60(%rsp) vmovaps 0xd30(%rsp), %xmm1 vmovaps 0xd20(%rsp), %xmm0 vmovaps %xmm1, 0x1cf0(%rsp) vmovaps %xmm0, 0x1ce0(%rsp) vmovaps 0x1cf0(%rsp), %xmm0 vmovaps 0x1ce0(%rsp), %xmm1 vunpckhps %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[2],xmm1[2],xmm0[3],xmm1[3] vmovaps %xmm0, 0xc50(%rsp) vmovaps 0xd10(%rsp), %xmm1 vmovaps 0xd00(%rsp), %xmm0 vmovaps %xmm1, 0x1cd0(%rsp) vmovaps %xmm0, 0x1cc0(%rsp) vmovaps 0x1cd0(%rsp), %xmm0 vmovaps 0x1cc0(%rsp), %xmm1 vunpckhps %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[2],xmm1[2],xmm0[3],xmm1[3] vmovaps %xmm0, 0xc70(%rsp) vmovaps 0xc40(%rsp), %xmm1 vmovaps 0xc60(%rsp), %xmm0 vmovaps %xmm1, 0x1e70(%rsp) vmovaps %xmm0, 0x1e60(%rsp) vmovapd 0x1e70(%rsp), %xmm0 vmovapd 0x1e60(%rsp), %xmm1 vunpcklpd %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0] vmovapd %xmm0, 0xd30(%rsp) vmovaps 0xc60(%rsp), %xmm1 vmovaps 0xc40(%rsp), %xmm0 vmovaps %xmm1, 0x1ff0(%rsp) vmovaps %xmm0, 0x1fe0(%rsp) vmovapd 0x1ff0(%rsp), %xmm1 vmovapd 0x1fe0(%rsp), %xmm0 vunpckhpd %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[1],xmm1[1] vmovapd %xmm0, 0xd20(%rsp) vmovaps 0xc50(%rsp), %xmm1 vmovaps 0xc70(%rsp), %xmm0 vmovaps %xmm1, 0x1e50(%rsp) vmovaps %xmm0, 0x1e40(%rsp) vmovapd 0x1e50(%rsp), %xmm0 vmovapd 0x1e40(%rsp), %xmm1 vunpcklpd %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0] vmovapd %xmm0, 0xd10(%rsp) vmovaps 0xc70(%rsp), %xmm1 vmovaps 0xc50(%rsp), %xmm0 vmovaps %xmm1, 0x1fd0(%rsp) vmovaps %xmm0, 0x1fc0(%rsp) vmovapd 0x1fd0(%rsp), %xmm1 vmovapd 0x1fc0(%rsp), %xmm0 vunpckhpd %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[1],xmm1[1] vmovaps %xmm0, 0xd00(%rsp) jmp 0x61c03f jmp 0x61c041 vmovaps 0xcf0(%rsp), %xmm1 vmovaps 0xce0(%rsp), %xmm0 vmovaps %xmm1, 0x1b10(%rsp) vmovaps %xmm0, 0x1b00(%rsp) vmovaps 0x1b10(%rsp), %xmm0 vmovaps 0x1b00(%rsp), %xmm1 vunpcklps %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1] vmovaps %xmm0, 0xc00(%rsp) vmovaps 0xcd0(%rsp), %xmm1 vmovaps 
0xcc0(%rsp), %xmm0 vmovaps %xmm1, 0x1af0(%rsp) vmovaps %xmm0, 0x1ae0(%rsp) vmovaps 0x1af0(%rsp), %xmm0 vmovaps 0x1ae0(%rsp), %xmm1 vunpcklps %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1] vmovaps %xmm0, 0xc20(%rsp) vmovaps 0xcf0(%rsp), %xmm1 vmovaps 0xce0(%rsp), %xmm0 vmovaps %xmm1, 0x1cb0(%rsp) vmovaps %xmm0, 0x1ca0(%rsp) vmovaps 0x1cb0(%rsp), %xmm0 vmovaps 0x1ca0(%rsp), %xmm1 vunpckhps %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[2],xmm1[2],xmm0[3],xmm1[3] vmovaps %xmm0, 0xc10(%rsp) vmovaps 0xcd0(%rsp), %xmm1 vmovaps 0xcc0(%rsp), %xmm0 vmovaps %xmm1, 0x1c90(%rsp) vmovaps %xmm0, 0x1c80(%rsp) vmovaps 0x1c90(%rsp), %xmm0 vmovaps 0x1c80(%rsp), %xmm1 vunpckhps %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[2],xmm1[2],xmm0[3],xmm1[3] vmovaps %xmm0, 0xc30(%rsp) vmovaps 0xc00(%rsp), %xmm1 vmovaps 0xc20(%rsp), %xmm0 vmovaps %xmm1, 0x1e30(%rsp) vmovaps %xmm0, 0x1e20(%rsp) vmovapd 0x1e30(%rsp), %xmm0 vmovapd 0x1e20(%rsp), %xmm1 vunpcklpd %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0] vmovapd %xmm0, 0xcf0(%rsp) vmovaps 0xc20(%rsp), %xmm1 vmovaps 0xc00(%rsp), %xmm0 vmovaps %xmm1, 0x1fb0(%rsp) vmovaps %xmm0, 0x1fa0(%rsp) vmovapd 0x1fb0(%rsp), %xmm1 vmovapd 0x1fa0(%rsp), %xmm0 vunpckhpd %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[1],xmm1[1] vmovapd %xmm0, 0xce0(%rsp) vmovaps 0xc10(%rsp), %xmm1 vmovaps 0xc30(%rsp), %xmm0 vmovaps %xmm1, 0x1e10(%rsp) vmovaps %xmm0, 0x1e00(%rsp) vmovapd 0x1e10(%rsp), %xmm0 vmovapd 0x1e00(%rsp), %xmm1 vunpcklpd %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0] vmovapd %xmm0, 0xcd0(%rsp) vmovaps 0xc30(%rsp), %xmm1 vmovaps 0xc10(%rsp), %xmm0 vmovaps %xmm1, 0x1f90(%rsp) vmovaps %xmm0, 0x1f80(%rsp) vmovapd 0x1f90(%rsp), %xmm1 vmovapd 0x1f80(%rsp), %xmm0 vunpckhpd %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[1],xmm1[1] vmovaps %xmm0, 0xcc0(%rsp) jmp 0x61c25b jmp 0x61c25d vmovaps 0xcb0(%rsp), %xmm1 vmovaps 0xca0(%rsp), %xmm0 vmovaps %xmm1, 0x1ad0(%rsp) vmovaps %xmm0, 0x1ac0(%rsp) vmovaps 0x1ad0(%rsp), %xmm0 vmovaps 0x1ac0(%rsp), %xmm1 vunpcklps %xmm1, %xmm0, %xmm0 # xmm0 = 
xmm0[0],xmm1[0],xmm0[1],xmm1[1] vmovaps %xmm0, 0xbc0(%rsp) vmovaps 0xc90(%rsp), %xmm1 vmovaps 0xc80(%rsp), %xmm0 vmovaps %xmm1, 0x1ab0(%rsp) vmovaps %xmm0, 0x1aa0(%rsp) vmovaps 0x1ab0(%rsp), %xmm0 vmovaps 0x1aa0(%rsp), %xmm1 vunpcklps %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1] vmovaps %xmm0, 0xbe0(%rsp) vmovaps 0xcb0(%rsp), %xmm1 vmovaps 0xca0(%rsp), %xmm0 vmovaps %xmm1, 0x1c70(%rsp) vmovaps %xmm0, 0x1c60(%rsp) vmovaps 0x1c70(%rsp), %xmm0 vmovaps 0x1c60(%rsp), %xmm1 vunpckhps %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[2],xmm1[2],xmm0[3],xmm1[3] vmovaps %xmm0, 0xbd0(%rsp) vmovaps 0xc90(%rsp), %xmm1 vmovaps 0xc80(%rsp), %xmm0 vmovaps %xmm1, 0x1c50(%rsp) vmovaps %xmm0, 0x1c40(%rsp) vmovaps 0x1c50(%rsp), %xmm0 vmovaps 0x1c40(%rsp), %xmm1 vunpckhps %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[2],xmm1[2],xmm0[3],xmm1[3] vmovaps %xmm0, 0xbf0(%rsp) vmovaps 0xbc0(%rsp), %xmm1 vmovaps 0xbe0(%rsp), %xmm0 vmovaps %xmm1, 0x1df0(%rsp) vmovaps %xmm0, 0x1de0(%rsp) vmovapd 0x1df0(%rsp), %xmm0 vmovapd 0x1de0(%rsp), %xmm1 vunpcklpd %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0] vmovapd %xmm0, 0xcb0(%rsp) vmovaps 0xbe0(%rsp), %xmm1 vmovaps 0xbc0(%rsp), %xmm0 vmovaps %xmm1, 0x1f70(%rsp) vmovaps %xmm0, 0x1f60(%rsp) vmovapd 0x1f70(%rsp), %xmm1 vmovapd 0x1f60(%rsp), %xmm0 vunpckhpd %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[1],xmm1[1] vmovapd %xmm0, 0xca0(%rsp) vmovaps 0xbd0(%rsp), %xmm1 vmovaps 0xbf0(%rsp), %xmm0 vmovaps %xmm1, 0x1dd0(%rsp) vmovaps %xmm0, 0x1dc0(%rsp) vmovapd 0x1dd0(%rsp), %xmm0 vmovapd 0x1dc0(%rsp), %xmm1 vunpcklpd %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0] vmovapd %xmm0, 0xc90(%rsp) vmovaps 0xbf0(%rsp), %xmm1 vmovaps 0xbd0(%rsp), %xmm0 vmovaps %xmm1, 0x1f50(%rsp) vmovaps %xmm0, 0x1f40(%rsp) vmovapd 0x1f50(%rsp), %xmm1 vmovapd 0x1f40(%rsp), %xmm0 vunpckhpd %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[1],xmm1[1] vmovaps %xmm0, 0xc80(%rsp) jmp 0x61c477 movq 0xd50(%rsp), %rax vmovaps 0xd30(%rsp), %xmm0 movq %rax, 0x18b8(%rsp) vmovaps %xmm0, 0x18a0(%rsp) vmovaps 0x18a0(%rsp), %xmm0 movq 
0x18b8(%rsp), %rax vmovaps %xmm0, (%rax) movq 0xd50(%rsp), %rax addq $0x10, %rax vmovaps 0xcf0(%rsp), %xmm0 movq %rax, 0x1898(%rsp) vmovaps %xmm0, 0x1880(%rsp) vmovaps 0x1880(%rsp), %xmm0 movq 0x1898(%rsp), %rax vmovaps %xmm0, (%rax) movq 0xd50(%rsp), %rax addq $0x20, %rax vmovaps 0xcb0(%rsp), %xmm0 movq %rax, 0x1878(%rsp) vmovaps %xmm0, 0x1860(%rsp) vmovaps 0x1860(%rsp), %xmm0 movq 0x1878(%rsp), %rax vmovaps %xmm0, (%rax) movq 0xd50(%rsp), %rax addq $0x30, %rax vmovaps 0xd20(%rsp), %xmm0 movq %rax, 0x1858(%rsp) vmovaps %xmm0, 0x1840(%rsp) vmovaps 0x1840(%rsp), %xmm0 movq 0x1858(%rsp), %rax vmovaps %xmm0, (%rax) movq 0xd50(%rsp), %rax addq $0x40, %rax vmovaps 0xce0(%rsp), %xmm0 movq %rax, 0x1838(%rsp) vmovaps %xmm0, 0x1820(%rsp) vmovaps 0x1820(%rsp), %xmm0 movq 0x1838(%rsp), %rax vmovaps %xmm0, (%rax) movq 0xd50(%rsp), %rax addq $0x50, %rax vmovaps 0xca0(%rsp), %xmm0 movq %rax, 0x1818(%rsp) vmovaps %xmm0, 0x1800(%rsp) vmovaps 0x1800(%rsp), %xmm0 movq 0x1818(%rsp), %rax vmovaps %xmm0, (%rax) movq 0xd50(%rsp), %rax addq $0x60, %rax vmovaps 0xd10(%rsp), %xmm0 movq %rax, 0x17f8(%rsp) vmovaps %xmm0, 0x17e0(%rsp) vmovaps 0x17e0(%rsp), %xmm0 movq 0x17f8(%rsp), %rax vmovaps %xmm0, (%rax) movq 0xd50(%rsp), %rax addq $0x70, %rax vmovaps 0xcd0(%rsp), %xmm0 movq %rax, 0x17d8(%rsp) vmovaps %xmm0, 0x17c0(%rsp) vmovaps 0x17c0(%rsp), %xmm0 movq 0x17d8(%rsp), %rax vmovaps %xmm0, (%rax) movq 0xd50(%rsp), %rax addq $0x80, %rax vmovaps 0xc90(%rsp), %xmm0 movq %rax, 0x17b8(%rsp) vmovaps %xmm0, 0x17a0(%rsp) vmovaps 0x17a0(%rsp), %xmm0 movq 0x17b8(%rsp), %rax vmovaps %xmm0, (%rax) movq 0xd50(%rsp), %rax addq $0x90, %rax vmovaps 0xd00(%rsp), %xmm0 movq %rax, 0x1798(%rsp) vmovaps %xmm0, 0x1780(%rsp) vmovaps 0x1780(%rsp), %xmm0 movq 0x1798(%rsp), %rax vmovaps %xmm0, (%rax) movq 0xd50(%rsp), %rax addq $0xa0, %rax vmovaps 0xcc0(%rsp), %xmm0 movq %rax, 0x1778(%rsp) vmovaps %xmm0, 0x1760(%rsp) vmovaps 0x1760(%rsp), %xmm0 movq 0x1778(%rsp), %rax vmovaps %xmm0, (%rax) movq 0xd50(%rsp), %rax addq 
$0xb0, %rax vmovaps 0xc80(%rsp), %xmm0 movq %rax, 0x1758(%rsp) vmovaps %xmm0, 0x1740(%rsp) vmovaps 0x1740(%rsp), %xmm0 movq 0x1758(%rsp), %rax vmovaps %xmm0, (%rax) movq 0xe30(%rsp), %rax movq 0x40(%rax), %rax shlq $0x2, %rax shlq $0x2, %rax addq 0xd48(%rsp), %rax movq %rax, 0xd48(%rsp) movq 0xd50(%rsp), %rax addq $0xc0, %rax movq %rax, 0xd50(%rsp) movl 0xd44(%rsp), %eax addl $0x1, %eax movl %eax, 0xd44(%rsp) jmp 0x61bc21 jmp 0x61c791 movl 0xd5c(%rsp), %eax addl $0xc, %eax movl %eax, 0xd5c(%rsp) jmp 0x61bb48 jmp 0x61c7a9 movl 0xd5c(%rsp), %eax addl $0x7, %eax cmpl 0xe0c(%rsp), %eax jge 0x61d077 movl 0xd5c(%rsp), %eax movl $0xc, %ecx cltd idivl %ecx movl %eax, 0x354(%rsp) movl 0xd5c(%rsp), %eax movl $0xc, %ecx cltd idivl %ecx movl %edx, %eax movl $0x8, %ecx cltd idivl %ecx movl %eax, %ecx movl 0x354(%rsp), %eax addl %ecx, %eax leaq 0xd60(%rsp), %rcx movq %rcx, 0x11e8(%rsp) movl %eax, 0x11e4(%rsp) movq 0x11e8(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0x11e4(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0xbb8(%rsp) movq 0xe30(%rsp), %rax movq %rax, 0x1118(%rsp) movq 0x1118(%rsp), %rax movq (%rax), %rax movq %rax, 0x358(%rsp) movq 0x358(%rsp), %rax movq %rax, 0xbb0(%rsp) movl 0xda8(%rsp), %ecx imull 0xe0c(%rsp), %ecx addl 0xd5c(%rsp), %ecx shll $0x2, %ecx movq 0xbb0(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0xbb0(%rsp) movl $0x0, 0xbac(%rsp) movl 0xbac(%rsp), %eax cmpl 0xe04(%rsp), %eax jge 0x61d05f movq 0xbb0(%rsp), %rax movq %rax, 0x1958(%rsp) movq 0x1958(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0xb90(%rsp) movq 0xbb0(%rsp), %rax addq $0x10, %rax movq %rax, 0x1950(%rsp) movq 0x1950(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0xb80(%rsp) movq 0xbb0(%rsp), %rax addq $0x20, %rax movq %rax, 0x1948(%rsp) movq 0x1948(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0xb70(%rsp) movq 0xbb0(%rsp), %rax addq $0x30, %rax movq %rax, 0x1940(%rsp) movq 0x1940(%rsp), %rax vmovaps (%rax), 
%xmm0 vmovaps %xmm0, 0xb60(%rsp) movq 0xbb0(%rsp), %rax addq $0x40, %rax movq %rax, 0x1938(%rsp) movq 0x1938(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0xb50(%rsp) movq 0xbb0(%rsp), %rax addq $0x50, %rax movq %rax, 0x1930(%rsp) movq 0x1930(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0xb40(%rsp) movq 0xbb0(%rsp), %rax addq $0x60, %rax movq %rax, 0x1928(%rsp) movq 0x1928(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0xb30(%rsp) movq 0xbb0(%rsp), %rax addq $0x70, %rax movq %rax, 0x1920(%rsp) movq 0x1920(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0xb20(%rsp) vmovaps 0xb90(%rsp), %xmm1 vmovaps 0xb80(%rsp), %xmm0 vmovaps %xmm1, 0x1a90(%rsp) vmovaps %xmm0, 0x1a80(%rsp) vmovaps 0x1a90(%rsp), %xmm0 vmovaps 0x1a80(%rsp), %xmm1 vunpcklps %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1] vmovaps %xmm0, 0xae0(%rsp) vmovaps 0xb70(%rsp), %xmm1 vmovaps 0xb60(%rsp), %xmm0 vmovaps %xmm1, 0x1a70(%rsp) vmovaps %xmm0, 0x1a60(%rsp) vmovaps 0x1a70(%rsp), %xmm0 vmovaps 0x1a60(%rsp), %xmm1 vunpcklps %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1] vmovaps %xmm0, 0xb00(%rsp) vmovaps 0xb90(%rsp), %xmm1 vmovaps 0xb80(%rsp), %xmm0 vmovaps %xmm1, 0x1c30(%rsp) vmovaps %xmm0, 0x1c20(%rsp) vmovaps 0x1c30(%rsp), %xmm0 vmovaps 0x1c20(%rsp), %xmm1 vunpckhps %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[2],xmm1[2],xmm0[3],xmm1[3] vmovaps %xmm0, 0xaf0(%rsp) vmovaps 0xb70(%rsp), %xmm1 vmovaps 0xb60(%rsp), %xmm0 vmovaps %xmm1, 0x1c10(%rsp) vmovaps %xmm0, 0x1c00(%rsp) vmovaps 0x1c10(%rsp), %xmm0 vmovaps 0x1c00(%rsp), %xmm1 vunpckhps %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[2],xmm1[2],xmm0[3],xmm1[3] vmovaps %xmm0, 0xb10(%rsp) vmovaps 0xae0(%rsp), %xmm1 vmovaps 0xb00(%rsp), %xmm0 vmovaps %xmm1, 0x1db0(%rsp) vmovaps %xmm0, 0x1da0(%rsp) vmovapd 0x1db0(%rsp), %xmm0 vmovapd 0x1da0(%rsp), %xmm1 vunpcklpd %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0] vmovapd %xmm0, 0xb90(%rsp) vmovaps 0xb00(%rsp), %xmm1 vmovaps 0xae0(%rsp), %xmm0 vmovaps %xmm1, 0x1f30(%rsp) vmovaps %xmm0, 0x1f20(%rsp) 
vmovapd 0x1f30(%rsp), %xmm1 vmovapd 0x1f20(%rsp), %xmm0 vunpckhpd %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[1],xmm1[1] vmovapd %xmm0, 0xb80(%rsp) vmovaps 0xaf0(%rsp), %xmm1 vmovaps 0xb10(%rsp), %xmm0 vmovaps %xmm1, 0x1d90(%rsp) vmovaps %xmm0, 0x1d80(%rsp) vmovapd 0x1d90(%rsp), %xmm0 vmovapd 0x1d80(%rsp), %xmm1 vunpcklpd %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0] vmovapd %xmm0, 0xb70(%rsp) vmovaps 0xb10(%rsp), %xmm1 vmovaps 0xaf0(%rsp), %xmm0 vmovaps %xmm1, 0x1f10(%rsp) vmovaps %xmm0, 0x1f00(%rsp) vmovapd 0x1f10(%rsp), %xmm1 vmovapd 0x1f00(%rsp), %xmm0 vunpckhpd %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[1],xmm1[1] vmovaps %xmm0, 0xb60(%rsp) jmp 0x61cc1f jmp 0x61cc21 vmovaps 0xb50(%rsp), %xmm1 vmovaps 0xb40(%rsp), %xmm0 vmovaps %xmm1, 0x1a50(%rsp) vmovaps %xmm0, 0x1a40(%rsp) vmovaps 0x1a50(%rsp), %xmm0 vmovaps 0x1a40(%rsp), %xmm1 vunpcklps %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1] vmovaps %xmm0, 0xaa0(%rsp) vmovaps 0xb30(%rsp), %xmm1 vmovaps 0xb20(%rsp), %xmm0 vmovaps %xmm1, 0x1a30(%rsp) vmovaps %xmm0, 0x1a20(%rsp) vmovaps 0x1a30(%rsp), %xmm0 vmovaps 0x1a20(%rsp), %xmm1 vunpcklps %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1] vmovaps %xmm0, 0xac0(%rsp) vmovaps 0xb50(%rsp), %xmm1 vmovaps 0xb40(%rsp), %xmm0 vmovaps %xmm1, 0x1bf0(%rsp) vmovaps %xmm0, 0x1be0(%rsp) vmovaps 0x1bf0(%rsp), %xmm0 vmovaps 0x1be0(%rsp), %xmm1 vunpckhps %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[2],xmm1[2],xmm0[3],xmm1[3] vmovaps %xmm0, 0xab0(%rsp) vmovaps 0xb30(%rsp), %xmm1 vmovaps 0xb20(%rsp), %xmm0 vmovaps %xmm1, 0x1bd0(%rsp) vmovaps %xmm0, 0x1bc0(%rsp) vmovaps 0x1bd0(%rsp), %xmm0 vmovaps 0x1bc0(%rsp), %xmm1 vunpckhps %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[2],xmm1[2],xmm0[3],xmm1[3] vmovaps %xmm0, 0xad0(%rsp) vmovaps 0xaa0(%rsp), %xmm1 vmovaps 0xac0(%rsp), %xmm0 vmovaps %xmm1, 0x1d70(%rsp) vmovaps %xmm0, 0x1d60(%rsp) vmovapd 0x1d70(%rsp), %xmm0 vmovapd 0x1d60(%rsp), %xmm1 vunpcklpd %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0] vmovapd %xmm0, 0xb50(%rsp) vmovaps 0xac0(%rsp), %xmm1 vmovaps 
0xaa0(%rsp), %xmm0 vmovaps %xmm1, 0x1ef0(%rsp) vmovaps %xmm0, 0x1ee0(%rsp) vmovapd 0x1ef0(%rsp), %xmm1 vmovapd 0x1ee0(%rsp), %xmm0 vunpckhpd %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[1],xmm1[1] vmovapd %xmm0, 0xb40(%rsp) vmovaps 0xab0(%rsp), %xmm1 vmovaps 0xad0(%rsp), %xmm0 vmovaps %xmm1, 0x1d50(%rsp) vmovaps %xmm0, 0x1d40(%rsp) vmovapd 0x1d50(%rsp), %xmm0 vmovapd 0x1d40(%rsp), %xmm1 vunpcklpd %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0] vmovapd %xmm0, 0xb30(%rsp) vmovaps 0xad0(%rsp), %xmm1 vmovaps 0xab0(%rsp), %xmm0 vmovaps %xmm1, 0x1ed0(%rsp) vmovaps %xmm0, 0x1ec0(%rsp) vmovapd 0x1ed0(%rsp), %xmm1 vmovapd 0x1ec0(%rsp), %xmm0 vunpckhpd %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[1],xmm1[1] vmovaps %xmm0, 0xb20(%rsp) jmp 0x61ce3b movq 0xbb8(%rsp), %rax vmovaps 0xb90(%rsp), %xmm0 movq %rax, 0x1738(%rsp) vmovaps %xmm0, 0x1720(%rsp) vmovaps 0x1720(%rsp), %xmm0 movq 0x1738(%rsp), %rax vmovaps %xmm0, (%rax) movq 0xbb8(%rsp), %rax addq $0x10, %rax vmovaps 0xb50(%rsp), %xmm0 movq %rax, 0x1718(%rsp) vmovaps %xmm0, 0x1700(%rsp) vmovaps 0x1700(%rsp), %xmm0 movq 0x1718(%rsp), %rax vmovaps %xmm0, (%rax) movq 0xbb8(%rsp), %rax addq $0x20, %rax vmovaps 0xb80(%rsp), %xmm0 movq %rax, 0x16f8(%rsp) vmovaps %xmm0, 0x16e0(%rsp) vmovaps 0x16e0(%rsp), %xmm0 movq 0x16f8(%rsp), %rax vmovaps %xmm0, (%rax) movq 0xbb8(%rsp), %rax addq $0x30, %rax vmovaps 0xb40(%rsp), %xmm0 movq %rax, 0x16d8(%rsp) vmovaps %xmm0, 0x16c0(%rsp) vmovaps 0x16c0(%rsp), %xmm0 movq 0x16d8(%rsp), %rax vmovaps %xmm0, (%rax) movq 0xbb8(%rsp), %rax addq $0x40, %rax vmovaps 0xb70(%rsp), %xmm0 movq %rax, 0x16b8(%rsp) vmovaps %xmm0, 0x16a0(%rsp) vmovaps 0x16a0(%rsp), %xmm0 movq 0x16b8(%rsp), %rax vmovaps %xmm0, (%rax) movq 0xbb8(%rsp), %rax addq $0x50, %rax vmovaps 0xb30(%rsp), %xmm0 movq %rax, 0x1698(%rsp) vmovaps %xmm0, 0x1680(%rsp) vmovaps 0x1680(%rsp), %xmm0 movq 0x1698(%rsp), %rax vmovaps %xmm0, (%rax) movq 0xbb8(%rsp), %rax addq $0x60, %rax vmovaps 0xb60(%rsp), %xmm0 movq %rax, 0x1678(%rsp) vmovaps %xmm0, 0x1660(%rsp) vmovaps 
0x1660(%rsp), %xmm0 movq 0x1678(%rsp), %rax vmovaps %xmm0, (%rax) movq 0xbb8(%rsp), %rax addq $0x70, %rax vmovaps 0xb20(%rsp), %xmm0 movq %rax, 0x1658(%rsp) vmovaps %xmm0, 0x1640(%rsp) vmovaps 0x1640(%rsp), %xmm0 movq 0x1658(%rsp), %rax vmovaps %xmm0, (%rax) movq 0xe30(%rsp), %rax movq 0x40(%rax), %rax shlq $0x2, %rax shlq $0x2, %rax addq 0xbb0(%rsp), %rax movq %rax, 0xbb0(%rsp) movq 0xbb8(%rsp), %rax addq $0x80, %rax movq %rax, 0xbb8(%rsp) movl 0xbac(%rsp), %eax addl $0x1, %eax movl %eax, 0xbac(%rsp) jmp 0x61c8ad jmp 0x61d061 movl 0xd5c(%rsp), %eax addl $0x8, %eax movl %eax, 0xd5c(%rsp) jmp 0x61c7a9 jmp 0x61d079 movl 0xd5c(%rsp), %eax addl $0x3, %eax cmpl 0xe0c(%rsp), %eax jge 0x61d5ce movl 0xd5c(%rsp), %eax movl $0xc, %ecx cltd idivl %ecx movl %eax, 0x340(%rsp) movl 0xd5c(%rsp), %eax movl $0xc, %ecx cltd idivl %ecx movl %edx, %eax movl $0x8, %ecx cltd idivl %ecx movl %eax, %ecx movl 0x340(%rsp), %eax addl %ecx, %eax movl %eax, 0x344(%rsp) movl 0xd5c(%rsp), %eax movl $0xc, %ecx cltd idivl %ecx movl %edx, %eax movl $0x8, %ecx cltd idivl %ecx movl %edx, %eax movl $0x4, %ecx cltd idivl %ecx movl %eax, %ecx movl 0x344(%rsp), %eax addl %ecx, %eax leaq 0xd60(%rsp), %rcx movq %rcx, 0x11d8(%rsp) movl %eax, 0x11d4(%rsp) movq 0x11d8(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0x11d4(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0xa98(%rsp) movq 0xe30(%rsp), %rax movq %rax, 0x1110(%rsp) movq 0x1110(%rsp), %rax movq (%rax), %rax movq %rax, 0x348(%rsp) movq 0x348(%rsp), %rax movq %rax, 0xa90(%rsp) movl 0xda8(%rsp), %ecx imull 0xe0c(%rsp), %ecx addl 0xd5c(%rsp), %ecx shll $0x2, %ecx movq 0xa90(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0xa90(%rsp) movl $0x0, 0xa8c(%rsp) movl 0xa8c(%rsp), %eax cmpl 0xe04(%rsp), %eax jge 0x61d5b6 movq 0xa90(%rsp), %rax movq %rax, 0x1918(%rsp) movq 0x1918(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0xa70(%rsp) movq 0xa90(%rsp), %rax addq $0x10, %rax movq %rax, 
0x1910(%rsp) movq 0x1910(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0xa60(%rsp) movq 0xa90(%rsp), %rax addq $0x20, %rax movq %rax, 0x1908(%rsp) movq 0x1908(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0xa50(%rsp) movq 0xa90(%rsp), %rax addq $0x30, %rax movq %rax, 0x1900(%rsp) movq 0x1900(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0xa40(%rsp) vmovaps 0xa70(%rsp), %xmm1 vmovaps 0xa60(%rsp), %xmm0 vmovaps %xmm1, 0x1a10(%rsp) vmovaps %xmm0, 0x1a00(%rsp) vmovaps 0x1a10(%rsp), %xmm0 vmovaps 0x1a00(%rsp), %xmm1 vunpcklps %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1] vmovaps %xmm0, 0xa00(%rsp) vmovaps 0xa50(%rsp), %xmm1 vmovaps 0xa40(%rsp), %xmm0 vmovaps %xmm1, 0x19f0(%rsp) vmovaps %xmm0, 0x19e0(%rsp) vmovaps 0x19f0(%rsp), %xmm0 vmovaps 0x19e0(%rsp), %xmm1 vunpcklps %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1] vmovaps %xmm0, 0xa20(%rsp) vmovaps 0xa70(%rsp), %xmm1 vmovaps 0xa60(%rsp), %xmm0 vmovaps %xmm1, 0x1bb0(%rsp) vmovaps %xmm0, 0x1ba0(%rsp) vmovaps 0x1bb0(%rsp), %xmm0 vmovaps 0x1ba0(%rsp), %xmm1 vunpckhps %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[2],xmm1[2],xmm0[3],xmm1[3] vmovaps %xmm0, 0xa10(%rsp) vmovaps 0xa50(%rsp), %xmm1 vmovaps 0xa40(%rsp), %xmm0 vmovaps %xmm1, 0x1b90(%rsp) vmovaps %xmm0, 0x1b80(%rsp) vmovaps 0x1b90(%rsp), %xmm0 vmovaps 0x1b80(%rsp), %xmm1 vunpckhps %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[2],xmm1[2],xmm0[3],xmm1[3] vmovaps %xmm0, 0xa30(%rsp) vmovaps 0xa00(%rsp), %xmm1 vmovaps 0xa20(%rsp), %xmm0 vmovaps %xmm1, 0x1d30(%rsp) vmovaps %xmm0, 0x1d20(%rsp) vmovapd 0x1d30(%rsp), %xmm0 vmovapd 0x1d20(%rsp), %xmm1 vunpcklpd %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0] vmovapd %xmm0, 0xa70(%rsp) vmovaps 0xa20(%rsp), %xmm1 vmovaps 0xa00(%rsp), %xmm0 vmovaps %xmm1, 0x1eb0(%rsp) vmovaps %xmm0, 0x1ea0(%rsp) vmovapd 0x1eb0(%rsp), %xmm1 vmovapd 0x1ea0(%rsp), %xmm0 vunpckhpd %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[1],xmm1[1] vmovapd %xmm0, 0xa60(%rsp) vmovaps 0xa10(%rsp), %xmm1 vmovaps 0xa30(%rsp), %xmm0 vmovaps %xmm1, 0x1d10(%rsp) 
vmovaps %xmm0, 0x1d00(%rsp) vmovapd 0x1d10(%rsp), %xmm0 vmovapd 0x1d00(%rsp), %xmm1 vunpcklpd %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0] vmovapd %xmm0, 0xa50(%rsp) vmovaps 0xa30(%rsp), %xmm1 vmovaps 0xa10(%rsp), %xmm0 vmovaps %xmm1, 0x1e90(%rsp) vmovaps %xmm0, 0x1e80(%rsp) vmovapd 0x1e90(%rsp), %xmm1 vmovapd 0x1e80(%rsp), %xmm0 vunpckhpd %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[1],xmm1[1] vmovaps %xmm0, 0xa40(%rsp) jmp 0x61d480 movq 0xa98(%rsp), %rax vmovaps 0xa70(%rsp), %xmm0 movq %rax, 0x1638(%rsp) vmovaps %xmm0, 0x1620(%rsp) vmovaps 0x1620(%rsp), %xmm0 movq 0x1638(%rsp), %rax vmovaps %xmm0, (%rax) movq 0xa98(%rsp), %rax addq $0x10, %rax vmovaps 0xa60(%rsp), %xmm0 movq %rax, 0x1618(%rsp) vmovaps %xmm0, 0x1600(%rsp) vmovaps 0x1600(%rsp), %xmm0 movq 0x1618(%rsp), %rax vmovaps %xmm0, (%rax) movq 0xa98(%rsp), %rax addq $0x20, %rax vmovaps 0xa50(%rsp), %xmm0 movq %rax, 0x15f8(%rsp) vmovaps %xmm0, 0x15e0(%rsp) vmovaps 0x15e0(%rsp), %xmm0 movq 0x15f8(%rsp), %rax vmovaps %xmm0, (%rax) movq 0xa98(%rsp), %rax addq $0x30, %rax vmovaps 0xa40(%rsp), %xmm0 movq %rax, 0x15d8(%rsp) vmovaps %xmm0, 0x15c0(%rsp) vmovaps 0x15c0(%rsp), %xmm0 movq 0x15d8(%rsp), %rax vmovaps %xmm0, (%rax) movq 0xe30(%rsp), %rax movq 0x40(%rax), %rax shlq $0x2, %rax shlq $0x2, %rax addq 0xa90(%rsp), %rax movq %rax, 0xa90(%rsp) movq 0xa98(%rsp), %rax addq $0x40, %rax movq %rax, 0xa98(%rsp) movl 0xa8c(%rsp), %eax addl $0x1, %eax movl %eax, 0xa8c(%rsp) jmp 0x61d1b2 jmp 0x61d5b8 movl 0xd5c(%rsp), %eax addl $0x4, %eax movl %eax, 0xd5c(%rsp) jmp 0x61d079 jmp 0x61d5d0 movl 0xd5c(%rsp), %eax addl $0x1, %eax cmpl 0xe0c(%rsp), %eax jge 0x61d8fe movl 0xd5c(%rsp), %eax movl $0xc, %ecx cltd idivl %ecx movl %eax, 0x32c(%rsp) movl 0xd5c(%rsp), %eax movl $0xc, %ecx cltd idivl %ecx movl %edx, %eax movl $0x8, %ecx cltd idivl %ecx movl %eax, %ecx movl 0x32c(%rsp), %eax addl %ecx, %eax movl %eax, 0x330(%rsp) movl 0xd5c(%rsp), %eax movl $0xc, %ecx cltd idivl %ecx movl %edx, %eax movl $0x8, %ecx cltd idivl %ecx movl %edx, %eax 
movl $0x4, %ecx cltd idivl %ecx movl %eax, %ecx movl 0x330(%rsp), %eax addl %ecx, %eax movl %eax, 0x334(%rsp) movl 0xd5c(%rsp), %eax movl $0xc, %ecx cltd idivl %ecx movl %edx, %eax movl $0x4, %ecx cltd idivl %ecx movl %edx, %eax movl $0x2, %ecx cltd idivl %ecx movl %eax, %ecx movl 0x334(%rsp), %eax addl %ecx, %eax leaq 0xd60(%rsp), %rcx movq %rcx, 0x11c8(%rsp) movl %eax, 0x11c4(%rsp) movq 0x11c8(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0x11c4(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x9f8(%rsp) movq 0xe30(%rsp), %rax movq %rax, 0x1108(%rsp) movq 0x1108(%rsp), %rax movq (%rax), %rax movq %rax, 0x338(%rsp) movq 0x338(%rsp), %rax movq %rax, 0x9f0(%rsp) movl 0xda8(%rsp), %ecx imull 0xe0c(%rsp), %ecx addl 0xd5c(%rsp), %ecx shll $0x2, %ecx movq 0x9f0(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x9f0(%rsp) movl $0x0, 0x9ec(%rsp) movl 0x9ec(%rsp), %eax cmpl 0xe04(%rsp), %eax jge 0x61d8e6 movq 0x9f0(%rsp), %rax movq %rax, 0x18f8(%rsp) movq 0x18f8(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x9d0(%rsp) movq 0x9f0(%rsp), %rax addq $0x10, %rax movq %rax, 0x18f0(%rsp) movq 0x18f0(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x9c0(%rsp) vmovaps 0x9d0(%rsp), %xmm1 vmovaps 0x9c0(%rsp), %xmm0 vmovaps %xmm1, 0x19d0(%rsp) vmovaps %xmm0, 0x19c0(%rsp) vmovaps 0x19d0(%rsp), %xmm0 vmovaps 0x19c0(%rsp), %xmm1 vunpcklps %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1] vmovaps %xmm0, 0x9b0(%rsp) vmovaps 0x9d0(%rsp), %xmm1 vmovaps 0x9c0(%rsp), %xmm0 vmovaps %xmm1, 0x1b70(%rsp) vmovaps %xmm0, 0x1b60(%rsp) vmovaps 0x1b70(%rsp), %xmm0 vmovaps 0x1b60(%rsp), %xmm1 vunpckhps %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[2],xmm1[2],xmm0[3],xmm1[3] vmovaps %xmm0, 0x9a0(%rsp) movq 0x9f8(%rsp), %rax vmovaps 0x9b0(%rsp), %xmm0 movq %rax, 0x15b8(%rsp) vmovaps %xmm0, 0x15a0(%rsp) vmovaps 0x15a0(%rsp), %xmm0 movq 0x15b8(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x9f8(%rsp), %rax addq $0x10, %rax vmovaps 
0x9a0(%rsp), %xmm0 movq %rax, 0x1598(%rsp) vmovaps %xmm0, 0x1580(%rsp) vmovaps 0x1580(%rsp), %xmm0 movq 0x1598(%rsp), %rax vmovaps %xmm0, (%rax) movq 0xe30(%rsp), %rax movq 0x40(%rax), %rax shlq $0x2, %rax shlq $0x2, %rax addq 0x9f0(%rsp), %rax movq %rax, 0x9f0(%rsp) movq 0x9f8(%rsp), %rax addq $0x20, %rax movq %rax, 0x9f8(%rsp) movl 0x9ec(%rsp), %eax addl $0x1, %eax movl %eax, 0x9ec(%rsp) jmp 0x61d73e jmp 0x61d8e8 movl 0xd5c(%rsp), %eax addl $0x2, %eax movl %eax, 0xd5c(%rsp) jmp 0x61d5d0 jmp 0x61d900 movl 0xd5c(%rsp), %eax cmpl 0xe0c(%rsp), %eax jge 0x61db6a movl 0xd5c(%rsp), %eax movl $0xc, %ecx cltd idivl %ecx movl %eax, 0x310(%rsp) movl 0xd5c(%rsp), %eax movl $0xc, %ecx cltd idivl %ecx movl %edx, %eax movl $0x8, %ecx cltd idivl %ecx movl %eax, %ecx movl 0x310(%rsp), %eax addl %ecx, %eax movl %eax, 0x314(%rsp) movl 0xd5c(%rsp), %eax movl $0xc, %ecx cltd idivl %ecx movl %edx, %eax movl $0x8, %ecx cltd idivl %ecx movl %edx, %eax movl $0x4, %ecx cltd idivl %ecx movl %eax, %ecx movl 0x314(%rsp), %eax addl %ecx, %eax movl %eax, 0x318(%rsp) movl 0xd5c(%rsp), %eax movl $0xc, %ecx cltd idivl %ecx movl %edx, %eax movl $0x4, %ecx cltd idivl %ecx movl %edx, %eax movl $0x2, %ecx cltd idivl %ecx movl %eax, %ecx movl 0x318(%rsp), %eax addl %ecx, %eax movl %eax, 0x31c(%rsp) movl 0xd5c(%rsp), %eax movl $0xc, %ecx cltd idivl %ecx movl %edx, %eax movl $0x2, %ecx cltd idivl %ecx movl 0x31c(%rsp), %eax addl %edx, %eax leaq 0xd60(%rsp), %rcx movq %rcx, 0x11b8(%rsp) movl %eax, 0x11b4(%rsp) movq 0x11b8(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0x11b4(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x998(%rsp) movq 0xe30(%rsp), %rax movq %rax, 0x1100(%rsp) movq 0x1100(%rsp), %rax movq (%rax), %rax movq %rax, 0x320(%rsp) movq 0x320(%rsp), %rax movq %rax, 0x990(%rsp) movl 0xda8(%rsp), %ecx imull 0xe0c(%rsp), %ecx addl 0xd5c(%rsp), %ecx shll $0x2, %ecx movq 0x990(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 
0x990(%rsp) movl $0x0, 0x98c(%rsp) movl 0x98c(%rsp), %eax cmpl 0xe04(%rsp), %eax jge 0x61db52 movq 0x990(%rsp), %rax movq %rax, 0x18e8(%rsp) movq 0x18e8(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x970(%rsp) movq 0x998(%rsp), %rax vmovaps 0x970(%rsp), %xmm0 movq %rax, 0x1578(%rsp) vmovaps %xmm0, 0x1560(%rsp) vmovaps 0x1560(%rsp), %xmm0 movq 0x1578(%rsp), %rax vmovaps %xmm0, (%rax) movq 0xe30(%rsp), %rax movq 0x40(%rax), %rax shlq $0x2, %rax shlq $0x2, %rax addq 0x990(%rsp), %rax movq %rax, 0x990(%rsp) movq 0x998(%rsp), %rax addq $0x10, %rax movq %rax, 0x998(%rsp) movl 0x98c(%rsp), %eax addl $0x1, %eax movl %eax, 0x98c(%rsp) jmp 0x61da94 jmp 0x61db54 movl 0xd5c(%rsp), %eax addl $0x1, %eax movl %eax, 0xd5c(%rsp) jmp 0x61d900 leaq 0xd60(%rsp), %rax movq %rax, 0xe48(%rsp) movq 0xe48(%rsp), %rax movq %rax, 0xf90(%rsp) movq 0xf90(%rsp), %rax movq %rax, 0x308(%rsp) cmpq $0x0, 0x8(%rax) je 0x61dc32 movq 0x308(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xf8c(%rsp) # imm = 0xFFFFFFFF movl 0xf8c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xf88(%rsp) cmpl $0x1, 0xf88(%rsp) jne 0x61dc32 movq 0x308(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x61dc03 movq 0x308(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x61dc01 jmp 0x61dc30 movq 0x308(%rsp), %rax movq (%rax), %rax movq %rax, 0x1148(%rsp) cmpq $0x0, 0x1148(%rsp) je 0x61dc2e movq 0x1148(%rsp), %rdi callq 0x5e480 jmp 0x61dc30 jmp 0x61dc32 movq 0x308(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x61dc8d movq %rax, %rdi callq 0x5fc90 jmp 0x61dc8f movl 0xda8(%rsp), %eax addl $0x1, %eax movl %eax, 0xda8(%rsp) jmp 0x61b82a leaq 0x928(%rsp), %rax movq %rax, 0xe40(%rsp) movq 0xe40(%rsp), %rax movq %rax, 0x300(%rsp) movq $0x0, (%rax) movq $0x0, 0x8(%rax) movq $0x0, 0x10(%rax) movl 
$0x0, 0x18(%rax) movq $0x0, 0x20(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq 0xe30(%rsp), %rax movq %rax, 0xeb8(%rsp) leaq 0x928(%rsp), %rax movq %rax, 0xeb0(%rsp) movq 0xeb8(%rsp), %rax movq %rax, 0x2f8(%rsp) cmpq 0xeb0(%rsp), %rax jne 0x61dd65 movq 0x2f8(%rsp), %rax movq %rax, 0xec0(%rsp) jmp 0x61df58 movq 0xeb0(%rsp), %rax cmpq $0x0, 0x8(%rax) je 0x61dd9d movq 0xeb0(%rsp), %rax movq 0x8(%rax), %rcx movl $0x1, 0xeac(%rsp) movl 0xeac(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xea8(%rsp) movq 0x2f8(%rsp), %rax movq %rax, 0xed0(%rsp) movq 0xed0(%rsp), %rax movq %rax, 0x2f0(%rsp) cmpq $0x0, 0x8(%rax) je 0x61de55 movq 0x2f0(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xecc(%rsp) # imm = 0xFFFFFFFF movl 0xecc(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xec8(%rsp) cmpl $0x1, 0xec8(%rsp) jne 0x61de55 movq 0x2f0(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x61de26 movq 0x2f0(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x61de24 jmp 0x61de53 movq 0x2f0(%rsp), %rax movq (%rax), %rax movq %rax, 0x11a8(%rsp) cmpq $0x0, 0x11a8(%rsp) je 0x61de51 movq 0x11a8(%rsp), %rdi callq 0x5e480 jmp 0x61de53 jmp 0x61de55 movq 0x2f0(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) movq 0x2f8(%rsp), %rax movq 0xeb0(%rsp), %rcx movq (%rcx), %rcx movq %rcx, (%rax) movq 0xeb0(%rsp), %rcx movq 0x8(%rcx), %rcx movq %rcx, 0x8(%rax) movq 0xeb0(%rsp), %rcx movq 0x10(%rcx), %rcx movq %rcx, 0x10(%rax) movq 0xeb0(%rsp), %rcx movl 0x18(%rcx), %ecx movl %ecx, 0x18(%rax) movq 0xeb0(%rsp), %rcx movq 0x20(%rcx), %rcx movq %rcx, 0x20(%rax) movq 0xeb0(%rsp), %rcx movl 0x28(%rcx), %ecx movl %ecx, 0x28(%rax) movq 0xeb0(%rsp), %rcx movl 0x2c(%rcx), %ecx movl %ecx, 
0x2c(%rax) movq 0xeb0(%rsp), %rcx movl 0x30(%rcx), %ecx movl %ecx, 0x30(%rax) movq 0xeb0(%rsp), %rcx movl 0x34(%rcx), %ecx movl %ecx, 0x34(%rax) movq 0xeb0(%rsp), %rcx movl 0x38(%rcx), %ecx movl %ecx, 0x38(%rax) movq 0xeb0(%rsp), %rcx movq 0x40(%rcx), %rcx movq %rcx, 0x40(%rax) movq %rax, 0xec0(%rsp) leaq 0x928(%rsp), %rax movq %rax, 0xe58(%rsp) movq 0xe58(%rsp), %rax movq %rax, 0xf70(%rsp) movq 0xf70(%rsp), %rax movq %rax, 0x2e8(%rsp) cmpq $0x0, 0x8(%rax) je 0x61e020 movq 0x2e8(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xf6c(%rsp) # imm = 0xFFFFFFFF movl 0xf6c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xf68(%rsp) cmpl $0x1, 0xf68(%rsp) jne 0x61e020 movq 0x2e8(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x61dff1 movq 0x2e8(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x61dfef jmp 0x61e01e movq 0x2e8(%rsp), %rax movq (%rax), %rax movq %rax, 0x1158(%rsp) cmpq $0x0, 0x1158(%rsp) je 0x61e01c movq 0x1158(%rsp), %rdi callq 0x5e480 jmp 0x61e01e jmp 0x61e020 movq 0x2e8(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x61e07b movq %rax, %rdi callq 0x5fc90 movq 0xe18(%rsp), %rdi movl 0xe0c(%rsp), %esi movl 0xe08(%rsp), %edx movl 0xe2c(%rsp), %ecx movq 0xe10(%rsp), %rax movq 0x10(%rax), %r8 movq %rsp, %rax movq %r8, (%rax) movl $0x10, %r8d movl $0x4, %r9d callq 0x62060 jmp 0x61e0bd movl $0x0, 0x924(%rsp) movl 0x924(%rsp), %eax cmpl 0xe2c(%rsp), %eax jge 0x621b4d movq 0xe18(%rsp), %rcx movl 0x924(%rsp), %eax leaq 0x8d0(%rsp), %rdx movq %rdx, 0xfd8(%rsp) movq %rcx, 0xfd0(%rsp) movl %eax, 0xfcc(%rsp) movq 0xfd0(%rsp), %rax movq %rax, 0x2d8(%rsp) movb $0x0, 0xfcb(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0xfcc(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), 
%rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x8d0(%rsp), %r10 movq %r10, 0x2fd8(%rsp) movl %r9d, 0x2fd4(%rsp) movl %r8d, 0x2fd0(%rsp) movl %edi, 0x2fcc(%rsp) movq %rsi, 0x2fc0(%rsp) movq %rdx, 0x2fb8(%rsp) movl %ecx, 0x2fb4(%rsp) movq %rax, 0x2fa8(%rsp) movq 0x2fd8(%rsp), %rcx movq %rcx, 0x2e0(%rsp) movq 0x2fc0(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x2fb8(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x2fb4(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x2fa8(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x2fd4(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x2fd0(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x2fcc(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x3030(%rsp) movl $0x10, 0x302c(%rsp) movq 0x3030(%rsp), %rax movslq 0x302c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x302c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rcx movq 0x2e0(%rsp), %rax movq %rcx, 0x40(%rax) movq 0x2d8(%rsp), %rax movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x8f8(%rsp) cmpl $0x4, 0x28(%rax) jne 0x61e2a0 movq 0x2d8(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x910(%rsp) movb $0x1, 0xfcb(%rsp) testb $0x1, 0xfcb(%rsp) jne 0x61e3d9 leaq 0x8d0(%rsp), %rax movq %rax, 0xfe0(%rsp) movq 0xfe0(%rsp), %rax movq %rax, 0xff0(%rsp) movq 0xff0(%rsp), %rax movq %rax, 0x2d0(%rsp) cmpq $0x0, 0x8(%rax) je 0x61e37e movq 0x2d0(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xfec(%rsp) # imm = 0xFFFFFFFF movl 0xfec(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xfe8(%rsp) cmpl $0x1, 0xfe8(%rsp) jne 0x61e37e movq 0x2d0(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x61e34f movq 0x2d0(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x61e34d jmp 0x61e37c movq 0x2d0(%rsp), %rax 
movq (%rax), %rax movq %rax, 0x1138(%rsp) cmpq $0x0, 0x1138(%rsp) je 0x61e37a movq 0x1138(%rsp), %rdi callq 0x5e480 jmp 0x61e37c jmp 0x61e37e movq 0x2d0(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x61e3d9 movq %rax, %rdi callq 0x5fc90 jmp 0x61e3db leaq 0x8d0(%rsp), %rax movq %rax, 0x1028(%rsp) movq 0x1028(%rsp), %rax movq (%rax), %rax movq %rax, 0x2c8(%rsp) leaq 0x8d0(%rsp), %rax movq %rax, 0xe68(%rsp) movq 0xe68(%rsp), %rax movq %rax, 0xf50(%rsp) movq 0xf50(%rsp), %rax movq %rax, 0x2c0(%rsp) cmpq $0x0, 0x8(%rax) je 0x61e4c6 movq 0x2c0(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xf4c(%rsp) # imm = 0xFFFFFFFF movl 0xf4c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xf48(%rsp) cmpl $0x1, 0xf48(%rsp) jne 0x61e4c6 movq 0x2c0(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x61e497 movq 0x2c0(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x61e495 jmp 0x61e4c4 movq 0x2c0(%rsp), %rax movq (%rax), %rax movq %rax, 0x1168(%rsp) cmpq $0x0, 0x1168(%rsp) je 0x61e4c2 movq 0x1168(%rsp), %rdi callq 0x5e480 jmp 0x61e4c4 jmp 0x61e4c6 movq 0x2c0(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x61e521 movq %rax, %rdi callq 0x5fc90 movq 0x2c8(%rsp), %rax movq %rax, 0x918(%rsp) movq 0xe20(%rsp), %rcx movl 0x924(%rsp), %eax leaq 0x888(%rsp), %rdx movq %rdx, 0x10e0(%rsp) movq %rcx, 0x10d8(%rsp) movl %eax, 0x10d4(%rsp) movq 0x10d8(%rsp), %rax movq %rax, 0x2b0(%rsp) movb $0x0, 0x10d3(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x10d4(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq 
%rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x888(%rsp), %r10 movq %r10, 0x2f68(%rsp) movl %r9d, 0x2f64(%rsp) movl %r8d, 0x2f60(%rsp) movl %edi, 0x2f5c(%rsp) movq %rsi, 0x2f50(%rsp) movq %rdx, 0x2f48(%rsp) movl %ecx, 0x2f44(%rsp) movq %rax, 0x2f38(%rsp) movq 0x2f68(%rsp), %rcx movq %rcx, 0x2b8(%rsp) movq 0x2f50(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x2f48(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x2f44(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x2f38(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x2f64(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x2f60(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x2f5c(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x3050(%rsp) movl $0x10, 0x304c(%rsp) movq 0x3050(%rsp), %rax movslq 0x304c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x304c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rcx movq 0x2b8(%rsp), %rax movq %rcx, 0x40(%rax) movq 0x2b0(%rsp), %rax movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x8b0(%rsp) cmpl $0x4, 0x28(%rax) jne 0x61e6f5 movq 0x2b0(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x8c8(%rsp) movb $0x1, 0x10d3(%rsp) testb $0x1, 0x10d3(%rsp) jne 0x61e82e leaq 0x888(%rsp), %rax movq %rax, 0x10e8(%rsp) movq 0x10e8(%rsp), %rax movq %rax, 0x10f8(%rsp) movq 0x10f8(%rsp), %rax movq %rax, 0x2a8(%rsp) cmpq $0x0, 0x8(%rax) je 0x61e7d3 movq 0x2a8(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x10f4(%rsp) # imm = 0xFFFFFFFF movl 0x10f4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x10f0(%rsp) cmpl $0x1, 0x10f0(%rsp) jne 0x61e7d3 movq 0x2a8(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x61e7a4 movq 0x2a8(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x61e7a2 jmp 0x61e7d1 movq 0x2a8(%rsp), %rax 
movq (%rax), %rax movq %rax, 0x1128(%rsp) cmpq $0x0, 0x1128(%rsp) je 0x61e7cf movq 0x1128(%rsp), %rdi callq 0x5e480 jmp 0x61e7d1 jmp 0x61e7d3 movq 0x2a8(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x61e82e movq %rax, %rdi callq 0x5fc90 jmp 0x61e830 movl $0x0, 0x884(%rsp) movl 0x884(%rsp), %eax cmpl 0xe08(%rsp), %eax jge 0x621a24 movl 0x884(%rsp), %eax leaq 0x838(%rsp), %rcx movq %rcx, 0xfa8(%rsp) leaq 0xdb8(%rsp), %rcx movq %rcx, 0xfa0(%rsp) movl %eax, 0xf9c(%rsp) movq 0xfa0(%rsp), %rax movq %rax, 0x298(%rsp) movb $0x0, 0xf9b(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0xf9c(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x838(%rsp), %r10 movq %r10, 0x3010(%rsp) movl %r9d, 0x300c(%rsp) movl %r8d, 0x3008(%rsp) movl %edi, 0x3004(%rsp) movq %rsi, 0x2ff8(%rsp) movq %rdx, 0x2ff0(%rsp) movl %ecx, 0x2fec(%rsp) movq %rax, 0x2fe0(%rsp) movq 0x3010(%rsp), %rcx movq %rcx, 0x2a0(%rsp) movq 0x2ff8(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x2ff0(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x2fec(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x2fe0(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x300c(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x3008(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x3004(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x3020(%rsp) movl $0x10, 0x301c(%rsp) movq 0x3020(%rsp), %rax movslq 0x301c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x301c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rcx movq 0x2a0(%rsp), %rax movq %rcx, 
0x40(%rax) movq 0x298(%rsp), %rax movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x860(%rsp) cmpl $0x4, 0x28(%rax) jne 0x61ea13 movq 0x298(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x878(%rsp) movb $0x1, 0xf9b(%rsp) testb $0x1, 0xf9b(%rsp) jne 0x61eb4c leaq 0x838(%rsp), %rax movq %rax, 0xfb0(%rsp) movq 0xfb0(%rsp), %rax movq %rax, 0xfc0(%rsp) movq 0xfc0(%rsp), %rax movq %rax, 0x290(%rsp) cmpq $0x0, 0x8(%rax) je 0x61eaf1 movq 0x290(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xfbc(%rsp) # imm = 0xFFFFFFFF movl 0xfbc(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xfb8(%rsp) cmpl $0x1, 0xfb8(%rsp) jne 0x61eaf1 movq 0x290(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x61eac2 movq 0x290(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x61eac0 jmp 0x61eaef movq 0x290(%rsp), %rax movq (%rax), %rax movq %rax, 0x1140(%rsp) cmpq $0x0, 0x1140(%rsp) je 0x61eaed movq 0x1140(%rsp), %rdi callq 0x5e480 jmp 0x61eaef jmp 0x61eaf1 movq 0x290(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x61eb4c movq %rax, %rdi callq 0x5fc90 jmp 0x61eb4e movl $0x0, 0x834(%rsp) movl 0x834(%rsp), %eax addl $0xb, %eax cmpl 0xe0c(%rsp), %eax jge 0x61fdb6 movl 0x834(%rsp), %eax movl $0xc, %ecx cltd idivl %ecx leaq 0x838(%rsp), %rcx movq %rcx, 0x10c8(%rsp) movl %eax, 0x10c4(%rsp) movq 0x10c8(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0x10c4(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x288(%rsp) movq 0x288(%rsp), %rax movq %rax, 0x828(%rsp) movl 0x884(%rsp), %eax leaq 0x888(%rsp), %rcx movq %rcx, 0x10b8(%rsp) movl %eax, 0x10b4(%rsp) movq 0x10b8(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0x10b4(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq 
%rcx, %rax movq %rax, 0x280(%rsp) movq 0x280(%rsp), %rax movq %rax, 0x820(%rsp) movl 0xe04(%rsp), %eax shll $0x2, %eax movl %eax, 0x81c(%rsp) vxorps %xmm0, %xmm0, %xmm0 vmovaps %xmm0, 0x270(%rsp) vmovaps %xmm0, 0x2f20(%rsp) vmovaps 0x2f20(%rsp), %xmm1 vmovaps %xmm1, 0x800(%rsp) vmovaps %xmm0, 0x2f10(%rsp) vmovaps 0x2f10(%rsp), %xmm1 vmovaps %xmm1, 0x7f0(%rsp) vmovaps %xmm0, 0x2f00(%rsp) vmovaps 0x2f00(%rsp), %xmm1 vmovaps %xmm1, 0x7e0(%rsp) vmovaps %xmm0, 0x2ef0(%rsp) vmovaps 0x2ef0(%rsp), %xmm1 vmovaps %xmm1, 0x7d0(%rsp) vmovaps %xmm0, 0x2ee0(%rsp) vmovaps 0x2ee0(%rsp), %xmm1 vmovaps %xmm1, 0x7c0(%rsp) vmovaps %xmm0, 0x2ed0(%rsp) vmovaps 0x2ed0(%rsp), %xmm1 vmovaps %xmm1, 0x7b0(%rsp) vmovaps %xmm0, 0x2ec0(%rsp) vmovaps 0x2ec0(%rsp), %xmm1 vmovaps %xmm1, 0x7a0(%rsp) vmovaps %xmm0, 0x2eb0(%rsp) vmovaps 0x2eb0(%rsp), %xmm1 vmovaps %xmm1, 0x790(%rsp) vmovaps %xmm0, 0x2ea0(%rsp) vmovaps 0x2ea0(%rsp), %xmm1 vmovaps %xmm1, 0x780(%rsp) vmovaps %xmm0, 0x2e90(%rsp) vmovaps 0x2e90(%rsp), %xmm1 vmovaps %xmm1, 0x770(%rsp) vmovaps %xmm0, 0x2e80(%rsp) vmovaps 0x2e80(%rsp), %xmm1 vmovaps %xmm1, 0x760(%rsp) vmovaps %xmm0, 0x2e70(%rsp) vmovaps 0x2e70(%rsp), %xmm0 vmovaps %xmm0, 0x750(%rsp) movl $0x0, 0x74c(%rsp) movl 0x74c(%rsp), %eax cmpl 0x81c(%rsp), %eax jge 0x61fac2 movq 0x820(%rsp), %rax movq %rax, 0x18e0(%rsp) movq 0x18e0(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x730(%rsp) movq 0x828(%rsp), %rax movq %rax, 0x2358(%rsp) movq 0x2358(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0x2354(%rsp) vbroadcastss 0x2354(%rsp), %xmm0 vmovaps %xmm0, 0x2340(%rsp) vmovaps 0x2340(%rsp), %xmm0 vmovaps %xmm0, 0x720(%rsp) movq 0x828(%rsp), %rax addq $0x4, %rax movq %rax, 0x2338(%rsp) movq 0x2338(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0x2334(%rsp) vbroadcastss 0x2334(%rsp), %xmm0 vmovaps %xmm0, 0x2320(%rsp) vmovaps 0x2320(%rsp), %xmm0 vmovaps %xmm0, 0x710(%rsp) movq 0x828(%rsp), %rax addq $0x8, %rax movq %rax, 0x2318(%rsp) movq 0x2318(%rsp), %rax vmovss (%rax), %xmm0 vmovss 
%xmm0, 0x2314(%rsp) vbroadcastss 0x2314(%rsp), %xmm0 vmovaps %xmm0, 0x2300(%rsp) vmovaps 0x2300(%rsp), %xmm0 vmovaps %xmm0, 0x700(%rsp) movq 0x828(%rsp), %rax addq $0xc, %rax movq %rax, 0x22f8(%rsp) movq 0x22f8(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0x22f4(%rsp) vbroadcastss 0x22f4(%rsp), %xmm0 vmovaps %xmm0, 0x22e0(%rsp) vmovaps 0x22e0(%rsp), %xmm0 vmovaps %xmm0, 0x6f0(%rsp) movq 0x828(%rsp), %rax addq $0x10, %rax movq %rax, 0x22d8(%rsp) movq 0x22d8(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0x22d4(%rsp) vbroadcastss 0x22d4(%rsp), %xmm0 vmovaps %xmm0, 0x22c0(%rsp) vmovaps 0x22c0(%rsp), %xmm0 vmovaps %xmm0, 0x6e0(%rsp) movq 0x828(%rsp), %rax addq $0x14, %rax movq %rax, 0x22b8(%rsp) movq 0x22b8(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0x22b4(%rsp) vbroadcastss 0x22b4(%rsp), %xmm0 vmovaps %xmm0, 0x22a0(%rsp) vmovaps 0x22a0(%rsp), %xmm0 vmovaps %xmm0, 0x6d0(%rsp) movq 0x828(%rsp), %rax addq $0x18, %rax movq %rax, 0x2298(%rsp) movq 0x2298(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0x2294(%rsp) vbroadcastss 0x2294(%rsp), %xmm0 vmovaps %xmm0, 0x2280(%rsp) vmovaps 0x2280(%rsp), %xmm0 vmovaps %xmm0, 0x6c0(%rsp) movq 0x828(%rsp), %rax addq $0x1c, %rax movq %rax, 0x2278(%rsp) movq 0x2278(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0x2274(%rsp) vbroadcastss 0x2274(%rsp), %xmm0 vmovaps %xmm0, 0x2260(%rsp) vmovaps 0x2260(%rsp), %xmm0 vmovaps %xmm0, 0x6b0(%rsp) movq 0x828(%rsp), %rax addq $0x20, %rax movq %rax, 0x2258(%rsp) movq 0x2258(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0x2254(%rsp) vbroadcastss 0x2254(%rsp), %xmm0 vmovaps %xmm0, 0x2240(%rsp) vmovaps 0x2240(%rsp), %xmm0 vmovaps %xmm0, 0x6a0(%rsp) movq 0x828(%rsp), %rax addq $0x24, %rax movq %rax, 0x2238(%rsp) movq 0x2238(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0x2234(%rsp) vbroadcastss 0x2234(%rsp), %xmm0 vmovaps %xmm0, 0x2220(%rsp) vmovaps 0x2220(%rsp), %xmm0 vmovaps %xmm0, 0x690(%rsp) movq 0x828(%rsp), %rax addq $0x28, %rax movq %rax, 0x2218(%rsp) movq 0x2218(%rsp), %rax vmovss 
(%rax), %xmm0 vmovss %xmm0, 0x2214(%rsp) vbroadcastss 0x2214(%rsp), %xmm0 vmovaps %xmm0, 0x2200(%rsp) vmovaps 0x2200(%rsp), %xmm0 vmovaps %xmm0, 0x680(%rsp) movq 0x828(%rsp), %rax addq $0x2c, %rax movq %rax, 0x21f8(%rsp) movq 0x21f8(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0x21f4(%rsp) vbroadcastss 0x21f4(%rsp), %xmm0 vmovaps %xmm0, 0x21e0(%rsp) vmovaps 0x21e0(%rsp), %xmm0 vmovaps %xmm0, 0x670(%rsp) vmovaps 0x720(%rsp), %xmm2 vmovaps 0x730(%rsp), %xmm1 vmovaps 0x800(%rsp), %xmm0 vmovaps %xmm2, 0x2860(%rsp) vmovaps %xmm1, 0x2850(%rsp) vmovaps %xmm0, 0x2840(%rsp) vmovaps 0x2860(%rsp), %xmm2 vmovaps 0x2850(%rsp), %xmm1 vmovaps 0x2840(%rsp), %xmm0 vmovaps %xmm2, 0x2890(%rsp) vmovaps %xmm1, 0x2880(%rsp) vmovaps %xmm0, 0x2870(%rsp) vmovaps 0x2890(%rsp), %xmm1 vmovaps 0x2880(%rsp), %xmm0 vmovaps 0x2870(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x260(%rsp) vmovaps 0x260(%rsp), %xmm0 vmovaps %xmm0, 0x800(%rsp) vmovaps 0x710(%rsp), %xmm2 vmovaps 0x730(%rsp), %xmm1 vmovaps 0x7f0(%rsp), %xmm0 vmovaps %xmm2, 0x2830(%rsp) vmovaps %xmm1, 0x2820(%rsp) vmovaps %xmm0, 0x2810(%rsp) vmovaps 0x2830(%rsp), %xmm2 vmovaps 0x2820(%rsp), %xmm1 vmovaps 0x2810(%rsp), %xmm0 vmovaps %xmm2, 0x28c0(%rsp) vmovaps %xmm1, 0x28b0(%rsp) vmovaps %xmm0, 0x28a0(%rsp) vmovaps 0x28c0(%rsp), %xmm1 vmovaps 0x28b0(%rsp), %xmm0 vmovaps 0x28a0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x250(%rsp) vmovaps 0x250(%rsp), %xmm0 vmovaps %xmm0, 0x7f0(%rsp) vmovaps 0x700(%rsp), %xmm2 vmovaps 0x730(%rsp), %xmm1 vmovaps 0x7e0(%rsp), %xmm0 vmovaps %xmm2, 0x2800(%rsp) vmovaps %xmm1, 0x27f0(%rsp) vmovaps %xmm0, 0x27e0(%rsp) vmovaps 0x2800(%rsp), %xmm2 vmovaps 0x27f0(%rsp), %xmm1 vmovaps 0x27e0(%rsp), %xmm0 vmovaps %xmm2, 0x28f0(%rsp) vmovaps %xmm1, 0x28e0(%rsp) vmovaps %xmm0, 0x28d0(%rsp) vmovaps 0x28f0(%rsp), %xmm1 vmovaps 0x28e0(%rsp), %xmm0 vmovaps 0x28d0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * 
xmm0) + xmm2 vmovaps %xmm0, 0x240(%rsp) vmovaps 0x240(%rsp), %xmm0 vmovaps %xmm0, 0x7e0(%rsp) vmovaps 0x6f0(%rsp), %xmm2 vmovaps 0x730(%rsp), %xmm1 vmovaps 0x7d0(%rsp), %xmm0 vmovaps %xmm2, 0x27d0(%rsp) vmovaps %xmm1, 0x27c0(%rsp) vmovaps %xmm0, 0x27b0(%rsp) vmovaps 0x27d0(%rsp), %xmm2 vmovaps 0x27c0(%rsp), %xmm1 vmovaps 0x27b0(%rsp), %xmm0 vmovaps %xmm2, 0x2920(%rsp) vmovaps %xmm1, 0x2910(%rsp) vmovaps %xmm0, 0x2900(%rsp) vmovaps 0x2920(%rsp), %xmm1 vmovaps 0x2910(%rsp), %xmm0 vmovaps 0x2900(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x230(%rsp) vmovaps 0x230(%rsp), %xmm0 vmovaps %xmm0, 0x7d0(%rsp) vmovaps 0x6e0(%rsp), %xmm2 vmovaps 0x730(%rsp), %xmm1 vmovaps 0x7c0(%rsp), %xmm0 vmovaps %xmm2, 0x27a0(%rsp) vmovaps %xmm1, 0x2790(%rsp) vmovaps %xmm0, 0x2780(%rsp) vmovaps 0x27a0(%rsp), %xmm2 vmovaps 0x2790(%rsp), %xmm1 vmovaps 0x2780(%rsp), %xmm0 vmovaps %xmm2, 0x2950(%rsp) vmovaps %xmm1, 0x2940(%rsp) vmovaps %xmm0, 0x2930(%rsp) vmovaps 0x2950(%rsp), %xmm1 vmovaps 0x2940(%rsp), %xmm0 vmovaps 0x2930(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x220(%rsp) vmovaps 0x220(%rsp), %xmm0 vmovaps %xmm0, 0x7c0(%rsp) vmovaps 0x6d0(%rsp), %xmm2 vmovaps 0x730(%rsp), %xmm1 vmovaps 0x7b0(%rsp), %xmm0 vmovaps %xmm2, 0x2770(%rsp) vmovaps %xmm1, 0x2760(%rsp) vmovaps %xmm0, 0x2750(%rsp) vmovaps 0x2770(%rsp), %xmm2 vmovaps 0x2760(%rsp), %xmm1 vmovaps 0x2750(%rsp), %xmm0 vmovaps %xmm2, 0x2980(%rsp) vmovaps %xmm1, 0x2970(%rsp) vmovaps %xmm0, 0x2960(%rsp) vmovaps 0x2980(%rsp), %xmm1 vmovaps 0x2970(%rsp), %xmm0 vmovaps 0x2960(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x210(%rsp) vmovaps 0x210(%rsp), %xmm0 vmovaps %xmm0, 0x7b0(%rsp) vmovaps 0x6c0(%rsp), %xmm2 vmovaps 0x730(%rsp), %xmm1 vmovaps 0x7a0(%rsp), %xmm0 vmovaps %xmm2, 0x2740(%rsp) vmovaps %xmm1, 0x2730(%rsp) vmovaps %xmm0, 0x2720(%rsp) vmovaps 0x2740(%rsp), %xmm2 vmovaps 0x2730(%rsp), %xmm1 
vmovaps 0x2720(%rsp), %xmm0 vmovaps %xmm2, 0x29b0(%rsp) vmovaps %xmm1, 0x29a0(%rsp) vmovaps %xmm0, 0x2990(%rsp) vmovaps 0x29b0(%rsp), %xmm1 vmovaps 0x29a0(%rsp), %xmm0 vmovaps 0x2990(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x200(%rsp) vmovaps 0x200(%rsp), %xmm0 vmovaps %xmm0, 0x7a0(%rsp) vmovaps 0x6b0(%rsp), %xmm2 vmovaps 0x730(%rsp), %xmm1 vmovaps 0x790(%rsp), %xmm0 vmovaps %xmm2, 0x2710(%rsp) vmovaps %xmm1, 0x2700(%rsp) vmovaps %xmm0, 0x26f0(%rsp) vmovaps 0x2710(%rsp), %xmm2 vmovaps 0x2700(%rsp), %xmm1 vmovaps 0x26f0(%rsp), %xmm0 vmovaps %xmm2, 0x29e0(%rsp) vmovaps %xmm1, 0x29d0(%rsp) vmovaps %xmm0, 0x29c0(%rsp) vmovaps 0x29e0(%rsp), %xmm1 vmovaps 0x29d0(%rsp), %xmm0 vmovaps 0x29c0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x1f0(%rsp) vmovaps 0x1f0(%rsp), %xmm0 vmovaps %xmm0, 0x790(%rsp) vmovaps 0x6a0(%rsp), %xmm2 vmovaps 0x730(%rsp), %xmm1 vmovaps 0x780(%rsp), %xmm0 vmovaps %xmm2, 0x26e0(%rsp) vmovaps %xmm1, 0x26d0(%rsp) vmovaps %xmm0, 0x26c0(%rsp) vmovaps 0x26e0(%rsp), %xmm2 vmovaps 0x26d0(%rsp), %xmm1 vmovaps 0x26c0(%rsp), %xmm0 vmovaps %xmm2, 0x2a10(%rsp) vmovaps %xmm1, 0x2a00(%rsp) vmovaps %xmm0, 0x29f0(%rsp) vmovaps 0x2a10(%rsp), %xmm1 vmovaps 0x2a00(%rsp), %xmm0 vmovaps 0x29f0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x1e0(%rsp) vmovaps 0x1e0(%rsp), %xmm0 vmovaps %xmm0, 0x780(%rsp) vmovaps 0x690(%rsp), %xmm2 vmovaps 0x730(%rsp), %xmm1 vmovaps 0x770(%rsp), %xmm0 vmovaps %xmm2, 0x26b0(%rsp) vmovaps %xmm1, 0x26a0(%rsp) vmovaps %xmm0, 0x2690(%rsp) vmovaps 0x26b0(%rsp), %xmm2 vmovaps 0x26a0(%rsp), %xmm1 vmovaps 0x2690(%rsp), %xmm0 vmovaps %xmm2, 0x2a40(%rsp) vmovaps %xmm1, 0x2a30(%rsp) vmovaps %xmm0, 0x2a20(%rsp) vmovaps 0x2a40(%rsp), %xmm1 vmovaps 0x2a30(%rsp), %xmm0 vmovaps 0x2a20(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x1d0(%rsp) vmovaps 0x1d0(%rsp), %xmm0 vmovaps 
%xmm0, 0x770(%rsp) vmovaps 0x680(%rsp), %xmm2 vmovaps 0x730(%rsp), %xmm1 vmovaps 0x760(%rsp), %xmm0 vmovaps %xmm2, 0x2680(%rsp) vmovaps %xmm1, 0x2670(%rsp) vmovaps %xmm0, 0x2660(%rsp) vmovaps 0x2680(%rsp), %xmm2 vmovaps 0x2670(%rsp), %xmm1 vmovaps 0x2660(%rsp), %xmm0 vmovaps %xmm2, 0x2a70(%rsp) vmovaps %xmm1, 0x2a60(%rsp) vmovaps %xmm0, 0x2a50(%rsp) vmovaps 0x2a70(%rsp), %xmm1 vmovaps 0x2a60(%rsp), %xmm0 vmovaps 0x2a50(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x1c0(%rsp) vmovaps 0x1c0(%rsp), %xmm0 vmovaps %xmm0, 0x760(%rsp) vmovaps 0x670(%rsp), %xmm2 vmovaps 0x730(%rsp), %xmm1 vmovaps 0x750(%rsp), %xmm0 vmovaps %xmm2, 0x2650(%rsp) vmovaps %xmm1, 0x2640(%rsp) vmovaps %xmm0, 0x2630(%rsp) vmovaps 0x2650(%rsp), %xmm2 vmovaps 0x2640(%rsp), %xmm1 vmovaps 0x2630(%rsp), %xmm0 vmovaps %xmm2, 0x2aa0(%rsp) vmovaps %xmm1, 0x2a90(%rsp) vmovaps %xmm0, 0x2a80(%rsp) vmovaps 0x2aa0(%rsp), %xmm1 vmovaps 0x2a90(%rsp), %xmm0 vmovaps 0x2a80(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x1b0(%rsp) vmovaps 0x1b0(%rsp), %xmm0 vmovaps %xmm0, 0x750(%rsp) movq 0x828(%rsp), %rax addq $0x30, %rax movq %rax, 0x828(%rsp) movq 0x820(%rsp), %rax addq $0x10, %rax movq %rax, 0x820(%rsp) movl 0x74c(%rsp), %eax addl $0x1, %eax movl %eax, 0x74c(%rsp) jmp 0x61ed97 movq %rax, %rcx movl %edx, %eax movq %rcx, 0xdb0(%rsp) movl %eax, 0xdac(%rsp) leaq 0x928(%rsp), %rax movq %rax, 0xe60(%rsp) movq 0xe60(%rsp), %rax movq %rax, 0xf60(%rsp) movq 0xf60(%rsp), %rax movq %rax, 0x1a8(%rsp) cmpq $0x0, 0x8(%rax) je 0x61fa62 movq 0x1a8(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xf5c(%rsp) # imm = 0xFFFFFFFF movl 0xf5c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xf58(%rsp) cmpl $0x1, 0xf58(%rsp) jne 0x61fa62 movq 0x1a8(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x61fa33 movq 0x1a8(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x61fa31 jmp 0x61fa60 movq 
0x1a8(%rsp), %rax movq (%rax), %rax movq %rax, 0x1160(%rsp) cmpq $0x0, 0x1160(%rsp) je 0x61fa5e movq 0x1160(%rsp), %rdi callq 0x5e480 jmp 0x61fa60 jmp 0x61fa62 movq 0x1a8(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x61fabd movq %rax, %rdi callq 0x5fc90 jmp 0x621c66 movq 0x918(%rsp), %rax vmovaps 0x800(%rsp), %xmm0 movq %rax, 0x1558(%rsp) vmovaps %xmm0, 0x1540(%rsp) vmovaps 0x1540(%rsp), %xmm0 movq 0x1558(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x918(%rsp), %rax addq $0x10, %rax vmovaps 0x7f0(%rsp), %xmm0 movq %rax, 0x1538(%rsp) vmovaps %xmm0, 0x1520(%rsp) vmovaps 0x1520(%rsp), %xmm0 movq 0x1538(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x918(%rsp), %rax addq $0x20, %rax vmovaps 0x7e0(%rsp), %xmm0 movq %rax, 0x1518(%rsp) vmovaps %xmm0, 0x1500(%rsp) vmovaps 0x1500(%rsp), %xmm0 movq 0x1518(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x918(%rsp), %rax addq $0x30, %rax vmovaps 0x7d0(%rsp), %xmm0 movq %rax, 0x14f8(%rsp) vmovaps %xmm0, 0x14e0(%rsp) vmovaps 0x14e0(%rsp), %xmm0 movq 0x14f8(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x918(%rsp), %rax addq $0x40, %rax vmovaps 0x7c0(%rsp), %xmm0 movq %rax, 0x14d8(%rsp) vmovaps %xmm0, 0x14c0(%rsp) vmovaps 0x14c0(%rsp), %xmm0 movq 0x14d8(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x918(%rsp), %rax addq $0x50, %rax vmovaps 0x7b0(%rsp), %xmm0 movq %rax, 0x14b8(%rsp) vmovaps %xmm0, 0x14a0(%rsp) vmovaps 0x14a0(%rsp), %xmm0 movq 0x14b8(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x918(%rsp), %rax addq $0x60, %rax vmovaps 0x7a0(%rsp), %xmm0 movq %rax, 0x1498(%rsp) vmovaps %xmm0, 0x1480(%rsp) vmovaps 0x1480(%rsp), %xmm0 movq 0x1498(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x918(%rsp), %rax addq $0x70, %rax vmovaps 0x790(%rsp), %xmm0 movq %rax, 0x1478(%rsp) vmovaps %xmm0, 0x1460(%rsp) vmovaps 0x1460(%rsp), %xmm0 movq 0x1478(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x918(%rsp), 
%rax addq $0x80, %rax vmovaps 0x780(%rsp), %xmm0 movq %rax, 0x1458(%rsp) vmovaps %xmm0, 0x1440(%rsp) vmovaps 0x1440(%rsp), %xmm0 movq 0x1458(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x918(%rsp), %rax addq $0x90, %rax vmovaps 0x770(%rsp), %xmm0 movq %rax, 0x1438(%rsp) vmovaps %xmm0, 0x1420(%rsp) vmovaps 0x1420(%rsp), %xmm0 movq 0x1438(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x918(%rsp), %rax addq $0xa0, %rax vmovaps 0x760(%rsp), %xmm0 movq %rax, 0x1418(%rsp) vmovaps %xmm0, 0x1400(%rsp) vmovaps 0x1400(%rsp), %xmm0 movq 0x1418(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x918(%rsp), %rax addq $0xb0, %rax vmovaps 0x750(%rsp), %xmm0 movq %rax, 0x13f8(%rsp) vmovaps %xmm0, 0x13e0(%rsp) vmovaps 0x13e0(%rsp), %xmm0 movq 0x13f8(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x918(%rsp), %rax addq $0xc0, %rax movq %rax, 0x918(%rsp) movl 0x834(%rsp), %eax addl $0xc, %eax movl %eax, 0x834(%rsp) jmp 0x61eb59 jmp 0x61fdb8 movl 0x834(%rsp), %eax addl $0x7, %eax cmpl 0xe0c(%rsp), %eax jge 0x6209d0 movl 0x834(%rsp), %eax movl $0xc, %ecx cltd idivl %ecx movl %eax, 0x19c(%rsp) movl 0x834(%rsp), %eax movl $0xc, %ecx cltd idivl %ecx movl %edx, %eax movl $0x8, %ecx cltd idivl %ecx movl %eax, %ecx movl 0x19c(%rsp), %eax addl %ecx, %eax leaq 0x838(%rsp), %rcx movq %rcx, 0x10a8(%rsp) movl %eax, 0x10a4(%rsp) movq 0x10a8(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0x10a4(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x1a0(%rsp) movq 0x1a0(%rsp), %rax movq %rax, 0x668(%rsp) movl 0x884(%rsp), %eax leaq 0x888(%rsp), %rcx movq %rcx, 0x1098(%rsp) movl %eax, 0x1094(%rsp) movq 0x1098(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0x1094(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x190(%rsp) movq 0x190(%rsp), %rax movq %rax, 0x660(%rsp) movl 0xe04(%rsp), %eax shll $0x2, %eax movl %eax, 0x65c(%rsp) vxorps %xmm0, %xmm0, %xmm0 vmovaps %xmm0, 0x180(%rsp) vmovaps %xmm0, 0x2e60(%rsp) vmovaps 0x2e60(%rsp), %xmm1 vmovaps 
%xmm1, 0x640(%rsp) vmovaps %xmm0, 0x2e50(%rsp) vmovaps 0x2e50(%rsp), %xmm1 vmovaps %xmm1, 0x630(%rsp) vmovaps %xmm0, 0x2e40(%rsp) vmovaps 0x2e40(%rsp), %xmm1 vmovaps %xmm1, 0x620(%rsp) vmovaps %xmm0, 0x2e30(%rsp) vmovaps 0x2e30(%rsp), %xmm1 vmovaps %xmm1, 0x610(%rsp) vmovaps %xmm0, 0x2e20(%rsp) vmovaps 0x2e20(%rsp), %xmm1 vmovaps %xmm1, 0x600(%rsp) vmovaps %xmm0, 0x2e10(%rsp) vmovaps 0x2e10(%rsp), %xmm1 vmovaps %xmm1, 0x5f0(%rsp) vmovaps %xmm0, 0x2e00(%rsp) vmovaps 0x2e00(%rsp), %xmm1 vmovaps %xmm1, 0x5e0(%rsp) vmovaps %xmm0, 0x2df0(%rsp) vmovaps 0x2df0(%rsp), %xmm0 vmovaps %xmm0, 0x5d0(%rsp) movl $0x0, 0x5cc(%rsp) movl 0x5cc(%rsp), %eax cmpl 0x65c(%rsp), %eax jge 0x6207d0 movq 0x660(%rsp), %rax movq %rax, 0x18d8(%rsp) movq 0x18d8(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x5b0(%rsp) movq 0x668(%rsp), %rax movq %rax, 0x21d8(%rsp) movq 0x21d8(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0x21d4(%rsp) vbroadcastss 0x21d4(%rsp), %xmm0 vmovaps %xmm0, 0x21c0(%rsp) vmovaps 0x21c0(%rsp), %xmm0 vmovaps %xmm0, 0x5a0(%rsp) movq 0x668(%rsp), %rax addq $0x4, %rax movq %rax, 0x21b8(%rsp) movq 0x21b8(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0x21b4(%rsp) vbroadcastss 0x21b4(%rsp), %xmm0 vmovaps %xmm0, 0x21a0(%rsp) vmovaps 0x21a0(%rsp), %xmm0 vmovaps %xmm0, 0x590(%rsp) movq 0x668(%rsp), %rax addq $0x8, %rax movq %rax, 0x2198(%rsp) movq 0x2198(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0x2194(%rsp) vbroadcastss 0x2194(%rsp), %xmm0 vmovaps %xmm0, 0x2180(%rsp) vmovaps 0x2180(%rsp), %xmm0 vmovaps %xmm0, 0x580(%rsp) movq 0x668(%rsp), %rax addq $0xc, %rax movq %rax, 0x2178(%rsp) movq 0x2178(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0x2174(%rsp) vbroadcastss 0x2174(%rsp), %xmm0 vmovaps %xmm0, 0x2160(%rsp) vmovaps 0x2160(%rsp), %xmm0 vmovaps %xmm0, 0x570(%rsp) movq 0x668(%rsp), %rax addq $0x10, %rax movq %rax, 0x2158(%rsp) movq 0x2158(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0x2154(%rsp) vbroadcastss 0x2154(%rsp), %xmm0 vmovaps %xmm0, 0x2140(%rsp) vmovaps 
0x2140(%rsp), %xmm0 vmovaps %xmm0, 0x560(%rsp) movq 0x668(%rsp), %rax addq $0x14, %rax movq %rax, 0x2138(%rsp) movq 0x2138(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0x2134(%rsp) vbroadcastss 0x2134(%rsp), %xmm0 vmovaps %xmm0, 0x2120(%rsp) vmovaps 0x2120(%rsp), %xmm0 vmovaps %xmm0, 0x550(%rsp) movq 0x668(%rsp), %rax addq $0x18, %rax movq %rax, 0x2118(%rsp) movq 0x2118(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0x2114(%rsp) vbroadcastss 0x2114(%rsp), %xmm0 vmovaps %xmm0, 0x2100(%rsp) vmovaps 0x2100(%rsp), %xmm0 vmovaps %xmm0, 0x540(%rsp) movq 0x668(%rsp), %rax addq $0x1c, %rax movq %rax, 0x20f8(%rsp) movq 0x20f8(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0x20f4(%rsp) vbroadcastss 0x20f4(%rsp), %xmm0 vmovaps %xmm0, 0x20e0(%rsp) vmovaps 0x20e0(%rsp), %xmm0 vmovaps %xmm0, 0x530(%rsp) vmovaps 0x5a0(%rsp), %xmm2 vmovaps 0x5b0(%rsp), %xmm1 vmovaps 0x640(%rsp), %xmm0 vmovaps %xmm2, 0x2620(%rsp) vmovaps %xmm1, 0x2610(%rsp) vmovaps %xmm0, 0x2600(%rsp) vmovaps 0x2620(%rsp), %xmm2 vmovaps 0x2610(%rsp), %xmm1 vmovaps 0x2600(%rsp), %xmm0 vmovaps %xmm2, 0x2ad0(%rsp) vmovaps %xmm1, 0x2ac0(%rsp) vmovaps %xmm0, 0x2ab0(%rsp) vmovaps 0x2ad0(%rsp), %xmm1 vmovaps 0x2ac0(%rsp), %xmm0 vmovaps 0x2ab0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x170(%rsp) vmovaps 0x170(%rsp), %xmm0 vmovaps %xmm0, 0x640(%rsp) vmovaps 0x590(%rsp), %xmm2 vmovaps 0x5b0(%rsp), %xmm1 vmovaps 0x630(%rsp), %xmm0 vmovaps %xmm2, 0x25f0(%rsp) vmovaps %xmm1, 0x25e0(%rsp) vmovaps %xmm0, 0x25d0(%rsp) vmovaps 0x25f0(%rsp), %xmm2 vmovaps 0x25e0(%rsp), %xmm1 vmovaps 0x25d0(%rsp), %xmm0 vmovaps %xmm2, 0x2b00(%rsp) vmovaps %xmm1, 0x2af0(%rsp) vmovaps %xmm0, 0x2ae0(%rsp) vmovaps 0x2b00(%rsp), %xmm1 vmovaps 0x2af0(%rsp), %xmm0 vmovaps 0x2ae0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x160(%rsp) vmovaps 0x160(%rsp), %xmm0 vmovaps %xmm0, 0x630(%rsp) vmovaps 0x580(%rsp), %xmm2 vmovaps 0x5b0(%rsp), %xmm1 vmovaps 0x620(%rsp), 
%xmm0 vmovaps %xmm2, 0x25c0(%rsp) vmovaps %xmm1, 0x25b0(%rsp) vmovaps %xmm0, 0x25a0(%rsp) vmovaps 0x25c0(%rsp), %xmm2 vmovaps 0x25b0(%rsp), %xmm1 vmovaps 0x25a0(%rsp), %xmm0 vmovaps %xmm2, 0x2b30(%rsp) vmovaps %xmm1, 0x2b20(%rsp) vmovaps %xmm0, 0x2b10(%rsp) vmovaps 0x2b30(%rsp), %xmm1 vmovaps 0x2b20(%rsp), %xmm0 vmovaps 0x2b10(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x150(%rsp) vmovaps 0x150(%rsp), %xmm0 vmovaps %xmm0, 0x620(%rsp) vmovaps 0x570(%rsp), %xmm2 vmovaps 0x5b0(%rsp), %xmm1 vmovaps 0x610(%rsp), %xmm0 vmovaps %xmm2, 0x2590(%rsp) vmovaps %xmm1, 0x2580(%rsp) vmovaps %xmm0, 0x2570(%rsp) vmovaps 0x2590(%rsp), %xmm2 vmovaps 0x2580(%rsp), %xmm1 vmovaps 0x2570(%rsp), %xmm0 vmovaps %xmm2, 0x2b60(%rsp) vmovaps %xmm1, 0x2b50(%rsp) vmovaps %xmm0, 0x2b40(%rsp) vmovaps 0x2b60(%rsp), %xmm1 vmovaps 0x2b50(%rsp), %xmm0 vmovaps 0x2b40(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x140(%rsp) vmovaps 0x140(%rsp), %xmm0 vmovaps %xmm0, 0x610(%rsp) vmovaps 0x560(%rsp), %xmm2 vmovaps 0x5b0(%rsp), %xmm1 vmovaps 0x600(%rsp), %xmm0 vmovaps %xmm2, 0x2560(%rsp) vmovaps %xmm1, 0x2550(%rsp) vmovaps %xmm0, 0x2540(%rsp) vmovaps 0x2560(%rsp), %xmm2 vmovaps 0x2550(%rsp), %xmm1 vmovaps 0x2540(%rsp), %xmm0 vmovaps %xmm2, 0x2b90(%rsp) vmovaps %xmm1, 0x2b80(%rsp) vmovaps %xmm0, 0x2b70(%rsp) vmovaps 0x2b90(%rsp), %xmm1 vmovaps 0x2b80(%rsp), %xmm0 vmovaps 0x2b70(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x130(%rsp) vmovaps 0x130(%rsp), %xmm0 vmovaps %xmm0, 0x600(%rsp) vmovaps 0x550(%rsp), %xmm2 vmovaps 0x5b0(%rsp), %xmm1 vmovaps 0x5f0(%rsp), %xmm0 vmovaps %xmm2, 0x2530(%rsp) vmovaps %xmm1, 0x2520(%rsp) vmovaps %xmm0, 0x2510(%rsp) vmovaps 0x2530(%rsp), %xmm2 vmovaps 0x2520(%rsp), %xmm1 vmovaps 0x2510(%rsp), %xmm0 vmovaps %xmm2, 0x2bc0(%rsp) vmovaps %xmm1, 0x2bb0(%rsp) vmovaps %xmm0, 0x2ba0(%rsp) vmovaps 0x2bc0(%rsp), %xmm1 vmovaps 0x2bb0(%rsp), %xmm0 
vmovaps 0x2ba0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x120(%rsp) vmovaps 0x120(%rsp), %xmm0 vmovaps %xmm0, 0x5f0(%rsp) vmovaps 0x540(%rsp), %xmm2 vmovaps 0x5b0(%rsp), %xmm1 vmovaps 0x5e0(%rsp), %xmm0 vmovaps %xmm2, 0x2500(%rsp) vmovaps %xmm1, 0x24f0(%rsp) vmovaps %xmm0, 0x24e0(%rsp) vmovaps 0x2500(%rsp), %xmm2 vmovaps 0x24f0(%rsp), %xmm1 vmovaps 0x24e0(%rsp), %xmm0 vmovaps %xmm2, 0x2bf0(%rsp) vmovaps %xmm1, 0x2be0(%rsp) vmovaps %xmm0, 0x2bd0(%rsp) vmovaps 0x2bf0(%rsp), %xmm1 vmovaps 0x2be0(%rsp), %xmm0 vmovaps 0x2bd0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x110(%rsp) vmovaps 0x110(%rsp), %xmm0 vmovaps %xmm0, 0x5e0(%rsp) vmovaps 0x530(%rsp), %xmm2 vmovaps 0x5b0(%rsp), %xmm1 vmovaps 0x5d0(%rsp), %xmm0 vmovaps %xmm2, 0x24d0(%rsp) vmovaps %xmm1, 0x24c0(%rsp) vmovaps %xmm0, 0x24b0(%rsp) vmovaps 0x24d0(%rsp), %xmm2 vmovaps 0x24c0(%rsp), %xmm1 vmovaps 0x24b0(%rsp), %xmm0 vmovaps %xmm2, 0x2c20(%rsp) vmovaps %xmm1, 0x2c10(%rsp) vmovaps %xmm0, 0x2c00(%rsp) vmovaps 0x2c20(%rsp), %xmm1 vmovaps 0x2c10(%rsp), %xmm0 vmovaps 0x2c00(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x100(%rsp) vmovaps 0x100(%rsp), %xmm0 vmovaps %xmm0, 0x5d0(%rsp) movq 0x668(%rsp), %rax addq $0x20, %rax movq %rax, 0x668(%rsp) movq 0x660(%rsp), %rax addq $0x10, %rax movq %rax, 0x660(%rsp) movl 0x5cc(%rsp), %eax addl $0x1, %eax movl %eax, 0x5cc(%rsp) jmp 0x61ffb5 movq 0x918(%rsp), %rax vmovaps 0x640(%rsp), %xmm0 movq %rax, 0x13d8(%rsp) vmovaps %xmm0, 0x13c0(%rsp) vmovaps 0x13c0(%rsp), %xmm0 movq 0x13d8(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x918(%rsp), %rax addq $0x10, %rax vmovaps 0x630(%rsp), %xmm0 movq %rax, 0x13b8(%rsp) vmovaps %xmm0, 0x13a0(%rsp) vmovaps 0x13a0(%rsp), %xmm0 movq 0x13b8(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x918(%rsp), %rax addq $0x20, %rax vmovaps 0x620(%rsp), %xmm0 movq %rax, 0x1398(%rsp) vmovaps %xmm0, 0x1380(%rsp) vmovaps 
0x1380(%rsp), %xmm0 movq 0x1398(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x918(%rsp), %rax addq $0x30, %rax vmovaps 0x610(%rsp), %xmm0 movq %rax, 0x1378(%rsp) vmovaps %xmm0, 0x1360(%rsp) vmovaps 0x1360(%rsp), %xmm0 movq 0x1378(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x918(%rsp), %rax addq $0x40, %rax vmovaps 0x600(%rsp), %xmm0 movq %rax, 0x1358(%rsp) vmovaps %xmm0, 0x1340(%rsp) vmovaps 0x1340(%rsp), %xmm0 movq 0x1358(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x918(%rsp), %rax addq $0x50, %rax vmovaps 0x5f0(%rsp), %xmm0 movq %rax, 0x1338(%rsp) vmovaps %xmm0, 0x1320(%rsp) vmovaps 0x1320(%rsp), %xmm0 movq 0x1338(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x918(%rsp), %rax addq $0x60, %rax vmovaps 0x5e0(%rsp), %xmm0 movq %rax, 0x1318(%rsp) vmovaps %xmm0, 0x1300(%rsp) vmovaps 0x1300(%rsp), %xmm0 movq 0x1318(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x918(%rsp), %rax addq $0x70, %rax vmovaps 0x5d0(%rsp), %xmm0 movq %rax, 0x12f8(%rsp) vmovaps %xmm0, 0x12e0(%rsp) vmovaps 0x12e0(%rsp), %xmm0 movq 0x12f8(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x918(%rsp), %rax addq $0x80, %rax movq %rax, 0x918(%rsp) movl 0x834(%rsp), %eax addl $0x8, %eax movl %eax, 0x834(%rsp) jmp 0x61fdb8 jmp 0x6209d2 movl 0x834(%rsp), %eax addl $0x3, %eax cmpl 0xe0c(%rsp), %eax jge 0x6210e8 movl 0x834(%rsp), %eax movl $0xc, %ecx cltd idivl %ecx movl %eax, 0xf0(%rsp) movl 0x834(%rsp), %eax movl $0xc, %ecx cltd idivl %ecx movl %edx, %eax movl $0x8, %ecx cltd idivl %ecx movl %eax, %ecx movl 0xf0(%rsp), %eax addl %ecx, %eax movl %eax, 0xf4(%rsp) movl 0x834(%rsp), %eax movl $0xc, %ecx cltd idivl %ecx movl %edx, %eax movl $0x8, %ecx cltd idivl %ecx movl %edx, %eax movl $0x4, %ecx cltd idivl %ecx movl %eax, %ecx movl 0xf4(%rsp), %eax addl %ecx, %eax leaq 0x838(%rsp), %rcx movq %rcx, 0x1088(%rsp) movl %eax, 0x1084(%rsp) movq 0x1088(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0x1084(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0xf8(%rsp) movq 0xf8(%rsp), %rax movq %rax, 
0x528(%rsp) movl 0x884(%rsp), %eax leaq 0x888(%rsp), %rcx movq %rcx, 0x1078(%rsp) movl %eax, 0x1074(%rsp) movq 0x1078(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0x1074(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0xe8(%rsp) movq 0xe8(%rsp), %rax movq %rax, 0x520(%rsp) movl 0xe04(%rsp), %eax shll $0x2, %eax movl %eax, 0x51c(%rsp) vxorps %xmm0, %xmm0, %xmm0 vmovaps %xmm0, 0x2de0(%rsp) vmovaps 0x2de0(%rsp), %xmm1 vmovaps %xmm1, 0x500(%rsp) vmovaps %xmm0, 0x2dd0(%rsp) vmovaps 0x2dd0(%rsp), %xmm1 vmovaps %xmm1, 0x4f0(%rsp) vmovaps %xmm0, 0x2dc0(%rsp) vmovaps 0x2dc0(%rsp), %xmm1 vmovaps %xmm1, 0x4e0(%rsp) vmovaps %xmm0, 0x2db0(%rsp) vmovaps 0x2db0(%rsp), %xmm0 vmovaps %xmm0, 0x4d0(%rsp) movl $0x0, 0x4cc(%rsp) movl 0x4cc(%rsp), %eax cmpl 0x51c(%rsp), %eax jge 0x620fd6 movq 0x520(%rsp), %rax movq %rax, 0x18d0(%rsp) movq 0x18d0(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x4b0(%rsp) movq 0x528(%rsp), %rax movq %rax, 0x20d8(%rsp) movq 0x20d8(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0x20d4(%rsp) vbroadcastss 0x20d4(%rsp), %xmm0 vmovaps %xmm0, 0x20c0(%rsp) vmovaps 0x20c0(%rsp), %xmm0 vmovaps %xmm0, 0x4a0(%rsp) movq 0x528(%rsp), %rax addq $0x4, %rax movq %rax, 0x20b8(%rsp) movq 0x20b8(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0x20b4(%rsp) vbroadcastss 0x20b4(%rsp), %xmm0 vmovaps %xmm0, 0x20a0(%rsp) vmovaps 0x20a0(%rsp), %xmm0 vmovaps %xmm0, 0x490(%rsp) movq 0x528(%rsp), %rax addq $0x8, %rax movq %rax, 0x2098(%rsp) movq 0x2098(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0x2094(%rsp) vbroadcastss 0x2094(%rsp), %xmm0 vmovaps %xmm0, 0x2080(%rsp) vmovaps 0x2080(%rsp), %xmm0 vmovaps %xmm0, 0x480(%rsp) movq 0x528(%rsp), %rax addq $0xc, %rax movq %rax, 0x2078(%rsp) movq 0x2078(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0x2074(%rsp) vbroadcastss 0x2074(%rsp), %xmm0 vmovaps %xmm0, 0x2060(%rsp) vmovaps 0x2060(%rsp), %xmm0 vmovaps %xmm0, 0x470(%rsp) vmovaps 0x4a0(%rsp), %xmm2 vmovaps 0x4b0(%rsp), %xmm1 vmovaps 
0x500(%rsp), %xmm0 vmovaps %xmm2, 0x24a0(%rsp) vmovaps %xmm1, 0x2490(%rsp) vmovaps %xmm0, 0x2480(%rsp) vmovaps 0x24a0(%rsp), %xmm2 vmovaps 0x2490(%rsp), %xmm1 vmovaps 0x2480(%rsp), %xmm0 vmovaps %xmm2, 0x2c50(%rsp) vmovaps %xmm1, 0x2c40(%rsp) vmovaps %xmm0, 0x2c30(%rsp) vmovaps 0x2c50(%rsp), %xmm1 vmovaps 0x2c40(%rsp), %xmm0 vmovaps 0x2c30(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0xd0(%rsp) vmovaps 0xd0(%rsp), %xmm0 vmovaps %xmm0, 0x500(%rsp) vmovaps 0x490(%rsp), %xmm2 vmovaps 0x4b0(%rsp), %xmm1 vmovaps 0x4f0(%rsp), %xmm0 vmovaps %xmm2, 0x2470(%rsp) vmovaps %xmm1, 0x2460(%rsp) vmovaps %xmm0, 0x2450(%rsp) vmovaps 0x2470(%rsp), %xmm2 vmovaps 0x2460(%rsp), %xmm1 vmovaps 0x2450(%rsp), %xmm0 vmovaps %xmm2, 0x2c80(%rsp) vmovaps %xmm1, 0x2c70(%rsp) vmovaps %xmm0, 0x2c60(%rsp) vmovaps 0x2c80(%rsp), %xmm1 vmovaps 0x2c70(%rsp), %xmm0 vmovaps 0x2c60(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0xc0(%rsp) vmovaps 0xc0(%rsp), %xmm0 vmovaps %xmm0, 0x4f0(%rsp) vmovaps 0x480(%rsp), %xmm2 vmovaps 0x4b0(%rsp), %xmm1 vmovaps 0x4e0(%rsp), %xmm0 vmovaps %xmm2, 0x2440(%rsp) vmovaps %xmm1, 0x2430(%rsp) vmovaps %xmm0, 0x2420(%rsp) vmovaps 0x2440(%rsp), %xmm2 vmovaps 0x2430(%rsp), %xmm1 vmovaps 0x2420(%rsp), %xmm0 vmovaps %xmm2, 0x2cb0(%rsp) vmovaps %xmm1, 0x2ca0(%rsp) vmovaps %xmm0, 0x2c90(%rsp) vmovaps 0x2cb0(%rsp), %xmm1 vmovaps 0x2ca0(%rsp), %xmm0 vmovaps 0x2c90(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0xb0(%rsp) vmovaps 0xb0(%rsp), %xmm0 vmovaps %xmm0, 0x4e0(%rsp) vmovaps 0x470(%rsp), %xmm2 vmovaps 0x4b0(%rsp), %xmm1 vmovaps 0x4d0(%rsp), %xmm0 vmovaps %xmm2, 0x2410(%rsp) vmovaps %xmm1, 0x2400(%rsp) vmovaps %xmm0, 0x23f0(%rsp) vmovaps 0x2410(%rsp), %xmm2 vmovaps 0x2400(%rsp), %xmm1 vmovaps 0x23f0(%rsp), %xmm0 vmovaps %xmm2, 0x2ce0(%rsp) vmovaps %xmm1, 0x2cd0(%rsp) vmovaps %xmm0, 0x2cc0(%rsp) vmovaps 0x2ce0(%rsp), %xmm1 vmovaps 0x2cd0(%rsp), 
%xmm0 vmovaps 0x2cc0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0xa0(%rsp) vmovaps 0xa0(%rsp), %xmm0 vmovaps %xmm0, 0x4d0(%rsp) movq 0x528(%rsp), %rax addq $0x10, %rax movq %rax, 0x528(%rsp) movq 0x520(%rsp), %rax addq $0x10, %rax movq %rax, 0x520(%rsp) movl 0x4cc(%rsp), %eax addl $0x1, %eax movl %eax, 0x4cc(%rsp) jmp 0x620b8f movq 0x918(%rsp), %rax vmovaps 0x500(%rsp), %xmm0 movq %rax, 0x12d8(%rsp) vmovaps %xmm0, 0x12c0(%rsp) vmovaps 0x12c0(%rsp), %xmm0 movq 0x12d8(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x918(%rsp), %rax addq $0x10, %rax vmovaps 0x4f0(%rsp), %xmm0 movq %rax, 0x12b8(%rsp) vmovaps %xmm0, 0x12a0(%rsp) vmovaps 0x12a0(%rsp), %xmm0 movq 0x12b8(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x918(%rsp), %rax addq $0x20, %rax vmovaps 0x4e0(%rsp), %xmm0 movq %rax, 0x1298(%rsp) vmovaps %xmm0, 0x1280(%rsp) vmovaps 0x1280(%rsp), %xmm0 movq 0x1298(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x918(%rsp), %rax addq $0x30, %rax vmovaps 0x4d0(%rsp), %xmm0 movq %rax, 0x1278(%rsp) vmovaps %xmm0, 0x1260(%rsp) vmovaps 0x1260(%rsp), %xmm0 movq 0x1278(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x918(%rsp), %rax addq $0x40, %rax movq %rax, 0x918(%rsp) movl 0x834(%rsp), %eax addl $0x4, %eax movl %eax, 0x834(%rsp) jmp 0x6209d2 jmp 0x6210ea movl 0x834(%rsp), %eax addl $0x1, %eax cmpl 0xe0c(%rsp), %eax jge 0x621593 movl 0x834(%rsp), %eax movl $0xc, %ecx cltd idivl %ecx movl %eax, 0x8c(%rsp) movl 0x834(%rsp), %eax movl $0xc, %ecx cltd idivl %ecx movl %edx, %eax movl $0x8, %ecx cltd idivl %ecx movl %eax, %ecx movl 0x8c(%rsp), %eax addl %ecx, %eax movl %eax, 0x90(%rsp) movl 0x834(%rsp), %eax movl $0xc, %ecx cltd idivl %ecx movl %edx, %eax movl $0x8, %ecx cltd idivl %ecx movl %edx, %eax movl $0x4, %ecx cltd idivl %ecx movl %eax, %ecx movl 0x90(%rsp), %eax addl %ecx, %eax movl %eax, 0x94(%rsp) movl 0x834(%rsp), %eax movl $0xc, %ecx cltd idivl %ecx movl %edx, %eax movl $0x4, %ecx cltd idivl %ecx movl %edx, %eax movl $0x2, %ecx cltd idivl %ecx movl 
%eax, %ecx movl 0x94(%rsp), %eax addl %ecx, %eax leaq 0x838(%rsp), %rcx movq %rcx, 0x1068(%rsp) movl %eax, 0x1064(%rsp) movq 0x1068(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0x1064(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x98(%rsp) movq 0x98(%rsp), %rax movq %rax, 0x468(%rsp) movl 0x884(%rsp), %eax leaq 0x888(%rsp), %rcx movq %rcx, 0x1058(%rsp) movl %eax, 0x1054(%rsp) movq 0x1058(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0x1054(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x80(%rsp) movq 0x80(%rsp), %rax movq %rax, 0x460(%rsp) movl 0xe04(%rsp), %eax shll $0x2, %eax movl %eax, 0x45c(%rsp) vxorps %xmm0, %xmm0, %xmm0 vmovaps %xmm0, 0x2da0(%rsp) vmovaps 0x2da0(%rsp), %xmm1 vmovaps %xmm1, 0x440(%rsp) vmovaps %xmm0, 0x2d90(%rsp) vmovaps 0x2d90(%rsp), %xmm0 vmovaps %xmm0, 0x430(%rsp) movl $0x0, 0x42c(%rsp) movl 0x42c(%rsp), %eax cmpl 0x45c(%rsp), %eax jge 0x6214f7 movq 0x460(%rsp), %rax movq %rax, 0x18c8(%rsp) movq 0x18c8(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x410(%rsp) movq 0x468(%rsp), %rax movq %rax, 0x2058(%rsp) movq 0x2058(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0x2054(%rsp) vbroadcastss 0x2054(%rsp), %xmm0 vmovaps %xmm0, 0x2040(%rsp) vmovaps 0x2040(%rsp), %xmm0 vmovaps %xmm0, 0x400(%rsp) movq 0x468(%rsp), %rax addq $0x4, %rax movq %rax, 0x2038(%rsp) movq 0x2038(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0x2034(%rsp) vbroadcastss 0x2034(%rsp), %xmm0 vmovaps %xmm0, 0x2020(%rsp) vmovaps 0x2020(%rsp), %xmm0 vmovaps %xmm0, 0x3f0(%rsp) vmovaps 0x400(%rsp), %xmm2 vmovaps 0x410(%rsp), %xmm1 vmovaps 0x440(%rsp), %xmm0 vmovaps %xmm2, 0x23e0(%rsp) vmovaps %xmm1, 0x23d0(%rsp) vmovaps %xmm0, 0x23c0(%rsp) vmovaps 0x23e0(%rsp), %xmm2 vmovaps 0x23d0(%rsp), %xmm1 vmovaps 0x23c0(%rsp), %xmm0 vmovaps %xmm2, 0x2d10(%rsp) vmovaps %xmm1, 0x2d00(%rsp) vmovaps %xmm0, 0x2cf0(%rsp) vmovaps 0x2d10(%rsp), %xmm1 vmovaps 0x2d00(%rsp), %xmm0 vmovaps 0x2cf0(%rsp), 
%xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x70(%rsp) vmovaps 0x70(%rsp), %xmm0 vmovaps %xmm0, 0x440(%rsp) vmovaps 0x3f0(%rsp), %xmm2 vmovaps 0x410(%rsp), %xmm1 vmovaps 0x430(%rsp), %xmm0 vmovaps %xmm2, 0x23b0(%rsp) vmovaps %xmm1, 0x23a0(%rsp) vmovaps %xmm0, 0x2390(%rsp) vmovaps 0x23b0(%rsp), %xmm2 vmovaps 0x23a0(%rsp), %xmm1 vmovaps 0x2390(%rsp), %xmm0 vmovaps %xmm2, 0x2d40(%rsp) vmovaps %xmm1, 0x2d30(%rsp) vmovaps %xmm0, 0x2d20(%rsp) vmovaps 0x2d40(%rsp), %xmm1 vmovaps 0x2d30(%rsp), %xmm0 vmovaps 0x2d20(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x60(%rsp) vmovaps 0x60(%rsp), %xmm0 vmovaps %xmm0, 0x430(%rsp) movq 0x468(%rsp), %rax addq $0x8, %rax movq %rax, 0x468(%rsp) movq 0x460(%rsp), %rax addq $0x10, %rax movq %rax, 0x460(%rsp) movl 0x42c(%rsp), %eax addl $0x1, %eax movl %eax, 0x42c(%rsp) jmp 0x6212a6 movq 0x918(%rsp), %rax vmovaps 0x440(%rsp), %xmm0 movq %rax, 0x1258(%rsp) vmovaps %xmm0, 0x1240(%rsp) vmovaps 0x1240(%rsp), %xmm0 movq 0x1258(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x918(%rsp), %rax addq $0x10, %rax vmovaps 0x430(%rsp), %xmm0 movq %rax, 0x1238(%rsp) vmovaps %xmm0, 0x1220(%rsp) vmovaps 0x1220(%rsp), %xmm0 movq 0x1238(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x918(%rsp), %rax addq $0x20, %rax movq %rax, 0x918(%rsp) movl 0x834(%rsp), %eax addl $0x2, %eax movl %eax, 0x834(%rsp) jmp 0x6210ea jmp 0x621595 movl 0x834(%rsp), %eax cmpl 0xe0c(%rsp), %eax jge 0x6218fb movl 0x834(%rsp), %eax movl $0xc, %ecx cltd idivl %ecx movl %eax, 0x48(%rsp) movl 0x834(%rsp), %eax movl $0xc, %ecx cltd idivl %ecx movl %edx, %eax movl $0x8, %ecx cltd idivl %ecx movl %eax, %ecx movl 0x48(%rsp), %eax addl %ecx, %eax movl %eax, 0x4c(%rsp) movl 0x834(%rsp), %eax movl $0xc, %ecx cltd idivl %ecx movl %edx, %eax movl $0x8, %ecx cltd idivl %ecx movl %edx, %eax movl $0x4, %ecx cltd idivl %ecx movl %eax, %ecx movl 0x4c(%rsp), %eax addl %ecx, %eax movl %eax, 0x50(%rsp) movl 0x834(%rsp), %eax movl 
$0xc, %ecx cltd idivl %ecx movl %edx, %eax movl $0x4, %ecx cltd idivl %ecx movl %edx, %eax movl $0x2, %ecx cltd idivl %ecx movl %eax, %ecx movl 0x50(%rsp), %eax addl %ecx, %eax movl %eax, 0x54(%rsp) movl 0x834(%rsp), %eax movl $0xc, %ecx cltd idivl %ecx movl %edx, %eax movl $0x2, %ecx cltd idivl %ecx movl 0x54(%rsp), %eax addl %edx, %eax leaq 0x838(%rsp), %rcx movq %rcx, 0x1048(%rsp) movl %eax, 0x1044(%rsp) movq 0x1048(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0x1044(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x58(%rsp) movq 0x58(%rsp), %rax movq %rax, 0x3e8(%rsp) movl 0x884(%rsp), %eax leaq 0x888(%rsp), %rcx movq %rcx, 0x1038(%rsp) movl %eax, 0x1034(%rsp) movq 0x1038(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0x1034(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x40(%rsp) movq 0x40(%rsp), %rax movq %rax, 0x3e0(%rsp) movl 0xe04(%rsp), %eax shll $0x2, %eax movl %eax, 0x3dc(%rsp) vxorps %xmm0, %xmm0, %xmm0 vmovaps %xmm0, 0x2d80(%rsp) vmovaps 0x2d80(%rsp), %xmm0 vmovaps %xmm0, 0x3c0(%rsp) movl $0x0, 0x3bc(%rsp) movl 0x3bc(%rsp), %eax cmpl 0x3dc(%rsp), %eax jge 0x62189a movq 0x3e0(%rsp), %rax movq %rax, 0x18c0(%rsp) movq 0x18c0(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x3a0(%rsp) movq 0x3e8(%rsp), %rax movq %rax, 0x2018(%rsp) movq 0x2018(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0x2014(%rsp) vbroadcastss 0x2014(%rsp), %xmm0 vmovaps %xmm0, 0x2000(%rsp) vmovaps 0x2000(%rsp), %xmm0 vmovaps %xmm0, 0x390(%rsp) vmovaps 0x390(%rsp), %xmm2 vmovaps 0x3a0(%rsp), %xmm1 vmovaps 0x3c0(%rsp), %xmm0 vmovaps %xmm2, 0x2380(%rsp) vmovaps %xmm1, 0x2370(%rsp) vmovaps %xmm0, 0x2360(%rsp) vmovaps 0x2380(%rsp), %xmm2 vmovaps 0x2370(%rsp), %xmm1 vmovaps 0x2360(%rsp), %xmm0 vmovaps %xmm2, 0x2d70(%rsp) vmovaps %xmm1, 0x2d60(%rsp) vmovaps %xmm0, 0x2d50(%rsp) vmovaps 0x2d70(%rsp), %xmm1 vmovaps 0x2d60(%rsp), %xmm0 vmovaps 0x2d50(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 
= (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x30(%rsp) vmovaps 0x30(%rsp), %xmm0 vmovaps %xmm0, 0x3c0(%rsp) movq 0x3e8(%rsp), %rax addq $0x4, %rax movq %rax, 0x3e8(%rsp) movq 0x3e0(%rsp), %rax addq $0x10, %rax movq %rax, 0x3e0(%rsp) movl 0x3bc(%rsp), %eax addl $0x1, %eax movl %eax, 0x3bc(%rsp) jmp 0x621738 movq 0x918(%rsp), %rax vmovaps 0x3c0(%rsp), %xmm0 movq %rax, 0x1218(%rsp) vmovaps %xmm0, 0x1200(%rsp) vmovaps 0x1200(%rsp), %xmm0 movq 0x1218(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x918(%rsp), %rax addq $0x10, %rax movq %rax, 0x918(%rsp) movl 0x834(%rsp), %eax addl $0x1, %eax movl %eax, 0x834(%rsp) jmp 0x621595 leaq 0x838(%rsp), %rax movq %rax, 0xe78(%rsp) movq 0xe78(%rsp), %rax movq %rax, 0xf30(%rsp) movq 0xf30(%rsp), %rax movq %rax, 0x28(%rsp) cmpq $0x0, 0x8(%rax) je 0x6219b4 movq 0x28(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xf2c(%rsp) # imm = 0xFFFFFFFF movl 0xf2c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xf28(%rsp) cmpl $0x1, 0xf28(%rsp) jne 0x6219b4 movq 0x28(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x621988 movq 0x28(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x621986 jmp 0x6219b2 movq 0x28(%rsp), %rax movq (%rax), %rax movq %rax, 0x1178(%rsp) cmpq $0x0, 0x1178(%rsp) je 0x6219b0 movq 0x1178(%rsp), %rdi callq 0x5e480 jmp 0x6219b2 jmp 0x6219b4 movq 0x28(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x621a0c movq %rax, %rdi callq 0x5fc90 jmp 0x621a0e movl 0x884(%rsp), %eax addl $0x1, %eax movl %eax, 0x884(%rsp) jmp 0x61e83b leaq 0x888(%rsp), %rax movq %rax, 0xe88(%rsp) movq 0xe88(%rsp), %rax movq %rax, 0xf10(%rsp) movq 0xf10(%rsp), %rax movq %rax, 0x20(%rsp) cmpq $0x0, 0x8(%rax) je 0x621add movq 0x20(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xf0c(%rsp) # imm = 0xFFFFFFFF movl 0xf0c(%rsp), %eax 
lock xaddl %eax, (%rcx) movl %eax, 0xf08(%rsp) cmpl $0x1, 0xf08(%rsp) jne 0x621add movq 0x20(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x621ab1 movq 0x20(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x621aaf jmp 0x621adb movq 0x20(%rsp), %rax movq (%rax), %rax movq %rax, 0x1188(%rsp) cmpq $0x0, 0x1188(%rsp) je 0x621ad9 movq 0x1188(%rsp), %rdi callq 0x5e480 jmp 0x621adb jmp 0x621add movq 0x20(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x621b35 movq %rax, %rdi callq 0x5fc90 jmp 0x621b37 movl 0x924(%rsp), %eax addl $0x1, %eax movl %eax, 0x924(%rsp) jmp 0x61e0c8 leaq 0xdb8(%rsp), %rax movq %rax, 0xe98(%rsp) movq 0xe98(%rsp), %rax movq %rax, 0xef0(%rsp) movq 0xef0(%rsp), %rax movq %rax, 0x18(%rsp) cmpq $0x0, 0x8(%rax) je 0x621c06 movq 0x18(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xeec(%rsp) # imm = 0xFFFFFFFF movl 0xeec(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xee8(%rsp) cmpl $0x1, 0xee8(%rsp) jne 0x621c06 movq 0x18(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x621bda movq 0x18(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x621bd8 jmp 0x621c04 movq 0x18(%rsp), %rax movq (%rax), %rax movq %rax, 0x1198(%rsp) cmpq $0x0, 0x1198(%rsp) je 0x621c02 movq 0x1198(%rsp), %rdi callq 0x5e480 jmp 0x621c04 jmp 0x621c06 movq 0x18(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x621c5e movq %rax, %rdi callq 0x5fc90 addq $0x3058, %rsp # imm = 0x3058 retq leaq 0xdb8(%rsp), %rax movq %rax, 0xea0(%rsp) movq 0xea0(%rsp), %rax movq %rax, 0xee0(%rsp) movq 0xee0(%rsp), %rax movq %rax, 0x10(%rsp) cmpq 
$0x0, 0x8(%rax) je 0x621d1f movq 0x10(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xedc(%rsp) # imm = 0xFFFFFFFF movl 0xedc(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xed8(%rsp) cmpl $0x1, 0xed8(%rsp) jne 0x621d1f movq 0x10(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x621cf3 movq 0x10(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x621cf1 jmp 0x621d1d movq 0x10(%rsp), %rax movq (%rax), %rax movq %rax, 0x11a0(%rsp) cmpq $0x0, 0x11a0(%rsp) je 0x621d1b movq 0x11a0(%rsp), %rdi callq 0x5e480 jmp 0x621d1d jmp 0x621d1f movq 0x10(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x621d77 movq %rax, %rdi callq 0x5fc90 jmp 0x621d79 movq 0xdb0(%rsp), %rdi callq 0x5e3b0 nopw %cs:(%rax,%rax)
/ysh329[P]ncnn/src/layer/x86/convolution_winograd_dot_pack4.h
ncnn::conv3x3s1_winograd63_transform_output_pack4_sse(ncnn::Mat const&, ncnn::Mat&, ncnn::Mat const&, ncnn::Option const&)
// Winograd F(6x6, 3x3) output transform, SSE, pack-4 layout.
// Converts the transform-domain accumulation result top_blob_tm (8x8 tiles,
// 4 channels interleaved per element) back to the spatial domain, producing
// 6x6 output tiles in top_blob, and adds the per-channel bias.
//
// Parameters:
//   top_blob_tm - transform-domain blob; laid out as 64 "planes" of `tiles`
//                 elements each (stride between planes is tiles * 4 floats).
//   top_blob    - destination blob; outw/outh are assumed to be multiples of 6
//                 here (callers handle edge padding/cropping elsewhere —
//                 TODO confirm against caller).
//   bias        - optional per-output-channel bias, 4 floats per channel;
//                 may be empty (biasptr == NULL -> zero bias).
//   opt         - thread-count options for the OpenMP parallel loop.
static void conv3x3s1_winograd63_transform_output_pack4_sse(const Mat& top_blob_tm, Mat& top_blob, const Mat& bias, const Option& opt)
{
    const int outw = top_blob.w;
    const int outh = top_blob.h;
    const int outch = top_blob.c;

    // Each output tile covers a 6x6 spatial patch.
    const int w_tiles = outw / 6;
    const int h_tiles = outh / 6;
    const int tiles = w_tiles * h_tiles;

    const float* biasptr = bias;

    // Reference output-transform matrix (rows applied to the 8 transform
    // coefficients to recover 6 spatial values):
    // const float otm[6][8] = {
    //     {1.0f,  1.0f,  1.0f,   1.0f,   1.0f,  32.0f, 32.0f, 0.0f},
    //     {0.0f,  1.0f, -1.0f,   2.0f,  -2.0f,  16.0f,-16.0f, 0.0f},
    //     {0.0f,  1.0f,  1.0f,   4.0f,   4.0f,   8.0f,  8.0f, 0.0f},
    //     {0.0f,  1.0f, -1.0f,   8.0f,  -8.0f,   4.0f, -4.0f, 0.0f},
    //     {0.0f,  1.0f,  1.0f,  16.0f,  16.0f,   2.0f,  2.0f, 0.0f},
    //     {0.0f,  1.0f, -1.0f,  32.0f, -32.0f,   1.0f, -1.0f, 1.0f}
    // };

    // Expanded, using sum/difference pairs (a = r1±r2, b = r3±r4, c = r5±r6):
    // 0 = r0 + (r1 + r2) + (r3 + r4) + (r5 + r6) * 32
    // 1 =      (r1 - r2) + (r3 - r4) * 2 + (r5 - r6) * 16
    // 2 =      (r1 + r2) + (r3 + r4) * 4 + (r5 + r6) * 8
    // 3 =      (r1 - r2) + (r3 - r4) * 8 + (r5 - r6) * 4
    // 4 =      (r1 + r2) + (r3 + r4) * 16+ (r5 + r6) * 2
    // 5 = r7 + (r1 - r2) + (r3 - r4) * 32+ (r5 - r6)

    #pragma omp parallel for num_threads(opt.num_threads)
    for (int p = 0; p < outch; p++)
    {
        const Mat out0_tm = top_blob_tm.channel(p);
        Mat out0 = top_blob.channel(p);

        // Bias broadcast to the 4 packed channels; zero when bias is absent.
        __m128 _bias0 = biasptr ? _mm_loadu_ps(biasptr + p * 4) : _mm_setzero_ps();

        // Scratch for the row-transformed tile: 6 output rows x 8 transform
        // columns x 4 packed channels. 16-byte aligned for _mm_load/store_ps.
#ifdef _MSC_VER
        __declspec(align(16))
#else
        __attribute__((aligned(16)))
#endif
        float tmp[6][8][4];

        // Constant multipliers from the otm matrix above.
        __m128 _v32 = _mm_set1_ps(32.f);
        __m128 _v16 = _mm_set1_ps(16.f);
        __m128 _v8 = _mm_set1_ps(8.f);
        __m128 _v4 = _mm_set1_ps(4.f);
        __m128 _v2 = _mm_set1_ps(2.f);

        // tile
        for (int i = 0; i < h_tiles; i++)
        {
            for (int j = 0; j < w_tiles; j++)
            {
                // Pointers into 8 consecutive transform "planes" for this
                // tile; plane stride is tiles * 4 floats.
                const float* output0_tm_0 = (const float*)out0_tm + (i * w_tiles + j) * 4;
                const float* output0_tm_1 = output0_tm_0 + tiles * 4;
                const float* output0_tm_2 = output0_tm_0 + tiles * 4 * 2;
                const float* output0_tm_3 = output0_tm_0 + tiles * 4 * 3;
                const float* output0_tm_4 = output0_tm_0 + tiles * 4 * 4;
                const float* output0_tm_5 = output0_tm_0 + tiles * 4 * 5;
                const float* output0_tm_6 = output0_tm_0 + tiles * 4 * 6;
                const float* output0_tm_7 = output0_tm_0 + tiles * 4 * 7;

                // Top-left of the 6x6 destination patch (pack-4 layout).
                float* output0 = out0.row(i * 6) + (j * 6) * 4;

                // Pass 1: apply otm along one axis — for each of the 8
                // transform columns m, reduce 8 coefficients to 6 values
                // stored in tmp[0..5][m].
                for (int m = 0; m < 8; m++)
                {
                    __m128 _out0tm0 = _mm_load_ps(output0_tm_0);
                    __m128 _out0tm1 = _mm_load_ps(output0_tm_1);
                    __m128 _out0tm2 = _mm_load_ps(output0_tm_2);
                    __m128 _out0tm3 = _mm_load_ps(output0_tm_3);
                    __m128 _out0tm4 = _mm_load_ps(output0_tm_4);
                    __m128 _out0tm5 = _mm_load_ps(output0_tm_5);
                    __m128 _out0tm6 = _mm_load_ps(output0_tm_6);
                    __m128 _out0tm7 = _mm_load_ps(output0_tm_7);

                    // Shared sum/difference pairs: even outputs use the
                    // sums (024*), odd outputs the differences (135*).
                    __m128 _tmp024a = _mm_add_ps(_out0tm1, _out0tm2);
                    __m128 _tmp135a = _mm_sub_ps(_out0tm1, _out0tm2);

                    __m128 _tmp024b = _mm_add_ps(_out0tm3, _out0tm4);
                    __m128 _tmp135b = _mm_sub_ps(_out0tm3, _out0tm4);

                    __m128 _tmp024c = _mm_add_ps(_out0tm5, _out0tm6);
                    __m128 _tmp135c = _mm_sub_ps(_out0tm5, _out0tm6);

                    __m128 _tmp0m = _mm_add_ps(_mm_add_ps(_out0tm0, _tmp024a), _mm_comp_fmadd_ps(_v32, _tmp024c, _tmp024b));
                    __m128 _tmp2m = _mm_comp_fmadd_ps(_v8, _tmp024c, _mm_comp_fmadd_ps(_v4, _tmp024b, _tmp024a));
                    __m128 _tmp4m = _mm_comp_fmadd_ps(_v2, _tmp024c, _mm_comp_fmadd_ps(_v16, _tmp024b, _tmp024a));

                    _mm_store_ps(tmp[0][m], _tmp0m);
                    _mm_store_ps(tmp[2][m], _tmp2m);
                    _mm_store_ps(tmp[4][m], _tmp4m);

                    __m128 _tmp1m = _mm_comp_fmadd_ps(_v16, _tmp135c, _mm_comp_fmadd_ps(_v2, _tmp135b, _tmp135a));
                    __m128 _tmp3m = _mm_comp_fmadd_ps(_v4, _tmp135c, _mm_comp_fmadd_ps(_v8, _tmp135b, _tmp135a));
                    __m128 _tmp5m = _mm_add_ps(_mm_add_ps(_out0tm7, _tmp135a), _mm_comp_fmadd_ps(_v32, _tmp135b, _tmp135c));

                    _mm_store_ps(tmp[1][m], _tmp1m);
                    _mm_store_ps(tmp[3][m], _tmp3m);
                    _mm_store_ps(tmp[5][m], _tmp5m);

                    // Advance to the next group of 8 planes for column m+1.
                    output0_tm_0 += tiles * 4 * 8;
                    output0_tm_1 += tiles * 4 * 8;
                    output0_tm_2 += tiles * 4 * 8;
                    output0_tm_3 += tiles * 4 * 8;
                    output0_tm_4 += tiles * 4 * 8;
                    output0_tm_5 += tiles * 4 * 8;
                    output0_tm_6 += tiles * 4 * 8;
                    output0_tm_7 += tiles * 4 * 8;
                }

                // Pass 2: apply otm along the other axis — for each of the 6
                // rows m, reduce the 8 values tmp[m][0..7] to 6 spatial
                // outputs, add bias, and store into the destination row.
                for (int m = 0; m < 6; m++)
                {
                    __m128 _tmp00 = _mm_load_ps(tmp[m][0]);
                    __m128 _tmp01 = _mm_load_ps(tmp[m][1]);
                    __m128 _tmp02 = _mm_load_ps(tmp[m][2]);
                    __m128 _tmp03 = _mm_load_ps(tmp[m][3]);
                    __m128 _tmp04 = _mm_load_ps(tmp[m][4]);
                    __m128 _tmp05 = _mm_load_ps(tmp[m][5]);
                    __m128 _tmp06 = _mm_load_ps(tmp[m][6]);
                    __m128 _tmp07 = _mm_load_ps(tmp[m][7]);

                    __m128 _tmp024a = _mm_add_ps(_tmp01, _tmp02);
                    __m128 _tmp135a = _mm_sub_ps(_tmp01, _tmp02);

                    __m128 _tmp024b = _mm_add_ps(_tmp03, _tmp04);
                    __m128 _tmp135b = _mm_sub_ps(_tmp03, _tmp04);

                    __m128 _tmp024c = _mm_add_ps(_tmp05, _tmp06);
                    __m128 _tmp135c = _mm_sub_ps(_tmp05, _tmp06);

                    // Even columns 0/2/4 of the 6-wide output row.
                    __m128 _out00 = _mm_add_ps(_bias0, _mm_add_ps(_mm_add_ps(_tmp00, _tmp024a), _mm_comp_fmadd_ps(_v32, _tmp024c, _tmp024b)));
                    __m128 _out02 = _mm_add_ps(_bias0, _mm_comp_fmadd_ps(_v8, _tmp024c, _mm_comp_fmadd_ps(_v4, _tmp024b, _tmp024a)));
                    __m128 _out04 = _mm_add_ps(_bias0, _mm_comp_fmadd_ps(_v2, _tmp024c, _mm_comp_fmadd_ps(_v16, _tmp024b, _tmp024a)));

                    _mm_store_ps(output0, _out00);
                    _mm_store_ps(output0 + 4 * 2, _out02);
                    _mm_store_ps(output0 + 4 * 4, _out04);

                    // Odd columns 1/3/5.
                    __m128 _out01 = _mm_add_ps(_bias0, _mm_comp_fmadd_ps(_v16, _tmp135c, _mm_comp_fmadd_ps(_v2, _tmp135b, _tmp135a)));
                    __m128 _out03 = _mm_add_ps(_bias0, _mm_comp_fmadd_ps(_v4, _tmp135c, _mm_comp_fmadd_ps(_v8, _tmp135b, _tmp135a)));
                    __m128 _out05 = _mm_add_ps(_bias0, _mm_add_ps(_mm_add_ps(_tmp07, _tmp135a), _mm_comp_fmadd_ps(_v32, _tmp135b, _tmp135c)));

                    _mm_store_ps(output0 + 4, _out01);
                    _mm_store_ps(output0 + 4 * 3, _out03);
                    _mm_store_ps(output0 + 4 * 5, _out05);

                    // Next spatial row of the destination (pack-4: 4 floats
                    // per pixel).
                    output0 += outw * 4;
                }
            }
        }
    }
}
subq $0x1998, %rsp # imm = 0x1998 movq %rdi, 0xa70(%rsp) movq %rsi, 0xa68(%rsp) movq %rdx, 0xa60(%rsp) movq %rcx, 0xa58(%rsp) movq 0xa68(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0xa54(%rsp) movq 0xa68(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0xa50(%rsp) movq 0xa68(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0xa4c(%rsp) movl 0xa54(%rsp), %eax movl $0x6, %ecx cltd idivl %ecx movl %eax, 0xa48(%rsp) movl 0xa50(%rsp), %eax movl $0x6, %ecx cltd idivl %ecx movl %eax, 0xa44(%rsp) movl 0xa48(%rsp), %eax imull 0xa44(%rsp), %eax movl %eax, 0xa40(%rsp) movq 0xa60(%rsp), %rax movq %rax, 0xb10(%rsp) movq 0xb10(%rsp), %rax movq (%rax), %rax movq %rax, 0xa38(%rsp) movl $0x0, 0xa34(%rsp) movl 0xa34(%rsp), %eax cmpl 0xa4c(%rsp), %eax jge 0x62457a movq 0xa70(%rsp), %rcx movl 0xa34(%rsp), %eax leaq 0x9e8(%rsp), %rdx movq %rdx, 0xb28(%rsp) movq %rcx, 0xb20(%rsp) movl %eax, 0xb1c(%rsp) movq 0xb20(%rsp), %rax movq %rax, 0x338(%rsp) movb $0x0, 0xb1b(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0xb1c(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x9e8(%rsp), %r10 movq %r10, 0x1938(%rsp) movl %r9d, 0x1934(%rsp) movl %r8d, 0x1930(%rsp) movl %edi, 0x192c(%rsp) movq %rsi, 0x1920(%rsp) movq %rdx, 0x1918(%rsp) movl %ecx, 0x1914(%rsp) movq %rax, 0x1908(%rsp) movq 0x1938(%rsp), %rcx movq %rcx, 0x330(%rsp) movq 0x1920(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x1918(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x1914(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x1908(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x1934(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x1930(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x192c(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x1990(%rsp) movl $0x10, 0x198c(%rsp) movq 
0x1990(%rsp), %rax movslq 0x198c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x198c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x338(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0xa10(%rsp) cmpl $0x4, 0x28(%rax) jne 0x62202d movq 0x338(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0xa28(%rsp) movb $0x1, 0xb1b(%rsp) testb $0x1, 0xb1b(%rsp) jne 0x622166 leaq 0x9e8(%rsp), %rax movq %rax, 0xb30(%rsp) movq 0xb30(%rsp), %rax movq %rax, 0xb40(%rsp) movq 0xb40(%rsp), %rax movq %rax, 0x328(%rsp) cmpq $0x0, 0x8(%rax) je 0x62210b movq 0x328(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xb3c(%rsp) # imm = 0xFFFFFFFF movl 0xb3c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xb38(%rsp) cmpl $0x1, 0xb38(%rsp) jne 0x62210b movq 0x328(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x6220dc movq 0x328(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x6220da jmp 0x622109 movq 0x328(%rsp), %rax movq (%rax), %rax movq %rax, 0xb48(%rsp) cmpq $0x0, 0xb48(%rsp) je 0x622107 movq 0xb48(%rsp), %rdi callq 0x5e480 jmp 0x622109 jmp 0x62210b movq 0x328(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x622166 movq %rax, %rdi callq 0x5fc90 movq 0xa68(%rsp), %rcx movl 0xa34(%rsp), %eax leaq 0x9a0(%rsp), %rdx movq %rdx, 0xae8(%rsp) movq %rcx, 0xae0(%rsp) movl %eax, 0xadc(%rsp) movq 0xae0(%rsp), %rax movq %rax, 0x318(%rsp) movb $0x0, 0xadb(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0xadc(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x9a0(%rsp), 
%r10 movq %r10, 0x1970(%rsp) movl %r9d, 0x196c(%rsp) movl %r8d, 0x1968(%rsp) movl %edi, 0x1964(%rsp) movq %rsi, 0x1958(%rsp) movq %rdx, 0x1950(%rsp) movl %ecx, 0x194c(%rsp) movq %rax, 0x1940(%rsp) movq 0x1970(%rsp), %rcx movq %rcx, 0x320(%rsp) movq 0x1958(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x1950(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x194c(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x1940(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x196c(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x1968(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x1964(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x1980(%rsp) movl $0x10, 0x197c(%rsp) movq 0x1980(%rsp), %rax movslq 0x197c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x197c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rcx movq 0x320(%rsp), %rax movq %rcx, 0x40(%rax) movq 0x318(%rsp), %rax movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x9c8(%rsp) cmpl $0x4, 0x28(%rax) jne 0x62232a movq 0x318(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x9e0(%rsp) movb $0x1, 0xadb(%rsp) testb $0x1, 0xadb(%rsp) jne 0x622463 leaq 0x9a0(%rsp), %rax movq %rax, 0xaf0(%rsp) movq 0xaf0(%rsp), %rax movq %rax, 0xb00(%rsp) movq 0xb00(%rsp), %rax movq %rax, 0x310(%rsp) cmpq $0x0, 0x8(%rax) je 0x622408 movq 0x310(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xafc(%rsp) # imm = 0xFFFFFFFF movl 0xafc(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xaf8(%rsp) cmpl $0x1, 0xaf8(%rsp) jne 0x622408 movq 0x310(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x6223d9 movq 0x310(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x6223d7 jmp 0x622406 movq 0x310(%rsp), %rax movq (%rax), %rax movq %rax, 0xb50(%rsp) cmpq $0x0, 0xb50(%rsp) je 0x622404 movq 0xb50(%rsp), %rdi callq 
0x5e480 jmp 0x622406 jmp 0x622408 movq 0x310(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x622463 movq %rax, %rdi callq 0x5fc90 jmp 0x622465 cmpq $0x0, 0xa38(%rsp) je 0x6224ab movq 0xa38(%rsp), %rax movl 0xa34(%rsp), %ecx shll $0x2, %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0xb88(%rsp) movq 0xb88(%rsp), %rax vmovups (%rax), %xmm0 vmovaps %xmm0, 0x300(%rsp) jmp 0x6224ca vxorps %xmm0, %xmm0, %xmm0 vmovaps %xmm0, 0x1790(%rsp) vmovaps 0x1790(%rsp), %xmm0 vmovaps %xmm0, 0x300(%rsp) vmovaps 0x300(%rsp), %xmm0 vmovaps %xmm0, 0x980(%rsp) movl $0x42000000, 0x183c(%rsp) # imm = 0x42000000 vbroadcastss 0x183c(%rsp), %xmm0 vmovaps %xmm0, 0x1820(%rsp) vmovaps 0x1820(%rsp), %xmm0 vmovaps %xmm0, 0x670(%rsp) movl $0x41800000, 0x181c(%rsp) # imm = 0x41800000 vbroadcastss 0x181c(%rsp), %xmm0 vmovaps %xmm0, 0x1800(%rsp) vmovaps 0x1800(%rsp), %xmm0 vmovaps %xmm0, 0x660(%rsp) movl $0x41000000, 0x17fc(%rsp) # imm = 0x41000000 vbroadcastss 0x17fc(%rsp), %xmm0 vmovaps %xmm0, 0x17e0(%rsp) vmovaps 0x17e0(%rsp), %xmm0 vmovaps %xmm0, 0x650(%rsp) movl $0x40800000, 0x17dc(%rsp) # imm = 0x40800000 vbroadcastss 0x17dc(%rsp), %xmm0 vmovaps %xmm0, 0x17c0(%rsp) vmovaps 0x17c0(%rsp), %xmm0 vmovaps %xmm0, 0x640(%rsp) movl $0x40000000, 0x17bc(%rsp) # imm = 0x40000000 vbroadcastss 0x17bc(%rsp), %xmm0 vmovaps %xmm0, 0x17a0(%rsp) vmovaps 0x17a0(%rsp), %xmm0 vmovaps %xmm0, 0x630(%rsp) movl $0x0, 0x62c(%rsp) movl 0x62c(%rsp), %eax cmpl 0xa44(%rsp), %eax jge 0x62434a movl $0x0, 0x628(%rsp) movl 0x628(%rsp), %eax cmpl 0xa48(%rsp), %eax jge 0x624332 leaq 0x9e8(%rsp), %rax movq %rax, 0xb08(%rsp) movq 0xb08(%rsp), %rax movq (%rax), %rax movq %rax, 0x2f8(%rsp) movq 0x2f8(%rsp), %rax movl 0x62c(%rsp), %ecx imull 0xa48(%rsp), %ecx addl 0x628(%rsp), %ecx shll $0x2, %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq 
%rcx, %rax movq %rax, 0x620(%rsp) movq 0x620(%rsp), %rax movl 0xa40(%rsp), %ecx shll $0x2, %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x618(%rsp) movq 0x620(%rsp), %rax movl 0xa40(%rsp), %ecx shll $0x2, %ecx shll %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x610(%rsp) movq 0x620(%rsp), %rax movl 0xa40(%rsp), %ecx shll $0x2, %ecx imull $0x3, %ecx, %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x608(%rsp) movq 0x620(%rsp), %rax movl 0xa40(%rsp), %ecx shll $0x2, %ecx shll $0x2, %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x600(%rsp) movq 0x620(%rsp), %rax movl 0xa40(%rsp), %ecx shll $0x2, %ecx imull $0x5, %ecx, %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x5f8(%rsp) movq 0x620(%rsp), %rax movl 0xa40(%rsp), %ecx shll $0x2, %ecx imull $0x6, %ecx, %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x5f0(%rsp) movq 0x620(%rsp), %rax movl 0xa40(%rsp), %ecx shll $0x2, %ecx imull $0x7, %ecx, %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x5e8(%rsp) imull $0x6, 0x62c(%rsp), %eax leaq 0x9a0(%rsp), %rcx movq %rcx, 0xb80(%rsp) movl %eax, 0xb7c(%rsp) movq 0xb80(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0xb7c(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax imull $0x6, 0x628(%rsp), %ecx shll $0x2, %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x5e0(%rsp) movl $0x0, 0x5dc(%rsp) cmpl $0x8, 0x5dc(%rsp) jge 0x62353d movq 0x620(%rsp), %rax movq %rax, 0xd88(%rsp) movq 0xd88(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x5c0(%rsp) movq 0x618(%rsp), %rax movq %rax, 0xd80(%rsp) movq 0xd80(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x5b0(%rsp) movq 0x610(%rsp), %rax movq %rax, 0xd78(%rsp) movq 0xd78(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x5a0(%rsp) movq 0x608(%rsp), %rax movq %rax, 0xd70(%rsp) movq 0xd70(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x590(%rsp) movq 
0x600(%rsp), %rax movq %rax, 0xd68(%rsp) movq 0xd68(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x580(%rsp) movq 0x5f8(%rsp), %rax movq %rax, 0xd60(%rsp) movq 0xd60(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x570(%rsp) movq 0x5f0(%rsp), %rax movq %rax, 0xd58(%rsp) movq 0xd58(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x560(%rsp) movq 0x5e8(%rsp), %rax movq %rax, 0xd50(%rsp) movq 0xd50(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x550(%rsp) vmovaps 0x5b0(%rsp), %xmm1 vmovaps 0x5a0(%rsp), %xmm0 vmovaps %xmm1, 0x1000(%rsp) vmovaps %xmm0, 0xff0(%rsp) vmovaps 0x1000(%rsp), %xmm0 vaddps 0xff0(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x540(%rsp) vmovaps 0x5b0(%rsp), %xmm1 vmovaps 0x5a0(%rsp), %xmm0 vmovaps %xmm1, 0x18f0(%rsp) vmovaps %xmm0, 0x18e0(%rsp) vmovaps 0x18f0(%rsp), %xmm0 vsubps 0x18e0(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x530(%rsp) vmovaps 0x590(%rsp), %xmm1 vmovaps 0x580(%rsp), %xmm0 vmovaps %xmm1, 0xfe0(%rsp) vmovaps %xmm0, 0xfd0(%rsp) vmovaps 0xfe0(%rsp), %xmm0 vaddps 0xfd0(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x520(%rsp) vmovaps 0x590(%rsp), %xmm1 vmovaps 0x580(%rsp), %xmm0 vmovaps %xmm1, 0x18d0(%rsp) vmovaps %xmm0, 0x18c0(%rsp) vmovaps 0x18d0(%rsp), %xmm0 vsubps 0x18c0(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x510(%rsp) vmovaps 0x570(%rsp), %xmm1 vmovaps 0x560(%rsp), %xmm0 vmovaps %xmm1, 0xfc0(%rsp) vmovaps %xmm0, 0xfb0(%rsp) vmovaps 0xfc0(%rsp), %xmm0 vaddps 0xfb0(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x500(%rsp) vmovaps 0x570(%rsp), %xmm1 vmovaps 0x560(%rsp), %xmm0 vmovaps %xmm1, 0x18b0(%rsp) vmovaps %xmm0, 0x18a0(%rsp) vmovaps 0x18b0(%rsp), %xmm0 vsubps 0x18a0(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x4f0(%rsp) vmovaps 0x5c0(%rsp), %xmm1 vmovaps 0x540(%rsp), %xmm0 vmovaps %xmm1, 0xfa0(%rsp) vmovaps %xmm0, 0xf90(%rsp) vmovaps 0xfa0(%rsp), %xmm0 vaddps 0xf90(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x2d0(%rsp) vmovaps 0x670(%rsp), %xmm2 vmovaps 0x500(%rsp), %xmm1 vmovaps 0x520(%rsp), %xmm0 vmovaps %xmm2, 0x13c0(%rsp) vmovaps %xmm1, 0x13b0(%rsp) vmovaps %xmm0, 
0x13a0(%rsp) vmovaps 0x13c0(%rsp), %xmm2 vmovaps 0x13b0(%rsp), %xmm1 vmovaps 0x13a0(%rsp), %xmm0 vmovaps %xmm2, 0x13f0(%rsp) vmovaps %xmm1, 0x13e0(%rsp) vmovaps %xmm0, 0x13d0(%rsp) vmovaps 0x13f0(%rsp), %xmm1 vmovaps 0x13e0(%rsp), %xmm0 vmovaps 0x13d0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x2e0(%rsp) vmovaps 0x2e0(%rsp), %xmm0 vmovaps 0x2d0(%rsp), %xmm1 vmovaps %xmm1, 0xf80(%rsp) vmovaps %xmm0, 0xf70(%rsp) vmovaps 0xf80(%rsp), %xmm0 vaddps 0xf70(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x4e0(%rsp) vmovaps 0x650(%rsp), %xmm0 vmovaps %xmm0, 0x2a0(%rsp) vmovaps 0x500(%rsp), %xmm0 vmovaps %xmm0, 0x2b0(%rsp) vmovaps 0x640(%rsp), %xmm2 vmovaps 0x520(%rsp), %xmm1 vmovaps 0x540(%rsp), %xmm0 vmovaps %xmm2, 0x1390(%rsp) vmovaps %xmm1, 0x1380(%rsp) vmovaps %xmm0, 0x1370(%rsp) vmovaps 0x1390(%rsp), %xmm2 vmovaps 0x1380(%rsp), %xmm1 vmovaps 0x1370(%rsp), %xmm0 vmovaps %xmm2, 0x1420(%rsp) vmovaps %xmm1, 0x1410(%rsp) vmovaps %xmm0, 0x1400(%rsp) vmovaps 0x1420(%rsp), %xmm1 vmovaps 0x1410(%rsp), %xmm0 vmovaps 0x1400(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x2c0(%rsp) vmovaps 0x2c0(%rsp), %xmm0 vmovaps 0x2b0(%rsp), %xmm1 vmovaps 0x2a0(%rsp), %xmm2 vmovaps %xmm2, 0x1360(%rsp) vmovaps %xmm1, 0x1350(%rsp) vmovaps %xmm0, 0x1340(%rsp) vmovaps 0x1360(%rsp), %xmm2 vmovaps 0x1350(%rsp), %xmm1 vmovaps 0x1340(%rsp), %xmm0 vmovaps %xmm2, 0x1450(%rsp) vmovaps %xmm1, 0x1440(%rsp) vmovaps %xmm0, 0x1430(%rsp) vmovaps 0x1450(%rsp), %xmm1 vmovaps 0x1440(%rsp), %xmm0 vmovaps 0x1430(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x290(%rsp) vmovaps 0x290(%rsp), %xmm0 vmovaps %xmm0, 0x4d0(%rsp) vmovaps 0x630(%rsp), %xmm0 vmovaps %xmm0, 0x260(%rsp) vmovaps 0x500(%rsp), %xmm0 vmovaps %xmm0, 0x270(%rsp) vmovaps 0x660(%rsp), %xmm2 vmovaps 0x520(%rsp), %xmm1 vmovaps 0x540(%rsp), %xmm0 vmovaps %xmm2, 0x1330(%rsp) vmovaps %xmm1, 0x1320(%rsp) vmovaps %xmm0, 0x1310(%rsp) 
vmovaps 0x1330(%rsp), %xmm2 vmovaps 0x1320(%rsp), %xmm1 vmovaps 0x1310(%rsp), %xmm0 vmovaps %xmm2, 0x1480(%rsp) vmovaps %xmm1, 0x1470(%rsp) vmovaps %xmm0, 0x1460(%rsp) vmovaps 0x1480(%rsp), %xmm1 vmovaps 0x1470(%rsp), %xmm0 vmovaps 0x1460(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x280(%rsp) vmovaps 0x280(%rsp), %xmm0 vmovaps 0x270(%rsp), %xmm1 vmovaps 0x260(%rsp), %xmm2 vmovaps %xmm2, 0x1300(%rsp) vmovaps %xmm1, 0x12f0(%rsp) vmovaps %xmm0, 0x12e0(%rsp) vmovaps 0x1300(%rsp), %xmm2 vmovaps 0x12f0(%rsp), %xmm1 vmovaps 0x12e0(%rsp), %xmm0 vmovaps %xmm2, 0x14b0(%rsp) vmovaps %xmm1, 0x14a0(%rsp) vmovaps %xmm0, 0x1490(%rsp) vmovaps 0x14b0(%rsp), %xmm1 vmovaps 0x14a0(%rsp), %xmm0 vmovaps 0x1490(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x250(%rsp) vmovaps 0x250(%rsp), %xmm0 vmovaps %xmm0, 0x4c0(%rsp) leaq 0x680(%rsp), %rax movslq 0x5dc(%rsp), %rcx shlq $0x4, %rcx addq %rcx, %rax vmovaps 0x4e0(%rsp), %xmm0 movq %rax, 0xd08(%rsp) vmovaps %xmm0, 0xcf0(%rsp) vmovaps 0xcf0(%rsp), %xmm0 movq 0xd08(%rsp), %rax vmovaps %xmm0, (%rax) leaq 0x680(%rsp), %rax addq $0x100, %rax # imm = 0x100 movslq 0x5dc(%rsp), %rcx shlq $0x4, %rcx addq %rcx, %rax vmovaps 0x4d0(%rsp), %xmm0 movq %rax, 0xce8(%rsp) vmovaps %xmm0, 0xcd0(%rsp) vmovaps 0xcd0(%rsp), %xmm0 movq 0xce8(%rsp), %rax vmovaps %xmm0, (%rax) leaq 0x680(%rsp), %rax addq $0x200, %rax # imm = 0x200 movslq 0x5dc(%rsp), %rcx shlq $0x4, %rcx addq %rcx, %rax vmovaps 0x4c0(%rsp), %xmm0 movq %rax, 0xcc8(%rsp) vmovaps %xmm0, 0xcb0(%rsp) vmovaps 0xcb0(%rsp), %xmm0 movq 0xcc8(%rsp), %rax vmovaps %xmm0, (%rax) vmovaps 0x660(%rsp), %xmm0 vmovaps %xmm0, 0x220(%rsp) vmovaps 0x4f0(%rsp), %xmm0 vmovaps %xmm0, 0x230(%rsp) vmovaps 0x630(%rsp), %xmm2 vmovaps 0x510(%rsp), %xmm1 vmovaps 0x530(%rsp), %xmm0 vmovaps %xmm2, 0x12d0(%rsp) vmovaps %xmm1, 0x12c0(%rsp) vmovaps %xmm0, 0x12b0(%rsp) vmovaps 0x12d0(%rsp), %xmm2 vmovaps 0x12c0(%rsp), %xmm1 vmovaps 
0x12b0(%rsp), %xmm0 vmovaps %xmm2, 0x14e0(%rsp) vmovaps %xmm1, 0x14d0(%rsp) vmovaps %xmm0, 0x14c0(%rsp) vmovaps 0x14e0(%rsp), %xmm1 vmovaps 0x14d0(%rsp), %xmm0 vmovaps 0x14c0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x240(%rsp) vmovaps 0x240(%rsp), %xmm0 vmovaps 0x230(%rsp), %xmm1 vmovaps 0x220(%rsp), %xmm2 vmovaps %xmm2, 0x12a0(%rsp) vmovaps %xmm1, 0x1290(%rsp) vmovaps %xmm0, 0x1280(%rsp) vmovaps 0x12a0(%rsp), %xmm2 vmovaps 0x1290(%rsp), %xmm1 vmovaps 0x1280(%rsp), %xmm0 vmovaps %xmm2, 0x1510(%rsp) vmovaps %xmm1, 0x1500(%rsp) vmovaps %xmm0, 0x14f0(%rsp) vmovaps 0x1510(%rsp), %xmm1 vmovaps 0x1500(%rsp), %xmm0 vmovaps 0x14f0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x210(%rsp) vmovaps 0x210(%rsp), %xmm0 vmovaps %xmm0, 0x4b0(%rsp) vmovaps 0x640(%rsp), %xmm0 vmovaps %xmm0, 0x1e0(%rsp) vmovaps 0x4f0(%rsp), %xmm0 vmovaps %xmm0, 0x1f0(%rsp) vmovaps 0x650(%rsp), %xmm2 vmovaps 0x510(%rsp), %xmm1 vmovaps 0x530(%rsp), %xmm0 vmovaps %xmm2, 0x1270(%rsp) vmovaps %xmm1, 0x1260(%rsp) vmovaps %xmm0, 0x1250(%rsp) vmovaps 0x1270(%rsp), %xmm2 vmovaps 0x1260(%rsp), %xmm1 vmovaps 0x1250(%rsp), %xmm0 vmovaps %xmm2, 0x1540(%rsp) vmovaps %xmm1, 0x1530(%rsp) vmovaps %xmm0, 0x1520(%rsp) vmovaps 0x1540(%rsp), %xmm1 vmovaps 0x1530(%rsp), %xmm0 vmovaps 0x1520(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x200(%rsp) vmovaps 0x200(%rsp), %xmm0 vmovaps 0x1f0(%rsp), %xmm1 vmovaps 0x1e0(%rsp), %xmm2 vmovaps %xmm2, 0x1240(%rsp) vmovaps %xmm1, 0x1230(%rsp) vmovaps %xmm0, 0x1220(%rsp) vmovaps 0x1240(%rsp), %xmm2 vmovaps 0x1230(%rsp), %xmm1 vmovaps 0x1220(%rsp), %xmm0 vmovaps %xmm2, 0x1570(%rsp) vmovaps %xmm1, 0x1560(%rsp) vmovaps %xmm0, 0x1550(%rsp) vmovaps 0x1570(%rsp), %xmm1 vmovaps 0x1560(%rsp), %xmm0 vmovaps 0x1550(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x1d0(%rsp) vmovaps 0x1d0(%rsp), %xmm0 vmovaps %xmm0, 
0x4a0(%rsp) vmovaps 0x550(%rsp), %xmm1 vmovaps 0x530(%rsp), %xmm0 vmovaps %xmm1, 0xf60(%rsp) vmovaps %xmm0, 0xf50(%rsp) vmovaps 0xf60(%rsp), %xmm0 vaddps 0xf50(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x1b0(%rsp) vmovaps 0x670(%rsp), %xmm2 vmovaps 0x510(%rsp), %xmm1 vmovaps 0x4f0(%rsp), %xmm0 vmovaps %xmm2, 0x1210(%rsp) vmovaps %xmm1, 0x1200(%rsp) vmovaps %xmm0, 0x11f0(%rsp) vmovaps 0x1210(%rsp), %xmm2 vmovaps 0x1200(%rsp), %xmm1 vmovaps 0x11f0(%rsp), %xmm0 vmovaps %xmm2, 0x15a0(%rsp) vmovaps %xmm1, 0x1590(%rsp) vmovaps %xmm0, 0x1580(%rsp) vmovaps 0x15a0(%rsp), %xmm1 vmovaps 0x1590(%rsp), %xmm0 vmovaps 0x1580(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x1c0(%rsp) vmovaps 0x1c0(%rsp), %xmm0 vmovaps 0x1b0(%rsp), %xmm1 vmovaps %xmm1, 0xf40(%rsp) vmovaps %xmm0, 0xf30(%rsp) vmovaps 0xf40(%rsp), %xmm0 vaddps 0xf30(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x490(%rsp) leaq 0x680(%rsp), %rax addq $0x80, %rax movslq 0x5dc(%rsp), %rcx shlq $0x4, %rcx addq %rcx, %rax vmovaps 0x4b0(%rsp), %xmm0 movq %rax, 0xca8(%rsp) vmovaps %xmm0, 0xc90(%rsp) vmovaps 0xc90(%rsp), %xmm0 movq 0xca8(%rsp), %rax vmovaps %xmm0, (%rax) leaq 0x680(%rsp), %rax addq $0x180, %rax # imm = 0x180 movslq 0x5dc(%rsp), %rcx shlq $0x4, %rcx addq %rcx, %rax vmovaps 0x4a0(%rsp), %xmm0 movq %rax, 0xc88(%rsp) vmovaps %xmm0, 0xc70(%rsp) vmovaps 0xc70(%rsp), %xmm0 movq 0xc88(%rsp), %rax vmovaps %xmm0, (%rax) leaq 0x680(%rsp), %rax addq $0x280, %rax # imm = 0x280 movslq 0x5dc(%rsp), %rcx shlq $0x4, %rcx addq %rcx, %rax vmovaps 0x490(%rsp), %xmm0 movq %rax, 0xc68(%rsp) vmovaps %xmm0, 0xc50(%rsp) vmovaps 0xc50(%rsp), %xmm0 movq 0xc68(%rsp), %rax vmovaps %xmm0, (%rax) movl 0xa40(%rsp), %ecx shll $0x2, %ecx shll $0x3, %ecx movq 0x620(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x620(%rsp) movl 0xa40(%rsp), %ecx shll $0x2, %ecx shll $0x3, %ecx movq 0x618(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x618(%rsp) movl 0xa40(%rsp), 
%ecx shll $0x2, %ecx shll $0x3, %ecx movq 0x610(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x610(%rsp) movl 0xa40(%rsp), %ecx shll $0x2, %ecx shll $0x3, %ecx movq 0x608(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x608(%rsp) movl 0xa40(%rsp), %ecx shll $0x2, %ecx shll $0x3, %ecx movq 0x600(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x600(%rsp) movl 0xa40(%rsp), %ecx shll $0x2, %ecx shll $0x3, %ecx movq 0x5f8(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x5f8(%rsp) movl 0xa40(%rsp), %ecx shll $0x2, %ecx shll $0x3, %ecx movq 0x5f0(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x5f0(%rsp) movl 0xa40(%rsp), %ecx shll $0x2, %ecx shll $0x3, %ecx movq 0x5e8(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x5e8(%rsp) movl 0x5dc(%rsp), %eax addl $0x1, %eax movl %eax, 0x5dc(%rsp) jmp 0x6227d7 movl $0x0, 0x48c(%rsp) cmpl $0x6, 0x48c(%rsp) jge 0x62431a movslq 0x48c(%rsp), %rcx leaq 0x680(%rsp), %rax shlq $0x7, %rcx addq %rcx, %rax movq %rax, 0xd48(%rsp) movq 0xd48(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x470(%rsp) movslq 0x48c(%rsp), %rcx leaq 0x680(%rsp), %rax shlq $0x7, %rcx addq %rcx, %rax addq $0x10, %rax movq %rax, 0xd40(%rsp) movq 0xd40(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x460(%rsp) movslq 0x48c(%rsp), %rcx leaq 0x680(%rsp), %rax shlq $0x7, %rcx addq %rcx, %rax addq $0x20, %rax movq %rax, 0xd38(%rsp) movq 0xd38(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x450(%rsp) movslq 0x48c(%rsp), %rcx leaq 0x680(%rsp), %rax shlq $0x7, %rcx addq %rcx, %rax addq $0x30, %rax movq %rax, 0xd30(%rsp) movq 0xd30(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x440(%rsp) movslq 0x48c(%rsp), %rcx leaq 0x680(%rsp), %rax shlq $0x7, %rcx addq %rcx, %rax addq $0x40, %rax movq %rax, 0xd28(%rsp) movq 0xd28(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x430(%rsp) movslq 0x48c(%rsp), %rcx leaq 
0x680(%rsp), %rax shlq $0x7, %rcx addq %rcx, %rax addq $0x50, %rax movq %rax, 0xd20(%rsp) movq 0xd20(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x420(%rsp) movslq 0x48c(%rsp), %rcx leaq 0x680(%rsp), %rax shlq $0x7, %rcx addq %rcx, %rax addq $0x60, %rax movq %rax, 0xd18(%rsp) movq 0xd18(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x410(%rsp) movslq 0x48c(%rsp), %rcx leaq 0x680(%rsp), %rax shlq $0x7, %rcx addq %rcx, %rax addq $0x70, %rax movq %rax, 0xd10(%rsp) movq 0xd10(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x400(%rsp) vmovaps 0x460(%rsp), %xmm1 vmovaps 0x450(%rsp), %xmm0 vmovaps %xmm1, 0xf20(%rsp) vmovaps %xmm0, 0xf10(%rsp) vmovaps 0xf20(%rsp), %xmm0 vaddps 0xf10(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x3f0(%rsp) vmovaps 0x460(%rsp), %xmm1 vmovaps 0x450(%rsp), %xmm0 vmovaps %xmm1, 0x1890(%rsp) vmovaps %xmm0, 0x1880(%rsp) vmovaps 0x1890(%rsp), %xmm0 vsubps 0x1880(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x3e0(%rsp) vmovaps 0x440(%rsp), %xmm1 vmovaps 0x430(%rsp), %xmm0 vmovaps %xmm1, 0xf00(%rsp) vmovaps %xmm0, 0xef0(%rsp) vmovaps 0xf00(%rsp), %xmm0 vaddps 0xef0(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x3d0(%rsp) vmovaps 0x440(%rsp), %xmm1 vmovaps 0x430(%rsp), %xmm0 vmovaps %xmm1, 0x1870(%rsp) vmovaps %xmm0, 0x1860(%rsp) vmovaps 0x1870(%rsp), %xmm0 vsubps 0x1860(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x3c0(%rsp) vmovaps 0x420(%rsp), %xmm1 vmovaps 0x410(%rsp), %xmm0 vmovaps %xmm1, 0xee0(%rsp) vmovaps %xmm0, 0xed0(%rsp) vmovaps 0xee0(%rsp), %xmm0 vaddps 0xed0(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x3b0(%rsp) vmovaps 0x420(%rsp), %xmm1 vmovaps 0x410(%rsp), %xmm0 vmovaps %xmm1, 0x1850(%rsp) vmovaps %xmm0, 0x1840(%rsp) vmovaps 0x1850(%rsp), %xmm0 vsubps 0x1840(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x3a0(%rsp) vmovaps 0x980(%rsp), %xmm0 vmovaps %xmm0, 0x180(%rsp) vmovaps 0x470(%rsp), %xmm1 vmovaps 0x3f0(%rsp), %xmm0 vmovaps %xmm1, 0xec0(%rsp) vmovaps %xmm0, 0xeb0(%rsp) vmovaps 0xec0(%rsp), %xmm0 vaddps 0xeb0(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x190(%rsp) vmovaps 0x670(%rsp), 
%xmm2 vmovaps 0x3b0(%rsp), %xmm1 vmovaps 0x3d0(%rsp), %xmm0 vmovaps %xmm2, 0x11e0(%rsp) vmovaps %xmm1, 0x11d0(%rsp) vmovaps %xmm0, 0x11c0(%rsp) vmovaps 0x11e0(%rsp), %xmm2 vmovaps 0x11d0(%rsp), %xmm1 vmovaps 0x11c0(%rsp), %xmm0 vmovaps %xmm2, 0x15d0(%rsp) vmovaps %xmm1, 0x15c0(%rsp) vmovaps %xmm0, 0x15b0(%rsp) vmovaps 0x15d0(%rsp), %xmm1 vmovaps 0x15c0(%rsp), %xmm0 vmovaps 0x15b0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x1a0(%rsp) vmovaps 0x180(%rsp), %xmm1 vmovaps 0x1a0(%rsp), %xmm0 vmovaps 0x190(%rsp), %xmm2 vmovaps %xmm2, 0xea0(%rsp) vmovaps %xmm0, 0xe90(%rsp) vmovaps 0xea0(%rsp), %xmm0 vaddps 0xe90(%rsp), %xmm0, %xmm0 vmovaps %xmm1, 0xe80(%rsp) vmovaps %xmm0, 0xe70(%rsp) vmovaps 0xe80(%rsp), %xmm0 vaddps 0xe70(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x390(%rsp) vmovaps 0x980(%rsp), %xmm0 vmovaps %xmm0, 0x140(%rsp) vmovaps 0x650(%rsp), %xmm0 vmovaps %xmm0, 0x150(%rsp) vmovaps 0x3b0(%rsp), %xmm0 vmovaps %xmm0, 0x160(%rsp) vmovaps 0x640(%rsp), %xmm2 vmovaps 0x3d0(%rsp), %xmm1 vmovaps 0x3f0(%rsp), %xmm0 vmovaps %xmm2, 0x11b0(%rsp) vmovaps %xmm1, 0x11a0(%rsp) vmovaps %xmm0, 0x1190(%rsp) vmovaps 0x11b0(%rsp), %xmm2 vmovaps 0x11a0(%rsp), %xmm1 vmovaps 0x1190(%rsp), %xmm0 vmovaps %xmm2, 0x1600(%rsp) vmovaps %xmm1, 0x15f0(%rsp) vmovaps %xmm0, 0x15e0(%rsp) vmovaps 0x1600(%rsp), %xmm1 vmovaps 0x15f0(%rsp), %xmm0 vmovaps 0x15e0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x170(%rsp) vmovaps 0x170(%rsp), %xmm0 vmovaps 0x160(%rsp), %xmm1 vmovaps 0x150(%rsp), %xmm2 vmovaps %xmm2, 0x1180(%rsp) vmovaps %xmm1, 0x1170(%rsp) vmovaps %xmm0, 0x1160(%rsp) vmovaps 0x1180(%rsp), %xmm2 vmovaps 0x1170(%rsp), %xmm1 vmovaps 0x1160(%rsp), %xmm0 vmovaps %xmm2, 0x1630(%rsp) vmovaps %xmm1, 0x1620(%rsp) vmovaps %xmm0, 0x1610(%rsp) vmovaps 0x1630(%rsp), %xmm1 vmovaps 0x1620(%rsp), %xmm0 vmovaps 0x1610(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x130(%rsp) 
vmovaps 0x130(%rsp), %xmm0 vmovaps 0x140(%rsp), %xmm1 vmovaps %xmm1, 0xe60(%rsp) vmovaps %xmm0, 0xe50(%rsp) vmovaps 0xe60(%rsp), %xmm0 vaddps 0xe50(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x380(%rsp) vmovaps 0x980(%rsp), %xmm0 vmovaps %xmm0, 0xf0(%rsp) vmovaps 0x630(%rsp), %xmm0 vmovaps %xmm0, 0x100(%rsp) vmovaps 0x3b0(%rsp), %xmm0 vmovaps %xmm0, 0x110(%rsp) vmovaps 0x660(%rsp), %xmm2 vmovaps 0x3d0(%rsp), %xmm1 vmovaps 0x3f0(%rsp), %xmm0 vmovaps %xmm2, 0x1150(%rsp) vmovaps %xmm1, 0x1140(%rsp) vmovaps %xmm0, 0x1130(%rsp) vmovaps 0x1150(%rsp), %xmm2 vmovaps 0x1140(%rsp), %xmm1 vmovaps 0x1130(%rsp), %xmm0 vmovaps %xmm2, 0x1660(%rsp) vmovaps %xmm1, 0x1650(%rsp) vmovaps %xmm0, 0x1640(%rsp) vmovaps 0x1660(%rsp), %xmm1 vmovaps 0x1650(%rsp), %xmm0 vmovaps 0x1640(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x120(%rsp) vmovaps 0x120(%rsp), %xmm0 vmovaps 0x110(%rsp), %xmm1 vmovaps 0x100(%rsp), %xmm2 vmovaps %xmm2, 0x1120(%rsp) vmovaps %xmm1, 0x1110(%rsp) vmovaps %xmm0, 0x1100(%rsp) vmovaps 0x1120(%rsp), %xmm2 vmovaps 0x1110(%rsp), %xmm1 vmovaps 0x1100(%rsp), %xmm0 vmovaps %xmm2, 0x1690(%rsp) vmovaps %xmm1, 0x1680(%rsp) vmovaps %xmm0, 0x1670(%rsp) vmovaps 0x1690(%rsp), %xmm1 vmovaps 0x1680(%rsp), %xmm0 vmovaps 0x1670(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0xe0(%rsp) vmovaps 0xe0(%rsp), %xmm0 vmovaps 0xf0(%rsp), %xmm1 vmovaps %xmm1, 0xe40(%rsp) vmovaps %xmm0, 0xe30(%rsp) vmovaps 0xe40(%rsp), %xmm0 vaddps 0xe30(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x370(%rsp) movq 0x5e0(%rsp), %rax vmovaps 0x390(%rsp), %xmm0 movq %rax, 0xc48(%rsp) vmovaps %xmm0, 0xc30(%rsp) vmovaps 0xc30(%rsp), %xmm0 movq 0xc48(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x5e0(%rsp), %rax addq $0x20, %rax vmovaps 0x380(%rsp), %xmm0 movq %rax, 0xc28(%rsp) vmovaps %xmm0, 0xc10(%rsp) vmovaps 0xc10(%rsp), %xmm0 movq 0xc28(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x5e0(%rsp), %rax addq $0x40, %rax vmovaps 0x370(%rsp), %xmm0 movq %rax, 
0xc08(%rsp) vmovaps %xmm0, 0xbf0(%rsp) vmovaps 0xbf0(%rsp), %xmm0 movq 0xc08(%rsp), %rax vmovaps %xmm0, (%rax) vmovaps 0x980(%rsp), %xmm0 vmovaps %xmm0, 0xa0(%rsp) vmovaps 0x660(%rsp), %xmm0 vmovaps %xmm0, 0xb0(%rsp) vmovaps 0x3a0(%rsp), %xmm0 vmovaps %xmm0, 0xc0(%rsp) vmovaps 0x630(%rsp), %xmm2 vmovaps 0x3c0(%rsp), %xmm1 vmovaps 0x3e0(%rsp), %xmm0 vmovaps %xmm2, 0x10f0(%rsp) vmovaps %xmm1, 0x10e0(%rsp) vmovaps %xmm0, 0x10d0(%rsp) vmovaps 0x10f0(%rsp), %xmm2 vmovaps 0x10e0(%rsp), %xmm1 vmovaps 0x10d0(%rsp), %xmm0 vmovaps %xmm2, 0x16c0(%rsp) vmovaps %xmm1, 0x16b0(%rsp) vmovaps %xmm0, 0x16a0(%rsp) vmovaps 0x16c0(%rsp), %xmm1 vmovaps 0x16b0(%rsp), %xmm0 vmovaps 0x16a0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0xd0(%rsp) vmovaps 0xd0(%rsp), %xmm0 vmovaps 0xc0(%rsp), %xmm1 vmovaps 0xb0(%rsp), %xmm2 vmovaps %xmm2, 0x10c0(%rsp) vmovaps %xmm1, 0x10b0(%rsp) vmovaps %xmm0, 0x10a0(%rsp) vmovaps 0x10c0(%rsp), %xmm2 vmovaps 0x10b0(%rsp), %xmm1 vmovaps 0x10a0(%rsp), %xmm0 vmovaps %xmm2, 0x16f0(%rsp) vmovaps %xmm1, 0x16e0(%rsp) vmovaps %xmm0, 0x16d0(%rsp) vmovaps 0x16f0(%rsp), %xmm1 vmovaps 0x16e0(%rsp), %xmm0 vmovaps 0x16d0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x90(%rsp) vmovaps 0x90(%rsp), %xmm0 vmovaps 0xa0(%rsp), %xmm1 vmovaps %xmm1, 0xe20(%rsp) vmovaps %xmm0, 0xe10(%rsp) vmovaps 0xe20(%rsp), %xmm0 vaddps 0xe10(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x360(%rsp) vmovaps 0x980(%rsp), %xmm0 vmovaps %xmm0, 0x50(%rsp) vmovaps 0x640(%rsp), %xmm0 vmovaps %xmm0, 0x60(%rsp) vmovaps 0x3a0(%rsp), %xmm0 vmovaps %xmm0, 0x70(%rsp) vmovaps 0x650(%rsp), %xmm2 vmovaps 0x3c0(%rsp), %xmm1 vmovaps 0x3e0(%rsp), %xmm0 vmovaps %xmm2, 0x1090(%rsp) vmovaps %xmm1, 0x1080(%rsp) vmovaps %xmm0, 0x1070(%rsp) vmovaps 0x1090(%rsp), %xmm2 vmovaps 0x1080(%rsp), %xmm1 vmovaps 0x1070(%rsp), %xmm0 vmovaps %xmm2, 0x1720(%rsp) vmovaps %xmm1, 0x1710(%rsp) vmovaps %xmm0, 0x1700(%rsp) vmovaps 0x1720(%rsp), %xmm1 vmovaps 
0x1710(%rsp), %xmm0 vmovaps 0x1700(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x80(%rsp) vmovaps 0x80(%rsp), %xmm0 vmovaps 0x70(%rsp), %xmm1 vmovaps 0x60(%rsp), %xmm2 vmovaps %xmm2, 0x1060(%rsp) vmovaps %xmm1, 0x1050(%rsp) vmovaps %xmm0, 0x1040(%rsp) vmovaps 0x1060(%rsp), %xmm2 vmovaps 0x1050(%rsp), %xmm1 vmovaps 0x1040(%rsp), %xmm0 vmovaps %xmm2, 0x1750(%rsp) vmovaps %xmm1, 0x1740(%rsp) vmovaps %xmm0, 0x1730(%rsp) vmovaps 0x1750(%rsp), %xmm1 vmovaps 0x1740(%rsp), %xmm0 vmovaps 0x1730(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x40(%rsp) vmovaps 0x40(%rsp), %xmm0 vmovaps 0x50(%rsp), %xmm1 vmovaps %xmm1, 0xe00(%rsp) vmovaps %xmm0, 0xdf0(%rsp) vmovaps 0xe00(%rsp), %xmm0 vaddps 0xdf0(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x350(%rsp) vmovaps 0x980(%rsp), %xmm0 vmovaps %xmm0, 0x10(%rsp) vmovaps 0x400(%rsp), %xmm1 vmovaps 0x3e0(%rsp), %xmm0 vmovaps %xmm1, 0xde0(%rsp) vmovaps %xmm0, 0xdd0(%rsp) vmovaps 0xde0(%rsp), %xmm0 vaddps 0xdd0(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x20(%rsp) vmovaps 0x670(%rsp), %xmm2 vmovaps 0x3c0(%rsp), %xmm1 vmovaps 0x3a0(%rsp), %xmm0 vmovaps %xmm2, 0x1030(%rsp) vmovaps %xmm1, 0x1020(%rsp) vmovaps %xmm0, 0x1010(%rsp) vmovaps 0x1030(%rsp), %xmm2 vmovaps 0x1020(%rsp), %xmm1 vmovaps 0x1010(%rsp), %xmm0 vmovaps %xmm2, 0x1780(%rsp) vmovaps %xmm1, 0x1770(%rsp) vmovaps %xmm0, 0x1760(%rsp) vmovaps 0x1780(%rsp), %xmm1 vmovaps 0x1770(%rsp), %xmm0 vmovaps 0x1760(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x30(%rsp) vmovaps 0x10(%rsp), %xmm1 vmovaps 0x30(%rsp), %xmm0 vmovaps 0x20(%rsp), %xmm2 vmovaps %xmm2, 0xdc0(%rsp) vmovaps %xmm0, 0xdb0(%rsp) vmovaps 0xdc0(%rsp), %xmm0 vaddps 0xdb0(%rsp), %xmm0, %xmm0 vmovaps %xmm1, 0xda0(%rsp) vmovaps %xmm0, 0xd90(%rsp) vmovaps 0xda0(%rsp), %xmm0 vaddps 0xd90(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x340(%rsp) movq 0x5e0(%rsp), %rax addq $0x10, %rax vmovaps 0x360(%rsp), %xmm0 movq %rax, 
0xbe8(%rsp) vmovaps %xmm0, 0xbd0(%rsp) vmovaps 0xbd0(%rsp), %xmm0 movq 0xbe8(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x5e0(%rsp), %rax addq $0x30, %rax vmovaps 0x350(%rsp), %xmm0 movq %rax, 0xbc8(%rsp) vmovaps %xmm0, 0xbb0(%rsp) vmovaps 0xbb0(%rsp), %xmm0 movq 0xbc8(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x5e0(%rsp), %rax addq $0x50, %rax vmovaps 0x340(%rsp), %xmm0 movq %rax, 0xba8(%rsp) vmovaps %xmm0, 0xb90(%rsp) vmovaps 0xb90(%rsp), %xmm0 movq 0xba8(%rsp), %rax vmovaps %xmm0, (%rax) movl 0xa54(%rsp), %ecx shll $0x2, %ecx movq 0x5e0(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x5e0(%rsp) movl 0x48c(%rsp), %eax addl $0x1, %eax movl %eax, 0x48c(%rsp) jmp 0x623548 jmp 0x62431c movl 0x628(%rsp), %eax addl $0x1, %eax movl %eax, 0x628(%rsp) jmp 0x6225f6 jmp 0x624334 movl 0x62c(%rsp), %eax addl $0x1, %eax movl %eax, 0x62c(%rsp) jmp 0x6225d7 leaq 0x9a0(%rsp), %rax movq %rax, 0xa78(%rsp) movq 0xa78(%rsp), %rax movq %rax, 0xad0(%rsp) movq 0xad0(%rsp), %rax movq %rax, 0x8(%rsp) cmpq $0x0, 0x8(%rax) je 0x624403 movq 0x8(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xacc(%rsp) # imm = 0xFFFFFFFF movl 0xacc(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xac8(%rsp) cmpl $0x1, 0xac8(%rsp) jne 0x624403 movq 0x8(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x6243d7 movq 0x8(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x6243d5 jmp 0x624401 movq 0x8(%rsp), %rax movq (%rax), %rax movq %rax, 0xb58(%rsp) cmpq $0x0, 0xb58(%rsp) je 0x6243ff movq 0xb58(%rsp), %rdi callq 0x5e480 jmp 0x624401 jmp 0x624403 movq 0x8(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x62445b movq %rax, %rdi callq 0x5fc90 leaq 0x9e8(%rsp), %rax movq %rax, 0xa88(%rsp) movq 0xa88(%rsp), %rax movq %rax, 0xab0(%rsp) movq 0xab0(%rsp), %rax movq %rax, 
(%rsp) cmpq $0x0, 0x8(%rax) je 0x62450b movq (%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xaac(%rsp) # imm = 0xFFFFFFFF movl 0xaac(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xaa8(%rsp) cmpl $0x1, 0xaa8(%rsp) jne 0x62450b movq (%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x6244e0 movq (%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x6244de jmp 0x624509 movq (%rsp), %rax movq (%rax), %rax movq %rax, 0xb68(%rsp) cmpq $0x0, 0xb68(%rsp) je 0x624507 movq 0xb68(%rsp), %rdi callq 0x5e480 jmp 0x624509 jmp 0x62450b movq (%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x624562 movq %rax, %rdi callq 0x5fc90 jmp 0x624564 movl 0xa34(%rsp), %eax addl $0x1, %eax movl %eax, 0xa34(%rsp) jmp 0x621e5d addq $0x1998, %rsp # imm = 0x1998 retq nopw %cs:(%rax,%rax)
/ysh329[P]ncnn/src/layer/x86/convolution_winograd_transform_pack4.h
ncnn::Deconvolution::load_model(ncnn::ModelBin const&)
// Load the deconvolution weights (and the optional bias) from the model binary.
// Returns 0 on success, -100 when a required blob fails to load.
int Deconvolution::load_model(const ModelBin& mb)
{
    // load type 0 = auto-detect storage type for the weight blob
    weight_data = mb.load(weight_data_size, 0);
    if (weight_data.empty())
        return -100;

    if (!bias_term)
        return 0;

    // load type 1 = raw float32 for the bias blob
    bias_data = mb.load(num_output, 1);
    return bias_data.empty() ? -100 : 0;
}
subq $0x238, %rsp # imm = 0x238 movq %rdi, 0x118(%rsp) movq %rsi, 0x110(%rsp) movq 0x118(%rsp), %rax movq %rax, 0x60(%rsp) movq 0x110(%rsp), %rsi movl 0x110(%rax), %edx movq (%rsi), %rax leaq 0xc8(%rsp), %rdi xorl %ecx, %ecx callq *0x10(%rax) movq 0x60(%rsp), %rax addq $0x160, %rax # imm = 0x160 movq %rax, 0x178(%rsp) leaq 0xc8(%rsp), %rax movq %rax, 0x170(%rsp) movq 0x178(%rsp), %rax movq %rax, 0x68(%rsp) cmpq 0x170(%rsp), %rax jne 0x940bee movq 0x68(%rsp), %rax movq %rax, 0x180(%rsp) jmp 0x940dc9 movq 0x170(%rsp), %rax cmpq $0x0, 0x8(%rax) je 0x940c26 movq 0x170(%rsp), %rax movq 0x8(%rax), %rcx movl $0x1, 0x16c(%rsp) movl 0x16c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x168(%rsp) movq 0x68(%rsp), %rax movq %rax, 0x1a0(%rsp) movq 0x1a0(%rsp), %rax movq %rax, 0x58(%rsp) cmpq $0x0, 0x8(%rax) je 0x940ccc movq 0x58(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x19c(%rsp) # imm = 0xFFFFFFFF movl 0x19c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x198(%rsp) cmpl $0x1, 0x198(%rsp) jne 0x940ccc movq 0x58(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x940ca0 movq 0x58(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x940c9e jmp 0x940cca movq 0x58(%rsp), %rax movq (%rax), %rax movq %rax, 0x220(%rsp) cmpq $0x0, 0x220(%rsp) je 0x940cc8 movq 0x220(%rsp), %rdi callq 0x5e480 jmp 0x940cca jmp 0x940ccc movq 0x58(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) movq 0x68(%rsp), %rax movq 0x170(%rsp), %rcx movq (%rcx), %rcx movq %rcx, (%rax) movq 0x170(%rsp), %rcx movq 0x8(%rcx), %rcx movq %rcx, 0x8(%rax) movq 0x170(%rsp), %rcx movq 0x10(%rcx), %rcx movq %rcx, 0x10(%rax) movq 0x170(%rsp), %rcx movl 0x18(%rcx), %ecx movl %ecx, 0x18(%rax) movq 0x170(%rsp), %rcx movq 0x20(%rcx), %rcx movq %rcx, 0x20(%rax) movq 0x170(%rsp), %rcx movl 0x28(%rcx), 
%ecx movl %ecx, 0x28(%rax) movq 0x170(%rsp), %rcx movl 0x2c(%rcx), %ecx movl %ecx, 0x2c(%rax) movq 0x170(%rsp), %rcx movl 0x30(%rcx), %ecx movl %ecx, 0x30(%rax) movq 0x170(%rsp), %rcx movl 0x34(%rcx), %ecx movl %ecx, 0x34(%rax) movq 0x170(%rsp), %rcx movl 0x38(%rcx), %ecx movl %ecx, 0x38(%rax) movq 0x170(%rsp), %rcx movq 0x40(%rcx), %rcx movq %rcx, 0x40(%rax) movq %rax, 0x180(%rsp) leaq 0xc8(%rsp), %rax movq %rax, 0x128(%rsp) movq 0x128(%rsp), %rax movq %rax, 0x1f0(%rsp) movq 0x1f0(%rsp), %rax movq %rax, 0x50(%rsp) cmpq $0x0, 0x8(%rax) je 0x940e82 movq 0x50(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x1ec(%rsp) # imm = 0xFFFFFFFF movl 0x1ec(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1e8(%rsp) cmpl $0x1, 0x1e8(%rsp) jne 0x940e82 movq 0x50(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x940e56 movq 0x50(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x940e54 jmp 0x940e80 movq 0x50(%rsp), %rax movq (%rax), %rax movq %rax, 0x1f8(%rsp) cmpq $0x0, 0x1f8(%rsp) je 0x940e7e movq 0x1f8(%rsp), %rdi callq 0x5e480 jmp 0x940e80 jmp 0x940e82 movq 0x50(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x940eda movq %rax, %rdi callq 0x5fc90 movq 0x60(%rsp), %rax addq $0x160, %rax # imm = 0x160 movq %rax, 0x190(%rsp) movq 0x190(%rsp), %rcx movq %rcx, 0x40(%rsp) movb $0x1, %al cmpq $0x0, (%rcx) movb %al, 0x4f(%rsp) je 0x940f32 movq 0x40(%rsp), %rax movq %rax, 0x228(%rsp) movq 0x228(%rsp), %rcx movq 0x40(%rcx), %rax movslq 0x38(%rcx), %rcx imulq %rcx, %rax cmpq $0x0, %rax sete %al movb %al, 0x4f(%rsp) movb 0x4f(%rsp), %al testb $0x1, %al jne 0x940f3f jmp 0x941079 movl $0xffffff9c, 0x124(%rsp) # imm = 0xFFFFFF9C jmp 0x941586 movq %rax, %rcx movl %edx, %eax movq %rcx, 0xc0(%rsp) movl %eax, 0xbc(%rsp) leaq 0xc8(%rsp), %rax movq %rax, 0x130(%rsp) 
movq 0x130(%rsp), %rax movq %rax, 0x1e0(%rsp) movq 0x1e0(%rsp), %rax movq %rax, 0x38(%rsp) cmpq $0x0, 0x8(%rax) je 0x94101c movq 0x38(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x1dc(%rsp) # imm = 0xFFFFFFFF movl 0x1dc(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1d8(%rsp) cmpl $0x1, 0x1d8(%rsp) jne 0x94101c movq 0x38(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x940ff0 movq 0x38(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x940fee jmp 0x94101a movq 0x38(%rsp), %rax movq (%rax), %rax movq %rax, 0x200(%rsp) cmpq $0x0, 0x200(%rsp) je 0x941018 movq 0x200(%rsp), %rdi callq 0x5e480 jmp 0x94101a jmp 0x94101c movq 0x38(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x941074 movq %rax, %rdi callq 0x5fc90 jmp 0x941595 movq 0x60(%rsp), %rax cmpl $0x0, 0x10c(%rax) je 0x94157b movq 0x60(%rsp), %rax movq 0x110(%rsp), %rsi movl 0xd0(%rax), %edx movq (%rsi), %rax leaq 0x70(%rsp), %rdi movl $0x1, %ecx callq *0x10(%rax) movq 0x60(%rsp), %rax addq $0x1a8, %rax # imm = 0x1A8 movq %rax, 0x158(%rsp) leaq 0x70(%rsp), %rax movq %rax, 0x150(%rsp) movq 0x158(%rsp), %rax movq %rax, 0x30(%rsp) cmpq 0x150(%rsp), %rax jne 0x9410f7 movq 0x30(%rsp), %rax movq %rax, 0x160(%rsp) jmp 0x9412d2 movq 0x150(%rsp), %rax cmpq $0x0, 0x8(%rax) je 0x94112f movq 0x150(%rsp), %rax movq 0x8(%rax), %rcx movl $0x1, 0x14c(%rsp) movl 0x14c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x148(%rsp) movq 0x30(%rsp), %rax movq %rax, 0x1b0(%rsp) movq 0x1b0(%rsp), %rax movq %rax, 0x28(%rsp) cmpq $0x0, 0x8(%rax) je 0x9411d5 movq 0x28(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x1ac(%rsp) # imm = 0xFFFFFFFF movl 0x1ac(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1a8(%rsp) cmpl $0x1, 0x1a8(%rsp) jne 0x9411d5 movq 0x28(%rsp), %rax cmpq $0x0, 0x20(%rax) je 
0x9411a9 movq 0x28(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x9411a7 jmp 0x9411d3 movq 0x28(%rsp), %rax movq (%rax), %rax movq %rax, 0x218(%rsp) cmpq $0x0, 0x218(%rsp) je 0x9411d1 movq 0x218(%rsp), %rdi callq 0x5e480 jmp 0x9411d3 jmp 0x9411d5 movq 0x28(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) movq 0x30(%rsp), %rax movq 0x150(%rsp), %rcx movq (%rcx), %rcx movq %rcx, (%rax) movq 0x150(%rsp), %rcx movq 0x8(%rcx), %rcx movq %rcx, 0x8(%rax) movq 0x150(%rsp), %rcx movq 0x10(%rcx), %rcx movq %rcx, 0x10(%rax) movq 0x150(%rsp), %rcx movl 0x18(%rcx), %ecx movl %ecx, 0x18(%rax) movq 0x150(%rsp), %rcx movq 0x20(%rcx), %rcx movq %rcx, 0x20(%rax) movq 0x150(%rsp), %rcx movl 0x28(%rcx), %ecx movl %ecx, 0x28(%rax) movq 0x150(%rsp), %rcx movl 0x2c(%rcx), %ecx movl %ecx, 0x2c(%rax) movq 0x150(%rsp), %rcx movl 0x30(%rcx), %ecx movl %ecx, 0x30(%rax) movq 0x150(%rsp), %rcx movl 0x34(%rcx), %ecx movl %ecx, 0x34(%rax) movq 0x150(%rsp), %rcx movl 0x38(%rcx), %ecx movl %ecx, 0x38(%rax) movq 0x150(%rsp), %rcx movq 0x40(%rcx), %rcx movq %rcx, 0x40(%rax) movq %rax, 0x160(%rsp) leaq 0x70(%rsp), %rax movq %rax, 0x138(%rsp) movq 0x138(%rsp), %rax movq %rax, 0x1d0(%rsp) movq 0x1d0(%rsp), %rax movq %rax, 0x20(%rsp) cmpq $0x0, 0x8(%rax) je 0x941388 movq 0x20(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x1cc(%rsp) # imm = 0xFFFFFFFF movl 0x1cc(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1c8(%rsp) cmpl $0x1, 0x1c8(%rsp) jne 0x941388 movq 0x20(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x94135c movq 0x20(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x94135a jmp 0x941386 movq 0x20(%rsp), %rax movq (%rax), %rax movq %rax, 0x208(%rsp) cmpq $0x0, 0x208(%rsp) je 0x941384 movq 0x208(%rsp), 
%rdi callq 0x5e480 jmp 0x941386 jmp 0x941388 movq 0x20(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x9413e0 movq %rax, %rdi callq 0x5fc90 movq 0x60(%rsp), %rax addq $0x1a8, %rax # imm = 0x1A8 movq %rax, 0x188(%rsp) movq 0x188(%rsp), %rcx movq %rcx, 0x10(%rsp) movb $0x1, %al cmpq $0x0, (%rcx) movb %al, 0x1f(%rsp) je 0x941438 movq 0x10(%rsp), %rax movq %rax, 0x230(%rsp) movq 0x230(%rsp), %rcx movq 0x40(%rcx), %rax movslq 0x38(%rcx), %rcx imulq %rcx, %rax cmpq $0x0, %rax sete %al movb %al, 0x1f(%rsp) movb 0x1f(%rsp), %al testb $0x1, %al jne 0x941445 jmp 0x941579 movl $0xffffff9c, 0x124(%rsp) # imm = 0xFFFFFF9C jmp 0x941586 movq %rax, %rcx movl %edx, %eax movq %rcx, 0xc0(%rsp) movl %eax, 0xbc(%rsp) leaq 0x70(%rsp), %rax movq %rax, 0x140(%rsp) movq 0x140(%rsp), %rax movq %rax, 0x1c0(%rsp) movq 0x1c0(%rsp), %rax movq %rax, 0x8(%rsp) cmpq $0x0, 0x8(%rax) je 0x94151f movq 0x8(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x1bc(%rsp) # imm = 0xFFFFFFFF movl 0x1bc(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1b8(%rsp) cmpl $0x1, 0x1b8(%rsp) jne 0x94151f movq 0x8(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x9414f3 movq 0x8(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x9414f1 jmp 0x94151d movq 0x8(%rsp), %rax movq (%rax), %rax movq %rax, 0x210(%rsp) cmpq $0x0, 0x210(%rsp) je 0x94151b movq 0x210(%rsp), %rdi callq 0x5e480 jmp 0x94151d jmp 0x94151f movq 0x8(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x941577 movq %rax, %rdi callq 0x5fc90 jmp 0x941595 jmp 0x94157b movl $0x0, 0x124(%rsp) movl 0x124(%rsp), %eax addq $0x238, %rsp # imm = 0x238 retq 
movq 0xc0(%rsp), %rdi callq 0x5e3b0 nopw %cs:(%rax,%rax)
/ysh329[P]ncnn/src/layer/deconvolution.cpp
ncnn::Deconvolution::forward(ncnn::Mat const&, ncnn::Mat&, ncnn::Option const&) const
int Deconvolution::forward(const Mat& bottom_blob, Mat& top_blob, const Option& opt) const { int w = bottom_blob.w; int h = bottom_blob.h; size_t elemsize = bottom_blob.elemsize; const int kernel_extent_w = dilation_w * (kernel_w - 1) + 1; const int kernel_extent_h = dilation_h * (kernel_h - 1) + 1; int outw = (w - 1) * stride_w + kernel_extent_w + output_pad_right; int outh = (h - 1) * stride_h + kernel_extent_h + output_pad_bottom; Mat top_blob_bordered; if (pad_left > 0 || pad_right > 0 || pad_top > 0 || pad_bottom > 0 || (output_w > 0 && output_h > 0)) { top_blob_bordered.create(outw, outh, num_output, elemsize, opt.workspace_allocator); } else { top_blob_bordered = top_blob; top_blob_bordered.create(outw, outh, num_output, elemsize, opt.blob_allocator); } if (top_blob_bordered.empty()) return -100; int ret = deconvolution(bottom_blob, top_blob_bordered, weight_data, bias_data, kernel_w, kernel_h, stride_w, stride_h, dilation_w, dilation_h, activation_type, activation_params, opt); if (ret != 0) return ret; cut_padding(top_blob_bordered, top_blob, opt); if (top_blob.empty()) return -100; return 0; }
pushq %rbp pushq %r15 pushq %r14 pushq %rbx subq $0x1e8, %rsp # imm = 0x1E8 movq %rdi, 0x138(%rsp) movq %rsi, 0x130(%rsp) movq %rdx, 0x128(%rsp) movq %rcx, 0x120(%rsp) movq 0x138(%rsp), %rax movq %rax, 0x90(%rsp) movq 0x130(%rsp), %rcx movl 0x2c(%rcx), %ecx movl %ecx, 0x11c(%rsp) movq 0x130(%rsp), %rcx movl 0x30(%rcx), %ecx movl %ecx, 0x118(%rsp) movq 0x130(%rsp), %rcx movq 0x10(%rcx), %rcx movq %rcx, 0x110(%rsp) movl 0xdc(%rax), %ecx movl 0xd4(%rax), %edx subl $0x1, %edx imull %edx, %ecx addl $0x1, %ecx movl %ecx, 0x10c(%rsp) movl 0xe0(%rax), %ecx movl 0xd8(%rax), %edx subl $0x1, %edx imull %edx, %ecx addl $0x1, %ecx movl %ecx, 0x108(%rsp) movl 0x11c(%rsp), %ecx subl $0x1, %ecx imull 0xe4(%rax), %ecx addl 0x10c(%rsp), %ecx addl 0xfc(%rax), %ecx movl %ecx, 0x104(%rsp) movl 0x118(%rsp), %ecx subl $0x1, %ecx imull 0xe8(%rax), %ecx addl 0x108(%rsp), %ecx addl 0x100(%rax), %ecx movl %ecx, 0x100(%rsp) leaq 0xb8(%rsp), %rcx movq %rcx, 0x148(%rsp) movq 0x148(%rsp), %rcx movq %rcx, 0x98(%rsp) movq $0x0, (%rcx) movq $0x0, 0x8(%rcx) movq $0x0, 0x10(%rcx) movl $0x0, 0x18(%rcx) movq $0x0, 0x20(%rcx) movl $0x0, 0x28(%rcx) movl $0x0, 0x2c(%rcx) movl $0x0, 0x30(%rcx) movl $0x0, 0x34(%rcx) movl $0x0, 0x38(%rcx) movq $0x0, 0x40(%rcx) cmpl $0x0, 0xec(%rax) jg 0x94177e movq 0x90(%rsp), %rax cmpl $0x0, 0xf0(%rax) jg 0x94177e movq 0x90(%rsp), %rax cmpl $0x0, 0xf4(%rax) jg 0x94177e movq 0x90(%rsp), %rax cmpl $0x0, 0xf8(%rax) jg 0x94177e movq 0x90(%rsp), %rax cmpl $0x0, 0x104(%rax) jle 0x9418fe movq 0x90(%rsp), %rax cmpl $0x0, 0x108(%rax) jle 0x9418fe movq 0x90(%rsp), %rax movl 0x104(%rsp), %esi movl 0x100(%rsp), %edx movl 0xd0(%rax), %ecx movq 0x110(%rsp), %r8 movq 0x120(%rsp), %rax movq 0x10(%rax), %r9 leaq 0xb8(%rsp), %rdi callq 0x65550 jmp 0x9417bd jmp 0x941b6f movq %rax, %rcx movl %edx, %eax movq %rcx, 0xb0(%rsp) movl %eax, 0xac(%rsp) leaq 0xb8(%rsp), %rax movq %rax, 0x158(%rsp) movq 0x158(%rsp), %rax movq %rax, 0x1a8(%rsp) movq 0x1a8(%rsp), %rax movq %rax, 0x88(%rsp) cmpq $0x0, 
0x8(%rax) je 0x94189e movq 0x88(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x1a4(%rsp) # imm = 0xFFFFFFFF movl 0x1a4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1a0(%rsp) cmpl $0x1, 0x1a0(%rsp) jne 0x94189e movq 0x88(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x94186f movq 0x88(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x94186d jmp 0x94189c movq 0x88(%rsp), %rax movq (%rax), %rax movq %rax, 0x1c8(%rsp) cmpq $0x0, 0x1c8(%rsp) je 0x94189a movq 0x1c8(%rsp), %rdi callq 0x5e480 jmp 0x94189c jmp 0x94189e movq 0x88(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x9418f9 movq %rax, %rdi callq 0x5fc90 jmp 0x941e98 movq 0x128(%rsp), %rax leaq 0xb8(%rsp), %rcx movq %rcx, 0x170(%rsp) movq %rax, 0x168(%rsp) movq 0x170(%rsp), %rax movq %rax, 0x80(%rsp) cmpq 0x168(%rsp), %rax jne 0x94194d movq 0x80(%rsp), %rax movq %rax, 0x178(%rsp) jmp 0x941b2e movq 0x168(%rsp), %rax cmpq $0x0, 0x8(%rax) je 0x941985 movq 0x168(%rsp), %rax movq 0x8(%rax), %rcx movl $0x1, 0x164(%rsp) movl 0x164(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x160(%rsp) movq 0x80(%rsp), %rax movq %rax, 0x198(%rsp) movq 0x198(%rsp), %rax movq %rax, 0x78(%rsp) cmpq $0x0, 0x8(%rax) je 0x941a2e movq 0x78(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x194(%rsp) # imm = 0xFFFFFFFF movl 0x194(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x190(%rsp) cmpl $0x1, 0x190(%rsp) jne 0x941a2e movq 0x78(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x941a02 movq 0x78(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x941a00 jmp 0x941a2c movq 0x78(%rsp), %rax movq (%rax), %rax movq %rax, 0x1d0(%rsp) cmpq $0x0, 0x1d0(%rsp) je 0x941a2a movq 0x1d0(%rsp), %rdi callq 0x5e480 jmp 0x941a2c jmp 0x941a2e movq 0x78(%rsp), %rax movq 
$0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) movq 0x80(%rsp), %rax movq 0x168(%rsp), %rcx movq (%rcx), %rcx movq %rcx, (%rax) movq 0x168(%rsp), %rcx movq 0x8(%rcx), %rcx movq %rcx, 0x8(%rax) movq 0x168(%rsp), %rcx movq 0x10(%rcx), %rcx movq %rcx, 0x10(%rax) movq 0x168(%rsp), %rcx movl 0x18(%rcx), %ecx movl %ecx, 0x18(%rax) movq 0x168(%rsp), %rcx movq 0x20(%rcx), %rcx movq %rcx, 0x20(%rax) movq 0x168(%rsp), %rcx movl 0x28(%rcx), %ecx movl %ecx, 0x28(%rax) movq 0x168(%rsp), %rcx movl 0x2c(%rcx), %ecx movl %ecx, 0x2c(%rax) movq 0x168(%rsp), %rcx movl 0x30(%rcx), %ecx movl %ecx, 0x30(%rax) movq 0x168(%rsp), %rcx movl 0x34(%rcx), %ecx movl %ecx, 0x34(%rax) movq 0x168(%rsp), %rcx movl 0x38(%rcx), %ecx movl %ecx, 0x38(%rax) movq 0x168(%rsp), %rcx movq 0x40(%rcx), %rcx movq %rcx, 0x40(%rax) movq %rax, 0x178(%rsp) movq 0x90(%rsp), %rax movl 0x104(%rsp), %esi movl 0x100(%rsp), %edx movl 0xd0(%rax), %ecx movq 0x110(%rsp), %r8 movq 0x120(%rsp), %rax movq 0x8(%rax), %r9 leaq 0xb8(%rsp), %rdi callq 0x65550 jmp 0x941b6d jmp 0x941b6f leaq 0xb8(%rsp), %rax movq %rax, 0x188(%rsp) movq 0x188(%rsp), %rcx movq %rcx, 0x68(%rsp) movb $0x1, %al cmpq $0x0, (%rcx) movb %al, 0x77(%rsp) je 0x941bc4 movq 0x68(%rsp), %rax movq %rax, 0x1d8(%rsp) movq 0x1d8(%rsp), %rcx movq 0x40(%rcx), %rax movslq 0x38(%rcx), %rcx imulq %rcx, %rax cmpq $0x0, %rax sete %al movb %al, 0x77(%rsp) movb 0x77(%rsp), %al movb %al, 0x67(%rsp) movb 0x67(%rsp), %al testb $0x1, %al jne 0x941bd6 jmp 0x941bf1 movl $0xffffff9c, 0x144(%rsp) # imm = 0xFFFFFF9C movl $0x1, 0xa8(%rsp) jmp 0x941d72 movq 0x90(%rsp), %r14 movq 0x130(%rsp), %rdi movq %r14, %rdx addq $0x160, %rdx # imm = 0x160 movq %r14, %rcx addq $0x1a8, %rcx # imm = 0x1A8 movl 0xd4(%r14), %r8d movl 0xd8(%r14), %r9d movl 0xe4(%r14), %esi movl 0xe8(%r14), %r10d movl 0xdc(%r14), %r11d movl 0xe0(%r14), %ebx 
movl 0x114(%r14), %ebp addq $0x118, %r14 # imm = 0x118 movq 0x120(%rsp), %r15 movq %rsp, %rax movq %r15, 0x30(%rax) movq %r14, 0x28(%rax) movl %ebp, 0x20(%rax) movl %ebx, 0x18(%rax) movl %r11d, 0x10(%rax) movl %r10d, 0x8(%rax) movl %esi, (%rax) leaq 0xb8(%rsp), %rsi callq 0x941eb0 movl %eax, 0x60(%rsp) jmp 0x941c83 movl 0x60(%rsp), %eax movl %eax, 0xa4(%rsp) cmpl $0x0, 0xa4(%rsp) je 0x941cb6 movl 0xa4(%rsp), %eax movl %eax, 0x144(%rsp) movl $0x1, 0xa8(%rsp) jmp 0x941d72 movq 0x90(%rsp), %rdi movq 0x128(%rsp), %rdx movq 0x120(%rsp), %rcx leaq 0xb8(%rsp), %rsi callq 0x943230 jmp 0x941cdd movq 0x128(%rsp), %rax movq %rax, 0x180(%rsp) movq 0x180(%rsp), %rcx movq %rcx, 0x50(%rsp) movb $0x1, %al cmpq $0x0, (%rcx) movb %al, 0x5f(%rsp) je 0x941d32 movq 0x50(%rsp), %rax movq %rax, 0x1e0(%rsp) movq 0x1e0(%rsp), %rcx movq 0x40(%rcx), %rax movslq 0x38(%rcx), %rcx imulq %rcx, %rax cmpq $0x0, %rax sete %al movb %al, 0x5f(%rsp) movb 0x5f(%rsp), %al movb %al, 0x4f(%rsp) movb 0x4f(%rsp), %al testb $0x1, %al jne 0x941d44 jmp 0x941d5c movl $0xffffff9c, 0x144(%rsp) # imm = 0xFFFFFF9C movl $0x1, 0xa8(%rsp) jmp 0x941d72 movl $0x0, 0x144(%rsp) movl $0x1, 0xa8(%rsp) leaq 0xb8(%rsp), %rax movq %rax, 0x150(%rsp) movq 0x150(%rsp), %rax movq %rax, 0x1b8(%rsp) movq 0x1b8(%rsp), %rax movq %rax, 0x40(%rsp) cmpq $0x0, 0x8(%rax) je 0x941e2b movq 0x40(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x1b4(%rsp) # imm = 0xFFFFFFFF movl 0x1b4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1b0(%rsp) cmpl $0x1, 0x1b0(%rsp) jne 0x941e2b movq 0x40(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x941dff movq 0x40(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x941dfd jmp 0x941e29 movq 0x40(%rsp), %rax movq (%rax), %rax movq %rax, 0x1c0(%rsp) cmpq $0x0, 0x1c0(%rsp) je 0x941e27 movq 0x1c0(%rsp), %rdi callq 0x5e480 jmp 0x941e29 jmp 0x941e2b movq 0x40(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl 
$0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x941e83 movq %rax, %rdi callq 0x5fc90 movl 0x144(%rsp), %eax addq $0x1e8, %rsp # imm = 0x1E8 popq %rbx popq %r14 popq %r15 popq %rbp retq movq 0xb0(%rsp), %rdi callq 0x5e3b0 nopw %cs:(%rax,%rax)
/ysh329[P]ncnn/src/layer/deconvolution.cpp
ncnn::deconvolution(ncnn::Mat const&, ncnn::Mat&, ncnn::Mat const&, ncnn::Mat const&, int, int, int, int, int, int, int, ncnn::Mat const&, ncnn::Option const&)
// Naive reference deconvolution (transposed convolution).
// Each input pixel scatter-adds (input value * kernel weight) into a
// kernel_w x kernel_h window of the output, stepped by stride and spread by
// dilation; the output is pre-filled with the per-channel bias and finally
// passed through activation_ss. Always returns 0.
// NOTE(review): top_blob is assumed pre-allocated to the full bordered
// output size by the caller — confirm against Deconvolution::forward.
static int deconvolution(const Mat& bottom_blob, Mat& top_blob, const Mat& weight_data, const Mat& bias_data, int kernel_w, int kernel_h, int stride_w, int stride_h, int dilation_w, int dilation_h, int activation_type, const Mat& activation_params, const Option& opt)
{
    const int outw = top_blob.w;
    const int outch = top_blob.c;

    const int maxk = kernel_w * kernel_h;

    // kernel offsets
    // space_ofs[k] is the float offset of kernel tap k relative to the
    // output position (i*stride_h, j*stride_w):
    //   space_ofs[ky*kernel_w + kx] = ky*dilation_h*outw + kx*dilation_w
    std::vector<int> _space_ofs(maxk);
    int* space_ofs = &_space_ofs[0];
    {
        int p1 = 0;
        int p2 = 0;
        // jump from the end of one kernel row to the start of the next
        int gap = outw * dilation_h - kernel_w * dilation_w;
        for (int i = 0; i < kernel_h; i++)
        {
            for (int j = 0; j < kernel_w; j++)
            {
                space_ofs[p1] = p2;
                p1++;
                p2 += dilation_w;
            }
            p2 += gap;
        }
    }

    // one output channel per OpenMP task
    #pragma omp parallel for num_threads(opt.num_threads)
    for (int p = 0; p < outch; p++)
    {
        Mat out = top_blob.channel(p);

        // seed the whole channel with its bias (0 when no bias blob)
        const float bias = bias_data.empty() ? 0.f : bias_data[p];
        out.fill(bias);

        // shadowed variable for less openmp task args
        const int w = bottom_blob.w;
        const int h = bottom_blob.h;
        const int inch = bottom_blob.c;
        const int outw = top_blob.w;
        const int outh = top_blob.h;

        for (int i = 0; i < h; i++)
        {
            for (int j = 0; j < w; j++)
            {
                // top-left corner of this input pixel's output window
                float* outptr = out.row(i * stride_h) + j * stride_w;

                // weights are laid out [outch][inch][maxk]; select channel p
                const float* kptr = (const float*)weight_data + maxk * inch * p;

                for (int q = 0; q < inch; q++)
                {
                    const float val = bottom_blob.channel(q).row(i)[j];

                    // scatter-add this input value through every kernel tap
                    for (int k = 0; k < maxk; k++)
                    {
                        float w = kptr[k]; // shadows the blob width above (single-use)
                        outptr[space_ofs[k]] += val * w;
                    }

                    kptr += maxk; // advance to the next input channel's taps
                }
            }
        }

        // apply the activation in place over the whole channel
        {
            float* outptr = out;
            int size = outw * outh;

            for (int i = 0; i < size; i++)
            {
                outptr[i] = activation_ss(outptr[i], activation_type, activation_params);
            }
        }
    }

    return 0;
}
subq $0x498, %rsp # imm = 0x498 movq 0x4d0(%rsp), %rax movq 0x4c8(%rsp), %rax movl 0x4c0(%rsp), %eax movl 0x4b8(%rsp), %eax movl 0x4b0(%rsp), %eax movl 0x4a8(%rsp), %eax movl 0x4a0(%rsp), %eax movq %rdi, 0x210(%rsp) movq %rsi, 0x208(%rsp) movq %rdx, 0x200(%rsp) movq %rcx, 0x1f8(%rsp) movl %r8d, 0x1f4(%rsp) movl %r9d, 0x1f0(%rsp) movq 0x208(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x1ec(%rsp) movq 0x208(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0x1e8(%rsp) movl 0x1f4(%rsp), %eax movl 0x1f0(%rsp), %ecx imull %ecx, %eax movl %eax, 0x1e4(%rsp) movslq 0x1e4(%rsp), %rax movq %rax, 0xa0(%rsp) leaq 0x1c7(%rsp), %rdi movq %rdi, 0xa8(%rsp) callq 0x99670 movq 0xa0(%rsp), %rsi movq 0xa8(%rsp), %rdx leaq 0x1c8(%rsp), %rdi callq 0xa5960 jmp 0x941f9a leaq 0x1c7(%rsp), %rdi callq 0x99e50 leaq 0x1c8(%rsp), %rdi xorl %eax, %eax movl %eax, %esi callq 0x98a00 movq %rax, 0x1a8(%rsp) movl $0x0, 0x1a4(%rsp) movl $0x0, 0x1a0(%rsp) movl 0x1ec(%rsp), %eax imull 0x4b8(%rsp), %eax movl 0x1f4(%rsp), %ecx imull 0x4b0(%rsp), %ecx subl %ecx, %eax movl %eax, 0x19c(%rsp) movl $0x0, 0x198(%rsp) movl 0x198(%rsp), %eax cmpl 0x1f0(%rsp), %eax jge 0x9420db movl $0x0, 0x194(%rsp) movl 0x194(%rsp), %eax cmpl 0x1f4(%rsp), %eax jge 0x9420b0 movl 0x1a0(%rsp), %edx movq 0x1a8(%rsp), %rax movslq 0x1a4(%rsp), %rcx movl %edx, (%rax,%rcx,4) movl 0x1a4(%rsp), %eax addl $0x1, %eax movl %eax, 0x1a4(%rsp) movl 0x4b0(%rsp), %eax addl 0x1a0(%rsp), %eax movl %eax, 0x1a0(%rsp) movl 0x194(%rsp), %eax addl $0x1, %eax movl %eax, 0x194(%rsp) jmp 0x942027 movq %rax, %rcx movl %edx, %eax movq %rcx, 0x1b8(%rsp) movl %eax, 0x1b4(%rsp) leaq 0x1c7(%rsp), %rdi callq 0x99e50 jmp 0x94321f movl 0x19c(%rsp), %eax addl 0x1a0(%rsp), %eax movl %eax, 0x1a0(%rsp) movl 0x198(%rsp), %eax addl $0x1, %eax movl %eax, 0x198(%rsp) jmp 0x942008 movl $0x0, 0x190(%rsp) movl 0x190(%rsp), %eax cmpl 0x1e8(%rsp), %eax jge 0x943208 movq 0x208(%rsp), %rcx movl 0x190(%rsp), %eax leaq 0x148(%rsp), %rdx movq %rdx, 0x2b8(%rsp) movq %rcx, 0x2b0(%rsp) 
movl %eax, 0x2ac(%rsp) movq 0x2b0(%rsp), %rax movq %rax, 0x90(%rsp) movb $0x0, 0x2ab(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x2ac(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x148(%rsp), %r10 movq %r10, 0x470(%rsp) movl %r9d, 0x46c(%rsp) movl %r8d, 0x468(%rsp) movl %edi, 0x464(%rsp) movq %rsi, 0x458(%rsp) movq %rdx, 0x450(%rsp) movl %ecx, 0x44c(%rsp) movq %rax, 0x440(%rsp) movq 0x470(%rsp), %rcx movq %rcx, 0x98(%rsp) movq 0x458(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x450(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x44c(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x440(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x46c(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x468(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x464(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x480(%rsp) movl $0x10, 0x47c(%rsp) movq 0x480(%rsp), %rax movslq 0x47c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x47c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rcx movq 0x98(%rsp), %rax movq %rcx, 0x40(%rax) movq 0x90(%rsp), %rax movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x170(%rsp) cmpl $0x4, 0x28(%rax) jne 0x9422be movq 0x90(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x188(%rsp) movb $0x1, 0x2ab(%rsp) testb $0x1, 0x2ab(%rsp) jne 0x9423f7 leaq 0x148(%rsp), %rax movq %rax, 0x2c0(%rsp) movq 0x2c0(%rsp), %rax movq %rax, 0x2d0(%rsp) movq 0x2d0(%rsp), %rax movq %rax, 0x88(%rsp) cmpq $0x0, 0x8(%rax) je 0x94239c movq 0x88(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x2cc(%rsp) # imm = 0xFFFFFFFF movl 0x2cc(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x2c8(%rsp) cmpl $0x1, 0x2c8(%rsp) jne 
0x94239c movq 0x88(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x94236d movq 0x88(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x94236b jmp 0x94239a movq 0x88(%rsp), %rax movq (%rax), %rax movq %rax, 0x2d8(%rsp) cmpq $0x0, 0x2d8(%rsp) je 0x942398 movq 0x2d8(%rsp), %rdi callq 0x5e480 jmp 0x94239a jmp 0x94239c movq 0x88(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x9423f7 movq %rax, %rdi callq 0x5fc90 jmp 0x9423f9 movq 0x1f8(%rsp), %rax movq %rax, 0x238(%rsp) movq 0x238(%rsp), %rcx movq %rcx, 0x78(%rsp) movb $0x1, %al cmpq $0x0, (%rcx) movb %al, 0x87(%rsp) je 0x942454 movq 0x78(%rsp), %rax movq %rax, 0x2a0(%rsp) movq 0x2a0(%rsp), %rcx movq 0x40(%rcx), %rax movslq 0x38(%rcx), %rcx imulq %rcx, %rax cmpq $0x0, %rax sete %al movb %al, 0x87(%rsp) movb 0x87(%rsp), %al movb %al, 0x77(%rsp) movb 0x77(%rsp), %al testb $0x1, %al jne 0x942469 jmp 0x942474 xorps %xmm0, %xmm0 movss %xmm0, 0x70(%rsp) jmp 0x9424c2 movq 0x1f8(%rsp), %rcx movslq 0x190(%rsp), %rax movq %rcx, 0x2e8(%rsp) movq %rax, 0x2e0(%rsp) movq 0x2e8(%rsp), %rax movq (%rax), %rax movq 0x2e0(%rsp), %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x68(%rsp) movq 0x68(%rsp), %rax movss (%rax), %xmm0 movss %xmm0, 0x70(%rsp) movss 0x70(%rsp), %xmm0 movss %xmm0, 0x144(%rsp) movss 0x144(%rsp), %xmm0 leaq 0x148(%rsp), %rax movq %rax, 0x308(%rsp) movss %xmm0, 0x304(%rsp) movq 0x308(%rsp), %rax movq %rax, 0x310(%rsp) movq 0x310(%rsp), %rdx movq 0x40(%rdx), %rcx movslq 0x38(%rdx), %rdx imulq %rdx, %rcx movl %ecx, 0x300(%rsp) movq (%rax), %rax movq %rax, 0x2f8(%rsp) movl $0x0, 0x2f4(%rsp) movl 0x2f4(%rsp), %eax cmpl 0x300(%rsp), %eax jge 0x94257b movss 0x304(%rsp), %xmm0 movq 0x2f8(%rsp), %rax movq %rax, %rcx addq $0x4, %rcx movq %rcx, 0x2f8(%rsp) movss %xmm0, (%rax) movl 0x2f4(%rsp), 
%eax addl $0x1, %eax movl %eax, 0x2f4(%rsp) jmp 0x942534 jmp 0x94257d movq 0x210(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x140(%rsp) movq 0x210(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0x13c(%rsp) movq 0x210(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0x138(%rsp) movq 0x208(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x134(%rsp) movq 0x208(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0x130(%rsp) movl $0x0, 0x12c(%rsp) movl 0x12c(%rsp), %eax cmpl 0x13c(%rsp), %eax jge 0x942c3d movl $0x0, 0x128(%rsp) movl 0x128(%rsp), %eax cmpl 0x140(%rsp), %eax jge 0x942c25 movl 0x12c(%rsp), %eax imull 0x4a8(%rsp), %eax leaq 0x148(%rsp), %rcx movq %rcx, 0x320(%rsp) movl %eax, 0x31c(%rsp) movq 0x320(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0x31c(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x60(%rsp) movq 0x60(%rsp), %rax movl 0x128(%rsp), %ecx imull 0x4a0(%rsp), %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x120(%rsp) movq 0x200(%rsp), %rax movq %rax, 0x328(%rsp) movq 0x328(%rsp), %rax movq (%rax), %rax movq %rax, 0x58(%rsp) movq 0x58(%rsp), %rax movl 0x1e4(%rsp), %ecx imull 0x138(%rsp), %ecx imull 0x190(%rsp), %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x118(%rsp) movl $0x0, 0x114(%rsp) movl 0x114(%rsp), %eax cmpl 0x138(%rsp), %eax jge 0x942c0d movq 0x210(%rsp), %rcx movl 0x114(%rsp), %eax leaq 0xc8(%rsp), %rdx movq %rdx, 0x340(%rsp) movq %rcx, 0x338(%rsp) movl %eax, 0x334(%rsp) movq 0x338(%rsp), %rax movq %rax, 0x48(%rsp) movb $0x0, 0x333(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x334(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0xc8(%rsp), %r10 movq %r10, 0x438(%rsp) movl %r9d, 0x434(%rsp) movl %r8d, 0x430(%rsp) movl %edi, 0x42c(%rsp) movq %rsi, 0x420(%rsp) movq %rdx, 0x418(%rsp) movl %ecx, 0x414(%rsp) 
movq %rax, 0x408(%rsp) movq 0x438(%rsp), %rcx movq %rcx, 0x50(%rsp) movq 0x420(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x418(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x414(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x408(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x434(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x430(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x42c(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x490(%rsp) movl $0x10, 0x48c(%rsp) movq 0x490(%rsp), %rax movslq 0x48c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x48c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rcx movq 0x50(%rsp), %rax movq %rcx, 0x40(%rax) movq 0x48(%rsp), %rax movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0xf0(%rsp) cmpl $0x4, 0x28(%rax) jne 0x9428ab movq 0x48(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x108(%rsp) movb $0x1, 0x333(%rsp) testb $0x1, 0x333(%rsp) jne 0x9429d2 leaq 0xc8(%rsp), %rax movq %rax, 0x348(%rsp) movq 0x348(%rsp), %rax movq %rax, 0x358(%rsp) movq 0x358(%rsp), %rax movq %rax, 0x40(%rsp) cmpq $0x0, 0x8(%rax) je 0x94297a movq 0x40(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x354(%rsp) # imm = 0xFFFFFFFF movl 0x354(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x350(%rsp) cmpl $0x1, 0x350(%rsp) jne 0x94297a movq 0x40(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x94294e movq 0x40(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x94294c jmp 0x942978 movq 0x40(%rsp), %rax movq (%rax), %rax movq %rax, 0x360(%rsp) cmpq $0x0, 0x360(%rsp) je 0x942976 movq 0x360(%rsp), %rdi callq 0x5e480 jmp 0x942978 jmp 0x94297a movq 0x40(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 
0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x9429d2 movq %rax, %rdi callq 0x5fc90 jmp 0x9429d4 movl 0x12c(%rsp), %eax leaq 0xc8(%rsp), %rcx movq %rcx, 0x370(%rsp) movl %eax, 0x36c(%rsp) movq 0x370(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0x36c(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x38(%rsp) movq 0x38(%rsp), %rax movslq 0x128(%rsp), %rcx movss (%rax,%rcx,4), %xmm0 movss %xmm0, 0x2c(%rsp) leaq 0xc8(%rsp), %rax movq %rax, 0x218(%rsp) movq 0x218(%rsp), %rax movq %rax, 0x278(%rsp) movq 0x278(%rsp), %rax movq %rax, 0x30(%rsp) cmpq $0x0, 0x8(%rax) je 0x942aeb movq 0x30(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x274(%rsp) # imm = 0xFFFFFFFF movl 0x274(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x270(%rsp) cmpl $0x1, 0x270(%rsp) jne 0x942aeb movq 0x30(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x942abf movq 0x30(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x942abd jmp 0x942ae9 movq 0x30(%rsp), %rax movq (%rax), %rax movq %rax, 0x280(%rsp) cmpq $0x0, 0x280(%rsp) je 0x942ae7 movq 0x280(%rsp), %rdi callq 0x5e480 jmp 0x942ae9 jmp 0x942aeb movq 0x30(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x942b43 movq %rax, %rdi callq 0x5fc90 movss 0x2c(%rsp), %xmm0 movss %xmm0, 0x110(%rsp) movl $0x0, 0xc4(%rsp) movl 0xc4(%rsp), %eax cmpl 0x1e4(%rsp), %eax jge 0x942bd6 movq 0x118(%rsp), %rax movslq 0xc4(%rsp), %rcx movss (%rax,%rcx,4), %xmm0 movss %xmm0, 0xc0(%rsp) movss 0x110(%rsp), %xmm0 mulss 0xc0(%rsp), %xmm0 movq 0x120(%rsp), %rax movq 0x1a8(%rsp), %rcx movslq 0xc4(%rsp), %rdx movslq (%rcx,%rdx,4), %rcx addss (%rax,%rcx,4), %xmm0 movss %xmm0, (%rax,%rcx,4) movl 0xc4(%rsp), %eax addl $0x1, %eax movl %eax, 0xc4(%rsp) jmp 0x942b5d movl 
0x1e4(%rsp), %ecx movq 0x118(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x118(%rsp) movl 0x114(%rsp), %eax addl $0x1, %eax movl %eax, 0x114(%rsp) jmp 0x9426e2 jmp 0x942c0f movl 0x128(%rsp), %eax addl $0x1, %eax movl %eax, 0x128(%rsp) jmp 0x942601 jmp 0x942c27 movl 0x12c(%rsp), %eax addl $0x1, %eax movl %eax, 0x12c(%rsp) jmp 0x9425e2 leaq 0x148(%rsp), %rax movq %rax, 0x378(%rsp) movq 0x378(%rsp), %rax movq (%rax), %rax movq %rax, 0x20(%rsp) movq 0x20(%rsp), %rax movq %rax, 0xb8(%rsp) movl 0x134(%rsp), %eax imull 0x130(%rsp), %eax movl %eax, 0xb4(%rsp) movl $0x0, 0xb0(%rsp) movl 0xb0(%rsp), %eax cmpl 0xb4(%rsp), %eax jge 0x9430e9 movq 0xb8(%rsp), %rax movslq 0xb0(%rsp), %rcx movss (%rax,%rcx,4), %xmm0 movl 0x4c0(%rsp), %ecx movq 0x4c8(%rsp), %rax movss %xmm0, 0x3b4(%rsp) movl %ecx, 0x3b0(%rsp) movq %rax, 0x3a8(%rsp) movl 0x3b0(%rsp), %eax decl %eax movl %eax, %ecx movq %rcx, 0x18(%rsp) subl $0x5, %eax ja 0x9430a9 movq 0x18(%rsp), %rax leaq 0x14c49f4(%rip), %rcx # 0x1e076f4 movslq (%rcx,%rax,4), %rax addq %rcx, %rax jmpq *%rax movss 0x3b4(%rsp), %xmm0 xorps %xmm1, %xmm1 callq 0x137490 movss %xmm0, 0x3b4(%rsp) jmp 0x9430a9 movq 0x3a8(%rsp), %rax movq %rax, 0x400(%rsp) movq $0x0, 0x3f8(%rsp) movq 0x400(%rsp), %rax movq (%rax), %rax movq 0x3f8(%rsp), %rcx movss (%rax,%rcx,4), %xmm0 movss %xmm0, 0x3a4(%rsp) movss 0x3b4(%rsp), %xmm0 xorps %xmm1, %xmm1 ucomiss %xmm1, %xmm0 jbe 0x942d87 movss 0x3b4(%rsp), %xmm0 movss %xmm0, 0x14(%rsp) jmp 0x942d9f movss 0x3b4(%rsp), %xmm0 mulss 0x3a4(%rsp), %xmm0 movss %xmm0, 0x14(%rsp) movss 0x14(%rsp), %xmm0 movss %xmm0, 0x3b4(%rsp) jmp 0x9430a9 movq 0x3a8(%rsp), %rax movq %rax, 0x3f0(%rsp) movq $0x0, 0x3e8(%rsp) movq 0x3f0(%rsp), %rax movq (%rax), %rax movq 0x3e8(%rsp), %rcx movss (%rax,%rcx,4), %xmm0 movss %xmm0, 0x3a0(%rsp) movq 0x3a8(%rsp), %rax movq %rax, 0x3e0(%rsp) movq $0x1, 0x3d8(%rsp) movq 0x3e0(%rsp), %rax movq (%rax), %rax movq 0x3d8(%rsp), %rcx movss (%rax,%rcx,4), %xmm0 movss %xmm0, 0x39c(%rsp) 
movss 0x3b4(%rsp), %xmm1 movss 0x3a0(%rsp), %xmm0 ucomiss %xmm1, %xmm0 jbe 0x942e56 movss 0x3a0(%rsp), %xmm0 movss %xmm0, 0x3b4(%rsp) movss 0x3b4(%rsp), %xmm0 ucomiss 0x39c(%rsp), %xmm0 jbe 0x942e7b movss 0x39c(%rsp), %xmm0 movss %xmm0, 0x3b4(%rsp) jmp 0x9430a9 movss 0x14bffa8(%rip), %xmm0 # 0x1e02e30 movss %xmm0, 0x398(%rsp) leaq 0x3b4(%rsp), %rdi leaq 0x398(%rsp), %rsi callq 0x1374b0 movss (%rax), %xmm0 movss %xmm0, 0x3b4(%rsp) movss 0x14bff79(%rip), %xmm0 # 0x1e02e34 movss %xmm0, 0x394(%rsp) leaq 0x3b4(%rsp), %rdi leaq 0x394(%rsp), %rsi callq 0x1374f0 movss (%rax), %xmm0 movss %xmm0, 0x3b4(%rsp) movss 0x3b4(%rsp), %xmm0 movd %xmm0, %eax xorl $0x80000000, %eax # imm = 0x80000000 movd %eax, %xmm0 callq 0xf74e0 movss 0x14bd3c7(%rip), %xmm1 # 0x1e002d0 addss %xmm0, %xmm1 movss 0x14bd3bb(%rip), %xmm0 # 0x1e002d0 divss %xmm1, %xmm0 movss %xmm0, 0x3b4(%rsp) jmp 0x9430a9 movss 0x3b4(%rsp), %xmm0 movss %xmm0, 0x10(%rsp) movss 0x3b4(%rsp), %xmm0 callq 0xf74e0 movss 0x14bd384(%rip), %xmm1 # 0x1e002d0 addss %xmm1, %xmm0 callq 0xf74c0 callq 0x137530 movaps %xmm0, %xmm1 movss 0x10(%rsp), %xmm0 mulss %xmm1, %xmm0 movss %xmm0, 0x3b4(%rsp) jmp 0x9430a9 movq 0x3a8(%rsp), %rax movq %rax, 0x3d0(%rsp) movq $0x0, 0x3c8(%rsp) movq 0x3d0(%rsp), %rax movq (%rax), %rax movq 0x3c8(%rsp), %rcx movss (%rax,%rcx,4), %xmm0 movss %xmm0, 0x390(%rsp) movq 0x3a8(%rsp), %rax movq %rax, 0x3c0(%rsp) movq $0x1, 0x3b8(%rsp) movq 0x3c0(%rsp), %rax movq (%rax), %rax movq 0x3b8(%rsp), %rcx movss (%rax,%rcx,4), %xmm0 movss %xmm0, 0x38c(%rsp) movss 0x38c(%rsp), %xmm0 movd %xmm0, %eax xorl $0x80000000, %eax # imm = 0x80000000 movd %eax, %xmm0 divss 0x390(%rsp), %xmm0 movss %xmm0, 0x388(%rsp) movss 0x14bd2b1(%rip), %xmm0 # 0x1e002d0 divss 0x390(%rsp), %xmm0 addss 0x388(%rsp), %xmm0 movss %xmm0, 0x384(%rsp) movss 0x3b4(%rsp), %xmm1 movss 0x388(%rsp), %xmm0 ucomiss %xmm1, %xmm0 jbe 0x94305f xorps %xmm0, %xmm0 movss %xmm0, 0x3b4(%rsp) jmp 0x9430a7 movss 0x3b4(%rsp), %xmm0 ucomiss 0x384(%rsp), %xmm0 jbe 0x943074 
jmp 0x9430a5 movss 0x3b4(%rsp), %xmm0 movss 0x3b4(%rsp), %xmm1 mulss 0x390(%rsp), %xmm1 addss 0x38c(%rsp), %xmm1 mulss %xmm1, %xmm0 movss %xmm0, 0x3b4(%rsp) jmp 0x9430a7 jmp 0x9430a9 movss 0x3b4(%rsp), %xmm0 movss %xmm0, 0xc(%rsp) movss 0xc(%rsp), %xmm0 movq 0xb8(%rsp), %rax movslq 0xb0(%rsp), %rcx movss %xmm0, (%rax,%rcx,4) movl 0xb0(%rsp), %eax addl $0x1, %eax movl %eax, 0xb0(%rsp) jmp 0x942c8b leaq 0x148(%rsp), %rax movq %rax, 0x228(%rsp) movq 0x228(%rsp), %rax movq %rax, 0x258(%rsp) movq 0x258(%rsp), %rax movq %rax, (%rsp) cmpq $0x0, 0x8(%rax) je 0x943199 movq (%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x254(%rsp) # imm = 0xFFFFFFFF movl 0x254(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x250(%rsp) cmpl $0x1, 0x250(%rsp) jne 0x943199 movq (%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x94316e movq (%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x94316c jmp 0x943197 movq (%rsp), %rax movq (%rax), %rax movq %rax, 0x290(%rsp) cmpq $0x0, 0x290(%rsp) je 0x943195 movq 0x290(%rsp), %rdi callq 0x5e480 jmp 0x943197 jmp 0x943199 movq (%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x9431f0 movq %rax, %rdi callq 0x5fc90 jmp 0x9431f2 movl 0x190(%rsp), %eax addl $0x1, %eax movl %eax, 0x190(%rsp) jmp 0x9420e6 leaq 0x1c8(%rsp), %rdi callq 0x998a0 xorl %eax, %eax addq $0x498, %rsp # imm = 0x498 retq movq 0x1b8(%rsp), %rdi callq 0x5e3b0 nopl (%rax)
/ysh329[P]ncnn/src/layer/deconvolution.cpp
ncnn::Deconvolution::cut_padding(ncnn::Mat const&, ncnn::Mat&, ncnn::Option const&) const
void Deconvolution::cut_padding(const Mat& top_blob_bordered, Mat& top_blob, const Option& opt) const { if (pad_left > 0 || pad_right > 0 || pad_top > 0 || pad_bottom > 0) { copy_cut_border(top_blob_bordered, top_blob, pad_top, pad_bottom, pad_left, pad_right, opt); } else if (output_w > 0 && output_h > 0) { int wcut = top_blob_bordered.w - output_w; int hcut = top_blob_bordered.h - output_h; if (pad_left == -233 || pad_right == -233 || pad_top == -233 || pad_bottom == -233) { // onnx padding=SAME_UPPER copy_cut_border(top_blob_bordered, top_blob, hcut / 2, hcut - hcut / 2, wcut / 2, wcut - wcut / 2, opt); } else if (pad_left == -234 || pad_right == -234 || pad_top == -234 || pad_bottom == -234) { // onnx padding=SAME_LOWER copy_cut_border(top_blob_bordered, top_blob, hcut - hcut / 2, hcut / 2, wcut - wcut / 2, wcut / 2, opt); } } else { top_blob = top_blob_bordered; } }
subq $0x98, %rsp movq %rdi, 0x58(%rsp) movq %rsi, 0x50(%rsp) movq %rdx, 0x48(%rsp) movq %rcx, 0x40(%rsp) movq 0x58(%rsp), %rax movq %rax, 0x30(%rsp) cmpl $0x0, 0xec(%rax) jg 0x943288 movq 0x30(%rsp), %rax cmpl $0x0, 0xf0(%rax) jg 0x943288 movq 0x30(%rsp), %rax cmpl $0x0, 0xf4(%rax) jg 0x943288 movq 0x30(%rsp), %rax cmpl $0x0, 0xf8(%rax) jle 0x9432c4 movq 0x30(%rsp), %rax movq 0x50(%rsp), %rdi movq 0x48(%rsp), %rsi movl 0xf4(%rax), %edx movl 0xf8(%rax), %ecx movl 0xec(%rax), %r8d movl 0xf0(%rax), %r9d movq 0x40(%rsp), %rax movq %rax, (%rsp) callq 0x68d50 jmp 0x94364d movq 0x30(%rsp), %rax cmpl $0x0, 0x104(%rax) jle 0x943478 movq 0x30(%rsp), %rax cmpl $0x0, 0x108(%rax) jle 0x943478 movq 0x30(%rsp), %rax movq 0x50(%rsp), %rcx movl 0x2c(%rcx), %ecx subl 0x104(%rax), %ecx movl %ecx, 0x3c(%rsp) movq 0x50(%rsp), %rcx movl 0x30(%rcx), %ecx subl 0x108(%rax), %ecx movl %ecx, 0x38(%rsp) cmpl $0xffffff17, 0xec(%rax) # imm = 0xFFFFFF17 je 0x943350 movq 0x30(%rsp), %rax cmpl $0xffffff17, 0xf0(%rax) # imm = 0xFFFFFF17 je 0x943350 movq 0x30(%rsp), %rax cmpl $0xffffff17, 0xf4(%rax) # imm = 0xFFFFFF17 je 0x943350 movq 0x30(%rsp), %rax cmpl $0xffffff17, 0xf8(%rax) # imm = 0xFFFFFF17 jne 0x9433bc movq 0x50(%rsp), %rdi movq 0x48(%rsp), %rsi movl 0x38(%rsp), %eax movl $0x2, %ecx cltd idivl %ecx movl %eax, 0x2c(%rsp) movl 0x38(%rsp), %ecx movl 0x38(%rsp), %eax movl $0x2, %r8d cltd idivl %r8d subl %eax, %ecx movl 0x3c(%rsp), %eax movl $0x2, %r8d cltd idivl %r8d movl %eax, %r8d movl 0x3c(%rsp), %r9d movl 0x3c(%rsp), %eax movl $0x2, %r10d cltd idivl %r10d movl 0x2c(%rsp), %edx subl %eax, %r9d movq 0x40(%rsp), %rax movq %rax, (%rsp) callq 0x68d50 jmp 0x943473 movq 0x30(%rsp), %rax cmpl $0xffffff16, 0xec(%rax) # imm = 0xFFFFFF16 je 0x943400 movq 0x30(%rsp), %rax cmpl $0xffffff16, 0xf0(%rax) # imm = 0xFFFFFF16 je 0x943400 movq 0x30(%rsp), %rax cmpl $0xffffff16, 0xf4(%rax) # imm = 0xFFFFFF16 je 0x943400 movq 0x30(%rsp), %rax cmpl $0xffffff16, 0xf8(%rax) # imm = 0xFFFFFF16 jne 0x943471 movq 
0x50(%rsp), %rdi movq 0x48(%rsp), %rsi movl 0x38(%rsp), %eax movl %eax, 0x24(%rsp) movl 0x38(%rsp), %eax movl $0x2, %ecx cltd idivl %ecx movl %eax, %ecx movl 0x24(%rsp), %eax subl %ecx, %eax movl %eax, 0x28(%rsp) movl 0x38(%rsp), %eax movl $0x2, %ecx cltd idivl %ecx movl %eax, %ecx movl 0x3c(%rsp), %r8d movl 0x3c(%rsp), %eax movl $0x2, %r9d cltd idivl %r9d subl %eax, %r8d movl 0x3c(%rsp), %eax movl $0x2, %r9d cltd idivl %r9d movl 0x28(%rsp), %edx movl %eax, %r9d movq 0x40(%rsp), %rax movq %rax, (%rsp) callq 0x68d50 jmp 0x943473 jmp 0x94364b movq 0x50(%rsp), %rax movq 0x48(%rsp), %rcx movq %rcx, 0x70(%rsp) movq %rax, 0x68(%rsp) movq 0x70(%rsp), %rax movq %rax, 0x18(%rsp) cmpq 0x68(%rsp), %rax jne 0x9434ac movq 0x18(%rsp), %rax movq %rax, 0x78(%rsp) jmp 0x94364b movq 0x68(%rsp), %rax cmpq $0x0, 0x8(%rax) je 0x9434d5 movq 0x68(%rsp), %rax movq 0x8(%rax), %rcx movl $0x1, 0x64(%rsp) movl 0x64(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x60(%rsp) movq 0x18(%rsp), %rax movq %rax, 0x88(%rsp) movq 0x88(%rsp), %rax movq %rax, 0x10(%rsp) cmpq $0x0, 0x8(%rax) je 0x943572 movq 0x10(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x84(%rsp) # imm = 0xFFFFFFFF movl 0x84(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x80(%rsp) cmpl $0x1, 0x80(%rsp) jne 0x943572 movq 0x10(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x943546 movq 0x10(%rsp), %rax movq 0x20(%rax), %rdi movq (%rax), %rsi movq (%rdi), %rax callq *0x18(%rax) jmp 0x943570 movq 0x10(%rsp), %rax movq (%rax), %rax movq %rax, 0x90(%rsp) cmpq $0x0, 0x90(%rsp) je 0x94356e movq 0x90(%rsp), %rdi callq 0x5e480 jmp 0x943570 jmp 0x943572 movq 0x18(%rsp), %rax movq 0x10(%rsp), %rcx movq $0x0, (%rcx) movq $0x0, 0x10(%rcx) movl $0x0, 0x18(%rcx) movl $0x0, 0x28(%rcx) movl $0x0, 0x2c(%rcx) movl $0x0, 0x30(%rcx) movl $0x0, 0x34(%rcx) movl $0x0, 0x38(%rcx) movq $0x0, 0x40(%rcx) movq $0x0, 0x8(%rcx) movq 0x68(%rsp), %rcx movq (%rcx), %rcx movq %rcx, (%rax) movq 0x68(%rsp), %rcx movq 0x8(%rcx), %rcx movq %rcx, 0x8(%rax) movq 0x68(%rsp), 
%rcx movq 0x10(%rcx), %rcx movq %rcx, 0x10(%rax) movq 0x68(%rsp), %rcx movl 0x18(%rcx), %ecx movl %ecx, 0x18(%rax) movq 0x68(%rsp), %rcx movq 0x20(%rcx), %rcx movq %rcx, 0x20(%rax) movq 0x68(%rsp), %rcx movl 0x28(%rcx), %ecx movl %ecx, 0x28(%rax) movq 0x68(%rsp), %rcx movl 0x2c(%rcx), %ecx movl %ecx, 0x2c(%rax) movq 0x68(%rsp), %rcx movl 0x30(%rcx), %ecx movl %ecx, 0x30(%rax) movq 0x68(%rsp), %rcx movl 0x34(%rcx), %ecx movl %ecx, 0x34(%rax) movq 0x68(%rsp), %rcx movl 0x38(%rcx), %ecx movl %ecx, 0x38(%rax) movq 0x68(%rsp), %rcx movq 0x40(%rcx), %rcx movq %rcx, 0x40(%rax) movq %rax, 0x78(%rsp) jmp 0x94364d addq $0x98, %rsp retq nopw %cs:(%rax,%rax) nop
/ysh329[P]ncnn/src/layer/deconvolution.cpp
ncnn::deconvolution_pack4_sse(ncnn::Mat const&, ncnn::Mat&, ncnn::Mat const&, ncnn::Mat const&, int, int, int, int, int, int, int, ncnn::Mat const&, ncnn::Option const&)
// Deconvolution (transposed convolution) for 4-element packed layout using SSE.
// Each "pixel" in bottom/top blobs is a group of 4 floats; every output pixel
// gathers the input positions that would have written to it in a forward
// convolution, accumulating a 4x4 weight block per kernel tap.
// weight_data_packed is expected to hold, per output channel p, maxk blocks of
// 16 floats (4 input lanes x 4 output lanes) per input channel.
static void deconvolution_pack4_sse(const Mat& bottom_blob, Mat& top_blob, const Mat& weight_data_packed, const Mat& bias_data, int kernel_w, int kernel_h, int dilation_w, int dilation_h, int stride_w, int stride_h, int activation_type, const Mat& activation_params, const Option& opt)
{
    int outch = top_blob.c;

    // effective kernel footprint after dilation
    const int kernel_extent_w = dilation_w * (kernel_w - 1) + 1;
    const int kernel_extent_h = dilation_h * (kernel_h - 1) + 1;

    const float* bias_data_ptr = bias_data;

    // num_output
    #pragma omp parallel for num_threads(opt.num_threads)
    for (int p = 0; p < outch; p++)
    {
        float* outptr = top_blob.channel(p);

        const int maxk = kernel_w * kernel_h;

        // shadowed variable for less openmp task args
        const int w = bottom_blob.w;
        const int h = bottom_blob.h;
        const int channels = bottom_blob.c;
        const int outw = top_blob.w;
        const int outh = top_blob.h;

        for (int i = 0; i < outh; i++)
        {
            for (int j = 0; j < outw; j++)
            {
                // accumulator for one packed output pixel (4 lanes)
                __m128 _sum = _mm_setzero_ps();

                if (bias_data_ptr)
                {
                    // 4 bias values per output channel group
                    _sum = _mm_loadu_ps(bias_data_ptr + p * 4);
                }

                const float* kptr = weight_data_packed.channel(p);

                // channels
                for (int q = 0; q < channels; q++)
                {
                    const Mat m = bottom_blob.channel(q);

                    for (int y = 0; y < kernel_h; y++)
                    {
                        // map output row i back to the input row that would
                        // contribute through kernel row y in the forward pass
                        int sys = (i + y * dilation_h - (kernel_extent_h - 1));
                        if (sys < 0 || sys % stride_h != 0)
                            continue; // not on the stride grid -> no contribution

                        int sy = sys / stride_h;
                        if (sy >= h)
                            continue; // outside the input

                        for (int x = 0; x < kernel_w; x++)
                        {
                            // same back-mapping for the column
                            int sxs = (j + x * dilation_w - (kernel_extent_w - 1));
                            if (sxs < 0 || sxs % stride_w != 0)
                                continue;

                            int sx = sxs / stride_w;
                            if (sx >= w)
                                continue;

                            // packed input pixel (4 floats) at (sy, sx)
                            const float* sptr = m.row(sy) + sx * 4;

                            // offset of this tap's 4x4 weight block (16 floats)
                            int k = (y * kernel_w + x) * 16;

                            // broadcast each input lane across a vector
                            __m128 _val0 = _mm_load1_ps(sptr);
                            __m128 _val1 = _mm_load1_ps(sptr + 1);
                            __m128 _val2 = _mm_load1_ps(sptr + 2);
                            __m128 _val3 = _mm_load1_ps(sptr + 3);

                            __m128 _w0 = _mm_load_ps(kptr + k);
                            __m128 _w1 = _mm_load_ps(kptr + k + 4);
                            __m128 _w2 = _mm_load_ps(kptr + k + 8);
                            __m128 _w3 = _mm_load_ps(kptr + k + 12);

                            // sum += val_lane * weight_row, for each of 4 lanes
                            _sum = _mm_comp_fmadd_ps(_val0, _w0, _sum);
                            _sum = _mm_comp_fmadd_ps(_val1, _w1, _sum);
                            _sum = _mm_comp_fmadd_ps(_val2, _w2, _sum);
                            _sum = _mm_comp_fmadd_ps(_val3, _w3, _sum);
                        }
                    }

                    // advance to the next input channel's weight blocks
                    kptr += maxk * 16;
                }

                _sum = activation_sse(_sum, activation_type, activation_params);

                _mm_storeu_ps(outptr, _sum);
                outptr += 4;
            }
        }
    }
}
subq $0x2848, %rsp # imm = 0x2848 movq 0x2880(%rsp), %rax movq 0x2878(%rsp), %rax movl 0x2870(%rsp), %eax movl 0x2868(%rsp), %eax movl 0x2860(%rsp), %eax movl 0x2858(%rsp), %eax movl 0x2850(%rsp), %eax movq %rdi, 0x390(%rsp) movq %rsi, 0x388(%rsp) movq %rdx, 0x380(%rsp) movq %rcx, 0x378(%rsp) movl %r8d, 0x374(%rsp) movl %r9d, 0x370(%rsp) movq 0x388(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0x36c(%rsp) movl 0x2850(%rsp), %eax movl 0x374(%rsp), %ecx subl $0x1, %ecx imull %ecx, %eax addl $0x1, %eax movl %eax, 0x368(%rsp) movl 0x2858(%rsp), %eax movl 0x370(%rsp), %ecx subl $0x1, %ecx imull %ecx, %eax addl $0x1, %eax movl %eax, 0x364(%rsp) movq 0x378(%rsp), %rax movq %rax, 0x4c8(%rsp) movq 0x4c8(%rsp), %rax movq (%rax), %rax movq %rax, 0x358(%rsp) movl $0x0, 0x354(%rsp) movl 0x354(%rsp), %eax cmpl 0x36c(%rsp), %eax jge 0x94b084 movq 0x388(%rsp), %rcx movl 0x354(%rsp), %eax leaq 0x300(%rsp), %rdx movq %rdx, 0x3b0(%rsp) movq %rcx, 0x3a8(%rsp) movl %eax, 0x3a4(%rsp) movq 0x3a8(%rsp), %rax movq %rax, 0x168(%rsp) movb $0x0, 0x3a3(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x3a4(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x300(%rsp), %r10 movq %r10, 0x580(%rsp) movl %r9d, 0x57c(%rsp) movl %r8d, 0x578(%rsp) movl %edi, 0x574(%rsp) movq %rsi, 0x568(%rsp) movq %rdx, 0x560(%rsp) movl %ecx, 0x55c(%rsp) movq %rax, 0x550(%rsp) movq 0x580(%rsp), %rcx movq %rcx, 0x160(%rsp) movq 0x568(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x560(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x55c(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x550(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x57c(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x578(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x574(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 
0x10(%rcx), %rax movq %rax, 0x590(%rsp) movl $0x10, 0x58c(%rsp) movq 0x590(%rsp), %rax movslq 0x58c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x58c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x168(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x328(%rsp) cmpl $0x4, 0x28(%rax) jne 0x9470a6 movq 0x168(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x340(%rsp) movb $0x1, 0x3a3(%rsp) testb $0x1, 0x3a3(%rsp) jne 0x9471e1 leaq 0x300(%rsp), %rax movq %rax, 0x3b8(%rsp) movq 0x3b8(%rsp), %rax movq %rax, 0x458(%rsp) movq 0x458(%rsp), %rax movq %rax, 0x158(%rsp) cmpq $0x0, 0x8(%rax) je 0x947184 movq 0x158(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x454(%rsp) # imm = 0xFFFFFFFF movl 0x454(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x450(%rsp) cmpl $0x1, 0x450(%rsp) jne 0x947184 movq 0x158(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x947155 movq 0x158(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x947153 jmp 0x947182 movq 0x158(%rsp), %rax movq (%rax), %rax movq %rax, 0x5c8(%rsp) cmpq $0x0, 0x5c8(%rsp) je 0x947180 movq 0x5c8(%rsp), %rdi callq 0x5e480 jmp 0x947182 jmp 0x947184 movq 0x158(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x9471df movq %rax, %rdi callq 0x5fc90 jmp 0x9471e1 leaq 0x300(%rsp), %rax movq %rax, 0x398(%rsp) movq 0x398(%rsp), %rax movq (%rax), %rax movq %rax, 0x150(%rsp) leaq 0x300(%rsp), %rax movq %rax, 0x3c0(%rsp) movq 0x3c0(%rsp), %rax movq %rax, 0x448(%rsp) movq 0x448(%rsp), %rax movq %rax, 0x148(%rsp) cmpq $0x0, 0x8(%rax) je 0x9472cc movq 0x148(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x444(%rsp) # imm = 0xFFFFFFFF movl 0x444(%rsp), %eax 
lock xaddl %eax, (%rcx) movl %eax, 0x440(%rsp) cmpl $0x1, 0x440(%rsp) jne 0x9472cc movq 0x148(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x94729d movq 0x148(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x94729b jmp 0x9472ca movq 0x148(%rsp), %rax movq (%rax), %rax movq %rax, 0x5d0(%rsp) cmpq $0x0, 0x5d0(%rsp) je 0x9472c8 movq 0x5d0(%rsp), %rdi callq 0x5e480 jmp 0x9472ca jmp 0x9472cc movq 0x148(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x947327 movq %rax, %rdi callq 0x5fc90 movq 0x150(%rsp), %rax movq %rax, 0x348(%rsp) movl 0x374(%rsp), %eax imull 0x370(%rsp), %eax movl %eax, 0x2f0(%rsp) movq 0x390(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x2ec(%rsp) movq 0x390(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0x2e8(%rsp) movq 0x390(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0x2e4(%rsp) movq 0x388(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x2e0(%rsp) movq 0x388(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0x2dc(%rsp) movl $0x0, 0x2d8(%rsp) movl 0x2d8(%rsp), %eax cmpl 0x2dc(%rsp), %eax jge 0x94b06c movl $0x0, 0x2d4(%rsp) movl 0x2d4(%rsp), %eax cmpl 0x2e0(%rsp), %eax jge 0x94b054 xorps %xmm0, %xmm0 movaps %xmm0, 0x600(%rsp) movaps 0x600(%rsp), %xmm0 movaps %xmm0, 0x2c0(%rsp) cmpq $0x0, 0x358(%rsp) je 0x947442 movq 0x358(%rsp), %rax movl 0x354(%rsp), %ecx shll $0x2, %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x618(%rsp) movq 0x618(%rsp), %rax movups (%rax), %xmm0 movaps %xmm0, 0x2c0(%rsp) movq 0x380(%rsp), %rcx movl 0x354(%rsp), %eax leaq 0x270(%rsp), %rdx movq %rdx, 0x4a0(%rsp) movq %rcx, 0x498(%rsp) movl %eax, 0x494(%rsp) movq 0x498(%rsp), %rax movq %rax, 0x140(%rsp) movb $0x0, 0x493(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx 
movslq 0x494(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x270(%rsp), %r10 movq %r10, 0x510(%rsp) movl %r9d, 0x50c(%rsp) movl %r8d, 0x508(%rsp) movl %edi, 0x504(%rsp) movq %rsi, 0x4f8(%rsp) movq %rdx, 0x4f0(%rsp) movl %ecx, 0x4ec(%rsp) movq %rax, 0x4e0(%rsp) movq 0x510(%rsp), %rcx movq %rcx, 0x138(%rsp) movq 0x4f8(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x4f0(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x4ec(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x4e0(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x50c(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x508(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x504(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x5b0(%rsp) movl $0x10, 0x5ac(%rsp) movq 0x5b0(%rsp), %rax movslq 0x5ac(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x5ac(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x140(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x298(%rsp) cmpl $0x4, 0x28(%rax) jne 0x9475fe movq 0x140(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x2b0(%rsp) movb $0x1, 0x493(%rsp) testb $0x1, 0x493(%rsp) jne 0x947737 leaq 0x270(%rsp), %rax movq %rax, 0x4a8(%rsp) movq 0x4a8(%rsp), %rax movq %rax, 0x4b8(%rsp) movq 0x4b8(%rsp), %rax movq %rax, 0x130(%rsp) cmpq $0x0, 0x8(%rax) je 0x9476dc movq 0x130(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x4b4(%rsp) # imm = 0xFFFFFFFF movl 0x4b4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x4b0(%rsp) cmpl $0x1, 0x4b0(%rsp) jne 0x9476dc movq 0x130(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x9476ad movq 0x130(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x9476ab jmp 0x9476da movq 
0x130(%rsp), %rax movq (%rax), %rax movq %rax, 0x5b8(%rsp) cmpq $0x0, 0x5b8(%rsp) je 0x9476d8 movq 0x5b8(%rsp), %rdi callq 0x5e480 jmp 0x9476da jmp 0x9476dc movq 0x130(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x947737 movq %rax, %rdi callq 0x5fc90 leaq 0x270(%rsp), %rax movq %rax, 0x4c0(%rsp) movq 0x4c0(%rsp), %rax movq (%rax), %rax movq %rax, 0x128(%rsp) leaq 0x270(%rsp), %rax movq %rax, 0x3d0(%rsp) movq 0x3d0(%rsp), %rax movq %rax, 0x428(%rsp) movq 0x428(%rsp), %rax movq %rax, 0x120(%rsp) cmpq $0x0, 0x8(%rax) je 0x947822 movq 0x120(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x424(%rsp) # imm = 0xFFFFFFFF movl 0x424(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x420(%rsp) cmpl $0x1, 0x420(%rsp) jne 0x947822 movq 0x120(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x9477f3 movq 0x120(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x9477f1 jmp 0x947820 movq 0x120(%rsp), %rax movq (%rax), %rax movq %rax, 0x5e0(%rsp) cmpq $0x0, 0x5e0(%rsp) je 0x94781e movq 0x5e0(%rsp), %rdi callq 0x5e480 jmp 0x947820 jmp 0x947822 movq 0x120(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x94787d movq %rax, %rdi callq 0x5fc90 movq 0x128(%rsp), %rax movq %rax, 0x2b8(%rsp) movl $0x0, 0x26c(%rsp) movl 0x26c(%rsp), %eax cmpl 0x2e4(%rsp), %eax jge 0x9483c6 movq 0x390(%rsp), %rcx movl 0x26c(%rsp), %eax leaq 0x220(%rsp), %rdx movq %rdx, 0x470(%rsp) movq %rcx, 0x468(%rsp) movl %eax, 0x464(%rsp) movq 0x468(%rsp), %rax movq %rax, 0x118(%rsp) movb $0x0, 0x463(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx 
movslq 0x464(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x220(%rsp), %r10 movq %r10, 0x548(%rsp) movl %r9d, 0x544(%rsp) movl %r8d, 0x540(%rsp) movl %edi, 0x53c(%rsp) movq %rsi, 0x530(%rsp) movq %rdx, 0x528(%rsp) movl %ecx, 0x524(%rsp) movq %rax, 0x518(%rsp) movq 0x548(%rsp), %rcx movq %rcx, 0x110(%rsp) movq 0x530(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x528(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x524(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x518(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x544(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x540(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x53c(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x5a0(%rsp) movl $0x10, 0x59c(%rsp) movq 0x5a0(%rsp), %rax movslq 0x59c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x59c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x118(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x248(%rsp) cmpl $0x4, 0x28(%rax) jne 0x947a68 movq 0x118(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x260(%rsp) movb $0x1, 0x463(%rsp) testb $0x1, 0x463(%rsp) jne 0x947ba1 leaq 0x220(%rsp), %rax movq %rax, 0x478(%rsp) movq 0x478(%rsp), %rax movq %rax, 0x488(%rsp) movq 0x488(%rsp), %rax movq %rax, 0x108(%rsp) cmpq $0x0, 0x8(%rax) je 0x947b46 movq 0x108(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x484(%rsp) # imm = 0xFFFFFFFF movl 0x484(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x480(%rsp) cmpl $0x1, 0x480(%rsp) jne 0x947b46 movq 0x108(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x947b17 movq 0x108(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x947b15 jmp 0x947b44 movq 
0x108(%rsp), %rax movq (%rax), %rax movq %rax, 0x5c0(%rsp) cmpq $0x0, 0x5c0(%rsp) je 0x947b42 movq 0x5c0(%rsp), %rdi callq 0x5e480 jmp 0x947b44 jmp 0x947b46 movq 0x108(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x947ba1 movq %rax, %rdi callq 0x5fc90 movl $0x0, 0x21c(%rsp) movl 0x21c(%rsp), %eax cmpl 0x370(%rsp), %eax jge 0x948279 movl 0x2d8(%rsp), %eax movl 0x21c(%rsp), %ecx imull 0x2858(%rsp), %ecx addl %ecx, %eax movl 0x364(%rsp), %ecx subl $0x1, %ecx subl %ecx, %eax movl %eax, 0x218(%rsp) cmpl $0x0, 0x218(%rsp) jl 0x947c09 movl 0x218(%rsp), %eax cltd idivl 0x2868(%rsp) cmpl $0x0, %edx je 0x947c0e jmp 0x948263 movl 0x218(%rsp), %eax cltd idivl 0x2868(%rsp) movl %eax, 0x214(%rsp) movl 0x214(%rsp), %eax cmpl 0x2e8(%rsp), %eax jl 0x947c39 jmp 0x948263 movl $0x0, 0x210(%rsp) movl 0x210(%rsp), %eax cmpl 0x374(%rsp), %eax jge 0x948261 movl 0x2d4(%rsp), %eax movl 0x210(%rsp), %ecx imull 0x2850(%rsp), %ecx addl %ecx, %eax movl 0x368(%rsp), %ecx subl $0x1, %ecx subl %ecx, %eax movl %eax, 0x20c(%rsp) cmpl $0x0, 0x20c(%rsp) jl 0x947ca1 movl 0x20c(%rsp), %eax cltd idivl 0x2860(%rsp) cmpl $0x0, %edx je 0x947ca6 jmp 0x94824b movl 0x20c(%rsp), %eax cltd idivl 0x2860(%rsp) movl %eax, 0x208(%rsp) movl 0x208(%rsp), %eax cmpl 0x2ec(%rsp), %eax jl 0x947cd1 jmp 0x94824b movl 0x214(%rsp), %eax leaq 0x220(%rsp), %rcx movq %rcx, 0x4d8(%rsp) movl %eax, 0x4d4(%rsp) movq 0x4d8(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0x4d4(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x100(%rsp) movq 0x100(%rsp), %rax movl 0x208(%rsp), %ecx shll $0x2, %ecx movslq %ecx, %rcx leaq (%rax,%rcx,4), %rax movq %rax, 0x200(%rsp) movl 0x21c(%rsp), %eax movl 0x374(%rsp), %ecx imull %ecx, %eax movl 0x210(%rsp), %ecx addl %ecx, %eax shll $0x4, %eax movl %eax, 0x1fc(%rsp) movq 
0x200(%rsp), %rax movq %rax, 0x698(%rsp) movq 0x698(%rsp), %rax movss (%rax), %xmm0 movss %xmm0, 0x690(%rsp) movaps 0x690(%rsp), %xmm0 shufps $0x0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0] movaps %xmm0, 0x680(%rsp) movaps 0x680(%rsp), %xmm0 movaps %xmm0, 0xf0(%rsp) movaps 0xf0(%rsp), %xmm0 movaps %xmm0, 0x1e0(%rsp) movq 0x200(%rsp), %rax addq $0x4, %rax movq %rax, 0x678(%rsp) movq 0x678(%rsp), %rax movss (%rax), %xmm0 movss %xmm0, 0x670(%rsp) movaps 0x670(%rsp), %xmm0 shufps $0x0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0] movaps %xmm0, 0x660(%rsp) movaps 0x660(%rsp), %xmm0 movaps %xmm0, 0xe0(%rsp) movaps 0xe0(%rsp), %xmm0 movaps %xmm0, 0x1d0(%rsp) movq 0x200(%rsp), %rax addq $0x8, %rax movq %rax, 0x658(%rsp) movq 0x658(%rsp), %rax movss (%rax), %xmm0 movss %xmm0, 0x650(%rsp) movaps 0x650(%rsp), %xmm0 shufps $0x0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0] movaps %xmm0, 0x640(%rsp) movaps 0x640(%rsp), %xmm0 movaps %xmm0, 0xd0(%rsp) movaps 0xd0(%rsp), %xmm0 movaps %xmm0, 0x1c0(%rsp) movq 0x200(%rsp), %rax addq $0xc, %rax movq %rax, 0x638(%rsp) movq 0x638(%rsp), %rax movss (%rax), %xmm0 movss %xmm0, 0x630(%rsp) movaps 0x630(%rsp), %xmm0 shufps $0x0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0] movaps %xmm0, 0x620(%rsp) movaps 0x620(%rsp), %xmm0 movaps %xmm0, 0xc0(%rsp) movaps 0xc0(%rsp), %xmm0 movaps %xmm0, 0x1b0(%rsp) movq 0x2b8(%rsp), %rax movslq 0x1fc(%rsp), %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x6b8(%rsp) movq 0x6b8(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0xb0(%rsp) movaps 0xb0(%rsp), %xmm0 movaps %xmm0, 0x1a0(%rsp) movq 0x2b8(%rsp), %rax movslq 0x1fc(%rsp), %rcx shlq $0x2, %rcx addq %rcx, %rax addq $0x10, %rax movq %rax, 0x6b0(%rsp) movq 0x6b0(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0xa0(%rsp) movaps 0xa0(%rsp), %xmm0 movaps %xmm0, 0x190(%rsp) movq 0x2b8(%rsp), %rax movslq 0x1fc(%rsp), %rcx shlq $0x2, %rcx addq %rcx, %rax addq $0x20, %rax movq %rax, 0x6a8(%rsp) movq 0x6a8(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x90(%rsp) movaps 0x90(%rsp), %xmm0 movaps 
%xmm0, 0x180(%rsp) movq 0x2b8(%rsp), %rax movslq 0x1fc(%rsp), %rcx shlq $0x2, %rcx addq %rcx, %rax addq $0x30, %rax movq %rax, 0x6a0(%rsp) movq 0x6a0(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x80(%rsp) movaps 0x80(%rsp), %xmm0 movaps %xmm0, 0x170(%rsp) movaps 0x1e0(%rsp), %xmm2 movaps 0x1a0(%rsp), %xmm1 movaps 0x2c0(%rsp), %xmm0 movaps %xmm2, 0x770(%rsp) movaps %xmm1, 0x760(%rsp) movaps %xmm0, 0x750(%rsp) movaps 0x770(%rsp), %xmm1 movaps 0x760(%rsp), %xmm0 movaps %xmm1, 0x900(%rsp) movaps %xmm0, 0x8f0(%rsp) movaps 0x900(%rsp), %xmm1 mulps 0x8f0(%rsp), %xmm1 movaps 0x750(%rsp), %xmm0 movaps %xmm1, 0x880(%rsp) movaps %xmm0, 0x870(%rsp) movaps 0x880(%rsp), %xmm0 addps 0x870(%rsp), %xmm0 movaps %xmm0, 0x70(%rsp) movaps 0x70(%rsp), %xmm0 movaps %xmm0, 0x2c0(%rsp) movaps 0x1d0(%rsp), %xmm2 movaps 0x190(%rsp), %xmm1 movaps 0x2c0(%rsp), %xmm0 movaps %xmm2, 0x740(%rsp) movaps %xmm1, 0x730(%rsp) movaps %xmm0, 0x720(%rsp) movaps 0x740(%rsp), %xmm1 movaps 0x730(%rsp), %xmm0 movaps %xmm1, 0x920(%rsp) movaps %xmm0, 0x910(%rsp) movaps 0x920(%rsp), %xmm1 mulps 0x910(%rsp), %xmm1 movaps 0x720(%rsp), %xmm0 movaps %xmm1, 0x8a0(%rsp) movaps %xmm0, 0x890(%rsp) movaps 0x8a0(%rsp), %xmm0 addps 0x890(%rsp), %xmm0 movaps %xmm0, 0x60(%rsp) movaps 0x60(%rsp), %xmm0 movaps %xmm0, 0x2c0(%rsp) movaps 0x1c0(%rsp), %xmm2 movaps 0x180(%rsp), %xmm1 movaps 0x2c0(%rsp), %xmm0 movaps %xmm2, 0x710(%rsp) movaps %xmm1, 0x700(%rsp) movaps %xmm0, 0x6f0(%rsp) movaps 0x710(%rsp), %xmm1 movaps 0x700(%rsp), %xmm0 movaps %xmm1, 0x940(%rsp) movaps %xmm0, 0x930(%rsp) movaps 0x940(%rsp), %xmm1 mulps 0x930(%rsp), %xmm1 movaps 0x6f0(%rsp), %xmm0 movaps %xmm1, 0x8c0(%rsp) movaps %xmm0, 0x8b0(%rsp) movaps 0x8c0(%rsp), %xmm0 addps 0x8b0(%rsp), %xmm0 movaps %xmm0, 0x50(%rsp) movaps 0x50(%rsp), %xmm0 movaps %xmm0, 0x2c0(%rsp) movaps 0x1b0(%rsp), %xmm2 movaps 0x170(%rsp), %xmm1 movaps 0x2c0(%rsp), %xmm0 movaps %xmm2, 0x6e0(%rsp) movaps %xmm1, 0x6d0(%rsp) movaps %xmm0, 0x6c0(%rsp) movaps 0x6e0(%rsp), %xmm1 movaps 
0x6d0(%rsp), %xmm0 movaps %xmm1, 0x960(%rsp) movaps %xmm0, 0x950(%rsp) movaps 0x960(%rsp), %xmm1 mulps 0x950(%rsp), %xmm1 movaps 0x6c0(%rsp), %xmm0 movaps %xmm1, 0x8e0(%rsp) movaps %xmm0, 0x8d0(%rsp) movaps 0x8e0(%rsp), %xmm0 addps 0x8d0(%rsp), %xmm0 movaps %xmm0, 0x40(%rsp) movaps 0x40(%rsp), %xmm0 movaps %xmm0, 0x2c0(%rsp) movl 0x210(%rsp), %eax addl $0x1, %eax movl %eax, 0x210(%rsp) jmp 0x947c44 jmp 0x948263 movl 0x21c(%rsp), %eax addl $0x1, %eax movl %eax, 0x21c(%rsp) jmp 0x947bac movl 0x2f0(%rsp), %ecx shll $0x4, %ecx movq 0x2b8(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x2b8(%rsp) leaq 0x220(%rsp), %rax movq %rax, 0x3e0(%rsp) movq 0x3e0(%rsp), %rax movq %rax, 0x408(%rsp) movq 0x408(%rsp), %rax movq %rax, 0x38(%rsp) cmpq $0x0, 0x8(%rax) je 0x948356 movq 0x38(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x404(%rsp) # imm = 0xFFFFFFFF movl 0x404(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x400(%rsp) cmpl $0x1, 0x400(%rsp) jne 0x948356 movq 0x38(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x94832a movq 0x38(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x948328 jmp 0x948354 movq 0x38(%rsp), %rax movq (%rax), %rax movq %rax, 0x5f0(%rsp) cmpq $0x0, 0x5f0(%rsp) je 0x948352 movq 0x5f0(%rsp), %rdi callq 0x5e480 jmp 0x948354 jmp 0x948356 movq 0x38(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x9483ae movq %rax, %rdi callq 0x5fc90 jmp 0x9483b0 movl 0x26c(%rsp), %eax addl $0x1, %eax movl %eax, 0x26c(%rsp) jmp 0x947898 movaps 0x2c0(%rsp), %xmm0 movl 0x2870(%rsp), %ecx movq 0x2878(%rsp), %rax movaps %xmm0, 0x7d0(%rsp) movl %ecx, 0x7cc(%rsp) movq %rax, 0x7c0(%rsp) movl 0x7cc(%rsp), %eax decl %eax movl %eax, %ecx movq %rcx, 0x30(%rsp) subl $0x5, %eax ja 0x94afd7 movq 0x30(%rsp), %rax leaq 
0x14bf32f(%rip), %rcx # 0x1e07748 movslq (%rcx,%rax,4), %rax addq %rcx, %rax jmpq *%rax movaps 0x7d0(%rsp), %xmm1 xorps %xmm0, %xmm0 movaps %xmm0, 0x840(%rsp) movaps 0x840(%rsp), %xmm0 movaps %xmm1, 0x980(%rsp) movaps %xmm0, 0x970(%rsp) movaps 0x980(%rsp), %xmm0 movaps 0x970(%rsp), %xmm1 maxps %xmm1, %xmm0 movaps %xmm0, 0x7e0(%rsp) jmp 0x94afe7 movaps 0x7d0(%rsp), %xmm1 movq 0x7c0(%rsp), %rax movq %rax, 0x838(%rsp) movq $0x0, 0x830(%rsp) movq 0x838(%rsp), %rax movq (%rax), %rax movq 0x830(%rsp), %rcx movss (%rax,%rcx,4), %xmm0 movaps %xmm1, 0x9e0(%rsp) movss %xmm0, 0x9dc(%rsp) xorps %xmm0, %xmm0 movaps %xmm0, 0xa00(%rsp) movaps 0xa00(%rsp), %xmm2 movaps 0x9e0(%rsp), %xmm1 movaps %xmm2, 0xa60(%rsp) movaps %xmm1, 0xa50(%rsp) movaps 0xa60(%rsp), %xmm1 movaps 0xa50(%rsp), %xmm2 maxps %xmm2, %xmm1 movaps %xmm1, 0x9c0(%rsp) movaps %xmm0, 0x9f0(%rsp) movaps 0x9f0(%rsp), %xmm1 movaps 0x9e0(%rsp), %xmm0 movaps %xmm1, 0xb20(%rsp) movaps %xmm0, 0xb10(%rsp) movaps 0xb20(%rsp), %xmm0 movaps 0xb10(%rsp), %xmm1 minps %xmm1, %xmm0 movaps %xmm0, 0x9b0(%rsp) movaps 0x9c0(%rsp), %xmm1 movss 0x9dc(%rsp), %xmm0 movss %xmm0, 0xa80(%rsp) movaps 0xa80(%rsp), %xmm0 shufps $0x0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0] movaps %xmm0, 0xa70(%rsp) movaps 0xa70(%rsp), %xmm2 movaps 0x9b0(%rsp), %xmm0 movaps %xmm2, 0xa40(%rsp) movaps %xmm0, 0xa30(%rsp) movaps 0xa40(%rsp), %xmm0 mulps 0xa30(%rsp), %xmm0 movaps %xmm1, 0xa20(%rsp) movaps %xmm0, 0xa10(%rsp) movaps 0xa20(%rsp), %xmm0 addps 0xa10(%rsp), %xmm0 movaps %xmm0, 0x7e0(%rsp) jmp 0x94afe7 movq 0x7c0(%rsp), %rax movq %rax, 0x828(%rsp) movq $0x0, 0x820(%rsp) movq 0x828(%rsp), %rax movq (%rax), %rax movq 0x820(%rsp), %rcx movss (%rax,%rcx,4), %xmm0 movss %xmm0, 0xacc(%rsp) movss 0xacc(%rsp), %xmm0 shufps $0x0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0] movaps %xmm0, 0xab0(%rsp) movaps 0xab0(%rsp), %xmm0 movaps %xmm0, 0x7b0(%rsp) movq 0x7c0(%rsp), %rax movq %rax, 0x818(%rsp) movq $0x1, 0x810(%rsp) movq 0x818(%rsp), %rax movq (%rax), %rax movq 0x810(%rsp), 
%rcx movss (%rax,%rcx,4), %xmm0 movss %xmm0, 0xaa0(%rsp) movaps 0xaa0(%rsp), %xmm0 shufps $0x0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0] movaps %xmm0, 0xa90(%rsp) movaps 0xa90(%rsp), %xmm0 movaps %xmm0, 0x7a0(%rsp) movaps 0x7d0(%rsp), %xmm1 movaps 0x7b0(%rsp), %xmm0 movaps %xmm1, 0x9a0(%rsp) movaps %xmm0, 0x990(%rsp) movaps 0x9a0(%rsp), %xmm1 movaps 0x990(%rsp), %xmm0 maxps %xmm0, %xmm1 movaps 0x7a0(%rsp), %xmm0 movaps %xmm1, 0xb40(%rsp) movaps %xmm0, 0xb30(%rsp) movaps 0xb40(%rsp), %xmm0 movaps 0xb30(%rsp), %xmm1 minps %xmm1, %xmm0 movaps %xmm0, 0x7e0(%rsp) jmp 0x94afe7 movaps 0x7d0(%rsp), %xmm0 movaps %xmm0, 0xb60(%rsp) movl $0x3f800000, 0xbbc(%rsp) # imm = 0x3F800000 movss 0xbbc(%rsp), %xmm0 shufps $0x0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0] movaps %xmm0, 0xba0(%rsp) movaps 0xba0(%rsp), %xmm0 movaps %xmm0, 0xb50(%rsp) movaps 0xb50(%rsp), %xmm2 movaps %xmm2, %xmm1 xorps %xmm0, %xmm0 movaps %xmm0, 0xb70(%rsp) movaps 0xb70(%rsp), %xmm4 movaps 0xb60(%rsp), %xmm3 movaps %xmm4, 0x15d0(%rsp) movaps %xmm3, 0x15c0(%rsp) movaps 0x15d0(%rsp), %xmm3 movaps 0x15c0(%rsp), %xmm4 subps %xmm4, %xmm3 movaps %xmm3, 0x11e0(%rsp) movaps %xmm0, 0x11f0(%rsp) movaps 0x11f0(%rsp), %xmm0 movaps %xmm0, 0x11d0(%rsp) movaps 0x14ba304(%rip), %xmm0 # 0x1e02ab0 movaps %xmm0, 0x11a0(%rsp) movaps 0x11e0(%rsp), %xmm0 movaps %xmm0, 0x1570(%rsp) movaps 0x14ba2f5(%rip), %xmm0 # 0x1e02ac0 movaps %xmm0, 0x1560(%rsp) movaps 0x1570(%rsp), %xmm0 movaps 0x1560(%rsp), %xmm3 minps %xmm3, %xmm0 movaps %xmm0, 0x11e0(%rsp) movaps 0x11e0(%rsp), %xmm0 movaps %xmm0, 0x1550(%rsp) movaps 0x14ba2cb(%rip), %xmm0 # 0x1e02ad0 movaps %xmm0, 0x1540(%rsp) movaps 0x1550(%rsp), %xmm0 movaps 0x1540(%rsp), %xmm3 maxps %xmm3, %xmm0 movaps %xmm0, 0x11e0(%rsp) movaps 0x11e0(%rsp), %xmm0 movaps %xmm0, 0x1530(%rsp) movaps 0x14ba2a1(%rip), %xmm0 # 0x1e02ae0 movaps %xmm0, 0x1520(%rsp) movaps 0x1530(%rsp), %xmm0 movaps 0x1520(%rsp), %xmm3 mulps %xmm3, %xmm0 movaps %xmm0, 0x11c0(%rsp) movaps 0x11c0(%rsp), %xmm0 movaps %xmm0, 0x1410(%rsp) 
movaps 0x14ba277(%rip), %xmm0 # 0x1e02af0 movaps %xmm0, 0x1400(%rsp) movaps 0x1410(%rsp), %xmm3 movaps 0x1400(%rsp), %xmm4 addps %xmm4, %xmm3 movaps %xmm3, 0x11c0(%rsp) movaps 0x11c0(%rsp), %xmm3 movaps %xmm3, 0x15f0(%rsp) cvttps2dq 0x15f0(%rsp), %xmm3 movaps %xmm3, 0x11b0(%rsp) movaps 0x11b0(%rsp), %xmm3 movaps %xmm3, 0x1620(%rsp) cvtdq2ps 0x1620(%rsp), %xmm3 movaps %xmm3, 0x11d0(%rsp) movaps 0x11d0(%rsp), %xmm4 movaps 0x11c0(%rsp), %xmm3 movaps %xmm4, 0x1650(%rsp) movaps %xmm3, 0x1640(%rsp) movaps 0x1640(%rsp), %xmm3 movaps 0x1650(%rsp), %xmm4 cmpltps %xmm4, %xmm3 movaps %xmm3, 0x1190(%rsp) movaps 0x1190(%rsp), %xmm4 movaps 0x11a0(%rsp), %xmm3 movaps %xmm4, 0x1690(%rsp) movaps %xmm3, 0x1680(%rsp) movaps 0x1690(%rsp), %xmm3 movaps 0x1680(%rsp), %xmm4 pand %xmm4, %xmm3 movaps %xmm3, 0x1190(%rsp) movaps 0x11d0(%rsp), %xmm4 movaps 0x1190(%rsp), %xmm3 movaps %xmm4, 0x1590(%rsp) movaps %xmm3, 0x1580(%rsp) movaps 0x1590(%rsp), %xmm3 movaps 0x1580(%rsp), %xmm4 subps %xmm4, %xmm3 movaps %xmm3, 0x11c0(%rsp) movaps 0x11c0(%rsp), %xmm4 movaps 0x11e0(%rsp), %xmm3 movaps %xmm4, 0x1750(%rsp) movaps 0x14ba151(%rip), %xmm4 # 0x1e02b00 movaps %xmm4, 0x1740(%rsp) movaps %xmm3, 0x1730(%rsp) movaps 0x1730(%rsp), %xmm4 movaps 0x1750(%rsp), %xmm5 movaps 0x1740(%rsp), %xmm3 movaps %xmm5, 0x1770(%rsp) movaps %xmm3, 0x1760(%rsp) movaps 0x1770(%rsp), %xmm3 movaps 0x1760(%rsp), %xmm5 mulps %xmm5, %xmm3 movaps %xmm4, 0x1790(%rsp) movaps %xmm3, 0x1780(%rsp) movaps 0x1790(%rsp), %xmm3 movaps 0x1780(%rsp), %xmm4 subps %xmm4, %xmm3 movaps %xmm3, 0x11e0(%rsp) movaps 0x11c0(%rsp), %xmm4 movaps 0x11e0(%rsp), %xmm3 movaps %xmm4, 0x16e0(%rsp) movaps 0x14ba0cc(%rip), %xmm4 # 0x1e02b10 movaps %xmm4, 0x16d0(%rsp) movaps %xmm3, 0x16c0(%rsp) movaps 0x16c0(%rsp), %xmm4 movaps 0x16e0(%rsp), %xmm5 movaps 0x16d0(%rsp), %xmm3 movaps %xmm5, 0x1700(%rsp) movaps %xmm3, 0x16f0(%rsp) movaps 0x1700(%rsp), %xmm3 movaps 0x16f0(%rsp), %xmm5 mulps %xmm5, %xmm3 movaps %xmm4, 0x1720(%rsp) movaps %xmm3, 0x1710(%rsp) movaps 
0x1720(%rsp), %xmm3 movaps 0x1710(%rsp), %xmm4 subps %xmm4, %xmm3 movaps %xmm3, 0x11e0(%rsp) movaps 0x11e0(%rsp), %xmm3 movaps %xmm3, 0x1510(%rsp) movaps %xmm3, 0x1500(%rsp) movaps 0x1510(%rsp), %xmm3 movaps 0x1500(%rsp), %xmm4 mulps %xmm4, %xmm3 movaps %xmm3, 0x11d0(%rsp) movaps 0x14ba02c(%rip), %xmm3 # 0x1e02b20 movaps %xmm3, 0x1180(%rsp) movaps 0x1180(%rsp), %xmm4 movaps 0x11e0(%rsp), %xmm3 movaps %xmm4, 0x1310(%rsp) movaps %xmm3, 0x1300(%rsp) movaps 0x14ba00d(%rip), %xmm3 # 0x1e02b30 movaps %xmm3, 0x12f0(%rsp) movaps 0x1310(%rsp), %xmm4 movaps 0x1300(%rsp), %xmm3 movaps %xmm4, 0x1430(%rsp) movaps %xmm3, 0x1420(%rsp) movaps 0x1430(%rsp), %xmm4 movaps 0x1420(%rsp), %xmm3 mulps %xmm3, %xmm4 movaps 0x12f0(%rsp), %xmm3 movaps %xmm4, 0x1330(%rsp) movaps %xmm3, 0x1320(%rsp) movaps 0x1330(%rsp), %xmm3 movaps 0x1320(%rsp), %xmm4 addps %xmm4, %xmm3 movaps %xmm3, 0x1180(%rsp) movaps 0x1180(%rsp), %xmm4 movaps 0x11e0(%rsp), %xmm3 movaps %xmm4, 0x12e0(%rsp) movaps %xmm3, 0x12d0(%rsp) movaps 0x14b9f88(%rip), %xmm3 # 0x1e02b40 movaps %xmm3, 0x12c0(%rsp) movaps 0x12e0(%rsp), %xmm4 movaps 0x12d0(%rsp), %xmm3 movaps %xmm4, 0x1450(%rsp) movaps %xmm3, 0x1440(%rsp) movaps 0x1450(%rsp), %xmm4 movaps 0x1440(%rsp), %xmm3 mulps %xmm3, %xmm4 movaps 0x12c0(%rsp), %xmm3 movaps %xmm4, 0x1350(%rsp) movaps %xmm3, 0x1340(%rsp) movaps 0x1350(%rsp), %xmm3 movaps 0x1340(%rsp), %xmm4 addps %xmm4, %xmm3 movaps %xmm3, 0x1180(%rsp) movaps 0x1180(%rsp), %xmm4 movaps 0x11e0(%rsp), %xmm3 movaps %xmm4, 0x12b0(%rsp) movaps %xmm3, 0x12a0(%rsp) movaps 0x14b9f03(%rip), %xmm3 # 0x1e02b50 movaps %xmm3, 0x1290(%rsp) movaps 0x12b0(%rsp), %xmm4 movaps 0x12a0(%rsp), %xmm3 movaps %xmm4, 0x1470(%rsp) movaps %xmm3, 0x1460(%rsp) movaps 0x1470(%rsp), %xmm4 movaps 0x1460(%rsp), %xmm3 mulps %xmm3, %xmm4 movaps 0x1290(%rsp), %xmm3 movaps %xmm4, 0x1370(%rsp) movaps %xmm3, 0x1360(%rsp) movaps 0x1370(%rsp), %xmm3 movaps 0x1360(%rsp), %xmm4 addps %xmm4, %xmm3 movaps %xmm3, 0x1180(%rsp) movaps 0x1180(%rsp), %xmm4 movaps 
0x11e0(%rsp), %xmm3 movaps %xmm4, 0x1280(%rsp) movaps %xmm3, 0x1270(%rsp) movaps 0x14b9e7e(%rip), %xmm3 # 0x1e02b60 movaps %xmm3, 0x1260(%rsp) movaps 0x1280(%rsp), %xmm4 movaps 0x1270(%rsp), %xmm3 movaps %xmm4, 0x1490(%rsp) movaps %xmm3, 0x1480(%rsp) movaps 0x1490(%rsp), %xmm4 movaps 0x1480(%rsp), %xmm3 mulps %xmm3, %xmm4 movaps 0x1260(%rsp), %xmm3 movaps %xmm4, 0x1390(%rsp) movaps %xmm3, 0x1380(%rsp) movaps 0x1390(%rsp), %xmm3 movaps 0x1380(%rsp), %xmm4 addps %xmm4, %xmm3 movaps %xmm3, 0x1180(%rsp) movaps 0x1180(%rsp), %xmm4 movaps 0x11e0(%rsp), %xmm3 movaps %xmm4, 0x1250(%rsp) movaps %xmm3, 0x1240(%rsp) movaps %xmm0, 0x1230(%rsp) movaps 0x1250(%rsp), %xmm3 movaps 0x1240(%rsp), %xmm0 movaps %xmm3, 0x14b0(%rsp) movaps %xmm0, 0x14a0(%rsp) movaps 0x14b0(%rsp), %xmm3 movaps 0x14a0(%rsp), %xmm0 mulps %xmm0, %xmm3 movaps 0x1230(%rsp), %xmm0 movaps %xmm3, 0x13b0(%rsp) movaps %xmm0, 0x13a0(%rsp) movaps 0x13b0(%rsp), %xmm0 movaps 0x13a0(%rsp), %xmm3 addps %xmm3, %xmm0 movaps %xmm0, 0x1180(%rsp) movaps 0x1180(%rsp), %xmm4 movaps 0x11d0(%rsp), %xmm3 movaps 0x11e0(%rsp), %xmm0 movaps %xmm4, 0x1220(%rsp) movaps %xmm3, 0x1210(%rsp) movaps %xmm0, 0x1200(%rsp) movaps 0x1220(%rsp), %xmm3 movaps 0x1210(%rsp), %xmm0 movaps %xmm3, 0x14d0(%rsp) movaps %xmm0, 0x14c0(%rsp) movaps 0x14d0(%rsp), %xmm3 movaps 0x14c0(%rsp), %xmm0 mulps %xmm0, %xmm3 movaps 0x1200(%rsp), %xmm0 movaps %xmm3, 0x13d0(%rsp) movaps %xmm0, 0x13c0(%rsp) movaps 0x13d0(%rsp), %xmm0 movaps 0x13c0(%rsp), %xmm3 addps %xmm3, %xmm0 movaps %xmm0, 0x1180(%rsp) movaps 0x1180(%rsp), %xmm3 movaps 0x11a0(%rsp), %xmm0 movaps %xmm3, 0x13f0(%rsp) movaps %xmm0, 0x13e0(%rsp) movaps 0x13f0(%rsp), %xmm0 movaps 0x13e0(%rsp), %xmm3 addps %xmm3, %xmm0 movaps %xmm0, 0x1180(%rsp) movaps 0x11c0(%rsp), %xmm0 movaps %xmm0, 0x15e0(%rsp) cvttps2dq 0x15e0(%rsp), %xmm0 movaps %xmm0, 0x11b0(%rsp) movaps 0x11b0(%rsp), %xmm0 movaps %xmm0, 0x1890(%rsp) movaps 0x14b9c89(%rip), %xmm0 # 0x1e02b70 movaps %xmm0, 0x1880(%rsp) movdqa 0x1890(%rsp), %xmm0 
movdqa 0x1880(%rsp), %xmm3 paddd %xmm3, %xmm0 movdqa %xmm0, 0x11b0(%rsp) movdqa 0x11b0(%rsp), %xmm0 movdqa %xmm0, 0x18d0(%rsp) movl $0x17, 0x18cc(%rsp) movdqa 0x18d0(%rsp), %xmm0 movl 0x18cc(%rsp), %eax movd %eax, %xmm3 pslld %xmm3, %xmm0 movdqa %xmm0, 0x11b0(%rsp) movdqa 0x11b0(%rsp), %xmm0 movdqa %xmm0, 0x1900(%rsp) movdqa 0x1900(%rsp), %xmm0 movaps %xmm0, 0x1170(%rsp) movaps 0x1180(%rsp), %xmm3 movaps 0x1170(%rsp), %xmm0 movaps %xmm3, 0x14f0(%rsp) movaps %xmm0, 0x14e0(%rsp) movaps 0x14f0(%rsp), %xmm0 mulps 0x14e0(%rsp), %xmm0 movaps %xmm0, 0x1180(%rsp) movaps 0x1180(%rsp), %xmm0 movaps %xmm2, 0xb90(%rsp) movaps %xmm0, 0xb80(%rsp) movaps 0xb90(%rsp), %xmm0 addps 0xb80(%rsp), %xmm0 movaps %xmm1, 0xd50(%rsp) movaps %xmm0, 0xd40(%rsp) movaps 0xd50(%rsp), %xmm0 divps 0xd40(%rsp), %xmm0 movaps %xmm0, 0x7e0(%rsp) jmp 0x94afe7 movaps 0x7d0(%rsp), %xmm0 movaps %xmm0, 0xbc0(%rsp) movaps 0xbc0(%rsp), %xmm0 movaps %xmm0, (%rsp) movaps %xmm0, 0xdd0(%rsp) xorps %xmm0, %xmm0 movaps %xmm0, 0x10(%rsp) movaps %xmm0, 0xde0(%rsp) movaps 0xde0(%rsp), %xmm1 movaps %xmm1, 0xdc0(%rsp) movaps 0x14b9a68(%rip), %xmm15 # 0x1e02ab0 movaps %xmm15, 0xd90(%rsp) movaps 0xdd0(%rsp), %xmm1 movaps %xmm1, 0x1160(%rsp) movaps 0x14b9a57(%rip), %xmm14 # 0x1e02ac0 movaps %xmm14, 0x1150(%rsp) movaps 0x1160(%rsp), %xmm1 movaps 0x1150(%rsp), %xmm2 minps %xmm2, %xmm1 movaps %xmm1, 0xdd0(%rsp) movaps 0xdd0(%rsp), %xmm1 movaps %xmm1, 0x1140(%rsp) movaps 0x14b9a2b(%rip), %xmm13 # 0x1e02ad0 movaps %xmm13, 0x1130(%rsp) movaps 0x1140(%rsp), %xmm1 movaps 0x1130(%rsp), %xmm2 maxps %xmm2, %xmm1 movaps %xmm1, 0xdd0(%rsp) movaps 0xdd0(%rsp), %xmm1 movaps %xmm1, 0x1120(%rsp) movaps 0x14b99ff(%rip), %xmm10 # 0x1e02ae0 movaps %xmm10, 0x1110(%rsp) movaps 0x1120(%rsp), %xmm1 movaps 0x1110(%rsp), %xmm2 mulps %xmm2, %xmm1 movaps %xmm1, 0xdb0(%rsp) movaps 0xdb0(%rsp), %xmm1 movaps %xmm1, 0x1000(%rsp) movaps 0x14b99d4(%rip), %xmm4 # 0x1e02af0 movaps %xmm4, 0xff0(%rsp) movaps 0x1000(%rsp), %xmm1 movaps 0xff0(%rsp), %xmm2 addps 
%xmm2, %xmm1 movaps %xmm1, 0xdb0(%rsp) movaps 0xdb0(%rsp), %xmm1 movaps %xmm1, 0x1610(%rsp) cvttps2dq 0x1610(%rsp), %xmm1 movaps %xmm1, 0xda0(%rsp) movaps 0xda0(%rsp), %xmm1 movaps %xmm1, 0x1630(%rsp) cvtdq2ps 0x1630(%rsp), %xmm1 movaps %xmm1, 0xdc0(%rsp) movaps 0xdc0(%rsp), %xmm2 movaps 0xdb0(%rsp), %xmm1 movaps %xmm2, 0x1670(%rsp) movaps %xmm1, 0x1660(%rsp) movaps 0x1660(%rsp), %xmm1 movaps 0x1670(%rsp), %xmm2 cmpltps %xmm2, %xmm1 movaps %xmm1, 0xd80(%rsp) movaps 0xd80(%rsp), %xmm2 movaps 0xd90(%rsp), %xmm1 movaps %xmm2, 0x16b0(%rsp) movaps %xmm1, 0x16a0(%rsp) movaps 0x16b0(%rsp), %xmm1 movaps 0x16a0(%rsp), %xmm2 pand %xmm2, %xmm1 movaps %xmm1, 0xd80(%rsp) movaps 0xdc0(%rsp), %xmm2 movaps 0xd80(%rsp), %xmm1 movaps %xmm2, 0x15b0(%rsp) movaps %xmm1, 0x15a0(%rsp) movaps 0x15b0(%rsp), %xmm1 movaps 0x15a0(%rsp), %xmm2 subps %xmm2, %xmm1 movaps %xmm1, 0xdb0(%rsp) movaps 0xdb0(%rsp), %xmm2 movaps 0xdd0(%rsp), %xmm1 movaps %xmm2, 0x1830(%rsp) movaps 0x14b98ad(%rip), %xmm12 # 0x1e02b00 movaps %xmm12, 0x1820(%rsp) movaps %xmm1, 0x1810(%rsp) movaps 0x1810(%rsp), %xmm2 movaps 0x1830(%rsp), %xmm3 movaps 0x1820(%rsp), %xmm1 movaps %xmm3, 0x1850(%rsp) movaps %xmm1, 0x1840(%rsp) movaps 0x1850(%rsp), %xmm1 movaps 0x1840(%rsp), %xmm3 mulps %xmm3, %xmm1 movaps %xmm2, 0x1870(%rsp) movaps %xmm1, 0x1860(%rsp) movaps 0x1870(%rsp), %xmm1 movaps 0x1860(%rsp), %xmm2 subps %xmm2, %xmm1 movaps %xmm1, 0xdd0(%rsp) movaps 0xdb0(%rsp), %xmm2 movaps 0xdd0(%rsp), %xmm1 movaps %xmm2, 0x17c0(%rsp) movaps 0x14b9826(%rip), %xmm11 # 0x1e02b10 movaps %xmm11, 0x17b0(%rsp) movaps %xmm1, 0x17a0(%rsp) movaps 0x17a0(%rsp), %xmm2 movaps 0x17c0(%rsp), %xmm3 movaps 0x17b0(%rsp), %xmm1 movaps %xmm3, 0x17e0(%rsp) movaps %xmm1, 0x17d0(%rsp) movaps 0x17e0(%rsp), %xmm1 movaps 0x17d0(%rsp), %xmm3 mulps %xmm3, %xmm1 movaps %xmm2, 0x1800(%rsp) movaps %xmm1, 0x17f0(%rsp) movaps 0x1800(%rsp), %xmm1 movaps 0x17f0(%rsp), %xmm2 subps %xmm2, %xmm1 movaps %xmm1, 0xdd0(%rsp) movaps 0xdd0(%rsp), %xmm1 movaps %xmm1, 
0x1100(%rsp) movaps %xmm1, 0x10f0(%rsp) movaps 0x1100(%rsp), %xmm1 movaps 0x10f0(%rsp), %xmm2 mulps %xmm2, %xmm1 movaps %xmm1, 0xdc0(%rsp) movaps 0x14b9784(%rip), %xmm9 # 0x1e02b20 movaps %xmm9, 0xd70(%rsp) movaps 0xd70(%rsp), %xmm2 movaps 0xdd0(%rsp), %xmm1 movaps %xmm2, 0xf00(%rsp) movaps %xmm1, 0xef0(%rsp) movaps 0x14b9763(%rip), %xmm8 # 0x1e02b30 movaps %xmm8, 0xee0(%rsp) movaps 0xf00(%rsp), %xmm2 movaps 0xef0(%rsp), %xmm1 movaps %xmm2, 0x1020(%rsp) movaps %xmm1, 0x1010(%rsp) movaps 0x1020(%rsp), %xmm2 movaps 0x1010(%rsp), %xmm1 mulps %xmm1, %xmm2 movaps 0xee0(%rsp), %xmm1 movaps %xmm2, 0xf20(%rsp) movaps %xmm1, 0xf10(%rsp) movaps 0xf20(%rsp), %xmm1 movaps 0xf10(%rsp), %xmm2 addps %xmm2, %xmm1 movaps %xmm1, 0xd70(%rsp) movaps 0xd70(%rsp), %xmm2 movaps 0xdd0(%rsp), %xmm1 movaps %xmm2, 0xed0(%rsp) movaps %xmm1, 0xec0(%rsp) movaps 0x14b96dd(%rip), %xmm7 # 0x1e02b40 movaps %xmm7, 0xeb0(%rsp) movaps 0xed0(%rsp), %xmm2 movaps 0xec0(%rsp), %xmm1 movaps %xmm2, 0x1040(%rsp) movaps %xmm1, 0x1030(%rsp) movaps 0x1040(%rsp), %xmm2 movaps 0x1030(%rsp), %xmm1 mulps %xmm1, %xmm2 movaps 0xeb0(%rsp), %xmm1 movaps %xmm2, 0xf40(%rsp) movaps %xmm1, 0xf30(%rsp) movaps 0xf40(%rsp), %xmm1 movaps 0xf30(%rsp), %xmm2 addps %xmm2, %xmm1 movaps %xmm1, 0xd70(%rsp) movaps 0xd70(%rsp), %xmm2 movaps 0xdd0(%rsp), %xmm1 movaps %xmm2, 0xea0(%rsp) movaps %xmm1, 0xe90(%rsp) movaps 0x14b9658(%rip), %xmm6 # 0x1e02b50 movaps %xmm6, 0xe80(%rsp) movaps 0xea0(%rsp), %xmm2 movaps 0xe90(%rsp), %xmm1 movaps %xmm2, 0x1060(%rsp) movaps %xmm1, 0x1050(%rsp) movaps 0x1060(%rsp), %xmm2 movaps 0x1050(%rsp), %xmm1 mulps %xmm1, %xmm2 movaps 0xe80(%rsp), %xmm1 movaps %xmm2, 0xf60(%rsp) movaps %xmm1, 0xf50(%rsp) movaps 0xf60(%rsp), %xmm1 movaps 0xf50(%rsp), %xmm2 addps %xmm2, %xmm1 movaps %xmm1, 0xd70(%rsp) movaps 0xd70(%rsp), %xmm2 movaps 0xdd0(%rsp), %xmm1 movaps %xmm2, 0xe70(%rsp) movaps %xmm1, 0xe60(%rsp) movaps 0x14b95d3(%rip), %xmm5 # 0x1e02b60 movaps %xmm5, 0xe50(%rsp) movaps 0xe70(%rsp), %xmm2 movaps 
0xe60(%rsp), %xmm1 movaps %xmm2, 0x1080(%rsp) movaps %xmm1, 0x1070(%rsp) movaps 0x1080(%rsp), %xmm2 movaps 0x1070(%rsp), %xmm1 mulps %xmm1, %xmm2 movaps 0xe50(%rsp), %xmm1 movaps %xmm2, 0xf80(%rsp) movaps %xmm1, 0xf70(%rsp) movaps 0xf80(%rsp), %xmm1 movaps 0xf70(%rsp), %xmm2 addps %xmm2, %xmm1 movaps %xmm1, 0xd70(%rsp) movaps 0xd70(%rsp), %xmm2 movaps 0xdd0(%rsp), %xmm1 movaps %xmm2, 0xe40(%rsp) movaps %xmm1, 0xe30(%rsp) movaps %xmm4, 0xe20(%rsp) movaps 0xe40(%rsp), %xmm2 movaps 0xe30(%rsp), %xmm1 movaps %xmm2, 0x10a0(%rsp) movaps %xmm1, 0x1090(%rsp) movaps 0x10a0(%rsp), %xmm2 movaps 0x1090(%rsp), %xmm1 mulps %xmm1, %xmm2 movaps 0xe20(%rsp), %xmm1 movaps %xmm2, 0xfa0(%rsp) movaps %xmm1, 0xf90(%rsp) movaps 0xfa0(%rsp), %xmm1 movaps 0xf90(%rsp), %xmm2 addps %xmm2, %xmm1 movaps %xmm1, 0xd70(%rsp) movaps 0xd70(%rsp), %xmm3 movaps 0xdc0(%rsp), %xmm2 movaps 0xdd0(%rsp), %xmm1 movaps %xmm3, 0xe10(%rsp) movaps %xmm2, 0xe00(%rsp) movaps %xmm1, 0xdf0(%rsp) movaps 0xe10(%rsp), %xmm2 movaps 0xe00(%rsp), %xmm1 movaps %xmm2, 0x10c0(%rsp) movaps %xmm1, 0x10b0(%rsp) movaps 0x10c0(%rsp), %xmm2 movaps 0x10b0(%rsp), %xmm1 mulps %xmm1, %xmm2 movaps 0xdf0(%rsp), %xmm1 movaps %xmm2, 0xfc0(%rsp) movaps %xmm1, 0xfb0(%rsp) movaps 0xfc0(%rsp), %xmm1 movaps 0xfb0(%rsp), %xmm2 addps %xmm2, %xmm1 movaps %xmm1, 0xd70(%rsp) movaps 0xd70(%rsp), %xmm2 movaps 0xd90(%rsp), %xmm1 movaps %xmm2, 0xfe0(%rsp) movaps %xmm1, 0xfd0(%rsp) movaps 0xfe0(%rsp), %xmm1 movaps 0xfd0(%rsp), %xmm2 addps %xmm2, %xmm1 movaps %xmm1, 0xd70(%rsp) movaps 0xdb0(%rsp), %xmm1 movaps %xmm1, 0x1600(%rsp) cvttps2dq 0x1600(%rsp), %xmm1 movaps %xmm1, 0xda0(%rsp) movaps 0xda0(%rsp), %xmm1 movaps %xmm1, 0x18b0(%rsp) movaps 0x14b93de(%rip), %xmm1 # 0x1e02b70 movaps %xmm1, 0x20(%rsp) movaps %xmm1, 0x18a0(%rsp) movaps 0x18b0(%rsp), %xmm2 movaps 0x18a0(%rsp), %xmm3 paddd %xmm3, %xmm2 movaps %xmm2, 0xda0(%rsp) movaps 0xda0(%rsp), %xmm2 movaps %xmm2, 0x18f0(%rsp) movl $0x17, 0x18ec(%rsp) movaps 0x18f0(%rsp), %xmm2 movd 0x18ec(%rsp), 
%xmm3 pslld %xmm3, %xmm2 movaps %xmm2, 0xda0(%rsp) movaps 0xda0(%rsp), %xmm2 movaps %xmm2, 0x1910(%rsp) movaps 0x1910(%rsp), %xmm2 movaps %xmm2, 0xd60(%rsp) movaps 0xd70(%rsp), %xmm3 movaps 0xd60(%rsp), %xmm2 movaps %xmm3, 0x10e0(%rsp) movaps %xmm2, 0x10d0(%rsp) movaps 0x10e0(%rsp), %xmm2 movaps 0x10d0(%rsp), %xmm3 mulps %xmm3, %xmm2 movaps %xmm2, 0xd70(%rsp) movaps 0xd70(%rsp), %xmm3 movl $0x3f800000, 0xc2c(%rsp) # imm = 0x3F800000 movss 0xc2c(%rsp), %xmm2 shufps $0x0, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0] movaps %xmm2, 0xc10(%rsp) movaps 0xc10(%rsp), %xmm2 movaps %xmm3, 0xbe0(%rsp) movaps %xmm2, 0xbd0(%rsp) movaps 0xbe0(%rsp), %xmm2 movaps 0xbd0(%rsp), %xmm3 addps %xmm3, %xmm2 movaps %xmm2, 0x20f0(%rsp) movaps %xmm15, 0x20d0(%rsp) movaps 0x20f0(%rsp), %xmm3 movaps %xmm0, 0x2100(%rsp) movaps 0x2100(%rsp), %xmm2 movaps %xmm3, 0x2780(%rsp) movaps %xmm2, 0x2770(%rsp) movaps 0x2780(%rsp), %xmm2 movaps 0x2770(%rsp), %xmm3 cmpleps %xmm3, %xmm2 movaps %xmm2, 0x20c0(%rsp) movaps 0x20f0(%rsp), %xmm2 movaps %xmm2, 0x2640(%rsp) movaps 0x14b9273(%rip), %xmm2 # 0x1e02b80 movaps %xmm2, 0x2630(%rsp) movaps 0x2640(%rsp), %xmm2 movaps 0x2630(%rsp), %xmm3 maxps %xmm3, %xmm2 movaps %xmm2, 0x20f0(%rsp) movaps 0x20f0(%rsp), %xmm2 movaps %xmm2, 0x27b0(%rsp) movaps 0x27b0(%rsp), %xmm2 movaps %xmm2, 0x27a0(%rsp) movl $0x17, 0x279c(%rsp) movaps 0x27a0(%rsp), %xmm2 movd 0x279c(%rsp), %xmm3 psrld %xmm3, %xmm2 movaps %xmm2, 0x20e0(%rsp) movaps 0x20f0(%rsp), %xmm2 movaps %xmm2, 0x26f0(%rsp) movaps 0x14b9201(%rip), %xmm2 # 0x1e02b90 movaps %xmm2, 0x26e0(%rsp) movaps 0x26f0(%rsp), %xmm2 movaps 0x26e0(%rsp), %xmm3 pand %xmm3, %xmm2 movaps %xmm2, 0x20f0(%rsp) movaps 0x20f0(%rsp), %xmm2 movaps %xmm2, 0x27f0(%rsp) movaps %xmm4, 0x27e0(%rsp) movaps 0x27f0(%rsp), %xmm2 movaps 0x27e0(%rsp), %xmm3 por %xmm3, %xmm2 movaps %xmm2, 0x20f0(%rsp) movaps 0x20e0(%rsp), %xmm2 movaps %xmm2, 0x2810(%rsp) movaps %xmm1, 0x2800(%rsp) movaps 0x2810(%rsp), %xmm1 movaps 0x2800(%rsp), %xmm2 psubd %xmm2, %xmm1 movaps 
%xmm1, 0x20e0(%rsp) movaps 0x20e0(%rsp), %xmm1 movaps %xmm1, 0x2690(%rsp) cvtdq2ps 0x2690(%rsp), %xmm1 movaps %xmm1, 0x20b0(%rsp) movaps 0x20b0(%rsp), %xmm2 movaps 0x20d0(%rsp), %xmm1 movaps %xmm2, 0x2480(%rsp) movaps %xmm1, 0x2470(%rsp) movaps 0x2480(%rsp), %xmm1 movaps 0x2470(%rsp), %xmm2 addps %xmm2, %xmm1 movaps %xmm1, 0x20b0(%rsp) movaps 0x20f0(%rsp), %xmm1 movaps %xmm1, 0x2830(%rsp) movaps 0x14b9113(%rip), %xmm1 # 0x1e02ba0 movaps %xmm1, 0x2820(%rsp) movaps 0x2830(%rsp), %xmm1 movaps 0x2820(%rsp), %xmm2 cmpltps %xmm2, %xmm1 movaps %xmm1, 0x20a0(%rsp) movaps 0x20f0(%rsp), %xmm2 movaps 0x20a0(%rsp), %xmm1 movaps %xmm2, 0x26d0(%rsp) movaps %xmm1, 0x26c0(%rsp) movaps 0x26d0(%rsp), %xmm1 movaps 0x26c0(%rsp), %xmm2 pand %xmm2, %xmm1 movaps %xmm1, 0x2090(%rsp) movaps 0x20f0(%rsp), %xmm2 movaps 0x20d0(%rsp), %xmm1 movaps %xmm2, 0x2680(%rsp) movaps %xmm1, 0x2670(%rsp) movaps 0x2680(%rsp), %xmm1 movaps 0x2670(%rsp), %xmm2 subps %xmm2, %xmm1 movaps %xmm1, 0x20f0(%rsp) movaps 0x20b0(%rsp), %xmm2 movaps 0x20d0(%rsp), %xmm3 movaps 0x20a0(%rsp), %xmm1 movaps %xmm3, 0x26b0(%rsp) movaps %xmm1, 0x26a0(%rsp) movaps 0x26b0(%rsp), %xmm1 movaps 0x26a0(%rsp), %xmm3 pand %xmm3, %xmm1 movaps %xmm2, 0x2660(%rsp) movaps %xmm1, 0x2650(%rsp) movaps 0x2660(%rsp), %xmm1 movaps 0x2650(%rsp), %xmm2 subps %xmm2, %xmm1 movaps %xmm1, 0x20b0(%rsp) movaps 0x20f0(%rsp), %xmm2 movaps 0x2090(%rsp), %xmm1 movaps %xmm2, 0x2460(%rsp) movaps %xmm1, 0x2450(%rsp) movaps 0x2460(%rsp), %xmm1 movaps 0x2450(%rsp), %xmm2 addps %xmm2, %xmm1 movaps %xmm1, 0x20f0(%rsp) movaps 0x20f0(%rsp), %xmm1 movaps %xmm1, 0x2620(%rsp) movaps %xmm1, 0x2610(%rsp) movaps 0x2620(%rsp), %xmm1 movaps 0x2610(%rsp), %xmm2 mulps %xmm2, %xmm1 movaps %xmm1, 0x2080(%rsp) movaps 0x14b8fac(%rip), %xmm1 # 0x1e02bb0 movaps %xmm1, 0x2070(%rsp) movaps 0x2070(%rsp), %xmm2 movaps 0x20f0(%rsp), %xmm1 movaps %xmm2, 0x22e0(%rsp) movaps %xmm1, 0x22d0(%rsp) movaps 0x14b8f8d(%rip), %xmm1 # 0x1e02bc0 movaps %xmm1, 0x22c0(%rsp) movaps 0x22e0(%rsp), 
%xmm2 movaps 0x22d0(%rsp), %xmm1 movaps %xmm2, 0x24a0(%rsp) movaps %xmm1, 0x2490(%rsp) movaps 0x24a0(%rsp), %xmm2 movaps 0x2490(%rsp), %xmm1 mulps %xmm1, %xmm2 movaps 0x22c0(%rsp), %xmm1 movaps %xmm2, 0x2300(%rsp) movaps %xmm1, 0x22f0(%rsp) movaps 0x2300(%rsp), %xmm1 movaps 0x22f0(%rsp), %xmm2 addps %xmm2, %xmm1 movaps %xmm1, 0x2070(%rsp) movaps 0x2070(%rsp), %xmm2 movaps 0x20f0(%rsp), %xmm1 movaps %xmm2, 0x22b0(%rsp) movaps %xmm1, 0x22a0(%rsp) movaps 0x14b8f08(%rip), %xmm1 # 0x1e02bd0 movaps %xmm1, 0x2290(%rsp) movaps 0x22b0(%rsp), %xmm2 movaps 0x22a0(%rsp), %xmm1 movaps %xmm2, 0x24c0(%rsp) movaps %xmm1, 0x24b0(%rsp) movaps 0x24c0(%rsp), %xmm2 movaps 0x24b0(%rsp), %xmm1 mulps %xmm1, %xmm2 movaps 0x2290(%rsp), %xmm1 movaps %xmm2, 0x2320(%rsp) movaps %xmm1, 0x2310(%rsp) movaps 0x2320(%rsp), %xmm1 movaps 0x2310(%rsp), %xmm2 addps %xmm2, %xmm1 movaps %xmm1, 0x2070(%rsp) movaps 0x2070(%rsp), %xmm2 movaps 0x20f0(%rsp), %xmm1 movaps %xmm2, 0x2280(%rsp) movaps %xmm1, 0x2270(%rsp) movaps 0x14b8e83(%rip), %xmm1 # 0x1e02be0 movaps %xmm1, 0x2260(%rsp) movaps 0x2280(%rsp), %xmm2 movaps 0x2270(%rsp), %xmm1 movaps %xmm2, 0x24e0(%rsp) movaps %xmm1, 0x24d0(%rsp) movaps 0x24e0(%rsp), %xmm2 movaps 0x24d0(%rsp), %xmm1 mulps %xmm1, %xmm2 movaps 0x2260(%rsp), %xmm1 movaps %xmm2, 0x2340(%rsp) movaps %xmm1, 0x2330(%rsp) movaps 0x2340(%rsp), %xmm1 movaps 0x2330(%rsp), %xmm2 addps %xmm2, %xmm1 movaps %xmm1, 0x2070(%rsp) movaps 0x2070(%rsp), %xmm2 movaps 0x20f0(%rsp), %xmm1 movaps %xmm2, 0x2250(%rsp) movaps %xmm1, 0x2240(%rsp) movaps 0x14b8dfe(%rip), %xmm1 # 0x1e02bf0 movaps %xmm1, 0x2230(%rsp) movaps 0x2250(%rsp), %xmm2 movaps 0x2240(%rsp), %xmm1 movaps %xmm2, 0x2500(%rsp) movaps %xmm1, 0x24f0(%rsp) movaps 0x2500(%rsp), %xmm2 movaps 0x24f0(%rsp), %xmm1 mulps %xmm1, %xmm2 movaps 0x2230(%rsp), %xmm1 movaps %xmm2, 0x2360(%rsp) movaps %xmm1, 0x2350(%rsp) movaps 0x2360(%rsp), %xmm1 movaps 0x2350(%rsp), %xmm2 addps %xmm2, %xmm1 movaps %xmm1, 0x2070(%rsp) movaps 0x2070(%rsp), %xmm2 movaps 
0x20f0(%rsp), %xmm1 movaps %xmm2, 0x2220(%rsp) movaps %xmm1, 0x2210(%rsp) movaps 0x14b8d79(%rip), %xmm1 # 0x1e02c00 movaps %xmm1, 0x2200(%rsp) movaps 0x2220(%rsp), %xmm2 movaps 0x2210(%rsp), %xmm1 movaps %xmm2, 0x2520(%rsp) movaps %xmm1, 0x2510(%rsp) movaps 0x2520(%rsp), %xmm2 movaps 0x2510(%rsp), %xmm1 mulps %xmm1, %xmm2 movaps 0x2200(%rsp), %xmm1 movaps %xmm2, 0x2380(%rsp) movaps %xmm1, 0x2370(%rsp) movaps 0x2380(%rsp), %xmm1 movaps 0x2370(%rsp), %xmm2 addps %xmm2, %xmm1 movaps %xmm1, 0x2070(%rsp) movaps 0x2070(%rsp), %xmm2 movaps 0x20f0(%rsp), %xmm1 movaps %xmm2, 0x21f0(%rsp) movaps %xmm1, 0x21e0(%rsp) movaps 0x14b8cf4(%rip), %xmm1 # 0x1e02c10 movaps %xmm1, 0x21d0(%rsp) movaps 0x21f0(%rsp), %xmm2 movaps 0x21e0(%rsp), %xmm1 movaps %xmm2, 0x2540(%rsp) movaps %xmm1, 0x2530(%rsp) movaps 0x2540(%rsp), %xmm2 movaps 0x2530(%rsp), %xmm1 mulps %xmm1, %xmm2 movaps 0x21d0(%rsp), %xmm1 movaps %xmm2, 0x23a0(%rsp) movaps %xmm1, 0x2390(%rsp) movaps 0x23a0(%rsp), %xmm1 movaps 0x2390(%rsp), %xmm2 addps %xmm2, %xmm1 movaps %xmm1, 0x2070(%rsp) movaps 0x2070(%rsp), %xmm2 movaps 0x20f0(%rsp), %xmm1 movaps %xmm2, 0x21c0(%rsp) movaps %xmm1, 0x21b0(%rsp) movaps 0x14b8c6f(%rip), %xmm1 # 0x1e02c20 movaps %xmm1, 0x21a0(%rsp) movaps 0x21c0(%rsp), %xmm2 movaps 0x21b0(%rsp), %xmm1 movaps %xmm2, 0x2560(%rsp) movaps %xmm1, 0x2550(%rsp) movaps 0x2560(%rsp), %xmm2 movaps 0x2550(%rsp), %xmm1 mulps %xmm1, %xmm2 movaps 0x21a0(%rsp), %xmm1 movaps %xmm2, 0x23c0(%rsp) movaps %xmm1, 0x23b0(%rsp) movaps 0x23c0(%rsp), %xmm1 movaps 0x23b0(%rsp), %xmm2 addps %xmm2, %xmm1 movaps %xmm1, 0x2070(%rsp) movaps 0x2070(%rsp), %xmm2 movaps 0x20f0(%rsp), %xmm1 movaps %xmm2, 0x2190(%rsp) movaps %xmm1, 0x2180(%rsp) movaps 0x14b8bea(%rip), %xmm1 # 0x1e02c30 movaps %xmm1, 0x2170(%rsp) movaps 0x2190(%rsp), %xmm2 movaps 0x2180(%rsp), %xmm1 movaps %xmm2, 0x2580(%rsp) movaps %xmm1, 0x2570(%rsp) movaps 0x2580(%rsp), %xmm2 movaps 0x2570(%rsp), %xmm1 mulps %xmm1, %xmm2 movaps 0x2170(%rsp), %xmm1 movaps %xmm2, 0x23e0(%rsp) 
movaps %xmm1, 0x23d0(%rsp) movaps 0x23e0(%rsp), %xmm1 movaps 0x23d0(%rsp), %xmm2 addps %xmm2, %xmm1 movaps %xmm1, 0x2070(%rsp) movaps 0x2070(%rsp), %xmm2 movaps 0x20f0(%rsp), %xmm1 movaps %xmm2, 0x2600(%rsp) movaps %xmm1, 0x25f0(%rsp) movaps 0x2600(%rsp), %xmm1 movaps 0x25f0(%rsp), %xmm2 mulps %xmm2, %xmm1 movaps %xmm1, 0x2070(%rsp) movaps 0x2070(%rsp), %xmm2 movaps 0x2080(%rsp), %xmm1 movaps %xmm2, 0x25e0(%rsp) movaps %xmm1, 0x25d0(%rsp) movaps 0x25e0(%rsp), %xmm1 movaps 0x25d0(%rsp), %xmm2 mulps %xmm2, %xmm1 movaps %xmm1, 0x2070(%rsp) movaps 0x20b0(%rsp), %xmm2 movaps 0x2070(%rsp), %xmm1 movaps %xmm2, 0x2160(%rsp) movaps %xmm11, 0x2150(%rsp) movaps %xmm1, 0x2140(%rsp) movaps 0x2160(%rsp), %xmm2 movaps 0x2150(%rsp), %xmm1 movaps %xmm2, 0x25a0(%rsp) movaps %xmm1, 0x2590(%rsp) movaps 0x25a0(%rsp), %xmm2 movaps 0x2590(%rsp), %xmm1 mulps %xmm1, %xmm2 movaps 0x2140(%rsp), %xmm1 movaps %xmm2, 0x2400(%rsp) movaps %xmm1, 0x23f0(%rsp) movaps 0x2400(%rsp), %xmm1 movaps 0x23f0(%rsp), %xmm2 addps %xmm2, %xmm1 movaps %xmm1, 0x2070(%rsp) movaps 0x2080(%rsp), %xmm2 movaps 0x2070(%rsp), %xmm1 movaps %xmm2, 0x2720(%rsp) movaps %xmm4, 0x2710(%rsp) movaps %xmm1, 0x2700(%rsp) movaps 0x2700(%rsp), %xmm2 movaps 0x2720(%rsp), %xmm3 movaps 0x2710(%rsp), %xmm1 movaps %xmm3, 0x2740(%rsp) movaps %xmm1, 0x2730(%rsp) movaps 0x2740(%rsp), %xmm1 movaps 0x2730(%rsp), %xmm3 mulps %xmm3, %xmm1 movaps %xmm2, 0x2760(%rsp) movaps %xmm1, 0x2750(%rsp) movaps 0x2760(%rsp), %xmm1 movaps 0x2750(%rsp), %xmm2 subps %xmm2, %xmm1 movaps %xmm1, 0x2070(%rsp) movaps 0x20f0(%rsp), %xmm2 movaps 0x2070(%rsp), %xmm1 movaps %xmm2, 0x2440(%rsp) movaps %xmm1, 0x2430(%rsp) movaps 0x2440(%rsp), %xmm1 movaps 0x2430(%rsp), %xmm2 addps %xmm2, %xmm1 movaps %xmm1, 0x20f0(%rsp) movaps 0x20b0(%rsp), %xmm2 movaps 0x20f0(%rsp), %xmm1 movaps %xmm2, 0x2130(%rsp) movaps %xmm12, 0x2120(%rsp) movaps %xmm1, 0x2110(%rsp) movaps 0x2130(%rsp), %xmm2 movaps 0x2120(%rsp), %xmm1 movaps %xmm2, 0x25c0(%rsp) movaps %xmm1, 0x25b0(%rsp) movaps 
0x25c0(%rsp), %xmm2 movaps 0x25b0(%rsp), %xmm1 mulps %xmm1, %xmm2 movaps 0x2110(%rsp), %xmm1 movaps %xmm2, 0x2420(%rsp) movaps %xmm1, 0x2410(%rsp) movaps 0x2420(%rsp), %xmm1 movaps 0x2410(%rsp), %xmm2 addps %xmm2, %xmm1 movaps %xmm1, 0x20f0(%rsp) movaps 0x20f0(%rsp), %xmm2 movaps 0x20c0(%rsp), %xmm1 movaps %xmm2, 0x27d0(%rsp) movaps %xmm1, 0x27c0(%rsp) movaps 0x27d0(%rsp), %xmm1 movaps 0x27c0(%rsp), %xmm2 por %xmm2, %xmm1 movaps %xmm1, 0x20f0(%rsp) movaps 0x20f0(%rsp), %xmm1 movaps %xmm1, 0x1940(%rsp) movl $0x3f800000, 0x19cc(%rsp) # imm = 0x3F800000 movss 0x19cc(%rsp), %xmm1 shufps $0x0, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0] movaps %xmm1, 0x19b0(%rsp) movaps 0x19b0(%rsp), %xmm1 movaps %xmm1, 0x1930(%rsp) movl $0x40000000, 0x19ac(%rsp) # imm = 0x40000000 movss 0x19ac(%rsp), %xmm1 shufps $0x0, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0] movaps %xmm1, 0x1990(%rsp) movaps 0x1990(%rsp), %xmm1 movaps %xmm1, 0x1920(%rsp) movaps 0x1940(%rsp), %xmm2 movaps 0x1920(%rsp), %xmm1 movaps %xmm2, 0x1980(%rsp) movaps %xmm1, 0x1970(%rsp) movaps 0x1980(%rsp), %xmm1 movaps 0x1970(%rsp), %xmm2 mulps %xmm2, %xmm1 movaps %xmm1, 0x19e0(%rsp) movl $0x3f800000, 0x1a3c(%rsp) # imm = 0x3F800000 movss 0x1a3c(%rsp), %xmm1 shufps $0x0, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0] movaps %xmm1, 0x1a20(%rsp) movaps 0x1a20(%rsp), %xmm1 movaps %xmm1, 0x19d0(%rsp) movaps 0x19d0(%rsp), %xmm3 movaps %xmm3, %xmm2 movaps %xmm0, 0x19f0(%rsp) movaps 0x19f0(%rsp), %xmm1 movaps 0x19e0(%rsp), %xmm0 movaps %xmm1, 0x1ea0(%rsp) movaps %xmm0, 0x1e90(%rsp) movaps 0x1ea0(%rsp), %xmm0 movaps 0x1e90(%rsp), %xmm1 subps %xmm1, %xmm0 movaps (%rsp), %xmm1 movaps %xmm0, 0x1ad0(%rsp) movaps 0x10(%rsp), %xmm0 movaps %xmm0, 0x1ae0(%rsp) movaps 0x1ae0(%rsp), %xmm0 movaps %xmm0, 0x1ac0(%rsp) movaps 0x20(%rsp), %xmm0 movaps %xmm15, 0x1a90(%rsp) movaps 0x1ad0(%rsp), %xmm15 movaps %xmm15, 0x1e60(%rsp) movaps %xmm14, 0x1e50(%rsp) movaps 0x1e60(%rsp), %xmm14 movaps 0x1e50(%rsp), %xmm15 minps %xmm15, %xmm14 movaps %xmm14, 0x1ad0(%rsp) movaps 
0x1ad0(%rsp), %xmm14 movaps %xmm14, 0x1e40(%rsp) movaps %xmm13, 0x1e30(%rsp) movaps 0x1e40(%rsp), %xmm13 movaps 0x1e30(%rsp), %xmm14 maxps %xmm14, %xmm13 movaps %xmm13, 0x1ad0(%rsp) movaps 0x1ad0(%rsp), %xmm13 movaps %xmm13, 0x1e20(%rsp) movaps %xmm10, 0x1e10(%rsp) movaps 0x1e20(%rsp), %xmm10 movaps 0x1e10(%rsp), %xmm13 mulps %xmm13, %xmm10 movaps %xmm10, 0x1ab0(%rsp) movaps 0x1ab0(%rsp), %xmm10 movaps %xmm10, 0x1d00(%rsp) movaps %xmm4, 0x1cf0(%rsp) movaps 0x1d00(%rsp), %xmm10 movaps 0x1cf0(%rsp), %xmm13 addps %xmm13, %xmm10 movaps %xmm10, 0x1ab0(%rsp) movaps 0x1ab0(%rsp), %xmm10 movaps %xmm10, 0x1ee0(%rsp) cvttps2dq 0x1ee0(%rsp), %xmm10 movaps %xmm10, 0x1aa0(%rsp) movaps 0x1aa0(%rsp), %xmm10 movaps %xmm10, 0x1ef0(%rsp) cvtdq2ps 0x1ef0(%rsp), %xmm10 movaps %xmm10, 0x1ac0(%rsp) movaps 0x1ac0(%rsp), %xmm13 movaps 0x1ab0(%rsp), %xmm10 movaps %xmm13, 0x1f10(%rsp) movaps %xmm10, 0x1f00(%rsp) movaps 0x1f00(%rsp), %xmm10 movaps 0x1f10(%rsp), %xmm13 cmpltps %xmm13, %xmm10 movaps %xmm10, 0x1a80(%rsp) movaps 0x1a80(%rsp), %xmm13 movaps 0x1a90(%rsp), %xmm10 movaps %xmm13, 0x1f30(%rsp) movaps %xmm10, 0x1f20(%rsp) movaps 0x1f30(%rsp), %xmm10 movaps 0x1f20(%rsp), %xmm13 pand %xmm13, %xmm10 movaps %xmm10, 0x1a80(%rsp) movaps 0x1ac0(%rsp), %xmm13 movaps 0x1a80(%rsp), %xmm10 movaps %xmm13, 0x1e80(%rsp) movaps %xmm10, 0x1e70(%rsp) movaps 0x1e80(%rsp), %xmm10 movaps 0x1e70(%rsp), %xmm13 subps %xmm13, %xmm10 movaps %xmm10, 0x1ab0(%rsp) movaps 0x1ab0(%rsp), %xmm13 movaps 0x1ad0(%rsp), %xmm10 movaps %xmm13, 0x1fd0(%rsp) movaps %xmm12, 0x1fc0(%rsp) movaps %xmm10, 0x1fb0(%rsp) movaps 0x1fb0(%rsp), %xmm12 movaps 0x1fd0(%rsp), %xmm13 movaps 0x1fc0(%rsp), %xmm10 movaps %xmm13, 0x1ff0(%rsp) movaps %xmm10, 0x1fe0(%rsp) movaps 0x1ff0(%rsp), %xmm10 movaps 0x1fe0(%rsp), %xmm13 mulps %xmm13, %xmm10 movaps %xmm12, 0x2010(%rsp) movaps %xmm10, 0x2000(%rsp) movaps 0x2010(%rsp), %xmm10 movaps 0x2000(%rsp), %xmm12 subps %xmm12, %xmm10 movaps %xmm10, 0x1ad0(%rsp) movaps 0x1ab0(%rsp), %xmm12 movaps 
0x1ad0(%rsp), %xmm10 movaps %xmm12, 0x1f60(%rsp) movaps %xmm11, 0x1f50(%rsp) movaps %xmm10, 0x1f40(%rsp) movaps 0x1f40(%rsp), %xmm11 movaps 0x1f60(%rsp), %xmm12 movaps 0x1f50(%rsp), %xmm10 movaps %xmm12, 0x1f80(%rsp) movaps %xmm10, 0x1f70(%rsp) movaps 0x1f80(%rsp), %xmm10 movaps 0x1f70(%rsp), %xmm12 mulps %xmm12, %xmm10 movaps %xmm11, 0x1fa0(%rsp) movaps %xmm10, 0x1f90(%rsp) movaps 0x1fa0(%rsp), %xmm10 movaps 0x1f90(%rsp), %xmm11 subps %xmm11, %xmm10 movaps %xmm10, 0x1ad0(%rsp) movaps 0x1ad0(%rsp), %xmm10 movaps %xmm10, 0x1e00(%rsp) movaps %xmm10, 0x1df0(%rsp) movaps 0x1e00(%rsp), %xmm10 movaps 0x1df0(%rsp), %xmm11 mulps %xmm11, %xmm10 movaps %xmm10, 0x1ac0(%rsp) movaps %xmm9, 0x1a70(%rsp) movaps 0x1a70(%rsp), %xmm10 movaps 0x1ad0(%rsp), %xmm9 movaps %xmm10, 0x1c00(%rsp) movaps %xmm9, 0x1bf0(%rsp) movaps %xmm8, 0x1be0(%rsp) movaps 0x1c00(%rsp), %xmm9 movaps 0x1bf0(%rsp), %xmm8 movaps %xmm9, 0x1d20(%rsp) movaps %xmm8, 0x1d10(%rsp) movaps 0x1d20(%rsp), %xmm9 movaps 0x1d10(%rsp), %xmm8 mulps %xmm8, %xmm9 movaps 0x1be0(%rsp), %xmm8 movaps %xmm9, 0x1c20(%rsp) movaps %xmm8, 0x1c10(%rsp) movaps 0x1c20(%rsp), %xmm8 movaps 0x1c10(%rsp), %xmm9 addps %xmm9, %xmm8 movaps %xmm8, 0x1a70(%rsp) movaps 0x1a70(%rsp), %xmm9 movaps 0x1ad0(%rsp), %xmm8 movaps %xmm9, 0x1bd0(%rsp) movaps %xmm8, 0x1bc0(%rsp) movaps %xmm7, 0x1bb0(%rsp) movaps 0x1bd0(%rsp), %xmm8 movaps 0x1bc0(%rsp), %xmm7 movaps %xmm8, 0x1d40(%rsp) movaps %xmm7, 0x1d30(%rsp) movaps 0x1d40(%rsp), %xmm8 movaps 0x1d30(%rsp), %xmm7 mulps %xmm7, %xmm8 movaps 0x1bb0(%rsp), %xmm7 movaps %xmm8, 0x1c40(%rsp) movaps %xmm7, 0x1c30(%rsp) movaps 0x1c40(%rsp), %xmm7 movaps 0x1c30(%rsp), %xmm8 addps %xmm8, %xmm7 movaps %xmm7, 0x1a70(%rsp) movaps 0x1a70(%rsp), %xmm8 movaps 0x1ad0(%rsp), %xmm7 movaps %xmm8, 0x1ba0(%rsp) movaps %xmm7, 0x1b90(%rsp) movaps %xmm6, 0x1b80(%rsp) movaps 0x1ba0(%rsp), %xmm7 movaps 0x1b90(%rsp), %xmm6 movaps %xmm7, 0x1d60(%rsp) movaps %xmm6, 0x1d50(%rsp) movaps 0x1d60(%rsp), %xmm7 movaps 0x1d50(%rsp), %xmm6 mulps 
%xmm6, %xmm7 movaps 0x1b80(%rsp), %xmm6 movaps %xmm7, 0x1c60(%rsp) movaps %xmm6, 0x1c50(%rsp) movaps 0x1c60(%rsp), %xmm6 movaps 0x1c50(%rsp), %xmm7 addps %xmm7, %xmm6 movaps %xmm6, 0x1a70(%rsp) movaps 0x1a70(%rsp), %xmm7 movaps 0x1ad0(%rsp), %xmm6 movaps %xmm7, 0x1b70(%rsp) movaps %xmm6, 0x1b60(%rsp) movaps %xmm5, 0x1b50(%rsp) movaps 0x1b70(%rsp), %xmm6 movaps 0x1b60(%rsp), %xmm5 movaps %xmm6, 0x1d80(%rsp) movaps %xmm5, 0x1d70(%rsp) movaps 0x1d80(%rsp), %xmm6 movaps 0x1d70(%rsp), %xmm5 mulps %xmm5, %xmm6 movaps 0x1b50(%rsp), %xmm5 movaps %xmm6, 0x1c80(%rsp) movaps %xmm5, 0x1c70(%rsp) movaps 0x1c80(%rsp), %xmm5 movaps 0x1c70(%rsp), %xmm6 addps %xmm6, %xmm5 movaps %xmm5, 0x1a70(%rsp) movaps 0x1a70(%rsp), %xmm6 movaps 0x1ad0(%rsp), %xmm5 movaps %xmm6, 0x1b40(%rsp) movaps %xmm5, 0x1b30(%rsp) movaps %xmm4, 0x1b20(%rsp) movaps 0x1b40(%rsp), %xmm5 movaps 0x1b30(%rsp), %xmm4 movaps %xmm5, 0x1da0(%rsp) movaps %xmm4, 0x1d90(%rsp) movaps 0x1da0(%rsp), %xmm5 movaps 0x1d90(%rsp), %xmm4 mulps %xmm4, %xmm5 movaps 0x1b20(%rsp), %xmm4 movaps %xmm5, 0x1ca0(%rsp) movaps %xmm4, 0x1c90(%rsp) movaps 0x1ca0(%rsp), %xmm4 movaps 0x1c90(%rsp), %xmm5 addps %xmm5, %xmm4 movaps %xmm4, 0x1a70(%rsp) movaps 0x1a70(%rsp), %xmm6 movaps 0x1ac0(%rsp), %xmm5 movaps 0x1ad0(%rsp), %xmm4 movaps %xmm6, 0x1b10(%rsp) movaps %xmm5, 0x1b00(%rsp) movaps %xmm4, 0x1af0(%rsp) movaps 0x1b10(%rsp), %xmm5 movaps 0x1b00(%rsp), %xmm4 movaps %xmm5, 0x1dc0(%rsp) movaps %xmm4, 0x1db0(%rsp) movaps 0x1dc0(%rsp), %xmm5 movaps 0x1db0(%rsp), %xmm4 mulps %xmm4, %xmm5 movaps 0x1af0(%rsp), %xmm4 movaps %xmm5, 0x1cc0(%rsp) movaps %xmm4, 0x1cb0(%rsp) movaps 0x1cc0(%rsp), %xmm4 movaps 0x1cb0(%rsp), %xmm5 addps %xmm5, %xmm4 movaps %xmm4, 0x1a70(%rsp) movaps 0x1a70(%rsp), %xmm5 movaps 0x1a90(%rsp), %xmm4 movaps %xmm5, 0x1ce0(%rsp) movaps %xmm4, 0x1cd0(%rsp) movaps 0x1ce0(%rsp), %xmm4 movaps 0x1cd0(%rsp), %xmm5 addps %xmm5, %xmm4 movaps %xmm4, 0x1a70(%rsp) movaps 0x1ab0(%rsp), %xmm4 movaps %xmm4, 0x1ed0(%rsp) cvttps2dq 0x1ed0(%rsp), 
%xmm4 movaps %xmm4, 0x1aa0(%rsp) movaps 0x1aa0(%rsp), %xmm4 movaps %xmm4, 0x2030(%rsp) movaps %xmm0, 0x2020(%rsp) movdqa 0x2030(%rsp), %xmm0 movdqa 0x2020(%rsp), %xmm4 paddd %xmm4, %xmm0 movdqa %xmm0, 0x1aa0(%rsp) movdqa 0x1aa0(%rsp), %xmm0 movdqa %xmm0, 0x2050(%rsp) movl $0x17, 0x204c(%rsp) movdqa 0x2050(%rsp), %xmm0 movl 0x204c(%rsp), %eax movd %eax, %xmm4 pslld %xmm4, %xmm0 movdqa %xmm0, 0x1aa0(%rsp) movdqa 0x1aa0(%rsp), %xmm0 movdqa %xmm0, 0x2060(%rsp) movdqa 0x2060(%rsp), %xmm0 movaps %xmm0, 0x1a60(%rsp) movaps 0x1a70(%rsp), %xmm4 movaps 0x1a60(%rsp), %xmm0 movaps %xmm4, 0x1de0(%rsp) movaps %xmm0, 0x1dd0(%rsp) movaps 0x1de0(%rsp), %xmm0 mulps 0x1dd0(%rsp), %xmm0 movaps %xmm0, 0x1a70(%rsp) movaps 0x1a70(%rsp), %xmm0 movaps %xmm3, 0x1a10(%rsp) movaps %xmm0, 0x1a00(%rsp) movaps 0x1a10(%rsp), %xmm0 addps 0x1a00(%rsp), %xmm0 movaps %xmm2, 0x1a50(%rsp) movaps %xmm0, 0x1a40(%rsp) movaps 0x1a50(%rsp), %xmm2 divps 0x1a40(%rsp), %xmm2 movaps 0x1920(%rsp), %xmm0 movaps %xmm2, 0x1960(%rsp) movaps %xmm0, 0x1950(%rsp) movaps 0x1960(%rsp), %xmm2 mulps 0x1950(%rsp), %xmm2 movaps 0x1930(%rsp), %xmm0 movaps %xmm2, 0x1ec0(%rsp) movaps %xmm0, 0x1eb0(%rsp) movaps 0x1ec0(%rsp), %xmm0 subps 0x1eb0(%rsp), %xmm0 movaps %xmm1, 0xc00(%rsp) movaps %xmm0, 0xbf0(%rsp) movaps 0xc00(%rsp), %xmm0 mulps 0xbf0(%rsp), %xmm0 movaps %xmm0, 0x7e0(%rsp) jmp 0x94afe7 movq 0x7c0(%rsp), %rax movq %rax, 0x808(%rsp) movq $0x0, 0x800(%rsp) movq 0x808(%rsp), %rax movq (%rax), %rax movq 0x800(%rsp), %rcx movss (%rax,%rcx,4), %xmm0 movss %xmm0, 0xb0c(%rsp) movss 0xb0c(%rsp), %xmm0 shufps $0x0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0] movaps %xmm0, 0xaf0(%rsp) movaps 0xaf0(%rsp), %xmm0 movaps %xmm0, 0x790(%rsp) movq 0x7c0(%rsp), %rax movq %rax, 0x7f8(%rsp) movq $0x1, 0x7f0(%rsp) movq 0x7f8(%rsp), %rax movq (%rax), %rax movq 0x7f0(%rsp), %rcx movss (%rax,%rcx,4), %xmm0 movss %xmm0, 0xaec(%rsp) movss 0xaec(%rsp), %xmm0 shufps $0x0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0] movaps %xmm0, 0xad0(%rsp) movaps 0xad0(%rsp), 
%xmm0 movaps %xmm0, 0x780(%rsp) movaps 0x7d0(%rsp), %xmm2 movaps 0x790(%rsp), %xmm1 movaps 0x780(%rsp), %xmm0 movaps %xmm2, 0xc60(%rsp) movaps %xmm1, 0xc50(%rsp) movaps %xmm0, 0xc40(%rsp) movl $0x3f800000, 0xd1c(%rsp) # imm = 0x3F800000 movss 0xd1c(%rsp), %xmm0 shufps $0x0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0] movaps %xmm0, 0xd00(%rsp) movaps 0xd00(%rsp), %xmm0 movaps %xmm0, 0xc30(%rsp) movaps 0xc60(%rsp), %xmm1 movaps 0xc50(%rsp), %xmm0 movaps %xmm1, 0xcd0(%rsp) movaps %xmm0, 0xcc0(%rsp) movaps 0xcd0(%rsp), %xmm1 movaps 0xcc0(%rsp), %xmm0 mulps %xmm0, %xmm1 movaps 0xc40(%rsp), %xmm0 movaps %xmm1, 0xc90(%rsp) movaps %xmm0, 0xc80(%rsp) movaps 0xc90(%rsp), %xmm0 movaps 0xc80(%rsp), %xmm1 addps %xmm1, %xmm0 movaps %xmm0, 0xc40(%rsp) movaps 0xc40(%rsp), %xmm1 xorps %xmm0, %xmm0 movaps %xmm0, 0xc70(%rsp) movaps 0xc70(%rsp), %xmm0 movaps %xmm1, 0xcf0(%rsp) movaps %xmm0, 0xce0(%rsp) movaps 0xcf0(%rsp), %xmm0 movaps 0xce0(%rsp), %xmm1 maxps %xmm1, %xmm0 movaps %xmm0, 0xc40(%rsp) movaps 0xc40(%rsp), %xmm1 movaps 0xc30(%rsp), %xmm0 movaps %xmm1, 0xd30(%rsp) movaps %xmm0, 0xd20(%rsp) movaps 0xd30(%rsp), %xmm0 movaps 0xd20(%rsp), %xmm1 minps %xmm1, %xmm0 movaps %xmm0, 0xc40(%rsp) movaps 0xc40(%rsp), %xmm1 movaps 0xc60(%rsp), %xmm0 movaps %xmm1, 0xcb0(%rsp) movaps %xmm0, 0xca0(%rsp) movaps 0xcb0(%rsp), %xmm0 mulps 0xca0(%rsp), %xmm0 movaps %xmm0, 0x7e0(%rsp) jmp 0x94afe7 movaps 0x7d0(%rsp), %xmm0 movaps %xmm0, 0x7e0(%rsp) movaps 0x7e0(%rsp), %xmm0 movaps %xmm0, 0x2c0(%rsp) movq 0x348(%rsp), %rax movaps 0x2c0(%rsp), %xmm0 movq %rax, 0x868(%rsp) movaps %xmm0, 0x850(%rsp) movaps 0x850(%rsp), %xmm0 movq 0x868(%rsp), %rax movups %xmm0, (%rax) movq 0x348(%rsp), %rax addq $0x10, %rax movq %rax, 0x348(%rsp) movl 0x2d4(%rsp), %eax addl $0x1, %eax movl %eax, 0x2d4(%rsp) jmp 0x9473d1 jmp 0x94b056 movl 0x2d8(%rsp), %eax addl $0x1, %eax movl %eax, 0x2d8(%rsp) jmp 0x9473b2 jmp 0x94b06e movl 0x354(%rsp), %eax addl $0x1, %eax movl %eax, 0x354(%rsp) jmp 0x946ed6 addq $0x2848, %rsp # imm = 0x2848 
retq nopl (%rax)
/ysh329[P]ncnn/src/layer/x86/deconvolution_pack4.h
ncnn::deconvolution_pack4to1_sse(ncnn::Mat const&, ncnn::Mat&, ncnn::Mat const&, ncnn::Mat const&, int, int, int, int, int, int, int, ncnn::Mat const&, ncnn::Option const&)
static void deconvolution_pack4to1_sse(const Mat& bottom_blob, Mat& top_blob, const Mat& weight_data_packed, const Mat& bias_data, int kernel_w, int kernel_h, int dilation_w, int dilation_h, int stride_w, int stride_h, int activation_type, const Mat& activation_params, const Option& opt) { int outch = top_blob.c; const int kernel_extent_w = dilation_w * (kernel_w - 1) + 1; const int kernel_extent_h = dilation_h * (kernel_h - 1) + 1; const float* bias_data_ptr = bias_data; // num_output #pragma omp parallel for num_threads(opt.num_threads) for (int p = 0; p < outch; p++) { float* outptr = top_blob.channel(p); const int maxk = kernel_w * kernel_h; // shadowed variable for less openmp task args const int w = bottom_blob.w; const int h = bottom_blob.h; const int channels = bottom_blob.c; const int outw = top_blob.w; const int outh = top_blob.h; for (int i = 0; i < outh; i++) { for (int j = 0; j < outw; j++) { float sum = 0.f; if (bias_data_ptr) { sum = bias_data_ptr[p]; } __m128 _sum = _mm_setzero_ps(); const float* kptr = weight_data_packed.channel(p); // channels for (int q = 0; q < channels; q++) { const Mat m = bottom_blob.channel(q); for (int y = 0; y < kernel_h; y++) { int sys = (i + y * dilation_h - (kernel_extent_h - 1)); if (sys < 0 || sys % stride_h != 0) continue; int sy = sys / stride_h; if (sy >= h) continue; for (int x = 0; x < kernel_w; x++) { int sxs = (j + x * dilation_w - (kernel_extent_w - 1)); if (sxs < 0 || sxs % stride_w != 0) continue; int sx = sxs / stride_w; if (sx >= w) continue; const float* sptr = m.row(sy) + sx * 4; int k = y * kernel_w + x; __m128 _val = _mm_load_ps(sptr); __m128 _w = _mm_load_ps(kptr + k * 4); _sum = _mm_comp_fmadd_ps(_val, _w, _sum); } } kptr += maxk * 4; } sum += _mm_reduce_add_ps(_sum); sum = activation_ss(sum, activation_type, activation_params); outptr[0] = sum; outptr++; } } } }
subq $0x698, %rsp # imm = 0x698 movq 0x6d0(%rsp), %rax movq 0x6c8(%rsp), %rax movl 0x6c0(%rsp), %eax movl 0x6b8(%rsp), %eax movl 0x6b0(%rsp), %eax movl 0x6a8(%rsp), %eax movl 0x6a0(%rsp), %eax movq %rdi, 0x268(%rsp) movq %rsi, 0x260(%rsp) movq %rdx, 0x258(%rsp) movq %rcx, 0x250(%rsp) movl %r8d, 0x24c(%rsp) movl %r9d, 0x248(%rsp) movq 0x260(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0x244(%rsp) movl 0x6a0(%rsp), %eax movl 0x24c(%rsp), %ecx subl $0x1, %ecx imull %ecx, %eax addl $0x1, %eax movl %eax, 0x240(%rsp) movl 0x6a8(%rsp), %eax movl 0x248(%rsp), %ecx subl $0x1, %ecx imull %ecx, %eax addl $0x1, %eax movl %eax, 0x23c(%rsp) movq 0x250(%rsp), %rax movq %rax, 0x3a0(%rsp) movq 0x3a0(%rsp), %rax movq (%rax), %rax movq %rax, 0x230(%rsp) movl $0x0, 0x22c(%rsp) movl 0x22c(%rsp), %eax cmpl 0x244(%rsp), %eax jge 0x9505e5 movq 0x260(%rsp), %rcx movl 0x22c(%rsp), %eax leaq 0x1d8(%rsp), %rdx movq %rdx, 0x288(%rsp) movq %rcx, 0x280(%rsp) movl %eax, 0x27c(%rsp) movq 0x280(%rsp), %rax movq %rax, 0x98(%rsp) movb $0x0, 0x27b(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x27c(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x1d8(%rsp), %r10 movq %r10, 0x4e0(%rsp) movl %r9d, 0x4dc(%rsp) movl %r8d, 0x4d8(%rsp) movl %edi, 0x4d4(%rsp) movq %rsi, 0x4c8(%rsp) movq %rdx, 0x4c0(%rsp) movl %ecx, 0x4bc(%rsp) movq %rax, 0x4b0(%rsp) movq 0x4e0(%rsp), %rcx movq %rcx, 0x90(%rsp) movq 0x4c8(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x4c0(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x4bc(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x4b0(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x4dc(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x4d8(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x4d4(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), 
%rax movq %rax, 0x4f0(%rsp) movl $0x10, 0x4ec(%rsp) movq 0x4f0(%rsp), %rax movslq 0x4ec(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x4ec(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x98(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x200(%rsp) cmpl $0x4, 0x28(%rax) jne 0x94f1d6 movq 0x98(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x218(%rsp) movb $0x1, 0x27b(%rsp) testb $0x1, 0x27b(%rsp) jne 0x94f311 leaq 0x1d8(%rsp), %rax movq %rax, 0x290(%rsp) movq 0x290(%rsp), %rax movq %rax, 0x330(%rsp) movq 0x330(%rsp), %rax movq %rax, 0x88(%rsp) cmpq $0x0, 0x8(%rax) je 0x94f2b4 movq 0x88(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x32c(%rsp) # imm = 0xFFFFFFFF movl 0x32c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x328(%rsp) cmpl $0x1, 0x328(%rsp) jne 0x94f2b4 movq 0x88(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x94f285 movq 0x88(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x94f283 jmp 0x94f2b2 movq 0x88(%rsp), %rax movq (%rax), %rax movq %rax, 0x528(%rsp) cmpq $0x0, 0x528(%rsp) je 0x94f2b0 movq 0x528(%rsp), %rdi callq 0x5e480 jmp 0x94f2b2 jmp 0x94f2b4 movq 0x88(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x94f30f movq %rax, %rdi callq 0x5fc90 jmp 0x94f311 leaq 0x1d8(%rsp), %rax movq %rax, 0x270(%rsp) movq 0x270(%rsp), %rax movq (%rax), %rax movq %rax, 0x80(%rsp) leaq 0x1d8(%rsp), %rax movq %rax, 0x298(%rsp) movq 0x298(%rsp), %rax movq %rax, 0x320(%rsp) movq 0x320(%rsp), %rax movq %rax, 0x78(%rsp) cmpq $0x0, 0x8(%rax) je 0x94f3ed movq 0x78(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x31c(%rsp) # imm = 0xFFFFFFFF movl 0x31c(%rsp), %eax lock xaddl %eax, (%rcx) 
movl %eax, 0x318(%rsp) cmpl $0x1, 0x318(%rsp) jne 0x94f3ed movq 0x78(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x94f3c1 movq 0x78(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x94f3bf jmp 0x94f3eb movq 0x78(%rsp), %rax movq (%rax), %rax movq %rax, 0x530(%rsp) cmpq $0x0, 0x530(%rsp) je 0x94f3e9 movq 0x530(%rsp), %rdi callq 0x5e480 jmp 0x94f3eb jmp 0x94f3ed movq 0x78(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x94f445 movq %rax, %rdi callq 0x5fc90 movq 0x80(%rsp), %rax movq %rax, 0x220(%rsp) movl 0x24c(%rsp), %eax imull 0x248(%rsp), %eax movl %eax, 0x1c8(%rsp) movq 0x268(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x1c4(%rsp) movq 0x268(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0x1c0(%rsp) movq 0x268(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0x1bc(%rsp) movq 0x260(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x1b8(%rsp) movq 0x260(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0x1b4(%rsp) movl $0x0, 0x1b0(%rsp) movl 0x1b0(%rsp), %eax cmpl 0x1b4(%rsp), %eax jge 0x9505cd movl $0x0, 0x1ac(%rsp) movl 0x1ac(%rsp), %eax cmpl 0x1b8(%rsp), %eax jge 0x9505b5 xorps %xmm0, %xmm0 movss %xmm0, 0x1a8(%rsp) cmpq $0x0, 0x230(%rsp) je 0x94f538 movq 0x230(%rsp), %rax movslq 0x22c(%rsp), %rcx movss (%rax,%rcx,4), %xmm0 movss %xmm0, 0x1a8(%rsp) xorps %xmm0, %xmm0 movaps %xmm0, 0x560(%rsp) movaps 0x560(%rsp), %xmm0 movaps %xmm0, 0x190(%rsp) movq 0x258(%rsp), %rcx movl 0x22c(%rsp), %eax leaq 0x140(%rsp), %rdx movq %rdx, 0x378(%rsp) movq %rcx, 0x370(%rsp) movl %eax, 0x36c(%rsp) movq 0x370(%rsp), %rax movq %rax, 0x70(%rsp) movb $0x0, 0x36b(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x36c(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 
0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x140(%rsp), %r10 movq %r10, 0x470(%rsp) movl %r9d, 0x46c(%rsp) movl %r8d, 0x468(%rsp) movl %edi, 0x464(%rsp) movq %rsi, 0x458(%rsp) movq %rdx, 0x450(%rsp) movl %ecx, 0x44c(%rsp) movq %rax, 0x440(%rsp) movq 0x470(%rsp), %rcx movq %rcx, 0x68(%rsp) movq 0x458(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x450(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x44c(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x440(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x46c(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x468(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x464(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x510(%rsp) movl $0x10, 0x50c(%rsp) movq 0x510(%rsp), %rax movslq 0x50c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x50c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x70(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x168(%rsp) cmpl $0x4, 0x28(%rax) jne 0x94f703 movq 0x70(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x180(%rsp) movb $0x1, 0x36b(%rsp) testb $0x1, 0x36b(%rsp) jne 0x94f82a leaq 0x140(%rsp), %rax movq %rax, 0x380(%rsp) movq 0x380(%rsp), %rax movq %rax, 0x390(%rsp) movq 0x390(%rsp), %rax movq %rax, 0x60(%rsp) cmpq $0x0, 0x8(%rax) je 0x94f7d2 movq 0x60(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x38c(%rsp) # imm = 0xFFFFFFFF movl 0x38c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x388(%rsp) cmpl $0x1, 0x388(%rsp) jne 0x94f7d2 movq 0x60(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x94f7a6 movq 0x60(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x94f7a4 jmp 0x94f7d0 movq 0x60(%rsp), %rax movq (%rax), %rax movq %rax, 0x518(%rsp) cmpq $0x0, 0x518(%rsp) je 0x94f7ce 
movq 0x518(%rsp), %rdi callq 0x5e480 jmp 0x94f7d0 jmp 0x94f7d2 movq 0x60(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x94f82a movq %rax, %rdi callq 0x5fc90 leaq 0x140(%rsp), %rax movq %rax, 0x398(%rsp) movq 0x398(%rsp), %rax movq (%rax), %rax movq %rax, 0x58(%rsp) leaq 0x140(%rsp), %rax movq %rax, 0x2a8(%rsp) movq 0x2a8(%rsp), %rax movq %rax, 0x300(%rsp) movq 0x300(%rsp), %rax movq %rax, 0x50(%rsp) cmpq $0x0, 0x8(%rax) je 0x94f903 movq 0x50(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x2fc(%rsp) # imm = 0xFFFFFFFF movl 0x2fc(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x2f8(%rsp) cmpl $0x1, 0x2f8(%rsp) jne 0x94f903 movq 0x50(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x94f8d7 movq 0x50(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x94f8d5 jmp 0x94f901 movq 0x50(%rsp), %rax movq (%rax), %rax movq %rax, 0x540(%rsp) cmpq $0x0, 0x540(%rsp) je 0x94f8ff movq 0x540(%rsp), %rdi callq 0x5e480 jmp 0x94f901 jmp 0x94f903 movq 0x50(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x94f95b movq %rax, %rdi callq 0x5fc90 movq 0x58(%rsp), %rax movq %rax, 0x188(%rsp) movl $0x0, 0x13c(%rsp) movl 0x13c(%rsp), %eax cmpl 0x1bc(%rsp), %eax jge 0x950081 movq 0x268(%rsp), %rcx movl 0x13c(%rsp), %eax leaq 0xf0(%rsp), %rdx movq %rdx, 0x348(%rsp) movq %rcx, 0x340(%rsp) movl %eax, 0x33c(%rsp) movq 0x340(%rsp), %rax movq %rax, 0x48(%rsp) movb $0x0, 0x33b(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x33c(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx 
movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0xf0(%rsp), %r10 movq %r10, 0x4a8(%rsp) movl %r9d, 0x4a4(%rsp) movl %r8d, 0x4a0(%rsp) movl %edi, 0x49c(%rsp) movq %rsi, 0x490(%rsp) movq %rdx, 0x488(%rsp) movl %ecx, 0x484(%rsp) movq %rax, 0x478(%rsp) movq 0x4a8(%rsp), %rcx movq %rcx, 0x40(%rsp) movq 0x490(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x488(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x484(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x478(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x4a4(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x4a0(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x49c(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x500(%rsp) movl $0x10, 0x4fc(%rsp) movq 0x500(%rsp), %rax movslq 0x4fc(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x4fc(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x48(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x118(%rsp) cmpl $0x4, 0x28(%rax) jne 0x94fb37 movq 0x48(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x130(%rsp) movb $0x1, 0x33b(%rsp) testb $0x1, 0x33b(%rsp) jne 0x94fc5e leaq 0xf0(%rsp), %rax movq %rax, 0x350(%rsp) movq 0x350(%rsp), %rax movq %rax, 0x360(%rsp) movq 0x360(%rsp), %rax movq %rax, 0x38(%rsp) cmpq $0x0, 0x8(%rax) je 0x94fc06 movq 0x38(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x35c(%rsp) # imm = 0xFFFFFFFF movl 0x35c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x358(%rsp) cmpl $0x1, 0x358(%rsp) jne 0x94fc06 movq 0x38(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x94fbda movq 0x38(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x94fbd8 jmp 0x94fc04 movq 0x38(%rsp), %rax movq (%rax), %rax movq %rax, 0x520(%rsp) cmpq $0x0, 0x520(%rsp) je 0x94fc02 movq 0x520(%rsp), 
%rdi callq 0x5e480 jmp 0x94fc04 jmp 0x94fc06 movq 0x38(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x94fc5e movq %rax, %rdi callq 0x5fc90 movl $0x0, 0xec(%rsp) movl 0xec(%rsp), %eax cmpl 0x248(%rsp), %eax jge 0x94ff34 movl 0x1b0(%rsp), %eax movl 0xec(%rsp), %ecx imull 0x6a8(%rsp), %ecx addl %ecx, %eax movl 0x23c(%rsp), %ecx subl $0x1, %ecx subl %ecx, %eax movl %eax, 0xe8(%rsp) cmpl $0x0, 0xe8(%rsp) jl 0x94fcc6 movl 0xe8(%rsp), %eax cltd idivl 0x6b8(%rsp) cmpl $0x0, %edx je 0x94fccb jmp 0x94ff1e movl 0xe8(%rsp), %eax cltd idivl 0x6b8(%rsp) movl %eax, 0xe4(%rsp) movl 0xe4(%rsp), %eax cmpl 0x1c0(%rsp), %eax jl 0x94fcf6 jmp 0x94ff1e movl $0x0, 0xe0(%rsp) movl 0xe0(%rsp), %eax cmpl 0x24c(%rsp), %eax jge 0x94ff1c movl 0x1ac(%rsp), %eax movl 0xe0(%rsp), %ecx imull 0x6a0(%rsp), %ecx addl %ecx, %eax movl 0x240(%rsp), %ecx subl $0x1, %ecx subl %ecx, %eax movl %eax, 0xdc(%rsp) cmpl $0x0, 0xdc(%rsp) jl 0x94fd5e movl 0xdc(%rsp), %eax cltd idivl 0x6b0(%rsp) cmpl $0x0, %edx je 0x94fd63 jmp 0x94ff06 movl 0xdc(%rsp), %eax cltd idivl 0x6b0(%rsp) movl %eax, 0xd8(%rsp) movl 0xd8(%rsp), %eax cmpl 0x1c4(%rsp), %eax jl 0x94fd8e jmp 0x94ff06 movl 0xe4(%rsp), %eax leaq 0xf0(%rsp), %rcx movq %rcx, 0x3b0(%rsp) movl %eax, 0x3ac(%rsp) movq 0x3b0(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0x3ac(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x30(%rsp) movq 0x30(%rsp), %rax movl 0xd8(%rsp), %ecx shll $0x2, %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0xd0(%rsp) movl 0xec(%rsp), %eax imull 0x24c(%rsp), %eax addl 0xe0(%rsp), %eax movl %eax, 0xcc(%rsp) movq 0xd0(%rsp), %rax movq %rax, 0x578(%rsp) movq 0x578(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0xb0(%rsp) movq 0x188(%rsp), %rax movl 0xcc(%rsp), %ecx shll $0x2, %ecx movslq %ecx, %rcx 
shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x570(%rsp) movq 0x570(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0xa0(%rsp) movaps 0xb0(%rsp), %xmm2 movaps 0xa0(%rsp), %xmm1 movaps 0x190(%rsp), %xmm0 movaps %xmm2, 0x5a0(%rsp) movaps %xmm1, 0x590(%rsp) movaps %xmm0, 0x580(%rsp) movaps 0x5a0(%rsp), %xmm1 movaps 0x590(%rsp), %xmm0 movaps %xmm1, 0x5e0(%rsp) movaps %xmm0, 0x5d0(%rsp) movaps 0x5e0(%rsp), %xmm1 mulps 0x5d0(%rsp), %xmm1 movaps 0x580(%rsp), %xmm0 movaps %xmm1, 0x5c0(%rsp) movaps %xmm0, 0x5b0(%rsp) movaps 0x5c0(%rsp), %xmm0 addps 0x5b0(%rsp), %xmm0 movaps %xmm0, 0x20(%rsp) movaps 0x20(%rsp), %xmm0 movaps %xmm0, 0x190(%rsp) movl 0xe0(%rsp), %eax addl $0x1, %eax movl %eax, 0xe0(%rsp) jmp 0x94fd01 jmp 0x94ff1e movl 0xec(%rsp), %eax addl $0x1, %eax movl %eax, 0xec(%rsp) jmp 0x94fc69 movl 0x1c8(%rsp), %ecx shll $0x2, %ecx movq 0x188(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x188(%rsp) leaq 0xf0(%rsp), %rax movq %rax, 0x2b8(%rsp) movq 0x2b8(%rsp), %rax movq %rax, 0x2e0(%rsp) movq 0x2e0(%rsp), %rax movq %rax, 0x18(%rsp) cmpq $0x0, 0x8(%rax) je 0x950011 movq 0x18(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x2dc(%rsp) # imm = 0xFFFFFFFF movl 0x2dc(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x2d8(%rsp) cmpl $0x1, 0x2d8(%rsp) jne 0x950011 movq 0x18(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x94ffe5 movq 0x18(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x94ffe3 jmp 0x95000f movq 0x18(%rsp), %rax movq (%rax), %rax movq %rax, 0x550(%rsp) cmpq $0x0, 0x550(%rsp) je 0x95000d movq 0x550(%rsp), %rdi callq 0x5e480 jmp 0x95000f jmp 0x950011 movq 0x18(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x950069 movq %rax, %rdi callq 0x5fc90 jmp 0x95006b movl 0x13c(%rsp), %eax addl $0x1, %eax movl %eax, 
0x13c(%rsp) jmp 0x94f973 movaps 0x190(%rsp), %xmm0 movaps %xmm0, 0x610(%rsp) movaps 0x610(%rsp), %xmm1 movaps %xmm1, 0x650(%rsp) movaps %xmm1, 0x640(%rsp) movapd 0x650(%rsp), %xmm2 movapd 0x640(%rsp), %xmm0 unpckhpd %xmm2, %xmm0 # xmm0 = xmm0[1],xmm2[1] movaps %xmm1, 0x630(%rsp) movapd %xmm0, 0x620(%rsp) movaps 0x630(%rsp), %xmm0 movaps 0x620(%rsp), %xmm1 addps %xmm1, %xmm0 movaps %xmm0, 0x600(%rsp) movaps 0x600(%rsp), %xmm1 movaps %xmm1, %xmm0 shufps $0x55, %xmm0, %xmm0 # xmm0 = xmm0[1,1,1,1] movaps %xmm1, 0x670(%rsp) movaps %xmm0, 0x660(%rsp) movss 0x660(%rsp), %xmm1 movaps 0x670(%rsp), %xmm0 addss %xmm1, %xmm0 movaps %xmm0, 0x670(%rsp) movaps 0x670(%rsp), %xmm0 movaps %xmm0, 0x5f0(%rsp) movaps 0x5f0(%rsp), %xmm0 movaps %xmm0, 0x680(%rsp) movss 0x680(%rsp), %xmm1 movss 0x1a8(%rsp), %xmm0 addss %xmm1, %xmm0 movss %xmm0, 0x1a8(%rsp) movss 0x1a8(%rsp), %xmm0 movl 0x6c0(%rsp), %ecx movq 0x6c8(%rsp), %rax movss %xmm0, 0x3ec(%rsp) movl %ecx, 0x3e8(%rsp) movq %rax, 0x3e0(%rsp) movl 0x3e8(%rsp), %eax decl %eax movl %eax, %ecx movq %rcx, 0x10(%rsp) subl $0x5, %eax ja 0x950564 movq 0x10(%rsp), %rax leaq 0x14b75bd(%rip), %rcx # 0x1e07778 movslq (%rcx,%rax,4), %rax addq %rcx, %rax jmpq *%rax movss 0x3ec(%rsp), %xmm0 xorps %xmm1, %xmm1 callq 0x137490 movss %xmm0, 0x3ec(%rsp) jmp 0x950564 movq 0x3e0(%rsp), %rax movq %rax, 0x438(%rsp) movq $0x0, 0x430(%rsp) movq 0x438(%rsp), %rax movq (%rax), %rax movq 0x430(%rsp), %rcx movss (%rax,%rcx,4), %xmm0 movss %xmm0, 0x3dc(%rsp) movss 0x3ec(%rsp), %xmm0 xorps %xmm1, %xmm1 ucomiss %xmm1, %xmm0 jbe 0x950242 movss 0x3ec(%rsp), %xmm0 movss %xmm0, 0xc(%rsp) jmp 0x95025a movss 0x3ec(%rsp), %xmm0 mulss 0x3dc(%rsp), %xmm0 movss %xmm0, 0xc(%rsp) movss 0xc(%rsp), %xmm0 movss %xmm0, 0x3ec(%rsp) jmp 0x950564 movq 0x3e0(%rsp), %rax movq %rax, 0x428(%rsp) movq $0x0, 0x420(%rsp) movq 0x428(%rsp), %rax movq (%rax), %rax movq 0x420(%rsp), %rcx movss (%rax,%rcx,4), %xmm0 movss %xmm0, 0x3d8(%rsp) movq 0x3e0(%rsp), %rax movq %rax, 0x418(%rsp) movq $0x1, 
0x410(%rsp) movq 0x418(%rsp), %rax movq (%rax), %rax movq 0x410(%rsp), %rcx movss (%rax,%rcx,4), %xmm0 movss %xmm0, 0x3d4(%rsp) movss 0x3ec(%rsp), %xmm1 movss 0x3d8(%rsp), %xmm0 ucomiss %xmm1, %xmm0 jbe 0x950311 movss 0x3d8(%rsp), %xmm0 movss %xmm0, 0x3ec(%rsp) movss 0x3ec(%rsp), %xmm0 ucomiss 0x3d4(%rsp), %xmm0 jbe 0x950336 movss 0x3d4(%rsp), %xmm0 movss %xmm0, 0x3ec(%rsp) jmp 0x950564 movss 0x14b2aed(%rip), %xmm0 # 0x1e02e30 movss %xmm0, 0x3d0(%rsp) leaq 0x3ec(%rsp), %rdi leaq 0x3d0(%rsp), %rsi callq 0x1374b0 movss (%rax), %xmm0 movss %xmm0, 0x3ec(%rsp) movss 0x14b2abe(%rip), %xmm0 # 0x1e02e34 movss %xmm0, 0x3cc(%rsp) leaq 0x3ec(%rsp), %rdi leaq 0x3cc(%rsp), %rsi callq 0x1374f0 movss (%rax), %xmm0 movss %xmm0, 0x3ec(%rsp) movss 0x3ec(%rsp), %xmm0 movd %xmm0, %eax xorl $0x80000000, %eax # imm = 0x80000000 movd %eax, %xmm0 callq 0xf74e0 movss 0x14aff0c(%rip), %xmm1 # 0x1e002d0 addss %xmm0, %xmm1 movss 0x14aff00(%rip), %xmm0 # 0x1e002d0 divss %xmm1, %xmm0 movss %xmm0, 0x3ec(%rsp) jmp 0x950564 movss 0x3ec(%rsp), %xmm0 movss %xmm0, 0x8(%rsp) movss 0x3ec(%rsp), %xmm0 callq 0xf74e0 movss 0x14afec9(%rip), %xmm1 # 0x1e002d0 addss %xmm1, %xmm0 callq 0xf74c0 callq 0x137530 movaps %xmm0, %xmm1 movss 0x8(%rsp), %xmm0 mulss %xmm1, %xmm0 movss %xmm0, 0x3ec(%rsp) jmp 0x950564 movq 0x3e0(%rsp), %rax movq %rax, 0x408(%rsp) movq $0x0, 0x400(%rsp) movq 0x408(%rsp), %rax movq (%rax), %rax movq 0x400(%rsp), %rcx movss (%rax,%rcx,4), %xmm0 movss %xmm0, 0x3c8(%rsp) movq 0x3e0(%rsp), %rax movq %rax, 0x3f8(%rsp) movq $0x1, 0x3f0(%rsp) movq 0x3f8(%rsp), %rax movq (%rax), %rax movq 0x3f0(%rsp), %rcx movss (%rax,%rcx,4), %xmm0 movss %xmm0, 0x3c4(%rsp) movss 0x3c4(%rsp), %xmm0 movd %xmm0, %eax xorl $0x80000000, %eax # imm = 0x80000000 movd %eax, %xmm0 divss 0x3c8(%rsp), %xmm0 movss %xmm0, 0x3c0(%rsp) movss 0x14afdf6(%rip), %xmm0 # 0x1e002d0 divss 0x3c8(%rsp), %xmm0 addss 0x3c0(%rsp), %xmm0 movss %xmm0, 0x3bc(%rsp) movss 0x3ec(%rsp), %xmm1 movss 0x3c0(%rsp), %xmm0 ucomiss %xmm1, %xmm0 jbe 
0x95051a xorps %xmm0, %xmm0 movss %xmm0, 0x3ec(%rsp) jmp 0x950562 movss 0x3ec(%rsp), %xmm0 ucomiss 0x3bc(%rsp), %xmm0 jbe 0x95052f jmp 0x950560 movss 0x3ec(%rsp), %xmm0 movss 0x3ec(%rsp), %xmm1 mulss 0x3c8(%rsp), %xmm1 addss 0x3c4(%rsp), %xmm1 mulss %xmm1, %xmm0 movss %xmm0, 0x3ec(%rsp) jmp 0x950562 jmp 0x950564 movss 0x3ec(%rsp), %xmm0 movss %xmm0, 0x1a8(%rsp) movss 0x1a8(%rsp), %xmm0 movq 0x220(%rsp), %rax movss %xmm0, (%rax) movq 0x220(%rsp), %rax addq $0x4, %rax movq %rax, 0x220(%rsp) movl 0x1ac(%rsp), %eax addl $0x1, %eax movl %eax, 0x1ac(%rsp) jmp 0x94f4ef jmp 0x9505b7 movl 0x1b0(%rsp), %eax addl $0x1, %eax movl %eax, 0x1b0(%rsp) jmp 0x94f4d0 jmp 0x9505cf movl 0x22c(%rsp), %eax addl $0x1, %eax movl %eax, 0x22c(%rsp) jmp 0x94f006 addq $0x698, %rsp # imm = 0x698 retq nopl (%rax)
/ysh329[P]ncnn/src/layer/x86/deconvolution_pack4to1.h
virtual thunk to ncnn::Deconvolution_x86::forward(ncnn::Mat const&, ncnn::Mat&, ncnn::Option const&) const
int Deconvolution_x86::forward(const Mat& bottom_blob, Mat& top_blob, const Option& opt) const { // deconvolv with NxN kernel // value = value + bias int w = bottom_blob.w; int h = bottom_blob.h; size_t elemsize = bottom_blob.elemsize; int elempack = bottom_blob.elempack; // NCNN_LOGE("Deconvolution input %d x %d pad = %d %d ksize=%d %d stride=%d %d", w, h, pad_w, pad_h, kernel_w, kernel_h, stride_w, stride_h); const int kernel_extent_w = dilation_w * (kernel_w - 1) + 1; const int kernel_extent_h = dilation_h * (kernel_h - 1) + 1; int outw = (w - 1) * stride_w + kernel_extent_w + output_pad_right; int outh = (h - 1) * stride_h + kernel_extent_h + output_pad_bottom; int out_elempack = 1; #if __SSE2__ if (opt.use_packing_layout) { #if __AVX512F__ out_elempack = num_output % 16 == 0 ? 16 : num_output % 8 == 0 ? 8 : num_output % 4 == 0 ? 4 : 1; #elif __AVX__ out_elempack = num_output % 8 == 0 ? 8 : num_output % 4 == 0 ? 4 : 1; #else out_elempack = num_output % 4 == 0 ? 4 : 1; #endif } #endif // __SSE2__ size_t out_elemsize = elemsize / elempack * out_elempack; Mat top_blob_bordered; if (pad_left > 0 || pad_right > 0 || pad_top > 0 || pad_bottom > 0 || (output_w > 0 && output_h > 0)) { top_blob_bordered.create(outw, outh, num_output / out_elempack, out_elemsize, out_elempack, opt.workspace_allocator); } else { top_blob_bordered = top_blob; top_blob_bordered.create(outw, outh, num_output / out_elempack, out_elemsize, out_elempack, opt.blob_allocator); } if (top_blob_bordered.empty()) return -100; const int maxk = kernel_w * kernel_h; #if __SSE2__ #if __AVX__ #if __AVX512F__ if (elempack == 16 && out_elempack == 16) { { deconvolution_pack16_avx512(bottom_blob, top_blob_bordered, weight_data_tm, bias_data, kernel_w, kernel_h, dilation_w, dilation_h, stride_w, stride_h, activation_type, activation_params, opt); } } if (elempack == 8 && out_elempack == 16) { { deconvolution_pack8to16_avx512(bottom_blob, top_blob_bordered, weight_data_tm, bias_data, kernel_w, kernel_h, 
dilation_w, dilation_h, stride_w, stride_h, activation_type, activation_params, opt); } } if (elempack == 16 && out_elempack == 8) { { deconvolution_pack16to8_avx512(bottom_blob, top_blob_bordered, weight_data_tm, bias_data, kernel_w, kernel_h, dilation_w, dilation_h, stride_w, stride_h, activation_type, activation_params, opt); } } if (elempack == 4 && out_elempack == 16) { { deconvolution_pack4to16_avx512(bottom_blob, top_blob_bordered, weight_data_tm, bias_data, kernel_w, kernel_h, dilation_w, dilation_h, stride_w, stride_h, activation_type, activation_params, opt); } } if (elempack == 16 && out_elempack == 4) { { deconvolution_pack16to4_avx512(bottom_blob, top_blob_bordered, weight_data_tm, bias_data, kernel_w, kernel_h, dilation_w, dilation_h, stride_w, stride_h, activation_type, activation_params, opt); } } if (elempack == 1 && out_elempack == 16) { { deconvolution_pack1to16_avx512(bottom_blob, top_blob_bordered, weight_data_tm, bias_data, kernel_w, kernel_h, dilation_w, dilation_h, stride_w, stride_h, activation_type, activation_params, opt); } } if (elempack == 16 && out_elempack == 1) { { deconvolution_pack16to1_avx512(bottom_blob, top_blob_bordered, weight_data_tm, bias_data, kernel_w, kernel_h, dilation_w, dilation_h, stride_w, stride_h, activation_type, activation_params, opt); } } #endif // __AVX512F__ if (elempack == 8 && out_elempack == 8) { { deconvolution_pack8_avx(bottom_blob, top_blob_bordered, weight_data_tm, bias_data, kernel_w, kernel_h, dilation_w, dilation_h, stride_w, stride_h, activation_type, activation_params, opt); } } if (elempack == 4 && out_elempack == 8) { { deconvolution_pack4to8_avx(bottom_blob, top_blob_bordered, weight_data_tm, bias_data, kernel_w, kernel_h, dilation_w, dilation_h, stride_w, stride_h, activation_type, activation_params, opt); } } if (elempack == 8 && out_elempack == 4) { { deconvolution_pack8to4_avx(bottom_blob, top_blob_bordered, weight_data_tm, bias_data, kernel_w, kernel_h, dilation_w, dilation_h, stride_w, 
stride_h, activation_type, activation_params, opt); } } if (elempack == 1 && out_elempack == 8) { { deconvolution_pack1to8_avx(bottom_blob, top_blob_bordered, weight_data_tm, bias_data, kernel_w, kernel_h, dilation_w, dilation_h, stride_w, stride_h, activation_type, activation_params, opt); } } if (elempack == 8 && out_elempack == 1) { { deconvolution_pack8to1_avx(bottom_blob, top_blob_bordered, weight_data_tm, bias_data, kernel_w, kernel_h, dilation_w, dilation_h, stride_w, stride_h, activation_type, activation_params, opt); } } #endif // __AVX__ if (elempack == 4 && out_elempack == 4) { { deconvolution_pack4_sse(bottom_blob, top_blob_bordered, weight_data_tm, bias_data, kernel_w, kernel_h, dilation_w, dilation_h, stride_w, stride_h, activation_type, activation_params, opt); } } if (elempack == 1 && out_elempack == 4) { { deconvolution_pack1to4_sse(bottom_blob, top_blob_bordered, weight_data_tm, bias_data, kernel_w, kernel_h, dilation_w, dilation_h, stride_w, stride_h, activation_type, activation_params, opt); } } if (elempack == 4 && out_elempack == 1) { { deconvolution_pack4to1_sse(bottom_blob, top_blob_bordered, weight_data_tm, bias_data, kernel_w, kernel_h, dilation_w, dilation_h, stride_w, stride_h, activation_type, activation_params, opt); } } #endif // __SSE2__ if (elempack == 1 && out_elempack == 1) { { // num_output #pragma omp parallel for num_threads(opt.num_threads) for (int p = 0; p < num_output; p++) { float* outptr = top_blob_bordered.channel(p); // shadowed variable for less openmp task args const int w = bottom_blob.w; const int h = bottom_blob.h; const int channels = bottom_blob.c; const int outw = top_blob_bordered.w; const int outh = top_blob_bordered.h; for (int i = 0; i < outh; i++) { for (int j = 0; j < outw; j++) { float sum = 0.f; if (bias_term) { sum = bias_data[p]; } const float* kptr = (const float*)weight_data_tm.channel(p); // channels for (int q = 0; q < channels; q++) { const Mat m = bottom_blob.channel(q); for (int y = 0; y < 
kernel_h; y++) { int sys = (i + y * dilation_h - (kernel_extent_h - 1)); if (sys < 0 || sys % stride_h != 0) continue; int sy = sys / stride_h; if (sy >= h) continue; const float* sptr = m.row(sy); for (int x = 0; x < kernel_w; x++) { int sxs = (j + x * dilation_w - (kernel_extent_w - 1)); if (sxs < 0 || sxs % stride_w != 0) continue; int sx = sxs / stride_w; if (sx >= w) continue; float val = sptr[sx]; int k = y * kernel_w + x; float w = kptr[k]; sum += val * w; } } kptr += maxk; } sum = activation_ss(sum, activation_type, activation_params); outptr[j] = sum; } outptr += outw; } } } } cut_padding(top_blob_bordered, top_blob, opt); if (top_blob.empty()) return -100; return 0; }
movq %rdi, -0x8(%rsp) movq %rsi, -0x10(%rsp) movq %rdx, -0x18(%rsp) movq %rcx, -0x20(%rsp) movq -0x8(%rsp), %rdi movq (%rdi), %rax movq -0x48(%rax), %rax addq %rax, %rdi movq -0x10(%rsp), %rsi movq -0x18(%rsp), %rdx movq -0x20(%rsp), %rcx jmp 0x944bf0 nopw (%rax,%rax)
/ysh329[P]ncnn/src/layer/x86/deconvolution_x86.cpp
virtual thunk to ncnn::Deconvolution_x86_avx512::create_pipeline(ncnn::Option const&)
int Deconvolution_x86_avx512::create_pipeline(const Option& opt) { const int maxk = kernel_w * kernel_h; int num_input = weight_data_size / maxk / num_output; Mat weight_data_transposed(weight_data.w); { float* pt = weight_data_transposed; const float* p = weight_data; for (int i = 0; i < num_input * num_output; i++) { for (int k = 0; k < maxk; k++) { pt[maxk - 1 - k] = p[k]; } p += maxk; pt += maxk; } } int elempack = 1; int out_elempack = 1; #if __SSE2__ if (opt.use_packing_layout) { #if __AVX512F__ elempack = num_input % 16 == 0 ? 16 : num_input % 8 == 0 ? 8 : num_input % 4 == 0 ? 4 : 1; out_elempack = num_output % 16 == 0 ? 16 : num_output % 8 == 0 ? 8 : num_output % 4 == 0 ? 4 : 1; #elif __AVX__ elempack = num_input % 8 == 0 ? 8 : num_input % 4 == 0 ? 4 : 1; out_elempack = num_output % 8 == 0 ? 8 : num_output % 4 == 0 ? 4 : 1; #else elempack = num_input % 4 == 0 ? 4 : 1; out_elempack = num_output % 4 == 0 ? 4 : 1; #endif } #endif // __SSE2__ // src = kw-kh-inch-outch // dst = pb-pa-kw-kh-inch/pa-outch/pb { Mat weight_data_r2 = weight_data_transposed.reshape(maxk, num_input, num_output); weight_data_tm.create(maxk, num_input / elempack, num_output / out_elempack, (size_t)4u * elempack * out_elempack, elempack * out_elempack); for (int q = 0; q + (out_elempack - 1) < num_output; q += out_elempack) { float* g00 = weight_data_tm.channel(q / out_elempack); for (int p = 0; p + (elempack - 1) < num_input; p += elempack) { for (int k = 0; k < maxk; k++) { for (int i = 0; i < elempack; i++) { for (int j = 0; j < out_elempack; j++) { const float* k00 = weight_data_r2.channel(q + j).row(p + i); g00[0] = k00[k]; g00++; } } } } } } if (opt.lightmode) { weight_data.release(); } return 0; }
movq %rdi, -0x8(%rsp) movq %rsi, -0x10(%rsp) movq -0x8(%rsp), %rdi movq (%rdi), %rax movq -0x30(%rax), %rax addq %rax, %rdi movq -0x10(%rsp), %rsi jmp 0x950790 nopw %cs:(%rax,%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/deconvolution_x86_avx512.cpp
ncnn::Deconvolution_x86_avx512::forward(ncnn::Mat const&, ncnn::Mat&, ncnn::Option const&) const
int Deconvolution_x86_avx512::forward(const Mat& bottom_blob, Mat& top_blob, const Option& opt) const { // deconvolv with NxN kernel // value = value + bias int w = bottom_blob.w; int h = bottom_blob.h; size_t elemsize = bottom_blob.elemsize; int elempack = bottom_blob.elempack; // NCNN_LOGE("Deconvolution input %d x %d pad = %d %d ksize=%d %d stride=%d %d", w, h, pad_w, pad_h, kernel_w, kernel_h, stride_w, stride_h); const int kernel_extent_w = dilation_w * (kernel_w - 1) + 1; const int kernel_extent_h = dilation_h * (kernel_h - 1) + 1; int outw = (w - 1) * stride_w + kernel_extent_w + output_pad_right; int outh = (h - 1) * stride_h + kernel_extent_h + output_pad_bottom; int out_elempack = 1; #if __SSE2__ if (opt.use_packing_layout) { #if __AVX512F__ out_elempack = num_output % 16 == 0 ? 16 : num_output % 8 == 0 ? 8 : num_output % 4 == 0 ? 4 : 1; #elif __AVX__ out_elempack = num_output % 8 == 0 ? 8 : num_output % 4 == 0 ? 4 : 1; #else out_elempack = num_output % 4 == 0 ? 4 : 1; #endif } #endif // __SSE2__ size_t out_elemsize = elemsize / elempack * out_elempack; Mat top_blob_bordered; if (pad_left > 0 || pad_right > 0 || pad_top > 0 || pad_bottom > 0 || (output_w > 0 && output_h > 0)) { top_blob_bordered.create(outw, outh, num_output / out_elempack, out_elemsize, out_elempack, opt.workspace_allocator); } else { top_blob_bordered = top_blob; top_blob_bordered.create(outw, outh, num_output / out_elempack, out_elemsize, out_elempack, opt.blob_allocator); } if (top_blob_bordered.empty()) return -100; const int maxk = kernel_w * kernel_h; #if __SSE2__ #if __AVX__ #if __AVX512F__ if (elempack == 16 && out_elempack == 16) { { deconvolution_pack16_avx512(bottom_blob, top_blob_bordered, weight_data_tm, bias_data, kernel_w, kernel_h, dilation_w, dilation_h, stride_w, stride_h, activation_type, activation_params, opt); } } if (elempack == 8 && out_elempack == 16) { { deconvolution_pack8to16_avx512(bottom_blob, top_blob_bordered, weight_data_tm, bias_data, kernel_w, kernel_h, 
dilation_w, dilation_h, stride_w, stride_h, activation_type, activation_params, opt); } } if (elempack == 16 && out_elempack == 8) { { deconvolution_pack16to8_avx512(bottom_blob, top_blob_bordered, weight_data_tm, bias_data, kernel_w, kernel_h, dilation_w, dilation_h, stride_w, stride_h, activation_type, activation_params, opt); } } if (elempack == 4 && out_elempack == 16) { { deconvolution_pack4to16_avx512(bottom_blob, top_blob_bordered, weight_data_tm, bias_data, kernel_w, kernel_h, dilation_w, dilation_h, stride_w, stride_h, activation_type, activation_params, opt); } } if (elempack == 16 && out_elempack == 4) { { deconvolution_pack16to4_avx512(bottom_blob, top_blob_bordered, weight_data_tm, bias_data, kernel_w, kernel_h, dilation_w, dilation_h, stride_w, stride_h, activation_type, activation_params, opt); } } if (elempack == 1 && out_elempack == 16) { { deconvolution_pack1to16_avx512(bottom_blob, top_blob_bordered, weight_data_tm, bias_data, kernel_w, kernel_h, dilation_w, dilation_h, stride_w, stride_h, activation_type, activation_params, opt); } } if (elempack == 16 && out_elempack == 1) { { deconvolution_pack16to1_avx512(bottom_blob, top_blob_bordered, weight_data_tm, bias_data, kernel_w, kernel_h, dilation_w, dilation_h, stride_w, stride_h, activation_type, activation_params, opt); } } #endif // __AVX512F__ if (elempack == 8 && out_elempack == 8) { { deconvolution_pack8_avx(bottom_blob, top_blob_bordered, weight_data_tm, bias_data, kernel_w, kernel_h, dilation_w, dilation_h, stride_w, stride_h, activation_type, activation_params, opt); } } if (elempack == 4 && out_elempack == 8) { { deconvolution_pack4to8_avx(bottom_blob, top_blob_bordered, weight_data_tm, bias_data, kernel_w, kernel_h, dilation_w, dilation_h, stride_w, stride_h, activation_type, activation_params, opt); } } if (elempack == 8 && out_elempack == 4) { { deconvolution_pack8to4_avx(bottom_blob, top_blob_bordered, weight_data_tm, bias_data, kernel_w, kernel_h, dilation_w, dilation_h, stride_w, 
stride_h, activation_type, activation_params, opt); } } if (elempack == 1 && out_elempack == 8) { { deconvolution_pack1to8_avx(bottom_blob, top_blob_bordered, weight_data_tm, bias_data, kernel_w, kernel_h, dilation_w, dilation_h, stride_w, stride_h, activation_type, activation_params, opt); } } if (elempack == 8 && out_elempack == 1) { { deconvolution_pack8to1_avx(bottom_blob, top_blob_bordered, weight_data_tm, bias_data, kernel_w, kernel_h, dilation_w, dilation_h, stride_w, stride_h, activation_type, activation_params, opt); } } #endif // __AVX__ if (elempack == 4 && out_elempack == 4) { { deconvolution_pack4_sse(bottom_blob, top_blob_bordered, weight_data_tm, bias_data, kernel_w, kernel_h, dilation_w, dilation_h, stride_w, stride_h, activation_type, activation_params, opt); } } if (elempack == 1 && out_elempack == 4) { { deconvolution_pack1to4_sse(bottom_blob, top_blob_bordered, weight_data_tm, bias_data, kernel_w, kernel_h, dilation_w, dilation_h, stride_w, stride_h, activation_type, activation_params, opt); } } if (elempack == 4 && out_elempack == 1) { { deconvolution_pack4to1_sse(bottom_blob, top_blob_bordered, weight_data_tm, bias_data, kernel_w, kernel_h, dilation_w, dilation_h, stride_w, stride_h, activation_type, activation_params, opt); } } #endif // __SSE2__ if (elempack == 1 && out_elempack == 1) { { // num_output #pragma omp parallel for num_threads(opt.num_threads) for (int p = 0; p < num_output; p++) { float* outptr = top_blob_bordered.channel(p); // shadowed variable for less openmp task args const int w = bottom_blob.w; const int h = bottom_blob.h; const int channels = bottom_blob.c; const int outw = top_blob_bordered.w; const int outh = top_blob_bordered.h; for (int i = 0; i < outh; i++) { for (int j = 0; j < outw; j++) { float sum = 0.f; if (bias_term) { sum = bias_data[p]; } const float* kptr = (const float*)weight_data_tm.channel(p); // channels for (int q = 0; q < channels; q++) { const Mat m = bottom_blob.channel(q); for (int y = 0; y < 
kernel_h; y++) { int sys = (i + y * dilation_h - (kernel_extent_h - 1)); if (sys < 0 || sys % stride_h != 0) continue; int sy = sys / stride_h; if (sy >= h) continue; const float* sptr = m.row(sy); for (int x = 0; x < kernel_w; x++) { int sxs = (j + x * dilation_w - (kernel_extent_w - 1)); if (sxs < 0 || sxs % stride_w != 0) continue; int sx = sxs / stride_w; if (sx >= w) continue; float val = sptr[sx]; int k = y * kernel_w + x; float w = kptr[k]; sum += val * w; } } kptr += maxk; } sum = activation_ss(sum, activation_type, activation_params); outptr[j] = sum; } outptr += outw; } } } } cut_padding(top_blob_bordered, top_blob, opt); if (top_blob.empty()) return -100; return 0; }
pushq %rbp pushq %r15 pushq %r14 pushq %rbx subq $0x748, %rsp # imm = 0x748 movq %rdi, 0x3a0(%rsp) movq %rsi, 0x398(%rsp) movq %rdx, 0x390(%rsp) movq %rcx, 0x388(%rsp) movq 0x3a0(%rsp), %rcx movq %rcx, 0x1b0(%rsp) movq 0x398(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x384(%rsp) movq 0x398(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0x380(%rsp) movq 0x398(%rsp), %rax movq 0x10(%rax), %rax movq %rax, 0x378(%rsp) movq 0x398(%rsp), %rax movl 0x18(%rax), %eax movl %eax, 0x374(%rsp) movq (%rcx), %rax movq -0x18(%rax), %rax movl 0xdc(%rcx,%rax), %eax movq (%rcx), %rdx movq -0x18(%rdx), %rdx movl 0xd4(%rcx,%rdx), %edx subl $0x1, %edx imull %edx, %eax addl $0x1, %eax movl %eax, 0x370(%rsp) movq (%rcx), %rax movq -0x18(%rax), %rax movl 0xe0(%rcx,%rax), %eax movq (%rcx), %rdx movq -0x18(%rdx), %rdx movl 0xd8(%rcx,%rdx), %edx subl $0x1, %edx imull %edx, %eax addl $0x1, %eax movl %eax, 0x36c(%rsp) movl 0x384(%rsp), %eax subl $0x1, %eax movq (%rcx), %rdx movq -0x18(%rdx), %rdx imull 0xe4(%rcx,%rdx), %eax addl 0x370(%rsp), %eax movq (%rcx), %rdx movq -0x18(%rdx), %rdx addl 0xfc(%rcx,%rdx), %eax movl %eax, 0x368(%rsp) movl 0x380(%rsp), %eax subl $0x1, %eax movq (%rcx), %rdx movq -0x18(%rdx), %rdx imull 0xe8(%rcx,%rdx), %eax addl 0x36c(%rsp), %eax movq (%rcx), %rdx movq -0x18(%rdx), %rdx addl 0x100(%rcx,%rdx), %eax movl %eax, 0x364(%rsp) movl $0x1, 0x360(%rsp) movq 0x388(%rsp), %rax testb $0x1, 0x27(%rax) je 0x951e89 movq 0x1b0(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx movl 0xd0(%rax,%rcx), %eax movl $0x10, %ecx cltd idivl %ecx cmpl $0x0, %edx jne 0x951e07 movl $0x10, %eax movl %eax, 0x1ac(%rsp) jmp 0x951e7b movq 0x1b0(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx movl 0xd0(%rax,%rcx), %eax movl $0x8, %ecx cltd idivl %ecx cmpl $0x0, %edx jne 0x951e38 movl $0x8, %eax movl %eax, 0x1a8(%rsp) jmp 0x951e6d movq 0x1b0(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx movl 0xd0(%rax,%rcx), %eax movl $0x4, %ecx cltd idivl %ecx movl $0x1, %eax movl $0x4, %ecx cmpl 
$0x0, %edx cmovel %ecx, %eax movl %eax, 0x1a8(%rsp) movl 0x1a8(%rsp), %eax movl %eax, 0x1ac(%rsp) movl 0x1ac(%rsp), %eax movl %eax, 0x360(%rsp) movq 0x378(%rsp), %rax movslq 0x374(%rsp), %rcx xorl %edx, %edx divq %rcx movq %rax, %rcx movq 0x1b0(%rsp), %rax movslq 0x360(%rsp), %rdx imulq %rdx, %rcx movq %rcx, 0x358(%rsp) leaq 0x310(%rsp), %rcx movq %rcx, 0x3b0(%rsp) movq 0x3b0(%rsp), %rcx movq %rcx, 0x1a0(%rsp) movq $0x0, (%rcx) movq $0x0, 0x8(%rcx) movq $0x0, 0x10(%rcx) movl $0x0, 0x18(%rcx) movq $0x0, 0x20(%rcx) movl $0x0, 0x28(%rcx) movl $0x0, 0x2c(%rcx) movl $0x0, 0x30(%rcx) movl $0x0, 0x34(%rcx) movl $0x0, 0x38(%rcx) movq $0x0, 0x40(%rcx) movq (%rax), %rcx movq -0x18(%rcx), %rcx cmpl $0x0, 0xec(%rax,%rcx) jg 0x951fc8 movq 0x1b0(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx cmpl $0x0, 0xf0(%rax,%rcx) jg 0x951fc8 movq 0x1b0(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx cmpl $0x0, 0xf4(%rax,%rcx) jg 0x951fc8 movq 0x1b0(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx cmpl $0x0, 0xf8(%rax,%rcx) jg 0x951fc8 movq 0x1b0(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx cmpl $0x0, 0x104(%rax,%rcx) jle 0x95204f movq 0x1b0(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx cmpl $0x0, 0x108(%rax,%rcx) jle 0x95204f movq 0x1b0(%rsp), %rax movl 0x368(%rsp), %esi movl 0x364(%rsp), %ecx movl %ecx, 0x19c(%rsp) movq (%rax), %rcx movq -0x18(%rcx), %rcx movl 0xd0(%rax,%rcx), %eax movl 0x360(%rsp), %r9d cltd idivl %r9d movl 0x19c(%rsp), %edx movl %eax, %ecx movq 0x358(%rsp), %r8 movq 0x388(%rsp), %rax movq 0x10(%rax), %rdi movq %rsp, %rax movq %rdi, (%rax) leaq 0x310(%rsp), %rdi callq 0x62060 jmp 0x952031 jmp 0x9522fc movq %rax, %rcx movl %edx, %eax movq %rcx, 0x308(%rsp) movl %eax, 0x304(%rsp) jmp 0x9546c2 movq 0x390(%rsp), %rax leaq 0x310(%rsp), %rcx movq %rcx, 0x4c0(%rsp) movq %rax, 0x4b8(%rsp) movq 0x4c0(%rsp), %rax movq %rax, 0x190(%rsp) cmpq 0x4b8(%rsp), %rax jne 0x95209e movq 0x190(%rsp), %rax movq %rax, 0x4c8(%rsp) jmp 0x952291 movq 0x4b8(%rsp), %rax 
cmpq $0x0, 0x8(%rax) je 0x9520d6 movq 0x4b8(%rsp), %rax movq 0x8(%rax), %rcx movl $0x1, 0x4b4(%rsp) movl 0x4b4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x4b0(%rsp) movq 0x190(%rsp), %rax movq %rax, 0x4d8(%rsp) movq 0x4d8(%rsp), %rax movq %rax, 0x188(%rsp) cmpq $0x0, 0x8(%rax) je 0x95218e movq 0x188(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x4d4(%rsp) # imm = 0xFFFFFFFF movl 0x4d4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x4d0(%rsp) cmpl $0x1, 0x4d0(%rsp) jne 0x95218e movq 0x188(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x95215f movq 0x188(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x95215d jmp 0x95218c movq 0x188(%rsp), %rax movq (%rax), %rax movq %rax, 0x6e8(%rsp) cmpq $0x0, 0x6e8(%rsp) je 0x95218a movq 0x6e8(%rsp), %rdi callq 0x5e480 jmp 0x95218c jmp 0x95218e movq 0x190(%rsp), %rax movq 0x188(%rsp), %rcx movq $0x0, (%rcx) movq $0x0, 0x10(%rcx) movl $0x0, 0x18(%rcx) movl $0x0, 0x28(%rcx) movl $0x0, 0x2c(%rcx) movl $0x0, 0x30(%rcx) movl $0x0, 0x34(%rcx) movl $0x0, 0x38(%rcx) movq $0x0, 0x40(%rcx) movq $0x0, 0x8(%rcx) movq 0x4b8(%rsp), %rcx movq (%rcx), %rcx movq %rcx, (%rax) movq 0x4b8(%rsp), %rcx movq 0x8(%rcx), %rcx movq %rcx, 0x8(%rax) movq 0x4b8(%rsp), %rcx movq 0x10(%rcx), %rcx movq %rcx, 0x10(%rax) movq 0x4b8(%rsp), %rcx movl 0x18(%rcx), %ecx movl %ecx, 0x18(%rax) movq 0x4b8(%rsp), %rcx movq 0x20(%rcx), %rcx movq %rcx, 0x20(%rax) movq 0x4b8(%rsp), %rcx movl 0x28(%rcx), %ecx movl %ecx, 0x28(%rax) movq 0x4b8(%rsp), %rcx movl 0x2c(%rcx), %ecx movl %ecx, 0x2c(%rax) movq 0x4b8(%rsp), %rcx movl 0x30(%rcx), %ecx movl %ecx, 0x30(%rax) movq 0x4b8(%rsp), %rcx movl 0x34(%rcx), %ecx movl %ecx, 0x34(%rax) movq 0x4b8(%rsp), %rcx movl 0x38(%rcx), %ecx movl %ecx, 0x38(%rax) movq 0x4b8(%rsp), %rcx movq 0x40(%rcx), %rcx movq %rcx, 0x40(%rax) movq %rax, 0x4c8(%rsp) movq 0x1b0(%rsp), %rax movl 0x368(%rsp), %esi movl 0x364(%rsp), %ecx movl %ecx, 0x184(%rsp) movq (%rax), %rcx movq -0x18(%rcx), %rcx 
movl 0xd0(%rax,%rcx), %eax movl 0x360(%rsp), %r9d cltd idivl %r9d movl 0x184(%rsp), %edx movl %eax, %ecx movq 0x358(%rsp), %r8 movq 0x388(%rsp), %rax movq 0x8(%rax), %rdi movq %rsp, %rax movq %rdi, (%rax) leaq 0x310(%rsp), %rdi callq 0x62060 jmp 0x9522fa jmp 0x9522fc leaq 0x310(%rsp), %rax movq %rax, 0x4e8(%rsp) movq 0x4e8(%rsp), %rcx movq %rcx, 0x178(%rsp) movb $0x1, %al cmpq $0x0, (%rcx) movb %al, 0x183(%rsp) je 0x95235d movq 0x178(%rsp), %rax movq %rax, 0x738(%rsp) movq 0x738(%rsp), %rcx movq 0x40(%rcx), %rax movslq 0x38(%rcx), %rcx imulq %rcx, %rax cmpq $0x0, %rax sete %al movb %al, 0x183(%rsp) movb 0x183(%rsp), %al movb %al, 0x177(%rsp) movb 0x177(%rsp), %al testb $0x1, %al jne 0x952378 jmp 0x952393 movl $0xffffff9c, 0x3ac(%rsp) # imm = 0xFFFFFF9C movl $0x1, 0x300(%rsp) jmp 0x95459c movq 0x1b0(%rsp), %rcx movq (%rcx), %rax movq -0x18(%rax), %rax movl 0xd4(%rcx,%rax), %eax movq (%rcx), %rdx movq -0x18(%rdx), %rdx imull 0xd8(%rcx,%rdx), %eax movl %eax, 0x2fc(%rsp) cmpl $0x10, 0x374(%rsp) jne 0x95247d cmpl $0x10, 0x360(%rsp) jne 0x95247d movq 0x1b0(%rsp), %rax movq 0x398(%rsp), %rdi movq %rax, %rdx addq $0x8, %rdx movq (%rax), %rcx movq -0x18(%rcx), %r14 movq %r14, 0x168(%rsp) leaq 0x1a8(%rax,%r14), %rcx movl 0xd4(%rax,%r14), %r8d movl 0xd8(%rax,%r14), %r9d movl 0xdc(%rax,%r14), %esi movl 0xe0(%rax,%r14), %r10d movl 0xe4(%rax,%r14), %r11d movl 0xe8(%rax,%r14), %ebx movl 0x114(%rax,%r14), %ebp leaq 0x118(%rax,%r14), %r14 movq 0x388(%rsp), %r15 movq %rsp, %rax movq %r15, 0x30(%rax) movq %r14, 0x28(%rax) movl %ebp, 0x20(%rax) movl %ebx, 0x18(%rax) movl %r11d, 0x10(%rax) movl %r10d, 0x8(%rax) movl %esi, (%rax) leaq 0x310(%rsp), %rsi callq 0x9547f0 jmp 0x95247b jmp 0x95247d cmpl $0x8, 0x374(%rsp) jne 0x95253b cmpl $0x10, 0x360(%rsp) jne 0x95253b movq 0x1b0(%rsp), %rax movq 0x398(%rsp), %rdi movq %rax, %rdx addq $0x8, %rdx movq (%rax), %rcx movq -0x18(%rcx), %r14 movq %r14, 0x160(%rsp) leaq 0x1a8(%rax,%r14), %rcx movl 0xd4(%rax,%r14), %r8d movl 0xd8(%rax,%r14), %r9d 
movl 0xdc(%rax,%r14), %esi movl 0xe0(%rax,%r14), %r10d movl 0xe4(%rax,%r14), %r11d movl 0xe8(%rax,%r14), %ebx movl 0x114(%rax,%r14), %ebp leaq 0x118(%rax,%r14), %r14 movq 0x388(%rsp), %r15 movq %rsp, %rax movq %r15, 0x30(%rax) movq %r14, 0x28(%rax) movl %ebp, 0x20(%rax) movl %ebx, 0x18(%rax) movl %r11d, 0x10(%rax) movl %r10d, 0x8(%rax) movl %esi, (%rax) leaq 0x310(%rsp), %rsi callq 0x9597c0 jmp 0x952539 jmp 0x95253b cmpl $0x10, 0x374(%rsp) jne 0x9525f9 cmpl $0x8, 0x360(%rsp) jne 0x9525f9 movq 0x1b0(%rsp), %rax movq 0x398(%rsp), %rdi movq %rax, %rdx addq $0x8, %rdx movq (%rax), %rcx movq -0x18(%rcx), %r14 movq %r14, 0x158(%rsp) leaq 0x1a8(%rax,%r14), %rcx movl 0xd4(%rax,%r14), %r8d movl 0xd8(%rax,%r14), %r9d movl 0xdc(%rax,%r14), %esi movl 0xe0(%rax,%r14), %r10d movl 0xe4(%rax,%r14), %r11d movl 0xe8(%rax,%r14), %ebx movl 0x114(%rax,%r14), %ebp leaq 0x118(%rax,%r14), %r14 movq 0x388(%rsp), %r15 movq %rsp, %rax movq %r15, 0x30(%rax) movq %r14, 0x28(%rax) movl %ebp, 0x20(%rax) movl %ebx, 0x18(%rax) movl %r11d, 0x10(%rax) movl %r10d, 0x8(%rax) movl %esi, (%rax) leaq 0x310(%rsp), %rsi callq 0x95db70 jmp 0x9525f7 jmp 0x9525f9 cmpl $0x4, 0x374(%rsp) jne 0x9526b7 cmpl $0x10, 0x360(%rsp) jne 0x9526b7 movq 0x1b0(%rsp), %rax movq 0x398(%rsp), %rdi movq %rax, %rdx addq $0x8, %rdx movq (%rax), %rcx movq -0x18(%rcx), %r14 movq %r14, 0x150(%rsp) leaq 0x1a8(%rax,%r14), %rcx movl 0xd4(%rax,%r14), %r8d movl 0xd8(%rax,%r14), %r9d movl 0xdc(%rax,%r14), %esi movl 0xe0(%rax,%r14), %r10d movl 0xe4(%rax,%r14), %r11d movl 0xe8(%rax,%r14), %ebx movl 0x114(%rax,%r14), %ebp leaq 0x118(%rax,%r14), %r14 movq 0x388(%rsp), %r15 movq %rsp, %rax movq %r15, 0x30(%rax) movq %r14, 0x28(%rax) movl %ebp, 0x20(%rax) movl %ebx, 0x18(%rax) movl %r11d, 0x10(%rax) movl %r10d, 0x8(%rax) movl %esi, (%rax) leaq 0x310(%rsp), %rsi callq 0x9636d0 jmp 0x9526b5 jmp 0x9526b7 cmpl $0x10, 0x374(%rsp) jne 0x952775 cmpl $0x4, 0x360(%rsp) jne 0x952775 movq 0x1b0(%rsp), %rax movq 0x398(%rsp), %rdi movq %rax, %rdx addq $0x8, 
%rdx movq (%rax), %rcx movq -0x18(%rcx), %r14 movq %r14, 0x148(%rsp) leaq 0x1a8(%rax,%r14), %rcx movl 0xd4(%rax,%r14), %r8d movl 0xd8(%rax,%r14), %r9d movl 0xdc(%rax,%r14), %esi movl 0xe0(%rax,%r14), %r10d movl 0xe4(%rax,%r14), %r11d movl 0xe8(%rax,%r14), %ebx movl 0x114(%rax,%r14), %ebp leaq 0x118(%rax,%r14), %r14 movq 0x388(%rsp), %r15 movq %rsp, %rax movq %r15, 0x30(%rax) movq %r14, 0x28(%rax) movl %ebp, 0x20(%rax) movl %ebx, 0x18(%rax) movl %r11d, 0x10(%rax) movl %r10d, 0x8(%rax) movl %esi, (%rax) leaq 0x310(%rsp), %rsi callq 0x967630 jmp 0x952773 jmp 0x952775 cmpl $0x1, 0x374(%rsp) jne 0x952833 cmpl $0x10, 0x360(%rsp) jne 0x952833 movq 0x1b0(%rsp), %rax movq 0x398(%rsp), %rdi movq %rax, %rdx addq $0x8, %rdx movq (%rax), %rcx movq -0x18(%rcx), %r14 movq %r14, 0x140(%rsp) leaq 0x1a8(%rax,%r14), %rcx movl 0xd4(%rax,%r14), %r8d movl 0xd8(%rax,%r14), %r9d movl 0xdc(%rax,%r14), %esi movl 0xe0(%rax,%r14), %r10d movl 0xe4(%rax,%r14), %r11d movl 0xe8(%rax,%r14), %ebx movl 0x114(%rax,%r14), %ebp leaq 0x118(%rax,%r14), %r14 movq 0x388(%rsp), %r15 movq %rsp, %rax movq %r15, 0x30(%rax) movq %r14, 0x28(%rax) movl %ebp, 0x20(%rax) movl %ebx, 0x18(%rax) movl %r11d, 0x10(%rax) movl %r10d, 0x8(%rax) movl %esi, (%rax) leaq 0x310(%rsp), %rsi callq 0x96c7b0 jmp 0x952831 jmp 0x952833 cmpl $0x10, 0x374(%rsp) jne 0x9528f1 cmpl $0x1, 0x360(%rsp) jne 0x9528f1 movq 0x1b0(%rsp), %rax movq 0x398(%rsp), %rdi movq %rax, %rdx addq $0x8, %rdx movq (%rax), %rcx movq -0x18(%rcx), %r14 movq %r14, 0x138(%rsp) leaq 0x1a8(%rax,%r14), %rcx movl 0xd4(%rax,%r14), %r8d movl 0xd8(%rax,%r14), %r9d movl 0xdc(%rax,%r14), %esi movl 0xe0(%rax,%r14), %r10d movl 0xe4(%rax,%r14), %r11d movl 0xe8(%rax,%r14), %ebx movl 0x114(%rax,%r14), %ebp leaq 0x118(%rax,%r14), %r14 movq 0x388(%rsp), %r15 movq %rsp, %rax movq %r15, 0x30(%rax) movq %r14, 0x28(%rax) movl %ebp, 0x20(%rax) movl %ebx, 0x18(%rax) movl %r11d, 0x10(%rax) movl %r10d, 0x8(%rax) movl %esi, (%rax) leaq 0x310(%rsp), %rsi callq 0x970440 jmp 0x9528ef jmp 
0x9528f1 cmpl $0x8, 0x374(%rsp) jne 0x9529af cmpl $0x8, 0x360(%rsp) jne 0x9529af movq 0x1b0(%rsp), %rax movq 0x398(%rsp), %rdi movq %rax, %rdx addq $0x8, %rdx movq (%rax), %rcx movq -0x18(%rcx), %r14 movq %r14, 0x130(%rsp) leaq 0x1a8(%rax,%r14), %rcx movl 0xd4(%rax,%r14), %r8d movl 0xd8(%rax,%r14), %r9d movl 0xdc(%rax,%r14), %esi movl 0xe0(%rax,%r14), %r10d movl 0xe4(%rax,%r14), %r11d movl 0xe8(%rax,%r14), %ebx movl 0x114(%rax,%r14), %ebp leaq 0x118(%rax,%r14), %r14 movq 0x388(%rsp), %r15 movq %rsp, %rax movq %r15, 0x30(%rax) movq %r14, 0x28(%rax) movl %ebp, 0x20(%rax) movl %ebx, 0x18(%rax) movl %r11d, 0x10(%rax) movl %r10d, 0x8(%rax) movl %esi, (%rax) leaq 0x310(%rsp), %rsi callq 0x971bd0 jmp 0x9529ad jmp 0x9529af cmpl $0x4, 0x374(%rsp) jne 0x952a6d cmpl $0x8, 0x360(%rsp) jne 0x952a6d movq 0x1b0(%rsp), %rax movq 0x398(%rsp), %rdi movq %rax, %rdx addq $0x8, %rdx movq (%rax), %rcx movq -0x18(%rcx), %r14 movq %r14, 0x128(%rsp) leaq 0x1a8(%rax,%r14), %rcx movl 0xd4(%rax,%r14), %r8d movl 0xd8(%rax,%r14), %r9d movl 0xdc(%rax,%r14), %esi movl 0xe0(%rax,%r14), %r10d movl 0xe4(%rax,%r14), %r11d movl 0xe8(%rax,%r14), %ebx movl 0x114(%rax,%r14), %ebp leaq 0x118(%rax,%r14), %r14 movq 0x388(%rsp), %r15 movq %rsp, %rax movq %r15, 0x30(%rax) movq %r14, 0x28(%rax) movl %ebp, 0x20(%rax) movl %ebx, 0x18(%rax) movl %r11d, 0x10(%rax) movl %r10d, 0x8(%rax) movl %esi, (%rax) leaq 0x310(%rsp), %rsi callq 0x976e30 jmp 0x952a6b jmp 0x952a6d cmpl $0x8, 0x374(%rsp) jne 0x952b2b cmpl $0x4, 0x360(%rsp) jne 0x952b2b movq 0x1b0(%rsp), %rax movq 0x398(%rsp), %rdi movq %rax, %rdx addq $0x8, %rdx movq (%rax), %rcx movq -0x18(%rcx), %r14 movq %r14, 0x120(%rsp) leaq 0x1a8(%rax,%r14), %rcx movl 0xd4(%rax,%r14), %r8d movl 0xd8(%rax,%r14), %r9d movl 0xdc(%rax,%r14), %esi movl 0xe0(%rax,%r14), %r10d movl 0xe4(%rax,%r14), %r11d movl 0xe8(%rax,%r14), %ebx movl 0x114(%rax,%r14), %ebp leaq 0x118(%rax,%r14), %r14 movq 0x388(%rsp), %r15 movq %rsp, %rax movq %r15, 0x30(%rax) movq %r14, 0x28(%rax) movl %ebp, 
0x20(%rax) movl %ebx, 0x18(%rax) movl %r11d, 0x10(%rax) movl %r10d, 0x8(%rax) movl %esi, (%rax) leaq 0x310(%rsp), %rsi callq 0x97bc20 jmp 0x952b29 jmp 0x952b2b cmpl $0x1, 0x374(%rsp) jne 0x952be9 cmpl $0x8, 0x360(%rsp) jne 0x952be9 movq 0x1b0(%rsp), %rax movq 0x398(%rsp), %rdi movq %rax, %rdx addq $0x8, %rdx movq (%rax), %rcx movq -0x18(%rcx), %r14 movq %r14, 0x118(%rsp) leaq 0x1a8(%rax,%r14), %rcx movl 0xd4(%rax,%r14), %r8d movl 0xd8(%rax,%r14), %r9d movl 0xdc(%rax,%r14), %esi movl 0xe0(%rax,%r14), %r10d movl 0xe4(%rax,%r14), %r11d movl 0xe8(%rax,%r14), %ebx movl 0x114(%rax,%r14), %ebp leaq 0x118(%rax,%r14), %r14 movq 0x388(%rsp), %r15 movq %rsp, %rax movq %r15, 0x30(%rax) movq %r14, 0x28(%rax) movl %ebp, 0x20(%rax) movl %ebx, 0x18(%rax) movl %r11d, 0x10(%rax) movl %r10d, 0x8(%rax) movl %esi, (%rax) leaq 0x310(%rsp), %rsi callq 0x980450 jmp 0x952be7 jmp 0x952be9 cmpl $0x8, 0x374(%rsp) jne 0x952ca7 cmpl $0x1, 0x360(%rsp) jne 0x952ca7 movq 0x1b0(%rsp), %rax movq 0x398(%rsp), %rdi movq %rax, %rdx addq $0x8, %rdx movq (%rax), %rcx movq -0x18(%rcx), %r14 movq %r14, 0x110(%rsp) leaq 0x1a8(%rax,%r14), %rcx movl 0xd4(%rax,%r14), %r8d movl 0xd8(%rax,%r14), %r9d movl 0xdc(%rax,%r14), %esi movl 0xe0(%rax,%r14), %r10d movl 0xe4(%rax,%r14), %r11d movl 0xe8(%rax,%r14), %ebx movl 0x114(%rax,%r14), %ebp leaq 0x118(%rax,%r14), %r14 movq 0x388(%rsp), %r15 movq %rsp, %rax movq %r15, 0x30(%rax) movq %r14, 0x28(%rax) movl %ebp, 0x20(%rax) movl %ebx, 0x18(%rax) movl %r11d, 0x10(%rax) movl %r10d, 0x8(%rax) movl %esi, (%rax) leaq 0x310(%rsp), %rsi callq 0x984fd0 jmp 0x952ca5 jmp 0x952ca7 cmpl $0x4, 0x374(%rsp) jne 0x952d65 cmpl $0x4, 0x360(%rsp) jne 0x952d65 movq 0x1b0(%rsp), %rax movq 0x398(%rsp), %rdi movq %rax, %rdx addq $0x8, %rdx movq (%rax), %rcx movq -0x18(%rcx), %r14 movq %r14, 0x108(%rsp) leaq 0x1a8(%rax,%r14), %rcx movl 0xd4(%rax,%r14), %r8d movl 0xd8(%rax,%r14), %r9d movl 0xdc(%rax,%r14), %esi movl 0xe0(%rax,%r14), %r10d movl 0xe4(%rax,%r14), %r11d movl 0xe8(%rax,%r14), %ebx 
movl 0x114(%rax,%r14), %ebp leaq 0x118(%rax,%r14), %r14 movq 0x388(%rsp), %r15 movq %rsp, %rax movq %r15, 0x30(%rax) movq %r14, 0x28(%rax) movl %ebp, 0x20(%rax) movl %ebx, 0x18(%rax) movl %r11d, 0x10(%rax) movl %r10d, 0x8(%rax) movl %esi, (%rax) leaq 0x310(%rsp), %rsi callq 0x986710 jmp 0x952d63 jmp 0x952d65 cmpl $0x1, 0x374(%rsp) jne 0x952e23 cmpl $0x4, 0x360(%rsp) jne 0x952e23 movq 0x1b0(%rsp), %rax movq 0x398(%rsp), %rdi movq %rax, %rdx addq $0x8, %rdx movq (%rax), %rcx movq -0x18(%rcx), %r14 movq %r14, 0x100(%rsp) leaq 0x1a8(%rax,%r14), %rcx movl 0xd4(%rax,%r14), %r8d movl 0xd8(%rax,%r14), %r9d movl 0xdc(%rax,%r14), %esi movl 0xe0(%rax,%r14), %r10d movl 0xe4(%rax,%r14), %r11d movl 0xe8(%rax,%r14), %ebx movl 0x114(%rax,%r14), %ebp leaq 0x118(%rax,%r14), %r14 movq 0x388(%rsp), %r15 movq %rsp, %rax movq %r15, 0x30(%rax) movq %r14, 0x28(%rax) movl %ebp, 0x20(%rax) movl %ebx, 0x18(%rax) movl %r11d, 0x10(%rax) movl %r10d, 0x8(%rax) movl %esi, (%rax) leaq 0x310(%rsp), %rsi callq 0x98ab50 jmp 0x952e21 jmp 0x952e23 cmpl $0x4, 0x374(%rsp) jne 0x952ee1 cmpl $0x1, 0x360(%rsp) jne 0x952ee1 movq 0x1b0(%rsp), %rax movq 0x398(%rsp), %rdi movq %rax, %rdx addq $0x8, %rdx movq (%rax), %rcx movq -0x18(%rcx), %r14 movq %r14, 0xf8(%rsp) leaq 0x1a8(%rax,%r14), %rcx movl 0xd4(%rax,%r14), %r8d movl 0xd8(%rax,%r14), %r9d movl 0xdc(%rax,%r14), %esi movl 0xe0(%rax,%r14), %r10d movl 0xe4(%rax,%r14), %r11d movl 0xe8(%rax,%r14), %ebx movl 0x114(%rax,%r14), %ebp leaq 0x118(%rax,%r14), %r14 movq 0x388(%rsp), %r15 movq %rsp, %rax movq %r15, 0x30(%rax) movq %r14, 0x28(%rax) movl %ebp, 0x20(%rax) movl %ebx, 0x18(%rax) movl %r11d, 0x10(%rax) movl %r10d, 0x8(%rax) movl %esi, (%rax) leaq 0x310(%rsp), %rsi callq 0x98ebc0 jmp 0x952edf jmp 0x952ee1 cmpl $0x1, 0x374(%rsp) jne 0x9544d6 cmpl $0x1, 0x360(%rsp) jne 0x9544d6 movl $0x0, 0x2f8(%rsp) movq 0x1b0(%rsp), %rcx movl 0x2f8(%rsp), %eax movq (%rcx), %rdx movq -0x18(%rdx), %rdx cmpl 0xd0(%rcx,%rdx), %eax jge 0x9544d4 movl 0x2f8(%rsp), %eax leaq 
0x2a8(%rsp), %rcx movq %rcx, 0x3d0(%rsp) leaq 0x310(%rsp), %rcx movq %rcx, 0x3c8(%rsp) movl %eax, 0x3c4(%rsp) movq 0x3c8(%rsp), %rax movq %rax, 0xe8(%rsp) movb $0x0, 0x3c3(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x3c4(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x2a8(%rsp), %r10 movq %r10, 0x6a0(%rsp) movl %r9d, 0x69c(%rsp) movl %r8d, 0x698(%rsp) movl %edi, 0x694(%rsp) movq %rsi, 0x688(%rsp) movq %rdx, 0x680(%rsp) movl %ecx, 0x67c(%rsp) movq %rax, 0x670(%rsp) movq 0x6a0(%rsp), %rcx movq %rcx, 0xf0(%rsp) movq 0x688(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x680(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x67c(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x670(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x69c(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x698(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x694(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x6b0(%rsp) movl $0x10, 0x6ac(%rsp) movq 0x6b0(%rsp), %rax movslq 0x6ac(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x6ac(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rcx movq 0xf0(%rsp), %rax movq %rcx, 0x40(%rax) movq 0xe8(%rsp), %rax movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x2d0(%rsp) cmpl $0x4, 0x28(%rax) jne 0x9530ef movq 0xe8(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x2e8(%rsp) movb $0x1, 0x3c3(%rsp) testb $0x1, 0x3c3(%rsp) jne 0x95322a leaq 0x2a8(%rsp), %rax movq %rax, 0x3d8(%rsp) movq 0x3d8(%rsp), %rax movq %rax, 0x4a8(%rsp) movq 0x4a8(%rsp), %rax movq %rax, 0xe0(%rsp) cmpq $0x0, 0x8(%rax) je 0x9531cd movq 0xe0(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x4a4(%rsp) # imm = 0xFFFFFFFF movl 
0x4a4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x4a0(%rsp) cmpl $0x1, 0x4a0(%rsp) jne 0x9531cd movq 0xe0(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x95319e movq 0xe0(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x95319c jmp 0x9531cb movq 0xe0(%rsp), %rax movq (%rax), %rax movq %rax, 0x6f0(%rsp) cmpq $0x0, 0x6f0(%rsp) je 0x9531c9 movq 0x6f0(%rsp), %rdi callq 0x5e480 jmp 0x9531cb jmp 0x9531cd movq 0xe0(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x953228 movq %rax, %rdi callq 0x5fc90 jmp 0x95322a jmp 0x95322c leaq 0x2a8(%rsp), %rax movq %rax, 0x3b8(%rsp) movq 0x3b8(%rsp), %rax movq (%rax), %rax movq %rax, 0xd8(%rsp) leaq 0x2a8(%rsp), %rax movq %rax, 0x3e0(%rsp) movq 0x3e0(%rsp), %rax movq %rax, 0x498(%rsp) movq 0x498(%rsp), %rax movq %rax, 0xd0(%rsp) cmpq $0x0, 0x8(%rax) je 0x953317 movq 0xd0(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x494(%rsp) # imm = 0xFFFFFFFF movl 0x494(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x490(%rsp) cmpl $0x1, 0x490(%rsp) jne 0x953317 movq 0xd0(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x9532e8 movq 0xd0(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x9532e6 jmp 0x953315 movq 0xd0(%rsp), %rax movq (%rax), %rax movq %rax, 0x6f8(%rsp) cmpq $0x0, 0x6f8(%rsp) je 0x953313 movq 0x6f8(%rsp), %rdi callq 0x5e480 jmp 0x953315 jmp 0x953317 movq 0xd0(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x953372 movq %rax, %rdi callq 0x5fc90 movq 0xd8(%rsp), %rax movq %rax, 0x2f0(%rsp) movq 0x398(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x2a4(%rsp) movq 
0x398(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0x2a0(%rsp) movq 0x398(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0x29c(%rsp) movl 0x33c(%rsp), %eax movl %eax, 0x298(%rsp) movl 0x340(%rsp), %eax movl %eax, 0x294(%rsp) movl $0x0, 0x290(%rsp) movl 0x290(%rsp), %eax cmpl 0x294(%rsp), %eax jge 0x9544bc movl $0x0, 0x28c(%rsp) movl 0x28c(%rsp), %eax cmpl 0x298(%rsp), %eax jge 0x954485 movq 0x1b0(%rsp), %rax vpxor %xmm0, %xmm0, %xmm0 vmovss %xmm0, 0x288(%rsp) movq (%rax), %rcx movq -0x18(%rcx), %rcx cmpl $0x0, 0x10c(%rax,%rcx) je 0x95349d movq 0x1b0(%rsp), %rcx movq (%rcx), %rax addq -0x18(%rax), %rcx addq $0x1a8, %rcx # imm = 0x1A8 movslq 0x2f8(%rsp), %rax movq %rcx, 0x4f8(%rsp) movq %rax, 0x4f0(%rsp) movq 0x4f8(%rsp), %rax movq (%rax), %rax movq 0x4f0(%rsp), %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0xc8(%rsp) movq 0xc8(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0x288(%rsp) movq 0x1b0(%rsp), %rcx addq $0x8, %rcx movl 0x2f8(%rsp), %eax leaq 0x238(%rsp), %rdx movq %rdx, 0x540(%rsp) movq %rcx, 0x538(%rsp) movl %eax, 0x534(%rsp) movq 0x538(%rsp), %rax movq %rax, 0xb8(%rsp) movb $0x0, 0x533(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x534(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x238(%rsp), %r10 movq %r10, 0x630(%rsp) movl %r9d, 0x62c(%rsp) movl %r8d, 0x628(%rsp) movl %edi, 0x624(%rsp) movq %rsi, 0x618(%rsp) movq %rdx, 0x610(%rsp) movl %ecx, 0x60c(%rsp) movq %rax, 0x600(%rsp) movq 0x630(%rsp), %rcx movq %rcx, 0xc0(%rsp) movq 0x618(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x610(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x60c(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x600(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x62c(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x628(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x624(%rsp), %eax movl %eax, 0x38(%rcx) 
movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x6d0(%rsp) movl $0x10, 0x6cc(%rsp) movq 0x6d0(%rsp), %rax movslq 0x6cc(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x6cc(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rcx movq 0xc0(%rsp), %rax movq %rcx, 0x40(%rax) movq 0xb8(%rsp), %rax movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x260(%rsp) cmpl $0x4, 0x28(%rax) jne 0x953665 movq 0xb8(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x278(%rsp) movb $0x1, 0x533(%rsp) testb $0x1, 0x533(%rsp) jne 0x95379e leaq 0x238(%rsp), %rax movq %rax, 0x548(%rsp) movq 0x548(%rsp), %rax movq %rax, 0x558(%rsp) movq 0x558(%rsp), %rax movq %rax, 0xb0(%rsp) cmpq $0x0, 0x8(%rax) je 0x953743 movq 0xb0(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x554(%rsp) # imm = 0xFFFFFFFF movl 0x554(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x550(%rsp) cmpl $0x1, 0x550(%rsp) jne 0x953743 movq 0xb0(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x953714 movq 0xb0(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x953712 jmp 0x953741 movq 0xb0(%rsp), %rax movq (%rax), %rax movq %rax, 0x6d8(%rsp) cmpq $0x0, 0x6d8(%rsp) je 0x95373f movq 0x6d8(%rsp), %rdi callq 0x5e480 jmp 0x953741 jmp 0x953743 movq 0xb0(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x95379e movq %rax, %rdi callq 0x5fc90 jmp 0x9537a0 leaq 0x238(%rsp), %rax movq %rax, 0x560(%rsp) movq 0x560(%rsp), %rax movq (%rax), %rax movq %rax, 0xa8(%rsp) leaq 0x238(%rsp), %rax movq %rax, 0x3f0(%rsp) movq 0x3f0(%rsp), %rax movq %rax, 0x478(%rsp) movq 0x478(%rsp), %rax movq %rax, 0xa0(%rsp) cmpq $0x0, 0x8(%rax) je 0x95388b movq 0xa0(%rsp), %rax movq 
0x8(%rax), %rcx movl $0xffffffff, 0x474(%rsp) # imm = 0xFFFFFFFF movl 0x474(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x470(%rsp) cmpl $0x1, 0x470(%rsp) jne 0x95388b movq 0xa0(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x95385c movq 0xa0(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x95385a jmp 0x953889 movq 0xa0(%rsp), %rax movq (%rax), %rax movq %rax, 0x708(%rsp) cmpq $0x0, 0x708(%rsp) je 0x953887 movq 0x708(%rsp), %rdi callq 0x5e480 jmp 0x953889 jmp 0x95388b movq 0xa0(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x9538e6 movq %rax, %rdi callq 0x5fc90 movq 0xa8(%rsp), %rax movq %rax, 0x280(%rsp) movl $0x0, 0x234(%rsp) movl 0x234(%rsp), %eax cmpl 0x29c(%rsp), %eax jge 0x954019 movq 0x398(%rsp), %rcx movl 0x234(%rsp), %eax leaq 0x1e8(%rsp), %rdx movq %rdx, 0x510(%rsp) movq %rcx, 0x508(%rsp) movl %eax, 0x504(%rsp) movq 0x508(%rsp), %rax movq %rax, 0x90(%rsp) movb $0x0, 0x503(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x504(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x1e8(%rsp), %r10 movq %r10, 0x668(%rsp) movl %r9d, 0x664(%rsp) movl %r8d, 0x660(%rsp) movl %edi, 0x65c(%rsp) movq %rsi, 0x650(%rsp) movq %rdx, 0x648(%rsp) movl %ecx, 0x644(%rsp) movq %rax, 0x638(%rsp) movq 0x668(%rsp), %rcx movq %rcx, 0x98(%rsp) movq 0x650(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x648(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x644(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x638(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x664(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x660(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x65c(%rsp), %eax 
movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x6c0(%rsp) movl $0x10, 0x6bc(%rsp) movq 0x6c0(%rsp), %rax movslq 0x6bc(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x6bc(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rcx movq 0x98(%rsp), %rax movq %rcx, 0x40(%rax) movq 0x90(%rsp), %rax movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x210(%rsp) cmpl $0x4, 0x28(%rax) jne 0x953ad9 movq 0x90(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x228(%rsp) movb $0x1, 0x503(%rsp) testb $0x1, 0x503(%rsp) jne 0x953c12 leaq 0x1e8(%rsp), %rax movq %rax, 0x518(%rsp) movq 0x518(%rsp), %rax movq %rax, 0x528(%rsp) movq 0x528(%rsp), %rax movq %rax, 0x88(%rsp) cmpq $0x0, 0x8(%rax) je 0x953bb7 movq 0x88(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x524(%rsp) # imm = 0xFFFFFFFF movl 0x524(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x520(%rsp) cmpl $0x1, 0x520(%rsp) jne 0x953bb7 movq 0x88(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x953b88 movq 0x88(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x953b86 jmp 0x953bb5 movq 0x88(%rsp), %rax movq (%rax), %rax movq %rax, 0x6e0(%rsp) cmpq $0x0, 0x6e0(%rsp) je 0x953bb3 movq 0x6e0(%rsp), %rdi callq 0x5e480 jmp 0x953bb5 jmp 0x953bb7 movq 0x88(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x953c12 movq %rax, %rdi callq 0x5fc90 jmp 0x953c14 movl $0x0, 0x1e4(%rsp) movq 0x1b0(%rsp), %rcx movl 0x1e4(%rsp), %eax movq (%rcx), %rdx movq -0x18(%rdx), %rdx cmpl 0xd8(%rcx,%rdx), %eax jge 0x953ecf movq 0x1b0(%rsp), %rdx movl 0x290(%rsp), %eax movl 0x1e4(%rsp), %ecx movq (%rdx), %rsi movq -0x18(%rsi), %rsi imull 0xe0(%rdx,%rsi), %ecx 
addl %ecx, %eax movl 0x36c(%rsp), %ecx subl $0x1, %ecx subl %ecx, %eax movl %eax, 0x1e0(%rsp) cmpl $0x0, 0x1e0(%rsp) jl 0x953ca9 movq 0x1b0(%rsp), %rcx movl 0x1e0(%rsp), %eax movq (%rcx), %rdx movq -0x18(%rdx), %rsi cltd idivl 0xe8(%rcx,%rsi) cmpl $0x0, %edx je 0x953cae jmp 0x953eb9 movq 0x1b0(%rsp), %rcx movl 0x1e0(%rsp), %eax movq (%rcx), %rdx movq -0x18(%rdx), %rsi cltd idivl 0xe8(%rcx,%rsi) movl %eax, 0x1dc(%rsp) movl 0x1dc(%rsp), %eax cmpl 0x2a0(%rsp), %eax jl 0x953ce8 jmp 0x953eb9 movl 0x1dc(%rsp), %eax leaq 0x1e8(%rsp), %rcx movq %rcx, 0x570(%rsp) movl %eax, 0x56c(%rsp) movq 0x570(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0x56c(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x80(%rsp) movq 0x80(%rsp), %rax movq %rax, 0x1d0(%rsp) movl $0x0, 0x1cc(%rsp) movq 0x1b0(%rsp), %rcx movl 0x1cc(%rsp), %eax movq (%rcx), %rdx movq -0x18(%rdx), %rdx cmpl 0xd4(%rcx,%rdx), %eax jge 0x953eb7 movq 0x1b0(%rsp), %rdx movl 0x28c(%rsp), %eax movl 0x1cc(%rsp), %ecx movq (%rdx), %rsi movq -0x18(%rsi), %rsi imull 0xdc(%rdx,%rsi), %ecx addl %ecx, %eax movl 0x370(%rsp), %ecx subl $0x1, %ecx subl %ecx, %eax movl %eax, 0x1c8(%rsp) cmpl $0x0, 0x1c8(%rsp) jl 0x953dd6 movq 0x1b0(%rsp), %rcx movl 0x1c8(%rsp), %eax movq (%rcx), %rdx movq -0x18(%rdx), %rsi cltd idivl 0xe4(%rcx,%rsi) cmpl $0x0, %edx je 0x953ddb jmp 0x953ea1 movq 0x1b0(%rsp), %rcx movl 0x1c8(%rsp), %eax movq (%rcx), %rdx movq -0x18(%rdx), %rsi cltd idivl 0xe4(%rcx,%rsi) movl %eax, 0x1c4(%rsp) movl 0x1c4(%rsp), %eax cmpl 0x2a4(%rsp), %eax jl 0x953e15 jmp 0x953ea1 movq 0x1b0(%rsp), %rcx movq 0x1d0(%rsp), %rax movslq 0x1c4(%rsp), %rdx vmovss (%rax,%rdx,4), %xmm0 vmovss %xmm0, 0x1c0(%rsp) movl 0x1e4(%rsp), %eax movq (%rcx), %rdx movq -0x18(%rdx), %rdx imull 0xd4(%rcx,%rdx), %eax addl 0x1cc(%rsp), %eax movl %eax, 0x1bc(%rsp) movq 0x280(%rsp), %rax movslq 0x1bc(%rsp), %rcx vmovss (%rax,%rcx,4), %xmm0 vmovss %xmm0, 0x1b8(%rsp) vmovss 0x1c0(%rsp), %xmm0 vmulss 0x1b8(%rsp), %xmm0, 
%xmm0 vaddss 0x288(%rsp), %xmm0, %xmm0 vmovss %xmm0, 0x288(%rsp) movl 0x1cc(%rsp), %eax addl $0x1, %eax movl %eax, 0x1cc(%rsp) jmp 0x953d4c jmp 0x953eb9 movl 0x1e4(%rsp), %eax addl $0x1, %eax movl %eax, 0x1e4(%rsp) jmp 0x953c1f movl 0x2fc(%rsp), %ecx movq 0x280(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x280(%rsp) leaq 0x1e8(%rsp), %rax movq %rax, 0x400(%rsp) movq 0x400(%rsp), %rax movq %rax, 0x458(%rsp) movq 0x458(%rsp), %rax movq %rax, 0x78(%rsp) cmpq $0x0, 0x8(%rax) je 0x953fa9 movq 0x78(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x454(%rsp) # imm = 0xFFFFFFFF movl 0x454(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x450(%rsp) cmpl $0x1, 0x450(%rsp) jne 0x953fa9 movq 0x78(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x953f7d movq 0x78(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x953f7b jmp 0x953fa7 movq 0x78(%rsp), %rax movq (%rax), %rax movq %rax, 0x718(%rsp) cmpq $0x0, 0x718(%rsp) je 0x953fa5 movq 0x718(%rsp), %rdi callq 0x5e480 jmp 0x953fa7 jmp 0x953fa9 movq 0x78(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x954001 movq %rax, %rdi callq 0x5fc90 jmp 0x954003 movl 0x234(%rsp), %eax addl $0x1, %eax movl %eax, 0x234(%rsp) jmp 0x953901 movq 0x1b0(%rsp), %rax vmovss 0x288(%rsp), %xmm0 movq (%rax), %rcx movq -0x18(%rcx), %rdx movl 0x114(%rax,%rdx), %ecx leaq 0x118(%rax,%rdx), %rax vmovss %xmm0, 0x5ac(%rsp) movl %ecx, 0x5a8(%rsp) movq %rax, 0x5a0(%rsp) movl 0x5a8(%rsp), %eax decl %eax movl %eax, %ecx movq %rcx, 0x70(%rsp) subl $0x5, %eax ja 0x954433 movq 0x70(%rsp), %rax leaq 0x14b3903(%rip), %rcx # 0x1e07980 movslq (%rcx,%rax,4), %rax addq %rcx, %rax jmpq *%rax vmovss 0x5ac(%rsp), %xmm0 vpxor %xmm1, %xmm1, %xmm1 callq 0x137490 vmovss %xmm0, 0x5ac(%rsp) jmp 0x954433 movq 0x5a0(%rsp), 
%rax movq %rax, 0x5f8(%rsp) movq $0x0, 0x5f0(%rsp) movq 0x5f8(%rsp), %rax movq (%rax), %rax movq 0x5f0(%rsp), %rcx vmovss (%rax,%rcx,4), %xmm0 vmovss %xmm0, 0x59c(%rsp) vmovss 0x5ac(%rsp), %xmm0 vpxor %xmm1, %xmm1, %xmm1 vucomiss %xmm1, %xmm0 jbe 0x954107 vmovss 0x5ac(%rsp), %xmm0 vmovss %xmm0, 0x6c(%rsp) jmp 0x95411f vmovss 0x5ac(%rsp), %xmm0 vmulss 0x59c(%rsp), %xmm0, %xmm0 vmovss %xmm0, 0x6c(%rsp) vmovss 0x6c(%rsp), %xmm0 vmovss %xmm0, 0x5ac(%rsp) jmp 0x954433 movq 0x5a0(%rsp), %rax movq %rax, 0x5e8(%rsp) movq $0x0, 0x5e0(%rsp) movq 0x5e8(%rsp), %rax movq (%rax), %rax movq 0x5e0(%rsp), %rcx vmovss (%rax,%rcx,4), %xmm0 vmovss %xmm0, 0x598(%rsp) movq 0x5a0(%rsp), %rax movq %rax, 0x5d8(%rsp) movq $0x1, 0x5d0(%rsp) movq 0x5d8(%rsp), %rax movq (%rax), %rax movq 0x5d0(%rsp), %rcx vmovss (%rax,%rcx,4), %xmm0 vmovss %xmm0, 0x594(%rsp) vmovss 0x5ac(%rsp), %xmm1 vmovss 0x598(%rsp), %xmm0 vucomiss %xmm1, %xmm0 jbe 0x9541d7 vmovss 0x598(%rsp), %xmm0 vmovss %xmm0, 0x5ac(%rsp) vmovss 0x5ac(%rsp), %xmm0 vucomiss 0x594(%rsp), %xmm0 jbe 0x9541fd vmovss 0x594(%rsp), %xmm0 vmovss %xmm0, 0x5ac(%rsp) jmp 0x954433 vmovss 0x14aec26(%rip), %xmm0 # 0x1e02e30 vmovss %xmm0, 0x590(%rsp) leaq 0x5ac(%rsp), %rdi leaq 0x590(%rsp), %rsi callq 0x1374b0 vmovss (%rax), %xmm0 vmovss %xmm0, 0x5ac(%rsp) vmovss 0x14aebf7(%rip), %xmm0 # 0x1e02e34 vmovss %xmm0, 0x58c(%rsp) leaq 0x5ac(%rsp), %rdi leaq 0x58c(%rsp), %rsi callq 0x1374f0 vmovss (%rax), %xmm0 vmovss %xmm0, 0x5ac(%rsp) vmovss 0x5ac(%rsp), %xmm0 vmovd %xmm0, %eax xorl $0x80000000, %eax # imm = 0x80000000 vmovd %eax, %xmm0 callq 0xf74e0 vmovaps %xmm0, %xmm1 vmovss 0x14ac041(%rip), %xmm0 # 0x1e002d0 vaddss %xmm1, %xmm0, %xmm1 vmovss 0x14ac035(%rip), %xmm0 # 0x1e002d0 vdivss %xmm1, %xmm0, %xmm0 vmovss %xmm0, 0x5ac(%rsp) jmp 0x954433 vmovss 0x5ac(%rsp), %xmm0 vmovss %xmm0, 0x68(%rsp) vmovss 0x5ac(%rsp), %xmm0 callq 0xf74e0 vmovss 0x14abffe(%rip), %xmm1 # 0x1e002d0 vaddss %xmm1, %xmm0, %xmm0 callq 0xf74c0 callq 0x137530 vmovaps %xmm0, %xmm1 vmovss 
0x68(%rsp), %xmm0 vmulss %xmm1, %xmm0, %xmm0 vmovss %xmm0, 0x5ac(%rsp) jmp 0x954433 movq 0x5a0(%rsp), %rax movq %rax, 0x5c8(%rsp) movq $0x0, 0x5c0(%rsp) movq 0x5c8(%rsp), %rax movq (%rax), %rax movq 0x5c0(%rsp), %rcx vmovss (%rax,%rcx,4), %xmm0 vmovss %xmm0, 0x588(%rsp) movq 0x5a0(%rsp), %rax movq %rax, 0x5b8(%rsp) movq $0x1, 0x5b0(%rsp) movq 0x5b8(%rsp), %rax movq (%rax), %rax movq 0x5b0(%rsp), %rcx vmovss (%rax,%rcx,4), %xmm0 vmovss %xmm0, 0x584(%rsp) vmovss 0x584(%rsp), %xmm0 vmovd %xmm0, %eax xorl $0x80000000, %eax # imm = 0x80000000 vmovd %eax, %xmm0 vdivss 0x588(%rsp), %xmm0, %xmm0 vmovss %xmm0, 0x580(%rsp) vmovss 0x14abf2a(%rip), %xmm0 # 0x1e002d0 vdivss 0x588(%rsp), %xmm0, %xmm0 vaddss 0x580(%rsp), %xmm0, %xmm0 vmovss %xmm0, 0x57c(%rsp) vmovss 0x5ac(%rsp), %xmm1 vmovss 0x580(%rsp), %xmm0 vucomiss %xmm1, %xmm0 jbe 0x9543e8 vpxor %xmm0, %xmm0, %xmm0 vmovss %xmm0, 0x5ac(%rsp) jmp 0x954431 vmovss 0x5ac(%rsp), %xmm0 vucomiss 0x57c(%rsp), %xmm0 jbe 0x9543fe jmp 0x95442f vmovss 0x5ac(%rsp), %xmm0 vmovss 0x5ac(%rsp), %xmm1 vmulss 0x588(%rsp), %xmm1, %xmm1 vaddss 0x584(%rsp), %xmm1, %xmm1 vmulss %xmm1, %xmm0, %xmm0 vmovss %xmm0, 0x5ac(%rsp) jmp 0x954431 jmp 0x954433 vmovss 0x5ac(%rsp), %xmm0 vmovss %xmm0, 0x64(%rsp) vmovss 0x64(%rsp), %xmm0 vmovss %xmm0, 0x288(%rsp) vmovss 0x288(%rsp), %xmm0 movq 0x2f0(%rsp), %rax movslq 0x28c(%rsp), %rcx vmovss %xmm0, (%rax,%rcx,4) movl 0x28c(%rsp), %eax addl $0x1, %eax movl %eax, 0x28c(%rsp) jmp 0x9533fe movl 0x298(%rsp), %ecx movq 0x2f0(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x2f0(%rsp) movl 0x290(%rsp), %eax addl $0x1, %eax movl %eax, 0x290(%rsp) jmp 0x9533df jmp 0x9544be movl 0x2f8(%rsp), %eax addl $0x1, %eax movl %eax, 0x2f8(%rsp) jmp 0x952f08 jmp 0x9544d6 movq 0x1b0(%rsp), %rdi movq (%rdi), %rax movq -0x18(%rax), %rax addq %rax, %rdi movq 0x390(%rsp), %rdx movq 0x388(%rsp), %rcx leaq 0x310(%rsp), %rsi callq 0x943230 jmp 0x954507 movq 0x390(%rsp), %rax movq %rax, 0x4e0(%rsp) movq 0x4e0(%rsp), 
%rcx movq %rcx, 0x58(%rsp) movb $0x1, %al cmpq $0x0, (%rcx) movb %al, 0x63(%rsp) je 0x95455c movq 0x58(%rsp), %rax movq %rax, 0x740(%rsp) movq 0x740(%rsp), %rcx movq 0x40(%rcx), %rax movslq 0x38(%rcx), %rcx imulq %rcx, %rax cmpq $0x0, %rax sete %al movb %al, 0x63(%rsp) movb 0x63(%rsp), %al movb %al, 0x57(%rsp) movb 0x57(%rsp), %al testb $0x1, %al jne 0x95456e jmp 0x954586 movl $0xffffff9c, 0x3ac(%rsp) # imm = 0xFFFFFF9C movl $0x1, 0x300(%rsp) jmp 0x95459c movl $0x0, 0x3ac(%rsp) movl $0x1, 0x300(%rsp) leaq 0x310(%rsp), %rax movq %rax, 0x410(%rsp) movq 0x410(%rsp), %rax movq %rax, 0x438(%rsp) movq 0x438(%rsp), %rax movq %rax, 0x48(%rsp) cmpq $0x0, 0x8(%rax) je 0x954655 movq 0x48(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x434(%rsp) # imm = 0xFFFFFFFF movl 0x434(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x430(%rsp) cmpl $0x1, 0x430(%rsp) jne 0x954655 movq 0x48(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x954629 movq 0x48(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x954627 jmp 0x954653 movq 0x48(%rsp), %rax movq (%rax), %rax movq %rax, 0x728(%rsp) cmpq $0x0, 0x728(%rsp) je 0x954651 movq 0x728(%rsp), %rdi callq 0x5e480 jmp 0x954653 jmp 0x954655 movq 0x48(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x9546ad movq %rax, %rdi callq 0x5fc90 movl 0x3ac(%rsp), %eax addq $0x748, %rsp # imm = 0x748 popq %rbx popq %r14 popq %r15 popq %rbp retq leaq 0x310(%rsp), %rax movq %rax, 0x418(%rsp) movq 0x418(%rsp), %rax movq %rax, 0x428(%rsp) movq 0x428(%rsp), %rax movq %rax, 0x40(%rsp) cmpq $0x0, 0x8(%rax) je 0x95477b movq 0x40(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x424(%rsp) # imm = 0xFFFFFFFF movl 0x424(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x420(%rsp) cmpl $0x1, 0x420(%rsp) jne 0x95477b movq 0x40(%rsp), 
%rax cmpq $0x0, 0x20(%rax) je 0x95474f movq 0x40(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x95474d jmp 0x954779 movq 0x40(%rsp), %rax movq (%rax), %rax movq %rax, 0x730(%rsp) cmpq $0x0, 0x730(%rsp) je 0x954777 movq 0x730(%rsp), %rdi callq 0x5e480 jmp 0x954779 jmp 0x95477b movq 0x40(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x9547d3 movq %rax, %rdi callq 0x5fc90 jmp 0x9547d5 movq 0x308(%rsp), %rdi callq 0x5e3b0 nopw %cs:(%rax,%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/deconvolution_x86_avx512.cpp
ncnn::Dropout_x86_avx::forward_inplace(ncnn::Mat&, ncnn::Option const&) const
int Dropout_x86_avx::forward_inplace(Mat& bottom_top_blob, const Option& opt) const { if (scale == 1.f) { return 0; } #if __SSE2__ int dims = bottom_top_blob.dims; int elempack = bottom_top_blob.elempack; #if __AVX__ #if __AVX512F__ if (elempack == 16) { Mat tmp; convert_packing(bottom_top_blob, tmp, 8, opt); forward_inplace(tmp, opt); convert_packing(tmp, bottom_top_blob, 16, opt); return 0; } #endif // __AVX512F__ if (elempack == 8) { int w = bottom_top_blob.w; int h = bottom_top_blob.h; int channels = bottom_top_blob.c; int size = w * h; __m256 _scale = _mm256_set1_ps(scale); if (dims == 1) { #pragma omp parallel for num_threads(opt.num_threads) for (int i = 0; i < w; i++) { float* ptr = (float*)bottom_top_blob + i * 8; __m256 _p = _mm256_loadu_ps(ptr); _p = _mm256_mul_ps(_p, _scale); _mm256_storeu_ps(ptr, _p); } } if (dims == 2) { #pragma omp parallel for num_threads(opt.num_threads) for (int i = 0; i < h; i++) { float* ptr = bottom_top_blob.row(i); for (int j = 0; j < w; j++) { __m256 _p = _mm256_loadu_ps(ptr); _p = _mm256_mul_ps(_p, _scale); _mm256_storeu_ps(ptr, _p); ptr += 8; } } } if (dims == 3) { #pragma omp parallel for num_threads(opt.num_threads) for (int q = 0; q < channels; q++) { float* ptr = bottom_top_blob.channel(q); for (int i = 0; i < size; i++) { __m256 _p = _mm256_loadu_ps(ptr); _p = _mm256_mul_ps(_p, _scale); _mm256_storeu_ps(ptr, _p); ptr += 8; } } } return 0; } #endif // __AVX__ if (elempack == 4) { int w = bottom_top_blob.w; int h = bottom_top_blob.h; int channels = bottom_top_blob.c; int size = w * h; __m128 _scale = _mm_set1_ps(scale); if (dims == 1) { #pragma omp parallel for num_threads(opt.num_threads) for (int i = 0; i < w; i++) { float* ptr = (float*)bottom_top_blob + i * 4; __m128 _p = _mm_loadu_ps(ptr); _p = _mm_mul_ps(_p, _scale); _mm_storeu_ps(ptr, _p); } } if (dims == 2) { #pragma omp parallel for num_threads(opt.num_threads) for (int i = 0; i < h; i++) { float* ptr = bottom_top_blob.row(i); for (int j = 0; j < w; j++) { 
__m128 _p = _mm_loadu_ps(ptr); _p = _mm_mul_ps(_p, _scale); _mm_storeu_ps(ptr, _p); ptr += 4; } } } if (dims == 3) { #pragma omp parallel for num_threads(opt.num_threads) for (int q = 0; q < channels; q++) { float* ptr = bottom_top_blob.channel(q); for (int i = 0; i < size; i++) { __m128 _p = _mm_loadu_ps(ptr); _p = _mm_mul_ps(_p, _scale); _mm_storeu_ps(ptr, _p); ptr += 4; } } } return 0; } #endif // __SSE2__ return Dropout::forward_inplace(bottom_top_blob, opt); }
pushq %rbp movq %rsp, %rbp andq $-0x20, %rsp subq $0x760, %rsp # imm = 0x760 movq %rdi, 0x2b8(%rsp) movq %rsi, 0x2b0(%rsp) movq %rdx, 0x2a8(%rsp) movq 0x2b8(%rsp), %rax movq %rax, 0x58(%rsp) movq (%rax), %rcx movq -0x18(%rcx), %rcx vmovss 0xd0(%rax,%rcx), %xmm0 vmovss 0x14243e4(%rip), %xmm1 # 0x1e002d0 vucomiss %xmm1, %xmm0 jne 0x9dbf04 jp 0x9dbf04 movl $0x0, 0x2c0(%rsp) jmp 0x9dd0e3 movq 0x2b0(%rsp), %rax movl 0x28(%rax), %eax movl %eax, 0x2a4(%rsp) movq 0x2b0(%rsp), %rax movl 0x18(%rax), %eax movl %eax, 0x2a0(%rsp) cmpl $0x8, 0x2a0(%rsp) jne 0x9dc873 movq 0x58(%rsp), %rax movq 0x2b0(%rsp), %rcx movl 0x2c(%rcx), %ecx movl %ecx, 0x29c(%rsp) movq 0x2b0(%rsp), %rcx movl 0x30(%rcx), %ecx movl %ecx, 0x298(%rsp) movq 0x2b0(%rsp), %rcx movl 0x38(%rcx), %ecx movl %ecx, 0x294(%rsp) movl 0x29c(%rsp), %ecx movl 0x298(%rsp), %edx imull %edx, %ecx movl %ecx, 0x290(%rsp) movq (%rax), %rcx movq -0x18(%rcx), %rcx vmovss 0xd0(%rax,%rcx), %xmm0 vmovss %xmm0, 0x2c4(%rsp) vmovss 0x2c4(%rsp), %xmm0 vmovss %xmm0, 0x54(%rsp) vmovss %xmm0, 0x62c(%rsp) vmovss %xmm0, 0x628(%rsp) vmovss %xmm0, 0x624(%rsp) vmovss %xmm0, 0x620(%rsp) vmovss %xmm0, 0x61c(%rsp) vmovss %xmm0, 0x618(%rsp) vmovss %xmm0, 0x614(%rsp) vmovss %xmm0, 0x610(%rsp) vmovss 0x624(%rsp), %xmm1 vmovss 0x620(%rsp), %xmm0 vinsertps $0x10, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[2,3] vmovss 0x628(%rsp), %xmm1 vinsertps $0x20, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm1[0],xmm0[3] vmovss 0x62c(%rsp), %xmm1 vinsertps $0x30, %xmm1, %xmm0, %xmm1 # xmm1 = xmm0[0,1,2],xmm1[0] vmovss 0x614(%rsp), %xmm2 vmovss 0x610(%rsp), %xmm0 vinsertps $0x10, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm2[0],xmm0[2,3] vmovss 0x618(%rsp), %xmm2 vinsertps $0x20, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm2[0],xmm0[3] vmovss 0x61c(%rsp), %xmm2 vinsertps $0x30, %xmm2, %xmm0, %xmm2 # xmm2 = xmm0[0,1,2],xmm2[0] vmovaps %xmm2, %xmm0 vinsertf128 $0x1, %xmm1, %ymm0, %ymm0 vmovaps %ymm0, 0x5e0(%rsp) vmovaps 0x5e0(%rsp), %ymm0 vmovaps %ymm0, 0x260(%rsp) cmpl 
$0x1, 0x2a4(%rsp) jne 0x9dc1a1 movl $0x0, 0x25c(%rsp) movl 0x25c(%rsp), %eax cmpl 0x29c(%rsp), %eax jge 0x9dc19f movq 0x2b0(%rsp), %rax movq %rax, 0x2e0(%rsp) movq 0x2e0(%rsp), %rax movq (%rax), %rax movl 0x25c(%rsp), %ecx shll $0x3, %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x250(%rsp) movq 0x250(%rsp), %rax movq %rax, 0x2f8(%rsp) movq 0x2f8(%rsp), %rax vmovups (%rax), %ymm0 vmovaps %ymm0, 0x220(%rsp) vmovaps 0x220(%rsp), %ymm1 vmovaps 0x260(%rsp), %ymm0 vmovaps %ymm1, 0x3a0(%rsp) vmovaps %ymm0, 0x380(%rsp) vmovaps 0x3a0(%rsp), %ymm0 vmulps 0x380(%rsp), %ymm0, %ymm0 vmovaps %ymm0, 0x220(%rsp) movq 0x250(%rsp), %rax vmovaps 0x220(%rsp), %ymm0 movq %rax, 0x468(%rsp) vmovaps %ymm0, 0x440(%rsp) vmovaps 0x440(%rsp), %ymm0 movq 0x468(%rsp), %rax vmovups %ymm0, (%rax) movl 0x25c(%rsp), %eax addl $0x1, %eax movl %eax, 0x25c(%rsp) jmp 0x9dc0a3 jmp 0x9dc1a1 cmpl $0x2, 0x2a4(%rsp) jne 0x9dc315 movl $0x0, 0x21c(%rsp) movl 0x21c(%rsp), %eax cmpl 0x298(%rsp), %eax jge 0x9dc313 movq 0x2b0(%rsp), %rcx movl 0x21c(%rsp), %eax movq %rcx, 0x488(%rsp) movl %eax, 0x484(%rsp) movq 0x488(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0x484(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x210(%rsp) movl $0x0, 0x20c(%rsp) movl 0x20c(%rsp), %eax cmpl 0x29c(%rsp), %eax jge 0x9dc2fb movq 0x210(%rsp), %rax movq %rax, 0x2f0(%rsp) movq 0x2f0(%rsp), %rax vmovups (%rax), %ymm0 vmovaps %ymm0, 0x1e0(%rsp) vmovaps 0x1e0(%rsp), %ymm1 vmovaps 0x260(%rsp), %ymm0 vmovaps %ymm1, 0x360(%rsp) vmovaps %ymm0, 0x340(%rsp) vmovaps 0x360(%rsp), %ymm0 vmulps 0x340(%rsp), %ymm0, %ymm0 vmovaps %ymm0, 0x1e0(%rsp) movq 0x210(%rsp), %rax vmovaps 0x1e0(%rsp), %ymm0 movq %rax, 0x438(%rsp) vmovaps %ymm0, 0x400(%rsp) vmovaps 0x400(%rsp), %ymm0 movq 0x438(%rsp), %rax vmovups %ymm0, (%rax) movq 0x210(%rsp), %rax addq $0x20, %rax movq %rax, 0x210(%rsp) movl 0x20c(%rsp), %eax addl $0x1, %eax movl %eax, 0x20c(%rsp) jmp 0x9dc222 jmp 0x9dc2fd movl 0x21c(%rsp), 
%eax addl $0x1, %eax movl %eax, 0x21c(%rsp) jmp 0x9dc1ba jmp 0x9dc315 cmpl $0x3, 0x2a4(%rsp) jne 0x9dc863 movl $0x0, 0x1dc(%rsp) movl 0x1dc(%rsp), %eax cmpl 0x294(%rsp), %eax jge 0x9dc861 movq 0x2b0(%rsp), %rcx movl 0x1dc(%rsp), %eax leaq 0x188(%rsp), %rdx movq %rdx, 0x4b8(%rsp) movq %rcx, 0x4b0(%rsp) movl %eax, 0x4ac(%rsp) movq 0x4b0(%rsp), %rax movq %rax, 0x48(%rsp) movb $0x0, 0x4ab(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x4ac(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x188(%rsp), %r10 movq %r10, 0x660(%rsp) movl %r9d, 0x65c(%rsp) movl %r8d, 0x658(%rsp) movl %edi, 0x654(%rsp) movq %rsi, 0x648(%rsp) movq %rdx, 0x640(%rsp) movl %ecx, 0x63c(%rsp) movq %rax, 0x630(%rsp) movq 0x660(%rsp), %rcx movq %rcx, 0x40(%rsp) movq 0x648(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x640(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x63c(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x630(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x65c(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x658(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x654(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x6b8(%rsp) movl $0x10, 0x6b4(%rsp) movq 0x6b8(%rsp), %rax movslq 0x6b4(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x6b4(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x48(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x1b0(%rsp) cmpl $0x4, 0x28(%rax) jne 0x9dc4f2 movq 0x48(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x1c8(%rsp) movb $0x1, 0x4ab(%rsp) testb $0x1, 0x4ab(%rsp) jne 0x9dc621 leaq 0x188(%rsp), %rax movq %rax, 0x4c0(%rsp) movq 0x4c0(%rsp), %rax movq 
%rax, 0x718(%rsp) movq 0x718(%rsp), %rax movq %rax, 0x38(%rsp) cmpq $0x0, 0x8(%rax) je 0x9dc5c7 movq 0x38(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x714(%rsp) # imm = 0xFFFFFFFF movl 0x714(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x710(%rsp) cmpl $0x1, 0x710(%rsp) jne 0x9dc5c7 movq 0x38(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x9dc598 movq 0x38(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x9dc596 jmp 0x9dc5c5 movq 0x38(%rsp), %rax movq (%rax), %rax movq %rax, 0x720(%rsp) cmpq $0x0, 0x720(%rsp) je 0x9dc5c3 movq 0x720(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x9dc5c5 jmp 0x9dc5c7 movq 0x38(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x9dc61f movq %rax, %rdi callq 0x5fc90 jmp 0x9dc621 leaq 0x188(%rsp), %rax movq %rax, 0x2d8(%rsp) movq 0x2d8(%rsp), %rax movq (%rax), %rax movq %rax, 0x30(%rsp) leaq 0x188(%rsp), %rax movq %rax, 0x4d0(%rsp) movq 0x4d0(%rsp), %rax movq %rax, 0x6f8(%rsp) movq 0x6f8(%rsp), %rax movq %rax, 0x28(%rsp) cmpq $0x0, 0x8(%rax) je 0x9dc700 movq 0x28(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x6f4(%rsp) # imm = 0xFFFFFFFF movl 0x6f4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x6f0(%rsp) cmpl $0x1, 0x6f0(%rsp) jne 0x9dc700 movq 0x28(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x9dc6d1 movq 0x28(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x9dc6cf jmp 0x9dc6fe movq 0x28(%rsp), %rax movq (%rax), %rax movq %rax, 0x730(%rsp) cmpq $0x0, 0x730(%rsp) je 0x9dc6fc movq 0x730(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x9dc6fe jmp 0x9dc700 movq 0x28(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl 
$0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x9dc758 movq %rax, %rdi callq 0x5fc90 movq 0x30(%rsp), %rax movq %rax, 0x1d0(%rsp) movl $0x0, 0x178(%rsp) movl 0x178(%rsp), %eax cmpl 0x290(%rsp), %eax jge 0x9dc849 movq 0x1d0(%rsp), %rax movq %rax, 0x2e8(%rsp) movq 0x2e8(%rsp), %rax vmovups (%rax), %ymm0 vmovaps %ymm0, 0x140(%rsp) vmovaps 0x140(%rsp), %ymm1 vmovaps 0x260(%rsp), %ymm0 vmovaps %ymm1, 0x320(%rsp) vmovaps %ymm0, 0x300(%rsp) vmovaps 0x320(%rsp), %ymm0 vmulps 0x300(%rsp), %ymm0, %ymm0 vmovaps %ymm0, 0x140(%rsp) movq 0x1d0(%rsp), %rax vmovaps 0x140(%rsp), %ymm0 movq %rax, 0x3f8(%rsp) vmovaps %ymm0, 0x3c0(%rsp) vmovaps 0x3c0(%rsp), %ymm0 movq 0x3f8(%rsp), %rax vmovups %ymm0, (%rax) movq 0x1d0(%rsp), %rax addq $0x20, %rax movq %rax, 0x1d0(%rsp) movl 0x178(%rsp), %eax addl $0x1, %eax movl %eax, 0x178(%rsp) jmp 0x9dc770 jmp 0x9dc84b movl 0x1dc(%rsp), %eax addl $0x1, %eax movl %eax, 0x1dc(%rsp) jmp 0x9dc32e jmp 0x9dc863 movl $0x0, 0x2c0(%rsp) jmp 0x9dd0e3 cmpl $0x4, 0x2a0(%rsp) jne 0x9dd0bb movq 0x58(%rsp), %rax movq 0x2b0(%rsp), %rcx movl 0x2c(%rcx), %ecx movl %ecx, 0x13c(%rsp) movq 0x2b0(%rsp), %rcx movl 0x30(%rcx), %ecx movl %ecx, 0x138(%rsp) movq 0x2b0(%rsp), %rcx movl 0x38(%rcx), %ecx movl %ecx, 0x134(%rsp) movl 0x13c(%rsp), %ecx movl 0x138(%rsp), %edx imull %edx, %ecx movl %ecx, 0x130(%rsp) movq (%rax), %rcx movq -0x18(%rcx), %rcx vmovss 0xd0(%rax,%rcx), %xmm0 vmovss %xmm0, 0x504(%rsp) vbroadcastss 0x504(%rsp), %xmm0 vmovaps %xmm0, 0x4f0(%rsp) vmovaps 0x4f0(%rsp), %xmm0 vmovaps %xmm0, 0x120(%rsp) cmpl $0x1, 0x2a4(%rsp) jne 0x9dca29 movl $0x0, 0x11c(%rsp) movl 0x11c(%rsp), %eax cmpl 0x13c(%rsp), %eax jge 0x9dca27 movq 0x2b0(%rsp), %rax movq %rax, 0x2d0(%rsp) movq 0x2d0(%rsp), %rax movq (%rax), %rax movl 0x11c(%rsp), %ecx shll $0x2, %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x110(%rsp) movq 0x110(%rsp), %rax movq %rax, 0x518(%rsp) movq 0x518(%rsp), %rax vmovups (%rax), %xmm0 vmovaps %xmm0, 0x100(%rsp) vmovaps 
0x100(%rsp), %xmm1 vmovaps 0x120(%rsp), %xmm0 vmovaps %xmm1, 0x570(%rsp) vmovaps %xmm0, 0x560(%rsp) vmovaps 0x570(%rsp), %xmm0 vmulps 0x560(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x100(%rsp) movq 0x110(%rsp), %rax vmovaps 0x100(%rsp), %xmm0 movq %rax, 0x5d8(%rsp) vmovaps %xmm0, 0x5c0(%rsp) vmovaps 0x5c0(%rsp), %xmm0 movq 0x5d8(%rsp), %rax vmovups %xmm0, (%rax) movl 0x11c(%rsp), %eax addl $0x1, %eax movl %eax, 0x11c(%rsp) jmp 0x9dc92b jmp 0x9dca29 cmpl $0x2, 0x2a4(%rsp) jne 0x9dcb9d movl $0x0, 0xfc(%rsp) movl 0xfc(%rsp), %eax cmpl 0x138(%rsp), %eax jge 0x9dcb9b movq 0x2b0(%rsp), %rcx movl 0xfc(%rsp), %eax movq %rcx, 0x478(%rsp) movl %eax, 0x474(%rsp) movq 0x478(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0x474(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0xf0(%rsp) movl $0x0, 0xec(%rsp) movl 0xec(%rsp), %eax cmpl 0x13c(%rsp), %eax jge 0x9dcb83 movq 0xf0(%rsp), %rax movq %rax, 0x510(%rsp) movq 0x510(%rsp), %rax vmovups (%rax), %xmm0 vmovaps %xmm0, 0xd0(%rsp) vmovaps 0xd0(%rsp), %xmm1 vmovaps 0x120(%rsp), %xmm0 vmovaps %xmm1, 0x550(%rsp) vmovaps %xmm0, 0x540(%rsp) vmovaps 0x550(%rsp), %xmm0 vmulps 0x540(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0xd0(%rsp) movq 0xf0(%rsp), %rax vmovaps 0xd0(%rsp), %xmm0 movq %rax, 0x5b8(%rsp) vmovaps %xmm0, 0x5a0(%rsp) vmovaps 0x5a0(%rsp), %xmm0 movq 0x5b8(%rsp), %rax vmovups %xmm0, (%rax) movq 0xf0(%rsp), %rax addq $0x10, %rax movq %rax, 0xf0(%rsp) movl 0xec(%rsp), %eax addl $0x1, %eax movl %eax, 0xec(%rsp) jmp 0x9dcaaa jmp 0x9dcb85 movl 0xfc(%rsp), %eax addl $0x1, %eax movl %eax, 0xfc(%rsp) jmp 0x9dca42 jmp 0x9dcb9d cmpl $0x3, 0x2a4(%rsp) jne 0x9dd0ae movl $0x0, 0xcc(%rsp) movl 0xcc(%rsp), %eax cmpl 0x134(%rsp), %eax jge 0x9dd0ac movq 0x2b0(%rsp), %rcx movl 0xcc(%rsp), %eax leaq 0x78(%rsp), %rdx movq %rdx, 0x4a0(%rsp) movq %rcx, 0x498(%rsp) movl %eax, 0x494(%rsp) movq 0x498(%rsp), %rax movq %rax, 0x20(%rsp) movb $0x0, 0x493(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi 
movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x494(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x78(%rsp), %r10 movq %r10, 0x698(%rsp) movl %r9d, 0x694(%rsp) movl %r8d, 0x690(%rsp) movl %edi, 0x68c(%rsp) movq %rsi, 0x680(%rsp) movq %rdx, 0x678(%rsp) movl %ecx, 0x674(%rsp) movq %rax, 0x668(%rsp) movq 0x698(%rsp), %rcx movq %rcx, 0x18(%rsp) movq 0x680(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x678(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x674(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x668(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x694(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x690(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x68c(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x6a8(%rsp) movl $0x10, 0x6a4(%rsp) movq 0x6a8(%rsp), %rax movslq 0x6a4(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x6a4(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x20(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0xa0(%rsp) cmpl $0x4, 0x28(%rax) jne 0x9dcd74 movq 0x20(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0xb8(%rsp) movb $0x1, 0x493(%rsp) testb $0x1, 0x493(%rsp) jne 0x9dce9a leaq 0x78(%rsp), %rax movq %rax, 0x4c8(%rsp) movq 0x4c8(%rsp), %rax movq %rax, 0x708(%rsp) movq 0x708(%rsp), %rax movq %rax, 0x10(%rsp) cmpq $0x0, 0x8(%rax) je 0x9dce40 movq 0x10(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x704(%rsp) # imm = 0xFFFFFFFF movl 0x704(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x700(%rsp) cmpl $0x1, 0x700(%rsp) jne 0x9dce40 movq 0x10(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x9dce14 movq 0x10(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 
0x9dce12 jmp 0x9dce3e movq 0x10(%rsp), %rax movq (%rax), %rax movq %rax, 0x728(%rsp) cmpq $0x0, 0x728(%rsp) je 0x9dce3c movq 0x728(%rsp), %rdi callq 0x5e480 jmp 0x9dce3e jmp 0x9dce40 movq 0x10(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x9dce98 movq %rax, %rdi callq 0x5fc90 jmp 0x9dce9a leaq 0x78(%rsp), %rax movq %rax, 0x2c8(%rsp) movq 0x2c8(%rsp), %rax movq (%rax), %rax movq %rax, 0x8(%rsp) leaq 0x78(%rsp), %rax movq %rax, 0x4e0(%rsp) movq 0x4e0(%rsp), %rax movq %rax, 0x6d8(%rsp) movq 0x6d8(%rsp), %rax movq %rax, (%rsp) cmpq $0x0, 0x8(%rax) je 0x9dcf64 movq (%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x6d4(%rsp) # imm = 0xFFFFFFFF movl 0x6d4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x6d0(%rsp) cmpl $0x1, 0x6d0(%rsp) jne 0x9dcf64 movq (%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x9dcf39 movq (%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x9dcf37 jmp 0x9dcf62 movq (%rsp), %rax movq (%rax), %rax movq %rax, 0x740(%rsp) cmpq $0x0, 0x740(%rsp) je 0x9dcf60 movq 0x740(%rsp), %rdi callq 0x5e480 jmp 0x9dcf62 jmp 0x9dcf64 movq (%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x9dcfbb movq %rax, %rdi callq 0x5fc90 movq 0x8(%rsp), %rax movq %rax, 0xc0(%rsp) movl $0x0, 0x74(%rsp) movl 0x74(%rsp), %eax cmpl 0x130(%rsp), %eax jge 0x9dd094 movq 0xc0(%rsp), %rax movq %rax, 0x508(%rsp) movq 0x508(%rsp), %rax vmovups (%rax), %xmm0 vmovaps %xmm0, 0x60(%rsp) vmovaps 0x60(%rsp), %xmm1 vmovaps 0x120(%rsp), %xmm0 vmovaps %xmm1, 0x530(%rsp) vmovaps %xmm0, 0x520(%rsp) vmovaps 0x530(%rsp), %xmm0 vmulps 0x520(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x60(%rsp) movq 
0xc0(%rsp), %rax vmovaps 0x60(%rsp), %xmm0 movq %rax, 0x598(%rsp) vmovaps %xmm0, 0x580(%rsp) vmovaps 0x580(%rsp), %xmm0 movq 0x598(%rsp), %rax vmovups %xmm0, (%rax) movq 0xc0(%rsp), %rax addq $0x10, %rax movq %rax, 0xc0(%rsp) movl 0x74(%rsp), %eax addl $0x1, %eax movl %eax, 0x74(%rsp) jmp 0x9dcfd0 jmp 0x9dd096 movl 0xcc(%rsp), %eax addl $0x1, %eax movl %eax, 0xcc(%rsp) jmp 0x9dcbb6 jmp 0x9dd0ae movl $0x0, 0x2c0(%rsp) jmp 0x9dd0e3 movq 0x58(%rsp), %rdi movq (%rdi), %rax addq -0x18(%rax), %rdi movq 0x2b0(%rsp), %rsi movq 0x2a8(%rsp), %rdx callq 0x9d8510 movl %eax, 0x2c0(%rsp) movl 0x2c0(%rsp), %eax movq %rbp, %rsp popq %rbp vzeroupper retq nopw %cs:(%rax,%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/dropout_x86_avx.cpp
virtual thunk to ncnn::Dropout_x86_avx::forward_inplace(ncnn::Mat&, ncnn::Option const&) const
int Dropout_x86_avx::forward_inplace(Mat& bottom_top_blob, const Option& opt) const { if (scale == 1.f) { return 0; } #if __SSE2__ int dims = bottom_top_blob.dims; int elempack = bottom_top_blob.elempack; #if __AVX__ #if __AVX512F__ if (elempack == 16) { Mat tmp; convert_packing(bottom_top_blob, tmp, 8, opt); forward_inplace(tmp, opt); convert_packing(tmp, bottom_top_blob, 16, opt); return 0; } #endif // __AVX512F__ if (elempack == 8) { int w = bottom_top_blob.w; int h = bottom_top_blob.h; int channels = bottom_top_blob.c; int size = w * h; __m256 _scale = _mm256_set1_ps(scale); if (dims == 1) { #pragma omp parallel for num_threads(opt.num_threads) for (int i = 0; i < w; i++) { float* ptr = (float*)bottom_top_blob + i * 8; __m256 _p = _mm256_loadu_ps(ptr); _p = _mm256_mul_ps(_p, _scale); _mm256_storeu_ps(ptr, _p); } } if (dims == 2) { #pragma omp parallel for num_threads(opt.num_threads) for (int i = 0; i < h; i++) { float* ptr = bottom_top_blob.row(i); for (int j = 0; j < w; j++) { __m256 _p = _mm256_loadu_ps(ptr); _p = _mm256_mul_ps(_p, _scale); _mm256_storeu_ps(ptr, _p); ptr += 8; } } } if (dims == 3) { #pragma omp parallel for num_threads(opt.num_threads) for (int q = 0; q < channels; q++) { float* ptr = bottom_top_blob.channel(q); for (int i = 0; i < size; i++) { __m256 _p = _mm256_loadu_ps(ptr); _p = _mm256_mul_ps(_p, _scale); _mm256_storeu_ps(ptr, _p); ptr += 8; } } } return 0; } #endif // __AVX__ if (elempack == 4) { int w = bottom_top_blob.w; int h = bottom_top_blob.h; int channels = bottom_top_blob.c; int size = w * h; __m128 _scale = _mm_set1_ps(scale); if (dims == 1) { #pragma omp parallel for num_threads(opt.num_threads) for (int i = 0; i < w; i++) { float* ptr = (float*)bottom_top_blob + i * 4; __m128 _p = _mm_loadu_ps(ptr); _p = _mm_mul_ps(_p, _scale); _mm_storeu_ps(ptr, _p); } } if (dims == 2) { #pragma omp parallel for num_threads(opt.num_threads) for (int i = 0; i < h; i++) { float* ptr = bottom_top_blob.row(i); for (int j = 0; j < w; j++) { 
__m128 _p = _mm_loadu_ps(ptr); _p = _mm_mul_ps(_p, _scale); _mm_storeu_ps(ptr, _p); ptr += 4; } } } if (dims == 3) { #pragma omp parallel for num_threads(opt.num_threads) for (int q = 0; q < channels; q++) { float* ptr = bottom_top_blob.channel(q); for (int i = 0; i < size; i++) { __m128 _p = _mm_loadu_ps(ptr); _p = _mm_mul_ps(_p, _scale); _mm_storeu_ps(ptr, _p); ptr += 4; } } } return 0; } #endif // __SSE2__ return Dropout::forward_inplace(bottom_top_blob, opt); }
movq %rdi, -0x8(%rsp) movq %rsi, -0x10(%rsp) movq %rdx, -0x18(%rsp) movq -0x8(%rsp), %rdi movq (%rdi), %rax movq -0x58(%rax), %rax addq %rax, %rdi movq -0x10(%rsp), %rsi movq -0x18(%rsp), %rdx jmp 0x9dbea0 nopl (%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/dropout_x86_avx.cpp
ncnn::ELU_x86_avx::forward_inplace(ncnn::Mat&, ncnn::Option const&) const
int ELU_x86_avx::forward_inplace(Mat& bottom_top_blob, const Option& opt) const { int w = bottom_top_blob.w; int h = bottom_top_blob.h; int channels = bottom_top_blob.c; int elempack = bottom_top_blob.elempack; int size = w * h * elempack; #pragma omp parallel for num_threads(opt.num_threads) for (int q = 0; q < channels; q++) { float* ptr = bottom_top_blob.channel(q); int i = 0; #if __SSE2__ #if __AVX__ #if __AVX512F__ __m512 _alpha512 = _mm512_set1_ps(alpha); for (; i + 15 < size; i += 16) { __m512 _p = _mm512_loadu_ps(ptr); _mm512_storeu_ps(ptr, elu_avx512(_p, _alpha512)); ptr += 16; } #endif // __AVX512F__ __m256 _alpha256 = _mm256_set1_ps(alpha); for (; i + 7 < size; i += 8) { __m256 _p = _mm256_loadu_ps(ptr); _mm256_storeu_ps(ptr, elu_avx(_p, _alpha256)); ptr += 8; } #endif // __AVX__ __m128 _alpha128 = _mm_set1_ps(alpha); for (; i + 3 < size; i += 4) { __m128 _p = _mm_load_ps(ptr); _mm_store_ps(ptr, elu_sse(_p, _alpha128)); ptr += 4; } #endif // __SSE2__ for (; i < size; i++) { if (*ptr < 0.f) *ptr = static_cast<float>(alpha * (exp(*ptr) - 1.f)); ptr++; } } return 0; }
pushq %rbp movq %rsp, %rbp andq $-0x20, %rsp subq $0x1960, %rsp # imm = 0x1960 movq %rdi, 0x130(%rsp) movq %rsi, 0x128(%rsp) movq %rdx, 0x120(%rsp) movq 0x130(%rsp), %rax movq %rax, 0x38(%rsp) movq 0x128(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x11c(%rsp) movq 0x128(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0x118(%rsp) movq 0x128(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0x114(%rsp) movq 0x128(%rsp), %rax movl 0x18(%rax), %eax movl %eax, 0x110(%rsp) movl 0x11c(%rsp), %eax imull 0x118(%rsp), %eax imull 0x110(%rsp), %eax movl %eax, 0x10c(%rsp) movl $0x0, 0x108(%rsp) movl 0x108(%rsp), %eax cmpl 0x114(%rsp), %eax jge 0xa09900 movq 0x128(%rsp), %rcx movl 0x108(%rsp), %eax leaq 0xb8(%rsp), %rdx movq %rdx, 0x148(%rsp) movq %rcx, 0x140(%rsp) movl %eax, 0x13c(%rsp) movq 0x140(%rsp), %rax movq %rax, 0x30(%rsp) movb $0x0, 0x13b(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x13c(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0xb8(%rsp), %r10 movq %r10, 0x320(%rsp) movl %r9d, 0x31c(%rsp) movl %r8d, 0x318(%rsp) movl %edi, 0x314(%rsp) movq %rsi, 0x308(%rsp) movq %rdx, 0x300(%rsp) movl %ecx, 0x2fc(%rsp) movq %rax, 0x2f0(%rsp) movq 0x320(%rsp), %rcx movq %rcx, 0x28(%rsp) movq 0x308(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x300(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x2fc(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x2f0(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x31c(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x318(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x314(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x330(%rsp) movl $0x10, 0x32c(%rsp) movq 0x330(%rsp), %rax movslq 0x32c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x32c(%rsp), %edx movslq %edx, %rdx 
andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x30(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0xe0(%rsp) cmpl $0x4, 0x28(%rax) jne 0xa07b79 movq 0x30(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0xf8(%rsp) movb $0x1, 0x13b(%rsp) testb $0x1, 0x13b(%rsp) jne 0xa07ca8 leaq 0xb8(%rsp), %rax movq %rax, 0x158(%rsp) movq 0x158(%rsp), %rax movq %rax, 0x360(%rsp) movq 0x360(%rsp), %rax movq %rax, 0x20(%rsp) cmpq $0x0, 0x8(%rax) je 0xa07c4e movq 0x20(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x35c(%rsp) # imm = 0xFFFFFFFF movl 0x35c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x358(%rsp) cmpl $0x1, 0x358(%rsp) jne 0xa07c4e movq 0x20(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0xa07c1f movq 0x20(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0xa07c1d jmp 0xa07c4c movq 0x20(%rsp), %rax movq (%rax), %rax movq %rax, 0x368(%rsp) cmpq $0x0, 0x368(%rsp) je 0xa07c4a movq 0x368(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0xa07c4c jmp 0xa07c4e movq 0x20(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0xa07ca6 movq %rax, %rdi callq 0x5fc90 jmp 0xa07ca8 leaq 0xb8(%rsp), %rax movq %rax, 0x150(%rsp) movq 0x150(%rsp), %rax movq (%rax), %rax movq %rax, 0x18(%rsp) leaq 0xb8(%rsp), %rax movq %rax, 0x160(%rsp) movq 0x160(%rsp), %rax movq %rax, 0x350(%rsp) movq 0x350(%rsp), %rax movq %rax, 0x10(%rsp) cmpq $0x0, 0x8(%rax) je 0xa07d87 movq 0x10(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x34c(%rsp) # imm = 0xFFFFFFFF movl 0x34c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x348(%rsp) cmpl $0x1, 0x348(%rsp) jne 0xa07d87 movq 0x10(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0xa07d58 movq 0x10(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), 
%rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0xa07d56 jmp 0xa07d85 movq 0x10(%rsp), %rax movq (%rax), %rax movq %rax, 0x370(%rsp) cmpq $0x0, 0x370(%rsp) je 0xa07d83 movq 0x370(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0xa07d85 jmp 0xa07d87 movq 0x10(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0xa07ddf movq %rax, %rdi callq 0x5fc90 movq 0x38(%rsp), %rax movq 0x18(%rsp), %rcx movq %rcx, 0x100(%rsp) movl $0x0, 0xa8(%rsp) movq (%rax), %rcx movq -0x18(%rcx), %rcx vmovss 0xd0(%rax,%rcx), %xmm0 vmovss %xmm0, 0x174(%rsp) vmovss 0x174(%rsp), %xmm0 vmovss %xmm0, 0xc(%rsp) vmovss %xmm0, 0x3fc(%rsp) vmovss %xmm0, 0x3f8(%rsp) vmovss %xmm0, 0x3f4(%rsp) vmovss %xmm0, 0x3f0(%rsp) vmovss %xmm0, 0x3ec(%rsp) vmovss %xmm0, 0x3e8(%rsp) vmovss %xmm0, 0x3e4(%rsp) vmovss %xmm0, 0x3e0(%rsp) vmovss 0x3f4(%rsp), %xmm1 vmovss 0x3f0(%rsp), %xmm0 vinsertps $0x10, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[2,3] vmovss 0x3f8(%rsp), %xmm1 vinsertps $0x20, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm1[0],xmm0[3] vmovss 0x3fc(%rsp), %xmm1 vinsertps $0x30, %xmm1, %xmm0, %xmm1 # xmm1 = xmm0[0,1,2],xmm1[0] vmovss 0x3e4(%rsp), %xmm2 vmovss 0x3e0(%rsp), %xmm0 vinsertps $0x10, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm2[0],xmm0[2,3] vmovss 0x3e8(%rsp), %xmm2 vinsertps $0x20, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm2[0],xmm0[3] vmovss 0x3ec(%rsp), %xmm2 vinsertps $0x30, %xmm2, %xmm0, %xmm2 # xmm2 = xmm0[0,1,2],xmm2[0] vmovaps %xmm2, %xmm0 vinsertf128 $0x1, %xmm1, %ymm0, %ymm0 vmovaps %ymm0, 0x3c0(%rsp) vmovaps 0x3c0(%rsp), %ymm0 vmovaps %ymm0, 0x80(%rsp) movl 0xa8(%rsp), %eax addl $0x7, %eax cmpl 0x10c(%rsp), %eax jge 0xa08cc6 movq 0x100(%rsp), %rax movq %rax, 0x178(%rsp) movq 0x178(%rsp), %rax vmovups (%rax), %ymm0 vmovaps %ymm0, 0x60(%rsp) movq 0x100(%rsp), %rax vmovaps 0x60(%rsp), %ymm1 
vmovaps 0x80(%rsp), %ymm0 vmovaps %ymm1, 0x220(%rsp) vmovaps %ymm0, 0x200(%rsp) vxorps %xmm0, %xmm0, %xmm0 vmovaps %ymm0, 0x460(%rsp) vmovaps 0x460(%rsp), %ymm2 vmovaps 0x220(%rsp), %ymm1 vmovaps %ymm2, 0x420(%rsp) vmovaps %ymm1, 0x400(%rsp) vmovaps 0x420(%rsp), %ymm1 vmovaps 0x400(%rsp), %ymm2 vmaxps %ymm2, %ymm1, %ymm1 vmovaps %ymm1, 0x1e0(%rsp) vmovaps %ymm0, 0x440(%rsp) vmovaps 0x440(%rsp), %ymm2 vmovaps 0x220(%rsp), %ymm1 vmovaps %ymm2, 0x4a0(%rsp) vmovaps %ymm1, 0x480(%rsp) vmovaps 0x4a0(%rsp), %ymm1 vmovaps 0x480(%rsp), %ymm2 vminps %ymm2, %ymm1, %ymm1 vmovaps %ymm1, 0x1c0(%rsp) vmovaps 0x1c0(%rsp), %ymm1 vmovaps %ymm1, 0x5e0(%rsp) vmovaps %ymm0, 0x640(%rsp) vmovaps 0x640(%rsp), %ymm0 vmovaps %ymm0, 0x5c0(%rsp) vmovaps 0x13fbb50(%rip), %ymm0 # 0x1e03b80 vmovaps %ymm0, 0x560(%rsp) vmovaps 0x5e0(%rsp), %ymm0 vmovaps %ymm0, 0x680(%rsp) vmovaps 0x13fbb4d(%rip), %ymm0 # 0x1e03ba0 vmovaps %ymm0, 0x660(%rsp) vmovaps 0x680(%rsp), %ymm0 vmovaps 0x660(%rsp), %ymm1 vminps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x5e0(%rsp) vmovaps 0x5e0(%rsp), %ymm0 vmovaps %ymm0, 0x620(%rsp) vmovaps 0x13fbb2b(%rip), %ymm0 # 0x1e03bc0 vmovaps %ymm0, 0x600(%rsp) vmovaps 0x620(%rsp), %ymm0 vmovaps 0x600(%rsp), %ymm1 vmaxps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x5e0(%rsp) vmovaps 0x5e0(%rsp), %ymm0 vmovaps %ymm0, 0xda0(%rsp) vmovaps 0x13fbb09(%rip), %ymm0 # 0x1e03be0 vmovaps %ymm0, 0xd80(%rsp) vmovaps 0x13fbb18(%rip), %ymm0 # 0x1e03c00 vmovaps %ymm0, 0xd60(%rsp) vmovaps 0xda0(%rsp), %ymm2 vmovaps 0xd80(%rsp), %ymm1 vmovaps %ymm2, 0xe20(%rsp) vmovaps %ymm1, 0xe00(%rsp) vmovaps 0xe20(%rsp), %ymm1 vmovaps 0xe00(%rsp), %ymm2 vmulps %ymm2, %ymm1, %ymm2 vmovaps 0xd60(%rsp), %ymm1 vmovaps %ymm2, 0xde0(%rsp) vmovaps %ymm1, 0xdc0(%rsp) vmovaps 0xde0(%rsp), %ymm1 vmovaps 0xdc0(%rsp), %ymm2 vaddps %ymm2, %ymm1, %ymm1 vmovaps %ymm1, 0x5a0(%rsp) vmovaps 0x5a0(%rsp), %ymm1 vroundps $0x1, %ymm1, %ymm1 vmovaps %ymm1, 0x5c0(%rsp) vmovaps 0x5c0(%rsp), %ymm2 vmovaps 0x5a0(%rsp), %ymm1 vcmpltps %ymm2, %ymm1, %ymm1 
vmovaps %ymm1, 0x540(%rsp) vmovaps 0x540(%rsp), %ymm2 vmovaps 0x560(%rsp), %ymm1 vmovaps %ymm2, 0xe60(%rsp) vmovaps %ymm1, 0xe40(%rsp) vmovaps 0xe60(%rsp), %ymm1 vmovaps 0xe40(%rsp), %ymm2 vandps %ymm2, %ymm1, %ymm1 vmovaps %ymm1, 0x540(%rsp) vmovaps 0x5c0(%rsp), %ymm2 vmovaps 0x540(%rsp), %ymm1 vmovaps %ymm2, 0x6c0(%rsp) vmovaps %ymm1, 0x6a0(%rsp) vmovaps 0x6c0(%rsp), %ymm1 vmovaps 0x6a0(%rsp), %ymm2 vsubps %ymm2, %ymm1, %ymm1 vmovaps %ymm1, 0x5a0(%rsp) vmovaps 0x5a0(%rsp), %ymm2 vmovaps 0x5e0(%rsp), %ymm1 vmovaps %ymm2, 0xfa0(%rsp) vmovaps 0x13fb9da(%rip), %ymm2 # 0x1e03c20 vmovaps %ymm2, 0xf80(%rsp) vmovaps %ymm1, 0xf60(%rsp) vmovaps 0xf60(%rsp), %ymm2 vmovaps 0xfa0(%rsp), %ymm3 vmovaps 0xf80(%rsp), %ymm1 vmovaps %ymm3, 0x1020(%rsp) vmovaps %ymm1, 0x1000(%rsp) vmovaps 0x1020(%rsp), %ymm1 vmovaps 0x1000(%rsp), %ymm3 vmulps %ymm3, %ymm1, %ymm1 vmovaps %ymm2, 0xfe0(%rsp) vmovaps %ymm1, 0xfc0(%rsp) vmovaps 0xfe0(%rsp), %ymm1 vmovaps 0xfc0(%rsp), %ymm2 vsubps %ymm2, %ymm1, %ymm1 vmovaps %ymm1, 0x5e0(%rsp) vmovaps 0x5a0(%rsp), %ymm2 vmovaps 0x5e0(%rsp), %ymm1 vmovaps %ymm2, 0xec0(%rsp) vmovaps 0x13fb951(%rip), %ymm2 # 0x1e03c40 vmovaps %ymm2, 0xea0(%rsp) vmovaps %ymm1, 0xe80(%rsp) vmovaps 0xe80(%rsp), %ymm2 vmovaps 0xec0(%rsp), %ymm3 vmovaps 0xea0(%rsp), %ymm1 vmovaps %ymm3, 0xf40(%rsp) vmovaps %ymm1, 0xf20(%rsp) vmovaps 0xf40(%rsp), %ymm1 vmovaps 0xf20(%rsp), %ymm3 vmulps %ymm3, %ymm1, %ymm1 vmovaps %ymm2, 0xf00(%rsp) vmovaps %ymm1, 0xee0(%rsp) vmovaps 0xf00(%rsp), %ymm1 vmovaps 0xee0(%rsp), %ymm2 vsubps %ymm2, %ymm1, %ymm1 vmovaps %ymm1, 0x5e0(%rsp) vmovaps 0x5e0(%rsp), %ymm1 vmovaps %ymm1, 0x7c0(%rsp) vmovaps %ymm1, 0x7a0(%rsp) vmovaps 0x7c0(%rsp), %ymm1 vmovaps 0x7a0(%rsp), %ymm2 vmulps %ymm2, %ymm1, %ymm1 vmovaps %ymm1, 0x5c0(%rsp) vmovaps 0x13fb8a9(%rip), %ymm1 # 0x1e03c60 vmovaps %ymm1, 0x520(%rsp) vmovaps 0x520(%rsp), %ymm2 vmovaps 0x5e0(%rsp), %ymm1 vmovaps %ymm2, 0xcc0(%rsp) vmovaps %ymm1, 0xca0(%rsp) vmovaps 0x13fb894(%rip), %ymm1 # 0x1e03c80 vmovaps %ymm1, 
0xc80(%rsp) vmovaps 0xcc0(%rsp), %ymm2 vmovaps 0xca0(%rsp), %ymm1 vmovaps %ymm2, 0xd40(%rsp) vmovaps %ymm1, 0xd20(%rsp) vmovaps 0xd40(%rsp), %ymm1 vmovaps 0xd20(%rsp), %ymm2 vmulps %ymm2, %ymm1, %ymm2 vmovaps 0xc80(%rsp), %ymm1 vmovaps %ymm2, 0xd00(%rsp) vmovaps %ymm1, 0xce0(%rsp) vmovaps 0xd00(%rsp), %ymm1 vmovaps 0xce0(%rsp), %ymm2 vaddps %ymm2, %ymm1, %ymm1 vmovaps %ymm1, 0x520(%rsp) vmovaps 0x520(%rsp), %ymm2 vmovaps 0x5e0(%rsp), %ymm1 vmovaps %ymm2, 0xbe0(%rsp) vmovaps %ymm1, 0xbc0(%rsp) vmovaps 0x13fb80b(%rip), %ymm1 # 0x1e03ca0 vmovaps %ymm1, 0xba0(%rsp) vmovaps 0xbe0(%rsp), %ymm2 vmovaps 0xbc0(%rsp), %ymm1 vmovaps %ymm2, 0xc60(%rsp) vmovaps %ymm1, 0xc40(%rsp) vmovaps 0xc60(%rsp), %ymm1 vmovaps 0xc40(%rsp), %ymm2 vmulps %ymm2, %ymm1, %ymm2 vmovaps 0xba0(%rsp), %ymm1 vmovaps %ymm2, 0xc20(%rsp) vmovaps %ymm1, 0xc00(%rsp) vmovaps 0xc20(%rsp), %ymm1 vmovaps 0xc00(%rsp), %ymm2 vaddps %ymm2, %ymm1, %ymm1 vmovaps %ymm1, 0x520(%rsp) vmovaps 0x520(%rsp), %ymm2 vmovaps 0x5e0(%rsp), %ymm1 vmovaps %ymm2, 0xb00(%rsp) vmovaps %ymm1, 0xae0(%rsp) vmovaps 0x13fb782(%rip), %ymm1 # 0x1e03cc0 vmovaps %ymm1, 0xac0(%rsp) vmovaps 0xb00(%rsp), %ymm2 vmovaps 0xae0(%rsp), %ymm1 vmovaps %ymm2, 0xb80(%rsp) vmovaps %ymm1, 0xb60(%rsp) vmovaps 0xb80(%rsp), %ymm1 vmovaps 0xb60(%rsp), %ymm2 vmulps %ymm2, %ymm1, %ymm2 vmovaps 0xac0(%rsp), %ymm1 vmovaps %ymm2, 0xb40(%rsp) vmovaps %ymm1, 0xb20(%rsp) vmovaps 0xb40(%rsp), %ymm1 vmovaps 0xb20(%rsp), %ymm2 vaddps %ymm2, %ymm1, %ymm1 vmovaps %ymm1, 0x520(%rsp) vmovaps 0x520(%rsp), %ymm2 vmovaps 0x5e0(%rsp), %ymm1 vmovaps %ymm2, 0xa20(%rsp) vmovaps %ymm1, 0xa00(%rsp) vmovaps 0x13fb6f9(%rip), %ymm1 # 0x1e03ce0 vmovaps %ymm1, 0x9e0(%rsp) vmovaps 0xa20(%rsp), %ymm2 vmovaps 0xa00(%rsp), %ymm1 vmovaps %ymm2, 0xaa0(%rsp) vmovaps %ymm1, 0xa80(%rsp) vmovaps 0xaa0(%rsp), %ymm1 vmovaps 0xa80(%rsp), %ymm2 vmulps %ymm2, %ymm1, %ymm2 vmovaps 0x9e0(%rsp), %ymm1 vmovaps %ymm2, 0xa60(%rsp) vmovaps %ymm1, 0xa40(%rsp) vmovaps 0xa60(%rsp), %ymm1 vmovaps 0xa40(%rsp), 
%ymm2 vaddps %ymm2, %ymm1, %ymm1 vmovaps %ymm1, 0x520(%rsp) vmovaps 0x520(%rsp), %ymm2 vmovaps 0x5e0(%rsp), %ymm1 vmovaps %ymm2, 0x940(%rsp) vmovaps %ymm1, 0x920(%rsp) vmovaps %ymm0, 0x900(%rsp) vmovaps 0x940(%rsp), %ymm1 vmovaps 0x920(%rsp), %ymm0 vmovaps %ymm1, 0x9c0(%rsp) vmovaps %ymm0, 0x9a0(%rsp) vmovaps 0x9c0(%rsp), %ymm0 vmovaps 0x9a0(%rsp), %ymm1 vmulps %ymm1, %ymm0, %ymm1 vmovaps 0x900(%rsp), %ymm0 vmovaps %ymm1, 0x980(%rsp) vmovaps %ymm0, 0x960(%rsp) vmovaps 0x980(%rsp), %ymm0 vmovaps 0x960(%rsp), %ymm1 vaddps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x520(%rsp) vmovaps 0x520(%rsp), %ymm2 vmovaps 0x5c0(%rsp), %ymm1 vmovaps 0x5e0(%rsp), %ymm0 vmovaps %ymm2, 0x860(%rsp) vmovaps %ymm1, 0x840(%rsp) vmovaps %ymm0, 0x820(%rsp) vmovaps 0x860(%rsp), %ymm1 vmovaps 0x840(%rsp), %ymm0 vmovaps %ymm1, 0x8e0(%rsp) vmovaps %ymm0, 0x8c0(%rsp) vmovaps 0x8e0(%rsp), %ymm0 vmovaps 0x8c0(%rsp), %ymm1 vmulps %ymm1, %ymm0, %ymm1 vmovaps 0x820(%rsp), %ymm0 vmovaps %ymm1, 0x8a0(%rsp) vmovaps %ymm0, 0x880(%rsp) vmovaps 0x8a0(%rsp), %ymm0 vmovaps 0x880(%rsp), %ymm1 vaddps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x520(%rsp) vmovaps 0x520(%rsp), %ymm1 vmovaps 0x560(%rsp), %ymm0 vmovaps %ymm1, 0x700(%rsp) vmovaps %ymm0, 0x6e0(%rsp) vmovaps 0x700(%rsp), %ymm0 vmovaps 0x6e0(%rsp), %ymm1 vaddps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x520(%rsp) vmovaps 0x5a0(%rsp), %ymm0 vmovaps %ymm0, 0x1040(%rsp) vcvttps2dq 0x1040(%rsp), %ymm0 vmovaps %ymm0, 0x580(%rsp) vmovaps 0x580(%rsp), %ymm0 vmovaps %ymm0, 0x1140(%rsp) vmovaps 0x13fb4d0(%rip), %ymm0 # 0x1e03d00 vmovaps %ymm0, 0x1120(%rsp) vmovaps 0x1140(%rsp), %ymm0 vmovaps %ymm0, 0x10a0(%rsp) vmovdqa 0x10a0(%rsp), %xmm0 vmovdqa %xmm0, 0x1110(%rsp) vmovdqa 0x10b0(%rsp), %xmm0 vmovdqa %xmm0, 0x1100(%rsp) vmovaps 0x1120(%rsp), %ymm0 vmovaps %ymm0, 0x1080(%rsp) vmovdqa 0x1080(%rsp), %xmm0 vmovdqa %xmm0, 0x10f0(%rsp) vmovdqa 0x1090(%rsp), %xmm0 vmovdqa %xmm0, 0x10e0(%rsp) vmovdqa 0x1110(%rsp), %xmm1 vmovdqa 0x10f0(%rsp), %xmm0 vmovdqa %xmm1, 0x1270(%rsp) vmovdqa 
%xmm0, 0x1260(%rsp) vmovdqa 0x1270(%rsp), %xmm0 vmovdqa 0x1260(%rsp), %xmm1 vpaddd %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0x1110(%rsp) vmovdqa 0x1100(%rsp), %xmm1 vmovdqa 0x10e0(%rsp), %xmm0 vmovdqa %xmm1, 0x1250(%rsp) vmovdqa %xmm0, 0x1240(%rsp) vmovdqa 0x1250(%rsp), %xmm0 vmovdqa 0x1240(%rsp), %xmm1 vpaddd %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0x1100(%rsp) vmovdqa 0x1110(%rsp), %xmm0 vmovdqa %xmm0, 0x1060(%rsp) vmovdqa 0x1100(%rsp), %xmm0 vmovdqa %xmm0, 0x1070(%rsp) vmovaps 0x1060(%rsp), %ymm0 vmovaps %ymm0, 0x10c0(%rsp) vmovaps 0x10c0(%rsp), %ymm0 vmovaps %ymm0, 0x580(%rsp) vmovaps 0x580(%rsp), %ymm0 vmovaps %ymm0, 0x1200(%rsp) movl $0x17, 0x11fc(%rsp) vmovaps 0x1200(%rsp), %ymm0 vmovaps %ymm0, 0x1180(%rsp) vmovdqa 0x1180(%rsp), %xmm0 vmovdqa %xmm0, 0x11e0(%rsp) vmovdqa 0x1190(%rsp), %xmm0 vmovdqa %xmm0, 0x11d0(%rsp) vmovdqa 0x11e0(%rsp), %xmm0 movl 0x11fc(%rsp), %ecx vmovdqa %xmm0, 0x12b0(%rsp) movl %ecx, 0x12ac(%rsp) vmovdqa 0x12b0(%rsp), %xmm0 vmovd 0x12ac(%rsp), %xmm1 vpslld %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0x11e0(%rsp) vmovdqa 0x11d0(%rsp), %xmm0 movl 0x11fc(%rsp), %ecx vmovdqa %xmm0, 0x1290(%rsp) movl %ecx, 0x128c(%rsp) vmovdqa 0x1290(%rsp), %xmm0 vmovd 0x128c(%rsp), %xmm1 vpslld %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0x11d0(%rsp) vmovdqa 0x11e0(%rsp), %xmm0 vmovdqa %xmm0, 0x1160(%rsp) vmovdqa 0x11d0(%rsp), %xmm0 vmovdqa %xmm0, 0x1170(%rsp) vmovaps 0x1160(%rsp), %ymm0 vmovaps %ymm0, 0x11a0(%rsp) vmovaps 0x11a0(%rsp), %ymm0 vmovaps %ymm0, 0x580(%rsp) vmovaps 0x580(%rsp), %ymm0 vmovaps %ymm0, 0x1220(%rsp) vmovaps 0x1220(%rsp), %ymm0 vmovaps %ymm0, 0x500(%rsp) vmovaps 0x520(%rsp), %ymm1 vmovaps 0x500(%rsp), %ymm0 vmovaps %ymm1, 0x780(%rsp) vmovaps %ymm0, 0x760(%rsp) vmovaps 0x780(%rsp), %ymm0 vmovaps 0x760(%rsp), %ymm1 vmulps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x520(%rsp) vmovaps 0x520(%rsp), %ymm1 movl $0x3f800000, 0x24c(%rsp) # imm = 0x3F800000 vmovss 0x24c(%rsp), %xmm0 vmovss %xmm0, 0x8(%rsp) vmovss %xmm0, 0x3bc(%rsp) vmovss %xmm0, 0x3b8(%rsp) vmovss %xmm0, 
0x3b4(%rsp) vmovss %xmm0, 0x3b0(%rsp) vmovss %xmm0, 0x3ac(%rsp) vmovss %xmm0, 0x3a8(%rsp) vmovss %xmm0, 0x3a4(%rsp) vmovss %xmm0, 0x3a0(%rsp) vmovss 0x3b4(%rsp), %xmm2 vmovss 0x3b0(%rsp), %xmm0 vinsertps $0x10, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm2[0],xmm0[2,3] vmovss 0x3b8(%rsp), %xmm2 vinsertps $0x20, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm2[0],xmm0[3] vmovss 0x3bc(%rsp), %xmm2 vinsertps $0x30, %xmm2, %xmm0, %xmm2 # xmm2 = xmm0[0,1,2],xmm2[0] vmovss 0x3a4(%rsp), %xmm3 vmovss 0x3a0(%rsp), %xmm0 vinsertps $0x10, %xmm3, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm3[0],xmm0[2,3] vmovss 0x3a8(%rsp), %xmm3 vinsertps $0x20, %xmm3, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm3[0],xmm0[3] vmovss 0x3ac(%rsp), %xmm3 vinsertps $0x30, %xmm3, %xmm0, %xmm3 # xmm3 = xmm0[0,1,2],xmm3[0] vmovaps %xmm3, %xmm0 vinsertf128 $0x1, %xmm2, %ymm0, %ymm0 vmovaps %ymm0, 0x380(%rsp) vmovaps 0x380(%rsp), %ymm0 vmovaps %ymm1, 0x4e0(%rsp) vmovaps %ymm0, 0x4c0(%rsp) vmovaps 0x4e0(%rsp), %ymm0 vsubps 0x4c0(%rsp), %ymm0, %ymm0 vmovaps %ymm0, 0x1c0(%rsp) vmovaps 0x1e0(%rsp), %ymm1 vmovaps 0x200(%rsp), %ymm2 vmovaps 0x1c0(%rsp), %ymm0 vmovaps %ymm2, 0x800(%rsp) vmovaps %ymm0, 0x7e0(%rsp) vmovaps 0x800(%rsp), %ymm0 vmulps 0x7e0(%rsp), %ymm0, %ymm0 vmovaps %ymm1, 0x740(%rsp) vmovaps %ymm0, 0x720(%rsp) vmovaps 0x740(%rsp), %ymm0 vaddps 0x720(%rsp), %ymm0, %ymm0 movq %rax, 0x1b8(%rsp) vmovaps %ymm0, 0x180(%rsp) vmovaps 0x180(%rsp), %ymm0 movq 0x1b8(%rsp), %rax vmovups %ymm0, (%rax) movq 0x100(%rsp), %rax addq $0x20, %rax movq %rax, 0x100(%rsp) movl 0xa8(%rsp), %eax addl $0x8, %eax movl %eax, 0xa8(%rsp) jmp 0xa07efd movq 0x38(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx vmovss 0xd0(%rax,%rcx), %xmm0 vmovss %xmm0, 0x264(%rsp) vbroadcastss 0x264(%rsp), %xmm0 vmovaps %xmm0, 0x250(%rsp) vmovaps 0x250(%rsp), %xmm0 vmovaps %xmm0, 0x50(%rsp) movl 0xa8(%rsp), %eax addl $0x3, %eax cmpl 0x10c(%rsp), %eax jge 0xa09841 movq 0x100(%rsp), %rax movq %rax, 0x268(%rsp) movq 0x268(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps 
%xmm0, 0x40(%rsp) movq 0x100(%rsp), %rax vmovaps 0x40(%rsp), %xmm1 vmovaps 0x50(%rsp), %xmm0 vmovaps %xmm1, 0x2c0(%rsp) vmovaps %xmm0, 0x2b0(%rsp) vxorps %xmm0, %xmm0, %xmm0 vmovaps %xmm0, 0x12f0(%rsp) vmovaps 0x12f0(%rsp), %xmm2 vmovaps 0x2c0(%rsp), %xmm1 vmovaps %xmm2, 0x12d0(%rsp) vmovaps %xmm1, 0x12c0(%rsp) vmovaps 0x12d0(%rsp), %xmm1 vmovaps 0x12c0(%rsp), %xmm2 vmaxps %xmm2, %xmm1, %xmm1 vmovaps %xmm1, 0x2a0(%rsp) vmovaps %xmm0, 0x12e0(%rsp) vmovaps 0x12e0(%rsp), %xmm2 vmovaps 0x2c0(%rsp), %xmm1 vmovaps %xmm2, 0x1310(%rsp) vmovaps %xmm1, 0x1300(%rsp) vmovaps 0x1310(%rsp), %xmm1 vmovaps 0x1300(%rsp), %xmm2 vminps %xmm2, %xmm1, %xmm1 vmovaps %xmm1, 0x290(%rsp) vmovaps 0x290(%rsp), %xmm1 vmovaps %xmm1, 0x13b0(%rsp) vmovaps %xmm0, 0x1420(%rsp) vmovaps 0x1420(%rsp), %xmm0 vmovaps %xmm0, 0x13a0(%rsp) vmovaps 0x13f9c7a(%rip), %xmm0 # 0x1e02ab0 vmovaps %xmm0, 0x1370(%rsp) vmovaps 0x13b0(%rsp), %xmm0 vmovaps %xmm0, 0x1440(%rsp) vmovaps 0x13f9c67(%rip), %xmm0 # 0x1e02ac0 vmovaps %xmm0, 0x1430(%rsp) vmovaps 0x1440(%rsp), %xmm0 vmovaps 0x1430(%rsp), %xmm1 vminps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x13b0(%rsp) vmovaps 0x13b0(%rsp), %xmm0 vmovaps %xmm0, 0x1410(%rsp) vmovaps 0x13f9c35(%rip), %xmm0 # 0x1e02ad0 vmovaps %xmm0, 0x1400(%rsp) vmovaps 0x1410(%rsp), %xmm0 vmovaps 0x1400(%rsp), %xmm1 vmaxps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x13b0(%rsp) vmovaps 0x13b0(%rsp), %xmm0 vmovaps %xmm0, 0x1520(%rsp) vmovaps 0x13f9c03(%rip), %xmm0 # 0x1e02ae0 vmovaps %xmm0, 0x1510(%rsp) vmovaps 0x1520(%rsp), %xmm0 vmovaps 0x1510(%rsp), %xmm1 vmulps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x1390(%rsp) vmovaps 0x1390(%rsp), %xmm0 vmovaps %xmm0, 0x14a0(%rsp) vmovaps 0x13f9bd1(%rip), %xmm0 # 0x1e02af0 vmovaps %xmm0, 0x1490(%rsp) vmovaps 0x14a0(%rsp), %xmm1 vmovaps 0x1490(%rsp), %xmm2 vaddps %xmm2, %xmm1, %xmm1 vmovaps %xmm1, 0x1390(%rsp) vmovaps 0x1390(%rsp), %xmm1 vmovaps %xmm1, 0x1560(%rsp) vcvttps2dq 0x1560(%rsp), %xmm1 vmovdqa %xmm1, 0x1380(%rsp) vmovdqa 0x1380(%rsp), %xmm1 vmovdqa %xmm1, 
0x1570(%rsp) vcvtdq2ps 0x1570(%rsp), %xmm1 vmovaps %xmm1, 0x13a0(%rsp) vmovaps 0x13a0(%rsp), %xmm2 vmovaps 0x1390(%rsp), %xmm1 vmovaps %xmm2, 0x1590(%rsp) vmovaps %xmm1, 0x1580(%rsp) vmovaps 0x1580(%rsp), %xmm1 vmovaps 0x1590(%rsp), %xmm2 vcmpltps %xmm2, %xmm1, %xmm1 vmovaps %xmm1, 0x1360(%rsp) vmovaps 0x1360(%rsp), %xmm2 vmovaps 0x1370(%rsp), %xmm1 vmovaps %xmm2, 0x15b0(%rsp) vmovaps %xmm1, 0x15a0(%rsp) vmovdqa 0x15b0(%rsp), %xmm1 vmovdqa 0x15a0(%rsp), %xmm2 vpand %xmm2, %xmm1, %xmm1 vmovdqa %xmm1, 0x1360(%rsp) vmovaps 0x13a0(%rsp), %xmm2 vmovaps 0x1360(%rsp), %xmm1 vmovaps %xmm2, 0x1460(%rsp) vmovaps %xmm1, 0x1450(%rsp) vmovaps 0x1460(%rsp), %xmm1 vmovaps 0x1450(%rsp), %xmm2 vsubps %xmm2, %xmm1, %xmm1 vmovaps %xmm1, 0x1390(%rsp) vmovaps 0x1390(%rsp), %xmm2 vmovaps 0x13b0(%rsp), %xmm1 vmovaps %xmm2, 0x1650(%rsp) vmovaps 0x13f9a84(%rip), %xmm2 # 0x1e02b00 vmovaps %xmm2, 0x1640(%rsp) vmovaps %xmm1, 0x1630(%rsp) vmovaps 0x1630(%rsp), %xmm2 vmovaps 0x1650(%rsp), %xmm3 vmovaps 0x1640(%rsp), %xmm1 vmovaps %xmm3, 0x1690(%rsp) vmovaps %xmm1, 0x1680(%rsp) vmovaps 0x1690(%rsp), %xmm1 vmovaps 0x1680(%rsp), %xmm3 vmulps %xmm3, %xmm1, %xmm1 vmovaps %xmm2, 0x1670(%rsp) vmovaps %xmm1, 0x1660(%rsp) vmovaps 0x1670(%rsp), %xmm1 vmovaps 0x1660(%rsp), %xmm2 vsubps %xmm2, %xmm1, %xmm1 vmovaps %xmm1, 0x13b0(%rsp) vmovaps 0x1390(%rsp), %xmm2 vmovaps 0x13b0(%rsp), %xmm1 vmovaps %xmm2, 0x15e0(%rsp) vmovaps 0x13f99eb(%rip), %xmm2 # 0x1e02b10 vmovaps %xmm2, 0x15d0(%rsp) vmovaps %xmm1, 0x15c0(%rsp) vmovaps 0x15c0(%rsp), %xmm2 vmovaps 0x15e0(%rsp), %xmm3 vmovaps 0x15d0(%rsp), %xmm1 vmovaps %xmm3, 0x1620(%rsp) vmovaps %xmm1, 0x1610(%rsp) vmovaps 0x1620(%rsp), %xmm1 vmovaps 0x1610(%rsp), %xmm3 vmulps %xmm3, %xmm1, %xmm1 vmovaps %xmm2, 0x1600(%rsp) vmovaps %xmm1, 0x15f0(%rsp) vmovaps 0x1600(%rsp), %xmm1 vmovaps 0x15f0(%rsp), %xmm2 vsubps %xmm2, %xmm1, %xmm1 vmovaps %xmm1, 0x13b0(%rsp) vmovaps 0x13b0(%rsp), %xmm1 vmovaps %xmm1, 0x1500(%rsp) vmovaps %xmm1, 0x14f0(%rsp) vmovaps 0x1500(%rsp), %xmm1 
vmovaps 0x14f0(%rsp), %xmm2 vmulps %xmm2, %xmm1, %xmm1 vmovaps %xmm1, 0x13a0(%rsp) vmovaps 0x13f9933(%rip), %xmm1 # 0x1e02b20 vmovaps %xmm1, 0x1350(%rsp) vmovaps 0x1350(%rsp), %xmm2 vmovaps 0x13b0(%rsp), %xmm1 vmovaps %xmm2, 0x18f0(%rsp) vmovaps %xmm1, 0x18e0(%rsp) vmovaps 0x13f990e(%rip), %xmm1 # 0x1e02b30 vmovaps %xmm1, 0x18d0(%rsp) vmovaps 0x18f0(%rsp), %xmm2 vmovaps 0x18e0(%rsp), %xmm1 vmovaps %xmm2, 0x1930(%rsp) vmovaps %xmm1, 0x1920(%rsp) vmovaps 0x1930(%rsp), %xmm1 vmovaps 0x1920(%rsp), %xmm2 vmulps %xmm2, %xmm1, %xmm2 vmovaps 0x18d0(%rsp), %xmm1 vmovaps %xmm2, 0x1910(%rsp) vmovaps %xmm1, 0x1900(%rsp) vmovaps 0x1910(%rsp), %xmm1 vmovaps 0x1900(%rsp), %xmm2 vaddps %xmm2, %xmm1, %xmm1 vmovaps %xmm1, 0x1350(%rsp) vmovaps 0x1350(%rsp), %xmm2 vmovaps 0x13b0(%rsp), %xmm1 vmovaps %xmm2, 0x1880(%rsp) vmovaps %xmm1, 0x1870(%rsp) vmovaps 0x13f9875(%rip), %xmm1 # 0x1e02b40 vmovaps %xmm1, 0x1860(%rsp) vmovaps 0x1880(%rsp), %xmm2 vmovaps 0x1870(%rsp), %xmm1 vmovaps %xmm2, 0x18c0(%rsp) vmovaps %xmm1, 0x18b0(%rsp) vmovaps 0x18c0(%rsp), %xmm1 vmovaps 0x18b0(%rsp), %xmm2 vmulps %xmm2, %xmm1, %xmm2 vmovaps 0x1860(%rsp), %xmm1 vmovaps %xmm2, 0x18a0(%rsp) vmovaps %xmm1, 0x1890(%rsp) vmovaps 0x18a0(%rsp), %xmm1 vmovaps 0x1890(%rsp), %xmm2 vaddps %xmm2, %xmm1, %xmm1 vmovaps %xmm1, 0x1350(%rsp) vmovaps 0x1350(%rsp), %xmm2 vmovaps 0x13b0(%rsp), %xmm1 vmovaps %xmm2, 0x1810(%rsp) vmovaps %xmm1, 0x1800(%rsp) vmovaps 0x13f97dc(%rip), %xmm1 # 0x1e02b50 vmovaps %xmm1, 0x17f0(%rsp) vmovaps 0x1810(%rsp), %xmm2 vmovaps 0x1800(%rsp), %xmm1 vmovaps %xmm2, 0x1850(%rsp) vmovaps %xmm1, 0x1840(%rsp) vmovaps 0x1850(%rsp), %xmm1 vmovaps 0x1840(%rsp), %xmm2 vmulps %xmm2, %xmm1, %xmm2 vmovaps 0x17f0(%rsp), %xmm1 vmovaps %xmm2, 0x1830(%rsp) vmovaps %xmm1, 0x1820(%rsp) vmovaps 0x1830(%rsp), %xmm1 vmovaps 0x1820(%rsp), %xmm2 vaddps %xmm2, %xmm1, %xmm1 vmovaps %xmm1, 0x1350(%rsp) vmovaps 0x1350(%rsp), %xmm2 vmovaps 0x13b0(%rsp), %xmm1 vmovaps %xmm2, 0x17a0(%rsp) vmovaps %xmm1, 0x1790(%rsp) vmovaps 
0x13f9743(%rip), %xmm1 # 0x1e02b60 vmovaps %xmm1, 0x1780(%rsp) vmovaps 0x17a0(%rsp), %xmm2 vmovaps 0x1790(%rsp), %xmm1 vmovaps %xmm2, 0x17e0(%rsp) vmovaps %xmm1, 0x17d0(%rsp) vmovaps 0x17e0(%rsp), %xmm1 vmovaps 0x17d0(%rsp), %xmm2 vmulps %xmm2, %xmm1, %xmm2 vmovaps 0x1780(%rsp), %xmm1 vmovaps %xmm2, 0x17c0(%rsp) vmovaps %xmm1, 0x17b0(%rsp) vmovaps 0x17c0(%rsp), %xmm1 vmovaps 0x17b0(%rsp), %xmm2 vaddps %xmm2, %xmm1, %xmm1 vmovaps %xmm1, 0x1350(%rsp) vmovaps 0x1350(%rsp), %xmm2 vmovaps 0x13b0(%rsp), %xmm1 vmovaps %xmm2, 0x1730(%rsp) vmovaps %xmm1, 0x1720(%rsp) vmovaps %xmm0, 0x1710(%rsp) vmovaps 0x1730(%rsp), %xmm1 vmovaps 0x1720(%rsp), %xmm0 vmovaps %xmm1, 0x1770(%rsp) vmovaps %xmm0, 0x1760(%rsp) vmovaps 0x1770(%rsp), %xmm0 vmovaps 0x1760(%rsp), %xmm1 vmulps %xmm1, %xmm0, %xmm1 vmovaps 0x1710(%rsp), %xmm0 vmovaps %xmm1, 0x1750(%rsp) vmovaps %xmm0, 0x1740(%rsp) vmovaps 0x1750(%rsp), %xmm0 vmovaps 0x1740(%rsp), %xmm1 vaddps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x1350(%rsp) vmovaps 0x1350(%rsp), %xmm2 vmovaps 0x13a0(%rsp), %xmm1 vmovaps 0x13b0(%rsp), %xmm0 vmovaps %xmm2, 0x16c0(%rsp) vmovaps %xmm1, 0x16b0(%rsp) vmovaps %xmm0, 0x16a0(%rsp) vmovaps 0x16c0(%rsp), %xmm1 vmovaps 0x16b0(%rsp), %xmm0 vmovaps %xmm1, 0x1700(%rsp) vmovaps %xmm0, 0x16f0(%rsp) vmovaps 0x1700(%rsp), %xmm0 vmovaps 0x16f0(%rsp), %xmm1 vmulps %xmm1, %xmm0, %xmm1 vmovaps 0x16a0(%rsp), %xmm0 vmovaps %xmm1, 0x16e0(%rsp) vmovaps %xmm0, 0x16d0(%rsp) vmovaps 0x16e0(%rsp), %xmm0 vmovaps 0x16d0(%rsp), %xmm1 vaddps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x1350(%rsp) vmovaps 0x1350(%rsp), %xmm1 vmovaps 0x1370(%rsp), %xmm0 vmovaps %xmm1, 0x1480(%rsp) vmovaps %xmm0, 0x1470(%rsp) vmovaps 0x1480(%rsp), %xmm0 vmovaps 0x1470(%rsp), %xmm1 vaddps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x1350(%rsp) vmovaps 0x1390(%rsp), %xmm0 vmovaps %xmm0, 0x1550(%rsp) vcvttps2dq 0x1550(%rsp), %xmm0 vmovdqa %xmm0, 0x1380(%rsp) vmovdqa 0x1380(%rsp), %xmm0 vmovdqa %xmm0, 0x13d0(%rsp) vmovdqa 0x13f950a(%rip), %xmm0 # 0x1e02b70 vmovdqa %xmm0, 
0x13c0(%rsp) vmovdqa 0x13d0(%rsp), %xmm0 vmovdqa 0x13c0(%rsp), %xmm1 vpaddd %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0x1380(%rsp) vmovdqa 0x1380(%rsp), %xmm0 vmovdqa %xmm0, 0x13f0(%rsp) movl $0x17, 0x13ec(%rsp) vmovdqa 0x13f0(%rsp), %xmm0 vmovd 0x13ec(%rsp), %xmm1 vpslld %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0x1380(%rsp) vmovdqa 0x1380(%rsp), %xmm0 vmovdqa %xmm0, 0x1940(%rsp) vmovdqa 0x1940(%rsp), %xmm0 vmovdqa %xmm0, 0x1340(%rsp) vmovaps 0x1350(%rsp), %xmm1 vmovaps 0x1340(%rsp), %xmm0 vmovaps %xmm1, 0x14e0(%rsp) vmovaps %xmm0, 0x14d0(%rsp) vmovaps 0x14e0(%rsp), %xmm0 vmovaps 0x14d0(%rsp), %xmm1 vmulps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x1350(%rsp) vmovaps 0x1350(%rsp), %xmm1 movl $0x3f800000, 0x2ec(%rsp) # imm = 0x3F800000 vbroadcastss 0x2ec(%rsp), %xmm0 vmovaps %xmm0, 0x2d0(%rsp) vmovaps 0x2d0(%rsp), %xmm0 vmovaps %xmm1, 0x1330(%rsp) vmovaps %xmm0, 0x1320(%rsp) vmovaps 0x1330(%rsp), %xmm0 vsubps 0x1320(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x290(%rsp) vmovaps 0x2a0(%rsp), %xmm1 vmovaps 0x2b0(%rsp), %xmm2 vmovaps 0x290(%rsp), %xmm0 vmovaps %xmm2, 0x1540(%rsp) vmovaps %xmm0, 0x1530(%rsp) vmovaps 0x1540(%rsp), %xmm0 vmulps 0x1530(%rsp), %xmm0, %xmm0 vmovaps %xmm1, 0x14c0(%rsp) vmovaps %xmm0, 0x14b0(%rsp) vmovaps 0x14c0(%rsp), %xmm0 vaddps 0x14b0(%rsp), %xmm0, %xmm0 movq %rax, 0x288(%rsp) vmovaps %xmm0, 0x270(%rsp) vmovaps 0x270(%rsp), %xmm0 movq 0x288(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x100(%rsp), %rax addq $0x10, %rax movq %rax, 0x100(%rsp) movl 0xa8(%rsp), %eax addl $0x4, %eax movl %eax, 0xa8(%rsp) jmp 0xa08d06 jmp 0xa09843 movl 0xa8(%rsp), %eax cmpl 0x10c(%rsp), %eax jge 0xa098e8 movq 0x100(%rsp), %rax vxorps %xmm0, %xmm0, %xmm0 vucomiss (%rax), %xmm0 jbe 0xa098be movq 0x38(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx vmovss 0xd0(%rax,%rcx), %xmm0 vmovss %xmm0, 0x4(%rsp) movq 0x100(%rsp), %rax vmovss (%rax), %xmm0 vzeroupper callq 0xf74e0 vmovaps %xmm0, %xmm1 vmovss 0x4(%rsp), %xmm0 vmovss 0x13f6a26(%rip), %xmm2 # 0x1e002d0 vsubss %xmm2, %xmm1, %xmm1 vmulss 
%xmm1, %xmm0, %xmm0 movq 0x100(%rsp), %rax vmovss %xmm0, (%rax) movq 0x100(%rsp), %rax addq $0x4, %rax movq %rax, 0x100(%rsp) movl 0xa8(%rsp), %eax addl $0x1, %eax movl %eax, 0xa8(%rsp) jmp 0xa09843 jmp 0xa098ea movl 0x108(%rsp), %eax addl $0x1, %eax movl %eax, 0x108(%rsp) jmp 0xa079b5 xorl %eax, %eax movq %rbp, %rsp popq %rbp vzeroupper retq nopw (%rax,%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/elu_x86_avx.cpp
int ncnn::binary_op_scalar_inplace<ncnn::BinaryOp_x86_fma_functor::binary_op_rsub>(ncnn::Mat&, float, ncnn::Option const&)
static int binary_op_scalar_inplace(Mat& a, float b, const Option& opt) { Op op; int w = a.w; int h = a.h; int d = a.d; int channels = a.c; int elempack = a.elempack; int size = w * h * d * elempack; #pragma omp parallel for num_threads(opt.num_threads) for (int q = 0; q < channels; q++) { float* ptr = a.channel(q); int i = 0; #if __SSE2__ #if __AVX__ #if __AVX512F__ __m512 _b_avx512 = _mm512_set1_ps(b); for (; i + 15 < size; i += 16) { __m512 _p = _mm512_loadu_ps(ptr); _p = op.func_pack16(_p, _b_avx512); _mm512_storeu_ps(ptr, _p); ptr += 16; } #endif // __AVX512F__ __m256 _b_avx = _mm256_set1_ps(b); for (; i + 7 < size; i += 8) { __m256 _p = _mm256_loadu_ps(ptr); _p = op.func_pack8(_p, _b_avx); _mm256_storeu_ps(ptr, _p); ptr += 8; } #endif // __AVX__ __m128 _b = _mm_set1_ps((float)b); for (; i + 3 < size; i += 4) { __m128 _p = _mm_load_ps(ptr); _p = op.func_pack4(_p, _b); _mm_store_ps(ptr, _p); ptr += 4; } #endif // __SSE2__ for (; i < size; i++) { *ptr = op.func(*ptr, b); ptr++; } } return 0; }
pushq %rbp movq %rsp, %rbp andq $-0x20, %rsp subq $0x2e0, %rsp # imm = 0x2E0 movq %rdi, 0x140(%rsp) vmovss %xmm0, 0x13c(%rsp) movq %rsi, 0x130(%rsp) movq 0x140(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x128(%rsp) movq 0x140(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0x124(%rsp) movq 0x140(%rsp), %rax movl 0x34(%rax), %eax movl %eax, 0x120(%rsp) movq 0x140(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0x11c(%rsp) movq 0x140(%rsp), %rax movl 0x18(%rax), %eax movl %eax, 0x118(%rsp) movl 0x128(%rsp), %eax imull 0x124(%rsp), %eax imull 0x120(%rsp), %eax imull 0x118(%rsp), %eax movl %eax, 0x114(%rsp) movl $0x0, 0x110(%rsp) movl 0x110(%rsp), %eax cmpl 0x11c(%rsp), %eax jge 0x152872e movq 0x140(%rsp), %rcx movl 0x110(%rsp), %eax leaq 0xc0(%rsp), %rdx movq %rdx, 0x160(%rsp) movq %rcx, 0x158(%rsp) movl %eax, 0x154(%rsp) movq 0x158(%rsp), %rax movq %rax, 0x38(%rsp) movb $0x0, 0x153(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x154(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0xc0(%rsp), %r10 movq %r10, 0x210(%rsp) movl %r9d, 0x20c(%rsp) movl %r8d, 0x208(%rsp) movl %edi, 0x204(%rsp) movq %rsi, 0x1f8(%rsp) movq %rdx, 0x1f0(%rsp) movl %ecx, 0x1ec(%rsp) movq %rax, 0x1e0(%rsp) movq 0x210(%rsp), %rcx movq %rcx, 0x30(%rsp) movq 0x1f8(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x1f0(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x1ec(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x1e0(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x20c(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x208(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x204(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x220(%rsp) movl $0x10, 0x21c(%rsp) movq 0x220(%rsp), %rax movslq 0x21c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl 
%edx, %edx subl 0x21c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x38(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0xe8(%rsp) cmpl $0x4, 0x28(%rax) jne 0x1528197 movq 0x38(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x100(%rsp) movb $0x1, 0x153(%rsp) testb $0x1, 0x153(%rsp) jne 0x15282c6 leaq 0xc0(%rsp), %rax movq %rax, 0x168(%rsp) movq 0x168(%rsp), %rax movq %rax, 0x230(%rsp) movq 0x230(%rsp), %rax movq %rax, 0x28(%rsp) cmpq $0x0, 0x8(%rax) je 0x152826c movq 0x28(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x22c(%rsp) # imm = 0xFFFFFFFF movl 0x22c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x228(%rsp) cmpl $0x1, 0x228(%rsp) jne 0x152826c movq 0x28(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x152823d movq 0x28(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x152823b jmp 0x152826a movq 0x28(%rsp), %rax movq (%rax), %rax movq %rax, 0x250(%rsp) cmpq $0x0, 0x250(%rsp) je 0x1528268 movq 0x250(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x152826a jmp 0x152826c movq 0x28(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x15282c4 movq %rax, %rdi callq 0x5fc90 jmp 0x15282c6 leaq 0xc0(%rsp), %rax movq %rax, 0x170(%rsp) movq 0x170(%rsp), %rax movq (%rax), %rax movq %rax, 0x18(%rsp) leaq 0xc0(%rsp), %rax movq %rax, 0x148(%rsp) movq 0x148(%rsp), %rax movq %rax, 0x240(%rsp) movq 0x240(%rsp), %rax movq %rax, 0x20(%rsp) cmpq $0x0, 0x8(%rax) je 0x15283a5 movq 0x20(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x23c(%rsp) # imm = 0xFFFFFFFF movl 0x23c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x238(%rsp) cmpl $0x1, 0x238(%rsp) jne 0x15283a5 movq 0x20(%rsp), %rax cmpq $0x0, 0x20(%rax) je 
0x1528376 movq 0x20(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x1528374 jmp 0x15283a3 movq 0x20(%rsp), %rax movq (%rax), %rax movq %rax, 0x248(%rsp) cmpq $0x0, 0x248(%rsp) je 0x15283a1 movq 0x248(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x15283a3 jmp 0x15283a5 movq 0x20(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x15283fd movq %rax, %rdi callq 0x5fc90 movq 0x18(%rsp), %rax movq %rax, 0x108(%rsp) movl $0x0, 0xbc(%rsp) vmovss 0x13c(%rsp), %xmm0 vmovss %xmm0, 0x25c(%rsp) vmovss 0x25c(%rsp), %xmm0 vmovss %xmm0, 0x14(%rsp) vmovss %xmm0, 0x2cc(%rsp) vmovss %xmm0, 0x2c8(%rsp) vmovss %xmm0, 0x2c4(%rsp) vmovss %xmm0, 0x2c0(%rsp) vmovss %xmm0, 0x2bc(%rsp) vmovss %xmm0, 0x2b8(%rsp) vmovss %xmm0, 0x2b4(%rsp) vmovss %xmm0, 0x2b0(%rsp) vmovss 0x2c4(%rsp), %xmm1 vmovss 0x2c0(%rsp), %xmm0 vinsertps $0x10, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[2,3] vmovss 0x2c8(%rsp), %xmm1 vinsertps $0x20, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm1[0],xmm0[3] vmovss 0x2cc(%rsp), %xmm1 vinsertps $0x30, %xmm1, %xmm0, %xmm1 # xmm1 = xmm0[0,1,2],xmm1[0] vmovss 0x2b4(%rsp), %xmm2 vmovss 0x2b0(%rsp), %xmm0 vinsertps $0x10, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm2[0],xmm0[2,3] vmovss 0x2b8(%rsp), %xmm2 vinsertps $0x20, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm2[0],xmm0[3] vmovss 0x2bc(%rsp), %xmm2 vinsertps $0x30, %xmm2, %xmm0, %xmm2 # xmm2 = xmm0[0,1,2],xmm2[0] vmovaps %xmm2, %xmm0 vinsertf128 $0x1, %xmm1, %ymm0, %ymm0 vmovaps %ymm0, 0x280(%rsp) vmovaps 0x280(%rsp), %ymm0 vmovaps %ymm0, 0x80(%rsp) movl 0xbc(%rsp), %eax addl $0x7, %eax cmpl 0x114(%rsp), %eax jge 0x15285c6 movq 0x108(%rsp), %rax movq %rax, 0x178(%rsp) movq 0x178(%rsp), %rax vmovups (%rax), %ymm0 vmovaps %ymm0, 0x60(%rsp) leaq 0x12f(%rsp), %rdi leaq 0x60(%rsp), 
%rsi leaq 0x80(%rsp), %rdx callq 0x153f1f0 vmovaps %ymm0, 0x60(%rsp) movq 0x108(%rsp), %rax vmovaps 0x60(%rsp), %ymm0 movq %rax, 0x1b0(%rsp) vmovaps %ymm0, 0x180(%rsp) vmovaps 0x180(%rsp), %ymm0 movq 0x1b0(%rsp), %rax vmovups %ymm0, (%rax) movq 0x108(%rsp), %rax addq $0x20, %rax movq %rax, 0x108(%rsp) movl 0xbc(%rsp), %eax addl $0x8, %eax movl %eax, 0xbc(%rsp) jmp 0x152850f vmovss 0x13c(%rsp), %xmm0 vmovss %xmm0, 0x27c(%rsp) vbroadcastss 0x27c(%rsp), %xmm0 vmovaps %xmm0, 0x260(%rsp) vmovaps 0x260(%rsp), %xmm0 vmovaps %xmm0, 0x50(%rsp) movl 0xbc(%rsp), %eax addl $0x3, %eax cmpl 0x114(%rsp), %eax jge 0x15286b1 movq 0x108(%rsp), %rax movq %rax, 0x1b8(%rsp) movq 0x1b8(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x40(%rsp) leaq 0x12f(%rsp), %rdi leaq 0x40(%rsp), %rsi leaq 0x50(%rsp), %rdx vzeroupper callq 0x153f240 vmovaps %xmm0, 0x40(%rsp) movq 0x108(%rsp), %rax vmovaps 0x40(%rsp), %xmm0 movq %rax, 0x1d8(%rsp) vmovaps %xmm0, 0x1c0(%rsp) vmovaps 0x1c0(%rsp), %xmm0 movq 0x1d8(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x108(%rsp), %rax addq $0x10, %rax movq %rax, 0x108(%rsp) movl 0xbc(%rsp), %eax addl $0x4, %eax movl %eax, 0xbc(%rsp) jmp 0x15285fa jmp 0x15286b3 movl 0xbc(%rsp), %eax cmpl 0x114(%rsp), %eax jge 0x1528716 movq 0x108(%rsp), %rsi leaq 0x12f(%rsp), %rdi leaq 0x13c(%rsp), %rdx vzeroupper callq 0x153f280 movq 0x108(%rsp), %rax vmovss %xmm0, (%rax) movq 0x108(%rsp), %rax addq $0x4, %rax movq %rax, 0x108(%rsp) movl 0xbc(%rsp), %eax addl $0x1, %eax movl %eax, 0xbc(%rsp) jmp 0x15286b3 jmp 0x1528718 movl 0x110(%rsp), %eax addl $0x1, %eax movl %eax, 0x110(%rsp) jmp 0x1527fd3 xorl %eax, %eax movq %rbp, %rsp popq %rbp vzeroupper retq nopl (%rax,%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/binaryop_x86_fma.cpp
virtual thunk to ncnn::BinaryOp_x86_fma::forward_inplace(ncnn::Mat&, ncnn::Option const&) const
int BinaryOp_x86_fma::forward_inplace(Mat& bottom_top_blob, const Option& opt) const { using namespace BinaryOp_x86_fma_functor; if (op_type == Operation_ADD) return binary_op_scalar_inplace<binary_op_add>(bottom_top_blob, b, opt); if (op_type == Operation_SUB) return binary_op_scalar_inplace<binary_op_sub>(bottom_top_blob, b, opt); if (op_type == Operation_MUL) return binary_op_scalar_inplace<binary_op_mul>(bottom_top_blob, b, opt); if (op_type == Operation_DIV) return binary_op_scalar_inplace<binary_op_div>(bottom_top_blob, b, opt); if (op_type == Operation_MAX) return binary_op_scalar_inplace<binary_op_max>(bottom_top_blob, b, opt); if (op_type == Operation_MIN) return binary_op_scalar_inplace<binary_op_min>(bottom_top_blob, b, opt); if (op_type == Operation_POW) return binary_op_scalar_inplace<binary_op_pow>(bottom_top_blob, b, opt); if (op_type == Operation_RSUB) return binary_op_scalar_inplace<binary_op_rsub>(bottom_top_blob, b, opt); if (op_type == Operation_RDIV) return binary_op_scalar_inplace<binary_op_rdiv>(bottom_top_blob, b, opt); return 0; }
movq %rdi, -0x8(%rsp) movq %rsi, -0x10(%rsp) movq %rdx, -0x18(%rsp) movq -0x8(%rsp), %rdi movq (%rdi), %rax movq -0x58(%rax), %rax addq %rax, %rdi movq -0x10(%rsp), %rsi movq -0x18(%rsp), %rdx jmp 0x15243d0 nopl (%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/binaryop_x86_fma.cpp
int ncnn::binary_op_7_13_19_29<ncnn::BinaryOp_x86_fma_functor::binary_op_add>(ncnn::Mat const&, ncnn::Mat const&, ncnn::Mat&, ncnn::Option const&)
static int binary_op_7_13_19_29(const Mat& a, const Mat& b, Mat& c, const Option& opt) { Op op; int w = a.w; int h = a.h; int d = a.d; int channels = a.c; int elempack = a.elempack; int size = w * h * d * elempack; // type 7 13 19 29 c.create_like(a, opt.blob_allocator); if (c.empty()) return -100; #pragma omp parallel for num_threads(opt.num_threads) for (int q = 0; q < channels; q++) { const float* ptr = a.channel(q); const float* ptr1 = b.channel(q); float* outptr = c.channel(q); int i = 0; #if __SSE2__ #if __AVX__ #if __AVX512F__ for (; i + 15 < size; i += 16) { __m512 _p = _mm512_loadu_ps(ptr); __m512 _p1 = _mm512_loadu_ps(ptr1); __m512 _outp = op.func_pack16(_p, _p1); _mm512_storeu_ps(outptr, _outp); ptr += 16; ptr1 += 16; outptr += 16; } #endif // __AVX512F__ for (; i + 7 < size; i += 8) { __m256 _p = _mm256_loadu_ps(ptr); __m256 _p1 = _mm256_loadu_ps(ptr1); __m256 _outp = op.func_pack8(_p, _p1); _mm256_storeu_ps(outptr, _outp); ptr += 8; ptr1 += 8; outptr += 8; } #endif // __AVX__ for (; i + 3 < size; i += 4) { __m128 _p = _mm_load_ps(ptr); __m128 _p1 = _mm_load_ps(ptr1); __m128 _outp = op.func_pack4(_p, _p1); _mm_store_ps(outptr, _outp); ptr += 4; ptr1 += 4; outptr += 4; } #endif // __SSE2__ for (; i < size; i++) { *outptr = op.func(*ptr, *ptr1); ptr += 1; ptr1 += 1; outptr += 1; } } return 0; }
pushq %rbp movq %rsp, %rbp andq $-0x20, %rsp subq $0x560, %rsp # imm = 0x560 movq %rdi, 0x270(%rsp) movq %rsi, 0x268(%rsp) movq %rdx, 0x260(%rsp) movq %rcx, 0x258(%rsp) movq 0x270(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x250(%rsp) movq 0x270(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0x24c(%rsp) movq 0x270(%rsp), %rax movl 0x34(%rax), %eax movl %eax, 0x248(%rsp) movq 0x270(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0x244(%rsp) movq 0x270(%rsp), %rax movl 0x18(%rax), %eax movl %eax, 0x240(%rsp) movl 0x250(%rsp), %eax imull 0x24c(%rsp), %eax imull 0x248(%rsp), %eax imull 0x240(%rsp), %eax movl %eax, 0x23c(%rsp) movq 0x260(%rsp), %rdi movq 0x270(%rsp), %rsi movq 0x258(%rsp), %rax movq 0x8(%rax), %rdx callq 0x65b60 movq 0x260(%rsp), %rax movq %rax, 0x280(%rsp) movq 0x280(%rsp), %rcx movq %rcx, 0x80(%rsp) movb $0x1, %al cmpq $0x0, (%rcx) movb %al, 0x8f(%rsp) je 0x15290c1 movq 0x80(%rsp), %rax movq %rax, 0x398(%rsp) movq 0x398(%rsp), %rcx movq 0x40(%rcx), %rax movslq 0x38(%rcx), %rcx imulq %rcx, %rax cmpq $0x0, %rax sete %al movb %al, 0x8f(%rsp) movb 0x8f(%rsp), %al testb $0x1, %al jne 0x15290ce jmp 0x15290de movl $0xffffff9c, 0x27c(%rsp) # imm = 0xFFFFFF9C jmp 0x152a049 movl $0x0, 0x238(%rsp) movl 0x238(%rsp), %eax cmpl 0x244(%rsp), %eax jge 0x152a03e movq 0x270(%rsp), %rcx movl 0x238(%rsp), %eax leaq 0x1e8(%rsp), %rdx movq %rdx, 0x2b0(%rsp) movq %rcx, 0x2a8(%rsp) movl %eax, 0x2a4(%rsp) movq 0x2a8(%rsp), %rax movq %rax, 0x78(%rsp) movb $0x0, 0x2a3(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x2a4(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x1e8(%rsp), %r10 movq %r10, 0x408(%rsp) movl %r9d, 0x404(%rsp) movl %r8d, 0x400(%rsp) movl %edi, 0x3fc(%rsp) movq %rsi, 0x3f0(%rsp) movq %rdx, 0x3e8(%rsp) movl %ecx, 0x3e4(%rsp) movq %rax, 0x3d8(%rsp) movq 0x408(%rsp), %rcx movq %rcx, 0x70(%rsp) movq 
0x3f0(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x3e8(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x3e4(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x3d8(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x404(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x400(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x3fc(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x460(%rsp) movl $0x10, 0x45c(%rsp) movq 0x460(%rsp), %rax movslq 0x45c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x45c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x78(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x210(%rsp) cmpl $0x4, 0x28(%rax) jne 0x15292ad movq 0x78(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x228(%rsp) movb $0x1, 0x2a3(%rsp) testb $0x1, 0x2a3(%rsp) jne 0x15293dc leaq 0x1e8(%rsp), %rax movq %rax, 0x2c8(%rsp) movq 0x2c8(%rsp), %rax movq %rax, 0x500(%rsp) movq 0x500(%rsp), %rax movq %rax, 0x68(%rsp) cmpq $0x0, 0x8(%rax) je 0x1529382 movq 0x68(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x4fc(%rsp) # imm = 0xFFFFFFFF movl 0x4fc(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x4f8(%rsp) cmpl $0x1, 0x4f8(%rsp) jne 0x1529382 movq 0x68(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1529353 movq 0x68(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x1529351 jmp 0x1529380 movq 0x68(%rsp), %rax movq (%rax), %rax movq %rax, 0x508(%rsp) cmpq $0x0, 0x508(%rsp) je 0x152937e movq 0x508(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x1529380 jmp 0x1529382 movq 0x68(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq 
$0x0, 0x8(%rax) jmp 0x15293da movq %rax, %rdi callq 0x5fc90 jmp 0x15293dc leaq 0x1e8(%rsp), %rax movq %rax, 0x2c0(%rsp) movq 0x2c0(%rsp), %rax movq (%rax), %rax movq %rax, 0x60(%rsp) leaq 0x1e8(%rsp), %rax movq %rax, 0x2d8(%rsp) movq 0x2d8(%rsp), %rax movq %rax, 0x4e0(%rsp) movq 0x4e0(%rsp), %rax movq %rax, 0x58(%rsp) cmpq $0x0, 0x8(%rax) je 0x15294bb movq 0x58(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x4dc(%rsp) # imm = 0xFFFFFFFF movl 0x4dc(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x4d8(%rsp) cmpl $0x1, 0x4d8(%rsp) jne 0x15294bb movq 0x58(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x152948c movq 0x58(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x152948a jmp 0x15294b9 movq 0x58(%rsp), %rax movq (%rax), %rax movq %rax, 0x518(%rsp) cmpq $0x0, 0x518(%rsp) je 0x15294b7 movq 0x518(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x15294b9 jmp 0x15294bb movq 0x58(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1529513 movq %rax, %rdi callq 0x5fc90 movq 0x60(%rsp), %rax movq %rax, 0x230(%rsp) movq 0x268(%rsp), %rcx movl 0x238(%rsp), %eax leaq 0x188(%rsp), %rdx movq %rdx, 0x298(%rsp) movq %rcx, 0x290(%rsp) movl %eax, 0x28c(%rsp) movq 0x290(%rsp), %rax movq %rax, 0x50(%rsp) movb $0x0, 0x28b(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x28c(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x188(%rsp), %r10 movq %r10, 0x440(%rsp) movl %r9d, 0x43c(%rsp) movl %r8d, 0x438(%rsp) movl %edi, 0x434(%rsp) movq %rsi, 0x428(%rsp) movq %rdx, 0x420(%rsp) movl %ecx, 0x41c(%rsp) movq %rax, 0x410(%rsp) movq 0x440(%rsp), %rcx movq %rcx, 0x48(%rsp) movq 0x428(%rsp), %rax 
movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x420(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x41c(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x410(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x43c(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x438(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x434(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x450(%rsp) movl $0x10, 0x44c(%rsp) movq 0x450(%rsp), %rax movslq 0x44c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x44c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x50(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x1b0(%rsp) cmpl $0x4, 0x28(%rax) jne 0x15296d0 movq 0x50(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x1c8(%rsp) movb $0x1, 0x28b(%rsp) testb $0x1, 0x28b(%rsp) jne 0x15297ff leaq 0x188(%rsp), %rax movq %rax, 0x2d0(%rsp) movq 0x2d0(%rsp), %rax movq %rax, 0x4f0(%rsp) movq 0x4f0(%rsp), %rax movq %rax, 0x40(%rsp) cmpq $0x0, 0x8(%rax) je 0x15297a5 movq 0x40(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x4ec(%rsp) # imm = 0xFFFFFFFF movl 0x4ec(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x4e8(%rsp) cmpl $0x1, 0x4e8(%rsp) jne 0x15297a5 movq 0x40(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1529776 movq 0x40(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x1529774 jmp 0x15297a3 movq 0x40(%rsp), %rax movq (%rax), %rax movq %rax, 0x510(%rsp) cmpq $0x0, 0x510(%rsp) je 0x15297a1 movq 0x510(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x15297a3 jmp 0x15297a5 movq 0x40(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 
0x15297fd movq %rax, %rdi callq 0x5fc90 jmp 0x15297ff leaq 0x188(%rsp), %rax movq %rax, 0x2b8(%rsp) movq 0x2b8(%rsp), %rax movq (%rax), %rax movq %rax, 0x38(%rsp) leaq 0x188(%rsp), %rax movq %rax, 0x2e8(%rsp) movq 0x2e8(%rsp), %rax movq %rax, 0x4c0(%rsp) movq 0x4c0(%rsp), %rax movq %rax, 0x30(%rsp) cmpq $0x0, 0x8(%rax) je 0x15298de movq 0x30(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x4bc(%rsp) # imm = 0xFFFFFFFF movl 0x4bc(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x4b8(%rsp) cmpl $0x1, 0x4b8(%rsp) jne 0x15298de movq 0x30(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x15298af movq 0x30(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x15298ad jmp 0x15298dc movq 0x30(%rsp), %rax movq (%rax), %rax movq %rax, 0x528(%rsp) cmpq $0x0, 0x528(%rsp) je 0x15298da movq 0x528(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x15298dc jmp 0x15298de movq 0x30(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1529936 movq %rax, %rdi callq 0x5fc90 movq 0x38(%rsp), %rax movq %rax, 0x1d0(%rsp) movq 0x260(%rsp), %rcx movl 0x238(%rsp), %eax leaq 0x138(%rsp), %rdx movq %rdx, 0x318(%rsp) movq %rcx, 0x310(%rsp) movl %eax, 0x30c(%rsp) movq 0x310(%rsp), %rax movq %rax, 0x28(%rsp) movb $0x0, 0x30b(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x30c(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x138(%rsp), %r10 movq %r10, 0x3d0(%rsp) movl %r9d, 0x3cc(%rsp) movl %r8d, 0x3c8(%rsp) movl %edi, 0x3c4(%rsp) movq %rsi, 0x3b8(%rsp) movq %rdx, 0x3b0(%rsp) movl %ecx, 0x3ac(%rsp) movq %rax, 0x3a0(%rsp) movq 0x3d0(%rsp), %rcx movq %rcx, 0x20(%rsp) movq 0x3b8(%rsp), %rax movq %rax, (%rcx) 
movq $0x0, 0x8(%rcx) movq 0x3b0(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x3ac(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x3a0(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x3cc(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x3c8(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x3c4(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x470(%rsp) movl $0x10, 0x46c(%rsp) movq 0x470(%rsp), %rax movslq 0x46c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x46c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x28(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x160(%rsp) cmpl $0x4, 0x28(%rax) jne 0x1529af3 movq 0x28(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x178(%rsp) movb $0x1, 0x30b(%rsp) testb $0x1, 0x30b(%rsp) jne 0x1529c22 leaq 0x138(%rsp), %rax movq %rax, 0x320(%rsp) movq 0x320(%rsp), %rax movq %rax, 0x480(%rsp) movq 0x480(%rsp), %rax movq %rax, 0x18(%rsp) cmpq $0x0, 0x8(%rax) je 0x1529bc8 movq 0x18(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x47c(%rsp) # imm = 0xFFFFFFFF movl 0x47c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x478(%rsp) cmpl $0x1, 0x478(%rsp) jne 0x1529bc8 movq 0x18(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1529b99 movq 0x18(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x1529b97 jmp 0x1529bc6 movq 0x18(%rsp), %rax movq (%rax), %rax movq %rax, 0x548(%rsp) cmpq $0x0, 0x548(%rsp) je 0x1529bc4 movq 0x548(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x1529bc6 jmp 0x1529bc8 movq 0x18(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1529c20 movq %rax, 
%rdi callq 0x5fc90 jmp 0x1529c22 leaq 0x138(%rsp), %rax movq %rax, 0x328(%rsp) movq 0x328(%rsp), %rax movq (%rax), %rax movq %rax, 0x10(%rsp) leaq 0x138(%rsp), %rax movq %rax, 0x2f8(%rsp) movq 0x2f8(%rsp), %rax movq %rax, 0x4a0(%rsp) movq 0x4a0(%rsp), %rax movq %rax, 0x8(%rsp) cmpq $0x0, 0x8(%rax) je 0x1529d01 movq 0x8(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x49c(%rsp) # imm = 0xFFFFFFFF movl 0x49c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x498(%rsp) cmpl $0x1, 0x498(%rsp) jne 0x1529d01 movq 0x8(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1529cd2 movq 0x8(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x1529cd0 jmp 0x1529cff movq 0x8(%rsp), %rax movq (%rax), %rax movq %rax, 0x538(%rsp) cmpq $0x0, 0x538(%rsp) je 0x1529cfd movq 0x538(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x1529cff jmp 0x1529d01 movq 0x8(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1529d59 movq %rax, %rdi callq 0x5fc90 movq 0x10(%rsp), %rax movq %rax, 0x180(%rsp) movl $0x0, 0x134(%rsp) movl 0x134(%rsp), %eax addl $0x7, %eax cmpl 0x23c(%rsp), %eax jge 0x1529e81 movq 0x230(%rsp), %rax movq %rax, 0x338(%rsp) movq 0x338(%rsp), %rax vmovups (%rax), %ymm0 vmovaps %ymm0, 0x100(%rsp) movq 0x1d0(%rsp), %rax movq %rax, 0x330(%rsp) movq 0x330(%rsp), %rax vmovups (%rax), %ymm0 vmovaps %ymm0, 0xe0(%rsp) leaq 0x257(%rsp), %rdi leaq 0x100(%rsp), %rsi leaq 0xe0(%rsp), %rdx callq 0x153c240 vmovaps %ymm0, 0xc0(%rsp) movq 0x180(%rsp), %rax vmovaps 0xc0(%rsp), %ymm0 movq %rax, 0x368(%rsp) vmovaps %ymm0, 0x340(%rsp) vmovaps 0x340(%rsp), %ymm0 movq 0x368(%rsp), %rax vmovups %ymm0, (%rax) movq 0x230(%rsp), %rax addq $0x20, %rax movq %rax, 0x230(%rsp) movq 0x1d0(%rsp), %rax addq $0x20, %rax movq %rax, 0x1d0(%rsp) movq 0x180(%rsp), %rax addq $0x20, %rax 
movq %rax, 0x180(%rsp) movl 0x134(%rsp), %eax addl $0x8, %eax movl %eax, 0x134(%rsp) jmp 0x1529d71 jmp 0x1529e83 movl 0x134(%rsp), %eax addl $0x3, %eax cmpl 0x23c(%rsp), %eax jge 0x1529f96 movq 0x230(%rsp), %rax movq %rax, 0x378(%rsp) movq 0x378(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0xb0(%rsp) movq 0x1d0(%rsp), %rax movq %rax, 0x370(%rsp) movq 0x370(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0xa0(%rsp) leaq 0x257(%rsp), %rdi leaq 0xb0(%rsp), %rsi leaq 0xa0(%rsp), %rdx vzeroupper callq 0x153c290 vmovaps %xmm0, 0x90(%rsp) movq 0x180(%rsp), %rax vmovaps 0x90(%rsp), %xmm0 movq %rax, 0x390(%rsp) vmovaps %xmm0, 0x380(%rsp) vmovaps 0x380(%rsp), %xmm0 movq 0x390(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x230(%rsp), %rax addq $0x10, %rax movq %rax, 0x230(%rsp) movq 0x1d0(%rsp), %rax addq $0x10, %rax movq %rax, 0x1d0(%rsp) movq 0x180(%rsp), %rax addq $0x10, %rax movq %rax, 0x180(%rsp) movl 0x134(%rsp), %eax addl $0x4, %eax movl %eax, 0x134(%rsp) jmp 0x1529e83 jmp 0x1529f98 movl 0x134(%rsp), %eax cmpl 0x23c(%rsp), %eax jge 0x152a026 movq 0x230(%rsp), %rsi movq 0x1d0(%rsp), %rdx leaq 0x257(%rsp), %rdi vzeroupper callq 0x153c2d0 movq 0x180(%rsp), %rax vmovss %xmm0, (%rax) movq 0x230(%rsp), %rax addq $0x4, %rax movq %rax, 0x230(%rsp) movq 0x1d0(%rsp), %rax addq $0x4, %rax movq %rax, 0x1d0(%rsp) movq 0x180(%rsp), %rax addq $0x4, %rax movq %rax, 0x180(%rsp) movl 0x134(%rsp), %eax addl $0x1, %eax movl %eax, 0x134(%rsp) jmp 0x1529f98 jmp 0x152a028 movl 0x238(%rsp), %eax addl $0x1, %eax movl %eax, 0x238(%rsp) jmp 0x15290e9 movl $0x0, 0x27c(%rsp) movl 0x27c(%rsp), %eax movq %rbp, %rsp popq %rbp vzeroupper retq nopl (%rax,%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/binaryop_x86_fma.cpp
int ncnn::binary_op_6_11_16_25<ncnn::BinaryOp_x86_fma_functor::binary_op_add>(ncnn::Mat const&, ncnn::Mat const&, ncnn::Mat&, ncnn::Option const&)
static int binary_op_6_11_16_25(const Mat& a, const Mat& b, Mat& c, const Option& opt) { Op op; int w = a.w; int h = a.h; int d = a.d; int channels = a.c; int elempack = a.elempack; int size = w * h * d * elempack; // type 6 11 16 25 c.create_like(a, opt.blob_allocator); if (c.empty()) return -100; #pragma omp parallel for num_threads(opt.num_threads) for (int q = 0; q < channels; q++) { const float* ptr = a.channel(q); const float b0 = b[0]; float* outptr = c.channel(q); int i = 0; #if __SSE2__ #if __AVX__ #if __AVX512F__ __m512 _b0_avx512 = _mm512_set1_ps(b0); for (; i + 15 < size; i += 16) { __m512 _p = _mm512_loadu_ps(ptr); __m512 _outp = op.func_pack16(_p, _b0_avx512); _mm512_storeu_ps(outptr, _outp); ptr += 16; outptr += 16; } #endif // __AVX512F__ __m256 _b0_avx = _mm256_set1_ps(b0); for (; i + 7 < size; i += 8) { __m256 _p = _mm256_loadu_ps(ptr); __m256 _outp = op.func_pack8(_p, _b0_avx); _mm256_storeu_ps(outptr, _outp); ptr += 8; outptr += 8; } #endif // __AVX__ __m128 _b0 = _mm_set1_ps(b0); for (; i + 3 < size; i += 4) { __m128 _p = _mm_load_ps(ptr); __m128 _outp = op.func_pack4(_p, _b0); _mm_store_ps(outptr, _outp); ptr += 4; outptr += 4; } #endif // __SSE2__ for (; i < size; i++) { *outptr = op.func(*ptr, b0); ptr += 1; outptr += 1; } } return 0; }
pushq %rbp movq %rsp, %rbp andq $-0x20, %rsp subq $0x460, %rsp # imm = 0x460 movq %rdi, 0x200(%rsp) movq %rsi, 0x1f8(%rsp) movq %rdx, 0x1f0(%rsp) movq %rcx, 0x1e8(%rsp) movq 0x200(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x1e0(%rsp) movq 0x200(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0x1dc(%rsp) movq 0x200(%rsp), %rax movl 0x34(%rax), %eax movl %eax, 0x1d8(%rsp) movq 0x200(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0x1d4(%rsp) movq 0x200(%rsp), %rax movl 0x18(%rax), %eax movl %eax, 0x1d0(%rsp) movl 0x1e0(%rsp), %eax imull 0x1dc(%rsp), %eax imull 0x1d8(%rsp), %eax imull 0x1d0(%rsp), %eax movl %eax, 0x1cc(%rsp) movq 0x1f0(%rsp), %rdi movq 0x200(%rsp), %rsi movq 0x1e8(%rsp), %rax movq 0x8(%rax), %rdx callq 0x65b60 movq 0x1f0(%rsp), %rax movq %rax, 0x210(%rsp) movq 0x210(%rsp), %rcx movq %rcx, 0x60(%rsp) movb $0x1, %al cmpq $0x0, (%rcx) movb %al, 0x6f(%rsp) je 0x152a185 movq 0x60(%rsp), %rax movq %rax, 0x2d0(%rsp) movq 0x2d0(%rsp), %rcx movq 0x40(%rcx), %rax movslq 0x38(%rcx), %rcx imulq %rcx, %rax cmpq $0x0, %rax sete %al movb %al, 0x6f(%rsp) movb 0x6f(%rsp), %al testb $0x1, %al jne 0x152a18f jmp 0x152a19f movl $0xffffff9c, 0x20c(%rsp) # imm = 0xFFFFFF9C jmp 0x152adc4 movl $0x0, 0x1c8(%rsp) movl 0x1c8(%rsp), %eax cmpl 0x1d4(%rsp), %eax jge 0x152adb9 movq 0x200(%rsp), %rcx movl 0x1c8(%rsp), %eax leaq 0x178(%rsp), %rdx movq %rdx, 0x228(%rsp) movq %rcx, 0x220(%rsp) movl %eax, 0x21c(%rsp) movq 0x220(%rsp), %rax movq %rax, 0x58(%rsp) movb $0x0, 0x21b(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x21c(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x178(%rsp), %r10 movq %r10, 0x340(%rsp) movl %r9d, 0x33c(%rsp) movl %r8d, 0x338(%rsp) movl %edi, 0x334(%rsp) movq %rsi, 0x328(%rsp) movq %rdx, 0x320(%rsp) movl %ecx, 0x31c(%rsp) movq %rax, 0x310(%rsp) movq 0x340(%rsp), %rcx movq %rcx, 0x50(%rsp) movq 
0x328(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x320(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x31c(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x310(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x33c(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x338(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x334(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x350(%rsp) movl $0x10, 0x34c(%rsp) movq 0x350(%rsp), %rax movslq 0x34c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x34c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x58(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x1a0(%rsp) cmpl $0x4, 0x28(%rax) jne 0x152a36e movq 0x58(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x1b8(%rsp) movb $0x1, 0x21b(%rsp) testb $0x1, 0x21b(%rsp) jne 0x152a49d leaq 0x178(%rsp), %rax movq %rax, 0x238(%rsp) movq 0x238(%rsp), %rax movq %rax, 0x3a0(%rsp) movq 0x3a0(%rsp), %rax movq %rax, 0x48(%rsp) cmpq $0x0, 0x8(%rax) je 0x152a443 movq 0x48(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x39c(%rsp) # imm = 0xFFFFFFFF movl 0x39c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x398(%rsp) cmpl $0x1, 0x398(%rsp) jne 0x152a443 movq 0x48(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x152a414 movq 0x48(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x152a412 jmp 0x152a441 movq 0x48(%rsp), %rax movq (%rax), %rax movq %rax, 0x3a8(%rsp) cmpq $0x0, 0x3a8(%rsp) je 0x152a43f movq 0x3a8(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x152a441 jmp 0x152a443 movq 0x48(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq 
$0x0, 0x8(%rax) jmp 0x152a49b movq %rax, %rdi callq 0x5fc90 jmp 0x152a49d leaq 0x178(%rsp), %rax movq %rax, 0x230(%rsp) movq 0x230(%rsp), %rax movq (%rax), %rax movq %rax, 0x38(%rsp) leaq 0x178(%rsp), %rax movq %rax, 0x240(%rsp) movq 0x240(%rsp), %rax movq %rax, 0x390(%rsp) movq 0x390(%rsp), %rax movq %rax, 0x40(%rsp) cmpq $0x0, 0x8(%rax) je 0x152a57c movq 0x40(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x38c(%rsp) # imm = 0xFFFFFFFF movl 0x38c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x388(%rsp) cmpl $0x1, 0x388(%rsp) jne 0x152a57c movq 0x40(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x152a54d movq 0x40(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x152a54b jmp 0x152a57a movq 0x40(%rsp), %rax movq (%rax), %rax movq %rax, 0x3b0(%rsp) cmpq $0x0, 0x3b0(%rsp) je 0x152a578 movq 0x3b0(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x152a57a jmp 0x152a57c movq 0x40(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x152a5d4 movq %rax, %rdi callq 0x5fc90 movq 0x38(%rsp), %rax movq %rax, 0x1c0(%rsp) movq 0x1f8(%rsp), %rax movq %rax, 0x3d0(%rsp) movq $0x0, 0x3c8(%rsp) movq 0x3d0(%rsp), %rax movq (%rax), %rax movq 0x3c8(%rsp), %rcx vmovss (%rax,%rcx,4), %xmm0 vmovss %xmm0, 0x174(%rsp) movq 0x1f0(%rsp), %rcx movl 0x1c8(%rsp), %eax leaq 0x120(%rsp), %rdx movq %rdx, 0x260(%rsp) movq %rcx, 0x258(%rsp) movl %eax, 0x254(%rsp) movq 0x258(%rsp), %rax movq %rax, 0x30(%rsp) movb $0x0, 0x253(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x254(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x120(%rsp), %r10 movq %r10, 0x308(%rsp) movl %r9d, 0x304(%rsp) movl %r8d, 
0x300(%rsp) movl %edi, 0x2fc(%rsp) movq %rsi, 0x2f0(%rsp) movq %rdx, 0x2e8(%rsp) movl %ecx, 0x2e4(%rsp) movq %rax, 0x2d8(%rsp) movq 0x308(%rsp), %rcx movq %rcx, 0x28(%rsp) movq 0x2f0(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x2e8(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x2e4(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x2d8(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x304(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x300(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x2fc(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x360(%rsp) movl $0x10, 0x35c(%rsp) movq 0x360(%rsp), %rax movslq 0x35c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x35c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x30(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x148(%rsp) cmpl $0x4, 0x28(%rax) jne 0x152a7ce movq 0x30(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x160(%rsp) movb $0x1, 0x253(%rsp) testb $0x1, 0x253(%rsp) jne 0x152a8fd leaq 0x120(%rsp), %rax movq %rax, 0x268(%rsp) movq 0x268(%rsp), %rax movq %rax, 0x370(%rsp) movq 0x370(%rsp), %rax movq %rax, 0x20(%rsp) cmpq $0x0, 0x8(%rax) je 0x152a8a3 movq 0x20(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x36c(%rsp) # imm = 0xFFFFFFFF movl 0x36c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x368(%rsp) cmpl $0x1, 0x368(%rsp) jne 0x152a8a3 movq 0x20(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x152a874 movq 0x20(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x152a872 jmp 0x152a8a1 movq 0x20(%rsp), %rax movq (%rax), %rax movq %rax, 0x3c0(%rsp) cmpq $0x0, 0x3c0(%rsp) je 0x152a89f movq 0x3c0(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x152a8a1 jmp 0x152a8a3 movq 0x20(%rsp), %rax movq $0x0, (%rax) movq 
$0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x152a8fb movq %rax, %rdi callq 0x5fc90 jmp 0x152a8fd leaq 0x120(%rsp), %rax movq %rax, 0x270(%rsp) movq 0x270(%rsp), %rax movq (%rax), %rax movq %rax, 0x10(%rsp) leaq 0x120(%rsp), %rax movq %rax, 0x248(%rsp) movq 0x248(%rsp), %rax movq %rax, 0x380(%rsp) movq 0x380(%rsp), %rax movq %rax, 0x18(%rsp) cmpq $0x0, 0x8(%rax) je 0x152a9dc movq 0x18(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x37c(%rsp) # imm = 0xFFFFFFFF movl 0x37c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x378(%rsp) cmpl $0x1, 0x378(%rsp) jne 0x152a9dc movq 0x18(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x152a9ad movq 0x18(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x152a9ab jmp 0x152a9da movq 0x18(%rsp), %rax movq (%rax), %rax movq %rax, 0x3b8(%rsp) cmpq $0x0, 0x3b8(%rsp) je 0x152a9d8 movq 0x3b8(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x152a9da jmp 0x152a9dc movq 0x18(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x152aa34 movq %rax, %rdi callq 0x5fc90 movq 0x10(%rsp), %rax movq %rax, 0x168(%rsp) movl $0x0, 0x11c(%rsp) vmovss 0x174(%rsp), %xmm0 vmovss %xmm0, 0x3dc(%rsp) vmovss 0x3dc(%rsp), %xmm0 vmovss %xmm0, 0xc(%rsp) vmovss %xmm0, 0x44c(%rsp) vmovss %xmm0, 0x448(%rsp) vmovss %xmm0, 0x444(%rsp) vmovss %xmm0, 0x440(%rsp) vmovss %xmm0, 0x43c(%rsp) vmovss %xmm0, 0x438(%rsp) vmovss %xmm0, 0x434(%rsp) vmovss %xmm0, 0x430(%rsp) vmovss 0x444(%rsp), %xmm1 vmovss 0x440(%rsp), %xmm0 vinsertps $0x10, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[2,3] vmovss 0x448(%rsp), %xmm1 vinsertps $0x20, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm1[0],xmm0[3] 
vmovss 0x44c(%rsp), %xmm1 vinsertps $0x30, %xmm1, %xmm0, %xmm1 # xmm1 = xmm0[0,1,2],xmm1[0] vmovss 0x434(%rsp), %xmm2 vmovss 0x430(%rsp), %xmm0 vinsertps $0x10, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm2[0],xmm0[2,3] vmovss 0x438(%rsp), %xmm2 vinsertps $0x20, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm2[0],xmm0[3] vmovss 0x43c(%rsp), %xmm2 vinsertps $0x30, %xmm2, %xmm0, %xmm2 # xmm2 = xmm0[0,1,2],xmm2[0] vmovaps %xmm2, %xmm0 vinsertf128 $0x1, %xmm1, %ymm0, %ymm0 vmovaps %ymm0, 0x400(%rsp) vmovaps 0x400(%rsp), %ymm0 vmovaps %ymm0, 0xe0(%rsp) movl 0x11c(%rsp), %eax addl $0x7, %eax cmpl 0x1cc(%rsp), %eax jge 0x152ac1d movq 0x1c0(%rsp), %rax movq %rax, 0x278(%rsp) movq 0x278(%rsp), %rax vmovups (%rax), %ymm0 vmovaps %ymm0, 0xc0(%rsp) leaq 0x1e7(%rsp), %rdi leaq 0xc0(%rsp), %rsi leaq 0xe0(%rsp), %rdx callq 0x153c240 vmovaps %ymm0, 0xa0(%rsp) movq 0x168(%rsp), %rax vmovaps 0xa0(%rsp), %ymm0 movq %rax, 0x2a0(%rsp) vmovaps %ymm0, 0x280(%rsp) vmovaps 0x280(%rsp), %ymm0 movq 0x2a0(%rsp), %rax vmovups %ymm0, (%rax) movq 0x1c0(%rsp), %rax addq $0x20, %rax movq %rax, 0x1c0(%rsp) movq 0x168(%rsp), %rax addq $0x20, %rax movq %rax, 0x168(%rsp) movl 0x11c(%rsp), %eax addl $0x8, %eax movl %eax, 0x11c(%rsp) jmp 0x152ab46 vmovss 0x174(%rsp), %xmm0 vmovss %xmm0, 0x3fc(%rsp) vbroadcastss 0x3fc(%rsp), %xmm0 vmovaps %xmm0, 0x3e0(%rsp) vmovaps 0x3e0(%rsp), %xmm0 vmovaps %xmm0, 0x90(%rsp) movl 0x11c(%rsp), %eax addl $0x3, %eax cmpl 0x1cc(%rsp), %eax jge 0x152ad28 movq 0x1c0(%rsp), %rax movq %rax, 0x2a8(%rsp) movq 0x2a8(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x80(%rsp) leaq 0x1e7(%rsp), %rdi leaq 0x80(%rsp), %rsi leaq 0x90(%rsp), %rdx vzeroupper callq 0x153c290 vmovaps %xmm0, 0x70(%rsp) movq 0x168(%rsp), %rax vmovaps 0x70(%rsp), %xmm0 movq %rax, 0x2c8(%rsp) vmovaps %xmm0, 0x2b0(%rsp) vmovaps 0x2b0(%rsp), %xmm0 movq 0x2c8(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x1c0(%rsp), %rax addq $0x10, %rax movq %rax, 0x1c0(%rsp) movq 0x168(%rsp), %rax addq $0x10, %rax movq %rax, 0x168(%rsp) movl 
0x11c(%rsp), %eax addl $0x4, %eax movl %eax, 0x11c(%rsp) jmp 0x152ac54 jmp 0x152ad2a movl 0x11c(%rsp), %eax cmpl 0x1cc(%rsp), %eax jge 0x152ada1 movq 0x1c0(%rsp), %rsi leaq 0x1e7(%rsp), %rdi leaq 0x174(%rsp), %rdx vzeroupper callq 0x153c2d0 movq 0x168(%rsp), %rax vmovss %xmm0, (%rax) movq 0x1c0(%rsp), %rax addq $0x4, %rax movq %rax, 0x1c0(%rsp) movq 0x168(%rsp), %rax addq $0x4, %rax movq %rax, 0x168(%rsp) movl 0x11c(%rsp), %eax addl $0x1, %eax movl %eax, 0x11c(%rsp) jmp 0x152ad2a jmp 0x152ada3 movl 0x1c8(%rsp), %eax addl $0x1, %eax movl %eax, 0x1c8(%rsp) jmp 0x152a1aa movl $0x0, 0x20c(%rsp) movl 0x20c(%rsp), %eax movq %rbp, %rsp popq %rbp vzeroupper retq nopw %cs:(%rax,%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/binaryop_x86_fma.cpp
int ncnn::binary_op_2_3_4_20<ncnn::BinaryOp_x86_fma_functor::binary_op_add>(ncnn::Mat const&, ncnn::Mat const&, ncnn::Mat&, ncnn::Option const&)
static int binary_op_2_3_4_20(const Mat& a, const Mat& b, Mat& c, const Option& opt) { Op op; int w = b.w; int h = b.h; int d = b.d; int channels = b.c; int elempack = b.elempack; int size = w * h * d * elempack; // type 2 3 4 20 c.create_like(b, opt.blob_allocator); if (c.empty()) return -100; #pragma omp parallel for num_threads(opt.num_threads) for (int q = 0; q < channels; q++) { const float a0 = a[0]; const float* ptr = b.channel(q); float* outptr = c.channel(q); int i = 0; #if __SSE2__ #if __AVX__ #if __AVX512F__ __m512 _a0_avx512 = _mm512_set1_ps(a0); for (; i + 15 < size; i += 16) { __m512 _p = _mm512_loadu_ps(ptr); __m512 _outp = op.func_pack16(_a0_avx512, _p); _mm512_storeu_ps(outptr, _outp); ptr += 16; outptr += 16; } #endif // __AVX512F__ __m256 _a0_avx = _mm256_set1_ps(a0); for (; i + 7 < size; i += 8) { __m256 _p = _mm256_loadu_ps(ptr); __m256 _outp = op.func_pack8(_a0_avx, _p); _mm256_storeu_ps(outptr, _outp); ptr += 8; outptr += 8; } #endif // __AVX__ __m128 _a0 = _mm_set1_ps(a0); for (; i + 3 < size; i += 4) { __m128 _p = _mm_load_ps(ptr); __m128 _outp = op.func_pack4(_a0, _p); _mm_store_ps(outptr, _outp); ptr += 4; outptr += 4; } #endif // __SSE2__ for (; i < size; i++) { *outptr = op.func(a0, *ptr); ptr += 1; outptr += 1; } } return 0; }
pushq %rbp movq %rsp, %rbp andq $-0x20, %rsp subq $0x460, %rsp # imm = 0x460 movq %rdi, 0x200(%rsp) movq %rsi, 0x1f8(%rsp) movq %rdx, 0x1f0(%rsp) movq %rcx, 0x1e8(%rsp) movq 0x1f8(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x1e0(%rsp) movq 0x1f8(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0x1dc(%rsp) movq 0x1f8(%rsp), %rax movl 0x34(%rax), %eax movl %eax, 0x1d8(%rsp) movq 0x1f8(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0x1d4(%rsp) movq 0x1f8(%rsp), %rax movl 0x18(%rax), %eax movl %eax, 0x1d0(%rsp) movl 0x1e0(%rsp), %eax imull 0x1dc(%rsp), %eax imull 0x1d8(%rsp), %eax imull 0x1d0(%rsp), %eax movl %eax, 0x1cc(%rsp) movq 0x1f0(%rsp), %rdi movq 0x1f8(%rsp), %rsi movq 0x1e8(%rsp), %rax movq 0x8(%rax), %rdx callq 0x65b60 movq 0x1f0(%rsp), %rax movq %rax, 0x210(%rsp) movq 0x210(%rsp), %rcx movq %rcx, 0x60(%rsp) movb $0x1, %al cmpq $0x0, (%rcx) movb %al, 0x6f(%rsp) je 0x152af05 movq 0x60(%rsp), %rax movq %rax, 0x2d0(%rsp) movq 0x2d0(%rsp), %rcx movq 0x40(%rcx), %rax movslq 0x38(%rcx), %rcx imulq %rcx, %rax cmpq $0x0, %rax sete %al movb %al, 0x6f(%rsp) movb 0x6f(%rsp), %al testb $0x1, %al jne 0x152af0f jmp 0x152af1f movl $0xffffff9c, 0x20c(%rsp) # imm = 0xFFFFFF9C jmp 0x152bb44 movl $0x0, 0x1c8(%rsp) movl 0x1c8(%rsp), %eax cmpl 0x1d4(%rsp), %eax jge 0x152bb39 movq 0x200(%rsp), %rax movq %rax, 0x3d0(%rsp) movq $0x0, 0x3c8(%rsp) movq 0x3d0(%rsp), %rax movq (%rax), %rax movq 0x3c8(%rsp), %rcx vmovss (%rax,%rcx,4), %xmm0 vmovss %xmm0, 0x1c4(%rsp) movq 0x1f8(%rsp), %rcx movl 0x1c8(%rsp), %eax leaq 0x170(%rsp), %rdx movq %rdx, 0x228(%rsp) movq %rcx, 0x220(%rsp) movl %eax, 0x21c(%rsp) movq 0x220(%rsp), %rax movq %rax, 0x58(%rsp) movb $0x0, 0x21b(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x21c(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x170(%rsp), %r10 movq %r10, 0x340(%rsp) movl %r9d, 0x33c(%rsp) movl 
%r8d, 0x338(%rsp) movl %edi, 0x334(%rsp) movq %rsi, 0x328(%rsp) movq %rdx, 0x320(%rsp) movl %ecx, 0x31c(%rsp) movq %rax, 0x310(%rsp) movq 0x340(%rsp), %rcx movq %rcx, 0x50(%rsp) movq 0x328(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x320(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x31c(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x310(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x33c(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x338(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x334(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x350(%rsp) movl $0x10, 0x34c(%rsp) movq 0x350(%rsp), %rax movslq 0x34c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x34c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x58(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x198(%rsp) cmpl $0x4, 0x28(%rax) jne 0x152b12b movq 0x58(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x1b0(%rsp) movb $0x1, 0x21b(%rsp) testb $0x1, 0x21b(%rsp) jne 0x152b25a leaq 0x170(%rsp), %rax movq %rax, 0x238(%rsp) movq 0x238(%rsp), %rax movq %rax, 0x3a0(%rsp) movq 0x3a0(%rsp), %rax movq %rax, 0x48(%rsp) cmpq $0x0, 0x8(%rax) je 0x152b200 movq 0x48(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x39c(%rsp) # imm = 0xFFFFFFFF movl 0x39c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x398(%rsp) cmpl $0x1, 0x398(%rsp) jne 0x152b200 movq 0x48(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x152b1d1 movq 0x48(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x152b1cf jmp 0x152b1fe movq 0x48(%rsp), %rax movq (%rax), %rax movq %rax, 0x3a8(%rsp) cmpq $0x0, 0x3a8(%rsp) je 0x152b1fc movq 0x3a8(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x152b1fe jmp 0x152b200 movq 0x48(%rsp), %rax movq $0x0, (%rax) 
movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x152b258 movq %rax, %rdi callq 0x5fc90 jmp 0x152b25a leaq 0x170(%rsp), %rax movq %rax, 0x230(%rsp) movq 0x230(%rsp), %rax movq (%rax), %rax movq %rax, 0x38(%rsp) leaq 0x170(%rsp), %rax movq %rax, 0x240(%rsp) movq 0x240(%rsp), %rax movq %rax, 0x390(%rsp) movq 0x390(%rsp), %rax movq %rax, 0x40(%rsp) cmpq $0x0, 0x8(%rax) je 0x152b339 movq 0x40(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x38c(%rsp) # imm = 0xFFFFFFFF movl 0x38c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x388(%rsp) cmpl $0x1, 0x388(%rsp) jne 0x152b339 movq 0x40(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x152b30a movq 0x40(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x152b308 jmp 0x152b337 movq 0x40(%rsp), %rax movq (%rax), %rax movq %rax, 0x3b0(%rsp) cmpq $0x0, 0x3b0(%rsp) je 0x152b335 movq 0x3b0(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x152b337 jmp 0x152b339 movq 0x40(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x152b391 movq %rax, %rdi callq 0x5fc90 movq 0x38(%rsp), %rax movq %rax, 0x1b8(%rsp) movq 0x1f0(%rsp), %rcx movl 0x1c8(%rsp), %eax leaq 0x120(%rsp), %rdx movq %rdx, 0x260(%rsp) movq %rcx, 0x258(%rsp) movl %eax, 0x254(%rsp) movq 0x258(%rsp), %rax movq %rax, 0x30(%rsp) movb $0x0, 0x253(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x254(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x120(%rsp), %r10 movq %r10, 0x308(%rsp) movl %r9d, 0x304(%rsp) movl %r8d, 0x300(%rsp) 
movl %edi, 0x2fc(%rsp) movq %rsi, 0x2f0(%rsp) movq %rdx, 0x2e8(%rsp) movl %ecx, 0x2e4(%rsp) movq %rax, 0x2d8(%rsp) movq 0x308(%rsp), %rcx movq %rcx, 0x28(%rsp) movq 0x2f0(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x2e8(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x2e4(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x2d8(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x304(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x300(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x2fc(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x360(%rsp) movl $0x10, 0x35c(%rsp) movq 0x360(%rsp), %rax movslq 0x35c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x35c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x30(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x148(%rsp) cmpl $0x4, 0x28(%rax) jne 0x152b54e movq 0x30(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x160(%rsp) movb $0x1, 0x253(%rsp) testb $0x1, 0x253(%rsp) jne 0x152b67d leaq 0x120(%rsp), %rax movq %rax, 0x268(%rsp) movq 0x268(%rsp), %rax movq %rax, 0x370(%rsp) movq 0x370(%rsp), %rax movq %rax, 0x20(%rsp) cmpq $0x0, 0x8(%rax) je 0x152b623 movq 0x20(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x36c(%rsp) # imm = 0xFFFFFFFF movl 0x36c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x368(%rsp) cmpl $0x1, 0x368(%rsp) jne 0x152b623 movq 0x20(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x152b5f4 movq 0x20(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x152b5f2 jmp 0x152b621 movq 0x20(%rsp), %rax movq (%rax), %rax movq %rax, 0x3c0(%rsp) cmpq $0x0, 0x3c0(%rsp) je 0x152b61f movq 0x3c0(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x152b621 jmp 0x152b623 movq 0x20(%rsp), %rax movq $0x0, (%rax) movq $0x0, 
0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x152b67b movq %rax, %rdi callq 0x5fc90 jmp 0x152b67d leaq 0x120(%rsp), %rax movq %rax, 0x270(%rsp) movq 0x270(%rsp), %rax movq (%rax), %rax movq %rax, 0x10(%rsp) leaq 0x120(%rsp), %rax movq %rax, 0x248(%rsp) movq 0x248(%rsp), %rax movq %rax, 0x380(%rsp) movq 0x380(%rsp), %rax movq %rax, 0x18(%rsp) cmpq $0x0, 0x8(%rax) je 0x152b75c movq 0x18(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x37c(%rsp) # imm = 0xFFFFFFFF movl 0x37c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x378(%rsp) cmpl $0x1, 0x378(%rsp) jne 0x152b75c movq 0x18(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x152b72d movq 0x18(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x152b72b jmp 0x152b75a movq 0x18(%rsp), %rax movq (%rax), %rax movq %rax, 0x3b8(%rsp) cmpq $0x0, 0x3b8(%rsp) je 0x152b758 movq 0x3b8(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x152b75a jmp 0x152b75c movq 0x18(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x152b7b4 movq %rax, %rdi callq 0x5fc90 movq 0x10(%rsp), %rax movq %rax, 0x168(%rsp) movl $0x0, 0x11c(%rsp) vmovss 0x1c4(%rsp), %xmm0 vmovss %xmm0, 0x3dc(%rsp) vmovss 0x3dc(%rsp), %xmm0 vmovss %xmm0, 0xc(%rsp) vmovss %xmm0, 0x44c(%rsp) vmovss %xmm0, 0x448(%rsp) vmovss %xmm0, 0x444(%rsp) vmovss %xmm0, 0x440(%rsp) vmovss %xmm0, 0x43c(%rsp) vmovss %xmm0, 0x438(%rsp) vmovss %xmm0, 0x434(%rsp) vmovss %xmm0, 0x430(%rsp) vmovss 0x444(%rsp), %xmm1 vmovss 0x440(%rsp), %xmm0 vinsertps $0x10, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[2,3] vmovss 0x448(%rsp), %xmm1 vinsertps $0x20, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm1[0],xmm0[3] vmovss 
0x44c(%rsp), %xmm1 vinsertps $0x30, %xmm1, %xmm0, %xmm1 # xmm1 = xmm0[0,1,2],xmm1[0] vmovss 0x434(%rsp), %xmm2 vmovss 0x430(%rsp), %xmm0 vinsertps $0x10, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm2[0],xmm0[2,3] vmovss 0x438(%rsp), %xmm2 vinsertps $0x20, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm2[0],xmm0[3] vmovss 0x43c(%rsp), %xmm2 vinsertps $0x30, %xmm2, %xmm0, %xmm2 # xmm2 = xmm0[0,1,2],xmm2[0] vmovaps %xmm2, %xmm0 vinsertf128 $0x1, %xmm1, %ymm0, %ymm0 vmovaps %ymm0, 0x400(%rsp) vmovaps 0x400(%rsp), %ymm0 vmovaps %ymm0, 0xe0(%rsp) movl 0x11c(%rsp), %eax addl $0x7, %eax cmpl 0x1cc(%rsp), %eax jge 0x152b99d movq 0x1b8(%rsp), %rax movq %rax, 0x278(%rsp) movq 0x278(%rsp), %rax vmovups (%rax), %ymm0 vmovaps %ymm0, 0xc0(%rsp) leaq 0x1e7(%rsp), %rdi leaq 0xe0(%rsp), %rsi leaq 0xc0(%rsp), %rdx callq 0x153c240 vmovaps %ymm0, 0xa0(%rsp) movq 0x168(%rsp), %rax vmovaps 0xa0(%rsp), %ymm0 movq %rax, 0x2a0(%rsp) vmovaps %ymm0, 0x280(%rsp) vmovaps 0x280(%rsp), %ymm0 movq 0x2a0(%rsp), %rax vmovups %ymm0, (%rax) movq 0x1b8(%rsp), %rax addq $0x20, %rax movq %rax, 0x1b8(%rsp) movq 0x168(%rsp), %rax addq $0x20, %rax movq %rax, 0x168(%rsp) movl 0x11c(%rsp), %eax addl $0x8, %eax movl %eax, 0x11c(%rsp) jmp 0x152b8c6 vmovss 0x1c4(%rsp), %xmm0 vmovss %xmm0, 0x3fc(%rsp) vbroadcastss 0x3fc(%rsp), %xmm0 vmovaps %xmm0, 0x3e0(%rsp) vmovaps 0x3e0(%rsp), %xmm0 vmovaps %xmm0, 0x90(%rsp) movl 0x11c(%rsp), %eax addl $0x3, %eax cmpl 0x1cc(%rsp), %eax jge 0x152baa8 movq 0x1b8(%rsp), %rax movq %rax, 0x2a8(%rsp) movq 0x2a8(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x80(%rsp) leaq 0x1e7(%rsp), %rdi leaq 0x90(%rsp), %rsi leaq 0x80(%rsp), %rdx vzeroupper callq 0x153c290 vmovaps %xmm0, 0x70(%rsp) movq 0x168(%rsp), %rax vmovaps 0x70(%rsp), %xmm0 movq %rax, 0x2c8(%rsp) vmovaps %xmm0, 0x2b0(%rsp) vmovaps 0x2b0(%rsp), %xmm0 movq 0x2c8(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x1b8(%rsp), %rax addq $0x10, %rax movq %rax, 0x1b8(%rsp) movq 0x168(%rsp), %rax addq $0x10, %rax movq %rax, 0x168(%rsp) movl 
0x11c(%rsp), %eax addl $0x4, %eax movl %eax, 0x11c(%rsp) jmp 0x152b9d4 jmp 0x152baaa movl 0x11c(%rsp), %eax cmpl 0x1cc(%rsp), %eax jge 0x152bb21 movq 0x1b8(%rsp), %rdx leaq 0x1e7(%rsp), %rdi leaq 0x1c4(%rsp), %rsi vzeroupper callq 0x153c2d0 movq 0x168(%rsp), %rax vmovss %xmm0, (%rax) movq 0x1b8(%rsp), %rax addq $0x4, %rax movq %rax, 0x1b8(%rsp) movq 0x168(%rsp), %rax addq $0x4, %rax movq %rax, 0x168(%rsp) movl 0x11c(%rsp), %eax addl $0x1, %eax movl %eax, 0x11c(%rsp) jmp 0x152baaa jmp 0x152bb23 movl 0x1c8(%rsp), %eax addl $0x1, %eax movl %eax, 0x1c8(%rsp) jmp 0x152af2a movl $0x0, 0x20c(%rsp) movl 0x20c(%rsp), %eax movq %rbp, %rsp popq %rbp vzeroupper retq nopw %cs:(%rax,%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/binaryop_x86_fma.cpp
int ncnn::binary_op_7_13_19_29<ncnn::BinaryOp_x86_fma_functor::binary_op_sub>(ncnn::Mat const&, ncnn::Mat const&, ncnn::Mat&, ncnn::Option const&)
static int binary_op_7_13_19_29(const Mat& a, const Mat& b, Mat& c, const Option& opt) { Op op; int w = a.w; int h = a.h; int d = a.d; int channels = a.c; int elempack = a.elempack; int size = w * h * d * elempack; // type 7 13 19 29 c.create_like(a, opt.blob_allocator); if (c.empty()) return -100; #pragma omp parallel for num_threads(opt.num_threads) for (int q = 0; q < channels; q++) { const float* ptr = a.channel(q); const float* ptr1 = b.channel(q); float* outptr = c.channel(q); int i = 0; #if __SSE2__ #if __AVX__ #if __AVX512F__ for (; i + 15 < size; i += 16) { __m512 _p = _mm512_loadu_ps(ptr); __m512 _p1 = _mm512_loadu_ps(ptr1); __m512 _outp = op.func_pack16(_p, _p1); _mm512_storeu_ps(outptr, _outp); ptr += 16; ptr1 += 16; outptr += 16; } #endif // __AVX512F__ for (; i + 7 < size; i += 8) { __m256 _p = _mm256_loadu_ps(ptr); __m256 _p1 = _mm256_loadu_ps(ptr1); __m256 _outp = op.func_pack8(_p, _p1); _mm256_storeu_ps(outptr, _outp); ptr += 8; ptr1 += 8; outptr += 8; } #endif // __AVX__ for (; i + 3 < size; i += 4) { __m128 _p = _mm_load_ps(ptr); __m128 _p1 = _mm_load_ps(ptr1); __m128 _outp = op.func_pack4(_p, _p1); _mm_store_ps(outptr, _outp); ptr += 4; ptr1 += 4; outptr += 4; } #endif // __SSE2__ for (; i < size; i++) { *outptr = op.func(*ptr, *ptr1); ptr += 1; ptr1 += 1; outptr += 1; } } return 0; }
pushq %rbp movq %rsp, %rbp andq $-0x20, %rsp subq $0x500, %rsp # imm = 0x500 movq %rdi, 0x268(%rsp) movq %rsi, 0x260(%rsp) movq %rdx, 0x258(%rsp) movq %rcx, 0x250(%rsp) movq 0x268(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x248(%rsp) movq 0x268(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0x244(%rsp) movq 0x268(%rsp), %rax movl 0x34(%rax), %eax movl %eax, 0x240(%rsp) movq 0x268(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0x23c(%rsp) movq 0x268(%rsp), %rax movl 0x18(%rax), %eax movl %eax, 0x238(%rsp) movl 0x248(%rsp), %eax imull 0x244(%rsp), %eax imull 0x240(%rsp), %eax imull 0x238(%rsp), %eax movl %eax, 0x234(%rsp) movq 0x258(%rsp), %rdi movq 0x268(%rsp), %rsi movq 0x250(%rsp), %rax movq 0x8(%rax), %rdx callq 0x65b60 movq 0x258(%rsp), %rax movq %rax, 0x278(%rsp) movq 0x278(%rsp), %rcx movq %rcx, 0x80(%rsp) movb $0x1, %al cmpq $0x0, (%rcx) movb %al, 0x8f(%rsp) je 0x152bc91 movq 0x80(%rsp), %rax movq %rax, 0x380(%rsp) movq 0x380(%rsp), %rcx movq 0x40(%rcx), %rax movslq 0x38(%rcx), %rcx imulq %rcx, %rax cmpq $0x0, %rax sete %al movb %al, 0x8f(%rsp) movb 0x8f(%rsp), %al testb $0x1, %al jne 0x152bc9e jmp 0x152bcae movl $0xffffff9c, 0x274(%rsp) # imm = 0xFFFFFF9C jmp 0x152cc19 movl $0x0, 0x230(%rsp) movl 0x230(%rsp), %eax cmpl 0x23c(%rsp), %eax jge 0x152cc0e movq 0x268(%rsp), %rcx movl 0x230(%rsp), %eax leaq 0x1e0(%rsp), %rdx movq %rdx, 0x2a8(%rsp) movq %rcx, 0x2a0(%rsp) movl %eax, 0x29c(%rsp) movq 0x2a0(%rsp), %rax movq %rax, 0x78(%rsp) movb $0x0, 0x29b(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x29c(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x1e0(%rsp), %r10 movq %r10, 0x3f0(%rsp) movl %r9d, 0x3ec(%rsp) movl %r8d, 0x3e8(%rsp) movl %edi, 0x3e4(%rsp) movq %rsi, 0x3d8(%rsp) movq %rdx, 0x3d0(%rsp) movl %ecx, 0x3cc(%rsp) movq %rax, 0x3c0(%rsp) movq 0x3f0(%rsp), %rcx movq %rcx, 0x70(%rsp) movq 
0x3d8(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x3d0(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x3cc(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x3c0(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x3ec(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x3e8(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x3e4(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x448(%rsp) movl $0x10, 0x444(%rsp) movq 0x448(%rsp), %rax movslq 0x444(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x444(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x78(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x208(%rsp) cmpl $0x4, 0x28(%rax) jne 0x152be7d movq 0x78(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x220(%rsp) movb $0x1, 0x29b(%rsp) testb $0x1, 0x29b(%rsp) jne 0x152bfac leaq 0x1e0(%rsp), %rax movq %rax, 0x2c0(%rsp) movq 0x2c0(%rsp), %rax movq %rax, 0x4b8(%rsp) movq 0x4b8(%rsp), %rax movq %rax, 0x68(%rsp) cmpq $0x0, 0x8(%rax) je 0x152bf52 movq 0x68(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x4b4(%rsp) # imm = 0xFFFFFFFF movl 0x4b4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x4b0(%rsp) cmpl $0x1, 0x4b0(%rsp) jne 0x152bf52 movq 0x68(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x152bf23 movq 0x68(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x152bf21 jmp 0x152bf50 movq 0x68(%rsp), %rax movq (%rax), %rax movq %rax, 0x4c0(%rsp) cmpq $0x0, 0x4c0(%rsp) je 0x152bf4e movq 0x4c0(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x152bf50 jmp 0x152bf52 movq 0x68(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq 
$0x0, 0x8(%rax) jmp 0x152bfaa movq %rax, %rdi callq 0x5fc90 jmp 0x152bfac leaq 0x1e0(%rsp), %rax movq %rax, 0x2b8(%rsp) movq 0x2b8(%rsp), %rax movq (%rax), %rax movq %rax, 0x58(%rsp) leaq 0x1e0(%rsp), %rax movq %rax, 0x2d0(%rsp) movq 0x2d0(%rsp), %rax movq %rax, 0x498(%rsp) movq 0x498(%rsp), %rax movq %rax, 0x60(%rsp) cmpq $0x0, 0x8(%rax) je 0x152c08b movq 0x60(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x494(%rsp) # imm = 0xFFFFFFFF movl 0x494(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x490(%rsp) cmpl $0x1, 0x490(%rsp) jne 0x152c08b movq 0x60(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x152c05c movq 0x60(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x152c05a jmp 0x152c089 movq 0x60(%rsp), %rax movq (%rax), %rax movq %rax, 0x4d0(%rsp) cmpq $0x0, 0x4d0(%rsp) je 0x152c087 movq 0x4d0(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x152c089 jmp 0x152c08b movq 0x60(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x152c0e3 movq %rax, %rdi callq 0x5fc90 movq 0x58(%rsp), %rax movq %rax, 0x228(%rsp) movq 0x260(%rsp), %rcx movl 0x230(%rsp), %eax leaq 0x190(%rsp), %rdx movq %rdx, 0x290(%rsp) movq %rcx, 0x288(%rsp) movl %eax, 0x284(%rsp) movq 0x288(%rsp), %rax movq %rax, 0x50(%rsp) movb $0x0, 0x283(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x284(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x190(%rsp), %r10 movq %r10, 0x428(%rsp) movl %r9d, 0x424(%rsp) movl %r8d, 0x420(%rsp) movl %edi, 0x41c(%rsp) movq %rsi, 0x410(%rsp) movq %rdx, 0x408(%rsp) movl %ecx, 0x404(%rsp) movq %rax, 0x3f8(%rsp) movq 0x428(%rsp), %rcx movq %rcx, 0x48(%rsp) movq 0x410(%rsp), %rax 
movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x408(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x404(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x3f8(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x424(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x420(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x41c(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x438(%rsp) movl $0x10, 0x434(%rsp) movq 0x438(%rsp), %rax movslq 0x434(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x434(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x50(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x1b8(%rsp) cmpl $0x4, 0x28(%rax) jne 0x152c2a0 movq 0x50(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x1d0(%rsp) movb $0x1, 0x283(%rsp) testb $0x1, 0x283(%rsp) jne 0x152c3cf leaq 0x190(%rsp), %rax movq %rax, 0x2c8(%rsp) movq 0x2c8(%rsp), %rax movq %rax, 0x4a8(%rsp) movq 0x4a8(%rsp), %rax movq %rax, 0x40(%rsp) cmpq $0x0, 0x8(%rax) je 0x152c375 movq 0x40(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x4a4(%rsp) # imm = 0xFFFFFFFF movl 0x4a4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x4a0(%rsp) cmpl $0x1, 0x4a0(%rsp) jne 0x152c375 movq 0x40(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x152c346 movq 0x40(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x152c344 jmp 0x152c373 movq 0x40(%rsp), %rax movq (%rax), %rax movq %rax, 0x4c8(%rsp) cmpq $0x0, 0x4c8(%rsp) je 0x152c371 movq 0x4c8(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x152c373 jmp 0x152c375 movq 0x40(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 
0x152c3cd movq %rax, %rdi callq 0x5fc90 jmp 0x152c3cf leaq 0x190(%rsp), %rax movq %rax, 0x2b0(%rsp) movq 0x2b0(%rsp), %rax movq (%rax), %rax movq %rax, 0x30(%rsp) leaq 0x190(%rsp), %rax movq %rax, 0x2d8(%rsp) movq 0x2d8(%rsp), %rax movq %rax, 0x488(%rsp) movq 0x488(%rsp), %rax movq %rax, 0x38(%rsp) cmpq $0x0, 0x8(%rax) je 0x152c4ae movq 0x38(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x484(%rsp) # imm = 0xFFFFFFFF movl 0x484(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x480(%rsp) cmpl $0x1, 0x480(%rsp) jne 0x152c4ae movq 0x38(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x152c47f movq 0x38(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x152c47d jmp 0x152c4ac movq 0x38(%rsp), %rax movq (%rax), %rax movq %rax, 0x4d8(%rsp) cmpq $0x0, 0x4d8(%rsp) je 0x152c4aa movq 0x4d8(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x152c4ac jmp 0x152c4ae movq 0x38(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x152c506 movq %rax, %rdi callq 0x5fc90 movq 0x30(%rsp), %rax movq %rax, 0x1d8(%rsp) movq 0x258(%rsp), %rcx movl 0x230(%rsp), %eax leaq 0x140(%rsp), %rdx movq %rdx, 0x2f8(%rsp) movq %rcx, 0x2f0(%rsp) movl %eax, 0x2ec(%rsp) movq 0x2f0(%rsp), %rax movq %rax, 0x28(%rsp) movb $0x0, 0x2eb(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x2ec(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x140(%rsp), %r10 movq %r10, 0x3b8(%rsp) movl %r9d, 0x3b4(%rsp) movl %r8d, 0x3b0(%rsp) movl %edi, 0x3ac(%rsp) movq %rsi, 0x3a0(%rsp) movq %rdx, 0x398(%rsp) movl %ecx, 0x394(%rsp) movq %rax, 0x388(%rsp) movq 0x3b8(%rsp), %rcx movq %rcx, 0x20(%rsp) movq 0x3a0(%rsp), %rax movq %rax, (%rcx) 
movq $0x0, 0x8(%rcx) movq 0x398(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x394(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x388(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x3b4(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x3b0(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x3ac(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x458(%rsp) movl $0x10, 0x454(%rsp) movq 0x458(%rsp), %rax movslq 0x454(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x454(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x28(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x168(%rsp) cmpl $0x4, 0x28(%rax) jne 0x152c6c3 movq 0x28(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x180(%rsp) movb $0x1, 0x2eb(%rsp) testb $0x1, 0x2eb(%rsp) jne 0x152c7f2 leaq 0x140(%rsp), %rax movq %rax, 0x300(%rsp) movq 0x300(%rsp), %rax movq %rax, 0x468(%rsp) movq 0x468(%rsp), %rax movq %rax, 0x18(%rsp) cmpq $0x0, 0x8(%rax) je 0x152c798 movq 0x18(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x464(%rsp) # imm = 0xFFFFFFFF movl 0x464(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x460(%rsp) cmpl $0x1, 0x460(%rsp) jne 0x152c798 movq 0x18(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x152c769 movq 0x18(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x152c767 jmp 0x152c796 movq 0x18(%rsp), %rax movq (%rax), %rax movq %rax, 0x4e8(%rsp) cmpq $0x0, 0x4e8(%rsp) je 0x152c794 movq 0x4e8(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x152c796 jmp 0x152c798 movq 0x18(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x152c7f0 movq %rax, 
%rdi callq 0x5fc90 jmp 0x152c7f2 leaq 0x140(%rsp), %rax movq %rax, 0x308(%rsp) movq 0x308(%rsp), %rax movq (%rax), %rax movq %rax, 0x8(%rsp) leaq 0x140(%rsp), %rax movq %rax, 0x2e0(%rsp) movq 0x2e0(%rsp), %rax movq %rax, 0x478(%rsp) movq 0x478(%rsp), %rax movq %rax, 0x10(%rsp) cmpq $0x0, 0x8(%rax) je 0x152c8d1 movq 0x10(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x474(%rsp) # imm = 0xFFFFFFFF movl 0x474(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x470(%rsp) cmpl $0x1, 0x470(%rsp) jne 0x152c8d1 movq 0x10(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x152c8a2 movq 0x10(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x152c8a0 jmp 0x152c8cf movq 0x10(%rsp), %rax movq (%rax), %rax movq %rax, 0x4e0(%rsp) cmpq $0x0, 0x4e0(%rsp) je 0x152c8cd movq 0x4e0(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x152c8cf jmp 0x152c8d1 movq 0x10(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x152c929 movq %rax, %rdi callq 0x5fc90 movq 0x8(%rsp), %rax movq %rax, 0x188(%rsp) movl $0x0, 0x13c(%rsp) movl 0x13c(%rsp), %eax addl $0x7, %eax cmpl 0x234(%rsp), %eax jge 0x152ca51 movq 0x228(%rsp), %rax movq %rax, 0x318(%rsp) movq 0x318(%rsp), %rax vmovups (%rax), %ymm0 vmovaps %ymm0, 0x100(%rsp) movq 0x1d8(%rsp), %rax movq %rax, 0x310(%rsp) movq 0x310(%rsp), %rax vmovups (%rax), %ymm0 vmovaps %ymm0, 0xe0(%rsp) leaq 0x24f(%rsp), %rdi leaq 0x100(%rsp), %rsi leaq 0xe0(%rsp), %rdx callq 0x153c300 vmovaps %ymm0, 0xc0(%rsp) movq 0x188(%rsp), %rax vmovaps 0xc0(%rsp), %ymm0 movq %rax, 0x348(%rsp) vmovaps %ymm0, 0x320(%rsp) vmovaps 0x320(%rsp), %ymm0 movq 0x348(%rsp), %rax vmovups %ymm0, (%rax) movq 0x228(%rsp), %rax addq $0x20, %rax movq %rax, 0x228(%rsp) movq 0x1d8(%rsp), %rax addq $0x20, %rax movq %rax, 0x1d8(%rsp) movq 0x188(%rsp), %rax addq $0x20, 
%rax movq %rax, 0x188(%rsp) movl 0x13c(%rsp), %eax addl $0x8, %eax movl %eax, 0x13c(%rsp) jmp 0x152c941 jmp 0x152ca53 movl 0x13c(%rsp), %eax addl $0x3, %eax cmpl 0x234(%rsp), %eax jge 0x152cb66 movq 0x228(%rsp), %rax movq %rax, 0x358(%rsp) movq 0x358(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0xb0(%rsp) movq 0x1d8(%rsp), %rax movq %rax, 0x350(%rsp) movq 0x350(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0xa0(%rsp) leaq 0x24f(%rsp), %rdi leaq 0xb0(%rsp), %rsi leaq 0xa0(%rsp), %rdx vzeroupper callq 0x153c350 vmovaps %xmm0, 0x90(%rsp) movq 0x188(%rsp), %rax vmovaps 0x90(%rsp), %xmm0 movq %rax, 0x378(%rsp) vmovaps %xmm0, 0x360(%rsp) vmovaps 0x360(%rsp), %xmm0 movq 0x378(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x228(%rsp), %rax addq $0x10, %rax movq %rax, 0x228(%rsp) movq 0x1d8(%rsp), %rax addq $0x10, %rax movq %rax, 0x1d8(%rsp) movq 0x188(%rsp), %rax addq $0x10, %rax movq %rax, 0x188(%rsp) movl 0x13c(%rsp), %eax addl $0x4, %eax movl %eax, 0x13c(%rsp) jmp 0x152ca53 jmp 0x152cb68 movl 0x13c(%rsp), %eax cmpl 0x234(%rsp), %eax jge 0x152cbf6 movq 0x228(%rsp), %rsi movq 0x1d8(%rsp), %rdx leaq 0x24f(%rsp), %rdi vzeroupper callq 0x153c390 movq 0x188(%rsp), %rax vmovss %xmm0, (%rax) movq 0x228(%rsp), %rax addq $0x4, %rax movq %rax, 0x228(%rsp) movq 0x1d8(%rsp), %rax addq $0x4, %rax movq %rax, 0x1d8(%rsp) movq 0x188(%rsp), %rax addq $0x4, %rax movq %rax, 0x188(%rsp) movl 0x13c(%rsp), %eax addl $0x1, %eax movl %eax, 0x13c(%rsp) jmp 0x152cb68 jmp 0x152cbf8 movl 0x230(%rsp), %eax addl $0x1, %eax movl %eax, 0x230(%rsp) jmp 0x152bcb9 movl $0x0, 0x274(%rsp) movl 0x274(%rsp), %eax movq %rbp, %rsp popq %rbp vzeroupper retq nopl (%rax,%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/binaryop_x86_fma.cpp
int ncnn::binary_op_7_13_19_29<ncnn::BinaryOp_x86_fma_functor::binary_op_mul>(ncnn::Mat const&, ncnn::Mat const&, ncnn::Mat&, ncnn::Option const&)
static int binary_op_7_13_19_29(const Mat& a, const Mat& b, Mat& c, const Option& opt) { Op op; int w = a.w; int h = a.h; int d = a.d; int channels = a.c; int elempack = a.elempack; int size = w * h * d * elempack; // type 7 13 19 29 c.create_like(a, opt.blob_allocator); if (c.empty()) return -100; #pragma omp parallel for num_threads(opt.num_threads) for (int q = 0; q < channels; q++) { const float* ptr = a.channel(q); const float* ptr1 = b.channel(q); float* outptr = c.channel(q); int i = 0; #if __SSE2__ #if __AVX__ #if __AVX512F__ for (; i + 15 < size; i += 16) { __m512 _p = _mm512_loadu_ps(ptr); __m512 _p1 = _mm512_loadu_ps(ptr1); __m512 _outp = op.func_pack16(_p, _p1); _mm512_storeu_ps(outptr, _outp); ptr += 16; ptr1 += 16; outptr += 16; } #endif // __AVX512F__ for (; i + 7 < size; i += 8) { __m256 _p = _mm256_loadu_ps(ptr); __m256 _p1 = _mm256_loadu_ps(ptr1); __m256 _outp = op.func_pack8(_p, _p1); _mm256_storeu_ps(outptr, _outp); ptr += 8; ptr1 += 8; outptr += 8; } #endif // __AVX__ for (; i + 3 < size; i += 4) { __m128 _p = _mm_load_ps(ptr); __m128 _p1 = _mm_load_ps(ptr1); __m128 _outp = op.func_pack4(_p, _p1); _mm_store_ps(outptr, _outp); ptr += 4; ptr1 += 4; outptr += 4; } #endif // __SSE2__ for (; i < size; i++) { *outptr = op.func(*ptr, *ptr1); ptr += 1; ptr1 += 1; outptr += 1; } } return 0; }
pushq %rbp movq %rsp, %rbp andq $-0x20, %rsp subq $0x500, %rsp # imm = 0x500 movq %rdi, 0x268(%rsp) movq %rsi, 0x260(%rsp) movq %rdx, 0x258(%rsp) movq %rcx, 0x250(%rsp) movq 0x268(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x248(%rsp) movq 0x268(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0x244(%rsp) movq 0x268(%rsp), %rax movl 0x34(%rax), %eax movl %eax, 0x240(%rsp) movq 0x268(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0x23c(%rsp) movq 0x268(%rsp), %rax movl 0x18(%rax), %eax movl %eax, 0x238(%rsp) movl 0x248(%rsp), %eax imull 0x244(%rsp), %eax imull 0x240(%rsp), %eax imull 0x238(%rsp), %eax movl %eax, 0x234(%rsp) movq 0x258(%rsp), %rdi movq 0x268(%rsp), %rsi movq 0x250(%rsp), %rax movq 0x8(%rax), %rdx callq 0x65b60 movq 0x258(%rsp), %rax movq %rax, 0x278(%rsp) movq 0x278(%rsp), %rcx movq %rcx, 0x80(%rsp) movb $0x1, %al cmpq $0x0, (%rcx) movb %al, 0x8f(%rsp) je 0x152e861 movq 0x80(%rsp), %rax movq %rax, 0x380(%rsp) movq 0x380(%rsp), %rcx movq 0x40(%rcx), %rax movslq 0x38(%rcx), %rcx imulq %rcx, %rax cmpq $0x0, %rax sete %al movb %al, 0x8f(%rsp) movb 0x8f(%rsp), %al testb $0x1, %al jne 0x152e86e jmp 0x152e87e movl $0xffffff9c, 0x274(%rsp) # imm = 0xFFFFFF9C jmp 0x152f7e9 movl $0x0, 0x230(%rsp) movl 0x230(%rsp), %eax cmpl 0x23c(%rsp), %eax jge 0x152f7de movq 0x268(%rsp), %rcx movl 0x230(%rsp), %eax leaq 0x1e0(%rsp), %rdx movq %rdx, 0x2a8(%rsp) movq %rcx, 0x2a0(%rsp) movl %eax, 0x29c(%rsp) movq 0x2a0(%rsp), %rax movq %rax, 0x78(%rsp) movb $0x0, 0x29b(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x29c(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x1e0(%rsp), %r10 movq %r10, 0x3f0(%rsp) movl %r9d, 0x3ec(%rsp) movl %r8d, 0x3e8(%rsp) movl %edi, 0x3e4(%rsp) movq %rsi, 0x3d8(%rsp) movq %rdx, 0x3d0(%rsp) movl %ecx, 0x3cc(%rsp) movq %rax, 0x3c0(%rsp) movq 0x3f0(%rsp), %rcx movq %rcx, 0x70(%rsp) movq 
0x3d8(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x3d0(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x3cc(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x3c0(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x3ec(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x3e8(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x3e4(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x448(%rsp) movl $0x10, 0x444(%rsp) movq 0x448(%rsp), %rax movslq 0x444(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x444(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x78(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x208(%rsp) cmpl $0x4, 0x28(%rax) jne 0x152ea4d movq 0x78(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x220(%rsp) movb $0x1, 0x29b(%rsp) testb $0x1, 0x29b(%rsp) jne 0x152eb7c leaq 0x1e0(%rsp), %rax movq %rax, 0x2c0(%rsp) movq 0x2c0(%rsp), %rax movq %rax, 0x4b8(%rsp) movq 0x4b8(%rsp), %rax movq %rax, 0x68(%rsp) cmpq $0x0, 0x8(%rax) je 0x152eb22 movq 0x68(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x4b4(%rsp) # imm = 0xFFFFFFFF movl 0x4b4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x4b0(%rsp) cmpl $0x1, 0x4b0(%rsp) jne 0x152eb22 movq 0x68(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x152eaf3 movq 0x68(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x152eaf1 jmp 0x152eb20 movq 0x68(%rsp), %rax movq (%rax), %rax movq %rax, 0x4c0(%rsp) cmpq $0x0, 0x4c0(%rsp) je 0x152eb1e movq 0x4c0(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x152eb20 jmp 0x152eb22 movq 0x68(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq 
$0x0, 0x8(%rax) jmp 0x152eb7a movq %rax, %rdi callq 0x5fc90 jmp 0x152eb7c leaq 0x1e0(%rsp), %rax movq %rax, 0x2b8(%rsp) movq 0x2b8(%rsp), %rax movq (%rax), %rax movq %rax, 0x58(%rsp) leaq 0x1e0(%rsp), %rax movq %rax, 0x2d0(%rsp) movq 0x2d0(%rsp), %rax movq %rax, 0x498(%rsp) movq 0x498(%rsp), %rax movq %rax, 0x60(%rsp) cmpq $0x0, 0x8(%rax) je 0x152ec5b movq 0x60(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x494(%rsp) # imm = 0xFFFFFFFF movl 0x494(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x490(%rsp) cmpl $0x1, 0x490(%rsp) jne 0x152ec5b movq 0x60(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x152ec2c movq 0x60(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x152ec2a jmp 0x152ec59 movq 0x60(%rsp), %rax movq (%rax), %rax movq %rax, 0x4d0(%rsp) cmpq $0x0, 0x4d0(%rsp) je 0x152ec57 movq 0x4d0(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x152ec59 jmp 0x152ec5b movq 0x60(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x152ecb3 movq %rax, %rdi callq 0x5fc90 movq 0x58(%rsp), %rax movq %rax, 0x228(%rsp) movq 0x260(%rsp), %rcx movl 0x230(%rsp), %eax leaq 0x190(%rsp), %rdx movq %rdx, 0x290(%rsp) movq %rcx, 0x288(%rsp) movl %eax, 0x284(%rsp) movq 0x288(%rsp), %rax movq %rax, 0x50(%rsp) movb $0x0, 0x283(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x284(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x190(%rsp), %r10 movq %r10, 0x428(%rsp) movl %r9d, 0x424(%rsp) movl %r8d, 0x420(%rsp) movl %edi, 0x41c(%rsp) movq %rsi, 0x410(%rsp) movq %rdx, 0x408(%rsp) movl %ecx, 0x404(%rsp) movq %rax, 0x3f8(%rsp) movq 0x428(%rsp), %rcx movq %rcx, 0x48(%rsp) movq 0x410(%rsp), %rax 
movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x408(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x404(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x3f8(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x424(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x420(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x41c(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x438(%rsp) movl $0x10, 0x434(%rsp) movq 0x438(%rsp), %rax movslq 0x434(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x434(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x50(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x1b8(%rsp) cmpl $0x4, 0x28(%rax) jne 0x152ee70 movq 0x50(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x1d0(%rsp) movb $0x1, 0x283(%rsp) testb $0x1, 0x283(%rsp) jne 0x152ef9f leaq 0x190(%rsp), %rax movq %rax, 0x2c8(%rsp) movq 0x2c8(%rsp), %rax movq %rax, 0x4a8(%rsp) movq 0x4a8(%rsp), %rax movq %rax, 0x40(%rsp) cmpq $0x0, 0x8(%rax) je 0x152ef45 movq 0x40(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x4a4(%rsp) # imm = 0xFFFFFFFF movl 0x4a4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x4a0(%rsp) cmpl $0x1, 0x4a0(%rsp) jne 0x152ef45 movq 0x40(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x152ef16 movq 0x40(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x152ef14 jmp 0x152ef43 movq 0x40(%rsp), %rax movq (%rax), %rax movq %rax, 0x4c8(%rsp) cmpq $0x0, 0x4c8(%rsp) je 0x152ef41 movq 0x4c8(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x152ef43 jmp 0x152ef45 movq 0x40(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 
0x152ef9d movq %rax, %rdi callq 0x5fc90 jmp 0x152ef9f leaq 0x190(%rsp), %rax movq %rax, 0x2b0(%rsp) movq 0x2b0(%rsp), %rax movq (%rax), %rax movq %rax, 0x30(%rsp) leaq 0x190(%rsp), %rax movq %rax, 0x2d8(%rsp) movq 0x2d8(%rsp), %rax movq %rax, 0x488(%rsp) movq 0x488(%rsp), %rax movq %rax, 0x38(%rsp) cmpq $0x0, 0x8(%rax) je 0x152f07e movq 0x38(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x484(%rsp) # imm = 0xFFFFFFFF movl 0x484(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x480(%rsp) cmpl $0x1, 0x480(%rsp) jne 0x152f07e movq 0x38(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x152f04f movq 0x38(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x152f04d jmp 0x152f07c movq 0x38(%rsp), %rax movq (%rax), %rax movq %rax, 0x4d8(%rsp) cmpq $0x0, 0x4d8(%rsp) je 0x152f07a movq 0x4d8(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x152f07c jmp 0x152f07e movq 0x38(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x152f0d6 movq %rax, %rdi callq 0x5fc90 movq 0x30(%rsp), %rax movq %rax, 0x1d8(%rsp) movq 0x258(%rsp), %rcx movl 0x230(%rsp), %eax leaq 0x140(%rsp), %rdx movq %rdx, 0x2f8(%rsp) movq %rcx, 0x2f0(%rsp) movl %eax, 0x2ec(%rsp) movq 0x2f0(%rsp), %rax movq %rax, 0x28(%rsp) movb $0x0, 0x2eb(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x2ec(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x140(%rsp), %r10 movq %r10, 0x3b8(%rsp) movl %r9d, 0x3b4(%rsp) movl %r8d, 0x3b0(%rsp) movl %edi, 0x3ac(%rsp) movq %rsi, 0x3a0(%rsp) movq %rdx, 0x398(%rsp) movl %ecx, 0x394(%rsp) movq %rax, 0x388(%rsp) movq 0x3b8(%rsp), %rcx movq %rcx, 0x20(%rsp) movq 0x3a0(%rsp), %rax movq %rax, (%rcx) 
movq $0x0, 0x8(%rcx) movq 0x398(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x394(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x388(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x3b4(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x3b0(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x3ac(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x458(%rsp) movl $0x10, 0x454(%rsp) movq 0x458(%rsp), %rax movslq 0x454(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x454(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x28(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x168(%rsp) cmpl $0x4, 0x28(%rax) jne 0x152f293 movq 0x28(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x180(%rsp) movb $0x1, 0x2eb(%rsp) testb $0x1, 0x2eb(%rsp) jne 0x152f3c2 leaq 0x140(%rsp), %rax movq %rax, 0x300(%rsp) movq 0x300(%rsp), %rax movq %rax, 0x468(%rsp) movq 0x468(%rsp), %rax movq %rax, 0x18(%rsp) cmpq $0x0, 0x8(%rax) je 0x152f368 movq 0x18(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x464(%rsp) # imm = 0xFFFFFFFF movl 0x464(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x460(%rsp) cmpl $0x1, 0x460(%rsp) jne 0x152f368 movq 0x18(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x152f339 movq 0x18(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x152f337 jmp 0x152f366 movq 0x18(%rsp), %rax movq (%rax), %rax movq %rax, 0x4e8(%rsp) cmpq $0x0, 0x4e8(%rsp) je 0x152f364 movq 0x4e8(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x152f366 jmp 0x152f368 movq 0x18(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x152f3c0 movq %rax, 
%rdi callq 0x5fc90 jmp 0x152f3c2 leaq 0x140(%rsp), %rax movq %rax, 0x308(%rsp) movq 0x308(%rsp), %rax movq (%rax), %rax movq %rax, 0x8(%rsp) leaq 0x140(%rsp), %rax movq %rax, 0x2e0(%rsp) movq 0x2e0(%rsp), %rax movq %rax, 0x478(%rsp) movq 0x478(%rsp), %rax movq %rax, 0x10(%rsp) cmpq $0x0, 0x8(%rax) je 0x152f4a1 movq 0x10(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x474(%rsp) # imm = 0xFFFFFFFF movl 0x474(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x470(%rsp) cmpl $0x1, 0x470(%rsp) jne 0x152f4a1 movq 0x10(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x152f472 movq 0x10(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x152f470 jmp 0x152f49f movq 0x10(%rsp), %rax movq (%rax), %rax movq %rax, 0x4e0(%rsp) cmpq $0x0, 0x4e0(%rsp) je 0x152f49d movq 0x4e0(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x152f49f jmp 0x152f4a1 movq 0x10(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x152f4f9 movq %rax, %rdi callq 0x5fc90 movq 0x8(%rsp), %rax movq %rax, 0x188(%rsp) movl $0x0, 0x13c(%rsp) movl 0x13c(%rsp), %eax addl $0x7, %eax cmpl 0x234(%rsp), %eax jge 0x152f621 movq 0x228(%rsp), %rax movq %rax, 0x318(%rsp) movq 0x318(%rsp), %rax vmovups (%rax), %ymm0 vmovaps %ymm0, 0x100(%rsp) movq 0x1d8(%rsp), %rax movq %rax, 0x310(%rsp) movq 0x310(%rsp), %rax vmovups (%rax), %ymm0 vmovaps %ymm0, 0xe0(%rsp) leaq 0x24f(%rsp), %rdi leaq 0x100(%rsp), %rsi leaq 0xe0(%rsp), %rdx callq 0x153c3c0 vmovaps %ymm0, 0xc0(%rsp) movq 0x188(%rsp), %rax vmovaps 0xc0(%rsp), %ymm0 movq %rax, 0x348(%rsp) vmovaps %ymm0, 0x320(%rsp) vmovaps 0x320(%rsp), %ymm0 movq 0x348(%rsp), %rax vmovups %ymm0, (%rax) movq 0x228(%rsp), %rax addq $0x20, %rax movq %rax, 0x228(%rsp) movq 0x1d8(%rsp), %rax addq $0x20, %rax movq %rax, 0x1d8(%rsp) movq 0x188(%rsp), %rax addq $0x20, 
%rax movq %rax, 0x188(%rsp) movl 0x13c(%rsp), %eax addl $0x8, %eax movl %eax, 0x13c(%rsp) jmp 0x152f511 jmp 0x152f623 movl 0x13c(%rsp), %eax addl $0x3, %eax cmpl 0x234(%rsp), %eax jge 0x152f736 movq 0x228(%rsp), %rax movq %rax, 0x358(%rsp) movq 0x358(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0xb0(%rsp) movq 0x1d8(%rsp), %rax movq %rax, 0x350(%rsp) movq 0x350(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0xa0(%rsp) leaq 0x24f(%rsp), %rdi leaq 0xb0(%rsp), %rsi leaq 0xa0(%rsp), %rdx vzeroupper callq 0x153c410 vmovaps %xmm0, 0x90(%rsp) movq 0x188(%rsp), %rax vmovaps 0x90(%rsp), %xmm0 movq %rax, 0x378(%rsp) vmovaps %xmm0, 0x360(%rsp) vmovaps 0x360(%rsp), %xmm0 movq 0x378(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x228(%rsp), %rax addq $0x10, %rax movq %rax, 0x228(%rsp) movq 0x1d8(%rsp), %rax addq $0x10, %rax movq %rax, 0x1d8(%rsp) movq 0x188(%rsp), %rax addq $0x10, %rax movq %rax, 0x188(%rsp) movl 0x13c(%rsp), %eax addl $0x4, %eax movl %eax, 0x13c(%rsp) jmp 0x152f623 jmp 0x152f738 movl 0x13c(%rsp), %eax cmpl 0x234(%rsp), %eax jge 0x152f7c6 movq 0x228(%rsp), %rsi movq 0x1d8(%rsp), %rdx leaq 0x24f(%rsp), %rdi vzeroupper callq 0x153c450 movq 0x188(%rsp), %rax vmovss %xmm0, (%rax) movq 0x228(%rsp), %rax addq $0x4, %rax movq %rax, 0x228(%rsp) movq 0x1d8(%rsp), %rax addq $0x4, %rax movq %rax, 0x1d8(%rsp) movq 0x188(%rsp), %rax addq $0x4, %rax movq %rax, 0x188(%rsp) movl 0x13c(%rsp), %eax addl $0x1, %eax movl %eax, 0x13c(%rsp) jmp 0x152f738 jmp 0x152f7c8 movl 0x230(%rsp), %eax addl $0x1, %eax movl %eax, 0x230(%rsp) jmp 0x152e889 movl $0x0, 0x274(%rsp) movl 0x274(%rsp), %eax movq %rbp, %rsp popq %rbp vzeroupper retq nopl (%rax,%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/binaryop_x86_fma.cpp
int ncnn::binary_op_2_3_4_20<ncnn::BinaryOp_x86_fma_functor::binary_op_div>(ncnn::Mat const&, ncnn::Mat const&, ncnn::Mat&, ncnn::Option const&)
static int binary_op_2_3_4_20(const Mat& a, const Mat& b, Mat& c, const Option& opt) { Op op; int w = b.w; int h = b.h; int d = b.d; int channels = b.c; int elempack = b.elempack; int size = w * h * d * elempack; // type 2 3 4 20 c.create_like(b, opt.blob_allocator); if (c.empty()) return -100; #pragma omp parallel for num_threads(opt.num_threads) for (int q = 0; q < channels; q++) { const float a0 = a[0]; const float* ptr = b.channel(q); float* outptr = c.channel(q); int i = 0; #if __SSE2__ #if __AVX__ #if __AVX512F__ __m512 _a0_avx512 = _mm512_set1_ps(a0); for (; i + 15 < size; i += 16) { __m512 _p = _mm512_loadu_ps(ptr); __m512 _outp = op.func_pack16(_a0_avx512, _p); _mm512_storeu_ps(outptr, _outp); ptr += 16; outptr += 16; } #endif // __AVX512F__ __m256 _a0_avx = _mm256_set1_ps(a0); for (; i + 7 < size; i += 8) { __m256 _p = _mm256_loadu_ps(ptr); __m256 _outp = op.func_pack8(_a0_avx, _p); _mm256_storeu_ps(outptr, _outp); ptr += 8; outptr += 8; } #endif // __AVX__ __m128 _a0 = _mm_set1_ps(a0); for (; i + 3 < size; i += 4) { __m128 _p = _mm_load_ps(ptr); __m128 _outp = op.func_pack4(_a0, _p); _mm_store_ps(outptr, _outp); ptr += 4; outptr += 4; } #endif // __SSE2__ for (; i < size; i++) { *outptr = op.func(a0, *ptr); ptr += 1; outptr += 1; } } return 0; }
pushq %rbp movq %rsp, %rbp andq $-0x20, %rsp subq $0x460, %rsp # imm = 0x460 movq %rdi, 0x200(%rsp) movq %rsi, 0x1f8(%rsp) movq %rdx, 0x1f0(%rsp) movq %rcx, 0x1e8(%rsp) movq 0x1f8(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x1e0(%rsp) movq 0x1f8(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0x1dc(%rsp) movq 0x1f8(%rsp), %rax movl 0x34(%rax), %eax movl %eax, 0x1d8(%rsp) movq 0x1f8(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0x1d4(%rsp) movq 0x1f8(%rsp), %rax movl 0x18(%rax), %eax movl %eax, 0x1d0(%rsp) movl 0x1e0(%rsp), %eax imull 0x1dc(%rsp), %eax imull 0x1d8(%rsp), %eax imull 0x1d0(%rsp), %eax movl %eax, 0x1cc(%rsp) movq 0x1f0(%rsp), %rdi movq 0x1f8(%rsp), %rsi movq 0x1e8(%rsp), %rax movq 0x8(%rax), %rdx callq 0x65b60 movq 0x1f0(%rsp), %rax movq %rax, 0x210(%rsp) movq 0x210(%rsp), %rcx movq %rcx, 0x60(%rsp) movb $0x1, %al cmpq $0x0, (%rcx) movb %al, 0x6f(%rsp) je 0x1533275 movq 0x60(%rsp), %rax movq %rax, 0x2d0(%rsp) movq 0x2d0(%rsp), %rcx movq 0x40(%rcx), %rax movslq 0x38(%rcx), %rcx imulq %rcx, %rax cmpq $0x0, %rax sete %al movb %al, 0x6f(%rsp) movb 0x6f(%rsp), %al testb $0x1, %al jne 0x153327f jmp 0x153328f movl $0xffffff9c, 0x20c(%rsp) # imm = 0xFFFFFF9C jmp 0x1533eb4 movl $0x0, 0x1c8(%rsp) movl 0x1c8(%rsp), %eax cmpl 0x1d4(%rsp), %eax jge 0x1533ea9 movq 0x200(%rsp), %rax movq %rax, 0x3d0(%rsp) movq $0x0, 0x3c8(%rsp) movq 0x3d0(%rsp), %rax movq (%rax), %rax movq 0x3c8(%rsp), %rcx vmovss (%rax,%rcx,4), %xmm0 vmovss %xmm0, 0x1c4(%rsp) movq 0x1f8(%rsp), %rcx movl 0x1c8(%rsp), %eax leaq 0x170(%rsp), %rdx movq %rdx, 0x228(%rsp) movq %rcx, 0x220(%rsp) movl %eax, 0x21c(%rsp) movq 0x220(%rsp), %rax movq %rax, 0x58(%rsp) movb $0x0, 0x21b(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x21c(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x170(%rsp), %r10 movq %r10, 0x340(%rsp) movl %r9d, 0x33c(%rsp) movl 
%r8d, 0x338(%rsp) movl %edi, 0x334(%rsp) movq %rsi, 0x328(%rsp) movq %rdx, 0x320(%rsp) movl %ecx, 0x31c(%rsp) movq %rax, 0x310(%rsp) movq 0x340(%rsp), %rcx movq %rcx, 0x50(%rsp) movq 0x328(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x320(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x31c(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x310(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x33c(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x338(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x334(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x350(%rsp) movl $0x10, 0x34c(%rsp) movq 0x350(%rsp), %rax movslq 0x34c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x34c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x58(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x198(%rsp) cmpl $0x4, 0x28(%rax) jne 0x153349b movq 0x58(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x1b0(%rsp) movb $0x1, 0x21b(%rsp) testb $0x1, 0x21b(%rsp) jne 0x15335ca leaq 0x170(%rsp), %rax movq %rax, 0x238(%rsp) movq 0x238(%rsp), %rax movq %rax, 0x3a0(%rsp) movq 0x3a0(%rsp), %rax movq %rax, 0x48(%rsp) cmpq $0x0, 0x8(%rax) je 0x1533570 movq 0x48(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x39c(%rsp) # imm = 0xFFFFFFFF movl 0x39c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x398(%rsp) cmpl $0x1, 0x398(%rsp) jne 0x1533570 movq 0x48(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1533541 movq 0x48(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x153353f jmp 0x153356e movq 0x48(%rsp), %rax movq (%rax), %rax movq %rax, 0x3a8(%rsp) cmpq $0x0, 0x3a8(%rsp) je 0x153356c movq 0x3a8(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x153356e jmp 0x1533570 movq 0x48(%rsp), %rax movq $0x0, (%rax) 
movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x15335c8 movq %rax, %rdi callq 0x5fc90 jmp 0x15335ca leaq 0x170(%rsp), %rax movq %rax, 0x230(%rsp) movq 0x230(%rsp), %rax movq (%rax), %rax movq %rax, 0x38(%rsp) leaq 0x170(%rsp), %rax movq %rax, 0x240(%rsp) movq 0x240(%rsp), %rax movq %rax, 0x390(%rsp) movq 0x390(%rsp), %rax movq %rax, 0x40(%rsp) cmpq $0x0, 0x8(%rax) je 0x15336a9 movq 0x40(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x38c(%rsp) # imm = 0xFFFFFFFF movl 0x38c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x388(%rsp) cmpl $0x1, 0x388(%rsp) jne 0x15336a9 movq 0x40(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x153367a movq 0x40(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x1533678 jmp 0x15336a7 movq 0x40(%rsp), %rax movq (%rax), %rax movq %rax, 0x3b0(%rsp) cmpq $0x0, 0x3b0(%rsp) je 0x15336a5 movq 0x3b0(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x15336a7 jmp 0x15336a9 movq 0x40(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1533701 movq %rax, %rdi callq 0x5fc90 movq 0x38(%rsp), %rax movq %rax, 0x1b8(%rsp) movq 0x1f0(%rsp), %rcx movl 0x1c8(%rsp), %eax leaq 0x120(%rsp), %rdx movq %rdx, 0x260(%rsp) movq %rcx, 0x258(%rsp) movl %eax, 0x254(%rsp) movq 0x258(%rsp), %rax movq %rax, 0x30(%rsp) movb $0x0, 0x253(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x254(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x120(%rsp), %r10 movq %r10, 0x308(%rsp) movl %r9d, 0x304(%rsp) movl %r8d, 0x300(%rsp) 
movl %edi, 0x2fc(%rsp) movq %rsi, 0x2f0(%rsp) movq %rdx, 0x2e8(%rsp) movl %ecx, 0x2e4(%rsp) movq %rax, 0x2d8(%rsp) movq 0x308(%rsp), %rcx movq %rcx, 0x28(%rsp) movq 0x2f0(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x2e8(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x2e4(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x2d8(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x304(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x300(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x2fc(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x360(%rsp) movl $0x10, 0x35c(%rsp) movq 0x360(%rsp), %rax movslq 0x35c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x35c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x30(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x148(%rsp) cmpl $0x4, 0x28(%rax) jne 0x15338be movq 0x30(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x160(%rsp) movb $0x1, 0x253(%rsp) testb $0x1, 0x253(%rsp) jne 0x15339ed leaq 0x120(%rsp), %rax movq %rax, 0x268(%rsp) movq 0x268(%rsp), %rax movq %rax, 0x370(%rsp) movq 0x370(%rsp), %rax movq %rax, 0x20(%rsp) cmpq $0x0, 0x8(%rax) je 0x1533993 movq 0x20(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x36c(%rsp) # imm = 0xFFFFFFFF movl 0x36c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x368(%rsp) cmpl $0x1, 0x368(%rsp) jne 0x1533993 movq 0x20(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1533964 movq 0x20(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x1533962 jmp 0x1533991 movq 0x20(%rsp), %rax movq (%rax), %rax movq %rax, 0x3c0(%rsp) cmpq $0x0, 0x3c0(%rsp) je 0x153398f movq 0x3c0(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x1533991 jmp 0x1533993 movq 0x20(%rsp), %rax movq $0x0, (%rax) movq $0x0, 
0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x15339eb movq %rax, %rdi callq 0x5fc90 jmp 0x15339ed leaq 0x120(%rsp), %rax movq %rax, 0x270(%rsp) movq 0x270(%rsp), %rax movq (%rax), %rax movq %rax, 0x10(%rsp) leaq 0x120(%rsp), %rax movq %rax, 0x248(%rsp) movq 0x248(%rsp), %rax movq %rax, 0x380(%rsp) movq 0x380(%rsp), %rax movq %rax, 0x18(%rsp) cmpq $0x0, 0x8(%rax) je 0x1533acc movq 0x18(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x37c(%rsp) # imm = 0xFFFFFFFF movl 0x37c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x378(%rsp) cmpl $0x1, 0x378(%rsp) jne 0x1533acc movq 0x18(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1533a9d movq 0x18(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x1533a9b jmp 0x1533aca movq 0x18(%rsp), %rax movq (%rax), %rax movq %rax, 0x3b8(%rsp) cmpq $0x0, 0x3b8(%rsp) je 0x1533ac8 movq 0x3b8(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x1533aca jmp 0x1533acc movq 0x18(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1533b24 movq %rax, %rdi callq 0x5fc90 movq 0x10(%rsp), %rax movq %rax, 0x168(%rsp) movl $0x0, 0x11c(%rsp) vmovss 0x1c4(%rsp), %xmm0 vmovss %xmm0, 0x3dc(%rsp) vmovss 0x3dc(%rsp), %xmm0 vmovss %xmm0, 0xc(%rsp) vmovss %xmm0, 0x44c(%rsp) vmovss %xmm0, 0x448(%rsp) vmovss %xmm0, 0x444(%rsp) vmovss %xmm0, 0x440(%rsp) vmovss %xmm0, 0x43c(%rsp) vmovss %xmm0, 0x438(%rsp) vmovss %xmm0, 0x434(%rsp) vmovss %xmm0, 0x430(%rsp) vmovss 0x444(%rsp), %xmm1 vmovss 0x440(%rsp), %xmm0 vinsertps $0x10, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[2,3] vmovss 0x448(%rsp), %xmm1 vinsertps $0x20, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm1[0],xmm0[3] vmovss 
0x44c(%rsp), %xmm1 vinsertps $0x30, %xmm1, %xmm0, %xmm1 # xmm1 = xmm0[0,1,2],xmm1[0] vmovss 0x434(%rsp), %xmm2 vmovss 0x430(%rsp), %xmm0 vinsertps $0x10, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm2[0],xmm0[2,3] vmovss 0x438(%rsp), %xmm2 vinsertps $0x20, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm2[0],xmm0[3] vmovss 0x43c(%rsp), %xmm2 vinsertps $0x30, %xmm2, %xmm0, %xmm2 # xmm2 = xmm0[0,1,2],xmm2[0] vmovaps %xmm2, %xmm0 vinsertf128 $0x1, %xmm1, %ymm0, %ymm0 vmovaps %ymm0, 0x400(%rsp) vmovaps 0x400(%rsp), %ymm0 vmovaps %ymm0, 0xe0(%rsp) movl 0x11c(%rsp), %eax addl $0x7, %eax cmpl 0x1cc(%rsp), %eax jge 0x1533d0d movq 0x1b8(%rsp), %rax movq %rax, 0x278(%rsp) movq 0x278(%rsp), %rax vmovups (%rax), %ymm0 vmovaps %ymm0, 0xc0(%rsp) leaq 0x1e7(%rsp), %rdi leaq 0xe0(%rsp), %rsi leaq 0xc0(%rsp), %rdx callq 0x153c480 vmovaps %ymm0, 0xa0(%rsp) movq 0x168(%rsp), %rax vmovaps 0xa0(%rsp), %ymm0 movq %rax, 0x2a0(%rsp) vmovaps %ymm0, 0x280(%rsp) vmovaps 0x280(%rsp), %ymm0 movq 0x2a0(%rsp), %rax vmovups %ymm0, (%rax) movq 0x1b8(%rsp), %rax addq $0x20, %rax movq %rax, 0x1b8(%rsp) movq 0x168(%rsp), %rax addq $0x20, %rax movq %rax, 0x168(%rsp) movl 0x11c(%rsp), %eax addl $0x8, %eax movl %eax, 0x11c(%rsp) jmp 0x1533c36 vmovss 0x1c4(%rsp), %xmm0 vmovss %xmm0, 0x3fc(%rsp) vbroadcastss 0x3fc(%rsp), %xmm0 vmovaps %xmm0, 0x3e0(%rsp) vmovaps 0x3e0(%rsp), %xmm0 vmovaps %xmm0, 0x90(%rsp) movl 0x11c(%rsp), %eax addl $0x3, %eax cmpl 0x1cc(%rsp), %eax jge 0x1533e18 movq 0x1b8(%rsp), %rax movq %rax, 0x2a8(%rsp) movq 0x2a8(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x80(%rsp) leaq 0x1e7(%rsp), %rdi leaq 0x90(%rsp), %rsi leaq 0x80(%rsp), %rdx vzeroupper callq 0x153c4d0 vmovaps %xmm0, 0x70(%rsp) movq 0x168(%rsp), %rax vmovaps 0x70(%rsp), %xmm0 movq %rax, 0x2c8(%rsp) vmovaps %xmm0, 0x2b0(%rsp) vmovaps 0x2b0(%rsp), %xmm0 movq 0x2c8(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x1b8(%rsp), %rax addq $0x10, %rax movq %rax, 0x1b8(%rsp) movq 0x168(%rsp), %rax addq $0x10, %rax movq %rax, 0x168(%rsp) movl 
0x11c(%rsp), %eax addl $0x4, %eax movl %eax, 0x11c(%rsp) jmp 0x1533d44 jmp 0x1533e1a movl 0x11c(%rsp), %eax cmpl 0x1cc(%rsp), %eax jge 0x1533e91 movq 0x1b8(%rsp), %rdx leaq 0x1e7(%rsp), %rdi leaq 0x1c4(%rsp), %rsi vzeroupper callq 0x153c510 movq 0x168(%rsp), %rax vmovss %xmm0, (%rax) movq 0x1b8(%rsp), %rax addq $0x4, %rax movq %rax, 0x1b8(%rsp) movq 0x168(%rsp), %rax addq $0x4, %rax movq %rax, 0x168(%rsp) movl 0x11c(%rsp), %eax addl $0x1, %eax movl %eax, 0x11c(%rsp) jmp 0x1533e1a jmp 0x1533e93 movl 0x1c8(%rsp), %eax addl $0x1, %eax movl %eax, 0x1c8(%rsp) jmp 0x153329a movl $0x0, 0x20c(%rsp) movl 0x20c(%rsp), %eax movq %rbp, %rsp popq %rbp vzeroupper retq nopw %cs:(%rax,%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/binaryop_x86_fma.cpp
int ncnn::binary_op_7_13_19_29<ncnn::BinaryOp_x86_fma_functor::binary_op_max>(ncnn::Mat const&, ncnn::Mat const&, ncnn::Mat&, ncnn::Option const&)
static int binary_op_7_13_19_29(const Mat& a, const Mat& b, Mat& c, const Option& opt) { Op op; int w = a.w; int h = a.h; int d = a.d; int channels = a.c; int elempack = a.elempack; int size = w * h * d * elempack; // type 7 13 19 29 c.create_like(a, opt.blob_allocator); if (c.empty()) return -100; #pragma omp parallel for num_threads(opt.num_threads) for (int q = 0; q < channels; q++) { const float* ptr = a.channel(q); const float* ptr1 = b.channel(q); float* outptr = c.channel(q); int i = 0; #if __SSE2__ #if __AVX__ #if __AVX512F__ for (; i + 15 < size; i += 16) { __m512 _p = _mm512_loadu_ps(ptr); __m512 _p1 = _mm512_loadu_ps(ptr1); __m512 _outp = op.func_pack16(_p, _p1); _mm512_storeu_ps(outptr, _outp); ptr += 16; ptr1 += 16; outptr += 16; } #endif // __AVX512F__ for (; i + 7 < size; i += 8) { __m256 _p = _mm256_loadu_ps(ptr); __m256 _p1 = _mm256_loadu_ps(ptr1); __m256 _outp = op.func_pack8(_p, _p1); _mm256_storeu_ps(outptr, _outp); ptr += 8; ptr1 += 8; outptr += 8; } #endif // __AVX__ for (; i + 3 < size; i += 4) { __m128 _p = _mm_load_ps(ptr); __m128 _p1 = _mm_load_ps(ptr1); __m128 _outp = op.func_pack4(_p, _p1); _mm_store_ps(outptr, _outp); ptr += 4; ptr1 += 4; outptr += 4; } #endif // __SSE2__ for (; i < size; i++) { *outptr = op.func(*ptr, *ptr1); ptr += 1; ptr1 += 1; outptr += 1; } } return 0; }
pushq %rbp movq %rsp, %rbp andq $-0x20, %rsp subq $0x500, %rsp # imm = 0x500 movq %rdi, 0x268(%rsp) movq %rsi, 0x260(%rsp) movq %rdx, 0x258(%rsp) movq %rcx, 0x250(%rsp) movq 0x268(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x248(%rsp) movq 0x268(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0x244(%rsp) movq 0x268(%rsp), %rax movl 0x34(%rax), %eax movl %eax, 0x240(%rsp) movq 0x268(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0x23c(%rsp) movq 0x268(%rsp), %rax movl 0x18(%rax), %eax movl %eax, 0x238(%rsp) movl 0x248(%rsp), %eax imull 0x244(%rsp), %eax imull 0x240(%rsp), %eax imull 0x238(%rsp), %eax movl %eax, 0x234(%rsp) movq 0x258(%rsp), %rdi movq 0x268(%rsp), %rsi movq 0x250(%rsp), %rax movq 0x8(%rax), %rdx callq 0x65b60 movq 0x258(%rsp), %rax movq %rax, 0x278(%rsp) movq 0x278(%rsp), %rcx movq %rcx, 0x80(%rsp) movb $0x1, %al cmpq $0x0, (%rcx) movb %al, 0x8f(%rsp) je 0x1534001 movq 0x80(%rsp), %rax movq %rax, 0x380(%rsp) movq 0x380(%rsp), %rcx movq 0x40(%rcx), %rax movslq 0x38(%rcx), %rcx imulq %rcx, %rax cmpq $0x0, %rax sete %al movb %al, 0x8f(%rsp) movb 0x8f(%rsp), %al testb $0x1, %al jne 0x153400e jmp 0x153401e movl $0xffffff9c, 0x274(%rsp) # imm = 0xFFFFFF9C jmp 0x1534f89 movl $0x0, 0x230(%rsp) movl 0x230(%rsp), %eax cmpl 0x23c(%rsp), %eax jge 0x1534f7e movq 0x268(%rsp), %rcx movl 0x230(%rsp), %eax leaq 0x1e0(%rsp), %rdx movq %rdx, 0x2a8(%rsp) movq %rcx, 0x2a0(%rsp) movl %eax, 0x29c(%rsp) movq 0x2a0(%rsp), %rax movq %rax, 0x78(%rsp) movb $0x0, 0x29b(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x29c(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x1e0(%rsp), %r10 movq %r10, 0x3f0(%rsp) movl %r9d, 0x3ec(%rsp) movl %r8d, 0x3e8(%rsp) movl %edi, 0x3e4(%rsp) movq %rsi, 0x3d8(%rsp) movq %rdx, 0x3d0(%rsp) movl %ecx, 0x3cc(%rsp) movq %rax, 0x3c0(%rsp) movq 0x3f0(%rsp), %rcx movq %rcx, 0x70(%rsp) movq 
0x3d8(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x3d0(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x3cc(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x3c0(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x3ec(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x3e8(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x3e4(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x448(%rsp) movl $0x10, 0x444(%rsp) movq 0x448(%rsp), %rax movslq 0x444(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x444(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x78(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x208(%rsp) cmpl $0x4, 0x28(%rax) jne 0x15341ed movq 0x78(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x220(%rsp) movb $0x1, 0x29b(%rsp) testb $0x1, 0x29b(%rsp) jne 0x153431c leaq 0x1e0(%rsp), %rax movq %rax, 0x2c0(%rsp) movq 0x2c0(%rsp), %rax movq %rax, 0x4b8(%rsp) movq 0x4b8(%rsp), %rax movq %rax, 0x68(%rsp) cmpq $0x0, 0x8(%rax) je 0x15342c2 movq 0x68(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x4b4(%rsp) # imm = 0xFFFFFFFF movl 0x4b4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x4b0(%rsp) cmpl $0x1, 0x4b0(%rsp) jne 0x15342c2 movq 0x68(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1534293 movq 0x68(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x1534291 jmp 0x15342c0 movq 0x68(%rsp), %rax movq (%rax), %rax movq %rax, 0x4c0(%rsp) cmpq $0x0, 0x4c0(%rsp) je 0x15342be movq 0x4c0(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x15342c0 jmp 0x15342c2 movq 0x68(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq 
$0x0, 0x8(%rax) jmp 0x153431a movq %rax, %rdi callq 0x5fc90 jmp 0x153431c leaq 0x1e0(%rsp), %rax movq %rax, 0x2b8(%rsp) movq 0x2b8(%rsp), %rax movq (%rax), %rax movq %rax, 0x58(%rsp) leaq 0x1e0(%rsp), %rax movq %rax, 0x2d0(%rsp) movq 0x2d0(%rsp), %rax movq %rax, 0x498(%rsp) movq 0x498(%rsp), %rax movq %rax, 0x60(%rsp) cmpq $0x0, 0x8(%rax) je 0x15343fb movq 0x60(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x494(%rsp) # imm = 0xFFFFFFFF movl 0x494(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x490(%rsp) cmpl $0x1, 0x490(%rsp) jne 0x15343fb movq 0x60(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x15343cc movq 0x60(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x15343ca jmp 0x15343f9 movq 0x60(%rsp), %rax movq (%rax), %rax movq %rax, 0x4d0(%rsp) cmpq $0x0, 0x4d0(%rsp) je 0x15343f7 movq 0x4d0(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x15343f9 jmp 0x15343fb movq 0x60(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1534453 movq %rax, %rdi callq 0x5fc90 movq 0x58(%rsp), %rax movq %rax, 0x228(%rsp) movq 0x260(%rsp), %rcx movl 0x230(%rsp), %eax leaq 0x190(%rsp), %rdx movq %rdx, 0x290(%rsp) movq %rcx, 0x288(%rsp) movl %eax, 0x284(%rsp) movq 0x288(%rsp), %rax movq %rax, 0x50(%rsp) movb $0x0, 0x283(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x284(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x190(%rsp), %r10 movq %r10, 0x428(%rsp) movl %r9d, 0x424(%rsp) movl %r8d, 0x420(%rsp) movl %edi, 0x41c(%rsp) movq %rsi, 0x410(%rsp) movq %rdx, 0x408(%rsp) movl %ecx, 0x404(%rsp) movq %rax, 0x3f8(%rsp) movq 0x428(%rsp), %rcx movq %rcx, 0x48(%rsp) movq 0x410(%rsp), %rax 
movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x408(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x404(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x3f8(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x424(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x420(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x41c(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x438(%rsp) movl $0x10, 0x434(%rsp) movq 0x438(%rsp), %rax movslq 0x434(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x434(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x50(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x1b8(%rsp) cmpl $0x4, 0x28(%rax) jne 0x1534610 movq 0x50(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x1d0(%rsp) movb $0x1, 0x283(%rsp) testb $0x1, 0x283(%rsp) jne 0x153473f leaq 0x190(%rsp), %rax movq %rax, 0x2c8(%rsp) movq 0x2c8(%rsp), %rax movq %rax, 0x4a8(%rsp) movq 0x4a8(%rsp), %rax movq %rax, 0x40(%rsp) cmpq $0x0, 0x8(%rax) je 0x15346e5 movq 0x40(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x4a4(%rsp) # imm = 0xFFFFFFFF movl 0x4a4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x4a0(%rsp) cmpl $0x1, 0x4a0(%rsp) jne 0x15346e5 movq 0x40(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x15346b6 movq 0x40(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x15346b4 jmp 0x15346e3 movq 0x40(%rsp), %rax movq (%rax), %rax movq %rax, 0x4c8(%rsp) cmpq $0x0, 0x4c8(%rsp) je 0x15346e1 movq 0x4c8(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x15346e3 jmp 0x15346e5 movq 0x40(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 
0x153473d movq %rax, %rdi callq 0x5fc90 jmp 0x153473f leaq 0x190(%rsp), %rax movq %rax, 0x2b0(%rsp) movq 0x2b0(%rsp), %rax movq (%rax), %rax movq %rax, 0x30(%rsp) leaq 0x190(%rsp), %rax movq %rax, 0x2d8(%rsp) movq 0x2d8(%rsp), %rax movq %rax, 0x488(%rsp) movq 0x488(%rsp), %rax movq %rax, 0x38(%rsp) cmpq $0x0, 0x8(%rax) je 0x153481e movq 0x38(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x484(%rsp) # imm = 0xFFFFFFFF movl 0x484(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x480(%rsp) cmpl $0x1, 0x480(%rsp) jne 0x153481e movq 0x38(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x15347ef movq 0x38(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x15347ed jmp 0x153481c movq 0x38(%rsp), %rax movq (%rax), %rax movq %rax, 0x4d8(%rsp) cmpq $0x0, 0x4d8(%rsp) je 0x153481a movq 0x4d8(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x153481c jmp 0x153481e movq 0x38(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1534876 movq %rax, %rdi callq 0x5fc90 movq 0x30(%rsp), %rax movq %rax, 0x1d8(%rsp) movq 0x258(%rsp), %rcx movl 0x230(%rsp), %eax leaq 0x140(%rsp), %rdx movq %rdx, 0x2f8(%rsp) movq %rcx, 0x2f0(%rsp) movl %eax, 0x2ec(%rsp) movq 0x2f0(%rsp), %rax movq %rax, 0x28(%rsp) movb $0x0, 0x2eb(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x2ec(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x140(%rsp), %r10 movq %r10, 0x3b8(%rsp) movl %r9d, 0x3b4(%rsp) movl %r8d, 0x3b0(%rsp) movl %edi, 0x3ac(%rsp) movq %rsi, 0x3a0(%rsp) movq %rdx, 0x398(%rsp) movl %ecx, 0x394(%rsp) movq %rax, 0x388(%rsp) movq 0x3b8(%rsp), %rcx movq %rcx, 0x20(%rsp) movq 0x3a0(%rsp), %rax movq %rax, (%rcx) 
movq $0x0, 0x8(%rcx) movq 0x398(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x394(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x388(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x3b4(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x3b0(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x3ac(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x458(%rsp) movl $0x10, 0x454(%rsp) movq 0x458(%rsp), %rax movslq 0x454(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x454(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x28(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x168(%rsp) cmpl $0x4, 0x28(%rax) jne 0x1534a33 movq 0x28(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x180(%rsp) movb $0x1, 0x2eb(%rsp) testb $0x1, 0x2eb(%rsp) jne 0x1534b62 leaq 0x140(%rsp), %rax movq %rax, 0x300(%rsp) movq 0x300(%rsp), %rax movq %rax, 0x468(%rsp) movq 0x468(%rsp), %rax movq %rax, 0x18(%rsp) cmpq $0x0, 0x8(%rax) je 0x1534b08 movq 0x18(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x464(%rsp) # imm = 0xFFFFFFFF movl 0x464(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x460(%rsp) cmpl $0x1, 0x460(%rsp) jne 0x1534b08 movq 0x18(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1534ad9 movq 0x18(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x1534ad7 jmp 0x1534b06 movq 0x18(%rsp), %rax movq (%rax), %rax movq %rax, 0x4e8(%rsp) cmpq $0x0, 0x4e8(%rsp) je 0x1534b04 movq 0x4e8(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x1534b06 jmp 0x1534b08 movq 0x18(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1534b60 movq %rax, 
%rdi callq 0x5fc90 jmp 0x1534b62 leaq 0x140(%rsp), %rax movq %rax, 0x308(%rsp) movq 0x308(%rsp), %rax movq (%rax), %rax movq %rax, 0x8(%rsp) leaq 0x140(%rsp), %rax movq %rax, 0x2e0(%rsp) movq 0x2e0(%rsp), %rax movq %rax, 0x478(%rsp) movq 0x478(%rsp), %rax movq %rax, 0x10(%rsp) cmpq $0x0, 0x8(%rax) je 0x1534c41 movq 0x10(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x474(%rsp) # imm = 0xFFFFFFFF movl 0x474(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x470(%rsp) cmpl $0x1, 0x470(%rsp) jne 0x1534c41 movq 0x10(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1534c12 movq 0x10(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x1534c10 jmp 0x1534c3f movq 0x10(%rsp), %rax movq (%rax), %rax movq %rax, 0x4e0(%rsp) cmpq $0x0, 0x4e0(%rsp) je 0x1534c3d movq 0x4e0(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x1534c3f jmp 0x1534c41 movq 0x10(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1534c99 movq %rax, %rdi callq 0x5fc90 movq 0x8(%rsp), %rax movq %rax, 0x188(%rsp) movl $0x0, 0x13c(%rsp) movl 0x13c(%rsp), %eax addl $0x7, %eax cmpl 0x234(%rsp), %eax jge 0x1534dc1 movq 0x228(%rsp), %rax movq %rax, 0x318(%rsp) movq 0x318(%rsp), %rax vmovups (%rax), %ymm0 vmovaps %ymm0, 0x100(%rsp) movq 0x1d8(%rsp), %rax movq %rax, 0x310(%rsp) movq 0x310(%rsp), %rax vmovups (%rax), %ymm0 vmovaps %ymm0, 0xe0(%rsp) leaq 0x24f(%rsp), %rdi leaq 0x100(%rsp), %rsi leaq 0xe0(%rsp), %rdx callq 0x153c540 vmovaps %ymm0, 0xc0(%rsp) movq 0x188(%rsp), %rax vmovaps 0xc0(%rsp), %ymm0 movq %rax, 0x348(%rsp) vmovaps %ymm0, 0x320(%rsp) vmovaps 0x320(%rsp), %ymm0 movq 0x348(%rsp), %rax vmovups %ymm0, (%rax) movq 0x228(%rsp), %rax addq $0x20, %rax movq %rax, 0x228(%rsp) movq 0x1d8(%rsp), %rax addq $0x20, %rax movq %rax, 0x1d8(%rsp) movq 0x188(%rsp), %rax addq $0x20, 
%rax movq %rax, 0x188(%rsp) movl 0x13c(%rsp), %eax addl $0x8, %eax movl %eax, 0x13c(%rsp) jmp 0x1534cb1 jmp 0x1534dc3 movl 0x13c(%rsp), %eax addl $0x3, %eax cmpl 0x234(%rsp), %eax jge 0x1534ed6 movq 0x228(%rsp), %rax movq %rax, 0x358(%rsp) movq 0x358(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0xb0(%rsp) movq 0x1d8(%rsp), %rax movq %rax, 0x350(%rsp) movq 0x350(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0xa0(%rsp) leaq 0x24f(%rsp), %rdi leaq 0xb0(%rsp), %rsi leaq 0xa0(%rsp), %rdx vzeroupper callq 0x153c5a0 vmovaps %xmm0, 0x90(%rsp) movq 0x188(%rsp), %rax vmovaps 0x90(%rsp), %xmm0 movq %rax, 0x378(%rsp) vmovaps %xmm0, 0x360(%rsp) vmovaps 0x360(%rsp), %xmm0 movq 0x378(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x228(%rsp), %rax addq $0x10, %rax movq %rax, 0x228(%rsp) movq 0x1d8(%rsp), %rax addq $0x10, %rax movq %rax, 0x1d8(%rsp) movq 0x188(%rsp), %rax addq $0x10, %rax movq %rax, 0x188(%rsp) movl 0x13c(%rsp), %eax addl $0x4, %eax movl %eax, 0x13c(%rsp) jmp 0x1534dc3 jmp 0x1534ed8 movl 0x13c(%rsp), %eax cmpl 0x234(%rsp), %eax jge 0x1534f66 movq 0x228(%rsp), %rsi movq 0x1d8(%rsp), %rdx leaq 0x24f(%rsp), %rdi vzeroupper callq 0x153c5e0 movq 0x188(%rsp), %rax vmovss %xmm0, (%rax) movq 0x228(%rsp), %rax addq $0x4, %rax movq %rax, 0x228(%rsp) movq 0x1d8(%rsp), %rax addq $0x4, %rax movq %rax, 0x1d8(%rsp) movq 0x188(%rsp), %rax addq $0x4, %rax movq %rax, 0x188(%rsp) movl 0x13c(%rsp), %eax addl $0x1, %eax movl %eax, 0x13c(%rsp) jmp 0x1534ed8 jmp 0x1534f68 movl 0x230(%rsp), %eax addl $0x1, %eax movl %eax, 0x230(%rsp) jmp 0x1534029 movl $0x0, 0x274(%rsp) movl 0x274(%rsp), %eax movq %rbp, %rsp popq %rbp vzeroupper retq nopl (%rax,%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/binaryop_x86_fma.cpp
int ncnn::binary_op_2_3_4_20<ncnn::BinaryOp_x86_avx_functor::binary_op_mul>(ncnn::Mat const&, ncnn::Mat const&, ncnn::Mat&, ncnn::Option const&)
static int binary_op_2_3_4_20(const Mat& a, const Mat& b, Mat& c, const Option& opt) { Op op; int w = b.w; int h = b.h; int d = b.d; int channels = b.c; int elempack = b.elempack; int size = w * h * d * elempack; // type 2 3 4 20 c.create_like(b, opt.blob_allocator); if (c.empty()) return -100; #pragma omp parallel for num_threads(opt.num_threads) for (int q = 0; q < channels; q++) { const float a0 = a[0]; const float* ptr = b.channel(q); float* outptr = c.channel(q); int i = 0; #if __SSE2__ #if __AVX__ #if __AVX512F__ __m512 _a0_avx512 = _mm512_set1_ps(a0); for (; i + 15 < size; i += 16) { __m512 _p = _mm512_loadu_ps(ptr); __m512 _outp = op.func_pack16(_a0_avx512, _p); _mm512_storeu_ps(outptr, _outp); ptr += 16; outptr += 16; } #endif // __AVX512F__ __m256 _a0_avx = _mm256_set1_ps(a0); for (; i + 7 < size; i += 8) { __m256 _p = _mm256_loadu_ps(ptr); __m256 _outp = op.func_pack8(_a0_avx, _p); _mm256_storeu_ps(outptr, _outp); ptr += 8; outptr += 8; } #endif // __AVX__ __m128 _a0 = _mm_set1_ps(a0); for (; i + 3 < size; i += 4) { __m128 _p = _mm_load_ps(ptr); __m128 _outp = op.func_pack4(_a0, _p); _mm_store_ps(outptr, _outp); ptr += 4; outptr += 4; } #endif // __SSE2__ for (; i < size; i++) { *outptr = op.func(a0, *ptr); ptr += 1; outptr += 1; } } return 0; }
pushq %rbp movq %rsp, %rbp andq $-0x20, %rsp subq $0x460, %rsp # imm = 0x460 movq %rdi, 0x200(%rsp) movq %rsi, 0x1f8(%rsp) movq %rdx, 0x1f0(%rsp) movq %rcx, 0x1e8(%rsp) movq 0x1f8(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x1e0(%rsp) movq 0x1f8(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0x1dc(%rsp) movq 0x1f8(%rsp), %rax movl 0x34(%rax), %eax movl %eax, 0x1d8(%rsp) movq 0x1f8(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0x1d4(%rsp) movq 0x1f8(%rsp), %rax movl 0x18(%rax), %eax movl %eax, 0x1d0(%rsp) movl 0x1e0(%rsp), %eax imull 0x1dc(%rsp), %eax imull 0x1d8(%rsp), %eax imull 0x1d0(%rsp), %eax movl %eax, 0x1cc(%rsp) movq 0x1f0(%rsp), %rdi movq 0x1f8(%rsp), %rsi movq 0x1e8(%rsp), %rax movq 0x8(%rax), %rdx callq 0x65b60 movq 0x1f0(%rsp), %rax movq %rax, 0x210(%rsp) movq 0x210(%rsp), %rcx movq %rcx, 0x60(%rsp) movb $0x1, %al cmpq $0x0, (%rcx) movb %al, 0x6f(%rsp) je 0x1621295 movq 0x60(%rsp), %rax movq %rax, 0x2d0(%rsp) movq 0x2d0(%rsp), %rcx movq 0x40(%rcx), %rax movslq 0x38(%rcx), %rcx imulq %rcx, %rax cmpq $0x0, %rax sete %al movb %al, 0x6f(%rsp) movb 0x6f(%rsp), %al testb $0x1, %al jne 0x162129f jmp 0x16212af movl $0xffffff9c, 0x20c(%rsp) # imm = 0xFFFFFF9C jmp 0x1621ed4 movl $0x0, 0x1c8(%rsp) movl 0x1c8(%rsp), %eax cmpl 0x1d4(%rsp), %eax jge 0x1621ec9 movq 0x200(%rsp), %rax movq %rax, 0x3d0(%rsp) movq $0x0, 0x3c8(%rsp) movq 0x3d0(%rsp), %rax movq (%rax), %rax movq 0x3c8(%rsp), %rcx vmovss (%rax,%rcx,4), %xmm0 vmovss %xmm0, 0x1c4(%rsp) movq 0x1f8(%rsp), %rcx movl 0x1c8(%rsp), %eax leaq 0x170(%rsp), %rdx movq %rdx, 0x228(%rsp) movq %rcx, 0x220(%rsp) movl %eax, 0x21c(%rsp) movq 0x220(%rsp), %rax movq %rax, 0x58(%rsp) movb $0x0, 0x21b(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x21c(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x170(%rsp), %r10 movq %r10, 0x340(%rsp) movl %r9d, 0x33c(%rsp) movl 
%r8d, 0x338(%rsp) movl %edi, 0x334(%rsp) movq %rsi, 0x328(%rsp) movq %rdx, 0x320(%rsp) movl %ecx, 0x31c(%rsp) movq %rax, 0x310(%rsp) movq 0x340(%rsp), %rcx movq %rcx, 0x50(%rsp) movq 0x328(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x320(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x31c(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x310(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x33c(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x338(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x334(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x350(%rsp) movl $0x10, 0x34c(%rsp) movq 0x350(%rsp), %rax movslq 0x34c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x34c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x58(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x198(%rsp) cmpl $0x4, 0x28(%rax) jne 0x16214bb movq 0x58(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x1b0(%rsp) movb $0x1, 0x21b(%rsp) testb $0x1, 0x21b(%rsp) jne 0x16215ea leaq 0x170(%rsp), %rax movq %rax, 0x238(%rsp) movq 0x238(%rsp), %rax movq %rax, 0x3a0(%rsp) movq 0x3a0(%rsp), %rax movq %rax, 0x48(%rsp) cmpq $0x0, 0x8(%rax) je 0x1621590 movq 0x48(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x39c(%rsp) # imm = 0xFFFFFFFF movl 0x39c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x398(%rsp) cmpl $0x1, 0x398(%rsp) jne 0x1621590 movq 0x48(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1621561 movq 0x48(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x162155f jmp 0x162158e movq 0x48(%rsp), %rax movq (%rax), %rax movq %rax, 0x3a8(%rsp) cmpq $0x0, 0x3a8(%rsp) je 0x162158c movq 0x3a8(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x162158e jmp 0x1621590 movq 0x48(%rsp), %rax movq $0x0, (%rax) 
movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x16215e8 movq %rax, %rdi callq 0x5fc90 jmp 0x16215ea leaq 0x170(%rsp), %rax movq %rax, 0x230(%rsp) movq 0x230(%rsp), %rax movq (%rax), %rax movq %rax, 0x38(%rsp) leaq 0x170(%rsp), %rax movq %rax, 0x240(%rsp) movq 0x240(%rsp), %rax movq %rax, 0x390(%rsp) movq 0x390(%rsp), %rax movq %rax, 0x40(%rsp) cmpq $0x0, 0x8(%rax) je 0x16216c9 movq 0x40(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x38c(%rsp) # imm = 0xFFFFFFFF movl 0x38c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x388(%rsp) cmpl $0x1, 0x388(%rsp) jne 0x16216c9 movq 0x40(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x162169a movq 0x40(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x1621698 jmp 0x16216c7 movq 0x40(%rsp), %rax movq (%rax), %rax movq %rax, 0x3b0(%rsp) cmpq $0x0, 0x3b0(%rsp) je 0x16216c5 movq 0x3b0(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x16216c7 jmp 0x16216c9 movq 0x40(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1621721 movq %rax, %rdi callq 0x5fc90 movq 0x38(%rsp), %rax movq %rax, 0x1b8(%rsp) movq 0x1f0(%rsp), %rcx movl 0x1c8(%rsp), %eax leaq 0x120(%rsp), %rdx movq %rdx, 0x260(%rsp) movq %rcx, 0x258(%rsp) movl %eax, 0x254(%rsp) movq 0x258(%rsp), %rax movq %rax, 0x30(%rsp) movb $0x0, 0x253(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x254(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x120(%rsp), %r10 movq %r10, 0x308(%rsp) movl %r9d, 0x304(%rsp) movl %r8d, 0x300(%rsp) 
movl %edi, 0x2fc(%rsp) movq %rsi, 0x2f0(%rsp) movq %rdx, 0x2e8(%rsp) movl %ecx, 0x2e4(%rsp) movq %rax, 0x2d8(%rsp) movq 0x308(%rsp), %rcx movq %rcx, 0x28(%rsp) movq 0x2f0(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x2e8(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x2e4(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x2d8(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x304(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x300(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x2fc(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x360(%rsp) movl $0x10, 0x35c(%rsp) movq 0x360(%rsp), %rax movslq 0x35c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x35c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x30(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x148(%rsp) cmpl $0x4, 0x28(%rax) jne 0x16218de movq 0x30(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x160(%rsp) movb $0x1, 0x253(%rsp) testb $0x1, 0x253(%rsp) jne 0x1621a0d leaq 0x120(%rsp), %rax movq %rax, 0x268(%rsp) movq 0x268(%rsp), %rax movq %rax, 0x370(%rsp) movq 0x370(%rsp), %rax movq %rax, 0x20(%rsp) cmpq $0x0, 0x8(%rax) je 0x16219b3 movq 0x20(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x36c(%rsp) # imm = 0xFFFFFFFF movl 0x36c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x368(%rsp) cmpl $0x1, 0x368(%rsp) jne 0x16219b3 movq 0x20(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1621984 movq 0x20(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x1621982 jmp 0x16219b1 movq 0x20(%rsp), %rax movq (%rax), %rax movq %rax, 0x3c0(%rsp) cmpq $0x0, 0x3c0(%rsp) je 0x16219af movq 0x3c0(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x16219b1 jmp 0x16219b3 movq 0x20(%rsp), %rax movq $0x0, (%rax) movq $0x0, 
0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1621a0b movq %rax, %rdi callq 0x5fc90 jmp 0x1621a0d leaq 0x120(%rsp), %rax movq %rax, 0x270(%rsp) movq 0x270(%rsp), %rax movq (%rax), %rax movq %rax, 0x10(%rsp) leaq 0x120(%rsp), %rax movq %rax, 0x248(%rsp) movq 0x248(%rsp), %rax movq %rax, 0x380(%rsp) movq 0x380(%rsp), %rax movq %rax, 0x18(%rsp) cmpq $0x0, 0x8(%rax) je 0x1621aec movq 0x18(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x37c(%rsp) # imm = 0xFFFFFFFF movl 0x37c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x378(%rsp) cmpl $0x1, 0x378(%rsp) jne 0x1621aec movq 0x18(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1621abd movq 0x18(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x1621abb jmp 0x1621aea movq 0x18(%rsp), %rax movq (%rax), %rax movq %rax, 0x3b8(%rsp) cmpq $0x0, 0x3b8(%rsp) je 0x1621ae8 movq 0x3b8(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x1621aea jmp 0x1621aec movq 0x18(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1621b44 movq %rax, %rdi callq 0x5fc90 movq 0x10(%rsp), %rax movq %rax, 0x168(%rsp) movl $0x0, 0x11c(%rsp) vmovss 0x1c4(%rsp), %xmm0 vmovss %xmm0, 0x3dc(%rsp) vmovss 0x3dc(%rsp), %xmm0 vmovss %xmm0, 0xc(%rsp) vmovss %xmm0, 0x44c(%rsp) vmovss %xmm0, 0x448(%rsp) vmovss %xmm0, 0x444(%rsp) vmovss %xmm0, 0x440(%rsp) vmovss %xmm0, 0x43c(%rsp) vmovss %xmm0, 0x438(%rsp) vmovss %xmm0, 0x434(%rsp) vmovss %xmm0, 0x430(%rsp) vmovss 0x444(%rsp), %xmm1 vmovss 0x440(%rsp), %xmm0 vinsertps $0x10, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[2,3] vmovss 0x448(%rsp), %xmm1 vinsertps $0x20, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm1[0],xmm0[3] vmovss 
0x44c(%rsp), %xmm1 vinsertps $0x30, %xmm1, %xmm0, %xmm1 # xmm1 = xmm0[0,1,2],xmm1[0] vmovss 0x434(%rsp), %xmm2 vmovss 0x430(%rsp), %xmm0 vinsertps $0x10, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm2[0],xmm0[2,3] vmovss 0x438(%rsp), %xmm2 vinsertps $0x20, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm2[0],xmm0[3] vmovss 0x43c(%rsp), %xmm2 vinsertps $0x30, %xmm2, %xmm0, %xmm2 # xmm2 = xmm0[0,1,2],xmm2[0] vmovaps %xmm2, %xmm0 vinsertf128 $0x1, %xmm1, %ymm0, %ymm0 vmovaps %ymm0, 0x400(%rsp) vmovaps 0x400(%rsp), %ymm0 vmovaps %ymm0, 0xe0(%rsp) movl 0x11c(%rsp), %eax addl $0x7, %eax cmpl 0x1cc(%rsp), %eax jge 0x1621d2d movq 0x1b8(%rsp), %rax movq %rax, 0x278(%rsp) movq 0x278(%rsp), %rax vmovups (%rax), %ymm0 vmovaps %ymm0, 0xc0(%rsp) leaq 0x1e7(%rsp), %rdi leaq 0xe0(%rsp), %rsi leaq 0xc0(%rsp), %rdx callq 0x162cfb0 vmovaps %ymm0, 0xa0(%rsp) movq 0x168(%rsp), %rax vmovaps 0xa0(%rsp), %ymm0 movq %rax, 0x2a0(%rsp) vmovaps %ymm0, 0x280(%rsp) vmovaps 0x280(%rsp), %ymm0 movq 0x2a0(%rsp), %rax vmovups %ymm0, (%rax) movq 0x1b8(%rsp), %rax addq $0x20, %rax movq %rax, 0x1b8(%rsp) movq 0x168(%rsp), %rax addq $0x20, %rax movq %rax, 0x168(%rsp) movl 0x11c(%rsp), %eax addl $0x8, %eax movl %eax, 0x11c(%rsp) jmp 0x1621c56 vmovss 0x1c4(%rsp), %xmm0 vmovss %xmm0, 0x3fc(%rsp) vbroadcastss 0x3fc(%rsp), %xmm0 vmovaps %xmm0, 0x3e0(%rsp) vmovaps 0x3e0(%rsp), %xmm0 vmovaps %xmm0, 0x90(%rsp) movl 0x11c(%rsp), %eax addl $0x3, %eax cmpl 0x1cc(%rsp), %eax jge 0x1621e38 movq 0x1b8(%rsp), %rax movq %rax, 0x2a8(%rsp) movq 0x2a8(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x80(%rsp) leaq 0x1e7(%rsp), %rdi leaq 0x90(%rsp), %rsi leaq 0x80(%rsp), %rdx vzeroupper callq 0x162d000 vmovaps %xmm0, 0x70(%rsp) movq 0x168(%rsp), %rax vmovaps 0x70(%rsp), %xmm0 movq %rax, 0x2c8(%rsp) vmovaps %xmm0, 0x2b0(%rsp) vmovaps 0x2b0(%rsp), %xmm0 movq 0x2c8(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x1b8(%rsp), %rax addq $0x10, %rax movq %rax, 0x1b8(%rsp) movq 0x168(%rsp), %rax addq $0x10, %rax movq %rax, 0x168(%rsp) movl 
0x11c(%rsp), %eax addl $0x4, %eax movl %eax, 0x11c(%rsp) jmp 0x1621d64 jmp 0x1621e3a movl 0x11c(%rsp), %eax cmpl 0x1cc(%rsp), %eax jge 0x1621eb1 movq 0x1b8(%rsp), %rdx leaq 0x1e7(%rsp), %rdi leaq 0x1c4(%rsp), %rsi vzeroupper callq 0x162d040 movq 0x168(%rsp), %rax vmovss %xmm0, (%rax) movq 0x1b8(%rsp), %rax addq $0x4, %rax movq %rax, 0x1b8(%rsp) movq 0x168(%rsp), %rax addq $0x4, %rax movq %rax, 0x168(%rsp) movl 0x11c(%rsp), %eax addl $0x1, %eax movl %eax, 0x11c(%rsp) jmp 0x1621e3a jmp 0x1621eb3 movl 0x1c8(%rsp), %eax addl $0x1, %eax movl %eax, 0x1c8(%rsp) jmp 0x16212ba movl $0x0, 0x20c(%rsp) movl 0x20c(%rsp), %eax movq %rbp, %rsp popq %rbp vzeroupper retq nopw %cs:(%rax,%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/binaryop_x86_avx.cpp
int ncnn::binary_op_6_11_16_25<ncnn::BinaryOp_x86_avx_functor::binary_op_div>(ncnn::Mat const&, ncnn::Mat const&, ncnn::Mat&, ncnn::Option const&)
static int binary_op_6_11_16_25(const Mat& a, const Mat& b, Mat& c, const Option& opt) { Op op; int w = a.w; int h = a.h; int d = a.d; int channels = a.c; int elempack = a.elempack; int size = w * h * d * elempack; // type 6 11 16 25 c.create_like(a, opt.blob_allocator); if (c.empty()) return -100; #pragma omp parallel for num_threads(opt.num_threads) for (int q = 0; q < channels; q++) { const float* ptr = a.channel(q); const float b0 = b[0]; float* outptr = c.channel(q); int i = 0; #if __SSE2__ #if __AVX__ #if __AVX512F__ __m512 _b0_avx512 = _mm512_set1_ps(b0); for (; i + 15 < size; i += 16) { __m512 _p = _mm512_loadu_ps(ptr); __m512 _outp = op.func_pack16(_p, _b0_avx512); _mm512_storeu_ps(outptr, _outp); ptr += 16; outptr += 16; } #endif // __AVX512F__ __m256 _b0_avx = _mm256_set1_ps(b0); for (; i + 7 < size; i += 8) { __m256 _p = _mm256_loadu_ps(ptr); __m256 _outp = op.func_pack8(_p, _b0_avx); _mm256_storeu_ps(outptr, _outp); ptr += 8; outptr += 8; } #endif // __AVX__ __m128 _b0 = _mm_set1_ps(b0); for (; i + 3 < size; i += 4) { __m128 _p = _mm_load_ps(ptr); __m128 _outp = op.func_pack4(_p, _b0); _mm_store_ps(outptr, _outp); ptr += 4; outptr += 4; } #endif // __SSE2__ for (; i < size; i++) { *outptr = op.func(*ptr, b0); ptr += 1; outptr += 1; } } return 0; }
pushq %rbp movq %rsp, %rbp andq $-0x20, %rsp subq $0x460, %rsp # imm = 0x460 movq %rdi, 0x200(%rsp) movq %rsi, 0x1f8(%rsp) movq %rdx, 0x1f0(%rsp) movq %rcx, 0x1e8(%rsp) movq 0x200(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x1e0(%rsp) movq 0x200(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0x1dc(%rsp) movq 0x200(%rsp), %rax movl 0x34(%rax), %eax movl %eax, 0x1d8(%rsp) movq 0x200(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0x1d4(%rsp) movq 0x200(%rsp), %rax movl 0x18(%rax), %eax movl %eax, 0x1d0(%rsp) movl 0x1e0(%rsp), %eax imull 0x1dc(%rsp), %eax imull 0x1d8(%rsp), %eax imull 0x1d0(%rsp), %eax movl %eax, 0x1cc(%rsp) movq 0x1f0(%rsp), %rdi movq 0x200(%rsp), %rsi movq 0x1e8(%rsp), %rax movq 0x8(%rax), %rdx callq 0x65b60 movq 0x1f0(%rsp), %rax movq %rax, 0x210(%rsp) movq 0x210(%rsp), %rcx movq %rcx, 0x60(%rsp) movb $0x1, %al cmpq $0x0, (%rcx) movb %al, 0x6f(%rsp) je 0x16230e5 movq 0x60(%rsp), %rax movq %rax, 0x2d0(%rsp) movq 0x2d0(%rsp), %rcx movq 0x40(%rcx), %rax movslq 0x38(%rcx), %rcx imulq %rcx, %rax cmpq $0x0, %rax sete %al movb %al, 0x6f(%rsp) movb 0x6f(%rsp), %al testb $0x1, %al jne 0x16230ef jmp 0x16230ff movl $0xffffff9c, 0x20c(%rsp) # imm = 0xFFFFFF9C jmp 0x1623d24 movl $0x0, 0x1c8(%rsp) movl 0x1c8(%rsp), %eax cmpl 0x1d4(%rsp), %eax jge 0x1623d19 movq 0x200(%rsp), %rcx movl 0x1c8(%rsp), %eax leaq 0x178(%rsp), %rdx movq %rdx, 0x228(%rsp) movq %rcx, 0x220(%rsp) movl %eax, 0x21c(%rsp) movq 0x220(%rsp), %rax movq %rax, 0x58(%rsp) movb $0x0, 0x21b(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x21c(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x178(%rsp), %r10 movq %r10, 0x340(%rsp) movl %r9d, 0x33c(%rsp) movl %r8d, 0x338(%rsp) movl %edi, 0x334(%rsp) movq %rsi, 0x328(%rsp) movq %rdx, 0x320(%rsp) movl %ecx, 0x31c(%rsp) movq %rax, 0x310(%rsp) movq 0x340(%rsp), %rcx movq %rcx, 0x50(%rsp) movq 
0x328(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x320(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x31c(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x310(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x33c(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x338(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x334(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x350(%rsp) movl $0x10, 0x34c(%rsp) movq 0x350(%rsp), %rax movslq 0x34c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x34c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x58(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x1a0(%rsp) cmpl $0x4, 0x28(%rax) jne 0x16232ce movq 0x58(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x1b8(%rsp) movb $0x1, 0x21b(%rsp) testb $0x1, 0x21b(%rsp) jne 0x16233fd leaq 0x178(%rsp), %rax movq %rax, 0x238(%rsp) movq 0x238(%rsp), %rax movq %rax, 0x3a0(%rsp) movq 0x3a0(%rsp), %rax movq %rax, 0x48(%rsp) cmpq $0x0, 0x8(%rax) je 0x16233a3 movq 0x48(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x39c(%rsp) # imm = 0xFFFFFFFF movl 0x39c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x398(%rsp) cmpl $0x1, 0x398(%rsp) jne 0x16233a3 movq 0x48(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1623374 movq 0x48(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x1623372 jmp 0x16233a1 movq 0x48(%rsp), %rax movq (%rax), %rax movq %rax, 0x3a8(%rsp) cmpq $0x0, 0x3a8(%rsp) je 0x162339f movq 0x3a8(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x16233a1 jmp 0x16233a3 movq 0x48(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq 
$0x0, 0x8(%rax) jmp 0x16233fb movq %rax, %rdi callq 0x5fc90 jmp 0x16233fd leaq 0x178(%rsp), %rax movq %rax, 0x230(%rsp) movq 0x230(%rsp), %rax movq (%rax), %rax movq %rax, 0x38(%rsp) leaq 0x178(%rsp), %rax movq %rax, 0x240(%rsp) movq 0x240(%rsp), %rax movq %rax, 0x390(%rsp) movq 0x390(%rsp), %rax movq %rax, 0x40(%rsp) cmpq $0x0, 0x8(%rax) je 0x16234dc movq 0x40(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x38c(%rsp) # imm = 0xFFFFFFFF movl 0x38c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x388(%rsp) cmpl $0x1, 0x388(%rsp) jne 0x16234dc movq 0x40(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x16234ad movq 0x40(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x16234ab jmp 0x16234da movq 0x40(%rsp), %rax movq (%rax), %rax movq %rax, 0x3b0(%rsp) cmpq $0x0, 0x3b0(%rsp) je 0x16234d8 movq 0x3b0(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x16234da jmp 0x16234dc movq 0x40(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1623534 movq %rax, %rdi callq 0x5fc90 movq 0x38(%rsp), %rax movq %rax, 0x1c0(%rsp) movq 0x1f8(%rsp), %rax movq %rax, 0x3d0(%rsp) movq $0x0, 0x3c8(%rsp) movq 0x3d0(%rsp), %rax movq (%rax), %rax movq 0x3c8(%rsp), %rcx vmovss (%rax,%rcx,4), %xmm0 vmovss %xmm0, 0x174(%rsp) movq 0x1f0(%rsp), %rcx movl 0x1c8(%rsp), %eax leaq 0x120(%rsp), %rdx movq %rdx, 0x260(%rsp) movq %rcx, 0x258(%rsp) movl %eax, 0x254(%rsp) movq 0x258(%rsp), %rax movq %rax, 0x30(%rsp) movb $0x0, 0x253(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x254(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x120(%rsp), %r10 movq %r10, 0x308(%rsp) movl %r9d, 0x304(%rsp) movl %r8d, 
0x300(%rsp) movl %edi, 0x2fc(%rsp) movq %rsi, 0x2f0(%rsp) movq %rdx, 0x2e8(%rsp) movl %ecx, 0x2e4(%rsp) movq %rax, 0x2d8(%rsp) movq 0x308(%rsp), %rcx movq %rcx, 0x28(%rsp) movq 0x2f0(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x2e8(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x2e4(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x2d8(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x304(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x300(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x2fc(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x360(%rsp) movl $0x10, 0x35c(%rsp) movq 0x360(%rsp), %rax movslq 0x35c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x35c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x30(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x148(%rsp) cmpl $0x4, 0x28(%rax) jne 0x162372e movq 0x30(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x160(%rsp) movb $0x1, 0x253(%rsp) testb $0x1, 0x253(%rsp) jne 0x162385d leaq 0x120(%rsp), %rax movq %rax, 0x268(%rsp) movq 0x268(%rsp), %rax movq %rax, 0x370(%rsp) movq 0x370(%rsp), %rax movq %rax, 0x20(%rsp) cmpq $0x0, 0x8(%rax) je 0x1623803 movq 0x20(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x36c(%rsp) # imm = 0xFFFFFFFF movl 0x36c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x368(%rsp) cmpl $0x1, 0x368(%rsp) jne 0x1623803 movq 0x20(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x16237d4 movq 0x20(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x16237d2 jmp 0x1623801 movq 0x20(%rsp), %rax movq (%rax), %rax movq %rax, 0x3c0(%rsp) cmpq $0x0, 0x3c0(%rsp) je 0x16237ff movq 0x3c0(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x1623801 jmp 0x1623803 movq 0x20(%rsp), %rax movq $0x0, (%rax) movq 
$0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x162385b movq %rax, %rdi callq 0x5fc90 jmp 0x162385d leaq 0x120(%rsp), %rax movq %rax, 0x270(%rsp) movq 0x270(%rsp), %rax movq (%rax), %rax movq %rax, 0x10(%rsp) leaq 0x120(%rsp), %rax movq %rax, 0x248(%rsp) movq 0x248(%rsp), %rax movq %rax, 0x380(%rsp) movq 0x380(%rsp), %rax movq %rax, 0x18(%rsp) cmpq $0x0, 0x8(%rax) je 0x162393c movq 0x18(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x37c(%rsp) # imm = 0xFFFFFFFF movl 0x37c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x378(%rsp) cmpl $0x1, 0x378(%rsp) jne 0x162393c movq 0x18(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x162390d movq 0x18(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x162390b jmp 0x162393a movq 0x18(%rsp), %rax movq (%rax), %rax movq %rax, 0x3b8(%rsp) cmpq $0x0, 0x3b8(%rsp) je 0x1623938 movq 0x3b8(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x162393a jmp 0x162393c movq 0x18(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1623994 movq %rax, %rdi callq 0x5fc90 movq 0x10(%rsp), %rax movq %rax, 0x168(%rsp) movl $0x0, 0x11c(%rsp) vmovss 0x174(%rsp), %xmm0 vmovss %xmm0, 0x3dc(%rsp) vmovss 0x3dc(%rsp), %xmm0 vmovss %xmm0, 0xc(%rsp) vmovss %xmm0, 0x44c(%rsp) vmovss %xmm0, 0x448(%rsp) vmovss %xmm0, 0x444(%rsp) vmovss %xmm0, 0x440(%rsp) vmovss %xmm0, 0x43c(%rsp) vmovss %xmm0, 0x438(%rsp) vmovss %xmm0, 0x434(%rsp) vmovss %xmm0, 0x430(%rsp) vmovss 0x444(%rsp), %xmm1 vmovss 0x440(%rsp), %xmm0 vinsertps $0x10, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[2,3] vmovss 0x448(%rsp), %xmm1 vinsertps $0x20, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm1[0],xmm0[3] 
vmovss 0x44c(%rsp), %xmm1 vinsertps $0x30, %xmm1, %xmm0, %xmm1 # xmm1 = xmm0[0,1,2],xmm1[0] vmovss 0x434(%rsp), %xmm2 vmovss 0x430(%rsp), %xmm0 vinsertps $0x10, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm2[0],xmm0[2,3] vmovss 0x438(%rsp), %xmm2 vinsertps $0x20, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm2[0],xmm0[3] vmovss 0x43c(%rsp), %xmm2 vinsertps $0x30, %xmm2, %xmm0, %xmm2 # xmm2 = xmm0[0,1,2],xmm2[0] vmovaps %xmm2, %xmm0 vinsertf128 $0x1, %xmm1, %ymm0, %ymm0 vmovaps %ymm0, 0x400(%rsp) vmovaps 0x400(%rsp), %ymm0 vmovaps %ymm0, 0xe0(%rsp) movl 0x11c(%rsp), %eax addl $0x7, %eax cmpl 0x1cc(%rsp), %eax jge 0x1623b7d movq 0x1c0(%rsp), %rax movq %rax, 0x278(%rsp) movq 0x278(%rsp), %rax vmovups (%rax), %ymm0 vmovaps %ymm0, 0xc0(%rsp) leaq 0x1e7(%rsp), %rdi leaq 0xc0(%rsp), %rsi leaq 0xe0(%rsp), %rdx callq 0x162d070 vmovaps %ymm0, 0xa0(%rsp) movq 0x168(%rsp), %rax vmovaps 0xa0(%rsp), %ymm0 movq %rax, 0x2a0(%rsp) vmovaps %ymm0, 0x280(%rsp) vmovaps 0x280(%rsp), %ymm0 movq 0x2a0(%rsp), %rax vmovups %ymm0, (%rax) movq 0x1c0(%rsp), %rax addq $0x20, %rax movq %rax, 0x1c0(%rsp) movq 0x168(%rsp), %rax addq $0x20, %rax movq %rax, 0x168(%rsp) movl 0x11c(%rsp), %eax addl $0x8, %eax movl %eax, 0x11c(%rsp) jmp 0x1623aa6 vmovss 0x174(%rsp), %xmm0 vmovss %xmm0, 0x3fc(%rsp) vbroadcastss 0x3fc(%rsp), %xmm0 vmovaps %xmm0, 0x3e0(%rsp) vmovaps 0x3e0(%rsp), %xmm0 vmovaps %xmm0, 0x90(%rsp) movl 0x11c(%rsp), %eax addl $0x3, %eax cmpl 0x1cc(%rsp), %eax jge 0x1623c88 movq 0x1c0(%rsp), %rax movq %rax, 0x2a8(%rsp) movq 0x2a8(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x80(%rsp) leaq 0x1e7(%rsp), %rdi leaq 0x80(%rsp), %rsi leaq 0x90(%rsp), %rdx vzeroupper callq 0x162d0c0 vmovaps %xmm0, 0x70(%rsp) movq 0x168(%rsp), %rax vmovaps 0x70(%rsp), %xmm0 movq %rax, 0x2c8(%rsp) vmovaps %xmm0, 0x2b0(%rsp) vmovaps 0x2b0(%rsp), %xmm0 movq 0x2c8(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x1c0(%rsp), %rax addq $0x10, %rax movq %rax, 0x1c0(%rsp) movq 0x168(%rsp), %rax addq $0x10, %rax movq %rax, 0x168(%rsp) movl 
0x11c(%rsp), %eax addl $0x4, %eax movl %eax, 0x11c(%rsp) jmp 0x1623bb4 jmp 0x1623c8a movl 0x11c(%rsp), %eax cmpl 0x1cc(%rsp), %eax jge 0x1623d01 movq 0x1c0(%rsp), %rsi leaq 0x1e7(%rsp), %rdi leaq 0x174(%rsp), %rdx vzeroupper callq 0x162d100 movq 0x168(%rsp), %rax vmovss %xmm0, (%rax) movq 0x1c0(%rsp), %rax addq $0x4, %rax movq %rax, 0x1c0(%rsp) movq 0x168(%rsp), %rax addq $0x4, %rax movq %rax, 0x168(%rsp) movl 0x11c(%rsp), %eax addl $0x1, %eax movl %eax, 0x11c(%rsp) jmp 0x1623c8a jmp 0x1623d03 movl 0x1c8(%rsp), %eax addl $0x1, %eax movl %eax, 0x1c8(%rsp) jmp 0x162310a movl $0x0, 0x20c(%rsp) movl 0x20c(%rsp), %eax movq %rbp, %rsp popq %rbp vzeroupper retq nopw %cs:(%rax,%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/binaryop_x86_avx.cpp
int ncnn::binary_op_7_13_19_29<ncnn::BinaryOp_x86_avx_functor::binary_op_max>(ncnn::Mat const&, ncnn::Mat const&, ncnn::Mat&, ncnn::Option const&)
static int binary_op_7_13_19_29(const Mat& a, const Mat& b, Mat& c, const Option& opt) { Op op; int w = a.w; int h = a.h; int d = a.d; int channels = a.c; int elempack = a.elempack; int size = w * h * d * elempack; // type 7 13 19 29 c.create_like(a, opt.blob_allocator); if (c.empty()) return -100; #pragma omp parallel for num_threads(opt.num_threads) for (int q = 0; q < channels; q++) { const float* ptr = a.channel(q); const float* ptr1 = b.channel(q); float* outptr = c.channel(q); int i = 0; #if __SSE2__ #if __AVX__ #if __AVX512F__ for (; i + 15 < size; i += 16) { __m512 _p = _mm512_loadu_ps(ptr); __m512 _p1 = _mm512_loadu_ps(ptr1); __m512 _outp = op.func_pack16(_p, _p1); _mm512_storeu_ps(outptr, _outp); ptr += 16; ptr1 += 16; outptr += 16; } #endif // __AVX512F__ for (; i + 7 < size; i += 8) { __m256 _p = _mm256_loadu_ps(ptr); __m256 _p1 = _mm256_loadu_ps(ptr1); __m256 _outp = op.func_pack8(_p, _p1); _mm256_storeu_ps(outptr, _outp); ptr += 8; ptr1 += 8; outptr += 8; } #endif // __AVX__ for (; i + 3 < size; i += 4) { __m128 _p = _mm_load_ps(ptr); __m128 _p1 = _mm_load_ps(ptr1); __m128 _outp = op.func_pack4(_p, _p1); _mm_store_ps(outptr, _outp); ptr += 4; ptr1 += 4; outptr += 4; } #endif // __SSE2__ for (; i < size; i++) { *outptr = op.func(*ptr, *ptr1); ptr += 1; ptr1 += 1; outptr += 1; } } return 0; }
pushq %rbp movq %rsp, %rbp andq $-0x20, %rsp subq $0x500, %rsp # imm = 0x500 movq %rdi, 0x268(%rsp) movq %rsi, 0x260(%rsp) movq %rdx, 0x258(%rsp) movq %rcx, 0x250(%rsp) movq 0x268(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x248(%rsp) movq 0x268(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0x244(%rsp) movq 0x268(%rsp), %rax movl 0x34(%rax), %eax movl %eax, 0x240(%rsp) movq 0x268(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0x23c(%rsp) movq 0x268(%rsp), %rax movl 0x18(%rax), %eax movl %eax, 0x238(%rsp) movl 0x248(%rsp), %eax imull 0x244(%rsp), %eax imull 0x240(%rsp), %eax imull 0x238(%rsp), %eax movl %eax, 0x234(%rsp) movq 0x258(%rsp), %rdi movq 0x268(%rsp), %rsi movq 0x250(%rsp), %rax movq 0x8(%rax), %rdx callq 0x65b60 movq 0x258(%rsp), %rax movq %rax, 0x278(%rsp) movq 0x278(%rsp), %rcx movq %rcx, 0x80(%rsp) movb $0x1, %al cmpq $0x0, (%rcx) movb %al, 0x8f(%rsp) je 0x1624bf1 movq 0x80(%rsp), %rax movq %rax, 0x380(%rsp) movq 0x380(%rsp), %rcx movq 0x40(%rcx), %rax movslq 0x38(%rcx), %rcx imulq %rcx, %rax cmpq $0x0, %rax sete %al movb %al, 0x8f(%rsp) movb 0x8f(%rsp), %al testb $0x1, %al jne 0x1624bfe jmp 0x1624c0e movl $0xffffff9c, 0x274(%rsp) # imm = 0xFFFFFF9C jmp 0x1625b79 movl $0x0, 0x230(%rsp) movl 0x230(%rsp), %eax cmpl 0x23c(%rsp), %eax jge 0x1625b6e movq 0x268(%rsp), %rcx movl 0x230(%rsp), %eax leaq 0x1e0(%rsp), %rdx movq %rdx, 0x2a8(%rsp) movq %rcx, 0x2a0(%rsp) movl %eax, 0x29c(%rsp) movq 0x2a0(%rsp), %rax movq %rax, 0x78(%rsp) movb $0x0, 0x29b(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x29c(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x1e0(%rsp), %r10 movq %r10, 0x3f0(%rsp) movl %r9d, 0x3ec(%rsp) movl %r8d, 0x3e8(%rsp) movl %edi, 0x3e4(%rsp) movq %rsi, 0x3d8(%rsp) movq %rdx, 0x3d0(%rsp) movl %ecx, 0x3cc(%rsp) movq %rax, 0x3c0(%rsp) movq 0x3f0(%rsp), %rcx movq %rcx, 0x70(%rsp) movq 
0x3d8(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x3d0(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x3cc(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x3c0(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x3ec(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x3e8(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x3e4(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x448(%rsp) movl $0x10, 0x444(%rsp) movq 0x448(%rsp), %rax movslq 0x444(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x444(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x78(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x208(%rsp) cmpl $0x4, 0x28(%rax) jne 0x1624ddd movq 0x78(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x220(%rsp) movb $0x1, 0x29b(%rsp) testb $0x1, 0x29b(%rsp) jne 0x1624f0c leaq 0x1e0(%rsp), %rax movq %rax, 0x2c0(%rsp) movq 0x2c0(%rsp), %rax movq %rax, 0x4b8(%rsp) movq 0x4b8(%rsp), %rax movq %rax, 0x68(%rsp) cmpq $0x0, 0x8(%rax) je 0x1624eb2 movq 0x68(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x4b4(%rsp) # imm = 0xFFFFFFFF movl 0x4b4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x4b0(%rsp) cmpl $0x1, 0x4b0(%rsp) jne 0x1624eb2 movq 0x68(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1624e83 movq 0x68(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x1624e81 jmp 0x1624eb0 movq 0x68(%rsp), %rax movq (%rax), %rax movq %rax, 0x4c0(%rsp) cmpq $0x0, 0x4c0(%rsp) je 0x1624eae movq 0x4c0(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x1624eb0 jmp 0x1624eb2 movq 0x68(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq 
$0x0, 0x8(%rax) jmp 0x1624f0a movq %rax, %rdi callq 0x5fc90 jmp 0x1624f0c leaq 0x1e0(%rsp), %rax movq %rax, 0x2b8(%rsp) movq 0x2b8(%rsp), %rax movq (%rax), %rax movq %rax, 0x58(%rsp) leaq 0x1e0(%rsp), %rax movq %rax, 0x2d0(%rsp) movq 0x2d0(%rsp), %rax movq %rax, 0x498(%rsp) movq 0x498(%rsp), %rax movq %rax, 0x60(%rsp) cmpq $0x0, 0x8(%rax) je 0x1624feb movq 0x60(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x494(%rsp) # imm = 0xFFFFFFFF movl 0x494(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x490(%rsp) cmpl $0x1, 0x490(%rsp) jne 0x1624feb movq 0x60(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1624fbc movq 0x60(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x1624fba jmp 0x1624fe9 movq 0x60(%rsp), %rax movq (%rax), %rax movq %rax, 0x4d0(%rsp) cmpq $0x0, 0x4d0(%rsp) je 0x1624fe7 movq 0x4d0(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x1624fe9 jmp 0x1624feb movq 0x60(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1625043 movq %rax, %rdi callq 0x5fc90 movq 0x58(%rsp), %rax movq %rax, 0x228(%rsp) movq 0x260(%rsp), %rcx movl 0x230(%rsp), %eax leaq 0x190(%rsp), %rdx movq %rdx, 0x290(%rsp) movq %rcx, 0x288(%rsp) movl %eax, 0x284(%rsp) movq 0x288(%rsp), %rax movq %rax, 0x50(%rsp) movb $0x0, 0x283(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x284(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x190(%rsp), %r10 movq %r10, 0x428(%rsp) movl %r9d, 0x424(%rsp) movl %r8d, 0x420(%rsp) movl %edi, 0x41c(%rsp) movq %rsi, 0x410(%rsp) movq %rdx, 0x408(%rsp) movl %ecx, 0x404(%rsp) movq %rax, 0x3f8(%rsp) movq 0x428(%rsp), %rcx movq %rcx, 0x48(%rsp) movq 0x410(%rsp), %rax 
movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x408(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x404(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x3f8(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x424(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x420(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x41c(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x438(%rsp) movl $0x10, 0x434(%rsp) movq 0x438(%rsp), %rax movslq 0x434(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x434(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x50(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x1b8(%rsp) cmpl $0x4, 0x28(%rax) jne 0x1625200 movq 0x50(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x1d0(%rsp) movb $0x1, 0x283(%rsp) testb $0x1, 0x283(%rsp) jne 0x162532f leaq 0x190(%rsp), %rax movq %rax, 0x2c8(%rsp) movq 0x2c8(%rsp), %rax movq %rax, 0x4a8(%rsp) movq 0x4a8(%rsp), %rax movq %rax, 0x40(%rsp) cmpq $0x0, 0x8(%rax) je 0x16252d5 movq 0x40(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x4a4(%rsp) # imm = 0xFFFFFFFF movl 0x4a4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x4a0(%rsp) cmpl $0x1, 0x4a0(%rsp) jne 0x16252d5 movq 0x40(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x16252a6 movq 0x40(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x16252a4 jmp 0x16252d3 movq 0x40(%rsp), %rax movq (%rax), %rax movq %rax, 0x4c8(%rsp) cmpq $0x0, 0x4c8(%rsp) je 0x16252d1 movq 0x4c8(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x16252d3 jmp 0x16252d5 movq 0x40(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 
0x162532d movq %rax, %rdi callq 0x5fc90 jmp 0x162532f leaq 0x190(%rsp), %rax movq %rax, 0x2b0(%rsp) movq 0x2b0(%rsp), %rax movq (%rax), %rax movq %rax, 0x30(%rsp) leaq 0x190(%rsp), %rax movq %rax, 0x2d8(%rsp) movq 0x2d8(%rsp), %rax movq %rax, 0x488(%rsp) movq 0x488(%rsp), %rax movq %rax, 0x38(%rsp) cmpq $0x0, 0x8(%rax) je 0x162540e movq 0x38(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x484(%rsp) # imm = 0xFFFFFFFF movl 0x484(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x480(%rsp) cmpl $0x1, 0x480(%rsp) jne 0x162540e movq 0x38(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x16253df movq 0x38(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x16253dd jmp 0x162540c movq 0x38(%rsp), %rax movq (%rax), %rax movq %rax, 0x4d8(%rsp) cmpq $0x0, 0x4d8(%rsp) je 0x162540a movq 0x4d8(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x162540c jmp 0x162540e movq 0x38(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1625466 movq %rax, %rdi callq 0x5fc90 movq 0x30(%rsp), %rax movq %rax, 0x1d8(%rsp) movq 0x258(%rsp), %rcx movl 0x230(%rsp), %eax leaq 0x140(%rsp), %rdx movq %rdx, 0x2f8(%rsp) movq %rcx, 0x2f0(%rsp) movl %eax, 0x2ec(%rsp) movq 0x2f0(%rsp), %rax movq %rax, 0x28(%rsp) movb $0x0, 0x2eb(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x2ec(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x140(%rsp), %r10 movq %r10, 0x3b8(%rsp) movl %r9d, 0x3b4(%rsp) movl %r8d, 0x3b0(%rsp) movl %edi, 0x3ac(%rsp) movq %rsi, 0x3a0(%rsp) movq %rdx, 0x398(%rsp) movl %ecx, 0x394(%rsp) movq %rax, 0x388(%rsp) movq 0x3b8(%rsp), %rcx movq %rcx, 0x20(%rsp) movq 0x3a0(%rsp), %rax movq %rax, (%rcx) 
movq $0x0, 0x8(%rcx) movq 0x398(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x394(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x388(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x3b4(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x3b0(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x3ac(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x458(%rsp) movl $0x10, 0x454(%rsp) movq 0x458(%rsp), %rax movslq 0x454(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x454(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x28(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x168(%rsp) cmpl $0x4, 0x28(%rax) jne 0x1625623 movq 0x28(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x180(%rsp) movb $0x1, 0x2eb(%rsp) testb $0x1, 0x2eb(%rsp) jne 0x1625752 leaq 0x140(%rsp), %rax movq %rax, 0x300(%rsp) movq 0x300(%rsp), %rax movq %rax, 0x468(%rsp) movq 0x468(%rsp), %rax movq %rax, 0x18(%rsp) cmpq $0x0, 0x8(%rax) je 0x16256f8 movq 0x18(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x464(%rsp) # imm = 0xFFFFFFFF movl 0x464(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x460(%rsp) cmpl $0x1, 0x460(%rsp) jne 0x16256f8 movq 0x18(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x16256c9 movq 0x18(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x16256c7 jmp 0x16256f6 movq 0x18(%rsp), %rax movq (%rax), %rax movq %rax, 0x4e8(%rsp) cmpq $0x0, 0x4e8(%rsp) je 0x16256f4 movq 0x4e8(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x16256f6 jmp 0x16256f8 movq 0x18(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1625750 movq %rax, 
%rdi callq 0x5fc90 jmp 0x1625752 leaq 0x140(%rsp), %rax movq %rax, 0x308(%rsp) movq 0x308(%rsp), %rax movq (%rax), %rax movq %rax, 0x8(%rsp) leaq 0x140(%rsp), %rax movq %rax, 0x2e0(%rsp) movq 0x2e0(%rsp), %rax movq %rax, 0x478(%rsp) movq 0x478(%rsp), %rax movq %rax, 0x10(%rsp) cmpq $0x0, 0x8(%rax) je 0x1625831 movq 0x10(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x474(%rsp) # imm = 0xFFFFFFFF movl 0x474(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x470(%rsp) cmpl $0x1, 0x470(%rsp) jne 0x1625831 movq 0x10(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1625802 movq 0x10(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x1625800 jmp 0x162582f movq 0x10(%rsp), %rax movq (%rax), %rax movq %rax, 0x4e0(%rsp) cmpq $0x0, 0x4e0(%rsp) je 0x162582d movq 0x4e0(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x162582f jmp 0x1625831 movq 0x10(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1625889 movq %rax, %rdi callq 0x5fc90 movq 0x8(%rsp), %rax movq %rax, 0x188(%rsp) movl $0x0, 0x13c(%rsp) movl 0x13c(%rsp), %eax addl $0x7, %eax cmpl 0x234(%rsp), %eax jge 0x16259b1 movq 0x228(%rsp), %rax movq %rax, 0x318(%rsp) movq 0x318(%rsp), %rax vmovups (%rax), %ymm0 vmovaps %ymm0, 0x100(%rsp) movq 0x1d8(%rsp), %rax movq %rax, 0x310(%rsp) movq 0x310(%rsp), %rax vmovups (%rax), %ymm0 vmovaps %ymm0, 0xe0(%rsp) leaq 0x24f(%rsp), %rdi leaq 0x100(%rsp), %rsi leaq 0xe0(%rsp), %rdx callq 0x162d130 vmovaps %ymm0, 0xc0(%rsp) movq 0x188(%rsp), %rax vmovaps 0xc0(%rsp), %ymm0 movq %rax, 0x348(%rsp) vmovaps %ymm0, 0x320(%rsp) vmovaps 0x320(%rsp), %ymm0 movq 0x348(%rsp), %rax vmovups %ymm0, (%rax) movq 0x228(%rsp), %rax addq $0x20, %rax movq %rax, 0x228(%rsp) movq 0x1d8(%rsp), %rax addq $0x20, %rax movq %rax, 0x1d8(%rsp) movq 0x188(%rsp), %rax addq $0x20, 
%rax movq %rax, 0x188(%rsp) movl 0x13c(%rsp), %eax addl $0x8, %eax movl %eax, 0x13c(%rsp) jmp 0x16258a1 jmp 0x16259b3 movl 0x13c(%rsp), %eax addl $0x3, %eax cmpl 0x234(%rsp), %eax jge 0x1625ac6 movq 0x228(%rsp), %rax movq %rax, 0x358(%rsp) movq 0x358(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0xb0(%rsp) movq 0x1d8(%rsp), %rax movq %rax, 0x350(%rsp) movq 0x350(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0xa0(%rsp) leaq 0x24f(%rsp), %rdi leaq 0xb0(%rsp), %rsi leaq 0xa0(%rsp), %rdx vzeroupper callq 0x162d190 vmovaps %xmm0, 0x90(%rsp) movq 0x188(%rsp), %rax vmovaps 0x90(%rsp), %xmm0 movq %rax, 0x378(%rsp) vmovaps %xmm0, 0x360(%rsp) vmovaps 0x360(%rsp), %xmm0 movq 0x378(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x228(%rsp), %rax addq $0x10, %rax movq %rax, 0x228(%rsp) movq 0x1d8(%rsp), %rax addq $0x10, %rax movq %rax, 0x1d8(%rsp) movq 0x188(%rsp), %rax addq $0x10, %rax movq %rax, 0x188(%rsp) movl 0x13c(%rsp), %eax addl $0x4, %eax movl %eax, 0x13c(%rsp) jmp 0x16259b3 jmp 0x1625ac8 movl 0x13c(%rsp), %eax cmpl 0x234(%rsp), %eax jge 0x1625b56 movq 0x228(%rsp), %rsi movq 0x1d8(%rsp), %rdx leaq 0x24f(%rsp), %rdi vzeroupper callq 0x162d1d0 movq 0x188(%rsp), %rax vmovss %xmm0, (%rax) movq 0x228(%rsp), %rax addq $0x4, %rax movq %rax, 0x228(%rsp) movq 0x1d8(%rsp), %rax addq $0x4, %rax movq %rax, 0x1d8(%rsp) movq 0x188(%rsp), %rax addq $0x4, %rax movq %rax, 0x188(%rsp) movl 0x13c(%rsp), %eax addl $0x1, %eax movl %eax, 0x13c(%rsp) jmp 0x1625ac8 jmp 0x1625b58 movl 0x230(%rsp), %eax addl $0x1, %eax movl %eax, 0x230(%rsp) jmp 0x1624c19 movl $0x0, 0x274(%rsp) movl 0x274(%rsp), %eax movq %rbp, %rsp popq %rbp vzeroupper retq nopl (%rax,%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/binaryop_x86_avx.cpp
int ncnn::binary_op_6_11_16_25<ncnn::BinaryOp_x86_avx_functor::binary_op_max>(ncnn::Mat const&, ncnn::Mat const&, ncnn::Mat&, ncnn::Option const&)
static int binary_op_6_11_16_25(const Mat& a, const Mat& b, Mat& c, const Option& opt) { Op op; int w = a.w; int h = a.h; int d = a.d; int channels = a.c; int elempack = a.elempack; int size = w * h * d * elempack; // type 6 11 16 25 c.create_like(a, opt.blob_allocator); if (c.empty()) return -100; #pragma omp parallel for num_threads(opt.num_threads) for (int q = 0; q < channels; q++) { const float* ptr = a.channel(q); const float b0 = b[0]; float* outptr = c.channel(q); int i = 0; #if __SSE2__ #if __AVX__ #if __AVX512F__ __m512 _b0_avx512 = _mm512_set1_ps(b0); for (; i + 15 < size; i += 16) { __m512 _p = _mm512_loadu_ps(ptr); __m512 _outp = op.func_pack16(_p, _b0_avx512); _mm512_storeu_ps(outptr, _outp); ptr += 16; outptr += 16; } #endif // __AVX512F__ __m256 _b0_avx = _mm256_set1_ps(b0); for (; i + 7 < size; i += 8) { __m256 _p = _mm256_loadu_ps(ptr); __m256 _outp = op.func_pack8(_p, _b0_avx); _mm256_storeu_ps(outptr, _outp); ptr += 8; outptr += 8; } #endif // __AVX__ __m128 _b0 = _mm_set1_ps(b0); for (; i + 3 < size; i += 4) { __m128 _p = _mm_load_ps(ptr); __m128 _outp = op.func_pack4(_p, _b0); _mm_store_ps(outptr, _outp); ptr += 4; outptr += 4; } #endif // __SSE2__ for (; i < size; i++) { *outptr = op.func(*ptr, b0); ptr += 1; outptr += 1; } } return 0; }
pushq %rbp movq %rsp, %rbp andq $-0x20, %rsp subq $0x460, %rsp # imm = 0x460 movq %rdi, 0x200(%rsp) movq %rsi, 0x1f8(%rsp) movq %rdx, 0x1f0(%rsp) movq %rcx, 0x1e8(%rsp) movq 0x200(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x1e0(%rsp) movq 0x200(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0x1dc(%rsp) movq 0x200(%rsp), %rax movl 0x34(%rax), %eax movl %eax, 0x1d8(%rsp) movq 0x200(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0x1d4(%rsp) movq 0x200(%rsp), %rax movl 0x18(%rax), %eax movl %eax, 0x1d0(%rsp) movl 0x1e0(%rsp), %eax imull 0x1dc(%rsp), %eax imull 0x1d8(%rsp), %eax imull 0x1d0(%rsp), %eax movl %eax, 0x1cc(%rsp) movq 0x1f0(%rsp), %rdi movq 0x200(%rsp), %rsi movq 0x1e8(%rsp), %rax movq 0x8(%rax), %rdx callq 0x65b60 movq 0x1f0(%rsp), %rax movq %rax, 0x210(%rsp) movq 0x210(%rsp), %rcx movq %rcx, 0x60(%rsp) movb $0x1, %al cmpq $0x0, (%rcx) movb %al, 0x6f(%rsp) je 0x1625cb5 movq 0x60(%rsp), %rax movq %rax, 0x2d0(%rsp) movq 0x2d0(%rsp), %rcx movq 0x40(%rcx), %rax movslq 0x38(%rcx), %rcx imulq %rcx, %rax cmpq $0x0, %rax sete %al movb %al, 0x6f(%rsp) movb 0x6f(%rsp), %al testb $0x1, %al jne 0x1625cbf jmp 0x1625ccf movl $0xffffff9c, 0x20c(%rsp) # imm = 0xFFFFFF9C jmp 0x16268f4 movl $0x0, 0x1c8(%rsp) movl 0x1c8(%rsp), %eax cmpl 0x1d4(%rsp), %eax jge 0x16268e9 movq 0x200(%rsp), %rcx movl 0x1c8(%rsp), %eax leaq 0x178(%rsp), %rdx movq %rdx, 0x228(%rsp) movq %rcx, 0x220(%rsp) movl %eax, 0x21c(%rsp) movq 0x220(%rsp), %rax movq %rax, 0x58(%rsp) movb $0x0, 0x21b(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x21c(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x178(%rsp), %r10 movq %r10, 0x340(%rsp) movl %r9d, 0x33c(%rsp) movl %r8d, 0x338(%rsp) movl %edi, 0x334(%rsp) movq %rsi, 0x328(%rsp) movq %rdx, 0x320(%rsp) movl %ecx, 0x31c(%rsp) movq %rax, 0x310(%rsp) movq 0x340(%rsp), %rcx movq %rcx, 0x50(%rsp) movq 
0x328(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x320(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x31c(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x310(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x33c(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x338(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x334(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x350(%rsp) movl $0x10, 0x34c(%rsp) movq 0x350(%rsp), %rax movslq 0x34c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x34c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x58(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x1a0(%rsp) cmpl $0x4, 0x28(%rax) jne 0x1625e9e movq 0x58(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x1b8(%rsp) movb $0x1, 0x21b(%rsp) testb $0x1, 0x21b(%rsp) jne 0x1625fcd leaq 0x178(%rsp), %rax movq %rax, 0x238(%rsp) movq 0x238(%rsp), %rax movq %rax, 0x3a0(%rsp) movq 0x3a0(%rsp), %rax movq %rax, 0x48(%rsp) cmpq $0x0, 0x8(%rax) je 0x1625f73 movq 0x48(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x39c(%rsp) # imm = 0xFFFFFFFF movl 0x39c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x398(%rsp) cmpl $0x1, 0x398(%rsp) jne 0x1625f73 movq 0x48(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1625f44 movq 0x48(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x1625f42 jmp 0x1625f71 movq 0x48(%rsp), %rax movq (%rax), %rax movq %rax, 0x3a8(%rsp) cmpq $0x0, 0x3a8(%rsp) je 0x1625f6f movq 0x3a8(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x1625f71 jmp 0x1625f73 movq 0x48(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq 
$0x0, 0x8(%rax) jmp 0x1625fcb movq %rax, %rdi callq 0x5fc90 jmp 0x1625fcd leaq 0x178(%rsp), %rax movq %rax, 0x230(%rsp) movq 0x230(%rsp), %rax movq (%rax), %rax movq %rax, 0x38(%rsp) leaq 0x178(%rsp), %rax movq %rax, 0x240(%rsp) movq 0x240(%rsp), %rax movq %rax, 0x390(%rsp) movq 0x390(%rsp), %rax movq %rax, 0x40(%rsp) cmpq $0x0, 0x8(%rax) je 0x16260ac movq 0x40(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x38c(%rsp) # imm = 0xFFFFFFFF movl 0x38c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x388(%rsp) cmpl $0x1, 0x388(%rsp) jne 0x16260ac movq 0x40(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x162607d movq 0x40(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x162607b jmp 0x16260aa movq 0x40(%rsp), %rax movq (%rax), %rax movq %rax, 0x3b0(%rsp) cmpq $0x0, 0x3b0(%rsp) je 0x16260a8 movq 0x3b0(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x16260aa jmp 0x16260ac movq 0x40(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1626104 movq %rax, %rdi callq 0x5fc90 movq 0x38(%rsp), %rax movq %rax, 0x1c0(%rsp) movq 0x1f8(%rsp), %rax movq %rax, 0x3d0(%rsp) movq $0x0, 0x3c8(%rsp) movq 0x3d0(%rsp), %rax movq (%rax), %rax movq 0x3c8(%rsp), %rcx vmovss (%rax,%rcx,4), %xmm0 vmovss %xmm0, 0x174(%rsp) movq 0x1f0(%rsp), %rcx movl 0x1c8(%rsp), %eax leaq 0x120(%rsp), %rdx movq %rdx, 0x260(%rsp) movq %rcx, 0x258(%rsp) movl %eax, 0x254(%rsp) movq 0x258(%rsp), %rax movq %rax, 0x30(%rsp) movb $0x0, 0x253(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x254(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x120(%rsp), %r10 movq %r10, 0x308(%rsp) movl %r9d, 0x304(%rsp) movl %r8d, 
0x300(%rsp) movl %edi, 0x2fc(%rsp) movq %rsi, 0x2f0(%rsp) movq %rdx, 0x2e8(%rsp) movl %ecx, 0x2e4(%rsp) movq %rax, 0x2d8(%rsp) movq 0x308(%rsp), %rcx movq %rcx, 0x28(%rsp) movq 0x2f0(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x2e8(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x2e4(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x2d8(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x304(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x300(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x2fc(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x360(%rsp) movl $0x10, 0x35c(%rsp) movq 0x360(%rsp), %rax movslq 0x35c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x35c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x30(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x148(%rsp) cmpl $0x4, 0x28(%rax) jne 0x16262fe movq 0x30(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x160(%rsp) movb $0x1, 0x253(%rsp) testb $0x1, 0x253(%rsp) jne 0x162642d leaq 0x120(%rsp), %rax movq %rax, 0x268(%rsp) movq 0x268(%rsp), %rax movq %rax, 0x370(%rsp) movq 0x370(%rsp), %rax movq %rax, 0x20(%rsp) cmpq $0x0, 0x8(%rax) je 0x16263d3 movq 0x20(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x36c(%rsp) # imm = 0xFFFFFFFF movl 0x36c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x368(%rsp) cmpl $0x1, 0x368(%rsp) jne 0x16263d3 movq 0x20(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x16263a4 movq 0x20(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x16263a2 jmp 0x16263d1 movq 0x20(%rsp), %rax movq (%rax), %rax movq %rax, 0x3c0(%rsp) cmpq $0x0, 0x3c0(%rsp) je 0x16263cf movq 0x3c0(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x16263d1 jmp 0x16263d3 movq 0x20(%rsp), %rax movq $0x0, (%rax) movq 
$0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x162642b movq %rax, %rdi callq 0x5fc90 jmp 0x162642d leaq 0x120(%rsp), %rax movq %rax, 0x270(%rsp) movq 0x270(%rsp), %rax movq (%rax), %rax movq %rax, 0x10(%rsp) leaq 0x120(%rsp), %rax movq %rax, 0x248(%rsp) movq 0x248(%rsp), %rax movq %rax, 0x380(%rsp) movq 0x380(%rsp), %rax movq %rax, 0x18(%rsp) cmpq $0x0, 0x8(%rax) je 0x162650c movq 0x18(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x37c(%rsp) # imm = 0xFFFFFFFF movl 0x37c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x378(%rsp) cmpl $0x1, 0x378(%rsp) jne 0x162650c movq 0x18(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x16264dd movq 0x18(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x16264db jmp 0x162650a movq 0x18(%rsp), %rax movq (%rax), %rax movq %rax, 0x3b8(%rsp) cmpq $0x0, 0x3b8(%rsp) je 0x1626508 movq 0x3b8(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x162650a jmp 0x162650c movq 0x18(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1626564 movq %rax, %rdi callq 0x5fc90 movq 0x10(%rsp), %rax movq %rax, 0x168(%rsp) movl $0x0, 0x11c(%rsp) vmovss 0x174(%rsp), %xmm0 vmovss %xmm0, 0x3dc(%rsp) vmovss 0x3dc(%rsp), %xmm0 vmovss %xmm0, 0xc(%rsp) vmovss %xmm0, 0x44c(%rsp) vmovss %xmm0, 0x448(%rsp) vmovss %xmm0, 0x444(%rsp) vmovss %xmm0, 0x440(%rsp) vmovss %xmm0, 0x43c(%rsp) vmovss %xmm0, 0x438(%rsp) vmovss %xmm0, 0x434(%rsp) vmovss %xmm0, 0x430(%rsp) vmovss 0x444(%rsp), %xmm1 vmovss 0x440(%rsp), %xmm0 vinsertps $0x10, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[2,3] vmovss 0x448(%rsp), %xmm1 vinsertps $0x20, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm1[0],xmm0[3] 
vmovss 0x44c(%rsp), %xmm1 vinsertps $0x30, %xmm1, %xmm0, %xmm1 # xmm1 = xmm0[0,1,2],xmm1[0] vmovss 0x434(%rsp), %xmm2 vmovss 0x430(%rsp), %xmm0 vinsertps $0x10, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm2[0],xmm0[2,3] vmovss 0x438(%rsp), %xmm2 vinsertps $0x20, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm2[0],xmm0[3] vmovss 0x43c(%rsp), %xmm2 vinsertps $0x30, %xmm2, %xmm0, %xmm2 # xmm2 = xmm0[0,1,2],xmm2[0] vmovaps %xmm2, %xmm0 vinsertf128 $0x1, %xmm1, %ymm0, %ymm0 vmovaps %ymm0, 0x400(%rsp) vmovaps 0x400(%rsp), %ymm0 vmovaps %ymm0, 0xe0(%rsp) movl 0x11c(%rsp), %eax addl $0x7, %eax cmpl 0x1cc(%rsp), %eax jge 0x162674d movq 0x1c0(%rsp), %rax movq %rax, 0x278(%rsp) movq 0x278(%rsp), %rax vmovups (%rax), %ymm0 vmovaps %ymm0, 0xc0(%rsp) leaq 0x1e7(%rsp), %rdi leaq 0xc0(%rsp), %rsi leaq 0xe0(%rsp), %rdx callq 0x162d130 vmovaps %ymm0, 0xa0(%rsp) movq 0x168(%rsp), %rax vmovaps 0xa0(%rsp), %ymm0 movq %rax, 0x2a0(%rsp) vmovaps %ymm0, 0x280(%rsp) vmovaps 0x280(%rsp), %ymm0 movq 0x2a0(%rsp), %rax vmovups %ymm0, (%rax) movq 0x1c0(%rsp), %rax addq $0x20, %rax movq %rax, 0x1c0(%rsp) movq 0x168(%rsp), %rax addq $0x20, %rax movq %rax, 0x168(%rsp) movl 0x11c(%rsp), %eax addl $0x8, %eax movl %eax, 0x11c(%rsp) jmp 0x1626676 vmovss 0x174(%rsp), %xmm0 vmovss %xmm0, 0x3fc(%rsp) vbroadcastss 0x3fc(%rsp), %xmm0 vmovaps %xmm0, 0x3e0(%rsp) vmovaps 0x3e0(%rsp), %xmm0 vmovaps %xmm0, 0x90(%rsp) movl 0x11c(%rsp), %eax addl $0x3, %eax cmpl 0x1cc(%rsp), %eax jge 0x1626858 movq 0x1c0(%rsp), %rax movq %rax, 0x2a8(%rsp) movq 0x2a8(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x80(%rsp) leaq 0x1e7(%rsp), %rdi leaq 0x80(%rsp), %rsi leaq 0x90(%rsp), %rdx vzeroupper callq 0x162d190 vmovaps %xmm0, 0x70(%rsp) movq 0x168(%rsp), %rax vmovaps 0x70(%rsp), %xmm0 movq %rax, 0x2c8(%rsp) vmovaps %xmm0, 0x2b0(%rsp) vmovaps 0x2b0(%rsp), %xmm0 movq 0x2c8(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x1c0(%rsp), %rax addq $0x10, %rax movq %rax, 0x1c0(%rsp) movq 0x168(%rsp), %rax addq $0x10, %rax movq %rax, 0x168(%rsp) movl 
0x11c(%rsp), %eax addl $0x4, %eax movl %eax, 0x11c(%rsp) jmp 0x1626784 jmp 0x162685a movl 0x11c(%rsp), %eax cmpl 0x1cc(%rsp), %eax jge 0x16268d1 movq 0x1c0(%rsp), %rsi leaq 0x1e7(%rsp), %rdi leaq 0x174(%rsp), %rdx vzeroupper callq 0x162d1d0 movq 0x168(%rsp), %rax vmovss %xmm0, (%rax) movq 0x1c0(%rsp), %rax addq $0x4, %rax movq %rax, 0x1c0(%rsp) movq 0x168(%rsp), %rax addq $0x4, %rax movq %rax, 0x168(%rsp) movl 0x11c(%rsp), %eax addl $0x1, %eax movl %eax, 0x11c(%rsp) jmp 0x162685a jmp 0x16268d3 movl 0x1c8(%rsp), %eax addl $0x1, %eax movl %eax, 0x1c8(%rsp) jmp 0x1625cda movl $0x0, 0x20c(%rsp) movl 0x20c(%rsp), %eax movq %rbp, %rsp popq %rbp vzeroupper retq nopw %cs:(%rax,%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/binaryop_x86_avx.cpp
int ncnn::binary_op_2_3_4_20<ncnn::BinaryOp_x86_avx_functor::binary_op_max>(ncnn::Mat const&, ncnn::Mat const&, ncnn::Mat&, ncnn::Option const&)
// Broadcast binary op (types 2/3/4/20): out[...] = op(a[0], b[...]).
// The scalar a[0] is splatted and combined with every element of b,
// writing the result into c (allocated here to match b's layout).
// Returns 0 on success, -100 if the output allocation failed.
// NOTE(review): Op is a functor template parameter declared outside this
// view; the calls below show it must provide func (scalar), func_pack4,
// func_pack8 and, under AVX512, func_pack16 — confirm against the functor.
static int binary_op_2_3_4_20(const Mat& a, const Mat& b, Mat& c, const Option& opt)
{
    Op op;

    int w = b.w;
    int h = b.h;
    int d = b.d;
    int channels = b.c;
    int elempack = b.elempack;
    // Per-channel element count including packing; each channel is
    // processed as a flat run of 'size' floats.
    int size = w * h * d * elempack;

    // type 2 3 4 20
    c.create_like(b, opt.blob_allocator);
    if (c.empty())
        return -100;

    #pragma omp parallel for num_threads(opt.num_threads)
    for (int q = 0; q < channels; q++)
    {
        const float a0 = a[0];
        const float* ptr = b.channel(q);
        float* outptr = c.channel(q);

        // 'i' is shared across the SIMD tiers below: each tier consumes as
        // many full vectors as it can, and the next tier (or the scalar
        // tail loop) picks up where it left off.
        int i = 0;
#if __SSE2__
#if __AVX__
#if __AVX512F__
        __m512 _a0_avx512 = _mm512_set1_ps(a0);
        for (; i + 15 < size; i += 16)
        {
            __m512 _p = _mm512_loadu_ps(ptr);
            __m512 _outp = op.func_pack16(_a0_avx512, _p);
            _mm512_storeu_ps(outptr, _outp);
            ptr += 16;
            outptr += 16;
        }
#endif // __AVX512F__
        __m256 _a0_avx = _mm256_set1_ps(a0);
        for (; i + 7 < size; i += 8)
        {
            __m256 _p = _mm256_loadu_ps(ptr);
            __m256 _outp = op.func_pack8(_a0_avx, _p);
            _mm256_storeu_ps(outptr, _outp);
            ptr += 8;
            outptr += 8;
        }
#endif // __AVX__
        __m128 _a0 = _mm_set1_ps(a0);
        // Aligned 128-bit load/store here (vs. unaligned above) — assumes
        // channel data is at least 16-byte aligned; TODO confirm Mat's
        // allocator guarantees this.
        for (; i + 3 < size; i += 4)
        {
            __m128 _p = _mm_load_ps(ptr);
            __m128 _outp = op.func_pack4(_a0, _p);
            _mm_store_ps(outptr, _outp);
            ptr += 4;
            outptr += 4;
        }
#endif // __SSE2__
        // Scalar tail for the remaining (size % vector width) elements.
        for (; i < size; i++)
        {
            *outptr = op.func(a0, *ptr);
            ptr += 1;
            outptr += 1;
        }
    }

    return 0;
}
pushq %rbp movq %rsp, %rbp andq $-0x20, %rsp subq $0x460, %rsp # imm = 0x460 movq %rdi, 0x200(%rsp) movq %rsi, 0x1f8(%rsp) movq %rdx, 0x1f0(%rsp) movq %rcx, 0x1e8(%rsp) movq 0x1f8(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x1e0(%rsp) movq 0x1f8(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0x1dc(%rsp) movq 0x1f8(%rsp), %rax movl 0x34(%rax), %eax movl %eax, 0x1d8(%rsp) movq 0x1f8(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0x1d4(%rsp) movq 0x1f8(%rsp), %rax movl 0x18(%rax), %eax movl %eax, 0x1d0(%rsp) movl 0x1e0(%rsp), %eax imull 0x1dc(%rsp), %eax imull 0x1d8(%rsp), %eax imull 0x1d0(%rsp), %eax movl %eax, 0x1cc(%rsp) movq 0x1f0(%rsp), %rdi movq 0x1f8(%rsp), %rsi movq 0x1e8(%rsp), %rax movq 0x8(%rax), %rdx callq 0x65b60 movq 0x1f0(%rsp), %rax movq %rax, 0x210(%rsp) movq 0x210(%rsp), %rcx movq %rcx, 0x60(%rsp) movb $0x1, %al cmpq $0x0, (%rcx) movb %al, 0x6f(%rsp) je 0x1626a35 movq 0x60(%rsp), %rax movq %rax, 0x2d0(%rsp) movq 0x2d0(%rsp), %rcx movq 0x40(%rcx), %rax movslq 0x38(%rcx), %rcx imulq %rcx, %rax cmpq $0x0, %rax sete %al movb %al, 0x6f(%rsp) movb 0x6f(%rsp), %al testb $0x1, %al jne 0x1626a3f jmp 0x1626a4f movl $0xffffff9c, 0x20c(%rsp) # imm = 0xFFFFFF9C jmp 0x1627674 movl $0x0, 0x1c8(%rsp) movl 0x1c8(%rsp), %eax cmpl 0x1d4(%rsp), %eax jge 0x1627669 movq 0x200(%rsp), %rax movq %rax, 0x3d0(%rsp) movq $0x0, 0x3c8(%rsp) movq 0x3d0(%rsp), %rax movq (%rax), %rax movq 0x3c8(%rsp), %rcx vmovss (%rax,%rcx,4), %xmm0 vmovss %xmm0, 0x1c4(%rsp) movq 0x1f8(%rsp), %rcx movl 0x1c8(%rsp), %eax leaq 0x170(%rsp), %rdx movq %rdx, 0x228(%rsp) movq %rcx, 0x220(%rsp) movl %eax, 0x21c(%rsp) movq 0x220(%rsp), %rax movq %rax, 0x58(%rsp) movb $0x0, 0x21b(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x21c(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x170(%rsp), %r10 movq %r10, 0x340(%rsp) movl %r9d, 0x33c(%rsp) movl 
%r8d, 0x338(%rsp) movl %edi, 0x334(%rsp) movq %rsi, 0x328(%rsp) movq %rdx, 0x320(%rsp) movl %ecx, 0x31c(%rsp) movq %rax, 0x310(%rsp) movq 0x340(%rsp), %rcx movq %rcx, 0x50(%rsp) movq 0x328(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x320(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x31c(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x310(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x33c(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x338(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x334(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x350(%rsp) movl $0x10, 0x34c(%rsp) movq 0x350(%rsp), %rax movslq 0x34c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x34c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x58(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x198(%rsp) cmpl $0x4, 0x28(%rax) jne 0x1626c5b movq 0x58(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x1b0(%rsp) movb $0x1, 0x21b(%rsp) testb $0x1, 0x21b(%rsp) jne 0x1626d8a leaq 0x170(%rsp), %rax movq %rax, 0x238(%rsp) movq 0x238(%rsp), %rax movq %rax, 0x3a0(%rsp) movq 0x3a0(%rsp), %rax movq %rax, 0x48(%rsp) cmpq $0x0, 0x8(%rax) je 0x1626d30 movq 0x48(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x39c(%rsp) # imm = 0xFFFFFFFF movl 0x39c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x398(%rsp) cmpl $0x1, 0x398(%rsp) jne 0x1626d30 movq 0x48(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1626d01 movq 0x48(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x1626cff jmp 0x1626d2e movq 0x48(%rsp), %rax movq (%rax), %rax movq %rax, 0x3a8(%rsp) cmpq $0x0, 0x3a8(%rsp) je 0x1626d2c movq 0x3a8(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x1626d2e jmp 0x1626d30 movq 0x48(%rsp), %rax movq $0x0, (%rax) 
movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1626d88 movq %rax, %rdi callq 0x5fc90 jmp 0x1626d8a leaq 0x170(%rsp), %rax movq %rax, 0x230(%rsp) movq 0x230(%rsp), %rax movq (%rax), %rax movq %rax, 0x38(%rsp) leaq 0x170(%rsp), %rax movq %rax, 0x240(%rsp) movq 0x240(%rsp), %rax movq %rax, 0x390(%rsp) movq 0x390(%rsp), %rax movq %rax, 0x40(%rsp) cmpq $0x0, 0x8(%rax) je 0x1626e69 movq 0x40(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x38c(%rsp) # imm = 0xFFFFFFFF movl 0x38c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x388(%rsp) cmpl $0x1, 0x388(%rsp) jne 0x1626e69 movq 0x40(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1626e3a movq 0x40(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x1626e38 jmp 0x1626e67 movq 0x40(%rsp), %rax movq (%rax), %rax movq %rax, 0x3b0(%rsp) cmpq $0x0, 0x3b0(%rsp) je 0x1626e65 movq 0x3b0(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x1626e67 jmp 0x1626e69 movq 0x40(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1626ec1 movq %rax, %rdi callq 0x5fc90 movq 0x38(%rsp), %rax movq %rax, 0x1b8(%rsp) movq 0x1f0(%rsp), %rcx movl 0x1c8(%rsp), %eax leaq 0x120(%rsp), %rdx movq %rdx, 0x260(%rsp) movq %rcx, 0x258(%rsp) movl %eax, 0x254(%rsp) movq 0x258(%rsp), %rax movq %rax, 0x30(%rsp) movb $0x0, 0x253(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x254(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x120(%rsp), %r10 movq %r10, 0x308(%rsp) movl %r9d, 0x304(%rsp) movl %r8d, 0x300(%rsp) 
movl %edi, 0x2fc(%rsp) movq %rsi, 0x2f0(%rsp) movq %rdx, 0x2e8(%rsp) movl %ecx, 0x2e4(%rsp) movq %rax, 0x2d8(%rsp) movq 0x308(%rsp), %rcx movq %rcx, 0x28(%rsp) movq 0x2f0(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x2e8(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x2e4(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x2d8(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x304(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x300(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x2fc(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x360(%rsp) movl $0x10, 0x35c(%rsp) movq 0x360(%rsp), %rax movslq 0x35c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x35c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x30(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x148(%rsp) cmpl $0x4, 0x28(%rax) jne 0x162707e movq 0x30(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x160(%rsp) movb $0x1, 0x253(%rsp) testb $0x1, 0x253(%rsp) jne 0x16271ad leaq 0x120(%rsp), %rax movq %rax, 0x268(%rsp) movq 0x268(%rsp), %rax movq %rax, 0x370(%rsp) movq 0x370(%rsp), %rax movq %rax, 0x20(%rsp) cmpq $0x0, 0x8(%rax) je 0x1627153 movq 0x20(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x36c(%rsp) # imm = 0xFFFFFFFF movl 0x36c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x368(%rsp) cmpl $0x1, 0x368(%rsp) jne 0x1627153 movq 0x20(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1627124 movq 0x20(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x1627122 jmp 0x1627151 movq 0x20(%rsp), %rax movq (%rax), %rax movq %rax, 0x3c0(%rsp) cmpq $0x0, 0x3c0(%rsp) je 0x162714f movq 0x3c0(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x1627151 jmp 0x1627153 movq 0x20(%rsp), %rax movq $0x0, (%rax) movq $0x0, 
0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x16271ab movq %rax, %rdi callq 0x5fc90 jmp 0x16271ad leaq 0x120(%rsp), %rax movq %rax, 0x270(%rsp) movq 0x270(%rsp), %rax movq (%rax), %rax movq %rax, 0x10(%rsp) leaq 0x120(%rsp), %rax movq %rax, 0x248(%rsp) movq 0x248(%rsp), %rax movq %rax, 0x380(%rsp) movq 0x380(%rsp), %rax movq %rax, 0x18(%rsp) cmpq $0x0, 0x8(%rax) je 0x162728c movq 0x18(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x37c(%rsp) # imm = 0xFFFFFFFF movl 0x37c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x378(%rsp) cmpl $0x1, 0x378(%rsp) jne 0x162728c movq 0x18(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x162725d movq 0x18(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x162725b jmp 0x162728a movq 0x18(%rsp), %rax movq (%rax), %rax movq %rax, 0x3b8(%rsp) cmpq $0x0, 0x3b8(%rsp) je 0x1627288 movq 0x3b8(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x162728a jmp 0x162728c movq 0x18(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x16272e4 movq %rax, %rdi callq 0x5fc90 movq 0x10(%rsp), %rax movq %rax, 0x168(%rsp) movl $0x0, 0x11c(%rsp) vmovss 0x1c4(%rsp), %xmm0 vmovss %xmm0, 0x3dc(%rsp) vmovss 0x3dc(%rsp), %xmm0 vmovss %xmm0, 0xc(%rsp) vmovss %xmm0, 0x44c(%rsp) vmovss %xmm0, 0x448(%rsp) vmovss %xmm0, 0x444(%rsp) vmovss %xmm0, 0x440(%rsp) vmovss %xmm0, 0x43c(%rsp) vmovss %xmm0, 0x438(%rsp) vmovss %xmm0, 0x434(%rsp) vmovss %xmm0, 0x430(%rsp) vmovss 0x444(%rsp), %xmm1 vmovss 0x440(%rsp), %xmm0 vinsertps $0x10, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[2,3] vmovss 0x448(%rsp), %xmm1 vinsertps $0x20, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm1[0],xmm0[3] vmovss 
0x44c(%rsp), %xmm1 vinsertps $0x30, %xmm1, %xmm0, %xmm1 # xmm1 = xmm0[0,1,2],xmm1[0] vmovss 0x434(%rsp), %xmm2 vmovss 0x430(%rsp), %xmm0 vinsertps $0x10, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm2[0],xmm0[2,3] vmovss 0x438(%rsp), %xmm2 vinsertps $0x20, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm2[0],xmm0[3] vmovss 0x43c(%rsp), %xmm2 vinsertps $0x30, %xmm2, %xmm0, %xmm2 # xmm2 = xmm0[0,1,2],xmm2[0] vmovaps %xmm2, %xmm0 vinsertf128 $0x1, %xmm1, %ymm0, %ymm0 vmovaps %ymm0, 0x400(%rsp) vmovaps 0x400(%rsp), %ymm0 vmovaps %ymm0, 0xe0(%rsp) movl 0x11c(%rsp), %eax addl $0x7, %eax cmpl 0x1cc(%rsp), %eax jge 0x16274cd movq 0x1b8(%rsp), %rax movq %rax, 0x278(%rsp) movq 0x278(%rsp), %rax vmovups (%rax), %ymm0 vmovaps %ymm0, 0xc0(%rsp) leaq 0x1e7(%rsp), %rdi leaq 0xe0(%rsp), %rsi leaq 0xc0(%rsp), %rdx callq 0x162d130 vmovaps %ymm0, 0xa0(%rsp) movq 0x168(%rsp), %rax vmovaps 0xa0(%rsp), %ymm0 movq %rax, 0x2a0(%rsp) vmovaps %ymm0, 0x280(%rsp) vmovaps 0x280(%rsp), %ymm0 movq 0x2a0(%rsp), %rax vmovups %ymm0, (%rax) movq 0x1b8(%rsp), %rax addq $0x20, %rax movq %rax, 0x1b8(%rsp) movq 0x168(%rsp), %rax addq $0x20, %rax movq %rax, 0x168(%rsp) movl 0x11c(%rsp), %eax addl $0x8, %eax movl %eax, 0x11c(%rsp) jmp 0x16273f6 vmovss 0x1c4(%rsp), %xmm0 vmovss %xmm0, 0x3fc(%rsp) vbroadcastss 0x3fc(%rsp), %xmm0 vmovaps %xmm0, 0x3e0(%rsp) vmovaps 0x3e0(%rsp), %xmm0 vmovaps %xmm0, 0x90(%rsp) movl 0x11c(%rsp), %eax addl $0x3, %eax cmpl 0x1cc(%rsp), %eax jge 0x16275d8 movq 0x1b8(%rsp), %rax movq %rax, 0x2a8(%rsp) movq 0x2a8(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x80(%rsp) leaq 0x1e7(%rsp), %rdi leaq 0x90(%rsp), %rsi leaq 0x80(%rsp), %rdx vzeroupper callq 0x162d190 vmovaps %xmm0, 0x70(%rsp) movq 0x168(%rsp), %rax vmovaps 0x70(%rsp), %xmm0 movq %rax, 0x2c8(%rsp) vmovaps %xmm0, 0x2b0(%rsp) vmovaps 0x2b0(%rsp), %xmm0 movq 0x2c8(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x1b8(%rsp), %rax addq $0x10, %rax movq %rax, 0x1b8(%rsp) movq 0x168(%rsp), %rax addq $0x10, %rax movq %rax, 0x168(%rsp) movl 
0x11c(%rsp), %eax addl $0x4, %eax movl %eax, 0x11c(%rsp) jmp 0x1627504 jmp 0x16275da movl 0x11c(%rsp), %eax cmpl 0x1cc(%rsp), %eax jge 0x1627651 movq 0x1b8(%rsp), %rdx leaq 0x1e7(%rsp), %rdi leaq 0x1c4(%rsp), %rsi vzeroupper callq 0x162d1d0 movq 0x168(%rsp), %rax vmovss %xmm0, (%rax) movq 0x1b8(%rsp), %rax addq $0x4, %rax movq %rax, 0x1b8(%rsp) movq 0x168(%rsp), %rax addq $0x4, %rax movq %rax, 0x168(%rsp) movl 0x11c(%rsp), %eax addl $0x1, %eax movl %eax, 0x11c(%rsp) jmp 0x16275da jmp 0x1627653 movl 0x1c8(%rsp), %eax addl $0x1, %eax movl %eax, 0x1c8(%rsp) jmp 0x1626a5a movl $0x0, 0x20c(%rsp) movl 0x20c(%rsp), %eax movq %rbp, %rsp popq %rbp vzeroupper retq nopw %cs:(%rax,%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/binaryop_x86_avx.cpp
int ncnn::binary_op_2_3_4_20<ncnn::BinaryOp_x86_avx_functor::binary_op_min>(ncnn::Mat const&, ncnn::Mat const&, ncnn::Mat&, ncnn::Option const&)
// Broadcast binary op (types 2/3/4/20): out[...] = op(a[0], b[...]).
// Splats the scalar a[0] and applies the functor against every element of
// b, storing into c (created here with b's layout). Returns 0 on success,
// -100 if the output allocation failed.
// NOTE(review): Op is a functor template parameter declared outside this
// view; the calls below require func (scalar), func_pack4, func_pack8
// and, under AVX512, func_pack16 — verify against the functor definition.
static int binary_op_2_3_4_20(const Mat& a, const Mat& b, Mat& c, const Option& opt)
{
    Op op;

    int w = b.w;
    int h = b.h;
    int d = b.d;
    int channels = b.c;
    int elempack = b.elempack;
    // Flat per-channel float count (packing folded in).
    int size = w * h * d * elempack;

    // type 2 3 4 20
    c.create_like(b, opt.blob_allocator);
    if (c.empty())
        return -100;

    #pragma omp parallel for num_threads(opt.num_threads)
    for (int q = 0; q < channels; q++)
    {
        const float a0 = a[0];
        const float* ptr = b.channel(q);
        float* outptr = c.channel(q);

        // Progressive loop peeling: widest available vector width first,
        // narrower widths and finally a scalar loop mop up the remainder.
        // The counter 'i' carries over from one tier to the next.
        int i = 0;
#if __SSE2__
#if __AVX__
#if __AVX512F__
        __m512 _a0_avx512 = _mm512_set1_ps(a0);
        for (; i + 15 < size; i += 16)
        {
            __m512 _p = _mm512_loadu_ps(ptr);
            __m512 _outp = op.func_pack16(_a0_avx512, _p);
            _mm512_storeu_ps(outptr, _outp);
            ptr += 16;
            outptr += 16;
        }
#endif // __AVX512F__
        __m256 _a0_avx = _mm256_set1_ps(a0);
        for (; i + 7 < size; i += 8)
        {
            __m256 _p = _mm256_loadu_ps(ptr);
            __m256 _outp = op.func_pack8(_a0_avx, _p);
            _mm256_storeu_ps(outptr, _outp);
            ptr += 8;
            outptr += 8;
        }
#endif // __AVX__
        __m128 _a0 = _mm_set1_ps(a0);
        // Aligned SSE load/store — presumably channel rows are 16-byte
        // aligned by Mat's allocator; TODO confirm.
        for (; i + 3 < size; i += 4)
        {
            __m128 _p = _mm_load_ps(ptr);
            __m128 _outp = op.func_pack4(_a0, _p);
            _mm_store_ps(outptr, _outp);
            ptr += 4;
            outptr += 4;
        }
#endif // __SSE2__
        // Scalar tail for elements not covered by any vector tier.
        for (; i < size; i++)
        {
            *outptr = op.func(a0, *ptr);
            ptr += 1;
            outptr += 1;
        }
    }

    return 0;
}
pushq %rbp movq %rsp, %rbp andq $-0x20, %rsp subq $0x460, %rsp # imm = 0x460 movq %rdi, 0x200(%rsp) movq %rsi, 0x1f8(%rsp) movq %rdx, 0x1f0(%rsp) movq %rcx, 0x1e8(%rsp) movq 0x1f8(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x1e0(%rsp) movq 0x1f8(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0x1dc(%rsp) movq 0x1f8(%rsp), %rax movl 0x34(%rax), %eax movl %eax, 0x1d8(%rsp) movq 0x1f8(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0x1d4(%rsp) movq 0x1f8(%rsp), %rax movl 0x18(%rax), %eax movl %eax, 0x1d0(%rsp) movl 0x1e0(%rsp), %eax imull 0x1dc(%rsp), %eax imull 0x1d8(%rsp), %eax imull 0x1d0(%rsp), %eax movl %eax, 0x1cc(%rsp) movq 0x1f0(%rsp), %rdi movq 0x1f8(%rsp), %rsi movq 0x1e8(%rsp), %rax movq 0x8(%rax), %rdx callq 0x65b60 movq 0x1f0(%rsp), %rax movq %rax, 0x210(%rsp) movq 0x210(%rsp), %rcx movq %rcx, 0x60(%rsp) movb $0x1, %al cmpq $0x0, (%rcx) movb %al, 0x6f(%rsp) je 0x1629605 movq 0x60(%rsp), %rax movq %rax, 0x2d0(%rsp) movq 0x2d0(%rsp), %rcx movq 0x40(%rcx), %rax movslq 0x38(%rcx), %rcx imulq %rcx, %rax cmpq $0x0, %rax sete %al movb %al, 0x6f(%rsp) movb 0x6f(%rsp), %al testb $0x1, %al jne 0x162960f jmp 0x162961f movl $0xffffff9c, 0x20c(%rsp) # imm = 0xFFFFFF9C jmp 0x162a244 movl $0x0, 0x1c8(%rsp) movl 0x1c8(%rsp), %eax cmpl 0x1d4(%rsp), %eax jge 0x162a239 movq 0x200(%rsp), %rax movq %rax, 0x3d0(%rsp) movq $0x0, 0x3c8(%rsp) movq 0x3d0(%rsp), %rax movq (%rax), %rax movq 0x3c8(%rsp), %rcx vmovss (%rax,%rcx,4), %xmm0 vmovss %xmm0, 0x1c4(%rsp) movq 0x1f8(%rsp), %rcx movl 0x1c8(%rsp), %eax leaq 0x170(%rsp), %rdx movq %rdx, 0x228(%rsp) movq %rcx, 0x220(%rsp) movl %eax, 0x21c(%rsp) movq 0x220(%rsp), %rax movq %rax, 0x58(%rsp) movb $0x0, 0x21b(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x21c(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x170(%rsp), %r10 movq %r10, 0x340(%rsp) movl %r9d, 0x33c(%rsp) movl 
%r8d, 0x338(%rsp) movl %edi, 0x334(%rsp) movq %rsi, 0x328(%rsp) movq %rdx, 0x320(%rsp) movl %ecx, 0x31c(%rsp) movq %rax, 0x310(%rsp) movq 0x340(%rsp), %rcx movq %rcx, 0x50(%rsp) movq 0x328(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x320(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x31c(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x310(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x33c(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x338(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x334(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x350(%rsp) movl $0x10, 0x34c(%rsp) movq 0x350(%rsp), %rax movslq 0x34c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x34c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x58(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x198(%rsp) cmpl $0x4, 0x28(%rax) jne 0x162982b movq 0x58(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x1b0(%rsp) movb $0x1, 0x21b(%rsp) testb $0x1, 0x21b(%rsp) jne 0x162995a leaq 0x170(%rsp), %rax movq %rax, 0x238(%rsp) movq 0x238(%rsp), %rax movq %rax, 0x3a0(%rsp) movq 0x3a0(%rsp), %rax movq %rax, 0x48(%rsp) cmpq $0x0, 0x8(%rax) je 0x1629900 movq 0x48(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x39c(%rsp) # imm = 0xFFFFFFFF movl 0x39c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x398(%rsp) cmpl $0x1, 0x398(%rsp) jne 0x1629900 movq 0x48(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x16298d1 movq 0x48(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x16298cf jmp 0x16298fe movq 0x48(%rsp), %rax movq (%rax), %rax movq %rax, 0x3a8(%rsp) cmpq $0x0, 0x3a8(%rsp) je 0x16298fc movq 0x3a8(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x16298fe jmp 0x1629900 movq 0x48(%rsp), %rax movq $0x0, (%rax) 
movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1629958 movq %rax, %rdi callq 0x5fc90 jmp 0x162995a leaq 0x170(%rsp), %rax movq %rax, 0x230(%rsp) movq 0x230(%rsp), %rax movq (%rax), %rax movq %rax, 0x38(%rsp) leaq 0x170(%rsp), %rax movq %rax, 0x240(%rsp) movq 0x240(%rsp), %rax movq %rax, 0x390(%rsp) movq 0x390(%rsp), %rax movq %rax, 0x40(%rsp) cmpq $0x0, 0x8(%rax) je 0x1629a39 movq 0x40(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x38c(%rsp) # imm = 0xFFFFFFFF movl 0x38c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x388(%rsp) cmpl $0x1, 0x388(%rsp) jne 0x1629a39 movq 0x40(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1629a0a movq 0x40(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x1629a08 jmp 0x1629a37 movq 0x40(%rsp), %rax movq (%rax), %rax movq %rax, 0x3b0(%rsp) cmpq $0x0, 0x3b0(%rsp) je 0x1629a35 movq 0x3b0(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x1629a37 jmp 0x1629a39 movq 0x40(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1629a91 movq %rax, %rdi callq 0x5fc90 movq 0x38(%rsp), %rax movq %rax, 0x1b8(%rsp) movq 0x1f0(%rsp), %rcx movl 0x1c8(%rsp), %eax leaq 0x120(%rsp), %rdx movq %rdx, 0x260(%rsp) movq %rcx, 0x258(%rsp) movl %eax, 0x254(%rsp) movq 0x258(%rsp), %rax movq %rax, 0x30(%rsp) movb $0x0, 0x253(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x254(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x120(%rsp), %r10 movq %r10, 0x308(%rsp) movl %r9d, 0x304(%rsp) movl %r8d, 0x300(%rsp) 
movl %edi, 0x2fc(%rsp) movq %rsi, 0x2f0(%rsp) movq %rdx, 0x2e8(%rsp) movl %ecx, 0x2e4(%rsp) movq %rax, 0x2d8(%rsp) movq 0x308(%rsp), %rcx movq %rcx, 0x28(%rsp) movq 0x2f0(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x2e8(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x2e4(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x2d8(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x304(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x300(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x2fc(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x360(%rsp) movl $0x10, 0x35c(%rsp) movq 0x360(%rsp), %rax movslq 0x35c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x35c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x30(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x148(%rsp) cmpl $0x4, 0x28(%rax) jne 0x1629c4e movq 0x30(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x160(%rsp) movb $0x1, 0x253(%rsp) testb $0x1, 0x253(%rsp) jne 0x1629d7d leaq 0x120(%rsp), %rax movq %rax, 0x268(%rsp) movq 0x268(%rsp), %rax movq %rax, 0x370(%rsp) movq 0x370(%rsp), %rax movq %rax, 0x20(%rsp) cmpq $0x0, 0x8(%rax) je 0x1629d23 movq 0x20(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x36c(%rsp) # imm = 0xFFFFFFFF movl 0x36c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x368(%rsp) cmpl $0x1, 0x368(%rsp) jne 0x1629d23 movq 0x20(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1629cf4 movq 0x20(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x1629cf2 jmp 0x1629d21 movq 0x20(%rsp), %rax movq (%rax), %rax movq %rax, 0x3c0(%rsp) cmpq $0x0, 0x3c0(%rsp) je 0x1629d1f movq 0x3c0(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x1629d21 jmp 0x1629d23 movq 0x20(%rsp), %rax movq $0x0, (%rax) movq $0x0, 
0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1629d7b movq %rax, %rdi callq 0x5fc90 jmp 0x1629d7d leaq 0x120(%rsp), %rax movq %rax, 0x270(%rsp) movq 0x270(%rsp), %rax movq (%rax), %rax movq %rax, 0x10(%rsp) leaq 0x120(%rsp), %rax movq %rax, 0x248(%rsp) movq 0x248(%rsp), %rax movq %rax, 0x380(%rsp) movq 0x380(%rsp), %rax movq %rax, 0x18(%rsp) cmpq $0x0, 0x8(%rax) je 0x1629e5c movq 0x18(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x37c(%rsp) # imm = 0xFFFFFFFF movl 0x37c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x378(%rsp) cmpl $0x1, 0x378(%rsp) jne 0x1629e5c movq 0x18(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1629e2d movq 0x18(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x1629e2b jmp 0x1629e5a movq 0x18(%rsp), %rax movq (%rax), %rax movq %rax, 0x3b8(%rsp) cmpq $0x0, 0x3b8(%rsp) je 0x1629e58 movq 0x3b8(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x1629e5a jmp 0x1629e5c movq 0x18(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1629eb4 movq %rax, %rdi callq 0x5fc90 movq 0x10(%rsp), %rax movq %rax, 0x168(%rsp) movl $0x0, 0x11c(%rsp) vmovss 0x1c4(%rsp), %xmm0 vmovss %xmm0, 0x3dc(%rsp) vmovss 0x3dc(%rsp), %xmm0 vmovss %xmm0, 0xc(%rsp) vmovss %xmm0, 0x44c(%rsp) vmovss %xmm0, 0x448(%rsp) vmovss %xmm0, 0x444(%rsp) vmovss %xmm0, 0x440(%rsp) vmovss %xmm0, 0x43c(%rsp) vmovss %xmm0, 0x438(%rsp) vmovss %xmm0, 0x434(%rsp) vmovss %xmm0, 0x430(%rsp) vmovss 0x444(%rsp), %xmm1 vmovss 0x440(%rsp), %xmm0 vinsertps $0x10, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[2,3] vmovss 0x448(%rsp), %xmm1 vinsertps $0x20, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm1[0],xmm0[3] vmovss 
0x44c(%rsp), %xmm1 vinsertps $0x30, %xmm1, %xmm0, %xmm1 # xmm1 = xmm0[0,1,2],xmm1[0] vmovss 0x434(%rsp), %xmm2 vmovss 0x430(%rsp), %xmm0 vinsertps $0x10, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm2[0],xmm0[2,3] vmovss 0x438(%rsp), %xmm2 vinsertps $0x20, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm2[0],xmm0[3] vmovss 0x43c(%rsp), %xmm2 vinsertps $0x30, %xmm2, %xmm0, %xmm2 # xmm2 = xmm0[0,1,2],xmm2[0] vmovaps %xmm2, %xmm0 vinsertf128 $0x1, %xmm1, %ymm0, %ymm0 vmovaps %ymm0, 0x400(%rsp) vmovaps 0x400(%rsp), %ymm0 vmovaps %ymm0, 0xe0(%rsp) movl 0x11c(%rsp), %eax addl $0x7, %eax cmpl 0x1cc(%rsp), %eax jge 0x162a09d movq 0x1b8(%rsp), %rax movq %rax, 0x278(%rsp) movq 0x278(%rsp), %rax vmovups (%rax), %ymm0 vmovaps %ymm0, 0xc0(%rsp) leaq 0x1e7(%rsp), %rdi leaq 0xe0(%rsp), %rsi leaq 0xc0(%rsp), %rdx callq 0x162d200 vmovaps %ymm0, 0xa0(%rsp) movq 0x168(%rsp), %rax vmovaps 0xa0(%rsp), %ymm0 movq %rax, 0x2a0(%rsp) vmovaps %ymm0, 0x280(%rsp) vmovaps 0x280(%rsp), %ymm0 movq 0x2a0(%rsp), %rax vmovups %ymm0, (%rax) movq 0x1b8(%rsp), %rax addq $0x20, %rax movq %rax, 0x1b8(%rsp) movq 0x168(%rsp), %rax addq $0x20, %rax movq %rax, 0x168(%rsp) movl 0x11c(%rsp), %eax addl $0x8, %eax movl %eax, 0x11c(%rsp) jmp 0x1629fc6 vmovss 0x1c4(%rsp), %xmm0 vmovss %xmm0, 0x3fc(%rsp) vbroadcastss 0x3fc(%rsp), %xmm0 vmovaps %xmm0, 0x3e0(%rsp) vmovaps 0x3e0(%rsp), %xmm0 vmovaps %xmm0, 0x90(%rsp) movl 0x11c(%rsp), %eax addl $0x3, %eax cmpl 0x1cc(%rsp), %eax jge 0x162a1a8 movq 0x1b8(%rsp), %rax movq %rax, 0x2a8(%rsp) movq 0x2a8(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x80(%rsp) leaq 0x1e7(%rsp), %rdi leaq 0x90(%rsp), %rsi leaq 0x80(%rsp), %rdx vzeroupper callq 0x162d260 vmovaps %xmm0, 0x70(%rsp) movq 0x168(%rsp), %rax vmovaps 0x70(%rsp), %xmm0 movq %rax, 0x2c8(%rsp) vmovaps %xmm0, 0x2b0(%rsp) vmovaps 0x2b0(%rsp), %xmm0 movq 0x2c8(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x1b8(%rsp), %rax addq $0x10, %rax movq %rax, 0x1b8(%rsp) movq 0x168(%rsp), %rax addq $0x10, %rax movq %rax, 0x168(%rsp) movl 
0x11c(%rsp), %eax addl $0x4, %eax movl %eax, 0x11c(%rsp) jmp 0x162a0d4 jmp 0x162a1aa movl 0x11c(%rsp), %eax cmpl 0x1cc(%rsp), %eax jge 0x162a221 movq 0x1b8(%rsp), %rdx leaq 0x1e7(%rsp), %rdi leaq 0x1c4(%rsp), %rsi vzeroupper callq 0x162d2a0 movq 0x168(%rsp), %rax vmovss %xmm0, (%rax) movq 0x1b8(%rsp), %rax addq $0x4, %rax movq %rax, 0x1b8(%rsp) movq 0x168(%rsp), %rax addq $0x4, %rax movq %rax, 0x168(%rsp) movl 0x11c(%rsp), %eax addl $0x1, %eax movl %eax, 0x11c(%rsp) jmp 0x162a1aa jmp 0x162a223 movl 0x1c8(%rsp), %eax addl $0x1, %eax movl %eax, 0x1c8(%rsp) jmp 0x162962a movl $0x0, 0x20c(%rsp) movl 0x20c(%rsp), %eax movq %rbp, %rsp popq %rbp vzeroupper retq nopw %cs:(%rax,%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/binaryop_x86_avx.cpp
int ncnn::binary_op_2_3_4_20<ncnn::BinaryOp_x86_avx_functor::binary_op_pow>(ncnn::Mat const&, ncnn::Mat const&, ncnn::Mat&, ncnn::Option const&)
static int binary_op_2_3_4_20(const Mat& a, const Mat& b, Mat& c, const Option& opt) { Op op; int w = b.w; int h = b.h; int d = b.d; int channels = b.c; int elempack = b.elempack; int size = w * h * d * elempack; // type 2 3 4 20 c.create_like(b, opt.blob_allocator); if (c.empty()) return -100; #pragma omp parallel for num_threads(opt.num_threads) for (int q = 0; q < channels; q++) { const float a0 = a[0]; const float* ptr = b.channel(q); float* outptr = c.channel(q); int i = 0; #if __SSE2__ #if __AVX__ #if __AVX512F__ __m512 _a0_avx512 = _mm512_set1_ps(a0); for (; i + 15 < size; i += 16) { __m512 _p = _mm512_loadu_ps(ptr); __m512 _outp = op.func_pack16(_a0_avx512, _p); _mm512_storeu_ps(outptr, _outp); ptr += 16; outptr += 16; } #endif // __AVX512F__ __m256 _a0_avx = _mm256_set1_ps(a0); for (; i + 7 < size; i += 8) { __m256 _p = _mm256_loadu_ps(ptr); __m256 _outp = op.func_pack8(_a0_avx, _p); _mm256_storeu_ps(outptr, _outp); ptr += 8; outptr += 8; } #endif // __AVX__ __m128 _a0 = _mm_set1_ps(a0); for (; i + 3 < size; i += 4) { __m128 _p = _mm_load_ps(ptr); __m128 _outp = op.func_pack4(_a0, _p); _mm_store_ps(outptr, _outp); ptr += 4; outptr += 4; } #endif // __SSE2__ for (; i < size; i++) { *outptr = op.func(a0, *ptr); ptr += 1; outptr += 1; } } return 0; }
pushq %rbp movq %rsp, %rbp andq $-0x20, %rsp subq $0x460, %rsp # imm = 0x460 movq %rdi, 0x200(%rsp) movq %rsi, 0x1f8(%rsp) movq %rdx, 0x1f0(%rsp) movq %rcx, 0x1e8(%rsp) movq 0x1f8(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x1e0(%rsp) movq 0x1f8(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0x1dc(%rsp) movq 0x1f8(%rsp), %rax movl 0x34(%rax), %eax movl %eax, 0x1d8(%rsp) movq 0x1f8(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0x1d4(%rsp) movq 0x1f8(%rsp), %rax movl 0x18(%rax), %eax movl %eax, 0x1d0(%rsp) movl 0x1e0(%rsp), %eax imull 0x1dc(%rsp), %eax imull 0x1d8(%rsp), %eax imull 0x1d0(%rsp), %eax movl %eax, 0x1cc(%rsp) movq 0x1f0(%rsp), %rdi movq 0x1f8(%rsp), %rsi movq 0x1e8(%rsp), %rax movq 0x8(%rax), %rdx callq 0x65b60 movq 0x1f0(%rsp), %rax movq %rax, 0x210(%rsp) movq 0x210(%rsp), %rcx movq %rcx, 0x60(%rsp) movb $0x1, %al cmpq $0x0, (%rcx) movb %al, 0x6f(%rsp) je 0x162c1d5 movq 0x60(%rsp), %rax movq %rax, 0x2d0(%rsp) movq 0x2d0(%rsp), %rcx movq 0x40(%rcx), %rax movslq 0x38(%rcx), %rcx imulq %rcx, %rax cmpq $0x0, %rax sete %al movb %al, 0x6f(%rsp) movb 0x6f(%rsp), %al testb $0x1, %al jne 0x162c1df jmp 0x162c1ef movl $0xffffff9c, 0x20c(%rsp) # imm = 0xFFFFFF9C jmp 0x162ce14 movl $0x0, 0x1c8(%rsp) movl 0x1c8(%rsp), %eax cmpl 0x1d4(%rsp), %eax jge 0x162ce09 movq 0x200(%rsp), %rax movq %rax, 0x3d0(%rsp) movq $0x0, 0x3c8(%rsp) movq 0x3d0(%rsp), %rax movq (%rax), %rax movq 0x3c8(%rsp), %rcx vmovss (%rax,%rcx,4), %xmm0 vmovss %xmm0, 0x1c4(%rsp) movq 0x1f8(%rsp), %rcx movl 0x1c8(%rsp), %eax leaq 0x170(%rsp), %rdx movq %rdx, 0x228(%rsp) movq %rcx, 0x220(%rsp) movl %eax, 0x21c(%rsp) movq 0x220(%rsp), %rax movq %rax, 0x58(%rsp) movb $0x0, 0x21b(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x21c(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x170(%rsp), %r10 movq %r10, 0x340(%rsp) movl %r9d, 0x33c(%rsp) movl 
%r8d, 0x338(%rsp) movl %edi, 0x334(%rsp) movq %rsi, 0x328(%rsp) movq %rdx, 0x320(%rsp) movl %ecx, 0x31c(%rsp) movq %rax, 0x310(%rsp) movq 0x340(%rsp), %rcx movq %rcx, 0x50(%rsp) movq 0x328(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x320(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x31c(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x310(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x33c(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x338(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x334(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x350(%rsp) movl $0x10, 0x34c(%rsp) movq 0x350(%rsp), %rax movslq 0x34c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x34c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x58(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x198(%rsp) cmpl $0x4, 0x28(%rax) jne 0x162c3fb movq 0x58(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x1b0(%rsp) movb $0x1, 0x21b(%rsp) testb $0x1, 0x21b(%rsp) jne 0x162c52a leaq 0x170(%rsp), %rax movq %rax, 0x238(%rsp) movq 0x238(%rsp), %rax movq %rax, 0x3a0(%rsp) movq 0x3a0(%rsp), %rax movq %rax, 0x48(%rsp) cmpq $0x0, 0x8(%rax) je 0x162c4d0 movq 0x48(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x39c(%rsp) # imm = 0xFFFFFFFF movl 0x39c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x398(%rsp) cmpl $0x1, 0x398(%rsp) jne 0x162c4d0 movq 0x48(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x162c4a1 movq 0x48(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x162c49f jmp 0x162c4ce movq 0x48(%rsp), %rax movq (%rax), %rax movq %rax, 0x3a8(%rsp) cmpq $0x0, 0x3a8(%rsp) je 0x162c4cc movq 0x3a8(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x162c4ce jmp 0x162c4d0 movq 0x48(%rsp), %rax movq $0x0, (%rax) 
movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x162c528 movq %rax, %rdi callq 0x5fc90 jmp 0x162c52a leaq 0x170(%rsp), %rax movq %rax, 0x230(%rsp) movq 0x230(%rsp), %rax movq (%rax), %rax movq %rax, 0x38(%rsp) leaq 0x170(%rsp), %rax movq %rax, 0x240(%rsp) movq 0x240(%rsp), %rax movq %rax, 0x390(%rsp) movq 0x390(%rsp), %rax movq %rax, 0x40(%rsp) cmpq $0x0, 0x8(%rax) je 0x162c609 movq 0x40(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x38c(%rsp) # imm = 0xFFFFFFFF movl 0x38c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x388(%rsp) cmpl $0x1, 0x388(%rsp) jne 0x162c609 movq 0x40(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x162c5da movq 0x40(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x162c5d8 jmp 0x162c607 movq 0x40(%rsp), %rax movq (%rax), %rax movq %rax, 0x3b0(%rsp) cmpq $0x0, 0x3b0(%rsp) je 0x162c605 movq 0x3b0(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x162c607 jmp 0x162c609 movq 0x40(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x162c661 movq %rax, %rdi callq 0x5fc90 movq 0x38(%rsp), %rax movq %rax, 0x1b8(%rsp) movq 0x1f0(%rsp), %rcx movl 0x1c8(%rsp), %eax leaq 0x120(%rsp), %rdx movq %rdx, 0x260(%rsp) movq %rcx, 0x258(%rsp) movl %eax, 0x254(%rsp) movq 0x258(%rsp), %rax movq %rax, 0x30(%rsp) movb $0x0, 0x253(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x254(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x120(%rsp), %r10 movq %r10, 0x308(%rsp) movl %r9d, 0x304(%rsp) movl %r8d, 0x300(%rsp) 
movl %edi, 0x2fc(%rsp) movq %rsi, 0x2f0(%rsp) movq %rdx, 0x2e8(%rsp) movl %ecx, 0x2e4(%rsp) movq %rax, 0x2d8(%rsp) movq 0x308(%rsp), %rcx movq %rcx, 0x28(%rsp) movq 0x2f0(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x2e8(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x2e4(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x2d8(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x304(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x300(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x2fc(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x360(%rsp) movl $0x10, 0x35c(%rsp) movq 0x360(%rsp), %rax movslq 0x35c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x35c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x30(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x148(%rsp) cmpl $0x4, 0x28(%rax) jne 0x162c81e movq 0x30(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x160(%rsp) movb $0x1, 0x253(%rsp) testb $0x1, 0x253(%rsp) jne 0x162c94d leaq 0x120(%rsp), %rax movq %rax, 0x268(%rsp) movq 0x268(%rsp), %rax movq %rax, 0x370(%rsp) movq 0x370(%rsp), %rax movq %rax, 0x20(%rsp) cmpq $0x0, 0x8(%rax) je 0x162c8f3 movq 0x20(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x36c(%rsp) # imm = 0xFFFFFFFF movl 0x36c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x368(%rsp) cmpl $0x1, 0x368(%rsp) jne 0x162c8f3 movq 0x20(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x162c8c4 movq 0x20(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x162c8c2 jmp 0x162c8f1 movq 0x20(%rsp), %rax movq (%rax), %rax movq %rax, 0x3c0(%rsp) cmpq $0x0, 0x3c0(%rsp) je 0x162c8ef movq 0x3c0(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x162c8f1 jmp 0x162c8f3 movq 0x20(%rsp), %rax movq $0x0, (%rax) movq $0x0, 
0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x162c94b movq %rax, %rdi callq 0x5fc90 jmp 0x162c94d leaq 0x120(%rsp), %rax movq %rax, 0x270(%rsp) movq 0x270(%rsp), %rax movq (%rax), %rax movq %rax, 0x10(%rsp) leaq 0x120(%rsp), %rax movq %rax, 0x248(%rsp) movq 0x248(%rsp), %rax movq %rax, 0x380(%rsp) movq 0x380(%rsp), %rax movq %rax, 0x18(%rsp) cmpq $0x0, 0x8(%rax) je 0x162ca2c movq 0x18(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x37c(%rsp) # imm = 0xFFFFFFFF movl 0x37c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x378(%rsp) cmpl $0x1, 0x378(%rsp) jne 0x162ca2c movq 0x18(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x162c9fd movq 0x18(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x162c9fb jmp 0x162ca2a movq 0x18(%rsp), %rax movq (%rax), %rax movq %rax, 0x3b8(%rsp) cmpq $0x0, 0x3b8(%rsp) je 0x162ca28 movq 0x3b8(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x162ca2a jmp 0x162ca2c movq 0x18(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x162ca84 movq %rax, %rdi callq 0x5fc90 movq 0x10(%rsp), %rax movq %rax, 0x168(%rsp) movl $0x0, 0x11c(%rsp) vmovss 0x1c4(%rsp), %xmm0 vmovss %xmm0, 0x3dc(%rsp) vmovss 0x3dc(%rsp), %xmm0 vmovss %xmm0, 0xc(%rsp) vmovss %xmm0, 0x44c(%rsp) vmovss %xmm0, 0x448(%rsp) vmovss %xmm0, 0x444(%rsp) vmovss %xmm0, 0x440(%rsp) vmovss %xmm0, 0x43c(%rsp) vmovss %xmm0, 0x438(%rsp) vmovss %xmm0, 0x434(%rsp) vmovss %xmm0, 0x430(%rsp) vmovss 0x444(%rsp), %xmm1 vmovss 0x440(%rsp), %xmm0 vinsertps $0x10, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[2,3] vmovss 0x448(%rsp), %xmm1 vinsertps $0x20, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm1[0],xmm0[3] vmovss 
0x44c(%rsp), %xmm1 vinsertps $0x30, %xmm1, %xmm0, %xmm1 # xmm1 = xmm0[0,1,2],xmm1[0] vmovss 0x434(%rsp), %xmm2 vmovss 0x430(%rsp), %xmm0 vinsertps $0x10, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm2[0],xmm0[2,3] vmovss 0x438(%rsp), %xmm2 vinsertps $0x20, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm2[0],xmm0[3] vmovss 0x43c(%rsp), %xmm2 vinsertps $0x30, %xmm2, %xmm0, %xmm2 # xmm2 = xmm0[0,1,2],xmm2[0] vmovaps %xmm2, %xmm0 vinsertf128 $0x1, %xmm1, %ymm0, %ymm0 vmovaps %ymm0, 0x400(%rsp) vmovaps 0x400(%rsp), %ymm0 vmovaps %ymm0, 0xe0(%rsp) movl 0x11c(%rsp), %eax addl $0x7, %eax cmpl 0x1cc(%rsp), %eax jge 0x162cc6d movq 0x1b8(%rsp), %rax movq %rax, 0x278(%rsp) movq 0x278(%rsp), %rax vmovups (%rax), %ymm0 vmovaps %ymm0, 0xc0(%rsp) leaq 0x1e7(%rsp), %rdi leaq 0xe0(%rsp), %rsi leaq 0xc0(%rsp), %rdx callq 0x162d2d0 vmovaps %ymm0, 0xa0(%rsp) movq 0x168(%rsp), %rax vmovaps 0xa0(%rsp), %ymm0 movq %rax, 0x2a0(%rsp) vmovaps %ymm0, 0x280(%rsp) vmovaps 0x280(%rsp), %ymm0 movq 0x2a0(%rsp), %rax vmovups %ymm0, (%rax) movq 0x1b8(%rsp), %rax addq $0x20, %rax movq %rax, 0x1b8(%rsp) movq 0x168(%rsp), %rax addq $0x20, %rax movq %rax, 0x168(%rsp) movl 0x11c(%rsp), %eax addl $0x8, %eax movl %eax, 0x11c(%rsp) jmp 0x162cb96 vmovss 0x1c4(%rsp), %xmm0 vmovss %xmm0, 0x3fc(%rsp) vbroadcastss 0x3fc(%rsp), %xmm0 vmovaps %xmm0, 0x3e0(%rsp) vmovaps 0x3e0(%rsp), %xmm0 vmovaps %xmm0, 0x90(%rsp) movl 0x11c(%rsp), %eax addl $0x3, %eax cmpl 0x1cc(%rsp), %eax jge 0x162cd78 movq 0x1b8(%rsp), %rax movq %rax, 0x2a8(%rsp) movq 0x2a8(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x80(%rsp) leaq 0x1e7(%rsp), %rdi leaq 0x90(%rsp), %rsi leaq 0x80(%rsp), %rdx vzeroupper callq 0x162ec10 vmovaps %xmm0, 0x70(%rsp) movq 0x168(%rsp), %rax vmovaps 0x70(%rsp), %xmm0 movq %rax, 0x2c8(%rsp) vmovaps %xmm0, 0x2b0(%rsp) vmovaps 0x2b0(%rsp), %xmm0 movq 0x2c8(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x1b8(%rsp), %rax addq $0x10, %rax movq %rax, 0x1b8(%rsp) movq 0x168(%rsp), %rax addq $0x10, %rax movq %rax, 0x168(%rsp) movl 
0x11c(%rsp), %eax addl $0x4, %eax movl %eax, 0x11c(%rsp) jmp 0x162cca4 jmp 0x162cd7a movl 0x11c(%rsp), %eax cmpl 0x1cc(%rsp), %eax jge 0x162cdf1 movq 0x1b8(%rsp), %rdx leaq 0x1e7(%rsp), %rdi leaq 0x1c4(%rsp), %rsi vzeroupper callq 0x16300f0 movq 0x168(%rsp), %rax vmovss %xmm0, (%rax) movq 0x1b8(%rsp), %rax addq $0x4, %rax movq %rax, 0x1b8(%rsp) movq 0x168(%rsp), %rax addq $0x4, %rax movq %rax, 0x168(%rsp) movl 0x11c(%rsp), %eax addl $0x1, %eax movl %eax, 0x11c(%rsp) jmp 0x162cd7a jmp 0x162cdf3 movl 0x1c8(%rsp), %eax addl $0x1, %eax movl %eax, 0x1c8(%rsp) jmp 0x162c1fa movl $0x0, 0x20c(%rsp) movl 0x20c(%rsp), %eax movq %rbp, %rsp popq %rbp vzeroupper retq nopw %cs:(%rax,%rax) nopl (%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/binaryop_x86_avx.cpp
ncnn::BinaryOp_x86_avx_functor::binary_op_pow::func_pack4(float vector[4] const&, float vector[4] const&) const
// 4-lane packed power: each lane of the result is pow_ps(x, y) for the
// corresponding lanes of x and y.
__m128 func_pack4(const __m128& x, const __m128& y) const
{
    const __m128 _result = pow_ps(x, y);
    return _result;
}
subq $0xd88, %rsp # imm = 0xD88 movq %rdi, -0x68(%rsp) movq %rsi, -0x70(%rsp) movq %rdx, -0x78(%rsp) movq -0x70(%rsp), %rax vmovaps (%rax), %xmm1 movq -0x78(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm1, -0x50(%rsp) vmovaps %xmm0, -0x60(%rsp) vmovaps -0x60(%rsp), %xmm7 vmovaps -0x50(%rsp), %xmm0 vmovaps %xmm0, 0x1e0(%rsp) vmovaps 0x7d3e4f(%rip), %xmm2 # 0x1e02ab0 vmovaps %xmm2, 0x1c0(%rsp) vmovaps 0x1e0(%rsp), %xmm1 vxorps %xmm5, %xmm5, %xmm5 vmovaps %xmm5, 0x310(%rsp) vmovaps 0x310(%rsp), %xmm0 vmovaps %xmm1, 0xcc0(%rsp) vmovaps %xmm0, 0xcb0(%rsp) vmovaps 0xcc0(%rsp), %xmm0 vmovaps 0xcb0(%rsp), %xmm1 vcmpleps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x1b0(%rsp) vmovaps 0x1e0(%rsp), %xmm0 vmovaps %xmm0, 0x300(%rsp) vmovaps 0x7d3eab(%rip), %xmm0 # 0x1e02b80 vmovaps %xmm0, 0x2f0(%rsp) vmovaps 0x300(%rsp), %xmm0 vmovaps 0x2f0(%rsp), %xmm1 vmaxps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x1e0(%rsp) vmovaps 0x1e0(%rsp), %xmm0 vmovaps %xmm0, 0xcf0(%rsp) vmovaps 0xcf0(%rsp), %xmm0 vmovaps %xmm0, 0xce0(%rsp) movl $0x17, 0xcdc(%rsp) vmovdqa 0xce0(%rsp), %xmm0 vmovd 0xcdc(%rsp), %xmm1 vpsrld %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0x1d0(%rsp) vmovaps 0x1e0(%rsp), %xmm0 vmovaps %xmm0, 0x3e0(%rsp) vmovaps 0x7d3e2b(%rip), %xmm0 # 0x1e02b90 vmovaps %xmm0, 0x3d0(%rsp) vmovdqa 0x3e0(%rsp), %xmm0 vmovdqa 0x3d0(%rsp), %xmm1 vpand %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0x1e0(%rsp) vmovaps 0x1e0(%rsp), %xmm0 vmovaps %xmm0, 0xd30(%rsp) vmovaps 0x7d3d49(%rip), %xmm1 # 0x1e02af0 vmovaps %xmm1, 0xd20(%rsp) vmovdqa 0xd30(%rsp), %xmm0 vmovdqa 0xd20(%rsp), %xmm3 vpor %xmm3, %xmm0, %xmm0 vmovdqa %xmm0, 0x1e0(%rsp) vmovdqa 0x1d0(%rsp), %xmm0 vmovdqa %xmm0, 0xd50(%rsp) vmovdqa 0x7d3d87(%rip), %xmm0 # 0x1e02b70 vmovdqa %xmm0, 0xd40(%rsp) vmovdqa 0xd50(%rsp), %xmm3 vmovdqa 0xd40(%rsp), %xmm4 vpsubd %xmm4, %xmm3, %xmm3 vmovdqa %xmm3, 0x1d0(%rsp) vmovdqa 0x1d0(%rsp), %xmm3 vmovdqa %xmm3, 0x350(%rsp) vcvtdq2ps 0x350(%rsp), %xmm3 vmovaps %xmm3, 0x1a0(%rsp) vmovaps 0x1a0(%rsp), %xmm4 vmovaps 0x1c0(%rsp), %xmm3 vmovaps 
%xmm4, 0x240(%rsp) vmovaps %xmm3, 0x230(%rsp) vmovaps 0x240(%rsp), %xmm3 vmovaps 0x230(%rsp), %xmm4 vaddps %xmm4, %xmm3, %xmm3 vmovaps %xmm3, 0x1a0(%rsp) vmovaps 0x1e0(%rsp), %xmm3 vmovaps %xmm3, 0xd70(%rsp) vmovaps 0x7d3d0e(%rip), %xmm3 # 0x1e02ba0 vmovaps %xmm3, 0xd60(%rsp) vmovaps 0xd70(%rsp), %xmm3 vmovaps 0xd60(%rsp), %xmm4 vcmpltps %xmm4, %xmm3, %xmm3 vmovaps %xmm3, 0x190(%rsp) vmovaps 0x1e0(%rsp), %xmm4 vmovaps 0x190(%rsp), %xmm3 vmovaps %xmm4, 0x3c0(%rsp) vmovaps %xmm3, 0x3b0(%rsp) vmovdqa 0x3c0(%rsp), %xmm3 vmovdqa 0x3b0(%rsp), %xmm4 vpand %xmm4, %xmm3, %xmm3 vmovdqa %xmm3, 0x180(%rsp) vmovaps 0x1e0(%rsp), %xmm4 vmovaps 0x1c0(%rsp), %xmm3 vmovaps %xmm4, 0x280(%rsp) vmovaps %xmm3, 0x270(%rsp) vmovaps 0x280(%rsp), %xmm3 vmovaps 0x270(%rsp), %xmm4 vsubps %xmm4, %xmm3, %xmm3 vmovaps %xmm3, 0x1e0(%rsp) vmovaps 0x1a0(%rsp), %xmm4 vmovaps 0x1c0(%rsp), %xmm6 vmovaps 0x190(%rsp), %xmm3 vmovaps %xmm6, 0x3a0(%rsp) vmovaps %xmm3, 0x390(%rsp) vmovdqa 0x3a0(%rsp), %xmm3 vmovdqa 0x390(%rsp), %xmm6 vpand %xmm6, %xmm3, %xmm3 vmovaps %xmm4, 0x260(%rsp) vmovdqa %xmm3, 0x250(%rsp) vmovaps 0x260(%rsp), %xmm3 vmovaps 0x250(%rsp), %xmm4 vsubps %xmm4, %xmm3, %xmm3 vmovaps %xmm3, 0x1a0(%rsp) vmovaps 0x1e0(%rsp), %xmm4 vmovaps 0x180(%rsp), %xmm3 vmovaps %xmm4, 0x220(%rsp) vmovaps %xmm3, 0x210(%rsp) vmovaps 0x220(%rsp), %xmm3 vmovaps 0x210(%rsp), %xmm4 vaddps %xmm4, %xmm3, %xmm3 vmovaps %xmm3, 0x1e0(%rsp) vmovaps 0x1e0(%rsp), %xmm3 vmovaps %xmm3, 0x2e0(%rsp) vmovaps %xmm3, 0x2d0(%rsp) vmovaps 0x2e0(%rsp), %xmm3 vmovaps 0x2d0(%rsp), %xmm4 vmulps %xmm4, %xmm3, %xmm3 vmovaps %xmm3, 0x170(%rsp) vmovaps 0x7d3b76(%rip), %xmm3 # 0x1e02bb0 vmovaps %xmm3, 0x160(%rsp) vmovaps 0x160(%rsp), %xmm4 vmovaps 0x1e0(%rsp), %xmm3 vmovaps %xmm4, 0x970(%rsp) vmovaps %xmm3, 0x960(%rsp) vmovaps 0x7d3b51(%rip), %xmm3 # 0x1e02bc0 vmovaps %xmm3, 0x950(%rsp) vmovaps 0x970(%rsp), %xmm4 vmovaps 0x960(%rsp), %xmm3 vmovaps %xmm4, 0x9b0(%rsp) vmovaps %xmm3, 0x9a0(%rsp) vmovaps 0x9b0(%rsp), %xmm3 vmovaps 
0x9a0(%rsp), %xmm4 vmulps %xmm4, %xmm3, %xmm4 vmovaps 0x950(%rsp), %xmm3 vmovaps %xmm4, 0x990(%rsp) vmovaps %xmm3, 0x980(%rsp) vmovaps 0x990(%rsp), %xmm3 vmovaps 0x980(%rsp), %xmm4 vaddps %xmm4, %xmm3, %xmm3 vmovaps %xmm3, 0x160(%rsp) vmovaps 0x160(%rsp), %xmm4 vmovaps 0x1e0(%rsp), %xmm3 vmovaps %xmm4, 0x900(%rsp) vmovaps %xmm3, 0x8f0(%rsp) vmovaps 0x7d3ab8(%rip), %xmm3 # 0x1e02bd0 vmovaps %xmm3, 0x8e0(%rsp) vmovaps 0x900(%rsp), %xmm4 vmovaps 0x8f0(%rsp), %xmm3 vmovaps %xmm4, 0x940(%rsp) vmovaps %xmm3, 0x930(%rsp) vmovaps 0x940(%rsp), %xmm3 vmovaps 0x930(%rsp), %xmm4 vmulps %xmm4, %xmm3, %xmm4 vmovaps 0x8e0(%rsp), %xmm3 vmovaps %xmm4, 0x920(%rsp) vmovaps %xmm3, 0x910(%rsp) vmovaps 0x920(%rsp), %xmm3 vmovaps 0x910(%rsp), %xmm4 vaddps %xmm4, %xmm3, %xmm3 vmovaps %xmm3, 0x160(%rsp) vmovaps 0x160(%rsp), %xmm4 vmovaps 0x1e0(%rsp), %xmm3 vmovaps %xmm4, 0x890(%rsp) vmovaps %xmm3, 0x880(%rsp) vmovaps 0x7d3a1f(%rip), %xmm3 # 0x1e02be0 vmovaps %xmm3, 0x870(%rsp) vmovaps 0x890(%rsp), %xmm4 vmovaps 0x880(%rsp), %xmm3 vmovaps %xmm4, 0x8d0(%rsp) vmovaps %xmm3, 0x8c0(%rsp) vmovaps 0x8d0(%rsp), %xmm3 vmovaps 0x8c0(%rsp), %xmm4 vmulps %xmm4, %xmm3, %xmm4 vmovaps 0x870(%rsp), %xmm3 vmovaps %xmm4, 0x8b0(%rsp) vmovaps %xmm3, 0x8a0(%rsp) vmovaps 0x8b0(%rsp), %xmm3 vmovaps 0x8a0(%rsp), %xmm4 vaddps %xmm4, %xmm3, %xmm3 vmovaps %xmm3, 0x160(%rsp) vmovaps 0x160(%rsp), %xmm4 vmovaps 0x1e0(%rsp), %xmm3 vmovaps %xmm4, 0x820(%rsp) vmovaps %xmm3, 0x810(%rsp) vmovaps 0x7d3986(%rip), %xmm3 # 0x1e02bf0 vmovaps %xmm3, 0x800(%rsp) vmovaps 0x820(%rsp), %xmm4 vmovaps 0x810(%rsp), %xmm3 vmovaps %xmm4, 0x860(%rsp) vmovaps %xmm3, 0x850(%rsp) vmovaps 0x860(%rsp), %xmm3 vmovaps 0x850(%rsp), %xmm4 vmulps %xmm4, %xmm3, %xmm4 vmovaps 0x800(%rsp), %xmm3 vmovaps %xmm4, 0x840(%rsp) vmovaps %xmm3, 0x830(%rsp) vmovaps 0x840(%rsp), %xmm3 vmovaps 0x830(%rsp), %xmm4 vaddps %xmm4, %xmm3, %xmm3 vmovaps %xmm3, 0x160(%rsp) vmovaps 0x160(%rsp), %xmm4 vmovaps 0x1e0(%rsp), %xmm3 vmovaps %xmm4, 0x7b0(%rsp) vmovaps %xmm3, 
0x7a0(%rsp) vmovaps 0x7d38ed(%rip), %xmm3 # 0x1e02c00 vmovaps %xmm3, 0x790(%rsp) vmovaps 0x7b0(%rsp), %xmm4 vmovaps 0x7a0(%rsp), %xmm3 vmovaps %xmm4, 0x7f0(%rsp) vmovaps %xmm3, 0x7e0(%rsp) vmovaps 0x7f0(%rsp), %xmm3 vmovaps 0x7e0(%rsp), %xmm4 vmulps %xmm4, %xmm3, %xmm4 vmovaps 0x790(%rsp), %xmm3 vmovaps %xmm4, 0x7d0(%rsp) vmovaps %xmm3, 0x7c0(%rsp) vmovaps 0x7d0(%rsp), %xmm3 vmovaps 0x7c0(%rsp), %xmm4 vaddps %xmm4, %xmm3, %xmm3 vmovaps %xmm3, 0x160(%rsp) vmovaps 0x160(%rsp), %xmm4 vmovaps 0x1e0(%rsp), %xmm3 vmovaps %xmm4, 0x740(%rsp) vmovaps %xmm3, 0x730(%rsp) vmovaps 0x7d3854(%rip), %xmm3 # 0x1e02c10 vmovaps %xmm3, 0x720(%rsp) vmovaps 0x740(%rsp), %xmm4 vmovaps 0x730(%rsp), %xmm3 vmovaps %xmm4, 0x780(%rsp) vmovaps %xmm3, 0x770(%rsp) vmovaps 0x780(%rsp), %xmm3 vmovaps 0x770(%rsp), %xmm4 vmulps %xmm4, %xmm3, %xmm4 vmovaps 0x720(%rsp), %xmm3 vmovaps %xmm4, 0x760(%rsp) vmovaps %xmm3, 0x750(%rsp) vmovaps 0x760(%rsp), %xmm3 vmovaps 0x750(%rsp), %xmm4 vaddps %xmm4, %xmm3, %xmm3 vmovaps %xmm3, 0x160(%rsp) vmovaps 0x160(%rsp), %xmm4 vmovaps 0x1e0(%rsp), %xmm3 vmovaps %xmm4, 0x6d0(%rsp) vmovaps %xmm3, 0x6c0(%rsp) vmovaps 0x7d37bb(%rip), %xmm3 # 0x1e02c20 vmovaps %xmm3, 0x6b0(%rsp) vmovaps 0x6d0(%rsp), %xmm4 vmovaps 0x6c0(%rsp), %xmm3 vmovaps %xmm4, 0x710(%rsp) vmovaps %xmm3, 0x700(%rsp) vmovaps 0x710(%rsp), %xmm3 vmovaps 0x700(%rsp), %xmm4 vmulps %xmm4, %xmm3, %xmm4 vmovaps 0x6b0(%rsp), %xmm3 vmovaps %xmm4, 0x6f0(%rsp) vmovaps %xmm3, 0x6e0(%rsp) vmovaps 0x6f0(%rsp), %xmm3 vmovaps 0x6e0(%rsp), %xmm4 vaddps %xmm4, %xmm3, %xmm3 vmovaps %xmm3, 0x160(%rsp) vmovaps 0x160(%rsp), %xmm4 vmovaps 0x1e0(%rsp), %xmm3 vmovaps %xmm4, 0x660(%rsp) vmovaps %xmm3, 0x650(%rsp) vmovaps 0x7d3722(%rip), %xmm3 # 0x1e02c30 vmovaps %xmm3, 0x640(%rsp) vmovaps 0x660(%rsp), %xmm4 vmovaps 0x650(%rsp), %xmm3 vmovaps %xmm4, 0x6a0(%rsp) vmovaps %xmm3, 0x690(%rsp) vmovaps 0x6a0(%rsp), %xmm3 vmovaps 0x690(%rsp), %xmm4 vmulps %xmm4, %xmm3, %xmm4 vmovaps 0x640(%rsp), %xmm3 vmovaps %xmm4, 0x680(%rsp) vmovaps 
%xmm3, 0x670(%rsp) vmovaps 0x680(%rsp), %xmm3 vmovaps 0x670(%rsp), %xmm4 vaddps %xmm4, %xmm3, %xmm3 vmovaps %xmm3, 0x160(%rsp) vmovaps 0x160(%rsp), %xmm4 vmovaps 0x1e0(%rsp), %xmm3 vmovaps %xmm4, 0x2c0(%rsp) vmovaps %xmm3, 0x2b0(%rsp) vmovaps 0x2c0(%rsp), %xmm3 vmovaps 0x2b0(%rsp), %xmm4 vmulps %xmm4, %xmm3, %xmm3 vmovaps %xmm3, 0x160(%rsp) vmovaps 0x160(%rsp), %xmm4 vmovaps 0x170(%rsp), %xmm3 vmovaps %xmm4, 0x2a0(%rsp) vmovaps %xmm3, 0x290(%rsp) vmovaps 0x2a0(%rsp), %xmm3 vmovaps 0x290(%rsp), %xmm4 vmulps %xmm4, %xmm3, %xmm3 vmovaps %xmm3, 0x160(%rsp) vmovaps 0x1a0(%rsp), %xmm3 vmovaps 0x160(%rsp), %xmm4 vmovaps %xmm3, 0x5f0(%rsp) vmovaps 0x7d34dc(%rip), %xmm3 # 0x1e02b10 vmovaps %xmm3, 0x5e0(%rsp) vmovaps %xmm4, 0x5d0(%rsp) vmovaps 0x5f0(%rsp), %xmm6 vmovaps 0x5e0(%rsp), %xmm4 vmovaps %xmm6, 0x630(%rsp) vmovaps %xmm4, 0x620(%rsp) vmovaps 0x630(%rsp), %xmm4 vmovaps 0x620(%rsp), %xmm6 vmulps %xmm6, %xmm4, %xmm6 vmovaps 0x5d0(%rsp), %xmm4 vmovaps %xmm6, 0x610(%rsp) vmovaps %xmm4, 0x600(%rsp) vmovaps 0x610(%rsp), %xmm4 vmovaps 0x600(%rsp), %xmm6 vaddps %xmm6, %xmm4, %xmm4 vmovaps %xmm4, 0x160(%rsp) vmovaps 0x170(%rsp), %xmm6 vmovaps 0x160(%rsp), %xmm4 vmovaps %xmm6, 0x430(%rsp) vmovaps %xmm1, 0x420(%rsp) vmovaps %xmm4, 0x410(%rsp) vmovaps 0x410(%rsp), %xmm6 vmovaps 0x430(%rsp), %xmm8 vmovaps 0x420(%rsp), %xmm4 vmovaps %xmm8, 0x470(%rsp) vmovaps %xmm4, 0x460(%rsp) vmovaps 0x470(%rsp), %xmm4 vmovaps 0x460(%rsp), %xmm8 vmulps %xmm4, %xmm8, %xmm4 vmovaps %xmm6, 0x450(%rsp) vmovaps %xmm4, 0x440(%rsp) vmovaps 0x450(%rsp), %xmm4 vmovaps 0x440(%rsp), %xmm6 vsubps %xmm6, %xmm4, %xmm4 vmovaps %xmm4, 0x160(%rsp) vmovaps 0x1e0(%rsp), %xmm6 vmovaps 0x160(%rsp), %xmm4 vmovaps %xmm6, 0x200(%rsp) vmovaps %xmm4, 0x1f0(%rsp) vmovaps 0x200(%rsp), %xmm4 vmovaps 0x1f0(%rsp), %xmm6 vaddps %xmm6, %xmm4, %xmm4 vmovaps %xmm4, 0x1e0(%rsp) vmovaps 0x1a0(%rsp), %xmm4 vmovaps 0x1e0(%rsp), %xmm6 vmovaps %xmm4, 0x580(%rsp) vmovaps 0x7d333f(%rip), %xmm4 # 0x1e02b00 vmovaps %xmm4, 0x570(%rsp) 
vmovaps %xmm6, 0x560(%rsp) vmovaps 0x580(%rsp), %xmm8 vmovaps 0x570(%rsp), %xmm6 vmovaps %xmm8, 0x5c0(%rsp) vmovaps %xmm6, 0x5b0(%rsp) vmovaps 0x5c0(%rsp), %xmm6 vmovaps 0x5b0(%rsp), %xmm8 vmulps %xmm6, %xmm8, %xmm8 vmovaps 0x560(%rsp), %xmm6 vmovaps %xmm8, 0x5a0(%rsp) vmovaps %xmm6, 0x590(%rsp) vmovaps 0x5a0(%rsp), %xmm6 vmovaps 0x590(%rsp), %xmm8 vaddps %xmm6, %xmm8, %xmm6 vmovaps %xmm6, 0x1e0(%rsp) vmovaps 0x1e0(%rsp), %xmm8 vmovaps 0x1b0(%rsp), %xmm6 vmovaps %xmm8, 0xd10(%rsp) vmovaps %xmm6, 0xd00(%rsp) vmovdqa 0xd10(%rsp), %xmm6 vmovdqa 0xd00(%rsp), %xmm8 vpor %xmm6, %xmm8, %xmm6 vmovdqa %xmm6, 0x1e0(%rsp) vmovaps 0x1e0(%rsp), %xmm6 vmovaps %xmm7, -0x30(%rsp) vmovaps %xmm6, -0x40(%rsp) vmovaps -0x30(%rsp), %xmm6 vmovaps -0x40(%rsp), %xmm7 vmulps %xmm7, %xmm6, %xmm6 vmovaps %xmm6, 0x50(%rsp) vmovaps %xmm5, 0x320(%rsp) vmovaps 0x320(%rsp), %xmm5 vmovaps %xmm5, 0x40(%rsp) vmovaps %xmm2, 0x10(%rsp) vmovaps 0x50(%rsp), %xmm2 vmovaps %xmm2, 0x150(%rsp) vmovaps 0x7d31d6(%rip), %xmm2 # 0x1e02ac0 vmovaps %xmm2, 0x140(%rsp) vmovaps 0x150(%rsp), %xmm2 vmovaps 0x140(%rsp), %xmm5 vminps %xmm5, %xmm2, %xmm2 vmovaps %xmm2, 0x50(%rsp) vmovaps 0x50(%rsp), %xmm2 vmovaps %xmm2, 0x130(%rsp) vmovaps 0x7d31aa(%rip), %xmm2 # 0x1e02ad0 vmovaps %xmm2, 0x120(%rsp) vmovaps 0x130(%rsp), %xmm2 vmovaps 0x120(%rsp), %xmm5 vmaxps %xmm5, %xmm2, %xmm2 vmovaps %xmm2, 0x50(%rsp) vmovaps 0x50(%rsp), %xmm2 vmovaps %xmm2, 0x110(%rsp) vmovaps 0x7d317e(%rip), %xmm2 # 0x1e02ae0 vmovaps %xmm2, 0x100(%rsp) vmovaps 0x110(%rsp), %xmm2 vmovaps 0x100(%rsp), %xmm5 vmulps %xmm5, %xmm2, %xmm2 vmovaps %xmm2, 0x30(%rsp) vmovaps 0x30(%rsp), %xmm2 vmovaps %xmm2, 0x90(%rsp) vmovaps %xmm1, 0x80(%rsp) vmovaps 0x90(%rsp), %xmm2 vmovaps 0x80(%rsp), %xmm5 vaddps %xmm5, %xmm2, %xmm2 vmovaps %xmm2, 0x30(%rsp) vmovaps 0x30(%rsp), %xmm2 vmovaps %xmm2, 0x340(%rsp) vcvttps2dq 0x340(%rsp), %xmm2 vmovdqa %xmm2, 0x20(%rsp) vmovdqa 0x20(%rsp), %xmm2 vmovdqa %xmm2, 0x360(%rsp) vcvtdq2ps 0x360(%rsp), %xmm2 vmovaps %xmm2, 0x40(%rsp) 
vmovaps 0x40(%rsp), %xmm5 vmovaps 0x30(%rsp), %xmm2 vmovaps %xmm5, 0x380(%rsp) vmovaps %xmm2, 0x370(%rsp) vmovaps 0x370(%rsp), %xmm2 vmovaps 0x380(%rsp), %xmm5 vcmpltps %xmm5, %xmm2, %xmm2 vmovaps %xmm2, (%rsp) vmovaps (%rsp), %xmm5 vmovaps 0x10(%rsp), %xmm2 vmovaps %xmm5, 0x400(%rsp) vmovaps %xmm2, 0x3f0(%rsp) vmovdqa 0x400(%rsp), %xmm2 vmovdqa 0x3f0(%rsp), %xmm5 vpand %xmm5, %xmm2, %xmm2 vmovdqa %xmm2, (%rsp) vmovaps 0x40(%rsp), %xmm5 vmovaps (%rsp), %xmm2 vmovaps %xmm5, 0xb0(%rsp) vmovaps %xmm2, 0xa0(%rsp) vmovaps 0xb0(%rsp), %xmm2 vmovaps 0xa0(%rsp), %xmm5 vsubps %xmm5, %xmm2, %xmm2 vmovaps %xmm2, 0x30(%rsp) vmovaps 0x30(%rsp), %xmm5 vmovaps 0x50(%rsp), %xmm2 vmovaps %xmm5, 0x510(%rsp) vmovaps %xmm4, 0x500(%rsp) vmovaps %xmm2, 0x4f0(%rsp) vmovaps 0x4f0(%rsp), %xmm4 vmovaps 0x510(%rsp), %xmm5 vmovaps 0x500(%rsp), %xmm2 vmovaps %xmm5, 0x550(%rsp) vmovaps %xmm2, 0x540(%rsp) vmovaps 0x550(%rsp), %xmm2 vmovaps 0x540(%rsp), %xmm5 vmulps %xmm5, %xmm2, %xmm2 vmovaps %xmm4, 0x530(%rsp) vmovaps %xmm2, 0x520(%rsp) vmovaps 0x530(%rsp), %xmm2 vmovaps 0x520(%rsp), %xmm4 vsubps %xmm4, %xmm2, %xmm2 vmovaps %xmm2, 0x50(%rsp) vmovaps 0x30(%rsp), %xmm4 vmovaps 0x50(%rsp), %xmm2 vmovaps %xmm4, 0x4a0(%rsp) vmovaps %xmm3, 0x490(%rsp) vmovaps %xmm2, 0x480(%rsp) vmovaps 0x480(%rsp), %xmm3 vmovaps 0x4a0(%rsp), %xmm4 vmovaps 0x490(%rsp), %xmm2 vmovaps %xmm4, 0x4e0(%rsp) vmovaps %xmm2, 0x4d0(%rsp) vmovaps 0x4e0(%rsp), %xmm2 vmovaps 0x4d0(%rsp), %xmm4 vmulps %xmm4, %xmm2, %xmm2 vmovaps %xmm3, 0x4c0(%rsp) vmovaps %xmm2, 0x4b0(%rsp) vmovaps 0x4c0(%rsp), %xmm2 vmovaps 0x4b0(%rsp), %xmm3 vsubps %xmm3, %xmm2, %xmm2 vmovaps %xmm2, 0x50(%rsp) vmovaps 0x50(%rsp), %xmm2 vmovaps %xmm2, 0xf0(%rsp) vmovaps %xmm2, 0xe0(%rsp) vmovaps 0xf0(%rsp), %xmm2 vmovaps 0xe0(%rsp), %xmm3 vmulps %xmm3, %xmm2, %xmm2 vmovaps %xmm2, 0x40(%rsp) vmovaps 0x7d2f12(%rip), %xmm2 # 0x1e02b20 vmovaps %xmm2, -0x10(%rsp) vmovaps -0x10(%rsp), %xmm3 vmovaps 0x50(%rsp), %xmm2 vmovaps %xmm3, 0xc10(%rsp) vmovaps %xmm2, 0xc00(%rsp) 
vmovaps 0x7d2ef6(%rip), %xmm2 # 0x1e02b30 vmovaps %xmm2, 0xbf0(%rsp) vmovaps 0xc10(%rsp), %xmm3 vmovaps 0xc00(%rsp), %xmm2 vmovaps %xmm3, 0xc50(%rsp) vmovaps %xmm2, 0xc40(%rsp) vmovaps 0xc50(%rsp), %xmm2 vmovaps 0xc40(%rsp), %xmm3 vmulps %xmm3, %xmm2, %xmm3 vmovaps 0xbf0(%rsp), %xmm2 vmovaps %xmm3, 0xc30(%rsp) vmovaps %xmm2, 0xc20(%rsp) vmovaps 0xc30(%rsp), %xmm2 vmovaps 0xc20(%rsp), %xmm3 vaddps %xmm3, %xmm2, %xmm2 vmovaps %xmm2, -0x10(%rsp) vmovaps -0x10(%rsp), %xmm3 vmovaps 0x50(%rsp), %xmm2 vmovaps %xmm3, 0xba0(%rsp) vmovaps %xmm2, 0xb90(%rsp) vmovaps 0x7d2e66(%rip), %xmm2 # 0x1e02b40 vmovaps %xmm2, 0xb80(%rsp) vmovaps 0xba0(%rsp), %xmm3 vmovaps 0xb90(%rsp), %xmm2 vmovaps %xmm3, 0xbe0(%rsp) vmovaps %xmm2, 0xbd0(%rsp) vmovaps 0xbe0(%rsp), %xmm2 vmovaps 0xbd0(%rsp), %xmm3 vmulps %xmm3, %xmm2, %xmm3 vmovaps 0xb80(%rsp), %xmm2 vmovaps %xmm3, 0xbc0(%rsp) vmovaps %xmm2, 0xbb0(%rsp) vmovaps 0xbc0(%rsp), %xmm2 vmovaps 0xbb0(%rsp), %xmm3 vaddps %xmm3, %xmm2, %xmm2 vmovaps %xmm2, -0x10(%rsp) vmovaps -0x10(%rsp), %xmm3 vmovaps 0x50(%rsp), %xmm2 vmovaps %xmm3, 0xb30(%rsp) vmovaps %xmm2, 0xb20(%rsp) vmovaps 0x7d2dd6(%rip), %xmm2 # 0x1e02b50 vmovaps %xmm2, 0xb10(%rsp) vmovaps 0xb30(%rsp), %xmm3 vmovaps 0xb20(%rsp), %xmm2 vmovaps %xmm3, 0xb70(%rsp) vmovaps %xmm2, 0xb60(%rsp) vmovaps 0xb70(%rsp), %xmm2 vmovaps 0xb60(%rsp), %xmm3 vmulps %xmm3, %xmm2, %xmm3 vmovaps 0xb10(%rsp), %xmm2 vmovaps %xmm3, 0xb50(%rsp) vmovaps %xmm2, 0xb40(%rsp) vmovaps 0xb50(%rsp), %xmm2 vmovaps 0xb40(%rsp), %xmm3 vaddps %xmm3, %xmm2, %xmm2 vmovaps %xmm2, -0x10(%rsp) vmovaps -0x10(%rsp), %xmm3 vmovaps 0x50(%rsp), %xmm2 vmovaps %xmm3, 0xac0(%rsp) vmovaps %xmm2, 0xab0(%rsp) vmovaps 0x7d2d46(%rip), %xmm2 # 0x1e02b60 vmovaps %xmm2, 0xaa0(%rsp) vmovaps 0xac0(%rsp), %xmm3 vmovaps 0xab0(%rsp), %xmm2 vmovaps %xmm3, 0xb00(%rsp) vmovaps %xmm2, 0xaf0(%rsp) vmovaps 0xb00(%rsp), %xmm2 vmovaps 0xaf0(%rsp), %xmm3 vmulps %xmm3, %xmm2, %xmm3 vmovaps 0xaa0(%rsp), %xmm2 vmovaps %xmm3, 0xae0(%rsp) vmovaps %xmm2, 
0xad0(%rsp) vmovaps 0xae0(%rsp), %xmm2 vmovaps 0xad0(%rsp), %xmm3 vaddps %xmm3, %xmm2, %xmm2 vmovaps %xmm2, -0x10(%rsp) vmovaps -0x10(%rsp), %xmm3 vmovaps 0x50(%rsp), %xmm2 vmovaps %xmm3, 0xa50(%rsp) vmovaps %xmm2, 0xa40(%rsp) vmovaps %xmm1, 0xa30(%rsp) vmovaps 0xa50(%rsp), %xmm2 vmovaps 0xa40(%rsp), %xmm1 vmovaps %xmm2, 0xa90(%rsp) vmovaps %xmm1, 0xa80(%rsp) vmovaps 0xa90(%rsp), %xmm1 vmovaps 0xa80(%rsp), %xmm2 vmulps %xmm2, %xmm1, %xmm2 vmovaps 0xa30(%rsp), %xmm1 vmovaps %xmm2, 0xa70(%rsp) vmovaps %xmm1, 0xa60(%rsp) vmovaps 0xa70(%rsp), %xmm1 vmovaps 0xa60(%rsp), %xmm2 vaddps %xmm2, %xmm1, %xmm1 vmovaps %xmm1, -0x10(%rsp) vmovaps -0x10(%rsp), %xmm3 vmovaps 0x40(%rsp), %xmm2 vmovaps 0x50(%rsp), %xmm1 vmovaps %xmm3, 0x9e0(%rsp) vmovaps %xmm2, 0x9d0(%rsp) vmovaps %xmm1, 0x9c0(%rsp) vmovaps 0x9e0(%rsp), %xmm2 vmovaps 0x9d0(%rsp), %xmm1 vmovaps %xmm2, 0xa20(%rsp) vmovaps %xmm1, 0xa10(%rsp) vmovaps 0xa20(%rsp), %xmm1 vmovaps 0xa10(%rsp), %xmm2 vmulps %xmm2, %xmm1, %xmm2 vmovaps 0x9c0(%rsp), %xmm1 vmovaps %xmm2, 0xa00(%rsp) vmovaps %xmm1, 0x9f0(%rsp) vmovaps 0xa00(%rsp), %xmm1 vmovaps 0x9f0(%rsp), %xmm2 vaddps %xmm2, %xmm1, %xmm1 vmovaps %xmm1, -0x10(%rsp) vmovaps -0x10(%rsp), %xmm2 vmovaps 0x10(%rsp), %xmm1 vmovaps %xmm2, 0x70(%rsp) vmovaps %xmm1, 0x60(%rsp) vmovaps 0x70(%rsp), %xmm1 vmovaps 0x60(%rsp), %xmm2 vaddps %xmm2, %xmm1, %xmm1 vmovaps %xmm1, -0x10(%rsp) vmovaps 0x30(%rsp), %xmm1 vmovaps %xmm1, 0x330(%rsp) vcvttps2dq 0x330(%rsp), %xmm1 vmovdqa %xmm1, 0x20(%rsp) vmovdqa 0x20(%rsp), %xmm1 vmovdqa %xmm1, 0xc70(%rsp) vmovdqa %xmm0, 0xc60(%rsp) vmovdqa 0xc70(%rsp), %xmm0 vmovdqa 0xc60(%rsp), %xmm1 vpaddd %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0x20(%rsp) vmovdqa 0x20(%rsp), %xmm0 vmovdqa %xmm0, 0xc90(%rsp) movl $0x17, 0xc8c(%rsp) vmovdqa 0xc90(%rsp), %xmm0 movl 0xc8c(%rsp), %eax vmovd %eax, %xmm1 vpslld %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0x20(%rsp) vmovdqa 0x20(%rsp), %xmm0 vmovdqa %xmm0, 0xca0(%rsp) vmovdqa 0xca0(%rsp), %xmm0 vmovaps %xmm0, -0x20(%rsp) vmovaps 
-0x10(%rsp), %xmm1 vmovaps -0x20(%rsp), %xmm0 vmovaps %xmm1, 0xd0(%rsp) vmovaps %xmm0, 0xc0(%rsp) vmovaps 0xd0(%rsp), %xmm0 vmulps 0xc0(%rsp), %xmm0, %xmm0 vmovaps %xmm0, -0x10(%rsp) vmovaps -0x10(%rsp), %xmm0 addq $0xd88, %rsp # imm = 0xD88 retq nopw %cs:(%rax,%rax) nop
/ysh329[P]ncnn/build_O0/src/layer/x86/binaryop_x86_avx.cpp
int ncnn::unary_op_inplace<ncnn::UnaryOp_x86_functor::unary_op_acos>(ncnn::Mat&, ncnn::Option const&)
static int unary_op_inplace(Mat& a, const Option& opt) { Op op; int w = a.w; int h = a.h; int d = a.d; int channels = a.c; int elempack = a.elempack; int size = w * h * d * elempack; #pragma omp parallel for num_threads(opt.num_threads) for (int q = 0; q < channels; q++) { float* ptr = a.channel(q); int i = 0; #if __SSE2__ #if __AVX__ #if __AVX512F__ for (; i + 15 < size; i += 16) { __m512 _p = _mm512_loadu_ps(ptr); _p = op.func_pack16(_p); _mm512_storeu_ps(ptr, _p); ptr += 16; } #endif // __AVX512F__ for (; i + 7 < size; i += 8) { __m256 _p = _mm256_loadu_ps(ptr); _p = op.func_pack8(_p); _mm256_storeu_ps(ptr, _p); ptr += 8; } #endif // __AVX__ for (; i + 3 < size; i += 4) { __m128 _p = _mm_load_ps(ptr); _p = op.func_pack4(_p); _mm_store_ps(ptr, _p); ptr += 4; } #endif // __SSE2__ for (; i < size; i++) { *ptr = op.func(*ptr); ptr++; } } return 0; }
subq $0x198, %rsp # imm = 0x198 movq %rdi, 0xc0(%rsp) movq %rsi, 0xb8(%rsp) movq 0xc0(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0xb0(%rsp) movq 0xc0(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0xac(%rsp) movq 0xc0(%rsp), %rax movl 0x34(%rax), %eax movl %eax, 0xa8(%rsp) movq 0xc0(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0xa4(%rsp) movq 0xc0(%rsp), %rax movl 0x18(%rax), %eax movl %eax, 0xa0(%rsp) movl 0xb0(%rsp), %eax imull 0xac(%rsp), %eax imull 0xa8(%rsp), %eax imull 0xa0(%rsp), %eax movl %eax, 0x9c(%rsp) movl $0x0, 0x98(%rsp) movl 0x98(%rsp), %eax cmpl 0xa4(%rsp), %eax jge 0x1636b5b movq 0xc0(%rsp), %rcx movl 0x98(%rsp), %eax leaq 0x48(%rsp), %rdx movq %rdx, 0xd8(%rsp) movq %rcx, 0xd0(%rsp) movl %eax, 0xcc(%rsp) movq 0xd0(%rsp), %rax movq %rax, 0x28(%rsp) movb $0x0, 0xcb(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0xcc(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x48(%rsp), %r10 movq %r10, 0x150(%rsp) movl %r9d, 0x14c(%rsp) movl %r8d, 0x148(%rsp) movl %edi, 0x144(%rsp) movq %rsi, 0x138(%rsp) movq %rdx, 0x130(%rsp) movl %ecx, 0x12c(%rsp) movq %rax, 0x120(%rsp) movq 0x150(%rsp), %rcx movq %rcx, 0x20(%rsp) movq 0x138(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x130(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x12c(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x120(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x14c(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x148(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x144(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x160(%rsp) movl $0x10, 0x15c(%rsp) movq 0x160(%rsp), %rax movslq 0x15c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x15c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) 
movq %rax, %rdx movq 0x28(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x70(%rsp) cmpl $0x4, 0x28(%rax) jne 0x16367ed movq 0x28(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x88(%rsp) movb $0x1, 0xcb(%rsp) testb $0x1, 0xcb(%rsp) jne 0x1636913 leaq 0x48(%rsp), %rax movq %rax, 0xe8(%rsp) movq 0xe8(%rsp), %rax movq %rax, 0x180(%rsp) movq 0x180(%rsp), %rax movq %rax, 0x18(%rsp) cmpq $0x0, 0x8(%rax) je 0x16368b9 movq 0x18(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x17c(%rsp) # imm = 0xFFFFFFFF movl 0x17c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x178(%rsp) cmpl $0x1, 0x178(%rsp) jne 0x16368b9 movq 0x18(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x163688d movq 0x18(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x163688b jmp 0x16368b7 movq 0x18(%rsp), %rax movq (%rax), %rax movq %rax, 0x188(%rsp) cmpq $0x0, 0x188(%rsp) je 0x16368b5 movq 0x188(%rsp), %rdi callq 0x5e480 jmp 0x16368b7 jmp 0x16368b9 movq 0x18(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1636911 movq %rax, %rdi callq 0x5fc90 jmp 0x1636913 leaq 0x48(%rsp), %rax movq %rax, 0xe0(%rsp) movq 0xe0(%rsp), %rax movq (%rax), %rax movq %rax, 0x8(%rsp) leaq 0x48(%rsp), %rax movq %rax, 0xf0(%rsp) movq 0xf0(%rsp), %rax movq %rax, 0x170(%rsp) movq 0x170(%rsp), %rax movq %rax, 0x10(%rsp) cmpq $0x0, 0x8(%rax) je 0x16369e6 movq 0x10(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x16c(%rsp) # imm = 0xFFFFFFFF movl 0x16c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x168(%rsp) cmpl $0x1, 0x168(%rsp) jne 0x16369e6 movq 0x10(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x16369ba movq 0x10(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 
0x16369b8 jmp 0x16369e4 movq 0x10(%rsp), %rax movq (%rax), %rax movq %rax, 0x190(%rsp) cmpq $0x0, 0x190(%rsp) je 0x16369e2 movq 0x190(%rsp), %rdi callq 0x5e480 jmp 0x16369e4 jmp 0x16369e6 movq 0x10(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1636a3e movq %rax, %rdi callq 0x5fc90 movq 0x8(%rsp), %rax movq %rax, 0x90(%rsp) movl $0x0, 0x44(%rsp) movl 0x44(%rsp), %eax addl $0x3, %eax cmpl 0x9c(%rsp), %eax jge 0x1636af2 movq 0x90(%rsp), %rax movq %rax, 0xf8(%rsp) movq 0xf8(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x30(%rsp) leaq 0xb7(%rsp), %rdi leaq 0x30(%rsp), %rsi callq 0x163b920 movaps %xmm0, 0x30(%rsp) movq 0x90(%rsp), %rax movaps 0x30(%rsp), %xmm0 movq %rax, 0x118(%rsp) movaps %xmm0, 0x100(%rsp) movaps 0x100(%rsp), %xmm0 movq 0x118(%rsp), %rax movaps %xmm0, (%rax) movq 0x90(%rsp), %rax addq $0x10, %rax movq %rax, 0x90(%rsp) movl 0x44(%rsp), %eax addl $0x4, %eax movl %eax, 0x44(%rsp) jmp 0x1636a53 jmp 0x1636af4 movl 0x44(%rsp), %eax cmpl 0x9c(%rsp), %eax jge 0x1636b43 movq 0x90(%rsp), %rsi leaq 0xb7(%rsp), %rdi callq 0x163b9b0 movq 0x90(%rsp), %rax movss %xmm0, (%rax) movq 0x90(%rsp), %rax addq $0x4, %rax movq %rax, 0x90(%rsp) movl 0x44(%rsp), %eax addl $0x1, %eax movl %eax, 0x44(%rsp) jmp 0x1636af4 jmp 0x1636b45 movl 0x98(%rsp), %eax addl $0x1, %eax movl %eax, 0x98(%rsp) jmp 0x1636632 xorl %eax, %eax addq $0x198, %rsp # imm = 0x198 retq nopw %cs:(%rax,%rax)
/ysh329[P]ncnn/src/layer/x86/unaryop_x86.cpp
ncnn::UnaryOp_x86_fma_functor::unary_op_floor::func_pack4(float vector[4] const&) const
__m128 func_pack4(const __m128& x) const { #if __SSE4_1__ return _mm_floor_ps(x); #endif // __SSE4_1__ // Use negative zero as the sign bit mask. const __m128 magic_negative_zero = _mm_set_ps1(-0.0f); // The smallest float number that have no fractional part. (2^23) const __m128 magic_smallest_no_fraction = _mm_set_ps1(8388608.0f); // absolute = abs(x); __m128 absolute = _mm_andnot_ps(magic_negative_zero, x); // negative_mask = magic_negative_zero && x; __m128 negative_mask = _mm_and_ps(magic_negative_zero, x); // no_fraction = (magic_smallest_no_fraction < absolute); __m128 no_fraction = _mm_cmplt_ps(magic_smallest_no_fraction, absolute); // truncated = static_cast<float>(static_cast<uint32_t>(absolute)); __m128 truncated = _mm_cvtepi32_ps(_mm_cvttps_epi32(absolute)); // truncated_with_sign = (truncated || negative_mask); __m128 truncated_with_sign = _mm_or_ps(truncated, negative_mask); // negative_fix = ((x < truncated_with_sign) ? 1.0f : 0.0f); __m128 negative_fix = _mm_and_ps( _mm_cmplt_ps(x, truncated_with_sign), _mm_set_ps1(1.0f)); // fixed_result = truncated_with_sign - negative_fix; __m128 fixed_result = _mm_sub_ps(truncated_with_sign, negative_fix); // return ((x && no_fraction) || (!no_fraction && fixed_result)); return _mm_or_ps( _mm_and_ps(x, no_fraction), _mm_andnot_ps(no_fraction, fixed_result)); }
subq $0x28, %rsp movq %rdi, 0x20(%rsp) movq %rsi, 0x18(%rsp) movq 0x18(%rsp), %rax vmovaps (%rax), %xmm0 vroundps $0x1, %xmm0, %xmm0 addq $0x28, %rsp retq nopw %cs:(%rax,%rax) nopl (%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/unaryop_x86_fma.cpp
ncnn::UnaryOp_x86_fma_functor::unary_op_ceil::func_pack4(float vector[4] const&) const
__m128 func_pack4(const __m128& x) const { #if __SSE4_1__ return _mm_ceil_ps(x); #endif // __SSE4_1__ // Use negative zero as the sign bit mask. const __m128 magic_negative_zero = _mm_set_ps1(-0.0f); // The smallest float number that have no fractional part. (2^23) const __m128 magic_smallest_no_fraction = _mm_set_ps1(8388608.0f); // absolute = abs(x); __m128 absolute = _mm_andnot_ps(magic_negative_zero, x); // negative_mask = magic_negative_zero && x; __m128 negative_mask = _mm_and_ps(magic_negative_zero, x); // no_fraction = (magic_smallest_no_fraction < absolute); __m128 no_fraction = _mm_cmplt_ps(magic_smallest_no_fraction, absolute); // truncated = static_cast<float>(static_cast<uint32_t>(absolute)); __m128 truncated = _mm_cvtepi32_ps(_mm_cvttps_epi32(absolute)); // truncated_with_sign = (truncated || negative_mask); __m128 truncated_with_sign = _mm_or_ps(truncated, negative_mask); // positive_fix = ((x > -0.0f) && (x > truncated_with_sign) ? -1.0f : 0.0f); __m128 positive_fix = _mm_and_ps( _mm_and_ps( _mm_cmpgt_ps(x, magic_negative_zero), _mm_cmpgt_ps(x, truncated_with_sign)), _mm_set_ps1(-1.0f)); // fixed_result = truncated_with_sign - positive_fix; __m128 fixed_result = _mm_sub_ps(truncated_with_sign, positive_fix); // return ((x && no_fraction) || (!no_fraction && fixed_result)); return _mm_or_ps( _mm_and_ps(x, no_fraction), _mm_andnot_ps(no_fraction, fixed_result)); }
subq $0x28, %rsp movq %rdi, 0x20(%rsp) movq %rsi, 0x18(%rsp) movq 0x18(%rsp), %rax vmovaps (%rax), %xmm0 vroundps $0x2, %xmm0, %xmm0 addq $0x28, %rsp retq nopw %cs:(%rax,%rax) nopl (%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/unaryop_x86_fma.cpp
ncnn::UnaryOp_x86_fma_functor::unary_op_exp::func_pack4(float vector[4] const&) const
__m128 func_pack4(const __m128& x) const { return exp_ps(x); }
subq $0x4e8, %rsp # imm = 0x4E8 movq %rdi, -0x78(%rsp) movq %rsi, -0x80(%rsp) movq -0x80(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, (%rsp) vxorps %xmm0, %xmm0, %xmm0 vmovaps %xmm0, 0x30(%rsp) vmovaps 0x30(%rsp), %xmm0 vmovaps %xmm0, -0x10(%rsp) vmovaps 0x7a8df3(%rip), %xmm0 # 0x1e02ab0 vmovaps %xmm0, -0x40(%rsp) vmovaps (%rsp), %xmm0 vmovaps %xmm0, 0xf0(%rsp) vmovaps 0x7a8de7(%rip), %xmm0 # 0x1e02ac0 vmovaps %xmm0, 0xe0(%rsp) vmovaps 0xf0(%rsp), %xmm0 vmovaps 0xe0(%rsp), %xmm1 vminps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, (%rsp) vmovaps (%rsp), %xmm0 vmovaps %xmm0, 0x110(%rsp) vmovaps 0x7a8dbd(%rip), %xmm0 # 0x1e02ad0 vmovaps %xmm0, 0x100(%rsp) vmovaps 0x110(%rsp), %xmm0 vmovaps 0x100(%rsp), %xmm1 vmaxps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, (%rsp) vmovaps (%rsp), %xmm0 vmovaps %xmm0, 0x90(%rsp) vmovaps 0x7a8d93(%rip), %xmm0 # 0x1e02ae0 vmovaps %xmm0, 0x80(%rsp) vmovaps 0x90(%rsp), %xmm0 vmovaps 0x80(%rsp), %xmm1 vmulps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, -0x20(%rsp) vmovaps -0x20(%rsp), %xmm0 vmovaps %xmm0, 0x150(%rsp) vmovaps 0x7a8d67(%rip), %xmm0 # 0x1e02af0 vmovaps %xmm0, 0x140(%rsp) vmovaps 0x150(%rsp), %xmm1 vmovaps 0x140(%rsp), %xmm2 vaddps %xmm2, %xmm1, %xmm1 vmovaps %xmm1, -0x20(%rsp) vmovaps -0x20(%rsp), %xmm1 vmovaps %xmm1, 0x170(%rsp) vcvttps2dq 0x170(%rsp), %xmm1 vmovdqa %xmm1, -0x30(%rsp) vmovdqa -0x30(%rsp), %xmm1 vmovdqa %xmm1, 0x180(%rsp) vcvtdq2ps 0x180(%rsp), %xmm1 vmovaps %xmm1, -0x10(%rsp) vmovaps -0x10(%rsp), %xmm2 vmovaps -0x20(%rsp), %xmm1 vmovaps %xmm2, 0x1a0(%rsp) vmovaps %xmm1, 0x190(%rsp) vmovaps 0x190(%rsp), %xmm1 vmovaps 0x1a0(%rsp), %xmm2 vcmpltps %xmm2, %xmm1, %xmm1 vmovaps %xmm1, -0x50(%rsp) vmovaps -0x50(%rsp), %xmm2 vmovaps -0x40(%rsp), %xmm1 vmovaps %xmm2, 0x1c0(%rsp) vmovaps %xmm1, 0x1b0(%rsp) vmovdqa 0x1c0(%rsp), %xmm1 vmovdqa 0x1b0(%rsp), %xmm2 vpand %xmm2, %xmm1, %xmm1 vmovdqa %xmm1, -0x50(%rsp) vmovaps -0x10(%rsp), %xmm2 vmovaps -0x50(%rsp), %xmm1 vmovaps %xmm2, 0x20(%rsp) vmovaps %xmm1, 0x10(%rsp) vmovaps 0x20(%rsp), %xmm1 
vmovaps 0x10(%rsp), %xmm2 vsubps %xmm2, %xmm1, %xmm1 vmovaps %xmm1, -0x20(%rsp) vmovaps -0x20(%rsp), %xmm2 vmovaps (%rsp), %xmm1 vmovaps %xmm2, 0x220(%rsp) vmovaps 0x7a8c57(%rip), %xmm2 # 0x1e02b00 vmovaps %xmm2, 0x210(%rsp) vmovaps %xmm1, 0x200(%rsp) vmovaps 0x220(%rsp), %xmm3 vmovaps 0x210(%rsp), %xmm2 vmovaps 0x200(%rsp), %xmm1 vmovaps %xmm3, 0x380(%rsp) vmovaps %xmm2, 0x370(%rsp) vmovaps %xmm1, 0x360(%rsp) vmovaps 0x380(%rsp), %xmm2 vmovaps 0x370(%rsp), %xmm1 vmovaps 0x360(%rsp), %xmm3 vfnmadd213ps %xmm3, %xmm2, %xmm1 # xmm1 = -(xmm2 * xmm1) + xmm3 vmovaps %xmm1, (%rsp) vmovaps -0x20(%rsp), %xmm2 vmovaps (%rsp), %xmm1 vmovaps %xmm2, 0x1f0(%rsp) vmovaps 0x7a8bde(%rip), %xmm2 # 0x1e02b10 vmovaps %xmm2, 0x1e0(%rsp) vmovaps %xmm1, 0x1d0(%rsp) vmovaps 0x1f0(%rsp), %xmm3 vmovaps 0x1e0(%rsp), %xmm2 vmovaps 0x1d0(%rsp), %xmm1 vmovaps %xmm3, 0x3b0(%rsp) vmovaps %xmm2, 0x3a0(%rsp) vmovaps %xmm1, 0x390(%rsp) vmovaps 0x3b0(%rsp), %xmm2 vmovaps 0x3a0(%rsp), %xmm1 vmovaps 0x390(%rsp), %xmm3 vfnmadd213ps %xmm3, %xmm2, %xmm1 # xmm1 = -(xmm2 * xmm1) + xmm3 vmovaps %xmm1, (%rsp) vmovaps (%rsp), %xmm1 vmovaps %xmm1, 0x70(%rsp) vmovaps %xmm1, 0x60(%rsp) vmovaps 0x70(%rsp), %xmm1 vmovaps 0x60(%rsp), %xmm2 vmulps %xmm2, %xmm1, %xmm1 vmovaps %xmm1, -0x10(%rsp) vmovaps 0x7a8b52(%rip), %xmm1 # 0x1e02b20 vmovaps %xmm1, -0x60(%rsp) vmovaps -0x60(%rsp), %xmm2 vmovaps (%rsp), %xmm1 vmovaps %xmm2, 0x340(%rsp) vmovaps %xmm1, 0x330(%rsp) vmovaps 0x7a8b37(%rip), %xmm1 # 0x1e02b30 vmovaps %xmm1, 0x320(%rsp) vmovaps 0x340(%rsp), %xmm3 vmovaps 0x330(%rsp), %xmm2 vmovaps 0x320(%rsp), %xmm1 vmovaps %xmm3, 0x3e0(%rsp) vmovaps %xmm2, 0x3d0(%rsp) vmovaps %xmm1, 0x3c0(%rsp) vmovaps 0x3e0(%rsp), %xmm2 vmovaps 0x3d0(%rsp), %xmm1 vmovaps 0x3c0(%rsp), %xmm3 vfmadd213ps %xmm3, %xmm2, %xmm1 # xmm1 = (xmm2 * xmm1) + xmm3 vmovaps %xmm1, -0x60(%rsp) vmovaps -0x60(%rsp), %xmm2 vmovaps (%rsp), %xmm1 vmovaps %xmm2, 0x310(%rsp) vmovaps %xmm1, 0x300(%rsp) vmovaps 0x7a8abd(%rip), %xmm1 # 0x1e02b40 vmovaps %xmm1, 
0x2f0(%rsp) vmovaps 0x310(%rsp), %xmm3 vmovaps 0x300(%rsp), %xmm2 vmovaps 0x2f0(%rsp), %xmm1 vmovaps %xmm3, 0x410(%rsp) vmovaps %xmm2, 0x400(%rsp) vmovaps %xmm1, 0x3f0(%rsp) vmovaps 0x410(%rsp), %xmm2 vmovaps 0x400(%rsp), %xmm1 vmovaps 0x3f0(%rsp), %xmm3 vfmadd213ps %xmm3, %xmm2, %xmm1 # xmm1 = (xmm2 * xmm1) + xmm3 vmovaps %xmm1, -0x60(%rsp) vmovaps -0x60(%rsp), %xmm2 vmovaps (%rsp), %xmm1 vmovaps %xmm2, 0x2e0(%rsp) vmovaps %xmm1, 0x2d0(%rsp) vmovaps 0x7a8a43(%rip), %xmm1 # 0x1e02b50 vmovaps %xmm1, 0x2c0(%rsp) vmovaps 0x2e0(%rsp), %xmm3 vmovaps 0x2d0(%rsp), %xmm2 vmovaps 0x2c0(%rsp), %xmm1 vmovaps %xmm3, 0x440(%rsp) vmovaps %xmm2, 0x430(%rsp) vmovaps %xmm1, 0x420(%rsp) vmovaps 0x440(%rsp), %xmm2 vmovaps 0x430(%rsp), %xmm1 vmovaps 0x420(%rsp), %xmm3 vfmadd213ps %xmm3, %xmm2, %xmm1 # xmm1 = (xmm2 * xmm1) + xmm3 vmovaps %xmm1, -0x60(%rsp) vmovaps -0x60(%rsp), %xmm2 vmovaps (%rsp), %xmm1 vmovaps %xmm2, 0x2b0(%rsp) vmovaps %xmm1, 0x2a0(%rsp) vmovaps 0x7a89c9(%rip), %xmm1 # 0x1e02b60 vmovaps %xmm1, 0x290(%rsp) vmovaps 0x2b0(%rsp), %xmm3 vmovaps 0x2a0(%rsp), %xmm2 vmovaps 0x290(%rsp), %xmm1 vmovaps %xmm3, 0x470(%rsp) vmovaps %xmm2, 0x460(%rsp) vmovaps %xmm1, 0x450(%rsp) vmovaps 0x470(%rsp), %xmm2 vmovaps 0x460(%rsp), %xmm1 vmovaps 0x450(%rsp), %xmm3 vfmadd213ps %xmm3, %xmm2, %xmm1 # xmm1 = (xmm2 * xmm1) + xmm3 vmovaps %xmm1, -0x60(%rsp) vmovaps -0x60(%rsp), %xmm2 vmovaps (%rsp), %xmm1 vmovaps %xmm2, 0x280(%rsp) vmovaps %xmm1, 0x270(%rsp) vmovaps %xmm0, 0x260(%rsp) vmovaps 0x280(%rsp), %xmm2 vmovaps 0x270(%rsp), %xmm1 vmovaps 0x260(%rsp), %xmm0 vmovaps %xmm2, 0x4a0(%rsp) vmovaps %xmm1, 0x490(%rsp) vmovaps %xmm0, 0x480(%rsp) vmovaps 0x4a0(%rsp), %xmm1 vmovaps 0x490(%rsp), %xmm0 vmovaps 0x480(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, -0x60(%rsp) vmovaps -0x60(%rsp), %xmm2 vmovaps -0x10(%rsp), %xmm1 vmovaps (%rsp), %xmm0 vmovaps %xmm2, 0x250(%rsp) vmovaps %xmm1, 0x240(%rsp) vmovaps %xmm0, 0x230(%rsp) vmovaps 0x250(%rsp), %xmm2 
vmovaps 0x240(%rsp), %xmm1 vmovaps 0x230(%rsp), %xmm0 vmovaps %xmm2, 0x4d0(%rsp) vmovaps %xmm1, 0x4c0(%rsp) vmovaps %xmm0, 0x4b0(%rsp) vmovaps 0x4d0(%rsp), %xmm1 vmovaps 0x4c0(%rsp), %xmm0 vmovaps 0x4b0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, -0x60(%rsp) vmovaps -0x60(%rsp), %xmm1 vmovaps -0x40(%rsp), %xmm0 vmovaps %xmm1, 0x130(%rsp) vmovaps %xmm0, 0x120(%rsp) vmovaps 0x130(%rsp), %xmm0 vmovaps 0x120(%rsp), %xmm1 vaddps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, -0x60(%rsp) vmovaps -0x20(%rsp), %xmm0 vmovaps %xmm0, 0x160(%rsp) vcvttps2dq 0x160(%rsp), %xmm0 vmovdqa %xmm0, -0x30(%rsp) vmovdqa -0x30(%rsp), %xmm0 vmovdqa %xmm0, 0xb0(%rsp) vmovdqa 0x7a87fb(%rip), %xmm0 # 0x1e02b70 vmovdqa %xmm0, 0xa0(%rsp) vmovdqa 0xb0(%rsp), %xmm0 vmovdqa 0xa0(%rsp), %xmm1 vpaddd %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, -0x30(%rsp) vmovdqa -0x30(%rsp), %xmm0 vmovdqa %xmm0, 0xd0(%rsp) movl $0x17, 0xcc(%rsp) vmovdqa 0xd0(%rsp), %xmm0 movl 0xcc(%rsp), %eax vmovd %eax, %xmm1 vpslld %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, -0x30(%rsp) vmovdqa -0x30(%rsp), %xmm0 vmovdqa %xmm0, 0x350(%rsp) vmovdqa 0x350(%rsp), %xmm0 vmovaps %xmm0, -0x70(%rsp) vmovaps -0x60(%rsp), %xmm1 vmovaps -0x70(%rsp), %xmm0 vmovaps %xmm1, 0x50(%rsp) vmovaps %xmm0, 0x40(%rsp) vmovaps 0x50(%rsp), %xmm0 vmulps 0x40(%rsp), %xmm0, %xmm0 vmovaps %xmm0, -0x60(%rsp) vmovaps -0x60(%rsp), %xmm0 addq $0x4e8, %rsp # imm = 0x4E8 retq nopl (%rax,%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/unaryop_x86_fma.cpp
ncnn::UnaryOp_x86_fma_functor::unary_op_log::func_pack4(float vector[4] const&) const
__m128 func_pack4(const __m128& x) const { return log_ps(x); }
subq $0x6b8, %rsp # imm = 0x6B8 movq %rdi, -0x78(%rsp) movq %rsi, -0x80(%rsp) movq -0x80(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x10(%rsp) vmovaps 0x7a7958(%rip), %xmm0 # 0x1e02ab0 vmovaps %xmm0, -0x10(%rsp) vmovaps 0x10(%rsp), %xmm1 vxorps %xmm0, %xmm0, %xmm0 vmovaps %xmm0, 0x60(%rsp) vmovaps 0x60(%rsp), %xmm0 vmovaps %xmm1, 0x630(%rsp) vmovaps %xmm0, 0x620(%rsp) vmovaps 0x630(%rsp), %xmm0 vmovaps 0x620(%rsp), %xmm1 vcmpleps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, -0x20(%rsp) vmovaps 0x10(%rsp), %xmm0 vmovaps %xmm0, 0xe0(%rsp) vmovaps 0x7a79c6(%rip), %xmm0 # 0x1e02b80 vmovaps %xmm0, 0xd0(%rsp) vmovaps 0xe0(%rsp), %xmm0 vmovaps 0xd0(%rsp), %xmm1 vmaxps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x10(%rsp) vmovaps 0x10(%rsp), %xmm0 vmovaps %xmm0, 0x640(%rsp) vmovaps 0x640(%rsp), %xmm0 vmovaps %xmm0, 0x5f0(%rsp) movl $0x17, 0x5ec(%rsp) vmovdqa 0x5f0(%rsp), %xmm0 vmovd 0x5ec(%rsp), %xmm1 vpsrld %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, (%rsp) vmovaps 0x10(%rsp), %xmm0 vmovaps %xmm0, 0x1b0(%rsp) vmovaps 0x7a7953(%rip), %xmm0 # 0x1e02b90 vmovaps %xmm0, 0x1a0(%rsp) vmovdqa 0x1b0(%rsp), %xmm0 vmovdqa 0x1a0(%rsp), %xmm1 vpand %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0x10(%rsp) vmovaps 0x10(%rsp), %xmm0 vmovaps %xmm0, 0x680(%rsp) vmovaps 0x7a7877(%rip), %xmm1 # 0x1e02af0 vmovaps %xmm1, 0x670(%rsp) vmovdqa 0x680(%rsp), %xmm0 vmovdqa 0x670(%rsp), %xmm2 vpor %xmm2, %xmm0, %xmm0 vmovdqa %xmm0, 0x10(%rsp) vmovdqa (%rsp), %xmm0 vmovdqa %xmm0, 0x610(%rsp) vmovdqa 0x7a78bc(%rip), %xmm0 # 0x1e02b70 vmovdqa %xmm0, 0x600(%rsp) vmovdqa 0x610(%rsp), %xmm0 vmovdqa 0x600(%rsp), %xmm2 vpsubd %xmm2, %xmm0, %xmm0 vmovdqa %xmm0, (%rsp) vmovdqa (%rsp), %xmm0 vmovdqa %xmm0, 0x150(%rsp) vcvtdq2ps 0x150(%rsp), %xmm0 vmovaps %xmm0, -0x30(%rsp) vmovaps -0x30(%rsp), %xmm2 vmovaps -0x10(%rsp), %xmm0 vmovaps %xmm2, 0x140(%rsp) vmovaps %xmm0, 0x130(%rsp) vmovaps 0x140(%rsp), %xmm0 vmovaps 0x130(%rsp), %xmm2 vaddps %xmm2, %xmm0, %xmm0 vmovaps %xmm0, -0x30(%rsp) vmovaps 0x10(%rsp), %xmm0 vmovaps %xmm0, 0x6a0(%rsp) 
vmovaps 0x7a785a(%rip), %xmm0 # 0x1e02ba0 vmovaps %xmm0, 0x690(%rsp) vmovaps 0x6a0(%rsp), %xmm0 vmovaps 0x690(%rsp), %xmm2 vcmpltps %xmm2, %xmm0, %xmm0 vmovaps %xmm0, -0x40(%rsp) vmovaps 0x10(%rsp), %xmm2 vmovaps -0x40(%rsp), %xmm0 vmovaps %xmm2, 0x190(%rsp) vmovaps %xmm0, 0x180(%rsp) vmovdqa 0x190(%rsp), %xmm0 vmovdqa 0x180(%rsp), %xmm2 vpand %xmm2, %xmm0, %xmm0 vmovdqa %xmm0, -0x50(%rsp) vmovaps 0x10(%rsp), %xmm2 vmovaps -0x10(%rsp), %xmm0 vmovaps %xmm2, 0x50(%rsp) vmovaps %xmm0, 0x40(%rsp) vmovaps 0x50(%rsp), %xmm0 vmovaps 0x40(%rsp), %xmm2 vsubps %xmm2, %xmm0, %xmm0 vmovaps %xmm0, 0x10(%rsp) vmovaps -0x30(%rsp), %xmm2 vmovaps -0x10(%rsp), %xmm3 vmovaps -0x40(%rsp), %xmm0 vmovaps %xmm3, 0x170(%rsp) vmovaps %xmm0, 0x160(%rsp) vmovdqa 0x170(%rsp), %xmm0 vmovdqa 0x160(%rsp), %xmm3 vpand %xmm3, %xmm0, %xmm0 vmovaps %xmm2, 0x30(%rsp) vmovdqa %xmm0, 0x20(%rsp) vmovaps 0x30(%rsp), %xmm0 vmovaps 0x20(%rsp), %xmm2 vsubps %xmm2, %xmm0, %xmm0 vmovaps %xmm0, -0x30(%rsp) vmovaps 0x10(%rsp), %xmm2 vmovaps -0x50(%rsp), %xmm0 vmovaps %xmm2, 0x120(%rsp) vmovaps %xmm0, 0x110(%rsp) vmovaps 0x120(%rsp), %xmm0 vmovaps 0x110(%rsp), %xmm2 vaddps %xmm2, %xmm0, %xmm0 vmovaps %xmm0, 0x10(%rsp) vmovaps 0x10(%rsp), %xmm0 vmovaps %xmm0, 0xc0(%rsp) vmovaps %xmm0, 0xb0(%rsp) vmovaps 0xc0(%rsp), %xmm0 vmovaps 0xb0(%rsp), %xmm2 vmulps %xmm2, %xmm0, %xmm0 vmovaps %xmm0, -0x60(%rsp) vmovaps 0x7a770a(%rip), %xmm0 # 0x1e02bb0 vmovaps %xmm0, -0x70(%rsp) vmovaps -0x70(%rsp), %xmm2 vmovaps 0x10(%rsp), %xmm0 vmovaps %xmm2, 0x3c0(%rsp) vmovaps %xmm0, 0x3b0(%rsp) vmovaps 0x7a76ee(%rip), %xmm0 # 0x1e02bc0 vmovaps %xmm0, 0x3a0(%rsp) vmovaps 0x3c0(%rsp), %xmm3 vmovaps 0x3b0(%rsp), %xmm2 vmovaps 0x3a0(%rsp), %xmm0 vmovaps %xmm3, 0x420(%rsp) vmovaps %xmm2, 0x410(%rsp) vmovaps %xmm0, 0x400(%rsp) vmovaps 0x420(%rsp), %xmm2 vmovaps 0x410(%rsp), %xmm0 vmovaps 0x400(%rsp), %xmm3 vfmadd213ps %xmm3, %xmm2, %xmm0 # xmm0 = (xmm2 * xmm0) + xmm3 vmovaps %xmm0, -0x70(%rsp) vmovaps -0x70(%rsp), %xmm2 vmovaps 0x10(%rsp), 
%xmm0 vmovaps %xmm2, 0x390(%rsp) vmovaps %xmm0, 0x380(%rsp) vmovaps 0x7a7673(%rip), %xmm0 # 0x1e02bd0 vmovaps %xmm0, 0x370(%rsp) vmovaps 0x390(%rsp), %xmm3 vmovaps 0x380(%rsp), %xmm2 vmovaps 0x370(%rsp), %xmm0 vmovaps %xmm3, 0x450(%rsp) vmovaps %xmm2, 0x440(%rsp) vmovaps %xmm0, 0x430(%rsp) vmovaps 0x450(%rsp), %xmm2 vmovaps 0x440(%rsp), %xmm0 vmovaps 0x430(%rsp), %xmm3 vfmadd213ps %xmm3, %xmm2, %xmm0 # xmm0 = (xmm2 * xmm0) + xmm3 vmovaps %xmm0, -0x70(%rsp) vmovaps -0x70(%rsp), %xmm2 vmovaps 0x10(%rsp), %xmm0 vmovaps %xmm2, 0x360(%rsp) vmovaps %xmm0, 0x350(%rsp) vmovaps 0x7a75f8(%rip), %xmm0 # 0x1e02be0 vmovaps %xmm0, 0x340(%rsp) vmovaps 0x360(%rsp), %xmm3 vmovaps 0x350(%rsp), %xmm2 vmovaps 0x340(%rsp), %xmm0 vmovaps %xmm3, 0x480(%rsp) vmovaps %xmm2, 0x470(%rsp) vmovaps %xmm0, 0x460(%rsp) vmovaps 0x480(%rsp), %xmm2 vmovaps 0x470(%rsp), %xmm0 vmovaps 0x460(%rsp), %xmm3 vfmadd213ps %xmm3, %xmm2, %xmm0 # xmm0 = (xmm2 * xmm0) + xmm3 vmovaps %xmm0, -0x70(%rsp) vmovaps -0x70(%rsp), %xmm2 vmovaps 0x10(%rsp), %xmm0 vmovaps %xmm2, 0x330(%rsp) vmovaps %xmm0, 0x320(%rsp) vmovaps 0x7a757d(%rip), %xmm0 # 0x1e02bf0 vmovaps %xmm0, 0x310(%rsp) vmovaps 0x330(%rsp), %xmm3 vmovaps 0x320(%rsp), %xmm2 vmovaps 0x310(%rsp), %xmm0 vmovaps %xmm3, 0x4b0(%rsp) vmovaps %xmm2, 0x4a0(%rsp) vmovaps %xmm0, 0x490(%rsp) vmovaps 0x4b0(%rsp), %xmm2 vmovaps 0x4a0(%rsp), %xmm0 vmovaps 0x490(%rsp), %xmm3 vfmadd213ps %xmm3, %xmm2, %xmm0 # xmm0 = (xmm2 * xmm0) + xmm3 vmovaps %xmm0, -0x70(%rsp) vmovaps -0x70(%rsp), %xmm2 vmovaps 0x10(%rsp), %xmm0 vmovaps %xmm2, 0x300(%rsp) vmovaps %xmm0, 0x2f0(%rsp) vmovaps 0x7a7502(%rip), %xmm0 # 0x1e02c00 vmovaps %xmm0, 0x2e0(%rsp) vmovaps 0x300(%rsp), %xmm3 vmovaps 0x2f0(%rsp), %xmm2 vmovaps 0x2e0(%rsp), %xmm0 vmovaps %xmm3, 0x4e0(%rsp) vmovaps %xmm2, 0x4d0(%rsp) vmovaps %xmm0, 0x4c0(%rsp) vmovaps 0x4e0(%rsp), %xmm2 vmovaps 0x4d0(%rsp), %xmm0 vmovaps 0x4c0(%rsp), %xmm3 vfmadd213ps %xmm3, %xmm2, %xmm0 # xmm0 = (xmm2 * xmm0) + xmm3 vmovaps %xmm0, -0x70(%rsp) vmovaps 
-0x70(%rsp), %xmm2 vmovaps 0x10(%rsp), %xmm0 vmovaps %xmm2, 0x2d0(%rsp) vmovaps %xmm0, 0x2c0(%rsp) vmovaps 0x7a7487(%rip), %xmm0 # 0x1e02c10 vmovaps %xmm0, 0x2b0(%rsp) vmovaps 0x2d0(%rsp), %xmm3 vmovaps 0x2c0(%rsp), %xmm2 vmovaps 0x2b0(%rsp), %xmm0 vmovaps %xmm3, 0x510(%rsp) vmovaps %xmm2, 0x500(%rsp) vmovaps %xmm0, 0x4f0(%rsp) vmovaps 0x510(%rsp), %xmm2 vmovaps 0x500(%rsp), %xmm0 vmovaps 0x4f0(%rsp), %xmm3 vfmadd213ps %xmm3, %xmm2, %xmm0 # xmm0 = (xmm2 * xmm0) + xmm3 vmovaps %xmm0, -0x70(%rsp) vmovaps -0x70(%rsp), %xmm2 vmovaps 0x10(%rsp), %xmm0 vmovaps %xmm2, 0x2a0(%rsp) vmovaps %xmm0, 0x290(%rsp) vmovaps 0x7a740c(%rip), %xmm0 # 0x1e02c20 vmovaps %xmm0, 0x280(%rsp) vmovaps 0x2a0(%rsp), %xmm3 vmovaps 0x290(%rsp), %xmm2 vmovaps 0x280(%rsp), %xmm0 vmovaps %xmm3, 0x540(%rsp) vmovaps %xmm2, 0x530(%rsp) vmovaps %xmm0, 0x520(%rsp) vmovaps 0x540(%rsp), %xmm2 vmovaps 0x530(%rsp), %xmm0 vmovaps 0x520(%rsp), %xmm3 vfmadd213ps %xmm3, %xmm2, %xmm0 # xmm0 = (xmm2 * xmm0) + xmm3 vmovaps %xmm0, -0x70(%rsp) vmovaps -0x70(%rsp), %xmm2 vmovaps 0x10(%rsp), %xmm0 vmovaps %xmm2, 0x270(%rsp) vmovaps %xmm0, 0x260(%rsp) vmovaps 0x7a7391(%rip), %xmm0 # 0x1e02c30 vmovaps %xmm0, 0x250(%rsp) vmovaps 0x270(%rsp), %xmm3 vmovaps 0x260(%rsp), %xmm2 vmovaps 0x250(%rsp), %xmm0 vmovaps %xmm3, 0x570(%rsp) vmovaps %xmm2, 0x560(%rsp) vmovaps %xmm0, 0x550(%rsp) vmovaps 0x570(%rsp), %xmm2 vmovaps 0x560(%rsp), %xmm0 vmovaps 0x550(%rsp), %xmm3 vfmadd213ps %xmm3, %xmm2, %xmm0 # xmm0 = (xmm2 * xmm0) + xmm3 vmovaps %xmm0, -0x70(%rsp) vmovaps -0x70(%rsp), %xmm2 vmovaps 0x10(%rsp), %xmm0 vmovaps %xmm2, 0xa0(%rsp) vmovaps %xmm0, 0x90(%rsp) vmovaps 0xa0(%rsp), %xmm0 vmovaps 0x90(%rsp), %xmm2 vmulps %xmm2, %xmm0, %xmm0 vmovaps %xmm0, -0x70(%rsp) vmovaps -0x70(%rsp), %xmm2 vmovaps -0x60(%rsp), %xmm0 vmovaps %xmm2, 0x80(%rsp) vmovaps %xmm0, 0x70(%rsp) vmovaps 0x80(%rsp), %xmm0 vmovaps 0x70(%rsp), %xmm2 vmulps %xmm2, %xmm0, %xmm0 vmovaps %xmm0, -0x70(%rsp) vmovaps -0x30(%rsp), %xmm2 vmovaps -0x70(%rsp), %xmm0 
vmovaps %xmm2, 0x240(%rsp) vmovaps 0x7a7181(%rip), %xmm2 # 0x1e02b10 vmovaps %xmm2, 0x230(%rsp) vmovaps %xmm0, 0x220(%rsp) vmovaps 0x240(%rsp), %xmm3 vmovaps 0x230(%rsp), %xmm2 vmovaps 0x220(%rsp), %xmm0 vmovaps %xmm3, 0x5a0(%rsp) vmovaps %xmm2, 0x590(%rsp) vmovaps %xmm0, 0x580(%rsp) vmovaps 0x5a0(%rsp), %xmm2 vmovaps 0x590(%rsp), %xmm0 vmovaps 0x580(%rsp), %xmm3 vfmadd213ps %xmm3, %xmm2, %xmm0 # xmm0 = (xmm2 * xmm0) + xmm3 vmovaps %xmm0, -0x70(%rsp) vmovaps -0x60(%rsp), %xmm2 vmovaps -0x70(%rsp), %xmm0 vmovaps %xmm2, 0x1e0(%rsp) vmovaps %xmm1, 0x1d0(%rsp) vmovaps %xmm0, 0x1c0(%rsp) vmovaps 0x1e0(%rsp), %xmm2 vmovaps 0x1d0(%rsp), %xmm1 vmovaps 0x1c0(%rsp), %xmm0 vmovaps %xmm2, 0x3f0(%rsp) vmovaps %xmm1, 0x3e0(%rsp) vmovaps %xmm0, 0x3d0(%rsp) vmovaps 0x3f0(%rsp), %xmm1 vmovaps 0x3e0(%rsp), %xmm0 vmovaps 0x3d0(%rsp), %xmm2 vfnmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = -(xmm1 * xmm0) + xmm2 vmovaps %xmm0, -0x70(%rsp) vmovaps 0x10(%rsp), %xmm1 vmovaps -0x70(%rsp), %xmm0 vmovaps %xmm1, 0x100(%rsp) vmovaps %xmm0, 0xf0(%rsp) vmovaps 0x100(%rsp), %xmm0 vmovaps 0xf0(%rsp), %xmm1 vaddps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x10(%rsp) vmovaps -0x30(%rsp), %xmm1 vmovaps 0x10(%rsp), %xmm0 vmovaps %xmm1, 0x210(%rsp) vmovaps 0x7a7029(%rip), %xmm1 # 0x1e02b00 vmovaps %xmm1, 0x200(%rsp) vmovaps %xmm0, 0x1f0(%rsp) vmovaps 0x210(%rsp), %xmm2 vmovaps 0x200(%rsp), %xmm1 vmovaps 0x1f0(%rsp), %xmm0 vmovaps %xmm2, 0x5d0(%rsp) vmovaps %xmm1, 0x5c0(%rsp) vmovaps %xmm0, 0x5b0(%rsp) vmovaps 0x5d0(%rsp), %xmm1 vmovaps 0x5c0(%rsp), %xmm0 vmovaps 0x5b0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x10(%rsp) vmovaps 0x10(%rsp), %xmm1 vmovaps -0x20(%rsp), %xmm0 vmovaps %xmm1, 0x660(%rsp) vmovaps %xmm0, 0x650(%rsp) vmovaps 0x660(%rsp), %xmm0 vmovaps 0x650(%rsp), %xmm1 vpor %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x10(%rsp) vmovaps 0x10(%rsp), %xmm0 addq $0x6b8, %rsp # imm = 0x6B8 retq nopl (%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/unaryop_x86_fma.cpp
ncnn::UnaryOp_x86_fma_functor::unary_op_sin::func_pack8(float vector[8] const&) const
__m256 func_pack8(const __m256& x) const { return sin256_ps(x); }
pushq %rbp movq %rsp, %rbp andq $-0x20, %rsp subq $0xec0, %rsp # imm = 0xEC0 movq %rdi, 0x18(%rsp) movq %rsi, 0x10(%rsp) movq 0x10(%rsp), %rax vmovaps (%rax), %ymm0 vmovaps %ymm0, 0x240(%rsp) vxorps %xmm0, %xmm0, %xmm0 vmovaps %ymm0, 0x260(%rsp) vmovaps 0x260(%rsp), %ymm0 vmovaps %ymm0, 0x200(%rsp) vmovaps 0x240(%rsp), %ymm0 vmovaps %ymm0, 0x1c0(%rsp) vmovaps 0x240(%rsp), %ymm0 vmovaps %ymm0, 0x760(%rsp) vmovaps 0x7b966a(%rip), %ymm0 # 0x1e152a0 vmovaps %ymm0, 0x740(%rsp) vmovaps 0x760(%rsp), %ymm0 vmovaps 0x740(%rsp), %ymm1 vandps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x240(%rsp) vmovaps 0x1c0(%rsp), %ymm0 vmovaps %ymm0, 0x720(%rsp) vmovaps 0x7aaf48(%rip), %ymm0 # 0x1e06bc0 vmovaps %ymm0, 0x700(%rsp) vmovaps 0x720(%rsp), %ymm0 vmovaps 0x700(%rsp), %ymm1 vandps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x1c0(%rsp) vmovaps 0x240(%rsp), %ymm0 vmovaps %ymm0, 0x3a0(%rsp) vmovaps 0x7b9606(%rip), %ymm0 # 0x1e152c0 vmovaps %ymm0, 0x380(%rsp) vmovaps 0x3a0(%rsp), %ymm0 vmovaps 0x380(%rsp), %ymm1 vmulps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x1a0(%rsp) vmovaps 0x1a0(%rsp), %ymm0 vmovaps %ymm0, 0x860(%rsp) vcvttps2dq 0x860(%rsp), %ymm0 vmovaps %ymm0, 0x100(%rsp) vmovdqa 0x100(%rsp), %xmm0 vmovdqa %xmm0, 0x130(%rsp) vmovdqa 0x110(%rsp), %xmm0 vmovdqa %xmm0, 0x120(%rsp) vmovdqa 0x130(%rsp), %xmm0 vmovdqa %xmm0, 0xc50(%rsp) vmovdqa 0x7b7d3c(%rip), %xmm0 # 0x1e13a80 vmovdqa %xmm0, 0xc40(%rsp) vmovdqa 0xc50(%rsp), %xmm1 vmovdqa 0xc40(%rsp), %xmm2 vpaddd %xmm2, %xmm1, %xmm1 vmovdqa %xmm1, 0x130(%rsp) vmovdqa 0x120(%rsp), %xmm1 vmovdqa %xmm1, 0xc30(%rsp) vmovdqa %xmm0, 0xc20(%rsp) vmovdqa 0xc30(%rsp), %xmm0 vmovdqa 0xc20(%rsp), %xmm1 vpaddd %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0x120(%rsp) vmovdqa 0x130(%rsp), %xmm0 vmovdqa %xmm0, 0xd70(%rsp) vmovdqa 0x7b7cd0(%rip), %xmm0 # 0x1e13a90 vmovdqa %xmm0, 0xd60(%rsp) vmovdqa 0xd70(%rsp), %xmm1 vmovdqa 0xd60(%rsp), %xmm2 vpand %xmm2, %xmm1, %xmm1 vmovdqa %xmm1, 0x130(%rsp) vmovdqa 0x120(%rsp), %xmm1 vmovdqa %xmm1, 0xd50(%rsp) vmovdqa %xmm0, 0xd40(%rsp) 
vmovdqa 0xd50(%rsp), %xmm0 vmovdqa 0xd40(%rsp), %xmm1 vpand %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0x120(%rsp) vmovdqa 0x130(%rsp), %xmm0 vmovdqa %xmm0, 0xe0(%rsp) vmovdqa 0x120(%rsp), %xmm0 vmovdqa %xmm0, 0xf0(%rsp) vmovaps 0xe0(%rsp), %ymm0 vmovaps %ymm0, 0x160(%rsp) vmovaps 0x160(%rsp), %ymm0 vmovaps %ymm0, 0xca0(%rsp) vcvtdq2ps 0xca0(%rsp), %ymm0 vmovaps %ymm0, 0x1a0(%rsp) vmovdqa 0x130(%rsp), %xmm0 vmovdqa %xmm0, 0xd30(%rsp) vmovdqa 0x7b7c0a(%rip), %xmm0 # 0x1e13aa0 vmovdqa %xmm0, 0xd20(%rsp) vmovdqa 0xd30(%rsp), %xmm1 vmovdqa 0xd20(%rsp), %xmm2 vpand %xmm2, %xmm1, %xmm1 vmovdqa %xmm1, 0x150(%rsp) vmovdqa 0x120(%rsp), %xmm1 vmovdqa %xmm1, 0xd10(%rsp) vmovdqa %xmm0, 0xd00(%rsp) vmovdqa 0xd10(%rsp), %xmm0 vmovdqa 0xd00(%rsp), %xmm1 vpand %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0x140(%rsp) vmovdqa 0x150(%rsp), %xmm0 vmovdqa %xmm0, 0xc90(%rsp) movl $0x1d, 0xc8c(%rsp) vmovdqa 0xc90(%rsp), %xmm0 vmovd 0xc8c(%rsp), %xmm1 vpslld %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0x150(%rsp) vmovdqa 0x140(%rsp), %xmm0 vmovdqa %xmm0, 0xc70(%rsp) movl $0x1d, 0xc6c(%rsp) vmovdqa 0xc70(%rsp), %xmm0 vmovd 0xc6c(%rsp), %xmm1 vpslld %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0x140(%rsp) vmovdqa 0x150(%rsp), %xmm0 vmovdqa %xmm0, 0xc0(%rsp) vmovdqa 0x140(%rsp), %xmm0 vmovdqa %xmm0, 0xd0(%rsp) vmovaps 0xc0(%rsp), %ymm0 vmovaps %ymm0, 0x180(%rsp) vmovdqa 0x130(%rsp), %xmm0 vmovdqa %xmm0, 0xcf0(%rsp) vmovdqa 0x7b7af0(%rip), %xmm0 # 0x1e13ab0 vmovdqa %xmm0, 0xce0(%rsp) vmovdqa 0xcf0(%rsp), %xmm1 vmovdqa 0xce0(%rsp), %xmm2 vpand %xmm2, %xmm1, %xmm1 vmovdqa %xmm1, 0x130(%rsp) vmovdqa 0x120(%rsp), %xmm1 vmovdqa %xmm1, 0xcd0(%rsp) vmovdqa %xmm0, 0xcc0(%rsp) vmovdqa 0xcd0(%rsp), %xmm0 vmovdqa 0xcc0(%rsp), %xmm1 vpand %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0x120(%rsp) vmovdqa 0x130(%rsp), %xmm2 vxorps %xmm0, %xmm0, %xmm0 vmovdqa %xmm0, 0xdd0(%rsp) vmovdqa 0xdd0(%rsp), %xmm1 vmovdqa %xmm2, 0xdb0(%rsp) vmovdqa %xmm1, 0xda0(%rsp) vmovdqa 0xdb0(%rsp), %xmm1 vmovdqa 0xda0(%rsp), %xmm2 vpcmpeqd %xmm2, %xmm1, %xmm1 vmovdqa %xmm1, 
0x130(%rsp) vmovdqa 0x120(%rsp), %xmm1 vmovdqa %xmm0, 0xdc0(%rsp) vmovdqa 0xdc0(%rsp), %xmm0 vmovdqa %xmm1, 0xd90(%rsp) vmovdqa %xmm0, 0xd80(%rsp) vmovdqa 0xd90(%rsp), %xmm0 vmovdqa 0xd80(%rsp), %xmm1 vpcmpeqd %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0x120(%rsp) vmovdqa 0x130(%rsp), %xmm0 vmovdqa %xmm0, 0xa0(%rsp) vmovdqa 0x120(%rsp), %xmm0 vmovdqa %xmm0, 0xb0(%rsp) vmovaps 0xa0(%rsp), %ymm0 vmovaps %ymm0, 0x160(%rsp) vmovaps 0x180(%rsp), %ymm0 vmovaps %ymm0, 0x8a0(%rsp) vmovaps 0x8a0(%rsp), %ymm0 vmovaps %ymm0, 0x80(%rsp) vmovaps 0x160(%rsp), %ymm0 vmovaps %ymm0, 0x880(%rsp) vmovaps 0x880(%rsp), %ymm0 vmovaps %ymm0, 0x60(%rsp) vmovaps 0x1c0(%rsp), %ymm1 vmovaps 0x80(%rsp), %ymm0 vmovaps %ymm1, 0xe40(%rsp) vmovaps %ymm0, 0xe20(%rsp) vmovaps 0xe40(%rsp), %ymm0 vmovaps 0xe20(%rsp), %ymm1 vxorps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x1c0(%rsp) vmovaps 0x7b915c(%rip), %ymm0 # 0x1e152e0 vmovaps %ymm0, 0x220(%rsp) vmovaps 0x7b916b(%rip), %ymm0 # 0x1e15300 vmovaps %ymm0, 0x200(%rsp) vmovaps 0x7b917a(%rip), %ymm0 # 0x1e15320 vmovaps %ymm0, 0x1e0(%rsp) vmovaps 0x1a0(%rsp), %ymm2 vmovaps 0x220(%rsp), %ymm1 vmovaps 0x240(%rsp), %ymm0 vmovaps %ymm2, 0x6a0(%rsp) vmovaps %ymm1, 0x680(%rsp) vmovaps %ymm0, 0x660(%rsp) vmovaps 0x6a0(%rsp), %ymm2 vmovaps 0x680(%rsp), %ymm1 vmovaps 0x660(%rsp), %ymm0 vmovaps %ymm2, 0x900(%rsp) vmovaps %ymm1, 0x8e0(%rsp) vmovaps %ymm0, 0x8c0(%rsp) vmovaps 0x900(%rsp), %ymm1 vmovaps 0x8e0(%rsp), %ymm0 vmovaps 0x8c0(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x240(%rsp) vmovaps 0x1a0(%rsp), %ymm2 vmovaps 0x200(%rsp), %ymm1 vmovaps 0x240(%rsp), %ymm0 vmovaps %ymm2, 0x640(%rsp) vmovaps %ymm1, 0x620(%rsp) vmovaps %ymm0, 0x600(%rsp) vmovaps 0x640(%rsp), %ymm2 vmovaps 0x620(%rsp), %ymm1 vmovaps 0x600(%rsp), %ymm0 vmovaps %ymm2, 0x960(%rsp) vmovaps %ymm1, 0x940(%rsp) vmovaps %ymm0, 0x920(%rsp) vmovaps 0x960(%rsp), %ymm1 vmovaps 0x940(%rsp), %ymm0 vmovaps 0x920(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * 
ymm0) + ymm2 vmovaps %ymm0, 0x240(%rsp) vmovaps 0x1a0(%rsp), %ymm2 vmovaps 0x1e0(%rsp), %ymm1 vmovaps 0x240(%rsp), %ymm0 vmovaps %ymm2, 0x5e0(%rsp) vmovaps %ymm1, 0x5c0(%rsp) vmovaps %ymm0, 0x5a0(%rsp) vmovaps 0x5e0(%rsp), %ymm2 vmovaps 0x5c0(%rsp), %ymm1 vmovaps 0x5a0(%rsp), %ymm0 vmovaps %ymm2, 0x9c0(%rsp) vmovaps %ymm1, 0x9a0(%rsp) vmovaps %ymm0, 0x980(%rsp) vmovaps 0x9c0(%rsp), %ymm1 vmovaps 0x9a0(%rsp), %ymm0 vmovaps 0x980(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x240(%rsp) vmovaps 0x7b8fca(%rip), %ymm0 # 0x1e15340 vmovaps %ymm0, 0x1a0(%rsp) vmovaps 0x240(%rsp), %ymm0 vmovaps %ymm0, 0x360(%rsp) vmovaps %ymm0, 0x340(%rsp) vmovaps 0x360(%rsp), %ymm0 vmovaps 0x340(%rsp), %ymm1 vmulps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x40(%rsp) vmovaps 0x1a0(%rsp), %ymm1 vmovaps 0x40(%rsp), %ymm0 vmovaps %ymm1, 0x580(%rsp) vmovaps %ymm0, 0x560(%rsp) vmovaps 0x7b8f81(%rip), %ymm0 # 0x1e15360 vmovaps %ymm0, 0x540(%rsp) vmovaps 0x580(%rsp), %ymm2 vmovaps 0x560(%rsp), %ymm1 vmovaps 0x540(%rsp), %ymm0 vmovaps %ymm2, 0xa20(%rsp) vmovaps %ymm1, 0xa00(%rsp) vmovaps %ymm0, 0x9e0(%rsp) vmovaps 0xa20(%rsp), %ymm1 vmovaps 0xa00(%rsp), %ymm0 vmovaps 0x9e0(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x1a0(%rsp) vmovaps 0x1a0(%rsp), %ymm1 vmovaps 0x40(%rsp), %ymm0 vmovaps %ymm1, 0x520(%rsp) vmovaps %ymm0, 0x500(%rsp) vmovaps 0x7b8f10(%rip), %ymm0 # 0x1e15380 vmovaps %ymm0, 0x4e0(%rsp) vmovaps 0x520(%rsp), %ymm2 vmovaps 0x500(%rsp), %ymm1 vmovaps 0x4e0(%rsp), %ymm0 vmovaps %ymm2, 0xa80(%rsp) vmovaps %ymm1, 0xa60(%rsp) vmovaps %ymm0, 0xa40(%rsp) vmovaps 0xa80(%rsp), %ymm1 vmovaps 0xa60(%rsp), %ymm0 vmovaps 0xa40(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x1a0(%rsp) vmovaps 0x1a0(%rsp), %ymm1 vmovaps 0x40(%rsp), %ymm0 vmovaps %ymm1, 0x320(%rsp) vmovaps %ymm0, 0x300(%rsp) vmovaps 0x320(%rsp), %ymm0 vmovaps 0x300(%rsp), %ymm1 vmulps %ymm1, %ymm0, %ymm0 
vmovaps %ymm0, 0x1a0(%rsp) vmovaps 0x1a0(%rsp), %ymm1 vmovaps 0x40(%rsp), %ymm0 vmovaps %ymm1, 0x2e0(%rsp) vmovaps %ymm0, 0x2c0(%rsp) vmovaps 0x2e0(%rsp), %ymm0 vmovaps 0x2c0(%rsp), %ymm1 vmulps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x1a0(%rsp) vmovaps 0x40(%rsp), %ymm1 vmovaps 0x1a0(%rsp), %ymm0 vmovaps %ymm1, 0x7c0(%rsp) vmovaps 0x7a7688(%rip), %ymm1 # 0x1e03c00 vmovaps %ymm1, 0x7a0(%rsp) vmovaps %ymm0, 0x780(%rsp) vmovaps 0x7c0(%rsp), %ymm2 vmovaps 0x7a0(%rsp), %ymm1 vmovaps 0x780(%rsp), %ymm0 vmovaps %ymm2, 0xc00(%rsp) vmovaps %ymm1, 0xbe0(%rsp) vmovaps %ymm0, 0xbc0(%rsp) vmovaps 0xc00(%rsp), %ymm1 vmovaps 0xbe0(%rsp), %ymm0 vmovaps 0xbc0(%rsp), %ymm2 vfnmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = -(ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x1a0(%rsp) vmovaps 0x1a0(%rsp), %ymm0 vmovaps %ymm0, 0x840(%rsp) vmovaps 0x7a757d(%rip), %ymm0 # 0x1e03b80 vmovaps %ymm0, 0x820(%rsp) vmovaps 0x840(%rsp), %ymm0 vmovaps 0x820(%rsp), %ymm1 vaddps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x1a0(%rsp) vmovaps 0x7b8d6d(%rip), %ymm0 # 0x1e153a0 vmovaps %ymm0, 0x20(%rsp) vmovaps 0x20(%rsp), %ymm1 vmovaps 0x40(%rsp), %ymm0 vmovaps %ymm1, 0x4c0(%rsp) vmovaps %ymm0, 0x4a0(%rsp) vmovaps 0x7b8d61(%rip), %ymm0 # 0x1e153c0 vmovaps %ymm0, 0x480(%rsp) vmovaps 0x4c0(%rsp), %ymm2 vmovaps 0x4a0(%rsp), %ymm1 vmovaps 0x480(%rsp), %ymm0 vmovaps %ymm2, 0xae0(%rsp) vmovaps %ymm1, 0xac0(%rsp) vmovaps %ymm0, 0xaa0(%rsp) vmovaps 0xae0(%rsp), %ymm1 vmovaps 0xac0(%rsp), %ymm0 vmovaps 0xaa0(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x20(%rsp) vmovaps 0x20(%rsp), %ymm1 vmovaps 0x40(%rsp), %ymm0 vmovaps %ymm1, 0x460(%rsp) vmovaps %ymm0, 0x440(%rsp) vmovaps 0x7b8cf6(%rip), %ymm0 # 0x1e153e0 vmovaps %ymm0, 0x420(%rsp) vmovaps 0x460(%rsp), %ymm2 vmovaps 0x440(%rsp), %ymm1 vmovaps 0x420(%rsp), %ymm0 vmovaps %ymm2, 0xb40(%rsp) vmovaps %ymm1, 0xb20(%rsp) vmovaps %ymm0, 0xb00(%rsp) vmovaps 0xb40(%rsp), %ymm1 vmovaps 0xb20(%rsp), %ymm0 vmovaps 0xb00(%rsp), %ymm2 vfmadd213ps %ymm2, 
%ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x20(%rsp) vmovaps 0x20(%rsp), %ymm1 vmovaps 0x40(%rsp), %ymm0 vmovaps %ymm1, 0x2a0(%rsp) vmovaps %ymm0, 0x280(%rsp) vmovaps 0x2a0(%rsp), %ymm0 vmovaps 0x280(%rsp), %ymm1 vmulps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x20(%rsp) vmovaps 0x20(%rsp), %ymm1 vmovaps 0x240(%rsp), %ymm0 vmovaps %ymm1, 0x400(%rsp) vmovaps %ymm0, 0x3e0(%rsp) vmovaps %ymm0, 0x3c0(%rsp) vmovaps 0x400(%rsp), %ymm2 vmovaps 0x3e0(%rsp), %ymm1 vmovaps 0x3c0(%rsp), %ymm0 vmovaps %ymm2, 0xba0(%rsp) vmovaps %ymm1, 0xb80(%rsp) vmovaps %ymm0, 0xb60(%rsp) vmovaps 0xba0(%rsp), %ymm1 vmovaps 0xb80(%rsp), %ymm0 vmovaps 0xb60(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x20(%rsp) vmovaps 0x60(%rsp), %ymm0 vmovaps %ymm0, 0x1e0(%rsp) vmovaps 0x1e0(%rsp), %ymm1 vmovaps 0x20(%rsp), %ymm0 vmovaps %ymm1, 0x6e0(%rsp) vmovaps %ymm0, 0x6c0(%rsp) vmovaps 0x6e0(%rsp), %ymm0 vmovaps 0x6c0(%rsp), %ymm1 vandps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x20(%rsp) vmovaps 0x1e0(%rsp), %ymm1 vmovaps 0x1a0(%rsp), %ymm0 vmovaps %ymm1, 0xe80(%rsp) vmovaps %ymm0, 0xe60(%rsp) vmovaps 0xe80(%rsp), %ymm0 vxorps %xmm1, %xmm1, %xmm1 vcmptrueps %ymm1, %ymm1, %ymm1 vxorps %ymm1, %ymm0, %ymm0 vmovaps 0xe60(%rsp), %ymm1 vandps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x1a0(%rsp) vmovaps 0x1a0(%rsp), %ymm1 vmovaps 0x20(%rsp), %ymm0 vmovaps %ymm1, 0x800(%rsp) vmovaps %ymm0, 0x7e0(%rsp) vmovaps 0x800(%rsp), %ymm0 vaddps 0x7e0(%rsp), %ymm0, %ymm0 vmovaps %ymm0, 0x1a0(%rsp) vmovaps 0x1a0(%rsp), %ymm1 vmovaps 0x1c0(%rsp), %ymm0 vmovaps %ymm1, 0xe00(%rsp) vmovaps %ymm0, 0xde0(%rsp) vmovaps 0xe00(%rsp), %ymm0 vmovaps 0xde0(%rsp), %ymm1 vxorps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x1a0(%rsp) vmovaps 0x1a0(%rsp), %ymm0 movq %rbp, %rsp popq %rbp retq nopl (%rax,%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/unaryop_x86_fma.cpp
ncnn::UnaryOp_x86_fma_functor::unary_op_sin::func_pack4(float vector[4] const&) const
__m128 func_pack4(const __m128& x) const { return sin_ps(x); }
subq $0x678, %rsp # imm = 0x678 movq %rdi, -0x78(%rsp) movq %rsi, -0x80(%rsp) movq -0x80(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x40(%rsp) vxorps %xmm0, %xmm0, %xmm0 vmovaps %xmm0, 0x70(%rsp) vmovaps 0x70(%rsp), %xmm0 vmovaps %xmm0, 0x20(%rsp) vmovaps 0x40(%rsp), %xmm0 vmovaps %xmm0, (%rsp) vmovaps 0x40(%rsp), %xmm0 vmovaps %xmm0, 0x210(%rsp) vmovaps 0x7b2808(%rip), %xmm0 # 0x1e0f1a0 vmovaps %xmm0, 0x200(%rsp) vmovdqa 0x210(%rsp), %xmm0 vmovdqa 0x200(%rsp), %xmm1 vpand %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0x40(%rsp) vmovaps (%rsp), %xmm0 vmovaps %xmm0, 0x1f0(%rsp) vmovaps 0x7aa20d(%rip), %xmm0 # 0x1e06be0 vmovaps %xmm0, 0x1e0(%rsp) vmovdqa 0x1f0(%rsp), %xmm0 vmovdqa 0x1e0(%rsp), %xmm1 vpand %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, (%rsp) vmovaps 0x40(%rsp), %xmm0 vmovaps %xmm0, 0x110(%rsp) vmovaps 0x7b7062(%rip), %xmm0 # 0x1e13a70 vmovaps %xmm0, 0x100(%rsp) vmovaps 0x110(%rsp), %xmm0 vmovaps 0x100(%rsp), %xmm1 vmulps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, -0x10(%rsp) vmovaps -0x10(%rsp), %xmm0 vmovaps %xmm0, 0x1a0(%rsp) vcvttps2dq 0x1a0(%rsp), %xmm0 vmovdqa %xmm0, -0x30(%rsp) vmovdqa -0x30(%rsp), %xmm0 vmovdqa %xmm0, 0x130(%rsp) vmovdqa 0x7b7018(%rip), %xmm0 # 0x1e13a80 vmovdqa %xmm0, 0x120(%rsp) vmovdqa 0x130(%rsp), %xmm0 vmovdqa 0x120(%rsp), %xmm1 vpaddd %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, -0x30(%rsp) vmovdqa -0x30(%rsp), %xmm0 vmovdqa %xmm0, 0x5f0(%rsp) vmovdqa 0x7b6fec(%rip), %xmm0 # 0x1e13a90 vmovdqa %xmm0, 0x5e0(%rsp) vmovdqa 0x5f0(%rsp), %xmm0 vmovdqa 0x5e0(%rsp), %xmm1 vpand %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, -0x30(%rsp) vmovdqa -0x30(%rsp), %xmm0 vmovdqa %xmm0, 0x1b0(%rsp) vcvtdq2ps 0x1b0(%rsp), %xmm0 vmovaps %xmm0, -0x10(%rsp) vmovdqa -0x30(%rsp), %xmm0 vmovdqa %xmm0, 0x5d0(%rsp) vmovdqa 0x7b6fa2(%rip), %xmm0 # 0x1e13aa0 vmovdqa %xmm0, 0x5c0(%rsp) vmovdqa 0x5d0(%rsp), %xmm0 vmovdqa 0x5c0(%rsp), %xmm1 vpand %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, -0x20(%rsp) vmovdqa -0x20(%rsp), %xmm0 vmovdqa %xmm0, 0x150(%rsp) movl $0x1d, 0x14c(%rsp) vmovdqa 0x150(%rsp), %xmm0 
vmovd 0x14c(%rsp), %xmm1 vpslld %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, -0x20(%rsp) vmovdqa -0x30(%rsp), %xmm0 vmovdqa %xmm0, 0x5b0(%rsp) vmovdqa 0x7b6f40(%rip), %xmm0 # 0x1e13ab0 vmovdqa %xmm0, 0x5a0(%rsp) vmovdqa 0x5b0(%rsp), %xmm0 vmovdqa 0x5a0(%rsp), %xmm1 vpand %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, -0x30(%rsp) vmovdqa -0x30(%rsp), %xmm1 vxorps %xmm0, %xmm0, %xmm0 vmovdqa %xmm0, 0x620(%rsp) vmovdqa 0x620(%rsp), %xmm0 vmovdqa %xmm1, 0x610(%rsp) vmovdqa %xmm0, 0x600(%rsp) vmovdqa 0x610(%rsp), %xmm0 vmovdqa 0x600(%rsp), %xmm1 vpcmpeqd %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, -0x30(%rsp) vmovdqa -0x20(%rsp), %xmm0 vmovdqa %xmm0, 0x3e0(%rsp) vmovdqa 0x3e0(%rsp), %xmm0 vmovdqa %xmm0, -0x40(%rsp) vmovdqa -0x30(%rsp), %xmm0 vmovdqa %xmm0, 0x3d0(%rsp) vmovdqa 0x3d0(%rsp), %xmm0 vmovdqa %xmm0, -0x50(%rsp) vmovaps (%rsp), %xmm1 vmovaps -0x40(%rsp), %xmm0 vmovaps %xmm1, 0x660(%rsp) vmovaps %xmm0, 0x650(%rsp) vmovdqa 0x660(%rsp), %xmm0 vmovdqa 0x650(%rsp), %xmm1 vpxor %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, (%rsp) vmovaps 0x7b6e65(%rip), %xmm0 # 0x1e13ac0 vmovaps %xmm0, 0x30(%rsp) vmovaps 0x7b6e67(%rip), %xmm0 # 0x1e13ad0 vmovaps %xmm0, 0x20(%rsp) vmovaps 0x7b6e69(%rip), %xmm0 # 0x1e13ae0 vmovaps %xmm0, 0x10(%rsp) vmovaps -0x10(%rsp), %xmm2 vmovaps 0x30(%rsp), %xmm1 vmovaps 0x40(%rsp), %xmm0 vmovaps %xmm2, 0x3c0(%rsp) vmovaps %xmm1, 0x3b0(%rsp) vmovaps %xmm0, 0x3a0(%rsp) vmovaps 0x3c0(%rsp), %xmm2 vmovaps 0x3b0(%rsp), %xmm1 vmovaps 0x3a0(%rsp), %xmm0 vmovaps %xmm2, 0x440(%rsp) vmovaps %xmm1, 0x430(%rsp) vmovaps %xmm0, 0x420(%rsp) vmovaps 0x440(%rsp), %xmm1 vmovaps 0x430(%rsp), %xmm0 vmovaps 0x420(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x40(%rsp) vmovaps -0x10(%rsp), %xmm2 vmovaps 0x20(%rsp), %xmm1 vmovaps 0x40(%rsp), %xmm0 vmovaps %xmm2, 0x390(%rsp) vmovaps %xmm1, 0x380(%rsp) vmovaps %xmm0, 0x370(%rsp) vmovaps 0x390(%rsp), %xmm2 vmovaps 0x380(%rsp), %xmm1 vmovaps 0x370(%rsp), %xmm0 vmovaps %xmm2, 0x470(%rsp) vmovaps %xmm1, 0x460(%rsp) vmovaps 
%xmm0, 0x450(%rsp) vmovaps 0x470(%rsp), %xmm1 vmovaps 0x460(%rsp), %xmm0 vmovaps 0x450(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x40(%rsp) vmovaps -0x10(%rsp), %xmm2 vmovaps 0x10(%rsp), %xmm1 vmovaps 0x40(%rsp), %xmm0 vmovaps %xmm2, 0x360(%rsp) vmovaps %xmm1, 0x350(%rsp) vmovaps %xmm0, 0x340(%rsp) vmovaps 0x360(%rsp), %xmm2 vmovaps 0x350(%rsp), %xmm1 vmovaps 0x340(%rsp), %xmm0 vmovaps %xmm2, 0x4a0(%rsp) vmovaps %xmm1, 0x490(%rsp) vmovaps %xmm0, 0x480(%rsp) vmovaps 0x4a0(%rsp), %xmm1 vmovaps 0x490(%rsp), %xmm0 vmovaps 0x480(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x40(%rsp) vmovaps 0x7b6cd0(%rip), %xmm0 # 0x1e13af0 vmovaps %xmm0, -0x10(%rsp) vmovaps 0x40(%rsp), %xmm0 vmovaps %xmm0, 0xf0(%rsp) vmovaps %xmm0, 0xe0(%rsp) vmovaps 0xf0(%rsp), %xmm0 vmovaps 0xe0(%rsp), %xmm1 vmulps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, -0x60(%rsp) vmovaps -0x10(%rsp), %xmm1 vmovaps -0x60(%rsp), %xmm0 vmovaps %xmm1, 0x330(%rsp) vmovaps %xmm0, 0x320(%rsp) vmovaps 0x7b6c80(%rip), %xmm0 # 0x1e13b00 vmovaps %xmm0, 0x310(%rsp) vmovaps 0x330(%rsp), %xmm2 vmovaps 0x320(%rsp), %xmm1 vmovaps 0x310(%rsp), %xmm0 vmovaps %xmm2, 0x4d0(%rsp) vmovaps %xmm1, 0x4c0(%rsp) vmovaps %xmm0, 0x4b0(%rsp) vmovaps 0x4d0(%rsp), %xmm1 vmovaps 0x4c0(%rsp), %xmm0 vmovaps 0x4b0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, -0x10(%rsp) vmovaps -0x10(%rsp), %xmm1 vmovaps -0x60(%rsp), %xmm0 vmovaps %xmm1, 0x300(%rsp) vmovaps %xmm0, 0x2f0(%rsp) vmovaps 0x7b6c05(%rip), %xmm0 # 0x1e13b10 vmovaps %xmm0, 0x2e0(%rsp) vmovaps 0x300(%rsp), %xmm2 vmovaps 0x2f0(%rsp), %xmm1 vmovaps 0x2e0(%rsp), %xmm0 vmovaps %xmm2, 0x500(%rsp) vmovaps %xmm1, 0x4f0(%rsp) vmovaps %xmm0, 0x4e0(%rsp) vmovaps 0x500(%rsp), %xmm1 vmovaps 0x4f0(%rsp), %xmm0 vmovaps 0x4e0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, -0x10(%rsp) vmovaps -0x10(%rsp), %xmm1 vmovaps -0x60(%rsp), 
%xmm0 vmovaps %xmm1, 0xd0(%rsp) vmovaps %xmm0, 0xc0(%rsp) vmovaps 0xd0(%rsp), %xmm0 vmovaps 0xc0(%rsp), %xmm1 vmulps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, -0x10(%rsp) vmovaps -0x10(%rsp), %xmm1 vmovaps -0x60(%rsp), %xmm0 vmovaps %xmm1, 0xb0(%rsp) vmovaps %xmm0, 0xa0(%rsp) vmovaps 0xb0(%rsp), %xmm0 vmovaps 0xa0(%rsp), %xmm1 vmulps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, -0x10(%rsp) vmovaps -0x60(%rsp), %xmm1 vmovaps -0x10(%rsp), %xmm0 vmovaps %xmm1, 0x240(%rsp) vmovaps 0x7a5aef(%rip), %xmm1 # 0x1e02af0 vmovaps %xmm1, 0x230(%rsp) vmovaps %xmm0, 0x220(%rsp) vmovaps 0x240(%rsp), %xmm2 vmovaps 0x230(%rsp), %xmm1 vmovaps 0x220(%rsp), %xmm0 vmovaps %xmm2, 0x410(%rsp) vmovaps %xmm1, 0x400(%rsp) vmovaps %xmm0, 0x3f0(%rsp) vmovaps 0x410(%rsp), %xmm1 vmovaps 0x400(%rsp), %xmm0 vmovaps 0x3f0(%rsp), %xmm2 vfnmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = -(xmm1 * xmm0) + xmm2 vmovaps %xmm0, -0x10(%rsp) vmovaps -0x10(%rsp), %xmm0 vmovaps %xmm0, 0x190(%rsp) vmovaps 0x7a5a2a(%rip), %xmm0 # 0x1e02ab0 vmovaps %xmm0, 0x180(%rsp) vmovaps 0x190(%rsp), %xmm0 vmovaps 0x180(%rsp), %xmm1 vaddps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, -0x10(%rsp) vmovaps 0x7b6a6d(%rip), %xmm0 # 0x1e13b20 vmovaps %xmm0, -0x70(%rsp) vmovaps -0x70(%rsp), %xmm1 vmovaps -0x60(%rsp), %xmm0 vmovaps %xmm1, 0x2d0(%rsp) vmovaps %xmm0, 0x2c0(%rsp) vmovaps 0x7b6a51(%rip), %xmm0 # 0x1e13b30 vmovaps %xmm0, 0x2b0(%rsp) vmovaps 0x2d0(%rsp), %xmm2 vmovaps 0x2c0(%rsp), %xmm1 vmovaps 0x2b0(%rsp), %xmm0 vmovaps %xmm2, 0x530(%rsp) vmovaps %xmm1, 0x520(%rsp) vmovaps %xmm0, 0x510(%rsp) vmovaps 0x530(%rsp), %xmm1 vmovaps 0x520(%rsp), %xmm0 vmovaps 0x510(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, -0x70(%rsp) vmovaps -0x70(%rsp), %xmm1 vmovaps -0x60(%rsp), %xmm0 vmovaps %xmm1, 0x2a0(%rsp) vmovaps %xmm0, 0x290(%rsp) vmovaps 0x7b69d6(%rip), %xmm0 # 0x1e13b40 vmovaps %xmm0, 0x280(%rsp) vmovaps 0x2a0(%rsp), %xmm2 vmovaps 0x290(%rsp), %xmm1 vmovaps 0x280(%rsp), %xmm0 vmovaps %xmm2, 0x560(%rsp) vmovaps %xmm1, 
0x550(%rsp) vmovaps %xmm0, 0x540(%rsp) vmovaps 0x560(%rsp), %xmm1 vmovaps 0x550(%rsp), %xmm0 vmovaps 0x540(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, -0x70(%rsp) vmovaps -0x70(%rsp), %xmm1 vmovaps -0x60(%rsp), %xmm0 vmovaps %xmm1, 0x90(%rsp) vmovaps %xmm0, 0x80(%rsp) vmovaps 0x90(%rsp), %xmm0 vmovaps 0x80(%rsp), %xmm1 vmulps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, -0x70(%rsp) vmovaps -0x70(%rsp), %xmm1 vmovaps 0x40(%rsp), %xmm0 vmovaps %xmm1, 0x270(%rsp) vmovaps %xmm0, 0x260(%rsp) vmovaps %xmm0, 0x250(%rsp) vmovaps 0x270(%rsp), %xmm2 vmovaps 0x260(%rsp), %xmm1 vmovaps 0x250(%rsp), %xmm0 vmovaps %xmm2, 0x590(%rsp) vmovaps %xmm1, 0x580(%rsp) vmovaps %xmm0, 0x570(%rsp) vmovaps 0x590(%rsp), %xmm1 vmovaps 0x580(%rsp), %xmm0 vmovaps 0x570(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, -0x70(%rsp) vmovaps -0x50(%rsp), %xmm0 vmovaps %xmm0, 0x10(%rsp) vmovaps 0x10(%rsp), %xmm1 vmovaps -0x70(%rsp), %xmm0 vmovaps %xmm1, 0x1d0(%rsp) vmovaps %xmm0, 0x1c0(%rsp) vmovdqa 0x1d0(%rsp), %xmm0 vmovdqa 0x1c0(%rsp), %xmm1 vpand %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, -0x70(%rsp) vmovaps 0x10(%rsp), %xmm1 vmovaps -0x10(%rsp), %xmm0 vmovaps %xmm1, 0x60(%rsp) vmovaps %xmm0, 0x50(%rsp) vmovdqa 0x60(%rsp), %xmm0 vpcmpeqd %xmm1, %xmm1, %xmm1 vpxor %xmm1, %xmm0, %xmm0 vmovaps 0x50(%rsp), %xmm1 vpand %xmm1, %xmm0, %xmm0 vmovaps %xmm0, -0x10(%rsp) vmovaps -0x10(%rsp), %xmm1 vmovaps -0x70(%rsp), %xmm0 vmovaps %xmm1, 0x170(%rsp) vmovaps %xmm0, 0x160(%rsp) vmovaps 0x170(%rsp), %xmm0 vaddps 0x160(%rsp), %xmm0, %xmm0 vmovaps %xmm0, -0x10(%rsp) vmovaps -0x10(%rsp), %xmm1 vmovaps (%rsp), %xmm0 vmovaps %xmm1, 0x640(%rsp) vmovaps %xmm0, 0x630(%rsp) vmovaps 0x640(%rsp), %xmm0 vmovaps 0x630(%rsp), %xmm1 vpxor %xmm1, %xmm0, %xmm0 vmovaps %xmm0, -0x10(%rsp) vmovaps -0x10(%rsp), %xmm0 addq $0x678, %rsp # imm = 0x678 retq nopw %cs:(%rax,%rax) nop
/ysh329[P]ncnn/build_O0/src/layer/x86/unaryop_x86_fma.cpp
ncnn::UnaryOp_x86_fma_functor::unary_op_cos::func_pack8(float vector[8] const&) const
__m256 func_pack8(const __m256& x) const { return cos256_ps(x); }
pushq %rbp movq %rsp, %rbp andq $-0x20, %rsp subq $0xda0, %rsp # imm = 0xDA0 movq %rdi, 0x18(%rsp) movq %rsi, 0x10(%rsp) movq 0x10(%rsp), %rax vmovaps (%rax), %ymm0 vmovaps %ymm0, 0x220(%rsp) vxorps %xmm0, %xmm0, %xmm0 vmovaps %ymm0, 0x240(%rsp) vmovaps 0x240(%rsp), %ymm0 vmovaps %ymm0, 0x1e0(%rsp) vmovaps 0x220(%rsp), %ymm0 vmovaps %ymm0, 0x6a0(%rsp) vmovaps 0x7b7e7c(%rip), %ymm0 # 0x1e152a0 vmovaps %ymm0, 0x680(%rsp) vmovaps 0x6a0(%rsp), %ymm0 vmovaps 0x680(%rsp), %ymm1 vandps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x220(%rsp) vmovaps 0x220(%rsp), %ymm0 vmovaps %ymm0, 0x440(%rsp) vmovaps 0x7b7e5a(%rip), %ymm0 # 0x1e152c0 vmovaps %ymm0, 0x420(%rsp) vmovaps 0x440(%rsp), %ymm0 vmovaps 0x420(%rsp), %ymm1 vmulps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x1a0(%rsp) vmovaps 0x1a0(%rsp), %ymm0 vmovaps %ymm0, 0x860(%rsp) vcvttps2dq 0x860(%rsp), %ymm0 vmovaps %ymm0, 0x100(%rsp) vmovdqa 0x100(%rsp), %xmm0 vmovdqa %xmm0, 0x130(%rsp) vmovdqa 0x110(%rsp), %xmm0 vmovdqa %xmm0, 0x120(%rsp) vmovdqa 0x130(%rsp), %xmm0 vmovdqa %xmm0, 0xb30(%rsp) vmovdqa 0x7b6590(%rip), %xmm0 # 0x1e13a80 vmovdqa %xmm0, 0xb20(%rsp) vmovdqa 0xb30(%rsp), %xmm1 vmovdqa 0xb20(%rsp), %xmm2 vpaddd %xmm2, %xmm1, %xmm1 vmovdqa %xmm1, 0x130(%rsp) vmovdqa 0x120(%rsp), %xmm1 vmovdqa %xmm1, 0xb10(%rsp) vmovdqa %xmm0, 0xb00(%rsp) vmovdqa 0xb10(%rsp), %xmm0 vmovdqa 0xb00(%rsp), %xmm1 vpaddd %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0x120(%rsp) vmovdqa 0x130(%rsp), %xmm0 vmovdqa %xmm0, 0xc50(%rsp) vmovdqa 0x7b6524(%rip), %xmm0 # 0x1e13a90 vmovdqa %xmm0, 0xc40(%rsp) vmovdqa 0xc50(%rsp), %xmm1 vmovdqa 0xc40(%rsp), %xmm2 vpand %xmm2, %xmm1, %xmm1 vmovdqa %xmm1, 0x130(%rsp) vmovdqa 0x120(%rsp), %xmm1 vmovdqa %xmm1, 0xc30(%rsp) vmovdqa %xmm0, 0xc20(%rsp) vmovdqa 0xc30(%rsp), %xmm0 vmovdqa 0xc20(%rsp), %xmm1 vpand %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0x120(%rsp) vmovdqa 0x130(%rsp), %xmm0 vmovdqa %xmm0, 0xe0(%rsp) vmovdqa 0x120(%rsp), %xmm0 vmovdqa %xmm0, 0xf0(%rsp) vmovaps 0xe0(%rsp), %ymm0 vmovaps %ymm0, 0x160(%rsp) vmovaps 0x160(%rsp), 
%ymm0 vmovaps %ymm0, 0xb80(%rsp) vcvtdq2ps 0xb80(%rsp), %ymm0 vmovaps %ymm0, 0x1a0(%rsp) vmovdqa 0x130(%rsp), %xmm0 vmovdqa %xmm0, 0xbd0(%rsp) vmovdqa 0x7b646e(%rip), %xmm0 # 0x1e13ab0 vmovdqa %xmm0, 0xbc0(%rsp) vmovdqa 0xbd0(%rsp), %xmm1 vmovdqa 0xbc0(%rsp), %xmm2 vpsubd %xmm2, %xmm1, %xmm1 vmovdqa %xmm1, 0x130(%rsp) vmovdqa 0x120(%rsp), %xmm1 vmovdqa %xmm1, 0xbb0(%rsp) vmovdqa %xmm0, 0xba0(%rsp) vmovdqa 0xbb0(%rsp), %xmm1 vmovdqa 0xba0(%rsp), %xmm2 vpsubd %xmm2, %xmm1, %xmm1 vmovdqa %xmm1, 0x120(%rsp) vmovdqa 0x130(%rsp), %xmm1 vmovdqa %xmm1, 0xd80(%rsp) vmovdqa 0x7b63e2(%rip), %xmm1 # 0x1e13aa0 vmovdqa %xmm1, 0xd70(%rsp) vmovdqa 0xd80(%rsp), %xmm2 vmovdqa 0xd70(%rsp), %xmm3 vpandn %xmm3, %xmm2, %xmm2 vmovdqa %xmm2, 0x150(%rsp) vmovdqa 0x120(%rsp), %xmm2 vmovdqa %xmm2, 0xd60(%rsp) vmovdqa %xmm1, 0xd50(%rsp) vmovdqa 0xd60(%rsp), %xmm1 vmovdqa 0xd50(%rsp), %xmm2 vpandn %xmm2, %xmm1, %xmm1 vmovdqa %xmm1, 0x140(%rsp) vmovdqa 0x150(%rsp), %xmm1 vmovdqa %xmm1, 0xb70(%rsp) movl $0x1d, 0xb6c(%rsp) vmovdqa 0xb70(%rsp), %xmm1 vmovd 0xb6c(%rsp), %xmm2 vpslld %xmm2, %xmm1, %xmm1 vmovdqa %xmm1, 0x150(%rsp) vmovdqa 0x140(%rsp), %xmm1 vmovdqa %xmm1, 0xb50(%rsp) movl $0x1d, 0xb4c(%rsp) vmovdqa 0xb50(%rsp), %xmm1 vmovd 0xb4c(%rsp), %xmm2 vpslld %xmm2, %xmm1, %xmm1 vmovdqa %xmm1, 0x140(%rsp) vmovdqa 0x150(%rsp), %xmm1 vmovdqa %xmm1, 0xc0(%rsp) vmovdqa 0x140(%rsp), %xmm1 vmovdqa %xmm1, 0xd0(%rsp) vmovaps 0xc0(%rsp), %ymm1 vmovaps %ymm1, 0x180(%rsp) vmovdqa 0x130(%rsp), %xmm1 vmovdqa %xmm1, 0xc10(%rsp) vmovdqa %xmm0, 0xc00(%rsp) vmovdqa 0xc10(%rsp), %xmm1 vmovdqa 0xc00(%rsp), %xmm2 vpand %xmm2, %xmm1, %xmm1 vmovdqa %xmm1, 0x130(%rsp) vmovdqa 0x120(%rsp), %xmm1 vmovdqa %xmm1, 0xbf0(%rsp) vmovdqa %xmm0, 0xbe0(%rsp) vmovdqa 0xbf0(%rsp), %xmm0 vmovdqa 0xbe0(%rsp), %xmm1 vpand %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0x120(%rsp) vmovdqa 0x130(%rsp), %xmm2 vxorps %xmm0, %xmm0, %xmm0 vmovdqa %xmm0, 0xcb0(%rsp) vmovdqa 0xcb0(%rsp), %xmm1 vmovdqa %xmm2, 0xc90(%rsp) vmovdqa %xmm1, 0xc80(%rsp) 
vmovdqa 0xc90(%rsp), %xmm1 vmovdqa 0xc80(%rsp), %xmm2 vpcmpeqd %xmm2, %xmm1, %xmm1 vmovdqa %xmm1, 0x130(%rsp) vmovdqa 0x120(%rsp), %xmm1 vmovdqa %xmm0, 0xca0(%rsp) vmovdqa 0xca0(%rsp), %xmm0 vmovdqa %xmm1, 0xc70(%rsp) vmovdqa %xmm0, 0xc60(%rsp) vmovdqa 0xc70(%rsp), %xmm0 vmovdqa 0xc60(%rsp), %xmm1 vpcmpeqd %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0x120(%rsp) vmovdqa 0x130(%rsp), %xmm0 vmovdqa %xmm0, 0xa0(%rsp) vmovdqa 0x120(%rsp), %xmm0 vmovdqa %xmm0, 0xb0(%rsp) vmovaps 0xa0(%rsp), %ymm0 vmovaps %ymm0, 0x160(%rsp) vmovaps 0x180(%rsp), %ymm0 vmovaps %ymm0, 0x8a0(%rsp) vmovaps 0x8a0(%rsp), %ymm0 vmovaps %ymm0, 0x80(%rsp) vmovaps 0x160(%rsp), %ymm0 vmovaps %ymm0, 0x880(%rsp) vmovaps 0x880(%rsp), %ymm0 vmovaps %ymm0, 0x60(%rsp) vmovaps 0x7b797f(%rip), %ymm0 # 0x1e152e0 vmovaps %ymm0, 0x200(%rsp) vmovaps 0x7b798e(%rip), %ymm0 # 0x1e15300 vmovaps %ymm0, 0x1e0(%rsp) vmovaps 0x7b799d(%rip), %ymm0 # 0x1e15320 vmovaps %ymm0, 0x1c0(%rsp) vmovaps 0x1a0(%rsp), %ymm2 vmovaps 0x200(%rsp), %ymm1 vmovaps 0x220(%rsp), %ymm0 vmovaps %ymm2, 0x620(%rsp) vmovaps %ymm1, 0x600(%rsp) vmovaps %ymm0, 0x5e0(%rsp) vmovaps 0x620(%rsp), %ymm2 vmovaps 0x600(%rsp), %ymm1 vmovaps 0x5e0(%rsp), %ymm0 vmovaps %ymm2, 0x900(%rsp) vmovaps %ymm1, 0x8e0(%rsp) vmovaps %ymm0, 0x8c0(%rsp) vmovaps 0x900(%rsp), %ymm1 vmovaps 0x8e0(%rsp), %ymm0 vmovaps 0x8c0(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x220(%rsp) vmovaps 0x1a0(%rsp), %ymm2 vmovaps 0x1e0(%rsp), %ymm1 vmovaps 0x220(%rsp), %ymm0 vmovaps %ymm2, 0x5c0(%rsp) vmovaps %ymm1, 0x5a0(%rsp) vmovaps %ymm0, 0x580(%rsp) vmovaps 0x5c0(%rsp), %ymm2 vmovaps 0x5a0(%rsp), %ymm1 vmovaps 0x580(%rsp), %ymm0 vmovaps %ymm2, 0x960(%rsp) vmovaps %ymm1, 0x940(%rsp) vmovaps %ymm0, 0x920(%rsp) vmovaps 0x960(%rsp), %ymm1 vmovaps 0x940(%rsp), %ymm0 vmovaps 0x920(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x220(%rsp) vmovaps 0x1a0(%rsp), %ymm2 vmovaps 0x1c0(%rsp), %ymm1 vmovaps 0x220(%rsp), 
%ymm0 vmovaps %ymm2, 0x560(%rsp) vmovaps %ymm1, 0x540(%rsp) vmovaps %ymm0, 0x520(%rsp) vmovaps 0x560(%rsp), %ymm2 vmovaps 0x540(%rsp), %ymm1 vmovaps 0x520(%rsp), %ymm0 vmovaps %ymm2, 0x9c0(%rsp) vmovaps %ymm1, 0x9a0(%rsp) vmovaps %ymm0, 0x980(%rsp) vmovaps 0x9c0(%rsp), %ymm1 vmovaps 0x9a0(%rsp), %ymm0 vmovaps 0x980(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x220(%rsp) vmovaps 0x7b77ed(%rip), %ymm0 # 0x1e15340 vmovaps %ymm0, 0x1a0(%rsp) vmovaps 0x220(%rsp), %ymm0 vmovaps %ymm0, 0x400(%rsp) vmovaps %ymm0, 0x3e0(%rsp) vmovaps 0x400(%rsp), %ymm0 vmovaps 0x3e0(%rsp), %ymm1 vmulps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x40(%rsp) vmovaps 0x1a0(%rsp), %ymm1 vmovaps 0x40(%rsp), %ymm0 vmovaps %ymm1, 0x500(%rsp) vmovaps %ymm0, 0x4e0(%rsp) vmovaps 0x7b77a4(%rip), %ymm0 # 0x1e15360 vmovaps %ymm0, 0x4c0(%rsp) vmovaps 0x500(%rsp), %ymm2 vmovaps 0x4e0(%rsp), %ymm1 vmovaps 0x4c0(%rsp), %ymm0 vmovaps %ymm2, 0xa20(%rsp) vmovaps %ymm1, 0xa00(%rsp) vmovaps %ymm0, 0x9e0(%rsp) vmovaps 0xa20(%rsp), %ymm1 vmovaps 0xa00(%rsp), %ymm0 vmovaps 0x9e0(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x1a0(%rsp) vmovaps 0x1a0(%rsp), %ymm1 vmovaps 0x40(%rsp), %ymm0 vmovaps %ymm1, 0x4a0(%rsp) vmovaps %ymm0, 0x480(%rsp) vmovaps 0x7b7733(%rip), %ymm0 # 0x1e15380 vmovaps %ymm0, 0x460(%rsp) vmovaps 0x4a0(%rsp), %ymm2 vmovaps 0x480(%rsp), %ymm1 vmovaps 0x460(%rsp), %ymm0 vmovaps %ymm2, 0xa80(%rsp) vmovaps %ymm1, 0xa60(%rsp) vmovaps %ymm0, 0xa40(%rsp) vmovaps 0xa80(%rsp), %ymm1 vmovaps 0xa60(%rsp), %ymm0 vmovaps 0xa40(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x1a0(%rsp) vmovaps 0x1a0(%rsp), %ymm1 vmovaps 0x40(%rsp), %ymm0 vmovaps %ymm1, 0x3c0(%rsp) vmovaps %ymm0, 0x3a0(%rsp) vmovaps 0x3c0(%rsp), %ymm0 vmovaps 0x3a0(%rsp), %ymm1 vmulps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x1a0(%rsp) vmovaps 0x1a0(%rsp), %ymm1 vmovaps 0x40(%rsp), %ymm0 vmovaps %ymm1, 0x380(%rsp) vmovaps 
%ymm0, 0x360(%rsp) vmovaps 0x380(%rsp), %ymm0 vmovaps 0x360(%rsp), %ymm1 vmulps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x1a0(%rsp) vmovaps 0x40(%rsp), %ymm1 vmovaps 0x1a0(%rsp), %ymm0 vmovaps %ymm1, 0x700(%rsp) vmovaps 0x7a5eab(%rip), %ymm1 # 0x1e03c00 vmovaps %ymm1, 0x6e0(%rsp) vmovaps %ymm0, 0x6c0(%rsp) vmovaps 0x700(%rsp), %ymm2 vmovaps 0x6e0(%rsp), %ymm1 vmovaps 0x6c0(%rsp), %ymm0 vmovaps %ymm2, 0xae0(%rsp) vmovaps %ymm1, 0xac0(%rsp) vmovaps %ymm0, 0xaa0(%rsp) vmovaps 0xae0(%rsp), %ymm1 vmovaps 0xac0(%rsp), %ymm0 vmovaps 0xaa0(%rsp), %ymm2 vfnmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = -(ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x1a0(%rsp) vmovaps 0x1a0(%rsp), %ymm0 vmovaps %ymm0, 0x840(%rsp) vmovaps 0x7a5da0(%rip), %ymm0 # 0x1e03b80 vmovaps %ymm0, 0x820(%rsp) vmovaps 0x840(%rsp), %ymm0 vmovaps 0x820(%rsp), %ymm1 vaddps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x1a0(%rsp) vmovaps 0x7b7590(%rip), %ymm0 # 0x1e153a0 vmovaps %ymm0, 0x20(%rsp) vmovaps 0x20(%rsp), %ymm1 vmovaps 0x40(%rsp), %ymm0 vmovaps %ymm1, 0x340(%rsp) vmovaps %ymm0, 0x320(%rsp) vmovaps 0x340(%rsp), %ymm0 vmovaps 0x320(%rsp), %ymm1 vmulps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x20(%rsp) vmovaps 0x20(%rsp), %ymm0 vmovaps %ymm0, 0x800(%rsp) vmovaps 0x7b7559(%rip), %ymm0 # 0x1e153c0 vmovaps %ymm0, 0x7e0(%rsp) vmovaps 0x800(%rsp), %ymm0 vmovaps 0x7e0(%rsp), %ymm1 vaddps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x20(%rsp) vmovaps 0x20(%rsp), %ymm1 vmovaps 0x40(%rsp), %ymm0 vmovaps %ymm1, 0x300(%rsp) vmovaps %ymm0, 0x2e0(%rsp) vmovaps 0x300(%rsp), %ymm0 vmovaps 0x2e0(%rsp), %ymm1 vmulps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x20(%rsp) vmovaps 0x20(%rsp), %ymm0 vmovaps %ymm0, 0x7c0(%rsp) vmovaps 0x7b7503(%rip), %ymm0 # 0x1e153e0 vmovaps %ymm0, 0x7a0(%rsp) vmovaps 0x7c0(%rsp), %ymm0 vmovaps 0x7a0(%rsp), %ymm1 vaddps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x20(%rsp) vmovaps 0x20(%rsp), %ymm1 vmovaps 0x40(%rsp), %ymm0 vmovaps %ymm1, 0x2c0(%rsp) vmovaps %ymm0, 0x2a0(%rsp) vmovaps 0x2c0(%rsp), %ymm0 vmovaps 0x2a0(%rsp), %ymm1 vmulps %ymm1, 
%ymm0, %ymm0 vmovaps %ymm0, 0x20(%rsp) vmovaps 0x20(%rsp), %ymm1 vmovaps 0x220(%rsp), %ymm0 vmovaps %ymm1, 0x280(%rsp) vmovaps %ymm0, 0x260(%rsp) vmovaps 0x280(%rsp), %ymm0 vmovaps 0x260(%rsp), %ymm1 vmulps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x20(%rsp) vmovaps 0x20(%rsp), %ymm1 vmovaps 0x220(%rsp), %ymm0 vmovaps %ymm1, 0x780(%rsp) vmovaps %ymm0, 0x760(%rsp) vmovaps 0x780(%rsp), %ymm0 vmovaps 0x760(%rsp), %ymm1 vaddps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x20(%rsp) vmovaps 0x60(%rsp), %ymm0 vmovaps %ymm0, 0x1c0(%rsp) vmovaps 0x1c0(%rsp), %ymm1 vmovaps 0x20(%rsp), %ymm0 vmovaps %ymm1, 0x660(%rsp) vmovaps %ymm0, 0x640(%rsp) vmovaps 0x660(%rsp), %ymm0 vmovaps 0x640(%rsp), %ymm1 vandps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x20(%rsp) vmovaps 0x1c0(%rsp), %ymm1 vmovaps 0x1a0(%rsp), %ymm0 vmovaps %ymm1, 0xd20(%rsp) vmovaps %ymm0, 0xd00(%rsp) vmovaps 0xd20(%rsp), %ymm0 vxorps %xmm1, %xmm1, %xmm1 vcmptrueps %ymm1, %ymm1, %ymm1 vxorps %ymm1, %ymm0, %ymm0 vmovaps 0xd00(%rsp), %ymm1 vandps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x1a0(%rsp) vmovaps 0x1a0(%rsp), %ymm1 vmovaps 0x20(%rsp), %ymm0 vmovaps %ymm1, 0x740(%rsp) vmovaps %ymm0, 0x720(%rsp) vmovaps 0x740(%rsp), %ymm0 vaddps 0x720(%rsp), %ymm0, %ymm0 vmovaps %ymm0, 0x1a0(%rsp) vmovaps 0x1a0(%rsp), %ymm1 vmovaps 0x80(%rsp), %ymm0 vmovaps %ymm1, 0xce0(%rsp) vmovaps %ymm0, 0xcc0(%rsp) vmovaps 0xce0(%rsp), %ymm0 vmovaps 0xcc0(%rsp), %ymm1 vxorps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x1a0(%rsp) vmovaps 0x1a0(%rsp), %ymm0 movq %rbp, %rsp popq %rbp retq nop
/ysh329[P]ncnn/build_O0/src/layer/x86/unaryop_x86_fma.cpp
ncnn::UnaryOp_x86_fma_functor::unary_op_tan::func_pack8(float vector[8] const&) const
__m256 func_pack8(const __m256& x) const { return tan256_ps(x); }
pushq %rbp movq %rsp, %rbp andq $-0x20, %rsp subq $0x13a0, %rsp # imm = 0x13A0 movq %rdi, 0x18(%rsp) movq %rsi, 0x10(%rsp) movq 0x10(%rsp), %rax vmovaps (%rax), %ymm0 vmovaps %ymm0, 0xe0(%rsp) movl $0x322bcc77, 0x1bc(%rsp) # imm = 0x322BCC77 vmovss 0x1bc(%rsp), %xmm0 vmovss %xmm0, 0x138c(%rsp) vmovss %xmm0, 0x1388(%rsp) vmovss %xmm0, 0x1384(%rsp) vmovss %xmm0, 0x1380(%rsp) vmovss %xmm0, 0x137c(%rsp) vmovss %xmm0, 0x1378(%rsp) vmovss %xmm0, 0x1374(%rsp) vmovss %xmm0, 0x1370(%rsp) vmovss 0x1374(%rsp), %xmm1 vmovss 0x1370(%rsp), %xmm0 vinsertps $0x10, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[2,3] vmovss 0x1378(%rsp), %xmm1 vinsertps $0x20, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm1[0],xmm0[3] vmovss 0x137c(%rsp), %xmm1 vinsertps $0x30, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1,2],xmm1[0] vmovss 0x1384(%rsp), %xmm2 vmovss 0x1380(%rsp), %xmm1 vinsertps $0x10, %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm2[0],xmm1[2,3] vmovss 0x1388(%rsp), %xmm2 vinsertps $0x20, %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0,1],xmm2[0],xmm1[3] vmovss 0x138c(%rsp), %xmm2 vinsertps $0x30, %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0,1,2],xmm2[0] vmovaps %xmm1, 0x1350(%rsp) vmovaps %xmm0, 0x1340(%rsp) vmovaps 0x1340(%rsp), %ymm0 vmovaps %ymm0, 0x80(%rsp) vmovaps 0xe0(%rsp), %ymm0 vmovaps %ymm0, 0x4c0(%rsp) leaq 0xc0(%rsp), %rax movq %rax, 0x4b8(%rsp) leaq 0xa0(%rsp), %rax movq %rax, 0x4b0(%rsp) vxorps %xmm1, %xmm1, %xmm1 vmovaps %ymm1, 0x560(%rsp) vmovaps 0x560(%rsp), %ymm0 vmovaps %ymm0, 0x440(%rsp) vmovaps 0x4c0(%rsp), %ymm0 vmovaps %ymm0, 0x420(%rsp) vmovaps 0x4c0(%rsp), %ymm0 vmovaps %ymm0, 0xa60(%rsp) vmovaps 0x7b65fc(%rip), %ymm0 # 0x1e152a0 vmovaps %ymm0, 0xa40(%rsp) vmovaps 0xa60(%rsp), %ymm0 vmovaps 0xa40(%rsp), %ymm2 vandps %ymm2, %ymm0, %ymm0 vmovaps %ymm0, 0x4c0(%rsp) vmovaps 0x420(%rsp), %ymm0 vmovaps %ymm0, 0xa20(%rsp) vmovaps 0x7a7eda(%rip), %ymm0 # 0x1e06bc0 vmovaps %ymm0, 0xa00(%rsp) vmovaps 0xa20(%rsp), %ymm0 vmovaps 0xa00(%rsp), %ymm2 vandps %ymm2, %ymm0, %ymm0 vmovaps %ymm0, 0x420(%rsp) 
vmovaps 0x4c0(%rsp), %ymm0 vmovaps %ymm0, 0x6a0(%rsp) vmovaps 0x7b6598(%rip), %ymm0 # 0x1e152c0 vmovaps %ymm0, 0x680(%rsp) vmovaps 0x6a0(%rsp), %ymm0 vmovaps 0x680(%rsp), %ymm2 vmulps %ymm2, %ymm0, %ymm0 vmovaps %ymm0, 0x400(%rsp) vmovaps 0x400(%rsp), %ymm0 vmovaps %ymm0, 0xba0(%rsp) vcvttps2dq 0xba0(%rsp), %ymm0 vmovaps %ymm0, 0x320(%rsp) vmovdqa 0x320(%rsp), %xmm0 vmovdqa %xmm0, 0x370(%rsp) vmovdqa 0x330(%rsp), %xmm0 vmovdqa %xmm0, 0x360(%rsp) vmovdqa 0x370(%rsp), %xmm0 vmovdqa %xmm0, 0xfb0(%rsp) vmovdqa 0x7b4cce(%rip), %xmm0 # 0x1e13a80 vmovdqa %xmm0, 0xfa0(%rsp) vmovdqa 0xfb0(%rsp), %xmm2 vmovdqa 0xfa0(%rsp), %xmm3 vpaddd %xmm3, %xmm2, %xmm2 vmovdqa %xmm2, 0x370(%rsp) vmovdqa 0x360(%rsp), %xmm2 vmovdqa %xmm2, 0xf90(%rsp) vmovdqa %xmm0, 0xf80(%rsp) vmovdqa 0xf90(%rsp), %xmm0 vmovdqa 0xf80(%rsp), %xmm2 vpaddd %xmm2, %xmm0, %xmm0 vmovdqa %xmm0, 0x360(%rsp) vmovdqa 0x370(%rsp), %xmm0 vmovdqa %xmm0, 0x1150(%rsp) vmovdqa 0x7b4c62(%rip), %xmm0 # 0x1e13a90 vmovdqa %xmm0, 0x1140(%rsp) vmovdqa 0x1150(%rsp), %xmm2 vmovdqa 0x1140(%rsp), %xmm3 vpand %xmm3, %xmm2, %xmm2 vmovdqa %xmm2, 0x370(%rsp) vmovdqa 0x360(%rsp), %xmm2 vmovdqa %xmm2, 0x1130(%rsp) vmovdqa %xmm0, 0x1120(%rsp) vmovdqa 0x1130(%rsp), %xmm0 vmovdqa 0x1120(%rsp), %xmm2 vpand %xmm2, %xmm0, %xmm0 vmovdqa %xmm0, 0x360(%rsp) vmovdqa 0x370(%rsp), %xmm0 vmovdqa %xmm0, 0x300(%rsp) vmovdqa 0x360(%rsp), %xmm0 vmovdqa %xmm0, 0x310(%rsp) vmovaps 0x300(%rsp), %ymm0 vmovaps %ymm0, 0x3c0(%rsp) vmovaps 0x3c0(%rsp), %ymm0 vmovaps %ymm0, 0x1040(%rsp) vcvtdq2ps 0x1040(%rsp), %ymm0 vmovaps %ymm0, 0x400(%rsp) vmovdqa 0x370(%rsp), %xmm0 vmovdqa %xmm0, 0x350(%rsp) vmovdqa 0x360(%rsp), %xmm0 vmovdqa %xmm0, 0x340(%rsp) vmovdqa 0x370(%rsp), %xmm0 vmovdqa %xmm0, 0x1110(%rsp) vmovdqa 0x7b4b78(%rip), %xmm0 # 0x1e13aa0 vmovdqa %xmm0, 0x1100(%rsp) vmovdqa 0x1110(%rsp), %xmm2 vmovdqa 0x1100(%rsp), %xmm3 vpand %xmm3, %xmm2, %xmm2 vmovdqa %xmm2, 0x390(%rsp) vmovdqa 0x360(%rsp), %xmm2 vmovdqa %xmm2, 0x10f0(%rsp) vmovdqa %xmm0, 0x10e0(%rsp) 
vmovdqa 0x10f0(%rsp), %xmm2 vmovdqa 0x10e0(%rsp), %xmm3 vpand %xmm3, %xmm2, %xmm2 vmovdqa %xmm2, 0x380(%rsp) vmovdqa 0x390(%rsp), %xmm2 vmovdqa %xmm2, 0x1030(%rsp) movl $0x1d, 0x102c(%rsp) vmovdqa 0x1030(%rsp), %xmm2 vmovd 0x102c(%rsp), %xmm3 vpslld %xmm3, %xmm2, %xmm2 vmovdqa %xmm2, 0x390(%rsp) vmovdqa 0x380(%rsp), %xmm2 vmovdqa %xmm2, 0x1010(%rsp) movl $0x1d, 0x100c(%rsp) vmovdqa 0x1010(%rsp), %xmm2 vmovd 0x100c(%rsp), %xmm3 vpslld %xmm3, %xmm2, %xmm2 vmovdqa %xmm2, 0x380(%rsp) vmovdqa 0x390(%rsp), %xmm2 vmovdqa %xmm2, 0x2e0(%rsp) vmovdqa 0x380(%rsp), %xmm2 vmovdqa %xmm2, 0x2f0(%rsp) vmovaps 0x2e0(%rsp), %ymm2 vmovaps %ymm2, 0x3e0(%rsp) vmovdqa 0x370(%rsp), %xmm2 vmovdqa %xmm2, 0x10d0(%rsp) vmovdqa 0x7b4a5e(%rip), %xmm2 # 0x1e13ab0 vmovdqa %xmm2, 0x10c0(%rsp) vmovdqa 0x10d0(%rsp), %xmm3 vmovdqa 0x10c0(%rsp), %xmm4 vpand %xmm4, %xmm3, %xmm3 vmovdqa %xmm3, 0x370(%rsp) vmovdqa 0x360(%rsp), %xmm3 vmovdqa %xmm3, 0x10b0(%rsp) vmovdqa %xmm2, 0x10a0(%rsp) vmovdqa 0x10b0(%rsp), %xmm3 vmovdqa 0x10a0(%rsp), %xmm4 vpand %xmm4, %xmm3, %xmm3 vmovdqa %xmm3, 0x360(%rsp) vmovdqa 0x370(%rsp), %xmm5 vxorps %xmm3, %xmm3, %xmm3 vmovdqa %xmm3, 0x11b0(%rsp) vmovdqa 0x11b0(%rsp), %xmm4 vmovdqa %xmm5, 0x1190(%rsp) vmovdqa %xmm4, 0x1180(%rsp) vmovdqa 0x1190(%rsp), %xmm4 vmovdqa 0x1180(%rsp), %xmm5 vpcmpeqd %xmm5, %xmm4, %xmm4 vmovdqa %xmm4, 0x370(%rsp) vmovdqa 0x360(%rsp), %xmm4 vmovdqa %xmm3, 0x11a0(%rsp) vmovdqa 0x11a0(%rsp), %xmm3 vmovdqa %xmm4, 0x1170(%rsp) vmovdqa %xmm3, 0x1160(%rsp) vmovdqa 0x1170(%rsp), %xmm3 vmovdqa 0x1160(%rsp), %xmm4 vpcmpeqd %xmm4, %xmm3, %xmm3 vmovdqa %xmm3, 0x360(%rsp) vmovdqa 0x370(%rsp), %xmm3 vmovdqa %xmm3, 0x2c0(%rsp) vmovdqa 0x360(%rsp), %xmm3 vmovdqa %xmm3, 0x2d0(%rsp) vmovaps 0x2c0(%rsp), %ymm3 vmovaps %ymm3, 0x3c0(%rsp) vmovaps 0x3e0(%rsp), %ymm3 vmovaps %ymm3, 0xc00(%rsp) vmovaps 0xc00(%rsp), %ymm3 vmovaps %ymm3, 0x2a0(%rsp) vmovaps 0x3c0(%rsp), %ymm3 vmovaps %ymm3, 0xbe0(%rsp) vmovaps 0xbe0(%rsp), %ymm3 vmovaps %ymm3, 0x280(%rsp) vmovaps 
0x7b610a(%rip), %ymm3 # 0x1e152e0 vmovaps %ymm3, 0x480(%rsp) vmovaps 0x7b6119(%rip), %ymm3 # 0x1e15300 vmovaps %ymm3, 0x460(%rsp) vmovaps 0x7b6128(%rip), %ymm3 # 0x1e15320 vmovaps %ymm3, 0x440(%rsp) vmovaps 0x400(%rsp), %ymm5 vmovaps 0x480(%rsp), %ymm4 vmovaps 0x4c0(%rsp), %ymm3 vmovaps %ymm5, 0x9a0(%rsp) vmovaps %ymm4, 0x980(%rsp) vmovaps %ymm3, 0x960(%rsp) vmovaps 0x9a0(%rsp), %ymm5 vmovaps 0x980(%rsp), %ymm4 vmovaps 0x960(%rsp), %ymm3 vmovaps %ymm5, 0xc60(%rsp) vmovaps %ymm4, 0xc40(%rsp) vmovaps %ymm3, 0xc20(%rsp) vmovaps 0xc60(%rsp), %ymm4 vmovaps 0xc40(%rsp), %ymm3 vmovaps 0xc20(%rsp), %ymm5 vfmadd213ps %ymm5, %ymm4, %ymm3 # ymm3 = (ymm4 * ymm3) + ymm5 vmovaps %ymm3, 0x4c0(%rsp) vmovaps 0x400(%rsp), %ymm5 vmovaps 0x460(%rsp), %ymm4 vmovaps 0x4c0(%rsp), %ymm3 vmovaps %ymm5, 0x940(%rsp) vmovaps %ymm4, 0x920(%rsp) vmovaps %ymm3, 0x900(%rsp) vmovaps 0x940(%rsp), %ymm5 vmovaps 0x920(%rsp), %ymm4 vmovaps 0x900(%rsp), %ymm3 vmovaps %ymm5, 0xcc0(%rsp) vmovaps %ymm4, 0xca0(%rsp) vmovaps %ymm3, 0xc80(%rsp) vmovaps 0xcc0(%rsp), %ymm4 vmovaps 0xca0(%rsp), %ymm3 vmovaps 0xc80(%rsp), %ymm5 vfmadd213ps %ymm5, %ymm4, %ymm3 # ymm3 = (ymm4 * ymm3) + ymm5 vmovaps %ymm3, 0x4c0(%rsp) vmovaps 0x400(%rsp), %ymm5 vmovaps 0x440(%rsp), %ymm4 vmovaps 0x4c0(%rsp), %ymm3 vmovaps %ymm5, 0x8e0(%rsp) vmovaps %ymm4, 0x8c0(%rsp) vmovaps %ymm3, 0x8a0(%rsp) vmovaps 0x8e0(%rsp), %ymm5 vmovaps 0x8c0(%rsp), %ymm4 vmovaps 0x8a0(%rsp), %ymm3 vmovaps %ymm5, 0xd20(%rsp) vmovaps %ymm4, 0xd00(%rsp) vmovaps %ymm3, 0xce0(%rsp) vmovaps 0xd20(%rsp), %ymm4 vmovaps 0xd00(%rsp), %ymm3 vmovaps 0xce0(%rsp), %ymm5 vfmadd213ps %ymm5, %ymm4, %ymm3 # ymm3 = (ymm4 * ymm3) + ymm5 vmovaps %ymm3, 0x4c0(%rsp) vmovdqa 0x350(%rsp), %xmm3 vmovdqa %xmm3, 0x1090(%rsp) vmovdqa %xmm2, 0x1080(%rsp) vmovdqa 0x1090(%rsp), %xmm3 vmovdqa 0x1080(%rsp), %xmm4 vpsubd %xmm4, %xmm3, %xmm3 vmovdqa %xmm3, 0x350(%rsp) vmovdqa 0x340(%rsp), %xmm3 vmovdqa %xmm3, 0x1070(%rsp) vmovdqa %xmm2, 0x1060(%rsp) vmovdqa 0x1070(%rsp), %xmm2 vmovdqa 
0x1060(%rsp), %xmm3 vpsubd %xmm3, %xmm2, %xmm2 vmovdqa %xmm2, 0x340(%rsp) vmovdqa 0x350(%rsp), %xmm2 vmovdqa %xmm2, 0x12f0(%rsp) vmovdqa %xmm0, 0x12e0(%rsp) vmovdqa 0x12f0(%rsp), %xmm2 vmovdqa 0x12e0(%rsp), %xmm3 vpandn %xmm3, %xmm2, %xmm2 vmovdqa %xmm2, 0x350(%rsp) vmovdqa 0x340(%rsp), %xmm2 vmovdqa %xmm2, 0x12d0(%rsp) vmovdqa %xmm0, 0x12c0(%rsp) vmovdqa 0x12d0(%rsp), %xmm0 vmovdqa 0x12c0(%rsp), %xmm2 vpandn %xmm2, %xmm0, %xmm0 vmovdqa %xmm0, 0x340(%rsp) vmovdqa 0x350(%rsp), %xmm0 vmovdqa %xmm0, 0xff0(%rsp) movl $0x1d, 0xfec(%rsp) vmovdqa 0xff0(%rsp), %xmm0 vmovd 0xfec(%rsp), %xmm2 vpslld %xmm2, %xmm0, %xmm0 vmovdqa %xmm0, 0x350(%rsp) vmovdqa 0x340(%rsp), %xmm0 vmovdqa %xmm0, 0xfd0(%rsp) movl $0x1d, 0xfcc(%rsp) vmovdqa 0xfd0(%rsp), %xmm0 vmovd 0xfcc(%rsp), %xmm2 vpslld %xmm2, %xmm0, %xmm0 vmovdqa %xmm0, 0x340(%rsp) vmovdqa 0x350(%rsp), %xmm0 vmovdqa %xmm0, 0x260(%rsp) vmovdqa 0x340(%rsp), %xmm0 vmovdqa %xmm0, 0x270(%rsp) vmovaps 0x260(%rsp), %ymm0 vmovaps %ymm0, 0x3a0(%rsp) vmovaps 0x3a0(%rsp), %ymm0 vmovaps %ymm0, 0xbc0(%rsp) vmovaps 0xbc0(%rsp), %ymm0 vmovaps %ymm0, 0x240(%rsp) vmovaps 0x420(%rsp), %ymm2 vmovaps 0x2a0(%rsp), %ymm0 vmovaps %ymm2, 0x1260(%rsp) vmovaps %ymm0, 0x1240(%rsp) vmovaps 0x1260(%rsp), %ymm0 vmovaps 0x1240(%rsp), %ymm2 vxorps %ymm2, %ymm0, %ymm0 vmovaps %ymm0, 0x420(%rsp) vmovaps 0x4c0(%rsp), %ymm0 vmovaps %ymm0, 0x660(%rsp) vmovaps %ymm0, 0x640(%rsp) vmovaps 0x660(%rsp), %ymm0 vmovaps 0x640(%rsp), %ymm2 vmulps %ymm2, %ymm0, %ymm0 vmovaps %ymm0, 0x220(%rsp) vmovaps 0x7b5d41(%rip), %ymm0 # 0x1e15340 vmovaps %ymm0, 0x400(%rsp) vmovaps 0x400(%rsp), %ymm2 vmovaps 0x220(%rsp), %ymm0 vmovaps %ymm2, 0x880(%rsp) vmovaps %ymm0, 0x860(%rsp) vmovaps 0x7b5d2c(%rip), %ymm0 # 0x1e15360 vmovaps %ymm0, 0x840(%rsp) vmovaps 0x880(%rsp), %ymm3 vmovaps 0x860(%rsp), %ymm2 vmovaps 0x840(%rsp), %ymm0 vmovaps %ymm3, 0xd80(%rsp) vmovaps %ymm2, 0xd60(%rsp) vmovaps %ymm0, 0xd40(%rsp) vmovaps 0xd80(%rsp), %ymm2 vmovaps 0xd60(%rsp), %ymm0 vmovaps 0xd40(%rsp), %ymm3 
vfmadd213ps %ymm3, %ymm2, %ymm0 # ymm0 = (ymm2 * ymm0) + ymm3 vmovaps %ymm0, 0x400(%rsp) vmovaps 0x400(%rsp), %ymm2 vmovaps 0x220(%rsp), %ymm0 vmovaps %ymm2, 0x820(%rsp) vmovaps %ymm0, 0x800(%rsp) vmovaps 0x7b5cb8(%rip), %ymm0 # 0x1e15380 vmovaps %ymm0, 0x7e0(%rsp) vmovaps 0x820(%rsp), %ymm3 vmovaps 0x800(%rsp), %ymm2 vmovaps 0x7e0(%rsp), %ymm0 vmovaps %ymm3, 0xde0(%rsp) vmovaps %ymm2, 0xdc0(%rsp) vmovaps %ymm0, 0xda0(%rsp) vmovaps 0xde0(%rsp), %ymm2 vmovaps 0xdc0(%rsp), %ymm0 vmovaps 0xda0(%rsp), %ymm3 vfmadd213ps %ymm3, %ymm2, %ymm0 # ymm0 = (ymm2 * ymm0) + ymm3 vmovaps %ymm0, 0x400(%rsp) vmovaps 0x400(%rsp), %ymm2 vmovaps 0x220(%rsp), %ymm0 vmovaps %ymm2, 0x620(%rsp) vmovaps %ymm0, 0x600(%rsp) vmovaps 0x620(%rsp), %ymm0 vmovaps 0x600(%rsp), %ymm2 vmulps %ymm2, %ymm0, %ymm0 vmovaps %ymm0, 0x400(%rsp) vmovaps 0x400(%rsp), %ymm2 vmovaps 0x220(%rsp), %ymm0 vmovaps %ymm2, 0x5e0(%rsp) vmovaps %ymm0, 0x5c0(%rsp) vmovaps 0x5e0(%rsp), %ymm0 vmovaps 0x5c0(%rsp), %ymm2 vmulps %ymm2, %ymm0, %ymm0 vmovaps %ymm0, 0x400(%rsp) vmovaps 0x220(%rsp), %ymm2 vmovaps 0x400(%rsp), %ymm0 vmovaps %ymm2, 0xac0(%rsp) vmovaps 0x7a4427(%rip), %ymm2 # 0x1e03c00 vmovaps %ymm2, 0xaa0(%rsp) vmovaps %ymm0, 0xa80(%rsp) vmovaps 0xac0(%rsp), %ymm3 vmovaps 0xaa0(%rsp), %ymm2 vmovaps 0xa80(%rsp), %ymm0 vmovaps %ymm3, 0xf60(%rsp) vmovaps %ymm2, 0xf40(%rsp) vmovaps %ymm0, 0xf20(%rsp) vmovaps 0xf60(%rsp), %ymm2 vmovaps 0xf40(%rsp), %ymm0 vmovaps 0xf20(%rsp), %ymm3 vfnmadd213ps %ymm3, %ymm2, %ymm0 # ymm0 = -(ymm2 * ymm0) + ymm3 vmovaps %ymm0, 0x400(%rsp) vmovaps 0x400(%rsp), %ymm0 vmovaps %ymm0, 0xb80(%rsp) vmovaps 0x7a431c(%rip), %ymm0 # 0x1e03b80 vmovaps %ymm0, 0xb60(%rsp) vmovaps 0xb80(%rsp), %ymm0 vmovaps 0xb60(%rsp), %ymm2 vaddps %ymm2, %ymm0, %ymm0 vmovaps %ymm0, 0x400(%rsp) vmovaps 0x7b5b0c(%rip), %ymm0 # 0x1e153a0 vmovaps %ymm0, 0x200(%rsp) vmovaps 0x200(%rsp), %ymm2 vmovaps 0x220(%rsp), %ymm0 vmovaps %ymm2, 0x7c0(%rsp) vmovaps %ymm0, 0x7a0(%rsp) vmovaps 0x7b5af7(%rip), %ymm0 # 0x1e153c0 vmovaps 
%ymm0, 0x780(%rsp) vmovaps 0x7c0(%rsp), %ymm3 vmovaps 0x7a0(%rsp), %ymm2 vmovaps 0x780(%rsp), %ymm0 vmovaps %ymm3, 0xe40(%rsp) vmovaps %ymm2, 0xe20(%rsp) vmovaps %ymm0, 0xe00(%rsp) vmovaps 0xe40(%rsp), %ymm2 vmovaps 0xe20(%rsp), %ymm0 vmovaps 0xe00(%rsp), %ymm3 vfmadd213ps %ymm3, %ymm2, %ymm0 # ymm0 = (ymm2 * ymm0) + ymm3 vmovaps %ymm0, 0x200(%rsp) vmovaps 0x200(%rsp), %ymm2 vmovaps 0x220(%rsp), %ymm0 vmovaps %ymm2, 0x760(%rsp) vmovaps %ymm0, 0x740(%rsp) vmovaps 0x7b5a83(%rip), %ymm0 # 0x1e153e0 vmovaps %ymm0, 0x720(%rsp) vmovaps 0x760(%rsp), %ymm3 vmovaps 0x740(%rsp), %ymm2 vmovaps 0x720(%rsp), %ymm0 vmovaps %ymm3, 0xea0(%rsp) vmovaps %ymm2, 0xe80(%rsp) vmovaps %ymm0, 0xe60(%rsp) vmovaps 0xea0(%rsp), %ymm2 vmovaps 0xe80(%rsp), %ymm0 vmovaps 0xe60(%rsp), %ymm3 vfmadd213ps %ymm3, %ymm2, %ymm0 # ymm0 = (ymm2 * ymm0) + ymm3 vmovaps %ymm0, 0x200(%rsp) vmovaps 0x200(%rsp), %ymm2 vmovaps 0x220(%rsp), %ymm0 vmovaps %ymm2, 0x5a0(%rsp) vmovaps %ymm0, 0x580(%rsp) vmovaps 0x5a0(%rsp), %ymm0 vmovaps 0x580(%rsp), %ymm2 vmulps %ymm2, %ymm0, %ymm0 vmovaps %ymm0, 0x200(%rsp) vmovaps 0x200(%rsp), %ymm2 vmovaps 0x4c0(%rsp), %ymm0 vmovaps %ymm2, 0x700(%rsp) vmovaps %ymm0, 0x6e0(%rsp) vmovaps %ymm0, 0x6c0(%rsp) vmovaps 0x700(%rsp), %ymm3 vmovaps 0x6e0(%rsp), %ymm2 vmovaps 0x6c0(%rsp), %ymm0 vmovaps %ymm3, 0xf00(%rsp) vmovaps %ymm2, 0xee0(%rsp) vmovaps %ymm0, 0xec0(%rsp) vmovaps 0xf00(%rsp), %ymm2 vmovaps 0xee0(%rsp), %ymm0 vmovaps 0xec0(%rsp), %ymm3 vfmadd213ps %ymm3, %ymm2, %ymm0 # ymm0 = (ymm2 * ymm0) + ymm3 vmovaps %ymm0, 0x200(%rsp) vmovaps 0x280(%rsp), %ymm0 vmovaps %ymm0, 0x440(%rsp) vmovaps 0x440(%rsp), %ymm2 vmovaps 0x200(%rsp), %ymm0 vmovaps %ymm2, 0x9e0(%rsp) vmovaps %ymm0, 0x9c0(%rsp) vmovaps 0x9e0(%rsp), %ymm0 vmovaps 0x9c0(%rsp), %ymm2 vandps %ymm2, %ymm0, %ymm0 vmovaps %ymm0, 0x1e0(%rsp) vmovaps 0x440(%rsp), %ymm2 vmovaps 0x400(%rsp), %ymm0 vmovaps %ymm2, 0x12a0(%rsp) vmovaps %ymm0, 0x1280(%rsp) vmovaps 0x12a0(%rsp), %ymm0 vmovaps 0x1280(%rsp), %ymm2 vandnps %ymm2, 
%ymm0, %ymm0 vmovaps %ymm0, 0x1c0(%rsp) vmovaps 0x200(%rsp), %ymm2 vmovaps 0x1e0(%rsp), %ymm0 vmovaps %ymm2, 0x540(%rsp) vmovaps %ymm0, 0x520(%rsp) vmovaps 0x540(%rsp), %ymm0 vmovaps 0x520(%rsp), %ymm2 vsubps %ymm2, %ymm0, %ymm0 vmovaps %ymm0, 0x200(%rsp) vmovaps 0x400(%rsp), %ymm2 vmovaps 0x1c0(%rsp), %ymm0 vmovaps %ymm2, 0x500(%rsp) vmovaps %ymm0, 0x4e0(%rsp) vmovaps 0x500(%rsp), %ymm0 vmovaps 0x4e0(%rsp), %ymm2 vsubps %ymm2, %ymm0, %ymm0 vmovaps %ymm0, 0x400(%rsp) vmovaps 0x1c0(%rsp), %ymm2 vmovaps 0x1e0(%rsp), %ymm0 vmovaps %ymm2, 0xb40(%rsp) vmovaps %ymm0, 0xb20(%rsp) vmovaps 0xb40(%rsp), %ymm0 vmovaps 0xb20(%rsp), %ymm2 vaddps %ymm2, %ymm0, %ymm0 vmovaps %ymm0, 0x480(%rsp) vmovaps 0x400(%rsp), %ymm2 vmovaps 0x200(%rsp), %ymm0 vmovaps %ymm2, 0xb00(%rsp) vmovaps %ymm0, 0xae0(%rsp) vmovaps 0xb00(%rsp), %ymm0 vmovaps 0xae0(%rsp), %ymm2 vaddps %ymm2, %ymm0, %ymm0 vmovaps %ymm0, 0x460(%rsp) vmovaps 0x480(%rsp), %ymm2 vmovaps 0x420(%rsp), %ymm0 vmovaps %ymm2, 0x1220(%rsp) vmovaps %ymm0, 0x1200(%rsp) vmovaps 0x1220(%rsp), %ymm0 vmovaps 0x1200(%rsp), %ymm2 vxorps %ymm2, %ymm0, %ymm0 movq 0x4b8(%rsp), %rax vmovaps %ymm0, (%rax) vmovaps 0x460(%rsp), %ymm2 vmovaps 0x240(%rsp), %ymm0 vmovaps %ymm2, 0x11e0(%rsp) vmovaps %ymm0, 0x11c0(%rsp) vmovaps 0x11e0(%rsp), %ymm0 vmovaps 0x11c0(%rsp), %ymm2 vxorps %ymm2, %ymm0, %ymm0 movq 0x4b0(%rsp), %rax vmovaps %ymm0, (%rax) vmovaps 0xa0(%rsp), %ymm0 vmovaps %ymm1, 0x100(%rsp) vmovaps 0x100(%rsp), %ymm1 vcmpeqps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x60(%rsp) vmovaps 0x80(%rsp), %ymm1 vmovaps 0x60(%rsp), %ymm0 vmovaps %ymm1, 0x140(%rsp) vmovaps %ymm0, 0x120(%rsp) vmovaps 0x140(%rsp), %ymm0 vmovaps 0x120(%rsp), %ymm1 vandps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x40(%rsp) vmovaps 0xa0(%rsp), %ymm1 vmovaps 0x40(%rsp), %ymm0 vmovaps %ymm1, 0x180(%rsp) vmovaps %ymm0, 0x160(%rsp) vmovaps 0x180(%rsp), %ymm0 vaddps 0x160(%rsp), %ymm0, %ymm0 vmovaps %ymm0, 0xa0(%rsp) vmovaps 0xc0(%rsp), %ymm1 vmovaps 0xa0(%rsp), %ymm0 vmovaps %ymm1, 0x1320(%rsp) 
vmovaps %ymm0, 0x1300(%rsp) vmovaps 0x1320(%rsp), %ymm0 vdivps 0x1300(%rsp), %ymm0, %ymm0 vmovaps %ymm0, 0x20(%rsp) vmovaps 0x20(%rsp), %ymm0 movq %rbp, %rsp popq %rbp retq nopw (%rax,%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/unaryop_x86_fma.cpp
ncnn::UnaryOp_x86_fma_functor::unary_op_tan::func_pack4(float vector[4] const&) const
__m128 func_pack4(const __m128& x) const { return tan_ps(x); }
subq $0x8d8, %rsp # imm = 0x8D8 movq %rdi, -0x78(%rsp) movq %rsi, -0x80(%rsp) movq -0x80(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, -0x10(%rsp) movl $0x322bcc77, 0x1c(%rsp) # imm = 0x322BCC77 vbroadcastss 0x1c(%rsp), %xmm0 vmovaps %xmm0, (%rsp) vmovaps (%rsp), %xmm0 vmovaps %xmm0, -0x40(%rsp) vmovaps -0x10(%rsp), %xmm0 vmovaps %xmm0, 0x170(%rsp) leaq -0x20(%rsp), %rax movq %rax, 0x168(%rsp) leaq -0x30(%rsp), %rax movq %rax, 0x160(%rsp) vxorps %xmm0, %xmm0, %xmm0 vmovaps %xmm0, 0x1e0(%rsp) vmovaps 0x1e0(%rsp), %xmm1 vmovaps %xmm1, 0x130(%rsp) vmovaps 0x170(%rsp), %xmm1 vmovaps %xmm1, 0x120(%rsp) vmovaps 0x170(%rsp), %xmm1 vmovaps %xmm1, 0x3c0(%rsp) vmovaps 0x7af33d(%rip), %xmm1 # 0x1e0f1a0 vmovaps %xmm1, 0x3b0(%rsp) vmovdqa 0x3c0(%rsp), %xmm1 vmovdqa 0x3b0(%rsp), %xmm2 vpand %xmm2, %xmm1, %xmm1 vmovdqa %xmm1, 0x170(%rsp) vmovaps 0x120(%rsp), %xmm1 vmovaps %xmm1, 0x3a0(%rsp) vmovaps 0x7a6d3b(%rip), %xmm1 # 0x1e06be0 vmovaps %xmm1, 0x390(%rsp) vmovdqa 0x3a0(%rsp), %xmm1 vmovdqa 0x390(%rsp), %xmm2 vpand %xmm2, %xmm1, %xmm1 vmovdqa %xmm1, 0x120(%rsp) vmovaps 0x170(%rsp), %xmm1 vmovaps %xmm1, 0x280(%rsp) vmovaps 0x7b3b89(%rip), %xmm1 # 0x1e13a70 vmovaps %xmm1, 0x270(%rsp) vmovaps 0x280(%rsp), %xmm1 vmovaps 0x270(%rsp), %xmm2 vmulps %xmm2, %xmm1, %xmm1 vmovaps %xmm1, 0x110(%rsp) vmovaps 0x110(%rsp), %xmm1 vmovaps %xmm1, 0x350(%rsp) vcvttps2dq 0x350(%rsp), %xmm1 vmovdqa %xmm1, 0xf0(%rsp) vmovdqa 0xf0(%rsp), %xmm1 vmovdqa %xmm1, 0x2a0(%rsp) vmovdqa 0x7b3b33(%rip), %xmm1 # 0x1e13a80 vmovdqa %xmm1, 0x290(%rsp) vmovdqa 0x2a0(%rsp), %xmm1 vmovdqa 0x290(%rsp), %xmm2 vpaddd %xmm2, %xmm1, %xmm1 vmovdqa %xmm1, 0xf0(%rsp) vmovdqa 0xf0(%rsp), %xmm1 vmovdqa %xmm1, 0x7d0(%rsp) vmovdqa 0x7b3b01(%rip), %xmm1 # 0x1e13a90 vmovdqa %xmm1, 0x7c0(%rsp) vmovdqa 0x7d0(%rsp), %xmm1 vmovdqa 0x7c0(%rsp), %xmm2 vpand %xmm2, %xmm1, %xmm1 vmovdqa %xmm1, 0xf0(%rsp) vmovdqa 0xf0(%rsp), %xmm1 vmovdqa %xmm1, 0x360(%rsp) vcvtdq2ps 0x360(%rsp), %xmm1 vmovaps %xmm1, 0x110(%rsp) vmovdqa 0xf0(%rsp), 
%xmm1 vmovdqa %xmm1, 0xe0(%rsp) vmovdqa 0xf0(%rsp), %xmm1 vmovdqa %xmm1, 0x7b0(%rsp) vmovdqa 0x7b3a99(%rip), %xmm1 # 0x1e13aa0 vmovdqa %xmm1, 0x7a0(%rsp) vmovdqa 0x7b0(%rsp), %xmm2 vmovdqa 0x7a0(%rsp), %xmm3 vpand %xmm3, %xmm2, %xmm2 vmovdqa %xmm2, 0x100(%rsp) vmovdqa 0x100(%rsp), %xmm2 vmovdqa %xmm2, 0x2e0(%rsp) movl $0x1d, 0x2dc(%rsp) vmovdqa 0x2e0(%rsp), %xmm2 vmovd 0x2dc(%rsp), %xmm3 vpslld %xmm3, %xmm2, %xmm2 vmovdqa %xmm2, 0x100(%rsp) vmovdqa 0x100(%rsp), %xmm2 vmovdqa %xmm2, 0x5a0(%rsp) vmovdqa 0x5a0(%rsp), %xmm2 vmovdqa %xmm2, 0xd0(%rsp) vmovdqa 0xf0(%rsp), %xmm2 vmovdqa %xmm2, 0x790(%rsp) vmovdqa 0x7b3a07(%rip), %xmm2 # 0x1e13ab0 vmovdqa %xmm2, 0x780(%rsp) vmovdqa 0x790(%rsp), %xmm3 vmovdqa 0x780(%rsp), %xmm4 vpand %xmm4, %xmm3, %xmm3 vmovdqa %xmm3, 0xf0(%rsp) vmovdqa 0xf0(%rsp), %xmm4 vxorps %xmm3, %xmm3, %xmm3 vmovdqa %xmm3, 0x800(%rsp) vmovdqa 0x800(%rsp), %xmm3 vmovdqa %xmm4, 0x7f0(%rsp) vmovdqa %xmm3, 0x7e0(%rsp) vmovdqa 0x7f0(%rsp), %xmm3 vmovdqa 0x7e0(%rsp), %xmm4 vpcmpeqd %xmm4, %xmm3, %xmm3 vmovdqa %xmm3, 0xf0(%rsp) vmovdqa 0xf0(%rsp), %xmm3 vmovdqa %xmm3, 0x590(%rsp) vmovdqa 0x590(%rsp), %xmm3 vmovdqa %xmm3, 0xc0(%rsp) vmovaps 0x7b3973(%rip), %xmm3 # 0x1e13ac0 vmovaps %xmm3, 0x150(%rsp) vmovaps 0x7b3972(%rip), %xmm3 # 0x1e13ad0 vmovaps %xmm3, 0x140(%rsp) vmovaps 0x7b3971(%rip), %xmm3 # 0x1e13ae0 vmovaps %xmm3, 0x130(%rsp) vmovaps 0x110(%rsp), %xmm5 vmovaps 0x150(%rsp), %xmm4 vmovaps 0x170(%rsp), %xmm3 vmovaps %xmm5, 0x570(%rsp) vmovaps %xmm4, 0x560(%rsp) vmovaps %xmm3, 0x550(%rsp) vmovaps 0x570(%rsp), %xmm5 vmovaps 0x560(%rsp), %xmm4 vmovaps 0x550(%rsp), %xmm3 vmovaps %xmm5, 0x600(%rsp) vmovaps %xmm4, 0x5f0(%rsp) vmovaps %xmm3, 0x5e0(%rsp) vmovaps 0x600(%rsp), %xmm4 vmovaps 0x5f0(%rsp), %xmm3 vmovaps 0x5e0(%rsp), %xmm5 vfmadd213ps %xmm5, %xmm4, %xmm3 # xmm3 = (xmm4 * xmm3) + xmm5 vmovaps %xmm3, 0x170(%rsp) vmovaps 0x110(%rsp), %xmm5 vmovaps 0x140(%rsp), %xmm4 vmovaps 0x170(%rsp), %xmm3 vmovaps %xmm5, 0x540(%rsp) vmovaps %xmm4, 0x530(%rsp) vmovaps 
%xmm3, 0x520(%rsp) vmovaps 0x540(%rsp), %xmm5 vmovaps 0x530(%rsp), %xmm4 vmovaps 0x520(%rsp), %xmm3 vmovaps %xmm5, 0x630(%rsp) vmovaps %xmm4, 0x620(%rsp) vmovaps %xmm3, 0x610(%rsp) vmovaps 0x630(%rsp), %xmm4 vmovaps 0x620(%rsp), %xmm3 vmovaps 0x610(%rsp), %xmm5 vfmadd213ps %xmm5, %xmm4, %xmm3 # xmm3 = (xmm4 * xmm3) + xmm5 vmovaps %xmm3, 0x170(%rsp) vmovaps 0x110(%rsp), %xmm5 vmovaps 0x130(%rsp), %xmm4 vmovaps 0x170(%rsp), %xmm3 vmovaps %xmm5, 0x510(%rsp) vmovaps %xmm4, 0x500(%rsp) vmovaps %xmm3, 0x4f0(%rsp) vmovaps 0x510(%rsp), %xmm5 vmovaps 0x500(%rsp), %xmm4 vmovaps 0x4f0(%rsp), %xmm3 vmovaps %xmm5, 0x660(%rsp) vmovaps %xmm4, 0x650(%rsp) vmovaps %xmm3, 0x640(%rsp) vmovaps 0x660(%rsp), %xmm4 vmovaps 0x650(%rsp), %xmm3 vmovaps 0x640(%rsp), %xmm5 vfmadd213ps %xmm5, %xmm4, %xmm3 # xmm3 = (xmm4 * xmm3) + xmm5 vmovaps %xmm3, 0x170(%rsp) vmovdqa 0xe0(%rsp), %xmm3 vmovdqa %xmm3, 0x770(%rsp) vmovdqa %xmm2, 0x760(%rsp) vmovdqa 0x770(%rsp), %xmm2 vmovdqa 0x760(%rsp), %xmm3 vpsubd %xmm3, %xmm2, %xmm2 vmovdqa %xmm2, 0xe0(%rsp) vmovdqa 0xe0(%rsp), %xmm2 vmovdqa %xmm2, 0x880(%rsp) vmovdqa %xmm1, 0x870(%rsp) vmovdqa 0x880(%rsp), %xmm1 vmovdqa 0x870(%rsp), %xmm2 vpandn %xmm2, %xmm1, %xmm1 vmovdqa %xmm1, 0xe0(%rsp) vmovdqa 0xe0(%rsp), %xmm1 vmovdqa %xmm1, 0x2c0(%rsp) movl $0x1d, 0x2bc(%rsp) vmovdqa 0x2c0(%rsp), %xmm1 vmovd 0x2bc(%rsp), %xmm2 vpslld %xmm2, %xmm1, %xmm1 vmovdqa %xmm1, 0xe0(%rsp) vmovdqa 0xe0(%rsp), %xmm1 vmovdqa %xmm1, 0x580(%rsp) vmovdqa 0x580(%rsp), %xmm1 vmovdqa %xmm1, 0xb0(%rsp) vmovaps 0x120(%rsp), %xmm2 vmovaps 0xd0(%rsp), %xmm1 vmovaps %xmm2, 0x860(%rsp) vmovaps %xmm1, 0x850(%rsp) vmovdqa 0x860(%rsp), %xmm1 vmovdqa 0x850(%rsp), %xmm2 vpxor %xmm2, %xmm1, %xmm1 vmovdqa %xmm1, 0x120(%rsp) vmovaps 0x170(%rsp), %xmm1 vmovaps %xmm1, 0x260(%rsp) vmovaps %xmm1, 0x250(%rsp) vmovaps 0x260(%rsp), %xmm1 vmovaps 0x250(%rsp), %xmm2 vmulps %xmm2, %xmm1, %xmm1 vmovaps %xmm1, 0xa0(%rsp) vmovaps 0x7b3660(%rip), %xmm1 # 0x1e13af0 vmovaps %xmm1, 0x110(%rsp) vmovaps 0x110(%rsp), 
%xmm2 vmovaps 0xa0(%rsp), %xmm1 vmovaps %xmm2, 0x4e0(%rsp) vmovaps %xmm1, 0x4d0(%rsp) vmovaps 0x7b363b(%rip), %xmm1 # 0x1e13b00 vmovaps %xmm1, 0x4c0(%rsp) vmovaps 0x4e0(%rsp), %xmm3 vmovaps 0x4d0(%rsp), %xmm2 vmovaps 0x4c0(%rsp), %xmm1 vmovaps %xmm3, 0x690(%rsp) vmovaps %xmm2, 0x680(%rsp) vmovaps %xmm1, 0x670(%rsp) vmovaps 0x690(%rsp), %xmm2 vmovaps 0x680(%rsp), %xmm1 vmovaps 0x670(%rsp), %xmm3 vfmadd213ps %xmm3, %xmm2, %xmm1 # xmm1 = (xmm2 * xmm1) + xmm3 vmovaps %xmm1, 0x110(%rsp) vmovaps 0x110(%rsp), %xmm2 vmovaps 0xa0(%rsp), %xmm1 vmovaps %xmm2, 0x4b0(%rsp) vmovaps %xmm1, 0x4a0(%rsp) vmovaps 0x7b35b7(%rip), %xmm1 # 0x1e13b10 vmovaps %xmm1, 0x490(%rsp) vmovaps 0x4b0(%rsp), %xmm3 vmovaps 0x4a0(%rsp), %xmm2 vmovaps 0x490(%rsp), %xmm1 vmovaps %xmm3, 0x6c0(%rsp) vmovaps %xmm2, 0x6b0(%rsp) vmovaps %xmm1, 0x6a0(%rsp) vmovaps 0x6c0(%rsp), %xmm2 vmovaps 0x6b0(%rsp), %xmm1 vmovaps 0x6a0(%rsp), %xmm3 vfmadd213ps %xmm3, %xmm2, %xmm1 # xmm1 = (xmm2 * xmm1) + xmm3 vmovaps %xmm1, 0x110(%rsp) vmovaps 0x110(%rsp), %xmm2 vmovaps 0xa0(%rsp), %xmm1 vmovaps %xmm2, 0x240(%rsp) vmovaps %xmm1, 0x230(%rsp) vmovaps 0x240(%rsp), %xmm1 vmovaps 0x230(%rsp), %xmm2 vmulps %xmm2, %xmm1, %xmm1 vmovaps %xmm1, 0x110(%rsp) vmovaps 0x110(%rsp), %xmm2 vmovaps 0xa0(%rsp), %xmm1 vmovaps %xmm2, 0x220(%rsp) vmovaps %xmm1, 0x210(%rsp) vmovaps 0x220(%rsp), %xmm1 vmovaps 0x210(%rsp), %xmm2 vmulps %xmm2, %xmm1, %xmm1 vmovaps %xmm1, 0x110(%rsp) vmovaps 0xa0(%rsp), %xmm2 vmovaps 0x110(%rsp), %xmm1 vmovaps %xmm2, 0x3f0(%rsp) vmovaps 0x7a2486(%rip), %xmm2 # 0x1e02af0 vmovaps %xmm2, 0x3e0(%rsp) vmovaps %xmm1, 0x3d0(%rsp) vmovaps 0x3f0(%rsp), %xmm3 vmovaps 0x3e0(%rsp), %xmm2 vmovaps 0x3d0(%rsp), %xmm1 vmovaps %xmm3, 0x5d0(%rsp) vmovaps %xmm2, 0x5c0(%rsp) vmovaps %xmm1, 0x5b0(%rsp) vmovaps 0x5d0(%rsp), %xmm2 vmovaps 0x5c0(%rsp), %xmm1 vmovaps 0x5b0(%rsp), %xmm3 vfnmadd213ps %xmm3, %xmm2, %xmm1 # xmm1 = -(xmm2 * xmm1) + xmm3 vmovaps %xmm1, 0x110(%rsp) vmovaps 0x110(%rsp), %xmm1 vmovaps %xmm1, 0x340(%rsp) vmovaps 
0x7a23bb(%rip), %xmm1 # 0x1e02ab0 vmovaps %xmm1, 0x330(%rsp) vmovaps 0x340(%rsp), %xmm1 vmovaps 0x330(%rsp), %xmm2 vaddps %xmm2, %xmm1, %xmm1 vmovaps %xmm1, 0x110(%rsp) vmovaps 0x7b33fb(%rip), %xmm1 # 0x1e13b20 vmovaps %xmm1, 0x90(%rsp) vmovaps 0x90(%rsp), %xmm2 vmovaps 0xa0(%rsp), %xmm1 vmovaps %xmm2, 0x480(%rsp) vmovaps %xmm1, 0x470(%rsp) vmovaps 0x7b33d6(%rip), %xmm1 # 0x1e13b30 vmovaps %xmm1, 0x460(%rsp) vmovaps 0x480(%rsp), %xmm3 vmovaps 0x470(%rsp), %xmm2 vmovaps 0x460(%rsp), %xmm1 vmovaps %xmm3, 0x6f0(%rsp) vmovaps %xmm2, 0x6e0(%rsp) vmovaps %xmm1, 0x6d0(%rsp) vmovaps 0x6f0(%rsp), %xmm2 vmovaps 0x6e0(%rsp), %xmm1 vmovaps 0x6d0(%rsp), %xmm3 vfmadd213ps %xmm3, %xmm2, %xmm1 # xmm1 = (xmm2 * xmm1) + xmm3 vmovaps %xmm1, 0x90(%rsp) vmovaps 0x90(%rsp), %xmm2 vmovaps 0xa0(%rsp), %xmm1 vmovaps %xmm2, 0x450(%rsp) vmovaps %xmm1, 0x440(%rsp) vmovaps 0x7b3352(%rip), %xmm1 # 0x1e13b40 vmovaps %xmm1, 0x430(%rsp) vmovaps 0x450(%rsp), %xmm3 vmovaps 0x440(%rsp), %xmm2 vmovaps 0x430(%rsp), %xmm1 vmovaps %xmm3, 0x720(%rsp) vmovaps %xmm2, 0x710(%rsp) vmovaps %xmm1, 0x700(%rsp) vmovaps 0x720(%rsp), %xmm2 vmovaps 0x710(%rsp), %xmm1 vmovaps 0x700(%rsp), %xmm3 vfmadd213ps %xmm3, %xmm2, %xmm1 # xmm1 = (xmm2 * xmm1) + xmm3 vmovaps %xmm1, 0x90(%rsp) vmovaps 0x90(%rsp), %xmm2 vmovaps 0xa0(%rsp), %xmm1 vmovaps %xmm2, 0x200(%rsp) vmovaps %xmm1, 0x1f0(%rsp) vmovaps 0x200(%rsp), %xmm1 vmovaps 0x1f0(%rsp), %xmm2 vmulps %xmm2, %xmm1, %xmm1 vmovaps %xmm1, 0x90(%rsp) vmovaps 0x90(%rsp), %xmm2 vmovaps 0x170(%rsp), %xmm1 vmovaps %xmm2, 0x420(%rsp) vmovaps %xmm1, 0x410(%rsp) vmovaps %xmm1, 0x400(%rsp) vmovaps 0x420(%rsp), %xmm3 vmovaps 0x410(%rsp), %xmm2 vmovaps 0x400(%rsp), %xmm1 vmovaps %xmm3, 0x750(%rsp) vmovaps %xmm2, 0x740(%rsp) vmovaps %xmm1, 0x730(%rsp) vmovaps 0x750(%rsp), %xmm2 vmovaps 0x740(%rsp), %xmm1 vmovaps 0x730(%rsp), %xmm3 vfmadd213ps %xmm3, %xmm2, %xmm1 # xmm1 = (xmm2 * xmm1) + xmm3 vmovaps %xmm1, 0x90(%rsp) vmovaps 0xc0(%rsp), %xmm1 vmovaps %xmm1, 0x130(%rsp) vmovaps 
0x130(%rsp), %xmm2 vmovaps 0x90(%rsp), %xmm1 vmovaps %xmm2, 0x380(%rsp) vmovaps %xmm1, 0x370(%rsp) vmovdqa 0x380(%rsp), %xmm1 vmovdqa 0x370(%rsp), %xmm2 vpand %xmm2, %xmm1, %xmm1 vmovdqa %xmm1, 0x80(%rsp) vmovaps 0x130(%rsp), %xmm2 vmovaps 0x110(%rsp), %xmm1 vmovaps %xmm2, 0x190(%rsp) vmovaps %xmm1, 0x180(%rsp) vmovdqa 0x190(%rsp), %xmm1 vmovdqa 0x180(%rsp), %xmm2 vpandn %xmm2, %xmm1, %xmm1 vmovdqa %xmm1, 0x70(%rsp) vmovaps 0x90(%rsp), %xmm2 vmovaps 0x80(%rsp), %xmm1 vmovaps %xmm2, 0x1d0(%rsp) vmovaps %xmm1, 0x1c0(%rsp) vmovaps 0x1d0(%rsp), %xmm1 vmovaps 0x1c0(%rsp), %xmm2 vsubps %xmm2, %xmm1, %xmm1 vmovaps %xmm1, 0x90(%rsp) vmovaps 0x110(%rsp), %xmm2 vmovaps 0x70(%rsp), %xmm1 vmovaps %xmm2, 0x1b0(%rsp) vmovaps %xmm1, 0x1a0(%rsp) vmovaps 0x1b0(%rsp), %xmm1 vmovaps 0x1a0(%rsp), %xmm2 vsubps %xmm2, %xmm1, %xmm1 vmovaps %xmm1, 0x110(%rsp) vmovaps 0x70(%rsp), %xmm2 vmovaps 0x80(%rsp), %xmm1 vmovaps %xmm2, 0x320(%rsp) vmovaps %xmm1, 0x310(%rsp) vmovaps 0x320(%rsp), %xmm1 vmovaps 0x310(%rsp), %xmm2 vaddps %xmm2, %xmm1, %xmm1 vmovaps %xmm1, 0x150(%rsp) vmovaps 0x110(%rsp), %xmm2 vmovaps 0x90(%rsp), %xmm1 vmovaps %xmm2, 0x300(%rsp) vmovaps %xmm1, 0x2f0(%rsp) vmovaps 0x300(%rsp), %xmm1 vmovaps 0x2f0(%rsp), %xmm2 vaddps %xmm2, %xmm1, %xmm1 vmovaps %xmm1, 0x140(%rsp) vmovaps 0x150(%rsp), %xmm2 vmovaps 0x120(%rsp), %xmm1 vmovaps %xmm2, 0x840(%rsp) vmovaps %xmm1, 0x830(%rsp) vmovdqa 0x840(%rsp), %xmm1 vmovdqa 0x830(%rsp), %xmm2 vpxor %xmm2, %xmm1, %xmm1 movq 0x168(%rsp), %rax vmovdqa %xmm1, (%rax) vmovaps 0x140(%rsp), %xmm2 vmovaps 0xb0(%rsp), %xmm1 vmovaps %xmm2, 0x820(%rsp) vmovaps %xmm1, 0x810(%rsp) vmovdqa 0x820(%rsp), %xmm1 vmovdqa 0x810(%rsp), %xmm2 vpxor %xmm2, %xmm1, %xmm1 movq 0x160(%rsp), %rax vmovdqa %xmm1, (%rax) vmovaps -0x30(%rsp), %xmm1 vmovaps %xmm0, 0x20(%rsp) vmovaps 0x20(%rsp), %xmm0 vmovaps %xmm1, 0x8a0(%rsp) vmovaps %xmm0, 0x890(%rsp) vmovaps 0x8a0(%rsp), %xmm0 vmovaps 0x890(%rsp), %xmm1 vcmpeqps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, -0x50(%rsp) vmovaps 
-0x40(%rsp), %xmm1 vmovaps -0x50(%rsp), %xmm0 vmovaps %xmm1, 0x60(%rsp) vmovaps %xmm0, 0x50(%rsp) vmovaps 0x60(%rsp), %xmm0 vmovaps 0x50(%rsp), %xmm1 vpand %xmm1, %xmm0, %xmm0 vmovaps %xmm0, -0x60(%rsp) vmovaps -0x30(%rsp), %xmm1 vmovaps -0x60(%rsp), %xmm0 vmovaps %xmm1, 0x40(%rsp) vmovaps %xmm0, 0x30(%rsp) vmovaps 0x40(%rsp), %xmm0 vaddps 0x30(%rsp), %xmm0, %xmm0 vmovaps %xmm0, -0x30(%rsp) vmovaps -0x20(%rsp), %xmm1 vmovaps -0x30(%rsp), %xmm0 vmovaps %xmm1, 0x8c0(%rsp) vmovaps %xmm0, 0x8b0(%rsp) vmovaps 0x8c0(%rsp), %xmm0 vdivps 0x8b0(%rsp), %xmm0, %xmm0 vmovaps %xmm0, -0x70(%rsp) vmovaps -0x70(%rsp), %xmm0 addq $0x8d8, %rsp # imm = 0x8D8 retq nopl (%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/unaryop_x86_fma.cpp
ncnn::UnaryOp_x86_fma_functor::unary_op_acos::func_pack4(float vector[4] const&) const
__m128 func_pack4(const __m128& x) const { //TODO sse optimize float tmp[4]; _mm_storeu_ps(tmp, x); tmp[0] = acos(tmp[0]); tmp[1] = acos(tmp[1]); tmp[2] = acos(tmp[2]); tmp[3] = acos(tmp[3]); return _mm_loadu_ps(tmp); }
subq $0x48, %rsp movq %rdi, 0x18(%rsp) movq %rsi, 0x10(%rsp) movq %rsp, %rax movq 0x10(%rsp), %rcx vmovaps (%rcx), %xmm0 movq %rax, 0x38(%rsp) vmovaps %xmm0, 0x20(%rsp) vmovaps 0x20(%rsp), %xmm0 movq 0x38(%rsp), %rax vmovups %xmm0, (%rax) vmovss (%rsp), %xmm0 callq 0x16314b0 vmovss %xmm0, (%rsp) vmovss 0x4(%rsp), %xmm0 callq 0x16314b0 vmovss %xmm0, 0x4(%rsp) vmovss 0x8(%rsp), %xmm0 callq 0x16314b0 vmovss %xmm0, 0x8(%rsp) vmovss 0xc(%rsp), %xmm0 callq 0x16314b0 vmovss %xmm0, 0xc(%rsp) movq %rsp, %rax movq %rax, 0x40(%rsp) movq 0x40(%rsp), %rax vmovups (%rax), %xmm0 addq $0x48, %rsp retq nopl (%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/unaryop_x86_fma.cpp
ncnn::UnaryOp_x86_fma_functor::unary_op_tanh::func_pack8(float vector[8] const&) const
__m256 func_pack8(const __m256& x) const { return tanh_avx(x); }
pushq %rbp movq %rsp, %rbp andq $-0x20, %rsp subq $0xf80, %rsp # imm = 0xF80 movq %rdi, 0x18(%rsp) movq %rsi, 0x10(%rsp) movq 0x10(%rsp), %rax vmovaps (%rax), %ymm0 vmovaps %ymm0, 0x60(%rsp) movl $0x3f800000, 0xdc(%rsp) # imm = 0x3F800000 vmovss 0xdc(%rsp), %xmm0 vmovss %xmm0, 0x11c(%rsp) vmovss %xmm0, 0x118(%rsp) vmovss %xmm0, 0x114(%rsp) vmovss %xmm0, 0x110(%rsp) vmovss %xmm0, 0x10c(%rsp) vmovss %xmm0, 0x108(%rsp) vmovss %xmm0, 0x104(%rsp) vmovss %xmm0, 0x100(%rsp) vmovss 0x104(%rsp), %xmm1 vmovss 0x100(%rsp), %xmm0 vinsertps $0x10, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[2,3] vmovss 0x108(%rsp), %xmm1 vinsertps $0x20, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm1[0],xmm0[3] vmovss 0x10c(%rsp), %xmm1 vinsertps $0x30, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1,2],xmm1[0] vmovss 0x114(%rsp), %xmm2 vmovss 0x110(%rsp), %xmm1 vinsertps $0x10, %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm2[0],xmm1[2,3] vmovss 0x118(%rsp), %xmm2 vinsertps $0x20, %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0,1],xmm2[0],xmm1[3] vmovss 0x11c(%rsp), %xmm2 vinsertps $0x30, %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0,1,2],xmm2[0] vmovaps %xmm1, 0xf0(%rsp) vmovaps %xmm0, 0xe0(%rsp) vmovaps 0xe0(%rsp), %ymm0 vmovaps %ymm0, 0x40(%rsp) movl $0x40000000, 0xd8(%rsp) # imm = 0x40000000 vmovss 0xd8(%rsp), %xmm0 vmovss %xmm0, 0x15c(%rsp) vmovss %xmm0, 0x158(%rsp) vmovss %xmm0, 0x154(%rsp) vmovss %xmm0, 0x150(%rsp) vmovss %xmm0, 0x14c(%rsp) vmovss %xmm0, 0x148(%rsp) vmovss %xmm0, 0x144(%rsp) vmovss %xmm0, 0x140(%rsp) vmovss 0x144(%rsp), %xmm1 vmovss 0x140(%rsp), %xmm0 vinsertps $0x10, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[2,3] vmovss 0x148(%rsp), %xmm1 vinsertps $0x20, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm1[0],xmm0[3] vmovss 0x14c(%rsp), %xmm1 vinsertps $0x30, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1,2],xmm1[0] vmovss 0x154(%rsp), %xmm2 vmovss 0x150(%rsp), %xmm1 vinsertps $0x10, %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm2[0],xmm1[2,3] vmovss 0x158(%rsp), %xmm2 vinsertps $0x20, %xmm2, %xmm1, %xmm1 # xmm1 = 
xmm1[0,1],xmm2[0],xmm1[3] vmovss 0x15c(%rsp), %xmm2 vinsertps $0x30, %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0,1,2],xmm2[0] vmovaps %xmm1, 0x130(%rsp) vmovaps %xmm0, 0x120(%rsp) vmovaps 0x120(%rsp), %ymm0 vmovaps %ymm0, 0x20(%rsp) vmovaps 0x60(%rsp), %ymm1 vmovaps 0x20(%rsp), %ymm0 vmovaps %ymm1, 0xa0(%rsp) vmovaps %ymm0, 0x80(%rsp) vmovaps 0xa0(%rsp), %ymm0 vmovaps 0x80(%rsp), %ymm1 vmulps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x1e0(%rsp) movl $0x3f800000, 0xedc(%rsp) # imm = 0x3F800000 vmovss 0xedc(%rsp), %xmm0 vmovss %xmm0, 0xf6c(%rsp) vmovss %xmm0, 0xf68(%rsp) vmovss %xmm0, 0xf64(%rsp) vmovss %xmm0, 0xf60(%rsp) vmovss %xmm0, 0xf5c(%rsp) vmovss %xmm0, 0xf58(%rsp) vmovss %xmm0, 0xf54(%rsp) vmovss %xmm0, 0xf50(%rsp) vmovss 0xf54(%rsp), %xmm1 vmovss 0xf50(%rsp), %xmm0 vinsertps $0x10, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[2,3] vmovss 0xf58(%rsp), %xmm1 vinsertps $0x20, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm1[0],xmm0[3] vmovss 0xf5c(%rsp), %xmm1 vinsertps $0x30, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1,2],xmm1[0] vmovss 0xf64(%rsp), %xmm2 vmovss 0xf60(%rsp), %xmm1 vinsertps $0x10, %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm2[0],xmm1[2,3] vmovss 0xf68(%rsp), %xmm2 vinsertps $0x20, %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0,1],xmm2[0],xmm1[3] vmovss 0xf6c(%rsp), %xmm2 vinsertps $0x30, %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0,1,2],xmm2[0] vmovaps %xmm1, 0xf30(%rsp) vmovaps %xmm0, 0xf20(%rsp) vmovaps 0xf20(%rsp), %ymm0 vmovaps %ymm0, 0x1c0(%rsp) vmovaps 0x1c0(%rsp), %ymm1 vxorps %xmm0, %xmm0, %xmm0 vmovaps %ymm0, 0x240(%rsp) vmovaps 0x240(%rsp), %ymm3 vmovaps 0x1e0(%rsp), %ymm2 vmovaps %ymm3, 0x220(%rsp) vmovaps %ymm2, 0x200(%rsp) vmovaps 0x220(%rsp), %ymm2 vmovaps 0x200(%rsp), %ymm3 vsubps %ymm3, %ymm2, %ymm2 vmovaps %ymm2, 0x340(%rsp) vmovaps %ymm0, 0x3e0(%rsp) vmovaps 0x3e0(%rsp), %ymm0 vmovaps %ymm0, 0x320(%rsp) vmovaps 0x7a25f1(%rip), %ymm0 # 0x1e03b80 vmovaps %ymm0, 0x2c0(%rsp) vmovaps 0x340(%rsp), %ymm0 vmovaps %ymm0, 0x4a0(%rsp) vmovaps 0x7a25ee(%rip), %ymm0 # 0x1e03ba0 
vmovaps %ymm0, 0x480(%rsp) vmovaps 0x4a0(%rsp), %ymm0 vmovaps 0x480(%rsp), %ymm2 vminps %ymm2, %ymm0, %ymm0 vmovaps %ymm0, 0x340(%rsp) vmovaps 0x340(%rsp), %ymm0 vmovaps %ymm0, 0x380(%rsp) vmovaps 0x7a25cc(%rip), %ymm0 # 0x1e03bc0 vmovaps %ymm0, 0x360(%rsp) vmovaps 0x380(%rsp), %ymm0 vmovaps 0x360(%rsp), %ymm2 vmaxps %ymm2, %ymm0, %ymm0 vmovaps %ymm0, 0x340(%rsp) vmovaps 0x340(%rsp), %ymm0 vmovaps %ymm0, 0x740(%rsp) vmovaps 0x7a25aa(%rip), %ymm0 # 0x1e03be0 vmovaps %ymm0, 0x720(%rsp) vmovaps 0x7a25b9(%rip), %ymm0 # 0x1e03c00 vmovaps %ymm0, 0x700(%rsp) vmovaps 0x740(%rsp), %ymm4 vmovaps 0x720(%rsp), %ymm3 vmovaps 0x700(%rsp), %ymm2 vmovaps %ymm4, 0xb20(%rsp) vmovaps %ymm3, 0xb00(%rsp) vmovaps %ymm2, 0xae0(%rsp) vmovaps 0xb20(%rsp), %ymm3 vmovaps 0xb00(%rsp), %ymm2 vmovaps 0xae0(%rsp), %ymm4 vfmadd213ps %ymm4, %ymm3, %ymm2 # ymm2 = (ymm3 * ymm2) + ymm4 vmovaps %ymm2, 0x300(%rsp) vmovaps 0x300(%rsp), %ymm2 vroundps $0x1, %ymm2, %ymm2 vmovaps %ymm2, 0x320(%rsp) vmovaps 0x320(%rsp), %ymm3 vmovaps 0x300(%rsp), %ymm2 vcmpltps %ymm3, %ymm2, %ymm2 vmovaps %ymm2, 0x2a0(%rsp) vmovaps 0x2a0(%rsp), %ymm3 vmovaps 0x2c0(%rsp), %ymm2 vmovaps %ymm3, 0x780(%rsp) vmovaps %ymm2, 0x760(%rsp) vmovaps 0x780(%rsp), %ymm2 vmovaps 0x760(%rsp), %ymm3 vandps %ymm3, %ymm2, %ymm2 vmovaps %ymm2, 0x2a0(%rsp) vmovaps 0x320(%rsp), %ymm3 vmovaps 0x2a0(%rsp), %ymm2 vmovaps %ymm3, 0x3c0(%rsp) vmovaps %ymm2, 0x3a0(%rsp) vmovaps 0x3c0(%rsp), %ymm2 vmovaps 0x3a0(%rsp), %ymm3 vsubps %ymm3, %ymm2, %ymm2 vmovaps %ymm2, 0x300(%rsp) vmovaps 0x300(%rsp), %ymm3 vmovaps 0x340(%rsp), %ymm2 vmovaps %ymm3, 0x840(%rsp) vmovaps 0x7a2490(%rip), %ymm3 # 0x1e03c20 vmovaps %ymm3, 0x820(%rsp) vmovaps %ymm2, 0x800(%rsp) vmovaps 0x840(%rsp), %ymm4 vmovaps 0x820(%rsp), %ymm3 vmovaps 0x800(%rsp), %ymm2 vmovaps %ymm4, 0xdc0(%rsp) vmovaps %ymm3, 0xda0(%rsp) vmovaps %ymm2, 0xd80(%rsp) vmovaps 0xdc0(%rsp), %ymm3 vmovaps 0xda0(%rsp), %ymm2 vmovaps 0xd80(%rsp), %ymm4 vfnmadd213ps %ymm4, %ymm3, %ymm2 # ymm2 = -(ymm3 * ymm2) + ymm4 
vmovaps %ymm2, 0x340(%rsp) vmovaps 0x300(%rsp), %ymm3 vmovaps 0x340(%rsp), %ymm2 vmovaps %ymm3, 0x7e0(%rsp) vmovaps 0x7a241c(%rip), %ymm3 # 0x1e03c40 vmovaps %ymm3, 0x7c0(%rsp) vmovaps %ymm2, 0x7a0(%rsp) vmovaps 0x7e0(%rsp), %ymm4 vmovaps 0x7c0(%rsp), %ymm3 vmovaps 0x7a0(%rsp), %ymm2 vmovaps %ymm4, 0xe20(%rsp) vmovaps %ymm3, 0xe00(%rsp) vmovaps %ymm2, 0xde0(%rsp) vmovaps 0xe20(%rsp), %ymm3 vmovaps 0xe00(%rsp), %ymm2 vmovaps 0xde0(%rsp), %ymm4 vfnmadd213ps %ymm4, %ymm3, %ymm2 # ymm2 = -(ymm3 * ymm2) + ymm4 vmovaps %ymm2, 0x340(%rsp) vmovaps 0x340(%rsp), %ymm2 vmovaps %ymm2, 0x460(%rsp) vmovaps %ymm2, 0x440(%rsp) vmovaps 0x460(%rsp), %ymm2 vmovaps 0x440(%rsp), %ymm3 vmulps %ymm3, %ymm2, %ymm2 vmovaps %ymm2, 0x320(%rsp) vmovaps 0x7a2389(%rip), %ymm2 # 0x1e03c60 vmovaps %ymm2, 0x280(%rsp) vmovaps 0x280(%rsp), %ymm3 vmovaps 0x340(%rsp), %ymm2 vmovaps %ymm3, 0x6e0(%rsp) vmovaps %ymm2, 0x6c0(%rsp) vmovaps 0x7a2374(%rip), %ymm2 # 0x1e03c80 vmovaps %ymm2, 0x6a0(%rsp) vmovaps 0x6e0(%rsp), %ymm4 vmovaps 0x6c0(%rsp), %ymm3 vmovaps 0x6a0(%rsp), %ymm2 vmovaps %ymm4, 0xb80(%rsp) vmovaps %ymm3, 0xb60(%rsp) vmovaps %ymm2, 0xb40(%rsp) vmovaps 0xb80(%rsp), %ymm3 vmovaps 0xb60(%rsp), %ymm2 vmovaps 0xb40(%rsp), %ymm4 vfmadd213ps %ymm4, %ymm3, %ymm2 # ymm2 = (ymm3 * ymm2) + ymm4 vmovaps %ymm2, 0x280(%rsp) vmovaps 0x280(%rsp), %ymm3 vmovaps 0x340(%rsp), %ymm2 vmovaps %ymm3, 0x680(%rsp) vmovaps %ymm2, 0x660(%rsp) vmovaps 0x7a2300(%rip), %ymm2 # 0x1e03ca0 vmovaps %ymm2, 0x640(%rsp) vmovaps 0x680(%rsp), %ymm4 vmovaps 0x660(%rsp), %ymm3 vmovaps 0x640(%rsp), %ymm2 vmovaps %ymm4, 0xbe0(%rsp) vmovaps %ymm3, 0xbc0(%rsp) vmovaps %ymm2, 0xba0(%rsp) vmovaps 0xbe0(%rsp), %ymm3 vmovaps 0xbc0(%rsp), %ymm2 vmovaps 0xba0(%rsp), %ymm4 vfmadd213ps %ymm4, %ymm3, %ymm2 # ymm2 = (ymm3 * ymm2) + ymm4 vmovaps %ymm2, 0x280(%rsp) vmovaps 0x280(%rsp), %ymm3 vmovaps 0x340(%rsp), %ymm2 vmovaps %ymm3, 0x620(%rsp) vmovaps %ymm2, 0x600(%rsp) vmovaps 0x7a228c(%rip), %ymm2 # 0x1e03cc0 vmovaps %ymm2, 0x5e0(%rsp) vmovaps 
0x620(%rsp), %ymm4 vmovaps 0x600(%rsp), %ymm3 vmovaps 0x5e0(%rsp), %ymm2 vmovaps %ymm4, 0xc40(%rsp) vmovaps %ymm3, 0xc20(%rsp) vmovaps %ymm2, 0xc00(%rsp) vmovaps 0xc40(%rsp), %ymm3 vmovaps 0xc20(%rsp), %ymm2 vmovaps 0xc00(%rsp), %ymm4 vfmadd213ps %ymm4, %ymm3, %ymm2 # ymm2 = (ymm3 * ymm2) + ymm4 vmovaps %ymm2, 0x280(%rsp) vmovaps 0x280(%rsp), %ymm3 vmovaps 0x340(%rsp), %ymm2 vmovaps %ymm3, 0x5c0(%rsp) vmovaps %ymm2, 0x5a0(%rsp) vmovaps 0x7a2218(%rip), %ymm2 # 0x1e03ce0 vmovaps %ymm2, 0x580(%rsp) vmovaps 0x5c0(%rsp), %ymm4 vmovaps 0x5a0(%rsp), %ymm3 vmovaps 0x580(%rsp), %ymm2 vmovaps %ymm4, 0xca0(%rsp) vmovaps %ymm3, 0xc80(%rsp) vmovaps %ymm2, 0xc60(%rsp) vmovaps 0xca0(%rsp), %ymm3 vmovaps 0xc80(%rsp), %ymm2 vmovaps 0xc60(%rsp), %ymm4 vfmadd213ps %ymm4, %ymm3, %ymm2 # ymm2 = (ymm3 * ymm2) + ymm4 vmovaps %ymm2, 0x280(%rsp) vmovaps 0x280(%rsp), %ymm3 vmovaps 0x340(%rsp), %ymm2 vmovaps %ymm3, 0x560(%rsp) vmovaps %ymm2, 0x540(%rsp) vmovaps %ymm0, 0x520(%rsp) vmovaps 0x560(%rsp), %ymm3 vmovaps 0x540(%rsp), %ymm2 vmovaps 0x520(%rsp), %ymm0 vmovaps %ymm3, 0xd00(%rsp) vmovaps %ymm2, 0xce0(%rsp) vmovaps %ymm0, 0xcc0(%rsp) vmovaps 0xd00(%rsp), %ymm2 vmovaps 0xce0(%rsp), %ymm0 vmovaps 0xcc0(%rsp), %ymm3 vfmadd213ps %ymm3, %ymm2, %ymm0 # ymm0 = (ymm2 * ymm0) + ymm3 vmovaps %ymm0, 0x280(%rsp) vmovaps 0x280(%rsp), %ymm3 vmovaps 0x320(%rsp), %ymm2 vmovaps 0x340(%rsp), %ymm0 vmovaps %ymm3, 0x500(%rsp) vmovaps %ymm2, 0x4e0(%rsp) vmovaps %ymm0, 0x4c0(%rsp) vmovaps 0x500(%rsp), %ymm3 vmovaps 0x4e0(%rsp), %ymm2 vmovaps 0x4c0(%rsp), %ymm0 vmovaps %ymm3, 0xd60(%rsp) vmovaps %ymm2, 0xd40(%rsp) vmovaps %ymm0, 0xd20(%rsp) vmovaps 0xd60(%rsp), %ymm2 vmovaps 0xd40(%rsp), %ymm0 vmovaps 0xd20(%rsp), %ymm3 vfmadd213ps %ymm3, %ymm2, %ymm0 # ymm0 = (ymm2 * ymm0) + ymm3 vmovaps %ymm0, 0x280(%rsp) vmovaps 0x280(%rsp), %ymm2 vmovaps 0x2c0(%rsp), %ymm0 vmovaps %ymm2, 0x880(%rsp) vmovaps %ymm0, 0x860(%rsp) vmovaps 0x880(%rsp), %ymm0 vmovaps 0x860(%rsp), %ymm2 vaddps %ymm2, %ymm0, %ymm0 vmovaps %ymm0, 
0x280(%rsp) vmovaps 0x300(%rsp), %ymm0 vmovaps %ymm0, 0x8e0(%rsp) vcvttps2dq 0x8e0(%rsp), %ymm0 vmovaps %ymm0, 0x2e0(%rsp) vmovaps 0x2e0(%rsp), %ymm0 vmovaps %ymm0, 0x9e0(%rsp) vmovaps 0x7a202e(%rip), %ymm0 # 0x1e03d00 vmovaps %ymm0, 0x9c0(%rsp) vmovaps 0x9e0(%rsp), %ymm0 vmovaps %ymm0, 0x940(%rsp) vmovdqa 0x940(%rsp), %xmm0 vmovdqa %xmm0, 0x9b0(%rsp) vmovdqa 0x950(%rsp), %xmm0 vmovdqa %xmm0, 0x9a0(%rsp) vmovaps 0x9c0(%rsp), %ymm0 vmovaps %ymm0, 0x920(%rsp) vmovdqa 0x920(%rsp), %xmm0 vmovdqa %xmm0, 0x990(%rsp) vmovdqa 0x930(%rsp), %xmm0 vmovdqa %xmm0, 0x980(%rsp) vmovdqa 0x9b0(%rsp), %xmm2 vmovdqa 0x990(%rsp), %xmm0 vmovdqa %xmm2, 0xe80(%rsp) vmovdqa %xmm0, 0xe70(%rsp) vmovdqa 0xe80(%rsp), %xmm0 vmovdqa 0xe70(%rsp), %xmm2 vpaddd %xmm2, %xmm0, %xmm0 vmovdqa %xmm0, 0x9b0(%rsp) vmovdqa 0x9a0(%rsp), %xmm2 vmovdqa 0x980(%rsp), %xmm0 vmovdqa %xmm2, 0xe60(%rsp) vmovdqa %xmm0, 0xe50(%rsp) vmovdqa 0xe60(%rsp), %xmm0 vmovdqa 0xe50(%rsp), %xmm2 vpaddd %xmm2, %xmm0, %xmm0 vmovdqa %xmm0, 0x9a0(%rsp) vmovdqa 0x9b0(%rsp), %xmm0 vmovdqa %xmm0, 0x900(%rsp) vmovdqa 0x9a0(%rsp), %xmm0 vmovdqa %xmm0, 0x910(%rsp) vmovaps 0x900(%rsp), %ymm0 vmovaps %ymm0, 0x960(%rsp) vmovaps 0x960(%rsp), %ymm0 vmovaps %ymm0, 0x2e0(%rsp) vmovaps 0x2e0(%rsp), %ymm0 vmovaps %ymm0, 0xaa0(%rsp) movl $0x17, 0xa9c(%rsp) vmovaps 0xaa0(%rsp), %ymm0 vmovaps %ymm0, 0xa20(%rsp) vmovdqa 0xa20(%rsp), %xmm0 vmovdqa %xmm0, 0xa80(%rsp) vmovdqa 0xa30(%rsp), %xmm0 vmovdqa %xmm0, 0xa70(%rsp) vmovdqa 0xa80(%rsp), %xmm0 movl 0xa9c(%rsp), %eax vmovdqa %xmm0, 0xec0(%rsp) movl %eax, 0xebc(%rsp) vmovdqa 0xec0(%rsp), %xmm0 vmovd 0xebc(%rsp), %xmm2 vpslld %xmm2, %xmm0, %xmm0 vmovdqa %xmm0, 0xa80(%rsp) vmovdqa 0xa70(%rsp), %xmm0 movl 0xa9c(%rsp), %eax vmovdqa %xmm0, 0xea0(%rsp) movl %eax, 0xe9c(%rsp) vmovdqa 0xea0(%rsp), %xmm0 vmovd 0xe9c(%rsp), %xmm2 vpslld %xmm2, %xmm0, %xmm0 vmovdqa %xmm0, 0xa70(%rsp) vmovdqa 0xa80(%rsp), %xmm0 vmovdqa %xmm0, 0xa00(%rsp) vmovdqa 0xa70(%rsp), %xmm0 vmovdqa %xmm0, 0xa10(%rsp) vmovaps 0xa00(%rsp), 
%ymm0 vmovaps %ymm0, 0xa40(%rsp) vmovaps 0xa40(%rsp), %ymm0 vmovaps %ymm0, 0x2e0(%rsp) vmovaps 0x2e0(%rsp), %ymm0 vmovaps %ymm0, 0xac0(%rsp) vmovaps 0xac0(%rsp), %ymm0 vmovaps %ymm0, 0x260(%rsp) vmovaps 0x280(%rsp), %ymm2 vmovaps 0x260(%rsp), %ymm0 vmovaps %ymm2, 0x420(%rsp) vmovaps %ymm0, 0x400(%rsp) vmovaps 0x420(%rsp), %ymm0 vmovaps 0x400(%rsp), %ymm2 vmulps %ymm2, %ymm0, %ymm0 vmovaps %ymm0, 0x280(%rsp) vmovaps 0x280(%rsp), %ymm0 vmovaps %ymm1, 0x8c0(%rsp) vmovaps %ymm0, 0x8a0(%rsp) vmovaps 0x8c0(%rsp), %ymm0 vmovaps 0x8a0(%rsp), %ymm2 vaddps %ymm2, %ymm0, %ymm0 vmovaps %ymm1, 0xf00(%rsp) vmovaps %ymm0, 0xee0(%rsp) vmovaps 0xf00(%rsp), %ymm4 vmovaps 0xee0(%rsp), %ymm3 vrcpps %ymm3, %ymm0 vmulps %ymm0, %ymm4, %ymm1 vmovaps %ymm1, %ymm2 vfmsub213ps %ymm4, %ymm3, %ymm2 # ymm2 = (ymm3 * ymm2) - ymm4 vfnmadd213ps %ymm1, %ymm0, %ymm2 # ymm2 = -(ymm0 * ymm2) + ymm1 vmovaps 0x20(%rsp), %ymm1 vmovaps 0x40(%rsp), %ymm0 vmovaps %ymm2, 0x1a0(%rsp) vmovaps %ymm1, 0x180(%rsp) vmovaps %ymm0, 0x160(%rsp) vmovaps 0x1a0(%rsp), %ymm1 vmovaps 0x180(%rsp), %ymm0 vmovaps 0x160(%rsp), %ymm2 vmovaps 0x7a4b76(%rip), %ymm3 # 0x1e06bc0 vxorps %ymm3, %ymm2, %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 movq %rbp, %rsp popq %rbp retq nopl (%rax,%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/unaryop_x86_fma.cpp
ncnn::UnaryOp_x86_avx::forward_inplace(ncnn::Mat&, ncnn::Option const&) const
int UnaryOp_x86_avx::forward_inplace(Mat& bottom_top_blob, const Option& opt) const { using namespace UnaryOp_x86_avx_functor; if (op_type == Operation_ABS) return unary_op_inplace<unary_op_abs>(bottom_top_blob, opt); if (op_type == Operation_NEG) return unary_op_inplace<unary_op_neg>(bottom_top_blob, opt); if (op_type == Operation_FLOOR) return unary_op_inplace<unary_op_floor>(bottom_top_blob, opt); if (op_type == Operation_CEIL) return unary_op_inplace<unary_op_ceil>(bottom_top_blob, opt); if (op_type == Operation_SQUARE) return unary_op_inplace<unary_op_square>(bottom_top_blob, opt); if (op_type == Operation_SQRT) return unary_op_inplace<unary_op_sqrt>(bottom_top_blob, opt); if (op_type == Operation_RSQRT) return unary_op_inplace<unary_op_rsqrt>(bottom_top_blob, opt); if (op_type == Operation_EXP) return unary_op_inplace<unary_op_exp>(bottom_top_blob, opt); if (op_type == Operation_LOG) return unary_op_inplace<unary_op_log>(bottom_top_blob, opt); if (op_type == Operation_SIN) return unary_op_inplace<unary_op_sin>(bottom_top_blob, opt); if (op_type == Operation_COS) return unary_op_inplace<unary_op_cos>(bottom_top_blob, opt); if (op_type == Operation_TAN) return unary_op_inplace<unary_op_tan>(bottom_top_blob, opt); if (op_type == Operation_ASIN) return unary_op_inplace<unary_op_asin>(bottom_top_blob, opt); if (op_type == Operation_ACOS) return unary_op_inplace<unary_op_acos>(bottom_top_blob, opt); if (op_type == Operation_ATAN) return unary_op_inplace<unary_op_atan>(bottom_top_blob, opt); if (op_type == Operation_RECIPROCAL) return unary_op_inplace<unary_op_reciprocal>(bottom_top_blob, opt); if (op_type == Operation_TANH) return unary_op_inplace<unary_op_tanh>(bottom_top_blob, opt); return 0; }
subq $0x28, %rsp movq %rdi, 0x18(%rsp) movq %rsi, 0x10(%rsp) movq %rdx, 0x8(%rsp) movq 0x18(%rsp), %rax movq %rax, (%rsp) movq (%rax), %rcx movq -0x18(%rcx), %rcx cmpl $0x0, 0xd0(%rax,%rcx) jne 0x1662b95 movq 0x10(%rsp), %rdi movq 0x8(%rsp), %rsi callq 0x1662e70 movl %eax, 0x24(%rsp) jmp 0x1662e64 movq (%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx cmpl $0x1, 0xd0(%rax,%rcx) jne 0x1662bc2 movq 0x10(%rsp), %rdi movq 0x8(%rsp), %rsi callq 0x1663520 movl %eax, 0x24(%rsp) jmp 0x1662e64 movq (%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx cmpl $0x2, 0xd0(%rax,%rcx) jne 0x1662bef movq 0x10(%rsp), %rdi movq 0x8(%rsp), %rsi callq 0x1663bd0 movl %eax, 0x24(%rsp) jmp 0x1662e64 movq (%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx cmpl $0x3, 0xd0(%rax,%rcx) jne 0x1662c1c movq 0x10(%rsp), %rdi movq 0x8(%rsp), %rsi callq 0x1664280 movl %eax, 0x24(%rsp) jmp 0x1662e64 movq (%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx cmpl $0x4, 0xd0(%rax,%rcx) jne 0x1662c49 movq 0x10(%rsp), %rdi movq 0x8(%rsp), %rsi callq 0x1664930 movl %eax, 0x24(%rsp) jmp 0x1662e64 movq (%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx cmpl $0x5, 0xd0(%rax,%rcx) jne 0x1662c76 movq 0x10(%rsp), %rdi movq 0x8(%rsp), %rsi callq 0x1664fe0 movl %eax, 0x24(%rsp) jmp 0x1662e64 movq (%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx cmpl $0x6, 0xd0(%rax,%rcx) jne 0x1662ca3 movq 0x10(%rsp), %rdi movq 0x8(%rsp), %rsi callq 0x1665690 movl %eax, 0x24(%rsp) jmp 0x1662e64 movq (%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx cmpl $0x7, 0xd0(%rax,%rcx) jne 0x1662cd0 movq 0x10(%rsp), %rdi movq 0x8(%rsp), %rsi callq 0x1665d40 movl %eax, 0x24(%rsp) jmp 0x1662e64 movq (%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx cmpl $0x8, 0xd0(%rax,%rcx) jne 0x1662cfd movq 0x10(%rsp), %rdi movq 0x8(%rsp), %rsi callq 0x16663f0 movl %eax, 0x24(%rsp) jmp 0x1662e64 movq (%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx cmpl $0x9, 0xd0(%rax,%rcx) jne 0x1662d2a movq 0x10(%rsp), %rdi movq 0x8(%rsp), 
%rsi callq 0x1666aa0 movl %eax, 0x24(%rsp) jmp 0x1662e64 movq (%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx cmpl $0xa, 0xd0(%rax,%rcx) jne 0x1662d57 movq 0x10(%rsp), %rdi movq 0x8(%rsp), %rsi callq 0x1667150 movl %eax, 0x24(%rsp) jmp 0x1662e64 movq (%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx cmpl $0xb, 0xd0(%rax,%rcx) jne 0x1662d84 movq 0x10(%rsp), %rdi movq 0x8(%rsp), %rsi callq 0x1667800 movl %eax, 0x24(%rsp) jmp 0x1662e64 movq (%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx cmpl $0xc, 0xd0(%rax,%rcx) jne 0x1662db1 movq 0x10(%rsp), %rdi movq 0x8(%rsp), %rsi callq 0x1667eb0 movl %eax, 0x24(%rsp) jmp 0x1662e64 movq (%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx cmpl $0xd, 0xd0(%rax,%rcx) jne 0x1662dde movq 0x10(%rsp), %rdi movq 0x8(%rsp), %rsi callq 0x1668560 movl %eax, 0x24(%rsp) jmp 0x1662e64 movq (%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx cmpl $0xe, 0xd0(%rax,%rcx) jne 0x1662e08 movq 0x10(%rsp), %rdi movq 0x8(%rsp), %rsi callq 0x1668c10 movl %eax, 0x24(%rsp) jmp 0x1662e64 movq (%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx cmpl $0xf, 0xd0(%rax,%rcx) jne 0x1662e32 movq 0x10(%rsp), %rdi movq 0x8(%rsp), %rsi callq 0x16692c0 movl %eax, 0x24(%rsp) jmp 0x1662e64 movq (%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx cmpl $0x10, 0xd0(%rax,%rcx) jne 0x1662e5c movq 0x10(%rsp), %rdi movq 0x8(%rsp), %rsi callq 0x1669970 movl %eax, 0x24(%rsp) jmp 0x1662e64 movl $0x0, 0x24(%rsp) movl 0x24(%rsp), %eax addq $0x28, %rsp retq nopl (%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/unaryop_x86_avx.cpp
int ncnn::unary_op_inplace<ncnn::UnaryOp_x86_avx_functor::unary_op_neg>(ncnn::Mat&, ncnn::Option const&)
static int unary_op_inplace(Mat& a, const Option& opt) { Op op; int w = a.w; int h = a.h; int d = a.d; int channels = a.c; int elempack = a.elempack; int size = w * h * d * elempack; #pragma omp parallel for num_threads(opt.num_threads) for (int q = 0; q < channels; q++) { float* ptr = a.channel(q); int i = 0; #if __SSE2__ #if __AVX__ #if __AVX512F__ for (; i + 15 < size; i += 16) { __m512 _p = _mm512_loadu_ps(ptr); _p = op.func_pack16(_p); _mm512_storeu_ps(ptr, _p); ptr += 16; } #endif // __AVX512F__ for (; i + 7 < size; i += 8) { __m256 _p = _mm256_loadu_ps(ptr); _p = op.func_pack8(_p); _mm256_storeu_ps(ptr, _p); ptr += 8; } #endif // __AVX__ for (; i + 3 < size; i += 4) { __m128 _p = _mm_load_ps(ptr); _p = op.func_pack4(_p); _mm_store_ps(ptr, _p); ptr += 4; } #endif // __SSE2__ for (; i < size; i++) { *ptr = op.func(*ptr); ptr++; } } return 0; }
pushq %rbp movq %rsp, %rbp andq $-0x20, %rsp subq $0x200, %rsp # imm = 0x200 movq %rdi, 0xe0(%rsp) movq %rsi, 0xd8(%rsp) movq 0xe0(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0xd0(%rsp) movq 0xe0(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0xcc(%rsp) movq 0xe0(%rsp), %rax movl 0x34(%rax), %eax movl %eax, 0xc8(%rsp) movq 0xe0(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0xc4(%rsp) movq 0xe0(%rsp), %rax movl 0x18(%rax), %eax movl %eax, 0xc0(%rsp) movl 0xd0(%rsp), %eax imull 0xcc(%rsp), %eax imull 0xc8(%rsp), %eax imull 0xc0(%rsp), %eax movl %eax, 0xbc(%rsp) movl $0x0, 0xb8(%rsp) movl 0xb8(%rsp), %eax cmpl 0xc4(%rsp), %eax jge 0x1663bb7 movq 0xe0(%rsp), %rcx movl 0xb8(%rsp), %eax leaq 0x68(%rsp), %rdx movq %rdx, 0xf8(%rsp) movq %rcx, 0xf0(%rsp) movl %eax, 0xec(%rsp) movq 0xf0(%rsp), %rax movq %rax, 0x28(%rsp) movb $0x0, 0xeb(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0xec(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x68(%rsp), %r10 movq %r10, 0x1a8(%rsp) movl %r9d, 0x1a4(%rsp) movl %r8d, 0x1a0(%rsp) movl %edi, 0x19c(%rsp) movq %rsi, 0x190(%rsp) movq %rdx, 0x188(%rsp) movl %ecx, 0x184(%rsp) movq %rax, 0x178(%rsp) movq 0x1a8(%rsp), %rcx movq %rcx, 0x20(%rsp) movq 0x190(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x188(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x184(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x178(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x1a4(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x1a0(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x19c(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x1b8(%rsp) movl $0x10, 0x1b4(%rsp) movq 0x1b8(%rsp), %rax movslq 0x1b4(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x1b4(%rsp), %edx movslq %edx, %rdx andq 
%rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x28(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x90(%rsp) cmpl $0x4, 0x28(%rax) jne 0x1663788 movq 0x28(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0xa8(%rsp) movb $0x1, 0xeb(%rsp) testb $0x1, 0xeb(%rsp) jne 0x16638b4 leaq 0x68(%rsp), %rax movq %rax, 0x108(%rsp) movq 0x108(%rsp), %rax movq %rax, 0x1d8(%rsp) movq 0x1d8(%rsp), %rax movq %rax, 0x18(%rsp) cmpq $0x0, 0x8(%rax) je 0x166385a movq 0x18(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x1d4(%rsp) # imm = 0xFFFFFFFF movl 0x1d4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1d0(%rsp) cmpl $0x1, 0x1d0(%rsp) jne 0x166385a movq 0x18(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x166382b movq 0x18(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x1663829 jmp 0x1663858 movq 0x18(%rsp), %rax movq (%rax), %rax movq %rax, 0x1e0(%rsp) cmpq $0x0, 0x1e0(%rsp) je 0x1663856 movq 0x1e0(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x1663858 jmp 0x166385a movq 0x18(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x16638b2 movq %rax, %rdi callq 0x5fc90 jmp 0x16638b4 leaq 0x68(%rsp), %rax movq %rax, 0x100(%rsp) movq 0x100(%rsp), %rax movq (%rax), %rax movq %rax, 0x8(%rsp) leaq 0x68(%rsp), %rax movq %rax, 0x110(%rsp) movq 0x110(%rsp), %rax movq %rax, 0x1c8(%rsp) movq 0x1c8(%rsp), %rax movq %rax, 0x10(%rsp) cmpq $0x0, 0x8(%rax) je 0x166398d movq 0x10(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x1c4(%rsp) # imm = 0xFFFFFFFF movl 0x1c4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1c0(%rsp) cmpl $0x1, 0x1c0(%rsp) jne 0x166398d movq 0x10(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x166395e movq 0x10(%rsp), %rax movq (%rax), %rsi movq 
0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x166395c jmp 0x166398b movq 0x10(%rsp), %rax movq (%rax), %rax movq %rax, 0x1e8(%rsp) cmpq $0x0, 0x1e8(%rsp) je 0x1663989 movq 0x1e8(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x166398b jmp 0x166398d movq 0x10(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x16639e5 movq %rax, %rdi callq 0x5fc90 movq 0x8(%rsp), %rax movq %rax, 0xb0(%rsp) movl $0x0, 0x64(%rsp) movl 0x64(%rsp), %eax addl $0x7, %eax cmpl 0xbc(%rsp), %eax jge 0x1663aa0 movq 0xb0(%rsp), %rax movq %rax, 0x118(%rsp) movq 0x118(%rsp), %rax vmovups (%rax), %ymm0 vmovaps %ymm0, 0x40(%rsp) leaq 0xd7(%rsp), %rdi leaq 0x40(%rsp), %rsi callq 0x166a190 vmovaps %ymm0, 0x40(%rsp) movq 0xb0(%rsp), %rax vmovaps 0x40(%rsp), %ymm0 movq %rax, 0x150(%rsp) vmovaps %ymm0, 0x120(%rsp) vmovaps 0x120(%rsp), %ymm0 movq 0x150(%rsp), %rax vmovups %ymm0, (%rax) movq 0xb0(%rsp), %rax addq $0x20, %rax movq %rax, 0xb0(%rsp) movl 0x64(%rsp), %eax addl $0x8, %eax movl %eax, 0x64(%rsp) jmp 0x16639fa jmp 0x1663aa2 movl 0x64(%rsp), %eax addl $0x3, %eax cmpl 0xbc(%rsp), %eax jge 0x1663b4b movq 0xb0(%rsp), %rax movq %rax, 0x158(%rsp) movq 0x158(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x30(%rsp) leaq 0xd7(%rsp), %rdi leaq 0x30(%rsp), %rsi vzeroupper callq 0x166a1e0 vmovaps %xmm0, 0x30(%rsp) movq 0xb0(%rsp), %rax vmovaps 0x30(%rsp), %xmm0 movq %rax, 0x170(%rsp) vmovaps %xmm0, 0x160(%rsp) vmovaps 0x160(%rsp), %xmm0 movq 0x170(%rsp), %rax vmovaps %xmm0, (%rax) movq 0xb0(%rsp), %rax addq $0x10, %rax movq %rax, 0xb0(%rsp) movl 0x64(%rsp), %eax addl $0x4, %eax movl %eax, 0x64(%rsp) jmp 0x1663aa2 jmp 0x1663b4d movl 0x64(%rsp), %eax cmpl 0xbc(%rsp), %eax jge 0x1663b9f movq 0xb0(%rsp), %rsi leaq 0xd7(%rsp), %rdi vzeroupper callq 0x166a220 movq 0xb0(%rsp), %rax vmovss %xmm0, 
(%rax) movq 0xb0(%rsp), %rax addq $0x4, %rax movq %rax, 0xb0(%rsp) movl 0x64(%rsp), %eax addl $0x1, %eax movl %eax, 0x64(%rsp) jmp 0x1663b4d jmp 0x1663ba1 movl 0xb8(%rsp), %eax addl $0x1, %eax movl %eax, 0xb8(%rsp) jmp 0x16635ca xorl %eax, %eax movq %rbp, %rsp popq %rbp vzeroupper retq nopw %cs:(%rax,%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/unaryop_x86_avx.cpp
int ncnn::unary_op_inplace<ncnn::UnaryOp_x86_avx_functor::unary_op_floor>(ncnn::Mat&, ncnn::Option const&)
static int unary_op_inplace(Mat& a, const Option& opt) { Op op; int w = a.w; int h = a.h; int d = a.d; int channels = a.c; int elempack = a.elempack; int size = w * h * d * elempack; #pragma omp parallel for num_threads(opt.num_threads) for (int q = 0; q < channels; q++) { float* ptr = a.channel(q); int i = 0; #if __SSE2__ #if __AVX__ #if __AVX512F__ for (; i + 15 < size; i += 16) { __m512 _p = _mm512_loadu_ps(ptr); _p = op.func_pack16(_p); _mm512_storeu_ps(ptr, _p); ptr += 16; } #endif // __AVX512F__ for (; i + 7 < size; i += 8) { __m256 _p = _mm256_loadu_ps(ptr); _p = op.func_pack8(_p); _mm256_storeu_ps(ptr, _p); ptr += 8; } #endif // __AVX__ for (; i + 3 < size; i += 4) { __m128 _p = _mm_load_ps(ptr); _p = op.func_pack4(_p); _mm_store_ps(ptr, _p); ptr += 4; } #endif // __SSE2__ for (; i < size; i++) { *ptr = op.func(*ptr); ptr++; } } return 0; }
pushq %rbp movq %rsp, %rbp andq $-0x20, %rsp subq $0x200, %rsp # imm = 0x200 movq %rdi, 0xe0(%rsp) movq %rsi, 0xd8(%rsp) movq 0xe0(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0xd0(%rsp) movq 0xe0(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0xcc(%rsp) movq 0xe0(%rsp), %rax movl 0x34(%rax), %eax movl %eax, 0xc8(%rsp) movq 0xe0(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0xc4(%rsp) movq 0xe0(%rsp), %rax movl 0x18(%rax), %eax movl %eax, 0xc0(%rsp) movl 0xd0(%rsp), %eax imull 0xcc(%rsp), %eax imull 0xc8(%rsp), %eax imull 0xc0(%rsp), %eax movl %eax, 0xbc(%rsp) movl $0x0, 0xb8(%rsp) movl 0xb8(%rsp), %eax cmpl 0xc4(%rsp), %eax jge 0x1664267 movq 0xe0(%rsp), %rcx movl 0xb8(%rsp), %eax leaq 0x68(%rsp), %rdx movq %rdx, 0xf8(%rsp) movq %rcx, 0xf0(%rsp) movl %eax, 0xec(%rsp) movq 0xf0(%rsp), %rax movq %rax, 0x28(%rsp) movb $0x0, 0xeb(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0xec(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x68(%rsp), %r10 movq %r10, 0x1a8(%rsp) movl %r9d, 0x1a4(%rsp) movl %r8d, 0x1a0(%rsp) movl %edi, 0x19c(%rsp) movq %rsi, 0x190(%rsp) movq %rdx, 0x188(%rsp) movl %ecx, 0x184(%rsp) movq %rax, 0x178(%rsp) movq 0x1a8(%rsp), %rcx movq %rcx, 0x20(%rsp) movq 0x190(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x188(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x184(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x178(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x1a4(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x1a0(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x19c(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x1b8(%rsp) movl $0x10, 0x1b4(%rsp) movq 0x1b8(%rsp), %rax movslq 0x1b4(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x1b4(%rsp), %edx movslq %edx, %rdx andq 
%rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x28(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x90(%rsp) cmpl $0x4, 0x28(%rax) jne 0x1663e38 movq 0x28(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0xa8(%rsp) movb $0x1, 0xeb(%rsp) testb $0x1, 0xeb(%rsp) jne 0x1663f64 leaq 0x68(%rsp), %rax movq %rax, 0x108(%rsp) movq 0x108(%rsp), %rax movq %rax, 0x1d8(%rsp) movq 0x1d8(%rsp), %rax movq %rax, 0x18(%rsp) cmpq $0x0, 0x8(%rax) je 0x1663f0a movq 0x18(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x1d4(%rsp) # imm = 0xFFFFFFFF movl 0x1d4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1d0(%rsp) cmpl $0x1, 0x1d0(%rsp) jne 0x1663f0a movq 0x18(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1663edb movq 0x18(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x1663ed9 jmp 0x1663f08 movq 0x18(%rsp), %rax movq (%rax), %rax movq %rax, 0x1e0(%rsp) cmpq $0x0, 0x1e0(%rsp) je 0x1663f06 movq 0x1e0(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x1663f08 jmp 0x1663f0a movq 0x18(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1663f62 movq %rax, %rdi callq 0x5fc90 jmp 0x1663f64 leaq 0x68(%rsp), %rax movq %rax, 0x100(%rsp) movq 0x100(%rsp), %rax movq (%rax), %rax movq %rax, 0x8(%rsp) leaq 0x68(%rsp), %rax movq %rax, 0x110(%rsp) movq 0x110(%rsp), %rax movq %rax, 0x1c8(%rsp) movq 0x1c8(%rsp), %rax movq %rax, 0x10(%rsp) cmpq $0x0, 0x8(%rax) je 0x166403d movq 0x10(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x1c4(%rsp) # imm = 0xFFFFFFFF movl 0x1c4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1c0(%rsp) cmpl $0x1, 0x1c0(%rsp) jne 0x166403d movq 0x10(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x166400e movq 0x10(%rsp), %rax movq (%rax), %rsi movq 
0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x166400c jmp 0x166403b movq 0x10(%rsp), %rax movq (%rax), %rax movq %rax, 0x1e8(%rsp) cmpq $0x0, 0x1e8(%rsp) je 0x1664039 movq 0x1e8(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x166403b jmp 0x166403d movq 0x10(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1664095 movq %rax, %rdi callq 0x5fc90 movq 0x8(%rsp), %rax movq %rax, 0xb0(%rsp) movl $0x0, 0x64(%rsp) movl 0x64(%rsp), %eax addl $0x7, %eax cmpl 0xbc(%rsp), %eax jge 0x1664150 movq 0xb0(%rsp), %rax movq %rax, 0x118(%rsp) movq 0x118(%rsp), %rax vmovups (%rax), %ymm0 vmovaps %ymm0, 0x40(%rsp) leaq 0xd7(%rsp), %rdi leaq 0x40(%rsp), %rsi callq 0x166a250 vmovaps %ymm0, 0x40(%rsp) movq 0xb0(%rsp), %rax vmovaps 0x40(%rsp), %ymm0 movq %rax, 0x150(%rsp) vmovaps %ymm0, 0x120(%rsp) vmovaps 0x120(%rsp), %ymm0 movq 0x150(%rsp), %rax vmovups %ymm0, (%rax) movq 0xb0(%rsp), %rax addq $0x20, %rax movq %rax, 0xb0(%rsp) movl 0x64(%rsp), %eax addl $0x8, %eax movl %eax, 0x64(%rsp) jmp 0x16640aa jmp 0x1664152 movl 0x64(%rsp), %eax addl $0x3, %eax cmpl 0xbc(%rsp), %eax jge 0x16641fb movq 0xb0(%rsp), %rax movq %rax, 0x158(%rsp) movq 0x158(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x30(%rsp) leaq 0xd7(%rsp), %rdi leaq 0x30(%rsp), %rsi vzeroupper callq 0x166a270 vmovaps %xmm0, 0x30(%rsp) movq 0xb0(%rsp), %rax vmovaps 0x30(%rsp), %xmm0 movq %rax, 0x170(%rsp) vmovaps %xmm0, 0x160(%rsp) vmovaps 0x160(%rsp), %xmm0 movq 0x170(%rsp), %rax vmovaps %xmm0, (%rax) movq 0xb0(%rsp), %rax addq $0x10, %rax movq %rax, 0xb0(%rsp) movl 0x64(%rsp), %eax addl $0x4, %eax movl %eax, 0x64(%rsp) jmp 0x1664152 jmp 0x16641fd movl 0x64(%rsp), %eax cmpl 0xbc(%rsp), %eax jge 0x166424f movq 0xb0(%rsp), %rsi leaq 0xd7(%rsp), %rdi vzeroupper callq 0x166a2a0 movq 0xb0(%rsp), %rax vmovss %xmm0, 
(%rax) movq 0xb0(%rsp), %rax addq $0x4, %rax movq %rax, 0xb0(%rsp) movl 0x64(%rsp), %eax addl $0x1, %eax movl %eax, 0x64(%rsp) jmp 0x16641fd jmp 0x1664251 movl 0xb8(%rsp), %eax addl $0x1, %eax movl %eax, 0xb8(%rsp) jmp 0x1663c7a xorl %eax, %eax movq %rbp, %rsp popq %rbp vzeroupper retq nopw %cs:(%rax,%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/unaryop_x86_avx.cpp
int ncnn::unary_op_inplace<ncnn::UnaryOp_x86_avx_functor::unary_op_ceil>(ncnn::Mat&, ncnn::Option const&)
static int unary_op_inplace(Mat& a, const Option& opt) { Op op; int w = a.w; int h = a.h; int d = a.d; int channels = a.c; int elempack = a.elempack; int size = w * h * d * elempack; #pragma omp parallel for num_threads(opt.num_threads) for (int q = 0; q < channels; q++) { float* ptr = a.channel(q); int i = 0; #if __SSE2__ #if __AVX__ #if __AVX512F__ for (; i + 15 < size; i += 16) { __m512 _p = _mm512_loadu_ps(ptr); _p = op.func_pack16(_p); _mm512_storeu_ps(ptr, _p); ptr += 16; } #endif // __AVX512F__ for (; i + 7 < size; i += 8) { __m256 _p = _mm256_loadu_ps(ptr); _p = op.func_pack8(_p); _mm256_storeu_ps(ptr, _p); ptr += 8; } #endif // __AVX__ for (; i + 3 < size; i += 4) { __m128 _p = _mm_load_ps(ptr); _p = op.func_pack4(_p); _mm_store_ps(ptr, _p); ptr += 4; } #endif // __SSE2__ for (; i < size; i++) { *ptr = op.func(*ptr); ptr++; } } return 0; }
pushq %rbp movq %rsp, %rbp andq $-0x20, %rsp subq $0x200, %rsp # imm = 0x200 movq %rdi, 0xe0(%rsp) movq %rsi, 0xd8(%rsp) movq 0xe0(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0xd0(%rsp) movq 0xe0(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0xcc(%rsp) movq 0xe0(%rsp), %rax movl 0x34(%rax), %eax movl %eax, 0xc8(%rsp) movq 0xe0(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0xc4(%rsp) movq 0xe0(%rsp), %rax movl 0x18(%rax), %eax movl %eax, 0xc0(%rsp) movl 0xd0(%rsp), %eax imull 0xcc(%rsp), %eax imull 0xc8(%rsp), %eax imull 0xc0(%rsp), %eax movl %eax, 0xbc(%rsp) movl $0x0, 0xb8(%rsp) movl 0xb8(%rsp), %eax cmpl 0xc4(%rsp), %eax jge 0x1664917 movq 0xe0(%rsp), %rcx movl 0xb8(%rsp), %eax leaq 0x68(%rsp), %rdx movq %rdx, 0xf8(%rsp) movq %rcx, 0xf0(%rsp) movl %eax, 0xec(%rsp) movq 0xf0(%rsp), %rax movq %rax, 0x28(%rsp) movb $0x0, 0xeb(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0xec(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x68(%rsp), %r10 movq %r10, 0x1a8(%rsp) movl %r9d, 0x1a4(%rsp) movl %r8d, 0x1a0(%rsp) movl %edi, 0x19c(%rsp) movq %rsi, 0x190(%rsp) movq %rdx, 0x188(%rsp) movl %ecx, 0x184(%rsp) movq %rax, 0x178(%rsp) movq 0x1a8(%rsp), %rcx movq %rcx, 0x20(%rsp) movq 0x190(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x188(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x184(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x178(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x1a4(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x1a0(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x19c(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x1b8(%rsp) movl $0x10, 0x1b4(%rsp) movq 0x1b8(%rsp), %rax movslq 0x1b4(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x1b4(%rsp), %edx movslq %edx, %rdx andq 
%rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x28(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x90(%rsp) cmpl $0x4, 0x28(%rax) jne 0x16644e8 movq 0x28(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0xa8(%rsp) movb $0x1, 0xeb(%rsp) testb $0x1, 0xeb(%rsp) jne 0x1664614 leaq 0x68(%rsp), %rax movq %rax, 0x108(%rsp) movq 0x108(%rsp), %rax movq %rax, 0x1d8(%rsp) movq 0x1d8(%rsp), %rax movq %rax, 0x18(%rsp) cmpq $0x0, 0x8(%rax) je 0x16645ba movq 0x18(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x1d4(%rsp) # imm = 0xFFFFFFFF movl 0x1d4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1d0(%rsp) cmpl $0x1, 0x1d0(%rsp) jne 0x16645ba movq 0x18(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x166458b movq 0x18(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x1664589 jmp 0x16645b8 movq 0x18(%rsp), %rax movq (%rax), %rax movq %rax, 0x1e0(%rsp) cmpq $0x0, 0x1e0(%rsp) je 0x16645b6 movq 0x1e0(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x16645b8 jmp 0x16645ba movq 0x18(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1664612 movq %rax, %rdi callq 0x5fc90 jmp 0x1664614 leaq 0x68(%rsp), %rax movq %rax, 0x100(%rsp) movq 0x100(%rsp), %rax movq (%rax), %rax movq %rax, 0x8(%rsp) leaq 0x68(%rsp), %rax movq %rax, 0x110(%rsp) movq 0x110(%rsp), %rax movq %rax, 0x1c8(%rsp) movq 0x1c8(%rsp), %rax movq %rax, 0x10(%rsp) cmpq $0x0, 0x8(%rax) je 0x16646ed movq 0x10(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x1c4(%rsp) # imm = 0xFFFFFFFF movl 0x1c4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1c0(%rsp) cmpl $0x1, 0x1c0(%rsp) jne 0x16646ed movq 0x10(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x16646be movq 0x10(%rsp), %rax movq (%rax), %rsi movq 
0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x16646bc jmp 0x16646eb movq 0x10(%rsp), %rax movq (%rax), %rax movq %rax, 0x1e8(%rsp) cmpq $0x0, 0x1e8(%rsp) je 0x16646e9 movq 0x1e8(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x16646eb jmp 0x16646ed movq 0x10(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1664745 movq %rax, %rdi callq 0x5fc90 movq 0x8(%rsp), %rax movq %rax, 0xb0(%rsp) movl $0x0, 0x64(%rsp) movl 0x64(%rsp), %eax addl $0x7, %eax cmpl 0xbc(%rsp), %eax jge 0x1664800 movq 0xb0(%rsp), %rax movq %rax, 0x118(%rsp) movq 0x118(%rsp), %rax vmovups (%rax), %ymm0 vmovaps %ymm0, 0x40(%rsp) leaq 0xd7(%rsp), %rdi leaq 0x40(%rsp), %rsi callq 0x166a2d0 vmovaps %ymm0, 0x40(%rsp) movq 0xb0(%rsp), %rax vmovaps 0x40(%rsp), %ymm0 movq %rax, 0x150(%rsp) vmovaps %ymm0, 0x120(%rsp) vmovaps 0x120(%rsp), %ymm0 movq 0x150(%rsp), %rax vmovups %ymm0, (%rax) movq 0xb0(%rsp), %rax addq $0x20, %rax movq %rax, 0xb0(%rsp) movl 0x64(%rsp), %eax addl $0x8, %eax movl %eax, 0x64(%rsp) jmp 0x166475a jmp 0x1664802 movl 0x64(%rsp), %eax addl $0x3, %eax cmpl 0xbc(%rsp), %eax jge 0x16648ab movq 0xb0(%rsp), %rax movq %rax, 0x158(%rsp) movq 0x158(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x30(%rsp) leaq 0xd7(%rsp), %rdi leaq 0x30(%rsp), %rsi vzeroupper callq 0x166a2f0 vmovaps %xmm0, 0x30(%rsp) movq 0xb0(%rsp), %rax vmovaps 0x30(%rsp), %xmm0 movq %rax, 0x170(%rsp) vmovaps %xmm0, 0x160(%rsp) vmovaps 0x160(%rsp), %xmm0 movq 0x170(%rsp), %rax vmovaps %xmm0, (%rax) movq 0xb0(%rsp), %rax addq $0x10, %rax movq %rax, 0xb0(%rsp) movl 0x64(%rsp), %eax addl $0x4, %eax movl %eax, 0x64(%rsp) jmp 0x1664802 jmp 0x16648ad movl 0x64(%rsp), %eax cmpl 0xbc(%rsp), %eax jge 0x16648ff movq 0xb0(%rsp), %rsi leaq 0xd7(%rsp), %rdi vzeroupper callq 0x166a320 movq 0xb0(%rsp), %rax vmovss %xmm0, 
(%rax) movq 0xb0(%rsp), %rax addq $0x4, %rax movq %rax, 0xb0(%rsp) movl 0x64(%rsp), %eax addl $0x1, %eax movl %eax, 0x64(%rsp) jmp 0x16648ad jmp 0x1664901 movl 0xb8(%rsp), %eax addl $0x1, %eax movl %eax, 0xb8(%rsp) jmp 0x166432a xorl %eax, %eax movq %rbp, %rsp popq %rbp vzeroupper retq nopw %cs:(%rax,%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/unaryop_x86_avx.cpp
int ncnn::unary_op_inplace<ncnn::UnaryOp_x86_avx_functor::unary_op_square>(ncnn::Mat&, ncnn::Option const&)
static int unary_op_inplace(Mat& a, const Option& opt) { Op op; int w = a.w; int h = a.h; int d = a.d; int channels = a.c; int elempack = a.elempack; int size = w * h * d * elempack; #pragma omp parallel for num_threads(opt.num_threads) for (int q = 0; q < channels; q++) { float* ptr = a.channel(q); int i = 0; #if __SSE2__ #if __AVX__ #if __AVX512F__ for (; i + 15 < size; i += 16) { __m512 _p = _mm512_loadu_ps(ptr); _p = op.func_pack16(_p); _mm512_storeu_ps(ptr, _p); ptr += 16; } #endif // __AVX512F__ for (; i + 7 < size; i += 8) { __m256 _p = _mm256_loadu_ps(ptr); _p = op.func_pack8(_p); _mm256_storeu_ps(ptr, _p); ptr += 8; } #endif // __AVX__ for (; i + 3 < size; i += 4) { __m128 _p = _mm_load_ps(ptr); _p = op.func_pack4(_p); _mm_store_ps(ptr, _p); ptr += 4; } #endif // __SSE2__ for (; i < size; i++) { *ptr = op.func(*ptr); ptr++; } } return 0; }
pushq %rbp movq %rsp, %rbp andq $-0x20, %rsp subq $0x200, %rsp # imm = 0x200 movq %rdi, 0xe0(%rsp) movq %rsi, 0xd8(%rsp) movq 0xe0(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0xd0(%rsp) movq 0xe0(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0xcc(%rsp) movq 0xe0(%rsp), %rax movl 0x34(%rax), %eax movl %eax, 0xc8(%rsp) movq 0xe0(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0xc4(%rsp) movq 0xe0(%rsp), %rax movl 0x18(%rax), %eax movl %eax, 0xc0(%rsp) movl 0xd0(%rsp), %eax imull 0xcc(%rsp), %eax imull 0xc8(%rsp), %eax imull 0xc0(%rsp), %eax movl %eax, 0xbc(%rsp) movl $0x0, 0xb8(%rsp) movl 0xb8(%rsp), %eax cmpl 0xc4(%rsp), %eax jge 0x1664fc7 movq 0xe0(%rsp), %rcx movl 0xb8(%rsp), %eax leaq 0x68(%rsp), %rdx movq %rdx, 0xf8(%rsp) movq %rcx, 0xf0(%rsp) movl %eax, 0xec(%rsp) movq 0xf0(%rsp), %rax movq %rax, 0x28(%rsp) movb $0x0, 0xeb(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0xec(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x68(%rsp), %r10 movq %r10, 0x1a8(%rsp) movl %r9d, 0x1a4(%rsp) movl %r8d, 0x1a0(%rsp) movl %edi, 0x19c(%rsp) movq %rsi, 0x190(%rsp) movq %rdx, 0x188(%rsp) movl %ecx, 0x184(%rsp) movq %rax, 0x178(%rsp) movq 0x1a8(%rsp), %rcx movq %rcx, 0x20(%rsp) movq 0x190(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x188(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x184(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x178(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x1a4(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x1a0(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x19c(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x1b8(%rsp) movl $0x10, 0x1b4(%rsp) movq 0x1b8(%rsp), %rax movslq 0x1b4(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x1b4(%rsp), %edx movslq %edx, %rdx andq 
%rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x28(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x90(%rsp) cmpl $0x4, 0x28(%rax) jne 0x1664b98 movq 0x28(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0xa8(%rsp) movb $0x1, 0xeb(%rsp) testb $0x1, 0xeb(%rsp) jne 0x1664cc4 leaq 0x68(%rsp), %rax movq %rax, 0x108(%rsp) movq 0x108(%rsp), %rax movq %rax, 0x1d8(%rsp) movq 0x1d8(%rsp), %rax movq %rax, 0x18(%rsp) cmpq $0x0, 0x8(%rax) je 0x1664c6a movq 0x18(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x1d4(%rsp) # imm = 0xFFFFFFFF movl 0x1d4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1d0(%rsp) cmpl $0x1, 0x1d0(%rsp) jne 0x1664c6a movq 0x18(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1664c3b movq 0x18(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x1664c39 jmp 0x1664c68 movq 0x18(%rsp), %rax movq (%rax), %rax movq %rax, 0x1e0(%rsp) cmpq $0x0, 0x1e0(%rsp) je 0x1664c66 movq 0x1e0(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x1664c68 jmp 0x1664c6a movq 0x18(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1664cc2 movq %rax, %rdi callq 0x5fc90 jmp 0x1664cc4 leaq 0x68(%rsp), %rax movq %rax, 0x100(%rsp) movq 0x100(%rsp), %rax movq (%rax), %rax movq %rax, 0x8(%rsp) leaq 0x68(%rsp), %rax movq %rax, 0x110(%rsp) movq 0x110(%rsp), %rax movq %rax, 0x1c8(%rsp) movq 0x1c8(%rsp), %rax movq %rax, 0x10(%rsp) cmpq $0x0, 0x8(%rax) je 0x1664d9d movq 0x10(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x1c4(%rsp) # imm = 0xFFFFFFFF movl 0x1c4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1c0(%rsp) cmpl $0x1, 0x1c0(%rsp) jne 0x1664d9d movq 0x10(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1664d6e movq 0x10(%rsp), %rax movq (%rax), %rsi movq 
0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x1664d6c jmp 0x1664d9b movq 0x10(%rsp), %rax movq (%rax), %rax movq %rax, 0x1e8(%rsp) cmpq $0x0, 0x1e8(%rsp) je 0x1664d99 movq 0x1e8(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x1664d9b jmp 0x1664d9d movq 0x10(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1664df5 movq %rax, %rdi callq 0x5fc90 movq 0x8(%rsp), %rax movq %rax, 0xb0(%rsp) movl $0x0, 0x64(%rsp) movl 0x64(%rsp), %eax addl $0x7, %eax cmpl 0xbc(%rsp), %eax jge 0x1664eb0 movq 0xb0(%rsp), %rax movq %rax, 0x118(%rsp) movq 0x118(%rsp), %rax vmovups (%rax), %ymm0 vmovaps %ymm0, 0x40(%rsp) leaq 0xd7(%rsp), %rdi leaq 0x40(%rsp), %rsi callq 0x166a350 vmovaps %ymm0, 0x40(%rsp) movq 0xb0(%rsp), %rax vmovaps 0x40(%rsp), %ymm0 movq %rax, 0x150(%rsp) vmovaps %ymm0, 0x120(%rsp) vmovaps 0x120(%rsp), %ymm0 movq 0x150(%rsp), %rax vmovups %ymm0, (%rax) movq 0xb0(%rsp), %rax addq $0x20, %rax movq %rax, 0xb0(%rsp) movl 0x64(%rsp), %eax addl $0x8, %eax movl %eax, 0x64(%rsp) jmp 0x1664e0a jmp 0x1664eb2 movl 0x64(%rsp), %eax addl $0x3, %eax cmpl 0xbc(%rsp), %eax jge 0x1664f5b movq 0xb0(%rsp), %rax movq %rax, 0x158(%rsp) movq 0x158(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x30(%rsp) leaq 0xd7(%rsp), %rdi leaq 0x30(%rsp), %rsi vzeroupper callq 0x166a3a0 vmovaps %xmm0, 0x30(%rsp) movq 0xb0(%rsp), %rax vmovaps 0x30(%rsp), %xmm0 movq %rax, 0x170(%rsp) vmovaps %xmm0, 0x160(%rsp) vmovaps 0x160(%rsp), %xmm0 movq 0x170(%rsp), %rax vmovaps %xmm0, (%rax) movq 0xb0(%rsp), %rax addq $0x10, %rax movq %rax, 0xb0(%rsp) movl 0x64(%rsp), %eax addl $0x4, %eax movl %eax, 0x64(%rsp) jmp 0x1664eb2 jmp 0x1664f5d movl 0x64(%rsp), %eax cmpl 0xbc(%rsp), %eax jge 0x1664faf movq 0xb0(%rsp), %rsi leaq 0xd7(%rsp), %rdi vzeroupper callq 0x166a3e0 movq 0xb0(%rsp), %rax vmovss %xmm0, 
(%rax) movq 0xb0(%rsp), %rax addq $0x4, %rax movq %rax, 0xb0(%rsp) movl 0x64(%rsp), %eax addl $0x1, %eax movl %eax, 0x64(%rsp) jmp 0x1664f5d jmp 0x1664fb1 movl 0xb8(%rsp), %eax addl $0x1, %eax movl %eax, 0xb8(%rsp) jmp 0x16649da xorl %eax, %eax movq %rbp, %rsp popq %rbp vzeroupper retq nopw %cs:(%rax,%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/unaryop_x86_avx.cpp
int ncnn::unary_op_inplace<ncnn::UnaryOp_x86_avx_functor::unary_op_sqrt>(ncnn::Mat&, ncnn::Option const&)
static int unary_op_inplace(Mat& a, const Option& opt) { Op op; int w = a.w; int h = a.h; int d = a.d; int channels = a.c; int elempack = a.elempack; int size = w * h * d * elempack; #pragma omp parallel for num_threads(opt.num_threads) for (int q = 0; q < channels; q++) { float* ptr = a.channel(q); int i = 0; #if __SSE2__ #if __AVX__ #if __AVX512F__ for (; i + 15 < size; i += 16) { __m512 _p = _mm512_loadu_ps(ptr); _p = op.func_pack16(_p); _mm512_storeu_ps(ptr, _p); ptr += 16; } #endif // __AVX512F__ for (; i + 7 < size; i += 8) { __m256 _p = _mm256_loadu_ps(ptr); _p = op.func_pack8(_p); _mm256_storeu_ps(ptr, _p); ptr += 8; } #endif // __AVX__ for (; i + 3 < size; i += 4) { __m128 _p = _mm_load_ps(ptr); _p = op.func_pack4(_p); _mm_store_ps(ptr, _p); ptr += 4; } #endif // __SSE2__ for (; i < size; i++) { *ptr = op.func(*ptr); ptr++; } } return 0; }
pushq %rbp movq %rsp, %rbp andq $-0x20, %rsp subq $0x200, %rsp # imm = 0x200 movq %rdi, 0xe0(%rsp) movq %rsi, 0xd8(%rsp) movq 0xe0(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0xd0(%rsp) movq 0xe0(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0xcc(%rsp) movq 0xe0(%rsp), %rax movl 0x34(%rax), %eax movl %eax, 0xc8(%rsp) movq 0xe0(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0xc4(%rsp) movq 0xe0(%rsp), %rax movl 0x18(%rax), %eax movl %eax, 0xc0(%rsp) movl 0xd0(%rsp), %eax imull 0xcc(%rsp), %eax imull 0xc8(%rsp), %eax imull 0xc0(%rsp), %eax movl %eax, 0xbc(%rsp) movl $0x0, 0xb8(%rsp) movl 0xb8(%rsp), %eax cmpl 0xc4(%rsp), %eax jge 0x1665677 movq 0xe0(%rsp), %rcx movl 0xb8(%rsp), %eax leaq 0x68(%rsp), %rdx movq %rdx, 0xf8(%rsp) movq %rcx, 0xf0(%rsp) movl %eax, 0xec(%rsp) movq 0xf0(%rsp), %rax movq %rax, 0x28(%rsp) movb $0x0, 0xeb(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0xec(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x68(%rsp), %r10 movq %r10, 0x1a8(%rsp) movl %r9d, 0x1a4(%rsp) movl %r8d, 0x1a0(%rsp) movl %edi, 0x19c(%rsp) movq %rsi, 0x190(%rsp) movq %rdx, 0x188(%rsp) movl %ecx, 0x184(%rsp) movq %rax, 0x178(%rsp) movq 0x1a8(%rsp), %rcx movq %rcx, 0x20(%rsp) movq 0x190(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x188(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x184(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x178(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x1a4(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x1a0(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x19c(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x1b8(%rsp) movl $0x10, 0x1b4(%rsp) movq 0x1b8(%rsp), %rax movslq 0x1b4(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x1b4(%rsp), %edx movslq %edx, %rdx andq 
%rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x28(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x90(%rsp) cmpl $0x4, 0x28(%rax) jne 0x1665248 movq 0x28(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0xa8(%rsp) movb $0x1, 0xeb(%rsp) testb $0x1, 0xeb(%rsp) jne 0x1665374 leaq 0x68(%rsp), %rax movq %rax, 0x108(%rsp) movq 0x108(%rsp), %rax movq %rax, 0x1d8(%rsp) movq 0x1d8(%rsp), %rax movq %rax, 0x18(%rsp) cmpq $0x0, 0x8(%rax) je 0x166531a movq 0x18(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x1d4(%rsp) # imm = 0xFFFFFFFF movl 0x1d4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1d0(%rsp) cmpl $0x1, 0x1d0(%rsp) jne 0x166531a movq 0x18(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x16652eb movq 0x18(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x16652e9 jmp 0x1665318 movq 0x18(%rsp), %rax movq (%rax), %rax movq %rax, 0x1e0(%rsp) cmpq $0x0, 0x1e0(%rsp) je 0x1665316 movq 0x1e0(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x1665318 jmp 0x166531a movq 0x18(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1665372 movq %rax, %rdi callq 0x5fc90 jmp 0x1665374 leaq 0x68(%rsp), %rax movq %rax, 0x100(%rsp) movq 0x100(%rsp), %rax movq (%rax), %rax movq %rax, 0x8(%rsp) leaq 0x68(%rsp), %rax movq %rax, 0x110(%rsp) movq 0x110(%rsp), %rax movq %rax, 0x1c8(%rsp) movq 0x1c8(%rsp), %rax movq %rax, 0x10(%rsp) cmpq $0x0, 0x8(%rax) je 0x166544d movq 0x10(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x1c4(%rsp) # imm = 0xFFFFFFFF movl 0x1c4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1c0(%rsp) cmpl $0x1, 0x1c0(%rsp) jne 0x166544d movq 0x10(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x166541e movq 0x10(%rsp), %rax movq (%rax), %rsi movq 
0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x166541c jmp 0x166544b movq 0x10(%rsp), %rax movq (%rax), %rax movq %rax, 0x1e8(%rsp) cmpq $0x0, 0x1e8(%rsp) je 0x1665449 movq 0x1e8(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x166544b jmp 0x166544d movq 0x10(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x16654a5 movq %rax, %rdi callq 0x5fc90 movq 0x8(%rsp), %rax movq %rax, 0xb0(%rsp) movl $0x0, 0x64(%rsp) movl 0x64(%rsp), %eax addl $0x7, %eax cmpl 0xbc(%rsp), %eax jge 0x1665560 movq 0xb0(%rsp), %rax movq %rax, 0x118(%rsp) movq 0x118(%rsp), %rax vmovups (%rax), %ymm0 vmovaps %ymm0, 0x40(%rsp) leaq 0xd7(%rsp), %rdi leaq 0x40(%rsp), %rsi callq 0x166a400 vmovaps %ymm0, 0x40(%rsp) movq 0xb0(%rsp), %rax vmovaps 0x40(%rsp), %ymm0 movq %rax, 0x150(%rsp) vmovaps %ymm0, 0x120(%rsp) vmovaps 0x120(%rsp), %ymm0 movq 0x150(%rsp), %rax vmovups %ymm0, (%rax) movq 0xb0(%rsp), %rax addq $0x20, %rax movq %rax, 0xb0(%rsp) movl 0x64(%rsp), %eax addl $0x8, %eax movl %eax, 0x64(%rsp) jmp 0x16654ba jmp 0x1665562 movl 0x64(%rsp), %eax addl $0x3, %eax cmpl 0xbc(%rsp), %eax jge 0x166560b movq 0xb0(%rsp), %rax movq %rax, 0x158(%rsp) movq 0x158(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x30(%rsp) leaq 0xd7(%rsp), %rdi leaq 0x30(%rsp), %rsi vzeroupper callq 0x166a480 vmovaps %xmm0, 0x30(%rsp) movq 0xb0(%rsp), %rax vmovaps 0x30(%rsp), %xmm0 movq %rax, 0x170(%rsp) vmovaps %xmm0, 0x160(%rsp) vmovaps 0x160(%rsp), %xmm0 movq 0x170(%rsp), %rax vmovaps %xmm0, (%rax) movq 0xb0(%rsp), %rax addq $0x10, %rax movq %rax, 0xb0(%rsp) movl 0x64(%rsp), %eax addl $0x4, %eax movl %eax, 0x64(%rsp) jmp 0x1665562 jmp 0x166560d movl 0x64(%rsp), %eax cmpl 0xbc(%rsp), %eax jge 0x166565f movq 0xb0(%rsp), %rsi leaq 0xd7(%rsp), %rdi vzeroupper callq 0x166a4f0 movq 0xb0(%rsp), %rax vmovss %xmm0, 
(%rax) movq 0xb0(%rsp), %rax addq $0x4, %rax movq %rax, 0xb0(%rsp) movl 0x64(%rsp), %eax addl $0x1, %eax movl %eax, 0x64(%rsp) jmp 0x166560d jmp 0x1665661 movl 0xb8(%rsp), %eax addl $0x1, %eax movl %eax, 0xb8(%rsp) jmp 0x166508a xorl %eax, %eax movq %rbp, %rsp popq %rbp vzeroupper retq nopw %cs:(%rax,%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/unaryop_x86_avx.cpp
int ncnn::unary_op_inplace<ncnn::UnaryOp_x86_avx_functor::unary_op_sin>(ncnn::Mat&, ncnn::Option const&)
static int unary_op_inplace(Mat& a, const Option& opt) { Op op; int w = a.w; int h = a.h; int d = a.d; int channels = a.c; int elempack = a.elempack; int size = w * h * d * elempack; #pragma omp parallel for num_threads(opt.num_threads) for (int q = 0; q < channels; q++) { float* ptr = a.channel(q); int i = 0; #if __SSE2__ #if __AVX__ #if __AVX512F__ for (; i + 15 < size; i += 16) { __m512 _p = _mm512_loadu_ps(ptr); _p = op.func_pack16(_p); _mm512_storeu_ps(ptr, _p); ptr += 16; } #endif // __AVX512F__ for (; i + 7 < size; i += 8) { __m256 _p = _mm256_loadu_ps(ptr); _p = op.func_pack8(_p); _mm256_storeu_ps(ptr, _p); ptr += 8; } #endif // __AVX__ for (; i + 3 < size; i += 4) { __m128 _p = _mm_load_ps(ptr); _p = op.func_pack4(_p); _mm_store_ps(ptr, _p); ptr += 4; } #endif // __SSE2__ for (; i < size; i++) { *ptr = op.func(*ptr); ptr++; } } return 0; }
pushq %rbp movq %rsp, %rbp andq $-0x20, %rsp subq $0x200, %rsp # imm = 0x200 movq %rdi, 0xe0(%rsp) movq %rsi, 0xd8(%rsp) movq 0xe0(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0xd0(%rsp) movq 0xe0(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0xcc(%rsp) movq 0xe0(%rsp), %rax movl 0x34(%rax), %eax movl %eax, 0xc8(%rsp) movq 0xe0(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0xc4(%rsp) movq 0xe0(%rsp), %rax movl 0x18(%rax), %eax movl %eax, 0xc0(%rsp) movl 0xd0(%rsp), %eax imull 0xcc(%rsp), %eax imull 0xc8(%rsp), %eax imull 0xc0(%rsp), %eax movl %eax, 0xbc(%rsp) movl $0x0, 0xb8(%rsp) movl 0xb8(%rsp), %eax cmpl 0xc4(%rsp), %eax jge 0x1667137 movq 0xe0(%rsp), %rcx movl 0xb8(%rsp), %eax leaq 0x68(%rsp), %rdx movq %rdx, 0xf8(%rsp) movq %rcx, 0xf0(%rsp) movl %eax, 0xec(%rsp) movq 0xf0(%rsp), %rax movq %rax, 0x28(%rsp) movb $0x0, 0xeb(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0xec(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x68(%rsp), %r10 movq %r10, 0x1a8(%rsp) movl %r9d, 0x1a4(%rsp) movl %r8d, 0x1a0(%rsp) movl %edi, 0x19c(%rsp) movq %rsi, 0x190(%rsp) movq %rdx, 0x188(%rsp) movl %ecx, 0x184(%rsp) movq %rax, 0x178(%rsp) movq 0x1a8(%rsp), %rcx movq %rcx, 0x20(%rsp) movq 0x190(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x188(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x184(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x178(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x1a4(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x1a0(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x19c(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x1b8(%rsp) movl $0x10, 0x1b4(%rsp) movq 0x1b8(%rsp), %rax movslq 0x1b4(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x1b4(%rsp), %edx movslq %edx, %rdx andq 
%rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x28(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x90(%rsp) cmpl $0x4, 0x28(%rax) jne 0x1666d08 movq 0x28(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0xa8(%rsp) movb $0x1, 0xeb(%rsp) testb $0x1, 0xeb(%rsp) jne 0x1666e34 leaq 0x68(%rsp), %rax movq %rax, 0x108(%rsp) movq 0x108(%rsp), %rax movq %rax, 0x1d8(%rsp) movq 0x1d8(%rsp), %rax movq %rax, 0x18(%rsp) cmpq $0x0, 0x8(%rax) je 0x1666dda movq 0x18(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x1d4(%rsp) # imm = 0xFFFFFFFF movl 0x1d4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1d0(%rsp) cmpl $0x1, 0x1d0(%rsp) jne 0x1666dda movq 0x18(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1666dab movq 0x18(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x1666da9 jmp 0x1666dd8 movq 0x18(%rsp), %rax movq (%rax), %rax movq %rax, 0x1e0(%rsp) cmpq $0x0, 0x1e0(%rsp) je 0x1666dd6 movq 0x1e0(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x1666dd8 jmp 0x1666dda movq 0x18(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1666e32 movq %rax, %rdi callq 0x5fc90 jmp 0x1666e34 leaq 0x68(%rsp), %rax movq %rax, 0x100(%rsp) movq 0x100(%rsp), %rax movq (%rax), %rax movq %rax, 0x8(%rsp) leaq 0x68(%rsp), %rax movq %rax, 0x110(%rsp) movq 0x110(%rsp), %rax movq %rax, 0x1c8(%rsp) movq 0x1c8(%rsp), %rax movq %rax, 0x10(%rsp) cmpq $0x0, 0x8(%rax) je 0x1666f0d movq 0x10(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x1c4(%rsp) # imm = 0xFFFFFFFF movl 0x1c4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1c0(%rsp) cmpl $0x1, 0x1c0(%rsp) jne 0x1666f0d movq 0x10(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1666ede movq 0x10(%rsp), %rax movq (%rax), %rsi movq 
0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x1666edc jmp 0x1666f0b movq 0x10(%rsp), %rax movq (%rax), %rax movq %rax, 0x1e8(%rsp) cmpq $0x0, 0x1e8(%rsp) je 0x1666f09 movq 0x1e8(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x1666f0b jmp 0x1666f0d movq 0x10(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1666f65 movq %rax, %rdi callq 0x5fc90 movq 0x8(%rsp), %rax movq %rax, 0xb0(%rsp) movl $0x0, 0x64(%rsp) movl 0x64(%rsp), %eax addl $0x7, %eax cmpl 0xbc(%rsp), %eax jge 0x1667020 movq 0xb0(%rsp), %rax movq %rax, 0x118(%rsp) movq 0x118(%rsp), %rax vmovups (%rax), %ymm0 vmovaps %ymm0, 0x40(%rsp) leaq 0xd7(%rsp), %rdi leaq 0x40(%rsp), %rsi callq 0x166d250 vmovaps %ymm0, 0x40(%rsp) movq 0xb0(%rsp), %rax vmovaps 0x40(%rsp), %ymm0 movq %rax, 0x150(%rsp) vmovaps %ymm0, 0x120(%rsp) vmovaps 0x120(%rsp), %ymm0 movq 0x150(%rsp), %rax vmovups %ymm0, (%rax) movq 0xb0(%rsp), %rax addq $0x20, %rax movq %rax, 0xb0(%rsp) movl 0x64(%rsp), %eax addl $0x8, %eax movl %eax, 0x64(%rsp) jmp 0x1666f7a jmp 0x1667022 movl 0x64(%rsp), %eax addl $0x3, %eax cmpl 0xbc(%rsp), %eax jge 0x16670cb movq 0xb0(%rsp), %rax movq %rax, 0x158(%rsp) movq 0x158(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x30(%rsp) leaq 0xd7(%rsp), %rdi leaq 0x30(%rsp), %rsi vzeroupper callq 0x166e090 vmovaps %xmm0, 0x30(%rsp) movq 0xb0(%rsp), %rax vmovaps 0x30(%rsp), %xmm0 movq %rax, 0x170(%rsp) vmovaps %xmm0, 0x160(%rsp) vmovaps 0x160(%rsp), %xmm0 movq 0x170(%rsp), %rax vmovaps %xmm0, (%rax) movq 0xb0(%rsp), %rax addq $0x10, %rax movq %rax, 0xb0(%rsp) movl 0x64(%rsp), %eax addl $0x4, %eax movl %eax, 0x64(%rsp) jmp 0x1667022 jmp 0x16670cd movl 0x64(%rsp), %eax cmpl 0xbc(%rsp), %eax jge 0x166711f movq 0xb0(%rsp), %rsi leaq 0xd7(%rsp), %rdi vzeroupper callq 0x166eba0 movq 0xb0(%rsp), %rax vmovss %xmm0, 
(%rax) movq 0xb0(%rsp), %rax addq $0x4, %rax movq %rax, 0xb0(%rsp) movl 0x64(%rsp), %eax addl $0x1, %eax movl %eax, 0x64(%rsp) jmp 0x16670cd jmp 0x1667121 movl 0xb8(%rsp), %eax addl $0x1, %eax movl %eax, 0xb8(%rsp) jmp 0x1666b4a xorl %eax, %eax movq %rbp, %rsp popq %rbp vzeroupper retq nopw %cs:(%rax,%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/unaryop_x86_avx.cpp
int ncnn::unary_op_inplace<ncnn::UnaryOp_x86_avx_functor::unary_op_tan>(ncnn::Mat&, ncnn::Option const&)
static int unary_op_inplace(Mat& a, const Option& opt) { Op op; int w = a.w; int h = a.h; int d = a.d; int channels = a.c; int elempack = a.elempack; int size = w * h * d * elempack; #pragma omp parallel for num_threads(opt.num_threads) for (int q = 0; q < channels; q++) { float* ptr = a.channel(q); int i = 0; #if __SSE2__ #if __AVX__ #if __AVX512F__ for (; i + 15 < size; i += 16) { __m512 _p = _mm512_loadu_ps(ptr); _p = op.func_pack16(_p); _mm512_storeu_ps(ptr, _p); ptr += 16; } #endif // __AVX512F__ for (; i + 7 < size; i += 8) { __m256 _p = _mm256_loadu_ps(ptr); _p = op.func_pack8(_p); _mm256_storeu_ps(ptr, _p); ptr += 8; } #endif // __AVX__ for (; i + 3 < size; i += 4) { __m128 _p = _mm_load_ps(ptr); _p = op.func_pack4(_p); _mm_store_ps(ptr, _p); ptr += 4; } #endif // __SSE2__ for (; i < size; i++) { *ptr = op.func(*ptr); ptr++; } } return 0; }
pushq %rbp movq %rsp, %rbp andq $-0x20, %rsp subq $0x200, %rsp # imm = 0x200 movq %rdi, 0xe0(%rsp) movq %rsi, 0xd8(%rsp) movq 0xe0(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0xd0(%rsp) movq 0xe0(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0xcc(%rsp) movq 0xe0(%rsp), %rax movl 0x34(%rax), %eax movl %eax, 0xc8(%rsp) movq 0xe0(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0xc4(%rsp) movq 0xe0(%rsp), %rax movl 0x18(%rax), %eax movl %eax, 0xc0(%rsp) movl 0xd0(%rsp), %eax imull 0xcc(%rsp), %eax imull 0xc8(%rsp), %eax imull 0xc0(%rsp), %eax movl %eax, 0xbc(%rsp) movl $0x0, 0xb8(%rsp) movl 0xb8(%rsp), %eax cmpl 0xc4(%rsp), %eax jge 0x1667e97 movq 0xe0(%rsp), %rcx movl 0xb8(%rsp), %eax leaq 0x68(%rsp), %rdx movq %rdx, 0xf8(%rsp) movq %rcx, 0xf0(%rsp) movl %eax, 0xec(%rsp) movq 0xf0(%rsp), %rax movq %rax, 0x28(%rsp) movb $0x0, 0xeb(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0xec(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x68(%rsp), %r10 movq %r10, 0x1a8(%rsp) movl %r9d, 0x1a4(%rsp) movl %r8d, 0x1a0(%rsp) movl %edi, 0x19c(%rsp) movq %rsi, 0x190(%rsp) movq %rdx, 0x188(%rsp) movl %ecx, 0x184(%rsp) movq %rax, 0x178(%rsp) movq 0x1a8(%rsp), %rcx movq %rcx, 0x20(%rsp) movq 0x190(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x188(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x184(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x178(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x1a4(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x1a0(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x19c(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x1b8(%rsp) movl $0x10, 0x1b4(%rsp) movq 0x1b8(%rsp), %rax movslq 0x1b4(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x1b4(%rsp), %edx movslq %edx, %rdx andq 
%rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x28(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x90(%rsp) cmpl $0x4, 0x28(%rax) jne 0x1667a68 movq 0x28(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0xa8(%rsp) movb $0x1, 0xeb(%rsp) testb $0x1, 0xeb(%rsp) jne 0x1667b94 leaq 0x68(%rsp), %rax movq %rax, 0x108(%rsp) movq 0x108(%rsp), %rax movq %rax, 0x1d8(%rsp) movq 0x1d8(%rsp), %rax movq %rax, 0x18(%rsp) cmpq $0x0, 0x8(%rax) je 0x1667b3a movq 0x18(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x1d4(%rsp) # imm = 0xFFFFFFFF movl 0x1d4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1d0(%rsp) cmpl $0x1, 0x1d0(%rsp) jne 0x1667b3a movq 0x18(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1667b0b movq 0x18(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x1667b09 jmp 0x1667b38 movq 0x18(%rsp), %rax movq (%rax), %rax movq %rax, 0x1e0(%rsp) cmpq $0x0, 0x1e0(%rsp) je 0x1667b36 movq 0x1e0(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x1667b38 jmp 0x1667b3a movq 0x18(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1667b92 movq %rax, %rdi callq 0x5fc90 jmp 0x1667b94 leaq 0x68(%rsp), %rax movq %rax, 0x100(%rsp) movq 0x100(%rsp), %rax movq (%rax), %rax movq %rax, 0x8(%rsp) leaq 0x68(%rsp), %rax movq %rax, 0x110(%rsp) movq 0x110(%rsp), %rax movq %rax, 0x1c8(%rsp) movq 0x1c8(%rsp), %rax movq %rax, 0x10(%rsp) cmpq $0x0, 0x8(%rax) je 0x1667c6d movq 0x10(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x1c4(%rsp) # imm = 0xFFFFFFFF movl 0x1c4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1c0(%rsp) cmpl $0x1, 0x1c0(%rsp) jne 0x1667c6d movq 0x10(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1667c3e movq 0x10(%rsp), %rax movq (%rax), %rsi movq 
0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x1667c3c jmp 0x1667c6b movq 0x10(%rsp), %rax movq (%rax), %rax movq %rax, 0x1e8(%rsp) cmpq $0x0, 0x1e8(%rsp) je 0x1667c69 movq 0x1e8(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x1667c6b jmp 0x1667c6d movq 0x10(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1667cc5 movq %rax, %rdi callq 0x5fc90 movq 0x8(%rsp), %rax movq %rax, 0xb0(%rsp) movl $0x0, 0x64(%rsp) movl 0x64(%rsp), %eax addl $0x7, %eax cmpl 0xbc(%rsp), %eax jge 0x1667d80 movq 0xb0(%rsp), %rax movq %rax, 0x118(%rsp) movq 0x118(%rsp), %rax vmovups (%rax), %ymm0 vmovaps %ymm0, 0x40(%rsp) leaq 0xd7(%rsp), %rdi leaq 0x40(%rsp), %rsi callq 0x1670450 vmovaps %ymm0, 0x40(%rsp) movq 0xb0(%rsp), %rax vmovaps 0x40(%rsp), %ymm0 movq %rax, 0x150(%rsp) vmovaps %ymm0, 0x120(%rsp) vmovaps 0x120(%rsp), %ymm0 movq 0x150(%rsp), %rax vmovups %ymm0, (%rax) movq 0xb0(%rsp), %rax addq $0x20, %rax movq %rax, 0xb0(%rsp) movl 0x64(%rsp), %eax addl $0x8, %eax movl %eax, 0x64(%rsp) jmp 0x1667cda jmp 0x1667d82 movl 0x64(%rsp), %eax addl $0x3, %eax cmpl 0xbc(%rsp), %eax jge 0x1667e2b movq 0xb0(%rsp), %rax movq %rax, 0x158(%rsp) movq 0x158(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x30(%rsp) leaq 0xd7(%rsp), %rdi leaq 0x30(%rsp), %rsi vzeroupper callq 0x16717b0 vmovaps %xmm0, 0x30(%rsp) movq 0xb0(%rsp), %rax vmovaps 0x30(%rsp), %xmm0 movq %rax, 0x170(%rsp) vmovaps %xmm0, 0x160(%rsp) vmovaps 0x160(%rsp), %xmm0 movq 0x170(%rsp), %rax vmovaps %xmm0, (%rax) movq 0xb0(%rsp), %rax addq $0x10, %rax movq %rax, 0xb0(%rsp) movl 0x64(%rsp), %eax addl $0x4, %eax movl %eax, 0x64(%rsp) jmp 0x1667d82 jmp 0x1667e2d movl 0x64(%rsp), %eax cmpl 0xbc(%rsp), %eax jge 0x1667e7f movq 0xb0(%rsp), %rsi leaq 0xd7(%rsp), %rdi vzeroupper callq 0x16726f0 movq 0xb0(%rsp), %rax vmovss %xmm0, 
(%rax) movq 0xb0(%rsp), %rax addq $0x4, %rax movq %rax, 0xb0(%rsp) movl 0x64(%rsp), %eax addl $0x1, %eax movl %eax, 0x64(%rsp) jmp 0x1667e2d jmp 0x1667e81 movl 0xb8(%rsp), %eax addl $0x1, %eax movl %eax, 0xb8(%rsp) jmp 0x16678aa xorl %eax, %eax movq %rbp, %rsp popq %rbp vzeroupper retq nopw %cs:(%rax,%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/unaryop_x86_avx.cpp
int ncnn::unary_op_inplace<ncnn::UnaryOp_x86_avx_functor::unary_op_acos>(ncnn::Mat&, ncnn::Option const&)
static int unary_op_inplace(Mat& a, const Option& opt) { Op op; int w = a.w; int h = a.h; int d = a.d; int channels = a.c; int elempack = a.elempack; int size = w * h * d * elempack; #pragma omp parallel for num_threads(opt.num_threads) for (int q = 0; q < channels; q++) { float* ptr = a.channel(q); int i = 0; #if __SSE2__ #if __AVX__ #if __AVX512F__ for (; i + 15 < size; i += 16) { __m512 _p = _mm512_loadu_ps(ptr); _p = op.func_pack16(_p); _mm512_storeu_ps(ptr, _p); ptr += 16; } #endif // __AVX512F__ for (; i + 7 < size; i += 8) { __m256 _p = _mm256_loadu_ps(ptr); _p = op.func_pack8(_p); _mm256_storeu_ps(ptr, _p); ptr += 8; } #endif // __AVX__ for (; i + 3 < size; i += 4) { __m128 _p = _mm_load_ps(ptr); _p = op.func_pack4(_p); _mm_store_ps(ptr, _p); ptr += 4; } #endif // __SSE2__ for (; i < size; i++) { *ptr = op.func(*ptr); ptr++; } } return 0; }
pushq %rbp movq %rsp, %rbp andq $-0x20, %rsp subq $0x200, %rsp # imm = 0x200 movq %rdi, 0xe0(%rsp) movq %rsi, 0xd8(%rsp) movq 0xe0(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0xd0(%rsp) movq 0xe0(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0xcc(%rsp) movq 0xe0(%rsp), %rax movl 0x34(%rax), %eax movl %eax, 0xc8(%rsp) movq 0xe0(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0xc4(%rsp) movq 0xe0(%rsp), %rax movl 0x18(%rax), %eax movl %eax, 0xc0(%rsp) movl 0xd0(%rsp), %eax imull 0xcc(%rsp), %eax imull 0xc8(%rsp), %eax imull 0xc0(%rsp), %eax movl %eax, 0xbc(%rsp) movl $0x0, 0xb8(%rsp) movl 0xb8(%rsp), %eax cmpl 0xc4(%rsp), %eax jge 0x1668bf7 movq 0xe0(%rsp), %rcx movl 0xb8(%rsp), %eax leaq 0x68(%rsp), %rdx movq %rdx, 0xf8(%rsp) movq %rcx, 0xf0(%rsp) movl %eax, 0xec(%rsp) movq 0xf0(%rsp), %rax movq %rax, 0x28(%rsp) movb $0x0, 0xeb(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0xec(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x68(%rsp), %r10 movq %r10, 0x1a8(%rsp) movl %r9d, 0x1a4(%rsp) movl %r8d, 0x1a0(%rsp) movl %edi, 0x19c(%rsp) movq %rsi, 0x190(%rsp) movq %rdx, 0x188(%rsp) movl %ecx, 0x184(%rsp) movq %rax, 0x178(%rsp) movq 0x1a8(%rsp), %rcx movq %rcx, 0x20(%rsp) movq 0x190(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x188(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x184(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x178(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x1a4(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x1a0(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x19c(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x1b8(%rsp) movl $0x10, 0x1b4(%rsp) movq 0x1b8(%rsp), %rax movslq 0x1b4(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x1b4(%rsp), %edx movslq %edx, %rdx andq 
%rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x28(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x90(%rsp) cmpl $0x4, 0x28(%rax) jne 0x16687c8 movq 0x28(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0xa8(%rsp) movb $0x1, 0xeb(%rsp) testb $0x1, 0xeb(%rsp) jne 0x16688f4 leaq 0x68(%rsp), %rax movq %rax, 0x108(%rsp) movq 0x108(%rsp), %rax movq %rax, 0x1d8(%rsp) movq 0x1d8(%rsp), %rax movq %rax, 0x18(%rsp) cmpq $0x0, 0x8(%rax) je 0x166889a movq 0x18(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x1d4(%rsp) # imm = 0xFFFFFFFF movl 0x1d4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1d0(%rsp) cmpl $0x1, 0x1d0(%rsp) jne 0x166889a movq 0x18(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x166886b movq 0x18(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x1668869 jmp 0x1668898 movq 0x18(%rsp), %rax movq (%rax), %rax movq %rax, 0x1e0(%rsp) cmpq $0x0, 0x1e0(%rsp) je 0x1668896 movq 0x1e0(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x1668898 jmp 0x166889a movq 0x18(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x16688f2 movq %rax, %rdi callq 0x5fc90 jmp 0x16688f4 leaq 0x68(%rsp), %rax movq %rax, 0x100(%rsp) movq 0x100(%rsp), %rax movq (%rax), %rax movq %rax, 0x8(%rsp) leaq 0x68(%rsp), %rax movq %rax, 0x110(%rsp) movq 0x110(%rsp), %rax movq %rax, 0x1c8(%rsp) movq 0x1c8(%rsp), %rax movq %rax, 0x10(%rsp) cmpq $0x0, 0x8(%rax) je 0x16689cd movq 0x10(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x1c4(%rsp) # imm = 0xFFFFFFFF movl 0x1c4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1c0(%rsp) cmpl $0x1, 0x1c0(%rsp) jne 0x16689cd movq 0x10(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x166899e movq 0x10(%rsp), %rax movq (%rax), %rsi movq 
0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x166899c jmp 0x16689cb movq 0x10(%rsp), %rax movq (%rax), %rax movq %rax, 0x1e8(%rsp) cmpq $0x0, 0x1e8(%rsp) je 0x16689c9 movq 0x1e8(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x16689cb jmp 0x16689cd movq 0x10(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1668a25 movq %rax, %rdi callq 0x5fc90 movq 0x8(%rsp), %rax movq %rax, 0xb0(%rsp) movl $0x0, 0x64(%rsp) movl 0x64(%rsp), %eax addl $0x7, %eax cmpl 0xbc(%rsp), %eax jge 0x1668ae0 movq 0xb0(%rsp), %rax movq %rax, 0x118(%rsp) movq 0x118(%rsp), %rax vmovups (%rax), %ymm0 vmovaps %ymm0, 0x40(%rsp) leaq 0xd7(%rsp), %rdi leaq 0x40(%rsp), %rsi callq 0x16728c0 vmovaps %ymm0, 0x40(%rsp) movq 0xb0(%rsp), %rax vmovaps 0x40(%rsp), %ymm0 movq %rax, 0x150(%rsp) vmovaps %ymm0, 0x120(%rsp) vmovaps 0x120(%rsp), %ymm0 movq 0x150(%rsp), %rax vmovups %ymm0, (%rax) movq 0xb0(%rsp), %rax addq $0x20, %rax movq %rax, 0xb0(%rsp) movl 0x64(%rsp), %eax addl $0x8, %eax movl %eax, 0x64(%rsp) jmp 0x1668a3a jmp 0x1668ae2 movl 0x64(%rsp), %eax addl $0x3, %eax cmpl 0xbc(%rsp), %eax jge 0x1668b8b movq 0xb0(%rsp), %rax movq %rax, 0x158(%rsp) movq 0x158(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x30(%rsp) leaq 0xd7(%rsp), %rdi leaq 0x30(%rsp), %rsi vzeroupper callq 0x16729a0 vmovaps %xmm0, 0x30(%rsp) movq 0xb0(%rsp), %rax vmovaps 0x30(%rsp), %xmm0 movq %rax, 0x170(%rsp) vmovaps %xmm0, 0x160(%rsp) vmovaps 0x160(%rsp), %xmm0 movq 0x170(%rsp), %rax vmovaps %xmm0, (%rax) movq 0xb0(%rsp), %rax addq $0x10, %rax movq %rax, 0xb0(%rsp) movl 0x64(%rsp), %eax addl $0x4, %eax movl %eax, 0x64(%rsp) jmp 0x1668ae2 jmp 0x1668b8d movl 0x64(%rsp), %eax cmpl 0xbc(%rsp), %eax jge 0x1668bdf movq 0xb0(%rsp), %rsi leaq 0xd7(%rsp), %rdi vzeroupper callq 0x1672a30 movq 0xb0(%rsp), %rax vmovss %xmm0, 
(%rax) movq 0xb0(%rsp), %rax addq $0x4, %rax movq %rax, 0xb0(%rsp) movl 0x64(%rsp), %eax addl $0x1, %eax movl %eax, 0x64(%rsp) jmp 0x1668b8d jmp 0x1668be1 movl 0xb8(%rsp), %eax addl $0x1, %eax movl %eax, 0xb8(%rsp) jmp 0x166860a xorl %eax, %eax movq %rbp, %rsp popq %rbp vzeroupper retq nopw %cs:(%rax,%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/unaryop_x86_avx.cpp
virtual thunk to ncnn::UnaryOp_x86_avx::forward_inplace(ncnn::Mat&, ncnn::Option const&) const
int UnaryOp_x86_avx::forward_inplace(Mat& bottom_top_blob, const Option& opt) const { using namespace UnaryOp_x86_avx_functor; if (op_type == Operation_ABS) return unary_op_inplace<unary_op_abs>(bottom_top_blob, opt); if (op_type == Operation_NEG) return unary_op_inplace<unary_op_neg>(bottom_top_blob, opt); if (op_type == Operation_FLOOR) return unary_op_inplace<unary_op_floor>(bottom_top_blob, opt); if (op_type == Operation_CEIL) return unary_op_inplace<unary_op_ceil>(bottom_top_blob, opt); if (op_type == Operation_SQUARE) return unary_op_inplace<unary_op_square>(bottom_top_blob, opt); if (op_type == Operation_SQRT) return unary_op_inplace<unary_op_sqrt>(bottom_top_blob, opt); if (op_type == Operation_RSQRT) return unary_op_inplace<unary_op_rsqrt>(bottom_top_blob, opt); if (op_type == Operation_EXP) return unary_op_inplace<unary_op_exp>(bottom_top_blob, opt); if (op_type == Operation_LOG) return unary_op_inplace<unary_op_log>(bottom_top_blob, opt); if (op_type == Operation_SIN) return unary_op_inplace<unary_op_sin>(bottom_top_blob, opt); if (op_type == Operation_COS) return unary_op_inplace<unary_op_cos>(bottom_top_blob, opt); if (op_type == Operation_TAN) return unary_op_inplace<unary_op_tan>(bottom_top_blob, opt); if (op_type == Operation_ASIN) return unary_op_inplace<unary_op_asin>(bottom_top_blob, opt); if (op_type == Operation_ACOS) return unary_op_inplace<unary_op_acos>(bottom_top_blob, opt); if (op_type == Operation_ATAN) return unary_op_inplace<unary_op_atan>(bottom_top_blob, opt); if (op_type == Operation_RECIPROCAL) return unary_op_inplace<unary_op_reciprocal>(bottom_top_blob, opt); if (op_type == Operation_TANH) return unary_op_inplace<unary_op_tanh>(bottom_top_blob, opt); return 0; }
movq %rdi, -0x8(%rsp) movq %rsi, -0x10(%rsp) movq %rdx, -0x18(%rsp) movq -0x8(%rsp), %rdi movq (%rdi), %rax movq -0x58(%rax), %rax addq %rax, %rdi movq -0x10(%rsp), %rsi movq -0x18(%rsp), %rdx jmp 0x1662b50 nopl (%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/unaryop_x86_avx.cpp
ncnn::UnaryOp_x86_avx_functor::unary_op_floor::func_pack4(float vector[4] const&) const
__m128 func_pack4(const __m128& x) const { #if __SSE4_1__ return _mm_floor_ps(x); #endif // __SSE4_1__ // Use negative zero as the sign bit mask. const __m128 magic_negative_zero = _mm_set_ps1(-0.0f); // The smallest float number that have no fractional part. (2^23) const __m128 magic_smallest_no_fraction = _mm_set_ps1(8388608.0f); // absolute = abs(x); __m128 absolute = _mm_andnot_ps(magic_negative_zero, x); // negative_mask = magic_negative_zero && x; __m128 negative_mask = _mm_and_ps(magic_negative_zero, x); // no_fraction = (magic_smallest_no_fraction < absolute); __m128 no_fraction = _mm_cmplt_ps(magic_smallest_no_fraction, absolute); // truncated = static_cast<float>(static_cast<uint32_t>(absolute)); __m128 truncated = _mm_cvtepi32_ps(_mm_cvttps_epi32(absolute)); // truncated_with_sign = (truncated || negative_mask); __m128 truncated_with_sign = _mm_or_ps(truncated, negative_mask); // negative_fix = ((x < truncated_with_sign) ? 1.0f : 0.0f); __m128 negative_fix = _mm_and_ps( _mm_cmplt_ps(x, truncated_with_sign), _mm_set_ps1(1.0f)); // fixed_result = truncated_with_sign - negative_fix; __m128 fixed_result = _mm_sub_ps(truncated_with_sign, negative_fix); // return ((x && no_fraction) || (!no_fraction && fixed_result)); return _mm_or_ps( _mm_and_ps(x, no_fraction), _mm_andnot_ps(no_fraction, fixed_result)); }
subq $0x28, %rsp movq %rdi, 0x20(%rsp) movq %rsi, 0x18(%rsp) movq 0x18(%rsp), %rax vmovaps (%rax), %xmm0 vroundps $0x1, %xmm0, %xmm0 addq $0x28, %rsp retq nopw %cs:(%rax,%rax) nopl (%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/unaryop_x86_avx.cpp
ncnn::UnaryOp_x86_avx_functor::unary_op_exp::func_pack8(float vector[8] const&) const
__m256 func_pack8(const __m256& x) const { return exp256_ps(x); }
pushq %rbp movq %rsp, %rbp andq $-0x20, %rsp subq $0xd80, %rsp # imm = 0xD80 movq %rdi, 0x18(%rsp) movq %rsi, 0x10(%rsp) movq 0x10(%rsp), %rax vmovaps (%rax), %ymm0 vmovaps %ymm0, 0x100(%rsp) vxorps %xmm0, %xmm0, %xmm0 vmovaps %ymm0, 0x1a0(%rsp) vmovaps 0x1a0(%rsp), %ymm0 vmovaps %ymm0, 0xe0(%rsp) vmovaps 0x79955e(%rip), %ymm0 # 0x1e03b80 vmovaps %ymm0, 0x80(%rsp) vmovaps 0x100(%rsp), %ymm0 vmovaps %ymm0, 0x260(%rsp) vmovaps 0x79955b(%rip), %ymm0 # 0x1e03ba0 vmovaps %ymm0, 0x240(%rsp) vmovaps 0x260(%rsp), %ymm0 vmovaps 0x240(%rsp), %ymm1 vminps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x100(%rsp) vmovaps 0x100(%rsp), %ymm0 vmovaps %ymm0, 0x140(%rsp) vmovaps 0x799539(%rip), %ymm0 # 0x1e03bc0 vmovaps %ymm0, 0x120(%rsp) vmovaps 0x140(%rsp), %ymm0 vmovaps 0x120(%rsp), %ymm1 vmaxps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x100(%rsp) vmovaps 0x100(%rsp), %ymm0 vmovaps %ymm0, 0x680(%rsp) vmovaps 0x799517(%rip), %ymm0 # 0x1e03be0 vmovaps %ymm0, 0x660(%rsp) vmovaps 0x799526(%rip), %ymm0 # 0x1e03c00 vmovaps %ymm0, 0x640(%rsp) vmovaps 0x680(%rsp), %ymm2 vmovaps 0x660(%rsp), %ymm1 vmovaps %ymm2, 0x6c0(%rsp) vmovaps %ymm1, 0x6a0(%rsp) vmovaps 0x6c0(%rsp), %ymm1 vmovaps 0x6a0(%rsp), %ymm2 vmulps %ymm2, %ymm1, %ymm2 vmovaps 0x640(%rsp), %ymm1 vmovaps %ymm2, 0x900(%rsp) vmovaps %ymm1, 0x8e0(%rsp) vmovaps 0x900(%rsp), %ymm1 vmovaps 0x8e0(%rsp), %ymm2 vaddps %ymm2, %ymm1, %ymm1 vmovaps %ymm1, 0xc0(%rsp) vmovaps 0xc0(%rsp), %ymm1 vroundps $0x1, %ymm1, %ymm1 vmovaps %ymm1, 0xe0(%rsp) vmovaps 0xe0(%rsp), %ymm2 vmovaps 0xc0(%rsp), %ymm1 vcmpltps %ymm2, %ymm1, %ymm1 vmovaps %ymm1, 0x60(%rsp) vmovaps 0x60(%rsp), %ymm2 vmovaps 0x80(%rsp), %ymm1 vmovaps %ymm2, 0x700(%rsp) vmovaps %ymm1, 0x6e0(%rsp) vmovaps 0x700(%rsp), %ymm1 vmovaps 0x6e0(%rsp), %ymm2 vandps %ymm2, %ymm1, %ymm1 vmovaps %ymm1, 0x60(%rsp) vmovaps 0xe0(%rsp), %ymm2 vmovaps 0x60(%rsp), %ymm1 vmovaps %ymm2, 0x180(%rsp) vmovaps %ymm1, 0x160(%rsp) vmovaps 0x180(%rsp), %ymm1 vmovaps 0x160(%rsp), %ymm2 vsubps %ymm2, %ymm1, %ymm1 vmovaps %ymm1, 
0xc0(%rsp) vmovaps 0xc0(%rsp), %ymm2 vmovaps 0x100(%rsp), %ymm1 vmovaps %ymm2, 0x840(%rsp) vmovaps 0x7993f4(%rip), %ymm2 # 0x1e03c20 vmovaps %ymm2, 0x820(%rsp) vmovaps %ymm1, 0x800(%rsp) vmovaps 0x800(%rsp), %ymm2 vmovaps 0x840(%rsp), %ymm3 vmovaps 0x820(%rsp), %ymm1 vmovaps %ymm3, 0x8c0(%rsp) vmovaps %ymm1, 0x8a0(%rsp) vmovaps 0x8c0(%rsp), %ymm1 vmovaps 0x8a0(%rsp), %ymm3 vmulps %ymm3, %ymm1, %ymm1 vmovaps %ymm2, 0x880(%rsp) vmovaps %ymm1, 0x860(%rsp) vmovaps 0x880(%rsp), %ymm1 vmovaps 0x860(%rsp), %ymm2 vsubps %ymm2, %ymm1, %ymm1 vmovaps %ymm1, 0x100(%rsp) vmovaps 0xc0(%rsp), %ymm2 vmovaps 0x100(%rsp), %ymm1 vmovaps %ymm2, 0x760(%rsp) vmovaps 0x79936b(%rip), %ymm2 # 0x1e03c40 vmovaps %ymm2, 0x740(%rsp) vmovaps %ymm1, 0x720(%rsp) vmovaps 0x720(%rsp), %ymm2 vmovaps 0x760(%rsp), %ymm3 vmovaps 0x740(%rsp), %ymm1 vmovaps %ymm3, 0x7e0(%rsp) vmovaps %ymm1, 0x7c0(%rsp) vmovaps 0x7e0(%rsp), %ymm1 vmovaps 0x7c0(%rsp), %ymm3 vmulps %ymm3, %ymm1, %ymm1 vmovaps %ymm2, 0x7a0(%rsp) vmovaps %ymm1, 0x780(%rsp) vmovaps 0x7a0(%rsp), %ymm1 vmovaps 0x780(%rsp), %ymm2 vsubps %ymm2, %ymm1, %ymm1 vmovaps %ymm1, 0x100(%rsp) vmovaps 0x100(%rsp), %ymm1 vmovaps %ymm1, 0x220(%rsp) vmovaps %ymm1, 0x200(%rsp) vmovaps 0x220(%rsp), %ymm1 vmovaps 0x200(%rsp), %ymm2 vmulps %ymm2, %ymm1, %ymm1 vmovaps %ymm1, 0xe0(%rsp) vmovaps 0x7992c3(%rip), %ymm1 # 0x1e03c60 vmovaps %ymm1, 0x40(%rsp) vmovaps 0x40(%rsp), %ymm2 vmovaps 0x100(%rsp), %ymm1 vmovaps %ymm2, 0x5e0(%rsp) vmovaps %ymm1, 0x5c0(%rsp) vmovaps 0x7992b4(%rip), %ymm1 # 0x1e03c80 vmovaps %ymm1, 0x5a0(%rsp) vmovaps 0x5e0(%rsp), %ymm2 vmovaps 0x5c0(%rsp), %ymm1 vmovaps %ymm2, 0x620(%rsp) vmovaps %ymm1, 0x600(%rsp) vmovaps 0x620(%rsp), %ymm1 vmovaps 0x600(%rsp), %ymm2 vmulps %ymm2, %ymm1, %ymm2 vmovaps 0x5a0(%rsp), %ymm1 vmovaps %ymm2, 0x940(%rsp) vmovaps %ymm1, 0x920(%rsp) vmovaps 0x940(%rsp), %ymm1 vmovaps 0x920(%rsp), %ymm2 vaddps %ymm2, %ymm1, %ymm1 vmovaps %ymm1, 0x40(%rsp) vmovaps 0x40(%rsp), %ymm2 vmovaps 0x100(%rsp), %ymm1 vmovaps %ymm2, 
0x540(%rsp) vmovaps %ymm1, 0x520(%rsp) vmovaps 0x799231(%rip), %ymm1 # 0x1e03ca0 vmovaps %ymm1, 0x500(%rsp) vmovaps 0x540(%rsp), %ymm2 vmovaps 0x520(%rsp), %ymm1 vmovaps %ymm2, 0x580(%rsp) vmovaps %ymm1, 0x560(%rsp) vmovaps 0x580(%rsp), %ymm1 vmovaps 0x560(%rsp), %ymm2 vmulps %ymm2, %ymm1, %ymm2 vmovaps 0x500(%rsp), %ymm1 vmovaps %ymm2, 0x980(%rsp) vmovaps %ymm1, 0x960(%rsp) vmovaps 0x980(%rsp), %ymm1 vmovaps 0x960(%rsp), %ymm2 vaddps %ymm2, %ymm1, %ymm1 vmovaps %ymm1, 0x40(%rsp) vmovaps 0x40(%rsp), %ymm2 vmovaps 0x100(%rsp), %ymm1 vmovaps %ymm2, 0x4a0(%rsp) vmovaps %ymm1, 0x480(%rsp) vmovaps 0x7991ae(%rip), %ymm1 # 0x1e03cc0 vmovaps %ymm1, 0x460(%rsp) vmovaps 0x4a0(%rsp), %ymm2 vmovaps 0x480(%rsp), %ymm1 vmovaps %ymm2, 0x4e0(%rsp) vmovaps %ymm1, 0x4c0(%rsp) vmovaps 0x4e0(%rsp), %ymm1 vmovaps 0x4c0(%rsp), %ymm2 vmulps %ymm2, %ymm1, %ymm2 vmovaps 0x460(%rsp), %ymm1 vmovaps %ymm2, 0x9c0(%rsp) vmovaps %ymm1, 0x9a0(%rsp) vmovaps 0x9c0(%rsp), %ymm1 vmovaps 0x9a0(%rsp), %ymm2 vaddps %ymm2, %ymm1, %ymm1 vmovaps %ymm1, 0x40(%rsp) vmovaps 0x40(%rsp), %ymm2 vmovaps 0x100(%rsp), %ymm1 vmovaps %ymm2, 0x400(%rsp) vmovaps %ymm1, 0x3e0(%rsp) vmovaps 0x79912b(%rip), %ymm1 # 0x1e03ce0 vmovaps %ymm1, 0x3c0(%rsp) vmovaps 0x400(%rsp), %ymm2 vmovaps 0x3e0(%rsp), %ymm1 vmovaps %ymm2, 0x440(%rsp) vmovaps %ymm1, 0x420(%rsp) vmovaps 0x440(%rsp), %ymm1 vmovaps 0x420(%rsp), %ymm2 vmulps %ymm2, %ymm1, %ymm2 vmovaps 0x3c0(%rsp), %ymm1 vmovaps %ymm2, 0xa00(%rsp) vmovaps %ymm1, 0x9e0(%rsp) vmovaps 0xa00(%rsp), %ymm1 vmovaps 0x9e0(%rsp), %ymm2 vaddps %ymm2, %ymm1, %ymm1 vmovaps %ymm1, 0x40(%rsp) vmovaps 0x40(%rsp), %ymm2 vmovaps 0x100(%rsp), %ymm1 vmovaps %ymm2, 0x360(%rsp) vmovaps %ymm1, 0x340(%rsp) vmovaps %ymm0, 0x320(%rsp) vmovaps 0x360(%rsp), %ymm1 vmovaps 0x340(%rsp), %ymm0 vmovaps %ymm1, 0x3a0(%rsp) vmovaps %ymm0, 0x380(%rsp) vmovaps 0x3a0(%rsp), %ymm0 vmovaps 0x380(%rsp), %ymm1 vmulps %ymm1, %ymm0, %ymm1 vmovaps 0x320(%rsp), %ymm0 vmovaps %ymm1, 0xa40(%rsp) vmovaps %ymm0, 0xa20(%rsp) 
vmovaps 0xa40(%rsp), %ymm0 vmovaps 0xa20(%rsp), %ymm1 vaddps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x40(%rsp) vmovaps 0x40(%rsp), %ymm2 vmovaps 0xe0(%rsp), %ymm1 vmovaps 0x100(%rsp), %ymm0 vmovaps %ymm2, 0x2c0(%rsp) vmovaps %ymm1, 0x2a0(%rsp) vmovaps %ymm0, 0x280(%rsp) vmovaps 0x2c0(%rsp), %ymm1 vmovaps 0x2a0(%rsp), %ymm0 vmovaps %ymm1, 0x300(%rsp) vmovaps %ymm0, 0x2e0(%rsp) vmovaps 0x300(%rsp), %ymm0 vmovaps 0x2e0(%rsp), %ymm1 vmulps %ymm1, %ymm0, %ymm1 vmovaps 0x280(%rsp), %ymm0 vmovaps %ymm1, 0xa80(%rsp) vmovaps %ymm0, 0xa60(%rsp) vmovaps 0xa80(%rsp), %ymm0 vmovaps 0xa60(%rsp), %ymm1 vaddps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x40(%rsp) vmovaps 0x40(%rsp), %ymm1 vmovaps 0x80(%rsp), %ymm0 vmovaps %ymm1, 0xac0(%rsp) vmovaps %ymm0, 0xaa0(%rsp) vmovaps 0xac0(%rsp), %ymm0 vmovaps 0xaa0(%rsp), %ymm1 vaddps %ymm1, %ymm0, %ymm0 vmovaps %ymm0, 0x40(%rsp) vmovaps 0xc0(%rsp), %ymm0 vmovaps %ymm0, 0xae0(%rsp) vcvttps2dq 0xae0(%rsp), %ymm0 vmovaps %ymm0, 0xa0(%rsp) vmovaps 0xa0(%rsp), %ymm0 vmovaps %ymm0, 0xbe0(%rsp) vmovaps 0x798f17(%rip), %ymm0 # 0x1e03d00 vmovaps %ymm0, 0xbc0(%rsp) vmovdqa 0xbe0(%rsp), %ymm0 vmovdqa %ymm0, 0xb40(%rsp) vmovdqa 0xb40(%rsp), %xmm0 vmovdqa %xmm0, 0xbb0(%rsp) vmovdqa 0xb50(%rsp), %xmm0 vmovdqa %xmm0, 0xba0(%rsp) vmovdqa 0xbc0(%rsp), %ymm0 vmovdqa %ymm0, 0xb20(%rsp) vmovdqa 0xb20(%rsp), %xmm0 vmovdqa %xmm0, 0xb90(%rsp) vmovdqa 0xb30(%rsp), %xmm0 vmovdqa %xmm0, 0xb80(%rsp) vmovdqa 0xbb0(%rsp), %xmm1 vmovdqa 0xb90(%rsp), %xmm0 vmovdqa %xmm1, 0xd20(%rsp) vmovdqa %xmm0, 0xd10(%rsp) vmovdqa 0xd20(%rsp), %xmm0 vmovdqa 0xd10(%rsp), %xmm1 vpaddd %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0xbb0(%rsp) vmovdqa 0xba0(%rsp), %xmm1 vmovdqa 0xb80(%rsp), %xmm0 vmovdqa %xmm1, 0xd00(%rsp) vmovdqa %xmm0, 0xcf0(%rsp) vmovdqa 0xd00(%rsp), %xmm0 vmovdqa 0xcf0(%rsp), %xmm1 vpaddd %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0xba0(%rsp) vmovdqa 0xbb0(%rsp), %xmm0 vmovdqa %xmm0, 0xb00(%rsp) vmovdqa 0xba0(%rsp), %xmm0 vmovdqa %xmm0, 0xb10(%rsp) vmovdqa 0xb00(%rsp), %ymm0 vmovdqa %ymm0, 
0xb60(%rsp) vmovdqa 0xb60(%rsp), %ymm0 vmovdqa %ymm0, 0xa0(%rsp) vmovdqa 0xa0(%rsp), %ymm0 vmovdqa %ymm0, 0xca0(%rsp) movl $0x17, 0xc9c(%rsp) vmovdqa 0xca0(%rsp), %ymm0 vmovdqa %ymm0, 0xc20(%rsp) vmovdqa 0xc20(%rsp), %xmm0 vmovdqa %xmm0, 0xc80(%rsp) vmovdqa 0xc30(%rsp), %xmm0 vmovdqa %xmm0, 0xc70(%rsp) vmovdqa 0xc80(%rsp), %xmm0 movl 0xc9c(%rsp), %eax vmovdqa %xmm0, 0xd60(%rsp) movl %eax, 0xd5c(%rsp) vmovdqa 0xd60(%rsp), %xmm0 movl 0xd5c(%rsp), %eax vmovd %eax, %xmm1 vpslld %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0xc80(%rsp) vmovdqa 0xc70(%rsp), %xmm0 movl 0xc9c(%rsp), %eax vmovdqa %xmm0, 0xd40(%rsp) movl %eax, 0xd3c(%rsp) vmovdqa 0xd40(%rsp), %xmm0 movl 0xd3c(%rsp), %eax vmovd %eax, %xmm1 vpslld %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0xc70(%rsp) vmovdqa 0xc80(%rsp), %xmm0 vmovdqa %xmm0, 0xc00(%rsp) vmovdqa 0xc70(%rsp), %xmm0 vmovdqa %xmm0, 0xc10(%rsp) vmovdqa 0xc00(%rsp), %ymm0 vmovdqa %ymm0, 0xc40(%rsp) vmovdqa 0xc40(%rsp), %ymm0 vmovdqa %ymm0, 0xa0(%rsp) vmovdqa 0xa0(%rsp), %ymm0 vmovdqa %ymm0, 0xcc0(%rsp) vmovdqa 0xcc0(%rsp), %ymm0 vmovaps %ymm0, 0x20(%rsp) vmovaps 0x40(%rsp), %ymm1 vmovaps 0x20(%rsp), %ymm0 vmovaps %ymm1, 0x1e0(%rsp) vmovaps %ymm0, 0x1c0(%rsp) vmovaps 0x1e0(%rsp), %ymm0 vmulps 0x1c0(%rsp), %ymm0, %ymm0 vmovaps %ymm0, 0x40(%rsp) vmovaps 0x40(%rsp), %ymm0 movq %rbp, %rsp popq %rbp retq nopl (%rax,%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/unaryop_x86_avx.cpp
ncnn::UnaryOp_x86_avx_functor::unary_op_sin::func_pack4(float vector[4] const&) const
__m128 func_pack4(const __m128& x) const { return sin_ps(x); }
subq $0x708, %rsp # imm = 0x708 movq %rdi, -0x78(%rsp) movq %rsi, -0x80(%rsp) movq -0x80(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x40(%rsp) vxorps %xmm0, %xmm0, %xmm0 vmovaps %xmm0, 0x70(%rsp) vmovaps 0x70(%rsp), %xmm0 vmovaps %xmm0, 0x20(%rsp) vmovaps 0x40(%rsp), %xmm0 vmovaps %xmm0, (%rsp) vmovaps 0x40(%rsp), %xmm0 vmovaps %xmm0, 0x210(%rsp) vmovaps 0x7a10b8(%rip), %xmm0 # 0x1e0f1a0 vmovaps %xmm0, 0x200(%rsp) vmovdqa 0x210(%rsp), %xmm0 vmovdqa 0x200(%rsp), %xmm1 vpand %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, 0x40(%rsp) vmovaps (%rsp), %xmm0 vmovaps %xmm0, 0x1f0(%rsp) vmovaps 0x798abd(%rip), %xmm0 # 0x1e06be0 vmovaps %xmm0, 0x1e0(%rsp) vmovdqa 0x1f0(%rsp), %xmm0 vmovdqa 0x1e0(%rsp), %xmm1 vpand %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, (%rsp) vmovaps 0x40(%rsp), %xmm0 vmovaps %xmm0, 0x110(%rsp) vmovaps 0x7a5912(%rip), %xmm0 # 0x1e13a70 vmovaps %xmm0, 0x100(%rsp) vmovaps 0x110(%rsp), %xmm0 vmovaps 0x100(%rsp), %xmm1 vmulps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, -0x10(%rsp) vmovaps -0x10(%rsp), %xmm0 vmovaps %xmm0, 0x1a0(%rsp) vcvttps2dq 0x1a0(%rsp), %xmm0 vmovdqa %xmm0, -0x30(%rsp) vmovdqa -0x30(%rsp), %xmm0 vmovdqa %xmm0, 0x130(%rsp) vmovdqa 0x7a58c8(%rip), %xmm0 # 0x1e13a80 vmovdqa %xmm0, 0x120(%rsp) vmovdqa 0x130(%rsp), %xmm0 vmovdqa 0x120(%rsp), %xmm1 vpaddd %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, -0x30(%rsp) vmovdqa -0x30(%rsp), %xmm0 vmovdqa %xmm0, 0x680(%rsp) vmovdqa 0x7a589c(%rip), %xmm0 # 0x1e13a90 vmovdqa %xmm0, 0x670(%rsp) vmovdqa 0x680(%rsp), %xmm0 vmovdqa 0x670(%rsp), %xmm1 vpand %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, -0x30(%rsp) vmovdqa -0x30(%rsp), %xmm0 vmovdqa %xmm0, 0x1b0(%rsp) vcvtdq2ps 0x1b0(%rsp), %xmm0 vmovaps %xmm0, -0x10(%rsp) vmovdqa -0x30(%rsp), %xmm0 vmovdqa %xmm0, 0x660(%rsp) vmovdqa 0x7a5852(%rip), %xmm0 # 0x1e13aa0 vmovdqa %xmm0, 0x650(%rsp) vmovdqa 0x660(%rsp), %xmm0 vmovdqa 0x650(%rsp), %xmm1 vpand %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, -0x20(%rsp) vmovdqa -0x20(%rsp), %xmm0 vmovdqa %xmm0, 0x150(%rsp) movl $0x1d, 0x14c(%rsp) vmovdqa 0x150(%rsp), %xmm0 
vmovd 0x14c(%rsp), %xmm1 vpslld %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, -0x20(%rsp) vmovdqa -0x30(%rsp), %xmm0 vmovdqa %xmm0, 0x640(%rsp) vmovdqa 0x7a57f0(%rip), %xmm0 # 0x1e13ab0 vmovdqa %xmm0, 0x630(%rsp) vmovdqa 0x640(%rsp), %xmm0 vmovdqa 0x630(%rsp), %xmm1 vpand %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, -0x30(%rsp) vmovdqa -0x30(%rsp), %xmm1 vxorps %xmm0, %xmm0, %xmm0 vmovdqa %xmm0, 0x6b0(%rsp) vmovdqa 0x6b0(%rsp), %xmm0 vmovdqa %xmm1, 0x6a0(%rsp) vmovdqa %xmm0, 0x690(%rsp) vmovdqa 0x6a0(%rsp), %xmm0 vmovdqa 0x690(%rsp), %xmm1 vpcmpeqd %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, -0x30(%rsp) vmovdqa -0x20(%rsp), %xmm0 vmovdqa %xmm0, 0x620(%rsp) vmovdqa 0x620(%rsp), %xmm0 vmovdqa %xmm0, -0x40(%rsp) vmovdqa -0x30(%rsp), %xmm0 vmovdqa %xmm0, 0x610(%rsp) vmovdqa 0x610(%rsp), %xmm0 vmovdqa %xmm0, -0x50(%rsp) vmovaps (%rsp), %xmm1 vmovaps -0x40(%rsp), %xmm0 vmovaps %xmm1, 0x6f0(%rsp) vmovaps %xmm0, 0x6e0(%rsp) vmovdqa 0x6f0(%rsp), %xmm0 vmovdqa 0x6e0(%rsp), %xmm1 vpxor %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, (%rsp) vmovaps 0x7a5715(%rip), %xmm0 # 0x1e13ac0 vmovaps %xmm0, 0x30(%rsp) vmovaps 0x7a5717(%rip), %xmm0 # 0x1e13ad0 vmovaps %xmm0, 0x20(%rsp) vmovaps 0x7a5719(%rip), %xmm0 # 0x1e13ae0 vmovaps %xmm0, 0x10(%rsp) vmovaps -0x10(%rsp), %xmm2 vmovaps 0x30(%rsp), %xmm1 vmovaps 0x40(%rsp), %xmm0 vmovaps %xmm2, 0x5c0(%rsp) vmovaps %xmm1, 0x5b0(%rsp) vmovaps %xmm0, 0x5a0(%rsp) vmovaps 0x5c0(%rsp), %xmm1 vmovaps 0x5b0(%rsp), %xmm0 vmovaps %xmm1, 0x5e0(%rsp) vmovaps %xmm0, 0x5d0(%rsp) vmovaps 0x5e0(%rsp), %xmm0 vmovaps 0x5d0(%rsp), %xmm1 vmulps %xmm1, %xmm0, %xmm1 vmovaps 0x5a0(%rsp), %xmm0 vmovaps %xmm1, 0x600(%rsp) vmovaps %xmm0, 0x5f0(%rsp) vmovaps 0x600(%rsp), %xmm0 vmovaps 0x5f0(%rsp), %xmm1 vaddps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x40(%rsp) vmovaps -0x10(%rsp), %xmm2 vmovaps 0x20(%rsp), %xmm1 vmovaps 0x40(%rsp), %xmm0 vmovaps %xmm2, 0x550(%rsp) vmovaps %xmm1, 0x540(%rsp) vmovaps %xmm0, 0x530(%rsp) vmovaps 0x550(%rsp), %xmm1 vmovaps 0x540(%rsp), %xmm0 vmovaps %xmm1, 0x570(%rsp) vmovaps %xmm0, 
0x560(%rsp) vmovaps 0x570(%rsp), %xmm0 vmovaps 0x560(%rsp), %xmm1 vmulps %xmm1, %xmm0, %xmm1 vmovaps 0x530(%rsp), %xmm0 vmovaps %xmm1, 0x590(%rsp) vmovaps %xmm0, 0x580(%rsp) vmovaps 0x590(%rsp), %xmm0 vmovaps 0x580(%rsp), %xmm1 vaddps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x40(%rsp) vmovaps -0x10(%rsp), %xmm2 vmovaps 0x10(%rsp), %xmm1 vmovaps 0x40(%rsp), %xmm0 vmovaps %xmm2, 0x4e0(%rsp) vmovaps %xmm1, 0x4d0(%rsp) vmovaps %xmm0, 0x4c0(%rsp) vmovaps 0x4e0(%rsp), %xmm1 vmovaps 0x4d0(%rsp), %xmm0 vmovaps %xmm1, 0x500(%rsp) vmovaps %xmm0, 0x4f0(%rsp) vmovaps 0x500(%rsp), %xmm0 vmovaps 0x4f0(%rsp), %xmm1 vmulps %xmm1, %xmm0, %xmm1 vmovaps 0x4c0(%rsp), %xmm0 vmovaps %xmm1, 0x520(%rsp) vmovaps %xmm0, 0x510(%rsp) vmovaps 0x520(%rsp), %xmm0 vmovaps 0x510(%rsp), %xmm1 vaddps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, 0x40(%rsp) vmovaps 0x7a5541(%rip), %xmm0 # 0x1e13af0 vmovaps %xmm0, -0x10(%rsp) vmovaps 0x40(%rsp), %xmm0 vmovaps %xmm0, 0xf0(%rsp) vmovaps %xmm0, 0xe0(%rsp) vmovaps 0xf0(%rsp), %xmm0 vmovaps 0xe0(%rsp), %xmm1 vmulps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, -0x60(%rsp) vmovaps -0x10(%rsp), %xmm1 vmovaps -0x60(%rsp), %xmm0 vmovaps %xmm1, 0x470(%rsp) vmovaps %xmm0, 0x460(%rsp) vmovaps 0x7a54f1(%rip), %xmm0 # 0x1e13b00 vmovaps %xmm0, 0x450(%rsp) vmovaps 0x470(%rsp), %xmm1 vmovaps 0x460(%rsp), %xmm0 vmovaps %xmm1, 0x490(%rsp) vmovaps %xmm0, 0x480(%rsp) vmovaps 0x490(%rsp), %xmm0 vmovaps 0x480(%rsp), %xmm1 vmulps %xmm1, %xmm0, %xmm1 vmovaps 0x450(%rsp), %xmm0 vmovaps %xmm1, 0x4b0(%rsp) vmovaps %xmm0, 0x4a0(%rsp) vmovaps 0x4b0(%rsp), %xmm0 vmovaps 0x4a0(%rsp), %xmm1 vaddps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, -0x10(%rsp) vmovaps -0x10(%rsp), %xmm1 vmovaps -0x60(%rsp), %xmm0 vmovaps %xmm1, 0x400(%rsp) vmovaps %xmm0, 0x3f0(%rsp) vmovaps 0x7a5461(%rip), %xmm0 # 0x1e13b10 vmovaps %xmm0, 0x3e0(%rsp) vmovaps 0x400(%rsp), %xmm1 vmovaps 0x3f0(%rsp), %xmm0 vmovaps %xmm1, 0x420(%rsp) vmovaps %xmm0, 0x410(%rsp) vmovaps 0x420(%rsp), %xmm0 vmovaps 0x410(%rsp), %xmm1 vmulps %xmm1, %xmm0, %xmm1 vmovaps 
0x3e0(%rsp), %xmm0 vmovaps %xmm1, 0x440(%rsp) vmovaps %xmm0, 0x430(%rsp) vmovaps 0x440(%rsp), %xmm0 vmovaps 0x430(%rsp), %xmm1 vaddps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, -0x10(%rsp) vmovaps -0x10(%rsp), %xmm1 vmovaps -0x60(%rsp), %xmm0 vmovaps %xmm1, 0xd0(%rsp) vmovaps %xmm0, 0xc0(%rsp) vmovaps 0xd0(%rsp), %xmm0 vmovaps 0xc0(%rsp), %xmm1 vmulps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, -0x10(%rsp) vmovaps -0x10(%rsp), %xmm1 vmovaps -0x60(%rsp), %xmm0 vmovaps %xmm1, 0xb0(%rsp) vmovaps %xmm0, 0xa0(%rsp) vmovaps 0xb0(%rsp), %xmm0 vmovaps 0xa0(%rsp), %xmm1 vmulps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, -0x10(%rsp) vmovaps -0x60(%rsp), %xmm1 vmovaps -0x10(%rsp), %xmm0 vmovaps %xmm1, 0x240(%rsp) vmovaps 0x794336(%rip), %xmm1 # 0x1e02af0 vmovaps %xmm1, 0x230(%rsp) vmovaps %xmm0, 0x220(%rsp) vmovaps 0x220(%rsp), %xmm1 vmovaps 0x240(%rsp), %xmm2 vmovaps 0x230(%rsp), %xmm0 vmovaps %xmm2, 0x280(%rsp) vmovaps %xmm0, 0x270(%rsp) vmovaps 0x280(%rsp), %xmm0 vmovaps 0x270(%rsp), %xmm2 vmulps %xmm2, %xmm0, %xmm0 vmovaps %xmm1, 0x260(%rsp) vmovaps %xmm0, 0x250(%rsp) vmovaps 0x260(%rsp), %xmm0 vmovaps 0x250(%rsp), %xmm1 vsubps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, -0x10(%rsp) vmovaps -0x10(%rsp), %xmm0 vmovaps %xmm0, 0x190(%rsp) vmovaps 0x79425c(%rip), %xmm0 # 0x1e02ab0 vmovaps %xmm0, 0x180(%rsp) vmovaps 0x190(%rsp), %xmm0 vmovaps 0x180(%rsp), %xmm1 vaddps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, -0x10(%rsp) vmovaps 0x7a529f(%rip), %xmm0 # 0x1e13b20 vmovaps %xmm0, -0x70(%rsp) vmovaps -0x70(%rsp), %xmm1 vmovaps -0x60(%rsp), %xmm0 vmovaps %xmm1, 0x390(%rsp) vmovaps %xmm0, 0x380(%rsp) vmovaps 0x7a5283(%rip), %xmm0 # 0x1e13b30 vmovaps %xmm0, 0x370(%rsp) vmovaps 0x390(%rsp), %xmm1 vmovaps 0x380(%rsp), %xmm0 vmovaps %xmm1, 0x3b0(%rsp) vmovaps %xmm0, 0x3a0(%rsp) vmovaps 0x3b0(%rsp), %xmm0 vmovaps 0x3a0(%rsp), %xmm1 vmulps %xmm1, %xmm0, %xmm1 vmovaps 0x370(%rsp), %xmm0 vmovaps %xmm1, 0x3d0(%rsp) vmovaps %xmm0, 0x3c0(%rsp) vmovaps 0x3d0(%rsp), %xmm0 vmovaps 0x3c0(%rsp), %xmm1 vaddps %xmm1, %xmm0, %xmm0 vmovaps 
%xmm0, -0x70(%rsp) vmovaps -0x70(%rsp), %xmm1 vmovaps -0x60(%rsp), %xmm0 vmovaps %xmm1, 0x320(%rsp) vmovaps %xmm0, 0x310(%rsp) vmovaps 0x7a51f3(%rip), %xmm0 # 0x1e13b40 vmovaps %xmm0, 0x300(%rsp) vmovaps 0x320(%rsp), %xmm1 vmovaps 0x310(%rsp), %xmm0 vmovaps %xmm1, 0x340(%rsp) vmovaps %xmm0, 0x330(%rsp) vmovaps 0x340(%rsp), %xmm0 vmovaps 0x330(%rsp), %xmm1 vmulps %xmm1, %xmm0, %xmm1 vmovaps 0x300(%rsp), %xmm0 vmovaps %xmm1, 0x360(%rsp) vmovaps %xmm0, 0x350(%rsp) vmovaps 0x360(%rsp), %xmm0 vmovaps 0x350(%rsp), %xmm1 vaddps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, -0x70(%rsp) vmovaps -0x70(%rsp), %xmm1 vmovaps -0x60(%rsp), %xmm0 vmovaps %xmm1, 0x90(%rsp) vmovaps %xmm0, 0x80(%rsp) vmovaps 0x90(%rsp), %xmm0 vmovaps 0x80(%rsp), %xmm1 vmulps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, -0x70(%rsp) vmovaps -0x70(%rsp), %xmm1 vmovaps 0x40(%rsp), %xmm0 vmovaps %xmm1, 0x2b0(%rsp) vmovaps %xmm0, 0x2a0(%rsp) vmovaps %xmm0, 0x290(%rsp) vmovaps 0x2b0(%rsp), %xmm1 vmovaps 0x2a0(%rsp), %xmm0 vmovaps %xmm1, 0x2d0(%rsp) vmovaps %xmm0, 0x2c0(%rsp) vmovaps 0x2d0(%rsp), %xmm0 vmovaps 0x2c0(%rsp), %xmm1 vmulps %xmm1, %xmm0, %xmm1 vmovaps 0x290(%rsp), %xmm0 vmovaps %xmm1, 0x2f0(%rsp) vmovaps %xmm0, 0x2e0(%rsp) vmovaps 0x2f0(%rsp), %xmm0 vmovaps 0x2e0(%rsp), %xmm1 vaddps %xmm1, %xmm0, %xmm0 vmovaps %xmm0, -0x70(%rsp) vmovaps -0x50(%rsp), %xmm0 vmovaps %xmm0, 0x10(%rsp) vmovaps 0x10(%rsp), %xmm1 vmovaps -0x70(%rsp), %xmm0 vmovaps %xmm1, 0x1d0(%rsp) vmovaps %xmm0, 0x1c0(%rsp) vmovdqa 0x1d0(%rsp), %xmm0 vmovdqa 0x1c0(%rsp), %xmm1 vpand %xmm1, %xmm0, %xmm0 vmovdqa %xmm0, -0x70(%rsp) vmovaps 0x10(%rsp), %xmm1 vmovaps -0x10(%rsp), %xmm0 vmovaps %xmm1, 0x60(%rsp) vmovaps %xmm0, 0x50(%rsp) vmovdqa 0x60(%rsp), %xmm0 vpcmpeqd %xmm1, %xmm1, %xmm1 vpxor %xmm1, %xmm0, %xmm0 vmovaps 0x50(%rsp), %xmm1 vpand %xmm1, %xmm0, %xmm0 vmovaps %xmm0, -0x10(%rsp) vmovaps -0x10(%rsp), %xmm1 vmovaps -0x70(%rsp), %xmm0 vmovaps %xmm1, 0x170(%rsp) vmovaps %xmm0, 0x160(%rsp) vmovaps 0x170(%rsp), %xmm0 vaddps 0x160(%rsp), %xmm0, %xmm0 
vmovaps %xmm0, -0x10(%rsp) vmovaps -0x10(%rsp), %xmm1 vmovaps (%rsp), %xmm0 vmovaps %xmm1, 0x6d0(%rsp) vmovaps %xmm0, 0x6c0(%rsp) vmovaps 0x6d0(%rsp), %xmm0 vmovaps 0x6c0(%rsp), %xmm1 vpxor %xmm1, %xmm0, %xmm0 vmovaps %xmm0, -0x10(%rsp) vmovaps -0x10(%rsp), %xmm0 addq $0x708, %rsp # imm = 0x708 retq nopw %cs:(%rax,%rax) nopl (%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/unaryop_x86_avx.cpp
ncnn::UnaryOp_x86_avx_functor::unary_op_asin::func_pack4(float vector[4] const&) const
__m128 func_pack4(const __m128& x) const { //TODO sse optimize float tmp[4]; _mm_storeu_ps(tmp, x); tmp[0] = asin(tmp[0]); tmp[1] = asin(tmp[1]); tmp[2] = asin(tmp[2]); tmp[3] = asin(tmp[3]); return _mm_loadu_ps(tmp); }
subq $0x48, %rsp movq %rdi, 0x18(%rsp) movq %rsi, 0x10(%rsp) movq %rsp, %rax movq 0x10(%rsp), %rcx vmovaps (%rcx), %xmm0 movq %rax, 0x38(%rsp) vmovaps %xmm0, 0x20(%rsp) vmovaps 0x20(%rsp), %xmm0 movq 0x38(%rsp), %rax vmovups %xmm0, (%rax) vmovss (%rsp), %xmm0 callq 0x1631460 vmovss %xmm0, (%rsp) vmovss 0x4(%rsp), %xmm0 callq 0x1631460 vmovss %xmm0, 0x4(%rsp) vmovss 0x8(%rsp), %xmm0 callq 0x1631460 vmovss %xmm0, 0x8(%rsp) vmovss 0xc(%rsp), %xmm0 callq 0x1631460 vmovss %xmm0, 0xc(%rsp) movq %rsp, %rax movq %rax, 0x40(%rsp) movq 0x40(%rsp), %rax vmovups (%rax), %xmm0 addq $0x48, %rsp retq nopl (%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/unaryop_x86_avx.cpp
ncnn::UnaryOp_x86_avx_functor::unary_op_acos::func_pack4(float vector[4] const&) const
__m128 func_pack4(const __m128& x) const { //TODO sse optimize float tmp[4]; _mm_storeu_ps(tmp, x); tmp[0] = acos(tmp[0]); tmp[1] = acos(tmp[1]); tmp[2] = acos(tmp[2]); tmp[3] = acos(tmp[3]); return _mm_loadu_ps(tmp); }
subq $0x48, %rsp movq %rdi, 0x18(%rsp) movq %rsi, 0x10(%rsp) movq %rsp, %rax movq 0x10(%rsp), %rcx vmovaps (%rcx), %xmm0 movq %rax, 0x38(%rsp) vmovaps %xmm0, 0x20(%rsp) vmovaps 0x20(%rsp), %xmm0 movq 0x38(%rsp), %rax vmovups %xmm0, (%rax) vmovss (%rsp), %xmm0 callq 0x16314b0 vmovss %xmm0, (%rsp) vmovss 0x4(%rsp), %xmm0 callq 0x16314b0 vmovss %xmm0, 0x4(%rsp) vmovss 0x8(%rsp), %xmm0 callq 0x16314b0 vmovss %xmm0, 0x8(%rsp) vmovss 0xc(%rsp), %xmm0 callq 0x16314b0 vmovss %xmm0, 0xc(%rsp) movq %rsp, %rax movq %rax, 0x40(%rsp) movq 0x40(%rsp), %rax vmovups (%rax), %xmm0 addq $0x48, %rsp retq nopl (%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/unaryop_x86_avx.cpp
ncnn::ConvolutionDepthWise_x86::create_group_ops(ncnn::Option const&)
int ConvolutionDepthWise_x86::create_group_ops(const Option& opt) { // create Convolution op for each group const int maxk = kernel_w * kernel_h; int channels = (weight_data_size / group) / maxk / (num_output / group) * group; for (int i = 0; i < (int)group_ops.size(); i++) delete group_ops[i]; group_ops.clear(); const int channels_g = channels / group; const int num_output_g = num_output / group; group_ops.resize(group); for (int g = 0; g < group; g++) { Mat weight_data_g = weight_data.range(maxk * channels_g * num_output_g * g, maxk * channels_g * num_output_g).clone(); Mat bias_data_g; if (bias_term) bias_data_g = bias_data.range(num_output_g * g, num_output_g); ncnn::Layer* op = ncnn::create_layer(ncnn::LayerType::Convolution); // set param ncnn::ParamDict pd; pd.set(0, num_output_g); // num_output pd.set(1, kernel_w); pd.set(11, kernel_h); pd.set(2, dilation_w); pd.set(12, dilation_h); pd.set(3, stride_w); pd.set(13, stride_h); pd.set(4, 0); // pad_w pd.set(14, 0); // pad_h pd.set(5, bias_term); pd.set(6, maxk * channels_g * num_output_g); // weight_data_size pd.set(8, int8_scale_term); pd.set(9, activation_type); pd.set(10, activation_params); op->load_param(pd); // set weights if (bias_term) { ncnn::Mat weights[5]; weights[0] = weight_data_g; weights[1] = bias_data_g; #if NCNN_INT8 if (int8_scale_term) { Mat weight_data_int8_scales_g(num_output_g); weight_data_int8_scales_g.fill(weight_data_int8_scales[g]); weights[2] = weight_data_int8_scales_g; weights[3] = bottom_blob_int8_scales.range(g, 1); } if (int8_scale_term > 100) { weights[4] = top_blob_int8_scales.range(g, 1); } #endif op->load_model(ModelBinFromMatArray(weights)); } else { ncnn::Mat weights[4]; weights[0] = weight_data_g; #if NCNN_INT8 if (int8_scale_term) { Mat weight_data_int8_scales_g(num_output_g); weight_data_int8_scales_g.fill(weight_data_int8_scales[g]); weights[1] = weight_data_int8_scales_g; weights[2] = bottom_blob_int8_scales.range(g, 1); } if (int8_scale_term > 100) { weights[3] = 
top_blob_int8_scales.range(g, 1); } #endif op->load_model(ModelBinFromMatArray(weights)); } op->create_pipeline(opt); group_ops[g] = op; } return 0; }
subq $0x1058, %rsp # imm = 0x1058 movq %rdi, 0x868(%rsp) movq %rsi, 0x860(%rsp) movq 0x868(%rsp), %rcx movq %rcx, 0x288(%rsp) movq (%rcx), %rax movq -0x18(%rax), %rax movl 0xd4(%rcx,%rax), %eax movq (%rcx), %rdx movq -0x18(%rdx), %rdx imull 0xd8(%rcx,%rdx), %eax movl %eax, 0x85c(%rsp) movq (%rcx), %rax movq -0x18(%rax), %rax movl 0x104(%rcx,%rax), %eax movq (%rcx), %rdx movq -0x18(%rdx), %rsi cltd idivl 0x108(%rcx,%rsi) cltd idivl 0x85c(%rsp) movl %eax, 0x294(%rsp) movq (%rcx), %rax movq -0x18(%rax), %rax movl 0xd0(%rcx,%rax), %eax movq (%rcx), %rdx movq -0x18(%rdx), %rsi cltd idivl 0x108(%rcx,%rsi) movl %eax, %esi movl 0x294(%rsp), %eax cltd idivl %esi movq (%rcx), %rdx movq -0x18(%rdx), %rdx imull 0x108(%rcx,%rdx), %eax movl %eax, 0x858(%rsp) movl $0x0, 0x854(%rsp) movq 0x288(%rsp), %rdi movl 0x854(%rsp), %eax movl %eax, 0x284(%rsp) addq $0x10, %rdi callq 0x989c0 movq %rax, %rcx movl 0x284(%rsp), %eax cmpl %ecx, %eax jge 0x168302b movq 0x288(%rsp), %rdi addq $0x10, %rdi movslq 0x854(%rsp), %rsi callq 0x989e0 movq (%rax), %rax movq %rax, 0x278(%rsp) cmpq $0x0, %rax je 0x1683016 movq 0x278(%rsp), %rdi movq (%rdi), %rax callq *0x8(%rax) jmp 0x1683018 movl 0x854(%rsp), %eax addl $0x1, %eax movl %eax, 0x854(%rsp) jmp 0x1682fb1 movq 0x288(%rsp), %rdi addq $0x10, %rdi callq 0x990f0 movq 0x288(%rsp), %rcx movl 0x858(%rsp), %eax movq (%rcx), %rdx movq -0x18(%rdx), %rsi cltd idivl 0x108(%rcx,%rsi) movl %eax, 0x850(%rsp) movq (%rcx), %rax movq -0x18(%rax), %rax movl 0xd0(%rcx,%rax), %eax movq (%rcx), %rdx movq -0x18(%rdx), %rsi cltd idivl 0x108(%rcx,%rsi) movl %eax, %ecx movq 0x288(%rsp), %rax movl %ecx, 0x84c(%rsp) movq %rax, %rdi addq $0x10, %rdi movq (%rax), %rcx movq -0x18(%rcx), %rcx movslq 0x108(%rax,%rcx), %rsi callq 0x98d40 movl $0x0, 0x848(%rsp) movq 0x288(%rsp), %rcx movl 0x848(%rsp), %eax movq (%rcx), %rdx movq -0x18(%rdx), %rdx cmpl 0x108(%rcx,%rdx), %eax jge 0x168754b movq 0x288(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx leaq 0x168(%rax,%rcx), %rdx 
movl 0x85c(%rsp), %eax movl 0x850(%rsp), %ecx imull %ecx, %eax movl 0x84c(%rsp), %ecx imull %ecx, %eax movl 0x848(%rsp), %esi movl %eax, %ecx imull %esi, %ecx leaq 0x7b8(%rsp), %rsi movq %rsi, 0xd60(%rsp) movq %rdx, 0xd58(%rsp) movl %ecx, 0xd54(%rsp) movl %eax, 0xd50(%rsp) movq 0xd58(%rsp), %rax movl 0xd50(%rsp), %r8d movslq 0xd54(%rsp), %rcx movq (%rax), %rdi movq 0x10(%rax), %rdx imulq %rdx, %rcx addq %rcx, %rdi movl 0x18(%rax), %ecx movq 0x20(%rax), %rax movq %rsi, 0xf50(%rsp) movl %r8d, 0xf4c(%rsp) movq %rdi, 0xf40(%rsp) movq %rdx, 0xf38(%rsp) movl %ecx, 0xf34(%rsp) movq %rax, 0xf28(%rsp) movq 0xf50(%rsp), %rax movq %rax, 0x270(%rsp) movq 0xf40(%rsp), %rcx movq %rcx, (%rax) movq $0x0, 0x8(%rax) movq 0xf38(%rsp), %rcx movq %rcx, 0x10(%rax) movl 0xf34(%rsp), %ecx movl %ecx, 0x18(%rax) movq 0xf28(%rsp), %rcx movq %rcx, 0x20(%rax) movl $0x1, 0x28(%rax) movl 0xf4c(%rsp), %ecx movl %ecx, 0x2c(%rax) movl $0x1, 0x30(%rax) movl $0x1, 0x34(%rax) movl $0x1, 0x38(%rax) movslq 0x2c(%rax), %rcx movq %rcx, 0x40(%rax) xorl %eax, %eax movl %eax, %edx leaq 0x800(%rsp), %rdi callq 0x60c90 jmp 0x168321e leaq 0x7b8(%rsp), %rax movq %rax, 0x888(%rsp) movq 0x888(%rsp), %rax movq %rax, 0xcd0(%rsp) movq 0xcd0(%rsp), %rax movq %rax, 0x268(%rsp) cmpq $0x0, 0x8(%rax) je 0x16832e6 movq 0x268(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xccc(%rsp) # imm = 0xFFFFFFFF movl 0xccc(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xcc8(%rsp) cmpl $0x1, 0xcc8(%rsp) jne 0x16832e6 movq 0x268(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x16832b7 movq 0x268(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x16832b5 jmp 0x16832e4 movq 0x268(%rsp), %rax movq (%rax), %rax movq %rax, 0xe08(%rsp) cmpq $0x0, 0xe08(%rsp) je 0x16832e2 movq 0xe08(%rsp), %rdi callq 0x5e480 jmp 0x16832e4 jmp 0x16832e6 movq 0x268(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl 
$0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1683341 movq %rax, %rdi callq 0x5fc90 leaq 0x760(%rsp), %rax movq %rax, 0x870(%rsp) movq 0x870(%rsp), %rax movq %rax, 0x260(%rsp) movq $0x0, (%rax) movq $0x0, 0x8(%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movq $0x0, 0x20(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq 0x288(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx cmpl $0x0, 0x100(%rax,%rcx) je 0x1683af2 movq 0x288(%rsp), %rdx movq (%rdx), %rax addq -0x18(%rax), %rdx addq $0x1b0, %rdx # imm = 0x1B0 movl 0x84c(%rsp), %ecx imull 0x848(%rsp), %ecx movl 0x84c(%rsp), %eax leaq 0x718(%rsp), %rsi movq %rsi, 0xd48(%rsp) movq %rdx, 0xd40(%rsp) movl %ecx, 0xd3c(%rsp) movl %eax, 0xd38(%rsp) movq 0xd40(%rsp), %rax movl 0xd38(%rsp), %edi movq (%rax), %rsi movslq 0xd3c(%rsp), %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x718(%rsp), %r8 movq %r8, 0xf80(%rsp) movl %edi, 0xf7c(%rsp) movq %rsi, 0xf70(%rsp) movq %rdx, 0xf68(%rsp) movl %ecx, 0xf64(%rsp) movq %rax, 0xf58(%rsp) movq 0xf80(%rsp), %rax movq %rax, 0x258(%rsp) movq 0xf70(%rsp), %rcx movq %rcx, (%rax) movq $0x0, 0x8(%rax) movq 0xf68(%rsp), %rcx movq %rcx, 0x10(%rax) movl 0xf64(%rsp), %ecx movl %ecx, 0x18(%rax) movq 0xf58(%rsp), %rcx movq %rcx, 0x20(%rax) movl $0x1, 0x28(%rax) movl 0xf7c(%rsp), %ecx movl %ecx, 0x2c(%rax) movl $0x1, 0x30(%rax) movl $0x1, 0x34(%rax) movl $0x1, 0x38(%rax) movslq 0x2c(%rax), %rcx movq %rcx, 0x40(%rax) leaq 0x760(%rsp), %rax movq %rax, 0xa88(%rsp) leaq 0x718(%rsp), %rax movq %rax, 0xa80(%rsp) movq 0xa88(%rsp), %rax movq %rax, 0x250(%rsp) cmpq 0xa80(%rsp), %rax jne 0x1683546 movq 0x250(%rsp), %rax movq %rax, 0xa90(%rsp) jmp 0x1683739 movq 0xa80(%rsp), %rax cmpq $0x0, 0x8(%rax) je 0x168357e movq 0xa80(%rsp), %rax movq 0x8(%rax), %rcx movl $0x1, 0xa7c(%rsp) movl 0xa7c(%rsp), %eax 
lock xaddl %eax, (%rcx) movl %eax, 0xa78(%rsp) movq 0x250(%rsp), %rax movq %rax, 0xaa0(%rsp) movq 0xaa0(%rsp), %rax movq %rax, 0x248(%rsp) cmpq $0x0, 0x8(%rax) je 0x1683636 movq 0x248(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xa9c(%rsp) # imm = 0xFFFFFFFF movl 0xa9c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xa98(%rsp) cmpl $0x1, 0xa98(%rsp) jne 0x1683636 movq 0x248(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1683607 movq 0x248(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1683605 jmp 0x1683634 movq 0x248(%rsp), %rax movq (%rax), %rax movq %rax, 0xf20(%rsp) cmpq $0x0, 0xf20(%rsp) je 0x1683632 movq 0xf20(%rsp), %rdi callq 0x5e480 jmp 0x1683634 jmp 0x1683636 movq 0x248(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) movq 0x250(%rsp), %rax movq 0xa80(%rsp), %rcx movq (%rcx), %rcx movq %rcx, (%rax) movq 0xa80(%rsp), %rcx movq 0x8(%rcx), %rcx movq %rcx, 0x8(%rax) movq 0xa80(%rsp), %rcx movq 0x10(%rcx), %rcx movq %rcx, 0x10(%rax) movq 0xa80(%rsp), %rcx movl 0x18(%rcx), %ecx movl %ecx, 0x18(%rax) movq 0xa80(%rsp), %rcx movq 0x20(%rcx), %rcx movq %rcx, 0x20(%rax) movq 0xa80(%rsp), %rcx movl 0x28(%rcx), %ecx movl %ecx, 0x28(%rax) movq 0xa80(%rsp), %rcx movl 0x2c(%rcx), %ecx movl %ecx, 0x2c(%rax) movq 0xa80(%rsp), %rcx movl 0x30(%rcx), %ecx movl %ecx, 0x30(%rax) movq 0xa80(%rsp), %rcx movl 0x34(%rcx), %ecx movl %ecx, 0x34(%rax) movq 0xa80(%rsp), %rcx movl 0x38(%rcx), %ecx movl %ecx, 0x38(%rax) movq 0xa80(%rsp), %rcx movq 0x40(%rcx), %rcx movq %rcx, 0x40(%rax) movq %rax, 0xa90(%rsp) leaq 0x718(%rsp), %rax movq %rax, 0x898(%rsp) movq 0x898(%rsp), %rax movq %rax, 0xcb0(%rsp) movq 0xcb0(%rsp), %rax movq %rax, 0x240(%rsp) cmpq $0x0, 0x8(%rax) je 0x1683801 movq 0x240(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xcac(%rsp) # 
imm = 0xFFFFFFFF movl 0xcac(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xca8(%rsp) cmpl $0x1, 0xca8(%rsp) jne 0x1683801 movq 0x240(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x16837d2 movq 0x240(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x16837d0 jmp 0x16837ff movq 0x240(%rsp), %rax movq (%rax), %rax movq %rax, 0xe18(%rsp) cmpq $0x0, 0xe18(%rsp) je 0x16837fd movq 0xe18(%rsp), %rdi callq 0x5e480 jmp 0x16837ff jmp 0x1683801 movq 0x240(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x168385c movq %rax, %rdi callq 0x5fc90 jmp 0x1683af2 movq %rax, %rcx movl %edx, %eax movq %rcx, 0x7b0(%rsp) movl %eax, 0x7ac(%rsp) leaq 0x7b8(%rsp), %rax movq %rax, 0x890(%rsp) movq 0x890(%rsp), %rax movq %rax, 0xcc0(%rsp) movq 0xcc0(%rsp), %rax movq %rax, 0x238(%rsp) cmpq $0x0, 0x8(%rax) je 0x168393d movq 0x238(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xcbc(%rsp) # imm = 0xFFFFFFFF movl 0xcbc(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xcb8(%rsp) cmpl $0x1, 0xcb8(%rsp) jne 0x168393d movq 0x238(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x168390e movq 0x238(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x168390c jmp 0x168393b movq 0x238(%rsp), %rax movq (%rax), %rax movq %rax, 0xe10(%rsp) cmpq $0x0, 0xe10(%rsp) je 0x1683939 movq 0xe10(%rsp), %rdi callq 0x5e480 jmp 0x168393b jmp 0x168393d movq 0x238(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1683998 movq %rax, %rdi callq 0x5fc90 jmp 0x1687555 movq %rax, %rcx movl %edx, %eax movq %rcx, 0x7b0(%rsp) movl %eax, 0x7ac(%rsp) jmp 0x168732f movq 
%rax, %rcx movl %edx, %eax movq %rcx, 0x7b0(%rsp) movl %eax, 0x7ac(%rsp) leaq 0x718(%rsp), %rax movq %rax, 0x8a0(%rsp) movq 0x8a0(%rsp), %rax movq %rax, 0xca0(%rsp) movq 0xca0(%rsp), %rax movq %rax, 0x230(%rsp) cmpq $0x0, 0x8(%rax) je 0x1683a92 movq 0x230(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xc9c(%rsp) # imm = 0xFFFFFFFF movl 0xc9c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xc98(%rsp) cmpl $0x1, 0xc98(%rsp) jne 0x1683a92 movq 0x230(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1683a63 movq 0x230(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1683a61 jmp 0x1683a90 movq 0x230(%rsp), %rax movq (%rax), %rax movq %rax, 0xe20(%rsp) cmpq $0x0, 0xe20(%rsp) je 0x1683a8e movq 0xe20(%rsp), %rdi callq 0x5e480 jmp 0x1683a90 jmp 0x1683a92 movq 0x230(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1683aed movq %rax, %rdi callq 0x5fc90 jmp 0x168732f movl $0x6, %edi callq 0xae160 movq %rax, 0x228(%rsp) jmp 0x1683b06 movq 0x228(%rsp), %rax movq %rax, 0x710(%rsp) leaq 0x700(%rsp), %rdi callq 0xa0840 jmp 0x1683b25 movl 0x84c(%rsp), %edx leaq 0x700(%rsp), %rdi xorl %esi, %esi callq 0xa16d0 jmp 0x1683b3d movq 0x288(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx movl 0xd4(%rax,%rcx), %edx leaq 0x700(%rsp), %rdi movl $0x1, %esi callq 0xa16d0 jmp 0x1683b67 movq 0x288(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx movl 0xd8(%rax,%rcx), %edx leaq 0x700(%rsp), %rdi movl $0xb, %esi callq 0xa16d0 jmp 0x1683b91 movq 0x288(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx movl 0xdc(%rax,%rcx), %edx leaq 0x700(%rsp), %rdi movl $0x2, %esi callq 0xa16d0 jmp 0x1683bbb movq 0x288(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx movl 0xe0(%rax,%rcx), %edx leaq 0x700(%rsp), %rdi movl $0xc, %esi callq 0xa16d0 jmp 0x1683be5 movq 
0x288(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx movl 0xe4(%rax,%rcx), %edx leaq 0x700(%rsp), %rdi movl $0x3, %esi callq 0xa16d0 jmp 0x1683c0f movq 0x288(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx movl 0xe8(%rax,%rcx), %edx leaq 0x700(%rsp), %rdi movl $0xd, %esi callq 0xa16d0 jmp 0x1683c39 leaq 0x700(%rsp), %rdi movl $0x4, %esi xorl %edx, %edx callq 0xa16d0 jmp 0x1683c4f leaq 0x700(%rsp), %rdi movl $0xe, %esi xorl %edx, %edx callq 0xa16d0 jmp 0x1683c65 movq 0x288(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx movl 0x100(%rax,%rcx), %edx leaq 0x700(%rsp), %rdi movl $0x5, %esi callq 0xa16d0 jmp 0x1683c8f movl 0x85c(%rsp), %edx movl 0x850(%rsp), %eax imull %eax, %edx movl 0x84c(%rsp), %eax imull %eax, %edx leaq 0x700(%rsp), %rdi movl $0x6, %esi callq 0xa16d0 jmp 0x1683cbe movq 0x288(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx movl 0x10c(%rax,%rcx), %edx leaq 0x700(%rsp), %rdi movl $0x8, %esi callq 0xa16d0 jmp 0x1683ce8 movq 0x288(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx movl 0x110(%rax,%rcx), %edx leaq 0x700(%rsp), %rdi movl $0x9, %esi callq 0xa16d0 jmp 0x1683d12 movq 0x288(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx leaq 0x118(%rax,%rcx), %rdx leaq 0x700(%rsp), %rdi movl $0xa, %esi callq 0xa1760 jmp 0x1683d3d movq 0x710(%rsp), %rdi movq (%rdi), %rax movq 0x10(%rax), %rax leaq 0x700(%rsp), %rsi callq *%rax jmp 0x1683d58 movq 0x288(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx cmpl $0x0, 0x100(%rax,%rcx) je 0x1685870 leaq 0x590(%rsp), %rax movq %rax, %rcx addq $0x168, %rcx # imm = 0x168 movq %rcx, 0x218(%rsp) movq %rax, 0x220(%rsp) movq 0x220(%rsp), %rax movq %rax, 0x208(%rsp) movq %rax, 0x878(%rsp) movq 0x878(%rsp), %rax movq %rax, 0x210(%rsp) movq $0x0, (%rax) movq $0x0, 0x8(%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movq $0x0, 0x20(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq 0x218(%rsp), %rcx movq 
0x208(%rsp), %rax addq $0x48, %rax cmpq %rcx, %rax movq %rax, 0x220(%rsp) jne 0x1683d97 leaq 0x590(%rsp), %rax movq %rax, 0xa68(%rsp) leaq 0x800(%rsp), %rax movq %rax, 0xa60(%rsp) movq 0xa68(%rsp), %rax movq %rax, 0x200(%rsp) cmpq 0xa60(%rsp), %rax jne 0x1683e84 movq 0x200(%rsp), %rax movq %rax, 0xa70(%rsp) jmp 0x1684077 movq 0xa60(%rsp), %rax cmpq $0x0, 0x8(%rax) je 0x1683ebc movq 0xa60(%rsp), %rax movq 0x8(%rax), %rcx movl $0x1, 0xa5c(%rsp) movl 0xa5c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xa58(%rsp) movq 0x200(%rsp), %rax movq %rax, 0xab0(%rsp) movq 0xab0(%rsp), %rax movq %rax, 0x1f8(%rsp) cmpq $0x0, 0x8(%rax) je 0x1683f74 movq 0x1f8(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xaac(%rsp) # imm = 0xFFFFFFFF movl 0xaac(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xaa8(%rsp) cmpl $0x1, 0xaa8(%rsp) jne 0x1683f74 movq 0x1f8(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1683f45 movq 0x1f8(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1683f43 jmp 0x1683f72 movq 0x1f8(%rsp), %rax movq (%rax), %rax movq %rax, 0xf18(%rsp) cmpq $0x0, 0xf18(%rsp) je 0x1683f70 movq 0xf18(%rsp), %rdi callq 0x5e480 jmp 0x1683f72 jmp 0x1683f74 movq 0x1f8(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) movq 0x200(%rsp), %rax movq 0xa60(%rsp), %rcx movq (%rcx), %rcx movq %rcx, (%rax) movq 0xa60(%rsp), %rcx movq 0x8(%rcx), %rcx movq %rcx, 0x8(%rax) movq 0xa60(%rsp), %rcx movq 0x10(%rcx), %rcx movq %rcx, 0x10(%rax) movq 0xa60(%rsp), %rcx movl 0x18(%rcx), %ecx movl %ecx, 0x18(%rax) movq 0xa60(%rsp), %rcx movq 0x20(%rcx), %rcx movq %rcx, 0x20(%rax) movq 0xa60(%rsp), %rcx movl 0x28(%rcx), %ecx movl %ecx, 0x28(%rax) movq 0xa60(%rsp), %rcx movl 0x2c(%rcx), %ecx movl %ecx, 0x2c(%rax) movq 0xa60(%rsp), %rcx movl 0x30(%rcx), %ecx movl %ecx, 0x30(%rax) 
movq 0xa60(%rsp), %rcx movl 0x34(%rcx), %ecx movl %ecx, 0x34(%rax) movq 0xa60(%rsp), %rcx movl 0x38(%rcx), %ecx movl %ecx, 0x38(%rax) movq 0xa60(%rsp), %rcx movq 0x40(%rcx), %rcx movq %rcx, 0x40(%rax) movq %rax, 0xa70(%rsp) leaq 0x590(%rsp), %rax addq $0x48, %rax movq %rax, 0xa48(%rsp) leaq 0x760(%rsp), %rax movq %rax, 0xa40(%rsp) movq 0xa48(%rsp), %rax movq %rax, 0x1f0(%rsp) cmpq 0xa40(%rsp), %rax jne 0x16840ca movq 0x1f0(%rsp), %rax movq %rax, 0xa50(%rsp) jmp 0x16842bd movq 0xa40(%rsp), %rax cmpq $0x0, 0x8(%rax) je 0x1684102 movq 0xa40(%rsp), %rax movq 0x8(%rax), %rcx movl $0x1, 0xa3c(%rsp) movl 0xa3c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xa38(%rsp) movq 0x1f0(%rsp), %rax movq %rax, 0xac0(%rsp) movq 0xac0(%rsp), %rax movq %rax, 0x1e8(%rsp) cmpq $0x0, 0x8(%rax) je 0x16841ba movq 0x1e8(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xabc(%rsp) # imm = 0xFFFFFFFF movl 0xabc(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xab8(%rsp) cmpl $0x1, 0xab8(%rsp) jne 0x16841ba movq 0x1e8(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x168418b movq 0x1e8(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1684189 jmp 0x16841b8 movq 0x1e8(%rsp), %rax movq (%rax), %rax movq %rax, 0xf10(%rsp) cmpq $0x0, 0xf10(%rsp) je 0x16841b6 movq 0xf10(%rsp), %rdi callq 0x5e480 jmp 0x16841b8 jmp 0x16841ba movq 0x1e8(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) movq 0x1f0(%rsp), %rax movq 0xa40(%rsp), %rcx movq (%rcx), %rcx movq %rcx, (%rax) movq 0xa40(%rsp), %rcx movq 0x8(%rcx), %rcx movq %rcx, 0x8(%rax) movq 0xa40(%rsp), %rcx movq 0x10(%rcx), %rcx movq %rcx, 0x10(%rax) movq 0xa40(%rsp), %rcx movl 0x18(%rcx), %ecx movl %ecx, 0x18(%rax) movq 0xa40(%rsp), %rcx movq 0x20(%rcx), %rcx movq %rcx, 0x20(%rax) movq 0xa40(%rsp), %rcx movl 0x28(%rcx), %ecx movl 
%ecx, 0x28(%rax) movq 0xa40(%rsp), %rcx movl 0x2c(%rcx), %ecx movl %ecx, 0x2c(%rax) movq 0xa40(%rsp), %rcx movl 0x30(%rcx), %ecx movl %ecx, 0x30(%rax) movq 0xa40(%rsp), %rcx movl 0x34(%rcx), %ecx movl %ecx, 0x34(%rax) movq 0xa40(%rsp), %rcx movl 0x38(%rcx), %ecx movl %ecx, 0x38(%rax) movq 0xa40(%rsp), %rcx movq 0x40(%rcx), %rcx movq %rcx, 0x40(%rax) movq %rax, 0xa50(%rsp) movq 0x288(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx cmpl $0x0, 0x10c(%rax,%rcx) je 0x1684f34 movl 0x84c(%rsp), %eax leaq 0x548(%rsp), %rcx movq %rcx, 0xd80(%rsp) movl %eax, 0xd7c(%rsp) movq $0x4, 0xd70(%rsp) movq $0x0, 0xd68(%rsp) movq 0xd80(%rsp), %rdi movq %rdi, 0x1e0(%rsp) movq $0x0, (%rdi) movq $0x0, 0x8(%rdi) movq $0x0, 0x10(%rdi) movl $0x0, 0x18(%rdi) movq $0x0, 0x20(%rdi) movl $0x0, 0x28(%rdi) movl $0x0, 0x2c(%rdi) movl $0x0, 0x30(%rdi) movl $0x0, 0x34(%rdi) movl $0x0, 0x38(%rdi) movq $0x0, 0x40(%rdi) movl 0xd7c(%rsp), %esi movq 0xd70(%rsp), %rdx movq 0xd68(%rsp), %rcx callq 0x65040 jmp 0x168438f jmp 0x1684391 movq 0x288(%rsp), %rcx movq (%rcx), %rax addq -0x18(%rax), %rcx addq $0x1f8, %rcx # imm = 0x1F8 movslq 0x848(%rsp), %rax movq %rcx, 0xe00(%rsp) movq %rax, 0xdf8(%rsp) movq 0xe00(%rsp), %rax movq (%rax), %rax movq 0xdf8(%rsp), %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x1d8(%rsp) movq 0x1d8(%rsp), %rax movss (%rax), %xmm0 leaq 0x548(%rsp), %rax movq %rax, 0xde0(%rsp) movss %xmm0, 0xddc(%rsp) movq 0xde0(%rsp), %rax movq %rax, 0x1048(%rsp) movq 0x1048(%rsp), %rdx movq 0x40(%rdx), %rcx movslq 0x38(%rdx), %rdx imulq %rdx, %rcx movl %ecx, 0xdd8(%rsp) movq (%rax), %rax movq %rax, 0xdd0(%rsp) movl $0x0, 0xdcc(%rsp) movl 0xdcc(%rsp), %eax cmpl 0xdd8(%rsp), %eax jge 0x168448e movss 0xddc(%rsp), %xmm0 movq 0xdd0(%rsp), %rax movq %rax, %rcx addq $0x4, %rcx movq %rcx, 0xdd0(%rsp) movss %xmm0, (%rax) movl 0xdcc(%rsp), %eax addl $0x1, %eax movl %eax, 0xdcc(%rsp) jmp 0x1684447 jmp 0x1684490 leaq 0x590(%rsp), %rax addq $0x90, %rax movq %rax, 0xa28(%rsp) leaq 0x548(%rsp), %rax movq 
%rax, 0xa20(%rsp) movq 0xa28(%rsp), %rax movq %rax, 0x1d0(%rsp) cmpq 0xa20(%rsp), %rax jne 0x16844e5 movq 0x1d0(%rsp), %rax movq %rax, 0xa30(%rsp) jmp 0x16846d8 movq 0xa20(%rsp), %rax cmpq $0x0, 0x8(%rax) je 0x168451d movq 0xa20(%rsp), %rax movq 0x8(%rax), %rcx movl $0x1, 0xa1c(%rsp) movl 0xa1c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xa18(%rsp) movq 0x1d0(%rsp), %rax movq %rax, 0xad0(%rsp) movq 0xad0(%rsp), %rax movq %rax, 0x1c8(%rsp) cmpq $0x0, 0x8(%rax) je 0x16845d5 movq 0x1c8(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xacc(%rsp) # imm = 0xFFFFFFFF movl 0xacc(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xac8(%rsp) cmpl $0x1, 0xac8(%rsp) jne 0x16845d5 movq 0x1c8(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x16845a6 movq 0x1c8(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x16845a4 jmp 0x16845d3 movq 0x1c8(%rsp), %rax movq (%rax), %rax movq %rax, 0xf08(%rsp) cmpq $0x0, 0xf08(%rsp) je 0x16845d1 movq 0xf08(%rsp), %rdi callq 0x5e480 jmp 0x16845d3 jmp 0x16845d5 movq 0x1c8(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) movq 0x1d0(%rsp), %rax movq 0xa20(%rsp), %rcx movq (%rcx), %rcx movq %rcx, (%rax) movq 0xa20(%rsp), %rcx movq 0x8(%rcx), %rcx movq %rcx, 0x8(%rax) movq 0xa20(%rsp), %rcx movq 0x10(%rcx), %rcx movq %rcx, 0x10(%rax) movq 0xa20(%rsp), %rcx movl 0x18(%rcx), %ecx movl %ecx, 0x18(%rax) movq 0xa20(%rsp), %rcx movq 0x20(%rcx), %rcx movq %rcx, 0x20(%rax) movq 0xa20(%rsp), %rcx movl 0x28(%rcx), %ecx movl %ecx, 0x28(%rax) movq 0xa20(%rsp), %rcx movl 0x2c(%rcx), %ecx movl %ecx, 0x2c(%rax) movq 0xa20(%rsp), %rcx movl 0x30(%rcx), %ecx movl %ecx, 0x30(%rax) movq 0xa20(%rsp), %rcx movl 0x34(%rcx), %ecx movl %ecx, 0x34(%rax) movq 0xa20(%rsp), %rcx movl 0x38(%rcx), %ecx movl %ecx, 0x38(%rax) movq 0xa20(%rsp), %rcx movq 
0x40(%rcx), %rcx movq %rcx, 0x40(%rax) movq %rax, 0xa30(%rsp) movq 0x288(%rsp), %rcx movq (%rcx), %rax addq -0x18(%rax), %rcx addq $0x240, %rcx # imm = 0x240 movl 0x848(%rsp), %eax leaq 0x500(%rsp), %rdx movq %rdx, 0xd30(%rsp) movq %rcx, 0xd28(%rsp) movl %eax, 0xd24(%rsp) movl $0x1, 0xd20(%rsp) movq 0xd28(%rsp), %rax movl 0xd20(%rsp), %edi movq (%rax), %rsi movslq 0xd24(%rsp), %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x500(%rsp), %r8 movq %r8, 0xfb0(%rsp) movl %edi, 0xfac(%rsp) movq %rsi, 0xfa0(%rsp) movq %rdx, 0xf98(%rsp) movl %ecx, 0xf94(%rsp) movq %rax, 0xf88(%rsp) movq 0xfb0(%rsp), %rax movq %rax, 0x1c0(%rsp) movq 0xfa0(%rsp), %rcx movq %rcx, (%rax) movq $0x0, 0x8(%rax) movq 0xf98(%rsp), %rcx movq %rcx, 0x10(%rax) movl 0xf94(%rsp), %ecx movl %ecx, 0x18(%rax) movq 0xf88(%rsp), %rcx movq %rcx, 0x20(%rax) movl $0x1, 0x28(%rax) movl 0xfac(%rsp), %ecx movl %ecx, 0x2c(%rax) movl $0x1, 0x30(%rax) movl $0x1, 0x34(%rax) movl $0x1, 0x38(%rax) movslq 0x2c(%rax), %rcx movq %rcx, 0x40(%rax) leaq 0x590(%rsp), %rax addq $0xd8, %rax movq %rax, 0xa08(%rsp) leaq 0x500(%rsp), %rax movq %rax, 0xa00(%rsp) movq 0xa08(%rsp), %rax movq %rax, 0x1b8(%rsp) cmpq 0xa00(%rsp), %rax jne 0x168484a movq 0x1b8(%rsp), %rax movq %rax, 0xa10(%rsp) jmp 0x1684a3d movq 0xa00(%rsp), %rax cmpq $0x0, 0x8(%rax) je 0x1684882 movq 0xa00(%rsp), %rax movq 0x8(%rax), %rcx movl $0x1, 0x9fc(%rsp) movl 0x9fc(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x9f8(%rsp) movq 0x1b8(%rsp), %rax movq %rax, 0xae0(%rsp) movq 0xae0(%rsp), %rax movq %rax, 0x1b0(%rsp) cmpq $0x0, 0x8(%rax) je 0x168493a movq 0x1b0(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xadc(%rsp) # imm = 0xFFFFFFFF movl 0xadc(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xad8(%rsp) cmpl $0x1, 0xad8(%rsp) jne 0x168493a movq 0x1b0(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x168490b movq 0x1b0(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), 
%rax callq *%rax jmp 0x1684909 jmp 0x1684938 movq 0x1b0(%rsp), %rax movq (%rax), %rax movq %rax, 0xf00(%rsp) cmpq $0x0, 0xf00(%rsp) je 0x1684936 movq 0xf00(%rsp), %rdi callq 0x5e480 jmp 0x1684938 jmp 0x168493a movq 0x1b0(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) movq 0x1b8(%rsp), %rax movq 0xa00(%rsp), %rcx movq (%rcx), %rcx movq %rcx, (%rax) movq 0xa00(%rsp), %rcx movq 0x8(%rcx), %rcx movq %rcx, 0x8(%rax) movq 0xa00(%rsp), %rcx movq 0x10(%rcx), %rcx movq %rcx, 0x10(%rax) movq 0xa00(%rsp), %rcx movl 0x18(%rcx), %ecx movl %ecx, 0x18(%rax) movq 0xa00(%rsp), %rcx movq 0x20(%rcx), %rcx movq %rcx, 0x20(%rax) movq 0xa00(%rsp), %rcx movl 0x28(%rcx), %ecx movl %ecx, 0x28(%rax) movq 0xa00(%rsp), %rcx movl 0x2c(%rcx), %ecx movl %ecx, 0x2c(%rax) movq 0xa00(%rsp), %rcx movl 0x30(%rcx), %ecx movl %ecx, 0x30(%rax) movq 0xa00(%rsp), %rcx movl 0x34(%rcx), %ecx movl %ecx, 0x34(%rax) movq 0xa00(%rsp), %rcx movl 0x38(%rcx), %ecx movl %ecx, 0x38(%rax) movq 0xa00(%rsp), %rcx movq 0x40(%rcx), %rcx movq %rcx, 0x40(%rax) movq %rax, 0xa10(%rsp) leaq 0x500(%rsp), %rax movq %rax, 0x8b0(%rsp) movq 0x8b0(%rsp), %rax movq %rax, 0xc80(%rsp) movq 0xc80(%rsp), %rax movq %rax, 0x1a8(%rsp) cmpq $0x0, 0x8(%rax) je 0x1684b05 movq 0x1a8(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xc7c(%rsp) # imm = 0xFFFFFFFF movl 0xc7c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xc78(%rsp) cmpl $0x1, 0xc78(%rsp) jne 0x1684b05 movq 0x1a8(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1684ad6 movq 0x1a8(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1684ad4 jmp 0x1684b03 movq 0x1a8(%rsp), %rax movq (%rax), %rax movq %rax, 0xe30(%rsp) cmpq $0x0, 0xe30(%rsp) je 0x1684b01 movq 0xe30(%rsp), %rdi callq 0x5e480 jmp 0x1684b03 jmp 0x1684b05 movq 0x1a8(%rsp), %rax movq $0x0, (%rax) 
movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1684b60 movq %rax, %rdi callq 0x5fc90 leaq 0x548(%rsp), %rax movq %rax, 0x8c0(%rsp) movq 0x8c0(%rsp), %rax movq %rax, 0xc60(%rsp) movq 0xc60(%rsp), %rax movq %rax, 0x1a0(%rsp) cmpq $0x0, 0x8(%rax) je 0x1684c28 movq 0x1a0(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xc5c(%rsp) # imm = 0xFFFFFFFF movl 0xc5c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xc58(%rsp) cmpl $0x1, 0xc58(%rsp) jne 0x1684c28 movq 0x1a0(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1684bf9 movq 0x1a0(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1684bf7 jmp 0x1684c26 movq 0x1a0(%rsp), %rax movq (%rax), %rax movq %rax, 0xe40(%rsp) cmpq $0x0, 0xe40(%rsp) je 0x1684c24 movq 0xe40(%rsp), %rdi callq 0x5e480 jmp 0x1684c26 jmp 0x1684c28 movq 0x1a0(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1684c83 movq %rax, %rdi callq 0x5fc90 jmp 0x1684f34 movq %rax, %rcx movl %edx, %eax movq %rcx, 0x7b0(%rsp) movl %eax, 0x7ac(%rsp) jmp 0x1687322 movq %rax, %rcx movl %edx, %eax movq %rcx, 0x7b0(%rsp) movl %eax, 0x7ac(%rsp) jmp 0x16856fd movq %rax, %rcx movl %edx, %eax movq %rcx, 0x7b0(%rsp) movl %eax, 0x7ac(%rsp) jmp 0x1684e0c movq %rax, %rcx movl %edx, %eax movq %rcx, 0x7b0(%rsp) movl %eax, 0x7ac(%rsp) leaq 0x500(%rsp), %rax movq %rax, 0x8b8(%rsp) movq 0x8b8(%rsp), %rax movq %rax, 0xc70(%rsp) movq 0xc70(%rsp), %rax movq %rax, 0x198(%rsp) cmpq $0x0, 0x8(%rax) je 0x1684daf movq 0x198(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xc6c(%rsp) # imm = 0xFFFFFFFF movl 0xc6c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xc68(%rsp) cmpl $0x1, 0xc68(%rsp) jne 
0x1684daf movq 0x198(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1684d80 movq 0x198(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1684d7e jmp 0x1684dad movq 0x198(%rsp), %rax movq (%rax), %rax movq %rax, 0xe38(%rsp) cmpq $0x0, 0xe38(%rsp) je 0x1684dab movq 0xe38(%rsp), %rdi callq 0x5e480 jmp 0x1684dad jmp 0x1684daf movq 0x198(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1684e0a movq %rax, %rdi callq 0x5fc90 jmp 0x1684e0c leaq 0x548(%rsp), %rax movq %rax, 0x8c8(%rsp) movq 0x8c8(%rsp), %rax movq %rax, 0xc50(%rsp) movq 0xc50(%rsp), %rax movq %rax, 0x190(%rsp) cmpq $0x0, 0x8(%rax) je 0x1684ed4 movq 0x190(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xc4c(%rsp) # imm = 0xFFFFFFFF movl 0xc4c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xc48(%rsp) cmpl $0x1, 0xc48(%rsp) jne 0x1684ed4 movq 0x190(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1684ea5 movq 0x190(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1684ea3 jmp 0x1684ed2 movq 0x190(%rsp), %rax movq (%rax), %rax movq %rax, 0xe48(%rsp) cmpq $0x0, 0xe48(%rsp) je 0x1684ed0 movq 0xe48(%rsp), %rdi callq 0x5e480 jmp 0x1684ed2 jmp 0x1684ed4 movq 0x190(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1684f2f movq %rax, %rdi callq 0x5fc90 jmp 0x16856fd movq 0x288(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx cmpl $0x64, 0x10c(%rax,%rcx) jle 0x168551a movq 0x288(%rsp), %rcx movq (%rcx), %rax addq -0x18(%rax), %rcx addq $0x288, %rcx # imm = 0x288 movl 0x848(%rsp), %eax leaq 0x4b8(%rsp), %rdx movq %rdx, 0xd18(%rsp) movq %rcx, 
0xd10(%rsp) movl %eax, 0xd0c(%rsp) movl $0x1, 0xd08(%rsp) movq 0xd10(%rsp), %rax movl 0xd08(%rsp), %edi movq (%rax), %rsi movslq 0xd0c(%rsp), %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x4b8(%rsp), %r8 movq %r8, 0xfe0(%rsp) movl %edi, 0xfdc(%rsp) movq %rsi, 0xfd0(%rsp) movq %rdx, 0xfc8(%rsp) movl %ecx, 0xfc4(%rsp) movq %rax, 0xfb8(%rsp) movq 0xfe0(%rsp), %rax movq %rax, 0x188(%rsp) movq 0xfd0(%rsp), %rcx movq %rcx, (%rax) movq $0x0, 0x8(%rax) movq 0xfc8(%rsp), %rcx movq %rcx, 0x10(%rax) movl 0xfc4(%rsp), %ecx movl %ecx, 0x18(%rax) movq 0xfb8(%rsp), %rcx movq %rcx, 0x20(%rax) movl $0x1, 0x28(%rax) movl 0xfdc(%rsp), %ecx movl %ecx, 0x2c(%rax) movl $0x1, 0x30(%rax) movl $0x1, 0x34(%rax) movl $0x1, 0x38(%rax) movslq 0x2c(%rax), %rcx movq %rcx, 0x40(%rax) leaq 0x590(%rsp), %rax addq $0x120, %rax # imm = 0x120 movq %rax, 0x9e8(%rsp) leaq 0x4b8(%rsp), %rax movq %rax, 0x9e0(%rsp) movq 0x9e8(%rsp), %rax movq %rax, 0x180(%rsp) cmpq 0x9e0(%rsp), %rax jne 0x16850c3 movq 0x180(%rsp), %rax movq %rax, 0x9f0(%rsp) jmp 0x16852b6 movq 0x9e0(%rsp), %rax cmpq $0x0, 0x8(%rax) je 0x16850fb movq 0x9e0(%rsp), %rax movq 0x8(%rax), %rcx movl $0x1, 0x9dc(%rsp) movl 0x9dc(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x9d8(%rsp) movq 0x180(%rsp), %rax movq %rax, 0xaf0(%rsp) movq 0xaf0(%rsp), %rax movq %rax, 0x178(%rsp) cmpq $0x0, 0x8(%rax) je 0x16851b3 movq 0x178(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xaec(%rsp) # imm = 0xFFFFFFFF movl 0xaec(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xae8(%rsp) cmpl $0x1, 0xae8(%rsp) jne 0x16851b3 movq 0x178(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1685184 movq 0x178(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1685182 jmp 0x16851b1 movq 0x178(%rsp), %rax movq (%rax), %rax movq %rax, 0xef8(%rsp) cmpq $0x0, 0xef8(%rsp) je 0x16851af movq 0xef8(%rsp), %rdi callq 0x5e480 jmp 0x16851b1 jmp 0x16851b3 movq 
0x178(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) movq 0x180(%rsp), %rax movq 0x9e0(%rsp), %rcx movq (%rcx), %rcx movq %rcx, (%rax) movq 0x9e0(%rsp), %rcx movq 0x8(%rcx), %rcx movq %rcx, 0x8(%rax) movq 0x9e0(%rsp), %rcx movq 0x10(%rcx), %rcx movq %rcx, 0x10(%rax) movq 0x9e0(%rsp), %rcx movl 0x18(%rcx), %ecx movl %ecx, 0x18(%rax) movq 0x9e0(%rsp), %rcx movq 0x20(%rcx), %rcx movq %rcx, 0x20(%rax) movq 0x9e0(%rsp), %rcx movl 0x28(%rcx), %ecx movl %ecx, 0x28(%rax) movq 0x9e0(%rsp), %rcx movl 0x2c(%rcx), %ecx movl %ecx, 0x2c(%rax) movq 0x9e0(%rsp), %rcx movl 0x30(%rcx), %ecx movl %ecx, 0x30(%rax) movq 0x9e0(%rsp), %rcx movl 0x34(%rcx), %ecx movl %ecx, 0x34(%rax) movq 0x9e0(%rsp), %rcx movl 0x38(%rcx), %ecx movl %ecx, 0x38(%rax) movq 0x9e0(%rsp), %rcx movq 0x40(%rcx), %rcx movq %rcx, 0x40(%rax) movq %rax, 0x9f0(%rsp) leaq 0x4b8(%rsp), %rax movq %rax, 0x8d0(%rsp) movq 0x8d0(%rsp), %rax movq %rax, 0xc40(%rsp) movq 0xc40(%rsp), %rax movq %rax, 0x170(%rsp) cmpq $0x0, 0x8(%rax) je 0x168537e movq 0x170(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xc3c(%rsp) # imm = 0xFFFFFFFF movl 0xc3c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xc38(%rsp) cmpl $0x1, 0xc38(%rsp) jne 0x168537e movq 0x170(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x168534f movq 0x170(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x168534d jmp 0x168537c movq 0x170(%rsp), %rax movq (%rax), %rax movq %rax, 0xe50(%rsp) cmpq $0x0, 0xe50(%rsp) je 0x168537a movq 0xe50(%rsp), %rdi callq 0x5e480 jmp 0x168537c jmp 0x168537e movq 0x170(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x16853d9 
movq %rax, %rdi callq 0x5fc90 jmp 0x168551a movq %rax, %rcx movl %edx, %eax movq %rcx, 0x7b0(%rsp) movl %eax, 0x7ac(%rsp) leaq 0x4b8(%rsp), %rax movq %rax, 0x8d8(%rsp) movq 0x8d8(%rsp), %rax movq %rax, 0xc30(%rsp) movq 0xc30(%rsp), %rax movq %rax, 0x168(%rsp) cmpq $0x0, 0x8(%rax) je 0x16854ba movq 0x168(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xc2c(%rsp) # imm = 0xFFFFFFFF movl 0xc2c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xc28(%rsp) cmpl $0x1, 0xc28(%rsp) jne 0x16854ba movq 0x168(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x168548b movq 0x168(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1685489 jmp 0x16854b8 movq 0x168(%rsp), %rax movq (%rax), %rax movq %rax, 0xe58(%rsp) cmpq $0x0, 0xe58(%rsp) je 0x16854b6 movq 0xe58(%rsp), %rdi callq 0x5e480 jmp 0x16854b8 jmp 0x16854ba movq 0x168(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1685515 movq %rax, %rdi callq 0x5fc90 jmp 0x16856fd movq 0x710(%rsp), %rax movq %rax, 0x160(%rsp) leaq 0x4a8(%rsp), %rdi leaq 0x590(%rsp), %rsi callq 0x89470 jmp 0x1685541 movq 0x160(%rsp), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax leaq 0x4a8(%rsp), %rsi callq *%rax jmp 0x168555c leaq 0x4a8(%rsp), %rdi callq 0x89520 leaq 0x590(%rsp), %rax movq %rax, 0x150(%rsp) addq $0x168, %rax # imm = 0x168 movq %rax, 0x158(%rsp) movq 0x158(%rsp), %rax addq $-0x48, %rax movq %rax, 0x140(%rsp) movq %rax, 0x8e0(%rsp) movq 0x8e0(%rsp), %rax movq %rax, 0xc20(%rsp) movq 0xc20(%rsp), %rax movq %rax, 0x148(%rsp) cmpq $0x0, 0x8(%rax) je 0x168565b movq 0x148(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xc1c(%rsp) # imm = 0xFFFFFFFF movl 0xc1c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xc18(%rsp) cmpl $0x1, 0xc18(%rsp) jne 0x168565b movq 0x148(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x168562c 
movq 0x148(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x168562a jmp 0x1685659 movq 0x148(%rsp), %rax movq (%rax), %rax movq %rax, 0xe60(%rsp) cmpq $0x0, 0xe60(%rsp) je 0x1685657 movq 0xe60(%rsp), %rdi callq 0x5e480 jmp 0x1685659 jmp 0x168565b movq 0x148(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x16856b6 movq %rax, %rdi callq 0x5fc90 movq 0x140(%rsp), %rax movq 0x150(%rsp), %rcx cmpq %rcx, %rax movq %rax, 0x158(%rsp) jne 0x1685587 jmp 0x1687092 movq %rax, %rcx movl %edx, %eax movq %rcx, 0x7b0(%rsp) movl %eax, 0x7ac(%rsp) leaq 0x4a8(%rsp), %rdi callq 0x89520 leaq 0x590(%rsp), %rax movq %rax, 0x130(%rsp) addq $0x168, %rax # imm = 0x168 movq %rax, 0x138(%rsp) movq 0x138(%rsp), %rax addq $-0x48, %rax movq %rax, 0x120(%rsp) movq %rax, 0x8e8(%rsp) movq 0x8e8(%rsp), %rax movq %rax, 0xc10(%rsp) movq 0xc10(%rsp), %rax movq %rax, 0x128(%rsp) cmpq $0x0, 0x8(%rax) je 0x16857ef movq 0x128(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xc0c(%rsp) # imm = 0xFFFFFFFF movl 0xc0c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xc08(%rsp) cmpl $0x1, 0xc08(%rsp) jne 0x16857ef movq 0x128(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x16857c0 movq 0x128(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x16857be jmp 0x16857ed movq 0x128(%rsp), %rax movq (%rax), %rax movq %rax, 0xe68(%rsp) cmpq $0x0, 0xe68(%rsp) je 0x16857eb movq 0xe68(%rsp), %rdi callq 0x5e480 jmp 0x16857ed jmp 0x16857ef movq 0x128(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x168584a movq %rax, %rdi callq 0x5fc90 movq 0x120(%rsp), 
%rax movq 0x130(%rsp), %rcx cmpq %rcx, %rax movq %rax, 0x138(%rsp) jne 0x168571b jmp 0x1687322 leaq 0x380(%rsp), %rax movq %rax, %rcx addq $0x120, %rcx # imm = 0x120 movq %rcx, 0x110(%rsp) movq %rax, 0x118(%rsp) movq 0x118(%rsp), %rax movq %rax, 0x100(%rsp) movq %rax, 0x880(%rsp) movq 0x880(%rsp), %rax movq %rax, 0x108(%rsp) movq $0x0, (%rax) movq $0x0, 0x8(%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movq $0x0, 0x20(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq 0x110(%rsp), %rcx movq 0x100(%rsp), %rax addq $0x48, %rax cmpq %rcx, %rax movq %rax, 0x118(%rsp) jne 0x1685892 leaq 0x380(%rsp), %rax movq %rax, 0x9c8(%rsp) leaq 0x800(%rsp), %rax movq %rax, 0x9c0(%rsp) movq 0x9c8(%rsp), %rax movq %rax, 0xf8(%rsp) cmpq 0x9c0(%rsp), %rax jne 0x168597f movq 0xf8(%rsp), %rax movq %rax, 0x9d0(%rsp) jmp 0x1685b72 movq 0x9c0(%rsp), %rax cmpq $0x0, 0x8(%rax) je 0x16859b7 movq 0x9c0(%rsp), %rax movq 0x8(%rax), %rcx movl $0x1, 0x9bc(%rsp) movl 0x9bc(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x9b8(%rsp) movq 0xf8(%rsp), %rax movq %rax, 0xb00(%rsp) movq 0xb00(%rsp), %rax movq %rax, 0xf0(%rsp) cmpq $0x0, 0x8(%rax) je 0x1685a6f movq 0xf0(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xafc(%rsp) # imm = 0xFFFFFFFF movl 0xafc(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xaf8(%rsp) cmpl $0x1, 0xaf8(%rsp) jne 0x1685a6f movq 0xf0(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1685a40 movq 0xf0(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1685a3e jmp 0x1685a6d movq 0xf0(%rsp), %rax movq (%rax), %rax movq %rax, 0xef0(%rsp) cmpq $0x0, 0xef0(%rsp) je 0x1685a6b movq 0xef0(%rsp), %rdi callq 0x5e480 jmp 0x1685a6d jmp 0x1685a6f movq 0xf0(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 
0x40(%rax) movq $0x0, 0x8(%rax) movq 0xf8(%rsp), %rax movq 0x9c0(%rsp), %rcx movq (%rcx), %rcx movq %rcx, (%rax) movq 0x9c0(%rsp), %rcx movq 0x8(%rcx), %rcx movq %rcx, 0x8(%rax) movq 0x9c0(%rsp), %rcx movq 0x10(%rcx), %rcx movq %rcx, 0x10(%rax) movq 0x9c0(%rsp), %rcx movl 0x18(%rcx), %ecx movl %ecx, 0x18(%rax) movq 0x9c0(%rsp), %rcx movq 0x20(%rcx), %rcx movq %rcx, 0x20(%rax) movq 0x9c0(%rsp), %rcx movl 0x28(%rcx), %ecx movl %ecx, 0x28(%rax) movq 0x9c0(%rsp), %rcx movl 0x2c(%rcx), %ecx movl %ecx, 0x2c(%rax) movq 0x9c0(%rsp), %rcx movl 0x30(%rcx), %ecx movl %ecx, 0x30(%rax) movq 0x9c0(%rsp), %rcx movl 0x34(%rcx), %ecx movl %ecx, 0x34(%rax) movq 0x9c0(%rsp), %rcx movl 0x38(%rcx), %ecx movl %ecx, 0x38(%rax) movq 0x9c0(%rsp), %rcx movq 0x40(%rcx), %rcx movq %rcx, 0x40(%rax) movq %rax, 0x9d0(%rsp) movq 0x288(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx cmpl $0x0, 0x10c(%rax,%rcx) je 0x16867ce movl 0x84c(%rsp), %eax leaq 0x338(%rsp), %rcx movq %rcx, 0xda0(%rsp) movl %eax, 0xd9c(%rsp) movq $0x4, 0xd90(%rsp) movq $0x0, 0xd88(%rsp) movq 0xda0(%rsp), %rdi movq %rdi, 0xe8(%rsp) movq $0x0, (%rdi) movq $0x0, 0x8(%rdi) movq $0x0, 0x10(%rdi) movl $0x0, 0x18(%rdi) movq $0x0, 0x20(%rdi) movl $0x0, 0x28(%rdi) movl $0x0, 0x2c(%rdi) movl $0x0, 0x30(%rdi) movl $0x0, 0x34(%rdi) movl $0x0, 0x38(%rdi) movq $0x0, 0x40(%rdi) movl 0xd9c(%rsp), %esi movq 0xd90(%rsp), %rdx movq 0xd88(%rsp), %rcx callq 0x65040 jmp 0x1685c44 jmp 0x1685c46 movq 0x288(%rsp), %rcx movq (%rcx), %rax addq -0x18(%rax), %rcx addq $0x1f8, %rcx # imm = 0x1F8 movslq 0x848(%rsp), %rax movq %rcx, 0xdf0(%rsp) movq %rax, 0xde8(%rsp) movq 0xdf0(%rsp), %rax movq (%rax), %rax movq 0xde8(%rsp), %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0xe0(%rsp) movq 0xe0(%rsp), %rax movss (%rax), %xmm0 leaq 0x338(%rsp), %rax movq %rax, 0xdc0(%rsp) movss %xmm0, 0xdbc(%rsp) movq 0xdc0(%rsp), %rax movq %rax, 0x1050(%rsp) movq 0x1050(%rsp), %rdx movq 0x40(%rdx), %rcx movslq 0x38(%rdx), %rdx imulq %rdx, %rcx movl %ecx, 0xdb8(%rsp) movq 
(%rax), %rax movq %rax, 0xdb0(%rsp) movl $0x0, 0xdac(%rsp) movl 0xdac(%rsp), %eax cmpl 0xdb8(%rsp), %eax jge 0x1685d43 movss 0xdbc(%rsp), %xmm0 movq 0xdb0(%rsp), %rax movq %rax, %rcx addq $0x4, %rcx movq %rcx, 0xdb0(%rsp) movss %xmm0, (%rax) movl 0xdac(%rsp), %eax addl $0x1, %eax movl %eax, 0xdac(%rsp) jmp 0x1685cfc jmp 0x1685d45 leaq 0x380(%rsp), %rax addq $0x48, %rax movq %rax, 0x9a8(%rsp) leaq 0x338(%rsp), %rax movq %rax, 0x9a0(%rsp) movq 0x9a8(%rsp), %rax movq %rax, 0xd8(%rsp) cmpq 0x9a0(%rsp), %rax jne 0x1685d98 movq 0xd8(%rsp), %rax movq %rax, 0x9b0(%rsp) jmp 0x1685f8b movq 0x9a0(%rsp), %rax cmpq $0x0, 0x8(%rax) je 0x1685dd0 movq 0x9a0(%rsp), %rax movq 0x8(%rax), %rcx movl $0x1, 0x99c(%rsp) movl 0x99c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x998(%rsp) movq 0xd8(%rsp), %rax movq %rax, 0xb10(%rsp) movq 0xb10(%rsp), %rax movq %rax, 0xd0(%rsp) cmpq $0x0, 0x8(%rax) je 0x1685e88 movq 0xd0(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xb0c(%rsp) # imm = 0xFFFFFFFF movl 0xb0c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xb08(%rsp) cmpl $0x1, 0xb08(%rsp) jne 0x1685e88 movq 0xd0(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1685e59 movq 0xd0(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1685e57 jmp 0x1685e86 movq 0xd0(%rsp), %rax movq (%rax), %rax movq %rax, 0xee8(%rsp) cmpq $0x0, 0xee8(%rsp) je 0x1685e84 movq 0xee8(%rsp), %rdi callq 0x5e480 jmp 0x1685e86 jmp 0x1685e88 movq 0xd0(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) movq 0xd8(%rsp), %rax movq 0x9a0(%rsp), %rcx movq (%rcx), %rcx movq %rcx, (%rax) movq 0x9a0(%rsp), %rcx movq 0x8(%rcx), %rcx movq %rcx, 0x8(%rax) movq 0x9a0(%rsp), %rcx movq 0x10(%rcx), %rcx movq %rcx, 0x10(%rax) movq 0x9a0(%rsp), %rcx movl 0x18(%rcx), %ecx movl %ecx, 0x18(%rax) movq 0x9a0(%rsp), 
%rcx movq 0x20(%rcx), %rcx movq %rcx, 0x20(%rax) movq 0x9a0(%rsp), %rcx movl 0x28(%rcx), %ecx movl %ecx, 0x28(%rax) movq 0x9a0(%rsp), %rcx movl 0x2c(%rcx), %ecx movl %ecx, 0x2c(%rax) movq 0x9a0(%rsp), %rcx movl 0x30(%rcx), %ecx movl %ecx, 0x30(%rax) movq 0x9a0(%rsp), %rcx movl 0x34(%rcx), %ecx movl %ecx, 0x34(%rax) movq 0x9a0(%rsp), %rcx movl 0x38(%rcx), %ecx movl %ecx, 0x38(%rax) movq 0x9a0(%rsp), %rcx movq 0x40(%rcx), %rcx movq %rcx, 0x40(%rax) movq %rax, 0x9b0(%rsp) movq 0x288(%rsp), %rcx movq (%rcx), %rax addq -0x18(%rax), %rcx addq $0x240, %rcx # imm = 0x240 movl 0x848(%rsp), %eax leaq 0x2f0(%rsp), %rdx movq %rdx, 0xd00(%rsp) movq %rcx, 0xcf8(%rsp) movl %eax, 0xcf4(%rsp) movl $0x1, 0xcf0(%rsp) movq 0xcf8(%rsp), %rax movl 0xcf0(%rsp), %edi movq (%rax), %rsi movslq 0xcf4(%rsp), %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x2f0(%rsp), %r8 movq %r8, 0x1010(%rsp) movl %edi, 0x100c(%rsp) movq %rsi, 0x1000(%rsp) movq %rdx, 0xff8(%rsp) movl %ecx, 0xff4(%rsp) movq %rax, 0xfe8(%rsp) movq 0x1010(%rsp), %rax movq %rax, 0xc8(%rsp) movq 0x1000(%rsp), %rcx movq %rcx, (%rax) movq $0x0, 0x8(%rax) movq 0xff8(%rsp), %rcx movq %rcx, 0x10(%rax) movl 0xff4(%rsp), %ecx movl %ecx, 0x18(%rax) movq 0xfe8(%rsp), %rcx movq %rcx, 0x20(%rax) movl $0x1, 0x28(%rax) movl 0x100c(%rsp), %ecx movl %ecx, 0x2c(%rax) movl $0x1, 0x30(%rax) movl $0x1, 0x34(%rax) movl $0x1, 0x38(%rax) movslq 0x2c(%rax), %rcx movq %rcx, 0x40(%rax) leaq 0x380(%rsp), %rax addq $0x90, %rax movq %rax, 0x988(%rsp) leaq 0x2f0(%rsp), %rax movq %rax, 0x980(%rsp) movq 0x988(%rsp), %rax movq %rax, 0xc0(%rsp) cmpq 0x980(%rsp), %rax jne 0x16860fd movq 0xc0(%rsp), %rax movq %rax, 0x990(%rsp) jmp 0x16862f0 movq 0x980(%rsp), %rax cmpq $0x0, 0x8(%rax) je 0x1686135 movq 0x980(%rsp), %rax movq 0x8(%rax), %rcx movl $0x1, 0x97c(%rsp) movl 0x97c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x978(%rsp) movq 0xc0(%rsp), %rax movq %rax, 0xb20(%rsp) movq 0xb20(%rsp), %rax 
movq %rax, 0xb8(%rsp) cmpq $0x0, 0x8(%rax) je 0x16861ed movq 0xb8(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xb1c(%rsp) # imm = 0xFFFFFFFF movl 0xb1c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xb18(%rsp) cmpl $0x1, 0xb18(%rsp) jne 0x16861ed movq 0xb8(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x16861be movq 0xb8(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x16861bc jmp 0x16861eb movq 0xb8(%rsp), %rax movq (%rax), %rax movq %rax, 0xee0(%rsp) cmpq $0x0, 0xee0(%rsp) je 0x16861e9 movq 0xee0(%rsp), %rdi callq 0x5e480 jmp 0x16861eb jmp 0x16861ed movq 0xb8(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) movq 0xc0(%rsp), %rax movq 0x980(%rsp), %rcx movq (%rcx), %rcx movq %rcx, (%rax) movq 0x980(%rsp), %rcx movq 0x8(%rcx), %rcx movq %rcx, 0x8(%rax) movq 0x980(%rsp), %rcx movq 0x10(%rcx), %rcx movq %rcx, 0x10(%rax) movq 0x980(%rsp), %rcx movl 0x18(%rcx), %ecx movl %ecx, 0x18(%rax) movq 0x980(%rsp), %rcx movq 0x20(%rcx), %rcx movq %rcx, 0x20(%rax) movq 0x980(%rsp), %rcx movl 0x28(%rcx), %ecx movl %ecx, 0x28(%rax) movq 0x980(%rsp), %rcx movl 0x2c(%rcx), %ecx movl %ecx, 0x2c(%rax) movq 0x980(%rsp), %rcx movl 0x30(%rcx), %ecx movl %ecx, 0x30(%rax) movq 0x980(%rsp), %rcx movl 0x34(%rcx), %ecx movl %ecx, 0x34(%rax) movq 0x980(%rsp), %rcx movl 0x38(%rcx), %ecx movl %ecx, 0x38(%rax) movq 0x980(%rsp), %rcx movq 0x40(%rcx), %rcx movq %rcx, 0x40(%rax) movq %rax, 0x990(%rsp) leaq 0x2f0(%rsp), %rax movq %rax, 0x8f8(%rsp) movq 0x8f8(%rsp), %rax movq %rax, 0xbf0(%rsp) movq 0xbf0(%rsp), %rax movq %rax, 0xb0(%rsp) cmpq $0x0, 0x8(%rax) je 0x16863b8 movq 0xb0(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xbec(%rsp) # imm = 0xFFFFFFFF movl 0xbec(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xbe8(%rsp) cmpl $0x1, 0xbe8(%rsp) jne 0x16863b8 
movq 0xb0(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1686389 movq 0xb0(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1686387 jmp 0x16863b6 movq 0xb0(%rsp), %rax movq (%rax), %rax movq %rax, 0xe78(%rsp) cmpq $0x0, 0xe78(%rsp) je 0x16863b4 movq 0xe78(%rsp), %rdi callq 0x5e480 jmp 0x16863b6 jmp 0x16863b8 movq 0xb0(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1686413 movq %rax, %rdi callq 0x5fc90 leaq 0x338(%rsp), %rax movq %rax, 0x908(%rsp) movq 0x908(%rsp), %rax movq %rax, 0xbd0(%rsp) movq 0xbd0(%rsp), %rax movq %rax, 0xa8(%rsp) cmpq $0x0, 0x8(%rax) je 0x16864db movq 0xa8(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xbcc(%rsp) # imm = 0xFFFFFFFF movl 0xbcc(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xbc8(%rsp) cmpl $0x1, 0xbc8(%rsp) jne 0x16864db movq 0xa8(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x16864ac movq 0xa8(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x16864aa jmp 0x16864d9 movq 0xa8(%rsp), %rax movq (%rax), %rax movq %rax, 0xe88(%rsp) cmpq $0x0, 0xe88(%rsp) je 0x16864d7 movq 0xe88(%rsp), %rdi callq 0x5e480 jmp 0x16864d9 jmp 0x16864db movq 0xa8(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1686536 movq %rax, %rdi callq 0x5fc90 jmp 0x16867ce movq %rax, %rcx movl %edx, %eax movq %rcx, 0x7b0(%rsp) movl %eax, 0x7ac(%rsp) jmp 0x1686f46 movq %rax, %rcx movl %edx, %eax movq %rcx, 0x7b0(%rsp) movl %eax, 0x7ac(%rsp) jmp 0x16866a6 movq %rax, %rcx movl %edx, %eax movq %rcx, 0x7b0(%rsp) movl %eax, 0x7ac(%rsp) leaq 0x2f0(%rsp), %rax movq %rax, 0x900(%rsp) movq 0x900(%rsp), 
%rax movq %rax, 0xbe0(%rsp) movq 0xbe0(%rsp), %rax movq %rax, 0xa0(%rsp) cmpq $0x0, 0x8(%rax) je 0x1686649 movq 0xa0(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xbdc(%rsp) # imm = 0xFFFFFFFF movl 0xbdc(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xbd8(%rsp) cmpl $0x1, 0xbd8(%rsp) jne 0x1686649 movq 0xa0(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x168661a movq 0xa0(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1686618 jmp 0x1686647 movq 0xa0(%rsp), %rax movq (%rax), %rax movq %rax, 0xe80(%rsp) cmpq $0x0, 0xe80(%rsp) je 0x1686645 movq 0xe80(%rsp), %rdi callq 0x5e480 jmp 0x1686647 jmp 0x1686649 movq 0xa0(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x16866a4 movq %rax, %rdi callq 0x5fc90 jmp 0x16866a6 leaq 0x338(%rsp), %rax movq %rax, 0x910(%rsp) movq 0x910(%rsp), %rax movq %rax, 0xbc0(%rsp) movq 0xbc0(%rsp), %rax movq %rax, 0x98(%rsp) cmpq $0x0, 0x8(%rax) je 0x168676e movq 0x98(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xbbc(%rsp) # imm = 0xFFFFFFFF movl 0xbbc(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xbb8(%rsp) cmpl $0x1, 0xbb8(%rsp) jne 0x168676e movq 0x98(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x168673f movq 0x98(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x168673d jmp 0x168676c movq 0x98(%rsp), %rax movq (%rax), %rax movq %rax, 0xe90(%rsp) cmpq $0x0, 0xe90(%rsp) je 0x168676a movq 0xe90(%rsp), %rdi callq 0x5e480 jmp 0x168676c jmp 0x168676e movq 0x98(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x16867c9 movq %rax, %rdi callq 0x5fc90 jmp 0x1686f46 movq 
0x288(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx cmpl $0x64, 0x10c(%rax,%rcx) jle 0x1686d90 movq 0x288(%rsp), %rcx movq (%rcx), %rax addq -0x18(%rax), %rcx addq $0x288, %rcx # imm = 0x288 movl 0x848(%rsp), %eax leaq 0x2a8(%rsp), %rdx movq %rdx, 0xce8(%rsp) movq %rcx, 0xce0(%rsp) movl %eax, 0xcdc(%rsp) movl $0x1, 0xcd8(%rsp) movq 0xce0(%rsp), %rax movl 0xcd8(%rsp), %edi movq (%rax), %rsi movslq 0xcdc(%rsp), %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x2a8(%rsp), %r8 movq %r8, 0x1040(%rsp) movl %edi, 0x103c(%rsp) movq %rsi, 0x1030(%rsp) movq %rdx, 0x1028(%rsp) movl %ecx, 0x1024(%rsp) movq %rax, 0x1018(%rsp) movq 0x1040(%rsp), %rax movq %rax, 0x90(%rsp) movq 0x1030(%rsp), %rcx movq %rcx, (%rax) movq $0x0, 0x8(%rax) movq 0x1028(%rsp), %rcx movq %rcx, 0x10(%rax) movl 0x1024(%rsp), %ecx movl %ecx, 0x18(%rax) movq 0x1018(%rsp), %rcx movq %rcx, 0x20(%rax) movl $0x1, 0x28(%rax) movl 0x103c(%rsp), %ecx movl %ecx, 0x2c(%rax) movl $0x1, 0x30(%rax) movl $0x1, 0x34(%rax) movl $0x1, 0x38(%rax) movslq 0x2c(%rax), %rcx movq %rcx, 0x40(%rax) leaq 0x380(%rsp), %rax addq $0xd8, %rax movq %rax, 0x968(%rsp) leaq 0x2a8(%rsp), %rax movq %rax, 0x960(%rsp) movq 0x968(%rsp), %rax movq %rax, 0x88(%rsp) cmpq 0x960(%rsp), %rax jne 0x168695d movq 0x88(%rsp), %rax movq %rax, 0x970(%rsp) jmp 0x1686b50 movq 0x960(%rsp), %rax cmpq $0x0, 0x8(%rax) je 0x1686995 movq 0x960(%rsp), %rax movq 0x8(%rax), %rcx movl $0x1, 0x95c(%rsp) movl 0x95c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x958(%rsp) movq 0x88(%rsp), %rax movq %rax, 0xb30(%rsp) movq 0xb30(%rsp), %rax movq %rax, 0x80(%rsp) cmpq $0x0, 0x8(%rax) je 0x1686a4d movq 0x80(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xb2c(%rsp) # imm = 0xFFFFFFFF movl 0xb2c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xb28(%rsp) cmpl $0x1, 0xb28(%rsp) jne 0x1686a4d movq 0x80(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1686a1e movq 0x80(%rsp), %rax movq (%rax), %rsi movq 
0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1686a1c jmp 0x1686a4b movq 0x80(%rsp), %rax movq (%rax), %rax movq %rax, 0xed8(%rsp) cmpq $0x0, 0xed8(%rsp) je 0x1686a49 movq 0xed8(%rsp), %rdi callq 0x5e480 jmp 0x1686a4b jmp 0x1686a4d movq 0x80(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) movq 0x88(%rsp), %rax movq 0x960(%rsp), %rcx movq (%rcx), %rcx movq %rcx, (%rax) movq 0x960(%rsp), %rcx movq 0x8(%rcx), %rcx movq %rcx, 0x8(%rax) movq 0x960(%rsp), %rcx movq 0x10(%rcx), %rcx movq %rcx, 0x10(%rax) movq 0x960(%rsp), %rcx movl 0x18(%rcx), %ecx movl %ecx, 0x18(%rax) movq 0x960(%rsp), %rcx movq 0x20(%rcx), %rcx movq %rcx, 0x20(%rax) movq 0x960(%rsp), %rcx movl 0x28(%rcx), %ecx movl %ecx, 0x28(%rax) movq 0x960(%rsp), %rcx movl 0x2c(%rcx), %ecx movl %ecx, 0x2c(%rax) movq 0x960(%rsp), %rcx movl 0x30(%rcx), %ecx movl %ecx, 0x30(%rax) movq 0x960(%rsp), %rcx movl 0x34(%rcx), %ecx movl %ecx, 0x34(%rax) movq 0x960(%rsp), %rcx movl 0x38(%rcx), %ecx movl %ecx, 0x38(%rax) movq 0x960(%rsp), %rcx movq 0x40(%rcx), %rcx movq %rcx, 0x40(%rax) movq %rax, 0x970(%rsp) leaq 0x2a8(%rsp), %rax movq %rax, 0x918(%rsp) movq 0x918(%rsp), %rax movq %rax, 0xbb0(%rsp) movq 0xbb0(%rsp), %rax movq %rax, 0x78(%rsp) cmpq $0x0, 0x8(%rax) je 0x1686c09 movq 0x78(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xbac(%rsp) # imm = 0xFFFFFFFF movl 0xbac(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xba8(%rsp) cmpl $0x1, 0xba8(%rsp) jne 0x1686c09 movq 0x78(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1686bdd movq 0x78(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1686bdb jmp 0x1686c07 movq 0x78(%rsp), %rax movq (%rax), %rax movq %rax, 0xe98(%rsp) cmpq $0x0, 0xe98(%rsp) je 0x1686c05 movq 0xe98(%rsp), %rdi callq 0x5e480 jmp 0x1686c07 jmp 
0x1686c09 movq 0x78(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1686c61 movq %rax, %rdi callq 0x5fc90 jmp 0x1686d90 movq %rax, %rcx movl %edx, %eax movq %rcx, 0x7b0(%rsp) movl %eax, 0x7ac(%rsp) leaq 0x2a8(%rsp), %rax movq %rax, 0x920(%rsp) movq 0x920(%rsp), %rax movq %rax, 0xba0(%rsp) movq 0xba0(%rsp), %rax movq %rax, 0x70(%rsp) cmpq $0x0, 0x8(%rax) je 0x1686d33 movq 0x70(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xb9c(%rsp) # imm = 0xFFFFFFFF movl 0xb9c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xb98(%rsp) cmpl $0x1, 0xb98(%rsp) jne 0x1686d33 movq 0x70(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1686d07 movq 0x70(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1686d05 jmp 0x1686d31 movq 0x70(%rsp), %rax movq (%rax), %rax movq %rax, 0xea0(%rsp) cmpq $0x0, 0xea0(%rsp) je 0x1686d2f movq 0xea0(%rsp), %rdi callq 0x5e480 jmp 0x1686d31 jmp 0x1686d33 movq 0x70(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1686d8b movq %rax, %rdi callq 0x5fc90 jmp 0x1686f46 movq 0x710(%rsp), %rax movq %rax, 0x68(%rsp) leaq 0x298(%rsp), %rdi leaq 0x380(%rsp), %rsi callq 0x89470 jmp 0x1686db4 movq 0x68(%rsp), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax leaq 0x298(%rsp), %rsi callq *%rax jmp 0x1686dcc leaq 0x298(%rsp), %rdi callq 0x89520 leaq 0x380(%rsp), %rax movq %rax, 0x58(%rsp) addq $0x120, %rax # imm = 0x120 movq %rax, 0x60(%rsp) movq 0x60(%rsp), %rax addq $-0x48, %rax movq %rax, 0x48(%rsp) movq %rax, 0x928(%rsp) movq 0x928(%rsp), %rax movq %rax, 0xb90(%rsp) movq 0xb90(%rsp), %rax movq %rax, 0x50(%rsp) cmpq $0x0, 0x8(%rax) je 0x1686eb0 movq 
0x50(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xb8c(%rsp) # imm = 0xFFFFFFFF movl 0xb8c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xb88(%rsp) cmpl $0x1, 0xb88(%rsp) jne 0x1686eb0 movq 0x50(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1686e84 movq 0x50(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1686e82 jmp 0x1686eae movq 0x50(%rsp), %rax movq (%rax), %rax movq %rax, 0xea8(%rsp) cmpq $0x0, 0xea8(%rsp) je 0x1686eac movq 0xea8(%rsp), %rdi callq 0x5e480 jmp 0x1686eae jmp 0x1686eb0 movq 0x50(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1686f08 movq %rax, %rdi callq 0x5fc90 movq 0x48(%rsp), %rax movq 0x58(%rsp), %rcx cmpq %rcx, %rax movq %rax, 0x60(%rsp) jne 0x1686df1 jmp 0x1687092 movq %rax, %rcx movl %edx, %eax movq %rcx, 0x7b0(%rsp) movl %eax, 0x7ac(%rsp) leaq 0x298(%rsp), %rdi callq 0x89520 leaq 0x380(%rsp), %rax movq %rax, 0x38(%rsp) addq $0x120, %rax # imm = 0x120 movq %rax, 0x40(%rsp) movq 0x40(%rsp), %rax addq $-0x48, %rax movq %rax, 0x28(%rsp) movq %rax, 0x930(%rsp) movq 0x930(%rsp), %rax movq %rax, 0xb80(%rsp) movq 0xb80(%rsp), %rax movq %rax, 0x30(%rsp) cmpq $0x0, 0x8(%rax) je 0x168701d movq 0x30(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xb7c(%rsp) # imm = 0xFFFFFFFF movl 0xb7c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xb78(%rsp) cmpl $0x1, 0xb78(%rsp) jne 0x168701d movq 0x30(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1686ff1 movq 0x30(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1686fef jmp 0x168701b movq 0x30(%rsp), %rax movq (%rax), %rax movq %rax, 0xeb0(%rsp) cmpq $0x0, 0xeb0(%rsp) je 0x1687019 movq 0xeb0(%rsp), %rdi callq 0x5e480 jmp 0x168701b jmp 0x168701d movq 0x30(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) 
movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1687075 movq %rax, %rdi callq 0x5fc90 movq 0x28(%rsp), %rax movq 0x38(%rsp), %rcx cmpq %rcx, %rax movq %rax, 0x40(%rsp) jne 0x1686f5e jmp 0x1687322 movq 0x710(%rsp), %rdi movq 0x860(%rsp), %rsi movq (%rdi), %rax movq 0x20(%rax), %rax callq *%rax jmp 0x16870ad movq 0x288(%rsp), %rdi movq 0x710(%rsp), %rax movq %rax, 0x18(%rsp) addq $0x10, %rdi movslq 0x848(%rsp), %rsi callq 0x989e0 movq 0x18(%rsp), %rcx movq %rcx, (%rax) leaq 0x700(%rsp), %rdi callq 0xa0e10 leaq 0x760(%rsp), %rax movq %rax, 0x938(%rsp) movq 0x938(%rsp), %rax movq %rax, 0xb70(%rsp) movq 0xb70(%rsp), %rax movq %rax, 0x20(%rsp) cmpq $0x0, 0x8(%rax) je 0x16871a1 movq 0x20(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xb6c(%rsp) # imm = 0xFFFFFFFF movl 0xb6c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xb68(%rsp) cmpl $0x1, 0xb68(%rsp) jne 0x16871a1 movq 0x20(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1687175 movq 0x20(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1687173 jmp 0x168719f movq 0x20(%rsp), %rax movq (%rax), %rax movq %rax, 0xeb8(%rsp) cmpq $0x0, 0xeb8(%rsp) je 0x168719d movq 0xeb8(%rsp), %rdi callq 0x5e480 jmp 0x168719f jmp 0x16871a1 movq 0x20(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x16871f9 movq %rax, %rdi callq 0x5fc90 leaq 0x800(%rsp), %rax movq %rax, 0x948(%rsp) movq 0x948(%rsp), %rax movq %rax, 0xb50(%rsp) movq 0xb50(%rsp), %rax movq %rax, 0x10(%rsp) cmpq $0x0, 0x8(%rax) je 0x16872b2 movq 0x10(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xb4c(%rsp) # imm = 0xFFFFFFFF movl 0xb4c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xb48(%rsp) cmpl $0x1, 
0xb48(%rsp) jne 0x16872b2 movq 0x10(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1687286 movq 0x10(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1687284 jmp 0x16872b0 movq 0x10(%rsp), %rax movq (%rax), %rax movq %rax, 0xec8(%rsp) cmpq $0x0, 0xec8(%rsp) je 0x16872ae movq 0xec8(%rsp), %rdi callq 0x5e480 jmp 0x16872b0 jmp 0x16872b2 movq 0x10(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x168730a movq %rax, %rdi callq 0x5fc90 jmp 0x168730c movl 0x848(%rsp), %eax addl $0x1, %eax movl %eax, 0x848(%rsp) jmp 0x16830b5 leaq 0x700(%rsp), %rdi callq 0xa0e10 leaq 0x760(%rsp), %rax movq %rax, 0x940(%rsp) movq 0x940(%rsp), %rax movq %rax, 0xb60(%rsp) movq 0xb60(%rsp), %rax movq %rax, 0x8(%rsp) cmpq $0x0, 0x8(%rax) je 0x16873e8 movq 0x8(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xb5c(%rsp) # imm = 0xFFFFFFFF movl 0xb5c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xb58(%rsp) cmpl $0x1, 0xb58(%rsp) jne 0x16873e8 movq 0x8(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x16873bc movq 0x8(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x16873ba jmp 0x16873e6 movq 0x8(%rsp), %rax movq (%rax), %rax movq %rax, 0xec0(%rsp) cmpq $0x0, 0xec0(%rsp) je 0x16873e4 movq 0xec0(%rsp), %rdi callq 0x5e480 jmp 0x16873e6 jmp 0x16873e8 movq 0x8(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1687440 movq %rax, %rdi callq 0x5fc90 jmp 0x1687442 leaq 0x800(%rsp), %rax movq %rax, 0x950(%rsp) movq 0x950(%rsp), %rax movq %rax, 0xb40(%rsp) movq 0xb40(%rsp), %rax movq %rax, (%rsp) cmpq $0x0, 0x8(%rax) je 0x16874f2 movq (%rsp), 
%rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xb3c(%rsp) # imm = 0xFFFFFFFF movl 0xb3c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xb38(%rsp) cmpl $0x1, 0xb38(%rsp) jne 0x16874f2 movq (%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x16874c7 movq (%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x16874c5 jmp 0x16874f0 movq (%rsp), %rax movq (%rax), %rax movq %rax, 0xed0(%rsp) cmpq $0x0, 0xed0(%rsp) je 0x16874ee movq 0xed0(%rsp), %rdi callq 0x5e480 jmp 0x16874f0 jmp 0x16874f2 movq (%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1687549 movq %rax, %rdi callq 0x5fc90 jmp 0x1687555 xorl %eax, %eax addq $0x1058, %rsp # imm = 0x1058 retq movq 0x7b0(%rsp), %rdi callq 0x5e3b0 nopw %cs:(%rax,%rax)
/ysh329[P]ncnn/src/layer/x86/convolutiondepthwise_x86.cpp
virtual thunk to ncnn::ConvolutionDepthWise_x86::destroy_pipeline(ncnn::Option const&)
int ConvolutionDepthWise_x86::destroy_pipeline(const Option& opt) { if (activation) { activation->destroy_pipeline(opt); delete activation; activation = 0; } for (int i = 0; i < (int)group_ops.size(); i++) { group_ops[i]->destroy_pipeline(opt); delete group_ops[i]; } group_ops.clear(); return 0; }
movq %rdi, -0x8(%rsp) movq %rsi, -0x10(%rsp) movq -0x8(%rsp), %rdi movq (%rdi), %rax movq -0x38(%rax), %rax addq %rax, %rdi movq -0x10(%rsp), %rsi jmp 0x16875a0 nopw %cs:(%rax,%rax)
/ysh329[P]ncnn/src/layer/x86/convolutiondepthwise_x86.cpp
ncnn::convdw5x5s1_pack4_sse(ncnn::Mat const&, ncnn::Mat&, ncnn::Mat const&, ncnn::Mat const&, ncnn::Option const&)
static void convdw5x5s1_pack4_sse(const Mat& bottom_blob, Mat& top_blob, const Mat& kernel, const Mat& _bias, const Option& opt) { int w = bottom_blob.w; int outw = top_blob.w; int outh = top_blob.h; const int group = bottom_blob.c; const float* bias = _bias; #pragma omp parallel for num_threads(opt.num_threads) for (int g = 0; g < group; g++) { Mat out = top_blob.channel(g); __m128 _bias0 = bias ? _mm_loadu_ps(bias + g * 4) : _mm_setzero_ps(); const float* k0 = kernel.row(g); float* outptr0 = out.row(0); float* outptr1 = out.row(1); const Mat img0 = bottom_blob.channel(g); const float* r0 = img0.row(0); const float* r1 = img0.row(1); const float* r2 = img0.row(2); const float* r3 = img0.row(3); const float* r4 = img0.row(4); const float* r5 = img0.row(5); int i = 0; for (; i + 1 < outh; i += 2) { int j = 0; for (; j < outw; j++) { __m128 _sum0 = _bias0; __m128 _sum1 = _bias0; __m128 _r00 = _mm_load_ps(r0); __m128 _r01 = _mm_load_ps(r0 + 4); __m128 _r02 = _mm_load_ps(r0 + 4 * 2); __m128 _r03 = _mm_load_ps(r0 + 4 * 3); __m128 _r04 = _mm_load_ps(r0 + 4 * 4); __m128 _k00 = _mm_load_ps(k0); __m128 _k01 = _mm_load_ps(k0 + 4); __m128 _k02 = _mm_load_ps(k0 + 4 * 2); __m128 _k03 = _mm_load_ps(k0 + 4 * 3); __m128 _k04 = _mm_load_ps(k0 + 4 * 4); k0 += 4 * 5; _sum0 = _mm_comp_fmadd_ps(_k00, _r00, _sum0); _sum0 = _mm_comp_fmadd_ps(_k01, _r01, _sum0); _sum0 = _mm_comp_fmadd_ps(_k02, _r02, _sum0); _sum0 = _mm_comp_fmadd_ps(_k03, _r03, _sum0); _sum0 = _mm_comp_fmadd_ps(_k04, _r04, _sum0); __m128 _r10 = _mm_load_ps(r1); __m128 _r11 = _mm_load_ps(r1 + 4); __m128 _r12 = _mm_load_ps(r1 + 4 * 2); __m128 _r13 = _mm_load_ps(r1 + 4 * 3); __m128 _r14 = _mm_load_ps(r1 + 4 * 4); _sum1 = _mm_comp_fmadd_ps(_k00, _r10, _sum1); _sum1 = _mm_comp_fmadd_ps(_k01, _r11, _sum1); _sum1 = _mm_comp_fmadd_ps(_k02, _r12, _sum1); _sum1 = _mm_comp_fmadd_ps(_k03, _r13, _sum1); _sum1 = _mm_comp_fmadd_ps(_k04, _r14, _sum1); __m128 _k10 = _mm_load_ps(k0); __m128 _k11 = _mm_load_ps(k0 + 4); __m128 _k12 = 
_mm_load_ps(k0 + 4 * 2); __m128 _k13 = _mm_load_ps(k0 + 4 * 3); __m128 _k14 = _mm_load_ps(k0 + 4 * 4); k0 += 4 * 5; _sum0 = _mm_comp_fmadd_ps(_k10, _r10, _sum0); _sum0 = _mm_comp_fmadd_ps(_k11, _r11, _sum0); _sum0 = _mm_comp_fmadd_ps(_k12, _r12, _sum0); _sum0 = _mm_comp_fmadd_ps(_k13, _r13, _sum0); _sum0 = _mm_comp_fmadd_ps(_k14, _r14, _sum0); __m128 _r20 = _mm_load_ps(r2); __m128 _r21 = _mm_load_ps(r2 + 4); __m128 _r22 = _mm_load_ps(r2 + 4 * 2); __m128 _r23 = _mm_load_ps(r2 + 4 * 3); __m128 _r24 = _mm_load_ps(r2 + 4 * 4); _sum1 = _mm_comp_fmadd_ps(_k10, _r20, _sum1); _sum1 = _mm_comp_fmadd_ps(_k11, _r21, _sum1); _sum1 = _mm_comp_fmadd_ps(_k12, _r22, _sum1); _sum1 = _mm_comp_fmadd_ps(_k13, _r23, _sum1); _sum1 = _mm_comp_fmadd_ps(_k14, _r24, _sum1); __m128 _k20 = _mm_load_ps(k0); __m128 _k21 = _mm_load_ps(k0 + 4); __m128 _k22 = _mm_load_ps(k0 + 4 * 2); __m128 _k23 = _mm_load_ps(k0 + 4 * 3); __m128 _k24 = _mm_load_ps(k0 + 4 * 4); k0 += 4 * 5; _sum0 = _mm_comp_fmadd_ps(_k20, _r20, _sum0); _sum0 = _mm_comp_fmadd_ps(_k21, _r21, _sum0); _sum0 = _mm_comp_fmadd_ps(_k22, _r22, _sum0); _sum0 = _mm_comp_fmadd_ps(_k23, _r23, _sum0); _sum0 = _mm_comp_fmadd_ps(_k24, _r24, _sum0); __m128 _r30 = _mm_load_ps(r3); __m128 _r31 = _mm_load_ps(r3 + 4); __m128 _r32 = _mm_load_ps(r3 + 4 * 2); __m128 _r33 = _mm_load_ps(r3 + 4 * 3); __m128 _r34 = _mm_load_ps(r3 + 4 * 4); _sum1 = _mm_comp_fmadd_ps(_k20, _r30, _sum1); _sum1 = _mm_comp_fmadd_ps(_k21, _r31, _sum1); _sum1 = _mm_comp_fmadd_ps(_k22, _r32, _sum1); _sum1 = _mm_comp_fmadd_ps(_k23, _r33, _sum1); _sum1 = _mm_comp_fmadd_ps(_k24, _r34, _sum1); __m128 _k30 = _mm_load_ps(k0); __m128 _k31 = _mm_load_ps(k0 + 4); __m128 _k32 = _mm_load_ps(k0 + 4 * 2); __m128 _k33 = _mm_load_ps(k0 + 4 * 3); __m128 _k34 = _mm_load_ps(k0 + 4 * 4); k0 += 4 * 5; _sum0 = _mm_comp_fmadd_ps(_k30, _r30, _sum0); _sum0 = _mm_comp_fmadd_ps(_k31, _r31, _sum0); _sum0 = _mm_comp_fmadd_ps(_k32, _r32, _sum0); _sum0 = _mm_comp_fmadd_ps(_k33, _r33, _sum0); _sum0 = 
_mm_comp_fmadd_ps(_k34, _r34, _sum0); __m128 _r40 = _mm_load_ps(r4); __m128 _r41 = _mm_load_ps(r4 + 4); __m128 _r42 = _mm_load_ps(r4 + 4 * 2); __m128 _r43 = _mm_load_ps(r4 + 4 * 3); __m128 _r44 = _mm_load_ps(r4 + 4 * 4); _sum1 = _mm_comp_fmadd_ps(_k30, _r40, _sum1); _sum1 = _mm_comp_fmadd_ps(_k31, _r41, _sum1); _sum1 = _mm_comp_fmadd_ps(_k32, _r42, _sum1); _sum1 = _mm_comp_fmadd_ps(_k33, _r43, _sum1); _sum1 = _mm_comp_fmadd_ps(_k34, _r44, _sum1); __m128 _k40 = _mm_load_ps(k0); __m128 _k41 = _mm_load_ps(k0 + 4); __m128 _k42 = _mm_load_ps(k0 + 4 * 2); __m128 _k43 = _mm_load_ps(k0 + 4 * 3); __m128 _k44 = _mm_load_ps(k0 + 4 * 4); k0 -= 4 * 20; _sum0 = _mm_comp_fmadd_ps(_k40, _r40, _sum0); _sum0 = _mm_comp_fmadd_ps(_k41, _r41, _sum0); _sum0 = _mm_comp_fmadd_ps(_k42, _r42, _sum0); _sum0 = _mm_comp_fmadd_ps(_k43, _r43, _sum0); _sum0 = _mm_comp_fmadd_ps(_k44, _r44, _sum0); __m128 _r50 = _mm_load_ps(r5); __m128 _r51 = _mm_load_ps(r5 + 4); __m128 _r52 = _mm_load_ps(r5 + 4 * 2); __m128 _r53 = _mm_load_ps(r5 + 4 * 3); __m128 _r54 = _mm_load_ps(r5 + 4 * 4); _sum1 = _mm_comp_fmadd_ps(_k40, _r50, _sum1); _sum1 = _mm_comp_fmadd_ps(_k41, _r51, _sum1); _sum1 = _mm_comp_fmadd_ps(_k42, _r52, _sum1); _sum1 = _mm_comp_fmadd_ps(_k43, _r53, _sum1); _sum1 = _mm_comp_fmadd_ps(_k44, _r54, _sum1); _mm_store_ps(outptr0, _sum0); _mm_store_ps(outptr1, _sum1); outptr0 += 4; outptr1 += 4; r0 += 4; r1 += 4; r2 += 4; r3 += 4; r4 += 4; r5 += 4; } r0 += 4 * 4 + w * 4; r1 += 4 * 4 + w * 4; r2 += 4 * 4 + w * 4; r3 += 4 * 4 + w * 4; r4 += 4 * 4 + w * 4; r5 += 4 * 4 + w * 4; outptr0 += outw * 4; outptr1 += outw * 4; } for (; i < outh; i++) { int j = 0; for (; j < outw; j++) { __m128 _sum0 = _bias0; __m128 _r00 = _mm_load_ps(r0); __m128 _r01 = _mm_load_ps(r0 + 4); __m128 _r02 = _mm_load_ps(r0 + 4 * 2); __m128 _r03 = _mm_load_ps(r0 + 4 * 3); __m128 _r04 = _mm_load_ps(r0 + 4 * 4); __m128 _k00 = _mm_load_ps(k0); __m128 _k01 = _mm_load_ps(k0 + 4); __m128 _k02 = _mm_load_ps(k0 + 4 * 2); __m128 _k03 = 
_mm_load_ps(k0 + 4 * 3); __m128 _k04 = _mm_load_ps(k0 + 4 * 4); k0 += 4 * 5; _sum0 = _mm_comp_fmadd_ps(_k00, _r00, _sum0); _sum0 = _mm_comp_fmadd_ps(_k01, _r01, _sum0); _sum0 = _mm_comp_fmadd_ps(_k02, _r02, _sum0); _sum0 = _mm_comp_fmadd_ps(_k03, _r03, _sum0); _sum0 = _mm_comp_fmadd_ps(_k04, _r04, _sum0); __m128 _r10 = _mm_load_ps(r1); __m128 _r11 = _mm_load_ps(r1 + 4); __m128 _r12 = _mm_load_ps(r1 + 4 * 2); __m128 _r13 = _mm_load_ps(r1 + 4 * 3); __m128 _r14 = _mm_load_ps(r1 + 4 * 4); __m128 _k10 = _mm_load_ps(k0); __m128 _k11 = _mm_load_ps(k0 + 4); __m128 _k12 = _mm_load_ps(k0 + 4 * 2); __m128 _k13 = _mm_load_ps(k0 + 4 * 3); __m128 _k14 = _mm_load_ps(k0 + 4 * 4); k0 += 4 * 5; _sum0 = _mm_comp_fmadd_ps(_k10, _r10, _sum0); _sum0 = _mm_comp_fmadd_ps(_k11, _r11, _sum0); _sum0 = _mm_comp_fmadd_ps(_k12, _r12, _sum0); _sum0 = _mm_comp_fmadd_ps(_k13, _r13, _sum0); _sum0 = _mm_comp_fmadd_ps(_k14, _r14, _sum0); __m128 _r20 = _mm_load_ps(r2); __m128 _r21 = _mm_load_ps(r2 + 4); __m128 _r22 = _mm_load_ps(r2 + 4 * 2); __m128 _r23 = _mm_load_ps(r2 + 4 * 3); __m128 _r24 = _mm_load_ps(r2 + 4 * 4); __m128 _k20 = _mm_load_ps(k0); __m128 _k21 = _mm_load_ps(k0 + 4); __m128 _k22 = _mm_load_ps(k0 + 4 * 2); __m128 _k23 = _mm_load_ps(k0 + 4 * 3); __m128 _k24 = _mm_load_ps(k0 + 4 * 4); k0 += 4 * 5; _sum0 = _mm_comp_fmadd_ps(_k20, _r20, _sum0); _sum0 = _mm_comp_fmadd_ps(_k21, _r21, _sum0); _sum0 = _mm_comp_fmadd_ps(_k22, _r22, _sum0); _sum0 = _mm_comp_fmadd_ps(_k23, _r23, _sum0); _sum0 = _mm_comp_fmadd_ps(_k24, _r24, _sum0); __m128 _r30 = _mm_load_ps(r3); __m128 _r31 = _mm_load_ps(r3 + 4); __m128 _r32 = _mm_load_ps(r3 + 4 * 2); __m128 _r33 = _mm_load_ps(r3 + 4 * 3); __m128 _r34 = _mm_load_ps(r3 + 4 * 4); __m128 _k30 = _mm_load_ps(k0); __m128 _k31 = _mm_load_ps(k0 + 4); __m128 _k32 = _mm_load_ps(k0 + 4 * 2); __m128 _k33 = _mm_load_ps(k0 + 4 * 3); __m128 _k34 = _mm_load_ps(k0 + 4 * 4); k0 += 4 * 5; _sum0 = _mm_comp_fmadd_ps(_k30, _r30, _sum0); _sum0 = _mm_comp_fmadd_ps(_k31, _r31, _sum0); 
_sum0 = _mm_comp_fmadd_ps(_k32, _r32, _sum0); _sum0 = _mm_comp_fmadd_ps(_k33, _r33, _sum0); _sum0 = _mm_comp_fmadd_ps(_k34, _r34, _sum0); __m128 _r40 = _mm_load_ps(r4); __m128 _r41 = _mm_load_ps(r4 + 4); __m128 _r42 = _mm_load_ps(r4 + 4 * 2); __m128 _r43 = _mm_load_ps(r4 + 4 * 3); __m128 _r44 = _mm_load_ps(r4 + 4 * 4); __m128 _k40 = _mm_load_ps(k0); __m128 _k41 = _mm_load_ps(k0 + 4); __m128 _k42 = _mm_load_ps(k0 + 4 * 2); __m128 _k43 = _mm_load_ps(k0 + 4 * 3); __m128 _k44 = _mm_load_ps(k0 + 4 * 4); k0 -= 4 * 20; _sum0 = _mm_comp_fmadd_ps(_k40, _r40, _sum0); _sum0 = _mm_comp_fmadd_ps(_k41, _r41, _sum0); _sum0 = _mm_comp_fmadd_ps(_k42, _r42, _sum0); _sum0 = _mm_comp_fmadd_ps(_k43, _r43, _sum0); _sum0 = _mm_comp_fmadd_ps(_k44, _r44, _sum0); _mm_store_ps(outptr0, _sum0); outptr0 += 4; r0 += 4; r1 += 4; r2 += 4; r3 += 4; r4 += 4; } r0 += 4 * 4; r1 += 4 * 4; r2 += 4 * 4; r3 += 4 * 4; r4 += 4 * 4; } } }
subq $0x3428, %rsp # imm = 0x3428 movq %rdi, 0xd78(%rsp) movq %rsi, 0xd70(%rsp) movq %rdx, 0xd68(%rsp) movq %rcx, 0xd60(%rsp) movq %r8, 0xd58(%rsp) movq 0xd78(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0xd54(%rsp) movq 0xd70(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0xd50(%rsp) movq 0xd70(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0xd4c(%rsp) movq 0xd78(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0xd48(%rsp) movq 0xd60(%rsp), %rax movq %rax, 0xe10(%rsp) movq 0xe10(%rsp), %rax movq (%rax), %rax movq %rax, 0xd40(%rsp) movl $0x0, 0xd3c(%rsp) movl 0xd3c(%rsp), %eax cmpl 0xd48(%rsp), %eax jge 0x16aaad9 movq 0xd70(%rsp), %rcx movl 0xd3c(%rsp), %eax leaq 0xcf0(%rsp), %rdx movq %rdx, 0xdf0(%rsp) movq %rcx, 0xde8(%rsp) movl %eax, 0xde4(%rsp) movq 0xde8(%rsp), %rax movq %rax, 0x558(%rsp) movb $0x0, 0xde3(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0xde4(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0xcf0(%rsp), %r10 movq %r10, 0x3400(%rsp) movl %r9d, 0x33fc(%rsp) movl %r8d, 0x33f8(%rsp) movl %edi, 0x33f4(%rsp) movq %rsi, 0x33e8(%rsp) movq %rdx, 0x33e0(%rsp) movl %ecx, 0x33dc(%rsp) movq %rax, 0x33d0(%rsp) movq 0x3400(%rsp), %rcx movq %rcx, 0x550(%rsp) movq 0x33e8(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x33e0(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x33dc(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x33d0(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x33fc(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x33f8(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x33f4(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x3410(%rsp) movl $0x10, 0x340c(%rsp) movq 0x3410(%rsp), %rax movslq 0x340c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x340c(%rsp), %edx movslq %edx, %rdx andq 
%rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x558(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0xd18(%rsp) cmpl $0x4, 0x28(%rax) jne 0x16a5d85 movq 0x558(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0xd30(%rsp) movb $0x1, 0xde3(%rsp) testb $0x1, 0xde3(%rsp) jne 0x16a5ebe leaq 0xcf0(%rsp), %rax movq %rax, 0xdf8(%rsp) movq 0xdf8(%rsp), %rax movq %rax, 0xe08(%rsp) movq 0xe08(%rsp), %rax movq %rax, 0x548(%rsp) cmpq $0x0, 0x8(%rax) je 0x16a5e63 movq 0x548(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xe04(%rsp) # imm = 0xFFFFFFFF movl 0xe04(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xe00(%rsp) cmpl $0x1, 0xe00(%rsp) jne 0x16a5e63 movq 0x548(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x16a5e34 movq 0x548(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x16a5e32 jmp 0x16a5e61 movq 0x548(%rsp), %rax movq (%rax), %rax movq %rax, 0xea8(%rsp) cmpq $0x0, 0xea8(%rsp) je 0x16a5e5f movq 0xea8(%rsp), %rdi callq 0x5e480 jmp 0x16a5e61 jmp 0x16a5e63 movq 0x548(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x16a5ebe movq %rax, %rdi callq 0x5fc90 cmpq $0x0, 0xd40(%rsp) je 0x16a5f12 movq 0xd40(%rsp), %rax movl 0xd3c(%rsp), %ecx shll $0x2, %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0xe18(%rsp) movq 0xe18(%rsp), %rax movups (%rax), %xmm0 movaps %xmm0, 0x530(%rsp) movaps 0x530(%rsp), %xmm0 movaps %xmm0, 0x520(%rsp) jmp 0x16a5f3f xorps %xmm0, %xmm0 movaps %xmm0, 0xe90(%rsp) movaps 0xe90(%rsp), %xmm0 movaps %xmm0, 0x510(%rsp) movaps 0x510(%rsp), %xmm0 movaps %xmm0, 0x520(%rsp) jmp 0x16a5f3f movaps 0x520(%rsp), %xmm0 movaps %xmm0, 0xce0(%rsp) movq 0xd68(%rsp), %rcx movl 0xd3c(%rsp), %eax movq %rcx, 0xe88(%rsp) movl %eax, 
0xe84(%rsp) movq 0xe88(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0xe84(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x508(%rsp) movq 0x508(%rsp), %rax movq %rax, 0xcc8(%rsp) leaq 0xcf0(%rsp), %rax movq %rax, 0xee8(%rsp) movl $0x0, 0xee4(%rsp) movq 0xee8(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0xee4(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0xcc0(%rsp) leaq 0xcf0(%rsp), %rax movq %rax, 0xed8(%rsp) movl $0x1, 0xed4(%rsp) movq 0xed8(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0xed4(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0xcb8(%rsp) movq 0xd78(%rsp), %rcx movl 0xd3c(%rsp), %eax leaq 0xc70(%rsp), %rdx movq %rdx, 0xf00(%rsp) movq %rcx, 0xef8(%rsp) movl %eax, 0xef4(%rsp) movq 0xef8(%rsp), %rax movq %rax, 0x4f8(%rsp) movb $0x0, 0xef3(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0xef4(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0xc70(%rsp), %r10 movq %r10, 0x33c8(%rsp) movl %r9d, 0x33c4(%rsp) movl %r8d, 0x33c0(%rsp) movl %edi, 0x33bc(%rsp) movq %rsi, 0x33b0(%rsp) movq %rdx, 0x33a8(%rsp) movl %ecx, 0x33a4(%rsp) movq %rax, 0x3398(%rsp) movq 0x33c8(%rsp), %rcx movq %rcx, 0x500(%rsp) movq 0x33b0(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x33a8(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x33a4(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x3398(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x33c4(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x33c0(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x33bc(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x3420(%rsp) movl $0x10, 0x341c(%rsp) movq 0x3420(%rsp), %rax movslq 0x341c(%rsp), %rdx addq 
%rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x341c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rcx movq 0x500(%rsp), %rax movq %rcx, 0x40(%rax) movq 0x4f8(%rsp), %rax movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0xc98(%rsp) cmpl $0x4, 0x28(%rax) jne 0x16a61f8 movq 0x4f8(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0xcb0(%rsp) movb $0x1, 0xef3(%rsp) testb $0x1, 0xef3(%rsp) jne 0x16a6331 leaq 0xc70(%rsp), %rax movq %rax, 0xf08(%rsp) movq 0xf08(%rsp), %rax movq %rax, 0xf18(%rsp) movq 0xf18(%rsp), %rax movq %rax, 0x4f0(%rsp) cmpq $0x0, 0x8(%rax) je 0x16a62d6 movq 0x4f0(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xf14(%rsp) # imm = 0xFFFFFFFF movl 0xf14(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xf10(%rsp) cmpl $0x1, 0xf10(%rsp) jne 0x16a62d6 movq 0x4f0(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x16a62a7 movq 0x4f0(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x16a62a5 jmp 0x16a62d4 movq 0x4f0(%rsp), %rax movq (%rax), %rax movq %rax, 0xf20(%rsp) cmpq $0x0, 0xf20(%rsp) je 0x16a62d2 movq 0xf20(%rsp), %rdi callq 0x5e480 jmp 0x16a62d4 jmp 0x16a62d6 movq 0x4f0(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x16a6331 movq %rax, %rdi callq 0x5fc90 jmp 0x16a6333 leaq 0xc70(%rsp), %rax movq %rax, 0xe78(%rsp) movl $0x0, 0xe74(%rsp) movq 0xe78(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0xe74(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x4e8(%rsp) movq 0x4e8(%rsp), %rax movq %rax, 0xc68(%rsp) leaq 0xc70(%rsp), %rax movq %rax, 0xe68(%rsp) movl $0x1, 0xe64(%rsp) movq 0xe68(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0xe64(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), 
%rcx addq %rcx, %rax movq %rax, 0x4e0(%rsp) movq 0x4e0(%rsp), %rax movq %rax, 0xc60(%rsp) leaq 0xc70(%rsp), %rax movq %rax, 0xe58(%rsp) movl $0x2, 0xe54(%rsp) movq 0xe58(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0xe54(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x4d8(%rsp) movq 0x4d8(%rsp), %rax movq %rax, 0xc58(%rsp) leaq 0xc70(%rsp), %rax movq %rax, 0xe48(%rsp) movl $0x3, 0xe44(%rsp) movq 0xe48(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0xe44(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x4d0(%rsp) movq 0x4d0(%rsp), %rax movq %rax, 0xc50(%rsp) leaq 0xc70(%rsp), %rax movq %rax, 0xe38(%rsp) movl $0x4, 0xe34(%rsp) movq 0xe38(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0xe34(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x4c8(%rsp) movq 0x4c8(%rsp), %rax movq %rax, 0xc48(%rsp) leaq 0xc70(%rsp), %rax movq %rax, 0xe28(%rsp) movl $0x5, 0xe24(%rsp) movq 0xe28(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0xe24(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x4c0(%rsp) movq 0x4c0(%rsp), %rax movq %rax, 0xc40(%rsp) movl $0x0, 0xc3c(%rsp) movl 0xc3c(%rsp), %eax addl $0x1, %eax cmpl 0xd4c(%rsp), %eax jge 0x16a8fd7 movl $0x0, 0xc38(%rsp) movl 0xc38(%rsp), %eax cmpl 0xd50(%rsp), %eax jge 0x16a8e8f movaps 0xce0(%rsp), %xmm0 movaps %xmm0, 0xc20(%rsp) movaps 0xce0(%rsp), %xmm0 movaps %xmm0, 0xc10(%rsp) movq 0xc68(%rsp), %rax movq %rax, 0x1268(%rsp) movq 0x1268(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0xc00(%rsp) movq 0xc68(%rsp), %rax addq $0x10, %rax movq %rax, 0x1260(%rsp) movq 0x1260(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0xbf0(%rsp) movq 0xc68(%rsp), %rax addq $0x20, %rax movq %rax, 0x1258(%rsp) movq 0x1258(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0xbe0(%rsp) movq 0xc68(%rsp), %rax addq $0x30, %rax movq %rax, 0x1250(%rsp) movq 0x1250(%rsp), %rax movaps 
(%rax), %xmm0 movaps %xmm0, 0xbd0(%rsp) movq 0xc68(%rsp), %rax addq $0x40, %rax movq %rax, 0x1248(%rsp) movq 0x1248(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0xbc0(%rsp) movq 0xcc8(%rsp), %rax movq %rax, 0x1240(%rsp) movq 0x1240(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0xbb0(%rsp) movq 0xcc8(%rsp), %rax addq $0x10, %rax movq %rax, 0x1238(%rsp) movq 0x1238(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0xba0(%rsp) movq 0xcc8(%rsp), %rax addq $0x20, %rax movq %rax, 0x1230(%rsp) movq 0x1230(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0xb90(%rsp) movq 0xcc8(%rsp), %rax addq $0x30, %rax movq %rax, 0x1228(%rsp) movq 0x1228(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0xb80(%rsp) movq 0xcc8(%rsp), %rax addq $0x40, %rax movq %rax, 0x1220(%rsp) movq 0x1220(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0xb70(%rsp) movq 0xcc8(%rsp), %rax addq $0x50, %rax movq %rax, 0xcc8(%rsp) movaps 0xbb0(%rsp), %xmm2 movaps 0xc00(%rsp), %xmm1 movaps 0xc20(%rsp), %xmm0 movaps %xmm2, 0x32f0(%rsp) movaps %xmm1, 0x32e0(%rsp) movaps %xmm0, 0x32d0(%rsp) movaps 0x32f0(%rsp), %xmm1 movaps 0x32e0(%rsp), %xmm0 movaps %xmm1, 0x3330(%rsp) movaps %xmm0, 0x3320(%rsp) movaps 0x3330(%rsp), %xmm1 mulps 0x3320(%rsp), %xmm1 movaps 0x32d0(%rsp), %xmm0 movaps %xmm1, 0x3310(%rsp) movaps %xmm0, 0x3300(%rsp) movaps 0x3310(%rsp), %xmm0 addps 0x3300(%rsp), %xmm0 movaps %xmm0, 0x4b0(%rsp) movaps 0x4b0(%rsp), %xmm0 movaps %xmm0, 0xc20(%rsp) movaps 0xba0(%rsp), %xmm2 movaps 0xbf0(%rsp), %xmm1 movaps 0xc20(%rsp), %xmm0 movaps %xmm2, 0x3280(%rsp) movaps %xmm1, 0x3270(%rsp) movaps %xmm0, 0x3260(%rsp) movaps 0x3280(%rsp), %xmm1 movaps 0x3270(%rsp), %xmm0 movaps %xmm1, 0x32c0(%rsp) movaps %xmm0, 0x32b0(%rsp) movaps 0x32c0(%rsp), %xmm1 mulps 0x32b0(%rsp), %xmm1 movaps 0x3260(%rsp), %xmm0 movaps %xmm1, 0x32a0(%rsp) movaps %xmm0, 0x3290(%rsp) movaps 0x32a0(%rsp), %xmm0 addps 0x3290(%rsp), %xmm0 movaps %xmm0, 0x4a0(%rsp) movaps 0x4a0(%rsp), %xmm0 movaps %xmm0, 0xc20(%rsp) movaps 0xb90(%rsp), %xmm2 movaps 
0xbe0(%rsp), %xmm1 movaps 0xc20(%rsp), %xmm0 movaps %xmm2, 0x3210(%rsp) movaps %xmm1, 0x3200(%rsp) movaps %xmm0, 0x31f0(%rsp) movaps 0x3210(%rsp), %xmm1 movaps 0x3200(%rsp), %xmm0 movaps %xmm1, 0x3250(%rsp) movaps %xmm0, 0x3240(%rsp) movaps 0x3250(%rsp), %xmm1 mulps 0x3240(%rsp), %xmm1 movaps 0x31f0(%rsp), %xmm0 movaps %xmm1, 0x3230(%rsp) movaps %xmm0, 0x3220(%rsp) movaps 0x3230(%rsp), %xmm0 addps 0x3220(%rsp), %xmm0 movaps %xmm0, 0x490(%rsp) movaps 0x490(%rsp), %xmm0 movaps %xmm0, 0xc20(%rsp) movaps 0xb80(%rsp), %xmm2 movaps 0xbd0(%rsp), %xmm1 movaps 0xc20(%rsp), %xmm0 movaps %xmm2, 0x31a0(%rsp) movaps %xmm1, 0x3190(%rsp) movaps %xmm0, 0x3180(%rsp) movaps 0x31a0(%rsp), %xmm1 movaps 0x3190(%rsp), %xmm0 movaps %xmm1, 0x31e0(%rsp) movaps %xmm0, 0x31d0(%rsp) movaps 0x31e0(%rsp), %xmm1 mulps 0x31d0(%rsp), %xmm1 movaps 0x3180(%rsp), %xmm0 movaps %xmm1, 0x31c0(%rsp) movaps %xmm0, 0x31b0(%rsp) movaps 0x31c0(%rsp), %xmm0 addps 0x31b0(%rsp), %xmm0 movaps %xmm0, 0x480(%rsp) movaps 0x480(%rsp), %xmm0 movaps %xmm0, 0xc20(%rsp) movaps 0xb70(%rsp), %xmm2 movaps 0xbc0(%rsp), %xmm1 movaps 0xc20(%rsp), %xmm0 movaps %xmm2, 0x3130(%rsp) movaps %xmm1, 0x3120(%rsp) movaps %xmm0, 0x3110(%rsp) movaps 0x3130(%rsp), %xmm1 movaps 0x3120(%rsp), %xmm0 movaps %xmm1, 0x3170(%rsp) movaps %xmm0, 0x3160(%rsp) movaps 0x3170(%rsp), %xmm1 mulps 0x3160(%rsp), %xmm1 movaps 0x3110(%rsp), %xmm0 movaps %xmm1, 0x3150(%rsp) movaps %xmm0, 0x3140(%rsp) movaps 0x3150(%rsp), %xmm0 addps 0x3140(%rsp), %xmm0 movaps %xmm0, 0x470(%rsp) movaps 0x470(%rsp), %xmm0 movaps %xmm0, 0xc20(%rsp) movq 0xc60(%rsp), %rax movq %rax, 0x1218(%rsp) movq 0x1218(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0xb60(%rsp) movq 0xc60(%rsp), %rax addq $0x10, %rax movq %rax, 0x1210(%rsp) movq 0x1210(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0xb50(%rsp) movq 0xc60(%rsp), %rax addq $0x20, %rax movq %rax, 0x1208(%rsp) movq 0x1208(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0xb40(%rsp) movq 0xc60(%rsp), %rax addq $0x30, %rax movq 
%rax, 0x1200(%rsp) movq 0x1200(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0xb30(%rsp) movq 0xc60(%rsp), %rax addq $0x40, %rax movq %rax, 0x11f8(%rsp) movq 0x11f8(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0xb20(%rsp) movaps 0xbb0(%rsp), %xmm2 movaps 0xb60(%rsp), %xmm1 movaps 0xc10(%rsp), %xmm0 movaps %xmm2, 0x30c0(%rsp) movaps %xmm1, 0x30b0(%rsp) movaps %xmm0, 0x30a0(%rsp) movaps 0x30c0(%rsp), %xmm1 movaps 0x30b0(%rsp), %xmm0 movaps %xmm1, 0x3100(%rsp) movaps %xmm0, 0x30f0(%rsp) movaps 0x3100(%rsp), %xmm1 mulps 0x30f0(%rsp), %xmm1 movaps 0x30a0(%rsp), %xmm0 movaps %xmm1, 0x30e0(%rsp) movaps %xmm0, 0x30d0(%rsp) movaps 0x30e0(%rsp), %xmm0 addps 0x30d0(%rsp), %xmm0 movaps %xmm0, 0x460(%rsp) movaps 0x460(%rsp), %xmm0 movaps %xmm0, 0xc10(%rsp) movaps 0xba0(%rsp), %xmm2 movaps 0xb50(%rsp), %xmm1 movaps 0xc10(%rsp), %xmm0 movaps %xmm2, 0x3050(%rsp) movaps %xmm1, 0x3040(%rsp) movaps %xmm0, 0x3030(%rsp) movaps 0x3050(%rsp), %xmm1 movaps 0x3040(%rsp), %xmm0 movaps %xmm1, 0x3090(%rsp) movaps %xmm0, 0x3080(%rsp) movaps 0x3090(%rsp), %xmm1 mulps 0x3080(%rsp), %xmm1 movaps 0x3030(%rsp), %xmm0 movaps %xmm1, 0x3070(%rsp) movaps %xmm0, 0x3060(%rsp) movaps 0x3070(%rsp), %xmm0 addps 0x3060(%rsp), %xmm0 movaps %xmm0, 0x450(%rsp) movaps 0x450(%rsp), %xmm0 movaps %xmm0, 0xc10(%rsp) movaps 0xb90(%rsp), %xmm2 movaps 0xb40(%rsp), %xmm1 movaps 0xc10(%rsp), %xmm0 movaps %xmm2, 0x2fe0(%rsp) movaps %xmm1, 0x2fd0(%rsp) movaps %xmm0, 0x2fc0(%rsp) movaps 0x2fe0(%rsp), %xmm1 movaps 0x2fd0(%rsp), %xmm0 movaps %xmm1, 0x3020(%rsp) movaps %xmm0, 0x3010(%rsp) movaps 0x3020(%rsp), %xmm1 mulps 0x3010(%rsp), %xmm1 movaps 0x2fc0(%rsp), %xmm0 movaps %xmm1, 0x3000(%rsp) movaps %xmm0, 0x2ff0(%rsp) movaps 0x3000(%rsp), %xmm0 addps 0x2ff0(%rsp), %xmm0 movaps %xmm0, 0x440(%rsp) movaps 0x440(%rsp), %xmm0 movaps %xmm0, 0xc10(%rsp) movaps 0xb80(%rsp), %xmm2 movaps 0xb30(%rsp), %xmm1 movaps 0xc10(%rsp), %xmm0 movaps %xmm2, 0x2f70(%rsp) movaps %xmm1, 0x2f60(%rsp) movaps %xmm0, 0x2f50(%rsp) movaps 
0x2f70(%rsp), %xmm1 movaps 0x2f60(%rsp), %xmm0 movaps %xmm1, 0x2fb0(%rsp) movaps %xmm0, 0x2fa0(%rsp) movaps 0x2fb0(%rsp), %xmm1 mulps 0x2fa0(%rsp), %xmm1 movaps 0x2f50(%rsp), %xmm0 movaps %xmm1, 0x2f90(%rsp) movaps %xmm0, 0x2f80(%rsp) movaps 0x2f90(%rsp), %xmm0 addps 0x2f80(%rsp), %xmm0 movaps %xmm0, 0x430(%rsp) movaps 0x430(%rsp), %xmm0 movaps %xmm0, 0xc10(%rsp) movaps 0xb70(%rsp), %xmm2 movaps 0xb20(%rsp), %xmm1 movaps 0xc10(%rsp), %xmm0 movaps %xmm2, 0x2f00(%rsp) movaps %xmm1, 0x2ef0(%rsp) movaps %xmm0, 0x2ee0(%rsp) movaps 0x2f00(%rsp), %xmm1 movaps 0x2ef0(%rsp), %xmm0 movaps %xmm1, 0x2f40(%rsp) movaps %xmm0, 0x2f30(%rsp) movaps 0x2f40(%rsp), %xmm1 mulps 0x2f30(%rsp), %xmm1 movaps 0x2ee0(%rsp), %xmm0 movaps %xmm1, 0x2f20(%rsp) movaps %xmm0, 0x2f10(%rsp) movaps 0x2f20(%rsp), %xmm0 addps 0x2f10(%rsp), %xmm0 movaps %xmm0, 0x420(%rsp) movaps 0x420(%rsp), %xmm0 movaps %xmm0, 0xc10(%rsp) movq 0xcc8(%rsp), %rax movq %rax, 0x11f0(%rsp) movq 0x11f0(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0xb10(%rsp) movq 0xcc8(%rsp), %rax addq $0x10, %rax movq %rax, 0x11e8(%rsp) movq 0x11e8(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0xb00(%rsp) movq 0xcc8(%rsp), %rax addq $0x20, %rax movq %rax, 0x11e0(%rsp) movq 0x11e0(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0xaf0(%rsp) movq 0xcc8(%rsp), %rax addq $0x30, %rax movq %rax, 0x11d8(%rsp) movq 0x11d8(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0xae0(%rsp) movq 0xcc8(%rsp), %rax addq $0x40, %rax movq %rax, 0x11d0(%rsp) movq 0x11d0(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0xad0(%rsp) movq 0xcc8(%rsp), %rax addq $0x50, %rax movq %rax, 0xcc8(%rsp) movaps 0xb10(%rsp), %xmm2 movaps 0xb60(%rsp), %xmm1 movaps 0xc20(%rsp), %xmm0 movaps %xmm2, 0x2e90(%rsp) movaps %xmm1, 0x2e80(%rsp) movaps %xmm0, 0x2e70(%rsp) movaps 0x2e90(%rsp), %xmm1 movaps 0x2e80(%rsp), %xmm0 movaps %xmm1, 0x2ed0(%rsp) movaps %xmm0, 0x2ec0(%rsp) movaps 0x2ed0(%rsp), %xmm1 mulps 0x2ec0(%rsp), %xmm1 movaps 0x2e70(%rsp), %xmm0 movaps %xmm1, 0x2eb0(%rsp) movaps 
%xmm0, 0x2ea0(%rsp) movaps 0x2eb0(%rsp), %xmm0 addps 0x2ea0(%rsp), %xmm0 movaps %xmm0, 0x410(%rsp) movaps 0x410(%rsp), %xmm0 movaps %xmm0, 0xc20(%rsp) movaps 0xb00(%rsp), %xmm2 movaps 0xb50(%rsp), %xmm1 movaps 0xc20(%rsp), %xmm0 movaps %xmm2, 0x2e20(%rsp) movaps %xmm1, 0x2e10(%rsp) movaps %xmm0, 0x2e00(%rsp) movaps 0x2e20(%rsp), %xmm1 movaps 0x2e10(%rsp), %xmm0 movaps %xmm1, 0x2e60(%rsp) movaps %xmm0, 0x2e50(%rsp) movaps 0x2e60(%rsp), %xmm1 mulps 0x2e50(%rsp), %xmm1 movaps 0x2e00(%rsp), %xmm0 movaps %xmm1, 0x2e40(%rsp) movaps %xmm0, 0x2e30(%rsp) movaps 0x2e40(%rsp), %xmm0 addps 0x2e30(%rsp), %xmm0 movaps %xmm0, 0x400(%rsp) movaps 0x400(%rsp), %xmm0 movaps %xmm0, 0xc20(%rsp) movaps 0xaf0(%rsp), %xmm2 movaps 0xb40(%rsp), %xmm1 movaps 0xc20(%rsp), %xmm0 movaps %xmm2, 0x2db0(%rsp) movaps %xmm1, 0x2da0(%rsp) movaps %xmm0, 0x2d90(%rsp) movaps 0x2db0(%rsp), %xmm1 movaps 0x2da0(%rsp), %xmm0 movaps %xmm1, 0x2df0(%rsp) movaps %xmm0, 0x2de0(%rsp) movaps 0x2df0(%rsp), %xmm1 mulps 0x2de0(%rsp), %xmm1 movaps 0x2d90(%rsp), %xmm0 movaps %xmm1, 0x2dd0(%rsp) movaps %xmm0, 0x2dc0(%rsp) movaps 0x2dd0(%rsp), %xmm0 addps 0x2dc0(%rsp), %xmm0 movaps %xmm0, 0x3f0(%rsp) movaps 0x3f0(%rsp), %xmm0 movaps %xmm0, 0xc20(%rsp) movaps 0xae0(%rsp), %xmm2 movaps 0xb30(%rsp), %xmm1 movaps 0xc20(%rsp), %xmm0 movaps %xmm2, 0x2d40(%rsp) movaps %xmm1, 0x2d30(%rsp) movaps %xmm0, 0x2d20(%rsp) movaps 0x2d40(%rsp), %xmm1 movaps 0x2d30(%rsp), %xmm0 movaps %xmm1, 0x2d80(%rsp) movaps %xmm0, 0x2d70(%rsp) movaps 0x2d80(%rsp), %xmm1 mulps 0x2d70(%rsp), %xmm1 movaps 0x2d20(%rsp), %xmm0 movaps %xmm1, 0x2d60(%rsp) movaps %xmm0, 0x2d50(%rsp) movaps 0x2d60(%rsp), %xmm0 addps 0x2d50(%rsp), %xmm0 movaps %xmm0, 0x3e0(%rsp) movaps 0x3e0(%rsp), %xmm0 movaps %xmm0, 0xc20(%rsp) movaps 0xad0(%rsp), %xmm2 movaps 0xb20(%rsp), %xmm1 movaps 0xc20(%rsp), %xmm0 movaps %xmm2, 0x2cd0(%rsp) movaps %xmm1, 0x2cc0(%rsp) movaps %xmm0, 0x2cb0(%rsp) movaps 0x2cd0(%rsp), %xmm1 movaps 0x2cc0(%rsp), %xmm0 movaps %xmm1, 0x2d10(%rsp) movaps 
%xmm0, 0x2d00(%rsp) movaps 0x2d10(%rsp), %xmm1 mulps 0x2d00(%rsp), %xmm1 movaps 0x2cb0(%rsp), %xmm0 movaps %xmm1, 0x2cf0(%rsp) movaps %xmm0, 0x2ce0(%rsp) movaps 0x2cf0(%rsp), %xmm0 addps 0x2ce0(%rsp), %xmm0 movaps %xmm0, 0x3d0(%rsp) movaps 0x3d0(%rsp), %xmm0 movaps %xmm0, 0xc20(%rsp) movq 0xc58(%rsp), %rax movq %rax, 0x11c8(%rsp) movq 0x11c8(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0xac0(%rsp) movq 0xc58(%rsp), %rax addq $0x10, %rax movq %rax, 0x11c0(%rsp) movq 0x11c0(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0xab0(%rsp) movq 0xc58(%rsp), %rax addq $0x20, %rax movq %rax, 0x11b8(%rsp) movq 0x11b8(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0xaa0(%rsp) movq 0xc58(%rsp), %rax addq $0x30, %rax movq %rax, 0x11b0(%rsp) movq 0x11b0(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0xa90(%rsp) movq 0xc58(%rsp), %rax addq $0x40, %rax movq %rax, 0x11a8(%rsp) movq 0x11a8(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0xa80(%rsp) movaps 0xb10(%rsp), %xmm2 movaps 0xac0(%rsp), %xmm1 movaps 0xc10(%rsp), %xmm0 movaps %xmm2, 0x2c60(%rsp) movaps %xmm1, 0x2c50(%rsp) movaps %xmm0, 0x2c40(%rsp) movaps 0x2c60(%rsp), %xmm1 movaps 0x2c50(%rsp), %xmm0 movaps %xmm1, 0x2ca0(%rsp) movaps %xmm0, 0x2c90(%rsp) movaps 0x2ca0(%rsp), %xmm1 mulps 0x2c90(%rsp), %xmm1 movaps 0x2c40(%rsp), %xmm0 movaps %xmm1, 0x2c80(%rsp) movaps %xmm0, 0x2c70(%rsp) movaps 0x2c80(%rsp), %xmm0 addps 0x2c70(%rsp), %xmm0 movaps %xmm0, 0x3c0(%rsp) movaps 0x3c0(%rsp), %xmm0 movaps %xmm0, 0xc10(%rsp) movaps 0xb00(%rsp), %xmm2 movaps 0xab0(%rsp), %xmm1 movaps 0xc10(%rsp), %xmm0 movaps %xmm2, 0x2bf0(%rsp) movaps %xmm1, 0x2be0(%rsp) movaps %xmm0, 0x2bd0(%rsp) movaps 0x2bf0(%rsp), %xmm1 movaps 0x2be0(%rsp), %xmm0 movaps %xmm1, 0x2c30(%rsp) movaps %xmm0, 0x2c20(%rsp) movaps 0x2c30(%rsp), %xmm1 mulps 0x2c20(%rsp), %xmm1 movaps 0x2bd0(%rsp), %xmm0 movaps %xmm1, 0x2c10(%rsp) movaps %xmm0, 0x2c00(%rsp) movaps 0x2c10(%rsp), %xmm0 addps 0x2c00(%rsp), %xmm0 movaps %xmm0, 0x3b0(%rsp) movaps 0x3b0(%rsp), %xmm0 movaps %xmm0, 
0xc10(%rsp) movaps 0xaf0(%rsp), %xmm2 movaps 0xaa0(%rsp), %xmm1 movaps 0xc10(%rsp), %xmm0 movaps %xmm2, 0x2b80(%rsp) movaps %xmm1, 0x2b70(%rsp) movaps %xmm0, 0x2b60(%rsp) movaps 0x2b80(%rsp), %xmm1 movaps 0x2b70(%rsp), %xmm0 movaps %xmm1, 0x2bc0(%rsp) movaps %xmm0, 0x2bb0(%rsp) movaps 0x2bc0(%rsp), %xmm1 mulps 0x2bb0(%rsp), %xmm1 movaps 0x2b60(%rsp), %xmm0 movaps %xmm1, 0x2ba0(%rsp) movaps %xmm0, 0x2b90(%rsp) movaps 0x2ba0(%rsp), %xmm0 addps 0x2b90(%rsp), %xmm0 movaps %xmm0, 0x3a0(%rsp) movaps 0x3a0(%rsp), %xmm0 movaps %xmm0, 0xc10(%rsp) movaps 0xae0(%rsp), %xmm2 movaps 0xa90(%rsp), %xmm1 movaps 0xc10(%rsp), %xmm0 movaps %xmm2, 0x2b10(%rsp) movaps %xmm1, 0x2b00(%rsp) movaps %xmm0, 0x2af0(%rsp) movaps 0x2b10(%rsp), %xmm1 movaps 0x2b00(%rsp), %xmm0 movaps %xmm1, 0x2b50(%rsp) movaps %xmm0, 0x2b40(%rsp) movaps 0x2b50(%rsp), %xmm1 mulps 0x2b40(%rsp), %xmm1 movaps 0x2af0(%rsp), %xmm0 movaps %xmm1, 0x2b30(%rsp) movaps %xmm0, 0x2b20(%rsp) movaps 0x2b30(%rsp), %xmm0 addps 0x2b20(%rsp), %xmm0 movaps %xmm0, 0x390(%rsp) movaps 0x390(%rsp), %xmm0 movaps %xmm0, 0xc10(%rsp) movaps 0xad0(%rsp), %xmm2 movaps 0xa80(%rsp), %xmm1 movaps 0xc10(%rsp), %xmm0 movaps %xmm2, 0x2aa0(%rsp) movaps %xmm1, 0x2a90(%rsp) movaps %xmm0, 0x2a80(%rsp) movaps 0x2aa0(%rsp), %xmm1 movaps 0x2a90(%rsp), %xmm0 movaps %xmm1, 0x2ae0(%rsp) movaps %xmm0, 0x2ad0(%rsp) movaps 0x2ae0(%rsp), %xmm1 mulps 0x2ad0(%rsp), %xmm1 movaps 0x2a80(%rsp), %xmm0 movaps %xmm1, 0x2ac0(%rsp) movaps %xmm0, 0x2ab0(%rsp) movaps 0x2ac0(%rsp), %xmm0 addps 0x2ab0(%rsp), %xmm0 movaps %xmm0, 0x380(%rsp) movaps 0x380(%rsp), %xmm0 movaps %xmm0, 0xc10(%rsp) movq 0xcc8(%rsp), %rax movq %rax, 0x11a0(%rsp) movq 0x11a0(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0xa70(%rsp) movq 0xcc8(%rsp), %rax addq $0x10, %rax movq %rax, 0x1198(%rsp) movq 0x1198(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0xa60(%rsp) movq 0xcc8(%rsp), %rax addq $0x20, %rax movq %rax, 0x1190(%rsp) movq 0x1190(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0xa50(%rsp) 
movq 0xcc8(%rsp), %rax addq $0x30, %rax movq %rax, 0x1188(%rsp) movq 0x1188(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0xa40(%rsp) movq 0xcc8(%rsp), %rax addq $0x40, %rax movq %rax, 0x1180(%rsp) movq 0x1180(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0xa30(%rsp) movq 0xcc8(%rsp), %rax addq $0x50, %rax movq %rax, 0xcc8(%rsp) movaps 0xa70(%rsp), %xmm2 movaps 0xac0(%rsp), %xmm1 movaps 0xc20(%rsp), %xmm0 movaps %xmm2, 0x2a30(%rsp) movaps %xmm1, 0x2a20(%rsp) movaps %xmm0, 0x2a10(%rsp) movaps 0x2a30(%rsp), %xmm1 movaps 0x2a20(%rsp), %xmm0 movaps %xmm1, 0x2a70(%rsp) movaps %xmm0, 0x2a60(%rsp) movaps 0x2a70(%rsp), %xmm1 mulps 0x2a60(%rsp), %xmm1 movaps 0x2a10(%rsp), %xmm0 movaps %xmm1, 0x2a50(%rsp) movaps %xmm0, 0x2a40(%rsp) movaps 0x2a50(%rsp), %xmm0 addps 0x2a40(%rsp), %xmm0 movaps %xmm0, 0x370(%rsp) movaps 0x370(%rsp), %xmm0 movaps %xmm0, 0xc20(%rsp) movaps 0xa60(%rsp), %xmm2 movaps 0xab0(%rsp), %xmm1 movaps 0xc20(%rsp), %xmm0 movaps %xmm2, 0x29c0(%rsp) movaps %xmm1, 0x29b0(%rsp) movaps %xmm0, 0x29a0(%rsp) movaps 0x29c0(%rsp), %xmm1 movaps 0x29b0(%rsp), %xmm0 movaps %xmm1, 0x2a00(%rsp) movaps %xmm0, 0x29f0(%rsp) movaps 0x2a00(%rsp), %xmm1 mulps 0x29f0(%rsp), %xmm1 movaps 0x29a0(%rsp), %xmm0 movaps %xmm1, 0x29e0(%rsp) movaps %xmm0, 0x29d0(%rsp) movaps 0x29e0(%rsp), %xmm0 addps 0x29d0(%rsp), %xmm0 movaps %xmm0, 0x360(%rsp) movaps 0x360(%rsp), %xmm0 movaps %xmm0, 0xc20(%rsp) movaps 0xa50(%rsp), %xmm2 movaps 0xaa0(%rsp), %xmm1 movaps 0xc20(%rsp), %xmm0 movaps %xmm2, 0x2950(%rsp) movaps %xmm1, 0x2940(%rsp) movaps %xmm0, 0x2930(%rsp) movaps 0x2950(%rsp), %xmm1 movaps 0x2940(%rsp), %xmm0 movaps %xmm1, 0x2990(%rsp) movaps %xmm0, 0x2980(%rsp) movaps 0x2990(%rsp), %xmm1 mulps 0x2980(%rsp), %xmm1 movaps 0x2930(%rsp), %xmm0 movaps %xmm1, 0x2970(%rsp) movaps %xmm0, 0x2960(%rsp) movaps 0x2970(%rsp), %xmm0 addps 0x2960(%rsp), %xmm0 movaps %xmm0, 0x350(%rsp) movaps 0x350(%rsp), %xmm0 movaps %xmm0, 0xc20(%rsp) movaps 0xa40(%rsp), %xmm2 movaps 0xa90(%rsp), %xmm1 movaps 
0xc20(%rsp), %xmm0 movaps %xmm2, 0x28e0(%rsp) movaps %xmm1, 0x28d0(%rsp) movaps %xmm0, 0x28c0(%rsp) movaps 0x28e0(%rsp), %xmm1 movaps 0x28d0(%rsp), %xmm0 movaps %xmm1, 0x2920(%rsp) movaps %xmm0, 0x2910(%rsp) movaps 0x2920(%rsp), %xmm1 mulps 0x2910(%rsp), %xmm1 movaps 0x28c0(%rsp), %xmm0 movaps %xmm1, 0x2900(%rsp) movaps %xmm0, 0x28f0(%rsp) movaps 0x2900(%rsp), %xmm0 addps 0x28f0(%rsp), %xmm0 movaps %xmm0, 0x340(%rsp) movaps 0x340(%rsp), %xmm0 movaps %xmm0, 0xc20(%rsp) movaps 0xa30(%rsp), %xmm2 movaps 0xa80(%rsp), %xmm1 movaps 0xc20(%rsp), %xmm0 movaps %xmm2, 0x2870(%rsp) movaps %xmm1, 0x2860(%rsp) movaps %xmm0, 0x2850(%rsp) movaps 0x2870(%rsp), %xmm1 movaps 0x2860(%rsp), %xmm0 movaps %xmm1, 0x28b0(%rsp) movaps %xmm0, 0x28a0(%rsp) movaps 0x28b0(%rsp), %xmm1 mulps 0x28a0(%rsp), %xmm1 movaps 0x2850(%rsp), %xmm0 movaps %xmm1, 0x2890(%rsp) movaps %xmm0, 0x2880(%rsp) movaps 0x2890(%rsp), %xmm0 addps 0x2880(%rsp), %xmm0 movaps %xmm0, 0x330(%rsp) movaps 0x330(%rsp), %xmm0 movaps %xmm0, 0xc20(%rsp) movq 0xc50(%rsp), %rax movq %rax, 0x1178(%rsp) movq 0x1178(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0xa20(%rsp) movq 0xc50(%rsp), %rax addq $0x10, %rax movq %rax, 0x1170(%rsp) movq 0x1170(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0xa10(%rsp) movq 0xc50(%rsp), %rax addq $0x20, %rax movq %rax, 0x1168(%rsp) movq 0x1168(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0xa00(%rsp) movq 0xc50(%rsp), %rax addq $0x30, %rax movq %rax, 0x1160(%rsp) movq 0x1160(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x9f0(%rsp) movq 0xc50(%rsp), %rax addq $0x40, %rax movq %rax, 0x1158(%rsp) movq 0x1158(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x9e0(%rsp) movaps 0xa70(%rsp), %xmm2 movaps 0xa20(%rsp), %xmm1 movaps 0xc10(%rsp), %xmm0 movaps %xmm2, 0x2800(%rsp) movaps %xmm1, 0x27f0(%rsp) movaps %xmm0, 0x27e0(%rsp) movaps 0x2800(%rsp), %xmm1 movaps 0x27f0(%rsp), %xmm0 movaps %xmm1, 0x2840(%rsp) movaps %xmm0, 0x2830(%rsp) movaps 0x2840(%rsp), %xmm1 mulps 0x2830(%rsp), %xmm1 movaps 
0x27e0(%rsp), %xmm0 movaps %xmm1, 0x2820(%rsp) movaps %xmm0, 0x2810(%rsp) movaps 0x2820(%rsp), %xmm0 addps 0x2810(%rsp), %xmm0 movaps %xmm0, 0x320(%rsp) movaps 0x320(%rsp), %xmm0 movaps %xmm0, 0xc10(%rsp) movaps 0xa60(%rsp), %xmm2 movaps 0xa10(%rsp), %xmm1 movaps 0xc10(%rsp), %xmm0 movaps %xmm2, 0x2790(%rsp) movaps %xmm1, 0x2780(%rsp) movaps %xmm0, 0x2770(%rsp) movaps 0x2790(%rsp), %xmm1 movaps 0x2780(%rsp), %xmm0 movaps %xmm1, 0x27d0(%rsp) movaps %xmm0, 0x27c0(%rsp) movaps 0x27d0(%rsp), %xmm1 mulps 0x27c0(%rsp), %xmm1 movaps 0x2770(%rsp), %xmm0 movaps %xmm1, 0x27b0(%rsp) movaps %xmm0, 0x27a0(%rsp) movaps 0x27b0(%rsp), %xmm0 addps 0x27a0(%rsp), %xmm0 movaps %xmm0, 0x310(%rsp) movaps 0x310(%rsp), %xmm0 movaps %xmm0, 0xc10(%rsp) movaps 0xa50(%rsp), %xmm2 movaps 0xa00(%rsp), %xmm1 movaps 0xc10(%rsp), %xmm0 movaps %xmm2, 0x2720(%rsp) movaps %xmm1, 0x2710(%rsp) movaps %xmm0, 0x2700(%rsp) movaps 0x2720(%rsp), %xmm1 movaps 0x2710(%rsp), %xmm0 movaps %xmm1, 0x2760(%rsp) movaps %xmm0, 0x2750(%rsp) movaps 0x2760(%rsp), %xmm1 mulps 0x2750(%rsp), %xmm1 movaps 0x2700(%rsp), %xmm0 movaps %xmm1, 0x2740(%rsp) movaps %xmm0, 0x2730(%rsp) movaps 0x2740(%rsp), %xmm0 addps 0x2730(%rsp), %xmm0 movaps %xmm0, 0x300(%rsp) movaps 0x300(%rsp), %xmm0 movaps %xmm0, 0xc10(%rsp) movaps 0xa40(%rsp), %xmm2 movaps 0x9f0(%rsp), %xmm1 movaps 0xc10(%rsp), %xmm0 movaps %xmm2, 0x26b0(%rsp) movaps %xmm1, 0x26a0(%rsp) movaps %xmm0, 0x2690(%rsp) movaps 0x26b0(%rsp), %xmm1 movaps 0x26a0(%rsp), %xmm0 movaps %xmm1, 0x26f0(%rsp) movaps %xmm0, 0x26e0(%rsp) movaps 0x26f0(%rsp), %xmm1 mulps 0x26e0(%rsp), %xmm1 movaps 0x2690(%rsp), %xmm0 movaps %xmm1, 0x26d0(%rsp) movaps %xmm0, 0x26c0(%rsp) movaps 0x26d0(%rsp), %xmm0 addps 0x26c0(%rsp), %xmm0 movaps %xmm0, 0x2f0(%rsp) movaps 0x2f0(%rsp), %xmm0 movaps %xmm0, 0xc10(%rsp) movaps 0xa30(%rsp), %xmm2 movaps 0x9e0(%rsp), %xmm1 movaps 0xc10(%rsp), %xmm0 movaps %xmm2, 0x2640(%rsp) movaps %xmm1, 0x2630(%rsp) movaps %xmm0, 0x2620(%rsp) movaps 0x2640(%rsp), %xmm1 movaps 
0x2630(%rsp), %xmm0 movaps %xmm1, 0x2680(%rsp) movaps %xmm0, 0x2670(%rsp) movaps 0x2680(%rsp), %xmm1 mulps 0x2670(%rsp), %xmm1 movaps 0x2620(%rsp), %xmm0 movaps %xmm1, 0x2660(%rsp) movaps %xmm0, 0x2650(%rsp) movaps 0x2660(%rsp), %xmm0 addps 0x2650(%rsp), %xmm0 movaps %xmm0, 0x2e0(%rsp) movaps 0x2e0(%rsp), %xmm0 movaps %xmm0, 0xc10(%rsp) movq 0xcc8(%rsp), %rax movq %rax, 0x1150(%rsp) movq 0x1150(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x9d0(%rsp) movq 0xcc8(%rsp), %rax addq $0x10, %rax movq %rax, 0x1148(%rsp) movq 0x1148(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x9c0(%rsp) movq 0xcc8(%rsp), %rax addq $0x20, %rax movq %rax, 0x1140(%rsp) movq 0x1140(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x9b0(%rsp) movq 0xcc8(%rsp), %rax addq $0x30, %rax movq %rax, 0x1138(%rsp) movq 0x1138(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x9a0(%rsp) movq 0xcc8(%rsp), %rax addq $0x40, %rax movq %rax, 0x1130(%rsp) movq 0x1130(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x990(%rsp) movq 0xcc8(%rsp), %rax addq $0x50, %rax movq %rax, 0xcc8(%rsp) movaps 0x9d0(%rsp), %xmm2 movaps 0xa20(%rsp), %xmm1 movaps 0xc20(%rsp), %xmm0 movaps %xmm2, 0x25d0(%rsp) movaps %xmm1, 0x25c0(%rsp) movaps %xmm0, 0x25b0(%rsp) movaps 0x25d0(%rsp), %xmm1 movaps 0x25c0(%rsp), %xmm0 movaps %xmm1, 0x2610(%rsp) movaps %xmm0, 0x2600(%rsp) movaps 0x2610(%rsp), %xmm1 mulps 0x2600(%rsp), %xmm1 movaps 0x25b0(%rsp), %xmm0 movaps %xmm1, 0x25f0(%rsp) movaps %xmm0, 0x25e0(%rsp) movaps 0x25f0(%rsp), %xmm0 addps 0x25e0(%rsp), %xmm0 movaps %xmm0, 0x2d0(%rsp) movaps 0x2d0(%rsp), %xmm0 movaps %xmm0, 0xc20(%rsp) movaps 0x9c0(%rsp), %xmm2 movaps 0xa10(%rsp), %xmm1 movaps 0xc20(%rsp), %xmm0 movaps %xmm2, 0x2560(%rsp) movaps %xmm1, 0x2550(%rsp) movaps %xmm0, 0x2540(%rsp) movaps 0x2560(%rsp), %xmm1 movaps 0x2550(%rsp), %xmm0 movaps %xmm1, 0x25a0(%rsp) movaps %xmm0, 0x2590(%rsp) movaps 0x25a0(%rsp), %xmm1 mulps 0x2590(%rsp), %xmm1 movaps 0x2540(%rsp), %xmm0 movaps %xmm1, 0x2580(%rsp) movaps %xmm0, 0x2570(%rsp) movaps 
0x2580(%rsp), %xmm0 addps 0x2570(%rsp), %xmm0 movaps %xmm0, 0x2c0(%rsp) movaps 0x2c0(%rsp), %xmm0 movaps %xmm0, 0xc20(%rsp) movaps 0x9b0(%rsp), %xmm2 movaps 0xa00(%rsp), %xmm1 movaps 0xc20(%rsp), %xmm0 movaps %xmm2, 0x24f0(%rsp) movaps %xmm1, 0x24e0(%rsp) movaps %xmm0, 0x24d0(%rsp) movaps 0x24f0(%rsp), %xmm1 movaps 0x24e0(%rsp), %xmm0 movaps %xmm1, 0x2530(%rsp) movaps %xmm0, 0x2520(%rsp) movaps 0x2530(%rsp), %xmm1 mulps 0x2520(%rsp), %xmm1 movaps 0x24d0(%rsp), %xmm0 movaps %xmm1, 0x2510(%rsp) movaps %xmm0, 0x2500(%rsp) movaps 0x2510(%rsp), %xmm0 addps 0x2500(%rsp), %xmm0 movaps %xmm0, 0x2b0(%rsp) movaps 0x2b0(%rsp), %xmm0 movaps %xmm0, 0xc20(%rsp) movaps 0x9a0(%rsp), %xmm2 movaps 0x9f0(%rsp), %xmm1 movaps 0xc20(%rsp), %xmm0 movaps %xmm2, 0x2480(%rsp) movaps %xmm1, 0x2470(%rsp) movaps %xmm0, 0x2460(%rsp) movaps 0x2480(%rsp), %xmm1 movaps 0x2470(%rsp), %xmm0 movaps %xmm1, 0x24c0(%rsp) movaps %xmm0, 0x24b0(%rsp) movaps 0x24c0(%rsp), %xmm1 mulps 0x24b0(%rsp), %xmm1 movaps 0x2460(%rsp), %xmm0 movaps %xmm1, 0x24a0(%rsp) movaps %xmm0, 0x2490(%rsp) movaps 0x24a0(%rsp), %xmm0 addps 0x2490(%rsp), %xmm0 movaps %xmm0, 0x2a0(%rsp) movaps 0x2a0(%rsp), %xmm0 movaps %xmm0, 0xc20(%rsp) movaps 0x990(%rsp), %xmm2 movaps 0x9e0(%rsp), %xmm1 movaps 0xc20(%rsp), %xmm0 movaps %xmm2, 0x2410(%rsp) movaps %xmm1, 0x2400(%rsp) movaps %xmm0, 0x23f0(%rsp) movaps 0x2410(%rsp), %xmm1 movaps 0x2400(%rsp), %xmm0 movaps %xmm1, 0x2450(%rsp) movaps %xmm0, 0x2440(%rsp) movaps 0x2450(%rsp), %xmm1 mulps 0x2440(%rsp), %xmm1 movaps 0x23f0(%rsp), %xmm0 movaps %xmm1, 0x2430(%rsp) movaps %xmm0, 0x2420(%rsp) movaps 0x2430(%rsp), %xmm0 addps 0x2420(%rsp), %xmm0 movaps %xmm0, 0x290(%rsp) movaps 0x290(%rsp), %xmm0 movaps %xmm0, 0xc20(%rsp) movq 0xc48(%rsp), %rax movq %rax, 0x1128(%rsp) movq 0x1128(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x980(%rsp) movq 0xc48(%rsp), %rax addq $0x10, %rax movq %rax, 0x1120(%rsp) movq 0x1120(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x970(%rsp) movq 0xc48(%rsp), %rax 
addq $0x20, %rax movq %rax, 0x1118(%rsp) movq 0x1118(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x960(%rsp) movq 0xc48(%rsp), %rax addq $0x30, %rax movq %rax, 0x1110(%rsp) movq 0x1110(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x950(%rsp) movq 0xc48(%rsp), %rax addq $0x40, %rax movq %rax, 0x1108(%rsp) movq 0x1108(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x940(%rsp) movaps 0x9d0(%rsp), %xmm2 movaps 0x980(%rsp), %xmm1 movaps 0xc10(%rsp), %xmm0 movaps %xmm2, 0x23a0(%rsp) movaps %xmm1, 0x2390(%rsp) movaps %xmm0, 0x2380(%rsp) movaps 0x23a0(%rsp), %xmm1 movaps 0x2390(%rsp), %xmm0 movaps %xmm1, 0x23e0(%rsp) movaps %xmm0, 0x23d0(%rsp) movaps 0x23e0(%rsp), %xmm1 mulps 0x23d0(%rsp), %xmm1 movaps 0x2380(%rsp), %xmm0 movaps %xmm1, 0x23c0(%rsp) movaps %xmm0, 0x23b0(%rsp) movaps 0x23c0(%rsp), %xmm0 addps 0x23b0(%rsp), %xmm0 movaps %xmm0, 0x280(%rsp) movaps 0x280(%rsp), %xmm0 movaps %xmm0, 0xc10(%rsp) movaps 0x9c0(%rsp), %xmm2 movaps 0x970(%rsp), %xmm1 movaps 0xc10(%rsp), %xmm0 movaps %xmm2, 0x2330(%rsp) movaps %xmm1, 0x2320(%rsp) movaps %xmm0, 0x2310(%rsp) movaps 0x2330(%rsp), %xmm1 movaps 0x2320(%rsp), %xmm0 movaps %xmm1, 0x2370(%rsp) movaps %xmm0, 0x2360(%rsp) movaps 0x2370(%rsp), %xmm1 mulps 0x2360(%rsp), %xmm1 movaps 0x2310(%rsp), %xmm0 movaps %xmm1, 0x2350(%rsp) movaps %xmm0, 0x2340(%rsp) movaps 0x2350(%rsp), %xmm0 addps 0x2340(%rsp), %xmm0 movaps %xmm0, 0x270(%rsp) movaps 0x270(%rsp), %xmm0 movaps %xmm0, 0xc10(%rsp) movaps 0x9b0(%rsp), %xmm2 movaps 0x960(%rsp), %xmm1 movaps 0xc10(%rsp), %xmm0 movaps %xmm2, 0x22c0(%rsp) movaps %xmm1, 0x22b0(%rsp) movaps %xmm0, 0x22a0(%rsp) movaps 0x22c0(%rsp), %xmm1 movaps 0x22b0(%rsp), %xmm0 movaps %xmm1, 0x2300(%rsp) movaps %xmm0, 0x22f0(%rsp) movaps 0x2300(%rsp), %xmm1 mulps 0x22f0(%rsp), %xmm1 movaps 0x22a0(%rsp), %xmm0 movaps %xmm1, 0x22e0(%rsp) movaps %xmm0, 0x22d0(%rsp) movaps 0x22e0(%rsp), %xmm0 addps 0x22d0(%rsp), %xmm0 movaps %xmm0, 0x260(%rsp) movaps 0x260(%rsp), %xmm0 movaps %xmm0, 0xc10(%rsp) movaps 0x9a0(%rsp), 
%xmm2 movaps 0x950(%rsp), %xmm1 movaps 0xc10(%rsp), %xmm0 movaps %xmm2, 0x2250(%rsp) movaps %xmm1, 0x2240(%rsp) movaps %xmm0, 0x2230(%rsp) movaps 0x2250(%rsp), %xmm1 movaps 0x2240(%rsp), %xmm0 movaps %xmm1, 0x2290(%rsp) movaps %xmm0, 0x2280(%rsp) movaps 0x2290(%rsp), %xmm1 mulps 0x2280(%rsp), %xmm1 movaps 0x2230(%rsp), %xmm0 movaps %xmm1, 0x2270(%rsp) movaps %xmm0, 0x2260(%rsp) movaps 0x2270(%rsp), %xmm0 addps 0x2260(%rsp), %xmm0 movaps %xmm0, 0x250(%rsp) movaps 0x250(%rsp), %xmm0 movaps %xmm0, 0xc10(%rsp) movaps 0x990(%rsp), %xmm2 movaps 0x940(%rsp), %xmm1 movaps 0xc10(%rsp), %xmm0 movaps %xmm2, 0x21e0(%rsp) movaps %xmm1, 0x21d0(%rsp) movaps %xmm0, 0x21c0(%rsp) movaps 0x21e0(%rsp), %xmm1 movaps 0x21d0(%rsp), %xmm0 movaps %xmm1, 0x2220(%rsp) movaps %xmm0, 0x2210(%rsp) movaps 0x2220(%rsp), %xmm1 mulps 0x2210(%rsp), %xmm1 movaps 0x21c0(%rsp), %xmm0 movaps %xmm1, 0x2200(%rsp) movaps %xmm0, 0x21f0(%rsp) movaps 0x2200(%rsp), %xmm0 addps 0x21f0(%rsp), %xmm0 movaps %xmm0, 0x240(%rsp) movaps 0x240(%rsp), %xmm0 movaps %xmm0, 0xc10(%rsp) movq 0xcc8(%rsp), %rax movq %rax, 0x1100(%rsp) movq 0x1100(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x930(%rsp) movq 0xcc8(%rsp), %rax addq $0x10, %rax movq %rax, 0x10f8(%rsp) movq 0x10f8(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x920(%rsp) movq 0xcc8(%rsp), %rax addq $0x20, %rax movq %rax, 0x10f0(%rsp) movq 0x10f0(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x910(%rsp) movq 0xcc8(%rsp), %rax addq $0x30, %rax movq %rax, 0x10e8(%rsp) movq 0x10e8(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x900(%rsp) movq 0xcc8(%rsp), %rax addq $0x40, %rax movq %rax, 0x10e0(%rsp) movq 0x10e0(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x8f0(%rsp) movq 0xcc8(%rsp), %rax addq $-0x140, %rax # imm = 0xFEC0 movq %rax, 0xcc8(%rsp) movaps 0x930(%rsp), %xmm2 movaps 0x980(%rsp), %xmm1 movaps 0xc20(%rsp), %xmm0 movaps %xmm2, 0x2170(%rsp) movaps %xmm1, 0x2160(%rsp) movaps %xmm0, 0x2150(%rsp) movaps 0x2170(%rsp), %xmm1 movaps 0x2160(%rsp), %xmm0 
movaps %xmm1, 0x21b0(%rsp) movaps %xmm0, 0x21a0(%rsp) movaps 0x21b0(%rsp), %xmm1 mulps 0x21a0(%rsp), %xmm1 movaps 0x2150(%rsp), %xmm0 movaps %xmm1, 0x2190(%rsp) movaps %xmm0, 0x2180(%rsp) movaps 0x2190(%rsp), %xmm0 addps 0x2180(%rsp), %xmm0 movaps %xmm0, 0x230(%rsp) movaps 0x230(%rsp), %xmm0 movaps %xmm0, 0xc20(%rsp) movaps 0x920(%rsp), %xmm2 movaps 0x970(%rsp), %xmm1 movaps 0xc20(%rsp), %xmm0 movaps %xmm2, 0x2100(%rsp) movaps %xmm1, 0x20f0(%rsp) movaps %xmm0, 0x20e0(%rsp) movaps 0x2100(%rsp), %xmm1 movaps 0x20f0(%rsp), %xmm0 movaps %xmm1, 0x2140(%rsp) movaps %xmm0, 0x2130(%rsp) movaps 0x2140(%rsp), %xmm1 mulps 0x2130(%rsp), %xmm1 movaps 0x20e0(%rsp), %xmm0 movaps %xmm1, 0x2120(%rsp) movaps %xmm0, 0x2110(%rsp) movaps 0x2120(%rsp), %xmm0 addps 0x2110(%rsp), %xmm0 movaps %xmm0, 0x220(%rsp) movaps 0x220(%rsp), %xmm0 movaps %xmm0, 0xc20(%rsp) movaps 0x910(%rsp), %xmm2 movaps 0x960(%rsp), %xmm1 movaps 0xc20(%rsp), %xmm0 movaps %xmm2, 0x2090(%rsp) movaps %xmm1, 0x2080(%rsp) movaps %xmm0, 0x2070(%rsp) movaps 0x2090(%rsp), %xmm1 movaps 0x2080(%rsp), %xmm0 movaps %xmm1, 0x20d0(%rsp) movaps %xmm0, 0x20c0(%rsp) movaps 0x20d0(%rsp), %xmm1 mulps 0x20c0(%rsp), %xmm1 movaps 0x2070(%rsp), %xmm0 movaps %xmm1, 0x20b0(%rsp) movaps %xmm0, 0x20a0(%rsp) movaps 0x20b0(%rsp), %xmm0 addps 0x20a0(%rsp), %xmm0 movaps %xmm0, 0x210(%rsp) movaps 0x210(%rsp), %xmm0 movaps %xmm0, 0xc20(%rsp) movaps 0x900(%rsp), %xmm2 movaps 0x950(%rsp), %xmm1 movaps 0xc20(%rsp), %xmm0 movaps %xmm2, 0x2020(%rsp) movaps %xmm1, 0x2010(%rsp) movaps %xmm0, 0x2000(%rsp) movaps 0x2020(%rsp), %xmm1 movaps 0x2010(%rsp), %xmm0 movaps %xmm1, 0x2060(%rsp) movaps %xmm0, 0x2050(%rsp) movaps 0x2060(%rsp), %xmm1 mulps 0x2050(%rsp), %xmm1 movaps 0x2000(%rsp), %xmm0 movaps %xmm1, 0x2040(%rsp) movaps %xmm0, 0x2030(%rsp) movaps 0x2040(%rsp), %xmm0 addps 0x2030(%rsp), %xmm0 movaps %xmm0, 0x200(%rsp) movaps 0x200(%rsp), %xmm0 movaps %xmm0, 0xc20(%rsp) movaps 0x8f0(%rsp), %xmm2 movaps 0x940(%rsp), %xmm1 movaps 0xc20(%rsp), %xmm0 movaps 
%xmm2, 0x1fb0(%rsp) movaps %xmm1, 0x1fa0(%rsp) movaps %xmm0, 0x1f90(%rsp) movaps 0x1fb0(%rsp), %xmm1 movaps 0x1fa0(%rsp), %xmm0 movaps %xmm1, 0x1ff0(%rsp) movaps %xmm0, 0x1fe0(%rsp) movaps 0x1ff0(%rsp), %xmm1 mulps 0x1fe0(%rsp), %xmm1 movaps 0x1f90(%rsp), %xmm0 movaps %xmm1, 0x1fd0(%rsp) movaps %xmm0, 0x1fc0(%rsp) movaps 0x1fd0(%rsp), %xmm0 addps 0x1fc0(%rsp), %xmm0 movaps %xmm0, 0x1f0(%rsp) movaps 0x1f0(%rsp), %xmm0 movaps %xmm0, 0xc20(%rsp) movq 0xc40(%rsp), %rax movq %rax, 0x10d8(%rsp) movq 0x10d8(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x8e0(%rsp) movq 0xc40(%rsp), %rax addq $0x10, %rax movq %rax, 0x10d0(%rsp) movq 0x10d0(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x8d0(%rsp) movq 0xc40(%rsp), %rax addq $0x20, %rax movq %rax, 0x10c8(%rsp) movq 0x10c8(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x8c0(%rsp) movq 0xc40(%rsp), %rax addq $0x30, %rax movq %rax, 0x10c0(%rsp) movq 0x10c0(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x8b0(%rsp) movq 0xc40(%rsp), %rax addq $0x40, %rax movq %rax, 0x10b8(%rsp) movq 0x10b8(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x8a0(%rsp) movaps 0x930(%rsp), %xmm2 movaps 0x8e0(%rsp), %xmm1 movaps 0xc10(%rsp), %xmm0 movaps %xmm2, 0x1f40(%rsp) movaps %xmm1, 0x1f30(%rsp) movaps %xmm0, 0x1f20(%rsp) movaps 0x1f40(%rsp), %xmm1 movaps 0x1f30(%rsp), %xmm0 movaps %xmm1, 0x1f80(%rsp) movaps %xmm0, 0x1f70(%rsp) movaps 0x1f80(%rsp), %xmm1 mulps 0x1f70(%rsp), %xmm1 movaps 0x1f20(%rsp), %xmm0 movaps %xmm1, 0x1f60(%rsp) movaps %xmm0, 0x1f50(%rsp) movaps 0x1f60(%rsp), %xmm0 addps 0x1f50(%rsp), %xmm0 movaps %xmm0, 0x1e0(%rsp) movaps 0x1e0(%rsp), %xmm0 movaps %xmm0, 0xc10(%rsp) movaps 0x920(%rsp), %xmm2 movaps 0x8d0(%rsp), %xmm1 movaps 0xc10(%rsp), %xmm0 movaps %xmm2, 0x1ed0(%rsp) movaps %xmm1, 0x1ec0(%rsp) movaps %xmm0, 0x1eb0(%rsp) movaps 0x1ed0(%rsp), %xmm1 movaps 0x1ec0(%rsp), %xmm0 movaps %xmm1, 0x1f10(%rsp) movaps %xmm0, 0x1f00(%rsp) movaps 0x1f10(%rsp), %xmm1 mulps 0x1f00(%rsp), %xmm1 movaps 0x1eb0(%rsp), %xmm0 movaps %xmm1, 
0x1ef0(%rsp) movaps %xmm0, 0x1ee0(%rsp) movaps 0x1ef0(%rsp), %xmm0 addps 0x1ee0(%rsp), %xmm0 movaps %xmm0, 0x1d0(%rsp) movaps 0x1d0(%rsp), %xmm0 movaps %xmm0, 0xc10(%rsp) movaps 0x910(%rsp), %xmm2 movaps 0x8c0(%rsp), %xmm1 movaps 0xc10(%rsp), %xmm0 movaps %xmm2, 0x1e60(%rsp) movaps %xmm1, 0x1e50(%rsp) movaps %xmm0, 0x1e40(%rsp) movaps 0x1e60(%rsp), %xmm1 movaps 0x1e50(%rsp), %xmm0 movaps %xmm1, 0x1ea0(%rsp) movaps %xmm0, 0x1e90(%rsp) movaps 0x1ea0(%rsp), %xmm1 mulps 0x1e90(%rsp), %xmm1 movaps 0x1e40(%rsp), %xmm0 movaps %xmm1, 0x1e80(%rsp) movaps %xmm0, 0x1e70(%rsp) movaps 0x1e80(%rsp), %xmm0 addps 0x1e70(%rsp), %xmm0 movaps %xmm0, 0x1c0(%rsp) movaps 0x1c0(%rsp), %xmm0 movaps %xmm0, 0xc10(%rsp) movaps 0x900(%rsp), %xmm2 movaps 0x8b0(%rsp), %xmm1 movaps 0xc10(%rsp), %xmm0 movaps %xmm2, 0x1df0(%rsp) movaps %xmm1, 0x1de0(%rsp) movaps %xmm0, 0x1dd0(%rsp) movaps 0x1df0(%rsp), %xmm1 movaps 0x1de0(%rsp), %xmm0 movaps %xmm1, 0x1e30(%rsp) movaps %xmm0, 0x1e20(%rsp) movaps 0x1e30(%rsp), %xmm1 mulps 0x1e20(%rsp), %xmm1 movaps 0x1dd0(%rsp), %xmm0 movaps %xmm1, 0x1e10(%rsp) movaps %xmm0, 0x1e00(%rsp) movaps 0x1e10(%rsp), %xmm0 addps 0x1e00(%rsp), %xmm0 movaps %xmm0, 0x1b0(%rsp) movaps 0x1b0(%rsp), %xmm0 movaps %xmm0, 0xc10(%rsp) movaps 0x8f0(%rsp), %xmm2 movaps 0x8a0(%rsp), %xmm1 movaps 0xc10(%rsp), %xmm0 movaps %xmm2, 0x1d80(%rsp) movaps %xmm1, 0x1d70(%rsp) movaps %xmm0, 0x1d60(%rsp) movaps 0x1d80(%rsp), %xmm1 movaps 0x1d70(%rsp), %xmm0 movaps %xmm1, 0x1dc0(%rsp) movaps %xmm0, 0x1db0(%rsp) movaps 0x1dc0(%rsp), %xmm1 mulps 0x1db0(%rsp), %xmm1 movaps 0x1d60(%rsp), %xmm0 movaps %xmm1, 0x1da0(%rsp) movaps %xmm0, 0x1d90(%rsp) movaps 0x1da0(%rsp), %xmm0 addps 0x1d90(%rsp), %xmm0 movaps %xmm0, 0x1a0(%rsp) movaps 0x1a0(%rsp), %xmm0 movaps %xmm0, 0xc10(%rsp) movq 0xcc0(%rsp), %rax movaps 0xc20(%rsp), %xmm0 movq %rax, 0x3390(%rsp) movaps %xmm0, 0x3380(%rsp) movaps 0x3380(%rsp), %xmm0 movq 0x3390(%rsp), %rax movaps %xmm0, (%rax) movq 0xcb8(%rsp), %rax movaps 0xc10(%rsp), %xmm0 movq %rax, 
0x3378(%rsp) movaps %xmm0, 0x3360(%rsp) movaps 0x3360(%rsp), %xmm0 movq 0x3378(%rsp), %rax movaps %xmm0, (%rax) movq 0xcc0(%rsp), %rax addq $0x10, %rax movq %rax, 0xcc0(%rsp) movq 0xcb8(%rsp), %rax addq $0x10, %rax movq %rax, 0xcb8(%rsp) movq 0xc68(%rsp), %rax addq $0x10, %rax movq %rax, 0xc68(%rsp) movq 0xc60(%rsp), %rax addq $0x10, %rax movq %rax, 0xc60(%rsp) movq 0xc58(%rsp), %rax addq $0x10, %rax movq %rax, 0xc58(%rsp) movq 0xc50(%rsp), %rax addq $0x10, %rax movq %rax, 0xc50(%rsp) movq 0xc48(%rsp), %rax addq $0x10, %rax movq %rax, 0xc48(%rsp) movq 0xc40(%rsp), %rax addq $0x10, %rax movq %rax, 0xc40(%rsp) movl 0xc38(%rsp), %eax addl $0x1, %eax movl %eax, 0xc38(%rsp) jmp 0x16a6564 movl 0xd54(%rsp), %ecx shll $0x2, %ecx addl $0x10, %ecx movq 0xc68(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0xc68(%rsp) movl 0xd54(%rsp), %ecx shll $0x2, %ecx addl $0x10, %ecx movq 0xc60(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0xc60(%rsp) movl 0xd54(%rsp), %ecx shll $0x2, %ecx addl $0x10, %ecx movq 0xc58(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0xc58(%rsp) movl 0xd54(%rsp), %ecx shll $0x2, %ecx addl $0x10, %ecx movq 0xc50(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0xc50(%rsp) movl 0xd54(%rsp), %ecx shll $0x2, %ecx addl $0x10, %ecx movq 0xc48(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0xc48(%rsp) movl 0xd54(%rsp), %ecx shll $0x2, %ecx addl $0x10, %ecx movq 0xc40(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0xc40(%rsp) movl 0xd50(%rsp), %ecx shll $0x2, %ecx movq 0xcc0(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0xcc0(%rsp) movl 0xd50(%rsp), %ecx shll $0x2, %ecx movq 0xcb8(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0xcb8(%rsp) movl 0xc3c(%rsp), %eax addl $0x2, %eax movl %eax, 0xc3c(%rsp) jmp 0x16a6542 jmp 0x16a8fd9 movl 0xc3c(%rsp), %eax cmpl 
0xd4c(%rsp), %eax jge 0x16aa8a9 movl $0x0, 0x89c(%rsp) movl 0x89c(%rsp), %eax cmpl 0xd50(%rsp), %eax jge 0x16aa82f movaps 0xce0(%rsp), %xmm0 movaps %xmm0, 0x880(%rsp) movq 0xc68(%rsp), %rax movq %rax, 0x10b0(%rsp) movq 0x10b0(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x870(%rsp) movq 0xc68(%rsp), %rax addq $0x10, %rax movq %rax, 0x10a8(%rsp) movq 0x10a8(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x860(%rsp) movq 0xc68(%rsp), %rax addq $0x20, %rax movq %rax, 0x10a0(%rsp) movq 0x10a0(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x850(%rsp) movq 0xc68(%rsp), %rax addq $0x30, %rax movq %rax, 0x1098(%rsp) movq 0x1098(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x840(%rsp) movq 0xc68(%rsp), %rax addq $0x40, %rax movq %rax, 0x1090(%rsp) movq 0x1090(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x830(%rsp) movq 0xcc8(%rsp), %rax movq %rax, 0x1088(%rsp) movq 0x1088(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x820(%rsp) movq 0xcc8(%rsp), %rax addq $0x10, %rax movq %rax, 0x1080(%rsp) movq 0x1080(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x810(%rsp) movq 0xcc8(%rsp), %rax addq $0x20, %rax movq %rax, 0x1078(%rsp) movq 0x1078(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x800(%rsp) movq 0xcc8(%rsp), %rax addq $0x30, %rax movq %rax, 0x1070(%rsp) movq 0x1070(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x7f0(%rsp) movq 0xcc8(%rsp), %rax addq $0x40, %rax movq %rax, 0x1068(%rsp) movq 0x1068(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x7e0(%rsp) movq 0xcc8(%rsp), %rax addq $0x50, %rax movq %rax, 0xcc8(%rsp) movaps 0x820(%rsp), %xmm2 movaps 0x870(%rsp), %xmm1 movaps 0x880(%rsp), %xmm0 movaps %xmm2, 0x1d10(%rsp) movaps %xmm1, 0x1d00(%rsp) movaps %xmm0, 0x1cf0(%rsp) movaps 0x1d10(%rsp), %xmm1 movaps 0x1d00(%rsp), %xmm0 movaps %xmm1, 0x1d50(%rsp) movaps %xmm0, 0x1d40(%rsp) movaps 0x1d50(%rsp), %xmm1 mulps 0x1d40(%rsp), %xmm1 movaps 0x1cf0(%rsp), %xmm0 movaps %xmm1, 0x1d30(%rsp) movaps %xmm0, 0x1d20(%rsp) movaps 0x1d30(%rsp), %xmm0 addps 0x1d20(%rsp), %xmm0 
movaps %xmm0, 0x190(%rsp) movaps 0x190(%rsp), %xmm0 movaps %xmm0, 0x880(%rsp) movaps 0x810(%rsp), %xmm2 movaps 0x860(%rsp), %xmm1 movaps 0x880(%rsp), %xmm0 movaps %xmm2, 0x1ca0(%rsp) movaps %xmm1, 0x1c90(%rsp) movaps %xmm0, 0x1c80(%rsp) movaps 0x1ca0(%rsp), %xmm1 movaps 0x1c90(%rsp), %xmm0 movaps %xmm1, 0x1ce0(%rsp) movaps %xmm0, 0x1cd0(%rsp) movaps 0x1ce0(%rsp), %xmm1 mulps 0x1cd0(%rsp), %xmm1 movaps 0x1c80(%rsp), %xmm0 movaps %xmm1, 0x1cc0(%rsp) movaps %xmm0, 0x1cb0(%rsp) movaps 0x1cc0(%rsp), %xmm0 addps 0x1cb0(%rsp), %xmm0 movaps %xmm0, 0x180(%rsp) movaps 0x180(%rsp), %xmm0 movaps %xmm0, 0x880(%rsp) movaps 0x800(%rsp), %xmm2 movaps 0x850(%rsp), %xmm1 movaps 0x880(%rsp), %xmm0 movaps %xmm2, 0x1c30(%rsp) movaps %xmm1, 0x1c20(%rsp) movaps %xmm0, 0x1c10(%rsp) movaps 0x1c30(%rsp), %xmm1 movaps 0x1c20(%rsp), %xmm0 movaps %xmm1, 0x1c70(%rsp) movaps %xmm0, 0x1c60(%rsp) movaps 0x1c70(%rsp), %xmm1 mulps 0x1c60(%rsp), %xmm1 movaps 0x1c10(%rsp), %xmm0 movaps %xmm1, 0x1c50(%rsp) movaps %xmm0, 0x1c40(%rsp) movaps 0x1c50(%rsp), %xmm0 addps 0x1c40(%rsp), %xmm0 movaps %xmm0, 0x170(%rsp) movaps 0x170(%rsp), %xmm0 movaps %xmm0, 0x880(%rsp) movaps 0x7f0(%rsp), %xmm2 movaps 0x840(%rsp), %xmm1 movaps 0x880(%rsp), %xmm0 movaps %xmm2, 0x1bc0(%rsp) movaps %xmm1, 0x1bb0(%rsp) movaps %xmm0, 0x1ba0(%rsp) movaps 0x1bc0(%rsp), %xmm1 movaps 0x1bb0(%rsp), %xmm0 movaps %xmm1, 0x1c00(%rsp) movaps %xmm0, 0x1bf0(%rsp) movaps 0x1c00(%rsp), %xmm1 mulps 0x1bf0(%rsp), %xmm1 movaps 0x1ba0(%rsp), %xmm0 movaps %xmm1, 0x1be0(%rsp) movaps %xmm0, 0x1bd0(%rsp) movaps 0x1be0(%rsp), %xmm0 addps 0x1bd0(%rsp), %xmm0 movaps %xmm0, 0x160(%rsp) movaps 0x160(%rsp), %xmm0 movaps %xmm0, 0x880(%rsp) movaps 0x7e0(%rsp), %xmm2 movaps 0x830(%rsp), %xmm1 movaps 0x880(%rsp), %xmm0 movaps %xmm2, 0x1b50(%rsp) movaps %xmm1, 0x1b40(%rsp) movaps %xmm0, 0x1b30(%rsp) movaps 0x1b50(%rsp), %xmm1 movaps 0x1b40(%rsp), %xmm0 movaps %xmm1, 0x1b90(%rsp) movaps %xmm0, 0x1b80(%rsp) movaps 0x1b90(%rsp), %xmm1 mulps 0x1b80(%rsp), %xmm1 
movaps 0x1b30(%rsp), %xmm0 movaps %xmm1, 0x1b70(%rsp) movaps %xmm0, 0x1b60(%rsp) movaps 0x1b70(%rsp), %xmm0 addps 0x1b60(%rsp), %xmm0 movaps %xmm0, 0x150(%rsp) movaps 0x150(%rsp), %xmm0 movaps %xmm0, 0x880(%rsp) movq 0xc60(%rsp), %rax movq %rax, 0x1060(%rsp) movq 0x1060(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x7d0(%rsp) movq 0xc60(%rsp), %rax addq $0x10, %rax movq %rax, 0x1058(%rsp) movq 0x1058(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x7c0(%rsp) movq 0xc60(%rsp), %rax addq $0x20, %rax movq %rax, 0x1050(%rsp) movq 0x1050(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x7b0(%rsp) movq 0xc60(%rsp), %rax addq $0x30, %rax movq %rax, 0x1048(%rsp) movq 0x1048(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x7a0(%rsp) movq 0xc60(%rsp), %rax addq $0x40, %rax movq %rax, 0x1040(%rsp) movq 0x1040(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x790(%rsp) movq 0xcc8(%rsp), %rax movq %rax, 0x1038(%rsp) movq 0x1038(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x780(%rsp) movq 0xcc8(%rsp), %rax addq $0x10, %rax movq %rax, 0x1030(%rsp) movq 0x1030(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x770(%rsp) movq 0xcc8(%rsp), %rax addq $0x20, %rax movq %rax, 0x1028(%rsp) movq 0x1028(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x760(%rsp) movq 0xcc8(%rsp), %rax addq $0x30, %rax movq %rax, 0x1020(%rsp) movq 0x1020(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x750(%rsp) movq 0xcc8(%rsp), %rax addq $0x40, %rax movq %rax, 0x1018(%rsp) movq 0x1018(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x740(%rsp) movq 0xcc8(%rsp), %rax addq $0x50, %rax movq %rax, 0xcc8(%rsp) movaps 0x780(%rsp), %xmm2 movaps 0x7d0(%rsp), %xmm1 movaps 0x880(%rsp), %xmm0 movaps %xmm2, 0x1ae0(%rsp) movaps %xmm1, 0x1ad0(%rsp) movaps %xmm0, 0x1ac0(%rsp) movaps 0x1ae0(%rsp), %xmm1 movaps 0x1ad0(%rsp), %xmm0 movaps %xmm1, 0x1b20(%rsp) movaps %xmm0, 0x1b10(%rsp) movaps 0x1b20(%rsp), %xmm1 mulps 0x1b10(%rsp), %xmm1 movaps 0x1ac0(%rsp), %xmm0 movaps %xmm1, 0x1b00(%rsp) movaps %xmm0, 0x1af0(%rsp) movaps 
0x1b00(%rsp), %xmm0 addps 0x1af0(%rsp), %xmm0 movaps %xmm0, 0x140(%rsp) movaps 0x140(%rsp), %xmm0 movaps %xmm0, 0x880(%rsp) movaps 0x770(%rsp), %xmm2 movaps 0x7c0(%rsp), %xmm1 movaps 0x880(%rsp), %xmm0 movaps %xmm2, 0x1a70(%rsp) movaps %xmm1, 0x1a60(%rsp) movaps %xmm0, 0x1a50(%rsp) movaps 0x1a70(%rsp), %xmm1 movaps 0x1a60(%rsp), %xmm0 movaps %xmm1, 0x1ab0(%rsp) movaps %xmm0, 0x1aa0(%rsp) movaps 0x1ab0(%rsp), %xmm1 mulps 0x1aa0(%rsp), %xmm1 movaps 0x1a50(%rsp), %xmm0 movaps %xmm1, 0x1a90(%rsp) movaps %xmm0, 0x1a80(%rsp) movaps 0x1a90(%rsp), %xmm0 addps 0x1a80(%rsp), %xmm0 movaps %xmm0, 0x130(%rsp) movaps 0x130(%rsp), %xmm0 movaps %xmm0, 0x880(%rsp) movaps 0x760(%rsp), %xmm2 movaps 0x7b0(%rsp), %xmm1 movaps 0x880(%rsp), %xmm0 movaps %xmm2, 0x1a00(%rsp) movaps %xmm1, 0x19f0(%rsp) movaps %xmm0, 0x19e0(%rsp) movaps 0x1a00(%rsp), %xmm1 movaps 0x19f0(%rsp), %xmm0 movaps %xmm1, 0x1a40(%rsp) movaps %xmm0, 0x1a30(%rsp) movaps 0x1a40(%rsp), %xmm1 mulps 0x1a30(%rsp), %xmm1 movaps 0x19e0(%rsp), %xmm0 movaps %xmm1, 0x1a20(%rsp) movaps %xmm0, 0x1a10(%rsp) movaps 0x1a20(%rsp), %xmm0 addps 0x1a10(%rsp), %xmm0 movaps %xmm0, 0x120(%rsp) movaps 0x120(%rsp), %xmm0 movaps %xmm0, 0x880(%rsp) movaps 0x750(%rsp), %xmm2 movaps 0x7a0(%rsp), %xmm1 movaps 0x880(%rsp), %xmm0 movaps %xmm2, 0x1990(%rsp) movaps %xmm1, 0x1980(%rsp) movaps %xmm0, 0x1970(%rsp) movaps 0x1990(%rsp), %xmm1 movaps 0x1980(%rsp), %xmm0 movaps %xmm1, 0x19d0(%rsp) movaps %xmm0, 0x19c0(%rsp) movaps 0x19d0(%rsp), %xmm1 mulps 0x19c0(%rsp), %xmm1 movaps 0x1970(%rsp), %xmm0 movaps %xmm1, 0x19b0(%rsp) movaps %xmm0, 0x19a0(%rsp) movaps 0x19b0(%rsp), %xmm0 addps 0x19a0(%rsp), %xmm0 movaps %xmm0, 0x110(%rsp) movaps 0x110(%rsp), %xmm0 movaps %xmm0, 0x880(%rsp) movaps 0x740(%rsp), %xmm2 movaps 0x790(%rsp), %xmm1 movaps 0x880(%rsp), %xmm0 movaps %xmm2, 0x1920(%rsp) movaps %xmm1, 0x1910(%rsp) movaps %xmm0, 0x1900(%rsp) movaps 0x1920(%rsp), %xmm1 movaps 0x1910(%rsp), %xmm0 movaps %xmm1, 0x1960(%rsp) movaps %xmm0, 0x1950(%rsp) movaps 
0x1960(%rsp), %xmm1 mulps 0x1950(%rsp), %xmm1 movaps 0x1900(%rsp), %xmm0 movaps %xmm1, 0x1940(%rsp) movaps %xmm0, 0x1930(%rsp) movaps 0x1940(%rsp), %xmm0 addps 0x1930(%rsp), %xmm0 movaps %xmm0, 0x100(%rsp) movaps 0x100(%rsp), %xmm0 movaps %xmm0, 0x880(%rsp) movq 0xc58(%rsp), %rax movq %rax, 0x1010(%rsp) movq 0x1010(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x730(%rsp) movq 0xc58(%rsp), %rax addq $0x10, %rax movq %rax, 0x1008(%rsp) movq 0x1008(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x720(%rsp) movq 0xc58(%rsp), %rax addq $0x20, %rax movq %rax, 0x1000(%rsp) movq 0x1000(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x710(%rsp) movq 0xc58(%rsp), %rax addq $0x30, %rax movq %rax, 0xff8(%rsp) movq 0xff8(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x700(%rsp) movq 0xc58(%rsp), %rax addq $0x40, %rax movq %rax, 0xff0(%rsp) movq 0xff0(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x6f0(%rsp) movq 0xcc8(%rsp), %rax movq %rax, 0xfe8(%rsp) movq 0xfe8(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x6e0(%rsp) movq 0xcc8(%rsp), %rax addq $0x10, %rax movq %rax, 0xfe0(%rsp) movq 0xfe0(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x6d0(%rsp) movq 0xcc8(%rsp), %rax addq $0x20, %rax movq %rax, 0xfd8(%rsp) movq 0xfd8(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x6c0(%rsp) movq 0xcc8(%rsp), %rax addq $0x30, %rax movq %rax, 0xfd0(%rsp) movq 0xfd0(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x6b0(%rsp) movq 0xcc8(%rsp), %rax addq $0x40, %rax movq %rax, 0xfc8(%rsp) movq 0xfc8(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x6a0(%rsp) movq 0xcc8(%rsp), %rax addq $0x50, %rax movq %rax, 0xcc8(%rsp) movaps 0x6e0(%rsp), %xmm2 movaps 0x730(%rsp), %xmm1 movaps 0x880(%rsp), %xmm0 movaps %xmm2, 0x18b0(%rsp) movaps %xmm1, 0x18a0(%rsp) movaps %xmm0, 0x1890(%rsp) movaps 0x18b0(%rsp), %xmm1 movaps 0x18a0(%rsp), %xmm0 movaps %xmm1, 0x18f0(%rsp) movaps %xmm0, 0x18e0(%rsp) movaps 0x18f0(%rsp), %xmm1 mulps 0x18e0(%rsp), %xmm1 movaps 0x1890(%rsp), %xmm0 movaps %xmm1, 0x18d0(%rsp) 
movaps %xmm0, 0x18c0(%rsp) movaps 0x18d0(%rsp), %xmm0 addps 0x18c0(%rsp), %xmm0 movaps %xmm0, 0xf0(%rsp) movaps 0xf0(%rsp), %xmm0 movaps %xmm0, 0x880(%rsp) movaps 0x6d0(%rsp), %xmm2 movaps 0x720(%rsp), %xmm1 movaps 0x880(%rsp), %xmm0 movaps %xmm2, 0x1840(%rsp) movaps %xmm1, 0x1830(%rsp) movaps %xmm0, 0x1820(%rsp) movaps 0x1840(%rsp), %xmm1 movaps 0x1830(%rsp), %xmm0 movaps %xmm1, 0x1880(%rsp) movaps %xmm0, 0x1870(%rsp) movaps 0x1880(%rsp), %xmm1 mulps 0x1870(%rsp), %xmm1 movaps 0x1820(%rsp), %xmm0 movaps %xmm1, 0x1860(%rsp) movaps %xmm0, 0x1850(%rsp) movaps 0x1860(%rsp), %xmm0 addps 0x1850(%rsp), %xmm0 movaps %xmm0, 0xe0(%rsp) movaps 0xe0(%rsp), %xmm0 movaps %xmm0, 0x880(%rsp) movaps 0x6c0(%rsp), %xmm2 movaps 0x710(%rsp), %xmm1 movaps 0x880(%rsp), %xmm0 movaps %xmm2, 0x17d0(%rsp) movaps %xmm1, 0x17c0(%rsp) movaps %xmm0, 0x17b0(%rsp) movaps 0x17d0(%rsp), %xmm1 movaps 0x17c0(%rsp), %xmm0 movaps %xmm1, 0x1810(%rsp) movaps %xmm0, 0x1800(%rsp) movaps 0x1810(%rsp), %xmm1 mulps 0x1800(%rsp), %xmm1 movaps 0x17b0(%rsp), %xmm0 movaps %xmm1, 0x17f0(%rsp) movaps %xmm0, 0x17e0(%rsp) movaps 0x17f0(%rsp), %xmm0 addps 0x17e0(%rsp), %xmm0 movaps %xmm0, 0xd0(%rsp) movaps 0xd0(%rsp), %xmm0 movaps %xmm0, 0x880(%rsp) movaps 0x6b0(%rsp), %xmm2 movaps 0x700(%rsp), %xmm1 movaps 0x880(%rsp), %xmm0 movaps %xmm2, 0x1760(%rsp) movaps %xmm1, 0x1750(%rsp) movaps %xmm0, 0x1740(%rsp) movaps 0x1760(%rsp), %xmm1 movaps 0x1750(%rsp), %xmm0 movaps %xmm1, 0x17a0(%rsp) movaps %xmm0, 0x1790(%rsp) movaps 0x17a0(%rsp), %xmm1 mulps 0x1790(%rsp), %xmm1 movaps 0x1740(%rsp), %xmm0 movaps %xmm1, 0x1780(%rsp) movaps %xmm0, 0x1770(%rsp) movaps 0x1780(%rsp), %xmm0 addps 0x1770(%rsp), %xmm0 movaps %xmm0, 0xc0(%rsp) movaps 0xc0(%rsp), %xmm0 movaps %xmm0, 0x880(%rsp) movaps 0x6a0(%rsp), %xmm2 movaps 0x6f0(%rsp), %xmm1 movaps 0x880(%rsp), %xmm0 movaps %xmm2, 0x16f0(%rsp) movaps %xmm1, 0x16e0(%rsp) movaps %xmm0, 0x16d0(%rsp) movaps 0x16f0(%rsp), %xmm1 movaps 0x16e0(%rsp), %xmm0 movaps %xmm1, 0x1730(%rsp) movaps %xmm0, 
0x1720(%rsp) movaps 0x1730(%rsp), %xmm1 mulps 0x1720(%rsp), %xmm1 movaps 0x16d0(%rsp), %xmm0 movaps %xmm1, 0x1710(%rsp) movaps %xmm0, 0x1700(%rsp) movaps 0x1710(%rsp), %xmm0 addps 0x1700(%rsp), %xmm0 movaps %xmm0, 0xb0(%rsp) movaps 0xb0(%rsp), %xmm0 movaps %xmm0, 0x880(%rsp) movq 0xc50(%rsp), %rax movq %rax, 0xfc0(%rsp) movq 0xfc0(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x690(%rsp) movq 0xc50(%rsp), %rax addq $0x10, %rax movq %rax, 0xfb8(%rsp) movq 0xfb8(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x680(%rsp) movq 0xc50(%rsp), %rax addq $0x20, %rax movq %rax, 0xfb0(%rsp) movq 0xfb0(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x670(%rsp) movq 0xc50(%rsp), %rax addq $0x30, %rax movq %rax, 0xfa8(%rsp) movq 0xfa8(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x660(%rsp) movq 0xc50(%rsp), %rax addq $0x40, %rax movq %rax, 0xfa0(%rsp) movq 0xfa0(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x650(%rsp) movq 0xcc8(%rsp), %rax movq %rax, 0xf98(%rsp) movq 0xf98(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x640(%rsp) movq 0xcc8(%rsp), %rax addq $0x10, %rax movq %rax, 0xf90(%rsp) movq 0xf90(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x630(%rsp) movq 0xcc8(%rsp), %rax addq $0x20, %rax movq %rax, 0xf88(%rsp) movq 0xf88(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x620(%rsp) movq 0xcc8(%rsp), %rax addq $0x30, %rax movq %rax, 0xf80(%rsp) movq 0xf80(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x610(%rsp) movq 0xcc8(%rsp), %rax addq $0x40, %rax movq %rax, 0xf78(%rsp) movq 0xf78(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x600(%rsp) movq 0xcc8(%rsp), %rax addq $0x50, %rax movq %rax, 0xcc8(%rsp) movaps 0x640(%rsp), %xmm2 movaps 0x690(%rsp), %xmm1 movaps 0x880(%rsp), %xmm0 movaps %xmm2, 0x1680(%rsp) movaps %xmm1, 0x1670(%rsp) movaps %xmm0, 0x1660(%rsp) movaps 0x1680(%rsp), %xmm1 movaps 0x1670(%rsp), %xmm0 movaps %xmm1, 0x16c0(%rsp) movaps %xmm0, 0x16b0(%rsp) movaps 0x16c0(%rsp), %xmm1 mulps 0x16b0(%rsp), %xmm1 movaps 0x1660(%rsp), %xmm0 movaps %xmm1, 
0x16a0(%rsp) movaps %xmm0, 0x1690(%rsp) movaps 0x16a0(%rsp), %xmm0 addps 0x1690(%rsp), %xmm0 movaps %xmm0, 0xa0(%rsp) movaps 0xa0(%rsp), %xmm0 movaps %xmm0, 0x880(%rsp) movaps 0x630(%rsp), %xmm2 movaps 0x680(%rsp), %xmm1 movaps 0x880(%rsp), %xmm0 movaps %xmm2, 0x1610(%rsp) movaps %xmm1, 0x1600(%rsp) movaps %xmm0, 0x15f0(%rsp) movaps 0x1610(%rsp), %xmm1 movaps 0x1600(%rsp), %xmm0 movaps %xmm1, 0x1650(%rsp) movaps %xmm0, 0x1640(%rsp) movaps 0x1650(%rsp), %xmm1 mulps 0x1640(%rsp), %xmm1 movaps 0x15f0(%rsp), %xmm0 movaps %xmm1, 0x1630(%rsp) movaps %xmm0, 0x1620(%rsp) movaps 0x1630(%rsp), %xmm0 addps 0x1620(%rsp), %xmm0 movaps %xmm0, 0x90(%rsp) movaps 0x90(%rsp), %xmm0 movaps %xmm0, 0x880(%rsp) movaps 0x620(%rsp), %xmm2 movaps 0x670(%rsp), %xmm1 movaps 0x880(%rsp), %xmm0 movaps %xmm2, 0x15a0(%rsp) movaps %xmm1, 0x1590(%rsp) movaps %xmm0, 0x1580(%rsp) movaps 0x15a0(%rsp), %xmm1 movaps 0x1590(%rsp), %xmm0 movaps %xmm1, 0x15e0(%rsp) movaps %xmm0, 0x15d0(%rsp) movaps 0x15e0(%rsp), %xmm1 mulps 0x15d0(%rsp), %xmm1 movaps 0x1580(%rsp), %xmm0 movaps %xmm1, 0x15c0(%rsp) movaps %xmm0, 0x15b0(%rsp) movaps 0x15c0(%rsp), %xmm0 addps 0x15b0(%rsp), %xmm0 movaps %xmm0, 0x80(%rsp) movaps 0x80(%rsp), %xmm0 movaps %xmm0, 0x880(%rsp) movaps 0x610(%rsp), %xmm2 movaps 0x660(%rsp), %xmm1 movaps 0x880(%rsp), %xmm0 movaps %xmm2, 0x1530(%rsp) movaps %xmm1, 0x1520(%rsp) movaps %xmm0, 0x1510(%rsp) movaps 0x1530(%rsp), %xmm1 movaps 0x1520(%rsp), %xmm0 movaps %xmm1, 0x1570(%rsp) movaps %xmm0, 0x1560(%rsp) movaps 0x1570(%rsp), %xmm1 mulps 0x1560(%rsp), %xmm1 movaps 0x1510(%rsp), %xmm0 movaps %xmm1, 0x1550(%rsp) movaps %xmm0, 0x1540(%rsp) movaps 0x1550(%rsp), %xmm0 addps 0x1540(%rsp), %xmm0 movaps %xmm0, 0x70(%rsp) movaps 0x70(%rsp), %xmm0 movaps %xmm0, 0x880(%rsp) movaps 0x600(%rsp), %xmm2 movaps 0x650(%rsp), %xmm1 movaps 0x880(%rsp), %xmm0 movaps %xmm2, 0x14c0(%rsp) movaps %xmm1, 0x14b0(%rsp) movaps %xmm0, 0x14a0(%rsp) movaps 0x14c0(%rsp), %xmm1 movaps 0x14b0(%rsp), %xmm0 movaps %xmm1, 0x1500(%rsp) 
movaps %xmm0, 0x14f0(%rsp) movaps 0x1500(%rsp), %xmm1 mulps 0x14f0(%rsp), %xmm1 movaps 0x14a0(%rsp), %xmm0 movaps %xmm1, 0x14e0(%rsp) movaps %xmm0, 0x14d0(%rsp) movaps 0x14e0(%rsp), %xmm0 addps 0x14d0(%rsp), %xmm0 movaps %xmm0, 0x60(%rsp) movaps 0x60(%rsp), %xmm0 movaps %xmm0, 0x880(%rsp) movq 0xc48(%rsp), %rax movq %rax, 0xf70(%rsp) movq 0xf70(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x5f0(%rsp) movq 0xc48(%rsp), %rax addq $0x10, %rax movq %rax, 0xf68(%rsp) movq 0xf68(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x5e0(%rsp) movq 0xc48(%rsp), %rax addq $0x20, %rax movq %rax, 0xf60(%rsp) movq 0xf60(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x5d0(%rsp) movq 0xc48(%rsp), %rax addq $0x30, %rax movq %rax, 0xf58(%rsp) movq 0xf58(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x5c0(%rsp) movq 0xc48(%rsp), %rax addq $0x40, %rax movq %rax, 0xf50(%rsp) movq 0xf50(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x5b0(%rsp) movq 0xcc8(%rsp), %rax movq %rax, 0xf48(%rsp) movq 0xf48(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x5a0(%rsp) movq 0xcc8(%rsp), %rax addq $0x10, %rax movq %rax, 0xf40(%rsp) movq 0xf40(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x590(%rsp) movq 0xcc8(%rsp), %rax addq $0x20, %rax movq %rax, 0xf38(%rsp) movq 0xf38(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x580(%rsp) movq 0xcc8(%rsp), %rax addq $0x30, %rax movq %rax, 0xf30(%rsp) movq 0xf30(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x570(%rsp) movq 0xcc8(%rsp), %rax addq $0x40, %rax movq %rax, 0xf28(%rsp) movq 0xf28(%rsp), %rax movaps (%rax), %xmm0 movaps %xmm0, 0x560(%rsp) movq 0xcc8(%rsp), %rax addq $-0x140, %rax # imm = 0xFEC0 movq %rax, 0xcc8(%rsp) movaps 0x5a0(%rsp), %xmm2 movaps 0x5f0(%rsp), %xmm1 movaps 0x880(%rsp), %xmm0 movaps %xmm2, 0x1450(%rsp) movaps %xmm1, 0x1440(%rsp) movaps %xmm0, 0x1430(%rsp) movaps 0x1450(%rsp), %xmm1 movaps 0x1440(%rsp), %xmm0 movaps %xmm1, 0x1490(%rsp) movaps %xmm0, 0x1480(%rsp) movaps 0x1490(%rsp), %xmm1 mulps 0x1480(%rsp), %xmm1 movaps 
0x1430(%rsp), %xmm0 movaps %xmm1, 0x1470(%rsp) movaps %xmm0, 0x1460(%rsp) movaps 0x1470(%rsp), %xmm0 addps 0x1460(%rsp), %xmm0 movaps %xmm0, 0x50(%rsp) movaps 0x50(%rsp), %xmm0 movaps %xmm0, 0x880(%rsp) movaps 0x590(%rsp), %xmm2 movaps 0x5e0(%rsp), %xmm1 movaps 0x880(%rsp), %xmm0 movaps %xmm2, 0x13e0(%rsp) movaps %xmm1, 0x13d0(%rsp) movaps %xmm0, 0x13c0(%rsp) movaps 0x13e0(%rsp), %xmm1 movaps 0x13d0(%rsp), %xmm0 movaps %xmm1, 0x1420(%rsp) movaps %xmm0, 0x1410(%rsp) movaps 0x1420(%rsp), %xmm1 mulps 0x1410(%rsp), %xmm1 movaps 0x13c0(%rsp), %xmm0 movaps %xmm1, 0x1400(%rsp) movaps %xmm0, 0x13f0(%rsp) movaps 0x1400(%rsp), %xmm0 addps 0x13f0(%rsp), %xmm0 movaps %xmm0, 0x40(%rsp) movaps 0x40(%rsp), %xmm0 movaps %xmm0, 0x880(%rsp) movaps 0x580(%rsp), %xmm2 movaps 0x5d0(%rsp), %xmm1 movaps 0x880(%rsp), %xmm0 movaps %xmm2, 0x1370(%rsp) movaps %xmm1, 0x1360(%rsp) movaps %xmm0, 0x1350(%rsp) movaps 0x1370(%rsp), %xmm1 movaps 0x1360(%rsp), %xmm0 movaps %xmm1, 0x13b0(%rsp) movaps %xmm0, 0x13a0(%rsp) movaps 0x13b0(%rsp), %xmm1 mulps 0x13a0(%rsp), %xmm1 movaps 0x1350(%rsp), %xmm0 movaps %xmm1, 0x1390(%rsp) movaps %xmm0, 0x1380(%rsp) movaps 0x1390(%rsp), %xmm0 addps 0x1380(%rsp), %xmm0 movaps %xmm0, 0x30(%rsp) movaps 0x30(%rsp), %xmm0 movaps %xmm0, 0x880(%rsp) movaps 0x570(%rsp), %xmm2 movaps 0x5c0(%rsp), %xmm1 movaps 0x880(%rsp), %xmm0 movaps %xmm2, 0x1300(%rsp) movaps %xmm1, 0x12f0(%rsp) movaps %xmm0, 0x12e0(%rsp) movaps 0x1300(%rsp), %xmm1 movaps 0x12f0(%rsp), %xmm0 movaps %xmm1, 0x1340(%rsp) movaps %xmm0, 0x1330(%rsp) movaps 0x1340(%rsp), %xmm1 mulps 0x1330(%rsp), %xmm1 movaps 0x12e0(%rsp), %xmm0 movaps %xmm1, 0x1320(%rsp) movaps %xmm0, 0x1310(%rsp) movaps 0x1320(%rsp), %xmm0 addps 0x1310(%rsp), %xmm0 movaps %xmm0, 0x20(%rsp) movaps 0x20(%rsp), %xmm0 movaps %xmm0, 0x880(%rsp) movaps 0x560(%rsp), %xmm2 movaps 0x5b0(%rsp), %xmm1 movaps 0x880(%rsp), %xmm0 movaps %xmm2, 0x1290(%rsp) movaps %xmm1, 0x1280(%rsp) movaps %xmm0, 0x1270(%rsp) movaps 0x1290(%rsp), %xmm1 movaps 0x1280(%rsp), 
%xmm0 movaps %xmm1, 0x12d0(%rsp) movaps %xmm0, 0x12c0(%rsp) movaps 0x12d0(%rsp), %xmm1 mulps 0x12c0(%rsp), %xmm1 movaps 0x1270(%rsp), %xmm0 movaps %xmm1, 0x12b0(%rsp) movaps %xmm0, 0x12a0(%rsp) movaps 0x12b0(%rsp), %xmm0 addps 0x12a0(%rsp), %xmm0 movaps %xmm0, 0x10(%rsp) movaps 0x10(%rsp), %xmm0 movaps %xmm0, 0x880(%rsp) movq 0xcc0(%rsp), %rax movaps 0x880(%rsp), %xmm0 movq %rax, 0x3358(%rsp) movaps %xmm0, 0x3340(%rsp) movaps 0x3340(%rsp), %xmm0 movq 0x3358(%rsp), %rax movaps %xmm0, (%rax) movq 0xcc0(%rsp), %rax addq $0x10, %rax movq %rax, 0xcc0(%rsp) movq 0xc68(%rsp), %rax addq $0x10, %rax movq %rax, 0xc68(%rsp) movq 0xc60(%rsp), %rax addq $0x10, %rax movq %rax, 0xc60(%rsp) movq 0xc58(%rsp), %rax addq $0x10, %rax movq %rax, 0xc58(%rsp) movq 0xc50(%rsp), %rax addq $0x10, %rax movq %rax, 0xc50(%rsp) movq 0xc48(%rsp), %rax addq $0x10, %rax movq %rax, 0xc48(%rsp) movl 0x89c(%rsp), %eax addl $0x1, %eax movl %eax, 0x89c(%rsp) jmp 0x16a8ff8 movq 0xc68(%rsp), %rax addq $0x40, %rax movq %rax, 0xc68(%rsp) movq 0xc60(%rsp), %rax addq $0x40, %rax movq %rax, 0xc60(%rsp) movq 0xc58(%rsp), %rax addq $0x40, %rax movq %rax, 0xc58(%rsp) movq 0xc50(%rsp), %rax addq $0x40, %rax movq %rax, 0xc50(%rsp) movq 0xc48(%rsp), %rax addq $0x40, %rax movq %rax, 0xc48(%rsp) movl 0xc3c(%rsp), %eax addl $0x1, %eax movl %eax, 0xc3c(%rsp) jmp 0x16a8fd9 leaq 0xc70(%rsp), %rax movq %rax, 0xd80(%rsp) movq 0xd80(%rsp), %rax movq %rax, 0xdd8(%rsp) movq 0xdd8(%rsp), %rax movq %rax, 0x8(%rsp) cmpq $0x0, 0x8(%rax) je 0x16aa962 movq 0x8(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xdd4(%rsp) # imm = 0xFFFFFFFF movl 0xdd4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xdd0(%rsp) cmpl $0x1, 0xdd0(%rsp) jne 0x16aa962 movq 0x8(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x16aa936 movq 0x8(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x16aa934 jmp 0x16aa960 movq 0x8(%rsp), %rax movq (%rax), %rax movq %rax, 0xeb0(%rsp) cmpq $0x0, 0xeb0(%rsp) je 
0x16aa95e movq 0xeb0(%rsp), %rdi callq 0x5e480 jmp 0x16aa960 jmp 0x16aa962 movq 0x8(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x16aa9ba movq %rax, %rdi callq 0x5fc90 leaq 0xcf0(%rsp), %rax movq %rax, 0xd90(%rsp) movq 0xd90(%rsp), %rax movq %rax, 0xdb8(%rsp) movq 0xdb8(%rsp), %rax movq %rax, (%rsp) cmpq $0x0, 0x8(%rax) je 0x16aaa6a movq (%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xdb4(%rsp) # imm = 0xFFFFFFFF movl 0xdb4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xdb0(%rsp) cmpl $0x1, 0xdb0(%rsp) jne 0x16aaa6a movq (%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x16aaa3f movq (%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x16aaa3d jmp 0x16aaa68 movq (%rsp), %rax movq (%rax), %rax movq %rax, 0xec0(%rsp) cmpq $0x0, 0xec0(%rsp) je 0x16aaa66 movq 0xec0(%rsp), %rdi callq 0x5e480 jmp 0x16aaa68 jmp 0x16aaa6a movq (%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x16aaac1 movq %rax, %rdi callq 0x5fc90 jmp 0x16aaac3 movl 0xd3c(%rsp), %eax addl $0x1, %eax movl %eax, 0xd3c(%rsp) jmp 0x16a5bb5 addq $0x3428, %rsp # imm = 0x3428 retq nopw %cs:(%rax,%rax)
/ysh329[P]ncnn/src/layer/x86/convolutiondepthwise_5x5_pack4.h
ncnn::convdw3x3s1_sse(ncnn::Mat const&, ncnn::Mat&, ncnn::Mat const&, ncnn::Mat const&, ncnn::Option const&)
static void convdw3x3s1_sse(const Mat& bottom_blob, Mat& top_blob, const Mat& _kernel, const Mat& _bias, const Option& opt) { int w = bottom_blob.w; int outw = top_blob.w; int outh = top_blob.h; const int group = bottom_blob.c; const float* kernel = _kernel; const float* bias = _bias; #pragma omp parallel for num_threads(opt.num_threads) for (int g = 0; g < group; g++) { Mat out = top_blob.channel(g); const float bias0 = bias ? bias[g] : 0.f; const float* kernel0 = kernel + g * 9; float* outptr = out; float* outptr2 = outptr + outw; const float* img0 = bottom_blob.channel(g); const float* r0 = img0; const float* r1 = img0 + w; const float* r2 = img0 + w * 2; const float* r3 = img0 + w * 3; const float* k0 = kernel0; const float* k1 = kernel0 + 3; const float* k2 = kernel0 + 6; int i = 0; for (; i + 1 < outh; i += 2) { int remain = outw; for (; remain > 0; remain--) { float sum = bias0; sum += r0[0] * k0[0]; sum += r0[1] * k0[1]; sum += r0[2] * k0[2]; sum += r1[0] * k1[0]; sum += r1[1] * k1[1]; sum += r1[2] * k1[2]; sum += r2[0] * k2[0]; sum += r2[1] * k2[1]; sum += r2[2] * k2[2]; float sum2 = bias0; sum2 += r1[0] * k0[0]; sum2 += r1[1] * k0[1]; sum2 += r1[2] * k0[2]; sum2 += r2[0] * k1[0]; sum2 += r2[1] * k1[1]; sum2 += r2[2] * k1[2]; sum2 += r3[0] * k2[0]; sum2 += r3[1] * k2[1]; sum2 += r3[2] * k2[2]; *outptr = sum; *outptr2 = sum2; r0++; r1++; r2++; r3++; outptr++; outptr2++; } r0 += 2 + w; r1 += 2 + w; r2 += 2 + w; r3 += 2 + w; outptr += outw; outptr2 += outw; } for (; i < outh; i++) { int remain = outw; for (; remain > 0; remain--) { float sum = bias0; sum += r0[0] * k0[0]; sum += r0[1] * k0[1]; sum += r0[2] * k0[2]; sum += r1[0] * k1[0]; sum += r1[1] * k1[1]; sum += r1[2] * k1[2]; sum += r2[0] * k2[0]; sum += r2[1] * k2[1]; sum += r2[2] * k2[2]; *outptr = sum; r0++; r1++; r2++; outptr++; } r0 += 2; r1 += 2; r2 += 2; } } }
subq $0x368, %rsp # imm = 0x368 movq %rdi, 0x1c0(%rsp) movq %rsi, 0x1b8(%rsp) movq %rdx, 0x1b0(%rsp) movq %rcx, 0x1a8(%rsp) movq %r8, 0x1a0(%rsp) movq 0x1c0(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x19c(%rsp) movq 0x1b8(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x198(%rsp) movq 0x1b8(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0x194(%rsp) movq 0x1c0(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0x190(%rsp) movq 0x1b0(%rsp), %rax movq %rax, 0x270(%rsp) movq 0x270(%rsp), %rax movq (%rax), %rax movq %rax, 0x188(%rsp) movq 0x1a8(%rsp), %rax movq %rax, 0x268(%rsp) movq 0x268(%rsp), %rax movq (%rax), %rax movq %rax, 0x180(%rsp) movl $0x0, 0x17c(%rsp) movl 0x17c(%rsp), %eax cmpl 0x190(%rsp), %eax jge 0x16ae0da movq 0x1b8(%rsp), %rcx movl 0x17c(%rsp), %eax leaq 0x130(%rsp), %rdx movq %rdx, 0x238(%rsp) movq %rcx, 0x230(%rsp) movl %eax, 0x22c(%rsp) movq 0x230(%rsp), %rax movq %rax, 0x58(%rsp) movb $0x0, 0x22b(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x22c(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x130(%rsp), %r10 movq %r10, 0x340(%rsp) movl %r9d, 0x33c(%rsp) movl %r8d, 0x338(%rsp) movl %edi, 0x334(%rsp) movq %rsi, 0x328(%rsp) movq %rdx, 0x320(%rsp) movl %ecx, 0x31c(%rsp) movq %rax, 0x310(%rsp) movq 0x340(%rsp), %rcx movq %rcx, 0x50(%rsp) movq 0x328(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x320(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x31c(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x310(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x33c(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x338(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x334(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x350(%rsp) movl $0x10, 0x34c(%rsp) movq 0x350(%rsp), %rax movslq 0x34c(%rsp), %rdx addq 
%rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x34c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x58(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x158(%rsp) cmpl $0x4, 0x28(%rax) jne 0x16ad26c movq 0x58(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x170(%rsp) movb $0x1, 0x22b(%rsp) testb $0x1, 0x22b(%rsp) jne 0x16ad393 leaq 0x130(%rsp), %rax movq %rax, 0x240(%rsp) movq 0x240(%rsp), %rax movq %rax, 0x250(%rsp) movq 0x250(%rsp), %rax movq %rax, 0x48(%rsp) cmpq $0x0, 0x8(%rax) je 0x16ad33b movq 0x48(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x24c(%rsp) # imm = 0xFFFFFFFF movl 0x24c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x248(%rsp) cmpl $0x1, 0x248(%rsp) jne 0x16ad33b movq 0x48(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x16ad30f movq 0x48(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x16ad30d jmp 0x16ad339 movq 0x48(%rsp), %rax movq (%rax), %rax movq %rax, 0x278(%rsp) cmpq $0x0, 0x278(%rsp) je 0x16ad337 movq 0x278(%rsp), %rdi callq 0x5e480 jmp 0x16ad339 jmp 0x16ad33b movq 0x48(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x16ad393 movq %rax, %rdi callq 0x5fc90 cmpq $0x0, 0x180(%rsp) je 0x16ad3bb movq 0x180(%rsp), %rax movslq 0x17c(%rsp), %rcx movss (%rax,%rcx,4), %xmm0 movss %xmm0, 0x44(%rsp) jmp 0x16ad3c6 xorps %xmm0, %xmm0 movss %xmm0, 0x44(%rsp) jmp 0x16ad3c6 movss 0x44(%rsp), %xmm0 movss %xmm0, 0x12c(%rsp) movq 0x188(%rsp), %rax imull $0x9, 0x17c(%rsp), %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x120(%rsp) leaq 0x130(%rsp), %rax movq %rax, 0x258(%rsp) movq 0x258(%rsp), %rax movq (%rax), %rax movq %rax, 0x38(%rsp) movq 0x38(%rsp), %rax movq %rax, 
0x118(%rsp) movq 0x118(%rsp), %rax movslq 0x198(%rsp), %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x100(%rsp) movq 0x1c0(%rsp), %rcx movl 0x17c(%rsp), %eax leaq 0xb0(%rsp), %rdx movq %rdx, 0x2b0(%rsp) movq %rcx, 0x2a8(%rsp) movl %eax, 0x2a4(%rsp) movq 0x2a8(%rsp), %rax movq %rax, 0x28(%rsp) movb $0x0, 0x2a3(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x2a4(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0xb0(%rsp), %r10 movq %r10, 0x308(%rsp) movl %r9d, 0x304(%rsp) movl %r8d, 0x300(%rsp) movl %edi, 0x2fc(%rsp) movq %rsi, 0x2f0(%rsp) movq %rdx, 0x2e8(%rsp) movl %ecx, 0x2e4(%rsp) movq %rax, 0x2d8(%rsp) movq 0x308(%rsp), %rcx movq %rcx, 0x30(%rsp) movq 0x2f0(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x2e8(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x2e4(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x2d8(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x304(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x300(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x2fc(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x360(%rsp) movl $0x10, 0x35c(%rsp) movq 0x360(%rsp), %rax movslq 0x35c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x35c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rcx movq 0x30(%rsp), %rax movq %rcx, 0x40(%rax) movq 0x28(%rsp), %rax movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0xd8(%rsp) cmpl $0x4, 0x28(%rax) jne 0x16ad5f8 movq 0x28(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0xf0(%rsp) movb $0x1, 0x2a3(%rsp) testb $0x1, 0x2a3(%rsp) jne 0x16ad71f leaq 0xb0(%rsp), %rax movq %rax, 0x2b8(%rsp) movq 0x2b8(%rsp), %rax movq %rax, 0x2c8(%rsp) movq 0x2c8(%rsp), %rax movq %rax, 
0x20(%rsp) cmpq $0x0, 0x8(%rax) je 0x16ad6c7 movq 0x20(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x2c4(%rsp) # imm = 0xFFFFFFFF movl 0x2c4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x2c0(%rsp) cmpl $0x1, 0x2c0(%rsp) jne 0x16ad6c7 movq 0x20(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x16ad69b movq 0x20(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x16ad699 jmp 0x16ad6c5 movq 0x20(%rsp), %rax movq (%rax), %rax movq %rax, 0x2d0(%rsp) cmpq $0x0, 0x2d0(%rsp) je 0x16ad6c3 movq 0x2d0(%rsp), %rdi callq 0x5e480 jmp 0x16ad6c5 jmp 0x16ad6c7 movq 0x20(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x16ad71f movq %rax, %rdi callq 0x5fc90 jmp 0x16ad721 leaq 0xb0(%rsp), %rax movq %rax, 0x260(%rsp) movq 0x260(%rsp), %rax movq (%rax), %rax movq %rax, 0x18(%rsp) leaq 0xb0(%rsp), %rax movq %rax, 0x1c8(%rsp) movq 0x1c8(%rsp), %rax movq %rax, 0x220(%rsp) movq 0x220(%rsp), %rax movq %rax, 0x10(%rsp) cmpq $0x0, 0x8(%rax) je 0x16ad7fa movq 0x10(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x21c(%rsp) # imm = 0xFFFFFFFF movl 0x21c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x218(%rsp) cmpl $0x1, 0x218(%rsp) jne 0x16ad7fa movq 0x10(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x16ad7ce movq 0x10(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x16ad7cc jmp 0x16ad7f8 movq 0x10(%rsp), %rax movq (%rax), %rax movq %rax, 0x280(%rsp) cmpq $0x0, 0x280(%rsp) je 0x16ad7f6 movq 0x280(%rsp), %rdi callq 0x5e480 jmp 0x16ad7f8 jmp 0x16ad7fa movq 0x10(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x16ad852 movq 
%rax, %rdi callq 0x5fc90 movq 0x18(%rsp), %rax movq %rax, 0xf8(%rsp) movq 0xf8(%rsp), %rax movq %rax, 0xa8(%rsp) movq 0xf8(%rsp), %rax movslq 0x19c(%rsp), %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0xa0(%rsp) movq 0xf8(%rsp), %rax movl 0x19c(%rsp), %ecx shll %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x98(%rsp) movq 0xf8(%rsp), %rax imull $0x3, 0x19c(%rsp), %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x90(%rsp) movq 0x120(%rsp), %rax movq %rax, 0x88(%rsp) movq 0x120(%rsp), %rax addq $0xc, %rax movq %rax, 0x80(%rsp) movq 0x120(%rsp), %rax addq $0x18, %rax movq %rax, 0x78(%rsp) movl $0x0, 0x74(%rsp) movl 0x74(%rsp), %eax addl $0x1, %eax cmpl 0x194(%rsp), %eax jge 0x16add74 movl 0x198(%rsp), %eax movl %eax, 0x70(%rsp) cmpl $0x0, 0x70(%rsp) jle 0x16adc92 movss 0x12c(%rsp), %xmm0 movss %xmm0, 0x6c(%rsp) movq 0xa8(%rsp), %rax movss (%rax), %xmm0 movq 0x88(%rsp), %rax mulss (%rax), %xmm0 addss 0x6c(%rsp), %xmm0 movss %xmm0, 0x6c(%rsp) movq 0xa8(%rsp), %rax movss 0x4(%rax), %xmm0 movq 0x88(%rsp), %rax mulss 0x4(%rax), %xmm0 addss 0x6c(%rsp), %xmm0 movss %xmm0, 0x6c(%rsp) movq 0xa8(%rsp), %rax movss 0x8(%rax), %xmm0 movq 0x88(%rsp), %rax mulss 0x8(%rax), %xmm0 addss 0x6c(%rsp), %xmm0 movss %xmm0, 0x6c(%rsp) movq 0xa0(%rsp), %rax movss (%rax), %xmm0 movq 0x80(%rsp), %rax mulss (%rax), %xmm0 addss 0x6c(%rsp), %xmm0 movss %xmm0, 0x6c(%rsp) movq 0xa0(%rsp), %rax movss 0x4(%rax), %xmm0 movq 0x80(%rsp), %rax mulss 0x4(%rax), %xmm0 addss 0x6c(%rsp), %xmm0 movss %xmm0, 0x6c(%rsp) movq 0xa0(%rsp), %rax movss 0x8(%rax), %xmm0 movq 0x80(%rsp), %rax mulss 0x8(%rax), %xmm0 addss 0x6c(%rsp), %xmm0 movss %xmm0, 0x6c(%rsp) movq 0x98(%rsp), %rax movss (%rax), %xmm0 movq 0x78(%rsp), %rax mulss (%rax), %xmm0 addss 0x6c(%rsp), %xmm0 movss %xmm0, 0x6c(%rsp) movq 0x98(%rsp), %rax movss 0x4(%rax), %xmm0 movq 0x78(%rsp), %rax mulss 0x4(%rax), %xmm0 addss 0x6c(%rsp), %xmm0 movss %xmm0, 0x6c(%rsp) movq 0x98(%rsp), %rax movss 0x8(%rax), %xmm0 movq 
0x78(%rsp), %rax mulss 0x8(%rax), %xmm0 addss 0x6c(%rsp), %xmm0 movss %xmm0, 0x6c(%rsp) movss 0x12c(%rsp), %xmm0 movss %xmm0, 0x68(%rsp) movq 0xa0(%rsp), %rax movss (%rax), %xmm0 movq 0x88(%rsp), %rax mulss (%rax), %xmm0 addss 0x68(%rsp), %xmm0 movss %xmm0, 0x68(%rsp) movq 0xa0(%rsp), %rax movss 0x4(%rax), %xmm0 movq 0x88(%rsp), %rax mulss 0x4(%rax), %xmm0 addss 0x68(%rsp), %xmm0 movss %xmm0, 0x68(%rsp) movq 0xa0(%rsp), %rax movss 0x8(%rax), %xmm0 movq 0x88(%rsp), %rax mulss 0x8(%rax), %xmm0 addss 0x68(%rsp), %xmm0 movss %xmm0, 0x68(%rsp) movq 0x98(%rsp), %rax movss (%rax), %xmm0 movq 0x80(%rsp), %rax mulss (%rax), %xmm0 addss 0x68(%rsp), %xmm0 movss %xmm0, 0x68(%rsp) movq 0x98(%rsp), %rax movss 0x4(%rax), %xmm0 movq 0x80(%rsp), %rax mulss 0x4(%rax), %xmm0 addss 0x68(%rsp), %xmm0 movss %xmm0, 0x68(%rsp) movq 0x98(%rsp), %rax movss 0x8(%rax), %xmm0 movq 0x80(%rsp), %rax mulss 0x8(%rax), %xmm0 addss 0x68(%rsp), %xmm0 movss %xmm0, 0x68(%rsp) movq 0x90(%rsp), %rax movss (%rax), %xmm0 movq 0x78(%rsp), %rax mulss (%rax), %xmm0 addss 0x68(%rsp), %xmm0 movss %xmm0, 0x68(%rsp) movq 0x90(%rsp), %rax movss 0x4(%rax), %xmm0 movq 0x78(%rsp), %rax mulss 0x4(%rax), %xmm0 addss 0x68(%rsp), %xmm0 movss %xmm0, 0x68(%rsp) movq 0x90(%rsp), %rax movss 0x8(%rax), %xmm0 movq 0x78(%rsp), %rax mulss 0x8(%rax), %xmm0 addss 0x68(%rsp), %xmm0 movss %xmm0, 0x68(%rsp) movss 0x6c(%rsp), %xmm0 movq 0x118(%rsp), %rax movss %xmm0, (%rax) movss 0x68(%rsp), %xmm0 movq 0x100(%rsp), %rax movss %xmm0, (%rax) movq 0xa8(%rsp), %rax addq $0x4, %rax movq %rax, 0xa8(%rsp) movq 0xa0(%rsp), %rax addq $0x4, %rax movq %rax, 0xa0(%rsp) movq 0x98(%rsp), %rax addq $0x4, %rax movq %rax, 0x98(%rsp) movq 0x90(%rsp), %rax addq $0x4, %rax movq %rax, 0x90(%rsp) movq 0x118(%rsp), %rax addq $0x4, %rax movq %rax, 0x118(%rsp) movq 0x100(%rsp), %rax addq $0x4, %rax movq %rax, 0x100(%rsp) movl 0x70(%rsp), %eax addl $-0x1, %eax movl %eax, 0x70(%rsp) jmp 0x16ad92f movl 0x19c(%rsp), %ecx addl $0x2, %ecx movq 0xa8(%rsp), %rax 
movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0xa8(%rsp) movl 0x19c(%rsp), %ecx addl $0x2, %ecx movq 0xa0(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0xa0(%rsp) movl 0x19c(%rsp), %ecx addl $0x2, %ecx movq 0x98(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x98(%rsp) movl 0x19c(%rsp), %ecx addl $0x2, %ecx movq 0x90(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x90(%rsp) movl 0x198(%rsp), %ecx movq 0x118(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x118(%rsp) movl 0x198(%rsp), %ecx movq 0x100(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x100(%rsp) movl 0x74(%rsp), %eax addl $0x2, %eax movl %eax, 0x74(%rsp) jmp 0x16ad910 jmp 0x16add76 movl 0x74(%rsp), %eax cmpl 0x194(%rsp), %eax jge 0x16adfb1 movl 0x198(%rsp), %eax movl %eax, 0x64(%rsp) cmpl $0x0, 0x64(%rsp) jle 0x16adf65 movss 0x12c(%rsp), %xmm0 movss %xmm0, 0x60(%rsp) movq 0xa8(%rsp), %rax movss (%rax), %xmm0 movq 0x88(%rsp), %rax mulss (%rax), %xmm0 addss 0x60(%rsp), %xmm0 movss %xmm0, 0x60(%rsp) movq 0xa8(%rsp), %rax movss 0x4(%rax), %xmm0 movq 0x88(%rsp), %rax mulss 0x4(%rax), %xmm0 addss 0x60(%rsp), %xmm0 movss %xmm0, 0x60(%rsp) movq 0xa8(%rsp), %rax movss 0x8(%rax), %xmm0 movq 0x88(%rsp), %rax mulss 0x8(%rax), %xmm0 addss 0x60(%rsp), %xmm0 movss %xmm0, 0x60(%rsp) movq 0xa0(%rsp), %rax movss (%rax), %xmm0 movq 0x80(%rsp), %rax mulss (%rax), %xmm0 addss 0x60(%rsp), %xmm0 movss %xmm0, 0x60(%rsp) movq 0xa0(%rsp), %rax movss 0x4(%rax), %xmm0 movq 0x80(%rsp), %rax mulss 0x4(%rax), %xmm0 addss 0x60(%rsp), %xmm0 movss %xmm0, 0x60(%rsp) movq 0xa0(%rsp), %rax movss 0x8(%rax), %xmm0 movq 0x80(%rsp), %rax mulss 0x8(%rax), %xmm0 addss 0x60(%rsp), %xmm0 movss %xmm0, 0x60(%rsp) movq 0x98(%rsp), %rax movss (%rax), %xmm0 movq 0x78(%rsp), %rax mulss (%rax), %xmm0 addss 0x60(%rsp), %xmm0 movss %xmm0, 0x60(%rsp) movq 0x98(%rsp), %rax movss 0x4(%rax), %xmm0 movq 0x78(%rsp), 
%rax mulss 0x4(%rax), %xmm0 addss 0x60(%rsp), %xmm0 movss %xmm0, 0x60(%rsp) movq 0x98(%rsp), %rax movss 0x8(%rax), %xmm0 movq 0x78(%rsp), %rax mulss 0x8(%rax), %xmm0 addss 0x60(%rsp), %xmm0 movss %xmm0, 0x60(%rsp) movss 0x60(%rsp), %xmm0 movq 0x118(%rsp), %rax movss %xmm0, (%rax) movq 0xa8(%rsp), %rax addq $0x4, %rax movq %rax, 0xa8(%rsp) movq 0xa0(%rsp), %rax addq $0x4, %rax movq %rax, 0xa0(%rsp) movq 0x98(%rsp), %rax addq $0x4, %rax movq %rax, 0x98(%rsp) movq 0x118(%rsp), %rax addq $0x4, %rax movq %rax, 0x118(%rsp) movl 0x64(%rsp), %eax addl $-0x1, %eax movl %eax, 0x64(%rsp) jmp 0x16add92 movq 0xa8(%rsp), %rax addq $0x8, %rax movq %rax, 0xa8(%rsp) movq 0xa0(%rsp), %rax addq $0x8, %rax movq %rax, 0xa0(%rsp) movq 0x98(%rsp), %rax addq $0x8, %rax movq %rax, 0x98(%rsp) movl 0x74(%rsp), %eax addl $0x1, %eax movl %eax, 0x74(%rsp) jmp 0x16add76 leaq 0x130(%rsp), %rax movq %rax, 0x1d8(%rsp) movq 0x1d8(%rsp), %rax movq %rax, 0x200(%rsp) movq 0x200(%rsp), %rax movq %rax, 0x8(%rsp) cmpq $0x0, 0x8(%rax) je 0x16ae06a movq 0x8(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x1fc(%rsp) # imm = 0xFFFFFFFF movl 0x1fc(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1f8(%rsp) cmpl $0x1, 0x1f8(%rsp) jne 0x16ae06a movq 0x8(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x16ae03e movq 0x8(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x16ae03c jmp 0x16ae068 movq 0x8(%rsp), %rax movq (%rax), %rax movq %rax, 0x290(%rsp) cmpq $0x0, 0x290(%rsp) je 0x16ae066 movq 0x290(%rsp), %rdi callq 0x5e480 jmp 0x16ae068 jmp 0x16ae06a movq 0x8(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x16ae0c2 movq %rax, %rdi callq 0x5fc90 jmp 0x16ae0c4 movl 0x17c(%rsp), %eax addl $0x1, %eax movl %eax, 0x17c(%rsp) jmp 0x16ad0a8 addq $0x368, %rsp # imm = 0x368 retq nopw 
%cs:(%rax,%rax)
/ysh329[P]ncnn/src/layer/x86/convolutiondepthwise_3x3.h
virtual thunk to ncnn::ConvolutionDepthWise_x86::forward(ncnn::Mat const&, ncnn::Mat&, ncnn::Option const&) const
// ConvolutionDepthWise_x86::forward — dispatch entry point for depthwise /
// grouped convolution on x86. Flow, as visible in this listing:
//   1. NCNN_INT8 builds: route to forward_int8_x86() when int8 inference and
//      int8_scale_term are both enabled.
//   2. Pad the input via make_padding(); bail with -100 if padding failed.
//      Output size comes from the dilated kernel extent and the strides;
//      out_elempack is chosen as 16/8/4/1 depending on the compiled ISA
//      (__AVX512F__/__AVX__/SSE2) and divisibility of num_output.
//   3. True depthwise case (channels * elempack == group == num_output):
//      call a specialized 3x3/5x5, stride-1/2 kernel for the current
//      elempack when the shape matches, otherwise run a generic packed
//      loop built on a precomputed space_ofs[] tap-offset table; the
//      activation layer (if any) is applied in place afterwards.
//   4. General grouped case: unpack input/output to g_elempack /
//      out_g_elempack as needed, forward each group through its own
//      sub-layer (group_ops[g]), then repack into top_blob.
// Returns 0 on success, -100 on allocation failure.
// NOTE(review): the body below is an exact flattened listing from the
// dataset; only this header comment was added.
int ConvolutionDepthWise_x86::forward(const Mat& bottom_blob, Mat& top_blob, const Option& opt) const { #if NCNN_INT8 if (opt.use_int8_inference && int8_scale_term) { return forward_int8_x86(bottom_blob, top_blob, opt); } #endif int w = bottom_blob.w; int h = bottom_blob.h; int channels = bottom_blob.c; size_t elemsize = bottom_blob.elemsize; int elempack = bottom_blob.elempack; const int kernel_extent_w = dilation_w * (kernel_w - 1) + 1; const int kernel_extent_h = dilation_h * (kernel_h - 1) + 1; Mat bottom_blob_bordered; make_padding(bottom_blob, bottom_blob_bordered, opt); if (bottom_blob_bordered.empty()) return -100; w = bottom_blob_bordered.w; h = bottom_blob_bordered.h; int outw = (w - kernel_extent_w) / stride_w + 1; int outh = (h - kernel_extent_h) / stride_h + 1; int out_elempack = 1; #if __SSE2__ if (opt.use_packing_layout) { #if __AVX512F__ out_elempack = num_output % 16 == 0 ? 16 : num_output % 8 == 0 ? 8 : num_output % 4 == 0 ? 4 : 1; #elif __AVX__ out_elempack = num_output % 8 == 0 ? 8 : num_output % 4 == 0 ? 4 : 1; #else out_elempack = num_output % 4 == 0 ? 
4 : 1; #endif } #endif // __SSE2__ size_t out_elemsize = elemsize / elempack * out_elempack; top_blob.create(outw, outh, num_output / out_elempack, out_elemsize, out_elempack, opt.blob_allocator); if (top_blob.empty()) return -100; // depth-wise if (channels * elempack == group && group == num_output) { #if __SSE2__ #if __AVX__ #if __AVX512F__ if (elempack == 16) { if (kernel_w == 3 && kernel_h == 3 && dilation_w == 1 && dilation_h == 1 && stride_w == 1 && stride_h == 1) { convdw3x3s1_pack16_avx512(bottom_blob_bordered, top_blob, weight_data_tm, bias_data, opt); if (activation) { activation->forward_inplace(top_blob, opt); } return 0; } if (kernel_w == 3 && kernel_h == 3 && dilation_w == 1 && dilation_h == 1 && stride_w == 2 && stride_h == 2) { convdw3x3s2_pack16_avx512(bottom_blob_bordered, top_blob, weight_data_tm, bias_data, opt); if (activation) { activation->forward_inplace(top_blob, opt); } return 0; } if (kernel_w == 5 && kernel_h == 5 && dilation_w == 1 && dilation_h == 1 && stride_w == 1 && stride_h == 1) { convdw5x5s1_pack16_avx512(bottom_blob_bordered, top_blob, weight_data_tm, bias_data, opt); if (activation) { activation->forward_inplace(top_blob, opt); } return 0; } if (kernel_w == 5 && kernel_h == 5 && dilation_w == 1 && dilation_h == 1 && stride_w == 2 && stride_h == 2) { convdw5x5s2_pack16_avx512(bottom_blob_bordered, top_blob, weight_data_tm, bias_data, opt); if (activation) { activation->forward_inplace(top_blob, opt); } return 0; } else { const int maxk = kernel_w * kernel_h; // kernel offsets std::vector<int> _space_ofs(maxk); int* space_ofs = &_space_ofs[0]; { int p1 = 0; int p2 = 0; int gap = w * dilation_h - kernel_w * dilation_w; for (int i = 0; i < kernel_h; i++) { for (int j = 0; j < kernel_w; j++) { space_ofs[p1] = p2; p1++; p2 += dilation_w; } p2 += gap; } } #pragma omp parallel for num_threads(opt.num_threads) for (int g = 0; g < channels; g++) { float* outptr = top_blob.channel(g); const float* kptr = (const float*)weight_data_tm + 
maxk * g * 16; const Mat m = bottom_blob_bordered.channel(g); for (int i = 0; i < outh; i++) { for (int j = 0; j < outw; j++) { __m512 _sum = _mm512_set1_ps(0.f); if (bias_term) { _sum = _mm512_loadu_ps(((const float*)bias_data) + g * 16); } const float* sptr = m.row(i * stride_h) + j * stride_w * 16; for (int k = 0; k < maxk; k++) { __m512 _val = _mm512_loadu_ps(sptr + space_ofs[k] * 16); __m512 _w = _mm512_loadu_ps(kptr + k * 16); _sum = _mm512_fmadd_ps(_val, _w, _sum); } _mm512_storeu_ps(outptr, _sum); outptr += 16; } } } if (activation) { activation->forward_inplace(top_blob, opt); } return 0; } } #endif // __AVX512F__ if (elempack == 8) { if (kernel_w == 3 && kernel_h == 3 && dilation_w == 1 && dilation_h == 1 && stride_w == 1 && stride_h == 1) { convdw3x3s1_pack8_avx(bottom_blob_bordered, top_blob, weight_data_tm, bias_data, opt); if (activation) { activation->forward_inplace(top_blob, opt); } return 0; } if (kernel_w == 3 && kernel_h == 3 && dilation_w == 1 && dilation_h == 1 && stride_w == 2 && stride_h == 2) { convdw3x3s2_pack8_avx(bottom_blob_bordered, top_blob, weight_data_tm, bias_data, opt); if (activation) { activation->forward_inplace(top_blob, opt); } return 0; } if (kernel_w == 5 && kernel_h == 5 && dilation_w == 1 && dilation_h == 1 && stride_w == 1 && stride_h == 1) { convdw5x5s1_pack8_avx(bottom_blob_bordered, top_blob, weight_data_tm, bias_data, opt); if (activation) { activation->forward_inplace(top_blob, opt); } return 0; } if (kernel_w == 5 && kernel_h == 5 && dilation_w == 1 && dilation_h == 1 && stride_w == 2 && stride_h == 2) { convdw5x5s2_pack8_avx(bottom_blob_bordered, top_blob, weight_data_tm, bias_data, opt); if (activation) { activation->forward_inplace(top_blob, opt); } return 0; } else { const int maxk = kernel_w * kernel_h; // kernel offsets std::vector<int> _space_ofs(maxk); int* space_ofs = &_space_ofs[0]; { int p1 = 0; int p2 = 0; int gap = w * dilation_h - kernel_w * dilation_w; for (int i = 0; i < kernel_h; i++) { for (int j 
= 0; j < kernel_w; j++) { space_ofs[p1] = p2; p1++; p2 += dilation_w; } p2 += gap; } } #pragma omp parallel for num_threads(opt.num_threads) for (int g = 0; g < channels; g++) { float* outptr = top_blob.channel(g); const float* kptr = (const float*)weight_data_tm + maxk * g * 8; const Mat m = bottom_blob_bordered.channel(g); for (int i = 0; i < outh; i++) { for (int j = 0; j < outw; j++) { __m256 _sum = _mm256_set1_ps(0.f); if (bias_term) { _sum = _mm256_loadu_ps(((const float*)bias_data) + g * 8); } const float* sptr = m.row(i * stride_h) + j * stride_w * 8; for (int k = 0; k < maxk; k++) { __m256 _val = _mm256_loadu_ps(sptr + space_ofs[k] * 8); __m256 _w = _mm256_loadu_ps(kptr + k * 8); _sum = _mm256_comp_fmadd_ps(_val, _w, _sum); } _mm256_storeu_ps(outptr + j * 8, _sum); } outptr += outw * 8; } } if (activation) { activation->forward_inplace(top_blob, opt); } return 0; } } #endif // __AVX__ if (elempack == 4) { if (kernel_w == 3 && kernel_h == 3 && dilation_w == 1 && dilation_h == 1 && stride_w == 1 && stride_h == 1) { convdw3x3s1_pack4_sse(bottom_blob_bordered, top_blob, weight_data_tm, bias_data, opt); if (activation) { activation->forward_inplace(top_blob, opt); } return 0; } if (kernel_w == 3 && kernel_h == 3 && dilation_w == 1 && dilation_h == 1 && stride_w == 2 && stride_h == 2) { convdw3x3s2_pack4_sse(bottom_blob_bordered, top_blob, weight_data_tm, bias_data, opt); if (activation) { activation->forward_inplace(top_blob, opt); } return 0; } if (kernel_w == 5 && kernel_h == 5 && dilation_w == 1 && dilation_h == 1 && stride_w == 1 && stride_h == 1) { convdw5x5s1_pack4_sse(bottom_blob_bordered, top_blob, weight_data_tm, bias_data, opt); if (activation) { activation->forward_inplace(top_blob, opt); } return 0; } if (kernel_w == 5 && kernel_h == 5 && dilation_w == 1 && dilation_h == 1 && stride_w == 2 && stride_h == 2) { convdw5x5s2_pack4_sse(bottom_blob_bordered, top_blob, weight_data_tm, bias_data, opt); if (activation) { activation->forward_inplace(top_blob, 
opt); } return 0; } { const int maxk = kernel_w * kernel_h; // kernel offsets std::vector<int> _space_ofs(maxk); int* space_ofs = &_space_ofs[0]; { int p1 = 0; int p2 = 0; int gap = w * dilation_h - kernel_w * dilation_w; for (int i = 0; i < kernel_h; i++) { for (int j = 0; j < kernel_w; j++) { space_ofs[p1] = p2; p1++; p2 += dilation_w; } p2 += gap; } } #pragma omp parallel for num_threads(opt.num_threads) for (int g = 0; g < channels; g++) { float* outptr = top_blob.channel(g); const float* kptr = (const float*)weight_data_tm + maxk * g * 4; const Mat m = bottom_blob_bordered.channel(g); for (int i = 0; i < outh; i++) { for (int j = 0; j < outw; j++) { __m128 _sum = _mm_set1_ps(0.f); if (bias_term) { _sum = _mm_loadu_ps(((const float*)bias_data) + g * 4); } const float* sptr = m.row(i * stride_h) + j * stride_w * 4; for (int k = 0; k < maxk; k++) { __m128 _val = _mm_loadu_ps(sptr + space_ofs[k] * 4); __m128 _w = _mm_loadu_ps(kptr + k * 4); _sum = _mm_add_ps(_mm_mul_ps(_val, _w), _sum); } _sum = activation_sse(_sum, activation_type, activation_params); _mm_storeu_ps(outptr + j * 4, _sum); } outptr += outw * 4; } } return 0; } } #endif // __SSE2__ if (elempack == 1) { if (kernel_w == 3 && kernel_h == 3 && dilation_w == 1 && dilation_h == 1 && stride_w == 1 && stride_h == 1) { convdw3x3s1_sse(bottom_blob_bordered, top_blob, weight_data_tm, bias_data, opt); if (activation) { activation->forward_inplace(top_blob, opt); } return 0; } if (kernel_w == 3 && kernel_h == 3 && dilation_w == 1 && dilation_h == 1 && stride_w == 2 && stride_h == 2) { convdw3x3s2_sse(bottom_blob_bordered, top_blob, weight_data_tm, bias_data, opt); if (activation) { activation->forward_inplace(top_blob, opt); } return 0; } } } // group convolution const int channels_g = channels * elempack / group; const int num_output_g = num_output / group; int g_elempack = 1; int out_g_elempack = 1; #if __SSE2__ if (opt.use_packing_layout) { #if __AVX512F__ g_elempack = channels_g % 16 == 0 ? 
16 : channels_g % 8 == 0 ? 8 : channels_g % 4 == 0 ? 4 : 1; out_g_elempack = num_output_g % 16 == 0 ? 16 : num_output_g % 8 == 0 ? 8 : num_output_g % 4 == 0 ? 4 : 1; #elif __AVX__ g_elempack = channels_g % 8 == 0 ? 8 : channels_g % 4 == 0 ? 4 : 1; out_g_elempack = num_output_g % 8 == 0 ? 8 : num_output_g % 4 == 0 ? 4 : 1; #else g_elempack = channels_g % 4 == 0 ? 4 : 1; out_g_elempack = num_output_g % 4 == 0 ? 4 : 1; #endif } #endif // __SSE2__ // unpacking Mat bottom_blob_bordered_unpacked = bottom_blob_bordered; if (elempack > g_elempack) { Option opt_p = opt; opt_p.blob_allocator = opt.workspace_allocator; convert_packing(bottom_blob_bordered, bottom_blob_bordered_unpacked, g_elempack, opt_p); } Mat top_blob_unpacked = top_blob; if (out_g_elempack < out_elempack) { top_blob_unpacked.create(outw, outh, num_output / out_g_elempack, out_elemsize / out_elempack * out_g_elempack, out_g_elempack, opt.workspace_allocator); if (top_blob_unpacked.empty()) return -100; } for (int g = 0; g < group; g++) { const Mat bottom_blob_bordered_g = bottom_blob_bordered_unpacked.channel_range(channels_g * g / g_elempack, channels_g / g_elempack); Mat top_blob_g = top_blob_unpacked.channel_range(num_output_g * g / out_g_elempack, num_output_g / out_g_elempack); const ncnn::Layer* op = group_ops[g]; Option opt_g = opt; opt_g.blob_allocator = top_blob_unpacked.allocator; // forward op->forward(bottom_blob_bordered_g, top_blob_g, opt_g); } // packing if (out_g_elempack < out_elempack) { convert_packing(top_blob_unpacked, top_blob, out_elempack, opt); } else { top_blob = top_blob_unpacked; } return 0; }
movq %rdi, -0x8(%rsp) movq %rsi, -0x10(%rsp) movq %rdx, -0x18(%rsp) movq %rcx, -0x20(%rsp) movq -0x8(%rsp), %rdi movq (%rdi), %rax movq -0x48(%rax), %rax addq %rax, %rdi movq -0x10(%rsp), %rsi movq -0x18(%rsp), %rdx movq -0x20(%rsp), %rcx jmp 0x16876d0 nopw (%rax,%rax)
/ysh329[P]ncnn/src/layer/x86/convolutiondepthwise_x86.cpp
ncnn::convdw3x3s1_int8_dequant_sse(ncnn::Mat const&, ncnn::Mat&, ncnn::Mat const&, ncnn::Mat const&, std::vector<float, std::allocator<float>>, ncnn::Option const&)
static void convdw3x3s1_int8_dequant_sse(const Mat& bottom_blob, Mat& top_blob, const Mat& _kernel, const Mat& _bias, std::vector<float> scales_dequant, const Option& opt) { int w = bottom_blob.w; //int h = bottom_blob.h; //int inch = bottom_blob.c; int outw = top_blob.w; int outh = top_blob.h; int outch = top_blob.c; const signed char* kernel = _kernel; const float* bias = _bias; #pragma omp parallel for num_threads(opt.num_threads) for (int p = 0; p < outch; p++) { Mat out = top_blob.channel(p); float* outptr = out; const float bias0 = bias ? bias[p] : 0.f; const float scale_dequant = scales_dequant[p]; out.fill(bias0); const signed char* kernel0 = (const signed char*)kernel + p * 9; const signed char* img0 = bottom_blob.channel(p); const signed char* r0 = img0; const signed char* r1 = img0 + w; const signed char* r2 = img0 + w * 2; int i = 0; for (; i < outh; i++) { int remain = outw; for (; remain > 0; remain--) { int sum = 0; sum += (int)r0[0] * (int)kernel0[0]; sum += (int)r0[1] * (int)kernel0[1]; sum += (int)r0[2] * (int)kernel0[2]; sum += (int)r1[0] * (int)kernel0[3]; sum += (int)r1[1] * (int)kernel0[4]; sum += (int)r1[2] * (int)kernel0[5]; sum += (int)r2[0] * (int)kernel0[6]; sum += (int)r2[1] * (int)kernel0[7]; sum += (int)r2[2] * (int)kernel0[8]; *outptr += (float)sum * scale_dequant; r0++; r1++; r2++; outptr++; } r0 += 2; r1 += 2; r2 += 2; } } }
subq $0x368, %rsp # imm = 0x368 movq %r8, 0x58(%rsp) movq %rdi, 0x198(%rsp) movq %rsi, 0x190(%rsp) movq %rdx, 0x188(%rsp) movq %rcx, 0x180(%rsp) movq %r8, 0x178(%rsp) movq %r9, 0x170(%rsp) movq 0x198(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x16c(%rsp) movq 0x190(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x168(%rsp) movq 0x190(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0x164(%rsp) movq 0x190(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0x160(%rsp) movq 0x188(%rsp), %rax movq %rax, 0x268(%rsp) movq 0x268(%rsp), %rax movq (%rax), %rax movq %rax, 0x158(%rsp) movq 0x180(%rsp), %rax movq %rax, 0x258(%rsp) movq 0x258(%rsp), %rax movq (%rax), %rax movq %rax, 0x150(%rsp) movl $0x0, 0x14c(%rsp) movl 0x14c(%rsp), %eax cmpl 0x160(%rsp), %eax jge 0x16b1c06 movq 0x190(%rsp), %rcx movl 0x14c(%rsp), %eax leaq 0x100(%rsp), %rdx movq %rdx, 0x230(%rsp) movq %rcx, 0x228(%rsp) movl %eax, 0x224(%rsp) movq 0x228(%rsp), %rax movq %rax, 0x50(%rsp) movb $0x0, 0x223(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x224(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x100(%rsp), %r10 movq %r10, 0x340(%rsp) movl %r9d, 0x33c(%rsp) movl %r8d, 0x338(%rsp) movl %edi, 0x334(%rsp) movq %rsi, 0x328(%rsp) movq %rdx, 0x320(%rsp) movl %ecx, 0x31c(%rsp) movq %rax, 0x310(%rsp) movq 0x340(%rsp), %rcx movq %rcx, 0x48(%rsp) movq 0x328(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x320(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x31c(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x310(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x33c(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x338(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x334(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x350(%rsp) movl $0x10, 0x34c(%rsp) movq 
0x350(%rsp), %rax movslq 0x34c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x34c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x50(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x128(%rsp) cmpl $0x4, 0x28(%rax) jne 0x16b11f9 movq 0x50(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x140(%rsp) movb $0x1, 0x223(%rsp) testb $0x1, 0x223(%rsp) jne 0x16b1320 leaq 0x100(%rsp), %rax movq %rax, 0x238(%rsp) movq 0x238(%rsp), %rax movq %rax, 0x248(%rsp) movq 0x248(%rsp), %rax movq %rax, 0x40(%rsp) cmpq $0x0, 0x8(%rax) je 0x16b12c8 movq 0x40(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x244(%rsp) # imm = 0xFFFFFFFF movl 0x244(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x240(%rsp) cmpl $0x1, 0x240(%rsp) jne 0x16b12c8 movq 0x40(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x16b129c movq 0x40(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x16b129a jmp 0x16b12c6 movq 0x40(%rsp), %rax movq (%rax), %rax movq %rax, 0x270(%rsp) cmpq $0x0, 0x270(%rsp) je 0x16b12c4 movq 0x270(%rsp), %rdi callq 0x5e480 jmp 0x16b12c6 jmp 0x16b12c8 movq 0x40(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x16b1320 movq %rax, %rdi callq 0x5fc90 leaq 0x100(%rsp), %rax movq %rax, 0x250(%rsp) movq 0x250(%rsp), %rax movq (%rax), %rax movq %rax, 0x38(%rsp) movq 0x38(%rsp), %rax movq %rax, 0xf8(%rsp) cmpq $0x0, 0x150(%rsp) je 0x16b1375 movq 0x150(%rsp), %rax movslq 0x14c(%rsp), %rcx movss (%rax,%rcx,4), %xmm0 movss %xmm0, 0x34(%rsp) jmp 0x16b1380 xorps %xmm0, %xmm0 movss %xmm0, 0x34(%rsp) jmp 0x16b1380 movq 0x58(%rsp), %rdi movss 0x34(%rsp), %xmm0 movss %xmm0, 0xe8(%rsp) movslq 0x14c(%rsp), %rsi callq 0xf86fc0 movss 
(%rax), %xmm0 movss %xmm0, 0xe4(%rsp) movss 0xe8(%rsp), %xmm0 leaq 0x100(%rsp), %rax movq %rax, 0x218(%rsp) movss %xmm0, 0x214(%rsp) movq 0x218(%rsp), %rax movq %rax, 0x298(%rsp) movq 0x298(%rsp), %rdx movq 0x40(%rdx), %rcx movslq 0x38(%rdx), %rdx imulq %rdx, %rcx movl %ecx, 0x210(%rsp) movq (%rax), %rax movq %rax, 0x208(%rsp) movl $0x0, 0x204(%rsp) movl 0x204(%rsp), %eax cmpl 0x210(%rsp), %eax jge 0x16b1458 movss 0x214(%rsp), %xmm0 movq 0x208(%rsp), %rax movq %rax, %rcx addq $0x4, %rcx movq %rcx, 0x208(%rsp) movss %xmm0, (%rax) movl 0x204(%rsp), %eax addl $0x1, %eax movl %eax, 0x204(%rsp) jmp 0x16b1411 jmp 0x16b145a movq 0x158(%rsp), %rax imull $0x9, 0x14c(%rsp), %ecx movslq %ecx, %rcx addq %rcx, %rax movq %rax, 0xd8(%rsp) movq 0x198(%rsp), %rcx movl 0x14c(%rsp), %eax leaq 0x88(%rsp), %rdx movq %rdx, 0x2b0(%rsp) movq %rcx, 0x2a8(%rsp) movl %eax, 0x2a4(%rsp) movq 0x2a8(%rsp), %rax movq %rax, 0x20(%rsp) movb $0x0, 0x2a3(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x2a4(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x88(%rsp), %r10 movq %r10, 0x308(%rsp) movl %r9d, 0x304(%rsp) movl %r8d, 0x300(%rsp) movl %edi, 0x2fc(%rsp) movq %rsi, 0x2f0(%rsp) movq %rdx, 0x2e8(%rsp) movl %ecx, 0x2e4(%rsp) movq %rax, 0x2d8(%rsp) movq 0x308(%rsp), %rcx movq %rcx, 0x28(%rsp) movq 0x2f0(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x2e8(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x2e4(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x2d8(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x304(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x300(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x2fc(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x360(%rsp) movl $0x10, 0x35c(%rsp) movq 0x360(%rsp), %rax movslq 0x35c(%rsp), 
%rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x35c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rcx movq 0x28(%rsp), %rax movq %rcx, 0x40(%rax) movq 0x20(%rsp), %rax movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0xb0(%rsp) cmpl $0x4, 0x28(%rax) jne 0x16b162d movq 0x20(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0xc8(%rsp) movb $0x1, 0x2a3(%rsp) testb $0x1, 0x2a3(%rsp) jne 0x16b1754 leaq 0x88(%rsp), %rax movq %rax, 0x2b8(%rsp) movq 0x2b8(%rsp), %rax movq %rax, 0x2c8(%rsp) movq 0x2c8(%rsp), %rax movq %rax, 0x18(%rsp) cmpq $0x0, 0x8(%rax) je 0x16b16fc movq 0x18(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x2c4(%rsp) # imm = 0xFFFFFFFF movl 0x2c4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x2c0(%rsp) cmpl $0x1, 0x2c0(%rsp) jne 0x16b16fc movq 0x18(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x16b16d0 movq 0x18(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x16b16ce jmp 0x16b16fa movq 0x18(%rsp), %rax movq (%rax), %rax movq %rax, 0x2d0(%rsp) cmpq $0x0, 0x2d0(%rsp) je 0x16b16f8 movq 0x2d0(%rsp), %rdi callq 0x5e480 jmp 0x16b16fa jmp 0x16b16fc movq 0x18(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x16b1754 movq %rax, %rdi callq 0x5fc90 jmp 0x16b1756 leaq 0x88(%rsp), %rax movq %rax, 0x260(%rsp) movq 0x260(%rsp), %rax movq (%rax), %rax movq %rax, 0x10(%rsp) leaq 0x88(%rsp), %rax movq %rax, 0x1a0(%rsp) movq 0x1a0(%rsp), %rax movq %rax, 0x1f8(%rsp) movq 0x1f8(%rsp), %rax movq %rax, 0x8(%rsp) cmpq $0x0, 0x8(%rax) je 0x16b182f movq 0x8(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x1f4(%rsp) # imm = 0xFFFFFFFF movl 0x1f4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1f0(%rsp) cmpl $0x1, 0x1f0(%rsp) jne 0x16b182f movq 
0x8(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x16b1803 movq 0x8(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x16b1801 jmp 0x16b182d movq 0x8(%rsp), %rax movq (%rax), %rax movq %rax, 0x278(%rsp) cmpq $0x0, 0x278(%rsp) je 0x16b182b movq 0x278(%rsp), %rdi callq 0x5e480 jmp 0x16b182d jmp 0x16b182f movq 0x8(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x16b1887 movq %rax, %rdi callq 0x5fc90 movq 0x10(%rsp), %rax movq %rax, 0xd0(%rsp) movq 0xd0(%rsp), %rax movq %rax, 0x80(%rsp) movq 0xd0(%rsp), %rax movslq 0x16c(%rsp), %rcx addq %rcx, %rax movq %rax, 0x78(%rsp) movq 0xd0(%rsp), %rax movl 0x16c(%rsp), %ecx shll %ecx movslq %ecx, %rcx addq %rcx, %rax movq %rax, 0x70(%rsp) movl $0x0, 0x6c(%rsp) movl 0x6c(%rsp), %eax cmpl 0x164(%rsp), %eax jge 0x16b1ae7 movl 0x168(%rsp), %eax movl %eax, 0x68(%rsp) cmpl $0x0, 0x68(%rsp) jle 0x16b1aa7 movl $0x0, 0x64(%rsp) movq 0x80(%rsp), %rax movsbl (%rax), %eax movq 0xd8(%rsp), %rcx movsbl (%rcx), %ecx imull %ecx, %eax addl 0x64(%rsp), %eax movl %eax, 0x64(%rsp) movq 0x80(%rsp), %rax movsbl 0x1(%rax), %eax movq 0xd8(%rsp), %rcx movsbl 0x1(%rcx), %ecx imull %ecx, %eax addl 0x64(%rsp), %eax movl %eax, 0x64(%rsp) movq 0x80(%rsp), %rax movsbl 0x2(%rax), %eax movq 0xd8(%rsp), %rcx movsbl 0x2(%rcx), %ecx imull %ecx, %eax addl 0x64(%rsp), %eax movl %eax, 0x64(%rsp) movq 0x78(%rsp), %rax movsbl (%rax), %eax movq 0xd8(%rsp), %rcx movsbl 0x3(%rcx), %ecx imull %ecx, %eax addl 0x64(%rsp), %eax movl %eax, 0x64(%rsp) movq 0x78(%rsp), %rax movsbl 0x1(%rax), %eax movq 0xd8(%rsp), %rcx movsbl 0x4(%rcx), %ecx imull %ecx, %eax addl 0x64(%rsp), %eax movl %eax, 0x64(%rsp) movq 0x78(%rsp), %rax movsbl 0x2(%rax), %eax movq 0xd8(%rsp), %rcx movsbl 0x5(%rcx), %ecx imull %ecx, %eax addl 0x64(%rsp), %eax movl %eax, 0x64(%rsp) 
movq 0x70(%rsp), %rax movsbl (%rax), %eax movq 0xd8(%rsp), %rcx movsbl 0x6(%rcx), %ecx imull %ecx, %eax addl 0x64(%rsp), %eax movl %eax, 0x64(%rsp) movq 0x70(%rsp), %rax movsbl 0x1(%rax), %eax movq 0xd8(%rsp), %rcx movsbl 0x7(%rcx), %ecx imull %ecx, %eax addl 0x64(%rsp), %eax movl %eax, 0x64(%rsp) movq 0x70(%rsp), %rax movsbl 0x2(%rax), %eax movq 0xd8(%rsp), %rcx movsbl 0x8(%rcx), %ecx imull %ecx, %eax addl 0x64(%rsp), %eax movl %eax, 0x64(%rsp) cvtsi2ssl 0x64(%rsp), %xmm0 mulss 0xe4(%rsp), %xmm0 movq 0xf8(%rsp), %rax addss (%rax), %xmm0 movss %xmm0, (%rax) movq 0x80(%rsp), %rax addq $0x1, %rax movq %rax, 0x80(%rsp) movq 0x78(%rsp), %rax addq $0x1, %rax movq %rax, 0x78(%rsp) movq 0x70(%rsp), %rax addq $0x1, %rax movq %rax, 0x70(%rsp) movq 0xf8(%rsp), %rax addq $0x4, %rax movq %rax, 0xf8(%rsp) movl 0x68(%rsp), %eax addl $-0x1, %eax movl %eax, 0x68(%rsp) jmp 0x16b18fc movq 0x80(%rsp), %rax addq $0x2, %rax movq %rax, 0x80(%rsp) movq 0x78(%rsp), %rax addq $0x2, %rax movq %rax, 0x78(%rsp) movq 0x70(%rsp), %rax addq $0x2, %rax movq %rax, 0x70(%rsp) movl 0x6c(%rsp), %eax addl $0x1, %eax movl %eax, 0x6c(%rsp) jmp 0x16b18e0 leaq 0x100(%rsp), %rax movq %rax, 0x1b0(%rsp) movq 0x1b0(%rsp), %rax movq %rax, 0x1d8(%rsp) movq 0x1d8(%rsp), %rax movq %rax, (%rsp) cmpq $0x0, 0x8(%rax) je 0x16b1b97 movq (%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x1d4(%rsp) # imm = 0xFFFFFFFF movl 0x1d4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1d0(%rsp) cmpl $0x1, 0x1d0(%rsp) jne 0x16b1b97 movq (%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x16b1b6c movq (%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x16b1b6a jmp 0x16b1b95 movq (%rsp), %rax movq (%rax), %rax movq %rax, 0x288(%rsp) cmpq $0x0, 0x288(%rsp) je 0x16b1b93 movq 0x288(%rsp), %rdi callq 0x5e480 jmp 0x16b1b95 jmp 0x16b1b97 movq (%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl 
$0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x16b1bee movq %rax, %rdi callq 0x5fc90 jmp 0x16b1bf0 movl 0x14c(%rsp), %eax addl $0x1, %eax movl %eax, 0x14c(%rsp) jmp 0x16b1035 addq $0x368, %rsp # imm = 0x368 retq nop
/ysh329[P]ncnn/src/layer/x86/convolutiondepthwise_3x3_int8.h
ncnn::convdw3x3s2_sse(ncnn::Mat const&, ncnn::Mat&, ncnn::Mat const&, ncnn::Mat const&, ncnn::Option const&)
static void convdw3x3s2_sse(const Mat& bottom_blob, Mat& top_blob, const Mat& _kernel, const Mat& _bias, const Option& opt) { int w = bottom_blob.w; int outw = top_blob.w; int outh = top_blob.h; const int group = bottom_blob.c; const int tailstep = w - 2 * outw + w; const float* kernel = _kernel; const float* bias = _bias; #pragma omp parallel for num_threads(opt.num_threads) for (int g = 0; g < group; g++) { Mat out = top_blob.channel(g); const float bias0 = bias ? bias[g] : 0.f; const float* kernel0 = kernel + g * 9; float* outptr = out; const float* img0 = bottom_blob.channel(g); const float* r0 = img0; const float* r1 = img0 + w; const float* r2 = img0 + w * 2; const float* k0 = kernel0; const float* k1 = kernel0 + 3; const float* k2 = kernel0 + 6; int i = 0; for (; i < outh; i++) { int remain = outw; for (; remain > 0; remain--) { float sum = bias0; sum += r0[0] * k0[0]; sum += r0[1] * k0[1]; sum += r0[2] * k0[2]; sum += r1[0] * k1[0]; sum += r1[1] * k1[1]; sum += r1[2] * k1[2]; sum += r2[0] * k2[0]; sum += r2[1] * k2[1]; sum += r2[2] * k2[2]; *outptr = sum; r0 += 2; r1 += 2; r2 += 2; outptr++; } r0 += tailstep; r1 += tailstep; r2 += tailstep; } } }
subq $0x358, %rsp # imm = 0x358 movq %rdi, 0x1b0(%rsp) movq %rsi, 0x1a8(%rsp) movq %rdx, 0x1a0(%rsp) movq %rcx, 0x198(%rsp) movq %r8, 0x190(%rsp) movq 0x1b0(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x18c(%rsp) movq 0x1a8(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x188(%rsp) movq 0x1a8(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0x184(%rsp) movq 0x1b0(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0x180(%rsp) movl 0x18c(%rsp), %eax movl 0x188(%rsp), %ecx shll %ecx subl %ecx, %eax addl 0x18c(%rsp), %eax movl %eax, 0x17c(%rsp) movq 0x1a0(%rsp), %rax movq %rax, 0x260(%rsp) movq 0x260(%rsp), %rax movq (%rax), %rax movq %rax, 0x170(%rsp) movq 0x198(%rsp), %rax movq %rax, 0x258(%rsp) movq 0x258(%rsp), %rax movq (%rax), %rax movq %rax, 0x168(%rsp) movl $0x0, 0x164(%rsp) movl 0x164(%rsp), %eax cmpl 0x180(%rsp), %eax jge 0x1753aef movq 0x1a8(%rsp), %rcx movl 0x164(%rsp), %eax leaq 0x118(%rsp), %rdx movq %rdx, 0x228(%rsp) movq %rcx, 0x220(%rsp) movl %eax, 0x21c(%rsp) movq 0x220(%rsp), %rax movq %rax, 0x58(%rsp) movb $0x0, 0x21b(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x21c(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x118(%rsp), %r10 movq %r10, 0x330(%rsp) movl %r9d, 0x32c(%rsp) movl %r8d, 0x328(%rsp) movl %edi, 0x324(%rsp) movq %rsi, 0x318(%rsp) movq %rdx, 0x310(%rsp) movl %ecx, 0x30c(%rsp) movq %rax, 0x300(%rsp) movq 0x330(%rsp), %rcx movq %rcx, 0x50(%rsp) movq 0x318(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x310(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x30c(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x300(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x32c(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x328(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x324(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 
0x10(%rcx), %rax movq %rax, 0x340(%rsp) movl $0x10, 0x33c(%rsp) movq 0x340(%rsp), %rax movslq 0x33c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x33c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x58(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x140(%rsp) cmpl $0x4, 0x28(%rax) jne 0x175310c movq 0x58(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x158(%rsp) movb $0x1, 0x21b(%rsp) testb $0x1, 0x21b(%rsp) jne 0x1753233 leaq 0x118(%rsp), %rax movq %rax, 0x230(%rsp) movq 0x230(%rsp), %rax movq %rax, 0x240(%rsp) movq 0x240(%rsp), %rax movq %rax, 0x48(%rsp) cmpq $0x0, 0x8(%rax) je 0x17531db movq 0x48(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x23c(%rsp) # imm = 0xFFFFFFFF movl 0x23c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x238(%rsp) cmpl $0x1, 0x238(%rsp) jne 0x17531db movq 0x48(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x17531af movq 0x48(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x17531ad jmp 0x17531d9 movq 0x48(%rsp), %rax movq (%rax), %rax movq %rax, 0x268(%rsp) cmpq $0x0, 0x268(%rsp) je 0x17531d7 movq 0x268(%rsp), %rdi callq 0x5e480 jmp 0x17531d9 jmp 0x17531db movq 0x48(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1753233 movq %rax, %rdi callq 0x5fc90 cmpq $0x0, 0x168(%rsp) je 0x175325b movq 0x168(%rsp), %rax movslq 0x164(%rsp), %rcx vmovss (%rax,%rcx,4), %xmm0 vmovss %xmm0, 0x44(%rsp) jmp 0x1753267 vxorps %xmm0, %xmm0, %xmm0 vmovss %xmm0, 0x44(%rsp) jmp 0x1753267 vmovss 0x44(%rsp), %xmm0 vmovss %xmm0, 0x114(%rsp) movq 0x170(%rsp), %rax imull $0x9, 0x164(%rsp), %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x108(%rsp) leaq 
0x118(%rsp), %rax movq %rax, 0x248(%rsp) movq 0x248(%rsp), %rax movq (%rax), %rax movq %rax, 0x38(%rsp) movq 0x38(%rsp), %rax movq %rax, 0x100(%rsp) movq 0x1b0(%rsp), %rcx movl 0x164(%rsp), %eax leaq 0xa0(%rsp), %rdx movq %rdx, 0x2a0(%rsp) movq %rcx, 0x298(%rsp) movl %eax, 0x294(%rsp) movq 0x298(%rsp), %rax movq %rax, 0x28(%rsp) movb $0x0, 0x293(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x294(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0xa0(%rsp), %r10 movq %r10, 0x2f8(%rsp) movl %r9d, 0x2f4(%rsp) movl %r8d, 0x2f0(%rsp) movl %edi, 0x2ec(%rsp) movq %rsi, 0x2e0(%rsp) movq %rdx, 0x2d8(%rsp) movl %ecx, 0x2d4(%rsp) movq %rax, 0x2c8(%rsp) movq 0x2f8(%rsp), %rcx movq %rcx, 0x30(%rsp) movq 0x2e0(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x2d8(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x2d4(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x2c8(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x2f4(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x2f0(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x2ec(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x350(%rsp) movl $0x10, 0x34c(%rsp) movq 0x350(%rsp), %rax movslq 0x34c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x34c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rcx movq 0x30(%rsp), %rax movq %rcx, 0x40(%rax) movq 0x28(%rsp), %rax movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0xc8(%rsp) cmpl $0x4, 0x28(%rax) jne 0x175347a movq 0x28(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0xe0(%rsp) movb $0x1, 0x293(%rsp) testb $0x1, 0x293(%rsp) jne 0x17535a1 leaq 0xa0(%rsp), %rax movq %rax, 0x2a8(%rsp) movq 0x2a8(%rsp), %rax movq %rax, 0x2b8(%rsp) movq 
0x2b8(%rsp), %rax movq %rax, 0x20(%rsp) cmpq $0x0, 0x8(%rax) je 0x1753549 movq 0x20(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x2b4(%rsp) # imm = 0xFFFFFFFF movl 0x2b4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x2b0(%rsp) cmpl $0x1, 0x2b0(%rsp) jne 0x1753549 movq 0x20(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x175351d movq 0x20(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x175351b jmp 0x1753547 movq 0x20(%rsp), %rax movq (%rax), %rax movq %rax, 0x2c0(%rsp) cmpq $0x0, 0x2c0(%rsp) je 0x1753545 movq 0x2c0(%rsp), %rdi callq 0x5e480 jmp 0x1753547 jmp 0x1753549 movq 0x20(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x17535a1 movq %rax, %rdi callq 0x5fc90 jmp 0x17535a3 leaq 0xa0(%rsp), %rax movq %rax, 0x250(%rsp) movq 0x250(%rsp), %rax movq (%rax), %rax movq %rax, 0x18(%rsp) leaq 0xa0(%rsp), %rax movq %rax, 0x1b8(%rsp) movq 0x1b8(%rsp), %rax movq %rax, 0x210(%rsp) movq 0x210(%rsp), %rax movq %rax, 0x10(%rsp) cmpq $0x0, 0x8(%rax) je 0x175367c movq 0x10(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x20c(%rsp) # imm = 0xFFFFFFFF movl 0x20c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x208(%rsp) cmpl $0x1, 0x208(%rsp) jne 0x175367c movq 0x10(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1753650 movq 0x10(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x175364e jmp 0x175367a movq 0x10(%rsp), %rax movq (%rax), %rax movq %rax, 0x270(%rsp) cmpq $0x0, 0x270(%rsp) je 0x1753678 movq 0x270(%rsp), %rdi callq 0x5e480 jmp 0x175367a jmp 0x175367c movq 0x10(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 
0x8(%rax) jmp 0x17536d4 movq %rax, %rdi callq 0x5fc90 movq 0x18(%rsp), %rax movq %rax, 0xe8(%rsp) movq 0xe8(%rsp), %rax movq %rax, 0x98(%rsp) movq 0xe8(%rsp), %rax movslq 0x18c(%rsp), %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x90(%rsp) movq 0xe8(%rsp), %rax movl 0x18c(%rsp), %ecx shll %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x88(%rsp) movq 0x108(%rsp), %rax movq %rax, 0x80(%rsp) movq 0x108(%rsp), %rax addq $0xc, %rax movq %rax, 0x78(%rsp) movq 0x108(%rsp), %rax addq $0x18, %rax movq %rax, 0x70(%rsp) movl $0x0, 0x6c(%rsp) movl 0x6c(%rsp), %eax cmpl 0x184(%rsp), %eax jge 0x17539c6 movl 0x188(%rsp), %eax movl %eax, 0x68(%rsp) cmpl $0x0, 0x68(%rsp) jle 0x1753953 vmovss 0x114(%rsp), %xmm0 vmovss %xmm0, 0x64(%rsp) movq 0x98(%rsp), %rax vmovss (%rax), %xmm0 movq 0x80(%rsp), %rax vmulss (%rax), %xmm0, %xmm0 vaddss 0x64(%rsp), %xmm0, %xmm0 vmovss %xmm0, 0x64(%rsp) movq 0x98(%rsp), %rax vmovss 0x4(%rax), %xmm0 movq 0x80(%rsp), %rax vmulss 0x4(%rax), %xmm0, %xmm0 vaddss 0x64(%rsp), %xmm0, %xmm0 vmovss %xmm0, 0x64(%rsp) movq 0x98(%rsp), %rax vmovss 0x8(%rax), %xmm0 movq 0x80(%rsp), %rax vmulss 0x8(%rax), %xmm0, %xmm0 vaddss 0x64(%rsp), %xmm0, %xmm0 vmovss %xmm0, 0x64(%rsp) movq 0x90(%rsp), %rax vmovss (%rax), %xmm0 movq 0x78(%rsp), %rax vmulss (%rax), %xmm0, %xmm0 vaddss 0x64(%rsp), %xmm0, %xmm0 vmovss %xmm0, 0x64(%rsp) movq 0x90(%rsp), %rax vmovss 0x4(%rax), %xmm0 movq 0x78(%rsp), %rax vmulss 0x4(%rax), %xmm0, %xmm0 vaddss 0x64(%rsp), %xmm0, %xmm0 vmovss %xmm0, 0x64(%rsp) movq 0x90(%rsp), %rax vmovss 0x8(%rax), %xmm0 movq 0x78(%rsp), %rax vmulss 0x8(%rax), %xmm0, %xmm0 vaddss 0x64(%rsp), %xmm0, %xmm0 vmovss %xmm0, 0x64(%rsp) movq 0x88(%rsp), %rax vmovss (%rax), %xmm0 movq 0x70(%rsp), %rax vmulss (%rax), %xmm0, %xmm0 vaddss 0x64(%rsp), %xmm0, %xmm0 vmovss %xmm0, 0x64(%rsp) movq 0x88(%rsp), %rax vmovss 0x4(%rax), %xmm0 movq 0x70(%rsp), %rax vmulss 0x4(%rax), %xmm0, %xmm0 vaddss 0x64(%rsp), %xmm0, %xmm0 vmovss %xmm0, 0x64(%rsp) movq 0x88(%rsp), 
%rax vmovss 0x8(%rax), %xmm0 movq 0x70(%rsp), %rax vmulss 0x8(%rax), %xmm0, %xmm0 vaddss 0x64(%rsp), %xmm0, %xmm0 vmovss %xmm0, 0x64(%rsp) vmovss 0x64(%rsp), %xmm0 movq 0x100(%rsp), %rax vmovss %xmm0, (%rax) movq 0x98(%rsp), %rax addq $0x8, %rax movq %rax, 0x98(%rsp) movq 0x90(%rsp), %rax addq $0x8, %rax movq %rax, 0x90(%rsp) movq 0x88(%rsp), %rax addq $0x8, %rax movq %rax, 0x88(%rsp) movq 0x100(%rsp), %rax addq $0x4, %rax movq %rax, 0x100(%rsp) movl 0x68(%rsp), %eax addl $-0x1, %eax movl %eax, 0x68(%rsp) jmp 0x1753789 movl 0x17c(%rsp), %ecx movq 0x98(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x98(%rsp) movl 0x17c(%rsp), %ecx movq 0x90(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x90(%rsp) movl 0x17c(%rsp), %ecx movq 0x88(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x88(%rsp) movl 0x6c(%rsp), %eax addl $0x1, %eax movl %eax, 0x6c(%rsp) jmp 0x175376d leaq 0x118(%rsp), %rax movq %rax, 0x1c8(%rsp) movq 0x1c8(%rsp), %rax movq %rax, 0x1f0(%rsp) movq 0x1f0(%rsp), %rax movq %rax, 0x8(%rsp) cmpq $0x0, 0x8(%rax) je 0x1753a7f movq 0x8(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x1ec(%rsp) # imm = 0xFFFFFFFF movl 0x1ec(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1e8(%rsp) cmpl $0x1, 0x1e8(%rsp) jne 0x1753a7f movq 0x8(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1753a53 movq 0x8(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1753a51 jmp 0x1753a7d movq 0x8(%rsp), %rax movq (%rax), %rax movq %rax, 0x280(%rsp) cmpq $0x0, 0x280(%rsp) je 0x1753a7b movq 0x280(%rsp), %rdi callq 0x5e480 jmp 0x1753a7d jmp 0x1753a7f movq 0x8(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1753ad7 movq %rax, %rdi callq 0x5fc90 jmp 0x1753ad9 movl 0x164(%rsp), %eax addl 
$0x1, %eax movl %eax, 0x164(%rsp) jmp 0x1752f48 addq $0x358, %rsp # imm = 0x358 retq nopw (%rax,%rax)
/ysh329[P]ncnn/src/layer/x86/convolutiondepthwise_3x3.h
ncnn::convdw3x3s1_int8_requant_sse(ncnn::Mat const&, ncnn::Mat&, ncnn::Mat const&, ncnn::Mat const&, std::vector<float, std::allocator<float>>, ncnn::Option const&)
static void convdw3x3s1_int8_requant_sse(const Mat& bottom_blob, Mat& top_blob, const Mat& _kernel, const Mat& _bias, std::vector<float> scales_requant, const Option& opt) { int w = bottom_blob.w; //int h = bottom_blob.h; //int inch = bottom_blob.c; int outw = top_blob.w; int outh = top_blob.h; int outch = top_blob.c; const signed char* kernel = _kernel; const float* bias = _bias; #pragma omp parallel for num_threads(opt.num_threads) for (int p = 0; p < outch; p++) { Mat out = top_blob.channel(p); signed char* outptr = out; const float bias0 = bias ? bias[p] : 0.f; const float scale_requant_in = scales_requant[2 * p]; const float scale_requant_out = scales_requant[2 * p + 1]; const signed char* kernel0 = (const signed char*)kernel + p * 9; const signed char* img0 = bottom_blob.channel(p); const signed char* r0 = img0; const signed char* r1 = img0 + w; const signed char* r2 = img0 + w * 2; int i = 0; for (; i < outh; i++) { int remain = outw; for (; remain > 0; remain--) { int sum = 0; sum += (int)r0[0] * (int)kernel0[0]; sum += (int)r0[1] * (int)kernel0[1]; sum += (int)r0[2] * (int)kernel0[2]; sum += (int)r1[0] * (int)kernel0[3]; sum += (int)r1[1] * (int)kernel0[4]; sum += (int)r1[2] * (int)kernel0[5]; sum += (int)r2[0] * (int)kernel0[6]; sum += (int)r2[1] * (int)kernel0[7]; sum += (int)r2[2] * (int)kernel0[8]; *outptr = float2int8(((float)sum * scale_requant_in + bias0) * scale_requant_out); r0++; r1++; r2++; outptr++; } r0 += 2; r1 += 2; r2 += 2; } } }
subq $0x358, %rsp # imm = 0x358 movq %r8, 0x60(%rsp) movq %rdi, 0x1a0(%rsp) movq %rsi, 0x198(%rsp) movq %rdx, 0x190(%rsp) movq %rcx, 0x188(%rsp) movq %r8, 0x180(%rsp) movq %r9, 0x178(%rsp) movq 0x1a0(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x174(%rsp) movq 0x198(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x170(%rsp) movq 0x198(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0x16c(%rsp) movq 0x198(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0x168(%rsp) movq 0x190(%rsp), %rax movq %rax, 0x250(%rsp) movq 0x250(%rsp), %rax movq (%rax), %rax movq %rax, 0x160(%rsp) movq 0x188(%rsp), %rax movq %rax, 0x238(%rsp) movq 0x238(%rsp), %rax movq (%rax), %rax movq %rax, 0x158(%rsp) movl $0x0, 0x154(%rsp) movl 0x154(%rsp), %eax cmpl 0x168(%rsp), %eax jge 0x1755cc0 movq 0x198(%rsp), %rcx movl 0x154(%rsp), %eax leaq 0x108(%rsp), %rdx movq %rdx, 0x218(%rsp) movq %rcx, 0x210(%rsp) movl %eax, 0x20c(%rsp) movq 0x210(%rsp), %rax movq %rax, 0x58(%rsp) movb $0x0, 0x20b(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x20c(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x108(%rsp), %r10 movq %r10, 0x330(%rsp) movl %r9d, 0x32c(%rsp) movl %r8d, 0x328(%rsp) movl %edi, 0x324(%rsp) movq %rsi, 0x318(%rsp) movq %rdx, 0x310(%rsp) movl %ecx, 0x30c(%rsp) movq %rax, 0x300(%rsp) movq 0x330(%rsp), %rcx movq %rcx, 0x50(%rsp) movq 0x318(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x310(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x30c(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x300(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x32c(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x328(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x324(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x340(%rsp) movl $0x10, 0x33c(%rsp) movq 
0x340(%rsp), %rax movslq 0x33c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x33c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x58(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x130(%rsp) cmpl $0x4, 0x28(%rax) jne 0x17552a9 movq 0x58(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x148(%rsp) movb $0x1, 0x20b(%rsp) testb $0x1, 0x20b(%rsp) jne 0x17553d0 leaq 0x108(%rsp), %rax movq %rax, 0x220(%rsp) movq 0x220(%rsp), %rax movq %rax, 0x230(%rsp) movq 0x230(%rsp), %rax movq %rax, 0x48(%rsp) cmpq $0x0, 0x8(%rax) je 0x1755378 movq 0x48(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x22c(%rsp) # imm = 0xFFFFFFFF movl 0x22c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x228(%rsp) cmpl $0x1, 0x228(%rsp) jne 0x1755378 movq 0x48(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x175534c movq 0x48(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x175534a jmp 0x1755376 movq 0x48(%rsp), %rax movq (%rax), %rax movq %rax, 0x268(%rsp) cmpq $0x0, 0x268(%rsp) je 0x1755374 movq 0x268(%rsp), %rdi callq 0x5e480 jmp 0x1755376 jmp 0x1755378 movq 0x48(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x17553d0 movq %rax, %rdi callq 0x5fc90 leaq 0x108(%rsp), %rax movq %rax, 0x240(%rsp) movq 0x240(%rsp), %rax movq (%rax), %rax movq %rax, 0x40(%rsp) movq 0x40(%rsp), %rax movq %rax, 0x100(%rsp) cmpq $0x0, 0x158(%rsp) je 0x1755425 movq 0x158(%rsp), %rax movslq 0x154(%rsp), %rcx vmovss (%rax,%rcx,4), %xmm0 vmovss %xmm0, 0x3c(%rsp) jmp 0x1755431 vxorps %xmm0, %xmm0, %xmm0 vmovss %xmm0, 0x3c(%rsp) jmp 0x1755431 movq 0x60(%rsp), %rdi vmovss 0x3c(%rsp), %xmm0 vmovss %xmm0, 0xf0(%rsp) movl 0x154(%rsp), %eax shll %eax 
movslq %eax, %rsi callq 0xf86fc0 movq 0x60(%rsp), %rdi vmovss (%rax), %xmm0 vmovss %xmm0, 0xec(%rsp) movl 0x154(%rsp), %eax shll %eax addl $0x1, %eax movslq %eax, %rsi callq 0xf86fc0 vmovss (%rax), %xmm0 vmovss %xmm0, 0xe8(%rsp) movq 0x160(%rsp), %rax imull $0x9, 0x154(%rsp), %ecx movslq %ecx, %rcx addq %rcx, %rax movq %rax, 0xe0(%rsp) movq 0x1a0(%rsp), %rcx movl 0x154(%rsp), %eax leaq 0x90(%rsp), %rdx movq %rdx, 0x2a0(%rsp) movq %rcx, 0x298(%rsp) movl %eax, 0x294(%rsp) movq 0x298(%rsp), %rax movq %rax, 0x28(%rsp) movb $0x0, 0x293(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x294(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x90(%rsp), %r10 movq %r10, 0x2f8(%rsp) movl %r9d, 0x2f4(%rsp) movl %r8d, 0x2f0(%rsp) movl %edi, 0x2ec(%rsp) movq %rsi, 0x2e0(%rsp) movq %rdx, 0x2d8(%rsp) movl %ecx, 0x2d4(%rsp) movq %rax, 0x2c8(%rsp) movq 0x2f8(%rsp), %rcx movq %rcx, 0x30(%rsp) movq 0x2e0(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x2d8(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x2d4(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x2c8(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x2f4(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x2f0(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x2ec(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x350(%rsp) movl $0x10, 0x34c(%rsp) movq 0x350(%rsp), %rax movslq 0x34c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x34c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rcx movq 0x30(%rsp), %rax movq %rcx, 0x40(%rax) movq 0x28(%rsp), %rax movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0xb8(%rsp) cmpl $0x4, 0x28(%rax) jne 0x175565c movq 0x28(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq 
%rcx, %rax movq %rax, 0xd0(%rsp) movb $0x1, 0x293(%rsp) testb $0x1, 0x293(%rsp) jne 0x1755783 leaq 0x90(%rsp), %rax movq %rax, 0x2a8(%rsp) movq 0x2a8(%rsp), %rax movq %rax, 0x2b8(%rsp) movq 0x2b8(%rsp), %rax movq %rax, 0x20(%rsp) cmpq $0x0, 0x8(%rax) je 0x175572b movq 0x20(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x2b4(%rsp) # imm = 0xFFFFFFFF movl 0x2b4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x2b0(%rsp) cmpl $0x1, 0x2b0(%rsp) jne 0x175572b movq 0x20(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x17556ff movq 0x20(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x17556fd jmp 0x1755729 movq 0x20(%rsp), %rax movq (%rax), %rax movq %rax, 0x2c0(%rsp) cmpq $0x0, 0x2c0(%rsp) je 0x1755727 movq 0x2c0(%rsp), %rdi callq 0x5e480 jmp 0x1755729 jmp 0x175572b movq 0x20(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1755783 movq %rax, %rdi callq 0x5fc90 jmp 0x1755785 leaq 0x90(%rsp), %rax movq %rax, 0x248(%rsp) movq 0x248(%rsp), %rax movq (%rax), %rax movq %rax, 0x18(%rsp) leaq 0x90(%rsp), %rax movq %rax, 0x1a8(%rsp) movq 0x1a8(%rsp), %rax movq %rax, 0x200(%rsp) movq 0x200(%rsp), %rax movq %rax, 0x10(%rsp) cmpq $0x0, 0x8(%rax) je 0x175585e movq 0x10(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x1fc(%rsp) # imm = 0xFFFFFFFF movl 0x1fc(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1f8(%rsp) cmpl $0x1, 0x1f8(%rsp) jne 0x175585e movq 0x10(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1755832 movq 0x10(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1755830 jmp 0x175585c movq 0x10(%rsp), %rax movq (%rax), %rax movq %rax, 0x270(%rsp) cmpq $0x0, 0x270(%rsp) je 0x175585a movq 0x270(%rsp), %rdi callq 0x5e480 jmp 0x175585c jmp 0x175585e movq 0x10(%rsp), %rax movq $0x0, 
(%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x17558b6 movq %rax, %rdi callq 0x5fc90 movq 0x18(%rsp), %rax movq %rax, 0xd8(%rsp) movq 0xd8(%rsp), %rax movq %rax, 0x88(%rsp) movq 0xd8(%rsp), %rax movslq 0x174(%rsp), %rcx addq %rcx, %rax movq %rax, 0x80(%rsp) movq 0xd8(%rsp), %rax movl 0x174(%rsp), %ecx shll %ecx movslq %ecx, %rcx addq %rcx, %rax movq %rax, 0x78(%rsp) movl $0x0, 0x74(%rsp) movl 0x74(%rsp), %eax cmpl 0x16c(%rsp), %eax jge 0x1755ba1 movl 0x170(%rsp), %eax movl %eax, 0x70(%rsp) cmpl $0x0, 0x70(%rsp) jle 0x1755b5b movl $0x0, 0x6c(%rsp) movq 0x88(%rsp), %rax movsbl (%rax), %eax movq 0xe0(%rsp), %rcx movsbl (%rcx), %ecx imull %ecx, %eax addl 0x6c(%rsp), %eax movl %eax, 0x6c(%rsp) movq 0x88(%rsp), %rax movsbl 0x1(%rax), %eax movq 0xe0(%rsp), %rcx movsbl 0x1(%rcx), %ecx imull %ecx, %eax addl 0x6c(%rsp), %eax movl %eax, 0x6c(%rsp) movq 0x88(%rsp), %rax movsbl 0x2(%rax), %eax movq 0xe0(%rsp), %rcx movsbl 0x2(%rcx), %ecx imull %ecx, %eax addl 0x6c(%rsp), %eax movl %eax, 0x6c(%rsp) movq 0x80(%rsp), %rax movsbl (%rax), %eax movq 0xe0(%rsp), %rcx movsbl 0x3(%rcx), %ecx imull %ecx, %eax addl 0x6c(%rsp), %eax movl %eax, 0x6c(%rsp) movq 0x80(%rsp), %rax movsbl 0x1(%rax), %eax movq 0xe0(%rsp), %rcx movsbl 0x4(%rcx), %ecx imull %ecx, %eax addl 0x6c(%rsp), %eax movl %eax, 0x6c(%rsp) movq 0x80(%rsp), %rax movsbl 0x2(%rax), %eax movq 0xe0(%rsp), %rcx movsbl 0x5(%rcx), %ecx imull %ecx, %eax addl 0x6c(%rsp), %eax movl %eax, 0x6c(%rsp) movq 0x78(%rsp), %rax movsbl (%rax), %eax movq 0xe0(%rsp), %rcx movsbl 0x6(%rcx), %ecx imull %ecx, %eax addl 0x6c(%rsp), %eax movl %eax, 0x6c(%rsp) movq 0x78(%rsp), %rax movsbl 0x1(%rax), %eax movq 0xe0(%rsp), %rcx movsbl 0x7(%rcx), %ecx imull %ecx, %eax addl 0x6c(%rsp), %eax movl %eax, 0x6c(%rsp) movq 0x78(%rsp), %rax movsbl 0x2(%rax), %eax movq 0xe0(%rsp), %rcx movsbl 0x8(%rcx), 
%ecx imull %ecx, %eax addl 0x6c(%rsp), %eax movl %eax, 0x6c(%rsp) vcvtsi2ssl 0x6c(%rsp), %xmm0, %xmm0 vmulss 0xec(%rsp), %xmm0, %xmm0 vaddss 0xf0(%rsp), %xmm0, %xmm0 vmulss 0xe8(%rsp), %xmm0, %xmm0 vmovss %xmm0, 0x260(%rsp) vmovss 0x260(%rsp), %xmm0 callq 0x137550 vcvttss2si %xmm0, %eax movl %eax, 0x25c(%rsp) cmpl $0x7f, 0x25c(%rsp) jle 0x1755ac6 movb $0x7f, 0x267(%rsp) jmp 0x1755ae8 cmpl $-0x7f, 0x25c(%rsp) jge 0x1755ada movb $-0x7f, 0x267(%rsp) jmp 0x1755ae8 movl 0x25c(%rsp), %eax movb %al, 0x267(%rsp) movb 0x267(%rsp), %al movb %al, 0xf(%rsp) movb 0xf(%rsp), %cl movq 0x100(%rsp), %rax movb %cl, (%rax) movq 0x88(%rsp), %rax addq $0x1, %rax movq %rax, 0x88(%rsp) movq 0x80(%rsp), %rax addq $0x1, %rax movq %rax, 0x80(%rsp) movq 0x78(%rsp), %rax addq $0x1, %rax movq %rax, 0x78(%rsp) movq 0x100(%rsp), %rax addq $0x1, %rax movq %rax, 0x100(%rsp) movl 0x70(%rsp), %eax addl $-0x1, %eax movl %eax, 0x70(%rsp) jmp 0x175592e movq 0x88(%rsp), %rax addq $0x2, %rax movq %rax, 0x88(%rsp) movq 0x80(%rsp), %rax addq $0x2, %rax movq %rax, 0x80(%rsp) movq 0x78(%rsp), %rax addq $0x2, %rax movq %rax, 0x78(%rsp) movl 0x74(%rsp), %eax addl $0x1, %eax movl %eax, 0x74(%rsp) jmp 0x1755912 leaq 0x108(%rsp), %rax movq %rax, 0x1b8(%rsp) movq 0x1b8(%rsp), %rax movq %rax, 0x1e0(%rsp) movq 0x1e0(%rsp), %rax movq %rax, (%rsp) cmpq $0x0, 0x8(%rax) je 0x1755c51 movq (%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x1dc(%rsp) # imm = 0xFFFFFFFF movl 0x1dc(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1d8(%rsp) cmpl $0x1, 0x1d8(%rsp) jne 0x1755c51 movq (%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1755c26 movq (%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1755c24 jmp 0x1755c4f movq (%rsp), %rax movq (%rax), %rax movq %rax, 0x280(%rsp) cmpq $0x0, 0x280(%rsp) je 0x1755c4d movq 0x280(%rsp), %rdi callq 0x5e480 jmp 0x1755c4f jmp 0x1755c51 movq (%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) 
movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1755ca8 movq %rax, %rdi callq 0x5fc90 jmp 0x1755caa movl 0x154(%rsp), %eax addl $0x1, %eax movl %eax, 0x154(%rsp) jmp 0x17550e5 addq $0x358, %rsp # imm = 0x358 retq nopl (%rax,%rax)
/ysh329[P]ncnn/src/layer/x86/convolutiondepthwise_3x3_int8.h
ncnn::ConvolutionDepthWise_x86_avx::forward(std::vector<ncnn::Mat, std::allocator<ncnn::Mat>> const&, std::vector<ncnn::Mat, std::allocator<ncnn::Mat>>&, ncnn::Option const&) const
int ConvolutionDepthWise_x86_avx::forward(const std::vector<Mat>& bottom_blobs, std::vector<Mat>& top_blobs, const Option& opt) const { const Mat& bottom_blob = bottom_blobs[0]; const Mat& _weight_data = bottom_blobs[1]; Mat& top_blob = top_blobs[0]; const int _kernel_w = _weight_data.w; const int _kernel_h = _weight_data.h; const int _num_output = _weight_data.c * _weight_data.elempack; Mat weight_data_flattened; flatten(_weight_data, weight_data_flattened, opt); if (weight_data_flattened.empty()) return -100; // weight_data_flattened as pack1 weight_data_flattened.w *= weight_data_flattened.elempack; weight_data_flattened.elemsize /= weight_data_flattened.elempack; weight_data_flattened.elempack = 1; Mat bias_data_flattened; if (bias_term) { const Mat& _bias_data = bottom_blobs[2]; flatten(_bias_data, bias_data_flattened, opt); if (bias_data_flattened.empty()) return -100; // bias_data_flattened as pack1 bias_data_flattened.w *= bias_data_flattened.elempack; bias_data_flattened.elemsize /= bias_data_flattened.elempack; bias_data_flattened.elempack = 1; } ncnn::Layer* op = ncnn::create_layer(ncnn::LayerType::ConvolutionDepthWise); ncnn::ParamDict pd; pd.set(0, _num_output); pd.set(1, _kernel_w); pd.set(11, _kernel_h); pd.set(2, dilation_w); pd.set(12, dilation_h); pd.set(3, stride_w); pd.set(13, stride_h); pd.set(4, pad_left); pd.set(15, pad_right); pd.set(14, pad_top); pd.set(16, pad_bottom); pd.set(18, pad_value); pd.set(5, bias_term); pd.set(6, weight_data_flattened.w); pd.set(7, group); pd.set(8, int8_scale_term); pd.set(9, activation_type); pd.set(10, activation_params); op->load_param(pd); ncnn::Mat weights[2]; weights[0] = weight_data_flattened; weights[1] = bias_data_flattened; op->load_model(ncnn::ModelBinFromMatArray(weights)); op->create_pipeline(opt); op->forward(bottom_blob, top_blob, opt); op->destroy_pipeline(opt); delete op; return 0; }
subq $0x448, %rsp # imm = 0x448 movq %rdi, 0x2b0(%rsp) movq %rsi, 0x2a8(%rsp) movq %rdx, 0x2a0(%rsp) movq %rcx, 0x298(%rsp) movq 0x2b0(%rsp), %rax movq %rax, 0xf8(%rsp) movq 0x2a8(%rsp), %rdi xorl %eax, %eax movl %eax, %esi movq %rsi, 0x100(%rsp) callq 0xb5820 movq %rax, 0x290(%rsp) movq 0x2a8(%rsp), %rdi movl $0x1, %esi callq 0xb5820 movq 0x100(%rsp), %rsi movq %rax, 0x288(%rsp) movq 0x2a0(%rsp), %rdi callq 0x98840 movq %rax, 0x280(%rsp) movq 0x288(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x27c(%rsp) movq 0x288(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0x278(%rsp) movq 0x288(%rsp), %rax movl 0x18(%rax), %ecx movl 0x38(%rax), %eax imull %ecx, %eax movl %eax, 0x274(%rsp) leaq 0x228(%rsp), %rsi movq %rsi, 0x2c0(%rsp) movq 0x2c0(%rsp), %rax movq %rax, 0x108(%rsp) movq $0x0, (%rax) movq $0x0, 0x8(%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movq $0x0, 0x20(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq 0x288(%rsp), %rdi movq 0x298(%rsp), %rdx callq 0x69690 jmp 0x179f363 leaq 0x228(%rsp), %rax movq %rax, 0x3e8(%rsp) movq 0x3e8(%rsp), %rcx movq %rcx, 0xe8(%rsp) movb $0x1, %al cmpq $0x0, (%rcx) movb %al, 0xf7(%rsp) je 0x179f3c4 movq 0xe8(%rsp), %rax movq %rax, 0x438(%rsp) movq 0x438(%rsp), %rcx movq 0x40(%rcx), %rax movslq 0x38(%rcx), %rcx imulq %rcx, %rax cmpq $0x0, %rax sete %al movb %al, 0xf7(%rsp) movb 0xf7(%rsp), %al movb %al, 0xe7(%rsp) movb 0xe7(%rsp), %al testb $0x1, %al jne 0x179f3df jmp 0x179f413 movl $0xffffff9c, 0x2bc(%rsp) # imm = 0xFFFFFF9C movl $0x1, 0x218(%rsp) jmp 0x17a045a movq %rax, %rcx movl %edx, %eax movq %rcx, 0x220(%rsp) movl %eax, 0x21c(%rsp) jmp 0x17a057a movl 0x240(%rsp), %eax imull 0x254(%rsp), %eax movl %eax, 0x254(%rsp) movslq 0x240(%rsp), %rcx movq 0x238(%rsp), %rax xorl %edx, %edx divq %rcx movq %rax, 0x238(%rsp) movl $0x1, 0x240(%rsp) leaq 0x1d0(%rsp), %rax movq %rax, 0x2c8(%rsp) movq 0x2c8(%rsp), %rax movq %rax, 0xd8(%rsp) movq 
$0x0, (%rax) movq $0x0, 0x8(%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movq $0x0, 0x20(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq 0xf8(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx cmpl $0x0, 0x100(%rax,%rcx) je 0x179f606 movq 0x2a8(%rsp), %rdi movl $0x2, %esi callq 0xb5820 movq %rax, 0x1c8(%rsp) movq 0x1c8(%rsp), %rdi movq 0x298(%rsp), %rdx leaq 0x1d0(%rsp), %rsi callq 0x69690 jmp 0x179f518 leaq 0x1d0(%rsp), %rax movq %rax, 0x3e0(%rsp) movq 0x3e0(%rsp), %rcx movq %rcx, 0xc8(%rsp) movb $0x1, %al cmpq $0x0, (%rcx) movb %al, 0xd7(%rsp) je 0x179f579 movq 0xc8(%rsp), %rax movq %rax, 0x440(%rsp) movq 0x440(%rsp), %rcx movq 0x40(%rcx), %rax movslq 0x38(%rcx), %rcx imulq %rcx, %rax cmpq $0x0, %rax sete %al movb %al, 0xd7(%rsp) movb 0xd7(%rsp), %al movb %al, 0xc7(%rsp) movb 0xc7(%rsp), %al testb $0x1, %al jne 0x179f594 jmp 0x179f5c8 movl $0xffffff9c, 0x2bc(%rsp) # imm = 0xFFFFFF9C movl $0x1, 0x218(%rsp) jmp 0x17a022e movq %rax, %rcx movl %edx, %eax movq %rcx, 0x220(%rsp) movl %eax, 0x21c(%rsp) jmp 0x17a0344 movl 0x1e8(%rsp), %eax imull 0x1fc(%rsp), %eax movl %eax, 0x1fc(%rsp) movslq 0x1e8(%rsp), %rcx movq 0x1e0(%rsp), %rax xorl %edx, %edx divq %rcx movq %rax, 0x1e0(%rsp) movl $0x1, 0x1e8(%rsp) movl $0x2a, %edi callq 0xae160 movq %rax, 0xb8(%rsp) jmp 0x179f61a movq 0xb8(%rsp), %rax movq %rax, 0x1c0(%rsp) leaq 0x1b0(%rsp), %rdi callq 0xa0840 jmp 0x179f639 movl 0x274(%rsp), %edx leaq 0x1b0(%rsp), %rdi xorl %esi, %esi callq 0xa16d0 jmp 0x179f651 movl 0x27c(%rsp), %edx leaq 0x1b0(%rsp), %rdi movl $0x1, %esi callq 0xa16d0 jmp 0x179f66c movl 0x278(%rsp), %edx leaq 0x1b0(%rsp), %rdi movl $0xb, %esi callq 0xa16d0 jmp 0x179f687 movq 0xf8(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx movl 0xdc(%rax,%rcx), %edx leaq 0x1b0(%rsp), %rdi movl $0x2, %esi callq 0xa16d0 jmp 0x179f6b1 movq 0xf8(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx movl 0xe0(%rax,%rcx), %edx 
leaq 0x1b0(%rsp), %rdi movl $0xc, %esi callq 0xa16d0 jmp 0x179f6db movq 0xf8(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx movl 0xe4(%rax,%rcx), %edx leaq 0x1b0(%rsp), %rdi movl $0x3, %esi callq 0xa16d0 jmp 0x179f705 movq 0xf8(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx movl 0xe8(%rax,%rcx), %edx leaq 0x1b0(%rsp), %rdi movl $0xd, %esi callq 0xa16d0 jmp 0x179f72f movq 0xf8(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx movl 0xec(%rax,%rcx), %edx leaq 0x1b0(%rsp), %rdi movl $0x4, %esi callq 0xa16d0 jmp 0x179f759 movq 0xf8(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx movl 0xf0(%rax,%rcx), %edx leaq 0x1b0(%rsp), %rdi movl $0xf, %esi callq 0xa16d0 jmp 0x179f783 movq 0xf8(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx movl 0xf4(%rax,%rcx), %edx leaq 0x1b0(%rsp), %rdi movl $0xe, %esi callq 0xa16d0 jmp 0x179f7ad movq 0xf8(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx movl 0xf8(%rax,%rcx), %edx leaq 0x1b0(%rsp), %rdi movl $0x10, %esi callq 0xa16d0 jmp 0x179f7d7 movq 0xf8(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx vmovss 0xfc(%rax,%rcx), %xmm0 leaq 0x1b0(%rsp), %rdi movl $0x12, %esi callq 0xa1710 jmp 0x179f803 movq 0xf8(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx movl 0x100(%rax,%rcx), %edx leaq 0x1b0(%rsp), %rdi movl $0x5, %esi callq 0xa16d0 jmp 0x179f82d movl 0x254(%rsp), %edx leaq 0x1b0(%rsp), %rdi movl $0x6, %esi callq 0xa16d0 jmp 0x179f848 movq 0xf8(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx movl 0x108(%rax,%rcx), %edx leaq 0x1b0(%rsp), %rdi movl $0x7, %esi callq 0xa16d0 jmp 0x179f872 movq 0xf8(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx movl 0x10c(%rax,%rcx), %edx leaq 0x1b0(%rsp), %rdi movl $0x8, %esi callq 0xa16d0 jmp 0x179f89c movq 0xf8(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx movl 0x110(%rax,%rcx), %edx leaq 0x1b0(%rsp), %rdi movl $0x9, %esi callq 0xa16d0 jmp 0x179f8c6 movq 0xf8(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx leaq 0x118(%rax,%rcx), %rdx leaq 
0x1b0(%rsp), %rdi movl $0xa, %esi callq 0xa1760 jmp 0x179f8f1 movq 0x1c0(%rsp), %rdi movq (%rdi), %rax movq 0x10(%rax), %rax leaq 0x1b0(%rsp), %rsi callq *%rax jmp 0x179f90c leaq 0x120(%rsp), %rax movq %rax, %rcx addq $0x90, %rcx movq %rcx, 0xa8(%rsp) movq %rax, 0xb0(%rsp) movq 0xb0(%rsp), %rax movq %rax, 0x98(%rsp) movq %rax, 0x2d0(%rsp) movq 0x2d0(%rsp), %rax movq %rax, 0xa0(%rsp) movq $0x0, (%rax) movq $0x0, 0x8(%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movq $0x0, 0x20(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq 0xa8(%rsp), %rcx movq 0x98(%rsp), %rax addq $0x48, %rax cmpq %rcx, %rax movq %rax, 0xb0(%rsp) jne 0x179f92e leaq 0x120(%rsp), %rax movq %rax, 0x340(%rsp) leaq 0x228(%rsp), %rax movq %rax, 0x338(%rsp) movq 0x340(%rsp), %rax movq %rax, 0x90(%rsp) cmpq 0x338(%rsp), %rax jne 0x179fa1b movq 0x90(%rsp), %rax movq %rax, 0x348(%rsp) jmp 0x179fc0e movq 0x338(%rsp), %rax cmpq $0x0, 0x8(%rax) je 0x179fa53 movq 0x338(%rsp), %rax movq 0x8(%rax), %rcx movl $0x1, 0x334(%rsp) movl 0x334(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x330(%rsp) movq 0x90(%rsp), %rax movq %rax, 0x358(%rsp) movq 0x358(%rsp), %rax movq %rax, 0x88(%rsp) cmpq $0x0, 0x8(%rax) je 0x179fb0b movq 0x88(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x354(%rsp) # imm = 0xFFFFFFFF movl 0x354(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x350(%rsp) cmpl $0x1, 0x350(%rsp) jne 0x179fb0b movq 0x88(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x179fadc movq 0x88(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x179fada jmp 0x179fb09 movq 0x88(%rsp), %rax movq (%rax), %rax movq %rax, 0x430(%rsp) cmpq $0x0, 0x430(%rsp) je 0x179fb07 movq 0x430(%rsp), %rdi callq 0x5e480 jmp 0x179fb09 jmp 0x179fb0b movq 0x88(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) 
movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) movq 0x90(%rsp), %rax movq 0x338(%rsp), %rcx movq (%rcx), %rcx movq %rcx, (%rax) movq 0x338(%rsp), %rcx movq 0x8(%rcx), %rcx movq %rcx, 0x8(%rax) movq 0x338(%rsp), %rcx movq 0x10(%rcx), %rcx movq %rcx, 0x10(%rax) movq 0x338(%rsp), %rcx movl 0x18(%rcx), %ecx movl %ecx, 0x18(%rax) movq 0x338(%rsp), %rcx movq 0x20(%rcx), %rcx movq %rcx, 0x20(%rax) movq 0x338(%rsp), %rcx movl 0x28(%rcx), %ecx movl %ecx, 0x28(%rax) movq 0x338(%rsp), %rcx movl 0x2c(%rcx), %ecx movl %ecx, 0x2c(%rax) movq 0x338(%rsp), %rcx movl 0x30(%rcx), %ecx movl %ecx, 0x30(%rax) movq 0x338(%rsp), %rcx movl 0x34(%rcx), %ecx movl %ecx, 0x34(%rax) movq 0x338(%rsp), %rcx movl 0x38(%rcx), %ecx movl %ecx, 0x38(%rax) movq 0x338(%rsp), %rcx movq 0x40(%rcx), %rcx movq %rcx, 0x40(%rax) movq %rax, 0x348(%rsp) leaq 0x120(%rsp), %rax addq $0x48, %rax movq %rax, 0x320(%rsp) leaq 0x1d0(%rsp), %rax movq %rax, 0x318(%rsp) movq 0x320(%rsp), %rax movq %rax, 0x80(%rsp) cmpq 0x318(%rsp), %rax jne 0x179fc61 movq 0x80(%rsp), %rax movq %rax, 0x328(%rsp) jmp 0x179fe42 movq 0x318(%rsp), %rax cmpq $0x0, 0x8(%rax) je 0x179fc99 movq 0x318(%rsp), %rax movq 0x8(%rax), %rcx movl $0x1, 0x314(%rsp) movl 0x314(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x310(%rsp) movq 0x80(%rsp), %rax movq %rax, 0x368(%rsp) movq 0x368(%rsp), %rax movq %rax, 0x78(%rsp) cmpq $0x0, 0x8(%rax) je 0x179fd42 movq 0x78(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x364(%rsp) # imm = 0xFFFFFFFF movl 0x364(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x360(%rsp) cmpl $0x1, 0x360(%rsp) jne 0x179fd42 movq 0x78(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x179fd16 movq 0x78(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x179fd14 jmp 0x179fd40 movq 0x78(%rsp), %rax movq (%rax), %rax movq %rax, 0x428(%rsp) cmpq $0x0, 0x428(%rsp) je 0x179fd3e movq 0x428(%rsp), %rdi callq 0x5e480 jmp 0x179fd40 jmp 0x179fd42 movq 
0x78(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) movq 0x80(%rsp), %rax movq 0x318(%rsp), %rcx movq (%rcx), %rcx movq %rcx, (%rax) movq 0x318(%rsp), %rcx movq 0x8(%rcx), %rcx movq %rcx, 0x8(%rax) movq 0x318(%rsp), %rcx movq 0x10(%rcx), %rcx movq %rcx, 0x10(%rax) movq 0x318(%rsp), %rcx movl 0x18(%rcx), %ecx movl %ecx, 0x18(%rax) movq 0x318(%rsp), %rcx movq 0x20(%rcx), %rcx movq %rcx, 0x20(%rax) movq 0x318(%rsp), %rcx movl 0x28(%rcx), %ecx movl %ecx, 0x28(%rax) movq 0x318(%rsp), %rcx movl 0x2c(%rcx), %ecx movl %ecx, 0x2c(%rax) movq 0x318(%rsp), %rcx movl 0x30(%rcx), %ecx movl %ecx, 0x30(%rax) movq 0x318(%rsp), %rcx movl 0x34(%rcx), %ecx movl %ecx, 0x34(%rax) movq 0x318(%rsp), %rcx movl 0x38(%rcx), %ecx movl %ecx, 0x38(%rax) movq 0x318(%rsp), %rcx movq 0x40(%rcx), %rcx movq %rcx, 0x40(%rax) movq %rax, 0x328(%rsp) movq 0x1c0(%rsp), %rax movq %rax, 0x70(%rsp) leaq 0x110(%rsp), %rdi leaq 0x120(%rsp), %rsi callq 0x89470 jmp 0x179fe66 movq 0x70(%rsp), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax leaq 0x110(%rsp), %rsi callq *%rax jmp 0x179fe7e leaq 0x110(%rsp), %rdi callq 0x89520 movq 0x1c0(%rsp), %rdi movq 0x298(%rsp), %rsi movq (%rdi), %rax movq 0x20(%rax), %rax callq *%rax jmp 0x179fea6 movq 0x1c0(%rsp), %rdi movq 0x290(%rsp), %rsi movq 0x280(%rsp), %rdx movq 0x298(%rsp), %rcx movq (%rdi), %rax movq 0x38(%rax), %rax callq *%rax jmp 0x179fed1 movq 0x1c0(%rsp), %rdi movq 0x298(%rsp), %rsi movq (%rdi), %rax movq 0x28(%rax), %rax callq *%rax jmp 0x179feec movq 0x1c0(%rsp), %rax movq %rax, 0x68(%rsp) cmpq $0x0, %rax je 0x179ff0a movq 0x68(%rsp), %rdi movq (%rdi), %rax callq *0x8(%rax) movl $0x0, 0x2bc(%rsp) movl $0x1, 0x218(%rsp) leaq 0x120(%rsp), %rax movq %rax, 0x58(%rsp) addq $0x90, %rax movq %rax, 0x60(%rsp) jmp 0x179ff92 movq %rax, %rcx movl %edx, %eax movq %rcx, 0x220(%rsp) movl %eax, 
0x21c(%rsp) jmp 0x17a021c movq %rax, %rcx movl %edx, %eax movq %rcx, 0x220(%rsp) movl %eax, 0x21c(%rsp) jmp 0x17a00d3 movq %rax, %rcx movl %edx, %eax movq %rcx, 0x220(%rsp) movl %eax, 0x21c(%rsp) leaq 0x110(%rsp), %rdi callq 0x89520 jmp 0x17a00d3 movq 0x60(%rsp), %rax addq $-0x48, %rax movq %rax, 0x48(%rsp) movq %rax, 0x2e0(%rsp) movq 0x2e0(%rsp), %rax movq %rax, 0x3c8(%rsp) movq 0x3c8(%rsp), %rax movq %rax, 0x50(%rsp) cmpq $0x0, 0x8(%rax) je 0x17a0051 movq 0x50(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x3c4(%rsp) # imm = 0xFFFFFFFF movl 0x3c4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x3c0(%rsp) cmpl $0x1, 0x3c0(%rsp) jne 0x17a0051 movq 0x50(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x17a0025 movq 0x50(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x17a0023 jmp 0x17a004f movq 0x50(%rsp), %rax movq (%rax), %rax movq %rax, 0x3f8(%rsp) cmpq $0x0, 0x3f8(%rsp) je 0x17a004d movq 0x3f8(%rsp), %rdi callq 0x5e480 jmp 0x17a004f jmp 0x17a0051 movq 0x50(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x17a00a9 movq %rax, %rdi callq 0x5fc90 movq 0x48(%rsp), %rax movq 0x58(%rsp), %rcx cmpq %rcx, %rax movq %rax, 0x60(%rsp) jne 0x179ff92 leaq 0x1b0(%rsp), %rdi callq 0xa0e10 jmp 0x17a022e leaq 0x120(%rsp), %rax movq %rax, 0x38(%rsp) addq $0x90, %rax movq %rax, 0x40(%rsp) movq 0x40(%rsp), %rax addq $-0x48, %rax movq %rax, 0x28(%rsp) movq %rax, 0x2e8(%rsp) movq 0x2e8(%rsp), %rax movq %rax, 0x3b8(%rsp) movq 0x3b8(%rsp), %rax movq %rax, 0x30(%rsp) cmpq $0x0, 0x8(%rax) je 0x17a01aa movq 0x30(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x3b4(%rsp) # imm = 0xFFFFFFFF movl 0x3b4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x3b0(%rsp) cmpl $0x1, 0x3b0(%rsp) jne 0x17a01aa movq 0x30(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x17a017e 
movq 0x30(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x17a017c jmp 0x17a01a8 movq 0x30(%rsp), %rax movq (%rax), %rax movq %rax, 0x400(%rsp) cmpq $0x0, 0x400(%rsp) je 0x17a01a6 movq 0x400(%rsp), %rdi callq 0x5e480 jmp 0x17a01a8 jmp 0x17a01aa movq 0x30(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x17a0202 movq %rax, %rdi callq 0x5fc90 movq 0x28(%rsp), %rax movq 0x38(%rsp), %rcx cmpq %rcx, %rax movq %rax, 0x40(%rsp) jne 0x17a00eb jmp 0x17a021c leaq 0x1b0(%rsp), %rdi callq 0xa0e10 jmp 0x17a0344 leaq 0x1d0(%rsp), %rax movq %rax, 0x2f0(%rsp) movq 0x2f0(%rsp), %rax movq %rax, 0x3a8(%rsp) movq 0x3a8(%rsp), %rax movq %rax, 0x20(%rsp) cmpq $0x0, 0x8(%rax) je 0x17a02e7 movq 0x20(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x3a4(%rsp) # imm = 0xFFFFFFFF movl 0x3a4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x3a0(%rsp) cmpl $0x1, 0x3a0(%rsp) jne 0x17a02e7 movq 0x20(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x17a02bb movq 0x20(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x17a02b9 jmp 0x17a02e5 movq 0x20(%rsp), %rax movq (%rax), %rax movq %rax, 0x408(%rsp) cmpq $0x0, 0x408(%rsp) je 0x17a02e3 movq 0x408(%rsp), %rdi callq 0x5e480 jmp 0x17a02e5 jmp 0x17a02e7 movq 0x20(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x17a033f movq %rax, %rdi callq 0x5fc90 jmp 0x17a045a leaq 0x1d0(%rsp), %rax movq %rax, 0x2f8(%rsp) movq 0x2f8(%rsp), %rax movq %rax, 0x398(%rsp) movq 0x398(%rsp), %rax movq %rax, 0x18(%rsp) cmpq $0x0, 0x8(%rax) je 0x17a03fd movq 0x18(%rsp), %rax movq 0x8(%rax), %rcx movl 
$0xffffffff, 0x394(%rsp) # imm = 0xFFFFFFFF movl 0x394(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x390(%rsp) cmpl $0x1, 0x390(%rsp) jne 0x17a03fd movq 0x18(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x17a03d1 movq 0x18(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x17a03cf jmp 0x17a03fb movq 0x18(%rsp), %rax movq (%rax), %rax movq %rax, 0x410(%rsp) cmpq $0x0, 0x410(%rsp) je 0x17a03f9 movq 0x410(%rsp), %rdi callq 0x5e480 jmp 0x17a03fb jmp 0x17a03fd movq 0x18(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x17a0455 movq %rax, %rdi callq 0x5fc90 jmp 0x17a057a leaq 0x228(%rsp), %rax movq %rax, 0x300(%rsp) movq 0x300(%rsp), %rax movq %rax, 0x388(%rsp) movq 0x388(%rsp), %rax movq %rax, 0x10(%rsp) cmpq $0x0, 0x8(%rax) je 0x17a0513 movq 0x10(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x384(%rsp) # imm = 0xFFFFFFFF movl 0x384(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x380(%rsp) cmpl $0x1, 0x380(%rsp) jne 0x17a0513 movq 0x10(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x17a04e7 movq 0x10(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x17a04e5 jmp 0x17a0511 movq 0x10(%rsp), %rax movq (%rax), %rax movq %rax, 0x418(%rsp) cmpq $0x0, 0x418(%rsp) je 0x17a050f movq 0x418(%rsp), %rdi callq 0x5e480 jmp 0x17a0511 jmp 0x17a0513 movq 0x10(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x17a056b movq %rax, %rdi callq 0x5fc90 movl 0x2bc(%rsp), %eax addq $0x448, %rsp # imm = 0x448 retq leaq 0x228(%rsp), %rax movq %rax, 0x308(%rsp) movq 0x308(%rsp), %rax movq %rax, 0x378(%rsp) movq 0x378(%rsp), %rax 
movq %rax, 0x8(%rsp) cmpq $0x0, 0x8(%rax) je 0x17a0633 movq 0x8(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x374(%rsp) # imm = 0xFFFFFFFF movl 0x374(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x370(%rsp) cmpl $0x1, 0x370(%rsp) jne 0x17a0633 movq 0x8(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x17a0607 movq 0x8(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x17a0605 jmp 0x17a0631 movq 0x8(%rsp), %rax movq (%rax), %rax movq %rax, 0x420(%rsp) cmpq $0x0, 0x420(%rsp) je 0x17a062f movq 0x420(%rsp), %rdi callq 0x5e480 jmp 0x17a0631 jmp 0x17a0633 movq 0x8(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x17a068b movq %rax, %rdi callq 0x5fc90 jmp 0x17a068d movq 0x220(%rsp), %rdi callq 0x5e3b0 nopw (%rax,%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/convolutiondepthwise_x86_avx.cpp
ncnn::resize_bicubic_image_pack4(ncnn::Mat const&, ncnn::Mat&, float*, int*, float*, int*)
static void resize_bicubic_image_pack4(const Mat& src, Mat& dst, float* alpha, int* xofs, float* beta, int* yofs) { int w = dst.w; int h = dst.h; // loop body Mat rowsbuf0(w, (size_t)4 * 4u, 4); Mat rowsbuf1(w, (size_t)4 * 4u, 4); Mat rowsbuf2(w, (size_t)4 * 4u, 4); Mat rowsbuf3(w, (size_t)4 * 4u, 4); float* rows0 = rowsbuf0; float* rows1 = rowsbuf1; float* rows2 = rowsbuf2; float* rows3 = rowsbuf3; int prev_sy1 = -3; for (int dy = 0; dy < h; dy++) { int sy = yofs[dy]; if (sy == prev_sy1) { // reuse all rows } else if (sy == prev_sy1 + 1) { // hresize one row float* rows0_old = rows0; rows0 = rows1; rows1 = rows2; rows2 = rows3; rows3 = rows0_old; const float* S3 = src.row(sy + 2); const float* alphap = alpha; float* rows3p = rows3; for (int dx = 0; dx < w; dx++) { int sx = xofs[dx] * 4; const float* S3p = S3 + sx; __m128 _a0 = _mm_set1_ps(alphap[0]); __m128 _a1 = _mm_set1_ps(alphap[1]); __m128 _a2 = _mm_set1_ps(alphap[2]); __m128 _a3 = _mm_set1_ps(alphap[3]); __m128 _S30 = _mm_load_ps(S3p - 4); __m128 _S31 = _mm_load_ps(S3p + 0); __m128 _S32 = _mm_load_ps(S3p + 4); __m128 _S33 = _mm_load_ps(S3p + 8); __m128 _rows3 = _mm_mul_ps(_S30, _a0); _rows3 = _mm_comp_fmadd_ps(_S31, _a1, _rows3); _rows3 = _mm_comp_fmadd_ps(_S32, _a2, _rows3); _rows3 = _mm_comp_fmadd_ps(_S33, _a3, _rows3); _mm_store_ps(rows3p + dx * 4, _rows3); alphap += 4; } } else if (sy == prev_sy1 + 2) { // hresize two rows float* rows0_old = rows0; float* rows1_old = rows1; rows0 = rows2; rows1 = rows3; rows2 = rows0_old; rows3 = rows1_old; const float* S2 = src.row(sy + 1); const float* S3 = src.row(sy + 2); const float* alphap = alpha; float* rows2p = rows2; float* rows3p = rows3; for (int dx = 0; dx < w; dx++) { int sx = xofs[dx] * 4; const float* S2p = S2 + sx; const float* S3p = S3 + sx; __m128 _a0 = _mm_set1_ps(alphap[0]); __m128 _a1 = _mm_set1_ps(alphap[1]); __m128 _a2 = _mm_set1_ps(alphap[2]); __m128 _a3 = _mm_set1_ps(alphap[3]); __m128 _S20 = _mm_load_ps(S2p - 4); __m128 _S21 = _mm_load_ps(S2p + 
0); __m128 _S22 = _mm_load_ps(S2p + 4); __m128 _S23 = _mm_load_ps(S2p + 8); __m128 _S30 = _mm_load_ps(S3p - 4); __m128 _S31 = _mm_load_ps(S3p + 0); __m128 _S32 = _mm_load_ps(S3p + 4); __m128 _S33 = _mm_load_ps(S3p + 8); __m128 _rows2 = _mm_mul_ps(_S20, _a0); __m128 _rows3 = _mm_mul_ps(_S30, _a0); _rows2 = _mm_comp_fmadd_ps(_S21, _a1, _rows2); _rows3 = _mm_comp_fmadd_ps(_S31, _a1, _rows3); _rows2 = _mm_comp_fmadd_ps(_S22, _a2, _rows2); _rows3 = _mm_comp_fmadd_ps(_S32, _a2, _rows3); _rows2 = _mm_comp_fmadd_ps(_S23, _a3, _rows2); _rows3 = _mm_comp_fmadd_ps(_S33, _a3, _rows3); _mm_store_ps(rows2p + dx * 4, _rows2); _mm_store_ps(rows3p + dx * 4, _rows3); alphap += 4; } } else if (sy == prev_sy1 + 3) { // hresize three rows float* rows0_old = rows0; float* rows1_old = rows1; float* rows2_old = rows2; rows0 = rows3; rows1 = rows0_old; rows2 = rows1_old; rows3 = rows2_old; const float* S1 = src.row(sy); const float* S2 = src.row(sy + 1); const float* S3 = src.row(sy + 2); const float* alphap = alpha; float* rows1p = rows1; float* rows2p = rows2; float* rows3p = rows3; for (int dx = 0; dx < w; dx++) { int sx = xofs[dx] * 4; const float* S1p = S1 + sx; const float* S2p = S2 + sx; const float* S3p = S3 + sx; __m128 _a0 = _mm_set1_ps(alphap[0]); __m128 _a1 = _mm_set1_ps(alphap[1]); __m128 _a2 = _mm_set1_ps(alphap[2]); __m128 _a3 = _mm_set1_ps(alphap[3]); __m128 _S10 = _mm_load_ps(S1p - 4); __m128 _S11 = _mm_load_ps(S1p + 0); __m128 _S12 = _mm_load_ps(S1p + 4); __m128 _S13 = _mm_load_ps(S1p + 8); __m128 _S20 = _mm_load_ps(S2p - 4); __m128 _S21 = _mm_load_ps(S2p + 0); __m128 _S22 = _mm_load_ps(S2p + 4); __m128 _S23 = _mm_load_ps(S2p + 8); __m128 _S30 = _mm_load_ps(S3p - 4); __m128 _S31 = _mm_load_ps(S3p + 0); __m128 _S32 = _mm_load_ps(S3p + 4); __m128 _S33 = _mm_load_ps(S3p + 8); __m128 _rows1 = _mm_mul_ps(_S10, _a0); __m128 _rows2 = _mm_mul_ps(_S20, _a0); __m128 _rows3 = _mm_mul_ps(_S30, _a0); _rows1 = _mm_comp_fmadd_ps(_S11, _a1, _rows1); _rows2 = _mm_comp_fmadd_ps(_S21, _a1, 
_rows2); _rows3 = _mm_comp_fmadd_ps(_S31, _a1, _rows3); _rows1 = _mm_comp_fmadd_ps(_S12, _a2, _rows1); _rows2 = _mm_comp_fmadd_ps(_S22, _a2, _rows2); _rows3 = _mm_comp_fmadd_ps(_S32, _a2, _rows3); _rows1 = _mm_comp_fmadd_ps(_S13, _a3, _rows1); _rows2 = _mm_comp_fmadd_ps(_S23, _a3, _rows2); _rows3 = _mm_comp_fmadd_ps(_S33, _a3, _rows3); _mm_store_ps(rows1p + dx * 4, _rows1); _mm_store_ps(rows2p + dx * 4, _rows2); _mm_store_ps(rows3p + dx * 4, _rows3); alphap += 4; } } else { // hresize four rows const float* S0 = src.row(sy - 1); const float* S1 = src.row(sy); const float* S2 = src.row(sy + 1); const float* S3 = src.row(sy + 2); const float* alphap = alpha; float* rows0p = rows0; float* rows1p = rows1; float* rows2p = rows2; float* rows3p = rows3; for (int dx = 0; dx < w; dx++) { int sx = xofs[dx] * 4; const float* S0p = S0 + sx; const float* S1p = S1 + sx; const float* S2p = S2 + sx; const float* S3p = S3 + sx; __m128 _a0 = _mm_set1_ps(alphap[0]); __m128 _a1 = _mm_set1_ps(alphap[1]); __m128 _a2 = _mm_set1_ps(alphap[2]); __m128 _a3 = _mm_set1_ps(alphap[3]); __m128 _S00 = _mm_load_ps(S0p - 4); __m128 _S01 = _mm_load_ps(S0p + 0); __m128 _S02 = _mm_load_ps(S0p + 4); __m128 _S03 = _mm_load_ps(S0p + 8); __m128 _S10 = _mm_load_ps(S1p - 4); __m128 _S11 = _mm_load_ps(S1p + 0); __m128 _S12 = _mm_load_ps(S1p + 4); __m128 _S13 = _mm_load_ps(S1p + 8); __m128 _S20 = _mm_load_ps(S2p - 4); __m128 _S21 = _mm_load_ps(S2p + 0); __m128 _S22 = _mm_load_ps(S2p + 4); __m128 _S23 = _mm_load_ps(S2p + 8); __m128 _S30 = _mm_load_ps(S3p - 4); __m128 _S31 = _mm_load_ps(S3p + 0); __m128 _S32 = _mm_load_ps(S3p + 4); __m128 _S33 = _mm_load_ps(S3p + 8); __m128 _rows0 = _mm_mul_ps(_S00, _a0); __m128 _rows1 = _mm_mul_ps(_S10, _a0); __m128 _rows2 = _mm_mul_ps(_S20, _a0); __m128 _rows3 = _mm_mul_ps(_S30, _a0); _rows0 = _mm_comp_fmadd_ps(_S01, _a1, _rows0); _rows1 = _mm_comp_fmadd_ps(_S11, _a1, _rows1); _rows2 = _mm_comp_fmadd_ps(_S21, _a1, _rows2); _rows3 = _mm_comp_fmadd_ps(_S31, _a1, _rows3); _rows0 
= _mm_comp_fmadd_ps(_S02, _a2, _rows0); _rows1 = _mm_comp_fmadd_ps(_S12, _a2, _rows1); _rows2 = _mm_comp_fmadd_ps(_S22, _a2, _rows2); _rows3 = _mm_comp_fmadd_ps(_S32, _a2, _rows3); _rows0 = _mm_comp_fmadd_ps(_S03, _a3, _rows0); _rows1 = _mm_comp_fmadd_ps(_S13, _a3, _rows1); _rows2 = _mm_comp_fmadd_ps(_S23, _a3, _rows2); _rows3 = _mm_comp_fmadd_ps(_S33, _a3, _rows3); _mm_store_ps(rows0p + dx * 4, _rows0); _mm_store_ps(rows1p + dx * 4, _rows1); _mm_store_ps(rows2p + dx * 4, _rows2); _mm_store_ps(rows3p + dx * 4, _rows3); alphap += 4; } } prev_sy1 = sy; // vresize __m128 _b0 = _mm_set1_ps(beta[0]); __m128 _b1 = _mm_set1_ps(beta[1]); __m128 _b2 = _mm_set1_ps(beta[2]); __m128 _b3 = _mm_set1_ps(beta[3]); float* rows0p = rows0; float* rows1p = rows1; float* rows2p = rows2; float* rows3p = rows3; float* Dp = dst.row(dy); for (int dx = 0; dx < w; dx++) { __m128 _rows0 = _mm_load_ps(rows0p); __m128 _rows1 = _mm_load_ps(rows1p); __m128 _rows2 = _mm_load_ps(rows2p); __m128 _rows3 = _mm_load_ps(rows3p); __m128 _D = _mm_mul_ps(_rows0, _b0); _D = _mm_comp_fmadd_ps(_rows1, _b1, _D); _D = _mm_comp_fmadd_ps(_rows2, _b2, _D); _D = _mm_comp_fmadd_ps(_rows3, _b3, _D); _mm_store_ps(Dp, _D); Dp += 4; rows0p += 4; rows1p += 4; rows2p += 4; rows3p += 4; } beta += 4; } }
subq $0x1fc8, %rsp # imm = 0x1FC8 movq %rdi, 0xa48(%rsp) movq %rsi, 0xa40(%rsp) movq %rdx, 0xa38(%rsp) movq %rcx, 0xa30(%rsp) movq %r8, 0xa28(%rsp) movq %r9, 0xa20(%rsp) movq 0xa40(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0xa1c(%rsp) movq 0xa40(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0xa18(%rsp) movl 0xa1c(%rsp), %eax leaq 0x9d0(%rsp), %rcx movq %rcx, 0x1f28(%rsp) movl %eax, 0x1f24(%rsp) movq $0x10, 0x1f18(%rsp) movl $0x4, 0x1f14(%rsp) movq $0x0, 0x1f08(%rsp) movq 0x1f28(%rsp), %rdi movq %rdi, 0x260(%rsp) movq $0x0, (%rdi) movq $0x0, 0x8(%rdi) movq $0x0, 0x10(%rdi) movl $0x0, 0x18(%rdi) movq $0x0, 0x20(%rdi) movl $0x0, 0x28(%rdi) movl $0x0, 0x2c(%rdi) movl $0x0, 0x30(%rdi) movl $0x0, 0x34(%rdi) movl $0x0, 0x38(%rdi) movq $0x0, 0x40(%rdi) movl 0x1f24(%rsp), %esi movq 0x1f18(%rsp), %rdx movl 0x1f14(%rsp), %ecx movq 0x1f08(%rsp), %r8 callq 0x61b20 movl 0xa1c(%rsp), %eax leaq 0x988(%rsp), %rcx movq %rcx, 0x1f50(%rsp) movl %eax, 0x1f4c(%rsp) movq $0x10, 0x1f40(%rsp) movl $0x4, 0x1f3c(%rsp) movq $0x0, 0x1f30(%rsp) movq 0x1f50(%rsp), %rdi movq %rdi, 0x268(%rsp) movq $0x0, (%rdi) movq $0x0, 0x8(%rdi) movq $0x0, 0x10(%rdi) movl $0x0, 0x18(%rdi) movq $0x0, 0x20(%rdi) movl $0x0, 0x28(%rdi) movl $0x0, 0x2c(%rdi) movl $0x0, 0x30(%rdi) movl $0x0, 0x34(%rdi) movl $0x0, 0x38(%rdi) movq $0x0, 0x40(%rdi) movl 0x1f4c(%rsp), %esi movq 0x1f40(%rsp), %rdx movl 0x1f3c(%rsp), %ecx movq 0x1f30(%rsp), %r8 callq 0x61b20 jmp 0x184eb47 jmp 0x184eb49 movl 0xa1c(%rsp), %eax leaq 0x930(%rsp), %rcx movq %rcx, 0x1f78(%rsp) movl %eax, 0x1f74(%rsp) movq $0x10, 0x1f68(%rsp) movl $0x4, 0x1f64(%rsp) movq $0x0, 0x1f58(%rsp) movq 0x1f78(%rsp), %rdi movq %rdi, 0x258(%rsp) movq $0x0, (%rdi) movq $0x0, 0x8(%rdi) movq $0x0, 0x10(%rdi) movl $0x0, 0x18(%rdi) movq $0x0, 0x20(%rdi) movl $0x0, 0x28(%rdi) movl $0x0, 0x2c(%rdi) movl $0x0, 0x30(%rdi) movl $0x0, 0x34(%rdi) movl $0x0, 0x38(%rdi) movq $0x0, 0x40(%rdi) movl 0x1f74(%rsp), %esi movq 0x1f68(%rsp), %rdx movl 0x1f64(%rsp), %ecx movq 0x1f58(%rsp), %r8 
callq 0x61b20 jmp 0x184ec10 jmp 0x184ec12 movl 0xa1c(%rsp), %eax leaq 0x8e8(%rsp), %rcx movq %rcx, 0x1fa0(%rsp) movl %eax, 0x1f9c(%rsp) movq $0x10, 0x1f90(%rsp) movl $0x4, 0x1f8c(%rsp) movq $0x0, 0x1f80(%rsp) movq 0x1fa0(%rsp), %rdi movq %rdi, 0x250(%rsp) movq $0x0, (%rdi) movq $0x0, 0x8(%rdi) movq $0x0, 0x10(%rdi) movl $0x0, 0x18(%rdi) movq $0x0, 0x20(%rdi) movl $0x0, 0x28(%rdi) movl $0x0, 0x2c(%rdi) movl $0x0, 0x30(%rdi) movl $0x0, 0x34(%rdi) movl $0x0, 0x38(%rdi) movq $0x0, 0x40(%rdi) movl 0x1f9c(%rsp), %esi movq 0x1f90(%rsp), %rdx movl 0x1f8c(%rsp), %ecx movq 0x1f80(%rsp), %r8 callq 0x61b20 jmp 0x184ecd9 jmp 0x184ecdb leaq 0x9d0(%rsp), %rax movq %rax, 0x1fc0(%rsp) movq 0x1fc0(%rsp), %rax movq (%rax), %rax movq %rax, 0x8e0(%rsp) leaq 0x988(%rsp), %rax movq %rax, 0x1fb8(%rsp) movq 0x1fb8(%rsp), %rax movq (%rax), %rax movq %rax, 0x8d8(%rsp) leaq 0x930(%rsp), %rax movq %rax, 0x1fb0(%rsp) movq 0x1fb0(%rsp), %rax movq (%rax), %rax movq %rax, 0x8d0(%rsp) leaq 0x8e8(%rsp), %rax movq %rax, 0x1fa8(%rsp) movq 0x1fa8(%rsp), %rax movq (%rax), %rax movq %rax, 0x8c8(%rsp) movl $0xfffffffd, 0x8c4(%rsp) # imm = 0xFFFFFFFD movl $0x0, 0x8c0(%rsp) movl 0x8c0(%rsp), %eax cmpl 0xa18(%rsp), %eax jge 0x1851e1d movq 0xa20(%rsp), %rax movslq 0x8c0(%rsp), %rcx movl (%rax,%rcx,4), %eax movl %eax, 0x8bc(%rsp) movl 0x8bc(%rsp), %eax cmpl 0x8c4(%rsp), %eax jne 0x184ee0b jmp 0x18518eb movq %rax, %rcx movl %edx, %eax movq %rcx, 0x980(%rsp) movl %eax, 0x97c(%rsp) jmp 0x185248f movq %rax, %rcx movl %edx, %eax movq %rcx, 0x980(%rsp) movl %eax, 0x97c(%rsp) jmp 0x185237c movq %rax, %rcx movl %edx, %eax movq %rcx, 0x980(%rsp) movl %eax, 0x97c(%rsp) jmp 0x1852269 movl 0x8bc(%rsp), %eax movl 0x8c4(%rsp), %ecx addl $0x1, %ecx cmpl %ecx, %eax jne 0x184f371 movq 0x8e0(%rsp), %rax movq %rax, 0x8b0(%rsp) movq 0x8d8(%rsp), %rax movq %rax, 0x8e0(%rsp) movq 0x8d0(%rsp), %rax movq %rax, 0x8d8(%rsp) movq 0x8c8(%rsp), %rax movq %rax, 0x8d0(%rsp) movq 0x8b0(%rsp), %rax movq %rax, 0x8c8(%rsp) movq 0xa48(%rsp), 
%rcx movl 0x8bc(%rsp), %eax addl $0x2, %eax movq %rcx, 0xc88(%rsp) movl %eax, 0xc84(%rsp) movq 0xc88(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0xc84(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x8a8(%rsp) movq 0xa38(%rsp), %rax movq %rax, 0x8a0(%rsp) movq 0x8c8(%rsp), %rax movq %rax, 0x898(%rsp) movl $0x0, 0x894(%rsp) movl 0x894(%rsp), %eax cmpl 0xa1c(%rsp), %eax jge 0x184f36c movq 0xa30(%rsp), %rax movslq 0x894(%rsp), %rcx movl (%rax,%rcx,4), %eax shll $0x2, %eax movl %eax, 0x890(%rsp) movq 0x8a8(%rsp), %rax movslq 0x890(%rsp), %rcx leaq (%rax,%rcx,4), %rax movq %rax, 0x888(%rsp) movq 0x8a0(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0x107c(%rsp) vbroadcastss 0x107c(%rsp), %xmm0 vmovaps %xmm0, 0x1060(%rsp) vmovaps 0x1060(%rsp), %xmm0 vmovaps %xmm0, 0x870(%rsp) movq 0x8a0(%rsp), %rax vmovss 0x4(%rax), %xmm0 vmovss %xmm0, 0x105c(%rsp) vbroadcastss 0x105c(%rsp), %xmm0 vmovaps %xmm0, 0x1040(%rsp) vmovaps 0x1040(%rsp), %xmm0 vmovaps %xmm0, 0x860(%rsp) movq 0x8a0(%rsp), %rax vmovss 0x8(%rax), %xmm0 vmovss %xmm0, 0x103c(%rsp) vbroadcastss 0x103c(%rsp), %xmm0 vmovaps %xmm0, 0x1020(%rsp) vmovaps 0x1020(%rsp), %xmm0 vmovaps %xmm0, 0x850(%rsp) movq 0x8a0(%rsp), %rax vmovss 0xc(%rax), %xmm0 vmovss %xmm0, 0x101c(%rsp) vbroadcastss 0x101c(%rsp), %xmm0 vmovaps %xmm0, 0x1000(%rsp) vmovaps 0x1000(%rsp), %xmm0 vmovaps %xmm0, 0x840(%rsp) movq 0x888(%rsp), %rax addq $-0x10, %rax movq %rax, 0xbe8(%rsp) movq 0xbe8(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x830(%rsp) movq 0x888(%rsp), %rax movq %rax, 0xbe0(%rsp) movq 0xbe0(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x820(%rsp) movq 0x888(%rsp), %rax addq $0x10, %rax movq %rax, 0xbd8(%rsp) movq 0xbd8(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x810(%rsp) movq 0x888(%rsp), %rax addq $0x20, %rax movq %rax, 0xbd0(%rsp) movq 0xbd0(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x800(%rsp) vmovaps 0x830(%rsp), %xmm1 vmovaps 0x870(%rsp), %xmm0 vmovaps %xmm1, 
0x11d0(%rsp) vmovaps %xmm0, 0x11c0(%rsp) vmovaps 0x11d0(%rsp), %xmm0 vmulps 0x11c0(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x7f0(%rsp) vmovaps 0x820(%rsp), %xmm2 vmovaps 0x860(%rsp), %xmm1 vmovaps 0x7f0(%rsp), %xmm0 vmovaps %xmm2, 0x1800(%rsp) vmovaps %xmm1, 0x17f0(%rsp) vmovaps %xmm0, 0x17e0(%rsp) vmovaps 0x1800(%rsp), %xmm2 vmovaps 0x17f0(%rsp), %xmm1 vmovaps 0x17e0(%rsp), %xmm0 vmovaps %xmm2, 0x18f0(%rsp) vmovaps %xmm1, 0x18e0(%rsp) vmovaps %xmm0, 0x18d0(%rsp) vmovaps 0x18f0(%rsp), %xmm1 vmovaps 0x18e0(%rsp), %xmm0 vmovaps 0x18d0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x240(%rsp) vmovaps 0x240(%rsp), %xmm0 vmovaps %xmm0, 0x7f0(%rsp) vmovaps 0x810(%rsp), %xmm2 vmovaps 0x850(%rsp), %xmm1 vmovaps 0x7f0(%rsp), %xmm0 vmovaps %xmm2, 0x17d0(%rsp) vmovaps %xmm1, 0x17c0(%rsp) vmovaps %xmm0, 0x17b0(%rsp) vmovaps 0x17d0(%rsp), %xmm2 vmovaps 0x17c0(%rsp), %xmm1 vmovaps 0x17b0(%rsp), %xmm0 vmovaps %xmm2, 0x1920(%rsp) vmovaps %xmm1, 0x1910(%rsp) vmovaps %xmm0, 0x1900(%rsp) vmovaps 0x1920(%rsp), %xmm1 vmovaps 0x1910(%rsp), %xmm0 vmovaps 0x1900(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x230(%rsp) vmovaps 0x230(%rsp), %xmm0 vmovaps %xmm0, 0x7f0(%rsp) vmovaps 0x800(%rsp), %xmm2 vmovaps 0x840(%rsp), %xmm1 vmovaps 0x7f0(%rsp), %xmm0 vmovaps %xmm2, 0x17a0(%rsp) vmovaps %xmm1, 0x1790(%rsp) vmovaps %xmm0, 0x1780(%rsp) vmovaps 0x17a0(%rsp), %xmm2 vmovaps 0x1790(%rsp), %xmm1 vmovaps 0x1780(%rsp), %xmm0 vmovaps %xmm2, 0x1950(%rsp) vmovaps %xmm1, 0x1940(%rsp) vmovaps %xmm0, 0x1930(%rsp) vmovaps 0x1950(%rsp), %xmm1 vmovaps 0x1940(%rsp), %xmm0 vmovaps 0x1930(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x220(%rsp) vmovaps 0x220(%rsp), %xmm0 vmovaps %xmm0, 0x7f0(%rsp) movq 0x898(%rsp), %rax movl 0x894(%rsp), %ecx shll $0x2, %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax vmovaps 0x7f0(%rsp), %xmm0 movq %rax, 0xdf8(%rsp) vmovaps %xmm0, 0xde0(%rsp) 
vmovaps 0xde0(%rsp), %xmm0 movq 0xdf8(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x8a0(%rsp), %rax addq $0x10, %rax movq %rax, 0x8a0(%rsp) movl 0x894(%rsp), %eax addl $0x1, %eax movl %eax, 0x894(%rsp) jmp 0x184eeeb jmp 0x18518e9 movl 0x8bc(%rsp), %eax movl 0x8c4(%rsp), %ecx addl $0x2, %ecx cmpl %ecx, %eax jne 0x184fc7e movq 0x8e0(%rsp), %rax movq %rax, 0x7e8(%rsp) movq 0x8d8(%rsp), %rax movq %rax, 0x7e0(%rsp) movq 0x8d0(%rsp), %rax movq %rax, 0x8e0(%rsp) movq 0x8c8(%rsp), %rax movq %rax, 0x8d8(%rsp) movq 0x7e8(%rsp), %rax movq %rax, 0x8d0(%rsp) movq 0x7e0(%rsp), %rax movq %rax, 0x8c8(%rsp) movq 0xa48(%rsp), %rcx movl 0x8bc(%rsp), %eax addl $0x1, %eax movq %rcx, 0xc78(%rsp) movl %eax, 0xc74(%rsp) movq 0xc78(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0xc74(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x7d8(%rsp) movq 0xa48(%rsp), %rcx movl 0x8bc(%rsp), %eax addl $0x2, %eax movq %rcx, 0xc68(%rsp) movl %eax, 0xc64(%rsp) movq 0xc68(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0xc64(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x7d0(%rsp) movq 0xa38(%rsp), %rax movq %rax, 0x7c8(%rsp) movq 0x8d0(%rsp), %rax movq %rax, 0x7c0(%rsp) movq 0x8c8(%rsp), %rax movq %rax, 0x7b8(%rsp) movl $0x0, 0x7b4(%rsp) movl 0x7b4(%rsp), %eax cmpl 0xa1c(%rsp), %eax jge 0x184fc79 movq 0xa30(%rsp), %rax movslq 0x7b4(%rsp), %rcx movl (%rax,%rcx,4), %eax shll $0x2, %eax movl %eax, 0x7b0(%rsp) movq 0x7d8(%rsp), %rax movslq 0x7b0(%rsp), %rcx leaq (%rax,%rcx,4), %rax movq %rax, 0x7a8(%rsp) movq 0x7d0(%rsp), %rax movslq 0x7b0(%rsp), %rcx leaq (%rax,%rcx,4), %rax movq %rax, 0x7a0(%rsp) movq 0x7c8(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0xffc(%rsp) vbroadcastss 0xffc(%rsp), %xmm0 vmovaps %xmm0, 0xfe0(%rsp) vmovaps 0xfe0(%rsp), %xmm0 vmovaps %xmm0, 0x790(%rsp) movq 0x7c8(%rsp), %rax vmovss 0x4(%rax), %xmm0 vmovss %xmm0, 0xfdc(%rsp) vbroadcastss 0xfdc(%rsp), %xmm0 vmovaps %xmm0, 0xfc0(%rsp) vmovaps 
0xfc0(%rsp), %xmm0 vmovaps %xmm0, 0x780(%rsp) movq 0x7c8(%rsp), %rax vmovss 0x8(%rax), %xmm0 vmovss %xmm0, 0xfbc(%rsp) vbroadcastss 0xfbc(%rsp), %xmm0 vmovaps %xmm0, 0xfa0(%rsp) vmovaps 0xfa0(%rsp), %xmm0 vmovaps %xmm0, 0x770(%rsp) movq 0x7c8(%rsp), %rax vmovss 0xc(%rax), %xmm0 vmovss %xmm0, 0xf9c(%rsp) vbroadcastss 0xf9c(%rsp), %xmm0 vmovaps %xmm0, 0xf80(%rsp) vmovaps 0xf80(%rsp), %xmm0 vmovaps %xmm0, 0x760(%rsp) movq 0x7a8(%rsp), %rax addq $-0x10, %rax movq %rax, 0xbc8(%rsp) movq 0xbc8(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x750(%rsp) movq 0x7a8(%rsp), %rax movq %rax, 0xbc0(%rsp) movq 0xbc0(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x740(%rsp) movq 0x7a8(%rsp), %rax addq $0x10, %rax movq %rax, 0xbb8(%rsp) movq 0xbb8(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x730(%rsp) movq 0x7a8(%rsp), %rax addq $0x20, %rax movq %rax, 0xbb0(%rsp) movq 0xbb0(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x720(%rsp) movq 0x7a0(%rsp), %rax addq $-0x10, %rax movq %rax, 0xba8(%rsp) movq 0xba8(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x710(%rsp) movq 0x7a0(%rsp), %rax movq %rax, 0xba0(%rsp) movq 0xba0(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x700(%rsp) movq 0x7a0(%rsp), %rax addq $0x10, %rax movq %rax, 0xb98(%rsp) movq 0xb98(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x6f0(%rsp) movq 0x7a0(%rsp), %rax addq $0x20, %rax movq %rax, 0xb90(%rsp) movq 0xb90(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x6e0(%rsp) vmovaps 0x750(%rsp), %xmm1 vmovaps 0x790(%rsp), %xmm0 vmovaps %xmm1, 0x11b0(%rsp) vmovaps %xmm0, 0x11a0(%rsp) vmovaps 0x11b0(%rsp), %xmm0 vmulps 0x11a0(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x6d0(%rsp) vmovaps 0x710(%rsp), %xmm1 vmovaps 0x790(%rsp), %xmm0 vmovaps %xmm1, 0x1190(%rsp) vmovaps %xmm0, 0x1180(%rsp) vmovaps 0x1190(%rsp), %xmm0 vmulps 0x1180(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x6c0(%rsp) vmovaps 0x740(%rsp), %xmm2 vmovaps 0x780(%rsp), %xmm1 vmovaps 0x6d0(%rsp), %xmm0 vmovaps %xmm2, 0x1770(%rsp) vmovaps %xmm1, 
0x1760(%rsp) vmovaps %xmm0, 0x1750(%rsp) vmovaps 0x1770(%rsp), %xmm2 vmovaps 0x1760(%rsp), %xmm1 vmovaps 0x1750(%rsp), %xmm0 vmovaps %xmm2, 0x1980(%rsp) vmovaps %xmm1, 0x1970(%rsp) vmovaps %xmm0, 0x1960(%rsp) vmovaps 0x1980(%rsp), %xmm1 vmovaps 0x1970(%rsp), %xmm0 vmovaps 0x1960(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x210(%rsp) vmovaps 0x210(%rsp), %xmm0 vmovaps %xmm0, 0x6d0(%rsp) vmovaps 0x700(%rsp), %xmm2 vmovaps 0x780(%rsp), %xmm1 vmovaps 0x6c0(%rsp), %xmm0 vmovaps %xmm2, 0x1740(%rsp) vmovaps %xmm1, 0x1730(%rsp) vmovaps %xmm0, 0x1720(%rsp) vmovaps 0x1740(%rsp), %xmm2 vmovaps 0x1730(%rsp), %xmm1 vmovaps 0x1720(%rsp), %xmm0 vmovaps %xmm2, 0x19b0(%rsp) vmovaps %xmm1, 0x19a0(%rsp) vmovaps %xmm0, 0x1990(%rsp) vmovaps 0x19b0(%rsp), %xmm1 vmovaps 0x19a0(%rsp), %xmm0 vmovaps 0x1990(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x200(%rsp) vmovaps 0x200(%rsp), %xmm0 vmovaps %xmm0, 0x6c0(%rsp) vmovaps 0x730(%rsp), %xmm2 vmovaps 0x770(%rsp), %xmm1 vmovaps 0x6d0(%rsp), %xmm0 vmovaps %xmm2, 0x1710(%rsp) vmovaps %xmm1, 0x1700(%rsp) vmovaps %xmm0, 0x16f0(%rsp) vmovaps 0x1710(%rsp), %xmm2 vmovaps 0x1700(%rsp), %xmm1 vmovaps 0x16f0(%rsp), %xmm0 vmovaps %xmm2, 0x19e0(%rsp) vmovaps %xmm1, 0x19d0(%rsp) vmovaps %xmm0, 0x19c0(%rsp) vmovaps 0x19e0(%rsp), %xmm1 vmovaps 0x19d0(%rsp), %xmm0 vmovaps 0x19c0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x1f0(%rsp) vmovaps 0x1f0(%rsp), %xmm0 vmovaps %xmm0, 0x6d0(%rsp) vmovaps 0x6f0(%rsp), %xmm2 vmovaps 0x770(%rsp), %xmm1 vmovaps 0x6c0(%rsp), %xmm0 vmovaps %xmm2, 0x16e0(%rsp) vmovaps %xmm1, 0x16d0(%rsp) vmovaps %xmm0, 0x16c0(%rsp) vmovaps 0x16e0(%rsp), %xmm2 vmovaps 0x16d0(%rsp), %xmm1 vmovaps 0x16c0(%rsp), %xmm0 vmovaps %xmm2, 0x1a10(%rsp) vmovaps %xmm1, 0x1a00(%rsp) vmovaps %xmm0, 0x19f0(%rsp) vmovaps 0x1a10(%rsp), %xmm1 vmovaps 0x1a00(%rsp), %xmm0 vmovaps 0x19f0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, 
%xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x1e0(%rsp) vmovaps 0x1e0(%rsp), %xmm0 vmovaps %xmm0, 0x6c0(%rsp) vmovaps 0x720(%rsp), %xmm2 vmovaps 0x760(%rsp), %xmm1 vmovaps 0x6d0(%rsp), %xmm0 vmovaps %xmm2, 0x16b0(%rsp) vmovaps %xmm1, 0x16a0(%rsp) vmovaps %xmm0, 0x1690(%rsp) vmovaps 0x16b0(%rsp), %xmm2 vmovaps 0x16a0(%rsp), %xmm1 vmovaps 0x1690(%rsp), %xmm0 vmovaps %xmm2, 0x1a40(%rsp) vmovaps %xmm1, 0x1a30(%rsp) vmovaps %xmm0, 0x1a20(%rsp) vmovaps 0x1a40(%rsp), %xmm1 vmovaps 0x1a30(%rsp), %xmm0 vmovaps 0x1a20(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x1d0(%rsp) vmovaps 0x1d0(%rsp), %xmm0 vmovaps %xmm0, 0x6d0(%rsp) vmovaps 0x6e0(%rsp), %xmm2 vmovaps 0x760(%rsp), %xmm1 vmovaps 0x6c0(%rsp), %xmm0 vmovaps %xmm2, 0x1680(%rsp) vmovaps %xmm1, 0x1670(%rsp) vmovaps %xmm0, 0x1660(%rsp) vmovaps 0x1680(%rsp), %xmm2 vmovaps 0x1670(%rsp), %xmm1 vmovaps 0x1660(%rsp), %xmm0 vmovaps %xmm2, 0x1a70(%rsp) vmovaps %xmm1, 0x1a60(%rsp) vmovaps %xmm0, 0x1a50(%rsp) vmovaps 0x1a70(%rsp), %xmm1 vmovaps 0x1a60(%rsp), %xmm0 vmovaps 0x1a50(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x1c0(%rsp) vmovaps 0x1c0(%rsp), %xmm0 vmovaps %xmm0, 0x6c0(%rsp) movq 0x7c0(%rsp), %rax movl 0x7b4(%rsp), %ecx shll $0x2, %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax vmovaps 0x6d0(%rsp), %xmm0 movq %rax, 0xdd8(%rsp) vmovaps %xmm0, 0xdc0(%rsp) vmovaps 0xdc0(%rsp), %xmm0 movq 0xdd8(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x7b8(%rsp), %rax movl 0x7b4(%rsp), %ecx shll $0x2, %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax vmovaps 0x6c0(%rsp), %xmm0 movq %rax, 0xdb8(%rsp) vmovaps %xmm0, 0xda0(%rsp) vmovaps 0xda0(%rsp), %xmm0 movq 0xdb8(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x7c8(%rsp), %rax addq $0x10, %rax movq %rax, 0x7c8(%rsp) movl 0x7b4(%rsp), %eax addl $0x1, %eax movl %eax, 0x7b4(%rsp) jmp 0x184f4bd jmp 0x18518e7 movl 0x8bc(%rsp), %eax movl 0x8c4(%rsp), %ecx addl $0x3, %ecx cmpl %ecx, %eax jne 
0x185092f movq 0x8e0(%rsp), %rax movq %rax, 0x6b8(%rsp) movq 0x8d8(%rsp), %rax movq %rax, 0x6b0(%rsp) movq 0x8d0(%rsp), %rax movq %rax, 0x6a8(%rsp) movq 0x8c8(%rsp), %rax movq %rax, 0x8e0(%rsp) movq 0x6b8(%rsp), %rax movq %rax, 0x8d8(%rsp) movq 0x6b0(%rsp), %rax movq %rax, 0x8d0(%rsp) movq 0x6a8(%rsp), %rax movq %rax, 0x8c8(%rsp) movq 0xa48(%rsp), %rcx movl 0x8bc(%rsp), %eax movq %rcx, 0xc58(%rsp) movl %eax, 0xc54(%rsp) movq 0xc58(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0xc54(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x6a0(%rsp) movq 0xa48(%rsp), %rcx movl 0x8bc(%rsp), %eax addl $0x1, %eax movq %rcx, 0xc48(%rsp) movl %eax, 0xc44(%rsp) movq 0xc48(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0xc44(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x698(%rsp) movq 0xa48(%rsp), %rcx movl 0x8bc(%rsp), %eax addl $0x2, %eax movq %rcx, 0xc38(%rsp) movl %eax, 0xc34(%rsp) movq 0xc38(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0xc34(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x690(%rsp) movq 0xa38(%rsp), %rax movq %rax, 0x688(%rsp) movq 0x8d8(%rsp), %rax movq %rax, 0x680(%rsp) movq 0x8d0(%rsp), %rax movq %rax, 0x678(%rsp) movq 0x8c8(%rsp), %rax movq %rax, 0x670(%rsp) movl $0x0, 0x66c(%rsp) movl 0x66c(%rsp), %eax cmpl 0xa1c(%rsp), %eax jge 0x185092a movq 0xa30(%rsp), %rax movslq 0x66c(%rsp), %rcx movl (%rax,%rcx,4), %eax shll $0x2, %eax movl %eax, 0x668(%rsp) movq 0x6a0(%rsp), %rax movslq 0x668(%rsp), %rcx leaq (%rax,%rcx,4), %rax movq %rax, 0x660(%rsp) movq 0x698(%rsp), %rax movslq 0x668(%rsp), %rcx leaq (%rax,%rcx,4), %rax movq %rax, 0x658(%rsp) movq 0x690(%rsp), %rax movslq 0x668(%rsp), %rcx leaq (%rax,%rcx,4), %rax movq %rax, 0x650(%rsp) movq 0x688(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0xf7c(%rsp) vbroadcastss 0xf7c(%rsp), %xmm0 vmovaps %xmm0, 0xf60(%rsp) vmovaps 0xf60(%rsp), %xmm0 vmovaps %xmm0, 0x640(%rsp) 
movq 0x688(%rsp), %rax vmovss 0x4(%rax), %xmm0 vmovss %xmm0, 0xf5c(%rsp) vbroadcastss 0xf5c(%rsp), %xmm0 vmovaps %xmm0, 0xf40(%rsp) vmovaps 0xf40(%rsp), %xmm0 vmovaps %xmm0, 0x630(%rsp) movq 0x688(%rsp), %rax vmovss 0x8(%rax), %xmm0 vmovss %xmm0, 0xf3c(%rsp) vbroadcastss 0xf3c(%rsp), %xmm0 vmovaps %xmm0, 0xf20(%rsp) vmovaps 0xf20(%rsp), %xmm0 vmovaps %xmm0, 0x620(%rsp) movq 0x688(%rsp), %rax vmovss 0xc(%rax), %xmm0 vmovss %xmm0, 0xf1c(%rsp) vbroadcastss 0xf1c(%rsp), %xmm0 vmovaps %xmm0, 0xf00(%rsp) vmovaps 0xf00(%rsp), %xmm0 vmovaps %xmm0, 0x610(%rsp) movq 0x660(%rsp), %rax addq $-0x10, %rax movq %rax, 0xb88(%rsp) movq 0xb88(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x600(%rsp) movq 0x660(%rsp), %rax movq %rax, 0xb80(%rsp) movq 0xb80(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x5f0(%rsp) movq 0x660(%rsp), %rax addq $0x10, %rax movq %rax, 0xb78(%rsp) movq 0xb78(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x5e0(%rsp) movq 0x660(%rsp), %rax addq $0x20, %rax movq %rax, 0xb70(%rsp) movq 0xb70(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x5d0(%rsp) movq 0x658(%rsp), %rax addq $-0x10, %rax movq %rax, 0xb68(%rsp) movq 0xb68(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x5c0(%rsp) movq 0x658(%rsp), %rax movq %rax, 0xb60(%rsp) movq 0xb60(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x5b0(%rsp) movq 0x658(%rsp), %rax addq $0x10, %rax movq %rax, 0xb58(%rsp) movq 0xb58(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x5a0(%rsp) movq 0x658(%rsp), %rax addq $0x20, %rax movq %rax, 0xb50(%rsp) movq 0xb50(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x590(%rsp) movq 0x650(%rsp), %rax addq $-0x10, %rax movq %rax, 0xb48(%rsp) movq 0xb48(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x580(%rsp) movq 0x650(%rsp), %rax movq %rax, 0xb40(%rsp) movq 0xb40(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x570(%rsp) movq 0x650(%rsp), %rax addq $0x10, %rax movq %rax, 0xb38(%rsp) movq 0xb38(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x560(%rsp) movq 
0x650(%rsp), %rax addq $0x20, %rax movq %rax, 0xb30(%rsp) movq 0xb30(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x550(%rsp) vmovaps 0x600(%rsp), %xmm1 vmovaps 0x640(%rsp), %xmm0 vmovaps %xmm1, 0x1170(%rsp) vmovaps %xmm0, 0x1160(%rsp) vmovaps 0x1170(%rsp), %xmm0 vmulps 0x1160(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x540(%rsp) vmovaps 0x5c0(%rsp), %xmm1 vmovaps 0x640(%rsp), %xmm0 vmovaps %xmm1, 0x1150(%rsp) vmovaps %xmm0, 0x1140(%rsp) vmovaps 0x1150(%rsp), %xmm0 vmulps 0x1140(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x530(%rsp) vmovaps 0x580(%rsp), %xmm1 vmovaps 0x640(%rsp), %xmm0 vmovaps %xmm1, 0x1130(%rsp) vmovaps %xmm0, 0x1120(%rsp) vmovaps 0x1130(%rsp), %xmm0 vmulps 0x1120(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x520(%rsp) vmovaps 0x5f0(%rsp), %xmm2 vmovaps 0x630(%rsp), %xmm1 vmovaps 0x540(%rsp), %xmm0 vmovaps %xmm2, 0x1650(%rsp) vmovaps %xmm1, 0x1640(%rsp) vmovaps %xmm0, 0x1630(%rsp) vmovaps 0x1650(%rsp), %xmm2 vmovaps 0x1640(%rsp), %xmm1 vmovaps 0x1630(%rsp), %xmm0 vmovaps %xmm2, 0x1aa0(%rsp) vmovaps %xmm1, 0x1a90(%rsp) vmovaps %xmm0, 0x1a80(%rsp) vmovaps 0x1aa0(%rsp), %xmm1 vmovaps 0x1a90(%rsp), %xmm0 vmovaps 0x1a80(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x1b0(%rsp) vmovaps 0x1b0(%rsp), %xmm0 vmovaps %xmm0, 0x540(%rsp) vmovaps 0x5b0(%rsp), %xmm2 vmovaps 0x630(%rsp), %xmm1 vmovaps 0x530(%rsp), %xmm0 vmovaps %xmm2, 0x1620(%rsp) vmovaps %xmm1, 0x1610(%rsp) vmovaps %xmm0, 0x1600(%rsp) vmovaps 0x1620(%rsp), %xmm2 vmovaps 0x1610(%rsp), %xmm1 vmovaps 0x1600(%rsp), %xmm0 vmovaps %xmm2, 0x1ad0(%rsp) vmovaps %xmm1, 0x1ac0(%rsp) vmovaps %xmm0, 0x1ab0(%rsp) vmovaps 0x1ad0(%rsp), %xmm1 vmovaps 0x1ac0(%rsp), %xmm0 vmovaps 0x1ab0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x1a0(%rsp) vmovaps 0x1a0(%rsp), %xmm0 vmovaps %xmm0, 0x530(%rsp) vmovaps 0x570(%rsp), %xmm2 vmovaps 0x630(%rsp), %xmm1 vmovaps 0x520(%rsp), %xmm0 vmovaps %xmm2, 0x15f0(%rsp) vmovaps %xmm1, 0x15e0(%rsp) vmovaps %xmm0, 
0x15d0(%rsp) vmovaps 0x15f0(%rsp), %xmm2 vmovaps 0x15e0(%rsp), %xmm1 vmovaps 0x15d0(%rsp), %xmm0 vmovaps %xmm2, 0x1b00(%rsp) vmovaps %xmm1, 0x1af0(%rsp) vmovaps %xmm0, 0x1ae0(%rsp) vmovaps 0x1b00(%rsp), %xmm1 vmovaps 0x1af0(%rsp), %xmm0 vmovaps 0x1ae0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x190(%rsp) vmovaps 0x190(%rsp), %xmm0 vmovaps %xmm0, 0x520(%rsp) vmovaps 0x5e0(%rsp), %xmm2 vmovaps 0x620(%rsp), %xmm1 vmovaps 0x540(%rsp), %xmm0 vmovaps %xmm2, 0x15c0(%rsp) vmovaps %xmm1, 0x15b0(%rsp) vmovaps %xmm0, 0x15a0(%rsp) vmovaps 0x15c0(%rsp), %xmm2 vmovaps 0x15b0(%rsp), %xmm1 vmovaps 0x15a0(%rsp), %xmm0 vmovaps %xmm2, 0x1b30(%rsp) vmovaps %xmm1, 0x1b20(%rsp) vmovaps %xmm0, 0x1b10(%rsp) vmovaps 0x1b30(%rsp), %xmm1 vmovaps 0x1b20(%rsp), %xmm0 vmovaps 0x1b10(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x180(%rsp) vmovaps 0x180(%rsp), %xmm0 vmovaps %xmm0, 0x540(%rsp) vmovaps 0x5a0(%rsp), %xmm2 vmovaps 0x620(%rsp), %xmm1 vmovaps 0x530(%rsp), %xmm0 vmovaps %xmm2, 0x1590(%rsp) vmovaps %xmm1, 0x1580(%rsp) vmovaps %xmm0, 0x1570(%rsp) vmovaps 0x1590(%rsp), %xmm2 vmovaps 0x1580(%rsp), %xmm1 vmovaps 0x1570(%rsp), %xmm0 vmovaps %xmm2, 0x1b60(%rsp) vmovaps %xmm1, 0x1b50(%rsp) vmovaps %xmm0, 0x1b40(%rsp) vmovaps 0x1b60(%rsp), %xmm1 vmovaps 0x1b50(%rsp), %xmm0 vmovaps 0x1b40(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x170(%rsp) vmovaps 0x170(%rsp), %xmm0 vmovaps %xmm0, 0x530(%rsp) vmovaps 0x560(%rsp), %xmm2 vmovaps 0x620(%rsp), %xmm1 vmovaps 0x520(%rsp), %xmm0 vmovaps %xmm2, 0x1560(%rsp) vmovaps %xmm1, 0x1550(%rsp) vmovaps %xmm0, 0x1540(%rsp) vmovaps 0x1560(%rsp), %xmm2 vmovaps 0x1550(%rsp), %xmm1 vmovaps 0x1540(%rsp), %xmm0 vmovaps %xmm2, 0x1b90(%rsp) vmovaps %xmm1, 0x1b80(%rsp) vmovaps %xmm0, 0x1b70(%rsp) vmovaps 0x1b90(%rsp), %xmm1 vmovaps 0x1b80(%rsp), %xmm0 vmovaps 0x1b70(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) 
+ xmm2 vmovaps %xmm0, 0x160(%rsp) vmovaps 0x160(%rsp), %xmm0 vmovaps %xmm0, 0x520(%rsp) vmovaps 0x5d0(%rsp), %xmm2 vmovaps 0x610(%rsp), %xmm1 vmovaps 0x540(%rsp), %xmm0 vmovaps %xmm2, 0x1530(%rsp) vmovaps %xmm1, 0x1520(%rsp) vmovaps %xmm0, 0x1510(%rsp) vmovaps 0x1530(%rsp), %xmm2 vmovaps 0x1520(%rsp), %xmm1 vmovaps 0x1510(%rsp), %xmm0 vmovaps %xmm2, 0x1bc0(%rsp) vmovaps %xmm1, 0x1bb0(%rsp) vmovaps %xmm0, 0x1ba0(%rsp) vmovaps 0x1bc0(%rsp), %xmm1 vmovaps 0x1bb0(%rsp), %xmm0 vmovaps 0x1ba0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x150(%rsp) vmovaps 0x150(%rsp), %xmm0 vmovaps %xmm0, 0x540(%rsp) vmovaps 0x590(%rsp), %xmm2 vmovaps 0x610(%rsp), %xmm1 vmovaps 0x530(%rsp), %xmm0 vmovaps %xmm2, 0x1500(%rsp) vmovaps %xmm1, 0x14f0(%rsp) vmovaps %xmm0, 0x14e0(%rsp) vmovaps 0x1500(%rsp), %xmm2 vmovaps 0x14f0(%rsp), %xmm1 vmovaps 0x14e0(%rsp), %xmm0 vmovaps %xmm2, 0x1bf0(%rsp) vmovaps %xmm1, 0x1be0(%rsp) vmovaps %xmm0, 0x1bd0(%rsp) vmovaps 0x1bf0(%rsp), %xmm1 vmovaps 0x1be0(%rsp), %xmm0 vmovaps 0x1bd0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x140(%rsp) vmovaps 0x140(%rsp), %xmm0 vmovaps %xmm0, 0x530(%rsp) vmovaps 0x550(%rsp), %xmm2 vmovaps 0x610(%rsp), %xmm1 vmovaps 0x520(%rsp), %xmm0 vmovaps %xmm2, 0x14d0(%rsp) vmovaps %xmm1, 0x14c0(%rsp) vmovaps %xmm0, 0x14b0(%rsp) vmovaps 0x14d0(%rsp), %xmm2 vmovaps 0x14c0(%rsp), %xmm1 vmovaps 0x14b0(%rsp), %xmm0 vmovaps %xmm2, 0x1c20(%rsp) vmovaps %xmm1, 0x1c10(%rsp) vmovaps %xmm0, 0x1c00(%rsp) vmovaps 0x1c20(%rsp), %xmm1 vmovaps 0x1c10(%rsp), %xmm0 vmovaps 0x1c00(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x130(%rsp) vmovaps 0x130(%rsp), %xmm0 vmovaps %xmm0, 0x520(%rsp) movq 0x680(%rsp), %rax movl 0x66c(%rsp), %ecx shll $0x2, %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax vmovaps 0x540(%rsp), %xmm0 movq %rax, 0xd98(%rsp) vmovaps %xmm0, 0xd80(%rsp) vmovaps 0xd80(%rsp), %xmm0 movq 
0xd98(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x678(%rsp), %rax movl 0x66c(%rsp), %ecx shll $0x2, %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax vmovaps 0x530(%rsp), %xmm0 movq %rax, 0xd78(%rsp) vmovaps %xmm0, 0xd60(%rsp) vmovaps 0xd60(%rsp), %xmm0 movq 0xd78(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x670(%rsp), %rax movl 0x66c(%rsp), %ecx shll $0x2, %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax vmovaps 0x520(%rsp), %xmm0 movq %rax, 0xd58(%rsp) vmovaps %xmm0, 0xd40(%rsp) vmovaps 0xd40(%rsp), %xmm0 movq 0xd58(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x688(%rsp), %rax addq $0x10, %rax movq %rax, 0x688(%rsp) movl 0x66c(%rsp), %eax addl $0x1, %eax movl %eax, 0x66c(%rsp) jmp 0x184fe33 jmp 0x18518e5 movq 0xa48(%rsp), %rcx movl 0x8bc(%rsp), %eax subl $0x1, %eax movq %rcx, 0xc28(%rsp) movl %eax, 0xc24(%rsp) movq 0xc28(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0xc24(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x518(%rsp) movq 0xa48(%rsp), %rcx movl 0x8bc(%rsp), %eax movq %rcx, 0xc18(%rsp) movl %eax, 0xc14(%rsp) movq 0xc18(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0xc14(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x510(%rsp) movq 0xa48(%rsp), %rcx movl 0x8bc(%rsp), %eax addl $0x1, %eax movq %rcx, 0xc08(%rsp) movl %eax, 0xc04(%rsp) movq 0xc08(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0xc04(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x508(%rsp) movq 0xa48(%rsp), %rcx movl 0x8bc(%rsp), %eax addl $0x2, %eax movq %rcx, 0xbf8(%rsp) movl %eax, 0xbf4(%rsp) movq 0xbf8(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0xbf4(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x500(%rsp) movq 0xa38(%rsp), %rax movq %rax, 0x4f8(%rsp) movq 0x8e0(%rsp), %rax movq %rax, 0x4f0(%rsp) movq 0x8d8(%rsp), %rax movq %rax, 0x4e8(%rsp) movq 0x8d0(%rsp), %rax movq %rax, 0x4e0(%rsp) movq 
0x8c8(%rsp), %rax movq %rax, 0x4d8(%rsp) movl $0x0, 0x4d4(%rsp) movl 0x4d4(%rsp), %eax cmpl 0xa1c(%rsp), %eax jge 0x18518e3 movq 0xa30(%rsp), %rax movslq 0x4d4(%rsp), %rcx movl (%rax,%rcx,4), %eax shll $0x2, %eax movl %eax, 0x4d0(%rsp) movq 0x518(%rsp), %rax movslq 0x4d0(%rsp), %rcx leaq (%rax,%rcx,4), %rax movq %rax, 0x4c8(%rsp) movq 0x510(%rsp), %rax movslq 0x4d0(%rsp), %rcx leaq (%rax,%rcx,4), %rax movq %rax, 0x4c0(%rsp) movq 0x508(%rsp), %rax movslq 0x4d0(%rsp), %rcx leaq (%rax,%rcx,4), %rax movq %rax, 0x4b8(%rsp) movq 0x500(%rsp), %rax movslq 0x4d0(%rsp), %rcx leaq (%rax,%rcx,4), %rax movq %rax, 0x4b0(%rsp) movq 0x4f8(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0xefc(%rsp) vbroadcastss 0xefc(%rsp), %xmm0 vmovaps %xmm0, 0xee0(%rsp) vmovaps 0xee0(%rsp), %xmm0 vmovaps %xmm0, 0x4a0(%rsp) movq 0x4f8(%rsp), %rax vmovss 0x4(%rax), %xmm0 vmovss %xmm0, 0xedc(%rsp) vbroadcastss 0xedc(%rsp), %xmm0 vmovaps %xmm0, 0xec0(%rsp) vmovaps 0xec0(%rsp), %xmm0 vmovaps %xmm0, 0x490(%rsp) movq 0x4f8(%rsp), %rax vmovss 0x8(%rax), %xmm0 vmovss %xmm0, 0xebc(%rsp) vbroadcastss 0xebc(%rsp), %xmm0 vmovaps %xmm0, 0xea0(%rsp) vmovaps 0xea0(%rsp), %xmm0 vmovaps %xmm0, 0x480(%rsp) movq 0x4f8(%rsp), %rax vmovss 0xc(%rax), %xmm0 vmovss %xmm0, 0xe9c(%rsp) vbroadcastss 0xe9c(%rsp), %xmm0 vmovaps %xmm0, 0xe80(%rsp) vmovaps 0xe80(%rsp), %xmm0 vmovaps %xmm0, 0x470(%rsp) movq 0x4c8(%rsp), %rax addq $-0x10, %rax movq %rax, 0xb28(%rsp) movq 0xb28(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x460(%rsp) movq 0x4c8(%rsp), %rax movq %rax, 0xb20(%rsp) movq 0xb20(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x450(%rsp) movq 0x4c8(%rsp), %rax addq $0x10, %rax movq %rax, 0xb18(%rsp) movq 0xb18(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x440(%rsp) movq 0x4c8(%rsp), %rax addq $0x20, %rax movq %rax, 0xb10(%rsp) movq 0xb10(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x430(%rsp) movq 0x4c0(%rsp), %rax addq $-0x10, %rax movq %rax, 0xb08(%rsp) movq 0xb08(%rsp), %rax vmovaps (%rax), %xmm0 
vmovaps %xmm0, 0x420(%rsp) movq 0x4c0(%rsp), %rax movq %rax, 0xb00(%rsp) movq 0xb00(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x410(%rsp) movq 0x4c0(%rsp), %rax addq $0x10, %rax movq %rax, 0xaf8(%rsp) movq 0xaf8(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x400(%rsp) movq 0x4c0(%rsp), %rax addq $0x20, %rax movq %rax, 0xaf0(%rsp) movq 0xaf0(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x3f0(%rsp) movq 0x4b8(%rsp), %rax addq $-0x10, %rax movq %rax, 0xae8(%rsp) movq 0xae8(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x3e0(%rsp) movq 0x4b8(%rsp), %rax movq %rax, 0xae0(%rsp) movq 0xae0(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x3d0(%rsp) movq 0x4b8(%rsp), %rax addq $0x10, %rax movq %rax, 0xad8(%rsp) movq 0xad8(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x3c0(%rsp) movq 0x4b8(%rsp), %rax addq $0x20, %rax movq %rax, 0xad0(%rsp) movq 0xad0(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x3b0(%rsp) movq 0x4b0(%rsp), %rax addq $-0x10, %rax movq %rax, 0xac8(%rsp) movq 0xac8(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x3a0(%rsp) movq 0x4b0(%rsp), %rax movq %rax, 0xac0(%rsp) movq 0xac0(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x390(%rsp) movq 0x4b0(%rsp), %rax addq $0x10, %rax movq %rax, 0xab8(%rsp) movq 0xab8(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x380(%rsp) movq 0x4b0(%rsp), %rax addq $0x20, %rax movq %rax, 0xab0(%rsp) movq 0xab0(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x370(%rsp) vmovaps 0x460(%rsp), %xmm1 vmovaps 0x4a0(%rsp), %xmm0 vmovaps %xmm1, 0x1110(%rsp) vmovaps %xmm0, 0x1100(%rsp) vmovaps 0x1110(%rsp), %xmm0 vmulps 0x1100(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x360(%rsp) vmovaps 0x420(%rsp), %xmm1 vmovaps 0x4a0(%rsp), %xmm0 vmovaps %xmm1, 0x10f0(%rsp) vmovaps %xmm0, 0x10e0(%rsp) vmovaps 0x10f0(%rsp), %xmm0 vmulps 0x10e0(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x350(%rsp) vmovaps 0x3e0(%rsp), %xmm1 vmovaps 0x4a0(%rsp), %xmm0 vmovaps %xmm1, 0x10d0(%rsp) vmovaps %xmm0, 0x10c0(%rsp) vmovaps 0x10d0(%rsp), %xmm0 
vmulps 0x10c0(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x340(%rsp) vmovaps 0x3a0(%rsp), %xmm1 vmovaps 0x4a0(%rsp), %xmm0 vmovaps %xmm1, 0x10b0(%rsp) vmovaps %xmm0, 0x10a0(%rsp) vmovaps 0x10b0(%rsp), %xmm0 vmulps 0x10a0(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x330(%rsp) vmovaps 0x450(%rsp), %xmm2 vmovaps 0x490(%rsp), %xmm1 vmovaps 0x360(%rsp), %xmm0 vmovaps %xmm2, 0x14a0(%rsp) vmovaps %xmm1, 0x1490(%rsp) vmovaps %xmm0, 0x1480(%rsp) vmovaps 0x14a0(%rsp), %xmm2 vmovaps 0x1490(%rsp), %xmm1 vmovaps 0x1480(%rsp), %xmm0 vmovaps %xmm2, 0x1c50(%rsp) vmovaps %xmm1, 0x1c40(%rsp) vmovaps %xmm0, 0x1c30(%rsp) vmovaps 0x1c50(%rsp), %xmm1 vmovaps 0x1c40(%rsp), %xmm0 vmovaps 0x1c30(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x120(%rsp) vmovaps 0x120(%rsp), %xmm0 vmovaps %xmm0, 0x360(%rsp) vmovaps 0x410(%rsp), %xmm2 vmovaps 0x490(%rsp), %xmm1 vmovaps 0x350(%rsp), %xmm0 vmovaps %xmm2, 0x1470(%rsp) vmovaps %xmm1, 0x1460(%rsp) vmovaps %xmm0, 0x1450(%rsp) vmovaps 0x1470(%rsp), %xmm2 vmovaps 0x1460(%rsp), %xmm1 vmovaps 0x1450(%rsp), %xmm0 vmovaps %xmm2, 0x1c80(%rsp) vmovaps %xmm1, 0x1c70(%rsp) vmovaps %xmm0, 0x1c60(%rsp) vmovaps 0x1c80(%rsp), %xmm1 vmovaps 0x1c70(%rsp), %xmm0 vmovaps 0x1c60(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x110(%rsp) vmovaps 0x110(%rsp), %xmm0 vmovaps %xmm0, 0x350(%rsp) vmovaps 0x3d0(%rsp), %xmm2 vmovaps 0x490(%rsp), %xmm1 vmovaps 0x340(%rsp), %xmm0 vmovaps %xmm2, 0x1440(%rsp) vmovaps %xmm1, 0x1430(%rsp) vmovaps %xmm0, 0x1420(%rsp) vmovaps 0x1440(%rsp), %xmm2 vmovaps 0x1430(%rsp), %xmm1 vmovaps 0x1420(%rsp), %xmm0 vmovaps %xmm2, 0x1cb0(%rsp) vmovaps %xmm1, 0x1ca0(%rsp) vmovaps %xmm0, 0x1c90(%rsp) vmovaps 0x1cb0(%rsp), %xmm1 vmovaps 0x1ca0(%rsp), %xmm0 vmovaps 0x1c90(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x100(%rsp) vmovaps 0x100(%rsp), %xmm0 vmovaps %xmm0, 0x340(%rsp) vmovaps 0x390(%rsp), %xmm2 vmovaps 0x490(%rsp), %xmm1 
vmovaps 0x330(%rsp), %xmm0 vmovaps %xmm2, 0x1410(%rsp) vmovaps %xmm1, 0x1400(%rsp) vmovaps %xmm0, 0x13f0(%rsp) vmovaps 0x1410(%rsp), %xmm2 vmovaps 0x1400(%rsp), %xmm1 vmovaps 0x13f0(%rsp), %xmm0 vmovaps %xmm2, 0x1ce0(%rsp) vmovaps %xmm1, 0x1cd0(%rsp) vmovaps %xmm0, 0x1cc0(%rsp) vmovaps 0x1ce0(%rsp), %xmm1 vmovaps 0x1cd0(%rsp), %xmm0 vmovaps 0x1cc0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0xf0(%rsp) vmovaps 0xf0(%rsp), %xmm0 vmovaps %xmm0, 0x330(%rsp) vmovaps 0x440(%rsp), %xmm2 vmovaps 0x480(%rsp), %xmm1 vmovaps 0x360(%rsp), %xmm0 vmovaps %xmm2, 0x13e0(%rsp) vmovaps %xmm1, 0x13d0(%rsp) vmovaps %xmm0, 0x13c0(%rsp) vmovaps 0x13e0(%rsp), %xmm2 vmovaps 0x13d0(%rsp), %xmm1 vmovaps 0x13c0(%rsp), %xmm0 vmovaps %xmm2, 0x1d10(%rsp) vmovaps %xmm1, 0x1d00(%rsp) vmovaps %xmm0, 0x1cf0(%rsp) vmovaps 0x1d10(%rsp), %xmm1 vmovaps 0x1d00(%rsp), %xmm0 vmovaps 0x1cf0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0xe0(%rsp) vmovaps 0xe0(%rsp), %xmm0 vmovaps %xmm0, 0x360(%rsp) vmovaps 0x400(%rsp), %xmm2 vmovaps 0x480(%rsp), %xmm1 vmovaps 0x350(%rsp), %xmm0 vmovaps %xmm2, 0x13b0(%rsp) vmovaps %xmm1, 0x13a0(%rsp) vmovaps %xmm0, 0x1390(%rsp) vmovaps 0x13b0(%rsp), %xmm2 vmovaps 0x13a0(%rsp), %xmm1 vmovaps 0x1390(%rsp), %xmm0 vmovaps %xmm2, 0x1d40(%rsp) vmovaps %xmm1, 0x1d30(%rsp) vmovaps %xmm0, 0x1d20(%rsp) vmovaps 0x1d40(%rsp), %xmm1 vmovaps 0x1d30(%rsp), %xmm0 vmovaps 0x1d20(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0xd0(%rsp) vmovaps 0xd0(%rsp), %xmm0 vmovaps %xmm0, 0x350(%rsp) vmovaps 0x3c0(%rsp), %xmm2 vmovaps 0x480(%rsp), %xmm1 vmovaps 0x340(%rsp), %xmm0 vmovaps %xmm2, 0x1380(%rsp) vmovaps %xmm1, 0x1370(%rsp) vmovaps %xmm0, 0x1360(%rsp) vmovaps 0x1380(%rsp), %xmm2 vmovaps 0x1370(%rsp), %xmm1 vmovaps 0x1360(%rsp), %xmm0 vmovaps %xmm2, 0x1d70(%rsp) vmovaps %xmm1, 0x1d60(%rsp) vmovaps %xmm0, 0x1d50(%rsp) vmovaps 0x1d70(%rsp), %xmm1 vmovaps 
0x1d60(%rsp), %xmm0 vmovaps 0x1d50(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0xc0(%rsp) vmovaps 0xc0(%rsp), %xmm0 vmovaps %xmm0, 0x340(%rsp) vmovaps 0x380(%rsp), %xmm2 vmovaps 0x480(%rsp), %xmm1 vmovaps 0x330(%rsp), %xmm0 vmovaps %xmm2, 0x1350(%rsp) vmovaps %xmm1, 0x1340(%rsp) vmovaps %xmm0, 0x1330(%rsp) vmovaps 0x1350(%rsp), %xmm2 vmovaps 0x1340(%rsp), %xmm1 vmovaps 0x1330(%rsp), %xmm0 vmovaps %xmm2, 0x1da0(%rsp) vmovaps %xmm1, 0x1d90(%rsp) vmovaps %xmm0, 0x1d80(%rsp) vmovaps 0x1da0(%rsp), %xmm1 vmovaps 0x1d90(%rsp), %xmm0 vmovaps 0x1d80(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0xb0(%rsp) vmovaps 0xb0(%rsp), %xmm0 vmovaps %xmm0, 0x330(%rsp) vmovaps 0x430(%rsp), %xmm2 vmovaps 0x470(%rsp), %xmm1 vmovaps 0x360(%rsp), %xmm0 vmovaps %xmm2, 0x1320(%rsp) vmovaps %xmm1, 0x1310(%rsp) vmovaps %xmm0, 0x1300(%rsp) vmovaps 0x1320(%rsp), %xmm2 vmovaps 0x1310(%rsp), %xmm1 vmovaps 0x1300(%rsp), %xmm0 vmovaps %xmm2, 0x1dd0(%rsp) vmovaps %xmm1, 0x1dc0(%rsp) vmovaps %xmm0, 0x1db0(%rsp) vmovaps 0x1dd0(%rsp), %xmm1 vmovaps 0x1dc0(%rsp), %xmm0 vmovaps 0x1db0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0xa0(%rsp) vmovaps 0xa0(%rsp), %xmm0 vmovaps %xmm0, 0x360(%rsp) vmovaps 0x3f0(%rsp), %xmm2 vmovaps 0x470(%rsp), %xmm1 vmovaps 0x350(%rsp), %xmm0 vmovaps %xmm2, 0x12f0(%rsp) vmovaps %xmm1, 0x12e0(%rsp) vmovaps %xmm0, 0x12d0(%rsp) vmovaps 0x12f0(%rsp), %xmm2 vmovaps 0x12e0(%rsp), %xmm1 vmovaps 0x12d0(%rsp), %xmm0 vmovaps %xmm2, 0x1e00(%rsp) vmovaps %xmm1, 0x1df0(%rsp) vmovaps %xmm0, 0x1de0(%rsp) vmovaps 0x1e00(%rsp), %xmm1 vmovaps 0x1df0(%rsp), %xmm0 vmovaps 0x1de0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x90(%rsp) vmovaps 0x90(%rsp), %xmm0 vmovaps %xmm0, 0x350(%rsp) vmovaps 0x3b0(%rsp), %xmm2 vmovaps 0x470(%rsp), %xmm1 vmovaps 0x340(%rsp), %xmm0 vmovaps %xmm2, 0x12c0(%rsp) vmovaps %xmm1, 0x12b0(%rsp) 
vmovaps %xmm0, 0x12a0(%rsp) vmovaps 0x12c0(%rsp), %xmm2 vmovaps 0x12b0(%rsp), %xmm1 vmovaps 0x12a0(%rsp), %xmm0 vmovaps %xmm2, 0x1e30(%rsp) vmovaps %xmm1, 0x1e20(%rsp) vmovaps %xmm0, 0x1e10(%rsp) vmovaps 0x1e30(%rsp), %xmm1 vmovaps 0x1e20(%rsp), %xmm0 vmovaps 0x1e10(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x80(%rsp) vmovaps 0x80(%rsp), %xmm0 vmovaps %xmm0, 0x340(%rsp) vmovaps 0x370(%rsp), %xmm2 vmovaps 0x470(%rsp), %xmm1 vmovaps 0x330(%rsp), %xmm0 vmovaps %xmm2, 0x1290(%rsp) vmovaps %xmm1, 0x1280(%rsp) vmovaps %xmm0, 0x1270(%rsp) vmovaps 0x1290(%rsp), %xmm2 vmovaps 0x1280(%rsp), %xmm1 vmovaps 0x1270(%rsp), %xmm0 vmovaps %xmm2, 0x1e60(%rsp) vmovaps %xmm1, 0x1e50(%rsp) vmovaps %xmm0, 0x1e40(%rsp) vmovaps 0x1e60(%rsp), %xmm1 vmovaps 0x1e50(%rsp), %xmm0 vmovaps 0x1e40(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x70(%rsp) vmovaps 0x70(%rsp), %xmm0 vmovaps %xmm0, 0x330(%rsp) movq 0x4f0(%rsp), %rax movl 0x4d4(%rsp), %ecx shll $0x2, %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax vmovaps 0x360(%rsp), %xmm0 movq %rax, 0xd38(%rsp) vmovaps %xmm0, 0xd20(%rsp) vmovaps 0xd20(%rsp), %xmm0 movq 0xd38(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x4e8(%rsp), %rax movl 0x4d4(%rsp), %ecx shll $0x2, %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax vmovaps 0x350(%rsp), %xmm0 movq %rax, 0xd18(%rsp) vmovaps %xmm0, 0xd00(%rsp) vmovaps 0xd00(%rsp), %xmm0 movq 0xd18(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x4e0(%rsp), %rax movl 0x4d4(%rsp), %ecx shll $0x2, %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax vmovaps 0x340(%rsp), %xmm0 movq %rax, 0xcf8(%rsp) vmovaps %xmm0, 0xce0(%rsp) vmovaps 0xce0(%rsp), %xmm0 movq 0xcf8(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x4d8(%rsp), %rax movl 0x4d4(%rsp), %ecx shll $0x2, %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax vmovaps 0x330(%rsp), %xmm0 movq %rax, 0xcd8(%rsp) vmovaps %xmm0, 0xcc0(%rsp) vmovaps 0xcc0(%rsp), %xmm0 movq 0xcd8(%rsp), 
%rax vmovaps %xmm0, (%rax) movq 0x4f8(%rsp), %rax addq $0x10, %rax movq %rax, 0x4f8(%rsp) movl 0x4d4(%rsp), %eax addl $0x1, %eax movl %eax, 0x4d4(%rsp) jmp 0x1850ab7 jmp 0x18518e5 jmp 0x18518e7 jmp 0x18518e9 jmp 0x18518eb movl 0x8bc(%rsp), %eax movl %eax, 0x8c4(%rsp) movq 0xa28(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0xe7c(%rsp) vbroadcastss 0xe7c(%rsp), %xmm0 vmovaps %xmm0, 0xe60(%rsp) vmovaps 0xe60(%rsp), %xmm0 vmovaps %xmm0, 0x320(%rsp) movq 0xa28(%rsp), %rax vmovss 0x4(%rax), %xmm0 vmovss %xmm0, 0xe5c(%rsp) vbroadcastss 0xe5c(%rsp), %xmm0 vmovaps %xmm0, 0xe40(%rsp) vmovaps 0xe40(%rsp), %xmm0 vmovaps %xmm0, 0x310(%rsp) movq 0xa28(%rsp), %rax vmovss 0x8(%rax), %xmm0 vmovss %xmm0, 0xe3c(%rsp) vbroadcastss 0xe3c(%rsp), %xmm0 vmovaps %xmm0, 0xe20(%rsp) vmovaps 0xe20(%rsp), %xmm0 vmovaps %xmm0, 0x300(%rsp) movq 0xa28(%rsp), %rax vmovss 0xc(%rax), %xmm0 vmovss %xmm0, 0xe1c(%rsp) vbroadcastss 0xe1c(%rsp), %xmm0 vmovaps %xmm0, 0xe00(%rsp) vmovaps 0xe00(%rsp), %xmm0 vmovaps %xmm0, 0x2f0(%rsp) movq 0x8e0(%rsp), %rax movq %rax, 0x2e8(%rsp) movq 0x8d8(%rsp), %rax movq %rax, 0x2e0(%rsp) movq 0x8d0(%rsp), %rax movq %rax, 0x2d8(%rsp) movq 0x8c8(%rsp), %rax movq %rax, 0x2d0(%rsp) movq 0xa40(%rsp), %rcx movl 0x8c0(%rsp), %eax movq %rcx, 0xc98(%rsp) movl %eax, 0xc94(%rsp) movq 0xc98(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0xc94(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x2c8(%rsp) movl $0x0, 0x2c4(%rsp) movl 0x2c4(%rsp), %eax cmpl 0xa1c(%rsp), %eax jge 0x1851df3 movq 0x2e8(%rsp), %rax movq %rax, 0xaa8(%rsp) movq 0xaa8(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x2b0(%rsp) movq 0x2e0(%rsp), %rax movq %rax, 0xaa0(%rsp) movq 0xaa0(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x2a0(%rsp) movq 0x2d8(%rsp), %rax movq %rax, 0xa98(%rsp) movq 0xa98(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 0x290(%rsp) movq 0x2d0(%rsp), %rax movq %rax, 0xa90(%rsp) movq 0xa90(%rsp), %rax vmovaps (%rax), %xmm0 vmovaps %xmm0, 
0x280(%rsp) vmovaps 0x2b0(%rsp), %xmm1 vmovaps 0x320(%rsp), %xmm0 vmovaps %xmm1, 0x1090(%rsp) vmovaps %xmm0, 0x1080(%rsp) vmovaps 0x1090(%rsp), %xmm0 vmulps 0x1080(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x270(%rsp) vmovaps 0x2a0(%rsp), %xmm2 vmovaps 0x310(%rsp), %xmm1 vmovaps 0x270(%rsp), %xmm0 vmovaps %xmm2, 0x1260(%rsp) vmovaps %xmm1, 0x1250(%rsp) vmovaps %xmm0, 0x1240(%rsp) vmovaps 0x1260(%rsp), %xmm2 vmovaps 0x1250(%rsp), %xmm1 vmovaps 0x1240(%rsp), %xmm0 vmovaps %xmm2, 0x1e90(%rsp) vmovaps %xmm1, 0x1e80(%rsp) vmovaps %xmm0, 0x1e70(%rsp) vmovaps 0x1e90(%rsp), %xmm1 vmovaps 0x1e80(%rsp), %xmm0 vmovaps 0x1e70(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x60(%rsp) vmovaps 0x60(%rsp), %xmm0 vmovaps %xmm0, 0x270(%rsp) vmovaps 0x290(%rsp), %xmm2 vmovaps 0x300(%rsp), %xmm1 vmovaps 0x270(%rsp), %xmm0 vmovaps %xmm2, 0x1230(%rsp) vmovaps %xmm1, 0x1220(%rsp) vmovaps %xmm0, 0x1210(%rsp) vmovaps 0x1230(%rsp), %xmm2 vmovaps 0x1220(%rsp), %xmm1 vmovaps 0x1210(%rsp), %xmm0 vmovaps %xmm2, 0x1ec0(%rsp) vmovaps %xmm1, 0x1eb0(%rsp) vmovaps %xmm0, 0x1ea0(%rsp) vmovaps 0x1ec0(%rsp), %xmm1 vmovaps 0x1eb0(%rsp), %xmm0 vmovaps 0x1ea0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x50(%rsp) vmovaps 0x50(%rsp), %xmm0 vmovaps %xmm0, 0x270(%rsp) vmovaps 0x280(%rsp), %xmm2 vmovaps 0x2f0(%rsp), %xmm1 vmovaps 0x270(%rsp), %xmm0 vmovaps %xmm2, 0x1200(%rsp) vmovaps %xmm1, 0x11f0(%rsp) vmovaps %xmm0, 0x11e0(%rsp) vmovaps 0x1200(%rsp), %xmm2 vmovaps 0x11f0(%rsp), %xmm1 vmovaps 0x11e0(%rsp), %xmm0 vmovaps %xmm2, 0x1ef0(%rsp) vmovaps %xmm1, 0x1ee0(%rsp) vmovaps %xmm0, 0x1ed0(%rsp) vmovaps 0x1ef0(%rsp), %xmm1 vmovaps 0x1ee0(%rsp), %xmm0 vmovaps 0x1ed0(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x40(%rsp) vmovaps 0x40(%rsp), %xmm0 vmovaps %xmm0, 0x270(%rsp) movq 0x2c8(%rsp), %rax vmovaps 0x270(%rsp), %xmm0 movq %rax, 0xcb8(%rsp) vmovaps %xmm0, 0xca0(%rsp) vmovaps 
0xca0(%rsp), %xmm0 movq 0xcb8(%rsp), %rax vmovaps %xmm0, (%rax) movq 0x2c8(%rsp), %rax addq $0x10, %rax movq %rax, 0x2c8(%rsp) movq 0x2e8(%rsp), %rax addq $0x10, %rax movq %rax, 0x2e8(%rsp) movq 0x2e0(%rsp), %rax addq $0x10, %rax movq %rax, 0x2e0(%rsp) movq 0x2d8(%rsp), %rax addq $0x10, %rax movq %rax, 0x2d8(%rsp) movq 0x2d0(%rsp), %rax addq $0x10, %rax movq %rax, 0x2d0(%rsp) movl 0x2c4(%rsp), %eax addl $0x1, %eax movl %eax, 0x2c4(%rsp) jmp 0x1851a78 movq 0xa28(%rsp), %rax addq $0x10, %rax movq %rax, 0xa28(%rsp) movl 0x8c0(%rsp), %eax addl $0x1, %eax movl %eax, 0x8c0(%rsp) jmp 0x184ed7d leaq 0x8e8(%rsp), %rax movq %rax, 0xa50(%rsp) movq 0xa50(%rsp), %rax movq %rax, 0x1888(%rsp) movq 0x1888(%rsp), %rax movq %rax, 0x38(%rsp) cmpq $0x0, 0x8(%rax) je 0x1851ed6 movq 0x38(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x1884(%rsp) # imm = 0xFFFFFFFF movl 0x1884(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1880(%rsp) cmpl $0x1, 0x1880(%rsp) jne 0x1851ed6 movq 0x38(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1851eaa movq 0x38(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1851ea8 jmp 0x1851ed4 movq 0x38(%rsp), %rax movq (%rax), %rax movq %rax, 0x1890(%rsp) cmpq $0x0, 0x1890(%rsp) je 0x1851ed2 movq 0x1890(%rsp), %rdi callq 0x5e480 jmp 0x1851ed4 jmp 0x1851ed6 movq 0x38(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1851f2e movq %rax, %rdi callq 0x5fc90 leaq 0x930(%rsp), %rax movq %rax, 0xa60(%rsp) movq 0xa60(%rsp), %rax movq %rax, 0x1868(%rsp) movq 0x1868(%rsp), %rax movq %rax, 0x30(%rsp) cmpq $0x0, 0x8(%rax) je 0x1851fe7 movq 0x30(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x1864(%rsp) # imm = 0xFFFFFFFF movl 0x1864(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1860(%rsp) cmpl $0x1, 0x1860(%rsp) jne 0x1851fe7 movq 
0x30(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1851fbb movq 0x30(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1851fb9 jmp 0x1851fe5 movq 0x30(%rsp), %rax movq (%rax), %rax movq %rax, 0x18a0(%rsp) cmpq $0x0, 0x18a0(%rsp) je 0x1851fe3 movq 0x18a0(%rsp), %rdi callq 0x5e480 jmp 0x1851fe5 jmp 0x1851fe7 movq 0x30(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x185203f movq %rax, %rdi callq 0x5fc90 leaq 0x988(%rsp), %rax movq %rax, 0xa70(%rsp) movq 0xa70(%rsp), %rax movq %rax, 0x1848(%rsp) movq 0x1848(%rsp), %rax movq %rax, 0x28(%rsp) cmpq $0x0, 0x8(%rax) je 0x18520f8 movq 0x28(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x1844(%rsp) # imm = 0xFFFFFFFF movl 0x1844(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1840(%rsp) cmpl $0x1, 0x1840(%rsp) jne 0x18520f8 movq 0x28(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x18520cc movq 0x28(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x18520ca jmp 0x18520f6 movq 0x28(%rsp), %rax movq (%rax), %rax movq %rax, 0x18b0(%rsp) cmpq $0x0, 0x18b0(%rsp) je 0x18520f4 movq 0x18b0(%rsp), %rdi callq 0x5e480 jmp 0x18520f6 jmp 0x18520f8 movq 0x28(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1852150 movq %rax, %rdi callq 0x5fc90 leaq 0x9d0(%rsp), %rax movq %rax, 0xa80(%rsp) movq 0xa80(%rsp), %rax movq %rax, 0x1828(%rsp) movq 0x1828(%rsp), %rax movq %rax, 0x20(%rsp) cmpq $0x0, 0x8(%rax) je 0x1852209 movq 0x20(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x1824(%rsp) # imm = 0xFFFFFFFF movl 0x1824(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 
0x1820(%rsp) cmpl $0x1, 0x1820(%rsp) jne 0x1852209 movq 0x20(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x18521dd movq 0x20(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x18521db jmp 0x1852207 movq 0x20(%rsp), %rax movq (%rax), %rax movq %rax, 0x18c0(%rsp) cmpq $0x0, 0x18c0(%rsp) je 0x1852205 movq 0x18c0(%rsp), %rdi callq 0x5e480 jmp 0x1852207 jmp 0x1852209 movq 0x20(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1852261 movq %rax, %rdi callq 0x5fc90 addq $0x1fc8, %rsp # imm = 0x1FC8 retq leaq 0x930(%rsp), %rax movq %rax, 0xa68(%rsp) movq 0xa68(%rsp), %rax movq %rax, 0x1858(%rsp) movq 0x1858(%rsp), %rax movq %rax, 0x18(%rsp) cmpq $0x0, 0x8(%rax) je 0x1852322 movq 0x18(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x1854(%rsp) # imm = 0xFFFFFFFF movl 0x1854(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1850(%rsp) cmpl $0x1, 0x1850(%rsp) jne 0x1852322 movq 0x18(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x18522f6 movq 0x18(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x18522f4 jmp 0x1852320 movq 0x18(%rsp), %rax movq (%rax), %rax movq %rax, 0x18a8(%rsp) cmpq $0x0, 0x18a8(%rsp) je 0x185231e movq 0x18a8(%rsp), %rdi callq 0x5e480 jmp 0x1852320 jmp 0x1852322 movq 0x18(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x185237a movq %rax, %rdi callq 0x5fc90 jmp 0x185237c leaq 0x988(%rsp), %rax movq %rax, 0xa78(%rsp) movq 0xa78(%rsp), %rax movq %rax, 0x1838(%rsp) movq 0x1838(%rsp), %rax movq %rax, 0x10(%rsp) cmpq $0x0, 0x8(%rax) je 0x1852435 movq 0x10(%rsp), %rax movq 0x8(%rax), %rcx movl 
$0xffffffff, 0x1834(%rsp) # imm = 0xFFFFFFFF movl 0x1834(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1830(%rsp) cmpl $0x1, 0x1830(%rsp) jne 0x1852435 movq 0x10(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1852409 movq 0x10(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1852407 jmp 0x1852433 movq 0x10(%rsp), %rax movq (%rax), %rax movq %rax, 0x18b8(%rsp) cmpq $0x0, 0x18b8(%rsp) je 0x1852431 movq 0x18b8(%rsp), %rdi callq 0x5e480 jmp 0x1852433 jmp 0x1852435 movq 0x10(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x185248d movq %rax, %rdi callq 0x5fc90 jmp 0x185248f leaq 0x9d0(%rsp), %rax movq %rax, 0xa88(%rsp) movq 0xa88(%rsp), %rax movq %rax, 0x1818(%rsp) movq 0x1818(%rsp), %rax movq %rax, 0x8(%rsp) cmpq $0x0, 0x8(%rax) je 0x1852548 movq 0x8(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x1814(%rsp) # imm = 0xFFFFFFFF movl 0x1814(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1810(%rsp) cmpl $0x1, 0x1810(%rsp) jne 0x1852548 movq 0x8(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x185251c movq 0x8(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x185251a jmp 0x1852546 movq 0x8(%rsp), %rax movq (%rax), %rax movq %rax, 0x18c8(%rsp) cmpq $0x0, 0x18c8(%rsp) je 0x1852544 movq 0x18c8(%rsp), %rdi callq 0x5e480 jmp 0x1852546 jmp 0x1852548 movq 0x8(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x18525a0 movq %rax, %rdi callq 0x5fc90 jmp 0x18525a2 movq 0x980(%rsp), %rdi callq 0x5e3b0 nop
/ysh329[P]ncnn/src/layer/x86/interp_bicubic_pack4.h
ncnn::resize_bilinear_image(ncnn::Mat const&, ncnn::Mat&, float*, int*, float*, int*)
static void resize_bilinear_image(const Mat& src, Mat& dst, float* alpha, int* xofs, float* beta, int* yofs) { int w = dst.w; int h = dst.h; // loop body Mat rowsbuf0(w); Mat rowsbuf1(w); float* rows0 = rowsbuf0; float* rows1 = rowsbuf1; int prev_sy1 = -2; for (int dy = 0; dy < h; dy++) { int sy = yofs[dy]; if (sy == prev_sy1) { // reuse all rows } else if (sy == prev_sy1 + 1) { // hresize one row float* rows0_old = rows0; rows0 = rows1; rows1 = rows0_old; const float* S1 = src.row(sy + 1); const float* alphap = alpha; float* rows1p = rows1; int dx = 0; for (; dx < w; dx++) { int sx = xofs[dx]; const float* S1p = S1 + sx; float a0 = alphap[0]; float a1 = alphap[1]; rows1p[dx] = S1p[0] * a0 + S1p[1] * a1; alphap += 2; } } else { // hresize two rows const float* S0 = src.row(sy); const float* S1 = src.row(sy + 1); const float* alphap = alpha; float* rows0p = rows0; float* rows1p = rows1; int dx = 0; for (; dx < w; dx++) { int sx = xofs[dx]; const float* S0p = S0 + sx; const float* S1p = S1 + sx; float a0 = alphap[0]; float a1 = alphap[1]; rows0p[dx] = S0p[0] * a0 + S0p[1] * a1; rows1p[dx] = S1p[0] * a0 + S1p[1] * a1; alphap += 2; } } prev_sy1 = sy; // vresize float b0 = beta[0]; float b1 = beta[1]; float* rows0p = rows0; float* rows1p = rows1; float* Dp = dst.row(dy); int dx = 0; #if __SSE2__ #if __AVX__ __m256 _b0_256 = _mm256_set1_ps(b0); __m256 _b1_256 = _mm256_set1_ps(b1); for (; dx + 7 < w; dx += 8) { __m256 _rows0 = _mm256_loadu_ps(rows0p); __m256 _rows1 = _mm256_loadu_ps(rows1p); __m256 _D = _mm256_mul_ps(_rows0, _b0_256); _D = _mm256_comp_fmadd_ps(_rows1, _b1_256, _D); _mm256_storeu_ps(Dp, _D); Dp += 8; rows0p += 8; rows1p += 8; } #endif // __AVX__ __m128 _b0_128 = _mm_set1_ps(b0); __m128 _b1_128 = _mm_set1_ps(b1); for (; dx + 3 < w; dx += 4) { __m128 _rows0 = _mm_loadu_ps(rows0p); __m128 _rows1 = _mm_loadu_ps(rows1p); __m128 _D = _mm_mul_ps(_rows0, _b0_128); _D = _mm_comp_fmadd_ps(_rows1, _b1_128, _D); _mm_storeu_ps(Dp, _D); Dp += 4; rows0p += 4; rows1p += 
4; } #endif // __SSE2__ for (; dx < w; dx++) { *Dp++ = *rows0p++ * b0 + *rows1p++ * b1; } beta += 2; } }
pushq %rbp movq %rsp, %rbp andq $-0x20, %rsp subq $0x800, %rsp # imm = 0x800 movq %rdi, 0x3f0(%rsp) movq %rsi, 0x3e8(%rsp) movq %rdx, 0x3e0(%rsp) movq %rcx, 0x3d8(%rsp) movq %r8, 0x3d0(%rsp) movq %r9, 0x3c8(%rsp) movq 0x3e8(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x3c4(%rsp) movq 0x3e8(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0x3c0(%rsp) movl 0x3c4(%rsp), %eax leaq 0x378(%rsp), %rcx movq %rcx, 0x7a8(%rsp) movl %eax, 0x7a4(%rsp) movq $0x4, 0x798(%rsp) movq $0x0, 0x790(%rsp) movq 0x7a8(%rsp), %rdi movq %rdi, 0x140(%rsp) movq $0x0, (%rdi) movq $0x0, 0x8(%rdi) movq $0x0, 0x10(%rdi) movl $0x0, 0x18(%rdi) movq $0x0, 0x20(%rdi) movl $0x0, 0x28(%rdi) movl $0x0, 0x2c(%rdi) movl $0x0, 0x30(%rdi) movl $0x0, 0x34(%rdi) movl $0x0, 0x38(%rdi) movq $0x0, 0x40(%rdi) movl 0x7a4(%rsp), %esi movq 0x798(%rsp), %rdx movq 0x790(%rsp), %rcx callq 0x65040 movl 0x3c4(%rsp), %eax leaq 0x330(%rsp), %rcx movq %rcx, 0x7c8(%rsp) movl %eax, 0x7c4(%rsp) movq $0x4, 0x7b8(%rsp) movq $0x0, 0x7b0(%rsp) movq 0x7c8(%rsp), %rdi movq %rdi, 0x148(%rsp) movq $0x0, (%rdi) movq $0x0, 0x8(%rdi) movq $0x0, 0x10(%rdi) movl $0x0, 0x18(%rdi) movq $0x0, 0x20(%rdi) movl $0x0, 0x28(%rdi) movl $0x0, 0x2c(%rdi) movl $0x0, 0x30(%rdi) movl $0x0, 0x34(%rdi) movl $0x0, 0x38(%rdi) movq $0x0, 0x40(%rdi) movl 0x7c4(%rsp), %esi movq 0x7b8(%rsp), %rdx movq 0x7b0(%rsp), %rcx callq 0x65040 jmp 0x185277b jmp 0x185277d leaq 0x378(%rsp), %rax movq %rax, 0x788(%rsp) movq 0x788(%rsp), %rax movq (%rax), %rax movq %rax, 0x318(%rsp) leaq 0x330(%rsp), %rax movq %rax, 0x780(%rsp) movq 0x780(%rsp), %rax movq (%rax), %rax movq %rax, 0x310(%rsp) movl $0xfffffffe, 0x30c(%rsp) # imm = 0xFFFFFFFE movl $0x0, 0x308(%rsp) movl 0x308(%rsp), %eax cmpl 0x3c0(%rsp), %eax jge 0x18533c2 movq 0x3c8(%rsp), %rax movslq 0x308(%rsp), %rcx movl (%rax,%rcx,4), %eax movl %eax, 0x304(%rsp) movl 0x304(%rsp), %eax cmpl 0x30c(%rsp), %eax jne 0x1852835 jmp 0x1852bfd movq %rax, %rcx movl %edx, %eax movq %rcx, 0x328(%rsp) movl %eax, 0x324(%rsp) jmp 0x18535f8 
movl 0x304(%rsp), %eax movl 0x30c(%rsp), %ecx addl $0x1, %ecx cmpl %ecx, %eax jne 0x18529e0 movq 0x318(%rsp), %rax movq %rax, 0x2f8(%rsp) movq 0x310(%rsp), %rax movq %rax, 0x318(%rsp) movq 0x2f8(%rsp), %rax movq %rax, 0x310(%rsp) movq 0x3f0(%rsp), %rcx movl 0x304(%rsp), %eax addl $0x1, %eax movq %rcx, 0x440(%rsp) movl %eax, 0x43c(%rsp) movq 0x440(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0x43c(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x2f0(%rsp) movq 0x3e0(%rsp), %rax movq %rax, 0x2e8(%rsp) movq 0x310(%rsp), %rax movq %rax, 0x2e0(%rsp) movl $0x0, 0x2dc(%rsp) movl 0x2dc(%rsp), %eax cmpl 0x3c4(%rsp), %eax jge 0x18529db movq 0x3d8(%rsp), %rax movslq 0x2dc(%rsp), %rcx movl (%rax,%rcx,4), %eax movl %eax, 0x2d8(%rsp) movq 0x2f0(%rsp), %rax movslq 0x2d8(%rsp), %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x2d0(%rsp) movq 0x2e8(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0x2cc(%rsp) movq 0x2e8(%rsp), %rax vmovss 0x4(%rax), %xmm0 vmovss %xmm0, 0x2c8(%rsp) movq 0x2d0(%rsp), %rax vmovss (%rax), %xmm0 vmulss 0x2cc(%rsp), %xmm0, %xmm0 movq 0x2d0(%rsp), %rax vmovss 0x4(%rax), %xmm1 vmulss 0x2c8(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 movq 0x2e0(%rsp), %rax movslq 0x2dc(%rsp), %rcx vmovss %xmm0, (%rax,%rcx,4) movq 0x2e8(%rsp), %rax addq $0x8, %rax movq %rax, 0x2e8(%rsp) movl 0x2dc(%rsp), %eax addl $0x1, %eax movl %eax, 0x2dc(%rsp) jmp 0x18528f5 jmp 0x1852bfb movq 0x3f0(%rsp), %rcx movl 0x304(%rsp), %eax movq %rcx, 0x430(%rsp) movl %eax, 0x42c(%rsp) movq 0x430(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0x42c(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x2c0(%rsp) movq 0x3f0(%rsp), %rcx movl 0x304(%rsp), %eax addl $0x1, %eax movq %rcx, 0x420(%rsp) movl %eax, 0x41c(%rsp) movq 0x420(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0x41c(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x2b8(%rsp) movq 0x3e0(%rsp), %rax 
movq %rax, 0x2b0(%rsp) movq 0x318(%rsp), %rax movq %rax, 0x2a8(%rsp) movq 0x310(%rsp), %rax movq %rax, 0x2a0(%rsp) movl $0x0, 0x29c(%rsp) movl 0x29c(%rsp), %eax cmpl 0x3c4(%rsp), %eax jge 0x1852bf9 movq 0x3d8(%rsp), %rax movslq 0x29c(%rsp), %rcx movl (%rax,%rcx,4), %eax movl %eax, 0x298(%rsp) movq 0x2c0(%rsp), %rax movslq 0x298(%rsp), %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x290(%rsp) movq 0x2b8(%rsp), %rax movslq 0x298(%rsp), %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x288(%rsp) movq 0x2b0(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0x284(%rsp) movq 0x2b0(%rsp), %rax vmovss 0x4(%rax), %xmm0 vmovss %xmm0, 0x280(%rsp) movq 0x290(%rsp), %rax vmovss (%rax), %xmm0 vmulss 0x284(%rsp), %xmm0, %xmm0 movq 0x290(%rsp), %rax vmovss 0x4(%rax), %xmm1 vmulss 0x280(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 movq 0x2a8(%rsp), %rax movslq 0x29c(%rsp), %rcx vmovss %xmm0, (%rax,%rcx,4) movq 0x288(%rsp), %rax vmovss (%rax), %xmm0 vmulss 0x284(%rsp), %xmm0, %xmm0 movq 0x288(%rsp), %rax vmovss 0x4(%rax), %xmm1 vmulss 0x280(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 movq 0x2a0(%rsp), %rax movslq 0x29c(%rsp), %rcx vmovss %xmm0, (%rax,%rcx,4) movq 0x2b0(%rsp), %rax addq $0x8, %rax movq %rax, 0x2b0(%rsp) movl 0x29c(%rsp), %eax addl $0x1, %eax movl %eax, 0x29c(%rsp) jmp 0x1852ab0 jmp 0x1852bfb jmp 0x1852bfd movl 0x304(%rsp), %eax movl %eax, 0x30c(%rsp) movq 0x3d0(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0x27c(%rsp) movq 0x3d0(%rsp), %rax vmovss 0x4(%rax), %xmm0 vmovss %xmm0, 0x278(%rsp) movq 0x318(%rsp), %rax movq %rax, 0x270(%rsp) movq 0x310(%rsp), %rax movq %rax, 0x268(%rsp) movq 0x3e8(%rsp), %rcx movl 0x308(%rsp), %eax movq %rcx, 0x450(%rsp) movl %eax, 0x44c(%rsp) movq 0x450(%rsp), %rdx movslq 0x2c(%rdx), %rcx movslq 0x44c(%rsp), %rax imulq %rax, %rcx movq (%rdx), %rax movq 0x10(%rdx), %rdx imulq %rdx, %rcx addq %rcx, %rax movq %rax, 0x260(%rsp) movl $0x0, 0x25c(%rsp) vmovss 0x27c(%rsp), %xmm0 vmovss %xmm0, 0x45c(%rsp) vmovss 0x45c(%rsp), %xmm0 vmovss 
%xmm0, 0x11c(%rsp) vmovss %xmm0, 0x69c(%rsp) vmovss %xmm0, 0x698(%rsp) vmovss %xmm0, 0x694(%rsp) vmovss %xmm0, 0x690(%rsp) vmovss %xmm0, 0x68c(%rsp) vmovss %xmm0, 0x688(%rsp) vmovss %xmm0, 0x684(%rsp) vmovss %xmm0, 0x680(%rsp) vmovss 0x694(%rsp), %xmm1 vmovss 0x690(%rsp), %xmm0 vinsertps $0x10, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[2,3] vmovss 0x698(%rsp), %xmm1 vinsertps $0x20, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm1[0],xmm0[3] vmovss 0x69c(%rsp), %xmm1 vinsertps $0x30, %xmm1, %xmm0, %xmm1 # xmm1 = xmm0[0,1,2],xmm1[0] vmovss 0x684(%rsp), %xmm2 vmovss 0x680(%rsp), %xmm0 vinsertps $0x10, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm2[0],xmm0[2,3] vmovss 0x688(%rsp), %xmm2 vinsertps $0x20, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm2[0],xmm0[3] vmovss 0x68c(%rsp), %xmm2 vinsertps $0x30, %xmm2, %xmm0, %xmm2 # xmm2 = xmm0[0,1,2],xmm2[0] vmovaps %xmm2, %xmm0 vinsertf128 $0x1, %xmm1, %ymm0, %ymm0 vmovaps %ymm0, 0x660(%rsp) vmovaps 0x660(%rsp), %ymm0 vmovaps %ymm0, 0x120(%rsp) vmovaps 0x120(%rsp), %ymm0 vmovaps %ymm0, 0x220(%rsp) vmovss 0x278(%rsp), %xmm0 vmovss %xmm0, 0x458(%rsp) vmovss 0x458(%rsp), %xmm0 vmovss %xmm0, 0xdc(%rsp) vmovss %xmm0, 0x6dc(%rsp) vmovss %xmm0, 0x6d8(%rsp) vmovss %xmm0, 0x6d4(%rsp) vmovss %xmm0, 0x6d0(%rsp) vmovss %xmm0, 0x6cc(%rsp) vmovss %xmm0, 0x6c8(%rsp) vmovss %xmm0, 0x6c4(%rsp) vmovss %xmm0, 0x6c0(%rsp) vmovss 0x6d4(%rsp), %xmm1 vmovss 0x6d0(%rsp), %xmm0 vinsertps $0x10, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[2,3] vmovss 0x6d8(%rsp), %xmm1 vinsertps $0x20, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm1[0],xmm0[3] vmovss 0x6dc(%rsp), %xmm1 vinsertps $0x30, %xmm1, %xmm0, %xmm1 # xmm1 = xmm0[0,1,2],xmm1[0] vmovss 0x6c4(%rsp), %xmm2 vmovss 0x6c0(%rsp), %xmm0 vinsertps $0x10, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm2[0],xmm0[2,3] vmovss 0x6c8(%rsp), %xmm2 vinsertps $0x20, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm2[0],xmm0[3] vmovss 0x6cc(%rsp), %xmm2 vinsertps $0x30, %xmm2, %xmm0, %xmm2 # xmm2 = xmm0[0,1,2],xmm2[0] vmovaps 
%xmm2, %xmm0 vinsertf128 $0x1, %xmm1, %ymm0, %ymm0 vmovaps %ymm0, 0x6a0(%rsp) vmovaps 0x6a0(%rsp), %ymm0 vmovaps %ymm0, 0xe0(%rsp) vmovaps 0xe0(%rsp), %ymm0 vmovaps %ymm0, 0x200(%rsp) movl 0x25c(%rsp), %eax addl $0x7, %eax cmpl 0x3c4(%rsp), %eax jge 0x18530b9 movq 0x270(%rsp), %rax movq %rax, 0x7d8(%rsp) movq 0x7d8(%rsp), %rax vmovups (%rax), %ymm0 vmovaps %ymm0, 0xa0(%rsp) vmovaps 0xa0(%rsp), %ymm0 vmovaps %ymm0, 0x1e0(%rsp) movq 0x268(%rsp), %rax movq %rax, 0x7d0(%rsp) movq 0x7d0(%rsp), %rax vmovups (%rax), %ymm0 vmovaps %ymm0, 0x80(%rsp) vmovaps 0x80(%rsp), %ymm0 vmovaps %ymm0, 0x1c0(%rsp) vmovaps 0x1e0(%rsp), %ymm1 vmovaps 0x220(%rsp), %ymm0 vmovaps %ymm1, 0x480(%rsp) vmovaps %ymm0, 0x460(%rsp) vmovaps 0x480(%rsp), %ymm0 vmulps 0x460(%rsp), %ymm0, %ymm0 vmovaps %ymm0, 0x1a0(%rsp) vmovaps 0x1c0(%rsp), %ymm2 vmovaps 0x200(%rsp), %ymm1 vmovaps 0x1a0(%rsp), %ymm0 vmovaps %ymm2, 0x4e0(%rsp) vmovaps %ymm1, 0x4c0(%rsp) vmovaps %ymm0, 0x4a0(%rsp) vmovaps 0x4e0(%rsp), %ymm2 vmovaps 0x4c0(%rsp), %ymm1 vmovaps 0x4a0(%rsp), %ymm0 vmovaps %ymm2, 0x720(%rsp) vmovaps %ymm1, 0x700(%rsp) vmovaps %ymm0, 0x6e0(%rsp) vmovaps 0x720(%rsp), %ymm1 vmovaps 0x700(%rsp), %ymm0 vmovaps 0x6e0(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x60(%rsp) vmovaps 0x60(%rsp), %ymm0 vmovaps %ymm0, 0x1a0(%rsp) movq 0x260(%rsp), %rax vmovaps 0x1a0(%rsp), %ymm0 movq %rax, 0x5d8(%rsp) vmovaps %ymm0, 0x5a0(%rsp) vmovaps 0x5a0(%rsp), %ymm0 movq 0x5d8(%rsp), %rax vmovups %ymm0, (%rax) movq 0x260(%rsp), %rax addq $0x20, %rax movq %rax, 0x260(%rsp) movq 0x270(%rsp), %rax addq $0x20, %rax movq %rax, 0x270(%rsp) movq 0x268(%rsp), %rax addq $0x20, %rax movq %rax, 0x268(%rsp) movl 0x25c(%rsp), %eax addl $0x8, %eax movl %eax, 0x25c(%rsp) jmp 0x1852ecb vmovss 0x27c(%rsp), %xmm0 vmovss %xmm0, 0x54c(%rsp) vbroadcastss 0x54c(%rsp), %xmm0 vmovaps %xmm0, 0x530(%rsp) vmovaps 0x530(%rsp), %xmm0 vmovaps %xmm0, 0x190(%rsp) vmovss 0x278(%rsp), %xmm0 vmovss %xmm0, 0x52c(%rsp) 
vbroadcastss 0x52c(%rsp), %xmm0 vmovaps %xmm0, 0x510(%rsp) vmovaps 0x510(%rsp), %xmm0 vmovaps %xmm0, 0x180(%rsp) movl 0x25c(%rsp), %eax addl $0x3, %eax cmpl 0x3c4(%rsp), %eax jge 0x1853309 movq 0x270(%rsp), %rax movq %rax, 0x7e8(%rsp) movq 0x7e8(%rsp), %rax vmovups (%rax), %xmm0 vmovaps %xmm0, 0x50(%rsp) vmovaps 0x50(%rsp), %xmm0 vmovaps %xmm0, 0x170(%rsp) movq 0x268(%rsp), %rax movq %rax, 0x7e0(%rsp) movq 0x7e0(%rsp), %rax vmovups (%rax), %xmm0 vmovaps %xmm0, 0x40(%rsp) vmovaps 0x40(%rsp), %xmm0 vmovaps %xmm0, 0x160(%rsp) vmovaps 0x170(%rsp), %xmm1 vmovaps 0x190(%rsp), %xmm0 vmovaps %xmm1, 0x560(%rsp) vmovaps %xmm0, 0x550(%rsp) vmovaps 0x560(%rsp), %xmm0 vmulps 0x550(%rsp), %xmm0, %xmm0 vmovaps %xmm0, 0x150(%rsp) vmovaps 0x160(%rsp), %xmm2 vmovaps 0x180(%rsp), %xmm1 vmovaps 0x150(%rsp), %xmm0 vmovaps %xmm2, 0x590(%rsp) vmovaps %xmm1, 0x580(%rsp) vmovaps %xmm0, 0x570(%rsp) vmovaps 0x590(%rsp), %xmm2 vmovaps 0x580(%rsp), %xmm1 vmovaps 0x570(%rsp), %xmm0 vmovaps %xmm2, 0x770(%rsp) vmovaps %xmm1, 0x760(%rsp) vmovaps %xmm0, 0x750(%rsp) vmovaps 0x770(%rsp), %xmm1 vmovaps 0x760(%rsp), %xmm0 vmovaps 0x750(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x30(%rsp) vmovaps 0x30(%rsp), %xmm0 vmovaps %xmm0, 0x150(%rsp) movq 0x260(%rsp), %rax vmovaps 0x150(%rsp), %xmm0 movq %rax, 0x658(%rsp) vmovaps %xmm0, 0x640(%rsp) vmovaps 0x640(%rsp), %xmm0 movq 0x658(%rsp), %rax vmovups %xmm0, (%rax) movq 0x260(%rsp), %rax addq $0x10, %rax movq %rax, 0x260(%rsp) movq 0x270(%rsp), %rax addq $0x10, %rax movq %rax, 0x270(%rsp) movq 0x268(%rsp), %rax addq $0x10, %rax movq %rax, 0x268(%rsp) movl 0x25c(%rsp), %eax addl $0x4, %eax movl %eax, 0x25c(%rsp) jmp 0x1853127 jmp 0x185330b movl 0x25c(%rsp), %eax cmpl 0x3c4(%rsp), %eax jge 0x1853398 movq 0x270(%rsp), %rax movq %rax, %rcx addq $0x4, %rcx movq %rcx, 0x270(%rsp) vmovss (%rax), %xmm0 vmulss 0x27c(%rsp), %xmm0, %xmm0 movq 0x268(%rsp), %rax movq %rax, %rcx addq $0x4, %rcx movq %rcx, 0x268(%rsp) vmovss 
(%rax), %xmm1 vmulss 0x278(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 movq 0x260(%rsp), %rax movq %rax, %rcx addq $0x4, %rcx movq %rcx, 0x260(%rsp) vmovss %xmm0, (%rax) movl 0x25c(%rsp), %eax addl $0x1, %eax movl %eax, 0x25c(%rsp) jmp 0x185330b movq 0x3d0(%rsp), %rax addq $0x8, %rax movq %rax, 0x3d0(%rsp) movl 0x308(%rsp), %eax addl $0x1, %eax movl %eax, 0x308(%rsp) jmp 0x18527d9 leaq 0x330(%rsp), %rax movq %rax, 0x3f8(%rsp) movq 0x3f8(%rsp), %rax movq %rax, 0x618(%rsp) movq 0x618(%rsp), %rax movq %rax, 0x28(%rsp) cmpq $0x0, 0x8(%rax) je 0x1853481 movq 0x28(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x614(%rsp) # imm = 0xFFFFFFFF movl 0x614(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x610(%rsp) cmpl $0x1, 0x610(%rsp) jne 0x1853481 movq 0x28(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1853452 movq 0x28(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x1853450 jmp 0x185347f movq 0x28(%rsp), %rax movq (%rax), %rax movq %rax, 0x620(%rsp) cmpq $0x0, 0x620(%rsp) je 0x185347d movq 0x620(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x185347f jmp 0x1853481 movq 0x28(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x18534d9 movq %rax, %rdi callq 0x5fc90 leaq 0x378(%rsp), %rax movq %rax, 0x408(%rsp) movq 0x408(%rsp), %rax movq %rax, 0x5f8(%rsp) movq 0x5f8(%rsp), %rax movq %rax, 0x20(%rsp) cmpq $0x0, 0x8(%rax) je 0x1853598 movq 0x20(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x5f4(%rsp) # imm = 0xFFFFFFFF movl 0x5f4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x5f0(%rsp) cmpl $0x1, 0x5f0(%rsp) jne 0x1853598 movq 0x20(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1853569 movq 0x20(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x1853567 jmp 
0x1853596 movq 0x20(%rsp), %rax movq (%rax), %rax movq %rax, 0x630(%rsp) cmpq $0x0, 0x630(%rsp) je 0x1853594 movq 0x630(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x1853596 jmp 0x1853598 movq 0x20(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x18535f0 movq %rax, %rdi callq 0x5fc90 movq %rbp, %rsp popq %rbp vzeroupper retq leaq 0x378(%rsp), %rax movq %rax, 0x410(%rsp) movq 0x410(%rsp), %rax movq %rax, 0x5e8(%rsp) movq 0x5e8(%rsp), %rax movq %rax, 0x18(%rsp) cmpq $0x0, 0x8(%rax) je 0x18536b1 movq 0x18(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x5e4(%rsp) # imm = 0xFFFFFFFF movl 0x5e4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x5e0(%rsp) cmpl $0x1, 0x5e0(%rsp) jne 0x18536b1 movq 0x18(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1853685 movq 0x18(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1853683 jmp 0x18536af movq 0x18(%rsp), %rax movq (%rax), %rax movq %rax, 0x638(%rsp) cmpq $0x0, 0x638(%rsp) je 0x18536ad movq 0x638(%rsp), %rdi callq 0x5e480 jmp 0x18536af jmp 0x18536b1 movq 0x18(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1853709 movq %rax, %rdi callq 0x5fc90 jmp 0x185370b movq 0x328(%rsp), %rdi callq 0x5e3b0 nopl (%rax,%rax)
/ysh329[P]ncnn/src/layer/x86/interp_bilinear.h
ncnn::resize_bicubic_image(ncnn::Mat const&, ncnn::Mat&, float*, int*, float*, int*)
// Bicubic (4-tap) resize of a single-channel float image.
//
// Parameters:
//   src   - source image (float rows, accessed via src.row(sy)).
//   dst   - destination image; its w/h define the output size.
//   alpha - horizontal weights, 4 floats per output column (consumed 4 at a time).
//   xofs  - per-output-column horizontal source offset sx; taps read S[sx-1..sx+2].
//   beta  - vertical weights, 4 floats per output row (advanced by 4 each dy).
//   yofs  - per-output-row vertical source row sy; taps read rows sy-1..sy+2.
//
// Strategy: for each output row, horizontally resize the (up to) four source
// rows it needs into rowsbuf0..3, then vertically blend them with beta.
// A rolling cache keyed on prev_sy1 avoids recomputing rows shared with the
// previous output row: depending on how far sy advanced, 0, 1, 2, 3 or all 4
// rows are (re)computed, and the rows0..rows3 pointers are rotated so that
// rows0..rows3 always hold the hresized source rows sy-1, sy, sy+1, sy+2.
//
// NOTE(review): no bounds clamping here — assumes the caller prepared
// xofs/alpha/yofs so that all taps (including S*p[-1] and row sy-1/sy+2)
// stay inside src, with border handling done upstream. TODO confirm.
static void resize_bicubic_image(const Mat& src, Mat& dst, float* alpha, int* xofs, float* beta, int* yofs)
{
    int w = dst.w;
    int h = dst.h;

    // loop body
    // Four scratch rows of width w; the raw float* views below are what get
    // rotated, so the underlying buffers never move.
    Mat rowsbuf0(w);
    Mat rowsbuf1(w);
    Mat rowsbuf2(w);
    Mat rowsbuf3(w);
    float* rows0 = rowsbuf0;
    float* rows1 = rowsbuf1;
    float* rows2 = rowsbuf2;
    float* rows3 = rowsbuf3;

    // -3 guarantees the first iteration never matches any "reuse" branch,
    // forcing a full four-row hresize.
    int prev_sy1 = -3;

    for (int dy = 0; dy < h; dy++)
    {
        int sy = yofs[dy];

        if (sy == prev_sy1)
        {
            // reuse all rows
        }
        else if (sy == prev_sy1 + 1)
        {
            // hresize one row
            // Rotate: old rows1..3 become rows0..2; the stale rows0 buffer is
            // recycled as rows3 and refilled from source row sy+2.
            float* rows0_old = rows0;
            rows0 = rows1;
            rows1 = rows2;
            rows2 = rows3;
            rows3 = rows0_old;
            const float* S3 = src.row(sy + 2);

            const float* alphap = alpha;
            float* rows3p = rows3;
            for (int dx = 0; dx < w; dx++)
            {
                int sx = xofs[dx];
                const float* S3p = S3 + sx;

                float a0 = alphap[0];
                float a1 = alphap[1];
                float a2 = alphap[2];
                float a3 = alphap[3];
                rows3p[dx] = S3p[-1] * a0 + S3p[0] * a1 + S3p[1] * a2 + S3p[2] * a3;

                alphap += 4;
            }
        }
        else if (sy == prev_sy1 + 2)
        {
            // hresize two rows
            // Rotate by two: old rows2..3 become rows0..1; refill rows2..3
            // from source rows sy+1 and sy+2.
            float* rows0_old = rows0;
            float* rows1_old = rows1;
            rows0 = rows2;
            rows1 = rows3;
            rows2 = rows0_old;
            rows3 = rows1_old;
            const float* S2 = src.row(sy + 1);
            const float* S3 = src.row(sy + 2);

            const float* alphap = alpha;
            float* rows2p = rows2;
            float* rows3p = rows3;
            for (int dx = 0; dx < w; dx++)
            {
                int sx = xofs[dx];
                const float* S2p = S2 + sx;
                const float* S3p = S3 + sx;

                float a0 = alphap[0];
                float a1 = alphap[1];
                float a2 = alphap[2];
                float a3 = alphap[3];
                rows2p[dx] = S2p[-1] * a0 + S2p[0] * a1 + S2p[1] * a2 + S2p[2] * a3;
                rows3p[dx] = S3p[-1] * a0 + S3p[0] * a1 + S3p[1] * a2 + S3p[2] * a3;

                alphap += 4;
            }
        }
        else if (sy == prev_sy1 + 3)
        {
            // hresize three rows
            // Rotate by three: only old rows3 survives (as rows0); refill
            // rows1..3 from source rows sy, sy+1 and sy+2.
            float* rows0_old = rows0;
            float* rows1_old = rows1;
            float* rows2_old = rows2;
            rows0 = rows3;
            rows1 = rows0_old;
            rows2 = rows1_old;
            rows3 = rows2_old;
            const float* S1 = src.row(sy);
            const float* S2 = src.row(sy + 1);
            const float* S3 = src.row(sy + 2);

            const float* alphap = alpha;
            float* rows1p = rows1;
            float* rows2p = rows2;
            float* rows3p = rows3;
            for (int dx = 0; dx < w; dx++)
            {
                int sx = xofs[dx];
                const float* S1p = S1 + sx;
                const float* S2p = S2 + sx;
                const float* S3p = S3 + sx;

                float a0 = alphap[0];
                float a1 = alphap[1];
                float a2 = alphap[2];
                float a3 = alphap[3];
                rows1p[dx] = S1p[-1] * a0 + S1p[0] * a1 + S1p[1] * a2 + S1p[2] * a3;
                rows2p[dx] = S2p[-1] * a0 + S2p[0] * a1 + S2p[1] * a2 + S2p[2] * a3;
                rows3p[dx] = S3p[-1] * a0 + S3p[0] * a1 + S3p[1] * a2 + S3p[2] * a3;

                alphap += 4;
            }
        }
        else
        {
            // hresize four rows
            // No overlap with the previous output row: recompute all four
            // hresized rows from source rows sy-1 .. sy+2.
            const float* S0 = src.row(sy - 1);
            const float* S1 = src.row(sy);
            const float* S2 = src.row(sy + 1);
            const float* S3 = src.row(sy + 2);

            const float* alphap = alpha;
            float* rows0p = rows0;
            float* rows1p = rows1;
            float* rows2p = rows2;
            float* rows3p = rows3;
            for (int dx = 0; dx < w; dx++)
            {
                int sx = xofs[dx];
                const float* S0p = S0 + sx;
                const float* S1p = S1 + sx;
                const float* S2p = S2 + sx;
                const float* S3p = S3 + sx;

                float a0 = alphap[0];
                float a1 = alphap[1];
                float a2 = alphap[2];
                float a3 = alphap[3];
                rows0p[dx] = S0p[-1] * a0 + S0p[0] * a1 + S0p[1] * a2 + S0p[2] * a3;
                rows1p[dx] = S1p[-1] * a0 + S1p[0] * a1 + S1p[1] * a2 + S1p[2] * a3;
                rows2p[dx] = S2p[-1] * a0 + S2p[0] * a1 + S2p[1] * a2 + S2p[2] * a3;
                rows3p[dx] = S3p[-1] * a0 + S3p[0] * a1 + S3p[1] * a2 + S3p[2] * a3;

                alphap += 4;
            }
        }

        prev_sy1 = sy;

        // vresize
        // Blend the four cached rows with this output row's vertical weights.
        float b0 = beta[0];
        float b1 = beta[1];
        float b2 = beta[2];
        float b3 = beta[3];

        float* rows0p = rows0;
        float* rows1p = rows1;
        float* rows2p = rows2;
        float* rows3p = rows3;
        float* Dp = dst.row(dy);

        int dx = 0;
#if __SSE2__
#if __AVX__
        // 8-wide AVX pass; _mm256_comp_fmadd_ps is a project wrapper that
        // maps to FMA when available, mul+add otherwise.
        __m256 _b0_256 = _mm256_set1_ps(b0);
        __m256 _b1_256 = _mm256_set1_ps(b1);
        __m256 _b2_256 = _mm256_set1_ps(b2);
        __m256 _b3_256 = _mm256_set1_ps(b3);
        for (; dx + 7 < w; dx += 8)
        {
            __m256 _rows0 = _mm256_loadu_ps(rows0p);
            __m256 _rows1 = _mm256_loadu_ps(rows1p);
            __m256 _rows2 = _mm256_loadu_ps(rows2p);
            __m256 _rows3 = _mm256_loadu_ps(rows3p);
            __m256 _D = _mm256_mul_ps(_rows0, _b0_256);
            _D = _mm256_comp_fmadd_ps(_rows1, _b1_256, _D);
            _D = _mm256_comp_fmadd_ps(_rows2, _b2_256, _D);
            _D = _mm256_comp_fmadd_ps(_rows3, _b3_256, _D);
            _mm256_storeu_ps(Dp, _D);

            Dp += 8;
            rows0p += 8;
            rows1p += 8;
            rows2p += 8;
            rows3p += 8;
        }
#endif // __AVX__
        // 4-wide SSE pass for the remaining columns.
        __m128 _b0_128 = _mm_set1_ps(b0);
        __m128 _b1_128 = _mm_set1_ps(b1);
        __m128 _b2_128 = _mm_set1_ps(b2);
        __m128 _b3_128 = _mm_set1_ps(b3);
        for (; dx + 3 < w; dx += 4)
        {
            __m128 _rows0 = _mm_loadu_ps(rows0p);
            __m128 _rows1 = _mm_loadu_ps(rows1p);
            __m128 _rows2 = _mm_loadu_ps(rows2p);
            __m128 _rows3 = _mm_loadu_ps(rows3p);
            __m128 _D = _mm_mul_ps(_rows0, _b0_128);
            _D = _mm_comp_fmadd_ps(_rows1, _b1_128, _D);
            _D = _mm_comp_fmadd_ps(_rows2, _b2_128, _D);
            _D = _mm_comp_fmadd_ps(_rows3, _b3_128, _D);
            _mm_storeu_ps(Dp, _D);

            Dp += 4;
            rows0p += 4;
            rows1p += 4;
            rows2p += 4;
            rows3p += 4;
        }
#endif // __SSE2__
        // Scalar tail for the last w % 4 columns (or the whole row without SSE2).
        for (; dx < w; dx++)
        {
            *Dp++ = *rows0p++ * b0 + *rows1p++ * b1 + *rows2p++ * b2 + *rows3p++ * b3;
        }

        beta += 4;
    }
}
pushq %rbp movq %rsp, %rbp andq $-0x20, %rsp subq $0xfa0, %rsp # imm = 0xFA0 movq %rdi, 0x738(%rsp) movq %rsi, 0x730(%rsp) movq %rdx, 0x728(%rsp) movq %rcx, 0x720(%rsp) movq %r8, 0x718(%rsp) movq %r9, 0x710(%rsp) movq 0x730(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x70c(%rsp) movq 0x730(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0x708(%rsp) movl 0x70c(%rsp), %eax leaq 0x6c0(%rsp), %rcx movq %rcx, 0xee8(%rsp) movl %eax, 0xee4(%rsp) movq $0x4, 0xed8(%rsp) movq $0x0, 0xed0(%rsp) movq 0xee8(%rsp), %rdi movq %rdi, 0x200(%rsp) movq $0x0, (%rdi) movq $0x0, 0x8(%rdi) movq $0x0, 0x10(%rdi) movl $0x0, 0x18(%rdi) movq $0x0, 0x20(%rdi) movl $0x0, 0x28(%rdi) movl $0x0, 0x2c(%rdi) movl $0x0, 0x30(%rdi) movl $0x0, 0x34(%rdi) movl $0x0, 0x38(%rdi) movq $0x0, 0x40(%rdi) movl 0xee4(%rsp), %esi movq 0xed8(%rsp), %rdx movq 0xed0(%rsp), %rcx callq 0x65040 movl 0x70c(%rsp), %eax leaq 0x678(%rsp), %rcx movq %rcx, 0xf08(%rsp) movl %eax, 0xf04(%rsp) movq $0x4, 0xef8(%rsp) movq $0x0, 0xef0(%rsp) movq 0xf08(%rsp), %rdi movq %rdi, 0x208(%rsp) movq $0x0, (%rdi) movq $0x0, 0x8(%rdi) movq $0x0, 0x10(%rdi) movl $0x0, 0x18(%rdi) movq $0x0, 0x20(%rdi) movl $0x0, 0x28(%rdi) movl $0x0, 0x2c(%rdi) movl $0x0, 0x30(%rdi) movl $0x0, 0x34(%rdi) movl $0x0, 0x38(%rdi) movq $0x0, 0x40(%rdi) movl 0xf04(%rsp), %esi movq 0xef8(%rsp), %rdx movq 0xef0(%rsp), %rcx callq 0x65040 jmp 0x18538eb jmp 0x18538ed movl 0x70c(%rsp), %eax leaq 0x620(%rsp), %rcx movq %rcx, 0xf28(%rsp) movl %eax, 0xf24(%rsp) movq $0x4, 0xf18(%rsp) movq $0x0, 0xf10(%rsp) movq 0xf28(%rsp), %rdi movq %rdi, 0x1f8(%rsp) movq $0x0, (%rdi) movq $0x0, 0x8(%rdi) movq $0x0, 0x10(%rdi) movl $0x0, 0x18(%rdi) movq $0x0, 0x20(%rdi) movl $0x0, 0x28(%rdi) movl $0x0, 0x2c(%rdi) movl $0x0, 0x30(%rdi) movl $0x0, 0x34(%rdi) movl $0x0, 0x38(%rdi) movq $0x0, 0x40(%rdi) movl 0xf24(%rsp), %esi movq 0xf18(%rsp), %rdx movq 0xf10(%rsp), %rcx callq 0x65040 jmp 0x18539a2 jmp 0x18539a4 movl 0x70c(%rsp), %eax leaq 0x5d8(%rsp), %rcx movq %rcx, 0xf48(%rsp) movl %eax, 
0xf44(%rsp) movq $0x4, 0xf38(%rsp) movq $0x0, 0xf30(%rsp) movq 0xf48(%rsp), %rdi movq %rdi, 0x1f0(%rsp) movq $0x0, (%rdi) movq $0x0, 0x8(%rdi) movq $0x0, 0x10(%rdi) movl $0x0, 0x18(%rdi) movq $0x0, 0x20(%rdi) movl $0x0, 0x28(%rdi) movl $0x0, 0x2c(%rdi) movl $0x0, 0x30(%rdi) movl $0x0, 0x34(%rdi) movl $0x0, 0x38(%rdi) movq $0x0, 0x40(%rdi) movl 0xf44(%rsp), %esi movq 0xf38(%rsp), %rdx movq 0xf30(%rsp), %rcx callq 0x65040 jmp 0x1853a59 jmp 0x1853a5b leaq 0x6c0(%rsp), %rax movq %rax, 0xec8(%rsp) movq 0xec8(%rsp), %rax movq (%rax), %rax movq %rax, 0x5d0(%rsp) leaq 0x678(%rsp), %rax movq %rax, 0xec0(%rsp) movq 0xec0(%rsp), %rax movq (%rax), %rax movq %rax, 0x5c8(%rsp) leaq 0x620(%rsp), %rax movq %rax, 0xeb8(%rsp) movq 0xeb8(%rsp), %rax movq (%rax), %rax movq %rax, 0x5c0(%rsp) leaq 0x5d8(%rsp), %rax movq %rax, 0xeb0(%rsp) movq 0xeb0(%rsp), %rax movq (%rax), %rax movq %rax, 0x5b8(%rsp) movl $0xfffffffd, 0x5b4(%rsp) # imm = 0xFFFFFFFD movl $0x0, 0x5b0(%rsp) movl 0x5b0(%rsp), %eax cmpl 0x708(%rsp), %eax jge 0x18557dc movq 0x710(%rsp), %rax movslq 0x5b0(%rsp), %rcx movl (%rax,%rcx,4), %eax movl %eax, 0x5ac(%rsp) movl 0x5ac(%rsp), %eax cmpl 0x5b4(%rsp), %eax jne 0x1853b8b jmp 0x18549ad movq %rax, %rcx movl %edx, %eax movq %rcx, 0x670(%rsp) movl %eax, 0x66c(%rsp) jmp 0x1855e66 movq %rax, %rcx movl %edx, %eax movq %rcx, 0x670(%rsp) movl %eax, 0x66c(%rsp) jmp 0x1855d53 movq %rax, %rcx movl %edx, %eax movq %rcx, 0x670(%rsp) movl %eax, 0x66c(%rsp) jmp 0x1855c40 movl 0x5ac(%rsp), %eax movl 0x5b4(%rsp), %ecx addl $0x1, %ecx cmpl %ecx, %eax jne 0x1853db6 movq 0x5d0(%rsp), %rax movq %rax, 0x5a0(%rsp) movq 0x5c8(%rsp), %rax movq %rax, 0x5d0(%rsp) movq 0x5c0(%rsp), %rax movq %rax, 0x5c8(%rsp) movq 0x5b8(%rsp), %rax movq %rax, 0x5c0(%rsp) movq 0x5a0(%rsp), %rax movq %rax, 0x5b8(%rsp) movq 0x738(%rsp), %rcx movl 0x5ac(%rsp), %eax addl $0x2, %eax movq %rcx, 0x818(%rsp) movl %eax, 0x814(%rsp) movq 0x818(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0x814(%rsp), %rsi imulq 
%rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x598(%rsp) movq 0x728(%rsp), %rax movq %rax, 0x590(%rsp) movq 0x5b8(%rsp), %rax movq %rax, 0x588(%rsp) movl $0x0, 0x584(%rsp) movl 0x584(%rsp), %eax cmpl 0x70c(%rsp), %eax jge 0x1853db1 movq 0x720(%rsp), %rax movslq 0x584(%rsp), %rcx movl (%rax,%rcx,4), %eax movl %eax, 0x580(%rsp) movq 0x598(%rsp), %rax movslq 0x580(%rsp), %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x578(%rsp) movq 0x590(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0x574(%rsp) movq 0x590(%rsp), %rax vmovss 0x4(%rax), %xmm0 vmovss %xmm0, 0x570(%rsp) movq 0x590(%rsp), %rax vmovss 0x8(%rax), %xmm0 vmovss %xmm0, 0x56c(%rsp) movq 0x590(%rsp), %rax vmovss 0xc(%rax), %xmm0 vmovss %xmm0, 0x568(%rsp) movq 0x578(%rsp), %rax vmovss -0x4(%rax), %xmm0 vmulss 0x574(%rsp), %xmm0, %xmm0 movq 0x578(%rsp), %rax vmovss (%rax), %xmm1 vmulss 0x570(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 movq 0x578(%rsp), %rax vmovss 0x4(%rax), %xmm1 vmulss 0x56c(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 movq 0x578(%rsp), %rax vmovss 0x8(%rax), %xmm1 vmulss 0x568(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 movq 0x588(%rsp), %rax movslq 0x584(%rsp), %rcx vmovss %xmm0, (%rax,%rcx,4) movq 0x590(%rsp), %rax addq $0x10, %rax movq %rax, 0x590(%rsp) movl 0x584(%rsp), %eax addl $0x1, %eax movl %eax, 0x584(%rsp) jmp 0x1853c6b jmp 0x18549ab movl 0x5ac(%rsp), %eax movl 0x5b4(%rsp), %ecx addl $0x2, %ecx cmpl %ecx, %eax jne 0x18540e4 movq 0x5d0(%rsp), %rax movq %rax, 0x560(%rsp) movq 0x5c8(%rsp), %rax movq %rax, 0x558(%rsp) movq 0x5c0(%rsp), %rax movq %rax, 0x5d0(%rsp) movq 0x5b8(%rsp), %rax movq %rax, 0x5c8(%rsp) movq 0x560(%rsp), %rax movq %rax, 0x5c0(%rsp) movq 0x558(%rsp), %rax movq %rax, 0x5b8(%rsp) movq 0x738(%rsp), %rcx movl 0x5ac(%rsp), %eax addl $0x1, %eax movq %rcx, 0x808(%rsp) movl %eax, 0x804(%rsp) movq 0x808(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0x804(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 
0x550(%rsp) movq 0x738(%rsp), %rcx movl 0x5ac(%rsp), %eax addl $0x2, %eax movq %rcx, 0x7f8(%rsp) movl %eax, 0x7f4(%rsp) movq 0x7f8(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0x7f4(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x548(%rsp) movq 0x728(%rsp), %rax movq %rax, 0x540(%rsp) movq 0x5c0(%rsp), %rax movq %rax, 0x538(%rsp) movq 0x5b8(%rsp), %rax movq %rax, 0x530(%rsp) movl $0x0, 0x52c(%rsp) movl 0x52c(%rsp), %eax cmpl 0x70c(%rsp), %eax jge 0x18540df movq 0x720(%rsp), %rax movslq 0x52c(%rsp), %rcx movl (%rax,%rcx,4), %eax movl %eax, 0x528(%rsp) movq 0x550(%rsp), %rax movslq 0x528(%rsp), %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x520(%rsp) movq 0x548(%rsp), %rax movslq 0x528(%rsp), %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x518(%rsp) movq 0x540(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0x514(%rsp) movq 0x540(%rsp), %rax vmovss 0x4(%rax), %xmm0 vmovss %xmm0, 0x510(%rsp) movq 0x540(%rsp), %rax vmovss 0x8(%rax), %xmm0 vmovss %xmm0, 0x50c(%rsp) movq 0x540(%rsp), %rax vmovss 0xc(%rax), %xmm0 vmovss %xmm0, 0x508(%rsp) movq 0x520(%rsp), %rax vmovss -0x4(%rax), %xmm0 vmulss 0x514(%rsp), %xmm0, %xmm0 movq 0x520(%rsp), %rax vmovss (%rax), %xmm1 vmulss 0x510(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 movq 0x520(%rsp), %rax vmovss 0x4(%rax), %xmm1 vmulss 0x50c(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 movq 0x520(%rsp), %rax vmovss 0x8(%rax), %xmm1 vmulss 0x508(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 movq 0x538(%rsp), %rax movslq 0x52c(%rsp), %rcx vmovss %xmm0, (%rax,%rcx,4) movq 0x518(%rsp), %rax vmovss -0x4(%rax), %xmm0 vmulss 0x514(%rsp), %xmm0, %xmm0 movq 0x518(%rsp), %rax vmovss (%rax), %xmm1 vmulss 0x510(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 movq 0x518(%rsp), %rax vmovss 0x4(%rax), %xmm1 vmulss 0x50c(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 movq 0x518(%rsp), %rax vmovss 0x8(%rax), %xmm1 vmulss 0x508(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 movq 0x530(%rsp), %rax 
movslq 0x52c(%rsp), %rcx vmovss %xmm0, (%rax,%rcx,4) movq 0x540(%rsp), %rax addq $0x10, %rax movq %rax, 0x540(%rsp) movl 0x52c(%rsp), %eax addl $0x1, %eax movl %eax, 0x52c(%rsp) jmp 0x1853f02 jmp 0x18549a9 movl 0x5ac(%rsp), %eax movl 0x5b4(%rsp), %ecx addl $0x3, %ecx cmpl %ecx, %eax jne 0x1854512 movq 0x5d0(%rsp), %rax movq %rax, 0x500(%rsp) movq 0x5c8(%rsp), %rax movq %rax, 0x4f8(%rsp) movq 0x5c0(%rsp), %rax movq %rax, 0x4f0(%rsp) movq 0x5b8(%rsp), %rax movq %rax, 0x5d0(%rsp) movq 0x500(%rsp), %rax movq %rax, 0x5c8(%rsp) movq 0x4f8(%rsp), %rax movq %rax, 0x5c0(%rsp) movq 0x4f0(%rsp), %rax movq %rax, 0x5b8(%rsp) movq 0x738(%rsp), %rcx movl 0x5ac(%rsp), %eax movq %rcx, 0x7e8(%rsp) movl %eax, 0x7e4(%rsp) movq 0x7e8(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0x7e4(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x4e8(%rsp) movq 0x738(%rsp), %rcx movl 0x5ac(%rsp), %eax addl $0x1, %eax movq %rcx, 0x7d8(%rsp) movl %eax, 0x7d4(%rsp) movq 0x7d8(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0x7d4(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x4e0(%rsp) movq 0x738(%rsp), %rcx movl 0x5ac(%rsp), %eax addl $0x2, %eax movq %rcx, 0x7c8(%rsp) movl %eax, 0x7c4(%rsp) movq 0x7c8(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0x7c4(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x4d8(%rsp) movq 0x728(%rsp), %rax movq %rax, 0x4d0(%rsp) movq 0x5c8(%rsp), %rax movq %rax, 0x4c8(%rsp) movq 0x5c0(%rsp), %rax movq %rax, 0x4c0(%rsp) movq 0x5b8(%rsp), %rax movq %rax, 0x4b8(%rsp) movl $0x0, 0x4b4(%rsp) movl 0x4b4(%rsp), %eax cmpl 0x70c(%rsp), %eax jge 0x185450d movq 0x720(%rsp), %rax movslq 0x4b4(%rsp), %rcx movl (%rax,%rcx,4), %eax movl %eax, 0x4b0(%rsp) movq 0x4e8(%rsp), %rax movslq 0x4b0(%rsp), %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x4a8(%rsp) movq 0x4e0(%rsp), %rax movslq 0x4b0(%rsp), %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 
0x4a0(%rsp) movq 0x4d8(%rsp), %rax movslq 0x4b0(%rsp), %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x498(%rsp) movq 0x4d0(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0x494(%rsp) movq 0x4d0(%rsp), %rax vmovss 0x4(%rax), %xmm0 vmovss %xmm0, 0x490(%rsp) movq 0x4d0(%rsp), %rax vmovss 0x8(%rax), %xmm0 vmovss %xmm0, 0x48c(%rsp) movq 0x4d0(%rsp), %rax vmovss 0xc(%rax), %xmm0 vmovss %xmm0, 0x488(%rsp) movq 0x4a8(%rsp), %rax vmovss -0x4(%rax), %xmm0 vmulss 0x494(%rsp), %xmm0, %xmm0 movq 0x4a8(%rsp), %rax vmovss (%rax), %xmm1 vmulss 0x490(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 movq 0x4a8(%rsp), %rax vmovss 0x4(%rax), %xmm1 vmulss 0x48c(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 movq 0x4a8(%rsp), %rax vmovss 0x8(%rax), %xmm1 vmulss 0x488(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 movq 0x4c8(%rsp), %rax movslq 0x4b4(%rsp), %rcx vmovss %xmm0, (%rax,%rcx,4) movq 0x4a0(%rsp), %rax vmovss -0x4(%rax), %xmm0 vmulss 0x494(%rsp), %xmm0, %xmm0 movq 0x4a0(%rsp), %rax vmovss (%rax), %xmm1 vmulss 0x490(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 movq 0x4a0(%rsp), %rax vmovss 0x4(%rax), %xmm1 vmulss 0x48c(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 movq 0x4a0(%rsp), %rax vmovss 0x8(%rax), %xmm1 vmulss 0x488(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 movq 0x4c0(%rsp), %rax movslq 0x4b4(%rsp), %rcx vmovss %xmm0, (%rax,%rcx,4) movq 0x498(%rsp), %rax vmovss -0x4(%rax), %xmm0 vmulss 0x494(%rsp), %xmm0, %xmm0 movq 0x498(%rsp), %rax vmovss (%rax), %xmm1 vmulss 0x490(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 movq 0x498(%rsp), %rax vmovss 0x4(%rax), %xmm1 vmulss 0x48c(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 movq 0x498(%rsp), %rax vmovss 0x8(%rax), %xmm1 vmulss 0x488(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 movq 0x4b8(%rsp), %rax movslq 0x4b4(%rsp), %rcx vmovss %xmm0, (%rax,%rcx,4) movq 0x4d0(%rsp), %rax addq $0x10, %rax movq %rax, 0x4d0(%rsp) movl 0x4b4(%rsp), %eax addl $0x1, %eax movl %eax, 0x4b4(%rsp) jmp 0x1854299 jmp 0x18549a7 movq 0x738(%rsp), 
%rcx movl 0x5ac(%rsp), %eax subl $0x1, %eax movq %rcx, 0x7b8(%rsp) movl %eax, 0x7b4(%rsp) movq 0x7b8(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0x7b4(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x480(%rsp) movq 0x738(%rsp), %rcx movl 0x5ac(%rsp), %eax movq %rcx, 0x7a8(%rsp) movl %eax, 0x7a4(%rsp) movq 0x7a8(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0x7a4(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x478(%rsp) movq 0x738(%rsp), %rcx movl 0x5ac(%rsp), %eax addl $0x1, %eax movq %rcx, 0x798(%rsp) movl %eax, 0x794(%rsp) movq 0x798(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0x794(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x470(%rsp) movq 0x738(%rsp), %rcx movl 0x5ac(%rsp), %eax addl $0x2, %eax movq %rcx, 0x788(%rsp) movl %eax, 0x784(%rsp) movq 0x788(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0x784(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x468(%rsp) movq 0x728(%rsp), %rax movq %rax, 0x460(%rsp) movq 0x5d0(%rsp), %rax movq %rax, 0x458(%rsp) movq 0x5c8(%rsp), %rax movq %rax, 0x450(%rsp) movq 0x5c0(%rsp), %rax movq %rax, 0x448(%rsp) movq 0x5b8(%rsp), %rax movq %rax, 0x440(%rsp) movl $0x0, 0x43c(%rsp) movl 0x43c(%rsp), %eax cmpl 0x70c(%rsp), %eax jge 0x18549a5 movq 0x720(%rsp), %rax movslq 0x43c(%rsp), %rcx movl (%rax,%rcx,4), %eax movl %eax, 0x438(%rsp) movq 0x480(%rsp), %rax movslq 0x438(%rsp), %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x430(%rsp) movq 0x478(%rsp), %rax movslq 0x438(%rsp), %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x428(%rsp) movq 0x470(%rsp), %rax movslq 0x438(%rsp), %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x420(%rsp) movq 0x468(%rsp), %rax movslq 0x438(%rsp), %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x418(%rsp) movq 0x460(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0x414(%rsp) movq 0x460(%rsp), %rax vmovss 
0x4(%rax), %xmm0 vmovss %xmm0, 0x410(%rsp) movq 0x460(%rsp), %rax vmovss 0x8(%rax), %xmm0 vmovss %xmm0, 0x40c(%rsp) movq 0x460(%rsp), %rax vmovss 0xc(%rax), %xmm0 vmovss %xmm0, 0x408(%rsp) movq 0x430(%rsp), %rax vmovss -0x4(%rax), %xmm0 vmulss 0x414(%rsp), %xmm0, %xmm0 movq 0x430(%rsp), %rax vmovss (%rax), %xmm1 vmulss 0x410(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 movq 0x430(%rsp), %rax vmovss 0x4(%rax), %xmm1 vmulss 0x40c(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 movq 0x430(%rsp), %rax vmovss 0x8(%rax), %xmm1 vmulss 0x408(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 movq 0x458(%rsp), %rax movslq 0x43c(%rsp), %rcx vmovss %xmm0, (%rax,%rcx,4) movq 0x428(%rsp), %rax vmovss -0x4(%rax), %xmm0 vmulss 0x414(%rsp), %xmm0, %xmm0 movq 0x428(%rsp), %rax vmovss (%rax), %xmm1 vmulss 0x410(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 movq 0x428(%rsp), %rax vmovss 0x4(%rax), %xmm1 vmulss 0x40c(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 movq 0x428(%rsp), %rax vmovss 0x8(%rax), %xmm1 vmulss 0x408(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 movq 0x450(%rsp), %rax movslq 0x43c(%rsp), %rcx vmovss %xmm0, (%rax,%rcx,4) movq 0x420(%rsp), %rax vmovss -0x4(%rax), %xmm0 vmulss 0x414(%rsp), %xmm0, %xmm0 movq 0x420(%rsp), %rax vmovss (%rax), %xmm1 vmulss 0x410(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 movq 0x420(%rsp), %rax vmovss 0x4(%rax), %xmm1 vmulss 0x40c(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 movq 0x420(%rsp), %rax vmovss 0x8(%rax), %xmm1 vmulss 0x408(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 movq 0x448(%rsp), %rax movslq 0x43c(%rsp), %rcx vmovss %xmm0, (%rax,%rcx,4) movq 0x418(%rsp), %rax vmovss -0x4(%rax), %xmm0 vmulss 0x414(%rsp), %xmm0, %xmm0 movq 0x418(%rsp), %rax vmovss (%rax), %xmm1 vmulss 0x410(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 movq 0x418(%rsp), %rax vmovss 0x4(%rax), %xmm1 vmulss 0x40c(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 movq 0x418(%rsp), %rax vmovss 0x8(%rax), %xmm1 vmulss 0x408(%rsp), %xmm1, %xmm1 vaddss %xmm1, 
%xmm0, %xmm0 movq 0x440(%rsp), %rax movslq 0x43c(%rsp), %rcx vmovss %xmm0, (%rax,%rcx,4) movq 0x460(%rsp), %rax addq $0x10, %rax movq %rax, 0x460(%rsp) movl 0x43c(%rsp), %eax addl $0x1, %eax movl %eax, 0x43c(%rsp) jmp 0x185469a jmp 0x18549a7 jmp 0x18549a9 jmp 0x18549ab jmp 0x18549ad movl 0x5ac(%rsp), %eax movl %eax, 0x5b4(%rsp) movq 0x718(%rsp), %rax vmovss (%rax), %xmm0 vmovss %xmm0, 0x404(%rsp) movq 0x718(%rsp), %rax vmovss 0x4(%rax), %xmm0 vmovss %xmm0, 0x400(%rsp) movq 0x718(%rsp), %rax vmovss 0x8(%rax), %xmm0 vmovss %xmm0, 0x3fc(%rsp) movq 0x718(%rsp), %rax vmovss 0xc(%rax), %xmm0 vmovss %xmm0, 0x3f8(%rsp) movq 0x5d0(%rsp), %rax movq %rax, 0x3f0(%rsp) movq 0x5c8(%rsp), %rax movq %rax, 0x3e8(%rsp) movq 0x5c0(%rsp), %rax movq %rax, 0x3e0(%rsp) movq 0x5b8(%rsp), %rax movq %rax, 0x3d8(%rsp) movq 0x730(%rsp), %rcx movl 0x5b0(%rsp), %eax movq %rcx, 0x828(%rsp) movl %eax, 0x824(%rsp) movq 0x828(%rsp), %rdx movslq 0x2c(%rdx), %rcx movslq 0x824(%rsp), %rax imulq %rax, %rcx movq (%rdx), %rax movq 0x10(%rdx), %rdx imulq %rdx, %rcx addq %rcx, %rax movq %rax, 0x3d0(%rsp) movl $0x0, 0x3cc(%rsp) vmovss 0x404(%rsp), %xmm0 vmovss %xmm0, 0x83c(%rsp) vmovss 0x83c(%rsp), %xmm0 vmovss %xmm0, 0x1bc(%rsp) vmovss %xmm0, 0xc3c(%rsp) vmovss %xmm0, 0xc38(%rsp) vmovss %xmm0, 0xc34(%rsp) vmovss %xmm0, 0xc30(%rsp) vmovss %xmm0, 0xc2c(%rsp) vmovss %xmm0, 0xc28(%rsp) vmovss %xmm0, 0xc24(%rsp) vmovss %xmm0, 0xc20(%rsp) vmovss 0xc34(%rsp), %xmm1 vmovss 0xc30(%rsp), %xmm0 vinsertps $0x10, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[2,3] vmovss 0xc38(%rsp), %xmm1 vinsertps $0x20, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm1[0],xmm0[3] vmovss 0xc3c(%rsp), %xmm1 vinsertps $0x30, %xmm1, %xmm0, %xmm1 # xmm1 = xmm0[0,1,2],xmm1[0] vmovss 0xc24(%rsp), %xmm2 vmovss 0xc20(%rsp), %xmm0 vinsertps $0x10, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm2[0],xmm0[2,3] vmovss 0xc28(%rsp), %xmm2 vinsertps $0x20, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm2[0],xmm0[3] vmovss 0xc2c(%rsp), %xmm2 vinsertps $0x30, 
%xmm2, %xmm0, %xmm2 # xmm2 = xmm0[0,1,2],xmm2[0] vmovaps %xmm2, %xmm0 vinsertf128 $0x1, %xmm1, %ymm0, %ymm0 vmovaps %ymm0, 0xc00(%rsp) vmovaps 0xc00(%rsp), %ymm0 vmovaps %ymm0, 0x1c0(%rsp) vmovaps 0x1c0(%rsp), %ymm0 vmovaps %ymm0, 0x3a0(%rsp) vmovss 0x400(%rsp), %xmm0 vmovss %xmm0, 0x838(%rsp) vmovss 0x838(%rsp), %xmm0 vmovss %xmm0, 0x17c(%rsp) vmovss %xmm0, 0xc7c(%rsp) vmovss %xmm0, 0xc78(%rsp) vmovss %xmm0, 0xc74(%rsp) vmovss %xmm0, 0xc70(%rsp) vmovss %xmm0, 0xc6c(%rsp) vmovss %xmm0, 0xc68(%rsp) vmovss %xmm0, 0xc64(%rsp) vmovss %xmm0, 0xc60(%rsp) vmovss 0xc74(%rsp), %xmm1 vmovss 0xc70(%rsp), %xmm0 vinsertps $0x10, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[2,3] vmovss 0xc78(%rsp), %xmm1 vinsertps $0x20, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm1[0],xmm0[3] vmovss 0xc7c(%rsp), %xmm1 vinsertps $0x30, %xmm1, %xmm0, %xmm1 # xmm1 = xmm0[0,1,2],xmm1[0] vmovss 0xc64(%rsp), %xmm2 vmovss 0xc60(%rsp), %xmm0 vinsertps $0x10, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm2[0],xmm0[2,3] vmovss 0xc68(%rsp), %xmm2 vinsertps $0x20, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm2[0],xmm0[3] vmovss 0xc6c(%rsp), %xmm2 vinsertps $0x30, %xmm2, %xmm0, %xmm2 # xmm2 = xmm0[0,1,2],xmm2[0] vmovaps %xmm2, %xmm0 vinsertf128 $0x1, %xmm1, %ymm0, %ymm0 vmovaps %ymm0, 0xc40(%rsp) vmovaps 0xc40(%rsp), %ymm0 vmovaps %ymm0, 0x180(%rsp) vmovaps 0x180(%rsp), %ymm0 vmovaps %ymm0, 0x380(%rsp) vmovss 0x3fc(%rsp), %xmm0 vmovss %xmm0, 0x834(%rsp) vmovss 0x834(%rsp), %xmm0 vmovss %xmm0, 0x13c(%rsp) vmovss %xmm0, 0xcbc(%rsp) vmovss %xmm0, 0xcb8(%rsp) vmovss %xmm0, 0xcb4(%rsp) vmovss %xmm0, 0xcb0(%rsp) vmovss %xmm0, 0xcac(%rsp) vmovss %xmm0, 0xca8(%rsp) vmovss %xmm0, 0xca4(%rsp) vmovss %xmm0, 0xca0(%rsp) vmovss 0xcb4(%rsp), %xmm1 vmovss 0xcb0(%rsp), %xmm0 vinsertps $0x10, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[2,3] vmovss 0xcb8(%rsp), %xmm1 vinsertps $0x20, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm1[0],xmm0[3] vmovss 0xcbc(%rsp), %xmm1 vinsertps $0x30, %xmm1, %xmm0, %xmm1 # xmm1 = 
xmm0[0,1,2],xmm1[0] vmovss 0xca4(%rsp), %xmm2 vmovss 0xca0(%rsp), %xmm0 vinsertps $0x10, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm2[0],xmm0[2,3] vmovss 0xca8(%rsp), %xmm2 vinsertps $0x20, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm2[0],xmm0[3] vmovss 0xcac(%rsp), %xmm2 vinsertps $0x30, %xmm2, %xmm0, %xmm2 # xmm2 = xmm0[0,1,2],xmm2[0] vmovaps %xmm2, %xmm0 vinsertf128 $0x1, %xmm1, %ymm0, %ymm0 vmovaps %ymm0, 0xc80(%rsp) vmovaps 0xc80(%rsp), %ymm0 vmovaps %ymm0, 0x140(%rsp) vmovaps 0x140(%rsp), %ymm0 vmovaps %ymm0, 0x360(%rsp) vmovss 0x3f8(%rsp), %xmm0 vmovss %xmm0, 0x830(%rsp) vmovss 0x830(%rsp), %xmm0 vmovss %xmm0, 0xfc(%rsp) vmovss %xmm0, 0xcfc(%rsp) vmovss %xmm0, 0xcf8(%rsp) vmovss %xmm0, 0xcf4(%rsp) vmovss %xmm0, 0xcf0(%rsp) vmovss %xmm0, 0xcec(%rsp) vmovss %xmm0, 0xce8(%rsp) vmovss %xmm0, 0xce4(%rsp) vmovss %xmm0, 0xce0(%rsp) vmovss 0xcf4(%rsp), %xmm1 vmovss 0xcf0(%rsp), %xmm0 vinsertps $0x10, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[2,3] vmovss 0xcf8(%rsp), %xmm1 vinsertps $0x20, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm1[0],xmm0[3] vmovss 0xcfc(%rsp), %xmm1 vinsertps $0x30, %xmm1, %xmm0, %xmm1 # xmm1 = xmm0[0,1,2],xmm1[0] vmovss 0xce4(%rsp), %xmm2 vmovss 0xce0(%rsp), %xmm0 vinsertps $0x10, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm2[0],xmm0[2,3] vmovss 0xce8(%rsp), %xmm2 vinsertps $0x20, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm2[0],xmm0[3] vmovss 0xcec(%rsp), %xmm2 vinsertps $0x30, %xmm2, %xmm0, %xmm2 # xmm2 = xmm0[0,1,2],xmm2[0] vmovaps %xmm2, %xmm0 vinsertf128 $0x1, %xmm1, %ymm0, %ymm0 vmovaps %ymm0, 0xcc0(%rsp) vmovaps 0xcc0(%rsp), %ymm0 vmovaps %ymm0, 0x100(%rsp) vmovaps 0x100(%rsp), %ymm0 vmovaps %ymm0, 0x340(%rsp) movl 0x3cc(%rsp), %eax addl $0x7, %eax cmpl 0x70c(%rsp), %eax jge 0x1855275 movq 0x3f0(%rsp), %rax movq %rax, 0xf68(%rsp) movq 0xf68(%rsp), %rax vmovups (%rax), %ymm0 vmovaps %ymm0, 0x320(%rsp) movq 0x3e8(%rsp), %rax movq %rax, 0xf60(%rsp) movq 0xf60(%rsp), %rax vmovups (%rax), %ymm0 vmovaps %ymm0, 0x300(%rsp) movq 0x3e0(%rsp), %rax 
movq %rax, 0xf58(%rsp) movq 0xf58(%rsp), %rax vmovups (%rax), %ymm0 vmovaps %ymm0, 0x2e0(%rsp) movq 0x3d8(%rsp), %rax movq %rax, 0xf50(%rsp) movq 0xf50(%rsp), %rax vmovups (%rax), %ymm0 vmovaps %ymm0, 0x2c0(%rsp) vmovaps 0x320(%rsp), %ymm1 vmovaps 0x3a0(%rsp), %ymm0 vmovaps %ymm1, 0x860(%rsp) vmovaps %ymm0, 0x840(%rsp) vmovaps 0x860(%rsp), %ymm0 vmulps 0x840(%rsp), %ymm0, %ymm0 vmovaps %ymm0, 0x2a0(%rsp) vmovaps 0x300(%rsp), %ymm2 vmovaps 0x380(%rsp), %ymm1 vmovaps 0x2a0(%rsp), %ymm0 vmovaps %ymm2, 0x980(%rsp) vmovaps %ymm1, 0x960(%rsp) vmovaps %ymm0, 0x940(%rsp) vmovaps 0x980(%rsp), %ymm2 vmovaps 0x960(%rsp), %ymm1 vmovaps 0x940(%rsp), %ymm0 vmovaps %ymm2, 0xd40(%rsp) vmovaps %ymm1, 0xd20(%rsp) vmovaps %ymm0, 0xd00(%rsp) vmovaps 0xd40(%rsp), %ymm1 vmovaps 0xd20(%rsp), %ymm0 vmovaps 0xd00(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0xc0(%rsp) vmovaps 0xc0(%rsp), %ymm0 vmovaps %ymm0, 0x2a0(%rsp) vmovaps 0x2e0(%rsp), %ymm2 vmovaps 0x360(%rsp), %ymm1 vmovaps 0x2a0(%rsp), %ymm0 vmovaps %ymm2, 0x920(%rsp) vmovaps %ymm1, 0x900(%rsp) vmovaps %ymm0, 0x8e0(%rsp) vmovaps 0x920(%rsp), %ymm2 vmovaps 0x900(%rsp), %ymm1 vmovaps 0x8e0(%rsp), %ymm0 vmovaps %ymm2, 0xda0(%rsp) vmovaps %ymm1, 0xd80(%rsp) vmovaps %ymm0, 0xd60(%rsp) vmovaps 0xda0(%rsp), %ymm1 vmovaps 0xd80(%rsp), %ymm0 vmovaps 0xd60(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0xa0(%rsp) vmovaps 0xa0(%rsp), %ymm0 vmovaps %ymm0, 0x2a0(%rsp) vmovaps 0x2c0(%rsp), %ymm2 vmovaps 0x340(%rsp), %ymm1 vmovaps 0x2a0(%rsp), %ymm0 vmovaps %ymm2, 0x8c0(%rsp) vmovaps %ymm1, 0x8a0(%rsp) vmovaps %ymm0, 0x880(%rsp) vmovaps 0x8c0(%rsp), %ymm2 vmovaps 0x8a0(%rsp), %ymm1 vmovaps 0x880(%rsp), %ymm0 vmovaps %ymm2, 0xe00(%rsp) vmovaps %ymm1, 0xde0(%rsp) vmovaps %ymm0, 0xdc0(%rsp) vmovaps 0xe00(%rsp), %ymm1 vmovaps 0xde0(%rsp), %ymm0 vmovaps 0xdc0(%rsp), %ymm2 vfmadd213ps %ymm2, %ymm1, %ymm0 # ymm0 = (ymm1 * ymm0) + ymm2 vmovaps %ymm0, 0x80(%rsp) 
vmovaps 0x80(%rsp), %ymm0 vmovaps %ymm0, 0x2a0(%rsp) movq 0x3d0(%rsp), %rax vmovaps 0x2a0(%rsp), %ymm0 movq %rax, 0xb18(%rsp) vmovaps %ymm0, 0xae0(%rsp) vmovaps 0xae0(%rsp), %ymm0 movq 0xb18(%rsp), %rax vmovups %ymm0, (%rax) movq 0x3d0(%rsp), %rax addq $0x20, %rax movq %rax, 0x3d0(%rsp) movq 0x3f0(%rsp), %rax addq $0x20, %rax movq %rax, 0x3f0(%rsp) movq 0x3e8(%rsp), %rax addq $0x20, %rax movq %rax, 0x3e8(%rsp) movq 0x3e0(%rsp), %rax addq $0x20, %rax movq %rax, 0x3e0(%rsp) movq 0x3d8(%rsp), %rax addq $0x20, %rax movq %rax, 0x3d8(%rsp) movl 0x3cc(%rsp), %eax addl $0x8, %eax movl %eax, 0x3cc(%rsp) jmp 0x1854ee5 vmovss 0x404(%rsp), %xmm0 vmovss %xmm0, 0xa2c(%rsp) vbroadcastss 0xa2c(%rsp), %xmm0 vmovaps %xmm0, 0xa10(%rsp) vmovaps 0xa10(%rsp), %xmm0 vmovaps %xmm0, 0x290(%rsp) vmovss 0x400(%rsp), %xmm0 vmovss %xmm0, 0xa0c(%rsp) vbroadcastss 0xa0c(%rsp), %xmm0 vmovaps %xmm0, 0x9f0(%rsp) vmovaps 0x9f0(%rsp), %xmm0 vmovaps %xmm0, 0x280(%rsp) vmovss 0x3fc(%rsp), %xmm0 vmovss %xmm0, 0x9ec(%rsp) vbroadcastss 0x9ec(%rsp), %xmm0 vmovaps %xmm0, 0x9d0(%rsp) vmovaps 0x9d0(%rsp), %xmm0 vmovaps %xmm0, 0x270(%rsp) vmovss 0x3f8(%rsp), %xmm0 vmovss %xmm0, 0x9cc(%rsp) vbroadcastss 0x9cc(%rsp), %xmm0 vmovaps %xmm0, 0x9b0(%rsp) vmovaps 0x9b0(%rsp), %xmm0 vmovaps %xmm0, 0x260(%rsp) movl 0x3cc(%rsp), %eax addl $0x3, %eax cmpl 0x70c(%rsp), %eax jge 0x18556cf movq 0x3f0(%rsp), %rax movq %rax, 0xf88(%rsp) movq 0xf88(%rsp), %rax vmovups (%rax), %xmm0 vmovaps %xmm0, 0x250(%rsp) movq 0x3e8(%rsp), %rax movq %rax, 0xf80(%rsp) movq 0xf80(%rsp), %rax vmovups (%rax), %xmm0 vmovaps %xmm0, 0x240(%rsp) movq 0x3e0(%rsp), %rax movq %rax, 0xf78(%rsp) movq 0xf78(%rsp), %rax vmovups (%rax), %xmm0 vmovaps %xmm0, 0x230(%rsp) movq 0x3d8(%rsp), %rax movq %rax, 0xf70(%rsp) movq 0xf70(%rsp), %rax vmovups (%rax), %xmm0 vmovaps %xmm0, 0x220(%rsp) vmovaps 0x250(%rsp), %xmm1 vmovaps 0x290(%rsp), %xmm0 vmovaps %xmm1, 0xa40(%rsp) vmovaps %xmm0, 0xa30(%rsp) vmovaps 0xa40(%rsp), %xmm0 vmulps 0xa30(%rsp), %xmm0, %xmm0 vmovaps 
%xmm0, 0x210(%rsp) vmovaps 0x240(%rsp), %xmm2 vmovaps 0x280(%rsp), %xmm1 vmovaps 0x210(%rsp), %xmm0 vmovaps %xmm2, 0xad0(%rsp) vmovaps %xmm1, 0xac0(%rsp) vmovaps %xmm0, 0xab0(%rsp) vmovaps 0xad0(%rsp), %xmm2 vmovaps 0xac0(%rsp), %xmm1 vmovaps 0xab0(%rsp), %xmm0 vmovaps %xmm2, 0xe40(%rsp) vmovaps %xmm1, 0xe30(%rsp) vmovaps %xmm0, 0xe20(%rsp) vmovaps 0xe40(%rsp), %xmm1 vmovaps 0xe30(%rsp), %xmm0 vmovaps 0xe20(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x70(%rsp) vmovaps 0x70(%rsp), %xmm0 vmovaps %xmm0, 0x210(%rsp) vmovaps 0x230(%rsp), %xmm2 vmovaps 0x270(%rsp), %xmm1 vmovaps 0x210(%rsp), %xmm0 vmovaps %xmm2, 0xaa0(%rsp) vmovaps %xmm1, 0xa90(%rsp) vmovaps %xmm0, 0xa80(%rsp) vmovaps 0xaa0(%rsp), %xmm2 vmovaps 0xa90(%rsp), %xmm1 vmovaps 0xa80(%rsp), %xmm0 vmovaps %xmm2, 0xe70(%rsp) vmovaps %xmm1, 0xe60(%rsp) vmovaps %xmm0, 0xe50(%rsp) vmovaps 0xe70(%rsp), %xmm1 vmovaps 0xe60(%rsp), %xmm0 vmovaps 0xe50(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x60(%rsp) vmovaps 0x60(%rsp), %xmm0 vmovaps %xmm0, 0x210(%rsp) vmovaps 0x220(%rsp), %xmm2 vmovaps 0x260(%rsp), %xmm1 vmovaps 0x210(%rsp), %xmm0 vmovaps %xmm2, 0xa70(%rsp) vmovaps %xmm1, 0xa60(%rsp) vmovaps %xmm0, 0xa50(%rsp) vmovaps 0xa70(%rsp), %xmm2 vmovaps 0xa60(%rsp), %xmm1 vmovaps 0xa50(%rsp), %xmm0 vmovaps %xmm2, 0xea0(%rsp) vmovaps %xmm1, 0xe90(%rsp) vmovaps %xmm0, 0xe80(%rsp) vmovaps 0xea0(%rsp), %xmm1 vmovaps 0xe90(%rsp), %xmm0 vmovaps 0xe80(%rsp), %xmm2 vfmadd213ps %xmm2, %xmm1, %xmm0 # xmm0 = (xmm1 * xmm0) + xmm2 vmovaps %xmm0, 0x50(%rsp) vmovaps 0x50(%rsp), %xmm0 vmovaps %xmm0, 0x210(%rsp) movq 0x3d0(%rsp), %rax vmovaps 0x210(%rsp), %xmm0 movq %rax, 0xbf8(%rsp) vmovaps %xmm0, 0xbe0(%rsp) vmovaps 0xbe0(%rsp), %xmm0 movq 0xbf8(%rsp), %rax vmovups %xmm0, (%rax) movq 0x3d0(%rsp), %rax addq $0x10, %rax movq %rax, 0x3d0(%rsp) movq 0x3f0(%rsp), %rax addq $0x10, %rax movq %rax, 0x3f0(%rsp) movq 0x3e8(%rsp), %rax addq $0x10, %rax movq 
%rax, 0x3e8(%rsp) movq 0x3e0(%rsp), %rax addq $0x10, %rax movq %rax, 0x3e0(%rsp) movq 0x3d8(%rsp), %rax addq $0x10, %rax movq %rax, 0x3d8(%rsp) movl 0x3cc(%rsp), %eax addl $0x4, %eax movl %eax, 0x3cc(%rsp) jmp 0x1855351 jmp 0x18556d1 movl 0x3cc(%rsp), %eax cmpl 0x70c(%rsp), %eax jge 0x18557b2 movq 0x3f0(%rsp), %rax movq %rax, %rcx addq $0x4, %rcx movq %rcx, 0x3f0(%rsp) vmovss (%rax), %xmm0 vmulss 0x404(%rsp), %xmm0, %xmm0 movq 0x3e8(%rsp), %rax movq %rax, %rcx addq $0x4, %rcx movq %rcx, 0x3e8(%rsp) vmovss (%rax), %xmm1 vmulss 0x400(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 movq 0x3e0(%rsp), %rax movq %rax, %rcx addq $0x4, %rcx movq %rcx, 0x3e0(%rsp) vmovss (%rax), %xmm1 vmulss 0x3fc(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 movq 0x3d8(%rsp), %rax movq %rax, %rcx addq $0x4, %rcx movq %rcx, 0x3d8(%rsp) vmovss (%rax), %xmm1 vmulss 0x3f8(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 movq 0x3d0(%rsp), %rax movq %rax, %rcx addq $0x4, %rcx movq %rcx, 0x3d0(%rsp) vmovss %xmm0, (%rax) movl 0x3cc(%rsp), %eax addl $0x1, %eax movl %eax, 0x3cc(%rsp) jmp 0x18556d1 movq 0x718(%rsp), %rax addq $0x10, %rax movq %rax, 0x718(%rsp) movl 0x5b0(%rsp), %eax addl $0x1, %eax movl %eax, 0x5b0(%rsp) jmp 0x1853afd leaq 0x5d8(%rsp), %rax movq %rax, 0x740(%rsp) movq 0x740(%rsp), %rax movq %rax, 0xb98(%rsp) movq 0xb98(%rsp), %rax movq %rax, 0x48(%rsp) cmpq $0x0, 0x8(%rax) je 0x185589b movq 0x48(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xb94(%rsp) # imm = 0xFFFFFFFF movl 0xb94(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xb90(%rsp) cmpl $0x1, 0xb90(%rsp) jne 0x185589b movq 0x48(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x185586c movq 0x48(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x185586a jmp 0x1855899 movq 0x48(%rsp), %rax movq (%rax), %rax movq %rax, 0xba0(%rsp) cmpq $0x0, 0xba0(%rsp) je 0x1855897 movq 0xba0(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x1855899 jmp 0x185589b movq 0x48(%rsp), %rax movq 
$0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x18558f3 movq %rax, %rdi callq 0x5fc90 leaq 0x620(%rsp), %rax movq %rax, 0x750(%rsp) movq 0x750(%rsp), %rax movq %rax, 0xb78(%rsp) movq 0xb78(%rsp), %rax movq %rax, 0x40(%rsp) cmpq $0x0, 0x8(%rax) je 0x18559b2 movq 0x40(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xb74(%rsp) # imm = 0xFFFFFFFF movl 0xb74(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xb70(%rsp) cmpl $0x1, 0xb70(%rsp) jne 0x18559b2 movq 0x40(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1855983 movq 0x40(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x1855981 jmp 0x18559b0 movq 0x40(%rsp), %rax movq (%rax), %rax movq %rax, 0xbb0(%rsp) cmpq $0x0, 0xbb0(%rsp) je 0x18559ae movq 0xbb0(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x18559b0 jmp 0x18559b2 movq 0x40(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1855a0a movq %rax, %rdi callq 0x5fc90 leaq 0x678(%rsp), %rax movq %rax, 0x760(%rsp) movq 0x760(%rsp), %rax movq %rax, 0xb58(%rsp) movq 0xb58(%rsp), %rax movq %rax, 0x38(%rsp) cmpq $0x0, 0x8(%rax) je 0x1855ac9 movq 0x38(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xb54(%rsp) # imm = 0xFFFFFFFF movl 0xb54(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xb50(%rsp) cmpl $0x1, 0xb50(%rsp) jne 0x1855ac9 movq 0x38(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1855a9a movq 0x38(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x1855a98 jmp 0x1855ac7 movq 0x38(%rsp), %rax movq (%rax), %rax movq %rax, 0xbc0(%rsp) cmpq $0x0, 0xbc0(%rsp) je 0x1855ac5 movq 0xbc0(%rsp), %rdi 
vzeroupper callq 0x5e480 jmp 0x1855ac7 jmp 0x1855ac9 movq 0x38(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1855b21 movq %rax, %rdi callq 0x5fc90 leaq 0x6c0(%rsp), %rax movq %rax, 0x770(%rsp) movq 0x770(%rsp), %rax movq %rax, 0xb38(%rsp) movq 0xb38(%rsp), %rax movq %rax, 0x30(%rsp) cmpq $0x0, 0x8(%rax) je 0x1855be0 movq 0x30(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xb34(%rsp) # imm = 0xFFFFFFFF movl 0xb34(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xb30(%rsp) cmpl $0x1, 0xb30(%rsp) jne 0x1855be0 movq 0x30(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1855bb1 movq 0x30(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x1855baf jmp 0x1855bde movq 0x30(%rsp), %rax movq (%rax), %rax movq %rax, 0xbd0(%rsp) cmpq $0x0, 0xbd0(%rsp) je 0x1855bdc movq 0xbd0(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x1855bde jmp 0x1855be0 movq 0x30(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1855c38 movq %rax, %rdi callq 0x5fc90 movq %rbp, %rsp popq %rbp vzeroupper retq leaq 0x620(%rsp), %rax movq %rax, 0x758(%rsp) movq 0x758(%rsp), %rax movq %rax, 0xb68(%rsp) movq 0xb68(%rsp), %rax movq %rax, 0x28(%rsp) cmpq $0x0, 0x8(%rax) je 0x1855cf9 movq 0x28(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xb64(%rsp) # imm = 0xFFFFFFFF movl 0xb64(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xb60(%rsp) cmpl $0x1, 0xb60(%rsp) jne 0x1855cf9 movq 0x28(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1855ccd movq 0x28(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1855ccb jmp 0x1855cf7 movq 0x28(%rsp), 
%rax movq (%rax), %rax movq %rax, 0xbb8(%rsp) cmpq $0x0, 0xbb8(%rsp) je 0x1855cf5 movq 0xbb8(%rsp), %rdi callq 0x5e480 jmp 0x1855cf7 jmp 0x1855cf9 movq 0x28(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1855d51 movq %rax, %rdi callq 0x5fc90 jmp 0x1855d53 leaq 0x678(%rsp), %rax movq %rax, 0x768(%rsp) movq 0x768(%rsp), %rax movq %rax, 0xb48(%rsp) movq 0xb48(%rsp), %rax movq %rax, 0x20(%rsp) cmpq $0x0, 0x8(%rax) je 0x1855e0c movq 0x20(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xb44(%rsp) # imm = 0xFFFFFFFF movl 0xb44(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xb40(%rsp) cmpl $0x1, 0xb40(%rsp) jne 0x1855e0c movq 0x20(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1855de0 movq 0x20(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1855dde jmp 0x1855e0a movq 0x20(%rsp), %rax movq (%rax), %rax movq %rax, 0xbc8(%rsp) cmpq $0x0, 0xbc8(%rsp) je 0x1855e08 movq 0xbc8(%rsp), %rdi callq 0x5e480 jmp 0x1855e0a jmp 0x1855e0c movq 0x20(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1855e64 movq %rax, %rdi callq 0x5fc90 jmp 0x1855e66 leaq 0x6c0(%rsp), %rax movq %rax, 0x778(%rsp) movq 0x778(%rsp), %rax movq %rax, 0xb28(%rsp) movq 0xb28(%rsp), %rax movq %rax, 0x18(%rsp) cmpq $0x0, 0x8(%rax) je 0x1855f1f movq 0x18(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xb24(%rsp) # imm = 0xFFFFFFFF movl 0xb24(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xb20(%rsp) cmpl $0x1, 0xb20(%rsp) jne 0x1855f1f movq 0x18(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1855ef3 movq 0x18(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax 
callq *%rax jmp 0x1855ef1 jmp 0x1855f1d movq 0x18(%rsp), %rax movq (%rax), %rax movq %rax, 0xbd8(%rsp) cmpq $0x0, 0xbd8(%rsp) je 0x1855f1b movq 0xbd8(%rsp), %rdi callq 0x5e480 jmp 0x1855f1d jmp 0x1855f1f movq 0x18(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1855f77 movq %rax, %rdi callq 0x5fc90 jmp 0x1855f79 movq 0x670(%rsp), %rdi callq 0x5e3b0 nopw %cs:(%rax,%rax)
/ysh329[P]ncnn/src/layer/x86/interp_bicubic.h
ncnn::interpolate_cubic(float, float*)
// Bicubic interpolation weights (Catmull-Rom-style kernel with A = -0.75,
// the same constant OpenCV uses) for the four taps surrounding a sample
// position whose fractional offset is fx in [0, 1).
// coeffs[0..3] receive the tap weights; they are forced to sum to exactly 1.
static inline void interpolate_cubic(float fx, float* coeffs)
{
    const float A = -0.75f;

    // Distances from the interpolation point to the four neighbouring
    // samples; the fourth distance (2 - fx) is never evaluated because the
    // last weight is derived from the partition of unity below.
    const float d0 = fx + 1;
    const float d1 = fx;
    const float d2 = 1 - fx;

    // Outer tap (1 <= d < 2): A*d^3 - 5A*d^2 + 8A*d - 4A
    coeffs[0] = A * d0 * d0 * d0 - 5 * A * d0 * d0 + 8 * A * d0 - 4 * A;
    // Inner taps (0 <= d < 1): (A+2)*d^3 - (A+3)*d^2 + 1
    coeffs[1] = (A + 2) * d1 * d1 * d1 - (A + 3) * d1 * d1 + 1;
    coeffs[2] = (A + 2) * d2 * d2 * d2 - (A + 3) * d2 * d2 + 1;
    // Guarantee the weights sum to 1 instead of evaluating the outer
    // polynomial a second time for the fourth tap.
    coeffs[3] = 1.f - coeffs[0] - coeffs[1] - coeffs[2];
}
vmovss %xmm0, -0x4(%rsp) movq %rdi, -0x10(%rsp) vmovss 0x5c0fcd(%rip), %xmm0 # 0x1e16fb0 vmovss %xmm0, -0x14(%rsp) vmovss 0x5aa2df(%rip), %xmm0 # 0x1e002d0 vaddss -0x4(%rsp), %xmm0, %xmm0 vmovss %xmm0, -0x18(%rsp) vmovss -0x4(%rsp), %xmm0 vmovss %xmm0, -0x1c(%rsp) vmovss 0x5aa2bf(%rip), %xmm0 # 0x1e002d0 vsubss -0x4(%rsp), %xmm0, %xmm0 vmovss %xmm0, -0x20(%rsp) vmovss 0x5c0f8b(%rip), %xmm0 # 0x1e16fb0 vmulss -0x18(%rsp), %xmm0, %xmm0 vmulss -0x18(%rsp), %xmm0, %xmm0 vmulss -0x18(%rsp), %xmm0, %xmm0 vmovss 0x5c0f6d(%rip), %xmm1 # 0x1e16fac vmulss -0x18(%rsp), %xmm1, %xmm1 vmulss -0x18(%rsp), %xmm1, %xmm1 vsubss %xmm1, %xmm0, %xmm0 vmovss 0x5c0f51(%rip), %xmm1 # 0x1e16fa8 vmulss -0x18(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 vmovss 0x5bddcb(%rip), %xmm1 # 0x1e13e34 vsubss %xmm1, %xmm0, %xmm0 movq -0x10(%rsp), %rax vmovss %xmm0, (%rax) vmovss 0x5c0f26(%rip), %xmm0 # 0x1e16fa4 vmulss -0x1c(%rsp), %xmm0, %xmm0 vmulss -0x1c(%rsp), %xmm0, %xmm0 vmulss -0x1c(%rsp), %xmm0, %xmm0 vmovss 0x5c0f08(%rip), %xmm1 # 0x1e16fa0 vmulss -0x1c(%rsp), %xmm1, %xmm1 vmulss -0x1c(%rsp), %xmm1, %xmm1 vsubss %xmm1, %xmm0, %xmm0 vmovss 0x5aa220(%rip), %xmm1 # 0x1e002d0 vaddss %xmm1, %xmm0, %xmm0 movq -0x10(%rsp), %rax vmovss %xmm0, 0x4(%rax) vmovss 0x5c0ede(%rip), %xmm0 # 0x1e16fa4 vmulss -0x20(%rsp), %xmm0, %xmm0 vmulss -0x20(%rsp), %xmm0, %xmm0 vmulss -0x20(%rsp), %xmm0, %xmm0 vmovss 0x5c0ec0(%rip), %xmm1 # 0x1e16fa0 vmulss -0x20(%rsp), %xmm1, %xmm1 vmulss -0x20(%rsp), %xmm1, %xmm1 vsubss %xmm1, %xmm0, %xmm0 vmovss 0x5aa1d8(%rip), %xmm1 # 0x1e002d0 vaddss %xmm1, %xmm0, %xmm0 movq -0x10(%rsp), %rax vmovss %xmm0, 0x8(%rax) movq -0x10(%rsp), %rax vmovss 0x5aa1bd(%rip), %xmm0 # 0x1e002d0 vsubss (%rax), %xmm0, %xmm0 movq -0x10(%rsp), %rax vsubss 0x4(%rax), %xmm0, %xmm0 movq -0x10(%rsp), %rax vsubss 0x8(%rax), %xmm0, %xmm0 movq -0x10(%rsp), %rax vmovss %xmm0, 0xc(%rax) retq nopw %cs:(%rax,%rax)
/ysh329[P]ncnn/src/layer/x86/interp_bicubic.h
ncnn::linear_coeffs(int, int, int*, float*, int)
// Precompute, for each of the outw output columns, the left source column
// index and the pair of linear interpolation weights when resizing a row of
// w samples.
//   xofs[dx]          = left source index, clamped to [0, w - 2]
//   alpha[dx * 2]     = weight of the left sample (1 - frac)
//   alpha[dx * 2 + 1] = weight of the right sample (frac)
// NOTE(review): align_corner with outw == 1 would divide by zero; callers
// are assumed to avoid that case.
static void linear_coeffs(int w, int outw, int* xofs, float* alpha, int align_corner)
{
    // align_corner maps the corner samples of input and output onto each
    // other; the default uses the half-pixel-center convention.
    const double scale = align_corner ? (double)(w - 1) / (outw - 1)
                                      : (double)w / outw;

    for (int dx = 0; dx < outw; dx++)
    {
        float fx = align_corner ? (float)(dx * scale)
                                : (float)((dx + 0.5) * scale - 0.5);

        int sx = (int)floor(fx);
        fx -= sx;

        // Clamp to the valid sample range, pinning the weight so the result
        // degenerates to the nearest in-range sample.
        if (sx < 0)
        {
            sx = 0;
            fx = 0.f;
        }
        if (sx >= w - 1)
        {
            sx = w - 2;
            fx = 1.f;
        }

        xofs[dx] = sx;
        alpha[dx * 2] = 1.f - fx;
        alpha[dx * 2 + 1] = fx;
    }
}
subq $0x38, %rsp movl %edi, 0x34(%rsp) movl %esi, 0x30(%rsp) movq %rdx, 0x28(%rsp) movq %rcx, 0x20(%rsp) movl %r8d, 0x1c(%rsp) vcvtsi2sdl 0x34(%rsp), %xmm0, %xmm0 vcvtsi2sdl 0x30(%rsp), %xmm1, %xmm1 vdivsd %xmm1, %xmm0, %xmm0 vmovsd %xmm0, 0x10(%rsp) cmpl $0x0, 0x1c(%rsp) je 0x1861888 movl 0x34(%rsp), %eax subl $0x1, %eax vcvtsi2sd %eax, %xmm0, %xmm0 movl 0x30(%rsp), %eax subl $0x1, %eax vcvtsi2sd %eax, %xmm1, %xmm1 vdivsd %xmm1, %xmm0, %xmm0 vmovsd %xmm0, 0x10(%rsp) movl $0x0, 0xc(%rsp) movl 0xc(%rsp), %eax cmpl 0x30(%rsp), %eax jge 0x18619b1 vcvtsi2sdl 0xc(%rsp), %xmm0, %xmm0 vmovsd 0x59e764(%rip), %xmm1 # 0x1e00010 vaddsd %xmm1, %xmm0, %xmm0 vmulsd 0x10(%rsp), %xmm0, %xmm0 vmovsd 0x59e752(%rip), %xmm1 # 0x1e00010 vsubsd %xmm1, %xmm0, %xmm1 vcvtsd2ss %xmm1, %xmm0, %xmm0 vmovss %xmm0, 0x8(%rsp) cmpl $0x0, 0x1c(%rsp) je 0x18618e9 vcvtsi2sdl 0xc(%rsp), %xmm0, %xmm0 vmulsd 0x10(%rsp), %xmm0, %xmm1 vcvtsd2ss %xmm1, %xmm0, %xmm0 vmovss %xmm0, 0x8(%rsp) vmovss 0x8(%rsp), %xmm0 callq 0x84440 vcvttss2si %xmm0, %eax movl %eax, 0x4(%rsp) vcvtsi2ssl 0x4(%rsp), %xmm0, %xmm1 vmovss 0x8(%rsp), %xmm0 vsubss %xmm1, %xmm0, %xmm0 vmovss %xmm0, 0x8(%rsp) cmpl $0x0, 0x4(%rsp) jge 0x186192b movl $0x0, 0x4(%rsp) vxorps %xmm0, %xmm0, %xmm0 vmovss %xmm0, 0x8(%rsp) movl 0x4(%rsp), %eax movl 0x34(%rsp), %ecx subl $0x1, %ecx cmpl %ecx, %eax jl 0x1861953 movl 0x34(%rsp), %eax subl $0x2, %eax movl %eax, 0x4(%rsp) vmovss 0x59e983(%rip), %xmm0 # 0x1e002d0 vmovss %xmm0, 0x8(%rsp) movl 0x4(%rsp), %edx movq 0x28(%rsp), %rax movslq 0xc(%rsp), %rcx movl %edx, (%rax,%rcx,4) vmovss 0x59e964(%rip), %xmm0 # 0x1e002d0 vsubss 0x8(%rsp), %xmm0, %xmm0 movq 0x20(%rsp), %rax movl 0xc(%rsp), %ecx shll %ecx movslq %ecx, %rcx vmovss %xmm0, (%rax,%rcx,4) vmovss 0x8(%rsp), %xmm0 movq 0x20(%rsp), %rax movl 0xc(%rsp), %ecx shll %ecx addl $0x1, %ecx movslq %ecx, %rcx vmovss %xmm0, (%rax,%rcx,4) movl 0xc(%rsp), %eax addl $0x1, %eax movl %eax, 0xc(%rsp) jmp 0x1861890 addq $0x38, %rsp retq nopw %cs:(%rax,%rax)
/ysh329[P]ncnn/src/layer/x86/interp_bilinear.h
ncnn::YoloDetectionOutput::destroy_pipeline(ncnn::Option const&)
// Tear down the internal softmax layer created for class-score
// normalization. Safe to call when the pipeline was never created
// (softmax == 0). Always returns 0.
int YoloDetectionOutput::destroy_pipeline(const Option& opt)
{
    if (!softmax)
        return 0;

    softmax->destroy_pipeline(opt);
    delete softmax;
    softmax = 0;

    return 0;
}
subq $0x28, %rsp movq %rdi, 0x20(%rsp) movq %rsi, 0x18(%rsp) movq 0x20(%rsp), %rax movq %rax, 0x10(%rsp) cmpq $0x0, 0x128(%rax) je 0x18b774b movq 0x10(%rsp), %rax movq 0x128(%rax), %rdi movq 0x18(%rsp), %rsi movq (%rdi), %rax callq *0x28(%rax) movq 0x10(%rsp), %rax movq 0x128(%rax), %rax movq %rax, 0x8(%rsp) cmpq $0x0, %rax je 0x18b773b movq 0x8(%rsp), %rdi movq (%rdi), %rax callq *0x8(%rax) movq 0x10(%rsp), %rax movq $0x0, 0x128(%rax) xorl %eax, %eax addq $0x28, %rsp retq nopw %cs:(%rax,%rax)
/ysh329[P]ncnn/src/layer/yolodetectionoutput.cpp
ncnn::YoloDetectionOutput::forward_inplace(std::vector<ncnn::Mat, std::allocator<ncnn::Mat>>&, ncnn::Option const&) const
// Decode YOLO region outputs in-place into a detection list.
// Each input blob is split into num_box anchor groups of channels_per_box
// channels laid out as [x, y, w, h, objectness, num_class class logits].
// Candidate boxes whose (objectness * best class score) reaches
// confidence_threshold are gathered across all blobs, globally sorted by
// score, pruned with NMS, and the survivors written into
// bottom_top_blobs[0] as rows of {label + 1, score, xmin, ymin, xmax, ymax}
// in normalized [0, 1] coordinates.
// Returns 0 on success (including zero detections), -1 when the channel
// count does not match num_box * (5 + num_class), -100 when the output
// allocation fails.
int YoloDetectionOutput::forward_inplace(std::vector<Mat>& bottom_top_blobs, const Option& opt) const
{
    // gather all box
    std::vector<BBoxRect> all_bbox_rects;
    std::vector<float> all_bbox_scores;

    for (size_t b = 0; b < bottom_top_blobs.size(); b++)
    {
        Mat& bottom_top_blob = bottom_top_blobs[b];

        int w = bottom_top_blob.w;
        int h = bottom_top_blob.h;
        int channels = bottom_top_blob.c;

        const int channels_per_box = channels / num_box;

        // anchor coord + box score + num_class
        if (channels_per_box != 4 + 1 + num_class)
            return -1;

        // Per-anchor scratch buffers so the OpenMP loop below can append
        // without synchronization; merged sequentially afterwards.
        std::vector<std::vector<BBoxRect> > all_box_bbox_rects;
        std::vector<std::vector<float> > all_box_bbox_scores;
        all_box_bbox_rects.resize(num_box);
        all_box_bbox_scores.resize(num_box);

        #pragma omp parallel for num_threads(opt.num_threads)
        for (int pp = 0; pp < num_box; pp++)
        {
            // First channel of this anchor's group.
            int p = pp * channels_per_box;

            // Anchor prior dimensions for this box.
            const float bias_w = biases[pp * 2];
            const float bias_h = biases[pp * 2 + 1];

            const float* xptr = bottom_top_blob.channel(p);
            const float* yptr = bottom_top_blob.channel(p + 1);
            const float* wptr = bottom_top_blob.channel(p + 2);
            const float* hptr = bottom_top_blob.channel(p + 3);

            const float* box_score_ptr = bottom_top_blob.channel(p + 4);

            // softmax class scores (normalizes the num_class logit channels
            // in place)
            Mat scores = bottom_top_blob.channel_range(p + 5, num_class);
            softmax->forward_inplace(scores, opt);

            for (int i = 0; i < h; i++)
            {
                for (int j = 0; j < w; j++)
                {
                    // region box: center from sigmoid-offset cell coords,
                    // size from exp-scaled anchor priors; all normalized to
                    // [0, 1] by the feature-map dimensions.
                    float bbox_cx = (j + sigmoid(xptr[0])) / w;
                    float bbox_cy = (i + sigmoid(yptr[0])) / h;
                    float bbox_w = static_cast<float>(exp(wptr[0]) * bias_w / w);
                    float bbox_h = static_cast<float>(exp(hptr[0]) * bias_h / h);

                    float bbox_xmin = bbox_cx - bbox_w * 0.5f;
                    float bbox_ymin = bbox_cy - bbox_h * 0.5f;
                    float bbox_xmax = bbox_cx + bbox_w * 0.5f;
                    float bbox_ymax = bbox_cy + bbox_h * 0.5f;

                    // box score (objectness)
                    float box_score = sigmoid(box_score_ptr[0]);

                    // find class index with max class score
                    int class_index = 0;
                    float class_score = 0.f;
                    for (int q = 0; q < num_class; q++)
                    {
                        float score = scores.channel(q).row(i)[j];
                        if (score > class_score)
                        {
                            class_index = q;
                            class_score = score;
                        }
                    }

                    // NCNN_LOGE("%d %f %f", class_index, box_score, class_score);

                    float confidence = box_score * class_score;
                    if (confidence >= confidence_threshold)
                    {
                        BBoxRect c = {bbox_xmin, bbox_ymin, bbox_xmax, bbox_ymax, class_index};
                        all_box_bbox_rects[pp].push_back(c);
                        all_box_bbox_scores[pp].push_back(confidence);
                    }

                    // Advance one cell in each of the five coordinate/score
                    // channels.
                    xptr++;
                    yptr++;
                    wptr++;
                    hptr++;
                    box_score_ptr++;
                }
            }
        }

        // Merge the per-anchor buffers in anchor order.
        for (int i = 0; i < num_box; i++)
        {
            const std::vector<BBoxRect>& box_bbox_rects = all_box_bbox_rects[i];
            const std::vector<float>& box_bbox_scores = all_box_bbox_scores[i];

            all_bbox_rects.insert(all_bbox_rects.end(), box_bbox_rects.begin(), box_bbox_rects.end());
            all_bbox_scores.insert(all_bbox_scores.end(), box_bbox_scores.begin(), box_bbox_scores.end());
        }
    }

    // global sort inplace (descending by score, rects and scores kept in
    // lockstep)
    qsort_descent_inplace(all_bbox_rects, all_bbox_scores);

    // apply nms
    std::vector<size_t> picked;
    nms_sorted_bboxes(all_bbox_rects, picked, nms_threshold);

    // select the boxes that survived NMS
    std::vector<BBoxRect> bbox_rects;
    std::vector<float> bbox_scores;
    for (size_t i = 0; i < picked.size(); i++)
    {
        size_t z = picked[i];
        bbox_rects.push_back(all_bbox_rects[z]);
        bbox_scores.push_back(all_bbox_scores[z]);
    }

    // fill result
    int num_detected = static_cast<int>(bbox_rects.size());
    if (num_detected == 0)
        return 0;

    // One 6-float row per detection, reusing blob 0 as the output.
    Mat& top_blob = bottom_top_blobs[0];
    top_blob.create(6, num_detected, 4u, opt.blob_allocator);
    if (top_blob.empty())
        return -100;

    for (int i = 0; i < num_detected; i++)
    {
        const BBoxRect& r = bbox_rects[i];
        float score = bbox_scores[i];
        float* outptr = top_blob.row(i);

        outptr[0] = static_cast<float>(r.label + 1); // +1 for prepend background class
        outptr[1] = score;
        outptr[2] = r.xmin;
        outptr[3] = r.ymin;
        outptr[4] = r.xmax;
        outptr[5] = r.ymax;
    }

    return 0;
}
subq $0xba8, %rsp # imm = 0xBA8 movq %rdi, 0x5d8(%rsp) movq %rsi, 0x5d0(%rsp) movq %rdx, 0x5c8(%rsp) movq 0x5d8(%rsp), %rax movq %rax, 0x1b0(%rsp) leaq 0x5b0(%rsp), %rdi callq 0x17fd480 leaq 0x598(%rsp), %rdi callq 0xf86d00 movq $0x0, 0x590(%rsp) movq 0x590(%rsp), %rax movq %rax, 0x1a8(%rsp) movq 0x5d0(%rsp), %rdi callq 0x994f0 movq %rax, %rcx movq 0x1a8(%rsp), %rax cmpq %rcx, %rax jae 0x18b9fd7 movq 0x5d0(%rsp), %rdi movq 0x590(%rsp), %rsi callq 0x98840 movq 0x1b0(%rsp), %rcx movq %rax, 0x588(%rsp) movq 0x588(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x584(%rsp) movq 0x588(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0x580(%rsp) movq 0x588(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0x57c(%rsp) movl 0x57c(%rsp), %eax cltd idivl 0xd4(%rcx) movl %eax, 0x578(%rsp) movl 0x578(%rsp), %eax movl 0xd0(%rcx), %ecx addl $0x5, %ecx cmpl %ecx, %eax je 0x18b7885 movl $0xffffffff, 0x5e4(%rsp) # imm = 0xFFFFFFFF movl $0x1, 0x574(%rsp) jmp 0x18ba411 leaq 0x558(%rsp), %rdi movq %rdi, 0x1a0(%rsp) callq 0x17fd320 leaq 0x540(%rsp), %rdi callq 0x17fd330 movq 0x1b0(%rsp), %rax movq 0x1a0(%rsp), %rdi movslq 0xd4(%rax), %rsi callq 0x17fd340 jmp 0x18b78c5 movq 0x1b0(%rsp), %rax movslq 0xd4(%rax), %rsi leaq 0x540(%rsp), %rdi callq 0x17fd3e0 jmp 0x18b78e3 movl $0x0, 0x530(%rsp) movq 0x1b0(%rsp), %rcx movl 0x530(%rsp), %eax cmpl 0xd4(%rcx), %eax jge 0x18b9df1 movq 0x1b0(%rsp), %rcx movl 0x530(%rsp), %eax imull 0x578(%rsp), %eax movl %eax, 0x52c(%rsp) addq $0xe0, %rcx movl 0x530(%rsp), %eax shll %eax cltq movq %rcx, 0x670(%rsp) movq %rax, 0x668(%rsp) movq 0x670(%rsp), %rax movq (%rax), %rax movq 0x668(%rsp), %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x198(%rsp) movq 0x1b0(%rsp), %rcx movq 0x198(%rsp), %rax movss (%rax), %xmm0 movss %xmm0, 0x528(%rsp) addq $0xe0, %rcx movl 0x530(%rsp), %eax shll %eax addl $0x1, %eax cltq movq %rcx, 0x660(%rsp) movq %rax, 0x658(%rsp) movq 0x660(%rsp), %rax movq (%rax), %rax movq 0x658(%rsp), %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x190(%rsp) 
movq 0x190(%rsp), %rax movss (%rax), %xmm0 movss %xmm0, 0x524(%rsp) movq 0x588(%rsp), %rcx movl 0x52c(%rsp), %eax leaq 0x4d0(%rsp), %rdx movq %rdx, 0x728(%rsp) movq %rcx, 0x720(%rsp) movl %eax, 0x71c(%rsp) movq 0x720(%rsp), %rax movq %rax, 0x180(%rsp) movb $0x0, 0x71b(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x71c(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x4d0(%rsp), %r10 movq %r10, 0x9d8(%rsp) movl %r9d, 0x9d4(%rsp) movl %r8d, 0x9d0(%rsp) movl %edi, 0x9cc(%rsp) movq %rsi, 0x9c0(%rsp) movq %rdx, 0x9b8(%rsp) movl %ecx, 0x9b4(%rsp) movq %rax, 0x9a8(%rsp) movq 0x9d8(%rsp), %rcx movq %rcx, 0x188(%rsp) movq 0x9c0(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x9b8(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x9b4(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x9a8(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x9d4(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x9d0(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x9cc(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0xb50(%rsp) movl $0x10, 0xb4c(%rsp) movq 0xb50(%rsp), %rax movslq 0xb4c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0xb4c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rcx movq 0x188(%rsp), %rax movq %rcx, 0x40(%rax) movq 0x180(%rsp), %rax movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x4f8(%rsp) cmpl $0x4, 0x28(%rax) jne 0x18b7ba8 movq 0x180(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x510(%rsp) movb $0x1, 0x71b(%rsp) testb $0x1, 0x71b(%rsp) jne 0x18b7ce3 leaq 0x4d0(%rsp), %rax movq %rax, 0x730(%rsp) movq 0x730(%rsp), %rax movq %rax, 0x7c8(%rsp) movq 0x7c8(%rsp), %rax movq %rax, 0x178(%rsp) cmpq $0x0, 0x8(%rax) je 
0x18b7c86 movq 0x178(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x7c4(%rsp) # imm = 0xFFFFFFFF movl 0x7c4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x7c0(%rsp) cmpl $0x1, 0x7c0(%rsp) jne 0x18b7c86 movq 0x178(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x18b7c57 movq 0x178(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x18b7c55 jmp 0x18b7c84 movq 0x178(%rsp), %rax movq (%rax), %rax movq %rax, 0x998(%rsp) cmpq $0x0, 0x998(%rsp) je 0x18b7c82 movq 0x998(%rsp), %rdi callq 0x5e480 jmp 0x18b7c84 jmp 0x18b7c86 movq 0x178(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x18b7ce1 movq %rax, %rdi callq 0x5fc90 jmp 0x18b7ce3 jmp 0x18b7ce5 leaq 0x4d0(%rsp), %rax movq %rax, 0x758(%rsp) movq 0x758(%rsp), %rax movq (%rax), %rax movq %rax, 0x170(%rsp) leaq 0x4d0(%rsp), %rax movq %rax, 0x5e8(%rsp) movq 0x5e8(%rsp), %rax movq %rax, 0x8f8(%rsp) movq 0x8f8(%rsp), %rax movq %rax, 0x168(%rsp) cmpq $0x0, 0x8(%rax) je 0x18b7dd0 movq 0x168(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x8f4(%rsp) # imm = 0xFFFFFFFF movl 0x8f4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x8f0(%rsp) cmpl $0x1, 0x8f0(%rsp) jne 0x18b7dd0 movq 0x168(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x18b7da1 movq 0x168(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x18b7d9f jmp 0x18b7dce movq 0x168(%rsp), %rax movq (%rax), %rax movq %rax, 0x900(%rsp) cmpq $0x0, 0x900(%rsp) je 0x18b7dcc movq 0x900(%rsp), %rdi callq 0x5e480 jmp 0x18b7dce jmp 0x18b7dd0 movq 0x168(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x18b7e2b movq %rax, 
%rdi callq 0x5fc90 movq 0x170(%rsp), %rax movq %rax, 0x518(%rsp) movq 0x588(%rsp), %rcx movl 0x52c(%rsp), %eax addl $0x1, %eax leaq 0x480(%rsp), %rdx movq %rdx, 0x708(%rsp) movq %rcx, 0x700(%rsp) movl %eax, 0x6fc(%rsp) movq 0x700(%rsp), %rax movq %rax, 0x158(%rsp) movb $0x0, 0x6fb(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x6fc(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x480(%rsp), %r10 movq %r10, 0xa10(%rsp) movl %r9d, 0xa0c(%rsp) movl %r8d, 0xa08(%rsp) movl %edi, 0xa04(%rsp) movq %rsi, 0x9f8(%rsp) movq %rdx, 0x9f0(%rsp) movl %ecx, 0x9ec(%rsp) movq %rax, 0x9e0(%rsp) movq 0xa10(%rsp), %rcx movq %rcx, 0x160(%rsp) movq 0x9f8(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x9f0(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x9ec(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x9e0(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0xa0c(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0xa08(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0xa04(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0xb40(%rsp) movl $0x10, 0xb3c(%rsp) movq 0xb40(%rsp), %rax movslq 0xb3c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0xb3c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rcx movq 0x160(%rsp), %rax movq %rcx, 0x40(%rax) movq 0x158(%rsp), %rax movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x4a8(%rsp) cmpl $0x4, 0x28(%rax) jne 0x18b8002 movq 0x158(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x4c0(%rsp) movb $0x1, 0x6fb(%rsp) testb $0x1, 0x6fb(%rsp) jne 0x18b813d leaq 0x480(%rsp), %rax movq %rax, 0x710(%rsp) movq 0x710(%rsp), %rax movq %rax, 0x7d8(%rsp) movq 0x7d8(%rsp), %rax movq %rax, 0x150(%rsp) cmpq $0x0, 
0x8(%rax) je 0x18b80e0 movq 0x150(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x7d4(%rsp) # imm = 0xFFFFFFFF movl 0x7d4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x7d0(%rsp) cmpl $0x1, 0x7d0(%rsp) jne 0x18b80e0 movq 0x150(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x18b80b1 movq 0x150(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x18b80af jmp 0x18b80de movq 0x150(%rsp), %rax movq (%rax), %rax movq %rax, 0x990(%rsp) cmpq $0x0, 0x990(%rsp) je 0x18b80dc movq 0x990(%rsp), %rdi callq 0x5e480 jmp 0x18b80de jmp 0x18b80e0 movq 0x150(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x18b813b movq %rax, %rdi callq 0x5fc90 jmp 0x18b813d jmp 0x18b813f leaq 0x480(%rsp), %rax movq %rax, 0x750(%rsp) movq 0x750(%rsp), %rax movq (%rax), %rax movq %rax, 0x148(%rsp) leaq 0x480(%rsp), %rax movq %rax, 0x5f8(%rsp) movq 0x5f8(%rsp), %rax movq %rax, 0x8d8(%rsp) movq 0x8d8(%rsp), %rax movq %rax, 0x140(%rsp) cmpq $0x0, 0x8(%rax) je 0x18b822a movq 0x140(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x8d4(%rsp) # imm = 0xFFFFFFFF movl 0x8d4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x8d0(%rsp) cmpl $0x1, 0x8d0(%rsp) jne 0x18b822a movq 0x140(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x18b81fb movq 0x140(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x18b81f9 jmp 0x18b8228 movq 0x140(%rsp), %rax movq (%rax), %rax movq %rax, 0x910(%rsp) cmpq $0x0, 0x910(%rsp) je 0x18b8226 movq 0x910(%rsp), %rdi callq 0x5e480 jmp 0x18b8228 jmp 0x18b822a movq 0x140(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x18b8285 
movq %rax, %rdi callq 0x5fc90 movq 0x148(%rsp), %rax movq %rax, 0x4c8(%rsp) movq 0x588(%rsp), %rcx movl 0x52c(%rsp), %eax addl $0x2, %eax leaq 0x430(%rsp), %rdx movq %rdx, 0x6e8(%rsp) movq %rcx, 0x6e0(%rsp) movl %eax, 0x6dc(%rsp) movq 0x6e0(%rsp), %rax movq %rax, 0x130(%rsp) movb $0x0, 0x6db(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x6dc(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x430(%rsp), %r10 movq %r10, 0xa48(%rsp) movl %r9d, 0xa44(%rsp) movl %r8d, 0xa40(%rsp) movl %edi, 0xa3c(%rsp) movq %rsi, 0xa30(%rsp) movq %rdx, 0xa28(%rsp) movl %ecx, 0xa24(%rsp) movq %rax, 0xa18(%rsp) movq 0xa48(%rsp), %rcx movq %rcx, 0x138(%rsp) movq 0xa30(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0xa28(%rsp), %rax movq %rax, 0x10(%rcx) movl 0xa24(%rsp), %eax movl %eax, 0x18(%rcx) movq 0xa18(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0xa44(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0xa40(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0xa3c(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0xb30(%rsp) movl $0x10, 0xb2c(%rsp) movq 0xb30(%rsp), %rax movslq 0xb2c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0xb2c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rcx movq 0x138(%rsp), %rax movq %rcx, 0x40(%rax) movq 0x130(%rsp), %rax movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x458(%rsp) cmpl $0x4, 0x28(%rax) jne 0x18b845c movq 0x130(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x470(%rsp) movb $0x1, 0x6db(%rsp) testb $0x1, 0x6db(%rsp) jne 0x18b8597 leaq 0x430(%rsp), %rax movq %rax, 0x6f0(%rsp) movq 0x6f0(%rsp), %rax movq %rax, 0x7e8(%rsp) movq 0x7e8(%rsp), %rax movq %rax, 0x128(%rsp) cmpq 
$0x0, 0x8(%rax) je 0x18b853a movq 0x128(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x7e4(%rsp) # imm = 0xFFFFFFFF movl 0x7e4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x7e0(%rsp) cmpl $0x1, 0x7e0(%rsp) jne 0x18b853a movq 0x128(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x18b850b movq 0x128(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x18b8509 jmp 0x18b8538 movq 0x128(%rsp), %rax movq (%rax), %rax movq %rax, 0x988(%rsp) cmpq $0x0, 0x988(%rsp) je 0x18b8536 movq 0x988(%rsp), %rdi callq 0x5e480 jmp 0x18b8538 jmp 0x18b853a movq 0x128(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x18b8595 movq %rax, %rdi callq 0x5fc90 jmp 0x18b8597 jmp 0x18b8599 leaq 0x430(%rsp), %rax movq %rax, 0x748(%rsp) movq 0x748(%rsp), %rax movq (%rax), %rax movq %rax, 0x120(%rsp) leaq 0x430(%rsp), %rax movq %rax, 0x608(%rsp) movq 0x608(%rsp), %rax movq %rax, 0x8b8(%rsp) movq 0x8b8(%rsp), %rax movq %rax, 0x118(%rsp) cmpq $0x0, 0x8(%rax) je 0x18b8684 movq 0x118(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x8b4(%rsp) # imm = 0xFFFFFFFF movl 0x8b4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x8b0(%rsp) cmpl $0x1, 0x8b0(%rsp) jne 0x18b8684 movq 0x118(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x18b8655 movq 0x118(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x18b8653 jmp 0x18b8682 movq 0x118(%rsp), %rax movq (%rax), %rax movq %rax, 0x920(%rsp) cmpq $0x0, 0x920(%rsp) je 0x18b8680 movq 0x920(%rsp), %rdi callq 0x5e480 jmp 0x18b8682 jmp 0x18b8684 movq 0x118(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 
0x18b86df movq %rax, %rdi callq 0x5fc90 movq 0x120(%rsp), %rax movq %rax, 0x478(%rsp) movq 0x588(%rsp), %rcx movl 0x52c(%rsp), %eax addl $0x3, %eax leaq 0x3e0(%rsp), %rdx movq %rdx, 0x6c8(%rsp) movq %rcx, 0x6c0(%rsp) movl %eax, 0x6bc(%rsp) movq 0x6c0(%rsp), %rax movq %rax, 0x108(%rsp) movb $0x0, 0x6bb(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x6bc(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x3e0(%rsp), %r10 movq %r10, 0xa80(%rsp) movl %r9d, 0xa7c(%rsp) movl %r8d, 0xa78(%rsp) movl %edi, 0xa74(%rsp) movq %rsi, 0xa68(%rsp) movq %rdx, 0xa60(%rsp) movl %ecx, 0xa5c(%rsp) movq %rax, 0xa50(%rsp) movq 0xa80(%rsp), %rcx movq %rcx, 0x110(%rsp) movq 0xa68(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0xa60(%rsp), %rax movq %rax, 0x10(%rcx) movl 0xa5c(%rsp), %eax movl %eax, 0x18(%rcx) movq 0xa50(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0xa7c(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0xa78(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0xa74(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0xb20(%rsp) movl $0x10, 0xb1c(%rsp) movq 0xb20(%rsp), %rax movslq 0xb1c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0xb1c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rcx movq 0x110(%rsp), %rax movq %rcx, 0x40(%rax) movq 0x108(%rsp), %rax movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x408(%rsp) cmpl $0x4, 0x28(%rax) jne 0x18b88b6 movq 0x108(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x420(%rsp) movb $0x1, 0x6bb(%rsp) testb $0x1, 0x6bb(%rsp) jne 0x18b89f1 leaq 0x3e0(%rsp), %rax movq %rax, 0x6d0(%rsp) movq 0x6d0(%rsp), %rax movq %rax, 0x7f8(%rsp) movq 0x7f8(%rsp), %rax movq %rax, 
0x100(%rsp) cmpq $0x0, 0x8(%rax) je 0x18b8994 movq 0x100(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x7f4(%rsp) # imm = 0xFFFFFFFF movl 0x7f4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x7f0(%rsp) cmpl $0x1, 0x7f0(%rsp) jne 0x18b8994 movq 0x100(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x18b8965 movq 0x100(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x18b8963 jmp 0x18b8992 movq 0x100(%rsp), %rax movq (%rax), %rax movq %rax, 0x980(%rsp) cmpq $0x0, 0x980(%rsp) je 0x18b8990 movq 0x980(%rsp), %rdi callq 0x5e480 jmp 0x18b8992 jmp 0x18b8994 movq 0x100(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x18b89ef movq %rax, %rdi callq 0x5fc90 jmp 0x18b89f1 jmp 0x18b89f3 leaq 0x3e0(%rsp), %rax movq %rax, 0x740(%rsp) movq 0x740(%rsp), %rax movq (%rax), %rax movq %rax, 0xf8(%rsp) leaq 0x3e0(%rsp), %rax movq %rax, 0x618(%rsp) movq 0x618(%rsp), %rax movq %rax, 0x898(%rsp) movq 0x898(%rsp), %rax movq %rax, 0xf0(%rsp) cmpq $0x0, 0x8(%rax) je 0x18b8ade movq 0xf0(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x894(%rsp) # imm = 0xFFFFFFFF movl 0x894(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x890(%rsp) cmpl $0x1, 0x890(%rsp) jne 0x18b8ade movq 0xf0(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x18b8aaf movq 0xf0(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x18b8aad jmp 0x18b8adc movq 0xf0(%rsp), %rax movq (%rax), %rax movq %rax, 0x930(%rsp) cmpq $0x0, 0x930(%rsp) je 0x18b8ada movq 0x930(%rsp), %rdi callq 0x5e480 jmp 0x18b8adc jmp 0x18b8ade movq 0xf0(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 
0x8(%rax) jmp 0x18b8b39 movq %rax, %rdi callq 0x5fc90 movq 0xf8(%rsp), %rax movq %rax, 0x428(%rsp) movq 0x588(%rsp), %rcx movl 0x52c(%rsp), %eax addl $0x4, %eax leaq 0x390(%rsp), %rdx movq %rdx, 0x6a8(%rsp) movq %rcx, 0x6a0(%rsp) movl %eax, 0x69c(%rsp) movq 0x6a0(%rsp), %rax movq %rax, 0xe0(%rsp) movb $0x0, 0x69b(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x69c(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x390(%rsp), %r10 movq %r10, 0xab8(%rsp) movl %r9d, 0xab4(%rsp) movl %r8d, 0xab0(%rsp) movl %edi, 0xaac(%rsp) movq %rsi, 0xaa0(%rsp) movq %rdx, 0xa98(%rsp) movl %ecx, 0xa94(%rsp) movq %rax, 0xa88(%rsp) movq 0xab8(%rsp), %rcx movq %rcx, 0xe8(%rsp) movq 0xaa0(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0xa98(%rsp), %rax movq %rax, 0x10(%rcx) movl 0xa94(%rsp), %eax movl %eax, 0x18(%rcx) movq 0xa88(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0xab4(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0xab0(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0xaac(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0xb10(%rsp) movl $0x10, 0xb0c(%rsp) movq 0xb10(%rsp), %rax movslq 0xb0c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0xb0c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rcx movq 0xe8(%rsp), %rax movq %rcx, 0x40(%rax) movq 0xe0(%rsp), %rax movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x3b8(%rsp) cmpl $0x4, 0x28(%rax) jne 0x18b8d10 movq 0xe0(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x3d0(%rsp) movb $0x1, 0x69b(%rsp) testb $0x1, 0x69b(%rsp) jne 0x18b8e4b leaq 0x390(%rsp), %rax movq %rax, 0x6b0(%rsp) movq 0x6b0(%rsp), %rax movq %rax, 0x808(%rsp) movq 0x808(%rsp), %rax movq %rax, 
0xd8(%rsp) cmpq $0x0, 0x8(%rax) je 0x18b8dee movq 0xd8(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x804(%rsp) # imm = 0xFFFFFFFF movl 0x804(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x800(%rsp) cmpl $0x1, 0x800(%rsp) jne 0x18b8dee movq 0xd8(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x18b8dbf movq 0xd8(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x18b8dbd jmp 0x18b8dec movq 0xd8(%rsp), %rax movq (%rax), %rax movq %rax, 0x978(%rsp) cmpq $0x0, 0x978(%rsp) je 0x18b8dea movq 0x978(%rsp), %rdi callq 0x5e480 jmp 0x18b8dec jmp 0x18b8dee movq 0xd8(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x18b8e49 movq %rax, %rdi callq 0x5fc90 jmp 0x18b8e4b jmp 0x18b8e4d leaq 0x390(%rsp), %rax movq %rax, 0x738(%rsp) movq 0x738(%rsp), %rax movq (%rax), %rax movq %rax, 0xd0(%rsp) leaq 0x390(%rsp), %rax movq %rax, 0x628(%rsp) movq 0x628(%rsp), %rax movq %rax, 0x878(%rsp) movq 0x878(%rsp), %rax movq %rax, 0xc8(%rsp) cmpq $0x0, 0x8(%rax) je 0x18b8f38 movq 0xc8(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x874(%rsp) # imm = 0xFFFFFFFF movl 0x874(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x870(%rsp) cmpl $0x1, 0x870(%rsp) jne 0x18b8f38 movq 0xc8(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x18b8f09 movq 0xc8(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x18b8f07 jmp 0x18b8f36 movq 0xc8(%rsp), %rax movq (%rax), %rax movq %rax, 0x940(%rsp) cmpq $0x0, 0x940(%rsp) je 0x18b8f34 movq 0x940(%rsp), %rdi callq 0x5e480 jmp 0x18b8f36 jmp 0x18b8f38 movq 0xc8(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 
0x18b8f93 movq %rax, %rdi callq 0x5fc90 movq 0x1b0(%rsp), %rax movq 0xd0(%rsp), %rcx movq %rcx, 0x3d8(%rsp) movq 0x588(%rsp), %rdx movl 0x52c(%rsp), %ecx addl $0x5, %ecx movl 0xd0(%rax), %eax leaq 0x348(%rsp), %rsi movq %rsi, 0x778(%rsp) movq %rdx, 0x770(%rsp) movl %ecx, 0x76c(%rsp) movl %eax, 0x768(%rsp) movq 0x770(%rsp), %rax movq %rax, 0xc0(%rsp) movb $0x0, 0x767(%rsp) movl 0x2c(%rax), %r10d movl 0x30(%rax), %r9d movl 0x34(%rax), %r8d movl 0x768(%rsp), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x76c(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x348(%rsp), %r11 movq %r11, 0xb88(%rsp) movl %r10d, 0xb84(%rsp) movl %r9d, 0xb80(%rsp) movl %r8d, 0xb7c(%rsp) movl %edi, 0xb78(%rsp) movq %rsi, 0xb70(%rsp) movq %rdx, 0xb68(%rsp) movl %ecx, 0xb64(%rsp) movq %rax, 0xb58(%rsp) movq 0xb88(%rsp), %rcx movq %rcx, 0xb8(%rsp) movq 0xb70(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0xb68(%rsp), %rax movq %rax, 0x10(%rcx) movl 0xb64(%rsp), %eax movl %eax, 0x18(%rcx) movq 0xb58(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x4, 0x28(%rcx) movl 0xb84(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0xb80(%rsp), %eax movl %eax, 0x30(%rcx) movl 0xb7c(%rsp), %eax movl %eax, 0x34(%rcx) movl 0xb78(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax movslq 0x34(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0xb98(%rsp) movl $0x10, 0xb94(%rsp) movq 0xb98(%rsp), %rax movslq 0xb94(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0xb94(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0xc0(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %eax movl %eax, 0x370(%rsp) movb $0x1, 0x767(%rsp) testb $0x1, 0x767(%rsp) jne 0x18b92a8 leaq 0x348(%rsp), %rax movq %rax, 0x780(%rsp) movq 0x780(%rsp), %rax movq %rax, 0x7b8(%rsp) movq 0x7b8(%rsp), %rax movq %rax, 0xb0(%rsp) cmpq 
$0x0, 0x8(%rax) je 0x18b924b movq 0xb0(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x7b4(%rsp) # imm = 0xFFFFFFFF movl 0x7b4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x7b0(%rsp) cmpl $0x1, 0x7b0(%rsp) jne 0x18b924b movq 0xb0(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x18b921c movq 0xb0(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x18b921a jmp 0x18b9249 movq 0xb0(%rsp), %rax movq (%rax), %rax movq %rax, 0x9a0(%rsp) cmpq $0x0, 0x9a0(%rsp) je 0x18b9247 movq 0x9a0(%rsp), %rdi callq 0x5e480 jmp 0x18b9249 jmp 0x18b924b movq 0xb0(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x18b92a6 movq %rax, %rdi callq 0x5fc90 jmp 0x18b92a8 jmp 0x18b92aa movq 0x1b0(%rsp), %rax movq 0x128(%rax), %rdi movq 0x5c8(%rsp), %rdx movq (%rdi), %rax movq 0x48(%rax), %rax leaq 0x348(%rsp), %rsi callq *%rax jmp 0x18b92d4 movl $0x0, 0x344(%rsp) movl 0x344(%rsp), %eax cmpl 0x580(%rsp), %eax jge 0x18b9bb2 movl $0x0, 0x340(%rsp) movl 0x340(%rsp), %eax cmpl 0x584(%rsp), %eax jge 0x18b9b9a cvtsi2ssl 0x340(%rsp), %xmm0 movss %xmm0, 0xa8(%rsp) movq 0x518(%rsp), %rax movss (%rax), %xmm0 callq 0x18ba470 movss %xmm0, 0xac(%rsp) jmp 0x18b9340 movss 0xac(%rsp), %xmm1 movss 0xa8(%rsp), %xmm0 addss %xmm1, %xmm0 cvtsi2ssl 0x584(%rsp), %xmm1 divss %xmm1, %xmm0 movss %xmm0, 0x33c(%rsp) cvtsi2ssl 0x344(%rsp), %xmm0 movss %xmm0, 0xa0(%rsp) movq 0x4c8(%rsp), %rax movss (%rax), %xmm0 callq 0x18ba470 movss %xmm0, 0xa4(%rsp) jmp 0x18b939a movss 0xa4(%rsp), %xmm1 movss 0xa0(%rsp), %xmm0 addss %xmm1, %xmm0 cvtsi2ssl 0x580(%rsp), %xmm1 divss %xmm1, %xmm0 movss %xmm0, 0x338(%rsp) movq 0x478(%rsp), %rax movss (%rax), %xmm0 callq 0xf74e0 movss %xmm0, 0x9c(%rsp) jmp 0x18b93e2 movss 0x9c(%rsp), %xmm0 movss 0x528(%rsp), %xmm1 mulss %xmm1, %xmm0 cvtsi2ssl 0x584(%rsp), %xmm1 
divss %xmm1, %xmm0 movss %xmm0, 0x334(%rsp) movq 0x428(%rsp), %rax movss (%rax), %xmm0 callq 0xf74e0 movss %xmm0, 0x98(%rsp) jmp 0x18b942a movss 0x98(%rsp), %xmm0 movss 0x524(%rsp), %xmm1 mulss %xmm1, %xmm0 cvtsi2ssl 0x580(%rsp), %xmm1 divss %xmm1, %xmm0 movss %xmm0, 0x330(%rsp) movss 0x33c(%rsp), %xmm0 movss 0x334(%rsp), %xmm1 movss 0x546e64(%rip), %xmm2 # 0x1e002d4 mulss %xmm2, %xmm1 subss %xmm1, %xmm0 movss %xmm0, 0x32c(%rsp) movss 0x338(%rsp), %xmm0 movss 0x330(%rsp), %xmm1 mulss %xmm2, %xmm1 subss %xmm1, %xmm0 movss %xmm0, 0x328(%rsp) movss 0x33c(%rsp), %xmm0 movss 0x334(%rsp), %xmm1 mulss %xmm2, %xmm1 addss %xmm1, %xmm0 movss %xmm0, 0x324(%rsp) movss 0x338(%rsp), %xmm0 movss 0x330(%rsp), %xmm1 mulss %xmm2, %xmm1 addss %xmm1, %xmm0 movss %xmm0, 0x320(%rsp) movq 0x3d8(%rsp), %rax movss (%rax), %xmm0 callq 0x18ba470 movss %xmm0, 0x94(%rsp) jmp 0x18b9506 movss 0x94(%rsp), %xmm0 movss %xmm0, 0x31c(%rsp) movl $0x0, 0x318(%rsp) xorps %xmm0, %xmm0 movss %xmm0, 0x314(%rsp) movl $0x0, 0x310(%rsp) movq 0x1b0(%rsp), %rcx movl 0x310(%rsp), %eax cmpl 0xd0(%rcx), %eax jge 0x18b9a41 movl 0x310(%rsp), %eax leaq 0x2c0(%rsp), %rcx movq %rcx, 0x688(%rsp) leaq 0x348(%rsp), %rcx movq %rcx, 0x680(%rsp) movl %eax, 0x67c(%rsp) movq 0x680(%rsp), %rax movq %rax, 0x80(%rsp) movb $0x0, 0x67b(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x67c(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x2c0(%rsp), %r10 movq %r10, 0xaf0(%rsp) movl %r9d, 0xaec(%rsp) movl %r8d, 0xae8(%rsp) movl %edi, 0xae4(%rsp) movq %rsi, 0xad8(%rsp) movq %rdx, 0xad0(%rsp) movl %ecx, 0xacc(%rsp) movq %rax, 0xac0(%rsp) movq 0xaf0(%rsp), %rcx movq %rcx, 0x88(%rsp) movq 0xad8(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0xad0(%rsp), %rax movq %rax, 0x10(%rcx) movl 0xacc(%rsp), %eax movl %eax, 0x18(%rcx) movq 0xac0(%rsp), %rax movq %rax, 0x20(%rcx) 
movl $0x3, 0x28(%rcx) movl 0xaec(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0xae8(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0xae4(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0xb00(%rsp) movl $0x10, 0xafc(%rsp) movq 0xb00(%rsp), %rax movslq 0xafc(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0xafc(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rcx movq 0x88(%rsp), %rax movq %rcx, 0x40(%rax) movq 0x80(%rsp), %rax movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x2e8(%rsp) cmpl $0x4, 0x28(%rax) jne 0x18b9719 movq 0x80(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x300(%rsp) movb $0x1, 0x67b(%rsp) testb $0x1, 0x67b(%rsp) jne 0x18b9842 leaq 0x2c0(%rsp), %rax movq %rax, 0x690(%rsp) movq 0x690(%rsp), %rax movq %rax, 0x818(%rsp) movq 0x818(%rsp), %rax movq %rax, 0x78(%rsp) cmpq $0x0, 0x8(%rax) je 0x18b97e8 movq 0x78(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x814(%rsp) # imm = 0xFFFFFFFF movl 0x814(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x810(%rsp) cmpl $0x1, 0x810(%rsp) jne 0x18b97e8 movq 0x78(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x18b97bc movq 0x78(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x18b97ba jmp 0x18b97e6 movq 0x78(%rsp), %rax movq (%rax), %rax movq %rax, 0x970(%rsp) cmpq $0x0, 0x970(%rsp) je 0x18b97e4 movq 0x970(%rsp), %rdi callq 0x5e480 jmp 0x18b97e6 jmp 0x18b97e8 movq 0x78(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x18b9840 movq %rax, %rdi callq 0x5fc90 jmp 0x18b9842 jmp 0x18b9844 movl 0x344(%rsp), %eax leaq 0x2c0(%rsp), %rcx movq %rcx, 0x7a0(%rsp) movl %eax, 0x79c(%rsp) movq 
0x7a0(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0x79c(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0x70(%rsp) movq 0x70(%rsp), %rax movslq 0x340(%rsp), %rcx movss (%rax,%rcx,4), %xmm0 movss %xmm0, 0x64(%rsp) leaq 0x2c0(%rsp), %rax movq %rax, 0x638(%rsp) movq 0x638(%rsp), %rax movq %rax, 0x858(%rsp) movq 0x858(%rsp), %rax movq %rax, 0x68(%rsp) cmpq $0x0, 0x8(%rax) je 0x18b995b movq 0x68(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x854(%rsp) # imm = 0xFFFFFFFF movl 0x854(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x850(%rsp) cmpl $0x1, 0x850(%rsp) jne 0x18b995b movq 0x68(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x18b992f movq 0x68(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x18b992d jmp 0x18b9959 movq 0x68(%rsp), %rax movq (%rax), %rax movq %rax, 0x950(%rsp) cmpq $0x0, 0x950(%rsp) je 0x18b9957 movq 0x950(%rsp), %rdi callq 0x5e480 jmp 0x18b9959 jmp 0x18b995b movq 0x68(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x18b99b3 movq %rax, %rdi callq 0x5fc90 movss 0x64(%rsp), %xmm0 movss %xmm0, 0x30c(%rsp) movss 0x30c(%rsp), %xmm0 ucomiss 0x314(%rsp), %xmm0 jbe 0x18b9a29 movl 0x310(%rsp), %eax movl %eax, 0x318(%rsp) movss 0x30c(%rsp), %xmm0 movss %xmm0, 0x314(%rsp) jmp 0x18b9a29 movq %rax, %rcx movl %edx, %eax movq %rcx, 0x538(%rsp) movl %eax, 0x534(%rsp) jmp 0x18b9fb8 movq %rax, %rcx movl %edx, %eax movq %rcx, 0x538(%rsp) movl %eax, 0x534(%rsp) jmp 0x18b9cdb jmp 0x18b9a2b movl 0x310(%rsp), %eax addl $0x1, %eax movl %eax, 0x310(%rsp) jmp 0x18b953a movq 0x1b0(%rsp), %rax movss 0x31c(%rsp), %xmm0 mulss 0x314(%rsp), %xmm0 movss %xmm0, 0x2bc(%rsp) movss 0x2bc(%rsp), %xmm0 ucomiss 0xd8(%rax), %xmm0 jb 0x18b9b20 movss 0x32c(%rsp), %xmm0 movss %xmm0, 0x2a8(%rsp) movss 
0x328(%rsp), %xmm0 movss %xmm0, 0x2ac(%rsp) movss 0x324(%rsp), %xmm0 movss %xmm0, 0x2b0(%rsp) movss 0x320(%rsp), %xmm0 movss %xmm0, 0x2b4(%rsp) movl 0x318(%rsp), %eax movl %eax, 0x2b8(%rsp) movslq 0x530(%rsp), %rsi leaq 0x558(%rsp), %rdi callq 0x17fd5c0 movq %rax, %rdi leaq 0x2a8(%rsp), %rsi callq 0x17fd490 jmp 0x18b9af7 movslq 0x530(%rsp), %rsi leaq 0x540(%rsp), %rdi callq 0x17fd600 movq %rax, %rdi leaq 0x2bc(%rsp), %rsi callq 0xf86d80 jmp 0x18b9b1e jmp 0x18b9b20 movq 0x518(%rsp), %rax addq $0x4, %rax movq %rax, 0x518(%rsp) movq 0x4c8(%rsp), %rax addq $0x4, %rax movq %rax, 0x4c8(%rsp) movq 0x478(%rsp), %rax addq $0x4, %rax movq %rax, 0x478(%rsp) movq 0x428(%rsp), %rax addq $0x4, %rax movq %rax, 0x428(%rsp) movq 0x3d8(%rsp), %rax addq $0x4, %rax movq %rax, 0x3d8(%rsp) movl 0x340(%rsp), %eax addl $0x1, %eax movl %eax, 0x340(%rsp) jmp 0x18b92fe jmp 0x18b9b9c movl 0x344(%rsp), %eax addl $0x1, %eax movl %eax, 0x344(%rsp) jmp 0x18b92df leaq 0x348(%rsp), %rax movq %rax, 0x648(%rsp) movq 0x648(%rsp), %rax movq %rax, 0x838(%rsp) movq 0x838(%rsp), %rax movq %rax, 0x58(%rsp) cmpq $0x0, 0x8(%rax) je 0x18b9c6b movq 0x58(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x834(%rsp) # imm = 0xFFFFFFFF movl 0x834(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x830(%rsp) cmpl $0x1, 0x830(%rsp) jne 0x18b9c6b movq 0x58(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x18b9c3f movq 0x58(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x18b9c3d jmp 0x18b9c69 movq 0x58(%rsp), %rax movq (%rax), %rax movq %rax, 0x960(%rsp) cmpq $0x0, 0x960(%rsp) je 0x18b9c67 movq 0x960(%rsp), %rdi callq 0x5e480 jmp 0x18b9c69 jmp 0x18b9c6b movq 0x58(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x18b9cc3 movq %rax, %rdi callq 0x5fc90 jmp 0x18b9cc5 movl 0x530(%rsp), %eax 
addl $0x1, %eax movl %eax, 0x530(%rsp) jmp 0x18b78ee leaq 0x348(%rsp), %rax movq %rax, 0x650(%rsp) movq 0x650(%rsp), %rax movq %rax, 0x828(%rsp) movq 0x828(%rsp), %rax movq %rax, 0x50(%rsp) cmpq $0x0, 0x8(%rax) je 0x18b9d94 movq 0x50(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x824(%rsp) # imm = 0xFFFFFFFF movl 0x824(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x820(%rsp) cmpl $0x1, 0x820(%rsp) jne 0x18b9d94 movq 0x50(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x18b9d68 movq 0x50(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x18b9d66 jmp 0x18b9d92 movq 0x50(%rsp), %rax movq (%rax), %rax movq %rax, 0x968(%rsp) cmpq $0x0, 0x968(%rsp) je 0x18b9d90 movq 0x968(%rsp), %rdi callq 0x5e480 jmp 0x18b9d92 jmp 0x18b9d94 movq 0x50(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x18b9dec movq %rax, %rdi callq 0x5fc90 jmp 0x18b9fb8 movl $0x0, 0x2a4(%rsp) movq 0x1b0(%rsp), %rcx movl 0x2a4(%rsp), %eax cmpl 0xd4(%rcx), %eax jge 0x18b9f85 movslq 0x2a4(%rsp), %rsi leaq 0x558(%rsp), %rdi callq 0x17fd5c0 movq %rax, 0x298(%rsp) movslq 0x2a4(%rsp), %rsi leaq 0x540(%rsp), %rdi callq 0x17fd600 movq %rax, 0x290(%rsp) leaq 0x5b0(%rsp), %rdi movq %rdi, 0x40(%rsp) callq 0x17fd740 movq %rax, 0x280(%rsp) leaq 0x288(%rsp), %rdi leaq 0x280(%rsp), %rsi callq 0x17fd770 movq 0x298(%rsp), %rdi callq 0x17fd7b0 movq %rax, 0x278(%rsp) movq 0x298(%rsp), %rdi callq 0x17fd7e0 movq 0x40(%rsp), %rdi movq %rax, 0x270(%rsp) movq 0x288(%rsp), %rsi movq 0x278(%rsp), %rdx movq 0x270(%rsp), %rcx callq 0x17fd680 movq %rax, 0x48(%rsp) jmp 0x18b9ed3 movq 0x48(%rsp), %rax movq %rax, 0x268(%rsp) leaq 0x598(%rsp), %rdi movq %rdi, 0x30(%rsp) callq 0xf88de0 movq %rax, 0x258(%rsp) leaq 0x260(%rsp), %rdi leaq 0x258(%rsp), %rsi callq 0x17fd8d0 movq 0x290(%rsp), %rdi callq 
0xf89320 movq %rax, 0x250(%rsp) movq 0x290(%rsp), %rdi callq 0xf89350 movq 0x30(%rsp), %rdi movq %rax, 0x248(%rsp) movq 0x260(%rsp), %rsi movq 0x250(%rsp), %rdx movq 0x248(%rsp), %rcx callq 0x17fd810 movq %rax, 0x38(%rsp) jmp 0x18b9f62 movq 0x38(%rsp), %rax movq %rax, 0x240(%rsp) movl 0x2a4(%rsp), %eax addl $0x1, %eax movl %eax, 0x2a4(%rsp) jmp 0x18b9dfc leaq 0x540(%rsp), %rdi callq 0x17fd910 leaq 0x558(%rsp), %rdi callq 0x17fd970 movq 0x590(%rsp), %rax addq $0x1, %rax movq %rax, 0x590(%rsp) jmp 0x18b77b5 leaq 0x540(%rsp), %rdi callq 0x17fd910 leaq 0x558(%rsp), %rdi callq 0x17fd970 jmp 0x18ba43a leaq 0x5b0(%rsp), %rdi leaq 0x598(%rsp), %rsi callq 0x18ba4b0 jmp 0x18b9fee leaq 0x228(%rsp), %rdi movq %rdi, 0x28(%rsp) callq 0xf86f50 movq 0x1b0(%rsp), %rax movq 0x28(%rsp), %rsi movss 0xdc(%rax), %xmm0 leaq 0x5b0(%rsp), %rdi callq 0x18ba520 jmp 0x18ba024 leaq 0x210(%rsp), %rdi callq 0x17fd480 leaq 0x1f8(%rsp), %rdi callq 0xf86d00 movq $0x0, 0x1f0(%rsp) movq 0x1f0(%rsp), %rax movq %rax, 0x20(%rsp) leaq 0x228(%rsp), %rdi callq 0xf86f60 movq %rax, %rcx movq 0x20(%rsp), %rax cmpq %rcx, %rax jae 0x18ba163 movq 0x1f0(%rsp), %rsi leaq 0x228(%rsp), %rdi callq 0xf86fa0 movq (%rax), %rax movq %rax, 0x1e8(%rsp) movq 0x1e8(%rsp), %rsi leaq 0x5b0(%rsp), %rdi callq 0x17fd5e0 movq %rax, %rsi leaq 0x210(%rsp), %rdi callq 0x17fd490 jmp 0x18ba0bc movq 0x1e8(%rsp), %rsi leaq 0x598(%rsp), %rdi callq 0xf86fc0 movq %rax, %rsi leaq 0x1f8(%rsp), %rdi callq 0xf86d80 jmp 0x18ba0e3 jmp 0x18ba0e5 movq 0x1f0(%rsp), %rax addq $0x1, %rax movq %rax, 0x1f0(%rsp) jmp 0x18ba04a movq %rax, %rcx movl %edx, %eax movq %rcx, 0x538(%rsp) movl %eax, 0x534(%rsp) jmp 0x18ba43a movq %rax, %rcx movl %edx, %eax movq %rcx, 0x538(%rsp) movl %eax, 0x534(%rsp) jmp 0x18ba402 movq %rax, %rcx movl %edx, %eax movq %rcx, 0x538(%rsp) movl %eax, 0x534(%rsp) leaq 0x1f8(%rsp), %rdi callq 0xf87040 leaq 0x210(%rsp), %rdi callq 0x17fd620 jmp 0x18ba402 leaq 0x210(%rsp), %rdi callq 0x17fd500 movl %eax, 0x1e4(%rsp) cmpl $0x0, 
0x1e4(%rsp) jne 0x18ba19c movl $0x0, 0x5e4(%rsp) movl $0x1, 0x574(%rsp) jmp 0x18ba3d9 movq 0x5d0(%rsp), %rdi xorl %eax, %eax movl %eax, %esi callq 0x98840 movq %rax, 0x1d8(%rsp) movq 0x1d8(%rsp), %rdi movl 0x1e4(%rsp), %edx movq 0x5c8(%rsp), %rax movq 0x8(%rax), %r8 movl $0x6, %esi movl $0x4, %ecx callq 0x652c0 jmp 0x18ba1e1 movq 0x1d8(%rsp), %rax movq %rax, 0x7a8(%rsp) movq 0x7a8(%rsp), %rcx movq %rcx, 0x10(%rsp) movb $0x1, %al cmpq $0x0, (%rcx) movb %al, 0x1f(%rsp) je 0x18ba236 movq 0x10(%rsp), %rax movq %rax, 0xba0(%rsp) movq 0xba0(%rsp), %rcx movq 0x40(%rcx), %rax movslq 0x38(%rcx), %rcx imulq %rcx, %rax cmpq $0x0, %rax sete %al movb %al, 0x1f(%rsp) movb 0x1f(%rsp), %al movb %al, 0xf(%rsp) movb 0xf(%rsp), %al testb $0x1, %al jne 0x18ba248 jmp 0x18ba263 movl $0xffffff9c, 0x5e4(%rsp) # imm = 0xFFFFFF9C movl $0x1, 0x574(%rsp) jmp 0x18ba3d9 movl $0x0, 0x1d4(%rsp) movl 0x1d4(%rsp), %eax cmpl 0x1e4(%rsp), %eax jge 0x18ba3c3 movslq 0x1d4(%rsp), %rsi leaq 0x210(%rsp), %rdi callq 0x17fd5e0 movq %rax, 0x1c8(%rsp) movslq 0x1d4(%rsp), %rsi leaq 0x1f8(%rsp), %rdi callq 0xf86fc0 movss (%rax), %xmm0 movss %xmm0, 0x1c4(%rsp) movq 0x1d8(%rsp), %rcx movl 0x1d4(%rsp), %eax movq %rcx, 0x790(%rsp) movl %eax, 0x78c(%rsp) movq 0x790(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0x78c(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, (%rsp) movq (%rsp), %rax movq %rax, 0x1b8(%rsp) movq 0x1c8(%rsp), %rax movl 0x10(%rax), %eax addl $0x1, %eax cvtsi2ss %eax, %xmm0 movq 0x1b8(%rsp), %rax movss %xmm0, (%rax) movss 0x1c4(%rsp), %xmm0 movq 0x1b8(%rsp), %rax movss %xmm0, 0x4(%rax) movq 0x1c8(%rsp), %rax movss (%rax), %xmm0 movq 0x1b8(%rsp), %rax movss %xmm0, 0x8(%rax) movq 0x1c8(%rsp), %rax movss 0x4(%rax), %xmm0 movq 0x1b8(%rsp), %rax movss %xmm0, 0xc(%rax) movq 0x1c8(%rsp), %rax movss 0x8(%rax), %xmm0 movq 0x1b8(%rsp), %rax movss %xmm0, 0x10(%rax) movq 0x1c8(%rsp), %rax movss 0xc(%rax), %xmm0 movq 0x1b8(%rsp), %rax movss %xmm0, 0x14(%rax) movl 
0x1d4(%rsp), %eax addl $0x1, %eax movl %eax, 0x1d4(%rsp) jmp 0x18ba26e movl $0x0, 0x5e4(%rsp) movl $0x1, 0x574(%rsp) leaq 0x1f8(%rsp), %rdi callq 0xf87040 leaq 0x210(%rsp), %rdi callq 0x17fd620 leaq 0x228(%rsp), %rdi callq 0xf86fe0 jmp 0x18ba411 leaq 0x228(%rsp), %rdi callq 0xf86fe0 jmp 0x18ba43a leaq 0x598(%rsp), %rdi callq 0xf87040 leaq 0x5b0(%rsp), %rdi callq 0x17fd620 movl 0x5e4(%rsp), %eax addq $0xba8, %rsp # imm = 0xBA8 retq leaq 0x598(%rsp), %rdi callq 0xf87040 leaq 0x5b0(%rsp), %rdi callq 0x17fd620 movq 0x538(%rsp), %rdi callq 0x5e3b0 nopw %cs:(%rax,%rax)
/ysh329[P]ncnn/src/layer/yolodetectionoutput.cpp
ncnn::nms_sorted_bboxes(std::vector<ncnn::BBoxRect, std::allocator<ncnn::BBoxRect>> const&, std::vector<unsigned long, std::allocator<unsigned long>>&, float)
static void nms_sorted_bboxes(const std::vector<BBoxRect>& bboxes, std::vector<size_t>& picked, float nms_threshold) { picked.clear(); const size_t n = bboxes.size(); std::vector<float> areas(n); for (size_t i = 0; i < n; i++) { const BBoxRect& r = bboxes[i]; float width = r.xmax - r.xmin; float height = r.ymax - r.ymin; areas[i] = width * height; } for (size_t i = 0; i < n; i++) { const BBoxRect& a = bboxes[i]; int keep = 1; for (int j = 0; j < (int)picked.size(); j++) { const BBoxRect& b = bboxes[picked[j]]; // intersection over union float inter_area = intersection_area(a, b); float union_area = areas[i] + areas[picked[j]] - inter_area; // float IoU = inter_area / union_area if (inter_area / union_area > nms_threshold) keep = 0; } if (keep) picked.push_back(i); } }
# Compiler disassembly (x86-64, AT&T syntax, unoptimized) of the
# ncnn::nms_sorted_bboxes C++ function shown directly above: it clears
# `picked`, fills a temporary float vector with per-box areas, then for each
# box compares against every previously picked index (the 0x18ba830 call is
# intersection_area) before conditionally pushing the index.
subq $0xc8, %rsp movq %rdi, 0xc0(%rsp) movq %rsi, 0xb8(%rsp) movss %xmm0, 0xb4(%rsp) movq 0xb8(%rsp), %rdi callq 0xf872d0 movq 0xc0(%rsp), %rdi callq 0x17fd500 movq %rax, 0xa8(%rsp) movq 0xa8(%rsp), %rax movq %rax, 0x28(%rsp) leaq 0x8f(%rsp), %rdi movq %rdi, 0x30(%rsp) callq 0xf872f0 movq 0x28(%rsp), %rsi movq 0x30(%rsp), %rdx leaq 0x90(%rsp), %rdi callq 0xf87300 jmp 0x18ba59a leaq 0x8f(%rsp), %rdi callq 0xf87380 movq $0x0, 0x70(%rsp) movq 0x70(%rsp), %rax cmpq 0xa8(%rsp), %rax jae 0x18ba671 movq 0xc0(%rsp), %rdi movq 0x70(%rsp), %rsi callq 0x17fdbb0 movq %rax, 0x68(%rsp) movq 0x68(%rsp), %rax movss 0x8(%rax), %xmm0 movq 0x68(%rsp), %rax subss (%rax), %xmm0 movss %xmm0, 0x64(%rsp) movq 0x68(%rsp), %rax movss 0xc(%rax), %xmm0 movq 0x68(%rsp), %rax subss 0x4(%rax), %xmm0 movss %xmm0, 0x60(%rsp) movss 0x64(%rsp), %xmm0 mulss 0x60(%rsp), %xmm0 movss %xmm0, 0x24(%rsp) movq 0x70(%rsp), %rsi leaq 0x90(%rsp), %rdi callq 0xf86fc0 movss 0x24(%rsp), %xmm0 movss %xmm0, (%rax) movq 0x70(%rsp), %rax addq $0x1, %rax movq %rax, 0x70(%rsp) jmp 0x18ba5b0 movq %rax, %rcx movl %edx, %eax movq %rcx, 0x80(%rsp) movl %eax, 0x7c(%rsp) leaq 0x8f(%rsp), %rdi callq 0xf87380 jmp 0x18ba819 movq $0x0, 0x58(%rsp) movq 0x58(%rsp), %rax cmpq 0xa8(%rsp), %rax jae 0x18ba804 movq 0xc0(%rsp), %rdi movq 0x58(%rsp), %rsi callq 0x17fdbb0 movq %rax, 0x50(%rsp) movl $0x1, 0x4c(%rsp) movl $0x0, 0x48(%rsp) movl 0x48(%rsp), %eax movl %eax, 0x20(%rsp) movq 0xb8(%rsp), %rdi callq 0xf86f60 movq %rax, %rcx movl 0x20(%rsp), %eax cmpl %ecx, %eax jge 0x18ba7d2 movq 0xc0(%rsp), %rax movq %rax, 0x10(%rsp) movq 0xb8(%rsp), %rdi movslq 0x48(%rsp), %rsi callq 0xf86fa0 movq 0x10(%rsp), %rdi movq (%rax), %rsi callq 0x17fdbb0 movq %rax, 0x40(%rsp) movq 0x50(%rsp), %rdi movq 0x40(%rsp), %rsi callq 0x18ba830 movss %xmm0, 0x1c(%rsp) jmp 0x18ba720 movss 0x1c(%rsp), %xmm0 movss %xmm0, 0x3c(%rsp) movq 0x58(%rsp), %rsi leaq 0x90(%rsp), %rdi callq 0xf86fc0 movss (%rax), %xmm0 movss %xmm0, 0xc(%rsp) movq 0xb8(%rsp), %rdi movslq 
0x48(%rsp), %rsi callq 0xf86fa0 movq (%rax), %rsi leaq 0x90(%rsp), %rdi callq 0xf86fc0 movss 0xc(%rsp), %xmm0 addss (%rax), %xmm0 subss 0x3c(%rsp), %xmm0 movss %xmm0, 0x38(%rsp) movss 0x3c(%rsp), %xmm0 divss 0x38(%rsp), %xmm0 ucomiss 0xb4(%rsp), %xmm0 jbe 0x18ba7c0 movl $0x0, 0x4c(%rsp) jmp 0x18ba7c0 movq %rax, %rcx movl %edx, %eax movq %rcx, 0x80(%rsp) movl %eax, 0x7c(%rsp) leaq 0x90(%rsp), %rdi callq 0xf87040 jmp 0x18ba819 jmp 0x18ba7c2 movl 0x48(%rsp), %eax addl $0x1, %eax movl %eax, 0x48(%rsp) jmp 0x18ba6b4 cmpl $0x0, 0x4c(%rsp) je 0x18ba7ef movq 0xb8(%rsp), %rdi leaq 0x58(%rsp), %rsi callq 0xf873b0 jmp 0x18ba7ed jmp 0x18ba7ef jmp 0x18ba7f1 movq 0x58(%rsp), %rax addq $0x1, %rax movq %rax, 0x58(%rsp) jmp 0x18ba67a leaq 0x90(%rsp), %rdi callq 0xf87040 addq $0xc8, %rsp retq movq 0x80(%rsp), %rdi callq 0x5e3b0 nopw %cs:(%rax,%rax)
/ysh329[P]ncnn/src/layer/yolodetectionoutput.cpp
ncnn::intersection_area(ncnn::BBoxRect const&, ncnn::BBoxRect const&)
// Area of the overlap between two axis-aligned boxes.
// Returns 0.f when the boxes do not intersect at all.
static inline float intersection_area(const BBoxRect& a, const BBoxRect& b)
{
    // Reject fully-separated boxes up front.
    const bool separated = a.xmin > b.xmax || a.xmax < b.xmin || a.ymin > b.ymax || a.ymax < b.ymin;
    if (separated)
    {
        // no intersection
        return 0.f;
    }

    // Clip the overlap extents along each axis and multiply them.
    const float overlap_w = std::min(a.xmax, b.xmax) - std::max(a.xmin, b.xmin);
    const float overlap_h = std::min(a.ymax, b.ymax) - std::max(a.ymin, b.ymin);
    return overlap_w * overlap_h;
}
# Compiler disassembly (x86-64, AT&T syntax, unoptimized) of the
# ncnn::intersection_area C++ function shown directly above: four ucomiss
# compares implement the disjoint-box early-out (returns 0.f), otherwise it
# calls std::min/std::max helpers and multiplies the clipped width and height.
subq $0x28, %rsp movq %rdi, 0x18(%rsp) movq %rsi, 0x10(%rsp) movq 0x18(%rsp), %rax movss (%rax), %xmm0 movq 0x10(%rsp), %rax ucomiss 0x8(%rax), %xmm0 ja 0x18ba898 movq 0x18(%rsp), %rax movss 0x8(%rax), %xmm1 movq 0x10(%rsp), %rax movss (%rax), %xmm0 ucomiss %xmm1, %xmm0 ja 0x18ba898 movq 0x18(%rsp), %rax movss 0x4(%rax), %xmm0 movq 0x10(%rsp), %rax ucomiss 0xc(%rax), %xmm0 ja 0x18ba898 movq 0x18(%rsp), %rax movss 0xc(%rax), %xmm1 movq 0x10(%rsp), %rax movss 0x4(%rax), %xmm0 ucomiss %xmm1, %xmm0 jbe 0x18ba8a6 xorps %xmm0, %xmm0 movss %xmm0, 0x24(%rsp) jmp 0x18ba93e movq 0x18(%rsp), %rdi addq $0x8, %rdi movq 0x10(%rsp), %rsi addq $0x8, %rsi callq 0x1374b0 movss (%rax), %xmm0 movss %xmm0, (%rsp) movq 0x18(%rsp), %rdi movq 0x10(%rsp), %rsi callq 0x1374f0 movss (%rsp), %xmm0 subss (%rax), %xmm0 movss %xmm0, 0xc(%rsp) movq 0x18(%rsp), %rdi addq $0xc, %rdi movq 0x10(%rsp), %rsi addq $0xc, %rsi callq 0x1374b0 movss (%rax), %xmm0 movss %xmm0, 0x4(%rsp) movq 0x18(%rsp), %rdi addq $0x4, %rdi movq 0x10(%rsp), %rsi addq $0x4, %rsi callq 0x1374f0 movss 0x4(%rsp), %xmm0 subss (%rax), %xmm0 movss %xmm0, 0x8(%rsp) movss 0xc(%rsp), %xmm0 mulss 0x8(%rsp), %xmm0 movss %xmm0, 0x24(%rsp) movss 0x24(%rsp), %xmm0 addq $0x28, %rsp retq nopl (%rax)
/ysh329[P]ncnn/src/layer/yolodetectionoutput.cpp
void ncnn::qsort_descent_inplace<ncnn::BBoxRect>(std::vector<ncnn::BBoxRect, std::allocator<ncnn::BBoxRect>>&, std::vector<float, std::allocator<float>>&, int, int)
static void qsort_descent_inplace(std::vector<T>& datas, std::vector<float>& scores, int left, int right) { int i = left; int j = right; float p = scores[(left + right) / 2]; while (i <= j) { while (scores[i] > p) i++; while (scores[j] < p) j--; if (i <= j) { // swap std::swap(datas[i], datas[j]); std::swap(scores[i], scores[j]); i++; j--; } } if (left < j) qsort_descent_inplace(datas, scores, left, j); if (i < right) qsort_descent_inplace(datas, scores, i, right); }
subq $0x38, %rsp movq %rdi, 0x30(%rsp) movq %rsi, 0x28(%rsp) movl %edx, 0x24(%rsp) movl %ecx, 0x20(%rsp) movl 0x24(%rsp), %eax movl %eax, 0x1c(%rsp) movl 0x20(%rsp), %eax movl %eax, 0x18(%rsp) movq 0x28(%rsp), %rdi movl 0x24(%rsp), %eax addl 0x20(%rsp), %eax movl $0x2, %ecx cltd idivl %ecx movslq %eax, %rsi callq 0xf86fc0 movss (%rax), %xmm0 movss %xmm0, 0x14(%rsp) movl 0x1c(%rsp), %eax cmpl 0x18(%rsp), %eax jg 0x18baa84 jmp 0x18ba9ad movq 0x28(%rsp), %rdi movslq 0x1c(%rsp), %rsi callq 0xf86fc0 movss (%rax), %xmm0 ucomiss 0x14(%rsp), %xmm0 jbe 0x18ba9d4 movl 0x1c(%rsp), %eax addl $0x1, %eax movl %eax, 0x1c(%rsp) jmp 0x18ba9ad jmp 0x18ba9d6 movq 0x28(%rsp), %rdi movslq 0x18(%rsp), %rsi callq 0xf86fc0 movss (%rax), %xmm1 movss 0x14(%rsp), %xmm0 ucomiss %xmm1, %xmm0 jbe 0x18baa01 movl 0x18(%rsp), %eax addl $-0x1, %eax movl %eax, 0x18(%rsp) jmp 0x18ba9d6 movl 0x1c(%rsp), %eax cmpl 0x18(%rsp), %eax jg 0x18baa7f movq 0x30(%rsp), %rdi movslq 0x1c(%rsp), %rsi callq 0x17fd5e0 movq %rax, (%rsp) movq 0x30(%rsp), %rdi movslq 0x18(%rsp), %rsi callq 0x17fd5e0 movq (%rsp), %rdi movq %rax, %rsi callq 0x17ffd20 movq 0x28(%rsp), %rdi movslq 0x1c(%rsp), %rsi callq 0xf86fc0 movq %rax, 0x8(%rsp) movq 0x28(%rsp), %rdi movslq 0x18(%rsp), %rsi callq 0xf86fc0 movq 0x8(%rsp), %rdi movq %rax, %rsi callq 0xf89400 movl 0x1c(%rsp), %eax addl $0x1, %eax movl %eax, 0x1c(%rsp) movl 0x18(%rsp), %eax addl $-0x1, %eax movl %eax, 0x18(%rsp) jmp 0x18ba99d movl 0x24(%rsp), %eax cmpl 0x18(%rsp), %eax jge 0x18baaa5 movq 0x30(%rsp), %rdi movq 0x28(%rsp), %rsi movl 0x24(%rsp), %edx movl 0x18(%rsp), %ecx callq 0x18ba950 movl 0x1c(%rsp), %eax cmpl 0x20(%rsp), %eax jge 0x18baac6 movq 0x30(%rsp), %rdi movq 0x28(%rsp), %rsi movl 0x1c(%rsp), %edx movl 0x20(%rsp), %ecx callq 0x18ba950 addq $0x38, %rsp retq nopl (%rax,%rax)
/ysh329[P]ncnn/src/layer/yolodetectionoutput.cpp
ncnn::Quantize::load_model(ncnn::ModelBin const&)
int Quantize::load_model(const ModelBin& mb) { scale_data = mb.load(scale_data_size, 1); if (scale_data.empty()) return -100; return 0; }
subq $0x138, %rsp # imm = 0x138 movq %rdi, 0xa0(%rsp) movq %rsi, 0x98(%rsp) movq 0xa0(%rsp), %rax movq %rax, 0x30(%rsp) movq 0x98(%rsp), %rsi movl 0xd0(%rax), %edx movq (%rsi), %rax leaq 0x50(%rsp), %rdi movl $0x1, %ecx callq *0x10(%rax) movq 0x30(%rsp), %rax addq $0xd8, %rax movq %rax, 0xc0(%rsp) leaq 0x50(%rsp), %rax movq %rax, 0xb8(%rsp) movq 0xc0(%rsp), %rax movq %rax, 0x38(%rsp) cmpq 0xb8(%rsp), %rax jne 0x18bac7b movq 0x38(%rsp), %rax movq %rax, 0xc8(%rsp) jmp 0x18bae56 movq 0xb8(%rsp), %rax cmpq $0x0, 0x8(%rax) je 0x18bacb3 movq 0xb8(%rsp), %rax movq 0x8(%rax), %rcx movl $0x1, 0xb4(%rsp) movl 0xb4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xb0(%rsp) movq 0x38(%rsp), %rax movq %rax, 0x110(%rsp) movq 0x110(%rsp), %rax movq %rax, 0x28(%rsp) cmpq $0x0, 0x8(%rax) je 0x18bad59 movq 0x28(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x10c(%rsp) # imm = 0xFFFFFFFF movl 0x10c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x108(%rsp) cmpl $0x1, 0x108(%rsp) jne 0x18bad59 movq 0x28(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x18bad2d movq 0x28(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x18bad2b jmp 0x18bad57 movq 0x28(%rsp), %rax movq (%rax), %rax movq %rax, 0x118(%rsp) cmpq $0x0, 0x118(%rsp) je 0x18bad55 movq 0x118(%rsp), %rdi callq 0x5e480 jmp 0x18bad57 jmp 0x18bad59 movq 0x28(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) movq 0x38(%rsp), %rax movq 0xb8(%rsp), %rcx movq (%rcx), %rcx movq %rcx, (%rax) movq 0xb8(%rsp), %rcx movq 0x8(%rcx), %rcx movq %rcx, 0x8(%rax) movq 0xb8(%rsp), %rcx movq 0x10(%rcx), %rcx movq %rcx, 0x10(%rax) movq 0xb8(%rsp), %rcx movl 0x18(%rcx), %ecx movl %ecx, 0x18(%rax) movq 0xb8(%rsp), %rcx movq 0x20(%rcx), %rcx movq %rcx, 0x20(%rax) movq 0xb8(%rsp), %rcx movl 0x28(%rcx), %ecx movl %ecx, 
0x28(%rax) movq 0xb8(%rsp), %rcx movl 0x2c(%rcx), %ecx movl %ecx, 0x2c(%rax) movq 0xb8(%rsp), %rcx movl 0x30(%rcx), %ecx movl %ecx, 0x30(%rax) movq 0xb8(%rsp), %rcx movl 0x34(%rcx), %ecx movl %ecx, 0x34(%rax) movq 0xb8(%rsp), %rcx movl 0x38(%rcx), %ecx movl %ecx, 0x38(%rax) movq 0xb8(%rsp), %rcx movq 0x40(%rcx), %rcx movq %rcx, 0x40(%rax) movq %rax, 0xc8(%rsp) leaq 0x50(%rsp), %rax movq %rax, 0xd0(%rsp) movq 0xd0(%rsp), %rax movq %rax, 0x100(%rsp) movq 0x100(%rsp), %rax movq %rax, 0x20(%rsp) cmpq $0x0, 0x8(%rax) je 0x18baf0c movq 0x20(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xfc(%rsp) # imm = 0xFFFFFFFF movl 0xfc(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xf8(%rsp) cmpl $0x1, 0xf8(%rsp) jne 0x18baf0c movq 0x20(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x18baee0 movq 0x20(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x18baede jmp 0x18baf0a movq 0x20(%rsp), %rax movq (%rax), %rax movq %rax, 0x120(%rsp) cmpq $0x0, 0x120(%rsp) je 0x18baf08 movq 0x120(%rsp), %rdi callq 0x5e480 jmp 0x18baf0a jmp 0x18baf0c movq 0x20(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x18baf64 movq %rax, %rdi callq 0x5fc90 movq 0x30(%rsp), %rax addq $0xd8, %rax movq %rax, 0xe0(%rsp) movq 0xe0(%rsp), %rcx movq %rcx, 0x10(%rsp) movb $0x1, %al cmpq $0x0, (%rcx) movb %al, 0x1f(%rsp) je 0x18bafbc movq 0x10(%rsp), %rax movq %rax, 0x130(%rsp) movq 0x130(%rsp), %rcx movq 0x40(%rcx), %rax movslq 0x38(%rcx), %rcx imulq %rcx, %rax cmpq $0x0, %rax sete %al movb %al, 0x1f(%rsp) movb 0x1f(%rsp), %al testb $0x1, %al jne 0x18bafc9 jmp 0x18bb0f7 movl $0xffffff9c, 0xac(%rsp) # imm = 0xFFFFFF9C jmp 0x18bb102 movq %rax, %rcx movl %edx, %eax movq %rcx, 0x48(%rsp) movl %eax, 0x44(%rsp) leaq 0x50(%rsp), %rax movq %rax, 0xd8(%rsp) movq 0xd8(%rsp), %rax movq %rax, 
0xf0(%rsp) movq 0xf0(%rsp), %rax movq %rax, 0x8(%rsp) cmpq $0x0, 0x8(%rax) je 0x18bb09d movq 0x8(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0xec(%rsp) # imm = 0xFFFFFFFF movl 0xec(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0xe8(%rsp) cmpl $0x1, 0xe8(%rsp) jne 0x18bb09d movq 0x8(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x18bb071 movq 0x8(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x18bb06f jmp 0x18bb09b movq 0x8(%rsp), %rax movq (%rax), %rax movq %rax, 0x128(%rsp) cmpq $0x0, 0x128(%rsp) je 0x18bb099 movq 0x128(%rsp), %rdi callq 0x5e480 jmp 0x18bb09b jmp 0x18bb09d movq 0x8(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x18bb0f5 movq %rax, %rdi callq 0x5fc90 jmp 0x18bb111 movl $0x0, 0xac(%rsp) movl 0xac(%rsp), %eax addq $0x138, %rsp # imm = 0x138 retq movq 0x48(%rsp), %rdi callq 0x5e3b0 nopl (%rax,%rax)
/ysh329[P]ncnn/src/layer/quantize.cpp
ncnn::Quantize::forward(ncnn::Mat const&, ncnn::Mat&, ncnn::Option const&) const
int Quantize::forward(const Mat& bottom_blob, Mat& top_blob, const Option& opt) const { int dims = bottom_blob.dims; if (dims == 1) { int w = bottom_blob.w; top_blob.create(w, (size_t)1u, opt.blob_allocator); if (top_blob.empty()) return -100; const float* ptr = bottom_blob; signed char* outptr = top_blob; if (scale_data_size == 1) { const float scale = scale_data[0]; #pragma omp parallel for num_threads(opt.num_threads) for (int i = 0; i < w; i++) { outptr[i] = float2int8(ptr[i] * scale); } } else { #pragma omp parallel for num_threads(opt.num_threads) for (int i = 0; i < w; i++) { outptr[i] = float2int8(ptr[i] * scale_data[i]); } } } if (dims == 2) { int w = bottom_blob.w; int h = bottom_blob.h; top_blob.create(w, h, (size_t)1u, opt.blob_allocator); if (top_blob.empty()) return -100; #pragma omp parallel for num_threads(opt.num_threads) for (int i = 0; i < h; i++) { const float* ptr0 = bottom_blob.row(i); signed char* outptr0 = top_blob.row<signed char>(i); const float scale = scale_data_size == 1 ? scale_data[0] : scale_data[i]; for (int j = 0; j < w; j++) { outptr0[j] = float2int8(ptr0[j] * scale); } } } if (dims == 3) { int w = bottom_blob.w; int h = bottom_blob.h; int channels = bottom_blob.c; int size = w * h; top_blob.create(w, h, channels, (size_t)1u, opt.blob_allocator); if (top_blob.empty()) return -100; #pragma omp parallel for num_threads(opt.num_threads) for (int q = 0; q < channels; q++) { const float* ptr = bottom_blob.channel(q); signed char* outptr = top_blob.channel(q); const float scale = scale_data_size == 1 ? scale_data[0] : scale_data[q]; for (int i = 0; i < size; i++) { outptr[i] = float2int8(ptr[i] * scale); } } } return 0; }
subq $0x428, %rsp # imm = 0x428 movq %rdi, 0x1c8(%rsp) movq %rsi, 0x1c0(%rsp) movq %rdx, 0x1b8(%rsp) movq %rcx, 0x1b0(%rsp) movq 0x1c8(%rsp), %rax movq %rax, 0x90(%rsp) movq 0x1c0(%rsp), %rax movl 0x28(%rax), %eax movl %eax, 0x1ac(%rsp) cmpl $0x1, 0x1ac(%rsp) jne 0x18bb3dc movq 0x1c0(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x1a8(%rsp) movq 0x1b8(%rsp), %rdi movl 0x1a8(%rsp), %esi movq 0x1b0(%rsp), %rax movq 0x8(%rax), %rcx movl $0x1, %edx callq 0x65040 movq 0x1b8(%rsp), %rax movq %rax, 0x208(%rsp) movq 0x208(%rsp), %rcx movq %rcx, 0x80(%rsp) movb $0x1, %al cmpq $0x0, (%rcx) movb %al, 0x8f(%rsp) je 0x18bb20f movq 0x80(%rsp), %rax movq %rax, 0x380(%rsp) movq 0x380(%rsp), %rcx movq 0x40(%rcx), %rax movslq 0x38(%rcx), %rcx imulq %rcx, %rax cmpq $0x0, %rax sete %al movb %al, 0x8f(%rsp) movb 0x8f(%rsp), %al testb $0x1, %al jne 0x18bb21c jmp 0x18bb22c movl $0xffffff9c, 0x1d4(%rsp) # imm = 0xFFFFFF9C jmp 0x18bc0fd movq 0x90(%rsp), %rax movq 0x1c0(%rsp), %rcx movq %rcx, 0x218(%rsp) movq 0x218(%rsp), %rcx movq (%rcx), %rcx movq %rcx, 0x1a0(%rsp) movq 0x1b8(%rsp), %rcx movq %rcx, 0x228(%rsp) movq 0x228(%rsp), %rcx movq (%rcx), %rcx movq %rcx, 0x198(%rsp) cmpl $0x1, 0xd0(%rax) jne 0x18bb335 movq 0x90(%rsp), %rax addq $0xd8, %rax movq %rax, 0x288(%rsp) movq $0x0, 0x280(%rsp) movq 0x288(%rsp), %rax movq (%rax), %rax movq 0x280(%rsp), %rcx movss (%rax,%rcx,4), %xmm0 movss %xmm0, 0x194(%rsp) movl $0x0, 0x190(%rsp) movl 0x190(%rsp), %eax cmpl 0x1a8(%rsp), %eax jge 0x18bb330 movq 0x1a0(%rsp), %rax movslq 0x190(%rsp), %rcx movss (%rax,%rcx,4), %xmm0 mulss 0x194(%rsp), %xmm0 callq 0x18bc110 movb %al, %dl movq 0x198(%rsp), %rax movslq 0x190(%rsp), %rcx movb %dl, (%rax,%rcx) movl 0x190(%rsp), %eax addl $0x1, %eax movl %eax, 0x190(%rsp) jmp 0x18bb2d5 jmp 0x18bb3da movl $0x0, 0x18c(%rsp) movl 0x18c(%rsp), %eax cmpl 0x1a8(%rsp), %eax jge 0x18bb3d8 movq 0x90(%rsp), %rcx movq 0x1a0(%rsp), %rax movslq 0x18c(%rsp), %rdx movss (%rax,%rdx,4), %xmm0 addq $0xd8, %rcx movslq 0x18c(%rsp), %rax 
movq %rcx, 0x278(%rsp) movq %rax, 0x270(%rsp) movq 0x278(%rsp), %rax movq (%rax), %rax movq 0x270(%rsp), %rcx mulss (%rax,%rcx,4), %xmm0 callq 0x18bc110 movb %al, %dl movq 0x198(%rsp), %rax movslq 0x18c(%rsp), %rcx movb %dl, (%rax,%rcx) movl 0x18c(%rsp), %eax addl $0x1, %eax movl %eax, 0x18c(%rsp) jmp 0x18bb340 jmp 0x18bb3da jmp 0x18bb3dc cmpl $0x2, 0x1ac(%rsp) jne 0x18bb681 movq 0x1c0(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x188(%rsp) movq 0x1c0(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0x184(%rsp) movq 0x1b8(%rsp), %rdi movl 0x188(%rsp), %esi movl 0x184(%rsp), %edx movq 0x1b0(%rsp), %rax movq 0x8(%rax), %r8 movl $0x1, %ecx callq 0x652c0 movq 0x1b8(%rsp), %rax movq %rax, 0x200(%rsp) movq 0x200(%rsp), %rcx movq %rcx, 0x70(%rsp) movb $0x1, %al cmpq $0x0, (%rcx) movb %al, 0x7f(%rsp) je 0x18bb48f movq 0x70(%rsp), %rax movq %rax, 0x388(%rsp) movq 0x388(%rsp), %rcx movq 0x40(%rcx), %rax movslq 0x38(%rcx), %rcx imulq %rcx, %rax cmpq $0x0, %rax sete %al movb %al, 0x7f(%rsp) movb 0x7f(%rsp), %al testb $0x1, %al jne 0x18bb499 jmp 0x18bb4a9 movl $0xffffff9c, 0x1d4(%rsp) # imm = 0xFFFFFF9C jmp 0x18bc0fd movl $0x0, 0x180(%rsp) movl 0x180(%rsp), %eax cmpl 0x184(%rsp), %eax jge 0x18bb67f movq 0x90(%rsp), %rax movq 0x1c0(%rsp), %rdx movl 0x180(%rsp), %ecx movq %rdx, 0x298(%rsp) movl %ecx, 0x294(%rsp) movq 0x298(%rsp), %rsi movq (%rsi), %rcx movslq 0x2c(%rsi), %rdx movslq 0x294(%rsp), %rdi imulq %rdi, %rdx imulq 0x10(%rsi), %rdx addq %rdx, %rcx movq %rcx, 0x178(%rsp) movq 0x1b8(%rsp), %rdx movl 0x180(%rsp), %ecx movq %rdx, 0x2a8(%rsp) movl %ecx, 0x2a4(%rsp) movq 0x2a8(%rsp), %rsi movq (%rsi), %rcx movslq 0x2c(%rsi), %rdx movslq 0x2a4(%rsp), %rdi imulq %rdi, %rdx imulq 0x10(%rsi), %rdx addq %rdx, %rcx movq %rcx, 0x170(%rsp) cmpl $0x1, 0xd0(%rax) jne 0x18bb5ad movq 0x90(%rsp), %rax addq $0xd8, %rax movq %rax, 0x268(%rsp) movq $0x0, 0x260(%rsp) movq 0x268(%rsp), %rax movq (%rax), %rax movq 0x260(%rsp), %rcx movss (%rax,%rcx,4), %xmm0 movss %xmm0, 0x6c(%rsp) jmp 0x18bb5f2 
movq 0x90(%rsp), %rcx addq $0xd8, %rcx movslq 0x180(%rsp), %rax movq %rcx, 0x258(%rsp) movq %rax, 0x250(%rsp) movq 0x258(%rsp), %rax movq (%rax), %rax movq 0x250(%rsp), %rcx movss (%rax,%rcx,4), %xmm0 movss %xmm0, 0x6c(%rsp) movss 0x6c(%rsp), %xmm0 movss %xmm0, 0x16c(%rsp) movl $0x0, 0x168(%rsp) movl 0x168(%rsp), %eax cmpl 0x188(%rsp), %eax jge 0x18bb667 movq 0x178(%rsp), %rax movslq 0x168(%rsp), %rcx movss (%rax,%rcx,4), %xmm0 mulss 0x16c(%rsp), %xmm0 callq 0x18bc110 movb %al, %dl movq 0x170(%rsp), %rax movslq 0x168(%rsp), %rcx movb %dl, (%rax,%rcx) movl 0x168(%rsp), %eax addl $0x1, %eax movl %eax, 0x168(%rsp) jmp 0x18bb60c jmp 0x18bb669 movl 0x180(%rsp), %eax addl $0x1, %eax movl %eax, 0x180(%rsp) jmp 0x18bb4b4 jmp 0x18bb681 cmpl $0x3, 0x1ac(%rsp) jne 0x18bc0f2 movq 0x1c0(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x164(%rsp) movq 0x1c0(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0x160(%rsp) movq 0x1c0(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0x15c(%rsp) movl 0x164(%rsp), %eax imull 0x160(%rsp), %eax movl %eax, 0x158(%rsp) movq 0x1b8(%rsp), %rdi movl 0x164(%rsp), %esi movl 0x160(%rsp), %edx movl 0x15c(%rsp), %ecx movq 0x1b0(%rsp), %rax movq 0x8(%rax), %r9 movl $0x1, %r8d callq 0x65550 movq 0x1b8(%rsp), %rax movq %rax, 0x1f8(%rsp) movq 0x1f8(%rsp), %rcx movq %rcx, 0x60(%rsp) movb $0x1, %al cmpq $0x0, (%rcx) movb %al, 0x6b(%rsp) je 0x18bb764 movq 0x60(%rsp), %rax movq %rax, 0x390(%rsp) movq 0x390(%rsp), %rcx movq 0x40(%rcx), %rax movslq 0x38(%rcx), %rcx imulq %rcx, %rax cmpq $0x0, %rax sete %al movb %al, 0x6b(%rsp) movb 0x6b(%rsp), %al testb $0x1, %al jne 0x18bb76e jmp 0x18bb77e movl $0xffffff9c, 0x1d4(%rsp) # imm = 0xFFFFFF9C jmp 0x18bc0fd movl $0x0, 0x154(%rsp) movl 0x154(%rsp), %eax cmpl 0x15c(%rsp), %eax jge 0x18bc0f0 movq 0x1c0(%rsp), %rcx movl 0x154(%rsp), %eax leaq 0x100(%rsp), %rdx movq %rdx, 0x2c0(%rsp) movq %rcx, 0x2b8(%rsp) movl %eax, 0x2b4(%rsp) movq 0x2b8(%rsp), %rax movq %rax, 0x58(%rsp) movb $0x0, 0x2b3(%rsp) movl 0x2c(%rax), %r9d movl 
0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x2b4(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x100(%rsp), %r10 movq %r10, 0x400(%rsp) movl %r9d, 0x3fc(%rsp) movl %r8d, 0x3f8(%rsp) movl %edi, 0x3f4(%rsp) movq %rsi, 0x3e8(%rsp) movq %rdx, 0x3e0(%rsp) movl %ecx, 0x3dc(%rsp) movq %rax, 0x3d0(%rsp) movq 0x400(%rsp), %rcx movq %rcx, 0x50(%rsp) movq 0x3e8(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x3e0(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x3dc(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x3d0(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x3fc(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x3f8(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x3f4(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x410(%rsp) movl $0x10, 0x40c(%rsp) movq 0x410(%rsp), %rax movslq 0x40c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x40c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x58(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x128(%rsp) cmpl $0x4, 0x28(%rax) jne 0x18bb94d movq 0x58(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x140(%rsp) movb $0x1, 0x2b3(%rsp) testb $0x1, 0x2b3(%rsp) jne 0x18bba76 leaq 0x100(%rsp), %rax movq %rax, 0x2c8(%rsp) movq 0x2c8(%rsp), %rax movq %rax, 0x308(%rsp) movq 0x308(%rsp), %rax movq %rax, 0x48(%rsp) cmpq $0x0, 0x8(%rax) je 0x18bba1c movq 0x48(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x304(%rsp) # imm = 0xFFFFFFFF movl 0x304(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x300(%rsp) cmpl $0x1, 0x300(%rsp) jne 0x18bba1c movq 0x48(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x18bb9f0 movq 0x48(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq 
(%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x18bb9ee jmp 0x18bba1a movq 0x48(%rsp), %rax movq (%rax), %rax movq %rax, 0x370(%rsp) cmpq $0x0, 0x370(%rsp) je 0x18bba18 movq 0x370(%rsp), %rdi callq 0x5e480 jmp 0x18bba1a jmp 0x18bba1c movq 0x48(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x18bba74 movq %rax, %rdi callq 0x5fc90 jmp 0x18bba76 leaq 0x100(%rsp), %rax movq %rax, 0x210(%rsp) movq 0x210(%rsp), %rax movq (%rax), %rax movq %rax, 0x40(%rsp) leaq 0x100(%rsp), %rax movq %rax, 0x1d8(%rsp) movq 0x1d8(%rsp), %rax movq %rax, 0x348(%rsp) movq 0x348(%rsp), %rax movq %rax, 0x38(%rsp) cmpq $0x0, 0x8(%rax) je 0x18bbb4f movq 0x38(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x344(%rsp) # imm = 0xFFFFFFFF movl 0x344(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x340(%rsp) cmpl $0x1, 0x340(%rsp) jne 0x18bbb4f movq 0x38(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x18bbb23 movq 0x38(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x18bbb21 jmp 0x18bbb4d movq 0x38(%rsp), %rax movq (%rax), %rax movq %rax, 0x350(%rsp) cmpq $0x0, 0x350(%rsp) je 0x18bbb4b movq 0x350(%rsp), %rdi callq 0x5e480 jmp 0x18bbb4d jmp 0x18bbb4f movq 0x38(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x18bbba7 movq %rax, %rdi callq 0x5fc90 movq 0x40(%rsp), %rax movq %rax, 0x148(%rsp) movq 0x1b8(%rsp), %rcx movl 0x154(%rsp), %eax leaq 0xa0(%rsp), %rdx movq %rdx, 0x2e0(%rsp) movq %rcx, 0x2d8(%rsp) movl %eax, 0x2d4(%rsp) movq 0x2d8(%rsp), %rax movq %rax, 0x30(%rsp) movb $0x0, 0x2d3(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 
0x40(%rax), %rcx movslq 0x2d4(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0xa0(%rsp), %r10 movq %r10, 0x3c8(%rsp) movl %r9d, 0x3c4(%rsp) movl %r8d, 0x3c0(%rsp) movl %edi, 0x3bc(%rsp) movq %rsi, 0x3b0(%rsp) movq %rdx, 0x3a8(%rsp) movl %ecx, 0x3a4(%rsp) movq %rax, 0x398(%rsp) movq 0x3c8(%rsp), %rcx movq %rcx, 0x28(%rsp) movq 0x3b0(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x3a8(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x3a4(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x398(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x3c4(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x3c0(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x3bc(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x420(%rsp) movl $0x10, 0x41c(%rsp) movq 0x420(%rsp), %rax movslq 0x41c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x41c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x30(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0xc8(%rsp) cmpl $0x4, 0x28(%rax) jne 0x18bbd64 movq 0x30(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0xe0(%rsp) movb $0x1, 0x2d3(%rsp) testb $0x1, 0x2d3(%rsp) jne 0x18bbe8d leaq 0xa0(%rsp), %rax movq %rax, 0x2e8(%rsp) movq 0x2e8(%rsp), %rax movq %rax, 0x2f8(%rsp) movq 0x2f8(%rsp), %rax movq %rax, 0x20(%rsp) cmpq $0x0, 0x8(%rax) je 0x18bbe33 movq 0x20(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x2f4(%rsp) # imm = 0xFFFFFFFF movl 0x2f4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x2f0(%rsp) cmpl $0x1, 0x2f0(%rsp) jne 0x18bbe33 movq 0x20(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x18bbe07 movq 0x20(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x18bbe05 jmp 
0x18bbe31 movq 0x20(%rsp), %rax movq (%rax), %rax movq %rax, 0x378(%rsp) cmpq $0x0, 0x378(%rsp) je 0x18bbe2f movq 0x378(%rsp), %rdi callq 0x5e480 jmp 0x18bbe31 jmp 0x18bbe33 movq 0x20(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x18bbe8b movq %rax, %rdi callq 0x5fc90 jmp 0x18bbe8d leaq 0xa0(%rsp), %rax movq %rax, 0x220(%rsp) movq 0x220(%rsp), %rax movq (%rax), %rax movq %rax, 0x18(%rsp) leaq 0xa0(%rsp), %rax movq %rax, 0x1e8(%rsp) movq 0x1e8(%rsp), %rax movq %rax, 0x328(%rsp) movq 0x328(%rsp), %rax movq %rax, 0x10(%rsp) cmpq $0x0, 0x8(%rax) je 0x18bbf66 movq 0x10(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x324(%rsp) # imm = 0xFFFFFFFF movl 0x324(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x320(%rsp) cmpl $0x1, 0x320(%rsp) jne 0x18bbf66 movq 0x10(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x18bbf3a movq 0x10(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x18bbf38 jmp 0x18bbf64 movq 0x10(%rsp), %rax movq (%rax), %rax movq %rax, 0x360(%rsp) cmpq $0x0, 0x360(%rsp) je 0x18bbf62 movq 0x360(%rsp), %rdi callq 0x5e480 jmp 0x18bbf64 jmp 0x18bbf66 movq 0x10(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x18bbfbe movq %rax, %rdi callq 0x5fc90 movq 0x90(%rsp), %rax movq 0x18(%rsp), %rcx movq %rcx, 0xe8(%rsp) cmpl $0x1, 0xd0(%rax) jne 0x18bc01e movq 0x90(%rsp), %rax addq $0xd8, %rax movq %rax, 0x248(%rsp) movq $0x0, 0x240(%rsp) movq 0x248(%rsp), %rax movq (%rax), %rax movq 0x240(%rsp), %rcx movss (%rax,%rcx,4), %xmm0 movss %xmm0, 0xc(%rsp) jmp 0x18bc063 movq 0x90(%rsp), %rcx addq $0xd8, %rcx movslq 0x154(%rsp), %rax movq %rcx, 0x238(%rsp) movq 
%rax, 0x230(%rsp) movq 0x238(%rsp), %rax movq (%rax), %rax movq 0x230(%rsp), %rcx movss (%rax,%rcx,4), %xmm0 movss %xmm0, 0xc(%rsp) movss 0xc(%rsp), %xmm0 movss %xmm0, 0x9c(%rsp) movl $0x0, 0x98(%rsp) movl 0x98(%rsp), %eax cmpl 0x158(%rsp), %eax jge 0x18bc0d8 movq 0x148(%rsp), %rax movslq 0x98(%rsp), %rcx movss (%rax,%rcx,4), %xmm0 mulss 0x9c(%rsp), %xmm0 callq 0x18bc110 movb %al, %dl movq 0xe8(%rsp), %rax movslq 0x98(%rsp), %rcx movb %dl, (%rax,%rcx) movl 0x98(%rsp), %eax addl $0x1, %eax movl %eax, 0x98(%rsp) jmp 0x18bc07d jmp 0x18bc0da movl 0x154(%rsp), %eax addl $0x1, %eax movl %eax, 0x154(%rsp) jmp 0x18bb789 jmp 0x18bc0f2 movl $0x0, 0x1d4(%rsp) movl 0x1d4(%rsp), %eax addq $0x428, %rsp # imm = 0x428 retq nopl (%rax)
/ysh329[P]ncnn/src/layer/quantize.cpp
ncnn::Dequantize::Dequantize()
Dequantize::Dequantize() { one_blob_only = true; support_inplace = false; }
subq $0x68, %rsp movq %rdi, 0x30(%rsp) movq 0x30(%rsp), %rdi movq %rdi, 0x10(%rsp) callq 0xad350 movq 0x10(%rsp), %rax leaq 0x5c84c4(%rip), %rcx # 0x1eca048 addq $0x10, %rcx movq %rcx, (%rax) addq $0xd8, %rax movq %rax, 0x38(%rsp) movq 0x38(%rsp), %rax movq %rax, 0x18(%rsp) movq $0x0, (%rax) movq $0x0, 0x8(%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movq $0x0, 0x20(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq 0x10(%rsp), %rax addq $0x120, %rax # imm = 0x120 movq %rax, 0x40(%rsp) movq 0x40(%rsp), %rax movq %rax, 0x8(%rsp) movq $0x0, (%rax) movq $0x0, 0x8(%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movq $0x0, 0x20(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq 0x10(%rsp), %rax movb $0x1, 0x8(%rax) movb $0x0, 0x9(%rax) addq $0x68, %rsp retq nop
/ysh329[P]ncnn/src/layer/dequantize.cpp
ncnn::Dequantize::load_model(ncnn::ModelBin const&)
int Dequantize::load_model(const ModelBin& mb) { scale_data = mb.load(scale_data_size, 1); if (scale_data.empty()) return -100; if (bias_data_size) { bias_data = mb.load(bias_data_size, 1); if (bias_data.empty()) return -100; } return 0; }
subq $0x238, %rsp # imm = 0x238 movq %rdi, 0x118(%rsp) movq %rsi, 0x110(%rsp) movq 0x118(%rsp), %rax movq %rax, 0x60(%rsp) movq 0x110(%rsp), %rsi movl 0xd0(%rax), %edx movq (%rsi), %rax leaq 0xc8(%rsp), %rdi movl $0x1, %ecx callq *0x10(%rax) movq 0x60(%rsp), %rax addq $0xd8, %rax movq %rax, 0x178(%rsp) leaq 0xc8(%rsp), %rax movq %rax, 0x170(%rsp) movq 0x178(%rsp), %rax movq %rax, 0x68(%rsp) cmpq 0x170(%rsp), %rax jne 0x1901d61 movq 0x68(%rsp), %rax movq %rax, 0x180(%rsp) jmp 0x1901f3c movq 0x170(%rsp), %rax cmpq $0x0, 0x8(%rax) je 0x1901d99 movq 0x170(%rsp), %rax movq 0x8(%rax), %rcx movl $0x1, 0x16c(%rsp) movl 0x16c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x168(%rsp) movq 0x68(%rsp), %rax movq %rax, 0x1a0(%rsp) movq 0x1a0(%rsp), %rax movq %rax, 0x58(%rsp) cmpq $0x0, 0x8(%rax) je 0x1901e3f movq 0x58(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x19c(%rsp) # imm = 0xFFFFFFFF movl 0x19c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x198(%rsp) cmpl $0x1, 0x198(%rsp) jne 0x1901e3f movq 0x58(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1901e13 movq 0x58(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1901e11 jmp 0x1901e3d movq 0x58(%rsp), %rax movq (%rax), %rax movq %rax, 0x220(%rsp) cmpq $0x0, 0x220(%rsp) je 0x1901e3b movq 0x220(%rsp), %rdi callq 0x5e480 jmp 0x1901e3d jmp 0x1901e3f movq 0x58(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) movq 0x68(%rsp), %rax movq 0x170(%rsp), %rcx movq (%rcx), %rcx movq %rcx, (%rax) movq 0x170(%rsp), %rcx movq 0x8(%rcx), %rcx movq %rcx, 0x8(%rax) movq 0x170(%rsp), %rcx movq 0x10(%rcx), %rcx movq %rcx, 0x10(%rax) movq 0x170(%rsp), %rcx movl 0x18(%rcx), %ecx movl %ecx, 0x18(%rax) movq 0x170(%rsp), %rcx movq 0x20(%rcx), %rcx movq %rcx, 0x20(%rax) movq 0x170(%rsp), %rcx movl 0x28(%rcx), %ecx 
movl %ecx, 0x28(%rax) movq 0x170(%rsp), %rcx movl 0x2c(%rcx), %ecx movl %ecx, 0x2c(%rax) movq 0x170(%rsp), %rcx movl 0x30(%rcx), %ecx movl %ecx, 0x30(%rax) movq 0x170(%rsp), %rcx movl 0x34(%rcx), %ecx movl %ecx, 0x34(%rax) movq 0x170(%rsp), %rcx movl 0x38(%rcx), %ecx movl %ecx, 0x38(%rax) movq 0x170(%rsp), %rcx movq 0x40(%rcx), %rcx movq %rcx, 0x40(%rax) movq %rax, 0x180(%rsp) leaq 0xc8(%rsp), %rax movq %rax, 0x128(%rsp) movq 0x128(%rsp), %rax movq %rax, 0x1f0(%rsp) movq 0x1f0(%rsp), %rax movq %rax, 0x50(%rsp) cmpq $0x0, 0x8(%rax) je 0x1901ff5 movq 0x50(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x1ec(%rsp) # imm = 0xFFFFFFFF movl 0x1ec(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1e8(%rsp) cmpl $0x1, 0x1e8(%rsp) jne 0x1901ff5 movq 0x50(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1901fc9 movq 0x50(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1901fc7 jmp 0x1901ff3 movq 0x50(%rsp), %rax movq (%rax), %rax movq %rax, 0x1f8(%rsp) cmpq $0x0, 0x1f8(%rsp) je 0x1901ff1 movq 0x1f8(%rsp), %rdi callq 0x5e480 jmp 0x1901ff3 jmp 0x1901ff5 movq 0x50(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x190204d movq %rax, %rdi callq 0x5fc90 movq 0x60(%rsp), %rax addq $0xd8, %rax movq %rax, 0x190(%rsp) movq 0x190(%rsp), %rcx movq %rcx, 0x40(%rsp) movb $0x1, %al cmpq $0x0, (%rcx) movb %al, 0x4f(%rsp) je 0x19020a5 movq 0x40(%rsp), %rax movq %rax, 0x228(%rsp) movq 0x228(%rsp), %rcx movq 0x40(%rcx), %rax movslq 0x38(%rcx), %rcx imulq %rcx, %rax cmpq $0x0, %rax sete %al movb %al, 0x4f(%rsp) movb 0x4f(%rsp), %al testb $0x1, %al jne 0x19020b2 jmp 0x19021ec movl $0xffffff9c, 0x124(%rsp) # imm = 0xFFFFFF9C jmp 0x19026f9 movq %rax, %rcx movl %edx, %eax movq %rcx, 0xc0(%rsp) movl %eax, 0xbc(%rsp) leaq 0xc8(%rsp), %rax movq %rax, 0x130(%rsp) movq 
0x130(%rsp), %rax movq %rax, 0x1e0(%rsp) movq 0x1e0(%rsp), %rax movq %rax, 0x38(%rsp) cmpq $0x0, 0x8(%rax) je 0x190218f movq 0x38(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x1dc(%rsp) # imm = 0xFFFFFFFF movl 0x1dc(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1d8(%rsp) cmpl $0x1, 0x1d8(%rsp) jne 0x190218f movq 0x38(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1902163 movq 0x38(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1902161 jmp 0x190218d movq 0x38(%rsp), %rax movq (%rax), %rax movq %rax, 0x200(%rsp) cmpq $0x0, 0x200(%rsp) je 0x190218b movq 0x200(%rsp), %rdi callq 0x5e480 jmp 0x190218d jmp 0x190218f movq 0x38(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x19021e7 movq %rax, %rdi callq 0x5fc90 jmp 0x1902708 movq 0x60(%rsp), %rax cmpl $0x0, 0xd4(%rax) je 0x19026ee movq 0x60(%rsp), %rax movq 0x110(%rsp), %rsi movl 0xd4(%rax), %edx movq (%rsi), %rax leaq 0x70(%rsp), %rdi movl $0x1, %ecx callq *0x10(%rax) movq 0x60(%rsp), %rax addq $0x120, %rax # imm = 0x120 movq %rax, 0x158(%rsp) leaq 0x70(%rsp), %rax movq %rax, 0x150(%rsp) movq 0x158(%rsp), %rax movq %rax, 0x30(%rsp) cmpq 0x150(%rsp), %rax jne 0x190226a movq 0x30(%rsp), %rax movq %rax, 0x160(%rsp) jmp 0x1902445 movq 0x150(%rsp), %rax cmpq $0x0, 0x8(%rax) je 0x19022a2 movq 0x150(%rsp), %rax movq 0x8(%rax), %rcx movl $0x1, 0x14c(%rsp) movl 0x14c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x148(%rsp) movq 0x30(%rsp), %rax movq %rax, 0x1b0(%rsp) movq 0x1b0(%rsp), %rax movq %rax, 0x28(%rsp) cmpq $0x0, 0x8(%rax) je 0x1902348 movq 0x28(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x1ac(%rsp) # imm = 0xFFFFFFFF movl 0x1ac(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1a8(%rsp) cmpl $0x1, 0x1a8(%rsp) jne 0x1902348 movq 0x28(%rsp), %rax cmpq $0x0, 0x20(%rax) 
je 0x190231c movq 0x28(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x190231a jmp 0x1902346 movq 0x28(%rsp), %rax movq (%rax), %rax movq %rax, 0x218(%rsp) cmpq $0x0, 0x218(%rsp) je 0x1902344 movq 0x218(%rsp), %rdi callq 0x5e480 jmp 0x1902346 jmp 0x1902348 movq 0x28(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) movq 0x30(%rsp), %rax movq 0x150(%rsp), %rcx movq (%rcx), %rcx movq %rcx, (%rax) movq 0x150(%rsp), %rcx movq 0x8(%rcx), %rcx movq %rcx, 0x8(%rax) movq 0x150(%rsp), %rcx movq 0x10(%rcx), %rcx movq %rcx, 0x10(%rax) movq 0x150(%rsp), %rcx movl 0x18(%rcx), %ecx movl %ecx, 0x18(%rax) movq 0x150(%rsp), %rcx movq 0x20(%rcx), %rcx movq %rcx, 0x20(%rax) movq 0x150(%rsp), %rcx movl 0x28(%rcx), %ecx movl %ecx, 0x28(%rax) movq 0x150(%rsp), %rcx movl 0x2c(%rcx), %ecx movl %ecx, 0x2c(%rax) movq 0x150(%rsp), %rcx movl 0x30(%rcx), %ecx movl %ecx, 0x30(%rax) movq 0x150(%rsp), %rcx movl 0x34(%rcx), %ecx movl %ecx, 0x34(%rax) movq 0x150(%rsp), %rcx movl 0x38(%rcx), %ecx movl %ecx, 0x38(%rax) movq 0x150(%rsp), %rcx movq 0x40(%rcx), %rcx movq %rcx, 0x40(%rax) movq %rax, 0x160(%rsp) leaq 0x70(%rsp), %rax movq %rax, 0x138(%rsp) movq 0x138(%rsp), %rax movq %rax, 0x1d0(%rsp) movq 0x1d0(%rsp), %rax movq %rax, 0x20(%rsp) cmpq $0x0, 0x8(%rax) je 0x19024fb movq 0x20(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x1cc(%rsp) # imm = 0xFFFFFFFF movl 0x1cc(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1c8(%rsp) cmpl $0x1, 0x1c8(%rsp) jne 0x19024fb movq 0x20(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x19024cf movq 0x20(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x19024cd jmp 0x19024f9 movq 0x20(%rsp), %rax movq (%rax), %rax movq %rax, 0x208(%rsp) cmpq $0x0, 0x208(%rsp) je 0x19024f7 movq 
0x208(%rsp), %rdi callq 0x5e480 jmp 0x19024f9 jmp 0x19024fb movq 0x20(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1902553 movq %rax, %rdi callq 0x5fc90 movq 0x60(%rsp), %rax addq $0x120, %rax # imm = 0x120 movq %rax, 0x188(%rsp) movq 0x188(%rsp), %rcx movq %rcx, 0x10(%rsp) movb $0x1, %al cmpq $0x0, (%rcx) movb %al, 0x1f(%rsp) je 0x19025ab movq 0x10(%rsp), %rax movq %rax, 0x230(%rsp) movq 0x230(%rsp), %rcx movq 0x40(%rcx), %rax movslq 0x38(%rcx), %rcx imulq %rcx, %rax cmpq $0x0, %rax sete %al movb %al, 0x1f(%rsp) movb 0x1f(%rsp), %al testb $0x1, %al jne 0x19025b8 jmp 0x19026ec movl $0xffffff9c, 0x124(%rsp) # imm = 0xFFFFFF9C jmp 0x19026f9 movq %rax, %rcx movl %edx, %eax movq %rcx, 0xc0(%rsp) movl %eax, 0xbc(%rsp) leaq 0x70(%rsp), %rax movq %rax, 0x140(%rsp) movq 0x140(%rsp), %rax movq %rax, 0x1c0(%rsp) movq 0x1c0(%rsp), %rax movq %rax, 0x8(%rsp) cmpq $0x0, 0x8(%rax) je 0x1902692 movq 0x8(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x1bc(%rsp) # imm = 0xFFFFFFFF movl 0x1bc(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1b8(%rsp) cmpl $0x1, 0x1b8(%rsp) jne 0x1902692 movq 0x8(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1902666 movq 0x8(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1902664 jmp 0x1902690 movq 0x8(%rsp), %rax movq (%rax), %rax movq %rax, 0x210(%rsp) cmpq $0x0, 0x210(%rsp) je 0x190268e movq 0x210(%rsp), %rdi callq 0x5e480 jmp 0x1902690 jmp 0x1902692 movq 0x8(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x19026ea movq %rax, %rdi callq 0x5fc90 jmp 0x1902708 jmp 0x19026ee movl $0x0, 0x124(%rsp) movl 0x124(%rsp), %eax addq 
$0x238, %rsp # imm = 0x238 retq movq 0xc0(%rsp), %rdi callq 0x5e3b0 nopw %cs:(%rax,%rax)
/ysh329[P]ncnn/src/layer/dequantize.cpp
ncnn::Requantize::load_model(ncnn::ModelBin const&)
// Load the quantization coefficient tables for this layer from the model
// binary. scale_in_data and scale_out_data are mandatory; bias_data is
// loaded only when bias_data_size is nonzero. Each table is read as a
// flat array (load type 1).
// Returns 0 on success, -100 when a required blob fails to load.
int Requantize::load_model(const ModelBin& mb)
{
    scale_in_data = mb.load(scale_in_data_size, 1);
    if (scale_in_data.empty())
        return -100;

    scale_out_data = mb.load(scale_out_data_size, 1);
    if (scale_out_data.empty())
        return -100;

    // bias is optional: a size of 0 means the layer carries no bias term
    if (bias_data_size)
    {
        bias_data = mb.load(bias_data_size, 1);
        if (bias_data.empty())
            return -100;
    }

    return 0;
}
subq $0x338, %rsp # imm = 0x338 movq %rdi, 0x190(%rsp) movq %rsi, 0x188(%rsp) movq 0x190(%rsp), %rax movq %rax, 0x90(%rsp) movq 0x188(%rsp), %rsi movl 0xd0(%rax), %edx movq (%rsi), %rax leaq 0x140(%rsp), %rdi movl $0x1, %ecx callq *0x10(%rax) movq 0x90(%rsp), %rax addq $0x128, %rax # imm = 0x128 movq %rax, 0x220(%rsp) leaq 0x140(%rsp), %rax movq %rax, 0x218(%rsp) movq 0x220(%rsp), %rax movq %rax, 0x98(%rsp) cmpq 0x218(%rsp), %rax jne 0x19eac9d movq 0x98(%rsp), %rax movq %rax, 0x228(%rsp) jmp 0x19eae90 movq 0x218(%rsp), %rax cmpq $0x0, 0x8(%rax) je 0x19eacd5 movq 0x218(%rsp), %rax movq 0x8(%rax), %rcx movl $0x1, 0x214(%rsp) movl 0x214(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x210(%rsp) movq 0x98(%rsp), %rax movq %rax, 0x250(%rsp) movq 0x250(%rsp), %rax movq %rax, 0x88(%rsp) cmpq $0x0, 0x8(%rax) je 0x19ead8d movq 0x88(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x24c(%rsp) # imm = 0xFFFFFFFF movl 0x24c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x248(%rsp) cmpl $0x1, 0x248(%rsp) jne 0x19ead8d movq 0x88(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x19ead5e movq 0x88(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x19ead5c jmp 0x19ead8b movq 0x88(%rsp), %rax movq (%rax), %rax movq %rax, 0x318(%rsp) cmpq $0x0, 0x318(%rsp) je 0x19ead89 movq 0x318(%rsp), %rdi callq 0x5e480 jmp 0x19ead8b jmp 0x19ead8d movq 0x88(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) movq 0x98(%rsp), %rax movq 0x218(%rsp), %rcx movq (%rcx), %rcx movq %rcx, (%rax) movq 0x218(%rsp), %rcx movq 0x8(%rcx), %rcx movq %rcx, 0x8(%rax) movq 0x218(%rsp), %rcx movq 0x10(%rcx), %rcx movq %rcx, 0x10(%rax) movq 0x218(%rsp), %rcx movl 0x18(%rcx), %ecx movl %ecx, 0x18(%rax) movq 0x218(%rsp), %rcx movq 0x20(%rcx), %rcx movq %rcx, 0x20(%rax) movq 0x218(%rsp), %rcx movl 
0x28(%rcx), %ecx movl %ecx, 0x28(%rax) movq 0x218(%rsp), %rcx movl 0x2c(%rcx), %ecx movl %ecx, 0x2c(%rax) movq 0x218(%rsp), %rcx movl 0x30(%rcx), %ecx movl %ecx, 0x30(%rax) movq 0x218(%rsp), %rcx movl 0x34(%rcx), %ecx movl %ecx, 0x34(%rax) movq 0x218(%rsp), %rcx movl 0x38(%rcx), %ecx movl %ecx, 0x38(%rax) movq 0x218(%rsp), %rcx movq 0x40(%rcx), %rcx movq %rcx, 0x40(%rax) movq %rax, 0x228(%rsp) leaq 0x140(%rsp), %rax movq %rax, 0x1a0(%rsp) movq 0x1a0(%rsp), %rax movq %rax, 0x2d0(%rsp) movq 0x2d0(%rsp), %rax movq %rax, 0x80(%rsp) cmpq $0x0, 0x8(%rax) je 0x19eaf58 movq 0x80(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x2cc(%rsp) # imm = 0xFFFFFFFF movl 0x2cc(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x2c8(%rsp) cmpl $0x1, 0x2c8(%rsp) jne 0x19eaf58 movq 0x80(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x19eaf29 movq 0x80(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x19eaf27 jmp 0x19eaf56 movq 0x80(%rsp), %rax movq (%rax), %rax movq %rax, 0x2d8(%rsp) cmpq $0x0, 0x2d8(%rsp) je 0x19eaf54 movq 0x2d8(%rsp), %rdi callq 0x5e480 jmp 0x19eaf56 jmp 0x19eaf58 movq 0x80(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x19eafb3 movq %rax, %rdi callq 0x5fc90 movq 0x90(%rsp), %rax addq $0x128, %rax # imm = 0x128 movq %rax, 0x240(%rsp) movq 0x240(%rsp), %rcx movq %rcx, 0x70(%rsp) movb $0x1, %al cmpq $0x0, (%rcx) movb %al, 0x7f(%rsp) je 0x19eb00e movq 0x70(%rsp), %rax movq %rax, 0x320(%rsp) movq 0x320(%rsp), %rcx movq 0x40(%rcx), %rax movslq 0x38(%rcx), %rcx imulq %rcx, %rax cmpq $0x0, %rax sete %al movb %al, 0x7f(%rsp) movb 0x7f(%rsp), %al testb $0x1, %al jne 0x19eb01b jmp 0x19eb155 movl $0xffffff9c, 0x19c(%rsp) # imm = 0xFFFFFF9C jmp 0x19ebb80 movq %rax, %rcx movl %edx, %eax movq %rcx, 0x138(%rsp) movl %eax, 0x134(%rsp) leaq 
0x140(%rsp), %rax movq %rax, 0x1a8(%rsp) movq 0x1a8(%rsp), %rax movq %rax, 0x2c0(%rsp) movq 0x2c0(%rsp), %rax movq %rax, 0x68(%rsp) cmpq $0x0, 0x8(%rax) je 0x19eb0f8 movq 0x68(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x2bc(%rsp) # imm = 0xFFFFFFFF movl 0x2bc(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x2b8(%rsp) cmpl $0x1, 0x2b8(%rsp) jne 0x19eb0f8 movq 0x68(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x19eb0cc movq 0x68(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x19eb0ca jmp 0x19eb0f6 movq 0x68(%rsp), %rax movq (%rax), %rax movq %rax, 0x2e0(%rsp) cmpq $0x0, 0x2e0(%rsp) je 0x19eb0f4 movq 0x2e0(%rsp), %rdi callq 0x5e480 jmp 0x19eb0f6 jmp 0x19eb0f8 movq 0x68(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x19eb150 movq %rax, %rdi callq 0x5fc90 jmp 0x19ebb8f movq 0x90(%rsp), %rax movq 0x188(%rsp), %rsi movl 0xd4(%rax), %edx movq (%rsi), %rax leaq 0xe8(%rsp), %rdi movl $0x1, %ecx callq *0x10(%rax) movq 0x90(%rsp), %rax addq $0x170, %rax # imm = 0x170 movq %rax, 0x200(%rsp) leaq 0xe8(%rsp), %rax movq %rax, 0x1f8(%rsp) movq 0x200(%rsp), %rax movq %rax, 0x60(%rsp) cmpq 0x1f8(%rsp), %rax jne 0x19eb1cd movq 0x60(%rsp), %rax movq %rax, 0x208(%rsp) jmp 0x19eb3a8 movq 0x1f8(%rsp), %rax cmpq $0x0, 0x8(%rax) je 0x19eb205 movq 0x1f8(%rsp), %rax movq 0x8(%rax), %rcx movl $0x1, 0x1f4(%rsp) movl 0x1f4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1f0(%rsp) movq 0x60(%rsp), %rax movq %rax, 0x260(%rsp) movq 0x260(%rsp), %rax movq %rax, 0x58(%rsp) cmpq $0x0, 0x8(%rax) je 0x19eb2ab movq 0x58(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x25c(%rsp) # imm = 0xFFFFFFFF movl 0x25c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x258(%rsp) cmpl $0x1, 0x258(%rsp) jne 0x19eb2ab movq 0x58(%rsp), %rax cmpq $0x0, 0x20(%rax) je 
0x19eb27f movq 0x58(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x19eb27d jmp 0x19eb2a9 movq 0x58(%rsp), %rax movq (%rax), %rax movq %rax, 0x310(%rsp) cmpq $0x0, 0x310(%rsp) je 0x19eb2a7 movq 0x310(%rsp), %rdi callq 0x5e480 jmp 0x19eb2a9 jmp 0x19eb2ab movq 0x58(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) movq 0x60(%rsp), %rax movq 0x1f8(%rsp), %rcx movq (%rcx), %rcx movq %rcx, (%rax) movq 0x1f8(%rsp), %rcx movq 0x8(%rcx), %rcx movq %rcx, 0x8(%rax) movq 0x1f8(%rsp), %rcx movq 0x10(%rcx), %rcx movq %rcx, 0x10(%rax) movq 0x1f8(%rsp), %rcx movl 0x18(%rcx), %ecx movl %ecx, 0x18(%rax) movq 0x1f8(%rsp), %rcx movq 0x20(%rcx), %rcx movq %rcx, 0x20(%rax) movq 0x1f8(%rsp), %rcx movl 0x28(%rcx), %ecx movl %ecx, 0x28(%rax) movq 0x1f8(%rsp), %rcx movl 0x2c(%rcx), %ecx movl %ecx, 0x2c(%rax) movq 0x1f8(%rsp), %rcx movl 0x30(%rcx), %ecx movl %ecx, 0x30(%rax) movq 0x1f8(%rsp), %rcx movl 0x34(%rcx), %ecx movl %ecx, 0x34(%rax) movq 0x1f8(%rsp), %rcx movl 0x38(%rcx), %ecx movl %ecx, 0x38(%rax) movq 0x1f8(%rsp), %rcx movq 0x40(%rcx), %rcx movq %rcx, 0x40(%rax) movq %rax, 0x208(%rsp) leaq 0xe8(%rsp), %rax movq %rax, 0x1b0(%rsp) movq 0x1b0(%rsp), %rax movq %rax, 0x2b0(%rsp) movq 0x2b0(%rsp), %rax movq %rax, 0x50(%rsp) cmpq $0x0, 0x8(%rax) je 0x19eb461 movq 0x50(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x2ac(%rsp) # imm = 0xFFFFFFFF movl 0x2ac(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x2a8(%rsp) cmpl $0x1, 0x2a8(%rsp) jne 0x19eb461 movq 0x50(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x19eb435 movq 0x50(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x19eb433 jmp 0x19eb45f movq 0x50(%rsp), %rax movq (%rax), %rax movq %rax, 0x2e8(%rsp) cmpq $0x0, 0x2e8(%rsp) je 0x19eb45d movq 
0x2e8(%rsp), %rdi callq 0x5e480 jmp 0x19eb45f jmp 0x19eb461 movq 0x50(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x19eb4b9 movq %rax, %rdi callq 0x5fc90 movq 0x90(%rsp), %rax addq $0x170, %rax # imm = 0x170 movq %rax, 0x238(%rsp) movq 0x238(%rsp), %rcx movq %rcx, 0x40(%rsp) movb $0x1, %al cmpq $0x0, (%rcx) movb %al, 0x4f(%rsp) je 0x19eb514 movq 0x40(%rsp), %rax movq %rax, 0x328(%rsp) movq 0x328(%rsp), %rcx movq 0x40(%rcx), %rax movslq 0x38(%rcx), %rcx imulq %rcx, %rax cmpq $0x0, %rax sete %al movb %al, 0x4f(%rsp) movb 0x4f(%rsp), %al testb $0x1, %al jne 0x19eb521 jmp 0x19eb65b movl $0xffffff9c, 0x19c(%rsp) # imm = 0xFFFFFF9C jmp 0x19ebb80 movq %rax, %rcx movl %edx, %eax movq %rcx, 0x138(%rsp) movl %eax, 0x134(%rsp) leaq 0xe8(%rsp), %rax movq %rax, 0x1b8(%rsp) movq 0x1b8(%rsp), %rax movq %rax, 0x2a0(%rsp) movq 0x2a0(%rsp), %rax movq %rax, 0x38(%rsp) cmpq $0x0, 0x8(%rax) je 0x19eb5fe movq 0x38(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x29c(%rsp) # imm = 0xFFFFFFFF movl 0x29c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x298(%rsp) cmpl $0x1, 0x298(%rsp) jne 0x19eb5fe movq 0x38(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x19eb5d2 movq 0x38(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x19eb5d0 jmp 0x19eb5fc movq 0x38(%rsp), %rax movq (%rax), %rax movq %rax, 0x2f0(%rsp) cmpq $0x0, 0x2f0(%rsp) je 0x19eb5fa movq 0x2f0(%rsp), %rdi callq 0x5e480 jmp 0x19eb5fc jmp 0x19eb5fe movq 0x38(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x19eb656 movq %rax, %rdi callq 0x5fc90 jmp 0x19ebb8f movq 0x90(%rsp), %rax cmpl $0x0, 0xd8(%rax) je 0x19ebb75 
movq 0x90(%rsp), %rax movq 0x188(%rsp), %rsi movl 0xd8(%rax), %edx movq (%rsi), %rax leaq 0xa0(%rsp), %rdi movl $0x1, %ecx callq *0x10(%rax) movq 0x90(%rsp), %rax addq $0x1b8, %rax # imm = 0x1B8 movq %rax, 0x1e0(%rsp) leaq 0xa0(%rsp), %rax movq %rax, 0x1d8(%rsp) movq 0x1e0(%rsp), %rax movq %rax, 0x30(%rsp) cmpq 0x1d8(%rsp), %rax jne 0x19eb6e8 movq 0x30(%rsp), %rax movq %rax, 0x1e8(%rsp) jmp 0x19eb8c3 movq 0x1d8(%rsp), %rax cmpq $0x0, 0x8(%rax) je 0x19eb720 movq 0x1d8(%rsp), %rax movq 0x8(%rax), %rcx movl $0x1, 0x1d4(%rsp) movl 0x1d4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1d0(%rsp) movq 0x30(%rsp), %rax movq %rax, 0x270(%rsp) movq 0x270(%rsp), %rax movq %rax, 0x28(%rsp) cmpq $0x0, 0x8(%rax) je 0x19eb7c6 movq 0x28(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x26c(%rsp) # imm = 0xFFFFFFFF movl 0x26c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x268(%rsp) cmpl $0x1, 0x268(%rsp) jne 0x19eb7c6 movq 0x28(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x19eb79a movq 0x28(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x19eb798 jmp 0x19eb7c4 movq 0x28(%rsp), %rax movq (%rax), %rax movq %rax, 0x308(%rsp) cmpq $0x0, 0x308(%rsp) je 0x19eb7c2 movq 0x308(%rsp), %rdi callq 0x5e480 jmp 0x19eb7c4 jmp 0x19eb7c6 movq 0x28(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) movq 0x30(%rsp), %rax movq 0x1d8(%rsp), %rcx movq (%rcx), %rcx movq %rcx, (%rax) movq 0x1d8(%rsp), %rcx movq 0x8(%rcx), %rcx movq %rcx, 0x8(%rax) movq 0x1d8(%rsp), %rcx movq 0x10(%rcx), %rcx movq %rcx, 0x10(%rax) movq 0x1d8(%rsp), %rcx movl 0x18(%rcx), %ecx movl %ecx, 0x18(%rax) movq 0x1d8(%rsp), %rcx movq 0x20(%rcx), %rcx movq %rcx, 0x20(%rax) movq 0x1d8(%rsp), %rcx movl 0x28(%rcx), %ecx movl %ecx, 0x28(%rax) movq 0x1d8(%rsp), %rcx movl 0x2c(%rcx), %ecx movl %ecx, 
0x2c(%rax) movq 0x1d8(%rsp), %rcx movl 0x30(%rcx), %ecx movl %ecx, 0x30(%rax) movq 0x1d8(%rsp), %rcx movl 0x34(%rcx), %ecx movl %ecx, 0x34(%rax) movq 0x1d8(%rsp), %rcx movl 0x38(%rcx), %ecx movl %ecx, 0x38(%rax) movq 0x1d8(%rsp), %rcx movq 0x40(%rcx), %rcx movq %rcx, 0x40(%rax) movq %rax, 0x1e8(%rsp) leaq 0xa0(%rsp), %rax movq %rax, 0x1c0(%rsp) movq 0x1c0(%rsp), %rax movq %rax, 0x290(%rsp) movq 0x290(%rsp), %rax movq %rax, 0x20(%rsp) cmpq $0x0, 0x8(%rax) je 0x19eb97c movq 0x20(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x28c(%rsp) # imm = 0xFFFFFFFF movl 0x28c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x288(%rsp) cmpl $0x1, 0x288(%rsp) jne 0x19eb97c movq 0x20(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x19eb950 movq 0x20(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x19eb94e jmp 0x19eb97a movq 0x20(%rsp), %rax movq (%rax), %rax movq %rax, 0x2f8(%rsp) cmpq $0x0, 0x2f8(%rsp) je 0x19eb978 movq 0x2f8(%rsp), %rdi callq 0x5e480 jmp 0x19eb97a jmp 0x19eb97c movq 0x20(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x19eb9d4 movq %rax, %rdi callq 0x5fc90 movq 0x90(%rsp), %rax addq $0x1b8, %rax # imm = 0x1B8 movq %rax, 0x230(%rsp) movq 0x230(%rsp), %rcx movq %rcx, 0x10(%rsp) movb $0x1, %al cmpq $0x0, (%rcx) movb %al, 0x1f(%rsp) je 0x19eba2f movq 0x10(%rsp), %rax movq %rax, 0x330(%rsp) movq 0x330(%rsp), %rcx movq 0x40(%rcx), %rax movslq 0x38(%rcx), %rcx imulq %rcx, %rax cmpq $0x0, %rax sete %al movb %al, 0x1f(%rsp) movb 0x1f(%rsp), %al testb $0x1, %al jne 0x19eba3c jmp 0x19ebb73 movl $0xffffff9c, 0x19c(%rsp) # imm = 0xFFFFFF9C jmp 0x19ebb80 movq %rax, %rcx movl %edx, %eax movq %rcx, 0x138(%rsp) movl %eax, 0x134(%rsp) leaq 0xa0(%rsp), %rax movq %rax, 0x1c8(%rsp) movq 0x1c8(%rsp), %rax movq %rax, 0x280(%rsp) movq 0x280(%rsp), 
%rax movq %rax, 0x8(%rsp) cmpq $0x0, 0x8(%rax) je 0x19ebb19 movq 0x8(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x27c(%rsp) # imm = 0xFFFFFFFF movl 0x27c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x278(%rsp) cmpl $0x1, 0x278(%rsp) jne 0x19ebb19 movq 0x8(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x19ebaed movq 0x8(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x19ebaeb jmp 0x19ebb17 movq 0x8(%rsp), %rax movq (%rax), %rax movq %rax, 0x300(%rsp) cmpq $0x0, 0x300(%rsp) je 0x19ebb15 movq 0x300(%rsp), %rdi callq 0x5e480 jmp 0x19ebb17 jmp 0x19ebb19 movq 0x8(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x19ebb71 movq %rax, %rdi callq 0x5fc90 jmp 0x19ebb8f jmp 0x19ebb75 movl $0x0, 0x19c(%rsp) movl 0x19c(%rsp), %eax addq $0x338, %rsp # imm = 0x338 retq movq 0x138(%rsp), %rdi callq 0x5e3b0 nopl (%rax)
/ysh329[P]ncnn/src/layer/requantize.cpp
ncnn::float32_to_int8(float)
// Convert a float to a saturating signed 8-bit value using
// round-half-away-from-zero: shift by +/-0.5 depending on sign,
// then truncate, clamping the result to [-128, 127].
// (NaN fails the >= 0 test and flows through the negative branch,
// matching the original control flow.)
signed char float32_to_int8(float value)
{
    const float rounded = (value >= 0.f) ? (value + 0.5f) : (value - 0.5f);

    if (rounded > 127)
        return 127;
    if (rounded < -128)
        return -128;

    return static_cast<signed char>(rounded);
}
movss %xmm0, -0x8(%rsp) movss -0x8(%rsp), %xmm0 xorps %xmm1, %xmm1 ucomiss %xmm1, %xmm0 jb 0x1ca1fba movss 0x15e328(%rip), %xmm0 # 0x1e002d4 addss -0x8(%rsp), %xmm0 movss %xmm0, -0xc(%rsp) jmp 0x1ca1fd2 movss -0x8(%rsp), %xmm0 movss 0x15e30c(%rip), %xmm1 # 0x1e002d4 subss %xmm1, %xmm0 movss %xmm0, -0xc(%rsp) movss -0xc(%rsp), %xmm0 movss 0x1797a0(%rip), %xmm1 # 0x1e1b780 ucomiss %xmm1, %xmm0 jbe 0x1ca1fec movb $0x7f, -0x1(%rsp) jmp 0x1ca2010 movss 0x16d08c(%rip), %xmm0 # 0x1e0f080 ucomiss -0xc(%rsp), %xmm0 jbe 0x1ca2002 movb $-0x80, -0x1(%rsp) jmp 0x1ca2010 movss -0xc(%rsp), %xmm0 cvttss2si %xmm0, %eax movb %al, -0x1(%rsp) movb -0x1(%rsp), %al retq nopw %cs:(%rax,%rax)
/ysh329[P]ncnn/src/layer/cast.cpp
ncnn::cast_fp32_to_fp16_sse(ncnn::Mat const&, ncnn::Mat&, ncnn::Option const&)
static void cast_fp32_to_fp16_sse(const Mat& bottom_blob, Mat& top_blob, const Option& opt) { #if NCNN_RUNTIME_CPU && NCNN_AVX512FP16 && __AVX512F__ && !__AVX512FP16__ if (ncnn::cpu_support_x86_avx512_fp16()) { cast_fp32_to_fp16_sse_avx512fp16(bottom_blob, top_blob, opt); return; } #endif #if NCNN_RUNTIME_CPU && NCNN_F16C && __AVX__ && !__F16C__ if (ncnn::cpu_support_x86_f16c()) { cast_fp32_to_fp16_sse_f16c(bottom_blob, top_blob, opt); return; } #endif const int w = bottom_blob.w; const int h = bottom_blob.h; const int d = bottom_blob.d; const int channels = bottom_blob.c; const int elempack = bottom_blob.elempack; const int size = w * h * d * elempack; #pragma omp parallel for num_threads(opt.num_threads) for (int q = 0; q < channels; q++) { const float* ptr = bottom_blob.channel(q); unsigned short* outptr = top_blob.channel(q); int i = 0; #if __AVX512FP16__ for (; i + 15 < size; i += 16) { __m512 _v_fp32 = _mm512_loadu_ps(ptr); __m256h _v_fp16 = _mm512_cvtxps_ph(_v_fp32); _mm256_storeu_si256((__m256i*)outptr, (__m256i)_v_fp16); ptr += 16; outptr += 16; } for (; i + 7 < size; i += 8) { __m256 _v_fp32 = _mm256_loadu_ps(ptr); __m128h _v_fp16 = _mm256_cvtxps_ph(_v_fp32); _mm_storeu_si128((__m128i*)outptr, (__m128i)_v_fp16); ptr += 8; outptr += 8; } for (; i + 3 < size; i += 4) { __m128 _v_fp32 = _mm_loadu_ps(ptr); __m128h _v_fp16 = _mm_cvtxps_ph(_v_fp32); _mm_storel_epi64((__m128i*)outptr, (__m128i)_v_fp16); ptr += 4; outptr += 4; } #elif __F16C__ for (; i + 7 < size; i += 8) { __m256 _v_fp32 = _mm256_loadu_ps(ptr); __m128i _v_fp16 = _mm256_cvtps_ph(_v_fp32, _MM_FROUND_TRUNC); _mm_storeu_si128((__m128i*)outptr, _v_fp16); ptr += 8; outptr += 8; } for (; i + 3 < size; i += 4) { __m128 _v_fp32 = _mm_loadu_ps(ptr); __m128i _v_fp16 = _mm_cvtps_ph(_v_fp32, _MM_FROUND_TRUNC); _mm_storel_epi64((__m128i*)outptr, _v_fp16); ptr += 4; outptr += 4; } #endif for (; i < size; i++) { *outptr++ = float32_to_float16(*ptr++); } } }
pushq %rbp movq %rsp, %rbp andq $-0x20, %rsp subq $0x380, %rsp # imm = 0x380 movq %rdi, 0x190(%rsp) movq %rsi, 0x188(%rsp) movq %rdx, 0x180(%rsp) movq 0x190(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x17c(%rsp) movq 0x190(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0x178(%rsp) movq 0x190(%rsp), %rax movl 0x34(%rax), %eax movl %eax, 0x174(%rsp) movq 0x190(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0x170(%rsp) movq 0x190(%rsp), %rax movl 0x18(%rax), %eax movl %eax, 0x16c(%rsp) movl 0x17c(%rsp), %eax imull 0x178(%rsp), %eax imull 0x174(%rsp), %eax imull 0x16c(%rsp), %eax movl %eax, 0x168(%rsp) movl $0x0, 0x164(%rsp) movl 0x164(%rsp), %eax cmpl 0x170(%rsp), %eax jge 0x1caebde movq 0x190(%rsp), %rcx movl 0x164(%rsp), %eax leaq 0x110(%rsp), %rdx movq %rdx, 0x1a8(%rsp) movq %rcx, 0x1a0(%rsp) movl %eax, 0x19c(%rsp) movq 0x1a0(%rsp), %rax movq %rax, 0x48(%rsp) movb $0x0, 0x19b(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x19c(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x110(%rsp), %r10 movq %r10, 0x348(%rsp) movl %r9d, 0x344(%rsp) movl %r8d, 0x340(%rsp) movl %edi, 0x33c(%rsp) movq %rsi, 0x330(%rsp) movq %rdx, 0x328(%rsp) movl %ecx, 0x324(%rsp) movq %rax, 0x318(%rsp) movq 0x348(%rsp), %rcx movq %rcx, 0x40(%rsp) movq 0x330(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x328(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x324(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x318(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x344(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x340(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x33c(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x358(%rsp) movl $0x10, 0x354(%rsp) movq 0x358(%rsp), %rax movslq 0x354(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, 
%edx subl 0x354(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x48(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x138(%rsp) cmpl $0x4, 0x28(%rax) jne 0x1cae346 movq 0x48(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x150(%rsp) movb $0x1, 0x19b(%rsp) testb $0x1, 0x19b(%rsp) jne 0x1cae475 leaq 0x110(%rsp), %rax movq %rax, 0x1b0(%rsp) movq 0x1b0(%rsp), %rax movq %rax, 0x250(%rsp) movq 0x250(%rsp), %rax movq %rax, 0x38(%rsp) cmpq $0x0, 0x8(%rax) je 0x1cae41b movq 0x38(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x24c(%rsp) # imm = 0xFFFFFFFF movl 0x24c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x248(%rsp) cmpl $0x1, 0x248(%rsp) jne 0x1cae41b movq 0x38(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1cae3ec movq 0x38(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x1cae3ea jmp 0x1cae419 movq 0x38(%rsp), %rax movq (%rax), %rax movq %rax, 0x258(%rsp) cmpq $0x0, 0x258(%rsp) je 0x1cae417 movq 0x258(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x1cae419 jmp 0x1cae41b movq 0x38(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1cae473 movq %rax, %rdi callq 0x5fc90 jmp 0x1cae475 leaq 0x110(%rsp), %rax movq %rax, 0x288(%rsp) movq 0x288(%rsp), %rax movq (%rax), %rax movq %rax, 0x30(%rsp) leaq 0x110(%rsp), %rax movq %rax, 0x1b8(%rsp) movq 0x1b8(%rsp), %rax movq %rax, 0x240(%rsp) movq 0x240(%rsp), %rax movq %rax, 0x28(%rsp) cmpq $0x0, 0x8(%rax) je 0x1cae554 movq 0x28(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x23c(%rsp) # imm = 0xFFFFFFFF movl 0x23c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x238(%rsp) cmpl $0x1, 0x238(%rsp) jne 0x1cae554 movq 0x28(%rsp), %rax cmpq $0x0, 0x20(%rax) je 
0x1cae525 movq 0x28(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x1cae523 jmp 0x1cae552 movq 0x28(%rsp), %rax movq (%rax), %rax movq %rax, 0x260(%rsp) cmpq $0x0, 0x260(%rsp) je 0x1cae550 movq 0x260(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x1cae552 jmp 0x1cae554 movq 0x28(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1cae5ac movq %rax, %rdi callq 0x5fc90 movq 0x30(%rsp), %rax movq %rax, 0x158(%rsp) movq 0x188(%rsp), %rcx movl 0x164(%rsp), %eax leaq 0xb0(%rsp), %rdx movq %rdx, 0x1e8(%rsp) movq %rcx, 0x1e0(%rsp) movl %eax, 0x1dc(%rsp) movq 0x1e0(%rsp), %rax movq %rax, 0x20(%rsp) movb $0x0, 0x1db(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x1dc(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0xb0(%rsp), %r10 movq %r10, 0x310(%rsp) movl %r9d, 0x30c(%rsp) movl %r8d, 0x308(%rsp) movl %edi, 0x304(%rsp) movq %rsi, 0x2f8(%rsp) movq %rdx, 0x2f0(%rsp) movl %ecx, 0x2ec(%rsp) movq %rax, 0x2e0(%rsp) movq 0x310(%rsp), %rcx movq %rcx, 0x18(%rsp) movq 0x2f8(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x2f0(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x2ec(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x2e0(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x30c(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x308(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x304(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x368(%rsp) movl $0x10, 0x364(%rsp) movq 0x368(%rsp), %rax movslq 0x364(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x364(%rsp), 
%edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rdx movq 0x20(%rsp), %rax movq %rdx, 0x40(%rcx) movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0xd8(%rsp) cmpl $0x4, 0x28(%rax) jne 0x1cae769 movq 0x20(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0xf0(%rsp) movb $0x1, 0x1db(%rsp) testb $0x1, 0x1db(%rsp) jne 0x1cae898 leaq 0xb0(%rsp), %rax movq %rax, 0x1f0(%rsp) movq 0x1f0(%rsp), %rax movq %rax, 0x200(%rsp) movq 0x200(%rsp), %rax movq %rax, 0x10(%rsp) cmpq $0x0, 0x8(%rax) je 0x1cae83e movq 0x10(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x1fc(%rsp) # imm = 0xFFFFFFFF movl 0x1fc(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x1f8(%rsp) cmpl $0x1, 0x1f8(%rsp) jne 0x1cae83e movq 0x10(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1cae80f movq 0x10(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x1cae80d jmp 0x1cae83c movq 0x10(%rsp), %rax movq (%rax), %rax movq %rax, 0x280(%rsp) cmpq $0x0, 0x280(%rsp) je 0x1cae83a movq 0x280(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x1cae83c jmp 0x1cae83e movq 0x10(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1cae896 movq %rax, %rdi callq 0x5fc90 jmp 0x1cae898 leaq 0xb0(%rsp), %rax movq %rax, 0x290(%rsp) movq 0x290(%rsp), %rax movq (%rax), %rax movq %rax, 0x8(%rsp) leaq 0xb0(%rsp), %rax movq %rax, 0x1c8(%rsp) movq 0x1c8(%rsp), %rax movq %rax, 0x220(%rsp) movq 0x220(%rsp), %rax movq %rax, (%rsp) cmpq $0x0, 0x8(%rax) je 0x1cae972 movq (%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x21c(%rsp) # imm = 0xFFFFFFFF movl 0x21c(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x218(%rsp) cmpl $0x1, 0x218(%rsp) jne 0x1cae972 movq (%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1cae944 movq (%rsp), %rax movq (%rax), 
%rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax vzeroupper callq *%rax jmp 0x1cae942 jmp 0x1cae970 movq (%rsp), %rax movq (%rax), %rax movq %rax, 0x270(%rsp) cmpq $0x0, 0x270(%rsp) je 0x1cae96e movq 0x270(%rsp), %rdi vzeroupper callq 0x5e480 jmp 0x1cae970 jmp 0x1cae972 movq (%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1cae9c9 movq %rax, %rdi callq 0x5fc90 movq 0x8(%rsp), %rax movq %rax, 0xf8(%rsp) movl $0x0, 0xac(%rsp) movl 0xac(%rsp), %eax addl $0x7, %eax cmpl 0x168(%rsp), %eax jge 0x1caeaa4 movq 0x158(%rsp), %rax movq %rax, 0x298(%rsp) movq 0x298(%rsp), %rax vmovups (%rax), %ymm0 vmovaps %ymm0, 0x80(%rsp) vmovaps 0x80(%rsp), %ymm0 vcvtps2ph $0x3, %ymm0, %xmm0 vmovdqa %xmm0, 0x70(%rsp) movq 0xf8(%rsp), %rax vmovdqa 0x70(%rsp), %xmm0 movq %rax, 0x2b0(%rsp) vmovdqa %xmm0, 0x2a0(%rsp) vmovdqa 0x2a0(%rsp), %xmm0 movq 0x2b0(%rsp), %rax vmovdqu %xmm0, (%rax) movq 0x158(%rsp), %rax addq $0x20, %rax movq %rax, 0x158(%rsp) movq 0xf8(%rsp), %rax addq $0x10, %rax movq %rax, 0xf8(%rsp) movl 0xac(%rsp), %eax addl $0x8, %eax movl %eax, 0xac(%rsp) jmp 0x1cae9e1 jmp 0x1caeaa6 movl 0xac(%rsp), %eax addl $0x3, %eax cmpl 0x168(%rsp), %eax jge 0x1caeb61 movq 0x158(%rsp), %rax movq %rax, 0x2b8(%rsp) movq 0x2b8(%rsp), %rax vmovups (%rax), %xmm0 vmovaps %xmm0, 0x60(%rsp) vmovaps 0x60(%rsp), %xmm0 vcvtps2ph $0x3, %xmm0, %xmm0 vmovdqa %xmm0, 0x50(%rsp) movq 0xf8(%rsp), %rax vmovdqa 0x50(%rsp), %xmm0 movq %rax, 0x2d8(%rsp) vmovdqa %xmm0, 0x2c0(%rsp) movq 0x2c0(%rsp), %rcx movq 0x2d8(%rsp), %rax movq %rcx, (%rax) movq 0x158(%rsp), %rax addq $0x10, %rax movq %rax, 0x158(%rsp) movq 0xf8(%rsp), %rax addq $0x8, %rax movq %rax, 0xf8(%rsp) movl 0xac(%rsp), %eax addl $0x4, %eax movl %eax, 0xac(%rsp) jmp 0x1caeaa6 jmp 0x1caeb63 movl 0xac(%rsp), %eax cmpl 0x168(%rsp), %eax jge 0x1caebc6 movq 
0x158(%rsp), %rax movq %rax, %rcx addq $0x4, %rcx movq %rcx, 0x158(%rsp) vmovss (%rax), %xmm0 vzeroupper callq 0x68900 movw %ax, %cx movq 0xf8(%rsp), %rax movq %rax, %rdx addq $0x2, %rdx movq %rdx, 0xf8(%rsp) movw %cx, (%rax) movl 0xac(%rsp), %eax addl $0x1, %eax movl %eax, 0xac(%rsp) jmp 0x1caeb63 jmp 0x1caebc8 movl 0x164(%rsp), %eax addl $0x1, %eax movl %eax, 0x164(%rsp) jmp 0x1cae182 movq %rbp, %rsp popq %rbp vzeroupper retq nopw %cs:(%rax,%rax)
/ysh329[P]ncnn/src/layer/x86/cast_fp16.h
ncnn::GRU::GRU()
// Construct a GRU layer with its default flags: the single-blob fast
// path and in-place execution are both disabled.
GRU::GRU()
{
    support_inplace = false;
    one_blob_only = false;
}
subq $0x98, %rsp movq %rdi, 0x38(%rsp) movq 0x38(%rsp), %rdi movq %rdi, 0x18(%rsp) callq 0xad350 movq 0x18(%rsp), %rax leaq 0x1a4531(%rip), %rcx # 0x1ecd868 addq $0x10, %rcx movq %rcx, (%rax) addq $0xe0, %rax movq %rax, 0x40(%rsp) movq 0x40(%rsp), %rax movq %rax, 0x20(%rsp) movq $0x0, (%rax) movq $0x0, 0x8(%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movq $0x0, 0x20(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq 0x18(%rsp), %rax addq $0x128, %rax # imm = 0x128 movq %rax, 0x48(%rsp) movq 0x48(%rsp), %rax movq %rax, 0x10(%rsp) movq $0x0, (%rax) movq $0x0, 0x8(%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movq $0x0, 0x20(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq 0x18(%rsp), %rax addq $0x170, %rax # imm = 0x170 movq %rax, 0x50(%rsp) movq 0x50(%rsp), %rax movq %rax, 0x8(%rsp) movq $0x0, (%rax) movq $0x0, 0x8(%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movq $0x0, 0x20(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq 0x18(%rsp), %rax movb $0x0, 0x8(%rax) movb $0x0, 0x9(%rax) addq $0x98, %rsp retq nop
/ysh329[P]ncnn/src/layer/gru.cpp
ncnn::Convolution1D::forward(ncnn::Mat const&, ncnn::Mat&, ncnn::Option const&) const
int Convolution1D::forward(const Mat& bottom_blob, Mat& top_blob, const Option& opt) const { Mat bottom_blob_bordered; make_padding(bottom_blob, bottom_blob_bordered, opt); if (bottom_blob_bordered.empty()) return -100; const int w = bottom_blob_bordered.w; const size_t elemsize = bottom_blob_bordered.elemsize; const int kernel_extent_w = dilation_w * (kernel_w - 1) + 1; const int outw = (w - kernel_extent_w) / stride_w + 1; top_blob.create(outw, num_output, elemsize, opt.blob_allocator); if (top_blob.empty()) return -100; int ret = convolution1d(bottom_blob_bordered, top_blob, weight_data, bias_data, kernel_w, stride_w, dilation_w, activation_type, activation_params, opt); if (ret != 0) return ret; return 0; }
pushq %rbx subq $0x180, %rsp # imm = 0x180 movq %rdi, 0x108(%rsp) movq %rsi, 0x100(%rsp) movq %rdx, 0xf8(%rsp) movq %rcx, 0xf0(%rsp) movq 0x108(%rsp), %rdi movq %rdi, 0x68(%rsp) leaq 0xa8(%rsp), %rdx movq %rdx, 0x118(%rsp) movq 0x118(%rsp), %rax movq %rax, 0x70(%rsp) movq $0x0, (%rax) movq $0x0, 0x8(%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movq $0x0, 0x20(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq 0x100(%rsp), %rsi movq 0xf0(%rsp), %rcx callq 0x1d45a40 jmp 0x1d4559a leaq 0xa8(%rsp), %rax movq %rax, 0x138(%rsp) movq 0x138(%rsp), %rcx movq %rcx, 0x58(%rsp) movb $0x1, %al cmpq $0x0, (%rcx) movb %al, 0x67(%rsp) je 0x1d455ef movq 0x58(%rsp), %rax movq %rax, 0x170(%rsp) movq 0x170(%rsp), %rcx movq 0x40(%rcx), %rax movslq 0x38(%rcx), %rcx imulq %rcx, %rax cmpq $0x0, %rax sete %al movb %al, 0x67(%rsp) movb 0x67(%rsp), %al movb %al, 0x57(%rsp) movb 0x57(%rsp), %al testb $0x1, %al jne 0x1d45604 jmp 0x1d45749 movl $0xffffff9c, 0x114(%rsp) # imm = 0xFFFFFF9C movl $0x1, 0x98(%rsp) jmp 0x1d4590e movq %rax, %rcx movl %edx, %eax movq %rcx, 0xa0(%rsp) movl %eax, 0x9c(%rsp) leaq 0xa8(%rsp), %rax movq %rax, 0x128(%rsp) movq 0x128(%rsp), %rax movq %rax, 0x148(%rsp) movq 0x148(%rsp), %rax movq %rax, 0x48(%rsp) cmpq $0x0, 0x8(%rax) je 0x1d456ec movq 0x48(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x144(%rsp) # imm = 0xFFFFFFFF movl 0x144(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x140(%rsp) cmpl $0x1, 0x140(%rsp) jne 0x1d456ec movq 0x48(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1d456c0 movq 0x48(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1d456be jmp 0x1d456ea movq 0x48(%rsp), %rax movq (%rax), %rax movq %rax, 0x168(%rsp) cmpq $0x0, 0x168(%rsp) je 0x1d456e8 movq 0x168(%rsp), %rdi callq 0x5e480 jmp 0x1d456ea jmp 0x1d456ec movq 0x48(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) 
movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1d45744 movq %rax, %rdi callq 0x5fc90 jmp 0x1d45a2f movq 0x68(%rsp), %rcx movl 0xd4(%rsp), %eax movl %eax, 0x94(%rsp) movq 0xb8(%rsp), %rax movq %rax, 0x88(%rsp) movl 0xd4(%rcx), %edx movl 0xd8(%rcx), %eax decl %edx imull %edx, %eax incl %eax movl %eax, 0x84(%rsp) movl 0x94(%rsp), %eax movl 0x84(%rsp), %edx subl %edx, %eax movl 0xdc(%rcx), %ecx cltd idivl %ecx movl %eax, %ecx movq 0x68(%rsp), %rax incl %ecx movl %ecx, 0x80(%rsp) movq 0xf8(%rsp), %rdi movl 0x80(%rsp), %esi movl 0xd0(%rax), %edx movq 0x88(%rsp), %rcx movq 0xf0(%rsp), %rax movq 0x8(%rax), %r8 callq 0x652c0 jmp 0x1d457df movq 0xf8(%rsp), %rax movq %rax, 0x130(%rsp) movq 0x130(%rsp), %rcx movq %rcx, 0x38(%rsp) movb $0x1, %al cmpq $0x0, (%rcx) movb %al, 0x47(%rsp) je 0x1d45834 movq 0x38(%rsp), %rax movq %rax, 0x178(%rsp) movq 0x178(%rsp), %rcx movq 0x40(%rcx), %rax movslq 0x38(%rcx), %rcx imulq %rcx, %rax cmpq $0x0, %rax sete %al movb %al, 0x47(%rsp) movb 0x47(%rsp), %al movb %al, 0x37(%rsp) movb 0x37(%rsp), %al testb $0x1, %al jne 0x1d45846 jmp 0x1d45861 movl $0xffffff9c, 0x114(%rsp) # imm = 0xFFFFFF9C movl $0x1, 0x98(%rsp) jmp 0x1d4590e movq 0x68(%rsp), %r11 movq 0xf8(%rsp), %rsi movq %r11, %rdx addq $0x148, %rdx # imm = 0x148 movq %r11, %rcx addq $0x190, %rcx # imm = 0x190 movl 0xdc(%r11), %r9d movl 0xd4(%r11), %r8d movl 0xd8(%r11), %edi movl 0xf4(%r11), %r10d addq $0xf8, %r11 movq 0xf0(%rsp), %rbx movq %rsp, %rax movq %rbx, 0x18(%rax) movq %r11, 0x10(%rax) movl %r10d, 0x8(%rax) movl %edi, (%rax) leaq 0xa8(%rsp), %rdi callq 0x1d45a80 movl %eax, 0x30(%rsp) jmp 0x1d458d1 movl 0x30(%rsp), %eax movl %eax, 0x7c(%rsp) cmpl $0x0, 0x7c(%rsp) je 0x1d458f8 movl 0x7c(%rsp), %eax movl %eax, 0x114(%rsp) movl $0x1, 0x98(%rsp) jmp 0x1d4590e movl $0x0, 0x114(%rsp) movl $0x1, 0x98(%rsp) leaq 0xa8(%rsp), %rax movq %rax, 0x120(%rsp) movq 0x120(%rsp), %rax movq 
%rax, 0x158(%rsp) movq 0x158(%rsp), %rax movq %rax, 0x28(%rsp) cmpq $0x0, 0x8(%rax) je 0x1d459c7 movq 0x28(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x154(%rsp) # imm = 0xFFFFFFFF movl 0x154(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x150(%rsp) cmpl $0x1, 0x150(%rsp) jne 0x1d459c7 movq 0x28(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1d4599b movq 0x28(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1d45999 jmp 0x1d459c5 movq 0x28(%rsp), %rax movq (%rax), %rax movq %rax, 0x160(%rsp) cmpq $0x0, 0x160(%rsp) je 0x1d459c3 movq 0x160(%rsp), %rdi callq 0x5e480 jmp 0x1d459c5 jmp 0x1d459c7 movq 0x28(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1d45a1f movq %rax, %rdi callq 0x5fc90 movl 0x114(%rsp), %eax addq $0x180, %rsp # imm = 0x180 popq %rbx retq movq 0xa0(%rsp), %rdi callq 0x5e3b0 nopl (%rax)
/ysh329[P]ncnn/src/layer/convolution1d.cpp
ncnn::Convolution1D_x86_fma::forward(std::vector<ncnn::Mat, std::allocator<ncnn::Mat>> const&, std::vector<ncnn::Mat, std::allocator<ncnn::Mat>>&, ncnn::Option const&) const
int Convolution1D_x86_fma::forward(const std::vector<Mat>& bottom_blobs, std::vector<Mat>& top_blobs, const Option& opt) const { const Mat& bottom_blob = bottom_blobs[0]; const Mat& _weight_data = bottom_blobs[1]; Mat& top_blob = top_blobs[0]; const int _kernel_w = _weight_data.w; const int _num_output = _weight_data.c * _weight_data.elempack; Mat weight_data_flattened; flatten(_weight_data, weight_data_flattened, opt); if (weight_data_flattened.empty()) return -100; // weight_data_flattened as pack1 weight_data_flattened.w *= weight_data_flattened.elempack; weight_data_flattened.elemsize /= weight_data_flattened.elempack; weight_data_flattened.elempack = 1; Mat bias_data_flattened; if (bias_term) { const Mat& _bias_data = bottom_blobs[2]; flatten(_bias_data, bias_data_flattened, opt); if (bias_data_flattened.empty()) return -100; // bias_data_flattened as pack1 bias_data_flattened.w *= bias_data_flattened.elempack; bias_data_flattened.elemsize /= bias_data_flattened.elempack; bias_data_flattened.elempack = 1; } ncnn::Layer* op = ncnn::create_layer(ncnn::LayerType::Convolution1D); ncnn::ParamDict pd; pd.set(0, _num_output); pd.set(1, _kernel_w); pd.set(2, dilation_w); pd.set(3, stride_w); pd.set(4, pad_left); pd.set(15, pad_right); pd.set(18, pad_value); pd.set(5, bias_term); pd.set(6, weight_data_flattened.w); pd.set(9, activation_type); pd.set(10, activation_params); op->load_param(pd); ncnn::Mat weights[2]; weights[0] = weight_data_flattened; weights[1] = bias_data_flattened; op->load_model(ncnn::ModelBinFromMatArray(weights)); op->create_pipeline(opt); op->forward(bottom_blob, top_blob, opt); op->destroy_pipeline(opt); delete op; return 0; }
subq $0x448, %rsp # imm = 0x448 movq %rdi, 0x2b0(%rsp) movq %rsi, 0x2a8(%rsp) movq %rdx, 0x2a0(%rsp) movq %rcx, 0x298(%rsp) movq 0x2b0(%rsp), %rax movq %rax, 0xf8(%rsp) movq 0x2a8(%rsp), %rdi xorl %eax, %eax movl %eax, %esi movq %rsi, 0x100(%rsp) callq 0xb5820 movq %rax, 0x290(%rsp) movq 0x2a8(%rsp), %rdi movl $0x1, %esi callq 0xb5820 movq 0x100(%rsp), %rsi movq %rax, 0x288(%rsp) movq 0x2a0(%rsp), %rdi callq 0x98840 movq %rax, 0x280(%rsp) movq 0x288(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x27c(%rsp) movq 0x288(%rsp), %rax movl 0x18(%rax), %ecx movl 0x38(%rax), %eax imull %ecx, %eax movl %eax, 0x278(%rsp) leaq 0x230(%rsp), %rsi movq %rsi, 0x2c0(%rsp) movq 0x2c0(%rsp), %rax movq %rax, 0x108(%rsp) movq $0x0, (%rax) movq $0x0, 0x8(%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movq $0x0, 0x20(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq 0x288(%rsp), %rdi movq 0x298(%rsp), %rdx callq 0x69690 jmp 0x1d8dfc1 leaq 0x230(%rsp), %rax movq %rax, 0x318(%rsp) movq 0x318(%rsp), %rcx movq %rcx, 0xe8(%rsp) movb $0x1, %al cmpq $0x0, (%rcx) movb %al, 0xf7(%rsp) je 0x1d8e022 movq 0xe8(%rsp), %rax movq %rax, 0x438(%rsp) movq 0x438(%rsp), %rcx movq 0x40(%rcx), %rax movslq 0x38(%rcx), %rcx imulq %rcx, %rax cmpq $0x0, %rax sete %al movb %al, 0xf7(%rsp) movb 0xf7(%rsp), %al movb %al, 0xe7(%rsp) movb 0xe7(%rsp), %al testb $0x1, %al jne 0x1d8e03d jmp 0x1d8e071 movl $0xffffff9c, 0x2bc(%rsp) # imm = 0xFFFFFF9C movl $0x1, 0x220(%rsp) jmp 0x1d8efa1 movq %rax, %rcx movl %edx, %eax movq %rcx, 0x228(%rsp) movl %eax, 0x224(%rsp) jmp 0x1d8f0c1 movl 0x248(%rsp), %eax imull 0x25c(%rsp), %eax movl %eax, 0x25c(%rsp) movslq 0x248(%rsp), %rcx movq 0x240(%rsp), %rax xorl %edx, %edx divq %rcx movq %rax, 0x240(%rsp) movl $0x1, 0x248(%rsp) leaq 0x1d8(%rsp), %rax movq %rax, 0x2c8(%rsp) movq 0x2c8(%rsp), %rax movq %rax, 0xd8(%rsp) movq $0x0, (%rax) movq $0x0, 0x8(%rax) movq $0x0, 0x10(%rax) movl $0x0, 
0x18(%rax) movq $0x0, 0x20(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq 0xf8(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx cmpl $0x0, 0xec(%rax,%rcx) je 0x1d8e264 movq 0x2a8(%rsp), %rdi movl $0x2, %esi callq 0xb5820 movq %rax, 0x1d0(%rsp) movq 0x1d0(%rsp), %rdi movq 0x298(%rsp), %rdx leaq 0x1d8(%rsp), %rsi callq 0x69690 jmp 0x1d8e176 leaq 0x1d8(%rsp), %rax movq %rax, 0x310(%rsp) movq 0x310(%rsp), %rcx movq %rcx, 0xc8(%rsp) movb $0x1, %al cmpq $0x0, (%rcx) movb %al, 0xd7(%rsp) je 0x1d8e1d7 movq 0xc8(%rsp), %rax movq %rax, 0x440(%rsp) movq 0x440(%rsp), %rcx movq 0x40(%rcx), %rax movslq 0x38(%rcx), %rcx imulq %rcx, %rax cmpq $0x0, %rax sete %al movb %al, 0xd7(%rsp) movb 0xd7(%rsp), %al movb %al, 0xc7(%rsp) movb 0xc7(%rsp), %al testb $0x1, %al jne 0x1d8e1f2 jmp 0x1d8e226 movl $0xffffff9c, 0x2bc(%rsp) # imm = 0xFFFFFF9C movl $0x1, 0x220(%rsp) jmp 0x1d8ed75 movq %rax, %rcx movl %edx, %eax movq %rcx, 0x228(%rsp) movl %eax, 0x224(%rsp) jmp 0x1d8ee8b movl 0x1f0(%rsp), %eax imull 0x204(%rsp), %eax movl %eax, 0x204(%rsp) movslq 0x1f0(%rsp), %rcx movq 0x1e8(%rsp), %rax xorl %edx, %edx divq %rcx movq %rax, 0x1e8(%rsp) movl $0x1, 0x1f0(%rsp) movl $0x51, %edi callq 0xae160 movq %rax, 0xb8(%rsp) jmp 0x1d8e278 movq 0xb8(%rsp), %rax movq %rax, 0x1c8(%rsp) leaq 0x1b8(%rsp), %rdi callq 0xa0840 jmp 0x1d8e297 movl 0x278(%rsp), %edx leaq 0x1b8(%rsp), %rdi xorl %esi, %esi callq 0xa16d0 jmp 0x1d8e2af movl 0x27c(%rsp), %edx leaq 0x1b8(%rsp), %rdi movl $0x1, %esi callq 0xa16d0 jmp 0x1d8e2ca movq 0xf8(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx movl 0xd8(%rax,%rcx), %edx leaq 0x1b8(%rsp), %rdi movl $0x2, %esi callq 0xa16d0 jmp 0x1d8e2f4 movq 0xf8(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx movl 0xdc(%rax,%rcx), %edx leaq 0x1b8(%rsp), %rdi movl $0x3, %esi callq 0xa16d0 jmp 0x1d8e31e movq 0xf8(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx movl 0xe0(%rax,%rcx), %edx 
leaq 0x1b8(%rsp), %rdi movl $0x4, %esi callq 0xa16d0 jmp 0x1d8e348 movq 0xf8(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx movl 0xe4(%rax,%rcx), %edx leaq 0x1b8(%rsp), %rdi movl $0xf, %esi callq 0xa16d0 jmp 0x1d8e372 movq 0xf8(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx vmovss 0xe8(%rax,%rcx), %xmm0 leaq 0x1b8(%rsp), %rdi movl $0x12, %esi callq 0xa1710 jmp 0x1d8e39e movq 0xf8(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx movl 0xec(%rax,%rcx), %edx leaq 0x1b8(%rsp), %rdi movl $0x5, %esi callq 0xa16d0 jmp 0x1d8e3c8 movl 0x25c(%rsp), %edx leaq 0x1b8(%rsp), %rdi movl $0x6, %esi callq 0xa16d0 jmp 0x1d8e3e3 movq 0xf8(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx movl 0xf4(%rax,%rcx), %edx leaq 0x1b8(%rsp), %rdi movl $0x9, %esi callq 0xa16d0 jmp 0x1d8e40d movq 0xf8(%rsp), %rax movq (%rax), %rcx movq -0x18(%rcx), %rcx leaq 0xf8(%rax,%rcx), %rdx leaq 0x1b8(%rsp), %rdi movl $0xa, %esi callq 0xa1760 jmp 0x1d8e438 movq 0x1c8(%rsp), %rdi movq (%rdi), %rax movq 0x10(%rax), %rax leaq 0x1b8(%rsp), %rsi callq *%rax jmp 0x1d8e453 leaq 0x120(%rsp), %rax movq %rax, %rcx addq $0x90, %rcx movq %rcx, 0xa8(%rsp) movq %rax, 0xb0(%rsp) movq 0xb0(%rsp), %rax movq %rax, 0x98(%rsp) movq %rax, 0x2d0(%rsp) movq 0x2d0(%rsp), %rax movq %rax, 0xa0(%rsp) movq $0x0, (%rax) movq $0x0, 0x8(%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movq $0x0, 0x20(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq 0xa8(%rsp), %rcx movq 0x98(%rsp), %rax addq $0x48, %rax cmpq %rcx, %rax movq %rax, 0xb0(%rsp) jne 0x1d8e475 leaq 0x120(%rsp), %rax movq %rax, 0x350(%rsp) leaq 0x230(%rsp), %rax movq %rax, 0x348(%rsp) movq 0x350(%rsp), %rax movq %rax, 0x90(%rsp) cmpq 0x348(%rsp), %rax jne 0x1d8e562 movq 0x90(%rsp), %rax movq %rax, 0x358(%rsp) jmp 0x1d8e755 movq 0x348(%rsp), %rax cmpq $0x0, 0x8(%rax) je 0x1d8e59a movq 0x348(%rsp), %rax movq 0x8(%rax), %rcx movl $0x1, 0x344(%rsp) movl 0x344(%rsp), %eax 
lock xaddl %eax, (%rcx) movl %eax, 0x340(%rsp) movq 0x90(%rsp), %rax movq %rax, 0x368(%rsp) movq 0x368(%rsp), %rax movq %rax, 0x88(%rsp) cmpq $0x0, 0x8(%rax) je 0x1d8e652 movq 0x88(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x364(%rsp) # imm = 0xFFFFFFFF movl 0x364(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x360(%rsp) cmpl $0x1, 0x360(%rsp) jne 0x1d8e652 movq 0x88(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1d8e623 movq 0x88(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1d8e621 jmp 0x1d8e650 movq 0x88(%rsp), %rax movq (%rax), %rax movq %rax, 0x430(%rsp) cmpq $0x0, 0x430(%rsp) je 0x1d8e64e movq 0x430(%rsp), %rdi callq 0x5e480 jmp 0x1d8e650 jmp 0x1d8e652 movq 0x88(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) movq 0x90(%rsp), %rax movq 0x348(%rsp), %rcx movq (%rcx), %rcx movq %rcx, (%rax) movq 0x348(%rsp), %rcx movq 0x8(%rcx), %rcx movq %rcx, 0x8(%rax) movq 0x348(%rsp), %rcx movq 0x10(%rcx), %rcx movq %rcx, 0x10(%rax) movq 0x348(%rsp), %rcx movl 0x18(%rcx), %ecx movl %ecx, 0x18(%rax) movq 0x348(%rsp), %rcx movq 0x20(%rcx), %rcx movq %rcx, 0x20(%rax) movq 0x348(%rsp), %rcx movl 0x28(%rcx), %ecx movl %ecx, 0x28(%rax) movq 0x348(%rsp), %rcx movl 0x2c(%rcx), %ecx movl %ecx, 0x2c(%rax) movq 0x348(%rsp), %rcx movl 0x30(%rcx), %ecx movl %ecx, 0x30(%rax) movq 0x348(%rsp), %rcx movl 0x34(%rcx), %ecx movl %ecx, 0x34(%rax) movq 0x348(%rsp), %rcx movl 0x38(%rcx), %ecx movl %ecx, 0x38(%rax) movq 0x348(%rsp), %rcx movq 0x40(%rcx), %rcx movq %rcx, 0x40(%rax) movq %rax, 0x358(%rsp) leaq 0x120(%rsp), %rax addq $0x48, %rax movq %rax, 0x330(%rsp) leaq 0x1d8(%rsp), %rax movq %rax, 0x328(%rsp) movq 0x330(%rsp), %rax movq %rax, 0x80(%rsp) cmpq 0x328(%rsp), %rax jne 0x1d8e7a8 movq 0x80(%rsp), %rax movq %rax, 0x338(%rsp) jmp 0x1d8e989 movq 
0x328(%rsp), %rax cmpq $0x0, 0x8(%rax) je 0x1d8e7e0 movq 0x328(%rsp), %rax movq 0x8(%rax), %rcx movl $0x1, 0x324(%rsp) movl 0x324(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x320(%rsp) movq 0x80(%rsp), %rax movq %rax, 0x378(%rsp) movq 0x378(%rsp), %rax movq %rax, 0x78(%rsp) cmpq $0x0, 0x8(%rax) je 0x1d8e889 movq 0x78(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x374(%rsp) # imm = 0xFFFFFFFF movl 0x374(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x370(%rsp) cmpl $0x1, 0x370(%rsp) jne 0x1d8e889 movq 0x78(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1d8e85d movq 0x78(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1d8e85b jmp 0x1d8e887 movq 0x78(%rsp), %rax movq (%rax), %rax movq %rax, 0x428(%rsp) cmpq $0x0, 0x428(%rsp) je 0x1d8e885 movq 0x428(%rsp), %rdi callq 0x5e480 jmp 0x1d8e887 jmp 0x1d8e889 movq 0x78(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) movq 0x80(%rsp), %rax movq 0x328(%rsp), %rcx movq (%rcx), %rcx movq %rcx, (%rax) movq 0x328(%rsp), %rcx movq 0x8(%rcx), %rcx movq %rcx, 0x8(%rax) movq 0x328(%rsp), %rcx movq 0x10(%rcx), %rcx movq %rcx, 0x10(%rax) movq 0x328(%rsp), %rcx movl 0x18(%rcx), %ecx movl %ecx, 0x18(%rax) movq 0x328(%rsp), %rcx movq 0x20(%rcx), %rcx movq %rcx, 0x20(%rax) movq 0x328(%rsp), %rcx movl 0x28(%rcx), %ecx movl %ecx, 0x28(%rax) movq 0x328(%rsp), %rcx movl 0x2c(%rcx), %ecx movl %ecx, 0x2c(%rax) movq 0x328(%rsp), %rcx movl 0x30(%rcx), %ecx movl %ecx, 0x30(%rax) movq 0x328(%rsp), %rcx movl 0x34(%rcx), %ecx movl %ecx, 0x34(%rax) movq 0x328(%rsp), %rcx movl 0x38(%rcx), %ecx movl %ecx, 0x38(%rax) movq 0x328(%rsp), %rcx movq 0x40(%rcx), %rcx movq %rcx, 0x40(%rax) movq %rax, 0x338(%rsp) movq 0x1c8(%rsp), %rax movq %rax, 0x70(%rsp) leaq 0x110(%rsp), %rdi leaq 0x120(%rsp), %rsi callq 0x89470 jmp 
0x1d8e9ad movq 0x70(%rsp), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax leaq 0x110(%rsp), %rsi callq *%rax jmp 0x1d8e9c5 leaq 0x110(%rsp), %rdi callq 0x89520 movq 0x1c8(%rsp), %rdi movq 0x298(%rsp), %rsi movq (%rdi), %rax movq 0x20(%rax), %rax callq *%rax jmp 0x1d8e9ed movq 0x1c8(%rsp), %rdi movq 0x290(%rsp), %rsi movq 0x280(%rsp), %rdx movq 0x298(%rsp), %rcx movq (%rdi), %rax movq 0x38(%rax), %rax callq *%rax jmp 0x1d8ea18 movq 0x1c8(%rsp), %rdi movq 0x298(%rsp), %rsi movq (%rdi), %rax movq 0x28(%rax), %rax callq *%rax jmp 0x1d8ea33 movq 0x1c8(%rsp), %rax movq %rax, 0x68(%rsp) cmpq $0x0, %rax je 0x1d8ea51 movq 0x68(%rsp), %rdi movq (%rdi), %rax callq *0x8(%rax) movl $0x0, 0x2bc(%rsp) movl $0x1, 0x220(%rsp) leaq 0x120(%rsp), %rax movq %rax, 0x58(%rsp) addq $0x90, %rax movq %rax, 0x60(%rsp) jmp 0x1d8ead9 movq %rax, %rcx movl %edx, %eax movq %rcx, 0x228(%rsp) movl %eax, 0x224(%rsp) jmp 0x1d8ed63 movq %rax, %rcx movl %edx, %eax movq %rcx, 0x228(%rsp) movl %eax, 0x224(%rsp) jmp 0x1d8ec1a movq %rax, %rcx movl %edx, %eax movq %rcx, 0x228(%rsp) movl %eax, 0x224(%rsp) leaq 0x110(%rsp), %rdi callq 0x89520 jmp 0x1d8ec1a movq 0x60(%rsp), %rax addq $-0x48, %rax movq %rax, 0x48(%rsp) movq %rax, 0x2e0(%rsp) movq 0x2e0(%rsp), %rax movq %rax, 0x3d8(%rsp) movq 0x3d8(%rsp), %rax movq %rax, 0x50(%rsp) cmpq $0x0, 0x8(%rax) je 0x1d8eb98 movq 0x50(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x3d4(%rsp) # imm = 0xFFFFFFFF movl 0x3d4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x3d0(%rsp) cmpl $0x1, 0x3d0(%rsp) jne 0x1d8eb98 movq 0x50(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1d8eb6c movq 0x50(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1d8eb6a jmp 0x1d8eb96 movq 0x50(%rsp), %rax movq (%rax), %rax movq %rax, 0x3f8(%rsp) cmpq $0x0, 0x3f8(%rsp) je 0x1d8eb94 movq 0x3f8(%rsp), %rdi callq 0x5e480 jmp 0x1d8eb96 jmp 0x1d8eb98 movq 0x50(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 
0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1d8ebf0 movq %rax, %rdi callq 0x5fc90 movq 0x48(%rsp), %rax movq 0x58(%rsp), %rcx cmpq %rcx, %rax movq %rax, 0x60(%rsp) jne 0x1d8ead9 leaq 0x1b8(%rsp), %rdi callq 0xa0e10 jmp 0x1d8ed75 leaq 0x120(%rsp), %rax movq %rax, 0x38(%rsp) addq $0x90, %rax movq %rax, 0x40(%rsp) movq 0x40(%rsp), %rax addq $-0x48, %rax movq %rax, 0x28(%rsp) movq %rax, 0x2e8(%rsp) movq 0x2e8(%rsp), %rax movq %rax, 0x3c8(%rsp) movq 0x3c8(%rsp), %rax movq %rax, 0x30(%rsp) cmpq $0x0, 0x8(%rax) je 0x1d8ecf1 movq 0x30(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x3c4(%rsp) # imm = 0xFFFFFFFF movl 0x3c4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x3c0(%rsp) cmpl $0x1, 0x3c0(%rsp) jne 0x1d8ecf1 movq 0x30(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1d8ecc5 movq 0x30(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1d8ecc3 jmp 0x1d8ecef movq 0x30(%rsp), %rax movq (%rax), %rax movq %rax, 0x400(%rsp) cmpq $0x0, 0x400(%rsp) je 0x1d8eced movq 0x400(%rsp), %rdi callq 0x5e480 jmp 0x1d8ecef jmp 0x1d8ecf1 movq 0x30(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1d8ed49 movq %rax, %rdi callq 0x5fc90 movq 0x28(%rsp), %rax movq 0x38(%rsp), %rcx cmpq %rcx, %rax movq %rax, 0x40(%rsp) jne 0x1d8ec32 jmp 0x1d8ed63 leaq 0x1b8(%rsp), %rdi callq 0xa0e10 jmp 0x1d8ee8b leaq 0x1d8(%rsp), %rax movq %rax, 0x2f0(%rsp) movq 0x2f0(%rsp), %rax movq %rax, 0x3b8(%rsp) movq 0x3b8(%rsp), %rax movq %rax, 0x20(%rsp) cmpq $0x0, 0x8(%rax) je 0x1d8ee2e movq 0x20(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x3b4(%rsp) # imm = 0xFFFFFFFF movl 0x3b4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x3b0(%rsp) cmpl $0x1, 0x3b0(%rsp) jne 
0x1d8ee2e movq 0x20(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1d8ee02 movq 0x20(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1d8ee00 jmp 0x1d8ee2c movq 0x20(%rsp), %rax movq (%rax), %rax movq %rax, 0x408(%rsp) cmpq $0x0, 0x408(%rsp) je 0x1d8ee2a movq 0x408(%rsp), %rdi callq 0x5e480 jmp 0x1d8ee2c jmp 0x1d8ee2e movq 0x20(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1d8ee86 movq %rax, %rdi callq 0x5fc90 jmp 0x1d8efa1 leaq 0x1d8(%rsp), %rax movq %rax, 0x2f8(%rsp) movq 0x2f8(%rsp), %rax movq %rax, 0x3a8(%rsp) movq 0x3a8(%rsp), %rax movq %rax, 0x18(%rsp) cmpq $0x0, 0x8(%rax) je 0x1d8ef44 movq 0x18(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x3a4(%rsp) # imm = 0xFFFFFFFF movl 0x3a4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x3a0(%rsp) cmpl $0x1, 0x3a0(%rsp) jne 0x1d8ef44 movq 0x18(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1d8ef18 movq 0x18(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1d8ef16 jmp 0x1d8ef42 movq 0x18(%rsp), %rax movq (%rax), %rax movq %rax, 0x410(%rsp) cmpq $0x0, 0x410(%rsp) je 0x1d8ef40 movq 0x410(%rsp), %rdi callq 0x5e480 jmp 0x1d8ef42 jmp 0x1d8ef44 movq 0x18(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1d8ef9c movq %rax, %rdi callq 0x5fc90 jmp 0x1d8f0c1 leaq 0x230(%rsp), %rax movq %rax, 0x300(%rsp) movq 0x300(%rsp), %rax movq %rax, 0x398(%rsp) movq 0x398(%rsp), %rax movq %rax, 0x10(%rsp) cmpq $0x0, 0x8(%rax) je 0x1d8f05a movq 0x10(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x394(%rsp) # imm = 0xFFFFFFFF movl 0x394(%rsp), %eax lock xaddl %eax, 
(%rcx) movl %eax, 0x390(%rsp) cmpl $0x1, 0x390(%rsp) jne 0x1d8f05a movq 0x10(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1d8f02e movq 0x10(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1d8f02c jmp 0x1d8f058 movq 0x10(%rsp), %rax movq (%rax), %rax movq %rax, 0x418(%rsp) cmpq $0x0, 0x418(%rsp) je 0x1d8f056 movq 0x418(%rsp), %rdi callq 0x5e480 jmp 0x1d8f058 jmp 0x1d8f05a movq 0x10(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1d8f0b2 movq %rax, %rdi callq 0x5fc90 movl 0x2bc(%rsp), %eax addq $0x448, %rsp # imm = 0x448 retq leaq 0x230(%rsp), %rax movq %rax, 0x308(%rsp) movq 0x308(%rsp), %rax movq %rax, 0x388(%rsp) movq 0x388(%rsp), %rax movq %rax, 0x8(%rsp) cmpq $0x0, 0x8(%rax) je 0x1d8f17a movq 0x8(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x384(%rsp) # imm = 0xFFFFFFFF movl 0x384(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x380(%rsp) cmpl $0x1, 0x380(%rsp) jne 0x1d8f17a movq 0x8(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1d8f14e movq 0x8(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1d8f14c jmp 0x1d8f178 movq 0x8(%rsp), %rax movq (%rax), %rax movq %rax, 0x420(%rsp) cmpq $0x0, 0x420(%rsp) je 0x1d8f176 movq 0x420(%rsp), %rdi callq 0x5e480 jmp 0x1d8f178 jmp 0x1d8f17a movq 0x8(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1d8f1d2 movq %rax, %rdi callq 0x5fc90 jmp 0x1d8f1d4 movq 0x228(%rsp), %rdi callq 0x5e3b0 nopw %cs:(%rax,%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/convolution1d_x86_fma.cpp
ncnn::Pooling3D::load_param(ncnn::ParamDict const&)
int Pooling3D::load_param(const ParamDict& pd) { pooling_type = pd.get(0, 0); kernel_w = pd.get(1, 0); kernel_h = pd.get(11, kernel_w); kernel_d = pd.get(21, kernel_w); stride_w = pd.get(2, 1); stride_h = pd.get(12, stride_w); stride_d = pd.get(22, stride_w); pad_left = pd.get(3, 0); pad_right = pd.get(14, pad_left); pad_top = pd.get(13, pad_left); pad_bottom = pd.get(15, pad_top); pad_front = pd.get(23, pad_left); pad_behind = pd.get(16, pad_front); global_pooling = pd.get(4, 0); pad_mode = pd.get(5, 0); avgpool_count_include_pad = pd.get(6, 0); adaptive_pooling = pd.get(7, 0); out_w = pd.get(8, 0); out_h = pd.get(18, out_w); out_d = pd.get(28, out_w); return 0; }
subq $0x18, %rsp movq %rdi, 0x10(%rsp) movq %rsi, 0x8(%rsp) movq 0x10(%rsp), %rax movq %rax, (%rsp) movq 0x8(%rsp), %rdi xorl %edx, %edx movl %edx, %esi callq 0xa14b0 movl %eax, %ecx movq (%rsp), %rax movl %ecx, 0xd0(%rax) movq 0x8(%rsp), %rdi movl $0x1, %esi xorl %edx, %edx callq 0xa14b0 movl %eax, %ecx movq (%rsp), %rax movl %ecx, 0xd4(%rax) movq 0x8(%rsp), %rdi movl 0xd4(%rax), %edx movl $0xb, %esi callq 0xa14b0 movl %eax, %ecx movq (%rsp), %rax movl %ecx, 0xd8(%rax) movq 0x8(%rsp), %rdi movl 0xd4(%rax), %edx movl $0x15, %esi callq 0xa14b0 movl %eax, %ecx movq (%rsp), %rax movl %ecx, 0xdc(%rax) movq 0x8(%rsp), %rdi movl $0x2, %esi movl $0x1, %edx callq 0xa14b0 movl %eax, %ecx movq (%rsp), %rax movl %ecx, 0xe0(%rax) movq 0x8(%rsp), %rdi movl 0xe0(%rax), %edx movl $0xc, %esi callq 0xa14b0 movl %eax, %ecx movq (%rsp), %rax movl %ecx, 0xe4(%rax) movq 0x8(%rsp), %rdi movl 0xe0(%rax), %edx movl $0x16, %esi callq 0xa14b0 movl %eax, %ecx movq (%rsp), %rax movl %ecx, 0xe8(%rax) movq 0x8(%rsp), %rdi movl $0x3, %esi xorl %edx, %edx callq 0xa14b0 movl %eax, %ecx movq (%rsp), %rax movl %ecx, 0xec(%rax) movq 0x8(%rsp), %rdi movl 0xec(%rax), %edx movl $0xe, %esi callq 0xa14b0 movl %eax, %ecx movq (%rsp), %rax movl %ecx, 0xf0(%rax) movq 0x8(%rsp), %rdi movl 0xec(%rax), %edx movl $0xd, %esi callq 0xa14b0 movl %eax, %ecx movq (%rsp), %rax movl %ecx, 0xf4(%rax) movq 0x8(%rsp), %rdi movl 0xf4(%rax), %edx movl $0xf, %esi callq 0xa14b0 movl %eax, %ecx movq (%rsp), %rax movl %ecx, 0xf8(%rax) movq 0x8(%rsp), %rdi movl 0xec(%rax), %edx movl $0x17, %esi callq 0xa14b0 movl %eax, %ecx movq (%rsp), %rax movl %ecx, 0xfc(%rax) movq 0x8(%rsp), %rdi movl 0xfc(%rax), %edx movl $0x10, %esi callq 0xa14b0 movl %eax, %ecx movq (%rsp), %rax movl %ecx, 0x100(%rax) movq 0x8(%rsp), %rdi movl $0x4, %esi xorl %edx, %edx callq 0xa14b0 movl %eax, %ecx movq (%rsp), %rax movl %ecx, 0x104(%rax) movq 0x8(%rsp), %rdi movl $0x5, %esi xorl %edx, %edx callq 0xa14b0 movl %eax, %ecx movq (%rsp), %rax movl %ecx, 
0x108(%rax) movq 0x8(%rsp), %rdi movl $0x6, %esi xorl %edx, %edx callq 0xa14b0 movl %eax, %ecx movq (%rsp), %rax movl %ecx, 0x10c(%rax) movq 0x8(%rsp), %rdi movl $0x7, %esi xorl %edx, %edx callq 0xa14b0 movl %eax, %ecx movq (%rsp), %rax movl %ecx, 0x110(%rax) movq 0x8(%rsp), %rdi movl $0x8, %esi xorl %edx, %edx callq 0xa14b0 movl %eax, %ecx movq (%rsp), %rax movl %ecx, 0x114(%rax) movq 0x8(%rsp), %rdi movl 0x114(%rax), %edx movl $0x12, %esi callq 0xa14b0 movl %eax, %ecx movq (%rsp), %rax movl %ecx, 0x118(%rax) movq 0x8(%rsp), %rdi movl 0x114(%rax), %edx movl $0x1c, %esi callq 0xa14b0 movl %eax, %ecx movq (%rsp), %rax movl %ecx, 0x11c(%rax) xorl %eax, %eax addq $0x18, %rsp retq nop
/ysh329[P]ncnn/src/layer/pooling3d.cpp
ncnn::DeformableConv2D::load_param(ncnn::ParamDict const&)
int DeformableConv2D::load_param(const ParamDict& pd) { num_output = pd.get(0, 0); kernel_w = pd.get(1, 0); kernel_h = pd.get(11, kernel_w); dilation_w = pd.get(2, 1); dilation_h = pd.get(12, dilation_w); stride_w = pd.get(3, 1); stride_h = pd.get(13, stride_w); pad_left = pd.get(4, 0); pad_right = pd.get(15, pad_left); pad_top = pd.get(14, pad_left); pad_bottom = pd.get(16, pad_top); bias_term = pd.get(5, 0); weight_data_size = pd.get(6, 0); activation_type = pd.get(9, 0); activation_params = pd.get(10, Mat()); return 0; }
subq $0x1b8, %rsp # imm = 0x1B8 movq %rdi, 0xf0(%rsp) movq %rsi, 0xe8(%rsp) movq 0xf0(%rsp), %rax movq %rax, 0x38(%rsp) movq 0xe8(%rsp), %rdi xorl %edx, %edx movl %edx, 0x34(%rsp) movl %edx, %esi callq 0xa14b0 movl 0x34(%rsp), %edx movl %eax, %ecx movq 0x38(%rsp), %rax movl %ecx, 0xd0(%rax) movq 0xe8(%rsp), %rdi movl $0x1, %esi movl %esi, 0x30(%rsp) callq 0xa14b0 movl %eax, %ecx movq 0x38(%rsp), %rax movl %ecx, 0xd4(%rax) movq 0xe8(%rsp), %rdi movl 0xd4(%rax), %edx movl $0xb, %esi callq 0xa14b0 movl 0x30(%rsp), %edx movl %eax, %ecx movq 0x38(%rsp), %rax movl %ecx, 0xd8(%rax) movq 0xe8(%rsp), %rdi movl $0x2, %esi callq 0xa14b0 movl %eax, %ecx movq 0x38(%rsp), %rax movl %ecx, 0xdc(%rax) movq 0xe8(%rsp), %rdi movl 0xdc(%rax), %edx movl $0xc, %esi callq 0xa14b0 movl 0x30(%rsp), %edx movl %eax, %ecx movq 0x38(%rsp), %rax movl %ecx, 0xe0(%rax) movq 0xe8(%rsp), %rdi movl $0x3, %esi callq 0xa14b0 movl %eax, %ecx movq 0x38(%rsp), %rax movl %ecx, 0xe4(%rax) movq 0xe8(%rsp), %rdi movl 0xe4(%rax), %edx movl $0xd, %esi callq 0xa14b0 movl 0x34(%rsp), %edx movl %eax, %ecx movq 0x38(%rsp), %rax movl %ecx, 0xe8(%rax) movq 0xe8(%rsp), %rdi movl $0x4, %esi callq 0xa14b0 movl %eax, %ecx movq 0x38(%rsp), %rax movl %ecx, 0xec(%rax) movq 0xe8(%rsp), %rdi movl 0xec(%rax), %edx movl $0xf, %esi callq 0xa14b0 movl %eax, %ecx movq 0x38(%rsp), %rax movl %ecx, 0xf0(%rax) movq 0xe8(%rsp), %rdi movl 0xec(%rax), %edx movl $0xe, %esi callq 0xa14b0 movl %eax, %ecx movq 0x38(%rsp), %rax movl %ecx, 0xf4(%rax) movq 0xe8(%rsp), %rdi movl 0xf4(%rax), %edx movl $0x10, %esi callq 0xa14b0 movl 0x34(%rsp), %edx movl %eax, %ecx movq 0x38(%rsp), %rax movl %ecx, 0xf8(%rax) movq 0xe8(%rsp), %rdi movl $0x5, %esi callq 0xa14b0 movl 0x34(%rsp), %edx movl %eax, %ecx movq 0x38(%rsp), %rax movl %ecx, 0xfc(%rax) movq 0xe8(%rsp), %rdi movl $0x6, %esi callq 0xa14b0 movl 0x34(%rsp), %edx movl %eax, %ecx movq 0x38(%rsp), %rax movl %ecx, 0x100(%rax) movq 0xe8(%rsp), %rdi movl $0x9, %esi callq 0xa14b0 movl %eax, %ecx movq 
0x38(%rsp), %rax movl %ecx, 0x104(%rax) movq 0xe8(%rsp), %rsi leaq 0x58(%rsp), %rcx movq %rcx, 0xf8(%rsp) movq 0xf8(%rsp), %rax movq %rax, 0x40(%rsp) movq $0x0, (%rax) movq $0x0, 0x8(%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movq $0x0, 0x20(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) leaq 0xa0(%rsp), %rdi movl $0xa, %edx callq 0xa1580 jmp 0x1de8744 movq 0x38(%rsp), %rax addq $0x108, %rax # imm = 0x108 movq %rax, 0x130(%rsp) leaq 0xa0(%rsp), %rax movq %rax, 0x128(%rsp) movq 0x130(%rsp), %rax movq %rax, 0x28(%rsp) cmpq 0x128(%rsp), %rax jne 0x1de8790 movq 0x28(%rsp), %rax movq %rax, 0x138(%rsp) jmp 0x1de896b movq 0x128(%rsp), %rax cmpq $0x0, 0x8(%rax) je 0x1de87c8 movq 0x128(%rsp), %rax movq 0x8(%rax), %rcx movl $0x1, 0x124(%rsp) movl 0x124(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x120(%rsp) movq 0x28(%rsp), %rax movq %rax, 0x148(%rsp) movq 0x148(%rsp), %rax movq %rax, 0x20(%rsp) cmpq $0x0, 0x8(%rax) je 0x1de886e movq 0x20(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x144(%rsp) # imm = 0xFFFFFFFF movl 0x144(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x140(%rsp) cmpl $0x1, 0x140(%rsp) jne 0x1de886e movq 0x20(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1de8842 movq 0x20(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1de8840 jmp 0x1de886c movq 0x20(%rsp), %rax movq (%rax), %rax movq %rax, 0x1b0(%rsp) cmpq $0x0, 0x1b0(%rsp) je 0x1de886a movq 0x1b0(%rsp), %rdi callq 0x5e480 jmp 0x1de886c jmp 0x1de886e movq 0x20(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) movq 0x28(%rsp), %rax movq 0x128(%rsp), %rcx movq (%rcx), %rcx movq %rcx, (%rax) movq 0x128(%rsp), %rcx movq 0x8(%rcx), %rcx movq %rcx, 0x8(%rax) movq 0x128(%rsp), 
%rcx movq 0x10(%rcx), %rcx movq %rcx, 0x10(%rax) movq 0x128(%rsp), %rcx movl 0x18(%rcx), %ecx movl %ecx, 0x18(%rax) movq 0x128(%rsp), %rcx movq 0x20(%rcx), %rcx movq %rcx, 0x20(%rax) movq 0x128(%rsp), %rcx movl 0x28(%rcx), %ecx movl %ecx, 0x28(%rax) movq 0x128(%rsp), %rcx movl 0x2c(%rcx), %ecx movl %ecx, 0x2c(%rax) movq 0x128(%rsp), %rcx movl 0x30(%rcx), %ecx movl %ecx, 0x30(%rax) movq 0x128(%rsp), %rcx movl 0x34(%rcx), %ecx movl %ecx, 0x34(%rax) movq 0x128(%rsp), %rcx movl 0x38(%rcx), %ecx movl %ecx, 0x38(%rax) movq 0x128(%rsp), %rcx movq 0x40(%rcx), %rcx movq %rcx, 0x40(%rax) movq %rax, 0x138(%rsp) leaq 0xa0(%rsp), %rax movq %rax, 0x100(%rsp) movq 0x100(%rsp), %rax movq %rax, 0x188(%rsp) movq 0x188(%rsp), %rax movq %rax, 0x18(%rsp) cmpq $0x0, 0x8(%rax) je 0x1de8a24 movq 0x18(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x184(%rsp) # imm = 0xFFFFFFFF movl 0x184(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x180(%rsp) cmpl $0x1, 0x180(%rsp) jne 0x1de8a24 movq 0x18(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1de89f8 movq 0x18(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1de89f6 jmp 0x1de8a22 movq 0x18(%rsp), %rax movq (%rax), %rax movq %rax, 0x190(%rsp) cmpq $0x0, 0x190(%rsp) je 0x1de8a20 movq 0x190(%rsp), %rdi callq 0x5e480 jmp 0x1de8a22 jmp 0x1de8a24 movq 0x18(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1de8a7c movq %rax, %rdi callq 0x5fc90 leaq 0x58(%rsp), %rax movq %rax, 0x110(%rsp) movq 0x110(%rsp), %rax movq %rax, 0x168(%rsp) movq 0x168(%rsp), %rax movq %rax, 0x10(%rsp) cmpq $0x0, 0x8(%rax) je 0x1de8b32 movq 0x10(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x164(%rsp) # imm = 0xFFFFFFFF movl 0x164(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x160(%rsp) cmpl $0x1, 0x160(%rsp) jne 0x1de8b32 movq 
0x10(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1de8b06 movq 0x10(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1de8b04 jmp 0x1de8b30 movq 0x10(%rsp), %rax movq (%rax), %rax movq %rax, 0x1a0(%rsp) cmpq $0x0, 0x1a0(%rsp) je 0x1de8b2e movq 0x1a0(%rsp), %rdi callq 0x5e480 jmp 0x1de8b30 jmp 0x1de8b32 movq 0x10(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1de8b8a movq %rax, %rdi callq 0x5fc90 xorl %eax, %eax addq $0x1b8, %rsp # imm = 0x1B8 retq movq %rax, %rcx movl %edx, %eax movq %rcx, 0x50(%rsp) movl %eax, 0x4c(%rsp) jmp 0x1de8cc8 movq %rax, %rcx movl %edx, %eax movq %rcx, 0x50(%rsp) movl %eax, 0x4c(%rsp) leaq 0xa0(%rsp), %rax movq %rax, 0x108(%rsp) movq 0x108(%rsp), %rax movq %rax, 0x178(%rsp) movq 0x178(%rsp), %rax movq %rax, 0x8(%rsp) cmpq $0x0, 0x8(%rax) je 0x1de8c6e movq 0x8(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x174(%rsp) # imm = 0xFFFFFFFF movl 0x174(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x170(%rsp) cmpl $0x1, 0x170(%rsp) jne 0x1de8c6e movq 0x8(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1de8c42 movq 0x8(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1de8c40 jmp 0x1de8c6c movq 0x8(%rsp), %rax movq (%rax), %rax movq %rax, 0x198(%rsp) cmpq $0x0, 0x198(%rsp) je 0x1de8c6a movq 0x198(%rsp), %rdi callq 0x5e480 jmp 0x1de8c6c jmp 0x1de8c6e movq 0x8(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1de8cc6 movq %rax, %rdi callq 0x5fc90 jmp 0x1de8cc8 leaq 0x58(%rsp), %rax movq %rax, 0x118(%rsp) movq 0x118(%rsp), %rax movq %rax, 0x158(%rsp) movq 0x158(%rsp), %rax movq 
%rax, (%rsp) cmpq $0x0, 0x8(%rax) je 0x1de8d75 movq (%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x154(%rsp) # imm = 0xFFFFFFFF movl 0x154(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x150(%rsp) cmpl $0x1, 0x150(%rsp) jne 0x1de8d75 movq (%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1de8d4a movq (%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1de8d48 jmp 0x1de8d73 movq (%rsp), %rax movq (%rax), %rax movq %rax, 0x1a8(%rsp) cmpq $0x0, 0x1a8(%rsp) je 0x1de8d71 movq 0x1a8(%rsp), %rdi callq 0x5e480 jmp 0x1de8d73 jmp 0x1de8d75 movq (%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1de8dcc movq %rax, %rdi callq 0x5fc90 jmp 0x1de8dce movq 0x50(%rsp), %rdi callq 0x5e3b0 nopl (%rax,%rax)
/ysh329[P]ncnn/src/layer/deformableconv2d.cpp
virtual thunk to ncnn::DeformableConv2D_x86_fma::destroy_pipeline(ncnn::Option const&)
// Release the two sub-layers built in create_pipeline().
// Each one is asked to tear down its own pipeline before being deleted,
// and the member is reset to 0 so calling this twice is harmless.
int DeformableConv2D_x86_fma::destroy_pipeline(const Option& opt)
{
    if (inner_product != 0)
    {
        inner_product->destroy_pipeline(opt);
        delete inner_product;
        inner_product = 0;
    }

    if (permute != 0)
    {
        permute->destroy_pipeline(opt);
        delete permute;
        permute = 0;
    }

    return 0;
}
movq %rdi, -0x8(%rsp) movq %rsi, -0x10(%rsp) movq -0x8(%rsp), %rdi movq (%rdi), %rax movq -0x38(%rax), %rax addq %rax, %rdi movq -0x10(%rsp), %rsi jmp 0x1df8080 nopw %cs:(%rax,%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/deformableconv2d_x86_fma.cpp
ncnn::DeformableConv2D_x86_fma::forward(std::vector<ncnn::Mat, std::allocator<ncnn::Mat>> const&, std::vector<ncnn::Mat, std::allocator<ncnn::Mat>>&, ncnn::Option const&) const
// Deformable 2D convolution, implemented as im2col + InnerProduct + Permute.
// bottom_blobs[0] is the input feature map, bottom_blobs[1] the per-position
// sampling offsets, and an optional bottom_blobs[2] is a modulation mask
// (present when exactly three inputs are given). Each sampled location is
// read with bilinear interpolation at the offset-shifted coordinates, the
// gathered columns are multiplied by the weights via the cached
// inner_product layer, and the result is transposed back to image layout by
// the cached permute layer. Returns 0 on success, -100 on allocation failure.
int DeformableConv2D_x86_fma::forward(const std::vector<Mat>& bottom_blobs, std::vector<Mat>& top_blobs, const Option& opt) const
{
    const Mat& bottom_blob = bottom_blobs[0];
    const Mat& offset = bottom_blobs[1];
    const bool has_mask = (bottom_blobs.size() == 3);

    const int w = bottom_blob.w;
    const int h = bottom_blob.h;
    const int in_c = bottom_blob.c;
    const size_t elemsize = bottom_blob.elemsize;

    // effective kernel span once dilation is applied
    const int kernel_extent_w = dilation_w * (kernel_w - 1) + 1;
    const int kernel_extent_h = dilation_h * (kernel_h - 1) + 1;
    const int out_w = (w + pad_left + pad_right - kernel_extent_w) / stride_w + 1;
    const int out_h = (h + pad_top + pad_bottom - kernel_extent_h) / stride_h + 1;

    // output = im2col matmul weight_t, im2col.shape is [out_h * out_w, kernel_h * kernel_w * in_c] (in python),
    // weight_t.shape is [num_output, kernel_h * kernel_w * in_c] (in python),
    // output.shape is [out_h * out_w, num_output] (in python).
    Mat im2col;
    im2col.create(kernel_h * kernel_w * in_c * out_h * out_w, elemsize, opt.blob_allocator);
    if (im2col.empty())
        return -100;

    Mat& output = top_blobs[0];
    output.create(num_output, out_h * out_w, elemsize, opt.blob_allocator);
    if (output.empty())
        return -100;

    // flatten so the inner loops can index with plain linear offsets
    Mat bottom_blob_flatten = bottom_blob.reshape(w * h * in_c);
    Mat offset_flatten = offset.reshape(offset.w * offset.h * offset.c);
    const float* data_im_ptr = bottom_blob_flatten;
    const float* data_offset_ptr = offset_flatten;
    float* im2col_ptr = im2col;

    // im2col: one output row (kernel_h * kernel_w * in_c floats) per output pixel
    #pragma omp parallel for num_threads(opt.num_threads)
    for (int h_col = 0; h_col < out_h; h_col++)
    {
        for (int w_col = 0; w_col < out_w; w_col++)
        {
            // top-left corner of the (undeformed) receptive field in input coords
            int h_in = h_col * stride_h - pad_top;
            int w_in = w_col * stride_w - pad_left;
            float* data_col_ptr = im2col_ptr + (h_col * out_w + w_col) * kernel_h * kernel_w * in_c;
            for (int i = 0; i < kernel_h; i++)
            {
                for (int j = 0; j < kernel_w; j++)
                {
                    // offset blob stores (h, w) offset pairs interleaved per kernel tap:
                    // channel 2*(i*kernel_w+j) is the h offset, the next channel the w offset
                    const int data_offset_h_ptr = (((i * kernel_w + j) * 2) * out_h + h_col) * out_w + w_col;
                    const int data_offset_w_ptr = (((i * kernel_w + j) * 2 + 1) * out_h + h_col) * out_w + w_col;
                    const float offset_h = data_offset_ptr[data_offset_h_ptr];
                    const float offset_w = data_offset_ptr[data_offset_w_ptr];
                    // optional modulation mask, one channel per kernel tap; 1.f when absent
                    const float mask_ = has_mask ? bottom_blobs[2].channel(i * kernel_w + j).row(h_col)[w_col] : 1.f;
                    // fractional sampling coordinates after applying the learned offset
                    const float h_im = h_in + i * dilation_h + offset_h;
                    const float w_im = w_in + j * dilation_w + offset_w;

                    // Bilinear interpolation at (h_im, w_im); samples fully outside
                    // the (-1, h) x (-1, w) band contribute zero
                    const bool cond = h_im > -1 && w_im > -1 && h_im < h && w_im < w;
                    float w1 = 0.f;
                    float w2 = 0.f;
                    float w3 = 0.f;
                    float w4 = 0.f;
                    bool v1_cond = false;
                    bool v2_cond = false;
                    bool v3_cond = false;
                    bool v4_cond = false;
                    int v1_pos = 0;
                    int v2_pos = 0;
                    int v3_pos = 0;
                    int v4_pos = 0;
                    if (cond)
                    {
                        int h_low = floor(h_im);
                        int w_low = floor(w_im);
                        int h_high = h_low + 1;
                        int w_high = w_low + 1;
                        // fractional parts and their complements -> the 4 corner weights
                        float lh = h_im - h_low;
                        float lw = w_im - w_low;
                        float hh = 1 - lh;
                        float hw = 1 - lw;
                        // each corner is only read when it lies inside the image
                        v1_cond = (h_low >= 0 && w_low >= 0);
                        v2_cond = (h_low >= 0 && w_high <= w - 1);
                        v3_cond = (h_high <= h - 1 && w_low >= 0);
                        v4_cond = (h_high <= h - 1 && w_high <= w - 1);
                        if (v1_cond)
                            v1_pos = h_low * w + w_low;
                        if (v2_cond)
                            v2_pos = h_low * w + w_high;
                        if (v3_cond)
                            v3_pos = h_high * w + w_low;
                        if (v4_cond)
                            v4_pos = h_high * w + w_high;
                        w1 = hh * hw;
                        w2 = hh * lw;
                        w3 = lh * hw;
                        w4 = lh * lw;
                    }
                    // the corner positions/weights are shared by every input channel
                    const float* data_im_channel_ptr = data_im_ptr;
                    for (int c_im = 0; c_im < in_c; c_im++)
                    {
                        float val = 0.f;
                        if (cond)
                        {
                            float v1 = v1_cond ? data_im_channel_ptr[v1_pos] : 0.f;
                            float v2 = v2_cond ? data_im_channel_ptr[v2_pos] : 0.f;
                            float v3 = v3_cond ? data_im_channel_ptr[v3_pos] : 0.f;
                            float v4 = v4_cond ? data_im_channel_ptr[v4_pos] : 0.f;
                            val = w1 * v1 + w2 * v2 + w3 * v3 + w4 * v4;
                        }
                        *data_col_ptr = val * mask_;
                        data_col_ptr += 1;
                        // advance to the same spatial position in the next input channel
                        data_im_channel_ptr += h * w;
                    }
                }
            }
        }
    }

    im2col = im2col.reshape(kernel_h * kernel_w * in_c, out_h * out_w);
    // call InnerProduct: output[row] = weight * im2col[row] (+ bias/activation)
    inner_product->forward(im2col, output, opt);
    ncnn::Mat output_t;
    // call Permute: transpose [out_h*out_w, num_output] -> channel-major layout
    permute->forward(output, output_t, opt);
    output_t = output_t.reshape(out_w, out_h, num_output);
    top_blobs[0] = output_t;
    return 0;
}
subq $0x7b8, %rsp # imm = 0x7B8 movq %rdi, 0x468(%rsp) movq %rsi, 0x460(%rsp) movq %rdx, 0x458(%rsp) movq %rcx, 0x450(%rsp) movq 0x468(%rsp), %rax movq %rax, 0x130(%rsp) movq 0x460(%rsp), %rdi xorl %eax, %eax movl %eax, %esi callq 0xb5820 movq %rax, 0x448(%rsp) movq 0x460(%rsp), %rdi movl $0x1, %esi callq 0xb5820 movq %rax, 0x440(%rsp) movq 0x460(%rsp), %rdi callq 0x994f0 movq 0x130(%rsp), %rcx subq $0x3, %rax sete 0x43f(%rsp) movq 0x448(%rsp), %rax movl 0x2c(%rax), %eax movl %eax, 0x438(%rsp) movq 0x448(%rsp), %rax movl 0x30(%rax), %eax movl %eax, 0x434(%rsp) movq 0x448(%rsp), %rax movl 0x38(%rax), %eax movl %eax, 0x430(%rsp) movq 0x448(%rsp), %rax movq 0x10(%rax), %rax movq %rax, 0x428(%rsp) movq (%rcx), %rax movq -0x18(%rax), %rax movl 0xd4(%rcx,%rax), %edx movl 0xdc(%rcx,%rax), %eax decl %edx imull %edx, %eax incl %eax movl %eax, 0x424(%rsp) movq (%rcx), %rax movq -0x18(%rax), %rax movl 0xd8(%rcx,%rax), %edx movl 0xe0(%rcx,%rax), %eax decl %edx imull %edx, %eax incl %eax movl %eax, 0x420(%rsp) movl 0x438(%rsp), %eax movq (%rcx), %rdx movq -0x18(%rdx), %rdx movl 0xe4(%rcx,%rdx), %esi movl 0xec(%rcx,%rdx), %edi addl %edi, %eax movl 0xf0(%rcx,%rdx), %edx addl %edx, %eax movl 0x424(%rsp), %edx subl %edx, %eax cltd idivl %esi incl %eax movl %eax, 0x41c(%rsp) movl 0x434(%rsp), %eax movq (%rcx), %rdx movq -0x18(%rdx), %rdx movl 0xe8(%rcx,%rdx), %esi movl 0xf4(%rcx,%rdx), %edi addl %edi, %eax movl 0xf8(%rcx,%rdx), %edx addl %edx, %eax movl 0x420(%rsp), %edx subl %edx, %eax cltd idivl %esi incl %eax movl %eax, 0x418(%rsp) leaq 0x3d0(%rsp), %rdi movq %rdi, 0x478(%rsp) movq 0x478(%rsp), %rax movq %rax, 0x138(%rsp) movq $0x0, (%rax) movq $0x0, 0x8(%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movq $0x0, 0x20(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq (%rcx), %rax movq -0x18(%rax), %rdx movl 0xd4(%rcx,%rdx), %eax movl 0xd8(%rcx,%rdx), %esi imull %eax, %esi movl 
0x430(%rsp), %eax imull %eax, %esi movl 0x418(%rsp), %eax imull %eax, %esi movl 0x41c(%rsp), %eax imull %eax, %esi movq 0x428(%rsp), %rdx movq 0x450(%rsp), %rax movq 0x8(%rax), %rcx callq 0x65040 jmp 0x1df83b5 leaq 0x3d0(%rsp), %rax movq %rax, 0x490(%rsp) movq 0x490(%rsp), %rcx movq %rcx, 0x120(%rsp) movb $0x1, %al cmpq $0x0, (%rcx) movb %al, 0x12f(%rsp) je 0x1df8416 movq 0x120(%rsp), %rax movq %rax, 0x6d0(%rsp) movq 0x6d0(%rsp), %rcx movq 0x40(%rcx), %rax movslq 0x38(%rcx), %rcx imulq %rcx, %rax cmpq $0x0, %rax sete %al movb %al, 0x12f(%rsp) movb 0x12f(%rsp), %al movb %al, 0x11f(%rsp) movb 0x11f(%rsp), %al testb $0x1, %al jne 0x1df8431 jmp 0x1df8465 movl $0xffffff9c, 0x474(%rsp) # imm = 0xFFFFFF9C movl $0x1, 0x3c0(%rsp) jmp 0x1dfa816 movq %rax, %rcx movl %edx, %eax movq %rcx, 0x3c8(%rsp) movl %eax, 0x3c4(%rsp) jmp 0x1dfa936 movq 0x458(%rsp), %rdi xorl %eax, %eax movl %eax, %esi callq 0x98840 movq %rax, %rcx movq 0x130(%rsp), %rax movq %rcx, 0x3b8(%rsp) movq 0x3b8(%rsp), %rdi movq (%rax), %rcx movq -0x18(%rcx), %rcx movl 0xd0(%rax,%rcx), %esi movl 0x418(%rsp), %edx movl 0x41c(%rsp), %eax imull %eax, %edx movq 0x428(%rsp), %rcx movq 0x450(%rsp), %rax movq 0x8(%rax), %r8 callq 0x652c0 jmp 0x1df84cb movq 0x3b8(%rsp), %rax movq %rax, 0x488(%rsp) movq 0x488(%rsp), %rcx movq %rcx, 0x110(%rsp) movb $0x1, %al cmpq $0x0, (%rcx) movb %al, 0x11e(%rsp) je 0x1df852c movq 0x110(%rsp), %rax movq %rax, 0x6d8(%rsp) movq 0x6d8(%rsp), %rcx movq 0x40(%rcx), %rax movslq 0x38(%rcx), %rcx imulq %rcx, %rax cmpq $0x0, %rax sete %al movb %al, 0x11e(%rsp) movb 0x11e(%rsp), %al movb %al, 0x10f(%rsp) movb 0x10f(%rsp), %al testb $0x1, %al jne 0x1df8547 jmp 0x1df8562 movl $0xffffff9c, 0x474(%rsp) # imm = 0xFFFFFF9C movl $0x1, 0x3c0(%rsp) jmp 0x1dfa816 movq 0x448(%rsp), %rsi movl 0x438(%rsp), %edx movl 0x434(%rsp), %eax imull %eax, %edx movl 0x430(%rsp), %eax imull %eax, %edx xorl %eax, %eax movl %eax, %ecx leaq 0x370(%rsp), %rdi callq 0x62b40 jmp 0x1df8598 movq 0x440(%rsp), %rsi movl 0x2c(%rsi), 
%edx movl 0x30(%rsi), %eax imull %eax, %edx movl 0x38(%rsi), %eax imull %eax, %edx xorl %eax, %eax movl %eax, %ecx leaq 0x328(%rsp), %rdi callq 0x62b40 jmp 0x1df85c2 leaq 0x370(%rsp), %rax movq %rax, 0x698(%rsp) movq 0x698(%rsp), %rax movq (%rax), %rax movq %rax, 0x100(%rsp) movq 0x100(%rsp), %rax movq %rax, 0x320(%rsp) leaq 0x328(%rsp), %rax movq %rax, 0x690(%rsp) movq 0x690(%rsp), %rax movq (%rax), %rax movq %rax, 0xf8(%rsp) movq 0xf8(%rsp), %rax movq %rax, 0x318(%rsp) leaq 0x3d0(%rsp), %rax movq %rax, 0x498(%rsp) movq 0x498(%rsp), %rax movq (%rax), %rax movq %rax, 0xf0(%rsp) movq 0xf0(%rsp), %rax movq %rax, 0x310(%rsp) movl $0x0, 0x30c(%rsp) movl 0x30c(%rsp), %eax cmpl 0x418(%rsp), %eax jge 0x1df94f4 movl $0x0, 0x308(%rsp) movl 0x308(%rsp), %eax cmpl 0x41c(%rsp), %eax jge 0x1df94dc movq 0x130(%rsp), %rdx movl 0x30c(%rsp), %eax movq (%rdx), %rcx movq -0x18(%rcx), %rcx imull 0xe8(%rdx,%rcx), %eax movq (%rdx), %rcx movq -0x18(%rcx), %rcx subl 0xf4(%rdx,%rcx), %eax movl %eax, 0x304(%rsp) movl 0x308(%rsp), %eax movq (%rdx), %rcx movq -0x18(%rcx), %rcx imull 0xe4(%rdx,%rcx), %eax movq (%rdx), %rcx movq -0x18(%rcx), %rcx subl 0xec(%rdx,%rcx), %eax movl %eax, 0x300(%rsp) movq 0x310(%rsp), %rax movl 0x30c(%rsp), %ecx imull 0x41c(%rsp), %ecx addl 0x308(%rsp), %ecx movq (%rdx), %rsi movq -0x18(%rsi), %rsi imull 0xd8(%rdx,%rsi), %ecx movq (%rdx), %rsi movq -0x18(%rsi), %rsi imull 0xd4(%rdx,%rsi), %ecx imull 0x430(%rsp), %ecx movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x2f8(%rsp) movl $0x0, 0x2f4(%rsp) movq 0x130(%rsp), %rcx movl 0x2f4(%rsp), %eax movq (%rcx), %rdx movq -0x18(%rdx), %rdx cmpl 0xd8(%rcx,%rdx), %eax jge 0x1df94c4 movl $0x0, 0x2f0(%rsp) movq 0x130(%rsp), %rcx movl 0x2f0(%rsp), %eax movq (%rcx), %rdx movq -0x18(%rdx), %rdx cmpl 0xd4(%rcx,%rdx), %eax jge 0x1df94ac movq 0x130(%rsp), %rcx movl 0x2f4(%rsp), %eax movq (%rcx), %rdx movq -0x18(%rdx), %rdx imull 0xd4(%rcx,%rdx), %eax addl 0x2f0(%rsp), %eax shll %eax imull 0x418(%rsp), %eax addl 
0x30c(%rsp), %eax imull 0x41c(%rsp), %eax addl 0x308(%rsp), %eax movl %eax, 0x2ec(%rsp) movl 0x2f4(%rsp), %eax movq (%rcx), %rdx movq -0x18(%rdx), %rdx imull 0xd4(%rcx,%rdx), %eax addl 0x2f0(%rsp), %eax shll %eax addl $0x1, %eax imull 0x418(%rsp), %eax addl 0x30c(%rsp), %eax imull 0x41c(%rsp), %eax addl 0x308(%rsp), %eax movl %eax, 0x2e8(%rsp) movq 0x318(%rsp), %rax movslq 0x2ec(%rsp), %rcx vmovss (%rax,%rcx,4), %xmm0 vmovss %xmm0, 0x2e4(%rsp) movq 0x318(%rsp), %rax movslq 0x2e8(%rsp), %rcx vmovss (%rax,%rcx,4), %xmm0 vmovss %xmm0, 0x2e0(%rsp) movb 0x43f(%rsp), %al movb $0x0, 0x28f(%rsp) testb $0x1, %al je 0x1df8c2a movq 0x460(%rsp), %rdi movl $0x2, %esi callq 0xb5820 movq 0x130(%rsp), %rdx movq %rax, %rcx movl 0x2f4(%rsp), %eax movq (%rdx), %rsi movq -0x18(%rsi), %rsi imull 0xd4(%rdx,%rsi), %eax addl 0x2f0(%rsp), %eax leaq 0x290(%rsp), %rdx movq %rdx, 0x6b0(%rsp) movq %rcx, 0x6a8(%rsp) movl %eax, 0x6a4(%rsp) movq 0x6a8(%rsp), %rax movq %rax, 0xe0(%rsp) movb $0x0, 0x6a3(%rsp) movl 0x2c(%rax), %r9d movl 0x30(%rax), %r8d movl 0x34(%rax), %edi movq (%rax), %rsi movq 0x40(%rax), %rcx movslq 0x6a4(%rsp), %rdx imulq %rdx, %rcx imulq 0x10(%rax), %rcx addq %rcx, %rsi movq 0x10(%rax), %rdx movl 0x18(%rax), %ecx movq 0x20(%rax), %rax leaq 0x290(%rsp), %r10 movq %r10, 0x710(%rsp) movl %r9d, 0x70c(%rsp) movl %r8d, 0x708(%rsp) movl %edi, 0x704(%rsp) movq %rsi, 0x6f8(%rsp) movq %rdx, 0x6f0(%rsp) movl %ecx, 0x6ec(%rsp) movq %rax, 0x6e0(%rsp) movq 0x710(%rsp), %rcx movq %rcx, 0xe8(%rsp) movq 0x6f8(%rsp), %rax movq %rax, (%rcx) movq $0x0, 0x8(%rcx) movq 0x6f0(%rsp), %rax movq %rax, 0x10(%rcx) movl 0x6ec(%rsp), %eax movl %eax, 0x18(%rcx) movq 0x6e0(%rsp), %rax movq %rax, 0x20(%rcx) movl $0x3, 0x28(%rcx) movl 0x70c(%rsp), %eax movl %eax, 0x2c(%rcx) movl 0x708(%rsp), %eax movl %eax, 0x30(%rcx) movl $0x1, 0x34(%rcx) movl 0x704(%rsp), %eax movl %eax, 0x38(%rcx) movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rdx imulq %rdx, %rax imulq 0x10(%rcx), %rax movq %rax, 0x720(%rsp) movl $0x10, 
0x71c(%rsp) movq 0x720(%rsp), %rax movslq 0x71c(%rsp), %rdx addq %rdx, %rax subq $0x1, %rax xorl %edx, %edx subl 0x71c(%rsp), %edx movslq %edx, %rdx andq %rdx, %rax xorl %edx, %edx divq 0x10(%rcx) movq %rax, %rcx movq 0xe8(%rsp), %rax movq %rcx, 0x40(%rax) movq 0xe0(%rsp), %rax movl 0x28(%rax), %ecx subl $0x1, %ecx movl %ecx, 0x2b8(%rsp) cmpl $0x4, 0x28(%rax) jne 0x1df8a7e movq 0xe0(%rsp), %rcx movslq 0x2c(%rcx), %rax movslq 0x30(%rcx), %rcx imulq %rcx, %rax movq %rax, 0x2d0(%rsp) movb $0x1, 0x6a3(%rsp) testb $0x1, 0x6a3(%rsp) jne 0x1df8bb7 leaq 0x290(%rsp), %rax movq %rax, 0x6b8(%rsp) movq 0x6b8(%rsp), %rax movq %rax, 0x6c8(%rsp) movq 0x6c8(%rsp), %rax movq %rax, 0xd8(%rsp) cmpq $0x0, 0x8(%rax) je 0x1df8b5c movq 0xd8(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x6c4(%rsp) # imm = 0xFFFFFFFF movl 0x6c4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x6c0(%rsp) cmpl $0x1, 0x6c0(%rsp) jne 0x1df8b5c movq 0xd8(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1df8b2d movq 0xd8(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1df8b2b jmp 0x1df8b5a movq 0xd8(%rsp), %rax movq (%rax), %rax movq %rax, 0x728(%rsp) cmpq $0x0, 0x728(%rsp) je 0x1df8b58 movq 0x728(%rsp), %rdi callq 0x5e480 jmp 0x1df8b5a jmp 0x1df8b5c movq 0xd8(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1df8bb7 movq %rax, %rdi callq 0x5fc90 jmp 0x1df8bb9 movb $0x1, 0x28f(%rsp) movl 0x30c(%rsp), %eax leaq 0x290(%rsp), %rcx movq %rcx, 0x518(%rsp) movl %eax, 0x514(%rsp) movq 0x518(%rsp), %rdx movq (%rdx), %rax movslq 0x2c(%rdx), %rcx movslq 0x514(%rsp), %rsi imulq %rsi, %rcx imulq 0x10(%rdx), %rcx addq %rcx, %rax movq %rax, 0xd0(%rsp) movq 0xd0(%rsp), %rax movslq 0x308(%rsp), %rcx vmovss (%rax,%rcx,4), %xmm0 vmovss %xmm0, 0xcc(%rsp) jmp 0x1df8c3d vmovss 0x769e(%rip), %xmm0 
# 0x1e002d0 vmovss %xmm0, 0xcc(%rsp) jmp 0x1df8c3d vmovss 0xcc(%rsp), %xmm0 vmovss %xmm0, 0xc8(%rsp) testb $0x1, 0x28f(%rsp) jne 0x1df8c5e jmp 0x1df8d83 leaq 0x290(%rsp), %rax movq %rax, 0x4a0(%rsp) movq 0x4a0(%rsp), %rax movq %rax, 0x5f8(%rsp) movq 0x5f8(%rsp), %rax movq %rax, 0xc0(%rsp) cmpq $0x0, 0x8(%rax) je 0x1df8d26 movq 0xc0(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x5f4(%rsp) # imm = 0xFFFFFFFF movl 0x5f4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x5f0(%rsp) cmpl $0x1, 0x5f0(%rsp) jne 0x1df8d26 movq 0xc0(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1df8cf7 movq 0xc0(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1df8cf5 jmp 0x1df8d24 movq 0xc0(%rsp), %rax movq (%rax), %rax movq %rax, 0x748(%rsp) cmpq $0x0, 0x748(%rsp) je 0x1df8d22 movq 0x748(%rsp), %rdi callq 0x5e480 jmp 0x1df8d24 jmp 0x1df8d26 movq 0xc0(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1df8d81 movq %rax, %rdi callq 0x5fc90 jmp 0x1df8d83 movq 0x130(%rsp), %rdx vmovss 0xc8(%rsp), %xmm0 vmovss %xmm0, 0x2dc(%rsp) movl 0x304(%rsp), %eax movl 0x2f4(%rsp), %ecx movq (%rdx), %rsi movq -0x18(%rsi), %rsi imull 0xe0(%rdx,%rsi), %ecx addl %ecx, %eax vcvtsi2ss %eax, %xmm0, %xmm0 vaddss 0x2e4(%rsp), %xmm0, %xmm0 vmovss %xmm0, 0x288(%rsp) movl 0x300(%rsp), %eax movl 0x2f0(%rsp), %ecx movq (%rdx), %rsi movq -0x18(%rsi), %rsi imull 0xdc(%rdx,%rsi), %ecx addl %ecx, %eax vcvtsi2ss %eax, %xmm0, %xmm0 vaddss 0x2e0(%rsp), %xmm0, %xmm0 vmovss %xmm0, 0x284(%rsp) vmovss 0x288(%rsp), %xmm0 xorl %eax, %eax vmovss 0x113d6(%rip), %xmm1 # 0x1e0a1f0 vucomiss %xmm1, %xmm0 movb %al, 0xbf(%rsp) jbe 0x1df8e88 vmovss 0x284(%rsp), %xmm0 xorl %eax, %eax vmovss 0x113b6(%rip), %xmm1 # 0x1e0a1f0 vucomiss %xmm1, %xmm0 movb %al, 0xbf(%rsp) jbe 0x1df8e88 vmovss 0x288(%rsp), %xmm1 
vcvtsi2ssl 0x434(%rsp), %xmm0, %xmm0 xorl %eax, %eax vucomiss %xmm1, %xmm0 movb %al, 0xbf(%rsp) jbe 0x1df8e88 vmovss 0x284(%rsp), %xmm1 vcvtsi2ssl 0x438(%rsp), %xmm0, %xmm0 vucomiss %xmm1, %xmm0 seta %al movb %al, 0xbf(%rsp) movb 0xbf(%rsp), %al andb $0x1, %al movb %al, 0x283(%rsp) vxorps %xmm0, %xmm0, %xmm0 vmovss %xmm0, 0x27c(%rsp) vxorps %xmm0, %xmm0, %xmm0 vmovss %xmm0, 0x278(%rsp) vxorps %xmm0, %xmm0, %xmm0 vmovss %xmm0, 0x274(%rsp) vxorps %xmm0, %xmm0, %xmm0 vmovss %xmm0, 0x270(%rsp) movb $0x0, 0x26f(%rsp) movb $0x0, 0x26e(%rsp) movb $0x0, 0x26d(%rsp) movb $0x0, 0x26c(%rsp) movl $0x0, 0x268(%rsp) movl $0x0, 0x264(%rsp) movl $0x0, 0x260(%rsp) movl $0x0, 0x25c(%rsp) testb $0x1, 0x283(%rsp) je 0x1df9250 vmovss 0x288(%rsp), %xmm0 callq 0x84440 vmovss %xmm0, 0xb8(%rsp) jmp 0x1df8f3f vmovss 0xb8(%rsp), %xmm0 vcvttss2si %xmm0, %eax movl %eax, 0x258(%rsp) vmovss 0x284(%rsp), %xmm0 callq 0x84440 vmovss %xmm0, 0xb4(%rsp) jmp 0x1df8f6c vmovss 0xb4(%rsp), %xmm0 vcvttss2si %xmm0, %eax movl %eax, 0x254(%rsp) movl 0x258(%rsp), %eax addl $0x1, %eax movl %eax, 0x250(%rsp) movl 0x254(%rsp), %eax addl $0x1, %eax movl %eax, 0x24c(%rsp) vmovss 0x288(%rsp), %xmm0 vcvtsi2ssl 0x258(%rsp), %xmm1, %xmm1 vsubss %xmm1, %xmm0, %xmm0 vmovss %xmm0, 0x248(%rsp) vmovss 0x284(%rsp), %xmm0 vcvtsi2ssl 0x254(%rsp), %xmm1, %xmm1 vsubss %xmm1, %xmm0, %xmm0 vmovss %xmm0, 0x244(%rsp) vmovss 0x72e8(%rip), %xmm0 # 0x1e002d0 vsubss 0x248(%rsp), %xmm0, %xmm0 vmovss %xmm0, 0x240(%rsp) vmovss 0x72ce(%rip), %xmm0 # 0x1e002d0 vsubss 0x244(%rsp), %xmm0, %xmm0 vmovss %xmm0, 0x23c(%rsp) xorl %eax, %eax cmpl $0x0, 0x258(%rsp) movb %al, 0xb3(%rsp) jl 0x1df9039 cmpl $0x0, 0x254(%rsp) setge %al movb %al, 0xb3(%rsp) movb 0xb3(%rsp), %al andb $0x1, %al movb %al, 0x26f(%rsp) xorl %eax, %eax cmpl $0x0, 0x258(%rsp) movb %al, 0xb2(%rsp) jl 0x1df9079 movl 0x24c(%rsp), %eax movl 0x438(%rsp), %ecx subl $0x1, %ecx cmpl %ecx, %eax setle %al movb %al, 0xb2(%rsp) movb 0xb2(%rsp), %al andb $0x1, %al movb %al, 0x26e(%rsp) movl 
0x250(%rsp), %ecx movl 0x434(%rsp), %edx subl $0x1, %edx xorl %eax, %eax cmpl %edx, %ecx movb %al, 0xb1(%rsp) jg 0x1df90b9 cmpl $0x0, 0x254(%rsp) setge %al movb %al, 0xb1(%rsp) movb 0xb1(%rsp), %al andb $0x1, %al movb %al, 0x26d(%rsp) movl 0x250(%rsp), %ecx movl 0x434(%rsp), %edx subl $0x1, %edx xorl %eax, %eax cmpl %edx, %ecx movb %al, 0xb0(%rsp) jg 0x1df9104 movl 0x24c(%rsp), %eax movl 0x438(%rsp), %ecx subl $0x1, %ecx cmpl %ecx, %eax setle %al movb %al, 0xb0(%rsp) movb 0xb0(%rsp), %al andb $0x1, %al movb %al, 0x26c(%rsp) testb $0x1, 0x26f(%rsp) je 0x1df916f movl 0x258(%rsp), %eax imull 0x438(%rsp), %eax addl 0x254(%rsp), %eax movl %eax, 0x268(%rsp) jmp 0x1df916f movq %rax, %rcx movl %edx, %eax movq %rcx, 0x3c8(%rsp) movl %eax, 0x3c4(%rsp) jmp 0x1dfa700 movq %rax, %rcx movl %edx, %eax movq %rcx, 0x3c8(%rsp) movl %eax, 0x3c4(%rsp) jmp 0x1dfa5ed testb $0x1, 0x26e(%rsp) je 0x1df9196 movl 0x258(%rsp), %eax imull 0x438(%rsp), %eax addl 0x24c(%rsp), %eax movl %eax, 0x264(%rsp) testb $0x1, 0x26d(%rsp) je 0x1df91bd movl 0x250(%rsp), %eax imull 0x438(%rsp), %eax addl 0x254(%rsp), %eax movl %eax, 0x260(%rsp) testb $0x1, 0x26c(%rsp) je 0x1df91e4 movl 0x250(%rsp), %eax imull 0x438(%rsp), %eax addl 0x24c(%rsp), %eax movl %eax, 0x25c(%rsp) vmovss 0x240(%rsp), %xmm0 vmulss 0x23c(%rsp), %xmm0, %xmm0 vmovss %xmm0, 0x27c(%rsp) vmovss 0x240(%rsp), %xmm0 vmulss 0x244(%rsp), %xmm0, %xmm0 vmovss %xmm0, 0x278(%rsp) vmovss 0x248(%rsp), %xmm0 vmulss 0x23c(%rsp), %xmm0, %xmm0 vmovss %xmm0, 0x274(%rsp) vmovss 0x248(%rsp), %xmm0 vmulss 0x244(%rsp), %xmm0, %xmm0 vmovss %xmm0, 0x270(%rsp) movq 0x320(%rsp), %rax movq %rax, 0x230(%rsp) movl $0x0, 0x22c(%rsp) movl 0x22c(%rsp), %eax cmpl 0x430(%rsp), %eax jge 0x1df9494 vxorps %xmm0, %xmm0, %xmm0 vmovss %xmm0, 0x228(%rsp) testb $0x1, 0x283(%rsp) je 0x1df9423 testb $0x1, 0x26f(%rsp) je 0x1df92c4 movq 0x230(%rsp), %rax movslq 0x268(%rsp), %rcx vmovss (%rax,%rcx,4), %xmm0 vmovss %xmm0, 0xac(%rsp) jmp 0x1df92d3 vxorps %xmm0, %xmm0, %xmm0 vmovss %xmm0, 
0xac(%rsp) jmp 0x1df92d3 vmovss 0xac(%rsp), %xmm0 vmovss %xmm0, 0x224(%rsp) testb $0x1, 0x26e(%rsp) je 0x1df930f movq 0x230(%rsp), %rax movslq 0x264(%rsp), %rcx vmovss (%rax,%rcx,4), %xmm0 vmovss %xmm0, 0xa8(%rsp) jmp 0x1df931e vxorps %xmm0, %xmm0, %xmm0 vmovss %xmm0, 0xa8(%rsp) jmp 0x1df931e vmovss 0xa8(%rsp), %xmm0 vmovss %xmm0, 0x220(%rsp) testb $0x1, 0x26d(%rsp) je 0x1df935a movq 0x230(%rsp), %rax movslq 0x260(%rsp), %rcx vmovss (%rax,%rcx,4), %xmm0 vmovss %xmm0, 0xa4(%rsp) jmp 0x1df9369 vxorps %xmm0, %xmm0, %xmm0 vmovss %xmm0, 0xa4(%rsp) jmp 0x1df9369 vmovss 0xa4(%rsp), %xmm0 vmovss %xmm0, 0x21c(%rsp) testb $0x1, 0x26c(%rsp) je 0x1df93a5 movq 0x230(%rsp), %rax movslq 0x25c(%rsp), %rcx vmovss (%rax,%rcx,4), %xmm0 vmovss %xmm0, 0xa0(%rsp) jmp 0x1df93b4 vxorps %xmm0, %xmm0, %xmm0 vmovss %xmm0, 0xa0(%rsp) jmp 0x1df93b4 vmovss 0xa0(%rsp), %xmm0 vmovss %xmm0, 0x218(%rsp) vmovss 0x27c(%rsp), %xmm0 vmulss 0x224(%rsp), %xmm0, %xmm0 vmovss 0x278(%rsp), %xmm1 vmulss 0x220(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 vmovss 0x274(%rsp), %xmm1 vmulss 0x21c(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 vmovss 0x270(%rsp), %xmm1 vmulss 0x218(%rsp), %xmm1, %xmm1 vaddss %xmm1, %xmm0, %xmm0 vmovss %xmm0, 0x228(%rsp) vmovss 0x228(%rsp), %xmm0 vmulss 0x2dc(%rsp), %xmm0, %xmm0 movq 0x2f8(%rsp), %rax vmovss %xmm0, (%rax) movq 0x2f8(%rsp), %rax addq $0x4, %rax movq %rax, 0x2f8(%rsp) movl 0x434(%rsp), %ecx imull 0x438(%rsp), %ecx movq 0x230(%rsp), %rax movslq %ecx, %rcx shlq $0x2, %rcx addq %rcx, %rax movq %rax, 0x230(%rsp) movl 0x22c(%rsp), %eax addl $0x1, %eax movl %eax, 0x22c(%rsp) jmp 0x1df926b jmp 0x1df9496 movl 0x2f0(%rsp), %eax addl $0x1, %eax movl %eax, 0x2f0(%rsp) jmp 0x1df8786 jmp 0x1df94ae movl 0x2f4(%rsp), %eax addl $0x1, %eax movl %eax, 0x2f4(%rsp) jmp 0x1df8758 jmp 0x1df94c6 movl 0x308(%rsp), %eax addl $0x1, %eax movl %eax, 0x308(%rsp) jmp 0x1df8685 jmp 0x1df94de movl 0x30c(%rsp), %eax addl $0x1, %eax movl %eax, 0x30c(%rsp) jmp 0x1df8666 movq 0x130(%rsp), %rcx movq 
(%rcx), %rax movq -0x18(%rax), %rdx movl 0xd4(%rcx,%rdx), %eax movl 0xd8(%rcx,%rdx), %edx imull %eax, %edx movl 0x430(%rsp), %eax imull %eax, %edx movl 0x418(%rsp), %ecx movl 0x41c(%rsp), %eax imull %eax, %ecx xorl %eax, %eax movl %eax, %r8d leaq 0x1d0(%rsp), %rdi leaq 0x3d0(%rsp), %rsi callq 0x63250 jmp 0x1df954b leaq 0x3d0(%rsp), %rax movq %rax, 0x670(%rsp) leaq 0x1d0(%rsp), %rax movq %rax, 0x668(%rsp) movq 0x670(%rsp), %rax movq %rax, 0x98(%rsp) cmpq 0x668(%rsp), %rax jne 0x1df959a movq 0x98(%rsp), %rax movq %rax, 0x678(%rsp) jmp 0x1df978d movq 0x668(%rsp), %rax cmpq $0x0, 0x8(%rax) je 0x1df95d2 movq 0x668(%rsp), %rax movq 0x8(%rax), %rcx movl $0x1, 0x664(%rsp) movl 0x664(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x660(%rsp) movq 0x98(%rsp), %rax movq %rax, 0x688(%rsp) movq 0x688(%rsp), %rax movq %rax, 0x90(%rsp) cmpq $0x0, 0x8(%rax) je 0x1df968a movq 0x90(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x684(%rsp) # imm = 0xFFFFFFFF movl 0x684(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x680(%rsp) cmpl $0x1, 0x680(%rsp) jne 0x1df968a movq 0x90(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1df965b movq 0x90(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1df9659 jmp 0x1df9688 movq 0x90(%rsp), %rax movq (%rax), %rax movq %rax, 0x730(%rsp) cmpq $0x0, 0x730(%rsp) je 0x1df9686 movq 0x730(%rsp), %rdi callq 0x5e480 jmp 0x1df9688 jmp 0x1df968a movq 0x98(%rsp), %rax movq 0x90(%rsp), %rcx movq $0x0, (%rcx) movq $0x0, 0x10(%rcx) movl $0x0, 0x18(%rcx) movl $0x0, 0x28(%rcx) movl $0x0, 0x2c(%rcx) movl $0x0, 0x30(%rcx) movl $0x0, 0x34(%rcx) movl $0x0, 0x38(%rcx) movq $0x0, 0x40(%rcx) movq $0x0, 0x8(%rcx) movq 0x668(%rsp), %rcx movq (%rcx), %rcx movq %rcx, (%rax) movq 0x668(%rsp), %rcx movq 0x8(%rcx), %rcx movq %rcx, 0x8(%rax) movq 0x668(%rsp), %rcx movq 0x10(%rcx), %rcx movq %rcx, 0x10(%rax) movq 0x668(%rsp), %rcx movl 0x18(%rcx), %ecx movl %ecx, 0x18(%rax) movq 0x668(%rsp), %rcx movq 0x20(%rcx), %rcx movq 
%rcx, 0x20(%rax) movq 0x668(%rsp), %rcx movl 0x28(%rcx), %ecx movl %ecx, 0x28(%rax) movq 0x668(%rsp), %rcx movl 0x2c(%rcx), %ecx movl %ecx, 0x2c(%rax) movq 0x668(%rsp), %rcx movl 0x30(%rcx), %ecx movl %ecx, 0x30(%rax) movq 0x668(%rsp), %rcx movl 0x34(%rcx), %ecx movl %ecx, 0x34(%rax) movq 0x668(%rsp), %rcx movl 0x38(%rcx), %ecx movl %ecx, 0x38(%rax) movq 0x668(%rsp), %rcx movq 0x40(%rcx), %rcx movq %rcx, 0x40(%rax) movq %rax, 0x678(%rsp) leaq 0x1d0(%rsp), %rax movq %rax, 0x4b0(%rsp) movq 0x4b0(%rsp), %rax movq %rax, 0x5d8(%rsp) movq 0x5d8(%rsp), %rax movq %rax, 0x88(%rsp) cmpq $0x0, 0x8(%rax) je 0x1df9855 movq 0x88(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x5d4(%rsp) # imm = 0xFFFFFFFF movl 0x5d4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x5d0(%rsp) cmpl $0x1, 0x5d0(%rsp) jne 0x1df9855 movq 0x88(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1df9826 movq 0x88(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1df9824 jmp 0x1df9853 movq 0x88(%rsp), %rax movq (%rax), %rax movq %rax, 0x758(%rsp) cmpq $0x0, 0x758(%rsp) je 0x1df9851 movq 0x758(%rsp), %rdi callq 0x5e480 jmp 0x1df9853 jmp 0x1df9855 movq 0x88(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1df98b0 movq %rax, %rdi callq 0x5fc90 movq 0x130(%rsp), %rax movq 0x50(%rax), %rdi movq 0x3b8(%rsp), %rdx movq 0x450(%rsp), %rcx movq (%rdi), %rax movq 0x38(%rax), %rax leaq 0x3d0(%rsp), %rsi callq *%rax jmp 0x1df98df leaq 0x188(%rsp), %rax movq %rax, 0x480(%rsp) movq 0x480(%rsp), %rax movq %rax, 0x80(%rsp) movq $0x0, (%rax) movq $0x0, 0x8(%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movq $0x0, 0x20(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq 0x130(%rsp), %rax movq 
0x58(%rax), %rdi movq 0x3b8(%rsp), %rsi movq 0x450(%rsp), %rcx movq (%rdi), %rax movq 0x38(%rax), %rax leaq 0x188(%rsp), %rdx callq *%rax jmp 0x1df997f movq 0x130(%rsp), %rax movl 0x41c(%rsp), %edx movl 0x418(%rsp), %ecx movq (%rax), %rsi movq -0x18(%rsi), %rsi movl 0xd0(%rax,%rsi), %r8d xorl %eax, %eax movl %eax, %r9d leaq 0x140(%rsp), %rdi leaq 0x188(%rsp), %rsi callq 0x63980 jmp 0x1df99c0 leaq 0x188(%rsp), %rax movq %rax, 0x640(%rsp) leaq 0x140(%rsp), %rax movq %rax, 0x638(%rsp) movq 0x640(%rsp), %rax movq %rax, 0x78(%rsp) cmpq 0x638(%rsp), %rax jne 0x1df9a09 movq 0x78(%rsp), %rax movq %rax, 0x648(%rsp) jmp 0x1df9be4 movq 0x638(%rsp), %rax cmpq $0x0, 0x8(%rax) je 0x1df9a41 movq 0x638(%rsp), %rax movq 0x8(%rax), %rcx movl $0x1, 0x634(%rsp) movl 0x634(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x630(%rsp) movq 0x78(%rsp), %rax movq %rax, 0x658(%rsp) movq 0x658(%rsp), %rax movq %rax, 0x70(%rsp) cmpq $0x0, 0x8(%rax) je 0x1df9ae7 movq 0x70(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x654(%rsp) # imm = 0xFFFFFFFF movl 0x654(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x650(%rsp) cmpl $0x1, 0x650(%rsp) jne 0x1df9ae7 movq 0x70(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1df9abb movq 0x70(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1df9ab9 jmp 0x1df9ae5 movq 0x70(%rsp), %rax movq (%rax), %rax movq %rax, 0x738(%rsp) cmpq $0x0, 0x738(%rsp) je 0x1df9ae3 movq 0x738(%rsp), %rdi callq 0x5e480 jmp 0x1df9ae5 jmp 0x1df9ae7 movq 0x78(%rsp), %rax movq 0x70(%rsp), %rcx movq $0x0, (%rcx) movq $0x0, 0x10(%rcx) movl $0x0, 0x18(%rcx) movl $0x0, 0x28(%rcx) movl $0x0, 0x2c(%rcx) movl $0x0, 0x30(%rcx) movl $0x0, 0x34(%rcx) movl $0x0, 0x38(%rcx) movq $0x0, 0x40(%rcx) movq $0x0, 0x8(%rcx) movq 0x638(%rsp), %rcx movq (%rcx), %rcx movq %rcx, (%rax) movq 0x638(%rsp), %rcx movq 0x8(%rcx), %rcx movq %rcx, 0x8(%rax) movq 0x638(%rsp), %rcx movq 0x10(%rcx), %rcx movq %rcx, 0x10(%rax) movq 0x638(%rsp), %rcx movl 0x18(%rcx), 
%ecx movl %ecx, 0x18(%rax) movq 0x638(%rsp), %rcx movq 0x20(%rcx), %rcx movq %rcx, 0x20(%rax) movq 0x638(%rsp), %rcx movl 0x28(%rcx), %ecx movl %ecx, 0x28(%rax) movq 0x638(%rsp), %rcx movl 0x2c(%rcx), %ecx movl %ecx, 0x2c(%rax) movq 0x638(%rsp), %rcx movl 0x30(%rcx), %ecx movl %ecx, 0x30(%rax) movq 0x638(%rsp), %rcx movl 0x34(%rcx), %ecx movl %ecx, 0x34(%rax) movq 0x638(%rsp), %rcx movl 0x38(%rcx), %ecx movl %ecx, 0x38(%rax) movq 0x638(%rsp), %rcx movq 0x40(%rcx), %rcx movq %rcx, 0x40(%rax) movq %rax, 0x648(%rsp) leaq 0x140(%rsp), %rax movq %rax, 0x4c0(%rsp) movq 0x4c0(%rsp), %rax movq %rax, 0x5b8(%rsp) movq 0x5b8(%rsp), %rax movq %rax, 0x68(%rsp) cmpq $0x0, 0x8(%rax) je 0x1df9c9d movq 0x68(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x5b4(%rsp) # imm = 0xFFFFFFFF movl 0x5b4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x5b0(%rsp) cmpl $0x1, 0x5b0(%rsp) jne 0x1df9c9d movq 0x68(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1df9c71 movq 0x68(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1df9c6f jmp 0x1df9c9b movq 0x68(%rsp), %rax movq (%rax), %rax movq %rax, 0x768(%rsp) cmpq $0x0, 0x768(%rsp) je 0x1df9c99 movq 0x768(%rsp), %rdi callq 0x5e480 jmp 0x1df9c9b jmp 0x1df9c9d movq 0x68(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1df9cf5 movq %rax, %rdi callq 0x5fc90 movq 0x458(%rsp), %rdi xorl %eax, %eax movl %eax, %esi callq 0x98840 movq %rax, 0x610(%rsp) leaq 0x188(%rsp), %rax movq %rax, 0x608(%rsp) movq 0x610(%rsp), %rax movq %rax, 0x60(%rsp) cmpq 0x608(%rsp), %rax jne 0x1df9d47 movq 0x60(%rsp), %rax movq %rax, 0x618(%rsp) jmp 0x1df9f22 movq 0x608(%rsp), %rax cmpq $0x0, 0x8(%rax) je 0x1df9d7f movq 0x608(%rsp), %rax movq 0x8(%rax), %rcx movl $0x1, 0x604(%rsp) movl 0x604(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 
0x600(%rsp) movq 0x60(%rsp), %rax movq %rax, 0x628(%rsp) movq 0x628(%rsp), %rax movq %rax, 0x58(%rsp) cmpq $0x0, 0x8(%rax) je 0x1df9e25 movq 0x58(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x624(%rsp) # imm = 0xFFFFFFFF movl 0x624(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x620(%rsp) cmpl $0x1, 0x620(%rsp) jne 0x1df9e25 movq 0x58(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1df9df9 movq 0x58(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1df9df7 jmp 0x1df9e23 movq 0x58(%rsp), %rax movq (%rax), %rax movq %rax, 0x740(%rsp) cmpq $0x0, 0x740(%rsp) je 0x1df9e21 movq 0x740(%rsp), %rdi callq 0x5e480 jmp 0x1df9e23 jmp 0x1df9e25 movq 0x60(%rsp), %rax movq 0x58(%rsp), %rcx movq $0x0, (%rcx) movq $0x0, 0x10(%rcx) movl $0x0, 0x18(%rcx) movl $0x0, 0x28(%rcx) movl $0x0, 0x2c(%rcx) movl $0x0, 0x30(%rcx) movl $0x0, 0x34(%rcx) movl $0x0, 0x38(%rcx) movq $0x0, 0x40(%rcx) movq $0x0, 0x8(%rcx) movq 0x608(%rsp), %rcx movq (%rcx), %rcx movq %rcx, (%rax) movq 0x608(%rsp), %rcx movq 0x8(%rcx), %rcx movq %rcx, 0x8(%rax) movq 0x608(%rsp), %rcx movq 0x10(%rcx), %rcx movq %rcx, 0x10(%rax) movq 0x608(%rsp), %rcx movl 0x18(%rcx), %ecx movl %ecx, 0x18(%rax) movq 0x608(%rsp), %rcx movq 0x20(%rcx), %rcx movq %rcx, 0x20(%rax) movq 0x608(%rsp), %rcx movl 0x28(%rcx), %ecx movl %ecx, 0x28(%rax) movq 0x608(%rsp), %rcx movl 0x2c(%rcx), %ecx movl %ecx, 0x2c(%rax) movq 0x608(%rsp), %rcx movl 0x30(%rcx), %ecx movl %ecx, 0x30(%rax) movq 0x608(%rsp), %rcx movl 0x34(%rcx), %ecx movl %ecx, 0x34(%rax) movq 0x608(%rsp), %rcx movl 0x38(%rcx), %ecx movl %ecx, 0x38(%rax) movq 0x608(%rsp), %rcx movq 0x40(%rcx), %rcx movq %rcx, 0x40(%rax) movq %rax, 0x618(%rsp) movl $0x0, 0x474(%rsp) movl $0x1, 0x3c0(%rsp) leaq 0x188(%rsp), %rax movq %rax, 0x4d0(%rsp) movq 0x4d0(%rsp), %rax movq %rax, 0x598(%rsp) movq 0x598(%rsp), %rax movq %rax, 0x50(%rsp) cmpq $0x0, 0x8(%rax) je 0x1df9ff1 movq 0x50(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x594(%rsp) # 
imm = 0xFFFFFFFF movl 0x594(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x590(%rsp) cmpl $0x1, 0x590(%rsp) jne 0x1df9ff1 movq 0x50(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1df9fc5 movq 0x50(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1df9fc3 jmp 0x1df9fef movq 0x50(%rsp), %rax movq (%rax), %rax movq %rax, 0x778(%rsp) cmpq $0x0, 0x778(%rsp) je 0x1df9fed movq 0x778(%rsp), %rdi callq 0x5e480 jmp 0x1df9fef jmp 0x1df9ff1 movq 0x50(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1dfa049 movq %rax, %rdi callq 0x5fc90 leaq 0x328(%rsp), %rax movq %rax, 0x4e0(%rsp) movq 0x4e0(%rsp), %rax movq %rax, 0x578(%rsp) movq 0x578(%rsp), %rax movq %rax, 0x48(%rsp) cmpq $0x0, 0x8(%rax) je 0x1dfa102 movq 0x48(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x574(%rsp) # imm = 0xFFFFFFFF movl 0x574(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x570(%rsp) cmpl $0x1, 0x570(%rsp) jne 0x1dfa102 movq 0x48(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1dfa0d6 movq 0x48(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1dfa0d4 jmp 0x1dfa100 movq 0x48(%rsp), %rax movq (%rax), %rax movq %rax, 0x788(%rsp) cmpq $0x0, 0x788(%rsp) je 0x1dfa0fe movq 0x788(%rsp), %rdi callq 0x5e480 jmp 0x1dfa100 jmp 0x1dfa102 movq 0x48(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1dfa15a movq %rax, %rdi callq 0x5fc90 leaq 0x370(%rsp), %rax movq %rax, 0x4f0(%rsp) movq 0x4f0(%rsp), %rax movq %rax, 0x558(%rsp) movq 0x558(%rsp), %rax movq %rax, 0x40(%rsp) cmpq $0x0, 0x8(%rax) je 0x1dfa213 movq 0x40(%rsp), %rax movq 0x8(%rax), %rcx 
movl $0xffffffff, 0x554(%rsp) # imm = 0xFFFFFFFF movl 0x554(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x550(%rsp) cmpl $0x1, 0x550(%rsp) jne 0x1dfa213 movq 0x40(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1dfa1e7 movq 0x40(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1dfa1e5 jmp 0x1dfa211 movq 0x40(%rsp), %rax movq (%rax), %rax movq %rax, 0x798(%rsp) cmpq $0x0, 0x798(%rsp) je 0x1dfa20f movq 0x798(%rsp), %rdi callq 0x5e480 jmp 0x1dfa211 jmp 0x1dfa213 movq 0x40(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1dfa26b movq %rax, %rdi callq 0x5fc90 jmp 0x1dfa816 movq %rax, %rcx movl %edx, %eax movq %rcx, 0x3c8(%rsp) movl %eax, 0x3c4(%rsp) leaq 0x1d0(%rsp), %rax movq %rax, 0x4b8(%rsp) movq 0x4b8(%rsp), %rax movq %rax, 0x5c8(%rsp) movq 0x5c8(%rsp), %rax movq %rax, 0x38(%rsp) cmpq $0x0, 0x8(%rax) je 0x1dfa33d movq 0x38(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x5c4(%rsp) # imm = 0xFFFFFFFF movl 0x5c4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x5c0(%rsp) cmpl $0x1, 0x5c0(%rsp) jne 0x1dfa33d movq 0x38(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1dfa311 movq 0x38(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1dfa30f jmp 0x1dfa33b movq 0x38(%rsp), %rax movq (%rax), %rax movq %rax, 0x760(%rsp) cmpq $0x0, 0x760(%rsp) je 0x1dfa339 movq 0x760(%rsp), %rdi callq 0x5e480 jmp 0x1dfa33b jmp 0x1dfa33d movq 0x38(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1dfa395 movq %rax, %rdi callq 0x5fc90 jmp 0x1dfa5ed movq %rax, %rcx movl %edx, %eax movq %rcx, 0x3c8(%rsp) movl %eax, 0x3c4(%rsp) 
jmp 0x1dfa4da movq %rax, %rcx movl %edx, %eax movq %rcx, 0x3c8(%rsp) movl %eax, 0x3c4(%rsp) leaq 0x140(%rsp), %rax movq %rax, 0x4c8(%rsp) movq 0x4c8(%rsp), %rax movq %rax, 0x5a8(%rsp) movq 0x5a8(%rsp), %rax movq %rax, 0x30(%rsp) cmpq $0x0, 0x8(%rax) je 0x1dfa480 movq 0x30(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x5a4(%rsp) # imm = 0xFFFFFFFF movl 0x5a4(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x5a0(%rsp) cmpl $0x1, 0x5a0(%rsp) jne 0x1dfa480 movq 0x30(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1dfa454 movq 0x30(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1dfa452 jmp 0x1dfa47e movq 0x30(%rsp), %rax movq (%rax), %rax movq %rax, 0x770(%rsp) cmpq $0x0, 0x770(%rsp) je 0x1dfa47c movq 0x770(%rsp), %rdi callq 0x5e480 jmp 0x1dfa47e jmp 0x1dfa480 movq 0x30(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1dfa4d8 movq %rax, %rdi callq 0x5fc90 jmp 0x1dfa4da leaq 0x188(%rsp), %rax movq %rax, 0x4d8(%rsp) movq 0x4d8(%rsp), %rax movq %rax, 0x588(%rsp) movq 0x588(%rsp), %rax movq %rax, 0x28(%rsp) cmpq $0x0, 0x8(%rax) je 0x1dfa593 movq 0x28(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x584(%rsp) # imm = 0xFFFFFFFF movl 0x584(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x580(%rsp) cmpl $0x1, 0x580(%rsp) jne 0x1dfa593 movq 0x28(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1dfa567 movq 0x28(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1dfa565 jmp 0x1dfa591 movq 0x28(%rsp), %rax movq (%rax), %rax movq %rax, 0x780(%rsp) cmpq $0x0, 0x780(%rsp) je 0x1dfa58f movq 0x780(%rsp), %rdi callq 0x5e480 jmp 0x1dfa591 jmp 0x1dfa593 movq 0x28(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 
0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1dfa5eb movq %rax, %rdi callq 0x5fc90 jmp 0x1dfa5ed leaq 0x328(%rsp), %rax movq %rax, 0x4e8(%rsp) movq 0x4e8(%rsp), %rax movq %rax, 0x568(%rsp) movq 0x568(%rsp), %rax movq %rax, 0x20(%rsp) cmpq $0x0, 0x8(%rax) je 0x1dfa6a6 movq 0x20(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x564(%rsp) # imm = 0xFFFFFFFF movl 0x564(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x560(%rsp) cmpl $0x1, 0x560(%rsp) jne 0x1dfa6a6 movq 0x20(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1dfa67a movq 0x20(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1dfa678 jmp 0x1dfa6a4 movq 0x20(%rsp), %rax movq (%rax), %rax movq %rax, 0x790(%rsp) cmpq $0x0, 0x790(%rsp) je 0x1dfa6a2 movq 0x790(%rsp), %rdi callq 0x5e480 jmp 0x1dfa6a4 jmp 0x1dfa6a6 movq 0x20(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1dfa6fe movq %rax, %rdi callq 0x5fc90 jmp 0x1dfa700 leaq 0x370(%rsp), %rax movq %rax, 0x4f8(%rsp) movq 0x4f8(%rsp), %rax movq %rax, 0x548(%rsp) movq 0x548(%rsp), %rax movq %rax, 0x18(%rsp) cmpq $0x0, 0x8(%rax) je 0x1dfa7b9 movq 0x18(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x544(%rsp) # imm = 0xFFFFFFFF movl 0x544(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x540(%rsp) cmpl $0x1, 0x540(%rsp) jne 0x1dfa7b9 movq 0x18(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1dfa78d movq 0x18(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1dfa78b jmp 0x1dfa7b7 movq 0x18(%rsp), %rax movq (%rax), %rax movq %rax, 0x7a0(%rsp) cmpq $0x0, 0x7a0(%rsp) je 0x1dfa7b5 movq 0x7a0(%rsp), %rdi callq 0x5e480 jmp 0x1dfa7b7 jmp 0x1dfa7b9 movq 0x18(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 
0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1dfa811 movq %rax, %rdi callq 0x5fc90 jmp 0x1dfa936 leaq 0x3d0(%rsp), %rax movq %rax, 0x500(%rsp) movq 0x500(%rsp), %rax movq %rax, 0x538(%rsp) movq 0x538(%rsp), %rax movq %rax, 0x10(%rsp) cmpq $0x0, 0x8(%rax) je 0x1dfa8cf movq 0x10(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x534(%rsp) # imm = 0xFFFFFFFF movl 0x534(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x530(%rsp) cmpl $0x1, 0x530(%rsp) jne 0x1dfa8cf movq 0x10(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1dfa8a3 movq 0x10(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1dfa8a1 jmp 0x1dfa8cd movq 0x10(%rsp), %rax movq (%rax), %rax movq %rax, 0x7a8(%rsp) cmpq $0x0, 0x7a8(%rsp) je 0x1dfa8cb movq 0x7a8(%rsp), %rdi callq 0x5e480 jmp 0x1dfa8cd jmp 0x1dfa8cf movq 0x10(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1dfa927 movq %rax, %rdi callq 0x5fc90 movl 0x474(%rsp), %eax addq $0x7b8, %rsp # imm = 0x7B8 retq leaq 0x3d0(%rsp), %rax movq %rax, 0x508(%rsp) movq 0x508(%rsp), %rax movq %rax, 0x528(%rsp) movq 0x528(%rsp), %rax movq %rax, 0x8(%rsp) cmpq $0x0, 0x8(%rax) je 0x1dfa9ef movq 0x8(%rsp), %rax movq 0x8(%rax), %rcx movl $0xffffffff, 0x524(%rsp) # imm = 0xFFFFFFFF movl 0x524(%rsp), %eax lock xaddl %eax, (%rcx) movl %eax, 0x520(%rsp) cmpl $0x1, 0x520(%rsp) jne 0x1dfa9ef movq 0x8(%rsp), %rax cmpq $0x0, 0x20(%rax) je 0x1dfa9c3 movq 0x8(%rsp), %rax movq (%rax), %rsi movq 0x20(%rax), %rdi movq (%rdi), %rax movq 0x18(%rax), %rax callq *%rax jmp 0x1dfa9c1 jmp 0x1dfa9ed movq 0x8(%rsp), %rax movq (%rax), %rax movq %rax, 0x7b0(%rsp) cmpq $0x0, 0x7b0(%rsp) je 0x1dfa9eb movq 0x7b0(%rsp), %rdi callq 0x5e480 jmp 
0x1dfa9ed jmp 0x1dfa9ef movq 0x8(%rsp), %rax movq $0x0, (%rax) movq $0x0, 0x10(%rax) movl $0x0, 0x18(%rax) movl $0x0, 0x28(%rax) movl $0x0, 0x2c(%rax) movl $0x0, 0x30(%rax) movl $0x0, 0x34(%rax) movl $0x0, 0x38(%rax) movq $0x0, 0x40(%rax) movq $0x0, 0x8(%rax) jmp 0x1dfaa47 movq %rax, %rdi callq 0x5fc90 jmp 0x1dfaa49 movq 0x3c8(%rsp), %rdi callq 0x5e3b0 nopw %cs:(%rax,%rax)
/ysh329[P]ncnn/build_O0/src/layer/x86/deformableconv2d_x86_fma.cpp