Loop Id: 267 | Module: exec | Source: advec_mom_kernel.f90:81-177 [...] | Coverage: 0.01% |
---|
0x43b5c0 VMOVAPD %ZMM27,%ZMM0{%K3}{z} |
0x43b5c6 VSUBPD %ZMM24,%ZMM9,%ZMM4 |
0x43b5cc VFMADD213PD %ZMM20,%ZMM0,%ZMM4 |
0x43b5d2 VMULPD %ZMM22,%ZMM4,%ZMM0 |
0x43b5d8 IMUL %R15,%R14 |
0x43b5dc ADD 0x48(%RBP),%R14 |
0x43b5e0 VMOVUPD %ZMM0,(%R14,%R9,8){%K1} |
0x43b5e7 ADD %R15,%R12 |
0x43b5ea ADD %RSI,%R8 |
0x43b5ed CMP 0x18(%RSP),%R10 |
0x43b5f2 LEA 0x1(%R10),%R10 |
0x43b5f6 JE 4385a9 |
0x43b5fc MOV %RBX,%RCX |
0x43b5ff AND $-0x8,%RCX |
0x43b603 JE 43b840 |
0x43b609 MOV %RBX,%RDI |
0x43b60c MOV %R15,%RSI |
0x43b60f LEA -0x1(%RCX),%R9 |
0x43b613 MOV 0x70(%RSP),%RAX |
0x43b618 LEA (%RAX,%R10,1),%R14 |
0x43b61c SUB 0x20(%RSP),%R14 |
0x43b621 MOV 0x28(%RSP),%R15 |
0x43b626 IMUL %R14,%R15 |
0x43b62a IMUL 0x10(%RSP),%R14 |
0x43b630 ADD 0x50(%RBP),%R15 |
0x43b634 ADD 0x98(%RBP),%R14 |
0x43b63b XOR %EAX,%EAX |
0x43b63d JMP 43b673 |
(268) 0x43b640 VCMPPD $0x1,%ZMM27,%ZMM11,%K1 |
(268) 0x43b647 VMOVAPD %ZMM31,%ZMM0{%K1}{z} |
(268) 0x43b64d VSUBPD %ZMM25,%ZMM9,%ZMM4 |
(268) 0x43b653 VFMADD213PD %ZMM24,%ZMM0,%ZMM4 |
(268) 0x43b659 VMULPD %ZMM23,%ZMM4,%ZMM0 |
(268) 0x43b65f VMOVUPD %ZMM0,(%R12,%RAX,8) |
(268) 0x43b666 ADD $0x8,%RAX |
(268) 0x43b66a CMP %R9,%RAX |
(268) 0x43b66d JA 43b800 |
(268) 0x43b673 VMOVUPD (%R8,%RAX,8),%ZMM23 |
(268) 0x43b67a VFPCLASSPD $0x50,%ZMM23,%K1 |
(268) 0x43b681 LEA (%R11,%RAX,1),%EBX |
(268) 0x43b685 VPBROADCASTD %EBX,%YMM25 |
(268) 0x43b68b VPADDD %YMM5,%YMM25,%YMM26 |
(268) 0x43b691 VPADDD %YMM6,%YMM25,%YMM24 |
(268) 0x43b697 VPBLENDMD %YMM24,%YMM26,%YMM27{%K1} |
(268) 0x43b69d VMOVDQA32 %YMM26,%YMM24{%K1} |
(268) 0x43b6a3 VPMOVSXDQ %YMM24,%ZMM24 |
(268) 0x43b6a9 VPSUBQ %ZMM8,%ZMM24,%ZMM28 |
(268) 0x43b6af KXNORW %K0,%K0,%K2 |
(268) 0x43b6b3 VXORPD %XMM29,%XMM29,%XMM29 |
(268) 0x43b6b9 VGATHERQPD (%R15,%ZMM28,8),%ZMM29{%K2} |
(268) 0x43b6c0 KXNORW %K0,%K0,%K2 |
(268) 0x43b6c4 VPXORD %XMM24,%XMM24,%XMM24 |
(268) 0x43b6ca VGATHERQPD (%R14,%ZMM28,8),%ZMM24{%K2} |
(268) 0x43b6d1 VPADDD %YMM14,%YMM25,%YMM28 |
(268) 0x43b6d7 VMOVDQA64 %YMM28,%YMM30 |
(268) 0x43b6dd VPADDD %YMM15,%YMM25,%YMM30{%K1} |
(268) 0x43b6e3 VANDPD %ZMM7,%ZMM23,%ZMM25 |
(268) 0x43b6e9 VPMOVSXDQ %YMM30,%ZMM30 |
(268) 0x43b6ef VPSUBQ %ZMM8,%ZMM30,%ZMM30 |
(268) 0x43b6f5 KXNORW %K0,%K0,%K2 |
(268) 0x43b6f9 VPXOR %XMM4,%XMM4,%XMM4 |
(268) 0x43b6fd VGATHERQPD (%R14,%ZMM30,8),%ZMM4{%K2} |
(268) 0x43b704 VDIVPD %ZMM29,%ZMM25,%ZMM25 |
(268) 0x43b70a VPMOVSXDQ %YMM27,%ZMM27 |
(268) 0x43b710 VPSUBQ %ZMM8,%ZMM27,%ZMM27 |
(268) 0x43b716 KXNORW %K0,%K0,%K2 |
(268) 0x43b71a VXORPD %XMM29,%XMM29,%XMM29 |
(268) 0x43b720 VGATHERQPD (%R14,%ZMM27,8),%ZMM29{%K2} |
(268) 0x43b727 VXORPD %XMM31,%XMM31,%XMM31 |
(268) 0x43b72d VSUBPD %ZMM4,%ZMM24,%ZMM30 |
(268) 0x43b733 VSUBPD %ZMM24,%ZMM29,%ZMM29 |
(268) 0x43b739 VMULPD %ZMM30,%ZMM29,%ZMM27 |
(268) 0x43b73f VCMPPD $0x1,%ZMM27,%ZMM31,%K0 |
(268) 0x43b746 KORTESTB %K0,%K0 |
(268) 0x43b74a JE 43b640 |
(268) 0x43b750 VCMPPD $0x1,%ZMM27,%ZMM11,%K2 |
(268) 0x43b757 VMOVDQA32 %YMM26,%YMM28{%K1} |
(268) 0x43b75d VMOVUPD (%R13,%RAX,8),%ZMM4{%K2}{z} |
(268) 0x43b765 VANDPD %ZMM7,%ZMM29,%ZMM26 |
(268) 0x43b76b VSUBPD %ZMM25,%ZMM10,%ZMM31 |
(268) 0x43b771 VMULPD %ZMM31,%ZMM26,%ZMM31 |
(268) 0x43b777 VDIVPD %ZMM4,%ZMM31,%ZMM31 |
(268) 0x43b77d VPMOVSXDQ %YMM28,%ZMM28 |
(268) 0x43b783 VPSUBQ %ZMM8,%ZMM28,%ZMM28 |
(268) 0x43b789 VPXOR %XMM0,%XMM0,%XMM0 |
(268) 0x43b78d VGATHERQPD (%RDX,%ZMM28,8),%ZMM0{%K2} |
(268) 0x43b794 VANDPD %ZMM7,%ZMM30,%ZMM28 |
(268) 0x43b79a VMINPD %ZMM26,%ZMM28,%ZMM26 |
(268) 0x43b7a0 VFMADD213PD %ZMM28,%ZMM25,%ZMM28 |
(268) 0x43b7a6 VDIVPD %ZMM0,%ZMM28,%ZMM0 |
(268) 0x43b7ac VADDPD %ZMM31,%ZMM0,%ZMM0 |
(268) 0x43b7b2 VMULPD %ZMM12,%ZMM4,%ZMM4 |
(268) 0x43b7b8 VMULPD %ZMM0,%ZMM4,%ZMM0 |
(268) 0x43b7be VMINPD %ZMM26,%ZMM0,%ZMM31 |
(268) 0x43b7c4 VFPCLASSPD $0x56,%ZMM29,%K1 |
(268) 0x43b7cb VXORPD %ZMM13,%ZMM31,%ZMM31{%K1} |
(268) 0x43b7d1 JMP 43b640 |
0x43b800 MOV %RDI,%RBX |
0x43b803 CMP %RCX,%RDI |
0x43b806 MOV %RSI,%R15 |
0x43b809 MOV 0x38(%RSP),%RSI |
0x43b80e JE 43b5e7 |
0x43b814 JMP 43b842 |
0x43b840 XOR %ECX,%ECX |
0x43b842 MOV %RBX,%RAX |
0x43b845 SUB %RCX,%RAX |
0x43b848 VPBROADCASTQ %RAX,%ZMM23 |
0x43b84e VPCMPNLEUQ 0xcc467(%RIP),%ZMM23,%K1 |
0x43b859 KORTESTB %K1,%K1 |
0x43b85d JE 43b5e7 |
0x43b863 MOV 0x70(%RSP),%RAX |
0x43b868 LEA (%RAX,%R10,1),%R14 |
0x43b86c SUB 0x20(%RSP),%R14 |
0x43b871 MOV %RSI,%RAX |
0x43b874 IMUL %R14,%RAX |
0x43b878 ADD 0x60(%RBP),%RAX |
0x43b87c MOV 0x30(%RSP),%RDI |
0x43b881 LEA (%RDI,%RCX,1),%R9 |
0x43b885 SUB 0x40(%RSP),%R9 |
0x43b88a VMOVUPD (%RAX,%R9,8),%ZMM0{%K1}{z} |
0x43b891 VMOVQ %RCX,%XMM4 |
0x43b896 VPADDQ %XMM4,%XMM3,%XMM4 |
0x43b89a VMOVAPD %ZMM0,%ZMM22{%K1} |
0x43b8a0 VFPCLASSPD $0x50,%ZMM22,%K2{%K1} |
0x43b8a7 VPBROADCASTD %ECX,%YMM0 |
0x43b8ad VPADDD %YMM5,%YMM0,%YMM0 |
0x43b8b1 VPBROADCASTQ %XMM4,%ZMM4 |
0x43b8b7 VPMOVQD %ZMM4,%YMM4 |
0x43b8bd VPADDD %YMM6,%YMM4,%YMM4 |
0x43b8c1 VPADDD %YMM0,%YMM1,%YMM23 |
0x43b8c7 VPBLENDMD %YMM4,%YMM23,%YMM24{%K2} |
0x43b8cd VMOVDQA32 %YMM23,%YMM4{%K2} |
0x43b8d3 MOV 0x28(%RSP),%RAX |
0x43b8d8 IMUL %R14,%RAX |
0x43b8dc VPMOVSXDQ %YMM4,%ZMM4 |
0x43b8e2 VPSUBQ %ZMM8,%ZMM4,%ZMM4 |
0x43b8e8 ADD 0x50(%RBP),%RAX |
0x43b8ec KMOVQ %K1,%K3 |
0x43b8f1 VPXORD %XMM26,%XMM26,%XMM26 |
0x43b8f7 VGATHERQPD (%RAX,%ZMM4,8),%ZMM26{%K3} |
0x43b8fe MOV 0x10(%RSP),%RAX |
0x43b903 IMUL %R14,%RAX |
0x43b907 ADD 0x98(%RBP),%RAX |
0x43b90e KMOVQ %K1,%K3 |
0x43b913 VPXORD %XMM28,%XMM28,%XMM28 |
0x43b919 VGATHERQPD (%RAX,%ZMM4,8),%ZMM28{%K3} |
0x43b920 VPADDD 0x120(%RSP),%YMM0,%YMM25 |
0x43b928 VMOVDQA64 %YMM25,%YMM4 |
0x43b92e VPADDD %YMM0,%YMM2,%YMM4{%K2} |
0x43b934 VPMOVSXDQ %YMM4,%ZMM0 |
0x43b93a VPSUBQ %ZMM8,%ZMM0,%ZMM0 |
0x43b940 KMOVQ %K1,%K3 |
0x43b945 VPXOR %XMM4,%XMM4,%XMM4 |
0x43b949 VGATHERQPD (%RAX,%ZMM0,8),%ZMM4{%K3} |
0x43b950 VPMOVSXDQ %YMM24,%ZMM0 |
0x43b956 VPSUBQ %ZMM8,%ZMM0,%ZMM0 |
0x43b95c KMOVQ %K1,%K3 |
0x43b961 VXORPD %XMM29,%XMM29,%XMM29 |
0x43b967 VGATHERQPD (%RAX,%ZMM0,8),%ZMM29{%K3} |
0x43b96e VXORPD %XMM27,%XMM27,%XMM27 |
0x43b974 VANDPD %ZMM7,%ZMM22,%ZMM0 |
0x43b97a VMOVAPD %ZMM26,%ZMM21{%K1} |
0x43b980 VDIVPD %ZMM21,%ZMM0,%ZMM24 |
0x43b986 VMOVAPD %ZMM28,%ZMM20{%K1} |
0x43b98c VMOVAPD %ZMM4,%ZMM19{%K1} |
0x43b992 VSUBPD %ZMM19,%ZMM20,%ZMM28 |
0x43b998 VMOVAPD %ZMM29,%ZMM18{%K1} |
0x43b99e VSUBPD %ZMM20,%ZMM18,%ZMM26 |
0x43b9a4 VMULPD %ZMM28,%ZMM26,%ZMM0 |
0x43b9aa VCMPPD $0x1,%ZMM0,%ZMM27,%K3{%K1} |
0x43b9b1 KORTESTB %K3,%K3 |
0x43b9b5 JE 43b5c0 |
0x43b9bb VMOVDQA32 %YMM23,%YMM25{%K2} |
0x43b9c1 VMOVUPD (%RDX,%R9,8),%ZMM0{%K3}{z} |
0x43b9c8 VMOVAPD %ZMM0,%ZMM17{%K3} |
0x43b9ce VANDPD %ZMM7,%ZMM28,%ZMM0 |
0x43b9d4 VANDPD %ZMM7,%ZMM26,%ZMM4 |
0x43b9da VSUBPD %ZMM24,%ZMM10,%ZMM23 |
0x43b9e0 VMULPD %ZMM23,%ZMM4,%ZMM23 |
0x43b9e6 VDIVPD %ZMM17,%ZMM23,%ZMM23 |
0x43b9ec VPMOVSXDQ %YMM25,%ZMM25 |
0x43b9f2 VPSUBQ %ZMM8,%ZMM25,%ZMM25 |
0x43b9f8 KMOVQ %K3,%K2 |
0x43b9fd VGATHERQPD (%RDX,%ZMM25,8),%ZMM27{%K2} |
0x43ba04 VMINPD %ZMM4,%ZMM0,%ZMM4 |
0x43ba0a VFMADD213PD %ZMM0,%ZMM24,%ZMM0 |
0x43ba10 VMOVAPD %ZMM27,%ZMM16{%K3} |
0x43ba16 VDIVPD %ZMM16,%ZMM0,%ZMM0 |
0x43ba1c VADDPD %ZMM23,%ZMM0,%ZMM0 |
0x43ba22 VMULPD %ZMM12,%ZMM17,%ZMM23 |
0x43ba28 VMULPD %ZMM0,%ZMM23,%ZMM0 |
0x43ba2e VMINPD %ZMM4,%ZMM0,%ZMM27 |
0x43ba34 VFPCLASSPD $0x56,%ZMM26,%K2 |
0x43ba3b VXORPD %ZMM13,%ZMM27,%ZMM27{%K2} |
0x43ba41 JMP 43b5c0 |
/scratch_na/users/xoserete/qaas_runs/171-419-3245/intel/CloverLeafFC/build/CloverLeafFC/CloverLeaf_ref/kernels/advec_mom_kernel.f90: 81 - 177 |
-------------------------------------------------------------------------------- |
81: IF(mom_sweep.EQ.1)THEN ! x 1 |
[...] |
150: DO k=y_min,y_max+1 |
151: DO j=x_min-1,x_max+1 |
152: IF(node_flux(j,k).LT.0.0)THEN |
[...] |
158: upwind=j-1 |
159: donor=j |
160: downwind=j+1 |
161: dif=upwind |
162: ENDIF |
163: sigma=ABS(node_flux(j,k))/(node_mass_pre(donor,k)) |
164: width=celldx(j) |
165: vdiffuw=vel1(donor,k)-vel1(upwind,k) |
166: vdiffdw=vel1(downwind,k)-vel1(donor,k) |
167: limiter=0.0 |
168: IF(vdiffuw*vdiffdw.GT.0.0)THEN |
169: auw=ABS(vdiffuw) |
170: adw=ABS(vdiffdw) |
171: wind=1.0_8 |
172: IF(vdiffdw.LE.0.0) wind=-1.0_8 |
173: limiter=wind*MIN(width*((2.0_8-sigma)*adw/width+(1.0_8+sigma)*auw/celldx(dif))/6.0_8,auw,adw) |
174: ENDIF |
175: advec_vel_s=vel1(donor,k)+(1.0-sigma)*limiter |
176: mom_flux(j,k)=advec_vel_s*node_flux(j,k) |
177: ENDDO |
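The block below is a minimal, self-contained scalar sketch of the limiter computation on lines 163-176 above, intended only as a reading aid for the vectorized assembly. The scalar names (node_flux_jk, node_mass_pre_donor, celldx_dif, ...) and the sample values are hypothetical stand-ins for one (j,k) point of the kernel's arrays; they are not part of the original code. The divisions by node_mass_pre, width and celldx(dif) are consistent with the three VDIVPD instructions per iteration in the listing (the divide by the constant 6.0_8 appears to have been strength-reduced to a multiply), and the vdiffuw*vdiffdw test corresponds to the VCMPPD/KORTESTB mask check.
program limiter_sketch
  implicit none
  real(8) :: node_flux_jk, node_mass_pre_donor, width, celldx_dif
  real(8) :: vel1_upwind, vel1_donor, vel1_downwind
  real(8) :: sigma, vdiffuw, vdiffdw, limiter, auw, adw, wind, advec_vel_s

  ! Hypothetical sample values standing in for one (j,k) point of the arrays
  node_flux_jk = -0.25_8
  node_mass_pre_donor = 2.0_8
  width = 0.1_8
  celldx_dif = 0.1_8
  vel1_upwind = 1.0_8
  vel1_donor = 1.2_8
  vel1_downwind = 1.5_8

  sigma   = ABS(node_flux_jk)/node_mass_pre_donor       ! division 1 (line 163)
  vdiffuw = vel1_donor - vel1_upwind                     ! line 165
  vdiffdw = vel1_downwind - vel1_donor                   ! line 166
  limiter = 0.0_8
  IF (vdiffuw*vdiffdw .GT. 0.0_8) THEN                   ! mask check (VCMPPD/KORTESTB above)
    auw  = ABS(vdiffuw)
    adw  = ABS(vdiffdw)
    wind = 1.0_8
    IF (vdiffdw .LE. 0.0_8) wind = -1.0_8
    ! divisions 2 and 3: adw/width and auw/celldx_dif (line 173)
    limiter = wind*MIN(width*((2.0_8-sigma)*adw/width + &
              (1.0_8+sigma)*auw/celldx_dif)/6.0_8, auw, adw)
  END IF
  advec_vel_s = vel1_donor + (1.0_8-sigma)*limiter       ! line 175
  PRINT *, 'mom_flux(j,k) = ', advec_vel_s*node_flux_jk  ! line 176
end program limiter_sketch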
Coverage (%) | Name | Source Location | Module |
---|---|---|---|
►100.00+ | __kmp_invoke_microtask | libiomp5.so | |
○ | __kmp_invoke_task_func | libiomp5.so |
Path / |
Metric | Value |
---|---|
CQA speedup if no scalar integer | 1.00 |
CQA speedup if FP arith vectorized | 1.00 |
CQA speedup if fully vectorized | 1.00 |
CQA speedup if no inter-iteration dependency | NA |
CQA speedup if next bottleneck killed | 1.48 |
Bottlenecks | P0, |
Function | advec_mom_kernel_.DIR.OMP.PARALLEL.2 |
Source | advec_mom_kernel.f90:81-81,advec_mom_kernel.f90:150-152,advec_mom_kernel.f90:158-158,advec_mom_kernel.f90:163-170,advec_mom_kernel.f90:173-176 |
Source loop unroll info | NA |
Source loop unroll confidence level | NA |
Unroll/vectorization loop type | NA |
Unroll factor | NA |
CQA cycles | 48.00 |
CQA cycles if no scalar integer | 48.00 |
CQA cycles if FP arith vectorized | 48.00 |
CQA cycles if fully vectorized | 48.00 |
Front-end cycles | 25.17 |
DIV/SQRT cycles | 32.50 |
P0 cycles | 15.00 |
P1 cycles | 20.33 |
P2 cycles | 20.33 |
P3 cycles | 0.50 |
P4 cycles | 32.50 |
P5 cycles | 10.00 |
P6 cycles | 0.50 |
P7 cycles | 0.50 |
P8 cycles | 0.50 |
P9 cycles | 10.00 |
P10 cycles | 20.33 |
P11 cycles | 48.00 |
Inter-iter dependencies cycles | NA |
FE+BE cycles (UFS) | 49.55 - 130.86 |
Stall cycles (UFS) | 25.20 - 106.51 |
Nb insns | 126.00 |
Nb uops | 151.00 |
Nb loads | 27.00 |
Nb stores | 1.00 |
Nb stack references | 13.00 |
FLOP/cycle | 2.83 |
Nb FLOP add-sub | 40.00 |
Nb FLOP mul | 40.00 |
Nb FLOP fma | 16.00 |
Nb FLOP div | 24.00 |
Nb FLOP rcp | 0.00 |
Nb FLOP sqrt | 0.00 |
Nb FLOP rsqrt | 0.00 |
Bytes/cycle | 15.67 |
Bytes prefetched | 0.00 |
Bytes loaded | 688.00 |
Bytes stored | 64.00 |
Stride 0 | NA |
Stride 1 | NA |
Stride n | NA |
Stride unknown | NA |
Stride indirect | NA |
Vectorization ratio all | 81.25 |
Vectorization ratio load | 69.23 |
Vectorization ratio store | 100.00 |
Vectorization ratio mul | 83.33 |
Vectorization ratio add_sub | 83.33 |
Vectorization ratio fma | 100.00 |
Vectorization ratio div_sqrt | 100.00 |
Vectorization ratio other | 80.43 |
Vector-efficiency ratio all | 69.77 |
Vector-efficiency ratio load | 69.23 |
Vector-efficiency ratio store | 100.00 |
Vector-efficiency ratio mul | 85.42 |
Vector-efficiency ratio add_sub | 67.36 |
Vector-efficiency ratio fma | 100.00 |
Vector-efficiency ratio div_sqrt | 100.00 |
Vector-efficiency ratio other | 65.90 |
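Consistency check on the FLOP figures above (assuming the usual CQA convention of counting each FMA as two FLOP): 40 add-sub + 40 mul + 2 x 16 fma + 24 div = 136 FLOP per iteration, and 136 FLOP / 48.00 CQA cycles ≈ 2.83 FLOP/cycle, matching the reported FLOP/cycle value.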
Path / |
Function | advec_mom_kernel_.DIR.OMP.PARALLEL.2 |
Source file and lines | advec_mom_kernel.f90:81-177 |
Module | exec |
nb instructions | 126 |
nb uops | 151 |
loop length | 667 |
used x86 registers | 14 |
used mmx registers | 0 |
used xmm registers | 6 |
used ymm registers | 9 |
used zmm registers | 22 |
nb stack references | 13 |
ADD-SUB / MUL ratio | 1.00 |
micro-operation queue | 25.17 cycles |
front end | 25.17 cycles |
P0 | P1 | P2 | P3 | P4 | P5 | P6 | P7 | P8 | P9 | P10 | P11 | |
---|---|---|---|---|---|---|---|---|---|---|---|---|
uops | 32.50 | 15.00 | 20.33 | 20.33 | 0.50 | 32.50 | 10.00 | 0.50 | 0.50 | 0.50 | 10.00 | 20.33 |
cycles | 32.50 | 15.00 | 20.33 | 20.33 | 0.50 | 32.50 | 10.00 | 0.50 | 0.50 | 0.50 | 10.00 | 20.33 |
Cycles executing div or sqrt instructions | 48.00 |
FE+BE cycles | 49.55-130.86 |
Stall cycles | 25.20-106.51 |
ROB full (events) | 27.69-110.04 |
Front-end | 25.17 |
Dispatch | 32.50 |
DIV/SQRT | 48.00 |
Overall L1 | 48.00 |
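The 48.00 cycles executing div/sqrt instructions, and hence the overall L1 estimate, can be read off the instruction table below: the loop body contains three VDIVPD instructions, each with a reciprocal throughput of 16 cycles, so 3 x 16 = 48 cycles, equal to the CQA cycle estimate. The loop therefore appears bound by the FP divide unit, consistent with the "Bottlenecks | P0" entry above.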
all | 60% |
load | 33% |
store | NA (no store vectorizable/vectorized instructions) |
mul | 0% |
add-sub | 76% |
fma | NA (no fma vectorizable/vectorized instructions) |
other | 59% |
all | 100% |
load | 100% |
store | 100% |
mul | 100% |
add-sub | 100% |
fma | 100% |
div/sqrt | 100% |
other | 100% |
all | 81% |
load | 69% |
store | 100% |
mul | 83% |
add-sub | 83% |
fma | 100% |
div/sqrt | 100% |
other | 80% |
all | 40% |
load | 33% |
store | NA (no store vectorizable/vectorized instructions) |
mul | 12% |
add-sub | 54% |
fma | NA (no fma vectorizable/vectorized instructions) |
other | 35% |
all | 96% |
load | 100% |
store | 100% |
mul | 100% |
add-sub | 100% |
fma | 100% |
div/sqrt | 100% |
other | 93% |
all | 69% |
load | 69% |
store | 100% |
mul | 85% |
add-sub | 67% |
fma | 100% |
div/sqrt | 100% |
other | 65% |
Instruction | Nb FU | P0 | P1 | P2 | P3 | P4 | P5 | P6 | P7 | P8 | P9 | P10 | P11 | Latency | Recip. throughput |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
VMOVAPD %ZMM27,%ZMM0{%K3}{z} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VSUBPD %ZMM24,%ZMM9,%ZMM4 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VFMADD213PD %ZMM20,%ZMM0,%ZMM4 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VMULPD %ZMM22,%ZMM4,%ZMM0 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
IMUL %R15,%R14 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
ADD 0x48(%RBP),%R14 | 1 | 0.20 | 0.20 | 0.33 | 0.33 | 0 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0.33 | 1 | 0.33 |
VMOVUPD %ZMM0,(%R14,%R9,8){%K1} | 1 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0.50 | 0.50 | 0.50 | 0 | 0 | 0-1 | 1 |
ADD %R15,%R12 | 1 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0 | 1 | 0.20 |
ADD %RSI,%R8 | 1 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0 | 1 | 0.20 |
CMP 0x18(%RSP),%R10 | 1 | 0.20 | 0.20 | 0.33 | 0.33 | 0 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0.33 | 1 | 0.33 |
LEA 0x1(%R10),%R10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
JE 4385a9 <advec_mom_kernel_mod_mp_advec_mom_kernel_.DIR.OMP.PARALLEL.2+0x7a9> | 1 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 |
MOV %RBX,%RCX | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.17 |
AND $-0x8,%RCX | 1 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0 | 1-2 | 0.20 |
JE 43b840 <advec_mom_kernel_mod_mp_advec_mom_kernel_.DIR.OMP.PARALLEL.2+0x3a40> | 1 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 |
MOV %RBX,%RDI | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.17 |
MOV %R15,%RSI | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.17 |
LEA -0x1(%RCX),%R9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
MOV 0x70(%RSP),%RAX | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.33 |
LEA (%RAX,%R10,1),%R14 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
SUB 0x20(%RSP),%R14 | 1 | 0.20 | 0.20 | 0.33 | 0.33 | 0 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0.33 | 1 | 0.33 |
MOV 0x28(%RSP),%R15 | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.33 |
IMUL %R14,%R15 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
IMUL 0x10(%RSP),%R14 | 1 | 0 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 3 | 1 |
ADD 0x50(%RBP),%R15 | 1 | 0.20 | 0.20 | 0.33 | 0.33 | 0 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0.33 | 1 | 0.33 |
ADD 0x98(%RBP),%R14 | 1 | 0.20 | 0.20 | 0.33 | 0.33 | 0 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0.33 | 1 | 0.33 |
XOR %EAX,%EAX | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
JMP 43b673 <advec_mom_kernel_mod_mp_advec_mom_kernel_.DIR.OMP.PARALLEL.2+0x3873> | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5.84 |
MOV %RDI,%RBX | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.17 |
CMP %RCX,%RDI | 1 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0 | 1 | 0.20 |
MOV %RSI,%R15 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.17 |
MOV 0x38(%RSP),%RSI | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.33 |
JE 43b5e7 <advec_mom_kernel_mod_mp_advec_mom_kernel_.DIR.OMP.PARALLEL.2+0x37e7> | 1 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 |
JMP 43b842 <advec_mom_kernel_mod_mp_advec_mom_kernel_.DIR.OMP.PARALLEL.2+0x3a42> | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5.84 |
XOR %ECX,%ECX | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
MOV %RBX,%RAX | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.17 |
SUB %RCX,%RAX | 1 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0 | 1 | 0.20 |
VPBROADCASTQ %RAX,%ZMM23 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPCMPNLEUQ 0xcc467(%RIP),%ZMM23,%K1 | |||||||||||||||
KORTESTB %K1,%K1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
JE 43b5e7 <advec_mom_kernel_mod_mp_advec_mom_kernel_.DIR.OMP.PARALLEL.2+0x37e7> | 1 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 |
MOV 0x70(%RSP),%RAX | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.33 |
LEA (%RAX,%R10,1),%R14 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
SUB 0x20(%RSP),%R14 | 1 | 0.20 | 0.20 | 0.33 | 0.33 | 0 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0.33 | 1 | 0.33 |
MOV %RSI,%RAX | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.17 |
IMUL %R14,%RAX | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
ADD 0x60(%RBP),%RAX | 1 | 0.20 | 0.20 | 0.33 | 0.33 | 0 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0.33 | 1 | 0.33 |
MOV 0x30(%RSP),%RDI | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.33 |
LEA (%RDI,%RCX,1),%R9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
SUB 0x40(%RSP),%R9 | 1 | 0.20 | 0.20 | 0.33 | 0.33 | 0 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0.33 | 1 | 0.33 |
VMOVUPD (%RAX,%R9,8),%ZMM0{%K1}{z} | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0-1 | 0.50 |
VMOVQ %RCX,%XMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
VPADDQ %XMM4,%XMM3,%XMM4 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VMOVAPD %ZMM0,%ZMM22{%K1} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VFPCLASSPD $0x50,%ZMM22,%K2{%K1} | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPBROADCASTD %ECX,%YMM0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPADDD %YMM5,%YMM0,%YMM0 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VPBROADCASTQ %XMM4,%ZMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPMOVQD %ZMM4,%YMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPADDD %YMM6,%YMM4,%YMM4 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VPADDD %YMM0,%YMM1,%YMM23 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VPBLENDMD %YMM4,%YMM23,%YMM24{%K2} | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VMOVDQA32 %YMM23,%YMM4{%K2} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
MOV 0x28(%RSP),%RAX | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.33 |
IMUL %R14,%RAX | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPMOVSXDQ %YMM4,%ZMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPSUBQ %ZMM8,%ZMM4,%ZMM4 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.50 |
ADD 0x50(%RBP),%RAX | 1 | 0.20 | 0.20 | 0.33 | 0.33 | 0 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0.33 | 1 | 0.33 |
KMOVQ %K1,%K3 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
VPXORD %XMM26,%XMM26,%XMM26 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.33 |
VGATHERQPD (%RAX,%ZMM4,8),%ZMM26{%K3} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
MOV 0x10(%RSP),%RAX | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.33 |
IMUL %R14,%RAX | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
ADD 0x98(%RBP),%RAX | 1 | 0.20 | 0.20 | 0.33 | 0.33 | 0 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0.33 | 1 | 0.33 |
KMOVQ %K1,%K3 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
VPXORD %XMM28,%XMM28,%XMM28 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.33 |
VGATHERQPD (%RAX,%ZMM4,8),%ZMM28{%K3} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VPADDD 0x120(%RSP),%YMM0,%YMM25 | 1 | 0.33 | 0.33 | 0.33 | 0.33 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.40 |
VMOVDQA64 %YMM25,%YMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VPADDD %YMM0,%YMM2,%YMM4{%K2} | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VPMOVSXDQ %YMM4,%ZMM0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPSUBQ %ZMM8,%ZMM0,%ZMM0 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.50 |
KMOVQ %K1,%K3 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
VPXOR %XMM4,%XMM4,%XMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
VGATHERQPD (%RAX,%ZMM0,8),%ZMM4{%K3} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VPMOVSXDQ %YMM24,%ZMM0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPSUBQ %ZMM8,%ZMM0,%ZMM0 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.50 |
KMOVQ %K1,%K3 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
VXORPD %XMM29,%XMM29,%XMM29 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
VGATHERQPD (%RAX,%ZMM0,8),%ZMM29{%K3} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VXORPD %XMM27,%XMM27,%XMM27 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
VANDPD %ZMM7,%ZMM22,%ZMM0 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VMOVAPD %ZMM26,%ZMM21{%K1} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VDIVPD %ZMM21,%ZMM0,%ZMM24 | 3 | 2.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 22-24 | 16 |
VMOVAPD %ZMM28,%ZMM20{%K1} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VMOVAPD %ZMM4,%ZMM19{%K1} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VSUBPD %ZMM19,%ZMM20,%ZMM28 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VMOVAPD %ZMM29,%ZMM18{%K1} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VSUBPD %ZMM20,%ZMM18,%ZMM26 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VMULPD %ZMM28,%ZMM26,%ZMM0 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VCMPPD $0x1,%ZMM0,%ZMM27,%K3{%K1} | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
KORTESTB %K3,%K3 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
JE 43b5c0 <advec_mom_kernel_mod_mp_advec_mom_kernel_.DIR.OMP.PARALLEL.2+0x37c0> | 1 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 |
VMOVDQA32 %YMM23,%YMM25{%K2} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VMOVUPD (%RDX,%R9,8),%ZMM0{%K3}{z} | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0-1 | 0.50 |
VMOVAPD %ZMM0,%ZMM17{%K3} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VANDPD %ZMM7,%ZMM28,%ZMM0 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VANDPD %ZMM7,%ZMM26,%ZMM4 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VSUBPD %ZMM24,%ZMM10,%ZMM23 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VMULPD %ZMM23,%ZMM4,%ZMM23 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VDIVPD %ZMM17,%ZMM23,%ZMM23 | 3 | 2.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 22-24 | 16 |
VPMOVSXDQ %YMM25,%ZMM25 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPSUBQ %ZMM8,%ZMM25,%ZMM25 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.50 |
KMOVQ %K3,%K2 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
VGATHERQPD (%RDX,%ZMM25,8),%ZMM27{%K2} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VMINPD %ZMM4,%ZMM0,%ZMM4 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VFMADD213PD %ZMM0,%ZMM24,%ZMM0 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VMOVAPD %ZMM27,%ZMM16{%K3} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VDIVPD %ZMM16,%ZMM0,%ZMM0 | 3 | 2.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 22-24 | 16 |
VADDPD %ZMM23,%ZMM0,%ZMM0 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VMULPD %ZMM12,%ZMM17,%ZMM23 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VMULPD %ZMM0,%ZMM23,%ZMM0 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VMINPD %ZMM4,%ZMM0,%ZMM27 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VFPCLASSPD $0x56,%ZMM26,%K2 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VXORPD %ZMM13,%ZMM27,%ZMM27{%K2} | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.50 |
JMP 43b5c0 <advec_mom_kernel_mod_mp_advec_mom_kernel_.DIR.OMP.PARALLEL.2+0x37c0> | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.08 |