Loop Id: 95 | Module: exec | Source: timestep.c:74-78 | Coverage: 2.11% |
---|
0x40eb40 VMOVUPD 0x80(%R10,%R11,1),%YMM7 [1] |
0x40eb4a VMOVUPD 0x20(%R10,%R11,1),%YMM8 [1] |
0x40eb51 VMOVUPD 0x80(%R9,%R11,1),%YMM9 [2] |
0x40eb5b VMOVUPD 0x20(%R9,%R11,1),%YMM10 [2] |
0x40eb62 VMOVUPD 0x10(%R10,%R11,1),%XMM11 [1] |
0x40eb69 VMOVUPD 0x70(%R10,%R11,1),%XMM12 [1] |
0x40eb70 VBLENDPD $0x3,(%R10,%R11,1),%YMM8,%YMM13 [1] |
0x40eb77 VBLENDPD $0x3,0x60(%R10,%R11,1),%YMM7,%YMM14 [1] |
0x40eb7f VBLENDPD $0x3,(%R9,%R11,1),%YMM10,%YMM15 [2] |
0x40eb86 VMOVUPD 0x10(%R9,%R11,1),%XMM16 [2] |
0x40eb8e VBLENDPD $0x3,0x60(%R9,%R11,1),%YMM9,%YMM2 [2] |
0x40eb96 VMOVUPD 0x20(%R10,%R11,1),%XMM3 [1] |
0x40eb9d VMOVUPD 0x80(%R10,%R11,1),%XMM4 [1] |
0x40eba7 VMOVUPD 0x20(%R9,%R11,1),%XMM5 [2] |
0x40ebae VINSERTF128 $0x1,0x40(%R10,%R11,1),%YMM11,%YMM11 [1] |
0x40ebb6 VBLENDPD $0xc,0x40(%R10,%R11,1),%YMM3,%YMM3 [1] |
0x40ebbe VBLENDPD $0xa,%YMM3,%YMM11,%YMM3 |
0x40ebc4 VBLENDPD $0xa,%YMM11,%YMM13,%YMM11 |
0x40ebca VSHUFPD $0x5,%YMM8,%YMM13,%YMM8 |
0x40ebd0 VINSERTF128 $0x1,0xa0(%R10,%R11,1),%YMM12,%YMM12 [1] |
0x40ebdb VBLENDPD $0xc,0xa0(%R10,%R11,1),%YMM4,%YMM4 [1] |
0x40ebe6 VBLENDPD $0xa,%YMM4,%YMM12,%YMM4 |
0x40ebec VBLENDPD $0xa,%YMM12,%YMM14,%YMM12 |
0x40ebf2 VSHUFPD $0x5,%YMM7,%YMM14,%YMM13 |
0x40ebf7 VINSERTF32X4 $0x1,0x40(%R9,%R11,1),%YMM16,%YMM14 [2] |
0x40ec00 VBLENDPD $0xc,0x40(%R9,%R11,1),%YMM5,%YMM5 [2] |
0x40ec08 VMOVUPD 0x70(%R9,%R11,1),%XMM16 [2] |
0x40ec10 VBLENDPD $0xa,%YMM5,%YMM14,%YMM7 |
0x40ec16 VBLENDPD $0xa,%YMM14,%YMM15,%YMM5 |
0x40ec1c VSHUFPD $0x5,%YMM10,%YMM15,%YMM10 |
0x40ec22 VBROADCASTSD 0x50(%R10,%R11,1),%YMM14 [1] |
0x40ec29 VBLENDPD $0x8,%YMM14,%YMM8,%YMM14 |
0x40ec2f VBROADCASTSD 0xb0(%R10,%R11,1),%YMM8 [1] |
0x40ec39 VBLENDPD $0x8,%YMM8,%YMM13,%YMM13 |
0x40ec3f VBROADCASTSD 0x50(%R9,%R11,1),%YMM8 [2] |
0x40ec46 VBLENDPD $0x8,%YMM8,%YMM10,%YMM10 |
0x40ec4c VBROADCASTSD 0xb0(%R9,%R11,1),%YMM8 [2] |
0x40ec56 VSHUFPD $0x5,%YMM9,%YMM2,%YMM9 |
0x40ec5c VBLENDPD $0x8,%YMM8,%YMM9,%YMM9 |
0x40ec62 VINSERTF32X4 $0x1,0xa0(%R9,%R11,1),%YMM16,%YMM15 [2] |
0x40ec6b VBLENDPD $0xa,%YMM15,%YMM2,%YMM8 |
0x40ec71 VFMADD231PD %YMM12,%YMM1,%YMM8 |
0x40ec76 VFMADD231PD %YMM11,%YMM1,%YMM5 |
0x40ec7b VFMADD231PD %YMM13,%YMM1,%YMM9 |
0x40ec80 VFMADD231PD %YMM14,%YMM1,%YMM10 |
0x40ec85 VMOVUPD 0x80(%R9,%R11,1),%XMM2 [2] |
0x40ec8f VBLENDPD $0xc,0xa0(%R9,%R11,1),%YMM2,%YMM2 [2] |
0x40ec9a VBLENDPD $0xa,%YMM2,%YMM15,%YMM2 |
0x40eca0 VFMADD231PD %YMM4,%YMM1,%YMM2 |
0x40eca5 VFMADD231PD %YMM3,%YMM1,%YMM7 |
0x40ecaa VMOVAPD %YMM5,%YMM3 |
0x40ecae VPERMT2PD %YMM10,%YMM17,%YMM3 |
0x40ecb4 VMOVAPD %YMM9,%YMM4 |
0x40ecb8 VPERMT2PD %YMM8,%YMM18,%YMM4 |
0x40ecbe VMOVAPD %YMM9,%YMM11 |
0x40ecc3 VPERMT2PD %YMM8,%YMM19,%YMM11 |
0x40ecc9 VPERMT2PD %YMM9,%YMM17,%YMM8 |
0x40eccf VMOVAPD %YMM10,%YMM9 |
0x40ecd4 VPERMT2PD %YMM5,%YMM18,%YMM9 |
0x40ecda VPERMT2PD %YMM5,%YMM19,%YMM10 |
0x40ece0 VPERMT2PD %YMM2,%YMM20,%YMM8 |
0x40ece6 VBLENDPD $0x2,%YMM2,%YMM11,%YMM5 |
0x40ecec VPERMT2PD %YMM4,%YMM6,%YMM2 |
0x40ecf2 VBLENDPD $0x2,%YMM7,%YMM10,%YMM4 |
0x40ecf8 VPERMT2PD %YMM7,%YMM20,%YMM3 |
0x40ecfe VPERMT2PD %YMM9,%YMM6,%YMM7 |
0x40ed04 VMOVUPD %YMM5,0x80(%R9,%R11,1) [2] |
0x40ed0e VMOVUPD %YMM4,0x20(%R9,%R11,1) [2] |
0x40ed15 VMOVUPD %YMM7,0x40(%R9,%R11,1) [2] |
0x40ed1c VMOVUPD %YMM2,0xa0(%R9,%R11,1) [2] |
0x40ed26 VMOVUPD %YMM8,0x60(%R9,%R11,1) [2] |
0x40ed2d VMOVUPD %YMM3,(%R9,%R11,1) [2] |
0x40ed33 ADD $0x8,%R13D |
0x40ed37 ADD $0xc0,%R11 |
0x40ed3e CMP %R12D,%R13D |
0x40ed41 JLE 40eb40 |
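A few sanity checks can be read straight off the listing (deductions from the assembly, not tool output): the pointer increment ADD $0xc0,%R11 advances by 0xc0 = 192 bytes = 8 atoms x 3 doubles x 8 bytes, matching the unroll factor of 8 reported below; ADD $0x8,%R13D bumps the atom counter by 8 and CMP/JLE closes the loop; all six YMM stores go through the %R9 base while loads use both %R9 and %R10, which suggests %R9 addresses the momenta p (read-modify-write) and %R10 the forces f (read-only).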
/scratch_na/users/xoserete/qaas_runs/171-172-2581/intel/CoMD/build/CoMD/CoMD/src-openmp/timestep.c: 74 - 78 |
-------------------------------------------------------------------------------- |
74: for (int iOff=MAXATOMS*iBox,ii=0; ii<s->boxes->nAtoms[iBox]; ii++,iOff++) |
75: { |
76: s->atoms->p[iOff][0] += dt*s->atoms->f[iOff][0]; |
77: s->atoms->p[iOff][1] += dt*s->atoms->f[iOff][1]; |
78: s->atoms->p[iOff][2] += dt*s->atoms->f[iOff][2]; |
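The low vector-efficiency figures below stem largely from the array-of-structures layout: p and f are arrays of 3-double tuples, so each 256-bit lane has to be assembled with the blend/insert/permute sequence shown above. One option would be a structure-of-arrays layout; here is a minimal sketch assuming hypothetical px/py/pz and fx/fy/fz component arrays (not CoMD's actual data structures):

/* Sketch of a structure-of-arrays momentum update (hypothetical layout).
 * With separate component arrays the compiler can emit contiguous vector
 * loads and stores, with no cross-lane blend/permute assembly. */
static void advance_velocity_soa(int n, double dt,
                                 double *px, double *py, double *pz,
                                 const double *fx, const double *fy, const double *fz)
{
   for (int i = 0; i < n; i++)
   {
      px[i] += dt * fx[i];   /* was s->atoms->p[iOff][0] += dt*s->atoms->f[iOff][0]; */
      py[i] += dt * fy[i];
      pz[i] += dt * fz[i];
   }
}

Whether such a layout change pays off depends on the rest of CoMD, which uses the 3-component tuples throughout; the CQA estimate of 2.09x "if fully vectorized" is an upper bound for this loop alone.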
Path / |
Metric | Value |
---|---|
CQA speedup if no scalar integer | 1.00 |
CQA speedup if FP arith vectorized | 1.07 |
CQA speedup if fully vectorized | 2.09 |
CQA speedup if no inter-iteration dependency | NA |
CQA speedup if next bottleneck killed | 1.06 |
Bottlenecks | P0, P1, P5 |
Function | advanceVelocity.extracted |
Source | timestep.c:74-78 |
Source loop unroll info | unrolled by 8 |
Source loop unroll confidence level | high |
Unroll/vectorization loop type | main |
Unroll factor | 8 |
CQA cycles | 15.33 |
CQA cycles if no scalar integer | 15.33 |
CQA cycles if FP arith vectorized | 14.33 |
CQA cycles if fully vectorized | 7.33 |
Front-end cycles | 14.50 |
DIV/SQRT cycles | 15.33 |
P0 cycles | 15.33 |
P1 cycles | 9.33 |
P2 cycles | 9.33 |
P3 cycles | 3.00 |
P4 cycles | 15.33 |
P5 cycles | 1.60 |
P6 cycles | 3.00 |
P7 cycles | 3.00 |
P8 cycles | 3.00 |
P9 cycles | 1.40 |
P10 cycles | 9.33 |
P11 cycles | 0.00 |
Inter-iter dependencies cycles | 1 |
FE+BE cycles (UFS) | 15.69 |
Stall cycles (UFS) | 0.53 |
Nb insns | 76.00 |
Nb uops | 87.00 |
Nb loads | 28.00 |
Nb stores | 6.00 |
Nb stack references | 0.00 |
FLOP/cycle | 3.13 |
Nb FLOP add-sub | 0.00 |
Nb FLOP mul | 0.00 |
Nb FLOP fma | 24.00 |
Nb FLOP div | 0.00 |
Nb FLOP rcp | 0.00 |
Nb FLOP sqrt | 0.00 |
Nb FLOP rsqrt | 0.00 |
Bytes/cycle | 52.17 |
Bytes prefetched | 0.00 |
Bytes loaded | 608.00 |
Bytes stored | 192.00 |
Stride 0 | 0.00 |
Stride 1 | 0.00 |
Stride n | 1.00 |
Stride unknown | 1.00 |
Stride indirect | 0.00 |
Vectorization ratio all | 94.44 |
Vectorization ratio load | 85.71 |
Vectorization ratio store | 100.00 |
Vectorization ratio mul | NA |
Vectorization ratio add_sub | NA |
Vectorization ratio fma | 100.00 |
Vectorization ratio div_sqrt | NA |
Vectorization ratio other | 91.67 |
Vector-efficiency ratio all | 43.75 |
Vector-efficiency ratio load | 33.93 |
Vector-efficiency ratio store | 50.00 |
Vector-efficiency ratio mul | NA |
Vector-efficiency ratio add_sub | NA |
Vector-efficiency ratio fma | 50.00 |
Vector-efficiency ratio div_sqrt | NA |
Vector-efficiency ratio other | 44.79 |
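Two of the derived metrics above can be reproduced from the raw counts, assuming the tool counts each FMA as two FLOPs (which the figures suggest):
FLOP/cycle = 24 FMA x 2 FLOP / 15.33 CQA cycles = 48 / 15.33 ≈ 3.13
Bytes/cycle = (608 bytes loaded + 192 bytes stored) / 15.33 CQA cycles = 800 / 15.33 ≈ 52.17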
Path / |
Function | advanceVelocity.extracted |
Source file and lines | timestep.c:74-78 |
Module | exec |
nb instructions | 76 |
nb uops | 87 |
loop length | 519 |
used x86 registers | 5 |
used mmx registers | 0 |
used xmm registers | 7 |
used ymm registers | 20 |
used zmm registers | 0 |
nb stack references | 0 |
micro-operation queue | 14.50 cycles |
front end | 14.50 cycles |
 | P0 | P1 | P2 | P3 | P4 | P5 | P6 | P7 | P8 | P9 | P10 | P11 |
---|---|---|---|---|---|---|---|---|---|---|---|---|
uops | 15.33 | 15.33 | 9.33 | 9.33 | 3.00 | 15.33 | 1.60 | 3.00 | 3.00 | 3.00 | 1.40 | 9.33 |
cycles | 15.33 | 15.33 | 9.33 | 9.33 | 3.00 | 15.33 | 1.60 | 3.00 | 3.00 | 3.00 | 1.40 | 9.33 |
Cycles executing div or sqrt instructions | NA |
Longest recurrence chain latency (RecMII) | 1.00 |
FE+BE cycles | 15.69 |
Stall cycles | 0.53 |
RS full (events) | 1.49 |
Front-end | 14.50 |
Dispatch | 15.33 |
Data deps. | 1.00 |
Overall L1 | 15.33 |
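The headline estimate is consistent with these components: Overall L1 appears to be taken as max(Front-end, Dispatch, Data deps.) = max(14.50, 15.33, 1.00) = 15.33 cycles, i.e. the CQA cycles figure above.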
Vectorization ratios
all | 94% |
load | 85% |
store | 100% |
mul | NA (no mul vectorizable/vectorized instructions) |
add-sub | NA (no add-sub vectorizable/vectorized instructions) |
fma | 100% |
div/sqrt | NA (no div/sqrt vectorizable/vectorized instructions) |
other | 91% |
Vector-efficiency ratios
all | 43% |
load | 33% |
store | 50% |
mul | NA (no mul vectorizable/vectorized instructions) |
add-sub | NA (no add-sub vectorizable/vectorized instructions) |
fma | 50% |
div/sqrt | NA (no div/sqrt vectorizable/vectorized instructions) |
other | 44% |
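The 50% efficiency entries are what one would expect if the reference vector width is 512 bits: the EVEX-encoded instructions (VPERMT2PD, XMM16/YMM16-20 operands) indicate an AVX-512 target, yet only YMM registers are used, so fma/store efficiency = 256/512 = 50%. The lower load efficiency (~34%) reflects the mix of 256-bit, 128-bit and 64-bit (broadcast) memory operands in the listing.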
Instruction | Nb FU | P0 | P1 | P2 | P3 | P4 | P5 | P6 | P7 | P8 | P9 | P10 | P11 | Latency | Recip. throughput |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
VMOVUPD 0x80(%R10,%R11,1),%YMM7 | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0-1 | 0.33 |
VMOVUPD 0x20(%R10,%R11,1),%YMM8 | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0-1 | 0.33 |
VMOVUPD 0x80(%R9,%R11,1),%YMM9 | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0-1 | 0.33 |
VMOVUPD 0x20(%R9,%R11,1),%YMM10 | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0-1 | 0.33 |
VMOVUPD 0x10(%R10,%R11,1),%XMM11 | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0-1 | 0.33 |
VMOVUPD 0x70(%R10,%R11,1),%XMM12 | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0-1 | 0.33 |
VBLENDPD $0x3,(%R10,%R11,1),%YMM8,%YMM13 | 2 | 0.33 | 0.33 | 0.33 | 0.33 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.40 |
VBLENDPD $0x3,0x60(%R10,%R11,1),%YMM7,%YMM14 | 2 | 0.33 | 0.33 | 0.33 | 0.33 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.40 |
VBLENDPD $0x3,(%R9,%R11,1),%YMM10,%YMM15 | 2 | 0.33 | 0.33 | 0.33 | 0.33 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.40 |
VMOVUPD 0x10(%R9,%R11,1),%XMM16 | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0-1 | 0.33 |
VBLENDPD $0x3,0x60(%R9,%R11,1),%YMM9,%YMM2 | 2 | 0.33 | 0.33 | 0.33 | 0.33 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.40 |
VMOVUPD 0x20(%R10,%R11,1),%XMM3 | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0-1 | 0.33 |
VMOVUPD 0x80(%R10,%R11,1),%XMM4 | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0-1 | 0.33 |
VMOVUPD 0x20(%R9,%R11,1),%XMM5 | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0-1 | 0.33 |
VINSERTF128 $0x1,0x40(%R10,%R11,1),%YMM11,%YMM11 | 2 | 0.33 | 0.33 | 0.33 | 0.33 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0.33 | 3 | 0.33 |
VBLENDPD $0xc,0x40(%R10,%R11,1),%YMM3,%YMM3 | 2 | 0.33 | 0.33 | 0.33 | 0.33 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.40 |
VBLENDPD $0xa,%YMM3,%YMM11,%YMM3 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VBLENDPD $0xa,%YMM11,%YMM13,%YMM11 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VSHUFPD $0x5,%YMM8,%YMM13,%YMM8 | 1 | 0 | 0.50 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VINSERTF128 $0x1,0xa0(%R10,%R11,1),%YMM12,%YMM12 | 2 | 0.33 | 0.33 | 0.33 | 0.33 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0.33 | 3 | 0.33 |
VBLENDPD $0xc,0xa0(%R10,%R11,1),%YMM4,%YMM4 | 2 | 0.33 | 0.33 | 0.33 | 0.33 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.40 |
VBLENDPD $0xa,%YMM4,%YMM12,%YMM4 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VBLENDPD $0xa,%YMM12,%YMM14,%YMM12 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VSHUFPD $0x5,%YMM7,%YMM14,%YMM13 | 1 | 0 | 0.50 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VINSERTF32X4 $0x1,0x40(%R9,%R11,1),%YMM16,%YMM14 | 2 | 0.33 | 0.33 | 0.33 | 0.33 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0.33 | 3 | 0.33 |
VBLENDPD $0xc,0x40(%R9,%R11,1),%YMM5,%YMM5 | 2 | 0.33 | 0.33 | 0.33 | 0.33 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.40 |
VMOVUPD 0x70(%R9,%R11,1),%XMM16 | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0-1 | 0.33 |
VBLENDPD $0xa,%YMM5,%YMM14,%YMM7 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VBLENDPD $0xa,%YMM14,%YMM15,%YMM5 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VSHUFPD $0x5,%YMM10,%YMM15,%YMM10 | 1 | 0 | 0.50 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VBROADCASTSD 0x50(%R10,%R11,1),%YMM14 | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 3 | 0.33 |
VBLENDPD $0x8,%YMM14,%YMM8,%YMM14 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VBROADCASTSD 0xb0(%R10,%R11,1),%YMM8 | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 3 | 0.33 |
VBLENDPD $0x8,%YMM8,%YMM13,%YMM13 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VBROADCASTSD 0x50(%R9,%R11,1),%YMM8 | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 3 | 0.33 |
VBLENDPD $0x8,%YMM8,%YMM10,%YMM10 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VBROADCASTSD 0xb0(%R9,%R11,1),%YMM8 | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 3 | 0.33 |
VSHUFPD $0x5,%YMM9,%YMM2,%YMM9 | 1 | 0 | 0.50 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VBLENDPD $0x8,%YMM8,%YMM9,%YMM9 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VINSERTF32X4 $0x1,0xa0(%R9,%R11,1),%YMM16,%YMM15 | 2 | 0.33 | 0.33 | 0.33 | 0.33 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0.33 | 3 | 0.33 |
VBLENDPD $0xa,%YMM15,%YMM2,%YMM8 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VFMADD231PD %YMM12,%YMM1,%YMM8 | 1 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VFMADD231PD %YMM11,%YMM1,%YMM5 | 1 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VFMADD231PD %YMM13,%YMM1,%YMM9 | 1 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VFMADD231PD %YMM14,%YMM1,%YMM10 | 1 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VMOVUPD 0x80(%R9,%R11,1),%XMM2 | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0-1 | 0.33 |
VBLENDPD $0xc,0xa0(%R9,%R11,1),%YMM2,%YMM2 | 2 | 0.33 | 0.33 | 0.33 | 0.33 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.40 |
VBLENDPD $0xa,%YMM2,%YMM15,%YMM2 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VFMADD231PD %YMM4,%YMM1,%YMM2 | 1 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VFMADD231PD %YMM3,%YMM1,%YMM7 | 1 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VMOVAPD %YMM5,%YMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VPERMT2PD %YMM10,%YMM17,%YMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VMOVAPD %YMM9,%YMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VPERMT2PD %YMM8,%YMM18,%YMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VMOVAPD %YMM9,%YMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VPERMT2PD %YMM8,%YMM19,%YMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPERMT2PD %YMM9,%YMM17,%YMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VMOVAPD %YMM10,%YMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VPERMT2PD %YMM5,%YMM18,%YMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPERMT2PD %YMM5,%YMM19,%YMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPERMT2PD %YMM2,%YMM20,%YMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VBLENDPD $0x2,%YMM2,%YMM11,%YMM5 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VPERMT2PD %YMM4,%YMM6,%YMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VBLENDPD $0x2,%YMM7,%YMM10,%YMM4 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VPERMT2PD %YMM7,%YMM20,%YMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPERMT2PD %YMM9,%YMM6,%YMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VMOVUPD %YMM5,0x80(%R9,%R11,1) | 1 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0.50 | 0.50 | 0.50 | 0 | 0 | 0-1 | 0.50 |
VMOVUPD %YMM4,0x20(%R9,%R11,1) | 1 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0.50 | 0.50 | 0.50 | 0 | 0 | 0-1 | 0.50 |
VMOVUPD %YMM7,0x40(%R9,%R11,1) | 1 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0.50 | 0.50 | 0.50 | 0 | 0 | 0-1 | 0.50 |
VMOVUPD %YMM2,0xa0(%R9,%R11,1) | 1 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0.50 | 0.50 | 0.50 | 0 | 0 | 0-1 | 0.50 |
VMOVUPD %YMM8,0x60(%R9,%R11,1) | 1 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0.50 | 0.50 | 0.50 | 0 | 0 | 0-1 | 0.50 |
VMOVUPD %YMM3,(%R9,%R11,1) | 1 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0.50 | 0.50 | 0.50 | 0 | 0 | 0-1 | 0.50 |
ADD $0x8,%R13D | 1 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0 | 1 | 0.20 |
ADD $0xc0,%R11 | 1 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0 | 1 | 0.20 |
CMP %R12D,%R13D | 1 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0 | 1 | 0.20 |
JLE 40eb40 <advanceVelocity.extracted+0x1d0> | 1 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 |