Loop Id: 170 | Module: exec | Source: advec_mom_kernel.f90:81-177 [...] | Coverage: 0.01% |
---|
0x435900 VMOVAPD %ZMM28,%ZMM0{%K3}{z} |
0x435906 VSUBPD %ZMM25,%ZMM9,%ZMM5 |
0x43590c VFMADD213PD %ZMM21,%ZMM0,%ZMM5 |
0x435912 VMULPD %ZMM23,%ZMM5,%ZMM0 |
0x435918 IMUL %RBX,%R12 |
0x43591c ADD 0x148(%RSP),%R12 |
0x435924 VMOVUPD %ZMM0,(%R12,%R13,8){%K1} |
0x43592b ADD %RBX,%R15 |
0x43592e ADD %RDX,%R9 |
0x435931 CMP %RAX,%RSI |
0x435934 LEA 0x1(%RSI),%RSI |
0x435938 JE 432627 |
0x43593e TEST %R10,%R10 |
0x435941 JE 435b80 |
0x435947 MOV 0x40(%RSP),%R8 |
0x43594c LEA (%R8,%RSI,1),%R12 |
0x435950 SUB 0x2c0(%RSP),%R12 |
0x435958 MOV 0x78(%RSP),%R13 |
0x43595d IMUL %R12,%R13 |
0x435961 IMUL 0x90(%RSP),%R12 |
0x43596a ADD 0xe0(%RSP),%R13 |
0x435972 ADD 0x130(%RSP),%R12 |
0x43597a XOR %R8D,%R8D |
0x43597d JMP 4359b3 |
(171) 0x435980 VCMPPD $0x1,%ZMM28,%ZMM11,%K1 |
(171) 0x435987 VMOVAPD %ZMM7,%ZMM0{%K1}{z} |
(171) 0x43598d VSUBPD %ZMM26,%ZMM9,%ZMM5 |
(171) 0x435993 VFMADD213PD %ZMM25,%ZMM0,%ZMM5 |
(171) 0x435999 VMULPD %ZMM24,%ZMM5,%ZMM0 |
(171) 0x43599f VMOVUPD %ZMM0,(%R15,%R8,8) |
(171) 0x4359a6 ADD $0x8,%R8 |
(171) 0x4359aa CMP %R10,%R8 |
(171) 0x4359ad JGE 435b40 |
(171) 0x4359b3 VMOVUPD (%R9,%R8,8),%ZMM24 |
(171) 0x4359ba VFPCLASSPD $0x50,%ZMM24,%K1 |
(171) 0x4359c1 LEA (%R11,%R8,1),%R14D |
(171) 0x4359c5 VPBROADCASTD %R14D,%YMM7 |
(171) 0x4359cb VPADDD %YMM6,%YMM7,%YMM27 |
(171) 0x4359d1 VPADDD %YMM15,%YMM7,%YMM25 |
(171) 0x4359d7 VPBLENDMD %YMM25,%YMM27,%YMM28{%K1} |
(171) 0x4359dd VMOVDQA32 %YMM27,%YMM25{%K1} |
(171) 0x4359e3 VPMOVSXDQ %YMM25,%ZMM25 |
(171) 0x4359e9 VPSUBQ %ZMM4,%ZMM25,%ZMM26 |
(171) 0x4359ef KXNORW %K0,%K0,%K2 |
(171) 0x4359f3 VXORPD %XMM30,%XMM30,%XMM30 |
(171) 0x4359f9 VGATHERQPD (%R13,%ZMM26,8),%ZMM30{%K2} |
(171) 0x435a01 KXNORW %K0,%K0,%K2 |
(171) 0x435a05 VPXORD %XMM25,%XMM25,%XMM25 |
(171) 0x435a0b VGATHERQPD (%R12,%ZMM26,8),%ZMM25{%K2} |
(171) 0x435a12 VPADDD %YMM14,%YMM7,%YMM29 |
(171) 0x435a18 VMOVDQA64 %YMM29,%YMM26 |
(171) 0x435a1e VPADDD %YMM16,%YMM7,%YMM26{%K1} |
(171) 0x435a24 VANDPD %ZMM8,%ZMM24,%ZMM7 |
(171) 0x435a2a VPMOVSXDQ %YMM26,%ZMM26 |
(171) 0x435a30 VPSUBQ %ZMM4,%ZMM26,%ZMM26 |
(171) 0x435a36 KXNORW %K0,%K0,%K2 |
(171) 0x435a3a VXORPD %XMM31,%XMM31,%XMM31 |
(171) 0x435a40 VGATHERQPD (%R12,%ZMM26,8),%ZMM31{%K2} |
(171) 0x435a47 VDIVPD %ZMM30,%ZMM7,%ZMM26 |
(171) 0x435a4d VPMOVSXDQ %YMM28,%ZMM7 |
(171) 0x435a53 VPSUBQ %ZMM4,%ZMM7,%ZMM7 |
(171) 0x435a59 KXNORW %K0,%K0,%K2 |
(171) 0x435a5d VPXORD %XMM28,%XMM28,%XMM28 |
(171) 0x435a63 VGATHERQPD (%R12,%ZMM7,8),%ZMM28{%K2} |
(171) 0x435a6a VXORPD %XMM7,%XMM7,%XMM7 |
(171) 0x435a6e VSUBPD %ZMM31,%ZMM25,%ZMM31 |
(171) 0x435a74 VSUBPD %ZMM25,%ZMM28,%ZMM30 |
(171) 0x435a7a VMULPD %ZMM31,%ZMM30,%ZMM28 |
(171) 0x435a80 VCMPPD $0x1,%ZMM28,%ZMM7,%K0 |
(171) 0x435a87 KORTESTB %K0,%K0 |
(171) 0x435a8b JE 435980 |
(171) 0x435a91 VCMPPD $0x1,%ZMM28,%ZMM11,%K2 |
(171) 0x435a98 VMOVDQA32 %YMM27,%YMM29{%K1} |
(171) 0x435a9e VMOVUPD (%RDI,%R8,8),%ZMM7{%K2}{z} |
(171) 0x435aa5 VANDPD %ZMM8,%ZMM30,%ZMM27 |
(171) 0x435aab VSUBPD %ZMM26,%ZMM10,%ZMM5 |
(171) 0x435ab1 VMULPD %ZMM5,%ZMM27,%ZMM5 |
(171) 0x435ab7 VDIVPD %ZMM7,%ZMM5,%ZMM5 |
(171) 0x435abd VPMOVSXDQ %YMM29,%ZMM29 |
(171) 0x435ac3 VPSUBQ %ZMM4,%ZMM29,%ZMM29 |
(171) 0x435ac9 VPXOR %XMM0,%XMM0,%XMM0 |
(171) 0x435acd MOV 0x80(%RSP),%R14 |
(171) 0x435ad5 VGATHERQPD (%R14,%ZMM29,8),%ZMM0{%K2} |
(171) 0x435adc VANDPD %ZMM8,%ZMM31,%ZMM29 |
(171) 0x435ae2 VMINPD %ZMM27,%ZMM29,%ZMM27 |
(171) 0x435ae8 VFMADD213PD %ZMM29,%ZMM26,%ZMM29 |
(171) 0x435aee VDIVPD %ZMM0,%ZMM29,%ZMM0 |
(171) 0x435af4 VADDPD %ZMM5,%ZMM0,%ZMM0 |
(171) 0x435afa VMULPD %ZMM12,%ZMM7,%ZMM5 |
(171) 0x435b00 VMULPD %ZMM0,%ZMM5,%ZMM0 |
(171) 0x435b06 VMINPD %ZMM27,%ZMM0,%ZMM7 |
(171) 0x435b0c VFPCLASSPD $0x56,%ZMM30,%K1 |
(171) 0x435b13 VXORPD %ZMM13,%ZMM7,%ZMM7{%K1} |
(171) 0x435b19 JMP 435980 |
0x435b40 MOV %R10,%R8 |
0x435b43 CMP %R10,%RCX |
0x435b46 JE 43592b |
0x435b4c JMP 435b83 |
0x435b80 XOR %R8D,%R8D |
0x435b83 MOV %RCX,%R14 |
0x435b86 SUB %R8,%R14 |
0x435b89 VPBROADCASTQ %R14,%ZMM7 |
0x435b8f VPCMPNLEUQ 0xd14e6(%RIP),%ZMM7,%K1 |
0x435b9a KORTESTB %K1,%K1 |
0x435b9e JE 43592b |
0x435ba4 MOV 0x40(%RSP),%R14 |
0x435ba9 LEA (%R14,%RSI,1),%R12 |
0x435bad SUB 0x2c0(%RSP),%R12 |
0x435bb5 MOV %RDX,%R14 |
0x435bb8 IMUL %R12,%R14 |
0x435bbc ADD 0x58(%RSP),%R14 |
0x435bc1 MOV 0x18(%RSP),%R13 |
0x435bc6 ADD %R8,%R13 |
0x435bc9 SUB 0x98(%RSP),%R13 |
0x435bd1 VMOVUPD (%R14,%R13,8),%ZMM0{%K1}{z} |
0x435bd8 VPBROADCASTQ %R8,%ZMM5 |
0x435bde VPADDQ %ZMM5,%ZMM3,%ZMM5 |
0x435be4 VMOVAPD %ZMM0,%ZMM23{%K1} |
0x435bea VFPCLASSPD $0x50,%ZMM23,%K2{%K1} |
0x435bf1 VPBROADCASTD %R8D,%YMM0 |
0x435bf7 VPADDD %YMM6,%YMM0,%YMM0 |
0x435bfb VPMOVQD %ZMM5,%YMM5 |
0x435c01 VPCMPEQD %YMM7,%YMM7,%YMM7 |
0x435c05 VPADDD %YMM7,%YMM5,%YMM5 |
0x435c09 VPADDD %YMM0,%YMM1,%YMM24 |
0x435c0f VPBLENDMD %YMM5,%YMM24,%YMM7{%K2} |
0x435c15 VMOVDQA32 %YMM24,%YMM5{%K2} |
0x435c1b MOV 0x78(%RSP),%R8 |
0x435c20 IMUL %R12,%R8 |
0x435c24 VPMOVSXDQ %YMM5,%ZMM5 |
0x435c2a VPSUBQ %ZMM4,%ZMM5,%ZMM5 |
0x435c30 ADD 0xe0(%RSP),%R8 |
0x435c38 KMOVQ %K1,%K3 |
0x435c3d VXORPD %XMM25,%XMM25,%XMM25 |
0x435c43 VGATHERQPD (%R8,%ZMM5,8),%ZMM25{%K3} |
0x435c4a MOV 0x90(%RSP),%R8 |
0x435c52 IMUL %R12,%R8 |
0x435c56 ADD 0x130(%RSP),%R8 |
0x435c5e KMOVQ %K1,%K3 |
0x435c63 VPXORD %XMM27,%XMM27,%XMM27 |
0x435c69 VGATHERQPD (%R8,%ZMM5,8),%ZMM27{%K3} |
0x435c70 VPADDD 0x1e0(%RSP),%YMM0,%YMM26 |
0x435c78 VMOVDQA64 %YMM26,%YMM5 |
0x435c7e VPADDD %YMM0,%YMM2,%YMM5{%K2} |
0x435c84 VPMOVSXDQ %YMM5,%ZMM0 |
0x435c8a VPSUBQ %ZMM4,%ZMM0,%ZMM0 |
0x435c90 KMOVQ %K1,%K3 |
0x435c95 VPXOR %XMM5,%XMM5,%XMM5 |
0x435c99 VGATHERQPD (%R8,%ZMM0,8),%ZMM5{%K3} |
0x435ca0 VPMOVSXDQ %YMM7,%ZMM0 |
0x435ca6 VPSUBQ %ZMM4,%ZMM0,%ZMM0 |
0x435cac KMOVQ %K1,%K3 |
0x435cb1 VPXORD %XMM29,%XMM29,%XMM29 |
0x435cb7 VGATHERQPD (%R8,%ZMM0,8),%ZMM29{%K3} |
0x435cbe VXORPD %XMM28,%XMM28,%XMM28 |
0x435cc4 VANDPD %ZMM8,%ZMM23,%ZMM0 |
0x435cca VMOVAPD %ZMM25,%ZMM22{%K1} |
0x435cd0 VDIVPD %ZMM22,%ZMM0,%ZMM25 |
0x435cd6 VMOVAPD %ZMM27,%ZMM21{%K1} |
0x435cdc VMOVAPD %ZMM5,%ZMM20{%K1} |
0x435ce2 VSUBPD %ZMM20,%ZMM21,%ZMM7 |
0x435ce8 VMOVAPD %ZMM29,%ZMM19{%K1} |
0x435cee VSUBPD %ZMM21,%ZMM19,%ZMM27 |
0x435cf4 VMULPD %ZMM7,%ZMM27,%ZMM0 |
0x435cfa VCMPPD $0x1,%ZMM0,%ZMM28,%K3{%K1} |
0x435d01 KORTESTB %K3,%K3 |
0x435d05 JE 435900 |
0x435d0b VMOVDQA32 %YMM24,%YMM26{%K2} |
0x435d11 MOV 0x80(%RSP),%R8 |
0x435d19 VMOVUPD (%R8,%R13,8),%ZMM0{%K3}{z} |
0x435d20 VMOVAPD %ZMM0,%ZMM18{%K3} |
0x435d26 VANDPD %ZMM8,%ZMM7,%ZMM0 |
0x435d2c VANDPD %ZMM8,%ZMM27,%ZMM5 |
0x435d32 VSUBPD %ZMM25,%ZMM10,%ZMM7 |
0x435d38 VMULPD %ZMM7,%ZMM5,%ZMM7 |
0x435d3e VDIVPD %ZMM18,%ZMM7,%ZMM7 |
0x435d44 VPMOVSXDQ %YMM26,%ZMM24 |
0x435d4a VPSUBQ %ZMM4,%ZMM24,%ZMM24 |
0x435d50 KMOVQ %K3,%K2 |
0x435d55 VPXORD %XMM26,%XMM26,%XMM26 |
0x435d5b VGATHERQPD (%R8,%ZMM24,8),%ZMM26{%K2} |
0x435d62 VMINPD %ZMM5,%ZMM0,%ZMM5 |
0x435d68 VFMADD213PD %ZMM0,%ZMM25,%ZMM0 |
0x435d6e VMOVAPD %ZMM26,%ZMM17{%K3} |
0x435d74 VDIVPD %ZMM17,%ZMM0,%ZMM0 |
0x435d7a VADDPD %ZMM7,%ZMM0,%ZMM0 |
0x435d80 VMULPD %ZMM12,%ZMM18,%ZMM7 |
0x435d86 VMULPD %ZMM0,%ZMM7,%ZMM0 |
0x435d8c VMINPD %ZMM5,%ZMM0,%ZMM28 |
0x435d92 VFPCLASSPD $0x56,%ZMM27,%K2 |
0x435d99 VXORPD %ZMM13,%ZMM28,%ZMM28{%K2} |
0x435d9f JMP 435900 |
/scratch_na/users/xoserete/qaas_runs/171-415-7919/intel/CloverLeafFC/build/CloverLeafFC/CloverLeaf_ref/kernels/advec_mom_kernel.f90: 81 - 177 |
-------------------------------------------------------------------------------- |
81: IF(mom_sweep.EQ.1)THEN ! x 1 |
[...] |
150: DO k=y_min,y_max+1 |
151: DO j=x_min-1,x_max+1 |
152: IF(node_flux(j,k).LT.0.0)THEN |
[...] |
158: upwind=j-1 |
159: donor=j |
160: downwind=j+1 |
161: dif=upwind |
162: ENDIF |
163: sigma=ABS(node_flux(j,k))/(node_mass_pre(donor,k)) |
164: width=celldx(j) |
165: vdiffuw=vel1(donor,k)-vel1(upwind,k) |
166: vdiffdw=vel1(downwind,k)-vel1(donor,k) |
167: limiter=0.0 |
168: IF(vdiffuw*vdiffdw.GT.0.0)THEN |
169: auw=ABS(vdiffuw) |
170: adw=ABS(vdiffdw) |
171: wind=1.0_8 |
172: IF(vdiffdw.LE.0.0) wind=-1.0_8 |
173: limiter=wind*MIN(width*((2.0_8-sigma)*adw/width+(1.0_8+sigma)*auw/celldx(dif))/6.0_8,auw,adw) |
174: ENDIF |
175: advec_vel_s=vel1(donor,k)+(1.0-sigma)*limiter |
176: mom_flux(j,k)=advec_vel_s*node_flux(j,k) |
177: ENDDO |
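For reference, the limiter section shown above (lines 163-176) can be read as the following self-contained scalar sketch, lifted out of the j/k loops onto a 1-D slice with made-up array extents and inputs. The negative-flux index selection elided at [...] (lines 153-157) is not reproduced, so only the branch shown at lines 158-161 (the non-negative-flux case) is exercised.

```fortran
! Hedged scalar sketch of the shown source lines 158-176; array sizes and
! input values are invented for illustration only.
program limiter_sketch
  implicit none
  integer, parameter :: n = 6
  real(8) :: node_flux(0:n+1), node_mass_pre(0:n+1), vel1(0:n+1), celldx(0:n+1)
  real(8) :: mom_flux(0:n+1)
  integer :: j, upwind, donor, downwind, dif
  real(8) :: sigma, width, vdiffuw, vdiffdw, limiter, auw, adw, wind, advec_vel_s

  call random_number(vel1)
  call random_number(node_flux)   ! non-negative fluxes, so the shown branch applies
  node_mass_pre = 1.0_8
  celldx = 1.0_8

  do j = 1, n
    ! index selection for the non-negative-flux case (source lines 158-161)
    upwind   = j - 1
    donor    = j
    downwind = j + 1
    dif      = upwind

    ! limited velocity and momentum flux (source lines 163-176)
    sigma   = ABS(node_flux(j)) / node_mass_pre(donor)
    width   = celldx(j)
    vdiffuw = vel1(donor) - vel1(upwind)
    vdiffdw = vel1(downwind) - vel1(donor)
    limiter = 0.0_8
    IF (vdiffuw*vdiffdw .GT. 0.0_8) THEN
      auw  = ABS(vdiffuw)
      adw  = ABS(vdiffdw)
      wind = 1.0_8
      IF (vdiffdw .LE. 0.0_8) wind = -1.0_8
      limiter = wind*MIN(width*((2.0_8-sigma)*adw/width + (1.0_8+sigma)*auw/celldx(dif))/6.0_8, auw, adw)
    END IF
    advec_vel_s = vel1(donor) + (1.0_8 - sigma)*limiter
    mom_flux(j) = advec_vel_s*node_flux(j)
  end do

  print *, mom_flux(1:n)
end program limiter_sketch
```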
Coverage (%) | Name | Source Location | Module |
---|---|---|---|
100.00+ | __kmp_invoke_microtask | libiomp5.so | |
○ | __kmp_invoke_task_func | libiomp5.so | |
Path / |
Metric | Value |
---|---|
CQA speedup if no scalar integer | 1.00 |
CQA speedup if FP arith vectorized | 1.00 |
CQA speedup if fully vectorized | 1.00 |
CQA speedup if no inter-iteration dependency | NA |
CQA speedup if next bottleneck killed | 1.48 |
Bottlenecks | P0, |
Function | advec_mom_kernel_.DIR.OMP.PARALLEL.2 |
Source | advec_mom_kernel.f90:81-81,advec_mom_kernel.f90:150-152,advec_mom_kernel.f90:158-158,advec_mom_kernel.f90:163-170,advec_mom_kernel.f90:173-176 |
Source loop unroll info | NA |
Source loop unroll confidence level | NA |
Unroll/vectorization loop type | NA |
Unroll factor | NA |
CQA cycles | 48.00 |
CQA cycles if no scalar integer | 48.00 |
CQA cycles if FP arith vectorized | 48.00 |
CQA cycles if fully vectorized | 48.00 |
Front-end cycles | 24.50 |
DIV/SQRT cycles | 32.50 |
P0 cycles | 16.00 |
P1 cycles | 20.00 |
P2 cycles | 20.00 |
P3 cycles | 0.50 |
P4 cycles | 32.50 |
P5 cycles | 10.60 |
P6 cycles | 0.50 |
P7 cycles | 0.50 |
P8 cycles | 0.50 |
P9 cycles | 10.40 |
P10 cycles | 20.00 |
P11 cycles | 48.00 |
Inter-iter dependencies cycles | NA |
FE+BE cycles (UFS) | 60.25 - 112.88 |
Stall cycles (UFS) | 36.57 - 89.20 |
Nb insns | 122.00 |
Nb uops | 147.00 |
Nb loads | 26.00 |
Nb stores | 1.00 |
Nb stack references | 12.00 |
FLOP/cycle | 2.83 |
Nb FLOP add-sub | 40.00 |
Nb FLOP mul | 40.00 |
Nb FLOP fma | 16.00 |
Nb FLOP div | 24.00 |
Nb FLOP rcp | 0.00 |
Nb FLOP sqrt | 0.00 |
Nb FLOP rsqrt | 0.00 |
Bytes/cycle | 15.50 |
Bytes prefetched | 0.00 |
Bytes loaded | 680.00 |
Bytes stored | 64.00 |
Stride 0 | NA |
Stride 1 | NA |
Stride n | NA |
Stride unknown | NA |
Stride indirect | NA |
Vectorization ratio all | 89.33 |
Vectorization ratio load | 100.00 |
Vectorization ratio store | 100.00 |
Vectorization ratio mul | 100.00 |
Vectorization ratio add_sub | 88.24 |
Vectorization ratio fma | 100.00 |
Vectorization ratio div_sqrt | 100.00 |
Vectorization ratio other | 86.67 |
Vector-efficiency ratio all | 75.25 |
Vector-efficiency ratio load | 94.44 |
Vector-efficiency ratio store | 100.00 |
Vector-efficiency ratio mul | 100.00 |
Vector-efficiency ratio add_sub | 75.00 |
Vector-efficiency ratio fma | 100.00 |
Vector-efficiency ratio div_sqrt | 100.00 |
Vector-efficiency ratio other | 68.19 |
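A quick consistency check of the throughput figures in this table, assuming CQA's counting convention of two FLOPs per FMA (the only convention that reproduces the reported value):

```latex
\text{FLOP/cycle} = \frac{40_{\text{add-sub}} + 40_{\text{mul}} + 2\times 16_{\text{fma}} + 24_{\text{div}}}{48_{\text{CQA cycles}}} = \frac{136}{48} \approx 2.83,
\qquad
\text{Bytes/cycle} = \frac{680_{\text{loaded}} + 64_{\text{stored}}}{48} = 15.5
```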
Path / |
Function | advec_mom_kernel_.DIR.OMP.PARALLEL.2 |
Source file and lines | advec_mom_kernel.f90:81-177 |
Module | exec |
nb instructions | 122 |
nb uops | 147 |
loop length | 689 |
used x86 registers | 13 |
used mmx registers | 0 |
used xmm registers | 6 |
used ymm registers | 8 |
used zmm registers | 23 |
nb stack references | 12 |
ADD-SUB / MUL ratio | 1.00 |
micro-operation queue | 24.50 cycles |
front end | 24.50 cycles |
P0 | P1 | P2 | P3 | P4 | P5 | P6 | P7 | P8 | P9 | P10 | P11 | |
---|---|---|---|---|---|---|---|---|---|---|---|---|
uops | 32.50 | 16.00 | 20.00 | 20.00 | 0.50 | 32.50 | 10.60 | 0.50 | 0.50 | 0.50 | 10.40 | 20.00 |
cycles | 32.50 | 16.00 | 20.00 | 20.00 | 0.50 | 32.50 | 10.60 | 0.50 | 0.50 | 0.50 | 10.40 | 20.00 |
Cycles executing div or sqrt instructions | 48.00 |
FE+BE cycles | 60.25-112.88 |
Stall cycles | 36.57-89.20 |
ROB full (events) | 39.38-95.01 |
Front-end | 24.50 |
Dispatch | 32.50 |
DIV/SQRT | 48.00 |
Overall L1 | 48.00 |
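The 48-cycle DIV/SQRT figure (and hence the overall L1 bound) matches the three full-width VDIVPD in the instruction table below, each with a reciprocal throughput of 16 cycles on the divider; assuming the divides do not overlap, removing that bottleneck would expose the 32.5-cycle dispatch bound, which is consistent with the reported 1.48x "next bottleneck killed" speedup estimate:

```latex
3 \times 16 = 48 \ \text{cycles (VDIVPD)}, \qquad \frac{48}{32.5_{\text{dispatch}}} \approx 1.48
```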
all | 75% |
load | 100% |
store | NA (no store vectorizable/vectorized instructions) |
mul | NA (no mul vectorizable/vectorized instructions) |
add-sub | 83% |
fma | NA (no fma vectorizable/vectorized instructions) |
other | 71% |
all | 100% |
load | 100% |
store | 100% |
mul | 100% |
add-sub | 100% |
fma | 100% |
div/sqrt | 100% |
other | 100% |
all | 89% |
load | 100% |
store | 100% |
mul | 100% |
add-sub | 88% |
fma | 100% |
div/sqrt | 100% |
other | 86% |
all | 48% |
load | 75% |
store | NA (no store vectorizable/vectorized instructions) |
mul | NA (no mul vectorizable/vectorized instructions) |
add-sub | 64% |
fma | NA (no fma vectorizable/vectorized instructions) |
other | 38% |
all | 96% |
load | 100% |
store | 100% |
mul | 100% |
add-sub | 100% |
fma | 100% |
div/sqrt | 100% |
other | 93% |
all | 75% |
load | 94% |
store | 100% |
mul | 100% |
add-sub | 75% |
fma | 100% |
div/sqrt | 100% |
other | 68% |
Instruction | Nb FU | P0 | P1 | P2 | P3 | P4 | P5 | P6 | P7 | P8 | P9 | P10 | P11 | Latency | Recip. throughput |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
VMOVAPD %ZMM28,%ZMM0{%K3}{z} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VSUBPD %ZMM25,%ZMM9,%ZMM5 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VFMADD213PD %ZMM21,%ZMM0,%ZMM5 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VMULPD %ZMM23,%ZMM5,%ZMM0 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
IMUL %RBX,%R12 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
ADD 0x148(%RSP),%R12 | 1 | 0.20 | 0.20 | 0.33 | 0.33 | 0 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0.33 | 1 | 0.33 |
VMOVUPD %ZMM0,(%R12,%R13,8){%K1} | 1 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0.50 | 0.50 | 0.50 | 0 | 0 | 0-1 | 1 |
ADD %RBX,%R15 | 1 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0 | 1 | 0.20 |
ADD %RDX,%R9 | 1 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0 | 1 | 0.20 |
CMP %RAX,%RSI | 1 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0 | 1 | 0.20 |
LEA 0x1(%RSI),%RSI | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
JE 432627 <advec_mom_kernel_mod_mp_advec_mom_kernel_.DIR.OMP.PARALLEL.2+0x947> | 1 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 |
TEST %R10,%R10 | 1 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0 | 2 | 0.20 |
JE 435b80 <advec_mom_kernel_mod_mp_advec_mom_kernel_.DIR.OMP.PARALLEL.2+0x3ea0> | 1 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 |
MOV 0x40(%RSP),%R8 | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.33 |
LEA (%R8,%RSI,1),%R12 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
SUB 0x2c0(%RSP),%R12 | 1 | 0.20 | 0.20 | 0.33 | 0.33 | 0 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0.33 | 1 | 0.33 |
MOV 0x78(%RSP),%R13 | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.33 |
IMUL %R12,%R13 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
IMUL 0x90(%RSP),%R12 | 1 | 0 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 3 | 1 |
ADD 0xe0(%RSP),%R13 | 1 | 0.20 | 0.20 | 0.33 | 0.33 | 0 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0.33 | 1 | 0.33 |
ADD 0x130(%RSP),%R12 | 1 | 0.20 | 0.20 | 0.33 | 0.33 | 0 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0.33 | 1 | 0.33 |
XOR %R8D,%R8D | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
JMP 4359b3 <advec_mom_kernel_mod_mp_advec_mom_kernel_.DIR.OMP.PARALLEL.2+0x3cd3> | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5.84 |
MOV %R10,%R8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.17 |
CMP %R10,%RCX | 1 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0 | 1 | 0.20 |
JE 43592b <advec_mom_kernel_mod_mp_advec_mom_kernel_.DIR.OMP.PARALLEL.2+0x3c4b> | 1 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 |
JMP 435b83 <advec_mom_kernel_mod_mp_advec_mom_kernel_.DIR.OMP.PARALLEL.2+0x3ea3> | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5.84 |
XOR %R8D,%R8D | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
MOV %RCX,%R14 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.17 |
SUB %R8,%R14 | 1 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0 | 1 | 0.20 |
VPBROADCASTQ %R14,%ZMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPCMPNLEUQ 0xd14e6(%RIP),%ZMM7,%K1 | |||||||||||||||
KORTESTB %K1,%K1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
JE 43592b <advec_mom_kernel_mod_mp_advec_mom_kernel_.DIR.OMP.PARALLEL.2+0x3c4b> | 1 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 |
MOV 0x40(%RSP),%R14 | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.33 |
LEA (%R14,%RSI,1),%R12 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
SUB 0x2c0(%RSP),%R12 | 1 | 0.20 | 0.20 | 0.33 | 0.33 | 0 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0.33 | 1 | 0.33 |
MOV %RDX,%R14 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.17 |
IMUL %R12,%R14 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
ADD 0x58(%RSP),%R14 | 1 | 0.20 | 0.20 | 0.33 | 0.33 | 0 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0.33 | 1 | 0.33 |
MOV 0x18(%RSP),%R13 | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.33 |
ADD %R8,%R13 | 1 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0 | 1 | 0.20 |
SUB 0x98(%RSP),%R13 | 1 | 0.20 | 0.20 | 0.33 | 0.33 | 0 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0.33 | 1 | 0.33 |
VMOVUPD (%R14,%R13,8),%ZMM0{%K1}{z} | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0-1 | 0.50 |
VPBROADCASTQ %R8,%ZMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPADDQ %ZMM5,%ZMM3,%ZMM5 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VMOVAPD %ZMM0,%ZMM23{%K1} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VFPCLASSPD $0x50,%ZMM23,%K2{%K1} | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPBROADCASTD %R8D,%YMM0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPADDD %YMM6,%YMM0,%YMM0 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VPMOVQD %ZMM5,%YMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPCMPEQD %YMM7,%YMM7,%YMM7 | 1 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VPADDD %YMM7,%YMM5,%YMM5 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VPADDD %YMM0,%YMM1,%YMM24 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VPBLENDMD %YMM5,%YMM24,%YMM7{%K2} | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VMOVDQA32 %YMM24,%YMM5{%K2} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
MOV 0x78(%RSP),%R8 | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.33 |
IMUL %R12,%R8 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPMOVSXDQ %YMM5,%ZMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPSUBQ %ZMM4,%ZMM5,%ZMM5 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.50 |
ADD 0xe0(%RSP),%R8 | 1 | 0.20 | 0.20 | 0.33 | 0.33 | 0 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0.33 | 1 | 0.33 |
KMOVQ %K1,%K3 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
VXORPD %XMM25,%XMM25,%XMM25 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
VGATHERQPD (%R8,%ZMM5,8),%ZMM25{%K3} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
MOV 0x90(%RSP),%R8 | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.33 |
IMUL %R12,%R8 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
ADD 0x130(%RSP),%R8 | 1 | 0.20 | 0.20 | 0.33 | 0.33 | 0 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0.33 | 1 | 0.33 |
KMOVQ %K1,%K3 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
VPXORD %XMM27,%XMM27,%XMM27 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.33 |
VGATHERQPD (%R8,%ZMM5,8),%ZMM27{%K3} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VPADDD 0x1e0(%RSP),%YMM0,%YMM26 | 1 | 0.33 | 0.33 | 0.33 | 0.33 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.40 |
VMOVDQA64 %YMM26,%YMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VPADDD %YMM0,%YMM2,%YMM5{%K2} | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VPMOVSXDQ %YMM5,%ZMM0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPSUBQ %ZMM4,%ZMM0,%ZMM0 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.50 |
KMOVQ %K1,%K3 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
VPXOR %XMM5,%XMM5,%XMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
VGATHERQPD (%R8,%ZMM0,8),%ZMM5{%K3} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VPMOVSXDQ %YMM7,%ZMM0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPSUBQ %ZMM4,%ZMM0,%ZMM0 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.50 |
KMOVQ %K1,%K3 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
VPXORD %XMM29,%XMM29,%XMM29 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.33 |
VGATHERQPD (%R8,%ZMM0,8),%ZMM29{%K3} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VXORPD %XMM28,%XMM28,%XMM28 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
VANDPD %ZMM8,%ZMM23,%ZMM0 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VMOVAPD %ZMM25,%ZMM22{%K1} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VDIVPD %ZMM22,%ZMM0,%ZMM25 | 3 | 2.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 22-24 | 16 |
VMOVAPD %ZMM27,%ZMM21{%K1} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VMOVAPD %ZMM5,%ZMM20{%K1} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VSUBPD %ZMM20,%ZMM21,%ZMM7 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VMOVAPD %ZMM29,%ZMM19{%K1} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VSUBPD %ZMM21,%ZMM19,%ZMM27 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VMULPD %ZMM7,%ZMM27,%ZMM0 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VCMPPD $0x1,%ZMM0,%ZMM28,%K3{%K1} | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
KORTESTB %K3,%K3 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
JE 435900 <advec_mom_kernel_mod_mp_advec_mom_kernel_.DIR.OMP.PARALLEL.2+0x3c20> | 1 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 |
VMOVDQA32 %YMM24,%YMM26{%K2} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
MOV 0x80(%RSP),%R8 | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.33 |
VMOVUPD (%R8,%R13,8),%ZMM0{%K3}{z} | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0-1 | 0.50 |
VMOVAPD %ZMM0,%ZMM18{%K3} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VANDPD %ZMM8,%ZMM7,%ZMM0 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VANDPD %ZMM8,%ZMM27,%ZMM5 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VSUBPD %ZMM25,%ZMM10,%ZMM7 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VMULPD %ZMM7,%ZMM5,%ZMM7 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VDIVPD %ZMM18,%ZMM7,%ZMM7 | 3 | 2.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 22-24 | 16 |
VPMOVSXDQ %YMM26,%ZMM24 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPSUBQ %ZMM4,%ZMM24,%ZMM24 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.50 |
KMOVQ %K3,%K2 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
VPXORD %XMM26,%XMM26,%XMM26 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.33 |
VGATHERQPD (%R8,%ZMM24,8),%ZMM26{%K2} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VMINPD %ZMM5,%ZMM0,%ZMM5 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VFMADD213PD %ZMM0,%ZMM25,%ZMM0 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VMOVAPD %ZMM26,%ZMM17{%K3} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VDIVPD %ZMM17,%ZMM0,%ZMM0 | 3 | 2.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 22-24 | 16 |
VADDPD %ZMM7,%ZMM0,%ZMM0 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VMULPD %ZMM12,%ZMM18,%ZMM7 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VMULPD %ZMM0,%ZMM7,%ZMM0 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VMINPD %ZMM5,%ZMM0,%ZMM28 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VFPCLASSPD $0x56,%ZMM27,%K2 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VXORPD %ZMM13,%ZMM28,%ZMM28{%K2} | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.50 |
JMP 435900 <advec_mom_kernel_mod_mp_advec_mom_kernel_.DIR.OMP.PARALLEL.2+0x3c20> | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.08 |
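Note on the memory side: the five masked VGATHERQPD in this path implement the indirect vel1/node_mass_pre accesses through the data-dependent upwind/donor/downwind indices, which is consistent with the VFPCLASSPD/VPBLENDMD index selection preceding each gather in the listing. At 5 uops and a reciprocal throughput of about 2.67 cycles per load port each, the gathers alone account for most of the 20 cycles reported on P2, P3 and P11:

```latex
5 \times 2.67 \approx 13.3 \ \text{of the } 20 \ \text{cycles per load port}
```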