Loop Id: 99 | Module: exec | Source: advec_cell.cpp:158-202 [...] | Coverage: 2.31% |
---|
0x41d7a0 VADDPD %ZMM10,%ZMM6,%ZMM6{%K1} |
0x41d7a6 VMULPD %ZMM4,%ZMM6,%ZMM2 |
0x41d7ac VPMULLQ %ZMM1,%ZMM26,%ZMM1 |
0x41d7b2 VPADDQ %ZMM0,%ZMM1,%ZMM0 |
0x41d7b8 KXNORW %K0,%K0,%K1 |
0x41d7bc MOV 0x28(%RSP),%RAX [11] |
0x41d7c1 VSCATTERQPD %ZMM2,(%RAX,%ZMM0,8){%K1} [14] |
0x41d7c8 VPADDQ 0x45e86(%RIP){1to8},%ZMM17,%ZMM17 [10] |
0x41d7d2 ADD $0x8,%R13 |
0x41d7d6 CMP %R14,%R13 |
0x41d7d9 JAE 41db29 |
0x41d7df VMOVDQA64 %ZMM17,%ZMM0 |
0x41d7e5 VMOVDQA64 %ZMM16,%ZMM1 |
0x41d7eb MOV $0x451520,%RAX |
0x41d7f2 CALL %RAX |
0x41d7f4 VPMOVQD %ZMM0,%YMM0 |
0x41d7fa VPADDD 0xa0(%RSP),%YMM0,%YMM30 [11] |
0x41d802 VMOVDQA64 %ZMM17,%ZMM0 |
0x41d808 VMOVDQA64 %ZMM16,%ZMM1 |
0x41d80e CALL 4513a0 <__svml_i64rem8_z0> |
0x41d814 VPADDQ %ZMM19,%ZMM0,%ZMM0 |
0x41d81a VPSLLQ $0x20,%ZMM0,%ZMM0 |
0x41d821 VPSRAQ $0x20,%ZMM0,%ZMM0 |
0x41d828 VPMOVSXDQ %YMM30,%ZMM1 |
0x41d82e VPXOR %XMM2,%XMM2,%XMM2 |
0x41d832 VPMULLQ %ZMM1,%ZMM20,%ZMM2 |
0x41d838 VPADDQ %ZMM0,%ZMM2,%ZMM2 |
0x41d83e VXORPD %XMM4,%XMM4,%XMM4 |
0x41d842 KXNORW %K0,%K0,%K1 |
0x41d846 MOV 0x58(%RSP),%RAX [11] |
0x41d84b VGATHERQPD (%RAX,%ZMM2,8),%ZMM4{%K1} [9] |
0x41d852 VCMPPD $0x1,%ZMM4,%ZMM27,%K1 |
0x41d859 VPCMPEQD %YMM7,%YMM7,%YMM7 |
0x41d85d VPADDD %YMM7,%YMM30,%YMM2 |
0x41d863 VPMOVSXDQ %YMM2,%ZMM5 |
0x41d869 VPBLENDMQ %ZMM5,%ZMM1,%ZMM6{%K1} |
0x41d86f VPXOR %XMM2,%XMM2,%XMM2 |
0x41d873 VPMULLQ %ZMM6,%ZMM22,%ZMM2 |
0x41d879 VPADDQ %ZMM0,%ZMM2,%ZMM3 |
0x41d87f VPXOR %XMM2,%XMM2,%XMM2 |
0x41d883 KXNORW %K0,%K0,%K2 |
0x41d887 MOV 0x8(%RSP),%RAX [11] |
0x41d88c VGATHERQPD (%RAX,%ZMM3,8),%ZMM2{%K2} [13] |
0x41d893 VPSUBD %YMM7,%YMM30,%YMM7 |
0x41d899 VPMINSD %YMM7,%YMM21,%YMM7 |
0x41d89f VPMOVSXDQ %YMM7,%ZMM7 |
0x41d8a5 VMOVDQA64 %ZMM7,%ZMM8 |
0x41d8ab VXORPD %XMM9,%XMM9,%XMM9 |
0x41d8b0 KXNORW %K0,%K0,%K2 |
0x41d8b4 VGATHERDPD (%R12,%YMM30,8),%ZMM9{%K2} [12] |
0x41d8bb VMOVDQA64 %ZMM5,%ZMM7{%K1} |
0x41d8c1 VANDPD %ZMM28,%ZMM4,%ZMM10 |
0x41d8c7 VDIVPD %ZMM2,%ZMM10,%ZMM10 |
0x41d8cd VXORPD %XMM2,%XMM2,%XMM2 |
0x41d8d1 KXNORW %K0,%K0,%K2 |
0x41d8d5 VGATHERQPD (%R12,%ZMM7,8),%ZMM2{%K2} [16] |
0x41d8dc VFMADD213PD %ZMM9,%ZMM10,%ZMM9 |
0x41d8e2 VDIVPD %ZMM2,%ZMM9,%ZMM2 |
0x41d8e8 VPADDD 0x46de6(%RIP){1to8},%YMM30,%YMM9 [10] |
0x41d8f2 VPXOR %XMM7,%XMM7,%XMM7 |
0x41d8f6 VPMULLQ %ZMM6,%ZMM23,%ZMM7 |
0x41d8fc VPADDQ %ZMM0,%ZMM7,%ZMM7 |
0x41d902 VXORPD %XMM11,%XMM11,%XMM11 |
0x41d907 KXNORW %K0,%K0,%K2 |
0x41d90b VGATHERQPD (%RDI,%ZMM7,8),%ZMM11{%K2} [4] |
0x41d912 VPMOVSXDQ %YMM9,%ZMM8{%K1} |
0x41d918 VPXOR %XMM9,%XMM9,%XMM9 |
0x41d91d VPMULLQ %ZMM8,%ZMM23,%ZMM9 |
0x41d923 VPADDQ %ZMM0,%ZMM9,%ZMM9 |
0x41d929 VXORPD %XMM12,%XMM12,%XMM12 |
0x41d92e KXNORW %K0,%K0,%K2 |
0x41d932 VGATHERQPD (%RDI,%ZMM9,8),%ZMM12{%K2} [8] |
0x41d939 VPBLENDMQ %ZMM1,%ZMM5,%ZMM9{%K1} |
0x41d93f VPXOR %XMM5,%XMM5,%XMM5 |
0x41d943 VPMULLQ %ZMM9,%ZMM23,%ZMM5 |
0x41d949 VPADDQ %ZMM0,%ZMM5,%ZMM5 |
0x41d94f VXORPD %XMM13,%XMM13,%XMM13 |
0x41d954 KXNORW %K0,%K0,%K1 |
0x41d958 VGATHERQPD (%RDI,%ZMM5,8),%ZMM13{%K1} [1] |
0x41d95f VBROADCASTSD 0x46d4f(%RIP),%ZMM5 [10] |
0x41d969 VSUBPD %ZMM10,%ZMM5,%ZMM5 |
0x41d96f VSUBPD %ZMM12,%ZMM11,%ZMM12 |
0x41d975 VSUBPD %ZMM11,%ZMM13,%ZMM13 |
0x41d97b VMULPD %ZMM12,%ZMM13,%ZMM14 |
0x41d981 VCMPPD $0x1,%ZMM14,%ZMM27,%K1 |
0x41d988 VCMPPD $0x1,%ZMM13,%ZMM27,%K2 |
0x41d98f VSUBPD %ZMM10,%ZMM31,%ZMM10 |
0x41d995 VXORPD %ZMM29,%ZMM10,%ZMM14 |
0x41d99b VMOVAPD %ZMM10,%ZMM14{%K2} |
0x41d9a1 VANDPD %ZMM28,%ZMM12,%ZMM10 |
0x41d9a7 VANDPD %ZMM28,%ZMM13,%ZMM12 |
0x41d9ad VMINPD %ZMM12,%ZMM10,%ZMM13 |
0x41d9b3 VMULPD %ZMM2,%ZMM10,%ZMM10 |
0x41d9b9 VFMADD231PD %ZMM12,%ZMM5,%ZMM10 |
0x41d9bf VMULPD %ZMM18,%ZMM10,%ZMM10 |
0x41d9c5 VMINPD %ZMM10,%ZMM13,%ZMM10 |
0x41d9cb VFMADD231PD %ZMM14,%ZMM10,%ZMM11{%K1} |
0x41d9d1 VMULPD %ZMM4,%ZMM11,%ZMM4 |
0x41d9d7 VPXOR %XMM10,%XMM10,%XMM10 |
0x41d9dc VPMULLQ %ZMM1,%ZMM24,%ZMM10 |
0x41d9e2 VPADDQ %ZMM0,%ZMM10,%ZMM10 |
0x41d9e8 KXNORW %K0,%K0,%K1 |
0x41d9ec MOV 0x30(%RSP),%RAX [11] |
0x41d9f1 VSCATTERQPD %ZMM4,(%RAX,%ZMM10,8){%K1} [6] |
0x41d9f8 VPMULLQ %ZMM6,%ZMM25,%ZMM6 |
0x41d9fe VPADDQ %ZMM0,%ZMM6,%ZMM10 |
0x41da04 VPXOR %XMM6,%XMM6,%XMM6 |
0x41da08 KXNORW %K0,%K0,%K1 |
0x41da0c VGATHERQPD (%R15,%ZMM10,8),%ZMM6{%K1} [2] |
0x41da13 VPMULLQ %ZMM8,%ZMM25,%ZMM8 |
0x41da19 VPADDQ %ZMM0,%ZMM8,%ZMM8 |
0x41da1f VXORPD %XMM10,%XMM10,%XMM10 |
0x41da24 KXNORW %K0,%K0,%K1 |
0x41da28 VGATHERQPD (%R15,%ZMM8,8),%ZMM10{%K1} [15] |
0x41da2f VPXOR %XMM8,%XMM8,%XMM8 |
0x41da34 VPMULLQ %ZMM9,%ZMM25,%ZMM8 |
0x41da3a VPADDQ %ZMM0,%ZMM8,%ZMM8 |
0x41da40 VPXOR %XMM9,%XMM9,%XMM9 |
0x41da45 KXNORW %K0,%K0,%K1 |
0x41da49 VGATHERQPD (%R15,%ZMM8,8),%ZMM9{%K1} [3] |
0x41da50 VSUBPD %ZMM10,%ZMM6,%ZMM8 |
0x41da56 VSUBPD %ZMM6,%ZMM9,%ZMM9 |
0x41da5c VMULPD %ZMM8,%ZMM9,%ZMM10 |
0x41da62 VCMPPD $0x1,%ZMM10,%ZMM27,%K1 |
0x41da69 KORTESTB %K1,%K1 |
0x41da6d VXORPD %XMM10,%XMM10,%XMM10 |
0x41da72 JE 41d7a0 |
0x41da78 VPSLLQ $0x3,%ZMM3,%ZMM3 |
0x41da7f VPADDQ 0x100(%RSP),%ZMM3,%ZMM3 [11] |
0x41da87 VPSLLQ $0x3,%ZMM7,%ZMM7 |
0x41da8e VPADDQ 0xc0(%RSP),%ZMM7,%ZMM7 [11] |
0x41da96 KMOVQ %K1,%K2 |
0x41da9b VGATHERQPD (,%ZMM7,1),%ZMM10{%K2} [7] |
0x41daa6 VXORPD %XMM7,%XMM7,%XMM7 |
0x41daaa KMOVQ %K1,%K2 |
0x41daaf VGATHERQPD (,%ZMM3,1),%ZMM7{%K2} [5] |
0x41daba VANDPD %ZMM28,%ZMM4,%ZMM3 |
0x41dac0 VMULPD %ZMM10,%ZMM7,%ZMM7 |
0x41dac6 VDIVPD %ZMM7,%ZMM3,%ZMM3 |
0x41dacc VCMPPD $0x1,%ZMM9,%ZMM27,%K2 |
0x41dad3 VSUBPD %ZMM3,%ZMM31,%ZMM3 |
0x41dad9 VXORPD %ZMM29,%ZMM3,%ZMM7 |
0x41dadf VMOVAPD %ZMM3,%ZMM7{%K2} |
0x41dae5 VANDPD %ZMM28,%ZMM8,%ZMM3 |
0x41daeb VANDPD %ZMM28,%ZMM9,%ZMM8 |
0x41daf1 VMINPD %ZMM8,%ZMM3,%ZMM9 |
0x41daf7 VMULPD %ZMM2,%ZMM3,%ZMM2 |
0x41dafd VFMADD213PD %ZMM2,%ZMM8,%ZMM5 |
0x41db03 VMULPD %ZMM18,%ZMM5,%ZMM2 |
0x41db09 VMINPD %ZMM2,%ZMM9,%ZMM2 |
0x41db0f VMULPD %ZMM2,%ZMM7,%ZMM10 |
0x41db15 JMP 41d7a0 |
/scratch_na/users/xoserete/qaas_runs/171-415-4687/intel/CloverLeafCXX/build/CloverLeafCXX/src/omp/context.h: 69 - 69 |
-------------------------------------------------------------------------------- |
69: T &operator()(size_t i, size_t j) const { return data[i + j * sizeX]; } |
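The accessor at context.h:69 linearizes the 2D index as data[i + j * sizeX], so cells with consecutive i are adjacent in memory and i is the unit-stride direction. A minimal sketch of that mapping, assuming a raw-pointer backing store and an illustrative struct name (only the indexing expression is taken from the excerpt):

#include <cstddef>
#include <cstdio>
#include <vector>

// Hypothetical stand-in for the 2D buffer behind context.h:69.
template <typename T> struct Buffer2DSketch {
  T *data;      // backing storage; a row of constant j is contiguous
  size_t sizeX; // extent in the i direction
  // Same linearization as context.h:69: consecutive i map to consecutive addresses.
  T &operator()(size_t i, size_t j) const { return data[i + j * sizeX]; }
};

int main() {
  std::vector<double> storage(4 * 3, 0.0);
  Buffer2DSketch<double> buf{storage.data(), 4};
  buf(2, 1) = 1.0;                  // linear offset 2 + 1 * 4 = 6
  std::printf("%f\n", storage[6]);  // prints 1.000000
}

Since i is the inner loop index in advec_cell.cpp below, accesses such as vol_flux_y(i, j) are unit-stride along the vectorized direction. The gathers and the __svml_i64rem8_z0 calls in the assembly above are consistent with the compiler reconstructing (i, j) from a linearized iteration index at run time, which hides that stride from it; the stride breakdown further down reports no stride-1 accesses and only indirect ones.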
/scratch_na/users/xoserete/qaas_runs/171-415-4687/intel/CloverLeafCXX/build/CloverLeafCXX/src/omp/advec_cell.cpp: 158 - 202 |
-------------------------------------------------------------------------------- |
158: for (int j = (y_min + 1); j < (y_max + 2 + 2); j++) { |
159: for (int i = (x_min + 1); i < (x_max + 2); i++) |
160: ({ |
161: int upwind, donor, downwind, dif; |
162: double sigmat, sigma3, sigma4, sigmav, sigmam, diffuw, diffdw, limiter, wind; |
163: if (vol_flux_y(i, j) > 0.0) { |
164: upwind = j - 2; |
165: donor = j - 1; |
166: downwind = j; |
167: dif = donor; |
168: } else { |
169: upwind = std::min(j + 1, y_max + 2); |
170: donor = j; |
171: downwind = j - 1; |
172: dif = upwind; |
173: } |
174: sigmat = std::fabs(vol_flux_y(i, j)) / pre_vol(i, donor); |
175: sigma3 = (1.0 + sigmat) * (vertexdy[j] / vertexdy[dif]); |
176: sigma4 = 2.0 - sigmat; |
177: sigmav = sigmat; |
178: diffuw = density1(i, donor) - density1(i, upwind); |
179: diffdw = density1(i, downwind) - density1(i, donor); |
180: wind = 1.0; |
181: if (diffdw <= 0.0) wind = -1.0; |
182: if (diffuw * diffdw > 0.0) { |
183: limiter = (1.0 - sigmav) * wind * |
184: std::fmin(std::fmin(std::fabs(diffuw), std::fabs(diffdw)), |
185: one_by_six * (sigma3 * std::fabs(diffuw) + sigma4 * std::fabs(diffdw))); |
186: } else { |
187: limiter = 0.0; |
188: } |
189: mass_flux_y(i, j) = vol_flux_y(i, j) * (density1(i, donor) + limiter); |
190: sigmam = std::fabs(mass_flux_y(i, j)) / (density1(i, donor) * pre_vol(i, donor)); |
191: diffuw = energy1(i, donor) - energy1(i, upwind); |
192: diffdw = energy1(i, downwind) - energy1(i, donor); |
193: wind = 1.0; |
194: if (diffdw <= 0.0) wind = -1.0; |
195: if (diffuw * diffdw > 0.0) { |
196: limiter = (1.0 - sigmam) * wind * |
197: std::fmin(std::fmin(std::fabs(diffuw), std::fabs(diffdw)), |
198: one_by_six * (sigma3 * std::fabs(diffuw) + sigma4 * std::fabs(diffdw))); |
199: } else { |
200: limiter = 0.0; |
201: } |
202: ener_flux(i, j) = mass_flux_y(i, j) * (energy1(i, donor) + limiter); |
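For reference, a scalar restatement of the mass-flux limiter step at lines 163-189 above for a single (i, j) cell. The upwind/donor/downwind row selection of lines 164-173 is assumed to have been done by the caller, and the function name, parameter list, and one_by_six value are illustrative, not taken from the CloverLeafCXX sources:

#include <cmath>

// Hypothetical helper: computes vol_flux_y(i, j) * (density1(i, donor) + limiter)
// as lines 174-189 do, given the already-selected upwind/donor/downwind values.
double mass_flux_y_cell(double vol_flux_y_ij,    // vol_flux_y(i, j)
                        double pre_vol_donor,    // pre_vol(i, donor)
                        double density_upwind,   // density1(i, upwind)
                        double density_donor,    // density1(i, donor)
                        double density_downwind, // density1(i, downwind)
                        double vertexdy_j,       // vertexdy[j]
                        double vertexdy_dif) {   // vertexdy[dif]
  const double one_by_six = 1.0 / 6.0;           // assumed value of one_by_six
  const double sigmat = std::fabs(vol_flux_y_ij) / pre_vol_donor;
  const double sigma3 = (1.0 + sigmat) * (vertexdy_j / vertexdy_dif);
  const double sigma4 = 2.0 - sigmat;
  const double diffuw = density_donor - density_upwind;
  const double diffdw = density_downwind - density_donor;
  const double wind = (diffdw <= 0.0) ? -1.0 : 1.0;
  double limiter = 0.0;
  if (diffuw * diffdw > 0.0) {                   // monotone region: apply the limiter (sigmav == sigmat)
    limiter = (1.0 - sigmat) * wind *
              std::fmin(std::fmin(std::fabs(diffuw), std::fabs(diffdw)),
                        one_by_six * (sigma3 * std::fabs(diffuw) + sigma4 * std::fabs(diffdw)));
  }
  return vol_flux_y_ij * (density_donor + limiter);
}

The energy-flux step at lines 190-202 repeats the same pattern with sigmam in place of sigmat and energy1 in place of density1.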
Coverage (%) | Name | Source Location | Module |
---|---|---|---|
►100.00+ | __kmp_invoke_microtask | | libiomp5.so |
○ | __kmp_invoke_task_func | | libiomp5.so |
Path / |
Metric | Value |
---|---|
CQA speedup if no scalar integer | 1.11 |
CQA speedup if FP arith vectorized | 1.00 |
CQA speedup if fully vectorized | 1.00 |
CQA speedup if no inter-iteration dependency | NA |
CQA speedup if next bottleneck killed | NA |
Bottlenecks | |
Function | advec_cell_kernel(int, int, int, int, int, int, clover::Buffer1D |
Source | context.h:69-69,advec_cell.cpp:158-202 |
Source loop unroll info | unrolled by 8 |
Source loop unroll confidence level | high |
Unroll/vectorization loop type | main |
Unroll factor | 8 |
CQA cycles | 72.00 |
CQA cycles if no scalar integer | 65.00 |
CQA cycles if FP arith vectorized | 72.00 |
CQA cycles if fully vectorized | 72.00 |
Front-end cycles | 44.75 |
DIV/SQRT cycles | 72.00 |
P0 cycles | 9.50 |
P1 cycles | 32.33 |
P2 cycles | 32.33 |
P3 cycles | 9.00 |
P4 cycles | 72.00 |
P5 cycles | 3.60 |
P6 cycles | 9.00 |
P7 cycles | 9.00 |
P8 cycles | 9.00 |
P9 cycles | 3.40 |
P10 cycles | 32.33 |
P11 cycles | 40.00 |
Inter-iter dependencies cycles | 1 |
FE+BE cycles (UFS) | 93.64 - 183.32 |
Stall cycles (UFS) | 57.88 - 147.56 |
Nb insns | 139.50 |
Nb uops | 268.50 |
Nb loads | 20.00 |
Nb stores | 2.00 |
Nb stack references | 6.00 |
FLOP/cycle | 2.78 |
Nb FLOP add-sub | 60.00 |
Nb FLOP mul | 64.00 |
Nb FLOP fma | 28.00 |
Nb FLOP div | 20.00 |
Nb FLOP rcp | 0.00 |
Nb FLOP sqrt | 0.00 |
Nb FLOP rsqrt | 0.00 |
Bytes/cycle | 13.57 |
Bytes prefetched | 0.00 |
Bytes loaded | 852.00 |
Bytes stored | 128.00 |
Stride 0 | 2.00 |
Stride 1 | 0.00 |
Stride n | 0.00 |
Stride unknown | 0.00 |
Stride indirect | 12.00 |
Vectorization ratio all | 99.11 |
Vectorization ratio load | 93.65 |
Vectorization ratio store | 100.00 |
Vectorization ratio mul | 100.00 |
Vectorization ratio add_sub | 100.00 |
Vectorization ratio fma | 100.00 |
Vectorization ratio div_sqrt | 100.00 |
Vectorization ratio other | 98.41 |
Vector-efficiency ratio all | 82.54 |
Vector-efficiency ratio load | 88.10 |
Vector-efficiency ratio store | 100.00 |
Vector-efficiency ratio mul | 100.00 |
Vector-efficiency ratio add_sub | 91.81 |
Vector-efficiency ratio fma | 100.00 |
Vector-efficiency ratio div_sqrt | 100.00 |
Vector-efficiency ratio other | 71.81 |
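As a cross-check on the derived metrics above, the reported FLOP/cycle is consistent with counting each FMA as two FLOPs: (60 add-sub + 64 mul + 2 x 28 fma + 20 div) / 72 CQA cycles = 200 / 72 ≈ 2.78. The same accounting reproduces the 2.94 and 2.58 values in the two per-path tables that follow.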
Metric | Value |
---|---|
CQA speedup if no scalar integer | 1.10 |
CQA speedup if FP arith vectorized | 1.00 |
CQA speedup if fully vectorized | 1.00 |
CQA speedup if no inter-iteration dependency | NA |
CQA speedup if next bottleneck killed | NA |
Bottlenecks | P0, P5 |
Function | advec_cell_kernel(int, int, int, int, int, int, clover::Buffer1D |
Source | context.h:69-69,advec_cell.cpp:158-202 |
Source loop unroll info | unrolled by 8 |
Source loop unroll confidence level | high |
Unroll/vectorization loop type | main |
Unroll factor | 8 |
CQA cycles | 79.00 |
CQA cycles if no scalar integer | 71.50 |
CQA cycles if FP arith vectorized | 79.00 |
CQA cycles if fully vectorized | 79.00 |
Front-end cycles | 47.67 |
DIV/SQRT cycles | 79.00 |
P0 cycles | 11.00 |
P1 cycles | 35.33 |
P2 cycles | 35.33 |
P3 cycles | 9.00 |
P4 cycles | 79.00 |
P5 cycles | 3.60 |
P6 cycles | 9.00 |
P7 cycles | 9.00 |
P8 cycles | 9.00 |
P9 cycles | 3.40 |
P10 cycles | 35.33 |
P11 cycles | 48.00 |
Inter-iter dependencies cycles | 1 |
FE+BE cycles (UFS) | 97.03 - 184.48 |
Stall cycles (UFS) | 58.68 - 146.13 |
Nb insns | 152.00 |
Nb uops | 286.00 |
Nb loads | 22.00 |
Nb stores | 2.00 |
Nb stack references | 7.00 |
FLOP/cycle | 2.94 |
Nb FLOP add-sub | 64.00 |
Nb FLOP mul | 80.00 |
Nb FLOP fma | 32.00 |
Nb FLOP div | 24.00 |
Nb FLOP rcp | 0.00 |
Nb FLOP sqrt | 0.00 |
Nb FLOP rsqrt | 0.00 |
Bytes/cycle | 14.03 |
Bytes prefetched | 0.00 |
Bytes loaded | 980.00 |
Bytes stored | 128.00 |
Stride 0 | 2.00 |
Stride 1 | 0.00 |
Stride n | 0.00 |
Stride unknown | 0.00 |
Stride indirect | 13.00 |
Vectorization ratio all | 99.20 |
Vectorization ratio load | 94.44 |
Vectorization ratio store | 100.00 |
Vectorization ratio mul | 100.00 |
Vectorization ratio add_sub | 100.00 |
Vectorization ratio fma | 100.00 |
Vectorization ratio div_sqrt | 100.00 |
Vectorization ratio other | 98.57 |
Vector-efficiency ratio all | 83.90 |
Vector-efficiency ratio load | 89.58 |
Vector-efficiency ratio store | 100.00 |
Vector-efficiency ratio mul | 100.00 |
Vector-efficiency ratio add_sub | 92.31 |
Vector-efficiency ratio fma | 100.00 |
Vector-efficiency ratio div_sqrt | 100.00 |
Vector-efficiency ratio other | 74.11 |
Metric | Value |
---|---|
CQA speedup if no scalar integer | 1.11 |
CQA speedup if FP arith vectorized | 1.00 |
CQA speedup if fully vectorized | 1.00 |
CQA speedup if no inter-iteration dependency | NA |
CQA speedup if next bottleneck killed | 1.55 |
Bottlenecks | P0, P5 |
Function | advec_cell_kernel(int, int, int, int, int, int, clover::Buffer1D |
Source | context.h:69-69,advec_cell.cpp:158-202 |
Source loop unroll info | unrolled by 8 |
Source loop unroll confidence level | high |
Unroll/vectorization loop type | main |
Unroll factor | 8 |
CQA cycles | 65.00 |
CQA cycles if no scalar integer | 58.50 |
CQA cycles if FP arith vectorized | 65.00 |
CQA cycles if fully vectorized | 65.00 |
Front-end cycles | 41.83 |
DIV/SQRT cycles | 65.00 |
P0 cycles | 8.00 |
P1 cycles | 29.33 |
P2 cycles | 29.33 |
P3 cycles | 9.00 |
P4 cycles | 65.00 |
P5 cycles | 3.60 |
P6 cycles | 9.00 |
P7 cycles | 9.00 |
P8 cycles | 9.00 |
P9 cycles | 3.40 |
P10 cycles | 29.33 |
P11 cycles | 32.00 |
Inter-iter dependencies cycles | 1 |
FE+BE cycles (UFS) | 90.25 - 182.16 |
Stall cycles (UFS) | 57.08 - 148.99 |
Nb insns | 127.00 |
Nb uops | 251.00 |
Nb loads | 18.00 |
Nb stores | 2.00 |
Nb stack references | 5.00 |
FLOP/cycle | 2.58 |
Nb FLOP add-sub | 56.00 |
Nb FLOP mul | 48.00 |
Nb FLOP fma | 24.00 |
Nb FLOP div | 16.00 |
Nb FLOP rcp | 0.00 |
Nb FLOP sqrt | 0.00 |
Nb FLOP rsqrt | 0.00 |
Bytes/cycle | 13.11 |
Bytes prefetched | 0.00 |
Bytes loaded | 724.00 |
Bytes stored | 128.00 |
Stride 0 | 2.00 |
Stride 1 | 0.00 |
Stride n | 0.00 |
Stride unknown | 0.00 |
Stride indirect | 11.00 |
Vectorization ratio all | 99.03 |
Vectorization ratio load | 92.86 |
Vectorization ratio store | 100.00 |
Vectorization ratio mul | 100.00 |
Vectorization ratio add_sub | 100.00 |
Vectorization ratio fma | 100.00 |
Vectorization ratio div_sqrt | 100.00 |
Vectorization ratio other | 98.25 |
Vector-efficiency ratio all | 81.19 |
Vector-efficiency ratio load | 86.61 |
Vector-efficiency ratio store | 100.00 |
Vector-efficiency ratio mul | 100.00 |
Vector-efficiency ratio add_sub | 91.30 |
Vector-efficiency ratio fma | 100.00 |
Vector-efficiency ratio div_sqrt | 100.00 |
Vector-efficiency ratio other | 69.52 |
Path / |
nb instructions | 139.50 |
nb uops | 268.50 |
loop length | 809 |
used x86 registers | 7 |
used mmx registers | 0 |
used xmm registers | 11 |
used ymm registers | 6 |
used zmm registers | 29 |
nb stack references | 6 |
ADD-SUB / MUL ratio | 0.98 |
micro-operation queue | 44.75 cycles |
front end | 44.75 cycles |
P0 | P1 | P2 | P3 | P4 | P5 | P6 | P7 | P8 | P9 | P10 | P11 | |
---|---|---|---|---|---|---|---|---|---|---|---|---|
uops | 72.00 | 6.00 | 32.33 | 32.33 | 9.00 | 72.00 | 3.60 | 9.00 | 9.00 | 9.00 | 3.40 | 32.33 |
cycles | 72.00 | 9.50 | 32.33 | 32.33 | 9.00 | 72.00 | 3.60 | 9.00 | 9.00 | 9.00 | 3.40 | 32.33 |
Cycles executing div or sqrt instructions | 40.00 |
Longest recurrence chain latency (RecMII) | 1.00 |
FE+BE cycles | 93.64-183.32 |
Stall cycles | 57.88-147.56 |
ROB full (events) | 59.08-151.70 |
RS full (events) | 7.25-2.63 |
Front-end | 44.75 |
Dispatch | 72.00 |
DIV/SQRT | 40.00 |
Data deps. | 1.00 |
Overall L1 | 72.00 |
all | 100% |
load | 100% |
store | NA (no store vectorizable/vectorized instructions) |
mul | 100% |
add-sub | 100% |
fma | NA (no fma vectorizable/vectorized instructions) |
other | 100% |
all | 98% |
load | 91% |
store | 100% |
mul | 100% |
add-sub | 100% |
fma | 100% |
div/sqrt | 100% |
other | 97% |
all | 99% |
load | 93% |
store | 100% |
mul | 100% |
add-sub | 100% |
fma | 100% |
div/sqrt | 100% |
other | 98% |
all | 77% |
load | 73% |
store | NA (no store vectorizable/vectorized instructions) |
mul | 100% |
add-sub | 88% |
fma | NA (no fma vectorizable/vectorized instructions) |
other | 62% |
all | 87% |
load | 92% |
store | 100% |
mul | 100% |
add-sub | 100% |
fma | 100% |
div/sqrt | 100% |
other | 79% |
all | 82% |
load | 88% |
store | 100% |
mul | 100% |
add-sub | 91% |
fma | 100% |
div/sqrt | 100% |
other | 71% |
nb instructions | 152 |
nb uops | 286 |
loop length | 890 |
used x86 registers | 7 |
used mmx registers | 0 |
used xmm registers | 11 |
used ymm registers | 6 |
used zmm registers | 29 |
nb stack references | 7 |
ADD-SUB / MUL ratio | 0.80 |
micro-operation queue | 47.67 cycles |
front end | 47.67 cycles |
P0 | P1 | P2 | P3 | P4 | P5 | P6 | P7 | P8 | P9 | P10 | P11 | |
---|---|---|---|---|---|---|---|---|---|---|---|---|
uops | 79.00 | 6.00 | 35.33 | 35.33 | 9.00 | 79.00 | 3.60 | 9.00 | 9.00 | 9.00 | 3.40 | 35.33 |
cycles | 79.00 | 11.00 | 35.33 | 35.33 | 9.00 | 79.00 | 3.60 | 9.00 | 9.00 | 9.00 | 3.40 | 35.33 |
Cycles executing div or sqrt instructions | 48.00 |
Longest recurrence chain latency (RecMII) | 1.00 |
FE+BE cycles | 97.03-184.48 |
Stall cycles | 58.68-146.14 |
ROB full (events) | 58.98-150.25 |
RS full (events) | 11.18-4.08 |
Front-end | 47.67 |
Dispatch | 79.00 |
DIV/SQRT | 48.00 |
Data deps. | 1.00 |
Overall L1 | 79.00 |
all | 100% |
load | 100% |
store | NA (no store vectorizable/vectorized instructions) |
mul | 100% |
add-sub | 100% |
fma | NA (no fma vectorizable/vectorized instructions) |
other | 100% |
all | 98% |
load | 92% |
store | 100% |
mul | 100% |
add-sub | 100% |
fma | 100% |
div/sqrt | 100% |
other | 97% |
all | 99% |
load | 94% |
store | 100% |
mul | 100% |
add-sub | 100% |
fma | 100% |
div/sqrt | 100% |
other | 98% |
all | 78% |
load | 80% |
store | NA (no store vectorizable/vectorized instructions) |
mul | 100% |
add-sub | 88% |
fma | NA (no fma vectorizable/vectorized instructions) |
other | 63% |
all | 88% |
load | 93% |
store | 100% |
mul | 100% |
add-sub | 100% |
fma | 100% |
div/sqrt | 100% |
other | 81% |
all | 83% |
load | 89% |
store | 100% |
mul | 100% |
add-sub | 92% |
fma | 100% |
div/sqrt | 100% |
other | 74% |
Instruction | Nb FU | P0 | P1 | P2 | P3 | P4 | P5 | P6 | P7 | P8 | P9 | P10 | P11 | Latency | Recip. throughput |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
VADDPD %ZMM10,%ZMM6,%ZMM6{%K1} | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VMULPD %ZMM4,%ZMM6,%ZMM2 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VPMULLQ %ZMM1,%ZMM26,%ZMM1 | 5 | 1.50 | 0 | 0 | 0 | 0 | 1.50 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 1.50 |
VPADDQ %ZMM0,%ZMM1,%ZMM0 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
KXNORW %K0,%K0,%K1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
MOV 0x28(%RSP),%RAX | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.33 |
VSCATTERQPD %ZMM2,(%RAX,%ZMM0,8){%K1} | 20 | 2.20 | 0.20 | 0 | 0 | 4 | 0.20 | 0.20 | 4 | 4 | 4 | 0.20 | 0 | 2-12 | 7 |
VPADDQ 0x45e86(%RIP){1to8},%ZMM17,%ZMM17 | 1 | 0.50 | 0 | 0.33 | 0.33 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.67 |
ADD $0x8,%R13 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.17 |
CMP %R14,%R13 | 1 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0 | 1 | 0.20 |
JAE 41db29 <_Z17advec_cell_kerneliiiiiiRN6clover8Buffer1DIdEES2_RNS_8Buffer2DIdEES5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_.extracted.7+0x5d9> | 1 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 |
VMOVDQA64 %ZMM17,%ZMM0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VMOVDQA64 %ZMM16,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
MOV $0x451520,%RAX | 1 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0 | 1 | 0.20 |
CALL %RAX | 2 | 0.50 | 0 | 0 | 0 | 0.50 | 0 | 0.50 | 0.50 | 0.50 | 0.50 | 0 | 0 | 0 | 2.14 |
VPMOVQD %ZMM0,%YMM0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPADDD 0xa0(%RSP),%YMM0,%YMM30 | 1 | 0.33 | 0.33 | 0.33 | 0.33 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.40 |
VMOVDQA64 %ZMM17,%ZMM0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VMOVDQA64 %ZMM16,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
CALL 4513a0 <__svml_i64rem8_z0> | 2 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0.50 | 0.50 | 0.50 | 0 | 0 | 0 | 1 |
VPADDQ %ZMM19,%ZMM0,%ZMM0 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VPSLLQ $0x20,%ZMM0,%ZMM0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2-4 | 1 |
VPSRAQ $0x20,%ZMM0,%ZMM0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2-4 | 1 |
VPMOVSXDQ %YMM30,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPXOR %XMM2,%XMM2,%XMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
VPMULLQ %ZMM1,%ZMM20,%ZMM2 | 5 | 1.50 | 0 | 0 | 0 | 0 | 1.50 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 1.50 |
VPADDQ %ZMM0,%ZMM2,%ZMM2 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VXORPD %XMM4,%XMM4,%XMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
KXNORW %K0,%K0,%K1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
MOV 0x58(%RSP),%RAX | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.33 |
VGATHERQPD (%RAX,%ZMM2,8),%ZMM4{%K1} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VCMPPD $0x1,%ZMM4,%ZMM27,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPCMPEQD %YMM7,%YMM7,%YMM7 | 1 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VPADDD %YMM7,%YMM30,%YMM2 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VPMOVSXDQ %YMM2,%ZMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPBLENDMQ %ZMM5,%ZMM1,%ZMM6{%K1} | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VPXOR %XMM2,%XMM2,%XMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
VPMULLQ %ZMM6,%ZMM22,%ZMM2 | 5 | 1.50 | 0 | 0 | 0 | 0 | 1.50 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 1.50 |
VPADDQ %ZMM0,%ZMM2,%ZMM3 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VPXOR %XMM2,%XMM2,%XMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
KXNORW %K0,%K0,%K2 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
MOV 0x8(%RSP),%RAX | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.33 |
VGATHERQPD (%RAX,%ZMM3,8),%ZMM2{%K2} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VPSUBD %YMM7,%YMM30,%YMM7 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.33 |
VPMINSD %YMM7,%YMM21,%YMM7 | 1 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VPMOVSXDQ %YMM7,%ZMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VMOVDQA64 %ZMM7,%ZMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VXORPD %XMM9,%XMM9,%XMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
KXNORW %K0,%K0,%K2 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
VGATHERDPD (%R12,%YMM30,8),%ZMM9{%K2} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VMOVDQA64 %ZMM5,%ZMM7{%K1} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VANDPD %ZMM28,%ZMM4,%ZMM10 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VDIVPD %ZMM2,%ZMM10,%ZMM10 | 3 | 2.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 22-24 | 16 |
VXORPD %XMM2,%XMM2,%XMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
KXNORW %K0,%K0,%K2 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
VGATHERQPD (%R12,%ZMM7,8),%ZMM2{%K2} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VFMADD213PD %ZMM9,%ZMM10,%ZMM9 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VDIVPD %ZMM2,%ZMM9,%ZMM2 | 3 | 2.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 22-24 | 16 |
VPADDD 0x46de6(%RIP){1to8},%YMM30,%YMM9 | 1 | 0.33 | 0.33 | 0.33 | 0.33 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.40 |
VPXOR %XMM7,%XMM7,%XMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
VPMULLQ %ZMM6,%ZMM23,%ZMM7 | 5 | 1.50 | 0 | 0 | 0 | 0 | 1.50 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 1.50 |
VPADDQ %ZMM0,%ZMM7,%ZMM7 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VXORPD %XMM11,%XMM11,%XMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
KXNORW %K0,%K0,%K2 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
VGATHERQPD (%RDI,%ZMM7,8),%ZMM11{%K2} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VPMOVSXDQ %YMM9,%ZMM8{%K1} | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPXOR %XMM9,%XMM9,%XMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
VPMULLQ %ZMM8,%ZMM23,%ZMM9 | 5 | 1.50 | 0 | 0 | 0 | 0 | 1.50 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 1.50 |
VPADDQ %ZMM0,%ZMM9,%ZMM9 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VXORPD %XMM12,%XMM12,%XMM12 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
KXNORW %K0,%K0,%K2 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
VGATHERQPD (%RDI,%ZMM9,8),%ZMM12{%K2} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VPBLENDMQ %ZMM1,%ZMM5,%ZMM9{%K1} | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VPXOR %XMM5,%XMM5,%XMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
VPMULLQ %ZMM9,%ZMM23,%ZMM5 | 5 | 1.50 | 0 | 0 | 0 | 0 | 1.50 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 1.50 |
VPADDQ %ZMM0,%ZMM5,%ZMM5 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VXORPD %XMM13,%XMM13,%XMM13 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
KXNORW %K0,%K0,%K1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
VGATHERQPD (%RDI,%ZMM5,8),%ZMM13{%K1} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VBROADCASTSD 0x46d4f(%RIP),%ZMM5 | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 3 | 0.33 |
VSUBPD %ZMM10,%ZMM5,%ZMM5 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VSUBPD %ZMM12,%ZMM11,%ZMM12 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VSUBPD %ZMM11,%ZMM13,%ZMM13 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VMULPD %ZMM12,%ZMM13,%ZMM14 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VCMPPD $0x1,%ZMM14,%ZMM27,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VCMPPD $0x1,%ZMM13,%ZMM27,%K2 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VSUBPD %ZMM10,%ZMM31,%ZMM10 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VXORPD %ZMM29,%ZMM10,%ZMM14 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.50 |
VMOVAPD %ZMM10,%ZMM14{%K2} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VANDPD %ZMM28,%ZMM12,%ZMM10 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VANDPD %ZMM28,%ZMM13,%ZMM12 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VMINPD %ZMM12,%ZMM10,%ZMM13 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VMULPD %ZMM2,%ZMM10,%ZMM10 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VFMADD231PD %ZMM12,%ZMM5,%ZMM10 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VMULPD %ZMM18,%ZMM10,%ZMM10 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VMINPD %ZMM10,%ZMM13,%ZMM10 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VFMADD231PD %ZMM14,%ZMM10,%ZMM11{%K1} | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VMULPD %ZMM4,%ZMM11,%ZMM4 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VPXOR %XMM10,%XMM10,%XMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
VPMULLQ %ZMM1,%ZMM24,%ZMM10 | 5 | 1.50 | 0 | 0 | 0 | 0 | 1.50 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 1.50 |
VPADDQ %ZMM0,%ZMM10,%ZMM10 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
KXNORW %K0,%K0,%K1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
MOV 0x30(%RSP),%RAX | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.33 |
VSCATTERQPD %ZMM4,(%RAX,%ZMM10,8){%K1} | 20 | 2.20 | 0.20 | 0 | 0 | 4 | 0.20 | 0.20 | 4 | 4 | 4 | 0.20 | 0 | 2-12 | 7 |
VPMULLQ %ZMM6,%ZMM25,%ZMM6 | 5 | 1.50 | 0 | 0 | 0 | 0 | 1.50 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 1.50 |
VPADDQ %ZMM0,%ZMM6,%ZMM10 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VPXOR %XMM6,%XMM6,%XMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
KXNORW %K0,%K0,%K1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
VGATHERQPD (%R15,%ZMM10,8),%ZMM6{%K1} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VPMULLQ %ZMM8,%ZMM25,%ZMM8 | 5 | 1.50 | 0 | 0 | 0 | 0 | 1.50 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 1.50 |
VPADDQ %ZMM0,%ZMM8,%ZMM8 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VXORPD %XMM10,%XMM10,%XMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
KXNORW %K0,%K0,%K1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
VGATHERQPD (%R15,%ZMM8,8),%ZMM10{%K1} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VPXOR %XMM8,%XMM8,%XMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
VPMULLQ %ZMM9,%ZMM25,%ZMM8 | 5 | 1.50 | 0 | 0 | 0 | 0 | 1.50 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 1.50 |
VPADDQ %ZMM0,%ZMM8,%ZMM8 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VPXOR %XMM9,%XMM9,%XMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
KXNORW %K0,%K0,%K1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
VGATHERQPD (%R15,%ZMM8,8),%ZMM9{%K1} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VSUBPD %ZMM10,%ZMM6,%ZMM8 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VSUBPD %ZMM6,%ZMM9,%ZMM9 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VMULPD %ZMM8,%ZMM9,%ZMM10 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VCMPPD $0x1,%ZMM10,%ZMM27,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
KORTESTB %K1,%K1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
VXORPD %XMM10,%XMM10,%XMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
JE 41d7a0 <_Z17advec_cell_kerneliiiiiiRN6clover8Buffer1DIdEES2_RNS_8Buffer2DIdEES5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_.extracted.7+0x250> | 1 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 |
VPSLLQ $0x3,%ZMM3,%ZMM3 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2-4 | 1 |
VPADDQ 0x100(%RSP),%ZMM3,%ZMM3 | 1 | 0.50 | 0 | 0.33 | 0.33 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.67 |
VPSLLQ $0x3,%ZMM7,%ZMM7 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2-4 | 1 |
VPADDQ 0xc0(%RSP),%ZMM7,%ZMM7 | 1 | 0.50 | 0 | 0.33 | 0.33 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.67 |
KMOVQ %K1,%K2 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
VGATHERQPD (,%ZMM7,1),%ZMM10{%K2} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VXORPD %XMM7,%XMM7,%XMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
KMOVQ %K1,%K2 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
VGATHERQPD (,%ZMM3,1),%ZMM7{%K2} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VANDPD %ZMM28,%ZMM4,%ZMM3 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VMULPD %ZMM10,%ZMM7,%ZMM7 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VDIVPD %ZMM7,%ZMM3,%ZMM3 | 3 | 2.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 22-24 | 16 |
VCMPPD $0x1,%ZMM9,%ZMM27,%K2 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VSUBPD %ZMM3,%ZMM31,%ZMM3 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VXORPD %ZMM29,%ZMM3,%ZMM7 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.50 |
VMOVAPD %ZMM3,%ZMM7{%K2} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VANDPD %ZMM28,%ZMM8,%ZMM3 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VANDPD %ZMM28,%ZMM9,%ZMM8 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VMINPD %ZMM8,%ZMM3,%ZMM9 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VMULPD %ZMM2,%ZMM3,%ZMM2 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VFMADD213PD %ZMM2,%ZMM8,%ZMM5 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VMULPD %ZMM18,%ZMM5,%ZMM2 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VMINPD %ZMM2,%ZMM9,%ZMM2 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VMULPD %ZMM2,%ZMM7,%ZMM10 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
JMP 41d7a0 <_Z17advec_cell_kerneliiiiiiRN6clover8Buffer1DIdEES2_RNS_8Buffer2DIdEES5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_.extracted.7+0x250> | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.08 |
nb instructions | 127 |
nb uops | 251 |
loop length | 728 |
used x86 registers | 7 |
used mmx registers | 0 |
used xmm registers | 11 |
used ymm registers | 6 |
used zmm registers | 29 |
nb stack references | 5 |
ADD-SUB / MUL ratio | 1.17 |
micro-operation queue | 41.83 cycles |
front end | 41.83 cycles |
P0 | P1 | P2 | P3 | P4 | P5 | P6 | P7 | P8 | P9 | P10 | P11 | |
---|---|---|---|---|---|---|---|---|---|---|---|---|
uops | 65.00 | 6.00 | 29.33 | 29.33 | 9.00 | 65.00 | 3.60 | 9.00 | 9.00 | 9.00 | 3.40 | 29.33 |
cycles | 65.00 | 8.00 | 29.33 | 29.33 | 9.00 | 65.00 | 3.60 | 9.00 | 9.00 | 9.00 | 3.40 | 29.33 |
Cycles executing div or sqrt instructions | 32.00 |
Longest recurrence chain latency (RecMII) | 1.00 |
FE+BE cycles | 90.25-182.16 |
Stall cycles | 57.08-148.99 |
ROB full (events) | 59.18-153.15 |
RS full (events) | 3.31-1.17 |
Front-end | 41.83 |
Dispatch | 65.00 |
DIV/SQRT | 32.00 |
Data deps. | 1.00 |
Overall L1 | 65.00 |
all | 100% |
load | 100% |
store | NA (no store vectorizable/vectorized instructions) |
mul | 100% |
add-sub | 100% |
fma | NA (no fma vectorizable/vectorized instructions) |
other | 100% |
all | 98% |
load | 90% |
store | 100% |
mul | 100% |
add-sub | 100% |
fma | 100% |
div/sqrt | 100% |
other | 96% |
all | 99% |
load | 92% |
store | 100% |
mul | 100% |
add-sub | 100% |
fma | 100% |
div/sqrt | 100% |
other | 98% |
all | 76% |
load | 66% |
store | NA (no store vectorizable/vectorized instructions) |
mul | 100% |
add-sub | 87% |
fma | NA (no fma vectorizable/vectorized instructions) |
other | 61% |
all | 86% |
load | 92% |
store | 100% |
mul | 100% |
add-sub | 100% |
fma | 100% |
div/sqrt | 100% |
other | 77% |
all | 81% |
load | 86% |
store | 100% |
mul | 100% |
add-sub | 91% |
fma | 100% |
div/sqrt | 100% |
other | 69% |
Instruction | Nb FU | P0 | P1 | P2 | P3 | P4 | P5 | P6 | P7 | P8 | P9 | P10 | P11 | Latency | Recip. throughput |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
VADDPD %ZMM10,%ZMM6,%ZMM6{%K1} | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VMULPD %ZMM4,%ZMM6,%ZMM2 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VPMULLQ %ZMM1,%ZMM26,%ZMM1 | 5 | 1.50 | 0 | 0 | 0 | 0 | 1.50 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 1.50 |
VPADDQ %ZMM0,%ZMM1,%ZMM0 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
KXNORW %K0,%K0,%K1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
MOV 0x28(%RSP),%RAX | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.33 |
VSCATTERQPD %ZMM2,(%RAX,%ZMM0,8){%K1} | 20 | 2.20 | 0.20 | 0 | 0 | 4 | 0.20 | 0.20 | 4 | 4 | 4 | 0.20 | 0 | 2-12 | 7 |
VPADDQ 0x45e86(%RIP){1to8},%ZMM17,%ZMM17 | 1 | 0.50 | 0 | 0.33 | 0.33 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.67 |
ADD $0x8,%R13 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.17 |
CMP %R14,%R13 | 1 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0 | 1 | 0.20 |
JAE 41db29 <_Z17advec_cell_kerneliiiiiiRN6clover8Buffer1DIdEES2_RNS_8Buffer2DIdEES5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_.extracted.7+0x5d9> | 1 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 |
VMOVDQA64 %ZMM17,%ZMM0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VMOVDQA64 %ZMM16,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
MOV $0x451520,%RAX | 1 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0 | 1 | 0.20 |
CALL %RAX | 2 | 0.50 | 0 | 0 | 0 | 0.50 | 0 | 0.50 | 0.50 | 0.50 | 0.50 | 0 | 0 | 0 | 2.14 |
VPMOVQD %ZMM0,%YMM0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPADDD 0xa0(%RSP),%YMM0,%YMM30 | 1 | 0.33 | 0.33 | 0.33 | 0.33 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.40 |
VMOVDQA64 %ZMM17,%ZMM0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VMOVDQA64 %ZMM16,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
CALL 4513a0 <__svml_i64rem8_z0> | 2 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0.50 | 0.50 | 0.50 | 0 | 0 | 0 | 1 |
VPADDQ %ZMM19,%ZMM0,%ZMM0 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VPSLLQ $0x20,%ZMM0,%ZMM0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2-4 | 1 |
VPSRAQ $0x20,%ZMM0,%ZMM0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2-4 | 1 |
VPMOVSXDQ %YMM30,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPXOR %XMM2,%XMM2,%XMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
VPMULLQ %ZMM1,%ZMM20,%ZMM2 | 5 | 1.50 | 0 | 0 | 0 | 0 | 1.50 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 1.50 |
VPADDQ %ZMM0,%ZMM2,%ZMM2 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VXORPD %XMM4,%XMM4,%XMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
KXNORW %K0,%K0,%K1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
MOV 0x58(%RSP),%RAX | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.33 |
VGATHERQPD (%RAX,%ZMM2,8),%ZMM4{%K1} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VCMPPD $0x1,%ZMM4,%ZMM27,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPCMPEQD %YMM7,%YMM7,%YMM7 | 1 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VPADDD %YMM7,%YMM30,%YMM2 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VPMOVSXDQ %YMM2,%ZMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPBLENDMQ %ZMM5,%ZMM1,%ZMM6{%K1} | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VPXOR %XMM2,%XMM2,%XMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
VPMULLQ %ZMM6,%ZMM22,%ZMM2 | 5 | 1.50 | 0 | 0 | 0 | 0 | 1.50 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 1.50 |
VPADDQ %ZMM0,%ZMM2,%ZMM3 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VPXOR %XMM2,%XMM2,%XMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
KXNORW %K0,%K0,%K2 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
MOV 0x8(%RSP),%RAX | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.33 |
VGATHERQPD (%RAX,%ZMM3,8),%ZMM2{%K2} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VPSUBD %YMM7,%YMM30,%YMM7 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.33 |
VPMINSD %YMM7,%YMM21,%YMM7 | 1 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VPMOVSXDQ %YMM7,%ZMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VMOVDQA64 %ZMM7,%ZMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VXORPD %XMM9,%XMM9,%XMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
KXNORW %K0,%K0,%K2 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
VGATHERDPD (%R12,%YMM30,8),%ZMM9{%K2} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VMOVDQA64 %ZMM5,%ZMM7{%K1} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VANDPD %ZMM28,%ZMM4,%ZMM10 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VDIVPD %ZMM2,%ZMM10,%ZMM10 | 3 | 2.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 22-24 | 16 |
VXORPD %XMM2,%XMM2,%XMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
KXNORW %K0,%K0,%K2 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
VGATHERQPD (%R12,%ZMM7,8),%ZMM2{%K2} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VFMADD213PD %ZMM9,%ZMM10,%ZMM9 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VDIVPD %ZMM2,%ZMM9,%ZMM2 | 3 | 2.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 22-24 | 16 |
VPADDD 0x46de6(%RIP){1to8},%YMM30,%YMM9 | 1 | 0.33 | 0.33 | 0.33 | 0.33 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.40 |
VPXOR %XMM7,%XMM7,%XMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
VPMULLQ %ZMM6,%ZMM23,%ZMM7 | 5 | 1.50 | 0 | 0 | 0 | 0 | 1.50 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 1.50 |
VPADDQ %ZMM0,%ZMM7,%ZMM7 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VXORPD %XMM11,%XMM11,%XMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
KXNORW %K0,%K0,%K2 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
VGATHERQPD (%RDI,%ZMM7,8),%ZMM11{%K2} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VPMOVSXDQ %YMM9,%ZMM8{%K1} | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPXOR %XMM9,%XMM9,%XMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
VPMULLQ %ZMM8,%ZMM23,%ZMM9 | 5 | 1.50 | 0 | 0 | 0 | 0 | 1.50 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 1.50 |
VPADDQ %ZMM0,%ZMM9,%ZMM9 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VXORPD %XMM12,%XMM12,%XMM12 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
KXNORW %K0,%K0,%K2 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
VGATHERQPD (%RDI,%ZMM9,8),%ZMM12{%K2} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VPBLENDMQ %ZMM1,%ZMM5,%ZMM9{%K1} | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VPXOR %XMM5,%XMM5,%XMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
VPMULLQ %ZMM9,%ZMM23,%ZMM5 | 5 | 1.50 | 0 | 0 | 0 | 0 | 1.50 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 1.50 |
VPADDQ %ZMM0,%ZMM5,%ZMM5 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VXORPD %XMM13,%XMM13,%XMM13 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
KXNORW %K0,%K0,%K1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
VGATHERQPD (%RDI,%ZMM5,8),%ZMM13{%K1} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VBROADCASTSD 0x46d4f(%RIP),%ZMM5 | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 3 | 0.33 |
VSUBPD %ZMM10,%ZMM5,%ZMM5 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VSUBPD %ZMM12,%ZMM11,%ZMM12 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VSUBPD %ZMM11,%ZMM13,%ZMM13 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VMULPD %ZMM12,%ZMM13,%ZMM14 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VCMPPD $0x1,%ZMM14,%ZMM27,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VCMPPD $0x1,%ZMM13,%ZMM27,%K2 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VSUBPD %ZMM10,%ZMM31,%ZMM10 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VXORPD %ZMM29,%ZMM10,%ZMM14 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.50 |
VMOVAPD %ZMM10,%ZMM14{%K2} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VANDPD %ZMM28,%ZMM12,%ZMM10 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VANDPD %ZMM28,%ZMM13,%ZMM12 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VMINPD %ZMM12,%ZMM10,%ZMM13 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VMULPD %ZMM2,%ZMM10,%ZMM10 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VFMADD231PD %ZMM12,%ZMM5,%ZMM10 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VMULPD %ZMM18,%ZMM10,%ZMM10 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VMINPD %ZMM10,%ZMM13,%ZMM10 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VFMADD231PD %ZMM14,%ZMM10,%ZMM11{%K1} | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VMULPD %ZMM4,%ZMM11,%ZMM4 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VPXOR %XMM10,%XMM10,%XMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
VPMULLQ %ZMM1,%ZMM24,%ZMM10 | 5 | 1.50 | 0 | 0 | 0 | 0 | 1.50 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 1.50 |
VPADDQ %ZMM0,%ZMM10,%ZMM10 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
KXNORW %K0,%K0,%K1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
MOV 0x30(%RSP),%RAX | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.33 |
VSCATTERQPD %ZMM4,(%RAX,%ZMM10,8){%K1} | 20 | 2.20 | 0.20 | 0 | 0 | 4 | 0.20 | 0.20 | 4 | 4 | 4 | 0.20 | 0 | 2-12 | 7 |
VPMULLQ %ZMM6,%ZMM25,%ZMM6 | 5 | 1.50 | 0 | 0 | 0 | 0 | 1.50 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 1.50 |
VPADDQ %ZMM0,%ZMM6,%ZMM10 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VPXOR %XMM6,%XMM6,%XMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
KXNORW %K0,%K0,%K1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
VGATHERQPD (%R15,%ZMM10,8),%ZMM6{%K1} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VPMULLQ %ZMM8,%ZMM25,%ZMM8 | 5 | 1.50 | 0 | 0 | 0 | 0 | 1.50 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 1.50 |
VPADDQ %ZMM0,%ZMM8,%ZMM8 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VXORPD %XMM10,%XMM10,%XMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
KXNORW %K0,%K0,%K1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
VGATHERQPD (%R15,%ZMM8,8),%ZMM10{%K1} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VPXOR %XMM8,%XMM8,%XMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
VPMULLQ %ZMM9,%ZMM25,%ZMM8 | 5 | 1.50 | 0 | 0 | 0 | 0 | 1.50 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 1.50 |
VPADDQ %ZMM0,%ZMM8,%ZMM8 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VPXOR %XMM9,%XMM9,%XMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
KXNORW %K0,%K0,%K1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
VGATHERQPD (%R15,%ZMM8,8),%ZMM9{%K1} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VSUBPD %ZMM10,%ZMM6,%ZMM8 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VSUBPD %ZMM6,%ZMM9,%ZMM9 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VMULPD %ZMM8,%ZMM9,%ZMM10 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VCMPPD $0x1,%ZMM10,%ZMM27,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
KORTESTB %K1,%K1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
VXORPD %XMM10,%XMM10,%XMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
JE 41d7a0 <_Z17advec_cell_kerneliiiiiiRN6clover8Buffer1DIdEES2_RNS_8Buffer2DIdEES5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_.extracted.7+0x250> | 1 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 |