Function: Step10_orig | Module: exec | Source: Step10_orig.c:10-41 [...] | Coverage: 57.40% |
---|
/home/eoseret/qaas_runs_CPU_9468/172-289-8348/intel/HACCmk/build/HACCmk/src/Step10_orig.c: 10 - 41 |
-------------------------------------------------------------------------------- |
10: { |
[...] |
19: for ( j = 0; j < count1; j++ ) |
20: { |
21: dxc = xx1[j] - xxi; |
22: dyc = yy1[j] - yyi; |
23: dzc = zz1[j] - zzi; |
24: |
25: r2 = dxc * dxc + dyc * dyc + dzc * dzc; |
26: |
27: m = ( r2 < fsrrmax2 ) ? mass1[j] : 0.0f; |
28: |
29: f = pow( r2 + mp_rsm2, -1.5 ) - ( ma0 + r2*(ma1 + r2*(ma2 + r2*(ma3 + r2*(ma4 + r2*ma5))))); |
30: |
31: f = ( r2 > 0.0f ) ? m * f : 0.0f; |
32: |
33: xi = xi + f * dxc; |
34: yi = yi + f * dyc; |
35: zi = zi + f * dzc; |
36: } |
37: |
38: *dxi = xi; |
39: *dyi = yi; |
40: *dzi = zi; |
41: } |
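A note on the pow() call at line 29, as a hedged reading of the disassembly below: the compiler does not actually call pow(). Because the exponent is the constant -1.5, it evaluates pow(x, -1.5) as sqrt(x) / (x*x) in double precision (VSQRTPD, VMULPD, VDIVPD, then a fused add with the polynomial), converting to and from single precision around it (VCVTPS2PD / VCVTPD2PS). A minimal scalar sketch of one j-iteration in that lowered form (written here for illustration, not taken from the benchmark source):

#include <math.h>

/* Hypothetical helper, assuming ma0..ma5, mp_rsm2 and fsrrmax2 are the same
   constants that appear in the listing above. */
static float force_term(float r2, float mass, float mp_rsm2, float fsrrmax2,
                        float ma0, float ma1, float ma2,
                        float ma3, float ma4, float ma5)
{
    float  poly = ma0 + r2*(ma1 + r2*(ma2 + r2*(ma3 + r2*(ma4 + r2*ma5))));
    double x    = (double)(r2 + mp_rsm2);
    double inv  = sqrt(x) / (x * x);          /* == pow(x, -1.5) */
    float  m    = (r2 < fsrrmax2) ? mass : 0.0f;
    float  f    = (float)inv - poly;
    return (r2 > 0.0f) ? m * f : 0.0f;
}

The double-precision sqrt and divide are what appear as VSQRTPD / VDIVPD on FP0/FP1 in the port tables further down.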
0x401b20 PUSH %RBP |
0x401b21 MOV %RSP,%RBP |
0x401b24 PUSH %RBX |
0x401b25 MOV 0x18(%RBP),%RAX |
0x401b29 MOV 0x10(%RBP),%R10 |
0x401b2d TEST %EDI,%EDI |
0x401b2f JLE 401d27 |
0x401b35 MOV %EDI,%R11D |
0x401b38 MOV $-0x8,%EDI |
0x401b3d AND %R11,%RDI |
0x401b40 JE 401d38 |
0x401b46 VBROADCASTSD 0x6711(%RIP),%YMM8 |
0x401b4f VBROADCASTSS 0x6710(%RIP),%YMM9 |
0x401b58 VBROADCASTSS 0x670b(%RIP),%YMM10 |
0x401b61 VBROADCASTSS 0x6706(%RIP),%YMM11 |
0x401b6a VBROADCASTSS 0x6701(%RIP),%YMM13 |
0x401b73 VBROADCASTSS 0x66fc(%RIP),%YMM14 |
0x401b7c VBROADCASTSS 0x66f7(%RIP),%YMM15 |
0x401b85 VBROADCASTSS %XMM0,%YMM6 |
0x401b8a VBROADCASTSS %XMM1,%YMM5 |
0x401b8f VBROADCASTSS %XMM3,%YMM1 |
0x401b94 VBROADCASTSS %XMM4,%YMM0 |
0x401b99 VBROADCASTSS %XMM2,%YMM2 |
0x401b9e VXORPS %XMM3,%XMM3,%XMM3 |
0x401ba2 VXORPS %XMM12,%XMM12,%XMM12 |
0x401ba7 VXORPS %XMM7,%XMM7,%XMM7 |
0x401bab VXORPS %XMM4,%XMM4,%XMM4 |
0x401baf XOR %EBX,%EBX |
0x401bb1 NOPW %CS:(%RAX,%RAX,1) |
(5) 0x401bc0 VMOVUPS (%RSI,%RBX,4),%YMM16 |
(5) 0x401bc7 VMOVUPS (%RDX,%RBX,4),%YMM17 |
(5) 0x401bce VMOVUPS (%RCX,%RBX,4),%YMM18 |
(5) 0x401bd5 VMOVAPS %YMM9,%YMM25 |
(5) 0x401bdb VSUBPS %YMM6,%YMM16,%YMM16 |
(5) 0x401be1 VSUBPS %YMM5,%YMM17,%YMM17 |
(5) 0x401be7 VSUBPS %YMM2,%YMM18,%YMM18 |
(5) 0x401bed VMULPS %YMM16,%YMM16,%YMM19 |
(5) 0x401bf3 VFMADD231PS %YMM17,%YMM17,%YMM19 |
(5) 0x401bf9 VFMADD231PS %YMM18,%YMM18,%YMM19 |
(5) 0x401bff VADDPS %YMM0,%YMM19,%YMM21 |
(5) 0x401c05 VFMADD213PS %YMM10,%YMM19,%YMM25 |
(5) 0x401c0b VCMPPS $0x1,%YMM1,%YMM19,%K1 |
(5) 0x401c12 VMOVUPS (%R8,%RBX,4),%YMM20{%K1}{z} |
(5) 0x401c19 VCMPPS $0x1,%YMM19,%YMM3,%K1 |
(5) 0x401c20 ADD $0x8,%RBX |
(5) 0x401c24 VEXTRACTF32X4 $0x1,%YMM21,%XMM22 |
(5) 0x401c2b VCVTPS2PD %XMM21,%YMM21 |
(5) 0x401c31 VFMADD213PS %YMM11,%YMM19,%YMM25 |
(5) 0x401c37 VCVTPS2PD %XMM22,%YMM22 |
(5) 0x401c3d VSQRTPD %YMM21,%YMM23 |
(5) 0x401c43 VMULPD %YMM21,%YMM21,%YMM21 |
(5) 0x401c49 VFMADD213PS %YMM13,%YMM19,%YMM25 |
(5) 0x401c4f VSQRTPD %YMM22,%YMM24 |
(5) 0x401c55 VMULPD %YMM22,%YMM22,%YMM22 |
(5) 0x401c5b VDIVPD %YMM21,%YMM8,%YMM21 |
(5) 0x401c61 VFMADD213PS %YMM14,%YMM19,%YMM25 |
(5) 0x401c67 VDIVPD %YMM22,%YMM8,%YMM22 |
(5) 0x401c6d VFMADD213PS %YMM15,%YMM19,%YMM25 |
(5) 0x401c73 VCVTPS2PD %XMM25,%YMM26 |
(5) 0x401c79 VFMADD231PD %YMM21,%YMM23,%YMM26 |
(5) 0x401c7f VEXTRACTF32X4 $0x1,%YMM25,%XMM21 |
(5) 0x401c86 VCVTPS2PD %XMM21,%YMM21 |
(5) 0x401c8c VFMADD231PD %YMM22,%YMM24,%YMM21 |
(5) 0x401c92 VCVTPD2PS %YMM26,%XMM22 |
(5) 0x401c98 VCVTPD2PS %YMM21,%XMM21 |
(5) 0x401c9e VINSERTF32X4 $0x1,%XMM21,%YMM22,%YMM21 |
(5) 0x401ca5 VMULPS %YMM21,%YMM20,%YMM19{%K1}{z} |
(5) 0x401cab VFMADD231PS %YMM16,%YMM19,%YMM12 |
(5) 0x401cb1 VFMADD231PS %YMM17,%YMM19,%YMM7 |
(5) 0x401cb7 VFMADD231PS %YMM18,%YMM19,%YMM4 |
(5) 0x401cbd CMP %RDI,%RBX |
(5) 0x401cc0 JB 401bc0 |
0x401cc6 VEXTRACTF128 $0x1,%YMM12,%XMM3 |
0x401ccc VADDPS %XMM3,%XMM12,%XMM3 |
0x401cd0 VSHUFPD $0x1,%XMM3,%XMM3,%XMM8 |
0x401cd5 VADDPS %XMM3,%XMM8,%XMM3 |
0x401cd9 VMOVSHDUP %XMM3,%XMM8 |
0x401cdd VADDSS %XMM3,%XMM8,%XMM3 |
0x401ce1 VEXTRACTF128 $0x1,%YMM7,%XMM8 |
0x401ce7 VADDPS %XMM7,%XMM8,%XMM7 |
0x401ceb VSHUFPD $0x1,%XMM7,%XMM7,%XMM8 |
0x401cf0 VADDPS %XMM7,%XMM8,%XMM7 |
0x401cf4 VMOVSHDUP %XMM7,%XMM8 |
0x401cf8 VADDSS %XMM7,%XMM8,%XMM7 |
0x401cfc VEXTRACTF128 $0x1,%YMM4,%XMM8 |
0x401d02 VADDPS %XMM4,%XMM8,%XMM4 |
0x401d06 VSHUFPD $0x1,%XMM4,%XMM4,%XMM8 |
0x401d0b VADDPS %XMM4,%XMM8,%XMM4 |
0x401d0f VMOVSHDUP %XMM4,%XMM8 |
0x401d13 VADDSS %XMM4,%XMM8,%XMM4 |
0x401d17 VPBROADCASTQ %R11,%YMM8 |
0x401d1d CMP %R11,%RDI |
0x401d20 JNE 401d65 |
0x401d22 JMP 401eed |
0x401d27 VXORPS %XMM3,%XMM3,%XMM3 |
0x401d2b VXORPS %XMM7,%XMM7,%XMM7 |
0x401d2f VXORPS %XMM4,%XMM4,%XMM4 |
0x401d33 JMP 401eed |
0x401d38 VBROADCASTSS %XMM0,%YMM6 |
0x401d3d VBROADCASTSS %XMM1,%YMM5 |
0x401d42 VBROADCASTSS %XMM3,%YMM1 |
0x401d47 VBROADCASTSS %XMM4,%YMM0 |
0x401d4c VBROADCASTSS %XMM2,%YMM2 |
0x401d51 VPBROADCASTQ %R11,%YMM8 |
0x401d57 VXORPS %XMM4,%XMM4,%XMM4 |
0x401d5b VXORPS %XMM7,%XMM7,%XMM7 |
0x401d5f VXORPS %XMM3,%XMM3,%XMM3 |
0x401d63 XOR %EDI,%EDI |
0x401d65 VPBROADCASTQ %RDI,%YMM9 |
0x401d6b VBROADCASTSD 0x64ec(%RIP),%YMM13 |
0x401d74 VPSUBQ %YMM9,%YMM8,%YMM10 |
0x401d79 VPCMPNLEUQ 0x651c(%RIP),%YMM10,%K1 |
0x401d84 VPSUBQ %YMM9,%YMM8,%YMM8 |
0x401d89 VPCMPNLEUQ 0x64ec(%RIP),%YMM8,%K0 |
0x401d94 VPXOR %XMM9,%XMM9,%XMM9 |
0x401d99 KSHIFTLB $0x4,%K1,%K1 |
0x401d9f KORB %K1,%K0,%K1 |
0x401da3 VMOVUPS (%RSI,%RDI,4),%YMM8{%K1}{z} |
0x401daa VMOVUPS (%RDX,%RDI,4),%YMM12{%K1}{z} |
0x401db1 VMOVUPS (%RCX,%RDI,4),%YMM11{%K1}{z} |
0x401db8 VSUBPS %YMM6,%YMM8,%YMM6 |
0x401dbc VSUBPS %YMM5,%YMM12,%YMM5 |
0x401dc0 VSUBPS %YMM2,%YMM11,%YMM2 |
0x401dc4 VMULPS %YMM6,%YMM6,%YMM15 |
0x401dc8 VFMADD231PS %YMM5,%YMM5,%YMM15 |
0x401dcd VFMADD231PS %YMM2,%YMM2,%YMM15 |
0x401dd2 VADDPS %YMM0,%YMM15,%YMM0 |
0x401dd6 VCMPPS $0x1,%YMM1,%YMM15,%K2{%K1} |
0x401ddd VMOVUPS (%R8,%RDI,4),%YMM1{%K2}{z} |
0x401de4 VCMPPS $0x1,%YMM15,%YMM9,%K2 |
0x401deb VEXTRACTF128 $0x1,%YMM0,%XMM10 |
0x401df1 VCVTPS2PD %XMM0,%YMM0 |
0x401df5 VCVTPS2PD %XMM10,%YMM10 |
0x401dfa VSQRTPD %YMM0,%YMM11 |
0x401dfe VMULPD %YMM0,%YMM0,%YMM0 |
0x401e02 VSQRTPD %YMM10,%YMM12 |
0x401e07 VMULPD %YMM10,%YMM10,%YMM10 |
0x401e0c VDIVPD %YMM0,%YMM13,%YMM0 |
0x401e10 VDIVPD %YMM10,%YMM13,%YMM10 |
0x401e15 VBROADCASTSS 0x644a(%RIP),%YMM13 |
0x401e1e VFMADD213PS 0x6444(%RIP){1to8},%YMM15,%YMM13 |
0x401e28 VFMADD213PS 0x643e(%RIP){1to8},%YMM15,%YMM13 |
0x401e32 VFMADD213PS 0x6438(%RIP){1to8},%YMM15,%YMM13 |
0x401e3c VFMADD213PS 0x6432(%RIP){1to8},%YMM15,%YMM13 |
0x401e46 VFMADD213PS 0x642c(%RIP){1to8},%YMM15,%YMM13 |
0x401e50 VCVTPS2PD %XMM13,%YMM14 |
0x401e55 VFMADD231PD %YMM0,%YMM11,%YMM14 |
0x401e5a VEXTRACTF128 $0x1,%YMM13,%XMM0 |
0x401e60 VCVTPS2PD %XMM0,%YMM0 |
0x401e64 VCVTPD2PS %YMM14,%XMM11 |
0x401e69 VFMADD231PD %YMM10,%YMM12,%YMM0 |
0x401e6e VCVTPD2PS %YMM0,%XMM0 |
0x401e72 VINSERTF128 $0x1,%XMM0,%YMM11,%YMM0 |
0x401e78 VMULPS %YMM0,%YMM1,%YMM0{%K2}{z} |
0x401e7e VMULPS %YMM6,%YMM0,%YMM1{%K1}{z} |
0x401e84 VMULPS %YMM5,%YMM0,%YMM5{%K1}{z} |
0x401e8a VMULPS %YMM2,%YMM0,%YMM0{%K1}{z} |
0x401e90 VEXTRACTF128 $0x1,%YMM1,%XMM2 |
0x401e96 VADDPS %XMM2,%XMM1,%XMM1 |
0x401e9a VSHUFPD $0x1,%XMM1,%XMM1,%XMM2 |
0x401e9f VADDPS %XMM2,%XMM1,%XMM1 |
0x401ea3 VMOVSHDUP %XMM1,%XMM2 |
0x401ea7 VADDSS %XMM2,%XMM1,%XMM1 |
0x401eab VADDSS %XMM1,%XMM3,%XMM3 |
0x401eaf VEXTRACTF128 $0x1,%YMM5,%XMM1 |
0x401eb5 VADDPS %XMM1,%XMM5,%XMM1 |
0x401eb9 VSHUFPD $0x1,%XMM1,%XMM1,%XMM2 |
0x401ebe VADDPS %XMM2,%XMM1,%XMM1 |
0x401ec2 VMOVSHDUP %XMM1,%XMM2 |
0x401ec6 VADDSS %XMM2,%XMM1,%XMM1 |
0x401eca VADDSS %XMM1,%XMM7,%XMM7 |
0x401ece VEXTRACTF128 $0x1,%YMM0,%XMM1 |
0x401ed4 VADDPS %XMM1,%XMM0,%XMM0 |
0x401ed8 VSHUFPD $0x1,%XMM0,%XMM0,%XMM1 |
0x401edd VADDPS %XMM1,%XMM0,%XMM0 |
0x401ee1 VMOVSHDUP %XMM0,%XMM1 |
0x401ee5 VADDSS %XMM1,%XMM0,%XMM0 |
0x401ee9 VADDSS %XMM0,%XMM4,%XMM4 |
0x401eed VMOVSS %XMM3,(%R9) |
0x401ef2 VMOVSS %XMM7,(%R10) |
0x401ef7 VMOVSS %XMM4,(%RAX) |
0x401efb POP %RBX |
0x401efc POP %RBP |
0x401efd VZEROUPPER |
0x401f00 RET |
0x401f01 NOPL (%RAX) |
0x401f04 NOPW %CS:(%RAX,%RAX,1) |
0x401f0e XCHG %AX,%AX |
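Read as control flow, the listing above splits the j loop into an 8-wide main loop (0x401bc0 to 0x401cc0) over the largest multiple of 8 below count1 (the AND with -0x8 at 0x401b3d), followed by a single masked pass for the remaining 0 to 7 elements (0x401d65 onward, with the load mask %K1 built by the two VPCMPNLEUQ instructions). A rough scalar sketch of that structure, for orientation only and not part of the original source:

static void loop_shape(int count1)
{
    long vec_end = (long)count1 & ~7L;   /* multiples of 8: main loop at 0x401bc0 */
    long j = 0;
    for (; j < vec_end; j += 8) {
        /* eight j-iterations per pass; partial sums kept in YMM12, YMM7, YMM4 */
    }
    if (j < count1) {
        /* masked tail: loads predicated by %K1, cutoff test by %K2 */
    }
}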
Coverage (%) | Name | Source Location | Module |
---|---|---|---|
►99.99+ | main.extracted.8 | main.c:142 | exec |
○ | __kmp_invoke_microtask | libiomp5.so | |
○ | __kmp_invoke_task_func | libiomp5.so |
Path / |
Source file and lines | Step10_orig.c:10-41 |
Module | exec |
nb instructions | 145 |
nb uops | 148 |
loop length | 746 |
used x86 registers | 12 |
used mmx registers | 0 |
used xmm registers | 13 |
used ymm registers | 15 |
used zmm registers | 0 |
nb stack references | 2 |
ADD-SUB / MUL ratio | 3.57 |
micro-operation queue | 24.67 cycles |
front end | 24.67 cycles |
Port | ALU0/BRU0 | ALU1 | ALU2 | ALU3 | BRU1 | AGU0 | AGU1 | AGU2 | FP0 | FP1 | FP2 | FP3 | FP4 | FP5 |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
uops | 3.00 | 1.50 | 1.25 | 1.25 | 3.00 | 7.67 | 7.67 | 7.67 | 24.25 | 24.25 | 24.33 | 24.17 | 1.50 | 1.50 |
cycles | 3.00 | 1.50 | 1.25 | 1.25 | 3.00 | 7.67 | 7.67 | 7.67 | 24.25 | 24.25 | 24.33 | 24.17 | 1.50 | 1.50 |
Cycles executing div or sqrt instructions | 27.00 |
Front-end | 24.67 |
Dispatch | 24.33 |
DIV/SQRT | 27.00 |
Overall L1 | 27.00 |
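The 27.00-cycle DIV/SQRT figure matches the per-instruction table below: each 8-element iteration of the main loop issues two VSQRTPD (reciprocal throughput 8.50 cycles each) and two VDIVPD (5 cycles each) on the FP0/FP1 divide/sqrt unit, i.e. 2 x 8.50 + 2 x 5.00 = 27.00 cycles per iteration. That exceeds the 24.67-cycle front-end bound and the 24.33-cycle dispatch bound, so the divide/sqrt pipe sets the Overall L1 estimate.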
all | 54% |
load | 100% |
store | NA (no store vectorizable/vectorized instructions) |
mul | NA (no mul vectorizable/vectorized instructions) |
add-sub | 100% |
fma | NA (no fma vectorizable/vectorized instructions) |
other | 44% |
all | 71% |
load | 50% |
store | 0% |
mul | 100% |
add-sub | 64% |
fma | 100% |
div/sqrt | 100% |
other | 67% |
all | 70% |
load | 55% |
store | 0% |
mul | 100% |
add-sub | 66% |
fma | 100% |
div/sqrt | 100% |
other | 64% |
all | 27% |
load | 50% |
store | NA (no store vectorizable/vectorized instructions) |
mul | NA (no mul vectorizable/vectorized instructions) |
add-sub | 50% |
fma | NA (no fma vectorizable/vectorized instructions) |
other | 22% |
all | 26% |
load | 28% |
store | 6% |
mul | 50% |
add-sub | 22% |
fma | 50% |
div/sqrt | 50% |
other | 19% |
all | 26% |
load | 30% |
store | 6% |
mul | 50% |
add-sub | 24% |
fma | 50% |
div/sqrt | 50% |
other | 19% |
Instruction | Nb FU | ALU0/BRU0 | ALU1 | ALU2 | ALU3 | BRU1 | AGU0 | AGU1 | AGU2 | FP0 | FP1 | FP2 | FP3 | FP4 | FP5 | Latency | Recip. throughput | Vectorization |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
PUSH %RBP | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 | N/A |
MOV %RSP,%RBP | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | N/A |
PUSH %RBX | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 | N/A |
MOV 0x18(%RBP),%RAX | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.33 | N/A |
MOV 0x10(%RBP),%R10 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.33 | N/A |
TEST %EDI,%EDI | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | scal (6.3%) |
JLE 401d27 <Step10_orig+0x207> | 1 | 0.50 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50-1 | N/A |
MOV %EDI,%R11D | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | N/A |
MOV $-0x8,%EDI | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
AND %R11,%RDI | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
JE 401d38 <Step10_orig+0x218> | 1 | 0.50 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50-1 | N/A |
VBROADCASTSD 0x6711(%RIP),%YMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 | scal (12.5%) |
VBROADCASTSS 0x6710(%RIP),%YMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 | scal (6.3%) |
VBROADCASTSS 0x670b(%RIP),%YMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 | scal (6.3%) |
VBROADCASTSS 0x6706(%RIP),%YMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 | scal (6.3%) |
VBROADCASTSS 0x6701(%RIP),%YMM13 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 | scal (6.3%) |
VBROADCASTSS 0x66fc(%RIP),%YMM14 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 | scal (6.3%) |
VBROADCASTSS 0x66f7(%RIP),%YMM15 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 | scal (6.3%) |
VBROADCASTSS %XMM0,%YMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 1 | 0.50 | scal (6.3%) |
VBROADCASTSS %XMM1,%YMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 1 | 0.50 | scal (6.3%) |
VBROADCASTSS %XMM3,%YMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 1 | 0.50 | scal (6.3%) |
VBROADCASTSS %XMM4,%YMM0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 1 | 0.50 | scal (6.3%) |
VBROADCASTSS %XMM2,%YMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 1 | 0.50 | scal (6.3%) |
VXORPS %XMM3,%XMM3,%XMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
VXORPS %XMM12,%XMM12,%XMM12 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
VXORPS %XMM7,%XMM7,%XMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
VXORPS %XMM4,%XMM4,%XMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
XOR %EBX,%EBX | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | scal (6.3%) |
NOPW %CS:(%RAX,%RAX,1) | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.09 | N/A |
VEXTRACTF128 $0x1,%YMM12,%XMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 1 | 0.25 | vect (25.0%) |
VADDPS %XMM3,%XMM12,%XMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 0.50 | vect (25.0%) |
VSHUFPD $0x1,%XMM3,%XMM3,%XMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 1 | 0.33 | vect (25.0%) |
VADDPS %XMM3,%XMM8,%XMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 0.50 | vect (25.0%) |
VMOVSHDUP %XMM3,%XMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 1 | 0.33 | vect (12.5%) |
VADDSS %XMM3,%XMM8,%XMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 0.50 | scal (6.3%) |
VEXTRACTF128 $0x1,%YMM7,%XMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 1 | 0.25 | vect (25.0%) |
VADDPS %XMM7,%XMM8,%XMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 0.50 | vect (25.0%) |
VSHUFPD $0x1,%XMM7,%XMM7,%XMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 1 | 0.33 | vect (25.0%) |
VADDPS %XMM7,%XMM8,%XMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 0.50 | vect (25.0%) |
VMOVSHDUP %XMM7,%XMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 1 | 0.33 | vect (12.5%) |
VADDSS %XMM7,%XMM8,%XMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 0.50 | scal (6.3%) |
VEXTRACTF128 $0x1,%YMM4,%XMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 1 | 0.25 | vect (25.0%) |
VADDPS %XMM4,%XMM8,%XMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 0.50 | vect (25.0%) |
VSHUFPD $0x1,%XMM4,%XMM4,%XMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 1 | 0.33 | vect (25.0%) |
VADDPS %XMM4,%XMM8,%XMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 0.50 | vect (25.0%) |
VMOVSHDUP %XMM4,%XMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 1 | 0.33 | vect (12.5%) |
VADDSS %XMM4,%XMM8,%XMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 0.50 | scal (6.3%) |
VPBROADCASTQ %R11,%YMM8 | 2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | scal (12.5%) |
CMP %R11,%RDI | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
JNE 401d65 <Step10_orig+0x245> | 1 | 0.50 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50-1 | N/A |
JMP 401eed <Step10_orig+0x3cd> | 1 | 0.50 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | N/A |
VXORPS %XMM3,%XMM3,%XMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
VXORPS %XMM7,%XMM7,%XMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
VXORPS %XMM4,%XMM4,%XMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
JMP 401eed <Step10_orig+0x3cd> | 1 | 0.50 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | N/A |
VBROADCASTSS %XMM0,%YMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 1 | 0.50 | scal (6.3%) |
VBROADCASTSS %XMM1,%YMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 1 | 0.50 | scal (6.3%) |
VBROADCASTSS %XMM3,%YMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 1 | 0.50 | scal (6.3%) |
VBROADCASTSS %XMM4,%YMM0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 1 | 0.50 | scal (6.3%) |
VBROADCASTSS %XMM2,%YMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 1 | 0.50 | scal (6.3%) |
VPBROADCASTQ %R11,%YMM8 | 2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | scal (12.5%) |
VXORPS %XMM4,%XMM4,%XMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
VXORPS %XMM7,%XMM7,%XMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
VXORPS %XMM3,%XMM3,%XMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
XOR %EDI,%EDI | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | N/A |
VPBROADCASTQ %RDI,%YMM9 | 2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | scal (12.5%) |
VBROADCASTSD 0x64ec(%RIP),%YMM13 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 | scal (12.5%) |
VPSUBQ %YMM9,%YMM8,%YMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 1 | 0.25 | vect (50.0%) |
VPCMPNLEUQ 0x651c(%RIP),%YMM10,%K1 | | | | | | | | | | | | | | | | | | vect (50.0%)
VPSUBQ %YMM9,%YMM8,%YMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 1 | 0.25 | vect (50.0%) |
VPCMPNLEUQ 0x64ec(%RIP),%YMM8,%K0 | | | | | | | | | | | | | | | | | | vect (50.0%)
VPXOR %XMM9,%XMM9,%XMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | vect (25.0%) |
KSHIFTLB $0x4,%K1,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
KORB %K1,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
VMOVUPS (%RSI,%RDI,4),%YMM8{%K1}{z} | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 | vect (50.0%) |
VMOVUPS (%RDX,%RDI,4),%YMM12{%K1}{z} | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 | vect (50.0%) |
VMOVUPS (%RCX,%RDI,4),%YMM11{%K1}{z} | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 | vect (50.0%) |
VSUBPS %YMM6,%YMM8,%YMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 0.50 | vect (50.0%) |
VSUBPS %YMM5,%YMM12,%YMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 0.50 | vect (50.0%) |
VSUBPS %YMM2,%YMM11,%YMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 0.50 | vect (50.0%) |
VMULPS %YMM6,%YMM6,%YMM15 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 0.50 | vect (50.0%) |
VFMADD231PS %YMM5,%YMM5,%YMM15 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 0.50 | vect (50.0%) |
VFMADD231PS %YMM2,%YMM2,%YMM15 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 0.50 | vect (50.0%) |
VADDPS %YMM0,%YMM15,%YMM0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 0.50 | vect (50.0%) |
VCMPPS $0x1,%YMM1,%YMM15,%K2{%K1} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 0.50 | vect (50.0%) |
VMOVUPS (%R8,%RDI,4),%YMM1{%K2}{z} | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 | vect (50.0%) |
VCMPPS $0x1,%YMM15,%YMM9,%K2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 0.50 | vect (50.0%) |
VEXTRACTF128 $0x1,%YMM0,%XMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 1 | 0.25 | vect (25.0%) |
VCVTPS2PD %XMM0,%YMM0 | 2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 1 | 0.50 | 0 | 0 | 4 | 0.67 | vect (25.0%) |
VCVTPS2PD %XMM10,%YMM10 | 2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 1 | 0.50 | 0 | 0 | 4 | 0.67 | vect (25.0%) |
VSQRTPD %YMM0,%YMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 21 | 8.50 | vect (50.0%) |
VMULPD %YMM0,%YMM0,%YMM0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 0.50 | vect (50.0%) |
VSQRTPD %YMM10,%YMM12 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 21 | 8.50 | vect (50.0%) |
VMULPD %YMM10,%YMM10,%YMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 0.50 | vect (50.0%) |
VDIVPD %YMM0,%YMM13,%YMM0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 13 | 5 | vect (50.0%) |
VDIVPD %YMM10,%YMM13,%YMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 13 | 5 | vect (50.0%) |
VBROADCASTSS 0x644a(%RIP),%YMM13 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 | scal (6.3%) |
VFMADD213PS 0x6444(%RIP){1to8},%YMM15,%YMM13 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 0.50 | vect (50.0%) |
VFMADD213PS 0x643e(%RIP){1to8},%YMM15,%YMM13 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 0.50 | vect (50.0%) |
VFMADD213PS 0x6438(%RIP){1to8},%YMM15,%YMM13 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 0.50 | vect (50.0%) |
VFMADD213PS 0x6432(%RIP){1to8},%YMM15,%YMM13 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 0.50 | vect (50.0%) |
VFMADD213PS 0x642c(%RIP){1to8},%YMM15,%YMM13 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 0.50 | vect (50.0%) |
VCVTPS2PD %XMM13,%YMM14 | 2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 1 | 0.50 | 0 | 0 | 4 | 0.67 | vect (25.0%) |
VFMADD231PD %YMM0,%YMM11,%YMM14 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 0.50 | vect (50.0%) |
VEXTRACTF128 $0x1,%YMM13,%XMM0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 1 | 0.25 | vect (25.0%) |
VCVTPS2PD %XMM0,%YMM0 | 2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 1 | 0.50 | 0 | 0 | 4 | 0.67 | vect (25.0%) |
VCVTPD2PS %YMM14,%XMM11 | 2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 1 | 0.50 | 0 | 0 | 6 | 0.67 | vect (50.0%) |
VFMADD231PD %YMM10,%YMM12,%YMM0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 0.50 | vect (50.0%) |
VCVTPD2PS %YMM0,%XMM0 | 2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 1 | 0.50 | 0 | 0 | 6 | 0.67 | vect (50.0%) |
VINSERTF128 $0x1,%XMM0,%YMM11,%YMM0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 1 | 0.50 | vect (25.0%) |
VMULPS %YMM0,%YMM1,%YMM0{%K2}{z} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 0.50 | vect (50.0%) |
VMULPS %YMM6,%YMM0,%YMM1{%K1}{z} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 0.50 | vect (50.0%) |
VMULPS %YMM5,%YMM0,%YMM5{%K1}{z} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 0.50 | vect (50.0%) |
VMULPS %YMM2,%YMM0,%YMM0{%K1}{z} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 0.50 | vect (50.0%) |
VEXTRACTF128 $0x1,%YMM1,%XMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 1 | 0.25 | vect (25.0%) |
VADDPS %XMM2,%XMM1,%XMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 0.50 | vect (25.0%) |
VSHUFPD $0x1,%XMM1,%XMM1,%XMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 1 | 0.33 | vect (25.0%) |
VADDPS %XMM2,%XMM1,%XMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 0.50 | vect (25.0%) |
VMOVSHDUP %XMM1,%XMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 1 | 0.33 | vect (12.5%) |
VADDSS %XMM2,%XMM1,%XMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 0.50 | scal (6.3%) |
VADDSS %XMM1,%XMM3,%XMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 0.50 | scal (6.3%) |
VEXTRACTF128 $0x1,%YMM5,%XMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 1 | 0.25 | vect (25.0%) |
VADDPS %XMM1,%XMM5,%XMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 0.50 | vect (25.0%) |
VSHUFPD $0x1,%XMM1,%XMM1,%XMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 1 | 0.33 | vect (25.0%) |
VADDPS %XMM2,%XMM1,%XMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 0.50 | vect (25.0%) |
VMOVSHDUP %XMM1,%XMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 1 | 0.33 | vect (12.5%) |
VADDSS %XMM2,%XMM1,%XMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 0.50 | scal (6.3%) |
VADDSS %XMM1,%XMM7,%XMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 0.50 | scal (6.3%) |
VEXTRACTF128 $0x1,%YMM0,%XMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 1 | 0.25 | vect (25.0%) |
VADDPS %XMM1,%XMM0,%XMM0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 0.50 | vect (25.0%) |
VSHUFPD $0x1,%XMM0,%XMM0,%XMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 1 | 0.33 | vect (25.0%) |
VADDPS %XMM1,%XMM0,%XMM0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 0.50 | vect (25.0%) |
VMOVSHDUP %XMM0,%XMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 1 | 0.33 | vect (12.5%) |
VADDSS %XMM1,%XMM0,%XMM0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 0.50 | scal (6.3%) |
VADDSS %XMM0,%XMM4,%XMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 0.50 | scal (6.3%) |
VMOVSS %XMM3,(%R9) | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 1 | 1 | scal (6.3%) |
VMOVSS %XMM7,(%R10) | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 1 | 1 | scal (6.3%) |
VMOVSS %XMM4,(%RAX) | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 1 | 1 | scal (6.3%) |
POP %RBX | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 | N/A |
POP %RBP | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 | N/A |
VZEROUPPER | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | vect (25.0%) |
RET | 1 | 0.50 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 | N/A |
NOPL (%RAX) | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.09 | N/A |
NOPW %CS:(%RAX,%RAX,1) | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.09 | N/A |
XCHG %AX,%AX | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.09 | N/A |
Name | Coverage (%) | Time (s) |
---|---|---|
▼Step10_orig | 57.40 | 4.53 |
○Loop 5 - Step10_orig.c:19-35 - exec | 57.38 | 4.41 |