Loop Id: 16801 | Module: libgromacs_mpi.so.9.0.0 | Source: bonded.cpp:2066-2126 [...] | Coverage: 0.32% |
---|
0xbc7610 VMOVDQA64 0xe00(%RSP),%ZMM0 |
0xbc7618 VMOVDQA64 0x940(%RSP),%ZMM11 |
0xbc7620 VPMULLD %ZMM11,%ZMM0,%ZMM1 |
0xbc7626 VXORPS %XMM2,%XMM2,%XMM2 |
0xbc762a KXNORW %K0,%K0,%K1 |
0xbc762e MOV 0x60(%RSP),%RAX |
0xbc7633 VGATHERDPS (%RAX,%ZMM1,4),%ZMM2{%K1} |
0xbc763a VXORPS %XMM3,%XMM3,%XMM3 |
0xbc763e KXNORW %K0,%K0,%K1 |
0xbc7642 MOV 0x70(%RSP),%RCX |
0xbc7647 VGATHERDPS (%RCX,%ZMM1,4),%ZMM3{%K1} |
0xbc764e VXORPS %XMM4,%XMM4,%XMM4 |
0xbc7652 KXNORW %K0,%K0,%K1 |
0xbc7656 MOV 0x68(%RSP),%RDX |
0xbc765b VGATHERDPS (%RDX,%ZMM1,4),%ZMM4{%K1} |
0xbc7662 VPMULLD 0x100(%RSP),%ZMM11,%ZMM1 |
0xbc766a VXORPS %XMM5,%XMM5,%XMM5 |
0xbc766e KXNORW %K0,%K0,%K1 |
0xbc7672 VGATHERDPS (%RAX,%ZMM1,4),%ZMM5{%K1} |
0xbc7679 VXORPS %XMM6,%XMM6,%XMM6 |
0xbc767d KXNORW %K0,%K0,%K1 |
0xbc7681 VGATHERDPS (%RCX,%ZMM1,4),%ZMM6{%K1} |
0xbc7688 VXORPS %XMM7,%XMM7,%XMM7 |
0xbc768c KXNORW %K0,%K0,%K1 |
0xbc7690 VGATHERDPS (%RDX,%ZMM1,4),%ZMM7{%K1} |
0xbc7697 VPMULLD 0xc0(%RSP),%ZMM11,%ZMM1 |
0xbc769f VXORPS %XMM8,%XMM8,%XMM8 |
0xbc76a4 KXNORW %K0,%K0,%K1 |
0xbc76a8 VGATHERDPS (%RAX,%ZMM1,4),%ZMM8{%K1} |
0xbc76af VXORPS %XMM9,%XMM9,%XMM9 |
0xbc76b4 KXNORW %K0,%K0,%K1 |
0xbc76b8 VGATHERDPS (%RCX,%ZMM1,4),%ZMM9{%K1} |
0xbc76bf VXORPS %XMM10,%XMM10,%XMM10 |
0xbc76c4 KXNORW %K0,%K0,%K1 |
0xbc76c8 VGATHERDPS (%RDX,%ZMM1,4),%ZMM10{%K1} |
0xbc76cf VPMULLD 0x80(%RSP),%ZMM11,%ZMM1 |
0xbc76d7 VPXOR %XMM11,%XMM11,%XMM11 |
0xbc76dc KXNORW %K0,%K0,%K1 |
0xbc76e0 VGATHERDPS (%RAX,%ZMM1,4),%ZMM11{%K1} |
0xbc76e7 VXORPS %XMM12,%XMM12,%XMM12 |
0xbc76ec KXNORW %K0,%K0,%K1 |
0xbc76f0 VGATHERDPS (%RCX,%ZMM1,4),%ZMM12{%K1} |
0xbc76f7 VXORPS %XMM13,%XMM13,%XMM13 |
0xbc76fc KXNORW %K0,%K0,%K1 |
0xbc7700 VGATHERDPS (%RDX,%ZMM1,4),%ZMM13{%K1} |
0xbc7707 VSUBPS %ZMM5,%ZMM2,%ZMM1 |
0xbc770d VSUBPS %ZMM6,%ZMM3,%ZMM2 |
0xbc7713 VSUBPS %ZMM7,%ZMM4,%ZMM3 |
0xbc7719 VSUBPS %ZMM5,%ZMM8,%ZMM4 |
0xbc771f VSUBPS %ZMM6,%ZMM9,%ZMM5 |
0xbc7725 VSUBPS %ZMM7,%ZMM10,%ZMM6 |
0xbc772b VSUBPS %ZMM11,%ZMM8,%ZMM7 |
0xbc7731 VSUBPS %ZMM12,%ZMM9,%ZMM8 |
0xbc7737 VSUBPS %ZMM13,%ZMM10,%ZMM9 |
0xbc773d VMOVAPS 0xb80(%RSP),%ZMM12 |
0xbc7745 VMULPS %ZMM12,%ZMM3,%ZMM10 |
0xbc774b VRNDSCALEPS $0,%ZMM10,%ZMM10 |
0xbc7752 VMOVAPS 0xb40(%RSP),%ZMM13 |
0xbc775a VMULPS %ZMM13,%ZMM10,%ZMM11 |
0xbc7760 VSUBPS %ZMM11,%ZMM1,%ZMM11 |
0xbc7766 VMOVAPS 0xb00(%RSP),%ZMM14 |
0xbc776e VMULPS %ZMM14,%ZMM10,%ZMM1 |
0xbc7774 VSUBPS %ZMM1,%ZMM2,%ZMM2 |
0xbc777a VMOVAPS 0xac0(%RSP),%ZMM15 |
0xbc7782 VMULPS %ZMM15,%ZMM10,%ZMM1 |
0xbc7788 VSUBPS %ZMM1,%ZMM3,%ZMM1 |
0xbc778e VMOVAPS 0xa80(%RSP),%ZMM16 |
0xbc7796 VMULPS %ZMM16,%ZMM2,%ZMM3 |
0xbc779c VRNDSCALEPS $0,%ZMM3,%ZMM3 |
0xbc77a3 VMOVAPS 0xa40(%RSP),%ZMM17 |
0xbc77ab VMULPS %ZMM17,%ZMM3,%ZMM10 |
0xbc77b1 VSUBPS %ZMM10,%ZMM11,%ZMM10 |
0xbc77b7 VMOVAPS 0xa00(%RSP),%ZMM18 |
0xbc77bf VMULPS %ZMM18,%ZMM3,%ZMM3 |
0xbc77c5 VSUBPS %ZMM3,%ZMM2,%ZMM2 |
0xbc77cb VMOVAPS 0x9c0(%RSP),%ZMM19 |
0xbc77d3 VMULPS %ZMM19,%ZMM10,%ZMM3 |
0xbc77d9 VRNDSCALEPS $0,%ZMM3,%ZMM3 |
0xbc77e0 VMOVAPS 0x980(%RSP),%ZMM20 |
0xbc77e8 VMULPS %ZMM20,%ZMM3,%ZMM3 |
0xbc77ee VSUBPS %ZMM3,%ZMM10,%ZMM3 |
0xbc77f4 VMULPS %ZMM12,%ZMM6,%ZMM10 |
0xbc77fa VRNDSCALEPS $0,%ZMM10,%ZMM10 |
0xbc7801 VMULPS %ZMM10,%ZMM13,%ZMM11 |
0xbc7807 VSUBPS %ZMM11,%ZMM4,%ZMM11 |
0xbc780d VMULPS %ZMM10,%ZMM14,%ZMM4 |
0xbc7813 VSUBPS %ZMM4,%ZMM5,%ZMM5 |
0xbc7819 VMULPS %ZMM10,%ZMM15,%ZMM4 |
0xbc781f VSUBPS %ZMM4,%ZMM6,%ZMM4 |
0xbc7825 VMULPS %ZMM5,%ZMM16,%ZMM6 |
0xbc782b VRNDSCALEPS $0,%ZMM6,%ZMM6 |
0xbc7832 VMULPS %ZMM6,%ZMM17,%ZMM10 |
0xbc7838 VSUBPS %ZMM10,%ZMM11,%ZMM10 |
0xbc783e VMULPS %ZMM6,%ZMM18,%ZMM6 |
0xbc7844 VSUBPS %ZMM6,%ZMM5,%ZMM5 |
0xbc784a VMULPS %ZMM10,%ZMM19,%ZMM6 |
0xbc7850 VRNDSCALEPS $0,%ZMM6,%ZMM6 |
0xbc7857 VMULPS %ZMM6,%ZMM20,%ZMM6 |
0xbc785d VSUBPS %ZMM6,%ZMM10,%ZMM6 |
0xbc7863 VMULPS %ZMM12,%ZMM9,%ZMM10 |
0xbc7869 VRNDSCALEPS $0,%ZMM10,%ZMM10 |
0xbc7870 VMULPS %ZMM10,%ZMM13,%ZMM11 |
0xbc7876 VSUBPS %ZMM11,%ZMM7,%ZMM7 |
0xbc787c VMULPS %ZMM10,%ZMM14,%ZMM11 |
0xbc7882 VSUBPS %ZMM11,%ZMM8,%ZMM11 |
0xbc7888 VMULPS %ZMM10,%ZMM15,%ZMM8 |
0xbc788e VSUBPS %ZMM8,%ZMM9,%ZMM8 |
0xbc7894 VMULPS %ZMM11,%ZMM16,%ZMM9 |
0xbc789a VRNDSCALEPS $0,%ZMM9,%ZMM9 |
0xbc78a1 VMULPS %ZMM9,%ZMM17,%ZMM10 |
0xbc78a7 VSUBPS %ZMM10,%ZMM7,%ZMM7 |
0xbc78ad VMULPS %ZMM9,%ZMM18,%ZMM9 |
0xbc78b3 VSUBPS %ZMM9,%ZMM11,%ZMM12 |
0xbc78b9 VMULPS %ZMM7,%ZMM19,%ZMM9 |
0xbc78bf VRNDSCALEPS $0,%ZMM9,%ZMM9 |
0xbc78c6 VMULPS %ZMM9,%ZMM20,%ZMM9 |
0xbc78cc VSUBPS %ZMM9,%ZMM7,%ZMM14 |
0xbc78d2 VMULPS %ZMM4,%ZMM2,%ZMM7 |
0xbc78d8 VFNMADD231PS %ZMM5,%ZMM1,%ZMM7 |
0xbc78de VMULPS %ZMM6,%ZMM1,%ZMM9 |
0xbc78e4 VFNMADD231PS %ZMM4,%ZMM3,%ZMM9 |
0xbc78ea VMULPS %ZMM5,%ZMM3,%ZMM10 |
0xbc78f0 VFNMADD231PS %ZMM6,%ZMM2,%ZMM10 |
0xbc78f6 VMULPS %ZMM8,%ZMM5,%ZMM11 |
0xbc78fc VFNMADD231PS %ZMM12,%ZMM4,%ZMM11 |
0xbc7902 VMULPS %ZMM14,%ZMM4,%ZMM13 |
0xbc7908 VFNMADD231PS %ZMM8,%ZMM6,%ZMM13 |
0xbc790e VMULPS %ZMM12,%ZMM6,%ZMM15 |
0xbc7914 VFNMADD231PS %ZMM14,%ZMM5,%ZMM15 |
0xbc791a VMULPS %ZMM15,%ZMM9,%ZMM16 |
0xbc7920 VFNMADD231PS %ZMM13,%ZMM10,%ZMM16 |
0xbc7926 VMULPS %ZMM11,%ZMM10,%ZMM17 |
0xbc792c VFNMADD231PS %ZMM15,%ZMM7,%ZMM17 |
0xbc7932 VMULPS %ZMM13,%ZMM7,%ZMM18 |
0xbc7938 VFNMADD231PS %ZMM11,%ZMM9,%ZMM18 |
0xbc793e VMULPS %ZMM16,%ZMM16,%ZMM16 |
0xbc7944 VMULPS %ZMM17,%ZMM17,%ZMM17 |
0xbc794a VADDPS %ZMM16,%ZMM17,%ZMM16 |
0xbc7950 VMULPS %ZMM18,%ZMM18,%ZMM17 |
0xbc7956 VADDPS %ZMM16,%ZMM17,%ZMM16 |
0xbc795c VXORPS %XMM20,%XMM20,%XMM20 |
0xbc7962 VCMPPS $0x1,%ZMM16,%ZMM20,%K1 |
0xbc7969 VRSQRT14PS %ZMM16,%ZMM17{%K1}{z} |
0xbc796f VMULPS %ZMM16,%ZMM17,%ZMM18 |
0xbc7975 VMOVAPS 0x900(%RSP),%ZMM26 |
0xbc797d VMULPS %ZMM26,%ZMM17,%ZMM19 |
0xbc7983 VMOVAPS 0x8c0(%RSP),%ZMM23 |
0xbc798b VFMADD213PS %ZMM23,%ZMM17,%ZMM18 |
0xbc7991 VMULPS %ZMM18,%ZMM19,%ZMM17 |
0xbc7997 VMULPS %ZMM17,%ZMM16,%ZMM16 |
0xbc799d VMULPS %ZMM11,%ZMM7,%ZMM17 |
0xbc79a3 VMULPS %ZMM13,%ZMM9,%ZMM18 |
0xbc79a9 VADDPS %ZMM18,%ZMM17,%ZMM17 |
0xbc79af VMULPS %ZMM15,%ZMM10,%ZMM18 |
0xbc79b5 VADDPS %ZMM17,%ZMM18,%ZMM17 |
0xbc79bb VCMPPS $0xc,%ZMM20,%ZMM17,%K1 |
0xbc79c2 VCMPPS $0xc,%ZMM20,%ZMM16,%K2 |
0xbc79c9 VCMPPS $0x1,%ZMM20,%ZMM17,%K3 |
0xbc79d0 VCMPPS $0x1,%ZMM20,%ZMM16,%K4 |
0xbc79d7 VMOVAPS 0x880(%RSP),%ZMM24 |
0xbc79df VBLENDMPS %ZMM20,%ZMM24,%ZMM18{%K1} |
0xbc79e5 VMOVAPS %ZMM18,%ZMM18{%K2}{z} |
0xbc79eb VBROADCASTSS -0x86b895(%RIP),%ZMM18{%K3} |
0xbc79f5 VMOVAPS 0x840(%RSP),%ZMM25 |
0xbc79fd VORPS %ZMM25,%ZMM18,%ZMM18{%K4} |
0xbc7a03 VRCP14PS %ZMM17,%ZMM19{%K1}{z} |
0xbc7a09 VMOVAPS 0x800(%RSP),%ZMM27 |
0xbc7a11 VFNMADD213PS %ZMM27,%ZMM19,%ZMM17 |
0xbc7a17 VMULPS %ZMM17,%ZMM19,%ZMM17 |
0xbc7a1d VMULPS %ZMM16,%ZMM17,%ZMM16 |
0xbc7a23 VANDPS 0x7c0(%RSP),%ZMM16,%ZMM17 |
0xbc7a2b VMOVAPS 0x780(%RSP),%ZMM28 |
0xbc7a33 VCMPPS $0x1,%ZMM17,%ZMM28,%K2 |
0xbc7a3a VRCP14PS %ZMM17,%ZMM19{%K2}{z} |
0xbc7a40 VCMPPS $0x1,%ZMM20,%ZMM16,%K1 |
0xbc7a47 VMOVAPS %ZMM17,%ZMM16 |
0xbc7a4d VFNMADD213PS %ZMM27,%ZMM19,%ZMM16 |
0xbc7a53 VMULPS %ZMM16,%ZMM19,%ZMM17{%K2} |
0xbc7a59 VMULPS %ZMM17,%ZMM17,%ZMM16 |
0xbc7a5f VMULPS %ZMM16,%ZMM17,%ZMM19 |
0xbc7a65 VMULPS %ZMM16,%ZMM16,%ZMM20 |
0xbc7a6b VMOVAPS 0x740(%RSP),%ZMM21 |
0xbc7a73 VFMADD213PS 0x700(%RSP),%ZMM20,%ZMM21 |
0xbc7a7b VMOVAPS 0x6c0(%RSP),%ZMM22 |
0xbc7a83 VFMADD213PS 0x680(%RSP),%ZMM20,%ZMM22 |
0xbc7a8b VFMADD213PS 0x640(%RSP),%ZMM20,%ZMM21 |
0xbc7a93 VFMADD213PS 0x600(%RSP),%ZMM20,%ZMM22 |
0xbc7a9b VFMADD213PS 0x5c0(%RSP),%ZMM20,%ZMM21 |
0xbc7aa3 VFMADD213PS 0x580(%RSP),%ZMM20,%ZMM22 |
0xbc7aab VFMADD231PS %ZMM21,%ZMM16,%ZMM22 |
0xbc7ab1 VFMADD213PS %ZMM17,%ZMM19,%ZMM22 |
0xbc7ab7 VSUBPS %ZMM22,%ZMM24,%ZMM22{%K2} |
0xbc7abd VXORPS %ZMM25,%ZMM22,%ZMM22{%K1} |
0xbc7ac3 VADDPS %ZMM22,%ZMM18,%ZMM16 |
0xbc7ac9 VMULPS %ZMM11,%ZMM3,%ZMM17 |
0xbc7acf VMULPS %ZMM13,%ZMM2,%ZMM18 |
0xbc7ad5 VADDPS %ZMM18,%ZMM17,%ZMM17 |
0xbc7adb VMULPS %ZMM15,%ZMM1,%ZMM18 |
0xbc7ae1 VADDPS %ZMM17,%ZMM18,%ZMM17 |
0xbc7ae7 VPTERNLOGD $-0x1c,0x4c0(%RSP),%ZMM16,%ZMM17 |
0xbc7af0 VMULPS %ZMM7,%ZMM7,%ZMM16 |
0xbc7af6 VMULPS %ZMM9,%ZMM9,%ZMM18 |
0xbc7afc VADDPS %ZMM18,%ZMM16,%ZMM16 |
0xbc7b02 VMULPS %ZMM10,%ZMM10,%ZMM18 |
0xbc7b08 VADDPS %ZMM16,%ZMM18,%ZMM16 |
0xbc7b0e VMULPS %ZMM11,%ZMM11,%ZMM18 |
0xbc7b14 VMULPS %ZMM13,%ZMM13,%ZMM19 |
0xbc7b1a VADDPS %ZMM19,%ZMM18,%ZMM18 |
0xbc7b20 VMULPS %ZMM15,%ZMM15,%ZMM19 |
0xbc7b26 VADDPS %ZMM18,%ZMM19,%ZMM18 |
0xbc7b2c VMULPS %ZMM6,%ZMM6,%ZMM19 |
0xbc7b32 VMULPS %ZMM5,%ZMM5,%ZMM20 |
0xbc7b38 VADDPS %ZMM19,%ZMM20,%ZMM19 |
0xbc7b3e VMULPS %ZMM4,%ZMM4,%ZMM20 |
0xbc7b44 VADDPS %ZMM19,%ZMM20,%ZMM19 |
0xbc7b4a VMAXPS 0x540(%RSP),%ZMM19,%ZMM19 |
0xbc7b52 VRSQRT14PS %ZMM19,%ZMM20 |
0xbc7b58 VMULPS %ZMM20,%ZMM19,%ZMM21 |
0xbc7b5e VMULPS %ZMM26,%ZMM20,%ZMM22 |
0xbc7b64 VFMADD213PS %ZMM23,%ZMM20,%ZMM21 |
0xbc7b6a VMULPS %ZMM21,%ZMM22,%ZMM20 |
0xbc7b70 VMULPS 0x500(%RSP),%ZMM19,%ZMM21 |
0xbc7b78 VMAXPS %ZMM21,%ZMM16,%ZMM16 |
0xbc7b7e VMAXPS %ZMM21,%ZMM18,%ZMM18 |
0xbc7b84 VMOVAPS 0xdc0(%RSP),%ZMM21 |
0xbc7b8c VMOVAPS 0x480(%RSP),%ZMM22 |
0xbc7b94 VMULPS 0xd80(%RSP),%ZMM22,%ZMM22 |
0xbc7b9c VFMADD231PS %ZMM17,%ZMM21,%ZMM22 |
0xbc7ba2 VMULPS 0x440(%RSP),%ZMM22,%ZMM17 |
0xbc7baa VRNDSCALEPS $0,%ZMM17,%ZMM23 |
0xbc7bb1 VFMADD231PS 0x380(%RSP),%ZMM23,%ZMM22 |
0xbc7bb9 VFMADD231PS 0x340(%RSP),%ZMM23,%ZMM22 |
0xbc7bc1 VFMADD231PS 0x300(%RSP),%ZMM23,%ZMM22 |
0xbc7bc9 VFMADD231PS 0x2c0(%RSP),%ZMM23,%ZMM22 |
0xbc7bd1 VMULPS %ZMM22,%ZMM22,%ZMM23 |
0xbc7bd7 VMOVAPS 0x280(%RSP),%ZMM24 |
0xbc7bdf VFMADD213PS 0x240(%RSP),%ZMM23,%ZMM24 |
0xbc7be7 VFMADD213PS 0x200(%RSP),%ZMM23,%ZMM24 |
0xbc7bef VMULPS %ZMM23,%ZMM22,%ZMM25 |
0xbc7bf5 VFMADD213PS %ZMM22,%ZMM24,%ZMM25 |
0xbc7bfb VRCP14PS %ZMM16,%ZMM22 |
0xbc7c01 VRCP14PS %ZMM18,%ZMM24 |
0xbc7c07 VFNMADD213PS %ZMM27,%ZMM22,%ZMM16 |
0xbc7c0d VMULPS %ZMM16,%ZMM22,%ZMM16 |
0xbc7c13 VFNMADD213PS %ZMM27,%ZMM24,%ZMM18 |
0xbc7c19 VMULPS %ZMM18,%ZMM24,%ZMM18 |
0xbc7c1f VMULPS %ZMM20,%ZMM19,%ZMM19 |
0xbc7c25 VMULPS %ZMM16,%ZMM19,%ZMM16 |
0xbc7c2b VMULPS %ZMM18,%ZMM19,%ZMM18 |
0xbc7c31 VMULPS %ZMM6,%ZMM3,%ZMM3 |
0xbc7c37 VMULPS %ZMM5,%ZMM2,%ZMM2 |
0xbc7c3d VADDPS %ZMM3,%ZMM2,%ZMM2 |
0xbc7c43 VMULPS %ZMM4,%ZMM1,%ZMM1 |
0xbc7c49 VADDPS %ZMM2,%ZMM1,%ZMM1 |
0xbc7c4f VMULPS %ZMM14,%ZMM6,%ZMM2 |
0xbc7c55 VMULPS %ZMM12,%ZMM5,%ZMM3 |
0xbc7c5b VADDPS %ZMM2,%ZMM3,%ZMM2 |
0xbc7c61 VMULPS %ZMM8,%ZMM4,%ZMM3 |
0xbc7c67 VADDPS %ZMM2,%ZMM3,%ZMM2 |
0xbc7c6d VMULPS %ZMM20,%ZMM20,%ZMM3 |
0xbc7c73 VMULPS %ZMM3,%ZMM1,%ZMM4 |
0xbc7c79 VMULPS %ZMM3,%ZMM2,%ZMM5 |
0xbc7c7f VCVTPS2DQ %ZMM17,%ZMM1 |
0xbc7c85 VPTESTNMD 0x400(%RSP),%ZMM1,%K1 |
0xbc7c8d VPTESTMD 0x3c0(%RSP),%ZMM1,%K2 |
0xbc7c95 VMOVAPS 0x1c0(%RSP),%ZMM1 |
0xbc7c9d VFMADD213PS 0x180(%RSP),%ZMM23,%ZMM1 |
0xbc7ca5 VFMADD213PS 0x140(%RSP),%ZMM23,%ZMM1 |
0xbc7cad VFMADD213PS %ZMM26,%ZMM23,%ZMM1 |
0xbc7cb3 VFMADD213PS %ZMM28,%ZMM23,%ZMM1 |
0xbc7cb9 VMOVAPS %ZMM25,%ZMM1{%K1} |
0xbc7cbf VBROADCASTSS -0x86bb11(%RIP),%ZMM2{%K2}{z} |
0xbc7cc9 VXORPS %ZMM2,%ZMM1,%ZMM1 |
0xbc7ccf VMULPS 0xd40(%RSP),%ZMM21,%ZMM2 |
0xbc7cd7 VMULPS %ZMM1,%ZMM2,%ZMM1 |
0xbc7cdd VMULPS %ZMM1,%ZMM16,%ZMM2 |
0xbc7ce3 VMULPS %ZMM1,%ZMM18,%ZMM1 |
0xbc7ce9 VMULPS %ZMM2,%ZMM7,%ZMM3 |
0xbc7cef VMULPS %ZMM2,%ZMM9,%ZMM6 |
0xbc7cf5 VMULPS %ZMM2,%ZMM10,%ZMM2 |
0xbc7cfb VMULPS %ZMM1,%ZMM11,%ZMM7 |
0xbc7d01 VMULPS %ZMM1,%ZMM13,%ZMM8 |
0xbc7d07 VMULPS %ZMM1,%ZMM15,%ZMM1 |
0xbc7d0d VMOVAPS %ZMM3,0xd00(%RSP) |
0xbc7d15 VMOVAPS %ZMM2,0xc80(%RSP) |
0xbc7d1d VMOVAPS %ZMM6,0xcc0(%RSP) |
0xbc7d25 LEA 0xc80(%RSP),%RAX |
0xbc7d2d MOV %RAX,0x20(%RSP) |
0xbc7d32 VMOVAPS %ZMM7,0xc40(%RSP) |
0xbc7d3a LEA 0xc40(%RSP),%RAX |
0xbc7d42 MOV %RAX,0x28(%RSP) |
0xbc7d47 VMOVAPS %ZMM8,0xc00(%RSP) |
0xbc7d4f LEA 0xc00(%RSP),%RAX |
0xbc7d57 MOV %RAX,0x30(%RSP) |
0xbc7d5c VMOVAPS %ZMM1,0xbc0(%RSP) |
0xbc7d64 LEA 0xbc0(%RSP),%RAX |
0xbc7d6c MOV %RAX,0x38(%RSP) |
0xbc7d71 MOV 0x78(%RSP),%RAX |
0xbc7d76 MOV %RAX,0x40(%RSP) |
0xbc7d7b LEA 0xcc0(%RSP),%RAX |
0xbc7d83 MOV %RAX,0x18(%RSP) |
0xbc7d88 LEA 0xd00(%RSP),%RAX |
0xbc7d90 MOV %RAX,0x10(%RSP) |
0xbc7d95 LEA 0x100(%RSP),%RDX |
0xbc7d9d LEA 0xc0(%RSP),%R8 |
0xbc7da5 LEA 0x80(%RSP),%R9 |
0xbc7dad CALL bde790 <_ZN12_GLOBAL__N_124do_dih_fup_noshiftf_simdEPKiS1_S1_S1_N3gmx9SimdFloatES3_S3_S3_S3_S3_S3_S3_PA4_f@@608> |
0xbc7db2 ADD $0x50,%R14 |
0xbc7db6 CMP %R13D,%R14D |
0xbc7db9 JGE bc7e73 |
0xbc7dbf MOV %R14,%RAX |
0xbc7dc2 XOR %ECX,%ECX |
0xbc7dc4 MOV %R14D,%EDX |
0xbc7dc7 JMP bc7e22 |
(16802) 0xbc7dd0 LEA (%RSI,%RSI,2),%RSI |
(16802) 0xbc7dd4 SAL $0x4,%RSI |
(16802) 0xbc7dd8 VMOVSS (%R15,%RSI,1),%XMM1 |
(16802) 0xbc7dde VMOVSS 0x4(%R15,%RSI,1),%XMM0 |
(16802) 0xbc7de5 VCVTSI2SSL 0x8(%R15,%RSI,1),%XMM9,%XMM2 |
(16802) 0xbc7dec LEA 0x5(%RDX),%ESI |
(16802) 0xbc7def CMP %R13D,%ESI |
(16802) 0xbc7df2 CMOVL %ESI,%EDX |
(16802) 0xbc7df5 VMOVSS %XMM0,0xd40(%RSP,%RCX,1) |
(16802) 0xbc7dfe VMOVSS %XMM1,0xd80(%RSP,%RCX,1) |
(16802) 0xbc7e07 VMOVSS %XMM2,0xdc0(%RSP,%RCX,1) |
(16802) 0xbc7e10 ADD $0x4,%RCX |
(16802) 0xbc7e14 ADD $0x5,%RAX |
(16802) 0xbc7e18 CMP $0x40,%RCX |
(16802) 0xbc7e1c JE bc7610 |
(16802) 0xbc7e22 MOVSXD %EDX,%RDI |
(16802) 0xbc7e25 MOVSXD (%R12,%RDI,4),%RSI |
(16802) 0xbc7e29 MOV 0x4(%R12,%RDI,4),%R8D |
(16802) 0xbc7e2e MOV %R8D,0xe00(%RSP,%RCX,1) |
(16802) 0xbc7e36 MOV 0x8(%R12,%RDI,4),%R8D |
(16802) 0xbc7e3b MOV %R8D,0x100(%RSP,%RCX,1) |
(16802) 0xbc7e43 MOV 0xc(%R12,%RDI,4),%R8D |
(16802) 0xbc7e48 MOV %R8D,0xc0(%RSP,%RCX,1) |
(16802) 0xbc7e50 MOV 0x10(%R12,%RDI,4),%EDI |
(16802) 0xbc7e55 MOV %EDI,0x80(%RSP,%RCX,1) |
(16802) 0xbc7e5c VXORPS %XMM0,%XMM0,%XMM0 |
(16802) 0xbc7e60 CMP %RBX,%RAX |
(16802) 0xbc7e63 JL bc7dd0 |
(16802) 0xbc7e69 VXORPS %XMM1,%XMM1,%XMM1 |
(16802) 0xbc7e6d VXORPS %XMM2,%XMM2,%XMM2 |
(16802) 0xbc7e71 JMP bc7df5 |
/home/eoseret/gromacs-2024.2/src/gromacs/listed_forces/bonded.cpp: 2066 - 2126 |
-------------------------------------------------------------------------------- |
2066: for (i = 0; (i < nbonds); i += GMX_SIMD_REAL_WIDTH * nfa1) |
[...] |
2072: for (s = 0; s < GMX_SIMD_REAL_WIDTH; s++) |
2073: { |
2074: type = forceatoms[iu]; |
2075: ai[s] = forceatoms[iu + 1]; |
2076: aj[s] = forceatoms[iu + 2]; |
2077: ak[s] = forceatoms[iu + 3]; |
2078: al[s] = forceatoms[iu + 4]; |
2079: |
2080: /* At the end fill the arrays with the last atoms and 0 params */ |
2081: if (i + s * nfa1 < nbonds) |
2082: { |
2083: cp[s] = forceparams[type].pdihs.cpA; |
2084: phi0[s] = forceparams[type].pdihs.phiA; |
2085: mult[s] = forceparams[type].pdihs.mult; |
2086: |
2087: if (iu + nfa1 < nbonds) |
[...] |
2101: dih_angle_simd( |
2102: x, ai, aj, ak, al, pbc_simd, &phi_S, &mx_S, &my_S, &mz_S, &nx_S, &ny_S, &nz_S, &nrkj_m2_S, &nrkj_n2_S, &p_S, &q_S); |
2103: |
2104: cp_S = load<SimdReal>(cp); |
2105: phi0_S = load<SimdReal>(phi0) * deg2rad_S; |
2106: mult_S = load<SimdReal>(mult); |
[...] |
2126: do_dih_fup_noshiftf_simd(ai, aj, ak, al, p_S, q_S, mx_S, my_S, mz_S, nx_S, ny_S, nz_S, f); |
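The excerpt above shows the scalar packing loop that feeds the SIMD kernel: for each of the 16 lanes it copies the four atom indices, and for lanes that run past nbonds it reuses the last dihedral's atoms with zeroed parameters (the comment at line 2080). Below is a minimal, hypothetical standalone sketch of that packing logic; the function name, the simplified parameter struct, and the explicit zero-padding branch (which is elided in the excerpt) are illustrative assumptions, not the GROMACS types or code.

```cpp
#include <cstddef>

constexpr int kSimdWidth = 16;  // GMX_SIMD_REAL_WIDTH for AVX-512 single precision
constexpr int kNfa1      = 5;   // one type index + four atom indices per dihedral entry

struct PdihParamsSketch { float cpA; float phiA; float mult; };  // illustrative only

// Pack one SIMD-width worth of dihedral entries starting at flat index i.
// Lanes beyond nbonds keep the last entry's atoms (so gathers stay in bounds)
// but get zero parameters, so they contribute nothing to energies or forces.
void packDihedralLanes(int i, int nbonds, const int* forceatoms,
                       const PdihParamsSketch* forceparams,
                       int* ai, int* aj, int* ak, int* al,
                       float* cp, float* phi0, float* mult)
{
    int iu = i;
    for (int s = 0; s < kSimdWidth; s++)
    {
        const int type = forceatoms[iu];
        ai[s] = forceatoms[iu + 1];
        aj[s] = forceatoms[iu + 2];
        ak[s] = forceatoms[iu + 3];
        al[s] = forceatoms[iu + 4];
        if (i + s * kNfa1 < nbonds)
        {
            cp[s]   = forceparams[type].cpA;
            phi0[s] = forceparams[type].phiA;
            mult[s] = forceparams[type].mult;
            if (iu + kNfa1 < nbonds)
            {
                iu += kNfa1;  // advance only while real entries remain
            }
        }
        else
        {
            cp[s] = phi0[s] = mult[s] = 0.0f;  // padded tail lanes: zero params
        }
    }
}
```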
/home/eoseret/gromacs-2024.2/src/gromacs/simd/include/gromacs/simd/impl_x86_avx_512/impl_x86_avx_512_util_float.h: 113 - 113 |
-------------------------------------------------------------------------------- |
113: v->simdInternal_ = _mm512_i32gather_ps(offset.simdInternal_, base, sizeof(float) * align_); |
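This gather wrapper is what the loop body's VPMULLD + VGATHERDPS pairs compile down to: the atom indices are scaled to element offsets, then one float per lane is fetched with a scale of sizeof(float). The standalone sketch below mirrors that pattern with raw intrinsics; the xyz-interleaved layout (3 floats per atom) and the helper name are assumptions for illustration, not the GROMACS gatherLoad API. It needs AVX-512F (e.g. compile with -mavx512f).

```cpp
#include <immintrin.h>

// Gather x, y, z components for 16 atoms whose indices are given in atomIndex,
// from an xyz-packed coordinate array (assumption for this example).
static inline void gatherXyz(const float* base, const int* atomIndex,
                             __m512* x, __m512* y, __m512* z)
{
    const __m512i idx = _mm512_loadu_si512(atomIndex);                  // 16 atom indices
    const __m512i off = _mm512_mullo_epi32(idx, _mm512_set1_epi32(3));  // -> float offsets (VPMULLD)
    *x = _mm512_i32gather_ps(off, base,     sizeof(float));             // VGATHERDPS
    *y = _mm512_i32gather_ps(off, base + 1, sizeof(float));
    *z = _mm512_i32gather_ps(off, base + 2, sizeof(float));
}
```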
/home/eoseret/gromacs-2024.2/src/gromacs/simd/include/gromacs/simd/impl_x86_avx_512/impl_x86_avx_512_simd_float.h: 175 - 501 |
-------------------------------------------------------------------------------- |
175: return { _mm512_castsi512_ps(_mm512_xor_epi32(_mm512_castps_si512(a.simdInternal_), |
[...] |
181: return { _mm512_add_ps(a.simdInternal_, b.simdInternal_) }; |
182: } |
183: |
184: static inline SimdFloat gmx_simdcall operator-(SimdFloat a, SimdFloat b) |
185: { |
186: return { _mm512_sub_ps(a.simdInternal_, b.simdInternal_) }; |
[...] |
197: return { _mm512_mul_ps(a.simdInternal_, b.simdInternal_) }; |
198: } |
199: |
200: static inline SimdFloat gmx_simdcall fma(SimdFloat a, SimdFloat b, SimdFloat c) |
201: { |
202: return { _mm512_fmadd_ps(a.simdInternal_, b.simdInternal_, c.simdInternal_) }; |
203: } |
204: |
205: static inline SimdFloat gmx_simdcall fms(SimdFloat a, SimdFloat b, SimdFloat c) |
206: { |
207: return { _mm512_fmsub_ps(a.simdInternal_, b.simdInternal_, c.simdInternal_) }; |
208: } |
209: |
210: static inline SimdFloat gmx_simdcall fnma(SimdFloat a, SimdFloat b, SimdFloat c) |
211: { |
212: return { _mm512_fnmadd_ps(a.simdInternal_, b.simdInternal_, c.simdInternal_) }; |
[...] |
224: return { _mm512_rsqrt14_ps(x.simdInternal_) }; |
225: } |
226: |
227: static inline SimdFloat gmx_simdcall rcp(SimdFloat x) |
228: { |
229: return { _mm512_rcp14_ps(x.simdInternal_) }; |
[...] |
252: return { _mm512_maskz_rsqrt14_ps(m.simdInternal_, x.simdInternal_) }; |
253: } |
254: |
255: static inline SimdFloat gmx_simdcall maskzRcp(SimdFloat x, SimdFBool m) |
256: { |
257: return { _mm512_maskz_rcp14_ps(m.simdInternal_, x.simdInternal_) }; |
[...] |
263: return { _mm512_castsi512_ps(_mm512_andnot_epi32(_mm512_castps_si512(_mm512_set1_ps(GMX_FLOAT_NEGZERO)), |
[...] |
269: return { _mm512_max_ps(a.simdInternal_, b.simdInternal_) }; |
[...] |
279: return { _mm512_roundscale_ps(x.simdInternal_, 0) }; |
[...] |
362: return { _mm512_cmp_ps_mask(a.simdInternal_, b.simdInternal_, _CMP_NEQ_OQ) }; |
363: } |
364: |
365: static inline SimdFBool gmx_simdcall operator<(SimdFloat a, SimdFloat b) |
366: { |
367: return { _mm512_cmp_ps_mask(a.simdInternal_, b.simdInternal_, _CMP_LT_OQ) }; |
[...] |
398: return { _mm512_mask_mov_ps(_mm512_setzero_ps(), m.simdInternal_, a.simdInternal_) }; |
399: } |
400: |
401: static inline SimdFloat gmx_simdcall selectByNotMask(SimdFloat a, SimdFBool m) |
402: { |
403: return { _mm512_mask_mov_ps(a.simdInternal_, m.simdInternal_, _mm512_setzero_ps()) }; |
404: } |
405: |
406: static inline SimdFloat gmx_simdcall blend(SimdFloat a, SimdFloat b, SimdFBool sel) |
407: { |
408: return { _mm512_mask_blend_ps(sel.simdInternal_, a.simdInternal_, b.simdInternal_) }; |
409: } |
410: |
411: static inline SimdFloat gmx_simdcall copysign(SimdFloat a, SimdFloat b) |
412: { |
413: return { _mm512_castsi512_ps(_mm512_ternarylogic_epi32(_mm512_castps_si512(a.simdInternal_), |
[...] |
451: return { _mm512_mullo_epi32(a.simdInternal_, b.simdInternal_) }; |
452: } |
453: |
454: static inline SimdFIBool gmx_simdcall operator==(SimdFInt32 a, SimdFInt32 b) |
455: { |
456: return { _mm512_cmp_epi32_mask(a.simdInternal_, b.simdInternal_, _MM_CMPINT_EQ) }; |
[...] |
501: return { _mm512_cvtps_epi32(a.simdInternal_) }; |
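The rsqrt/rcp wrappers listed above only return ~14-bit estimates (VRSQRT14PS, VRCP14PS); the assembly follows each estimate with one FMA-based Newton-Raphson step, which is the VRSQRT14PS → VMULPS → VFMADD213PS → VMULPS and VRCP14PS → VFNMADD213PS → VMULPS sequences visible in the loop. The sketch below is a minimal illustration of that refinement with raw intrinsics, not the GROMACS gmx::invsqrt/inv implementations; it requires AVX-512F (-mavx512f).

```cpp
#include <immintrin.h>
#include <cstdio>

// One Newton-Raphson step on the 1/sqrt(a) estimate: y' = 0.5*y*(3 - a*y*y).
static inline __m512 refinedRsqrt(__m512 a)
{
    const __m512 y    = _mm512_rsqrt14_ps(a);                            // ~14-bit estimate
    const __m512 ay   = _mm512_mul_ps(a, y);
    const __m512 poly = _mm512_fmadd_ps(ay, y, _mm512_set1_ps(-3.0f));   // a*y*y - 3
    return _mm512_mul_ps(_mm512_mul_ps(_mm512_set1_ps(-0.5f), y), poly); // 0.5*y*(3 - a*y*y)
}

// One Newton-Raphson step on the 1/d estimate: r' = r*(2 - d*r).
static inline __m512 refinedRcp(__m512 d)
{
    const __m512 r    = _mm512_rcp14_ps(d);
    const __m512 corr = _mm512_fnmadd_ps(d, r, _mm512_set1_ps(2.0f));    // 2 - d*r
    return _mm512_mul_ps(r, corr);
}

int main()
{
    const __m512 a = _mm512_set1_ps(2.0f);
    float out[16];
    _mm512_storeu_ps(out, refinedRsqrt(a));
    std::printf("1/sqrt(2) ~= %.7f\n", out[0]);
    _mm512_storeu_ps(out, refinedRcp(a));
    std::printf("1/2       ~= %.7f\n", out[0]);
    return 0;
}
```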
Coverage (%) | Name | Source Location | Module |
---|---|---|---|
►100.00+ | calculateSimpleBond(int, int, [...] | bonded.cpp:4143 | libgromacs_mpi.so.9.0.0 |
○ | (anonymous namespace)::calc_on[...] | listed_forces.cpp:356 | libgromacs_mpi.so.9.0.0 |
○ | .omp_outlined.#0xbe5d40 | listed_forces.cpp:428 | libgromacs_mpi.so.9.0.0 |
○ | __kmp_invoke_microtask | libomp.so | |
○ | __kmp_fork_call | libomp.so | |
○ | __kmpc_fork_call | libomp.so | |
○ | ListedForces::calculate(gmx_wa[...] | listed_forces.cpp:387 | libgromacs_mpi.so.9.0.0 |
○ | do_force(_IO_FILE*, t_commrec [...] | sim_util.cpp:2047 | libgromacs_mpi.so.9.0.0 |
○ | gmx::LegacySimulator::do_md() | md.cpp:1248 | libgromacs_mpi.so.9.0.0 |
○ | gmx::Mdrunner::mdrunner() | runner.cpp:2311 | libgromacs_mpi.so.9.0.0 |
○ | gmx::gmx_mdrun(int, gmx_hw_inf[...] | mdrun.cpp:280 | gmx_mpi |
○ | gmx::gmx_mdrun(int, char**) | mdrun.cpp:82 | gmx_mpi |
○ | gmx::CommandLineModuleManager:[...] | cmdlinemodulemanager.cpp:569 | libgromacs_mpi.so.9.0.0 |
○ | main | gmx.cpp:58 | gmx_mpi |
○ | __libc_start_call_main | libc.so.6 |
Coverage (%) | Name | Source Location | Module |
---|---|---|---|
►61.57+ | calculateSimpleBond(int, int, [...] | bonded.cpp:4143 | libgromacs_mpi.so.9.0.0 |
○ | (anonymous namespace)::calc_on[...] | listed_forces.cpp:356 | libgromacs_mpi.so.9.0.0 |
○ | .omp_outlined.#0xbe5d40 | listed_forces.cpp:428 | libgromacs_mpi.so.9.0.0 |
○ | __kmp_invoke_microtask | libomp.so | |
○ | __kmp_fork_call | libomp.so | |
○ | __kmpc_fork_call | libomp.so | |
○ | ListedForces::calculate(gmx_wa[...] | listed_forces.cpp:387 | libgromacs_mpi.so.9.0.0 |
○ | do_force(_IO_FILE*, t_commrec [...] | sim_util.cpp:2047 | libgromacs_mpi.so.9.0.0 |
○ | gmx::LegacySimulator::do_md() | md.cpp:1248 | libgromacs_mpi.so.9.0.0 |
○ | gmx::Mdrunner::mdrunner() | runner.cpp:2311 | libgromacs_mpi.so.9.0.0 |
○ | gmx::gmx_mdrun(int, gmx_hw_inf[...] | mdrun.cpp:280 | gmx_mpi |
○ | gmx::gmx_mdrun(int, char**) | mdrun.cpp:82 | gmx_mpi |
○ | gmx::CommandLineModuleManager:[...] | cmdlinemodulemanager.cpp:569 | libgromacs_mpi.so.9.0.0 |
○ | main | gmx.cpp:58 | gmx_mpi |
○ | __libc_start_call_main | libc.so.6 | |
►20.24+ | calculateSimpleBond(int, int, [...] | bonded.cpp:4143 | libgromacs_mpi.so.9.0.0 |
○ | (anonymous namespace)::calc_on[...] | listed_forces.cpp:356 | libgromacs_mpi.so.9.0.0 |
○ | .omp_outlined.#0xbe5d40 | listed_forces.cpp:428 | libgromacs_mpi.so.9.0.0 |
○ | __kmp_invoke_microtask | libomp.so | |
○ | __kmp_fork_call | libomp.so | |
○ | __kmpc_fork_call | libomp.so | |
○ | ListedForces::calculate(gmx_wa[...] | listed_forces.cpp:387 | libgromacs_mpi.so.9.0.0 |
○ | do_force(_IO_FILE*, t_commrec [...] | sim_util.cpp:2047 | libgromacs_mpi.so.9.0.0 |
○ | gmx::LegacySimulator::do_md() | md.cpp:1248 | libgromacs_mpi.so.9.0.0 |
○ | gmx::Mdrunner::mdrunner() | runner.cpp:2311 | libgromacs_mpi.so.9.0.0 |
○ | gmx::gmx_mdrun(int, gmx_hw_inf[...] | mdrun.cpp:280 | gmx_mpi |
○ | gmx::gmx_mdrun(int, char**) | mdrun.cpp:82 | gmx_mpi |
○ | gmx::CommandLineModuleManager:[...] | cmdlinemodulemanager.cpp:569 | libgromacs_mpi.so.9.0.0 |
○ | main | gmx.cpp:58 | gmx_mpi |
►18.19+ | calculateSimpleBond(int, int, [...] | bonded.cpp:4143 | libgromacs_mpi.so.9.0.0 |
○ | (anonymous namespace)::calc_on[...] | listed_forces.cpp:356 | libgromacs_mpi.so.9.0.0 |
○ | .omp_outlined.#0xbe5d40 | listed_forces.cpp:428 | libgromacs_mpi.so.9.0.0 |
○ | __kmp_invoke_microtask | libomp.so | |
○ | __kmp_fork_call | libomp.so | |
○ | __kmpc_fork_call | libomp.so | |
○ | ListedForces::calculate(gmx_wa[...] | listed_forces.cpp:387 | libgromacs_mpi.so.9.0.0 |
○ | do_force(_IO_FILE*, t_commrec [...] | sim_util.cpp:2047 | libgromacs_mpi.so.9.0.0 |
○ | gmx::LegacySimulator::do_md() | md.cpp:1248 | libgromacs_mpi.so.9.0.0 |
○ | gmx::Mdrunner::mdrunner() | runner.cpp:2311 | libgromacs_mpi.so.9.0.0 |
○ | gmx::gmx_mdrun(int, gmx_hw_inf[...] | mdrun.cpp:280 | gmx_mpi |
○ | gmx::gmx_mdrun(int, char**) | mdrun.cpp:82 | gmx_mpi |
○ | gmx::CommandLineModuleManager:[...] | cmdlinemodulemanager.cpp:569 | libgromacs_mpi.so.9.0.0 |
Coverage (%) | Name | Source Location | Module |
---|---|---|---|
►75.44+ | calculateSimpleBond(int, int, [...] | bonded.cpp:4143 | libgromacs_mpi.so.9.0.0 |
○ | (anonymous namespace)::calc_on[...] | listed_forces.cpp:356 | libgromacs_mpi.so.9.0.0 |
○ | .omp_outlined.#0xbe5d40 | listed_forces.cpp:428 | libgromacs_mpi.so.9.0.0 |
○ | __kmp_invoke_microtask | libomp.so | |
○ | __kmp_fork_call | libomp.so | |
○ | __kmpc_fork_call | libomp.so | |
○ | ListedForces::calculate(gmx_wa[...] | listed_forces.cpp:387 | libgromacs_mpi.so.9.0.0 |
○ | do_force(_IO_FILE*, t_commrec [...] | sim_util.cpp:2047 | libgromacs_mpi.so.9.0.0 |
○ | gmx::LegacySimulator::do_md() | md.cpp:1248 | libgromacs_mpi.so.9.0.0 |
○ | gmx::Mdrunner::mdrunner() | runner.cpp:2311 | libgromacs_mpi.so.9.0.0 |
○ | gmx::gmx_mdrun(int, gmx_hw_inf[...] | mdrun.cpp:280 | gmx_mpi |
○ | gmx::gmx_mdrun(int, char**) | mdrun.cpp:82 | gmx_mpi |
○ | gmx::CommandLineModuleManager:[...] | cmdlinemodulemanager.cpp:569 | libgromacs_mpi.so.9.0.0 |
○ | main | gmx.cpp:58 | gmx_mpi |
○ | __libc_start_call_main | libc.so.6 | |
►21.35+ | calculateSimpleBond(int, int, [...] | bonded.cpp:4143 | libgromacs_mpi.so.9.0.0 |
○ | (anonymous namespace)::calc_on[...] | listed_forces.cpp:356 | libgromacs_mpi.so.9.0.0 |
○ | .omp_outlined.#0xbe5d40 | listed_forces.cpp:428 | libgromacs_mpi.so.9.0.0 |
○ | __kmp_invoke_microtask | libomp.so | |
○ | __kmp_fork_call | libomp.so | |
○ | __kmpc_fork_call | libomp.so | |
○ | ListedForces::calculate(gmx_wa[...] | listed_forces.cpp:387 | libgromacs_mpi.so.9.0.0 |
○ | do_force(_IO_FILE*, t_commrec [...] | sim_util.cpp:2047 | libgromacs_mpi.so.9.0.0 |
○ | gmx::LegacySimulator::do_md() | md.cpp:1248 | libgromacs_mpi.so.9.0.0 |
○ | gmx::Mdrunner::mdrunner() | runner.cpp:2311 | libgromacs_mpi.so.9.0.0 |
○ | gmx::gmx_mdrun(int, gmx_hw_inf[...] | mdrun.cpp:280 | gmx_mpi |
○ | gmx::gmx_mdrun(int, char**) | mdrun.cpp:82 | gmx_mpi |
○ | gmx::CommandLineModuleManager:[...] | cmdlinemodulemanager.cpp:569 | libgromacs_mpi.so.9.0.0 |
►3.21+ | calculateSimpleBond(int, int, [...] | bonded.cpp:4143 | libgromacs_mpi.so.9.0.0 |
○ | (anonymous namespace)::calc_on[...] | listed_forces.cpp:356 | libgromacs_mpi.so.9.0.0 |
○ | .omp_outlined.#0xbe5d40 | listed_forces.cpp:428 | libgromacs_mpi.so.9.0.0 |
○ | __kmp_invoke_microtask | libomp.so | |
○ | __kmp_fork_call | libomp.so | |
○ | __kmpc_fork_call | libomp.so | |
○ | ListedForces::calculate(gmx_wa[...] | listed_forces.cpp:387 | libgromacs_mpi.so.9.0.0 |
○ | do_force(_IO_FILE*, t_commrec [...] | sim_util.cpp:2047 | libgromacs_mpi.so.9.0.0 |
○ | gmx::LegacySimulator::do_md() | md.cpp:1248 | libgromacs_mpi.so.9.0.0 |
○ | gmx::Mdrunner::mdrunner() | runner.cpp:2311 | libgromacs_mpi.so.9.0.0 |
○ | gmx::gmx_mdrun(int, gmx_hw_inf[...] | mdrun.cpp:280 | gmx_mpi |
Coverage (%) | Name | Source Location | Module |
---|---|---|---|
►82.88+ | calculateSimpleBond(int, int, [...] | bonded.cpp:4143 | libgromacs_mpi.so.9.0.0 |
○ | (anonymous namespace)::calc_on[...] | listed_forces.cpp:356 | libgromacs_mpi.so.9.0.0 |
○ | .omp_outlined.#0xbe5d40 | listed_forces.cpp:428 | libgromacs_mpi.so.9.0.0 |
○ | __kmp_invoke_microtask | libomp.so | |
○ | __kmp_fork_call | libomp.so | |
○ | __kmpc_fork_call | libomp.so | |
○ | ListedForces::calculate(gmx_wa[...] | listed_forces.cpp:387 | libgromacs_mpi.so.9.0.0 |
○ | do_force(_IO_FILE*, t_commrec [...] | sim_util.cpp:2047 | libgromacs_mpi.so.9.0.0 |
○ | gmx::LegacySimulator::do_md() | md.cpp:1248 | libgromacs_mpi.so.9.0.0 |
○ | gmx::Mdrunner::mdrunner() | runner.cpp:2311 | libgromacs_mpi.so.9.0.0 |
○ | gmx::gmx_mdrun(int, gmx_hw_inf[...] | mdrun.cpp:280 | gmx_mpi |
○ | gmx::gmx_mdrun(int, char**) | mdrun.cpp:82 | gmx_mpi |
○ | gmx::CommandLineModuleManager:[...] | cmdlinemodulemanager.cpp:569 | libgromacs_mpi.so.9.0.0 |
○ | main | gmx.cpp:58 | gmx_mpi |
○ | __libc_start_call_main | libc.so.6 | |
►15.95+ | calculateSimpleBond(int, int, [...] | bonded.cpp:4143 | libgromacs_mpi.so.9.0.0 |
○ | (anonymous namespace)::calc_on[...] | listed_forces.cpp:356 | libgromacs_mpi.so.9.0.0 |
○ | .omp_outlined.#0xbe5d40 | listed_forces.cpp:428 | libgromacs_mpi.so.9.0.0 |
○ | __kmp_invoke_microtask | libomp.so | |
○ | __kmp_fork_call | libomp.so | |
○ | __kmpc_fork_call | libomp.so | |
○ | ListedForces::calculate(gmx_wa[...] | listed_forces.cpp:387 | libgromacs_mpi.so.9.0.0 |
○ | do_force(_IO_FILE*, t_commrec [...] | sim_util.cpp:2047 | libgromacs_mpi.so.9.0.0 |
○ | gmx::LegacySimulator::do_md() | md.cpp:1248 | libgromacs_mpi.so.9.0.0 |
○ | gmx::Mdrunner::mdrunner() | runner.cpp:2311 | libgromacs_mpi.so.9.0.0 |
○ | gmx::gmx_mdrun(int, gmx_hw_inf[...] | mdrun.cpp:280 | gmx_mpi |
►1.18+ | calculateSimpleBond(int, int, [...] | bonded.cpp:4143 | libgromacs_mpi.so.9.0.0 |
○ | (anonymous namespace)::calc_on[...] | listed_forces.cpp:356 | libgromacs_mpi.so.9.0.0 |
○ | .omp_outlined.#0xbe5d40 | listed_forces.cpp:428 | libgromacs_mpi.so.9.0.0 |
○ | __kmp_invoke_microtask | libomp.so | |
○ | __kmp_fork_call | libomp.so | |
○ | __kmpc_fork_call | libomp.so | |
○ | ListedForces::calculate(gmx_wa[...] | listed_forces.cpp:387 | libgromacs_mpi.so.9.0.0 |
○ | do_force(_IO_FILE*, t_commrec [...] | sim_util.cpp:2047 | libgromacs_mpi.so.9.0.0 |
○ | gmx::LegacySimulator::do_md() | md.cpp:1248 | libgromacs_mpi.so.9.0.0 |
○ | gmx::Mdrunner::mdrunner() | runner.cpp:2311 | libgromacs_mpi.so.9.0.0 |
○ | gmx::gmx_mdrun(int, gmx_hw_inf[...] | mdrun.cpp:280 | gmx_mpi |
○ | gmx::gmx_mdrun(int, char**) | mdrun.cpp:82 | gmx_mpi |
○ | gmx::CommandLineModuleManager:[...] | cmdlinemodulemanager.cpp:569 | libgromacs_mpi.so.9.0.0 |
○ | main | gmx.cpp:58 | gmx_mpi |
Coverage (%) | Name | Source Location | Module |
---|---|---|---|
►96.90+ | calculateSimpleBond(int, int, [...] | bonded.cpp:4143 | libgromacs_mpi.so.9.0.0 |
○ | (anonymous namespace)::calc_on[...] | listed_forces.cpp:356 | libgromacs_mpi.so.9.0.0 |
○ | .omp_outlined.#0xbe5d40 | listed_forces.cpp:428 | libgromacs_mpi.so.9.0.0 |
○ | __kmp_invoke_microtask | libomp.so | |
○ | __kmp_fork_call | libomp.so | |
○ | __kmpc_fork_call | libomp.so | |
○ | ListedForces::calculate(gmx_wa[...] | listed_forces.cpp:387 | libgromacs_mpi.so.9.0.0 |
○ | do_force(_IO_FILE*, t_commrec [...] | sim_util.cpp:2047 | libgromacs_mpi.so.9.0.0 |
○ | gmx::LegacySimulator::do_md() | md.cpp:1248 | libgromacs_mpi.so.9.0.0 |
○ | gmx::Mdrunner::mdrunner() | runner.cpp:2311 | libgromacs_mpi.so.9.0.0 |
○ | gmx::gmx_mdrun(int, gmx_hw_inf[...] | mdrun.cpp:280 | gmx_mpi |
○ | gmx::gmx_mdrun(int, char**) | mdrun.cpp:82 | gmx_mpi |
○ | gmx::CommandLineModuleManager:[...] | cmdlinemodulemanager.cpp:569 | libgromacs_mpi.so.9.0.0 |
○ | main | gmx.cpp:58 | gmx_mpi |
○ | __libc_start_call_main | libc.so.6 | |
►3.10+ | calculateSimpleBond(int, int, [...] | bonded.cpp:4143 | libgromacs_mpi.so.9.0.0 |
○ | (anonymous namespace)::calc_on[...] | listed_forces.cpp:356 | libgromacs_mpi.so.9.0.0 |
○ | .omp_outlined.#0xbe5d40 | listed_forces.cpp:428 | libgromacs_mpi.so.9.0.0 |
○ | __kmp_invoke_microtask | libomp.so | |
○ | __kmp_fork_call | libomp.so | |
○ | __kmpc_fork_call | libomp.so | |
○ | ListedForces::calculate(gmx_wa[...] | listed_forces.cpp:387 | libgromacs_mpi.so.9.0.0 |
○ | do_force(_IO_FILE*, t_commrec [...] | sim_util.cpp:2047 | libgromacs_mpi.so.9.0.0 |
○ | gmx::LegacySimulator::do_md() | md.cpp:1248 | libgromacs_mpi.so.9.0.0 |
○ | gmx::Mdrunner::mdrunner() | runner.cpp:2311 | libgromacs_mpi.so.9.0.0 |
○ | gmx::gmx_mdrun(int, gmx_hw_inf[...] | mdrun.cpp:280 | gmx_mpi |
Coverage (%) | Name | Source Location | Module |
---|---|---|---|
►80.56+ | calculateSimpleBond(int, int, [...] | bonded.cpp:4143 | libgromacs_mpi.so.9.0.0 |
○ | (anonymous namespace)::calc_on[...] | listed_forces.cpp:356 | libgromacs_mpi.so.9.0.0 |
○ | .omp_outlined.#0xbe5d40 | listed_forces.cpp:428 | libgromacs_mpi.so.9.0.0 |
○ | __kmp_invoke_microtask | libomp.so | |
○ | __kmp_fork_call | libomp.so | |
○ | __kmpc_fork_call | libomp.so | |
○ | ListedForces::calculate(gmx_wa[...] | listed_forces.cpp:387 | libgromacs_mpi.so.9.0.0 |
○ | do_force(_IO_FILE*, t_commrec [...] | sim_util.cpp:2047 | libgromacs_mpi.so.9.0.0 |
○ | gmx::LegacySimulator::do_md() | md.cpp:1248 | libgromacs_mpi.so.9.0.0 |
○ | gmx::Mdrunner::mdrunner() | runner.cpp:2311 | libgromacs_mpi.so.9.0.0 |
○ | gmx::gmx_mdrun(int, gmx_hw_inf[...] | mdrun.cpp:280 | gmx_mpi |
○ | gmx::gmx_mdrun(int, char**) | mdrun.cpp:82 | gmx_mpi |
○ | gmx::CommandLineModuleManager:[...] | cmdlinemodulemanager.cpp:569 | libgromacs_mpi.so.9.0.0 |
○ | main | gmx.cpp:58 | gmx_mpi |
○ | __libc_start_call_main | libc.so.6 | |
►10.53+ | calculateSimpleBond(int, int, [...] | bonded.cpp:4143 | libgromacs_mpi.so.9.0.0 |
○ | (anonymous namespace)::calc_on[...] | listed_forces.cpp:356 | libgromacs_mpi.so.9.0.0 |
○ | .omp_outlined.#0xbe5d40 | listed_forces.cpp:428 | libgromacs_mpi.so.9.0.0 |
○ | __kmp_invoke_microtask | libomp.so | |
○ | __kmp_fork_call | libomp.so | |
○ | __kmpc_fork_call | libomp.so | |
○ | ListedForces::calculate(gmx_wa[...] | listed_forces.cpp:387 | libgromacs_mpi.so.9.0.0 |
○ | do_force(_IO_FILE*, t_commrec [...] | sim_util.cpp:2047 | libgromacs_mpi.so.9.0.0 |
○ | gmx::LegacySimulator::do_md() | md.cpp:1248 | libgromacs_mpi.so.9.0.0 |
○ | gmx::Mdrunner::mdrunner() | runner.cpp:2311 | libgromacs_mpi.so.9.0.0 |
○ | gmx::gmx_mdrun(int, gmx_hw_inf[...] | mdrun.cpp:280 | gmx_mpi |
○ | gmx::gmx_mdrun(int, char**) | mdrun.cpp:82 | gmx_mpi |
○ | gmx::CommandLineModuleManager:[...] | cmdlinemodulemanager.cpp:569 | libgromacs_mpi.so.9.0.0 |
►8.41+ | calculateSimpleBond(int, int, [...] | bonded.cpp:4143 | libgromacs_mpi.so.9.0.0 |
○ | (anonymous namespace)::calc_on[...] | listed_forces.cpp:356 | libgromacs_mpi.so.9.0.0 |
○ | .omp_outlined.#0xbe5d40 | listed_forces.cpp:428 | libgromacs_mpi.so.9.0.0 |
○ | __kmp_invoke_microtask | libomp.so | |
○ | __kmp_fork_call | libomp.so | |
○ | __kmpc_fork_call | libomp.so | |
○ | ListedForces::calculate(gmx_wa[...] | listed_forces.cpp:387 | libgromacs_mpi.so.9.0.0 |
○ | do_force(_IO_FILE*, t_commrec [...] | sim_util.cpp:2047 | libgromacs_mpi.so.9.0.0 |
○ | gmx::LegacySimulator::do_md() | md.cpp:1248 | libgromacs_mpi.so.9.0.0 |
○ | gmx::Mdrunner::mdrunner() | runner.cpp:2311 | libgromacs_mpi.so.9.0.0 |
○ | gmx::gmx_mdrun(int, gmx_hw_inf[...] | mdrun.cpp:280 | gmx_mpi |
Coverage (%) | Name | Source Location | Module |
---|---|---|---|
►100.00+ | calculateSimpleBond(int, int, [...] | bonded.cpp:4143 | libgromacs_mpi.so.9.0.0 |
○ | (anonymous namespace)::calc_on[...] | listed_forces.cpp:356 | libgromacs_mpi.so.9.0.0 |
○ | .omp_outlined.#0xbe5d40 | listed_forces.cpp:428 | libgromacs_mpi.so.9.0.0 |
○ | __kmp_invoke_microtask | libomp.so | |
○ | __kmp_fork_call | libomp.so | |
○ | __kmpc_fork_call | libomp.so | |
○ | ListedForces::calculate(gmx_wa[...] | listed_forces.cpp:387 | libgromacs_mpi.so.9.0.0 |
○ | do_force(_IO_FILE*, t_commrec [...] | sim_util.cpp:2047 | libgromacs_mpi.so.9.0.0 |
○ | gmx::LegacySimulator::do_md() | md.cpp:1248 | libgromacs_mpi.so.9.0.0 |
○ | gmx::Mdrunner::mdrunner() | runner.cpp:2311 | libgromacs_mpi.so.9.0.0 |
○ | gmx::gmx_mdrun(int, gmx_hw_inf[...] | mdrun.cpp:280 | gmx_mpi |
○ | gmx::gmx_mdrun(int, char**) | mdrun.cpp:82 | gmx_mpi |
○ | gmx::CommandLineModuleManager:[...] | cmdlinemodulemanager.cpp:569 | libgromacs_mpi.so.9.0.0 |
○ | main | gmx.cpp:58 | gmx_mpi |
○ | __libc_start_call_main | libc.so.6 |
Path / |
Metric | Value |
---|---|
CQA speedup if no scalar integer | 1.03 |
CQA speedup if FP arith vectorized | 1.00 |
CQA speedup if fully vectorized | 1.03 |
CQA speedup if no inter-iteration dependency | NA |
CQA speedup if next bottleneck killed | 1.10 |
Bottlenecks | micro-operation queue |
Function | std::enable_if<((BondedKernelFlavor)0)==((BondedKernelFlavor)0), float>::type (anonymous namespace)::pdihs<(BondedKernelFlavor)0>(int, int const*, t_iparams const*, float const (*) [3], float (*) [4], float (*) [3], t_pbc const*, float, float*, gmx::ArrayRef |
Source | bonded.cpp:2066-2066,bonded.cpp:2101-2101,bonded.cpp:2106-2106,bonded.cpp:2126-2126,impl_x86_avx_512_util_float.h:113-113,impl_x86_avx_512_simd_float.h:175-175,impl_x86_avx_512_simd_float.h:181-181,impl_x86_avx_512_simd_float.h:186-186,impl_x86_avx_512_simd_float.h:197-197,impl_x86_avx_512_simd_float.h:202-202,impl_x86_avx_512_simd_float.h:207-207,impl_x86_avx_512_simd_float.h:212-212,impl_x86_avx_512_simd_float.h:224-224,impl_x86_avx_512_simd_float.h:229-229,impl_x86_avx_512_simd_float.h:252-252,impl_x86_avx_512_simd_float.h:257-257,impl_x86_avx_512_simd_float.h:263-263,impl_x86_avx_512_simd_float.h:269-269,impl_x86_avx_512_simd_float.h:279-279,impl_x86_avx_512_simd_float.h:362-362,impl_x86_avx_512_simd_float.h:367-367,impl_x86_avx_512_simd_float.h:398-398,impl_x86_avx_512_simd_float.h:403-403,impl_x86_avx_512_simd_float.h:408-408,impl_x86_avx_512_simd_float.h:413-413,impl_x86_avx_512_simd_float.h:451-451,impl_x86_avx_512_simd_float.h:456-456,impl_x86_avx_512_simd_float.h:501-501 |
Source loop unroll info | NA |
Source loop unroll confidence level | NA |
Unroll/vectorization loop type | NA |
Unroll factor | NA |
CQA cycles | 213.83 |
CQA cycles if no scalar integer | 207.17 |
CQA cycles if FP arith vectorized | 213.83 |
CQA cycles if fully vectorized | 208.39 |
Front-end cycles | 213.83 |
P0 cycles | 3.25 |
P1 cycles | 3.25 |
P2 cycles | 3.00 |
P3 cycles | 3.00 |
P4 cycles | 1.50 |
P5 cycles | 39.33 |
P6 cycles | 39.33 |
P7 cycles | 39.33 |
P8 cycles | 193.75 |
P9 cycles | 183.67 |
P10 cycles | 145.25 |
P11 cycles | 146.33 |
P12 cycles | 114.00 |
P13 cycles | 114.00 |
DIV/SQRT cycles | 0.00 |
Inter-iter dependencies cycles | 1 |
FE+BE cycles (UFS) | NA |
Stall cycles (UFS) | NA |
Nb insns | 314.00 |
Nb uops | 1283.00 |
Nb loads | 67.00 |
Nb stores | 13.00 |
Nb stack references | 66.00 |
FLOP/cycle | 16.24 |
Nb FLOP add-sub | 720.00 |
Nb FLOP mul | 1536.00 |
Nb FLOP fma | 560.00 |
Nb FLOP div | 0.00 |
Nb FLOP rcp | 64.00 |
Nb FLOP sqrt | 0.00 |
Nb FLOP rsqrt | 32.00 |
Bytes/cycle | 20.50 |
Bytes prefetched | 0.00 |
Bytes loaded | 3944.00 |
Bytes stored | 440.00 |
Stride 0 | 3.00 |
Stride 1 | 0.00 |
Stride n | 2.00 |
Stride unknown | 2.00 |
Stride indirect | 0.00 |
Vectorization ratio all | 96.80 |
Vectorization ratio load | 96.83 |
Vectorization ratio store | 46.15 |
Vectorization ratio mul | 100.00 |
Vectorization ratio add_sub | 100.00 |
Vectorization ratio fma | 100.00 |
Vectorization ratio div_sqrt | 100.00 |
Vectorization ratio other | 96.61 |
Vector-efficiency ratio all | 93.68 |
Vector-efficiency ratio load | 97.02 |
Vector-efficiency ratio store | 52.88 |
Vector-efficiency ratio mul | 100.00 |
Vector-efficiency ratio add_sub | 100.00 |
Vector-efficiency ratio fma | 100.00 |
Vector-efficiency ratio div_sqrt | 100.00 |
Vector-efficiency ratio other | 80.30 |
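As a cross-check on the table above, the derived rates follow directly from the raw counts, assuming the usual convention that one FMA counts as two floating-point operations and RCP/RSQRT as one each:

$$\mathrm{FLOP/cycle} = \frac{720 + 1536 + 2\cdot 560 + 64 + 32}{213.83} = \frac{3472}{213.83} \approx 16.24$$

$$\mathrm{Bytes/cycle} = \frac{3944 + 440}{213.83} \approx 20.50, \qquad \frac{\text{ADD-SUB}}{\text{MUL}} = \frac{720}{1536} \approx 0.47$$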
Path / |
Function | std::enable_if<((BondedKernelFlavor)0)==((BondedKernelFlavor)0), float>::type (anonymous namespace)::pdihs<(BondedKernelFlavor)0>(int, int const*, t_iparams const*, float const (*) [3], float (*) [4], float (*) [3], t_pbc const*, float, float*, gmx::ArrayRef |
Source file and lines | bonded.cpp:2066-2126 |
Module | libgromacs_mpi.so.9.0.0 |
nb instructions | 314 |
nb uops | 1283 |
loop length | 1977 |
used x86 registers | 8 |
used mmx registers | 0 |
used xmm registers | 13 |
used ymm registers | 0 |
used zmm registers | 29 |
nb stack references | 66 |
ADD-SUB / MUL ratio | 0.47 |
micro-operation queue | 213.83 cycles |
front end | 213.83 cycles |
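The micro-operation queue bottleneck is consistent with the uop count: assuming a 6-uop-per-cycle front end (which these figures imply), issuing the loop's 1283 uops takes

$$\frac{1283\ \text{uops}}{6\ \text{uops/cycle}} \approx 213.83\ \text{cycles},$$

which matches both the front-end and overall CQA cycle estimates and exceeds the busiest FP port (193.75 cycles), so the loop is front-end bound rather than limited by the FP execution ports.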
ALU0/BRU0 | ALU1 | ALU2 | ALU3 | BRU1 | AGU0 | AGU1 | AGU2 | FP0 | FP1 | FP2 | FP3 | FP4 | FP5 | |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
uops | 3.25 | 3.25 | 3.00 | 3.00 | 1.50 | 23.00 | 23.00 | 23.00 | 123.25 | 123.17 | 123.25 | 123.33 | 114.00 | 114.00 |
cycles | 3.25 | 3.25 | 3.00 | 3.00 | 1.50 | 39.33 | 39.33 | 39.33 | 193.75 | 183.67 | 145.25 | 146.33 | 114.00 | 114.00 |
Cycles executing div or sqrt instructions | NA |
Longest recurrence chain latency (RecMII) | 1.00 |
Front-end | 213.83 |
Dispatch | 193.75 |
Data deps. | 1.00 |
Overall L1 | 213.83 |
all | 58% |
load | 100% |
store | 0% |
mul | 100% |
add-sub | NA (no add-sub vectorizable/vectorized instructions) |
fma | NA (no fma vectorizable/vectorized instructions) |
other | 100% |
all | 99% |
load | 96% |
store | 100% |
mul | 100% |
add-sub | 100% |
fma | 100% |
div/sqrt | 100% |
other | 96% |
all | 96% |
load | 96% |
store | 46% |
mul | 100% |
add-sub | 100% |
fma | 100% |
div/sqrt | 100% |
other | 96% |
all | 59% |
load | 100% |
store | 12% |
mul | 100% |
add-sub | NA (no add-sub vectorizable/vectorized instructions) |
fma | NA (no fma vectorizable/vectorized instructions) |
other | 81% |
all | 95% |
load | 96% |
store | 100% |
mul | 100% |
add-sub | 100% |
fma | 100% |
div/sqrt | 100% |
other | 80% |
all | 93% |
load | 97% |
store | 52% |
mul | 100% |
add-sub | 100% |
fma | 100% |
div/sqrt | 100% |
other | 80% |
Instruction | Nb FU | ALU0/BRU0 | ALU1 | ALU2 | ALU3 | BRU1 | AGU0 | AGU1 | AGU2 | FP0 | FP1 | FP2 | FP3 | FP4 | FP5 | Latency | Recip. throughput | Vectorization |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
VMOVDQA64 0xe00(%RSP),%ZMM0 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVDQA64 0x940(%RSP),%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VPMULLD %ZMM11,%ZMM0,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VXORPS %XMM2,%XMM2,%XMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
MOV 0x60(%RSP),%RAX | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.33 | N/A |
VGATHERDPS (%RAX,%ZMM1,4),%ZMM2{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
VXORPS %XMM3,%XMM3,%XMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
MOV 0x70(%RSP),%RCX | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.33 | N/A |
VGATHERDPS (%RCX,%ZMM1,4),%ZMM3{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
VXORPS %XMM4,%XMM4,%XMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
MOV 0x68(%RSP),%RDX | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.33 | N/A |
VGATHERDPS (%RDX,%ZMM1,4),%ZMM4{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
VPMULLD 0x100(%RSP),%ZMM11,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VXORPS %XMM5,%XMM5,%XMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
VGATHERDPS (%RAX,%ZMM1,4),%ZMM5{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
VXORPS %XMM6,%XMM6,%XMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
VGATHERDPS (%RCX,%ZMM1,4),%ZMM6{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
VXORPS %XMM7,%XMM7,%XMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
VGATHERDPS (%RDX,%ZMM1,4),%ZMM7{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
VPMULLD 0xc0(%RSP),%ZMM11,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VXORPS %XMM8,%XMM8,%XMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
VGATHERDPS (%RAX,%ZMM1,4),%ZMM8{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
VXORPS %XMM9,%XMM9,%XMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
VGATHERDPS (%RCX,%ZMM1,4),%ZMM9{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
VXORPS %XMM10,%XMM10,%XMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
VGATHERDPS (%RDX,%ZMM1,4),%ZMM10{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
VPMULLD 0x80(%RSP),%ZMM11,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VPXOR %XMM11,%XMM11,%XMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | vect (25.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
VGATHERDPS (%RAX,%ZMM1,4),%ZMM11{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
VXORPS %XMM12,%XMM12,%XMM12 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
VGATHERDPS (%RCX,%ZMM1,4),%ZMM12{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
VXORPS %XMM13,%XMM13,%XMM13 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
VGATHERDPS (%RDX,%ZMM1,4),%ZMM13{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
VSUBPS %ZMM5,%ZMM2,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM6,%ZMM3,%ZMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM7,%ZMM4,%ZMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM5,%ZMM8,%ZMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM6,%ZMM9,%ZMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM7,%ZMM10,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM11,%ZMM8,%ZMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM12,%ZMM9,%ZMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM13,%ZMM10,%ZMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0xb80(%RSP),%ZMM12 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM12,%ZMM3,%ZMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VRNDSCALEPS $0,%ZMM10,%ZMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0xb40(%RSP),%ZMM13 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM13,%ZMM10,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM11,%ZMM1,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0xb00(%RSP),%ZMM14 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM14,%ZMM10,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM1,%ZMM2,%ZMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0xac0(%RSP),%ZMM15 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM15,%ZMM10,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM1,%ZMM3,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0xa80(%RSP),%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM16,%ZMM2,%ZMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VRNDSCALEPS $0,%ZMM3,%ZMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0xa40(%RSP),%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM17,%ZMM3,%ZMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM10,%ZMM11,%ZMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0xa00(%RSP),%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM18,%ZMM3,%ZMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM3,%ZMM2,%ZMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0x9c0(%RSP),%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM19,%ZMM10,%ZMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VRNDSCALEPS $0,%ZMM3,%ZMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0x980(%RSP),%ZMM20 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM20,%ZMM3,%ZMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM3,%ZMM10,%ZMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM12,%ZMM6,%ZMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VRNDSCALEPS $0,%ZMM10,%ZMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM10,%ZMM13,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM11,%ZMM4,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM10,%ZMM14,%ZMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM4,%ZMM5,%ZMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM10,%ZMM15,%ZMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM4,%ZMM6,%ZMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM5,%ZMM16,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VRNDSCALEPS $0,%ZMM6,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM6,%ZMM17,%ZMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM10,%ZMM11,%ZMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM6,%ZMM18,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM6,%ZMM5,%ZMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM10,%ZMM19,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VRNDSCALEPS $0,%ZMM6,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM6,%ZMM20,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM6,%ZMM10,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM12,%ZMM9,%ZMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VRNDSCALEPS $0,%ZMM10,%ZMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM10,%ZMM13,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM11,%ZMM7,%ZMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM10,%ZMM14,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM11,%ZMM8,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM10,%ZMM15,%ZMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM8,%ZMM9,%ZMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM11,%ZMM16,%ZMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VRNDSCALEPS $0,%ZMM9,%ZMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM9,%ZMM17,%ZMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM10,%ZMM7,%ZMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM9,%ZMM18,%ZMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM9,%ZMM11,%ZMM12 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM7,%ZMM19,%ZMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VRNDSCALEPS $0,%ZMM9,%ZMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM9,%ZMM20,%ZMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM9,%ZMM7,%ZMM14 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM4,%ZMM2,%ZMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD231PS %ZMM5,%ZMM1,%ZMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM6,%ZMM1,%ZMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD231PS %ZMM4,%ZMM3,%ZMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM5,%ZMM3,%ZMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD231PS %ZMM6,%ZMM2,%ZMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM8,%ZMM5,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD231PS %ZMM12,%ZMM4,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM14,%ZMM4,%ZMM13 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD231PS %ZMM8,%ZMM6,%ZMM13 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM12,%ZMM6,%ZMM15 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD231PS %ZMM14,%ZMM5,%ZMM15 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM15,%ZMM9,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD231PS %ZMM13,%ZMM10,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM11,%ZMM10,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD231PS %ZMM15,%ZMM7,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM13,%ZMM7,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD231PS %ZMM11,%ZMM9,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM16,%ZMM16,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM17,%ZMM17,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM16,%ZMM17,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM18,%ZMM18,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM16,%ZMM17,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VXORPS %XMM20,%XMM20,%XMM20 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
VCMPPS $0x1,%ZMM16,%ZMM20,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VRSQRT14PS %ZMM16,%ZMM17{%K1}{z} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VMULPS %ZMM16,%ZMM17,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0x900(%RSP),%ZMM26 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM26,%ZMM17,%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0x8c0(%RSP),%ZMM23 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFMADD213PS %ZMM23,%ZMM17,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM18,%ZMM19,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM17,%ZMM16,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM11,%ZMM7,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM13,%ZMM9,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM18,%ZMM17,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM15,%ZMM10,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM17,%ZMM18,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VCMPPS $0xc,%ZMM20,%ZMM17,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VCMPPS $0xc,%ZMM20,%ZMM16,%K2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VCMPPS $0x1,%ZMM20,%ZMM17,%K3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VCMPPS $0x1,%ZMM20,%ZMM16,%K4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VMOVAPS 0x880(%RSP),%ZMM24 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VBLENDMPS %ZMM20,%ZMM24,%ZMM18{%K1} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 1 | 0.50 | vect (100.0%) |
VMOVAPS %ZMM18,%ZMM18{%K2}{z} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (100.0%) |
VBROADCASTSS -0x86b895(%RIP),%ZMM18{%K3} | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 5 | 1 | scal (6.3%) |
VMOVAPS 0x840(%RSP),%ZMM25 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VORPS %ZMM25,%ZMM18,%ZMM18{%K4} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 1 | 0.50 | vect (100.0%) |
VRCP14PS %ZMM17,%ZMM19{%K1}{z} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VMOVAPS 0x800(%RSP),%ZMM27 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD213PS %ZMM27,%ZMM19,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM17,%ZMM19,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM16,%ZMM17,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VANDPS 0x7c0(%RSP),%ZMM16,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 1 | 1 | vect (100.0%) |
VMOVAPS 0x780(%RSP),%ZMM28 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VCMPPS $0x1,%ZMM17,%ZMM28,%K2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VRCP14PS %ZMM17,%ZMM19{%K2}{z} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VCMPPS $0x1,%ZMM20,%ZMM16,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VMOVAPS %ZMM17,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (100.0%) |
VFNMADD213PS %ZMM27,%ZMM19,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM16,%ZMM19,%ZMM17{%K2} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM17,%ZMM17,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM16,%ZMM17,%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM16,%ZMM16,%ZMM20 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0x740(%RSP),%ZMM21 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFMADD213PS 0x700(%RSP),%ZMM20,%ZMM21 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMOVAPS 0x6c0(%RSP),%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFMADD213PS 0x680(%RSP),%ZMM20,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD213PS 0x640(%RSP),%ZMM20,%ZMM21 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD213PS 0x600(%RSP),%ZMM20,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD213PS 0x5c0(%RSP),%ZMM20,%ZMM21 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD213PS 0x580(%RSP),%ZMM20,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD231PS %ZMM21,%ZMM16,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD213PS %ZMM17,%ZMM19,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VSUBPS %ZMM22,%ZMM24,%ZMM22{%K2} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VXORPS %ZMM25,%ZMM22,%ZMM22{%K1} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 1 | 0.50 | vect (100.0%) |
VADDPS %ZMM22,%ZMM18,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM11,%ZMM3,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM13,%ZMM2,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM18,%ZMM17,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM15,%ZMM1,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM17,%ZMM18,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VPTERNLOGD $-0x1c,0x4c0(%RSP),%ZMM16,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 1 | 1 | vect (100.0%) |
VMULPS %ZMM7,%ZMM7,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM9,%ZMM9,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM18,%ZMM16,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM10,%ZMM10,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM16,%ZMM18,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM11,%ZMM11,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM13,%ZMM13,%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM19,%ZMM18,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM15,%ZMM15,%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM18,%ZMM19,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM6,%ZMM6,%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM5,%ZMM5,%ZMM20 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM19,%ZMM20,%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM4,%ZMM4,%ZMM20 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM19,%ZMM20,%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMAXPS 0x540(%RSP),%ZMM19,%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 2 | 1 | vect (100.0%) |
VRSQRT14PS %ZMM19,%ZMM20 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VMULPS %ZMM20,%ZMM19,%ZMM21 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM26,%ZMM20,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFMADD213PS %ZMM23,%ZMM20,%ZMM21 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM21,%ZMM22,%ZMM20 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS 0x500(%RSP),%ZMM19,%ZMM21 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMAXPS %ZMM21,%ZMM16,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 2 | 1 | vect (100.0%) |
VMAXPS %ZMM21,%ZMM18,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 2 | 1 | vect (100.0%) |
VMOVAPS 0xdc0(%RSP),%ZMM21 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0x480(%RSP),%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS 0xd80(%RSP),%ZMM22,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFMADD231PS %ZMM17,%ZMM21,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS 0x440(%RSP),%ZMM22,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VRNDSCALEPS $0,%ZMM17,%ZMM23 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFMADD231PS 0x380(%RSP),%ZMM23,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD231PS 0x340(%RSP),%ZMM23,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD231PS 0x300(%RSP),%ZMM23,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD231PS 0x2c0(%RSP),%ZMM23,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM22,%ZMM22,%ZMM23 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0x280(%RSP),%ZMM24 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFMADD213PS 0x240(%RSP),%ZMM23,%ZMM24 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD213PS 0x200(%RSP),%ZMM23,%ZMM24 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM23,%ZMM22,%ZMM25 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFMADD213PS %ZMM22,%ZMM24,%ZMM25 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VRCP14PS %ZMM16,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VRCP14PS %ZMM18,%ZMM24 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VFNMADD213PS %ZMM27,%ZMM22,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM16,%ZMM22,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD213PS %ZMM27,%ZMM24,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM18,%ZMM24,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM20,%ZMM19,%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM16,%ZMM19,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM18,%ZMM19,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM6,%ZMM3,%ZMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM5,%ZMM2,%ZMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM3,%ZMM2,%ZMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM4,%ZMM1,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM2,%ZMM1,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM14,%ZMM6,%ZMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM12,%ZMM5,%ZMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM2,%ZMM3,%ZMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM8,%ZMM4,%ZMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM2,%ZMM3,%ZMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM20,%ZMM20,%ZMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM3,%ZMM1,%ZMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM3,%ZMM2,%ZMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VCVTPS2DQ %ZMM17,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VPTESTNMD 0x400(%RSP),%ZMM1,%K1 | 2 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VPTESTMD 0x3c0(%RSP),%ZMM1,%K2 | 2 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VMOVAPS 0x1c0(%RSP),%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFMADD213PS 0x180(%RSP),%ZMM23,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD213PS 0x140(%RSP),%ZMM23,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD213PS %ZMM26,%ZMM23,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD213PS %ZMM28,%ZMM23,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMOVAPS %ZMM25,%ZMM1{%K1} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (100.0%) |
VBROADCASTSS -0x86bb11(%RIP),%ZMM2{%K2}{z} | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 5 | 1 | scal (6.3%) |
VXORPS %ZMM2,%ZMM1,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 1 | 0.50 | vect (100.0%) |
VMULPS 0xd40(%RSP),%ZMM21,%ZMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM1,%ZMM2,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM1,%ZMM16,%ZMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM1,%ZMM18,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM2,%ZMM7,%ZMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM2,%ZMM9,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM2,%ZMM10,%ZMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM1,%ZMM11,%ZMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM1,%ZMM13,%ZMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM1,%ZMM15,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS %ZMM3,0xd00(%RSP) | 2 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 1 | 1 | 5 | 2 | vect (100.0%) |
VMOVAPS %ZMM2,0xc80(%RSP) | 2 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 1 | 1 | 5 | 2 | vect (100.0%) |
VMOVAPS %ZMM6,0xcc0(%RSP) | 2 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 1 | 1 | 5 | 2 | vect (100.0%) |
LEA 0xc80(%RSP),%RAX | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
MOV %RAX,0x20(%RSP) | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 | scal (12.5%) |
VMOVAPS %ZMM7,0xc40(%RSP) | 2 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 1 | 1 | 5 | 2 | vect (100.0%) |
LEA 0xc40(%RSP),%RAX | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
MOV %RAX,0x28(%RSP) | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 | scal (12.5%) |
VMOVAPS %ZMM8,0xc00(%RSP) | 2 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 1 | 1 | 5 | 2 | vect (100.0%) |
LEA 0xc00(%RSP),%RAX | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
MOV %RAX,0x30(%RSP) | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 | scal (12.5%) |
VMOVAPS %ZMM1,0xbc0(%RSP) | 2 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 1 | 1 | 5 | 2 | vect (100.0%) |
LEA 0xbc0(%RSP),%RAX | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
MOV %RAX,0x38(%RSP) | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 | scal (12.5%) |
MOV 0x78(%RSP),%RAX | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.33 | N/A |
MOV %RAX,0x40(%RSP) | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 | scal (12.5%) |
LEA 0xcc0(%RSP),%RAX | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
MOV %RAX,0x18(%RSP) | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 | scal (12.5%) |
LEA 0xd00(%RSP),%RAX | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
MOV %RAX,0x10(%RSP) | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 | scal (12.5%) |
LEA 0x100(%RSP),%RDX | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
LEA 0xc0(%RSP),%R8 | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
LEA 0x80(%RSP),%R9 | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
CALL bde790 <_ZN12_GLOBAL__N_124do_dih_fup_noshiftf_simdEPKiS1_S1_S1_N3gmx9SimdFloatES3_S3_S3_S3_S3_S3_S3_PA4_f@@608> | 2 | 0.50 | 0 | 0 | 0 | 0.50 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | N/A |
ADD $0x50,%R14 | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
CMP %R13D,%R14D | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
JGE bc7e73 <_ZN12_GLOBAL__N_15pdihsIL18BondedKernelFlavor0EEENSt9enable_ifIXeqT_LS1_0EEfE4typeEiPKiPK9t_iparamsPA3_KfPA4_fPA3_fPK5t_pbcfPfN3gmx8ArrayRefISA_EEP8t_fcdataP12t_disresdataP12t_oriresdataPi+0xbb3> | 1 | 0.50 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50-1 | N/A |
MOV %R14,%RAX | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | N/A |
XOR %ECX,%ECX | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | N/A |
MOV %R14D,%EDX | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | N/A |
JMP bc7e22 <_ZN12_GLOBAL__N_15pdihsIL18BondedKernelFlavor0EEENSt9enable_ifIXeqT_LS1_0EEfE4typeEiPKiPK9t_iparamsPA3_KfPA4_fPA3_fPK5t_pbcfPfN3gmx8ArrayRefISA_EEP8t_fcdataP12t_disresdataP12t_oriresdataPi+0xb62> | 1 | 0.50 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | N/A |
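Reading note on the Vectorization column of the per-instruction tables: the percentages are consistent with the fraction of the 512-bit register width an instruction actually touches. This is an assumed interpretation inferred from the figures, not something the report states; the sketch below only checks that arithmetic against values that appear in the table.

```latex
% Assumed reading: vectorization % = operand width (bits) / 512.
% All four values below occur in the table and match this reading.
\[
\tfrac{512}{512} = 100\%\ \text{(ZMM ops)},\quad
\tfrac{128}{512} = 25\%\ \text{(XMM zero idioms)},\quad
\tfrac{64}{512} = 12.5\%\ \text{(64-bit MOV)},\quad
\tfrac{32}{512} = 6.25\% \approx 6.3\%\ \text{(scalar broadcast)}.
\]
```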
Function | std::enable_if<((BondedKernelFlavor)0)==((BondedKernelFlavor)0), float>::type (anonymous namespace)::pdihs<(BondedKernelFlavor)0>(int, int const*, t_iparams const*, float const (*) [3], float (*) [4], float (*) [3], t_pbc const*, float, float*, gmx::ArrayRef |
Source file and lines | bonded.cpp:2066-2126 |
Module | libgromacs_mpi.so.9.0.0 |
nb instructions | 314 |
nb uops | 1283 |
loop length | 1977 |
used x86 registers | 8 |
used mmx registers | 0 |
used xmm registers | 13 |
used ymm registers | 0 |
used zmm registers | 29 |
nb stack references | 66 |
ADD-SUB / MUL ratio | 0.47 |
micro-operation queue | 213.83 cycles |
front end | 213.83 cycles |
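The front-end figure follows directly from the uop count, assuming a 6-uop-per-cycle front end (an assumption implied by the numbers rather than stated in the report):

```latex
% Assumption: the front end delivers 6 uops per cycle.
\[
\text{front-end bound} \approx \frac{1283\ \text{uops}}{6\ \text{uops/cycle}} \approx 213.83\ \text{cycles per iteration}.
\]
```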
 | ALU0/BRU0 | ALU1 | ALU2 | ALU3 | BRU1 | AGU0 | AGU1 | AGU2 | FP0 | FP1 | FP2 | FP3 | FP4 | FP5 |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
uops | 3.25 | 3.25 | 3.00 | 3.00 | 1.50 | 23.00 | 23.00 | 23.00 | 123.25 | 123.17 | 123.25 | 123.33 | 114.00 | 114.00 |
cycles | 3.25 | 3.25 | 3.00 | 3.00 | 1.50 | 39.33 | 39.33 | 39.33 | 193.75 | 183.67 | 145.25 | 146.33 | 114.00 | 114.00 |
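The busiest port in the cycles row sets the dispatch bound; the 193.75 cycles on FP0 is the same figure reported as Dispatch further down. A minimal check against the row above:

```latex
% Dispatch bound = most heavily loaded port (values taken from the cycles row).
\[
\max(3.25,\,3.25,\,3.00,\,3.00,\,1.50,\,39.33,\,39.33,\,39.33,\,193.75,\,183.67,\,145.25,\,146.33,\,114.00,\,114.00) = 193.75\ \text{cycles (FP0)}.
\]
```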
Cycles executing div or sqrt instructions | NA |
Longest recurrence chain latency (RecMII) | 1.00 |
Front-end | 213.83 |
Dispatch | 193.75 |
Data deps. | 1.00 |
Overall L1 | 213.83 |
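The overall L1 estimate is consistent with taking the worst of the three bounds listed above; treating that max rule as an assumption about how the tool combines them, the arithmetic checks out:

```latex
% Assumed combination rule: overall bound = max of the listed per-loop bounds.
\[
\text{Overall L1} = \max(\text{Front-end},\ \text{Dispatch},\ \text{Data deps.}) = \max(213.83,\ 193.75,\ 1.00) = 213.83\ \text{cycles}.
\]
```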
all | 58% |
load | 100% |
store | 0% |
mul | 100% |
add-sub | NA (no add-sub vectorizable/vectorized instructions) |
fma | NA (no fma vectorizable/vectorized instructions) |
other | 100% |
all | 99% |
load | 96% |
store | 100% |
mul | 100% |
add-sub | 100% |
fma | 100% |
div/sqrt | 100% |
other | 96% |
all | 96% |
load | 96% |
store | 46% |
mul | 100% |
add-sub | 100% |
fma | 100% |
div/sqrt | 100% |
other | 96% |
all | 59% |
load | 100% |
store | 12% |
mul | 100% |
add-sub | NA (no add-sub vectorizable/vectorized instructions) |
fma | NA (no fma vectorizable/vectorized instructions) |
other | 81% |
all | 95% |
load | 96% |
store | 100% |
mul | 100% |
add-sub | 100% |
fma | 100% |
div/sqrt | 100% |
other | 80% |
all | 93% |
load | 97% |
store | 52% |
mul | 100% |
add-sub | 100% |
fma | 100% |
div/sqrt | 100% |
other | 80% |
Instruction | Nb FU | ALU0/BRU0 | ALU1 | ALU2 | ALU3 | BRU1 | AGU0 | AGU1 | AGU2 | FP0 | FP1 | FP2 | FP3 | FP4 | FP5 | Latency | Recip. throughput | Vectorization |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
VMOVDQA64 0xe00(%RSP),%ZMM0 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVDQA64 0x940(%RSP),%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VPMULLD %ZMM11,%ZMM0,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VXORPS %XMM2,%XMM2,%XMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
MOV 0x60(%RSP),%RAX | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.33 | N/A |
VGATHERDPS (%RAX,%ZMM1,4),%ZMM2{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
VXORPS %XMM3,%XMM3,%XMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
MOV 0x70(%RSP),%RCX | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.33 | N/A |
VGATHERDPS (%RCX,%ZMM1,4),%ZMM3{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
VXORPS %XMM4,%XMM4,%XMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
MOV 0x68(%RSP),%RDX | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.33 | N/A |
VGATHERDPS (%RDX,%ZMM1,4),%ZMM4{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
VPMULLD 0x100(%RSP),%ZMM11,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VXORPS %XMM5,%XMM5,%XMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
VGATHERDPS (%RAX,%ZMM1,4),%ZMM5{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
VXORPS %XMM6,%XMM6,%XMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
VGATHERDPS (%RCX,%ZMM1,4),%ZMM6{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
VXORPS %XMM7,%XMM7,%XMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
VGATHERDPS (%RDX,%ZMM1,4),%ZMM7{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
VPMULLD 0xc0(%RSP),%ZMM11,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VXORPS %XMM8,%XMM8,%XMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
VGATHERDPS (%RAX,%ZMM1,4),%ZMM8{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
VXORPS %XMM9,%XMM9,%XMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
VGATHERDPS (%RCX,%ZMM1,4),%ZMM9{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
VXORPS %XMM10,%XMM10,%XMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
VGATHERDPS (%RDX,%ZMM1,4),%ZMM10{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
VPMULLD 0x80(%RSP),%ZMM11,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VPXOR %XMM11,%XMM11,%XMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | vect (25.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
VGATHERDPS (%RAX,%ZMM1,4),%ZMM11{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
VXORPS %XMM12,%XMM12,%XMM12 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
VGATHERDPS (%RCX,%ZMM1,4),%ZMM12{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
VXORPS %XMM13,%XMM13,%XMM13 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
VGATHERDPS (%RDX,%ZMM1,4),%ZMM13{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
VSUBPS %ZMM5,%ZMM2,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM6,%ZMM3,%ZMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM7,%ZMM4,%ZMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM5,%ZMM8,%ZMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM6,%ZMM9,%ZMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM7,%ZMM10,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM11,%ZMM8,%ZMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM12,%ZMM9,%ZMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM13,%ZMM10,%ZMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0xb80(%RSP),%ZMM12 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM12,%ZMM3,%ZMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VRNDSCALEPS $0,%ZMM10,%ZMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0xb40(%RSP),%ZMM13 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM13,%ZMM10,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM11,%ZMM1,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0xb00(%RSP),%ZMM14 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM14,%ZMM10,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM1,%ZMM2,%ZMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0xac0(%RSP),%ZMM15 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM15,%ZMM10,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM1,%ZMM3,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0xa80(%RSP),%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM16,%ZMM2,%ZMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VRNDSCALEPS $0,%ZMM3,%ZMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0xa40(%RSP),%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM17,%ZMM3,%ZMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM10,%ZMM11,%ZMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0xa00(%RSP),%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM18,%ZMM3,%ZMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM3,%ZMM2,%ZMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0x9c0(%RSP),%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM19,%ZMM10,%ZMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VRNDSCALEPS $0,%ZMM3,%ZMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0x980(%RSP),%ZMM20 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM20,%ZMM3,%ZMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM3,%ZMM10,%ZMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM12,%ZMM6,%ZMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VRNDSCALEPS $0,%ZMM10,%ZMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM10,%ZMM13,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM11,%ZMM4,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM10,%ZMM14,%ZMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM4,%ZMM5,%ZMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM10,%ZMM15,%ZMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM4,%ZMM6,%ZMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM5,%ZMM16,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VRNDSCALEPS $0,%ZMM6,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM6,%ZMM17,%ZMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM10,%ZMM11,%ZMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM6,%ZMM18,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM6,%ZMM5,%ZMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM10,%ZMM19,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VRNDSCALEPS $0,%ZMM6,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM6,%ZMM20,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM6,%ZMM10,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM12,%ZMM9,%ZMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VRNDSCALEPS $0,%ZMM10,%ZMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM10,%ZMM13,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM11,%ZMM7,%ZMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM10,%ZMM14,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM11,%ZMM8,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM10,%ZMM15,%ZMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM8,%ZMM9,%ZMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM11,%ZMM16,%ZMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VRNDSCALEPS $0,%ZMM9,%ZMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM9,%ZMM17,%ZMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM10,%ZMM7,%ZMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM9,%ZMM18,%ZMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM9,%ZMM11,%ZMM12 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM7,%ZMM19,%ZMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VRNDSCALEPS $0,%ZMM9,%ZMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM9,%ZMM20,%ZMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM9,%ZMM7,%ZMM14 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM4,%ZMM2,%ZMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD231PS %ZMM5,%ZMM1,%ZMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM6,%ZMM1,%ZMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD231PS %ZMM4,%ZMM3,%ZMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM5,%ZMM3,%ZMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD231PS %ZMM6,%ZMM2,%ZMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM8,%ZMM5,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD231PS %ZMM12,%ZMM4,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM14,%ZMM4,%ZMM13 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD231PS %ZMM8,%ZMM6,%ZMM13 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM12,%ZMM6,%ZMM15 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD231PS %ZMM14,%ZMM5,%ZMM15 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM15,%ZMM9,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD231PS %ZMM13,%ZMM10,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM11,%ZMM10,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD231PS %ZMM15,%ZMM7,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM13,%ZMM7,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD231PS %ZMM11,%ZMM9,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM16,%ZMM16,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM17,%ZMM17,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM16,%ZMM17,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM18,%ZMM18,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM16,%ZMM17,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VXORPS %XMM20,%XMM20,%XMM20 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
VCMPPS $0x1,%ZMM16,%ZMM20,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VRSQRT14PS %ZMM16,%ZMM17{%K1}{z} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VMULPS %ZMM16,%ZMM17,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0x900(%RSP),%ZMM26 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM26,%ZMM17,%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0x8c0(%RSP),%ZMM23 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFMADD213PS %ZMM23,%ZMM17,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM18,%ZMM19,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM17,%ZMM16,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM11,%ZMM7,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM13,%ZMM9,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM18,%ZMM17,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM15,%ZMM10,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM17,%ZMM18,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VCMPPS $0xc,%ZMM20,%ZMM17,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VCMPPS $0xc,%ZMM20,%ZMM16,%K2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VCMPPS $0x1,%ZMM20,%ZMM17,%K3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VCMPPS $0x1,%ZMM20,%ZMM16,%K4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VMOVAPS 0x880(%RSP),%ZMM24 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VBLENDMPS %ZMM20,%ZMM24,%ZMM18{%K1} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 1 | 0.50 | vect (100.0%) |
VMOVAPS %ZMM18,%ZMM18{%K2}{z} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (100.0%) |
VBROADCASTSS -0x86b895(%RIP),%ZMM18{%K3} | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 5 | 1 | scal (6.3%) |
VMOVAPS 0x840(%RSP),%ZMM25 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VORPS %ZMM25,%ZMM18,%ZMM18{%K4} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 1 | 0.50 | vect (100.0%) |
VRCP14PS %ZMM17,%ZMM19{%K1}{z} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VMOVAPS 0x800(%RSP),%ZMM27 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD213PS %ZMM27,%ZMM19,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM17,%ZMM19,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM16,%ZMM17,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VANDPS 0x7c0(%RSP),%ZMM16,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 1 | 1 | vect (100.0%) |
VMOVAPS 0x780(%RSP),%ZMM28 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VCMPPS $0x1,%ZMM17,%ZMM28,%K2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VRCP14PS %ZMM17,%ZMM19{%K2}{z} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VCMPPS $0x1,%ZMM20,%ZMM16,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VMOVAPS %ZMM17,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (100.0%) |
VFNMADD213PS %ZMM27,%ZMM19,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM16,%ZMM19,%ZMM17{%K2} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM17,%ZMM17,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM16,%ZMM17,%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM16,%ZMM16,%ZMM20 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0x740(%RSP),%ZMM21 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFMADD213PS 0x700(%RSP),%ZMM20,%ZMM21 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMOVAPS 0x6c0(%RSP),%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFMADD213PS 0x680(%RSP),%ZMM20,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD213PS 0x640(%RSP),%ZMM20,%ZMM21 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD213PS 0x600(%RSP),%ZMM20,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD213PS 0x5c0(%RSP),%ZMM20,%ZMM21 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD213PS 0x580(%RSP),%ZMM20,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD231PS %ZMM21,%ZMM16,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD213PS %ZMM17,%ZMM19,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VSUBPS %ZMM22,%ZMM24,%ZMM22{%K2} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VXORPS %ZMM25,%ZMM22,%ZMM22{%K1} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 1 | 0.50 | vect (100.0%) |
VADDPS %ZMM22,%ZMM18,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM11,%ZMM3,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM13,%ZMM2,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM18,%ZMM17,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM15,%ZMM1,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM17,%ZMM18,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VPTERNLOGD $-0x1c,0x4c0(%RSP),%ZMM16,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 1 | 1 | vect (100.0%) |
VMULPS %ZMM7,%ZMM7,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM9,%ZMM9,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM18,%ZMM16,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM10,%ZMM10,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM16,%ZMM18,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM11,%ZMM11,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM13,%ZMM13,%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM19,%ZMM18,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM15,%ZMM15,%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM18,%ZMM19,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM6,%ZMM6,%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM5,%ZMM5,%ZMM20 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM19,%ZMM20,%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM4,%ZMM4,%ZMM20 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM19,%ZMM20,%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMAXPS 0x540(%RSP),%ZMM19,%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 2 | 1 | vect (100.0%) |
VRSQRT14PS %ZMM19,%ZMM20 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VMULPS %ZMM20,%ZMM19,%ZMM21 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM26,%ZMM20,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFMADD213PS %ZMM23,%ZMM20,%ZMM21 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM21,%ZMM22,%ZMM20 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS 0x500(%RSP),%ZMM19,%ZMM21 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMAXPS %ZMM21,%ZMM16,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 2 | 1 | vect (100.0%) |
VMAXPS %ZMM21,%ZMM18,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 2 | 1 | vect (100.0%) |
VMOVAPS 0xdc0(%RSP),%ZMM21 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0x480(%RSP),%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS 0xd80(%RSP),%ZMM22,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFMADD231PS %ZMM17,%ZMM21,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS 0x440(%RSP),%ZMM22,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VRNDSCALEPS $0,%ZMM17,%ZMM23 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFMADD231PS 0x380(%RSP),%ZMM23,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD231PS 0x340(%RSP),%ZMM23,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD231PS 0x300(%RSP),%ZMM23,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD231PS 0x2c0(%RSP),%ZMM23,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM22,%ZMM22,%ZMM23 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0x280(%RSP),%ZMM24 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFMADD213PS 0x240(%RSP),%ZMM23,%ZMM24 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD213PS 0x200(%RSP),%ZMM23,%ZMM24 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM23,%ZMM22,%ZMM25 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFMADD213PS %ZMM22,%ZMM24,%ZMM25 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VRCP14PS %ZMM16,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VRCP14PS %ZMM18,%ZMM24 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VFNMADD213PS %ZMM27,%ZMM22,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM16,%ZMM22,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD213PS %ZMM27,%ZMM24,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM18,%ZMM24,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM20,%ZMM19,%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM16,%ZMM19,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM18,%ZMM19,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM6,%ZMM3,%ZMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM5,%ZMM2,%ZMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM3,%ZMM2,%ZMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM4,%ZMM1,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM2,%ZMM1,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM14,%ZMM6,%ZMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM12,%ZMM5,%ZMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM2,%ZMM3,%ZMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM8,%ZMM4,%ZMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM2,%ZMM3,%ZMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM20,%ZMM20,%ZMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM3,%ZMM1,%ZMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM3,%ZMM2,%ZMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VCVTPS2DQ %ZMM17,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VPTESTNMD 0x400(%RSP),%ZMM1,%K1 | 2 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VPTESTMD 0x3c0(%RSP),%ZMM1,%K2 | 2 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VMOVAPS 0x1c0(%RSP),%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFMADD213PS 0x180(%RSP),%ZMM23,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD213PS 0x140(%RSP),%ZMM23,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD213PS %ZMM26,%ZMM23,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD213PS %ZMM28,%ZMM23,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMOVAPS %ZMM25,%ZMM1{%K1} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (100.0%) |
VBROADCASTSS -0x86bb11(%RIP),%ZMM2{%K2}{z} | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 5 | 1 | scal (6.3%) |
VXORPS %ZMM2,%ZMM1,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 1 | 0.50 | vect (100.0%) |
VMULPS 0xd40(%RSP),%ZMM21,%ZMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM1,%ZMM2,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM1,%ZMM16,%ZMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM1,%ZMM18,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM2,%ZMM7,%ZMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM2,%ZMM9,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM2,%ZMM10,%ZMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM1,%ZMM11,%ZMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM1,%ZMM13,%ZMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM1,%ZMM15,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS %ZMM3,0xd00(%RSP) | 2 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 1 | 1 | 5 | 2 | vect (100.0%) |
VMOVAPS %ZMM2,0xc80(%RSP) | 2 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 1 | 1 | 5 | 2 | vect (100.0%) |
VMOVAPS %ZMM6,0xcc0(%RSP) | 2 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 1 | 1 | 5 | 2 | vect (100.0%) |
LEA 0xc80(%RSP),%RAX | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
MOV %RAX,0x20(%RSP) | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 | scal (12.5%) |
VMOVAPS %ZMM7,0xc40(%RSP) | 2 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 1 | 1 | 5 | 2 | vect (100.0%) |
LEA 0xc40(%RSP),%RAX | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
MOV %RAX,0x28(%RSP) | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 | scal (12.5%) |
VMOVAPS %ZMM8,0xc00(%RSP) | 2 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 1 | 1 | 5 | 2 | vect (100.0%) |
LEA 0xc00(%RSP),%RAX | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
MOV %RAX,0x30(%RSP) | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 | scal (12.5%) |
VMOVAPS %ZMM1,0xbc0(%RSP) | 2 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 1 | 1 | 5 | 2 | vect (100.0%) |
LEA 0xbc0(%RSP),%RAX | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
MOV %RAX,0x38(%RSP) | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 | scal (12.5%) |
MOV 0x78(%RSP),%RAX | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.33 | N/A |
MOV %RAX,0x40(%RSP) | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 | scal (12.5%) |
LEA 0xcc0(%RSP),%RAX | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
MOV %RAX,0x18(%RSP) | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 | scal (12.5%) |
LEA 0xd00(%RSP),%RAX | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
MOV %RAX,0x10(%RSP) | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 | scal (12.5%) |
LEA 0x100(%RSP),%RDX | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
LEA 0xc0(%RSP),%R8 | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
LEA 0x80(%RSP),%R9 | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
CALL bde790 <_ZN12_GLOBAL__N_124do_dih_fup_noshiftf_simdEPKiS1_S1_S1_N3gmx9SimdFloatES3_S3_S3_S3_S3_S3_S3_PA4_f@@608> | 2 | 0.50 | 0 | 0 | 0 | 0.50 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | N/A |
ADD $0x50,%R14 | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
CMP %R13D,%R14D | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
JGE bc7e73 <_ZN12_GLOBAL__N_15pdihsIL18BondedKernelFlavor0EEENSt9enable_ifIXeqT_LS1_0EEfE4typeEiPKiPK9t_iparamsPA3_KfPA4_fPA3_fPK5t_pbcfPfN3gmx8ArrayRefISA_EEP8t_fcdataP12t_disresdataP12t_oriresdataPi+0xbb3> | 1 | 0.50 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50-1 | N/A |
MOV %R14,%RAX | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | N/A |
XOR %ECX,%ECX | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | N/A |
MOV %R14D,%EDX | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | N/A |
JMP bc7e22 <_ZN12_GLOBAL__N_15pdihsIL18BondedKernelFlavor0EEENSt9enable_ifIXeqT_LS1_0EEfE4typeEiPKiPK9t_iparamsPA3_KfPA4_fPA3_fPK5t_pbcfPfN3gmx8ArrayRefISA_EEP8t_fcdataP12t_disresdataP12t_oriresdataPi+0xb62> | 1 | 0.50 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | N/A |
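The arithmetic core of the listing above is dominated by two approximate-reciprocal idioms: a VRSQRT14PS estimate refined by the following VMULPS / VFMADD213PS / VMULPS group (an approximate inverse square root), and a pair of VRCP14PS estimates each refined by a VFNMADD213PS / VMULPS pair. The sketch below shows the standard single-precision Newton-Raphson refinements that this instruction pattern corresponds to. It is an illustration, not the GROMACS source: the constants held in the spilled zmm23 / zmm26 / zmm27 operands are not visible in the listing, and -3.0f, -0.5f and 2.0f are the assumed values that make the sequence reduce to the textbook one-step refinement (the VMAXPS just before the rsqrt appears to clamp the argument away from zero).

```cpp
#include <immintrin.h>

// One Newton-Raphson step on the 14-bit rsqrt estimate, matching the
// VRSQRT14PS / VMULPS / VFMADD213PS / VMULPS group in the listing:
//   est' = (-0.5f * est) * (x*est*est - 3.0f)  ==  0.5f * est * (3 - x*est^2)
// -0.5f and -3.0f are assumptions about the spilled constant operands.
static inline __m512 approx_invsqrt(__m512 x)
{
    const __m512 minusHalf  = _mm512_set1_ps(-0.5f);
    const __m512 minusThree = _mm512_set1_ps(-3.0f);
    __m512 est  = _mm512_rsqrt14_ps(x);                    // VRSQRT14PS
    __m512 xest = _mm512_mul_ps(x, est);                   // VMULPS
    __m512 lhs  = _mm512_mul_ps(minusHalf, est);           // VMULPS
    __m512 rhs  = _mm512_fmadd_ps(xest, est, minusThree);  // VFMADD213PS: x*est^2 - 3
    return _mm512_mul_ps(lhs, rhs);                        // VMULPS
}

// One Newton-Raphson step on the 14-bit reciprocal estimate, matching the
// VRCP14PS / VFNMADD213PS / VMULPS groups:
//   est' = est * (2.0f - x*est)
// 2.0f is likewise an assumption about the spilled constant operand.
static inline __m512 approx_inv(__m512 x)
{
    const __m512 two = _mm512_set1_ps(2.0f);
    __m512 est = _mm512_rcp14_ps(x);                       // VRCP14PS
    __m512 t   = _mm512_fnmadd_ps(x, est, two);            // VFNMADD213PS: 2 - x*est
    return _mm512_mul_ps(est, t);                          // VMULPS
}
```

Using the estimate-plus-refinement form keeps this stretch entirely on the FMA/multiply pipes instead of the much slower divide/sqrt unit, which is consistent with the steady 0.50 / 0.50 port pressure and 3-5 cycle latencies reported for these rows.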
Run 1x1 | Number processes: 1; Number processes per node: 1; OMP_NUM_THREADS: 1 |
---|---|
Run 2x1 | Number processes: 2; Number processes per node: 2; OMP_NUM_THREADS: 1 |
Run 4x1 | Number processes: 4; Number processes per node: 4; OMP_NUM_THREADS: 1 |
Run 8x1 | Number processes: 8; Number processes per node: 8; OMP_NUM_THREADS: 1 |
Run 16x1 | Number processes: 16; Number processes per node: 16; OMP_NUM_THREADS: 1 |
Run 32x1 | Number processes: 32; Number processes per node: 32; OMP_NUM_THREADS: 1 |
Run 64x1 | Number processes: 64; Number processes per node: 64; OMP_NUM_THREADS: 1 |
Run 128x1 | Number processes: 128; Number processes per node: 128; OMP_NUM_THREADS: 1 |
Run 192x1 | Number processes: 192; Number nodes: 1; Number processes per node: 192; Run Command: <executable> mdrun -s ion_channel.tpr -nsteps 10000 -pin on -deffnm aocc; MPI Command: mpirun -genv I_MPI_FABRICS=shm -n <number_processes>; Dataset: ; Run Directory: .; OMP_NUM_THREADS: 1 |
Run | Efficiency | Potential Speed-Up (%) |
---|---|---|
1x1 | 1 | 0 |
2x1 | 1 | 0 |
4x1 | 1.03 | 0 |
8x1 | 1.01 | -0 |
16x1 | 1.06 | 0 |
32x1 | 1.02 | 0 |
64x1 | 1.08 | 0 |
128x1 | 0.96 | 0.01 |
192x1 | 0.93 | 0.02 |
Run | Number of threads | Efficiency (ideal is 1) | Speedup | Ideal Speedup | Time (s) | Coverage (%) |
---|---|---|---|---|---|---|
1x1 | 1 | 1 | 1 | 1 | 7.8399906158447 | 0.94049274921417 |
2x1 | 2 | 1 | 1.99 | 2 | 4.7099976539612 | 0.86758315563202 |
4x1 | 4 | 1.03 | 4.11 | 4 | 2.8250029087067 | 0.76611089706421 |
8x1 | 8 | 1.01 | 8.06 | 8 | 1.5750008821487 | 0.62877869606018 |
16x1 | 16 | 1.06 | 17.04 | 16 | 0.9299995303154 | 0.55639851093292 |
32x1 | 20 | 1.02 | 32.76 | 32 | 0.97499990463257 | 0.46570274233818 |
64x1 | 37 | 1.08 | 68.88 | 64 | 0.41999998688698 | 0.28639209270477 |
128x1 | 71 | 0.96 | 123.28 | 128 | 0.3800000846386 | 0.37420412898064 |
192x1 | 104 | 0.93 | 179.1 | 192 | 0.24000000953674 | 0.32497438788414 |
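For reference, the Efficiency column above is the measured Speedup divided by the Ideal Speedup (the rank count), so values above 1 indicate slightly super-linear scaling. The minimal sketch below reproduces a few of the reported efficiencies from those two columns; the struct and the hard-coded values are illustrative only, and how the tool derives the Speedup column from its timings is not shown here.

```cpp
#include <cstdio>

// Illustrative record mirroring three rows of the scalability table above.
struct RunScaling {
    const char* run;
    int         idealSpeedup;  // "Ideal Speedup" column (number of MPI ranks)
    double      speedup;       // "Speedup" column as reported
};

int main()
{
    const RunScaling runs[] = {
        { "4x1",     4,   4.11 },
        { "16x1",   16,  17.04 },
        { "192x1", 192, 179.1  },
    };
    for (const RunScaling& r : runs) {
        // Efficiency as tabulated: measured speedup over ideal (linear) speedup.
        double efficiency = r.speedup / r.idealSpeedup;
        std::printf("%-6s efficiency = %.2f\n", r.run, efficiency);  // 1.03, 1.06, 0.93
    }
    return 0;
}
```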