# CS_ARCH_X86, CS_MODE_32, CS_OPT_SYNTAX_ATT
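# The directive above names the Capstone configuration these fixtures assume: 32-bit x86
# decoding with AT&T operand order. A minimal sketch of checking one entry against that
# configuration (a standalone driver assumed here for illustration, not part of this file):
#
#   #include <capstone/capstone.h>
#   #include <stdio.h>
#
#   int main(void) {
#       /* bytes from the first entry below */
#       uint8_t code[] = { 0xc5, 0xea, 0x58, 0xac, 0xcb, 0xef, 0xbe, 0xad, 0xde };
#       csh handle;
#       cs_insn *insn;
#       if (cs_open(CS_ARCH_X86, CS_MODE_32, &handle) != CS_ERR_OK)
#           return 1;
#       cs_option(handle, CS_OPT_SYNTAX, CS_OPT_SYNTAX_ATT);   /* AT&T output */
#       size_t count = cs_disasm(handle, code, sizeof(code), 0, 0, &insn);
#       for (size_t i = 0; i < count; i++)
#           printf("%s %s\n", insn[i].mnemonic, insn[i].op_str); /* expect the vaddss line below */
#       cs_free(insn, count);
#       cs_close(&handle);
#       return 0;
#   }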
0xc5,0xea,0x58,0xac,0xcb,0xef,0xbe,0xad,0xde = vaddss 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xea,0x5c,0xac,0xcb,0xef,0xbe,0xad,0xde = vsubss 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xea,0x59,0xac,0xcb,0xef,0xbe,0xad,0xde = vmulss 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xea,0x5e,0xac,0xcb,0xef,0xbe,0xad,0xde = vdivss 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xeb,0x58,0xac,0xcb,0xef,0xbe,0xad,0xde = vaddsd 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xeb,0x5c,0xac,0xcb,0xef,0xbe,0xad,0xde = vsubsd 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xeb,0x59,0xac,0xcb,0xef,0xbe,0xad,0xde = vmulsd 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xeb,0x5e,0xac,0xcb,0xef,0xbe,0xad,0xde = vdivsd 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xe8,0x58,0xac,0xcb,0xef,0xbe,0xad,0xde = vaddps 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xe8,0x5c,0xac,0xcb,0xef,0xbe,0xad,0xde = vsubps 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xe8,0x59,0xac,0xcb,0xef,0xbe,0xad,0xde = vmulps 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xe8,0x5e,0xac,0xcb,0xef,0xbe,0xad,0xde = vdivps 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xe9,0x58,0xac,0xcb,0xef,0xbe,0xad,0xde = vaddpd 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xe9,0x5c,0xac,0xcb,0xef,0xbe,0xad,0xde = vsubpd 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xe9,0x59,0xac,0xcb,0xef,0xbe,0xad,0xde = vmulpd 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xe9,0x5e,0xac,0xcb,0xef,0xbe,0xad,0xde = vdivpd 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xea,0x5f,0x6c,0xcb,0xfc = vmaxss -4(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xeb,0x5f,0x6c,0xcb,0xfc = vmaxsd -4(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xea,0x5d,0x6c,0xcb,0xfc = vminss -4(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xeb,0x5d,0x6c,0xcb,0xfc = vminsd -4(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xe8,0x5f,0x6c,0xcb,0xfc = vmaxps -4(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xe9,0x5f,0x6c,0xcb,0xfc = vmaxpd -4(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xe8,0x5d,0x6c,0xcb,0xfc = vminps -4(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xe9,0x5d,0x6c,0xcb,0xfc = vminpd -4(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xe8,0x54,0x6c,0xcb,0xfc = vandps -4(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xe9,0x54,0x6c,0xcb,0xfc = vandpd -4(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xe8,0x56,0x6c,0xcb,0xfc = vorps -4(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xe9,0x56,0x6c,0xcb,0xfc = vorpd -4(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xe8,0x57,0x6c,0xcb,0xfc = vxorps -4(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xe9,0x57,0x6c,0xcb,0xfc = vxorpd -4(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xe8,0x55,0x6c,0xcb,0xfc = vandnps -4(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xe9,0x55,0x6c,0xcb,0xfc = vandnpd -4(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xfa,0x10,0x6c,0xcb,0xfc = vmovss -4(%ebx,%ecx,8), %xmm5
0xc5,0xe8,0x15,0x6c,0xcb,0xfc = vunpckhps -4(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xe8,0x14,0x6c,0xcb,0xfc = vunpcklps -4(%ebx,%ecx,8), %xmm2, %xmm5
0xc5,0xc8,0xc2,0xc8,0x07 = vcmpps $7, %xmm0, %xmm6, %xmm1
0xc5,0xc9,0xc2,0xc8,0x00 = vcmppd $0, %xmm0, %xmm6, %xmm1
0xc5,0xc9,0xc2,0x08,0x00 = vcmppd $0, (%eax), %xmm6, %xmm1
0xc5,0xc9,0xc2,0xc8,0x07 = vcmppd $7, %xmm0, %xmm6, %xmm1
0xc5,0xe8,0xc6,0x5c,0xcb,0xfc,0x08 = vshufps $8, -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xe9,0xc6,0xd9,0x08 = vshufpd $8, %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xc6,0x5c,0xcb,0xfc,0x08 = vshufpd $8, -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xe8,0xc2,0xd9,0x03 = vcmpunordps %xmm1, %xmm2, %xmm3
0xc5,0xe8,0xc2,0x5c,0xcb,0xfc,0x00 = vcmpeqps -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xe8,0xc2,0x5c,0xcb,0xfc,0x02 = vcmpleps -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xe8,0xc2,0x5c,0xcb,0xfc,0x01 = vcmpltps -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xe8,0xc2,0x5c,0xcb,0xfc,0x04 = vcmpneqps -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xe8,0xc2,0x5c,0xcb,0xfc,0x06 = vcmpnleps -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xe8,0xc2,0x5c,0xcb,0xfc,0x05 = vcmpnltps -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xc8,0xc2,0x54,0xcb,0xfc,0x07 = vcmpordps -4(%ebx,%ecx,8), %xmm6, %xmm2
0xc5,0xe8,0xc2,0x5c,0xcb,0xfc,0x03 = vcmpunordps -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xe9,0xc2,0xd9,0x04 = vcmpneqpd %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xc2,0xd9,0x06 = vcmpnlepd %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xc2,0xd9,0x05 = vcmpnltpd %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xc2,0xd9,0x07 = vcmpordpd %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xc2,0xd9,0x03 = vcmpunordpd %xmm1, %xmm2, %xmm3
0xc5,0xe9,0xc2,0x5c,0xcb,0xfc,0x00 = vcmpeqpd -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xe9,0xc2,0x5c,0xcb,0xfc,0x02 = vcmplepd -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xe9,0xc2,0x5c,0xcb,0xfc,0x01 = vcmpltpd -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xe9,0xc2,0x5c,0xcb,0xfc,0x04 = vcmpneqpd -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xe9,0xc2,0x5c,0xcb,0xfc,0x06 = vcmpnlepd -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xe9,0xc2,0x5c,0xcb,0xfc,0x05 = vcmpnltpd -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xc9,0xc2,0x54,0xcb,0xfc,0x07 = vcmpordpd -4(%ebx,%ecx,8), %xmm6, %xmm2
0xc5,0xe9,0xc2,0x5c,0xcb,0xfc,0x03 = vcmpunordpd -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xf8,0x50,0xc2 = vmovmskps %xmm2, %eax
0xc5,0xf9,0x50,0xc2 = vmovmskpd %xmm2, %eax
0xc5,0xfc,0x50,0xc2 = vmovmskps %ymm2, %eax
0xc5,0xfd,0x50,0xc2 = vmovmskpd %ymm2, %eax
0xc5,0xea,0xc2,0xd9,0x04 = vcmpneqss %xmm1, %xmm2, %xmm3
0xc5,0xea,0xc2,0xd9,0x06 = vcmpnless %xmm1, %xmm2, %xmm3
0xc5,0xea,0xc2,0xd9,0x05 = vcmpnltss %xmm1, %xmm2, %xmm3
0xc5,0xea,0xc2,0xd9,0x07 = vcmpordss %xmm1, %xmm2, %xmm3
0xc5,0xea,0xc2,0xd9,0x03 = vcmpunordss %xmm1, %xmm2, %xmm3
0xc5,0xea,0xc2,0x5c,0xcb,0xfc,0x00 = vcmpeqss -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xea,0xc2,0x5c,0xcb,0xfc,0x02 = vcmpless -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xea,0xc2,0x5c,0xcb,0xfc,0x01 = vcmpltss -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xea,0xc2,0x5c,0xcb,0xfc,0x04 = vcmpneqss -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xea,0xc2,0x5c,0xcb,0xfc,0x06 = vcmpnless -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xea,0xc2,0x5c,0xcb,0xfc,0x05 = vcmpnltss -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xca,0xc2,0x54,0xcb,0xfc,0x07 = vcmpordss -4(%ebx,%ecx,8), %xmm6, %xmm2
0xc5,0xea,0xc2,0x5c,0xcb,0xfc,0x03 = vcmpunordss -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xeb,0xc2,0xd9,0x04 = vcmpneqsd %xmm1, %xmm2, %xmm3
0xc5,0xeb,0xc2,0xd9,0x06 = vcmpnlesd %xmm1, %xmm2, %xmm3
0xc5,0xeb,0xc2,0xd9,0x05 = vcmpnltsd %xmm1, %xmm2, %xmm3
0xc5,0xeb,0xc2,0xd9,0x07 = vcmpordsd %xmm1, %xmm2, %xmm3
0xc5,0xeb,0xc2,0xd9,0x03 = vcmpunordsd %xmm1, %xmm2, %xmm3
0xc5,0xeb,0xc2,0x5c,0xcb,0xfc,0x00 = vcmpeqsd -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xeb,0xc2,0x5c,0xcb,0xfc,0x02 = vcmplesd -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xeb,0xc2,0x5c,0xcb,0xfc,0x01 = vcmpltsd -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xeb,0xc2,0x5c,0xcb,0xfc,0x04 = vcmpneqsd -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xeb,0xc2,0x5c,0xcb,0xfc,0x06 = vcmpnlesd -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xeb,0xc2,0x5c,0xcb,0xfc,0x05 = vcmpnltsd -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xcb,0xc2,0x54,0xcb,0xfc,0x07 = vcmpordsd -4(%ebx,%ecx,8), %xmm6, %xmm2
0xc5,0xeb,0xc2,0x5c,0xcb,0xfc,0x03 = vcmpunordsd -4(%ebx,%ecx,8), %xmm2, %xmm3
0xc5,0xf8,0x2e,0xd1 = vucomiss %xmm1, %xmm2
0xc5,0xf8,0x2e,0x10 = vucomiss (%eax), %xmm2
0xc5,0xf8,0x2f,0xd1 = vcomiss %xmm1, %xmm2
0xc5,0xf8,0x2f,0x10 = vcomiss (%eax), %xmm2
0xc5,0xf9,0x2e,0xd1 = vucomisd %xmm1, %xmm2
0xc5,0xf9,0x2e,0x10 = vucomisd (%eax), %xmm2
0xc5,0xf9,0x2f,0xd1 = vcomisd %xmm1, %xmm2
0xc5,0xf9,0x2f,0x10 = vcomisd (%eax), %xmm2
0xc5,0xfa,0x2c,0xc1 = vcvttss2si %xmm1, %eax
0xc5,0xfa,0x2c,0x01 = vcvttss2si (%ecx), %eax
0xc5,0xf2,0x2a,0x10 = vcvtsi2ss (%eax), %xmm1, %xmm2
0xc5,0xf2,0x2a,0x10 = vcvtsi2ss (%eax), %xmm1, %xmm2
0xc5,0xf2,0x2a,0x10 = vcvtsi2ssl (%eax), %xmm1, %xmm2
0xc5,0xf2,0x2a,0x10 = vcvtsi2ssl (%eax), %xmm1, %xmm2
0xc5,0xfb,0x2c,0xc1 = vcvttsd2si %xmm1, %eax
0xc5,0xfb,0x2c,0x01 = vcvttsd2si (%ecx), %eax
0xc5,0xf3,0x2a,0x10 = vcvtsi2sd (%eax), %xmm1, %xmm2
0xc5,0xf3,0x2a,0x10 = vcvtsi2sd (%eax), %xmm1, %xmm2
0xc5,0xf3,0x2a,0x10 = vcvtsi2sdl (%eax), %xmm1, %xmm2
0xc5,0xf3,0x2a,0x10 = vcvtsi2sdl (%eax), %xmm1, %xmm2
0xc5,0xf8,0x28,0x10 = vmovaps (%eax), %xmm2
0xc5,0xf8,0x28,0xd1 = vmovaps %xmm1, %xmm2
0xc5,0xf8,0x29,0x08 = vmovaps %xmm1, (%eax)
0xc5,0xf9,0x28,0xd1 = vmovapd %xmm1, %xmm2
0xc5,0xf9,0x29,0x08 = vmovapd %xmm1, (%eax)
0xc5,0xf8,0x10,0xd1 = vmovups %xmm1, %xmm2
0xc5,0xf8,0x11,0x08 = vmovups %xmm1, (%eax)
0xc5,0xf9,0x10,0xd1 = vmovupd %xmm1, %xmm2
0xc5,0xf9,0x11,0x08 = vmovupd %xmm1, (%eax)
0xc5,0xf9,0x13,0x08 = vmovlpd %xmm1, (%eax)
0xc5,0xf8,0x17,0x08 = vmovhps %xmm1, (%eax)
0xc5,0xf9,0x17,0x08 = vmovhpd %xmm1, (%eax)
0xc5,0xfa,0x2d,0xc1 = vcvtss2si %xmm1, %eax
0xc5,0xfa,0x2d,0x18 = vcvtss2si (%eax), %ebx
0xc5,0xfa,0x2d,0xc1 = vcvtss2sil %xmm1, %eax
0xc5,0xfa,0x2d,0x18 = vcvtss2sil (%eax), %ebx
0xc5,0xf8,0x5b,0xf5 = vcvtdq2ps %xmm5, %xmm6
0xc5,0xf8,0x5b,0x30 = vcvtdq2ps (%eax), %xmm6
0xc5,0xdb,0x5a,0x30 = vcvtsd2ss (%eax), %xmm4, %xmm6
0xc5,0xf9,0x5b,0xda = vcvtps2dq %xmm2, %xmm3
0xc5,0xf9,0x5b,0x18 = vcvtps2dq (%eax), %xmm3
0xc5,0xda,0x5a,0x30 = vcvtss2sd (%eax), %xmm4, %xmm6
0xc5,0xf8,0x5b,0xf4 = vcvtdq2ps %xmm4, %xmm6
0xc5,0xf8,0x5b,0x21 = vcvtdq2ps (%ecx), %xmm4
0xc5,0xfa,0x5b,0xda = vcvttps2dq %xmm2, %xmm3
0xc5,0xfa,0x5b,0x18 = vcvttps2dq (%eax), %xmm3
0xc5,0xf8,0x5a,0xda = vcvtps2pd %xmm2, %xmm3
0xc5,0xf8,0x5a,0x18 = vcvtps2pd (%eax), %xmm3
0xc5,0xf9,0x5a,0xda = vcvtpd2ps %xmm2, %xmm3
0xc5,0xf9,0x51,0xd1 = vsqrtpd %xmm1, %xmm2
0xc5,0xf9,0x51,0x10 = vsqrtpd (%eax), %xmm2
0xc5,0xf8,0x51,0xd1 = vsqrtps %xmm1, %xmm2
0xc5,0xf8,0x51,0x10 = vsqrtps (%eax), %xmm2
0xc5,0xf8,0x52,0xd1 = vrsqrtps %xmm1, %xmm2
0xc5,0xf8,0x52,0x10 = vrsqrtps (%eax), %xmm2
0xc5,0xea,0x52,0x18 = vrsqrtss (%eax), %xmm2, %xmm3
0xc5,0xf8,0x53,0xd1 = vrcpps %xmm1, %xmm2
0xc5,0xf8,0x53,0x10 = vrcpps (%eax), %xmm2
0xc5,0xf9,0xe7,0x08 = vmovntdq %xmm1, (%eax)
0xc5,0xf9,0x2b,0x08 = vmovntpd %xmm1, (%eax)
0xc5,0xf8,0x2b,0x08 = vmovntps %xmm1, (%eax)
0xc5,0xf8,0xae,0x10 = vldmxcsr (%eax)
0xc5,0xf8,0xae,0x18 = vstmxcsr (%eax)
0xc5,0xf8,0xae,0x15,0xef,0xbe,0xad,0xde = vldmxcsr 0xdeadbeef
0xc5,0xf8,0xae,0x1d,0xef,0xbe,0xad,0xde = vstmxcsr 0xdeadbeef
0xc5,0xe9,0xd8,0x18 = vpsubusb (%eax), %xmm2, %xmm3
0xc5,0xe9,0xd9,0x18 = vpsubusw (%eax), %xmm2, %xmm3
0xc5,0xe9,0xdc,0x18 = vpaddusb (%eax), %xmm2, %xmm3
0xc5,0xe9,0xdd,0x18 = vpaddusw (%eax), %xmm2, %xmm3
0xc5,0xe9,0xe4,0x18 = vpmulhuw (%eax), %xmm2, %xmm3
0xc5,0xe9,0xf4,0x18 = vpmuludq (%eax), %xmm2, %xmm3
0xc5,0xe1,0x72,0xf2,0x0a = vpslld $10, %xmm2, %xmm3
0xc5,0xe1,0x73,0xfa,0x0a = vpslldq $10, %xmm2, %xmm3
0xc5,0xe1,0x73,0xf2,0x0a = vpsllq $10, %xmm2, %xmm3
0xc5,0xe1,0x71,0xf2,0x0a = vpsllw $10, %xmm2, %xmm3
0xc5,0xe1,0x72,0xe2,0x0a = vpsrad $10, %xmm2, %xmm3
0xc5,0xe1,0x71,0xe2,0x0a = vpsraw $10, %xmm2, %xmm3
0xc5,0xe1,0x72,0xd2,0x0a = vpsrld $10, %xmm2, %xmm3
0xc5,0xe1,0x73,0xda,0x0a = vpsrldq $10, %xmm2, %xmm3
0xc5,0xe1,0x73,0xd2,0x0a = vpsrlq $10, %xmm2, %xmm3
0xc5,0xe1,0x71,0xd2,0x0a = vpsrlw $10, %xmm2, %xmm3
0xc5,0xe1,0x72,0xf2,0x0a = vpslld $10, %xmm2, %xmm3
0xc5,0xe9,0x74,0x18 = vpcmpeqb (%eax), %xmm2, %xmm3
0xc5,0xe9,0x75,0x18 = vpcmpeqw (%eax), %xmm2, %xmm3
0xc5,0xe9,0x76,0x18 = vpcmpeqd (%eax), %xmm2, %xmm3
0xc5,0xe9,0x64,0x18 = vpcmpgtb (%eax), %xmm2, %xmm3
0xc5,0xe9,0x65,0x18 = vpcmpgtw (%eax), %xmm2, %xmm3
0xc5,0xe9,0x66,0x18 = vpcmpgtd (%eax), %xmm2, %xmm3
0xc5,0xe9,0x63,0x18 = vpacksswb (%eax), %xmm2, %xmm3
0xc5,0xe9,0x6b,0x18 = vpackssdw (%eax), %xmm2, %xmm3
0xc5,0xe9,0x67,0x18 = vpackuswb (%eax), %xmm2, %xmm3
0xc5,0xf9,0x70,0xda,0x04 = vpshufd $4, %xmm2, %xmm3
0xc5,0xf9,0x70,0x18,0x04 = vpshufd $4, (%eax), %xmm3
0xc5,0xfa,0x70,0xda,0x04 = vpshufhw $4, %xmm2, %xmm3
0xc5,0xfa,0x70,0x18,0x04 = vpshufhw $4, (%eax), %xmm3
0xc5,0xfb,0x70,0xda,0x04 = vpshuflw $4, %xmm2, %xmm3
0xc5,0xfb,0x70,0x18,0x04 = vpshuflw $4, (%eax), %xmm3
0xc5,0xe9,0x61,0x18 = vpunpcklwd (%eax), %xmm2, %xmm3
0xc5,0xe9,0x62,0x18 = vpunpckldq (%eax), %xmm2, %xmm3
0xc5,0xe9,0x6c,0x18 = vpunpcklqdq (%eax), %xmm2, %xmm3
0xc5,0xe9,0x68,0x18 = vpunpckhbw (%eax), %xmm2, %xmm3
0xc5,0xe9,0x69,0x18 = vpunpckhwd (%eax), %xmm2, %xmm3
0xc5,0xe9,0x6a,0x18 = vpunpckhdq (%eax), %xmm2, %xmm3
0xc5,0xe9,0x6d,0x18 = vpunpckhqdq (%eax), %xmm2, %xmm3
0xc5,0xe9,0xc4,0xd8,0x07 = vpinsrw $7, %eax, %xmm2, %xmm3
0xc5,0xe9,0xc4,0x18,0x07 = vpinsrw $7, (%eax), %xmm2, %xmm3
0xc5,0xf9,0xc5,0xc2,0x07 = vpextrw $7, %xmm2, %eax
0xc5,0xf9,0xd7,0xc1 = vpmovmskb %xmm1, %eax
0xc5,0xf9,0xf7,0xd1 = vmaskmovdqu %xmm1, %xmm2
0xc5,0xf9,0x7e,0xc8 = vmovd %xmm1, %eax
0xc5,0xf9,0x7e,0x08 = vmovd %xmm1, (%eax)
0xc5,0xf9,0x6e,0xc8 = vmovd %eax, %xmm1
0xc5,0xf9,0x6e,0x08 = vmovd (%eax), %xmm1
0xc5,0xf9,0xd6,0x08 = vmovq %xmm1, (%eax)
0xc5,0xfa,0x7e,0xd1 = vmovq %xmm1, %xmm2
0xc5,0xfa,0x7e,0x08 = vmovq (%eax), %xmm1
0xc5,0xfb,0xe6,0xd1 = vcvtpd2dq %xmm1, %xmm2
0xc5,0xfa,0xe6,0xd1 = vcvtdq2pd %xmm1, %xmm2
0xc5,0xfa,0xe6,0x10 = vcvtdq2pd (%eax), %xmm2
0xc5,0xfa,0x16,0xd1 = vmovshdup %xmm1, %xmm2
0xc5,0xfa,0x16,0x10 = vmovshdup (%eax), %xmm2
0xc5,0xfa,0x12,0xd1 = vmovsldup %xmm1, %xmm2
0xc5,0xfa,0x12,0x10 = vmovsldup (%eax), %xmm2
0xc5,0xfb,0x12,0xd1 = vmovddup %xmm1, %xmm2
0xc5,0xfb,0x12,0x10 = vmovddup (%eax), %xmm2
0xc5,0xf3,0xd0,0x10 = vaddsubps (%eax), %xmm1, %xmm2
0xc5,0xf1,0xd0,0x10 = vaddsubpd (%eax), %xmm1, %xmm2
0xc4,0xe2,0x79,0x1c,0xd1 = vpabsb %xmm1, %xmm2
0xc4,0xe2,0x79,0x1c,0x10 = vpabsb (%eax), %xmm2
0xc4,0xe2,0x79,0x1d,0xd1 = vpabsw %xmm1, %xmm2
0xc4,0xe2,0x79,0x1d,0x10 = vpabsw (%eax), %xmm2
0xc4,0xe2,0x79,0x1e,0xd1 = vpabsd %xmm1, %xmm2
0xc4,0xe2,0x79,0x1e,0x10 = vpabsd (%eax), %xmm2
0xc4,0xe2,0x69,0x01,0x18 = vphaddw (%eax), %xmm2, %xmm3
0xc4,0xe2,0x69,0x02,0x18 = vphaddd (%eax), %xmm2, %xmm3
0xc4,0xe2,0x69,0x03,0x18 = vphaddsw (%eax), %xmm2, %xmm3
0xc4,0xe2,0x69,0x05,0x18 = vphsubw (%eax), %xmm2, %xmm3
0xc4,0xe2,0x69,0x06,0x18 = vphsubd (%eax), %xmm2, %xmm3
0xc4,0xe2,0x69,0x07,0x18 = vphsubsw (%eax), %xmm2, %xmm3
0xc4,0xe2,0x69,0x04,0xd9 = vpmaddubsw %xmm1, %xmm2, %xmm3
0xc4,0xe2,0x69,0x04,0x18 = vpmaddubsw (%eax), %xmm2, %xmm3
0xc4,0xe2,0x69,0x00,0x18 = vpshufb (%eax), %xmm2, %xmm3
0xc4,0xe2,0x69,0x08,0x18 = vpsignb (%eax), %xmm2, %xmm3
0xc4,0xe2,0x69,0x09,0x18 = vpsignw (%eax), %xmm2, %xmm3
0xc4,0xe2,0x69,0x0a,0x18 = vpsignd (%eax), %xmm2, %xmm3
0xc4,0xe2,0x69,0x0b,0xd9 = vpmulhrsw %xmm1, %xmm2, %xmm3
0xc4,0xe2,0x69,0x0b,0x18 = vpmulhrsw (%eax), %xmm2, %xmm3
0xc4,0xe3,0x69,0x0f,0xd9,0x07 = vpalignr $7, %xmm1, %xmm2, %xmm3
0xc4,0xe3,0x69,0x0f,0x18,0x07 = vpalignr $7, (%eax), %xmm2, %xmm3
0xc4,0xe3,0x69,0x0b,0xd9,0x07 = vroundsd $7, %xmm1, %xmm2, %xmm3
0xc4,0xe3,0x69,0x0a,0xd9,0x07 = vroundss $7, %xmm1, %xmm2, %xmm3
0xc4,0xe3,0x69,0x0a,0x18,0x07 = vroundss $7, (%eax), %xmm2, %xmm3
0xc4,0xe3,0x79,0x09,0xda,0x07 = vroundpd $7, %xmm2, %xmm3
0xc4,0xe3,0x79,0x09,0x18,0x07 = vroundpd $7, (%eax), %xmm3
0xc4,0xe3,0x79,0x08,0xda,0x07 = vroundps $7, %xmm2, %xmm3
0xc4,0xe3,0x79,0x08,0x18,0x07 = vroundps $7, (%eax), %xmm3
0xc4,0xe2,0x79,0x41,0xda = vphminposuw %xmm2, %xmm3
0xc4,0xe2,0x79,0x41,0x10 = vphminposuw (%eax), %xmm2
0xc4,0xe2,0x61,0x2b,0xca = vpackusdw %xmm2, %xmm3, %xmm1
0xc4,0xe2,0x69,0x2b,0x18 = vpackusdw (%eax), %xmm2, %xmm3
0xc4,0xe2,0x69,0x29,0x18 = vpcmpeqq (%eax), %xmm2, %xmm3
0xc4,0xe2,0x69,0x38,0x18 = vpminsb (%eax), %xmm2, %xmm3
0xc4,0xe2,0x69,0x39,0x18 = vpminsd (%eax), %xmm2, %xmm3
0xc4,0xe2,0x69,0x3b,0x18 = vpminud (%eax), %xmm2, %xmm3
0xc4,0xe2,0x69,0x3a,0x18 = vpminuw (%eax), %xmm2, %xmm3
0xc4,0xe2,0x69,0x3c,0x18 = vpmaxsb (%eax), %xmm2, %xmm3
0xc4,0xe2,0x69,0x3d,0x18 = vpmaxsd (%eax), %xmm2, %xmm3
0xc4,0xe2,0x69,0x3f,0x18 = vpmaxud (%eax), %xmm2, %xmm3
0xc4,0xe2,0x69,0x3e,0x18 = vpmaxuw (%eax), %xmm2, %xmm3
0xc4,0xe2,0x69,0x28,0x18 = vpmuldq (%eax), %xmm2, %xmm3
0xc4,0xe2,0x51,0x40,0x18 = vpmulld (%eax), %xmm5, %xmm3
0xc4,0xe3,0x51,0x0c,0xca,0x03 = vblendps $3, %xmm2, %xmm5, %xmm1
0xc4,0xe3,0x51,0x0c,0x08,0x03 = vblendps $3, (%eax), %xmm5, %xmm1
0xc4,0xe3,0x51,0x0d,0xca,0x03 = vblendpd $3, %xmm2, %xmm5, %xmm1
0xc4,0xe3,0x51,0x0d,0x08,0x03 = vblendpd $3, (%eax), %xmm5, %xmm1
0xc4,0xe3,0x51,0x0e,0xca,0x03 = vpblendw $3, %xmm2, %xmm5, %xmm1
0xc4,0xe3,0x51,0x0e,0x08,0x03 = vpblendw $3, (%eax), %xmm5, %xmm1
0xc4,0xe3,0x51,0x42,0xca,0x03 = vmpsadbw $3, %xmm2, %xmm5, %xmm1
0xc4,0xe3,0x51,0x42,0x08,0x03 = vmpsadbw $3, (%eax), %xmm5, %xmm1
0xc4,0xe3,0x51,0x40,0xca,0x03 = vdpps $3, %xmm2, %xmm5, %xmm1
0xc4,0xe3,0x51,0x40,0x08,0x03 = vdpps $3, (%eax), %xmm5, %xmm1
0xc4,0xe3,0x51,0x41,0xca,0x03 = vdppd $3, %xmm2, %xmm5, %xmm1
0xc4,0xe3,0x51,0x41,0x08,0x03 = vdppd $3, (%eax), %xmm5, %xmm1
0xc4,0xe2,0x79,0x23,0xea = vpmovsxwd %xmm2, %xmm5
0xc4,0xe2,0x79,0x23,0x10 = vpmovsxwd (%eax), %xmm2
0xc4,0xe2,0x79,0x25,0xea = vpmovsxdq %xmm2, %xmm5
0xc4,0xe2,0x79,0x25,0x10 = vpmovsxdq (%eax), %xmm2
0xc4,0xe2,0x79,0x30,0xea = vpmovzxbw %xmm2, %xmm5
0xc4,0xe2,0x79,0x30,0x10 = vpmovzxbw (%eax), %xmm2
0xc4,0xe2,0x79,0x33,0xea = vpmovzxwd %xmm2, %xmm5
0xc4,0xe2,0x79,0x33,0x10 = vpmovzxwd (%eax), %xmm2
0xc4,0xe2,0x79,0x35,0xea = vpmovzxdq %xmm2, %xmm5
0xc4,0xe2,0x79,0x35,0x10 = vpmovzxdq (%eax), %xmm2
0xc4,0xe2,0x79,0x22,0xea = vpmovsxbq %xmm2, %xmm5
0xc4,0xe2,0x79,0x22,0x10 = vpmovsxbq (%eax), %xmm2
0xc4,0xe2,0x79,0x32,0xea = vpmovzxbq %xmm2, %xmm5
0xc4,0xe2,0x79,0x32,0x10 = vpmovzxbq (%eax), %xmm2
0xc4,0xe2,0x79,0x21,0xea = vpmovsxbd %xmm2, %xmm5
0xc4,0xe2,0x79,0x21,0x10 = vpmovsxbd (%eax), %xmm2
0xc4,0xe2,0x79,0x24,0xea = vpmovsxwq %xmm2, %xmm5
0xc4,0xe2,0x79,0x24,0x10 = vpmovsxwq (%eax), %xmm2
0xc4,0xe2,0x79,0x31,0xea = vpmovzxbd %xmm2, %xmm5
0xc4,0xe2,0x79,0x31,0x10 = vpmovzxbd (%eax), %xmm2
0xc4,0xe2,0x79,0x34,0xea = vpmovzxwq %xmm2, %xmm5
0xc4,0xe2,0x79,0x34,0x10 = vpmovzxwq (%eax), %xmm2
0xc5,0xf9,0xc5,0xc2,0x07 = vpextrw $7, %xmm2, %eax
0xc4,0xe3,0x79,0x15,0x10,0x07 = vpextrw $7, %xmm2, (%eax)
0xc4,0xe3,0x79,0x16,0xd0,0x07 = vpextrd $7, %xmm2, %eax
0xc4,0xe3,0x79,0x14,0xd0,0x07 = vpextrb $7, %xmm2, %eax
0xc4,0xe3,0x79,0x14,0x10,0x07 = vpextrb $7, %xmm2, (%eax)
0xc4,0xe3,0x79,0x17,0xd0,0x07 = vextractps $7, %xmm2, %eax
0xc5,0xe9,0xc4,0xe8,0x07 = vpinsrw $7, %eax, %xmm2, %xmm5
0xc5,0xe9,0xc4,0x28,0x07 = vpinsrw $7, (%eax), %xmm2, %xmm5
0xc4,0xe3,0x69,0x22,0xe8,0x07 = vpinsrd $7, %eax, %xmm2, %xmm5
0xc4,0xe3,0x69,0x22,0x28,0x07 = vpinsrd $7, (%eax), %xmm2, %xmm5
0xc4,0xe3,0x51,0x21,0x08,0x07 = vinsertps $7, (%eax), %xmm5, %xmm1
0xc4,0xe2,0x79,0x17,0xea = vptest %xmm2, %xmm5
0xc4,0xe2,0x79,0x17,0x10 = vptest (%eax), %xmm2
0xc4,0xe2,0x79,0x2a,0x10 = vmovntdqa (%eax), %xmm2
0xc4,0xe2,0x51,0x37,0x18 = vpcmpgtq (%eax), %xmm5, %xmm3
0xc4,0xe3,0x79,0x62,0xea,0x07 = vpcmpistrm $7, %xmm2, %xmm5
0xc4,0xe3,0x79,0x62,0x28,0x07 = vpcmpistrm $7, (%eax), %xmm5
0xc4,0xe3,0x79,0x60,0xea,0x07 = vpcmpestrm $7, %xmm2, %xmm5
0xc4,0xe3,0x79,0x60,0x28,0x07 = vpcmpestrm $7, (%eax), %xmm5
0xc4,0xe3,0x79,0x63,0xea,0x07 = vpcmpistri $7, %xmm2, %xmm5
0xc4,0xe3,0x79,0x63,0x28,0x07 = vpcmpistri $7, (%eax), %xmm5
0xc4,0xe3,0x79,0x61,0xea,0x07 = vpcmpestri $7, %xmm2, %xmm5
0xc4,0xe3,0x79,0x61,0x28,0x07 = vpcmpestri $7, (%eax), %xmm5
0xc4,0xe2,0x79,0xdb,0xea = vaesimc %xmm2, %xmm5
0xc4,0xe2,0x79,0xdb,0x10 = vaesimc (%eax), %xmm2
0xc4,0xe2,0x51,0xdc,0x18 = vaesenc (%eax), %xmm5, %xmm3
0xc4,0xe2,0x51,0xdd,0xca = vaesenclast %xmm2, %xmm5, %xmm1
0xc4,0xe2,0x51,0xdd,0x18 = vaesenclast (%eax), %xmm5, %xmm3
0xc4,0xe2,0x51,0xde,0x18 = vaesdec (%eax), %xmm5, %xmm3
0xc4,0xe2,0x51,0xdf,0xca = vaesdeclast %xmm2, %xmm5, %xmm1
0xc4,0xe2,0x51,0xdf,0x18 = vaesdeclast (%eax), %xmm5, %xmm3
0xc4,0xe3,0x79,0xdf,0xea,0x07 = vaeskeygenassist $7, %xmm2, %xmm5
0xc4,0xe3,0x79,0xdf,0x28,0x07 = vaeskeygenassist $7, (%eax), %xmm5
0xc5,0xe8,0xc2,0xd9,0x08 = vcmpeq_uqps %xmm1, %xmm2, %xmm3
0xc5,0xe8,0xc2,0xd9,0x09 = vcmpngeps %xmm1, %xmm2, %xmm3
0xc5,0xe8,0xc2,0xd9,0x0a = vcmpngtps %xmm1, %xmm2, %xmm3
0xc5,0xe8,0xc2,0xd9,0x0b = vcmpfalseps %xmm1, %xmm2, %xmm3
0xc5,0xe8,0xc2,0xd9,0x0c = vcmpneq_oqps %xmm1, %xmm2, %xmm3
0xc5,0xe8,0xc2,0xd9,0x0f = vcmptrueps %xmm1, %xmm2, %xmm3
0xc5,0xe8,0xc2,0xd9,0x1a = vcmpngt_uqps %xmm1, %xmm2, %xmm3
0xc5,0xe8,0xc2,0xd9,0x1b = vcmpfalse_osps %xmm1, %xmm2, %xmm3
0xc5,0xe8,0xc2,0xd9,0x1c = vcmpneq_osps %xmm1, %xmm2, %xmm3
0xc5,0xe8,0xc2,0xd9,0x1d = vcmpge_oqps %xmm1, %xmm2, %xmm3
0xc5,0xe8,0xc2,0xd9,0x1e = vcmpgt_oqps %xmm1, %xmm2, %xmm3
0xc5,0xe8,0xc2,0xd9,0x1f = vcmptrue_usps %xmm1, %xmm2, %xmm3
0xc5,0xfc,0x28,0xd1 = vmovaps %ymm1, %ymm2
0xc5,0xfc,0x29,0x08 = vmovaps %ymm1, (%eax)
0xc5,0xfd,0x28,0xd1 = vmovapd %ymm1, %ymm2
0xc5,0xfd,0x29,0x08 = vmovapd %ymm1, (%eax)
0xc5,0xfc,0x10,0xd1 = vmovups %ymm1, %ymm2
0xc5,0xfc,0x11,0x08 = vmovups %ymm1, (%eax)
0xc5,0xfd,0x10,0xd1 = vmovupd %ymm1, %ymm2
0xc5,0xfd,0x11,0x08 = vmovupd %ymm1, (%eax)
0xc5,0xed,0x15,0x6c,0xcb,0xfc = vunpckhpd -4(%ebx,%ecx,8), %ymm2, %ymm5
0xc5,0xed,0x14,0x6c,0xcb,0xfc = vunpcklpd -4(%ebx,%ecx,8), %ymm2, %ymm5
0xc5,0xfd,0xe7,0x08 = vmovntdq %ymm1, (%eax)
0xc5,0xfd,0x2b,0x08 = vmovntpd %ymm1, (%eax)
0xc5,0xfc,0x2b,0x08 = vmovntps %ymm1, (%eax)
0xc5,0xf8,0x50,0xc2 = vmovmskps %xmm2, %eax
0xc5,0xf9,0x50,0xc2 = vmovmskpd %xmm2, %eax
0xc5,0xfd,0x51,0xd1 = vsqrtpd %ymm1, %ymm2
0xc5,0xfd,0x51,0x10 = vsqrtpd (%eax), %ymm2
0xc5,0xfc,0x51,0xd1 = vsqrtps %ymm1, %ymm2
0xc5,0xfc,0x51,0x10 = vsqrtps (%eax), %ymm2
0xc5,0xfc,0x52,0xd1 = vrsqrtps %ymm1, %ymm2
0xc5,0xfc,0x52,0x10 = vrsqrtps (%eax), %ymm2
0xc5,0xfc,0x53,0xd1 = vrcpps %ymm1, %ymm2
0xc5,0xfc,0x53,0x10 = vrcpps (%eax), %ymm2
0xc5,0xec,0x54,0x6c,0xcb,0xfc = vandps -4(%ebx,%ecx,8), %ymm2, %ymm5
0xc5,0xed,0x54,0x6c,0xcb,0xfc = vandpd -4(%ebx,%ecx,8), %ymm2, %ymm5
0xc5,0xec,0x56,0x6c,0xcb,0xfc = vorps -4(%ebx,%ecx,8), %ymm2, %ymm5
0xc5,0xed,0x56,0x6c,0xcb,0xfc = vorpd -4(%ebx,%ecx,8), %ymm2, %ymm5
0xc5,0xec,0x57,0x6c,0xcb,0xfc = vxorps -4(%ebx,%ecx,8), %ymm2, %ymm5
0xc5,0xed,0x57,0x6c,0xcb,0xfc = vxorpd -4(%ebx,%ecx,8), %ymm2, %ymm5
0xc5,0xec,0x55,0x6c,0xcb,0xfc = vandnps -4(%ebx,%ecx,8), %ymm2, %ymm5
0xc5,0xed,0x55,0x6c,0xcb,0xfc = vandnpd -4(%ebx,%ecx,8), %ymm2, %ymm5
0xc5,0xfc,0x5a,0xd3 = vcvtps2pd %xmm3, %ymm2
0xc5,0xfc,0x5a,0x10 = vcvtps2pd (%eax), %ymm2
0xc5,0xfe,0xe6,0xd3 = vcvtdq2pd %xmm3, %ymm2
0xc5,0xfe,0xe6,0x10 = vcvtdq2pd (%eax), %ymm2
0xc5,0xfc,0x5b,0xea = vcvtdq2ps %ymm2, %ymm5
0xc5,0xfc,0x5b,0x10 = vcvtdq2ps (%eax), %ymm2
0xc5,0xfd,0x5b,0xea = vcvtps2dq %ymm2, %ymm5
0xc5,0xfd,0x5b,0x28 = vcvtps2dq (%eax), %ymm5
0xc5,0xfe,0x5b,0xea = vcvttps2dq %ymm2, %ymm5
0xc5,0xfe,0x5b,0x28 = vcvttps2dq (%eax), %ymm5
0xc5,0xf9,0xe6,0xe9 = vcvttpd2dq %xmm1, %xmm5
0xc5,0xfd,0xe6,0xea = vcvttpd2dq %ymm2, %xmm5
0xc5,0xf9,0xe6,0xe9 = vcvttpd2dqx %xmm1, %xmm5
0xc5,0xf9,0xe6,0x08 = vcvttpd2dqx (%eax), %xmm1
0xc5,0xfd,0xe6,0xca = vcvttpd2dqy %ymm2, %xmm1
0xc5,0xfd,0xe6,0x08 = vcvttpd2dqy (%eax), %xmm1
0xc5,0xfd,0x5a,0xea = vcvtpd2ps %ymm2, %xmm5
0xc5,0xf9,0x5a,0xe9 = vcvtpd2psx %xmm1, %xmm5
0xc5,0xf9,0x5a,0x08 = vcvtpd2psx (%eax), %xmm1
0xc5,0xfd,0x5a,0xca = vcvtpd2psy %ymm2, %xmm1
0xc5,0xfd,0x5a,0x08 = vcvtpd2psy (%eax), %xmm1
0xc5,0xff,0xe6,0xea = vcvtpd2dq %ymm2, %xmm5
0xc5,0xff,0xe6,0xca = vcvtpd2dqy %ymm2, %xmm1
0xc5,0xff,0xe6,0x08 = vcvtpd2dqy (%eax), %xmm1
0xc5,0xfb,0xe6,0xe9 = vcvtpd2dqx %xmm1, %xmm5
0xc5,0xfb,0xe6,0x08 = vcvtpd2dqx (%eax), %xmm1
0xc5,0xec,0xc2,0xd9,0x04 = vcmpneqps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x06 = vcmpnleps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x05 = vcmpnltps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x07 = vcmpordps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x03 = vcmpunordps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0x5c,0xcb,0xfc,0x00 = vcmpeqps -4(%ebx,%ecx,8), %ymm2, %ymm3
0xc5,0xec,0xc2,0x5c,0xcb,0xfc,0x02 = vcmpleps -4(%ebx,%ecx,8), %ymm2, %ymm3
0xc5,0xec,0xc2,0x5c,0xcb,0xfc,0x01 = vcmpltps -4(%ebx,%ecx,8), %ymm2, %ymm3
0xc5,0xec,0xc2,0x5c,0xcb,0xfc,0x04 = vcmpneqps -4(%ebx,%ecx,8), %ymm2, %ymm3
0xc5,0xec,0xc2,0x5c,0xcb,0xfc,0x06 = vcmpnleps -4(%ebx,%ecx,8), %ymm2, %ymm3
0xc5,0xec,0xc2,0x5c,0xcb,0xfc,0x05 = vcmpnltps -4(%ebx,%ecx,8), %ymm2, %ymm3
0xc5,0xcc,0xc2,0x54,0xcb,0xfc,0x07 = vcmpordps -4(%ebx,%ecx,8), %ymm6, %ymm2
0xc5,0xec,0xc2,0x5c,0xcb,0xfc,0x03 = vcmpunordps -4(%ebx,%ecx,8), %ymm2, %ymm3
0xc5,0xed,0xc2,0xd9,0x04 = vcmpneqpd %ymm1, %ymm2, %ymm3
0xc5,0xed,0xc2,0xd9,0x06 = vcmpnlepd %ymm1, %ymm2, %ymm3
0xc5,0xed,0xc2,0xd9,0x05 = vcmpnltpd %ymm1, %ymm2, %ymm3
0xc5,0xed,0xc2,0xd9,0x07 = vcmpordpd %ymm1, %ymm2, %ymm3
0xc5,0xed,0xc2,0xd9,0x03 = vcmpunordpd %ymm1, %ymm2, %ymm3
0xc5,0xed,0xc2,0x5c,0xcb,0xfc,0x00 = vcmpeqpd -4(%ebx,%ecx,8), %ymm2, %ymm3
0xc5,0xed,0xc2,0x5c,0xcb,0xfc,0x02 = vcmplepd -4(%ebx,%ecx,8), %ymm2, %ymm3
0xc5,0xed,0xc2,0x5c,0xcb,0xfc,0x01 = vcmpltpd -4(%ebx,%ecx,8), %ymm2, %ymm3
0xc5,0xed,0xc2,0x5c,0xcb,0xfc,0x04 = vcmpneqpd -4(%ebx,%ecx,8), %ymm2, %ymm3
0xc5,0xed,0xc2,0x5c,0xcb,0xfc,0x06 = vcmpnlepd -4(%ebx,%ecx,8), %ymm2, %ymm3
0xc5,0xed,0xc2,0x5c,0xcb,0xfc,0x05 = vcmpnltpd -4(%ebx,%ecx,8), %ymm2, %ymm3
0xc5,0xcd,0xc2,0x54,0xcb,0xfc,0x07 = vcmpordpd -4(%ebx,%ecx,8), %ymm6, %ymm2
0xc5,0xed,0xc2,0x5c,0xcb,0xfc,0x03 = vcmpunordpd -4(%ebx,%ecx,8), %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x08 = vcmpeq_uqps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x09 = vcmpngeps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x0a = vcmpngtps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x0b = vcmpfalseps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x0c = vcmpneq_oqps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x0f = vcmptrueps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x10 = vcmpeq_osps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x11 = vcmplt_oqps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x12 = vcmple_oqps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x13 = vcmpunord_sps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x14 = vcmpneq_usps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x15 = vcmpnlt_uqps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x16 = vcmpnle_uqps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x17 = vcmpord_sps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x18 = vcmpeq_usps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x19 = vcmpnge_uqps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x1a = vcmpngt_uqps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x1b = vcmpfalse_osps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x1c = vcmpneq_osps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x1d = vcmpge_oqps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x1e = vcmpgt_oqps %ymm1, %ymm2, %ymm3
0xc5,0xec,0xc2,0xd9,0x1f = vcmptrue_usps %ymm1, %ymm2, %ymm3
0xc5,0xf5,0xd0,0x10 = vaddsubpd (%eax), %ymm1, %ymm2
0xc4,0xe3,0x55,0x0c,0xca,0x03 = vblendps $3, %ymm2, %ymm5, %ymm1
0xc4,0xe3,0x55,0x0c,0x08,0x03 = vblendps $3, (%eax), %ymm5, %ymm1
0xc4,0xe3,0x55,0x0d,0xca,0x03 = vblendpd $3, %ymm2, %ymm5, %ymm1
0xc4,0xe3,0x55,0x0d,0x08,0x03 = vblendpd $3, (%eax), %ymm5, %ymm1
0xc4,0xe3,0x55,0x40,0xca,0x03 = vdpps $3, %ymm2, %ymm5, %ymm1
0xc4,0xe3,0x55,0x40,0x08,0x03 = vdpps $3, (%eax), %ymm5, %ymm1
0xc4,0xe2,0x7d,0x1a,0x10 = vbroadcastf128 (%eax), %ymm2
0xc4,0xe2,0x7d,0x19,0x10 = vbroadcastsd (%eax), %ymm2
0xc4,0xe2,0x79,0x18,0x10 = vbroadcastss (%eax), %xmm2
0xc4,0xe2,0x7d,0x18,0x10 = vbroadcastss (%eax), %ymm2
0xc4,0xe3,0x6d,0x18,0xea,0x07 = vinsertf128 $7, %xmm2, %ymm2, %ymm5
0xc4,0xe3,0x6d,0x18,0x28,0x07 = vinsertf128 $7, (%eax), %ymm2, %ymm5
0xc4,0xe3,0x7d,0x19,0xd2,0x07 = vextractf128 $7, %ymm2, %xmm2
0xc4,0xe3,0x7d,0x19,0x10,0x07 = vextractf128 $7, %ymm2, (%eax)
0xc4,0xe2,0x51,0x2f,0x10 = vmaskmovpd %xmm2, %xmm5, (%eax)
0xc4,0xe2,0x69,0x2d,0x28 = vmaskmovpd (%eax), %xmm2, %xmm5
0xc4,0xe2,0x6d,0x2d,0x28 = vmaskmovpd (%eax), %ymm2, %ymm5
0xc4,0xe2,0x51,0x2e,0x10 = vmaskmovps %xmm2, %xmm5, (%eax)
0xc4,0xe2,0x69,0x2c,0x28 = vmaskmovps (%eax), %xmm2, %xmm5
0xc4,0xe2,0x6d,0x2c,0x28 = vmaskmovps (%eax), %ymm2, %ymm5
0xc4,0xe3,0x79,0x04,0xe9,0x07 = vpermilps $7, %xmm1, %xmm5
0xc4,0xe3,0x7d,0x04,0xcd,0x07 = vpermilps $7, %ymm5, %ymm1
0xc4,0xe3,0x79,0x04,0x28,0x07 = vpermilps $7, (%eax), %xmm5
0xc4,0xe3,0x7d,0x04,0x28,0x07 = vpermilps $7, (%eax), %ymm5
0xc4,0xe2,0x51,0x0c,0xc9 = vpermilps %xmm1, %xmm5, %xmm1
0xc4,0xe2,0x55,0x0c,0xc9 = vpermilps %ymm1, %ymm5, %ymm1
0xc4,0xe2,0x51,0x0c,0x18 = vpermilps (%eax), %xmm5, %xmm3
0xc4,0xe2,0x55,0x0c,0x08 = vpermilps (%eax), %ymm5, %ymm1
0xc4,0xe3,0x79,0x05,0xe9,0x07 = vpermilpd $7, %xmm1, %xmm5
0xc4,0xe3,0x7d,0x05,0xcd,0x07 = vpermilpd $7, %ymm5, %ymm1
0xc4,0xe3,0x79,0x05,0x28,0x07 = vpermilpd $7, (%eax), %xmm5
0xc4,0xe3,0x7d,0x05,0x28,0x07 = vpermilpd $7, (%eax), %ymm5
0xc4,0xe2,0x51,0x0d,0xc9 = vpermilpd %xmm1, %xmm5, %xmm1
0xc4,0xe2,0x55,0x0d,0xc9 = vpermilpd %ymm1, %ymm5, %ymm1
0xc4,0xe2,0x51,0x0d,0x18 = vpermilpd (%eax), %xmm5, %xmm3
0xc4,0xe2,0x55,0x0d,0x08 = vpermilpd (%eax), %ymm5, %ymm1
0xc4,0xe3,0x55,0x06,0xca,0x07 = vperm2f128 $7, %ymm2, %ymm5, %ymm1
0xc4,0xe3,0x55,0x06,0x08,0x07 = vperm2f128 $7, (%eax), %ymm5, %ymm1
0xc5,0xfc,0x77 = vzeroall
0xc5,0xf8,0x77 = vzeroupper
0xc5,0xfb,0x2d,0xcc = vcvtsd2sil %xmm4, %ecx
0xc5,0xfb,0x2d,0x09 = vcvtsd2sil (%ecx), %ecx
0xc5,0xfb,0x2d,0xcc = vcvtsd2si %xmm4, %ecx
0xc5,0xfb,0x2d,0x09 = vcvtsd2si (%ecx), %ecx
0xc5,0xfb,0x2a,0x7d,0x00 = vcvtsi2sdl (%ebp), %xmm0, %xmm7
0xc5,0xfb,0x2a,0x3c,0x24 = vcvtsi2sdl (%esp), %xmm0, %xmm7
0xc5,0xfb,0x2a,0x7d,0x00 = vcvtsi2sd (%ebp), %xmm0, %xmm7
0xc5,0xfb,0x2a,0x3c,0x24 = vcvtsi2sd (%esp), %xmm0, %xmm7
0xc5,0xff,0xf0,0x10 = vlddqu (%eax), %ymm2
0xc5,0xff,0x12,0xea = vmovddup %ymm2, %ymm5
0xc5,0xff,0x12,0x10 = vmovddup (%eax), %ymm2
0xc5,0xfd,0x6f,0xea = vmovdqa %ymm2, %ymm5
0xc5,0xfd,0x7f,0x10 = vmovdqa %ymm2, (%eax)
0xc5,0xfe,0x6f,0xea = vmovdqu %ymm2, %ymm5
0xc5,0xfe,0x7f,0x10 = vmovdqu %ymm2, (%eax)
0xc5,0xfe,0x16,0xea = vmovshdup %ymm2, %ymm5
0xc5,0xfe,0x16,0x10 = vmovshdup (%eax), %ymm2
0xc5,0xfe,0x12,0xea = vmovsldup %ymm2, %ymm5
0xc5,0xfe,0x12,0x10 = vmovsldup (%eax), %ymm2
0xc4,0xe2,0x7d,0x17,0xea = vptest %ymm2, %ymm5
0xc4,0xe2,0x7d,0x17,0x10 = vptest (%eax), %ymm2
0xc4,0xe3,0x7d,0x09,0xcd,0x07 = vroundpd $7, %ymm5, %ymm1
0xc4,0xe3,0x7d,0x09,0x28,0x07 = vroundpd $7, (%eax), %ymm5
0xc4,0xe3,0x7d,0x08,0xcd,0x07 = vroundps $7, %ymm5, %ymm1
0xc4,0xe3,0x7d,0x08,0x28,0x07 = vroundps $7, (%eax), %ymm5
0xc5,0xd5,0xc6,0xca,0x07 = vshufpd $7, %ymm2, %ymm5, %ymm1
0xc5,0xd5,0xc6,0x08,0x07 = vshufpd $7, (%eax), %ymm5, %ymm1
0xc5,0xd4,0xc6,0xca,0x07 = vshufps $7, %ymm2, %ymm5, %ymm1
0xc5,0xd4,0xc6,0x08,0x07 = vshufps $7, (%eax), %ymm5, %ymm1
0xc4,0xe2,0x79,0x0f,0xea = vtestpd %xmm2, %xmm5
0xc4,0xe2,0x7d,0x0f,0xea = vtestpd %ymm2, %ymm5
0xc4,0xe2,0x79,0x0f,0x10 = vtestpd (%eax), %xmm2
0xc4,0xe2,0x7d,0x0f,0x10 = vtestpd (%eax), %ymm2
0xc4,0xe2,0x79,0x0e,0xea = vtestps %xmm2, %xmm5
0xc4,0xe2,0x7d,0x0e,0xea = vtestps %ymm2, %ymm5
0xc4,0xe2,0x79,0x0e,0x10 = vtestps (%eax), %xmm2
0xc4,0xe2,0x7d,0x0e,0x10 = vtestps (%eax), %ymm2
0xc4,0xe3,0x75,0x4b,0x94,0x20,0xad,0xde,0x00,0x00,0x00 = vblendvpd %ymm0, 0xdead(%eax,%eiz), %ymm1, %ymm2
0xc4,0xe3,0x51,0x44,0xca,0x01 = vpclmulhqlqdq %xmm2, %xmm5, %xmm1
0xc4,0xe3,0x51,0x44,0x18,0x01 = vpclmulhqlqdq (%eax), %xmm5, %xmm3
0xc4,0xe3,0x51,0x44,0xca,0x10 = vpclmullqhqdq %xmm2, %xmm5, %xmm1
0xc4,0xe3,0x51,0x44,0x18,0x10 = vpclmullqhqdq (%eax), %xmm5, %xmm3
0xc4,0xe3,0x51,0x44,0xca,0x00 = vpclmullqlqdq %xmm2, %xmm5, %xmm1
0xc4,0xe3,0x51,0x44,0x18,0x00 = vpclmullqlqdq (%eax), %xmm5, %xmm3
0xc4,0xe3,0x51,0x44,0xca,0x11 = vpclmulqdq $17, %xmm2, %xmm5, %xmm1
0xc4,0xe3,0x51,0x44,0x18,0x11 = vpclmulqdq $17, (%eax), %xmm5, %xmm3