# CS_ARCH_X86, CS_MODE_64, CS_OPT_SYNTAX_ATT
0xc5,0x7a,0x10,0x54,0xcb,0xfc = vmovss -4(%rbx, %rcx, 8), %xmm10
0xc5,0x18,0xc6,0x6c,0xcb,0xfc,0x08 = vshufps $8, -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x19,0xc6,0x6c,0xcb,0xfc,0x08 = vshufpd $8, -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x00 = vcmpeqps -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x02 = vcmpleps -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x01 = vcmpltps -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x04 = vcmpneqps -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x06 = vcmpnleps -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x05 = vcmpnltps -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0xc8,0xc2,0x54,0xcb,0xfc,0x07 = vcmpordps -4(%rbx, %rcx, 8), %xmm6, %xmm2
0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x03 = vcmpunordps -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x00 = vcmpeqpd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x02 = vcmplepd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x01 = vcmpltpd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x04 = vcmpneqpd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x06 = vcmpnlepd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x05 = vcmpnltpd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0xc9,0xc2,0x54,0xcb,0xfc,0x07 = vcmpordpd -4(%rbx, %rcx, 8), %xmm6, %xmm2
0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x03 = vcmpunordpd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x00 = vcmpeqss -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x02 = vcmpless -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x01 = vcmpltss -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x04 = vcmpneqss -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x06 = vcmpnless -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x05 = vcmpnltss -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0xca,0xc2,0x54,0xcb,0xfc,0x07 = vcmpordss -4(%rbx, %rcx, 8), %xmm6, %xmm2
0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x03 = vcmpunordss -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x00 = vcmpeqsd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x02 = vcmplesd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x01 = vcmpltsd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x04 = vcmpneqsd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x06 = vcmpnlesd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x05 = vcmpnltsd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0xcb,0xc2,0x54,0xcb,0xfc,0x07 = vcmpordsd -4(%rbx, %rcx, 8), %xmm6, %xmm2
0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x03 = vcmpunordsd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x08 = vcmpeq_uqps -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x09 = vcmpngeps -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x0a = vcmpngtps -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x0b = vcmpfalseps -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x0c = vcmpneq_oqps -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x0d = vcmpgeps -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0xc8,0xc2,0x54,0xcb,0xfc,0x0e = vcmpgtps -4(%rbx, %rcx, 8), %xmm6, %xmm2
0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x0f = vcmptrueps -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x10 = vcmpeq_osps -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x11 = vcmplt_oqps -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x12 = vcmple_oqps -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x13 = vcmpunord_sps -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x14 = vcmpneq_usps -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x15 = vcmpnlt_uqps -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0xc8,0xc2,0x54,0xcb,0xfc,0x16 = vcmpnle_uqps -4(%rbx, %rcx, 8), %xmm6, %xmm2
0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x17 = vcmpord_sps -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x18 = vcmpeq_usps -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x19 = vcmpnge_uqps -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x1a = vcmpngt_uqps -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x1b = vcmpfalse_osps -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x1c = vcmpneq_osps -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x1d = vcmpge_oqps -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0xc8,0xc2,0x54,0xcb,0xfc,0x1e = vcmpgt_oqps -4(%rbx, %rcx, 8), %xmm6, %xmm2
0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x1f = vcmptrue_usps -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x08 = vcmpeq_uqpd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x09 = vcmpngepd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x0a = vcmpngtpd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x0b = vcmpfalsepd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x0c = vcmpneq_oqpd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x0d = vcmpgepd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0xc9,0xc2,0x54,0xcb,0xfc,0x0e = vcmpgtpd -4(%rbx, %rcx, 8), %xmm6, %xmm2
0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x0f = vcmptruepd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x10 = vcmpeq_ospd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x11 = vcmplt_oqpd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x12 = vcmple_oqpd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x13 = vcmpunord_spd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x14 = vcmpneq_uspd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x15 = vcmpnlt_uqpd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0xc9,0xc2,0x54,0xcb,0xfc,0x16 = vcmpnle_uqpd -4(%rbx, %rcx, 8), %xmm6, %xmm2
0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x17 = vcmpord_spd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x18 = vcmpeq_uspd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x19 = vcmpnge_uqpd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x1a = vcmpngt_uqpd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x1b = vcmpfalse_ospd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x1c = vcmpneq_ospd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x1d = vcmpge_oqpd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0xc9,0xc2,0x54,0xcb,0xfc,0x1e = vcmpgt_oqpd -4(%rbx, %rcx, 8), %xmm6, %xmm2
0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x1f = vcmptrue_uspd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x08 = vcmpeq_uqss -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x09 = vcmpngess -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x0a = vcmpngtss -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x0b = vcmpfalsess -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x0c = vcmpneq_oqss -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x0d = vcmpgess -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0xca,0xc2,0x54,0xcb,0xfc,0x0e = vcmpgtss -4(%rbx, %rcx, 8), %xmm6, %xmm2
0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x0f = vcmptruess -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x10 = vcmpeq_osss -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x11 = vcmplt_oqss -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x12 = vcmple_oqss -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x13 = vcmpunord_sss -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x14 = vcmpneq_usss -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x15 = vcmpnlt_uqss -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0xca,0xc2,0x54,0xcb,0xfc,0x16 = vcmpnle_uqss -4(%rbx, %rcx, 8), %xmm6, %xmm2
0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x17 = vcmpord_sss -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x18 = vcmpeq_usss -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x19 = vcmpnge_uqss -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x1a = vcmpngt_uqss -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x1b = vcmpfalse_osss -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x1c = vcmpneq_osss -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x1d = vcmpge_oqss -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0xca,0xc2,0x54,0xcb,0xfc,0x1e = vcmpgt_oqss -4(%rbx, %rcx, 8), %xmm6, %xmm2
0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x1f = vcmptrue_usss -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x08 = vcmpeq_uqsd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x09 = vcmpngesd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x0a = vcmpngtsd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x0b = vcmpfalsesd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x0c = vcmpneq_oqsd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x0d = vcmpgesd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0xcb,0xc2,0x54,0xcb,0xfc,0x0e = vcmpgtsd -4(%rbx, %rcx, 8), %xmm6, %xmm2
0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x0f = vcmptruesd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x10 = vcmpeq_ossd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x11 = vcmplt_oqsd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x12 = vcmple_oqsd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x13 = vcmpunord_ssd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x14 = vcmpneq_ussd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x15 = vcmpnlt_uqsd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0xcb,0xc2,0x54,0xcb,0xfc,0x16 = vcmpnle_uqsd -4(%rbx, %rcx, 8), %xmm6, %xmm2
0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x17 = vcmpord_ssd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x18 = vcmpeq_ussd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x19 = vcmpnge_uqsd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x1a = vcmpngt_uqsd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x1b = vcmpfalse_ossd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x1c = vcmpneq_ossd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x1d = vcmpge_oqsd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc5,0xcb,0xc2,0x54,0xcb,0xfc,0x1e = vcmpgt_oqsd -4(%rbx, %rcx, 8), %xmm6, %xmm2
0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x1f = vcmptrue_ussd -4(%rbx, %rcx, 8), %xmm12, %xmm13
0xc4,0x41,0x78,0x2e,0xe3 = vucomiss %xmm11, %xmm12
0xc5,0x78,0x2e,0x20 = vucomiss (%rax), %xmm12
0xc4,0x41,0x78,0x2f,0xe3 = vcomiss %xmm11, %xmm12
0xc5,0x78,0x2f,0x20 = vcomiss (%rax), %xmm12
0xc4,0x41,0x79,0x2e,0xe3 = vucomisd %xmm11, %xmm12
0xc5,0x79,0x2e,0x20 = vucomisd (%rax), %xmm12
0xc4,0x41,0x79,0x2f,0xe3 = vcomisd %xmm11, %xmm12
0xc5,0x79,0x2f,0x20 = vcomisd (%rax), %xmm12
0xc5,0xfa,0x2c,0x01 = vcvttss2si (%rcx), %eax
0xc5,0xfb,0x2c,0x01 = vcvttsd2si (%rcx), %eax
0xc5,0x78,0x28,0x20 = vmovaps (%rax), %xmm12
0xc4,0x41,0x78,0x28,0xe3 = vmovaps %xmm11, %xmm12
0xc5,0x78,0x29,0x18 = vmovaps %xmm11, (%rax)
0xc4,0x41,0x79,0x28,0xe3 = vmovapd %xmm11, %xmm12
0xc5,0x79,0x29,0x18 = vmovapd %xmm11, (%rax)
0xc4,0x41,0x78,0x10,0xe3 = vmovups %xmm11, %xmm12
0xc5,0x78,0x11,0x18 = vmovups %xmm11, (%rax)
0xc4,0x41,0x79,0x10,0xe3 = vmovupd %xmm11, %xmm12
0xc5,0x79,0x11,0x18 = vmovupd %xmm11, (%rax)
0xc5,0x79,0x13,0x18 = vmovlpd %xmm11, (%rax)
0xc5,0x78,0x17,0x18 = vmovhps %xmm11, (%rax)
0xc5,0x79,0x17,0x18 = vmovhpd %xmm11, (%rax)
0xc4,0xc1,0x7a,0x2d,0xc3 = vcvtss2si %xmm11, %eax
0xc5,0xfa,0x2d,0x18 = vcvtss2si (%rax), %ebx
0xc4,0x41,0x78,0x5b,0xe2 = vcvtdq2ps %xmm10, %xmm12
0xc5,0x78,0x5b,0x20 = vcvtdq2ps (%rax), %xmm12
0xc4,0x41,0x79,0x5b,0xdc = vcvtps2dq %xmm12, %xmm11
0xc5,0x79,0x5b,0x18 = vcvtps2dq (%rax), %xmm11
0xc4,0x41,0x78,0x5b,0xd5 = vcvtdq2ps %xmm13, %xmm10
0xc5,0x78,0x5b,0x29 = vcvtdq2ps (%ecx), %xmm13
0xc4,0x41,0x7a,0x5b,0xdc = vcvttps2dq %xmm12, %xmm11
0xc5,0x7a,0x5b,0x18 = vcvttps2dq (%rax), %xmm11
0xc4,0x41,0x78,0x5a,0xdc = vcvtps2pd %xmm12, %xmm11
0xc5,0x78,0x5a,0x18 = vcvtps2pd (%rax), %xmm11
0xc4,0x41,0x79,0x5a,0xdc = vcvtpd2ps %xmm12, %xmm11
0xc4,0x41,0x79,0x51,0xe3 = vsqrtpd %xmm11, %xmm12
0xc5,0x79,0x51,0x20 = vsqrtpd (%rax), %xmm12
0xc4,0x41,0x78,0x51,0xe3 = vsqrtps %xmm11, %xmm12
0xc5,0x78,0x51,0x20 = vsqrtps (%rax), %xmm12
0xc4,0x41,0x78,0x52,0xe3 = vrsqrtps %xmm11, %xmm12
0xc5,0x78,0x52,0x20 = vrsqrtps (%rax), %xmm12
0xc4,0x41,0x78,0x53,0xe3 = vrcpps %xmm11, %xmm12
0xc5,0x78,0x53,0x20 = vrcpps (%rax), %xmm12
0xc5,0x79,0xe7,0x18 = vmovntdq %xmm11, (%rax)
0xc5,0x78,0x2b,0x18 = vmovntps %xmm11, (%rax)
0xc5,0xf8,0xae,0x15,0xfc,0xff,0xff,0xff = vldmxcsr -4(%rip)
0xc5,0xf8,0xae,0x5c,0x24,0xfc = vstmxcsr -4(%rsp)
0xc4,0xc1,0x11,0x72,0xf4,0x0a = vpslld $10, %xmm12, %xmm13
0xc4,0xc1,0x11,0x73,0xfc,0x0a = vpslldq $10, %xmm12, %xmm13
0xc4,0xc1,0x11,0x73,0xf4,0x0a = vpsllq $10, %xmm12, %xmm13
0xc4,0xc1,0x11,0x71,0xf4,0x0a = vpsllw $10, %xmm12, %xmm13
0xc4,0xc1,0x11,0x72,0xe4,0x0a = vpsrad $10, %xmm12, %xmm13
0xc4,0xc1,0x11,0x71,0xe4,0x0a = vpsraw $10, %xmm12, %xmm13
0xc4,0xc1,0x11,0x72,0xd4,0x0a = vpsrld $10, %xmm12, %xmm13
0xc4,0xc1,0x11,0x73,0xdc,0x0a = vpsrldq $10, %xmm12, %xmm13
0xc4,0xc1,0x11,0x73,0xd4,0x0a = vpsrlq $10, %xmm12, %xmm13
0xc4,0xc1,0x11,0x71,0xd4,0x0a = vpsrlw $10, %xmm12, %xmm13
0xc4,0xc1,0x11,0x72,0xf4,0x0a = vpslld $10, %xmm12, %xmm13
0xc4,0x41,0x79,0x70,0xec,0x04 = vpshufd $4, %xmm12, %xmm13
0xc5,0x79,0x70,0x28,0x04 = vpshufd $4, (%rax), %xmm13
0xc4,0x41,0x7a,0x70,0xec,0x04 = vpshufhw $4, %xmm12, %xmm13
0xc5,0x7a,0x70,0x28,0x04 = vpshufhw $4, (%rax), %xmm13
0xc4,0x41,0x7b,0x70,0xec,0x04 = vpshuflw $4, %xmm12, %xmm13
0xc5,0x7b,0x70,0x28,0x04 = vpshuflw $4, (%rax), %xmm13
0xc5,0x19,0xc4,0x28,0x07 = vpinsrw $7, (%rax), %xmm12, %xmm13
0xc4,0xc1,0x79,0xc5,0xc4,0x07 = vpextrw $7, %xmm12, %eax
0xc4,0xc1,0x79,0xd7,0xc4 = vpmovmskb %xmm12, %eax
0xc4,0x41,0x79,0xf7,0xfe = vmaskmovdqu %xmm14, %xmm15
0xc5,0x79,0x6e,0xf0 = vmovd %eax, %xmm14
0xc5,0x79,0x7e,0x30 = vmovd %xmm14, (%rax)
0xc4,0x61,0xf9,0x6e,0xf0 = vmovd %rax, %xmm14
0xc4,0xe1,0xf9,0x7e,0xc0 = vmovd %xmm0, %rax
0xc4,0x41,0x7a,0x7e,0xe6 = vmovq %xmm14, %xmm12
0xc5,0x7a,0x7e,0x30 = vmovq (%rax), %xmm14
0xc4,0x61,0xf9,0x6e,0xf0 = vmovq %rax, %xmm14
0xc4,0x61,0xf9,0x7e,0xf0 = vmovq %xmm14, %rax
0xc4,0x41,0x7b,0xe6,0xe3 = vcvtpd2dq %xmm11, %xmm12
0xc4,0x41,0x7a,0xe6,0xe3 = vcvtdq2pd %xmm11, %xmm12
0xc5,0x7a,0xe6,0x20 = vcvtdq2pd (%rax), %xmm12
0xc4,0x41,0x7a,0x16,0xe3 = vmovshdup %xmm11, %xmm12
0xc5,0x7a,0x16,0x20 = vmovshdup (%rax), %xmm12
0xc4,0x41,0x7a,0x12,0xe3 = vmovsldup %xmm11, %xmm12
0xc5,0x7a,0x12,0x20 = vmovsldup (%rax), %xmm12
0xc4,0x41,0x7b,0x12,0xe3 = vmovddup %xmm11, %xmm12
0xc5,0x7b,0x12,0x20 = vmovddup (%rax), %xmm12
0xc4,0x42,0x79,0x1c,0xe3 = vpabsb %xmm11, %xmm12
0xc4,0x62,0x79,0x1c,0x20 = vpabsb (%rax), %xmm12
0xc4,0x42,0x79,0x1d,0xe3 = vpabsw %xmm11, %xmm12
0xc4,0x62,0x79,0x1d,0x20 = vpabsw (%rax), %xmm12
0xc4,0x42,0x79,0x1e,0xe3 = vpabsd %xmm11, %xmm12
0xc4,0x62,0x79,0x1e,0x20 = vpabsd (%rax), %xmm12
0xc4,0x63,0x19,0x0f,0x28,0x07 = vpalignr $7, (%rax), %xmm12, %xmm13
0xc4,0x63,0x19,0x0a,0x28,0x07 = vroundss $7, (%rax), %xmm12, %xmm13
0xc4,0x43,0x79,0x09,0xec,0x07 = vroundpd $7, %xmm12, %xmm13
0xc4,0x63,0x79,0x09,0x28,0x07 = vroundpd $7, (%rax), %xmm13
0xc4,0x43,0x79,0x08,0xec,0x07 = vroundps $7, %xmm12, %xmm13
0xc4,0x63,0x79,0x08,0x28,0x07 = vroundps $7, (%rax), %xmm13
0xc4,0x42,0x79,0x41,0xec = vphminposuw %xmm12, %xmm13
0xc4,0x62,0x79,0x41,0x20 = vphminposuw (%rax), %xmm12
0xc4,0x62,0x51,0x40,0x28 = vpmulld (%rax), %xmm5, %xmm13
0xc4,0x43,0x51,0x0c,0xdc,0x03 = vblendps $3, %xmm12, %xmm5, %xmm11
0xc4,0x63,0x51,0x0c,0x18,0x03 = vblendps $3, (%rax), %xmm5, %xmm11
0xc4,0x43,0x51,0x0d,0xdc,0x03 = vblendpd $3, %xmm12, %xmm5, %xmm11
0xc4,0x43,0x51,0x0e,0xdc,0x03 = vpblendw $3, %xmm12, %xmm5, %xmm11
0xc4,0x63,0x51,0x0e,0x18,0x03 = vpblendw $3, (%rax), %xmm5, %xmm11
0xc4,0x43,0x51,0x42,0xdc,0x03 = vmpsadbw $3, %xmm12, %xmm5, %xmm11
0xc4,0x63,0x51,0x40,0x18,0x03 = vdpps $3, (%rax), %xmm5, %xmm11
0xc4,0x42,0x79,0x23,0xd4 = vpmovsxwd %xmm12, %xmm10
0xc4,0x62,0x79,0x23,0x20 = vpmovsxwd (%rax), %xmm12
0xc4,0x42,0x79,0x25,0xd4 = vpmovsxdq %xmm12, %xmm10
0xc4,0x62,0x79,0x25,0x20 = vpmovsxdq (%rax), %xmm12
0xc4,0x42,0x79,0x30,0xd4 = vpmovzxbw %xmm12, %xmm10
0xc4,0x62,0x79,0x30,0x20 = vpmovzxbw (%rax), %xmm12
0xc4,0x42,0x79,0x33,0xd4 = vpmovzxwd %xmm12, %xmm10
0xc4,0x62,0x79,0x33,0x20 = vpmovzxwd (%rax), %xmm12
0xc4,0x42,0x79,0x35,0xd4 = vpmovzxdq %xmm12, %xmm10
0xc4,0x62,0x79,0x35,0x20 = vpmovzxdq (%rax), %xmm12
0xc4,0x42,0x79,0x22,0xd4 = vpmovsxbq %xmm12, %xmm10
0xc4,0x62,0x79,0x22,0x20 = vpmovsxbq (%rax), %xmm12
0xc4,0x42,0x79,0x32,0xd4 = vpmovzxbq %xmm12, %xmm10
0xc4,0x62,0x79,0x32,0x20 = vpmovzxbq (%rax), %xmm12
0xc4,0x42,0x79,0x21,0xd4 = vpmovsxbd %xmm12, %xmm10
0xc4,0x62,0x79,0x21,0x20 = vpmovsxbd (%rax), %xmm12
0xc4,0x42,0x79,0x24,0xd4 = vpmovsxwq %xmm12, %xmm10
0xc4,0x62,0x79,0x24,0x20 = vpmovsxwq (%rax), %xmm12
0xc4,0x42,0x79,0x31,0xd4 = vpmovzxbd %xmm12, %xmm10
0xc4,0x62,0x79,0x31,0x20 = vpmovzxbd (%rax), %xmm12
0xc4,0x42,0x79,0x34,0xd4 = vpmovzxwq %xmm12, %xmm10
0xc4,0x62,0x79,0x34,0x20 = vpmovzxwq (%rax), %xmm12
0xc4,0xc1,0x79,0xc5,0xc4,0x07 = vpextrw $7, %xmm12, %eax
0xc4,0x63,0x79,0x15,0x20,0x07 = vpextrw $7, %xmm12, (%rax)
0xc4,0x63,0x79,0x16,0xe0,0x07 = vpextrd $7, %xmm12, %eax
0xc4,0x63,0x79,0x14,0xe0,0x07 = vpextrb $7, %xmm12, %eax
0xc4,0x63,0x79,0x14,0x20,0x07 = vpextrb $7, %xmm12, (%rax)
0xc4,0x63,0xf9,0x16,0xe1,0x07 = vpextrq $7, %xmm12, %rcx
0xc4,0x63,0x79,0x17,0x20,0x07 = vextractps $7, %xmm12, (%rax)
0xc4,0x63,0x79,0x17,0xe0,0x07 = vextractps $7, %xmm12, %eax
0xc4,0x63,0x19,0x20,0xd0,0x07 = vpinsrb $7, %eax, %xmm12, %xmm10
0xc4,0x63,0x19,0x20,0x10,0x07 = vpinsrb $7, (%rax), %xmm12, %xmm10
0xc4,0x63,0x99,0x22,0xd0,0x07 = vpinsrq $7, %rax, %xmm12, %xmm10
0xc4,0x63,0x99,0x22,0x10,0x07 = vpinsrq $7, (%rax), %xmm12, %xmm10
0xc4,0x42,0x79,0x17,0xd4 = vptest %xmm12, %xmm10
0xc4,0x62,0x79,0x17,0x20 = vptest (%rax), %xmm12
0xc4,0x62,0x79,0x2a,0x20 = vmovntdqa (%rax), %xmm12
0xc4,0x43,0x79,0x62,0xd4,0x07 = vpcmpistrm $7, %xmm12, %xmm10
0xc4,0x63,0x79,0x62,0x10,0x07 = vpcmpistrm $7, (%rax), %xmm10
0xc4,0x43,0x79,0x60,0xd4,0x07 = vpcmpestrm $7, %xmm12, %xmm10
0xc4,0x63,0x79,0x60,0x10,0x07 = vpcmpestrm $7, (%rax), %xmm10
0xc4,0x43,0x79,0x63,0xd4,0x07 = vpcmpistri $7, %xmm12, %xmm10
0xc4,0x63,0x79,0x63,0x10,0x07 = vpcmpistri $7, (%rax), %xmm10
0xc4,0x43,0x79,0x61,0xd4,0x07 = vpcmpestri $7, %xmm12, %xmm10
0xc4,0x63,0x79,0x61,0x10,0x07 = vpcmpestri $7, (%rax), %xmm10
0xc4,0x42,0x79,0xdb,0xd4 = vaesimc %xmm12, %xmm10
0xc4,0x62,0x79,0xdb,0x20 = vaesimc (%rax), %xmm12
0xc4,0x62,0x29,0xdd,0x28 = vaesenclast (%rax), %xmm10, %xmm13
0xc4,0x62,0x29,0xdf,0x28 = vaesdeclast (%rax), %xmm10, %xmm13
0xc4,0x43,0x79,0xdf,0xd4,0x07 = vaeskeygenassist $7, %xmm12, %xmm10
0xc4,0x63,0x79,0xdf,0x10,0x07 = vaeskeygenassist $7, (%rax), %xmm10
0xc4,0x41,0x7c,0x28,0xe3 = vmovaps %ymm11, %ymm12
0xc5,0x7c,0x29,0x18 = vmovaps %ymm11, (%rax)
0xc4,0x41,0x7d,0x28,0xe3 = vmovapd %ymm11, %ymm12
0xc5,0x7d,0x29,0x18 = vmovapd %ymm11, (%rax)
0xc4,0x41,0x7c,0x10,0xe3 = vmovups %ymm11, %ymm12
0xc5,0x7c,0x11,0x18 = vmovups %ymm11, (%rax)
0xc4,0x41,0x7d,0x10,0xe3 = vmovupd %ymm11, %ymm12
0xc5,0x7d,0x11,0x18 = vmovupd %ymm11, (%rax)
0xc5,0x7d,0x2b,0x18 = vmovntpd %ymm11, (%rax)
0xc4,0xc1,0x78,0x50,0xc4 = vmovmskps %xmm12, %eax
0xc4,0xc1,0x79,0x50,0xc4 = vmovmskpd %xmm12, %eax
0xc4,0x41,0x7d,0x51,0xe3 = vsqrtpd %ymm11, %ymm12
0xc5,0x7d,0x51,0x20 = vsqrtpd (%rax), %ymm12
0xc4,0x41,0x7c,0x51,0xe3 = vsqrtps %ymm11, %ymm12
0xc5,0x7c,0x51,0x20 = vsqrtps (%rax), %ymm12
0xc4,0x41,0x7c,0x52,0xe3 = vrsqrtps %ymm11, %ymm12
0xc5,0x7c,0x52,0x20 = vrsqrtps (%rax), %ymm12
0xc4,0x41,0x7c,0x53,0xe3 = vrcpps %ymm11, %ymm12
0xc5,0x7c,0x53,0x20 = vrcpps (%rax), %ymm12
0xc4,0x41,0x7c,0x5a,0xe5 = vcvtps2pd %xmm13, %ymm12
0xc5,0x7c,0x5a,0x20 = vcvtps2pd (%rax), %ymm12
0xc4,0x41,0x7e,0xe6,0xe5 = vcvtdq2pd %xmm13, %ymm12
0xc5,0x7e,0xe6,0x20 = vcvtdq2pd (%rax), %ymm12
0xc4,0x41,0x7c,0x5b,0xd4 = vcvtdq2ps %ymm12, %ymm10
0xc5,0x7c,0x5b,0x20 = vcvtdq2ps (%rax), %ymm12
0xc4,0x41,0x7d,0x5b,0xd4 = vcvtps2dq %ymm12, %ymm10
0xc5,0x7d,0x5b,0x10 = vcvtps2dq (%rax), %ymm10
0xc4,0x41,0x7e,0x5b,0xd4 = vcvttps2dq %ymm12, %ymm10
0xc5,0x7e,0x5b,0x10 = vcvttps2dq (%rax), %ymm10
0xc4,0x41,0x79,0xe6,0xd3 = vcvttpd2dq %xmm11, %xmm10
0xc4,0x41,0x7d,0xe6,0xd4 = vcvttpd2dq %ymm12, %xmm10
0xc4,0x41,0x79,0xe6,0xd3 = vcvttpd2dqx %xmm11, %xmm10
0xc5,0x79,0xe6,0x18 = vcvttpd2dqx (%rax), %xmm11
0xc4,0x41,0x7d,0xe6,0xdc = vcvttpd2dqy %ymm12, %xmm11
0xc5,0x7d,0xe6,0x18 = vcvttpd2dqy (%rax), %xmm11
0xc4,0x41,0x7d,0x5a,0xd4 = vcvtpd2ps %ymm12, %xmm10
0xc4,0x41,0x79,0x5a,0xd3 = vcvtpd2psx %xmm11, %xmm10
0xc5,0x79,0x5a,0x18 = vcvtpd2psx (%rax), %xmm11
0xc4,0x41,0x7d,0x5a,0xdc = vcvtpd2psy %ymm12, %xmm11
0xc5,0x7d,0x5a,0x18 = vcvtpd2psy (%rax), %xmm11
0xc4,0x41,0x7f,0xe6,0xd4 = vcvtpd2dq %ymm12, %xmm10
0xc4,0x41,0x7f,0xe6,0xdc = vcvtpd2dqy %ymm12, %xmm11
0xc5,0x7f,0xe6,0x18 = vcvtpd2dqy (%rax), %xmm11
0xc4,0x41,0x7b,0xe6,0xd3 = vcvtpd2dqx %xmm11, %xmm10
0xc5,0x7b,0xe6,0x18 = vcvtpd2dqx (%rax), %xmm11
0xc5,0x1c,0xc2,0x6c,0xcb,0xfc,0x00 = vcmpeqps -4(%rbx, %rcx, 8), %ymm12, %ymm13
0xc5,0x1c,0xc2,0x6c,0xcb,0xfc,0x02 = vcmpleps -4(%rbx, %rcx, 8), %ymm12, %ymm13
0xc5,0x1c,0xc2,0x6c,0xcb,0xfc,0x01 = vcmpltps -4(%rbx, %rcx, 8), %ymm12, %ymm13
0xc5,0x1c,0xc2,0x6c,0xcb,0xfc,0x04 = vcmpneqps -4(%rbx, %rcx, 8), %ymm12, %ymm13
0xc5,0x1c,0xc2,0x6c,0xcb,0xfc,0x06 = vcmpnleps -4(%rbx, %rcx, 8), %ymm12, %ymm13
0xc5,0x1c,0xc2,0x6c,0xcb,0xfc,0x05 = vcmpnltps -4(%rbx, %rcx, 8), %ymm12, %ymm13
0xc5,0x4c,0xc2,0x64,0xcb,0xfc,0x07 = vcmpordps -4(%rbx, %rcx, 8), %ymm6, %ymm12
0xc5,0x1c,0xc2,0x6c,0xcb,0xfc,0x03 = vcmpunordps -4(%rbx, %rcx, 8), %ymm12, %ymm13
0xc5,0x1d,0xc2,0x6c,0xcb,0xfc,0x00 = vcmpeqpd -4(%rbx, %rcx, 8), %ymm12, %ymm13
0xc5,0x1d,0xc2,0x6c,0xcb,0xfc,0x02 = vcmplepd -4(%rbx, %rcx, 8), %ymm12, %ymm13
0xc5,0x1d,0xc2,0x6c,0xcb,0xfc,0x01 = vcmpltpd -4(%rbx, %rcx, 8), %ymm12, %ymm13
0xc5,0x1d,0xc2,0x6c,0xcb,0xfc,0x04 = vcmpneqpd -4(%rbx, %rcx, 8), %ymm12, %ymm13
0xc5,0x1d,0xc2,0x6c,0xcb,0xfc,0x06 = vcmpnlepd -4(%rbx, %rcx, 8), %ymm12, %ymm13
0xc5,0x1d,0xc2,0x6c,0xcb,0xfc,0x05 = vcmpnltpd -4(%rbx, %rcx, 8), %ymm12, %ymm13
0xc5,0x4d,0xc2,0x64,0xcb,0xfc,0x07 = vcmpordpd -4(%rbx, %rcx, 8), %ymm6, %ymm12
0xc5,0x1d,0xc2,0x6c,0xcb,0xfc,0x03 = vcmpunordpd -4(%rbx, %rcx, 8), %ymm12, %ymm13
0xc4,0x63,0x2d,0x0c,0x18,0x03 = vblendps $3, (%rax), %ymm10, %ymm11
0xc4,0x63,0x2d,0x40,0x18,0x03 = vdpps $3, (%rax), %ymm10, %ymm11
0xc4,0x62,0x7d,0x1a,0x20 = vbroadcastf128 (%rax), %ymm12
0xc4,0x62,0x7d,0x19,0x20 = vbroadcastsd (%rax), %ymm12
0xc4,0x62,0x79,0x18,0x20 = vbroadcastss (%rax), %xmm12
0xc4,0x62,0x7d,0x18,0x20 = vbroadcastss (%rax), %ymm12
0xc4,0x63,0x1d,0x18,0x10,0x07 = vinsertf128 $7, (%rax), %ymm12, %ymm10
0xc4,0x43,0x7d,0x19,0xe4,0x07 = vextractf128 $7, %ymm12, %xmm12
0xc4,0x43,0x79,0x04,0xd3,0x07 = vpermilps $7, %xmm11, %xmm10
0xc4,0x43,0x7d,0x04,0xda,0x07 = vpermilps $7, %ymm10, %ymm11
0xc4,0x63,0x79,0x04,0x10,0x07 = vpermilps $7, (%rax), %xmm10
0xc4,0x63,0x7d,0x04,0x10,0x07 = vpermilps $7, (%rax), %ymm10
0xc4,0x43,0x79,0x05,0xd3,0x07 = vpermilpd $7, %xmm11, %xmm10
0xc4,0x43,0x7d,0x05,0xda,0x07 = vpermilpd $7, %ymm10, %ymm11
0xc4,0x63,0x79,0x05,0x10,0x07 = vpermilpd $7, (%rax), %xmm10
0xc4,0x63,0x7d,0x05,0x10,0x07 = vpermilpd $7, (%rax), %ymm10
0xc4,0x63,0x2d,0x06,0x18,0x07 = vperm2f128 $7, (%rax), %ymm10, %ymm11
0xc4,0x41,0x7b,0x2d,0xc0 = vcvtsd2si %xmm8, %r8d
0xc5,0xfb,0x2d,0x09 = vcvtsd2si (%rcx), %ecx
0xc4,0xe1,0xfa,0x2d,0xcc = vcvtss2si %xmm4, %rcx
0xc4,0x61,0xfa,0x2d,0x01 = vcvtss2si (%rcx), %r8
0xc5,0x3b,0x2a,0x7d,0x00 = vcvtsi2sdl (%rbp), %xmm8, %xmm15
0xc4,0xe1,0xdb,0x2a,0x31 = vcvtsi2sdq (%rcx), %xmm4, %xmm6
0xc4,0xe1,0xda,0x2a,0xf1 = vcvtsi2ssq %rcx, %xmm4, %xmm6
0xc4,0xe1,0xda,0x2a,0x31 = vcvtsi2ssq (%rcx), %xmm4, %xmm6
0xc4,0xe1,0xfb,0x2c,0xcc = vcvttsd2si %xmm4, %rcx
0xc4,0xe1,0xfb,0x2c,0x09 = vcvttsd2si (%rcx), %rcx
0xc4,0xe1,0xfa,0x2c,0xcc = vcvttss2si %xmm4, %rcx
0xc4,0xe1,0xfa,0x2c,0x09 = vcvttss2si (%rcx), %rcx
0xc5,0x7f,0xf0,0x20 = vlddqu (%rax), %ymm12
0xc4,0x41,0x7f,0x12,0xd4 = vmovddup %ymm12, %ymm10
0xc5,0x7f,0x12,0x20 = vmovddup (%rax), %ymm12
0xc4,0x41,0x7d,0x6f,0xd4 = vmovdqa %ymm12, %ymm10
0xc5,0x7d,0x7f,0x20 = vmovdqa %ymm12, (%rax)
0xc4,0x41,0x7e,0x6f,0xd4 = vmovdqu %ymm12, %ymm10
0xc5,0x7e,0x7f,0x20 = vmovdqu %ymm12, (%rax)
0xc4,0x41,0x7e,0x16,0xd4 = vmovshdup %ymm12, %ymm10
0xc5,0x7e,0x16,0x20 = vmovshdup (%rax), %ymm12
0xc4,0x41,0x7e,0x12,0xd4 = vmovsldup %ymm12, %ymm10
0xc5,0x7e,0x12,0x20 = vmovsldup (%rax), %ymm12
0xc4,0x42,0x7d,0x17,0xd4 = vptest %ymm12, %ymm10
0xc4,0x62,0x7d,0x17,0x20 = vptest (%rax), %ymm12
0xc4,0x43,0x7d,0x09,0xda,0x07 = vroundpd $7, %ymm10, %ymm11
0xc4,0x63,0x7d,0x09,0x10,0x07 = vroundpd $7, (%rax), %ymm10
0xc4,0x43,0x7d,0x08,0xda,0x07 = vroundps $7, %ymm10, %ymm11
0xc4,0x63,0x7d,0x08,0x10,0x07 = vroundps $7, (%rax), %ymm10
0xc5,0x2d,0xc6,0x18,0x07 = vshufpd $7, (%rax), %ymm10, %ymm11
0xc4,0x42,0x79,0x0f,0xd4 = vtestpd %xmm12, %xmm10
0xc4,0x42,0x7d,0x0f,0xd4 = vtestpd %ymm12, %ymm10
0xc4,0x62,0x79,0x0f,0x20 = vtestpd (%rax), %xmm12
0xc4,0x62,0x7d,0x0f,0x20 = vtestpd (%rax), %ymm12
0xc4,0x42,0x79,0x0e,0xd4 = vtestps %xmm12, %xmm10
0xc4,0x42,0x7d,0x0e,0xd4 = vtestps %ymm12, %ymm10
0xc4,0x62,0x79,0x0e,0x20 = vtestps (%rax), %xmm12
0xc4,0x62,0x7d,0x0e,0x20 = vtestps (%rax), %ymm12
0xc4,0x43,0x79,0x17,0xc0,0x0a = vextractps $10, %xmm8, %r8
0xc4,0xe3,0x79,0x17,0xe1,0x07 = vextractps $7, %xmm4, %ecx
0xc4,0xe1,0xf9,0x7e,0xe1 = vmovd %xmm4, %rcx
0xc5,0xf9,0x50,0xcc = vmovmskpd %xmm4, %ecx
0xc5,0xfd,0x50,0xcc = vmovmskpd %ymm4, %ecx
0xc5,0xf8,0x50,0xcc = vmovmskps %xmm4, %ecx
0xc5,0xfc,0x50,0xcc = vmovmskps %ymm4, %ecx
0xc4,0xe3,0x79,0x14,0xe1,0x07 = vpextrb $7, %xmm4, %ecx
0xc4,0x41,0x01,0xc4,0xc0,0x07 = vpinsrw $7, %r8d, %xmm15, %xmm8
0xc5,0xd9,0xc4,0xf1,0x07 = vpinsrw $7, %ecx, %xmm4, %xmm6
0xc5,0xf9,0xd7,0xcc = vpmovmskb %xmm4, %ecx
0xc4,0x63,0x1d,0x4b,0xac,0x20,0xad,0xde,0x00,0x00,0xb0 = vblendvpd %ymm11, 0xdead(%rax, %riz), %ymm12, %ymm13
0xc4,0xc1,0x78,0x29,0x1c,0x1e = vmovaps %xmm3, (%r14, %rbx)
0xc4,0xa1,0x78,0x29,0x1c,0x18 = vmovaps %xmm3, (%rax, %r11)
0xc4,0xe2,0xf9,0x93,0x14,0x4f = vgatherqpd %xmm0, (%rdi, %xmm1, 2), %xmm2
0xc4,0xe2,0xf9,0x91,0x14,0x4f = vpgatherqq %xmm0, (%rdi, %xmm1, 2), %xmm2
0xc4,0xe2,0xfd,0x91,0x14,0x4f = vpgatherqq %ymm0, (%rdi, %ymm1, 2), %ymm2
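
Each entry above pairs an encoded byte sequence with the AT&T-syntax text Capstone is expected to produce under the header configuration (CS_ARCH_X86, CS_MODE_64, CS_OPT_SYNTAX_ATT). As a rough illustration only, the following minimal C sketch feeds the first vector in this file to the Capstone C API with that same configuration; the 0x1000 base address and the stand-alone main() wrapper are arbitrary choices for the example and are not part of the test data or its harness.

/* Minimal sketch: disassemble one test vector with Capstone (assumes libcapstone
 * is installed; link with -lcapstone). */
#include <stdio.h>
#include <capstone/capstone.h>

int main(void)
{
    /* First entry of this file: expected "vmovss -4(%rbx, %rcx, 8), %xmm10" */
    uint8_t code[] = { 0xc5, 0x7a, 0x10, 0x54, 0xcb, 0xfc };
    csh handle;
    cs_insn *insn;

    if (cs_open(CS_ARCH_X86, CS_MODE_64, &handle) != CS_ERR_OK)
        return 1;
    cs_option(handle, CS_OPT_SYNTAX, CS_OPT_SYNTAX_ATT);

    size_t count = cs_disasm(handle, code, sizeof(code), 0x1000, 0, &insn);
    if (count > 0) {
        for (size_t i = 0; i < count; i++)
            printf("%s %s\n", insn[i].mnemonic, insn[i].op_str);
        cs_free(insn, count);
    }
    cs_close(&handle);
    return 0;
}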