1 # CS_ARCH_ARM, CS_MODE_ARM, None
3 0x80,0x0a,0x30,0xee = vadd.f32 s0, s1, s0
4 0xe0,0x0b,0x71,0xee = vsub.f64 d16, d17, d16
5 0xc0,0x0a,0x30,0xee = vsub.f32 s0, s1, s0
6 0xa0,0x0b,0xc1,0xee = vdiv.f64 d16, d17, d16
7 0x80,0x0a,0x80,0xee = vdiv.f32 s0, s1, s0
8 0xa3,0x2a,0xc2,0xee = vdiv.f32 s5, s5, s7
9 0x07,0x5b,0x85,0xee = vdiv.f64 d5, d5, d7
10 0xa0,0x0b,0x61,0xee = vmul.f64 d16, d17, d16
12 0x80,0x0a,0x20,0xee = vmul.f32 s0, s1, s0
14 0xe0,0x0b,0x61,0xee = vnmul.f64 d16, d17, d16
15 0xc0,0x0a,0x20,0xee = vnmul.f32 s0, s1, s0
16 0xe0,0x1b,0xf4,0xee = vcmpe.f64 d17, d16
17 0xc0,0x0a,0xf4,0xee = vcmpe.f32 s1, s0
18 0xc0,0x0b,0xf5,0xee = vcmpe.f64 d16, #0
19 0xc0,0x0a,0xb5,0xee = vcmpe.f32 s0, #0
20 0xe0,0x0b,0xf0,0xee = vabs.f64 d16, d16
21 0xc0,0x0a,0xb0,0xee = vabs.f32 s0, s0
22 0xe0,0x0b,0xb7,0xee = vcvt.f32.f64 s0, d16
23 0xc0,0x0a,0xf7,0xee = vcvt.f64.f32 d16, s0
24 0x60,0x0b,0xf1,0xee = vneg.f64 d16, d16
25 0x40,0x0a,0xb1,0xee = vneg.f32 s0, s0
26 0xe0,0x0b,0xf1,0xee = vsqrt.f64 d16, d16
27 0xc0,0x0a,0xb1,0xee = vsqrt.f32 s0, s0
28 0xc0,0x0b,0xf8,0xee = vcvt.f64.s32 d16, s0
29 0xc0,0x0a,0xb8,0xee = vcvt.f32.s32 s0, s0
30 0x40,0x0b,0xf8,0xee = vcvt.f64.u32 d16, s0
31 0x40,0x0a,0xb8,0xee = vcvt.f32.u32 s0, s0
32 0xe0,0x0b,0xbd,0xee = vcvt.s32.f64 s0, d16
33 0xc0,0x0a,0xbd,0xee = vcvt.s32.f32 s0, s0
34 0xe0,0x0b,0xbc,0xee = vcvt.u32.f64 s0, d16
35 0xc0,0x0a,0xbc,0xee = vcvt.u32.f32 s0, s0
37 0x00,0x0a,0x41,0xee = vmla.f32 s1, s2, s0
38 0xe1,0x0b,0x42,0xee = vmls.f64 d16, d18, d17
39 0x40,0x0a,0x41,0xee = vmls.f32 s1, s2, s0
40 0xe1,0x0b,0x52,0xee = vnmla.f64 d16, d18, d17
41 0x40,0x0a,0x51,0xee = vnmla.f32 s1, s2, s0
42 0xa1,0x0b,0x52,0xee = vnmls.f64 d16, d18, d17
43 0x00,0x0a,0x51,0xee = vnmls.f32 s1, s2, s0
44 0x10,0xfa,0xf1,0xee = vmrs APSR_nzcv, fpscr
45 0x10,0xfa,0xf1,0xee = vmrs APSR_nzcv, fpscr
46 0x10,0xfa,0xf1,0xee = vmrs APSR_nzcv, fpscr
48 0x10,0x3a,0xf0,0xee = vmrs r3, fpsid
51 0x60,0x0b,0xf1,0x1e = vnegne.f64 d16, d16
52 0x10,0x0a,0x00,0x1e = vmovne s0, r0
53 0x10,0x1a,0x00,0x0e = vmoveq s0, r1
58 0x10,0x0a,0xf1,0xee = vmrs r0, fpscr
60 0x10,0x0a,0xf0,0xee = vmrs r0, fpsid
63 0x10,0x0a,0xe1,0xee = vmsr fpscr, r0
64 0x10,0x0a,0xe8,0xee = vmsr fpexc, r0
65 0x10,0x0a,0xe0,0xee = vmsr fpsid, r0
66 0x10,0x3a,0xe9,0xee = vmsr fpinst, r3
68 0x08,0x0b,0xf0,0xee = vmov.f64 d16, #3.000000e+00
69 0x08,0x0a,0xb0,0xee = vmov.f32 s0, #3.000000e+00
70 0x08,0x0b,0xf8,0xee = vmov.f64 d16, #-3.000000e+00
71 0x08,0x0a,0xb8,0xee = vmov.f32 s0, #-3.000000e+00
89 0x00,0x1b,0xd0,0xed = vldr d17, [r0]
90 0x00,0x0a,0x9e,0xed = vldr s0, [lr]
91 0x00,0x0b,0x9e,0xed = vldr d0, [lr]
92 0x08,0x1b,0x92,0xed = vldr d1, [r2, #32]
93 0x08,0x1b,0x12,0xed = vldr d1, [r2, #-32]
94 0x00,0x2b,0x93,0xed = vldr d2, [r3]
95 0x00,0x3b,0x9f,0xed = vldr d3, [pc]
96 0x00,0x3b,0x9f,0xed = vldr d3, [pc]
97 0x00,0x3b,0x1f,0xed = vldr d3, [pc, #-0]
98 0x00,0x6a,0xd0,0xed = vldr s13, [r0]
99 0x08,0x0a,0xd2,0xed = vldr s1, [r2, #32]
100 0x08,0x0a,0x52,0xed = vldr s1, [r2, #-32]
101 0x00,0x1a,0x93,0xed = vldr s2, [r3]
102 0x00,0x2a,0xdf,0xed = vldr s5, [pc]
103 0x00,0x2a,0xdf,0xed = vldr s5, [pc]
104 0x00,0x2a,0x5f,0xed = vldr s5, [pc, #-0]
105 0x00,0x4b,0x81,0xed = vstr d4, [r1]
106 0x06,0x4b,0x81,0xed = vstr d4, [r1, #24]
107 0x06,0x4b,0x01,0xed = vstr d4, [r1, #-24]
108 0x00,0x0a,0x8e,0xed = vstr s0, [lr]
109 0x00,0x0b,0x8e,0xed = vstr d0, [lr]
110 0x00,0x2a,0x81,0xed = vstr s4, [r1]
111 0x06,0x2a,0x81,0xed = vstr s4, [r1, #24]
112 0x06,0x2a,0x01,0xed = vstr s4, [r1, #-24]
117 0x10,0x8b,0x2d,0xed = vpush {d8, d9, d10, d11, d12, d13, d14, d15}
118 0x07,0x0b,0xb5,0xec = fldmiax r5!, {d0, d1, d2}
119 0x05,0x4b,0x90,0x0c = fldmiaxeq r0, {d4, d5}
120 0x07,0x4b,0x35,0x1d = fldmdbxne r5!, {d4, d5, d6}
121 0x11,0x0b,0xa5,0xec = fstmiax r5!, {d0, d1, d2, d3, d4, d5, d6, d7}
122 0x05,0x8b,0x84,0x0c = fstmiaxeq r4, {d8, d9}
123 0x07,0x2b,0x27,0x1d = fstmdbxne r7!, {d2, d3, d4}
124 0x40,0x0b,0xbd,0xee = vcvtr.s32.f64 s0, d0
125 0x60,0x0a,0xbd,0xee = vcvtr.s32.f32 s0, s1
126 0x40,0x0b,0xbc,0xee = vcvtr.u32.f64 s0, d0
127 0x60,0x0a,0xbc,0xee = vcvtr.u32.f32 s0, s1
140 0xc6,0x0a,0xbb,0xee = vcvt.f32.u32 s0, s0, #20
141 0xc0,0x0b,0xba,0xee = vcvt.f64.s32 d0, d0, #32
142 0x67,0x0a,0xbb,0xee = vcvt.f32.u16 s0, s0, #1
143 0x40,0x0b,0xba,0xee = vcvt.f64.s16 d0, d0, #16
144 0xc6,0x0a,0xfa,0xee = vcvt.f32.s32 s1, s1, #20
145 0xc0,0x4b,0xfb,0xee = vcvt.f64.u32 d20, d20, #32
146 0x67,0x8a,0xfa,0xee = vcvt.f32.s16 s17, s17, #1
147 0x40,0x7b,0xfb,0xee = vcvt.f64.u16 d23, d23, #16
148 0xc6,0x6a,0xbf,0xee = vcvt.u32.f32 s12, s12, #20
149 0xc0,0x2b,0xbe,0xee = vcvt.s32.f64 d2, d2, #32
150 0x67,0xea,0xbf,0xee = vcvt.u16.f32 s28, s28, #1
151 0x40,0xfb,0xbe,0xee = vcvt.s16.f64 d15, d15, #16
152 0xc6,0x0a,0xfe,0xee = vcvt.s32.f32 s1, s1, #20
153 0xc0,0x4b,0xff,0xee = vcvt.u32.f64 d20, d20, #32
154 0x67,0x8a,0xfe,0xee = vcvt.s16.f32 s17, s17, #1
155 0x40,0x7b,0xff,0xee = vcvt.u16.f64 d23, d23, #16
156 0x10,0x40,0x80,0xf2 = vmov.i32 d4, #0x0
157 0x12,0x46,0x84,0xf2 = vmov.i32 d4, #0x42000000
static RzILOpEffect * vmov(cs_insn *insn, bool is_thumb)