Rizin
unix-like reverse engineering framework and cli tools
intervaltree.c
Go to the documentation of this file.
1 // SPDX-FileCopyrightText: 2019 thestr4ng3r <info@florianmaerkl.de>
2 // SPDX-License-Identifier: LGPL-3.0-only
3 
#include <rz_util/rz_intervaltree.h>
#include <rz_util/rz_assert.h>
6 
7 #define unwrap(rbnode) ((rbnode) ? container_of(rbnode, RzIntervalNode, node) : NULL)
8 
9 static void node_max(RBNode *node) {
10  RzIntervalNode *intervalnode = unwrap(node);
11  intervalnode->max_end = intervalnode->end;
12  int i;
13  for (i = 0; i < 2; i++) {
14  if (node->child[i]) {
15  ut64 end = unwrap(node->child[i])->max_end;
16  if (end > intervalnode->max_end) {
17  intervalnode->max_end = end;
18  }
19  }
20  }
21 }
22 
23 static int cmp(const void *incoming, const RBNode *in_tree, void *user) {
24  ut64 incoming_start = *(ut64 *)incoming;
25  ut64 other_start = container_of(in_tree, const RzIntervalNode, node)->start;
26  if (incoming_start < other_start) {
27  return -1;
28  }
29  if (incoming_start > other_start) {
30  return 1;
31  }
32  return 0;
33 }
34 
// like cmp, but handles searches for an exact RzIntervalNode * in the tree instead of only comparing the start values
// Because multiple nodes may share the same start, comparing starts alone cannot
// locate a specific node; this comparator uses the RBIter passed through `user`
// as a path cache to remember the exact root-to-node path on the first call,
// then steers subsequent comparisons along that path.
static int cmp_exact_node(const void *incoming, const RBNode *in_tree, void *user) {
	RzIntervalNode *incoming_node = (RzIntervalNode *)incoming;
	const RzIntervalNode *node = container_of(in_tree, const RzIntervalNode, node);
	if (node == incoming_node) {
		return 0;
	}
	// Different node: if the starts differ, plain ordering is enough.
	if (incoming_node->start < node->start) {
		return -1;
	}
	if (incoming_node->start > node->start) {
		return 1;
	}
	// Here we have the same start value, but a different pointer.
	// This means we need to guide the caller into the direction where the actual node is.
	// Since we have nothing to compare anymore, we have to iterate through all the same-start children to find the correct path.
	RBIter *path_cache = user;
	if (!path_cache->len) {
		// First time here for this search: build the path once and cache it.
		RBNode *cur = (RBNode *)&node->node;
		// go down to the leftmost child that has the same start
		while (cur) {
			path_cache->path[path_cache->len++] = cur;
			if (incoming_node->start <= unwrap(cur)->start) {
				cur = cur->child[0];
			} else {
				cur = cur->child[1];
			}
		}
		// iterate through all children with the same start and stop when the pointer is identical
		// The RBIter works a bit different than normal here. We store each node in the path, including right-descended ones
		// because we want to get the full path in the end.
		while (rz_rbtree_iter_has(path_cache)) {
			RzIntervalNode *intervalnode = rz_rbtree_iter_get(path_cache, RzIntervalNode, node);
			if (intervalnode == incoming_node || intervalnode->start > incoming_node->start) {
				break;
			}
			// rz_rbtree_iter_next does not work here
			RBNode *rbnode = &intervalnode->node;
			if (rbnode->child[1]) {
				// next node after the current is always the leftmost in the right branch
				for (rbnode = rbnode->child[1]; rbnode; rbnode = rbnode->child[0]) {
					path_cache->path[path_cache->len++] = rbnode;
				}
			} else {
				// if there is no right branch, go up
				do {
					rbnode = path_cache->path[--path_cache->len];
				} while (path_cache->len && path_cache->path[path_cache->len - 1]->child[1] == rbnode);
			}
		}
	}

	RBNode *next_child = NULL;
	// Go through the path to find the next node one step down
	// NOTE(review): if path_cache->len could ever be 0 here, `len - 1` would
	// underflow if len is unsigned — presumably the fill above guarantees the
	// path is non-empty whenever this point is reached; confirm against RBIter.
	size_t i;
	for (i = 0; i < path_cache->len - 1; i++) {
		if (unwrap(path_cache->path[i]) == node) {
			next_child = path_cache->path[i + 1];
			break;
		}
	}

	// Determine the direction from the next child node
	return (next_child && node->node.child[0] == next_child) ? -1 : 1;
}
100 
102  tree->root = NULL;
103  tree->free = free;
104 }
105 
106 static void interval_node_free(RBNode *node, void *user) {
107  RzIntervalNode *ragenode /* >:-O */ = unwrap(node);
108  if (user) {
109  ((RContRBFree)user)(ragenode->data);
110  }
111  free(ragenode);
112 }
113 
115  if (!tree || !tree->root) {
116  return;
117  }
119 }
120 
122  rz_return_val_if_fail(end >= start, false);
124  if (!node) {
125  return false;
126  }
127  node->start = start;
128  node->end = end;
129  node->data = data;
130  RBNode *root = tree->root ? &tree->root->node : NULL;
131  bool r = rz_rbtree_aug_insert(&root, &start, &node->node, cmp, NULL, node_max);
132  tree->root = unwrap(root);
133  if (!r) {
134  free(node);
135  }
136  return r;
137 }
138 
140  RBNode *root = &tree->root->node;
141  RBIter path_cache = { 0 };
142  bool r = rz_rbtree_aug_delete(&root, node, cmp_exact_node, &path_cache, interval_node_free, free ? tree->free : NULL, node_max);
143  tree->root = unwrap(root);
144  return r;
145 }
146 
147 RZ_API bool rz_interval_tree_resize(RzIntervalTree *tree, RzIntervalNode *node, ut64 new_start, ut64 new_end) {
148  rz_return_val_if_fail(new_end >= new_start, false);
149  if (node->start != new_start) {
150  // Start change means the tree needs a different structure
151  void *data = node->data;
152  if (!rz_interval_tree_delete(tree, node, false)) {
153  return false;
154  }
155  return rz_interval_tree_insert(tree, new_start, new_end, data);
156  }
157  if (node->end != new_end) {
158  // Only end change just needs the updated augmented max value to be propagated upwards
159  node->end = new_end;
160  RBIter path_cache = { 0 };
161  return rz_rbtree_aug_update_sum(&tree->root->node, node, &node->node, cmp_exact_node, &path_cache, node_max);
162  }
163  // no change
164  return true;
165 }
166 
167 // This must always return the topmost node that matches start!
168 // Otherwise rz_interval_tree_first_at will break!!!
170  RzIntervalNode *node = tree->root;
171  while (node) {
172  if (start < node->start) {
173  node = unwrap(node->node.child[0]);
174  } else if (start > node->start) {
175  node = unwrap(node->node.child[1]);
176  } else {
177  return node;
178  }
179  }
180  return NULL;
181 }
182 
184  RBIter it = { 0 };
185 
186  // Find the topmost node matching start so we have a sub-tree with all entries that we want to find.
187  RzIntervalNode *top_intervalnode = rz_interval_tree_node_at(tree, start);
188  if (!top_intervalnode) {
189  return it;
190  }
191 
192  // If there are more nodes with the same key, they can be in both children.
193  RBNode *node = &top_intervalnode->node;
194  while (node) {
195  if (start <= unwrap(node)->start) {
196  it.path[it.len++] = node;
197  node = node->child[0];
198  } else {
199  node = node->child[1];
200  }
201  }
202 
203  return it;
204 }
205 
208  while (rz_rbtree_iter_has(&it)) {
209  RzIntervalNode *intervalnode = rz_rbtree_iter_get(&it, RzIntervalNode, node);
210  if (intervalnode->start != start) {
211  break;
212  }
213  if (intervalnode->data == data) {
214  return intervalnode;
215  }
216  rz_rbtree_iter_next(&it);
217  }
218  return NULL;
219 }
220 
223  bool ret = true;
224  while (rz_rbtree_iter_has(&it)) {
225  RzIntervalNode *intervalnode = rz_rbtree_iter_get(&it, RzIntervalNode, node);
226  if (intervalnode->start != start) {
227  break;
228  }
229  ret = cb(intervalnode, user);
230  if (!ret) {
231  break;
232  }
233  rz_rbtree_iter_next(&it);
234  }
235  return ret;
236 }
237 
238 RZ_API bool rz_interval_node_all_in(RzIntervalNode *node, ut64 value, bool end_inclusive, RzIntervalIterCb cb, void *user) {
239  while (node && value < node->start) {
240  // less than the current node, but might still be contained further down
241  node = unwrap(node->node.child[0]);
242  }
243  if (!node) {
244  return true;
245  }
246  if (end_inclusive ? value > node->max_end : value >= node->max_end) {
247  return true;
248  }
249  if (end_inclusive ? value <= node->end : value < node->end) {
250  if (!cb(node, user)) {
251  return false;
252  }
253  }
254  // This can be done more efficiently by building the stack manually
255  bool ret = rz_interval_node_all_in(unwrap(node->node.child[0]), value, end_inclusive, cb, user);
256  if (!ret) {
257  return false;
258  }
259  return rz_interval_node_all_in(unwrap(node->node.child[1]), value, end_inclusive, cb, user);
260 }
261 
262 RZ_API bool rz_interval_tree_all_in(RzIntervalTree *tree, ut64 value, bool end_inclusive, RzIntervalIterCb cb, void *user) {
263  // all in! 🂡
264  return rz_interval_node_all_in(tree->root, value, end_inclusive, cb, user);
265 }
266 
267 static bool rz_interval_node_all_intersect(RzIntervalNode *node, ut64 start, ut64 end, bool end_inclusive, RzIntervalIterCb cb, void *user) {
268  rz_return_val_if_fail(end >= start, true);
269  while (node && (end_inclusive ? end < node->start : end <= node->start)) {
270  // less than the current node, but might still be contained further down
271  node = unwrap(node->node.child[0]);
272  }
273  if (!node) {
274  return true;
275  }
276  if (end_inclusive ? start > node->max_end : start >= node->max_end) {
277  return true;
278  }
279  if (end_inclusive ? start <= node->end : start < node->end) {
280  if (!cb(node, user)) {
281  return false;
282  }
283  }
284  // This can be done more efficiently by building the stack manually
285  if (!rz_interval_node_all_intersect(unwrap(node->node.child[0]), start, end, end_inclusive, cb, user)) {
286  return false;
287  }
288  return rz_interval_node_all_intersect(unwrap(node->node.child[1]), start, end, end_inclusive, cb, user);
289 }
290 
291 RZ_API bool rz_interval_tree_all_intersect(RzIntervalTree *tree, ut64 start, ut64 end, bool end_inclusive, RzIntervalIterCb cb, void *user) {
292  return rz_interval_node_all_intersect(tree->root, start, end, end_inclusive, cb, user);
293 }
lzma_index ** i
Definition: index.h:629
static int value
Definition: cmd_api.c:93
#define RZ_API
#define NULL
Definition: cris-opc.c:27
#define r
Definition: crypto_rc6.c:12
static static sync static getppid static getegid const char static filename char static len const char char static bufsiz static mask static vfork const void static prot static getpgrp const char static swapflags static arg static fd static protocol static who struct sockaddr static addrlen static backlog struct timeval struct timezone static tz const struct iovec static count static mode const void const struct sockaddr static tolen const char static pathname void static offset struct stat static buf void long static basep static whence static length const void static len static semflg const void static shmflg const struct timespec struct timespec static rem const char static group const void start
Definition: sflib.h:133
int root
Definition: enough.c:226
RZ_API void Ht_() free(HtName_(Ht) *ht)
Definition: ht_inc.c:130
RZ_API bool rz_interval_tree_all_intersect(RzIntervalTree *tree, ut64 start, ut64 end, bool end_inclusive, RzIntervalIterCb cb, void *user)
Definition: intervaltree.c:291
static int cmp(const void *incoming, const RBNode *in_tree, void *user)
Definition: intervaltree.c:23
#define unwrap(rbnode)
Definition: intervaltree.c:7
RZ_API bool rz_interval_tree_delete(RzIntervalTree *tree, RzIntervalNode *node, bool free)
Definition: intervaltree.c:139
RZ_API bool rz_interval_node_all_in(RzIntervalNode *node, ut64 value, bool end_inclusive, RzIntervalIterCb cb, void *user)
Definition: intervaltree.c:238
RZ_API RzIntervalNode * rz_interval_tree_node_at(RzIntervalTree *tree, ut64 start)
Definition: intervaltree.c:169
RZ_API bool rz_interval_tree_all_in(RzIntervalTree *tree, ut64 value, bool end_inclusive, RzIntervalIterCb cb, void *user)
Definition: intervaltree.c:262
static int cmp_exact_node(const void *incoming, const RBNode *in_tree, void *user)
Definition: intervaltree.c:36
RZ_API RzIntervalNode * rz_interval_tree_node_at_data(RzIntervalTree *tree, ut64 start, void *data)
Definition: intervaltree.c:206
static void node_max(RBNode *node)
Definition: intervaltree.c:9
RZ_API bool rz_interval_tree_resize(RzIntervalTree *tree, RzIntervalNode *node, ut64 new_start, ut64 new_end)
Definition: intervaltree.c:147
RZ_API bool rz_interval_tree_all_at(RzIntervalTree *tree, ut64 start, RzIntervalIterCb cb, void *user)
Definition: intervaltree.c:221
RZ_API void rz_interval_tree_init(RzIntervalTree *tree, RzIntervalNodeFree free)
Definition: intervaltree.c:101
RZ_API bool rz_interval_tree_insert(RzIntervalTree *tree, ut64 start, ut64 end, void *data)
Definition: intervaltree.c:121
RZ_API RBIter rz_interval_tree_first_at(RzIntervalTree *tree, ut64 start)
Definition: intervaltree.c:183
static void interval_node_free(RBNode *node, void *user)
Definition: intervaltree.c:106
static bool rz_interval_node_all_intersect(RzIntervalNode *node, ut64 start, ut64 end, bool end_inclusive, RzIntervalIterCb cb, void *user)
Definition: intervaltree.c:267
RZ_API void rz_interval_tree_fini(RzIntervalTree *tree)
Definition: intervaltree.c:114
while(len< limit &&buf1[len]==buf2[len])++len
#define rz_return_val_if_fail(expr, val)
Definition: rz_assert.h:108
void(* RzIntervalNodeFree)(void *data)
bool(* RzIntervalIterCb)(RzIntervalNode *node, void *user)
#define rz_rbtree_iter_get(it, struc, rb)
Definition: rz_rbtree.h:85
RZ_API void rz_rbtree_free(RZ_NULLABLE RBNode *root, RBNodeFree freefn, void *user)
Definition: rbtree.c:281
void(* RContRBFree)(void *)
Definition: rz_rbtree.h:47
RZ_API bool rz_rbtree_aug_update_sum(RBNode *root, void *data, RBNode *node, RBComparator cmp, void *cmp_user, RBNodeSum sum)
Returns true if the sum has been updated, false if node has not been found.
Definition: rbtree.c:235
#define rz_rbtree_iter_has(it)
Definition: rz_rbtree.h:87
RZ_API bool rz_rbtree_aug_delete(RBNode **root, void *data, RBComparator cmp, void *cmp_user, RBNodeFree freefn, void *free_user, RBNodeSum sum)
Returns true if a node with an equal key is deleted.
Definition: rbtree.c:67
RZ_API void rz_rbtree_iter_next(RBIter *it)
Definition: rbtree.c:355
RZ_API bool rz_rbtree_aug_insert(RBNode **root, void *data, RBNode *node, RBComparator cmp, void *cmp_user, RBNodeSum sum)
Returns true if the node was inserted successfully.
Definition: rbtree.c:163
#define RZ_NEW0(x)
Definition: rz_types.h:284
#define container_of(ptr, type, member)
Definition: rz_types.h:650
RzIntervalNodeFree free
RzIntervalNode * root
RBNode * path[RZ_RBTREE_MAX_HEIGHT]
Definition: rz_rbtree.h:43
struct rz_rb_node_t * child[2]
Definition: rz_rbtree.h:20
ut64(WINAPI *w32_GetEnabledXStateFeatures)()
static const char * cb[]
Definition: z80_tab.h:176