php-internal-docs 8.4.8
Unofficial docs for php/php-src
Loading...
Searching...
No Matches
ir_private.h
Go to the documentation of this file.
1/*
2 * IR - Lightweight JIT Compilation Framework
3 * (Common data structures and non public definitions)
4 * Copyright (C) 2022 Zend by Perforce.
5 * Authors: Dmitry Stogov <dmitry@php.net>
6 */
7
8#ifndef IR_PRIVATE_H
9#define IR_PRIVATE_H
10#include <string.h>
11#include <stdlib.h>
12
13#ifdef IR_DEBUG
14# include <assert.h>
15# define IR_ASSERT(x) assert(x)
16#else
17# define IR_ASSERT(x)
18#endif
19
20#ifdef _WIN32
21# include <intrin.h>
22# ifdef _M_X64
23# pragma intrinsic(_BitScanForward64)
24# pragma intrinsic(_BitScanReverse64)
25# endif
26# pragma intrinsic(_BitScanForward)
27# pragma intrinsic(_BitScanReverse)
28#endif
29
30#ifdef __has_builtin
31# if __has_builtin(__builtin_expect)
32# define EXPECTED(condition) __builtin_expect(!!(condition), 1)
33# define UNEXPECTED(condition) __builtin_expect(!!(condition), 0)
34# endif
35# if __has_attribute(__aligned__)
36# define IR_SET_ALIGNED(alignment, decl) decl __attribute__ ((__aligned__ (alignment)))
37# endif
38# if __has_attribute(__fallthrough__)
39# define IR_FALLTHROUGH __attribute__((__fallthrough__))
40# endif
41#elif defined(_WIN32)
42# define IR_SET_ALIGNED(alignment, decl) __declspec(align(alignment)) decl
43#else /* GCC prior to 10 or non-clang/msvc compilers */
44#define __has_builtin(x) 0
45#endif
46#ifndef EXPECTED
47# define EXPECTED(condition) (condition)
48# define UNEXPECTED(condition) (condition)
49#endif
50#ifndef IR_SET_ALIGNED
51# define IR_SET_ALIGNED(alignment, decl) decl
52#endif
53#ifndef IR_FALLTHROUGH
54# define IR_FALLTHROUGH ((void)0)
55#endif
56
57/*** Helper routines ***/
58
59#define IR_ALIGNED_SIZE(size, alignment) \
60 (((size) + ((alignment) - 1)) & ~((alignment) - 1))
61
62#define IR_MAX(a, b) (((a) > (b)) ? (a) : (b))
63#define IR_MIN(a, b) (((a) < (b)) ? (a) : (b))
64
65#define IR_IS_POWER_OF_TWO(x) (!((x) & ((x) - 1)))
66
67#define IR_LOG2(x) ir_ntzl(x)
68
/* Rotate an 8-bit value left by op2 bits.
 * NOTE(review): assumes op2 is in range (0..8); thanks to integer promotion
 * the intermediate shifts are performed on int, so the boundary counts do
 * not shift past the promoted width — confirm callers never pass more. */
IR_ALWAYS_INLINE uint8_t ir_rol8(uint8_t op1, uint8_t op2)
{
	return (op1 << op2) | (op1 >> (8 - op2));
}
73
/* Rotate a 16-bit value left by op2 bits.
 * NOTE(review): assumes op2 is in range (0..16); integer promotion keeps the
 * intermediate shifts within the promoted int width. */
IR_ALWAYS_INLINE uint16_t ir_rol16(uint16_t op1, uint16_t op2)
{
	return (op1 << op2) | (op1 >> (16 - op2));
}
78
79IR_ALWAYS_INLINE uint32_t ir_rol32(uint32_t op1, uint32_t op2)
80{
81 return (op1 << op2) | (op1 >> (32 - op2));
82}
83
84IR_ALWAYS_INLINE uint64_t ir_rol64(uint64_t op1, uint64_t op2)
85{
86 return (op1 << op2) | (op1 >> (64 - op2));
87}
88
/* Rotate an 8-bit value right by op2 bits.
 * NOTE(review): assumes op2 is in range (0..8); integer promotion keeps the
 * intermediate shifts within the promoted int width. */
IR_ALWAYS_INLINE uint8_t ir_ror8(uint8_t op1, uint8_t op2)
{
	return (op1 >> op2) | (op1 << (8 - op2));
}
93
/* Rotate a 16-bit value right by op2 bits.
 * NOTE(review): assumes op2 is in range (0..16); integer promotion keeps the
 * intermediate shifts within the promoted int width. */
IR_ALWAYS_INLINE uint16_t ir_ror16(uint16_t op1, uint16_t op2)
{
	return (op1 >> op2) | (op1 << (16 - op2));
}
98
99IR_ALWAYS_INLINE uint32_t ir_ror32(uint32_t op1, uint32_t op2)
100{
101 return (op1 >> op2) | (op1 << (32 - op2));
102}
103
104IR_ALWAYS_INLINE uint64_t ir_ror64(uint64_t op1, uint64_t op2)
105{
106 return (op1 >> op2) | (op1 << (64 - op2));
107}
108
/* Number of trailing zero bits (0x01 -> 0; 0x40 -> 6; 0x00 -> LEN) */
IR_ALWAYS_INLINE uint32_t ir_ntz(uint32_t num)
{
#if (defined(__GNUC__) || __has_builtin(__builtin_ctz))
	/* __builtin_ctz(0) is undefined, consistent with the notes below. */
	return __builtin_ctz(num);
#elif defined(_WIN32)
	uint32_t index;

	if (!_BitScanForward(&index, num)) {
		/* undefined behavior */
		return 32;
	}

	return index;
#else
	/* Portable binary-search fallback: successively discard all-zero low
	 * halves, accumulating the number of bits skipped. */
	int n;

	if (num == 0) return 32;

	n = 1;
	if ((num & 0x0000ffff) == 0) {n += 16; num = num >> 16;}
	if ((num & 0x000000ff) == 0) {n += 8; num = num >> 8;}
	if ((num & 0x0000000f) == 0) {n += 4; num = num >> 4;}
	if ((num & 0x00000003) == 0) {n += 2; num = num >> 2;}
	/* n over-counts by one when the lowest remaining bit is set */
	return n - (num & 1);
#endif
}
136
/* Number of trailing zero bits (0x01 -> 0; 0x40 -> 6; 0x00 -> LEN) */
IR_ALWAYS_INLINE uint32_t ir_ntzl(uint64_t num)
{
	// Note that the _WIN64 case should come before __has_builtin() below so that
	// clang-cl on Windows will use the uint64_t version, not the "long" uint32_t
	// version.
#if defined(_WIN64)
	unsigned long index;

	if (!_BitScanForward64(&index, num)) {
		/* undefined behavior */
		return 64;
	}

	return (uint32_t) index;
#elif (defined(__GNUC__) || __has_builtin(__builtin_ctzl))
	/* __builtin_ctzl(0) is undefined, consistent with the notes above. */
	return __builtin_ctzl(num);
#else
	/* Portable binary-search fallback: successively discard all-zero low
	 * halves, accumulating the number of bits skipped (see ir_ntz). */
	uint32_t n;

	if (num == 0) return 64;

	n = 1;
	if ((num & 0xffffffff) == 0) {n += 32; num = num >> 32;}
	if ((num & 0x0000ffff) == 0) {n += 16; num = num >> 16;}
	if ((num & 0x000000ff) == 0) {n += 8; num = num >> 8;}
	if ((num & 0x0000000f) == 0) {n += 4; num = num >> 4;}
	if ((num & 0x00000003) == 0) {n += 2; num = num >> 2;}
	/* n over-counts by one when the lowest remaining bit is set */
	return n - (uint32_t)(num & 1);
#endif
}
168
/* Number of leading zero bits (Undefined for zero) */
IR_ALWAYS_INLINE int ir_nlz(uint32_t num)
{
#if (defined(__GNUC__) || __has_builtin(__builtin_clz))
	return __builtin_clz(num);
#elif defined(_WIN32)
	uint32_t index;

	if (!_BitScanReverse(&index, num)) {
		/* undefined behavior */
		return 32;
	}

	/* _BitScanReverse yields the index of the highest set bit;
	 * convert it to a leading-zero count. */
	return (int) (32 - 1) - index;
#else
	/* Portable binary-search fallback: keep the non-zero high half,
	 * subtracting the number of bits it was shifted down by. */
	uint32_t x;
	uint32_t n;

	n = 32;
	x = num >> 16; if (x != 0) {n -= 16; num = x;}
	x = num >> 8; if (x != 0) {n -= 8; num = x;}
	x = num >> 4; if (x != 0) {n -= 4; num = x;}
	x = num >> 2; if (x != 0) {n -= 2; num = x;}
	x = num >> 1; if (x != 0) return n - 2;
	return n - num;
#endif
}
196
/* Number of leading zero bits of a 64-bit value (Undefined for zero). */
IR_ALWAYS_INLINE int ir_nlzl(uint64_t num)
{
#if (defined(__GNUC__) || __has_builtin(__builtin_clzll))
	return __builtin_clzll(num);
#elif defined(_WIN64)
	unsigned long index;

	if (!_BitScanReverse64(&index, num)) {
		/* undefined behavior */
		return 64;
	}

	/* _BitScanReverse64 yields the index of the highest set bit;
	 * convert it to a leading-zero count. */
	return (int) (64 - 1) - index;
#else
	/* Portable binary-search fallback: keep the non-zero high half,
	 * subtracting the number of bits it was shifted down by (see ir_nlz). */
	uint64_t x;
	uint32_t n;

	n = 64;
	x = num >> 32; if (x != 0) {n -= 32; num = x;}
	x = num >> 16; if (x != 0) {n -= 16; num = x;}
	x = num >> 8; if (x != 0) {n -= 8; num = x;}
	x = num >> 4; if (x != 0) {n -= 4; num = x;}
	x = num >> 2; if (x != 0) {n -= 2; num = x;}
	x = num >> 1; if (x != 0) return n - 2;
	return n - (uint32_t)num;
#endif
}
224
225/*** Helper data types ***/
226
227/* Arena */
228struct _ir_arena {
229 char *ptr;
230 char *end;
232};
233
235{
237
238 IR_ASSERT(size >= IR_ALIGNED_SIZE(sizeof(ir_arena), 8));
240 arena->ptr = (char*) arena + IR_ALIGNED_SIZE(sizeof(ir_arena), 8);
241 arena->end = (char*) arena + size;
242 arena->prev = NULL;
243 return arena;
244}
245
247{
248 do {
249 ir_arena *prev = arena->prev;
251 arena = prev;
252 } while (arena);
253}
254
256{
257 ir_arena *arena = *arena_ptr;
258 char *ptr = arena->ptr;
259
261
262 if (EXPECTED(size <= (size_t)(arena->end - ptr))) {
263 arena->ptr = ptr + size;
264 } else {
265 size_t arena_size =
266 UNEXPECTED((size + IR_ALIGNED_SIZE(sizeof(ir_arena), 8)) > (size_t)(arena->end - (char*) arena)) ?
267 (size + IR_ALIGNED_SIZE(sizeof(ir_arena), 8)) :
268 (size_t)(arena->end - (char*) arena);
269 ir_arena *new_arena = (ir_arena*)ir_mem_malloc(arena_size);
270
271 ptr = (char*) new_arena + IR_ALIGNED_SIZE(sizeof(ir_arena), 8);
272 new_arena->ptr = (char*) new_arena + IR_ALIGNED_SIZE(sizeof(ir_arena), 8) + size;
273 new_arena->end = (char*) new_arena + arena_size;
274 new_arena->prev = arena;
275 *arena_ptr = new_arena;
276 }
277
278 return (void*) ptr;
279}
280
282{
283 return arena->ptr;
284}
285
286IR_ALWAYS_INLINE void ir_release(ir_arena **arena_ptr, void *checkpoint)
287{
288 ir_arena *arena = *arena_ptr;
289
290 while (UNEXPECTED((char*)checkpoint > arena->end) ||
291 UNEXPECTED((char*)checkpoint <= (char*)arena)) {
292 ir_arena *prev = arena->prev;
294 *arena_ptr = arena = prev;
295 }
296 IR_ASSERT((char*)checkpoint > (char*)arena && (char*)checkpoint <= arena->end);
297 arena->ptr = (char*)checkpoint;
298}
299
/* Bitsets */
/* A bitset is a flat array of machine words; 32-bit words on x86,
 * 64-bit words elsewhere. */
#if defined(IR_TARGET_X86)
# define IR_BITSET_BITS 32
# define IR_BITSET_ONE 1U
# define ir_bitset_base_t uint32_t
# define ir_bitset_ntz ir_ntz
#else
# define IR_BITSET_BITS 64
# ifdef _M_X64 /* MSVC*/
# define IR_BITSET_ONE 1ui64
# else
/* NOTE(review): 1UL is 64-bit only on LP64 platforms; a non-MSVC LLP64
 * target (e.g. MinGW-w64) would get a 32-bit constant here — confirm such
 * targets always take the _M_X64 branch, or consider 1ULL. */
# define IR_BITSET_ONE 1UL
# endif
# define ir_bitset_base_t uint64_t
# define ir_bitset_ntz ir_ntzl
#endif
316
318
320{
321 return (n + (IR_BITSET_BITS - 1)) / IR_BITSET_BITS;
322}
323
328
330{
332}
333
335{
336 set[n / IR_BITSET_BITS] &= ~(IR_BITSET_ONE << (n % IR_BITSET_BITS));
337}
338
339IR_ALWAYS_INLINE bool ir_bitset_in(const ir_bitset set, uint32_t n)
340{
341 return (set[(n / IR_BITSET_BITS)] & (IR_BITSET_ONE << (n % IR_BITSET_BITS))) != 0;
342}
343
345{
346 memset(set, 0, len * (IR_BITSET_BITS / 8));
347}
348
350{
351 memset(set, 0xff, len * (IR_BITSET_BITS / 8));
352}
353
355{
356 uint32_t i;
357 for (i = 0; i < len; i++) {
358 if (set[i]) {
359 return 0;
360 }
361 }
362 return 1;
363}
364
365IR_ALWAYS_INLINE bool ir_bitset_equal(const ir_bitset set1, const ir_bitset set2, uint32_t len)
366{
367 return memcmp(set1, set2, len * (IR_BITSET_BITS / 8)) == 0;
368}
369
370IR_ALWAYS_INLINE void ir_bitset_copy(ir_bitset set1, const ir_bitset set2, uint32_t len)
371{
372 memcpy(set1, set2, len * (IR_BITSET_BITS / 8));
373}
374
376{
377 uint32_t i;
378
379 for (i = 0; i < len; i++) {
380 set1[i] &= set2[i];
381 }
382}
383
385{
386 uint32_t i;
387
388 for (i = 0; i < len; i++) {
389 set1[i] |= set2[i];
390 }
391}
392
394{
395 uint32_t i;
396
397 for (i = 0; i < len; i++) {
398 set1[i] = set1[i] & ~set2[i];
399 }
400}
401
402IR_ALWAYS_INLINE bool ir_bitset_is_subset(const ir_bitset set1, const ir_bitset set2, uint32_t len)
403{
404 uint32_t i;
405
406 for (i = 0; i < len; i++) {
407 if (set1[i] & ~set2[i]) {
408 return 0;
409 }
410 }
411 return 1;
412}
413
415{
416 uint32_t i;
417
418 for (i = 0; i < len; i++) {
419 if (set[i]) {
420 return IR_BITSET_BITS * i + ir_bitset_ntz(set[i]);
421 }
422 }
423 return -1; /* empty set */
424}
425
427{
428 uint32_t i = len;
429
430 while (i > 0) {
431 i--;
432 if (set[i]) {
433 uint32_t j = IR_BITSET_BITS * i - 1;
434 ir_bitset_base_t x = set[i];
435 do {
436 x = x >> 1;
437 j++;
438 } while (x != 0);
439 return j;
440 }
441 }
442 return -1; /* empty set */
443}
444
446{
447 uint32_t i;
448
449 for (i = 0; i < len; i++) {
450 ir_bitset_base_t x = set[i];
451
452 if (x) {
453 int bit = IR_BITSET_BITS * i + ir_bitset_ntz(x);
454 set[i] = x & (x - 1);
455 return bit;
456 }
457 }
458 return -1; /* empty set */
459}
460
/* Iterate over every set bit of 'set' ('len' words), binding its index to
 * 'bit' in increasing order.  The loop body follows this macro and MUST be
 * terminated with IR_BITSET_FOREACH_END().  Each step clears the lowest
 * remaining bit of a local word copy (_x &= _x - 1), so mutating the set
 * itself inside the body does not disturb traversal of the current word. */
#define IR_BITSET_FOREACH(set, len, bit) do { \
	ir_bitset _set = (set); \
	uint32_t _i, _len = (len); \
	for (_i = 0; _i < _len; _set++, _i++) { \
		ir_bitset_base_t _x = *_set; \
		while (_x) { \
			(bit) = IR_BITSET_BITS * _i + ir_bitset_ntz(_x); \
			_x &= _x - 1;

/* Same as IR_BITSET_FOREACH, but visits only the bits present in 'set1'
 * and absent from 'set2' (the set difference set1 \ set2). */
#define IR_BITSET_FOREACH_DIFFERENCE(set1, set2, len, bit) do { \
	ir_bitset _set1 = (set1); \
	ir_bitset _set2 = (set2); \
	uint32_t _i, _len = (len); \
	for (_i = 0; _i < _len; _i++) { \
		ir_bitset_base_t _x = _set1[_i] & ~_set2[_i]; \
		while (_x) { \
			(bit) = IR_BITSET_BITS * _i + ir_bitset_ntz(_x); \
			_x &= _x - 1;

/* Closes the loops opened by the IR_BITSET_FOREACH*() macros above. */
#define IR_BITSET_FOREACH_END() \
		} \
	} \
} while (0)
484
485/* Sparse Set */
486typedef struct _ir_sparse_set {
487 uint32_t size;
488 uint32_t len;
489 uint32_t *data;
491
492#define IR_SPARSE_SET_DENSE(set, n) (set)->data[n]
493#define IR_SPARSE_SET_SPARSE(set, n) (set)->data[-1 - ((int32_t)(n))]
494
496{
497 set->size = size;
498 set->len = 0;
499 set->data = (uint32_t*)ir_mem_malloc(sizeof(uint32_t) * 2 * size) + size;
500#ifdef IR_DEBUG
501 /* initialize sparse part to avoid valgrind warnings */
502 memset(&IR_SPARSE_SET_SPARSE(set, size - 1), 0, size * sizeof(uint32_t));
503#endif
504}
505
507{
508 set->len = 0;
509}
510
515
517{
518 return set->len == 0;
519}
520
522{
523 uint32_t idx = IR_SPARSE_SET_SPARSE(set, n);
524
525 return idx < set->len && IR_SPARSE_SET_DENSE(set, idx) == n;
526}
527
529{
530 uint32_t idx;
531
533 idx = set->len++;
534 IR_SPARSE_SET_DENSE(set, idx) = n;
535 IR_SPARSE_SET_SPARSE(set, n) = idx;
536}
537
539{
540 uint32_t last;
541
543 last = IR_SPARSE_SET_DENSE(set, set->len - 1);
544 if (last != n) {
545 uint32_t idx = IR_SPARSE_SET_SPARSE(set, n);
546
547 IR_SPARSE_SET_DENSE(set, idx) = last;
548 IR_SPARSE_SET_SPARSE(set, last) = idx;
549
550 }
551 set->len--;
552}
553
555{
556 if (set->len > 0) {
557 set->len--;
558 return IR_SPARSE_SET_DENSE(set, set->len);
559 }
560 return -1; /* empty set */
561}
562
/* Iterate over the dense part of a sparse set, binding each element to
 * 'bit'.  Terminate with IR_SPARSE_SET_FOREACH_END().  The length is
 * cached up front, so elements added during iteration are not visited. */
#define IR_SPARSE_SET_FOREACH(set, bit) do { \
	ir_sparse_set *_set = (set); \
	uint32_t _i, _len = _set->len; \
	uint32_t *_p = _set->data; \
	for (_i = 0; _i < _len; _p++, _i++) { \
		(bit) = *_p; \

/* Closes the loop opened by IR_SPARSE_SET_FOREACH(). */
#define IR_SPARSE_SET_FOREACH_END() \
	} \
} while (0)
573
574/* Bit Queue */
575typedef struct _ir_bitqueue {
576 uint32_t len;
577 uint32_t pos;
580
582{
583 q->len = ir_bitset_len(n);
584 q->pos = q->len - 1;
585 q->set = ir_bitset_malloc(n);
586}
587
589{
590 uint32_t len = ir_bitset_len(n);
591 IR_ASSERT(len >= q->len);
592 if (len > q->len) {
593 q->set = ir_mem_realloc(q->set, len * (IR_BITSET_BITS / 8));
594 memset(q->set + q->len, 0, (len - q->len) * (IR_BITSET_BITS / 8));
595 q->len = len;
596 }
597}
598
603
605{
606 q->pos = q->len - 1;
607 ir_bitset_clear(q->set, q->len);
608}
609
611{
612 uint32_t i = q->pos;
613 ir_bitset_base_t x, *p = q->set + i;
614 do {
615 x = *p;
616 if (x) {
617 int bit = IR_BITSET_BITS * i + ir_bitset_ntz(x);
618 *p = x & (x - 1);
619 q->pos = i;
620 return bit;
621 }
622 p++;
623 i++;
624 } while (i < q->len);
625 q->pos = q->len - 1;
626 return -1; /* empty set */
627}
628
630{
631 uint32_t i = n / IR_BITSET_BITS;
632 q->set[i] |= IR_BITSET_ONE << (n % IR_BITSET_BITS);
633 if (i < q->pos) {
634 q->pos = i;
635 }
636}
637
639{
640 ir_bitset_excl(q->set, n);
641}
642
644{
645 return ir_bitset_in(q->set, n);
646}
647
648/* Dynamic array of numeric references */
649typedef struct _ir_array {
651 uint32_t size;
653
654void ir_array_grow(ir_array *a, uint32_t size);
655void ir_array_insert(ir_array *a, uint32_t i, ir_ref val);
656void ir_array_remove(ir_array *a, uint32_t i);
657
659{
660 a->refs = ir_mem_malloc(size * sizeof(ir_ref));
661 a->size = size;
662}
663
665{
666 ir_mem_free(a->refs);
667 a->refs = NULL;
668 a->size = 0;
669}
670
672{
673 return a->size;
674}
675
677{
678 return (i < a->size) ? a->refs[i] : IR_UNUSED;
679}
680
682{
683 IR_ASSERT(i < a->size);
684 return a->refs[i];
685}
686
688{
689 if (i >= a->size) {
690 ir_array_grow(a, i + 1);
691 }
692 a->refs[i] = val;
693}
694
696{
697 IR_ASSERT(i < a->size);
698 a->refs[i] = val;
699}
700
701/* List/Stack of numeric references */
702typedef struct _ir_list {
704 uint32_t len;
706
707uint32_t ir_list_find(const ir_list *l, ir_ref val);
708void ir_list_insert(ir_list *l, uint32_t i, ir_ref val);
709void ir_list_remove(ir_list *l, uint32_t i);
710
712{
713 ir_array_init(&l->a, size);
714 l->len = 0;
715}
716
718{
719 ir_array_free(&l->a);
720 l->len = 0;
721}
722
724{
725 l->len = 0;
726}
727
729{
730 return l->len;
731}
732
734{
735 return ir_array_size(&l->a);
736}
737
739{
740 ir_array_set(&l->a, l->len++, val);
741}
742
747
749{
750 IR_ASSERT(l->len > 0);
751 return ir_array_at(&l->a, --l->len);
752}
753
755{
756 IR_ASSERT(l->len > 0);
757 return ir_array_at(&l->a, l->len - 1);
758}
759
761{
762 IR_ASSERT(i < l->len);
763 return ir_array_at(&l->a, i);
764}
765
767{
768 IR_ASSERT(i < l->len);
770}
771
772/* Doesn't preserve order */
774{
775 IR_ASSERT(i < l->len);
776 l->len--;
777 ir_array_set_unchecked(&l->a, i, ir_array_at(&l->a, l->len));
778}
779
781{
782 return ir_list_find(l, val) != (uint32_t)-1;
783}
784
785/* Worklist (unique list) */
790
796
802
804{
805 return ir_list_len(&w->l);
806}
807
809{
810 return ir_list_capasity(&w->l);
811}
812
818
820{
821 IR_ASSERT(val >= 0 && (uint32_t)val < ir_worklist_capasity(w));
822 if (ir_bitset_in(w->visited, val)) {
823 return 0;
824 }
828 return 1;
829}
830
835
840
841/* IR Hash Table */
842#define IR_INVALID_IDX 0xffffffff
843#define IR_INVALID_VAL 0x80000000
844
850
851typedef struct _ir_hashtab {
852 void *data;
853 uint32_t mask;
854 uint32_t size;
855 uint32_t count;
856 uint32_t pos;
858
859void ir_hashtab_init(ir_hashtab *tab, uint32_t size);
860void ir_hashtab_free(ir_hashtab *tab);
861ir_ref ir_hashtab_find(const ir_hashtab *tab, uint32_t key);
862bool ir_hashtab_add(ir_hashtab *tab, uint32_t key, ir_ref val);
864
865/* IR Addr Table */
871
872void ir_addrtab_init(ir_hashtab *tab, uint32_t size);
873void ir_addrtab_free(ir_hashtab *tab);
874ir_ref ir_addrtab_find(const ir_hashtab *tab, uint64_t key);
875void ir_addrtab_set(ir_hashtab *tab, uint64_t key, ir_ref val);
876
877/*** IR OP info ***/
878extern const uint8_t ir_type_flags[IR_LAST_TYPE];
879extern const char *ir_type_name[IR_LAST_TYPE];
880extern const char *ir_type_cname[IR_LAST_TYPE];
881extern const uint8_t ir_type_size[IR_LAST_TYPE];
882extern const uint32_t ir_op_flags[IR_LAST_OP];
883extern const char *ir_op_name[IR_LAST_OP];
884
885void ir_print_escaped_str(const char *s, size_t len, FILE *f);
886
887#define IR_IS_CONST_OP(op) ((op) > IR_NOP && (op) <= IR_C_FLOAT)
888#define IR_IS_FOLDABLE_OP(op) ((op) <= IR_LAST_FOLDABLE_OP)
889#define IR_IS_SYM_CONST(op) ((op) == IR_STR || (op) == IR_SYM || (op) == IR_FUNC)
890
891ir_ref ir_const_ex(ir_ctx *ctx, ir_val val, uint8_t type, uint32_t optx);
892
894{
895 if (IR_IS_SYM_CONST(v->op)) {
896 return 1;
897 } else if (v->type == IR_BOOL) {
898 return v->val.b;
899 } else if (IR_IS_TYPE_INT(v->type)) {
900 return v->val.i64 != 0;
901 } else if (v->type == IR_DOUBLE) {
902 return v->val.d != 0.0;
903 } else {
904 IR_ASSERT(v->type == IR_FLOAT);
905 return v->val.f != 0.0;
906 }
907 return 0;
908}
909
911{
912 if (ref == IR_TRUE) {
913 return 1;
914 } else if (ref == IR_FALSE) {
915 return 0;
916 } else {
918 return ir_const_is_true(&ctx->ir_base[ref]);
919 }
920}
921
922/* IR OP flags */
923#define IR_OP_FLAG_OPERANDS_SHIFT 3
924
925#define IR_OP_FLAG_EDGES_MASK 0x03
926#define IR_OP_FLAG_VAR_INPUTS 0x04
927#define IR_OP_FLAG_OPERANDS_MASK 0x18
928#define IR_OP_FLAG_MEM_MASK ((1<<6)|(1<<7))
929
930#define IR_OP_FLAG_DATA (1<<8)
931#define IR_OP_FLAG_CONTROL (1<<9)
932#define IR_OP_FLAG_MEM (1<<10)
933#define IR_OP_FLAG_COMMUTATIVE (1<<11)
934#define IR_OP_FLAG_BB_START (1<<12)
935#define IR_OP_FLAG_BB_END (1<<13)
936#define IR_OP_FLAG_TERMINATOR (1<<14)
937#define IR_OP_FLAG_PINNED (1<<15)
938
/* Memory-access kind, encoded in bits 6-7 of the op flags. */
#define IR_OP_FLAG_MEM_LOAD ((0<<6)|(0<<7))
#define IR_OP_FLAG_MEM_STORE ((0<<6)|(1<<7))
#define IR_OP_FLAG_MEM_CALL ((1<<6)|(0<<7))
#define IR_OP_FLAG_MEM_ALLOC ((1<<6)|(1<<7))
/* NOTE(review): IR_OP_FLAG_MEM_MASK is already defined identically above;
 * this re-definition is harmless (same expansion) but redundant. */
#define IR_OP_FLAG_MEM_MASK ((1<<6)|(1<<7))
944
945#define IR_OPND_UNUSED 0x0
946#define IR_OPND_DATA 0x1
947#define IR_OPND_CONTROL 0x2
948#define IR_OPND_CONTROL_DEP 0x3
949#define IR_OPND_CONTROL_REF 0x4
950#define IR_OPND_STR 0x5
951#define IR_OPND_NUM 0x6
952#define IR_OPND_PROB 0x7
953#define IR_OPND_PROTO 0x8
954
955#define IR_OP_FLAGS(op_flags, op1_flags, op2_flags, op3_flags) \
956 ((op_flags) | ((op1_flags) << 20) | ((op2_flags) << 24) | ((op3_flags) << 28))
957
958#define IR_INPUT_EDGES_COUNT(flags) (flags & IR_OP_FLAG_EDGES_MASK)
959#define IR_OPERANDS_COUNT(flags) ((flags & IR_OP_FLAG_OPERANDS_MASK) >> IR_OP_FLAG_OPERANDS_SHIFT)
960
961#define IR_OP_HAS_VAR_INPUTS(flags) ((flags) & IR_OP_FLAG_VAR_INPUTS)
962
963#define IR_OPND_KIND(flags, i) \
964 (((flags) >> (16 + (4 * (((i) > 3) ? 3 : (i))))) & 0xf)
965
966#define IR_IS_REF_OPND_KIND(kind) \
967 ((kind) >= IR_OPND_DATA && (kind) <= IR_OPND_CONTROL_REF)
968
970{
971 uint32_t flags = ir_op_flags[insn->op];
972 uint32_t n = IR_OPERANDS_COUNT(flags);
973
975 /* MERGE, PHI, CALL, etc */
976 n = insn->inputs_count;
977 }
978 return n;
979}
980
982{
983 uint32_t flags = ir_op_flags[insn->op];
984 uint32_t n = IR_INPUT_EDGES_COUNT(flags);
986 /* MERGE, PHI, CALL, etc */
987 n = insn->inputs_count;
988 }
989 return n;
990}
991
992IR_ALWAYS_INLINE uint32_t ir_insn_inputs_to_len(uint32_t inputs_count)
993{
994 return 1 + (inputs_count >> 2);
995}
996
998{
999 return ir_insn_inputs_to_len(insn->inputs_count);
1000}
1001
1002/*** IR Context Private Flags (ir_ctx->flags2) ***/
1003#define IR_CFG_HAS_LOOPS (1<<0)
1004#define IR_IRREDUCIBLE_CFG (1<<1)
1005#define IR_HAS_ALLOCA (1<<2)
1006#define IR_HAS_CALLS (1<<3)
1007#define IR_OPT_IN_SCCP (1<<4)
1008#define IR_LINEAR (1<<5)
1009#define IR_HAS_VA_START (1<<6)
1010#define IR_HAS_VA_COPY (1<<7)
1011#define IR_HAS_VA_ARG_GP (1<<8)
1012#define IR_HAS_VA_ARG_FP (1<<9)
1013#define IR_HAS_FP_RET_SLOT (1<<10)
1014#define IR_16B_FRAME_ALIGNMENT (1<<11)
1015
1016/* Temporary: MEM2SSA -> SCCP */
1017#define IR_MEM2SSA_VARS (1<<25)
1018
1019/* Temporary: SCCP -> CFG */
1020#define IR_CFG_REACHABLE (1<<26)
1021
1022/* Temporary: Dominators -> Loops */
1023#define IR_NO_LOOPS (1<<25)
1024
1025/* Temporary: Live Ranges */
1026#define IR_LR_HAVE_DESSA_MOVES (1<<25)
1027
1028/* Temporary: Register Allocator */
1029#define IR_RA_HAVE_SPLITS (1<<25)
1030#define IR_RA_HAVE_SPILLS (1<<26)
1031
1032#define IR_RESERVED_FLAG_1 (1U<<31)
1033
1034/*** IR Use Lists ***/
1036 ir_ref refs; /* index in ir_ctx->use_edges[] array */
1038};
1039
1040void ir_use_list_remove_all(ir_ctx *ctx, ir_ref def, ir_ref use);
1041void ir_use_list_remove_one(ir_ctx *ctx, ir_ref def, ir_ref use);
1042void ir_use_list_replace_all(ir_ctx *ctx, ir_ref def, ir_ref use, ir_ref new_use);
1043void ir_use_list_replace_one(ir_ctx *ctx, ir_ref def, ir_ref use, ir_ref new_use);
1044bool ir_use_list_add(ir_ctx *ctx, ir_ref def, ir_ref use);
1045void ir_use_list_sort(ir_ctx *ctx, ir_ref def);
1046
1048{
1049 ir_use_list *use_list = &ctx->use_lists[ref];
1050 ir_ref n = use_list->count;
1051 ir_ref *p;
1052
1054 for (p = &ctx->use_edges[use_list->refs]; n > 0; p++, n--) {
1055 ir_ref next = *p;
1056 ir_insn *insn = &ctx->ir_base[next];
1057
1058 if ((ir_op_flags[insn->op] & IR_OP_FLAG_CONTROL) && insn->op1 == ref) {
1059 return next;
1060 }
1061 }
1062 IR_ASSERT(0);
1063 return IR_UNUSED;
1064}
1065
/*** Modification helpers ***/

/* Turn an instruction into a NOP, clearing all three operand slots. */
#define MAKE_NOP(_insn) do { \
	ir_insn *__insn = _insn; \
	__insn->optx = IR_NOP; \
	__insn->op1 = __insn->op2 = __insn->op3 = IR_UNUSED; \
	} while (0)

/* Drop all recorded uses of _ref; the use-list storage itself is kept.
 * Expects a local 'ctx' (ir_ctx*) to be in scope at the expansion site. */
#define CLEAR_USES(_ref) do { \
	ir_use_list *__use_list = &ctx->use_lists[_ref]; \
	__use_list->count = 0; \
	} while (0)

/* Exchange two ir_ref lvalues. */
#define SWAP_REFS(_ref1, _ref2) do { \
	ir_ref _tmp = _ref1; \
	_ref1 = _ref2; \
	_ref2 = _tmp; \
	} while (0)

/* Exchange two ir_insn pointer lvalues. */
#define SWAP_INSNS(_insn1, _insn2) do { \
	ir_insn *_tmp = _insn1; \
	_insn1 = _insn2; \
	_insn2 = _tmp; \
	} while (0)
1089
1090void ir_replace(ir_ctx *ctx, ir_ref ref, ir_ref new_ref);
1091void ir_update_op(ir_ctx *ctx, ir_ref ref, uint32_t idx, ir_ref new_val);
1092
1093/*** Iterative Optimization ***/
1094void ir_iter_replace(ir_ctx *ctx, ir_ref ref, ir_ref new_ref, ir_bitqueue *worklist);
1095void ir_iter_update_op(ir_ctx *ctx, ir_ref ref, uint32_t idx, ir_ref new_val, ir_bitqueue *worklist);
1096void ir_iter_opt(ir_ctx *ctx, ir_bitqueue *worklist);
1097
1098/*** IR Basic Blocks info ***/
1099#define IR_IS_BB_START(op) \
1100 ((ir_op_flags[op] & IR_OP_FLAG_BB_START) != 0)
1101
1102#define IR_IS_BB_MERGE(op) \
1103 ((op) == IR_MERGE || (op) == IR_LOOP_BEGIN)
1104
1105#define IR_IS_BB_END(op) \
1106 ((ir_op_flags[op] & IR_OP_FLAG_BB_END) != 0)
1107
1108#define IR_BB_UNREACHABLE (1<<0)
1109#define IR_BB_START (1<<1)
1110#define IR_BB_ENTRY (1<<2)
1111#define IR_BB_LOOP_HEADER (1<<3)
1112#define IR_BB_IRREDUCIBLE_LOOP (1<<4)
1113#define IR_BB_DESSA_MOVES (1<<5) /* translation out of SSA requires MOVEs */
1114#define IR_BB_EMPTY (1<<6)
1115#define IR_BB_PREV_EMPTY_ENTRY (1<<7)
1116#define IR_BB_OSR_ENTRY_LOADS (1<<8) /* OSR Entry-point with register LOADs */
1117#define IR_BB_LOOP_WITH_ENTRY (1<<9) /* set together with LOOP_HEADER if there is an ENTRY in the loop */
1118
1119/* The following flags are set by GCM */
1120#define IR_BB_HAS_PHI (1<<10)
1121#define IR_BB_HAS_PI (1<<11)
1122#define IR_BB_HAS_PARAM (1<<12)
1123#define IR_BB_HAS_VAR (1<<13)
1124
1125/* The following flags are set by BB scheduler */
1126#define IR_BB_ALIGN_LOOP (1<<14)
1127
1129 uint32_t flags;
1130 ir_ref start; /* index of first instruction */
1131 ir_ref end; /* index of last instruction */
1132 uint32_t successors; /* index in ir_ctx->cfg_edges[] array */
1134 uint32_t predecessors; /* index in ir_ctx->cfg_edges[] array */
1136 union {
1137 uint32_t dom_parent; /* immediate dominator block */
1138 uint32_t idom; /* immediate dominator block */
1139 };
1140 union {
1141 uint32_t dom_depth; /* depth from the root of the dominators tree */
1142 uint32_t postnum; /* used temporary during tree constructon */
1143 };
1144 uint32_t dom_child; /* first dominated blocks */
1145 uint32_t dom_next_child; /* next dominated block (linked list) */
1146 uint32_t loop_header;
1147 uint32_t loop_depth;
1148};
1149
1150void ir_build_prev_refs(ir_ctx *ctx);
1151uint32_t ir_skip_empty_target_blocks(const ir_ctx *ctx, uint32_t b);
1152uint32_t ir_next_block(const ir_ctx *ctx, uint32_t b);
1153void ir_get_true_false_blocks(const ir_ctx *ctx, uint32_t b, uint32_t *true_block, uint32_t *false_block);
1154
1155IR_ALWAYS_INLINE uint32_t ir_phi_input_number(const ir_ctx *ctx, const ir_block *bb, uint32_t from)
1156{
1157 uint32_t n, *p;
1158
1159 for (n = 0, p = &ctx->cfg_edges[bb->predecessors]; n < bb->predecessors_count; p++, n++) {
1160 if (*p == from) {
1161 return n + 2; /* first input is a reference to MERGE */
1162 }
1163 }
1164 IR_ASSERT(0);
1165 return 0;
1166}
1167
1168/*** Folding Engine (see ir.c and ir_fold.h) ***/
1176
1177ir_ref ir_folding(ir_ctx *ctx, uint32_t opt, ir_ref op1, ir_ref op2, ir_ref op3, ir_insn *op1_insn, ir_insn *op2_insn, ir_insn *op3_insn);
1178
1179/*** Alias Analyzes (see ir.c) ***/
1184
1185/*** Predicates (see ir.c) ***/
1187
1188/*** IR Live Info ***/
1191
1192#define IR_SUB_REFS_COUNT 4
1193
1194#define IR_LOAD_SUB_REF 0
1195#define IR_USE_SUB_REF 1
1196#define IR_DEF_SUB_REF 2
1197#define IR_SAVE_SUB_REF 3
1198
1199#define IR_LIVE_POS_TO_REF(pos) ((pos) / IR_SUB_REFS_COUNT)
1200#define IR_LIVE_POS_TO_SUB_REF(pos) ((pos) % IR_SUB_REFS_COUNT)
1201
1202#define IR_LIVE_POS_FROM_REF(ref) ((ref) * IR_SUB_REFS_COUNT)
1203
1204#define IR_START_LIVE_POS_FROM_REF(ref) ((ref) * IR_SUB_REFS_COUNT)
1205#define IR_LOAD_LIVE_POS_FROM_REF(ref) ((ref) * IR_SUB_REFS_COUNT + IR_LOAD_SUB_REF)
1206#define IR_USE_LIVE_POS_FROM_REF(ref) ((ref) * IR_SUB_REFS_COUNT + IR_USE_SUB_REF)
1207#define IR_DEF_LIVE_POS_FROM_REF(ref) ((ref) * IR_SUB_REFS_COUNT + IR_DEF_SUB_REF)
1208#define IR_SAVE_LIVE_POS_FROM_REF(ref) ((ref) * IR_SUB_REFS_COUNT + IR_SAVE_SUB_REF)
1209#define IR_END_LIVE_POS_FROM_REF(ref) ((ref) * IR_SUB_REFS_COUNT + IR_SUB_REFS_COUNT)
1210
1211/* ir_use_pos.flags bits */
1212#define IR_USE_MUST_BE_IN_REG (1<<0)
1213#define IR_USE_SHOULD_BE_IN_REG (1<<1)
1214#define IR_DEF_REUSES_OP1_REG (1<<2)
1215#define IR_DEF_CONFLICTS_WITH_INPUT_REGS (1<<3)
1216
1217#define IR_FUSED_USE (1<<6)
1218#define IR_PHI_USE (1<<7)
1219
1220#define IR_OP1_MUST_BE_IN_REG (1<<8)
1221#define IR_OP1_SHOULD_BE_IN_REG (1<<9)
1222#define IR_OP2_MUST_BE_IN_REG (1<<10)
1223#define IR_OP2_SHOULD_BE_IN_REG (1<<11)
1224#define IR_OP3_MUST_BE_IN_REG (1<<12)
1225#define IR_OP3_SHOULD_BE_IN_REG (1<<13)
1226
1227#define IR_USE_FLAGS(def_flags, op_num) (((def_flags) >> (6 + (IR_MIN((op_num), 3) * 2))) & 3)
1228
1230 uint16_t op_num; /* 0 - means result */
1231 int8_t hint;
1232 uint8_t flags;
1233 ir_ref hint_ref; /* negative references are used for FUSION anf PHI */
1236};
1237
1239 ir_live_pos start; /* inclusive */
1240 ir_live_pos end; /* exclusive */
1242};
1243
1244/* ir_live_interval.flags bits (two low bits are reserved for temporary register number) */
1245#define IR_LIVE_INTERVAL_FIXED (1<<0)
1246#define IR_LIVE_INTERVAL_TEMP (1<<1)
1247#define IR_LIVE_INTERVAL_HAS_HINT_REGS (1<<2)
1248#define IR_LIVE_INTERVAL_HAS_HINT_REFS (1<<3)
1249#define IR_LIVE_INTERVAL_MEM_PARAM (1<<4)
1250#define IR_LIVE_INTERVAL_MEM_LOAD (1<<5)
1251#define IR_LIVE_INTERVAL_COALESCED (1<<6)
1252#define IR_LIVE_INTERVAL_SPILL_SPECIAL (1<<7) /* spill slot is pre-allocated in a special area (see ir_ctx.spill_reserved_base) */
1253#define IR_LIVE_INTERVAL_SPILLED (1<<8)
1254#define IR_LIVE_INTERVAL_SPLIT_CHILD (1<<9)
1255
1257 uint8_t type;
1258 int8_t reg;
1259 uint16_t flags;
1260 union {
1261 int32_t vreg;
1262 int32_t tmp_ref;
1263 };
1264 union {
1267 };
1268 ir_live_pos end; /* end of the last live range (cahce of ival.range.{next->}end) */
1273 ir_live_interval *list_next; /* linked list of active, inactive or unhandled intervals */
1274};
1275
1276typedef int (*emit_copy_t)(ir_ctx *ctx, uint8_t type, ir_ref from, ir_ref to);
1277
1278int ir_gen_dessa_moves(ir_ctx *ctx, uint32_t b, emit_copy_t emit_copy);
1279
1280#if defined(IR_REGSET_64BIT)
1281
1282/*typedef enum _ir_reg ir_reg;*/
1283typedef int8_t ir_reg;
1284
1285/*** Register Sets ***/
1286#if IR_REGSET_64BIT
1287typedef uint64_t ir_regset;
1288#else
1289typedef uint32_t ir_regset;
1290#endif
1291
1292#define IR_REGSET_EMPTY 0
1293
1294#define IR_REGSET_IS_EMPTY(regset) \
1295 (regset == IR_REGSET_EMPTY)
1296
1297#define IR_REGSET_IS_SINGLETON(regset) \
1298 (regset && !(regset & (regset - 1)))
1299
1300#if IR_REGSET_64BIT
1301# define IR_REGSET(reg) \
1302 (1ull << (reg))
1303#else
1304# define IR_REGSET(reg) \
1305 (1u << (reg))
1306#endif
1307
1308#if IR_REGSET_64BIT
1309# define IR_REGSET_INTERVAL(reg1, reg2) \
1310 (((1ull << ((reg2) - (reg1) + 1)) - 1) << (reg1))
1311#else
1312# define IR_REGSET_INTERVAL(reg1, reg2) \
1313 (((1u << ((reg2) - (reg1) + 1)) - 1) << (reg1))
1314#endif
1315
1316#define IR_REGSET_IN(regset, reg) \
1317 (((regset) & IR_REGSET(reg)) != 0)
1318
1319#define IR_REGSET_INCL(regset, reg) \
1320 (regset) |= IR_REGSET(reg)
1321
1322#define IR_REGSET_EXCL(regset, reg) \
1323 (regset) &= ~IR_REGSET(reg)
1324
1325#define IR_REGSET_UNION(set1, set2) \
1326 ((set1) | (set2))
1327
1328#define IR_REGSET_INTERSECTION(set1, set2) \
1329 ((set1) & (set2))
1330
1331#define IR_REGSET_DIFFERENCE(set1, set2) \
1332 ((set1) & ~(set2))
1333
/* Lowest / highest register number in a (non-empty) set.
 * FIRST is the count of trailing zeros; LAST is (width-1) minus the count
 * of leading zeros, written as clz ^ (width-1).  Both are undefined for an
 * empty set (ctz/clz of zero is undefined).
 * Fixes the 64-bit LAST macro: it was spelled `ir_REGSET_LAST` (wrong case,
 * inconsistent with IR_REGSET_FIRST and the 32-bit branch) and expanded to
 * the malformed expression `ir_nlzl(set)(set)^63`. */
#if IR_REGSET_64BIT
# define IR_REGSET_FIRST(set) ((ir_reg)ir_ntzl(set))
# define IR_REGSET_LAST(set) ((ir_reg)(ir_nlzl(set)^63))
#else
# define IR_REGSET_FIRST(set) ((ir_reg)ir_ntz(set))
# define IR_REGSET_LAST(set) ((ir_reg)(ir_nlz(set)^31))
#endif
1341
1342IR_ALWAYS_INLINE ir_reg ir_regset_pop_first(ir_regset *set)
1343{
1344 ir_reg reg;
1345
1346 IR_ASSERT(!IR_REGSET_IS_EMPTY(*set));
1347 reg = IR_REGSET_FIRST(*set);
1348 *set = (*set) & ((*set) - 1);
1349 return reg;
1350}
1351
/* Iterate over every register of 'set', binding it to 'reg' in ascending
 * order.  Operates on a local copy, so the original set is untouched.
 * Terminate with IR_REGSET_FOREACH_END(). */
#define IR_REGSET_FOREACH(set, reg) \
	do { \
		ir_regset _tmp = (set); \
		while (!IR_REGSET_IS_EMPTY(_tmp)) { \
			reg = ir_regset_pop_first(&_tmp);

/* Closes the loop opened by IR_REGSET_FOREACH(). */
#define IR_REGSET_FOREACH_END() \
		} \
	} while (0)
1361
1362#endif /* defined(IR_REGSET_64BIT) */
1363
1364/*** IR Register Allocation ***/
1365/* Flags for ctx->regs[][] (low bits are used for register number itself) */
1372
1374
/* Record the register allocated for instruction 'ref' (op_num == 0 means
 * the result) or for one of its operands (op_num > 0) in ctx->regs[]. */
IR_ALWAYS_INLINE void ir_set_alocated_reg(ir_ctx *ctx, ir_ref ref, int op_num, int8_t reg)
{
	int8_t *regs = ctx->regs[ref];

	if (op_num > 0) {
		/* regs[] is not limited by the declared boundary 4, the real boundary checked below */
		IR_ASSERT(op_num <= IR_MAX(3, ctx->ir_base[ref].inputs_count));
	}
	regs[op_num] = reg;
}
1385
/* Read the register allocated for instruction 'ref' (op_num == 0 means
 * the result) or for one of its operands (op_num > 0) from ctx->regs[]. */
IR_ALWAYS_INLINE int8_t ir_get_alocated_reg(const ir_ctx *ctx, ir_ref ref, int op_num)
{
	int8_t *regs = ctx->regs[ref];

	/* regs[] is not limited by the declared boundary 4, the real boundary checked below */
	IR_ASSERT(op_num <= IR_MAX(3, ctx->ir_base[ref].inputs_count));
	return regs[op_num];
}
1394
1395/*** IR Target Interface ***/
1396
1397/* ctx->rules[] flags */
1398#define IR_FUSED (1U<<31) /* Insn is fused into others (code is generated as part of the fusion root) */
1399#define IR_SKIPPED (1U<<30) /* Insn is skipped (code is not generated) */
1400#define IR_SIMPLE (1U<<29) /* Insn doesn't have any target constraints */
1401#define IR_FUSED_REG (1U<<28) /* Register assignemnt may be stored in ctx->fused_regs instead of ctx->regs */
1402#define IR_MAY_SWAP (1U<<27) /* Allow swapping operands for better register allocation */
1403#define IR_MAY_REUSE (1U<<26) /* Result may reuse register of the source */
1404
1405#define IR_RULE_MASK 0xff
1406
1407extern const char *ir_rule_name[];
1408
1410
1411#define IR_TMP_REG(_num, _type, _start, _end) \
1412 (ir_tmp_reg){.num=(_num), .type=(_type), .start=(_start), .end=(_end)}
1413#define IR_SCRATCH_REG(_reg, _start, _end) \
1414 (ir_tmp_reg){.reg=(_reg), .type=IR_VOID, .start=(_start), .end=(_end)}
1415
1417
1419
1420/* Utility */
1422bool ir_is_fastcall(const ir_ctx *ctx, const ir_insn *insn);
1423bool ir_is_vararg(const ir_ctx *ctx, ir_insn *insn);
1424
1425//#define IR_BITSET_LIVENESS
1426
1427#endif /* IR_PRIVATE_H */
size_t len
Definition apprentice.c:174
prev(array|object &$array)
char s[4]
Definition cdf.c:77
uint32_t v
Definition cdf.c:1237
zend_ffi_type * type
Definition ffi.c:3812
zend_long n
Definition ffi.c:4979
new_type size
Definition ffi.c:4365
void * ptr
Definition ffi.c:3814
memcpy(ptr1, ptr2, size)
memset(ptr, 0, type->size)
zval * val
Definition ffi.c:4262
#define NULL
Definition gdcache.h:45
again j
const char * ir_type_name[IR_LAST_TYPE]
Definition ir.c:56
const uint8_t ir_type_flags[IR_LAST_TYPE]
Definition ir.c:51
const char * ir_type_cname[IR_LAST_TYPE]
Definition ir.c:66
const uint32_t ir_op_flags[IR_LAST_OP]
Definition ir.c:294
const char * ir_op_name[IR_LAST_OP]
Definition ir.c:71
const uint8_t ir_type_size[IR_LAST_TYPE]
Definition ir.c:61
struct _ir_live_range ir_live_range
Definition ir.h:555
enum _ir_type ir_type
#define IR_IS_TYPE_INT(t)
Definition ir.h:145
#define IR_TRUE
Definition ir.h:398
union _ir_val ir_val
struct _ir_live_interval ir_live_interval
Definition ir.h:554
struct _ir_hashtab ir_hashtab
Definition ir.h:482
#define IR_UNUSED
Definition ir.h:395
@ IR_LAST_TYPE
Definition ir.h:153
#define ir_mem_calloc
Definition ir.h:1009
int32_t ir_ref
Definition ir.h:390
#define IR_IS_CONST_REF(ref)
Definition ir.h:392
#define ir_mem_malloc
Definition ir.h:1006
struct _ir_arena ir_arena
Definition ir.h:553
#define IR_FALSE
Definition ir.h:397
#define ir_mem_realloc
Definition ir.h:1012
@ IR_LAST_OP
Definition ir.h:376
#define ir_mem_free
Definition ir.h:1015
struct _ir_ctx ir_ctx
Definition ir.h:550
#define IR_ALWAYS_INLINE
Definition ir.h:108
struct _ir_use_list ir_use_list
Definition ir.h:551
struct _ir_insn ir_insn
struct _ir_block ir_block
Definition ir.h:552
void ir_fix_stack_frame(ir_ctx *ctx)
void ir_iter_opt(ir_ctx *ctx, ir_bitqueue *worklist)
Definition ir_sccp.c:3466
ir_ref ir_addrtab_find(const ir_hashtab *tab, uint64_t key)
Definition ir.c:1730
IR_ALWAYS_INLINE uint32_t ir_rol32(uint32_t op1, uint32_t op2)
Definition ir_private.h:79
IR_ALWAYS_INLINE void ir_bitqueue_grow(ir_bitqueue *q, uint32_t n)
Definition ir_private.h:588
int ir_get_target_constraints(ir_ctx *ctx, ir_ref ref, ir_target_constraints *constraints)
void ir_update_op(ir_ctx *ctx, ir_ref ref, uint32_t idx, ir_ref new_val)
Definition ir.c:1469
struct _ir_list ir_list
IR_ALWAYS_INLINE uint32_t ir_list_len(const ir_list *l)
Definition ir_private.h:728
IR_ALWAYS_INLINE bool ir_const_is_true(const ir_insn *v)
Definition ir_private.h:893
IR_ALWAYS_INLINE int ir_bitset_last(const ir_bitset set, uint32_t len)
Definition ir_private.h:426
IR_ALWAYS_INLINE ir_ref ir_input_edges_count(const ir_ctx *ctx, const ir_insn *insn)
Definition ir_private.h:981
ir_bitset_base_t * ir_bitset
Definition ir_private.h:317
IR_ALWAYS_INLINE uint64_t ir_rol64(uint64_t op1, uint64_t op2)
Definition ir_private.h:84
IR_ALWAYS_INLINE ir_ref ir_array_get(const ir_array *a, uint32_t i)
Definition ir_private.h:676
IR_ALWAYS_INLINE ir_ref ir_list_pop(ir_list *l)
Definition ir_private.h:748
IR_ALWAYS_INLINE uint32_t ir_list_capasity(const ir_list *l)
Definition ir_private.h:733
void ir_list_insert(ir_list *l, uint32_t i, ir_ref val)
Definition ir.c:1509
void ir_use_list_replace_one(ir_ctx *ctx, ir_ref def, ir_ref use, ir_ref new_use)
Definition ir.c:1347
IR_ALWAYS_INLINE void ir_list_push_unchecked(ir_list *l, ir_ref val)
Definition ir_private.h:743
IR_ALWAYS_INLINE void ir_worklist_free(ir_worklist *w)
Definition ir_private.h:797
IR_ALWAYS_INLINE void ir_array_free(ir_array *a)
Definition ir_private.h:664
int(* emit_copy_t)(ir_ctx *ctx, uint8_t type, ir_ref from, ir_ref to)
void ir_build_prev_refs(ir_ctx *ctx)
Definition ir_gcm.c:1347
IR_ALWAYS_INLINE bool ir_bitset_in(const ir_bitset set, uint32_t n)
Definition ir_private.h:339
void ir_use_list_remove_all(ir_ctx *ctx, ir_ref def, ir_ref use)
Definition ir.c:1295
IR_ALWAYS_INLINE void ir_bitqueue_del(ir_bitqueue *q, uint32_t n)
Definition ir_private.h:638
#define IR_ALIGNED_SIZE(size, alignment)
Definition ir_private.h:59
void ir_replace(ir_ctx *ctx, ir_ref ref, ir_ref new_ref)
Definition ir.c:1430
ir_ref ir_check_dominating_predicates(ir_ctx *ctx, ir_ref ref, ir_ref condition)
Definition ir.c:2469
#define ir_bitset_ntz
Definition ir_private.h:314
IR_ALWAYS_INLINE void ir_bitset_copy(ir_bitset set1, const ir_bitset set2, uint32_t len)
Definition ir_private.h:370
ir_ref ir_const_ex(ir_ctx *ctx, ir_val val, uint8_t type, uint32_t optx)
Definition ir.c:512
bool ir_is_fastcall(const ir_ctx *ctx, const ir_insn *insn)
Definition ir_emit.c:114
void ir_array_remove(ir_array *a, uint32_t i)
Definition ir.c:1502
IR_ALWAYS_INLINE uint32_t ir_phi_input_number(const ir_ctx *ctx, const ir_block *bb, uint32_t from)
void ir_array_grow(ir_array *a, uint32_t size)
Definition ir.c:1485
IR_ALWAYS_INLINE void ir_bitqueue_add(ir_bitqueue *q, uint32_t n)
Definition ir_private.h:629
void ir_print_escaped_str(const char *s, size_t len, FILE *f)
Definition ir.c:78
bool ir_is_vararg(const ir_ctx *ctx, ir_insn *insn)
Definition ir_emit.c:120
#define IR_OPERANDS_COUNT(flags)
Definition ir_private.h:959
#define IR_BITSET_BITS
Definition ir_private.h:307
uint32_t ir_list_find(const ir_list *l, ir_ref val)
Definition ir.c:1527
void ir_use_list_sort(ir_ctx *ctx, ir_ref def)
Definition ir.c:1417
IR_ALWAYS_INLINE uint32_t ir_ror32(uint32_t op1, uint32_t op2)
Definition ir_private.h:99
IR_ALWAYS_INLINE void ir_bitset_intersection(ir_bitset set1, const ir_bitset set2, uint32_t len)
Definition ir_private.h:375
struct _ir_addrtab_bucket ir_addrtab_bucket
IR_ALWAYS_INLINE bool ir_bitqueue_in(const ir_bitqueue *q, uint32_t n)
Definition ir_private.h:643
IR_ALWAYS_INLINE void ir_bitset_fill(ir_bitset set, uint32_t len)
Definition ir_private.h:349
IR_ALWAYS_INLINE ir_bitset ir_bitset_malloc(uint32_t n)
Definition ir_private.h:324
IR_ALWAYS_INLINE bool ir_bitset_empty(const ir_bitset set, uint32_t len)
Definition ir_private.h:354
#define EXPECTED(condition)
Definition ir_private.h:47
const char * ir_rule_name[]
IR_ALWAYS_INLINE ir_ref ir_operands_count(const ir_ctx *ctx, const ir_insn *insn)
Definition ir_private.h:969
#define IR_SPARSE_SET_SPARSE(set, n)
Definition ir_private.h:493
ir_ref ir_find_aliasing_load(ir_ctx *ctx, ir_ref ref, ir_type type, ir_ref addr)
Definition ir.c:2064
IR_ALWAYS_INLINE uint32_t ir_ntz(uint32_t num)
Definition ir_private.h:110
int32_t ir_allocate_spill_slot(ir_ctx *ctx, ir_type type, ir_reg_alloc_data *data)
Definition ir_ra.c:2613
IR_ALWAYS_INLINE void ir_sparse_set_add(ir_sparse_set *set, uint32_t n)
Definition ir_private.h:528
IR_ALWAYS_INLINE bool ir_worklist_push(ir_worklist *w, ir_ref val)
Definition ir_private.h:819
IR_ALWAYS_INLINE ir_ref ir_worklist_peek(const ir_worklist *w)
Definition ir_private.h:836
IR_ALWAYS_INLINE void ir_array_set_unchecked(ir_array *a, uint32_t i, ir_ref val)
Definition ir_private.h:695
IR_ALWAYS_INLINE void ir_worklist_clear(ir_worklist *w)
Definition ir_private.h:813
void ir_list_remove(ir_list *l, uint32_t i)
Definition ir.c:1520
struct _ir_hashtab_bucket ir_hashtab_bucket
IR_ALWAYS_INLINE ir_arena * ir_arena_create(size_t size)
Definition ir_private.h:234
ir_ref ir_find_aliasing_vload(ir_ctx *ctx, ir_ref ref, ir_type type, ir_ref var)
Definition ir.c:2110
IR_ALWAYS_INLINE ir_ref ir_list_peek(const ir_list *l)
Definition ir_private.h:754
IR_ALWAYS_INLINE int ir_nlz(uint32_t num)
Definition ir_private.h:170
struct _ir_worklist ir_worklist
IR_ALWAYS_INLINE void ir_bitset_union(ir_bitset set1, const ir_bitset set2, uint32_t len)
Definition ir_private.h:384
IR_ALWAYS_INLINE void ir_bitset_incl(ir_bitset set, uint32_t n)
Definition ir_private.h:329
int ir_gen_dessa_moves(ir_ctx *ctx, uint32_t b, emit_copy_t emit_copy)
Definition ir_ra.c:2107
IR_ALWAYS_INLINE void ir_sparse_set_init(ir_sparse_set *set, uint32_t size)
Definition ir_private.h:495
#define IR_IS_SYM_CONST(op)
Definition ir_private.h:889
IR_ALWAYS_INLINE bool ir_sparse_set_empty(const ir_sparse_set *set)
Definition ir_private.h:516
#define IR_ASSERT(x)
Definition ir_private.h:17
IR_ALWAYS_INLINE int8_t ir_get_alocated_reg(const ir_ctx *ctx, ir_ref ref, int op_num)
IR_ALWAYS_INLINE bool ir_sparse_set_in(const ir_sparse_set *set, uint32_t n)
Definition ir_private.h:521
ir_ref ir_live_pos
IR_ALWAYS_INLINE void ir_worklist_init(ir_worklist *w, uint32_t size)
Definition ir_private.h:791
IR_ALWAYS_INLINE bool ir_bitset_equal(const ir_bitset set1, const ir_bitset set2, uint32_t len)
Definition ir_private.h:365
#define IR_BITSET_ONE
Definition ir_private.h:311
IR_ALWAYS_INLINE uint32_t ir_worklist_capasity(const ir_worklist *w)
Definition ir_private.h:808
IR_ALWAYS_INLINE void ir_list_free(ir_list *l)
Definition ir_private.h:717
IR_ALWAYS_INLINE uint32_t ir_bitset_len(uint32_t n)
Definition ir_private.h:319
void ir_hashtab_init(ir_hashtab *tab, uint32_t size)
Definition ir.c:1580
#define IR_MAX(a, b)
Definition ir_private.h:62
IR_ALWAYS_INLINE void ir_sparse_set_del(ir_sparse_set *set, uint32_t n)
Definition ir_private.h:538
enum _ir_fold_action ir_fold_action
void ir_iter_update_op(ir_ctx *ctx, ir_ref ref, uint32_t idx, ir_ref new_val, ir_bitqueue *worklist)
Definition ir_sccp.c:1237
#define IR_SPARSE_SET_DENSE(set, n)
Definition ir_private.h:492
ir_type ir_get_return_type(ir_ctx *ctx)
IR_ALWAYS_INLINE void ir_bitqueue_init(ir_bitqueue *q, uint32_t n)
Definition ir_private.h:581
ir_ref ir_find_aliasing_store(ir_ctx *ctx, ir_ref ref, ir_ref addr, ir_ref val)
Definition ir.c:2204
IR_ALWAYS_INLINE int ir_bitset_first(const ir_bitset set, uint32_t len)
Definition ir_private.h:414
IR_ALWAYS_INLINE void ir_release(ir_arena **arena_ptr, void *checkpoint)
Definition ir_private.h:286
_ir_fold_action
@ IR_FOLD_DO_EMIT
@ IR_FOLD_DO_COPY
@ IR_FOLD_DO_CONST
@ IR_FOLD_DO_CSE
@ IR_FOLD_DO_RESTART
IR_ALWAYS_INLINE ir_ref ir_list_at(const ir_list *l, uint32_t i)
Definition ir_private.h:760
IR_ALWAYS_INLINE bool ir_list_contains(const ir_list *l, ir_ref val)
Definition ir_private.h:780
IR_ALWAYS_INLINE uint16_t ir_ror16(uint16_t op1, uint16_t op2)
Definition ir_private.h:94
void ir_use_list_replace_all(ir_ctx *ctx, ir_ref def, ir_ref use, ir_ref new_use)
Definition ir.c:1363
void ir_hashtab_key_sort(ir_hashtab *tab)
Definition ir.c:1653
IR_ALWAYS_INLINE void ir_list_init(ir_list *l, uint32_t size)
Definition ir_private.h:711
IR_ALWAYS_INLINE int ir_nlzl(uint64_t num)
Definition ir_private.h:197
struct _ir_reg_alloc_data ir_reg_alloc_data
IR_ALWAYS_INLINE uint8_t ir_rol8(uint8_t op1, uint8_t op2)
Definition ir_private.h:69
IR_ALWAYS_INLINE int ir_bitqueue_pop(ir_bitqueue *q)
Definition ir_private.h:610
IR_ALWAYS_INLINE void ir_bitset_difference(ir_bitset set1, const ir_bitset set2, uint32_t len)
Definition ir_private.h:393
IR_ALWAYS_INLINE bool ir_bitset_is_subset(const ir_bitset set1, const ir_bitset set2, uint32_t len)
Definition ir_private.h:402
IR_ALWAYS_INLINE void ir_array_set(ir_array *a, uint32_t i, ir_ref val)
Definition ir_private.h:687
IR_ALWAYS_INLINE void ir_sparse_set_free(ir_sparse_set *set)
Definition ir_private.h:511
IR_ALWAYS_INLINE void * ir_arena_checkpoint(ir_arena *arena)
Definition ir_private.h:281
IR_ALWAYS_INLINE int ir_bitset_pop_first(ir_bitset set, uint32_t len)
Definition ir_private.h:445
IR_ALWAYS_INLINE uint32_t ir_insn_inputs_to_len(uint32_t inputs_count)
Definition ir_private.h:992
IR_ALWAYS_INLINE void ir_list_set(ir_list *l, uint32_t i, ir_ref val)
Definition ir_private.h:766
#define IR_INPUT_EDGES_COUNT(flags)
Definition ir_private.h:958
IR_ALWAYS_INLINE ir_ref ir_array_at(const ir_array *a, uint32_t i)
Definition ir_private.h:681
IR_ALWAYS_INLINE void ir_list_clear(ir_list *l)
Definition ir_private.h:723
IR_ALWAYS_INLINE void ir_arena_free(ir_arena *arena)
Definition ir_private.h:246
IR_ALWAYS_INLINE void ir_bitset_clear(ir_bitset set, uint32_t len)
Definition ir_private.h:344
IR_ALWAYS_INLINE uint32_t ir_sparse_set_pop(ir_sparse_set *set)
Definition ir_private.h:554
IR_ALWAYS_INLINE bool ir_ref_is_true(ir_ctx *ctx, ir_ref ref)
Definition ir_private.h:910
#define IR_OP_FLAG_CONTROL
Definition ir_private.h:931
IR_ALWAYS_INLINE void ir_bitqueue_free(ir_bitqueue *q)
Definition ir_private.h:599
IR_ALWAYS_INLINE void ir_list_push(ir_list *l, ir_ref val)
Definition ir_private.h:738
ir_ref ir_folding(ir_ctx *ctx, uint32_t opt, ir_ref op1, ir_ref op2, ir_ref op3, ir_insn *op1_insn, ir_insn *op2_insn, ir_insn *op3_insn)
Definition ir.c:932
#define ir_bitset_base_t
Definition ir_private.h:313
void ir_use_list_remove_one(ir_ctx *ctx, ir_ref def, ir_ref use)
Definition ir.c:1321
IR_ALWAYS_INLINE ir_ref ir_next_control(const ir_ctx *ctx, ir_ref ref)
IR_ALWAYS_INLINE void ir_bitqueue_clear(ir_bitqueue *q)
Definition ir_private.h:604
ir_ref ir_find_aliasing_vstore(ir_ctx *ctx, ir_ref ref, ir_ref addr, ir_ref val)
Definition ir.c:2284
IR_ALWAYS_INLINE ir_ref ir_worklist_pop(ir_worklist *w)
Definition ir_private.h:831
bool ir_use_list_add(ir_ctx *ctx, ir_ref def, ir_ref use)
Definition ir.c:1378
void ir_array_insert(ir_array *a, uint32_t i, ir_ref val)
Definition ir.c:1492
uint32_t ir_skip_empty_target_blocks(const ir_ctx *ctx, uint32_t b)
Definition ir_cfg.c:1893
void ir_iter_replace(ir_ctx *ctx, ir_ref ref, ir_ref new_ref, ir_bitqueue *worklist)
Definition ir_sccp.c:1165
IR_ALWAYS_INLINE void ir_sparse_set_clear(ir_sparse_set *set)
Definition ir_private.h:506
IR_ALWAYS_INLINE uint32_t ir_worklist_len(const ir_worklist *w)
Definition ir_private.h:803
struct _ir_bitqueue ir_bitqueue
struct _ir_sparse_set ir_sparse_set
struct _ir_use_pos ir_use_pos
void ir_addrtab_init(ir_hashtab *tab, uint32_t size)
Definition ir.c:1709
uint32_t ir_next_block(const ir_ctx *ctx, uint32_t b)
Definition ir_cfg.c:1898
IR_ALWAYS_INLINE void ir_set_alocated_reg(ir_ctx *ctx, ir_ref ref, int op_num, int8_t reg)
IR_ALWAYS_INLINE uint32_t ir_ntzl(uint64_t num)
Definition ir_private.h:138
IR_ALWAYS_INLINE uint64_t ir_ror64(uint64_t op1, uint64_t op2)
Definition ir_private.h:104
struct _ir_target_constraints ir_target_constraints
void ir_hashtab_free(ir_hashtab *tab)
Definition ir.c:1593
IR_ALWAYS_INLINE uint16_t ir_rol16(uint16_t op1, uint16_t op2)
Definition ir_private.h:74
#define UNEXPECTED(condition)
Definition ir_private.h:48
IR_ALWAYS_INLINE void ir_array_init(ir_array *a, uint32_t size)
Definition ir_private.h:658
void ir_addrtab_set(ir_hashtab *tab, uint64_t key, ir_ref val)
Definition ir.c:1746
void ir_addrtab_free(ir_hashtab *tab)
Definition ir.c:1722
void ir_get_true_false_blocks(const ir_ctx *ctx, uint32_t b, uint32_t *true_block, uint32_t *false_block)
Definition ir_cfg.c:1929
ir_ref ir_hashtab_find(const ir_hashtab *tab, uint32_t key)
Definition ir.c:1601
IR_ALWAYS_INLINE void ir_list_del(ir_list *l, uint32_t i)
Definition ir_private.h:773
IR_ALWAYS_INLINE uint32_t ir_array_size(const ir_array *a)
Definition ir_private.h:671
IR_ALWAYS_INLINE uint32_t ir_insn_len(const ir_insn *insn)
Definition ir_private.h:997
IR_ALWAYS_INLINE void ir_bitset_excl(ir_bitset set, uint32_t n)
Definition ir_private.h:334
bool ir_hashtab_add(ir_hashtab *tab, uint32_t key, ir_ref val)
Definition ir.c:1617
struct _ir_array ir_array
IR_ALWAYS_INLINE void * ir_arena_alloc(ir_arena **arena_ptr, size_t size)
Definition ir_private.h:255
IR_ALWAYS_INLINE uint8_t ir_ror8(uint8_t op1, uint8_t op2)
Definition ir_private.h:89
#define IR_OP_HAS_VAR_INPUTS(flags)
Definition ir_private.h:961
#define next(ls)
Definition minilua.c:2661
char * arena
Definition php_bcmath.h:37
unsigned const char * end
Definition php_ffi.h:51
unsigned const char * pos
Definition php_ffi.h:52
unsigned char key[REFLECTION_KEY_LEN]
zend_constant * data
p
Definition session.c:1105
char * ptr
Definition ir_private.h:229
char * end
Definition ir_private.h:230
ir_arena * prev
Definition ir_private.h:231
uint32_t size
Definition ir_private.h:651
ir_ref * refs
Definition ir_private.h:650
uint32_t pos
Definition ir_private.h:577
uint32_t len
Definition ir_private.h:576
ir_bitset set
Definition ir_private.h:578
ir_ref end
uint32_t dom_next_child
uint32_t dom_parent
ir_ref start
uint32_t idom
uint32_t loop_header
uint32_t flags
uint32_t loop_depth
uint32_t successors
uint32_t dom_child
uint32_t successors_count
uint32_t dom_depth
uint32_t predecessors
uint32_t postnum
uint32_t predecessors_count
uint32_t * cfg_edges
Definition ir.h:593
ir_use_list * use_lists
Definition ir.h:587
ir_regs * regs
Definition ir.h:612
ir_insn * ir_base
Definition ir.h:574
ir_ref * use_edges
Definition ir.h:588
void * data
Definition ir_private.h:852
uint32_t mask
Definition ir_private.h:853
uint32_t size
Definition ir_private.h:854
uint32_t pos
Definition ir_private.h:856
uint32_t count
Definition ir_private.h:855
ir_array a
Definition ir_private.h:703
uint32_t len
Definition ir_private.h:704
ir_live_range * current_range
ir_live_interval * next
int32_t stack_spill_pos
ir_live_pos end
ir_live_range range
ir_use_pos * use_pos
ir_live_interval * list_next
ir_live_pos end
ir_live_range * next
ir_live_pos start
ir_live_interval ** handled
uint32_t * data
Definition ir_private.h:489
uint32_t size
Definition ir_private.h:487
ir_use_pos * next
ir_ref hint_ref
ir_live_pos pos
uint8_t flags
uint16_t op_num
ir_bitset visited
Definition ir_private.h:788
$obj a
Definition test.php:84
int last
#define UNEXPECTED(condition)
op2
op1