15# define IR_ASSERT(x) assert(x)
23# pragma intrinsic(_BitScanForward64)
24# pragma intrinsic(_BitScanReverse64)
26# pragma intrinsic(_BitScanForward)
27# pragma intrinsic(_BitScanReverse)
31# if __has_builtin(__builtin_expect)
32# define EXPECTED(condition) __builtin_expect(!!(condition), 1)
33# define UNEXPECTED(condition) __builtin_expect(!!(condition), 0)
35# if __has_attribute(__aligned__)
36# define IR_SET_ALIGNED(alignment, decl) decl __attribute__ ((__aligned__ (alignment)))
38# if __has_attribute(__fallthrough__)
39# define IR_FALLTHROUGH __attribute__((__fallthrough__))
42# define IR_SET_ALIGNED(alignment, decl) __declspec(align(alignment)) decl
44#define __has_builtin(x) 0
47# define EXPECTED(condition) (condition)
48# define UNEXPECTED(condition) (condition)
51# define IR_SET_ALIGNED(alignment, decl) decl
54# define IR_FALLTHROUGH ((void)0)
/* Round "size" up to the next multiple of "alignment".
 * NOTE(review): only valid when "alignment" is a power of two — TODO confirm at call sites. */
#define IR_ALIGNED_SIZE(size, alignment) \
	(((size) + ((alignment) - 1)) & ~((alignment) - 1))

/* NOTE: both arguments are evaluated twice — do not pass expressions with side effects. */
#define IR_MAX(a, b)          (((a) > (b)) ? (a) : (b))
#define IR_MIN(a, b)          (((a) < (b)) ? (a) : (b))

/* True for powers of two; note it is also true for 0 — callers must exclude 0 themselves. */
#define IR_IS_POWER_OF_TWO(x) (!((x) & ((x) - 1)))

/* Log2 via count-trailing-zeros: exact only when "x" is a nonzero power of two. */
#define IR_LOG2(x) ir_ntzl(x)
112#if (defined(__GNUC__) || __has_builtin(__builtin_ctz))
113 return __builtin_ctz(num);
117 if (!_BitScanForward(&index, num)) {
126 if (num == 0)
return 32;
129 if ((num & 0x0000ffff) == 0) {
n += 16; num = num >> 16;}
130 if ((num & 0x000000ff) == 0) {
n += 8; num = num >> 8;}
131 if ((num & 0x0000000f) == 0) {
n += 4; num = num >> 4;}
132 if ((num & 0x00000003) == 0) {
n += 2; num = num >> 2;}
133 return n - (num & 1);
146 if (!_BitScanForward64(&index, num)) {
151 return (uint32_t) index;
152#elif (defined(__GNUC__) || __has_builtin(__builtin_ctzl))
153 return __builtin_ctzl(num);
157 if (num == 0)
return 64;
160 if ((num & 0xffffffff) == 0) {
n += 32; num = num >> 32;}
161 if ((num & 0x0000ffff) == 0) {
n += 16; num = num >> 16;}
162 if ((num & 0x000000ff) == 0) {
n += 8; num = num >> 8;}
163 if ((num & 0x0000000f) == 0) {
n += 4; num = num >> 4;}
164 if ((num & 0x00000003) == 0) {
n += 2; num = num >> 2;}
165 return n - (uint32_t)(num & 1);
172#if (defined(__GNUC__) || __has_builtin(__builtin_clz))
173 return __builtin_clz(num);
177 if (!_BitScanReverse(&index, num)) {
182 return (
int) (32 - 1) - index;
188 x = num >> 16;
if (x != 0) {
n -= 16; num = x;}
189 x = num >> 8;
if (x != 0) {
n -= 8; num = x;}
190 x = num >> 4;
if (x != 0) {
n -= 4; num = x;}
191 x = num >> 2;
if (x != 0) {
n -= 2; num = x;}
192 x = num >> 1;
if (x != 0)
return n - 2;
199#if (defined(__GNUC__) || __has_builtin(__builtin_clzll))
200 return __builtin_clzll(num);
204 if (!_BitScanReverse64(&index, num)) {
209 return (
int) (64 - 1) - index;
215 x = num >> 32;
if (x != 0) {
n -= 32; num = x;}
216 x = num >> 16;
if (x != 0) {
n -= 16; num = x;}
217 x = num >> 8;
if (x != 0) {
n -= 8; num = x;}
218 x = num >> 4;
if (x != 0) {
n -= 4; num = x;}
219 x = num >> 2;
if (x != 0) {
n -= 2; num = x;}
220 x = num >> 1;
if (x != 0)
return n - 2;
221 return n - (uint32_t)num;
273 new_arena->
end = (
char*) new_arena + arena_size;
275 *arena_ptr = new_arena;
297 arena->ptr = (
char*)checkpoint;
301#if defined(IR_TARGET_X86)
302# define IR_BITSET_BITS 32
303# define IR_BITSET_ONE 1U
304# define ir_bitset_base_t uint32_t
305# define ir_bitset_ntz ir_ntz
307# define IR_BITSET_BITS 64
309# define IR_BITSET_ONE 1ui64
311# define IR_BITSET_ONE 1UL
313# define ir_bitset_base_t uint64_t
314# define ir_bitset_ntz ir_ntzl
357 for (i = 0; i <
len; i++) {
379 for (i = 0; i <
len; i++) {
388 for (i = 0; i <
len; i++) {
397 for (i = 0; i <
len; i++) {
398 set1[i] = set1[i] & ~set2[i];
406 for (i = 0; i <
len; i++) {
407 if (set1[i] & ~set2[i]) {
418 for (i = 0; i <
len; i++) {
449 for (i = 0; i <
len; i++) {
454 set[i] = x & (x - 1);
461#define IR_BITSET_FOREACH(set, len, bit) do { \
462 ir_bitset _set = (set); \
463 uint32_t _i, _len = (len); \
464 for (_i = 0; _i < _len; _set++, _i++) { \
465 ir_bitset_base_t _x = *_set; \
467 (bit) = IR_BITSET_BITS * _i + ir_bitset_ntz(_x); \
470#define IR_BITSET_FOREACH_DIFFERENCE(set1, set2, len, bit) do { \
471 ir_bitset _set1 = (set1); \
472 ir_bitset _set2 = (set2); \
473 uint32_t _i, _len = (len); \
474 for (_i = 0; _i < _len; _i++) { \
475 ir_bitset_base_t _x = _set1[_i] & ~_set2[_i]; \
477 (bit) = IR_BITSET_BITS * _i + ir_bitset_ntz(_x); \
480#define IR_BITSET_FOREACH_END() \
/* Sparse-set element access: a single allocation holds the "sparse" index
 * array growing downwards from data[-1] and the "dense" element array growing
 * upwards from data[0].
 * NOTE(review): looks like the classic Briggs/Torczon sparse-set layout —
 * confirm against ir_sparse_set_init. */
#define IR_SPARSE_SET_DENSE(set, n)  (set)->data[n]
#define IR_SPARSE_SET_SPARSE(set, n) (set)->data[-1 - ((int32_t)(n))]
518 return set->
len == 0;
563#define IR_SPARSE_SET_FOREACH(set, bit) do { \
564 ir_sparse_set *_set = (set); \
565 uint32_t _i, _len = _set->len; \
566 uint32_t *_p = _set->data; \
567 for (_i = 0; _i < _len; _p++, _i++) { \
570#define IR_SPARSE_SET_FOREACH_END() \
624 }
while (i < q->
len);
/* Sentinel values for 32-bit indices and values. */
#define IR_INVALID_IDX 0xffffffff
#define IR_INVALID_VAL 0x80000000

/* NOTE(review): these classifications rely on the ir_op enum ordering
 * (constant opcodes grouped right after IR_NOP, foldable ops first) —
 * confirm against the opcode list before reordering it. */
#define IR_IS_CONST_OP(op)    ((op) > IR_NOP && (op) <= IR_C_FLOAT)
#define IR_IS_FOLDABLE_OP(op) ((op) <= IR_LAST_FOLDABLE_OP)
#define IR_IS_SYM_CONST(op)   ((op) == IR_STR || (op) == IR_SYM || (op) == IR_FUNC)
897 }
else if (
v->type == IR_BOOL) {
900 return v->val.i64 != 0;
901 }
else if (
v->type == IR_DOUBLE) {
902 return v->val.d != 0.0;
905 return v->val.f != 0.0;
/* ir_op_flags[] bit layout:
 *   bits 0..1   - number of input edges (IR_OP_FLAG_EDGES_MASK)
 *   bit  2      - variable number of inputs (IR_OP_FLAG_VAR_INPUTS)
 *   bits 3..4   - operand count (IR_OP_FLAG_OPERANDS_MASK)
 *   bits 6..7   - memory-op subclass (IR_OP_FLAG_MEM_MASK)
 *   bits 8..15  - op class/property flags
 *   bits 16..31 - per-operand kinds, 4 bits each (see IR_OPND_KIND)
 */
#define IR_OP_FLAG_OPERANDS_SHIFT 3

#define IR_OP_FLAG_EDGES_MASK     0x03
#define IR_OP_FLAG_VAR_INPUTS     0x04
#define IR_OP_FLAG_OPERANDS_MASK  0x18
#define IR_OP_FLAG_MEM_MASK       ((1<<6)|(1<<7))

#define IR_OP_FLAG_DATA           (1<<8)
#define IR_OP_FLAG_CONTROL        (1<<9)
#define IR_OP_FLAG_MEM            (1<<10)
#define IR_OP_FLAG_COMMUTATIVE    (1<<11)
#define IR_OP_FLAG_BB_START       (1<<12)
#define IR_OP_FLAG_BB_END         (1<<13)
#define IR_OP_FLAG_TERMINATOR     (1<<14)
#define IR_OP_FLAG_PINNED         (1<<15)

/* Memory-op subclasses, encoded in bits 6..7. */
#define IR_OP_FLAG_MEM_LOAD       ((0<<6)|(0<<7))
#define IR_OP_FLAG_MEM_STORE      ((0<<6)|(1<<7))
#define IR_OP_FLAG_MEM_CALL       ((1<<6)|(0<<7))
#define IR_OP_FLAG_MEM_ALLOC      ((1<<6)|(1<<7))

/* Operand kinds (4-bit values packed into bits 16..31). */
#define IR_OPND_UNUSED            0x0
#define IR_OPND_DATA              0x1
#define IR_OPND_CONTROL           0x2
#define IR_OPND_CONTROL_DEP       0x3
#define IR_OPND_CONTROL_REF       0x4
#define IR_OPND_STR               0x5
#define IR_OPND_NUM               0x6
#define IR_OPND_PROB              0x7
#define IR_OPND_PROTO             0x8

/* Pack op flags with the three operand kinds. The uint32_t casts avoid
 * signed-shift UB when a kind value shifted by 28 would set bit 31. */
#define IR_OP_FLAGS(op_flags, op1_flags, op2_flags, op3_flags) \
	((uint32_t)(op_flags) | ((uint32_t)(op1_flags) << 20) | ((uint32_t)(op2_flags) << 24) | ((uint32_t)(op3_flags) << 28))

/* Accessors; arguments are fully parenthesized so complex expressions
 * such as (a | b) bind correctly against the mask. */
#define IR_INPUT_EDGES_COUNT(flags) ((flags) & IR_OP_FLAG_EDGES_MASK)
#define IR_OPERANDS_COUNT(flags)    (((flags) & IR_OP_FLAG_OPERANDS_MASK) >> IR_OP_FLAG_OPERANDS_SHIFT)

#define IR_OP_HAS_VAR_INPUTS(flags) ((flags) & IR_OP_FLAG_VAR_INPUTS)

/* Kind of operand "i" (1..3); operands beyond 3 share op3's kind. */
#define IR_OPND_KIND(flags, i) \
	(((flags) >> (16 + (4 * (((i) > 3) ? 3 : (i))))) & 0xf)

/* Operand kinds that hold an ir_ref (as opposed to inline data). */
#define IR_IS_REF_OPND_KIND(kind) \
	((kind) >= IR_OPND_DATA && (kind) <= IR_OPND_CONTROL_REF)
976 n = insn->inputs_count;
987 n = insn->inputs_count;
994 return 1 + (inputs_count >> 2);
/* ir_ctx status flags set/consumed by the various passes. */
#define IR_CFG_HAS_LOOPS       (1<<0)
#define IR_IRREDUCIBLE_CFG     (1<<1)
#define IR_HAS_ALLOCA          (1<<2)
#define IR_HAS_CALLS           (1<<3)
#define IR_OPT_IN_SCCP         (1<<4)
#define IR_LINEAR              (1<<5)
#define IR_HAS_VA_START        (1<<6)
#define IR_HAS_VA_COPY         (1<<7)
#define IR_HAS_VA_ARG_GP       (1<<8)
#define IR_HAS_VA_ARG_FP       (1<<9)
#define IR_HAS_FP_RET_SLOT     (1<<10)
#define IR_16B_FRAME_ALIGNMENT (1<<11)

/* NOTE(review): bits 25/26 below are deliberately reused by different,
 * apparently mutually-exclusive phases (mem2ssa, CFG, liveness, regalloc) —
 * confirm the phases never hold these bits at the same time. */
#define IR_MEM2SSA_VARS        (1<<25)

#define IR_CFG_REACHABLE       (1<<26)

#define IR_NO_LOOPS            (1<<25)

#define IR_LR_HAVE_DESSA_MOVES (1<<25)

#define IR_RA_HAVE_SPLITS      (1<<25)
#define IR_RA_HAVE_SPILLS      (1<<26)

#define IR_RESERVED_FLAG_1     (1U<<31)
1067#define MAKE_NOP(_insn) do { \
1068 ir_insn *__insn = _insn; \
1069 __insn->optx = IR_NOP; \
1070 __insn->op1 = __insn->op2 = __insn->op3 = IR_UNUSED; \
1073#define CLEAR_USES(_ref) do { \
1074 ir_use_list *__use_list = &ctx->use_lists[_ref]; \
1075 __use_list->count = 0; \
1078#define SWAP_REFS(_ref1, _ref2) do { \
1079 ir_ref _tmp = _ref1; \
1084#define SWAP_INSNS(_insn1, _insn2) do { \
1085 ir_insn *_tmp = _insn1; \
/* Classify opcodes by basic-block position, table-driven via ir_op_flags[]. */
#define IR_IS_BB_START(op) \
	((ir_op_flags[op] & IR_OP_FLAG_BB_START) != 0)

/* Control-flow merge points (regular merge and loop header). */
#define IR_IS_BB_MERGE(op) \
	((op) == IR_MERGE || (op) == IR_LOOP_BEGIN)

#define IR_IS_BB_END(op) \
	((ir_op_flags[op] & IR_OP_FLAG_BB_END) != 0)
/* ir_block->flags bits. */
#define IR_BB_UNREACHABLE      (1<<0)
#define IR_BB_START            (1<<1)
#define IR_BB_ENTRY            (1<<2)
#define IR_BB_LOOP_HEADER      (1<<3)
#define IR_BB_IRREDUCIBLE_LOOP (1<<4)
#define IR_BB_DESSA_MOVES      (1<<5)
#define IR_BB_EMPTY            (1<<6)
#define IR_BB_PREV_EMPTY_ENTRY (1<<7)
#define IR_BB_OSR_ENTRY_LOADS  (1<<8)
#define IR_BB_LOOP_WITH_ENTRY  (1<<9)

/* Block-contents hints. */
#define IR_BB_HAS_PHI          (1<<10)
#define IR_BB_HAS_PI           (1<<11)
#define IR_BB_HAS_PARAM        (1<<12)
#define IR_BB_HAS_VAR          (1<<13)

/* Code-generation hint: align this loop header. */
#define IR_BB_ALIGN_LOOP       (1<<14)
/* Each IR ref expands to IR_SUB_REFS_COUNT live positions, ordered
 * LOAD < USE < DEF < SAVE within a single instruction. */
#define IR_SUB_REFS_COUNT                 4

#define IR_LOAD_SUB_REF                   0
#define IR_USE_SUB_REF                    1
#define IR_DEF_SUB_REF                    2
#define IR_SAVE_SUB_REF                   3

/* Conversions between live positions and refs. */
#define IR_LIVE_POS_TO_REF(pos)           ((pos) / IR_SUB_REFS_COUNT)
#define IR_LIVE_POS_TO_SUB_REF(pos)       ((pos) % IR_SUB_REFS_COUNT)

#define IR_LIVE_POS_FROM_REF(ref)         ((ref) * IR_SUB_REFS_COUNT)

#define IR_START_LIVE_POS_FROM_REF(ref)   ((ref) * IR_SUB_REFS_COUNT)
#define IR_LOAD_LIVE_POS_FROM_REF(ref)    ((ref) * IR_SUB_REFS_COUNT + IR_LOAD_SUB_REF)
#define IR_USE_LIVE_POS_FROM_REF(ref)     ((ref) * IR_SUB_REFS_COUNT + IR_USE_SUB_REF)
#define IR_DEF_LIVE_POS_FROM_REF(ref)     ((ref) * IR_SUB_REFS_COUNT + IR_DEF_SUB_REF)
#define IR_SAVE_LIVE_POS_FROM_REF(ref)    ((ref) * IR_SUB_REFS_COUNT + IR_SAVE_SUB_REF)
/* End position of a ref coincides with the start position of the next ref. */
#define IR_END_LIVE_POS_FROM_REF(ref)     ((ref) * IR_SUB_REFS_COUNT + IR_SUB_REFS_COUNT)
/* Register-allocation constraint flags returned per definition/use. */
#define IR_USE_MUST_BE_IN_REG            (1<<0)
#define IR_USE_SHOULD_BE_IN_REG          (1<<1)
#define IR_DEF_REUSES_OP1_REG            (1<<2)
#define IR_DEF_CONFLICTS_WITH_INPUT_REGS (1<<3)

#define IR_FUSED_USE                     (1<<6)
#define IR_PHI_USE                       (1<<7)

/* Per-operand constraints: 2 bits per operand, operand 1 at bits 8..9. */
#define IR_OP1_MUST_BE_IN_REG            (1<<8)
#define IR_OP1_SHOULD_BE_IN_REG          (1<<9)
#define IR_OP2_MUST_BE_IN_REG            (1<<10)
#define IR_OP2_SHOULD_BE_IN_REG          (1<<11)
#define IR_OP3_MUST_BE_IN_REG            (1<<12)
#define IR_OP3_SHOULD_BE_IN_REG          (1<<13)

/* Extract the 2 constraint bits for operand "op_num" (1..3; >3 maps to 3). */
#define IR_USE_FLAGS(def_flags, op_num) (((def_flags) >> (6 + (IR_MIN((op_num), 3) * 2))) & 3)
/* ir_live_interval->flags bits. */
#define IR_LIVE_INTERVAL_FIXED         (1<<0)
#define IR_LIVE_INTERVAL_TEMP          (1<<1)
#define IR_LIVE_INTERVAL_HAS_HINT_REGS (1<<2)
#define IR_LIVE_INTERVAL_HAS_HINT_REFS (1<<3)
#define IR_LIVE_INTERVAL_MEM_PARAM     (1<<4)
#define IR_LIVE_INTERVAL_MEM_LOAD      (1<<5)
#define IR_LIVE_INTERVAL_COALESCED     (1<<6)
#define IR_LIVE_INTERVAL_SPILL_SPECIAL (1<<7)
#define IR_LIVE_INTERVAL_SPILLED       (1<<8)
#define IR_LIVE_INTERVAL_SPLIT_CHILD   (1<<9)
1280#if defined(IR_REGSET_64BIT)
1283typedef int8_t ir_reg;
1287typedef uint64_t ir_regset;
1289typedef uint32_t ir_regset;
/* The empty register set. */
#define IR_REGSET_EMPTY 0

/* True iff the set contains no registers. */
#define IR_REGSET_IS_EMPTY(regset) \
	((regset) == IR_REGSET_EMPTY)

/* True iff the set contains exactly one register (nonzero power of two). */
#define IR_REGSET_IS_SINGLETON(regset) \
	((regset) && !((regset) & ((regset) - 1)))
1301# define IR_REGSET(reg) \
1304# define IR_REGSET(reg) \
1309# define IR_REGSET_INTERVAL(reg1, reg2) \
1310 (((1ull << ((reg2) - (reg1) + 1)) - 1) << (reg1))
1312# define IR_REGSET_INTERVAL(reg1, reg2) \
1313 (((1u << ((reg2) - (reg1) + 1)) - 1) << (reg1))
/* Membership test: is "reg" present in "regset"? */
#define IR_REGSET_IN(regset, reg) \
	(((regset) & IR_REGSET(reg)) != 0)

/* Add/remove a single register in place. */
#define IR_REGSET_INCL(regset, reg) \
	(regset) |= IR_REGSET(reg)

#define IR_REGSET_EXCL(regset, reg) \
	(regset) &= ~IR_REGSET(reg)
1325#define IR_REGSET_UNION(set1, set2) \
1328#define IR_REGSET_INTERSECTION(set1, set2) \
1331#define IR_REGSET_DIFFERENCE(set1, set2) \
1335# define IR_REGSET_FIRST(set) ((ir_reg)ir_ntzl(set))
/* Highest register in a 64-bit set: 63 - nlz(set), computed as nlz(set) ^ 63. */
# define IR_REGSET_LAST(set) ((ir_reg)(ir_nlzl(set)^63))
1338# define IR_REGSET_FIRST(set) ((ir_reg)ir_ntz(set))
1339# define IR_REGSET_LAST(set) ((ir_reg)(ir_nlz(set)^31))
1347 reg = IR_REGSET_FIRST(*set);
1348 *set = (*set) & ((*set) - 1);
1352#define IR_REGSET_FOREACH(set, reg) \
1354 ir_regset _tmp = (set); \
1355 while (!IR_REGSET_IS_EMPTY(_tmp)) { \
1356 reg = ir_regset_pop_first(&_tmp);
1358#define IR_REGSET_FOREACH_END() \
1377 int8_t *regs = ctx->
regs[ref];
1388 int8_t *regs = ctx->
regs[ref];
1392 return regs[op_num];
/* ctx->rules[] encoding: rule number in the low byte (IR_RULE_MASK),
 * matcher modifier bits in the top byte. */
#define IR_FUSED     (1U<<31)
#define IR_SKIPPED   (1U<<30)
#define IR_SIMPLE    (1U<<29)
#define IR_FUSED_REG (1U<<28)
#define IR_MAY_SWAP  (1U<<27)
#define IR_MAY_REUSE (1U<<26)

#define IR_RULE_MASK 0xff
/* Compound-literal constructors for temporary register requests. */
#define IR_TMP_REG(_num, _type, _start, _end) \
	(ir_tmp_reg){.num=(_num), .type=(_type), .start=(_start), .end=(_end)}
/* A scratch register names a fixed physical register and carries no value type. */
#define IR_SCRATCH_REG(_reg, _start, _end) \
	(ir_tmp_reg){.reg=(_reg), .type=IR_VOID, .start=(_start), .end=(_end)}
prev(array|object &$array)
memset(ptr, 0, type->size)
const char * ir_type_name[IR_LAST_TYPE]
const uint8_t ir_type_flags[IR_LAST_TYPE]
const char * ir_type_cname[IR_LAST_TYPE]
const uint32_t ir_op_flags[IR_LAST_OP]
const char * ir_op_name[IR_LAST_OP]
const uint8_t ir_type_size[IR_LAST_TYPE]
struct _ir_live_range ir_live_range
#define IR_IS_TYPE_INT(t)
struct _ir_live_interval ir_live_interval
struct _ir_hashtab ir_hashtab
#define IR_IS_CONST_REF(ref)
struct _ir_arena ir_arena
struct _ir_use_list ir_use_list
struct _ir_block ir_block
void ir_fix_stack_frame(ir_ctx *ctx)
void ir_iter_opt(ir_ctx *ctx, ir_bitqueue *worklist)
ir_ref ir_addrtab_find(const ir_hashtab *tab, uint64_t key)
IR_ALWAYS_INLINE uint32_t ir_rol32(uint32_t op1, uint32_t op2)
IR_ALWAYS_INLINE void ir_bitqueue_grow(ir_bitqueue *q, uint32_t n)
int ir_get_target_constraints(ir_ctx *ctx, ir_ref ref, ir_target_constraints *constraints)
void ir_update_op(ir_ctx *ctx, ir_ref ref, uint32_t idx, ir_ref new_val)
IR_ALWAYS_INLINE uint32_t ir_list_len(const ir_list *l)
IR_ALWAYS_INLINE bool ir_const_is_true(const ir_insn *v)
IR_ALWAYS_INLINE int ir_bitset_last(const ir_bitset set, uint32_t len)
IR_ALWAYS_INLINE ir_ref ir_input_edges_count(const ir_ctx *ctx, const ir_insn *insn)
ir_bitset_base_t * ir_bitset
IR_ALWAYS_INLINE uint64_t ir_rol64(uint64_t op1, uint64_t op2)
IR_ALWAYS_INLINE ir_ref ir_array_get(const ir_array *a, uint32_t i)
IR_ALWAYS_INLINE ir_ref ir_list_pop(ir_list *l)
IR_ALWAYS_INLINE uint32_t ir_list_capasity(const ir_list *l)
void ir_list_insert(ir_list *l, uint32_t i, ir_ref val)
void ir_use_list_replace_one(ir_ctx *ctx, ir_ref def, ir_ref use, ir_ref new_use)
IR_ALWAYS_INLINE void ir_list_push_unchecked(ir_list *l, ir_ref val)
IR_ALWAYS_INLINE void ir_worklist_free(ir_worklist *w)
IR_ALWAYS_INLINE void ir_array_free(ir_array *a)
int(* emit_copy_t)(ir_ctx *ctx, uint8_t type, ir_ref from, ir_ref to)
void ir_build_prev_refs(ir_ctx *ctx)
IR_ALWAYS_INLINE bool ir_bitset_in(const ir_bitset set, uint32_t n)
void ir_use_list_remove_all(ir_ctx *ctx, ir_ref def, ir_ref use)
IR_ALWAYS_INLINE void ir_bitqueue_del(ir_bitqueue *q, uint32_t n)
#define IR_ALIGNED_SIZE(size, alignment)
void ir_replace(ir_ctx *ctx, ir_ref ref, ir_ref new_ref)
ir_ref ir_check_dominating_predicates(ir_ctx *ctx, ir_ref ref, ir_ref condition)
IR_ALWAYS_INLINE void ir_bitset_copy(ir_bitset set1, const ir_bitset set2, uint32_t len)
ir_ref ir_const_ex(ir_ctx *ctx, ir_val val, uint8_t type, uint32_t optx)
bool ir_is_fastcall(const ir_ctx *ctx, const ir_insn *insn)
void ir_array_remove(ir_array *a, uint32_t i)
IR_ALWAYS_INLINE uint32_t ir_phi_input_number(const ir_ctx *ctx, const ir_block *bb, uint32_t from)
void ir_array_grow(ir_array *a, uint32_t size)
IR_ALWAYS_INLINE void ir_bitqueue_add(ir_bitqueue *q, uint32_t n)
void ir_print_escaped_str(const char *s, size_t len, FILE *f)
bool ir_is_vararg(const ir_ctx *ctx, ir_insn *insn)
#define IR_OPERANDS_COUNT(flags)
uint32_t ir_list_find(const ir_list *l, ir_ref val)
void ir_use_list_sort(ir_ctx *ctx, ir_ref def)
IR_ALWAYS_INLINE uint32_t ir_ror32(uint32_t op1, uint32_t op2)
IR_ALWAYS_INLINE void ir_bitset_intersection(ir_bitset set1, const ir_bitset set2, uint32_t len)
struct _ir_addrtab_bucket ir_addrtab_bucket
IR_ALWAYS_INLINE bool ir_bitqueue_in(const ir_bitqueue *q, uint32_t n)
IR_ALWAYS_INLINE void ir_bitset_fill(ir_bitset set, uint32_t len)
IR_ALWAYS_INLINE ir_bitset ir_bitset_malloc(uint32_t n)
IR_ALWAYS_INLINE bool ir_bitset_empty(const ir_bitset set, uint32_t len)
#define EXPECTED(condition)
const char * ir_rule_name[]
IR_ALWAYS_INLINE ir_ref ir_operands_count(const ir_ctx *ctx, const ir_insn *insn)
#define IR_SPARSE_SET_SPARSE(set, n)
ir_ref ir_find_aliasing_load(ir_ctx *ctx, ir_ref ref, ir_type type, ir_ref addr)
IR_ALWAYS_INLINE uint32_t ir_ntz(uint32_t num)
int32_t ir_allocate_spill_slot(ir_ctx *ctx, ir_type type, ir_reg_alloc_data *data)
IR_ALWAYS_INLINE void ir_sparse_set_add(ir_sparse_set *set, uint32_t n)
IR_ALWAYS_INLINE bool ir_worklist_push(ir_worklist *w, ir_ref val)
IR_ALWAYS_INLINE ir_ref ir_worklist_peek(const ir_worklist *w)
IR_ALWAYS_INLINE void ir_array_set_unchecked(ir_array *a, uint32_t i, ir_ref val)
IR_ALWAYS_INLINE void ir_worklist_clear(ir_worklist *w)
void ir_list_remove(ir_list *l, uint32_t i)
struct _ir_hashtab_bucket ir_hashtab_bucket
IR_ALWAYS_INLINE ir_arena * ir_arena_create(size_t size)
ir_ref ir_find_aliasing_vload(ir_ctx *ctx, ir_ref ref, ir_type type, ir_ref var)
IR_ALWAYS_INLINE ir_ref ir_list_peek(const ir_list *l)
IR_ALWAYS_INLINE int ir_nlz(uint32_t num)
struct _ir_worklist ir_worklist
IR_ALWAYS_INLINE void ir_bitset_union(ir_bitset set1, const ir_bitset set2, uint32_t len)
IR_ALWAYS_INLINE void ir_bitset_incl(ir_bitset set, uint32_t n)
int ir_gen_dessa_moves(ir_ctx *ctx, uint32_t b, emit_copy_t emit_copy)
IR_ALWAYS_INLINE void ir_sparse_set_init(ir_sparse_set *set, uint32_t size)
#define IR_IS_SYM_CONST(op)
IR_ALWAYS_INLINE bool ir_sparse_set_empty(const ir_sparse_set *set)
IR_ALWAYS_INLINE int8_t ir_get_alocated_reg(const ir_ctx *ctx, ir_ref ref, int op_num)
IR_ALWAYS_INLINE bool ir_sparse_set_in(const ir_sparse_set *set, uint32_t n)
IR_ALWAYS_INLINE void ir_worklist_init(ir_worklist *w, uint32_t size)
IR_ALWAYS_INLINE bool ir_bitset_equal(const ir_bitset set1, const ir_bitset set2, uint32_t len)
IR_ALWAYS_INLINE uint32_t ir_worklist_capasity(const ir_worklist *w)
IR_ALWAYS_INLINE void ir_list_free(ir_list *l)
IR_ALWAYS_INLINE uint32_t ir_bitset_len(uint32_t n)
void ir_hashtab_init(ir_hashtab *tab, uint32_t size)
IR_ALWAYS_INLINE void ir_sparse_set_del(ir_sparse_set *set, uint32_t n)
enum _ir_fold_action ir_fold_action
void ir_iter_update_op(ir_ctx *ctx, ir_ref ref, uint32_t idx, ir_ref new_val, ir_bitqueue *worklist)
#define IR_SPARSE_SET_DENSE(set, n)
ir_type ir_get_return_type(ir_ctx *ctx)
IR_ALWAYS_INLINE void ir_bitqueue_init(ir_bitqueue *q, uint32_t n)
ir_ref ir_find_aliasing_store(ir_ctx *ctx, ir_ref ref, ir_ref addr, ir_ref val)
IR_ALWAYS_INLINE int ir_bitset_first(const ir_bitset set, uint32_t len)
IR_ALWAYS_INLINE void ir_release(ir_arena **arena_ptr, void *checkpoint)
IR_ALWAYS_INLINE ir_ref ir_list_at(const ir_list *l, uint32_t i)
IR_ALWAYS_INLINE bool ir_list_contains(const ir_list *l, ir_ref val)
IR_ALWAYS_INLINE uint16_t ir_ror16(uint16_t op1, uint16_t op2)
void ir_use_list_replace_all(ir_ctx *ctx, ir_ref def, ir_ref use, ir_ref new_use)
void ir_hashtab_key_sort(ir_hashtab *tab)
IR_ALWAYS_INLINE void ir_list_init(ir_list *l, uint32_t size)
IR_ALWAYS_INLINE int ir_nlzl(uint64_t num)
struct _ir_reg_alloc_data ir_reg_alloc_data
IR_ALWAYS_INLINE uint8_t ir_rol8(uint8_t op1, uint8_t op2)
IR_ALWAYS_INLINE int ir_bitqueue_pop(ir_bitqueue *q)
IR_ALWAYS_INLINE void ir_bitset_difference(ir_bitset set1, const ir_bitset set2, uint32_t len)
IR_ALWAYS_INLINE bool ir_bitset_is_subset(const ir_bitset set1, const ir_bitset set2, uint32_t len)
IR_ALWAYS_INLINE void ir_array_set(ir_array *a, uint32_t i, ir_ref val)
IR_ALWAYS_INLINE void ir_sparse_set_free(ir_sparse_set *set)
IR_ALWAYS_INLINE void * ir_arena_checkpoint(ir_arena *arena)
IR_ALWAYS_INLINE int ir_bitset_pop_first(ir_bitset set, uint32_t len)
IR_ALWAYS_INLINE uint32_t ir_insn_inputs_to_len(uint32_t inputs_count)
IR_ALWAYS_INLINE void ir_list_set(ir_list *l, uint32_t i, ir_ref val)
#define IR_INPUT_EDGES_COUNT(flags)
IR_ALWAYS_INLINE ir_ref ir_array_at(const ir_array *a, uint32_t i)
IR_ALWAYS_INLINE void ir_list_clear(ir_list *l)
IR_ALWAYS_INLINE void ir_arena_free(ir_arena *arena)
IR_ALWAYS_INLINE void ir_bitset_clear(ir_bitset set, uint32_t len)
IR_ALWAYS_INLINE uint32_t ir_sparse_set_pop(ir_sparse_set *set)
IR_ALWAYS_INLINE bool ir_ref_is_true(ir_ctx *ctx, ir_ref ref)
#define IR_OP_FLAG_CONTROL
IR_ALWAYS_INLINE void ir_bitqueue_free(ir_bitqueue *q)
IR_ALWAYS_INLINE void ir_list_push(ir_list *l, ir_ref val)
ir_ref ir_folding(ir_ctx *ctx, uint32_t opt, ir_ref op1, ir_ref op2, ir_ref op3, ir_insn *op1_insn, ir_insn *op2_insn, ir_insn *op3_insn)
void ir_use_list_remove_one(ir_ctx *ctx, ir_ref def, ir_ref use)
IR_ALWAYS_INLINE ir_ref ir_next_control(const ir_ctx *ctx, ir_ref ref)
IR_ALWAYS_INLINE void ir_bitqueue_clear(ir_bitqueue *q)
ir_ref ir_find_aliasing_vstore(ir_ctx *ctx, ir_ref ref, ir_ref addr, ir_ref val)
IR_ALWAYS_INLINE ir_ref ir_worklist_pop(ir_worklist *w)
bool ir_use_list_add(ir_ctx *ctx, ir_ref def, ir_ref use)
void ir_array_insert(ir_array *a, uint32_t i, ir_ref val)
uint32_t ir_skip_empty_target_blocks(const ir_ctx *ctx, uint32_t b)
void ir_iter_replace(ir_ctx *ctx, ir_ref ref, ir_ref new_ref, ir_bitqueue *worklist)
IR_ALWAYS_INLINE void ir_sparse_set_clear(ir_sparse_set *set)
IR_ALWAYS_INLINE uint32_t ir_worklist_len(const ir_worklist *w)
struct _ir_bitqueue ir_bitqueue
struct _ir_sparse_set ir_sparse_set
struct _ir_use_pos ir_use_pos
void ir_addrtab_init(ir_hashtab *tab, uint32_t size)
uint32_t ir_next_block(const ir_ctx *ctx, uint32_t b)
IR_ALWAYS_INLINE void ir_set_alocated_reg(ir_ctx *ctx, ir_ref ref, int op_num, int8_t reg)
IR_ALWAYS_INLINE uint32_t ir_ntzl(uint64_t num)
IR_ALWAYS_INLINE uint64_t ir_ror64(uint64_t op1, uint64_t op2)
struct _ir_target_constraints ir_target_constraints
void ir_hashtab_free(ir_hashtab *tab)
IR_ALWAYS_INLINE uint16_t ir_rol16(uint16_t op1, uint16_t op2)
#define UNEXPECTED(condition)
IR_ALWAYS_INLINE void ir_array_init(ir_array *a, uint32_t size)
void ir_addrtab_set(ir_hashtab *tab, uint64_t key, ir_ref val)
void ir_addrtab_free(ir_hashtab *tab)
void ir_get_true_false_blocks(const ir_ctx *ctx, uint32_t b, uint32_t *true_block, uint32_t *false_block)
ir_ref ir_hashtab_find(const ir_hashtab *tab, uint32_t key)
IR_ALWAYS_INLINE void ir_list_del(ir_list *l, uint32_t i)
IR_ALWAYS_INLINE uint32_t ir_array_size(const ir_array *a)
IR_ALWAYS_INLINE uint32_t ir_insn_len(const ir_insn *insn)
IR_ALWAYS_INLINE void ir_bitset_excl(ir_bitset set, uint32_t n)
bool ir_hashtab_add(ir_hashtab *tab, uint32_t key, ir_ref val)
struct _ir_array ir_array
IR_ALWAYS_INLINE void * ir_arena_alloc(ir_arena **arena_ptr, size_t size)
IR_ALWAYS_INLINE uint8_t ir_ror8(uint8_t op1, uint8_t op2)
#define IR_OP_HAS_VAR_INPUTS(flags)
unsigned const char * end
unsigned const char * pos
unsigned char key[REFLECTION_KEY_LEN]
uint32_t successors_count
uint32_t predecessors_count
ir_live_range * current_range
ir_live_interval * list_next
ir_live_interval ** handled
#define UNEXPECTED(condition)