php-internal-docs 8.4.8
Unofficial docs for php/php-src
Loading...
Searching...
No Matches
ir.c
Go to the documentation of this file.
1/*
2 * IR - Lightweight JIT Compilation Framework
3 * (IR construction, folding, utilities)
4 * Copyright (C) 2022 Zend by Perforce.
5 * Authors: Dmitry Stogov <dmitry@php.net>
6 *
7 * The logical IR representation is based on Cliff Click's Sea of Nodes.
8 * See: C. Click, M. Paleczny. "A Simple Graph-Based Intermediate
9 * Representation" In ACM SIGPLAN Workshop on Intermediate Representations
10 * (IR '95), pages 35-49, Jan. 1995.
11 *
12 * The physical IR representation is based on Mike Pall's LuaJIT IR.
13 * See: M. Pall. "LuaJIT 2.0 intellectual property disclosure and research
14 * opportunities" November 2009 http://lua-users.org/lists/lua-l/2009-11/msg00089.html
15 */
16
17#ifndef _GNU_SOURCE
18# define _GNU_SOURCE
19#endif
20
21#ifndef _WIN32
22# include <sys/mman.h>
23# if defined(__linux__) || defined(__sun)
24# include <alloca.h>
25# endif
26# if defined(__APPLE__) && defined(__aarch64__)
27# include <libkern/OSCacheControl.h>
28# endif
29#else
30# define WIN32_LEAN_AND_MEAN
31# include <windows.h>
32#endif
33
34#include "ir.h"
35#include "ir_private.h"
36
37#include <stddef.h>
38#include <stdlib.h>
39#include <math.h>
40
41#ifdef HAVE_VALGRIND
42# include <valgrind/valgrind.h>
43#endif
44
45#define IR_TYPE_FLAGS(name, type, field, flags) ((flags)|sizeof(type)),
46#define IR_TYPE_NAME(name, type, field, flags) #name,
47#define IR_TYPE_CNAME(name, type, field, flags) #type,
48#define IR_TYPE_SIZE(name, type, field, flags) sizeof(type),
49#define IR_OP_NAME(name, flags, op1, op2, op3) #name,
50
51const uint8_t ir_type_flags[IR_LAST_TYPE] = {
52 0,
54};
55
56const char *ir_type_name[IR_LAST_TYPE] = {
57 "void",
59};
60
61const uint8_t ir_type_size[IR_LAST_TYPE] = {
62 0,
64};
65
67 "void",
69};
70
71const char *ir_op_name[IR_LAST_OP] = {
73#ifdef IR_PHP
75#endif
76};
77
/* Writes "len" bytes of "s" to "f", escaping them so the output can be
 * embedded inside a double-quoted string literal.  The input may contain
 * embedded NUL bytes (length-delimited, not NUL-terminated). */
void ir_print_escaped_str(const char *s, size_t len, FILE *f)
{
	char ch;

	while (len > 0) {
		ch = *s;
		switch (ch) {
			case '\\': fputs("\\\\", f); break;
			case '\'': fputs("'", f); break; /* no escape needed in "..." context */
			case '\"': fputs("\\\"", f); break;
			case '\a': fputs("\\a", f); break;
			case '\b': fputs("\\b", f); break;
			case '\033': fputs("\\e", f); break;
			case '\f': fputs("\\f", f); break;
			case '\n': fputs("\\n", f); break;
			case '\r': fputs("\\r", f); break;
			case '\t': fputs("\\t", f); break;
			case '\v': fputs("\\v", f); break;
			case '\?': fputs("\\?", f); break;
			default:
#ifdef __aarch64__
				/* NOTE(review): plain "char" is presumably unsigned here,
				 * so the ch >= 0 half of the test would be always-true. */
				if (ch < 32) {
#else
				if (ch >= 0 && ch < 32) {
#endif
					/* remaining control characters as 3-digit octal escapes */
					fprintf(f, "\\%c%c%c",
						'0' + ((ch >> 6) % 8),
						'0' + ((ch >> 3) % 8),
						'0' + (ch % 8));
					break;
				} else {
					fputc(ch, f);
				}
		}
		s++;
		len--;
	}
}
116
/* Prints the value of a constant instruction to "f" in a type-appropriate,
 * re-parsable textual form.  "quoted" controls whether IR_STR values are
 * wrapped in double quotes.  Floating-point values are printed with the
 * shortest format that round-trips through strtod(). */
void ir_print_const(const ir_ctx *ctx, const ir_insn *insn, FILE *f, bool quoted)
{
	char buf[128];

	if (insn->op == IR_FUNC || insn->op == IR_SYM) {
		/* symbolic constants print their interned name */
		fprintf(f, "%s", ir_get_str(ctx, insn->val.name));
		return;
	} else if (insn->op == IR_STR) {
		size_t len;
		const char *str = ir_get_strl(ctx, insn->val.str, &len);

		if (quoted) {
			fprintf(f, "\"");
			ir_print_escaped_str(str, len, f);
			fprintf(f, "\"");
		} else {
			ir_print_escaped_str(str, len, f);
		}
		return;
	}
	IR_ASSERT(IR_IS_CONST_OP(insn->op) || insn->op == IR_FUNC_ADDR);
	switch (insn->type) {
		case IR_BOOL:
			fprintf(f, "%u", insn->val.b);
			break;
		case IR_U8:
			fprintf(f, "%u", insn->val.u8);
			break;
		case IR_U16:
			fprintf(f, "%u", insn->val.u16);
			break;
		case IR_U32:
			fprintf(f, "%u", insn->val.u32);
			break;
		case IR_U64:
			fprintf(f, "%" PRIu64, insn->val.u64);
			break;
		case IR_ADDR:
			if (insn->val.addr) {
				fprintf(f, "0x%" PRIxPTR, insn->val.addr);
			} else {
				fprintf(f, "0");
			}
			break;
		case IR_CHAR:
			/* printable chars as 'c', common controls as escapes,
			 * anything else numerically */
			if (insn->val.c == '\\') {
				fprintf(f, "'\\\\'");
			} else if (insn->val.c >= ' ') {
				fprintf(f, "'%c'", insn->val.c);
			} else if (insn->val.c == '\t') {
				fprintf(f, "'\\t'");
			} else if (insn->val.c == '\r') {
				fprintf(f, "'\\r'");
			} else if (insn->val.c == '\n') {
				fprintf(f, "'\\n'");
			} else if (insn->val.c == '\0') {
				fprintf(f, "'\\0'");
			} else {
				fprintf(f, "%u", insn->val.c);
			}
			break;
		case IR_I8:
			fprintf(f, "%d", insn->val.i8);
			break;
		case IR_I16:
			fprintf(f, "%d", insn->val.i16);
			break;
		case IR_I32:
			fprintf(f, "%d", insn->val.i32);
			break;
		case IR_I64:
			fprintf(f, "%" PRIi64, insn->val.i64);
			break;
		case IR_DOUBLE:
			if (isnan(insn->val.d)) {
				fprintf(f, "nan");
			} else {
				/* try the short "%g" form first, fall back to full
				 * precision if it does not round-trip exactly */
				snprintf(buf, sizeof(buf), "%g", insn->val.d);
				if (strtod(buf, NULL) != insn->val.d) {
					snprintf(buf, sizeof(buf), "%.53e", insn->val.d);
					if (strtod(buf, NULL) != insn->val.d) {
						IR_ASSERT(0 && "can't format double");
					}
				}
				fprintf(f, "%s", buf);
			}
			break;
		case IR_FLOAT:
			if (isnan(insn->val.f)) {
				fprintf(f, "nan");
			} else {
				/* same round-trip scheme; the float is promoted to
				 * double for the comparison against strtod()'s result */
				snprintf(buf, sizeof(buf), "%g", insn->val.f);
				if (strtod(buf, NULL) != insn->val.f) {
					snprintf(buf, sizeof(buf), "%.24e", insn->val.f);
					if (strtod(buf, NULL) != insn->val.f) {
						IR_ASSERT(0 && "can't format float");
					}
				}
				fprintf(f, "%s", buf);
			}
			break;
		default:
			IR_ASSERT(0);
			break;
	}
}
223
/* Per-opcode flag encodings used when expanding the IR_OPS table below.
 * The suffix after "ir_op_flag_" mirrors the signature string in ir.h:
 * a class letter (d/r = data, p = pinned data, c = control, S = BB start,
 * E = BB end, T = terminator, l = mem load, s = mem store, x = call,
 * a = alloc), the number of data inputs, and after "X" the number of
 * additional (non-data) operands. */
#define ir_op_flag_v 0
#define ir_op_flag_v0X3 (0 | (3 << IR_OP_FLAG_OPERANDS_SHIFT))
#define ir_op_flag_d IR_OP_FLAG_DATA
#define ir_op_flag_d0 ir_op_flag_d
#define ir_op_flag_d1 (ir_op_flag_d | 1 | (1 << IR_OP_FLAG_OPERANDS_SHIFT))
#define ir_op_flag_d1X1 (ir_op_flag_d | 1 | (2 << IR_OP_FLAG_OPERANDS_SHIFT))
#define ir_op_flag_d2 (ir_op_flag_d | 2 | (2 << IR_OP_FLAG_OPERANDS_SHIFT))
#define ir_op_flag_d2C (ir_op_flag_d | IR_OP_FLAG_COMMUTATIVE | 2 | (2 << IR_OP_FLAG_OPERANDS_SHIFT))
#define ir_op_flag_d3 (ir_op_flag_d | 3 | (3 << IR_OP_FLAG_OPERANDS_SHIFT))
#define ir_op_flag_r IR_OP_FLAG_DATA // "d" and "r" are the same now
#define ir_op_flag_r0 ir_op_flag_r
#define ir_op_flag_p (IR_OP_FLAG_DATA | IR_OP_FLAG_PINNED)
#define ir_op_flag_p1 (ir_op_flag_p | 1 | (1 << IR_OP_FLAG_OPERANDS_SHIFT))
#define ir_op_flag_p1X1 (ir_op_flag_p | 1 | (2 << IR_OP_FLAG_OPERANDS_SHIFT))
#define ir_op_flag_p1X2 (ir_op_flag_p | 1 | (3 << IR_OP_FLAG_OPERANDS_SHIFT))
#define ir_op_flag_p2 (ir_op_flag_p | 2 | (2 << IR_OP_FLAG_OPERANDS_SHIFT))
#define ir_op_flag_pN (ir_op_flag_p | IR_OP_FLAG_VAR_INPUTS)
#define ir_op_flag_c IR_OP_FLAG_CONTROL
#define ir_op_flag_c1X2 (ir_op_flag_c | 1 | (3 << IR_OP_FLAG_OPERANDS_SHIFT))
#define ir_op_flag_c3 (ir_op_flag_c | 3 | (3 << IR_OP_FLAG_OPERANDS_SHIFT))
#define ir_op_flag_S (IR_OP_FLAG_CONTROL|IR_OP_FLAG_BB_START)
#define ir_op_flag_S0X1 (ir_op_flag_S | 0 | (1 << IR_OP_FLAG_OPERANDS_SHIFT))
#define ir_op_flag_S1 (ir_op_flag_S | 1 | (1 << IR_OP_FLAG_OPERANDS_SHIFT))
#define ir_op_flag_S1X1 (ir_op_flag_S | 1 | (2 << IR_OP_FLAG_OPERANDS_SHIFT))
#define ir_op_flag_S2 (ir_op_flag_S | 2 | (2 << IR_OP_FLAG_OPERANDS_SHIFT))
#define ir_op_flag_S2X1 (ir_op_flag_S | 2 | (3 << IR_OP_FLAG_OPERANDS_SHIFT))
#define ir_op_flag_SN (ir_op_flag_S | IR_OP_FLAG_VAR_INPUTS)
#define ir_op_flag_E (IR_OP_FLAG_CONTROL|IR_OP_FLAG_BB_END)
#define ir_op_flag_E1 (ir_op_flag_E | 1 | (1 << IR_OP_FLAG_OPERANDS_SHIFT))
#define ir_op_flag_E2 (ir_op_flag_E | 2 | (2 << IR_OP_FLAG_OPERANDS_SHIFT))
#define ir_op_flag_T (IR_OP_FLAG_CONTROL|IR_OP_FLAG_BB_END|IR_OP_FLAG_TERMINATOR)
#define ir_op_flag_T2X1 (ir_op_flag_T | 2 | (3 << IR_OP_FLAG_OPERANDS_SHIFT))
#define ir_op_flag_T1X2 (ir_op_flag_T | 1 | (3 << IR_OP_FLAG_OPERANDS_SHIFT))
#define ir_op_flag_l (IR_OP_FLAG_CONTROL|IR_OP_FLAG_MEM|IR_OP_FLAG_MEM_LOAD)
#define ir_op_flag_l0 ir_op_flag_l
#define ir_op_flag_l1 (ir_op_flag_l | 1 | (1 << IR_OP_FLAG_OPERANDS_SHIFT))
#define ir_op_flag_l1X1 (ir_op_flag_l | 1 | (2 << IR_OP_FLAG_OPERANDS_SHIFT))
#define ir_op_flag_l1X2 (ir_op_flag_l | 1 | (3 << IR_OP_FLAG_OPERANDS_SHIFT))
#define ir_op_flag_l2 (ir_op_flag_l | 2 | (2 << IR_OP_FLAG_OPERANDS_SHIFT))
#define ir_op_flag_l3 (ir_op_flag_l | 3 | (3 << IR_OP_FLAG_OPERANDS_SHIFT))
#define ir_op_flag_s (IR_OP_FLAG_CONTROL|IR_OP_FLAG_MEM|IR_OP_FLAG_MEM_STORE)
#define ir_op_flag_s0 ir_op_flag_s
#define ir_op_flag_s1 (ir_op_flag_s | 1 | (1 << IR_OP_FLAG_OPERANDS_SHIFT))
#define ir_op_flag_s2 (ir_op_flag_s | 2 | (2 << IR_OP_FLAG_OPERANDS_SHIFT))
#define ir_op_flag_s2X1 (ir_op_flag_s | 2 | (3 << IR_OP_FLAG_OPERANDS_SHIFT))
#define ir_op_flag_s3 (ir_op_flag_s | 3 | (3 << IR_OP_FLAG_OPERANDS_SHIFT))
#define ir_op_flag_x1 (IR_OP_FLAG_CONTROL|IR_OP_FLAG_MEM|IR_OP_FLAG_MEM_CALL | 1 | (1 << IR_OP_FLAG_OPERANDS_SHIFT))
#define ir_op_flag_x2 (IR_OP_FLAG_CONTROL|IR_OP_FLAG_MEM|IR_OP_FLAG_MEM_CALL | 2 | (2 << IR_OP_FLAG_OPERANDS_SHIFT))
#define ir_op_flag_x3 (IR_OP_FLAG_CONTROL|IR_OP_FLAG_MEM|IR_OP_FLAG_MEM_CALL | 3 | (3 << IR_OP_FLAG_OPERANDS_SHIFT))
#define ir_op_flag_xN (IR_OP_FLAG_CONTROL|IR_OP_FLAG_MEM|IR_OP_FLAG_MEM_CALL | IR_OP_FLAG_VAR_INPUTS)
#define ir_op_flag_a1 (IR_OP_FLAG_CONTROL|IR_OP_FLAG_MEM|IR_OP_FLAG_MEM_ALLOC | 1 | (1 << IR_OP_FLAG_OPERANDS_SHIFT))
#define ir_op_flag_a2 (IR_OP_FLAG_CONTROL|IR_OP_FLAG_MEM|IR_OP_FLAG_MEM_ALLOC | 2 | (2 << IR_OP_FLAG_OPERANDS_SHIFT))

/* Operand-kind encodings: map the three-letter operand tags from the
 * IR_OPS table to IR_OPND_* categories. */
#define ir_op_kind____ IR_OPND_UNUSED
#define ir_op_kind_def IR_OPND_DATA
#define ir_op_kind_ref IR_OPND_DATA
#define ir_op_kind_src IR_OPND_CONTROL
#define ir_op_kind_reg IR_OPND_CONTROL_DEP
#define ir_op_kind_ret IR_OPND_CONTROL_REF
#define ir_op_kind_str IR_OPND_STR
#define ir_op_kind_num IR_OPND_NUM
#define ir_op_kind_fld IR_OPND_STR
#define ir_op_kind_var IR_OPND_DATA
#define ir_op_kind_prb IR_OPND_PROB
#define ir_op_kind_opt IR_OPND_PROB
#define ir_op_kind_pro IR_OPND_PROTO

/* Combines the flag and operand-kind encodings into one packed uint32_t
 * entry of the ir_op_flags[] table. */
#define _IR_OP_FLAGS(name, flags, op1, op2, op3) \
	IR_OP_FLAGS(ir_op_flag_ ## flags, ir_op_kind_ ## op1, ir_op_kind_ ## op2, ir_op_kind_ ## op3),
293
294const uint32_t ir_op_flags[IR_LAST_OP] = {
296#ifdef IR_PHP
298#endif
299};
300
/* Extends the constants area, which grows downwards from ctx->ir_base
 * (constants use negative refs) but shares one allocation with the
 * instruction area above it.  Growth is exponential up to 8192 slots,
 * then linear in 4096-slot steps. */
static void ir_grow_bottom(ir_ctx *ctx)
{
	ir_insn *buf = ctx->ir_base - ctx->consts_limit;
	ir_ref old_consts_limit = ctx->consts_limit;

	if (ctx->consts_limit < 1024 * 4) {
		ctx->consts_limit *= 2;
	} else if (ctx->consts_limit < 1024 * 4 * 2) {
		ctx->consts_limit = 1024 * 4 * 2;
	} else {
		ctx->consts_limit += 1024 * 4;
	}
	buf = ir_mem_realloc(buf, (ctx->consts_limit + ctx->insns_limit) * sizeof(ir_insn));
	/* shift the old contents up so the extra space ends up below them */
	memmove(buf + (ctx->consts_limit - old_consts_limit),
		buf,
		(old_consts_limit + ctx->insns_count) * sizeof(ir_insn));
	ctx->ir_base = buf + ctx->consts_limit;
}
319
320static ir_ref ir_next_const(ir_ctx *ctx)
321{
322 ir_ref ref = ctx->consts_count;
323
324 if (UNEXPECTED(ref >= ctx->consts_limit)) {
325 ir_grow_bottom(ctx);
326 }
327 ctx->consts_count = ref + 1;
328 return -ref;
329}
330
/* Extends the instruction area (positive refs above ctx->ir_base).
 * Growth is exponential up to 8192 slots, then linear in 4096-slot steps.
 * Side arrays that are indexed by instruction ref (use_lists, cfg_map)
 * are resized in lockstep and their new tails zero-filled. */
static void ir_grow_top(ir_ctx *ctx)
{
	ir_ref old_insns_limit = ctx->insns_limit;
	ir_insn *buf = ctx->ir_base - ctx->consts_limit;

	if (ctx->insns_limit < 1024 * 4) {
		ctx->insns_limit *= 2;
	} else if (ctx->insns_limit < 1024 * 4 * 2) {
		ctx->insns_limit = 1024 * 4 * 2;
	} else {
		ctx->insns_limit += 1024 * 4;
	}
	buf = ir_mem_realloc(buf, (ctx->consts_limit + ctx->insns_limit) * sizeof(ir_insn));
	ctx->ir_base = buf + ctx->consts_limit;

	if (ctx->use_lists) {
		ctx->use_lists = ir_mem_realloc(ctx->use_lists, ctx->insns_limit * sizeof(ir_use_list));
		memset(ctx->use_lists + old_insns_limit, 0,
			(ctx->insns_limit - old_insns_limit) * sizeof(ir_use_list));
	}

	if (ctx->cfg_map) {
		ctx->cfg_map = ir_mem_realloc(ctx->cfg_map, ctx->insns_limit * sizeof(uint32_t));
		memset(ctx->cfg_map + old_insns_limit, 0,
			(ctx->insns_limit - old_insns_limit) * sizeof(uint32_t));
	}
}
358
359static ir_ref ir_next_insn(ir_ctx *ctx)
360{
361 ir_ref ref = ctx->insns_count;
362
363 if (UNEXPECTED(ref >= ctx->insns_limit)) {
364 ir_grow_top(ctx);
365 }
366 ctx->insns_count = ref + 1;
367 return ref;
368}
369
371{
372 ir_insn *buf = ir_mem_malloc((ctx->consts_count + ctx->insns_count) * sizeof(ir_insn));
373
374 memcpy(buf, ctx->ir_base - ctx->consts_count, (ctx->consts_count + ctx->insns_count) * sizeof(ir_insn));
375 ir_mem_free(ctx->ir_base - ctx->consts_limit);
376 ctx->insns_limit = ctx->insns_count;
377 ctx->consts_limit = ctx->consts_count;
378 ctx->ir_base = buf + ctx->consts_limit;
379}
380
/* Initializes an IR context.  One buffer holds both areas: constants grow
 * downwards from ctx->ir_base (negative refs) and instructions grow
 * upwards (positive refs).  The predefined constants IR_NULL, IR_FALSE
 * and IR_TRUE are installed up front. */
void ir_init(ir_ctx *ctx, uint32_t flags, ir_ref consts_limit, ir_ref insns_limit)
{
	ir_insn *buf;

	IR_ASSERT(consts_limit >= IR_CONSTS_LIMIT_MIN);
	IR_ASSERT(insns_limit >= IR_INSNS_LIMIT_MIN);

	memset(ctx, 0, sizeof(ir_ctx));

	ctx->insns_count = IR_UNUSED + 1;
	ctx->insns_limit = insns_limit;
	/* account for the predefined constant refs down to IR_TRUE */
	ctx->consts_count = -(IR_TRUE - 1);
	ctx->consts_limit = consts_limit;
	ctx->fold_cse_limit = IR_UNUSED + 1;
	ctx->flags = flags;

	/* -1 means "not set" for these two */
	ctx->spill_base = -1;
	ctx->fixed_stack_frame_size = -1;

	buf = ir_mem_malloc((consts_limit + insns_limit) * sizeof(ir_insn));
	ctx->ir_base = buf + consts_limit;

	MAKE_NOP(&ctx->ir_base[IR_UNUSED]);
	ctx->ir_base[IR_NULL].optx = IR_OPT(IR_C_ADDR, IR_ADDR);
	ctx->ir_base[IR_NULL].val.u64 = 0;
	ctx->ir_base[IR_FALSE].optx = IR_OPT(IR_C_BOOL, IR_BOOL);
	ctx->ir_base[IR_FALSE].val.u64 = 0;
	ctx->ir_base[IR_TRUE].optx = IR_OPT(IR_C_BOOL, IR_BOOL);
	ctx->ir_base[IR_TRUE].val.u64 = 1;
}
411
412void ir_free(ir_ctx *ctx)
413{
414 ir_insn *buf = ctx->ir_base - ctx->consts_limit;
416 if (ctx->strtab.data) {
417 ir_strtab_free(&ctx->strtab);
418 }
419 if (ctx->binding) {
421 ir_mem_free(ctx->binding);
422 }
423 if (ctx->use_lists) {
425 }
426 if (ctx->use_edges) {
428 }
429 if (ctx->cfg_blocks) {
431 }
432 if (ctx->cfg_edges) {
434 }
435 if (ctx->cfg_map) {
436 ir_mem_free(ctx->cfg_map);
437 }
438 if (ctx->cfg_schedule) {
440 }
441 if (ctx->rules) {
442 ir_mem_free(ctx->rules);
443 }
444 if (ctx->vregs) {
445 ir_mem_free(ctx->vregs);
446 }
447 if (ctx->live_intervals) {
449 }
450 if (ctx->arena) {
451 ir_arena_free(ctx->arena);
452 }
453 if (ctx->regs) {
454 ir_mem_free(ctx->regs);
455 if (ctx->fused_regs) {
458 }
459 }
460 if (ctx->prev_ref) {
461 ir_mem_free(ctx->prev_ref);
462 }
463 if (ctx->entries) {
464 ir_mem_free(ctx->entries);
465 }
466 if (ctx->osr_entry_loads) {
469 }
470}
471
473{
474 ir_ref ref = ir_next_const(ctx);
475 ir_insn *insn = &ctx->ir_base[ref];
476
477 insn->optx = IR_OPT(IR_ADDR, IR_ADDR);
478 insn->val.u64 = addr;
479 /* don't insert into constants chain */
480 insn->prev_const = IR_UNUSED;
481#if 0
482 insn->prev_const = ctx->prev_const_chain[IR_ADDR];
483 ctx->prev_const_chain[IR_ADDR] = ref;
484#endif
485#if 0
486 ir_insn *prev_insn, *next_insn;
487 ir_ref next;
488
489 prev_insn = NULL;
490 next = ctx->prev_const_chain[IR_ADDR];
491 while (next) {
492 next_insn = &ctx->ir_base[next];
493 if (UNEXPECTED(next_insn->val.u64 >= addr)) {
494 break;
495 }
496 prev_insn = next_insn;
497 next = next_insn->prev_const;
498 }
499
500 if (prev_insn) {
501 insn->prev_const = prev_insn->prev_const;
502 prev_insn->prev_const = ref;
503 } else {
504 insn->prev_const = ctx->prev_const_chain[IR_ADDR];
505 ctx->prev_const_chain[IR_ADDR] = ref;
506 }
507#endif
508
509 return ref;
510}
511
/* Interns a typed constant, reusing an existing one when an exact match
 * (same value and same optx) is found.  Constants of each type are kept
 * on a per-type chain (linked through prev_const) that is sorted by
 * ascending unsigned value, so the scan can stop early and the insertion
 * point falls out of the same walk. */
ir_ref ir_const_ex(ir_ctx *ctx, ir_val val, uint8_t type, uint32_t optx)
{
	ir_insn *insn, *prev_insn;
	ir_ref ref, prev;

	/* the predefined constants are never duplicated */
	if (type == IR_BOOL) {
		return val.u64 ? IR_TRUE : IR_FALSE;
	} else if (type == IR_ADDR && val.u64 == 0) {
		return IR_NULL;
	}
	prev_insn = NULL;
	ref = ctx->prev_const_chain[type];
	while (ref) {
		insn = &ctx->ir_base[ref];
		if (UNEXPECTED(insn->val.u64 >= val.u64)) {
			if (insn->val.u64 == val.u64) {
				if (insn->optx == optx) {
					/* exact match - reuse the existing constant */
					return ref;
				}
			} else {
				/* passed the insertion point in the sorted chain */
				break;
			}
		}
		prev_insn = insn;
		ref = insn->prev_const;
	}

	/* splice the new constant in; -consts_count is exactly the ref that
	 * ir_next_const() is about to hand out below */
	if (prev_insn) {
		prev = prev_insn->prev_const;
		prev_insn->prev_const = -ctx->consts_count;
	} else {
		prev = ctx->prev_const_chain[type];
		ctx->prev_const_chain[type] = -ctx->consts_count;
	}

	ref = ir_next_const(ctx);
	insn = &ctx->ir_base[ref];
	insn->prev_const = prev;

	insn->optx = optx;
	insn->val.u64 = val.u64;

	return ref;
}
556
558{
559 return ir_const_ex(ctx, val, type, IR_OPT(type, type));
560}
561
562ir_ref ir_const_i8(ir_ctx *ctx, int8_t c)
563{
564 ir_val val;
565 val.i64 = c;
566 return ir_const(ctx, val, IR_I8);
567}
568
569ir_ref ir_const_i16(ir_ctx *ctx, int16_t c)
570{
571 ir_val val;
572 val.i64 = c;
573 return ir_const(ctx, val, IR_I16);
574}
575
576ir_ref ir_const_i32(ir_ctx *ctx, int32_t c)
577{
578 ir_val val;
579 val.i64 = c;
580 return ir_const(ctx, val, IR_I32);
581}
582
583ir_ref ir_const_i64(ir_ctx *ctx, int64_t c)
584{
585 ir_val val;
586 val.i64 = c;
587 return ir_const(ctx, val, IR_I64);
588}
589
590ir_ref ir_const_u8(ir_ctx *ctx, uint8_t c)
591{
592 ir_val val;
593 val.u64 = c;
594 return ir_const(ctx, val, IR_U8);
595}
596
597ir_ref ir_const_u16(ir_ctx *ctx, uint16_t c)
598{
599 ir_val val;
600 val.u64 = c;
601 return ir_const(ctx, val, IR_U16);
602}
603
604ir_ref ir_const_u32(ir_ctx *ctx, uint32_t c)
605{
606 ir_val val;
607 val.u64 = c;
608 return ir_const(ctx, val, IR_U32);
609}
610
611ir_ref ir_const_u64(ir_ctx *ctx, uint64_t c)
612{
613 ir_val val;
614 val.u64 = c;
615 return ir_const(ctx, val, IR_U64);
616}
617
619{
620 return (c) ? IR_TRUE : IR_FALSE;
621}
622
624{
625 ir_val val;
626 val.i64 = c;
627 return ir_const(ctx, val, IR_CHAR);
628}
629
631{
632 ir_val val;
633 val.u32_hi = 0;
634 val.f = c;
635 return ir_const(ctx, val, IR_FLOAT);
636}
637
639{
640 ir_val val;
641 val.d = c;
642 return ir_const(ctx, val, IR_DOUBLE);
643}
644
645ir_ref ir_const_addr(ir_ctx *ctx, uintptr_t c)
646{
647 if (c == 0) {
648 return IR_NULL;
649 }
650 ir_val val;
651 val.u64 = c;
652 return ir_const(ctx, val, IR_ADDR);
653}
654
655ir_ref ir_const_func_addr(ir_ctx *ctx, uintptr_t c, ir_ref proto)
656{
657 if (c == 0) {
658 return IR_NULL;
659 }
660 ir_val val;
661 val.u64 = c;
662 IR_ASSERT(proto >= 0 && proto < 0xffff);
663 return ir_const_ex(ctx, val, IR_ADDR, IR_OPTX(IR_FUNC_ADDR, IR_ADDR, proto));
664}
665
667{
668 ir_val val;
669 val.u64 = str;
670 IR_ASSERT(proto >= 0 && proto < 0xffff);
671 return ir_const_ex(ctx, val, IR_ADDR, IR_OPTX(IR_FUNC, IR_ADDR, proto));
672}
673
675{
676 ir_val val;
677 val.u64 = str;
678 return ir_const_ex(ctx, val, IR_ADDR, IR_OPTX(IR_SYM, IR_ADDR, 0));
679}
680
682{
683 ir_val val;
684 val.u64 = str;
685 return ir_const_ex(ctx, val, IR_ADDR, IR_OPTX(IR_STR, IR_ADDR, 0));
686}
687
688ir_ref ir_str(ir_ctx *ctx, const char *s)
689{
690 size_t len;
691
692 if (!ctx->strtab.data) {
693 ir_strtab_init(&ctx->strtab, 64, 4096);
694 }
695 len = strlen(s);
696 IR_ASSERT(len <= 0xffffffff);
697 return ir_strtab_lookup(&ctx->strtab, s, (uint32_t)len, ir_strtab_count(&ctx->strtab) + 1);
698}
699
700ir_ref ir_strl(ir_ctx *ctx, const char *s, size_t len)
701{
702 if (!ctx->strtab.data) {
703 ir_strtab_init(&ctx->strtab, 64, 4096);
704 }
705 IR_ASSERT(len <= 0xffffffff);
706 return ir_strtab_lookup(&ctx->strtab, s, (uint32_t)len, ir_strtab_count(&ctx->strtab) + 1);
707}
708
/* Returns the NUL-terminated string for a 1-based string-table reference
 * previously produced by ir_str()/ir_strl(). */
const char *ir_get_str(const ir_ctx *ctx, ir_ref idx)
{
	IR_ASSERT(ctx->strtab.data);
	/* table entries are 0-based internally, public refs are 1-based */
	return ir_strtab_str(&ctx->strtab, idx - 1);
}
714
/* Like ir_get_str() but also reports the string's length through "len"
 * (strings may contain embedded NUL bytes). */
const char *ir_get_strl(const ir_ctx *ctx, ir_ref idx, size_t *len)
{
	IR_ASSERT(ctx->strtab.data);
	return ir_strtab_strl(&ctx->strtab, idx - 1, len);
}
720
721ir_ref ir_proto_0(ir_ctx *ctx, uint8_t flags, ir_type ret_type)
722{
723 ir_proto_t proto;
724
725 proto.flags = flags;
726 proto.ret_type = ret_type;
727 proto.params_count = 0;
728 return ir_strl(ctx, (const char *)&proto, offsetof(ir_proto_t, param_types) + 0);
729}
730
731ir_ref ir_proto_1(ir_ctx *ctx, uint8_t flags, ir_type ret_type, ir_type t1)
732{
733 ir_proto_t proto;
734
735 proto.flags = flags;
736 proto.ret_type = ret_type;
737 proto.params_count = 1;
738 proto.param_types[0] = t1;
739 return ir_strl(ctx, (const char *)&proto, offsetof(ir_proto_t, param_types) + 1);
740}
741
743{
744 ir_proto_t proto;
745
746 proto.flags = flags;
747 proto.ret_type = ret_type;
748 proto.params_count = 2;
749 proto.param_types[0] = t1;
750 proto.param_types[1] = t2;
751 return ir_strl(ctx, (const char *)&proto, offsetof(ir_proto_t, param_types) + 2);
752}
753
755{
756 ir_proto_t proto;
757
758 proto.flags = flags;
759 proto.ret_type = ret_type;
760 proto.params_count = 3;
761 proto.param_types[0] = t1;
762 proto.param_types[1] = t2;
763 proto.param_types[2] = t3;
764 return ir_strl(ctx, (const char *)&proto, offsetof(ir_proto_t, param_types) + 3);
765}
766
768 ir_type t4)
769{
770 ir_proto_t proto;
771
772 proto.flags = flags;
773 proto.ret_type = ret_type;
774 proto.params_count = 4;
775 proto.param_types[0] = t1;
776 proto.param_types[1] = t2;
777 proto.param_types[2] = t3;
778 proto.param_types[3] = t4;
779 return ir_strl(ctx, (const char *)&proto, offsetof(ir_proto_t, param_types) + 4);
780}
781
783 ir_type t4, ir_type t5)
784{
785 ir_proto_t proto;
786
787 proto.flags = flags;
788 proto.ret_type = ret_type;
789 proto.params_count = 5;
790 proto.param_types[0] = t1;
791 proto.param_types[1] = t2;
792 proto.param_types[2] = t3;
793 proto.param_types[3] = t4;
794 proto.param_types[4] = t5;
795 return ir_strl(ctx, (const char *)&proto, offsetof(ir_proto_t, param_types) + 5);
796}
797
798ir_ref ir_proto(ir_ctx *ctx, uint8_t flags, ir_type ret_type, uint32_t params_count, uint8_t *param_types)
799{
800 ir_proto_t *proto = alloca(offsetof(ir_proto_t, param_types) + params_count);
801
802 IR_ASSERT(params_count <= IR_MAX_PROTO_PARAMS);
803 proto->flags = flags;
804 proto->ret_type = ret_type;
805 proto->params_count = params_count;
806 memcpy(proto->param_types, param_types, params_count);
807 return ir_strl(ctx, (const char *)proto, offsetof(ir_proto_t, param_types) + params_count);
808}
809
810/* IR construction */
811ir_ref ir_emit(ir_ctx *ctx, uint32_t opt, ir_ref op1, ir_ref op2, ir_ref op3)
812{
813 ir_ref ref = ir_next_insn(ctx);
814 ir_insn *insn = &ctx->ir_base[ref];
815
816 insn->optx = opt;
817 insn->op1 = op1;
818 insn->op2 = op2;
819 insn->op3 = op3;
820
821 return ref;
822}
823
/* Emits an instruction with no operands. */
ir_ref ir_emit0(ir_ctx *ctx, uint32_t opt)
{
	return ir_emit(ctx, opt, IR_UNUSED, IR_UNUSED, IR_UNUSED);
}
828
/* Emits an instruction with one operand. */
ir_ref ir_emit1(ir_ctx *ctx, uint32_t opt, ir_ref op1)
{
	return ir_emit(ctx, opt, op1, IR_UNUSED, IR_UNUSED);
}
833
/* Emits an instruction with two operands. */
ir_ref ir_emit2(ir_ctx *ctx, uint32_t opt, ir_ref op1, ir_ref op2)
{
	return ir_emit(ctx, opt, op1, op2, IR_UNUSED);
}
838
/* Emits an instruction with three operands (convenience alias of ir_emit). */
ir_ref ir_emit3(ir_ctx *ctx, uint32_t opt, ir_ref op1, ir_ref op2, ir_ref op3)
{
	return ir_emit(ctx, opt, op1, op2, op3);
}
843
/* Local common-subexpression elimination: scans backwards through the
 * per-opcode chain of previously emitted instructions looking for one
 * identical to (opt, op1, op2, op3).  Returns its ref, or IR_UNUSED when
 * there is no match. */
static ir_ref _ir_fold_cse(ir_ctx *ctx, uint32_t opt, ir_ref op1, ir_ref op2, ir_ref op3)
{
	ir_ref ref = ctx->prev_insn_chain[opt & IR_OPT_OP_MASK];
	ir_insn *insn;

	if (ref) {
		ir_ref limit = ctx->fold_cse_limit;

		/* a reusable match must appear after all of its operands
		 * (and after the global CSE limit) */
		if (op1 > limit) {
			limit = op1;
		}
		if (op2 > limit) {
			limit = op2;
		}
		if (op3 > limit) {
			limit = op3;
		}
		while (ref >= limit) {
			insn = &ctx->ir_base[ref];
			if (insn->opt == opt && insn->op1 == op1 && insn->op2 == op2 && insn->op3 == op3) {
				return ref;
			}
			if (!insn->prev_insn_offset) {
				/* offset 0 marks the end of the chain */
				break;
			}
			/* chain links are 16-bit backward offsets */
			ref = ref - (ir_ref)(uint32_t)insn->prev_insn_offset;
		}
	}

	return IR_UNUSED;
}
875
/* Each IR_FOLD(...) rule in ir_fold.h expands to a switch case inside
 * ir_folding(), labeled by the rule's own source line number. */
#define IR_FOLD(X) IR_FOLD1(X, __LINE__)
#define IR_FOLD1(X, Y) IR_FOLD2(X, Y)
#define IR_FOLD2(X, Y) case IR_RULE_ ## Y:

/* Actions available inside a folding rule body.  Each one finishes the
 * rule by jumping to the corresponding label in ir_folding(). */
#define IR_FOLD_ERROR(msg) do { \
		IR_ASSERT(0 && (msg)); \
		goto ir_fold_emit; \
	} while (0)

#define IR_FOLD_CONST_U(_val) do { \
		val.u64 = (_val); \
		goto ir_fold_const; \
	} while (0)

#define IR_FOLD_CONST_I(_val) do { \
		val.i64 = (_val); \
		goto ir_fold_const; \
	} while (0)

#define IR_FOLD_CONST_D(_val) do { \
		val.d = (_val); \
		goto ir_fold_const; \
	} while (0)

#define IR_FOLD_CONST_F(_val) do { \
		val.f = (_val); \
		val.u32_hi = 0; \
		goto ir_fold_const; \
	} while (0)

#define IR_FOLD_COPY(op) do { \
		ref = (op); \
		goto ir_fold_copy; \
	} while (0)

#define IR_FOLD_BOOL(cond) \
	IR_FOLD_COPY((cond) ? IR_TRUE : IR_FALSE)

#define IR_FOLD_NAMED(name) ir_fold_ ## name:
#define IR_FOLD_DO_NAMED(name) goto ir_fold_ ## name
#define IR_FOLD_RESTART goto ir_fold_restart
#define IR_FOLD_CSE goto ir_fold_cse
#define IR_FOLD_EMIT goto ir_fold_emit
#define IR_FOLD_NEXT break

#include "ir_fold_hash.h"

/* Hash-table entries pack the rule id (upper bits) and the 21-bit
 * pattern key (lower bits). */
#define IR_FOLD_RULE(x) ((x) >> 21)
#define IR_FOLD_KEY(x) ((x) & 0x1fffff)

/*
 * key = insn->op | (insn->op1->op << 7) | (insn->op2->op << 14)
 *
 * ANY and UNUSED ops are represented by 0
 */
931
/* Rule-based folding engine.  The (op, op1->op, op2->op) triple is packed
 * into a 21-bit key (7 bits per op) and looked up in the generated hash
 * table; matching rule bodies (generated into the switch from ir_fold.h)
 * run and finish through one of the labels below.  The key is re-tried
 * with parts wildcarded (masked to 0 = ANY) until all four pattern shapes
 * have been checked.
 *
 * When ctx->use_lists is set (the graph is already fully constructed),
 * nothing is emitted here; the intended result is stored in
 * ctx->fold_insn and an IR_FOLD_DO_* code tells the caller what to do. */
ir_ref ir_folding(ir_ctx *ctx, uint32_t opt, ir_ref op1, ir_ref op2, ir_ref op3, ir_insn *op1_insn, ir_insn *op2_insn, ir_insn *op3_insn)
{
	uint8_t op;
	ir_ref ref;
	ir_val val;
	uint32_t key, any;
	(void) op3_insn;

restart:
	key = (opt & IR_OPT_OP_MASK) + ((uint32_t)op1_insn->op << 7) + ((uint32_t)op2_insn->op << 14);
	any = 0x1fffff;
	do {
		uint32_t k = key & any;
		uint32_t h = _ir_fold_hashkey(k);
		uint32_t fh = _ir_fold_hash[h];
		if (IR_FOLD_KEY(fh) == k
#ifdef IR_FOLD_SEMI_PERFECT_HASH
		 || (fh = _ir_fold_hash[h+1], (fh & 0x1fffff) == k)
#endif
		) {
			switch (IR_FOLD_RULE(fh)) {
#include "ir_fold.h"
				default:
					break;
			}
		}
		if (any == 0x7f) {
			/* All patterns are checked. Pass on to CSE. */
			goto ir_fold_cse;
		}
		/* op2/op1/op op2/_/op _/op1/op _/_/op
		 * 0x1fffff -> 0x1fc07f -> 0x003fff -> 0x00007f
		 * from masks to bits: 11 -> 10 -> 01 -> 00
		 *
		 * a b => x y
		 * 1 1 1 0
		 * 1 0 0 1
		 * 0 1 0 0
		 *
		 * x = a & b; y = !b
		 */
		any = ((any & (any << 7)) & 0x1fc000) | (~any & 0x3f80) | 0x7f;
	} while (1);

ir_fold_restart:
	if (!ctx->use_lists) {
		/* a rule rewrote opt/op1/op2/op3; reload the operand insns and retry */
		op1_insn = ctx->ir_base + op1;
		op2_insn = ctx->ir_base + op2;
		op3_insn = ctx->ir_base + op3;
		goto restart;
	} else {
		ctx->fold_insn.optx = opt;
		ctx->fold_insn.op1 = op1;
		ctx->fold_insn.op2 = op2;
		ctx->fold_insn.op3 = op3;
		return IR_FOLD_DO_RESTART;
	}
ir_fold_cse:
	if (!ctx->use_lists) {
		/* Local CSE */
		ref = _ir_fold_cse(ctx, opt, op1, op2, op3);
		if (ref) {
			return ref;
		}

		ref = ir_emit(ctx, opt, op1, op2, op3);

		/* Update local CSE chain */
		op = opt & IR_OPT_OP_MASK;
		ir_ref prev = ctx->prev_insn_chain[op];
		ir_insn *insn = ctx->ir_base + ref;
		if (!prev || ref - prev > 0xffff) {
			/* can't fit into 16-bit */
			insn->prev_insn_offset = 0;
		} else {
			insn->prev_insn_offset = ref - prev;
		}
		ctx->prev_insn_chain[op] = ref;

		return ref;
	} else {
		ctx->fold_insn.optx = opt;
		ctx->fold_insn.op1 = op1;
		ctx->fold_insn.op2 = op2;
		ctx->fold_insn.op3 = op3;
		return IR_FOLD_DO_CSE;
	}
ir_fold_emit:
	if (!ctx->use_lists) {
		return ir_emit(ctx, opt, op1, op2, op3);
	} else {
		ctx->fold_insn.optx = opt;
		ctx->fold_insn.op1 = op1;
		ctx->fold_insn.op2 = op2;
		ctx->fold_insn.op3 = op3;
		return IR_FOLD_DO_EMIT;
	}
ir_fold_copy:
	if (!ctx->use_lists) {
		return ref;
	} else {
		ctx->fold_insn.op1 = ref;
		return IR_FOLD_DO_COPY;
	}
ir_fold_const:
	if (!ctx->use_lists) {
		return ir_const(ctx, val, IR_OPT_TYPE(opt));
	} else {
		ctx->fold_insn.opt = IR_OPT(IR_OPT_TYPE(opt), IR_OPT_TYPE(opt));
		ctx->fold_insn.val.u64 = val.u64;
		return IR_FOLD_DO_CONST;
	}
}
1045
1046ir_ref ir_fold(ir_ctx *ctx, uint32_t opt, ir_ref op1, ir_ref op2, ir_ref op3)
1047{
1048 if (UNEXPECTED(!(ctx->flags & IR_OPT_FOLDING))) {
1049 if ((opt & IR_OPT_OP_MASK) == IR_PHI) {
1050 opt |= (3 << IR_OPT_INPUTS_SHIFT);
1051 }
1052 return ir_emit(ctx, opt, op1, op2, op3);
1053 }
1054 return ir_folding(ctx, opt, op1, op2, op3, ctx->ir_base + op1, ctx->ir_base + op2, ctx->ir_base + op3);
1055}
1056
/* Folds/emits an instruction with no operands. */
ir_ref ir_fold0(ir_ctx *ctx, uint32_t opt)
{
	return ir_fold(ctx, opt, IR_UNUSED, IR_UNUSED, IR_UNUSED);
}
1061
/* Folds/emits an instruction with one operand. */
ir_ref ir_fold1(ir_ctx *ctx, uint32_t opt, ir_ref op1)
{
	return ir_fold(ctx, opt, op1, IR_UNUSED, IR_UNUSED);
}
1066
1068{
1069 return ir_fold(ctx, opt, op1, op2, IR_UNUSED);
1070}
1071
/* Folds/emits an instruction with three operands (alias of ir_fold). */
ir_ref ir_fold3(ir_ctx *ctx, uint32_t opt, ir_ref op1, ir_ref op2, ir_ref op3)
{
	return ir_fold(ctx, opt, op1, op2, op3);
}
1076
/* Emits an instruction with a variable number ("count") of inputs.
 * Inputs occupy ops[1..count]; every extra group of 4 refs spills into
 * one additional ir_insn slot, which is reserved here and pre-filled with
 * IR_UNUSED.  The caller installs the real inputs via ir_set_op(). */
ir_ref ir_emit_N(ir_ctx *ctx, uint32_t opt, int32_t count)
{
	int i;
	ir_ref *p, ref = ctx->insns_count;
	ir_insn *insn;

	IR_ASSERT(count >= 0);
	/* need 1 + count/4 contiguous instruction slots */
	while (UNEXPECTED(ref + count/4 >= ctx->insns_limit)) {
		ir_grow_top(ctx);
	}
	ctx->insns_count = ref + 1 + count/4;

	insn = &ctx->ir_base[ref];
	insn->optx = opt | (count << IR_OPT_INPUTS_SHIFT);
	/* clear ops[1..count|3], i.e. pad out the tail of the last slot */
	for (i = 1, p = insn->ops + i; i <= (count|3); i++, p++) {
		*p = IR_UNUSED;
	}

	return ref;
}
1097
1098void ir_set_op(ir_ctx *ctx, ir_ref ref, int32_t n, ir_ref val)
1099{
1100 ir_insn *insn = &ctx->ir_base[ref];
1101
1102#ifdef IR_DEBUG
1103 if (n > 3) {
1104 int32_t count;
1105
1107 count = insn->inputs_count;
1108 IR_ASSERT(n <= count);
1109 }
1110#endif
1111 ir_insn_set_op(insn, n, val);
1112}
1113
1114ir_ref ir_get_op(ir_ctx *ctx, ir_ref ref, int32_t n)
1115{
1116 ir_insn *insn = &ctx->ir_base[ref];
1117
1118#ifdef IR_DEBUG
1119 if (n > 3) {
1120 int32_t count;
1121
1123 count = insn->inputs_count;
1124 IR_ASSERT(n <= count);
1125 }
1126#endif
1127 return ir_insn_op(insn, n);
1128}
1129
1130ir_ref ir_param(ir_ctx *ctx, ir_type type, ir_ref region, const char *name, int pos)
1131{
1132 IR_ASSERT(ctx->ir_base[region].op == IR_START);
1133 return ir_emit(ctx, IR_OPT(IR_PARAM, type), region, ir_str(ctx, name), pos);
1134}
1135
1136ir_ref ir_var(ir_ctx *ctx, ir_type type, ir_ref region, const char *name)
1137{
1138 IR_ASSERT(IR_IS_BB_START(ctx->ir_base[region].op));
1139 return ir_emit(ctx, IR_OPT(IR_VAR, type), region, ir_str(ctx, name), IR_UNUSED);
1140}
1141
1143{
1144 if (IR_IS_CONST_REF(def)) {
1145 return def;
1146 }
1147 if (!ctx->binding) {
1148 ctx->binding = ir_mem_malloc(sizeof(ir_hashtab));;
1149 ir_hashtab_init(ctx->binding, 16);
1150 }
1151 /* Node may be bound to some special spill slot (using negative "var") */
1152 IR_ASSERT(var < 0);
1153 if (!ir_hashtab_add(ctx->binding, def, var)) {
1154 /* Add a copy with different binding */
1155 def = ir_emit2(ctx, IR_OPT(IR_COPY, ctx->ir_base[def].type), def, 1);
1156 ir_hashtab_add(ctx->binding, def, var);
1157 }
1158 return def;
1159}
1160
1162{
1163 ir_ref var = ir_hashtab_find(ctx->binding, ref);
1164 return (var != (ir_ref)IR_INVALID_VAL) ? var : 0;
1165}
1166
1167/* Batch construction of def->use edges */
1168#if 0
1170{
1171 ir_ref n, i, j, *p, def;
1172 ir_insn *insn;
1173 uint32_t edges_count;
1174 ir_use_list *lists = ir_mem_calloc(ctx->insns_limit, sizeof(ir_use_list));
1175 ir_ref *edges;
1176 ir_use_list *use_list;
1177
1178 for (i = IR_UNUSED + 1, insn = ctx->ir_base + i; i < ctx->insns_count;) {
1179 uint32_t flags = ir_op_flags[insn->op];
1180
1182 n = insn->inputs_count;
1183 } else {
1184 n = insn->inputs_count = IR_INPUT_EDGES_COUNT(flags);
1185 }
1186 for (j = n, p = insn->ops + 1; j > 0; j--, p++) {
1187 def = *p;
1188 if (def > 0) {
1189 lists[def].count++;
1190 }
1191 }
1193 i += n;
1194 insn += n;
1195 }
1196
1197 edges_count = 0;
1198 for (i = IR_UNUSED + 1, use_list = &lists[i]; i < ctx->insns_count; i++, use_list++) {
1199 use_list->refs = edges_count;
1200 edges_count += use_list->count;
1201 use_list->count = 0;
1202 }
1203
1204 edges = ir_mem_malloc(IR_ALIGNED_SIZE(edges_count * sizeof(ir_ref), 4096));
1205 for (i = IR_UNUSED + 1, insn = ctx->ir_base + i; i < ctx->insns_count;) {
1206 n = insn->inputs_count;
1207 for (j = n, p = insn->ops + 1; j > 0; j--, p++) {
1208 def = *p;
1209 if (def > 0) {
1210 use_list = &lists[def];
1211 edges[use_list->refs + use_list->count++] = i;
1212 }
1213 }
1215 i += n;
1216 insn += n;
1217 }
1218
1219 ctx->use_edges = edges;
1220 ctx->use_edges_count = edges_count;
1221 ctx->use_lists = lists;
1222}
1223#else
1225{
1226 ir_ref n, i, j, *p, def;
1227 ir_insn *insn;
1228 size_t linked_lists_size, linked_lists_top = 0, edges_count = 0;
1229 ir_use_list *lists = ir_mem_calloc(ctx->insns_limit, sizeof(ir_use_list));
1230 ir_ref *edges;
1231 ir_use_list *use_list;
1232 ir_ref *linked_lists;
1233
1234 linked_lists_size = IR_ALIGNED_SIZE(ctx->insns_count, 1024);
1235 linked_lists = ir_mem_malloc(linked_lists_size * sizeof(ir_ref));
1236 for (i = IR_UNUSED + 1, insn = ctx->ir_base + i; i < ctx->insns_count;) {
1237 uint32_t flags = ir_op_flags[insn->op];
1238
1240 n = insn->inputs_count;
1241 } else {
1242 n = insn->inputs_count = IR_INPUT_EDGES_COUNT(flags);
1243 }
1244 for (j = n, p = insn->ops + 1; j > 0; j--, p++) {
1245 def = *p;
1246 if (def > 0) {
1247 use_list = &lists[def];
1248 edges_count++;
1249 if (!use_list->refs) {
1250 /* store a single "use" directly in "refs" using a positive number */
1251 use_list->refs = i;
1252 use_list->count = 1;
1253 } else {
1254 if (UNEXPECTED(linked_lists_top >= linked_lists_size)) {
1255 linked_lists_size += 1024;
1256 linked_lists = ir_mem_realloc(linked_lists, linked_lists_size * sizeof(ir_ref));
1257 }
1258 /* form a linked list of "uses" (like in binsort) */
1259 linked_lists[linked_lists_top] = i; /* store the "use" */
1260 linked_lists[linked_lists_top + 1] = use_list->refs; /* store list next */
1261 use_list->refs = -(linked_lists_top + 1); /* store a head of the list using a negative number */
1262 linked_lists_top += 2;
1263 use_list->count++;
1264 }
1265 }
1266 }
1268 i += n;
1269 insn += n;
1270 }
1271
1272 ctx->use_edges_count = edges_count;
1273 edges = ir_mem_malloc(IR_ALIGNED_SIZE(edges_count * sizeof(ir_ref), 4096));
1274 for (use_list = lists + ctx->insns_count - 1; use_list != lists; use_list--) {
1275 n = use_list->refs;
1276 if (n) {
1277 /* transform linked list to plain array */
1278 while (n < 0) {
1279 n = -n;
1280 edges[--edges_count] = linked_lists[n - 1];
1281 n = linked_lists[n];
1282 }
1283 IR_ASSERT(n > 0);
1284 edges[--edges_count] = n;
1285 use_list->refs = edges_count;
1286 }
1287 }
1288
1289 ctx->use_edges = edges;
1290 ctx->use_lists = lists;
1291 ir_mem_free(linked_lists);
1292}
1293#endif
1294
1296{
1297 ir_ref n, *p, *q, use;
1298 ir_use_list *use_list;
1299
1300 IR_ASSERT(from > 0);
1301 use_list = &ctx->use_lists[from];
1302 n = use_list->count;
1303 for (p = q = &ctx->use_edges[use_list->refs]; n > 0; p++, n--) {
1304 use = *p;
1305 if (use != ref) {
1306 if (p != q) {
1307 *q = use;
1308 }
1309 q++;
1310 }
1311 }
1312 if (p != q) {
1313 use_list->count -= (p - q);
1314 do {
1315 *q = IR_UNUSED;
1316 q++;
1317 } while (q != p);
1318 }
1319}
1320
1322{
1323 ir_ref n, *p;
1324 ir_use_list *use_list;
1325
1326 IR_ASSERT(from > 0);
1327 use_list = &ctx->use_lists[from];
1328 n = use_list->count;
1329 p = &ctx->use_edges[use_list->refs];
1330 while (n > 0) {
1331 if (*p == ref) {
1332 use_list->count--;
1333 n--;
1334 while (n > 0) {
1335 *p = *(p+1);
1336 p++;
1337 n--;
1338 }
1339 *p = IR_UNUSED;
1340 break;
1341 }
1342 p++;
1343 n--;
1344 }
1345}
1346
1348{
1349 ir_use_list *use_list;
1350 ir_ref n, *p;
1351
1352 IR_ASSERT(ref > 0);
1353 use_list = &ctx->use_lists[ref];
1354 n = use_list->count;
1355 for (p = &ctx->use_edges[use_list->refs]; n > 0; p++, n--) {
1356 if (*p == use) {
1357 *p = new_use;
1358 break;
1359 }
1360 }
1361}
1362
1364{
1365 ir_use_list *use_list;
1366 ir_ref n, *p;
1367
1368 IR_ASSERT(ref > 0);
1369 use_list = &ctx->use_lists[ref];
1370 n = use_list->count;
1371 for (p = &ctx->use_edges[use_list->refs]; n > 0; p++, n--) {
1372 if (*p == use) {
1373 *p = new_use;
1374 }
1375 }
1376}
1377
1379{
1380 ir_use_list *use_list;
1381 ir_ref n;
1382
1383 IR_ASSERT(to > 0);
1384 use_list = &ctx->use_lists[to];
1385 n = use_list->refs + use_list->count;
1386 if (n < ctx->use_edges_count && ctx->use_edges[n] == IR_UNUSED) {
1387 ctx->use_edges[n] = ref;
1388 use_list->count++;
1389 return 0;
1390 } else {
1391 size_t old_size = IR_ALIGNED_SIZE(ctx->use_edges_count * sizeof(ir_ref), 4096);
1392 size_t new_size = IR_ALIGNED_SIZE((ctx->use_edges_count + use_list->count + 1) * sizeof(ir_ref), 4096);
1393
1394 if (old_size < new_size) {
1395 /* Reallocate the whole edges buffer (this is inefficient) */
1396 ctx->use_edges = ir_mem_realloc(ctx->use_edges, new_size);
1397 } else if (n == ctx->use_edges_count) {
1398 ctx->use_edges[n] = ref;
1399 use_list->count++;
1400 ctx->use_edges_count++;
1401 return 0;
1402 }
1403 memcpy(ctx->use_edges + ctx->use_edges_count, ctx->use_edges + use_list->refs, use_list->count * sizeof(ir_ref));
1404 use_list->refs = ctx->use_edges_count;
1405 ctx->use_edges[use_list->refs + use_list->count] = ref;
1406 use_list->count++;
1407 ctx->use_edges_count += use_list->count;
1408 return 1;
1409 }
1410}
1411
1412static int ir_ref_cmp(const void *p1, const void *p2)
1413{
1414 return *(ir_ref*)p1 - *(ir_ref*)p2;
1415}
1416
1418{
1419 ir_use_list *use_list;
1420 uint32_t n;
1421
1422 IR_ASSERT(ref > 0);
1423 use_list = &ctx->use_lists[ref];
1424 n = use_list->count;
1425 if (n > 1) {
1426 qsort(ctx->use_edges + use_list->refs, n, sizeof(ir_ref), ir_ref_cmp);
1427 }
1428}
1429
/* Replace every use of "ref" by "new_ref".
 *
 * Walks the def->use list of "ref" and rewrites the matching input operand
 * of each user instruction. When "new_ref" refers to a real instruction
 * (positive reference), each user is also appended to new_ref's use list;
 * when it is a constant or IR_UNUSED (<= 0), constants carry no use lists,
 * so only the operands are rewritten.
 *
 * Note: the use list of "ref" itself is left as-is by this function.
 */
void ir_replace(ir_ctx *ctx, ir_ref ref, ir_ref new_ref)
{
	int i, j, n, *p, use;
	ir_insn *insn;
	ir_use_list *use_list;

	IR_ASSERT(ref != new_ref);
	use_list = &ctx->use_lists[ref];
	n = use_list->count;
	p = ctx->use_edges + use_list->refs;

	if (new_ref <= 0) {
		/* constant or IR_UNUSED */
		for (; n; p++, n--) {
			use = *p;
			IR_ASSERT(use != ref);
			insn = &ctx->ir_base[use];
			/* locate which operand of the user points back at "ref" */
			j = ir_insn_find_op(insn, ref);
			IR_ASSERT(j > 0);
			ir_insn_set_op(insn, j, new_ref);
		}
	} else {
		for (i = 0; i < n; p++, i++) {
			use = *p;
			IR_ASSERT(use != ref);
			insn = &ctx->ir_base[use];
			j = ir_insn_find_op(insn, ref);
			IR_ASSERT(j > 0);
			ir_insn_set_op(insn, j, new_ref);
			/* ir_use_list_add() may reallocate/compact ctx->use_edges;
			 * it returns non-zero in that case, so re-derive our cursor */
			if (ir_use_list_add(ctx, new_ref, use)) {
				/* restore after reallocation */
				use_list = &ctx->use_lists[ref];
				n = use_list->count;
				p = &ctx->use_edges[use_list->refs + i];
			}
		}
	}
}
1468
/* Change operand "idx" of instruction "ref" to "new_val", keeping the
 * def->use lists consistent.
 *
 * The new use is registered first, then the operand is rewritten, and only
 * afterwards is the old use removed. ir_use_list_add() may reallocate
 * ctx->use_edges, but "insn" points into ctx->ir_base, so it stays valid
 * across that call. Constants (refs <= 0) carry no use lists, hence the
 * positivity checks.
 */
void ir_update_op(ir_ctx *ctx, ir_ref ref, uint32_t idx, ir_ref new_val)
{
	ir_insn *insn = &ctx->ir_base[ref];
	ir_ref old_val = ir_insn_op(insn, idx);

	IR_ASSERT(old_val != new_val);
	if (new_val > 0) {
		ir_use_list_add(ctx, new_val, ref);
	}
	ir_insn_set_op(insn, idx, new_val);
	if (old_val > 0) {
		ir_use_list_remove_one(ctx, old_val, ref);
	}
}
1483
1484/* Helper Data Types */
1486{
1487 IR_ASSERT(size > a->size);
1488 a->refs = ir_mem_realloc(a->refs, size * sizeof(ir_ref));
1489 a->size = size;
1490}
1491
1493{
1494 IR_ASSERT(i < a->size);
1495 if (a->refs[a->size - 1]) {
1496 ir_array_grow(a, a->size + 1);
1497 }
1498 memmove(a->refs + i + 1, a->refs + i, (a->size - i - 1) * sizeof(ir_ref));
1499 a->refs[i] = val;
1500}
1501
1502void ir_array_remove(ir_array *a, uint32_t i)
1503{
1504 IR_ASSERT(i < a->size);
1505 memmove(a->refs + i, a->refs + i + 1, (a->size - i - 1) * sizeof(ir_ref));
1506 a->refs[a->size - 1] = IR_UNUSED;
1507}
1508
1509void ir_list_insert(ir_list *l, uint32_t i, ir_ref val)
1510{
1511 IR_ASSERT(i < l->len);
1512 if (l->len >= l->a.size) {
1513 ir_array_grow(&l->a, l->a.size + 1);
1514 }
1515 memmove(l->a.refs + i + 1, l->a.refs + i, (l->len - i) * sizeof(ir_ref));
1516 l->a.refs[i] = val;
1517 l->len++;
1518}
1519
1520void ir_list_remove(ir_list *l, uint32_t i)
1521{
1522 IR_ASSERT(i < l->len);
1523 memmove(l->a.refs + i, l->a.refs + i + 1, (l->len - i) * sizeof(ir_ref));
1524 l->len--;
1525}
1526
1527uint32_t ir_list_find(const ir_list *l, ir_ref val)
1528{
1529 uint32_t i;
1530
1531 for (i = 0; i < l->len; i++) {
1532 if (ir_array_at(&l->a, i) == val) {
1533 return i;
1534 }
1535 }
1536 return (uint32_t)-1;
1537}
1538
/* Round "size" up to the next power of two, with a minimum of 4 hash slots.
 * The classic bit-smearing trick propagates the highest set bit of (size-1)
 * into all lower positions, so adding one yields the next power of two.
 */
static uint32_t ir_hashtab_hash_size(uint32_t size)
{
	uint32_t v = size - 1;

	v |= v >> 1;
	v |= v >> 2;
	v |= v >> 4;
	v |= v >> 8;
	v |= v >> 16;
	v += 1;
	return (v > 4) ? v : 4;
}
1549
1550static void ir_hashtab_resize(ir_hashtab *tab)
1551{
1552 uint32_t old_hash_size = (uint32_t)(-(int32_t)tab->mask);
1553 char *old_data = tab->data;
1554 uint32_t size = tab->size * 2;
1555 uint32_t hash_size = ir_hashtab_hash_size(size);
1556 char *data = ir_mem_malloc(hash_size * sizeof(uint32_t) + size * sizeof(ir_hashtab_bucket));
1558 uint32_t pos, i;
1559
1560 memset(data, -1, hash_size * sizeof(uint32_t));
1561 tab->data = data + (hash_size * sizeof(uint32_t));
1562 tab->mask = (uint32_t)(-(int32_t)hash_size);
1563 tab->size = size;
1564
1565 memcpy(tab->data, old_data, tab->count * sizeof(ir_hashtab_bucket));
1566 ir_mem_free(old_data - (old_hash_size * sizeof(uint32_t)));
1567
1568 i = tab->count;
1569 pos = 0;
1570 p = (ir_hashtab_bucket*)tab->data;
1571 do {
1572 uint32_t key = p->key | tab->mask;
1573 p->next = ((uint32_t*)tab->data)[(int32_t)key];
1574 ((uint32_t*)tab->data)[(int32_t)key] = pos;
1575 pos += sizeof(ir_hashtab_bucket);
1576 p++;
1577 } while (--i);
1578}
1579
1580void ir_hashtab_init(ir_hashtab *tab, uint32_t size)
1581{
1582 IR_ASSERT(size > 0);
1583 uint32_t hash_size = ir_hashtab_hash_size(size);
1584 char *data = ir_mem_malloc(hash_size * sizeof(uint32_t) + size * sizeof(ir_hashtab_bucket));
1585 memset(data, -1, hash_size * sizeof(uint32_t));
1586 tab->data = (data + (hash_size * sizeof(uint32_t)));
1587 tab->mask = (uint32_t)(-(int32_t)hash_size);
1588 tab->size = size;
1589 tab->count = 0;
1590 tab->pos = 0;
1591}
1592
1594{
1595 uint32_t hash_size = (uint32_t)(-(int32_t)tab->mask);
1596 char *data = (char*)tab->data - (hash_size * sizeof(uint32_t));
1598 tab->data = NULL;
1599}
1600
1602{
1603 const char *data = (const char*)tab->data;
1604 uint32_t pos = ((uint32_t*)data)[(int32_t)(key | tab->mask)];
1606
1607 while (pos != IR_INVALID_IDX) {
1608 p = (ir_hashtab_bucket*)(data + pos);
1609 if (p->key == key) {
1610 return p->val;
1611 }
1612 pos = p->next;
1613 }
1614 return IR_INVALID_VAL;
1615}
1616
1618{
1619 char *data = (char*)tab->data;
1620 uint32_t pos = ((uint32_t*)data)[(int32_t)(key | tab->mask)];
1622
1623 while (pos != IR_INVALID_IDX) {
1624 p = (ir_hashtab_bucket*)(data + pos);
1625 if (p->key == key) {
1626 return p->val == val;
1627 }
1628 pos = p->next;
1629 }
1630
1631 if (UNEXPECTED(tab->count >= tab->size)) {
1632 ir_hashtab_resize(tab);
1633 data = tab->data;
1634 }
1635
1636 pos = tab->pos;
1637 tab->pos += sizeof(ir_hashtab_bucket);
1638 tab->count++;
1639 p = (ir_hashtab_bucket*)(data + pos);
1640 p->key = key;
1641 p->val = val;
1642 key |= tab->mask;
1643 p->next = ((uint32_t*)data)[(int32_t)key];
1644 ((uint32_t*)data)[(int32_t)key] = pos;
1645 return 1;
1646}
1647
1648static int ir_hashtab_key_cmp(const void *b1, const void *b2)
1649{
1650 return ((ir_hashtab_bucket*)b1)->key - ((ir_hashtab_bucket*)b2)->key;
1651}
1652
1654{
1656 uint32_t hash_size, pos, i;
1657
1658 if (!tab->count) {
1659 return;
1660 }
1661
1662 qsort(tab->data, tab->count, sizeof(ir_hashtab_bucket), ir_hashtab_key_cmp);
1663
1664 hash_size = ir_hashtab_hash_size(tab->size);
1665 memset((char*)tab->data - (hash_size * sizeof(uint32_t)), -1, hash_size * sizeof(uint32_t));
1666
1667 i = tab->count;
1668 pos = 0;
1669 p = (ir_hashtab_bucket*)tab->data;
1670 do {
1671 uint32_t key = p->key | tab->mask;
1672 p->next = ((uint32_t*)tab->data)[(int32_t)key];
1673 ((uint32_t*)tab->data)[(int32_t)key] = pos;
1674 pos += sizeof(ir_hashtab_bucket);
1675 p++;
1676 } while (--i);
1677}
1678
1679static void ir_addrtab_resize(ir_hashtab *tab)
1680{
1681 uint32_t old_hash_size = (uint32_t)(-(int32_t)tab->mask);
1682 char *old_data = tab->data;
1683 uint32_t size = tab->size * 2;
1684 uint32_t hash_size = ir_hashtab_hash_size(size);
1685 char *data = ir_mem_malloc(hash_size * sizeof(uint32_t) + size * sizeof(ir_addrtab_bucket));
1687 uint32_t pos, i;
1688
1689 memset(data, -1, hash_size * sizeof(uint32_t));
1690 tab->data = data + (hash_size * sizeof(uint32_t));
1691 tab->mask = (uint32_t)(-(int32_t)hash_size);
1692 tab->size = size;
1693
1694 memcpy(tab->data, old_data, tab->count * sizeof(ir_addrtab_bucket));
1695 ir_mem_free(old_data - (old_hash_size * sizeof(uint32_t)));
1696
1697 i = tab->count;
1698 pos = 0;
1699 p = (ir_addrtab_bucket*)tab->data;
1700 do {
1701 uint32_t key = (uint32_t)p->key | tab->mask;
1702 p->next = ((uint32_t*)tab->data)[(int32_t)key];
1703 ((uint32_t*)tab->data)[(int32_t)key] = pos;
1704 pos += sizeof(ir_addrtab_bucket);
1705 p++;
1706 } while (--i);
1707}
1708
1709void ir_addrtab_init(ir_hashtab *tab, uint32_t size)
1710{
1711 IR_ASSERT(size > 0);
1712 uint32_t hash_size = ir_hashtab_hash_size(size);
1713 char *data = ir_mem_malloc(hash_size * sizeof(uint32_t) + size * sizeof(ir_addrtab_bucket));
1714 memset(data, -1, hash_size * sizeof(uint32_t));
1715 tab->data = (data + (hash_size * sizeof(uint32_t)));
1716 tab->mask = (uint32_t)(-(int32_t)hash_size);
1717 tab->size = size;
1718 tab->count = 0;
1719 tab->pos = 0;
1720}
1721
1723{
1724 uint32_t hash_size = (uint32_t)(-(int32_t)tab->mask);
1725 char *data = (char*)tab->data - (hash_size * sizeof(uint32_t));
1727 tab->data = NULL;
1728}
1729
1731{
1732 const char *data = (const char*)tab->data;
1733 uint32_t pos = ((uint32_t*)data)[(int32_t)(key | tab->mask)];
1735
1736 while (pos != IR_INVALID_IDX) {
1737 p = (ir_addrtab_bucket*)(data + pos);
1738 if (p->key == key) {
1739 return p->val;
1740 }
1741 pos = p->next;
1742 }
1743 return IR_INVALID_VAL;
1744}
1745
1747{
1748 char *data = (char*)tab->data;
1749 uint32_t pos = ((uint32_t*)data)[(int32_t)(key | tab->mask)];
1751
1752 while (pos != IR_INVALID_IDX) {
1753 p = (ir_addrtab_bucket*)(data + pos);
1754 if (p->key == key) {
1755 p->val = val;
1756 return;
1757 }
1758 pos = p->next;
1759 }
1760
1761 if (UNEXPECTED(tab->count >= tab->size)) {
1762 ir_addrtab_resize(tab);
1763 data = tab->data;
1764 }
1765
1766 pos = tab->pos;
1767 tab->pos += sizeof(ir_addrtab_bucket);
1768 tab->count++;
1769 p = (ir_addrtab_bucket*)(data + pos);
1770 p->key = key;
1771 p->val = val;
1772 key |= tab->mask;
1773 p->next = ((uint32_t*)data)[(int32_t)key];
1774 ((uint32_t*)data)[(int32_t)key] = pos;
1775}
1776
1777/* Memory API */
1778#ifdef _WIN32
1779void *ir_mem_mmap(size_t size)
1780{
1781 void *ret;
1782
1783#ifdef _M_X64
1784 DWORD size_hi = size >> 32, size_lo = size & 0xffffffff;
1785#else
1786 DWORD size_hi = 0, size_lo = size;
1787#endif
1788
1789 HANDLE h = CreateFileMapping(INVALID_HANDLE_VALUE, NULL, PAGE_EXECUTE_READWRITE, size_hi, size_lo, NULL);
1790
1791 ret = MapViewOfFile(h, FILE_MAP_READ | FILE_MAP_WRITE | FILE_MAP_EXECUTE, 0, 0, size);
1792 if (!ret) {
1793 CloseHandle(h);
1794 }
1795
1796 return ret;
1797}
1798
/* Unmap a view created by ir_mem_mmap() (Windows).
 * Always reports success; the section handle from CreateFileMapping()
 * is not closed here (see the XXX note).
 */
int ir_mem_unmap(void *ptr, size_t size)
{
	/* XXX file handle is leaked. */
	UnmapViewOfFile(ptr);
	return 1;
}
1805
/* No-op on Windows: the region is mapped PAGE_EXECUTE_READWRITE by
 * ir_mem_mmap(), so no protection change is required. Always succeeds.
 */
int ir_mem_protect(void *ptr, size_t size)
{
	return 1;
}
1810
/* No-op on Windows: the region is already writable (PAGE_EXECUTE_READWRITE),
 * so nothing has to be unprotected. Always succeeds.
 */
int ir_mem_unprotect(void *ptr, size_t size)
{
	return 1;
}
1815
/* Instruction-cache flush stub for Windows. Always reports success;
 * presumably no explicit flush is needed on the supported Windows
 * targets (x86/x64 have coherent instruction caches) — TODO confirm
 * for any future ARM Windows target.
 */
int ir_mem_flush(void *ptr, size_t size)
{
	return 1;
}
1820#else
1821void *ir_mem_mmap(size_t size)
1822{
1823 int prot_flags = PROT_EXEC;
1824#if defined(__NetBSD__)
1825 prot_flags |= PROT_MPROTECT(PROT_READ|PROT_WRITE);
1826#endif
1827 void *ret = mmap(NULL, size, prot_flags, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
1828 if (ret == MAP_FAILED) {
1829 ret = NULL;
1830 }
1831 return ret;
1832}
1833
/* Unmap a region obtained from ir_mem_mmap() (POSIX).
 * The munmap() result is deliberately ignored; always reports success.
 */
int ir_mem_unmap(void *ptr, size_t size)
{
	munmap(ptr, size);
	return 1;
}
1839
/* Make a code region read+execute (drop write permission).
 * Returns 1 on success, 0 when mprotect() fails (logged in debug builds).
 */
int ir_mem_protect(void *ptr, size_t size)
{
	if (mprotect(ptr, size, PROT_READ | PROT_EXEC) == 0) {
		return 1;
	}
#ifdef IR_DEBUG
	fprintf(stderr, "mprotect() failed\n");
#endif
	return 0;
}
1850
/* Make a code region read+write so new code can be emitted into it.
 * Returns 1 on success, 0 when mprotect() fails (logged in debug builds).
 */
int ir_mem_unprotect(void *ptr, size_t size)
{
	if (mprotect(ptr, size, PROT_READ | PROT_WRITE) == 0) {
		return 1;
	}
#ifdef IR_DEBUG
	fprintf(stderr, "mprotect() failed\n");
#endif
	return 0;
}
1861
/* Flush/invalidate the instruction cache for a freshly written code region.
 * Applies every mechanism available on the current platform; always
 * reports success.
 */
int ir_mem_flush(void *ptr, size_t size)
{
#if ((defined(__GNUC__) && ZEND_GCC_VERSION >= 4003) || __has_builtin(__builtin___clear_cache))
	/* compiler builtin: flush [ptr, ptr+size) from the instruction cache */
	__builtin___clear_cache((char*)(ptr), (char*)(ptr) + size);
#endif
#if defined(__APPLE__) && defined(__aarch64__)
	/* Apple Silicon additionally requires an explicit icache invalidate */
	sys_icache_invalidate(ptr, size);
#endif
#ifdef HAVE_VALGRIND
	/* tell Valgrind its cached translations of this region are stale */
	VALGRIND_DISCARD_TRANSLATIONS(ptr, size);
#endif
	return 1;
}
1875#endif
1876
1877/* Alias Analyses */
1883
1884#if 0
/* Early (disabled) aliasing check based on matching ADD-with-constant
 * address expressions; superseded by ir_check_partial_aliasing().
 *
 * NOTE(review): this dead code looks buggy — verify before re-enabling:
 *  - in the inner "else if", IR_IS_CONST_REF(insn1->op2) is re-tested
 *    where IR_IS_CONST_REF(insn2->op2) was presumably intended;
 *  - the "(offset != 0) ? IR_MUST_ALIAS : IR_NO_ALIAS" arms appear
 *    swapped: a zero offset means the addresses coincide (must alias),
 *    a non-zero constant offset means they differ.
 */
static ir_alias ir_check_aliasing(ir_ctx *ctx, ir_ref addr1, ir_ref addr2)
{
	ir_insn *insn1, *insn2;

	if (addr1 == addr2) {
		return IR_MUST_ALIAS;
	}

	insn1 = &ctx->ir_base[addr1];
	insn2 = &ctx->ir_base[addr2];
	if (insn1->op == IR_ADD && IR_IS_CONST_REF(insn1->op2)) {
		/* addr1 == addr2 + const */
		if (insn1->op1 == addr2) {
			uintptr_t offset1 = ctx->ir_base[insn1->op2].val.u64;
			return (offset1 != 0) ? IR_MUST_ALIAS : IR_NO_ALIAS;
		} else if (insn2->op == IR_ADD && IR_IS_CONST_REF(insn1->op2) && insn1->op1 == insn2->op1) {
			/* addr1 and addr2 share the same base */
			if (insn1->op2 == insn2->op2) {
				return IR_MUST_ALIAS;
			} else if (IR_IS_CONST_REF(insn1->op2) && IR_IS_CONST_REF(insn2->op2)) {
				uintptr_t offset1 = ctx->ir_base[insn1->op2].val.u64;
				uintptr_t offset2 = ctx->ir_base[insn2->op2].val.u64;

				return (offset1 == offset2) ? IR_MUST_ALIAS : IR_NO_ALIAS;
			}
		}
	} else if (insn2->op == IR_ADD && IR_IS_CONST_REF(insn2->op2)) {
		/* addr2 == addr1 + const */
		if (insn2->op1 == addr1) {
			uintptr_t offset2 = ctx->ir_base[insn2->op2].val.u64;

			return (offset2 != 0) ? IR_MUST_ALIAS : IR_NO_ALIAS;
		}
	}
	return IR_MAY_ALIAS;
}
1918#endif
1919
1921{
1922 ir_insn *insn1, *insn2;
1923 ir_ref base1, base2, off1, off2;
1924
	/* this must already have been checked by the caller */
1926 IR_ASSERT(addr1 != addr2);
1927
1928 insn1 = &ctx->ir_base[addr1];
1929 insn2 = &ctx->ir_base[addr2];
1930 if (insn1->op != IR_ADD) {
1931 base1 = addr1;
1932 off1 = IR_UNUSED;
1933 } else if (ctx->ir_base[insn1->op2].op == IR_SYM
1934 || ctx->ir_base[insn1->op2].op == IR_ALLOCA
1935 || ctx->ir_base[insn1->op2].op == IR_VADDR) {
1936 base1 = insn1->op2;
1937 off1 = insn1->op1;
1938 } else {
1939 base1 = insn1->op1;
1940 off1 = insn1->op2;
1941 }
1942 if (insn2->op != IR_ADD) {
1943 base2 = addr2;
1944 off2 = IR_UNUSED;
1945 } else if (ctx->ir_base[insn2->op2].op == IR_SYM
1946 || ctx->ir_base[insn2->op2].op == IR_ALLOCA
1947 || ctx->ir_base[insn2->op2].op == IR_VADDR) {
1948 base2 = insn2->op2;
1949 off2 = insn2->op1;
1950 } else {
1951 base2 = insn2->op1;
1952 off2 = insn2->op2;
1953 }
1954 if (base1 == base2) {
1955 uintptr_t offset1, offset2;
1956
1957 if (!off1) {
1958 offset1 = 0;
1959 } else if (IR_IS_CONST_REF(off1) && !IR_IS_SYM_CONST(ctx->ir_base[off1].op)) {
1960 offset1 = ctx->ir_base[off1].val.addr;
1961 } else {
1962 return IR_MAY_ALIAS;
1963 }
1964 if (!off2) {
1965 offset2 = 0;
1966 } else if (IR_IS_CONST_REF(off2) && !IR_IS_SYM_CONST(ctx->ir_base[off2].op)) {
1967 offset2 = ctx->ir_base[off2].val.addr;
1968 } else {
1969 return IR_MAY_ALIAS;
1970 }
1971 if (offset1 == offset2) {
1972 return IR_MUST_ALIAS;
1973 } else if (offset1 < offset2) {
1974 return offset1 + ir_type_size[type1] <= offset2 ? IR_NO_ALIAS : IR_MUST_ALIAS;
1975 } else {
1976 return offset2 + ir_type_size[type2] <= offset1 ? IR_NO_ALIAS : IR_MUST_ALIAS;
1977 }
1978 } else {
1979 insn1 = &ctx->ir_base[base1];
1980 insn2 = &ctx->ir_base[base2];
1981 while (insn1->op == IR_ADD) {
1982 insn1 = &ctx->ir_base[insn1->op2];
1983 if (insn1->op == IR_SYM
1984 || insn1->op == IR_ALLOCA
1985 || insn1->op == IR_VADDR) {
1986 break;
1987 } else {
1988 insn1 = &ctx->ir_base[insn1->op1];
1989 }
1990 }
1991 while (insn2->op == IR_ADD) {
1992 insn2 = &ctx->ir_base[insn2->op2];
1993 if (insn2->op == IR_SYM
1994 || insn2->op == IR_ALLOCA
1995 || insn2->op == IR_VADDR) {
1996 break;
1997 } else {
1998 insn2 = &ctx->ir_base[insn2->op1];
1999 }
2000 }
2001 if (insn1 == insn2) {
2002 return IR_MAY_ALIAS;
2003 }
2004 if ((insn1->op == IR_ALLOCA && (insn2->op == IR_ALLOCA || insn2->op == IR_VADDR || insn2->op == IR_SYM || insn2->op == IR_PARAM))
2005 || (insn1->op == IR_VADDR && (insn2->op == IR_ALLOCA || insn2->op == IR_VADDR || insn2->op == IR_SYM || insn2->op == IR_PARAM))
2006 || (insn1->op == IR_SYM && (insn2->op == IR_ALLOCA || insn2->op == IR_VADDR || insn2->op == IR_SYM))
2007 || (insn1->op == IR_PARAM && (insn2->op == IR_ALLOCA || insn2->op == IR_VADDR))) {
2008 return IR_NO_ALIAS;
2009 }
2010 }
2011 return IR_MAY_ALIAS;
2012}
2013
2015{
2016 ir_insn *insn;
2017 uint32_t modified_regset = 0;
2018
2019 while (ref > limit) {
2020 insn = &ctx->ir_base[ref];
2021 if (insn->op == IR_LOAD) {
2022 if (insn->op2 == addr) {
2023 if (insn->type == type) {
2024 return ref; /* load forwarding (L2L) */
2025 } else if (ir_type_size[insn->type] == ir_type_size[type]) {
2026 return ref; /* load forwarding with bitcast (L2L) */
2027 } else if (ir_type_size[insn->type] > ir_type_size[type]
2028 && IR_IS_TYPE_INT(type) && IR_IS_TYPE_INT(insn->type)) {
2029 return ref; /* partial load forwarding (L2L) */
2030 }
2031 }
2032 } else if (insn->op == IR_STORE) {
2033 ir_type type2 = ctx->ir_base[insn->op3].type;
2034
2035 if (insn->op2 == addr) {
2036 if (ctx->ir_base[insn->op3].op == IR_RLOAD
2037 && (modified_regset & (1 << ctx->ir_base[insn->op3].op2))) {
2038 /* anti-dependency */
2039 return IR_UNUSED;
2040 } else if (type2 == type) {
2041 return insn->op3; /* store forwarding (S2L) */
2042 } else if (ir_type_size[type2] == ir_type_size[type]) {
2043 return insn->op3; /* store forwarding with bitcast (S2L) */
2044 } else if (ir_type_size[type2] > ir_type_size[type]
2046 return insn->op3; /* partial store forwarding (S2L) */
2047 } else {
2048 return IR_UNUSED;
2049 }
2050 } else if (ir_check_partial_aliasing(ctx, addr, insn->op2, type, type2) != IR_NO_ALIAS) {
2051 return IR_UNUSED;
2052 }
2053 } else if (insn->op == IR_RSTORE) {
2054 modified_regset |= (1 << insn->op3);
2055 } else if (insn->op == IR_MERGE || insn->op == IR_LOOP_BEGIN || insn->op == IR_CALL || insn->op == IR_VSTORE) {
2056 return IR_UNUSED;
2057 }
2058 ref = insn->op1;
2059 }
2060
2061 return IR_UNUSED;
2062}
2063
2065{
2066 return ir_find_aliasing_load_i(ctx, ref, type, addr, (addr > 0 && addr < ref) ? addr : 1);
2067}
2068
2070{
2071 ir_insn *insn;
2072
2073 while (ref > var) {
2074 insn = &ctx->ir_base[ref];
2075 if (insn->op == IR_VLOAD) {
2076 if (insn->op2 == var) {
2077 if (insn->type == type) {
2078 return ref; /* load forwarding (L2L) */
2079 } else if (ir_type_size[insn->type] == ir_type_size[type]) {
2080 return ir_fold1(ctx, IR_OPT(IR_BITCAST, type), ref); /* load forwarding with bitcast (L2L) */
2081 } else if (ir_type_size[insn->type] > ir_type_size[type]
2082 && IR_IS_TYPE_INT(type) && IR_IS_TYPE_INT(insn->type)) {
2083 return ir_fold1(ctx, IR_OPT(IR_TRUNC, type), ref); /* partial load forwarding (L2L) */
2084 }
2085 }
2086 } else if (insn->op == IR_VSTORE) {
2087 ir_type type2 = ctx->ir_base[insn->op3].type;
2088
2089 if (insn->op2 == var) {
2090 if (type2 == type) {
2091 return insn->op3; /* store forwarding (S2L) */
2092 } else if (ir_type_size[type2] == ir_type_size[type]) {
2093 return ir_fold1(ctx, IR_OPT(IR_BITCAST, type), insn->op3); /* store forwarding with bitcast (S2L) */
2094 } else if (ir_type_size[type2] > ir_type_size[type]
2096 return ir_fold1(ctx, IR_OPT(IR_TRUNC, type), insn->op3); /* partial store forwarding (S2L) */
2097 } else {
2098 break;
2099 }
2100 }
2101 } else if (insn->op == IR_MERGE || insn->op == IR_LOOP_BEGIN || insn->op == IR_CALL || insn->op == IR_STORE) {
2102 break;
2103 }
2104 ref = insn->op1;
2105 }
2106
2107 return IR_UNUSED;
2108}
2109
2111{
2112 return ir_find_aliasing_vload_i(ctx, ref, type, var);
2113}
2114
2116{
2118 ir_insn *insn;
2119 ir_type type = ctx->ir_base[val].type;
2120 ir_type type2;
2121 bool guarded = 0;
2122
2123// if (!IR_IS_CONST_REF(val)) {
2124// insn = &ctx->ir_base[val];
2125// if (insn->op == IR_BITCAST
2126// && !IR_IS_CONST_REF(insn->op1)
2127// && ir_type_size[insn->type] == ir_type_size[ctx->ir_base[insn->op1].type]) {
2128// /* skip BITCAST */
2129// val = insn->op1;
2130// }
2131// }
2132
2133 while (ref > limit) {
2134 insn = &ctx->ir_base[ref];
2135 if (insn->op == IR_STORE) {
2136 if (insn->op2 == addr) {
2137 if (ctx->ir_base[insn->op3].type == type) {
2138 if (insn->op3 == val) {
2139 /* dead STORE (store the same value once again) */
2140 return ref;
2141 } else {
2142 if (!guarded) {
2143 /* the previous STORE is dead (there are no LOADs) */
2144 if (!ctx->use_lists) {
2145 if (next) {
2146 ctx->ir_base[next].op1 = insn->op1;
2147 } else {
2148 ctx->control = insn->op1;
2149 }
2150 } else {
2151 ir_ref prev = insn->op1;
2152
2153 if (!next) {
2154 IR_ASSERT(ctx->use_lists[ref].count == 1);
2155 next = ctx->use_edges[ctx->use_lists[ref].refs];
2156 }
2157 ctx->ir_base[next].op1 = prev;
2158 ir_use_list_remove_one(ctx, ref, next);
2159 ir_use_list_replace_one(ctx, prev, ref, next);
2160 if (!IR_IS_CONST_REF(insn->op2)) {
2161 ir_use_list_remove_one(ctx, insn->op2, ref);
2162 }
2163 if (!IR_IS_CONST_REF(insn->op3)) {
2164 ir_use_list_remove_one(ctx, insn->op3, ref);
2165 }
2166 insn->op1 = IR_UNUSED;
2167 }
2168 MAKE_NOP(insn);
2169 }
2170 break;
2171 }
2172 } else {
2173 break;
2174 }
2175 } else {
2176 type2 = ctx->ir_base[insn->op3].type;
2177 goto check_aliasing;
2178 }
2179 } else if (insn->op == IR_LOAD) {
2180 if (insn->op2 == addr) {
2181 if (ref == val) {
2182 /* dead STORE (store the value that was loaded before) */
2183 return ref;
2184 }
2185 break;
2186 }
2187 type2 = insn->type;
2188check_aliasing:
2189 if (ir_check_partial_aliasing(ctx, addr, insn->op2, type, type2) != IR_NO_ALIAS) {
2190 break;
2191 }
2192 } else if (insn->op == IR_GUARD || insn->op == IR_GUARD_NOT) {
2193 guarded = 1;
2194 } else if (insn->op >= IR_START || insn->op == IR_CALL) {
2195 break;
2196 }
2197 next = ref;
2198 ref = insn->op1;
2199 }
2200
2201 return IR_UNUSED;
2202}
2203
2205{
2206 return ir_find_aliasing_store_i(ctx, ref, addr, val, (addr > 0 && addr < ref) ? addr : 1);
2207}
2208
2210{
2211 ir_ref limit = var;
2213 ir_insn *insn;
2214 bool guarded = 0;
2215
2216// if (!IR_IS_CONST_REF(val)) {
2217// insn = &ctx->ir_base[val];
2218// if (insn->op == IR_BITCAST
2219// && !IR_IS_CONST_REF(insn->op1)
2220// && ir_type_size[insn->type] == ir_type_size[ctx->ir_base[insn->op1].type]) {
2221// /* skip BITCAST */
2222// val = insn->op1;
2223// }
2224// }
2225
2226 while (ref > limit) {
2227 insn = &ctx->ir_base[ref];
2228 if (insn->op == IR_VSTORE) {
2229 if (insn->op2 == var) {
2230 if (insn->op3 == val) {
2231 /* dead VSTORE */
2232 return ref;
2233 } else {
2234 if (!guarded) {
2235 /* the previous VSTORE is dead (there are no VLOADs) */
2236 if (!ctx->use_lists) {
2237 if (next) {
2238 ctx->ir_base[next].op1 = insn->op1;
2239 } else {
2240 ctx->control = insn->op1;
2241 }
2242 } else {
2243 ir_ref prev = insn->op1;
2244
2245 if (!next) {
2246 IR_ASSERT(ctx->use_lists[ref].count == 1);
2247 next = ctx->use_edges[ctx->use_lists[ref].refs];
2248 }
2249 ctx->ir_base[next].op1 = prev;
2250 ir_use_list_remove_one(ctx, ref, next);
2251 ir_use_list_replace_one(ctx, prev, ref, next);
2252 if (!IR_IS_CONST_REF(insn->op2)) {
2253 ir_use_list_remove_one(ctx, insn->op2, ref);
2254 }
2255 if (!IR_IS_CONST_REF(insn->op3)) {
2256 ir_use_list_remove_one(ctx, insn->op3, ref);
2257 }
2258 insn->op1 = IR_UNUSED;
2259 }
2260 MAKE_NOP(insn);
2261 }
2262 break;
2263 }
2264 }
2265 } else if (insn->op == IR_VLOAD) {
2266 if (insn->op2 == var) {
2267 if (ref == val) {
2268 /* dead VSTORE */
2269 return ref;
2270 }
2271 break;
2272 }
2273 } else if (insn->op == IR_GUARD || insn->op == IR_GUARD_NOT) {
2274 guarded = 1;
2275 } else if (insn->op >= IR_START || insn->op == IR_CALL || insn->op == IR_LOAD || insn->op == IR_STORE) {
2276 break;
2277 }
2278 next = ref;
2279 ref = insn->op1;
2280 }
2281 return IR_UNUSED;
2282}
2283
2285{
2286 return ir_find_aliasing_vstore_i(ctx, ref, var, val);
2287}
2288
2289/* IR Construction API */
2290
2292{
2293 IR_ASSERT(ctx->control);
2294 IR_ASSERT(ctx->ir_base[ctx->control].op == IR_START);
2295 IR_ASSERT(ctx->insns_count == num + 1);
2296 return ir_param(ctx, type, ctx->control, name, num);
2297}
2298
2300{
2301// IR_ASSERT(ctx->control);
2302// IR_ASSERT(IR_IS_BB_START(ctx->ir_base[ctx->control].op));
// TODO: VAR may be inserted after some "memory" instruction
2304 ir_ref ref = ctx->control;
2305
2306 while (1) {
2307 IR_ASSERT(ref);
2308 if (IR_IS_BB_START(ctx->ir_base[ref].op)) {
2309 break;
2310 }
2311 ref = ctx->ir_base[ref].op1;
2312 }
2313 return ir_var(ctx, type, ref, name);
2314}
2315
2317{
2318 IR_ASSERT(ctx->control);
2319 IR_ASSERT(ctx->ir_base[ctx->control].op == IR_MERGE || ctx->ir_base[ctx->control].op == IR_LOOP_BEGIN);
2320 if (src1 == src2 && src1 != IR_UNUSED) {
2321 return src1;
2322 }
2323 return ir_emit3(ctx, IR_OPTX(IR_PHI, type, 3), ctx->control, src1, src2);
2324}
2325
2327{
2328 IR_ASSERT(ctx->control);
2329 IR_ASSERT(n > 0);
2330 if (n == 1) {
2331 return inputs[0];
2332 } else {
2333 ir_ref i;
2334 ir_ref ref;
2335
2336 if (UNEXPECTED(!(ctx->flags & IR_OPT_FOLDING))) {
2337 IR_ASSERT(ctx->ir_base[ctx->control].op == IR_MERGE
2338 || ctx->ir_base[ctx->control].op == IR_LOOP_BEGIN);
2339 ref = inputs[0];
2340 if (ref != IR_UNUSED) {
2341 for (i = 1; i < n; i++) {
2342 if (inputs[i] != ref) {
2343 break;
2344 }
2345 }
2346 if (i == n) {
2347 /* all the same */
2348 return ref;
2349 }
2350 }
2351 }
2352
2353 ref = ir_emit_N(ctx, IR_OPT(IR_PHI, type), n + 1);
2354 ir_set_op(ctx, ref, 1, ctx->control);
2355 for (i = 0; i < n; i++) {
2356 ir_set_op(ctx, ref, i + 2, inputs[i]);
2357 }
2358 return ref;
2359 }
2360}
2361
2363{
2364 ir_insn *insn = &ctx->ir_base[phi];
2365 ir_ref *ops = insn->ops;
2366
2367 IR_ASSERT(insn->op == IR_PHI);
2368 IR_ASSERT(ctx->ir_base[insn->op1].op == IR_MERGE || ctx->ir_base[insn->op1].op == IR_LOOP_BEGIN);
2369 IR_ASSERT(pos > 0 && pos < insn->inputs_count);
2370 pos++; /* op1 is used for control */
2371 ops[pos] = src;
2372}
2373
2375{
2376 IR_ASSERT(!ctx->control);
2377 IR_ASSERT(ctx->insns_count == 1);
2378 ctx->control = ir_emit0(ctx, IR_START);
2379}
2380
2381void _ir_ENTRY(ir_ctx *ctx, ir_ref src, ir_ref num)
2382{
2383 IR_ASSERT(!ctx->control);
2384 /* fake control edge */
2386 || ctx->ir_base[src].op == IR_END
2387 || ctx->ir_base[src].op == IR_LOOP_END); /* return from a recursive call */
2388 ctx->control = ir_emit2(ctx, IR_ENTRY, src, num);
2389}
2390
2391void _ir_BEGIN(ir_ctx *ctx, ir_ref src)
2392{
2393 IR_ASSERT(!ctx->control);
2394 if (EXPECTED(ctx->flags & IR_OPT_FOLDING)
2395 && src
2396 && src + 1 == ctx->insns_count
2397 && ctx->ir_base[src].op == IR_END) {
2398 /* merge with the last END */
2399 ctx->control = ctx->ir_base[src].op1;
2400 ctx->insns_count--;
2401 } else {
2402 ctx->control = ir_emit1(ctx, IR_BEGIN, src);
2403 }
2404}
2405
/* Try to simplify a branch condition before it is used by IF/GUARD.
 *
 * Handled patterns:
 *  - "x != 0" (integer constant zero): reduced to "x" itself, since control
 *    flow only tests truthiness; when "x" is an ALLOCA/VADDR result the
 *    whole condition folds to IR_TRUE (stack addresses are never zero);
 *  - "x == true": reduced to "x";
 *  - "x == NULL" where "x" is an ALLOCA/VADDR result: folds to IR_FALSE.
 * Anything else is returned unchanged.
 */
static ir_ref _ir_fold_condition(ir_ctx *ctx, ir_ref ref)
{
	ir_insn *insn = &ctx->ir_base[ref];

	if (insn->op == IR_NE && IR_IS_CONST_REF(insn->op2)) {
		ir_insn *op2_insn = &ctx->ir_base[insn->op2];

		if (IR_IS_TYPE_INT(op2_insn->type) && op2_insn->val.u64 == 0) {
			/* "x != 0" -> "x" (and IR_TRUE for never-NULL stack addresses) */
			ref = insn->op1;
			insn = &ctx->ir_base[ref];
			if (insn->op == IR_ALLOCA || insn->op == IR_VADDR) {
				return IR_TRUE;
			}
		}
	} else if (insn->op == IR_EQ && insn->op2 == IR_TRUE) {
		/* "x == true" -> "x" */
		ref = insn->op1;
		insn = &ctx->ir_base[ref];
	} else if (insn->op == IR_EQ && insn->op2 == IR_NULL) {
		ir_insn *op1_insn = &ctx->ir_base[insn->op1];
		/* a stack address can never be NULL */
		if (op1_insn->op == IR_ALLOCA || op1_insn->op == IR_VADDR) {
			return IR_FALSE;
		}
	}
//	while (insn->op == IR_SEXT || insn->op == IR_ZEXT || insn->op == IR_BITCAST) {
//		ref = insn->op1;
//		insn = &ctx->ir_base[ref];
//	}
	return ref;
}
2435
2437{
2438 ir_insn *prev = NULL;
2439 ir_insn *insn;
2440
2441 while (ref > limit) {
2442 insn = &ctx->ir_base[ref];
2443 if (insn->op == IR_GUARD_NOT) {
2444 if (insn->op2 == condition) {
2445 return IR_FALSE;
2446 }
2447 } else if (insn->op == IR_GUARD) {
2448 if (insn->op2 == condition) {
2449 return IR_TRUE;
2450 }
2451 } else if (insn->op == IR_IF) {
2452 if (insn->op2 == condition) {
2453 if (prev->op == IR_IF_TRUE) {
2454 return IR_TRUE;
2455 } else if (prev->op == IR_IF_FALSE) {
2456 return IR_FALSE;
2457 }
2458 }
2459 } else if (insn->op == IR_START || insn->op == IR_MERGE || insn->op == IR_LOOP_BEGIN) {
2460 break;
2461 }
2462 prev = insn;
2463 ref = insn->op1;
2464 }
2465
2466 return condition;
2467}
2468
2470{
2471 IR_ASSERT(!IR_IS_CONST_REF(condition));
2472 return ir_check_dominating_predicates_i(ctx, ref, condition, (condition < ref) ? condition : 1);
2473}
2474
2475ir_ref _ir_IF(ir_ctx *ctx, ir_ref condition)
2476{
2477 ir_ref if_ref;
2478
2479 IR_ASSERT(ctx->control);
2480 if (UNEXPECTED(!(ctx->flags & IR_OPT_FOLDING))) {
2481 if_ref = ir_emit2(ctx, IR_IF, ctx->control, condition);
2482 ctx->control = IR_UNUSED;
2483 return if_ref;
2484 }
2485
2486 condition = _ir_fold_condition(ctx, condition);
2487 if (IR_IS_CONST_REF(condition)) {
2488 condition = ir_ref_is_true(ctx, condition) ? IR_TRUE : IR_FALSE;
2489 } else {
2490 condition = ir_check_dominating_predicates_i(ctx, ctx->control, condition, condition);
2491 }
2492 if_ref = ir_emit2(ctx, IR_IF, ctx->control, condition);
2493 ctx->control = IR_UNUSED;
2494 return if_ref;
2495}
2496
2497void _ir_IF_TRUE(ir_ctx *ctx, ir_ref if_ref)
2498{
2499 IR_ASSERT(!ctx->control);
2500 IR_ASSERT(if_ref);
2501 IR_ASSERT(ctx->ir_base[if_ref].op == IR_IF);
2502 ctx->control = ir_emit1(ctx, IR_IF_TRUE, if_ref);
2503}
2504
2506{
2507 IR_ASSERT(!ctx->control);
2508 IR_ASSERT(if_ref);
2509 IR_ASSERT(ctx->ir_base[if_ref].op == IR_IF);
2510 /* op2 is used as an indicator of low-probability branch */
2511 ctx->control = ir_emit2(ctx, IR_IF_TRUE, if_ref, 1);
2512}
2513
2514void _ir_IF_FALSE(ir_ctx *ctx, ir_ref if_ref)
2515{
2516 IR_ASSERT(!ctx->control);
2517 IR_ASSERT(if_ref);
2518 IR_ASSERT(ctx->ir_base[if_ref].op == IR_IF);
2519 ctx->control = ir_emit1(ctx, IR_IF_FALSE, if_ref);
2520}
2521
2523{
2524 IR_ASSERT(!ctx->control);
2525 IR_ASSERT(if_ref);
2526 IR_ASSERT(ctx->ir_base[if_ref].op == IR_IF);
2527 /* op2 is used as an indicator of low-probability branch */
2528 ctx->control = ir_emit2(ctx, IR_IF_FALSE, if_ref, 1);
2529}
2530
2532{
2533 ir_ref ref;
2534
2535 IR_ASSERT(ctx->control);
2536 ref = ir_emit1(ctx, IR_END, ctx->control);
2537 ctx->control = IR_UNUSED;
2538 return ref;
2539}
2540
2541void _ir_MERGE_2(ir_ctx *ctx, ir_ref src1, ir_ref src2)
2542{
2543 IR_ASSERT(!ctx->control);
2544 ctx->control = ir_emit2(ctx, IR_OPTX(IR_MERGE, IR_VOID, 2), src1, src2);
2545}
2546
2547void _ir_MERGE_N(ir_ctx *ctx, ir_ref n, ir_ref *inputs)
2548{
2549 IR_ASSERT(!ctx->control);
2550 IR_ASSERT(n > 0);
2551 if (n == 1) {
2552 _ir_BEGIN(ctx, inputs[0]);
2553 } else {
2554 ir_ref *ops;
2555
2556 ctx->control = ir_emit_N(ctx, IR_MERGE, n);
2557 ops = ctx->ir_base[ctx->control].ops;
2558 while (n) {
2559 n--;
2560 ops[n + 1] = inputs[n];
2561 }
2562 }
2563}
2564
2566{
2567 ir_insn *insn = &ctx->ir_base[merge];
2568 ir_ref *ops = insn->ops;
2569
2570 IR_ASSERT(insn->op == IR_MERGE || insn->op == IR_LOOP_BEGIN);
2571 IR_ASSERT(pos > 0 && pos <= insn->inputs_count);
2572 ops[pos] = src;
2573}
2574
2576{
2577 ir_ref ref;
2578
2579 IR_ASSERT(ctx->control);
2580 IR_ASSERT(!list || ctx->ir_base[list].op == IR_END);
2581 /* create a liked list of END nodes with the same destination through END.op2 */
2582 ref = ir_emit2(ctx, IR_END, ctx->control, list);
2583 ctx->control = IR_UNUSED;
2584 return ref;
2585}
2586
2588{
2589 ir_ref ref;
2590
2591 IR_ASSERT(ctx->control);
2592 IR_ASSERT(!list || ctx->ir_base[list].op == IR_END);
2593 /* create a liked list of END nodes with the same destination through END.op2 */
2594 ref = ir_emit3(ctx, IR_END, ctx->control, list, val);
2595 ctx->control = IR_UNUSED;
2596 return ref;
2597}
2598
2600{
2601 ir_ref ref = list;
2602
2603 if (list != IR_UNUSED) {
2604 uint32_t n = 0;
2605
2606 IR_ASSERT(!ctx->control);
2607
2608 /* count inputs count */
2609 do {
2610 ir_insn *insn = &ctx->ir_base[ref];
2611
2612 IR_ASSERT(insn->op == IR_END);
2613 ref = insn->op2;
2614 n++;
2615 } while (ref != IR_UNUSED);
2616
2617
2618 /* create MERGE node */
2619 IR_ASSERT(n > 0);
2620 if (n == 1) {
2621 ctx->ir_base[list].op2 = IR_UNUSED;
2622 _ir_BEGIN(ctx, list);
2623 } else {
2624 ctx->control = ir_emit_N(ctx, IR_MERGE, n);
2625 ref = list;
2626 while (n) {
2627 ir_insn *insn = &ctx->ir_base[ref];
2628
2629 ir_set_op(ctx, ctx->control, n, ref);
2630 ref = insn->op2;
2631 insn->op2 = IR_UNUSED;
2632 n--;
2633 }
2634 }
2635 }
2636}
2637
2639{
2640 ir_insn *merge, *end;
2641 ir_ref phi, *ops, i;
2642 ir_type type;
2643
2644 if (list == IR_UNUSED) {
2645 return IR_UNUSED;
2646 }
2647 end = &ctx->ir_base[list];
2648 if (!end->op2) {
2649 phi = end->op3;
2650 end->op3 = IR_UNUSED;
2651 _ir_BEGIN(ctx, list);
2652 } else if (!end->op3) {
2653 _ir_MERGE_LIST(ctx, list);
2654 phi = IR_UNUSED;
2655 } else {
2656 type = ctx->ir_base[end->op3].type;
2657 _ir_MERGE_LIST(ctx, list);
2658 merge = &ctx->ir_base[ctx->control];
2659 IR_ASSERT(merge->op == IR_MERGE);
2660 phi = ir_emit_N(ctx, IR_OPT(IR_PHI, type), merge->inputs_count + 1);
2661 merge = &ctx->ir_base[ctx->control];
2662 ops = merge->ops;
2663 ir_set_op(ctx, phi, 1, ctx->control);
2664 for (i = 0; i < merge->inputs_count; i++) {
2665 end = &ctx->ir_base[ops[i + 1]];
2666 ir_set_op(ctx, phi, i + 2, end->op3);
2667 end->op3 = IR_END;
2668 }
2669 }
2670 return phi;
2671}
2672
2674{
2675 IR_ASSERT(!ctx->control);
2676 ctx->control = ir_emit2(ctx, IR_OPTX(IR_LOOP_BEGIN, IR_VOID, 2), src1, IR_UNUSED);
2677 return ctx->control;
2678}
2679
2681{
2682 ir_ref ref;
2683
2684 IR_ASSERT(ctx->control);
2685 ref = ir_emit1(ctx, IR_LOOP_END, ctx->control);
2686 ctx->control = IR_UNUSED;
2687 return ref;
2688}
2689
2691{
2692 IR_ASSERT(ctx->control);
2693 return ctx->control = ir_emit2(ctx, IR_OPTX(IR_CALL, type, 2), ctx->control, func);
2694}
2695
2697{
2698 IR_ASSERT(ctx->control);
2699 return ctx->control = ir_emit3(ctx, IR_OPTX(IR_CALL, type, 3), ctx->control, func, arg1);
2700}
2701
2703{
2704 ir_ref call;
2705
2706 IR_ASSERT(ctx->control);
2707 call = ir_emit_N(ctx, IR_OPT(IR_CALL, type), 4);
2708 ir_set_op(ctx, call, 1, ctx->control);
2709 ir_set_op(ctx, call, 2, func);
2710 ir_set_op(ctx, call, 3, arg1);
2711 ir_set_op(ctx, call, 4, arg2);
2712 ctx->control = call;
2713 return call;
2714}
2715
2717{
2718 ir_ref call;
2719
2720 IR_ASSERT(ctx->control);
2721 call = ir_emit_N(ctx, IR_OPT(IR_CALL, type), 5);
2722 ir_set_op(ctx, call, 1, ctx->control);
2723 ir_set_op(ctx, call, 2, func);
2724 ir_set_op(ctx, call, 3, arg1);
2725 ir_set_op(ctx, call, 4, arg2);
2726 ir_set_op(ctx, call, 5, arg3);
2727 ctx->control = call;
2728 return call;
2729}
2730
2732{
2733 ir_ref call;
2734
2735 IR_ASSERT(ctx->control);
2736 call = ir_emit_N(ctx, IR_OPT(IR_CALL, type), 6);
2737 ir_set_op(ctx, call, 1, ctx->control);
2738 ir_set_op(ctx, call, 2, func);
2739 ir_set_op(ctx, call, 3, arg1);
2740 ir_set_op(ctx, call, 4, arg2);
2741 ir_set_op(ctx, call, 5, arg3);
2742 ir_set_op(ctx, call, 6, arg4);
2743 ctx->control = call;
2744 return call;
2745}
2746
2748{
2749 ir_ref call;
2750
2751 IR_ASSERT(ctx->control);
2752 call = ir_emit_N(ctx, IR_OPT(IR_CALL, type), 7);
2753 ir_set_op(ctx, call, 1, ctx->control);
2754 ir_set_op(ctx, call, 2, func);
2755 ir_set_op(ctx, call, 3, arg1);
2756 ir_set_op(ctx, call, 4, arg2);
2757 ir_set_op(ctx, call, 5, arg3);
2758 ir_set_op(ctx, call, 6, arg4);
2759 ir_set_op(ctx, call, 7, arg5);
2760 ctx->control = call;
2761 return call;
2762}
2763
2765{
2766 ir_ref call;
2767
2768 IR_ASSERT(ctx->control);
2769 call = ir_emit_N(ctx, IR_OPT(IR_CALL, type), 8);
2770 ir_set_op(ctx, call, 1, ctx->control);
2771 ir_set_op(ctx, call, 2, func);
2772 ir_set_op(ctx, call, 3, arg1);
2773 ir_set_op(ctx, call, 4, arg2);
2774 ir_set_op(ctx, call, 5, arg3);
2775 ir_set_op(ctx, call, 6, arg4);
2776 ir_set_op(ctx, call, 7, arg5);
2777 ir_set_op(ctx, call, 8, arg6);
2778 ctx->control = call;
2779 return call;
2780}
2781
2783{
2784 ir_ref call;
2785 uint32_t i;
2786
2787 IR_ASSERT(ctx->control);
2788 call = ir_emit_N(ctx, IR_OPT(IR_CALL, type), count + 2);
2789 ir_set_op(ctx, call, 1, ctx->control);
2790 ir_set_op(ctx, call, 2, func);
2791 for (i = 0; i < count; i++) {
2792 ir_set_op(ctx, call, i + 3, args[i]);
2793 }
2794 ctx->control = call;
2795 return call;
2796}
2797
2799{
2800 IR_ASSERT(ctx->control);
2801 ctx->control = ir_emit3(ctx, IR_UNREACHABLE, ctx->control, IR_UNUSED, ctx->ir_base[1].op1);
2802 ctx->ir_base[1].op1 = ctx->control;
2803 ctx->control = IR_UNUSED;
2804}
2805
2807{
2808 IR_ASSERT(ctx->control);
2809 if (ctx->ret_type == (ir_type)-1) {
2810 ctx->ret_type = type;
2811 }
2812 IR_ASSERT(ctx->ret_type == type && "conflicting return type");
2813 ctx->control = ir_emit2(ctx, IR_OPTX(IR_TAILCALL, type, 2), ctx->control, func);
2814 _ir_UNREACHABLE(ctx);
2815}
2816
2818{
2819 IR_ASSERT(ctx->control);
2820 if (ctx->ret_type == (ir_type)-1) {
2821 ctx->ret_type = type;
2822 }
2823 IR_ASSERT(ctx->ret_type == type && "conflicting return type");
2824 ctx->control = ir_emit3(ctx, IR_OPTX(IR_TAILCALL, type, 3), ctx->control, func, arg1);
2825 _ir_UNREACHABLE(ctx);
2826}
2827
2829{
2830 ir_ref call;
2831
2832 IR_ASSERT(ctx->control);
2833 if (ctx->ret_type == (ir_type)-1) {
2834 ctx->ret_type = type;
2835 }
2836 IR_ASSERT(ctx->ret_type == type && "conflicting return type");
2837 call = ir_emit_N(ctx, IR_OPT(IR_TAILCALL, type), 4);
2838 ir_set_op(ctx, call, 1, ctx->control);
2839 ir_set_op(ctx, call, 2, func);
2840 ir_set_op(ctx, call, 3, arg1);
2841 ir_set_op(ctx, call, 4, arg2);
2842 ctx->control = call;
2843 _ir_UNREACHABLE(ctx);
2844}
2845
2847{
2848 ir_ref call;
2849
2850 IR_ASSERT(ctx->control);
2851 if (ctx->ret_type == (ir_type)-1) {
2852 ctx->ret_type = type;
2853 }
2854 IR_ASSERT(ctx->ret_type == type && "conflicting return type");
2855 call = ir_emit_N(ctx, IR_OPT(IR_TAILCALL, type), 5);
2856 ir_set_op(ctx, call, 1, ctx->control);
2857 ir_set_op(ctx, call, 2, func);
2858 ir_set_op(ctx, call, 3, arg1);
2859 ir_set_op(ctx, call, 4, arg2);
2860 ir_set_op(ctx, call, 5, arg3);
2861 ctx->control = call;
2862 _ir_UNREACHABLE(ctx);
2863}
2864
2866{
2867 ir_ref call;
2868
2869 IR_ASSERT(ctx->control);
2870 if (ctx->ret_type == (ir_type)-1) {
2871 ctx->ret_type = type;
2872 }
2873 IR_ASSERT(ctx->ret_type == type && "conflicting return type");
2874 call = ir_emit_N(ctx, IR_OPT(IR_TAILCALL, type), 6);
2875 ir_set_op(ctx, call, 1, ctx->control);
2876 ir_set_op(ctx, call, 2, func);
2877 ir_set_op(ctx, call, 3, arg1);
2878 ir_set_op(ctx, call, 4, arg2);
2879 ir_set_op(ctx, call, 5, arg3);
2880 ir_set_op(ctx, call, 6, arg4);
2881 ctx->control = call;
2882 _ir_UNREACHABLE(ctx);
2883}
2884
2886{
2887 ir_ref call;
2888
2889 IR_ASSERT(ctx->control);
2890 if (ctx->ret_type == (ir_type)-1) {
2891 ctx->ret_type = type;
2892 }
2893 IR_ASSERT(ctx->ret_type == type && "conflicting return type");
2894 call = ir_emit_N(ctx, IR_OPT(IR_TAILCALL, type), 7);
2895 ir_set_op(ctx, call, 1, ctx->control);
2896 ir_set_op(ctx, call, 2, func);
2897 ir_set_op(ctx, call, 3, arg1);
2898 ir_set_op(ctx, call, 4, arg2);
2899 ir_set_op(ctx, call, 5, arg3);
2900 ir_set_op(ctx, call, 6, arg4);
2901 ir_set_op(ctx, call, 7, arg5);
2902 ctx->control = call;
2903 _ir_UNREACHABLE(ctx);
2904}
2905
2907{
2908 ir_ref call;
2909
2910 IR_ASSERT(ctx->control);
2911 if (ctx->ret_type == (ir_type)-1) {
2912 ctx->ret_type = type;
2913 }
2914 IR_ASSERT(ctx->ret_type == type && "conflicting return type");
2915 call = ir_emit_N(ctx, IR_OPT(IR_TAILCALL, type), 8);
2916 ir_set_op(ctx, call, 1, ctx->control);
2917 ir_set_op(ctx, call, 2, func);
2918 ir_set_op(ctx, call, 3, arg1);
2919 ir_set_op(ctx, call, 4, arg2);
2920 ir_set_op(ctx, call, 5, arg3);
2921 ir_set_op(ctx, call, 6, arg4);
2922 ir_set_op(ctx, call, 7, arg5);
2923 ir_set_op(ctx, call, 8, arg6);
2924 ctx->control = call;
2925 _ir_UNREACHABLE(ctx);
2926}
2927
2929{
2930 ir_ref call;
2931 uint32_t i;
2932
2933 IR_ASSERT(ctx->control);
2934 if (ctx->ret_type == (ir_type)-1) {
2935 ctx->ret_type = type;
2936 }
2937 IR_ASSERT(ctx->ret_type == type && "conflicting return type");
2938 call = ir_emit_N(ctx, IR_OPT(IR_TAILCALL, type), count + 2);
2939 ir_set_op(ctx, call, 1, ctx->control);
2940 ir_set_op(ctx, call, 2, func);
2941 for (i = 0; i < count; i++) {
2942 ir_set_op(ctx, call, i + 3, args[i]);
2943 }
2944 ctx->control = call;
2945 _ir_UNREACHABLE(ctx);
2946}
2947
2949{
2950 ir_ref ref;
2951
2952 IR_ASSERT(ctx->control);
2953 ref = ir_emit2(ctx, IR_SWITCH, ctx->control, val);
2954 ctx->control = IR_UNUSED;
2955 return ref;
2956}
2957
2958void _ir_CASE_VAL(ir_ctx *ctx, ir_ref switch_ref, ir_ref val)
2959{
2960 IR_ASSERT(!ctx->control);
2961 ctx->control = ir_emit2(ctx, IR_CASE_VAL, switch_ref, val);
2962}
2963
2964void _ir_CASE_DEFAULT(ir_ctx *ctx, ir_ref switch_ref)
2965{
2966 IR_ASSERT(!ctx->control);
2967 ctx->control = ir_emit1(ctx, IR_CASE_DEFAULT, switch_ref);
2968}
2969
2971{
2972 ir_type type = (val != IR_UNUSED) ? ctx->ir_base[val].type : IR_VOID;
2973
2974 IR_ASSERT(ctx->control);
2975 if (ctx->ret_type == (ir_type)-1) {
2976 ctx->ret_type = type;
2977 }
2978 IR_ASSERT(ctx->ret_type == type && "conflicting return type");
2979 ctx->control = ir_emit3(ctx, IR_RETURN, ctx->control, val, ctx->ir_base[1].op1);
2980 ctx->ir_base[1].op1 = ctx->control;
2981 ctx->control = IR_UNUSED;
2982}
2983
2985{
2986 IR_ASSERT(ctx->control);
2987 ctx->control = ir_emit3(ctx, IR_IJMP, ctx->control, addr, ctx->ir_base[1].op1);
2988 ctx->ir_base[1].op1 = ctx->control;
2989 ctx->control = IR_UNUSED;
2990}
2991
2993{
2994 if (offset) {
2995 addr = ir_fold2(ctx, IR_OPT(IR_ADD, IR_ADDR), addr, ir_const_addr(ctx, offset));
2996 }
2997 return addr;
2998}
2999
3000void _ir_GUARD(ir_ctx *ctx, ir_ref condition, ir_ref addr)
3001{
3002 IR_ASSERT(ctx->control);
3003 if (IR_IS_CONST_REF(condition)) {
3004 if (ir_ref_is_true(ctx, condition)) {
3005 return;
3006 }
3007 condition = IR_FALSE;
3008 } else if (EXPECTED(ctx->flags & IR_OPT_FOLDING)) {
3009 condition = ir_check_dominating_predicates_i(ctx, ctx->control, condition, condition);
3010 if (condition == IR_TRUE) {
3011 return;
3012 }
3013 }
3014 if (ctx->snapshot_create) {
3015 ctx->snapshot_create(ctx, addr);
3016 }
3017 ctx->control = ir_emit3(ctx, IR_GUARD, ctx->control, condition, addr);
3018}
3019
3020void _ir_GUARD_NOT(ir_ctx *ctx, ir_ref condition, ir_ref addr)
3021{
3022 IR_ASSERT(ctx->control);
3023 if (IR_IS_CONST_REF(condition)) {
3024 if (!ir_ref_is_true(ctx, condition)) {
3025 return;
3026 }
3027 condition = IR_TRUE;
3028 } else if (EXPECTED(ctx->flags & IR_OPT_FOLDING)) {
3029 condition = ir_check_dominating_predicates_i(ctx, ctx->control, condition, condition);
3030 if (condition == IR_FALSE) {
3031 return;
3032 }
3033 }
3034 if (ctx->snapshot_create) {
3035 ctx->snapshot_create(ctx, addr);
3036 }
3037 ctx->control = ir_emit3(ctx, IR_GUARD_NOT, ctx->control, condition, addr);
3038}
3039
3041{
3042 ir_ref snapshot;
3043
3044 IR_ASSERT(ctx->control);
3045 snapshot = ir_emit_N(ctx, IR_SNAPSHOT, 1 + n); /* op1 is used for control */
3046 ctx->ir_base[snapshot].op1 = ctx->control;
3047 ctx->control = snapshot;
3048 return snapshot;
3049}
3050
3052{
3053 ir_insn *insn = &ctx->ir_base[snapshot];
3054 ir_ref *ops = insn->ops;
3055
3056 IR_ASSERT(val < snapshot);
3057 IR_ASSERT(insn->op == IR_SNAPSHOT);
3058 pos++; /* op1 is used for control */
3059 IR_ASSERT(pos > 1 && pos <= insn->inputs_count);
3060 ops[pos] = val;
3061}
3062
3064{
3065 IR_ASSERT(ctx->control);
3066 return ctx->control = ir_emit2(ctx, IR_OPT(IR_EXITCALL, IR_I32), ctx->control, func);
3067}
3068
3070{
3071 IR_ASSERT(ctx->control);
3072 return ctx->control = ir_emit2(ctx, IR_OPT(IR_ALLOCA, IR_ADDR), ctx->control, size);
3073}
3074
3076{
3077 IR_ASSERT(ctx->control);
3078 ctx->control = ir_emit2(ctx, IR_AFREE, ctx->control, size);
3079}
3080
3082{
3083 ir_ref ref;
3084
3085 IR_ASSERT(ctx->control);
3086 if (EXPECTED(ctx->flags & IR_OPT_FOLDING)) {
3087 ref = ir_find_aliasing_vload_i(ctx, ctx->control, type, var);
3088 if (ref) {
3089 ir_insn *insn = &ctx->ir_base[ref];
3090 if (insn->type == type) {
3091 return ref;
3092 } else if (ir_type_size[insn->type] == ir_type_size[type]) {
3093 return ir_fold1(ctx, IR_OPT(IR_BITCAST, type), ref); /* load forwarding with bitcast (L2L) */
3094 } else {
3095 return ir_fold1(ctx, IR_OPT(IR_TRUNC, type), ref); /* partial load forwarding (L2L) */
3096 }
3097 }
3098 }
3099 return ctx->control = ir_emit2(ctx, IR_OPT(IR_VLOAD, type), ctx->control, var);
3100}
3101
3103{
3104 IR_ASSERT(ctx->control);
3105 if (EXPECTED(ctx->flags & IR_OPT_FOLDING)) {
3106 if (ir_find_aliasing_vstore_i(ctx, ctx->control, var, val)) {
3107 /* dead STORE */
3108 return;
3109 }
3110 }
3111 ctx->control = ir_emit3(ctx, IR_VSTORE, ctx->control, var, val);
3112}
3113
3115{
3116 IR_ASSERT(ctx->control);
3117 return ctx->control = ir_emit3(ctx, IR_OPT(IR_TLS, IR_ADDR), ctx->control, index, offset);
3118}
3119
3121{
3122 IR_ASSERT(ctx->control);
3123 return ctx->control = ir_emit2(ctx, IR_OPT(IR_RLOAD, type), ctx->control, reg);
3124}
3125
3127{
3128 IR_ASSERT(ctx->control);
3129 ctx->control = ir_emit3(ctx, IR_RSTORE, ctx->control, val, reg);
3130}
3131
3133{
3134 ir_ref ref;
3135
3136 IR_ASSERT(ctx->control);
3137 if (EXPECTED(ctx->flags & IR_OPT_FOLDING)) {
3138 if (ctx->ir_base[addr].op == IR_VADDR) {
3139 return _ir_VLOAD(ctx, type, ctx->ir_base[addr].op1);
3140 }
3141 ref = ir_find_aliasing_load_i(ctx, ctx->control, type, addr, (addr > 0) ? addr : 1);
3142 if (ref) {
3143 ir_insn *insn = &ctx->ir_base[ref];
3144 if (insn->type == type) {
3145 return ref;
3146 } else if (ir_type_size[insn->type] == ir_type_size[type]) {
3147 return ir_fold1(ctx, IR_OPT(IR_BITCAST, type), ref); /* load forwarding with bitcast (L2L) */
3148 } else {
3149 return ir_fold1(ctx, IR_OPT(IR_TRUNC, type), ref); /* partial load forwarding (L2L) */
3150 }
3151 }
3152 }
3153 return ctx->control = ir_emit2(ctx, IR_OPT(IR_LOAD, type), ctx->control, addr);
3154}
3155
3157{
3158 IR_ASSERT(ctx->control);
3159 if (EXPECTED(ctx->flags & IR_OPT_FOLDING)) {
3160 if (ctx->ir_base[addr].op == IR_VADDR) {
3161 _ir_VSTORE(ctx, ctx->ir_base[addr].op1, val);
3162 return;
3163 }
3164 if (ir_find_aliasing_store_i(ctx, ctx->control, addr, val, (addr > 0) ? addr : 1)) {
3165 /* dead STORE */
3166 return;
3167 }
3168 }
3169 ctx->control = ir_emit3(ctx, IR_STORE, ctx->control, addr, val);
3170}
3171
3173{
3174 IR_ASSERT(ctx->control);
3175 ctx->control = ir_emit2(ctx, IR_VA_START, ctx->control, list);
3176}
3177
3178void _ir_VA_END(ir_ctx *ctx, ir_ref list)
3179{
3180 IR_ASSERT(ctx->control);
3181 ctx->control = ir_emit2(ctx, IR_VA_END, ctx->control, list);
3182}
3183
3184void _ir_VA_COPY(ir_ctx *ctx, ir_ref dst, ir_ref src)
3185{
3186 IR_ASSERT(ctx->control);
3187 ctx->control = ir_emit3(ctx, IR_VA_COPY, ctx->control, dst, src);
3188}
3189
3191{
3192 IR_ASSERT(ctx->control);
3193 return ctx->control = ir_emit2(ctx, IR_OPT(IR_VA_ARG, type), ctx->control, list);
3194}
3195
3197{
3198 IR_ASSERT(ctx->control);
3199 return ctx->control = ir_emit1(ctx, IR_OPT(IR_BLOCK_BEGIN, IR_ADDR), ctx->control);
3200}
size_t len
Definition apprentice.c:174
fprintf($stream, string $format, mixed ... $values)
prev(array|object &$array)
fputs($stream, string $data, ?int $length=null)
count(Countable|array $value, int $mode=COUNT_NORMAL)
char s[4]
Definition cdf.c:77
#define DWORD
Definition exif.c:1762
zend_ffi_type * type
Definition ffi.c:3812
zend_ffi_type * type1
Definition ffi.c:4448
zend_long ch
Definition ffi.c:4580
zend_long n
Definition ffi.c:4979
new_type size
Definition ffi.c:4365
void * ptr
Definition ffi.c:3814
memcpy(ptr1, ptr2, size)
zend_ffi_type * type2
Definition ffi.c:4448
memset(ptr, 0, type->size)
zval * val
Definition ffi.c:4262
zend_ffi_ctype_name_buf buf
Definition ffi.c:4685
const php_stream_filter_ops * ops
Definition filters.c:1899
zend_long offset
#define NULL
Definition gdcache.h:45
again j
void _ir_TAILCALL_4(ir_ctx *ctx, ir_type type, ir_ref func, ir_ref arg1, ir_ref arg2, ir_ref arg3, ir_ref arg4)
Definition ir.c:2865
const char * ir_get_strl(const ir_ctx *ctx, ir_ref idx, size_t *len)
Definition ir.c:715
ir_ref ir_addrtab_find(const ir_hashtab *tab, uint64_t key)
Definition ir.c:1730
bool ir_use_list_add(ir_ctx *ctx, ir_ref to, ir_ref ref)
Definition ir.c:1378
ir_ref ir_binding_find(const ir_ctx *ctx, ir_ref ref)
Definition ir.c:1161
ir_ref ir_fold2(ir_ctx *ctx, uint32_t opt, ir_ref op1, ir_ref op2)
Definition ir.c:1067
void ir_free(ir_ctx *ctx)
Definition ir.c:412
ir_ref ir_str(ir_ctx *ctx, const char *s)
Definition ir.c:688
void ir_update_op(ir_ctx *ctx, ir_ref ref, uint32_t idx, ir_ref new_val)
Definition ir.c:1469
void _ir_CASE_DEFAULT(ir_ctx *ctx, ir_ref switch_ref)
Definition ir.c:2964
ir_ref ir_emit3(ir_ctx *ctx, uint32_t opt, ir_ref op1, ir_ref op2, ir_ref op3)
Definition ir.c:839
ir_ref ir_const_func(ir_ctx *ctx, ir_ref str, ir_ref proto)
Definition ir.c:666
void _ir_IJMP(ir_ctx *ctx, ir_ref addr)
Definition ir.c:2984
ir_ref ir_bind(ir_ctx *ctx, ir_ref var, ir_ref def)
Definition ir.c:1142
void _ir_AFREE(ir_ctx *ctx, ir_ref size)
Definition ir.c:3075
ir_ref ir_const_i64(ir_ctx *ctx, int64_t c)
Definition ir.c:583
ir_ref ir_get_op(ir_ctx *ctx, ir_ref ref, int32_t n)
Definition ir.c:1114
ir_ref ir_fold3(ir_ctx *ctx, uint32_t opt, ir_ref op1, ir_ref op2, ir_ref op3)
Definition ir.c:1072
void ir_truncate(ir_ctx *ctx)
Definition ir.c:370
ir_ref _ir_EXITCALL(ir_ctx *ctx, ir_ref func)
Definition ir.c:3063
void ir_list_insert(ir_list *l, uint32_t i, ir_ref val)
Definition ir.c:1509
ir_ref _ir_IF(ir_ctx *ctx, ir_ref condition)
Definition ir.c:2475
ir_ref ir_const_i32(ir_ctx *ctx, int32_t c)
Definition ir.c:576
ir_ref ir_emit2(ir_ctx *ctx, uint32_t opt, ir_ref op1, ir_ref op2)
Definition ir.c:834
void ir_use_list_replace_one(ir_ctx *ctx, ir_ref ref, ir_ref use, ir_ref new_use)
Definition ir.c:1347
ir_ref ir_const_addr(ir_ctx *ctx, uintptr_t c)
Definition ir.c:645
const char * ir_type_name[IR_LAST_TYPE]
Definition ir.c:56
void ir_replace(ir_ctx *ctx, ir_ref ref, ir_ref new_ref)
Definition ir.c:1430
void _ir_TAILCALL_5(ir_ctx *ctx, ir_type type, ir_ref func, ir_ref arg1, ir_ref arg2, ir_ref arg3, ir_ref arg4, ir_ref arg5)
Definition ir.c:2885
ir_ref ir_check_dominating_predicates(ir_ctx *ctx, ir_ref ref, ir_ref condition)
Definition ir.c:2469
ir_ref _ir_CALL_4(ir_ctx *ctx, ir_type type, ir_ref func, ir_ref arg1, ir_ref arg2, ir_ref arg3, ir_ref arg4)
Definition ir.c:2731
ir_ref ir_const_ex(ir_ctx *ctx, ir_val val, uint8_t type, uint32_t optx)
Definition ir.c:512
void ir_array_remove(ir_array *a, uint32_t i)
Definition ir.c:1502
void ir_array_grow(ir_array *a, uint32_t size)
Definition ir.c:1485
void _ir_TAILCALL_2(ir_ctx *ctx, ir_type type, ir_ref func, ir_ref arg1, ir_ref arg2)
Definition ir.c:2828
ir_ref _ir_ADD_OFFSET(ir_ctx *ctx, ir_ref addr, uintptr_t offset)
Definition ir.c:2992
void _ir_SNAPSHOT_SET_OP(ir_ctx *ctx, ir_ref snapshot, ir_ref pos, ir_ref val)
Definition ir.c:3051
ir_ref ir_const_double(ir_ctx *ctx, double c)
Definition ir.c:638
ir_ref _ir_BLOCK_BEGIN(ir_ctx *ctx)
Definition ir.c:3196
void _ir_GUARD_NOT(ir_ctx *ctx, ir_ref condition, ir_ref addr)
Definition ir.c:3020
ir_ref ir_const_bool(ir_ctx *ctx, bool c)
Definition ir.c:618
void ir_print_escaped_str(const char *s, size_t len, FILE *f)
Definition ir.c:78
void ir_build_def_use_lists(ir_ctx *ctx)
Definition ir.c:1224
ir_ref _ir_SNAPSHOT(ir_ctx *ctx, ir_ref n)
Definition ir.c:3040
int ir_mem_unprotect(void *ptr, size_t size)
Definition ir.c:1851
int ir_mem_unmap(void *ptr, size_t size)
Definition ir.c:1834
#define IR_FOLD_RULE(x)
Definition ir.c:923
uint32_t ir_list_find(const ir_list *l, ir_ref val)
Definition ir.c:1527
const uint8_t ir_type_flags[IR_LAST_TYPE]
Definition ir.c:51
void _ir_CASE_VAL(ir_ctx *ctx, ir_ref switch_ref, ir_ref val)
Definition ir.c:2958
int ir_mem_flush(void *ptr, size_t size)
Definition ir.c:1862
ir_ref ir_const_i8(ir_ctx *ctx, int8_t c)
Definition ir.c:562
void _ir_BEGIN(ir_ctx *ctx, ir_ref src)
Definition ir.c:2391
void _ir_MERGE_2(ir_ctx *ctx, ir_ref src1, ir_ref src2)
Definition ir.c:2541
ir_ref ir_emit1(ir_ctx *ctx, uint32_t opt, ir_ref op1)
Definition ir.c:829
void _ir_START(ir_ctx *ctx)
Definition ir.c:2374
ir_ref ir_find_aliasing_load(ir_ctx *ctx, ir_ref ref, ir_type type, ir_ref addr)
Definition ir.c:2064
ir_ref ir_fold(ir_ctx *ctx, uint32_t opt, ir_ref op1, ir_ref op2, ir_ref op3)
Definition ir.c:1046
ir_ref ir_const_func_addr(ir_ctx *ctx, uintptr_t c, ir_ref proto)
Definition ir.c:655
void ir_use_list_sort(ir_ctx *ctx, ir_ref ref)
Definition ir.c:1417
void ir_list_remove(ir_list *l, uint32_t i)
Definition ir.c:1520
void _ir_TAILCALL_6(ir_ctx *ctx, ir_type type, ir_ref func, ir_ref arg1, ir_ref arg2, ir_ref arg3, ir_ref arg4, ir_ref arg5, ir_ref arg6)
Definition ir.c:2906
ir_ref ir_find_aliasing_vload(ir_ctx *ctx, ir_ref ref, ir_type type, ir_ref var)
Definition ir.c:2110
void _ir_TAILCALL_1(ir_ctx *ctx, ir_type type, ir_ref func, ir_ref arg1)
Definition ir.c:2817
ir_ref ir_emit_N(ir_ctx *ctx, uint32_t opt, int32_t count)
Definition ir.c:1077
ir_ref ir_const_u64(ir_ctx *ctx, uint64_t c)
Definition ir.c:611
const char * ir_type_cname[IR_LAST_TYPE]
Definition ir.c:66
ir_ref ir_unique_const_addr(ir_ctx *ctx, uintptr_t addr)
Definition ir.c:472
ir_ref _ir_ALLOCA(ir_ctx *ctx, ir_ref size)
Definition ir.c:3069
void _ir_STORE(ir_ctx *ctx, ir_ref addr, ir_ref val)
Definition ir.c:3156
#define IR_TYPE_SIZE(name, type, field, flags)
Definition ir.c:48
ir_ref ir_find_aliasing_vstore(ir_ctx *ctx, ir_ref ref, ir_ref var, ir_ref val)
Definition ir.c:2284
ir_ref ir_const_i16(ir_ctx *ctx, int16_t c)
Definition ir.c:569
ir_ref ir_const(ir_ctx *ctx, ir_val val, uint8_t type)
Definition ir.c:557
void _ir_ENTRY(ir_ctx *ctx, ir_ref src, ir_ref num)
Definition ir.c:2381
ir_ref ir_proto_5(ir_ctx *ctx, uint8_t flags, ir_type ret_type, ir_type t1, ir_type t2, ir_type t3, ir_type t4, ir_type t5)
Definition ir.c:782
void _ir_RETURN(ir_ctx *ctx, ir_ref val)
Definition ir.c:2970
void ir_hashtab_init(ir_hashtab *tab, uint32_t size)
Definition ir.c:1580
IR_ALWAYS_INLINE ir_ref ir_find_aliasing_vstore_i(ir_ctx *ctx, ir_ref ref, ir_ref var, ir_ref val)
Definition ir.c:2209
void _ir_VA_COPY(ir_ctx *ctx, ir_ref dst, ir_ref src)
Definition ir.c:3184
const char * ir_get_str(const ir_ctx *ctx, ir_ref idx)
Definition ir.c:709
ir_ref ir_var(ir_ctx *ctx, ir_type type, ir_ref region, const char *name)
Definition ir.c:1136
ir_ref ir_emit(ir_ctx *ctx, uint32_t opt, ir_ref op1, ir_ref op2, ir_ref op3)
Definition ir.c:811
ir_ref _ir_VLOAD(ir_ctx *ctx, ir_type type, ir_ref var)
Definition ir.c:3081
void ir_use_list_replace_all(ir_ctx *ctx, ir_ref ref, ir_ref use, ir_ref new_use)
Definition ir.c:1363
ir_ref _ir_LOOP_BEGIN(ir_ctx *ctx, ir_ref src1)
Definition ir.c:2673
ir_ref ir_find_aliasing_store(ir_ctx *ctx, ir_ref ref, ir_ref addr, ir_ref val)
Definition ir.c:2204
ir_ref ir_fold1(ir_ctx *ctx, uint32_t opt, ir_ref op1)
Definition ir.c:1062
ir_ref _ir_VAR(ir_ctx *ctx, ir_type type, const char *name)
Definition ir.c:2299
_ir_alias
Definition ir.c:1878
@ IR_NO_ALIAS
Definition ir.c:1880
@ IR_MUST_ALIAS
Definition ir.c:1881
@ IR_MAY_ALIAS
Definition ir.c:1879
void _ir_IF_FALSE(ir_ctx *ctx, ir_ref if_ref)
Definition ir.c:2514
ir_ref _ir_PHI_2(ir_ctx *ctx, ir_type type, ir_ref src1, ir_ref src2)
Definition ir.c:2316
ir_ref _ir_PHI_N(ir_ctx *ctx, ir_type type, ir_ref n, ir_ref *inputs)
Definition ir.c:2326
ir_ref _ir_RLOAD(ir_ctx *ctx, ir_type type, ir_ref reg)
Definition ir.c:3120
void _ir_VA_START(ir_ctx *ctx, ir_ref list)
Definition ir.c:3172
void ir_hashtab_key_sort(ir_hashtab *tab)
Definition ir.c:1653
ir_ref ir_const_sym(ir_ctx *ctx, ir_ref str)
Definition ir.c:674
ir_ref _ir_LOAD(ir_ctx *ctx, ir_type type, ir_ref addr)
Definition ir.c:3132
ir_alias ir_check_partial_aliasing(const ir_ctx *ctx, ir_ref addr1, ir_ref addr2, ir_type type1, ir_type type2)
Definition ir.c:1920
ir_ref ir_strl(ir_ctx *ctx, const char *s, size_t len)
Definition ir.c:700
const uint32_t ir_op_flags[IR_LAST_OP]
Definition ir.c:294
void ir_set_op(ir_ctx *ctx, ir_ref ref, int32_t n, ir_ref val)
Definition ir.c:1098
void ir_use_list_remove_one(ir_ctx *ctx, ir_ref from, ir_ref ref)
Definition ir.c:1321
void ir_print_const(const ir_ctx *ctx, const ir_insn *insn, FILE *f, bool quoted)
Definition ir.c:117
ir_ref _ir_CALL_1(ir_ctx *ctx, ir_type type, ir_ref func, ir_ref arg1)
Definition ir.c:2696
IR_ALWAYS_INLINE ir_ref ir_find_aliasing_vload_i(ir_ctx *ctx, ir_ref ref, ir_type type, ir_ref var)
Definition ir.c:2069
ir_ref _ir_TLS(ir_ctx *ctx, ir_ref index, ir_ref offset)
Definition ir.c:3114
void ir_init(ir_ctx *ctx, uint32_t flags, ir_ref consts_limit, ir_ref insns_limit)
Definition ir.c:381
void _ir_VA_END(ir_ctx *ctx, ir_ref list)
Definition ir.c:3178
void _ir_TAILCALL(ir_ctx *ctx, ir_type type, ir_ref func)
Definition ir.c:2806
ir_ref ir_proto_2(ir_ctx *ctx, uint8_t flags, ir_type ret_type, ir_type t1, ir_type t2)
Definition ir.c:742
ir_ref _ir_PHI_LIST(ir_ctx *ctx, ir_ref list)
Definition ir.c:2638
ir_ref ir_proto(ir_ctx *ctx, uint8_t flags, ir_type ret_type, uint32_t params_count, uint8_t *param_types)
Definition ir.c:798
ir_ref ir_const_u8(ir_ctx *ctx, uint8_t c)
Definition ir.c:590
ir_ref ir_const_u32(ir_ctx *ctx, uint32_t c)
Definition ir.c:604
#define IR_TYPE_FLAGS(name, type, field, flags)
Definition ir.c:45
int ir_mem_protect(void *ptr, size_t size)
Definition ir.c:1840
IR_ALWAYS_INLINE ir_ref ir_check_dominating_predicates_i(ir_ctx *ctx, ir_ref ref, ir_ref condition, ir_ref limit)
Definition ir.c:2436
IR_ALWAYS_INLINE ir_ref ir_find_aliasing_load_i(ir_ctx *ctx, ir_ref ref, ir_type type, ir_ref addr, ir_ref limit)
Definition ir.c:2014
void ir_use_list_remove_all(ir_ctx *ctx, ir_ref from, ir_ref ref)
Definition ir.c:1295
const char * ir_op_name[IR_LAST_OP]
Definition ir.c:71
#define IR_OP_NAME(name, flags, op1, op2, op3)
Definition ir.c:49
void _ir_GUARD(ir_ctx *ctx, ir_ref condition, ir_ref addr)
Definition ir.c:3000
ir_ref ir_folding(ir_ctx *ctx, uint32_t opt, ir_ref op1, ir_ref op2, ir_ref op3, ir_insn *op1_insn, ir_insn *op2_insn, ir_insn *op3_insn)
Definition ir.c:932
ir_ref ir_const_float(ir_ctx *ctx, float c)
Definition ir.c:630
ir_ref _ir_CALL_N(ir_ctx *ctx, ir_type type, ir_ref func, uint32_t count, ir_ref *args)
Definition ir.c:2782
ir_ref _ir_SWITCH(ir_ctx *ctx, ir_ref val)
Definition ir.c:2948
ir_ref _ir_VA_ARG(ir_ctx *ctx, ir_type type, ir_ref list)
Definition ir.c:3190
#define _IR_OP_FLAGS(name, flags, op1, op2, op3)
Definition ir.c:291
void _ir_IF_FALSE_cold(ir_ctx *ctx, ir_ref if_ref)
Definition ir.c:2522
ir_ref ir_proto_0(ir_ctx *ctx, uint8_t flags, ir_type ret_type)
Definition ir.c:721
ir_ref ir_param(ir_ctx *ctx, ir_type type, ir_ref region, const char *name, int pos)
Definition ir.c:1130
void ir_array_insert(ir_array *a, uint32_t i, ir_ref val)
Definition ir.c:1492
ir_ref _ir_LOOP_END(ir_ctx *ctx)
Definition ir.c:2680
void ir_addrtab_init(ir_hashtab *tab, uint32_t size)
Definition ir.c:1709
void _ir_MERGE_LIST(ir_ctx *ctx, ir_ref list)
Definition ir.c:2599
enum _ir_alias ir_alias
ir_ref _ir_CALL(ir_ctx *ctx, ir_type type, ir_ref func)
Definition ir.c:2690
void _ir_TAILCALL_N(ir_ctx *ctx, ir_type type, ir_ref func, uint32_t count, ir_ref *args)
Definition ir.c:2928
void _ir_PHI_SET_OP(ir_ctx *ctx, ir_ref phi, ir_ref pos, ir_ref src)
Definition ir.c:2362
ir_ref ir_proto_1(ir_ctx *ctx, uint8_t flags, ir_type ret_type, ir_type t1)
Definition ir.c:731
ir_ref ir_const_str(ir_ctx *ctx, ir_ref str)
Definition ir.c:681
ir_ref ir_const_char(ir_ctx *ctx, char c)
Definition ir.c:623
const uint8_t ir_type_size[IR_LAST_TYPE]
Definition ir.c:61
ir_ref _ir_CALL_5(ir_ctx *ctx, ir_type type, ir_ref func, ir_ref arg1, ir_ref arg2, ir_ref arg3, ir_ref arg4, ir_ref arg5)
Definition ir.c:2747
ir_ref _ir_PARAM(ir_ctx *ctx, ir_type type, const char *name, ir_ref num)
Definition ir.c:2291
void _ir_MERGE_N(ir_ctx *ctx, ir_ref n, ir_ref *inputs)
Definition ir.c:2547
void ir_hashtab_free(ir_hashtab *tab)
Definition ir.c:1593
void _ir_VSTORE(ir_ctx *ctx, ir_ref var, ir_ref val)
Definition ir.c:3102
ir_ref _ir_CALL_3(ir_ctx *ctx, ir_type type, ir_ref func, ir_ref arg1, ir_ref arg2, ir_ref arg3)
Definition ir.c:2716
IR_ALWAYS_INLINE ir_ref ir_find_aliasing_store_i(ir_ctx *ctx, ir_ref ref, ir_ref addr, ir_ref val, ir_ref limit)
Definition ir.c:2115
ir_ref ir_proto_4(ir_ctx *ctx, uint8_t flags, ir_type ret_type, ir_type t1, ir_type t2, ir_type t3, ir_type t4)
Definition ir.c:767
ir_ref _ir_CALL_2(ir_ctx *ctx, ir_type type, ir_ref func, ir_ref arg1, ir_ref arg2)
Definition ir.c:2702
#define IR_FOLD_KEY(x)
Definition ir.c:924
ir_ref ir_const_u16(ir_ctx *ctx, uint16_t c)
Definition ir.c:597
void ir_addrtab_set(ir_hashtab *tab, uint64_t key, ir_ref val)
Definition ir.c:1746
void ir_addrtab_free(ir_hashtab *tab)
Definition ir.c:1722
void _ir_MERGE_SET_OP(ir_ctx *ctx, ir_ref merge, ir_ref pos, ir_ref src)
Definition ir.c:2565
ir_ref ir_hashtab_find(const ir_hashtab *tab, uint32_t key)
Definition ir.c:1601
void _ir_TAILCALL_3(ir_ctx *ctx, ir_type type, ir_ref func, ir_ref arg1, ir_ref arg2, ir_ref arg3)
Definition ir.c:2846
void * ir_mem_mmap(size_t size)
Definition ir.c:1821
void _ir_RSTORE(ir_ctx *ctx, ir_ref reg, ir_ref val)
Definition ir.c:3126
void _ir_IF_TRUE_cold(ir_ctx *ctx, ir_ref if_ref)
Definition ir.c:2505
ir_ref ir_fold0(ir_ctx *ctx, uint32_t opt)
Definition ir.c:1057
ir_ref _ir_END_LIST(ir_ctx *ctx, ir_ref list)
Definition ir.c:2575
ir_ref _ir_END(ir_ctx *ctx)
Definition ir.c:2531
void _ir_IF_TRUE(ir_ctx *ctx, ir_ref if_ref)
Definition ir.c:2497
#define IR_TYPE_NAME(name, type, field, flags)
Definition ir.c:46
ir_ref _ir_CALL_6(ir_ctx *ctx, ir_type type, ir_ref func, ir_ref arg1, ir_ref arg2, ir_ref arg3, ir_ref arg4, ir_ref arg5, ir_ref arg6)
Definition ir.c:2764
#define IR_TYPE_CNAME(name, type, field, flags)
Definition ir.c:47
bool ir_hashtab_add(ir_hashtab *tab, uint32_t key, ir_ref val)
Definition ir.c:1617
ir_ref _ir_END_PHI_LIST(ir_ctx *ctx, ir_ref list, ir_ref val)
Definition ir.c:2587
ir_ref ir_proto_3(ir_ctx *ctx, uint8_t flags, ir_type ret_type, ir_type t1, ir_type t2, ir_type t3)
Definition ir.c:754
ir_ref ir_emit0(ir_ctx *ctx, uint32_t opt)
Definition ir.c:824
void _ir_UNREACHABLE(ir_ctx *ctx)
Definition ir.c:2798
const char * ir_strtab_strl(const ir_strtab *strtab, ir_ref idx, size_t *len)
Definition ir_strtab.c:208
#define IR_TYPES(_)
Definition ir.h:128
#define IR_OPTX(op, type, n)
Definition ir.h:386
enum _ir_type ir_type
#define IR_IS_TYPE_INT(t)
Definition ir.h:145
IR_ALWAYS_INLINE uint32_t ir_insn_find_op(const ir_insn *insn, ir_ref val)
Definition ir.h:739
#define IR_TRUE
Definition ir.h:398
union _ir_val ir_val
#define IR_OPT_FOLDING
Definition ir.h:531
struct _ir_hashtab ir_hashtab
Definition ir.h:482
#define IR_OPS(_)
Definition ir.h:220
#define IR_UNUSED
Definition ir.h:395
@ IR_VOID
Definition ir.h:151
@ IR_LAST_TYPE
Definition ir.h:153
#define IR_NULL
Definition ir.h:396
#define IR_OPT_OP_MASK
Definition ir.h:380
#define ir_mem_calloc
Definition ir.h:1009
int32_t ir_ref
Definition ir.h:390
#define ir_strtab_count(strtab)
Definition ir.h:496
IR_ALWAYS_INLINE ir_ref ir_insn_op(const ir_insn *insn, int32_t n)
Definition ir.h:727
struct _ir_proto_t ir_proto_t
#define IR_OPT(op, type)
Definition ir.h:385
#define IR_IS_CONST_REF(ref)
Definition ir.h:392
#define ir_mem_malloc
Definition ir.h:1006
IR_ALWAYS_INLINE void ir_insn_set_op(ir_insn *insn, int32_t n, ir_ref val)
Definition ir.h:733
#define IR_OPT_TYPE(opt)
Definition ir.h:387
const char * ir_strtab_str(const ir_strtab *strtab, ir_ref idx)
Definition ir_strtab.c:202
ir_ref ir_strtab_lookup(ir_strtab *strtab, const char *str, uint32_t len, ir_ref val)
Definition ir_strtab.c:134
#define IR_FALSE
Definition ir.h:397
#define ir_mem_realloc
Definition ir.h:1012
@ IR_LAST_OP
Definition ir.h:376
#define ir_mem_free
Definition ir.h:1015
void ir_strtab_free(ir_strtab *strtab)
Definition ir_strtab.c:216
#define IR_INSNS_LIMIT_MIN
Definition ir.h:402
#define IR_MAX_PROTO_PARAMS
Definition ir.h:682
struct _ir_ctx ir_ctx
Definition ir.h:550
#define IR_CONSTS_LIMIT_MIN
Definition ir.h:401
#define IR_OPT_INPUTS_SHIFT
Definition ir.h:383
#define IR_ALWAYS_INLINE
Definition ir.h:108
void ir_strtab_init(ir_strtab *strtab, uint32_t count, uint32_t buf_size)
Definition ir_strtab.c:93
struct _ir_use_list ir_use_list
Definition ir.h:551
struct _ir_insn ir_insn
#define IR_PHP_OPS(_)
Definition ir_php.h:11
struct _ir_list ir_list
#define IR_IS_CONST_OP(op)
Definition ir_private.h:887
#define IR_ALIGNED_SIZE(size, alignment)
Definition ir_private.h:59
struct _ir_addrtab_bucket ir_addrtab_bucket
struct _ir_hashtab_bucket ir_hashtab_bucket
#define IR_IS_SYM_CONST(op)
Definition ir_private.h:889
#define IR_ASSERT(x)
Definition ir_private.h:17
#define IR_IS_BB_START(op)
#define IR_INVALID_VAL
Definition ir_private.h:843
IR_ALWAYS_INLINE void ir_list_free(ir_list *l)
Definition ir_private.h:717
#define IR_MAX(a, b)
Definition ir_private.h:62
#define IR_INVALID_IDX
Definition ir_private.h:842
@ IR_FOLD_DO_EMIT
@ IR_FOLD_DO_COPY
@ IR_FOLD_DO_CONST
@ IR_FOLD_DO_CSE
@ IR_FOLD_DO_RESTART
IR_ALWAYS_INLINE uint32_t ir_insn_inputs_to_len(uint32_t inputs_count)
Definition ir_private.h:992
#define IR_INPUT_EDGES_COUNT(flags)
Definition ir_private.h:958
IR_ALWAYS_INLINE ir_ref ir_array_at(const ir_array *a, uint32_t i)
Definition ir_private.h:681
IR_ALWAYS_INLINE void ir_arena_free(ir_arena *arena)
Definition ir_private.h:246
IR_ALWAYS_INLINE bool ir_ref_is_true(ir_ctx *ctx, ir_ref ref)
Definition ir_private.h:910
#define IR_OP_FLAG_TERMINATOR
Definition ir_private.h:936
struct _ir_array ir_array
#define IR_OP_HAS_VAR_INPUTS(flags)
Definition ir_private.h:961
#define next(ls)
Definition minilua.c:2661
#define memmove(a, b, c)
unsigned const char * end
Definition php_ffi.h:51
unsigned const char * pos
Definition php_ffi.h:52
#define t4
#define t1
#define t3
#define t2
#define offsetof(STRUCTURE, FIELD)
unsigned char key[REFLECTION_KEY_LEN]
zend_constant * data
int mprotect(void *addr, size_t size, int protection)
Definition phpdbg_win.c:22
#define PROT_READ
Definition phpdbg_win.h:26
#define PROT_WRITE
Definition phpdbg_win.h:27
p
Definition session.c:1105
uint32_t size
Definition ir_private.h:651
ir_ref * refs
Definition ir_private.h:650
uint32_t * cfg_edges
Definition ir.h:593
ir_live_interval ** live_intervals
Definition ir.h:609
ir_hashtab * binding
Definition ir.h:586
ir_ref control
Definition ir.h:617
ir_insn fold_insn
Definition ir.h:585
ir_ref prev_const_chain[IR_LAST_TYPE]
Definition ir.h:645
ir_ref consts_count
Definition ir.h:577
uint32_t * entries
Definition ir.h:632
int32_t fixed_stack_frame_size
Definition ir.h:602
ir_ref fold_cse_limit
Definition ir.h:584
ir_ref * prev_ref
Definition ir.h:614
ir_strtab * fused_regs
Definition ir.h:613
ir_arena * arena
Definition ir.h:610
uint32_t * cfg_schedule
Definition ir.h:595
ir_snapshot_create_t snapshot_create
Definition ir.h:621
ir_block * cfg_blocks
Definition ir.h:592
ir_ref use_edges_count
Definition ir.h:589
ir_type ret_type
Definition ir.h:581
uint32_t * vregs
Definition ir.h:597
ir_strtab strtab
Definition ir.h:643
void * osr_entry_loads
Definition ir.h:633
ir_use_list * use_lists
Definition ir.h:587
ir_ref consts_limit
Definition ir.h:578
ir_regs * regs
Definition ir.h:612
ir_insn * ir_base
Definition ir.h:574
uint32_t flags
Definition ir.h:579
ir_ref insns_count
Definition ir.h:575
uint32_t * rules
Definition ir.h:596
ir_ref * use_edges
Definition ir.h:588
uint32_t * cfg_map
Definition ir.h:594
ir_ref prev_insn_chain[IR_LAST_FOLDABLE_OP+1]
Definition ir.h:644
ir_ref insns_limit
Definition ir.h:576
int32_t spill_base
Definition ir.h:599
void * data
Definition ir_private.h:852
uint32_t mask
Definition ir_private.h:853
uint32_t size
Definition ir_private.h:854
uint32_t pos
Definition ir_private.h:856
uint32_t count
Definition ir_private.h:855
ir_val val
Definition ir.h:477
ir_array a
Definition ir_private.h:703
uint32_t len
Definition ir_private.h:704
uint8_t param_types[5]
Definition ir.h:688
uint8_t ret_type
Definition ir.h:686
uint8_t flags
Definition ir.h:685
uint8_t params_count
Definition ir.h:687
void * data
Definition ir.h:486
$obj a
Definition test.php:84
uint64_t u64
Definition ir.h:411
int64_t i64
Definition ir.h:412
double d
Definition ir.h:410
#define MAP_FAILED
Definition zend_alloc.c:98
strlen(string $string)
execute_data func
zval * args
#define MAKE_NOP(opline)
#define snprintf
ZEND_API void(ZEND_FASTCALL *zend_touch_vm_stack_data)(void *vm_stack_data)
char * alloca()
#define EXPECTED(condition)
#define UNEXPECTED(condition)
zval * arg1
zval * arg2
zend_string * name
zval * arg3
op2
op1
zval * ret
zend_execute_data * call