php-internal-docs 8.4.8
Unofficial docs for php/php-src
Loading...
Searching...
No Matches
ir_fold.h
Go to the documentation of this file.
1/*
2 * IR - Lightweight JIT Compilation Framework
3 * (Folding engine rules)
4 * Copyright (C) 2022 Zend by Perforce.
5 * Authors: Dmitry Stogov <dmitry@php.net>
6 *
7 * Based on Mike Pall's implementation for LuaJIT.
8 */
9
10/* Constant Folding */
/* EQ of two integer/bool/char/addr constants of the same type:
 * same-width constants compare equal iff their raw 64-bit payloads do,
 * so one u64 comparison serves every integer flavour. */
IR_FOLD(EQ(C_BOOL, C_BOOL))
IR_FOLD(EQ(C_U8, C_U8))
IR_FOLD(EQ(C_U16, C_U16))
IR_FOLD(EQ(C_U32, C_U32))
IR_FOLD(EQ(C_U64, C_U64))
IR_FOLD(EQ(C_ADDR, C_ADDR))
IR_FOLD(EQ(C_CHAR, C_CHAR))
IR_FOLD(EQ(C_I8, C_I8))
IR_FOLD(EQ(C_I16, C_I16))
IR_FOLD(EQ(C_I32, C_I32))
IR_FOLD(EQ(C_I64, C_I64))
{
	IR_FOLD_BOOL(op1_insn->val.u64 == op2_insn->val.u64);
}
25
/* EQ of two DOUBLE constants: IEEE compare (NaN != NaN, -0.0 == +0.0). */
IR_FOLD(EQ(C_DOUBLE, C_DOUBLE))
{
	IR_FOLD_BOOL(op1_insn->val.d == op2_insn->val.d);
}
30
31IR_FOLD(EQ(C_FLOAT, C_FLOAT))
32{
33 IR_FOLD_BOOL(op1_insn->val.d == op2_insn->val.d);
34}
35
/* NE of two integer/bool/char/addr constants: raw 64-bit payload compare. */
IR_FOLD(NE(C_BOOL, C_BOOL))
IR_FOLD(NE(C_U8, C_U8))
IR_FOLD(NE(C_U16, C_U16))
IR_FOLD(NE(C_U32, C_U32))
IR_FOLD(NE(C_U64, C_U64))
IR_FOLD(NE(C_ADDR, C_ADDR))
IR_FOLD(NE(C_CHAR, C_CHAR))
IR_FOLD(NE(C_I8, C_I8))
IR_FOLD(NE(C_I16, C_I16))
IR_FOLD(NE(C_I32, C_I32))
IR_FOLD(NE(C_I64, C_I64))
{
	IR_FOLD_BOOL(op1_insn->val.u64 != op2_insn->val.u64);
}

/* NE of DOUBLE constants: IEEE compare (true when either operand is NaN). */
IR_FOLD(NE(C_DOUBLE, C_DOUBLE))
{
	IR_FOLD_BOOL(op1_insn->val.d != op2_insn->val.d);
}

/* NE of FLOAT constants. */
IR_FOLD(NE(C_FLOAT, C_FLOAT))
{
	IR_FOLD_BOOL(op1_insn->val.f != op2_insn->val.f);
}
60
/* LT: unsigned flavours compare the u64 payload... */
IR_FOLD(LT(C_BOOL, C_BOOL))
IR_FOLD(LT(C_U8, C_U8))
IR_FOLD(LT(C_U16, C_U16))
IR_FOLD(LT(C_U32, C_U32))
IR_FOLD(LT(C_U64, C_U64))
IR_FOLD(LT(C_ADDR, C_ADDR))
{
	IR_FOLD_BOOL(op1_insn->val.u64 < op2_insn->val.u64);
}

/* ...signed flavours compare the sign-extended i64 payload. */
IR_FOLD(LT(C_CHAR, C_CHAR))
IR_FOLD(LT(C_I8, C_I8))
IR_FOLD(LT(C_I16, C_I16))
IR_FOLD(LT(C_I32, C_I32))
IR_FOLD(LT(C_I64, C_I64))
{
	IR_FOLD_BOOL(op1_insn->val.i64 < op2_insn->val.i64);
}

/* Ordered FP compares: false when either operand is NaN. */
IR_FOLD(LT(C_DOUBLE, C_DOUBLE))
{
	IR_FOLD_BOOL(op1_insn->val.d < op2_insn->val.d);
}

IR_FOLD(LT(C_FLOAT, C_FLOAT))
{
	IR_FOLD_BOOL(op1_insn->val.f < op2_insn->val.f);
}
89
/* GE: unsigned payload compare for unsigned/addr/bool types. */
IR_FOLD(GE(C_BOOL, C_BOOL))
IR_FOLD(GE(C_U8, C_U8))
IR_FOLD(GE(C_U16, C_U16))
IR_FOLD(GE(C_U32, C_U32))
IR_FOLD(GE(C_U64, C_U64))
IR_FOLD(GE(C_ADDR, C_ADDR))
{
	IR_FOLD_BOOL(op1_insn->val.u64 >= op2_insn->val.u64);
}

/* Signed payload compare for signed/char types. */
IR_FOLD(GE(C_CHAR, C_CHAR))
IR_FOLD(GE(C_I8, C_I8))
IR_FOLD(GE(C_I16, C_I16))
IR_FOLD(GE(C_I32, C_I32))
IR_FOLD(GE(C_I64, C_I64))
{
	IR_FOLD_BOOL(op1_insn->val.i64 >= op2_insn->val.i64);
}

/* Ordered FP compares: false when either operand is NaN. */
IR_FOLD(GE(C_DOUBLE, C_DOUBLE))
{
	IR_FOLD_BOOL(op1_insn->val.d >= op2_insn->val.d);
}

IR_FOLD(GE(C_FLOAT, C_FLOAT))
{
	IR_FOLD_BOOL(op1_insn->val.f >= op2_insn->val.f);
}
118
/* LE: unsigned payload compare for unsigned/addr/bool types. */
IR_FOLD(LE(C_BOOL, C_BOOL))
IR_FOLD(LE(C_U8, C_U8))
IR_FOLD(LE(C_U16, C_U16))
IR_FOLD(LE(C_U32, C_U32))
IR_FOLD(LE(C_U64, C_U64))
IR_FOLD(LE(C_ADDR, C_ADDR))
{
	IR_FOLD_BOOL(op1_insn->val.u64 <= op2_insn->val.u64);
}

/* Signed payload compare for signed/char types. */
IR_FOLD(LE(C_CHAR, C_CHAR))
IR_FOLD(LE(C_I8, C_I8))
IR_FOLD(LE(C_I16, C_I16))
IR_FOLD(LE(C_I32, C_I32))
IR_FOLD(LE(C_I64, C_I64))
{
	IR_FOLD_BOOL(op1_insn->val.i64 <= op2_insn->val.i64);
}

/* Ordered FP compares: false when either operand is NaN. */
IR_FOLD(LE(C_DOUBLE, C_DOUBLE))
{
	IR_FOLD_BOOL(op1_insn->val.d <= op2_insn->val.d);
}

IR_FOLD(LE(C_FLOAT, C_FLOAT))
{
	IR_FOLD_BOOL(op1_insn->val.f <= op2_insn->val.f);
}
147
/* GT: unsigned payload compare for unsigned/addr/bool types. */
IR_FOLD(GT(C_BOOL, C_BOOL))
IR_FOLD(GT(C_U8, C_U8))
IR_FOLD(GT(C_U16, C_U16))
IR_FOLD(GT(C_U32, C_U32))
IR_FOLD(GT(C_U64, C_U64))
IR_FOLD(GT(C_ADDR, C_ADDR))
{
	IR_FOLD_BOOL(op1_insn->val.u64 > op2_insn->val.u64);
}

/* Signed payload compare for signed/char types. */
IR_FOLD(GT(C_CHAR, C_CHAR))
IR_FOLD(GT(C_I8, C_I8))
IR_FOLD(GT(C_I16, C_I16))
IR_FOLD(GT(C_I32, C_I32))
IR_FOLD(GT(C_I64, C_I64))
{
	IR_FOLD_BOOL(op1_insn->val.i64 > op2_insn->val.i64);
}

/* Ordered FP compares: false when either operand is NaN. */
IR_FOLD(GT(C_DOUBLE, C_DOUBLE))
{
	IR_FOLD_BOOL(op1_insn->val.d > op2_insn->val.d);
}

IR_FOLD(GT(C_FLOAT, C_FLOAT))
{
	IR_FOLD_BOOL(op1_insn->val.f > op2_insn->val.f);
}
176
/* ULT: on integers this is a plain unsigned compare (same rule for signed
 * patterns too — the "U" means unsigned interpretation of the bits). */
IR_FOLD(ULT(C_BOOL, C_BOOL))
IR_FOLD(ULT(C_U8, C_U8))
IR_FOLD(ULT(C_U16, C_U16))
IR_FOLD(ULT(C_U32, C_U32))
IR_FOLD(ULT(C_U64, C_U64))
IR_FOLD(ULT(C_ADDR, C_ADDR))
IR_FOLD(ULT(C_CHAR, C_CHAR))
IR_FOLD(ULT(C_I8, C_I8))
IR_FOLD(ULT(C_I16, C_I16))
IR_FOLD(ULT(C_I32, C_I32))
IR_FOLD(ULT(C_I64, C_I64))
{
	IR_FOLD_BOOL(op1_insn->val.u64 < op2_insn->val.u64);
}

/* On FP, ULT is the unordered compare: !(a >= b) is also true when either
 * operand is NaN, unlike the ordered LT above. */
IR_FOLD(ULT(C_DOUBLE, C_DOUBLE))
{
	IR_FOLD_BOOL(!(op1_insn->val.d >= op2_insn->val.d));
}

IR_FOLD(ULT(C_FLOAT, C_FLOAT))
{
	IR_FOLD_BOOL(!(op1_insn->val.f >= op2_insn->val.f));
}
201
/* UGE: unsigned interpretation of the bits for every integer type. */
IR_FOLD(UGE(C_BOOL, C_BOOL))
IR_FOLD(UGE(C_U8, C_U8))
IR_FOLD(UGE(C_U16, C_U16))
IR_FOLD(UGE(C_U32, C_U32))
IR_FOLD(UGE(C_U64, C_U64))
IR_FOLD(UGE(C_ADDR, C_ADDR))
IR_FOLD(UGE(C_CHAR, C_CHAR))
IR_FOLD(UGE(C_I8, C_I8))
IR_FOLD(UGE(C_I16, C_I16))
IR_FOLD(UGE(C_I32, C_I32))
IR_FOLD(UGE(C_I64, C_I64))
{
	IR_FOLD_BOOL(op1_insn->val.u64 >= op2_insn->val.u64);
}

/* On FP, UGE is unordered: !(a < b), true for NaN operands. */
IR_FOLD(UGE(C_DOUBLE, C_DOUBLE))
{
	IR_FOLD_BOOL(!(op1_insn->val.d < op2_insn->val.d));
}

IR_FOLD(UGE(C_FLOAT, C_FLOAT))
{
	IR_FOLD_BOOL(!(op1_insn->val.f < op2_insn->val.f));
}
226
/* ULE: unsigned interpretation of the bits for every integer type. */
IR_FOLD(ULE(C_BOOL, C_BOOL))
IR_FOLD(ULE(C_U8, C_U8))
IR_FOLD(ULE(C_U16, C_U16))
IR_FOLD(ULE(C_U32, C_U32))
IR_FOLD(ULE(C_U64, C_U64))
IR_FOLD(ULE(C_ADDR, C_ADDR))
IR_FOLD(ULE(C_CHAR, C_CHAR))
IR_FOLD(ULE(C_I8, C_I8))
IR_FOLD(ULE(C_I16, C_I16))
IR_FOLD(ULE(C_I32, C_I32))
IR_FOLD(ULE(C_I64, C_I64))
{
	IR_FOLD_BOOL(op1_insn->val.u64 <= op2_insn->val.u64);
}

/* On FP, ULE is unordered: !(a > b), true for NaN operands. */
IR_FOLD(ULE(C_DOUBLE, C_DOUBLE))
{
	IR_FOLD_BOOL(!(op1_insn->val.d > op2_insn->val.d));
}

IR_FOLD(ULE(C_FLOAT, C_FLOAT))
{
	IR_FOLD_BOOL(!(op1_insn->val.f > op2_insn->val.f));
}
251
/* UGT: unsigned interpretation of the bits for every integer type. */
IR_FOLD(UGT(C_BOOL, C_BOOL))
IR_FOLD(UGT(C_U8, C_U8))
IR_FOLD(UGT(C_U16, C_U16))
IR_FOLD(UGT(C_U32, C_U32))
IR_FOLD(UGT(C_U64, C_U64))
IR_FOLD(UGT(C_ADDR, C_ADDR))
IR_FOLD(UGT(C_CHAR, C_CHAR))
IR_FOLD(UGT(C_I8, C_I8))
IR_FOLD(UGT(C_I16, C_I16))
IR_FOLD(UGT(C_I32, C_I32))
IR_FOLD(UGT(C_I64, C_I64))
{
	IR_FOLD_BOOL(op1_insn->val.u64 > op2_insn->val.u64);
}

/* On FP, UGT is unordered: !(a <= b), true for NaN operands. */
IR_FOLD(UGT(C_DOUBLE, C_DOUBLE))
{
	IR_FOLD_BOOL(!(op1_insn->val.d <= op2_insn->val.d));
}

IR_FOLD(UGT(C_FLOAT, C_FLOAT))
{
	IR_FOLD_BOOL(!(op1_insn->val.f <= op2_insn->val.f));
}
276
/* ADD of constants. Per-width rules so the result wraps at the width of the
 * operand type before being re-emitted as a constant. */
IR_FOLD(ADD(C_U8, C_U8))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_U(op1_insn->val.u8 + op2_insn->val.u8);
}

IR_FOLD(ADD(C_U16, C_U16))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_U(op1_insn->val.u16 + op2_insn->val.u16);
}

IR_FOLD(ADD(C_U32, C_U32))
{
	/* on 32-bit targets a U32 addition may also produce an ADDR result */
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type || (sizeof(void*) == 4 && IR_OPT_TYPE(opt) == IR_ADDR));
	IR_FOLD_CONST_U(op1_insn->val.u32 + op2_insn->val.u32);
}

IR_FOLD(ADD(C_U64, C_U64))
{
	/* on 64-bit targets a U64 addition may also produce an ADDR result */
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type || (sizeof(void*) == 8 && IR_OPT_TYPE(opt) == IR_ADDR));
	IR_FOLD_CONST_U(op1_insn->val.u64 + op2_insn->val.u64);
}

/* Pointer arithmetic: ADDR may be combined with same-width (u)intptr ints. */
IR_FOLD(ADD(C_ADDR, C_ADDR))
IR_FOLD(ADD(C_ADDR, C_INTPTR))
IR_FOLD(ADD(C_ADDR, C_UINTPTR))
IR_FOLD(ADD(C_INTPTR, C_ADDR))
IR_FOLD(ADD(C_UINTPTR, C_ADDR))
{
// IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_U(op1_insn->val.addr + op2_insn->val.addr);
}

IR_FOLD(ADD(C_I8, C_I8))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_I(op1_insn->val.i8 + op2_insn->val.i8);
}

IR_FOLD(ADD(C_I16, C_I16))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_I(op1_insn->val.i16 + op2_insn->val.i16);
}

IR_FOLD(ADD(C_I32, C_I32))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type || (sizeof(void*) == 4 && IR_OPT_TYPE(opt) == IR_ADDR));
	/* Here and below we use "unsigned math" to prevent undefined signed overflow behavior */
	IR_FOLD_CONST_I((int32_t)(op1_insn->val.u32 + op2_insn->val.u32));
}

IR_FOLD(ADD(C_I64, C_I64))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type || (sizeof(void*) == 8 && IR_OPT_TYPE(opt) == IR_ADDR));
	IR_FOLD_CONST_I(op1_insn->val.u64 + op2_insn->val.u64);
}

IR_FOLD(ADD(C_DOUBLE, C_DOUBLE))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_D(op1_insn->val.d + op2_insn->val.d);
}

IR_FOLD(ADD(C_FLOAT, C_FLOAT))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_F(op1_insn->val.f + op2_insn->val.f);
}
347
/* SUB of constants, one rule per operand width/signedness. */
IR_FOLD(SUB(C_U8, C_U8))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_U(op1_insn->val.u8 - op2_insn->val.u8);
}

IR_FOLD(SUB(C_U16, C_U16))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_U(op1_insn->val.u16 - op2_insn->val.u16);
}

IR_FOLD(SUB(C_U32, C_U32))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_U(op1_insn->val.u32 - op2_insn->val.u32);
}

IR_FOLD(SUB(C_U64, C_U64))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_U(op1_insn->val.u64 - op2_insn->val.u64);
}

/* Pointer arithmetic: ADDR combined with same-width (u)intptr ints. */
IR_FOLD(SUB(C_ADDR, C_ADDR))
IR_FOLD(SUB(C_ADDR, C_INTPTR))
IR_FOLD(SUB(C_ADDR, C_UINTPTR))
IR_FOLD(SUB(C_INTPTR, C_ADDR))
IR_FOLD(SUB(C_UINTPTR, C_ADDR))
{
// IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_U(op1_insn->val.addr - op2_insn->val.addr);
}

IR_FOLD(SUB(C_I8, C_I8))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_I(op1_insn->val.i8 - op2_insn->val.i8);
}

IR_FOLD(SUB(C_I16, C_I16))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_I(op1_insn->val.i16 - op2_insn->val.i16);
}

IR_FOLD(SUB(C_I32, C_I32))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	/* unsigned math avoids signed-overflow UB; result re-signed via cast */
	IR_FOLD_CONST_I((int32_t)(op1_insn->val.u32 - op2_insn->val.u32));
}

IR_FOLD(SUB(C_I64, C_I64))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_I(op1_insn->val.u64 - op2_insn->val.u64);
}

IR_FOLD(SUB(C_DOUBLE, C_DOUBLE))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_D(op1_insn->val.d - op2_insn->val.d);
}

IR_FOLD(SUB(C_FLOAT, C_FLOAT))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_F(op1_insn->val.f - op2_insn->val.f);
}
417
/* MUL of constants, one rule per operand width/signedness. */
IR_FOLD(MUL(C_U8, C_U8))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_U(op1_insn->val.u8 * op2_insn->val.u8);
}

IR_FOLD(MUL(C_U16, C_U16))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_U(op1_insn->val.u16 * op2_insn->val.u16);
}

IR_FOLD(MUL(C_U32, C_U32))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_U(op1_insn->val.u32 * op2_insn->val.u32);
}

IR_FOLD(MUL(C_U64, C_U64))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_U(op1_insn->val.u64 * op2_insn->val.u64);
}

/* Pointer-width products (ADDR combined with same-width ints). */
IR_FOLD(MUL(C_ADDR, C_ADDR))
IR_FOLD(MUL(C_ADDR, C_INTPTR))
IR_FOLD(MUL(C_ADDR, C_UINTPTR))
IR_FOLD(MUL(C_INTPTR, C_ADDR))
IR_FOLD(MUL(C_UINTPTR, C_ADDR))
{
// IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_U(op1_insn->val.addr * op2_insn->val.addr);
}

IR_FOLD(MUL(C_I8, C_I8))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_I(op1_insn->val.i8 * op2_insn->val.i8);
}

IR_FOLD(MUL(C_I16, C_I16))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_I(op1_insn->val.i16 * op2_insn->val.i16);
}

IR_FOLD(MUL(C_I32, C_I32))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	/* unsigned math avoids signed-overflow UB; result re-signed via cast */
	IR_FOLD_CONST_I((int32_t)(op1_insn->val.u32 * op2_insn->val.u32));
}

IR_FOLD(MUL(C_I64, C_I64))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_I(op1_insn->val.u64 * op2_insn->val.u64);
}

IR_FOLD(MUL(C_DOUBLE, C_DOUBLE))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_D(op1_insn->val.d * op2_insn->val.d);
}

IR_FOLD(MUL(C_FLOAT, C_FLOAT))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_F(op1_insn->val.f * op2_insn->val.f);
}
487
488IR_FOLD(DIV(C_U8, C_U8))
489IR_FOLD(DIV(C_U16, C_U16))
490IR_FOLD(DIV(C_U32, C_U32))
491IR_FOLD(DIV(C_U64, C_U64))
492IR_FOLD(DIV(C_ADDR, C_ADDR))
493{
494 IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
495 if (op2_insn->val.u64 == 0) {
496 /* division by zero */
498 }
499 IR_FOLD_CONST_U(op1_insn->val.u64 / op2_insn->val.u64);
500}
501
502IR_FOLD(DIV(C_I8, C_I8))
503IR_FOLD(DIV(C_I16, C_I16))
504IR_FOLD(DIV(C_I32, C_I32))
505IR_FOLD(DIV(C_I64, C_I64))
506{
507 IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
508 if (op2_insn->val.i64 == 0) {
509 /* division by zero */
511 }
512 IR_FOLD_CONST_I(op1_insn->val.i64 / op2_insn->val.i64);
513}
514
/* FP division folds unconditionally: IEEE defines x/0.0 (inf/NaN). */
IR_FOLD(DIV(C_DOUBLE, C_DOUBLE))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_D(op1_insn->val.d / op2_insn->val.d);
}

IR_FOLD(DIV(C_FLOAT, C_FLOAT))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_F(op1_insn->val.f / op2_insn->val.f);
}
526
527IR_FOLD(MOD(C_U8, C_U8))
528IR_FOLD(MOD(C_U16, C_U16))
529IR_FOLD(MOD(C_U32, C_U32))
530IR_FOLD(MOD(C_U64, C_U64))
531IR_FOLD(MOD(C_ADDR, C_ADDR))
532{
533 IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
534 if (op2_insn->val.u64 == 0) {
535 /* division by zero */
537 }
538 IR_FOLD_CONST_U(op1_insn->val.u64 % op2_insn->val.u64);
539}
540
541IR_FOLD(MOD(C_I8, C_I8))
542IR_FOLD(MOD(C_I16, C_I16))
543IR_FOLD(MOD(C_I32, C_I32))
544IR_FOLD(MOD(C_I64, C_I64))
545{
546 IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
547 if (op2_insn->val.i64 == 0) {
548 /* division by zero */
550 }
551 IR_FOLD_CONST_I(op1_insn->val.i64 % op2_insn->val.i64);
552}
553
/* NEG of constants. Integer negation is done on the unsigned payload to
 * avoid signed-overflow UB for the minimum representable value. */
IR_FOLD(NEG(C_I8))
IR_FOLD(NEG(C_I16))
IR_FOLD(NEG(C_I32))
IR_FOLD(NEG(C_I64))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_I(-op1_insn->val.u64);
}

IR_FOLD(NEG(C_DOUBLE))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_D(-op1_insn->val.d);
}

IR_FOLD(NEG(C_FLOAT))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_F(-op1_insn->val.f);
}
574
575IR_FOLD(ABS(C_I8))
576IR_FOLD(ABS(C_I16))
577IR_FOLD(ABS(C_I32))
578IR_FOLD(ABS(C_I64))
579{
580 IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
581 if (op1_insn->val.i64 >= 0) {
583 } else {
584 IR_FOLD_CONST_I(-op1_insn->val.u64);
585 }
586}
587
/* ABS of FP constants via the libm fabs/fabsf (handles -0.0 and NaN sign). */
IR_FOLD(ABS(C_DOUBLE))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_D(fabs(op1_insn->val.d));
}

IR_FOLD(ABS(C_FLOAT))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_F(fabsf(op1_insn->val.f));
}
599
600IR_FOLD(ADD_OV(C_U8, C_U8))
601IR_FOLD(ADD_OV(C_U16, C_U16))
602IR_FOLD(ADD_OV(C_U32, C_U32))
603IR_FOLD(ADD_OV(C_U64, C_U64))
604{
605 ir_type type = IR_OPT_TYPE(opt);
606 uint64_t max = ((uint64_t)0xffffffffffffffff) >> (64 - ir_type_size[type] * 8);
607 IR_ASSERT(type == op1_insn->type);
608 if (op1_insn->val.u64 > max - op2_insn->val.u64) {
610 }
611 IR_FOLD_CONST_U(op1_insn->val.u64 + op2_insn->val.u64);
612}
613
614IR_FOLD(ADD_OV(C_I8, C_I8))
615IR_FOLD(ADD_OV(C_I16, C_I16))
616IR_FOLD(ADD_OV(C_I32, C_I32))
617IR_FOLD(ADD_OV(C_I64, C_I64))
618{
619 ir_type type = IR_OPT_TYPE(opt);
620 int64_t max = ((uint64_t)0x7fffffffffffffff) >> (64 - ir_type_size[type] * 8);
621 int64_t min = - max - 1;
622 IR_ASSERT(type == op1_insn->type);
623 if ((op2_insn->val.i64 > 0 && op1_insn->val.i64 > max - op2_insn->val.i64)
624 || (op2_insn->val.i64 < 0 && op1_insn->val.i64 < min - op2_insn->val.i64)) {
626 }
627 IR_FOLD_CONST_I(op1_insn->val.i64 + op2_insn->val.i64);
628}
629
630IR_FOLD(SUB_OV(C_U8, C_U8))
631IR_FOLD(SUB_OV(C_U16, C_U16))
632IR_FOLD(SUB_OV(C_U32, C_U32))
633IR_FOLD(SUB_OV(C_U64, C_U64))
634{
635 IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
636 if (op2_insn->val.u64 > op1_insn->val.u64) {
638 }
639 IR_FOLD_CONST_U(op1_insn->val.u64 - op2_insn->val.u64);
640}
641
642IR_FOLD(SUB_OV(C_I8, C_I8))
643IR_FOLD(SUB_OV(C_I16, C_I16))
644IR_FOLD(SUB_OV(C_I32, C_I32))
645IR_FOLD(SUB_OV(C_I64, C_I64))
646{
647 ir_type type = IR_OPT_TYPE(opt);
648 int64_t max = ((uint64_t)0x7fffffffffffffff) >> (64 - ir_type_size[type] * 8);
649 int64_t min = - max - 1;
650 IR_ASSERT(type == op1_insn->type);
651 if ((op2_insn->val.i64 > 0 && op1_insn->val.i64 < min + op2_insn->val.i64)
652 || (op2_insn->val.i64 < 0 && op1_insn->val.i64 > max + op2_insn->val.i64)) {
654 }
655 IR_FOLD_CONST_I(op1_insn->val.i64 - op2_insn->val.i64);
656}
657
658IR_FOLD(MUL_OV(C_U8, C_U8))
659IR_FOLD(MUL_OV(C_U16, C_U16))
660IR_FOLD(MUL_OV(C_U32, C_U32))
661IR_FOLD(MUL_OV(C_U64, C_U64))
662{
663 ir_type type = IR_OPT_TYPE(opt);
664 uint64_t max = ((uint64_t)0xffffffffffffffff) >> (64 - ir_type_size[type] * 8);
665 uint64_t res;
666 IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
667 res = op1_insn->val.u64 * op2_insn->val.u64;
668 if (op1_insn->val.u64 != 0 && res / op1_insn->val.u64 != op2_insn->val.u64 && res <= max) {
670 }
672}
673
674IR_FOLD(MUL_OV(C_I8, C_I8))
675IR_FOLD(MUL_OV(C_I16, C_I16))
676IR_FOLD(MUL_OV(C_I32, C_I32))
677IR_FOLD(MUL_OV(C_I64, C_I64))
678{
679 ir_type type = IR_OPT_TYPE(opt);
680 int64_t max = ((uint64_t)0x7fffffffffffffff) >> (64 - ir_type_size[type] * 8);
681 int64_t min = - max - 1;
682 int64_t res;
683 IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
684 res = op1_insn->val.u64 * op2_insn->val.u64;
685 if (op1_insn->val.i64 != 0 && res / op1_insn->val.i64 != op2_insn->val.i64 && res >= min && res <= max) {
687 }
689}
690
691IR_FOLD(OVERFLOW(_))
692{
693 if (op1_insn->op != IR_ADD_OV && op1_insn->op != IR_SUB_OV && op1_insn->op != IR_MUL_OV) {
695 }
697}
698
/* NOT: logical negation for BOOL, bitwise complement for integer types,
 * performed at the operand's width. */
IR_FOLD(NOT(C_BOOL))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_BOOL(!op1_insn->val.u64);
}

IR_FOLD(NOT(C_U8))
IR_FOLD(NOT(C_CHAR))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_U(~op1_insn->val.u8);
}

IR_FOLD(NOT(C_I8))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_I(~op1_insn->val.i8);
}

IR_FOLD(NOT(C_U16))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_U(~op1_insn->val.u16);
}

IR_FOLD(NOT(C_I16))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_I(~op1_insn->val.i16);
}

IR_FOLD(NOT(C_U32))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_U(~op1_insn->val.u32);
}

IR_FOLD(NOT(C_I32))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_I(~op1_insn->val.i32);
}

/* 64-bit complement is identical for signed and unsigned payloads. */
IR_FOLD(NOT(C_U64))
IR_FOLD(NOT(C_I64))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_U(~op1_insn->val.u64);
}
748
/* OR: logical for BOOL, bitwise on the full 64-bit payload for integers
 * (width doesn't matter for | since high bits of both operands agree). */
IR_FOLD(OR(C_BOOL, C_BOOL))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_BOOL(op1_insn->val.b || op2_insn->val.b);
}

IR_FOLD(OR(C_CHAR, C_CHAR))
IR_FOLD(OR(C_U8, C_U8))
IR_FOLD(OR(C_U16, C_U16))
IR_FOLD(OR(C_U32, C_U32))
IR_FOLD(OR(C_U64, C_U64))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_U(op1_insn->val.u64 | op2_insn->val.u64);
}

IR_FOLD(OR(C_I8, C_I8))
IR_FOLD(OR(C_I16, C_I16))
IR_FOLD(OR(C_I32, C_I32))
IR_FOLD(OR(C_I64, C_I64))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_I(op1_insn->val.i64 | op2_insn->val.i64);
}

/* AND: logical for BOOL, bitwise payload-wide for integers. */
IR_FOLD(AND(C_BOOL, C_BOOL))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_BOOL(op1_insn->val.b && op2_insn->val.b);
}

IR_FOLD(AND(C_CHAR, C_CHAR))
IR_FOLD(AND(C_U8, C_U8))
IR_FOLD(AND(C_U16, C_U16))
IR_FOLD(AND(C_U32, C_U32))
IR_FOLD(AND(C_U64, C_U64))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_U(op1_insn->val.u64 & op2_insn->val.u64);
}

IR_FOLD(AND(C_I8, C_I8))
IR_FOLD(AND(C_I16, C_I16))
IR_FOLD(AND(C_I32, C_I32))
IR_FOLD(AND(C_I64, C_I64))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_I(op1_insn->val.i64 & op2_insn->val.i64);
}
798
/* XOR: logical inequality for BOOL, bitwise per-width for integers (XOR is
 * done at the operand width, unlike OR/AND, since ^ of sign-extended
 * payloads would flip the high bits). */
IR_FOLD(XOR(C_BOOL, C_BOOL))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_BOOL(op1_insn->val.b != op2_insn->val.b);
}

IR_FOLD(XOR(C_U8, C_U8))
IR_FOLD(XOR(C_CHAR, C_CHAR))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_U(op1_insn->val.u8 ^ op2_insn->val.u8);
}

IR_FOLD(XOR(C_I8, C_I8))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_I(op1_insn->val.i8 ^ op2_insn->val.i8);
}

IR_FOLD(XOR(C_U16, C_U16))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_U(op1_insn->val.u16 ^ op2_insn->val.u16);
}

IR_FOLD(XOR(C_I16, C_I16))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_I(op1_insn->val.i16 ^ op2_insn->val.i16);
}

IR_FOLD(XOR(C_U32, C_U32))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_U(op1_insn->val.u32 ^ op2_insn->val.u32);
}

IR_FOLD(XOR(C_I32, C_I32))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_I(op1_insn->val.i32 ^ op2_insn->val.i32);
}

IR_FOLD(XOR(C_U64, C_U64))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_U(op1_insn->val.u64 ^ op2_insn->val.u64);
}

IR_FOLD(XOR(C_I64, C_I64))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_I(op1_insn->val.i64 ^ op2_insn->val.i64);
}
853
/* SHL of constants at the operand width.
 * NOTE(review): the shift count is not masked here; a count >= the C
 * promoted-operand width would be UB in the compiler — presumably upstream
 * guarantees in-range counts before folding, TODO confirm. */
IR_FOLD(SHL(C_U8, C_U8))
IR_FOLD(SHL(C_CHAR, C_CHAR))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_U(op1_insn->val.u8 << op2_insn->val.u8);
}

IR_FOLD(SHL(C_I8, C_I8))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_I(op1_insn->val.i8 << op2_insn->val.i8);
}

IR_FOLD(SHL(C_U16, C_U16))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_U(op1_insn->val.u16 << op2_insn->val.u16);
}

IR_FOLD(SHL(C_I16, C_I16))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_I(op1_insn->val.i16 << op2_insn->val.i16);
}

IR_FOLD(SHL(C_U32, C_U32))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_U(op1_insn->val.u32 << op2_insn->val.u32);
}

IR_FOLD(SHL(C_I32, C_I32))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_I(op1_insn->val.i32 << op2_insn->val.i32);
}

/* 64-bit left shift is the same bit pattern for signed and unsigned. */
IR_FOLD(SHL(C_U64, C_U64))
IR_FOLD(SHL(C_I64, C_I64))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_U(op1_insn->val.u64 << op2_insn->val.u64);
}
897
/* SHR (logical right shift): always shifts the unsigned bits; for signed
 * result types the shifted value is re-signed with a cast. */
IR_FOLD(SHR(C_U8, C_U8))
IR_FOLD(SHR(C_CHAR, C_CHAR))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_U(op1_insn->val.u8 >> op2_insn->val.u8);
}

IR_FOLD(SHR(C_I8, C_I8))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_I((int8_t)(op1_insn->val.u8 >> op2_insn->val.u8));
}

IR_FOLD(SHR(C_U16, C_U16))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_U(op1_insn->val.u16 >> op2_insn->val.u16);
}
916
917IR_FOLD(SHR(C_I16, C_I16))
918{
919 IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
920 IR_FOLD_CONST_U((int16_t)(op1_insn->val.u16 >> op2_insn->val.u16));
921}
922
/* SHR of a U32 constant: plain logical shift. */
IR_FOLD(SHR(C_U32, C_U32))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_U(op1_insn->val.u32 >> op2_insn->val.u32);
}
928
929IR_FOLD(SHR(C_I32, C_I32))
930{
931 IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
932 IR_FOLD_CONST_U((int32_t)(op1_insn->val.u32 >> op2_insn->val.u32));
933}
934
/* 64-bit logical right shift: identical bit pattern for U64 and I64. */
IR_FOLD(SHR(C_U64, C_U64))
IR_FOLD(SHR(C_I64, C_I64))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_U(op1_insn->val.u64 >> op2_insn->val.u64);
}
941
/* SAR (arithmetic right shift): always shifts the signed bits; unsigned
 * result types re-cast the shifted value.
 * NOTE(review): >> on a negative signed value is implementation-defined in
 * C (arithmetic on all compilers this project targets, presumably) — and an
 * out-of-range shift count would be UB; assumed in-range, TODO confirm. */
IR_FOLD(SAR(C_U8, C_U8))
IR_FOLD(SAR(C_CHAR, C_CHAR))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_U((uint8_t)(op1_insn->val.i8 >> op2_insn->val.i8));
}

IR_FOLD(SAR(C_I8, C_I8))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_I(op1_insn->val.i8 >> op2_insn->val.i8);
}

IR_FOLD(SAR(C_U16, C_U16))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_U((uint16_t)(op1_insn->val.i16 >> op2_insn->val.i16));
}

IR_FOLD(SAR(C_I16, C_I16))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_I(op1_insn->val.i16 >> op2_insn->val.i16);
}

IR_FOLD(SAR(C_U32, C_U32))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_U((uint32_t)(op1_insn->val.i32 >> op2_insn->val.i32));
}

IR_FOLD(SAR(C_I32, C_I32))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_I(op1_insn->val.i32 >> op2_insn->val.i32);
}

IR_FOLD(SAR(C_U64, C_U64))
IR_FOLD(SAR(C_I64, C_I64))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_I(op1_insn->val.i64 >> op2_insn->val.i64);
}
985
/* ROL/ROR: bit rotations delegated to the ir_rol*/ /* helpers at the operand
 * width; signed result types re-sign the rotated bits with a cast. */
IR_FOLD(ROL(C_U8, C_U8))
IR_FOLD(ROL(C_CHAR, C_CHAR))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_U(ir_rol8(op1_insn->val.u8, op2_insn->val.u8));
}

IR_FOLD(ROL(C_I8, C_I8))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_I((int8_t)ir_rol8(op1_insn->val.u8, op2_insn->val.u8));
}

IR_FOLD(ROL(C_U16, C_U16))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_U(ir_rol16(op1_insn->val.u16, op2_insn->val.u16));
}

IR_FOLD(ROL(C_I16, C_I16))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_I((int16_t)ir_rol16(op1_insn->val.u16, op2_insn->val.u16));
}

IR_FOLD(ROL(C_U32, C_U32))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_U(ir_rol32(op1_insn->val.u32, op2_insn->val.u32));
}

IR_FOLD(ROL(C_I32, C_I32))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_I((int32_t)ir_rol32(op1_insn->val.u32, op2_insn->val.u32));
}

IR_FOLD(ROL(C_U64, C_U64))
IR_FOLD(ROL(C_I64, C_I64))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_U(ir_rol64(op1_insn->val.u64, op2_insn->val.u64));
}

IR_FOLD(ROR(C_U8, C_U8))
IR_FOLD(ROR(C_CHAR, C_CHAR))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_U(ir_ror8(op1_insn->val.u8, op2_insn->val.u8));
}

IR_FOLD(ROR(C_I8, C_I8))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_I((int8_t)ir_ror8(op1_insn->val.u8, op2_insn->val.u8));
}

IR_FOLD(ROR(C_U16, C_U16))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_U(ir_ror16(op1_insn->val.u16, op2_insn->val.u16));
}

IR_FOLD(ROR(C_I16, C_I16))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_I((int16_t)ir_ror16(op1_insn->val.u16, op2_insn->val.u16));
}

IR_FOLD(ROR(C_U32, C_U32))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_U(ir_ror32(op1_insn->val.u32, op2_insn->val.u32));
}

IR_FOLD(ROR(C_I32, C_I32))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_I((int32_t)ir_ror32(op1_insn->val.u32, op2_insn->val.u32));
}

IR_FOLD(ROR(C_U64, C_U64))
IR_FOLD(ROR(C_I64, C_I64))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	IR_FOLD_CONST_U(ir_ror64(op1_insn->val.u64, op2_insn->val.u64));
}
1073
1074//IR_FOLD(BSWAP(CONST))
1075//TODO: bswap
1076
/* MIN: instead of creating a new constant, COPY whichever operand wins.
 * Unsigned flavours compare the raw u64 payload; ties keep op1. */
IR_FOLD(MIN(C_BOOL, C_BOOL))
IR_FOLD(MIN(C_U8, C_U8))
IR_FOLD(MIN(C_U16, C_U16))
IR_FOLD(MIN(C_U32, C_U32))
IR_FOLD(MIN(C_U64, C_U64))
IR_FOLD(MIN(C_ADDR, C_ADDR))
{
	IR_FOLD_COPY(op1_insn->val.u64 <= op2_insn->val.u64 ? op1 : op2);
}
1086
1087IR_FOLD(MIN(C_CHAR, C_CHAR))
1088IR_FOLD(MIN(C_I8, C_U8))
1089IR_FOLD(MIN(C_I16, C_U16))
1090IR_FOLD(MIN(C_I32, C_U32))
1091IR_FOLD(MIN(C_I64, C_U64))
1092{
1093 IR_FOLD_COPY(op1_insn->val.i64 <= op2_insn->val.i64 ? op1 : op2);
1094}
1095
/* FP MIN: <= ternary keeps op1 on ties and yields op2 when op1 is NaN
 * (the comparison is false for NaN operands). */
IR_FOLD(MIN(C_DOUBLE, C_DOUBLE))
{
	IR_FOLD_COPY(op1_insn->val.d <= op2_insn->val.d ? op1 : op2);
}

IR_FOLD(MIN(C_FLOAT, C_FLOAT))
{
	IR_FOLD_COPY(op1_insn->val.f <= op2_insn->val.f ? op1 : op2);
}
1105
/* MAX: COPY the winning operand; unsigned payload compare, ties keep op1. */
IR_FOLD(MAX(C_BOOL, C_BOOL))
IR_FOLD(MAX(C_U8, C_U8))
IR_FOLD(MAX(C_U16, C_U16))
IR_FOLD(MAX(C_U32, C_U32))
IR_FOLD(MAX(C_U64, C_U64))
IR_FOLD(MAX(C_ADDR, C_ADDR))
{
	IR_FOLD_COPY(op1_insn->val.u64 >= op2_insn->val.u64 ? op1 : op2);
}
1115
1116IR_FOLD(MAX(C_CHAR, C_CHAR))
1117IR_FOLD(MAX(C_I8, C_U8))
1118IR_FOLD(MAX(C_I16, C_U16))
1119IR_FOLD(MAX(C_I32, C_U32))
1120IR_FOLD(MAX(C_I64, C_U64))
1121{
1122 IR_FOLD_COPY(op1_insn->val.i64 >= op2_insn->val.i64 ? op1 : op2);
1123}
1124
/* FP MAX: >= ternary keeps op1 on ties and yields op2 when op1 is NaN. */
IR_FOLD(MAX(C_DOUBLE, C_DOUBLE))
{
	IR_FOLD_COPY(op1_insn->val.d >= op2_insn->val.d ? op1 : op2);
}

IR_FOLD(MAX(C_FLOAT, C_FLOAT))
{
	IR_FOLD_COPY(op1_insn->val.f >= op2_insn->val.f ? op1 : op2);
}
1134
1135IR_FOLD(SEXT(C_I8))
1136IR_FOLD(SEXT(C_U8))
1137IR_FOLD(SEXT(C_BOOL))
1138{
1141 IR_FOLD_CONST_I((int64_t)op1_insn->val.i8);
1142}
1143
/* SEXT of 16/32-bit constants: sign-extend the signed payload; the target
 * type must be strictly wider than the source. */
IR_FOLD(SEXT(C_I16))
IR_FOLD(SEXT(C_U16))
{
	IR_ASSERT(ir_type_size[IR_OPT_TYPE(opt)] > ir_type_size[op1_insn->type]);
	IR_FOLD_CONST_I((int64_t)op1_insn->val.i16);
}

IR_FOLD(SEXT(C_I32))
IR_FOLD(SEXT(C_U32))
{
	IR_ASSERT(ir_type_size[IR_OPT_TYPE(opt)] > ir_type_size[op1_insn->type]);
	IR_FOLD_CONST_I((int64_t)op1_insn->val.i32);
}
1159
/* ZEXT: zero-extend the unsigned payload; target must be strictly wider. */
IR_FOLD(ZEXT(C_I8))
IR_FOLD(ZEXT(C_U8))
IR_FOLD(ZEXT(C_BOOL))
{
	IR_ASSERT(ir_type_size[IR_OPT_TYPE(opt)] > ir_type_size[op1_insn->type]);
	IR_FOLD_CONST_U((uint64_t)op1_insn->val.u8);
}

IR_FOLD(ZEXT(C_I16))
IR_FOLD(ZEXT(C_U16))
{
	IR_ASSERT(ir_type_size[IR_OPT_TYPE(opt)] > ir_type_size[op1_insn->type]);
	IR_FOLD_CONST_U((uint64_t)op1_insn->val.u16);
}

IR_FOLD(ZEXT(C_I32))
IR_FOLD(ZEXT(C_U32))
{
	IR_ASSERT(ir_type_size[IR_OPT_TYPE(opt)] > ir_type_size[op1_insn->type]);
	IR_FOLD_CONST_U((uint64_t)op1_insn->val.u32);
}
1184
/* TRUNC: narrow an integer constant by reading the low-order union member
 * selected by the RESULT type. Each IR_FOLD_CONST_* ends the rule, so no
 * break is needed after the cases; default asserts (TRUNC to an
 * unsupported type) and deliberately falls through in release builds. */
IR_FOLD(TRUNC(C_I16))
IR_FOLD(TRUNC(C_I32))
IR_FOLD(TRUNC(C_I64))
IR_FOLD(TRUNC(C_U16))
IR_FOLD(TRUNC(C_U32))
IR_FOLD(TRUNC(C_U64))
{
	switch (IR_OPT_TYPE(opt)) {
		default:
			IR_ASSERT(0);
		case IR_I8:
			IR_FOLD_CONST_I(op1_insn->val.i8);
		case IR_I16:
			IR_FOLD_CONST_I(op1_insn->val.i16);
		case IR_I32:
			IR_FOLD_CONST_I(op1_insn->val.i32);
		case IR_U8:
			IR_FOLD_CONST_U(op1_insn->val.u8);
		case IR_U16:
			IR_FOLD_CONST_U(op1_insn->val.u16);
		case IR_U32:
			IR_FOLD_CONST_U(op1_insn->val.u32);
	}
}
1211
1212
/* BITCAST: reinterpret the constant's bits as the (same-size) result type
 * by reading the matching union member. BOOL normalizes to 0/1. Each
 * IR_FOLD_* action ends the rule, so the cases need no break. */
IR_FOLD(BITCAST(C_I8))
IR_FOLD(BITCAST(C_I16))
IR_FOLD(BITCAST(C_I32))
IR_FOLD(BITCAST(C_I64))
IR_FOLD(BITCAST(C_U8))
IR_FOLD(BITCAST(C_U16))
IR_FOLD(BITCAST(C_U32))
IR_FOLD(BITCAST(C_U64))
IR_FOLD(BITCAST(C_FLOAT))
IR_FOLD(BITCAST(C_DOUBLE))
IR_FOLD(BITCAST(C_BOOL))
IR_FOLD(BITCAST(C_CHAR))
IR_FOLD(BITCAST(C_ADDR))
{
	IR_ASSERT(ir_type_size[IR_OPT_TYPE(opt)] == ir_type_size[op1_insn->type]);
	switch (IR_OPT_TYPE(opt)) {
		default:
			IR_ASSERT(0);
		case IR_BOOL:
			IR_FOLD_BOOL(op1_insn->val.i8 != 0);
		case IR_I8:
			IR_FOLD_CONST_I(op1_insn->val.i8);
		case IR_I16:
			IR_FOLD_CONST_I(op1_insn->val.i16);
		case IR_I32:
			IR_FOLD_CONST_I(op1_insn->val.i32);
		case IR_I64:
			IR_FOLD_CONST_I(op1_insn->val.i64);
		case IR_U8:
			IR_FOLD_CONST_U(op1_insn->val.u8);
		case IR_U16:
			IR_FOLD_CONST_U(op1_insn->val.u16);
		case IR_U32:
			IR_FOLD_CONST_U(op1_insn->val.u32);
		case IR_U64:
			IR_FOLD_CONST_U(op1_insn->val.u64);
		case IR_FLOAT:
			IR_FOLD_CONST_F(op1_insn->val.f);
		case IR_DOUBLE:
			IR_FOLD_CONST_D(op1_insn->val.d);
		case IR_CHAR:
			IR_FOLD_CONST_I(op1_insn->val.c);
		case IR_ADDR:
			IR_FOLD_CONST_U(op1_insn->val.addr);
	}
}
1259
/*
 * Constant folding for int<->float conversions.  INT2FP always reads the
 * 64-bit view of the integer constant (narrower constants are stored
 * sign-/zero-extended in val.i64/val.u64).
 */
IR_FOLD(INT2FP(C_I8))
IR_FOLD(INT2FP(C_I16))
IR_FOLD(INT2FP(C_I32))
IR_FOLD(INT2FP(C_I64))
{
	if (IR_OPT_TYPE(opt) == IR_DOUBLE) {
		IR_FOLD_CONST_D((double)op1_insn->val.i64);
	} else {
		IR_ASSERT(IR_OPT_TYPE(opt) == IR_FLOAT);
		IR_FOLD_CONST_F((float)op1_insn->val.i64);
	}
}

IR_FOLD(INT2FP(C_U8))
IR_FOLD(INT2FP(C_U16))
IR_FOLD(INT2FP(C_U32))
IR_FOLD(INT2FP(C_U64))
{
	if (IR_OPT_TYPE(opt) == IR_DOUBLE) {
		IR_FOLD_CONST_D((double)op1_insn->val.u64);
	} else {
		IR_ASSERT(IR_OPT_TYPE(opt) == IR_FLOAT);
		IR_FOLD_CONST_F((float)op1_insn->val.u64);
	}
}

IR_FOLD(FP2INT(C_FLOAT))
{
	/* float -> integer: C cast semantics (truncation toward zero) */
	switch (IR_OPT_TYPE(opt)) {
		default:
			IR_ASSERT(0);
		case IR_I8:
			IR_FOLD_CONST_I((int8_t)op1_insn->val.f);
		case IR_I16:
			IR_FOLD_CONST_I((int16_t)op1_insn->val.f);
		case IR_I32:
			IR_FOLD_CONST_I((int32_t)op1_insn->val.f);
		case IR_I64:
			IR_FOLD_CONST_I((int64_t)op1_insn->val.f);
		case IR_U8:
			IR_FOLD_CONST_U((uint8_t)op1_insn->val.f);
		case IR_U16:
			IR_FOLD_CONST_U((uint16_t)op1_insn->val.f);
		case IR_U32:
			IR_FOLD_CONST_U((uint32_t)op1_insn->val.f);
		case IR_U64:
			IR_FOLD_CONST_U((uint64_t)op1_insn->val.f);
	}
}

IR_FOLD(FP2INT(C_DOUBLE))
{
	switch (IR_OPT_TYPE(opt)) {
		default:
			IR_ASSERT(0);
		case IR_I8:
			IR_FOLD_CONST_I((int8_t)op1_insn->val.d);
		case IR_I16:
			IR_FOLD_CONST_I((int16_t)op1_insn->val.d);
		case IR_I32:
			IR_FOLD_CONST_I((int32_t)op1_insn->val.d);
		case IR_I64:
			IR_FOLD_CONST_I((int64_t)op1_insn->val.d);
		case IR_U8:
			IR_FOLD_CONST_U((uint8_t)op1_insn->val.d);
		case IR_U16:
			IR_FOLD_CONST_U((uint16_t)op1_insn->val.d);
		case IR_U32:
			IR_FOLD_CONST_U((uint32_t)op1_insn->val.d);
		case IR_U64:
			IR_FOLD_CONST_U((uint64_t)op1_insn->val.d);
	}
}

IR_FOLD(FP2FP(C_FLOAT))
{
	if (IR_OPT_TYPE(opt) == IR_DOUBLE) {
		IR_FOLD_CONST_D((double)op1_insn->val.f);
	} else {
		IR_ASSERT(IR_OPT_TYPE(opt) == IR_FLOAT);
		/* NOTE(review): the float->float action line is missing in this
		 * rendering (extraction dropped it) — confirm against upstream */
	}
}

IR_FOLD(FP2FP(C_DOUBLE))
{
	if (IR_OPT_TYPE(opt) == IR_DOUBLE) {
		/* NOTE(review): the double->double action line is missing in this
		 * rendering (extraction dropped it) — confirm against upstream */
	} else {
		IR_ASSERT(IR_OPT_TYPE(opt) == IR_FLOAT);
		IR_FOLD_CONST_F((float)op1_insn->val.d);
	}
}
1355
// TODO: constant functions (e.g. sin, cos)

/* Copy Propagation */
IR_FOLD(COPY(_))
{
	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
	if (!op2) {
		/* NOTE(review): fold action line elided in this rendering
		 * (a plain COPY propagates its operand) — confirm upstream */
	}
	/* skip CSE */
}

IR_FOLD(PHI(_, _)) // TODO: PHI(_, _, _)
{
	/* a PHI whose two value inputs agree degenerates to that value */
	if (op2 == op3 && op3 != IR_UNUSED) {
	}
	/* skip CSE */
	opt = opt | (3 << IR_OPT_INPUTS_SHIFT);
}

IR_FOLD(COND(C_BOOL, _)) // TODO: COND(CONST, _, _)
IR_FOLD(COND(C_U8, _))
IR_FOLD(COND(C_U16, _))
IR_FOLD(COND(C_U32, _))
IR_FOLD(COND(C_U64, _))
IR_FOLD(COND(C_ADDR, _))
IR_FOLD(COND(C_CHAR, _))
IR_FOLD(COND(C_I8, _))
IR_FOLD(COND(C_I16, _))
IR_FOLD(COND(C_I32, _))
IR_FOLD(COND(C_I64, _))
IR_FOLD(COND(C_DOUBLE, _))
IR_FOLD(COND(C_FLOAT, _))
{
	/* select on a constant condition: keep only the taken arm */
	if (ir_const_is_true(op1_insn)) {
		/* NOTE(review): true-branch action line elided in this rendering */
	} else {
		IR_FOLD_COPY(op3);
	}
}

IR_FOLD(BITCAST(_))
{
	/* BITCAST to the same type is the identity */
	if (IR_OPT_TYPE(opt) == op1_insn->type) {
	}
}
1407
/* Algebraic simplifications */
IR_FOLD(ABS(ABS))
{
	/* abs(x = abs(y)) => x : abs is idempotent */
}

IR_FOLD(ABS(NEG))
{
	/* abs(neg(y)) => abs(y) */
	op1 = op1_insn->op1;
}

IR_FOLD(NEG(NEG))
IR_FOLD(NOT(NOT))
IR_FOLD(BSWAP(BSWAP))
{
	/* f(f(y)) => y : these ops are involutions */
	IR_FOLD_COPY(op1_insn->op1);
}

IR_FOLD(EQ(_, C_BOOL))
{
	/* x == true => x ; x == false => !x */
	if (op2 == IR_TRUE) {
	} else {
		opt = IR_OPT(IR_NOT, IR_BOOL);
		op2 = IR_UNUSED;
	}
}

IR_FOLD(NE(_, C_BOOL))
{
	/* x != false => x ; x != true => !x */
	if (op2 != IR_TRUE) {
	} else {
		opt = IR_OPT(IR_NOT, IR_BOOL);
		op2 = IR_UNUSED;
	}
}
1451
/*
 * Compare of an extended value against a constant: when the extension has
 * a single use, compare the un-extended value against a narrowed constant
 * instead (and EQ(zext(bool), 0) becomes NOT(bool)).
 * NOTE(review): the narrowed constant is produced by truncation; the
 * elided lines after ir_const() presumably restart matching — confirm
 * how out-of-range constants are handled upstream.
 */
IR_FOLD(EQ(ZEXT, C_U16))
IR_FOLD(EQ(ZEXT, C_U32))
IR_FOLD(EQ(ZEXT, C_U64))
IR_FOLD(EQ(ZEXT, C_I16))
IR_FOLD(EQ(ZEXT, C_I32))
IR_FOLD(EQ(ZEXT, C_I64))
IR_FOLD(EQ(ZEXT, C_ADDR))
IR_FOLD(EQ(SEXT, C_U16))
IR_FOLD(EQ(SEXT, C_U32))
IR_FOLD(EQ(SEXT, C_U64))
IR_FOLD(EQ(SEXT, C_I16))
IR_FOLD(EQ(SEXT, C_I32))
IR_FOLD(EQ(SEXT, C_I64))
IR_FOLD(EQ(SEXT, C_ADDR))
{
	if (ctx->use_lists && ctx->use_lists[op1_insn->op1].count != 1) {
		/* pass */
	} else if (op2_insn->val.u64 == 0 && ctx->ir_base[op1_insn->op1].type == IR_BOOL) {
		/* ext(b) == 0  =>  !b */
		opt = IR_OPT(IR_NOT, IR_BOOL);
		op1 = op1_insn->op1;
		op2 = IR_UNUSED;
	} else {
		ir_type type = ctx->ir_base[op1_insn->op1].type;

		if (IR_IS_TYPE_SIGNED(type)) {
			switch (ir_type_size[type]) {
				case 1: val.i64 = op2_insn->val.i8; break;
				case 2: val.i64 = op2_insn->val.i16; break;
				case 4: val.i64 = op2_insn->val.i32; break;
				default: val.u64 = op2_insn->val.u64; break;
			}
		} else {
			switch (ir_type_size[type]) {
				case 1: val.u64 = op2_insn->val.u8; break;
				case 2: val.u64 = op2_insn->val.u16; break;
				case 4: val.u64 = op2_insn->val.u32; break;
				default: val.u64 = op2_insn->val.u64; break;
			}
		}
		op1 = op1_insn->op1;
		op2 = ir_const(ctx, val, type);
	}
}

IR_FOLD(NE(ZEXT, C_U16))
IR_FOLD(NE(ZEXT, C_U32))
IR_FOLD(NE(ZEXT, C_U64))
IR_FOLD(NE(ZEXT, C_I16))
IR_FOLD(NE(ZEXT, C_I32))
IR_FOLD(NE(ZEXT, C_I64))
IR_FOLD(NE(ZEXT, C_ADDR))
IR_FOLD(NE(SEXT, C_U16))
IR_FOLD(NE(SEXT, C_U32))
IR_FOLD(NE(SEXT, C_U64))
IR_FOLD(NE(SEXT, C_I16))
IR_FOLD(NE(SEXT, C_I32))
IR_FOLD(NE(SEXT, C_I64))
IR_FOLD(NE(SEXT, C_ADDR))
{
	if (ctx->use_lists && ctx->use_lists[op1_insn->op1].count != 1) {
		/* pass */
	} else if (op2_insn->val.u64 == 0 && ctx->ir_base[op1_insn->op1].type == IR_BOOL) {
		/* ext(b) != 0  =>  b */
		IR_FOLD_COPY(op1_insn->op1);
	} else {
		ir_type type = ctx->ir_base[op1_insn->op1].type;

		if (IR_IS_TYPE_SIGNED(type)) {
			switch (ir_type_size[type]) {
				case 1: val.i64 = op2_insn->val.i8; break;
				case 2: val.i64 = op2_insn->val.i16; break;
				case 4: val.i64 = op2_insn->val.i32; break;
				default: val.u64 = op2_insn->val.u64; break;
			}
		} else {
			switch (ir_type_size[type]) {
				case 1: val.u64 = op2_insn->val.u8; break;
				case 2: val.u64 = op2_insn->val.u16; break;
				case 4: val.u64 = op2_insn->val.u32; break;
				default: val.u64 = op2_insn->val.u64; break;
			}
		}
		op1 = op1_insn->op1;
		op2 = ir_const(ctx, val, type);
	}
}
1542
/*
 * NOT of an integer comparison: flip the comparison instead.
 * NOTE(review): `opt ^ 1` assumes inverse comparison opcodes are adjacent
 * pairs in the opcode numbering (EQ/NE, LT/GE, ...) — confirm in ir.h.
 * Restricted to integer operands: for floats, !(a < b) is not (a >= b)
 * in the presence of NaN.
 */
IR_FOLD(NOT(EQ))
IR_FOLD(NOT(NE))
IR_FOLD(NOT(LT))
IR_FOLD(NOT(GE))
IR_FOLD(NOT(LE))
IR_FOLD(NOT(GT))
IR_FOLD(NOT(ULT))
IR_FOLD(NOT(UGE))
IR_FOLD(NOT(ULE))
IR_FOLD(NOT(UGT))
{
	if (IR_IS_TYPE_INT(ctx->ir_base[op1_insn->op1].type)) {
		opt = op1_insn->opt ^ 1;
		op1 = op1_insn->op1;
		op2 = op1_insn->op2;
	}
}

IR_FOLD(EQ(SUB, C_U8))
IR_FOLD(EQ(SUB, C_U16))
IR_FOLD(EQ(SUB, C_U32))
IR_FOLD(EQ(SUB, C_U64))
IR_FOLD(EQ(SUB, C_I8))
IR_FOLD(EQ(SUB, C_I16))
IR_FOLD(EQ(SUB, C_I32))
IR_FOLD(EQ(SUB, C_I64))
IR_FOLD(EQ(SUB, C_ADDR))
IR_FOLD(NE(SUB, C_U8))
IR_FOLD(NE(SUB, C_U16))
IR_FOLD(NE(SUB, C_U32))
IR_FOLD(NE(SUB, C_U64))
IR_FOLD(NE(SUB, C_I8))
IR_FOLD(NE(SUB, C_I16))
IR_FOLD(NE(SUB, C_I32))
IR_FOLD(NE(SUB, C_I64))
IR_FOLD(NE(SUB, C_ADDR))
{
	/* (a - b) == 0 => a == b  (only when the SUB has no other users) */
	if (ctx->use_lists && ctx->use_lists[op1].count == 1 && op2_insn->val.u64 == 0) {
		op1 = op1_insn->op1;
		op2 = op1_insn->op2;
	}
}
1590
/* Additive identities: a +/- 0 => a (integer/address types only). */
IR_FOLD(ADD(_, C_U8))
IR_FOLD(ADD(_, C_U16))
IR_FOLD(ADD(_, C_U32))
IR_FOLD(ADD(_, C_U64))
IR_FOLD(ADD(_, C_I8))
IR_FOLD(ADD(_, C_I16))
IR_FOLD(ADD(_, C_I32))
IR_FOLD(ADD(_, C_I64))
IR_FOLD(ADD(_, C_ADDR))
IR_FOLD(SUB(_, C_U8))
IR_FOLD(SUB(_, C_U16))
IR_FOLD(SUB(_, C_U32))
IR_FOLD(SUB(_, C_U64))
IR_FOLD(SUB(_, C_I8))
IR_FOLD(SUB(_, C_I16))
IR_FOLD(SUB(_, C_I32))
IR_FOLD(SUB(_, C_I64))
IR_FOLD(SUB(_, C_ADDR))
IR_FOLD(ADD_OV(_, C_U8))
IR_FOLD(ADD_OV(_, C_U16))
IR_FOLD(ADD_OV(_, C_U32))
IR_FOLD(ADD_OV(_, C_U64))
IR_FOLD(ADD_OV(_, C_I8))
IR_FOLD(ADD_OV(_, C_I16))
IR_FOLD(ADD_OV(_, C_I32))
IR_FOLD(ADD_OV(_, C_I64))
IR_FOLD(ADD_OV(_, C_ADDR))
IR_FOLD(SUB_OV(_, C_U8))
IR_FOLD(SUB_OV(_, C_U16))
IR_FOLD(SUB_OV(_, C_U32))
IR_FOLD(SUB_OV(_, C_U64))
IR_FOLD(SUB_OV(_, C_I8))
IR_FOLD(SUB_OV(_, C_I16))
IR_FOLD(SUB_OV(_, C_I32))
IR_FOLD(SUB_OV(_, C_I64))
IR_FOLD(SUB_OV(_, C_ADDR))
{
	if (op2_insn->val.u64 == 0) {
		/* a +/- 0 => a */
	}
}

/* This rule is useful for ADD(0, SYM) => SYM */
IR_FOLD(ADD(C_U8, _))
IR_FOLD(ADD(C_U16, _))
IR_FOLD(ADD(C_U32, _))
IR_FOLD(ADD(C_U64, _))
IR_FOLD(ADD(C_I8, _))
IR_FOLD(ADD(C_I16, _))
IR_FOLD(ADD(C_I32, _))
IR_FOLD(ADD(C_I64, _))
IR_FOLD(ADD(C_ADDR, _))
{
	if (op1_insn->val.u64 == 0) {
		/* 0 + a => a */
	}
}

IR_FOLD(SUB(C_I8, _))
IR_FOLD(SUB(C_I16, _))
IR_FOLD(SUB(C_I32, _))
IR_FOLD(SUB(C_I64, _))
{
	if (op1_insn->val.u64 == 0) {
		/* 0 - a => -a (invalid for +0.0) */
		opt = IR_NEG | (opt & IR_OPT_TYPE_MASK);
		op1 = op2;
		op2 = IR_UNUSED;
	}
}
1667
/*
 * Unsigned comparisons against 0 degenerate:
 *   x >=u 0  is always true, x <u 0 is always false,
 *   x >u 0  <=>  x != 0,     x <=u 0  <=>  x == 0.
 */
IR_FOLD(UGE(_, C_U8))
IR_FOLD(UGE(_, C_U16))
IR_FOLD(UGE(_, C_U32))
IR_FOLD(UGE(_, C_U64))
IR_FOLD(UGE(_, C_I8))
IR_FOLD(UGE(_, C_I16))
IR_FOLD(UGE(_, C_I32))
IR_FOLD(UGE(_, C_I64))
IR_FOLD(UGE(_, C_ADDR))
{
	if (op2_insn->val.u64 == 0) {
		/* x >=u 0 => true (action line elided in this rendering) */
	}
}

IR_FOLD(UGT(_, C_U8))
IR_FOLD(UGT(_, C_U16))
IR_FOLD(UGT(_, C_U32))
IR_FOLD(UGT(_, C_U64))
IR_FOLD(UGT(_, C_I8))
IR_FOLD(UGT(_, C_I16))
IR_FOLD(UGT(_, C_I32))
IR_FOLD(UGT(_, C_I64))
IR_FOLD(UGT(_, C_ADDR))
{
	if (op2_insn->val.u64 == 0) {
		/* x >u 0 => x != 0 */
		opt = IR_OPT(IR_NE, IR_BOOL);
	}
}

IR_FOLD(ULT(_, C_U8))
IR_FOLD(ULT(_, C_U16))
IR_FOLD(ULT(_, C_U32))
IR_FOLD(ULT(_, C_U64))
IR_FOLD(ULT(_, C_I8))
IR_FOLD(ULT(_, C_I16))
IR_FOLD(ULT(_, C_I32))
IR_FOLD(ULT(_, C_I64))
IR_FOLD(ULT(_, C_ADDR))
{
	if (op2_insn->val.u64 == 0) {
		/* x <u 0 => false (action line elided in this rendering) */
	}
}

IR_FOLD(ULE(_, C_U8))
IR_FOLD(ULE(_, C_U16))
IR_FOLD(ULE(_, C_U32))
IR_FOLD(ULE(_, C_U64))
IR_FOLD(ULE(_, C_I8))
IR_FOLD(ULE(_, C_I16))
IR_FOLD(ULE(_, C_I32))
IR_FOLD(ULE(_, C_I64))
IR_FOLD(ULE(_, C_ADDR))
{
	if (op2_insn->val.u64 == 0) {
		/* x <=u 0 => x == 0 */
		opt = IR_OPT(IR_EQ, IR_BOOL);
	}
}
1733
/*
 * Algebraic cancellation over ADD/SUB/NEG.  The integer-only guards
 * (IR_IS_TYPE_INT) exist because these identities are not exact in
 * floating point (rounding, signed zero, NaN).
 */
IR_FOLD(ADD(NEG, _))
{
	/* (-a) + b => b - a */
	opt++; /* ADD -> SUB (relies on adjacent opcode numbering) */
	op1 = op2;
	op2 = op1_insn->op1;
}

IR_FOLD(ADD(_, NEG))
IR_FOLD(SUB(_,NEG))
{
	/* a + (-b) => a - b ;  a - (-b) => a + b */
	opt ^= 1; /* ADD <-> SUB */
	op2 = op2_insn->op1;
}

IR_FOLD(ADD(SUB, _))
{
	if (IR_IS_TYPE_INT(IR_OPT_TYPE(opt))) {
		if (op1_insn->op2 == op2) {
			/* (a - b) + b => a */
			IR_FOLD_COPY(op1_insn->op1);
		}
	}
}

IR_FOLD(ADD(_, SUB))
{
	if (IR_IS_TYPE_INT(IR_OPT_TYPE(opt))) {
		if (op2_insn->op2 == op1) {
			/* a + (b - a) => b */
			IR_FOLD_COPY(op2_insn->op1);
		}
	}
}

IR_FOLD(SUB(ADD, _))
{
	if (IR_IS_TYPE_INT(IR_OPT_TYPE(opt))) {
		if (op1_insn->op1 == op2) {
			/* (a + b) - a => b */
			IR_FOLD_COPY(op1_insn->op2);
		} else if (op1_insn->op2 == op2) {
			/* (a + b) - b => a */
			IR_FOLD_COPY(op1_insn->op1);
		}
	}
}

IR_FOLD(SUB(_, ADD))
{
	if (IR_IS_TYPE_INT(IR_OPT_TYPE(opt))) {
		if (op2_insn->op1 == op1) {
			/* a - (a + b) => -b */
			opt = IR_NEG | (opt & IR_OPT_TYPE_MASK);
			op1 = op2_insn->op2;
			op2 = IR_UNUSED;
		} else if (op2_insn->op2 == op1) {
			/* b - (a + b) => -a */
			opt = IR_NEG | (opt & IR_OPT_TYPE_MASK);
			op1 = op2_insn->op1;
			op2 = IR_UNUSED;
		}
	}
}

IR_FOLD(SUB(SUB, _))
{
	if (IR_IS_TYPE_INT(IR_OPT_TYPE(opt))) {
		if (op1_insn->op1 == op2) {
			/* (a - b) - a => -b */
			opt = IR_NEG | (opt & IR_OPT_TYPE_MASK);
			op1 = op1_insn->op2;
			op2 = IR_UNUSED;
		}
	}
}

IR_FOLD(SUB(_, SUB))
{
	if (IR_IS_TYPE_INT(IR_OPT_TYPE(opt))) {
		if (op2_insn->op1 == op1) {
			/* a - (a - b) => b */
			IR_FOLD_COPY(op2_insn->op2);
		}
	}
}

IR_FOLD(SUB(ADD, ADD))
{
	if (IR_IS_TYPE_INT(IR_OPT_TYPE(opt))) {
		if (op1_insn->op1 == op2_insn->op1) {
			/* (a + b) - (a + c) => b - c */
			op1 = op1_insn->op2;
			op2 = op2_insn->op2;
		} else if (op1_insn->op1 == op2_insn->op2) {
			/* (a + b) - (c + a) => b - c */
			op1 = op1_insn->op2;
			op2 = op2_insn->op1;
		} else if (op1_insn->op2 == op2_insn->op1) {
			/* (a + b) - (b + c) => a - c */
			op1 = op1_insn->op1;
			op2 = op2_insn->op2;
		} else if (op1_insn->op2 == op2_insn->op2) {
			/* (a + b) - (c + b) => a - c */
			op1 = op1_insn->op1;
			op2 = op2_insn->op1;
		}
	}
}
1860
// IR_FOLD(SUB(NEG, CONST)) TODO: -a - b => -b - a

/*
 * Push a NEG through MUL/DIV with a constant: (-a) * c => a * (-c).
 * NOTE(review): for i64 this negates the constant with `-val.i64`;
 * c == INT64_MIN would be signed overflow (UB) — confirm upstream relies
 * on two's-complement wraparound here.
 */
IR_FOLD(MUL(NEG, C_I8))
IR_FOLD(MUL(NEG, C_I16))
IR_FOLD(MUL(NEG, C_I32))
IR_FOLD(MUL(NEG, C_I64))
IR_FOLD(DIV(NEG, C_I8))
IR_FOLD(DIV(NEG, C_I16))
IR_FOLD(DIV(NEG, C_I32))
IR_FOLD(DIV(NEG, C_I64))
{
	op1 = op1_insn->op1;
	val.i64 = -op2_insn->val.i64;
	op2 = ir_const(ctx, val, op2_insn->type);
}

IR_FOLD(MUL(NEG, C_FLOAT))
IR_FOLD(DIV(NEG, C_FLOAT))
{
	op1 = op1_insn->op1;
	val.f = -op2_insn->val.f;
	op2 = ir_const(ctx, val, op2_insn->type);
}

IR_FOLD(MUL(NEG, C_DOUBLE))
IR_FOLD(DIV(NEG, C_DOUBLE))
{
	op1 = op1_insn->op1;
	val.d = -op2_insn->val.d;
	op2 = ir_const(ctx, val, op2_insn->type);
}

/* Multiplicative identities (integer/address). */
IR_FOLD(MUL(_, C_U8))
IR_FOLD(MUL(_, C_U16))
IR_FOLD(MUL(_, C_U32))
IR_FOLD(MUL(_, C_U64))
IR_FOLD(MUL(_, C_ADDR))
{
	if (op2_insn->val.u64 == 0) {
		/* a * 0 => 0 */
	} else if (op2_insn->val.u64 == 1) {
		/* a * 1 => a */
	}
}

IR_FOLD(MUL(_, C_I8))
IR_FOLD(MUL(_, C_I16))
IR_FOLD(MUL(_, C_I32))
IR_FOLD(MUL(_, C_I64))
{
	if (op2_insn->val.i64 == 0) {
		/* a * 0 => 0 */
	} else if (op2_insn->val.i64 == 1) {
		/* a * 1 => a */
	} else if (op2_insn->val.i64 == -1) {
		/* a * -1 => -a */
		opt = IR_NEG | (opt & IR_OPT_TYPE_MASK);
		op2 = IR_UNUSED;
	}
}

/* FP multiplies: note a * 0.0 is NOT folded (wrong for NaN/-0.0);
 * a * 2.0 => a + a is exact in IEEE 754 */
IR_FOLD(MUL(_, C_DOUBLE))
{
	if (op2_insn->val.d == 1.0) {
		/* a * 1.0 => a */
	} else if (op2_insn->val.d == 2.0) {
		/* a * 2.0 => a + a */
		opt = IR_ADD | (opt & IR_OPT_TYPE_MASK);
		op2 = op1;
	} else if (op2_insn->val.d == -1.0) {
		/* a * -1.0 => -a */
		opt = IR_NEG | (opt & IR_OPT_TYPE_MASK);
		op2 = IR_UNUSED;
	}
}

IR_FOLD(MUL(_, C_FLOAT))
{
	if (op2_insn->val.f == 1.0) {
		/* a * 1.0 => a */
	} else if (op2_insn->val.f == 2.0) {
		/* a * 2.0 => a + a */
		opt = IR_ADD | (opt & IR_OPT_TYPE_MASK);
		op2 = op1;
	} else if (op2_insn->val.f == -1.0) {
		/* a * -1.0 => -a */
		opt = IR_NEG | (opt & IR_OPT_TYPE_MASK);
		op2 = IR_UNUSED;
	}
}
1968
/* Division/modulo by constant identities. */
IR_FOLD(DIV(_, C_U8))
IR_FOLD(DIV(_, C_U16))
IR_FOLD(DIV(_, C_U32))
IR_FOLD(DIV(_, C_U64))
{
	if (op2_insn->val.u64 == 1) {
		/* a / 1 => a */
	}
}

IR_FOLD(DIV(_, C_I8))
IR_FOLD(DIV(_, C_I16))
IR_FOLD(DIV(_, C_I32))
IR_FOLD(DIV(_, C_I64))
{
	if (op2_insn->val.i64 == 1) {
		/* a / 1 => a */
	} else if (op2_insn->val.i64 == -1) {
		/* a / -1 => -a */
		opt = IR_NEG | (opt & IR_OPT_TYPE_MASK);
		op2 = IR_UNUSED;
	}
}

IR_FOLD(MOD(_, C_U8))
IR_FOLD(MOD(_, C_U16))
IR_FOLD(MOD(_, C_U32))
IR_FOLD(MOD(_, C_U64))
IR_FOLD(MOD(_, C_I8))
IR_FOLD(MOD(_, C_I16))
IR_FOLD(MOD(_, C_I32))
IR_FOLD(MOD(_, C_I64))
{
	if (op2_insn->val.i64 == 1) {
		/* a % 1 => 0 */
		IR_FOLD_CONST_U(0);
	}
}

/* FP division by +/-1.0 is exact, so these are safe. */
IR_FOLD(DIV(_, C_DOUBLE))
{
	if (op2_insn->val.d == 1.0) {
		/* a / 1.0 => a */
	} else if (op2_insn->val.d == -1.0) {
		/* a / -1.0 => -a */
		opt = IR_NEG | (opt & IR_OPT_TYPE_MASK);
		op2 = IR_UNUSED;
	}
}

IR_FOLD(DIV(_, C_FLOAT))
{
	if (op2_insn->val.f == 1.0) {
		/* a / 1.0 => a */
	} else if (op2_insn->val.f == -1.0) {
		/* a / -1.0 => -a */
		opt = IR_NEG | (opt & IR_OPT_TYPE_MASK);
		op2 = IR_UNUSED;
	}
}

IR_FOLD(MUL(NEG, NEG))
IR_FOLD(DIV(NEG, NEG))
{
	/* (-a) * (-b) => a * b ;  (-a) / (-b) => a / b */
	op1 = op1_insn->op1;
	op2 = op2_insn->op1;
}
2048
/* AND identities: a & 0 => 0, a & ~0 => a (per operand width). */
IR_FOLD(AND(_, C_BOOL))
{
	/* a && true => a ;  a && false => false */
	IR_FOLD_COPY(op2_insn->val.b ? op1 : op2);
}

IR_FOLD(AND(_, C_U8))
IR_FOLD(AND(_, C_I8))
IR_FOLD(AND(_, C_CHAR))
{
	if (op2_insn->val.i8 == 0) {
		/* a & 0 => 0 */
	} else if (op2_insn->val.i8 == -1) {
		/* a & 0xff => a */
	}
}

IR_FOLD(AND(_, C_U16))
IR_FOLD(AND(_, C_I16))
{
	if (op2_insn->val.i16 == 0) {
		/* a & 0 => 0 */
	} else if (op2_insn->val.i16 == -1) {
		/* a & 0xffff => a */
	}
}

IR_FOLD(AND(_, C_U32))
IR_FOLD(AND(_, C_I32))
{
	if (op2_insn->val.i32 == 0) {
		/* a & 0 => 0 */
	} else if (op2_insn->val.i32 == -1) {
		/* a & 0xffffffff => a */
	}
}

IR_FOLD(AND(_, C_U64))
IR_FOLD(AND(_, C_I64))
{
	if (op2_insn->val.i64 == 0) {
		/* a & 0 => 0 */
	} else if (op2_insn->val.i64 == -1) {
		/* a & ~0 => a */
	}
}
2102
/* OR identities: a | ~0 => ~0, a | 0 => a (per operand width). */
IR_FOLD(OR(_, C_BOOL))
{
	/* a || true => true ;  a || false => a */
	IR_FOLD_COPY(op2_insn->val.b ? op2 : op1);
}

IR_FOLD(OR(_, C_U8))
IR_FOLD(OR(_, C_I8))
IR_FOLD(OR(_, C_CHAR))
{
	if (op2_insn->val.i8 == -1) {
		/* a | -1 => -1 (all-ones absorbs) */
	} else if (op2_insn->val.i8 == 0) {
		/* a | 0 => a */
	}
}

IR_FOLD(OR(_, C_U16))
IR_FOLD(OR(_, C_I16))
{
	if (op2_insn->val.i16 == -1) {
		/* a | -1 => -1 (all-ones absorbs) */
	} else if (op2_insn->val.i16 == 0) {
		/* a | 0 => a */
	}
}
2132
2133IR_FOLD(OR(_, C_U32))
2134IR_FOLD(OR(_, C_I32))
2135{
2136 if (op2_insn->val.i32 == -1) {
2137 /* a | 1 => 1 */
2139 } else if (op2_insn->val.i32 == -0) {
2141 }
2143}
2144
IR_FOLD(OR(_, C_U64))
IR_FOLD(OR(_, C_I64))
{
	if (op2_insn->val.i64 == -1) {
		/* a | -1 => -1 (all-ones absorbs) */
	} else if (op2_insn->val.i64 == 0) {
		/* a | 0 => a */
	}
}

/* XOR identities: a ^ 0 => a, a ^ ~0 => ~a (per operand width). */
IR_FOLD(XOR(_, C_BOOL))
{
	if (!op2_insn->val.b) {
		/* a ^ false => a */
	} else {
		/* a ^ true => !a */
		opt = IR_NOT | (opt & IR_OPT_TYPE_MASK);
		op2 = IR_UNUSED;
	}
}

IR_FOLD(XOR(_, C_U8))
IR_FOLD(XOR(_, C_I8))
IR_FOLD(XOR(_, C_CHAR))
{
	if (op2_insn->val.i8 == 0) {
		/* a ^ 0 => a */
	} else if (op2_insn->val.i8 == -1) {
		/* a ^ -1 => ~a */
		opt = IR_NOT | (opt & IR_OPT_TYPE_MASK);
		op2 = IR_UNUSED;
	}
}

IR_FOLD(XOR(_, C_U16))
IR_FOLD(XOR(_, C_I16))
{
	if (op2_insn->val.i16 == 0) {
		/* a ^ 0 => a */
	} else if (op2_insn->val.i16 == -1) {
		/* a ^ -1 => ~a */
		opt = IR_NOT | (opt & IR_OPT_TYPE_MASK);
		op2 = IR_UNUSED;
	}
}

IR_FOLD(XOR(_, C_U32))
IR_FOLD(XOR(_, C_I32))
{
	if (op2_insn->val.i32 == 0) {
		/* a ^ 0 => a */
	} else if (op2_insn->val.i32 == -1) {
		/* a ^ -1 => ~a */
		opt = IR_NOT | (opt & IR_OPT_TYPE_MASK);
		op2 = IR_UNUSED;
	}
}

IR_FOLD(XOR(_, C_U64))
IR_FOLD(XOR(_, C_I64))
{
	if (op2_insn->val.i64 == 0) {
		/* a ^ 0 => a */
	} else if (op2_insn->val.i64 == -1) {
		/* a ^ -1 => ~a */
		opt = IR_NOT | (opt & IR_OPT_TYPE_MASK);
		op2 = IR_UNUSED;
	}
}
2230
/* Shift/rotate identities. */
IR_FOLD(SHL(_, C_U8))
IR_FOLD(SHL(_, C_U16))
IR_FOLD(SHL(_, C_U32))
IR_FOLD(SHL(_, C_U64))
IR_FOLD(SHL(_, C_I8))
IR_FOLD(SHL(_, C_I16))
IR_FOLD(SHL(_, C_I32))
IR_FOLD(SHL(_, C_I64))
{
	if (op2_insn->val.u64 == 0) {
		/* a << 0 => a */
	} else if (op2_insn->val.u64 == 1) {
		/* a << 1 => a + a */
		opt = IR_ADD | (opt & IR_OPT_TYPE_MASK);
		op2 = op1;
	}
}

IR_FOLD(SHR(_, C_U8))
IR_FOLD(SHR(_, C_U16))
IR_FOLD(SHR(_, C_U32))
IR_FOLD(SHR(_, C_U64))
IR_FOLD(SHR(_, C_I8))
IR_FOLD(SHR(_, C_I16))
IR_FOLD(SHR(_, C_I32))
IR_FOLD(SHR(_, C_I64))
IR_FOLD(SAR(_, C_U8))
IR_FOLD(SAR(_, C_U16))
IR_FOLD(SAR(_, C_U32))
IR_FOLD(SAR(_, C_U64))
IR_FOLD(SAR(_, C_I8))
IR_FOLD(SAR(_, C_I16))
IR_FOLD(SAR(_, C_I32))
IR_FOLD(SAR(_, C_I64))
IR_FOLD(ROL(_, C_U8))
IR_FOLD(ROL(_, C_U16))
IR_FOLD(ROL(_, C_U32))
IR_FOLD(ROL(_, C_U64))
IR_FOLD(ROL(_, C_I8))
IR_FOLD(ROL(_, C_I16))
IR_FOLD(ROL(_, C_I32))
IR_FOLD(ROL(_, C_I64))
IR_FOLD(ROR(_, C_U8))
IR_FOLD(ROR(_, C_U16))
IR_FOLD(ROR(_, C_U32))
IR_FOLD(ROR(_, C_U64))
IR_FOLD(ROR(_, C_I8))
IR_FOLD(ROR(_, C_I16))
IR_FOLD(ROR(_, C_I32))
IR_FOLD(ROR(_, C_I64))
{
	if (op2_insn->val.u64 == 0) {
		/* a >> 0 => a  (likewise rotate by 0) */
	}
}

/* Shifting the constants 0 (any shift) stays 0. */
IR_FOLD(SHL(C_U8, _))
IR_FOLD(SHL(C_U16, _))
IR_FOLD(SHL(C_U32, _))
IR_FOLD(SHL(C_U64, _))
IR_FOLD(SHL(C_I8, _))
IR_FOLD(SHL(C_I16, _))
IR_FOLD(SHL(C_I32, _))
IR_FOLD(SHL(C_I64, _))
IR_FOLD(SHR(C_U8, _))
IR_FOLD(SHR(C_U16, _))
IR_FOLD(SHR(C_U32, _))
IR_FOLD(SHR(C_U64, _))
IR_FOLD(SHR(C_I8, _))
IR_FOLD(SHR(C_I16, _))
IR_FOLD(SHR(C_I32, _))
IR_FOLD(SHR(C_I64, _))
{
	if (op1_insn->val.u64 == 0) {
		/* 0 << a => 0 */
	}
}

/* SAR/ROL/ROR of the fixed points 0 and ~0 yield the operand itself. */
IR_FOLD(SAR(C_U8, _))
IR_FOLD(SAR(C_I8, _))
IR_FOLD(ROL(C_U8, _))
IR_FOLD(ROL(C_I8, _))
IR_FOLD(ROR(C_U8, _))
IR_FOLD(ROR(C_I8, _))
{
	if (op1_insn->val.i8 == 0 || op1_insn->val.i8 == -1) {
	}
}

IR_FOLD(SAR(C_U16, _))
IR_FOLD(SAR(C_I16, _))
IR_FOLD(ROL(C_U16, _))
IR_FOLD(ROL(C_I16, _))
IR_FOLD(ROR(C_U16, _))
IR_FOLD(ROR(C_I16, _))
{
	if (op1_insn->val.i16 == 0 || op1_insn->val.i16 == -1) {
	}
}

IR_FOLD(SAR(C_U32, _))
IR_FOLD(SAR(C_I32, _))
IR_FOLD(ROL(C_U32, _))
IR_FOLD(ROL(C_I32, _))
IR_FOLD(ROR(C_U32, _))
IR_FOLD(ROR(C_I32, _))
{
	if (op1_insn->val.i32 == 0 || op1_insn->val.i32 == -1) {
	}
}

IR_FOLD(SAR(C_U64, _))
IR_FOLD(SAR(C_I64, _))
IR_FOLD(ROL(C_U64, _))
IR_FOLD(ROL(C_I64, _))
IR_FOLD(ROR(C_U64, _))
IR_FOLD(ROR(C_I64, _))
{
	if (op1_insn->val.i64 == 0 || op1_insn->val.i64 == -1) {
	}
}
2367
/*
 * abs(x) compared against 0.
 * NOTE(review): for FLOAT/DOUBLE operands a NaN makes abs(x) >= 0 false
 * at runtime, so the `=> true` fold assumes non-NaN input — confirm
 * this matches the engine's FP-compare semantics.
 */
IR_FOLD(LT(ABS, C_I8))
IR_FOLD(LT(ABS, C_I16))
IR_FOLD(LT(ABS, C_I32))
IR_FOLD(LT(ABS, C_I64))
IR_FOLD(LT(ABS, C_FLOAT))
IR_FOLD(LT(ABS, C_DOUBLE))
{
	if (op2_insn->val.u64 == 0) {
		/* abs() < 0 => false */
	}
}

IR_FOLD(GE(ABS, C_I8))
IR_FOLD(GE(ABS, C_I16))
IR_FOLD(GE(ABS, C_I32))
IR_FOLD(GE(ABS, C_I64))
IR_FOLD(GE(ABS, C_FLOAT))
IR_FOLD(GE(ABS, C_DOUBLE))
{
	if (op2_insn->val.u64 == 0) {
		/* abs() >= 0 => true */
	}
}
2395
// TODO: conversions
/* Chained-conversion simplifications. */
IR_FOLD(FP2FP(FP2FP))
{
	if (IR_OPT_TYPE(opt) == IR_FLOAT) {
		/* (float)(double)f => f : widening then narrowing is lossless */
		IR_ASSERT(op1_insn->type == IR_DOUBLE);
		IR_ASSERT(ctx->ir_base[op1_insn->op1].type == IR_FLOAT);
		IR_FOLD_COPY(op1_insn->op1);
	}
}

IR_FOLD(FP2INT(INT2FP))
{
	ir_type dst_type = IR_OPT_TYPE(opt);
	ir_type src_type = ctx->ir_base[op1_insn->op1].type;

	if (ir_type_size[src_type] >= ir_type_size[op1_insn->type]) {
		/* source integer type can not fit into intermediate floating point */
	}
	/* (int)(double)i => i */
	if (src_type == dst_type) {
		IR_FOLD_COPY(op1_insn->op1);
	}
}

IR_FOLD(TRUNC(ZEXT))
IR_FOLD(TRUNC(SEXT))
{
	ir_type dst_type = IR_OPT_TYPE(opt);
	ir_type src_type = ctx->ir_base[op1_insn->op1].type;

	/* (int32_t)(int64_t)i => i */
	if (src_type == dst_type) {
		IR_FOLD_COPY(op1_insn->op1);
	} else if (ir_type_size[src_type] == ir_type_size[dst_type]) {
		/* same width: the round-trip is a pure re-tagging */
		opt = IR_OPT(IR_BITCAST, dst_type);
		op1 = op1_insn->op1;
	} else if (ir_type_size[src_type] > ir_type_size[dst_type]) {
		/* still narrowing overall: truncate the original directly */
		opt = IR_OPT(IR_TRUNC, dst_type);
		op1 = op1_insn->op1;
	} else {
		/* still widening overall: keep the original extension kind */
		opt = IR_OPT(op1_insn->op, dst_type);
		op1 = op1_insn->op1;
	}
}

IR_FOLD(TRUNC(BITCAST))
IR_FOLD(ZEXT(BITCAST))
IR_FOLD(SEXT(BITCAST))
{
	/* int-to-int BITCAST doesn't change bits: convert from its source */
	if (IR_IS_TYPE_INT(ctx->ir_base[op1_insn->op1].type)) {
		op1 = op1_insn->op1;
	}
}

IR_FOLD(BITCAST(BITCAST))
{
	ir_type dst_type = IR_OPT_TYPE(opt);
	ir_type src_type = ctx->ir_base[op1_insn->op1].type;

	if (src_type == dst_type) {
		IR_FOLD_COPY(op1_insn->op1);
	} else if (IR_IS_TYPE_INT(src_type) == IR_IS_TYPE_INT(dst_type)) {
		/* both sides same domain (int/int or fp/fp): collapse the chain */
		op1 = op1_insn->op1;
	}
}

IR_FOLD(TRUNC(TRUNC))
IR_FOLD(ZEXT(ZEXT))
{
	/* consecutive truncations/zero-extensions compose */
	op1 = op1_insn->op1;
}

IR_FOLD(SEXT(ZEXT))
{
	/* a zero-extended value has a clear sign bit, so SEXT == ZEXT */
	op1 = op1_insn->op1;
	opt = IR_OPT(IR_ZEXT, IR_OPT_TYPE(opt));
}
2488
2490{
2491 if (IR_IS_CONST_REF(op1_insn->op2)
2492 && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)
2493 && !(ctx->ir_base[op1_insn->op2].val.u64
2494 & (1ULL << ((ir_type_size[op1_insn->type] * 8) - 1)))) {
2495 /* SEXT(AND(_, 0b0*)) -> ZEXT(AND(_, 0b0*)) */
2496 opt = IR_OPT(IR_ZEXT, IR_OPT_TYPE(opt));
2498 }
2500}
2501
2503{
2504 if (IR_IS_CONST_REF(op1_insn->op2)
2505 && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)
2506 && ctx->ir_base[op1_insn->op2].val.u64 != 0) {
2507 opt = IR_OPT(IR_ZEXT, IR_OPT_TYPE(opt));
2509 }
2511}
2512
2513IR_FOLD(TRUNC(AND))
2514{
2515 if (IR_IS_CONST_REF(op1_insn->op2)) {
2516 size_t size = ir_type_size[IR_OPT_TYPE(opt)];
2517 uint64_t mask = ctx->ir_base[op1_insn->op2].val.u64;
2518
2519 if (size == 1) {
2520 if (mask == 0xff) {
2521 op1 = op1_insn->op1;
2523 }
2524 } else if (size == 2) {
2525 if (mask == 0xffff) {
2526 op1 = op1_insn->op1;
2528 }
2529 } else if (size == 4) {
2530 if (mask == 0xffffffff) {
2531 op1 = op1_insn->op1;
2533 }
2534 }
2535 }
2537}
2538
/*
 * AND with a mask that exactly covers the pre-extension width is redundant
 * after ZEXT (the high bits are already zero)...
 */
IR_FOLD(AND(ZEXT, C_I16))
IR_FOLD(AND(ZEXT, C_U16))
IR_FOLD(AND(ZEXT, C_I32))
IR_FOLD(AND(ZEXT, C_U32))
IR_FOLD(AND(ZEXT, C_I64))
IR_FOLD(AND(ZEXT, C_U64))
IR_FOLD(AND(ZEXT, C_ADDR))
{
	ir_type src_size = ir_type_size[ctx->ir_base[op1_insn->op1].type];

	if ((src_size == 1 && op2_insn->val.u64 == 0xff)
	 || (src_size == 2 && op2_insn->val.u64 == 0xffff)
	 || (src_size == 4 && op2_insn->val.u64 == 0xffffffff)) {
	}
}

/* ...and after SEXT the same mask turns the whole thing into a ZEXT. */
IR_FOLD(AND(SEXT, C_I16))
IR_FOLD(AND(SEXT, C_U16))
IR_FOLD(AND(SEXT, C_I32))
IR_FOLD(AND(SEXT, C_U32))
IR_FOLD(AND(SEXT, C_I64))
IR_FOLD(AND(SEXT, C_U64))
IR_FOLD(AND(SEXT, C_ADDR))
{
	ir_type src_size = ir_type_size[ctx->ir_base[op1_insn->op1].type];

	if ((src_size == 1 && op2_insn->val.u64 == 0xff)
	 || (src_size == 2 && op2_insn->val.u64 == 0xffff)
	 || (src_size == 4 && op2_insn->val.u64 == 0xffffffff)) {
		opt = IR_OPT(IR_ZEXT, IR_OPT_TYPE(opt));
		op1 = op1_insn->op1;
		op2 = IR_UNUSED;
	}
}
/* AND with ((unsigned)-1 >> c) after SHR by c is redundant: the shift
 * already zeroed those high bits. */
IR_FOLD(AND(SHR, C_I8))
IR_FOLD(AND(SHR, C_U8))
{
	if (IR_IS_CONST_REF(op1_insn->op2)) {
		if (((uint8_t)-1) >> ctx->ir_base[op1_insn->op2].val.u8 == op2_insn->val.u8) {
		}
	}
}

IR_FOLD(AND(SHR, C_I16))
IR_FOLD(AND(SHR, C_U16))
{
	if (IR_IS_CONST_REF(op1_insn->op2)) {
		if (((uint16_t)-1) >> ctx->ir_base[op1_insn->op2].val.u16 == op2_insn->val.u16) {
		}
	}
}

IR_FOLD(AND(SHR, C_I32))
IR_FOLD(AND(SHR, C_U32))
{
	if (IR_IS_CONST_REF(op1_insn->op2)) {
		if (((uint32_t)-1) >> ctx->ir_base[op1_insn->op2].val.u32 == op2_insn->val.u32) {
		}
	}
}

IR_FOLD(AND(SHR, C_I64))
IR_FOLD(AND(SHR, C_U64))
{
	if (IR_IS_CONST_REF(op1_insn->op2)) {
		if (((uint64_t)-1) >> ctx->ir_base[op1_insn->op2].val.u64 == op2_insn->val.u64) {
		}
	}
}
2620
/*
 * Compare of a widened float against a double constant: when the constant
 * survives a double->float->double round-trip unchanged, compare in float
 * directly.  A NaN constant fails the equality check, so it is never
 * narrowed here.
 */
IR_FOLD(EQ(FP2FP, C_DOUBLE))
IR_FOLD(NE(FP2FP, C_DOUBLE))
IR_FOLD(LT(FP2FP, C_DOUBLE))
IR_FOLD(GE(FP2FP, C_DOUBLE))
IR_FOLD(LE(FP2FP, C_DOUBLE))
IR_FOLD(GT(FP2FP, C_DOUBLE))
IR_FOLD(ULT(FP2FP, C_DOUBLE))
IR_FOLD(UGE(FP2FP, C_DOUBLE))
IR_FOLD(ULE(FP2FP, C_DOUBLE))
IR_FOLD(UGT(FP2FP, C_DOUBLE))
{
	IR_ASSERT(op1_insn->type == IR_DOUBLE);
	IR_ASSERT(ctx->ir_base[op1_insn->op1].type == IR_FLOAT);
	if (op2_insn->val.d == (double)(float)op2_insn->val.d) {
		op1 = op1_insn->op1;
		op2 = ir_const_float(ctx, (float)op2_insn->val.d);
	}
}
2641
// TODO: Reassociation
/*
 * Reassociate chains of ADD/SUB with two constants into a single constant
 * operand.  Unsigned variants rely on modular wraparound; the signed
 * variants additionally canonicalize a negative folded constant into the
 * opposite operation with a positive constant (guarding INT64_MIN, which
 * cannot be negated).  Symbolic constants are excluded throughout.
 */
IR_FOLD(ADD(ADD, C_U8))
IR_FOLD(ADD(ADD, C_U16))
IR_FOLD(ADD(ADD, C_U32))
IR_FOLD(ADD(ADD, C_U64))
IR_FOLD(ADD(ADD, C_ADDR))
{
	if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
		/* (x + c1) + c2 => x + (c1 + c2) */
		val.u64 = ctx->ir_base[op1_insn->op2].val.u64 + op2_insn->val.u64;
		op1 = op1_insn->op1;
		op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
	}
}

IR_FOLD(ADD(ADD, C_I8))
IR_FOLD(ADD(ADD, C_I16))
IR_FOLD(ADD(ADD, C_I32))
IR_FOLD(ADD(ADD, C_I64))
{
	if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
		/* (x + c1) + c2 => x + (c1 + c2) */
		val.i64 = ctx->ir_base[op1_insn->op2].val.u64 + op2_insn->val.u64;
		op1 = op1_insn->op1;
		op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
	}
}

IR_FOLD(ADD(SUB, C_U8))
IR_FOLD(ADD(SUB, C_U16))
IR_FOLD(ADD(SUB, C_U32))
IR_FOLD(ADD(SUB, C_U64))
IR_FOLD(ADD(SUB, C_ADDR))
{
	if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
		/* (x - c1) + c2 => x + (c2 - c1) */
		val.u64 = op2_insn->val.u64 - ctx->ir_base[op1_insn->op2].val.u64;
		op1 = op1_insn->op1;
		op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
	} else if (IR_IS_CONST_REF(op1_insn->op1) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op1].op)) {
		/* (c1 - x) + c2 => (c1 + c2) - x */
		val.u64 = ctx->ir_base[op1_insn->op1].val.u64 + op2_insn->val.u64;
		opt++; /* ADD -> SUB */
		op2 = op1_insn->op2;
		op1 = ir_const(ctx, val, IR_OPT_TYPE(opt));
	}
}

IR_FOLD(ADD(SUB, C_I8))
IR_FOLD(ADD(SUB, C_I16))
IR_FOLD(ADD(SUB, C_I32))
IR_FOLD(ADD(SUB, C_I64))
{
	if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
		/* (x - c1) + c2 => x + (c2 - c1) */
		val.i64 = op2_insn->val.u64 - ctx->ir_base[op1_insn->op2].val.u64;
		if (val.i64 < 0 && val.i64 != INT64_MIN) {
			/* prefer a positive constant: x + (-c) => x - c */
			val.i64 = -val.i64;
			opt++; /* ADD -> SUB */
		}
		op1 = op1_insn->op1;
		op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
	} else if (IR_IS_CONST_REF(op1_insn->op1) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op1].op)) {
		/* (c1 - x) + c2 => (c1 + c2) - x */
		val.i64 = ctx->ir_base[op1_insn->op1].val.u64 + op2_insn->val.u64;
		opt++; /* ADD -> SUB */
		op2 = op1_insn->op2;
		op1 = ir_const(ctx, val, IR_OPT_TYPE(opt));
	}
}

IR_FOLD(SUB(ADD, C_U8))
IR_FOLD(SUB(ADD, C_U16))
IR_FOLD(SUB(ADD, C_U32))
IR_FOLD(SUB(ADD, C_U64))
IR_FOLD(SUB(ADD, C_ADDR))
{
	if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
		/* (x + c1) - c2 => x + (c1 - c2) */
		val.u64 = ctx->ir_base[op1_insn->op2].val.u64 - op2_insn->val.u64;
		opt--; /* SUB -> ADD */
		op1 = op1_insn->op1;
		op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
	}
}

IR_FOLD(SUB(ADD, C_I8))
IR_FOLD(SUB(ADD, C_I16))
IR_FOLD(SUB(ADD, C_I32))
IR_FOLD(SUB(ADD, C_I64))
{
	if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
		/* (x + c1) - c2 => x + (c1 - c2) */
		val.i64 = ctx->ir_base[op1_insn->op2].val.u64 - op2_insn->val.u64;
		if (val.i64 < 0 && val.i64 != INT64_MIN) {
			/* keep SUB with a positive constant */
			val.i64 = -val.i64;
		} else {
			opt--; /* SUB -> ADD */
		}
		op1 = op1_insn->op1;
		op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
	}
}

IR_FOLD(SUB(C_U8, ADD))
IR_FOLD(SUB(C_U16, ADD))
IR_FOLD(SUB(C_U32, ADD))
IR_FOLD(SUB(C_U64, ADD))
IR_FOLD(SUB(C_ADDR, ADD))
{
	if (IR_IS_CONST_REF(op2_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op2_insn->op2].op)) {
		/* c1 - (x + c2) => (c1 - c2) - x */
		val.u64 = op1_insn->val.u64 - ctx->ir_base[op2_insn->op2].val.u64;
		op2 = op2_insn->op1;
		op1 = ir_const(ctx, val, IR_OPT_TYPE(opt));
	}
}

IR_FOLD(SUB(C_I8, ADD))
IR_FOLD(SUB(C_I16, ADD))
IR_FOLD(SUB(C_I32, ADD))
IR_FOLD(SUB(C_I64, ADD))
{
	if (IR_IS_CONST_REF(op2_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op2_insn->op2].op)) {
		/* c1 - (x + c2) => (c1 - c2) - x */
		val.i64 = op1_insn->val.u64 - ctx->ir_base[op2_insn->op2].val.u64;
		op2 = op2_insn->op1;
		op1 = ir_const(ctx, val, IR_OPT_TYPE(opt));
	}
}
2790
2791IR_FOLD(SUB(SUB, C_U8))
2792IR_FOLD(SUB(SUB, C_U16))
2793IR_FOLD(SUB(SUB, C_U32))
2794IR_FOLD(SUB(SUB, C_U64))
2795IR_FOLD(SUB(SUB, C_ADDR))
2796{
2797 if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
2798 /* (x - c1) - c2 => x - (c1 + c2) */
2799 val.u64 = ctx->ir_base[op1_insn->op2].val.u64 + op2_insn->val.u64;
2800 if (val.i64 < 0 && val.i64 != INT64_MIN) {
2801 val.i64 = -val.i64;
2802 opt--; /* SUB -> ADD */
2803 }
2804 op1 = op1_insn->op1;
2805 op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2807 } else if (IR_IS_CONST_REF(op1_insn->op1) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op1].op)) {
2808 /* (c1 - x) - c2 => (c1 - c2) - x */
2809 val.u64 = ctx->ir_base[op1_insn->op1].val.u64 - op2_insn->val.u64;
2810 op2 = op1_insn->op2;
2811 op1 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2813 }
2815}
2816
2817IR_FOLD(SUB(SUB, C_I8))
2818IR_FOLD(SUB(SUB, C_I16))
2819IR_FOLD(SUB(SUB, C_I32))
2820IR_FOLD(SUB(SUB, C_I64))
2821{
2822 if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
2823 /* (x - c1) - c2 => x - (c1 + c2) */
2824 val.i64 = ctx->ir_base[op1_insn->op2].val.u64 + op2_insn->val.u64;
2825 if (val.i64 < 0 && val.i64 != INT64_MIN) {
2826 val.i64 = -val.i64;
2827 opt--; /* SUB -> ADD */
2828 }
2829 op1 = op1_insn->op1;
2830 op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2832 } else if (IR_IS_CONST_REF(op1_insn->op1) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op1].op)) {
2833 /* (c1 - x) - c2 => (c1 - c2) - x */
2834 val.i64 = ctx->ir_base[op1_insn->op1].val.u64 - op2_insn->val.u64;
2835 op2 = op1_insn->op2;
2836 op1 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2838 }
2840}
2841
2842IR_FOLD(SUB(C_U8, SUB))
2843IR_FOLD(SUB(C_U16, SUB))
2844IR_FOLD(SUB(C_U32, SUB))
2845IR_FOLD(SUB(C_U64, SUB))
2846IR_FOLD(SUB(C_ADDR, SUB))
2847{
2848 if (IR_IS_CONST_REF(op2_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op2_insn->op2].op)) {
2849 /* c1 - (x - c2) => (c1 + c2) - x */
2850 val.u64 = op1_insn->val.u64 + ctx->ir_base[op2_insn->op2].val.u64;
2851 op2 = op2_insn->op1;
2852 op1 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2854 } else if (IR_IS_CONST_REF(op2_insn->op1) && !IR_IS_SYM_CONST(ctx->ir_base[op2_insn->op1].op)) {
2855 /* c1 - (c2 - x) => x + (c1 - c2) */
2856 val.u64 = op1_insn->val.u64 - ctx->ir_base[op2_insn->op1].val.u64;
2857 if (val.i64 < 0 && val.i64 != INT64_MIN) {
2858 val.i64 = -val.i64;
2859 opt++; /* ADD -> SUB */
2860 }
2861 op1 = op2_insn->op2;
2862 op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2864 }
2866}
2867
2868IR_FOLD(SUB(C_I8, SUB))
2869IR_FOLD(SUB(C_I16, SUB))
2870IR_FOLD(SUB(C_I32, SUB))
2871IR_FOLD(SUB(C_I64, SUB))
2872{
2873 if (IR_IS_CONST_REF(op2_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op2_insn->op2].op)) {
2874 /* c1 - (x - c2) => (c1 + c2) - x */
2875 val.i64 = op1_insn->val.u64 + ctx->ir_base[op2_insn->op2].val.u64;
2876 op2 = op2_insn->op1;
2877 op1 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2879 } else if (IR_IS_CONST_REF(op2_insn->op1) && !IR_IS_SYM_CONST(ctx->ir_base[op2_insn->op1].op)) {
2880 /* c1 - (c2 - x) => x + (c1 - c2) */
2881 val.i64 = op1_insn->val.u64 - ctx->ir_base[op2_insn->op1].val.u64;
2882 if (val.i64 < 0 && val.i64 != INT64_MIN) {
2883 val.i64 = -val.i64;
2884 opt++; /* ADD -> SUB */
2885 }
2886 op1 = op2_insn->op2;
2887 op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2889 }
2891}
2892
2893IR_FOLD(MUL(MUL, C_U8))
2894IR_FOLD(MUL(MUL, C_U16))
2895IR_FOLD(MUL(MUL, C_U32))
2896IR_FOLD(MUL(MUL, C_U64))
2897IR_FOLD(MUL(MUL, C_ADDR))
2898{
2899 if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
2900 /* (x * c1) * c2 => x * (c1 * c2) */
2901 val.u64 = ctx->ir_base[op1_insn->op2].val.u64 * op2_insn->val.u64;
2902 op1 = op1_insn->op1;
2903 op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2905 }
2907}
2908
2909IR_FOLD(MUL(MUL, C_I8))
2910IR_FOLD(MUL(MUL, C_I16))
2911IR_FOLD(MUL(MUL, C_I32))
2912IR_FOLD(MUL(MUL, C_I64))
2913{
2914 if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
2915 /* (x * c1) * c2 => x * (c1 * c2) */
2916 val.i64 = ctx->ir_base[op1_insn->op2].val.u64 * op2_insn->val.u64;
2917 op1 = op1_insn->op1;
2918 op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2920 }
2922}
2923
2924IR_FOLD(AND(AND, C_U8))
2925IR_FOLD(AND(AND, C_U16))
2926IR_FOLD(AND(AND, C_U32))
2927IR_FOLD(AND(AND, C_U64))
2928IR_FOLD(AND(AND, C_I8))
2929IR_FOLD(AND(AND, C_I16))
2930IR_FOLD(AND(AND, C_I32))
2931IR_FOLD(AND(AND, C_I64))
2932IR_FOLD(AND(AND, C_ADDR))
2933{
2934 if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
2935 /* (x & c1) & c2 => x & (c1 & c2) */
2936 val.u64 = ctx->ir_base[op1_insn->op2].val.u64 & op2_insn->val.u64;
2937 op1 = op1_insn->op1;
2938 op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2940 }
2942}
2943
2944IR_FOLD(OR(OR, C_U8))
2945IR_FOLD(OR(OR, C_U16))
2946IR_FOLD(OR(OR, C_U32))
2947IR_FOLD(OR(OR, C_U64))
2948IR_FOLD(OR(OR, C_I8))
2949IR_FOLD(OR(OR, C_I16))
2950IR_FOLD(OR(OR, C_I32))
2951IR_FOLD(OR(OR, C_I64))
2952IR_FOLD(OR(OR, C_ADDR))
2953{
2954 if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
2955 /* (x | c1) | c2 => x | (c1 | c2) */
2956 val.u64 = ctx->ir_base[op1_insn->op2].val.u64 | op2_insn->val.u64;
2957 op1 = op1_insn->op1;
2958 op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2960 }
2962}
2963
2964IR_FOLD(XOR(XOR, C_U8))
2965IR_FOLD(XOR(XOR, C_U16))
2966IR_FOLD(XOR(XOR, C_U32))
2967IR_FOLD(XOR(XOR, C_U64))
2968IR_FOLD(XOR(XOR, C_I8))
2969IR_FOLD(XOR(XOR, C_I16))
2970IR_FOLD(XOR(XOR, C_I32))
2971IR_FOLD(XOR(XOR, C_I64))
2972IR_FOLD(XOR(XOR, C_ADDR))
2973{
2974 if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
2975 /* (x ^ c1) ^ c2 => x ^ (c1 ^ c2) */
2976 val.u64 = ctx->ir_base[op1_insn->op2].val.u64 ^ op2_insn->val.u64;
2977 op1 = op1_insn->op1;
2978 op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2980 }
2982}
2983
2984IR_FOLD(AND(AND, _))
2985IR_FOLD(OR(OR, _))
2986IR_FOLD(MIN(MIN, _))
2987IR_FOLD(MAX(MAX, _))
2988{
2989 if (op1_insn->op1 == op2 || op1_insn->op2 == op2) {
2991 }
2993}
2994
2995IR_FOLD(XOR(XOR, _))
2996{
2997 if (op1_insn->op1 == op2) {
2998 IR_FOLD_COPY(op1_insn->op2);
2999 } else if (op1_insn->op2 == op2) {
3000 IR_FOLD_COPY(op1_insn->op1);
3001 }
3003}
3004
3005/* ROL/ROR */
3006IR_FOLD(OR(SHL, SHR))
3007IR_FOLD(OR(SHR, SHL))
3008IR_FOLD(ADD(SHL, SHR))
3009IR_FOLD(ADD(SHR, SHL))
3010{
3011 if (op1_insn->op1 == op2_insn->op1) {
3012 if (IR_IS_CONST_REF(op1_insn->op2) && IR_IS_CONST_REF(op2_insn->op2)) {
3013 if (ctx->ir_base[op1_insn->op2].val.u64 + ctx->ir_base[op2_insn->op2].val.u64 ==
3014 ir_type_size[IR_OPT_TYPE(opt)] * 8) {
3015 /* (x << c) | (x >> (32 - c)) -> ROL(x, c) */
3016 op1 = op1_insn->op1;
3017 op2 = op1_insn->op2;
3018 opt = op1_insn->opt + 3; /* SHL -> ROL, SHR -> ROR */
3020 }
3021 } else if (ctx->ir_base[op2_insn->op2].op == IR_SUB
3022 && IR_IS_CONST_REF(ctx->ir_base[op2_insn->op2].op1)
3023 && ctx->ir_base[op2_insn->op2].op2 == op1_insn->op2
3024 && ctx->ir_base[ctx->ir_base[op2_insn->op2].op1].val.u64 == ir_type_size[IR_OPT_TYPE(opt)] * 8) {
3025 /* (x << y) | (x >> (32 - y)) -> ROL(x, y) */
3026 op1 = op1_insn->op1;
3027 op2 = op1_insn->op2;
3028 opt = op1_insn->opt + 3; /* SHL -> ROL, SHR -> ROR */
3030 } else if (ctx->ir_base[op1_insn->op2].op == IR_SUB
3031 && IR_IS_CONST_REF(ctx->ir_base[op1_insn->op2].op1)
3032 && ctx->ir_base[op1_insn->op2].op2 == op2_insn->op2
3033 && ctx->ir_base[ctx->ir_base[op1_insn->op2].op1].val.u64 == ir_type_size[IR_OPT_TYPE(opt)] * 8) {
3034 /* (x << (32 - y)) | (x >> y) -> ROR(x, y) */
3035 op1 = op2_insn->op1;
3036 op2 = op2_insn->op2;
3037 opt = op2_insn->opt + 3; /* SHL -> ROL, SHR -> ROR */
3039 }
3040 }
3042}
3043
3044
3045/* Swap operands (move lower ref to op2) for better CSE */
3046IR_FOLD(MUL(_, _))
3047IR_FOLD_NAMED(swap_ops)
3048{
3049 if (op1 < op2) { /* move lower ref to op2 */
3050 SWAP_REFS(op1, op2);
3052 }
3054}
3055
3056IR_FOLD(ADD_OV(_, _))
3057IR_FOLD(MUL_OV(_, _))
3058{
3059 if (op1 < op2) { /* move lower ref to op2 */
3060 SWAP_REFS(op1, op2);
3062 }
3063 /* skip CSE ??? */
3065}
3066
3067IR_FOLD(ADD(_, _))
3068{
3069 if (IR_IS_TYPE_INT(IR_OPT_TYPE(opt)) && op1 == op2) {
3070 /* a + a => a * 2 */
3072 val.u64 = 2;
3073 opt = IR_MUL | (opt & IR_OPT_TYPE_MASK);
3074 op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
3076 }
3077 IR_FOLD_DO_NAMED(swap_ops);
3078}
3079
3080IR_FOLD(SUB(_, _))
3081{
3082 if (IR_IS_TYPE_INT(IR_OPT_TYPE(opt)) && op1 == op2) {
3083 IR_FOLD_CONST_U(0);
3084 }
3086}
3087
3088IR_FOLD(SUB_OV(_, _))
3089{
3090 if (op1 == op2) {
3091 IR_FOLD_CONST_U(0);
3092 }
3093 /* skip CSE ??? */
3095}
3096
3097/* Binary operations with op1 == op2 */
3098IR_FOLD(AND(_,_))
3099IR_FOLD(OR(_,_))
3100IR_FOLD(MIN(_, _))
3101IR_FOLD(MAX(_, _))
3102{
3103 /* a & a => a */
3104 if (op1 == op2) {
3106 }
3107 IR_FOLD_DO_NAMED(swap_ops);
3108}
3109
/* XOR is self-inverse; otherwise canonicalize operand order for CSE. */
IR_FOLD(XOR(_,_))
{
	/* a xor a => 0 */
	if (op1 == op2) {
		IR_FOLD_CONST_U(0);
	}
	IR_FOLD_DO_NAMED(swap_ops);
}
3118
3119IR_FOLD(EQ(_, _))
3120IR_FOLD(NE(_, _))
3121{
3122 if (op1 != op2) {
3123 IR_FOLD_DO_NAMED(swap_ops);
3124 } else if (IR_IS_TYPE_INT(op1_insn->type)) {
3125 /* a == a => true */
3126 IR_FOLD_BOOL((opt & IR_OPT_OP_MASK) == IR_EQ);
3127 }
3129}
3130
3131IR_FOLD(LT(_, _))
3132IR_FOLD(GE(_, _))
3133IR_FOLD(LE(_, _))
3134IR_FOLD(GT(_, _))
3135{
3136 if (op1 == op2) {
3137 if (IR_IS_TYPE_INT(op1_insn->type)) {
3138 /* a >= a => true (two low bits are differ) */
3139 IR_FOLD_BOOL((opt ^ (opt >> 1)) & 1);
3140 }
3141 } else if (op1 < op2) { /* move lower ref to op2 */
3142 SWAP_REFS(op1, op2);
3143 opt ^= 3; /* [U]LT <-> [U]GT, [U]LE <-> [U]GE */
3145 }
3147}
3148
3149IR_FOLD(ULT(_, _))
3150IR_FOLD(UGE(_, _))
3151IR_FOLD(ULE(_, _))
3152IR_FOLD(UGT(_, _))
3153{
3154 if (op1 == op2) {
3155 /* a >= a => true (two low bits are differ) */
3156 IR_FOLD_BOOL((opt ^ (opt >> 1)) & 1);
3157 } else if (op1 < op2) { /* move lower ref to op2 */
3158 SWAP_REFS(op1, op2);
3159 opt ^= 3; /* [U]LT <-> [U]GT, [U]LE <-> [U]GE */
3161 }
3163}
3164
3165IR_FOLD(COND(_, _)) // TODO: COND(_, _, _)
3166{
3167 if (op2 == op3) {
3169 }
3171}
#define max(a, b)
Definition exif.c:60
zend_ffi_type * type
Definition ffi.c:3812
new_type size
Definition ffi.c:4365
zend_string * res
Definition ffi.c:4692
zval * val
Definition ffi.c:4262
_(string $message)
#define NOT
Definition glob.c:94
#define ROL(n, x)
#define SHR(b, x)
Definition hash_sha.c:115
#define IR_FOLD_CONST_U(_val)
Definition ir.c:885
#define IR_FOLD_CONST_D(_val)
Definition ir.c:895
#define IR_FOLD(X)
Definition ir.c:876
#define IR_FOLD_NEXT
Definition ir.c:919
#define IR_FOLD_RESTART
Definition ir.c:916
#define IR_FOLD_BOOL(cond)
Definition ir.c:911
#define IR_FOLD_EMIT
Definition ir.c:918
#define IR_FOLD_CONST_F(_val)
Definition ir.c:900
ir_ref ir_const(ir_ctx *ctx, ir_val val, uint8_t type)
Definition ir.c:557
#define IR_FOLD_DO_NAMED(name)
Definition ir.c:915
#define IR_FOLD_NAMED(name)
Definition ir.c:914
#define IR_FOLD_COPY(op)
Definition ir.c:906
ir_ref ir_const_float(ir_ctx *ctx, float c)
Definition ir.c:630
const uint8_t ir_type_size[IR_LAST_TYPE]
Definition ir.c:61
#define IR_FOLD_CONST_I(_val)
Definition ir.c:890
enum _ir_type ir_type
#define IR_IS_TYPE_INT(t)
Definition ir.h:145
#define IR_TRUE
Definition ir.h:398
#define IR_UNUSED
Definition ir.h:395
#define IR_OPT_OP_MASK
Definition ir.h:380
#define IR_OPT(op, type)
Definition ir.h:385
#define IR_IS_CONST_REF(ref)
Definition ir.h:392
#define IR_IS_TYPE_SIGNED(t)
Definition ir.h:144
#define IR_OPT_TYPE(opt)
Definition ir.h:387
#define IR_FALSE
Definition ir.h:397
#define IR_OPT_INPUTS_SHIFT
Definition ir.h:383
#define IR_OPT_TYPE_MASK
Definition ir.h:381
IR_ALWAYS_INLINE uint32_t ir_rol32(uint32_t op1, uint32_t op2)
Definition ir_private.h:79
#define SWAP_REFS(_ref1, _ref2)
IR_ALWAYS_INLINE bool ir_const_is_true(const ir_insn *v)
Definition ir_private.h:893
IR_ALWAYS_INLINE uint64_t ir_rol64(uint64_t op1, uint64_t op2)
Definition ir_private.h:84
IR_ALWAYS_INLINE uint32_t ir_ror32(uint32_t op1, uint32_t op2)
Definition ir_private.h:99
#define IR_IS_SYM_CONST(op)
Definition ir_private.h:889
#define IR_ASSERT(x)
Definition ir_private.h:17
IR_ALWAYS_INLINE uint16_t ir_ror16(uint16_t op1, uint16_t op2)
Definition ir_private.h:94
IR_ALWAYS_INLINE uint8_t ir_rol8(uint8_t op1, uint8_t op2)
Definition ir_private.h:69
IR_ALWAYS_INLINE uint64_t ir_ror64(uint64_t op1, uint64_t op2)
Definition ir_private.h:104
IR_ALWAYS_INLINE uint16_t ir_rol16(uint16_t op1, uint16_t op2)
Definition ir_private.h:74
IR_ALWAYS_INLINE uint8_t ir_ror8(uint8_t op1, uint8_t op2)
Definition ir_private.h:89
#define min(a, b)
#define SUB
#define ADD
#define MUL
#define AND
#define OR
#define XOR
#define DIV
#define NEG
#define SAR
#define SHL
#define ROR
#define SEXT(s, v, p)
Definition softmagic.c:109
#define MIN(a, b)
#define MAX(a, b)
op2
op1