28#define ARM_ABI_INFO " ABI:softfp"
30#define ARM_ABI_INFO " ABI:hardfp"
35#if (defined SLJIT_CONFIG_ARM_V7 && SLJIT_CONFIG_ARM_V7)
37#elif (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
40#error "Internal error: Unknown ARM architecture"
48#define TMP_REG1 (SLJIT_NUMBER_OF_REGISTERS + 2)
49#define TMP_REG2 (SLJIT_NUMBER_OF_REGISTERS + 3)
50#define TMP_PC (SLJIT_NUMBER_OF_REGISTERS + 4)
52#define TMP_FREG1 (SLJIT_NUMBER_OF_FLOAT_REGISTERS + 1)
53#define TMP_FREG2 (SLJIT_NUMBER_OF_FLOAT_REGISTERS + 2)
57#define CONST_POOL_ALIGNMENT 8
58#define CONST_POOL_EMPTY 0xffffffff
60#define ALIGN_INSTRUCTION(ptr) \
61 (sljit_ins*)(((sljit_ins)(ptr) + (CONST_POOL_ALIGNMENT * sizeof(sljit_ins)) - 1) & ~((CONST_POOL_ALIGNMENT * sizeof(sljit_ins)) - 1))
62#define MAX_DIFFERENCE(max_diff) \
63 (((max_diff) / (sljit_s32)sizeof(sljit_ins)) - (CONST_POOL_ALIGNMENT - 1))
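/* The ARMv6 path below materializes constants from a pc-relative literal pool:
   ALIGN_INSTRUCTION rounds the pool start up to CONST_POOL_ALIGNMENT words and
   MAX_DIFFERENCE converts a maximum byte distance into an instruction count,
   keeping the alignment slack in reserve. */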
66static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 5] = {
67 0, 0, 1, 2, 3, 11, 10, 9, 8, 7, 6, 5, 4, 13, 12, 14, 15
70static const sljit_u8 freg_map[((SLJIT_NUMBER_OF_FLOAT_REGISTERS + 2) << 1) + 1] = {
72 0, 1, 2, 3, 4, 5, 15, 14, 13, 12, 11, 10, 9, 8,
74 0, 1, 2, 3, 4, 5, 15, 14, 13, 12, 11, 10, 9, 8,
78static const sljit_u8 freg_ebit_map[((SLJIT_NUMBER_OF_FLOAT_REGISTERS + 2) << 1) + 1] = {
80 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
82 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
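/* reg_map translates SLJIT register indices to ARM core register numbers; the
   last three entries correspond to the temporaries defined above (TMP_REG1 = r12/ip,
   TMP_REG2 = r14/lr) and TMP_PC = r15/pc. freg_map and freg_ebit_map are duplicated:
   the second half of each table lets the same index address the other single
   precision half of a double register, and the extra bit feeds the D/N/M
   extension bits used by the VFP encoding macros below. */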
86#define RM(rm) ((sljit_ins)reg_map[rm])
87#define RM8(rm) ((sljit_ins)reg_map[rm] << 8)
88#define RD(rd) ((sljit_ins)reg_map[rd] << 12)
89#define RN(rn) ((sljit_ins)reg_map[rn] << 16)
91#define VM(vm) (((sljit_ins)freg_map[vm]) | ((sljit_ins)freg_ebit_map[vm] << 5))
92#define VD(vd) (((sljit_ins)freg_map[vd] << 12) | ((sljit_ins)freg_ebit_map[vd] << 22))
93#define VN(vn) (((sljit_ins)freg_map[vn] << 16) | ((sljit_ins)freg_ebit_map[vn] << 7))
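/* These macros place a mapped register number into the standard ARM fields:
   Rm (bits 0-3), Rs (bits 8-11), Rd (bits 12-15) and Rn (bits 16-19); the VFP
   variants also set the D/N/M extension bits (bits 22, 7 and 5). As an
   illustration, assuming SLJIT_R0..SLJIT_R2 map to r0..r2 as in reg_map above,
   ADD | RD(SLJIT_R0) | RN(SLJIT_R1) | RM(SLJIT_R2) yields 0xe0810002,
   i.e. "add r0, r1, r2". */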
101#define COND_MASK 0xf0000000
102#define CONDITIONAL 0xe0000000
103#define PUSH_POOL 0xff000000
105#define ADC 0xe0a00000
106#define ADD 0xe0800000
107#define AND 0xe0000000
109#define BIC 0xe1c00000
110#define BKPT 0xe1200070
112#define BLX 0xe12fff30
114#define CLZ 0xe16f0f10
115#define CMN 0xe1600000
116#define CMP 0xe1400000
117#define EOR 0xe0200000
118#define LDR 0xe5100000
119#define LDR_POST 0xe4100000
120#define LDREX 0xe1900f9f
121#define LDREXB 0xe1d00f9f
122#define LDREXH 0xe1f00f9f
123#define MLA 0xe0200090
124#define MOV 0xe1a00000
125#define MUL 0xe0000090
126#define MVN 0xe1e00000
127#define NOP 0xe1a00000
128#define ORR 0xe1800000
129#define PUSH 0xe92d0000
130#define POP 0xe8bd0000
131#define REV 0xe6bf0f30
132#define REV16 0xe6bf0fb0
133#define RSB 0xe0600000
134#define RSC 0xe0e00000
135#define SBC 0xe0c00000
136#define SMULL 0xe0c00090
137#define STR 0xe5000000
138#define STREX 0xe1800f90
139#define STREXB 0xe1c00f90
140#define STREXH 0xe1e00f90
141#define SUB 0xe0400000
142#define SXTB 0xe6af0070
143#define SXTH 0xe6bf0070
144#define TST 0xe1000000
145#define UMULL 0xe0800090
146#define UXTB 0xe6ef0070
147#define UXTH 0xe6ff0070
148#define VABS_F32 0xeeb00ac0
149#define VADD_F32 0xee300a00
150#define VAND 0xf2000110
151#define VCMP_F32 0xeeb40a40
152#define VCVT_F32_S32 0xeeb80ac0
153#define VCVT_F32_U32 0xeeb80a40
154#define VCVT_F64_F32 0xeeb70ac0
155#define VCVT_S32_F32 0xeebd0ac0
156#define VDIV_F32 0xee800a00
157#define VDUP 0xee800b10
158#define VDUP_s 0xf3b00c00
159#define VEOR 0xf3000110
160#define VLD1 0xf4200000
161#define VLD1_r 0xf4a00c00
162#define VLD1_s 0xf4a00000
163#define VLDR_F32 0xed100a00
164#define VMOV_F32 0xeeb00a40
165#define VMOV 0xee000a10
166#define VMOV2 0xec400a10
167#define VMOV_i 0xf2800010
168#define VMOV_s 0xee000b10
169#define VMOVN 0xf3b20200
170#define VMRS 0xeef1fa10
171#define VMUL_F32 0xee200a00
172#define VNEG_F32 0xeeb10a40
173#define VORR 0xf2200110
174#define VPOP 0xecbd0b00
175#define VPUSH 0xed2d0b00
176#define VSHLL 0xf2800a10
177#define VSHR 0xf2800010
178#define VSRA 0xf2800110
179#define VST1 0xf4000000
180#define VST1_s 0xf4800000
181#define VSTR_F32 0xed000a00
182#define VSUB_F32 0xee300a40
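/* Each value above is a complete ARM or VFP/NEON instruction word with the
   condition field preset to AL (see CONDITIONAL); the emitters OR the register
   and operand fields into these templates, and conditional variants clear
   COND_MASK before inserting their own condition code. */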
184#if (defined SLJIT_CONFIG_ARM_V7 && SLJIT_CONFIG_ARM_V7)
186#define MOVT 0xe3400000
187#define MOVW 0xe3000000
188#define RBIT 0xe6ff0f30
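/* MOVW/MOVT (and RBIT) are only used on the ARMv7 path: MOVW loads a 16-bit
   immediate into the low half of a register and MOVT writes the high half, so
   the pair can materialize any 32-bit constant without a literal pool. */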
191#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
208#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
222 SLJIT_ASSERT(compiler->cpool_fill > 0 && compiler->cpool_fill <= CPOOL_SIZE);
226 *inst = 0xff000000 | compiler->cpool_fill;
235 cpool_ptr = compiler->cpool;
236 cpool_end = cpool_ptr + compiler->cpool_fill;
237 while (cpool_ptr < cpool_end) {
241 *inst = *cpool_ptr++;
244 compiler->cpool_fill = 0;
272 else if (compiler->cpool_fill > 0) {
273 cpool_ptr = compiler->cpool;
274 cpool_end = cpool_ptr + compiler->cpool_fill;
275 cpool_unique_ptr = compiler->cpool_unique;
277 if ((*cpool_ptr == literal) && !(*cpool_unique_ptr)) {
278 cpool_index = (sljit_uw)(cpool_ptr - compiler->cpool);
283 } while (cpool_ptr < cpool_end);
286 if (cpool_index == CPOOL_SIZE) {
288 if (compiler->cpool_fill < CPOOL_SIZE) {
289 cpool_index = compiler->cpool_fill;
290 compiler->cpool_fill++;
295 compiler->cpool_fill = 1;
303 *ptr = inst | cpool_index;
305 compiler->cpool[cpool_index] = literal;
306 compiler->cpool_unique[cpool_index] = 0;
308 compiler->cpool_diff = compiler->size;
319 SLJIT_ASSERT(compiler->cpool_fill < CPOOL_SIZE && (inst & 0xfff) == 0);
323 *ptr = inst | compiler->cpool_fill;
325 compiler->cpool[compiler->cpool_fill] = literal;
326 compiler->cpool_unique[compiler->cpool_fill] = 1;
327 compiler->cpool_fill++;
329 compiler->cpool_diff = compiler->size;
337 return push_cpool(compiler);
355 sljit_uw* clear_const_pool = const_pool;
356 sljit_uw* clear_const_pool_end = const_pool + cpool_size;
362 while (clear_const_pool < clear_const_pool_end)
363 *clear_const_pool++ = (sljit_uw)(-1);
365 while (last_pc_patch < code_ptr) {
367 if ((*last_pc_patch & 0x0e0f0000) == 0x040f0000) {
368 diff = (sljit_uw)(const_pool - last_pc_patch);
369 ind = (*last_pc_patch) & 0xfff;
372 SLJIT_ASSERT(ind < cpool_size && !(*last_pc_patch & (1 << 25)) && (*last_pc_patch & (1 << 20)));
374 const_pool[ind] = counter;
379 ind = const_pool[ind];
382 if (diff >= 2 || ind > 0) {
383 diff = (diff + (sljit_uw)ind - 2) << 2;
385 *last_pc_patch = (*last_pc_patch & ~(sljit_uw)0xfff) | diff;
388 *last_pc_patch = (*last_pc_patch & ~(sljit_uw)(0xfff | (1 << 23))) | 0x004;
397 struct future_patch* next;
405 struct future_patch *curr_patch, *prev_patch;
411 value = cpool_start_address[cpool_current_index];
413 curr_patch = *first_patch;
417 value = cpool_start_address[cpool_current_index];
420 if ((sljit_uw)curr_patch->index == cpool_current_index) {
423 prev_patch->next = curr_patch->next;
425 *first_patch = curr_patch->next;
429 prev_patch = curr_patch;
430 curr_patch = curr_patch->next;
435 if (value > cpool_current_index) {
438 while (*first_patch) {
439 curr_patch = *first_patch;
440 *first_patch = (*first_patch)->next;
445 curr_patch->next = *first_patch;
448 *first_patch = curr_patch;
450 cpool_start_address[value] = *buf_ptr;
471 return push_inst(compiler, MOVT | RD(reg) | ((imm >> 12) & 0xf0000) | (((sljit_u32)imm >> 16) & 0xfff));
483#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
484 if (jump->flags & IS_BL)
488 if (jump->flags & JUMP_ADDR)
489 diff = ((sljit_sw)jump->u.target - (sljit_sw)(code_ptr + 2) - executable_offset);
499#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
500 if (jump->flags & IS_BL) {
501 if (diff <= 0x01ffffff && diff >= -0x02000000) {
503 jump->flags |= PATCH_B;
508 if (diff <= 0x01ffffff && diff >= -0x02000000) {
510 jump->flags |= PATCH_B;
514 if (diff <= 0x01ffffff && diff >= -0x02000000) {
516 jump->flags |= PATCH_B;
525#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
534 if (diff <= 0x7fffff && diff >= -0x800000) {
543 inst = (sljit_ins*)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
554 inst = (sljit_ins*)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
560 if (mov_pc & (1 << 23))
561 ptr = inst + ((mov_pc & 0xfff) >> 2) + 2;
565 if (*inst != mov_pc) {
573 inst = (sljit_ins*)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
580 inst = (sljit_ins*)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
607 inst[0] = MOVW | (inst[0] & 0xf000) | ((new_addr << 4) & 0xf0000) | (new_addr & 0xfff);
608 inst[1] = MOVT | (inst[1] & 0xf000) | ((new_addr >> 12) & 0xf0000) | ((new_addr >> 16) & 0xfff);
612 inst = (sljit_ins*)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
624#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
632 src2 = get_imm(new_constant);
638 *inst = 0xe3a00000 | (ldr_literal & 0xf000) | src2;
642 inst = (sljit_ins*)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
648 src2 = get_imm(~new_constant);
654 *inst = 0xe3e00000 | (ldr_literal & 0xf000) | src2;
658 inst = (sljit_ins*)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
664 if (ldr_literal & (1 << 23))
665 ptr = inst + ((ldr_literal & 0xfff) >> 2) + 2;
669 if (*inst != ldr_literal) {
678 inst = (sljit_ins*)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
703 inst[0] = MOVW | (inst[0] & 0xf000) | ((new_constant << 4) & 0xf0000) | (new_constant & 0xfff);
704 inst[1] = MOVT | (inst[1] & 0xf000) | ((new_constant >> 12) & 0xf0000) | ((new_constant >> 16) & 0xfff);
708 inst = (sljit_ins*)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
720 if (jump->flags & JUMP_ADDR)
721 addr = jump->u.target;
723 addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code + jump->u.label->size, executable_offset);
728 if ((diff & 0x3) == 0 && diff <= (0x3fc + 2 * SSIZE_OF(ins)) && diff >= (-0x3fc + 2 * SSIZE_OF(ins))) {
729 jump->flags |= PATCH_B;
733#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
740#if (defined SLJIT_CONFIG_ARM_V7 && SLJIT_CONFIG_ARM_V7)
747 SLJIT_NEXT_DEFINE_TYPES;
753 jump = compiler->jumps;
754 const_ = compiler->consts;
755 SLJIT_NEXT_INIT_TYPES();
758 SLJIT_GET_NEXT_MIN();
760 if (next_min_addr == SLJIT_MAX_ADDRESS)
763 if (next_min_addr == next_label_size) {
764 label->size -= size_reduce;
767 next_label_size = SLJIT_GET_NEXT_SIZE(label);
770 if (next_min_addr == next_const_addr) {
771 const_->addr -= size_reduce;
772 const_ = const_->next;
773 next_const_addr = SLJIT_GET_NEXT_ADDRESS(const_);
777 if (next_min_addr != next_jump_addr)
780 jump->addr -= size_reduce;
781 if (!(jump->flags & JUMP_MOV_ADDR)) {
782 total_size = JUMP_MAX_SIZE - 1;
788 if (diff <= (0x01ffffff / SSIZE_OF(ins)) && diff >= (-0x02000000 / SSIZE_OF(ins)))
792 size_reduce += JUMP_MAX_SIZE - 1 - total_size;
797 if (!(jump->flags & JUMP_ADDR)) {
799 if (diff <= 0xff + 2 && diff >= -0xff + 2)
803 size_reduce += 1 - total_size;
806 jump->flags |= total_size << JUMP_SIZE_SHIFT;
808 next_jump_addr = SLJIT_GET_NEXT_ADDRESS(jump);
811 compiler->size -= size_reduce;
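/* Size reduction pre-pass: every jump first reserves the worst-case
   JUMP_MAX_SIZE words; when the estimated distance allows a shorter sequence,
   the surplus is accumulated in size_reduce and subtracted from compiler->size
   as well as from the label and constant addresses recorded so far. */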
824 SLJIT_NEXT_DEFINE_TYPES;
828#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
834 struct future_patch *first_patch;
842 CHECK_PTR(check_sljit_generate_code(compiler));
845#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
846 compiler->size += (compiler->patches << 1);
847 if (compiler->cpool_fill > 0)
850 reduce_code_size(compiler);
855 reverse_buf(compiler);
858#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
860 cpool_skip_alignment = 0;
861 cpool_current_index = 0;
862 cpool_start_address = NULL;
864 last_pc_patch = code;
870 jump = compiler->jumps;
871 const_ = compiler->consts;
872 SLJIT_NEXT_INIT_TYPES();
873 SLJIT_GET_NEXT_MIN();
877 buf_end = buf_ptr + (buf->used_size >> 2);
879#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
880 if (cpool_size > 0) {
881 if (cpool_skip_alignment > 0) {
883 cpool_skip_alignment--;
885 if (SLJIT_UNLIKELY(resolve_const_pool_index(compiler, &first_patch, cpool_current_index, cpool_start_address, buf_ptr))) {
886 SLJIT_FREE_EXEC(code, exec_allocator_data);
891 if (++cpool_current_index >= cpool_size) {
896 }
else if ((*buf_ptr & 0xff000000) != PUSH_POOL) {
898 *code_ptr = *buf_ptr++;
899 if (next_min_addr == word_count) {
904 if (next_min_addr == next_label_size) {
905 label->u.addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
908 next_label_size = SLJIT_GET_NEXT_SIZE(label);
912 if (next_min_addr == next_jump_addr) {
913 if (!(jump->flags & JUMP_MOV_ADDR)) {
914#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
915 if (detect_jump_type(jump, code_ptr, code, executable_offset))
919 word_count += jump->flags >> JUMP_SIZE_SHIFT;
921 if (!detect_jump_type(jump, code_ptr, code, executable_offset)) {
922 code_ptr[2] = code_ptr[0];
923 addr = ((code_ptr[0] & 0xf) << 12);
931#if (defined SLJIT_CONFIG_ARM_V7 && SLJIT_CONFIG_ARM_V7)
932 word_count += jump->flags >> JUMP_SIZE_SHIFT;
935 code_ptr += mov_addr_get_length(jump, code_ptr, code, executable_offset);
939 next_jump_addr = SLJIT_GET_NEXT_ADDRESS(jump);
940 }
else if (next_min_addr == next_const_addr) {
942 const_ = const_->next;
943 next_const_addr = SLJIT_GET_NEXT_ADDRESS(const_);
946 SLJIT_GET_NEXT_MIN();
949#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
955 cpool_current_index = patch_pc_relative_loads(last_pc_patch, code_ptr, cpool_start_address, cpool_size);
956 if (cpool_current_index > 0) {
958 *code_ptr = B | (((sljit_ins)(cpool_start_address - code_ptr) + cpool_current_index - 2) & ~PUSH_POOL);
959 code_ptr = (sljit_ins*)(cpool_start_address + cpool_current_index);
962 cpool_current_index = 0;
963 last_pc_patch = code_ptr;
967 } while (buf_ptr < buf_end);
971 if (label && label->size == word_count) {
972 label->u.addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
981#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
983 if (compiler->cpool_fill > 0) {
985 cpool_current_index = patch_pc_relative_loads(last_pc_patch, code_ptr, cpool_start_address, compiler->cpool_fill);
986 if (cpool_current_index > 0)
987 code_ptr = (sljit_ins*)(cpool_start_address + cpool_current_index);
989 buf_ptr = compiler->cpool;
990 buf_end = buf_ptr + compiler->cpool_fill;
991 cpool_current_index = 0;
992 while (buf_ptr < buf_end) {
993 if (SLJIT_UNLIKELY(resolve_const_pool_index(compiler, &first_patch, cpool_current_index, cpool_start_address, buf_ptr))) {
994 SLJIT_FREE_EXEC(code, exec_allocator_data);
999 cpool_current_index++;
1005 jump = compiler->jumps;
1007 addr = (jump->flags & JUMP_ADDR) ? jump->u.target : jump->u.label->u.addr;
1010 if (jump->flags & JUMP_MOV_ADDR) {
1011#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
1017 if (jump->flags & PATCH_B) {
1019 diff = ((sljit_sw)addr - (sljit_sw)SLJIT_ADD_EXEC_OFFSET(buf_ptr + 2, executable_offset)) >> 2;
1029 buf_ptr[0] = addr | (buf_ptr[0] & 0xf000) | RN(TMP_PC) | (1 << 25) | (0xf << 8) | (sljit_ins)(diff & 0xff);
1031#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
1032 buf_ptr[((buf_ptr[0] & 0xfff) >> 2) + 2] = addr;
1034 buf_ptr[1] = MOVT | buf_ptr[0] | ((addr >> 12) & 0xf0000) | ((addr >> 16) & 0xfff);
1035 buf_ptr[0] = MOVW | buf_ptr[0] | ((addr << 4) & 0xf0000) | (addr & 0xfff);
1038 }
else if (jump->flags & PATCH_B) {
1040 SLJIT_ASSERT(diff <= 0x01ffffff && diff >= -0x02000000);
1041 *buf_ptr |= (diff >> 2) & 0x00ffffff;
1043#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
1044 if (jump->flags & IS_BL)
1050 code_ptr[1] = *buf_ptr;
1051 set_jump_addr((sljit_uw)code_ptr, executable_offset, addr, 0);
1054 if (*buf_ptr & (1 << 23))
1055 buf_ptr += ((*buf_ptr & 0xfff) >> 2) + 2;
1061 set_jump_addr((sljit_uw)buf_ptr, executable_offset, addr, 0);
1068#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
1069 const_ = compiler->consts;
1075 code_ptr[1] = *buf_ptr;
1076 if (*buf_ptr & (1 << 23))
1077 buf_ptr += ((*buf_ptr & 0xfff) >> 2) + 2;
1081 set_const_value((sljit_uw)code_ptr, executable_offset, *buf_ptr, 0);
1084 const_ = const_->next;
1094 code = (sljit_ins*)SLJIT_ADD_EXEC_OFFSET(code, executable_offset);
1095 code_ptr = (sljit_ins*)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
1104 switch (feature_type) {
1107#ifdef SLJIT_IS_FPU_AVAILABLE
1114#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
1117#ifdef SLJIT_IS_FPU_AVAILABLE
1137#if defined(SLJIT_CONFIG_ARM_V6) && SLJIT_CONFIG_ARM_V6
1153#define WORD_SIZE 0x00
1154#define BYTE_SIZE 0x01
1155#define HALF_SIZE 0x02
1158#define LOAD_DATA 0x08
1161#define ALLOW_IMM 0x10
1162#define ALLOW_INV_IMM 0x20
1163#define ALLOW_ANY_IMM (ALLOW_IMM | ALLOW_INV_IMM)
1164#define ALLOW_NEG_IMM 0x40
1165#define ALLOW_DOUBLE_IMM 0x80
1172static const sljit_ins data_transfer_insts[16] = {
1192#define EMIT_DATA_TRANSFER(type, add, target_reg, base_reg, arg) \
1193 (data_transfer_insts[(type) & 0xf] | ((add) << 23) | RD(target_reg) | RN(base_reg) | (sljit_ins)(arg))
1197#define IS_TYPE1_TRANSFER(type) \
1198 (data_transfer_insts[(type) & 0xf] & 0x04000000)
1199#define TYPE2_TRANSFER_IMM(imm) \
1200 (((imm) & 0xf) | (((imm) & 0xf0) << 4) | (1 << 22))
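/* IS_TYPE1_TRANSFER checks bit 26 of the template: word and byte accesses use
   the single data transfer encoding with a 12-bit unsigned offset, while
   halfword and signed loads use the extra load/store form whose 8-bit offset
   is split into two nibbles; TYPE2_TRANSFER_IMM builds that split immediate
   (bit 22 selects the immediate variant). */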
1202#define EMIT_FPU_OPERATION(opcode, mode, dst, src1, src2) \
1203 ((sljit_ins)(opcode) | (sljit_ins)(mode) | VD(dst) | VM(src1) | VN(src2))
1207#define ARGS_SWAPPED 0x01
1211#define REGISTER_OP 0x04
1213#define UNUSED_RETURN 0x08
1215#define SET_FLAGS (1 << 20)
1220#define SRC2_IMM (1 << 25)
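/* SET_FLAGS is the S bit of the data processing encodings and SRC2_IMM is the
   I bit selecting an immediate second operand: an 8-bit value rotated right by
   twice the 4-bit rotate field, the form produced by get_imm()/compute_imm()
   further below. */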
1243 CHECK(check_sljit_emit_enter(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size));
1244 set_emit_enter(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size);
1249 for (i = SLJIT_S0 - saved_arg_count; i > tmp; i--)
1261 FAIL_IF(push_inst(compiler, PUSH | (1 << 14) | imm));
1266 size = GET_SAVED_REGISTERS_SIZE(scratches, saveds - saved_arg_count, 1);
1269 if ((size & SSIZE_OF(sw)) != 0) {
1271 size += SSIZE_OF(sw);
1274 if (fsaveds + fscratches >= SLJIT_NUMBER_OF_FLOAT_REGISTERS) {
1284 local_size = ((size + local_size + 0x7) & ~0x7) - size;
1292 saved_arg_count = 0;
1297 float_arg_count = 0;
1300 switch (arg_types & SLJIT_ARG_MASK) {
1328 tmp = word_arg_count;
1343 compiler->args_size = offset;
1350 switch (arg_types & SLJIT_ARG_MASK) {
1352 if (offset != old_offset)
1358 if (f32_offset != 0) {
1362 if (offset != old_offset)
1364 f32_offset = old_offset;
1383 while (remap_ptr > remap)
1384 FAIL_IF(push_inst(compiler, *(--remap_ptr)));
1400 CHECK(check_sljit_set_context(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size));
1401 set_set_context(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size);
1403 size = GET_SAVED_REGISTERS_SIZE(scratches, saveds - SLJIT_KEPT_SAVEDS_COUNT(options), 1);
1407 size += SSIZE_OF(sw);
1425 sljit_s32 local_size, fscratches, fsaveds, i, tmp;
1440 if (fsaveds + fscratches >= SLJIT_NUMBER_OF_FLOAT_REGISTERS) {
1449 local_size = GET_SAVED_REGISTERS_SIZE(compiler->scratches, compiler->saveds, 1) & 0x7;
1452 if (frame_size < 0) {
1455 }
else if (frame_size > 0) {
1456 SLJIT_ASSERT(frame_size == 1 || (frame_size & 0x7) == 0);
1462 reg_list |= (sljit_uw)1 << reg_map[lr_dst];
1469 reg_list |= (sljit_uw)1 << reg_map[i];
1470 } while (--i > tmp);
1477 reg_list |= (sljit_uw)1 << reg_map[i];
1481 if (lr_dst == TMP_REG2 && reg_list == 0) {
1486 if (lr_dst == 0 && (reg_list & (reg_list - 1)) == 0) {
1489 if (reg_list != 0) {
1491 if (local_size <= 0xfff) {
1492 if (local_size == 0) {
1494 if (frame_size == 0)
1496 if (frame_size > 2 * SSIZE_OF(sw))
1502 }
else if (frame_size == 0) {
1503 frame_size = (restored_reg == TMP_REG2) ? SSIZE_OF(sw) : 2 * SSIZE_OF(sw);
1509 local_size += SSIZE_OF(sw);
1513 local_size += SSIZE_OF(sw);
1515 if (frame_size > local_size)
1517 else if (frame_size < local_size)
1524 frame_size -= SSIZE_OF(sw);
1526 frame_size -= SSIZE_OF(sw);
1531 tmp = (restored_reg == TMP_REG2) ? 0x800004 : 0x800008;
1541 if ((reg_list & (reg_list - 1)) == 0) {
1548 FAIL_IF(push_inst(compiler, POP | reg_list));
1562 CHECK(check_sljit_emit_return_void(compiler));
1564 return emit_stack_frame_release(compiler, 0);
1571 CHECK(check_sljit_emit_return_to(compiler, src, srcw));
1583 FAIL_IF(emit_stack_frame_release(compiler, 1));
1585 SLJIT_SKIP_CHECKS(compiler);
1606 return push_inst(compiler, MOV | RD(dst) | RM(src2));
1642#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
1647 return push_inst(compiler, (EOR ^ 0xf0000000) | SRC2_IMM | RD(dst) | RN(dst) | 0x1f);
1650 return push_inst(compiler, CLZ | RD(dst) | RM(dst));
1657 return push_inst(compiler, REV | RD(dst) | RM(src2));
1694 compiler->status_flags_state = 0;
1697 return push_inst(compiler, MUL | RN(dst) | RM8(src2) | RM(src1));
1741 if (compiler->shift_imm == 0x20) {
1754 return push_inst(compiler, MLA | RN(dst) | RD(dst) | RM8(src2) | RM(src1));
1763 if (compiler->shift_imm != 0x20) {
1766 if (compiler->shift_imm != 0)
1768 RD(dst) | (compiler->shift_imm << 7) | (shift_type << 5) | RM(src2));
1783#undef EMIT_SHIFT_INS_AND_RETURN
1794 if (!(imm & 0xff000000)) {
1798 imm = (imm << 24) | (imm >> 8);
1802 if (!(imm & 0xff000000)) {
1807 if (!(imm & 0xf0000000)) {
1812 if (!(imm & 0xc0000000)) {
1817 if (!(imm & 0x00ffffff))
1818 return SRC2_IMM | (imm >> 24) | (rol << 8);
1832 if (!(imm & mask)) {
1834 imm = (imm << rol) | (imm >> (32 - rol));
1836 rol = 4 + (rol >> 1);
1844 imm = (imm << 8) | (imm >> 24);
1848 if (!(imm & mask)) {
1850 imm = (imm << rol) | (imm >> (32 - rol));
1852 rol = (rol >> 1) - 8;
1867 if (!(imm & 0xff000000)) {
1868 imm1 = SRC2_IMM | ((imm >> 16) & 0xff) | (((rol + 4) & 0xf) << 8);
1869 *imm2 = SRC2_IMM | ((imm >> 8) & 0xff) | (((rol + 8) & 0xf) << 8);
1870 }
else if (imm & 0xc0000000) {
1871 imm1 = SRC2_IMM | ((imm >> 24) & 0xff) | ((rol & 0xf) << 8);
1875 if (!(imm & 0xff000000)) {
1880 if (!(imm & 0xf0000000)) {
1885 if (!(imm & 0xc0000000)) {
1890 if (!(imm & 0x00ffffff))
1891 *imm2 = SRC2_IMM | (imm >> 24) | ((rol & 0xf) << 8);
1895 if (!(imm & 0xf0000000)) {
1900 if (!(imm & 0xc0000000)) {
1905 imm1 = SRC2_IMM | ((imm >> 24) & 0xff) | ((rol & 0xf) << 8);
1909 if (!(imm & 0xf0000000)) {
1914 if (!(imm & 0xc0000000)) {
1919 if (!(imm & 0x00ffffff))
1920 *imm2 = SRC2_IMM | (imm >> 24) | ((rol & 0xf) << 8);
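/*
 * For reference, a minimal standalone sketch (not taken from sljit; the helper
 * name is made up for illustration) of the property that get_imm()/compute_imm()
 * above rely on: an ARM data processing immediate is an 8-bit value rotated
 * right by an even amount, so a 32-bit constant is encodable exactly when some
 * even rotation brings all of its set bits into the low eight bits.
 */
#include <stdint.h>

static int is_arm_operand2_imm(uint32_t value)
{
	unsigned int rot;
	uint32_t rotated;

	for (rot = 0; rot < 16; rot++) {
		/* Undo a "rotate right by 2*rot" encoding by rotating left. */
		rotated = (rot == 0) ? value
			: ((value << (2 * rot)) | (value >> (32 - 2 * rot)));
		if (rotated <= 0xff)
			return 1; /* encodable: imm8 = rotated, rotate field = rot */
	}
	return 0;
}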
1931#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
1935 return push_inst(compiler, MOVW | RD(reg) | ((imm << 4) & 0xf0000) | (imm & 0xfff));
1941 return push_inst(compiler, MOV | RD(reg) | tmp);
1943 tmp = get_imm(~imm);
1945 return push_inst(compiler, MVN | RD(reg) | tmp);
1947#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
1949 imm1 = compute_imm(imm, &imm2);
1952 return push_inst(compiler, ORR | RD(reg) | RN(reg) | imm2);
1955 imm1 = compute_imm(~imm, &imm2);
1958 return push_inst(compiler, BIC | RD(reg) | RN(reg) | imm2);
1964 FAIL_IF(push_inst(compiler, MOVW | RD(reg) | ((imm << 4) & 0xf0000) | (imm & 0xfff)));
1967 return push_inst(compiler, MOVT | RD(reg) | ((imm >> 12) & 0xf0000) | ((imm >> 16) & 0xfff));
1985 FAIL_IF(load_immediate(compiler, tmp_reg, tmp));
1999 if (arg & OFFS_REG_MASK) {
2000 offset_reg = OFFS_REG(arg);
2004 if (argw != 0 && (mask == 0xff)) {
2011 RM(offset_reg) | (mask == 0xff ? 0 : (1 << 25)) | ((sljit_ins)argw << 7)));
2028 }
else if (argw < -mask) {
2042 if (argw <= mask && argw >= -mask) {
2059 RM(tmp_reg) | (mask == 0xff ? 0 : (1 << 25))));
2131 op = neg_op | GET_ALL_FLAGS(op);
2164 op = neg_op | GET_ALL_FLAGS(op);
2172 dst_reg = FAST_IS_REG(dst) ? dst : TMP_REG2;
2179 if (FAST_IS_REG(src2))
2180 return emit_op_mem(compiler, inp_flags, src2, dst, dstw, TMP_REG1);
2183 if (FAST_IS_REG(src2) && dst_reg != TMP_REG2)
2186 src2_tmp_reg = dst_reg;
2196 if (src2_reg == 0 && (src2 & SLJIT_MEM)) {
2197 src2_reg = src2_tmp_reg;
2202 if (FAST_IS_REG(src1))
2213 if (src2_reg == 0) {
2214 src2_reg = src2_tmp_reg;
2216 if (FAST_IS_REG(src2))
2223 if (src1_reg == 0) {
2230 if (src2_reg == 0 && neg_op != 0) {
2236 if (src2_reg == 0) {
2238 src2_reg = src2_tmp_reg;
2252 if (src1_reg == 0) {
2257 if (src1_reg == 0 && neg_op != 0) {
2263 if (src1_reg == 0) {
2281 return emit_op_mem(compiler, inp_flags, dst_reg, dst, dstw, TMP_REG1);
2288#if defined(__GNUC__)
2289extern unsigned int __aeabi_uidivmod(unsigned int numerator, unsigned int denominator);
2290extern int __aeabi_idivmod(int numerator, int denominator);
2292#error "Software divmod functions are needed"
2305 CHECK(check_sljit_emit_op0(compiler, op));
2324 SLJIT_ASSERT(reg_map[2] == 1 && reg_map[3] == 2 && reg_map[4] == 3);
2326 saved_reg_count = 0;
2328 saved_reg_list[saved_reg_count++] = 3;
2330 saved_reg_list[saved_reg_count++] = 2;
2332 saved_reg_list[saved_reg_count++] = 1;
2334 if (saved_reg_count > 0) {
2335 FAIL_IF(push_inst(compiler, STR | 0x2d0000 | (saved_reg_count >= 3 ? 16 : 8)
2336 | (saved_reg_list[0] << 12) ));
2337 if (saved_reg_count >= 2) {
2339 FAIL_IF(push_inst(compiler, STR | 0x8d0004 | (saved_reg_list[1] << 12) ));
2341 if (saved_reg_count >= 3) {
2343 FAIL_IF(push_inst(compiler, STR | 0x8d0008 | (saved_reg_list[2] << 12) ));
2347#if defined(__GNUC__)
2351#error "Software divmod functions are needed"
2354 if (saved_reg_count > 0) {
2355 if (saved_reg_count >= 3) {
2357 FAIL_IF(push_inst(compiler, LDR | 0x8d0008 | (saved_reg_list[2] << 12) ));
2359 if (saved_reg_count >= 2) {
2361 FAIL_IF(push_inst(compiler, LDR | 0x8d0004 | (saved_reg_list[1] << 12) ));
2363 return push_inst(compiler, (LDR ^ (1 << 24)) | 0x8d0000 | (sljit_ins)(saved_reg_count >= 3 ? 16 : 8)
2364 | (saved_reg_list[0] << 12) );
2380 CHECK(check_sljit_emit_op1(compiler, op, dst, dstw, src, srcw));
2381 ADJUST_LOCAL_OFFSET(dst, dstw);
2382 ADJUST_LOCAL_OFFSET(src, srcw);
2409 return emit_op(compiler, op, 0, dst, dstw, TMP_REG1, 0, src, srcw);
2427 CHECK(check_sljit_emit_op2(compiler, op, 0, dst, dstw, src1, src1w, src2, src2w));
2428 ADJUST_LOCAL_OFFSET(dst, dstw);
2429 ADJUST_LOCAL_OFFSET(src1, src1w);
2430 ADJUST_LOCAL_OFFSET(src2, src2w);
2447 return emit_op(compiler, op, inp_flags, dst, dstw, src1, src1w, src2, src2w);
2450 return emit_op(compiler, op, 0, dst, dstw, src1, src1w, src2, src2w);
2453 return emit_op(compiler, op, ALLOW_ANY_IMM, dst, dstw, src1, src1w, src2, src2w);
2464 compiler->shift_imm = src2w & 0x1f;
2465 return emit_op(compiler, op, 0, dst, dstw, TMP_REG1, 0, src1, src1w);
2467 compiler->shift_imm = 0x20;
2468 return emit_op(compiler, op, 0, dst, dstw, src1, src1w, src2, src2w);
2480 CHECK(check_sljit_emit_op2(compiler, op, 1, 0, 0, src1, src1w, src2, src2w));
2482 SLJIT_SKIP_CHECKS(compiler);
2492 CHECK(check_sljit_emit_op2r(compiler, op, dst_reg, src1, src1w, src2, src2w));
2493 ADJUST_LOCAL_OFFSET(src1, src1w);
2494 ADJUST_LOCAL_OFFSET(src2, src2w);
2498 return emit_op(compiler, op, 0, dst_reg, 0, src1, src1w, src2, src2w);
2513 CHECK(check_sljit_emit_shift_into(compiler, op, dst_reg, src1_reg, src2_reg, src3, src3w));
2518 if (src1_reg == src2_reg) {
2519 SLJIT_SKIP_CHECKS(compiler);
2523 ADJUST_LOCAL_OFFSET(src3, src3w);
2533 src3w = (src3w ^ 0x1f) + 1;
2534 return push_inst(compiler, ORR | RD(dst_reg) | RN(dst_reg) | RM(src2_reg) | ((sljit_ins)(is_left ? 1 : 0) << 5) | ((sljit_ins)src3w << 7));
2557 CHECK(check_sljit_emit_op_src(compiler, op, src, srcw));
2558 ADJUST_LOCAL_OFFSET(src, srcw);
2564 if (FAST_IS_REG(src))
2589 CHECK(check_sljit_emit_op_dst(compiler, op, dst, dstw));
2590 ADJUST_LOCAL_OFFSET(dst, dstw);
2596 if (FAST_IS_REG(dst))
2604 if ((size & SSIZE_OF(sw)) == 0)
2605 size += SSIZE_OF(sw);
2612 dst_r = FAST_IS_REG(dst) ? dst : TMP_REG2;
2625 CHECK_REG_INDEX(check_sljit_get_register_index(type, reg));
2628 return reg_map[reg];
2631 return freg_map[reg];
2634 return freg_map[reg] & ~0x1;
2644 CHECK(check_sljit_emit_op_custom(compiler, instruction, size));
2646 return push_inst(compiler, *(sljit_ins*)instruction);
2653#define FPU_LOAD (1 << 20)
2654#define EMIT_FPU_DATA_TRANSFER(inst, add, base, freg, offs) \
2655 ((inst) | (sljit_ins)((add) << 23) | RN(base) | VD(freg) | (sljit_ins)(offs))
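/* VLDR/VSTR only take an 8-bit word offset (the byte offset is imm8 * 4),
   which is why the check below accepts displacements up to 0x3fc that are
   multiples of four and falls back to address arithmetic otherwise. */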
2673 if (!(argw & ~0x3fc))
2675 if (!(-argw & ~0x3fc))
2714 if (FAST_IS_REG(dst))
2718 return emit_fop_mem(compiler, 0, TMP_FREG1, dst, dstw);
2727 if (FAST_IS_REG(src))
2749 return sljit_emit_fop1_conv_f64_from_w(compiler, VCVT_F32_S32 | (~op & SLJIT_32), dst, dstw, src, srcw);
2756 return sljit_emit_fop1_conv_f64_from_w(compiler, VCVT_F32_U32 | (~op & SLJIT_32), dst, dstw, src, srcw);
2793 SELECT_FOP1_OPERATION_WITH_CHECKS(compiler, op, dst, dstw, src, srcw);
2795 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
2827 return emit_fop_mem(compiler, (op & SLJIT_32), dst_r, dst, dstw);
2839 CHECK(check_sljit_emit_fop2(compiler, op, dst, dstw, src1, src1w, src2, src2w));
2840 ADJUST_LOCAL_OFFSET(dst, dstw);
2841 ADJUST_LOCAL_OFFSET(src1, src1w);
2842 ADJUST_LOCAL_OFFSET(src2, src2w);
2846 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
2884#undef EMIT_FPU_DATA_TRANSFER
2889#if defined(__ARM_NEON) && __ARM_NEON
2899 CHECK(check_sljit_emit_fset32(compiler, freg, value));
2903#if defined(__ARM_NEON) && __ARM_NEON
2904 if ((u.imm << (32 - 19)) == 0) {
2905 exp = (u.imm >> (23 + 2)) & 0x3f;
2907 if (exp == 0x20 || exp == 0x1f) {
2908 ins = ((u.imm >> 24) & 0x80) | ((u.imm >> 19) & 0x7f);
2909 return push_inst(compiler, (VMOV_F32 ^ (1 << 6)) | ((ins & 0xf0) << 12) | VD(freg) | (ins & 0xf));
2921#if defined(__ARM_NEON) && __ARM_NEON
2931 CHECK(check_sljit_emit_fset64(compiler, freg, value));
2935#if defined(__ARM_NEON) && __ARM_NEON
2936 if (u.imm[0] == 0 && (u.imm[1] << (64 - 48)) == 0) {
2937 exp = (u.imm[1] >> ((52 - 32) + 2)) & 0x1ff;
2939 if (exp == 0x100 || exp == 0xff) {
2940 ins = ((u.imm[1] >> (56 - 32)) & 0x80) | ((u.imm[1] >> (48 - 32)) & 0x7f);
2941 return push_inst(compiler, (VMOV_F32 ^ (1 << 6)) | (1 << 8) | ((ins & 0xf0) << 12) | VD(freg) | (ins & 0xf));
2947 if (u.imm[0] == u.imm[1])
2961 CHECK(check_sljit_emit_fcopy(compiler, op, freg, reg));
2963 if (reg & REG_PAIR_MASK) {
2964 reg2 = REG_PAIR_SECOND(reg);
2965 reg = REG_PAIR_FIRST(reg);
2978 return push_inst(compiler, inst);
3079 CHECK_PTR(check_sljit_emit_label(compiler));
3086 set_label(label, compiler);
3095 CHECK_PTR(check_sljit_emit_jump(compiler, type));
3104#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
3108 jump->addr = compiler->size;
3113 compiler->patches++;
3116 jump->flags |= IS_BL;
3117 jump->addr = compiler->size;
3121 jump->addr = compiler->size;
3123 jump->flags |= IS_BL;
3125 compiler->size += JUMP_MAX_SIZE - 1;
3143 if (src && FAST_IS_REG(*src))
3151 switch (arg_types & SLJIT_ARG_MASK) {
3167 word_arg_offset += sizeof(sljit_sw);
3189 FAIL_IF(emit_stack_frame_release(compiler, -1));
3195 switch (types & SLJIT_ARG_MASK) {
3198 offset = *(--offset_ptr);
3210 | (float_arg_count << 12) | ((offset - 4 * sizeof(sljit_sw)) >> 2)));
3214 offset = *(--offset_ptr);
3217 if (src_offset == offset) {
3221 FAIL_IF(push_inst(compiler, VMOV | 0x100000 | (float_arg_count << 16) | (offset << 10)));
3224 | (float_arg_count << 12) | ((offset - 4 * sizeof(sljit_sw)) >> 2)));
3227 word_arg_offset -= sizeof(sljit_sw);
3228 offset = *(--offset_ptr);
3232 if (offset != word_arg_offset) {
3234 if (src_offset == offset) {
3238 else if (src_offset == word_arg_offset) {
3242 FAIL_IF(push_inst(compiler, MOV | (offset << 10) | (word_arg_offset >> 2)));
3258 FAIL_IF(push_inst(compiler, VMOV2 | (1 << 16) | (0 << 12) | 0));
3260 FAIL_IF(push_inst(compiler, VMOV | (0 << 16) | (0 << 12)));
3277 switch (arg_types & SLJIT_ARG_MASK) {
3279 if (offset != new_offset)
3287 if (f32_offset != 0) {
3289 0x400000, f32_offset, offset, 0)));
3292 if (offset != new_offset)
3294 0, new_offset, offset, 0)));
3295 f32_offset = new_offset;
3318 CHECK_PTR(check_sljit_emit_call(compiler, type, arg_types));
3322 PTR_FAIL_IF(softfloat_call_with_args(compiler, arg_types, NULL, &extra_space));
3328 SLJIT_SKIP_CHECKS(compiler);
3332 if (extra_space > 0) {
3346 PTR_FAIL_IF(softfloat_post_call_with_args(compiler, arg_types));
3352 PTR_FAIL_IF(emit_stack_frame_release(compiler, -1));
3358 PTR_FAIL_IF(hardfloat_call_with_args(compiler, arg_types));
3361 SLJIT_SKIP_CHECKS(compiler);
3370 CHECK(check_sljit_emit_ijump(compiler, type, src, srcw));
3371 ADJUST_LOCAL_OFFSET(src, srcw);
3376 if (FAST_IS_REG(src)) {
3392#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
3394 FAIL_IF(prepare_blx(compiler));
3395 jump->addr = compiler->size;
3398 jump->addr = compiler->size;
3402 jump->addr = compiler->size;
3404 compiler->size += JUMP_MAX_SIZE - 1;
3418 CHECK(check_sljit_emit_icall(compiler, type, arg_types, src, srcw));
3432 FAIL_IF(softfloat_call_with_args(compiler, arg_types, &src, &extra_space));
3438 SLJIT_SKIP_CHECKS(compiler);
3441 if (extra_space > 0) {
3453 return softfloat_post_call_with_args(compiler, arg_types);
3458 FAIL_IF(emit_stack_frame_release(compiler, -1));
3464 FAIL_IF(hardfloat_call_with_args(compiler, arg_types));
3467 SLJIT_SKIP_CHECKS(compiler);
3479 SLJIT_SKIP_CHECKS(compiler);
3483 if (FAST_IS_REG(src)) {
3489 SLJIT_SKIP_CHECKS(compiler);
3506 CHECK(check_sljit_emit_op_flags(compiler, op, dst, dstw, type));
3507 ADJUST_LOCAL_OFFSET(dst, dstw);
3510 cc = get_cc(compiler, type);
3511 dst_reg = FAST_IS_REG(dst) ? dst : TMP_REG1;
3547 CHECK(check_sljit_emit_select(compiler, type, dst_reg, src1, src1w, src2_reg));
3549 ADJUST_LOCAL_OFFSET(src1, src1w);
3551 if (src2_reg != dst_reg && src1 == dst_reg) {
3561 if (src2_reg != dst_reg) {
3569 }
else if (dst_reg != src2_reg)
3577 return push_inst(compiler, ((MOV | RD(dst_reg) | tmp) & ~COND_MASK) | cc);
3581 return push_inst(compiler, ((MVN | RD(dst_reg) | tmp) & ~COND_MASK) | cc);
3583#if (defined SLJIT_CONFIG_ARM_V7 && SLJIT_CONFIG_ARM_V7)
3585 FAIL_IF(push_inst(compiler, (MOVW & ~COND_MASK) | cc | RD(dst_reg) | ((tmp << 4) & 0xf0000) | (tmp & 0xfff)));
3588 return push_inst(compiler, (MOVT & ~COND_MASK) | cc | RD(dst_reg) | ((tmp >> 12) & 0xf0000) | ((tmp >> 16) & 0xfff));
3595 return push_inst(compiler, ((MOV | RD(dst_reg) | RM(src1)) & ~COND_MASK) | cc);
3606 CHECK(check_sljit_emit_fselect(compiler, type, dst_freg, src1, src1w, src2_freg));
3608 ADJUST_LOCAL_OFFSET(src1, src1w);
3612 if (dst_freg != src2_freg) {
3613 if (dst_freg == src1) {
3630#undef EMIT_FPU_OPERATION
3652 if (argw <= max_offset && argw >= -mask) {
3701 CHECK(check_sljit_emit_mem(compiler, type, reg, mem, memw));
3703 if (!(reg & REG_PAIR_MASK))
3704 return sljit_emit_mem_unaligned(compiler, type, reg, mem, memw);
3706 ADJUST_LOCAL_OFFSET(mem, memw);
3708 FAIL_IF(update_mem_addr(compiler, &mem, &memw, 0xfff - 4));
3713 if (REG_PAIR_FIRST(reg) == (mem & REG_MASK)) {
3733 CHECK(check_sljit_emit_mem_update(compiler, type, reg, mem, memw));
3735 is_type1_transfer = 1;
3737 switch (type & 0xff) {
3750 is_type1_transfer = 0;
3754 is_type1_transfer = 0;
3758 is_type1_transfer = 0;
3773 if (!is_type1_transfer && memw != 0)
3776 if (is_type1_transfer) {
3777 if (memw > 4095 || memw < -4095)
3779 }
else if (memw > 255 || memw < -255)
3791 if (is_type1_transfer)
3799 return push_inst(compiler, inst);
3809 if (is_type1_transfer) {
3815 return push_inst(compiler, inst | (sljit_ins)memw);
3831 CHECK(check_sljit_emit_fmem(compiler, type, freg, mem, memw));
3842 FAIL_IF(update_mem_addr(compiler, &mem, &memw, 0xfff - 4));
3855 FAIL_IF(update_mem_addr(compiler, &mem, &memw, 0xfff - 4));
3886 imm = get_imm((sljit_uw)(memw < 0 ? -memw : memw));
3907#define SLJIT_QUAD_OTHER_HALF(freg) ((((freg) & 0x1) << 1) - 1)
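/* The 128-bit NEON registers are handled as pairs of consecutive double
   registers; SLJIT_QUAD_OTHER_HALF evaluates to -1 for an even register index
   and +1 for an odd one, selecting the other double register of the pair. */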
3919 CHECK(check_sljit_emit_simd_mov(compiler, type, freg, srcdst, srcdstw));
3921 ADJUST_LOCAL_OFFSET(srcdst, srcdstw);
3923 if (reg_size != 3 && reg_size != 4)
3933 freg = simd_get_quad_reg_index(freg);
3937 srcdst = simd_get_quad_reg_index(srcdst);
3940 ins = VD(srcdst) | VN(freg) | VM(freg);
3942 ins = VD(freg) | VN(srcdst) | VM(srcdst);
3947 return push_inst(compiler, VORR | ins);
3950 FAIL_IF(sljit_emit_simd_mem_offset(compiler, &srcdst, srcdstw));
3956 | (sljit_ins)((reg_size == 3) ? (0x7 << 8) : (0xa << 8));
3962 else if (alignment >= 3)
3965 return push_inst(compiler, ins | RN(srcdst) | ((sljit_ins)elem_size) << 6 | 0xf);
3982 switch (elem_size) {
3992 if (value <= 0xff) {
3997 if ((value & 0xff) == 0) {
4015 if (value <= 0xff) {
4071 CHECK(check_sljit_emit_simd_replicate(compiler, type, freg, src, srcw));
4073 ADJUST_LOCAL_OFFSET(src, srcw);
4075 if (reg_size != 3 && reg_size != 4)
4085 freg = simd_get_quad_reg_index(freg);
4088 return push_inst(compiler, VMOV_i | ((reg_size == 4) ? (1 << 6) : 0) | VD(freg));
4096 }
else if (freg != src)
4102 return push_inst(compiler, VORR | VD(freg) | VN(src) | VM(src));
4107 FAIL_IF(sljit_emit_simd_mem_offset(compiler, &src, srcw));
4114 return push_inst(compiler, VLD1_r | ins | VD(freg) | RN(src) | 0xf);
4131 imm = simd_get_imm(elem_size, (sljit_uw)srcw);
4137 return push_inst(compiler, VMOV_i | imm | VD(freg));
4144 switch (elem_size) {
4159 return push_inst(compiler, VDUP | ins | VN(freg) | RD(src));
4171 CHECK(check_sljit_emit_simd_lane_mov(compiler, type, freg, lane_index, srcdst, srcdstw));
4173 ADJUST_LOCAL_OFFSET(srcdst, srcdstw);
4175 if (reg_size != 3 && reg_size != 4)
4185 freg = simd_get_quad_reg_index(freg);
4188 ins = (reg_size == 3) ? 0 : ((sljit_ins)1 << 6);
4191 if (elem_size == 3 && !(srcdst & SLJIT_MEM)) {
4192 if (lane_index == 1)
4199 return push_inst(compiler, VMOV_i | VD(freg));
4212 if (reg_size == 4 && lane_index >= (0x8 >> elem_size)) {
4213 lane_index -= (0x8 >> elem_size);
4221 FAIL_IF(sljit_emit_simd_mem_offset(compiler, &srcdst, srcdstw));
4223 lane_index = lane_index << elem_size;
4224 ins = (sljit_ins)((elem_size << 10) | (lane_index << 5));
4229 if (elem_size == 3) {
4231 return push_inst(compiler, VORR | VD(srcdst) | VN(freg) | VM(freg));
4236 if (freg_ebit_map[freg] == 0) {
4237 if (lane_index == 1)
4240 return push_inst(compiler, VMOV_F32 | VD(srcdst) | VM(freg));
4261 else if (elem_size == 1)
4266 lane_index = lane_index << elem_size;
4267 ins |= (sljit_ins)(((lane_index & 0x4) << 19) | ((lane_index & 0x3) << 5));
4276 return push_inst(compiler, VMOV_s | ins | VN(freg) | RD(srcdst));
4288 CHECK(check_sljit_emit_simd_lane_replicate(compiler, type, freg, src, src_lane_index));
4290 if (reg_size != 3 && reg_size != 4)
4299 if (reg_size == 4) {
4300 freg = simd_get_quad_reg_index(freg);
4301 src = simd_get_quad_reg_index(src);
4303 if (src_lane_index >= (0x8 >> elem_size)) {
4304 src_lane_index -= (0x8 >> elem_size);
4309 if (elem_size == 3) {
4316 return push_inst(compiler, VORR | VD(freg) | VN(src) | VM(src));
4320 ins = ((((sljit_ins)src_lane_index << 1) | 1) << (16 + elem_size));
4325 return push_inst(compiler, VDUP_s | ins | VD(freg) | VM(src));
4338 CHECK(check_sljit_emit_simd_extend(compiler, type, freg, src, srcw));
4340 ADJUST_LOCAL_OFFSET(src, srcw);
4342 if (reg_size != 3 && reg_size != 4)
4352 freg = simd_get_quad_reg_index(freg);
4355 FAIL_IF(sljit_emit_simd_mem_offset(compiler, &src, srcw));
4356 if (reg_size == 4 && elem2_size - elem_size == 1)
4357 FAIL_IF(push_inst(compiler, VLD1 | (0x7 << 8) | VD(freg) | RN(src) | 0xf));
4361 }
else if (reg_size == 4)
4362 src = simd_get_quad_reg_index(src);
4365 dst_reg = (reg_size == 4) ? freg : TMP_FREG2;
4369 | ((sljit_ins)1 << (19 + elem_size)) | VD(dst_reg) | VM(src)));
4371 } while (++elem_size < elem2_size);
4403 CHECK(check_sljit_emit_simd_sign(compiler, type, freg, dst, dstw));
4405 ADJUST_LOCAL_OFFSET(dst, dstw);
4407 if (reg_size != 3 && reg_size != 4)
4416 switch (elem_size) {
4419 ins = VSHR | (1 << 24) | (0x9 << 16);
4422 imms = (reg_size == 4) ? 0x243219 : 0x2231;
4423 ins = VSHR | (1 << 24) | (0x11 << 16);
4426 imms = (reg_size == 4) ? 0x2231 : 0x21;
4427 ins = VSHR | (1 << 24) | (0x21 << 16);
4431 ins = VSHR | (1 << 24) | (0x1 << 16) | (1 << 7);
4435 if (reg_size == 4) {
4436 freg = simd_get_quad_reg_index(freg);
4443 if (reg_size == 4 && elem_size > 0)
4446 ins = (reg_size == 4 && elem_size == 0) ? (1 << 6) : 0;
4448 while (imms >= 0x100) {
4455 dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
4458 if (reg_size == 4 && elem_size == 0) {
4478 CHECK(check_sljit_emit_simd_op2(compiler, type, dst_freg, src1_freg, src2_freg));
4480 if (reg_size != 3 && reg_size != 4)
4486 switch (SLJIT_SIMD_GET_OPCODE(type)) {
4501 if (reg_size == 4) {
4502 dst_freg = simd_get_quad_reg_index(dst_freg);
4503 src1_freg = simd_get_quad_reg_index(src1_freg);
4504 src2_freg = simd_get_quad_reg_index(src2_freg);
4508 return push_inst(compiler, ins | VD(dst_freg) | VN(src1_freg) | VM(src2_freg));
4520 CHECK(check_sljit_emit_atomic_load(compiler, op, dst_reg, mem_reg));
4534 return push_inst(compiler, ins | RN(mem_reg) | RD(dst_reg));
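/* The atomic helpers are built on the exclusive access instructions defined
   above (LDREX/LDREXB/LDREXH and STREX/STREXB/STREXH): the load starts an
   exclusive access to mem_reg, and the matching store-exclusive emitted by
   sljit_emit_atomic_store reports through its result register whether the
   reservation was still held. */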
4548 CHECK(check_sljit_emit_atomic_store(compiler, op, src_reg, mem_reg, temp_reg));
4575 CHECK_PTR(check_sljit_emit_const(compiler, dst, dstw, init_value));
4576 ADJUST_LOCAL_OFFSET(dst, dstw);
4580 set_const(const_, compiler);
4582 dst_r = FAST_IS_REG(dst) ? dst : TMP_REG2;
4584#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
4585 PTR_FAIL_IF(push_inst_with_unique_literal(compiler,
4587 compiler->patches++;
4589 PTR_FAIL_IF(emit_imm(compiler, dst_r, init_value));
4603 CHECK_PTR(check_sljit_emit_mov_addr(compiler, dst, dstw));
4604 ADJUST_LOCAL_OFFSET(dst, dstw);
4606 dst_r = FAST_IS_REG(dst) ? dst : TMP_REG2;
4608#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
4610 compiler->patches++;
4617 set_mov_addr(jump, compiler, 1);
4619#if (defined SLJIT_CONFIG_ARM_V7 && SLJIT_CONFIG_ARM_V7)
4620 compiler->size += 1;
4630 set_jump_addr(addr, executable_offset, new_target, 1);
4635 set_const_value(addr, executable_offset, (sljit_uw)new_constant, 1);