Diffstat (limited to 'src/3rdparty/pcre2/src/sljit/sljitNativeX86_32.c')
-rw-r--r--  src/3rdparty/pcre2/src/sljit/sljitNativeX86_32.c | 592
1 file changed, 468 insertions(+), 124 deletions(-)
diff --git a/src/3rdparty/pcre2/src/sljit/sljitNativeX86_32.c b/src/3rdparty/pcre2/src/sljit/sljitNativeX86_32.c
index 78f3dcb06f..074e64b9f2 100644
--- a/src/3rdparty/pcre2/src/sljit/sljitNativeX86_32.c
+++ b/src/3rdparty/pcre2/src/sljit/sljitNativeX86_32.c
@@ -1,7 +1,7 @@
/*
* Stack-less Just-In-Time compiler
*
- * Copyright 2009-2012 Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
+ * Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:
@@ -38,7 +38,7 @@ static sljit_s32 emit_do_imm(struct sljit_compiler *compiler, sljit_u8 opcode, s
return SLJIT_SUCCESS;
}
-static sljit_u8* generate_far_jump_code(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_s32 type)
+static sljit_u8* generate_far_jump_code(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_s32 type, sljit_sw executable_offset)
{
if (type == SLJIT_JUMP) {
*code_ptr++ = JMP_i32;
@@ -57,27 +57,47 @@ static sljit_u8* generate_far_jump_code(struct sljit_jump *jump, sljit_u8 *code_
if (jump->flags & JUMP_LABEL)
jump->flags |= PATCH_MW;
else
- sljit_unaligned_store_sw(code_ptr, jump->u.target - (jump->addr + 4));
+ sljit_unaligned_store_sw(code_ptr, jump->u.target - (jump->addr + 4) - (sljit_uw)executable_offset);
code_ptr += 4;
return code_ptr;
}
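
Note: sljit can map the JIT buffer at two addresses (one writable, one executable), and executable_offset is the distance between them. The rel32 displacement stored above must be relative to the address the CPU will execute from, hence the extra subtraction. A minimal C sketch of that arithmetic, with hypothetical names, assuming jump->addr + 4 is the writable address just past the 4-byte displacement field:

#include <stdint.h>

/* Sketch only: compute a rel32 displacement for a dual-mapped JIT buffer. */
static int32_t rel32_displacement(uintptr_t write_end, intptr_t executable_offset,
	uintptr_t exec_target)
{
	/* The address the CPU sees for the end of the jump instruction. */
	uintptr_t exec_end = write_end + (uintptr_t)executable_offset;
	return (int32_t)(exec_target - exec_end);
}
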
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compiler,
- sljit_s32 options, sljit_s32 args, sljit_s32 scratches, sljit_s32 saveds,
+ sljit_s32 options, sljit_s32 arg_types, sljit_s32 scratches, sljit_s32 saveds,
sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
{
- sljit_s32 size;
+ sljit_s32 args, size;
sljit_u8 *inst;
CHECK_ERROR();
- CHECK(check_sljit_emit_enter(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size));
- set_emit_enter(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size);
+ CHECK(check_sljit_emit_enter(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size));
+ set_emit_enter(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size);
+ args = get_arg_count(arg_types);
compiler->args = args;
- compiler->flags_saved = 0;
- size = 1 + (scratches > 7 ? (scratches - 7) : 0) + (saveds <= 3 ? saveds : 3);
+ /* [esp+0] for saving temporaries and function calls. */
+ compiler->stack_tmp_size = 2 * sizeof(sljit_sw);
+
+#if !(defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
+ if (scratches > 3)
+ compiler->stack_tmp_size = 3 * sizeof(sljit_sw);
+#endif
+
+ compiler->saveds_offset = compiler->stack_tmp_size;
+ if (scratches > 3)
+ compiler->saveds_offset += ((scratches > (3 + 6)) ? 6 : (scratches - 3)) * sizeof(sljit_sw);
+
+ compiler->locals_offset = compiler->saveds_offset;
+
+ if (saveds > 3)
+ compiler->locals_offset += (saveds - 3) * sizeof(sljit_sw);
+
+ if (options & SLJIT_F64_ALIGNMENT)
+ compiler->locals_offset = (compiler->locals_offset + sizeof(sljit_f64) - 1) & ~(sizeof(sljit_f64) - 1);
+
+ size = 1 + (scratches > 9 ? (scratches - 9) : 0) + (saveds <= 3 ? saveds : 3);
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
size += (args > 0 ? (args * 2) : 0) + (args > 2 ? 2 : 0);
#else
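
A worked example of the new frame bookkeeping above (an illustration, assuming a non-fastcall 32-bit build with sizeof(sljit_sw) == 4): for scratches = 5, saveds = 4 and SLJIT_F64_ALIGNMENT set,

	stack_tmp_size = 3 * 4            = 12  (scratches > 3)
	saveds_offset  = 12 + (5 - 3) * 4 = 20
	locals_offset  = 20 + (4 - 3) * 4 = 24
	locals_offset  = (24 + 7) & ~7    = 24  (already 8-byte aligned)
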
@@ -94,113 +114,169 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compi
*inst++ = MOD_REG | (reg_map[TMP_REG1] << 3) | 0x4 /* esp */;
}
#endif
- if (saveds > 2 || scratches > 7)
+ if (saveds > 2 || scratches > 9)
PUSH_REG(reg_map[SLJIT_S2]);
- if (saveds > 1 || scratches > 8)
+ if (saveds > 1 || scratches > 10)
PUSH_REG(reg_map[SLJIT_S1]);
- if (saveds > 0 || scratches > 9)
+ if (saveds > 0 || scratches > 11)
PUSH_REG(reg_map[SLJIT_S0]);
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
if (args > 0) {
- *inst++ = MOV_r_rm;
- *inst++ = MOD_REG | (reg_map[SLJIT_S0] << 3) | reg_map[SLJIT_R2];
+ inst[0] = MOV_r_rm;
+ inst[1] = MOD_REG | (reg_map[SLJIT_S0] << 3) | reg_map[SLJIT_R2];
+ inst += 2;
}
if (args > 1) {
- *inst++ = MOV_r_rm;
- *inst++ = MOD_REG | (reg_map[SLJIT_S1] << 3) | reg_map[SLJIT_R1];
+ inst[0] = MOV_r_rm;
+ inst[1] = MOD_REG | (reg_map[SLJIT_S1] << 3) | reg_map[SLJIT_R1];
+ inst += 2;
}
if (args > 2) {
- *inst++ = MOV_r_rm;
- *inst++ = MOD_DISP8 | (reg_map[SLJIT_S2] << 3) | 0x4 /* esp */;
- *inst++ = 0x24;
- *inst++ = sizeof(sljit_sw) * (3 + 2); /* saveds >= 3 as well. */
+ inst[0] = MOV_r_rm;
+ inst[1] = MOD_DISP8 | (reg_map[SLJIT_S2] << 3) | 0x4 /* esp */;
+ inst[2] = 0x24;
+ inst[3] = sizeof(sljit_sw) * (3 + 2); /* saveds >= 3 as well. */
}
#else
if (args > 0) {
- *inst++ = MOV_r_rm;
- *inst++ = MOD_DISP8 | (reg_map[SLJIT_S0] << 3) | reg_map[TMP_REG1];
- *inst++ = sizeof(sljit_sw) * 2;
+ inst[0] = MOV_r_rm;
+ inst[1] = MOD_DISP8 | (reg_map[SLJIT_S0] << 3) | reg_map[TMP_REG1];
+ inst[2] = sizeof(sljit_sw) * 2;
+ inst += 3;
}
if (args > 1) {
- *inst++ = MOV_r_rm;
- *inst++ = MOD_DISP8 | (reg_map[SLJIT_S1] << 3) | reg_map[TMP_REG1];
- *inst++ = sizeof(sljit_sw) * 3;
+ inst[0] = MOV_r_rm;
+ inst[1] = MOD_DISP8 | (reg_map[SLJIT_S1] << 3) | reg_map[TMP_REG1];
+ inst[2] = sizeof(sljit_sw) * 3;
+ inst += 3;
}
if (args > 2) {
- *inst++ = MOV_r_rm;
- *inst++ = MOD_DISP8 | (reg_map[SLJIT_S2] << 3) | reg_map[TMP_REG1];
- *inst++ = sizeof(sljit_sw) * 4;
+ inst[0] = MOV_r_rm;
+ inst[1] = MOD_DISP8 | (reg_map[SLJIT_S2] << 3) | reg_map[TMP_REG1];
+ inst[2] = sizeof(sljit_sw) * 4;
}
#endif
- SLJIT_COMPILE_ASSERT(SLJIT_LOCALS_OFFSET >= (2 + 4) * sizeof(sljit_uw), require_at_least_two_words);
+ SLJIT_ASSERT(SLJIT_LOCALS_OFFSET > 0);
+
#if defined(__APPLE__)
/* Ignore pushed registers and SLJIT_LOCALS_OFFSET when computing the aligned local size. */
- saveds = (2 + (scratches > 7 ? (scratches - 7) : 0) + (saveds <= 3 ? saveds : 3)) * sizeof(sljit_uw);
+ saveds = (2 + (scratches > 9 ? (scratches - 9) : 0) + (saveds <= 3 ? saveds : 3)) * sizeof(sljit_uw);
local_size = ((SLJIT_LOCALS_OFFSET + saveds + local_size + 15) & ~15) - saveds;
#else
- if (options & SLJIT_DOUBLE_ALIGNMENT) {
- local_size = SLJIT_LOCALS_OFFSET + ((local_size + 7) & ~7);
-
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 17);
- FAIL_IF(!inst);
-
- INC_SIZE(17);
- inst[0] = MOV_r_rm;
- inst[1] = MOD_REG | (reg_map[TMP_REG1] << 3) | reg_map[SLJIT_SP];
- inst[2] = GROUP_F7;
- inst[3] = MOD_REG | (0 << 3) | reg_map[SLJIT_SP];
- sljit_unaligned_store_sw(inst + 4, 0x4);
- inst[8] = JNE_i8;
- inst[9] = 6;
- inst[10] = GROUP_BINARY_81;
- inst[11] = MOD_REG | (5 << 3) | reg_map[SLJIT_SP];
- sljit_unaligned_store_sw(inst + 12, 0x4);
- inst[16] = PUSH_r + reg_map[TMP_REG1];
- }
+ if (options & SLJIT_F64_ALIGNMENT)
+ local_size = SLJIT_LOCALS_OFFSET + ((local_size + sizeof(sljit_f64) - 1) & ~(sizeof(sljit_f64) - 1));
else
- local_size = SLJIT_LOCALS_OFFSET + ((local_size + 3) & ~3);
+ local_size = SLJIT_LOCALS_OFFSET + ((local_size + sizeof(sljit_sw) - 1) & ~(sizeof(sljit_sw) - 1));
#endif
compiler->local_size = local_size;
+
#ifdef _WIN32
- if (local_size > 1024) {
-#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
- FAIL_IF(emit_do_imm(compiler, MOV_r_i32 + reg_map[SLJIT_R0], local_size));
-#else
- local_size -= SLJIT_LOCALS_OFFSET;
- FAIL_IF(emit_do_imm(compiler, MOV_r_i32 + reg_map[SLJIT_R0], local_size));
- FAIL_IF(emit_non_cum_binary(compiler, SUB_r_rm, SUB_rm_r, SUB, SUB_EAX_i32,
- SLJIT_SP, 0, SLJIT_SP, 0, SLJIT_IMM, SLJIT_LOCALS_OFFSET));
-#endif
- FAIL_IF(sljit_emit_ijump(compiler, SLJIT_CALL1, SLJIT_IMM, SLJIT_FUNC_OFFSET(sljit_grow_stack)));
+ if (local_size > 0) {
+ if (local_size <= 4 * 4096) {
+ if (local_size > 4096)
+ EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_MEM1(SLJIT_SP), -4096);
+ if (local_size > 2 * 4096)
+ EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_MEM1(SLJIT_SP), -4096 * 2);
+ if (local_size > 3 * 4096)
+ EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_MEM1(SLJIT_SP), -4096 * 3);
+ }
+ else {
+ EMIT_MOV(compiler, SLJIT_R0, 0, SLJIT_SP, 0);
+ EMIT_MOV(compiler, SLJIT_R1, 0, SLJIT_IMM, (local_size - 1) >> 12);
+
+ SLJIT_ASSERT (reg_map[SLJIT_R0] == 0);
+
+ EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_MEM1(SLJIT_R0), -4096);
+ FAIL_IF(emit_non_cum_binary(compiler, BINARY_OPCODE(SUB),
+ SLJIT_R0, 0, SLJIT_R0, 0, SLJIT_IMM, 4096));
+ FAIL_IF(emit_non_cum_binary(compiler, BINARY_OPCODE(SUB),
+ SLJIT_R1, 0, SLJIT_R1, 0, SLJIT_IMM, 1));
+
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
+ FAIL_IF(!inst);
+
+ INC_SIZE(2);
+ inst[0] = JNE_i8;
+ inst[1] = (sljit_s8) -16;
+ }
+
+ EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_MEM1(SLJIT_SP), -local_size);
}
#endif
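
Note on the _WIN32 block above: Windows commits stack pages lazily through a guard page, so every 4 KiB page between the old and the new stack pointer has to be touched in order. Frames of up to four pages get straight-line probe loads; larger frames get the R0/R1 count-down loop, whose JNE -16 re-enters the MOV/SUB/SUB sequence. A C model of the behaviour, a sketch rather than the emitted code:

/* Sketch only: assumes 4096-byte pages and a downward-growing stack.
   The JIT emits equivalent MOV/SUB instructions inline. */
static void probe_stack(volatile char *sp, long local_size)
{
	long offs;
	for (offs = 4096; offs < local_size; offs += 4096)
		(void)sp[-offs];	/* touch each intervening guard page */
	(void)sp[-local_size];	/* final touch at the new stack top */
}
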
SLJIT_ASSERT(local_size > 0);
- return emit_non_cum_binary(compiler, SUB_r_rm, SUB_rm_r, SUB, SUB_EAX_i32,
+
+#if !defined(__APPLE__)
+ if (options & SLJIT_F64_ALIGNMENT) {
+ EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_SP, 0);
+
+ /* Some space might be allocated during sljit_grow_stack() above on WIN32. */
+ FAIL_IF(emit_non_cum_binary(compiler, BINARY_OPCODE(SUB),
+ SLJIT_SP, 0, SLJIT_SP, 0, SLJIT_IMM, local_size + sizeof(sljit_sw)));
+
+#if defined _WIN32 && !(defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
+ if (compiler->local_size > 1024)
+ FAIL_IF(emit_cum_binary(compiler, BINARY_OPCODE(ADD),
+ TMP_REG1, 0, TMP_REG1, 0, SLJIT_IMM, sizeof(sljit_sw)));
+#endif
+
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 6);
+ FAIL_IF(!inst);
+
+ INC_SIZE(6);
+ inst[0] = GROUP_BINARY_81;
+ inst[1] = MOD_REG | AND | reg_map[SLJIT_SP];
+ sljit_unaligned_store_sw(inst + 2, ~(sizeof(sljit_f64) - 1));
+
+ /* The real local size must be used. */
+ return emit_mov(compiler, SLJIT_MEM1(SLJIT_SP), compiler->local_size, TMP_REG1, 0);
+ }
+#endif
+ return emit_non_cum_binary(compiler, BINARY_OPCODE(SUB),
SLJIT_SP, 0, SLJIT_SP, 0, SLJIT_IMM, local_size);
}
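
A rough C model of what the SLJIT_F64_ALIGNMENT prologue above and the matching epilogue in sljit_emit_return below amount to, assuming sizeof(sljit_f64) == 8 (illustration only; the emitted code uses TMP_REG1 and ESP directly):

#include <stdint.h>

static uintptr_t aligned_prologue(uintptr_t esp, uintptr_t local_size)
{
	uintptr_t saved_esp = esp;             /* MOV TMP_REG1, ESP */
	esp -= local_size + sizeof(uintptr_t); /* locals + slot for saved ESP */
	esp &= ~(uintptr_t)7;                  /* AND ESP, ~(sizeof(sljit_f64)-1) */
	*(uintptr_t *)(esp + local_size) = saved_esp;
	return esp;
	/* epilogue: esp = *(uintptr_t *)(esp + local_size); */
}
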
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_set_context(struct sljit_compiler *compiler,
- sljit_s32 options, sljit_s32 args, sljit_s32 scratches, sljit_s32 saveds,
+ sljit_s32 options, sljit_s32 arg_types, sljit_s32 scratches, sljit_s32 saveds,
sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
{
CHECK_ERROR();
- CHECK(check_sljit_set_context(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size));
- set_set_context(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size);
+ CHECK(check_sljit_set_context(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size));
+ set_set_context(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size);
- compiler->args = args;
+ compiler->args = get_arg_count(arg_types);
+
+ /* [esp+0] for saving temporaries and function calls. */
+ compiler->stack_tmp_size = 2 * sizeof(sljit_sw);
+
+#if !(defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
+ if (scratches > 3)
+ compiler->stack_tmp_size = 3 * sizeof(sljit_sw);
+#endif
+
+ compiler->saveds_offset = compiler->stack_tmp_size;
+ if (scratches > 3)
+ compiler->saveds_offset += ((scratches > (3 + 6)) ? 6 : (scratches - 3)) * sizeof(sljit_sw);
+
+ compiler->locals_offset = compiler->saveds_offset;
+
+ if (saveds > 3)
+ compiler->locals_offset += (saveds - 3) * sizeof(sljit_sw);
+
+ if (options & SLJIT_F64_ALIGNMENT)
+ compiler->locals_offset = (compiler->locals_offset + sizeof(sljit_f64) - 1) & ~(sizeof(sljit_f64) - 1);
#if defined(__APPLE__)
- saveds = (2 + (scratches > 7 ? (scratches - 7) : 0) + (saveds <= 3 ? saveds : 3)) * sizeof(sljit_uw);
+ saveds = (2 + (scratches > 9 ? (scratches - 9) : 0) + (saveds <= 3 ? saveds : 3)) * sizeof(sljit_uw);
compiler->local_size = ((SLJIT_LOCALS_OFFSET + saveds + local_size + 15) & ~15) - saveds;
#else
- if (options & SLJIT_DOUBLE_ALIGNMENT)
- compiler->local_size = SLJIT_LOCALS_OFFSET + ((local_size + 7) & ~7);
+ if (options & SLJIT_F64_ALIGNMENT)
+ compiler->local_size = SLJIT_LOCALS_OFFSET + ((local_size + sizeof(sljit_f64) - 1) & ~(sizeof(sljit_f64) - 1));
else
- compiler->local_size = SLJIT_LOCALS_OFFSET + ((local_size + 3) & ~3);
+ compiler->local_size = SLJIT_LOCALS_OFFSET + ((local_size + sizeof(sljit_sw) - 1) & ~(sizeof(sljit_sw) - 1));
#endif
return SLJIT_SUCCESS;
}
@@ -214,23 +290,19 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return(struct sljit_compiler *comp
CHECK(check_sljit_emit_return(compiler, op, src, srcw));
SLJIT_ASSERT(compiler->args >= 0);
- compiler->flags_saved = 0;
FAIL_IF(emit_mov_before_return(compiler, op, src, srcw));
SLJIT_ASSERT(compiler->local_size > 0);
- FAIL_IF(emit_cum_binary(compiler, ADD_r_rm, ADD_rm_r, ADD, ADD_EAX_i32,
- SLJIT_SP, 0, SLJIT_SP, 0, SLJIT_IMM, compiler->local_size));
#if !defined(__APPLE__)
- if (compiler->options & SLJIT_DOUBLE_ALIGNMENT) {
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 3);
- FAIL_IF(!inst);
-
- INC_SIZE(3);
- inst[0] = MOV_r_rm;
- inst[1] = (reg_map[SLJIT_SP] << 3) | 0x4 /* SIB */;
- inst[2] = (4 << 3) | reg_map[SLJIT_SP];
- }
+ if (compiler->options & SLJIT_F64_ALIGNMENT)
+ EMIT_MOV(compiler, SLJIT_SP, 0, SLJIT_MEM1(SLJIT_SP), compiler->local_size)
+ else
+ FAIL_IF(emit_cum_binary(compiler, BINARY_OPCODE(ADD),
+ SLJIT_SP, 0, SLJIT_SP, 0, SLJIT_IMM, compiler->local_size));
+#else
+ FAIL_IF(emit_cum_binary(compiler, BINARY_OPCODE(ADD),
+ SLJIT_SP, 0, SLJIT_SP, 0, SLJIT_IMM, compiler->local_size));
#endif
size = 2 + (compiler->scratches > 7 ? (compiler->scratches - 7) : 0) +
@@ -247,11 +319,11 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return(struct sljit_compiler *comp
INC_SIZE(size);
- if (compiler->saveds > 0 || compiler->scratches > 9)
+ if (compiler->saveds > 0 || compiler->scratches > 11)
POP_REG(reg_map[SLJIT_S0]);
- if (compiler->saveds > 1 || compiler->scratches > 8)
+ if (compiler->saveds > 1 || compiler->scratches > 10)
POP_REG(reg_map[SLJIT_S1]);
- if (compiler->saveds > 2 || compiler->scratches > 7)
+ if (compiler->saveds > 2 || compiler->scratches > 9)
POP_REG(reg_map[SLJIT_S2]);
POP_REG(reg_map[TMP_REG1]);
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
@@ -366,7 +438,7 @@ static sljit_u8* emit_x86_instruction(struct sljit_compiler *compiler, sljit_s32
if ((flags & EX86_BIN_INS) && (a & SLJIT_IMM))
*inst = (flags & EX86_BYTE_ARG) ? GROUP_BINARY_83 : GROUP_BINARY_81;
- if ((a & SLJIT_IMM) || (a == 0))
+ if (a & SLJIT_IMM)
*buf_ptr = 0;
else if (!(flags & EX86_SSE2_OP1))
*buf_ptr = reg_map[a] << 3;
@@ -438,42 +510,324 @@ static sljit_u8* emit_x86_instruction(struct sljit_compiler *compiler, sljit_s32
/* Call / return instructions */
/* --------------------------------------------------------------------- */
-static SLJIT_INLINE sljit_s32 call_with_args(struct sljit_compiler *compiler, sljit_s32 type)
+#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
+
+static sljit_s32 c_fast_call_get_stack_size(sljit_s32 arg_types, sljit_s32 *word_arg_count_ptr)
{
- sljit_u8 *inst;
+ sljit_s32 stack_size = 0;
+ sljit_s32 word_arg_count = 0;
+
+ arg_types >>= SLJIT_DEF_SHIFT;
+
+ while (arg_types) {
+ switch (arg_types & SLJIT_DEF_MASK) {
+ case SLJIT_ARG_TYPE_F32:
+ stack_size += sizeof(sljit_f32);
+ break;
+ case SLJIT_ARG_TYPE_F64:
+ stack_size += sizeof(sljit_f64);
+ break;
+ default:
+ word_arg_count++;
+ if (word_arg_count > 2)
+ stack_size += sizeof(sljit_sw);
+ break;
+ }
-#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
- inst = (sljit_u8*)ensure_buf(compiler, type >= SLJIT_CALL3 ? 1 + 2 + 1 : 1 + 2);
- FAIL_IF(!inst);
- INC_SIZE(type >= SLJIT_CALL3 ? 2 + 1 : 2);
+ arg_types >>= SLJIT_DEF_SHIFT;
+ }
+
+ if (word_arg_count_ptr)
+ *word_arg_count_ptr = word_arg_count;
+
+ return stack_size;
+}
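
A worked example for c_fast_call_get_stack_size (assuming sljit's fastcall convention, where the first two word arguments travel in registers): for an argument list of (word, f64, word, word), the f64 contributes 8 bytes and only the third word argument spills to the stack, so the function returns 12 and sets *word_arg_count_ptr to 3.
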
- if (type >= SLJIT_CALL3)
+static sljit_s32 c_fast_call_with_args(struct sljit_compiler *compiler,
+ sljit_s32 arg_types, sljit_s32 stack_size, sljit_s32 word_arg_count, sljit_s32 swap_args)
+{
+ sljit_u8 *inst;
+ sljit_s32 float_arg_count;
+
+ if (stack_size == sizeof(sljit_sw) && word_arg_count == 3) {
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
+ FAIL_IF(!inst);
+ INC_SIZE(1);
PUSH_REG(reg_map[SLJIT_R2]);
- *inst++ = MOV_r_rm;
- *inst++ = MOD_REG | (reg_map[SLJIT_R2] << 3) | reg_map[SLJIT_R0];
+ }
+ else if (stack_size > 0) {
+ if (word_arg_count >= 4)
+ EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_MEM1(SLJIT_SP), compiler->saveds_offset - sizeof(sljit_sw));
+
+ FAIL_IF(emit_non_cum_binary(compiler, BINARY_OPCODE(SUB),
+ SLJIT_SP, 0, SLJIT_SP, 0, SLJIT_IMM, stack_size));
+
+ stack_size = 0;
+ arg_types >>= SLJIT_DEF_SHIFT;
+ word_arg_count = 0;
+ float_arg_count = 0;
+ while (arg_types) {
+ switch (arg_types & SLJIT_DEF_MASK) {
+ case SLJIT_ARG_TYPE_F32:
+ float_arg_count++;
+ FAIL_IF(emit_sse2_store(compiler, 1, SLJIT_MEM1(SLJIT_SP), stack_size, float_arg_count));
+ stack_size += sizeof(sljit_f32);
+ break;
+ case SLJIT_ARG_TYPE_F64:
+ float_arg_count++;
+ FAIL_IF(emit_sse2_store(compiler, 0, SLJIT_MEM1(SLJIT_SP), stack_size, float_arg_count));
+ stack_size += sizeof(sljit_f64);
+ break;
+ default:
+ word_arg_count++;
+ if (word_arg_count == 3) {
+ EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), stack_size, SLJIT_R2, 0);
+ stack_size += sizeof(sljit_sw);
+ }
+ else if (word_arg_count == 4) {
+ EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), stack_size, TMP_REG1, 0);
+ stack_size += sizeof(sljit_sw);
+ }
+ break;
+ }
+
+ arg_types >>= SLJIT_DEF_SHIFT;
+ }
+ }
+
+ if (word_arg_count > 0) {
+ if (swap_args) {
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
+ FAIL_IF(!inst);
+ INC_SIZE(1);
+
+ *inst++ = XCHG_EAX_r | reg_map[SLJIT_R2];
+ }
+ else {
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
+ FAIL_IF(!inst);
+ INC_SIZE(2);
+
+ *inst++ = MOV_r_rm;
+ *inst++ = MOD_REG | (reg_map[SLJIT_R2] << 3) | reg_map[SLJIT_R0];
+ }
+ }
+
+ return SLJIT_SUCCESS;
+}
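
Note on the tail of c_fast_call_with_args: under fastcall the first word argument must end up in ECX, which per the register map here is SLJIT_R2's physical register, so the value in EAX (SLJIT_R0) is either copied over with the two-byte mov ecx, eax or, when the caller signals that the indirect call target itself occupies ECX, exchanged with the one-byte xchg eax, ecx.
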
+
+#endif
+
+static sljit_s32 cdecl_call_get_stack_size(struct sljit_compiler *compiler, sljit_s32 arg_types, sljit_s32 *word_arg_count_ptr)
+{
+ sljit_s32 stack_size = 0;
+ sljit_s32 word_arg_count = 0;
+
+ arg_types >>= SLJIT_DEF_SHIFT;
+
+ while (arg_types) {
+ switch (arg_types & SLJIT_DEF_MASK) {
+ case SLJIT_ARG_TYPE_F32:
+ stack_size += sizeof(sljit_f32);
+ break;
+ case SLJIT_ARG_TYPE_F64:
+ stack_size += sizeof(sljit_f64);
+ break;
+ default:
+ word_arg_count++;
+ stack_size += sizeof(sljit_sw);
+ break;
+ }
+
+ arg_types >>= SLJIT_DEF_SHIFT;
+ }
+
+ if (word_arg_count_ptr)
+ *word_arg_count_ptr = word_arg_count;
+
+ if (stack_size <= compiler->stack_tmp_size)
+ return 0;
+
+#if defined(__APPLE__)
+ return ((stack_size - compiler->stack_tmp_size + 15) & ~15);
#else
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 4 * (type - SLJIT_CALL0));
+ return stack_size - compiler->stack_tmp_size;
+#endif
+}
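
Worked example for cdecl_call_get_stack_size: arguments are charged sizeof(sljit_sw) per word and sizeof(sljit_f32)/sizeof(sljit_f64) per float, then the stack_tmp_size area already reserved by the prologue is discounted. With stack_tmp_size = 8 and arguments (word, word, word), the raw need is 12 bytes, so the function returns 12 - 8 = 4, or ((12 - 8) + 15) & ~15 = 16 on macOS, where the outgoing stack must stay 16-byte aligned; a call that fits entirely inside the reserved area returns 0 and needs no ESP adjustment.
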
+
+static sljit_s32 cdecl_call_with_args(struct sljit_compiler *compiler,
+ sljit_s32 arg_types, sljit_s32 stack_size, sljit_s32 word_arg_count)
+{
+ sljit_s32 float_arg_count = 0;
+
+ if (word_arg_count >= 4)
+ EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_MEM1(SLJIT_SP), compiler->saveds_offset - sizeof(sljit_sw));
+
+ if (stack_size > 0)
+ FAIL_IF(emit_non_cum_binary(compiler, BINARY_OPCODE(SUB),
+ SLJIT_SP, 0, SLJIT_SP, 0, SLJIT_IMM, stack_size));
+
+ stack_size = 0;
+ word_arg_count = 0;
+ arg_types >>= SLJIT_DEF_SHIFT;
+
+ while (arg_types) {
+ switch (arg_types & SLJIT_DEF_MASK) {
+ case SLJIT_ARG_TYPE_F32:
+ float_arg_count++;
+ FAIL_IF(emit_sse2_store(compiler, 1, SLJIT_MEM1(SLJIT_SP), stack_size, float_arg_count));
+ stack_size += sizeof(sljit_f32);
+ break;
+ case SLJIT_ARG_TYPE_F64:
+ float_arg_count++;
+ FAIL_IF(emit_sse2_store(compiler, 0, SLJIT_MEM1(SLJIT_SP), stack_size, float_arg_count));
+ stack_size += sizeof(sljit_f64);
+ break;
+ default:
+ word_arg_count++;
+ EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), stack_size, (word_arg_count >= 4) ? TMP_REG1 : word_arg_count, 0);
+ stack_size += sizeof(sljit_sw);
+ break;
+ }
+
+ arg_types >>= SLJIT_DEF_SHIFT;
+ }
+
+ return SLJIT_SUCCESS;
+}
+
+static sljit_s32 post_call_with_args(struct sljit_compiler *compiler,
+ sljit_s32 arg_types, sljit_s32 stack_size)
+{
+ sljit_u8 *inst;
+ sljit_s32 single;
+
+ if (stack_size > 0)
+ FAIL_IF(emit_cum_binary(compiler, BINARY_OPCODE(ADD),
+ SLJIT_SP, 0, SLJIT_SP, 0, SLJIT_IMM, stack_size));
+
+ if ((arg_types & SLJIT_DEF_MASK) < SLJIT_ARG_TYPE_F32)
+ return SLJIT_SUCCESS;
+
+ single = ((arg_types & SLJIT_DEF_MASK) == SLJIT_ARG_TYPE_F32);
+
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 3);
FAIL_IF(!inst);
- INC_SIZE(4 * (type - SLJIT_CALL0));
-
- *inst++ = MOV_rm_r;
- *inst++ = MOD_DISP8 | (reg_map[SLJIT_R0] << 3) | 0x4 /* SIB */;
- *inst++ = (0x4 /* none*/ << 3) | reg_map[SLJIT_SP];
- *inst++ = 0;
- if (type >= SLJIT_CALL2) {
- *inst++ = MOV_rm_r;
- *inst++ = MOD_DISP8 | (reg_map[SLJIT_R1] << 3) | 0x4 /* SIB */;
- *inst++ = (0x4 /* none*/ << 3) | reg_map[SLJIT_SP];
- *inst++ = sizeof(sljit_sw);
+ INC_SIZE(3);
+ inst[0] = single ? FSTPS : FSTPD;
+ inst[1] = (0x03 << 3) | 0x04;
+ inst[2] = (0x04 << 3) | reg_map[SLJIT_SP];
+
+ return emit_sse2_load(compiler, single, SLJIT_FR0, SLJIT_MEM1(SLJIT_SP), 0);
+}
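
Note on post_call_with_args: the 32-bit calling conventions return floating-point results in x87 ST(0), while sljit keeps SLJIT_FR0 in an SSE register, so the three bytes emitted above encode fstps [esp] (or fstpl [esp] for an f64 result) to pop the x87 value into the temp slot, and emit_sse2_load then reloads it into SLJIT_FR0 with movss/movsd.
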
+
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_call(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 arg_types)
+{
+ struct sljit_jump *jump;
+ sljit_s32 stack_size = 0;
+ sljit_s32 word_arg_count;
+
+ CHECK_ERROR_PTR();
+ CHECK_PTR(check_sljit_emit_call(compiler, type, arg_types));
+
+#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
+ if ((type & 0xff) == SLJIT_CALL) {
+ stack_size = c_fast_call_get_stack_size(arg_types, &word_arg_count);
+ PTR_FAIL_IF(c_fast_call_with_args(compiler, arg_types, stack_size, word_arg_count, 0));
+
+#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) \
+ || (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
+ compiler->skip_checks = 1;
+#endif
+
+ jump = sljit_emit_jump(compiler, type);
+ PTR_FAIL_IF(jump == NULL);
+
+ PTR_FAIL_IF(post_call_with_args(compiler, arg_types, 0));
+ return jump;
}
- if (type >= SLJIT_CALL3) {
- *inst++ = MOV_rm_r;
- *inst++ = MOD_DISP8 | (reg_map[SLJIT_R2] << 3) | 0x4 /* SIB */;
- *inst++ = (0x4 /* none*/ << 3) | reg_map[SLJIT_SP];
- *inst++ = 2 * sizeof(sljit_sw);
+#endif
+
+ stack_size = cdecl_call_get_stack_size(compiler, arg_types, &word_arg_count);
+ PTR_FAIL_IF(cdecl_call_with_args(compiler, arg_types, stack_size, word_arg_count));
+
+#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) \
+ || (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
+ compiler->skip_checks = 1;
+#endif
+
+ jump = sljit_emit_jump(compiler, type);
+ PTR_FAIL_IF(jump == NULL);
+
+ PTR_FAIL_IF(post_call_with_args(compiler, arg_types, stack_size));
+ return jump;
+}
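
Worth noting about the two exits of sljit_emit_call: the fastcall path hands post_call_with_args a stack_size of 0 because a fastcall callee pops its own stack arguments, whereas under cdecl the caller must release the stack_size bytes itself once the call returns.
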
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_icall(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 arg_types,
+ sljit_s32 src, sljit_sw srcw)
+{
+ sljit_s32 stack_size = 0;
+ sljit_s32 word_arg_count;
+#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
+ sljit_s32 swap_args;
+#endif
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_icall(compiler, type, arg_types, src, srcw));
+
+#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
+ SLJIT_ASSERT(reg_map[SLJIT_R0] == 0 && reg_map[SLJIT_R2] == 1 && SLJIT_R0 == 1 && SLJIT_R2 == 3);
+
+ if ((type & 0xff) == SLJIT_CALL) {
+ stack_size = c_fast_call_get_stack_size(arg_types, &word_arg_count);
+ swap_args = 0;
+
+ if (word_arg_count > 0) {
+ if ((src & REG_MASK) == SLJIT_R2 || OFFS_REG(src) == SLJIT_R2) {
+ swap_args = 1;
+ if (((src & REG_MASK) | 0x2) == SLJIT_R2)
+ src ^= 0x2;
+ if ((OFFS_REG(src) | 0x2) == SLJIT_R2)
+ src ^= TO_OFFS_REG(0x2);
+ }
+ }
+
+ FAIL_IF(c_fast_call_with_args(compiler, arg_types, stack_size, word_arg_count, swap_args));
+
+ compiler->saveds_offset += stack_size;
+ compiler->locals_offset += stack_size;
+
+#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) \
+ || (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
+ compiler->skip_checks = 1;
+#endif
+ FAIL_IF(sljit_emit_ijump(compiler, type, src, srcw));
+
+ compiler->saveds_offset -= stack_size;
+ compiler->locals_offset -= stack_size;
+
+ return post_call_with_args(compiler, arg_types, 0);
}
#endif
- return SLJIT_SUCCESS;
+
+ stack_size = cdecl_call_get_stack_size(compiler, arg_types, &word_arg_count);
+ FAIL_IF(cdecl_call_with_args(compiler, arg_types, stack_size, word_arg_count));
+
+ compiler->saveds_offset += stack_size;
+ compiler->locals_offset += stack_size;
+
+#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) \
+ || (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
+ compiler->skip_checks = 1;
+#endif
+ FAIL_IF(sljit_emit_ijump(compiler, type, src, srcw));
+
+ compiler->saveds_offset -= stack_size;
+ compiler->locals_offset -= stack_size;
+
+ return post_call_with_args(compiler, arg_types, stack_size);
}
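
Also above: while the outgoing arguments occupy the stack, saveds_offset and locals_offset are temporarily bumped by stack_size, so anything resolved against the frame while sljit_emit_ijump materialises the jump still points at the right slots; both offsets are restored immediately afterwards.
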
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
@@ -524,7 +878,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_return(struct sljit_compiler
INC_SIZE(1 + 1);
PUSH_REG(reg_map[src]);
}
- else if (src & SLJIT_MEM) {
+ else {
inst = emit_x86_instruction(compiler, 1, 0, 0, src, srcw);
FAIL_IF(!inst);
*inst++ = GROUP_FF;
@@ -534,16 +888,6 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_return(struct sljit_compiler
FAIL_IF(!inst);
INC_SIZE(1);
}
- else {
- /* SLJIT_IMM. */
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 5 + 1);
- FAIL_IF(!inst);
-
- INC_SIZE(5 + 1);
- *inst++ = PUSH_i32;
- sljit_unaligned_store_sw(inst, srcw);
- inst += sizeof(sljit_sw);
- }
RET();
return SLJIT_SUCCESS;
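
The final hunk drops the SLJIT_IMM branch from sljit_emit_fast_return: a return address is now expected to be a register or a memory operand, so the plain else above covers the remaining memory case.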