author     Liang Qi <liang.qi@qt.io>    2016-08-18 11:07:51 +0200
committer  Liang Qi <liang.qi@qt.io>    2016-08-18 11:07:51 +0200
commit     0ca4fcbc36527ce02b604aa88e8bd0d37d40baf4 (patch)
tree       6adf165fb783272d720416a49a51d9ad169d16de /src/3rdparty
parent     9aeaa363e533e1486e6e9d7526bbab7c2472be5e (diff)
parent     748aa6b06462804a9671997302df292ae9788d5c (diff)
Merge remote-tracking branch 'origin/5.6' into 5.7

Conflicts:
        src/plugins/platforms/xcb/qxcbintegration.cpp

Change-Id: I2d71d06a55f730df19ace0dd3304238584a0497f
Diffstat (limited to 'src/3rdparty')
-rw-r--r--  src/3rdparty/pcre/AUTHORS                         |    6
-rw-r--r--  src/3rdparty/pcre/LICENCE                         |    6
-rw-r--r--  src/3rdparty/pcre/pcre.h                          |    4
-rw-r--r--  src/3rdparty/pcre/pcre_compile.c                  |   59
-rw-r--r--  src/3rdparty/pcre/pcre_get.c                      |    4
-rw-r--r--  src/3rdparty/pcre/pcre_internal.h                 |    8
-rw-r--r--  src/3rdparty/pcre/pcre_jit_compile.c              | 2875
-rw-r--r--  src/3rdparty/pcre/pcre_study.c                    |    2
-rw-r--r--  src/3rdparty/pcre/sljit/sljitConfigInternal.h     |   67
-rw-r--r--  src/3rdparty/pcre/sljit/sljitExecAllocator.c      |    8
-rw-r--r--  src/3rdparty/pcre/sljit/sljitLir.c                |  422
-rw-r--r--  src/3rdparty/pcre/sljit/sljitLir.h                |  430
-rw-r--r--  src/3rdparty/pcre/sljit/sljitNativeARM_32.c       |  398
-rw-r--r--  src/3rdparty/pcre/sljit/sljitNativeARM_64.c       |  358
-rw-r--r--  src/3rdparty/pcre/sljit/sljitNativeARM_T2_32.c    |  370
-rw-r--r--  src/3rdparty/pcre/sljit/sljitNativeMIPS_32.c      |   24
-rw-r--r--  src/3rdparty/pcre/sljit/sljitNativeMIPS_64.c      |   44
-rw-r--r--  src/3rdparty/pcre/sljit/sljitNativeMIPS_common.c  |  380
-rw-r--r--  src/3rdparty/pcre/sljit/sljitNativePPC_32.c       |   26
-rw-r--r--  src/3rdparty/pcre/sljit/sljitNativePPC_64.c       |   28
-rw-r--r--  src/3rdparty/pcre/sljit/sljitNativePPC_common.c   |  388
-rw-r--r--  src/3rdparty/pcre/sljit/sljitNativeSPARC_32.c     |   24
-rw-r--r--  src/3rdparty/pcre/sljit/sljitNativeSPARC_common.c |  288
-rw-r--r--  src/3rdparty/pcre/sljit/sljitNativeTILEGX_64.c    |  212
-rw-r--r--  src/3rdparty/pcre/sljit/sljitNativeX86_32.c       |   80
-rw-r--r--  src/3rdparty/pcre/sljit/sljitNativeX86_64.c       |  170
-rw-r--r--  src/3rdparty/pcre/sljit/sljitNativeX86_common.c   |  706
-rw-r--r--  src/3rdparty/pcre/sljit/sljitUtils.c              |   11
28 files changed, 4228 insertions, 3170 deletions
diff --git a/src/3rdparty/pcre/AUTHORS b/src/3rdparty/pcre/AUTHORS
index d33723f198..342417a8a1 100644
--- a/src/3rdparty/pcre/AUTHORS
+++ b/src/3rdparty/pcre/AUTHORS
@@ -8,7 +8,7 @@ Email domain: cam.ac.uk
University of Cambridge Computing Service,
Cambridge, England.
-Copyright (c) 1997-2015 University of Cambridge
+Copyright (c) 1997-2016 University of Cambridge
All rights reserved
@@ -19,7 +19,7 @@ Written by: Zoltan Herczeg
Email local part: hzmester
Emain domain: freemail.hu
-Copyright(c) 2010-2015 Zoltan Herczeg
+Copyright(c) 2010-2016 Zoltan Herczeg
All rights reserved.
@@ -30,7 +30,7 @@ Written by: Zoltan Herczeg
Email local part: hzmester
Emain domain: freemail.hu
-Copyright(c) 2009-2015 Zoltan Herczeg
+Copyright(c) 2009-2016 Zoltan Herczeg
All rights reserved.
diff --git a/src/3rdparty/pcre/LICENCE b/src/3rdparty/pcre/LICENCE
index 9f6f98e477..dd977af971 100644
--- a/src/3rdparty/pcre/LICENCE
+++ b/src/3rdparty/pcre/LICENCE
@@ -25,7 +25,7 @@ Email domain: cam.ac.uk
University of Cambridge Computing Service,
Cambridge, England.
-Copyright (c) 1997-2015 University of Cambridge
+Copyright (c) 1997-2016 University of Cambridge
All rights reserved.
@@ -36,7 +36,7 @@ Written by: Zoltan Herczeg
Email local part: hzmester
Emain domain: freemail.hu
-Copyright(c) 2010-2015 Zoltan Herczeg
+Copyright(c) 2010-2016 Zoltan Herczeg
All rights reserved.
@@ -47,7 +47,7 @@ Written by: Zoltan Herczeg
Email local part: hzmester
Emain domain: freemail.hu
-Copyright(c) 2009-2015 Zoltan Herczeg
+Copyright(c) 2009-2016 Zoltan Herczeg
All rights reserved.
diff --git a/src/3rdparty/pcre/pcre.h b/src/3rdparty/pcre/pcre.h
index 609deb5be6..7055970065 100644
--- a/src/3rdparty/pcre/pcre.h
+++ b/src/3rdparty/pcre/pcre.h
@@ -43,8 +43,8 @@ POSSIBILITY OF SUCH DAMAGE.
#define PCRE_MAJOR 8
#define PCRE_MINOR 39
-#define PCRE_PRERELEASE -RC1
-#define PCRE_DATE 2015-11-23
+#define PCRE_PRERELEASE
+#define PCRE_DATE 2016-06-14
/* When an application links to a PCRE DLL in Windows, the symbols that are
imported have to be identified as such. When building PCRE, the appropriate
diff --git a/src/3rdparty/pcre/pcre_compile.c b/src/3rdparty/pcre/pcre_compile.c
index b9a239e554..7cd3950123 100644
--- a/src/3rdparty/pcre/pcre_compile.c
+++ b/src/3rdparty/pcre/pcre_compile.c
@@ -6,7 +6,7 @@
and semantics are as close as possible to those of the Perl 5 language.
Written by Philip Hazel
- Copyright (c) 1997-2014 University of Cambridge
+ Copyright (c) 1997-2016 University of Cambridge
-----------------------------------------------------------------------------
Redistribution and use in source and binary forms, with or without
@@ -485,7 +485,7 @@ static const char error_texts[] =
"lookbehind assertion is not fixed length\0"
"malformed number or name after (?(\0"
"conditional group contains more than two branches\0"
- "assertion expected after (?(\0"
+ "assertion expected after (?( or (?(?C)\0"
"(?R or (?[+-]digits must be followed by )\0"
/* 30 */
"unknown POSIX class name\0"
@@ -560,6 +560,7 @@ static const char error_texts[] =
/* 85 */
"parentheses are too deeply nested (stack check)\0"
"digits missing in \\x{} or \\o{}\0"
+ "regular expression is too complicated\0"
;
/* Table to identify digits and hex digits. This is used when compiling
@@ -4566,6 +4567,10 @@ for (;; ptr++)
pcre_uint32 ec;
pcre_uchar mcbuffer[8];
+ /* Come here to restart the loop without advancing the pointer. */
+
+ REDO_LOOP:
+
/* Get next character in the pattern */
c = *ptr;
@@ -4591,7 +4596,8 @@ for (;; ptr++)
if (code > cd->start_workspace + cd->workspace_size -
WORK_SIZE_SAFETY_MARGIN) /* Check for overrun */
{
- *errorcodeptr = ERR52;
+ *errorcodeptr = (code >= cd->start_workspace + cd->workspace_size)?
+ ERR52 : ERR87;
goto FAILED;
}
@@ -4710,11 +4716,7 @@ for (;; ptr++)
/* If we skipped any characters, restart the loop. Otherwise, we didn't see
a comment. */
- if (ptr > wscptr)
- {
- ptr--;
- continue;
- }
+ if (ptr > wscptr) goto REDO_LOOP;
}
/* Skip over (?# comments. We need to do this here because we want to know if
@@ -4855,15 +4857,15 @@ for (;; ptr++)
if (STRNCMP_UC_C8(ptr+1, STRING_WEIRD_STARTWORD, 6) == 0)
{
nestptr = ptr + 7;
- ptr = sub_start_of_word - 1;
- continue;
+ ptr = sub_start_of_word;
+ goto REDO_LOOP;
}
if (STRNCMP_UC_C8(ptr+1, STRING_WEIRD_ENDWORD, 6) == 0)
{
nestptr = ptr + 7;
- ptr = sub_end_of_word - 1;
- continue;
+ ptr = sub_end_of_word;
+ goto REDO_LOOP;
}
/* Handle a real character class. */
@@ -6626,8 +6628,21 @@ for (;; ptr++)
cd->had_accept = TRUE;
for (oc = cd->open_caps; oc != NULL; oc = oc->next)
{
- *code++ = OP_CLOSE;
- PUT2INC(code, 0, oc->number);
+ if (lengthptr != NULL)
+ {
+#ifdef COMPILE_PCRE8
+ *lengthptr += 1 + IMM2_SIZE;
+#elif defined COMPILE_PCRE16
+ *lengthptr += 2 + IMM2_SIZE;
+#elif defined COMPILE_PCRE32
+ *lengthptr += 4 + IMM2_SIZE;
+#endif
+ }
+ else
+ {
+ *code++ = OP_CLOSE;
+ PUT2INC(code, 0, oc->number);
+ }
}
setverb = *code++ =
(cd->assert_depth > 0)? OP_ASSERT_ACCEPT : OP_ACCEPT;
@@ -6756,6 +6771,15 @@ for (;; ptr++)
for (i = 3;; i++) if (!IS_DIGIT(ptr[i])) break;
if (ptr[i] == CHAR_RIGHT_PARENTHESIS)
tempptr += i + 1;
+
+ /* tempptr should now be pointing to the opening parenthesis of the
+ assertion condition. */
+
+ if (*tempptr != CHAR_LEFT_PARENTHESIS)
+ {
+ *errorcodeptr = ERR28;
+ goto FAILED;
+ }
}
/* For conditions that are assertions, check the syntax, and then exit
@@ -7296,7 +7320,12 @@ for (;; ptr++)
so far in order to get the number. If the name is not found, leave
the value of recno as 0 for a forward reference. */
- else
+ /* This patch (removing "else") fixes a problem when a reference is
+ to multiple identically named nested groups from within the nest.
+ Once again, it is not the "proper" fix, and it results in an
+ over-allocation of memory. */
+
+ /* else */
{
ng = cd->named_groups;
for (i = 0; i < cd->names_found; i++, ng++)
diff --git a/src/3rdparty/pcre/pcre_get.c b/src/3rdparty/pcre/pcre_get.c
index cdd2abc80f..9475d5e88c 100644
--- a/src/3rdparty/pcre/pcre_get.c
+++ b/src/3rdparty/pcre/pcre_get.c
@@ -250,7 +250,7 @@ Arguments:
code the compiled regex
stringname the name of the capturing substring
ovector the vector of matched substrings
- stringcount number of captured substrings
+ stringcount number of captured substrings
Returns: the number of the first that is set,
or the number of the last one if none are set,
@@ -464,7 +464,7 @@ for (i = 0; i < double_count; i += 2)
{
size += sizeof(pcre_uchar *) + IN_UCHARS(1);
if (ovector[i+1] > ovector[i]) size += IN_UCHARS(ovector[i+1] - ovector[i]);
- }
+ }
stringlist = (pcre_uchar **)(PUBL(malloc))(size);
if (stringlist == NULL) return PCRE_ERROR_NOMEMORY;
diff --git a/src/3rdparty/pcre/pcre_internal.h b/src/3rdparty/pcre/pcre_internal.h
index f7a5ee7aa6..2923b29f82 100644
--- a/src/3rdparty/pcre/pcre_internal.h
+++ b/src/3rdparty/pcre/pcre_internal.h
@@ -7,7 +7,7 @@
and semantics are as close as possible to those of the Perl 5 language.
Written by Philip Hazel
- Copyright (c) 1997-2014 University of Cambridge
+ Copyright (c) 1997-2016 University of Cambridge
-----------------------------------------------------------------------------
Redistribution and use in source and binary forms, with or without
@@ -275,7 +275,7 @@ pcre.h(.in) and disable (comment out) this message. */
typedef pcre_uint16 pcre_uchar;
#define UCHAR_SHIFT (1)
-#define IN_UCHARS(x) ((x) << UCHAR_SHIFT)
+#define IN_UCHARS(x) ((x) * 2)
#define MAX_255(c) ((c) <= 255u)
#define TABLE_GET(c, table, default) (MAX_255(c)? ((table)[c]):(default))
@@ -283,7 +283,7 @@ typedef pcre_uint16 pcre_uchar;
typedef pcre_uint32 pcre_uchar;
#define UCHAR_SHIFT (2)
-#define IN_UCHARS(x) ((x) << UCHAR_SHIFT)
+#define IN_UCHARS(x) ((x) * 4)
#define MAX_255(c) ((c) <= 255u)
#define TABLE_GET(c, table, default) (MAX_255(c)? ((table)[c]):(default))
@@ -2289,7 +2289,7 @@ enum { ERR0, ERR1, ERR2, ERR3, ERR4, ERR5, ERR6, ERR7, ERR8, ERR9,
ERR50, ERR51, ERR52, ERR53, ERR54, ERR55, ERR56, ERR57, ERR58, ERR59,
ERR60, ERR61, ERR62, ERR63, ERR64, ERR65, ERR66, ERR67, ERR68, ERR69,
ERR70, ERR71, ERR72, ERR73, ERR74, ERR75, ERR76, ERR77, ERR78, ERR79,
- ERR80, ERR81, ERR82, ERR83, ERR84, ERR85, ERR86, ERRCOUNT };
+ ERR80, ERR81, ERR82, ERR83, ERR84, ERR85, ERR86, ERR87, ERRCOUNT };
/* JIT compiling modes. The function list is indexed by them. */
diff --git a/src/3rdparty/pcre/pcre_jit_compile.c b/src/3rdparty/pcre/pcre_jit_compile.c
index 445de0cbef..4f15a27ac2 100644
--- a/src/3rdparty/pcre/pcre_jit_compile.c
+++ b/src/3rdparty/pcre/pcre_jit_compile.c
@@ -168,13 +168,13 @@ typedef struct jit_arguments {
pcre_uchar *mark_ptr;
void *callout_data;
/* Everything else after. */
- pcre_uint32 limit_match;
+ sljit_u32 limit_match;
int real_offset_count;
int offset_count;
- pcre_uint8 notbol;
- pcre_uint8 noteol;
- pcre_uint8 notempty;
- pcre_uint8 notempty_atstart;
+ sljit_u8 notbol;
+ sljit_u8 noteol;
+ sljit_u8 notempty;
+ sljit_u8 notempty_atstart;
} jit_arguments;
typedef struct executable_functions {
@@ -183,8 +183,8 @@ typedef struct executable_functions {
sljit_uw executable_sizes[JIT_NUMBER_OF_COMPILE_MODES];
PUBL(jit_callback) callback;
void *userdata;
- pcre_uint32 top_bracket;
- pcre_uint32 limit_match;
+ sljit_u32 top_bracket;
+ sljit_u32 limit_match;
} executable_functions;
typedef struct jump_list {
@@ -277,11 +277,25 @@ typedef struct braminzero_backtrack {
struct sljit_label *matchingpath;
} braminzero_backtrack;
-typedef struct iterator_backtrack {
+typedef struct char_iterator_backtrack {
backtrack_common common;
/* Next iteration. */
struct sljit_label *matchingpath;
-} iterator_backtrack;
+ union {
+ jump_list *backtracks;
+ struct {
+ unsigned int othercasebit;
+ pcre_uchar chr;
+ BOOL enabled;
+ } charpos;
+ } u;
+} char_iterator_backtrack;
+
+typedef struct ref_iterator_backtrack {
+ backtrack_common common;
+ /* Next iteration. */
+ struct sljit_label *matchingpath;
+} ref_iterator_backtrack;
typedef struct recurse_entry {
struct recurse_entry *next;
@@ -321,40 +335,46 @@ typedef struct compiler_common {
/* First byte code. */
pcre_uchar *start;
/* Maps private data offset to each opcode. */
- sljit_si *private_data_ptrs;
+ sljit_s32 *private_data_ptrs;
/* Chain list of read-only data ptrs. */
void *read_only_data_head;
/* Tells whether the capturing bracket is optimized. */
- pcre_uint8 *optimized_cbracket;
+ sljit_u8 *optimized_cbracket;
/* Tells whether the starting offset is a target of then. */
- pcre_uint8 *then_offsets;
+ sljit_u8 *then_offsets;
/* Current position where a THEN must jump. */
then_trap_backtrack *then_trap;
/* Starting offset of private data for capturing brackets. */
- int cbra_ptr;
+ sljit_s32 cbra_ptr;
/* Output vector starting point. Must be divisible by 2. */
- int ovector_start;
+ sljit_s32 ovector_start;
+ /* Points to the starting character of the current match. */
+ sljit_s32 start_ptr;
/* Last known position of the requested byte. */
- int req_char_ptr;
+ sljit_s32 req_char_ptr;
/* Head of the last recursion. */
- int recursive_head_ptr;
- /* First inspected character for partial matching. */
- int start_used_ptr;
+ sljit_s32 recursive_head_ptr;
+ /* First inspected character for partial matching.
+ (Needed for avoiding zero length partial matches.) */
+ sljit_s32 start_used_ptr;
/* Starting pointer for partial soft matches. */
- int hit_start;
- /* End pointer of the first line. */
- int first_line_end;
+ sljit_s32 hit_start;
+ /* Pointer of the match end position. */
+ sljit_s32 match_end_ptr;
/* Points to the marked string. */
- int mark_ptr;
+ sljit_s32 mark_ptr;
/* Recursive control verb management chain. */
- int control_head_ptr;
+ sljit_s32 control_head_ptr;
/* Points to the last matched capture block index. */
- int capture_last_ptr;
- /* Points to the starting position of the current match. */
- int start_ptr;
+ sljit_s32 capture_last_ptr;
+ /* Fast forward skipping byte code pointer. */
+ pcre_uchar *fast_forward_bc_ptr;
+ /* Locals used by fast fail optimization. */
+ sljit_s32 fast_fail_start_ptr;
+ sljit_s32 fast_fail_end_ptr;
/* Flipped and lower case tables. */
- const pcre_uint8 *fcc;
+ const sljit_u8 *fcc;
sljit_sw lcc;
/* Mode can be PCRE_STUDY_JIT_COMPILE and others. */
int mode;
@@ -366,20 +386,20 @@ typedef struct compiler_common {
BOOL has_skip_arg;
/* (*THEN) is found in the pattern. */
BOOL has_then;
- /* Needs to know the start position anytime. */
- BOOL needs_start_ptr;
+ /* (*SKIP) or (*SKIP:arg) is found in lookbehind assertion. */
+ BOOL has_skip_in_assert_back;
/* Currently in recurse or negative assert. */
BOOL local_exit;
/* Currently in a positive assert. */
BOOL positive_assert;
/* Newline control. */
int nltype;
- pcre_uint32 nlmax;
- pcre_uint32 nlmin;
+ sljit_u32 nlmax;
+ sljit_u32 nlmin;
int newline;
int bsr_nltype;
- pcre_uint32 bsr_nlmax;
- pcre_uint32 bsr_nlmin;
+ sljit_u32 bsr_nlmax;
+ sljit_u32 bsr_nlmin;
/* Dollar endonly. */
int endonly;
/* Tables. */
@@ -419,6 +439,7 @@ typedef struct compiler_common {
BOOL utf;
#ifdef SUPPORT_UCP
BOOL use_ucp;
+ jump_list *getucd;
#endif
#ifdef COMPILE_PCRE8
jump_list *utfreadchar;
@@ -426,9 +447,6 @@ typedef struct compiler_common {
jump_list *utfreadtype8;
#endif
#endif /* SUPPORT_UTF */
-#ifdef SUPPORT_UCP
- jump_list *getucd;
-#endif
} compiler_common;
/* For byte_sequence_compare. */
@@ -439,27 +457,27 @@ typedef struct compare_context {
#if defined SLJIT_UNALIGNED && SLJIT_UNALIGNED
int ucharptr;
union {
- sljit_si asint;
- sljit_uh asushort;
+ sljit_s32 asint;
+ sljit_u16 asushort;
#if defined COMPILE_PCRE8
- sljit_ub asbyte;
- sljit_ub asuchars[4];
+ sljit_u8 asbyte;
+ sljit_u8 asuchars[4];
#elif defined COMPILE_PCRE16
- sljit_uh asuchars[2];
+ sljit_u16 asuchars[2];
#elif defined COMPILE_PCRE32
- sljit_ui asuchars[1];
+ sljit_u32 asuchars[1];
#endif
} c;
union {
- sljit_si asint;
- sljit_uh asushort;
+ sljit_s32 asint;
+ sljit_u16 asushort;
#if defined COMPILE_PCRE8
- sljit_ub asbyte;
- sljit_ub asuchars[4];
+ sljit_u8 asbyte;
+ sljit_u8 asuchars[4];
#elif defined COMPILE_PCRE16
- sljit_uh asuchars[2];
+ sljit_u16 asuchars[2];
#elif defined COMPILE_PCRE32
- sljit_ui asuchars[1];
+ sljit_u32 asuchars[1];
#endif
} oc;
#endif
@@ -501,14 +519,14 @@ the start pointers when the end of the capturing group has not yet reached. */
#define PRIVATE_DATA(cc) (common->private_data_ptrs[(cc) - common->start])
#if defined COMPILE_PCRE8
-#define MOV_UCHAR SLJIT_MOV_UB
-#define MOVU_UCHAR SLJIT_MOVU_UB
+#define MOV_UCHAR SLJIT_MOV_U8
+#define MOVU_UCHAR SLJIT_MOVU_U8
#elif defined COMPILE_PCRE16
-#define MOV_UCHAR SLJIT_MOV_UH
-#define MOVU_UCHAR SLJIT_MOVU_UH
+#define MOV_UCHAR SLJIT_MOV_U16
+#define MOVU_UCHAR SLJIT_MOVU_U16
#elif defined COMPILE_PCRE32
-#define MOV_UCHAR SLJIT_MOV_UI
-#define MOVU_UCHAR SLJIT_MOVU_UI
+#define MOV_UCHAR SLJIT_MOV_U32
+#define MOVU_UCHAR SLJIT_MOVU_U32
#else
#error Unsupported compiling mode
#endif
@@ -564,11 +582,6 @@ SLJIT_ASSERT(*cc >= OP_KET && *cc <= OP_KETRPOS);
return count;
}
-static int ones_in_half_byte[16] = {
- /* 0 */ 0, 1, 1, 2, /* 4 */ 1, 2, 2, 3,
- /* 8 */ 1, 2, 2, 3, /* 12 */ 2, 3, 3, 4
-};
-
/* Functions whose might need modification for all new supported opcodes:
next_opcode
check_opcode_types
@@ -780,6 +793,7 @@ static BOOL check_opcode_types(compiler_common *common, pcre_uchar *cc, pcre_uch
{
int count;
pcre_uchar *slot;
+pcre_uchar *assert_back_end = cc - 1;
/* Calculate important variables (like stack size) and checks whether all opcodes are supported. */
while (cc < ccend)
@@ -850,15 +864,19 @@ while (cc < ccend)
cc += 2 + 2 * LINK_SIZE;
break;
+ case OP_ASSERTBACK:
+ slot = bracketend(cc);
+ if (slot > assert_back_end)
+ assert_back_end = slot;
+ cc += 1 + LINK_SIZE;
+ break;
+
case OP_THEN_ARG:
common->has_then = TRUE;
common->control_head_ptr = 1;
/* Fall through. */
case OP_PRUNE_ARG:
- common->needs_start_ptr = TRUE;
- /* Fall through. */
-
case OP_MARK:
if (common->mark_ptr == 0)
{
@@ -871,17 +889,20 @@ while (cc < ccend)
case OP_THEN:
common->has_then = TRUE;
common->control_head_ptr = 1;
- /* Fall through. */
+ cc += 1;
+ break;
- case OP_PRUNE:
case OP_SKIP:
- common->needs_start_ptr = TRUE;
+ if (cc < assert_back_end)
+ common->has_skip_in_assert_back = TRUE;
cc += 1;
break;
case OP_SKIP_ARG:
common->control_head_ptr = 1;
common->has_skip_arg = TRUE;
+ if (cc < assert_back_end)
+ common->has_skip_in_assert_back = TRUE;
cc += 1 + 2 + cc[1];
break;
@@ -895,8 +916,189 @@ while (cc < ccend)
return TRUE;
}
+static BOOL is_accelerated_repeat(pcre_uchar *cc)
+{
+switch(*cc)
+ {
+ case OP_TYPESTAR:
+ case OP_TYPEMINSTAR:
+ case OP_TYPEPLUS:
+ case OP_TYPEMINPLUS:
+ case OP_TYPEPOSSTAR:
+ case OP_TYPEPOSPLUS:
+ return (cc[1] != OP_ANYNL && cc[1] != OP_EXTUNI);
+
+ case OP_STAR:
+ case OP_MINSTAR:
+ case OP_PLUS:
+ case OP_MINPLUS:
+ case OP_POSSTAR:
+ case OP_POSPLUS:
+
+ case OP_STARI:
+ case OP_MINSTARI:
+ case OP_PLUSI:
+ case OP_MINPLUSI:
+ case OP_POSSTARI:
+ case OP_POSPLUSI:
+
+ case OP_NOTSTAR:
+ case OP_NOTMINSTAR:
+ case OP_NOTPLUS:
+ case OP_NOTMINPLUS:
+ case OP_NOTPOSSTAR:
+ case OP_NOTPOSPLUS:
+
+ case OP_NOTSTARI:
+ case OP_NOTMINSTARI:
+ case OP_NOTPLUSI:
+ case OP_NOTMINPLUSI:
+ case OP_NOTPOSSTARI:
+ case OP_NOTPOSPLUSI:
+ return TRUE;
+
+ case OP_CLASS:
+ case OP_NCLASS:
+#if defined SUPPORT_UTF || !defined COMPILE_PCRE8
+ case OP_XCLASS:
+ cc += (*cc == OP_XCLASS) ? GET(cc, 1) : (int)(1 + (32 / sizeof(pcre_uchar)));
+#else
+ cc += (1 + (32 / sizeof(pcre_uchar)));
+#endif
+
+ switch(*cc)
+ {
+ case OP_CRSTAR:
+ case OP_CRMINSTAR:
+ case OP_CRPLUS:
+ case OP_CRMINPLUS:
+ case OP_CRPOSSTAR:
+ case OP_CRPOSPLUS:
+ return TRUE;
+ }
+ break;
+ }
+return FALSE;
+}
+
+static SLJIT_INLINE BOOL detect_fast_forward_skip(compiler_common *common, int *private_data_start)
+{
+pcre_uchar *cc = common->start;
+pcre_uchar *end;
+
+/* Skip not repeated brackets. */
+while (TRUE)
+ {
+ switch(*cc)
+ {
+ case OP_SOD:
+ case OP_SOM:
+ case OP_SET_SOM:
+ case OP_NOT_WORD_BOUNDARY:
+ case OP_WORD_BOUNDARY:
+ case OP_EODN:
+ case OP_EOD:
+ case OP_CIRC:
+ case OP_CIRCM:
+ case OP_DOLL:
+ case OP_DOLLM:
+ /* Zero width assertions. */
+ cc++;
+ continue;
+ }
+
+ if (*cc != OP_BRA && *cc != OP_CBRA)
+ break;
+
+ end = cc + GET(cc, 1);
+ if (*end != OP_KET || PRIVATE_DATA(end) != 0)
+ return FALSE;
+ if (*cc == OP_CBRA)
+ {
+ if (common->optimized_cbracket[GET2(cc, 1 + LINK_SIZE)] == 0)
+ return FALSE;
+ cc += IMM2_SIZE;
+ }
+ cc += 1 + LINK_SIZE;
+ }
+
+if (is_accelerated_repeat(cc))
+ {
+ common->fast_forward_bc_ptr = cc;
+ common->private_data_ptrs[(cc + 1) - common->start] = *private_data_start;
+ *private_data_start += sizeof(sljit_sw);
+ return TRUE;
+ }
+return FALSE;
+}
+
+static SLJIT_INLINE void detect_fast_fail(compiler_common *common, pcre_uchar *cc, int *private_data_start, sljit_s32 depth)
+{
+ pcre_uchar *next_alt;
+
+ SLJIT_ASSERT(*cc == OP_BRA || *cc == OP_CBRA);
+
+ if (*cc == OP_CBRA && common->optimized_cbracket[GET2(cc, 1 + LINK_SIZE)] == 0)
+ return;
+
+ next_alt = bracketend(cc) - (1 + LINK_SIZE);
+ if (*next_alt != OP_KET || PRIVATE_DATA(next_alt) != 0)
+ return;
+
+ do
+ {
+ next_alt = cc + GET(cc, 1);
+
+ cc += 1 + LINK_SIZE + ((*cc == OP_CBRA) ? IMM2_SIZE : 0);
+
+ while (TRUE)
+ {
+ switch(*cc)
+ {
+ case OP_SOD:
+ case OP_SOM:
+ case OP_SET_SOM:
+ case OP_NOT_WORD_BOUNDARY:
+ case OP_WORD_BOUNDARY:
+ case OP_EODN:
+ case OP_EOD:
+ case OP_CIRC:
+ case OP_CIRCM:
+ case OP_DOLL:
+ case OP_DOLLM:
+ /* Zero width assertions. */
+ cc++;
+ continue;
+ }
+ break;
+ }
+
+ if (depth > 0 && (*cc == OP_BRA || *cc == OP_CBRA))
+ detect_fast_fail(common, cc, private_data_start, depth - 1);
+
+ if (is_accelerated_repeat(cc))
+ {
+ common->private_data_ptrs[(cc + 1) - common->start] = *private_data_start;
+
+ if (common->fast_fail_start_ptr == 0)
+ common->fast_fail_start_ptr = *private_data_start;
+
+ *private_data_start += sizeof(sljit_sw);
+ common->fast_fail_end_ptr = *private_data_start;
+
+ if (*private_data_start > SLJIT_MAX_LOCAL_SIZE)
+ return;
+ }
+
+ cc = next_alt;
+ }
+ while (*cc == OP_ALT);
+}
+
static int get_class_iterator_size(pcre_uchar *cc)
{
+sljit_u32 min;
+sljit_u32 max;
switch(*cc)
{
case OP_CRSTAR:
@@ -911,9 +1113,14 @@ switch(*cc)
case OP_CRRANGE:
case OP_CRMINRANGE:
- if (GET2(cc, 1) == GET2(cc, 1 + IMM2_SIZE))
- return 0;
- return 2;
+ min = GET2(cc, 1);
+ max = GET2(cc, 1 + IMM2_SIZE);
+ if (max == 0)
+ return (*cc == OP_CRRANGE) ? 2 : 1;
+ max -= min;
+ if (max > 2)
+ max = 2;
+ return max;
default:
return 0;
@@ -1186,14 +1393,14 @@ while (cc < ccend)
case OP_CLASS:
case OP_NCLASS:
- size += 1 + 32 / sizeof(pcre_uchar);
space = get_class_iterator_size(cc + size);
+ size = 1 + 32 / sizeof(pcre_uchar);
break;
#if defined SUPPORT_UTF || !defined COMPILE_PCRE8
case OP_XCLASS:
- size = GET(cc, 1);
space = get_class_iterator_size(cc + size);
+ size = GET(cc, 1);
break;
#endif
@@ -1406,6 +1613,7 @@ while (cc < ccend)
case OP_CLASS:
case OP_NCLASS:
case OP_XCLASS:
+ case OP_CALLOUT:
cc = next_opcode(common, cc);
SLJIT_ASSERT(cc != NULL);
@@ -1990,7 +2198,7 @@ if (save)
SLJIT_ASSERT(cc == ccend && stackptr == stacktop && (save || (tmp1empty && tmp2empty)));
}
-static SLJIT_INLINE pcre_uchar *set_then_offsets(compiler_common *common, pcre_uchar *cc, pcre_uint8 *current_offset)
+static SLJIT_INLINE pcre_uchar *set_then_offsets(compiler_common *common, pcre_uchar *cc, sljit_u8 *current_offset)
{
pcre_uchar *end = bracketend(cc);
BOOL has_alternatives = cc[GET(cc, 1)] == OP_ALT;
@@ -2113,6 +2321,7 @@ static SLJIT_INLINE void allocate_stack(compiler_common *common, int size)
/* May destroy all locals and registers except TMP2. */
DEFINE_COMPILER;
+SLJIT_ASSERT(size > 0);
OP2(SLJIT_ADD, STACK_TOP, 0, STACK_TOP, 0, SLJIT_IMM, size * sizeof(sljit_sw));
#ifdef DESTROY_REGISTERS
OP1(SLJIT_MOV, TMP1, 0, SLJIT_IMM, 12345);
@@ -2127,6 +2336,8 @@ add_stub(common, CMP(SLJIT_GREATER, STACK_TOP, 0, STACK_LIMIT, 0));
static SLJIT_INLINE void free_stack(compiler_common *common, int size)
{
DEFINE_COMPILER;
+
+SLJIT_ASSERT(size > 0);
OP2(SLJIT_SUB, STACK_TOP, 0, STACK_TOP, 0, SLJIT_IMM, size * sizeof(sljit_sw));
}
@@ -2190,6 +2401,18 @@ else
}
}
+static SLJIT_INLINE void reset_fast_fail(compiler_common *common)
+{
+DEFINE_COMPILER;
+sljit_s32 i;
+
+SLJIT_ASSERT(common->fast_fail_start_ptr < common->fast_fail_end_ptr);
+
+OP2(SLJIT_SUB, TMP1, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(1));
+for (i = common->fast_fail_start_ptr; i < common->fast_fail_end_ptr; i += sizeof(sljit_sw))
+ OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), i, TMP1, 0);
+}
+
static SLJIT_INLINE void do_reset_match(compiler_common *common, int length)
{
DEFINE_COMPILER;
@@ -2262,7 +2485,7 @@ OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), OVECTOR(1), STR_PTR, 0);
OP1(SLJIT_MOV, SLJIT_R0, 0, ARGUMENTS, 0);
if (common->mark_ptr != 0)
OP1(SLJIT_MOV, SLJIT_R2, 0, SLJIT_MEM1(SLJIT_SP), common->mark_ptr);
-OP1(SLJIT_MOV_SI, SLJIT_R1, 0, SLJIT_MEM1(SLJIT_R0), SLJIT_OFFSETOF(jit_arguments, offset_count));
+OP1(SLJIT_MOV_S32, SLJIT_R1, 0, SLJIT_MEM1(SLJIT_R0), SLJIT_OFFSETOF(jit_arguments, offset_count));
if (common->mark_ptr != 0)
OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_R0), SLJIT_OFFSETOF(jit_arguments, mark_ptr), SLJIT_R2, 0);
OP2(SLJIT_SUB, SLJIT_R2, 0, SLJIT_MEM1(SLJIT_R0), SLJIT_OFFSETOF(jit_arguments, offsets), SLJIT_IMM, sizeof(int));
@@ -2277,7 +2500,7 @@ OP2(SLJIT_ADD, SLJIT_S0, 0, SLJIT_S0, 0, SLJIT_IMM, sizeof(sljit_sw));
#if defined COMPILE_PCRE16 || defined COMPILE_PCRE32
OP2(SLJIT_ASHR, SLJIT_S1, 0, SLJIT_S1, 0, SLJIT_IMM, UCHAR_SHIFT);
#endif
-OP1(SLJIT_MOVU_SI, SLJIT_MEM1(SLJIT_R2), sizeof(int), SLJIT_S1, 0);
+OP1(SLJIT_MOVU_S32, SLJIT_MEM1(SLJIT_R2), sizeof(int), SLJIT_S1, 0);
OP2(SLJIT_SUB | SLJIT_SET_E, SLJIT_R1, 0, SLJIT_R1, 0, SLJIT_IMM, 1);
JUMPTO(SLJIT_NOT_ZERO, loop);
JUMPHERE(early_quit);
@@ -2310,7 +2533,7 @@ SLJIT_ASSERT(common->start_used_ptr != 0 && common->start_ptr != 0
OP1(SLJIT_MOV, SLJIT_R1, 0, ARGUMENTS, 0);
OP1(SLJIT_MOV, SLJIT_RETURN_REG, 0, SLJIT_IMM, PCRE_ERROR_PARTIAL);
-OP1(SLJIT_MOV_SI, SLJIT_R2, 0, SLJIT_MEM1(SLJIT_R1), SLJIT_OFFSETOF(jit_arguments, real_offset_count));
+OP1(SLJIT_MOV_S32, SLJIT_R2, 0, SLJIT_MEM1(SLJIT_R1), SLJIT_OFFSETOF(jit_arguments, real_offset_count));
CMPTO(SLJIT_SIG_LESS, SLJIT_R2, 0, SLJIT_IMM, 2, quit);
/* Store match begin and end. */
@@ -2322,7 +2545,7 @@ OP2(SLJIT_SUB, SLJIT_R2, 0, SLJIT_MEM1(SLJIT_SP), common->mode == JIT_PARTIAL_HA
#if defined COMPILE_PCRE16 || defined COMPILE_PCRE32
OP2(SLJIT_ASHR, SLJIT_R2, 0, SLJIT_R2, 0, SLJIT_IMM, UCHAR_SHIFT);
#endif
-OP1(SLJIT_MOV_SI, SLJIT_MEM1(SLJIT_R1), 2 * sizeof(int), SLJIT_R2, 0);
+OP1(SLJIT_MOV_S32, SLJIT_MEM1(SLJIT_R1), 2 * sizeof(int), SLJIT_R2, 0);
JUMPHERE(jump);
OP1(SLJIT_MOV, SLJIT_R2, 0, SLJIT_MEM1(SLJIT_SP), common->mode == JIT_PARTIAL_HARD_COMPILE ? common->start_used_ptr : common->hit_start);
@@ -2330,13 +2553,13 @@ OP2(SLJIT_SUB, SLJIT_S1, 0, STR_END, 0, SLJIT_S0, 0);
#if defined COMPILE_PCRE16 || defined COMPILE_PCRE32
OP2(SLJIT_ASHR, SLJIT_S1, 0, SLJIT_S1, 0, SLJIT_IMM, UCHAR_SHIFT);
#endif
-OP1(SLJIT_MOV_SI, SLJIT_MEM1(SLJIT_R1), sizeof(int), SLJIT_S1, 0);
+OP1(SLJIT_MOV_S32, SLJIT_MEM1(SLJIT_R1), sizeof(int), SLJIT_S1, 0);
OP2(SLJIT_SUB, SLJIT_R2, 0, SLJIT_R2, 0, SLJIT_S0, 0);
#if defined COMPILE_PCRE16 || defined COMPILE_PCRE32
OP2(SLJIT_ASHR, SLJIT_R2, 0, SLJIT_R2, 0, SLJIT_IMM, UCHAR_SHIFT);
#endif
-OP1(SLJIT_MOV_SI, SLJIT_MEM1(SLJIT_R1), 0, SLJIT_R2, 0);
+OP1(SLJIT_MOV_S32, SLJIT_MEM1(SLJIT_R1), 0, SLJIT_R2, 0);
JUMPTO(SLJIT_JUMP, quit);
}
@@ -2573,7 +2796,7 @@ else
JUMPHERE(jump);
}
-static void peek_char(compiler_common *common, pcre_uint32 max)
+static void peek_char(compiler_common *common, sljit_u32 max)
{
/* Reads the character into TMP1, keeps STR_PTR.
Does not check STR_END. TMP2 Destroyed. */
@@ -2618,12 +2841,12 @@ if (common->utf)
#if defined SUPPORT_UTF && defined COMPILE_PCRE8
-static BOOL is_char7_bitset(const pcre_uint8 *bitset, BOOL nclass)
+static BOOL is_char7_bitset(const sljit_u8 *bitset, BOOL nclass)
{
/* Tells whether the character codes below 128 are enough
to determine a match. */
-const pcre_uint8 value = nclass ? 0xff : 0;
-const pcre_uint8 *end = bitset + 32;
+const sljit_u8 value = nclass ? 0xff : 0;
+const sljit_u8 *end = bitset + 32;
bitset += 16;
do
@@ -2648,12 +2871,12 @@ SLJIT_ASSERT(common->utf);
OP1(MOV_UCHAR, TMP2, 0, SLJIT_MEM1(STR_PTR), 0);
OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(1));
-OP1(SLJIT_MOV_UB, TMP1, 0, SLJIT_MEM1(TMP2), common->ctypes);
+OP1(SLJIT_MOV_U8, TMP1, 0, SLJIT_MEM1(TMP2), common->ctypes);
if (full_read)
{
jump = CMP(SLJIT_LESS, TMP2, 0, SLJIT_IMM, 0xc0);
- OP1(SLJIT_MOV_UB, TMP2, 0, SLJIT_MEM1(TMP2), (sljit_sw)PRIV(utf8_table4) - 0xc0);
+ OP1(SLJIT_MOV_U8, TMP2, 0, SLJIT_MEM1(TMP2), (sljit_sw)PRIV(utf8_table4) - 0xc0);
OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, TMP2, 0);
JUMPHERE(jump);
}
@@ -2661,7 +2884,7 @@ if (full_read)
#endif /* SUPPORT_UTF && COMPILE_PCRE8 */
-static void read_char_range(compiler_common *common, pcre_uint32 min, pcre_uint32 max, BOOL update_str_ptr)
+static void read_char_range(compiler_common *common, sljit_u32 min, sljit_u32 max, BOOL update_str_ptr)
{
/* Reads the precise value of a character into TMP1, if the character is
between min and max (c >= min && c <= max). Otherwise it returns with a value
@@ -2692,7 +2915,7 @@ if (common->utf)
{
OP2(SLJIT_SUB, TMP2, 0, TMP1, 0, SLJIT_IMM, 0xf0);
if (update_str_ptr)
- OP1(SLJIT_MOV_UB, RETURN_ADDR, 0, SLJIT_MEM1(TMP1), (sljit_sw)PRIV(utf8_table4) - 0xc0);
+ OP1(SLJIT_MOV_U8, RETURN_ADDR, 0, SLJIT_MEM1(TMP1), (sljit_sw)PRIV(utf8_table4) - 0xc0);
OP1(MOV_UCHAR, TMP1, 0, SLJIT_MEM1(STR_PTR), IN_UCHARS(0));
jump2 = CMP(SLJIT_GREATER, TMP2, 0, SLJIT_IMM, 0x7);
OP2(SLJIT_SHL, TMP2, 0, TMP2, 0, SLJIT_IMM, 6);
@@ -2716,7 +2939,7 @@ if (common->utf)
{
OP2(SLJIT_SUB, TMP2, 0, TMP1, 0, SLJIT_IMM, 0xe0);
if (update_str_ptr)
- OP1(SLJIT_MOV_UB, RETURN_ADDR, 0, SLJIT_MEM1(TMP1), (sljit_sw)PRIV(utf8_table4) - 0xc0);
+ OP1(SLJIT_MOV_U8, RETURN_ADDR, 0, SLJIT_MEM1(TMP1), (sljit_sw)PRIV(utf8_table4) - 0xc0);
OP1(MOV_UCHAR, TMP1, 0, SLJIT_MEM1(STR_PTR), IN_UCHARS(0));
jump2 = CMP(SLJIT_GREATER, TMP2, 0, SLJIT_IMM, 0xf);
OP2(SLJIT_SHL, TMP2, 0, TMP2, 0, SLJIT_IMM, 6);
@@ -2736,7 +2959,7 @@ if (common->utf)
add_jump(compiler, (max < 0x10000) ? &common->utfreadchar16 : &common->utfreadchar, JUMP(SLJIT_FAST_CALL));
else if (max < 128)
{
- OP1(SLJIT_MOV_UB, TMP2, 0, SLJIT_MEM1(TMP1), (sljit_sw)PRIV(utf8_table4) - 0xc0);
+ OP1(SLJIT_MOV_U8, TMP2, 0, SLJIT_MEM1(TMP1), (sljit_sw)PRIV(utf8_table4) - 0xc0);
OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, TMP2, 0);
}
else
@@ -2745,7 +2968,7 @@ if (common->utf)
if (!update_str_ptr)
OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(1));
else
- OP1(SLJIT_MOV_UB, RETURN_ADDR, 0, SLJIT_MEM1(TMP1), (sljit_sw)PRIV(utf8_table4) - 0xc0);
+ OP1(SLJIT_MOV_U8, RETURN_ADDR, 0, SLJIT_MEM1(TMP1), (sljit_sw)PRIV(utf8_table4) - 0xc0);
OP2(SLJIT_AND, TMP1, 0, TMP1, 0, SLJIT_IMM, 0x3f);
OP2(SLJIT_SHL, TMP1, 0, TMP1, 0, SLJIT_IMM, 6);
OP2(SLJIT_AND, TMP2, 0, TMP2, 0, SLJIT_IMM, 0x3f);
@@ -2815,7 +3038,7 @@ if (common->utf)
{
/* This can be an extra read in some situations, but hopefully
it is needed in most cases. */
- OP1(SLJIT_MOV_UB, TMP1, 0, SLJIT_MEM1(TMP2), common->ctypes);
+ OP1(SLJIT_MOV_U8, TMP1, 0, SLJIT_MEM1(TMP2), common->ctypes);
jump = CMP(SLJIT_LESS, TMP2, 0, SLJIT_IMM, 0xc0);
if (!update_str_ptr)
{
@@ -2827,7 +3050,7 @@ if (common->utf)
OP2(SLJIT_OR, TMP2, 0, TMP2, 0, TMP1, 0);
OP1(SLJIT_MOV, TMP1, 0, SLJIT_IMM, 0);
jump2 = CMP(SLJIT_GREATER, TMP2, 0, SLJIT_IMM, 255);
- OP1(SLJIT_MOV_UB, TMP1, 0, SLJIT_MEM1(TMP2), common->ctypes);
+ OP1(SLJIT_MOV_U8, TMP1, 0, SLJIT_MEM1(TMP2), common->ctypes);
JUMPHERE(jump2);
}
else
@@ -2842,7 +3065,7 @@ if (common->utf)
OP1(SLJIT_MOV, TMP1, 0, SLJIT_IMM, 0);
jump = CMP(SLJIT_GREATER, TMP2, 0, SLJIT_IMM, 255);
#endif
-OP1(SLJIT_MOV_UB, TMP1, 0, SLJIT_MEM1(TMP2), common->ctypes);
+OP1(SLJIT_MOV_U8, TMP1, 0, SLJIT_MEM1(TMP2), common->ctypes);
#if !defined COMPILE_PCRE8
JUMPHERE(jump);
#endif
@@ -3034,7 +3257,7 @@ compare = CMP(SLJIT_GREATER, TMP2, 0, SLJIT_IMM, 0x3);
OP2(SLJIT_SHL, TMP2, 0, TMP2, 0, SLJIT_IMM, 6);
OP2(SLJIT_AND, TMP1, 0, TMP1, 0, SLJIT_IMM, 0x3f);
OP2(SLJIT_OR, TMP2, 0, TMP2, 0, TMP1, 0);
-OP1(SLJIT_MOV_UB, TMP1, 0, SLJIT_MEM1(TMP2), common->ctypes);
+OP1(SLJIT_MOV_U8, TMP1, 0, SLJIT_MEM1(TMP2), common->ctypes);
sljit_emit_fast_return(compiler, RETURN_ADDR, 0);
JUMPHERE(compare);
@@ -3043,7 +3266,7 @@ sljit_emit_fast_return(compiler, RETURN_ADDR, 0);
/* We only have types for characters less than 256. */
JUMPHERE(jump);
-OP1(SLJIT_MOV_UB, TMP2, 0, SLJIT_MEM1(TMP2), (sljit_sw)PRIV(utf8_table4) - 0xc0);
+OP1(SLJIT_MOV_U8, TMP2, 0, SLJIT_MEM1(TMP2), (sljit_sw)PRIV(utf8_table4) - 0xc0);
OP1(SLJIT_MOV, TMP1, 0, SLJIT_IMM, 0);
OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, TMP2, 0);
sljit_emit_fast_return(compiler, RETURN_ADDR, 0);
@@ -3069,26 +3292,26 @@ SLJIT_ASSERT(UCD_BLOCK_SIZE == 128 && sizeof(ucd_record) == 8);
sljit_emit_fast_enter(compiler, RETURN_ADDR, 0);
OP2(SLJIT_LSHR, TMP2, 0, TMP1, 0, SLJIT_IMM, UCD_BLOCK_SHIFT);
-OP1(SLJIT_MOV_UB, TMP2, 0, SLJIT_MEM1(TMP2), (sljit_sw)PRIV(ucd_stage1));
+OP1(SLJIT_MOV_U8, TMP2, 0, SLJIT_MEM1(TMP2), (sljit_sw)PRIV(ucd_stage1));
OP2(SLJIT_AND, TMP1, 0, TMP1, 0, SLJIT_IMM, UCD_BLOCK_MASK);
OP2(SLJIT_SHL, TMP2, 0, TMP2, 0, SLJIT_IMM, UCD_BLOCK_SHIFT);
OP2(SLJIT_ADD, TMP1, 0, TMP1, 0, TMP2, 0);
OP1(SLJIT_MOV, TMP2, 0, SLJIT_IMM, (sljit_sw)PRIV(ucd_stage2));
-OP1(SLJIT_MOV_UH, TMP2, 0, SLJIT_MEM2(TMP2, TMP1), 1);
+OP1(SLJIT_MOV_U16, TMP2, 0, SLJIT_MEM2(TMP2, TMP1), 1);
OP1(SLJIT_MOV, TMP1, 0, SLJIT_IMM, (sljit_sw)PRIV(ucd_records) + SLJIT_OFFSETOF(ucd_record, chartype));
-OP1(SLJIT_MOV_UB, TMP1, 0, SLJIT_MEM2(TMP1, TMP2), 3);
+OP1(SLJIT_MOV_U8, TMP1, 0, SLJIT_MEM2(TMP1, TMP2), 3);
sljit_emit_fast_return(compiler, RETURN_ADDR, 0);
}
#endif
-static SLJIT_INLINE struct sljit_label *mainloop_entry(compiler_common *common, BOOL hascrorlf, BOOL firstline)
+static SLJIT_INLINE struct sljit_label *mainloop_entry(compiler_common *common, BOOL hascrorlf)
{
DEFINE_COMPILER;
struct sljit_label *mainloop;
struct sljit_label *newlinelabel = NULL;
struct sljit_jump *start;
struct sljit_jump *end = NULL;
-struct sljit_jump *nl = NULL;
+struct sljit_jump *end2 = NULL;
#if defined SUPPORT_UTF && !defined COMPILE_PCRE32
struct sljit_jump *singlechar;
#endif
@@ -3096,14 +3319,13 @@ jump_list *newline = NULL;
BOOL newlinecheck = FALSE;
BOOL readuchar = FALSE;
-if (!(hascrorlf || firstline) && (common->nltype == NLTYPE_ANY ||
- common->nltype == NLTYPE_ANYCRLF || common->newline > 255))
+if (!(hascrorlf || (common->match_end_ptr != 0)) &&
+ (common->nltype == NLTYPE_ANY || common->nltype == NLTYPE_ANYCRLF || common->newline > 255))
newlinecheck = TRUE;
-if (firstline)
+if (common->match_end_ptr != 0)
{
/* Search for the end of the first line. */
- SLJIT_ASSERT(common->first_line_end != 0);
OP1(SLJIT_MOV, TMP3, 0, STR_PTR, 0);
if (common->nltype == NLTYPE_FIXED && common->newline > 255)
@@ -3116,19 +3338,19 @@ if (firstline)
CMPTO(SLJIT_NOT_EQUAL, TMP1, 0, SLJIT_IMM, (common->newline >> 8) & 0xff, mainloop);
CMPTO(SLJIT_NOT_EQUAL, TMP2, 0, SLJIT_IMM, common->newline & 0xff, mainloop);
JUMPHERE(end);
- OP2(SLJIT_SUB, SLJIT_MEM1(SLJIT_SP), common->first_line_end, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(1));
+ OP2(SLJIT_SUB, SLJIT_MEM1(SLJIT_SP), common->match_end_ptr, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(1));
}
else
{
end = CMP(SLJIT_GREATER_EQUAL, STR_PTR, 0, STR_END, 0);
mainloop = LABEL();
/* Continual stores does not cause data dependency. */
- OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), common->first_line_end, STR_PTR, 0);
+ OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), common->match_end_ptr, STR_PTR, 0);
read_char_range(common, common->nlmin, common->nlmax, TRUE);
check_newlinechar(common, common->nltype, &newline, TRUE);
CMPTO(SLJIT_LESS, STR_PTR, 0, STR_END, 0, mainloop);
JUMPHERE(end);
- OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), common->first_line_end, STR_PTR, 0);
+ OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), common->match_end_ptr, STR_PTR, 0);
set_jumps(newline, LABEL());
}
@@ -3149,7 +3371,7 @@ if (newlinecheck)
OP2(SLJIT_SHL, TMP1, 0, TMP1, 0, SLJIT_IMM, UCHAR_SHIFT);
#endif
OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, TMP1, 0);
- nl = JUMP(SLJIT_JUMP);
+ end2 = JUMP(SLJIT_JUMP);
}
mainloop = LABEL();
@@ -3172,7 +3394,7 @@ OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(1));
if (common->utf)
{
singlechar = CMP(SLJIT_LESS, TMP1, 0, SLJIT_IMM, 0xc0);
- OP1(SLJIT_MOV_UB, TMP1, 0, SLJIT_MEM1(TMP1), (sljit_sw)PRIV(utf8_table4) - 0xc0);
+ OP1(SLJIT_MOV_U8, TMP1, 0, SLJIT_MEM1(TMP1), (sljit_sw)PRIV(utf8_table4) - 0xc0);
OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, TMP1, 0);
JUMPHERE(singlechar);
}
@@ -3194,51 +3416,52 @@ JUMPHERE(start);
if (newlinecheck)
{
JUMPHERE(end);
- JUMPHERE(nl);
+ JUMPHERE(end2);
}
return mainloop;
}
#define MAX_N_CHARS 16
-#define MAX_N_BYTES 8
+#define MAX_DIFF_CHARS 6
-static SLJIT_INLINE void add_prefix_byte(pcre_uint8 byte, pcre_uint8 *bytes)
+static SLJIT_INLINE void add_prefix_char(pcre_uchar chr, pcre_uchar *chars)
{
-pcre_uint8 len = bytes[0];
-int i;
+pcre_uchar i, len;
+len = chars[0];
if (len == 255)
return;
if (len == 0)
{
- bytes[0] = 1;
- bytes[1] = byte;
+ chars[0] = 1;
+ chars[1] = chr;
return;
}
for (i = len; i > 0; i--)
- if (bytes[i] == byte)
+ if (chars[i] == chr)
return;
-if (len >= MAX_N_BYTES - 1)
+if (len >= MAX_DIFF_CHARS - 1)
{
- bytes[0] = 255;
+ chars[0] = 255;
return;
}
len++;
-bytes[len] = byte;
-bytes[0] = len;
+chars[len] = chr;
+chars[0] = len;
}
-static int scan_prefix(compiler_common *common, pcre_uchar *cc, pcre_uint32 *chars, pcre_uint8 *bytes, int max_chars, pcre_uint32 *rec_count)
+static int scan_prefix(compiler_common *common, pcre_uchar *cc, pcre_uchar *chars, int max_chars, sljit_u32 *rec_count)
{
/* Recursive function, which scans prefix literals. */
-BOOL last, any, caseless;
+BOOL last, any, class, caseless;
int len, repeat, len_save, consumed = 0;
-pcre_uint32 chr, mask;
+sljit_u32 chr; /* Any unicode character. */
+sljit_u8 *bytes, *bytes_end, byte;
pcre_uchar *alternative, *cc_save, *oc;
#if defined SUPPORT_UTF && defined COMPILE_PCRE8
pcre_uchar othercase[8];
@@ -3257,6 +3480,7 @@ while (TRUE)
last = TRUE;
any = FALSE;
+ class = FALSE;
caseless = FALSE;
switch (*cc)
@@ -3320,7 +3544,7 @@ while (TRUE)
#ifdef SUPPORT_UTF
if (common->utf && HAS_EXTRALEN(*cc)) len += GET_EXTRALEN(*cc);
#endif
- max_chars = scan_prefix(common, cc + len, chars, bytes, max_chars, rec_count);
+ max_chars = scan_prefix(common, cc + len, chars, max_chars, rec_count);
if (max_chars == 0)
return consumed;
last = FALSE;
@@ -3343,7 +3567,7 @@ while (TRUE)
alternative = cc + GET(cc, 1);
while (*alternative == OP_ALT)
{
- max_chars = scan_prefix(common, alternative + 1 + LINK_SIZE, chars, bytes, max_chars, rec_count);
+ max_chars = scan_prefix(common, alternative + 1 + LINK_SIZE, chars, max_chars, rec_count);
if (max_chars == 0)
return consumed;
alternative += GET(alternative, 1);
@@ -3356,18 +3580,17 @@ while (TRUE)
case OP_CLASS:
#if defined SUPPORT_UTF && defined COMPILE_PCRE8
- if (common->utf && !is_char7_bitset((const pcre_uint8 *)(cc + 1), FALSE)) return consumed;
+ if (common->utf && !is_char7_bitset((const sljit_u8 *)(cc + 1), FALSE))
+ return consumed;
#endif
- any = TRUE;
- cc += 1 + 32 / sizeof(pcre_uchar);
+ class = TRUE;
break;
case OP_NCLASS:
#if defined SUPPORT_UTF && !defined COMPILE_PCRE32
if (common->utf) return consumed;
#endif
- any = TRUE;
- cc += 1 + 32 / sizeof(pcre_uchar);
+ class = TRUE;
break;
#if defined SUPPORT_UTF || !defined COMPILE_PCRE8
@@ -3382,7 +3605,7 @@ while (TRUE)
case OP_DIGIT:
#if defined SUPPORT_UTF && defined COMPILE_PCRE8
- if (common->utf && !is_char7_bitset((const pcre_uint8 *)common->ctypes - cbit_length + cbit_digit, FALSE))
+ if (common->utf && !is_char7_bitset((const sljit_u8 *)common->ctypes - cbit_length + cbit_digit, FALSE))
return consumed;
#endif
any = TRUE;
@@ -3391,7 +3614,7 @@ while (TRUE)
case OP_WHITESPACE:
#if defined SUPPORT_UTF && defined COMPILE_PCRE8
- if (common->utf && !is_char7_bitset((const pcre_uint8 *)common->ctypes - cbit_length + cbit_space, FALSE))
+ if (common->utf && !is_char7_bitset((const sljit_u8 *)common->ctypes - cbit_length + cbit_space, FALSE))
return consumed;
#endif
any = TRUE;
@@ -3400,7 +3623,7 @@ while (TRUE)
case OP_WORDCHAR:
#if defined SUPPORT_UTF && defined COMPILE_PCRE8
- if (common->utf && !is_char7_bitset((const pcre_uint8 *)common->ctypes - cbit_length + cbit_word, FALSE))
+ if (common->utf && !is_char7_bitset((const sljit_u8 *)common->ctypes - cbit_length + cbit_word, FALSE))
return consumed;
#endif
any = TRUE;
@@ -3423,10 +3646,10 @@ while (TRUE)
cc++;
break;
-#ifdef SUPPORT_UCP
+#ifdef SUPPORT_UTF
case OP_NOTPROP:
case OP_PROP:
-#if defined SUPPORT_UTF && !defined COMPILE_PCRE32
+#ifndef COMPILE_PCRE32
if (common->utf) return consumed;
#endif
any = TRUE;
@@ -3455,30 +3678,114 @@ while (TRUE)
if (any)
{
-#if defined COMPILE_PCRE8
- mask = 0xff;
-#elif defined COMPILE_PCRE16
- mask = 0xffff;
-#elif defined COMPILE_PCRE32
- mask = 0xffffffff;
-#else
- SLJIT_ASSERT_STOP();
-#endif
+ do
+ {
+ chars[0] = 255;
+
+ consumed++;
+ if (--max_chars == 0)
+ return consumed;
+ chars += MAX_DIFF_CHARS;
+ }
+ while (--repeat > 0);
+
+ repeat = 1;
+ continue;
+ }
+
+ if (class)
+ {
+ bytes = (sljit_u8*) (cc + 1);
+ cc += 1 + 32 / sizeof(pcre_uchar);
+
+ switch (*cc)
+ {
+ case OP_CRSTAR:
+ case OP_CRMINSTAR:
+ case OP_CRPOSSTAR:
+ case OP_CRQUERY:
+ case OP_CRMINQUERY:
+ case OP_CRPOSQUERY:
+ max_chars = scan_prefix(common, cc + 1, chars, max_chars, rec_count);
+ if (max_chars == 0)
+ return consumed;
+ break;
+
+ default:
+ case OP_CRPLUS:
+ case OP_CRMINPLUS:
+ case OP_CRPOSPLUS:
+ break;
+
+ case OP_CRRANGE:
+ case OP_CRMINRANGE:
+ case OP_CRPOSRANGE:
+ repeat = GET2(cc, 1);
+ if (repeat <= 0)
+ return consumed;
+ break;
+ }
do
{
- chars[0] = mask;
- chars[1] = mask;
- bytes[0] = 255;
+ if (bytes[31] & 0x80)
+ chars[0] = 255;
+ else if (chars[0] != 255)
+ {
+ bytes_end = bytes + 32;
+ chr = 0;
+ do
+ {
+ byte = *bytes++;
+ SLJIT_ASSERT((chr & 0x7) == 0);
+ if (byte == 0)
+ chr += 8;
+ else
+ {
+ do
+ {
+ if ((byte & 0x1) != 0)
+ add_prefix_char(chr, chars);
+ byte >>= 1;
+ chr++;
+ }
+ while (byte != 0);
+ chr = (chr + 7) & ~7;
+ }
+ }
+ while (chars[0] != 255 && bytes < bytes_end);
+ bytes = bytes_end - 32;
+ }
consumed++;
if (--max_chars == 0)
return consumed;
- chars += 2;
- bytes += MAX_N_BYTES;
+ chars += MAX_DIFF_CHARS;
}
while (--repeat > 0);
+ switch (*cc)
+ {
+ case OP_CRSTAR:
+ case OP_CRMINSTAR:
+ case OP_CRPOSSTAR:
+ return consumed;
+
+ case OP_CRQUERY:
+ case OP_CRMINQUERY:
+ case OP_CRPOSQUERY:
+ cc++;
+ break;
+
+ case OP_CRRANGE:
+ case OP_CRMINRANGE:
+ case OP_CRPOSRANGE:
+ if (GET2(cc, 1) != GET2(cc, 1 + IMM2_SIZE))
+ return consumed;
+ cc += 1 + 2 * IMM2_SIZE;
+ break;
+ }
+
repeat = 1;
continue;
}
@@ -3505,7 +3812,10 @@ while (TRUE)
}
}
else
+ {
caseless = FALSE;
+ othercase[0] = 0; /* Stops compiler warning - PH */
+ }
len_save = len;
cc_save = cc;
@@ -3515,43 +3825,16 @@ while (TRUE)
do
{
chr = *cc;
-#ifdef COMPILE_PCRE32
- if (SLJIT_UNLIKELY(chr == NOTACHAR))
- return consumed;
-#endif
- add_prefix_byte((pcre_uint8)chr, bytes);
+ add_prefix_char(*cc, chars);
- mask = 0;
if (caseless)
- {
- add_prefix_byte((pcre_uint8)*oc, bytes);
- mask = *cc ^ *oc;
- chr |= mask;
- }
-
-#ifdef COMPILE_PCRE32
- if (chars[0] == NOTACHAR && chars[1] == 0)
-#else
- if (chars[0] == NOTACHAR)
-#endif
- {
- chars[0] = chr;
- chars[1] = mask;
- }
- else
- {
- mask |= chars[0] ^ chr;
- chr |= mask;
- chars[0] = chr;
- chars[1] |= mask;
- }
+ add_prefix_char(*oc, chars);
len--;
consumed++;
if (--max_chars == 0)
return consumed;
- chars += 2;
- bytes += MAX_N_BYTES;
+ chars += MAX_DIFF_CHARS;
cc++;
oc++;
}
@@ -3570,163 +3853,576 @@ while (TRUE)
}
}
-static SLJIT_INLINE BOOL fast_forward_first_n_chars(compiler_common *common, BOOL firstline)
+#if (defined SLJIT_CONFIG_X86 && SLJIT_CONFIG_X86)
+
+static sljit_s32 character_to_int32(pcre_uchar chr)
+{
+sljit_s32 value = (sljit_s32)chr;
+#if defined COMPILE_PCRE8
+#define SSE2_COMPARE_TYPE_INDEX 0
+return (value << 24) | (value << 16) | (value << 8) | value;
+#elif defined COMPILE_PCRE16
+#define SSE2_COMPARE_TYPE_INDEX 1
+return (value << 16) | value;
+#elif defined COMPILE_PCRE32
+#define SSE2_COMPARE_TYPE_INDEX 2
+return value;
+#else
+#error "Unsupported unit width"
+#endif
+}
+
+static SLJIT_INLINE void fast_forward_first_char2_sse2(compiler_common *common, pcre_uchar char1, pcre_uchar char2)
+{
+DEFINE_COMPILER;
+struct sljit_label *start;
+struct sljit_jump *quit[3];
+struct sljit_jump *nomatch;
+sljit_u8 instruction[8];
+sljit_s32 tmp1_ind = sljit_get_register_index(TMP1);
+sljit_s32 tmp2_ind = sljit_get_register_index(TMP2);
+sljit_s32 str_ptr_ind = sljit_get_register_index(STR_PTR);
+BOOL load_twice = FALSE;
+pcre_uchar bit;
+
+bit = char1 ^ char2;
+if (!is_powerof2(bit))
+ bit = 0;
+
+if ((char1 != char2) && bit == 0)
+ load_twice = TRUE;
+
+quit[0] = CMP(SLJIT_GREATER_EQUAL, STR_PTR, 0, STR_END, 0);
+
+/* First part (unaligned start) */
+
+OP1(SLJIT_MOV, TMP1, 0, SLJIT_IMM, character_to_int32(char1 | bit));
+
+SLJIT_ASSERT(tmp1_ind < 8 && tmp2_ind == 1);
+
+/* MOVD xmm, r/m32 */
+instruction[0] = 0x66;
+instruction[1] = 0x0f;
+instruction[2] = 0x6e;
+instruction[3] = 0xc0 | (2 << 3) | tmp1_ind;
+sljit_emit_op_custom(compiler, instruction, 4);
+
+if (char1 != char2)
+ {
+ OP1(SLJIT_MOV, TMP1, 0, SLJIT_IMM, character_to_int32(bit != 0 ? bit : char2));
+
+ /* MOVD xmm, r/m32 */
+ instruction[3] = 0xc0 | (3 << 3) | tmp1_ind;
+ sljit_emit_op_custom(compiler, instruction, 4);
+ }
+
+/* PSHUFD xmm1, xmm2/m128, imm8 */
+instruction[2] = 0x70;
+instruction[3] = 0xc0 | (2 << 3) | 2;
+instruction[4] = 0;
+sljit_emit_op_custom(compiler, instruction, 5);
+
+if (char1 != char2)
+ {
+ /* PSHUFD xmm1, xmm2/m128, imm8 */
+ instruction[3] = 0xc0 | (3 << 3) | 3;
+ instruction[4] = 0;
+ sljit_emit_op_custom(compiler, instruction, 5);
+ }
+
+OP2(SLJIT_AND, TMP2, 0, STR_PTR, 0, SLJIT_IMM, 0xf);
+OP2(SLJIT_AND, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, ~0xf);
+
+/* MOVDQA xmm1, xmm2/m128 */
+#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
+
+if (str_ptr_ind < 8)
+ {
+ instruction[2] = 0x6f;
+ instruction[3] = (0 << 3) | str_ptr_ind;
+ sljit_emit_op_custom(compiler, instruction, 4);
+
+ if (load_twice)
+ {
+ instruction[3] = (1 << 3) | str_ptr_ind;
+ sljit_emit_op_custom(compiler, instruction, 4);
+ }
+ }
+else
+ {
+ instruction[1] = 0x41;
+ instruction[2] = 0x0f;
+ instruction[3] = 0x6f;
+ instruction[4] = (0 << 3) | (str_ptr_ind & 0x7);
+ sljit_emit_op_custom(compiler, instruction, 5);
+
+ if (load_twice)
+ {
+ instruction[4] = (1 << 3) | str_ptr_ind;
+ sljit_emit_op_custom(compiler, instruction, 5);
+ }
+ instruction[1] = 0x0f;
+ }
+
+#else
+
+instruction[2] = 0x6f;
+instruction[3] = (0 << 3) | str_ptr_ind;
+sljit_emit_op_custom(compiler, instruction, 4);
+
+if (load_twice)
+ {
+ instruction[3] = (1 << 3) | str_ptr_ind;
+ sljit_emit_op_custom(compiler, instruction, 4);
+ }
+
+#endif
+
+if (bit != 0)
+ {
+ /* POR xmm1, xmm2/m128 */
+ instruction[2] = 0xeb;
+ instruction[3] = 0xc0 | (0 << 3) | 3;
+ sljit_emit_op_custom(compiler, instruction, 4);
+ }
+
+/* PCMPEQB/W/D xmm1, xmm2/m128 */
+instruction[2] = 0x74 + SSE2_COMPARE_TYPE_INDEX;
+instruction[3] = 0xc0 | (0 << 3) | 2;
+sljit_emit_op_custom(compiler, instruction, 4);
+
+if (load_twice)
+ {
+ instruction[3] = 0xc0 | (1 << 3) | 3;
+ sljit_emit_op_custom(compiler, instruction, 4);
+ }
+
+/* PMOVMSKB reg, xmm */
+instruction[2] = 0xd7;
+instruction[3] = 0xc0 | (tmp1_ind << 3) | 0;
+sljit_emit_op_custom(compiler, instruction, 4);
+
+if (load_twice)
+ {
+ OP1(SLJIT_MOV, TMP3, 0, TMP2, 0);
+ instruction[3] = 0xc0 | (tmp2_ind << 3) | 1;
+ sljit_emit_op_custom(compiler, instruction, 4);
+
+ OP2(SLJIT_OR, TMP1, 0, TMP1, 0, TMP2, 0);
+ OP1(SLJIT_MOV, TMP2, 0, TMP3, 0);
+ }
+
+OP2(SLJIT_ASHR, TMP1, 0, TMP1, 0, TMP2, 0);
+
+/* BSF r32, r/m32 */
+instruction[0] = 0x0f;
+instruction[1] = 0xbc;
+instruction[2] = 0xc0 | (tmp1_ind << 3) | tmp1_ind;
+sljit_emit_op_custom(compiler, instruction, 3);
+
+nomatch = JUMP(SLJIT_ZERO);
+
+OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, TMP2, 0);
+OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, TMP1, 0);
+quit[1] = JUMP(SLJIT_JUMP);
+
+JUMPHERE(nomatch);
+
+start = LABEL();
+OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, 16);
+quit[2] = CMP(SLJIT_GREATER_EQUAL, STR_PTR, 0, STR_END, 0);
+
+/* Second part (aligned) */
+
+instruction[0] = 0x66;
+instruction[1] = 0x0f;
+
+/* MOVDQA xmm1, xmm2/m128 */
+#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
+
+if (str_ptr_ind < 8)
+ {
+ instruction[2] = 0x6f;
+ instruction[3] = (0 << 3) | str_ptr_ind;
+ sljit_emit_op_custom(compiler, instruction, 4);
+
+ if (load_twice)
+ {
+ instruction[3] = (1 << 3) | str_ptr_ind;
+ sljit_emit_op_custom(compiler, instruction, 4);
+ }
+ }
+else
+ {
+ instruction[1] = 0x41;
+ instruction[2] = 0x0f;
+ instruction[3] = 0x6f;
+ instruction[4] = (0 << 3) | (str_ptr_ind & 0x7);
+ sljit_emit_op_custom(compiler, instruction, 5);
+
+ if (load_twice)
+ {
+ instruction[4] = (1 << 3) | str_ptr_ind;
+ sljit_emit_op_custom(compiler, instruction, 5);
+ }
+ instruction[1] = 0x0f;
+ }
+
+#else
+
+instruction[2] = 0x6f;
+instruction[3] = (0 << 3) | str_ptr_ind;
+sljit_emit_op_custom(compiler, instruction, 4);
+
+if (load_twice)
+ {
+ instruction[3] = (1 << 3) | str_ptr_ind;
+ sljit_emit_op_custom(compiler, instruction, 4);
+ }
+
+#endif
+
+if (bit != 0)
+ {
+ /* POR xmm1, xmm2/m128 */
+ instruction[2] = 0xeb;
+ instruction[3] = 0xc0 | (0 << 3) | 3;
+ sljit_emit_op_custom(compiler, instruction, 4);
+ }
+
+/* PCMPEQB/W/D xmm1, xmm2/m128 */
+instruction[2] = 0x74 + SSE2_COMPARE_TYPE_INDEX;
+instruction[3] = 0xc0 | (0 << 3) | 2;
+sljit_emit_op_custom(compiler, instruction, 4);
+
+if (load_twice)
+ {
+ instruction[3] = 0xc0 | (1 << 3) | 3;
+ sljit_emit_op_custom(compiler, instruction, 4);
+ }
+
+/* PMOVMSKB reg, xmm */
+instruction[2] = 0xd7;
+instruction[3] = 0xc0 | (tmp1_ind << 3) | 0;
+sljit_emit_op_custom(compiler, instruction, 4);
+
+if (load_twice)
+ {
+ instruction[3] = 0xc0 | (tmp2_ind << 3) | 1;
+ sljit_emit_op_custom(compiler, instruction, 4);
+
+ OP2(SLJIT_OR, TMP1, 0, TMP1, 0, TMP2, 0);
+ }
+
+/* BSF r32, r/m32 */
+instruction[0] = 0x0f;
+instruction[1] = 0xbc;
+instruction[2] = 0xc0 | (tmp1_ind << 3) | tmp1_ind;
+sljit_emit_op_custom(compiler, instruction, 3);
+
+JUMPTO(SLJIT_ZERO, start);
+
+OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, TMP1, 0);
+
+start = LABEL();
+SET_LABEL(quit[0], start);
+SET_LABEL(quit[1], start);
+SET_LABEL(quit[2], start);
+}
+
+#undef SSE2_COMPARE_TYPE_INDEX
+
+#endif
+
+static void fast_forward_first_char2(compiler_common *common, pcre_uchar char1, pcre_uchar char2, sljit_s32 offset)
{
DEFINE_COMPILER;
struct sljit_label *start;
struct sljit_jump *quit;
-pcre_uint32 chars[MAX_N_CHARS * 2];
-pcre_uint8 bytes[MAX_N_CHARS * MAX_N_BYTES];
-pcre_uint8 ones[MAX_N_CHARS];
-int offsets[3];
-pcre_uint32 mask;
-pcre_uint8 *byte_set, *byte_set_end;
-int i, max, from;
-int range_right = -1, range_len = 3 - 1;
-sljit_ub *update_table = NULL;
-BOOL in_range;
-pcre_uint32 rec_count;
+struct sljit_jump *found;
+pcre_uchar mask;
+#if defined SUPPORT_UTF && !defined COMPILE_PCRE32
+struct sljit_label *utf_start = NULL;
+struct sljit_jump *utf_quit = NULL;
+#endif
+BOOL has_match_end = (common->match_end_ptr != 0);
-for (i = 0; i < MAX_N_CHARS; i++)
+if (offset > 0)
+ OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(offset));
+
+if (has_match_end)
{
- chars[i << 1] = NOTACHAR;
- chars[(i << 1) + 1] = 0;
- bytes[i * MAX_N_BYTES] = 0;
+ OP1(SLJIT_MOV, TMP3, 0, STR_END, 0);
+
+ OP2(SLJIT_ADD, STR_END, 0, SLJIT_MEM1(SLJIT_SP), common->match_end_ptr, SLJIT_IMM, IN_UCHARS(offset + 1));
+#if (defined SLJIT_CONFIG_X86 && SLJIT_CONFIG_X86)
+ if (sljit_x86_is_cmov_available())
+ {
+ OP2(SLJIT_SUB | SLJIT_SET_U, SLJIT_UNUSED, 0, STR_END, 0, TMP3, 0);
+ sljit_x86_emit_cmov(compiler, SLJIT_GREATER, STR_END, TMP3, 0);
+ }
+#endif
+ {
+ quit = CMP(SLJIT_LESS_EQUAL, STR_END, 0, TMP3, 0);
+ OP1(SLJIT_MOV, STR_END, 0, TMP3, 0);
+ JUMPHERE(quit);
+ }
}
-rec_count = 10000;
-max = scan_prefix(common, common->start, chars, bytes, MAX_N_CHARS, &rec_count);
+#if defined SUPPORT_UTF && !defined COMPILE_PCRE32
+if (common->utf && offset > 0)
+ utf_start = LABEL();
+#endif
-if (max <= 1)
- return FALSE;
+#if (defined SLJIT_CONFIG_X86 && SLJIT_CONFIG_X86)
-for (i = 0; i < max; i++)
+/* SSE2 accelerated first character search. */
+
+if (sljit_x86_is_sse2_available())
{
- mask = chars[(i << 1) + 1];
- ones[i] = ones_in_half_byte[mask & 0xf];
- mask >>= 4;
- while (mask != 0)
+ fast_forward_first_char2_sse2(common, char1, char2);
+
+ SLJIT_ASSERT(common->mode == JIT_COMPILE || offset == 0);
+ if (common->mode == JIT_COMPILE)
{
- ones[i] += ones_in_half_byte[mask & 0xf];
- mask >>= 4;
+ /* In complete mode, we don't need to run a match when STR_PTR == STR_END. */
+ SLJIT_ASSERT(common->forced_quit_label == NULL);
+ OP1(SLJIT_MOV, SLJIT_RETURN_REG, 0, SLJIT_IMM, PCRE_ERROR_NOMATCH);
+ add_jump(compiler, &common->forced_quit, CMP(SLJIT_GREATER_EQUAL, STR_PTR, 0, STR_END, 0));
+
+#if defined SUPPORT_UTF && !defined COMPILE_PCRE32
+ if (common->utf && offset > 0)
+ {
+ SLJIT_ASSERT(common->mode == JIT_COMPILE);
+
+ OP1(MOV_UCHAR, TMP1, 0, SLJIT_MEM1(STR_PTR), IN_UCHARS(-offset));
+ OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(1));
+#if defined COMPILE_PCRE8
+ OP2(SLJIT_AND, TMP1, 0, TMP1, 0, SLJIT_IMM, 0xc0);
+ CMPTO(SLJIT_EQUAL, TMP1, 0, SLJIT_IMM, 0x80, utf_start);
+#elif defined COMPILE_PCRE16
+ OP2(SLJIT_AND, TMP1, 0, TMP1, 0, SLJIT_IMM, 0xfc00);
+ CMPTO(SLJIT_EQUAL, TMP1, 0, SLJIT_IMM, 0xdc00, utf_start);
+#else
+#error "Unknown code width"
+#endif
+ OP2(SLJIT_SUB, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(1));
+ }
+#endif
+
+ if (offset > 0)
+ OP2(SLJIT_SUB, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(offset));
}
+ else if (sljit_x86_is_cmov_available())
+ {
+ OP2(SLJIT_SUB | SLJIT_SET_U, SLJIT_UNUSED, 0, STR_PTR, 0, STR_END, 0);
+ sljit_x86_emit_cmov(compiler, SLJIT_GREATER_EQUAL, STR_PTR, has_match_end ? SLJIT_MEM1(SLJIT_SP) : STR_END, has_match_end ? common->match_end_ptr : 0);
+ }
+ else
+ {
+ quit = CMP(SLJIT_LESS, STR_PTR, 0, STR_END, 0);
+ OP1(SLJIT_MOV, STR_PTR, 0, has_match_end ? SLJIT_MEM1(SLJIT_SP) : STR_END, has_match_end ? common->match_end_ptr : 0);
+ JUMPHERE(quit);
+ }
+
+ if (has_match_end)
+ OP1(SLJIT_MOV, STR_END, 0, TMP3, 0);
+ return;
}
+#endif
+
+quit = CMP(SLJIT_GREATER_EQUAL, STR_PTR, 0, STR_END, 0);
+
+start = LABEL();
+OP1(MOV_UCHAR, TMP1, 0, SLJIT_MEM1(STR_PTR), 0);
+
+if (char1 == char2)
+ found = CMP(SLJIT_EQUAL, TMP1, 0, SLJIT_IMM, char1);
+else
+ {
+ mask = char1 ^ char2;
+ if (is_powerof2(mask))
+ {
+ OP2(SLJIT_OR, TMP1, 0, TMP1, 0, SLJIT_IMM, mask);
+ found = CMP(SLJIT_EQUAL, TMP1, 0, SLJIT_IMM, char1 | mask);
+ }
+ else
+ {
+ OP2(SLJIT_SUB | SLJIT_SET_E, SLJIT_UNUSED, 0, TMP1, 0, SLJIT_IMM, char1);
+ OP_FLAGS(SLJIT_MOV, TMP2, 0, SLJIT_UNUSED, 0, SLJIT_EQUAL);
+ OP2(SLJIT_SUB | SLJIT_SET_E, SLJIT_UNUSED, 0, TMP1, 0, SLJIT_IMM, char2);
+ OP_FLAGS(SLJIT_OR | SLJIT_SET_E, TMP2, 0, TMP2, 0, SLJIT_EQUAL);
+ found = JUMP(SLJIT_NOT_ZERO);
+ }
+ }
+
+OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(1));
+CMPTO(SLJIT_LESS, STR_PTR, 0, STR_END, 0, start);
+
+#if defined SUPPORT_UTF && !defined COMPILE_PCRE32
+if (common->utf && offset > 0)
+ utf_quit = JUMP(SLJIT_JUMP);
+#endif
+
+JUMPHERE(found);
+
+#if defined SUPPORT_UTF && !defined COMPILE_PCRE32
+if (common->utf && offset > 0)
+ {
+ OP1(MOV_UCHAR, TMP1, 0, SLJIT_MEM1(STR_PTR), IN_UCHARS(-offset));
+ OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(1));
+#if defined COMPILE_PCRE8
+ OP2(SLJIT_AND, TMP1, 0, TMP1, 0, SLJIT_IMM, 0xc0);
+ CMPTO(SLJIT_EQUAL, TMP1, 0, SLJIT_IMM, 0x80, utf_start);
+#elif defined COMPILE_PCRE16
+ OP2(SLJIT_AND, TMP1, 0, TMP1, 0, SLJIT_IMM, 0xfc00);
+ CMPTO(SLJIT_EQUAL, TMP1, 0, SLJIT_IMM, 0xdc00, utf_start);
+#else
+#error "Unknown code width"
+#endif
+ OP2(SLJIT_SUB, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(1));
+ JUMPHERE(utf_quit);
+ }
+#endif
+
+JUMPHERE(quit);
+
+if (has_match_end)
+ {
+ quit = CMP(SLJIT_LESS, STR_PTR, 0, STR_END, 0);
+ OP1(SLJIT_MOV, STR_PTR, 0, SLJIT_MEM1(SLJIT_SP), common->match_end_ptr);
+ if (offset > 0)
+ OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(offset));
+ JUMPHERE(quit);
+ OP1(SLJIT_MOV, STR_END, 0, TMP3, 0);
+ }
+
+if (offset > 0)
+ OP2(SLJIT_SUB, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(offset));
+}
+
+static SLJIT_INLINE BOOL fast_forward_first_n_chars(compiler_common *common)
+{
+DEFINE_COMPILER;
+struct sljit_label *start;
+struct sljit_jump *quit;
+struct sljit_jump *match;
+/* bytes[0] represent the number of characters between 0
+and MAX_N_BYTES - 1, 255 represents any character. */
+pcre_uchar chars[MAX_N_CHARS * MAX_DIFF_CHARS];
+sljit_s32 offset;
+pcre_uchar mask;
+pcre_uchar *char_set, *char_set_end;
+int i, max, from;
+int range_right = -1, range_len;
+sljit_u8 *update_table = NULL;
+BOOL in_range;
+sljit_u32 rec_count;
+
+for (i = 0; i < MAX_N_CHARS; i++)
+ chars[i * MAX_DIFF_CHARS] = 0;
+
+rec_count = 10000;
+max = scan_prefix(common, common->start, chars, MAX_N_CHARS, &rec_count);
+
+if (max < 1)
+ return FALSE;
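/* Editorial note (layout inferred from the assertions and indexing below; not
part of this patch): each pattern position n owns MAX_DIFF_CHARS slots of
chars[]. Slot 0 is the number of recorded alternatives (255 = any character)
and the alternatives themselves follow, e.g. for a caseless 'a':
  chars[n * MAX_DIFF_CHARS + 0] == 2
  chars[n * MAX_DIFF_CHARS + 1] == 'a'
  chars[n * MAX_DIFF_CHARS + 2] == 'A' */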
+
in_range = FALSE;
-from = 0; /* Prevent compiler "uninitialized" warning */
+/* Prevent compiler "uninitialized" warning */
+from = 0;
+range_len = 4 /* minimum length */ - 1;
for (i = 0; i <= max; i++)
{
- if (in_range && (i - from) > range_len && (bytes[(i - 1) * MAX_N_BYTES] <= 4))
+ if (in_range && (i - from) > range_len && (chars[(i - 1) * MAX_DIFF_CHARS] < 255))
{
range_len = i - from;
range_right = i - 1;
}
- if (i < max && bytes[i * MAX_N_BYTES] < 255)
+ if (i < max && chars[i * MAX_DIFF_CHARS] < 255)
{
+ SLJIT_ASSERT(chars[i * MAX_DIFF_CHARS] > 0);
if (!in_range)
{
in_range = TRUE;
from = i;
}
}
- else if (in_range)
+ else
in_range = FALSE;
}
if (range_right >= 0)
{
- update_table = (sljit_ub *)allocate_read_only_data(common, 256);
+ update_table = (sljit_u8 *)allocate_read_only_data(common, 256);
if (update_table == NULL)
return TRUE;
memset(update_table, IN_UCHARS(range_len), 256);
for (i = 0; i < range_len; i++)
{
- byte_set = bytes + ((range_right - i) * MAX_N_BYTES);
- SLJIT_ASSERT(byte_set[0] > 0 && byte_set[0] < 255);
- byte_set_end = byte_set + byte_set[0];
- byte_set++;
- while (byte_set <= byte_set_end)
+ char_set = chars + ((range_right - i) * MAX_DIFF_CHARS);
+ SLJIT_ASSERT(char_set[0] > 0 && char_set[0] < 255);
+ char_set_end = char_set + char_set[0];
+ char_set++;
+ while (char_set <= char_set_end)
{
- if (update_table[*byte_set] > IN_UCHARS(i))
- update_table[*byte_set] = IN_UCHARS(i);
- byte_set++;
+ if (update_table[(*char_set) & 0xff] > IN_UCHARS(i))
+ update_table[(*char_set) & 0xff] = IN_UCHARS(i);
+ char_set++;
}
}
}
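/* Illustrative sketch (editorial addition, not part of this patch): the loop
above fills a bad-character style skip table for the scanned range. For a
literal prefix "abc" with range_right == 2 and range_len == 3 it would hold
  update_table['c'] == 0, update_table['b'] == 1, update_table['a'] == 2,
  every other byte value == 3 (in code units),
so the byte read at STR_PTR + range_right in the search loop below gives the
number of units STR_PTR can safely be advanced. */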
-offsets[0] = -1;
+offset = -1;
/* Scan forward. */
for (i = 0; i < max; i++)
- if (ones[i] <= 2) {
- offsets[0] = i;
- break;
- }
-
-if (offsets[0] < 0 && range_right < 0)
- return FALSE;
-
-if (offsets[0] >= 0)
{
- /* Scan backward. */
- offsets[1] = -1;
- for (i = max - 1; i > offsets[0]; i--)
- if (ones[i] <= 2 && i != range_right)
- {
- offsets[1] = i;
- break;
- }
-
- /* This case is handled better by fast_forward_first_char. */
- if (offsets[1] == -1 && offsets[0] == 0 && range_right < 0)
- return FALSE;
-
- offsets[2] = -1;
- /* We only search for a middle character if there is no range check. */
- if (offsets[1] >= 0 && range_right == -1)
+ if (offset == -1)
{
- /* Scan from middle. */
- for (i = (offsets[0] + offsets[1]) / 2 + 1; i < offsets[1]; i++)
- if (ones[i] <= 2)
+ if (chars[i * MAX_DIFF_CHARS] <= 2)
+ offset = i;
+ }
+ else if (chars[offset * MAX_DIFF_CHARS] == 2 && chars[i * MAX_DIFF_CHARS] <= 2)
+ {
+ if (chars[i * MAX_DIFF_CHARS] == 1)
+ offset = i;
+ else
+ {
+ mask = chars[offset * MAX_DIFF_CHARS + 1] ^ chars[offset * MAX_DIFF_CHARS + 2];
+ if (!is_powerof2(mask))
{
- offsets[2] = i;
- break;
+ mask = chars[i * MAX_DIFF_CHARS + 1] ^ chars[i * MAX_DIFF_CHARS + 2];
+ if (is_powerof2(mask))
+ offset = i;
}
-
- if (offsets[2] == -1)
- {
- for (i = (offsets[0] + offsets[1]) / 2; i > offsets[0]; i--)
- if (ones[i] <= 2)
- {
- offsets[2] = i;
- break;
- }
}
}
+ }
- SLJIT_ASSERT(offsets[1] == -1 || (offsets[0] < offsets[1]));
- SLJIT_ASSERT(offsets[2] == -1 || (offsets[0] < offsets[2] && offsets[1] > offsets[2]));
-
- chars[0] = chars[offsets[0] << 1];
- chars[1] = chars[(offsets[0] << 1) + 1];
- if (offsets[2] >= 0)
- {
- chars[2] = chars[offsets[2] << 1];
- chars[3] = chars[(offsets[2] << 1) + 1];
- }
- if (offsets[1] >= 0)
- {
- chars[4] = chars[offsets[1] << 1];
- chars[5] = chars[(offsets[1] << 1) + 1];
- }
+if (range_right < 0)
+ {
+ if (offset < 0)
+ return FALSE;
+ SLJIT_ASSERT(chars[offset * MAX_DIFF_CHARS] >= 1 && chars[offset * MAX_DIFF_CHARS] <= 2);
+ /* Works regardless of whether the value is 1 or 2. */
+ mask = chars[offset * MAX_DIFF_CHARS + chars[offset * MAX_DIFF_CHARS]];
+ fast_forward_first_char2(common, chars[offset * MAX_DIFF_CHARS + 1], mask, offset);
+ return TRUE;
}
+if (range_right == offset)
+ offset = -1;
+
+SLJIT_ASSERT(offset == -1 || (chars[offset * MAX_DIFF_CHARS] >= 1 && chars[offset * MAX_DIFF_CHARS] <= 2));
+
max -= 1;
-if (firstline)
+SLJIT_ASSERT(max > 0);
+if (common->match_end_ptr != 0)
{
- SLJIT_ASSERT(common->first_line_end != 0);
- OP1(SLJIT_MOV, TMP1, 0, SLJIT_MEM1(SLJIT_SP), common->first_line_end);
+ OP1(SLJIT_MOV, TMP1, 0, SLJIT_MEM1(SLJIT_SP), common->match_end_ptr);
OP1(SLJIT_MOV, TMP3, 0, STR_END, 0);
OP2(SLJIT_SUB, STR_END, 0, STR_END, 0, SLJIT_IMM, IN_UCHARS(max));
quit = CMP(SLJIT_LESS_EQUAL, STR_END, 0, TMP1, 0);
@@ -3736,68 +4432,86 @@ if (firstline)
else
OP2(SLJIT_SUB, STR_END, 0, STR_END, 0, SLJIT_IMM, IN_UCHARS(max));
+SLJIT_ASSERT(range_right >= 0);
+
#if !(defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
-if (range_right >= 0)
- OP1(SLJIT_MOV, RETURN_ADDR, 0, SLJIT_IMM, (sljit_sw)update_table);
+OP1(SLJIT_MOV, RETURN_ADDR, 0, SLJIT_IMM, (sljit_sw)update_table);
#endif
start = LABEL();
quit = CMP(SLJIT_GREATER_EQUAL, STR_PTR, 0, STR_END, 0);
-SLJIT_ASSERT(range_right >= 0 || offsets[0] >= 0);
-
-if (range_right >= 0)
- {
#if defined COMPILE_PCRE8 || (defined SLJIT_LITTLE_ENDIAN && SLJIT_LITTLE_ENDIAN)
- OP1(SLJIT_MOV_UB, TMP1, 0, SLJIT_MEM1(STR_PTR), IN_UCHARS(range_right));
+OP1(SLJIT_MOV_U8, TMP1, 0, SLJIT_MEM1(STR_PTR), IN_UCHARS(range_right));
#else
- OP1(SLJIT_MOV_UB, TMP1, 0, SLJIT_MEM1(STR_PTR), IN_UCHARS(range_right + 1) - 1);
+OP1(SLJIT_MOV_U8, TMP1, 0, SLJIT_MEM1(STR_PTR), IN_UCHARS(range_right + 1) - 1);
#endif
#if !(defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- OP1(SLJIT_MOV_UB, TMP1, 0, SLJIT_MEM2(RETURN_ADDR, TMP1), 0);
+OP1(SLJIT_MOV_U8, TMP1, 0, SLJIT_MEM2(RETURN_ADDR, TMP1), 0);
#else
- OP1(SLJIT_MOV_UB, TMP1, 0, SLJIT_MEM1(TMP1), (sljit_sw)update_table);
+OP1(SLJIT_MOV_U8, TMP1, 0, SLJIT_MEM1(TMP1), (sljit_sw)update_table);
#endif
- OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, TMP1, 0);
- CMPTO(SLJIT_NOT_EQUAL, TMP1, 0, SLJIT_IMM, 0, start);
- }
+OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, TMP1, 0);
+CMPTO(SLJIT_NOT_EQUAL, TMP1, 0, SLJIT_IMM, 0, start);
-if (offsets[0] >= 0)
+if (offset >= 0)
{
- OP1(MOV_UCHAR, TMP1, 0, SLJIT_MEM1(STR_PTR), IN_UCHARS(offsets[0]));
- if (offsets[1] >= 0)
- OP1(MOV_UCHAR, TMP2, 0, SLJIT_MEM1(STR_PTR), IN_UCHARS(offsets[1]));
+ OP1(MOV_UCHAR, TMP1, 0, SLJIT_MEM1(STR_PTR), IN_UCHARS(offset));
OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(1));
- if (chars[1] != 0)
- OP2(SLJIT_OR, TMP1, 0, TMP1, 0, SLJIT_IMM, chars[1]);
- CMPTO(SLJIT_NOT_EQUAL, TMP1, 0, SLJIT_IMM, chars[0], start);
- if (offsets[2] >= 0)
- OP1(MOV_UCHAR, TMP1, 0, SLJIT_MEM1(STR_PTR), IN_UCHARS(offsets[2] - 1));
-
- if (offsets[1] >= 0)
+ if (chars[offset * MAX_DIFF_CHARS] == 1)
+ CMPTO(SLJIT_NOT_EQUAL, TMP1, 0, SLJIT_IMM, chars[offset * MAX_DIFF_CHARS + 1], start);
+ else
{
- if (chars[5] != 0)
- OP2(SLJIT_OR, TMP2, 0, TMP2, 0, SLJIT_IMM, chars[5]);
- CMPTO(SLJIT_NOT_EQUAL, TMP2, 0, SLJIT_IMM, chars[4], start);
+ mask = chars[offset * MAX_DIFF_CHARS + 1] ^ chars[offset * MAX_DIFF_CHARS + 2];
+ if (is_powerof2(mask))
+ {
+ OP2(SLJIT_OR, TMP1, 0, TMP1, 0, SLJIT_IMM, mask);
+ CMPTO(SLJIT_NOT_EQUAL, TMP1, 0, SLJIT_IMM, chars[offset * MAX_DIFF_CHARS + 1] | mask, start);
+ }
+ else
+ {
+ match = CMP(SLJIT_EQUAL, TMP1, 0, SLJIT_IMM, chars[offset * MAX_DIFF_CHARS + 1]);
+ CMPTO(SLJIT_NOT_EQUAL, TMP1, 0, SLJIT_IMM, chars[offset * MAX_DIFF_CHARS + 2], start);
+ JUMPHERE(match);
+ }
}
+ }
- if (offsets[2] >= 0)
+#if defined SUPPORT_UTF && !defined COMPILE_PCRE32
+if (common->utf && offset != 0)
+ {
+ if (offset < 0)
{
- if (chars[3] != 0)
- OP2(SLJIT_OR, TMP1, 0, TMP1, 0, SLJIT_IMM, chars[3]);
- CMPTO(SLJIT_NOT_EQUAL, TMP1, 0, SLJIT_IMM, chars[2], start);
+ OP1(MOV_UCHAR, TMP1, 0, SLJIT_MEM1(STR_PTR), 0);
+ OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(1));
}
- OP2(SLJIT_SUB, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(1));
+ else
+ OP1(MOV_UCHAR, TMP1, 0, SLJIT_MEM1(STR_PTR), IN_UCHARS(-1));
+#if defined COMPILE_PCRE8
+ OP2(SLJIT_AND, TMP1, 0, TMP1, 0, SLJIT_IMM, 0xc0);
+ CMPTO(SLJIT_EQUAL, TMP1, 0, SLJIT_IMM, 0x80, start);
+#elif defined COMPILE_PCRE16
+ OP2(SLJIT_AND, TMP1, 0, TMP1, 0, SLJIT_IMM, 0xfc00);
+ CMPTO(SLJIT_EQUAL, TMP1, 0, SLJIT_IMM, 0xdc00, start);
+#else
+#error "Unknown code width"
+#endif
+ if (offset < 0)
+ OP2(SLJIT_SUB, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(1));
}
+#endif
+
+if (offset >= 0)
+ OP2(SLJIT_SUB, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(1));
JUMPHERE(quit);
-if (firstline)
+if (common->match_end_ptr != 0)
{
if (range_right >= 0)
- OP1(SLJIT_MOV, TMP1, 0, SLJIT_MEM1(SLJIT_SP), common->first_line_end);
+ OP1(SLJIT_MOV, TMP1, 0, SLJIT_MEM1(SLJIT_SP), common->match_end_ptr);
OP1(SLJIT_MOV, STR_END, 0, TMP3, 0);
if (range_right >= 0)
{
@@ -3812,66 +4526,26 @@ return TRUE;
}
#undef MAX_N_CHARS
-#undef MAX_N_BYTES
+#undef MAX_DIFF_CHARS
-static SLJIT_INLINE void fast_forward_first_char(compiler_common *common, pcre_uchar first_char, BOOL caseless, BOOL firstline)
+static SLJIT_INLINE void fast_forward_first_char(compiler_common *common, pcre_uchar first_char, BOOL caseless)
{
-DEFINE_COMPILER;
-struct sljit_label *start;
-struct sljit_jump *quit;
-struct sljit_jump *found;
-pcre_uchar oc, bit;
-
-if (firstline)
- {
- SLJIT_ASSERT(common->first_line_end != 0);
- OP1(SLJIT_MOV, TMP3, 0, STR_END, 0);
- OP1(SLJIT_MOV, STR_END, 0, SLJIT_MEM1(SLJIT_SP), common->first_line_end);
- }
-
-start = LABEL();
-quit = CMP(SLJIT_GREATER_EQUAL, STR_PTR, 0, STR_END, 0);
-OP1(MOV_UCHAR, TMP1, 0, SLJIT_MEM1(STR_PTR), 0);
+pcre_uchar oc;
oc = first_char;
if (caseless)
{
oc = TABLE_GET(first_char, common->fcc, first_char);
-#if defined SUPPORT_UCP && !(defined COMPILE_PCRE8)
+#if defined SUPPORT_UCP && !defined COMPILE_PCRE8
if (first_char > 127 && common->utf)
oc = UCD_OTHERCASE(first_char);
#endif
}
-if (first_char == oc)
- found = CMP(SLJIT_EQUAL, TMP1, 0, SLJIT_IMM, first_char);
-else
- {
- bit = first_char ^ oc;
- if (is_powerof2(bit))
- {
- OP2(SLJIT_OR, TMP2, 0, TMP1, 0, SLJIT_IMM, bit);
- found = CMP(SLJIT_EQUAL, TMP2, 0, SLJIT_IMM, first_char | bit);
- }
- else
- {
- OP2(SLJIT_SUB | SLJIT_SET_E, SLJIT_UNUSED, 0, TMP1, 0, SLJIT_IMM, first_char);
- OP_FLAGS(SLJIT_MOV, TMP2, 0, SLJIT_UNUSED, 0, SLJIT_EQUAL);
- OP2(SLJIT_SUB | SLJIT_SET_E, SLJIT_UNUSED, 0, TMP1, 0, SLJIT_IMM, oc);
- OP_FLAGS(SLJIT_OR | SLJIT_SET_E, TMP2, 0, TMP2, 0, SLJIT_EQUAL);
- found = JUMP(SLJIT_NOT_ZERO);
- }
- }
-OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(1));
-JUMPTO(SLJIT_JUMP, start);
-JUMPHERE(found);
-JUMPHERE(quit);
-
-if (firstline)
- OP1(SLJIT_MOV, STR_END, 0, TMP3, 0);
+fast_forward_first_char2(common, first_char, oc, 0);
}
-static SLJIT_INLINE void fast_forward_newline(compiler_common *common, BOOL firstline)
+static SLJIT_INLINE void fast_forward_newline(compiler_common *common)
{
DEFINE_COMPILER;
struct sljit_label *loop;
@@ -3882,11 +4556,10 @@ struct sljit_jump *foundcr = NULL;
struct sljit_jump *notfoundnl;
jump_list *newline = NULL;
-if (firstline)
+if (common->match_end_ptr != 0)
{
- SLJIT_ASSERT(common->first_line_end != 0);
OP1(SLJIT_MOV, TMP3, 0, STR_END, 0);
- OP1(SLJIT_MOV, STR_END, 0, SLJIT_MEM1(SLJIT_SP), common->first_line_end);
+ OP1(SLJIT_MOV, STR_END, 0, SLJIT_MEM1(SLJIT_SP), common->match_end_ptr);
}
if (common->nltype == NLTYPE_FIXED && common->newline > 255)
@@ -3917,7 +4590,7 @@ if (common->nltype == NLTYPE_FIXED && common->newline > 255)
JUMPHERE(firstchar);
JUMPHERE(lastchar);
- if (firstline)
+ if (common->match_end_ptr != 0)
OP1(SLJIT_MOV, STR_END, 0, TMP3, 0);
return;
}
@@ -3955,13 +4628,13 @@ if (common->nltype == NLTYPE_ANY || common->nltype == NLTYPE_ANYCRLF)
JUMPHERE(lastchar);
JUMPHERE(firstchar);
-if (firstline)
+if (common->match_end_ptr != 0)
OP1(SLJIT_MOV, STR_END, 0, TMP3, 0);
}
-static BOOL check_class_ranges(compiler_common *common, const pcre_uint8 *bits, BOOL nclass, BOOL invert, jump_list **backtracks);
+static BOOL check_class_ranges(compiler_common *common, const sljit_u8 *bits, BOOL nclass, BOOL invert, jump_list **backtracks);
-static SLJIT_INLINE void fast_forward_start_bits(compiler_common *common, pcre_uint8 *start_bits, BOOL firstline)
+static SLJIT_INLINE void fast_forward_start_bits(compiler_common *common, const sljit_u8 *start_bits)
{
DEFINE_COMPILER;
struct sljit_label *start;
@@ -3972,11 +4645,10 @@ jump_list *matches = NULL;
struct sljit_jump *jump;
#endif
-if (firstline)
+if (common->match_end_ptr != 0)
{
- SLJIT_ASSERT(common->first_line_end != 0);
OP1(SLJIT_MOV, RETURN_ADDR, 0, STR_END, 0);
- OP1(SLJIT_MOV, STR_END, 0, SLJIT_MEM1(SLJIT_SP), common->first_line_end);
+ OP1(SLJIT_MOV, STR_END, 0, SLJIT_MEM1(SLJIT_SP), common->match_end_ptr);
}
start = LABEL();
@@ -3996,7 +4668,7 @@ if (!check_class_ranges(common, start_bits, (start_bits[31] & 0x80) != 0, TRUE,
#endif
OP2(SLJIT_AND, TMP2, 0, TMP1, 0, SLJIT_IMM, 0x7);
OP2(SLJIT_LSHR, TMP1, 0, TMP1, 0, SLJIT_IMM, 3);
- OP1(SLJIT_MOV_UB, TMP1, 0, SLJIT_MEM1(TMP1), (sljit_sw)start_bits);
+ OP1(SLJIT_MOV_U8, TMP1, 0, SLJIT_MEM1(TMP1), (sljit_sw)start_bits);
OP2(SLJIT_SHL, TMP2, 0, SLJIT_IMM, 1, TMP2, 0);
OP2(SLJIT_AND | SLJIT_SET_E, SLJIT_UNUSED, 0, TMP1, 0, TMP2, 0);
found = JUMP(SLJIT_NOT_ZERO);
@@ -4012,7 +4684,7 @@ OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(1));
if (common->utf)
{
CMPTO(SLJIT_LESS, TMP1, 0, SLJIT_IMM, 0xc0, start);
- OP1(SLJIT_MOV_UB, TMP1, 0, SLJIT_MEM1(TMP1), (sljit_sw)PRIV(utf8_table4) - 0xc0);
+ OP1(SLJIT_MOV_U8, TMP1, 0, SLJIT_MEM1(TMP1), (sljit_sw)PRIV(utf8_table4) - 0xc0);
OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, TMP1, 0);
}
#elif defined COMPILE_PCRE16
@@ -4034,7 +4706,7 @@ if (matches != NULL)
set_jumps(matches, LABEL());
JUMPHERE(quit);
-if (firstline)
+if (common->match_end_ptr != 0)
OP1(SLJIT_MOV, STR_END, 0, RETURN_ADDR, 0);
}
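/* Illustrative sketch (editorial addition, not part of this patch): the
AND/LSHR/SHL sequence emitted above (and reused by the class checks below) is
the usual probe of a 256-bit class bitmap: */
static BOOL bitmap_contains(const sljit_u8 *bits, unsigned int c)
{
return (bits[c >> 3] & (1u << (c & 0x7))) != 0;
}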
@@ -4047,7 +4719,7 @@ struct sljit_jump *alreadyfound;
struct sljit_jump *found;
struct sljit_jump *foundoc = NULL;
struct sljit_jump *notfound;
-pcre_uint32 oc, bit;
+sljit_u32 oc, bit;
SLJIT_ASSERT(common->req_char_ptr != 0);
OP1(SLJIT_MOV, TMP2, 0, SLJIT_MEM1(SLJIT_SP), common->req_char_ptr);
@@ -4184,7 +4856,7 @@ else
if (common->utf)
jump = CMP(SLJIT_GREATER, TMP1, 0, SLJIT_IMM, 255);
#endif /* COMPILE_PCRE8 */
- OP1(SLJIT_MOV_UB, TMP1, 0, SLJIT_MEM1(TMP1), common->ctypes);
+ OP1(SLJIT_MOV_U8, TMP1, 0, SLJIT_MEM1(TMP1), common->ctypes);
OP2(SLJIT_LSHR, TMP1, 0, TMP1, 0, SLJIT_IMM, 4 /* ctype_word */);
OP2(SLJIT_AND, TMP1, 0, TMP1, 0, SLJIT_IMM, 1);
OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), LOCALS1, TMP1, 0);
@@ -4229,7 +4901,7 @@ else
if (common->utf)
jump = CMP(SLJIT_GREATER, TMP1, 0, SLJIT_IMM, 255);
#endif
- OP1(SLJIT_MOV_UB, TMP2, 0, SLJIT_MEM1(TMP1), common->ctypes);
+ OP1(SLJIT_MOV_U8, TMP2, 0, SLJIT_MEM1(TMP1), common->ctypes);
OP2(SLJIT_LSHR, TMP2, 0, TMP2, 0, SLJIT_IMM, 4 /* ctype_word */);
OP2(SLJIT_AND, TMP2, 0, TMP2, 0, SLJIT_IMM, 1);
#ifndef COMPILE_PCRE8
@@ -4245,11 +4917,12 @@ OP2(SLJIT_XOR | SLJIT_SET_E, SLJIT_UNUSED, 0, TMP2, 0, SLJIT_MEM1(SLJIT_SP), LOC
sljit_emit_fast_return(compiler, SLJIT_MEM1(SLJIT_SP), LOCALS0);
}
-static BOOL check_class_ranges(compiler_common *common, const pcre_uint8 *bits, BOOL nclass, BOOL invert, jump_list **backtracks)
+static BOOL check_class_ranges(compiler_common *common, const sljit_u8 *bits, BOOL nclass, BOOL invert, jump_list **backtracks)
{
+/* May destroy TMP1. */
DEFINE_COMPILER;
int ranges[MAX_RANGE_SIZE];
-pcre_uint8 bit, cbit, all;
+sljit_u8 bit, cbit, all;
int i, byte, length = 0;
bit = bits[0] & 0x1;
@@ -4544,12 +5217,12 @@ OP1(MOVU_UCHAR, CHAR2, 0, SLJIT_MEM1(STR_PTR), IN_UCHARS(1));
#ifndef COMPILE_PCRE8
jump = CMP(SLJIT_GREATER, CHAR1, 0, SLJIT_IMM, 255);
#endif
-OP1(SLJIT_MOV_UB, CHAR1, 0, SLJIT_MEM2(LCC_TABLE, CHAR1), 0);
+OP1(SLJIT_MOV_U8, CHAR1, 0, SLJIT_MEM2(LCC_TABLE, CHAR1), 0);
#ifndef COMPILE_PCRE8
JUMPHERE(jump);
jump = CMP(SLJIT_GREATER, CHAR2, 0, SLJIT_IMM, 255);
#endif
-OP1(SLJIT_MOV_UB, CHAR2, 0, SLJIT_MEM2(LCC_TABLE, CHAR2), 0);
+OP1(SLJIT_MOV_U8, CHAR2, 0, SLJIT_MEM2(LCC_TABLE, CHAR2), 0);
#ifndef COMPILE_PCRE8
JUMPHERE(jump);
#endif
@@ -4574,11 +5247,11 @@ sljit_emit_fast_return(compiler, RETURN_ADDR, 0);
static const pcre_uchar * SLJIT_CALL do_utf_caselesscmp(pcre_uchar *src1, jit_arguments *args, pcre_uchar *end1)
{
/* It would not be effective to implement this function at the JIT level. */
-pcre_uint32 c1, c2;
+sljit_u32 c1, c2;
const pcre_uchar *src2 = args->uchar_ptr;
const pcre_uchar *end2 = args->end;
const ucd_record *ur;
-const pcre_uint32 *pp;
+const sljit_u32 *pp;
while (src1 < end1)
{
@@ -4638,16 +5311,16 @@ if (context->sourcereg == -1)
#if defined COMPILE_PCRE8
#if defined SLJIT_UNALIGNED && SLJIT_UNALIGNED
if (context->length >= 4)
- OP1(SLJIT_MOV_SI, TMP1, 0, SLJIT_MEM1(STR_PTR), -context->length);
+ OP1(SLJIT_MOV_S32, TMP1, 0, SLJIT_MEM1(STR_PTR), -context->length);
else if (context->length >= 2)
- OP1(SLJIT_MOV_UH, TMP1, 0, SLJIT_MEM1(STR_PTR), -context->length);
+ OP1(SLJIT_MOV_U16, TMP1, 0, SLJIT_MEM1(STR_PTR), -context->length);
else
#endif
- OP1(SLJIT_MOV_UB, TMP1, 0, SLJIT_MEM1(STR_PTR), -context->length);
+ OP1(SLJIT_MOV_U8, TMP1, 0, SLJIT_MEM1(STR_PTR), -context->length);
#elif defined COMPILE_PCRE16
#if defined SLJIT_UNALIGNED && SLJIT_UNALIGNED
if (context->length >= 4)
- OP1(SLJIT_MOV_SI, TMP1, 0, SLJIT_MEM1(STR_PTR), -context->length);
+ OP1(SLJIT_MOV_S32, TMP1, 0, SLJIT_MEM1(STR_PTR), -context->length);
else
#endif
OP1(MOV_UCHAR, TMP1, 0, SLJIT_MEM1(STR_PTR), -context->length);
@@ -4689,12 +5362,12 @@ do
#endif
{
if (context->length >= 4)
- OP1(SLJIT_MOV_SI, context->sourcereg, 0, SLJIT_MEM1(STR_PTR), -context->length);
+ OP1(SLJIT_MOV_S32, context->sourcereg, 0, SLJIT_MEM1(STR_PTR), -context->length);
else if (context->length >= 2)
- OP1(SLJIT_MOV_UH, context->sourcereg, 0, SLJIT_MEM1(STR_PTR), -context->length);
+ OP1(SLJIT_MOV_U16, context->sourcereg, 0, SLJIT_MEM1(STR_PTR), -context->length);
#if defined COMPILE_PCRE8
else if (context->length >= 1)
- OP1(SLJIT_MOV_UB, context->sourcereg, 0, SLJIT_MEM1(STR_PTR), -context->length);
+ OP1(SLJIT_MOV_U8, context->sourcereg, 0, SLJIT_MEM1(STR_PTR), -context->length);
#endif /* COMPILE_PCRE8 */
context->sourcereg = context->sourcereg == TMP1 ? TMP2 : TMP1;
@@ -4777,6 +5450,8 @@ return cc;
} \
charoffset = (value);
+static pcre_uchar *compile_char1_matchingpath(compiler_common *common, pcre_uchar type, pcre_uchar *cc, jump_list **backtracks, BOOL check_str_ptr);
+
static void compile_xclass_matchingpath(compiler_common *common, pcre_uchar *cc, jump_list **backtracks)
{
DEFINE_COMPILER;
@@ -4793,8 +5468,8 @@ BOOL utf = common->utf;
#ifdef SUPPORT_UCP
BOOL needstype = FALSE, needsscript = FALSE, needschar = FALSE;
BOOL charsaved = FALSE;
-int typereg = TMP1, scriptreg = TMP1;
-const pcre_uint32 *other_cases;
+int typereg = TMP1;
+const sljit_u32 *other_cases;
sljit_uw typeoffset;
#endif
@@ -4856,6 +5531,14 @@ while (*cc != XCL_END)
switch(*cc)
{
case PT_ANY:
+ /* Any either accepts everything or is ignored. */
+ if (cc[-1] == XCL_PROP)
+ {
+ compile_char1_matchingpath(common, OP_ALLANY, cc, backtracks, FALSE);
+ if (list == backtracks)
+ add_jump(compiler, backtracks, JUMP(SLJIT_JUMP));
+ return;
+ }
break;
case PT_LAMP:
@@ -4892,10 +5575,10 @@ while (*cc != XCL_END)
}
#endif
}
+SLJIT_ASSERT(compares > 0);
/* We are not necessarily in utf mode even in 8 bit mode. */
cc = ccbegin;
-detect_partial_match(common, backtracks);
read_char_range(common, min, max, (cc[-1] & XCL_NOT) != 0);
if ((cc[-1] & XCL_HASPROP) == 0)
@@ -4903,11 +5586,11 @@ if ((cc[-1] & XCL_HASPROP) == 0)
if ((cc[-1] & XCL_MAP) != 0)
{
jump = CMP(SLJIT_GREATER, TMP1, 0, SLJIT_IMM, 255);
- if (!check_class_ranges(common, (const pcre_uint8 *)cc, (((const pcre_uint8 *)cc)[31] & 0x80) != 0, TRUE, &found))
+ if (!check_class_ranges(common, (const sljit_u8 *)cc, (((const sljit_u8 *)cc)[31] & 0x80) != 0, TRUE, &found))
{
OP2(SLJIT_AND, TMP2, 0, TMP1, 0, SLJIT_IMM, 0x7);
OP2(SLJIT_LSHR, TMP1, 0, TMP1, 0, SLJIT_IMM, 3);
- OP1(SLJIT_MOV_UB, TMP1, 0, SLJIT_MEM1(TMP1), (sljit_sw)cc);
+ OP1(SLJIT_MOV_U8, TMP1, 0, SLJIT_MEM1(TMP1), (sljit_sw)cc);
OP2(SLJIT_SHL, TMP2, 0, SLJIT_IMM, 1, TMP2, 0);
OP2(SLJIT_AND | SLJIT_SET_E, SLJIT_UNUSED, 0, TMP1, 0, TMP2, 0);
add_jump(compiler, &found, JUMP(SLJIT_NOT_ZERO));
@@ -4926,11 +5609,11 @@ if ((cc[-1] & XCL_HASPROP) == 0)
}
else if ((cc[-1] & XCL_MAP) != 0)
{
- OP1(SLJIT_MOV, TMP3, 0, TMP1, 0);
+ OP1(SLJIT_MOV, RETURN_ADDR, 0, TMP1, 0);
#ifdef SUPPORT_UCP
charsaved = TRUE;
#endif
- if (!check_class_ranges(common, (const pcre_uint8 *)cc, FALSE, TRUE, list))
+ if (!check_class_ranges(common, (const sljit_u8 *)cc, FALSE, TRUE, list))
{
#ifdef COMPILE_PCRE8
jump = NULL;
@@ -4940,7 +5623,7 @@ else if ((cc[-1] & XCL_MAP) != 0)
OP2(SLJIT_AND, TMP2, 0, TMP1, 0, SLJIT_IMM, 0x7);
OP2(SLJIT_LSHR, TMP1, 0, TMP1, 0, SLJIT_IMM, 3);
- OP1(SLJIT_MOV_UB, TMP1, 0, SLJIT_MEM1(TMP1), (sljit_sw)cc);
+ OP1(SLJIT_MOV_U8, TMP1, 0, SLJIT_MEM1(TMP1), (sljit_sw)cc);
OP2(SLJIT_SHL, TMP2, 0, SLJIT_IMM, 1, TMP2, 0);
OP2(SLJIT_AND | SLJIT_SET_E, SLJIT_UNUSED, 0, TMP1, 0, TMP2, 0);
add_jump(compiler, list, JUMP(SLJIT_NOT_ZERO));
@@ -4951,45 +5634,82 @@ else if ((cc[-1] & XCL_MAP) != 0)
JUMPHERE(jump);
}
- OP1(SLJIT_MOV, TMP1, 0, TMP3, 0);
+ OP1(SLJIT_MOV, TMP1, 0, RETURN_ADDR, 0);
cc += 32 / sizeof(pcre_uchar);
}
#ifdef SUPPORT_UCP
-/* Simple register allocation. TMP1 is preferred if possible. */
if (needstype || needsscript)
{
if (needschar && !charsaved)
- OP1(SLJIT_MOV, TMP3, 0, TMP1, 0);
- add_jump(compiler, &common->getucd, JUMP(SLJIT_FAST_CALL));
- if (needschar)
+ OP1(SLJIT_MOV, RETURN_ADDR, 0, TMP1, 0);
+
+ OP2(SLJIT_LSHR, TMP2, 0, TMP1, 0, SLJIT_IMM, UCD_BLOCK_SHIFT);
+ OP1(SLJIT_MOV_U8, TMP2, 0, SLJIT_MEM1(TMP2), (sljit_sw)PRIV(ucd_stage1));
+ OP2(SLJIT_AND, TMP1, 0, TMP1, 0, SLJIT_IMM, UCD_BLOCK_MASK);
+ OP2(SLJIT_SHL, TMP2, 0, TMP2, 0, SLJIT_IMM, UCD_BLOCK_SHIFT);
+ OP2(SLJIT_ADD, TMP1, 0, TMP1, 0, TMP2, 0);
+ OP1(SLJIT_MOV, TMP2, 0, SLJIT_IMM, (sljit_sw)PRIV(ucd_stage2));
+ OP1(SLJIT_MOV_U16, TMP2, 0, SLJIT_MEM2(TMP2, TMP1), 1);
+
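/* Illustrative sketch (editorial addition, not part of this patch): the sequence
above is the two-stage Unicode property table walk; in plain C it computes
roughly the following record pointer: */
static const ucd_record *lookup_ucd_record(unsigned int c)
{
sljit_u32 block = PRIV(ucd_stage1)[c >> UCD_BLOCK_SHIFT];
sljit_u32 index = (block << UCD_BLOCK_SHIFT) + (c & UCD_BLOCK_MASK);
return PRIV(ucd_records) + PRIV(ucd_stage2)[index];
}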
+ /* Before anything else, we deal with scripts. */
+ if (needsscript)
{
- if (needstype)
+ OP1(SLJIT_MOV, TMP1, 0, SLJIT_IMM, (sljit_sw)PRIV(ucd_records) + SLJIT_OFFSETOF(ucd_record, script));
+ OP1(SLJIT_MOV_U8, TMP1, 0, SLJIT_MEM2(TMP1, TMP2), 3);
+
+ ccbegin = cc;
+
+ while (*cc != XCL_END)
{
- OP1(SLJIT_MOV, RETURN_ADDR, 0, TMP1, 0);
- typereg = RETURN_ADDR;
+ if (*cc == XCL_SINGLE)
+ {
+ cc ++;
+ GETCHARINCTEST(c, cc);
+ }
+ else if (*cc == XCL_RANGE)
+ {
+ cc ++;
+ GETCHARINCTEST(c, cc);
+ GETCHARINCTEST(c, cc);
+ }
+ else
+ {
+ SLJIT_ASSERT(*cc == XCL_PROP || *cc == XCL_NOTPROP);
+ cc++;
+ if (*cc == PT_SC)
+ {
+ compares--;
+ invertcmp = (compares == 0 && list != backtracks);
+ if (cc[-1] == XCL_NOTPROP)
+ invertcmp ^= 0x1;
+ jump = CMP(SLJIT_EQUAL ^ invertcmp, TMP1, 0, SLJIT_IMM, (int)cc[1]);
+ add_jump(compiler, compares > 0 ? list : backtracks, jump);
+ }
+ cc += 2;
+ }
}
- if (needsscript)
- scriptreg = TMP3;
- OP1(SLJIT_MOV, TMP1, 0, TMP3, 0);
+ cc = ccbegin;
}
- else if (needstype && needsscript)
- scriptreg = TMP3;
- /* In all other cases only one of them was specified, and that can goes to TMP1. */
- if (needsscript)
+ if (needschar)
{
- if (scriptreg == TMP1)
+ OP1(SLJIT_MOV, TMP1, 0, RETURN_ADDR, 0);
+ }
+
+ if (needstype)
+ {
+ if (!needschar)
{
- OP1(SLJIT_MOV, scriptreg, 0, SLJIT_IMM, (sljit_sw)PRIV(ucd_records) + SLJIT_OFFSETOF(ucd_record, script));
- OP1(SLJIT_MOV_UB, scriptreg, 0, SLJIT_MEM2(scriptreg, TMP2), 3);
+ OP1(SLJIT_MOV, TMP1, 0, SLJIT_IMM, (sljit_sw)PRIV(ucd_records) + SLJIT_OFFSETOF(ucd_record, chartype));
+ OP1(SLJIT_MOV_U8, TMP1, 0, SLJIT_MEM2(TMP1, TMP2), 3);
}
else
{
OP2(SLJIT_SHL, TMP2, 0, TMP2, 0, SLJIT_IMM, 3);
- OP2(SLJIT_ADD, TMP2, 0, TMP2, 0, SLJIT_IMM, (sljit_sw)PRIV(ucd_records) + SLJIT_OFFSETOF(ucd_record, script));
- OP1(SLJIT_MOV_UB, scriptreg, 0, SLJIT_MEM1(TMP2), 0);
+ OP1(SLJIT_MOV_U8, RETURN_ADDR, 0, SLJIT_MEM1(TMP2), (sljit_sw)PRIV(ucd_records) + SLJIT_OFFSETOF(ucd_record, chartype));
+ typereg = RETURN_ADDR;
}
}
}
@@ -5061,20 +5781,15 @@ while (*cc != XCL_END)
#ifdef SUPPORT_UCP
else
{
+ SLJIT_ASSERT(*cc == XCL_PROP || *cc == XCL_NOTPROP);
if (*cc == XCL_NOTPROP)
invertcmp ^= 0x1;
cc++;
switch(*cc)
{
case PT_ANY:
- if (list != backtracks)
- {
- if ((cc[-1] == XCL_NOTPROP && compares > 0) || (cc[-1] == XCL_PROP && compares == 0))
- continue;
- }
- else if (cc[-1] == XCL_NOTPROP)
- continue;
- jump = JUMP(SLJIT_JUMP);
+ if (!invertcmp)
+ jump = JUMP(SLJIT_JUMP);
break;
case PT_LAMP:
@@ -5098,7 +5813,8 @@ while (*cc != XCL_END)
break;
case PT_SC:
- jump = CMP(SLJIT_EQUAL ^ invertcmp, scriptreg, 0, SLJIT_IMM, (int)cc[1]);
+ compares++;
+ /* Do nothing. */
break;
case PT_SPACE:
@@ -5264,6 +5980,10 @@ while (*cc != XCL_END)
OP_FLAGS(SLJIT_OR | SLJIT_SET_E, TMP2, 0, TMP2, 0, SLJIT_LESS_EQUAL);
jump = JUMP(SLJIT_NOT_ZERO ^ invertcmp);
break;
+
+ default:
+ SLJIT_ASSERT_STOP();
+ break;
}
cc += 2;
}
@@ -5282,19 +6002,13 @@ if (found != NULL)
#endif
-static pcre_uchar *compile_char1_matchingpath(compiler_common *common, pcre_uchar type, pcre_uchar *cc, jump_list **backtracks)
+static pcre_uchar *compile_simple_assertion_matchingpath(compiler_common *common, pcre_uchar type, pcre_uchar *cc, jump_list **backtracks)
{
DEFINE_COMPILER;
int length;
-unsigned int c, oc, bit;
-compare_context context;
struct sljit_jump *jump[4];
-jump_list *end_list;
#ifdef SUPPORT_UTF
struct sljit_label *label;
-#ifdef SUPPORT_UCP
-pcre_uchar propdata[5];
-#endif
#endif /* SUPPORT_UTF */
switch(type)
@@ -5317,12 +6031,220 @@ switch(type)
add_jump(compiler, backtracks, JUMP(type == OP_NOT_WORD_BOUNDARY ? SLJIT_NOT_ZERO : SLJIT_ZERO));
return cc;
+ case OP_EODN:
+ /* Requires rather complex checks. */
+ jump[0] = CMP(SLJIT_GREATER_EQUAL, STR_PTR, 0, STR_END, 0);
+ if (common->nltype == NLTYPE_FIXED && common->newline > 255)
+ {
+ OP2(SLJIT_ADD, TMP2, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(2));
+ OP1(MOV_UCHAR, TMP1, 0, SLJIT_MEM1(STR_PTR), IN_UCHARS(0));
+ if (common->mode == JIT_COMPILE)
+ add_jump(compiler, backtracks, CMP(SLJIT_NOT_EQUAL, TMP2, 0, STR_END, 0));
+ else
+ {
+ jump[1] = CMP(SLJIT_EQUAL, TMP2, 0, STR_END, 0);
+ OP2(SLJIT_SUB | SLJIT_SET_U, SLJIT_UNUSED, 0, TMP2, 0, STR_END, 0);
+ OP_FLAGS(SLJIT_MOV, TMP2, 0, SLJIT_UNUSED, 0, SLJIT_LESS);
+ OP2(SLJIT_SUB | SLJIT_SET_E, SLJIT_UNUSED, 0, TMP1, 0, SLJIT_IMM, (common->newline >> 8) & 0xff);
+ OP_FLAGS(SLJIT_OR | SLJIT_SET_E, TMP2, 0, TMP2, 0, SLJIT_NOT_EQUAL);
+ add_jump(compiler, backtracks, JUMP(SLJIT_NOT_EQUAL));
+ check_partial(common, TRUE);
+ add_jump(compiler, backtracks, JUMP(SLJIT_JUMP));
+ JUMPHERE(jump[1]);
+ }
+ OP1(MOV_UCHAR, TMP2, 0, SLJIT_MEM1(STR_PTR), IN_UCHARS(1));
+ add_jump(compiler, backtracks, CMP(SLJIT_NOT_EQUAL, TMP1, 0, SLJIT_IMM, (common->newline >> 8) & 0xff));
+ add_jump(compiler, backtracks, CMP(SLJIT_NOT_EQUAL, TMP2, 0, SLJIT_IMM, common->newline & 0xff));
+ }
+ else if (common->nltype == NLTYPE_FIXED)
+ {
+ OP2(SLJIT_ADD, TMP2, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(1));
+ OP1(MOV_UCHAR, TMP1, 0, SLJIT_MEM1(STR_PTR), IN_UCHARS(0));
+ add_jump(compiler, backtracks, CMP(SLJIT_NOT_EQUAL, TMP2, 0, STR_END, 0));
+ add_jump(compiler, backtracks, CMP(SLJIT_NOT_EQUAL, TMP1, 0, SLJIT_IMM, common->newline));
+ }
+ else
+ {
+ OP1(MOV_UCHAR, TMP1, 0, SLJIT_MEM1(STR_PTR), IN_UCHARS(0));
+ jump[1] = CMP(SLJIT_NOT_EQUAL, TMP1, 0, SLJIT_IMM, CHAR_CR);
+ OP2(SLJIT_ADD, TMP2, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(2));
+ OP2(SLJIT_SUB | SLJIT_SET_U, SLJIT_UNUSED, 0, TMP2, 0, STR_END, 0);
+ jump[2] = JUMP(SLJIT_GREATER);
+ add_jump(compiler, backtracks, JUMP(SLJIT_LESS));
+ /* Equal. */
+ OP1(MOV_UCHAR, TMP1, 0, SLJIT_MEM1(STR_PTR), IN_UCHARS(1));
+ jump[3] = CMP(SLJIT_EQUAL, TMP1, 0, SLJIT_IMM, CHAR_NL);
+ add_jump(compiler, backtracks, JUMP(SLJIT_JUMP));
+
+ JUMPHERE(jump[1]);
+ if (common->nltype == NLTYPE_ANYCRLF)
+ {
+ OP2(SLJIT_ADD, TMP2, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(1));
+ add_jump(compiler, backtracks, CMP(SLJIT_LESS, TMP2, 0, STR_END, 0));
+ add_jump(compiler, backtracks, CMP(SLJIT_NOT_EQUAL, TMP1, 0, SLJIT_IMM, CHAR_NL));
+ }
+ else
+ {
+ OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), LOCALS1, STR_PTR, 0);
+ read_char_range(common, common->nlmin, common->nlmax, TRUE);
+ add_jump(compiler, backtracks, CMP(SLJIT_NOT_EQUAL, STR_PTR, 0, STR_END, 0));
+ add_jump(compiler, &common->anynewline, JUMP(SLJIT_FAST_CALL));
+ add_jump(compiler, backtracks, JUMP(SLJIT_ZERO));
+ OP1(SLJIT_MOV, STR_PTR, 0, SLJIT_MEM1(SLJIT_SP), LOCALS1);
+ }
+ JUMPHERE(jump[2]);
+ JUMPHERE(jump[3]);
+ }
+ JUMPHERE(jump[0]);
+ check_partial(common, FALSE);
+ return cc;
+
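/* Editorial note (summary of the checks above; not part of this patch): OP_EODN
succeeds only when STR_PTR is already at STR_END, or when the remaining input is
exactly one newline of the configured type; OP_DOLL below reuses it whenever
common->endonly is not set. */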
+ case OP_EOD:
+ add_jump(compiler, backtracks, CMP(SLJIT_LESS, STR_PTR, 0, STR_END, 0));
+ check_partial(common, FALSE);
+ return cc;
+
+ case OP_DOLL:
+ OP1(SLJIT_MOV, TMP2, 0, ARGUMENTS, 0);
+ OP1(SLJIT_MOV_U8, TMP2, 0, SLJIT_MEM1(TMP2), SLJIT_OFFSETOF(jit_arguments, noteol));
+ add_jump(compiler, backtracks, CMP(SLJIT_NOT_EQUAL, TMP2, 0, SLJIT_IMM, 0));
+
+ if (!common->endonly)
+ compile_simple_assertion_matchingpath(common, OP_EODN, cc, backtracks);
+ else
+ {
+ add_jump(compiler, backtracks, CMP(SLJIT_LESS, STR_PTR, 0, STR_END, 0));
+ check_partial(common, FALSE);
+ }
+ return cc;
+
+ case OP_DOLLM:
+ jump[1] = CMP(SLJIT_LESS, STR_PTR, 0, STR_END, 0);
+ OP1(SLJIT_MOV, TMP2, 0, ARGUMENTS, 0);
+ OP1(SLJIT_MOV_U8, TMP2, 0, SLJIT_MEM1(TMP2), SLJIT_OFFSETOF(jit_arguments, noteol));
+ add_jump(compiler, backtracks, CMP(SLJIT_NOT_EQUAL, TMP2, 0, SLJIT_IMM, 0));
+ check_partial(common, FALSE);
+ jump[0] = JUMP(SLJIT_JUMP);
+ JUMPHERE(jump[1]);
+
+ if (common->nltype == NLTYPE_FIXED && common->newline > 255)
+ {
+ OP2(SLJIT_ADD, TMP2, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(2));
+ OP1(MOV_UCHAR, TMP1, 0, SLJIT_MEM1(STR_PTR), IN_UCHARS(0));
+ if (common->mode == JIT_COMPILE)
+ add_jump(compiler, backtracks, CMP(SLJIT_GREATER, TMP2, 0, STR_END, 0));
+ else
+ {
+ jump[1] = CMP(SLJIT_LESS_EQUAL, TMP2, 0, STR_END, 0);
+ /* STR_PTR = STR_END - IN_UCHARS(1) */
+ add_jump(compiler, backtracks, CMP(SLJIT_NOT_EQUAL, TMP1, 0, SLJIT_IMM, (common->newline >> 8) & 0xff));
+ check_partial(common, TRUE);
+ add_jump(compiler, backtracks, JUMP(SLJIT_JUMP));
+ JUMPHERE(jump[1]);
+ }
+
+ OP1(MOV_UCHAR, TMP2, 0, SLJIT_MEM1(STR_PTR), IN_UCHARS(1));
+ add_jump(compiler, backtracks, CMP(SLJIT_NOT_EQUAL, TMP1, 0, SLJIT_IMM, (common->newline >> 8) & 0xff));
+ add_jump(compiler, backtracks, CMP(SLJIT_NOT_EQUAL, TMP2, 0, SLJIT_IMM, common->newline & 0xff));
+ }
+ else
+ {
+ peek_char(common, common->nlmax);
+ check_newlinechar(common, common->nltype, backtracks, FALSE);
+ }
+ JUMPHERE(jump[0]);
+ return cc;
+
+ case OP_CIRC:
+ OP1(SLJIT_MOV, TMP2, 0, ARGUMENTS, 0);
+ OP1(SLJIT_MOV, TMP1, 0, SLJIT_MEM1(TMP2), SLJIT_OFFSETOF(jit_arguments, begin));
+ add_jump(compiler, backtracks, CMP(SLJIT_GREATER, STR_PTR, 0, TMP1, 0));
+ OP1(SLJIT_MOV_U8, TMP2, 0, SLJIT_MEM1(TMP2), SLJIT_OFFSETOF(jit_arguments, notbol));
+ add_jump(compiler, backtracks, CMP(SLJIT_NOT_EQUAL, TMP2, 0, SLJIT_IMM, 0));
+ return cc;
+
+ case OP_CIRCM:
+ OP1(SLJIT_MOV, TMP2, 0, ARGUMENTS, 0);
+ OP1(SLJIT_MOV, TMP1, 0, SLJIT_MEM1(TMP2), SLJIT_OFFSETOF(jit_arguments, begin));
+ jump[1] = CMP(SLJIT_GREATER, STR_PTR, 0, TMP1, 0);
+ OP1(SLJIT_MOV_U8, TMP2, 0, SLJIT_MEM1(TMP2), SLJIT_OFFSETOF(jit_arguments, notbol));
+ add_jump(compiler, backtracks, CMP(SLJIT_NOT_EQUAL, TMP2, 0, SLJIT_IMM, 0));
+ jump[0] = JUMP(SLJIT_JUMP);
+ JUMPHERE(jump[1]);
+
+ add_jump(compiler, backtracks, CMP(SLJIT_GREATER_EQUAL, STR_PTR, 0, STR_END, 0));
+ if (common->nltype == NLTYPE_FIXED && common->newline > 255)
+ {
+ OP2(SLJIT_SUB, TMP2, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(2));
+ add_jump(compiler, backtracks, CMP(SLJIT_LESS, TMP2, 0, TMP1, 0));
+ OP1(MOV_UCHAR, TMP1, 0, SLJIT_MEM1(STR_PTR), IN_UCHARS(-2));
+ OP1(MOV_UCHAR, TMP2, 0, SLJIT_MEM1(STR_PTR), IN_UCHARS(-1));
+ add_jump(compiler, backtracks, CMP(SLJIT_NOT_EQUAL, TMP1, 0, SLJIT_IMM, (common->newline >> 8) & 0xff));
+ add_jump(compiler, backtracks, CMP(SLJIT_NOT_EQUAL, TMP2, 0, SLJIT_IMM, common->newline & 0xff));
+ }
+ else
+ {
+ skip_char_back(common);
+ read_char_range(common, common->nlmin, common->nlmax, TRUE);
+ check_newlinechar(common, common->nltype, backtracks, FALSE);
+ }
+ JUMPHERE(jump[0]);
+ return cc;
+
+ case OP_REVERSE:
+ length = GET(cc, 0);
+ if (length == 0)
+ return cc + LINK_SIZE;
+ OP1(SLJIT_MOV, TMP1, 0, ARGUMENTS, 0);
+#ifdef SUPPORT_UTF
+ if (common->utf)
+ {
+ OP1(SLJIT_MOV, TMP3, 0, SLJIT_MEM1(TMP1), SLJIT_OFFSETOF(jit_arguments, begin));
+ OP1(SLJIT_MOV, TMP2, 0, SLJIT_IMM, length);
+ label = LABEL();
+ add_jump(compiler, backtracks, CMP(SLJIT_LESS_EQUAL, STR_PTR, 0, TMP3, 0));
+ skip_char_back(common);
+ OP2(SLJIT_SUB | SLJIT_SET_E, TMP2, 0, TMP2, 0, SLJIT_IMM, 1);
+ JUMPTO(SLJIT_NOT_ZERO, label);
+ }
+ else
+#endif
+ {
+ OP1(SLJIT_MOV, TMP1, 0, SLJIT_MEM1(TMP1), SLJIT_OFFSETOF(jit_arguments, begin));
+ OP2(SLJIT_SUB, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(length));
+ add_jump(compiler, backtracks, CMP(SLJIT_LESS, STR_PTR, 0, TMP1, 0));
+ }
+ check_start_used_ptr(common);
+ return cc + LINK_SIZE;
+ }
+SLJIT_ASSERT_STOP();
+return cc;
+}
+
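/* Illustrative sketch (editorial addition, not part of this patch): the
OP_REVERSE case above rewinds STR_PTR by the fixed lookbehind length. Outside
UTF mode a character is one code unit, so a bounds-checked subtraction is
enough; in UTF mode the generated loop steps back one character at a time
because characters may span several units: */
static BOOL rewind_non_utf(const pcre_uchar *begin, pcre_uchar **str_ptr, int length)
{
if (*str_ptr - begin < length)
  return FALSE;               /* not enough input before the current position */
*str_ptr -= length;
return TRUE;
}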
+static pcre_uchar *compile_char1_matchingpath(compiler_common *common, pcre_uchar type, pcre_uchar *cc, jump_list **backtracks, BOOL check_str_ptr)
+{
+DEFINE_COMPILER;
+int length;
+unsigned int c, oc, bit;
+compare_context context;
+struct sljit_jump *jump[3];
+jump_list *end_list;
+#ifdef SUPPORT_UTF
+struct sljit_label *label;
+#ifdef SUPPORT_UCP
+pcre_uchar propdata[5];
+#endif
+#endif /* SUPPORT_UTF */
+
+switch(type)
+ {
case OP_NOT_DIGIT:
case OP_DIGIT:
/* Digits are usually 0-9, so it is worth to optimize them. */
- detect_partial_match(common, backtracks);
+ if (check_str_ptr)
+ detect_partial_match(common, backtracks);
#if defined SUPPORT_UTF && defined COMPILE_PCRE8
- if (common->utf && is_char7_bitset((const pcre_uint8*)common->ctypes - cbit_length + cbit_digit, FALSE))
+ if (common->utf && is_char7_bitset((const sljit_u8 *)common->ctypes - cbit_length + cbit_digit, FALSE))
read_char7_type(common, type == OP_NOT_DIGIT);
else
#endif
@@ -5334,9 +6256,10 @@ switch(type)
case OP_NOT_WHITESPACE:
case OP_WHITESPACE:
- detect_partial_match(common, backtracks);
+ if (check_str_ptr)
+ detect_partial_match(common, backtracks);
#if defined SUPPORT_UTF && defined COMPILE_PCRE8
- if (common->utf && is_char7_bitset((const pcre_uint8*)common->ctypes - cbit_length + cbit_space, FALSE))
+ if (common->utf && is_char7_bitset((const sljit_u8 *)common->ctypes - cbit_length + cbit_space, FALSE))
read_char7_type(common, type == OP_NOT_WHITESPACE);
else
#endif
@@ -5347,9 +6270,10 @@ switch(type)
case OP_NOT_WORDCHAR:
case OP_WORDCHAR:
- detect_partial_match(common, backtracks);
+ if (check_str_ptr)
+ detect_partial_match(common, backtracks);
#if defined SUPPORT_UTF && defined COMPILE_PCRE8
- if (common->utf && is_char7_bitset((const pcre_uint8*)common->ctypes - cbit_length + cbit_word, FALSE))
+ if (common->utf && is_char7_bitset((const sljit_u8 *)common->ctypes - cbit_length + cbit_word, FALSE))
read_char7_type(common, type == OP_NOT_WORDCHAR);
else
#endif
@@ -5359,7 +6283,8 @@ switch(type)
return cc;
case OP_ANY:
- detect_partial_match(common, backtracks);
+ if (check_str_ptr)
+ detect_partial_match(common, backtracks);
read_char_range(common, common->nlmin, common->nlmax, TRUE);
if (common->nltype == NLTYPE_FIXED && common->newline > 255)
{
@@ -5380,7 +6305,8 @@ switch(type)
return cc;
case OP_ALLANY:
- detect_partial_match(common, backtracks);
+ if (check_str_ptr)
+ detect_partial_match(common, backtracks);
#ifdef SUPPORT_UTF
if (common->utf)
{
@@ -5389,7 +6315,7 @@ switch(type)
#if defined COMPILE_PCRE8 || defined COMPILE_PCRE16
#if defined COMPILE_PCRE8
jump[0] = CMP(SLJIT_LESS, TMP1, 0, SLJIT_IMM, 0xc0);
- OP1(SLJIT_MOV_UB, TMP1, 0, SLJIT_MEM1(TMP1), (sljit_sw)PRIV(utf8_table4) - 0xc0);
+ OP1(SLJIT_MOV_U8, TMP1, 0, SLJIT_MEM1(TMP1), (sljit_sw)PRIV(utf8_table4) - 0xc0);
OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, TMP1, 0);
#elif defined COMPILE_PCRE16
jump[0] = CMP(SLJIT_LESS, TMP1, 0, SLJIT_IMM, 0xd800);
@@ -5408,7 +6334,8 @@ switch(type)
return cc;
case OP_ANYBYTE:
- detect_partial_match(common, backtracks);
+ if (check_str_ptr)
+ detect_partial_match(common, backtracks);
OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(1));
return cc;
@@ -5421,13 +6348,16 @@ switch(type)
propdata[2] = cc[0];
propdata[3] = cc[1];
propdata[4] = XCL_END;
+ if (check_str_ptr)
+ detect_partial_match(common, backtracks);
compile_xclass_matchingpath(common, propdata, backtracks);
return cc + 2;
#endif
#endif
case OP_ANYNL:
- detect_partial_match(common, backtracks);
+ if (check_str_ptr)
+ detect_partial_match(common, backtracks);
read_char_range(common, common->bsr_nlmin, common->bsr_nlmax, FALSE);
jump[0] = CMP(SLJIT_NOT_EQUAL, TMP1, 0, SLJIT_IMM, CHAR_CR);
/* We don't need to handle soft partial matching case. */
@@ -5449,7 +6379,8 @@ switch(type)
case OP_NOT_HSPACE:
case OP_HSPACE:
- detect_partial_match(common, backtracks);
+ if (check_str_ptr)
+ detect_partial_match(common, backtracks);
read_char_range(common, 0x9, 0x3000, type == OP_NOT_HSPACE);
add_jump(compiler, &common->hspace, JUMP(SLJIT_FAST_CALL));
add_jump(compiler, backtracks, JUMP(type == OP_NOT_HSPACE ? SLJIT_NOT_ZERO : SLJIT_ZERO));
@@ -5457,7 +6388,8 @@ switch(type)
case OP_NOT_VSPACE:
case OP_VSPACE:
- detect_partial_match(common, backtracks);
+ if (check_str_ptr)
+ detect_partial_match(common, backtracks);
read_char_range(common, 0xa, 0x2029, type == OP_NOT_VSPACE);
add_jump(compiler, &common->vspace, JUMP(SLJIT_FAST_CALL));
add_jump(compiler, backtracks, JUMP(type == OP_NOT_VSPACE ? SLJIT_NOT_ZERO : SLJIT_ZERO));
@@ -5465,13 +6397,14 @@ switch(type)
#ifdef SUPPORT_UCP
case OP_EXTUNI:
- detect_partial_match(common, backtracks);
+ if (check_str_ptr)
+ detect_partial_match(common, backtracks);
read_char(common);
add_jump(compiler, &common->getucd, JUMP(SLJIT_FAST_CALL));
OP1(SLJIT_MOV, TMP1, 0, SLJIT_IMM, (sljit_sw)PRIV(ucd_records) + SLJIT_OFFSETOF(ucd_record, gbprop));
/* Optimize register allocation: use a real register. */
OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), LOCALS0, STACK_TOP, 0);
- OP1(SLJIT_MOV_UB, STACK_TOP, 0, SLJIT_MEM2(TMP1, TMP2), 3);
+ OP1(SLJIT_MOV_U8, STACK_TOP, 0, SLJIT_MEM2(TMP1, TMP2), 3);
label = LABEL();
jump[0] = CMP(SLJIT_GREATER_EQUAL, STR_PTR, 0, STR_END, 0);
@@ -5479,10 +6412,10 @@ switch(type)
read_char(common);
add_jump(compiler, &common->getucd, JUMP(SLJIT_FAST_CALL));
OP1(SLJIT_MOV, TMP1, 0, SLJIT_IMM, (sljit_sw)PRIV(ucd_records) + SLJIT_OFFSETOF(ucd_record, gbprop));
- OP1(SLJIT_MOV_UB, TMP2, 0, SLJIT_MEM2(TMP1, TMP2), 3);
+ OP1(SLJIT_MOV_U8, TMP2, 0, SLJIT_MEM2(TMP1, TMP2), 3);
OP2(SLJIT_SHL, STACK_TOP, 0, STACK_TOP, 0, SLJIT_IMM, 2);
- OP1(SLJIT_MOV_UI, TMP1, 0, SLJIT_MEM1(STACK_TOP), (sljit_sw)PRIV(ucp_gbtable));
+ OP1(SLJIT_MOV_U32, TMP1, 0, SLJIT_MEM1(STACK_TOP), (sljit_sw)PRIV(ucp_gbtable));
OP1(SLJIT_MOV, STACK_TOP, 0, TMP2, 0);
OP2(SLJIT_SHL, TMP2, 0, SLJIT_IMM, 1, TMP2, 0);
OP2(SLJIT_AND | SLJIT_SET_E, SLJIT_UNUSED, 0, TMP1, 0, TMP2, 0);
@@ -5502,173 +6435,14 @@ switch(type)
return cc;
#endif
- case OP_EODN:
- /* Requires rather complex checks. */
- jump[0] = CMP(SLJIT_GREATER_EQUAL, STR_PTR, 0, STR_END, 0);
- if (common->nltype == NLTYPE_FIXED && common->newline > 255)
- {
- OP2(SLJIT_ADD, TMP2, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(2));
- OP1(MOV_UCHAR, TMP1, 0, SLJIT_MEM1(STR_PTR), IN_UCHARS(0));
- if (common->mode == JIT_COMPILE)
- add_jump(compiler, backtracks, CMP(SLJIT_NOT_EQUAL, TMP2, 0, STR_END, 0));
- else
- {
- jump[1] = CMP(SLJIT_EQUAL, TMP2, 0, STR_END, 0);
- OP2(SLJIT_SUB | SLJIT_SET_U, SLJIT_UNUSED, 0, TMP2, 0, STR_END, 0);
- OP_FLAGS(SLJIT_MOV, TMP2, 0, SLJIT_UNUSED, 0, SLJIT_LESS);
- OP2(SLJIT_SUB | SLJIT_SET_E, SLJIT_UNUSED, 0, TMP1, 0, SLJIT_IMM, (common->newline >> 8) & 0xff);
- OP_FLAGS(SLJIT_OR | SLJIT_SET_E, TMP2, 0, TMP2, 0, SLJIT_NOT_EQUAL);
- add_jump(compiler, backtracks, JUMP(SLJIT_NOT_EQUAL));
- check_partial(common, TRUE);
- add_jump(compiler, backtracks, JUMP(SLJIT_JUMP));
- JUMPHERE(jump[1]);
- }
- OP1(MOV_UCHAR, TMP2, 0, SLJIT_MEM1(STR_PTR), IN_UCHARS(1));
- add_jump(compiler, backtracks, CMP(SLJIT_NOT_EQUAL, TMP1, 0, SLJIT_IMM, (common->newline >> 8) & 0xff));
- add_jump(compiler, backtracks, CMP(SLJIT_NOT_EQUAL, TMP2, 0, SLJIT_IMM, common->newline & 0xff));
- }
- else if (common->nltype == NLTYPE_FIXED)
- {
- OP2(SLJIT_ADD, TMP2, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(1));
- OP1(MOV_UCHAR, TMP1, 0, SLJIT_MEM1(STR_PTR), IN_UCHARS(0));
- add_jump(compiler, backtracks, CMP(SLJIT_NOT_EQUAL, TMP2, 0, STR_END, 0));
- add_jump(compiler, backtracks, CMP(SLJIT_NOT_EQUAL, TMP1, 0, SLJIT_IMM, common->newline));
- }
- else
- {
- OP1(MOV_UCHAR, TMP1, 0, SLJIT_MEM1(STR_PTR), IN_UCHARS(0));
- jump[1] = CMP(SLJIT_NOT_EQUAL, TMP1, 0, SLJIT_IMM, CHAR_CR);
- OP2(SLJIT_ADD, TMP2, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(2));
- OP2(SLJIT_SUB | SLJIT_SET_U, SLJIT_UNUSED, 0, TMP2, 0, STR_END, 0);
- jump[2] = JUMP(SLJIT_GREATER);
- add_jump(compiler, backtracks, JUMP(SLJIT_LESS));
- /* Equal. */
- OP1(MOV_UCHAR, TMP1, 0, SLJIT_MEM1(STR_PTR), IN_UCHARS(1));
- jump[3] = CMP(SLJIT_EQUAL, TMP1, 0, SLJIT_IMM, CHAR_NL);
- add_jump(compiler, backtracks, JUMP(SLJIT_JUMP));
-
- JUMPHERE(jump[1]);
- if (common->nltype == NLTYPE_ANYCRLF)
- {
- OP2(SLJIT_ADD, TMP2, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(1));
- add_jump(compiler, backtracks, CMP(SLJIT_LESS, TMP2, 0, STR_END, 0));
- add_jump(compiler, backtracks, CMP(SLJIT_NOT_EQUAL, TMP1, 0, SLJIT_IMM, CHAR_NL));
- }
- else
- {
- OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), LOCALS1, STR_PTR, 0);
- read_char_range(common, common->nlmin, common->nlmax, TRUE);
- add_jump(compiler, backtracks, CMP(SLJIT_NOT_EQUAL, STR_PTR, 0, STR_END, 0));
- add_jump(compiler, &common->anynewline, JUMP(SLJIT_FAST_CALL));
- add_jump(compiler, backtracks, JUMP(SLJIT_ZERO));
- OP1(SLJIT_MOV, STR_PTR, 0, SLJIT_MEM1(SLJIT_SP), LOCALS1);
- }
- JUMPHERE(jump[2]);
- JUMPHERE(jump[3]);
- }
- JUMPHERE(jump[0]);
- check_partial(common, FALSE);
- return cc;
-
- case OP_EOD:
- add_jump(compiler, backtracks, CMP(SLJIT_LESS, STR_PTR, 0, STR_END, 0));
- check_partial(common, FALSE);
- return cc;
-
- case OP_CIRC:
- OP1(SLJIT_MOV, TMP2, 0, ARGUMENTS, 0);
- OP1(SLJIT_MOV, TMP1, 0, SLJIT_MEM1(TMP2), SLJIT_OFFSETOF(jit_arguments, begin));
- add_jump(compiler, backtracks, CMP(SLJIT_GREATER, STR_PTR, 0, TMP1, 0));
- OP1(SLJIT_MOV_UB, TMP2, 0, SLJIT_MEM1(TMP2), SLJIT_OFFSETOF(jit_arguments, notbol));
- add_jump(compiler, backtracks, CMP(SLJIT_NOT_EQUAL, TMP2, 0, SLJIT_IMM, 0));
- return cc;
-
- case OP_CIRCM:
- OP1(SLJIT_MOV, TMP2, 0, ARGUMENTS, 0);
- OP1(SLJIT_MOV, TMP1, 0, SLJIT_MEM1(TMP2), SLJIT_OFFSETOF(jit_arguments, begin));
- jump[1] = CMP(SLJIT_GREATER, STR_PTR, 0, TMP1, 0);
- OP1(SLJIT_MOV_UB, TMP2, 0, SLJIT_MEM1(TMP2), SLJIT_OFFSETOF(jit_arguments, notbol));
- add_jump(compiler, backtracks, CMP(SLJIT_NOT_EQUAL, TMP2, 0, SLJIT_IMM, 0));
- jump[0] = JUMP(SLJIT_JUMP);
- JUMPHERE(jump[1]);
-
- add_jump(compiler, backtracks, CMP(SLJIT_GREATER_EQUAL, STR_PTR, 0, STR_END, 0));
- if (common->nltype == NLTYPE_FIXED && common->newline > 255)
- {
- OP2(SLJIT_SUB, TMP2, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(2));
- add_jump(compiler, backtracks, CMP(SLJIT_LESS, TMP2, 0, TMP1, 0));
- OP1(MOV_UCHAR, TMP1, 0, SLJIT_MEM1(STR_PTR), IN_UCHARS(-2));
- OP1(MOV_UCHAR, TMP2, 0, SLJIT_MEM1(STR_PTR), IN_UCHARS(-1));
- add_jump(compiler, backtracks, CMP(SLJIT_NOT_EQUAL, TMP1, 0, SLJIT_IMM, (common->newline >> 8) & 0xff));
- add_jump(compiler, backtracks, CMP(SLJIT_NOT_EQUAL, TMP2, 0, SLJIT_IMM, common->newline & 0xff));
- }
- else
- {
- skip_char_back(common);
- read_char_range(common, common->nlmin, common->nlmax, TRUE);
- check_newlinechar(common, common->nltype, backtracks, FALSE);
- }
- JUMPHERE(jump[0]);
- return cc;
-
- case OP_DOLL:
- OP1(SLJIT_MOV, TMP2, 0, ARGUMENTS, 0);
- OP1(SLJIT_MOV_UB, TMP2, 0, SLJIT_MEM1(TMP2), SLJIT_OFFSETOF(jit_arguments, noteol));
- add_jump(compiler, backtracks, CMP(SLJIT_NOT_EQUAL, TMP2, 0, SLJIT_IMM, 0));
-
- if (!common->endonly)
- compile_char1_matchingpath(common, OP_EODN, cc, backtracks);
- else
- {
- add_jump(compiler, backtracks, CMP(SLJIT_LESS, STR_PTR, 0, STR_END, 0));
- check_partial(common, FALSE);
- }
- return cc;
-
- case OP_DOLLM:
- jump[1] = CMP(SLJIT_LESS, STR_PTR, 0, STR_END, 0);
- OP1(SLJIT_MOV, TMP2, 0, ARGUMENTS, 0);
- OP1(SLJIT_MOV_UB, TMP2, 0, SLJIT_MEM1(TMP2), SLJIT_OFFSETOF(jit_arguments, noteol));
- add_jump(compiler, backtracks, CMP(SLJIT_NOT_EQUAL, TMP2, 0, SLJIT_IMM, 0));
- check_partial(common, FALSE);
- jump[0] = JUMP(SLJIT_JUMP);
- JUMPHERE(jump[1]);
-
- if (common->nltype == NLTYPE_FIXED && common->newline > 255)
- {
- OP2(SLJIT_ADD, TMP2, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(2));
- OP1(MOV_UCHAR, TMP1, 0, SLJIT_MEM1(STR_PTR), IN_UCHARS(0));
- if (common->mode == JIT_COMPILE)
- add_jump(compiler, backtracks, CMP(SLJIT_GREATER, TMP2, 0, STR_END, 0));
- else
- {
- jump[1] = CMP(SLJIT_LESS_EQUAL, TMP2, 0, STR_END, 0);
- /* STR_PTR = STR_END - IN_UCHARS(1) */
- add_jump(compiler, backtracks, CMP(SLJIT_NOT_EQUAL, TMP1, 0, SLJIT_IMM, (common->newline >> 8) & 0xff));
- check_partial(common, TRUE);
- add_jump(compiler, backtracks, JUMP(SLJIT_JUMP));
- JUMPHERE(jump[1]);
- }
-
- OP1(MOV_UCHAR, TMP2, 0, SLJIT_MEM1(STR_PTR), IN_UCHARS(1));
- add_jump(compiler, backtracks, CMP(SLJIT_NOT_EQUAL, TMP1, 0, SLJIT_IMM, (common->newline >> 8) & 0xff));
- add_jump(compiler, backtracks, CMP(SLJIT_NOT_EQUAL, TMP2, 0, SLJIT_IMM, common->newline & 0xff));
- }
- else
- {
- peek_char(common, common->nlmax);
- check_newlinechar(common, common->nltype, backtracks, FALSE);
- }
- JUMPHERE(jump[0]);
- return cc;
-
case OP_CHAR:
case OP_CHARI:
length = 1;
#ifdef SUPPORT_UTF
if (common->utf && HAS_EXTRALEN(*cc)) length += GET_EXTRALEN(*cc);
#endif
- if (common->mode == JIT_COMPILE && (type == OP_CHAR || !char_has_othercase(common, cc) || char_get_othercase_bit(common, cc) != 0))
+ if (common->mode == JIT_COMPILE && check_str_ptr
+ && (type == OP_CHAR || !char_has_othercase(common, cc) || char_get_othercase_bit(common, cc) != 0))
{
OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(length));
add_jump(compiler, backtracks, CMP(SLJIT_GREATER, STR_PTR, 0, STR_END, 0));
@@ -5681,7 +6455,8 @@ switch(type)
return byte_sequence_compare(common, type == OP_CHARI, cc, &context, backtracks);
}
- detect_partial_match(common, backtracks);
+ if (check_str_ptr)
+ detect_partial_match(common, backtracks);
#ifdef SUPPORT_UTF
if (common->utf)
{
@@ -5713,7 +6488,8 @@ switch(type)
case OP_NOT:
case OP_NOTI:
- detect_partial_match(common, backtracks);
+ if (check_str_ptr)
+ detect_partial_match(common, backtracks);
length = 1;
#ifdef SUPPORT_UTF
if (common->utf)
@@ -5722,7 +6498,7 @@ switch(type)
c = *cc;
if (c < 128)
{
- OP1(SLJIT_MOV_UB, TMP1, 0, SLJIT_MEM1(STR_PTR), 0);
+ OP1(SLJIT_MOV_U8, TMP1, 0, SLJIT_MEM1(STR_PTR), 0);
if (type == OP_NOT || !char_has_othercase(common, cc))
add_jump(compiler, backtracks, CMP(SLJIT_EQUAL, TMP1, 0, SLJIT_IMM, c));
else
@@ -5774,16 +6550,17 @@ switch(type)
case OP_CLASS:
case OP_NCLASS:
- detect_partial_match(common, backtracks);
+ if (check_str_ptr)
+ detect_partial_match(common, backtracks);
#if defined SUPPORT_UTF && defined COMPILE_PCRE8
- bit = (common->utf && is_char7_bitset((const pcre_uint8 *)cc, type == OP_NCLASS)) ? 127 : 255;
+ bit = (common->utf && is_char7_bitset((const sljit_u8 *)cc, type == OP_NCLASS)) ? 127 : 255;
read_char_range(common, 0, bit, type == OP_NCLASS);
#else
read_char_range(common, 0, 255, type == OP_NCLASS);
#endif
- if (check_class_ranges(common, (const pcre_uint8 *)cc, type == OP_NCLASS, FALSE, backtracks))
+ if (check_class_ranges(common, (const sljit_u8 *)cc, type == OP_NCLASS, FALSE, backtracks))
return cc + 32 / sizeof(pcre_uchar);
#if defined SUPPORT_UTF && defined COMPILE_PCRE8
@@ -5808,7 +6585,7 @@ switch(type)
OP2(SLJIT_AND, TMP2, 0, TMP1, 0, SLJIT_IMM, 0x7);
OP2(SLJIT_LSHR, TMP1, 0, TMP1, 0, SLJIT_IMM, 3);
- OP1(SLJIT_MOV_UB, TMP1, 0, SLJIT_MEM1(TMP1), (sljit_sw)cc);
+ OP1(SLJIT_MOV_U8, TMP1, 0, SLJIT_MEM1(TMP1), (sljit_sw)cc);
OP2(SLJIT_SHL, TMP2, 0, SLJIT_IMM, 1, TMP2, 0);
OP2(SLJIT_AND | SLJIT_SET_E, SLJIT_UNUSED, 0, TMP1, 0, TMP2, 0);
add_jump(compiler, backtracks, JUMP(SLJIT_ZERO));
@@ -5817,40 +6594,15 @@ switch(type)
if (jump[0] != NULL)
JUMPHERE(jump[0]);
#endif
-
return cc + 32 / sizeof(pcre_uchar);
#if defined SUPPORT_UTF || defined COMPILE_PCRE16 || defined COMPILE_PCRE32
case OP_XCLASS:
+ if (check_str_ptr)
+ detect_partial_match(common, backtracks);
compile_xclass_matchingpath(common, cc + LINK_SIZE, backtracks);
return cc + GET(cc, 0) - 1;
#endif
-
- case OP_REVERSE:
- length = GET(cc, 0);
- if (length == 0)
- return cc + LINK_SIZE;
- OP1(SLJIT_MOV, TMP1, 0, ARGUMENTS, 0);
-#ifdef SUPPORT_UTF
- if (common->utf)
- {
- OP1(SLJIT_MOV, TMP3, 0, SLJIT_MEM1(TMP1), SLJIT_OFFSETOF(jit_arguments, begin));
- OP1(SLJIT_MOV, TMP2, 0, SLJIT_IMM, length);
- label = LABEL();
- add_jump(compiler, backtracks, CMP(SLJIT_LESS_EQUAL, STR_PTR, 0, TMP3, 0));
- skip_char_back(common);
- OP2(SLJIT_SUB | SLJIT_SET_E, TMP2, 0, TMP2, 0, SLJIT_IMM, 1);
- JUMPTO(SLJIT_NOT_ZERO, label);
- }
- else
-#endif
- {
- OP1(SLJIT_MOV, TMP1, 0, SLJIT_MEM1(TMP1), SLJIT_OFFSETOF(jit_arguments, begin));
- OP2(SLJIT_SUB, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(length));
- add_jump(compiler, backtracks, CMP(SLJIT_LESS, STR_PTR, 0, TMP1, 0));
- }
- check_start_used_ptr(common);
- return cc + LINK_SIZE;
}
SLJIT_ASSERT_STOP();
return cc;
@@ -5919,7 +6671,7 @@ if (context.length > 0)
}
/* A non-fixed length character will be checked if length == 0. */
-return compile_char1_matchingpath(common, *cc, cc + 1, backtracks);
+return compile_char1_matchingpath(common, *cc, cc + 1, backtracks, TRUE);
}
/* Forward definitions. */
@@ -6094,7 +6846,7 @@ pcre_uchar *ccbegin = cc;
int min = 0, max = 0;
BOOL minimize;
-PUSH_BACKTRACK(sizeof(iterator_backtrack), cc, NULL);
+PUSH_BACKTRACK(sizeof(ref_iterator_backtrack), cc, NULL);
if (ref)
offset = GET2(cc, 1) << 1;
@@ -6214,7 +6966,7 @@ if (!minimize)
}
JUMPHERE(zerolength);
- BACKTRACK_AS(iterator_backtrack)->matchingpath = LABEL();
+ BACKTRACK_AS(ref_iterator_backtrack)->matchingpath = LABEL();
count_match(common);
return cc;
@@ -6260,7 +7012,7 @@ else
}
}
-BACKTRACK_AS(iterator_backtrack)->matchingpath = LABEL();
+BACKTRACK_AS(ref_iterator_backtrack)->matchingpath = LABEL();
if (max > 0)
add_jump(compiler, &backtrack->topbacktracks, CMP(SLJIT_GREATER_EQUAL, SLJIT_MEM1(STACK_TOP), STACK(1), SLJIT_IMM, max));
@@ -6274,7 +7026,7 @@ if (min > 1)
OP1(SLJIT_MOV, TMP1, 0, SLJIT_MEM1(STACK_TOP), STACK(1));
OP2(SLJIT_ADD, TMP1, 0, TMP1, 0, SLJIT_IMM, 1);
OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), STACK(1), TMP1, 0);
- CMPTO(SLJIT_LESS, TMP1, 0, SLJIT_IMM, min, BACKTRACK_AS(iterator_backtrack)->matchingpath);
+ CMPTO(SLJIT_LESS, TMP1, 0, SLJIT_IMM, min, BACKTRACK_AS(ref_iterator_backtrack)->matchingpath);
}
else if (max > 0)
OP2(SLJIT_ADD, SLJIT_MEM1(STACK_TOP), STACK(1), SLJIT_MEM1(STACK_TOP), STACK(1), SLJIT_IMM, 1);
@@ -6419,8 +7171,8 @@ allocate_stack(common, CALLOUT_ARG_SIZE / sizeof(sljit_sw));
SLJIT_ASSERT(common->capture_last_ptr != 0);
OP1(SLJIT_MOV, TMP2, 0, SLJIT_MEM1(SLJIT_SP), common->capture_last_ptr);
OP1(SLJIT_MOV, TMP1, 0, ARGUMENTS, 0);
-OP1(SLJIT_MOV_SI, SLJIT_MEM1(STACK_TOP), CALLOUT_ARG_OFFSET(callout_number), SLJIT_IMM, cc[1]);
-OP1(SLJIT_MOV_SI, SLJIT_MEM1(STACK_TOP), CALLOUT_ARG_OFFSET(capture_last), TMP2, 0);
+OP1(SLJIT_MOV_S32, SLJIT_MEM1(STACK_TOP), CALLOUT_ARG_OFFSET(callout_number), SLJIT_IMM, cc[1]);
+OP1(SLJIT_MOV_S32, SLJIT_MEM1(STACK_TOP), CALLOUT_ARG_OFFSET(capture_last), TMP2, 0);
/* These pointer sized fields temporarily store internal variables. */
OP1(SLJIT_MOV, TMP2, 0, SLJIT_MEM1(SLJIT_SP), OVECTOR(0));
@@ -6429,8 +7181,8 @@ OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), CALLOUT_ARG_OFFSET(subject), TMP2, 0);
if (common->mark_ptr != 0)
OP1(SLJIT_MOV, TMP2, 0, SLJIT_MEM1(TMP1), SLJIT_OFFSETOF(jit_arguments, mark_ptr));
-OP1(SLJIT_MOV_SI, SLJIT_MEM1(STACK_TOP), CALLOUT_ARG_OFFSET(pattern_position), SLJIT_IMM, GET(cc, 2));
-OP1(SLJIT_MOV_SI, SLJIT_MEM1(STACK_TOP), CALLOUT_ARG_OFFSET(next_item_length), SLJIT_IMM, GET(cc, 2 + LINK_SIZE));
+OP1(SLJIT_MOV_S32, SLJIT_MEM1(STACK_TOP), CALLOUT_ARG_OFFSET(pattern_position), SLJIT_IMM, GET(cc, 2));
+OP1(SLJIT_MOV_S32, SLJIT_MEM1(STACK_TOP), CALLOUT_ARG_OFFSET(next_item_length), SLJIT_IMM, GET(cc, 2 + LINK_SIZE));
OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), CALLOUT_ARG_OFFSET(mark), (common->mark_ptr != 0) ? TMP2 : SLJIT_IMM, 0);
/* Needed to save important temporary registers. */
@@ -6438,7 +7190,7 @@ OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), LOCALS0, STACK_TOP, 0);
OP2(SLJIT_SUB, SLJIT_R1, 0, STACK_TOP, 0, SLJIT_IMM, CALLOUT_ARG_SIZE);
GET_LOCAL_BASE(SLJIT_R2, 0, OVECTOR_START);
sljit_emit_ijump(compiler, SLJIT_CALL3, SLJIT_IMM, SLJIT_FUNC_OFFSET(do_callout));
-OP1(SLJIT_MOV_SI, SLJIT_RETURN_REG, 0, SLJIT_RETURN_REG, 0);
+OP1(SLJIT_MOV_S32, SLJIT_RETURN_REG, 0, SLJIT_RETURN_REG, 0);
OP1(SLJIT_MOV, STACK_TOP, 0, SLJIT_MEM1(SLJIT_SP), LOCALS0);
free_stack(common, CALLOUT_ARG_SIZE / sizeof(sljit_sw));
@@ -6455,6 +7207,32 @@ return cc + 2 + 2 * LINK_SIZE;
#undef CALLOUT_ARG_SIZE
#undef CALLOUT_ARG_OFFSET
+static SLJIT_INLINE BOOL assert_needs_str_ptr_saving(pcre_uchar *cc)
+{
+while (TRUE)
+ {
+ switch (*cc)
+ {
+ case OP_NOT_WORD_BOUNDARY:
+ case OP_WORD_BOUNDARY:
+ case OP_CIRC:
+ case OP_CIRCM:
+ case OP_DOLL:
+ case OP_DOLLM:
+ case OP_CALLOUT:
+ case OP_ALT:
+ cc += PRIV(OP_lengths)[*cc];
+ break;
+
+ case OP_KET:
+ return FALSE;
+
+ default:
+ return TRUE;
+ }
+ }
+}
+
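/* Editorial note (inferred from the call site in compile_assert_matchingpath
below; not part of this patch): when every opcode up to the closing OP_KET is
one of the zero-width checks listed above, the assertion cannot move STR_PTR,
so the caller may set extrasize to 0 and skip saving STR_PTR on the stack. */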
static pcre_uchar *compile_assert_matchingpath(compiler_common *common, pcre_uchar *cc, assert_backtrack *backtrack, BOOL conditional)
{
DEFINE_COMPILER;
@@ -6511,15 +7289,28 @@ if (bra == OP_BRAMINZERO)
if (framesize < 0)
{
- extrasize = needs_control_head ? 2 : 1;
+ extrasize = 1;
+ if (bra == OP_BRA && !assert_needs_str_ptr_saving(ccbegin + 1 + LINK_SIZE))
+ extrasize = 0;
+
+ if (needs_control_head)
+ extrasize++;
+
if (framesize == no_frame)
OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), private_data_ptr, STACK_TOP, 0);
- allocate_stack(common, extrasize);
+
+ if (extrasize > 0)
+ allocate_stack(common, extrasize);
+
if (needs_control_head)
OP1(SLJIT_MOV, TMP1, 0, SLJIT_MEM1(SLJIT_SP), common->control_head_ptr);
- OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), STACK(0), STR_PTR, 0);
+
+ if (extrasize > 0)
+ OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), STACK(0), STR_PTR, 0);
+
if (needs_control_head)
{
+ SLJIT_ASSERT(extrasize == 2);
OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), common->control_head_ptr, SLJIT_IMM, 0);
OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), STACK(1), TMP1, 0);
}
@@ -6528,12 +7319,14 @@ else
{
extrasize = needs_control_head ? 3 : 2;
allocate_stack(common, framesize + extrasize);
+
OP1(SLJIT_MOV, TMP1, 0, SLJIT_MEM1(SLJIT_SP), private_data_ptr);
OP2(SLJIT_SUB, TMP2, 0, STACK_TOP, 0, SLJIT_IMM, (framesize + extrasize) * sizeof(sljit_sw));
OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), private_data_ptr, TMP2, 0);
if (needs_control_head)
OP1(SLJIT_MOV, TMP2, 0, SLJIT_MEM1(SLJIT_SP), common->control_head_ptr);
OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), STACK(0), STR_PTR, 0);
+
if (needs_control_head)
{
OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), STACK(2), TMP1, 0);
@@ -6542,6 +7335,7 @@ else
}
else
OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), STACK(1), TMP1, 0);
+
init_frame(common, ccbegin, NULL, framesize + extrasize - 1, extrasize, FALSE);
}
@@ -6565,7 +7359,7 @@ while (1)
altbacktrack.top = NULL;
altbacktrack.topbacktracks = NULL;
- if (*ccbegin == OP_ALT)
+ if (*ccbegin == OP_ALT && extrasize > 0)
OP1(SLJIT_MOV, STR_PTR, 0, SLJIT_MEM1(STACK_TOP), STACK(0));
altbacktrack.cc = ccbegin;
@@ -6594,8 +7388,9 @@ while (1)
{
if (framesize == no_frame)
OP1(SLJIT_MOV, STACK_TOP, 0, SLJIT_MEM1(SLJIT_SP), private_data_ptr);
- else
+ else if (extrasize > 0)
free_stack(common, extrasize);
+
if (needs_control_head)
OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), common->control_head_ptr, SLJIT_MEM1(STACK_TOP), 0);
}
@@ -6621,7 +7416,10 @@ while (1)
{
/* We know that STR_PTR was stored on the top of the stack. */
if (conditional)
- OP1(SLJIT_MOV, STR_PTR, 0, SLJIT_MEM1(STACK_TOP), needs_control_head ? sizeof(sljit_sw) : 0);
+ {
+ if (extrasize > 0)
+ OP1(SLJIT_MOV, STR_PTR, 0, SLJIT_MEM1(STACK_TOP), needs_control_head ? sizeof(sljit_sw) : 0);
+ }
else if (bra == OP_BRAZERO)
{
if (framesize < 0)
@@ -6698,7 +7496,7 @@ if (needs_control_head)
if (opcode == OP_ASSERT || opcode == OP_ASSERTBACK)
{
/* Assert is failed. */
- if (conditional || bra == OP_BRAZERO)
+ if ((conditional && extrasize > 0) || bra == OP_BRAZERO)
OP1(SLJIT_MOV, STR_PTR, 0, SLJIT_MEM1(STACK_TOP), STACK(0));
if (framesize < 0)
@@ -6710,7 +7508,7 @@ if (opcode == OP_ASSERT || opcode == OP_ASSERTBACK)
free_stack(common, 1);
OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), STACK(0), SLJIT_IMM, 0);
}
- else
+ else if (extrasize > 0)
free_stack(common, extrasize);
}
else
@@ -6735,7 +7533,9 @@ if (opcode == OP_ASSERT || opcode == OP_ASSERTBACK)
if (framesize < 0)
{
/* We know that STR_PTR was stored on the top of the stack. */
- OP1(SLJIT_MOV, STR_PTR, 0, SLJIT_MEM1(STACK_TOP), (extrasize - 1) * sizeof(sljit_sw));
+ if (extrasize > 0)
+ OP1(SLJIT_MOV, STR_PTR, 0, SLJIT_MEM1(STACK_TOP), (extrasize - 1) * sizeof(sljit_sw));
+
/* Keep the STR_PTR on the top of the stack. */
if (bra == OP_BRAZERO)
{
@@ -6798,14 +7598,16 @@ else
/* AssertNot is successful. */
if (framesize < 0)
{
- OP1(SLJIT_MOV, STR_PTR, 0, SLJIT_MEM1(STACK_TOP), STACK(0));
+ if (extrasize > 0)
+ OP1(SLJIT_MOV, STR_PTR, 0, SLJIT_MEM1(STACK_TOP), STACK(0));
+
if (bra != OP_BRA)
{
if (extrasize == 2)
free_stack(common, 1);
OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), STACK(0), SLJIT_IMM, 0);
}
- else
+ else if (extrasize > 0)
free_stack(common, extrasize);
}
else
@@ -6867,7 +7669,9 @@ if (framesize < 0)
stacksize = needs_control_head ? 1 : 0;
if (ket != OP_KET || has_alternatives)
stacksize++;
- free_stack(common, stacksize);
+
+ if (stacksize > 0)
+ free_stack(common, stacksize);
}
if (needs_control_head)
@@ -7513,9 +8317,13 @@ while (*cc == OP_ALT)
cc += GET(cc, 1);
cc += 1 + LINK_SIZE;
-/* Temporarily encoding the needs_control_head in framesize. */
if (opcode == OP_ONCE)
+ {
+ /* We temporarily encode the needs_control_head in the lowest bit.
+     Note: on the target architectures of SLJIT, ((x << 1) >> 1) returns
+ the same value for small signed numbers (including negative numbers). */
BACKTRACK_AS(bracket_backtrack)->u.framesize = (BACKTRACK_AS(bracket_backtrack)->u.framesize << 1) | (needs_control_head ? 1 : 0);
+ }
return cc + repeat_length;
}
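A minimal sketch of the round trip this packing relies on; shifting negative values like this is not guaranteed by ISO C, but, as the comment notes, it holds on the architectures SLJIT targets (arithmetic right shift):

#include <assert.h>

int main(void)
{
int framesize = -2;            /* e.g. the negative no_frame-style values */
int needs_control_head = 1;
int packed = (framesize << 1) | needs_control_head;   /* -3 on these targets */

assert((packed >> 1) == framesize);                    /* arithmetic shift: -3 >> 1 == -2 */
assert((packed & 1) == needs_control_head);
return 0;
}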
@@ -7802,11 +8610,13 @@ count_match(common);
return cc + 1 + LINK_SIZE;
}
-static SLJIT_INLINE pcre_uchar *get_iterator_parameters(compiler_common *common, pcre_uchar *cc, pcre_uchar *opcode, pcre_uchar *type, int *max, int *min, pcre_uchar **end)
+static SLJIT_INLINE pcre_uchar *get_iterator_parameters(compiler_common *common, pcre_uchar *cc, pcre_uchar *opcode, pcre_uchar *type, sljit_u32 *max, sljit_u32 *exact, pcre_uchar **end)
{
int class_len;
*opcode = *cc;
+*exact = 0;
+
if (*opcode >= OP_STAR && *opcode <= OP_POSUPTO)
{
cc++;
@@ -7834,7 +8644,7 @@ else if (*opcode >= OP_TYPESTAR && *opcode <= OP_TYPEPOSUPTO)
{
cc++;
*opcode -= OP_TYPESTAR - OP_STAR;
- *type = 0;
+ *type = OP_END;
}
else
{
@@ -7843,60 +8653,105 @@ else
cc++;
class_len = (*type < OP_XCLASS) ? (int)(1 + (32 / sizeof(pcre_uchar))) : GET(cc, 0);
*opcode = cc[class_len - 1];
+
if (*opcode >= OP_CRSTAR && *opcode <= OP_CRMINQUERY)
{
*opcode -= OP_CRSTAR - OP_STAR;
- if (end != NULL)
- *end = cc + class_len;
+ *end = cc + class_len;
+
+ if (*opcode == OP_PLUS || *opcode == OP_MINPLUS)
+ {
+ *exact = 1;
+ *opcode -= OP_PLUS - OP_STAR;
+ }
}
else if (*opcode >= OP_CRPOSSTAR && *opcode <= OP_CRPOSQUERY)
{
*opcode -= OP_CRPOSSTAR - OP_POSSTAR;
- if (end != NULL)
- *end = cc + class_len;
+ *end = cc + class_len;
+
+ if (*opcode == OP_POSPLUS)
+ {
+ *exact = 1;
+ *opcode = OP_POSSTAR;
+ }
}
else
{
SLJIT_ASSERT(*opcode == OP_CRRANGE || *opcode == OP_CRMINRANGE || *opcode == OP_CRPOSRANGE);
*max = GET2(cc, (class_len + IMM2_SIZE));
- *min = GET2(cc, class_len);
+ *exact = GET2(cc, class_len);
- if (*min == 0)
+ if (*max == 0)
{
- SLJIT_ASSERT(*max != 0);
- *opcode = (*opcode == OP_CRRANGE) ? OP_UPTO : (*opcode == OP_CRMINRANGE ? OP_MINUPTO : OP_POSUPTO);
+ if (*opcode == OP_CRPOSRANGE)
+ *opcode = OP_POSSTAR;
+ else
+ *opcode -= OP_CRRANGE - OP_STAR;
}
- if (*max == *min)
- *opcode = OP_EXACT;
-
- if (end != NULL)
- *end = cc + class_len + 2 * IMM2_SIZE;
+ else
+ {
+ *max -= *exact;
+ if (*max == 0)
+ *opcode = OP_EXACT;
+ else if (*max == 1)
+ {
+ if (*opcode == OP_CRPOSRANGE)
+ *opcode = OP_POSQUERY;
+ else
+ *opcode -= OP_CRRANGE - OP_QUERY;
+ }
+ else
+ {
+ if (*opcode == OP_CRPOSRANGE)
+ *opcode = OP_POSUPTO;
+ else
+ *opcode -= OP_CRRANGE - OP_UPTO;
+ }
+ }
+ *end = cc + class_len + 2 * IMM2_SIZE;
}
return cc;
}
-if (*opcode == OP_UPTO || *opcode == OP_MINUPTO || *opcode == OP_EXACT || *opcode == OP_POSUPTO)
+switch(*opcode)
{
+ case OP_EXACT:
+ *exact = GET2(cc, 0);
+ cc += IMM2_SIZE;
+ break;
+
+ case OP_PLUS:
+ case OP_MINPLUS:
+ *exact = 1;
+ *opcode -= OP_PLUS - OP_STAR;
+ break;
+
+ case OP_POSPLUS:
+ *exact = 1;
+ *opcode = OP_POSSTAR;
+ break;
+
+ case OP_UPTO:
+ case OP_MINUPTO:
+ case OP_POSUPTO:
*max = GET2(cc, 0);
cc += IMM2_SIZE;
+ break;
}
-if (*type == 0)
+if (*type == OP_END)
{
*type = *cc;
- if (end != NULL)
- *end = next_opcode(common, cc);
+ *end = next_opcode(common, cc);
cc++;
return cc;
}
-if (end != NULL)
- {
- *end = cc + 1;
+*end = cc + 1;
#ifdef SUPPORT_UTF
- if (common->utf && HAS_EXTRALEN(*cc)) *end += GET_EXTRALEN(*cc);
+if (common->utf && HAS_EXTRALEN(*cc)) *end += GET_EXTRALEN(*cc);
#endif
- }
return cc;
}
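With this rewrite get_iterator_parameters() reports the mandatory prefix of a repeat in *exact and keeps only the optional remainder in *max (0 meaning unbounded). A minimal sketch of that split for bracketed repeats (the output strings are illustrative; the real code also distinguishes the minimizing and possessive variants):

#include <stdio.h>

static void split_repeat(unsigned min, unsigned max)   /* max == 0 means "no upper bound" */
{
unsigned exact = min;

if (max == 0)
  printf("{%u,}   -> EXACT %u + STAR\n", min, exact);
else if (max - exact == 0)
  printf("{%u,%u} -> EXACT %u\n", min, max, exact);
else if (max - exact == 1)
  printf("{%u,%u} -> EXACT %u + QUERY\n", min, max, exact);
else
  printf("{%u,%u} -> EXACT %u + UPTO %u\n", min, max, exact, max - exact);
}

int main(void)
{
split_repeat(2, 5);   /* EXACT 2 + UPTO 3 */
split_repeat(3, 3);   /* EXACT 3 */
split_repeat(1, 0);   /* EXACT 1 + STAR; PLUS is lowered the same way */
return 0;
}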
@@ -7906,9 +8761,15 @@ DEFINE_COMPILER;
backtrack_common *backtrack;
pcre_uchar opcode;
pcre_uchar type;
-int max = -1, min = -1;
+sljit_u32 max = 0, exact;
+BOOL fast_fail;
+sljit_s32 fast_str_ptr;
+BOOL charpos_enabled;
+pcre_uchar charpos_char;
+unsigned int charpos_othercasebit;
pcre_uchar *end;
-jump_list *nomatch = NULL;
+jump_list *no_match = NULL;
+jump_list *no_char1_match = NULL;
struct sljit_jump *jump = NULL;
struct sljit_label *label;
int private_data_ptr = PRIVATE_DATA(cc);
@@ -7917,83 +8778,92 @@ int offset0 = (private_data_ptr == 0) ? STACK(0) : private_data_ptr;
int offset1 = (private_data_ptr == 0) ? STACK(1) : private_data_ptr + (int)sizeof(sljit_sw);
int tmp_base, tmp_offset;
-PUSH_BACKTRACK(sizeof(iterator_backtrack), cc, NULL);
+PUSH_BACKTRACK(sizeof(char_iterator_backtrack), cc, NULL);
-cc = get_iterator_parameters(common, cc, &opcode, &type, &max, &min, &end);
+fast_str_ptr = PRIVATE_DATA(cc + 1);
+fast_fail = TRUE;
-switch(type)
+SLJIT_ASSERT(common->fast_forward_bc_ptr == NULL || fast_str_ptr == 0 || cc == common->fast_forward_bc_ptr);
+
+if (cc == common->fast_forward_bc_ptr)
+ fast_fail = FALSE;
+else if (common->fast_fail_start_ptr == 0)
+ fast_str_ptr = 0;
+
+SLJIT_ASSERT(common->fast_forward_bc_ptr != NULL || fast_str_ptr == 0
+ || (fast_str_ptr >= common->fast_fail_start_ptr && fast_str_ptr <= common->fast_fail_end_ptr));
+
+cc = get_iterator_parameters(common, cc, &opcode, &type, &max, &exact, &end);
+
+if (type != OP_EXTUNI)
{
- case OP_NOT_DIGIT:
- case OP_DIGIT:
- case OP_NOT_WHITESPACE:
- case OP_WHITESPACE:
- case OP_NOT_WORDCHAR:
- case OP_WORDCHAR:
- case OP_ANY:
- case OP_ALLANY:
- case OP_ANYBYTE:
- case OP_ANYNL:
- case OP_NOT_HSPACE:
- case OP_HSPACE:
- case OP_NOT_VSPACE:
- case OP_VSPACE:
- case OP_CHAR:
- case OP_CHARI:
- case OP_NOT:
- case OP_NOTI:
- case OP_CLASS:
- case OP_NCLASS:
tmp_base = TMP3;
tmp_offset = 0;
- break;
-
- default:
- SLJIT_ASSERT_STOP();
- /* Fall through. */
-
- case OP_EXTUNI:
- case OP_XCLASS:
- case OP_NOTPROP:
- case OP_PROP:
+ }
+else
+ {
tmp_base = SLJIT_MEM1(SLJIT_SP);
tmp_offset = POSSESSIVE0;
- break;
}
+if (fast_fail && fast_str_ptr != 0)
+ add_jump(compiler, &backtrack->topbacktracks, CMP(SLJIT_LESS_EQUAL, STR_PTR, 0, SLJIT_MEM1(SLJIT_SP), fast_str_ptr));
+
+/* Handle fixed part first. */
+if (exact > 1)
+ {
+ SLJIT_ASSERT(fast_str_ptr == 0);
+ if (common->mode == JIT_COMPILE
+#ifdef SUPPORT_UTF
+ && !common->utf
+#endif
+ )
+ {
+ OP2(SLJIT_ADD, TMP1, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(exact));
+ add_jump(compiler, &backtrack->topbacktracks, CMP(SLJIT_GREATER, TMP1, 0, STR_END, 0));
+ OP1(SLJIT_MOV, tmp_base, tmp_offset, SLJIT_IMM, exact);
+ label = LABEL();
+ compile_char1_matchingpath(common, type, cc, &backtrack->topbacktracks, FALSE);
+ OP2(SLJIT_SUB | SLJIT_SET_E, tmp_base, tmp_offset, tmp_base, tmp_offset, SLJIT_IMM, 1);
+ JUMPTO(SLJIT_NOT_ZERO, label);
+ }
+ else
+ {
+ OP1(SLJIT_MOV, tmp_base, tmp_offset, SLJIT_IMM, exact);
+ label = LABEL();
+ compile_char1_matchingpath(common, type, cc, &backtrack->topbacktracks, TRUE);
+ OP2(SLJIT_SUB | SLJIT_SET_E, tmp_base, tmp_offset, tmp_base, tmp_offset, SLJIT_IMM, 1);
+ JUMPTO(SLJIT_NOT_ZERO, label);
+ }
+ }
+else if (exact == 1)
+ compile_char1_matchingpath(common, type, cc, &backtrack->topbacktracks, TRUE);
+
switch(opcode)
{
case OP_STAR:
- case OP_PLUS:
case OP_UPTO:
- case OP_CRRANGE:
+ SLJIT_ASSERT(fast_str_ptr == 0 || opcode == OP_STAR);
+
if (type == OP_ANYNL || type == OP_EXTUNI)
{
SLJIT_ASSERT(private_data_ptr == 0);
- if (opcode == OP_STAR || opcode == OP_UPTO)
- {
- allocate_stack(common, 2);
- OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), STACK(0), STR_PTR, 0);
- OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), STACK(1), SLJIT_IMM, 0);
- }
- else
- {
- allocate_stack(common, 1);
- OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), STACK(0), SLJIT_IMM, 0);
- }
+ SLJIT_ASSERT(fast_str_ptr == 0);
+
+ allocate_stack(common, 2);
+ OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), STACK(0), STR_PTR, 0);
+ OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), STACK(1), SLJIT_IMM, 0);
- if (opcode == OP_UPTO || opcode == OP_CRRANGE)
- OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), POSSESSIVE0, SLJIT_IMM, 0);
+ if (opcode == OP_UPTO)
+ OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), POSSESSIVE0, SLJIT_IMM, max);
label = LABEL();
- compile_char1_matchingpath(common, type, cc, &backtrack->topbacktracks);
- if (opcode == OP_UPTO || opcode == OP_CRRANGE)
+ compile_char1_matchingpath(common, type, cc, &BACKTRACK_AS(char_iterator_backtrack)->u.backtracks, TRUE);
+ if (opcode == OP_UPTO)
{
OP1(SLJIT_MOV, TMP1, 0, SLJIT_MEM1(SLJIT_SP), POSSESSIVE0);
- OP2(SLJIT_ADD, TMP1, 0, TMP1, 0, SLJIT_IMM, 1);
- if (opcode == OP_CRRANGE && min > 0)
- CMPTO(SLJIT_LESS, TMP1, 0, SLJIT_IMM, min, label);
- if (opcode == OP_UPTO || (opcode == OP_CRRANGE && max > 0))
- jump = CMP(SLJIT_GREATER_EQUAL, TMP1, 0, SLJIT_IMM, max);
+ OP2(SLJIT_SUB | SLJIT_SET_E, TMP1, 0, TMP1, 0, SLJIT_IMM, 1);
+ jump = JUMP(SLJIT_ZERO);
OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), POSSESSIVE0, TMP1, 0);
}
@@ -8006,134 +8876,268 @@ switch(opcode)
}
else
{
- if (opcode == OP_PLUS)
- compile_char1_matchingpath(common, type, cc, &backtrack->topbacktracks);
- if (private_data_ptr == 0)
- allocate_stack(common, 2);
- OP1(SLJIT_MOV, base, offset0, STR_PTR, 0);
- if (opcode <= OP_PLUS)
+ charpos_enabled = FALSE;
+ charpos_char = 0;
+ charpos_othercasebit = 0;
+
+ if ((type != OP_CHAR && type != OP_CHARI) && (*end == OP_CHAR || *end == OP_CHARI))
+ {
+ charpos_enabled = TRUE;
+#ifdef SUPPORT_UTF
+ charpos_enabled = !common->utf || !HAS_EXTRALEN(end[1]);
+#endif
+ if (charpos_enabled && *end == OP_CHARI && char_has_othercase(common, end + 1))
+ {
+ charpos_othercasebit = char_get_othercase_bit(common, end + 1);
+ if (charpos_othercasebit == 0)
+ charpos_enabled = FALSE;
+ }
+
+ if (charpos_enabled)
+ {
+ charpos_char = end[1];
+          /* Consume the OP_CHAR opcode. */
+ end += 2;
+#if defined COMPILE_PCRE8
+ SLJIT_ASSERT((charpos_othercasebit >> 8) == 0);
+#elif defined COMPILE_PCRE16 || defined COMPILE_PCRE32
+ SLJIT_ASSERT((charpos_othercasebit >> 9) == 0);
+ if ((charpos_othercasebit & 0x100) != 0)
+ charpos_othercasebit = (charpos_othercasebit & 0xff) << 8;
+#endif
+ if (charpos_othercasebit != 0)
+ charpos_char |= charpos_othercasebit;
+
+ BACKTRACK_AS(char_iterator_backtrack)->u.charpos.enabled = TRUE;
+ BACKTRACK_AS(char_iterator_backtrack)->u.charpos.chr = charpos_char;
+ BACKTRACK_AS(char_iterator_backtrack)->u.charpos.othercasebit = charpos_othercasebit;
+ }
+ }
+
+ if (charpos_enabled)
+ {
+ if (opcode == OP_UPTO)
+ OP1(SLJIT_MOV, tmp_base, tmp_offset, SLJIT_IMM, max + 1);
+
+ /* Search the first instance of charpos_char. */
+ jump = JUMP(SLJIT_JUMP);
+ label = LABEL();
+ if (opcode == OP_UPTO)
+ {
+ OP2(SLJIT_SUB | SLJIT_SET_E, tmp_base, tmp_offset, tmp_base, tmp_offset, SLJIT_IMM, 1);
+ add_jump(compiler, &backtrack->topbacktracks, JUMP(SLJIT_ZERO));
+ }
+ compile_char1_matchingpath(common, type, cc, &backtrack->topbacktracks, FALSE);
+ if (fast_str_ptr != 0)
+ OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), fast_str_ptr, STR_PTR, 0);
+ JUMPHERE(jump);
+
+ detect_partial_match(common, &backtrack->topbacktracks);
+ OP1(MOV_UCHAR, TMP1, 0, SLJIT_MEM1(STR_PTR), IN_UCHARS(0));
+ if (charpos_othercasebit != 0)
+ OP2(SLJIT_OR, TMP1, 0, TMP1, 0, SLJIT_IMM, charpos_othercasebit);
+ CMPTO(SLJIT_NOT_EQUAL, TMP1, 0, SLJIT_IMM, charpos_char, label);
+
+ if (private_data_ptr == 0)
+ allocate_stack(common, 2);
+ OP1(SLJIT_MOV, base, offset0, STR_PTR, 0);
OP1(SLJIT_MOV, base, offset1, STR_PTR, 0);
- else
- OP1(SLJIT_MOV, base, offset1, SLJIT_IMM, 1);
- label = LABEL();
- compile_char1_matchingpath(common, type, cc, &nomatch);
- OP1(SLJIT_MOV, base, offset0, STR_PTR, 0);
- if (opcode <= OP_PLUS)
- JUMPTO(SLJIT_JUMP, label);
- else if (opcode == OP_CRRANGE && max == 0)
+ if (opcode == OP_UPTO)
+ {
+ OP2(SLJIT_SUB | SLJIT_SET_E, tmp_base, tmp_offset, tmp_base, tmp_offset, SLJIT_IMM, 1);
+ add_jump(compiler, &no_match, JUMP(SLJIT_ZERO));
+ }
+
+ /* Search the last instance of charpos_char. */
+ label = LABEL();
+ compile_char1_matchingpath(common, type, cc, &no_match, FALSE);
+ if (fast_str_ptr != 0)
+ OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), fast_str_ptr, STR_PTR, 0);
+ detect_partial_match(common, &no_match);
+ OP1(MOV_UCHAR, TMP1, 0, SLJIT_MEM1(STR_PTR), IN_UCHARS(0));
+ if (charpos_othercasebit != 0)
+ OP2(SLJIT_OR, TMP1, 0, TMP1, 0, SLJIT_IMM, charpos_othercasebit);
+ if (opcode == OP_STAR)
+ {
+ CMPTO(SLJIT_NOT_EQUAL, TMP1, 0, SLJIT_IMM, charpos_char, label);
+ OP1(SLJIT_MOV, base, offset0, STR_PTR, 0);
+ }
+ else
+ {
+ jump = CMP(SLJIT_NOT_EQUAL, TMP1, 0, SLJIT_IMM, charpos_char);
+ OP1(SLJIT_MOV, base, offset0, STR_PTR, 0);
+ JUMPHERE(jump);
+ }
+
+ if (opcode == OP_UPTO)
+ {
+ OP2(SLJIT_SUB | SLJIT_SET_E, tmp_base, tmp_offset, tmp_base, tmp_offset, SLJIT_IMM, 1);
+ JUMPTO(SLJIT_NOT_ZERO, label);
+ }
+ else
+ JUMPTO(SLJIT_JUMP, label);
+
+ set_jumps(no_match, LABEL());
+ OP1(SLJIT_MOV, STR_PTR, 0, base, offset0);
+ OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(1));
+ OP1(SLJIT_MOV, base, offset0, STR_PTR, 0);
+ }
+#if defined SUPPORT_UTF && !defined COMPILE_PCRE32
+ else if (common->utf)
{
- OP2(SLJIT_ADD, base, offset1, base, offset1, SLJIT_IMM, 1);
- JUMPTO(SLJIT_JUMP, label);
+ if (private_data_ptr == 0)
+ allocate_stack(common, 2);
+
+ OP1(SLJIT_MOV, base, offset0, STR_PTR, 0);
+ OP1(SLJIT_MOV, base, offset1, STR_PTR, 0);
+
+ if (opcode == OP_UPTO)
+ OP1(SLJIT_MOV, tmp_base, tmp_offset, SLJIT_IMM, max);
+
+ label = LABEL();
+ compile_char1_matchingpath(common, type, cc, &no_match, TRUE);
+ OP1(SLJIT_MOV, base, offset0, STR_PTR, 0);
+
+ if (opcode == OP_UPTO)
+ {
+ OP2(SLJIT_SUB | SLJIT_SET_E, tmp_base, tmp_offset, tmp_base, tmp_offset, SLJIT_IMM, 1);
+ JUMPTO(SLJIT_NOT_ZERO, label);
+ }
+ else
+ JUMPTO(SLJIT_JUMP, label);
+
+ set_jumps(no_match, LABEL());
+ OP1(SLJIT_MOV, STR_PTR, 0, base, offset0);
+ if (fast_str_ptr != 0)
+ OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), fast_str_ptr, STR_PTR, 0);
}
+#endif
else
{
- OP1(SLJIT_MOV, TMP1, 0, base, offset1);
- OP2(SLJIT_ADD, TMP1, 0, TMP1, 0, SLJIT_IMM, 1);
- OP1(SLJIT_MOV, base, offset1, TMP1, 0);
- CMPTO(SLJIT_LESS, TMP1, 0, SLJIT_IMM, max + 1, label);
+ if (private_data_ptr == 0)
+ allocate_stack(common, 2);
+
+ OP1(SLJIT_MOV, base, offset1, STR_PTR, 0);
+ if (opcode == OP_UPTO)
+ OP1(SLJIT_MOV, tmp_base, tmp_offset, SLJIT_IMM, max);
+
+ label = LABEL();
+ detect_partial_match(common, &no_match);
+ compile_char1_matchingpath(common, type, cc, &no_char1_match, FALSE);
+ if (opcode == OP_UPTO)
+ {
+ OP2(SLJIT_SUB | SLJIT_SET_E, tmp_base, tmp_offset, tmp_base, tmp_offset, SLJIT_IMM, 1);
+ JUMPTO(SLJIT_NOT_ZERO, label);
+ OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(1));
+ }
+ else
+ JUMPTO(SLJIT_JUMP, label);
+
+ set_jumps(no_char1_match, LABEL());
+ OP2(SLJIT_SUB, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(1));
+ set_jumps(no_match, LABEL());
+ OP1(SLJIT_MOV, base, offset0, STR_PTR, 0);
+ if (fast_str_ptr != 0)
+ OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), fast_str_ptr, STR_PTR, 0);
}
- set_jumps(nomatch, LABEL());
- if (opcode == OP_CRRANGE)
- add_jump(compiler, &backtrack->topbacktracks, CMP(SLJIT_LESS, base, offset1, SLJIT_IMM, min + 1));
- OP1(SLJIT_MOV, STR_PTR, 0, base, offset0);
}
- BACKTRACK_AS(iterator_backtrack)->matchingpath = LABEL();
+ BACKTRACK_AS(char_iterator_backtrack)->matchingpath = LABEL();
break;
case OP_MINSTAR:
- case OP_MINPLUS:
- if (opcode == OP_MINPLUS)
- compile_char1_matchingpath(common, type, cc, &backtrack->topbacktracks);
if (private_data_ptr == 0)
allocate_stack(common, 1);
OP1(SLJIT_MOV, base, offset0, STR_PTR, 0);
- BACKTRACK_AS(iterator_backtrack)->matchingpath = LABEL();
+ BACKTRACK_AS(char_iterator_backtrack)->matchingpath = LABEL();
+ if (fast_str_ptr != 0)
+ OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), fast_str_ptr, STR_PTR, 0);
break;
case OP_MINUPTO:
- case OP_CRMINRANGE:
+ SLJIT_ASSERT(fast_str_ptr == 0);
if (private_data_ptr == 0)
allocate_stack(common, 2);
OP1(SLJIT_MOV, base, offset0, STR_PTR, 0);
- OP1(SLJIT_MOV, base, offset1, SLJIT_IMM, 1);
- if (opcode == OP_CRMINRANGE)
- add_jump(compiler, &backtrack->topbacktracks, JUMP(SLJIT_JUMP));
- BACKTRACK_AS(iterator_backtrack)->matchingpath = LABEL();
+ OP1(SLJIT_MOV, base, offset1, SLJIT_IMM, max + 1);
+ BACKTRACK_AS(char_iterator_backtrack)->matchingpath = LABEL();
break;
case OP_QUERY:
case OP_MINQUERY:
+ SLJIT_ASSERT(fast_str_ptr == 0);
if (private_data_ptr == 0)
allocate_stack(common, 1);
OP1(SLJIT_MOV, base, offset0, STR_PTR, 0);
if (opcode == OP_QUERY)
- compile_char1_matchingpath(common, type, cc, &backtrack->topbacktracks);
- BACKTRACK_AS(iterator_backtrack)->matchingpath = LABEL();
+ compile_char1_matchingpath(common, type, cc, &BACKTRACK_AS(char_iterator_backtrack)->u.backtracks, TRUE);
+ BACKTRACK_AS(char_iterator_backtrack)->matchingpath = LABEL();
break;
case OP_EXACT:
- OP1(SLJIT_MOV, tmp_base, tmp_offset, SLJIT_IMM, max);
- label = LABEL();
- compile_char1_matchingpath(common, type, cc, &backtrack->topbacktracks);
- OP2(SLJIT_SUB | SLJIT_SET_E, tmp_base, tmp_offset, tmp_base, tmp_offset, SLJIT_IMM, 1);
- JUMPTO(SLJIT_NOT_ZERO, label);
break;
case OP_POSSTAR:
- case OP_POSPLUS:
- case OP_POSUPTO:
- if (opcode == OP_POSPLUS)
- compile_char1_matchingpath(common, type, cc, &backtrack->topbacktracks);
- if (opcode == OP_POSUPTO)
- OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), POSSESSIVE1, SLJIT_IMM, max);
- OP1(SLJIT_MOV, tmp_base, tmp_offset, STR_PTR, 0);
- label = LABEL();
- compile_char1_matchingpath(common, type, cc, &nomatch);
- OP1(SLJIT_MOV, tmp_base, tmp_offset, STR_PTR, 0);
- if (opcode != OP_POSUPTO)
- JUMPTO(SLJIT_JUMP, label);
- else
+#if defined SUPPORT_UTF && !defined COMPILE_PCRE32
+ if (common->utf)
{
- OP2(SLJIT_SUB | SLJIT_SET_E, SLJIT_MEM1(SLJIT_SP), POSSESSIVE1, SLJIT_MEM1(SLJIT_SP), POSSESSIVE1, SLJIT_IMM, 1);
- JUMPTO(SLJIT_NOT_ZERO, label);
+ OP1(SLJIT_MOV, tmp_base, tmp_offset, STR_PTR, 0);
+ label = LABEL();
+ compile_char1_matchingpath(common, type, cc, &no_match, TRUE);
+ OP1(SLJIT_MOV, tmp_base, tmp_offset, STR_PTR, 0);
+ JUMPTO(SLJIT_JUMP, label);
+ set_jumps(no_match, LABEL());
+ OP1(SLJIT_MOV, STR_PTR, 0, tmp_base, tmp_offset);
+ if (fast_str_ptr != 0)
+ OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), fast_str_ptr, STR_PTR, 0);
+ break;
}
- set_jumps(nomatch, LABEL());
- OP1(SLJIT_MOV, STR_PTR, 0, tmp_base, tmp_offset);
- break;
-
- case OP_POSQUERY:
- OP1(SLJIT_MOV, tmp_base, tmp_offset, STR_PTR, 0);
- compile_char1_matchingpath(common, type, cc, &nomatch);
- OP1(SLJIT_MOV, tmp_base, tmp_offset, STR_PTR, 0);
- set_jumps(nomatch, LABEL());
- OP1(SLJIT_MOV, STR_PTR, 0, tmp_base, tmp_offset);
+#endif
+ label = LABEL();
+ detect_partial_match(common, &no_match);
+ compile_char1_matchingpath(common, type, cc, &no_char1_match, FALSE);
+ JUMPTO(SLJIT_JUMP, label);
+ set_jumps(no_char1_match, LABEL());
+ OP2(SLJIT_SUB, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(1));
+ set_jumps(no_match, LABEL());
+ if (fast_str_ptr != 0)
+ OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), fast_str_ptr, STR_PTR, 0);
break;
- case OP_CRPOSRANGE:
- /* Combination of OP_EXACT and OP_POSSTAR or OP_POSUPTO */
- OP1(SLJIT_MOV, tmp_base, tmp_offset, SLJIT_IMM, min);
+ case OP_POSUPTO:
+ SLJIT_ASSERT(fast_str_ptr == 0);
+#if defined SUPPORT_UTF && !defined COMPILE_PCRE32
+ if (common->utf)
+ {
+ OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), POSSESSIVE1, STR_PTR, 0);
+ OP1(SLJIT_MOV, tmp_base, tmp_offset, SLJIT_IMM, max);
+ label = LABEL();
+ compile_char1_matchingpath(common, type, cc, &no_match, TRUE);
+ OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), POSSESSIVE1, STR_PTR, 0);
+ OP2(SLJIT_SUB | SLJIT_SET_E, tmp_base, tmp_offset, tmp_base, tmp_offset, SLJIT_IMM, 1);
+ JUMPTO(SLJIT_NOT_ZERO, label);
+ set_jumps(no_match, LABEL());
+ OP1(SLJIT_MOV, STR_PTR, 0, SLJIT_MEM1(SLJIT_SP), POSSESSIVE1);
+ break;
+ }
+#endif
+ OP1(SLJIT_MOV, tmp_base, tmp_offset, SLJIT_IMM, max);
label = LABEL();
- compile_char1_matchingpath(common, type, cc, &backtrack->topbacktracks);
+ detect_partial_match(common, &no_match);
+ compile_char1_matchingpath(common, type, cc, &no_char1_match, FALSE);
OP2(SLJIT_SUB | SLJIT_SET_E, tmp_base, tmp_offset, tmp_base, tmp_offset, SLJIT_IMM, 1);
JUMPTO(SLJIT_NOT_ZERO, label);
+ OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(1));
+ set_jumps(no_char1_match, LABEL());
+ OP2(SLJIT_SUB, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(1));
+ set_jumps(no_match, LABEL());
+ break;
- if (max != 0)
- {
- SLJIT_ASSERT(max - min > 0);
- OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), POSSESSIVE1, SLJIT_IMM, max - min);
- }
+ case OP_POSQUERY:
+ SLJIT_ASSERT(fast_str_ptr == 0);
OP1(SLJIT_MOV, tmp_base, tmp_offset, STR_PTR, 0);
- label = LABEL();
- compile_char1_matchingpath(common, type, cc, &nomatch);
+ compile_char1_matchingpath(common, type, cc, &no_match, TRUE);
OP1(SLJIT_MOV, tmp_base, tmp_offset, STR_PTR, 0);
- if (max == 0)
- JUMPTO(SLJIT_JUMP, label);
- else
- {
- OP2(SLJIT_SUB | SLJIT_SET_E, SLJIT_MEM1(SLJIT_SP), POSSESSIVE1, SLJIT_MEM1(SLJIT_SP), POSSESSIVE1, SLJIT_IMM, 1);
- JUMPTO(SLJIT_NOT_ZERO, label);
- }
- set_jumps(nomatch, LABEL());
+ set_jumps(no_match, LABEL());
OP1(SLJIT_MOV, STR_PTR, 0, tmp_base, tmp_offset);
break;
@@ -8174,9 +9178,9 @@ if (common->accept_label == NULL)
else
CMPTO(SLJIT_NOT_EQUAL, STR_PTR, 0, SLJIT_MEM1(SLJIT_SP), OVECTOR(0), common->accept_label);
OP1(SLJIT_MOV, TMP1, 0, ARGUMENTS, 0);
-OP1(SLJIT_MOV_UB, TMP2, 0, SLJIT_MEM1(TMP1), SLJIT_OFFSETOF(jit_arguments, notempty));
+OP1(SLJIT_MOV_U8, TMP2, 0, SLJIT_MEM1(TMP1), SLJIT_OFFSETOF(jit_arguments, notempty));
add_jump(compiler, &backtrack->topbacktracks, CMP(SLJIT_NOT_EQUAL, TMP2, 0, SLJIT_IMM, 0));
-OP1(SLJIT_MOV_UB, TMP2, 0, SLJIT_MEM1(TMP1), SLJIT_OFFSETOF(jit_arguments, notempty_atstart));
+OP1(SLJIT_MOV_U8, TMP2, 0, SLJIT_MEM1(TMP1), SLJIT_OFFSETOF(jit_arguments, notempty_atstart));
if (common->accept_label == NULL)
add_jump(compiler, &common->accept, CMP(SLJIT_EQUAL, TMP2, 0, SLJIT_IMM, 0));
else
@@ -8298,6 +9302,16 @@ while (cc < ccend)
case OP_SOM:
case OP_NOT_WORD_BOUNDARY:
case OP_WORD_BOUNDARY:
+ case OP_EODN:
+ case OP_EOD:
+ case OP_DOLL:
+ case OP_DOLLM:
+ case OP_CIRC:
+ case OP_CIRCM:
+ case OP_REVERSE:
+ cc = compile_simple_assertion_matchingpath(common, *cc, cc + 1, parent->top != NULL ? &parent->top->nextbacktracks : &parent->topbacktracks);
+ break;
+
case OP_NOT_DIGIT:
case OP_DIGIT:
case OP_NOT_WHITESPACE:
@@ -8315,16 +9329,9 @@ while (cc < ccend)
case OP_NOT_VSPACE:
case OP_VSPACE:
case OP_EXTUNI:
- case OP_EODN:
- case OP_EOD:
- case OP_CIRC:
- case OP_CIRCM:
- case OP_DOLL:
- case OP_DOLLM:
case OP_NOT:
case OP_NOTI:
- case OP_REVERSE:
- cc = compile_char1_matchingpath(common, *cc, cc + 1, parent->top != NULL ? &parent->top->nextbacktracks : &parent->topbacktracks);
+ cc = compile_char1_matchingpath(common, *cc, cc + 1, parent->top != NULL ? &parent->top->nextbacktracks : &parent->topbacktracks, TRUE);
break;
case OP_SET_SOM:
@@ -8341,7 +9348,7 @@ while (cc < ccend)
if (common->mode == JIT_COMPILE)
cc = compile_charn_matchingpath(common, cc, ccend, parent->top != NULL ? &parent->top->nextbacktracks : &parent->topbacktracks);
else
- cc = compile_char1_matchingpath(common, *cc, cc + 1, parent->top != NULL ? &parent->top->nextbacktracks : &parent->topbacktracks);
+ cc = compile_char1_matchingpath(common, *cc, cc + 1, parent->top != NULL ? &parent->top->nextbacktracks : &parent->topbacktracks, TRUE);
break;
case OP_STAR:
@@ -8417,7 +9424,7 @@ while (cc < ccend)
if (cc[1 + (32 / sizeof(pcre_uchar))] >= OP_CRSTAR && cc[1 + (32 / sizeof(pcre_uchar))] <= OP_CRPOSRANGE)
cc = compile_iterator_matchingpath(common, cc, parent);
else
- cc = compile_char1_matchingpath(common, *cc, cc + 1, parent->top != NULL ? &parent->top->nextbacktracks : &parent->topbacktracks);
+ cc = compile_char1_matchingpath(common, *cc, cc + 1, parent->top != NULL ? &parent->top->nextbacktracks : &parent->topbacktracks, TRUE);
break;
#if defined SUPPORT_UTF || defined COMPILE_PCRE16 || defined COMPILE_PCRE32
@@ -8425,7 +9432,7 @@ while (cc < ccend)
if (*(cc + GET(cc, 1)) >= OP_CRSTAR && *(cc + GET(cc, 1)) <= OP_CRPOSRANGE)
cc = compile_iterator_matchingpath(common, cc, parent);
else
- cc = compile_char1_matchingpath(common, *cc, cc + 1, parent->top != NULL ? &parent->top->nextbacktracks : &parent->topbacktracks);
+ cc = compile_char1_matchingpath(common, *cc, cc + 1, parent->top != NULL ? &parent->top->nextbacktracks : &parent->topbacktracks, TRUE);
break;
#endif
@@ -8601,95 +9608,82 @@ DEFINE_COMPILER;
pcre_uchar *cc = current->cc;
pcre_uchar opcode;
pcre_uchar type;
-int max = -1, min = -1;
+sljit_u32 max = 0, exact;
struct sljit_label *label = NULL;
struct sljit_jump *jump = NULL;
jump_list *jumplist = NULL;
+pcre_uchar *end;
int private_data_ptr = PRIVATE_DATA(cc);
int base = (private_data_ptr == 0) ? SLJIT_MEM1(STACK_TOP) : SLJIT_MEM1(SLJIT_SP);
int offset0 = (private_data_ptr == 0) ? STACK(0) : private_data_ptr;
int offset1 = (private_data_ptr == 0) ? STACK(1) : private_data_ptr + (int)sizeof(sljit_sw);
-cc = get_iterator_parameters(common, cc, &opcode, &type, &max, &min, NULL);
+cc = get_iterator_parameters(common, cc, &opcode, &type, &max, &exact, &end);
switch(opcode)
{
case OP_STAR:
- case OP_PLUS:
case OP_UPTO:
- case OP_CRRANGE:
if (type == OP_ANYNL || type == OP_EXTUNI)
{
SLJIT_ASSERT(private_data_ptr == 0);
- set_jumps(current->topbacktracks, LABEL());
+ set_jumps(CURRENT_AS(char_iterator_backtrack)->u.backtracks, LABEL());
OP1(SLJIT_MOV, STR_PTR, 0, SLJIT_MEM1(STACK_TOP), STACK(0));
free_stack(common, 1);
- CMPTO(SLJIT_NOT_EQUAL, STR_PTR, 0, SLJIT_IMM, 0, CURRENT_AS(iterator_backtrack)->matchingpath);
+ CMPTO(SLJIT_NOT_EQUAL, STR_PTR, 0, SLJIT_IMM, 0, CURRENT_AS(char_iterator_backtrack)->matchingpath);
}
else
{
- if (opcode == OP_UPTO)
- min = 0;
- if (opcode <= OP_PLUS)
+ if (CURRENT_AS(char_iterator_backtrack)->u.charpos.enabled)
{
OP1(SLJIT_MOV, STR_PTR, 0, base, offset0);
- jump = CMP(SLJIT_LESS_EQUAL, STR_PTR, 0, base, offset1);
+ OP1(SLJIT_MOV, TMP2, 0, base, offset1);
+ OP2(SLJIT_SUB, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(1));
+
+ jump = CMP(SLJIT_LESS_EQUAL, STR_PTR, 0, TMP2, 0);
+ label = LABEL();
+ OP1(MOV_UCHAR, TMP1, 0, SLJIT_MEM1(STR_PTR), IN_UCHARS(-1));
+ OP1(SLJIT_MOV, base, offset0, STR_PTR, 0);
+ if (CURRENT_AS(char_iterator_backtrack)->u.charpos.othercasebit != 0)
+ OP2(SLJIT_OR, TMP1, 0, TMP1, 0, SLJIT_IMM, CURRENT_AS(char_iterator_backtrack)->u.charpos.othercasebit);
+ CMPTO(SLJIT_EQUAL, TMP1, 0, SLJIT_IMM, CURRENT_AS(char_iterator_backtrack)->u.charpos.chr, CURRENT_AS(char_iterator_backtrack)->matchingpath);
+ skip_char_back(common);
+ CMPTO(SLJIT_GREATER, STR_PTR, 0, TMP2, 0, label);
}
else
{
- OP1(SLJIT_MOV, TMP1, 0, base, offset1);
OP1(SLJIT_MOV, STR_PTR, 0, base, offset0);
- jump = CMP(SLJIT_LESS_EQUAL, TMP1, 0, SLJIT_IMM, min + 1);
- OP2(SLJIT_SUB, base, offset1, TMP1, 0, SLJIT_IMM, 1);
+ jump = CMP(SLJIT_LESS_EQUAL, STR_PTR, 0, base, offset1);
+ skip_char_back(common);
+ OP1(SLJIT_MOV, base, offset0, STR_PTR, 0);
+ JUMPTO(SLJIT_JUMP, CURRENT_AS(char_iterator_backtrack)->matchingpath);
}
- skip_char_back(common);
- OP1(SLJIT_MOV, base, offset0, STR_PTR, 0);
- JUMPTO(SLJIT_JUMP, CURRENT_AS(iterator_backtrack)->matchingpath);
- if (opcode == OP_CRRANGE)
- set_jumps(current->topbacktracks, LABEL());
JUMPHERE(jump);
if (private_data_ptr == 0)
free_stack(common, 2);
- if (opcode == OP_PLUS)
- set_jumps(current->topbacktracks, LABEL());
}
break;
case OP_MINSTAR:
- case OP_MINPLUS:
OP1(SLJIT_MOV, STR_PTR, 0, base, offset0);
- compile_char1_matchingpath(common, type, cc, &jumplist);
+ compile_char1_matchingpath(common, type, cc, &jumplist, TRUE);
OP1(SLJIT_MOV, base, offset0, STR_PTR, 0);
- JUMPTO(SLJIT_JUMP, CURRENT_AS(iterator_backtrack)->matchingpath);
+ JUMPTO(SLJIT_JUMP, CURRENT_AS(char_iterator_backtrack)->matchingpath);
set_jumps(jumplist, LABEL());
if (private_data_ptr == 0)
free_stack(common, 1);
- if (opcode == OP_MINPLUS)
- set_jumps(current->topbacktracks, LABEL());
break;
case OP_MINUPTO:
- case OP_CRMINRANGE:
- if (opcode == OP_CRMINRANGE)
- {
- label = LABEL();
- set_jumps(current->topbacktracks, label);
- }
+ OP1(SLJIT_MOV, TMP1, 0, base, offset1);
OP1(SLJIT_MOV, STR_PTR, 0, base, offset0);
- compile_char1_matchingpath(common, type, cc, &jumplist);
+ OP2(SLJIT_SUB | SLJIT_SET_E, TMP1, 0, TMP1, 0, SLJIT_IMM, 1);
+ add_jump(compiler, &jumplist, JUMP(SLJIT_ZERO));
- OP1(SLJIT_MOV, TMP1, 0, base, offset1);
- OP1(SLJIT_MOV, base, offset0, STR_PTR, 0);
- OP2(SLJIT_ADD, TMP1, 0, TMP1, 0, SLJIT_IMM, 1);
OP1(SLJIT_MOV, base, offset1, TMP1, 0);
-
- if (opcode == OP_CRMINRANGE)
- CMPTO(SLJIT_LESS, TMP1, 0, SLJIT_IMM, min + 1, label);
-
- if (opcode == OP_CRMINRANGE && max == 0)
- JUMPTO(SLJIT_JUMP, CURRENT_AS(iterator_backtrack)->matchingpath);
- else
- CMPTO(SLJIT_LESS, TMP1, 0, SLJIT_IMM, max + 2, CURRENT_AS(iterator_backtrack)->matchingpath);
+ compile_char1_matchingpath(common, type, cc, &jumplist, TRUE);
+ OP1(SLJIT_MOV, base, offset0, STR_PTR, 0);
+ JUMPTO(SLJIT_JUMP, CURRENT_AS(char_iterator_backtrack)->matchingpath);
set_jumps(jumplist, LABEL());
if (private_data_ptr == 0)
@@ -8699,12 +9693,12 @@ switch(opcode)
case OP_QUERY:
OP1(SLJIT_MOV, STR_PTR, 0, base, offset0);
OP1(SLJIT_MOV, base, offset0, SLJIT_IMM, 0);
- CMPTO(SLJIT_NOT_EQUAL, STR_PTR, 0, SLJIT_IMM, 0, CURRENT_AS(iterator_backtrack)->matchingpath);
+ CMPTO(SLJIT_NOT_EQUAL, STR_PTR, 0, SLJIT_IMM, 0, CURRENT_AS(char_iterator_backtrack)->matchingpath);
jump = JUMP(SLJIT_JUMP);
- set_jumps(current->topbacktracks, LABEL());
+ set_jumps(CURRENT_AS(char_iterator_backtrack)->u.backtracks, LABEL());
OP1(SLJIT_MOV, STR_PTR, 0, base, offset0);
OP1(SLJIT_MOV, base, offset0, SLJIT_IMM, 0);
- JUMPTO(SLJIT_JUMP, CURRENT_AS(iterator_backtrack)->matchingpath);
+ JUMPTO(SLJIT_JUMP, CURRENT_AS(char_iterator_backtrack)->matchingpath);
JUMPHERE(jump);
if (private_data_ptr == 0)
free_stack(common, 1);
@@ -8714,8 +9708,8 @@ switch(opcode)
OP1(SLJIT_MOV, STR_PTR, 0, base, offset0);
OP1(SLJIT_MOV, base, offset0, SLJIT_IMM, 0);
jump = CMP(SLJIT_EQUAL, STR_PTR, 0, SLJIT_IMM, 0);
- compile_char1_matchingpath(common, type, cc, &jumplist);
- JUMPTO(SLJIT_JUMP, CURRENT_AS(iterator_backtrack)->matchingpath);
+ compile_char1_matchingpath(common, type, cc, &jumplist, TRUE);
+ JUMPTO(SLJIT_JUMP, CURRENT_AS(char_iterator_backtrack)->matchingpath);
set_jumps(jumplist, LABEL());
JUMPHERE(jump);
if (private_data_ptr == 0)
@@ -8723,11 +9717,6 @@ switch(opcode)
break;
case OP_EXACT:
- case OP_POSPLUS:
- case OP_CRPOSRANGE:
- set_jumps(current->topbacktracks, LABEL());
- break;
-
case OP_POSSTAR:
case OP_POSQUERY:
case OP_POSUPTO:
@@ -8737,6 +9726,8 @@ switch(opcode)
SLJIT_ASSERT_STOP();
break;
}
+
+set_jumps(current->topbacktracks, LABEL());
}
static SLJIT_INLINE void compile_ref_iterator_backtrackingpath(compiler_common *common, struct backtrack_common *current)
@@ -8754,12 +9745,12 @@ if ((type & 0x1) == 0)
set_jumps(current->topbacktracks, LABEL());
OP1(SLJIT_MOV, STR_PTR, 0, SLJIT_MEM1(STACK_TOP), STACK(0));
free_stack(common, 1);
- CMPTO(SLJIT_NOT_EQUAL, STR_PTR, 0, SLJIT_IMM, 0, CURRENT_AS(iterator_backtrack)->matchingpath);
+ CMPTO(SLJIT_NOT_EQUAL, STR_PTR, 0, SLJIT_IMM, 0, CURRENT_AS(ref_iterator_backtrack)->matchingpath);
return;
}
OP1(SLJIT_MOV, STR_PTR, 0, SLJIT_MEM1(STACK_TOP), STACK(0));
-CMPTO(SLJIT_NOT_EQUAL, STR_PTR, 0, SLJIT_IMM, 0, CURRENT_AS(iterator_backtrack)->matchingpath);
+CMPTO(SLJIT_NOT_EQUAL, STR_PTR, 0, SLJIT_IMM, 0, CURRENT_AS(ref_iterator_backtrack)->matchingpath);
set_jumps(current->topbacktracks, LABEL());
free_stack(common, ref ? 2 : 3);
}
@@ -9258,7 +10249,9 @@ else if (opcode == OP_ONCE)
/* The STR_PTR must be released. */
stacksize++;
}
- free_stack(common, stacksize);
+
+ if (stacksize > 0)
+ free_stack(common, stacksize);
JUMPHERE(once);
/* Restore previous private_data_ptr */
@@ -9688,8 +10681,8 @@ common->currententry->entry = LABEL();
set_jumps(common->currententry->calls, common->currententry->entry);
sljit_emit_fast_enter(compiler, TMP2, 0);
-allocate_stack(common, private_data_size + framesize + alternativesize);
count_match(common);
+allocate_stack(common, private_data_size + framesize + alternativesize);
OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), STACK(private_data_size + framesize + alternativesize - 1), TMP2, 0);
copy_private_data(common, ccbegin, ccend, TRUE, private_data_size + framesize + alternativesize, framesize + alternativesize, needs_control_head);
if (needs_control_head)
@@ -9795,7 +10788,7 @@ struct sljit_compiler *compiler;
backtrack_common rootbacktrack;
compiler_common common_data;
compiler_common *common = &common_data;
-const pcre_uint8 *tables = re->tables;
+const sljit_u8 *tables = re->tables;
pcre_study_data *study;
int private_data_size;
pcre_uchar *ccend;
@@ -9907,7 +10900,7 @@ ccend = bracketend(common->start);
/* Calculate the local space size on the stack. */
common->ovector_start = LIMIT_MATCH + sizeof(sljit_sw);
-common->optimized_cbracket = (pcre_uint8 *)SLJIT_MALLOC(re->top_bracket + 1, compiler->allocator_data);
+common->optimized_cbracket = (sljit_u8 *)SLJIT_MALLOC(re->top_bracket + 1, compiler->allocator_data);
if (!common->optimized_cbracket)
return;
#if defined DEBUG_FORCE_UNOPTIMIZED_CBRAS && DEBUG_FORCE_UNOPTIMIZED_CBRAS == 1
@@ -9942,15 +10935,10 @@ if (mode != JIT_COMPILE)
common->hit_start = common->ovector_start;
common->ovector_start += 2 * sizeof(sljit_sw);
}
- else
- {
- SLJIT_ASSERT(mode == JIT_PARTIAL_HARD_COMPILE);
- common->needs_start_ptr = TRUE;
- }
}
if ((re->options & PCRE_FIRSTLINE) != 0)
{
- common->first_line_end = common->ovector_start;
+ common->match_end_ptr = common->ovector_start;
common->ovector_start += sizeof(sljit_sw);
}
#if defined DEBUG_FORCE_CONTROL_HEAD && DEBUG_FORCE_CONTROL_HEAD
@@ -9961,14 +10949,12 @@ if (common->control_head_ptr != 0)
common->control_head_ptr = common->ovector_start;
common->ovector_start += sizeof(sljit_sw);
}
-if (common->needs_start_ptr && common->has_set_som)
+if (common->has_set_som)
{
/* Saving the real start pointer is necessary. */
common->start_ptr = common->ovector_start;
common->ovector_start += sizeof(sljit_sw);
}
-else
- common->needs_start_ptr = FALSE;
/* Aligning ovector to even number of sljit words. */
if ((common->ovector_start & sizeof(sljit_sw)) != 0)
@@ -9985,16 +10971,24 @@ SLJIT_ASSERT(!(common->req_char_ptr != 0 && common->start_used_ptr != 0));
common->cbra_ptr = OVECTOR_START + (re->top_bracket + 1) * 2 * sizeof(sljit_sw);
total_length = ccend - common->start;
-common->private_data_ptrs = (sljit_si *)SLJIT_MALLOC(total_length * (sizeof(sljit_si) + (common->has_then ? 1 : 0)), compiler->allocator_data);
+common->private_data_ptrs = (sljit_s32 *)SLJIT_MALLOC(total_length * (sizeof(sljit_s32) + (common->has_then ? 1 : 0)), compiler->allocator_data);
if (!common->private_data_ptrs)
{
SLJIT_FREE(common->optimized_cbracket, compiler->allocator_data);
return;
}
-memset(common->private_data_ptrs, 0, total_length * sizeof(sljit_si));
+memset(common->private_data_ptrs, 0, total_length * sizeof(sljit_s32));
private_data_size = common->cbra_ptr + (re->top_bracket + 1) * sizeof(sljit_sw);
set_private_data_ptrs(common, &private_data_size, ccend);
+if ((re->options & PCRE_ANCHORED) == 0 && (re->options & PCRE_NO_START_OPTIMIZE) == 0)
+ {
+ if (!detect_fast_forward_skip(common, &private_data_size) && !common->has_skip_in_assert_back)
+ detect_fast_fail(common, common->start, &private_data_size, 4);
+ }
+
+SLJIT_ASSERT(common->fast_fail_start_ptr <= common->fast_fail_end_ptr);
+
if (private_data_size > SLJIT_MAX_LOCAL_SIZE)
{
SLJIT_FREE(common->private_data_ptrs, compiler->allocator_data);
@@ -10004,7 +10998,7 @@ if (private_data_size > SLJIT_MAX_LOCAL_SIZE)
if (common->has_then)
{
- common->then_offsets = (pcre_uint8 *)(common->private_data_ptrs + total_length);
+ common->then_offsets = (sljit_u8 *)(common->private_data_ptrs + total_length);
memset(common->then_offsets, 0, total_length);
set_then_offsets(common, common->start, NULL);
}
@@ -10031,12 +11025,15 @@ OP1(SLJIT_MOV, TMP1, 0, SLJIT_S0, 0);
OP1(SLJIT_MOV, STR_PTR, 0, SLJIT_MEM1(TMP1), SLJIT_OFFSETOF(jit_arguments, str));
OP1(SLJIT_MOV, STR_END, 0, SLJIT_MEM1(TMP1), SLJIT_OFFSETOF(jit_arguments, end));
OP1(SLJIT_MOV, TMP2, 0, SLJIT_MEM1(TMP1), SLJIT_OFFSETOF(jit_arguments, stack));
-OP1(SLJIT_MOV_UI, TMP1, 0, SLJIT_MEM1(TMP1), SLJIT_OFFSETOF(jit_arguments, limit_match));
+OP1(SLJIT_MOV_U32, TMP1, 0, SLJIT_MEM1(TMP1), SLJIT_OFFSETOF(jit_arguments, limit_match));
OP1(SLJIT_MOV, STACK_TOP, 0, SLJIT_MEM1(TMP2), SLJIT_OFFSETOF(struct sljit_stack, base));
OP1(SLJIT_MOV, STACK_LIMIT, 0, SLJIT_MEM1(TMP2), SLJIT_OFFSETOF(struct sljit_stack, limit));
OP2(SLJIT_ADD, TMP1, 0, TMP1, 0, SLJIT_IMM, 1);
OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), LIMIT_MATCH, TMP1, 0);
+if (common->fast_fail_start_ptr < common->fast_fail_end_ptr)
+ reset_fast_fail(common);
+
if (mode == JIT_PARTIAL_SOFT_COMPILE)
OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), common->hit_start, SLJIT_IMM, -1);
if (common->mark_ptr != 0)
@@ -10047,19 +11044,19 @@ if (common->control_head_ptr != 0)
/* Main part of the matching */
if ((re->options & PCRE_ANCHORED) == 0)
{
- mainloop_label = mainloop_entry(common, (re->flags & PCRE_HASCRORLF) != 0, (re->options & PCRE_FIRSTLINE) != 0);
+ mainloop_label = mainloop_entry(common, (re->flags & PCRE_HASCRORLF) != 0);
continue_match_label = LABEL();
/* Forward search if possible. */
if ((re->options & PCRE_NO_START_OPTIMIZE) == 0)
{
- if (mode == JIT_COMPILE && fast_forward_first_n_chars(common, (re->options & PCRE_FIRSTLINE) != 0))
+ if (mode == JIT_COMPILE && fast_forward_first_n_chars(common))
;
else if ((re->flags & PCRE_FIRSTSET) != 0)
- fast_forward_first_char(common, (pcre_uchar)re->first_char, (re->flags & PCRE_FCH_CASELESS) != 0, (re->options & PCRE_FIRSTLINE) != 0);
+ fast_forward_first_char(common, (pcre_uchar)re->first_char, (re->flags & PCRE_FCH_CASELESS) != 0);
else if ((re->flags & PCRE_STARTLINE) != 0)
- fast_forward_newline(common, (re->options & PCRE_FIRSTLINE) != 0);
+ fast_forward_newline(common);
else if (study != NULL && (study->flags & PCRE_STUDY_MAPPED) != 0)
- fast_forward_start_bits(common, study->start_bits, (re->options & PCRE_FIRSTLINE) != 0);
+ fast_forward_start_bits(common, study->start_bits);
}
}
else
@@ -10080,14 +11077,11 @@ OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), OVECTOR(0), STR_PTR, 0);
OP1(SLJIT_MOV, COUNT_MATCH, 0, SLJIT_MEM1(SLJIT_SP), LIMIT_MATCH);
if (common->capture_last_ptr != 0)
OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), common->capture_last_ptr, SLJIT_IMM, -1);
+if (common->fast_forward_bc_ptr != NULL)
+ OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), PRIVATE_DATA(common->fast_forward_bc_ptr + 1), STR_PTR, 0);
-if (common->needs_start_ptr)
- {
- SLJIT_ASSERT(common->start_ptr != OVECTOR(0));
+if (common->start_ptr != OVECTOR(0))
OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), common->start_ptr, STR_PTR, 0);
- }
-else
- SLJIT_ASSERT(common->start_ptr == OVECTOR(0));
/* Copy the beginning of the string. */
if (mode == JIT_PARTIAL_SOFT_COMPILE)
@@ -10166,11 +11160,12 @@ if (mode == JIT_PARTIAL_SOFT_COMPILE)
/* Check we have remaining characters. */
if ((re->options & PCRE_ANCHORED) == 0 && (re->options & PCRE_FIRSTLINE) != 0)
{
- SLJIT_ASSERT(common->first_line_end != 0);
- OP1(SLJIT_MOV, TMP1, 0, SLJIT_MEM1(SLJIT_SP), common->first_line_end);
+ SLJIT_ASSERT(common->match_end_ptr != 0);
+ OP1(SLJIT_MOV, TMP1, 0, SLJIT_MEM1(SLJIT_SP), common->match_end_ptr);
}
-OP1(SLJIT_MOV, STR_PTR, 0, SLJIT_MEM1(SLJIT_SP), common->start_ptr);
+OP1(SLJIT_MOV, STR_PTR, 0, SLJIT_MEM1(SLJIT_SP),
+ (common->fast_forward_bc_ptr != NULL) ? (PRIVATE_DATA(common->fast_forward_bc_ptr + 1)) : common->start_ptr);
if ((re->options & PCRE_ANCHORED) == 0)
{
@@ -10181,12 +11176,7 @@ if ((re->options & PCRE_ANCHORED) == 0)
/* There cannot be more newlines here. */
}
else
- {
- if ((re->options & PCRE_FIRSTLINE) == 0)
- CMPTO(SLJIT_LESS, STR_PTR, 0, STR_END, 0, mainloop_label);
- else
- CMPTO(SLJIT_LESS, STR_PTR, 0, TMP1, 0, mainloop_label);
- }
+ CMPTO(SLJIT_LESS, STR_PTR, 0, ((re->options & PCRE_FIRSTLINE) == 0) ? STR_END : TMP1, 0, mainloop_label);
}
/* No more remaining characters. */
@@ -10205,15 +11195,18 @@ if (common->might_be_empty)
{
JUMPHERE(empty_match);
OP1(SLJIT_MOV, TMP1, 0, ARGUMENTS, 0);
- OP1(SLJIT_MOV_UB, TMP2, 0, SLJIT_MEM1(TMP1), SLJIT_OFFSETOF(jit_arguments, notempty));
+ OP1(SLJIT_MOV_U8, TMP2, 0, SLJIT_MEM1(TMP1), SLJIT_OFFSETOF(jit_arguments, notempty));
CMPTO(SLJIT_NOT_EQUAL, TMP2, 0, SLJIT_IMM, 0, empty_match_backtrack_label);
- OP1(SLJIT_MOV_UB, TMP2, 0, SLJIT_MEM1(TMP1), SLJIT_OFFSETOF(jit_arguments, notempty_atstart));
+ OP1(SLJIT_MOV_U8, TMP2, 0, SLJIT_MEM1(TMP1), SLJIT_OFFSETOF(jit_arguments, notempty_atstart));
CMPTO(SLJIT_EQUAL, TMP2, 0, SLJIT_IMM, 0, empty_match_found_label);
OP1(SLJIT_MOV, TMP2, 0, SLJIT_MEM1(TMP1), SLJIT_OFFSETOF(jit_arguments, str));
CMPTO(SLJIT_NOT_EQUAL, TMP2, 0, STR_PTR, 0, empty_match_found_label);
JUMPTO(SLJIT_JUMP, empty_match_backtrack_label);
}
+common->fast_forward_bc_ptr = NULL;
+common->fast_fail_start_ptr = 0;
+common->fast_fail_end_ptr = 0;
common->currententry = common->entries;
common->local_exit = TRUE;
quit_label = common->quit_label;
@@ -10395,7 +11388,7 @@ union {
void *executable_func;
jit_function call_executable_func;
} convert_executable_func;
-pcre_uint8 local_space[MACHINE_STACK_SIZE];
+sljit_u8 local_space[MACHINE_STACK_SIZE];
struct sljit_stack local_stack;
local_stack.top = (sljit_sw)&local_space;
@@ -10435,7 +11428,7 @@ arguments.begin = subject;
arguments.end = subject + length;
arguments.mark_ptr = NULL;
/* JIT decreases this value less frequently than the interpreter. */
-arguments.limit_match = ((extra_data->flags & PCRE_EXTRA_MATCH_LIMIT) == 0) ? MATCH_LIMIT : (pcre_uint32)(extra_data->match_limit);
+arguments.limit_match = ((extra_data->flags & PCRE_EXTRA_MATCH_LIMIT) == 0) ? MATCH_LIMIT : (sljit_u32)(extra_data->match_limit);
if (functions->limit_match != 0 && functions->limit_match < arguments.limit_match)
arguments.limit_match = functions->limit_match;
arguments.notbol = (options & PCRE_NOTBOL) != 0;
@@ -10528,7 +11521,7 @@ arguments.begin = subject_ptr;
arguments.end = subject_ptr + length;
arguments.mark_ptr = NULL;
/* JIT decreases this value less frequently than the interpreter. */
-arguments.limit_match = ((extra_data->flags & PCRE_EXTRA_MATCH_LIMIT) == 0) ? MATCH_LIMIT : (pcre_uint32)(extra_data->match_limit);
+arguments.limit_match = ((extra_data->flags & PCRE_EXTRA_MATCH_LIMIT) == 0) ? MATCH_LIMIT : (sljit_u32)(extra_data->match_limit);
if (functions->limit_match != 0 && functions->limit_match < arguments.limit_match)
arguments.limit_match = functions->limit_match;
arguments.notbol = (options & PCRE_NOTBOL) != 0;
diff --git a/src/3rdparty/pcre/pcre_study.c b/src/3rdparty/pcre/pcre_study.c
index 7fd0ba0b3d..d9d4960d84 100644
--- a/src/3rdparty/pcre/pcre_study.c
+++ b/src/3rdparty/pcre/pcre_study.c
@@ -1371,7 +1371,7 @@ do
for (c = 0; c < 16; c++) start_bits[c] |= map[c];
for (c = 128; c < 256; c++)
{
- if ((map[c/8] && (1 << (c&7))) != 0)
+ if ((map[c/8] & (1 << (c&7))) != 0)
{
int d = (c >> 6) | 0xc0; /* Set bit for this starter */
start_bits[d/8] |= (1 << (d&7)); /* and then skip on to the */
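The one-character change above replaces a logical AND with a bitwise AND: with "&&", any non-zero map byte made every character in that byte look like a viable starter. A minimal reproduction of the difference:

#include <assert.h>

int main(void)
{
unsigned char map[32] = { 0 };
int c = 130;                           /* tests bit 2 of map[16] */

map[16] = 0x01;                        /* only character 128 is marked */

int with_logical_and = (map[c/8] && (1 << (c&7))) != 0;   /* 1: the byte is merely non-zero */
int with_bitwise_and = (map[c/8] &  (1 << (c&7))) != 0;   /* 0: bit 2 is actually clear */

assert(with_logical_and == 1 && with_bitwise_and == 0);
return 0;
}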
diff --git a/src/3rdparty/pcre/sljit/sljitConfigInternal.h b/src/3rdparty/pcre/sljit/sljitConfigInternal.h
index 16e3547c93..9275b14992 100644
--- a/src/3rdparty/pcre/sljit/sljitConfigInternal.h
+++ b/src/3rdparty/pcre/sljit/sljitConfigInternal.h
@@ -31,14 +31,14 @@
SLJIT defines the following architecture dependent types and macros:
Types:
- sljit_sb, sljit_ub : signed and unsigned 8 bit byte
- sljit_sh, sljit_uh : signed and unsigned 16 bit half-word (short) type
- sljit_si, sljit_ui : signed and unsigned 32 bit integer type
- sljit_sw, sljit_uw : signed and unsigned machine word, enough to store a pointer
- sljit_p : unsgined pointer value (usually the same as sljit_uw, but
- some 64 bit ABIs may use 32 bit pointers)
- sljit_s : single precision floating point value
- sljit_d : double precision floating point value
+ sljit_s8, sljit_u8 : signed and unsigned 8 bit integer type
+ sljit_s16, sljit_u16 : signed and unsigned 16 bit integer type
+ sljit_s32, sljit_u32 : signed and unsigned 32 bit integer type
+ sljit_sw, sljit_uw : signed and unsigned machine word, enough to store a pointer
+  sljit_p : unsigned pointer value (usually the same as sljit_uw, but
+ some 64 bit ABIs may use 32 bit pointers)
+ sljit_f32 : 32 bit single precision floating point value
+ sljit_f64 : 64 bit double precision floating point value
Macros for feature detection (boolean):
SLJIT_32BIT_ARCHITECTURE : 32 bit architecture
@@ -56,10 +56,10 @@
SLJIT_NUMBER_OF_SCRATCH_FLOAT_REGISTERS : number of available floating point scratch registers
SLJIT_NUMBER_OF_SAVED_FLOAT_REGISTERS : number of available floating point saved registers
SLJIT_WORD_SHIFT : the shift required to apply when accessing a sljit_sw/sljit_uw array by index
- SLJIT_DOUBLE_SHIFT : the shift required to apply when accessing
- a double precision floating point array by index
- SLJIT_SINGLE_SHIFT : the shift required to apply when accessing
- a single precision floating point array by index
+ SLJIT_F32_SHIFT : the shift required to apply when accessing
+ a single precision floating point array by index
+ SLJIT_F64_SHIFT : the shift required to apply when accessing
+ a double precision floating point array by index
SLJIT_LOCALS_OFFSET : local space starting offset (SLJIT_SP + SLJIT_LOCALS_OFFSET)
SLJIT_RETURN_ADDRESS_OFFSET : a return instruction always adds this offset to the return address
@@ -252,11 +252,6 @@
#endif
#endif /* !SLJIT_INLINE */
-#ifndef SLJIT_CONST
-/* Const variables. */
-#define SLJIT_CONST const
-#endif
-
#ifndef SLJIT_UNUSED_ARG
/* Unused arguments. */
#define SLJIT_UNUSED_ARG(arg) (void)arg
@@ -284,6 +279,15 @@
/* Instruction cache flush. */
/****************************/
+#if (!defined SLJIT_CACHE_FLUSH && defined __has_builtin)
+#if __has_builtin(__builtin___clear_cache)
+
+#define SLJIT_CACHE_FLUSH(from, to) \
+ __builtin___clear_cache((char*)from, (char*)to)
+
+#endif /* __has_builtin(__builtin___clear_cache) */
+#endif /* (!defined SLJIT_CACHE_FLUSH && defined __has_builtin) */
+
#ifndef SLJIT_CACHE_FLUSH
#if (defined SLJIT_CONFIG_X86 && SLJIT_CONFIG_X86)
@@ -300,6 +304,11 @@
#define SLJIT_CACHE_FLUSH(from, to) \
sys_icache_invalidate((char*)(from), (char*)(to) - (char*)(from))
+#elif (defined(__GNUC__) && (__GNUC__ >= 5 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3)))
+
+#define SLJIT_CACHE_FLUSH(from, to) \
+ __builtin___clear_cache((char*)from, (char*)to)
+
#elif defined __ANDROID__
/* Android lacks __clear_cache; instead, cacheflush should be used. */
@@ -312,12 +321,14 @@
/* The __clear_cache() implementation of GCC is a dummy function on PowerPC. */
#define SLJIT_CACHE_FLUSH(from, to) \
ppc_cache_flush((from), (to))
+#define SLJIT_CACHE_FLUSH_OWN_IMPL 1
#elif (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32)
/* The __clear_cache() implementation of GCC is a dummy function on Sparc. */
#define SLJIT_CACHE_FLUSH(from, to) \
sparc_cache_flush((from), (to))
+#define SLJIT_CACHE_FLUSH_OWN_IMPL 1
#else
@@ -330,20 +341,20 @@
#endif /* !SLJIT_CACHE_FLUSH */
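Both new branches resolve SLJIT_CACHE_FLUSH to __builtin___clear_cache, available in GCC 4.3+ and (via __has_builtin) Clang. A minimal sketch of how such a flush is applied to a freshly written code buffer before it is executed (the helper name is illustrative):

#include <stddef.h>

static void flush_generated_code(void *code, size_t size)
{
/* Make the newly written instructions visible to the instruction cache
   before jumping into the buffer; this compiles to nothing on x86. */
__builtin___clear_cache((char *)code, (char *)code + size);
}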
/******************************************************/
-/* Byte/half/int/word/single/double type definitions. */
+/* Integer and floating point type definitions. */
/******************************************************/
/* 8 bit byte type. */
-typedef unsigned char sljit_ub;
-typedef signed char sljit_sb;
+typedef unsigned char sljit_u8;
+typedef signed char sljit_s8;
/* 16 bit half-word type. */
-typedef unsigned short int sljit_uh;
-typedef signed short int sljit_sh;
+typedef unsigned short int sljit_u16;
+typedef signed short int sljit_s16;
/* 32 bit integer type. */
-typedef unsigned int sljit_ui;
-typedef signed int sljit_si;
+typedef unsigned int sljit_u32;
+typedef signed int sljit_s32;
/* Machine word type. Enough for storing a pointer.
32 bit for 32 bit machines.
@@ -377,15 +388,15 @@ typedef long int sljit_sw;
typedef sljit_uw sljit_p;
/* Floating point types. */
-typedef float sljit_s;
-typedef double sljit_d;
+typedef float sljit_f32;
+typedef double sljit_f64;
/* Shift for pointer sized data. */
#define SLJIT_POINTER_SHIFT SLJIT_WORD_SHIFT
/* Shift for double precision sized data. */
-#define SLJIT_DOUBLE_SHIFT 3
-#define SLJIT_SINGLE_SHIFT 2
+#define SLJIT_F32_SHIFT 2
+#define SLJIT_F64_SHIFT 3
#ifndef SLJIT_W
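The renamed shift macros simply convert an element index into a byte offset for the two floating point sizes. A minimal check of that relationship, assuming the usual 4-byte float and 8-byte double:

#include <assert.h>
#include <stddef.h>

#define SLJIT_F32_SHIFT 2   /* sizeof(sljit_f32) == 4 == 1 << 2 */
#define SLJIT_F64_SHIFT 3   /* sizeof(sljit_f64) == 8 == 1 << 3 */

int main(void)
{
double buf[16];
size_t i = 5;

assert(sizeof(float)  == (1u << SLJIT_F32_SHIFT));
assert(sizeof(double) == (1u << SLJIT_F64_SHIFT));
assert((char *)&buf[i] == (char *)buf + (i << SLJIT_F64_SHIFT));   /* index -> byte offset */
return 0;
}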
diff --git a/src/3rdparty/pcre/sljit/sljitExecAllocator.c b/src/3rdparty/pcre/sljit/sljitExecAllocator.c
index f24ed33797..54f05f5dd7 100644
--- a/src/3rdparty/pcre/sljit/sljitExecAllocator.c
+++ b/src/3rdparty/pcre/sljit/sljitExecAllocator.c
@@ -137,10 +137,10 @@ struct free_block {
};
#define AS_BLOCK_HEADER(base, offset) \
- ((struct block_header*)(((sljit_ub*)base) + offset))
+ ((struct block_header*)(((sljit_u8*)base) + offset))
#define AS_FREE_BLOCK(base, offset) \
- ((struct free_block*)(((sljit_ub*)base) + offset))
-#define MEM_START(base) ((void*)(((sljit_ub*)base) + sizeof(struct block_header)))
+ ((struct free_block*)(((sljit_u8*)base) + offset))
+#define MEM_START(base) ((void*)(((sljit_u8*)base) + sizeof(struct block_header)))
#define ALIGN_SIZE(size) (((size) + sizeof(struct block_header) + 7) & ~7)
static struct free_block* free_blocks;
@@ -153,7 +153,7 @@ static SLJIT_INLINE void sljit_insert_free_block(struct free_block *free_block,
free_block->size = size;
free_block->next = free_blocks;
- free_block->prev = 0;
+ free_block->prev = NULL;
if (free_blocks)
free_blocks->prev = free_block;
free_blocks = free_block;
diff --git a/src/3rdparty/pcre/sljit/sljitLir.c b/src/3rdparty/pcre/sljit/sljitLir.c
index 0f1b1c9cce..ec1781e4c7 100644
--- a/src/3rdparty/pcre/sljit/sljitLir.c
+++ b/src/3rdparty/pcre/sljit/sljitLir.c
@@ -77,16 +77,16 @@
#if !(defined SLJIT_CONFIG_UNSUPPORTED && SLJIT_CONFIG_UNSUPPORTED)
#define GET_OPCODE(op) \
- ((op) & ~(SLJIT_INT_OP | SLJIT_SET_E | SLJIT_SET_U | SLJIT_SET_S | SLJIT_SET_O | SLJIT_SET_C | SLJIT_KEEP_FLAGS))
+ ((op) & ~(SLJIT_I32_OP | SLJIT_SET_E | SLJIT_SET_U | SLJIT_SET_S | SLJIT_SET_O | SLJIT_SET_C | SLJIT_KEEP_FLAGS))
#define GET_FLAGS(op) \
((op) & (SLJIT_SET_E | SLJIT_SET_U | SLJIT_SET_S | SLJIT_SET_O | SLJIT_SET_C))
#define GET_ALL_FLAGS(op) \
- ((op) & (SLJIT_INT_OP | SLJIT_SET_E | SLJIT_SET_U | SLJIT_SET_S | SLJIT_SET_O | SLJIT_SET_C | SLJIT_KEEP_FLAGS))
+ ((op) & (SLJIT_I32_OP | SLJIT_SET_E | SLJIT_SET_U | SLJIT_SET_S | SLJIT_SET_O | SLJIT_SET_C | SLJIT_KEEP_FLAGS))
#define TYPE_CAST_NEEDED(op) \
- (((op) >= SLJIT_MOV_UB && (op) <= SLJIT_MOV_SH) || ((op) >= SLJIT_MOVU_UB && (op) <= SLJIT_MOVU_SH))
+ (((op) >= SLJIT_MOV_U8 && (op) <= SLJIT_MOV_S16) || ((op) >= SLJIT_MOVU_U8 && (op) <= SLJIT_MOVU_S16))
#define BUF_SIZE 4096
@@ -257,7 +257,7 @@
return 1; \
} while (0)
-#define CHECK_RETURN_TYPE sljit_si
+#define CHECK_RETURN_TYPE sljit_s32
#define CHECK_RETURN_OK return 0
#define CHECK(x) \
@@ -320,7 +320,7 @@
#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5) || (defined SLJIT_CONFIG_X86 && SLJIT_CONFIG_X86)
#define SLJIT_NEEDS_COMPILER_INIT 1
-static sljit_si compiler_initialized = 0;
+static sljit_s32 compiler_initialized = 0;
/* A thread safe initialization. */
static void init_compiler(void);
#endif
@@ -333,17 +333,17 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_compiler* sljit_create_compiler(void *allo
SLJIT_ZEROMEM(compiler, sizeof(struct sljit_compiler));
SLJIT_COMPILE_ASSERT(
- sizeof(sljit_sb) == 1 && sizeof(sljit_ub) == 1
- && sizeof(sljit_sh) == 2 && sizeof(sljit_uh) == 2
- && sizeof(sljit_si) == 4 && sizeof(sljit_ui) == 4
+ sizeof(sljit_s8) == 1 && sizeof(sljit_u8) == 1
+ && sizeof(sljit_s16) == 2 && sizeof(sljit_u16) == 2
+ && sizeof(sljit_s32) == 4 && sizeof(sljit_u32) == 4
&& (sizeof(sljit_p) == 4 || sizeof(sljit_p) == 8)
&& sizeof(sljit_p) <= sizeof(sljit_sw)
&& (sizeof(sljit_sw) == 4 || sizeof(sljit_sw) == 8)
&& (sizeof(sljit_uw) == 4 || sizeof(sljit_uw) == 8),
invalid_integer_types);
- SLJIT_COMPILE_ASSERT(SLJIT_INT_OP == SLJIT_SINGLE_OP,
+ SLJIT_COMPILE_ASSERT(SLJIT_I32_OP == SLJIT_F32_OP,
int_op_and_single_op_must_be_the_same);
- SLJIT_COMPILE_ASSERT(SLJIT_REWRITABLE_JUMP != SLJIT_SINGLE_OP,
+ SLJIT_COMPILE_ASSERT(SLJIT_REWRITABLE_JUMP != SLJIT_F32_OP,
rewritable_jump_and_single_op_must_not_be_the_same);
/* Only the non-zero members must be set. */
@@ -379,14 +379,14 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_compiler* sljit_create_compiler(void *allo
#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
compiler->cpool = (sljit_uw*)SLJIT_MALLOC(CPOOL_SIZE * sizeof(sljit_uw)
- + CPOOL_SIZE * sizeof(sljit_ub), allocator_data);
+ + CPOOL_SIZE * sizeof(sljit_u8), allocator_data);
if (!compiler->cpool) {
SLJIT_FREE(compiler->buf, allocator_data);
SLJIT_FREE(compiler->abuf, allocator_data);
SLJIT_FREE(compiler, allocator_data);
return NULL;
}
- compiler->cpool_unique = (sljit_ub*)(compiler->cpool + CPOOL_SIZE);
+ compiler->cpool_unique = (sljit_u8*)(compiler->cpool + CPOOL_SIZE);
compiler->cpool_diff = 0xffffffff;
#endif
@@ -485,7 +485,7 @@ SLJIT_API_FUNC_ATTRIBUTE void sljit_set_target(struct sljit_jump *jump, sljit_uw
static void* ensure_buf(struct sljit_compiler *compiler, sljit_uw size)
{
- sljit_ub *ret;
+ sljit_u8 *ret;
struct sljit_memory_fragment *new_frag;
SLJIT_ASSERT(size <= 256);
@@ -504,7 +504,7 @@ static void* ensure_buf(struct sljit_compiler *compiler, sljit_uw size)
static void* ensure_abuf(struct sljit_compiler *compiler, sljit_uw size)
{
- sljit_ub *ret;
+ sljit_u8 *ret;
struct sljit_memory_fragment *new_frag;
SLJIT_ASSERT(size <= 256);
@@ -521,7 +521,7 @@ static void* ensure_abuf(struct sljit_compiler *compiler, sljit_uw size)
return new_frag->memory;
}
-SLJIT_API_FUNC_ATTRIBUTE void* sljit_alloc_memory(struct sljit_compiler *compiler, sljit_si size)
+SLJIT_API_FUNC_ATTRIBUTE void* sljit_alloc_memory(struct sljit_compiler *compiler, sljit_s32 size)
{
CHECK_ERROR_PTR();
@@ -554,8 +554,8 @@ static SLJIT_INLINE void reverse_buf(struct sljit_compiler *compiler)
}
static SLJIT_INLINE void set_emit_enter(struct sljit_compiler *compiler,
- sljit_si options, sljit_si args, sljit_si scratches, sljit_si saveds,
- sljit_si fscratches, sljit_si fsaveds, sljit_si local_size)
+ sljit_s32 options, sljit_s32 args, sljit_s32 scratches, sljit_s32 saveds,
+ sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
{
SLJIT_UNUSED_ARG(args);
SLJIT_UNUSED_ARG(local_size);
@@ -571,8 +571,8 @@ static SLJIT_INLINE void set_emit_enter(struct sljit_compiler *compiler,
}
static SLJIT_INLINE void set_set_context(struct sljit_compiler *compiler,
- sljit_si options, sljit_si args, sljit_si scratches, sljit_si saveds,
- sljit_si fscratches, sljit_si fsaveds, sljit_si local_size)
+ sljit_s32 options, sljit_s32 args, sljit_s32 scratches, sljit_s32 saveds,
+ sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
{
SLJIT_UNUSED_ARG(args);
SLJIT_UNUSED_ARG(local_size);
@@ -598,7 +598,7 @@ static SLJIT_INLINE void set_label(struct sljit_label *label, struct sljit_compi
compiler->last_label = label;
}
-static SLJIT_INLINE void set_jump(struct sljit_jump *jump, struct sljit_compiler *compiler, sljit_si flags)
+static SLJIT_INLINE void set_jump(struct sljit_jump *jump, struct sljit_compiler *compiler, sljit_s32 flags)
{
jump->next = NULL;
jump->flags = flags;
@@ -654,19 +654,19 @@ static SLJIT_INLINE void set_const(struct sljit_const *const_, struct sljit_comp
break; \
case SLJIT_BREAKPOINT: \
case SLJIT_NOP: \
- case SLJIT_LUMUL: \
- case SLJIT_LSMUL: \
+ case SLJIT_LMUL_UW: \
+ case SLJIT_LMUL_SW: \
case SLJIT_MOV: \
- case SLJIT_MOV_UI: \
+ case SLJIT_MOV_U32: \
case SLJIT_MOV_P: \
case SLJIT_MOVU: \
- case SLJIT_MOVU_UI: \
+ case SLJIT_MOVU_U32: \
case SLJIT_MOVU_P: \
/* Nothing allowed */ \
- CHECK_ARGUMENT(!(op & (SLJIT_INT_OP | SLJIT_SET_E | SLJIT_SET_U | SLJIT_SET_S | SLJIT_SET_O | SLJIT_SET_C | SLJIT_KEEP_FLAGS))); \
+ CHECK_ARGUMENT(!(op & (SLJIT_I32_OP | SLJIT_SET_E | SLJIT_SET_U | SLJIT_SET_S | SLJIT_SET_O | SLJIT_SET_C | SLJIT_KEEP_FLAGS))); \
break; \
default: \
- /* Only SLJIT_INT_OP or SLJIT_SINGLE_OP is allowed. */ \
+ /* Only SLJIT_I32_OP or SLJIT_F32_OP is allowed. */ \
CHECK_ARGUMENT(!(op & (SLJIT_SET_E | SLJIT_SET_U | SLJIT_SET_S | SLJIT_SET_O | SLJIT_SET_C | SLJIT_KEEP_FLAGS))); \
break; \
}
@@ -674,12 +674,12 @@ static SLJIT_INLINE void set_const(struct sljit_const *const_, struct sljit_comp
#define FUNCTION_CHECK_FOP() \
CHECK_ARGUMENT(!GET_FLAGS(op) || !(op & SLJIT_KEEP_FLAGS)); \
switch (GET_OPCODE(op)) { \
- case SLJIT_DCMP: \
+ case SLJIT_CMP_F64: \
CHECK_ARGUMENT(!(op & (SLJIT_SET_U | SLJIT_SET_O | SLJIT_SET_C | SLJIT_KEEP_FLAGS))); \
CHECK_ARGUMENT((op & (SLJIT_SET_E | SLJIT_SET_S))); \
break; \
default: \
- /* Only SLJIT_INT_OP or SLJIT_SINGLE_OP is allowed. */ \
+ /* Only SLJIT_I32_OP or SLJIT_F32_OP is allowed. */ \
CHECK_ARGUMENT(!(op & (SLJIT_SET_E | SLJIT_SET_U | SLJIT_SET_S | SLJIT_SET_O | SLJIT_SET_C | SLJIT_KEEP_FLAGS))); \
break; \
}
@@ -844,38 +844,38 @@ SLJIT_API_FUNC_ATTRIBUTE void sljit_compiler_verbose(struct sljit_compiler *comp
fprintf(compiler->verbose, "fs%d", SLJIT_NUMBER_OF_FLOAT_REGISTERS - (p)); \
}
-static SLJIT_CONST char* op0_names[] = {
- (char*)"breakpoint", (char*)"nop", (char*)"lumul", (char*)"lsmul",
- (char*)"udivmod", (char*)"sdivmod", (char*)"udivi", (char*)"sdivi"
+static const char* op0_names[] = {
+ (char*)"breakpoint", (char*)"nop", (char*)"lmul.uw", (char*)"lmul.sw",
+ (char*)"divmod.u", (char*)"divmod.s", (char*)"div.u", (char*)"div.s"
};
-static SLJIT_CONST char* op1_names[] = {
- (char*)"mov", (char*)"mov_ub", (char*)"mov_sb", (char*)"mov_uh",
- (char*)"mov_sh", (char*)"mov_ui", (char*)"mov_si", (char*)"mov_p",
- (char*)"movu", (char*)"movu_ub", (char*)"movu_sb", (char*)"movu_uh",
- (char*)"movu_sh", (char*)"movu_ui", (char*)"movu_si", (char*)"movu_p",
+static const char* op1_names[] = {
+ (char*)"", (char*)".u8", (char*)".s8", (char*)".u16",
+ (char*)".s16", (char*)".u32", (char*)".s32", (char*)".p",
+ (char*)"", (char*)".u8", (char*)".s8", (char*)".u16",
+ (char*)".s16", (char*)".u32", (char*)".s32", (char*)".p",
(char*)"not", (char*)"neg", (char*)"clz",
};
-static SLJIT_CONST char* op2_names[] = {
+static const char* op2_names[] = {
(char*)"add", (char*)"addc", (char*)"sub", (char*)"subc",
(char*)"mul", (char*)"and", (char*)"or", (char*)"xor",
(char*)"shl", (char*)"lshr", (char*)"ashr",
};
-static SLJIT_CONST char* fop1_names[] = {
+static const char* fop1_names[] = {
(char*)"mov", (char*)"conv", (char*)"conv", (char*)"conv",
(char*)"conv", (char*)"conv", (char*)"cmp", (char*)"neg",
(char*)"abs",
};
-static SLJIT_CONST char* fop2_names[] = {
+static const char* fop2_names[] = {
(char*)"add", (char*)"sub", (char*)"mul", (char*)"div"
};
-#define JUMP_PREFIX(type) \
- ((type & 0xff) <= SLJIT_MUL_NOT_OVERFLOW ? ((type & SLJIT_INT_OP) ? "i_" : "") \
- : ((type & 0xff) <= SLJIT_D_ORDERED ? ((type & SLJIT_SINGLE_OP) ? "s_" : "d_") : ""))
+#define JUMP_POSTFIX(type) \
+ ((type & 0xff) <= SLJIT_MUL_NOT_OVERFLOW ? ((type & SLJIT_I32_OP) ? "32" : "") \
+ : ((type & 0xff) <= SLJIT_ORDERED_F64 ? ((type & SLJIT_F32_OP) ? ".f32" : ".f64") : ""))
static char* jump_names[] = {
(char*)"equal", (char*)"not_equal",
@@ -923,8 +923,8 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_generate_code(struct sljit_com
}
static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_enter(struct sljit_compiler *compiler,
- sljit_si options, sljit_si args, sljit_si scratches, sljit_si saveds,
- sljit_si fscratches, sljit_si fsaveds, sljit_si local_size)
+ sljit_s32 options, sljit_s32 args, sljit_s32 scratches, sljit_s32 saveds,
+ sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
{
SLJIT_UNUSED_ARG(compiler);
@@ -949,8 +949,8 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_enter(struct sljit_compil
}
static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_set_context(struct sljit_compiler *compiler,
- sljit_si options, sljit_si args, sljit_si scratches, sljit_si saveds,
- sljit_si fscratches, sljit_si fsaveds, sljit_si local_size)
+ sljit_s32 options, sljit_s32 args, sljit_s32 scratches, sljit_s32 saveds,
+ sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
{
if (SLJIT_UNLIKELY(compiler->skip_checks)) {
compiler->skip_checks = 0;
@@ -977,7 +977,7 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_set_context(struct sljit_compi
CHECK_RETURN_OK;
}
-static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_return(struct sljit_compiler *compiler, sljit_si op, sljit_si src, sljit_sw srcw)
+static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_return(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 src, sljit_sw srcw)
{
#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
CHECK_ARGUMENT(compiler->scratches >= 0);
@@ -993,7 +993,7 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_return(struct sljit_compi
if (op == SLJIT_UNUSED)
fprintf(compiler->verbose, " return\n");
else {
- fprintf(compiler->verbose, " return.%s ", op1_names[op - SLJIT_OP1_BASE]);
+ fprintf(compiler->verbose, " return%s ", op1_names[op - SLJIT_OP1_BASE]);
sljit_verbose_param(compiler, src, srcw);
fprintf(compiler->verbose, "\n");
}
@@ -1002,7 +1002,7 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_return(struct sljit_compi
CHECK_RETURN_OK;
}
-static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_si dst, sljit_sw dstw)
+static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
{
#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
FUNCTION_CHECK_DST(dst, dstw);
@@ -1017,7 +1017,7 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fast_enter(struct sljit_c
CHECK_RETURN_OK;
}
-static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fast_return(struct sljit_compiler *compiler, sljit_si src, sljit_sw srcw)
+static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fast_return(struct sljit_compiler *compiler, sljit_s32 src, sljit_sw srcw)
{
#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
FUNCTION_CHECK_SRC(src, srcw);
@@ -1032,23 +1032,29 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fast_return(struct sljit_
CHECK_RETURN_OK;
}
-static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_op0(struct sljit_compiler *compiler, sljit_si op)
+static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_op0(struct sljit_compiler *compiler, sljit_s32 op)
{
#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
- CHECK_ARGUMENT((op >= SLJIT_BREAKPOINT && op <= SLJIT_LSMUL)
- || ((op & ~SLJIT_INT_OP) >= SLJIT_UDIVMOD && (op & ~SLJIT_INT_OP) <= SLJIT_SDIVI));
- CHECK_ARGUMENT(op < SLJIT_LUMUL || compiler->scratches >= 2);
+ CHECK_ARGUMENT((op >= SLJIT_BREAKPOINT && op <= SLJIT_LMUL_SW)
+ || ((op & ~SLJIT_I32_OP) >= SLJIT_DIVMOD_UW && (op & ~SLJIT_I32_OP) <= SLJIT_DIV_SW));
+ CHECK_ARGUMENT(op < SLJIT_LMUL_UW || compiler->scratches >= 2);
#endif
#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
if (SLJIT_UNLIKELY(!!compiler->verbose))
- fprintf(compiler->verbose, " %s%s\n", !(op & SLJIT_INT_OP) ? "" : "i", op0_names[GET_OPCODE(op) - SLJIT_OP0_BASE]);
+ {
+ fprintf(compiler->verbose, " %s", op0_names[GET_OPCODE(op) - SLJIT_OP0_BASE]);
+ if (GET_OPCODE(op) >= SLJIT_DIVMOD_UW) {
+ fprintf(compiler->verbose, (op & SLJIT_I32_OP) ? "32" : "w");
+ }
+ fprintf(compiler->verbose, "\n");
+ }
#endif
CHECK_RETURN_OK;
}
-static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_op1(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw)
+static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_op1(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
{
if (SLJIT_UNLIKELY(compiler->skip_checks)) {
compiler->skip_checks = 0;
@@ -1064,9 +1070,18 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_op1(struct sljit_compiler
#endif
#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
if (SLJIT_UNLIKELY(!!compiler->verbose)) {
- fprintf(compiler->verbose, " %s%s%s%s%s%s%s%s ", !(op & SLJIT_INT_OP) ? "" : "i", op1_names[GET_OPCODE(op) - SLJIT_OP1_BASE],
- !(op & SLJIT_SET_E) ? "" : ".e", !(op & SLJIT_SET_U) ? "" : ".u", !(op & SLJIT_SET_S) ? "" : ".s",
- !(op & SLJIT_SET_O) ? "" : ".o", !(op & SLJIT_SET_C) ? "" : ".c", !(op & SLJIT_KEEP_FLAGS) ? "" : ".k");
+ if (GET_OPCODE(op) <= SLJIT_MOVU_P)
+ {
+ fprintf(compiler->verbose, " mov%s%s%s ", (GET_OPCODE(op) >= SLJIT_MOVU) ? "u" : "",
+ !(op & SLJIT_I32_OP) ? "" : "32", (op != SLJIT_MOV32 && op != SLJIT_MOVU32) ? op1_names[GET_OPCODE(op) - SLJIT_OP1_BASE] : "");
+ }
+ else
+ {
+ fprintf(compiler->verbose, " %s%s%s%s%s%s%s%s ", op1_names[GET_OPCODE(op) - SLJIT_OP1_BASE], !(op & SLJIT_I32_OP) ? "" : "32",
+ !(op & SLJIT_SET_E) ? "" : ".e", !(op & SLJIT_SET_U) ? "" : ".u", !(op & SLJIT_SET_S) ? "" : ".s",
+ !(op & SLJIT_SET_O) ? "" : ".o", !(op & SLJIT_SET_C) ? "" : ".c", !(op & SLJIT_KEEP_FLAGS) ? "" : ".k");
+ }
+
sljit_verbose_param(compiler, dst, dstw);
fprintf(compiler->verbose, ", ");
sljit_verbose_param(compiler, src, srcw);
@@ -1076,10 +1091,10 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_op1(struct sljit_compiler
CHECK_RETURN_OK;
}
-static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_op2(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_op2(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
if (SLJIT_UNLIKELY(compiler->skip_checks)) {
compiler->skip_checks = 0;
@@ -1095,7 +1110,7 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_op2(struct sljit_compiler
#endif
#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
if (SLJIT_UNLIKELY(!!compiler->verbose)) {
- fprintf(compiler->verbose, " %s%s%s%s%s%s%s%s ", !(op & SLJIT_INT_OP) ? "" : "i", op2_names[GET_OPCODE(op) - SLJIT_OP2_BASE],
+ fprintf(compiler->verbose, " %s%s%s%s%s%s%s%s ", op2_names[GET_OPCODE(op) - SLJIT_OP2_BASE], !(op & SLJIT_I32_OP) ? "" : "32",
!(op & SLJIT_SET_E) ? "" : ".e", !(op & SLJIT_SET_U) ? "" : ".u", !(op & SLJIT_SET_S) ? "" : ".s",
!(op & SLJIT_SET_O) ? "" : ".o", !(op & SLJIT_SET_C) ? "" : ".c", !(op & SLJIT_KEEP_FLAGS) ? "" : ".k");
sljit_verbose_param(compiler, dst, dstw);
@@ -1109,7 +1124,7 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_op2(struct sljit_compiler
CHECK_RETURN_OK;
}
-static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_get_register_index(sljit_si reg)
+static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_get_register_index(sljit_s32 reg)
{
SLJIT_UNUSED_ARG(reg);
#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
@@ -1118,7 +1133,7 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_get_register_index(sljit_si re
CHECK_RETURN_OK;
}
-static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_get_float_register_index(sljit_si reg)
+static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_get_float_register_index(sljit_s32 reg)
{
SLJIT_UNUSED_ARG(reg);
#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
@@ -1128,7 +1143,7 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_get_float_register_index(sljit
}
static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_op_custom(struct sljit_compiler *compiler,
- void *instruction, sljit_si size)
+ void *instruction, sljit_s32 size)
{
#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
int i;
@@ -1152,16 +1167,16 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_op_custom(struct sljit_co
if (SLJIT_UNLIKELY(!!compiler->verbose)) {
fprintf(compiler->verbose, " op_custom");
for (i = 0; i < size; i++)
- fprintf(compiler->verbose, " 0x%x", ((sljit_ub*)instruction)[i]);
+ fprintf(compiler->verbose, " 0x%x", ((sljit_u8*)instruction)[i]);
fprintf(compiler->verbose, "\n");
}
#endif
CHECK_RETURN_OK;
}
-static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fop1(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw)
+static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fop1(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
{
if (SLJIT_UNLIKELY(compiler->skip_checks)) {
compiler->skip_checks = 0;
@@ -1170,19 +1185,19 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fop1(struct sljit_compile
#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
CHECK_ARGUMENT(sljit_is_fpu_available());
- CHECK_ARGUMENT(GET_OPCODE(op) >= SLJIT_DMOV && GET_OPCODE(op) <= SLJIT_DABS);
+ CHECK_ARGUMENT(GET_OPCODE(op) >= SLJIT_MOV_F64 && GET_OPCODE(op) <= SLJIT_ABS_F64);
FUNCTION_CHECK_FOP();
FUNCTION_FCHECK(src, srcw);
FUNCTION_FCHECK(dst, dstw);
#endif
#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
if (SLJIT_UNLIKELY(!!compiler->verbose)) {
- if (GET_OPCODE(op) == SLJIT_CONVD_FROMS)
- fprintf(compiler->verbose, " %s%s ", fop1_names[SLJIT_CONVD_FROMS - SLJIT_FOP1_BASE],
- (op & SLJIT_SINGLE_OP) ? "s.fromd" : "d.froms");
+ if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_F32)
+ fprintf(compiler->verbose, " %s%s ", fop1_names[SLJIT_CONV_F64_FROM_F32 - SLJIT_FOP1_BASE],
+ (op & SLJIT_F32_OP) ? ".f32.from.f64" : ".f64.from.f32");
else
- fprintf(compiler->verbose, " %s%s ", (op & SLJIT_SINGLE_OP) ? "s" : "d",
- fop1_names[GET_OPCODE(op) - SLJIT_FOP1_BASE]);
+ fprintf(compiler->verbose, " %s%s ", fop1_names[GET_OPCODE(op) - SLJIT_FOP1_BASE],
+ (op & SLJIT_F32_OP) ? ".f32" : ".f64");
sljit_verbose_fparam(compiler, dst, dstw);
fprintf(compiler->verbose, ", ");
@@ -1193,9 +1208,9 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fop1(struct sljit_compile
CHECK_RETURN_OK;
}
-static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fop1_cmp(struct sljit_compiler *compiler, sljit_si op,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fop1_cmp(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
if (SLJIT_UNLIKELY(compiler->skip_checks)) {
compiler->skip_checks = 0;
@@ -1204,14 +1219,14 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fop1_cmp(struct sljit_com
#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
CHECK_ARGUMENT(sljit_is_fpu_available());
- CHECK_ARGUMENT(GET_OPCODE(op) == SLJIT_DCMP);
+ CHECK_ARGUMENT(GET_OPCODE(op) == SLJIT_CMP_F64);
FUNCTION_CHECK_FOP();
FUNCTION_FCHECK(src1, src1w);
FUNCTION_FCHECK(src2, src2w);
#endif
#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
if (SLJIT_UNLIKELY(!!compiler->verbose)) {
- fprintf(compiler->verbose, " %s%s%s%s ", (op & SLJIT_SINGLE_OP) ? "s" : "d", fop1_names[SLJIT_DCMP - SLJIT_FOP1_BASE],
+ fprintf(compiler->verbose, " %s%s%s%s ", fop1_names[SLJIT_CMP_F64 - SLJIT_FOP1_BASE], (op & SLJIT_F32_OP) ? ".f32" : ".f64",
(op & SLJIT_SET_E) ? ".e" : "", (op & SLJIT_SET_S) ? ".s" : "");
sljit_verbose_fparam(compiler, src1, src1w);
fprintf(compiler->verbose, ", ");
@@ -1222,9 +1237,9 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fop1_cmp(struct sljit_com
CHECK_RETURN_OK;
}
-static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fop1_convw_fromd(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw)
+static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fop1_conv_sw_from_f64(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
{
if (SLJIT_UNLIKELY(compiler->skip_checks)) {
compiler->skip_checks = 0;
@@ -1233,7 +1248,7 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fop1_convw_fromd(struct s
#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
CHECK_ARGUMENT(sljit_is_fpu_available());
- CHECK_ARGUMENT(GET_OPCODE(op) >= SLJIT_CONVW_FROMD && GET_OPCODE(op) <= SLJIT_CONVI_FROMD);
+ CHECK_ARGUMENT(GET_OPCODE(op) >= SLJIT_CONV_SW_FROM_F64 && GET_OPCODE(op) <= SLJIT_CONV_S32_FROM_F64);
FUNCTION_CHECK_FOP();
FUNCTION_FCHECK(src, srcw);
FUNCTION_CHECK_DST(dst, dstw);
@@ -1241,8 +1256,8 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fop1_convw_fromd(struct s
#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
if (SLJIT_UNLIKELY(!!compiler->verbose)) {
fprintf(compiler->verbose, " %s%s.from%s ", fop1_names[GET_OPCODE(op) - SLJIT_FOP1_BASE],
- (GET_OPCODE(op) == SLJIT_CONVI_FROMD) ? "i" : "w",
- (op & SLJIT_SINGLE_OP) ? "s" : "d");
+ (GET_OPCODE(op) == SLJIT_CONV_S32_FROM_F64) ? ".s32" : ".sw",
+ (op & SLJIT_F32_OP) ? ".f32" : ".f64");
sljit_verbose_param(compiler, dst, dstw);
fprintf(compiler->verbose, ", ");
sljit_verbose_fparam(compiler, src, srcw);
@@ -1252,9 +1267,9 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fop1_convw_fromd(struct s
CHECK_RETURN_OK;
}
-static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fop1_convd_fromw(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw)
+static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fop1_conv_f64_from_sw(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
{
if (SLJIT_UNLIKELY(compiler->skip_checks)) {
compiler->skip_checks = 0;
@@ -1263,7 +1278,7 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fop1_convd_fromw(struct s
#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
CHECK_ARGUMENT(sljit_is_fpu_available());
- CHECK_ARGUMENT(GET_OPCODE(op) >= SLJIT_CONVD_FROMW && GET_OPCODE(op) <= SLJIT_CONVD_FROMI);
+ CHECK_ARGUMENT(GET_OPCODE(op) >= SLJIT_CONV_F64_FROM_SW && GET_OPCODE(op) <= SLJIT_CONV_F64_FROM_S32);
FUNCTION_CHECK_FOP();
FUNCTION_CHECK_SRC(src, srcw);
FUNCTION_FCHECK(dst, dstw);
@@ -1271,8 +1286,8 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fop1_convd_fromw(struct s
#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
if (SLJIT_UNLIKELY(!!compiler->verbose)) {
fprintf(compiler->verbose, " %s%s.from%s ", fop1_names[GET_OPCODE(op) - SLJIT_FOP1_BASE],
- (op & SLJIT_SINGLE_OP) ? "s" : "d",
- (GET_OPCODE(op) == SLJIT_CONVD_FROMI) ? "i" : "w");
+ (op & SLJIT_F32_OP) ? ".f32" : ".f64",
+ (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_S32) ? ".s32" : ".sw");
sljit_verbose_fparam(compiler, dst, dstw);
fprintf(compiler->verbose, ", ");
sljit_verbose_param(compiler, src, srcw);
@@ -1282,14 +1297,14 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fop1_convd_fromw(struct s
CHECK_RETURN_OK;
}
-static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fop2(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fop2(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
CHECK_ARGUMENT(sljit_is_fpu_available());
- CHECK_ARGUMENT(GET_OPCODE(op) >= SLJIT_DADD && GET_OPCODE(op) <= SLJIT_DDIV);
+ CHECK_ARGUMENT(GET_OPCODE(op) >= SLJIT_ADD_F64 && GET_OPCODE(op) <= SLJIT_DIV_F64);
FUNCTION_CHECK_FOP();
FUNCTION_FCHECK(src1, src1w);
FUNCTION_FCHECK(src2, src2w);
@@ -1297,7 +1312,7 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fop2(struct sljit_compile
#endif
#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
if (SLJIT_UNLIKELY(!!compiler->verbose)) {
- fprintf(compiler->verbose, " %s%s ", (op & SLJIT_SINGLE_OP) ? "s" : "d", fop2_names[GET_OPCODE(op) - SLJIT_FOP2_BASE]);
+ fprintf(compiler->verbose, " %s%s ", fop2_names[GET_OPCODE(op) - SLJIT_FOP2_BASE], (op & SLJIT_F32_OP) ? ".f32" : ".f64");
sljit_verbose_fparam(compiler, dst, dstw);
fprintf(compiler->verbose, ", ");
sljit_verbose_fparam(compiler, src1, src1w);
@@ -1320,7 +1335,7 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_label(struct sljit_compil
CHECK_RETURN_OK;
}
-static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_jump(struct sljit_compiler *compiler, sljit_si type)
+static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_jump(struct sljit_compiler *compiler, sljit_s32 type)
{
if (SLJIT_UNLIKELY(compiler->skip_checks)) {
compiler->skip_checks = 0;
@@ -1328,33 +1343,33 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_jump(struct sljit_compile
}
#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
- CHECK_ARGUMENT(!(type & ~(0xff | SLJIT_REWRITABLE_JUMP | SLJIT_INT_OP)));
+ CHECK_ARGUMENT(!(type & ~(0xff | SLJIT_REWRITABLE_JUMP | SLJIT_I32_OP)));
CHECK_ARGUMENT((type & 0xff) >= SLJIT_EQUAL && (type & 0xff) <= SLJIT_CALL3);
- CHECK_ARGUMENT((type & 0xff) < SLJIT_JUMP || !(type & SLJIT_INT_OP));
+ CHECK_ARGUMENT((type & 0xff) < SLJIT_JUMP || !(type & SLJIT_I32_OP));
CHECK_ARGUMENT((type & 0xff) <= SLJIT_CALL0 || ((type & 0xff) - SLJIT_CALL0) <= compiler->scratches);
#endif
#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
if (SLJIT_UNLIKELY(!!compiler->verbose))
- fprintf(compiler->verbose, " jump%s.%s%s\n", !(type & SLJIT_REWRITABLE_JUMP) ? "" : ".r",
- JUMP_PREFIX(type), jump_names[type & 0xff]);
+ fprintf(compiler->verbose, " jump%s %s%s\n", !(type & SLJIT_REWRITABLE_JUMP) ? "" : ".r",
+ jump_names[type & 0xff], JUMP_POSTFIX(type));
#endif
CHECK_RETURN_OK;
}
-static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_cmp(struct sljit_compiler *compiler, sljit_si type,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_cmp(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
- CHECK_ARGUMENT(!(type & ~(0xff | SLJIT_REWRITABLE_JUMP | SLJIT_INT_OP)));
+ CHECK_ARGUMENT(!(type & ~(0xff | SLJIT_REWRITABLE_JUMP | SLJIT_I32_OP)));
CHECK_ARGUMENT((type & 0xff) >= SLJIT_EQUAL && (type & 0xff) <= SLJIT_SIG_LESS_EQUAL);
FUNCTION_CHECK_SRC(src1, src1w);
FUNCTION_CHECK_SRC(src2, src2w);
#endif
#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
if (SLJIT_UNLIKELY(!!compiler->verbose)) {
- fprintf(compiler->verbose, " cmp%s.%s%s ", !(type & SLJIT_REWRITABLE_JUMP) ? "" : ".r",
- (type & SLJIT_INT_OP) ? "i_" : "", jump_names[type & 0xff]);
+ fprintf(compiler->verbose, " cmp%s %s%s, ", !(type & SLJIT_REWRITABLE_JUMP) ? "" : ".r",
+ jump_names[type & 0xff], (type & SLJIT_I32_OP) ? "32" : "");
sljit_verbose_param(compiler, src1, src1w);
fprintf(compiler->verbose, ", ");
sljit_verbose_param(compiler, src2, src2w);
@@ -1364,21 +1379,21 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_cmp(struct sljit_compiler
CHECK_RETURN_OK;
}
-static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fcmp(struct sljit_compiler *compiler, sljit_si type,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fcmp(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
CHECK_ARGUMENT(sljit_is_fpu_available());
- CHECK_ARGUMENT(!(type & ~(0xff | SLJIT_REWRITABLE_JUMP | SLJIT_SINGLE_OP)));
- CHECK_ARGUMENT((type & 0xff) >= SLJIT_D_EQUAL && (type & 0xff) <= SLJIT_D_ORDERED);
+ CHECK_ARGUMENT(!(type & ~(0xff | SLJIT_REWRITABLE_JUMP | SLJIT_F32_OP)));
+ CHECK_ARGUMENT((type & 0xff) >= SLJIT_EQUAL_F64 && (type & 0xff) <= SLJIT_ORDERED_F64);
FUNCTION_FCHECK(src1, src1w);
FUNCTION_FCHECK(src2, src2w);
#endif
#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
if (SLJIT_UNLIKELY(!!compiler->verbose)) {
- fprintf(compiler->verbose, " fcmp%s.%s%s ", !(type & SLJIT_REWRITABLE_JUMP) ? "" : ".r",
- (type & SLJIT_SINGLE_OP) ? "s_" : "d_", jump_names[type & 0xff]);
+ fprintf(compiler->verbose, " fcmp%s %s%s, ", !(type & SLJIT_REWRITABLE_JUMP) ? "" : ".r",
+ jump_names[type & 0xff], (type & SLJIT_F32_OP) ? ".f32" : ".f64");
sljit_verbose_fparam(compiler, src1, src1w);
fprintf(compiler->verbose, ", ");
sljit_verbose_fparam(compiler, src2, src2w);
@@ -1388,7 +1403,7 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fcmp(struct sljit_compile
CHECK_RETURN_OK;
}
-static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_ijump(struct sljit_compiler *compiler, sljit_si type, sljit_si src, sljit_sw srcw)
+static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_ijump(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 src, sljit_sw srcw)
{
if (SLJIT_UNLIKELY(compiler->skip_checks)) {
compiler->skip_checks = 0;
@@ -1410,15 +1425,15 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_ijump(struct sljit_compil
CHECK_RETURN_OK;
}
-static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw,
- sljit_si type)
+static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw,
+ sljit_s32 type)
{
#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
- CHECK_ARGUMENT(!(type & ~(0xff | SLJIT_INT_OP)));
- CHECK_ARGUMENT((type & 0xff) >= SLJIT_EQUAL && (type & 0xff) <= SLJIT_D_ORDERED);
- CHECK_ARGUMENT(op == SLJIT_MOV || GET_OPCODE(op) == SLJIT_MOV_UI || GET_OPCODE(op) == SLJIT_MOV_SI
+ CHECK_ARGUMENT(!(type & ~(0xff | SLJIT_I32_OP)));
+ CHECK_ARGUMENT((type & 0xff) >= SLJIT_EQUAL && (type & 0xff) <= SLJIT_ORDERED_F64);
+ CHECK_ARGUMENT(op == SLJIT_MOV || GET_OPCODE(op) == SLJIT_MOV_U32 || GET_OPCODE(op) == SLJIT_MOV_S32
|| (GET_OPCODE(op) >= SLJIT_AND && GET_OPCODE(op) <= SLJIT_XOR));
CHECK_ARGUMENT((op & (SLJIT_SET_U | SLJIT_SET_S | SLJIT_SET_O | SLJIT_SET_C)) == 0);
CHECK_ARGUMENT((op & (SLJIT_SET_E | SLJIT_KEEP_FLAGS)) != (SLJIT_SET_E | SLJIT_KEEP_FLAGS));
@@ -1431,21 +1446,22 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_op_flags(struct sljit_com
#endif
#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
if (SLJIT_UNLIKELY(!!compiler->verbose)) {
- fprintf(compiler->verbose, " flags.%s%s%s%s ", !(op & SLJIT_INT_OP) ? "" : "i",
- GET_OPCODE(op) >= SLJIT_OP2_BASE ? op2_names[GET_OPCODE(op) - SLJIT_OP2_BASE] : op1_names[GET_OPCODE(op) - SLJIT_OP1_BASE],
- !(op & SLJIT_SET_E) ? "" : ".e", !(op & SLJIT_KEEP_FLAGS) ? "" : ".k");
+ fprintf(compiler->verbose, " flags %s%s%s%s, ",
+ !(op & SLJIT_SET_E) ? "" : ".e", !(op & SLJIT_KEEP_FLAGS) ? "" : ".k",
+ GET_OPCODE(op) < SLJIT_OP2_BASE ? "mov" : op2_names[GET_OPCODE(op) - SLJIT_OP2_BASE],
+ GET_OPCODE(op) < SLJIT_OP2_BASE ? op1_names[GET_OPCODE(op) - SLJIT_OP1_BASE] : ((op & SLJIT_I32_OP) ? "32" : ""));
sljit_verbose_param(compiler, dst, dstw);
if (src != SLJIT_UNUSED) {
fprintf(compiler->verbose, ", ");
sljit_verbose_param(compiler, src, srcw);
}
- fprintf(compiler->verbose, ", %s%s\n", JUMP_PREFIX(type), jump_names[type & 0xff]);
+ fprintf(compiler->verbose, ", %s%s\n", jump_names[type & 0xff], JUMP_POSTFIX(type));
}
#endif
CHECK_RETURN_OK;
}
-static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_get_local_base(struct sljit_compiler *compiler, sljit_si dst, sljit_sw dstw, sljit_sw offset)
+static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_get_local_base(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw offset)
{
SLJIT_UNUSED_ARG(offset);
@@ -1462,7 +1478,7 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_get_local_base(struct sljit_co
CHECK_RETURN_OK;
}
-static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_const(struct sljit_compiler *compiler, sljit_si dst, sljit_sw dstw, sljit_sw init_value)
+static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw init_value)
{
SLJIT_UNUSED_ARG(init_value);
@@ -1482,31 +1498,31 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_const(struct sljit_compil
#endif /* SLJIT_ARGUMENT_CHECKS || SLJIT_VERBOSE */
#define SELECT_FOP1_OPERATION_WITH_CHECKS(compiler, op, dst, dstw, src, srcw) \
- SLJIT_COMPILE_ASSERT(!(SLJIT_CONVW_FROMD & 0x1) && !(SLJIT_CONVD_FROMW & 0x1), \
+ SLJIT_COMPILE_ASSERT(!(SLJIT_CONV_SW_FROM_F64 & 0x1) && !(SLJIT_CONV_F64_FROM_SW & 0x1), \
invalid_float_opcodes); \
- if (GET_OPCODE(op) >= SLJIT_CONVW_FROMD && GET_OPCODE(op) <= SLJIT_DCMP) { \
- if (GET_OPCODE(op) == SLJIT_DCMP) { \
+ if (GET_OPCODE(op) >= SLJIT_CONV_SW_FROM_F64 && GET_OPCODE(op) <= SLJIT_CMP_F64) { \
+ if (GET_OPCODE(op) == SLJIT_CMP_F64) { \
CHECK(check_sljit_emit_fop1_cmp(compiler, op, dst, dstw, src, srcw)); \
ADJUST_LOCAL_OFFSET(dst, dstw); \
ADJUST_LOCAL_OFFSET(src, srcw); \
return sljit_emit_fop1_cmp(compiler, op, dst, dstw, src, srcw); \
} \
- if ((GET_OPCODE(op) | 0x1) == SLJIT_CONVI_FROMD) { \
- CHECK(check_sljit_emit_fop1_convw_fromd(compiler, op, dst, dstw, src, srcw)); \
+ if ((GET_OPCODE(op) | 0x1) == SLJIT_CONV_S32_FROM_F64) { \
+ CHECK(check_sljit_emit_fop1_conv_sw_from_f64(compiler, op, dst, dstw, src, srcw)); \
ADJUST_LOCAL_OFFSET(dst, dstw); \
ADJUST_LOCAL_OFFSET(src, srcw); \
- return sljit_emit_fop1_convw_fromd(compiler, op, dst, dstw, src, srcw); \
+ return sljit_emit_fop1_conv_sw_from_f64(compiler, op, dst, dstw, src, srcw); \
} \
- CHECK(check_sljit_emit_fop1_convd_fromw(compiler, op, dst, dstw, src, srcw)); \
+ CHECK(check_sljit_emit_fop1_conv_f64_from_sw(compiler, op, dst, dstw, src, srcw)); \
ADJUST_LOCAL_OFFSET(dst, dstw); \
ADJUST_LOCAL_OFFSET(src, srcw); \
- return sljit_emit_fop1_convd_fromw(compiler, op, dst, dstw, src, srcw); \
+ return sljit_emit_fop1_conv_f64_from_sw(compiler, op, dst, dstw, src, srcw); \
} \
CHECK(check_sljit_emit_fop1(compiler, op, dst, dstw, src, srcw)); \
ADJUST_LOCAL_OFFSET(dst, dstw); \
ADJUST_LOCAL_OFFSET(src, srcw);
-static SLJIT_INLINE sljit_si emit_mov_before_return(struct sljit_compiler *compiler, sljit_si op, sljit_si src, sljit_sw srcw)
+static SLJIT_INLINE sljit_s32 emit_mov_before_return(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 src, sljit_sw srcw)
{
/* Return if don't need to do anything. */
if (op == SLJIT_UNUSED)
@@ -1517,7 +1533,7 @@ static SLJIT_INLINE sljit_si emit_mov_before_return(struct sljit_compiler *compi
if (src == SLJIT_RETURN_REG && (op == SLJIT_MOV || op == SLJIT_MOV_P))
return SLJIT_SUCCESS;
#else
- if (src == SLJIT_RETURN_REG && (op == SLJIT_MOV || op == SLJIT_MOV_UI || op == SLJIT_MOV_SI || op == SLJIT_MOV_P))
+ if (src == SLJIT_RETURN_REG && (op == SLJIT_MOV || op == SLJIT_MOV_U32 || op == SLJIT_MOV_S32 || op == SLJIT_MOV_P))
return SLJIT_SUCCESS;
#endif
@@ -1576,12 +1592,12 @@ static SLJIT_INLINE sljit_si emit_mov_before_return(struct sljit_compiler *compi
#if !(defined SLJIT_CONFIG_MIPS && SLJIT_CONFIG_MIPS)
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_cmp(struct sljit_compiler *compiler, sljit_si type,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_cmp(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
/* Default compare for most architectures. */
- sljit_si flags, tmp_src, condition;
+ sljit_s32 flags, tmp_src, condition;
sljit_sw tmp_srcw;
CHECK_ERROR_PTR();
@@ -1629,7 +1645,7 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_cmp(struct sljit_compiler
condition = SLJIT_SIG_GREATER_EQUAL;
break;
}
- type = condition | (type & (SLJIT_INT_OP | SLJIT_REWRITABLE_JUMP));
+ type = condition | (type & (SLJIT_I32_OP | SLJIT_REWRITABLE_JUMP));
tmp_src = src1;
src1 = src2;
src2 = tmp_src;
@@ -1649,7 +1665,7 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_cmp(struct sljit_compiler
|| (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
compiler->skip_checks = 1;
#endif
- PTR_FAIL_IF(sljit_emit_op2(compiler, SLJIT_SUB | flags | (type & SLJIT_INT_OP),
+ PTR_FAIL_IF(sljit_emit_op2(compiler, SLJIT_SUB | flags | (type & SLJIT_I32_OP),
SLJIT_UNUSED, 0, src1, src1w, src2, src2w));
#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) \
|| (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
@@ -1658,25 +1674,25 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_cmp(struct sljit_compiler
return sljit_emit_jump(compiler, condition | (type & SLJIT_REWRITABLE_JUMP));
}
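
The default lowering above reduces a conditional compare-and-jump to a flag-setting SLJIT_SUB followed by a plain conditional jump. As orientation for the renamed flags, a minimal caller-side sketch; it assumes an already initialized struct sljit_compiler *compiler (e.g. from sljit_create_compiler(NULL)) and the operand choices are illustrative only:

    /* Jump when the 32 bit value in R0 is signed-less-than 10; SLJIT_I32_OP
       selects 32 bit comparison semantics. */
    struct sljit_jump *less_jump = sljit_emit_cmp(compiler,
        SLJIT_SIG_LESS | SLJIT_I32_OP, SLJIT_R0, 0, SLJIT_IMM, 10);
    /* ... fall-through path ... */
    sljit_set_label(less_jump, sljit_emit_label(compiler));
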
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_fcmp(struct sljit_compiler *compiler, sljit_si type,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_fcmp(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
- sljit_si flags, condition;
+ sljit_s32 flags, condition;
CHECK_ERROR_PTR();
CHECK_PTR(check_sljit_emit_fcmp(compiler, type, src1, src1w, src2, src2w));
condition = type & 0xff;
- flags = (condition <= SLJIT_D_NOT_EQUAL) ? SLJIT_SET_E : SLJIT_SET_S;
- if (type & SLJIT_SINGLE_OP)
- flags |= SLJIT_SINGLE_OP;
+ flags = (condition <= SLJIT_NOT_EQUAL_F64) ? SLJIT_SET_E : SLJIT_SET_S;
+ if (type & SLJIT_F32_OP)
+ flags |= SLJIT_F32_OP;
#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) \
|| (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
compiler->skip_checks = 1;
#endif
- sljit_emit_fop1(compiler, SLJIT_DCMP | flags, src1, src1w, src2, src2w);
+ sljit_emit_fop1(compiler, SLJIT_CMP_F64 | flags, src1, src1w, src2, src2w);
#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) \
|| (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
@@ -1689,7 +1705,7 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_fcmp(struct sljit_compile
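
sljit_emit_fcmp is lowered the same way: a flag-setting SLJIT_CMP_F64 (with SLJIT_SET_E for the equality conditions, SLJIT_SET_S otherwise) followed by a conditional jump. A minimal caller-side sketch under the renamed float condition codes; register choices are illustrative assumptions and compiler is an initialized struct sljit_compiler*:

    /* Branch when the two f64 values in FR0 and FR1 compare equal. */
    struct sljit_jump *eq_jump = sljit_emit_fcmp(compiler,
        SLJIT_EQUAL_F64, SLJIT_FR0, 0, SLJIT_FR1, 0);
    /* ... not-equal path ... */
    sljit_set_label(eq_jump, sljit_emit_label(compiler));
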
#if !(defined SLJIT_CONFIG_X86 && SLJIT_CONFIG_X86)
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_get_local_base(struct sljit_compiler *compiler, sljit_si dst, sljit_sw dstw, sljit_sw offset)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_local_base(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw offset)
{
CHECK_ERROR();
CHECK(check_sljit_get_local_base(compiler, dst, dstw, offset));
@@ -1710,7 +1726,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_get_local_base(struct sljit_compiler *co
/* Empty function bodies for those machines, which are not (yet) supported. */
-SLJIT_API_FUNC_ATTRIBUTE SLJIT_CONST char* sljit_get_platform_name(void)
+SLJIT_API_FUNC_ATTRIBUTE const char* sljit_get_platform_name(void)
{
return "unsupported";
}
@@ -1727,7 +1743,7 @@ SLJIT_API_FUNC_ATTRIBUTE void sljit_free_compiler(struct sljit_compiler *compile
SLJIT_ASSERT_STOP();
}
-SLJIT_API_FUNC_ATTRIBUTE void* sljit_alloc_memory(struct sljit_compiler *compiler, sljit_si size)
+SLJIT_API_FUNC_ATTRIBUTE void* sljit_alloc_memory(struct sljit_compiler *compiler, sljit_s32 size)
{
SLJIT_UNUSED_ARG(compiler);
SLJIT_UNUSED_ARG(size);
@@ -1757,9 +1773,9 @@ SLJIT_API_FUNC_ATTRIBUTE void sljit_free_code(void* code)
SLJIT_ASSERT_STOP();
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compiler *compiler,
- sljit_si options, sljit_si args, sljit_si scratches, sljit_si saveds,
- sljit_si fscratches, sljit_si fsaveds, sljit_si local_size)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compiler,
+ sljit_s32 options, sljit_s32 args, sljit_s32 scratches, sljit_s32 saveds,
+ sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
{
SLJIT_UNUSED_ARG(compiler);
SLJIT_UNUSED_ARG(options);
@@ -1773,9 +1789,9 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compiler *compil
return SLJIT_ERR_UNSUPPORTED;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_set_context(struct sljit_compiler *compiler,
- sljit_si options, sljit_si args, sljit_si scratches, sljit_si saveds,
- sljit_si fscratches, sljit_si fsaveds, sljit_si local_size)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_set_context(struct sljit_compiler *compiler,
+ sljit_s32 options, sljit_s32 args, sljit_s32 scratches, sljit_s32 saveds,
+ sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
{
SLJIT_UNUSED_ARG(compiler);
SLJIT_UNUSED_ARG(options);
@@ -1789,7 +1805,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_set_context(struct sljit_compiler *compi
return SLJIT_ERR_UNSUPPORTED;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_return(struct sljit_compiler *compiler, sljit_si op, sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 src, sljit_sw srcw)
{
SLJIT_UNUSED_ARG(compiler);
SLJIT_UNUSED_ARG(op);
@@ -1799,7 +1815,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_return(struct sljit_compiler *compi
return SLJIT_ERR_UNSUPPORTED;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_si dst, sljit_sw dstw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
{
SLJIT_UNUSED_ARG(compiler);
SLJIT_UNUSED_ARG(dst);
@@ -1808,7 +1824,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_enter(struct sljit_compiler *c
return SLJIT_ERR_UNSUPPORTED;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_return(struct sljit_compiler *compiler, sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_return(struct sljit_compiler *compiler, sljit_s32 src, sljit_sw srcw)
{
SLJIT_UNUSED_ARG(compiler);
SLJIT_UNUSED_ARG(src);
@@ -1817,7 +1833,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_return(struct sljit_compiler *
return SLJIT_ERR_UNSUPPORTED;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op0(struct sljit_compiler *compiler, sljit_si op)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op0(struct sljit_compiler *compiler, sljit_s32 op)
{
SLJIT_UNUSED_ARG(compiler);
SLJIT_UNUSED_ARG(op);
@@ -1825,9 +1841,9 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op0(struct sljit_compiler *compiler
return SLJIT_ERR_UNSUPPORTED;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op1(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
{
SLJIT_UNUSED_ARG(compiler);
SLJIT_UNUSED_ARG(op);
@@ -1839,10 +1855,10 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op1(struct sljit_compiler *compiler
return SLJIT_ERR_UNSUPPORTED;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op2(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
SLJIT_UNUSED_ARG(compiler);
SLJIT_UNUSED_ARG(op);
@@ -1856,14 +1872,14 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op2(struct sljit_compiler *compiler
return SLJIT_ERR_UNSUPPORTED;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_get_register_index(sljit_si reg)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 reg)
{
SLJIT_ASSERT_STOP();
return reg;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_custom(struct sljit_compiler *compiler,
- void *instruction, sljit_si size)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *compiler,
+ void *instruction, sljit_s32 size)
{
SLJIT_UNUSED_ARG(compiler);
SLJIT_UNUSED_ARG(instruction);
@@ -1872,15 +1888,15 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_custom(struct sljit_compiler *co
return SLJIT_ERR_UNSUPPORTED;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_is_fpu_available(void)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_is_fpu_available(void)
{
SLJIT_ASSERT_STOP();
return 0;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop1(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
{
SLJIT_UNUSED_ARG(compiler);
SLJIT_UNUSED_ARG(op);
@@ -1892,10 +1908,10 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop1(struct sljit_compiler *compile
return SLJIT_ERR_UNSUPPORTED;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop2(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
SLJIT_UNUSED_ARG(compiler);
SLJIT_UNUSED_ARG(op);
@@ -1916,7 +1932,7 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emit_label(struct sljit_compi
return NULL;
}
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, sljit_si type)
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, sljit_s32 type)
{
SLJIT_UNUSED_ARG(compiler);
SLJIT_UNUSED_ARG(type);
@@ -1924,9 +1940,9 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compile
return NULL;
}
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_cmp(struct sljit_compiler *compiler, sljit_si type,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_cmp(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
SLJIT_UNUSED_ARG(compiler);
SLJIT_UNUSED_ARG(type);
@@ -1938,9 +1954,9 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_cmp(struct sljit_compiler
return NULL;
}
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_fcmp(struct sljit_compiler *compiler, sljit_si type,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_fcmp(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
SLJIT_UNUSED_ARG(compiler);
SLJIT_UNUSED_ARG(type);
@@ -1966,7 +1982,7 @@ SLJIT_API_FUNC_ATTRIBUTE void sljit_set_target(struct sljit_jump *jump, sljit_uw
SLJIT_ASSERT_STOP();
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_ijump(struct sljit_compiler *compiler, sljit_si type, sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 src, sljit_sw srcw)
{
SLJIT_UNUSED_ARG(compiler);
SLJIT_UNUSED_ARG(type);
@@ -1976,10 +1992,10 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_ijump(struct sljit_compiler *compil
return SLJIT_ERR_UNSUPPORTED;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw,
- sljit_si type)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw,
+ sljit_s32 type)
{
SLJIT_UNUSED_ARG(compiler);
SLJIT_UNUSED_ARG(op);
@@ -1992,7 +2008,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_flags(struct sljit_compiler *com
return SLJIT_ERR_UNSUPPORTED;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_get_local_base(struct sljit_compiler *compiler, sljit_si dst, sljit_sw dstw, sljit_sw offset)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_local_base(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw offset)
{
SLJIT_UNUSED_ARG(compiler);
SLJIT_UNUSED_ARG(dst);
@@ -2002,7 +2018,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_get_local_base(struct sljit_compiler *co
return SLJIT_ERR_UNSUPPORTED;
}
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_si dst, sljit_sw dstw, sljit_sw initval)
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw initval)
{
SLJIT_UNUSED_ARG(compiler);
SLJIT_UNUSED_ARG(dst);
diff --git a/src/3rdparty/pcre/sljit/sljitLir.h b/src/3rdparty/pcre/sljit/sljitLir.h
index 2e2e9ac09c..df69b8656f 100644
--- a/src/3rdparty/pcre/sljit/sljitLir.h
+++ b/src/3rdparty/pcre/sljit/sljitLir.h
@@ -226,7 +226,7 @@ of sljitConfigInternal.h */
/* Floating point registers */
/* --------------------------------------------------------------------- */
-/* Each floating point register can store a double or single precision
+/* Each floating point register can store a 32 or a 64 bit precision
   value. The FR and FS register sets overlap in the same way as the R
and S register sets. See above. */
@@ -271,7 +271,7 @@ struct sljit_memory_fragment {
struct sljit_memory_fragment *next;
sljit_uw used_size;
/* Must be aligned to sljit_sw. */
- sljit_ub memory[1];
+ sljit_u8 memory[1];
};
struct sljit_label {
@@ -297,8 +297,8 @@ struct sljit_const {
};
struct sljit_compiler {
- sljit_si error;
- sljit_si options;
+ sljit_s32 error;
+ sljit_s32 options;
struct sljit_label *labels;
struct sljit_jump *jumps;
@@ -312,36 +312,36 @@ struct sljit_compiler {
struct sljit_memory_fragment *abuf;
/* Used scratch registers. */
- sljit_si scratches;
+ sljit_s32 scratches;
/* Used saved registers. */
- sljit_si saveds;
+ sljit_s32 saveds;
/* Used float scratch registers. */
- sljit_si fscratches;
+ sljit_s32 fscratches;
/* Used float saved registers. */
- sljit_si fsaveds;
+ sljit_s32 fsaveds;
/* Local stack size. */
- sljit_si local_size;
+ sljit_s32 local_size;
/* Code size. */
sljit_uw size;
/* For statistical purposes. */
sljit_uw executable_size;
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- sljit_si args;
+ sljit_s32 args;
#endif
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- sljit_si mode32;
+ sljit_s32 mode32;
#endif
#if (defined SLJIT_CONFIG_X86 && SLJIT_CONFIG_X86)
- sljit_si flags_saved;
+ sljit_s32 flags_saved;
#endif
#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
/* Constant pool handling. */
sljit_uw *cpool;
- sljit_ub *cpool_unique;
+ sljit_u8 *cpool_unique;
sljit_uw cpool_diff;
sljit_uw cpool_fill;
/* Other members. */
@@ -352,40 +352,40 @@ struct sljit_compiler {
#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5) || (defined SLJIT_CONFIG_ARM_V7 && SLJIT_CONFIG_ARM_V7)
/* Temporary fields. */
sljit_uw shift_imm;
- sljit_si cache_arg;
+ sljit_s32 cache_arg;
sljit_sw cache_argw;
#endif
#if (defined SLJIT_CONFIG_ARM_THUMB2 && SLJIT_CONFIG_ARM_THUMB2)
- sljit_si cache_arg;
+ sljit_s32 cache_arg;
sljit_sw cache_argw;
#endif
#if (defined SLJIT_CONFIG_ARM_64 && SLJIT_CONFIG_ARM_64)
- sljit_si cache_arg;
+ sljit_s32 cache_arg;
sljit_sw cache_argw;
#endif
#if (defined SLJIT_CONFIG_PPC && SLJIT_CONFIG_PPC)
sljit_sw imm;
- sljit_si cache_arg;
+ sljit_s32 cache_arg;
sljit_sw cache_argw;
#endif
#if (defined SLJIT_CONFIG_MIPS && SLJIT_CONFIG_MIPS)
- sljit_si delay_slot;
- sljit_si cache_arg;
+ sljit_s32 delay_slot;
+ sljit_s32 cache_arg;
sljit_sw cache_argw;
#endif
#if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32)
- sljit_si delay_slot;
- sljit_si cache_arg;
+ sljit_s32 delay_slot;
+ sljit_s32 cache_arg;
sljit_sw cache_argw;
#endif
#if (defined SLJIT_CONFIG_TILEGX && SLJIT_CONFIG_TILEGX)
- sljit_si cache_arg;
+ sljit_s32 cache_arg;
sljit_sw cache_argw;
#endif
@@ -396,13 +396,13 @@ struct sljit_compiler {
#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS) \
|| (defined SLJIT_DEBUG && SLJIT_DEBUG)
/* Local size passed to the functions. */
- sljit_si logical_local_size;
+ sljit_s32 logical_local_size;
#endif
#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS) \
|| (defined SLJIT_DEBUG && SLJIT_DEBUG) \
|| (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
- sljit_si skip_checks;
+ sljit_s32 skip_checks;
#endif
};
@@ -427,7 +427,7 @@ SLJIT_API_FUNC_ATTRIBUTE void sljit_free_compiler(struct sljit_compiler *compile
error code. Thus there is no need for checking the error after every
call, it is enough to do it before the code is compiled. Removing
these checks increases the performance of the compiling process. */
-static SLJIT_INLINE sljit_si sljit_get_compiler_error(struct sljit_compiler *compiler) { return compiler->error; }
+static SLJIT_INLINE sljit_s32 sljit_get_compiler_error(struct sljit_compiler *compiler) { return compiler->error; }
/* Sets the compiler error code to SLJIT_ERR_ALLOC_FAILED except
if an error was detected before. After the error code is set
@@ -448,7 +448,7 @@ SLJIT_API_FUNC_ATTRIBUTE void sljit_set_compiler_memory_error(struct sljit_compi
indicate that there is no more memory (does not set the current error code
of the compiler to out-of-memory status).
*/
-SLJIT_API_FUNC_ATTRIBUTE void* sljit_alloc_memory(struct sljit_compiler *compiler, sljit_si size);
+SLJIT_API_FUNC_ATTRIBUTE void* sljit_alloc_memory(struct sljit_compiler *compiler, sljit_s32 size);
#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
/* Passing NULL disables verbose. */
@@ -518,9 +518,9 @@ offset 0 is aligned to sljit_d. Otherwise it is aligned to sljit_uw. */
/* The local_size must be >= 0 and <= SLJIT_MAX_LOCAL_SIZE. */
#define SLJIT_MAX_LOCAL_SIZE 65536
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compiler *compiler,
- sljit_si options, sljit_si args, sljit_si scratches, sljit_si saveds,
- sljit_si fscratches, sljit_si fsaveds, sljit_si local_size);
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compiler,
+ sljit_s32 options, sljit_s32 args, sljit_s32 scratches, sljit_s32 saveds,
+ sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size);
/* The machine code has a context (which contains the local stack space size,
   number of used registers, etc.) which is initialized by sljit_emit_enter. Several
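
As a quick orientation for the sljit_s32 based signature above, a sketch of a prologue/epilogue pair; the register counts and the zero local size are assumptions chosen for the example, and compiler is an initialized struct sljit_compiler*:

    /* One machine-word argument, two scratch and one saved GP register,
       no float registers, no local stack space. */
    sljit_emit_enter(compiler, 0, 1, 2, 1, 0, 0, 0);
    /* ... function body ... */
    sljit_emit_return(compiler, SLJIT_MOV, SLJIT_R0, 0);
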
@@ -532,9 +532,9 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compiler *compil
Note: every call of sljit_emit_enter and sljit_set_context overwrites
the previous context. */
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_set_context(struct sljit_compiler *compiler,
- sljit_si options, sljit_si args, sljit_si scratches, sljit_si saveds,
- sljit_si fscratches, sljit_si fsaveds, sljit_si local_size);
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_set_context(struct sljit_compiler *compiler,
+ sljit_s32 options, sljit_s32 args, sljit_s32 scratches, sljit_s32 saveds,
+ sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size);
/* Return from machine code. The op argument can be SLJIT_UNUSED which means the
function does not return with anything or any opcode between SLJIT_MOV and
@@ -542,8 +542,8 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_set_context(struct sljit_compiler *compi
is SLJIT_UNUSED, otherwise see below the description about source and
destination arguments. */
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_return(struct sljit_compiler *compiler, sljit_si op,
- sljit_si src, sljit_sw srcw);
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 src, sljit_sw srcw);
/* Fast calling mechanism for utility functions (see SLJIT_FAST_CALL). All registers and
   even the stack frame are passed to the callee. The return address is preserved in
@@ -560,8 +560,8 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_return(struct sljit_compiler *compi
/* Note: although sljit_emit_fast_return could be replaced by an ijump, it is not suggested,
since many architectures do clever branch prediction on call / return instruction pairs. */
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_si dst, sljit_sw dstw);
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_return(struct sljit_compiler *compiler, sljit_si src, sljit_sw srcw);
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw);
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_return(struct sljit_compiler *compiler, sljit_s32 src, sljit_sw srcw);
/*
Source and destination values for arithmetical instructions
@@ -624,31 +624,29 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_return(struct sljit_compiler *
#define SLJIT_MEM2(r1, r2) (SLJIT_MEM | (r1) | ((r2) << 8))
#define SLJIT_IMM 0x40
-/* Set 32 bit operation mode (I) on 64 bit CPUs. The flag is totally ignored on
- 32 bit CPUs. If this flag is set for an arithmetic operation, it uses only the
- lower 32 bit of the input register(s), and set the CPU status flags according
- to the 32 bit result. The higher 32 bits are undefined for both the input and
- output. However, the CPU might not ignore those higher 32 bits, like MIPS, which
- expects it to be the sign extension of the lower 32 bit. All 32 bit operations
- are undefined, if this condition is not fulfilled. Therefore, when SLJIT_INT_OP
- is specified, all register arguments must be the result of other operations with
- the same SLJIT_INT_OP flag. In other words, although a register can hold either
- a 64 or 32 bit value, these values cannot be mixed. The only exceptions are
- SLJIT_IMOV and SLJIT_IMOVU (SLJIT_MOV_SI/SLJIT_MOVU_SI with SLJIT_INT_OP flag)
- which can convert any source argument to SLJIT_INT_OP compatible result. This
- conversion might be unnecessary on some CPUs like x86-64, since the upper 32
- bit is always ignored. In this case SLJIT is clever enough to not generate any
- instructions if the source and destination operands are the same registers.
- Affects sljit_emit_op0, sljit_emit_op1 and sljit_emit_op2. */
-#define SLJIT_INT_OP 0x100
-
-/* Single precision mode (SP). This flag is similar to SLJIT_INT_OP, just
+/* Set 32 bit operation mode (I) on 64 bit CPUs. This flag is ignored on 32
+ bit CPUs. When this flag is set for an arithmetic operation, only the
+ lower 32 bit of the input register(s) are used, and the CPU status flags
+ are set according to the 32 bit result. Although the higher 32 bit of
+ the input and the result registers are not defined by SLJIT, it might be
+ defined by the CPU architecture (e.g. MIPS). To satisfy these requirements
+ all source registers must be computed by operations where this flag is
+ also set. In other words 32 and 64 bit arithmetic operations cannot be
+ mixed. The only exception is SLJIT_IMOV and SLJIT_IMOVU whose source
+ register can hold any 32 or 64 bit value. This source register is
+ converted to a 32 bit compatible format. SLJIT does not generate any
+ instructions on certain CPUs (e.g. on x86 and ARM) if the source and
+ destination operands are the same registers. Affects sljit_emit_op0,
+ sljit_emit_op1 and sljit_emit_op2. */
+#define SLJIT_I32_OP 0x100
+
+/* F32 precision mode (SP). This flag is similar to SLJIT_I32_OP, just
it applies to floating point registers (it is even the same bit). When
- this flag is passed, the CPU performs single precision floating point
- operations. Similar to SLJIT_INT_OP, all register arguments must be the
- result of other floating point operations with this flag. Affects
+ this flag is passed, the CPU performs 32 bit floating point operations.
+ Similar to SLJIT_I32_OP, all register arguments must be computed by
+ floating point operations where this flag is also set. Affects
sljit_emit_fop1, sljit_emit_fop2 and sljit_emit_fcmp. */
-#define SLJIT_SINGLE_OP 0x100
+#define SLJIT_F32_OP 0x100
/* Common CPU status flags for all architectures (x86, ARM, PPC)
- carry flag
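
To make the mixing rule above concrete, a small sketch assuming an initialized struct sljit_compiler *compiler (operand choices are illustrative): the value is first brought into 32 bit form with SLJIT_MOV32, so the following SLJIT_I32_OP arithmetic only ever sees 32 bit computed inputs.

    /* Bring R1 into 32 bit form in R0, then perform a 32 bit add. */
    sljit_emit_op1(compiler, SLJIT_MOV32, SLJIT_R0, 0, SLJIT_R1, 0);
    sljit_emit_op2(compiler, SLJIT_ADD | SLJIT_I32_OP,
        SLJIT_R0, 0, SLJIT_R0, 0, SLJIT_IMM, 1);
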
@@ -697,43 +695,41 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_return(struct sljit_compiler *
/* Flags: - (may destroy flags)
Unsigned multiplication of SLJIT_R0 and SLJIT_R1.
Result is placed into SLJIT_R1:SLJIT_R0 (high:low) word */
-#define SLJIT_LUMUL (SLJIT_OP0_BASE + 2)
+#define SLJIT_LMUL_UW (SLJIT_OP0_BASE + 2)
/* Flags: - (may destroy flags)
Signed multiplication of SLJIT_R0 and SLJIT_R1.
Result is placed into SLJIT_R1:SLJIT_R0 (high:low) word */
-#define SLJIT_LSMUL (SLJIT_OP0_BASE + 3)
+#define SLJIT_LMUL_SW (SLJIT_OP0_BASE + 3)
/* Flags: I - (may destroy flags)
Unsigned divide of the value in SLJIT_R0 by the value in SLJIT_R1.
The result is placed into SLJIT_R0 and the remainder into SLJIT_R1.
Note: if SLJIT_R1 is 0, the behaviour is undefined. */
-#define SLJIT_UDIVMOD (SLJIT_OP0_BASE + 4)
-#define SLJIT_IUDIVMOD (SLJIT_UDIVMOD | SLJIT_INT_OP)
+#define SLJIT_DIVMOD_UW (SLJIT_OP0_BASE + 4)
+#define SLJIT_DIVMOD_U32 (SLJIT_DIVMOD_UW | SLJIT_I32_OP)
/* Flags: I - (may destroy flags)
Signed divide of the value in SLJIT_R0 by the value in SLJIT_R1.
The result is placed into SLJIT_R0 and the remainder into SLJIT_R1.
Note: if SLJIT_R1 is 0, the behaviour is undefined.
Note: if SLJIT_R1 is -1 and SLJIT_R0 is integer min (0x800..00),
the behaviour is undefined. */
-#define SLJIT_SDIVMOD (SLJIT_OP0_BASE + 5)
-#define SLJIT_ISDIVMOD (SLJIT_SDIVMOD | SLJIT_INT_OP)
+#define SLJIT_DIVMOD_SW (SLJIT_OP0_BASE + 5)
+#define SLJIT_DIVMOD_S32 (SLJIT_DIVMOD_SW | SLJIT_I32_OP)
/* Flags: I - (may destroy flags)
Unsigned divide of the value in SLJIT_R0 by the value in SLJIT_R1.
The result is placed into SLJIT_R0. SLJIT_R1 preserves its value.
- Note: if SLJIT_R1 is 0, the behaviour is undefined.
- Note: SLJIT_SDIV is single precision divide. */
-#define SLJIT_UDIVI (SLJIT_OP0_BASE + 6)
-#define SLJIT_IUDIVI (SLJIT_UDIVI | SLJIT_INT_OP)
+ Note: if SLJIT_R1 is 0, the behaviour is undefined. */
+#define SLJIT_DIV_UW (SLJIT_OP0_BASE + 6)
+#define SLJIT_DIV_U32 (SLJIT_DIV_UW | SLJIT_I32_OP)
/* Flags: I - (may destroy flags)
Signed divide of the value in SLJIT_R0 by the value in SLJIT_R1.
The result is placed into SLJIT_R0. SLJIT_R1 preserves its value.
Note: if SLJIT_R1 is 0, the behaviour is undefined.
Note: if SLJIT_R1 is -1 and SLJIT_R0 is integer min (0x800..00),
- the behaviour is undefined.
- Note: SLJIT_SDIV is single precision divide. */
-#define SLJIT_SDIVI (SLJIT_OP0_BASE + 7)
-#define SLJIT_ISDIVI (SLJIT_SDIVI | SLJIT_INT_OP)
+ the behaviour is undefined. */
+#define SLJIT_DIV_SW (SLJIT_OP0_BASE + 7)
+#define SLJIT_DIV_S32 (SLJIT_DIV_SW | SLJIT_I32_OP)
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op0(struct sljit_compiler *compiler, sljit_si op);
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op0(struct sljit_compiler *compiler, sljit_s32 op);
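A minimal usage sketch for the renamed zero-operand opcodes (assuming a valid compiler; the register conventions are the ones documented in the comments above):

    /* Unsigned divide: dividend in R0, divisor in R1; quotient -> R0, remainder -> R1. */
    sljit_emit_op1(compiler, SLJIT_MOV, SLJIT_R0, 0, SLJIT_IMM, 100);
    sljit_emit_op1(compiler, SLJIT_MOV, SLJIT_R1, 0, SLJIT_IMM, 7);
    sljit_emit_op0(compiler, SLJIT_DIVMOD_UW);  /* R0 = 14, R1 = 2 afterwards */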
/* Starting index of opcodes for sljit_emit_op1. */
#define SLJIT_OP1_BASE 32
@@ -752,188 +748,188 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op0(struct sljit_compiler *compiler
/* Flags: - (never set any flags) */
#define SLJIT_MOV (SLJIT_OP1_BASE + 0)
/* Flags: I - (never set any flags) */
-#define SLJIT_MOV_UB (SLJIT_OP1_BASE + 1)
-#define SLJIT_IMOV_UB (SLJIT_MOV_UB | SLJIT_INT_OP)
+#define SLJIT_MOV_U8 (SLJIT_OP1_BASE + 1)
+#define SLJIT_MOV32_U8 (SLJIT_MOV_U8 | SLJIT_I32_OP)
/* Flags: I - (never set any flags) */
-#define SLJIT_MOV_SB (SLJIT_OP1_BASE + 2)
-#define SLJIT_IMOV_SB (SLJIT_MOV_SB | SLJIT_INT_OP)
+#define SLJIT_MOV_S8 (SLJIT_OP1_BASE + 2)
+#define SLJIT_MOV32_S8 (SLJIT_MOV_S8 | SLJIT_I32_OP)
/* Flags: I - (never set any flags) */
-#define SLJIT_MOV_UH (SLJIT_OP1_BASE + 3)
-#define SLJIT_IMOV_UH (SLJIT_MOV_UH | SLJIT_INT_OP)
+#define SLJIT_MOV_U16 (SLJIT_OP1_BASE + 3)
+#define SLJIT_MOV32_U16 (SLJIT_MOV_U16 | SLJIT_I32_OP)
/* Flags: I - (never set any flags) */
-#define SLJIT_MOV_SH (SLJIT_OP1_BASE + 4)
-#define SLJIT_IMOV_SH (SLJIT_MOV_SH | SLJIT_INT_OP)
+#define SLJIT_MOV_S16 (SLJIT_OP1_BASE + 4)
+#define SLJIT_MOV32_S16 (SLJIT_MOV_S16 | SLJIT_I32_OP)
/* Flags: I - (never set any flags)
- Note: see SLJIT_INT_OP for further details. */
-#define SLJIT_MOV_UI (SLJIT_OP1_BASE + 5)
-/* No SLJIT_INT_OP form, since it is the same as SLJIT_IMOV. */
+ Note: no SLJIT_MOV32_U32 form, since it is the same as SLJIT_MOV32 */
+#define SLJIT_MOV_U32 (SLJIT_OP1_BASE + 5)
/* Flags: I - (never set any flags)
- Note: see SLJIT_INT_OP for further details. */
-#define SLJIT_MOV_SI (SLJIT_OP1_BASE + 6)
-#define SLJIT_IMOV (SLJIT_MOV_SI | SLJIT_INT_OP)
+ Note: no SLJIT_MOV32_S32 form, since it is the same as SLJIT_MOV32 */
+#define SLJIT_MOV_S32 (SLJIT_OP1_BASE + 6)
+/* Flags: I - (never set any flags) */
+#define SLJIT_MOV32 (SLJIT_MOV_S32 | SLJIT_I32_OP)
/* Flags: - (never set any flags) */
#define SLJIT_MOV_P (SLJIT_OP1_BASE + 7)
/* Flags: - (never set any flags) */
#define SLJIT_MOVU (SLJIT_OP1_BASE + 8)
/* Flags: I - (never set any flags) */
-#define SLJIT_MOVU_UB (SLJIT_OP1_BASE + 9)
-#define SLJIT_IMOVU_UB (SLJIT_MOVU_UB | SLJIT_INT_OP)
+#define SLJIT_MOVU_U8 (SLJIT_OP1_BASE + 9)
+#define SLJIT_MOVU32_U8 (SLJIT_MOVU_U8 | SLJIT_I32_OP)
/* Flags: I - (never set any flags) */
-#define SLJIT_MOVU_SB (SLJIT_OP1_BASE + 10)
-#define SLJIT_IMOVU_SB (SLJIT_MOVU_SB | SLJIT_INT_OP)
+#define SLJIT_MOVU_S8 (SLJIT_OP1_BASE + 10)
+#define SLJIT_MOVU32_S8 (SLJIT_MOVU_S8 | SLJIT_I32_OP)
/* Flags: I - (never set any flags) */
-#define SLJIT_MOVU_UH (SLJIT_OP1_BASE + 11)
-#define SLJIT_IMOVU_UH (SLJIT_MOVU_UH | SLJIT_INT_OP)
+#define SLJIT_MOVU_U16 (SLJIT_OP1_BASE + 11)
+#define SLJIT_MOVU32_U16 (SLJIT_MOVU_U16 | SLJIT_I32_OP)
/* Flags: I - (never set any flags) */
-#define SLJIT_MOVU_SH (SLJIT_OP1_BASE + 12)
-#define SLJIT_IMOVU_SH (SLJIT_MOVU_SH | SLJIT_INT_OP)
+#define SLJIT_MOVU_S16 (SLJIT_OP1_BASE + 12)
+#define SLJIT_MOVU32_S16 (SLJIT_MOVU_S16 | SLJIT_I32_OP)
/* Flags: I - (never set any flags)
- Note: see SLJIT_INT_OP for further details. */
-#define SLJIT_MOVU_UI (SLJIT_OP1_BASE + 13)
-/* No SLJIT_INT_OP form, since it is the same as SLJIT_IMOVU. */
+ Note: no SLJIT_MOVU32_U32 form, since it is the same as SLJIT_MOVU32 */
+#define SLJIT_MOVU_U32 (SLJIT_OP1_BASE + 13)
/* Flags: I - (never set any flags)
- Note: see SLJIT_INT_OP for further details. */
-#define SLJIT_MOVU_SI (SLJIT_OP1_BASE + 14)
-#define SLJIT_IMOVU (SLJIT_MOVU_SI | SLJIT_INT_OP)
+ Note: no SLJIT_MOVU32_S32 form, since it is the same as SLJIT_MOVU32 */
+#define SLJIT_MOVU_S32 (SLJIT_OP1_BASE + 14)
+/* Flags: I - (never set any flags) */
+#define SLJIT_MOVU32 (SLJIT_MOVU_S32 | SLJIT_I32_OP)
/* Flags: - (never set any flags) */
#define SLJIT_MOVU_P (SLJIT_OP1_BASE + 15)
/* Flags: I | E | K */
#define SLJIT_NOT (SLJIT_OP1_BASE + 16)
-#define SLJIT_INOT (SLJIT_NOT | SLJIT_INT_OP)
+#define SLJIT_NOT32 (SLJIT_NOT | SLJIT_I32_OP)
/* Flags: I | E | O | K */
#define SLJIT_NEG (SLJIT_OP1_BASE + 17)
-#define SLJIT_INEG (SLJIT_NEG | SLJIT_INT_OP)
+#define SLJIT_NEG32 (SLJIT_NEG | SLJIT_I32_OP)
/* Count leading zeroes
Flags: I | E | K
Important note! Sparc 32 does not support K flag, since
the required popc instruction is introduced only in sparc 64. */
#define SLJIT_CLZ (SLJIT_OP1_BASE + 18)
-#define SLJIT_ICLZ (SLJIT_CLZ | SLJIT_INT_OP)
+#define SLJIT_CLZ32 (SLJIT_CLZ | SLJIT_I32_OP)
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op1(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw);
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw);
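A short sketch of the renamed sized moves (assumptions: the compiler is valid and SLJIT_S0 holds a byte pointer); the U/S suffix selects zero versus sign extension when the source is narrower than a machine word:

    /* Zero-extend the byte at S0[0] into R0; sign-extend the byte at S0[1] into R1. */
    sljit_emit_op1(compiler, SLJIT_MOV_U8, SLJIT_R0, 0, SLJIT_MEM1(SLJIT_S0), 0);
    sljit_emit_op1(compiler, SLJIT_MOV_S8, SLJIT_R1, 0, SLJIT_MEM1(SLJIT_S0), 1);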
/* Starting index of opcodes for sljit_emit_op2. */
#define SLJIT_OP2_BASE 96
/* Flags: I | E | O | C | K */
#define SLJIT_ADD (SLJIT_OP2_BASE + 0)
-#define SLJIT_IADD (SLJIT_ADD | SLJIT_INT_OP)
+#define SLJIT_ADD32 (SLJIT_ADD | SLJIT_I32_OP)
/* Flags: I | C | K */
#define SLJIT_ADDC (SLJIT_OP2_BASE + 1)
-#define SLJIT_IADDC (SLJIT_ADDC | SLJIT_INT_OP)
+#define SLJIT_ADDC32 (SLJIT_ADDC | SLJIT_I32_OP)
/* Flags: I | E | U | S | O | C | K */
#define SLJIT_SUB (SLJIT_OP2_BASE + 2)
-#define SLJIT_ISUB (SLJIT_SUB | SLJIT_INT_OP)
+#define SLJIT_SUB32 (SLJIT_SUB | SLJIT_I32_OP)
/* Flags: I | C | K */
#define SLJIT_SUBC (SLJIT_OP2_BASE + 3)
-#define SLJIT_ISUBC (SLJIT_SUBC | SLJIT_INT_OP)
+#define SLJIT_SUBC32 (SLJIT_SUBC | SLJIT_I32_OP)
/* Note: integer mul
Flags: I | O (see SLJIT_C_MUL_*) | K */
#define SLJIT_MUL (SLJIT_OP2_BASE + 4)
-#define SLJIT_IMUL (SLJIT_MUL | SLJIT_INT_OP)
+#define SLJIT_MUL32 (SLJIT_MUL | SLJIT_I32_OP)
/* Flags: I | E | K */
#define SLJIT_AND (SLJIT_OP2_BASE + 5)
-#define SLJIT_IAND (SLJIT_AND | SLJIT_INT_OP)
+#define SLJIT_AND32 (SLJIT_AND | SLJIT_I32_OP)
/* Flags: I | E | K */
#define SLJIT_OR (SLJIT_OP2_BASE + 6)
-#define SLJIT_IOR (SLJIT_OR | SLJIT_INT_OP)
+#define SLJIT_OR32 (SLJIT_OR | SLJIT_I32_OP)
/* Flags: I | E | K */
#define SLJIT_XOR (SLJIT_OP2_BASE + 7)
-#define SLJIT_IXOR (SLJIT_XOR | SLJIT_INT_OP)
+#define SLJIT_XOR32 (SLJIT_XOR | SLJIT_I32_OP)
/* Flags: I | E | K
Let bit_length be the length of the shift operation: 32 or 64.
If src2 is immediate, src2w is masked by (bit_length - 1).
Otherwise, if the content of src2 is outside the range from 0
to bit_length - 1, the result is undefined. */
#define SLJIT_SHL (SLJIT_OP2_BASE + 8)
-#define SLJIT_ISHL (SLJIT_SHL | SLJIT_INT_OP)
+#define SLJIT_SHL32 (SLJIT_SHL | SLJIT_I32_OP)
/* Flags: I | E | K
Let bit_length be the length of the shift operation: 32 or 64.
If src2 is immediate, src2w is masked by (bit_length - 1).
Otherwise, if the content of src2 is outside the range from 0
to bit_length - 1, the result is undefined. */
#define SLJIT_LSHR (SLJIT_OP2_BASE + 9)
-#define SLJIT_ILSHR (SLJIT_LSHR | SLJIT_INT_OP)
+#define SLJIT_LSHR32 (SLJIT_LSHR | SLJIT_I32_OP)
/* Flags: I | E | K
Let bit_length be the length of the shift operation: 32 or 64.
If src2 is immediate, src2w is masked by (bit_length - 1).
Otherwise, if the content of src2 is outside the range from 0
to bit_length - 1, the result is undefined. */
#define SLJIT_ASHR (SLJIT_OP2_BASE + 10)
-#define SLJIT_IASHR (SLJIT_ASHR | SLJIT_INT_OP)
+#define SLJIT_ASHR32 (SLJIT_ASHR | SLJIT_I32_OP)
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op2(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w);
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w);
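The immediate masking rule for the shift opcodes can be seen in a small sketch (compiler assumed valid; the out-of-range constant is deliberate):

    /* On a 64 bit target the immediate is masked to 65 & 63 == 1, so this
       shifts R0 left by one; with SLJIT_SHL32 it would be 65 & 31 == 1. */
    sljit_emit_op2(compiler, SLJIT_SHL, SLJIT_R0, 0, SLJIT_R0, 0, SLJIT_IMM, 65);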
/* Returns with non-zero if fpu is available. */
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_is_fpu_available(void);
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_is_fpu_available(void);
/* Starting index of opcodes for sljit_emit_fop1. */
#define SLJIT_FOP1_BASE 128
/* Flags: SP - (never set any flags) */
-#define SLJIT_DMOV (SLJIT_FOP1_BASE + 0)
-#define SLJIT_SMOV (SLJIT_DMOV | SLJIT_SINGLE_OP)
+#define SLJIT_MOV_F64 (SLJIT_FOP1_BASE + 0)
+#define SLJIT_MOV_F32 (SLJIT_MOV_F64 | SLJIT_F32_OP)
/* Convert opcodes: CONV[DST_TYPE].FROM[SRC_TYPE]
SRC/DST TYPE can be: D - double, S - single, W - signed word, I - signed int
Rounding mode when the destination is W or I: round towards zero. */
/* Flags: SP - (never set any flags) */
-#define SLJIT_CONVD_FROMS (SLJIT_FOP1_BASE + 1)
-#define SLJIT_CONVS_FROMD (SLJIT_CONVD_FROMS | SLJIT_SINGLE_OP)
+#define SLJIT_CONV_F64_FROM_F32 (SLJIT_FOP1_BASE + 1)
+#define SLJIT_CONV_F32_FROM_F64 (SLJIT_CONV_F64_FROM_F32 | SLJIT_F32_OP)
/* Flags: SP - (never set any flags) */
-#define SLJIT_CONVW_FROMD (SLJIT_FOP1_BASE + 2)
-#define SLJIT_CONVW_FROMS (SLJIT_CONVW_FROMD | SLJIT_SINGLE_OP)
+#define SLJIT_CONV_SW_FROM_F64 (SLJIT_FOP1_BASE + 2)
+#define SLJIT_CONV_SW_FROM_F32 (SLJIT_CONV_SW_FROM_F64 | SLJIT_F32_OP)
/* Flags: SP - (never set any flags) */
-#define SLJIT_CONVI_FROMD (SLJIT_FOP1_BASE + 3)
-#define SLJIT_CONVI_FROMS (SLJIT_CONVI_FROMD | SLJIT_SINGLE_OP)
+#define SLJIT_CONV_S32_FROM_F64 (SLJIT_FOP1_BASE + 3)
+#define SLJIT_CONV_S32_FROM_F32 (SLJIT_CONV_S32_FROM_F64 | SLJIT_F32_OP)
/* Flags: SP - (never set any flags) */
-#define SLJIT_CONVD_FROMW (SLJIT_FOP1_BASE + 4)
-#define SLJIT_CONVS_FROMW (SLJIT_CONVD_FROMW | SLJIT_SINGLE_OP)
+#define SLJIT_CONV_F64_FROM_SW (SLJIT_FOP1_BASE + 4)
+#define SLJIT_CONV_F32_FROM_SW (SLJIT_CONV_F64_FROM_SW | SLJIT_F32_OP)
/* Flags: SP - (never set any flags) */
-#define SLJIT_CONVD_FROMI (SLJIT_FOP1_BASE + 5)
-#define SLJIT_CONVS_FROMI (SLJIT_CONVD_FROMI | SLJIT_SINGLE_OP)
+#define SLJIT_CONV_F64_FROM_S32 (SLJIT_FOP1_BASE + 5)
+#define SLJIT_CONV_F32_FROM_S32 (SLJIT_CONV_F64_FROM_S32 | SLJIT_F32_OP)
/* Note: dst is the left and src is the right operand for SLJIT_CMPD.
Note: NaN check is always performed. If SLJIT_C_FLOAT_UNORDERED flag
is set, the comparison result is unpredictable.
Flags: SP | E | S (see SLJIT_C_FLOAT_*) */
-#define SLJIT_DCMP (SLJIT_FOP1_BASE + 6)
-#define SLJIT_SCMP (SLJIT_DCMP | SLJIT_SINGLE_OP)
+#define SLJIT_CMP_F64 (SLJIT_FOP1_BASE + 6)
+#define SLJIT_CMP_F32 (SLJIT_CMP_F64 | SLJIT_F32_OP)
/* Flags: SP - (never set any flags) */
-#define SLJIT_DNEG (SLJIT_FOP1_BASE + 7)
-#define SLJIT_SNEG (SLJIT_DNEG | SLJIT_SINGLE_OP)
+#define SLJIT_NEG_F64 (SLJIT_FOP1_BASE + 7)
+#define SLJIT_NEG_F32 (SLJIT_NEG_F64 | SLJIT_F32_OP)
/* Flags: SP - (never set any flags) */
-#define SLJIT_DABS (SLJIT_FOP1_BASE + 8)
-#define SLJIT_SABS (SLJIT_DABS | SLJIT_SINGLE_OP)
+#define SLJIT_ABS_F64 (SLJIT_FOP1_BASE + 8)
+#define SLJIT_ABS_F32 (SLJIT_ABS_F64 | SLJIT_F32_OP)
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop1(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw);
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw);
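A sketch of the new conversion names (assumptions: the compiler is valid, SLJIT_FR0 holds a double and SLJIT_FR1 a 32 bit float); the destination type comes first and float-to-integer conversions round toward zero, as stated above:

    /* Truncate the double in FR0 to a signed machine word in R0. */
    sljit_emit_fop1(compiler, SLJIT_CONV_SW_FROM_F64, SLJIT_R0, 0, SLJIT_FR0, 0);
    /* Widen the 32 bit float in FR1 to a double in FR0. */
    sljit_emit_fop1(compiler, SLJIT_CONV_F64_FROM_F32, SLJIT_FR0, 0, SLJIT_FR1, 0);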
/* Starting index of opcodes for sljit_emit_fop2. */
#define SLJIT_FOP2_BASE 160
/* Flags: SP - (never set any flags) */
-#define SLJIT_DADD (SLJIT_FOP2_BASE + 0)
-#define SLJIT_SADD (SLJIT_DADD | SLJIT_SINGLE_OP)
+#define SLJIT_ADD_F64 (SLJIT_FOP2_BASE + 0)
+#define SLJIT_ADD_F32 (SLJIT_ADD_F64 | SLJIT_F32_OP)
/* Flags: SP - (never set any flags) */
-#define SLJIT_DSUB (SLJIT_FOP2_BASE + 1)
-#define SLJIT_SSUB (SLJIT_DSUB | SLJIT_SINGLE_OP)
+#define SLJIT_SUB_F64 (SLJIT_FOP2_BASE + 1)
+#define SLJIT_SUB_F32 (SLJIT_SUB_F64 | SLJIT_F32_OP)
/* Flags: SP - (never set any flags) */
-#define SLJIT_DMUL (SLJIT_FOP2_BASE + 2)
-#define SLJIT_SMUL (SLJIT_DMUL | SLJIT_SINGLE_OP)
+#define SLJIT_MUL_F64 (SLJIT_FOP2_BASE + 2)
+#define SLJIT_MUL_F32 (SLJIT_MUL_F64 | SLJIT_F32_OP)
/* Flags: SP - (never set any flags) */
-#define SLJIT_DDIV (SLJIT_FOP2_BASE + 3)
-#define SLJIT_SDIV (SLJIT_DDIV | SLJIT_SINGLE_OP)
+#define SLJIT_DIV_F64 (SLJIT_FOP2_BASE + 3)
+#define SLJIT_DIV_F32 (SLJIT_DIV_F64 | SLJIT_F32_OP)
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop2(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w);
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w);
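Binary floating point operations follow the same dst/src1/src2 shape as sljit_emit_op2; a hedged sketch (compiler valid, SLJIT_S0 assumed to point to an array of doubles):

    /* FR0 = FR0 + ((double *)S0)[0], in double precision. */
    sljit_emit_fop2(compiler, SLJIT_ADD_F64, SLJIT_FR0, 0,
        SLJIT_FR0, 0, SLJIT_MEM1(SLJIT_S0), 0);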
/* Label and jump instructions. */
@@ -943,58 +939,58 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emit_label(struct sljit_compi
/* Integer comparison types. */
#define SLJIT_EQUAL 0
-#define SLJIT_I_EQUAL (SLJIT_EQUAL | SLJIT_INT_OP)
+#define SLJIT_EQUAL32 (SLJIT_EQUAL | SLJIT_I32_OP)
#define SLJIT_ZERO 0
-#define SLJIT_I_ZERO (SLJIT_ZERO | SLJIT_INT_OP)
+#define SLJIT_ZERO32 (SLJIT_ZERO | SLJIT_I32_OP)
#define SLJIT_NOT_EQUAL 1
-#define SLJIT_I_NOT_EQUAL (SLJIT_NOT_EQUAL | SLJIT_INT_OP)
+#define SLJIT_NOT_EQUAL32 (SLJIT_NOT_EQUAL | SLJIT_I32_OP)
#define SLJIT_NOT_ZERO 1
-#define SLJIT_I_NOT_ZERO (SLJIT_NOT_ZERO | SLJIT_INT_OP)
+#define SLJIT_NOT_ZERO32 (SLJIT_NOT_ZERO | SLJIT_I32_OP)
#define SLJIT_LESS 2
-#define SLJIT_I_LESS (SLJIT_LESS | SLJIT_INT_OP)
+#define SLJIT_LESS32 (SLJIT_LESS | SLJIT_I32_OP)
#define SLJIT_GREATER_EQUAL 3
-#define SLJIT_I_GREATER_EQUAL (SLJIT_GREATER_EQUAL | SLJIT_INT_OP)
+#define SLJIT_GREATER_EQUAL32 (SLJIT_GREATER_EQUAL | SLJIT_I32_OP)
#define SLJIT_GREATER 4
-#define SLJIT_I_GREATER (SLJIT_GREATER | SLJIT_INT_OP)
+#define SLJIT_GREATER32 (SLJIT_GREATER | SLJIT_I32_OP)
#define SLJIT_LESS_EQUAL 5
-#define SLJIT_I_LESS_EQUAL (SLJIT_LESS_EQUAL | SLJIT_INT_OP)
+#define SLJIT_LESS_EQUAL32 (SLJIT_LESS_EQUAL | SLJIT_I32_OP)
#define SLJIT_SIG_LESS 6
-#define SLJIT_I_SIG_LESS (SLJIT_SIG_LESS | SLJIT_INT_OP)
+#define SLJIT_SIG_LESS32 (SLJIT_SIG_LESS | SLJIT_I32_OP)
#define SLJIT_SIG_GREATER_EQUAL 7
-#define SLJIT_I_SIG_GREATER_EQUAL (SLJIT_SIG_GREATER_EQUAL | SLJIT_INT_OP)
+#define SLJIT_SIG_GREATER_EQUAL32 (SLJIT_SIG_GREATER_EQUAL | SLJIT_I32_OP)
#define SLJIT_SIG_GREATER 8
-#define SLJIT_I_SIG_GREATER (SLJIT_SIG_GREATER | SLJIT_INT_OP)
+#define SLJIT_SIG_GREATER32 (SLJIT_SIG_GREATER | SLJIT_I32_OP)
#define SLJIT_SIG_LESS_EQUAL 9
-#define SLJIT_I_SIG_LESS_EQUAL (SLJIT_SIG_LESS_EQUAL | SLJIT_INT_OP)
+#define SLJIT_SIG_LESS_EQUAL32 (SLJIT_SIG_LESS_EQUAL | SLJIT_I32_OP)
#define SLJIT_OVERFLOW 10
-#define SLJIT_I_OVERFLOW (SLJIT_OVERFLOW | SLJIT_INT_OP)
+#define SLJIT_OVERFLOW32 (SLJIT_OVERFLOW | SLJIT_I32_OP)
#define SLJIT_NOT_OVERFLOW 11
-#define SLJIT_I_NOT_OVERFLOW (SLJIT_NOT_OVERFLOW | SLJIT_INT_OP)
+#define SLJIT_NOT_OVERFLOW32 (SLJIT_NOT_OVERFLOW | SLJIT_I32_OP)
#define SLJIT_MUL_OVERFLOW 12
-#define SLJIT_I_MUL_OVERFLOW (SLJIT_MUL_OVERFLOW | SLJIT_INT_OP)
+#define SLJIT_MUL_OVERFLOW32 (SLJIT_MUL_OVERFLOW | SLJIT_I32_OP)
#define SLJIT_MUL_NOT_OVERFLOW 13
-#define SLJIT_I_MUL_NOT_OVERFLOW (SLJIT_MUL_NOT_OVERFLOW | SLJIT_INT_OP)
+#define SLJIT_MUL_NOT_OVERFLOW32 (SLJIT_MUL_NOT_OVERFLOW | SLJIT_I32_OP)
/* Floating point comparison types. */
-#define SLJIT_D_EQUAL 14
-#define SLJIT_S_EQUAL (SLJIT_D_EQUAL | SLJIT_SINGLE_OP)
-#define SLJIT_D_NOT_EQUAL 15
-#define SLJIT_S_NOT_EQUAL (SLJIT_D_NOT_EQUAL | SLJIT_SINGLE_OP)
-#define SLJIT_D_LESS 16
-#define SLJIT_S_LESS (SLJIT_D_LESS | SLJIT_SINGLE_OP)
-#define SLJIT_D_GREATER_EQUAL 17
-#define SLJIT_S_GREATER_EQUAL (SLJIT_D_GREATER_EQUAL | SLJIT_SINGLE_OP)
-#define SLJIT_D_GREATER 18
-#define SLJIT_S_GREATER (SLJIT_D_GREATER | SLJIT_SINGLE_OP)
-#define SLJIT_D_LESS_EQUAL 19
-#define SLJIT_S_LESS_EQUAL (SLJIT_D_LESS_EQUAL | SLJIT_SINGLE_OP)
-#define SLJIT_D_UNORDERED 20
-#define SLJIT_S_UNORDERED (SLJIT_D_UNORDERED | SLJIT_SINGLE_OP)
-#define SLJIT_D_ORDERED 21
-#define SLJIT_S_ORDERED (SLJIT_D_ORDERED | SLJIT_SINGLE_OP)
+#define SLJIT_EQUAL_F64 14
+#define SLJIT_EQUAL_F32 (SLJIT_EQUAL_F64 | SLJIT_F32_OP)
+#define SLJIT_NOT_EQUAL_F64 15
+#define SLJIT_NOT_EQUAL_F32 (SLJIT_NOT_EQUAL_F64 | SLJIT_F32_OP)
+#define SLJIT_LESS_F64 16
+#define SLJIT_LESS_F32 (SLJIT_LESS_F64 | SLJIT_F32_OP)
+#define SLJIT_GREATER_EQUAL_F64 17
+#define SLJIT_GREATER_EQUAL_F32 (SLJIT_GREATER_EQUAL_F64 | SLJIT_F32_OP)
+#define SLJIT_GREATER_F64 18
+#define SLJIT_GREATER_F32 (SLJIT_GREATER_F64 | SLJIT_F32_OP)
+#define SLJIT_LESS_EQUAL_F64 19
+#define SLJIT_LESS_EQUAL_F32 (SLJIT_LESS_EQUAL_F64 | SLJIT_F32_OP)
+#define SLJIT_UNORDERED_F64 20
+#define SLJIT_UNORDERED_F32 (SLJIT_UNORDERED_F64 | SLJIT_F32_OP)
+#define SLJIT_ORDERED_F64 21
+#define SLJIT_ORDERED_F32 (SLJIT_ORDERED_F64 | SLJIT_F32_OP)
/* Unconditional jump types. */
#define SLJIT_JUMP 22
@@ -1014,7 +1010,7 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emit_label(struct sljit_compi
type can be combined (or'ed) with SLJIT_REWRITABLE_JUMP
Flags: - (never set any flags) for both conditional and unconditional jumps.
Flags: destroy all flags for calls. */
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, sljit_si type);
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, sljit_s32 type);
/* Basic arithmetic comparison. In most architectures it is implemented as
an SLJIT_SUB operation (with SLJIT_UNUSED destination and setting
@@ -1024,23 +1020,23 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compile
type must be between SLJIT_EQUAL and SLJIT_I_SIG_LESS_EQUAL
type can be combined (or'ed) with SLJIT_REWRITABLE_JUMP
Flags: destroy flags. */
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_cmp(struct sljit_compiler *compiler, sljit_si type,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w);
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_cmp(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w);
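A typical compare-and-branch sketch with the renamed comparison types (compiler assumed valid; the skipped code would be emitted between the jump and the label):

    struct sljit_jump *skip;

    /* Branch forward when R0 >= R1 (unsigned); flags are clobbered as noted above. */
    skip = sljit_emit_cmp(compiler, SLJIT_GREATER_EQUAL, SLJIT_R0, 0, SLJIT_R1, 0);
    /* ... code executed only when R0 < R1 ... */
    sljit_set_label(skip, sljit_emit_label(compiler));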
/* Basic floating point comparison. In most architectures it is implemented as
an SLJIT_FCMP operation (setting appropriate flags) followed by a
sljit_emit_jump. However, some architectures (e.g. MIPS) may employ
special optimizations here. It is suggested to use this comparison form
when appropriate.
- type must be between SLJIT_D_EQUAL and SLJIT_S_ORDERED
+ type must be between SLJIT_EQUAL_F64 and SLJIT_ORDERED_F32
type can be combined (or'ed) with SLJIT_REWRITABLE_JUMP
Flags: destroy flags.
Note: if either operand is NaN, the behaviour is undefined for
types up to SLJIT_S_LESS_EQUAL. */
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_fcmp(struct sljit_compiler *compiler, sljit_si type,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w);
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_fcmp(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w);
/* Set the destination of the jump to this label. */
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_label(struct sljit_jump *jump, struct sljit_label* label);
@@ -1053,14 +1049,14 @@ SLJIT_API_FUNC_ATTRIBUTE void sljit_set_target(struct sljit_jump *jump, sljit_uw
Indirect form: any other valid addressing mode
Flags: - (never set any flags) for unconditional jumps.
Flags: destroy all flags for calls. */
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_ijump(struct sljit_compiler *compiler, sljit_si type, sljit_si src, sljit_sw srcw);
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 src, sljit_sw srcw);
/* Perform the operation using the conditional flags as the second argument.
Type must always be between SLJIT_EQUAL and SLJIT_S_ORDERED. The value
represented by the type is 1, if the condition represented by the type
is fulfilled, and 0 otherwise.
- If op == SLJIT_MOV, SLJIT_MOV_SI, SLJIT_MOV_UI:
+ If op == SLJIT_MOV, SLJIT_MOV_S32, SLJIT_MOV_U32:
Set dst to the value represented by the type (0 or 1).
Src must be SLJIT_UNUSED, and srcw must be 0
Flags: - (never set any flags)
@@ -1070,18 +1066,18 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_ijump(struct sljit_compiler *compil
Important note: only dst=src and dstw=srcw is supported at the moment!
Flags: I | E | K
Note: sljit_emit_op_flags does nothing, if dst is SLJIT_UNUSED (regardless of op). */
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw,
- sljit_si type);
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw,
+ sljit_s32 type);
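A small sketch of the first form described above. The SLJIT_SET_E request bit used here is the pre-existing way to ask for the E (equal/zero) flag and is not part of this renaming patch; treat it as an assumption of the sketch:

    /* Set the flags with a subtraction whose result is discarded ... */
    sljit_emit_op2(compiler, SLJIT_SUB | SLJIT_SET_E, SLJIT_UNUSED, 0,
        SLJIT_R0, 0, SLJIT_R1, 0);
    /* ... then materialize "R0 == R1" as 0 or 1 in R2. */
    sljit_emit_op_flags(compiler, SLJIT_MOV, SLJIT_R2, 0, SLJIT_UNUSED, 0, SLJIT_EQUAL);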
/* Copies the base address of SLJIT_SP + offset to dst.
Flags: - (never set any flags) */
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_get_local_base(struct sljit_compiler *compiler, sljit_si dst, sljit_sw dstw, sljit_sw offset);
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_local_base(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw offset);
/* The constant can be changed runtime (see: sljit_set_const)
Flags: - (never set any flags) */
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_si dst, sljit_sw dstw, sljit_sw init_value);
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw init_value);
/* After the code generation, the addresses for label, jump and const instructions
are computed. Since these structures are freed by sljit_free_compiler, the
@@ -1104,7 +1100,7 @@ SLJIT_API_FUNC_ATTRIBUTE void sljit_set_const(sljit_uw addr, sljit_sw new_consta
/* Get the human readable name of the platform. Can be useful on platforms
like ARM, where ARM and Thumb2 functions can be mixed, and
it is useful to know the type of the code generator. */
-SLJIT_API_FUNC_ATTRIBUTE SLJIT_CONST char* sljit_get_platform_name(void);
+SLJIT_API_FUNC_ATTRIBUTE const char* sljit_get_platform_name(void);
/* Portable helper function to get an offset of a member. */
#define SLJIT_OFFSETOF(base, member) ((sljit_sw)(&((base*)0x10)->member) - 0x10)
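For illustration only (the struct point type is invented for this sketch; SLJIT_OFFSETOF and SLJIT_MEM1 come from the header):

    struct point { sljit_sw x; sljit_sw y; };

    /* Load p->y into R0, assuming SLJIT_S0 holds a struct point pointer. */
    sljit_emit_op1(compiler, SLJIT_MOV, SLJIT_R0, 0,
        SLJIT_MEM1(SLJIT_S0), SLJIT_OFFSETOF(struct point, y));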
@@ -1196,14 +1192,14 @@ SLJIT_API_FUNC_ATTRIBUTE void sljit_set_function_context(void** func_ptr, struct
Note: it returns with -1 for virtual registers (only on x86-32). */
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_get_register_index(sljit_si reg);
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 reg);
/* The following function is a helper function for sljit_emit_op_custom.
It returns with the real machine register index of any SLJIT_FLOAT register.
Note: the index is always an even number on ARM (except ARM-64), MIPS, and SPARC. */
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_get_float_register_index(sljit_si reg);
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_float_register_index(sljit_s32 reg);
/* Any instruction can be inserted into the instruction stream by
sljit_emit_op_custom. It has a similar purpose to inline assembly.
@@ -1215,18 +1211,18 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_get_float_register_index(sljit_si reg);
if size == 4, the instruction argument must be 4 byte aligned.
Otherwise: size must be 4 and instruction argument must be 4 byte aligned. */
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_custom(struct sljit_compiler *compiler,
- void *instruction, sljit_si size);
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *compiler,
+ void *instruction, sljit_s32 size);
#if (defined SLJIT_CONFIG_X86 && SLJIT_CONFIG_X86)
/* Returns with non-zero if sse2 is available. */
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_x86_is_sse2_available(void);
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_x86_is_sse2_available(void);
/* Returns with non-zero if cmov instruction is available. */
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_x86_is_cmov_available(void);
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_x86_is_cmov_available(void);
/* Emit a conditional mov instruction on x86 CPUs. This instruction
moves src to destination, if the condition is satisfied. Unlike
@@ -1235,14 +1231,14 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_x86_is_cmov_available(void);
checked by sljit_x86_is_cmov_available function.
type must be between SLJIT_EQUAL and SLJIT_S_ORDERED
dst_reg must be a valid register and it can be combined
- with SLJIT_INT_OP to perform 32 bit arithmetic
+ with SLJIT_I32_OP to perform 32 bit arithmetic
Flags: I - (never set any flags)
*/
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_x86_emit_cmov(struct sljit_compiler *compiler,
- sljit_si type,
- sljit_si dst_reg,
- sljit_si src, sljit_sw srcw);
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_x86_emit_cmov(struct sljit_compiler *compiler,
+ sljit_s32 type,
+ sljit_s32 dst_reg,
+ sljit_s32 src, sljit_sw srcw);
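Usage sketch for the x86-only helper, guarded by the availability check as the comment requires (everything else, including the earlier flag-setting compare, is assumed):

    if (sljit_x86_is_cmov_available()) {
        /* R0 = R1 when the previously computed flags say "equal"; 32 bit form. */
        sljit_x86_emit_cmov(compiler, SLJIT_EQUAL, SLJIT_R0 | SLJIT_I32_OP, SLJIT_R1, 0);
    }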
#endif
diff --git a/src/3rdparty/pcre/sljit/sljitNativeARM_32.c b/src/3rdparty/pcre/sljit/sljitNativeARM_32.c
index 5cd4c71a29..b92808f526 100644
--- a/src/3rdparty/pcre/sljit/sljitNativeARM_32.c
+++ b/src/3rdparty/pcre/sljit/sljitNativeARM_32.c
@@ -24,7 +24,7 @@
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
-SLJIT_API_FUNC_ATTRIBUTE SLJIT_CONST char* sljit_get_platform_name(void)
+SLJIT_API_FUNC_ATTRIBUTE const char* sljit_get_platform_name(void)
{
#if (defined SLJIT_CONFIG_ARM_V7 && SLJIT_CONFIG_ARM_V7)
return "ARMv7" SLJIT_CPUINFO;
@@ -52,10 +52,10 @@ SLJIT_API_FUNC_ATTRIBUTE SLJIT_CONST char* sljit_get_platform_name(void)
#define ALIGN_INSTRUCTION(ptr) \
(sljit_uw*)(((sljit_uw)(ptr) + (CONST_POOL_ALIGNMENT * sizeof(sljit_uw)) - 1) & ~((CONST_POOL_ALIGNMENT * sizeof(sljit_uw)) - 1))
#define MAX_DIFFERENCE(max_diff) \
- (((max_diff) / (sljit_si)sizeof(sljit_uw)) - (CONST_POOL_ALIGNMENT - 1))
+ (((max_diff) / (sljit_s32)sizeof(sljit_uw)) - (CONST_POOL_ALIGNMENT - 1))
/* See sljit_emit_enter and sljit_emit_op0 if you want to change them. */
-static SLJIT_CONST sljit_ub reg_map[SLJIT_NUMBER_OF_REGISTERS + 6] = {
+static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 6] = {
0, 0, 1, 2, 11, 10, 9, 8, 7, 6, 5, 4, 13, 3, 12, 14, 15
};
@@ -126,13 +126,13 @@ static SLJIT_CONST sljit_ub reg_map[SLJIT_NUMBER_OF_REGISTERS + 6] = {
#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
-static sljit_si push_cpool(struct sljit_compiler *compiler)
+static sljit_s32 push_cpool(struct sljit_compiler *compiler)
{
/* Pushing the constant pool into the instruction stream. */
sljit_uw* inst;
sljit_uw* cpool_ptr;
sljit_uw* cpool_end;
- sljit_si i;
+ sljit_s32 i;
/* The label could point to the address after the constant pool. */
if (compiler->last_label && compiler->last_label->size == compiler->size)
@@ -164,7 +164,7 @@ static sljit_si push_cpool(struct sljit_compiler *compiler)
return SLJIT_SUCCESS;
}
-static sljit_si push_inst(struct sljit_compiler *compiler, sljit_uw inst)
+static sljit_s32 push_inst(struct sljit_compiler *compiler, sljit_uw inst)
{
sljit_uw* ptr;
@@ -178,13 +178,13 @@ static sljit_si push_inst(struct sljit_compiler *compiler, sljit_uw inst)
return SLJIT_SUCCESS;
}
-static sljit_si push_inst_with_literal(struct sljit_compiler *compiler, sljit_uw inst, sljit_uw literal)
+static sljit_s32 push_inst_with_literal(struct sljit_compiler *compiler, sljit_uw inst, sljit_uw literal)
{
sljit_uw* ptr;
sljit_uw cpool_index = CPOOL_SIZE;
sljit_uw* cpool_ptr;
sljit_uw* cpool_end;
- sljit_ub* cpool_unique_ptr;
+ sljit_u8* cpool_unique_ptr;
if (SLJIT_UNLIKELY(compiler->cpool_diff != CONST_POOL_EMPTY && compiler->size - compiler->cpool_diff >= MAX_DIFFERENCE(4092)))
FAIL_IF(push_cpool(compiler));
@@ -228,7 +228,7 @@ static sljit_si push_inst_with_literal(struct sljit_compiler *compiler, sljit_uw
return SLJIT_SUCCESS;
}
-static sljit_si push_inst_with_unique_literal(struct sljit_compiler *compiler, sljit_uw inst, sljit_uw literal)
+static sljit_s32 push_inst_with_unique_literal(struct sljit_compiler *compiler, sljit_uw inst, sljit_uw literal)
{
sljit_uw* ptr;
if (SLJIT_UNLIKELY((compiler->cpool_diff != CONST_POOL_EMPTY && compiler->size - compiler->cpool_diff >= MAX_DIFFERENCE(4092)) || compiler->cpool_fill >= CPOOL_SIZE))
@@ -248,7 +248,7 @@ static sljit_si push_inst_with_unique_literal(struct sljit_compiler *compiler, s
return SLJIT_SUCCESS;
}
-static SLJIT_INLINE sljit_si prepare_blx(struct sljit_compiler *compiler)
+static SLJIT_INLINE sljit_s32 prepare_blx(struct sljit_compiler *compiler)
{
/* Place for at least two instructions (it doesn't matter whether the first has a literal). */
if (SLJIT_UNLIKELY(compiler->cpool_diff != CONST_POOL_EMPTY && compiler->size - compiler->cpool_diff >= MAX_DIFFERENCE(4088)))
@@ -256,7 +256,7 @@ static SLJIT_INLINE sljit_si prepare_blx(struct sljit_compiler *compiler)
return SLJIT_SUCCESS;
}
-static SLJIT_INLINE sljit_si emit_blx(struct sljit_compiler *compiler)
+static SLJIT_INLINE sljit_s32 emit_blx(struct sljit_compiler *compiler)
{
/* Must tightly follow the previous instruction (to be able to convert it to a bl instruction). */
SLJIT_ASSERT(compiler->cpool_diff == CONST_POOL_EMPTY || compiler->size - compiler->cpool_diff < MAX_DIFFERENCE(4092));
@@ -286,7 +286,7 @@ static sljit_uw patch_pc_relative_loads(sljit_uw *last_pc_patch, sljit_uw *code_
/* Must be a load instruction with immediate offset. */
SLJIT_ASSERT(ind < cpool_size && !(*last_pc_patch & (1 << 25)) && (*last_pc_patch & (1 << 20)));
- if ((sljit_si)const_pool[ind] < 0) {
+ if ((sljit_s32)const_pool[ind] < 0) {
const_pool[ind] = counter;
ind = counter;
counter++;
@@ -311,26 +311,26 @@ static sljit_uw patch_pc_relative_loads(sljit_uw *last_pc_patch, sljit_uw *code_
/* In some rare occasions we may need future patches. The probability is close to 0 in practice. */
struct future_patch {
struct future_patch* next;
- sljit_si index;
- sljit_si value;
+ sljit_s32 index;
+ sljit_s32 value;
};
-static sljit_si resolve_const_pool_index(struct sljit_compiler *compiler, struct future_patch **first_patch, sljit_uw cpool_current_index, sljit_uw *cpool_start_address, sljit_uw *buf_ptr)
+static sljit_s32 resolve_const_pool_index(struct sljit_compiler *compiler, struct future_patch **first_patch, sljit_uw cpool_current_index, sljit_uw *cpool_start_address, sljit_uw *buf_ptr)
{
- sljit_si value;
+ sljit_s32 value;
struct future_patch *curr_patch, *prev_patch;
SLJIT_UNUSED_ARG(compiler);
/* Using the values generated by patch_pc_relative_loads. */
if (!*first_patch)
- value = (sljit_si)cpool_start_address[cpool_current_index];
+ value = (sljit_s32)cpool_start_address[cpool_current_index];
else {
curr_patch = *first_patch;
- prev_patch = 0;
+ prev_patch = NULL;
while (1) {
if (!curr_patch) {
- value = (sljit_si)cpool_start_address[cpool_current_index];
+ value = (sljit_s32)cpool_start_address[cpool_current_index];
break;
}
if ((sljit_uw)curr_patch->index == cpool_current_index) {
@@ -370,7 +370,7 @@ static sljit_si resolve_const_pool_index(struct sljit_compiler *compiler, struct
#else
-static sljit_si push_inst(struct sljit_compiler *compiler, sljit_uw inst)
+static sljit_s32 push_inst(struct sljit_compiler *compiler, sljit_uw inst)
{
sljit_uw* ptr;
@@ -381,7 +381,7 @@ static sljit_si push_inst(struct sljit_compiler *compiler, sljit_uw inst)
return SLJIT_SUCCESS;
}
-static SLJIT_INLINE sljit_si emit_imm(struct sljit_compiler *compiler, sljit_si reg, sljit_sw imm)
+static SLJIT_INLINE sljit_s32 emit_imm(struct sljit_compiler *compiler, sljit_s32 reg, sljit_sw imm)
{
FAIL_IF(push_inst(compiler, MOVW | RD(reg) | ((imm << 4) & 0xf0000) | (imm & 0xfff)));
return push_inst(compiler, MOVT | RD(reg) | ((imm >> 12) & 0xf0000) | ((imm >> 16) & 0xfff));
@@ -389,7 +389,7 @@ static SLJIT_INLINE sljit_si emit_imm(struct sljit_compiler *compiler, sljit_si
#endif
-static SLJIT_INLINE sljit_si detect_jump_type(struct sljit_jump *jump, sljit_uw *code_ptr, sljit_uw *code)
+static SLJIT_INLINE sljit_s32 detect_jump_type(struct sljit_jump *jump, sljit_uw *code_ptr, sljit_uw *code)
{
sljit_sw diff;
@@ -446,13 +446,13 @@ static SLJIT_INLINE sljit_si detect_jump_type(struct sljit_jump *jump, sljit_uw
return 0;
}
-static SLJIT_INLINE void inline_set_jump_addr(sljit_uw addr, sljit_uw new_addr, sljit_si flush)
+static SLJIT_INLINE void inline_set_jump_addr(sljit_uw addr, sljit_uw new_addr, sljit_s32 flush)
{
#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
sljit_uw *ptr = (sljit_uw*)addr;
sljit_uw *inst = (sljit_uw*)ptr[0];
sljit_uw mov_pc = ptr[1];
- sljit_si bl = (mov_pc & 0x0000f000) != RD(TMP_PC);
+ sljit_s32 bl = (mov_pc & 0x0000f000) != RD(TMP_PC);
sljit_sw diff = (sljit_sw)(((sljit_sw)new_addr - (sljit_sw)(inst + 2)) >> 2);
if (diff <= 0x7fffff && diff >= -0x800000) {
@@ -504,7 +504,7 @@ static SLJIT_INLINE void inline_set_jump_addr(sljit_uw addr, sljit_uw new_addr,
static sljit_uw get_imm(sljit_uw imm);
-static SLJIT_INLINE void inline_set_const(sljit_uw addr, sljit_sw new_constant, sljit_si flush)
+static SLJIT_INLINE void inline_set_const(sljit_uw addr, sljit_sw new_constant, sljit_s32 flush)
{
#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
sljit_uw *ptr = (sljit_uw*)addr;
@@ -789,7 +789,7 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
}
#endif
- SLJIT_ASSERT(code_ptr - code <= (sljit_si)size);
+ SLJIT_ASSERT(code_ptr - code <= (sljit_s32)size);
compiler->error = SLJIT_ERR_COMPILED;
compiler->executable_size = (code_ptr - code) * sizeof(sljit_uw);
@@ -820,16 +820,16 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
#define EMIT_DATA_PROCESS_INS(opcode, set_flags, dst, src1, src2) \
(0xe0000000 | ((opcode) << 21) | (set_flags) | RD(dst) | RN(src1) | (src2))
-static sljit_si emit_op(struct sljit_compiler *compiler, sljit_si op, sljit_si inp_flags,
- sljit_si dst, sljit_sw dstw,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w);
+static sljit_s32 emit_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 inp_flags,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w);
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compiler *compiler,
- sljit_si options, sljit_si args, sljit_si scratches, sljit_si saveds,
- sljit_si fscratches, sljit_si fsaveds, sljit_si local_size)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compiler,
+ sljit_s32 options, sljit_s32 args, sljit_s32 scratches, sljit_s32 saveds,
+ sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
{
- sljit_si size, i, tmp;
+ sljit_s32 size, i, tmp;
sljit_uw push;
CHECK_ERROR();
@@ -866,11 +866,11 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compiler *compil
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_set_context(struct sljit_compiler *compiler,
- sljit_si options, sljit_si args, sljit_si scratches, sljit_si saveds,
- sljit_si fscratches, sljit_si fsaveds, sljit_si local_size)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_set_context(struct sljit_compiler *compiler,
+ sljit_s32 options, sljit_s32 args, sljit_s32 scratches, sljit_s32 saveds,
+ sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
{
- sljit_si size;
+ sljit_s32 size;
CHECK_ERROR();
CHECK(check_sljit_set_context(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size));
@@ -881,9 +881,9 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_set_context(struct sljit_compiler *compi
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_return(struct sljit_compiler *compiler, sljit_si op, sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 src, sljit_sw srcw)
{
- sljit_si i, tmp;
+ sljit_s32 i, tmp;
sljit_uw pop;
CHECK_ERROR();
@@ -983,8 +983,8 @@ static sljit_sw data_transfer_insts[16] = {
} \
return push_inst(compiler, EMIT_DATA_PROCESS_INS(MOV_DP, flags & SET_FLAGS, dst, SLJIT_UNUSED, (reg_map[(flags & ARGS_SWAPPED) ? src1 : src2] << 8) | (opcode << 5) | 0x10 | ((flags & ARGS_SWAPPED) ? reg_map[src2] : reg_map[src1])));
-static SLJIT_INLINE sljit_si emit_single_op(struct sljit_compiler *compiler, sljit_si op, sljit_si flags,
- sljit_si dst, sljit_si src1, sljit_si src2)
+static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 flags,
+ sljit_s32 dst, sljit_s32 src1, sljit_s32 src2)
{
sljit_sw mul_inst;
@@ -1001,17 +1001,17 @@ static SLJIT_INLINE sljit_si emit_single_op(struct sljit_compiler *compiler, slj
}
return SLJIT_SUCCESS;
- case SLJIT_MOV_UB:
- case SLJIT_MOV_SB:
+ case SLJIT_MOV_U8:
+ case SLJIT_MOV_S8:
SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & ARGS_SWAPPED));
if ((flags & (REG_DEST | REG_SOURCE)) == (REG_DEST | REG_SOURCE)) {
#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
- if (op == SLJIT_MOV_UB)
+ if (op == SLJIT_MOV_U8)
return push_inst(compiler, EMIT_DATA_PROCESS_INS(AND_DP, 0, dst, src2, SRC2_IMM | 0xff));
FAIL_IF(push_inst(compiler, EMIT_DATA_PROCESS_INS(MOV_DP, 0, dst, SLJIT_UNUSED, (24 << 7) | reg_map[src2])));
- return push_inst(compiler, EMIT_DATA_PROCESS_INS(MOV_DP, 0, dst, SLJIT_UNUSED, (24 << 7) | (op == SLJIT_MOV_UB ? 0x20 : 0x40) | reg_map[dst]));
+ return push_inst(compiler, EMIT_DATA_PROCESS_INS(MOV_DP, 0, dst, SLJIT_UNUSED, (24 << 7) | (op == SLJIT_MOV_U8 ? 0x20 : 0x40) | reg_map[dst]));
#else
- return push_inst(compiler, (op == SLJIT_MOV_UB ? UXTB : SXTB) | RD(dst) | RM(src2));
+ return push_inst(compiler, (op == SLJIT_MOV_U8 ? UXTB : SXTB) | RD(dst) | RM(src2));
#endif
}
else if (dst != src2) {
@@ -1022,15 +1022,15 @@ static SLJIT_INLINE sljit_si emit_single_op(struct sljit_compiler *compiler, slj
}
return SLJIT_SUCCESS;
- case SLJIT_MOV_UH:
- case SLJIT_MOV_SH:
+ case SLJIT_MOV_U16:
+ case SLJIT_MOV_S16:
SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & ARGS_SWAPPED));
if ((flags & (REG_DEST | REG_SOURCE)) == (REG_DEST | REG_SOURCE)) {
#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
FAIL_IF(push_inst(compiler, EMIT_DATA_PROCESS_INS(MOV_DP, 0, dst, SLJIT_UNUSED, (16 << 7) | reg_map[src2])));
- return push_inst(compiler, EMIT_DATA_PROCESS_INS(MOV_DP, 0, dst, SLJIT_UNUSED, (16 << 7) | (op == SLJIT_MOV_UH ? 0x20 : 0x40) | reg_map[dst]));
+ return push_inst(compiler, EMIT_DATA_PROCESS_INS(MOV_DP, 0, dst, SLJIT_UNUSED, (16 << 7) | (op == SLJIT_MOV_U16 ? 0x20 : 0x40) | reg_map[dst]));
#else
- return push_inst(compiler, (op == SLJIT_MOV_UH ? UXTH : SXTH) | RD(dst) | RM(src2));
+ return push_inst(compiler, (op == SLJIT_MOV_U16 ? UXTH : SXTH) | RD(dst) | RM(src2));
#endif
}
else if (dst != src2) {
@@ -1139,7 +1139,7 @@ static SLJIT_INLINE sljit_si emit_single_op(struct sljit_compiler *compiler, slj
Returns with 0 if not possible. */
static sljit_uw get_imm(sljit_uw imm)
{
- sljit_si rol;
+ sljit_s32 rol;
if (imm <= 0xff)
return SRC2_IMM | imm;
@@ -1175,12 +1175,12 @@ static sljit_uw get_imm(sljit_uw imm)
}
#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
-static sljit_si generate_int(struct sljit_compiler *compiler, sljit_si reg, sljit_uw imm, sljit_si positive)
+static sljit_s32 generate_int(struct sljit_compiler *compiler, sljit_s32 reg, sljit_uw imm, sljit_s32 positive)
{
sljit_uw mask;
sljit_uw imm1;
sljit_uw imm2;
- sljit_si rol;
+ sljit_s32 rol;
/* Step 1: Search for a zero byte (8 continuous zero bits). */
mask = 0xff000000;
@@ -1286,7 +1286,7 @@ static sljit_si generate_int(struct sljit_compiler *compiler, sljit_si reg, slji
}
#endif
-static sljit_si load_immediate(struct sljit_compiler *compiler, sljit_si reg, sljit_uw imm)
+static sljit_s32 load_immediate(struct sljit_compiler *compiler, sljit_s32 reg, sljit_uw imm)
{
sljit_uw tmp;
@@ -1317,7 +1317,7 @@ static sljit_si load_immediate(struct sljit_compiler *compiler, sljit_si reg, sl
}
/* Helper function. Dst should be reg + value, using at most 1 instruction; flags are not set. */
-static sljit_si emit_set_delta(struct sljit_compiler *compiler, sljit_si dst, sljit_si reg, sljit_sw value)
+static sljit_s32 emit_set_delta(struct sljit_compiler *compiler, sljit_s32 dst, sljit_s32 reg, sljit_sw value)
{
if (value >= 0) {
value = get_imm(value);
@@ -1333,7 +1333,7 @@ static sljit_si emit_set_delta(struct sljit_compiler *compiler, sljit_si dst, sl
}
/* Can perform an operation using at most 1 instruction. */
-static sljit_si getput_arg_fast(struct sljit_compiler *compiler, sljit_si inp_flags, sljit_si reg, sljit_si arg, sljit_sw argw)
+static sljit_s32 getput_arg_fast(struct sljit_compiler *compiler, sljit_s32 inp_flags, sljit_s32 reg, sljit_s32 arg, sljit_sw argw)
{
sljit_uw imm;
@@ -1408,7 +1408,7 @@ static sljit_si getput_arg_fast(struct sljit_compiler *compiler, sljit_si inp_fl
/* See getput_arg below.
Note: can_cache is called only for binary operators. Those
operators always use word arguments without write back. */
-static sljit_si can_cache(sljit_si arg, sljit_sw argw, sljit_si next_arg, sljit_sw next_argw)
+static sljit_s32 can_cache(sljit_s32 arg, sljit_sw argw, sljit_s32 next_arg, sljit_sw next_argw)
{
/* Immediate caching is not supported as it would be an operation on constant arguments. */
if (arg & SLJIT_IMM)
@@ -1456,9 +1456,9 @@ static sljit_si can_cache(sljit_si arg, sljit_sw argw, sljit_si next_arg, sljit_
}
/* Emit the necessary instructions. See can_cache above. */
-static sljit_si getput_arg(struct sljit_compiler *compiler, sljit_si inp_flags, sljit_si reg, sljit_si arg, sljit_sw argw, sljit_si next_arg, sljit_sw next_argw)
+static sljit_s32 getput_arg(struct sljit_compiler *compiler, sljit_s32 inp_flags, sljit_s32 reg, sljit_s32 arg, sljit_sw argw, sljit_s32 next_arg, sljit_sw next_argw)
{
- sljit_si tmp_r;
+ sljit_s32 tmp_r;
sljit_sw max_delta;
sljit_sw sign;
sljit_uw imm;
@@ -1583,7 +1583,7 @@ static sljit_si getput_arg(struct sljit_compiler *compiler, sljit_si inp_flags,
return push_inst(compiler, EMIT_DATA_TRANSFER(inp_flags, 1, inp_flags & WRITE_BACK, reg, arg & REG_MASK, reg_map[tmp_r] | (max_delta & 0xf00 ? SRC2_IMM : 0)));
}
-static SLJIT_INLINE sljit_si emit_op_mem(struct sljit_compiler *compiler, sljit_si flags, sljit_si reg, sljit_si arg, sljit_sw argw)
+static SLJIT_INLINE sljit_s32 emit_op_mem(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg, sljit_s32 arg, sljit_sw argw)
{
if (getput_arg_fast(compiler, flags, reg, arg, argw))
return compiler->error;
@@ -1592,17 +1592,17 @@ static SLJIT_INLINE sljit_si emit_op_mem(struct sljit_compiler *compiler, sljit_
return getput_arg(compiler, flags, reg, arg, argw, 0, 0);
}
-static SLJIT_INLINE sljit_si emit_op_mem2(struct sljit_compiler *compiler, sljit_si flags, sljit_si reg, sljit_si arg1, sljit_sw arg1w, sljit_si arg2, sljit_sw arg2w)
+static SLJIT_INLINE sljit_s32 emit_op_mem2(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg, sljit_s32 arg1, sljit_sw arg1w, sljit_s32 arg2, sljit_sw arg2w)
{
if (getput_arg_fast(compiler, flags, reg, arg1, arg1w))
return compiler->error;
return getput_arg(compiler, flags, reg, arg1, arg1w, arg2, arg2w);
}
-static sljit_si emit_op(struct sljit_compiler *compiler, sljit_si op, sljit_si inp_flags,
- sljit_si dst, sljit_sw dstw,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+static sljit_s32 emit_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 inp_flags,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
/* arg1 goes to TMP_REG1 or src reg
arg2 goes to TMP_REG2, imm or src reg
@@ -1610,25 +1610,25 @@ static sljit_si emit_op(struct sljit_compiler *compiler, sljit_si op, sljit_si i
result goes to TMP_REG2, so put result can use TMP_REG1 and TMP_REG3. */
/* We prefer registers and simple constants. */
- sljit_si dst_r;
- sljit_si src1_r;
- sljit_si src2_r = 0;
- sljit_si sugg_src2_r = TMP_REG2;
- sljit_si flags = GET_FLAGS(op) ? SET_FLAGS : 0;
+ sljit_s32 dst_r;
+ sljit_s32 src1_r;
+ sljit_s32 src2_r = 0;
+ sljit_s32 sugg_src2_r = TMP_REG2;
+ sljit_s32 flags = GET_FLAGS(op) ? SET_FLAGS : 0;
compiler->cache_arg = 0;
compiler->cache_argw = 0;
/* Destination check. */
if (SLJIT_UNLIKELY(dst == SLJIT_UNUSED)) {
- if (op >= SLJIT_MOV && op <= SLJIT_MOVU_SI && !(src2 & SLJIT_MEM))
+ if (op >= SLJIT_MOV && op <= SLJIT_MOVU_S32 && !(src2 & SLJIT_MEM))
return SLJIT_SUCCESS;
dst_r = TMP_REG2;
}
else if (FAST_IS_REG(dst)) {
dst_r = dst;
flags |= REG_DEST;
- if (op >= SLJIT_MOV && op <= SLJIT_MOVU_SI)
+ if (op >= SLJIT_MOV && op <= SLJIT_MOVU_S32)
sugg_src2_r = dst_r;
}
else {
@@ -1695,7 +1695,7 @@ static sljit_si emit_op(struct sljit_compiler *compiler, sljit_si op, sljit_si i
if (FAST_IS_REG(src2)) {
src2_r = src2;
flags |= REG_SOURCE;
- if (!(flags & REG_DEST) && op >= SLJIT_MOV && op <= SLJIT_MOVU_SI)
+ if (!(flags & REG_DEST) && op >= SLJIT_MOV && op <= SLJIT_MOVU_S32)
dst_r = src2_r;
}
else do { /* do { } while(0) is used because of breaks. */
@@ -1804,7 +1804,7 @@ extern int __aeabi_idivmod(int numerator, int denominator);
}
#endif
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op0(struct sljit_compiler *compiler, sljit_si op)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op0(struct sljit_compiler *compiler, sljit_s32 op)
{
CHECK_ERROR();
CHECK(check_sljit_emit_op0(compiler, op));
@@ -1817,58 +1817,58 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op0(struct sljit_compiler *compiler
case SLJIT_NOP:
FAIL_IF(push_inst(compiler, NOP));
break;
- case SLJIT_LUMUL:
- case SLJIT_LSMUL:
+ case SLJIT_LMUL_UW:
+ case SLJIT_LMUL_SW:
#if (defined SLJIT_CONFIG_ARM_V7 && SLJIT_CONFIG_ARM_V7)
- return push_inst(compiler, (op == SLJIT_LUMUL ? UMULL : SMULL)
+ return push_inst(compiler, (op == SLJIT_LMUL_UW ? UMULL : SMULL)
| (reg_map[SLJIT_R1] << 16)
| (reg_map[SLJIT_R0] << 12)
| (reg_map[SLJIT_R0] << 8)
| reg_map[SLJIT_R1]);
#else
FAIL_IF(push_inst(compiler, EMIT_DATA_PROCESS_INS(MOV_DP, 0, TMP_REG1, SLJIT_UNUSED, RM(SLJIT_R1))));
- return push_inst(compiler, (op == SLJIT_LUMUL ? UMULL : SMULL)
+ return push_inst(compiler, (op == SLJIT_LMUL_UW ? UMULL : SMULL)
| (reg_map[SLJIT_R1] << 16)
| (reg_map[SLJIT_R0] << 12)
| (reg_map[SLJIT_R0] << 8)
| reg_map[TMP_REG1]);
#endif
- case SLJIT_UDIVMOD:
- case SLJIT_SDIVMOD:
- case SLJIT_UDIVI:
- case SLJIT_SDIVI:
- SLJIT_COMPILE_ASSERT((SLJIT_UDIVMOD & 0x2) == 0 && SLJIT_UDIVI - 0x2 == SLJIT_UDIVMOD, bad_div_opcode_assignments);
+ case SLJIT_DIVMOD_UW:
+ case SLJIT_DIVMOD_SW:
+ case SLJIT_DIV_UW:
+ case SLJIT_DIV_SW:
+ SLJIT_COMPILE_ASSERT((SLJIT_DIVMOD_UW & 0x2) == 0 && SLJIT_DIV_UW - 0x2 == SLJIT_DIVMOD_UW, bad_div_opcode_assignments);
SLJIT_COMPILE_ASSERT(reg_map[2] == 1 && reg_map[3] == 2, bad_register_mapping);
- if ((op >= SLJIT_UDIVI) && (compiler->scratches >= 3)) {
+ if ((op >= SLJIT_DIV_UW) && (compiler->scratches >= 3)) {
FAIL_IF(push_inst(compiler, 0xe52d2008 /* str r2, [sp, #-8]! */));
FAIL_IF(push_inst(compiler, 0xe58d1004 /* str r1, [sp, #4] */));
}
- else if ((op >= SLJIT_UDIVI) || (compiler->scratches >= 3))
- FAIL_IF(push_inst(compiler, 0xe52d0008 | (op >= SLJIT_UDIVI ? 0x1000 : 0x2000) /* str r1/r2, [sp, #-8]! */));
+ else if ((op >= SLJIT_DIV_UW) || (compiler->scratches >= 3))
+ FAIL_IF(push_inst(compiler, 0xe52d0008 | (op >= SLJIT_DIV_UW ? 0x1000 : 0x2000) /* str r1/r2, [sp, #-8]! */));
#if defined(__GNUC__)
FAIL_IF(sljit_emit_ijump(compiler, SLJIT_FAST_CALL, SLJIT_IMM,
- ((op | 0x2) == SLJIT_UDIVI ? SLJIT_FUNC_OFFSET(__aeabi_uidivmod) : SLJIT_FUNC_OFFSET(__aeabi_idivmod))));
+ ((op | 0x2) == SLJIT_DIV_UW ? SLJIT_FUNC_OFFSET(__aeabi_uidivmod) : SLJIT_FUNC_OFFSET(__aeabi_idivmod))));
#else
#error "Software divmod functions are needed"
#endif
- if ((op >= SLJIT_UDIVI) && (compiler->scratches >= 3)) {
+ if ((op >= SLJIT_DIV_UW) && (compiler->scratches >= 3)) {
FAIL_IF(push_inst(compiler, 0xe59d1004 /* ldr r1, [sp, #4] */));
FAIL_IF(push_inst(compiler, 0xe49d2008 /* ldr r2, [sp], #8 */));
}
- else if ((op >= SLJIT_UDIVI) || (compiler->scratches >= 3))
- return push_inst(compiler, 0xe49d0008 | (op >= SLJIT_UDIVI ? 0x1000 : 0x2000) /* ldr r1/r2, [sp], #8 */);
+ else if ((op >= SLJIT_DIV_UW) || (compiler->scratches >= 3))
+ return push_inst(compiler, 0xe49d0008 | (op >= SLJIT_DIV_UW ? 0x1000 : 0x2000) /* ldr r1/r2, [sp], #8 */);
return SLJIT_SUCCESS;
}
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op1(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
{
CHECK_ERROR();
CHECK(check_sljit_emit_op1(compiler, op, dst, dstw, src, srcw));
@@ -1877,40 +1877,40 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op1(struct sljit_compiler *compiler
switch (GET_OPCODE(op)) {
case SLJIT_MOV:
- case SLJIT_MOV_UI:
- case SLJIT_MOV_SI:
+ case SLJIT_MOV_U32:
+ case SLJIT_MOV_S32:
case SLJIT_MOV_P:
return emit_op(compiler, SLJIT_MOV, ALLOW_ANY_IMM, dst, dstw, TMP_REG1, 0, src, srcw);
- case SLJIT_MOV_UB:
- return emit_op(compiler, SLJIT_MOV_UB, ALLOW_ANY_IMM | BYTE_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_ub)srcw : srcw);
+ case SLJIT_MOV_U8:
+ return emit_op(compiler, SLJIT_MOV_U8, ALLOW_ANY_IMM | BYTE_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_u8)srcw : srcw);
- case SLJIT_MOV_SB:
- return emit_op(compiler, SLJIT_MOV_SB, ALLOW_ANY_IMM | SIGNED_DATA | BYTE_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_sb)srcw : srcw);
+ case SLJIT_MOV_S8:
+ return emit_op(compiler, SLJIT_MOV_S8, ALLOW_ANY_IMM | SIGNED_DATA | BYTE_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s8)srcw : srcw);
- case SLJIT_MOV_UH:
- return emit_op(compiler, SLJIT_MOV_UH, ALLOW_ANY_IMM | HALF_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_uh)srcw : srcw);
+ case SLJIT_MOV_U16:
+ return emit_op(compiler, SLJIT_MOV_U16, ALLOW_ANY_IMM | HALF_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_u16)srcw : srcw);
- case SLJIT_MOV_SH:
- return emit_op(compiler, SLJIT_MOV_SH, ALLOW_ANY_IMM | SIGNED_DATA | HALF_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_sh)srcw : srcw);
+ case SLJIT_MOV_S16:
+ return emit_op(compiler, SLJIT_MOV_S16, ALLOW_ANY_IMM | SIGNED_DATA | HALF_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s16)srcw : srcw);
case SLJIT_MOVU:
- case SLJIT_MOVU_UI:
- case SLJIT_MOVU_SI:
+ case SLJIT_MOVU_U32:
+ case SLJIT_MOVU_S32:
case SLJIT_MOVU_P:
return emit_op(compiler, SLJIT_MOV, ALLOW_ANY_IMM | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, srcw);
- case SLJIT_MOVU_UB:
- return emit_op(compiler, SLJIT_MOV_UB, ALLOW_ANY_IMM | BYTE_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_ub)srcw : srcw);
+ case SLJIT_MOVU_U8:
+ return emit_op(compiler, SLJIT_MOV_U8, ALLOW_ANY_IMM | BYTE_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_u8)srcw : srcw);
- case SLJIT_MOVU_SB:
- return emit_op(compiler, SLJIT_MOV_SB, ALLOW_ANY_IMM | SIGNED_DATA | BYTE_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_sb)srcw : srcw);
+ case SLJIT_MOVU_S8:
+ return emit_op(compiler, SLJIT_MOV_S8, ALLOW_ANY_IMM | SIGNED_DATA | BYTE_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s8)srcw : srcw);
- case SLJIT_MOVU_UH:
- return emit_op(compiler, SLJIT_MOV_UH, ALLOW_ANY_IMM | HALF_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_uh)srcw : srcw);
+ case SLJIT_MOVU_U16:
+ return emit_op(compiler, SLJIT_MOV_U16, ALLOW_ANY_IMM | HALF_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_u16)srcw : srcw);
- case SLJIT_MOVU_SH:
- return emit_op(compiler, SLJIT_MOV_SH, ALLOW_ANY_IMM | SIGNED_DATA | HALF_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_sh)srcw : srcw);
+ case SLJIT_MOVU_S16:
+ return emit_op(compiler, SLJIT_MOV_S16, ALLOW_ANY_IMM | SIGNED_DATA | HALF_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s16)srcw : srcw);
case SLJIT_NOT:
return emit_op(compiler, op, ALLOW_ANY_IMM, dst, dstw, TMP_REG1, 0, src, srcw);
@@ -1929,10 +1929,10 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op1(struct sljit_compiler *compiler
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op2(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
CHECK_ERROR();
CHECK(check_sljit_emit_op2(compiler, op, dst, dstw, src1, src1w, src2, src2w));
@@ -1971,20 +1971,20 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op2(struct sljit_compiler *compiler
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_get_register_index(sljit_si reg)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 reg)
{
CHECK_REG_INDEX(check_sljit_get_register_index(reg));
return reg_map[reg];
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_get_float_register_index(sljit_si reg)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_float_register_index(sljit_s32 reg)
{
CHECK_REG_INDEX(check_sljit_get_float_register_index(reg));
return reg << 1;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_custom(struct sljit_compiler *compiler,
- void *instruction, sljit_si size)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *compiler,
+ void *instruction, sljit_s32 size)
{
CHECK_ERROR();
CHECK(check_sljit_emit_op_custom(compiler, instruction, size));
@@ -2000,7 +2000,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_custom(struct sljit_compiler *co
/* 0 - no fpu
1 - vfp */
-static sljit_si arm_fpu_type = -1;
+static sljit_s32 arm_fpu_type = -1;
static void init_compiler(void)
{
@@ -2011,7 +2011,7 @@ static void init_compiler(void)
arm_fpu_type = 1;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_is_fpu_available(void)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_is_fpu_available(void)
{
#ifdef SLJIT_IS_FPU_AVAILABLE
return SLJIT_IS_FPU_AVAILABLE;
@@ -2026,7 +2026,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_is_fpu_available(void)
#define arm_fpu_type 1
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_is_fpu_available(void)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_is_fpu_available(void)
{
/* Always available. */
return 1;
@@ -2040,11 +2040,11 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_is_fpu_available(void)
#define EMIT_FPU_OPERATION(opcode, mode, dst, src1, src2) \
((opcode) | (mode) | ((dst) << 12) | (src1) | ((src2) << 16))
-static sljit_si emit_fop_mem(struct sljit_compiler *compiler, sljit_si flags, sljit_si reg, sljit_si arg, sljit_sw argw)
+static sljit_s32 emit_fop_mem(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg, sljit_s32 arg, sljit_sw argw)
{
sljit_sw tmp;
sljit_uw imm;
- sljit_sw inst = VSTR_F32 | (flags & (SLJIT_SINGLE_OP | FPU_LOAD));
+ sljit_sw inst = VSTR_F32 | (flags & (SLJIT_F32_OP | FPU_LOAD));
SLJIT_ASSERT(arg & SLJIT_MEM);
if (SLJIT_UNLIKELY(arg & OFFS_REG_MASK)) {
@@ -2104,16 +2104,16 @@ static sljit_si emit_fop_mem(struct sljit_compiler *compiler, sljit_si flags, sl
return push_inst(compiler, EMIT_FPU_DATA_TRANSFER(inst, 1, TMP_REG3, reg, 0));
}
-static SLJIT_INLINE sljit_si sljit_emit_fop1_convw_fromd(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw)
+static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
{
if (src & SLJIT_MEM) {
- FAIL_IF(emit_fop_mem(compiler, (op & SLJIT_SINGLE_OP) | FPU_LOAD, TMP_FREG1, src, srcw));
+ FAIL_IF(emit_fop_mem(compiler, (op & SLJIT_F32_OP) | FPU_LOAD, TMP_FREG1, src, srcw));
src = TMP_FREG1;
}
- FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VCVT_S32_F32, op & SLJIT_SINGLE_OP, TMP_FREG1, src, 0)));
+ FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VCVT_S32_F32, op & SLJIT_F32_OP, TMP_FREG1, src, 0)));
if (dst == SLJIT_UNUSED)
return SLJIT_SUCCESS;
@@ -2125,11 +2125,11 @@ static SLJIT_INLINE sljit_si sljit_emit_fop1_convw_fromd(struct sljit_compiler *
return emit_fop_mem(compiler, 0, TMP_FREG1, dst, dstw);
}
-static SLJIT_INLINE sljit_si sljit_emit_fop1_convd_fromw(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw)
+static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_sw(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
{
- sljit_si dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
+ sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
if (FAST_IS_REG(src))
FAIL_IF(push_inst(compiler, VMOV | RD(src) | (TMP_FREG1 << 16)));
@@ -2142,85 +2142,85 @@ static SLJIT_INLINE sljit_si sljit_emit_fop1_convd_fromw(struct sljit_compiler *
FAIL_IF(push_inst(compiler, VMOV | RD(TMP_REG1) | (TMP_FREG1 << 16)));
}
- FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VCVT_F32_S32, op & SLJIT_SINGLE_OP, dst_r, TMP_FREG1, 0)));
+ FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VCVT_F32_S32, op & SLJIT_F32_OP, dst_r, TMP_FREG1, 0)));
if (dst & SLJIT_MEM)
- return emit_fop_mem(compiler, (op & SLJIT_SINGLE_OP), TMP_FREG1, dst, dstw);
+ return emit_fop_mem(compiler, (op & SLJIT_F32_OP), TMP_FREG1, dst, dstw);
return SLJIT_SUCCESS;
}
-static SLJIT_INLINE sljit_si sljit_emit_fop1_cmp(struct sljit_compiler *compiler, sljit_si op,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+static SLJIT_INLINE sljit_s32 sljit_emit_fop1_cmp(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
if (src1 & SLJIT_MEM) {
- FAIL_IF(emit_fop_mem(compiler, (op & SLJIT_SINGLE_OP) | FPU_LOAD, TMP_FREG1, src1, src1w));
+ FAIL_IF(emit_fop_mem(compiler, (op & SLJIT_F32_OP) | FPU_LOAD, TMP_FREG1, src1, src1w));
src1 = TMP_FREG1;
}
if (src2 & SLJIT_MEM) {
- FAIL_IF(emit_fop_mem(compiler, (op & SLJIT_SINGLE_OP) | FPU_LOAD, TMP_FREG2, src2, src2w));
+ FAIL_IF(emit_fop_mem(compiler, (op & SLJIT_F32_OP) | FPU_LOAD, TMP_FREG2, src2, src2w));
src2 = TMP_FREG2;
}
- FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VCMP_F32, op & SLJIT_SINGLE_OP, src1, src2, 0)));
+ FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VCMP_F32, op & SLJIT_F32_OP, src1, src2, 0)));
return push_inst(compiler, VMRS);
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop1(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
{
- sljit_si dst_r;
+ sljit_s32 dst_r;
CHECK_ERROR();
compiler->cache_arg = 0;
compiler->cache_argw = 0;
- if (GET_OPCODE(op) != SLJIT_CONVD_FROMS)
- op ^= SLJIT_SINGLE_OP;
+ if (GET_OPCODE(op) != SLJIT_CONV_F64_FROM_F32)
+ op ^= SLJIT_F32_OP;
- SLJIT_COMPILE_ASSERT((SLJIT_SINGLE_OP == 0x100), float_transfer_bit_error);
+ SLJIT_COMPILE_ASSERT((SLJIT_F32_OP == 0x100), float_transfer_bit_error);
SELECT_FOP1_OPERATION_WITH_CHECKS(compiler, op, dst, dstw, src, srcw);
dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
if (src & SLJIT_MEM) {
- FAIL_IF(emit_fop_mem(compiler, (op & SLJIT_SINGLE_OP) | FPU_LOAD, dst_r, src, srcw));
+ FAIL_IF(emit_fop_mem(compiler, (op & SLJIT_F32_OP) | FPU_LOAD, dst_r, src, srcw));
src = dst_r;
}
switch (GET_OPCODE(op)) {
- case SLJIT_DMOV:
+ case SLJIT_MOV_F64:
if (src != dst_r) {
if (dst_r != TMP_FREG1)
- FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VMOV_F32, op & SLJIT_SINGLE_OP, dst_r, src, 0)));
+ FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VMOV_F32, op & SLJIT_F32_OP, dst_r, src, 0)));
else
dst_r = src;
}
break;
- case SLJIT_DNEG:
- FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VNEG_F32, op & SLJIT_SINGLE_OP, dst_r, src, 0)));
+ case SLJIT_NEG_F64:
+ FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VNEG_F32, op & SLJIT_F32_OP, dst_r, src, 0)));
break;
- case SLJIT_DABS:
- FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VABS_F32, op & SLJIT_SINGLE_OP, dst_r, src, 0)));
+ case SLJIT_ABS_F64:
+ FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VABS_F32, op & SLJIT_F32_OP, dst_r, src, 0)));
break;
- case SLJIT_CONVD_FROMS:
- FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VCVT_F64_F32, op & SLJIT_SINGLE_OP, dst_r, src, 0)));
- op ^= SLJIT_SINGLE_OP;
+ case SLJIT_CONV_F64_FROM_F32:
+ FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VCVT_F64_F32, op & SLJIT_F32_OP, dst_r, src, 0)));
+ op ^= SLJIT_F32_OP;
break;
}
if (dst & SLJIT_MEM)
- return emit_fop_mem(compiler, (op & SLJIT_SINGLE_OP), dst_r, dst, dstw);
+ return emit_fop_mem(compiler, (op & SLJIT_F32_OP), dst_r, dst, dstw);
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop2(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
- sljit_si dst_r;
+ sljit_s32 dst_r;
CHECK_ERROR();
CHECK(check_sljit_emit_fop2(compiler, op, dst, dstw, src1, src1w, src2, src2w));
@@ -2230,40 +2230,40 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop2(struct sljit_compiler *compile
compiler->cache_arg = 0;
compiler->cache_argw = 0;
- op ^= SLJIT_SINGLE_OP;
+ op ^= SLJIT_F32_OP;
dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
if (src2 & SLJIT_MEM) {
- FAIL_IF(emit_fop_mem(compiler, (op & SLJIT_SINGLE_OP) | FPU_LOAD, TMP_FREG2, src2, src2w));
+ FAIL_IF(emit_fop_mem(compiler, (op & SLJIT_F32_OP) | FPU_LOAD, TMP_FREG2, src2, src2w));
src2 = TMP_FREG2;
}
if (src1 & SLJIT_MEM) {
- FAIL_IF(emit_fop_mem(compiler, (op & SLJIT_SINGLE_OP) | FPU_LOAD, TMP_FREG1, src1, src1w));
+ FAIL_IF(emit_fop_mem(compiler, (op & SLJIT_F32_OP) | FPU_LOAD, TMP_FREG1, src1, src1w));
src1 = TMP_FREG1;
}
switch (GET_OPCODE(op)) {
- case SLJIT_DADD:
- FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VADD_F32, op & SLJIT_SINGLE_OP, dst_r, src2, src1)));
+ case SLJIT_ADD_F64:
+ FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VADD_F32, op & SLJIT_F32_OP, dst_r, src2, src1)));
break;
- case SLJIT_DSUB:
- FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VSUB_F32, op & SLJIT_SINGLE_OP, dst_r, src2, src1)));
+ case SLJIT_SUB_F64:
+ FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VSUB_F32, op & SLJIT_F32_OP, dst_r, src2, src1)));
break;
- case SLJIT_DMUL:
- FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VMUL_F32, op & SLJIT_SINGLE_OP, dst_r, src2, src1)));
+ case SLJIT_MUL_F64:
+ FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VMUL_F32, op & SLJIT_F32_OP, dst_r, src2, src1)));
break;
- case SLJIT_DDIV:
- FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VDIV_F32, op & SLJIT_SINGLE_OP, dst_r, src2, src1)));
+ case SLJIT_DIV_F64:
+ FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VDIV_F32, op & SLJIT_F32_OP, dst_r, src2, src1)));
break;
}
if (dst_r == TMP_FREG1)
- FAIL_IF(emit_fop_mem(compiler, (op & SLJIT_SINGLE_OP), TMP_FREG1, dst, dstw));
+ FAIL_IF(emit_fop_mem(compiler, (op & SLJIT_F32_OP), TMP_FREG1, dst, dstw));
return SLJIT_SUCCESS;
}
@@ -2276,7 +2276,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop2(struct sljit_compiler *compile
/* Other instructions */
/* --------------------------------------------------------------------- */
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_si dst, sljit_sw dstw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
{
CHECK_ERROR();
CHECK(check_sljit_emit_fast_enter(compiler, dst, dstw));
@@ -2299,7 +2299,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_enter(struct sljit_compiler *c
return getput_arg(compiler, WORD_DATA, TMP_REG2, dst, dstw, 0, 0);
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_return(struct sljit_compiler *compiler, sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_return(struct sljit_compiler *compiler, sljit_s32 src, sljit_sw srcw)
{
CHECK_ERROR();
CHECK(check_sljit_emit_fast_return(compiler, src, srcw));
@@ -2326,33 +2326,33 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_return(struct sljit_compiler *
/* Conditional instructions */
/* --------------------------------------------------------------------- */
-static sljit_uw get_cc(sljit_si type)
+static sljit_uw get_cc(sljit_s32 type)
{
switch (type) {
case SLJIT_EQUAL:
case SLJIT_MUL_NOT_OVERFLOW:
- case SLJIT_D_EQUAL:
+ case SLJIT_EQUAL_F64:
return 0x00000000;
case SLJIT_NOT_EQUAL:
case SLJIT_MUL_OVERFLOW:
- case SLJIT_D_NOT_EQUAL:
+ case SLJIT_NOT_EQUAL_F64:
return 0x10000000;
case SLJIT_LESS:
- case SLJIT_D_LESS:
+ case SLJIT_LESS_F64:
return 0x30000000;
case SLJIT_GREATER_EQUAL:
- case SLJIT_D_GREATER_EQUAL:
+ case SLJIT_GREATER_EQUAL_F64:
return 0x20000000;
case SLJIT_GREATER:
- case SLJIT_D_GREATER:
+ case SLJIT_GREATER_F64:
return 0x80000000;
case SLJIT_LESS_EQUAL:
- case SLJIT_D_LESS_EQUAL:
+ case SLJIT_LESS_EQUAL_F64:
return 0x90000000;
case SLJIT_SIG_LESS:
@@ -2368,11 +2368,11 @@ static sljit_uw get_cc(sljit_si type)
return 0xd0000000;
case SLJIT_OVERFLOW:
- case SLJIT_D_UNORDERED:
+ case SLJIT_UNORDERED_F64:
return 0x60000000;
case SLJIT_NOT_OVERFLOW:
- case SLJIT_D_ORDERED:
+ case SLJIT_ORDERED_F64:
return 0x70000000;
default:
@@ -2397,7 +2397,7 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emit_label(struct sljit_compi
return label;
}
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, sljit_si type)
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, sljit_s32 type)
{
struct sljit_jump *jump;
@@ -2438,7 +2438,7 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compile
return jump;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_ijump(struct sljit_compiler *compiler, sljit_si type, sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 src, sljit_sw srcw)
{
struct sljit_jump *jump;
@@ -2475,12 +2475,12 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_ijump(struct sljit_compiler *compil
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw,
- sljit_si type)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw,
+ sljit_s32 type)
{
- sljit_si dst_r, flags = GET_ALL_FLAGS(op);
+ sljit_s32 dst_r, flags = GET_ALL_FLAGS(op);
sljit_uw cc, ins;
CHECK_ERROR();
@@ -2528,10 +2528,10 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_flags(struct sljit_compiler *com
return (flags & SLJIT_SET_E) ? push_inst(compiler, EMIT_DATA_PROCESS_INS(MOV_DP, SET_FLAGS, TMP_REG1, SLJIT_UNUSED, RM(dst_r))) : SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_si dst, sljit_sw dstw, sljit_sw init_value)
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw init_value)
{
struct sljit_const *const_;
- sljit_si reg;
+ sljit_s32 reg;
CHECK_ERROR_PTR();
CHECK_PTR(check_sljit_emit_const(compiler, dst, dstw, init_value));
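The ARM 32-bit hunks above are mechanical renames rather than behavioural changes: the width-ambiguous sljit typedefs (sljit_si, sljit_ub, sljit_uh, sljit_ui) become the explicit sljit_s32, sljit_u8, sljit_u16 and sljit_u32, and the floating-point names drop the D/SINGLE spelling (SLJIT_SINGLE_OP becomes SLJIT_F32_OP, SLJIT_DADD becomes SLJIT_ADD_F64, SLJIT_CONVD_FROMS becomes SLJIT_CONV_F64_FROM_F32, and so on). A minimal caller-side sketch of the rename, using a hypothetical wrapper that is not part of this patch:

/* Illustrative sketch only; emit_f32_add() is a hypothetical helper.
 * Only the renamed SLJIT identifiers come from the hunks above. */
static sljit_s32 emit_f32_add(struct sljit_compiler *compiler)
{
    /* previously: sljit_emit_fop2(compiler, SLJIT_DADD | SLJIT_SINGLE_OP, ...) */
    return sljit_emit_fop2(compiler, SLJIT_ADD_F64 | SLJIT_F32_OP,
        SLJIT_FR0, 0, SLJIT_FR0, 0, SLJIT_FR1, 0);
}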
diff --git a/src/3rdparty/pcre/sljit/sljitNativeARM_64.c b/src/3rdparty/pcre/sljit/sljitNativeARM_64.c
index 044a675eee..d9958512c8 100644
--- a/src/3rdparty/pcre/sljit/sljitNativeARM_64.c
+++ b/src/3rdparty/pcre/sljit/sljitNativeARM_64.c
@@ -24,13 +24,13 @@
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
-SLJIT_API_FUNC_ATTRIBUTE SLJIT_CONST char* sljit_get_platform_name(void)
+SLJIT_API_FUNC_ATTRIBUTE const char* sljit_get_platform_name(void)
{
return "ARM-64" SLJIT_CPUINFO;
}
/* Length of an instruction word */
-typedef sljit_ui sljit_ins;
+typedef sljit_u32 sljit_ins;
#define TMP_ZERO (0)
@@ -43,7 +43,7 @@ typedef sljit_ui sljit_ins;
#define TMP_FREG1 (0)
#define TMP_FREG2 (SLJIT_NUMBER_OF_FLOAT_REGISTERS + 1)
-static SLJIT_CONST sljit_ub reg_map[SLJIT_NUMBER_OF_REGISTERS + 8] = {
+static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 8] = {
31, 0, 1, 2, 3, 4, 5, 6, 7, 12, 13, 14, 15, 16, 17, 8, 28, 27, 26, 25, 24, 23, 22, 21, 20, 19, 29, 9, 10, 11, 30, 31
};
@@ -124,7 +124,7 @@ static SLJIT_CONST sljit_ub reg_map[SLJIT_NUMBER_OF_REGISTERS + 8] = {
/* dest_reg is the absolute name of the register
Useful for reordering instructions in the delay slot. */
-static sljit_si push_inst(struct sljit_compiler *compiler, sljit_ins ins)
+static sljit_s32 push_inst(struct sljit_compiler *compiler, sljit_ins ins)
{
sljit_ins *ptr = (sljit_ins*)ensure_buf(compiler, sizeof(sljit_ins));
FAIL_IF(!ptr);
@@ -133,7 +133,7 @@ static sljit_si push_inst(struct sljit_compiler *compiler, sljit_ins ins)
return SLJIT_SUCCESS;
}
-static SLJIT_INLINE sljit_si emit_imm64_const(struct sljit_compiler *compiler, sljit_si dst, sljit_uw imm)
+static SLJIT_INLINE sljit_s32 emit_imm64_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_uw imm)
{
FAIL_IF(push_inst(compiler, MOVZ | RD(dst) | ((imm & 0xffff) << 5)));
FAIL_IF(push_inst(compiler, MOVK | RD(dst) | (((imm >> 16) & 0xffff) << 5) | (1 << 21)));
@@ -143,7 +143,7 @@ static SLJIT_INLINE sljit_si emit_imm64_const(struct sljit_compiler *compiler, s
static SLJIT_INLINE void modify_imm64_const(sljit_ins* inst, sljit_uw new_imm)
{
- sljit_si dst = inst[0] & 0x1f;
+ sljit_s32 dst = inst[0] & 0x1f;
SLJIT_ASSERT((inst[0] & 0xffe00000) == MOVZ && (inst[1] & 0xffe00000) == (MOVK | (1 << 21)));
inst[0] = MOVZ | dst | ((new_imm & 0xffff) << 5);
inst[1] = MOVK | dst | (((new_imm >> 16) & 0xffff) << 5) | (1 << 21);
@@ -151,7 +151,7 @@ static SLJIT_INLINE void modify_imm64_const(sljit_ins* inst, sljit_uw new_imm)
inst[3] = MOVK | dst | ((new_imm >> 48) << 5) | (3 << 21);
}
-static SLJIT_INLINE sljit_si detect_jump_type(struct sljit_jump *jump, sljit_ins *code_ptr, sljit_ins *code)
+static SLJIT_INLINE sljit_s32 detect_jump_type(struct sljit_jump *jump, sljit_ins *code_ptr, sljit_ins *code)
{
sljit_sw diff;
sljit_uw target_addr;
@@ -212,7 +212,7 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
sljit_ins *buf_end;
sljit_uw word_count;
sljit_uw addr;
- sljit_si dst;
+ sljit_s32 dst;
struct sljit_label *label;
struct sljit_jump *jump;
@@ -346,9 +346,9 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
#define LOGICAL_IMM_CHECK 0x100
-static sljit_ins logical_imm(sljit_sw imm, sljit_si len)
+static sljit_ins logical_imm(sljit_sw imm, sljit_s32 len)
{
- sljit_si negated, ones, right;
+ sljit_s32 negated, ones, right;
sljit_uw mask, uimm;
sljit_ins ins;
@@ -356,12 +356,12 @@ static sljit_ins logical_imm(sljit_sw imm, sljit_si len)
len &= ~LOGICAL_IMM_CHECK;
if (len == 32 && (imm == 0 || imm == -1))
return 0;
- if (len == 16 && ((sljit_si)imm == 0 || (sljit_si)imm == -1))
+ if (len == 16 && ((sljit_s32)imm == 0 || (sljit_s32)imm == -1))
return 0;
}
SLJIT_ASSERT((len == 32 && imm != 0 && imm != -1)
- || (len == 16 && (sljit_si)imm != 0 && (sljit_si)imm != -1));
+ || (len == 16 && (sljit_s32)imm != 0 && (sljit_s32)imm != -1));
uimm = (sljit_uw)imm;
while (1) {
if (len <= 0) {
@@ -410,10 +410,10 @@ static sljit_ins logical_imm(sljit_sw imm, sljit_si len)
#undef COUNT_TRAILING_ZERO
-static sljit_si load_immediate(struct sljit_compiler *compiler, sljit_si dst, sljit_sw simm)
+static sljit_s32 load_immediate(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw simm)
{
sljit_uw imm = (sljit_uw)simm;
- sljit_si i, zeros, ones, first;
+ sljit_s32 i, zeros, ones, first;
sljit_ins bitmask;
if (imm <= 0xffff)
@@ -512,15 +512,15 @@ static sljit_si load_immediate(struct sljit_compiler *compiler, sljit_si dst, sl
dst = TMP_ZERO; \
}
-static sljit_si emit_op_imm(struct sljit_compiler *compiler, sljit_si flags, sljit_si dst, sljit_sw arg1, sljit_sw arg2)
+static sljit_s32 emit_op_imm(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 dst, sljit_sw arg1, sljit_sw arg2)
{
/* dst must be register, TMP_REG1
arg1 must be register, TMP_REG1, imm
arg2 must be register, TMP_REG2, imm */
sljit_ins inv_bits = (flags & INT_OP) ? (1 << 31) : 0;
sljit_ins inst_bits;
- sljit_si op = (flags & 0xffff);
- sljit_si reg;
+ sljit_s32 op = (flags & 0xffff);
+ sljit_s32 reg;
sljit_sw imm, nimm;
if (SLJIT_UNLIKELY((flags & (ARG1_IMM | ARG2_IMM)) == (ARG1_IMM | ARG2_IMM))) {
@@ -667,34 +667,34 @@ static sljit_si emit_op_imm(struct sljit_compiler *compiler, sljit_si flags, slj
if (dst == arg2)
return SLJIT_SUCCESS;
return push_inst(compiler, ORR | RD(dst) | RN(TMP_ZERO) | RM(arg2));
- case SLJIT_MOV_UB:
- case SLJIT_MOVU_UB:
+ case SLJIT_MOV_U8:
+ case SLJIT_MOVU_U8:
SLJIT_ASSERT(!(flags & SET_FLAGS) && arg1 == TMP_REG1);
return push_inst(compiler, (UBFM ^ (1 << 31)) | RD(dst) | RN(arg2) | (7 << 10));
- case SLJIT_MOV_SB:
- case SLJIT_MOVU_SB:
+ case SLJIT_MOV_S8:
+ case SLJIT_MOVU_S8:
SLJIT_ASSERT(!(flags & SET_FLAGS) && arg1 == TMP_REG1);
if (!(flags & INT_OP))
inv_bits |= 1 << 22;
return push_inst(compiler, (SBFM ^ inv_bits) | RD(dst) | RN(arg2) | (7 << 10));
- case SLJIT_MOV_UH:
- case SLJIT_MOVU_UH:
+ case SLJIT_MOV_U16:
+ case SLJIT_MOVU_U16:
SLJIT_ASSERT(!(flags & SET_FLAGS) && arg1 == TMP_REG1);
return push_inst(compiler, (UBFM ^ (1 << 31)) | RD(dst) | RN(arg2) | (15 << 10));
- case SLJIT_MOV_SH:
- case SLJIT_MOVU_SH:
+ case SLJIT_MOV_S16:
+ case SLJIT_MOVU_S16:
SLJIT_ASSERT(!(flags & SET_FLAGS) && arg1 == TMP_REG1);
if (!(flags & INT_OP))
inv_bits |= 1 << 22;
return push_inst(compiler, (SBFM ^ inv_bits) | RD(dst) | RN(arg2) | (15 << 10));
- case SLJIT_MOV_UI:
- case SLJIT_MOVU_UI:
+ case SLJIT_MOV_U32:
+ case SLJIT_MOVU_U32:
SLJIT_ASSERT(!(flags & SET_FLAGS) && arg1 == TMP_REG1);
if ((flags & INT_OP) && dst == arg2)
return SLJIT_SUCCESS;
return push_inst(compiler, (ORR ^ (1 << 31)) | RD(dst) | RN(TMP_ZERO) | RM(arg2));
- case SLJIT_MOV_SI:
- case SLJIT_MOVU_SI:
+ case SLJIT_MOV_S32:
+ case SLJIT_MOVU_S32:
SLJIT_ASSERT(!(flags & SET_FLAGS) && arg1 == TMP_REG1);
if ((flags & INT_OP) && dst == arg2)
return SLJIT_SUCCESS;
@@ -777,28 +777,28 @@ set_flags:
#define MEM_SIZE_SHIFT(flags) ((flags) >> 8)
-static SLJIT_CONST sljit_ins sljit_mem_imm[4] = {
+static const sljit_ins sljit_mem_imm[4] = {
/* u l */ 0x39400000 /* ldrb [reg,imm] */,
/* u s */ 0x39000000 /* strb [reg,imm] */,
/* s l */ 0x39800000 /* ldrsb [reg,imm] */,
/* s s */ 0x39000000 /* strb [reg,imm] */,
};
-static SLJIT_CONST sljit_ins sljit_mem_simm[4] = {
+static const sljit_ins sljit_mem_simm[4] = {
/* u l */ 0x38400000 /* ldurb [reg,imm] */,
/* u s */ 0x38000000 /* sturb [reg,imm] */,
/* s l */ 0x38800000 /* ldursb [reg,imm] */,
/* s s */ 0x38000000 /* sturb [reg,imm] */,
};
-static SLJIT_CONST sljit_ins sljit_mem_pre_simm[4] = {
+static const sljit_ins sljit_mem_pre_simm[4] = {
/* u l */ 0x38400c00 /* ldrb [reg,imm]! */,
/* u s */ 0x38000c00 /* strb [reg,imm]! */,
/* s l */ 0x38800c00 /* ldrsb [reg,imm]! */,
/* s s */ 0x38000c00 /* strb [reg,imm]! */,
};
-static SLJIT_CONST sljit_ins sljit_mem_reg[4] = {
+static const sljit_ins sljit_mem_reg[4] = {
/* u l */ 0x38606800 /* ldrb [reg,reg] */,
/* u s */ 0x38206800 /* strb [reg,reg] */,
/* s l */ 0x38a06800 /* ldrsb [reg,reg] */,
@@ -806,7 +806,7 @@ static SLJIT_CONST sljit_ins sljit_mem_reg[4] = {
};
/* Helper function. Dst should be reg + value, using at most 1 instruction, flags are not set. */
-static sljit_si emit_set_delta(struct sljit_compiler *compiler, sljit_si dst, sljit_si reg, sljit_sw value)
+static sljit_s32 emit_set_delta(struct sljit_compiler *compiler, sljit_s32 dst, sljit_s32 reg, sljit_sw value)
{
if (value >= 0) {
if (value <= 0xfff)
@@ -825,9 +825,9 @@ static sljit_si emit_set_delta(struct sljit_compiler *compiler, sljit_si dst, sl
}
/* Can perform an operation using at most 1 instruction. */
-static sljit_si getput_arg_fast(struct sljit_compiler *compiler, sljit_si flags, sljit_si reg, sljit_si arg, sljit_sw argw)
+static sljit_s32 getput_arg_fast(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg, sljit_s32 arg, sljit_sw argw)
{
- sljit_ui shift = MEM_SIZE_SHIFT(flags);
+ sljit_u32 shift = MEM_SIZE_SHIFT(flags);
SLJIT_ASSERT(arg & SLJIT_MEM);
@@ -882,7 +882,7 @@ static sljit_si getput_arg_fast(struct sljit_compiler *compiler, sljit_si flags,
/* see getput_arg below.
Note: can_cache is called only for binary operators. Those
operators always use word arguments without write back. */
-static sljit_si can_cache(sljit_si arg, sljit_sw argw, sljit_si next_arg, sljit_sw next_argw)
+static sljit_s32 can_cache(sljit_s32 arg, sljit_sw argw, sljit_s32 next_arg, sljit_sw next_argw)
{
sljit_sw diff;
if ((arg & OFFS_REG_MASK) || !(next_arg & SLJIT_MEM))
@@ -906,11 +906,11 @@ static sljit_si can_cache(sljit_si arg, sljit_sw argw, sljit_si next_arg, sljit_
}
/* Emit the necessary instructions. See can_cache above. */
-static sljit_si getput_arg(struct sljit_compiler *compiler, sljit_si flags, sljit_si reg,
- sljit_si arg, sljit_sw argw, sljit_si next_arg, sljit_sw next_argw)
+static sljit_s32 getput_arg(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg,
+ sljit_s32 arg, sljit_sw argw, sljit_s32 next_arg, sljit_sw next_argw)
{
- sljit_ui shift = MEM_SIZE_SHIFT(flags);
- sljit_si tmp_r, other_r;
+ sljit_u32 shift = MEM_SIZE_SHIFT(flags);
+ sljit_s32 tmp_r, other_r;
sljit_sw diff;
SLJIT_ASSERT(arg & SLJIT_MEM);
@@ -1040,7 +1040,7 @@ static sljit_si getput_arg(struct sljit_compiler *compiler, sljit_si flags, slji
return push_inst(compiler, sljit_mem_imm[flags & 0x3] | (shift << 30) | RT(reg) | RN(TMP_REG3));
}
-static SLJIT_INLINE sljit_si emit_op_mem(struct sljit_compiler *compiler, sljit_si flags, sljit_si reg, sljit_si arg, sljit_sw argw)
+static SLJIT_INLINE sljit_s32 emit_op_mem(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg, sljit_s32 arg, sljit_sw argw)
{
if (getput_arg_fast(compiler, flags, reg, arg, argw))
return compiler->error;
@@ -1049,7 +1049,7 @@ static SLJIT_INLINE sljit_si emit_op_mem(struct sljit_compiler *compiler, sljit_
return getput_arg(compiler, flags, reg, arg, argw, 0, 0);
}
-static SLJIT_INLINE sljit_si emit_op_mem2(struct sljit_compiler *compiler, sljit_si flags, sljit_si reg, sljit_si arg1, sljit_sw arg1w, sljit_si arg2, sljit_sw arg2w)
+static SLJIT_INLINE sljit_s32 emit_op_mem2(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg, sljit_s32 arg1, sljit_sw arg1w, sljit_s32 arg2, sljit_sw arg2w)
{
if (getput_arg_fast(compiler, flags, reg, arg1, arg1w))
return compiler->error;
@@ -1060,11 +1060,11 @@ static SLJIT_INLINE sljit_si emit_op_mem2(struct sljit_compiler *compiler, sljit
/* Entry, exit */
/* --------------------------------------------------------------------- */
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compiler *compiler,
- sljit_si options, sljit_si args, sljit_si scratches, sljit_si saveds,
- sljit_si fscratches, sljit_si fsaveds, sljit_si local_size)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compiler,
+ sljit_s32 options, sljit_s32 args, sljit_s32 scratches, sljit_s32 saveds,
+ sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
{
- sljit_si i, tmp, offs, prev, saved_regs_size;
+ sljit_s32 i, tmp, offs, prev, saved_regs_size;
CHECK_ERROR();
CHECK(check_sljit_emit_enter(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size));
@@ -1148,9 +1148,9 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compiler *compil
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_set_context(struct sljit_compiler *compiler,
- sljit_si options, sljit_si args, sljit_si scratches, sljit_si saveds,
- sljit_si fscratches, sljit_si fsaveds, sljit_si local_size)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_set_context(struct sljit_compiler *compiler,
+ sljit_s32 options, sljit_s32 args, sljit_s32 scratches, sljit_s32 saveds,
+ sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
{
CHECK_ERROR();
CHECK(check_sljit_set_context(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size));
@@ -1162,10 +1162,10 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_set_context(struct sljit_compiler *compi
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_return(struct sljit_compiler *compiler, sljit_si op, sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 src, sljit_sw srcw)
{
- sljit_si local_size;
- sljit_si i, tmp, offs, prev, saved_regs_size;
+ sljit_s32 local_size;
+ sljit_s32 i, tmp, offs, prev, saved_regs_size;
CHECK_ERROR();
CHECK(check_sljit_emit_return(compiler, op, src, srcw));
@@ -1243,9 +1243,9 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_return(struct sljit_compiler *compi
/* Operators */
/* --------------------------------------------------------------------- */
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op0(struct sljit_compiler *compiler, sljit_si op)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op0(struct sljit_compiler *compiler, sljit_s32 op)
{
- sljit_ins inv_bits = (op & SLJIT_INT_OP) ? (1 << 31) : 0;
+ sljit_ins inv_bits = (op & SLJIT_I32_OP) ? (1 << 31) : 0;
CHECK_ERROR();
CHECK(check_sljit_emit_op0(compiler, op));
@@ -1256,31 +1256,31 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op0(struct sljit_compiler *compiler
return push_inst(compiler, BRK);
case SLJIT_NOP:
return push_inst(compiler, NOP);
- case SLJIT_LUMUL:
- case SLJIT_LSMUL:
+ case SLJIT_LMUL_UW:
+ case SLJIT_LMUL_SW:
FAIL_IF(push_inst(compiler, ORR | RD(TMP_REG1) | RN(TMP_ZERO) | RM(SLJIT_R0)));
FAIL_IF(push_inst(compiler, MADD | RD(SLJIT_R0) | RN(SLJIT_R0) | RM(SLJIT_R1) | RT2(TMP_ZERO)));
- return push_inst(compiler, (op == SLJIT_LUMUL ? UMULH : SMULH) | RD(SLJIT_R1) | RN(TMP_REG1) | RM(SLJIT_R1));
- case SLJIT_UDIVMOD:
- case SLJIT_SDIVMOD:
+ return push_inst(compiler, (op == SLJIT_LMUL_UW ? UMULH : SMULH) | RD(SLJIT_R1) | RN(TMP_REG1) | RM(SLJIT_R1));
+ case SLJIT_DIVMOD_UW:
+ case SLJIT_DIVMOD_SW:
FAIL_IF(push_inst(compiler, (ORR ^ inv_bits) | RD(TMP_REG1) | RN(TMP_ZERO) | RM(SLJIT_R0)));
- FAIL_IF(push_inst(compiler, ((op == SLJIT_UDIVMOD ? UDIV : SDIV) ^ inv_bits) | RD(SLJIT_R0) | RN(SLJIT_R0) | RM(SLJIT_R1)));
+ FAIL_IF(push_inst(compiler, ((op == SLJIT_DIVMOD_UW ? UDIV : SDIV) ^ inv_bits) | RD(SLJIT_R0) | RN(SLJIT_R0) | RM(SLJIT_R1)));
FAIL_IF(push_inst(compiler, (MADD ^ inv_bits) | RD(SLJIT_R1) | RN(SLJIT_R0) | RM(SLJIT_R1) | RT2(TMP_ZERO)));
return push_inst(compiler, (SUB ^ inv_bits) | RD(SLJIT_R1) | RN(TMP_REG1) | RM(SLJIT_R1));
- case SLJIT_UDIVI:
- case SLJIT_SDIVI:
- return push_inst(compiler, ((op == SLJIT_UDIVI ? UDIV : SDIV) ^ inv_bits) | RD(SLJIT_R0) | RN(SLJIT_R0) | RM(SLJIT_R1));
+ case SLJIT_DIV_UW:
+ case SLJIT_DIV_SW:
+ return push_inst(compiler, ((op == SLJIT_DIV_UW ? UDIV : SDIV) ^ inv_bits) | RD(SLJIT_R0) | RN(SLJIT_R0) | RM(SLJIT_R1));
}
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op1(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
{
- sljit_si dst_r, flags, mem_flags;
- sljit_si op_flags = GET_ALL_FLAGS(op);
+ sljit_s32 dst_r, flags, mem_flags;
+ sljit_s32 op_flags = GET_ALL_FLAGS(op);
CHECK_ERROR();
CHECK(check_sljit_emit_op1(compiler, op, dst, dstw, src, srcw));
@@ -1299,69 +1299,69 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op1(struct sljit_compiler *compiler
case SLJIT_MOV_P:
flags = WORD_SIZE;
break;
- case SLJIT_MOV_UB:
+ case SLJIT_MOV_U8:
flags = BYTE_SIZE;
if (src & SLJIT_IMM)
- srcw = (sljit_ub)srcw;
+ srcw = (sljit_u8)srcw;
break;
- case SLJIT_MOV_SB:
+ case SLJIT_MOV_S8:
flags = BYTE_SIZE | SIGNED;
if (src & SLJIT_IMM)
- srcw = (sljit_sb)srcw;
+ srcw = (sljit_s8)srcw;
break;
- case SLJIT_MOV_UH:
+ case SLJIT_MOV_U16:
flags = HALF_SIZE;
if (src & SLJIT_IMM)
- srcw = (sljit_uh)srcw;
+ srcw = (sljit_u16)srcw;
break;
- case SLJIT_MOV_SH:
+ case SLJIT_MOV_S16:
flags = HALF_SIZE | SIGNED;
if (src & SLJIT_IMM)
- srcw = (sljit_sh)srcw;
+ srcw = (sljit_s16)srcw;
break;
- case SLJIT_MOV_UI:
+ case SLJIT_MOV_U32:
flags = INT_SIZE;
if (src & SLJIT_IMM)
- srcw = (sljit_ui)srcw;
+ srcw = (sljit_u32)srcw;
break;
- case SLJIT_MOV_SI:
+ case SLJIT_MOV_S32:
flags = INT_SIZE | SIGNED;
if (src & SLJIT_IMM)
- srcw = (sljit_si)srcw;
+ srcw = (sljit_s32)srcw;
break;
case SLJIT_MOVU:
case SLJIT_MOVU_P:
flags = WORD_SIZE | UPDATE;
break;
- case SLJIT_MOVU_UB:
+ case SLJIT_MOVU_U8:
flags = BYTE_SIZE | UPDATE;
if (src & SLJIT_IMM)
- srcw = (sljit_ub)srcw;
+ srcw = (sljit_u8)srcw;
break;
- case SLJIT_MOVU_SB:
+ case SLJIT_MOVU_S8:
flags = BYTE_SIZE | SIGNED | UPDATE;
if (src & SLJIT_IMM)
- srcw = (sljit_sb)srcw;
+ srcw = (sljit_s8)srcw;
break;
- case SLJIT_MOVU_UH:
+ case SLJIT_MOVU_U16:
flags = HALF_SIZE | UPDATE;
if (src & SLJIT_IMM)
- srcw = (sljit_uh)srcw;
+ srcw = (sljit_u16)srcw;
break;
- case SLJIT_MOVU_SH:
+ case SLJIT_MOVU_S16:
flags = HALF_SIZE | SIGNED | UPDATE;
if (src & SLJIT_IMM)
- srcw = (sljit_sh)srcw;
+ srcw = (sljit_s16)srcw;
break;
- case SLJIT_MOVU_UI:
+ case SLJIT_MOVU_U32:
flags = INT_SIZE | UPDATE;
if (src & SLJIT_IMM)
- srcw = (sljit_ui)srcw;
+ srcw = (sljit_u32)srcw;
break;
- case SLJIT_MOVU_SI:
+ case SLJIT_MOVU_S32:
flags = INT_SIZE | SIGNED | UPDATE;
if (src & SLJIT_IMM)
- srcw = (sljit_si)srcw;
+ srcw = (sljit_s32)srcw;
break;
default:
SLJIT_ASSERT_STOP();
@@ -1378,7 +1378,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op1(struct sljit_compiler *compiler
FAIL_IF(getput_arg(compiler, flags, dst_r, src, srcw, dst, dstw));
} else {
if (dst_r != TMP_REG1)
- return emit_op_imm(compiler, op | ((op_flags & SLJIT_INT_OP) ? INT_OP : 0), dst_r, TMP_REG1, src);
+ return emit_op_imm(compiler, op | ((op_flags & SLJIT_I32_OP) ? INT_OP : 0), dst_r, TMP_REG1, src);
dst_r = src;
}
@@ -1393,7 +1393,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op1(struct sljit_compiler *compiler
flags = GET_FLAGS(op_flags) ? SET_FLAGS : 0;
mem_flags = WORD_SIZE;
- if (op_flags & SLJIT_INT_OP) {
+ if (op_flags & SLJIT_I32_OP) {
flags |= INT_OP;
mem_flags = INT_SIZE;
}
@@ -1411,8 +1411,8 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op1(struct sljit_compiler *compiler
if (src & SLJIT_IMM) {
flags |= ARG2_IMM;
- if (op_flags & SLJIT_INT_OP)
- srcw = (sljit_si)srcw;
+ if (op_flags & SLJIT_I32_OP)
+ srcw = (sljit_s32)srcw;
} else
srcw = src;
@@ -1427,12 +1427,12 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op1(struct sljit_compiler *compiler
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op2(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
- sljit_si dst_r, flags, mem_flags;
+ sljit_s32 dst_r, flags, mem_flags;
CHECK_ERROR();
CHECK(check_sljit_emit_op2(compiler, op, dst, dstw, src1, src1w, src2, src2w));
@@ -1446,7 +1446,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op2(struct sljit_compiler *compiler
dst_r = SLOW_IS_REG(dst) ? dst : TMP_REG1;
flags = GET_FLAGS(op) ? SET_FLAGS : 0;
mem_flags = WORD_SIZE;
- if (op & SLJIT_INT_OP) {
+ if (op & SLJIT_I32_OP) {
flags |= INT_OP;
mem_flags = INT_SIZE;
}
@@ -1512,20 +1512,20 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op2(struct sljit_compiler *compiler
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_get_register_index(sljit_si reg)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 reg)
{
CHECK_REG_INDEX(check_sljit_get_register_index(reg));
return reg_map[reg];
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_get_float_register_index(sljit_si reg)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_float_register_index(sljit_s32 reg)
{
CHECK_REG_INDEX(check_sljit_get_float_register_index(reg));
return reg;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_custom(struct sljit_compiler *compiler,
- void *instruction, sljit_si size)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *compiler,
+ void *instruction, sljit_s32 size)
{
CHECK_ERROR();
CHECK(check_sljit_emit_op_custom(compiler, instruction, size));
@@ -1537,7 +1537,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_custom(struct sljit_compiler *co
/* Floating point operators */
/* --------------------------------------------------------------------- */
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_is_fpu_available(void)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_is_fpu_available(void)
{
#ifdef SLJIT_IS_FPU_AVAILABLE
return SLJIT_IS_FPU_AVAILABLE;
@@ -1547,11 +1547,11 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_is_fpu_available(void)
#endif
}
-static sljit_si emit_fop_mem(struct sljit_compiler *compiler, sljit_si flags, sljit_si reg, sljit_si arg, sljit_sw argw)
+static sljit_s32 emit_fop_mem(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg, sljit_s32 arg, sljit_sw argw)
{
- sljit_ui shift = MEM_SIZE_SHIFT(flags);
+ sljit_u32 shift = MEM_SIZE_SHIFT(flags);
sljit_ins ins_bits = (shift << 30);
- sljit_si other_r;
+ sljit_s32 other_r;
sljit_sw diff;
SLJIT_ASSERT(arg & SLJIT_MEM);
@@ -1600,45 +1600,45 @@ static sljit_si emit_fop_mem(struct sljit_compiler *compiler, sljit_si flags, sl
return push_inst(compiler, STR_FI | ins_bits | VT(reg) | RN(TMP_REG3));
}
-static SLJIT_INLINE sljit_si sljit_emit_fop1_convw_fromd(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw)
+static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
{
- sljit_si dst_r = SLOW_IS_REG(dst) ? dst : TMP_REG1;
- sljit_ins inv_bits = (op & SLJIT_SINGLE_OP) ? (1 << 22) : 0;
+ sljit_s32 dst_r = SLOW_IS_REG(dst) ? dst : TMP_REG1;
+ sljit_ins inv_bits = (op & SLJIT_F32_OP) ? (1 << 22) : 0;
- if (GET_OPCODE(op) == SLJIT_CONVI_FROMD)
+ if (GET_OPCODE(op) == SLJIT_CONV_S32_FROM_F64)
inv_bits |= (1 << 31);
if (src & SLJIT_MEM) {
- emit_fop_mem(compiler, (op & SLJIT_SINGLE_OP) ? INT_SIZE : WORD_SIZE, TMP_FREG1, src, srcw);
+ emit_fop_mem(compiler, (op & SLJIT_F32_OP) ? INT_SIZE : WORD_SIZE, TMP_FREG1, src, srcw);
src = TMP_FREG1;
}
FAIL_IF(push_inst(compiler, (FCVTZS ^ inv_bits) | RD(dst_r) | VN(src)));
if (dst_r == TMP_REG1 && dst != SLJIT_UNUSED)
- return emit_op_mem(compiler, ((GET_OPCODE(op) == SLJIT_CONVI_FROMD) ? INT_SIZE : WORD_SIZE) | STORE, TMP_REG1, dst, dstw);
+ return emit_op_mem(compiler, ((GET_OPCODE(op) == SLJIT_CONV_S32_FROM_F64) ? INT_SIZE : WORD_SIZE) | STORE, TMP_REG1, dst, dstw);
return SLJIT_SUCCESS;
}
-static SLJIT_INLINE sljit_si sljit_emit_fop1_convd_fromw(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw)
+static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_sw(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
{
- sljit_si dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
- sljit_ins inv_bits = (op & SLJIT_SINGLE_OP) ? (1 << 22) : 0;
+ sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
+ sljit_ins inv_bits = (op & SLJIT_F32_OP) ? (1 << 22) : 0;
- if (GET_OPCODE(op) == SLJIT_CONVD_FROMI)
+ if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_S32)
inv_bits |= (1 << 31);
if (src & SLJIT_MEM) {
- emit_op_mem(compiler, ((GET_OPCODE(op) == SLJIT_CONVD_FROMI) ? INT_SIZE : WORD_SIZE), TMP_REG1, src, srcw);
+ emit_op_mem(compiler, ((GET_OPCODE(op) == SLJIT_CONV_F64_FROM_S32) ? INT_SIZE : WORD_SIZE), TMP_REG1, src, srcw);
src = TMP_REG1;
} else if (src & SLJIT_IMM) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- if (GET_OPCODE(op) == SLJIT_CONVD_FROMI)
- srcw = (sljit_si)srcw;
+ if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_S32)
+ srcw = (sljit_s32)srcw;
#endif
FAIL_IF(load_immediate(compiler, TMP_REG1, srcw));
src = TMP_REG1;
@@ -1647,16 +1647,16 @@ static SLJIT_INLINE sljit_si sljit_emit_fop1_convd_fromw(struct sljit_compiler *
FAIL_IF(push_inst(compiler, (SCVTF ^ inv_bits) | VD(dst_r) | RN(src)));
if (dst & SLJIT_MEM)
- return emit_fop_mem(compiler, ((op & SLJIT_SINGLE_OP) ? INT_SIZE : WORD_SIZE) | STORE, TMP_FREG1, dst, dstw);
+ return emit_fop_mem(compiler, ((op & SLJIT_F32_OP) ? INT_SIZE : WORD_SIZE) | STORE, TMP_FREG1, dst, dstw);
return SLJIT_SUCCESS;
}
-static SLJIT_INLINE sljit_si sljit_emit_fop1_cmp(struct sljit_compiler *compiler, sljit_si op,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+static SLJIT_INLINE sljit_s32 sljit_emit_fop1_cmp(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
- sljit_si mem_flags = (op & SLJIT_SINGLE_OP) ? INT_SIZE : WORD_SIZE;
- sljit_ins inv_bits = (op & SLJIT_SINGLE_OP) ? (1 << 22) : 0;
+ sljit_s32 mem_flags = (op & SLJIT_F32_OP) ? INT_SIZE : WORD_SIZE;
+ sljit_ins inv_bits = (op & SLJIT_F32_OP) ? (1 << 22) : 0;
if (src1 & SLJIT_MEM) {
emit_fop_mem(compiler, mem_flags, TMP_FREG1, src1, src1w);
@@ -1671,11 +1671,11 @@ static SLJIT_INLINE sljit_si sljit_emit_fop1_cmp(struct sljit_compiler *compiler
return push_inst(compiler, (FCMP ^ inv_bits) | VN(src1) | VM(src2));
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop1(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
{
- sljit_si dst_r, mem_flags = (op & SLJIT_SINGLE_OP) ? INT_SIZE : WORD_SIZE;
+ sljit_s32 dst_r, mem_flags = (op & SLJIT_F32_OP) ? INT_SIZE : WORD_SIZE;
sljit_ins inv_bits;
CHECK_ERROR();
@@ -1685,16 +1685,16 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop1(struct sljit_compiler *compile
SLJIT_COMPILE_ASSERT((INT_SIZE ^ 0x100) == WORD_SIZE, must_be_one_bit_difference);
SELECT_FOP1_OPERATION_WITH_CHECKS(compiler, op, dst, dstw, src, srcw);
- inv_bits = (op & SLJIT_SINGLE_OP) ? (1 << 22) : 0;
+ inv_bits = (op & SLJIT_F32_OP) ? (1 << 22) : 0;
dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
if (src & SLJIT_MEM) {
- emit_fop_mem(compiler, (GET_OPCODE(op) == SLJIT_CONVD_FROMS) ? (mem_flags ^ 0x100) : mem_flags, dst_r, src, srcw);
+ emit_fop_mem(compiler, (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_F32) ? (mem_flags ^ 0x100) : mem_flags, dst_r, src, srcw);
src = dst_r;
}
switch (GET_OPCODE(op)) {
- case SLJIT_DMOV:
+ case SLJIT_MOV_F64:
if (src != dst_r) {
if (dst_r != TMP_FREG1)
FAIL_IF(push_inst(compiler, (FMOV ^ inv_bits) | VD(dst_r) | VN(src)));
@@ -1702,14 +1702,14 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop1(struct sljit_compiler *compile
dst_r = src;
}
break;
- case SLJIT_DNEG:
+ case SLJIT_NEG_F64:
FAIL_IF(push_inst(compiler, (FNEG ^ inv_bits) | VD(dst_r) | VN(src)));
break;
- case SLJIT_DABS:
+ case SLJIT_ABS_F64:
FAIL_IF(push_inst(compiler, (FABS ^ inv_bits) | VD(dst_r) | VN(src)));
break;
- case SLJIT_CONVD_FROMS:
- FAIL_IF(push_inst(compiler, FCVT | ((op & SLJIT_SINGLE_OP) ? (1 << 22) : (1 << 15)) | VD(dst_r) | VN(src)));
+ case SLJIT_CONV_F64_FROM_F32:
+ FAIL_IF(push_inst(compiler, FCVT | ((op & SLJIT_F32_OP) ? (1 << 22) : (1 << 15)) | VD(dst_r) | VN(src)));
break;
}
@@ -1718,13 +1718,13 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop1(struct sljit_compiler *compile
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop2(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
- sljit_si dst_r, mem_flags = (op & SLJIT_SINGLE_OP) ? INT_SIZE : WORD_SIZE;
- sljit_ins inv_bits = (op & SLJIT_SINGLE_OP) ? (1 << 22) : 0;
+ sljit_s32 dst_r, mem_flags = (op & SLJIT_F32_OP) ? INT_SIZE : WORD_SIZE;
+ sljit_ins inv_bits = (op & SLJIT_F32_OP) ? (1 << 22) : 0;
CHECK_ERROR();
CHECK(check_sljit_emit_fop2(compiler, op, dst, dstw, src1, src1w, src2, src2w));
@@ -1746,16 +1746,16 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop2(struct sljit_compiler *compile
}
switch (GET_OPCODE(op)) {
- case SLJIT_DADD:
+ case SLJIT_ADD_F64:
FAIL_IF(push_inst(compiler, (FADD ^ inv_bits) | VD(dst_r) | VN(src1) | VM(src2)));
break;
- case SLJIT_DSUB:
+ case SLJIT_SUB_F64:
FAIL_IF(push_inst(compiler, (FSUB ^ inv_bits) | VD(dst_r) | VN(src1) | VM(src2)));
break;
- case SLJIT_DMUL:
+ case SLJIT_MUL_F64:
FAIL_IF(push_inst(compiler, (FMUL ^ inv_bits) | VD(dst_r) | VN(src1) | VM(src2)));
break;
- case SLJIT_DDIV:
+ case SLJIT_DIV_F64:
FAIL_IF(push_inst(compiler, (FDIV ^ inv_bits) | VD(dst_r) | VN(src1) | VM(src2)));
break;
}
@@ -1769,7 +1769,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop2(struct sljit_compiler *compile
/* Other instructions */
/* --------------------------------------------------------------------- */
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_si dst, sljit_sw dstw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
{
CHECK_ERROR();
CHECK(check_sljit_emit_fast_enter(compiler, dst, dstw));
@@ -1786,7 +1786,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_enter(struct sljit_compiler *c
return emit_op_mem(compiler, WORD_SIZE | STORE, TMP_LR, dst, dstw);
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_return(struct sljit_compiler *compiler, sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_return(struct sljit_compiler *compiler, sljit_s32 src, sljit_sw srcw)
{
CHECK_ERROR();
CHECK(check_sljit_emit_fast_return(compiler, src, srcw));
@@ -1806,33 +1806,33 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_return(struct sljit_compiler *
/* Conditional instructions */
/* --------------------------------------------------------------------- */
-static sljit_uw get_cc(sljit_si type)
+static sljit_uw get_cc(sljit_s32 type)
{
switch (type) {
case SLJIT_EQUAL:
case SLJIT_MUL_NOT_OVERFLOW:
- case SLJIT_D_EQUAL:
+ case SLJIT_EQUAL_F64:
return 0x1;
case SLJIT_NOT_EQUAL:
case SLJIT_MUL_OVERFLOW:
- case SLJIT_D_NOT_EQUAL:
+ case SLJIT_NOT_EQUAL_F64:
return 0x0;
case SLJIT_LESS:
- case SLJIT_D_LESS:
+ case SLJIT_LESS_F64:
return 0x2;
case SLJIT_GREATER_EQUAL:
- case SLJIT_D_GREATER_EQUAL:
+ case SLJIT_GREATER_EQUAL_F64:
return 0x3;
case SLJIT_GREATER:
- case SLJIT_D_GREATER:
+ case SLJIT_GREATER_F64:
return 0x9;
case SLJIT_LESS_EQUAL:
- case SLJIT_D_LESS_EQUAL:
+ case SLJIT_LESS_EQUAL_F64:
return 0x8;
case SLJIT_SIG_LESS:
@@ -1848,11 +1848,11 @@ static sljit_uw get_cc(sljit_si type)
return 0xc;
case SLJIT_OVERFLOW:
- case SLJIT_D_UNORDERED:
+ case SLJIT_UNORDERED_F64:
return 0x7;
case SLJIT_NOT_OVERFLOW:
- case SLJIT_D_ORDERED:
+ case SLJIT_ORDERED_F64:
return 0x6;
default:
@@ -1877,7 +1877,7 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emit_label(struct sljit_compi
return label;
}
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, sljit_si type)
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, sljit_s32 type)
{
struct sljit_jump *jump;
@@ -1903,11 +1903,11 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compile
return jump;
}
-static SLJIT_INLINE struct sljit_jump* emit_cmp_to0(struct sljit_compiler *compiler, sljit_si type,
- sljit_si src, sljit_sw srcw)
+static SLJIT_INLINE struct sljit_jump* emit_cmp_to0(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 src, sljit_sw srcw)
{
struct sljit_jump *jump;
- sljit_ins inv_bits = (type & SLJIT_INT_OP) ? (1 << 31) : 0;
+ sljit_ins inv_bits = (type & SLJIT_I32_OP) ? (1 << 31) : 0;
SLJIT_ASSERT((type & 0xff) == SLJIT_EQUAL || (type & 0xff) == SLJIT_NOT_EQUAL);
ADJUST_LOCAL_OFFSET(src, srcw);
@@ -1937,7 +1937,7 @@ static SLJIT_INLINE struct sljit_jump* emit_cmp_to0(struct sljit_compiler *compi
return jump;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_ijump(struct sljit_compiler *compiler, sljit_si type, sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 src, sljit_sw srcw)
{
struct sljit_jump *jump;
@@ -1964,12 +1964,12 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_ijump(struct sljit_compiler *compil
return push_inst(compiler, ((type >= SLJIT_FAST_CALL) ? BLR : BR) | RN(TMP_REG1));
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw,
- sljit_si type)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw,
+ sljit_s32 type)
{
- sljit_si dst_r, flags, mem_flags;
+ sljit_s32 dst_r, flags, mem_flags;
sljit_ins cc;
CHECK_ERROR();
@@ -1994,7 +1994,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_flags(struct sljit_compiler *com
compiler->cache_argw = 0;
flags = GET_FLAGS(op) ? SET_FLAGS : 0;
mem_flags = WORD_SIZE;
- if (op & SLJIT_INT_OP) {
+ if (op & SLJIT_I32_OP) {
flags |= INT_OP;
mem_flags = INT_SIZE;
}
@@ -2014,10 +2014,10 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_flags(struct sljit_compiler *com
return emit_op_mem2(compiler, mem_flags | STORE, TMP_REG1, dst, dstw, 0, 0);
}
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_si dst, sljit_sw dstw, sljit_sw init_value)
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw init_value)
{
struct sljit_const *const_;
- sljit_si dst_r;
+ sljit_s32 dst_r;
CHECK_ERROR_PTR();
CHECK_PTR(check_sljit_emit_const(compiler, dst, dstw, init_value));
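The ARM-64 port gets the same treatment, plus the renamed integer MOV and op0 opcodes (SLJIT_MOV_UB becomes SLJIT_MOV_U8, SLJIT_LUMUL becomes SLJIT_LMUL_UW, SLJIT_UDIVI becomes SLJIT_DIV_UW, and SLJIT_INT_OP becomes SLJIT_I32_OP). A caller-side sketch under the same caveat, with a hypothetical helper not present in the patch:

/* Illustrative sketch only; load_byte() is a hypothetical helper.
 * The opcode and type names are the renamed ones from the hunks above. */
static sljit_s32 load_byte(struct sljit_compiler *compiler)
{
    /* previously: sljit_emit_op1(compiler, SLJIT_MOV_UB, ...) with sljit_si operands */
    return sljit_emit_op1(compiler, SLJIT_MOV_U8,
        SLJIT_R0, 0, SLJIT_MEM1(SLJIT_R1), 0);
}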
diff --git a/src/3rdparty/pcre/sljit/sljitNativeARM_T2_32.c b/src/3rdparty/pcre/sljit/sljitNativeARM_T2_32.c
index f9803f5d44..1ed44a8130 100644
--- a/src/3rdparty/pcre/sljit/sljitNativeARM_T2_32.c
+++ b/src/3rdparty/pcre/sljit/sljitNativeARM_T2_32.c
@@ -24,13 +24,13 @@
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
-SLJIT_API_FUNC_ATTRIBUTE SLJIT_CONST char* sljit_get_platform_name(void)
+SLJIT_API_FUNC_ATTRIBUTE const char* sljit_get_platform_name(void)
{
return "ARM-Thumb2" SLJIT_CPUINFO;
}
/* Length of an instruction word. */
-typedef sljit_ui sljit_ins;
+typedef sljit_u32 sljit_ins;
/* Last register + 1. */
#define TMP_REG1 (SLJIT_NUMBER_OF_REGISTERS + 2)
@@ -42,7 +42,7 @@ typedef sljit_ui sljit_ins;
#define TMP_FREG2 (SLJIT_NUMBER_OF_FLOAT_REGISTERS + 1)
/* See sljit_emit_enter and sljit_emit_op0 if you want to change them. */
-static SLJIT_CONST sljit_ub reg_map[SLJIT_NUMBER_OF_REGISTERS + 6] = {
+static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 6] = {
0, 0, 1, 2, 12, 11, 10, 9, 8, 7, 6, 5, 13, 3, 4, 14, 15
};
@@ -181,21 +181,21 @@ static SLJIT_CONST sljit_ub reg_map[SLJIT_NUMBER_OF_REGISTERS + 6] = {
#define VSTR_F32 0xed000a00
#define VSUB_F32 0xee300a40
-static sljit_si push_inst16(struct sljit_compiler *compiler, sljit_ins inst)
+static sljit_s32 push_inst16(struct sljit_compiler *compiler, sljit_ins inst)
{
- sljit_uh *ptr;
+ sljit_u16 *ptr;
SLJIT_ASSERT(!(inst & 0xffff0000));
- ptr = (sljit_uh*)ensure_buf(compiler, sizeof(sljit_uh));
+ ptr = (sljit_u16*)ensure_buf(compiler, sizeof(sljit_u16));
FAIL_IF(!ptr);
*ptr = inst;
compiler->size++;
return SLJIT_SUCCESS;
}
-static sljit_si push_inst32(struct sljit_compiler *compiler, sljit_ins inst)
+static sljit_s32 push_inst32(struct sljit_compiler *compiler, sljit_ins inst)
{
- sljit_uh *ptr = (sljit_uh*)ensure_buf(compiler, sizeof(sljit_ins));
+ sljit_u16 *ptr = (sljit_u16*)ensure_buf(compiler, sizeof(sljit_ins));
FAIL_IF(!ptr);
*ptr++ = inst >> 16;
*ptr = inst;
@@ -203,7 +203,7 @@ static sljit_si push_inst32(struct sljit_compiler *compiler, sljit_ins inst)
return SLJIT_SUCCESS;
}
-static SLJIT_INLINE sljit_si emit_imm32_const(struct sljit_compiler *compiler, sljit_si dst, sljit_uw imm)
+static SLJIT_INLINE sljit_s32 emit_imm32_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_uw imm)
{
FAIL_IF(push_inst32(compiler, MOVW | RD4(dst) |
COPY_BITS(imm, 12, 16, 4) | COPY_BITS(imm, 11, 26, 1) | COPY_BITS(imm, 8, 12, 3) | (imm & 0xff)));
@@ -211,9 +211,9 @@ static SLJIT_INLINE sljit_si emit_imm32_const(struct sljit_compiler *compiler, s
COPY_BITS(imm, 12 + 16, 16, 4) | COPY_BITS(imm, 11 + 16, 26, 1) | COPY_BITS(imm, 8 + 16, 12, 3) | ((imm & 0xff0000) >> 16));
}
-static SLJIT_INLINE void modify_imm32_const(sljit_uh *inst, sljit_uw new_imm)
+static SLJIT_INLINE void modify_imm32_const(sljit_u16 *inst, sljit_uw new_imm)
{
- sljit_si dst = inst[1] & 0x0f00;
+ sljit_s32 dst = inst[1] & 0x0f00;
SLJIT_ASSERT(((inst[0] & 0xfbf0) == (MOVW >> 16)) && ((inst[2] & 0xfbf0) == (MOVT >> 16)) && dst == (inst[3] & 0x0f00));
inst[0] = (MOVW >> 16) | COPY_BITS(new_imm, 12, 0, 4) | COPY_BITS(new_imm, 11, 10, 1);
inst[1] = dst | COPY_BITS(new_imm, 8, 12, 3) | (new_imm & 0xff);
@@ -221,7 +221,7 @@ static SLJIT_INLINE void modify_imm32_const(sljit_uh *inst, sljit_uw new_imm)
inst[3] = dst | COPY_BITS(new_imm, 8 + 16, 12, 3) | ((new_imm & 0xff0000) >> 16);
}
-static SLJIT_INLINE sljit_si detect_jump_type(struct sljit_jump *jump, sljit_uh *code_ptr, sljit_uh *code)
+static SLJIT_INLINE sljit_s32 detect_jump_type(struct sljit_jump *jump, sljit_u16 *code_ptr, sljit_u16 *code)
{
sljit_sw diff;
@@ -278,13 +278,13 @@ static SLJIT_INLINE sljit_si detect_jump_type(struct sljit_jump *jump, sljit_uh
static SLJIT_INLINE void set_jump_instruction(struct sljit_jump *jump)
{
- sljit_si type = (jump->flags >> 4) & 0xf;
+ sljit_s32 type = (jump->flags >> 4) & 0xf;
sljit_sw diff;
- sljit_uh *jump_inst;
- sljit_si s, j1, j2;
+ sljit_u16 *jump_inst;
+ sljit_s32 s, j1, j2;
if (SLJIT_UNLIKELY(type == 0)) {
- modify_imm32_const((sljit_uh*)jump->addr, (jump->flags & JUMP_LABEL) ? jump->u.label->addr : jump->u.target);
+ modify_imm32_const((sljit_u16*)jump->addr, (jump->flags & JUMP_LABEL) ? jump->u.label->addr : jump->u.target);
return;
}
@@ -294,7 +294,7 @@ static SLJIT_INLINE void set_jump_instruction(struct sljit_jump *jump)
}
else
diff = ((sljit_sw)(jump->u.label->addr) - (sljit_sw)(jump->addr + 4)) >> 1;
- jump_inst = (sljit_uh*)jump->addr;
+ jump_inst = (sljit_u16*)jump->addr;
switch (type) {
case 1:
@@ -342,10 +342,10 @@ static SLJIT_INLINE void set_jump_instruction(struct sljit_jump *jump)
SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compiler)
{
struct sljit_memory_fragment *buf;
- sljit_uh *code;
- sljit_uh *code_ptr;
- sljit_uh *buf_ptr;
- sljit_uh *buf_end;
+ sljit_u16 *code;
+ sljit_u16 *code_ptr;
+ sljit_u16 *buf_ptr;
+ sljit_u16 *buf_end;
sljit_uw half_count;
struct sljit_label *label;
@@ -356,7 +356,7 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
CHECK_PTR(check_sljit_generate_code(compiler));
reverse_buf(compiler);
- code = (sljit_uh*)SLJIT_MALLOC_EXEC(compiler->size * sizeof(sljit_uh));
+ code = (sljit_u16*)SLJIT_MALLOC_EXEC(compiler->size * sizeof(sljit_u16));
PTR_FAIL_WITH_EXEC_IF(code);
buf = compiler->buf;
@@ -367,7 +367,7 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
const_ = compiler->consts;
do {
- buf_ptr = (sljit_uh*)buf->memory;
+ buf_ptr = (sljit_u16*)buf->memory;
buf_end = buf_ptr + (buf->used_size >> 1);
do {
*code_ptr = *buf_ptr++;
@@ -414,7 +414,7 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
}
compiler->error = SLJIT_ERR_COMPILED;
- compiler->executable_size = (code_ptr - code) * sizeof(sljit_uh);
+ compiler->executable_size = (code_ptr - code) * sizeof(sljit_u16);
SLJIT_CACHE_FLUSH(code, code_ptr);
/* Set thumb mode flag. */
return (void*)((sljit_uw)code | 0x1);
@@ -428,7 +428,7 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
static sljit_uw get_imm(sljit_uw imm)
{
/* Thumb immediate form. */
- sljit_si counter;
+ sljit_s32 counter;
if (imm <= 0xff)
return imm;
@@ -474,7 +474,7 @@ static sljit_uw get_imm(sljit_uw imm)
return ((imm >> 24) & 0x7f) | COPY_BITS(counter, 4, 26, 1) | COPY_BITS(counter, 1, 12, 3) | COPY_BITS(counter, 0, 7, 1);
}
-static sljit_si load_immediate(struct sljit_compiler *compiler, sljit_si dst, sljit_uw imm)
+static sljit_s32 load_immediate(struct sljit_compiler *compiler, sljit_s32 dst, sljit_uw imm)
{
sljit_uw tmp;
@@ -508,12 +508,12 @@ static sljit_si load_immediate(struct sljit_compiler *compiler, sljit_si dst, sl
#define SLOW_SRC1 0x0800000
#define SLOW_SRC2 0x1000000
-static sljit_si emit_op_imm(struct sljit_compiler *compiler, sljit_si flags, sljit_si dst, sljit_uw arg1, sljit_uw arg2)
+static sljit_s32 emit_op_imm(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 dst, sljit_uw arg1, sljit_uw arg2)
{
/* dst must be register, TMP_REG1
arg1 must be register, TMP_REG1, imm
arg2 must be register, TMP_REG2, imm */
- sljit_si reg;
+ sljit_s32 reg;
sljit_uw imm, nimm;
if (SLJIT_UNLIKELY((flags & (ARG1_IMM | ARG2_IMM)) == (ARG1_IMM | ARG2_IMM))) {
@@ -677,37 +677,37 @@ static sljit_si emit_op_imm(struct sljit_compiler *compiler, sljit_si flags, slj
/* Both arguments are registers. */
switch (flags & 0xffff) {
case SLJIT_MOV:
- case SLJIT_MOV_UI:
- case SLJIT_MOV_SI:
+ case SLJIT_MOV_U32:
+ case SLJIT_MOV_S32:
case SLJIT_MOV_P:
case SLJIT_MOVU:
- case SLJIT_MOVU_UI:
- case SLJIT_MOVU_SI:
+ case SLJIT_MOVU_U32:
+ case SLJIT_MOVU_S32:
case SLJIT_MOVU_P:
SLJIT_ASSERT(!(flags & SET_FLAGS) && arg1 == TMP_REG1);
if (dst == arg2)
return SLJIT_SUCCESS;
return push_inst16(compiler, MOV | SET_REGS44(dst, arg2));
- case SLJIT_MOV_UB:
- case SLJIT_MOVU_UB:
+ case SLJIT_MOV_U8:
+ case SLJIT_MOVU_U8:
SLJIT_ASSERT(!(flags & SET_FLAGS) && arg1 == TMP_REG1);
if (IS_2_LO_REGS(dst, arg2))
return push_inst16(compiler, UXTB | RD3(dst) | RN3(arg2));
return push_inst32(compiler, UXTB_W | RD4(dst) | RM4(arg2));
- case SLJIT_MOV_SB:
- case SLJIT_MOVU_SB:
+ case SLJIT_MOV_S8:
+ case SLJIT_MOVU_S8:
SLJIT_ASSERT(!(flags & SET_FLAGS) && arg1 == TMP_REG1);
if (IS_2_LO_REGS(dst, arg2))
return push_inst16(compiler, SXTB | RD3(dst) | RN3(arg2));
return push_inst32(compiler, SXTB_W | RD4(dst) | RM4(arg2));
- case SLJIT_MOV_UH:
- case SLJIT_MOVU_UH:
+ case SLJIT_MOV_U16:
+ case SLJIT_MOVU_U16:
SLJIT_ASSERT(!(flags & SET_FLAGS) && arg1 == TMP_REG1);
if (IS_2_LO_REGS(dst, arg2))
return push_inst16(compiler, UXTH | RD3(dst) | RN3(arg2));
return push_inst32(compiler, UXTH_W | RD4(dst) | RM4(arg2));
- case SLJIT_MOV_SH:
- case SLJIT_MOVU_SH:
+ case SLJIT_MOV_S16:
+ case SLJIT_MOVU_S16:
SLJIT_ASSERT(!(flags & SET_FLAGS) && arg1 == TMP_REG1);
if (IS_2_LO_REGS(dst, arg2))
return push_inst16(compiler, SXTH | RD3(dst) | RN3(arg2));
@@ -813,7 +813,7 @@ static sljit_si emit_op_imm(struct sljit_compiler *compiler, sljit_si flags, slj
s = store
*/
-static SLJIT_CONST sljit_ins sljit_mem16[12] = {
+static const sljit_ins sljit_mem16[12] = {
/* w u l */ 0x5800 /* ldr */,
/* w u s */ 0x5000 /* str */,
/* w s l */ 0x5800 /* ldr */,
@@ -830,7 +830,7 @@ static SLJIT_CONST sljit_ins sljit_mem16[12] = {
/* h s s */ 0x5200 /* strh */,
};
-static SLJIT_CONST sljit_ins sljit_mem16_imm5[12] = {
+static const sljit_ins sljit_mem16_imm5[12] = {
/* w u l */ 0x6800 /* ldr imm5 */,
/* w u s */ 0x6000 /* str imm5 */,
/* w s l */ 0x6800 /* ldr imm5 */,
@@ -849,7 +849,7 @@ static SLJIT_CONST sljit_ins sljit_mem16_imm5[12] = {
#define MEM_IMM8 0xc00
#define MEM_IMM12 0x800000
-static SLJIT_CONST sljit_ins sljit_mem32[12] = {
+static const sljit_ins sljit_mem32[12] = {
/* w u l */ 0xf8500000 /* ldr.w */,
/* w u s */ 0xf8400000 /* str.w */,
/* w s l */ 0xf8500000 /* ldr.w */,
@@ -867,7 +867,7 @@ static SLJIT_CONST sljit_ins sljit_mem32[12] = {
};
/* Helper function. Dst should be reg + value, using at most 1 instruction, flags are not set. */
-static sljit_si emit_set_delta(struct sljit_compiler *compiler, sljit_si dst, sljit_si reg, sljit_sw value)
+static sljit_s32 emit_set_delta(struct sljit_compiler *compiler, sljit_s32 dst, sljit_s32 reg, sljit_sw value)
{
if (value >= 0) {
if (value <= 0xfff)
@@ -888,9 +888,9 @@ static sljit_si emit_set_delta(struct sljit_compiler *compiler, sljit_si dst, sl
}
/* Can perform an operation using at most 1 instruction. */
-static sljit_si getput_arg_fast(struct sljit_compiler *compiler, sljit_si flags, sljit_si reg, sljit_si arg, sljit_sw argw)
+static sljit_s32 getput_arg_fast(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg, sljit_s32 arg, sljit_sw argw)
{
- sljit_si other_r, shift;
+ sljit_s32 other_r, shift;
SLJIT_ASSERT(arg & SLJIT_MEM);
@@ -975,7 +975,7 @@ static sljit_si getput_arg_fast(struct sljit_compiler *compiler, sljit_si flags,
/* see getput_arg below.
Note: can_cache is called only for binary operators. Those
operators always use word arguments without write back. */
-static sljit_si can_cache(sljit_si arg, sljit_sw argw, sljit_si next_arg, sljit_sw next_argw)
+static sljit_s32 can_cache(sljit_s32 arg, sljit_sw argw, sljit_s32 next_arg, sljit_sw next_argw)
{
sljit_sw diff;
if ((arg & OFFS_REG_MASK) || !(next_arg & SLJIT_MEM))
@@ -999,10 +999,10 @@ static sljit_si can_cache(sljit_si arg, sljit_sw argw, sljit_si next_arg, sljit_
}
/* Emit the necessary instructions. See can_cache above. */
-static sljit_si getput_arg(struct sljit_compiler *compiler, sljit_si flags, sljit_si reg,
- sljit_si arg, sljit_sw argw, sljit_si next_arg, sljit_sw next_argw)
+static sljit_s32 getput_arg(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg,
+ sljit_s32 arg, sljit_sw argw, sljit_s32 next_arg, sljit_sw next_argw)
{
- sljit_si tmp_r, other_r;
+ sljit_s32 tmp_r, other_r;
sljit_sw diff;
SLJIT_ASSERT(arg & SLJIT_MEM);
@@ -1107,7 +1107,7 @@ static sljit_si getput_arg(struct sljit_compiler *compiler, sljit_si flags, slji
return push_inst32(compiler, sljit_mem32[flags] | MEM_IMM12 | RT4(reg) | RN4(TMP_REG3) | 0);
}
-static SLJIT_INLINE sljit_si emit_op_mem(struct sljit_compiler *compiler, sljit_si flags, sljit_si reg, sljit_si arg, sljit_sw argw)
+static SLJIT_INLINE sljit_s32 emit_op_mem(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg, sljit_s32 arg, sljit_sw argw)
{
if (getput_arg_fast(compiler, flags, reg, arg, argw))
return compiler->error;
@@ -1116,7 +1116,7 @@ static SLJIT_INLINE sljit_si emit_op_mem(struct sljit_compiler *compiler, sljit_
return getput_arg(compiler, flags, reg, arg, argw, 0, 0);
}
-static SLJIT_INLINE sljit_si emit_op_mem2(struct sljit_compiler *compiler, sljit_si flags, sljit_si reg, sljit_si arg1, sljit_sw arg1w, sljit_si arg2, sljit_sw arg2w)
+static SLJIT_INLINE sljit_s32 emit_op_mem2(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg, sljit_s32 arg1, sljit_sw arg1w, sljit_s32 arg2, sljit_sw arg2w)
{
if (getput_arg_fast(compiler, flags, reg, arg1, arg1w))
return compiler->error;
@@ -1127,11 +1127,11 @@ static SLJIT_INLINE sljit_si emit_op_mem2(struct sljit_compiler *compiler, sljit
/* Entry, exit */
/* --------------------------------------------------------------------- */
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compiler *compiler,
- sljit_si options, sljit_si args, sljit_si scratches, sljit_si saveds,
- sljit_si fscratches, sljit_si fsaveds, sljit_si local_size)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compiler,
+ sljit_s32 options, sljit_s32 args, sljit_s32 scratches, sljit_s32 saveds,
+ sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
{
- sljit_si size, i, tmp;
+ sljit_s32 size, i, tmp;
sljit_ins push;
CHECK_ERROR();
@@ -1172,11 +1172,11 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compiler *compil
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_set_context(struct sljit_compiler *compiler,
- sljit_si options, sljit_si args, sljit_si scratches, sljit_si saveds,
- sljit_si fscratches, sljit_si fsaveds, sljit_si local_size)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_set_context(struct sljit_compiler *compiler,
+ sljit_s32 options, sljit_s32 args, sljit_s32 scratches, sljit_s32 saveds,
+ sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
{
- sljit_si size;
+ sljit_s32 size;
CHECK_ERROR();
CHECK(check_sljit_set_context(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size));
@@ -1187,9 +1187,9 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_set_context(struct sljit_compiler *compi
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_return(struct sljit_compiler *compiler, sljit_si op, sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 src, sljit_sw srcw)
{
- sljit_si i, tmp;
+ sljit_s32 i, tmp;
sljit_ins pop;
CHECK_ERROR();
@@ -1237,7 +1237,7 @@ extern int __aeabi_idivmod(int numerator, int denominator);
}
#endif
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op0(struct sljit_compiler *compiler, sljit_si op)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op0(struct sljit_compiler *compiler, sljit_s32 op)
{
sljit_sw saved_reg_list[3];
sljit_sw saved_reg_count;
@@ -1251,18 +1251,18 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op0(struct sljit_compiler *compiler
return push_inst16(compiler, BKPT);
case SLJIT_NOP:
return push_inst16(compiler, NOP);
- case SLJIT_LUMUL:
- case SLJIT_LSMUL:
- return push_inst32(compiler, (op == SLJIT_LUMUL ? UMULL : SMULL)
+ case SLJIT_LMUL_UW:
+ case SLJIT_LMUL_SW:
+ return push_inst32(compiler, (op == SLJIT_LMUL_UW ? UMULL : SMULL)
| (reg_map[SLJIT_R1] << 8)
| (reg_map[SLJIT_R0] << 12)
| (reg_map[SLJIT_R0] << 16)
| reg_map[SLJIT_R1]);
- case SLJIT_UDIVMOD:
- case SLJIT_SDIVMOD:
- case SLJIT_UDIVI:
- case SLJIT_SDIVI:
- SLJIT_COMPILE_ASSERT((SLJIT_UDIVMOD & 0x2) == 0 && SLJIT_UDIVI - 0x2 == SLJIT_UDIVMOD, bad_div_opcode_assignments);
+ case SLJIT_DIVMOD_UW:
+ case SLJIT_DIVMOD_SW:
+ case SLJIT_DIV_UW:
+ case SLJIT_DIV_SW:
+ SLJIT_COMPILE_ASSERT((SLJIT_DIVMOD_UW & 0x2) == 0 && SLJIT_DIV_UW - 0x2 == SLJIT_DIVMOD_UW, bad_div_opcode_assignments);
SLJIT_COMPILE_ASSERT(reg_map[2] == 1 && reg_map[3] == 2 && reg_map[4] == 12, bad_register_mapping);
saved_reg_count = 0;
@@ -1270,7 +1270,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op0(struct sljit_compiler *compiler
saved_reg_list[saved_reg_count++] = 12;
if (compiler->scratches >= 3)
saved_reg_list[saved_reg_count++] = 2;
- if (op >= SLJIT_UDIVI)
+ if (op >= SLJIT_DIV_UW)
saved_reg_list[saved_reg_count++] = 1;
if (saved_reg_count > 0) {
@@ -1288,7 +1288,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op0(struct sljit_compiler *compiler
#if defined(__GNUC__)
FAIL_IF(sljit_emit_ijump(compiler, SLJIT_FAST_CALL, SLJIT_IMM,
- ((op | 0x2) == SLJIT_UDIVI ? SLJIT_FUNC_OFFSET(__aeabi_uidivmod) : SLJIT_FUNC_OFFSET(__aeabi_idivmod))));
+ ((op | 0x2) == SLJIT_DIV_UW ? SLJIT_FUNC_OFFSET(__aeabi_uidivmod) : SLJIT_FUNC_OFFSET(__aeabi_idivmod))));
#else
#error "Software divmod functions are needed"
#endif
@@ -1311,12 +1311,12 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op0(struct sljit_compiler *compiler
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op1(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
{
- sljit_si dst_r, flags;
- sljit_si op_flags = GET_ALL_FLAGS(op);
+ sljit_s32 dst_r, flags;
+ sljit_s32 op_flags = GET_ALL_FLAGS(op);
CHECK_ERROR();
CHECK(check_sljit_emit_op1(compiler, op, dst, dstw, src, srcw));
@@ -1332,56 +1332,56 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op1(struct sljit_compiler *compiler
if (op >= SLJIT_MOV && op <= SLJIT_MOVU_P) {
switch (op) {
case SLJIT_MOV:
- case SLJIT_MOV_UI:
- case SLJIT_MOV_SI:
+ case SLJIT_MOV_U32:
+ case SLJIT_MOV_S32:
case SLJIT_MOV_P:
flags = WORD_SIZE;
break;
- case SLJIT_MOV_UB:
+ case SLJIT_MOV_U8:
flags = BYTE_SIZE;
if (src & SLJIT_IMM)
- srcw = (sljit_ub)srcw;
+ srcw = (sljit_u8)srcw;
break;
- case SLJIT_MOV_SB:
+ case SLJIT_MOV_S8:
flags = BYTE_SIZE | SIGNED;
if (src & SLJIT_IMM)
- srcw = (sljit_sb)srcw;
+ srcw = (sljit_s8)srcw;
break;
- case SLJIT_MOV_UH:
+ case SLJIT_MOV_U16:
flags = HALF_SIZE;
if (src & SLJIT_IMM)
- srcw = (sljit_uh)srcw;
+ srcw = (sljit_u16)srcw;
break;
- case SLJIT_MOV_SH:
+ case SLJIT_MOV_S16:
flags = HALF_SIZE | SIGNED;
if (src & SLJIT_IMM)
- srcw = (sljit_sh)srcw;
+ srcw = (sljit_s16)srcw;
break;
case SLJIT_MOVU:
- case SLJIT_MOVU_UI:
- case SLJIT_MOVU_SI:
+ case SLJIT_MOVU_U32:
+ case SLJIT_MOVU_S32:
case SLJIT_MOVU_P:
flags = WORD_SIZE | UPDATE;
break;
- case SLJIT_MOVU_UB:
+ case SLJIT_MOVU_U8:
flags = BYTE_SIZE | UPDATE;
if (src & SLJIT_IMM)
- srcw = (sljit_ub)srcw;
+ srcw = (sljit_u8)srcw;
break;
- case SLJIT_MOVU_SB:
+ case SLJIT_MOVU_S8:
flags = BYTE_SIZE | SIGNED | UPDATE;
if (src & SLJIT_IMM)
- srcw = (sljit_sb)srcw;
+ srcw = (sljit_s8)srcw;
break;
- case SLJIT_MOVU_UH:
+ case SLJIT_MOVU_U16:
flags = HALF_SIZE | UPDATE;
if (src & SLJIT_IMM)
- srcw = (sljit_uh)srcw;
+ srcw = (sljit_u16)srcw;
break;
- case SLJIT_MOVU_SH:
+ case SLJIT_MOVU_S16:
flags = HALF_SIZE | SIGNED | UPDATE;
if (src & SLJIT_IMM)
- srcw = (sljit_sh)srcw;
+ srcw = (sljit_s16)srcw;
break;
default:
SLJIT_ASSERT_STOP();
@@ -1444,12 +1444,12 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op1(struct sljit_compiler *compiler
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op2(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
- sljit_si dst_r, flags;
+ sljit_s32 dst_r, flags;
CHECK_ERROR();
CHECK(check_sljit_emit_op2(compiler, op, dst, dstw, src1, src1w, src2, src2w));
@@ -1523,26 +1523,26 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op2(struct sljit_compiler *compiler
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_get_register_index(sljit_si reg)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 reg)
{
CHECK_REG_INDEX(check_sljit_get_register_index(reg));
return reg_map[reg];
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_get_float_register_index(sljit_si reg)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_float_register_index(sljit_s32 reg)
{
CHECK_REG_INDEX(check_sljit_get_float_register_index(reg));
return reg << 1;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_custom(struct sljit_compiler *compiler,
- void *instruction, sljit_si size)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *compiler,
+ void *instruction, sljit_s32 size)
{
CHECK_ERROR();
CHECK(check_sljit_emit_op_custom(compiler, instruction, size));
if (size == 2)
- return push_inst16(compiler, *(sljit_uh*)instruction);
+ return push_inst16(compiler, *(sljit_u16*)instruction);
return push_inst32(compiler, *(sljit_ins*)instruction);
}
@@ -1550,7 +1550,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_custom(struct sljit_compiler *co
/* Floating point operators */
/* --------------------------------------------------------------------- */
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_is_fpu_available(void)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_is_fpu_available(void)
{
#ifdef SLJIT_IS_FPU_AVAILABLE
return SLJIT_IS_FPU_AVAILABLE;
@@ -1562,11 +1562,11 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_is_fpu_available(void)
#define FPU_LOAD (1 << 20)
-static sljit_si emit_fop_mem(struct sljit_compiler *compiler, sljit_si flags, sljit_si reg, sljit_si arg, sljit_sw argw)
+static sljit_s32 emit_fop_mem(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg, sljit_s32 arg, sljit_sw argw)
{
sljit_sw tmp;
sljit_uw imm;
- sljit_sw inst = VSTR_F32 | (flags & (SLJIT_SINGLE_OP | FPU_LOAD));
+ sljit_sw inst = VSTR_F32 | (flags & (SLJIT_F32_OP | FPU_LOAD));
SLJIT_ASSERT(arg & SLJIT_MEM);
@@ -1626,16 +1626,16 @@ static sljit_si emit_fop_mem(struct sljit_compiler *compiler, sljit_si flags, sl
return push_inst32(compiler, inst | 0x800000 | RN4(TMP_REG3) | DD4(reg));
}
-static SLJIT_INLINE sljit_si sljit_emit_fop1_convw_fromd(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw)
+static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
{
if (src & SLJIT_MEM) {
- FAIL_IF(emit_fop_mem(compiler, (op & SLJIT_SINGLE_OP) | FPU_LOAD, TMP_FREG1, src, srcw));
+ FAIL_IF(emit_fop_mem(compiler, (op & SLJIT_F32_OP) | FPU_LOAD, TMP_FREG1, src, srcw));
src = TMP_FREG1;
}
- FAIL_IF(push_inst32(compiler, VCVT_S32_F32 | (op & SLJIT_SINGLE_OP) | DD4(TMP_FREG1) | DM4(src)));
+ FAIL_IF(push_inst32(compiler, VCVT_S32_F32 | (op & SLJIT_F32_OP) | DD4(TMP_FREG1) | DM4(src)));
if (dst == SLJIT_UNUSED)
return SLJIT_SUCCESS;
@@ -1647,11 +1647,11 @@ static SLJIT_INLINE sljit_si sljit_emit_fop1_convw_fromd(struct sljit_compiler *
return emit_fop_mem(compiler, 0, TMP_FREG1, dst, dstw);
}
-static SLJIT_INLINE sljit_si sljit_emit_fop1_convd_fromw(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw)
+static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_sw(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
{
- sljit_si dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
+ sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
if (FAST_IS_REG(src))
FAIL_IF(push_inst32(compiler, VMOV | RT4(src) | DN4(TMP_FREG1)));
@@ -1664,85 +1664,85 @@ static SLJIT_INLINE sljit_si sljit_emit_fop1_convd_fromw(struct sljit_compiler *
FAIL_IF(push_inst32(compiler, VMOV | RT4(TMP_REG1) | DN4(TMP_FREG1)));
}
- FAIL_IF(push_inst32(compiler, VCVT_F32_S32 | (op & SLJIT_SINGLE_OP) | DD4(dst_r) | DM4(TMP_FREG1)));
+ FAIL_IF(push_inst32(compiler, VCVT_F32_S32 | (op & SLJIT_F32_OP) | DD4(dst_r) | DM4(TMP_FREG1)));
if (dst & SLJIT_MEM)
- return emit_fop_mem(compiler, (op & SLJIT_SINGLE_OP), TMP_FREG1, dst, dstw);
+ return emit_fop_mem(compiler, (op & SLJIT_F32_OP), TMP_FREG1, dst, dstw);
return SLJIT_SUCCESS;
}
-static SLJIT_INLINE sljit_si sljit_emit_fop1_cmp(struct sljit_compiler *compiler, sljit_si op,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+static SLJIT_INLINE sljit_s32 sljit_emit_fop1_cmp(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
if (src1 & SLJIT_MEM) {
- emit_fop_mem(compiler, (op & SLJIT_SINGLE_OP) | FPU_LOAD, TMP_FREG1, src1, src1w);
+ emit_fop_mem(compiler, (op & SLJIT_F32_OP) | FPU_LOAD, TMP_FREG1, src1, src1w);
src1 = TMP_FREG1;
}
if (src2 & SLJIT_MEM) {
- emit_fop_mem(compiler, (op & SLJIT_SINGLE_OP) | FPU_LOAD, TMP_FREG2, src2, src2w);
+ emit_fop_mem(compiler, (op & SLJIT_F32_OP) | FPU_LOAD, TMP_FREG2, src2, src2w);
src2 = TMP_FREG2;
}
- FAIL_IF(push_inst32(compiler, VCMP_F32 | (op & SLJIT_SINGLE_OP) | DD4(src1) | DM4(src2)));
+ FAIL_IF(push_inst32(compiler, VCMP_F32 | (op & SLJIT_F32_OP) | DD4(src1) | DM4(src2)));
return push_inst32(compiler, VMRS);
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop1(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
{
- sljit_si dst_r;
+ sljit_s32 dst_r;
CHECK_ERROR();
compiler->cache_arg = 0;
compiler->cache_argw = 0;
- if (GET_OPCODE(op) != SLJIT_CONVD_FROMS)
- op ^= SLJIT_SINGLE_OP;
+ if (GET_OPCODE(op) != SLJIT_CONV_F64_FROM_F32)
+ op ^= SLJIT_F32_OP;
- SLJIT_COMPILE_ASSERT((SLJIT_SINGLE_OP == 0x100), float_transfer_bit_error);
+ SLJIT_COMPILE_ASSERT((SLJIT_F32_OP == 0x100), float_transfer_bit_error);
SELECT_FOP1_OPERATION_WITH_CHECKS(compiler, op, dst, dstw, src, srcw);
dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
if (src & SLJIT_MEM) {
- emit_fop_mem(compiler, (op & SLJIT_SINGLE_OP) | FPU_LOAD, dst_r, src, srcw);
+ emit_fop_mem(compiler, (op & SLJIT_F32_OP) | FPU_LOAD, dst_r, src, srcw);
src = dst_r;
}
switch (GET_OPCODE(op)) {
- case SLJIT_DMOV:
+ case SLJIT_MOV_F64:
if (src != dst_r) {
if (dst_r != TMP_FREG1)
- FAIL_IF(push_inst32(compiler, VMOV_F32 | (op & SLJIT_SINGLE_OP) | DD4(dst_r) | DM4(src)));
+ FAIL_IF(push_inst32(compiler, VMOV_F32 | (op & SLJIT_F32_OP) | DD4(dst_r) | DM4(src)));
else
dst_r = src;
}
break;
- case SLJIT_DNEG:
- FAIL_IF(push_inst32(compiler, VNEG_F32 | (op & SLJIT_SINGLE_OP) | DD4(dst_r) | DM4(src)));
+ case SLJIT_NEG_F64:
+ FAIL_IF(push_inst32(compiler, VNEG_F32 | (op & SLJIT_F32_OP) | DD4(dst_r) | DM4(src)));
break;
- case SLJIT_DABS:
- FAIL_IF(push_inst32(compiler, VABS_F32 | (op & SLJIT_SINGLE_OP) | DD4(dst_r) | DM4(src)));
+ case SLJIT_ABS_F64:
+ FAIL_IF(push_inst32(compiler, VABS_F32 | (op & SLJIT_F32_OP) | DD4(dst_r) | DM4(src)));
break;
- case SLJIT_CONVD_FROMS:
- FAIL_IF(push_inst32(compiler, VCVT_F64_F32 | (op & SLJIT_SINGLE_OP) | DD4(dst_r) | DM4(src)));
- op ^= SLJIT_SINGLE_OP;
+ case SLJIT_CONV_F64_FROM_F32:
+ FAIL_IF(push_inst32(compiler, VCVT_F64_F32 | (op & SLJIT_F32_OP) | DD4(dst_r) | DM4(src)));
+ op ^= SLJIT_F32_OP;
break;
}
if (dst & SLJIT_MEM)
- return emit_fop_mem(compiler, (op & SLJIT_SINGLE_OP), dst_r, dst, dstw);
+ return emit_fop_mem(compiler, (op & SLJIT_F32_OP), dst_r, dst, dstw);
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop2(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
- sljit_si dst_r;
+ sljit_s32 dst_r;
CHECK_ERROR();
CHECK(check_sljit_emit_fop2(compiler, op, dst, dstw, src1, src1w, src2, src2w));
@@ -1752,36 +1752,36 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop2(struct sljit_compiler *compile
compiler->cache_arg = 0;
compiler->cache_argw = 0;
- op ^= SLJIT_SINGLE_OP;
+ op ^= SLJIT_F32_OP;
dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
if (src1 & SLJIT_MEM) {
- emit_fop_mem(compiler, (op & SLJIT_SINGLE_OP) | FPU_LOAD, TMP_FREG1, src1, src1w);
+ emit_fop_mem(compiler, (op & SLJIT_F32_OP) | FPU_LOAD, TMP_FREG1, src1, src1w);
src1 = TMP_FREG1;
}
if (src2 & SLJIT_MEM) {
- emit_fop_mem(compiler, (op & SLJIT_SINGLE_OP) | FPU_LOAD, TMP_FREG2, src2, src2w);
+ emit_fop_mem(compiler, (op & SLJIT_F32_OP) | FPU_LOAD, TMP_FREG2, src2, src2w);
src2 = TMP_FREG2;
}
switch (GET_OPCODE(op)) {
- case SLJIT_DADD:
- FAIL_IF(push_inst32(compiler, VADD_F32 | (op & SLJIT_SINGLE_OP) | DD4(dst_r) | DN4(src1) | DM4(src2)));
+ case SLJIT_ADD_F64:
+ FAIL_IF(push_inst32(compiler, VADD_F32 | (op & SLJIT_F32_OP) | DD4(dst_r) | DN4(src1) | DM4(src2)));
break;
- case SLJIT_DSUB:
- FAIL_IF(push_inst32(compiler, VSUB_F32 | (op & SLJIT_SINGLE_OP) | DD4(dst_r) | DN4(src1) | DM4(src2)));
+ case SLJIT_SUB_F64:
+ FAIL_IF(push_inst32(compiler, VSUB_F32 | (op & SLJIT_F32_OP) | DD4(dst_r) | DN4(src1) | DM4(src2)));
break;
- case SLJIT_DMUL:
- FAIL_IF(push_inst32(compiler, VMUL_F32 | (op & SLJIT_SINGLE_OP) | DD4(dst_r) | DN4(src1) | DM4(src2)));
+ case SLJIT_MUL_F64:
+ FAIL_IF(push_inst32(compiler, VMUL_F32 | (op & SLJIT_F32_OP) | DD4(dst_r) | DN4(src1) | DM4(src2)));
break;
- case SLJIT_DDIV:
- FAIL_IF(push_inst32(compiler, VDIV_F32 | (op & SLJIT_SINGLE_OP) | DD4(dst_r) | DN4(src1) | DM4(src2)));
+ case SLJIT_DIV_F64:
+ FAIL_IF(push_inst32(compiler, VDIV_F32 | (op & SLJIT_F32_OP) | DD4(dst_r) | DN4(src1) | DM4(src2)));
break;
}
if (!(dst & SLJIT_MEM))
return SLJIT_SUCCESS;
- return emit_fop_mem(compiler, (op & SLJIT_SINGLE_OP), TMP_FREG1, dst, dstw);
+ return emit_fop_mem(compiler, (op & SLJIT_F32_OP), TMP_FREG1, dst, dstw);
}
#undef FPU_LOAD
@@ -1790,7 +1790,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop2(struct sljit_compiler *compile
/* Other instructions */
/* --------------------------------------------------------------------- */
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_si dst, sljit_sw dstw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
{
CHECK_ERROR();
CHECK(check_sljit_emit_fast_enter(compiler, dst, dstw));
@@ -1813,7 +1813,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_enter(struct sljit_compiler *c
return getput_arg(compiler, WORD_SIZE | STORE, TMP_REG2, dst, dstw, 0, 0);
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_return(struct sljit_compiler *compiler, sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_return(struct sljit_compiler *compiler, sljit_s32 src, sljit_sw srcw)
{
CHECK_ERROR();
CHECK(check_sljit_emit_fast_return(compiler, src, srcw));
@@ -1840,33 +1840,33 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_return(struct sljit_compiler *
/* Conditional instructions */
/* --------------------------------------------------------------------- */
-static sljit_uw get_cc(sljit_si type)
+static sljit_uw get_cc(sljit_s32 type)
{
switch (type) {
case SLJIT_EQUAL:
case SLJIT_MUL_NOT_OVERFLOW:
- case SLJIT_D_EQUAL:
+ case SLJIT_EQUAL_F64:
return 0x0;
case SLJIT_NOT_EQUAL:
case SLJIT_MUL_OVERFLOW:
- case SLJIT_D_NOT_EQUAL:
+ case SLJIT_NOT_EQUAL_F64:
return 0x1;
case SLJIT_LESS:
- case SLJIT_D_LESS:
+ case SLJIT_LESS_F64:
return 0x3;
case SLJIT_GREATER_EQUAL:
- case SLJIT_D_GREATER_EQUAL:
+ case SLJIT_GREATER_EQUAL_F64:
return 0x2;
case SLJIT_GREATER:
- case SLJIT_D_GREATER:
+ case SLJIT_GREATER_F64:
return 0x8;
case SLJIT_LESS_EQUAL:
- case SLJIT_D_LESS_EQUAL:
+ case SLJIT_LESS_EQUAL_F64:
return 0x9;
case SLJIT_SIG_LESS:
@@ -1882,11 +1882,11 @@ static sljit_uw get_cc(sljit_si type)
return 0xd;
case SLJIT_OVERFLOW:
- case SLJIT_D_UNORDERED:
+ case SLJIT_UNORDERED_F64:
return 0x6;
case SLJIT_NOT_OVERFLOW:
- case SLJIT_D_ORDERED:
+ case SLJIT_ORDERED_F64:
return 0x7;
default: /* SLJIT_JUMP */
@@ -1911,7 +1911,7 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emit_label(struct sljit_compi
return label;
}
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, sljit_si type)
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, sljit_s32 type)
{
struct sljit_jump *jump;
sljit_ins cc;
@@ -1944,7 +1944,7 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compile
return jump;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_ijump(struct sljit_compiler *compiler, sljit_si type, sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 src, sljit_sw srcw)
{
struct sljit_jump *jump;
@@ -1972,12 +1972,12 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_ijump(struct sljit_compiler *compil
return push_inst16(compiler, (type <= SLJIT_JUMP ? BX : BLX) | RN3(TMP_REG1));
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw,
- sljit_si type)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw,
+ sljit_s32 type)
{
- sljit_si dst_r, flags = GET_ALL_FLAGS(op);
+ sljit_s32 dst_r, flags = GET_ALL_FLAGS(op);
sljit_ins cc, ins;
CHECK_ERROR();
@@ -2054,10 +2054,10 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_flags(struct sljit_compiler *com
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_si dst, sljit_sw dstw, sljit_sw init_value)
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw init_value)
{
struct sljit_const *const_;
- sljit_si dst_r;
+ sljit_s32 dst_r;
CHECK_ERROR_PTR();
CHECK_PTR(check_sljit_emit_const(compiler, dst, dstw, init_value));
@@ -2077,14 +2077,14 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compi
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_jump_addr(sljit_uw addr, sljit_uw new_addr)
{
- sljit_uh *inst = (sljit_uh*)addr;
+ sljit_u16 *inst = (sljit_u16*)addr;
modify_imm32_const(inst, new_addr);
SLJIT_CACHE_FLUSH(inst, inst + 4);
}
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_const(sljit_uw addr, sljit_sw new_constant)
{
- sljit_uh *inst = (sljit_uh*)addr;
+ sljit_u16 *inst = (sljit_u16*)addr;
modify_imm32_const(inst, new_constant);
SLJIT_CACHE_FLUSH(inst, inst + 4);
}
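
The hunks above apply sljit's wholesale identifier rename to the Thumb-2 back end: width-suffixed integer types (sljit_ub, sljit_sh, sljit_si, ...) become explicit-width names (sljit_u8, sljit_s16, sljit_s32, ...), and the opcode and flag macros follow the same scheme (SLJIT_MOV_UI -> SLJIT_MOV_U32, SLJIT_SINGLE_OP -> SLJIT_F32_OP, SLJIT_LUMUL -> SLJIT_LMUL_UW, and so on). As an illustration only — this header is not part of the commit — a caller still written against the old spellings could be bridged with a small compatibility shim along these lines; every mapping below is taken from renames visible in the hunks:

/* sljit_compat_names.h -- hypothetical shim, for illustration only.
   Maps the pre-rename sljit identifiers onto the new names introduced
   by this update.  Include it after "sljitLir.h". */
#ifndef SLJIT_COMPAT_NAMES_H
#define SLJIT_COMPAT_NAMES_H

/* Integer types: suffix letters -> explicit bit widths. */
#define sljit_ub  sljit_u8
#define sljit_sb  sljit_s8
#define sljit_uh  sljit_u16
#define sljit_sh  sljit_s16
#define sljit_ui  sljit_u32
#define sljit_si  sljit_s32

/* Move opcodes follow the same pattern (the MOVU_* renames are analogous). */
#define SLJIT_MOV_UB   SLJIT_MOV_U8
#define SLJIT_MOV_SB   SLJIT_MOV_S8
#define SLJIT_MOV_UH   SLJIT_MOV_U16
#define SLJIT_MOV_SH   SLJIT_MOV_S16
#define SLJIT_MOV_UI   SLJIT_MOV_U32
#define SLJIT_MOV_SI   SLJIT_MOV_S32

/* Width, float-precision and mul/div selectors. */
#define SLJIT_INT_OP     SLJIT_I32_OP
#define SLJIT_SINGLE_OP  SLJIT_F32_OP
#define SLJIT_LUMUL      SLJIT_LMUL_UW
#define SLJIT_LSMUL      SLJIT_LMUL_SW
#define SLJIT_UDIVMOD    SLJIT_DIVMOD_UW
#define SLJIT_SDIVMOD    SLJIT_DIVMOD_SW
#define SLJIT_UDIVI      SLJIT_DIV_UW
#define SLJIT_SDIVI      SLJIT_DIV_SW

#endif /* SLJIT_COMPAT_NAMES_H */

A real migration would simply rename the call sites, but the table makes the one-to-one correspondence in these hunks easy to scan.
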
diff --git a/src/3rdparty/pcre/sljit/sljitNativeMIPS_32.c b/src/3rdparty/pcre/sljit/sljitNativeMIPS_32.c
index b2b60d7a4c..5096e4f55e 100644
--- a/src/3rdparty/pcre/sljit/sljitNativeMIPS_32.c
+++ b/src/3rdparty/pcre/sljit/sljitNativeMIPS_32.c
@@ -26,7 +26,7 @@
/* mips 32-bit arch dependent functions. */
-static sljit_si load_immediate(struct sljit_compiler *compiler, sljit_si dst_ar, sljit_sw imm)
+static sljit_s32 load_immediate(struct sljit_compiler *compiler, sljit_s32 dst_ar, sljit_sw imm)
{
if (!(imm & ~0xffff))
return push_inst(compiler, ORI | SA(0) | TA(dst_ar) | IMM(imm), dst_ar);
@@ -66,24 +66,24 @@ static sljit_si load_immediate(struct sljit_compiler *compiler, sljit_si dst_ar,
FAIL_IF(push_inst(compiler, op_v | S(src2) | T(src1) | D(dst), DR(dst))); \
}
-static SLJIT_INLINE sljit_si emit_single_op(struct sljit_compiler *compiler, sljit_si op, sljit_si flags,
- sljit_si dst, sljit_si src1, sljit_sw src2)
+static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 flags,
+ sljit_s32 dst, sljit_s32 src1, sljit_sw src2)
{
switch (GET_OPCODE(op)) {
case SLJIT_MOV:
- case SLJIT_MOV_UI:
- case SLJIT_MOV_SI:
+ case SLJIT_MOV_U32:
+ case SLJIT_MOV_S32:
case SLJIT_MOV_P:
SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
if (dst != src2)
return push_inst(compiler, ADDU | S(src2) | TA(0) | D(dst), DR(dst));
return SLJIT_SUCCESS;
- case SLJIT_MOV_UB:
- case SLJIT_MOV_SB:
+ case SLJIT_MOV_U8:
+ case SLJIT_MOV_S8:
SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
if ((flags & (REG_DEST | REG2_SOURCE)) == (REG_DEST | REG2_SOURCE)) {
- if (op == SLJIT_MOV_SB) {
+ if (op == SLJIT_MOV_S8) {
#if (defined SLJIT_MIPS_R1 && SLJIT_MIPS_R1)
return push_inst(compiler, SEB | T(src2) | D(dst), DR(dst));
#else
@@ -97,11 +97,11 @@ static SLJIT_INLINE sljit_si emit_single_op(struct sljit_compiler *compiler, slj
SLJIT_ASSERT_STOP();
return SLJIT_SUCCESS;
- case SLJIT_MOV_UH:
- case SLJIT_MOV_SH:
+ case SLJIT_MOV_U16:
+ case SLJIT_MOV_S16:
SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
if ((flags & (REG_DEST | REG2_SOURCE)) == (REG_DEST | REG2_SOURCE)) {
- if (op == SLJIT_MOV_SH) {
+ if (op == SLJIT_MOV_S16) {
#if (defined SLJIT_MIPS_R1 && SLJIT_MIPS_R1)
return push_inst(compiler, SEH | T(src2) | D(dst), DR(dst));
#else
@@ -341,7 +341,7 @@ static SLJIT_INLINE sljit_si emit_single_op(struct sljit_compiler *compiler, slj
return SLJIT_SUCCESS;
}
-static SLJIT_INLINE sljit_si emit_const(struct sljit_compiler *compiler, sljit_si dst, sljit_sw init_value)
+static SLJIT_INLINE sljit_s32 emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw init_value)
{
FAIL_IF(push_inst(compiler, LUI | T(dst) | IMM(init_value >> 16), DR(dst)));
return push_inst(compiler, ORI | S(dst) | T(dst) | IMM(init_value), DR(dst));
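
emit_const for MIPS32, shown in the last hunk above, materializes an arbitrary 32-bit constant with exactly two instructions: LUI places its 16-bit immediate in the upper half of the destination (zeroing the lower half), and ORI then fills in the low 16 bits. A minimal standalone sketch of that split — plain C arithmetic with a hypothetical helper name, not sljit code — is:

/* Sketch only: models the register value produced by the LUI/ORI pair
   that emit_const emits on MIPS32. */
#include <assert.h>
#include <stdint.h>

static uint32_t mips32_const_via_lui_ori(uint32_t value)
{
    uint32_t hi  = (value >> 16) & 0xffff; /* 16-bit immediate for LUI */
    uint32_t lo  = value & 0xffff;         /* 16-bit immediate for ORI */
    uint32_t reg = hi << 16;               /* LUI: imm << 16, low half cleared */
    reg |= lo;                             /* ORI: or in the low 16 bits */
    return reg;
}

int main(void)
{
    assert(mips32_const_via_lui_ori(0xdeadbeefu) == 0xdeadbeefu);
    return 0;
}
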
diff --git a/src/3rdparty/pcre/sljit/sljitNativeMIPS_64.c b/src/3rdparty/pcre/sljit/sljitNativeMIPS_64.c
index 185fb5768e..c7ee8c9c2e 100644
--- a/src/3rdparty/pcre/sljit/sljitNativeMIPS_64.c
+++ b/src/3rdparty/pcre/sljit/sljitNativeMIPS_64.c
@@ -26,11 +26,11 @@
/* mips 64-bit arch dependent functions. */
-static sljit_si load_immediate(struct sljit_compiler *compiler, sljit_si dst_ar, sljit_sw imm)
+static sljit_s32 load_immediate(struct sljit_compiler *compiler, sljit_s32 dst_ar, sljit_sw imm)
{
- sljit_si shift = 32;
- sljit_si shift2;
- sljit_si inv = 0;
+ sljit_s32 shift = 32;
+ sljit_s32 shift2;
+ sljit_s32 inv = 0;
sljit_ins ins;
sljit_uw uimm;
@@ -119,7 +119,7 @@ static sljit_si load_immediate(struct sljit_compiler *compiler, sljit_si dst_ar,
}
#define SELECT_OP(a, b) \
- (!(op & SLJIT_INT_OP) ? a : b)
+ (!(op & SLJIT_I32_OP) ? a : b)
#define EMIT_LOGICAL(op_imm, op_norm) \
if (flags & SRC2_IMM) { \
@@ -138,27 +138,27 @@ static sljit_si load_immediate(struct sljit_compiler *compiler, sljit_si dst_ar,
#define EMIT_SHIFT(op_dimm, op_dimm32, op_imm, op_dv, op_v) \
if (flags & SRC2_IMM) { \
if (src2 >= 32) { \
- SLJIT_ASSERT(!(op & SLJIT_INT_OP)); \
+ SLJIT_ASSERT(!(op & SLJIT_I32_OP)); \
ins = op_dimm32; \
src2 -= 32; \
} \
else \
- ins = (op & SLJIT_INT_OP) ? op_imm : op_dimm; \
+ ins = (op & SLJIT_I32_OP) ? op_imm : op_dimm; \
if (op & SLJIT_SET_E) \
FAIL_IF(push_inst(compiler, ins | T(src1) | DA(EQUAL_FLAG) | SH_IMM(src2), EQUAL_FLAG)); \
if (CHECK_FLAGS(SLJIT_SET_E)) \
FAIL_IF(push_inst(compiler, ins | T(src1) | D(dst) | SH_IMM(src2), DR(dst))); \
} \
else { \
- ins = (op & SLJIT_INT_OP) ? op_v : op_dv; \
+ ins = (op & SLJIT_I32_OP) ? op_v : op_dv; \
if (op & SLJIT_SET_E) \
FAIL_IF(push_inst(compiler, ins | S(src2) | T(src1) | DA(EQUAL_FLAG), EQUAL_FLAG)); \
if (CHECK_FLAGS(SLJIT_SET_E)) \
FAIL_IF(push_inst(compiler, ins | S(src2) | T(src1) | D(dst), DR(dst))); \
}
-static SLJIT_INLINE sljit_si emit_single_op(struct sljit_compiler *compiler, sljit_si op, sljit_si flags,
- sljit_si dst, sljit_si src1, sljit_sw src2)
+static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 flags,
+ sljit_s32 dst, sljit_s32 src1, sljit_sw src2)
{
sljit_ins ins;
@@ -170,11 +170,11 @@ static SLJIT_INLINE sljit_si emit_single_op(struct sljit_compiler *compiler, slj
return push_inst(compiler, SELECT_OP(DADDU, ADDU) | S(src2) | TA(0) | D(dst), DR(dst));
return SLJIT_SUCCESS;
- case SLJIT_MOV_UB:
- case SLJIT_MOV_SB:
+ case SLJIT_MOV_U8:
+ case SLJIT_MOV_S8:
SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
if ((flags & (REG_DEST | REG2_SOURCE)) == (REG_DEST | REG2_SOURCE)) {
- if (op == SLJIT_MOV_SB) {
+ if (op == SLJIT_MOV_S8) {
FAIL_IF(push_inst(compiler, DSLL32 | T(src2) | D(dst) | SH_IMM(24), DR(dst)));
return push_inst(compiler, DSRA32 | T(dst) | D(dst) | SH_IMM(24), DR(dst));
}
@@ -184,11 +184,11 @@ static SLJIT_INLINE sljit_si emit_single_op(struct sljit_compiler *compiler, slj
SLJIT_ASSERT_STOP();
return SLJIT_SUCCESS;
- case SLJIT_MOV_UH:
- case SLJIT_MOV_SH:
+ case SLJIT_MOV_U16:
+ case SLJIT_MOV_S16:
SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
if ((flags & (REG_DEST | REG2_SOURCE)) == (REG_DEST | REG2_SOURCE)) {
- if (op == SLJIT_MOV_SH) {
+ if (op == SLJIT_MOV_S16) {
FAIL_IF(push_inst(compiler, DSLL32 | T(src2) | D(dst) | SH_IMM(16), DR(dst)));
return push_inst(compiler, DSRA32 | T(dst) | D(dst) | SH_IMM(16), DR(dst));
}
@@ -198,12 +198,12 @@ static SLJIT_INLINE sljit_si emit_single_op(struct sljit_compiler *compiler, slj
SLJIT_ASSERT_STOP();
return SLJIT_SUCCESS;
- case SLJIT_MOV_UI:
- SLJIT_ASSERT(!(op & SLJIT_INT_OP));
+ case SLJIT_MOV_U32:
+ SLJIT_ASSERT(!(op & SLJIT_I32_OP));
FAIL_IF(push_inst(compiler, DSLL32 | T(src2) | D(dst) | SH_IMM(0), DR(dst)));
return push_inst(compiler, DSRL32 | T(dst) | D(dst) | SH_IMM(0), DR(dst));
- case SLJIT_MOV_SI:
+ case SLJIT_MOV_S32:
SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
return push_inst(compiler, SLL | T(src2) | D(dst) | SH_IMM(0), DR(dst));
@@ -231,7 +231,7 @@ static SLJIT_INLINE sljit_si emit_single_op(struct sljit_compiler *compiler, slj
FAIL_IF(push_inst(compiler, SELECT_OP(DADDU, ADDU) | S(src2) | TA(0) | D(TMP_REG1), DR(TMP_REG1)));
/* Check zero. */
FAIL_IF(push_inst(compiler, BEQ | S(TMP_REG1) | TA(0) | IMM(5), UNMOVABLE_INS));
- FAIL_IF(push_inst(compiler, ORI | SA(0) | T(dst) | IMM((op & SLJIT_INT_OP) ? 32 : 64), UNMOVABLE_INS));
+ FAIL_IF(push_inst(compiler, ORI | SA(0) | T(dst) | IMM((op & SLJIT_I32_OP) ? 32 : 64), UNMOVABLE_INS));
FAIL_IF(push_inst(compiler, SELECT_OP(DADDIU, ADDIU) | SA(0) | T(dst) | IMM(-1), DR(dst)));
/* Loop for searching the highest bit. */
FAIL_IF(push_inst(compiler, SELECT_OP(DADDIU, ADDIU) | S(dst) | T(dst) | IMM(1), DR(dst)));
@@ -392,7 +392,7 @@ static SLJIT_INLINE sljit_si emit_single_op(struct sljit_compiler *compiler, slj
SLJIT_ASSERT(!(flags & SRC2_IMM));
if (!(op & SLJIT_SET_O)) {
#if (defined SLJIT_MIPS_R1 && SLJIT_MIPS_R1)
- if (op & SLJIT_INT_OP)
+ if (op & SLJIT_I32_OP)
return push_inst(compiler, MUL | S(src1) | T(src2) | D(dst), DR(dst));
FAIL_IF(push_inst(compiler, DMULT | S(src1) | T(src2), MOVABLE_INS));
return push_inst(compiler, MFLO | D(dst), DR(dst));
@@ -436,7 +436,7 @@ static SLJIT_INLINE sljit_si emit_single_op(struct sljit_compiler *compiler, slj
return SLJIT_SUCCESS;
}
-static SLJIT_INLINE sljit_si emit_const(struct sljit_compiler *compiler, sljit_si dst, sljit_sw init_value)
+static SLJIT_INLINE sljit_s32 emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw init_value)
{
FAIL_IF(push_inst(compiler, LUI | T(dst) | IMM(init_value >> 48), DR(dst)));
FAIL_IF(push_inst(compiler, ORI | S(dst) | T(dst) | IMM(init_value >> 32), DR(dst)));
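
The 64-bit variant of emit_const starts the same way — the hunk above shows LUI taking bits 63..48 and ORI adding bits 47..32 — and the rest of the function, cut off by the hunk, presumably continues the usual MIPS64 recipe of shifting left by 16 and OR-ing in the next 16-bit chunk twice more. A hedged standalone sketch of that assumed sequence, again plain C arithmetic rather than sljit code:

/* Sketch only: the assumed LUI/ORI/DSLL chunking used to build a full
   64-bit constant on MIPS64; only the first two steps are visible in
   the hunk above. */
#include <assert.h>
#include <stdint.h>

static uint64_t mips64_const_from_chunks(uint64_t value)
{
    uint64_t reg;

    reg  = ((value >> 48) & 0xffff) << 16;  /* LUI : bits 63..48 (sign-extension ignored; it shifts out) */
    reg |= (value >> 32) & 0xffff;          /* ORI : bits 47..32 */
    reg <<= 16;                             /* DSLL: make room */
    reg |= (value >> 16) & 0xffff;          /* ORI : bits 31..16 */
    reg <<= 16;                             /* DSLL: make room */
    reg |= value & 0xffff;                  /* ORI : bits 15..0 */
    return reg;
}

int main(void)
{
    assert(mips64_const_from_chunks(0x0123456789abcdefULL) == 0x0123456789abcdefULL);
    return 0;
}
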
diff --git a/src/3rdparty/pcre/sljit/sljitNativeMIPS_common.c b/src/3rdparty/pcre/sljit/sljitNativeMIPS_common.c
index cf3535f81a..c2c251b1ff 100644
--- a/src/3rdparty/pcre/sljit/sljitNativeMIPS_common.c
+++ b/src/3rdparty/pcre/sljit/sljitNativeMIPS_common.c
@@ -27,7 +27,7 @@
/* Latest MIPS architecture. */
/* Automatically detect SLJIT_MIPS_R1 */
-SLJIT_API_FUNC_ATTRIBUTE SLJIT_CONST char* sljit_get_platform_name(void)
+SLJIT_API_FUNC_ATTRIBUTE const char* sljit_get_platform_name(void)
{
#if (defined SLJIT_MIPS_R1 && SLJIT_MIPS_R1)
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
@@ -42,7 +42,7 @@ SLJIT_API_FUNC_ATTRIBUTE SLJIT_CONST char* sljit_get_platform_name(void)
/* Length of an instruction word
Both for mips-32 and mips-64 */
-typedef sljit_ui sljit_ins;
+typedef sljit_u32 sljit_ins;
#define TMP_REG1 (SLJIT_NUMBER_OF_REGISTERS + 2)
#define TMP_REG2 (SLJIT_NUMBER_OF_REGISTERS + 3)
@@ -68,7 +68,7 @@ typedef sljit_ui sljit_ins;
#define TMP_FREG1 (0)
#define TMP_FREG2 ((SLJIT_NUMBER_OF_FLOAT_REGISTERS + 1) << 1)
-static SLJIT_CONST sljit_ub reg_map[SLJIT_NUMBER_OF_REGISTERS + 5] = {
+static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 5] = {
0, 2, 5, 6, 7, 8, 9, 10, 11, 24, 23, 22, 21, 20, 19, 18, 17, 16, 29, 3, 25, 4
};
@@ -201,7 +201,7 @@ static SLJIT_CONST sljit_ub reg_map[SLJIT_NUMBER_OF_REGISTERS + 5] = {
/* dest_reg is the absolute name of the register
Useful for reordering instructions in the delay slot. */
-static sljit_si push_inst(struct sljit_compiler *compiler, sljit_ins ins, sljit_si delay_slot)
+static sljit_s32 push_inst(struct sljit_compiler *compiler, sljit_ins ins, sljit_s32 delay_slot)
{
SLJIT_ASSERT(delay_slot == MOVABLE_INS || delay_slot >= UNMOVABLE_INS
|| delay_slot == ((ins >> 11) & 0x1f) || delay_slot == ((ins >> 16) & 0x1f));
@@ -213,7 +213,7 @@ static sljit_si push_inst(struct sljit_compiler *compiler, sljit_ins ins, sljit_
return SLJIT_SUCCESS;
}
-static SLJIT_INLINE sljit_ins invert_branch(sljit_si flags)
+static SLJIT_INLINE sljit_ins invert_branch(sljit_s32 flags)
{
return (flags & IS_BIT26_COND) ? (1 << 26) : (1 << 16);
}
@@ -538,12 +538,12 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
#include "sljitNativeMIPS_64.c"
#endif
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compiler *compiler,
- sljit_si options, sljit_si args, sljit_si scratches, sljit_si saveds,
- sljit_si fscratches, sljit_si fsaveds, sljit_si local_size)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compiler,
+ sljit_s32 options, sljit_s32 args, sljit_s32 scratches, sljit_s32 saveds,
+ sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
{
sljit_ins base;
- sljit_si i, tmp, offs;
+ sljit_s32 i, tmp, offs;
CHECK_ERROR();
CHECK(check_sljit_emit_enter(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size));
@@ -575,12 +575,12 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compiler *compil
tmp = saveds < SLJIT_NUMBER_OF_SAVED_REGISTERS ? (SLJIT_S0 + 1 - saveds) : SLJIT_FIRST_SAVED_REG;
for (i = SLJIT_S0; i >= tmp; i--) {
- offs -= (sljit_si)(sizeof(sljit_sw));
+ offs -= (sljit_s32)(sizeof(sljit_sw));
FAIL_IF(push_inst(compiler, STACK_STORE | base | T(i) | IMM(offs), MOVABLE_INS));
}
for (i = scratches; i >= SLJIT_FIRST_SAVED_REG; i--) {
- offs -= (sljit_si)(sizeof(sljit_sw));
+ offs -= (sljit_s32)(sizeof(sljit_sw));
FAIL_IF(push_inst(compiler, STACK_STORE | base | T(i) | IMM(offs), MOVABLE_INS));
}
@@ -594,9 +594,9 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compiler *compil
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_set_context(struct sljit_compiler *compiler,
- sljit_si options, sljit_si args, sljit_si scratches, sljit_si saveds,
- sljit_si fscratches, sljit_si fsaveds, sljit_si local_size)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_set_context(struct sljit_compiler *compiler,
+ sljit_s32 options, sljit_s32 args, sljit_s32 scratches, sljit_s32 saveds,
+ sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
{
CHECK_ERROR();
CHECK(check_sljit_set_context(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size));
@@ -611,9 +611,9 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_set_context(struct sljit_compiler *compi
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_return(struct sljit_compiler *compiler, sljit_si op, sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 src, sljit_sw srcw)
{
- sljit_si local_size, i, tmp, offs;
+ sljit_s32 local_size, i, tmp, offs;
sljit_ins base;
CHECK_ERROR();
@@ -631,19 +631,19 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_return(struct sljit_compiler *compi
local_size = 0;
}
- FAIL_IF(push_inst(compiler, STACK_LOAD | base | TA(RETURN_ADDR_REG) | IMM(local_size - (sljit_si)sizeof(sljit_sw)), RETURN_ADDR_REG));
- offs = local_size - (sljit_si)GET_SAVED_REGISTERS_SIZE(compiler->scratches, compiler->saveds, 1);
+ FAIL_IF(push_inst(compiler, STACK_LOAD | base | TA(RETURN_ADDR_REG) | IMM(local_size - (sljit_s32)sizeof(sljit_sw)), RETURN_ADDR_REG));
+ offs = local_size - (sljit_s32)GET_SAVED_REGISTERS_SIZE(compiler->scratches, compiler->saveds, 1);
tmp = compiler->scratches;
for (i = SLJIT_FIRST_SAVED_REG; i <= tmp; i++) {
FAIL_IF(push_inst(compiler, STACK_LOAD | base | T(i) | IMM(offs), DR(i)));
- offs += (sljit_si)(sizeof(sljit_sw));
+ offs += (sljit_s32)(sizeof(sljit_sw));
}
tmp = compiler->saveds < SLJIT_NUMBER_OF_SAVED_REGISTERS ? (SLJIT_S0 + 1 - compiler->saveds) : SLJIT_FIRST_SAVED_REG;
for (i = tmp; i <= SLJIT_S0; i++) {
FAIL_IF(push_inst(compiler, STACK_LOAD | base | T(i) | IMM(offs), DR(i)));
- offs += (sljit_si)(sizeof(sljit_sw));
+ offs += (sljit_s32)(sizeof(sljit_sw));
}
SLJIT_ASSERT(offs == local_size - (sljit_sw)(sizeof(sljit_sw)));
@@ -668,7 +668,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_return(struct sljit_compiler *compi
#define ARCH_32_64(a, b) b
#endif
-static SLJIT_CONST sljit_ins data_transfer_insts[16 + 4] = {
+static const sljit_ins data_transfer_insts[16 + 4] = {
/* u w s */ ARCH_32_64(HI(43) /* sw */, HI(63) /* sd */),
/* u w l */ ARCH_32_64(HI(35) /* lw */, HI(55) /* ld */),
/* u b s */ HI(40) /* sb */,
@@ -698,7 +698,7 @@ static SLJIT_CONST sljit_ins data_transfer_insts[16 + 4] = {
/* reg_ar is an absolute register! */
/* Can perform an operation using at most 1 instruction. */
-static sljit_si getput_arg_fast(struct sljit_compiler *compiler, sljit_si flags, sljit_si reg_ar, sljit_si arg, sljit_sw argw)
+static sljit_s32 getput_arg_fast(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg_ar, sljit_s32 arg, sljit_sw argw)
{
SLJIT_ASSERT(arg & SLJIT_MEM);
@@ -716,7 +716,7 @@ static sljit_si getput_arg_fast(struct sljit_compiler *compiler, sljit_si flags,
/* See getput_arg below.
Note: can_cache is called only for binary operators. Those
operators always use word arguments without write back. */
-static sljit_si can_cache(sljit_si arg, sljit_sw argw, sljit_si next_arg, sljit_sw next_argw)
+static sljit_s32 can_cache(sljit_s32 arg, sljit_sw argw, sljit_s32 next_arg, sljit_sw next_argw)
{
SLJIT_ASSERT((arg & SLJIT_MEM) && (next_arg & SLJIT_MEM));
@@ -739,9 +739,9 @@ static sljit_si can_cache(sljit_si arg, sljit_sw argw, sljit_si next_arg, sljit_
}
/* Emit the necessary instructions. See can_cache above. */
-static sljit_si getput_arg(struct sljit_compiler *compiler, sljit_si flags, sljit_si reg_ar, sljit_si arg, sljit_sw argw, sljit_si next_arg, sljit_sw next_argw)
+static sljit_s32 getput_arg(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg_ar, sljit_s32 arg, sljit_sw argw, sljit_s32 next_arg, sljit_sw next_argw)
{
- sljit_si tmp_ar, base, delay_slot;
+ sljit_s32 tmp_ar, base, delay_slot;
SLJIT_ASSERT(arg & SLJIT_MEM);
if (!(next_arg & SLJIT_MEM)) {
@@ -878,7 +878,7 @@ static sljit_si getput_arg(struct sljit_compiler *compiler, sljit_si flags, slji
return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | SA(tmp_ar) | TA(reg_ar), delay_slot);
}
-static SLJIT_INLINE sljit_si emit_op_mem(struct sljit_compiler *compiler, sljit_si flags, sljit_si reg_ar, sljit_si arg, sljit_sw argw)
+static SLJIT_INLINE sljit_s32 emit_op_mem(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg_ar, sljit_s32 arg, sljit_sw argw)
{
if (getput_arg_fast(compiler, flags, reg_ar, arg, argw))
return compiler->error;
@@ -887,26 +887,26 @@ static SLJIT_INLINE sljit_si emit_op_mem(struct sljit_compiler *compiler, sljit_
return getput_arg(compiler, flags, reg_ar, arg, argw, 0, 0);
}
-static SLJIT_INLINE sljit_si emit_op_mem2(struct sljit_compiler *compiler, sljit_si flags, sljit_si reg, sljit_si arg1, sljit_sw arg1w, sljit_si arg2, sljit_sw arg2w)
+static SLJIT_INLINE sljit_s32 emit_op_mem2(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg, sljit_s32 arg1, sljit_sw arg1w, sljit_s32 arg2, sljit_sw arg2w)
{
if (getput_arg_fast(compiler, flags, reg, arg1, arg1w))
return compiler->error;
return getput_arg(compiler, flags, reg, arg1, arg1w, arg2, arg2w);
}
-static sljit_si emit_op(struct sljit_compiler *compiler, sljit_si op, sljit_si flags,
- sljit_si dst, sljit_sw dstw,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+static sljit_s32 emit_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 flags,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
/* arg1 goes to TMP_REG1 or src reg
arg2 goes to TMP_REG2, imm or src reg
TMP_REG3 can be used for caching
result goes to TMP_REG2, so put result can use TMP_REG1 and TMP_REG3. */
- sljit_si dst_r = TMP_REG2;
- sljit_si src1_r;
+ sljit_s32 dst_r = TMP_REG2;
+ sljit_s32 src1_r;
sljit_sw src2_r = 0;
- sljit_si sugg_src2_r = TMP_REG2;
+ sljit_s32 sugg_src2_r = TMP_REG2;
if (!(flags & ALT_KEEP_CACHE)) {
compiler->cache_arg = 0;
@@ -914,7 +914,7 @@ static sljit_si emit_op(struct sljit_compiler *compiler, sljit_si op, sljit_si f
}
if (SLJIT_UNLIKELY(dst == SLJIT_UNUSED)) {
- if (op >= SLJIT_MOV && op <= SLJIT_MOVU_SI && !(src2 & SLJIT_MEM))
+ if (op >= SLJIT_MOV && op <= SLJIT_MOVU_S32 && !(src2 & SLJIT_MEM))
return SLJIT_SUCCESS;
if (GET_FLAGS(op))
flags |= UNUSED_DEST;
@@ -922,7 +922,7 @@ static sljit_si emit_op(struct sljit_compiler *compiler, sljit_si op, sljit_si f
else if (FAST_IS_REG(dst)) {
dst_r = dst;
flags |= REG_DEST;
- if (op >= SLJIT_MOV && op <= SLJIT_MOVU_SI)
+ if (op >= SLJIT_MOV && op <= SLJIT_MOVU_S32)
sugg_src2_r = dst_r;
}
else if ((dst & SLJIT_MEM) && !getput_arg_fast(compiler, flags | ARG_TEST, DR(TMP_REG1), dst, dstw))
@@ -976,7 +976,7 @@ static sljit_si emit_op(struct sljit_compiler *compiler, sljit_si op, sljit_si f
if (FAST_IS_REG(src2)) {
src2_r = src2;
flags |= REG2_SOURCE;
- if (!(flags & REG_DEST) && op >= SLJIT_MOV && op <= SLJIT_MOVU_SI)
+ if (!(flags & REG_DEST) && op >= SLJIT_MOV && op <= SLJIT_MOVU_S32)
dst_r = src2_r;
}
else if (src2 & SLJIT_IMM) {
@@ -987,7 +987,7 @@ static sljit_si emit_op(struct sljit_compiler *compiler, sljit_si op, sljit_si f
}
else {
src2_r = 0;
- if ((op >= SLJIT_MOV && op <= SLJIT_MOVU_SI) && (dst & SLJIT_MEM))
+ if ((op >= SLJIT_MOV && op <= SLJIT_MOVU_S32) && (dst & SLJIT_MEM))
dst_r = 0;
}
}
@@ -1029,10 +1029,10 @@ static sljit_si emit_op(struct sljit_compiler *compiler, sljit_si op, sljit_si f
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op0(struct sljit_compiler *compiler, sljit_si op)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op0(struct sljit_compiler *compiler, sljit_s32 op)
{
#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
- sljit_si int_op = op & SLJIT_INT_OP;
+ sljit_s32 int_op = op & SLJIT_I32_OP;
#endif
CHECK_ERROR();
@@ -1044,20 +1044,20 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op0(struct sljit_compiler *compiler
return push_inst(compiler, BREAK, UNMOVABLE_INS);
case SLJIT_NOP:
return push_inst(compiler, NOP, UNMOVABLE_INS);
- case SLJIT_LUMUL:
- case SLJIT_LSMUL:
+ case SLJIT_LMUL_UW:
+ case SLJIT_LMUL_SW:
#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
- FAIL_IF(push_inst(compiler, (op == SLJIT_LUMUL ? DMULTU : DMULT) | S(SLJIT_R0) | T(SLJIT_R1), MOVABLE_INS));
+ FAIL_IF(push_inst(compiler, (op == SLJIT_LMUL_UW ? DMULTU : DMULT) | S(SLJIT_R0) | T(SLJIT_R1), MOVABLE_INS));
#else
- FAIL_IF(push_inst(compiler, (op == SLJIT_LUMUL ? MULTU : MULT) | S(SLJIT_R0) | T(SLJIT_R1), MOVABLE_INS));
+ FAIL_IF(push_inst(compiler, (op == SLJIT_LMUL_UW ? MULTU : MULT) | S(SLJIT_R0) | T(SLJIT_R1), MOVABLE_INS));
#endif
FAIL_IF(push_inst(compiler, MFLO | D(SLJIT_R0), DR(SLJIT_R0)));
return push_inst(compiler, MFHI | D(SLJIT_R1), DR(SLJIT_R1));
- case SLJIT_UDIVMOD:
- case SLJIT_SDIVMOD:
- case SLJIT_UDIVI:
- case SLJIT_SDIVI:
- SLJIT_COMPILE_ASSERT((SLJIT_UDIVMOD & 0x2) == 0 && SLJIT_UDIVI - 0x2 == SLJIT_UDIVMOD, bad_div_opcode_assignments);
+ case SLJIT_DIVMOD_UW:
+ case SLJIT_DIVMOD_SW:
+ case SLJIT_DIV_UW:
+ case SLJIT_DIV_SW:
+ SLJIT_COMPILE_ASSERT((SLJIT_DIVMOD_UW & 0x2) == 0 && SLJIT_DIV_UW - 0x2 == SLJIT_DIVMOD_UW, bad_div_opcode_assignments);
#if !(defined SLJIT_MIPS_R1 && SLJIT_MIPS_R1)
FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));
FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));
@@ -1065,28 +1065,28 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op0(struct sljit_compiler *compiler
#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
if (int_op)
- FAIL_IF(push_inst(compiler, ((op | 0x2) == SLJIT_UDIVI ? DIVU : DIV) | S(SLJIT_R0) | T(SLJIT_R1), MOVABLE_INS));
+ FAIL_IF(push_inst(compiler, ((op | 0x2) == SLJIT_DIV_UW ? DIVU : DIV) | S(SLJIT_R0) | T(SLJIT_R1), MOVABLE_INS));
else
- FAIL_IF(push_inst(compiler, ((op | 0x2) == SLJIT_UDIVI ? DDIVU : DDIV) | S(SLJIT_R0) | T(SLJIT_R1), MOVABLE_INS));
+ FAIL_IF(push_inst(compiler, ((op | 0x2) == SLJIT_DIV_UW ? DDIVU : DDIV) | S(SLJIT_R0) | T(SLJIT_R1), MOVABLE_INS));
#else
- FAIL_IF(push_inst(compiler, ((op | 0x2) == SLJIT_UDIVI ? DIVU : DIV) | S(SLJIT_R0) | T(SLJIT_R1), MOVABLE_INS));
+ FAIL_IF(push_inst(compiler, ((op | 0x2) == SLJIT_DIV_UW ? DIVU : DIV) | S(SLJIT_R0) | T(SLJIT_R1), MOVABLE_INS));
#endif
FAIL_IF(push_inst(compiler, MFLO | D(SLJIT_R0), DR(SLJIT_R0)));
- return (op >= SLJIT_UDIVI) ? SLJIT_SUCCESS : push_inst(compiler, MFHI | D(SLJIT_R1), DR(SLJIT_R1));
+ return (op >= SLJIT_DIV_UW) ? SLJIT_SUCCESS : push_inst(compiler, MFHI | D(SLJIT_R1), DR(SLJIT_R1));
}
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op1(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
{
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
# define flags 0
#else
- sljit_si flags = 0;
+ sljit_s32 flags = 0;
#endif
CHECK_ERROR();
@@ -1095,10 +1095,10 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op1(struct sljit_compiler *compiler
ADJUST_LOCAL_OFFSET(src, srcw);
#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
- if ((op & SLJIT_INT_OP) && GET_OPCODE(op) >= SLJIT_NOT) {
+ if ((op & SLJIT_I32_OP) && GET_OPCODE(op) >= SLJIT_NOT) {
flags |= INT_DATA | SIGNED_DATA;
if (src & SLJIT_IMM)
- srcw = (sljit_si)srcw;
+ srcw = (sljit_s32)srcw;
}
#endif
@@ -1107,61 +1107,61 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op1(struct sljit_compiler *compiler
case SLJIT_MOV_P:
return emit_op(compiler, SLJIT_MOV, WORD_DATA, dst, dstw, TMP_REG1, 0, src, srcw);
- case SLJIT_MOV_UI:
+ case SLJIT_MOV_U32:
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
- return emit_op(compiler, SLJIT_MOV_UI, INT_DATA, dst, dstw, TMP_REG1, 0, src, srcw);
+ return emit_op(compiler, SLJIT_MOV_U32, INT_DATA, dst, dstw, TMP_REG1, 0, src, srcw);
#else
- return emit_op(compiler, SLJIT_MOV_UI, INT_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_ui)srcw : srcw);
+ return emit_op(compiler, SLJIT_MOV_U32, INT_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_u32)srcw : srcw);
#endif
- case SLJIT_MOV_SI:
+ case SLJIT_MOV_S32:
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
- return emit_op(compiler, SLJIT_MOV_SI, INT_DATA | SIGNED_DATA, dst, dstw, TMP_REG1, 0, src, srcw);
+ return emit_op(compiler, SLJIT_MOV_S32, INT_DATA | SIGNED_DATA, dst, dstw, TMP_REG1, 0, src, srcw);
#else
- return emit_op(compiler, SLJIT_MOV_SI, INT_DATA | SIGNED_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_si)srcw : srcw);
+ return emit_op(compiler, SLJIT_MOV_S32, INT_DATA | SIGNED_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s32)srcw : srcw);
#endif
- case SLJIT_MOV_UB:
- return emit_op(compiler, SLJIT_MOV_UB, BYTE_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_ub)srcw : srcw);
+ case SLJIT_MOV_U8:
+ return emit_op(compiler, SLJIT_MOV_U8, BYTE_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_u8)srcw : srcw);
- case SLJIT_MOV_SB:
- return emit_op(compiler, SLJIT_MOV_SB, BYTE_DATA | SIGNED_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_sb)srcw : srcw);
+ case SLJIT_MOV_S8:
+ return emit_op(compiler, SLJIT_MOV_S8, BYTE_DATA | SIGNED_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s8)srcw : srcw);
- case SLJIT_MOV_UH:
- return emit_op(compiler, SLJIT_MOV_UH, HALF_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_uh)srcw : srcw);
+ case SLJIT_MOV_U16:
+ return emit_op(compiler, SLJIT_MOV_U16, HALF_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_u16)srcw : srcw);
- case SLJIT_MOV_SH:
- return emit_op(compiler, SLJIT_MOV_SH, HALF_DATA | SIGNED_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_sh)srcw : srcw);
+ case SLJIT_MOV_S16:
+ return emit_op(compiler, SLJIT_MOV_S16, HALF_DATA | SIGNED_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s16)srcw : srcw);
case SLJIT_MOVU:
case SLJIT_MOVU_P:
return emit_op(compiler, SLJIT_MOV, WORD_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, srcw);
- case SLJIT_MOVU_UI:
+ case SLJIT_MOVU_U32:
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
- return emit_op(compiler, SLJIT_MOV_UI, INT_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, srcw);
+ return emit_op(compiler, SLJIT_MOV_U32, INT_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, srcw);
#else
- return emit_op(compiler, SLJIT_MOV_UI, INT_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_ui)srcw : srcw);
+ return emit_op(compiler, SLJIT_MOV_U32, INT_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_u32)srcw : srcw);
#endif
- case SLJIT_MOVU_SI:
+ case SLJIT_MOVU_S32:
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
- return emit_op(compiler, SLJIT_MOV_SI, INT_DATA | SIGNED_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, srcw);
+ return emit_op(compiler, SLJIT_MOV_S32, INT_DATA | SIGNED_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, srcw);
#else
- return emit_op(compiler, SLJIT_MOV_SI, INT_DATA | SIGNED_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_si)srcw : srcw);
+ return emit_op(compiler, SLJIT_MOV_S32, INT_DATA | SIGNED_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s32)srcw : srcw);
#endif
- case SLJIT_MOVU_UB:
- return emit_op(compiler, SLJIT_MOV_UB, BYTE_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_ub)srcw : srcw);
+ case SLJIT_MOVU_U8:
+ return emit_op(compiler, SLJIT_MOV_U8, BYTE_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_u8)srcw : srcw);
- case SLJIT_MOVU_SB:
- return emit_op(compiler, SLJIT_MOV_SB, BYTE_DATA | SIGNED_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_sb)srcw : srcw);
+ case SLJIT_MOVU_S8:
+ return emit_op(compiler, SLJIT_MOV_S8, BYTE_DATA | SIGNED_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s8)srcw : srcw);
- case SLJIT_MOVU_UH:
- return emit_op(compiler, SLJIT_MOV_UH, HALF_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_uh)srcw : srcw);
+ case SLJIT_MOVU_U16:
+ return emit_op(compiler, SLJIT_MOV_U16, HALF_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_u16)srcw : srcw);
- case SLJIT_MOVU_SH:
- return emit_op(compiler, SLJIT_MOV_SH, HALF_DATA | SIGNED_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_sh)srcw : srcw);
+ case SLJIT_MOVU_S16:
+ return emit_op(compiler, SLJIT_MOV_S16, HALF_DATA | SIGNED_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s16)srcw : srcw);
case SLJIT_NOT:
return emit_op(compiler, op, flags, dst, dstw, TMP_REG1, 0, src, srcw);
@@ -1180,15 +1180,15 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op1(struct sljit_compiler *compiler
#endif
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op2(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
# define flags 0
#else
- sljit_si flags = 0;
+ sljit_s32 flags = 0;
#endif
CHECK_ERROR();
@@ -1198,12 +1198,12 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op2(struct sljit_compiler *compiler
ADJUST_LOCAL_OFFSET(src2, src2w);
#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
- if (op & SLJIT_INT_OP) {
+ if (op & SLJIT_I32_OP) {
flags |= INT_DATA | SIGNED_DATA;
if (src1 & SLJIT_IMM)
- src1w = (sljit_si)src1w;
+ src1w = (sljit_s32)src1w;
if (src2 & SLJIT_IMM)
- src2w = (sljit_si)src2w;
+ src2w = (sljit_s32)src2w;
}
#endif
@@ -1232,7 +1232,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op2(struct sljit_compiler *compiler
src2w &= 0x1f;
#else
if (src2 & SLJIT_IMM) {
- if (op & SLJIT_INT_OP)
+ if (op & SLJIT_I32_OP)
src2w &= 0x1f;
else
src2w &= 0x3f;
@@ -1248,20 +1248,20 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op2(struct sljit_compiler *compiler
#endif
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_get_register_index(sljit_si reg)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 reg)
{
CHECK_REG_INDEX(check_sljit_get_register_index(reg));
return reg_map[reg];
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_get_float_register_index(sljit_si reg)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_float_register_index(sljit_s32 reg)
{
CHECK_REG_INDEX(check_sljit_get_float_register_index(reg));
return reg << 1;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_custom(struct sljit_compiler *compiler,
- void *instruction, sljit_si size)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *compiler,
+ void *instruction, sljit_s32 size)
{
CHECK_ERROR();
CHECK(check_sljit_emit_op_custom(compiler, instruction, size));
@@ -1273,7 +1273,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_custom(struct sljit_compiler *co
/* Floating point operators */
/* --------------------------------------------------------------------- */
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_is_fpu_available(void)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_is_fpu_available(void)
{
#ifdef SLJIT_IS_FPU_AVAILABLE
return SLJIT_IS_FPU_AVAILABLE;
@@ -1286,17 +1286,17 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_is_fpu_available(void)
#endif
}
-#define FLOAT_DATA(op) (DOUBLE_DATA | ((op & SLJIT_SINGLE_OP) >> 7))
-#define FMT(op) (((op & SLJIT_SINGLE_OP) ^ SLJIT_SINGLE_OP) << (21 - 8))
+#define FLOAT_DATA(op) (DOUBLE_DATA | ((op & SLJIT_F32_OP) >> 7))
+#define FMT(op) (((op & SLJIT_F32_OP) ^ SLJIT_F32_OP) << (21 - 8))
-static SLJIT_INLINE sljit_si sljit_emit_fop1_convw_fromd(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw)
+static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
{
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
# define flags 0
#else
- sljit_si flags = (GET_OPCODE(op) == SLJIT_CONVW_FROMD) << 21;
+ sljit_s32 flags = (GET_OPCODE(op) == SLJIT_CONV_SW_FROM_F64) << 21;
#endif
if (src & SLJIT_MEM) {
@@ -1322,17 +1322,17 @@ static SLJIT_INLINE sljit_si sljit_emit_fop1_convw_fromd(struct sljit_compiler *
#endif
}
-static SLJIT_INLINE sljit_si sljit_emit_fop1_convd_fromw(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw)
+static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_sw(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
{
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
# define flags 0
#else
- sljit_si flags = (GET_OPCODE(op) == SLJIT_CONVD_FROMW) << 21;
+ sljit_s32 flags = (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_SW) << 21;
#endif
- sljit_si dst_r = FAST_IS_REG(dst) ? (dst << 1) : TMP_FREG1;
+ sljit_s32 dst_r = FAST_IS_REG(dst) ? (dst << 1) : TMP_FREG1;
if (FAST_IS_REG(src))
FAIL_IF(push_inst(compiler, MTC1 | flags | T(src) | FS(TMP_FREG1), MOVABLE_INS));
@@ -1342,14 +1342,14 @@ static SLJIT_INLINE sljit_si sljit_emit_fop1_convd_fromw(struct sljit_compiler *
}
else {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- if (GET_OPCODE(op) == SLJIT_CONVD_FROMI)
- srcw = (sljit_si)srcw;
+ if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_S32)
+ srcw = (sljit_s32)srcw;
#endif
FAIL_IF(load_immediate(compiler, DR(TMP_REG1), srcw));
FAIL_IF(push_inst(compiler, MTC1 | flags | T(TMP_REG1) | FS(TMP_FREG1), MOVABLE_INS));
}
- FAIL_IF(push_inst(compiler, CVT_S_S | flags | (4 << 21) | (((op & SLJIT_SINGLE_OP) ^ SLJIT_SINGLE_OP) >> 8) | FS(TMP_FREG1) | FD(dst_r), MOVABLE_INS));
+ FAIL_IF(push_inst(compiler, CVT_S_S | flags | (4 << 21) | (((op & SLJIT_F32_OP) ^ SLJIT_F32_OP) >> 8) | FS(TMP_FREG1) | FD(dst_r), MOVABLE_INS));
if (dst & SLJIT_MEM)
return emit_op_mem2(compiler, FLOAT_DATA(op), TMP_FREG1, dst, dstw, 0, 0);
@@ -1360,9 +1360,9 @@ static SLJIT_INLINE sljit_si sljit_emit_fop1_convd_fromw(struct sljit_compiler *
#endif
}
-static SLJIT_INLINE sljit_si sljit_emit_fop1_cmp(struct sljit_compiler *compiler, sljit_si op,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+static SLJIT_INLINE sljit_s32 sljit_emit_fop1_cmp(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
if (src1 & SLJIT_MEM) {
FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src1, src1w, src2, src2w));
@@ -1399,21 +1399,21 @@ static SLJIT_INLINE sljit_si sljit_emit_fop1_cmp(struct sljit_compiler *compiler
return push_inst(compiler, C_UN_S | FMT(op) | FT(src2) | FS(src1), FCSR_FCC);
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop1(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
{
- sljit_si dst_r;
+ sljit_s32 dst_r;
CHECK_ERROR();
compiler->cache_arg = 0;
compiler->cache_argw = 0;
- SLJIT_COMPILE_ASSERT((SLJIT_SINGLE_OP == 0x100) && !(DOUBLE_DATA & 0x2), float_transfer_bit_error);
+ SLJIT_COMPILE_ASSERT((SLJIT_F32_OP == 0x100) && !(DOUBLE_DATA & 0x2), float_transfer_bit_error);
SELECT_FOP1_OPERATION_WITH_CHECKS(compiler, op, dst, dstw, src, srcw);
- if (GET_OPCODE(op) == SLJIT_CONVD_FROMS)
- op ^= SLJIT_SINGLE_OP;
+ if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_F32)
+ op ^= SLJIT_F32_OP;
dst_r = FAST_IS_REG(dst) ? (dst << 1) : TMP_FREG1;
@@ -1425,7 +1425,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop1(struct sljit_compiler *compile
src <<= 1;
switch (GET_OPCODE(op)) {
- case SLJIT_DMOV:
+ case SLJIT_MOV_F64:
if (src != dst_r) {
if (dst_r != TMP_FREG1)
FAIL_IF(push_inst(compiler, MOV_S | FMT(op) | FS(src) | FD(dst_r), MOVABLE_INS));
@@ -1433,15 +1433,15 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop1(struct sljit_compiler *compile
dst_r = src;
}
break;
- case SLJIT_DNEG:
+ case SLJIT_NEG_F64:
FAIL_IF(push_inst(compiler, NEG_S | FMT(op) | FS(src) | FD(dst_r), MOVABLE_INS));
break;
- case SLJIT_DABS:
+ case SLJIT_ABS_F64:
FAIL_IF(push_inst(compiler, ABS_S | FMT(op) | FS(src) | FD(dst_r), MOVABLE_INS));
break;
- case SLJIT_CONVD_FROMS:
- FAIL_IF(push_inst(compiler, CVT_S_S | ((op & SLJIT_SINGLE_OP) ? 1 : (1 << 21)) | FS(src) | FD(dst_r), MOVABLE_INS));
- op ^= SLJIT_SINGLE_OP;
+ case SLJIT_CONV_F64_FROM_F32:
+ FAIL_IF(push_inst(compiler, CVT_S_S | ((op & SLJIT_F32_OP) ? 1 : (1 << 21)) | FS(src) | FD(dst_r), MOVABLE_INS));
+ op ^= SLJIT_F32_OP;
break;
}
@@ -1450,12 +1450,12 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop1(struct sljit_compiler *compile
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop2(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
- sljit_si dst_r, flags = 0;
+ sljit_s32 dst_r, flags = 0;
CHECK_ERROR();
CHECK(check_sljit_emit_fop2(compiler, op, dst, dstw, src1, src1w, src2, src2w));
@@ -1509,19 +1509,19 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop2(struct sljit_compiler *compile
src2 = TMP_FREG2;
switch (GET_OPCODE(op)) {
- case SLJIT_DADD:
+ case SLJIT_ADD_F64:
FAIL_IF(push_inst(compiler, ADD_S | FMT(op) | FT(src2) | FS(src1) | FD(dst_r), MOVABLE_INS));
break;
- case SLJIT_DSUB:
+ case SLJIT_SUB_F64:
FAIL_IF(push_inst(compiler, SUB_S | FMT(op) | FT(src2) | FS(src1) | FD(dst_r), MOVABLE_INS));
break;
- case SLJIT_DMUL:
+ case SLJIT_MUL_F64:
FAIL_IF(push_inst(compiler, MUL_S | FMT(op) | FT(src2) | FS(src1) | FD(dst_r), MOVABLE_INS));
break;
- case SLJIT_DDIV:
+ case SLJIT_DIV_F64:
FAIL_IF(push_inst(compiler, DIV_S | FMT(op) | FT(src2) | FS(src1) | FD(dst_r), MOVABLE_INS));
break;
}
@@ -1536,7 +1536,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop2(struct sljit_compiler *compile
/* Other instructions */
/* --------------------------------------------------------------------- */
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_si dst, sljit_sw dstw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
{
CHECK_ERROR();
CHECK(check_sljit_emit_fast_enter(compiler, dst, dstw));
@@ -1553,7 +1553,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_enter(struct sljit_compiler *c
return emit_op_mem(compiler, WORD_DATA, RETURN_ADDR_REG, dst, dstw);
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_return(struct sljit_compiler *compiler, sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_return(struct sljit_compiler *compiler, sljit_s32 src, sljit_sw srcw)
{
CHECK_ERROR();
CHECK(check_sljit_emit_fast_return(compiler, src, srcw));
@@ -1617,12 +1617,12 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emit_label(struct sljit_compi
flags = IS_BIT16_COND; \
delay_check = FCSR_FCC;
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, sljit_si type)
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, sljit_s32 type)
{
struct sljit_jump *jump;
sljit_ins inst;
- sljit_si flags = 0;
- sljit_si delay_check = UNMOVABLE_INS;
+ sljit_s32 flags = 0;
+ sljit_s32 delay_check = UNMOVABLE_INS;
CHECK_ERROR_PTR();
CHECK_PTR(check_sljit_emit_jump(compiler, type));
@@ -1634,27 +1634,27 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compile
switch (type) {
case SLJIT_EQUAL:
- case SLJIT_D_NOT_EQUAL:
+ case SLJIT_NOT_EQUAL_F64:
BR_NZ(EQUAL_FLAG);
break;
case SLJIT_NOT_EQUAL:
- case SLJIT_D_EQUAL:
+ case SLJIT_EQUAL_F64:
BR_Z(EQUAL_FLAG);
break;
case SLJIT_LESS:
- case SLJIT_D_LESS:
+ case SLJIT_LESS_F64:
BR_Z(ULESS_FLAG);
break;
case SLJIT_GREATER_EQUAL:
- case SLJIT_D_GREATER_EQUAL:
+ case SLJIT_GREATER_EQUAL_F64:
BR_NZ(ULESS_FLAG);
break;
case SLJIT_GREATER:
- case SLJIT_D_GREATER:
+ case SLJIT_GREATER_F64:
BR_Z(UGREATER_FLAG);
break;
case SLJIT_LESS_EQUAL:
- case SLJIT_D_LESS_EQUAL:
+ case SLJIT_LESS_EQUAL_F64:
BR_NZ(UGREATER_FLAG);
break;
case SLJIT_SIG_LESS:
@@ -1677,10 +1677,10 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compile
case SLJIT_MUL_NOT_OVERFLOW:
BR_NZ(OVERFLOW_FLAG);
break;
- case SLJIT_D_UNORDERED:
+ case SLJIT_UNORDERED_F64:
BR_F();
break;
- case SLJIT_D_ORDERED:
+ case SLJIT_ORDERED_F64:
BR_T();
break;
default:
@@ -1733,12 +1733,12 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compile
src2 = 0; \
}
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_cmp(struct sljit_compiler *compiler, sljit_si type,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_cmp(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
struct sljit_jump *jump;
- sljit_si flags;
+ sljit_s32 flags;
sljit_ins inst;
CHECK_ERROR_PTR();
@@ -1748,7 +1748,7 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_cmp(struct sljit_compiler
compiler->cache_arg = 0;
compiler->cache_argw = 0;
- flags = ((type & SLJIT_INT_OP) ? INT_DATA : WORD_DATA) | LOAD_DATA;
+ flags = ((type & SLJIT_I32_OP) ? INT_DATA : WORD_DATA) | LOAD_DATA;
if (src1 & SLJIT_MEM) {
PTR_FAIL_IF(emit_op_mem2(compiler, flags, DR(TMP_REG1), src1, src1w, src2, src2w));
src1 = TMP_REG1;
@@ -1854,13 +1854,13 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_cmp(struct sljit_compiler
#undef RESOLVE_IMM1
#undef RESOLVE_IMM2
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_fcmp(struct sljit_compiler *compiler, sljit_si type,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_fcmp(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
struct sljit_jump *jump;
sljit_ins inst;
- sljit_si if_true;
+ sljit_s32 if_true;
CHECK_ERROR_PTR();
CHECK_PTR(check_sljit_emit_fcmp(compiler, type, src1, src1w, src2, src2w));
@@ -1888,37 +1888,37 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_fcmp(struct sljit_compile
jump->flags |= IS_BIT16_COND;
switch (type & 0xff) {
- case SLJIT_D_EQUAL:
+ case SLJIT_EQUAL_F64:
inst = C_UEQ_S;
if_true = 1;
break;
- case SLJIT_D_NOT_EQUAL:
+ case SLJIT_NOT_EQUAL_F64:
inst = C_UEQ_S;
if_true = 0;
break;
- case SLJIT_D_LESS:
+ case SLJIT_LESS_F64:
inst = C_ULT_S;
if_true = 1;
break;
- case SLJIT_D_GREATER_EQUAL:
+ case SLJIT_GREATER_EQUAL_F64:
inst = C_ULT_S;
if_true = 0;
break;
- case SLJIT_D_GREATER:
+ case SLJIT_GREATER_F64:
inst = C_ULE_S;
if_true = 0;
break;
- case SLJIT_D_LESS_EQUAL:
+ case SLJIT_LESS_EQUAL_F64:
inst = C_ULE_S;
if_true = 1;
break;
- case SLJIT_D_UNORDERED:
+ case SLJIT_UNORDERED_F64:
inst = C_UN_S;
if_true = 1;
break;
default: /* Make compilers happy. */
SLJIT_ASSERT_STOP();
- case SLJIT_D_ORDERED:
+ case SLJIT_ORDERED_F64:
inst = C_UN_S;
if_true = 0;
break;
@@ -1943,9 +1943,9 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_fcmp(struct sljit_compile
#undef FLOAT_DATA
#undef FMT
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_ijump(struct sljit_compiler *compiler, sljit_si type, sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 src, sljit_sw srcw)
{
- sljit_si src_r = TMP_REG2;
+ sljit_s32 src_r = TMP_REG2;
struct sljit_jump *jump = NULL;
CHECK_ERROR();
@@ -2001,17 +2001,17 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_ijump(struct sljit_compiler *compil
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw,
- sljit_si type)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw,
+ sljit_s32 type)
{
- sljit_si sugg_dst_ar, dst_ar;
- sljit_si flags = GET_ALL_FLAGS(op);
+ sljit_s32 sugg_dst_ar, dst_ar;
+ sljit_s32 flags = GET_ALL_FLAGS(op);
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
# define mem_type WORD_DATA
#else
- sljit_si mem_type = (op & SLJIT_INT_OP) ? (INT_DATA | SIGNED_DATA) : WORD_DATA;
+ sljit_s32 mem_type = (op & SLJIT_I32_OP) ? (INT_DATA | SIGNED_DATA) : WORD_DATA;
#endif
CHECK_ERROR();
@@ -2023,7 +2023,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_flags(struct sljit_compiler *com
op = GET_OPCODE(op);
#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
- if (op == SLJIT_MOV_SI || op == SLJIT_MOV_UI)
+ if (op == SLJIT_MOV_S32 || op == SLJIT_MOV_U32)
mem_type = INT_DATA | SIGNED_DATA;
#endif
sugg_dst_ar = DR((op < SLJIT_ADD && FAST_IS_REG(dst)) ? dst : TMP_REG2);
@@ -2045,14 +2045,14 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_flags(struct sljit_compiler *com
break;
case SLJIT_LESS:
case SLJIT_GREATER_EQUAL:
- case SLJIT_D_LESS:
- case SLJIT_D_GREATER_EQUAL:
+ case SLJIT_LESS_F64:
+ case SLJIT_GREATER_EQUAL_F64:
dst_ar = ULESS_FLAG;
break;
case SLJIT_GREATER:
case SLJIT_LESS_EQUAL:
- case SLJIT_D_GREATER:
- case SLJIT_D_LESS_EQUAL:
+ case SLJIT_GREATER_F64:
+ case SLJIT_LESS_EQUAL_F64:
dst_ar = UGREATER_FLAG;
break;
case SLJIT_SIG_LESS:
@@ -2073,13 +2073,13 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_flags(struct sljit_compiler *com
dst_ar = sugg_dst_ar;
type ^= 0x1; /* Flip type bit for the XORI below. */
break;
- case SLJIT_D_EQUAL:
- case SLJIT_D_NOT_EQUAL:
+ case SLJIT_EQUAL_F64:
+ case SLJIT_NOT_EQUAL_F64:
dst_ar = EQUAL_FLAG;
break;
- case SLJIT_D_UNORDERED:
- case SLJIT_D_ORDERED:
+ case SLJIT_UNORDERED_F64:
+ case SLJIT_ORDERED_F64:
FAIL_IF(push_inst(compiler, CFC1 | TA(sugg_dst_ar) | DA(FCSR_REG), sugg_dst_ar));
FAIL_IF(push_inst(compiler, SRL | TA(sugg_dst_ar) | DA(sugg_dst_ar) | SH_IMM(23), sugg_dst_ar));
FAIL_IF(push_inst(compiler, ANDI | SA(sugg_dst_ar) | TA(sugg_dst_ar) | IMM(1), sugg_dst_ar));
@@ -2115,10 +2115,10 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_flags(struct sljit_compiler *com
#endif
}
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_si dst, sljit_sw dstw, sljit_sw init_value)
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw init_value)
{
struct sljit_const *const_;
- sljit_si reg;
+ sljit_s32 reg;
CHECK_ERROR_PTR();
CHECK_PTR(check_sljit_emit_const(compiler, dst, dstw, init_value));
diff --git a/src/3rdparty/pcre/sljit/sljitNativePPC_32.c b/src/3rdparty/pcre/sljit/sljitNativePPC_32.c
index b14b75ceb5..0f23cf86dd 100644
--- a/src/3rdparty/pcre/sljit/sljitNativePPC_32.c
+++ b/src/3rdparty/pcre/sljit/sljitNativePPC_32.c
@@ -26,7 +26,7 @@
/* ppc 32-bit arch dependent functions. */
-static sljit_si load_immediate(struct sljit_compiler *compiler, sljit_si reg, sljit_sw imm)
+static sljit_s32 load_immediate(struct sljit_compiler *compiler, sljit_s32 reg, sljit_sw imm)
{
if (imm <= SIMM_MAX && imm >= SIMM_MIN)
return push_inst(compiler, ADDI | D(reg) | A(0) | IMM(imm));
@@ -41,39 +41,39 @@ static sljit_si load_immediate(struct sljit_compiler *compiler, sljit_si reg, sl
#define INS_CLEAR_LEFT(dst, src, from) \
(RLWINM | S(src) | A(dst) | ((from) << 6) | (31 << 1))
-static SLJIT_INLINE sljit_si emit_single_op(struct sljit_compiler *compiler, sljit_si op, sljit_si flags,
- sljit_si dst, sljit_si src1, sljit_si src2)
+static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 flags,
+ sljit_s32 dst, sljit_s32 src1, sljit_s32 src2)
{
switch (op) {
case SLJIT_MOV:
- case SLJIT_MOV_UI:
- case SLJIT_MOV_SI:
+ case SLJIT_MOV_U32:
+ case SLJIT_MOV_S32:
case SLJIT_MOV_P:
SLJIT_ASSERT(src1 == TMP_REG1);
if (dst != src2)
return push_inst(compiler, OR | S(src2) | A(dst) | B(src2));
return SLJIT_SUCCESS;
- case SLJIT_MOV_UB:
- case SLJIT_MOV_SB:
+ case SLJIT_MOV_U8:
+ case SLJIT_MOV_S8:
SLJIT_ASSERT(src1 == TMP_REG1);
if ((flags & (REG_DEST | REG2_SOURCE)) == (REG_DEST | REG2_SOURCE)) {
- if (op == SLJIT_MOV_SB)
+ if (op == SLJIT_MOV_S8)
return push_inst(compiler, EXTSB | S(src2) | A(dst));
return push_inst(compiler, INS_CLEAR_LEFT(dst, src2, 24));
}
- else if ((flags & REG_DEST) && op == SLJIT_MOV_SB)
+ else if ((flags & REG_DEST) && op == SLJIT_MOV_S8)
return push_inst(compiler, EXTSB | S(src2) | A(dst));
else {
SLJIT_ASSERT(dst == src2);
}
return SLJIT_SUCCESS;
- case SLJIT_MOV_UH:
- case SLJIT_MOV_SH:
+ case SLJIT_MOV_U16:
+ case SLJIT_MOV_S16:
SLJIT_ASSERT(src1 == TMP_REG1);
if ((flags & (REG_DEST | REG2_SOURCE)) == (REG_DEST | REG2_SOURCE)) {
- if (op == SLJIT_MOV_SH)
+ if (op == SLJIT_MOV_S16)
return push_inst(compiler, EXTSH | S(src2) | A(dst));
return push_inst(compiler, INS_CLEAR_LEFT(dst, src2, 16));
}
@@ -244,7 +244,7 @@ static SLJIT_INLINE sljit_si emit_single_op(struct sljit_compiler *compiler, slj
return SLJIT_SUCCESS;
}
-static SLJIT_INLINE sljit_si emit_const(struct sljit_compiler *compiler, sljit_si reg, sljit_sw init_value)
+static SLJIT_INLINE sljit_s32 emit_const(struct sljit_compiler *compiler, sljit_s32 reg, sljit_sw init_value)
{
FAIL_IF(push_inst(compiler, ADDIS | D(reg) | A(0) | IMM(init_value >> 16)));
return push_inst(compiler, ORI | S(reg) | A(reg) | IMM(init_value));
diff --git a/src/3rdparty/pcre/sljit/sljitNativePPC_64.c b/src/3rdparty/pcre/sljit/sljitNativePPC_64.c
index 182ac7b3da..8e3223f725 100644
--- a/src/3rdparty/pcre/sljit/sljitNativePPC_64.c
+++ b/src/3rdparty/pcre/sljit/sljitNativePPC_64.c
@@ -41,7 +41,7 @@
#define PUSH_RLDICR(reg, shift) \
push_inst(compiler, RLDI(reg, reg, 63 - shift, shift, 1))
-static sljit_si load_immediate(struct sljit_compiler *compiler, sljit_si reg, sljit_sw imm)
+static sljit_s32 load_immediate(struct sljit_compiler *compiler, sljit_s32 reg, sljit_sw imm)
{
sljit_uw tmp;
sljit_uw shift;
@@ -145,8 +145,8 @@ static sljit_si load_immediate(struct sljit_compiler *compiler, sljit_si reg, sl
src1 = TMP_REG1; \
}
-static SLJIT_INLINE sljit_si emit_single_op(struct sljit_compiler *compiler, sljit_si op, sljit_si flags,
- sljit_si dst, sljit_si src1, sljit_si src2)
+static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 flags,
+ sljit_s32 dst, sljit_s32 src1, sljit_s32 src2)
{
switch (op) {
case SLJIT_MOV:
@@ -156,11 +156,11 @@ static SLJIT_INLINE sljit_si emit_single_op(struct sljit_compiler *compiler, slj
return push_inst(compiler, OR | S(src2) | A(dst) | B(src2));
return SLJIT_SUCCESS;
- case SLJIT_MOV_UI:
- case SLJIT_MOV_SI:
+ case SLJIT_MOV_U32:
+ case SLJIT_MOV_S32:
SLJIT_ASSERT(src1 == TMP_REG1);
if ((flags & (REG_DEST | REG2_SOURCE)) == (REG_DEST | REG2_SOURCE)) {
- if (op == SLJIT_MOV_SI)
+ if (op == SLJIT_MOV_S32)
return push_inst(compiler, EXTSW | S(src2) | A(dst));
return push_inst(compiler, INS_CLEAR_LEFT(dst, src2, 0));
}
@@ -169,26 +169,26 @@ static SLJIT_INLINE sljit_si emit_single_op(struct sljit_compiler *compiler, slj
}
return SLJIT_SUCCESS;
- case SLJIT_MOV_UB:
- case SLJIT_MOV_SB:
+ case SLJIT_MOV_U8:
+ case SLJIT_MOV_S8:
SLJIT_ASSERT(src1 == TMP_REG1);
if ((flags & (REG_DEST | REG2_SOURCE)) == (REG_DEST | REG2_SOURCE)) {
- if (op == SLJIT_MOV_SB)
+ if (op == SLJIT_MOV_S8)
return push_inst(compiler, EXTSB | S(src2) | A(dst));
return push_inst(compiler, INS_CLEAR_LEFT(dst, src2, 24));
}
- else if ((flags & REG_DEST) && op == SLJIT_MOV_SB)
+ else if ((flags & REG_DEST) && op == SLJIT_MOV_S8)
return push_inst(compiler, EXTSB | S(src2) | A(dst));
else {
SLJIT_ASSERT(dst == src2);
}
return SLJIT_SUCCESS;
- case SLJIT_MOV_UH:
- case SLJIT_MOV_SH:
+ case SLJIT_MOV_U16:
+ case SLJIT_MOV_S16:
SLJIT_ASSERT(src1 == TMP_REG1);
if ((flags & (REG_DEST | REG2_SOURCE)) == (REG_DEST | REG2_SOURCE)) {
- if (op == SLJIT_MOV_SH)
+ if (op == SLJIT_MOV_S16)
return push_inst(compiler, EXTSH | S(src2) | A(dst));
return push_inst(compiler, INS_CLEAR_LEFT(dst, src2, 16));
}
@@ -389,7 +389,7 @@ static SLJIT_INLINE sljit_si emit_single_op(struct sljit_compiler *compiler, slj
return SLJIT_SUCCESS;
}
-static SLJIT_INLINE sljit_si emit_const(struct sljit_compiler *compiler, sljit_si reg, sljit_sw init_value)
+static SLJIT_INLINE sljit_s32 emit_const(struct sljit_compiler *compiler, sljit_s32 reg, sljit_sw init_value)
{
FAIL_IF(push_inst(compiler, ADDIS | D(reg) | A(0) | IMM(init_value >> 48)));
FAIL_IF(push_inst(compiler, ORI | S(reg) | A(reg) | IMM(init_value >> 32)));
diff --git a/src/3rdparty/pcre/sljit/sljitNativePPC_common.c b/src/3rdparty/pcre/sljit/sljitNativePPC_common.c
index b6a043f4e4..a3647327bf 100644
--- a/src/3rdparty/pcre/sljit/sljitNativePPC_common.c
+++ b/src/3rdparty/pcre/sljit/sljitNativePPC_common.c
@@ -24,14 +24,14 @@
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
-SLJIT_API_FUNC_ATTRIBUTE SLJIT_CONST char* sljit_get_platform_name(void)
+SLJIT_API_FUNC_ATTRIBUTE const char* sljit_get_platform_name(void)
{
return "PowerPC" SLJIT_CPUINFO;
}
/* Length of an instruction word.
Both for ppc-32 and ppc-64. */
-typedef sljit_ui sljit_ins;
+typedef sljit_u32 sljit_ins;
#if ((defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32) && (defined _AIX)) \
|| (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
@@ -46,6 +46,8 @@ typedef sljit_ui sljit_ins;
#define SLJIT_PASS_ENTRY_ADDR_TO_CALL 1
#endif
+#if (defined SLJIT_CACHE_FLUSH_OWN_IMPL && SLJIT_CACHE_FLUSH_OWN_IMPL)
+
static void ppc_cache_flush(sljit_ins *from, sljit_ins *to)
{
#ifdef _AIX
@@ -87,6 +89,8 @@ static void ppc_cache_flush(sljit_ins *from, sljit_ins *to)
#endif /* _AIX */
}
+#endif /* (defined SLJIT_CACHE_FLUSH_OWN_IMPL && SLJIT_CACHE_FLUSH_OWN_IMPL) */
+
#define TMP_REG1 (SLJIT_NUMBER_OF_REGISTERS + 2)
#define TMP_REG2 (SLJIT_NUMBER_OF_REGISTERS + 3)
#define TMP_REG3 (SLJIT_NUMBER_OF_REGISTERS + 4)
@@ -101,7 +105,7 @@ static void ppc_cache_flush(sljit_ins *from, sljit_ins *to)
#define TMP_FREG1 (0)
#define TMP_FREG2 (SLJIT_NUMBER_OF_FLOAT_REGISTERS + 1)
-static SLJIT_CONST sljit_ub reg_map[SLJIT_NUMBER_OF_REGISTERS + 7] = {
+static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 7] = {
0, 3, 4, 5, 6, 7, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 1, 8, 9, 10, 31, 12
};
@@ -236,7 +240,7 @@ SLJIT_API_FUNC_ATTRIBUTE void sljit_set_function_context(void** func_ptr, struct
}
#endif
-static sljit_si push_inst(struct sljit_compiler *compiler, sljit_ins ins)
+static sljit_s32 push_inst(struct sljit_compiler *compiler, sljit_ins ins)
{
sljit_ins *ptr = (sljit_ins*)ensure_buf(compiler, sizeof(sljit_ins));
FAIL_IF(!ptr);
@@ -245,7 +249,7 @@ static sljit_si push_inst(struct sljit_compiler *compiler, sljit_ins ins)
return SLJIT_SUCCESS;
}
-static SLJIT_INLINE sljit_si detect_jump_type(struct sljit_jump *jump, sljit_ins *code_ptr, sljit_ins *code)
+static SLJIT_INLINE sljit_s32 detect_jump_type(struct sljit_jump *jump, sljit_ins *code_ptr, sljit_ins *code)
{
sljit_sw diff;
sljit_uw target_addr;
@@ -571,32 +575,32 @@ ALT_FORM6 0x200000 */
#define STACK_LOAD LD
#endif
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compiler *compiler,
- sljit_si options, sljit_si args, sljit_si scratches, sljit_si saveds,
- sljit_si fscratches, sljit_si fsaveds, sljit_si local_size)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compiler,
+ sljit_s32 options, sljit_s32 args, sljit_s32 scratches, sljit_s32 saveds,
+ sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
{
- sljit_si i, tmp, offs;
+ sljit_s32 i, tmp, offs;
CHECK_ERROR();
CHECK(check_sljit_emit_enter(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size));
set_emit_enter(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size);
FAIL_IF(push_inst(compiler, MFLR | D(0)));
- offs = -(sljit_si)(sizeof(sljit_sw));
+ offs = -(sljit_s32)(sizeof(sljit_sw));
FAIL_IF(push_inst(compiler, STACK_STORE | S(TMP_ZERO) | A(SLJIT_SP) | IMM(offs)));
tmp = saveds < SLJIT_NUMBER_OF_SAVED_REGISTERS ? (SLJIT_S0 + 1 - saveds) : SLJIT_FIRST_SAVED_REG;
for (i = SLJIT_S0; i >= tmp; i--) {
- offs -= (sljit_si)(sizeof(sljit_sw));
+ offs -= (sljit_s32)(sizeof(sljit_sw));
FAIL_IF(push_inst(compiler, STACK_STORE | S(i) | A(SLJIT_SP) | IMM(offs)));
}
for (i = scratches; i >= SLJIT_FIRST_SAVED_REG; i--) {
- offs -= (sljit_si)(sizeof(sljit_sw));
+ offs -= (sljit_s32)(sizeof(sljit_sw));
FAIL_IF(push_inst(compiler, STACK_STORE | S(i) | A(SLJIT_SP) | IMM(offs)));
}
- SLJIT_ASSERT(offs == -(sljit_si)GET_SAVED_REGISTERS_SIZE(compiler->scratches, compiler->saveds, 1));
+ SLJIT_ASSERT(offs == -(sljit_s32)GET_SAVED_REGISTERS_SIZE(compiler->scratches, compiler->saveds, 1));
#if (defined SLJIT_PPC_STACK_FRAME_V2 && SLJIT_PPC_STACK_FRAME_V2)
FAIL_IF(push_inst(compiler, STACK_STORE | S(0) | A(SLJIT_SP) | IMM(2 * sizeof(sljit_sw))));
@@ -635,9 +639,9 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compiler *compil
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_set_context(struct sljit_compiler *compiler,
- sljit_si options, sljit_si args, sljit_si scratches, sljit_si saveds,
- sljit_si fscratches, sljit_si fsaveds, sljit_si local_size)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_set_context(struct sljit_compiler *compiler,
+ sljit_s32 options, sljit_s32 args, sljit_s32 scratches, sljit_s32 saveds,
+ sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
{
CHECK_ERROR();
CHECK(check_sljit_set_context(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size));
@@ -648,9 +652,9 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_set_context(struct sljit_compiler *compi
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_return(struct sljit_compiler *compiler, sljit_si op, sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 src, sljit_sw srcw)
{
- sljit_si i, tmp, offs;
+ sljit_s32 i, tmp, offs;
CHECK_ERROR();
CHECK(check_sljit_emit_return(compiler, op, src, srcw));
@@ -670,18 +674,18 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_return(struct sljit_compiler *compi
FAIL_IF(push_inst(compiler, STACK_LOAD | D(0) | A(SLJIT_SP) | IMM(sizeof(sljit_sw))));
#endif
- offs = -(sljit_si)GET_SAVED_REGISTERS_SIZE(compiler->scratches, compiler->saveds, 1);
+ offs = -(sljit_s32)GET_SAVED_REGISTERS_SIZE(compiler->scratches, compiler->saveds, 1);
tmp = compiler->scratches;
for (i = SLJIT_FIRST_SAVED_REG; i <= tmp; i++) {
FAIL_IF(push_inst(compiler, STACK_LOAD | D(i) | A(SLJIT_SP) | IMM(offs)));
- offs += (sljit_si)(sizeof(sljit_sw));
+ offs += (sljit_s32)(sizeof(sljit_sw));
}
tmp = compiler->saveds < SLJIT_NUMBER_OF_SAVED_REGISTERS ? (SLJIT_S0 + 1 - compiler->saveds) : SLJIT_FIRST_SAVED_REG;
for (i = tmp; i <= SLJIT_S0; i++) {
FAIL_IF(push_inst(compiler, STACK_LOAD | D(i) | A(SLJIT_SP) | IMM(offs)));
- offs += (sljit_si)(sizeof(sljit_sw));
+ offs += (sljit_s32)(sizeof(sljit_sw));
}
FAIL_IF(push_inst(compiler, STACK_LOAD | D(TMP_ZERO) | A(SLJIT_SP) | IMM(offs)));
@@ -722,7 +726,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_return(struct sljit_compiler *compi
(((inst) & ~(INT_ALIGNED | UPDATE_REQ)) | (((flags) & MEM_MASK) <= GPR_REG ? D(reg) : FD(reg)))
#endif
-static SLJIT_CONST sljit_ins data_transfer_insts[64 + 8] = {
+static const sljit_ins data_transfer_insts[64 + 8] = {
/* -------- Unsigned -------- */
@@ -841,7 +845,7 @@ static SLJIT_CONST sljit_ins data_transfer_insts[64 + 8] = {
#undef ARCH_32_64
/* Simple cases, (no caching is required). */
-static sljit_si getput_arg_fast(struct sljit_compiler *compiler, sljit_si inp_flags, sljit_si reg, sljit_si arg, sljit_sw argw)
+static sljit_s32 getput_arg_fast(struct sljit_compiler *compiler, sljit_s32 inp_flags, sljit_s32 reg, sljit_s32 arg, sljit_sw argw)
{
sljit_ins inst;
@@ -891,7 +895,7 @@ static sljit_si getput_arg_fast(struct sljit_compiler *compiler, sljit_si inp_fl
/* See getput_arg below.
Note: can_cache is called only for binary operators. Those operator always
uses word arguments without write back. */
-static sljit_si can_cache(sljit_si arg, sljit_sw argw, sljit_si next_arg, sljit_sw next_argw)
+static sljit_s32 can_cache(sljit_s32 arg, sljit_sw argw, sljit_s32 next_arg, sljit_sw next_argw)
{
sljit_sw high_short, next_high_short;
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
@@ -940,9 +944,9 @@ static sljit_si can_cache(sljit_si arg, sljit_sw argw, sljit_si next_arg, sljit_
#endif
/* Emit the necessary instructions. See can_cache above. */
-static sljit_si getput_arg(struct sljit_compiler *compiler, sljit_si inp_flags, sljit_si reg, sljit_si arg, sljit_sw argw, sljit_si next_arg, sljit_sw next_argw)
+static sljit_s32 getput_arg(struct sljit_compiler *compiler, sljit_s32 inp_flags, sljit_s32 reg, sljit_s32 arg, sljit_sw argw, sljit_s32 next_arg, sljit_sw next_argw)
{
- sljit_si tmp_r;
+ sljit_s32 tmp_r;
sljit_ins inst;
sljit_sw high_short, next_high_short;
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
@@ -992,7 +996,7 @@ static sljit_si getput_arg(struct sljit_compiler *compiler, sljit_si inp_flags,
#endif
arg &= REG_MASK;
- high_short = (sljit_si)(argw + ((argw & 0x8000) << 1)) & ~0xffff;
+ high_short = (sljit_s32)(argw + ((argw & 0x8000) << 1)) & ~0xffff;
/* The getput_arg_fast should handle this otherwise. */
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
SLJIT_ASSERT(high_short && high_short <= 0x7fffffffl && high_short >= -0x80000000l);
@@ -1010,7 +1014,7 @@ static sljit_si getput_arg(struct sljit_compiler *compiler, sljit_si inp_flags,
}
else if (compiler->cache_arg != (SLJIT_MEM | arg) || high_short != compiler->cache_argw) {
if ((next_arg & SLJIT_MEM) && !(next_arg & OFFS_REG_MASK)) {
- next_high_short = (sljit_si)(next_argw + ((next_argw & 0x8000) << 1)) & ~0xffff;
+ next_high_short = (sljit_s32)(next_argw + ((next_argw & 0x8000) << 1)) & ~0xffff;
if (high_short == next_high_short) {
compiler->cache_arg = SLJIT_MEM | arg;
compiler->cache_argw = high_short;
@@ -1107,27 +1111,27 @@ static sljit_si getput_arg(struct sljit_compiler *compiler, sljit_si inp_flags,
#endif
}
-static SLJIT_INLINE sljit_si emit_op_mem2(struct sljit_compiler *compiler, sljit_si flags, sljit_si reg, sljit_si arg1, sljit_sw arg1w, sljit_si arg2, sljit_sw arg2w)
+static SLJIT_INLINE sljit_s32 emit_op_mem2(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg, sljit_s32 arg1, sljit_sw arg1w, sljit_s32 arg2, sljit_sw arg2w)
{
if (getput_arg_fast(compiler, flags, reg, arg1, arg1w))
return compiler->error;
return getput_arg(compiler, flags, reg, arg1, arg1w, arg2, arg2w);
}
-static sljit_si emit_op(struct sljit_compiler *compiler, sljit_si op, sljit_si input_flags,
- sljit_si dst, sljit_sw dstw,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+static sljit_s32 emit_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 input_flags,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
/* arg1 goes to TMP_REG1 or src reg
arg2 goes to TMP_REG2, imm or src reg
TMP_REG3 can be used for caching
result goes to TMP_REG2, so put result can use TMP_REG1 and TMP_REG3. */
- sljit_si dst_r;
- sljit_si src1_r;
- sljit_si src2_r;
- sljit_si sugg_src2_r = TMP_REG2;
- sljit_si flags = input_flags & (ALT_FORM1 | ALT_FORM2 | ALT_FORM3 | ALT_FORM4 | ALT_FORM5 | ALT_FORM6 | ALT_SIGN_EXT | ALT_SET_FLAGS);
+ sljit_s32 dst_r;
+ sljit_s32 src1_r;
+ sljit_s32 src2_r;
+ sljit_s32 sugg_src2_r = TMP_REG2;
+ sljit_s32 flags = input_flags & (ALT_FORM1 | ALT_FORM2 | ALT_FORM3 | ALT_FORM4 | ALT_FORM5 | ALT_FORM6 | ALT_SIGN_EXT | ALT_SET_FLAGS);
if (!(input_flags & ALT_KEEP_CACHE)) {
compiler->cache_arg = 0;
@@ -1136,14 +1140,14 @@ static sljit_si emit_op(struct sljit_compiler *compiler, sljit_si op, sljit_si i
/* Destination check. */
if (SLJIT_UNLIKELY(dst == SLJIT_UNUSED)) {
- if (op >= SLJIT_MOV && op <= SLJIT_MOVU_SI && !(src2 & SLJIT_MEM))
+ if (op >= SLJIT_MOV && op <= SLJIT_MOVU_S32 && !(src2 & SLJIT_MEM))
return SLJIT_SUCCESS;
dst_r = TMP_REG2;
}
else if (FAST_IS_REG(dst)) {
dst_r = dst;
flags |= REG_DEST;
- if (op >= SLJIT_MOV && op <= SLJIT_MOVU_SI)
+ if (op >= SLJIT_MOV && op <= SLJIT_MOVU_S32)
sugg_src2_r = dst_r;
}
else {
@@ -1178,7 +1182,7 @@ static sljit_si emit_op(struct sljit_compiler *compiler, sljit_si op, sljit_si i
if (FAST_IS_REG(src2)) {
src2_r = src2;
flags |= REG2_SOURCE;
- if (!(flags & REG_DEST) && op >= SLJIT_MOV && op <= SLJIT_MOVU_SI)
+ if (!(flags & REG_DEST) && op >= SLJIT_MOV && op <= SLJIT_MOVU_S32)
dst_r = src2_r;
}
else if (src2 & SLJIT_IMM) {
@@ -1243,10 +1247,10 @@ static sljit_si emit_op(struct sljit_compiler *compiler, sljit_si op, sljit_si i
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op0(struct sljit_compiler *compiler, sljit_si op)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op0(struct sljit_compiler *compiler, sljit_s32 op)
{
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
- sljit_si int_op = op & SLJIT_INT_OP;
+ sljit_s32 int_op = op & SLJIT_I32_OP;
#endif
CHECK_ERROR();
@@ -1257,33 +1261,33 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op0(struct sljit_compiler *compiler
case SLJIT_BREAKPOINT:
case SLJIT_NOP:
return push_inst(compiler, NOP);
- case SLJIT_LUMUL:
- case SLJIT_LSMUL:
+ case SLJIT_LMUL_UW:
+ case SLJIT_LMUL_SW:
FAIL_IF(push_inst(compiler, OR | S(SLJIT_R0) | A(TMP_REG1) | B(SLJIT_R0)));
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
FAIL_IF(push_inst(compiler, MULLD | D(SLJIT_R0) | A(TMP_REG1) | B(SLJIT_R1)));
- return push_inst(compiler, (op == SLJIT_LUMUL ? MULHDU : MULHD) | D(SLJIT_R1) | A(TMP_REG1) | B(SLJIT_R1));
+ return push_inst(compiler, (op == SLJIT_LMUL_UW ? MULHDU : MULHD) | D(SLJIT_R1) | A(TMP_REG1) | B(SLJIT_R1));
#else
FAIL_IF(push_inst(compiler, MULLW | D(SLJIT_R0) | A(TMP_REG1) | B(SLJIT_R1)));
- return push_inst(compiler, (op == SLJIT_LUMUL ? MULHWU : MULHW) | D(SLJIT_R1) | A(TMP_REG1) | B(SLJIT_R1));
+ return push_inst(compiler, (op == SLJIT_LMUL_UW ? MULHWU : MULHW) | D(SLJIT_R1) | A(TMP_REG1) | B(SLJIT_R1));
#endif
- case SLJIT_UDIVMOD:
- case SLJIT_SDIVMOD:
+ case SLJIT_DIVMOD_UW:
+ case SLJIT_DIVMOD_SW:
FAIL_IF(push_inst(compiler, OR | S(SLJIT_R0) | A(TMP_REG1) | B(SLJIT_R0)));
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
- FAIL_IF(push_inst(compiler, (int_op ? (op == SLJIT_UDIVMOD ? DIVWU : DIVW) : (op == SLJIT_UDIVMOD ? DIVDU : DIVD)) | D(SLJIT_R0) | A(SLJIT_R0) | B(SLJIT_R1)));
+ FAIL_IF(push_inst(compiler, (int_op ? (op == SLJIT_DIVMOD_UW ? DIVWU : DIVW) : (op == SLJIT_DIVMOD_UW ? DIVDU : DIVD)) | D(SLJIT_R0) | A(SLJIT_R0) | B(SLJIT_R1)));
FAIL_IF(push_inst(compiler, (int_op ? MULLW : MULLD) | D(SLJIT_R1) | A(SLJIT_R0) | B(SLJIT_R1)));
#else
- FAIL_IF(push_inst(compiler, (op == SLJIT_UDIVMOD ? DIVWU : DIVW) | D(SLJIT_R0) | A(SLJIT_R0) | B(SLJIT_R1)));
+ FAIL_IF(push_inst(compiler, (op == SLJIT_DIVMOD_UW ? DIVWU : DIVW) | D(SLJIT_R0) | A(SLJIT_R0) | B(SLJIT_R1)));
FAIL_IF(push_inst(compiler, MULLW | D(SLJIT_R1) | A(SLJIT_R0) | B(SLJIT_R1)));
#endif
return push_inst(compiler, SUBF | D(SLJIT_R1) | A(SLJIT_R1) | B(TMP_REG1));
- case SLJIT_UDIVI:
- case SLJIT_SDIVI:
+ case SLJIT_DIV_UW:
+ case SLJIT_DIV_SW:
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
- return push_inst(compiler, (int_op ? (op == SLJIT_UDIVI ? DIVWU : DIVW) : (op == SLJIT_UDIVI ? DIVDU : DIVD)) | D(SLJIT_R0) | A(SLJIT_R0) | B(SLJIT_R1));
+ return push_inst(compiler, (int_op ? (op == SLJIT_DIV_UW ? DIVWU : DIVW) : (op == SLJIT_DIV_UW ? DIVDU : DIVD)) | D(SLJIT_R0) | A(SLJIT_R0) | B(SLJIT_R1));
#else
- return push_inst(compiler, (op == SLJIT_UDIVI ? DIVWU : DIVW) | D(SLJIT_R0) | A(SLJIT_R0) | B(SLJIT_R1));
+ return push_inst(compiler, (op == SLJIT_DIV_UW ? DIVWU : DIVW) | D(SLJIT_R0) | A(SLJIT_R0) | B(SLJIT_R1));
#endif
}
@@ -1293,12 +1297,12 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op0(struct sljit_compiler *compiler
#define EMIT_MOV(type, type_flags, type_cast) \
emit_op(compiler, (src & SLJIT_IMM) ? SLJIT_MOV : type, flags | (type_flags), dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? type_cast srcw : srcw)
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op1(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
{
- sljit_si flags = GET_FLAGS(op) ? ALT_SET_FLAGS : 0;
- sljit_si op_flags = GET_ALL_FLAGS(op);
+ sljit_s32 flags = GET_FLAGS(op) ? ALT_SET_FLAGS : 0;
+ sljit_s32 op_flags = GET_ALL_FLAGS(op);
CHECK_ERROR();
CHECK(check_sljit_emit_op1(compiler, op, dst, dstw, src, srcw));
@@ -1312,21 +1316,21 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op1(struct sljit_compiler *compiler
if (op_flags & SLJIT_SET_O)
FAIL_IF(push_inst(compiler, MTXER | S(TMP_ZERO)));
- if (op_flags & SLJIT_INT_OP) {
+ if (op_flags & SLJIT_I32_OP) {
if (op < SLJIT_NOT) {
if (FAST_IS_REG(src) && src == dst) {
if (!TYPE_CAST_NEEDED(op))
return SLJIT_SUCCESS;
}
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
- if (op == SLJIT_MOV_SI && (src & SLJIT_MEM))
- op = SLJIT_MOV_UI;
- if (op == SLJIT_MOVU_SI && (src & SLJIT_MEM))
- op = SLJIT_MOVU_UI;
- if (op == SLJIT_MOV_UI && (src & SLJIT_IMM))
- op = SLJIT_MOV_SI;
- if (op == SLJIT_MOVU_UI && (src & SLJIT_IMM))
- op = SLJIT_MOVU_SI;
+ if (op == SLJIT_MOV_S32 && (src & SLJIT_MEM))
+ op = SLJIT_MOV_U32;
+ if (op == SLJIT_MOVU_S32 && (src & SLJIT_MEM))
+ op = SLJIT_MOVU_U32;
+ if (op == SLJIT_MOV_U32 && (src & SLJIT_IMM))
+ op = SLJIT_MOV_S32;
+ if (op == SLJIT_MOVU_U32 && (src & SLJIT_IMM))
+ op = SLJIT_MOVU_S32;
#endif
}
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
@@ -1334,7 +1338,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op1(struct sljit_compiler *compiler
/* Most operations expect sign extended arguments. */
flags |= INT_DATA | SIGNED_DATA;
if (src & SLJIT_IMM)
- srcw = (sljit_si)srcw;
+ srcw = (sljit_s32)srcw;
}
#endif
}
@@ -1343,58 +1347,58 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op1(struct sljit_compiler *compiler
case SLJIT_MOV:
case SLJIT_MOV_P:
#if (defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32)
- case SLJIT_MOV_UI:
- case SLJIT_MOV_SI:
+ case SLJIT_MOV_U32:
+ case SLJIT_MOV_S32:
#endif
return emit_op(compiler, SLJIT_MOV, flags | WORD_DATA, dst, dstw, TMP_REG1, 0, src, srcw);
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
- case SLJIT_MOV_UI:
- return EMIT_MOV(SLJIT_MOV_UI, INT_DATA, (sljit_ui));
+ case SLJIT_MOV_U32:
+ return EMIT_MOV(SLJIT_MOV_U32, INT_DATA, (sljit_u32));
- case SLJIT_MOV_SI:
- return EMIT_MOV(SLJIT_MOV_SI, INT_DATA | SIGNED_DATA, (sljit_si));
+ case SLJIT_MOV_S32:
+ return EMIT_MOV(SLJIT_MOV_S32, INT_DATA | SIGNED_DATA, (sljit_s32));
#endif
- case SLJIT_MOV_UB:
- return EMIT_MOV(SLJIT_MOV_UB, BYTE_DATA, (sljit_ub));
+ case SLJIT_MOV_U8:
+ return EMIT_MOV(SLJIT_MOV_U8, BYTE_DATA, (sljit_u8));
- case SLJIT_MOV_SB:
- return EMIT_MOV(SLJIT_MOV_SB, BYTE_DATA | SIGNED_DATA, (sljit_sb));
+ case SLJIT_MOV_S8:
+ return EMIT_MOV(SLJIT_MOV_S8, BYTE_DATA | SIGNED_DATA, (sljit_s8));
- case SLJIT_MOV_UH:
- return EMIT_MOV(SLJIT_MOV_UH, HALF_DATA, (sljit_uh));
+ case SLJIT_MOV_U16:
+ return EMIT_MOV(SLJIT_MOV_U16, HALF_DATA, (sljit_u16));
- case SLJIT_MOV_SH:
- return EMIT_MOV(SLJIT_MOV_SH, HALF_DATA | SIGNED_DATA, (sljit_sh));
+ case SLJIT_MOV_S16:
+ return EMIT_MOV(SLJIT_MOV_S16, HALF_DATA | SIGNED_DATA, (sljit_s16));
case SLJIT_MOVU:
case SLJIT_MOVU_P:
#if (defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32)
- case SLJIT_MOVU_UI:
- case SLJIT_MOVU_SI:
+ case SLJIT_MOVU_U32:
+ case SLJIT_MOVU_S32:
#endif
return emit_op(compiler, SLJIT_MOV, flags | WORD_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, srcw);
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
- case SLJIT_MOVU_UI:
- return EMIT_MOV(SLJIT_MOV_UI, INT_DATA | WRITE_BACK, (sljit_ui));
+ case SLJIT_MOVU_U32:
+ return EMIT_MOV(SLJIT_MOV_U32, INT_DATA | WRITE_BACK, (sljit_u32));
- case SLJIT_MOVU_SI:
- return EMIT_MOV(SLJIT_MOV_SI, INT_DATA | SIGNED_DATA | WRITE_BACK, (sljit_si));
+ case SLJIT_MOVU_S32:
+ return EMIT_MOV(SLJIT_MOV_S32, INT_DATA | SIGNED_DATA | WRITE_BACK, (sljit_s32));
#endif
- case SLJIT_MOVU_UB:
- return EMIT_MOV(SLJIT_MOV_UB, BYTE_DATA | WRITE_BACK, (sljit_ub));
+ case SLJIT_MOVU_U8:
+ return EMIT_MOV(SLJIT_MOV_U8, BYTE_DATA | WRITE_BACK, (sljit_u8));
- case SLJIT_MOVU_SB:
- return EMIT_MOV(SLJIT_MOV_SB, BYTE_DATA | SIGNED_DATA | WRITE_BACK, (sljit_sb));
+ case SLJIT_MOVU_S8:
+ return EMIT_MOV(SLJIT_MOV_S8, BYTE_DATA | SIGNED_DATA | WRITE_BACK, (sljit_s8));
- case SLJIT_MOVU_UH:
- return EMIT_MOV(SLJIT_MOV_UH, HALF_DATA | WRITE_BACK, (sljit_uh));
+ case SLJIT_MOVU_U16:
+ return EMIT_MOV(SLJIT_MOV_U16, HALF_DATA | WRITE_BACK, (sljit_u16));
- case SLJIT_MOVU_SH:
- return EMIT_MOV(SLJIT_MOV_SH, HALF_DATA | SIGNED_DATA | WRITE_BACK, (sljit_sh));
+ case SLJIT_MOVU_S16:
+ return EMIT_MOV(SLJIT_MOV_S16, HALF_DATA | SIGNED_DATA | WRITE_BACK, (sljit_s16));
case SLJIT_NOT:
return emit_op(compiler, SLJIT_NOT, flags, dst, dstw, TMP_REG1, 0, src, srcw);
@@ -1404,7 +1408,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op1(struct sljit_compiler *compiler
case SLJIT_CLZ:
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
- return emit_op(compiler, SLJIT_CLZ, flags | (!(op_flags & SLJIT_INT_OP) ? 0 : ALT_FORM1), dst, dstw, TMP_REG1, 0, src, srcw);
+ return emit_op(compiler, SLJIT_CLZ, flags | (!(op_flags & SLJIT_I32_OP) ? 0 : ALT_FORM1), dst, dstw, TMP_REG1, 0, src, srcw);
#else
return emit_op(compiler, SLJIT_CLZ, flags, dst, dstw, TMP_REG1, 0, src, srcw);
#endif
@@ -1448,12 +1452,12 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op1(struct sljit_compiler *compiler
((src) & SLJIT_IMM)
#endif
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op2(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
- sljit_si flags = GET_FLAGS(op) ? ALT_SET_FLAGS : 0;
+ sljit_s32 flags = GET_FLAGS(op) ? ALT_SET_FLAGS : 0;
CHECK_ERROR();
CHECK(check_sljit_emit_op2(compiler, op, dst, dstw, src1, src1w, src2, src2w));
@@ -1467,13 +1471,13 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op2(struct sljit_compiler *compiler
src2 = TMP_ZERO;
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
- if (op & SLJIT_INT_OP) {
+ if (op & SLJIT_I32_OP) {
/* Most operations expect sign extended arguments. */
flags |= INT_DATA | SIGNED_DATA;
if (src1 & SLJIT_IMM)
- src1w = (sljit_si)(src1w);
+ src1w = (sljit_s32)(src1w);
if (src2 & SLJIT_IMM)
- src2w = (sljit_si)(src2w);
+ src2w = (sljit_s32)(src2w);
if (GET_FLAGS(op))
flags |= ALT_SIGN_EXT;
}
@@ -1549,7 +1553,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op2(struct sljit_compiler *compiler
}
if (dst == SLJIT_UNUSED && (op & (SLJIT_SET_E | SLJIT_SET_U | SLJIT_SET_S)) && !(op & (SLJIT_SET_O | SLJIT_SET_C))) {
if (!(op & SLJIT_SET_U)) {
- /* We know ALT_SIGN_EXT is set if it is an SLJIT_INT_OP on 64 bit systems. */
+ /* We know ALT_SIGN_EXT is set if it is an SLJIT_I32_OP on 64 bit systems. */
if (TEST_SL_IMM(src2, src2w)) {
compiler->imm = src2w & 0xffff;
return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM2, dst, dstw, src1, src1w, TMP_REG2, 0);
@@ -1560,7 +1564,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op2(struct sljit_compiler *compiler
}
}
if (!(op & (SLJIT_SET_E | SLJIT_SET_S))) {
- /* We know ALT_SIGN_EXT is set if it is an SLJIT_INT_OP on 64 bit systems. */
+ /* We know ALT_SIGN_EXT is set if it is an SLJIT_I32_OP on 64 bit systems. */
if (TEST_UL_IMM(src2, src2w)) {
compiler->imm = src2w & 0xffff;
return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM3, dst, dstw, src1, src1w, TMP_REG2, 0);
@@ -1579,7 +1583,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op2(struct sljit_compiler *compiler
return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM3, dst, dstw, src1, src1w, TMP_REG2, 0);
}
}
- /* We know ALT_SIGN_EXT is set if it is an SLJIT_INT_OP on 64 bit systems. */
+ /* We know ALT_SIGN_EXT is set if it is an SLJIT_I32_OP on 64 bit systems. */
return emit_op(compiler, SLJIT_SUB, flags | (!(op & SLJIT_SET_U) ? 0 : ALT_FORM6), dst, dstw, src1, src1w, src2, src2w);
case SLJIT_SUBC:
@@ -1587,7 +1591,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op2(struct sljit_compiler *compiler
case SLJIT_MUL:
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
- if (op & SLJIT_INT_OP)
+ if (op & SLJIT_I32_OP)
flags |= ALT_FORM2;
#endif
if (!GET_FLAGS(op)) {
@@ -1643,7 +1647,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op2(struct sljit_compiler *compiler
case SLJIT_SHL:
case SLJIT_LSHR:
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
- if (op & SLJIT_INT_OP)
+ if (op & SLJIT_I32_OP)
flags |= ALT_FORM2;
#endif
if (src2 & SLJIT_IMM) {
@@ -1656,20 +1660,20 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op2(struct sljit_compiler *compiler
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_get_register_index(sljit_si reg)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 reg)
{
CHECK_REG_INDEX(check_sljit_get_register_index(reg));
return reg_map[reg];
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_get_float_register_index(sljit_si reg)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_float_register_index(sljit_s32 reg)
{
CHECK_REG_INDEX(check_sljit_get_float_register_index(reg));
return reg;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_custom(struct sljit_compiler *compiler,
- void *instruction, sljit_si size)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *compiler,
+ void *instruction, sljit_s32 size)
{
CHECK_ERROR();
CHECK(check_sljit_emit_op_custom(compiler, instruction, size));
@@ -1681,7 +1685,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_custom(struct sljit_compiler *co
/* Floating point operators */
/* --------------------------------------------------------------------- */
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_is_fpu_available(void)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_is_fpu_available(void)
{
#ifdef SLJIT_IS_FPU_AVAILABLE
return SLJIT_IS_FPU_AVAILABLE;
@@ -1691,8 +1695,8 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_is_fpu_available(void)
#endif
}
-#define FLOAT_DATA(op) (DOUBLE_DATA | ((op & SLJIT_SINGLE_OP) >> 6))
-#define SELECT_FOP(op, single, double) ((op & SLJIT_SINGLE_OP) ? single : double)
+#define FLOAT_DATA(op) (DOUBLE_DATA | ((op & SLJIT_F32_OP) >> 6))
+#define SELECT_FOP(op, single, double) ((op & SLJIT_F32_OP) ? single : double)
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
#define FLOAT_TMP_MEM_OFFSET (6 * sizeof(sljit_sw))
@@ -1709,9 +1713,9 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_is_fpu_available(void)
#endif /* SLJIT_CONFIG_PPC_64 */
-static SLJIT_INLINE sljit_si sljit_emit_fop1_convw_fromd(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw)
+static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
{
if (src & SLJIT_MEM) {
/* We can ignore the temporary data store on the stack from caching point of view. */
@@ -1721,12 +1725,12 @@ static SLJIT_INLINE sljit_si sljit_emit_fop1_convw_fromd(struct sljit_compiler *
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
op = GET_OPCODE(op);
- FAIL_IF(push_inst(compiler, (op == SLJIT_CONVI_FROMD ? FCTIWZ : FCTIDZ) | FD(TMP_FREG1) | FB(src)));
+ FAIL_IF(push_inst(compiler, (op == SLJIT_CONV_S32_FROM_F64 ? FCTIWZ : FCTIDZ) | FD(TMP_FREG1) | FB(src)));
if (dst == SLJIT_UNUSED)
return SLJIT_SUCCESS;
- if (op == SLJIT_CONVW_FROMD) {
+ if (op == SLJIT_CONV_SW_FROM_F64) {
if (FAST_IS_REG(dst)) {
FAIL_IF(emit_op_mem2(compiler, DOUBLE_DATA, TMP_FREG1, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET, 0, 0));
return emit_op_mem2(compiler, WORD_DATA | LOAD_DATA, dst, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET, 0, 0);
@@ -1777,21 +1781,21 @@ static SLJIT_INLINE sljit_si sljit_emit_fop1_convw_fromd(struct sljit_compiler *
return push_inst(compiler, STFIWX | FS(TMP_FREG1) | A(dst & REG_MASK) | B(dstw));
}
-static SLJIT_INLINE sljit_si sljit_emit_fop1_convd_fromw(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw)
+static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_sw(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
{
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
- sljit_si dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
+ sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
if (src & SLJIT_IMM) {
- if (GET_OPCODE(op) == SLJIT_CONVD_FROMI)
- srcw = (sljit_si)srcw;
+ if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_S32)
+ srcw = (sljit_s32)srcw;
FAIL_IF(load_immediate(compiler, TMP_REG1, srcw));
src = TMP_REG1;
}
- else if (GET_OPCODE(op) == SLJIT_CONVD_FROMI) {
+ else if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_S32) {
if (FAST_IS_REG(src))
FAIL_IF(push_inst(compiler, EXTSW | S(src) | A(TMP_REG1)));
else
@@ -1810,14 +1814,14 @@ static SLJIT_INLINE sljit_si sljit_emit_fop1_convd_fromw(struct sljit_compiler *
if (dst & SLJIT_MEM)
return emit_op_mem2(compiler, FLOAT_DATA(op), TMP_FREG1, dst, dstw, 0, 0);
- if (op & SLJIT_SINGLE_OP)
+ if (op & SLJIT_F32_OP)
return push_inst(compiler, FRSP | FD(dst_r) | FB(dst_r));
return SLJIT_SUCCESS;
#else
- sljit_si dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
- sljit_si invert_sign = 1;
+ sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
+ sljit_s32 invert_sign = 1;
if (src & SLJIT_IMM) {
FAIL_IF(load_immediate(compiler, TMP_REG1, srcw ^ 0x80000000));
@@ -1848,16 +1852,16 @@ static SLJIT_INLINE sljit_si sljit_emit_fop1_convd_fromw(struct sljit_compiler *
if (dst & SLJIT_MEM)
return emit_op_mem2(compiler, FLOAT_DATA(op), TMP_FREG1, dst, dstw, 0, 0);
- if (op & SLJIT_SINGLE_OP)
+ if (op & SLJIT_F32_OP)
return push_inst(compiler, FRSP | FD(dst_r) | FB(dst_r));
return SLJIT_SUCCESS;
#endif
}
-static SLJIT_INLINE sljit_si sljit_emit_fop1_cmp(struct sljit_compiler *compiler, sljit_si op,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+static SLJIT_INLINE sljit_s32 sljit_emit_fop1_cmp(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
if (src1 & SLJIT_MEM) {
FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src1, src1w, src2, src2w));
@@ -1872,21 +1876,21 @@ static SLJIT_INLINE sljit_si sljit_emit_fop1_cmp(struct sljit_compiler *compiler
return push_inst(compiler, FCMPU | CRD(4) | FA(src1) | FB(src2));
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop1(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
{
- sljit_si dst_r;
+ sljit_s32 dst_r;
CHECK_ERROR();
compiler->cache_arg = 0;
compiler->cache_argw = 0;
- SLJIT_COMPILE_ASSERT((SLJIT_SINGLE_OP == 0x100) && !(DOUBLE_DATA & 0x4), float_transfer_bit_error);
+ SLJIT_COMPILE_ASSERT((SLJIT_F32_OP == 0x100) && !(DOUBLE_DATA & 0x4), float_transfer_bit_error);
SELECT_FOP1_OPERATION_WITH_CHECKS(compiler, op, dst, dstw, src, srcw);
- if (GET_OPCODE(op) == SLJIT_CONVD_FROMS)
- op ^= SLJIT_SINGLE_OP;
+ if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_F32)
+ op ^= SLJIT_F32_OP;
dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
@@ -1896,14 +1900,14 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop1(struct sljit_compiler *compile
}
switch (GET_OPCODE(op)) {
- case SLJIT_CONVD_FROMS:
- op ^= SLJIT_SINGLE_OP;
- if (op & SLJIT_SINGLE_OP) {
+ case SLJIT_CONV_F64_FROM_F32:
+ op ^= SLJIT_F32_OP;
+ if (op & SLJIT_F32_OP) {
FAIL_IF(push_inst(compiler, FRSP | FD(dst_r) | FB(src)));
break;
}
/* Fall through. */
- case SLJIT_DMOV:
+ case SLJIT_MOV_F64:
if (src != dst_r) {
if (dst_r != TMP_FREG1)
FAIL_IF(push_inst(compiler, FMR | FD(dst_r) | FB(src)));
@@ -1911,10 +1915,10 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop1(struct sljit_compiler *compile
dst_r = src;
}
break;
- case SLJIT_DNEG:
+ case SLJIT_NEG_F64:
FAIL_IF(push_inst(compiler, FNEG | FD(dst_r) | FB(src)));
break;
- case SLJIT_DABS:
+ case SLJIT_ABS_F64:
FAIL_IF(push_inst(compiler, FABS | FD(dst_r) | FB(src)));
break;
}
@@ -1924,12 +1928,12 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop1(struct sljit_compiler *compile
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop2(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
- sljit_si dst_r, flags = 0;
+ sljit_s32 dst_r, flags = 0;
CHECK_ERROR();
CHECK(check_sljit_emit_fop2(compiler, op, dst, dstw, src1, src1w, src2, src2w));
@@ -1979,19 +1983,19 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop2(struct sljit_compiler *compile
src2 = TMP_FREG2;
switch (GET_OPCODE(op)) {
- case SLJIT_DADD:
+ case SLJIT_ADD_F64:
FAIL_IF(push_inst(compiler, SELECT_FOP(op, FADDS, FADD) | FD(dst_r) | FA(src1) | FB(src2)));
break;
- case SLJIT_DSUB:
+ case SLJIT_SUB_F64:
FAIL_IF(push_inst(compiler, SELECT_FOP(op, FSUBS, FSUB) | FD(dst_r) | FA(src1) | FB(src2)));
break;
- case SLJIT_DMUL:
+ case SLJIT_MUL_F64:
FAIL_IF(push_inst(compiler, SELECT_FOP(op, FMULS, FMUL) | FD(dst_r) | FA(src1) | FC(src2) /* FMUL use FC as src2 */));
break;
- case SLJIT_DDIV:
+ case SLJIT_DIV_F64:
FAIL_IF(push_inst(compiler, SELECT_FOP(op, FDIVS, FDIV) | FD(dst_r) | FA(src1) | FB(src2)));
break;
}
@@ -2009,7 +2013,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop2(struct sljit_compiler *compile
/* Other instructions */
/* --------------------------------------------------------------------- */
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_si dst, sljit_sw dstw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
{
CHECK_ERROR();
CHECK(check_sljit_emit_fast_enter(compiler, dst, dstw));
@@ -2027,7 +2031,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_enter(struct sljit_compiler *c
return emit_op(compiler, SLJIT_MOV, WORD_DATA, dst, dstw, TMP_REG1, 0, TMP_REG2, 0);
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_return(struct sljit_compiler *compiler, sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_return(struct sljit_compiler *compiler, sljit_s32 src, sljit_sw srcw)
{
CHECK_ERROR();
CHECK(check_sljit_emit_fast_return(compiler, src, srcw));
@@ -2065,7 +2069,7 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emit_label(struct sljit_compi
return label;
}
-static sljit_ins get_bo_bi_flags(sljit_si type)
+static sljit_ins get_bo_bi_flags(sljit_s32 type)
{
switch (type) {
case SLJIT_EQUAL:
@@ -2075,19 +2079,19 @@ static sljit_ins get_bo_bi_flags(sljit_si type)
return (4 << 21) | (2 << 16);
case SLJIT_LESS:
- case SLJIT_D_LESS:
+ case SLJIT_LESS_F64:
return (12 << 21) | ((4 + 0) << 16);
case SLJIT_GREATER_EQUAL:
- case SLJIT_D_GREATER_EQUAL:
+ case SLJIT_GREATER_EQUAL_F64:
return (4 << 21) | ((4 + 0) << 16);
case SLJIT_GREATER:
- case SLJIT_D_GREATER:
+ case SLJIT_GREATER_F64:
return (12 << 21) | ((4 + 1) << 16);
case SLJIT_LESS_EQUAL:
- case SLJIT_D_LESS_EQUAL:
+ case SLJIT_LESS_EQUAL_F64:
return (4 << 21) | ((4 + 1) << 16);
case SLJIT_SIG_LESS:
@@ -2110,16 +2114,16 @@ static sljit_ins get_bo_bi_flags(sljit_si type)
case SLJIT_MUL_NOT_OVERFLOW:
return (4 << 21) | (3 << 16);
- case SLJIT_D_EQUAL:
+ case SLJIT_EQUAL_F64:
return (12 << 21) | ((4 + 2) << 16);
- case SLJIT_D_NOT_EQUAL:
+ case SLJIT_NOT_EQUAL_F64:
return (4 << 21) | ((4 + 2) << 16);
- case SLJIT_D_UNORDERED:
+ case SLJIT_UNORDERED_F64:
return (12 << 21) | ((4 + 3) << 16);
- case SLJIT_D_ORDERED:
+ case SLJIT_ORDERED_F64:
return (4 << 21) | ((4 + 3) << 16);
default:
@@ -2128,7 +2132,7 @@ static sljit_ins get_bo_bi_flags(sljit_si type)
}
}
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, sljit_si type)
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, sljit_s32 type)
{
struct sljit_jump *jump;
sljit_ins bo_bi_flags;
@@ -2160,10 +2164,10 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compile
return jump;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_ijump(struct sljit_compiler *compiler, sljit_si type, sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 src, sljit_sw srcw)
{
struct sljit_jump *jump = NULL;
- sljit_si src_r;
+ sljit_s32 src_r;
CHECK_ERROR();
CHECK(check_sljit_emit_ijump(compiler, type, src, srcw));
@@ -2211,13 +2215,13 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_ijump(struct sljit_compiler *compil
#define INVERT_BIT(dst) \
FAIL_IF(push_inst(compiler, XORI | S(dst) | A(dst) | 0x1));
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw,
- sljit_si type)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw,
+ sljit_s32 type)
{
- sljit_si reg, input_flags;
- sljit_si flags = GET_ALL_FLAGS(op);
+ sljit_s32 reg, input_flags;
+ sljit_s32 flags = GET_ALL_FLAGS(op);
sljit_sw original_dstw = dstw;
CHECK_ERROR();
@@ -2235,7 +2239,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_flags(struct sljit_compiler *com
if (op >= SLJIT_ADD && (src & SLJIT_MEM)) {
ADJUST_LOCAL_OFFSET(src, srcw);
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
- input_flags = (flags & SLJIT_INT_OP) ? INT_DATA : WORD_DATA;
+ input_flags = (flags & SLJIT_I32_OP) ? INT_DATA : WORD_DATA;
#else
input_flags = WORD_DATA;
#endif
@@ -2255,23 +2259,23 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_flags(struct sljit_compiler *com
break;
case SLJIT_LESS:
- case SLJIT_D_LESS:
+ case SLJIT_LESS_F64:
GET_CR_BIT(4 + 0, reg);
break;
case SLJIT_GREATER_EQUAL:
- case SLJIT_D_GREATER_EQUAL:
+ case SLJIT_GREATER_EQUAL_F64:
GET_CR_BIT(4 + 0, reg);
INVERT_BIT(reg);
break;
case SLJIT_GREATER:
- case SLJIT_D_GREATER:
+ case SLJIT_GREATER_F64:
GET_CR_BIT(4 + 1, reg);
break;
case SLJIT_LESS_EQUAL:
- case SLJIT_D_LESS_EQUAL:
+ case SLJIT_LESS_EQUAL_F64:
GET_CR_BIT(4 + 1, reg);
INVERT_BIT(reg);
break;
@@ -2305,20 +2309,20 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_flags(struct sljit_compiler *com
INVERT_BIT(reg);
break;
- case SLJIT_D_EQUAL:
+ case SLJIT_EQUAL_F64:
GET_CR_BIT(4 + 2, reg);
break;
- case SLJIT_D_NOT_EQUAL:
+ case SLJIT_NOT_EQUAL_F64:
GET_CR_BIT(4 + 2, reg);
INVERT_BIT(reg);
break;
- case SLJIT_D_UNORDERED:
+ case SLJIT_UNORDERED_F64:
GET_CR_BIT(4 + 3, reg);
break;
- case SLJIT_D_ORDERED:
+ case SLJIT_ORDERED_F64:
GET_CR_BIT(4 + 3, reg);
INVERT_BIT(reg);
break;
@@ -2333,7 +2337,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_flags(struct sljit_compiler *com
if (op == SLJIT_MOV)
input_flags = WORD_DATA;
else {
- op = SLJIT_MOV_UI;
+ op = SLJIT_MOV_U32;
input_flags = INT_DATA;
}
#else
@@ -2352,10 +2356,10 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_flags(struct sljit_compiler *com
return sljit_emit_op2(compiler, op | flags, dst, original_dstw, src, srcw, TMP_REG2, 0);
}
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_si dst, sljit_sw dstw, sljit_sw init_value)
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw init_value)
{
struct sljit_const *const_;
- sljit_si reg;
+ sljit_s32 reg;
CHECK_ERROR_PTR();
CHECK_PTR(check_sljit_emit_const(compiler, dst, dstw, init_value));
diff --git a/src/3rdparty/pcre/sljit/sljitNativeSPARC_32.c b/src/3rdparty/pcre/sljit/sljitNativeSPARC_32.c
index 4a2e6293de..7e589a17c2 100644
--- a/src/3rdparty/pcre/sljit/sljitNativeSPARC_32.c
+++ b/src/3rdparty/pcre/sljit/sljitNativeSPARC_32.c
@@ -24,7 +24,7 @@
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
-static sljit_si load_immediate(struct sljit_compiler *compiler, sljit_si dst, sljit_sw imm)
+static sljit_s32 load_immediate(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw imm)
{
if (imm <= SIMM_MAX && imm >= SIMM_MIN)
return push_inst(compiler, OR | D(dst) | S1(0) | IMM(imm), DR(dst));
@@ -35,26 +35,26 @@ static sljit_si load_immediate(struct sljit_compiler *compiler, sljit_si dst, sl
#define ARG2(flags, src2) ((flags & SRC2_IMM) ? IMM(src2) : S2(src2))
-static SLJIT_INLINE sljit_si emit_single_op(struct sljit_compiler *compiler, sljit_si op, sljit_si flags,
- sljit_si dst, sljit_si src1, sljit_sw src2)
+static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 flags,
+ sljit_s32 dst, sljit_s32 src1, sljit_sw src2)
{
SLJIT_COMPILE_ASSERT(ICC_IS_SET == SET_FLAGS, icc_is_set_and_set_flags_must_be_the_same);
switch (op) {
case SLJIT_MOV:
- case SLJIT_MOV_UI:
- case SLJIT_MOV_SI:
+ case SLJIT_MOV_U32:
+ case SLJIT_MOV_S32:
case SLJIT_MOV_P:
SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
if (dst != src2)
return push_inst(compiler, OR | D(dst) | S1(0) | S2(src2), DR(dst));
return SLJIT_SUCCESS;
- case SLJIT_MOV_UB:
- case SLJIT_MOV_SB:
+ case SLJIT_MOV_U8:
+ case SLJIT_MOV_S8:
SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
if ((flags & (REG_DEST | REG2_SOURCE)) == (REG_DEST | REG2_SOURCE)) {
- if (op == SLJIT_MOV_UB)
+ if (op == SLJIT_MOV_U8)
return push_inst(compiler, AND | D(dst) | S1(src2) | IMM(0xff), DR(dst));
FAIL_IF(push_inst(compiler, SLL | D(dst) | S1(src2) | IMM(24), DR(dst)));
return push_inst(compiler, SRA | D(dst) | S1(dst) | IMM(24), DR(dst));
@@ -63,12 +63,12 @@ static SLJIT_INLINE sljit_si emit_single_op(struct sljit_compiler *compiler, slj
SLJIT_ASSERT_STOP();
return SLJIT_SUCCESS;
- case SLJIT_MOV_UH:
- case SLJIT_MOV_SH:
+ case SLJIT_MOV_U16:
+ case SLJIT_MOV_S16:
SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
if ((flags & (REG_DEST | REG2_SOURCE)) == (REG_DEST | REG2_SOURCE)) {
FAIL_IF(push_inst(compiler, SLL | D(dst) | S1(src2) | IMM(16), DR(dst)));
- return push_inst(compiler, (op == SLJIT_MOV_SH ? SRA : SRL) | D(dst) | S1(dst) | IMM(16), DR(dst));
+ return push_inst(compiler, (op == SLJIT_MOV_S16 ? SRA : SRL) | D(dst) | S1(dst) | IMM(16), DR(dst));
}
else if (dst != src2)
SLJIT_ASSERT_STOP();
@@ -139,7 +139,7 @@ static SLJIT_INLINE sljit_si emit_single_op(struct sljit_compiler *compiler, slj
return SLJIT_SUCCESS;
}
-static SLJIT_INLINE sljit_si emit_const(struct sljit_compiler *compiler, sljit_si dst, sljit_sw init_value)
+static SLJIT_INLINE sljit_s32 emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw init_value)
{
FAIL_IF(push_inst(compiler, SETHI | D(dst) | ((init_value >> 10) & 0x3fffff), DR(dst)));
return push_inst(compiler, OR | D(dst) | S1(dst) | IMM_ARG | (init_value & 0x3ff), DR(dst));
diff --git a/src/3rdparty/pcre/sljit/sljitNativeSPARC_common.c b/src/3rdparty/pcre/sljit/sljitNativeSPARC_common.c
index 327c4267be..f3a33a1097 100644
--- a/src/3rdparty/pcre/sljit/sljitNativeSPARC_common.c
+++ b/src/3rdparty/pcre/sljit/sljitNativeSPARC_common.c
@@ -24,14 +24,16 @@
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
-SLJIT_API_FUNC_ATTRIBUTE SLJIT_CONST char* sljit_get_platform_name(void)
+SLJIT_API_FUNC_ATTRIBUTE const char* sljit_get_platform_name(void)
{
return "SPARC" SLJIT_CPUINFO;
}
/* Length of an instruction word
Both for sparc-32 and sparc-64 */
-typedef sljit_ui sljit_ins;
+typedef sljit_u32 sljit_ins;
+
+#if (defined SLJIT_CACHE_FLUSH_OWN_IMPL && SLJIT_CACHE_FLUSH_OWN_IMPL)
static void sparc_cache_flush(sljit_ins *from, sljit_ins *to)
{
@@ -82,6 +84,8 @@ static void sparc_cache_flush(sljit_ins *from, sljit_ins *to)
#endif
}
+#endif /* (defined SLJIT_CACHE_FLUSH_OWN_IMPL && SLJIT_CACHE_FLUSH_OWN_IMPL) */
+
/* TMP_REG2 is not used by getput_arg */
#define TMP_REG1 (SLJIT_NUMBER_OF_REGISTERS + 2)
#define TMP_REG2 (SLJIT_NUMBER_OF_REGISTERS + 3)
@@ -91,7 +95,7 @@ static void sparc_cache_flush(sljit_ins *from, sljit_ins *to)
#define TMP_FREG1 (0)
#define TMP_FREG2 ((SLJIT_NUMBER_OF_FLOAT_REGISTERS + 1) << 1)
-static SLJIT_CONST sljit_ub reg_map[SLJIT_NUMBER_OF_REGISTERS + 6] = {
+static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 6] = {
0, 8, 9, 10, 13, 29, 28, 27, 23, 22, 21, 20, 19, 18, 17, 16, 26, 25, 24, 14, 1, 11, 12, 15
};
@@ -181,7 +185,7 @@ static SLJIT_CONST sljit_ub reg_map[SLJIT_NUMBER_OF_REGISTERS + 6] = {
/* dest_reg is the absolute name of the register
Useful for reordering instructions in the delay slot. */
-static sljit_si push_inst(struct sljit_compiler *compiler, sljit_ins ins, sljit_si delay_slot)
+static sljit_s32 push_inst(struct sljit_compiler *compiler, sljit_ins ins, sljit_s32 delay_slot)
{
sljit_ins *ptr;
SLJIT_ASSERT((delay_slot & DST_INS_MASK) == UNMOVABLE_INS
@@ -340,7 +344,7 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
SLJIT_ASSERT(!label);
SLJIT_ASSERT(!jump);
SLJIT_ASSERT(!const_);
- SLJIT_ASSERT(code_ptr - code <= (sljit_si)compiler->size);
+ SLJIT_ASSERT(code_ptr - code <= (sljit_s32)compiler->size);
jump = compiler->jumps;
while (jump) {
@@ -418,9 +422,9 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
#include "sljitNativeSPARC_64.c"
#endif
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compiler *compiler,
- sljit_si options, sljit_si args, sljit_si scratches, sljit_si saveds,
- sljit_si fscratches, sljit_si fsaveds, sljit_si local_size)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compiler,
+ sljit_s32 options, sljit_s32 args, sljit_s32 scratches, sljit_s32 saveds,
+ sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
{
CHECK_ERROR();
CHECK(check_sljit_emit_enter(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size));
@@ -442,9 +446,9 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compiler *compil
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_set_context(struct sljit_compiler *compiler,
- sljit_si options, sljit_si args, sljit_si scratches, sljit_si saveds,
- sljit_si fscratches, sljit_si fsaveds, sljit_si local_size)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_set_context(struct sljit_compiler *compiler,
+ sljit_s32 options, sljit_s32 args, sljit_s32 scratches, sljit_s32 saveds,
+ sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
{
CHECK_ERROR();
CHECK(check_sljit_set_context(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size));
@@ -454,7 +458,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_set_context(struct sljit_compiler *compi
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_return(struct sljit_compiler *compiler, sljit_si op, sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 src, sljit_sw srcw)
{
CHECK_ERROR();
CHECK(check_sljit_emit_return(compiler, op, src, srcw));
@@ -478,7 +482,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_return(struct sljit_compiler *compi
#define ARCH_32_64(a, b) b
#endif
-static SLJIT_CONST sljit_ins data_transfer_insts[16 + 4] = {
+static const sljit_ins data_transfer_insts[16 + 4] = {
/* u w s */ ARCH_32_64(OPC1(3) | OPC3(0x04) /* stw */, OPC1(3) | OPC3(0x0e) /* stx */),
/* u w l */ ARCH_32_64(OPC1(3) | OPC3(0x00) /* lduw */, OPC1(3) | OPC3(0x0b) /* ldx */),
/* u b s */ OPC1(3) | OPC3(0x05) /* stb */,
@@ -506,7 +510,7 @@ static SLJIT_CONST sljit_ins data_transfer_insts[16 + 4] = {
#undef ARCH_32_64
/* Can perform an operation using at most 1 instruction. */
-static sljit_si getput_arg_fast(struct sljit_compiler *compiler, sljit_si flags, sljit_si reg, sljit_si arg, sljit_sw argw)
+static sljit_s32 getput_arg_fast(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg, sljit_s32 arg, sljit_sw argw)
{
SLJIT_ASSERT(arg & SLJIT_MEM);
@@ -529,7 +533,7 @@ static sljit_si getput_arg_fast(struct sljit_compiler *compiler, sljit_si flags,
/* See getput_arg below.
Note: can_cache is called only for binary operators. Those
operators always use word arguments without write back. */
-static sljit_si can_cache(sljit_si arg, sljit_sw argw, sljit_si next_arg, sljit_sw next_argw)
+static sljit_s32 can_cache(sljit_s32 arg, sljit_sw argw, sljit_s32 next_arg, sljit_sw next_argw)
{
SLJIT_ASSERT((arg & SLJIT_MEM) && (next_arg & SLJIT_MEM));
@@ -549,9 +553,9 @@ static sljit_si can_cache(sljit_si arg, sljit_sw argw, sljit_si next_arg, sljit_
}
/* Emit the necessary instructions. See can_cache above. */
-static sljit_si getput_arg(struct sljit_compiler *compiler, sljit_si flags, sljit_si reg, sljit_si arg, sljit_sw argw, sljit_si next_arg, sljit_sw next_argw)
+static sljit_s32 getput_arg(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg, sljit_s32 arg, sljit_sw argw, sljit_s32 next_arg, sljit_sw next_argw)
{
- sljit_si base, arg2, delay_slot;
+ sljit_s32 base, arg2, delay_slot;
sljit_ins dest;
SLJIT_ASSERT(arg & SLJIT_MEM);
@@ -613,7 +617,7 @@ static sljit_si getput_arg(struct sljit_compiler *compiler, sljit_si flags, slji
return push_inst(compiler, ADD | D(base) | S1(base) | S2(arg2), DR(base));
}
-static SLJIT_INLINE sljit_si emit_op_mem(struct sljit_compiler *compiler, sljit_si flags, sljit_si reg, sljit_si arg, sljit_sw argw)
+static SLJIT_INLINE sljit_s32 emit_op_mem(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg, sljit_s32 arg, sljit_sw argw)
{
if (getput_arg_fast(compiler, flags, reg, arg, argw))
return compiler->error;
@@ -622,26 +626,26 @@ static SLJIT_INLINE sljit_si emit_op_mem(struct sljit_compiler *compiler, sljit_
return getput_arg(compiler, flags, reg, arg, argw, 0, 0);
}
-static SLJIT_INLINE sljit_si emit_op_mem2(struct sljit_compiler *compiler, sljit_si flags, sljit_si reg, sljit_si arg1, sljit_sw arg1w, sljit_si arg2, sljit_sw arg2w)
+static SLJIT_INLINE sljit_s32 emit_op_mem2(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg, sljit_s32 arg1, sljit_sw arg1w, sljit_s32 arg2, sljit_sw arg2w)
{
if (getput_arg_fast(compiler, flags, reg, arg1, arg1w))
return compiler->error;
return getput_arg(compiler, flags, reg, arg1, arg1w, arg2, arg2w);
}
-static sljit_si emit_op(struct sljit_compiler *compiler, sljit_si op, sljit_si flags,
- sljit_si dst, sljit_sw dstw,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+static sljit_s32 emit_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 flags,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
/* arg1 goes to TMP_REG1 or src reg
arg2 goes to TMP_REG2, imm or src reg
TMP_REG3 can be used for caching
result goes to TMP_REG2, so put result can use TMP_REG1 and TMP_REG3. */
- sljit_si dst_r = TMP_REG2;
- sljit_si src1_r;
+ sljit_s32 dst_r = TMP_REG2;
+ sljit_s32 src1_r;
sljit_sw src2_r = 0;
- sljit_si sugg_src2_r = TMP_REG2;
+ sljit_s32 sugg_src2_r = TMP_REG2;
if (!(flags & ALT_KEEP_CACHE)) {
compiler->cache_arg = 0;
@@ -649,13 +653,13 @@ static sljit_si emit_op(struct sljit_compiler *compiler, sljit_si op, sljit_si f
}
if (SLJIT_UNLIKELY(dst == SLJIT_UNUSED)) {
- if (op >= SLJIT_MOV && op <= SLJIT_MOVU_SI && !(src2 & SLJIT_MEM))
+ if (op >= SLJIT_MOV && op <= SLJIT_MOVU_S32 && !(src2 & SLJIT_MEM))
return SLJIT_SUCCESS;
}
else if (FAST_IS_REG(dst)) {
dst_r = dst;
flags |= REG_DEST;
- if (op >= SLJIT_MOV && op <= SLJIT_MOVU_SI)
+ if (op >= SLJIT_MOV && op <= SLJIT_MOVU_S32)
sugg_src2_r = dst_r;
}
else if ((dst & SLJIT_MEM) && !getput_arg_fast(compiler, flags | ARG_TEST, TMP_REG1, dst, dstw))
@@ -705,7 +709,7 @@ static sljit_si emit_op(struct sljit_compiler *compiler, sljit_si op, sljit_si f
if (FAST_IS_REG(src2)) {
src2_r = src2;
flags |= REG2_SOURCE;
- if (!(flags & REG_DEST) && op >= SLJIT_MOV && op <= SLJIT_MOVU_SI)
+ if (!(flags & REG_DEST) && op >= SLJIT_MOV && op <= SLJIT_MOVU_S32)
dst_r = src2_r;
}
else if (src2 & SLJIT_IMM) {
@@ -716,7 +720,7 @@ static sljit_si emit_op(struct sljit_compiler *compiler, sljit_si op, sljit_si f
}
else {
src2_r = 0;
- if ((op >= SLJIT_MOV && op <= SLJIT_MOVU_SI) && (dst & SLJIT_MEM))
+ if ((op >= SLJIT_MOV && op <= SLJIT_MOVU_S32) && (dst & SLJIT_MEM))
dst_r = 0;
}
}
@@ -758,7 +762,7 @@ static sljit_si emit_op(struct sljit_compiler *compiler, sljit_si op, sljit_si f
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op0(struct sljit_compiler *compiler, sljit_si op)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op0(struct sljit_compiler *compiler, sljit_s32 op)
{
CHECK_ERROR();
CHECK(check_sljit_emit_op0(compiler, op));
@@ -769,30 +773,30 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op0(struct sljit_compiler *compiler
return push_inst(compiler, TA, UNMOVABLE_INS);
case SLJIT_NOP:
return push_inst(compiler, NOP, UNMOVABLE_INS);
- case SLJIT_LUMUL:
- case SLJIT_LSMUL:
+ case SLJIT_LMUL_UW:
+ case SLJIT_LMUL_SW:
#if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32)
- FAIL_IF(push_inst(compiler, (op == SLJIT_LUMUL ? UMUL : SMUL) | D(SLJIT_R0) | S1(SLJIT_R0) | S2(SLJIT_R1), DR(SLJIT_R0)));
+ FAIL_IF(push_inst(compiler, (op == SLJIT_LMUL_UW ? UMUL : SMUL) | D(SLJIT_R0) | S1(SLJIT_R0) | S2(SLJIT_R1), DR(SLJIT_R0)));
return push_inst(compiler, RDY | D(SLJIT_R1), DR(SLJIT_R1));
#else
#error "Implementation required"
#endif
- case SLJIT_UDIVMOD:
- case SLJIT_SDIVMOD:
- case SLJIT_UDIVI:
- case SLJIT_SDIVI:
- SLJIT_COMPILE_ASSERT((SLJIT_UDIVMOD & 0x2) == 0 && SLJIT_UDIVI - 0x2 == SLJIT_UDIVMOD, bad_div_opcode_assignments);
+ case SLJIT_DIVMOD_UW:
+ case SLJIT_DIVMOD_SW:
+ case SLJIT_DIV_UW:
+ case SLJIT_DIV_SW:
+ SLJIT_COMPILE_ASSERT((SLJIT_DIVMOD_UW & 0x2) == 0 && SLJIT_DIV_UW - 0x2 == SLJIT_DIVMOD_UW, bad_div_opcode_assignments);
#if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32)
- if ((op | 0x2) == SLJIT_UDIVI)
+ if ((op | 0x2) == SLJIT_DIV_UW)
FAIL_IF(push_inst(compiler, WRY | S1(0), MOVABLE_INS));
else {
FAIL_IF(push_inst(compiler, SRA | D(TMP_REG1) | S1(SLJIT_R0) | IMM(31), DR(TMP_REG1)));
FAIL_IF(push_inst(compiler, WRY | S1(TMP_REG1), MOVABLE_INS));
}
- if (op <= SLJIT_SDIVMOD)
+ if (op <= SLJIT_DIVMOD_SW)
FAIL_IF(push_inst(compiler, OR | D(TMP_REG2) | S1(0) | S2(SLJIT_R0), DR(TMP_REG2)));
- FAIL_IF(push_inst(compiler, ((op | 0x2) == SLJIT_UDIVI ? UDIV : SDIV) | D(SLJIT_R0) | S1(SLJIT_R0) | S2(SLJIT_R1), DR(SLJIT_R0)));
- if (op >= SLJIT_UDIVI)
+ FAIL_IF(push_inst(compiler, ((op | 0x2) == SLJIT_DIV_UW ? UDIV : SDIV) | D(SLJIT_R0) | S1(SLJIT_R0) | S2(SLJIT_R1), DR(SLJIT_R0)));
+ if (op >= SLJIT_DIV_UW)
return SLJIT_SUCCESS;
FAIL_IF(push_inst(compiler, SMUL | D(SLJIT_R1) | S1(SLJIT_R0) | S2(SLJIT_R1), DR(SLJIT_R1)));
return push_inst(compiler, SUB | D(SLJIT_R1) | S1(TMP_REG2) | S2(SLJIT_R1), DR(SLJIT_R1));
@@ -804,11 +808,11 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op0(struct sljit_compiler *compiler
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op1(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
{
- sljit_si flags = GET_FLAGS(op) ? SET_FLAGS : 0;
+ sljit_s32 flags = GET_FLAGS(op) ? SET_FLAGS : 0;
CHECK_ERROR();
CHECK(check_sljit_emit_op1(compiler, op, dst, dstw, src, srcw));
@@ -821,45 +825,45 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op1(struct sljit_compiler *compiler
case SLJIT_MOV_P:
return emit_op(compiler, SLJIT_MOV, flags | WORD_DATA, dst, dstw, TMP_REG1, 0, src, srcw);
- case SLJIT_MOV_UI:
- return emit_op(compiler, SLJIT_MOV_UI, flags | INT_DATA, dst, dstw, TMP_REG1, 0, src, srcw);
+ case SLJIT_MOV_U32:
+ return emit_op(compiler, SLJIT_MOV_U32, flags | INT_DATA, dst, dstw, TMP_REG1, 0, src, srcw);
- case SLJIT_MOV_SI:
- return emit_op(compiler, SLJIT_MOV_SI, flags | INT_DATA | SIGNED_DATA, dst, dstw, TMP_REG1, 0, src, srcw);
+ case SLJIT_MOV_S32:
+ return emit_op(compiler, SLJIT_MOV_S32, flags | INT_DATA | SIGNED_DATA, dst, dstw, TMP_REG1, 0, src, srcw);
- case SLJIT_MOV_UB:
- return emit_op(compiler, SLJIT_MOV_UB, flags | BYTE_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_ub)srcw : srcw);
+ case SLJIT_MOV_U8:
+ return emit_op(compiler, SLJIT_MOV_U8, flags | BYTE_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_u8)srcw : srcw);
- case SLJIT_MOV_SB:
- return emit_op(compiler, SLJIT_MOV_SB, flags | BYTE_DATA | SIGNED_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_sb)srcw : srcw);
+ case SLJIT_MOV_S8:
+ return emit_op(compiler, SLJIT_MOV_S8, flags | BYTE_DATA | SIGNED_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s8)srcw : srcw);
- case SLJIT_MOV_UH:
- return emit_op(compiler, SLJIT_MOV_UH, flags | HALF_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_uh)srcw : srcw);
+ case SLJIT_MOV_U16:
+ return emit_op(compiler, SLJIT_MOV_U16, flags | HALF_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_u16)srcw : srcw);
- case SLJIT_MOV_SH:
- return emit_op(compiler, SLJIT_MOV_SH, flags | HALF_DATA | SIGNED_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_sh)srcw : srcw);
+ case SLJIT_MOV_S16:
+ return emit_op(compiler, SLJIT_MOV_S16, flags | HALF_DATA | SIGNED_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s16)srcw : srcw);
case SLJIT_MOVU:
case SLJIT_MOVU_P:
return emit_op(compiler, SLJIT_MOV, flags | WORD_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, srcw);
- case SLJIT_MOVU_UI:
- return emit_op(compiler, SLJIT_MOV_UI, flags | INT_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, srcw);
+ case SLJIT_MOVU_U32:
+ return emit_op(compiler, SLJIT_MOV_U32, flags | INT_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, srcw);
- case SLJIT_MOVU_SI:
- return emit_op(compiler, SLJIT_MOV_SI, flags | INT_DATA | SIGNED_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, srcw);
+ case SLJIT_MOVU_S32:
+ return emit_op(compiler, SLJIT_MOV_S32, flags | INT_DATA | SIGNED_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, srcw);
- case SLJIT_MOVU_UB:
- return emit_op(compiler, SLJIT_MOV_UB, flags | BYTE_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_ub)srcw : srcw);
+ case SLJIT_MOVU_U8:
+ return emit_op(compiler, SLJIT_MOV_U8, flags | BYTE_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_u8)srcw : srcw);
- case SLJIT_MOVU_SB:
- return emit_op(compiler, SLJIT_MOV_SB, flags | BYTE_DATA | SIGNED_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_sb)srcw : srcw);
+ case SLJIT_MOVU_S8:
+ return emit_op(compiler, SLJIT_MOV_S8, flags | BYTE_DATA | SIGNED_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s8)srcw : srcw);
- case SLJIT_MOVU_UH:
- return emit_op(compiler, SLJIT_MOV_UH, flags | HALF_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_uh)srcw : srcw);
+ case SLJIT_MOVU_U16:
+ return emit_op(compiler, SLJIT_MOV_U16, flags | HALF_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_u16)srcw : srcw);
- case SLJIT_MOVU_SH:
- return emit_op(compiler, SLJIT_MOV_SH, flags | HALF_DATA | SIGNED_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_sh)srcw : srcw);
+ case SLJIT_MOVU_S16:
+ return emit_op(compiler, SLJIT_MOV_S16, flags | HALF_DATA | SIGNED_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s16)srcw : srcw);
case SLJIT_NOT:
case SLJIT_CLZ:
@@ -872,12 +876,12 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op1(struct sljit_compiler *compiler
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op2(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
- sljit_si flags = GET_FLAGS(op) ? SET_FLAGS : 0;
+ sljit_s32 flags = GET_FLAGS(op) ? SET_FLAGS : 0;
CHECK_ERROR();
CHECK(check_sljit_emit_op2(compiler, op, dst, dstw, src1, src1w, src2, src2w));
@@ -914,20 +918,20 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op2(struct sljit_compiler *compiler
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_get_register_index(sljit_si reg)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 reg)
{
CHECK_REG_INDEX(check_sljit_get_register_index(reg));
return reg_map[reg];
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_get_float_register_index(sljit_si reg)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_float_register_index(sljit_s32 reg)
{
CHECK_REG_INDEX(check_sljit_get_float_register_index(reg));
return reg << 1;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_custom(struct sljit_compiler *compiler,
- void *instruction, sljit_si size)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *compiler,
+ void *instruction, sljit_s32 size)
{
CHECK_ERROR();
CHECK(check_sljit_emit_op_custom(compiler, instruction, size));
@@ -939,7 +943,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_custom(struct sljit_compiler *co
/* Floating point operators */
/* --------------------------------------------------------------------- */
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_is_fpu_available(void)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_is_fpu_available(void)
{
#ifdef SLJIT_IS_FPU_AVAILABLE
return SLJIT_IS_FPU_AVAILABLE;
@@ -949,13 +953,13 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_is_fpu_available(void)
#endif
}
-#define FLOAT_DATA(op) (DOUBLE_DATA | ((op & SLJIT_SINGLE_OP) >> 7))
-#define SELECT_FOP(op, single, double) ((op & SLJIT_SINGLE_OP) ? single : double)
+#define FLOAT_DATA(op) (DOUBLE_DATA | ((op & SLJIT_F32_OP) >> 7))
+#define SELECT_FOP(op, single, double) ((op & SLJIT_F32_OP) ? single : double)
#define FLOAT_TMP_MEM_OFFSET (22 * sizeof(sljit_sw))
-static SLJIT_INLINE sljit_si sljit_emit_fop1_convw_fromd(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw)
+static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
{
if (src & SLJIT_MEM) {
FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src, srcw, dst, dstw));
@@ -978,16 +982,16 @@ static SLJIT_INLINE sljit_si sljit_emit_fop1_convw_fromd(struct sljit_compiler *
return emit_op_mem2(compiler, SINGLE_DATA, TMP_FREG1, dst, dstw, 0, 0);
}
-static SLJIT_INLINE sljit_si sljit_emit_fop1_convd_fromw(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw)
+static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_sw(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
{
- sljit_si dst_r = FAST_IS_REG(dst) ? (dst << 1) : TMP_FREG1;
+ sljit_s32 dst_r = FAST_IS_REG(dst) ? (dst << 1) : TMP_FREG1;
if (src & SLJIT_IMM) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- if (GET_OPCODE(op) == SLJIT_CONVD_FROMI)
- srcw = (sljit_si)srcw;
+ if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_S32)
+ srcw = (sljit_s32)srcw;
#endif
FAIL_IF(load_immediate(compiler, TMP_REG1, srcw));
src = TMP_REG1;
@@ -1008,9 +1012,9 @@ static SLJIT_INLINE sljit_si sljit_emit_fop1_convd_fromw(struct sljit_compiler *
return SLJIT_SUCCESS;
}
-static SLJIT_INLINE sljit_si sljit_emit_fop1_cmp(struct sljit_compiler *compiler, sljit_si op,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+static SLJIT_INLINE sljit_s32 sljit_emit_fop1_cmp(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
if (src1 & SLJIT_MEM) {
FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src1, src1w, src2, src2w));
@@ -1029,21 +1033,21 @@ static SLJIT_INLINE sljit_si sljit_emit_fop1_cmp(struct sljit_compiler *compiler
return push_inst(compiler, SELECT_FOP(op, FCMPS, FCMPD) | S1A(src1) | S2A(src2), FCC_IS_SET | MOVABLE_INS);
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop1(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
{
- sljit_si dst_r;
+ sljit_s32 dst_r;
CHECK_ERROR();
compiler->cache_arg = 0;
compiler->cache_argw = 0;
- SLJIT_COMPILE_ASSERT((SLJIT_SINGLE_OP == 0x100) && !(DOUBLE_DATA & 0x2), float_transfer_bit_error);
+ SLJIT_COMPILE_ASSERT((SLJIT_F32_OP == 0x100) && !(DOUBLE_DATA & 0x2), float_transfer_bit_error);
SELECT_FOP1_OPERATION_WITH_CHECKS(compiler, op, dst, dstw, src, srcw);
- if (GET_OPCODE(op) == SLJIT_CONVD_FROMS)
- op ^= SLJIT_SINGLE_OP;
+ if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_F32)
+ op ^= SLJIT_F32_OP;
dst_r = FAST_IS_REG(dst) ? (dst << 1) : TMP_FREG1;
@@ -1055,30 +1059,30 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop1(struct sljit_compiler *compile
src <<= 1;
switch (GET_OPCODE(op)) {
- case SLJIT_DMOV:
+ case SLJIT_MOV_F64:
if (src != dst_r) {
if (dst_r != TMP_FREG1) {
FAIL_IF(push_inst(compiler, FMOVS | DA(dst_r) | S2A(src), MOVABLE_INS));
- if (!(op & SLJIT_SINGLE_OP))
+ if (!(op & SLJIT_F32_OP))
FAIL_IF(push_inst(compiler, FMOVS | DA(dst_r | 1) | S2A(src | 1), MOVABLE_INS));
}
else
dst_r = src;
}
break;
- case SLJIT_DNEG:
+ case SLJIT_NEG_F64:
FAIL_IF(push_inst(compiler, FNEGS | DA(dst_r) | S2A(src), MOVABLE_INS));
- if (dst_r != src && !(op & SLJIT_SINGLE_OP))
+ if (dst_r != src && !(op & SLJIT_F32_OP))
FAIL_IF(push_inst(compiler, FMOVS | DA(dst_r | 1) | S2A(src | 1), MOVABLE_INS));
break;
- case SLJIT_DABS:
+ case SLJIT_ABS_F64:
FAIL_IF(push_inst(compiler, FABSS | DA(dst_r) | S2A(src), MOVABLE_INS));
- if (dst_r != src && !(op & SLJIT_SINGLE_OP))
+ if (dst_r != src && !(op & SLJIT_F32_OP))
FAIL_IF(push_inst(compiler, FMOVS | DA(dst_r | 1) | S2A(src | 1), MOVABLE_INS));
break;
- case SLJIT_CONVD_FROMS:
+ case SLJIT_CONV_F64_FROM_F32:
FAIL_IF(push_inst(compiler, SELECT_FOP(op, FSTOD, FDTOS) | DA(dst_r) | S2A(src), MOVABLE_INS));
- op ^= SLJIT_SINGLE_OP;
+ op ^= SLJIT_F32_OP;
break;
}
@@ -1087,12 +1091,12 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop1(struct sljit_compiler *compile
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop2(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
- sljit_si dst_r, flags = 0;
+ sljit_s32 dst_r, flags = 0;
CHECK_ERROR();
CHECK(check_sljit_emit_fop2(compiler, op, dst, dstw, src1, src1w, src2, src2w));
@@ -1146,19 +1150,19 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop2(struct sljit_compiler *compile
src2 = TMP_FREG2;
switch (GET_OPCODE(op)) {
- case SLJIT_DADD:
+ case SLJIT_ADD_F64:
FAIL_IF(push_inst(compiler, SELECT_FOP(op, FADDS, FADDD) | DA(dst_r) | S1A(src1) | S2A(src2), MOVABLE_INS));
break;
- case SLJIT_DSUB:
+ case SLJIT_SUB_F64:
FAIL_IF(push_inst(compiler, SELECT_FOP(op, FSUBS, FSUBD) | DA(dst_r) | S1A(src1) | S2A(src2), MOVABLE_INS));
break;
- case SLJIT_DMUL:
+ case SLJIT_MUL_F64:
FAIL_IF(push_inst(compiler, SELECT_FOP(op, FMULS, FMULD) | DA(dst_r) | S1A(src1) | S2A(src2), MOVABLE_INS));
break;
- case SLJIT_DDIV:
+ case SLJIT_DIV_F64:
FAIL_IF(push_inst(compiler, SELECT_FOP(op, FDIVS, FDIVD) | DA(dst_r) | S1A(src1) | S2A(src2), MOVABLE_INS));
break;
}
@@ -1176,7 +1180,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop2(struct sljit_compiler *compile
/* Other instructions */
/* --------------------------------------------------------------------- */
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_si dst, sljit_sw dstw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
{
CHECK_ERROR();
CHECK(check_sljit_emit_fast_enter(compiler, dst, dstw));
@@ -1193,7 +1197,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_enter(struct sljit_compiler *c
return emit_op_mem(compiler, WORD_DATA, TMP_LINK, dst, dstw);
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_return(struct sljit_compiler *compiler, sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_return(struct sljit_compiler *compiler, sljit_s32 src, sljit_sw srcw)
{
CHECK_ERROR();
CHECK(check_sljit_emit_fast_return(compiler, src, srcw));
@@ -1231,33 +1235,33 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emit_label(struct sljit_compi
return label;
}
-static sljit_ins get_cc(sljit_si type)
+static sljit_ins get_cc(sljit_s32 type)
{
switch (type) {
case SLJIT_EQUAL:
case SLJIT_MUL_NOT_OVERFLOW:
- case SLJIT_D_NOT_EQUAL: /* Unordered. */
+ case SLJIT_NOT_EQUAL_F64: /* Unordered. */
return DA(0x1);
case SLJIT_NOT_EQUAL:
case SLJIT_MUL_OVERFLOW:
- case SLJIT_D_EQUAL:
+ case SLJIT_EQUAL_F64:
return DA(0x9);
case SLJIT_LESS:
- case SLJIT_D_GREATER: /* Unordered. */
+ case SLJIT_GREATER_F64: /* Unordered. */
return DA(0x5);
case SLJIT_GREATER_EQUAL:
- case SLJIT_D_LESS_EQUAL:
+ case SLJIT_LESS_EQUAL_F64:
return DA(0xd);
case SLJIT_GREATER:
- case SLJIT_D_GREATER_EQUAL: /* Unordered. */
+ case SLJIT_GREATER_EQUAL_F64: /* Unordered. */
return DA(0xc);
case SLJIT_LESS_EQUAL:
- case SLJIT_D_LESS:
+ case SLJIT_LESS_F64:
return DA(0x4);
case SLJIT_SIG_LESS:
@@ -1273,11 +1277,11 @@ static sljit_ins get_cc(sljit_si type)
return DA(0x2);
case SLJIT_OVERFLOW:
- case SLJIT_D_UNORDERED:
+ case SLJIT_UNORDERED_F64:
return DA(0x7);
case SLJIT_NOT_OVERFLOW:
- case SLJIT_D_ORDERED:
+ case SLJIT_ORDERED_F64:
return DA(0xf);
default:
@@ -1286,7 +1290,7 @@ static sljit_ins get_cc(sljit_si type)
}
}
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, sljit_si type)
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, sljit_s32 type)
{
struct sljit_jump *jump;
@@ -1298,7 +1302,7 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compile
set_jump(jump, compiler, type & SLJIT_REWRITABLE_JUMP);
type &= 0xff;
- if (type < SLJIT_D_EQUAL) {
+ if (type < SLJIT_EQUAL_F64) {
jump->flags |= IS_COND;
if (((compiler->delay_slot & DST_INS_MASK) != UNMOVABLE_INS) && !(compiler->delay_slot & ICC_IS_SET))
jump->flags |= IS_MOVABLE;
@@ -1332,10 +1336,10 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compile
return jump;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_ijump(struct sljit_compiler *compiler, sljit_si type, sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 src, sljit_sw srcw)
{
struct sljit_jump *jump = NULL;
- sljit_si src_r;
+ sljit_s32 src_r;
CHECK_ERROR();
CHECK(check_sljit_emit_ijump(compiler, type, src, srcw));
@@ -1367,12 +1371,12 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_ijump(struct sljit_compiler *compil
return push_inst(compiler, NOP, UNMOVABLE_INS);
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw,
- sljit_si type)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw,
+ sljit_s32 type)
{
- sljit_si reg, flags = (GET_FLAGS(op) ? SET_FLAGS : 0);
+ sljit_s32 reg, flags = (GET_FLAGS(op) ? SET_FLAGS : 0);
CHECK_ERROR();
CHECK(check_sljit_emit_op_flags(compiler, op, dst, dstw, src, srcw, type));
@@ -1395,7 +1399,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_flags(struct sljit_compiler *com
}
type &= 0xff;
- if (type < SLJIT_D_EQUAL)
+ if (type < SLJIT_EQUAL_F64)
FAIL_IF(push_inst(compiler, BICC | get_cc(type) | 3, UNMOVABLE_INS));
else
FAIL_IF(push_inst(compiler, FBFCC | get_cc(type) | 3, UNMOVABLE_INS));
@@ -1412,9 +1416,9 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_flags(struct sljit_compiler *com
#endif
}
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_si dst, sljit_sw dstw, sljit_sw init_value)
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw init_value)
{
- sljit_si reg;
+ sljit_s32 reg;
struct sljit_const *const_;
CHECK_ERROR_PTR();
diff --git a/src/3rdparty/pcre/sljit/sljitNativeTILEGX_64.c b/src/3rdparty/pcre/sljit/sljitNativeTILEGX_64.c
index 4d40392fa8..462a8b9cd9 100644
--- a/src/3rdparty/pcre/sljit/sljitNativeTILEGX_64.c
+++ b/src/3rdparty/pcre/sljit/sljitNativeTILEGX_64.c
@@ -49,7 +49,7 @@
#define ADDR_TMP (SLJIT_NUMBER_OF_REGISTERS + 5)
#define PIC_ADDR_REG TMP_REG2
-static SLJIT_CONST sljit_ub reg_map[SLJIT_NUMBER_OF_REGISTERS + 6] = {
+static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 6] = {
63, 0, 1, 2, 3, 4, 30, 31, 32, 33, 34, 54, 5, 16, 6, 7
};
@@ -106,7 +106,7 @@ static SLJIT_CONST sljit_ub reg_map[SLJIT_NUMBER_OF_REGISTERS + 6] = {
*/
#define CHECK_FLAGS(list) (!(flags & UNUSED_DEST) || (op & GET_FLAGS(~(list))))
-SLJIT_API_FUNC_ATTRIBUTE SLJIT_CONST char *sljit_get_platform_name(void)
+SLJIT_API_FUNC_ATTRIBUTE const char *sljit_get_platform_name(void)
{
return "TileGX" SLJIT_CPUINFO;
}
@@ -307,7 +307,7 @@ struct jit_instr {
#define JOFF_X1(x) create_JumpOff_X1(x)
#define BOFF_X1(x) create_BrOff_X1(x)
-static SLJIT_CONST tilegx_mnemonic data_transfer_insts[16] = {
+static const tilegx_mnemonic data_transfer_insts[16] = {
/* u w s */ TILEGX_OPC_ST /* st */,
/* u w l */ TILEGX_OPC_LD /* ld */,
/* u b s */ TILEGX_OPC_ST1 /* st1 */,
@@ -327,7 +327,7 @@ static SLJIT_CONST tilegx_mnemonic data_transfer_insts[16] = {
};
#ifdef TILEGX_JIT_DEBUG
-static sljit_si push_inst_debug(struct sljit_compiler *compiler, sljit_ins ins, int line)
+static sljit_s32 push_inst_debug(struct sljit_compiler *compiler, sljit_ins ins, int line)
{
sljit_ins *ptr = (sljit_ins *)ensure_buf(compiler, sizeof(sljit_ins));
FAIL_IF(!ptr);
@@ -338,7 +338,7 @@ static sljit_si push_inst_debug(struct sljit_compiler *compiler, sljit_ins ins,
return SLJIT_SUCCESS;
}
-static sljit_si push_inst_nodebug(struct sljit_compiler *compiler, sljit_ins ins)
+static sljit_s32 push_inst_nodebug(struct sljit_compiler *compiler, sljit_ins ins)
{
sljit_ins *ptr = (sljit_ins *)ensure_buf(compiler, sizeof(sljit_ins));
FAIL_IF(!ptr);
@@ -349,7 +349,7 @@ static sljit_si push_inst_nodebug(struct sljit_compiler *compiler, sljit_ins ins
#define push_inst(a, b) push_inst_debug(a, b, __LINE__)
#else
-static sljit_si push_inst(struct sljit_compiler *compiler, sljit_ins ins)
+static sljit_s32 push_inst(struct sljit_compiler *compiler, sljit_ins ins)
{
sljit_ins *ptr = (sljit_ins *)ensure_buf(compiler, sizeof(sljit_ins));
FAIL_IF(!ptr);
@@ -557,7 +557,7 @@ const struct Format* compute_format()
return match;
}
-sljit_si assign_pipes()
+sljit_s32 assign_pipes()
{
unsigned long output_registers = 0;
unsigned int i = 0;
@@ -621,7 +621,7 @@ tilegx_bundle_bits get_bundle_bit(struct jit_instr *inst)
return bits;
}
-static sljit_si update_buffer(struct sljit_compiler *compiler)
+static sljit_s32 update_buffer(struct sljit_compiler *compiler)
{
int i;
int orig_index = inst_buf_index;
@@ -733,7 +733,7 @@ static sljit_si update_buffer(struct sljit_compiler *compiler)
SLJIT_ASSERT_STOP();
}
-static sljit_si flush_buffer(struct sljit_compiler *compiler)
+static sljit_s32 flush_buffer(struct sljit_compiler *compiler)
{
while (inst_buf_index != 0) {
FAIL_IF(update_buffer(compiler));
@@ -741,7 +741,7 @@ static sljit_si flush_buffer(struct sljit_compiler *compiler)
return SLJIT_SUCCESS;
}
-static sljit_si push_4_buffer(struct sljit_compiler *compiler, tilegx_mnemonic opc, int op0, int op1, int op2, int op3, int line)
+static sljit_s32 push_4_buffer(struct sljit_compiler *compiler, tilegx_mnemonic opc, int op0, int op1, int op2, int op3, int line)
{
if (inst_buf_index == TILEGX_MAX_INSTRUCTIONS_PER_BUNDLE)
FAIL_IF(update_buffer(compiler));
@@ -761,7 +761,7 @@ static sljit_si push_4_buffer(struct sljit_compiler *compiler, tilegx_mnemonic o
return SLJIT_SUCCESS;
}
-static sljit_si push_3_buffer(struct sljit_compiler *compiler, tilegx_mnemonic opc, int op0, int op1, int op2, int line)
+static sljit_s32 push_3_buffer(struct sljit_compiler *compiler, tilegx_mnemonic opc, int op0, int op1, int op2, int line)
{
if (inst_buf_index == TILEGX_MAX_INSTRUCTIONS_PER_BUNDLE)
FAIL_IF(update_buffer(compiler));
@@ -822,7 +822,7 @@ static sljit_si push_3_buffer(struct sljit_compiler *compiler, tilegx_mnemonic o
return SLJIT_SUCCESS;
}
-static sljit_si push_2_buffer(struct sljit_compiler *compiler, tilegx_mnemonic opc, int op0, int op1, int line)
+static sljit_s32 push_2_buffer(struct sljit_compiler *compiler, tilegx_mnemonic opc, int op0, int op1, int line)
{
if (inst_buf_index == TILEGX_MAX_INSTRUCTIONS_PER_BUNDLE)
FAIL_IF(update_buffer(compiler));
@@ -867,7 +867,7 @@ static sljit_si push_2_buffer(struct sljit_compiler *compiler, tilegx_mnemonic o
return SLJIT_SUCCESS;
}
-static sljit_si push_0_buffer(struct sljit_compiler *compiler, tilegx_mnemonic opc, int line)
+static sljit_s32 push_0_buffer(struct sljit_compiler *compiler, tilegx_mnemonic opc, int line)
{
if (inst_buf_index == TILEGX_MAX_INSTRUCTIONS_PER_BUNDLE)
FAIL_IF(update_buffer(compiler));
@@ -883,7 +883,7 @@ static sljit_si push_0_buffer(struct sljit_compiler *compiler, tilegx_mnemonic o
return SLJIT_SUCCESS;
}
-static sljit_si push_jr_buffer(struct sljit_compiler *compiler, tilegx_mnemonic opc, int op0, int line)
+static sljit_s32 push_jr_buffer(struct sljit_compiler *compiler, tilegx_mnemonic opc, int op0, int line)
{
if (inst_buf_index == TILEGX_MAX_INSTRUCTIONS_PER_BUNDLE)
FAIL_IF(update_buffer(compiler));
@@ -1117,7 +1117,7 @@ SLJIT_API_FUNC_ATTRIBUTE void * sljit_generate_code(struct sljit_compiler *compi
return code;
}
-static sljit_si load_immediate(struct sljit_compiler *compiler, sljit_si dst_ar, sljit_sw imm)
+static sljit_s32 load_immediate(struct sljit_compiler *compiler, sljit_s32 dst_ar, sljit_sw imm)
{
if (imm <= SIMM_16BIT_MAX && imm >= SIMM_16BIT_MIN)
@@ -1140,7 +1140,7 @@ static sljit_si load_immediate(struct sljit_compiler *compiler, sljit_si dst_ar,
return SHL16INSLI(dst_ar, dst_ar, imm);
}
-static sljit_si emit_const(struct sljit_compiler *compiler, sljit_si dst_ar, sljit_sw imm, int flush)
+static sljit_s32 emit_const(struct sljit_compiler *compiler, sljit_s32 dst_ar, sljit_sw imm, int flush)
{
/* Should *not* be optimized as load_immediate, as pcre relocation
mechanism will match this fixed 4-instruction pattern. */
@@ -1155,7 +1155,7 @@ static sljit_si emit_const(struct sljit_compiler *compiler, sljit_si dst_ar, slj
return SHL16INSLI(dst_ar, dst_ar, imm);
}
-static sljit_si emit_const_64(struct sljit_compiler *compiler, sljit_si dst_ar, sljit_sw imm, int flush)
+static sljit_s32 emit_const_64(struct sljit_compiler *compiler, sljit_s32 dst_ar, sljit_sw imm, int flush)
{
/* Should *not* be optimized as load_immediate, as pcre relocation
mechanism will match this fixed 4-instruction pattern. */
@@ -1172,12 +1172,12 @@ static sljit_si emit_const_64(struct sljit_compiler *compiler, sljit_si dst_ar,
return SHL16INSLI(reg_map[dst_ar], reg_map[dst_ar], imm);
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compiler *compiler,
- sljit_si options, sljit_si args, sljit_si scratches, sljit_si saveds,
- sljit_si fscratches, sljit_si fsaveds, sljit_si local_size)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compiler,
+ sljit_s32 options, sljit_s32 args, sljit_s32 scratches, sljit_s32 saveds,
+ sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
{
sljit_ins base;
- sljit_si i, tmp;
+ sljit_s32 i, tmp;
CHECK_ERROR();
CHECK(check_sljit_emit_enter(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size));
@@ -1222,9 +1222,9 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compiler *compil
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_set_context(struct sljit_compiler *compiler,
- sljit_si options, sljit_si args, sljit_si scratches, sljit_si saveds,
- sljit_si fscratches, sljit_si fsaveds, sljit_si local_size)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_set_context(struct sljit_compiler *compiler,
+ sljit_s32 options, sljit_s32 args, sljit_s32 scratches, sljit_s32 saveds,
+ sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
{
CHECK_ERROR();
CHECK(check_sljit_set_context(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size));
@@ -1236,12 +1236,12 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_set_context(struct sljit_compiler *compi
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_return(struct sljit_compiler *compiler, sljit_si op, sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 src, sljit_sw srcw)
{
- sljit_si local_size;
+ sljit_s32 local_size;
sljit_ins base;
- sljit_si i, tmp;
- sljit_si saveds;
+ sljit_s32 i, tmp;
+ sljit_s32 saveds;
CHECK_ERROR();
CHECK(check_sljit_emit_return(compiler, op, src, srcw));
@@ -1285,7 +1285,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_return(struct sljit_compiler *compi
/* reg_ar is an absolute register! */
/* Can perform an operation using at most 1 instruction. */
-static sljit_si getput_arg_fast(struct sljit_compiler *compiler, sljit_si flags, sljit_si reg_ar, sljit_si arg, sljit_sw argw)
+static sljit_s32 getput_arg_fast(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg_ar, sljit_s32 arg, sljit_sw argw)
{
SLJIT_ASSERT(arg & SLJIT_MEM);
@@ -1311,7 +1311,7 @@ static sljit_si getput_arg_fast(struct sljit_compiler *compiler, sljit_si flags,
/* See getput_arg below.
Note: can_cache is called only for binary operators. Those
operators always use word arguments without write back. */
-static sljit_si can_cache(sljit_si arg, sljit_sw argw, sljit_si next_arg, sljit_sw next_argw)
+static sljit_s32 can_cache(sljit_s32 arg, sljit_sw argw, sljit_s32 next_arg, sljit_sw next_argw)
{
SLJIT_ASSERT((arg & SLJIT_MEM) && (next_arg & SLJIT_MEM));
@@ -1337,9 +1337,9 @@ static sljit_si can_cache(sljit_si arg, sljit_sw argw, sljit_si next_arg, sljit_
}
/* Emit the necessary instructions. See can_cache above. */
-static sljit_si getput_arg(struct sljit_compiler *compiler, sljit_si flags, sljit_si reg_ar, sljit_si arg, sljit_sw argw, sljit_si next_arg, sljit_sw next_argw)
+static sljit_s32 getput_arg(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg_ar, sljit_s32 arg, sljit_sw argw, sljit_s32 next_arg, sljit_sw next_argw)
{
- sljit_si tmp_ar, base;
+ sljit_s32 tmp_ar, base;
SLJIT_ASSERT(arg & SLJIT_MEM);
if (!(next_arg & SLJIT_MEM)) {
@@ -1530,7 +1530,7 @@ static sljit_si getput_arg(struct sljit_compiler *compiler, sljit_si flags, slji
return PB2(data_transfer_insts[flags & MEM_MASK], tmp_ar, reg_ar);
}
-static SLJIT_INLINE sljit_si emit_op_mem(struct sljit_compiler *compiler, sljit_si flags, sljit_si reg_ar, sljit_si arg, sljit_sw argw)
+static SLJIT_INLINE sljit_s32 emit_op_mem(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg_ar, sljit_s32 arg, sljit_sw argw)
{
if (getput_arg_fast(compiler, flags, reg_ar, arg, argw))
return compiler->error;
@@ -1540,14 +1540,14 @@ static SLJIT_INLINE sljit_si emit_op_mem(struct sljit_compiler *compiler, sljit_
return getput_arg(compiler, flags, reg_ar, arg, argw, 0, 0);
}
-static SLJIT_INLINE sljit_si emit_op_mem2(struct sljit_compiler *compiler, sljit_si flags, sljit_si reg, sljit_si arg1, sljit_sw arg1w, sljit_si arg2, sljit_sw arg2w)
+static SLJIT_INLINE sljit_s32 emit_op_mem2(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg, sljit_s32 arg1, sljit_sw arg1w, sljit_s32 arg2, sljit_sw arg2w)
{
if (getput_arg_fast(compiler, flags, reg, arg1, arg1w))
return compiler->error;
return getput_arg(compiler, flags, reg, arg1, arg1w, arg2, arg2w);
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_si dst, sljit_sw dstw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
{
CHECK_ERROR();
CHECK(check_sljit_emit_fast_enter(compiler, dst, dstw));
@@ -1564,7 +1564,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_enter(struct sljit_compiler *c
return emit_op_mem(compiler, WORD_DATA, RA, dst, dstw);
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_return(struct sljit_compiler *compiler, sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_return(struct sljit_compiler *compiler, sljit_s32 src, sljit_sw srcw)
{
CHECK_ERROR();
CHECK(check_sljit_emit_fast_return(compiler, src, srcw));
@@ -1582,9 +1582,9 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_return(struct sljit_compiler *
return JR(RA);
}
-static SLJIT_INLINE sljit_si emit_single_op(struct sljit_compiler *compiler, sljit_si op, sljit_si flags, sljit_si dst, sljit_si src1, sljit_sw src2)
+static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 flags, sljit_s32 dst, sljit_s32 src1, sljit_sw src2)
{
- sljit_si overflow_ra = 0;
+ sljit_s32 overflow_ra = 0;
switch (GET_OPCODE(op)) {
case SLJIT_MOV:
@@ -1594,11 +1594,11 @@ static SLJIT_INLINE sljit_si emit_single_op(struct sljit_compiler *compiler, slj
return ADD(reg_map[dst], reg_map[src2], ZERO);
return SLJIT_SUCCESS;
- case SLJIT_MOV_UI:
- case SLJIT_MOV_SI:
+ case SLJIT_MOV_U32:
+ case SLJIT_MOV_S32:
SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
if ((flags & (REG_DEST | REG2_SOURCE)) == (REG_DEST | REG2_SOURCE)) {
- if (op == SLJIT_MOV_SI)
+ if (op == SLJIT_MOV_S32)
return BFEXTS(reg_map[dst], reg_map[src2], 0, 31);
return BFEXTU(reg_map[dst], reg_map[src2], 0, 31);
@@ -1609,11 +1609,11 @@ static SLJIT_INLINE sljit_si emit_single_op(struct sljit_compiler *compiler, slj
return SLJIT_SUCCESS;
- case SLJIT_MOV_UB:
- case SLJIT_MOV_SB:
+ case SLJIT_MOV_U8:
+ case SLJIT_MOV_S8:
SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
if ((flags & (REG_DEST | REG2_SOURCE)) == (REG_DEST | REG2_SOURCE)) {
- if (op == SLJIT_MOV_SB)
+ if (op == SLJIT_MOV_S8)
return BFEXTS(reg_map[dst], reg_map[src2], 0, 7);
return BFEXTU(reg_map[dst], reg_map[src2], 0, 7);
@@ -1624,11 +1624,11 @@ static SLJIT_INLINE sljit_si emit_single_op(struct sljit_compiler *compiler, slj
return SLJIT_SUCCESS;
- case SLJIT_MOV_UH:
- case SLJIT_MOV_SH:
+ case SLJIT_MOV_U16:
+ case SLJIT_MOV_S16:
SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
if ((flags & (REG_DEST | REG2_SOURCE)) == (REG_DEST | REG2_SOURCE)) {
- if (op == SLJIT_MOV_SH)
+ if (op == SLJIT_MOV_S16)
return BFEXTS(reg_map[dst], reg_map[src2], 0, 15);
return BFEXTU(reg_map[dst], reg_map[src2], 0, 15);
@@ -1956,16 +1956,16 @@ static SLJIT_INLINE sljit_si emit_single_op(struct sljit_compiler *compiler, slj
return SLJIT_SUCCESS;
}
-static sljit_si emit_op(struct sljit_compiler *compiler, sljit_si op, sljit_si flags, sljit_si dst, sljit_sw dstw, sljit_si src1, sljit_sw src1w, sljit_si src2, sljit_sw src2w)
+static sljit_s32 emit_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 flags, sljit_s32 dst, sljit_sw dstw, sljit_s32 src1, sljit_sw src1w, sljit_s32 src2, sljit_sw src2w)
{
/* arg1 goes to TMP_REG1 or src reg.
arg2 goes to TMP_REG2, imm or src reg.
TMP_REG3 can be used for caching.
result goes to TMP_REG2, so put result can use TMP_REG1 and TMP_REG3. */
- sljit_si dst_r = TMP_REG2;
- sljit_si src1_r;
+ sljit_s32 dst_r = TMP_REG2;
+ sljit_s32 src1_r;
sljit_sw src2_r = 0;
- sljit_si sugg_src2_r = TMP_REG2;
+ sljit_s32 sugg_src2_r = TMP_REG2;
if (!(flags & ALT_KEEP_CACHE)) {
compiler->cache_arg = 0;
@@ -1973,14 +1973,14 @@ static sljit_si emit_op(struct sljit_compiler *compiler, sljit_si op, sljit_si f
}
if (SLJIT_UNLIKELY(dst == SLJIT_UNUSED)) {
- if (op >= SLJIT_MOV && op <= SLJIT_MOVU_SI && !(src2 & SLJIT_MEM))
+ if (op >= SLJIT_MOV && op <= SLJIT_MOVU_S32 && !(src2 & SLJIT_MEM))
return SLJIT_SUCCESS;
if (GET_FLAGS(op))
flags |= UNUSED_DEST;
} else if (FAST_IS_REG(dst)) {
dst_r = dst;
flags |= REG_DEST;
- if (op >= SLJIT_MOV && op <= SLJIT_MOVU_SI)
+ if (op >= SLJIT_MOV && op <= SLJIT_MOVU_S32)
sugg_src2_r = dst_r;
} else if ((dst & SLJIT_MEM) && !getput_arg_fast(compiler, flags | ARG_TEST, TMP_REG1_mapped, dst, dstw))
flags |= SLOW_DEST;
@@ -2033,7 +2033,7 @@ static sljit_si emit_op(struct sljit_compiler *compiler, sljit_si op, sljit_si f
if (FAST_IS_REG(src2)) {
src2_r = src2;
flags |= REG2_SOURCE;
- if (!(flags & REG_DEST) && op >= SLJIT_MOV && op <= SLJIT_MOVU_SI)
+ if (!(flags & REG_DEST) && op >= SLJIT_MOV && op <= SLJIT_MOVU_S32)
dst_r = src2_r;
} else if (src2 & SLJIT_IMM) {
if (!(flags & SRC2_IMM)) {
@@ -2042,7 +2042,7 @@ static sljit_si emit_op(struct sljit_compiler *compiler, sljit_si op, sljit_si f
src2_r = sugg_src2_r;
} else {
src2_r = 0;
- if ((op >= SLJIT_MOV && op <= SLJIT_MOVU_SI) && (dst & SLJIT_MEM))
+ if ((op >= SLJIT_MOV && op <= SLJIT_MOVU_S32) && (dst & SLJIT_MEM))
dst_r = 0;
}
}
@@ -2082,11 +2082,11 @@ static sljit_si emit_op(struct sljit_compiler *compiler, sljit_si op, sljit_si f
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_si op, sljit_si dst, sljit_sw dstw, sljit_si src, sljit_sw srcw, sljit_si type)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 dst, sljit_sw dstw, sljit_s32 src, sljit_sw srcw, sljit_s32 type)
{
- sljit_si sugg_dst_ar, dst_ar;
- sljit_si flags = GET_ALL_FLAGS(op);
- sljit_si mem_type = (op & SLJIT_INT_OP) ? (INT_DATA | SIGNED_DATA) : WORD_DATA;
+ sljit_s32 sugg_dst_ar, dst_ar;
+ sljit_s32 flags = GET_ALL_FLAGS(op);
+ sljit_s32 mem_type = (op & SLJIT_I32_OP) ? (INT_DATA | SIGNED_DATA) : WORD_DATA;
CHECK_ERROR();
CHECK(check_sljit_emit_op_flags(compiler, op, dst, dstw, src, srcw, type));
@@ -2096,7 +2096,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_flags(struct sljit_compiler *com
return SLJIT_SUCCESS;
op = GET_OPCODE(op);
- if (op == SLJIT_MOV_SI || op == SLJIT_MOV_UI)
+ if (op == SLJIT_MOV_S32 || op == SLJIT_MOV_U32)
mem_type = INT_DATA | SIGNED_DATA;
sugg_dst_ar = reg_map[(op < SLJIT_ADD && FAST_IS_REG(dst)) ? dst : TMP_REG2];
@@ -2168,7 +2168,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_flags(struct sljit_compiler *com
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op0(struct sljit_compiler *compiler, sljit_si op) {
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op0(struct sljit_compiler *compiler, sljit_s32 op) {
CHECK_ERROR();
CHECK(check_sljit_emit_op0(compiler, op));
@@ -2180,17 +2180,19 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op0(struct sljit_compiler *compiler
case SLJIT_BREAKPOINT:
return PI(BPT);
- case SLJIT_LUMUL:
- case SLJIT_LSMUL:
- case SLJIT_UDIVI:
- case SLJIT_SDIVI:
+ case SLJIT_LMUL_UW:
+ case SLJIT_LMUL_SW:
+ case SLJIT_DIVMOD_UW:
+ case SLJIT_DIVMOD_SW:
+ case SLJIT_DIV_UW:
+ case SLJIT_DIV_SW:
SLJIT_ASSERT_STOP();
}
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op1(struct sljit_compiler *compiler, sljit_si op, sljit_si dst, sljit_sw dstw, sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 dst, sljit_sw dstw, sljit_s32 src, sljit_sw srcw)
{
CHECK_ERROR();
CHECK(check_sljit_emit_op1(compiler, op, dst, dstw, src, srcw));
@@ -2202,45 +2204,45 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op1(struct sljit_compiler *compiler
case SLJIT_MOV_P:
return emit_op(compiler, SLJIT_MOV, WORD_DATA, dst, dstw, TMP_REG1, 0, src, srcw);
- case SLJIT_MOV_UI:
- return emit_op(compiler, SLJIT_MOV_UI, INT_DATA, dst, dstw, TMP_REG1, 0, src, srcw);
+ case SLJIT_MOV_U32:
+ return emit_op(compiler, SLJIT_MOV_U32, INT_DATA, dst, dstw, TMP_REG1, 0, src, srcw);
- case SLJIT_MOV_SI:
- return emit_op(compiler, SLJIT_MOV_SI, INT_DATA | SIGNED_DATA, dst, dstw, TMP_REG1, 0, src, srcw);
+ case SLJIT_MOV_S32:
+ return emit_op(compiler, SLJIT_MOV_S32, INT_DATA | SIGNED_DATA, dst, dstw, TMP_REG1, 0, src, srcw);
- case SLJIT_MOV_UB:
- return emit_op(compiler, SLJIT_MOV_UB, BYTE_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_ub) srcw : srcw);
+ case SLJIT_MOV_U8:
+ return emit_op(compiler, SLJIT_MOV_U8, BYTE_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_u8) srcw : srcw);
- case SLJIT_MOV_SB:
- return emit_op(compiler, SLJIT_MOV_SB, BYTE_DATA | SIGNED_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_sb) srcw : srcw);
+ case SLJIT_MOV_S8:
+ return emit_op(compiler, SLJIT_MOV_S8, BYTE_DATA | SIGNED_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s8) srcw : srcw);
- case SLJIT_MOV_UH:
- return emit_op(compiler, SLJIT_MOV_UH, HALF_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_uh) srcw : srcw);
+ case SLJIT_MOV_U16:
+ return emit_op(compiler, SLJIT_MOV_U16, HALF_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_u16) srcw : srcw);
- case SLJIT_MOV_SH:
- return emit_op(compiler, SLJIT_MOV_SH, HALF_DATA | SIGNED_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_sh) srcw : srcw);
+ case SLJIT_MOV_S16:
+ return emit_op(compiler, SLJIT_MOV_S16, HALF_DATA | SIGNED_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s16) srcw : srcw);
case SLJIT_MOVU:
case SLJIT_MOVU_P:
return emit_op(compiler, SLJIT_MOV, WORD_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, srcw);
- case SLJIT_MOVU_UI:
- return emit_op(compiler, SLJIT_MOV_UI, INT_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, srcw);
+ case SLJIT_MOVU_U32:
+ return emit_op(compiler, SLJIT_MOV_U32, INT_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, srcw);
- case SLJIT_MOVU_SI:
- return emit_op(compiler, SLJIT_MOV_SI, INT_DATA | SIGNED_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, srcw);
+ case SLJIT_MOVU_S32:
+ return emit_op(compiler, SLJIT_MOV_S32, INT_DATA | SIGNED_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, srcw);
- case SLJIT_MOVU_UB:
- return emit_op(compiler, SLJIT_MOV_UB, BYTE_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_ub) srcw : srcw);
+ case SLJIT_MOVU_U8:
+ return emit_op(compiler, SLJIT_MOV_U8, BYTE_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_u8) srcw : srcw);
- case SLJIT_MOVU_SB:
- return emit_op(compiler, SLJIT_MOV_SB, BYTE_DATA | SIGNED_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_sb) srcw : srcw);
+ case SLJIT_MOVU_S8:
+ return emit_op(compiler, SLJIT_MOV_S8, BYTE_DATA | SIGNED_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s8) srcw : srcw);
- case SLJIT_MOVU_UH:
- return emit_op(compiler, SLJIT_MOV_UH, HALF_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_uh) srcw : srcw);
+ case SLJIT_MOVU_U16:
+ return emit_op(compiler, SLJIT_MOV_U16, HALF_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_u16) srcw : srcw);
- case SLJIT_MOVU_SH:
- return emit_op(compiler, SLJIT_MOV_SH, HALF_DATA | SIGNED_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_sh) srcw : srcw);
+ case SLJIT_MOVU_S16:
+ return emit_op(compiler, SLJIT_MOV_S16, HALF_DATA | SIGNED_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s16) srcw : srcw);
case SLJIT_NOT:
return emit_op(compiler, op, 0, dst, dstw, TMP_REG1, 0, src, srcw);
@@ -2249,13 +2251,13 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op1(struct sljit_compiler *compiler
return emit_op(compiler, SLJIT_SUB | GET_ALL_FLAGS(op), IMM_OP, dst, dstw, SLJIT_IMM, 0, src, srcw);
case SLJIT_CLZ:
- return emit_op(compiler, op, (op & SLJIT_INT_OP) ? INT_DATA : WORD_DATA, dst, dstw, TMP_REG1, 0, src, srcw);
+ return emit_op(compiler, op, (op & SLJIT_I32_OP) ? INT_DATA : WORD_DATA, dst, dstw, TMP_REG1, 0, src, srcw);
}
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op2(struct sljit_compiler *compiler, sljit_si op, sljit_si dst, sljit_sw dstw, sljit_si src1, sljit_sw src1w, sljit_si src2, sljit_sw src2w)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 dst, sljit_sw dstw, sljit_s32 src1, sljit_sw src1w, sljit_s32 src2, sljit_sw src2w)
{
CHECK_ERROR();
CHECK(check_sljit_emit_op2(compiler, op, dst, dstw, src1, src1w, src2, src2w));
@@ -2285,7 +2287,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op2(struct sljit_compiler *compiler
case SLJIT_ASHR:
if (src2 & SLJIT_IMM)
src2w &= 0x3f;
- if (op & SLJIT_INT_OP)
+ if (op & SLJIT_I32_OP)
src2w &= 0x1f;
return emit_op(compiler, op, IMM_OP, dst, dstw, src1, src1w, src2, src2w);
@@ -2312,9 +2314,9 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_label * sljit_emit_label(struct sljit_comp
return label;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_ijump(struct sljit_compiler *compiler, sljit_si type, sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 src, sljit_sw srcw)
{
- sljit_si src_r = TMP_REG2;
+ sljit_s32 src_r = TMP_REG2;
struct sljit_jump *jump = NULL;
flush_buffer(compiler);
@@ -2401,11 +2403,11 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_ijump(struct sljit_compiler *compil
inst = BNEZ_X1 | SRCA_X1(src); \
flags = IS_COND;
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump * sljit_emit_jump(struct sljit_compiler *compiler, sljit_si type)
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump * sljit_emit_jump(struct sljit_compiler *compiler, sljit_s32 type)
{
struct sljit_jump *jump;
sljit_ins inst;
- sljit_si flags = 0;
+ sljit_s32 flags = 0;
flush_buffer(compiler);
@@ -2485,25 +2487,25 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump * sljit_emit_jump(struct sljit_compil
return jump;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_is_fpu_available(void)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_is_fpu_available(void)
{
return 0;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop1(struct sljit_compiler *compiler, sljit_si op, sljit_si dst, sljit_sw dstw, sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 dst, sljit_sw dstw, sljit_s32 src, sljit_sw srcw)
{
SLJIT_ASSERT_STOP();
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop2(struct sljit_compiler *compiler, sljit_si op, sljit_si dst, sljit_sw dstw, sljit_si src1, sljit_sw src1w, sljit_si src2, sljit_sw src2w)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 dst, sljit_sw dstw, sljit_s32 src1, sljit_sw src1w, sljit_s32 src2, sljit_sw src2w)
{
SLJIT_ASSERT_STOP();
}
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_const * sljit_emit_const(struct sljit_compiler *compiler, sljit_si dst, sljit_sw dstw, sljit_sw init_value)
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_const * sljit_emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw init_value)
{
struct sljit_const *const_;
- sljit_si reg;
+ sljit_s32 reg;
flush_buffer(compiler);
@@ -2545,14 +2547,14 @@ SLJIT_API_FUNC_ATTRIBUTE void sljit_set_const(sljit_uw addr, sljit_sw new_consta
SLJIT_CACHE_FLUSH(inst, inst + 4);
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_get_register_index(sljit_si reg)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 reg)
{
CHECK_REG_INDEX(check_sljit_get_register_index(reg));
return reg_map[reg];
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_custom(struct sljit_compiler *compiler,
- void *instruction, sljit_si size)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *compiler,
+ void *instruction, sljit_s32 size)
{
CHECK_ERROR();
CHECK(check_sljit_emit_op_custom(compiler, instruction, size));
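The hunks above (and those that follow) are part of a mechanical rename of the sljit public names: the old sljit_si/sljit_ub style integer types become sljit_s32/sljit_u8, SLJIT_MOV_SI becomes SLJIT_MOV_S32, SLJIT_INT_OP becomes SLJIT_I32_OP, and the LUMUL/UDIVI family becomes LMUL_UW/DIV_UW. As a hedged illustration only -- this shim is not part of the patch or of sljit itself -- an out-of-tree embedder still using the old spellings could keep building with a mapping such as:

/* Hypothetical compatibility shim -- NOT part of this patch or of sljit.
 * Sketch only: maps the pre-rename spellings onto the new names that the
 * hunks above introduce (the SLJIT_MOVU_* forms rename the same way). */
#ifndef SLJIT_COMPAT_OLD_NAMES_H
#define SLJIT_COMPAT_OLD_NAMES_H

/* Old integer type names -> new ones (assumes sljitLir.h already defines
 * the new sljit_s8/sljit_u8/.../sljit_s32 typedefs, as this patch does). */
#define sljit_sb  sljit_s8
#define sljit_ub  sljit_u8
#define sljit_sh  sljit_s16
#define sljit_uh  sljit_u16
#define sljit_si  sljit_s32
#define sljit_ui  sljit_u32

/* Old opcode / flag spellings -> new ones. */
#define SLJIT_INT_OP   SLJIT_I32_OP
#define SLJIT_MOV_UB   SLJIT_MOV_U8
#define SLJIT_MOV_SB   SLJIT_MOV_S8
#define SLJIT_MOV_UH   SLJIT_MOV_U16
#define SLJIT_MOV_SH   SLJIT_MOV_S16
#define SLJIT_MOV_UI   SLJIT_MOV_U32
#define SLJIT_MOV_SI   SLJIT_MOV_S32
#define SLJIT_LUMUL    SLJIT_LMUL_UW
#define SLJIT_LSMUL    SLJIT_LMUL_SW
#define SLJIT_UDIVMOD  SLJIT_DIVMOD_UW
#define SLJIT_SDIVMOD  SLJIT_DIVMOD_SW
#define SLJIT_UDIVI    SLJIT_DIV_UW
#define SLJIT_SDIVI    SLJIT_DIV_SW

#endif /* SLJIT_COMPAT_OLD_NAMES_H */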
diff --git a/src/3rdparty/pcre/sljit/sljitNativeX86_32.c b/src/3rdparty/pcre/sljit/sljitNativeX86_32.c
index d7129c8e26..cd3c656c66 100644
--- a/src/3rdparty/pcre/sljit/sljitNativeX86_32.c
+++ b/src/3rdparty/pcre/sljit/sljitNativeX86_32.c
@@ -26,11 +26,11 @@
/* x86 32-bit arch dependent functions. */
-static sljit_si emit_do_imm(struct sljit_compiler *compiler, sljit_ub opcode, sljit_sw imm)
+static sljit_s32 emit_do_imm(struct sljit_compiler *compiler, sljit_u8 opcode, sljit_sw imm)
{
- sljit_ub *inst;
+ sljit_u8 *inst;
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 1 + sizeof(sljit_sw));
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 1 + sizeof(sljit_sw));
FAIL_IF(!inst);
INC_SIZE(1 + sizeof(sljit_sw));
*inst++ = opcode;
@@ -38,7 +38,7 @@ static sljit_si emit_do_imm(struct sljit_compiler *compiler, sljit_ub opcode, sl
return SLJIT_SUCCESS;
}
-static sljit_ub* generate_far_jump_code(struct sljit_jump *jump, sljit_ub *code_ptr, sljit_si type)
+static sljit_u8* generate_far_jump_code(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_s32 type)
{
if (type == SLJIT_JUMP) {
*code_ptr++ = JMP_i32;
@@ -63,12 +63,12 @@ static sljit_ub* generate_far_jump_code(struct sljit_jump *jump, sljit_ub *code_
return code_ptr;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compiler *compiler,
- sljit_si options, sljit_si args, sljit_si scratches, sljit_si saveds,
- sljit_si fscratches, sljit_si fsaveds, sljit_si local_size)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compiler,
+ sljit_s32 options, sljit_s32 args, sljit_s32 scratches, sljit_s32 saveds,
+ sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
{
- sljit_si size;
- sljit_ub *inst;
+ sljit_s32 size;
+ sljit_u8 *inst;
CHECK_ERROR();
CHECK(check_sljit_emit_enter(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size));
@@ -83,7 +83,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compiler *compil
#else
size += (args > 0 ? (2 + args * 3) : 0);
#endif
- inst = (sljit_ub*)ensure_buf(compiler, 1 + size);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
FAIL_IF(!inst);
INC_SIZE(size);
@@ -143,7 +143,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compiler *compil
if (options & SLJIT_DOUBLE_ALIGNMENT) {
local_size = SLJIT_LOCALS_OFFSET + ((local_size + 7) & ~7);
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 17);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 17);
FAIL_IF(!inst);
INC_SIZE(17);
@@ -183,9 +183,9 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compiler *compil
SLJIT_SP, 0, SLJIT_SP, 0, SLJIT_IMM, local_size);
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_set_context(struct sljit_compiler *compiler,
- sljit_si options, sljit_si args, sljit_si scratches, sljit_si saveds,
- sljit_si fscratches, sljit_si fsaveds, sljit_si local_size)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_set_context(struct sljit_compiler *compiler,
+ sljit_s32 options, sljit_s32 args, sljit_s32 scratches, sljit_s32 saveds,
+ sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
{
CHECK_ERROR();
CHECK(check_sljit_set_context(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size));
@@ -205,10 +205,10 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_set_context(struct sljit_compiler *compi
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_return(struct sljit_compiler *compiler, sljit_si op, sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 src, sljit_sw srcw)
{
- sljit_si size;
- sljit_ub *inst;
+ sljit_s32 size;
+ sljit_u8 *inst;
CHECK_ERROR();
CHECK(check_sljit_emit_return(compiler, op, src, srcw));
@@ -223,7 +223,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_return(struct sljit_compiler *compi
#if !defined(__APPLE__)
if (compiler->options & SLJIT_DOUBLE_ALIGNMENT) {
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 3);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 3);
FAIL_IF(!inst);
INC_SIZE(3);
@@ -242,7 +242,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_return(struct sljit_compiler *compi
if (compiler->args > 0)
size += 2;
#endif
- inst = (sljit_ub*)ensure_buf(compiler, 1 + size);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
FAIL_IF(!inst);
INC_SIZE(size);
@@ -271,16 +271,16 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_return(struct sljit_compiler *compi
/* --------------------------------------------------------------------- */
/* Size contains the flags as well. */
-static sljit_ub* emit_x86_instruction(struct sljit_compiler *compiler, sljit_si size,
+static sljit_u8* emit_x86_instruction(struct sljit_compiler *compiler, sljit_s32 size,
/* The register or immediate operand. */
- sljit_si a, sljit_sw imma,
+ sljit_s32 a, sljit_sw imma,
/* The general operand (not immediate). */
- sljit_si b, sljit_sw immb)
+ sljit_s32 b, sljit_sw immb)
{
- sljit_ub *inst;
- sljit_ub *buf_ptr;
- sljit_si flags = size & ~0xf;
- sljit_si inst_size;
+ sljit_u8 *inst;
+ sljit_u8 *buf_ptr;
+ sljit_s32 flags = size & ~0xf;
+ sljit_s32 inst_size;
/* Both cannot be switched on. */
SLJIT_ASSERT((flags & (EX86_BIN_INS | EX86_SHIFT_INS)) != (EX86_BIN_INS | EX86_SHIFT_INS));
@@ -310,7 +310,7 @@ static sljit_ub* emit_x86_instruction(struct sljit_compiler *compiler, sljit_si
else if (immb != 0 && !(b & OFFS_REG_MASK)) {
/* Immediate operand. */
if (immb <= 127 && immb >= -128)
- inst_size += sizeof(sljit_sb);
+ inst_size += sizeof(sljit_s8);
else
inst_size += sizeof(sljit_sw);
}
@@ -347,7 +347,7 @@ static sljit_ub* emit_x86_instruction(struct sljit_compiler *compiler, sljit_si
else
SLJIT_ASSERT(!(flags & EX86_SHIFT_INS) || a == SLJIT_PREF_SHIFT_REG);
- inst = (sljit_ub*)ensure_buf(compiler, 1 + inst_size);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + inst_size);
PTR_FAIL_IF(!inst);
/* Encoding the byte. */
@@ -438,12 +438,12 @@ static sljit_ub* emit_x86_instruction(struct sljit_compiler *compiler, sljit_si
/* Call / return instructions */
/* --------------------------------------------------------------------- */
-static SLJIT_INLINE sljit_si call_with_args(struct sljit_compiler *compiler, sljit_si type)
+static SLJIT_INLINE sljit_s32 call_with_args(struct sljit_compiler *compiler, sljit_s32 type)
{
- sljit_ub *inst;
+ sljit_u8 *inst;
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
- inst = (sljit_ub*)ensure_buf(compiler, type >= SLJIT_CALL3 ? 1 + 2 + 1 : 1 + 2);
+ inst = (sljit_u8*)ensure_buf(compiler, type >= SLJIT_CALL3 ? 1 + 2 + 1 : 1 + 2);
FAIL_IF(!inst);
INC_SIZE(type >= SLJIT_CALL3 ? 2 + 1 : 2);
@@ -452,7 +452,7 @@ static SLJIT_INLINE sljit_si call_with_args(struct sljit_compiler *compiler, slj
*inst++ = MOV_r_rm;
*inst++ = MOD_REG | (reg_map[SLJIT_R2] << 3) | reg_map[SLJIT_R0];
#else
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 4 * (type - SLJIT_CALL0));
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 4 * (type - SLJIT_CALL0));
FAIL_IF(!inst);
INC_SIZE(4 * (type - SLJIT_CALL0));
@@ -476,9 +476,9 @@ static SLJIT_INLINE sljit_si call_with_args(struct sljit_compiler *compiler, slj
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_si dst, sljit_sw dstw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
{
- sljit_ub *inst;
+ sljit_u8 *inst;
CHECK_ERROR();
CHECK(check_sljit_emit_fast_enter(compiler, dst, dstw));
@@ -492,7 +492,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_enter(struct sljit_compiler *c
if (FAST_IS_REG(dst)) {
/* Unused dest is possible here. */
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 1);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
FAIL_IF(!inst);
INC_SIZE(1);
@@ -507,9 +507,9 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_enter(struct sljit_compiler *c
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_return(struct sljit_compiler *compiler, sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_return(struct sljit_compiler *compiler, sljit_s32 src, sljit_sw srcw)
{
- sljit_ub *inst;
+ sljit_u8 *inst;
CHECK_ERROR();
CHECK(check_sljit_emit_fast_return(compiler, src, srcw));
@@ -518,7 +518,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_return(struct sljit_compiler *
CHECK_EXTRA_REGS(src, srcw, (void)0);
if (FAST_IS_REG(src)) {
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 1 + 1);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 1 + 1);
FAIL_IF(!inst);
INC_SIZE(1 + 1);
@@ -530,13 +530,13 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_return(struct sljit_compiler *
*inst++ = GROUP_FF;
*inst |= PUSH_rm;
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 1);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
FAIL_IF(!inst);
INC_SIZE(1);
}
else {
/* SLJIT_IMM. */
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 5 + 1);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 5 + 1);
FAIL_IF(!inst);
INC_SIZE(5 + 1);
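The sljitNativeX86_32.c hunks above retype every public entry point (sljit_emit_enter, sljit_set_context, sljit_emit_return, sljit_emit_fast_enter/return) from sljit_si to sljit_s32 arguments without changing their shape. A minimal caller sketch against the renamed API follows; it is illustrative only, assumes a struct sljit_compiler * created elsewhere, and uses only names that appear in these hunks plus the standard SLJIT_ADD opcode:

/* Minimal, illustrative sketch only -- not part of this patch.  It shows a
 * caller written against the renamed public entry points (sljit_s32
 * arguments instead of sljit_si).  A valid 'struct sljit_compiler *' is
 * assumed to have been created elsewhere. */
#include "sljitLir.h"

static sljit_s32 emit_add_one(struct sljit_compiler *compiler)
{
	sljit_s32 err;

	/* Prologue: 1 argument, 1 scratch, 1 saved register, no locals. */
	err = sljit_emit_enter(compiler, 0, 1, 1, 1, 0, 0, 0);
	if (err != SLJIT_SUCCESS)
		return err;

	/* R0 = S0 + 1, using the renamed 32-bit flag SLJIT_I32_OP. */
	err = sljit_emit_op2(compiler, SLJIT_ADD | SLJIT_I32_OP,
		SLJIT_R0, 0, SLJIT_S0, 0, SLJIT_IMM, 1);
	if (err != SLJIT_SUCCESS)
		return err;

	/* Return R0 as a 32-bit value via the renamed SLJIT_MOV_S32. */
	return sljit_emit_return(compiler, SLJIT_MOV_S32, SLJIT_R0, 0);
}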
diff --git a/src/3rdparty/pcre/sljit/sljitNativeX86_64.c b/src/3rdparty/pcre/sljit/sljitNativeX86_64.c
index 1790d8a4d0..6ea27e5513 100644
--- a/src/3rdparty/pcre/sljit/sljitNativeX86_64.c
+++ b/src/3rdparty/pcre/sljit/sljitNativeX86_64.c
@@ -26,11 +26,11 @@
/* x86 64-bit arch dependent functions. */
-static sljit_si emit_load_imm64(struct sljit_compiler *compiler, sljit_si reg, sljit_sw imm)
+static sljit_s32 emit_load_imm64(struct sljit_compiler *compiler, sljit_s32 reg, sljit_sw imm)
{
- sljit_ub *inst;
+ sljit_u8 *inst;
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 2 + sizeof(sljit_sw));
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 2 + sizeof(sljit_sw));
FAIL_IF(!inst);
INC_SIZE(2 + sizeof(sljit_sw));
*inst++ = REX_W | ((reg_map[reg] <= 7) ? 0 : REX_B);
@@ -39,7 +39,7 @@ static sljit_si emit_load_imm64(struct sljit_compiler *compiler, sljit_si reg, s
return SLJIT_SUCCESS;
}
-static sljit_ub* generate_far_jump_code(struct sljit_jump *jump, sljit_ub *code_ptr, sljit_si type)
+static sljit_u8* generate_far_jump_code(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_s32 type)
{
if (type < SLJIT_JUMP) {
/* Invert type. */
@@ -65,9 +65,9 @@ static sljit_ub* generate_far_jump_code(struct sljit_jump *jump, sljit_ub *code_
return code_ptr;
}
-static sljit_ub* generate_fixed_jump(sljit_ub *code_ptr, sljit_sw addr, sljit_si type)
+static sljit_u8* generate_fixed_jump(sljit_u8 *code_ptr, sljit_sw addr, sljit_s32 type)
{
- sljit_sw delta = addr - ((sljit_sw)code_ptr + 1 + sizeof(sljit_si));
+ sljit_sw delta = addr - ((sljit_sw)code_ptr + 1 + sizeof(sljit_s32));
if (delta <= HALFWORD_MAX && delta >= HALFWORD_MIN) {
*code_ptr++ = (type == 2) ? CALL_i32 : JMP_i32;
@@ -87,12 +87,12 @@ static sljit_ub* generate_fixed_jump(sljit_ub *code_ptr, sljit_sw addr, sljit_si
return code_ptr;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compiler *compiler,
- sljit_si options, sljit_si args, sljit_si scratches, sljit_si saveds,
- sljit_si fscratches, sljit_si fsaveds, sljit_si local_size)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compiler,
+ sljit_s32 options, sljit_s32 args, sljit_s32 scratches, sljit_s32 saveds,
+ sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
{
- sljit_si i, tmp, size, saved_register_size;
- sljit_ub *inst;
+ sljit_s32 i, tmp, size, saved_register_size;
+ sljit_u8 *inst;
CHECK_ERROR();
CHECK(check_sljit_emit_enter(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size));
@@ -106,7 +106,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compiler *compil
tmp = saveds < SLJIT_NUMBER_OF_SAVED_REGISTERS ? (SLJIT_S0 + 1 - saveds) : SLJIT_FIRST_SAVED_REG;
for (i = SLJIT_S0; i >= tmp; i--) {
size = reg_map[i] >= 8 ? 2 : 1;
- inst = (sljit_ub*)ensure_buf(compiler, 1 + size);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
FAIL_IF(!inst);
INC_SIZE(size);
if (reg_map[i] >= 8)
@@ -116,7 +116,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compiler *compil
for (i = scratches; i >= SLJIT_FIRST_SAVED_REG; i--) {
size = reg_map[i] >= 8 ? 2 : 1;
- inst = (sljit_ub*)ensure_buf(compiler, 1 + size);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
FAIL_IF(!inst);
INC_SIZE(size);
if (reg_map[i] >= 8)
@@ -126,7 +126,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compiler *compil
if (args > 0) {
size = args * 3;
- inst = (sljit_ub*)ensure_buf(compiler, 1 + size);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
FAIL_IF(!inst);
INC_SIZE(size);
@@ -172,9 +172,9 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compiler *compil
#ifdef _WIN64
if (local_size > 1024) {
/* Allocate stack for the callback, which grows the stack. */
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 4 + (3 + sizeof(sljit_si)));
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 4 + (3 + sizeof(sljit_s32)));
FAIL_IF(!inst);
- INC_SIZE(4 + (3 + sizeof(sljit_si)));
+ INC_SIZE(4 + (3 + sizeof(sljit_s32)));
*inst++ = REX_W;
*inst++ = GROUP_BINARY_83;
*inst++ = MOD_REG | SUB | 4;
@@ -193,7 +193,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compiler *compil
*inst++ = REX_W;
*inst++ = MOV_rm_i32;
*inst++ = MOD_REG | reg_lmap[SLJIT_R0];
- *(sljit_si*)inst = local_size;
+ *(sljit_s32*)inst = local_size;
#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) \
|| (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
compiler->skip_checks = 1;
@@ -204,7 +204,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compiler *compil
SLJIT_ASSERT(local_size > 0);
if (local_size <= 127) {
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 4);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
FAIL_IF(!inst);
INC_SIZE(4);
*inst++ = REX_W;
@@ -213,35 +213,35 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compiler *compil
*inst++ = local_size;
}
else {
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 7);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 7);
FAIL_IF(!inst);
INC_SIZE(7);
*inst++ = REX_W;
*inst++ = GROUP_BINARY_81;
*inst++ = MOD_REG | SUB | 4;
- *(sljit_si*)inst = local_size;
- inst += sizeof(sljit_si);
+ *(sljit_s32*)inst = local_size;
+ inst += sizeof(sljit_s32);
}
#ifdef _WIN64
/* Save xmm6 register: movaps [rsp + 0x20], xmm6 */
if (fscratches >= 6 || fsaveds >= 1) {
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 5);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 5);
FAIL_IF(!inst);
INC_SIZE(5);
*inst++ = GROUP_0F;
- *(sljit_si*)inst = 0x20247429;
+ *(sljit_s32*)inst = 0x20247429;
}
#endif
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_set_context(struct sljit_compiler *compiler,
- sljit_si options, sljit_si args, sljit_si scratches, sljit_si saveds,
- sljit_si fscratches, sljit_si fsaveds, sljit_si local_size)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_set_context(struct sljit_compiler *compiler,
+ sljit_s32 options, sljit_s32 args, sljit_s32 scratches, sljit_s32 saveds,
+ sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
{
- sljit_si saved_register_size;
+ sljit_s32 saved_register_size;
CHECK_ERROR();
CHECK(check_sljit_set_context(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size));
@@ -253,10 +253,10 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_set_context(struct sljit_compiler *compi
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_return(struct sljit_compiler *compiler, sljit_si op, sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 src, sljit_sw srcw)
{
- sljit_si i, tmp, size;
- sljit_ub *inst;
+ sljit_s32 i, tmp, size;
+ sljit_u8 *inst;
CHECK_ERROR();
CHECK(check_sljit_emit_return(compiler, op, src, srcw));
@@ -267,17 +267,17 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_return(struct sljit_compiler *compi
#ifdef _WIN64
/* Restore xmm6 register: movaps xmm6, [rsp + 0x20] */
if (compiler->fscratches >= 6 || compiler->fsaveds >= 1) {
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 5);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 5);
FAIL_IF(!inst);
INC_SIZE(5);
*inst++ = GROUP_0F;
- *(sljit_si*)inst = 0x20247428;
+ *(sljit_s32*)inst = 0x20247428;
}
#endif
SLJIT_ASSERT(compiler->local_size > 0);
if (compiler->local_size <= 127) {
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 4);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
FAIL_IF(!inst);
INC_SIZE(4);
*inst++ = REX_W;
@@ -286,19 +286,19 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_return(struct sljit_compiler *compi
*inst = compiler->local_size;
}
else {
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 7);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 7);
FAIL_IF(!inst);
INC_SIZE(7);
*inst++ = REX_W;
*inst++ = GROUP_BINARY_81;
*inst++ = MOD_REG | ADD | 4;
- *(sljit_si*)inst = compiler->local_size;
+ *(sljit_s32*)inst = compiler->local_size;
}
tmp = compiler->scratches;
for (i = SLJIT_FIRST_SAVED_REG; i <= tmp; i++) {
size = reg_map[i] >= 8 ? 2 : 1;
- inst = (sljit_ub*)ensure_buf(compiler, 1 + size);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
FAIL_IF(!inst);
INC_SIZE(size);
if (reg_map[i] >= 8)
@@ -309,7 +309,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_return(struct sljit_compiler *compi
tmp = compiler->saveds < SLJIT_NUMBER_OF_SAVED_REGISTERS ? (SLJIT_S0 + 1 - compiler->saveds) : SLJIT_FIRST_SAVED_REG;
for (i = tmp; i <= SLJIT_S0; i++) {
size = reg_map[i] >= 8 ? 2 : 1;
- inst = (sljit_ub*)ensure_buf(compiler, 1 + size);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
FAIL_IF(!inst);
INC_SIZE(size);
if (reg_map[i] >= 8)
@@ -317,7 +317,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_return(struct sljit_compiler *compi
POP_REG(reg_lmap[i]);
}
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 1);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
FAIL_IF(!inst);
INC_SIZE(1);
RET();
@@ -328,32 +328,32 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_return(struct sljit_compiler *compi
/* Operators */
/* --------------------------------------------------------------------- */
-static sljit_si emit_do_imm32(struct sljit_compiler *compiler, sljit_ub rex, sljit_ub opcode, sljit_sw imm)
+static sljit_s32 emit_do_imm32(struct sljit_compiler *compiler, sljit_u8 rex, sljit_u8 opcode, sljit_sw imm)
{
- sljit_ub *inst;
- sljit_si length = 1 + (rex ? 1 : 0) + sizeof(sljit_si);
+ sljit_u8 *inst;
+ sljit_s32 length = 1 + (rex ? 1 : 0) + sizeof(sljit_s32);
- inst = (sljit_ub*)ensure_buf(compiler, 1 + length);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + length);
FAIL_IF(!inst);
INC_SIZE(length);
if (rex)
*inst++ = rex;
*inst++ = opcode;
- *(sljit_si*)inst = imm;
+ *(sljit_s32*)inst = imm;
return SLJIT_SUCCESS;
}
-static sljit_ub* emit_x86_instruction(struct sljit_compiler *compiler, sljit_si size,
+static sljit_u8* emit_x86_instruction(struct sljit_compiler *compiler, sljit_s32 size,
/* The register or immediate operand. */
- sljit_si a, sljit_sw imma,
+ sljit_s32 a, sljit_sw imma,
/* The general operand (not immediate). */
- sljit_si b, sljit_sw immb)
+ sljit_s32 b, sljit_sw immb)
{
- sljit_ub *inst;
- sljit_ub *buf_ptr;
- sljit_ub rex = 0;
- sljit_si flags = size & ~0xf;
- sljit_si inst_size;
+ sljit_u8 *inst;
+ sljit_u8 *buf_ptr;
+ sljit_u8 rex = 0;
+ sljit_s32 flags = size & ~0xf;
+ sljit_s32 inst_size;
/* The immediate operand must be 32 bit. */
SLJIT_ASSERT(!(a & SLJIT_IMM) || compiler->mode32 || IS_HALFWORD(imma));
@@ -400,7 +400,7 @@ static sljit_ub* emit_x86_instruction(struct sljit_compiler *compiler, sljit_si
}
if ((b & REG_MASK) == SLJIT_UNUSED)
- inst_size += 1 + sizeof(sljit_si); /* SIB byte required to avoid RIP based addressing. */
+ inst_size += 1 + sizeof(sljit_s32); /* SIB byte required to avoid RIP based addressing. */
else {
if (reg_map[b & REG_MASK] >= 8)
rex |= REX_B;
@@ -408,12 +408,12 @@ static sljit_ub* emit_x86_instruction(struct sljit_compiler *compiler, sljit_si
if (immb != 0 && (!(b & OFFS_REG_MASK) || (b & OFFS_REG_MASK) == TO_OFFS_REG(SLJIT_SP))) {
/* Immediate operand. */
if (immb <= 127 && immb >= -128)
- inst_size += sizeof(sljit_sb);
+ inst_size += sizeof(sljit_s8);
else
- inst_size += sizeof(sljit_si);
+ inst_size += sizeof(sljit_s32);
}
else if (reg_lmap[b & REG_MASK] == 5)
- inst_size += sizeof(sljit_sb);
+ inst_size += sizeof(sljit_s8);
if ((b & OFFS_REG_MASK) != SLJIT_UNUSED) {
inst_size += 1; /* SIB byte. */
@@ -444,7 +444,7 @@ static sljit_ub* emit_x86_instruction(struct sljit_compiler *compiler, sljit_si
else if (flags & EX86_HALF_ARG)
inst_size += sizeof(short);
else
- inst_size += sizeof(sljit_si);
+ inst_size += sizeof(sljit_s32);
}
else {
SLJIT_ASSERT(!(flags & EX86_SHIFT_INS) || a == SLJIT_PREF_SHIFT_REG);
@@ -456,7 +456,7 @@ static sljit_ub* emit_x86_instruction(struct sljit_compiler *compiler, sljit_si
if (rex)
inst_size++;
- inst = (sljit_ub*)ensure_buf(compiler, 1 + inst_size);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + inst_size);
PTR_FAIL_IF(!inst);
/* Encoding the byte. */
@@ -516,8 +516,8 @@ static sljit_ub* emit_x86_instruction(struct sljit_compiler *compiler, sljit_si
if (immb <= 127 && immb >= -128)
*buf_ptr++ = immb; /* 8 bit displacement. */
else {
- *(sljit_si*)buf_ptr = immb; /* 32 bit displacement. */
- buf_ptr += sizeof(sljit_si);
+ *(sljit_s32*)buf_ptr = immb; /* 32 bit displacement. */
+ buf_ptr += sizeof(sljit_s32);
}
}
}
@@ -533,8 +533,8 @@ static sljit_ub* emit_x86_instruction(struct sljit_compiler *compiler, sljit_si
else {
*buf_ptr++ |= 0x04;
*buf_ptr++ = 0x25;
- *(sljit_si*)buf_ptr = immb; /* 32 bit displacement. */
- buf_ptr += sizeof(sljit_si);
+ *(sljit_s32*)buf_ptr = immb; /* 32 bit displacement. */
+ buf_ptr += sizeof(sljit_s32);
}
if (a & SLJIT_IMM) {
@@ -543,7 +543,7 @@ static sljit_ub* emit_x86_instruction(struct sljit_compiler *compiler, sljit_si
else if (flags & EX86_HALF_ARG)
*(short*)buf_ptr = imma;
else if (!(flags & EX86_SHIFT_INS))
- *(sljit_si*)buf_ptr = imma;
+ *(sljit_s32*)buf_ptr = imma;
}
return !(flags & EX86_SHIFT_INS) ? inst : (inst + 1);
@@ -553,14 +553,14 @@ static sljit_ub* emit_x86_instruction(struct sljit_compiler *compiler, sljit_si
/* Call / return instructions */
/* --------------------------------------------------------------------- */
-static SLJIT_INLINE sljit_si call_with_args(struct sljit_compiler *compiler, sljit_si type)
+static SLJIT_INLINE sljit_s32 call_with_args(struct sljit_compiler *compiler, sljit_s32 type)
{
- sljit_ub *inst;
+ sljit_u8 *inst;
#ifndef _WIN64
SLJIT_COMPILE_ASSERT(reg_map[SLJIT_R1] == 6 && reg_map[SLJIT_R0] < 8 && reg_map[SLJIT_R2] < 8, args_registers);
- inst = (sljit_ub*)ensure_buf(compiler, 1 + ((type < SLJIT_CALL3) ? 3 : 6));
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + ((type < SLJIT_CALL3) ? 3 : 6));
FAIL_IF(!inst);
INC_SIZE((type < SLJIT_CALL3) ? 3 : 6);
if (type >= SLJIT_CALL3) {
@@ -574,7 +574,7 @@ static SLJIT_INLINE sljit_si call_with_args(struct sljit_compiler *compiler, slj
#else
SLJIT_COMPILE_ASSERT(reg_map[SLJIT_R1] == 2 && reg_map[SLJIT_R0] < 8 && reg_map[SLJIT_R2] < 8, args_registers);
- inst = (sljit_ub*)ensure_buf(compiler, 1 + ((type < SLJIT_CALL3) ? 3 : 6));
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + ((type < SLJIT_CALL3) ? 3 : 6));
FAIL_IF(!inst);
INC_SIZE((type < SLJIT_CALL3) ? 3 : 6);
if (type >= SLJIT_CALL3) {
@@ -589,9 +589,9 @@ static SLJIT_INLINE sljit_si call_with_args(struct sljit_compiler *compiler, slj
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_si dst, sljit_sw dstw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
{
- sljit_ub *inst;
+ sljit_u8 *inst;
CHECK_ERROR();
CHECK(check_sljit_emit_fast_enter(compiler, dst, dstw));
@@ -603,14 +603,14 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_enter(struct sljit_compiler *c
if (FAST_IS_REG(dst)) {
if (reg_map[dst] < 8) {
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 1);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
FAIL_IF(!inst);
INC_SIZE(1);
POP_REG(reg_lmap[dst]);
return SLJIT_SUCCESS;
}
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 2);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
FAIL_IF(!inst);
INC_SIZE(2);
*inst++ = REX_B;
@@ -626,9 +626,9 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_enter(struct sljit_compiler *c
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_return(struct sljit_compiler *compiler, sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_return(struct sljit_compiler *compiler, sljit_s32 src, sljit_sw srcw)
{
- sljit_ub *inst;
+ sljit_u8 *inst;
CHECK_ERROR();
CHECK(check_sljit_emit_fast_return(compiler, src, srcw));
@@ -641,14 +641,14 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_return(struct sljit_compiler *
if (FAST_IS_REG(src)) {
if (reg_map[src] < 8) {
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 1 + 1);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 1 + 1);
FAIL_IF(!inst);
INC_SIZE(1 + 1);
PUSH_REG(reg_lmap[src]);
}
else {
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 2 + 1);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 2 + 1);
FAIL_IF(!inst);
INC_SIZE(2 + 1);
@@ -664,20 +664,20 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_return(struct sljit_compiler *
*inst++ = GROUP_FF;
*inst |= PUSH_rm;
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 1);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
FAIL_IF(!inst);
INC_SIZE(1);
}
else {
SLJIT_ASSERT(IS_HALFWORD(srcw));
/* SLJIT_IMM. */
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 5 + 1);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 5 + 1);
FAIL_IF(!inst);
INC_SIZE(5 + 1);
*inst++ = PUSH_i32;
- *(sljit_si*)inst = srcw;
- inst += sizeof(sljit_si);
+ *(sljit_s32*)inst = srcw;
+ inst += sizeof(sljit_s32);
}
RET();
@@ -689,12 +689,12 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_return(struct sljit_compiler *
/* Extend input */
/* --------------------------------------------------------------------- */
-static sljit_si emit_mov_int(struct sljit_compiler *compiler, sljit_si sign,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw)
+static sljit_s32 emit_mov_int(struct sljit_compiler *compiler, sljit_s32 sign,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
{
- sljit_ub* inst;
- sljit_si dst_r;
+ sljit_u8* inst;
+ sljit_s32 dst_r;
compiler->mode32 = 0;
@@ -704,7 +704,7 @@ static sljit_si emit_mov_int(struct sljit_compiler *compiler, sljit_si sign,
if (src & SLJIT_IMM) {
if (FAST_IS_REG(dst)) {
if (sign || ((sljit_uw)srcw <= 0x7fffffff)) {
- inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, (sljit_sw)(sljit_si)srcw, dst, dstw);
+ inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, (sljit_sw)(sljit_s32)srcw, dst, dstw);
FAIL_IF(!inst);
*inst = MOV_rm_i32;
return SLJIT_SUCCESS;
@@ -712,7 +712,7 @@ static sljit_si emit_mov_int(struct sljit_compiler *compiler, sljit_si sign,
return emit_load_imm64(compiler, dst, srcw);
}
compiler->mode32 = 1;
- inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, (sljit_sw)(sljit_si)srcw, dst, dstw);
+ inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, (sljit_sw)(sljit_s32)srcw, dst, dstw);
FAIL_IF(!inst);
*inst = MOV_rm_i32;
compiler->mode32 = 0;
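The (sljit_sw)(sljit_s32)srcw casts in emit_mov_int above perform the 32-to-64-bit sign extension purely through standard C conversions: narrowing the immediate to the new 32-bit signed type and widening it back fills the upper half with the sign bit. A stand-alone sketch of the same idiom (not taken from the patch; plain int64_t/int32_t stand in for sljit_sw/sljit_s32, and the unsigned variant is shown only for contrast):

/* Stand-alone sketch of the sign-extension idiom used in emit_mov_int. */
#include <stdint.h>
#include <stdio.h>

int main(void)
{
	int64_t srcw = (int64_t)0xfffffffff0000000LL;  /* only the low 32 bits matter */

	int64_t sign_ext = (int64_t)(int32_t)srcw;     /* like (sljit_sw)(sljit_s32)srcw */
	int64_t zero_ext = (int64_t)(uint32_t)srcw;    /* unsigned cast would zero-extend */

	printf("sign-extended: %016llx\n", (unsigned long long)sign_ext);
	printf("zero-extended: %016llx\n", (unsigned long long)zero_ext);
	return 0;
}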
diff --git a/src/3rdparty/pcre/sljit/sljitNativeX86_common.c b/src/3rdparty/pcre/sljit/sljitNativeX86_common.c
index f03393a2d3..4aa5020623 100644
--- a/src/3rdparty/pcre/sljit/sljitNativeX86_common.c
+++ b/src/3rdparty/pcre/sljit/sljitNativeX86_common.c
@@ -24,7 +24,7 @@
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
-SLJIT_API_FUNC_ATTRIBUTE SLJIT_CONST char* sljit_get_platform_name(void)
+SLJIT_API_FUNC_ATTRIBUTE const char* sljit_get_platform_name(void)
{
return "x86" SLJIT_CPUINFO;
}
@@ -66,7 +66,7 @@ SLJIT_API_FUNC_ATTRIBUTE SLJIT_CONST char* sljit_get_platform_name(void)
/* Last register + 1. */
#define TMP_REG1 (SLJIT_NUMBER_OF_REGISTERS + 2)
-static SLJIT_CONST sljit_ub reg_map[SLJIT_NUMBER_OF_REGISTERS + 3] = {
+static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 3] = {
0, 0, 2, 1, 0, 0, 0, 0, 7, 6, 3, 4, 5
};
@@ -89,20 +89,20 @@ static SLJIT_CONST sljit_ub reg_map[SLJIT_NUMBER_OF_REGISTERS + 3] = {
therefore r12 is better for SAVED_EREG than SAVED_REG. */
#ifndef _WIN64
/* 1st passed in rdi, 2nd argument passed in rsi, 3rd in rdx. */
-static SLJIT_CONST sljit_ub reg_map[SLJIT_NUMBER_OF_REGISTERS + 5] = {
+static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 5] = {
0, 0, 6, 1, 8, 11, 10, 12, 5, 13, 14, 15, 3, 4, 2, 7, 9
};
/* low-map. reg_map & 0x7. */
-static SLJIT_CONST sljit_ub reg_lmap[SLJIT_NUMBER_OF_REGISTERS + 5] = {
+static const sljit_u8 reg_lmap[SLJIT_NUMBER_OF_REGISTERS + 5] = {
0, 0, 6, 1, 0, 3, 2, 4, 5, 5, 6, 7, 3, 4, 2, 7, 1
};
#else
/* 1st passed in rcx, 2nd argument passed in rdx, 3rd in r8. */
-static SLJIT_CONST sljit_ub reg_map[SLJIT_NUMBER_OF_REGISTERS + 5] = {
+static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 5] = {
0, 0, 2, 1, 11, 12, 5, 13, 14, 15, 7, 6, 3, 4, 10, 8, 9
};
/* low-map. reg_map & 0x7. */
-static SLJIT_CONST sljit_ub reg_lmap[SLJIT_NUMBER_OF_REGISTERS + 5] = {
+static const sljit_u8 reg_lmap[SLJIT_NUMBER_OF_REGISTERS + 5] = {
0, 0, 2, 1, 3, 4, 5, 5, 6, 7, 7, 6, 3, 4, 2, 0, 1
};
#endif
@@ -269,9 +269,9 @@ static SLJIT_CONST sljit_ub reg_lmap[SLJIT_NUMBER_OF_REGISTERS + 5] = {
built-in CPU features. Therefore they can be overwritten by different threads
if they detect the CPU features in the same time. */
#if (defined SLJIT_DETECT_SSE2 && SLJIT_DETECT_SSE2)
-static sljit_si cpu_has_sse2 = -1;
+static sljit_s32 cpu_has_sse2 = -1;
#endif
-static sljit_si cpu_has_cmov = -1;
+static sljit_s32 cpu_has_cmov = -1;
#ifdef _WIN32_WCE
#include <cmnintrin.h>
@@ -281,13 +281,13 @@ static sljit_si cpu_has_cmov = -1;
static void get_cpu_features(void)
{
- sljit_ui features;
+ sljit_u32 features;
#if defined(_MSC_VER) && _MSC_VER >= 1400
int CPUInfo[4];
__cpuid(CPUInfo, 1);
- features = (sljit_ui)CPUInfo[3];
+ features = (sljit_u32)CPUInfo[3];
#elif defined(__GNUC__) || defined(__INTEL_COMPILER) || defined(__SUNPRO_C) || defined(__ghs)
@@ -330,31 +330,31 @@ static void get_cpu_features(void)
cpu_has_cmov = (features >> 15) & 0x1;
}
-static sljit_ub get_jump_code(sljit_si type)
+static sljit_u8 get_jump_code(sljit_s32 type)
{
switch (type) {
case SLJIT_EQUAL:
- case SLJIT_D_EQUAL:
+ case SLJIT_EQUAL_F64:
return 0x84 /* je */;
case SLJIT_NOT_EQUAL:
- case SLJIT_D_NOT_EQUAL:
+ case SLJIT_NOT_EQUAL_F64:
return 0x85 /* jne */;
case SLJIT_LESS:
- case SLJIT_D_LESS:
+ case SLJIT_LESS_F64:
return 0x82 /* jc */;
case SLJIT_GREATER_EQUAL:
- case SLJIT_D_GREATER_EQUAL:
+ case SLJIT_GREATER_EQUAL_F64:
return 0x83 /* jae */;
case SLJIT_GREATER:
- case SLJIT_D_GREATER:
+ case SLJIT_GREATER_F64:
return 0x87 /* jnbe */;
case SLJIT_LESS_EQUAL:
- case SLJIT_D_LESS_EQUAL:
+ case SLJIT_LESS_EQUAL_F64:
return 0x86 /* jbe */;
case SLJIT_SIG_LESS:
@@ -377,24 +377,24 @@ static sljit_ub get_jump_code(sljit_si type)
case SLJIT_MUL_NOT_OVERFLOW:
return 0x81 /* jno */;
- case SLJIT_D_UNORDERED:
+ case SLJIT_UNORDERED_F64:
return 0x8a /* jp */;
- case SLJIT_D_ORDERED:
+ case SLJIT_ORDERED_F64:
return 0x8b /* jpo */;
}
return 0;
}
-static sljit_ub* generate_far_jump_code(struct sljit_jump *jump, sljit_ub *code_ptr, sljit_si type);
+static sljit_u8* generate_far_jump_code(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_s32 type);
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
-static sljit_ub* generate_fixed_jump(sljit_ub *code_ptr, sljit_sw addr, sljit_si type);
+static sljit_u8* generate_fixed_jump(sljit_u8 *code_ptr, sljit_sw addr, sljit_s32 type);
#endif
-static sljit_ub* generate_near_jump_code(struct sljit_jump *jump, sljit_ub *code_ptr, sljit_ub *code, sljit_si type)
+static sljit_u8* generate_near_jump_code(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_u8 *code, sljit_s32 type)
{
- sljit_si short_jump;
+ sljit_s32 short_jump;
sljit_uw label_addr;
if (jump->flags & JUMP_LABEL)
@@ -432,13 +432,13 @@ static sljit_ub* generate_near_jump_code(struct sljit_jump *jump, sljit_ub *code
if (short_jump) {
jump->flags |= PATCH_MB;
- code_ptr += sizeof(sljit_sb);
+ code_ptr += sizeof(sljit_s8);
} else {
jump->flags |= PATCH_MW;
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
code_ptr += sizeof(sljit_sw);
#else
- code_ptr += sizeof(sljit_si);
+ code_ptr += sizeof(sljit_s32);
#endif
}
@@ -448,11 +448,11 @@ static sljit_ub* generate_near_jump_code(struct sljit_jump *jump, sljit_ub *code
SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compiler)
{
struct sljit_memory_fragment *buf;
- sljit_ub *code;
- sljit_ub *code_ptr;
- sljit_ub *buf_ptr;
- sljit_ub *buf_end;
- sljit_ub len;
+ sljit_u8 *code;
+ sljit_u8 *code_ptr;
+ sljit_u8 *buf_ptr;
+ sljit_u8 *buf_end;
+ sljit_u8 len;
struct sljit_label *label;
struct sljit_jump *jump;
@@ -463,7 +463,7 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
reverse_buf(compiler);
/* Second code generation pass. */
- code = (sljit_ub*)SLJIT_MALLOC_EXEC(compiler->size);
+ code = (sljit_u8*)SLJIT_MALLOC_EXEC(compiler->size);
PTR_FAIL_WITH_EXEC_IF(code);
buf = compiler->buf;
@@ -526,23 +526,23 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
jump = compiler->jumps;
while (jump) {
if (jump->flags & PATCH_MB) {
- SLJIT_ASSERT((sljit_sw)(jump->u.label->addr - (jump->addr + sizeof(sljit_sb))) >= -128 && (sljit_sw)(jump->u.label->addr - (jump->addr + sizeof(sljit_sb))) <= 127);
- *(sljit_ub*)jump->addr = (sljit_ub)(jump->u.label->addr - (jump->addr + sizeof(sljit_sb)));
+ SLJIT_ASSERT((sljit_sw)(jump->u.label->addr - (jump->addr + sizeof(sljit_s8))) >= -128 && (sljit_sw)(jump->u.label->addr - (jump->addr + sizeof(sljit_s8))) <= 127);
+ *(sljit_u8*)jump->addr = (sljit_u8)(jump->u.label->addr - (jump->addr + sizeof(sljit_s8)));
} else if (jump->flags & PATCH_MW) {
if (jump->flags & JUMP_LABEL) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
*(sljit_sw*)jump->addr = (sljit_sw)(jump->u.label->addr - (jump->addr + sizeof(sljit_sw)));
#else
- SLJIT_ASSERT((sljit_sw)(jump->u.label->addr - (jump->addr + sizeof(sljit_si))) >= HALFWORD_MIN && (sljit_sw)(jump->u.label->addr - (jump->addr + sizeof(sljit_si))) <= HALFWORD_MAX);
- *(sljit_si*)jump->addr = (sljit_si)(jump->u.label->addr - (jump->addr + sizeof(sljit_si)));
+ SLJIT_ASSERT((sljit_sw)(jump->u.label->addr - (jump->addr + sizeof(sljit_s32))) >= HALFWORD_MIN && (sljit_sw)(jump->u.label->addr - (jump->addr + sizeof(sljit_s32))) <= HALFWORD_MAX);
+ *(sljit_s32*)jump->addr = (sljit_s32)(jump->u.label->addr - (jump->addr + sizeof(sljit_s32)));
#endif
}
else {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
*(sljit_sw*)jump->addr = (sljit_sw)(jump->u.target - (jump->addr + sizeof(sljit_sw)));
#else
- SLJIT_ASSERT((sljit_sw)(jump->u.target - (jump->addr + sizeof(sljit_si))) >= HALFWORD_MIN && (sljit_sw)(jump->u.target - (jump->addr + sizeof(sljit_si))) <= HALFWORD_MAX);
- *(sljit_si*)jump->addr = (sljit_si)(jump->u.target - (jump->addr + sizeof(sljit_si)));
+ SLJIT_ASSERT((sljit_sw)(jump->u.target - (jump->addr + sizeof(sljit_s32))) >= HALFWORD_MIN && (sljit_sw)(jump->u.target - (jump->addr + sizeof(sljit_s32))) <= HALFWORD_MAX);
+ *(sljit_s32*)jump->addr = (sljit_s32)(jump->u.target - (jump->addr + sizeof(sljit_s32)));
#endif
}
}
@@ -565,32 +565,32 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
/* Operators */
/* --------------------------------------------------------------------- */
-static sljit_si emit_cum_binary(struct sljit_compiler *compiler,
- sljit_ub op_rm, sljit_ub op_mr, sljit_ub op_imm, sljit_ub op_eax_imm,
- sljit_si dst, sljit_sw dstw,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w);
+static sljit_s32 emit_cum_binary(struct sljit_compiler *compiler,
+ sljit_u8 op_rm, sljit_u8 op_mr, sljit_u8 op_imm, sljit_u8 op_eax_imm,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w);
-static sljit_si emit_non_cum_binary(struct sljit_compiler *compiler,
- sljit_ub op_rm, sljit_ub op_mr, sljit_ub op_imm, sljit_ub op_eax_imm,
- sljit_si dst, sljit_sw dstw,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w);
+static sljit_s32 emit_non_cum_binary(struct sljit_compiler *compiler,
+ sljit_u8 op_rm, sljit_u8 op_mr, sljit_u8 op_imm, sljit_u8 op_eax_imm,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w);
-static sljit_si emit_mov(struct sljit_compiler *compiler,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw);
+static sljit_s32 emit_mov(struct sljit_compiler *compiler,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw);
-static SLJIT_INLINE sljit_si emit_save_flags(struct sljit_compiler *compiler)
+static SLJIT_INLINE sljit_s32 emit_save_flags(struct sljit_compiler *compiler)
{
- sljit_ub *inst;
+ sljit_u8 *inst;
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 5);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 5);
FAIL_IF(!inst);
INC_SIZE(5);
#else
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 6);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 6);
FAIL_IF(!inst);
INC_SIZE(6);
*inst++ = REX_W;
@@ -598,23 +598,23 @@ static SLJIT_INLINE sljit_si emit_save_flags(struct sljit_compiler *compiler)
*inst++ = LEA_r_m; /* lea esp/rsp, [esp/rsp + sizeof(sljit_sw)] */
*inst++ = 0x64;
*inst++ = 0x24;
- *inst++ = (sljit_ub)sizeof(sljit_sw);
+ *inst++ = (sljit_u8)sizeof(sljit_sw);
*inst++ = PUSHF;
compiler->flags_saved = 1;
return SLJIT_SUCCESS;
}
-static SLJIT_INLINE sljit_si emit_restore_flags(struct sljit_compiler *compiler, sljit_si keep_flags)
+static SLJIT_INLINE sljit_s32 emit_restore_flags(struct sljit_compiler *compiler, sljit_s32 keep_flags)
{
- sljit_ub *inst;
+ sljit_u8 *inst;
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 5);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 5);
FAIL_IF(!inst);
INC_SIZE(5);
*inst++ = POPF;
#else
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 6);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 6);
FAIL_IF(!inst);
INC_SIZE(6);
*inst++ = POPF;
@@ -623,7 +623,7 @@ static SLJIT_INLINE sljit_si emit_restore_flags(struct sljit_compiler *compiler,
*inst++ = LEA_r_m; /* lea esp/rsp, [esp/rsp - sizeof(sljit_sw)] */
*inst++ = 0x64;
*inst++ = 0x24;
- *inst++ = (sljit_ub)-(sljit_sb)sizeof(sljit_sw);
+ *inst++ = (sljit_u8)(-(sljit_s8)sizeof(sljit_sw));
compiler->flags_saved = keep_flags;
return SLJIT_SUCCESS;
}
@@ -640,7 +640,7 @@ static void SLJIT_CALL sljit_grow_stack(sljit_sw local_size)
CPU cycles if the stack is large enough. However, you don't know it in
advance, so it must always be called. I think this is a bad design in
general even if it has some reasons. */
- *(volatile sljit_si*)alloca(local_size) = 0;
+ *(volatile sljit_s32*)alloca(local_size) = 0;
}
#endif
@@ -651,11 +651,11 @@ static void SLJIT_CALL sljit_grow_stack(sljit_sw local_size)
#include "sljitNativeX86_64.c"
#endif
-static sljit_si emit_mov(struct sljit_compiler *compiler,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw)
+static sljit_s32 emit_mov(struct sljit_compiler *compiler,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
{
- sljit_ub* inst;
+ sljit_u8* inst;
if (dst == SLJIT_UNUSED) {
/* No destination, doesn't need to setup flags. */
@@ -719,11 +719,11 @@ static sljit_si emit_mov(struct sljit_compiler *compiler,
#define EMIT_MOV(compiler, dst, dstw, src, srcw) \
FAIL_IF(emit_mov(compiler, dst, dstw, src, srcw));
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op0(struct sljit_compiler *compiler, sljit_si op)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op0(struct sljit_compiler *compiler, sljit_s32 op)
{
- sljit_ub *inst;
+ sljit_u8 *inst;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- sljit_si size;
+ sljit_s32 size;
#endif
CHECK_ERROR();
@@ -731,23 +731,23 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op0(struct sljit_compiler *compiler
switch (GET_OPCODE(op)) {
case SLJIT_BREAKPOINT:
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 1);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
FAIL_IF(!inst);
INC_SIZE(1);
*inst = INT3;
break;
case SLJIT_NOP:
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 1);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
FAIL_IF(!inst);
INC_SIZE(1);
*inst = NOP;
break;
- case SLJIT_LUMUL:
- case SLJIT_LSMUL:
- case SLJIT_UDIVMOD:
- case SLJIT_SDIVMOD:
- case SLJIT_UDIVI:
- case SLJIT_SDIVI:
+ case SLJIT_LMUL_UW:
+ case SLJIT_LMUL_SW:
+ case SLJIT_DIVMOD_UW:
+ case SLJIT_DIVMOD_SW:
+ case SLJIT_DIV_UW:
+ case SLJIT_DIV_SW:
compiler->flags_saved = 0;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
#ifdef _WIN64
@@ -763,12 +763,12 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op0(struct sljit_compiler *compiler
&& reg_map[TMP_REG1] == 2,
invalid_register_assignment_for_div_mul);
#endif
- compiler->mode32 = op & SLJIT_INT_OP;
+ compiler->mode32 = op & SLJIT_I32_OP;
#endif
- SLJIT_COMPILE_ASSERT((SLJIT_UDIVMOD & 0x2) == 0 && SLJIT_UDIVI - 0x2 == SLJIT_UDIVMOD, bad_div_opcode_assignments);
+ SLJIT_COMPILE_ASSERT((SLJIT_DIVMOD_UW & 0x2) == 0 && SLJIT_DIV_UW - 0x2 == SLJIT_DIVMOD_UW, bad_div_opcode_assignments);
op = GET_OPCODE(op);
- if ((op | 0x2) == SLJIT_UDIVI) {
+ if ((op | 0x2) == SLJIT_DIV_UW) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32) || defined(_WIN64)
EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_R1, 0);
inst = emit_x86_instruction(compiler, 1, SLJIT_R1, 0, SLJIT_R1, 0);
@@ -779,24 +779,24 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op0(struct sljit_compiler *compiler
*inst = XOR_r_rm;
}
- if ((op | 0x2) == SLJIT_SDIVI) {
+ if ((op | 0x2) == SLJIT_DIV_SW) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32) || defined(_WIN64)
EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_R1, 0);
#endif
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 1);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
FAIL_IF(!inst);
INC_SIZE(1);
*inst = CDQ;
#else
if (compiler->mode32) {
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 1);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
FAIL_IF(!inst);
INC_SIZE(1);
*inst = CDQ;
} else {
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 2);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
FAIL_IF(!inst);
INC_SIZE(2);
*inst++ = REX_W;
@@ -806,27 +806,27 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op0(struct sljit_compiler *compiler
}
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 2);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
FAIL_IF(!inst);
INC_SIZE(2);
*inst++ = GROUP_F7;
- *inst = MOD_REG | ((op >= SLJIT_UDIVMOD) ? reg_map[TMP_REG1] : reg_map[SLJIT_R1]);
+ *inst = MOD_REG | ((op >= SLJIT_DIVMOD_UW) ? reg_map[TMP_REG1] : reg_map[SLJIT_R1]);
#else
#ifdef _WIN64
- size = (!compiler->mode32 || op >= SLJIT_UDIVMOD) ? 3 : 2;
+ size = (!compiler->mode32 || op >= SLJIT_DIVMOD_UW) ? 3 : 2;
#else
size = (!compiler->mode32) ? 3 : 2;
#endif
- inst = (sljit_ub*)ensure_buf(compiler, 1 + size);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
FAIL_IF(!inst);
INC_SIZE(size);
#ifdef _WIN64
if (!compiler->mode32)
- *inst++ = REX_W | ((op >= SLJIT_UDIVMOD) ? REX_B : 0);
- else if (op >= SLJIT_UDIVMOD)
+ *inst++ = REX_W | ((op >= SLJIT_DIVMOD_UW) ? REX_B : 0);
+ else if (op >= SLJIT_DIVMOD_UW)
*inst++ = REX_B;
*inst++ = GROUP_F7;
- *inst = MOD_REG | ((op >= SLJIT_UDIVMOD) ? reg_lmap[TMP_REG1] : reg_lmap[SLJIT_R1]);
+ *inst = MOD_REG | ((op >= SLJIT_DIVMOD_UW) ? reg_lmap[TMP_REG1] : reg_lmap[SLJIT_R1]);
#else
if (!compiler->mode32)
*inst++ = REX_W;
@@ -835,26 +835,26 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op0(struct sljit_compiler *compiler
#endif
#endif
switch (op) {
- case SLJIT_LUMUL:
+ case SLJIT_LMUL_UW:
*inst |= MUL;
break;
- case SLJIT_LSMUL:
+ case SLJIT_LMUL_SW:
*inst |= IMUL;
break;
- case SLJIT_UDIVMOD:
- case SLJIT_UDIVI:
+ case SLJIT_DIVMOD_UW:
+ case SLJIT_DIV_UW:
*inst |= DIV;
break;
- case SLJIT_SDIVMOD:
- case SLJIT_SDIVI:
+ case SLJIT_DIVMOD_SW:
+ case SLJIT_DIV_SW:
*inst |= IDIV;
break;
}
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) && !defined(_WIN64)
- if (op <= SLJIT_SDIVMOD)
+ if (op <= SLJIT_DIVMOD_SW)
EMIT_MOV(compiler, SLJIT_R1, 0, TMP_REG1, 0);
#else
- if (op >= SLJIT_UDIVI)
+ if (op >= SLJIT_DIV_UW)
EMIT_MOV(compiler, SLJIT_R1, 0, TMP_REG1, 0);
#endif
break;
@@ -865,20 +865,20 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op0(struct sljit_compiler *compiler
#define ENCODE_PREFIX(prefix) \
do { \
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 1); \
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 1); \
FAIL_IF(!inst); \
INC_SIZE(1); \
*inst = (prefix); \
} while (0)
-static sljit_si emit_mov_byte(struct sljit_compiler *compiler, sljit_si sign,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw)
+static sljit_s32 emit_mov_byte(struct sljit_compiler *compiler, sljit_s32 sign,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
{
- sljit_ub* inst;
- sljit_si dst_r;
+ sljit_u8* inst;
+ sljit_s32 dst_r;
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- sljit_si work_r;
+ sljit_s32 work_r;
#endif
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
@@ -1016,12 +1016,12 @@ static sljit_si emit_mov_byte(struct sljit_compiler *compiler, sljit_si sign,
return SLJIT_SUCCESS;
}
-static sljit_si emit_mov_half(struct sljit_compiler *compiler, sljit_si sign,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw)
+static sljit_s32 emit_mov_half(struct sljit_compiler *compiler, sljit_s32 sign,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
{
- sljit_ub* inst;
- sljit_si dst_r;
+ sljit_u8* inst;
+ sljit_s32 dst_r;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
compiler->mode32 = 0;
@@ -1067,11 +1067,11 @@ static sljit_si emit_mov_half(struct sljit_compiler *compiler, sljit_si sign,
return SLJIT_SUCCESS;
}
-static sljit_si emit_unary(struct sljit_compiler *compiler, sljit_ub opcode,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw)
+static sljit_s32 emit_unary(struct sljit_compiler *compiler, sljit_u8 opcode,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
{
- sljit_ub* inst;
+ sljit_u8* inst;
if (dst == SLJIT_UNUSED) {
EMIT_MOV(compiler, TMP_REG1, 0, src, srcw);
@@ -1106,11 +1106,11 @@ static sljit_si emit_unary(struct sljit_compiler *compiler, sljit_ub opcode,
return SLJIT_SUCCESS;
}
-static sljit_si emit_not_with_flags(struct sljit_compiler *compiler,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw)
+static sljit_s32 emit_not_with_flags(struct sljit_compiler *compiler,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
{
- sljit_ub* inst;
+ sljit_u8* inst;
if (dst == SLJIT_UNUSED) {
EMIT_MOV(compiler, TMP_REG1, 0, src, srcw);
@@ -1146,12 +1146,12 @@ static sljit_si emit_not_with_flags(struct sljit_compiler *compiler,
return SLJIT_SUCCESS;
}
-static sljit_si emit_clz(struct sljit_compiler *compiler, sljit_si op_flags,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw)
+static sljit_s32 emit_clz(struct sljit_compiler *compiler, sljit_s32 op_flags,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
{
- sljit_ub* inst;
- sljit_si dst_r;
+ sljit_u8* inst;
+ sljit_s32 dst_r;
SLJIT_UNUSED_ARG(op_flags);
if (SLJIT_UNLIKELY(dst == SLJIT_UNUSED)) {
@@ -1164,7 +1164,7 @@ static sljit_si emit_clz(struct sljit_compiler *compiler, sljit_si op_flags,
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_IMM, 31, TMP_REG1, 0);
#else
- inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_IMM, !(op_flags & SLJIT_INT_OP) ? 63 : 31, TMP_REG1, 0);
+ inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_IMM, !(op_flags & SLJIT_I32_OP) ? 63 : 31, TMP_REG1, 0);
#endif
FAIL_IF(!inst);
*inst |= SHR;
@@ -1199,8 +1199,8 @@ static sljit_si emit_clz(struct sljit_compiler *compiler, sljit_si op_flags,
#else
dst_r = FAST_IS_REG(dst) ? dst : TMP_REG2;
compiler->mode32 = 0;
- EMIT_MOV(compiler, dst_r, 0, SLJIT_IMM, !(op_flags & SLJIT_INT_OP) ? 64 + 63 : 32 + 31);
- compiler->mode32 = op_flags & SLJIT_INT_OP;
+ EMIT_MOV(compiler, dst_r, 0, SLJIT_IMM, !(op_flags & SLJIT_I32_OP) ? 64 + 63 : 32 + 31);
+ compiler->mode32 = op_flags & SLJIT_I32_OP;
#endif
if (cpu_has_cmov == -1)
@@ -1213,7 +1213,7 @@ static sljit_si emit_clz(struct sljit_compiler *compiler, sljit_si op_flags,
*inst = CMOVNE_r_rm;
} else {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 4);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
FAIL_IF(!inst);
INC_SIZE(4);
@@ -1222,7 +1222,7 @@ static sljit_si emit_clz(struct sljit_compiler *compiler, sljit_si op_flags,
*inst++ = MOV_r_rm;
*inst++ = MOD_REG | (reg_map[dst_r] << 3) | reg_map[TMP_REG1];
#else
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 5);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 5);
FAIL_IF(!inst);
INC_SIZE(5);
@@ -1237,7 +1237,7 @@ static sljit_si emit_clz(struct sljit_compiler *compiler, sljit_si op_flags,
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
inst = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, 31, dst_r, 0);
#else
- inst = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, !(op_flags & SLJIT_INT_OP) ? 63 : 31, dst_r, 0);
+ inst = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, !(op_flags & SLJIT_I32_OP) ? 63 : 31, dst_r, 0);
#endif
FAIL_IF(!inst);
*(inst + 1) |= XOR;
@@ -1255,16 +1255,16 @@ static sljit_si emit_clz(struct sljit_compiler *compiler, sljit_si op_flags,
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op1(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
{
- sljit_ub* inst;
- sljit_si update = 0;
- sljit_si op_flags = GET_ALL_FLAGS(op);
+ sljit_u8* inst;
+ sljit_s32 update = 0;
+ sljit_s32 op_flags = GET_ALL_FLAGS(op);
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- sljit_si dst_is_ereg = 0;
- sljit_si src_is_ereg = 0;
+ sljit_s32 dst_is_ereg = 0;
+ sljit_s32 src_is_ereg = 0;
#else
# define src_is_ereg 0
#endif
@@ -1277,7 +1277,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op1(struct sljit_compiler *compiler
CHECK_EXTRA_REGS(dst, dstw, dst_is_ereg = 1);
CHECK_EXTRA_REGS(src, srcw, src_is_ereg = 1);
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- compiler->mode32 = op_flags & SLJIT_INT_OP;
+ compiler->mode32 = op_flags & SLJIT_I32_OP;
#endif
op = GET_OPCODE(op);
@@ -1286,20 +1286,20 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op1(struct sljit_compiler *compiler
compiler->mode32 = 0;
#endif
- if (op_flags & SLJIT_INT_OP) {
+ if (op_flags & SLJIT_I32_OP) {
if (FAST_IS_REG(src) && src == dst) {
if (!TYPE_CAST_NEEDED(op))
return SLJIT_SUCCESS;
}
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- if (op == SLJIT_MOV_SI && (src & SLJIT_MEM))
- op = SLJIT_MOV_UI;
- if (op == SLJIT_MOVU_SI && (src & SLJIT_MEM))
- op = SLJIT_MOVU_UI;
- if (op == SLJIT_MOV_UI && (src & SLJIT_IMM))
- op = SLJIT_MOV_SI;
- if (op == SLJIT_MOVU_UI && (src & SLJIT_IMM))
- op = SLJIT_MOVU_SI;
+ if (op == SLJIT_MOV_S32 && (src & SLJIT_MEM))
+ op = SLJIT_MOV_U32;
+ if (op == SLJIT_MOVU_S32 && (src & SLJIT_MEM))
+ op = SLJIT_MOVU_U32;
+ if (op == SLJIT_MOV_U32 && (src & SLJIT_IMM))
+ op = SLJIT_MOV_S32;
+ if (op == SLJIT_MOVU_U32 && (src & SLJIT_IMM))
+ op = SLJIT_MOVU_S32;
#endif
}
@@ -1311,24 +1311,24 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op1(struct sljit_compiler *compiler
if (src & SLJIT_IMM) {
switch (op) {
- case SLJIT_MOV_UB:
- srcw = (sljit_ub)srcw;
+ case SLJIT_MOV_U8:
+ srcw = (sljit_u8)srcw;
break;
- case SLJIT_MOV_SB:
- srcw = (sljit_sb)srcw;
+ case SLJIT_MOV_S8:
+ srcw = (sljit_s8)srcw;
break;
- case SLJIT_MOV_UH:
- srcw = (sljit_uh)srcw;
+ case SLJIT_MOV_U16:
+ srcw = (sljit_u16)srcw;
break;
- case SLJIT_MOV_SH:
- srcw = (sljit_sh)srcw;
+ case SLJIT_MOV_S16:
+ srcw = (sljit_s16)srcw;
break;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- case SLJIT_MOV_UI:
- srcw = (sljit_ui)srcw;
+ case SLJIT_MOV_U32:
+ srcw = (sljit_u32)srcw;
break;
- case SLJIT_MOV_SI:
- srcw = (sljit_si)srcw;
+ case SLJIT_MOV_S32:
+ srcw = (sljit_s32)srcw;
break;
#endif
}
@@ -1347,7 +1347,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op1(struct sljit_compiler *compiler
}
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- if (SLJIT_UNLIKELY(dst_is_ereg) && (!(op == SLJIT_MOV || op == SLJIT_MOV_UI || op == SLJIT_MOV_SI || op == SLJIT_MOV_P) || (src & SLJIT_MEM))) {
+ if (SLJIT_UNLIKELY(dst_is_ereg) && (!(op == SLJIT_MOV || op == SLJIT_MOV_U32 || op == SLJIT_MOV_S32 || op == SLJIT_MOV_P) || (src & SLJIT_MEM))) {
SLJIT_ASSERT(dst == SLJIT_MEM1(SLJIT_SP));
dst = TMP_REG1;
}
@@ -1357,28 +1357,28 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op1(struct sljit_compiler *compiler
case SLJIT_MOV:
case SLJIT_MOV_P:
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- case SLJIT_MOV_UI:
- case SLJIT_MOV_SI:
+ case SLJIT_MOV_U32:
+ case SLJIT_MOV_S32:
#endif
FAIL_IF(emit_mov(compiler, dst, dstw, src, srcw));
break;
- case SLJIT_MOV_UB:
+ case SLJIT_MOV_U8:
FAIL_IF(emit_mov_byte(compiler, 0, dst, dstw, src, srcw));
break;
- case SLJIT_MOV_SB:
+ case SLJIT_MOV_S8:
FAIL_IF(emit_mov_byte(compiler, 1, dst, dstw, src, srcw));
break;
- case SLJIT_MOV_UH:
+ case SLJIT_MOV_U16:
FAIL_IF(emit_mov_half(compiler, 0, dst, dstw, src, srcw));
break;
- case SLJIT_MOV_SH:
+ case SLJIT_MOV_S16:
FAIL_IF(emit_mov_half(compiler, 1, dst, dstw, src, srcw));
break;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- case SLJIT_MOV_UI:
+ case SLJIT_MOV_U32:
FAIL_IF(emit_mov_int(compiler, 0, dst, dstw, src, srcw));
break;
- case SLJIT_MOV_SI:
+ case SLJIT_MOV_S32:
FAIL_IF(emit_mov_int(compiler, 1, dst, dstw, src, srcw));
break;
#endif
@@ -1454,13 +1454,13 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op1(struct sljit_compiler *compiler
#endif
-static sljit_si emit_cum_binary(struct sljit_compiler *compiler,
- sljit_ub op_rm, sljit_ub op_mr, sljit_ub op_imm, sljit_ub op_eax_imm,
- sljit_si dst, sljit_sw dstw,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+static sljit_s32 emit_cum_binary(struct sljit_compiler *compiler,
+ sljit_u8 op_rm, sljit_u8 op_mr, sljit_u8 op_imm, sljit_u8 op_eax_imm,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
- sljit_ub* inst;
+ sljit_u8* inst;
if (dst == SLJIT_UNUSED) {
EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
@@ -1570,13 +1570,13 @@ static sljit_si emit_cum_binary(struct sljit_compiler *compiler,
return SLJIT_SUCCESS;
}
-static sljit_si emit_non_cum_binary(struct sljit_compiler *compiler,
- sljit_ub op_rm, sljit_ub op_mr, sljit_ub op_imm, sljit_ub op_eax_imm,
- sljit_si dst, sljit_sw dstw,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+static sljit_s32 emit_non_cum_binary(struct sljit_compiler *compiler,
+ sljit_u8 op_rm, sljit_u8 op_mr, sljit_u8 op_imm, sljit_u8 op_eax_imm,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
- sljit_ub* inst;
+ sljit_u8* inst;
if (dst == SLJIT_UNUSED) {
EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
@@ -1652,13 +1652,13 @@ static sljit_si emit_non_cum_binary(struct sljit_compiler *compiler,
return SLJIT_SUCCESS;
}
-static sljit_si emit_mul(struct sljit_compiler *compiler,
- sljit_si dst, sljit_sw dstw,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+static sljit_s32 emit_mul(struct sljit_compiler *compiler,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
- sljit_ub* inst;
- sljit_si dst_r;
+ sljit_u8* inst;
+ sljit_s32 dst_r;
dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
@@ -1686,17 +1686,17 @@ static sljit_si emit_mul(struct sljit_compiler *compiler,
inst = emit_x86_instruction(compiler, 1, dst_r, 0, src2, src2w);
FAIL_IF(!inst);
*inst = IMUL_r_rm_i8;
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 1);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
FAIL_IF(!inst);
INC_SIZE(1);
- *inst = (sljit_sb)src1w;
+ *inst = (sljit_s8)src1w;
}
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
else {
inst = emit_x86_instruction(compiler, 1, dst_r, 0, src2, src2w);
FAIL_IF(!inst);
*inst = IMUL_r_rm_i32;
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 4);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
FAIL_IF(!inst);
INC_SIZE(4);
*(sljit_sw*)inst = src1w;
@@ -1706,10 +1706,10 @@ static sljit_si emit_mul(struct sljit_compiler *compiler,
inst = emit_x86_instruction(compiler, 1, dst_r, 0, src2, src2w);
FAIL_IF(!inst);
*inst = IMUL_r_rm_i32;
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 4);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
FAIL_IF(!inst);
INC_SIZE(4);
- *(sljit_si*)inst = (sljit_si)src1w;
+ *(sljit_s32*)inst = (sljit_s32)src1w;
}
else {
EMIT_MOV(compiler, TMP_REG2, 0, SLJIT_IMM, src1w);
@@ -1729,17 +1729,17 @@ static sljit_si emit_mul(struct sljit_compiler *compiler,
inst = emit_x86_instruction(compiler, 1, dst_r, 0, src1, src1w);
FAIL_IF(!inst);
*inst = IMUL_r_rm_i8;
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 1);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
FAIL_IF(!inst);
INC_SIZE(1);
- *inst = (sljit_sb)src2w;
+ *inst = (sljit_s8)src2w;
}
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
else {
inst = emit_x86_instruction(compiler, 1, dst_r, 0, src1, src1w);
FAIL_IF(!inst);
*inst = IMUL_r_rm_i32;
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 4);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
FAIL_IF(!inst);
INC_SIZE(4);
*(sljit_sw*)inst = src2w;
@@ -1749,10 +1749,10 @@ static sljit_si emit_mul(struct sljit_compiler *compiler,
inst = emit_x86_instruction(compiler, 1, dst_r, 0, src1, src1w);
FAIL_IF(!inst);
*inst = IMUL_r_rm_i32;
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 4);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
FAIL_IF(!inst);
INC_SIZE(4);
- *(sljit_si*)inst = (sljit_si)src2w;
+ *(sljit_s32*)inst = (sljit_s32)src2w;
}
else {
EMIT_MOV(compiler, TMP_REG2, 0, SLJIT_IMM, src2w);
@@ -1782,13 +1782,13 @@ static sljit_si emit_mul(struct sljit_compiler *compiler,
return SLJIT_SUCCESS;
}
-static sljit_si emit_lea_binary(struct sljit_compiler *compiler, sljit_si keep_flags,
- sljit_si dst, sljit_sw dstw,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+static sljit_s32 emit_lea_binary(struct sljit_compiler *compiler, sljit_s32 keep_flags,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
- sljit_ub* inst;
- sljit_si dst_r, done = 0;
+ sljit_u8* inst;
+ sljit_s32 dst_r, done = 0;
/* These cases are better left to be handled the normal way. */
if (!keep_flags) {
@@ -1809,7 +1809,7 @@ static sljit_si emit_lea_binary(struct sljit_compiler *compiler, sljit_si keep_f
}
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
if ((src2 & SLJIT_IMM) && (compiler->mode32 || IS_HALFWORD(src2w))) {
- inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM1(src1), (sljit_si)src2w);
+ inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM1(src1), (sljit_s32)src2w);
#else
if (src2 & SLJIT_IMM) {
inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM1(src1), src2w);
@@ -1822,7 +1822,7 @@ static sljit_si emit_lea_binary(struct sljit_compiler *compiler, sljit_si keep_f
else if (FAST_IS_REG(src2)) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
if ((src1 & SLJIT_IMM) && (compiler->mode32 || IS_HALFWORD(src1w))) {
- inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM1(src2), (sljit_si)src1w);
+ inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM1(src2), (sljit_s32)src1w);
#else
if (src1 & SLJIT_IMM) {
inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM1(src2), src1w);
@@ -1841,11 +1841,11 @@ static sljit_si emit_lea_binary(struct sljit_compiler *compiler, sljit_si keep_f
return SLJIT_ERR_UNSUPPORTED;
}
-static sljit_si emit_cmp_binary(struct sljit_compiler *compiler,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+static sljit_s32 emit_cmp_binary(struct sljit_compiler *compiler,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
- sljit_ub* inst;
+ sljit_u8* inst;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
if (src1 == SLJIT_R0 && (src2 & SLJIT_IMM) && (src2w > 127 || src2w < -128) && (compiler->mode32 || IS_HALFWORD(src2w))) {
@@ -1892,11 +1892,11 @@ static sljit_si emit_cmp_binary(struct sljit_compiler *compiler,
return SLJIT_SUCCESS;
}
-static sljit_si emit_test_binary(struct sljit_compiler *compiler,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+static sljit_s32 emit_test_binary(struct sljit_compiler *compiler,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
- sljit_ub* inst;
+ sljit_u8* inst;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
if (src1 == SLJIT_R0 && (src2 & SLJIT_IMM) && (src2w > 127 || src2w < -128) && (compiler->mode32 || IS_HALFWORD(src2w))) {
@@ -2002,13 +2002,13 @@ static sljit_si emit_test_binary(struct sljit_compiler *compiler,
return SLJIT_SUCCESS;
}
-static sljit_si emit_shift(struct sljit_compiler *compiler,
- sljit_ub mode,
- sljit_si dst, sljit_sw dstw,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+static sljit_s32 emit_shift(struct sljit_compiler *compiler,
+ sljit_u8 mode,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
- sljit_ub* inst;
+ sljit_u8* inst;
if ((src2 & SLJIT_IMM) || (src2 == SLJIT_PREF_SHIFT_REG)) {
if (dst == src1 && dstw == src1w) {
@@ -2091,11 +2091,11 @@ static sljit_si emit_shift(struct sljit_compiler *compiler,
return SLJIT_SUCCESS;
}
-static sljit_si emit_shift_with_flags(struct sljit_compiler *compiler,
- sljit_ub mode, sljit_si set_flags,
- sljit_si dst, sljit_sw dstw,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+static sljit_s32 emit_shift_with_flags(struct sljit_compiler *compiler,
+ sljit_u8 mode, sljit_s32 set_flags,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
/* The CPU does not set flags if the shift count is 0. */
if (src2 & SLJIT_IMM) {
@@ -2126,10 +2126,10 @@ static sljit_si emit_shift_with_flags(struct sljit_compiler *compiler,
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op2(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
CHECK_ERROR();
CHECK(check_sljit_emit_op2(compiler, op, dst, dstw, src1, src1w, src2, src2w));
@@ -2141,7 +2141,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op2(struct sljit_compiler *compiler
CHECK_EXTRA_REGS(src1, src1w, (void)0);
CHECK_EXTRA_REGS(src2, src2w, (void)0);
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- compiler->mode32 = op & SLJIT_INT_OP;
+ compiler->mode32 = op & SLJIT_I32_OP;
#endif
if (GET_OPCODE(op) >= SLJIT_MUL) {
@@ -2221,7 +2221,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op2(struct sljit_compiler *compiler
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_get_register_index(sljit_si reg)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 reg)
{
CHECK_REG_INDEX(check_sljit_get_register_index(reg));
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
@@ -2231,21 +2231,21 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_get_register_index(sljit_si reg)
return reg_map[reg];
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_get_float_register_index(sljit_si reg)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_float_register_index(sljit_s32 reg)
{
CHECK_REG_INDEX(check_sljit_get_float_register_index(reg));
return reg;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_custom(struct sljit_compiler *compiler,
- void *instruction, sljit_si size)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *compiler,
+ void *instruction, sljit_s32 size)
{
- sljit_ub *inst;
+ sljit_u8 *inst;
CHECK_ERROR();
CHECK(check_sljit_emit_op_custom(compiler, instruction, size));
- inst = (sljit_ub*)ensure_buf(compiler, 1 + size);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
FAIL_IF(!inst);
INC_SIZE(size);
SLJIT_MEMMOVE(inst, instruction, size);
@@ -2257,12 +2257,12 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_custom(struct sljit_compiler *co
/* --------------------------------------------------------------------- */
/* Alignment + 2 * 16 bytes. */
-static sljit_si sse2_data[3 + (4 + 4) * 2];
-static sljit_si *sse2_buffer;
+static sljit_s32 sse2_data[3 + (4 + 4) * 2];
+static sljit_s32 *sse2_buffer;
static void init_compiler(void)
{
- sse2_buffer = (sljit_si*)(((sljit_uw)sse2_data + 15) & ~0xf);
+ sse2_buffer = (sljit_s32*)(((sljit_uw)sse2_data + 15) & ~0xf);
/* Single precision constants. */
sse2_buffer[0] = 0x80000000;
sse2_buffer[4] = 0x7fffffff;
@@ -2273,7 +2273,7 @@ static void init_compiler(void)
sse2_buffer[13] = 0x7fffffff;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_is_fpu_available(void)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_is_fpu_available(void)
{
#ifdef SLJIT_IS_FPU_AVAILABLE
return SLJIT_IS_FPU_AVAILABLE;
@@ -2286,10 +2286,10 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_is_fpu_available(void)
#endif /* SLJIT_DETECT_SSE2 */
}
-static sljit_si emit_sse2(struct sljit_compiler *compiler, sljit_ub opcode,
- sljit_si single, sljit_si xmm1, sljit_si xmm2, sljit_sw xmm2w)
+static sljit_s32 emit_sse2(struct sljit_compiler *compiler, sljit_u8 opcode,
+ sljit_s32 single, sljit_s32 xmm1, sljit_s32 xmm2, sljit_sw xmm2w)
{
- sljit_ub *inst;
+ sljit_u8 *inst;
inst = emit_x86_instruction(compiler, 2 | (single ? EX86_PREF_F3 : EX86_PREF_F2) | EX86_SSE2, xmm1, 0, xmm2, xmm2w);
FAIL_IF(!inst);
@@ -2298,10 +2298,10 @@ static sljit_si emit_sse2(struct sljit_compiler *compiler, sljit_ub opcode,
return SLJIT_SUCCESS;
}
-static sljit_si emit_sse2_logic(struct sljit_compiler *compiler, sljit_ub opcode,
- sljit_si pref66, sljit_si xmm1, sljit_si xmm2, sljit_sw xmm2w)
+static sljit_s32 emit_sse2_logic(struct sljit_compiler *compiler, sljit_u8 opcode,
+ sljit_s32 pref66, sljit_s32 xmm1, sljit_s32 xmm2, sljit_sw xmm2w)
{
- sljit_ub *inst;
+ sljit_u8 *inst;
inst = emit_x86_instruction(compiler, 2 | (pref66 ? EX86_PREF_66 : 0) | EX86_SSE2, xmm1, 0, xmm2, xmm2w);
FAIL_IF(!inst);
@@ -2310,31 +2310,31 @@ static sljit_si emit_sse2_logic(struct sljit_compiler *compiler, sljit_ub opcode
return SLJIT_SUCCESS;
}
-static SLJIT_INLINE sljit_si emit_sse2_load(struct sljit_compiler *compiler,
- sljit_si single, sljit_si dst, sljit_si src, sljit_sw srcw)
+static SLJIT_INLINE sljit_s32 emit_sse2_load(struct sljit_compiler *compiler,
+ sljit_s32 single, sljit_s32 dst, sljit_s32 src, sljit_sw srcw)
{
return emit_sse2(compiler, MOVSD_x_xm, single, dst, src, srcw);
}
-static SLJIT_INLINE sljit_si emit_sse2_store(struct sljit_compiler *compiler,
- sljit_si single, sljit_si dst, sljit_sw dstw, sljit_si src)
+static SLJIT_INLINE sljit_s32 emit_sse2_store(struct sljit_compiler *compiler,
+ sljit_s32 single, sljit_s32 dst, sljit_sw dstw, sljit_s32 src)
{
return emit_sse2(compiler, MOVSD_xm_x, single, src, dst, dstw);
}
-static SLJIT_INLINE sljit_si sljit_emit_fop1_convw_fromd(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw)
+static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
{
- sljit_si dst_r = SLOW_IS_REG(dst) ? dst : TMP_REG1;
- sljit_ub *inst;
+ sljit_s32 dst_r = SLOW_IS_REG(dst) ? dst : TMP_REG1;
+ sljit_u8 *inst;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- if (GET_OPCODE(op) == SLJIT_CONVW_FROMD)
+ if (GET_OPCODE(op) == SLJIT_CONV_SW_FROM_F64)
compiler->mode32 = 0;
#endif
- inst = emit_x86_instruction(compiler, 2 | ((op & SLJIT_SINGLE_OP) ? EX86_PREF_F3 : EX86_PREF_F2) | EX86_SSE2_OP2, dst_r, 0, src, srcw);
+ inst = emit_x86_instruction(compiler, 2 | ((op & SLJIT_F32_OP) ? EX86_PREF_F3 : EX86_PREF_F2) | EX86_SSE2_OP2, dst_r, 0, src, srcw);
FAIL_IF(!inst);
*inst++ = GROUP_0F;
*inst = CVTTSD2SI_r_xm;
@@ -2344,29 +2344,29 @@ static SLJIT_INLINE sljit_si sljit_emit_fop1_convw_fromd(struct sljit_compiler *
return SLJIT_SUCCESS;
}
-static SLJIT_INLINE sljit_si sljit_emit_fop1_convd_fromw(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw)
+static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_sw(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
{
- sljit_si dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG;
- sljit_ub *inst;
+ sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG;
+ sljit_u8 *inst;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- if (GET_OPCODE(op) == SLJIT_CONVD_FROMW)
+ if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_SW)
compiler->mode32 = 0;
#endif
if (src & SLJIT_IMM) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- if (GET_OPCODE(op) == SLJIT_CONVD_FROMI)
- srcw = (sljit_si)srcw;
+ if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_S32)
+ srcw = (sljit_s32)srcw;
#endif
EMIT_MOV(compiler, TMP_REG1, 0, src, srcw);
src = TMP_REG1;
srcw = 0;
}
- inst = emit_x86_instruction(compiler, 2 | ((op & SLJIT_SINGLE_OP) ? EX86_PREF_F3 : EX86_PREF_F2) | EX86_SSE2_OP1, dst_r, 0, src, srcw);
+ inst = emit_x86_instruction(compiler, 2 | ((op & SLJIT_F32_OP) ? EX86_PREF_F3 : EX86_PREF_F2) | EX86_SSE2_OP1, dst_r, 0, src, srcw);
FAIL_IF(!inst);
*inst++ = GROUP_0F;
*inst = CVTSI2SD_x_rm;
@@ -2375,27 +2375,27 @@ static SLJIT_INLINE sljit_si sljit_emit_fop1_convd_fromw(struct sljit_compiler *
compiler->mode32 = 1;
#endif
if (dst_r == TMP_FREG)
- return emit_sse2_store(compiler, op & SLJIT_SINGLE_OP, dst, dstw, TMP_FREG);
+ return emit_sse2_store(compiler, op & SLJIT_F32_OP, dst, dstw, TMP_FREG);
return SLJIT_SUCCESS;
}
-static SLJIT_INLINE sljit_si sljit_emit_fop1_cmp(struct sljit_compiler *compiler, sljit_si op,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+static SLJIT_INLINE sljit_s32 sljit_emit_fop1_cmp(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
compiler->flags_saved = 0;
if (!FAST_IS_REG(src1)) {
- FAIL_IF(emit_sse2_load(compiler, op & SLJIT_SINGLE_OP, TMP_FREG, src1, src1w));
+ FAIL_IF(emit_sse2_load(compiler, op & SLJIT_F32_OP, TMP_FREG, src1, src1w));
src1 = TMP_FREG;
}
- return emit_sse2_logic(compiler, UCOMISD_x_xm, !(op & SLJIT_SINGLE_OP), src1, src2, src2w);
+ return emit_sse2_logic(compiler, UCOMISD_x_xm, !(op & SLJIT_F32_OP), src1, src2, src2w);
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop1(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
{
- sljit_si dst_r;
+ sljit_s32 dst_r;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
compiler->mode32 = 1;
@@ -2404,65 +2404,65 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop1(struct sljit_compiler *compile
CHECK_ERROR();
SELECT_FOP1_OPERATION_WITH_CHECKS(compiler, op, dst, dstw, src, srcw);
- if (GET_OPCODE(op) == SLJIT_DMOV) {
+ if (GET_OPCODE(op) == SLJIT_MOV_F64) {
if (FAST_IS_REG(dst))
- return emit_sse2_load(compiler, op & SLJIT_SINGLE_OP, dst, src, srcw);
+ return emit_sse2_load(compiler, op & SLJIT_F32_OP, dst, src, srcw);
if (FAST_IS_REG(src))
- return emit_sse2_store(compiler, op & SLJIT_SINGLE_OP, dst, dstw, src);
- FAIL_IF(emit_sse2_load(compiler, op & SLJIT_SINGLE_OP, TMP_FREG, src, srcw));
- return emit_sse2_store(compiler, op & SLJIT_SINGLE_OP, dst, dstw, TMP_FREG);
+ return emit_sse2_store(compiler, op & SLJIT_F32_OP, dst, dstw, src);
+ FAIL_IF(emit_sse2_load(compiler, op & SLJIT_F32_OP, TMP_FREG, src, srcw));
+ return emit_sse2_store(compiler, op & SLJIT_F32_OP, dst, dstw, TMP_FREG);
}
- if (GET_OPCODE(op) == SLJIT_CONVD_FROMS) {
+ if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_F32) {
dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG;
if (FAST_IS_REG(src)) {
/* We overwrite the high bits of the source. From SLJIT's point of view,
this is not an issue.
Note: In SSE3, we could also use MOVDDUP and MOVSLDUP. */
- FAIL_IF(emit_sse2_logic(compiler, UNPCKLPD_x_xm, op & SLJIT_SINGLE_OP, src, src, 0));
+ FAIL_IF(emit_sse2_logic(compiler, UNPCKLPD_x_xm, op & SLJIT_F32_OP, src, src, 0));
}
else {
- FAIL_IF(emit_sse2_load(compiler, !(op & SLJIT_SINGLE_OP), TMP_FREG, src, srcw));
+ FAIL_IF(emit_sse2_load(compiler, !(op & SLJIT_F32_OP), TMP_FREG, src, srcw));
src = TMP_FREG;
}
- FAIL_IF(emit_sse2_logic(compiler, CVTPD2PS_x_xm, op & SLJIT_SINGLE_OP, dst_r, src, 0));
+ FAIL_IF(emit_sse2_logic(compiler, CVTPD2PS_x_xm, op & SLJIT_F32_OP, dst_r, src, 0));
if (dst_r == TMP_FREG)
- return emit_sse2_store(compiler, op & SLJIT_SINGLE_OP, dst, dstw, TMP_FREG);
+ return emit_sse2_store(compiler, op & SLJIT_F32_OP, dst, dstw, TMP_FREG);
return SLJIT_SUCCESS;
}
if (SLOW_IS_REG(dst)) {
dst_r = dst;
if (dst != src)
- FAIL_IF(emit_sse2_load(compiler, op & SLJIT_SINGLE_OP, dst_r, src, srcw));
+ FAIL_IF(emit_sse2_load(compiler, op & SLJIT_F32_OP, dst_r, src, srcw));
}
else {
dst_r = TMP_FREG;
- FAIL_IF(emit_sse2_load(compiler, op & SLJIT_SINGLE_OP, dst_r, src, srcw));
+ FAIL_IF(emit_sse2_load(compiler, op & SLJIT_F32_OP, dst_r, src, srcw));
}
switch (GET_OPCODE(op)) {
- case SLJIT_DNEG:
- FAIL_IF(emit_sse2_logic(compiler, XORPD_x_xm, 1, dst_r, SLJIT_MEM0(), (sljit_sw)(op & SLJIT_SINGLE_OP ? sse2_buffer : sse2_buffer + 8)));
+ case SLJIT_NEG_F64:
+ FAIL_IF(emit_sse2_logic(compiler, XORPD_x_xm, 1, dst_r, SLJIT_MEM0(), (sljit_sw)(op & SLJIT_F32_OP ? sse2_buffer : sse2_buffer + 8)));
break;
- case SLJIT_DABS:
- FAIL_IF(emit_sse2_logic(compiler, ANDPD_x_xm, 1, dst_r, SLJIT_MEM0(), (sljit_sw)(op & SLJIT_SINGLE_OP ? sse2_buffer + 4 : sse2_buffer + 12)));
+ case SLJIT_ABS_F64:
+ FAIL_IF(emit_sse2_logic(compiler, ANDPD_x_xm, 1, dst_r, SLJIT_MEM0(), (sljit_sw)(op & SLJIT_F32_OP ? sse2_buffer + 4 : sse2_buffer + 12)));
break;
}
if (dst_r == TMP_FREG)
- return emit_sse2_store(compiler, op & SLJIT_SINGLE_OP, dst, dstw, TMP_FREG);
+ return emit_sse2_store(compiler, op & SLJIT_F32_OP, dst, dstw, TMP_FREG);
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop2(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src1, sljit_sw src1w,
- sljit_si src2, sljit_sw src2w)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
{
- sljit_si dst_r;
+ sljit_s32 dst_r;
CHECK_ERROR();
CHECK(check_sljit_emit_fop2(compiler, op, dst, dstw, src1, src1w, src2, src2w));
@@ -2478,43 +2478,43 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop2(struct sljit_compiler *compile
dst_r = dst;
if (dst == src1)
; /* Do nothing here. */
- else if (dst == src2 && (op == SLJIT_DADD || op == SLJIT_DMUL)) {
+ else if (dst == src2 && (op == SLJIT_ADD_F64 || op == SLJIT_MUL_F64)) {
/* Swap arguments. */
src2 = src1;
src2w = src1w;
}
else if (dst != src2)
- FAIL_IF(emit_sse2_load(compiler, op & SLJIT_SINGLE_OP, dst_r, src1, src1w));
+ FAIL_IF(emit_sse2_load(compiler, op & SLJIT_F32_OP, dst_r, src1, src1w));
else {
dst_r = TMP_FREG;
- FAIL_IF(emit_sse2_load(compiler, op & SLJIT_SINGLE_OP, TMP_FREG, src1, src1w));
+ FAIL_IF(emit_sse2_load(compiler, op & SLJIT_F32_OP, TMP_FREG, src1, src1w));
}
}
else {
dst_r = TMP_FREG;
- FAIL_IF(emit_sse2_load(compiler, op & SLJIT_SINGLE_OP, TMP_FREG, src1, src1w));
+ FAIL_IF(emit_sse2_load(compiler, op & SLJIT_F32_OP, TMP_FREG, src1, src1w));
}
switch (GET_OPCODE(op)) {
- case SLJIT_DADD:
- FAIL_IF(emit_sse2(compiler, ADDSD_x_xm, op & SLJIT_SINGLE_OP, dst_r, src2, src2w));
+ case SLJIT_ADD_F64:
+ FAIL_IF(emit_sse2(compiler, ADDSD_x_xm, op & SLJIT_F32_OP, dst_r, src2, src2w));
break;
- case SLJIT_DSUB:
- FAIL_IF(emit_sse2(compiler, SUBSD_x_xm, op & SLJIT_SINGLE_OP, dst_r, src2, src2w));
+ case SLJIT_SUB_F64:
+ FAIL_IF(emit_sse2(compiler, SUBSD_x_xm, op & SLJIT_F32_OP, dst_r, src2, src2w));
break;
- case SLJIT_DMUL:
- FAIL_IF(emit_sse2(compiler, MULSD_x_xm, op & SLJIT_SINGLE_OP, dst_r, src2, src2w));
+ case SLJIT_MUL_F64:
+ FAIL_IF(emit_sse2(compiler, MULSD_x_xm, op & SLJIT_F32_OP, dst_r, src2, src2w));
break;
- case SLJIT_DDIV:
- FAIL_IF(emit_sse2(compiler, DIVSD_x_xm, op & SLJIT_SINGLE_OP, dst_r, src2, src2w));
+ case SLJIT_DIV_F64:
+ FAIL_IF(emit_sse2(compiler, DIVSD_x_xm, op & SLJIT_F32_OP, dst_r, src2, src2w));
break;
}
if (dst_r == TMP_FREG)
- return emit_sse2_store(compiler, op & SLJIT_SINGLE_OP, dst, dstw, TMP_FREG);
+ return emit_sse2_store(compiler, op & SLJIT_F32_OP, dst, dstw, TMP_FREG);
return SLJIT_SUCCESS;
}
@@ -2524,7 +2524,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop2(struct sljit_compiler *compile
SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emit_label(struct sljit_compiler *compiler)
{
- sljit_ub *inst;
+ sljit_u8 *inst;
struct sljit_label *label;
CHECK_ERROR_PTR();
@@ -2542,7 +2542,7 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emit_label(struct sljit_compi
PTR_FAIL_IF(!label);
set_label(label, compiler);
- inst = (sljit_ub*)ensure_buf(compiler, 2);
+ inst = (sljit_u8*)ensure_buf(compiler, 2);
PTR_FAIL_IF(!inst);
*inst++ = 0;
@@ -2551,9 +2551,9 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emit_label(struct sljit_compi
return label;
}
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, sljit_si type)
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, sljit_s32 type)
{
- sljit_ub *inst;
+ sljit_u8 *inst;
struct sljit_jump *jump;
CHECK_ERROR_PTR();
@@ -2580,7 +2580,7 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compile
compiler->size += (type >= SLJIT_JUMP) ? (10 + 3) : (2 + 10 + 3);
#endif
- inst = (sljit_ub*)ensure_buf(compiler, 2);
+ inst = (sljit_u8*)ensure_buf(compiler, 2);
PTR_FAIL_IF_NULL(inst);
*inst++ = 0;
@@ -2588,9 +2588,9 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compile
return jump;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_ijump(struct sljit_compiler *compiler, sljit_si type, sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 src, sljit_sw srcw)
{
- sljit_ub *inst;
+ sljit_u8 *inst;
struct sljit_jump *jump;
CHECK_ERROR();
@@ -2638,7 +2638,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_ijump(struct sljit_compiler *compil
compiler->size += 10 + 3;
#endif
- inst = (sljit_ub*)ensure_buf(compiler, 2);
+ inst = (sljit_u8*)ensure_buf(compiler, 2);
FAIL_IF_NULL(inst);
*inst++ = 0;
@@ -2657,18 +2657,18 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_ijump(struct sljit_compiler *compil
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_si op,
- sljit_si dst, sljit_sw dstw,
- sljit_si src, sljit_sw srcw,
- sljit_si type)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw,
+ sljit_s32 type)
{
- sljit_ub *inst;
- sljit_ub cond_set = 0;
+ sljit_u8 *inst;
+ sljit_u8 cond_set = 0;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- sljit_si reg;
+ sljit_s32 reg;
#else
/* CHECK_EXTRA_REGS might overwrite these values. */
- sljit_si dst_save = dst;
+ sljit_s32 dst_save = dst;
sljit_sw dstw_save = dstw;
#endif
@@ -2690,7 +2690,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_flags(struct sljit_compiler *com
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
if (GET_OPCODE(op) == SLJIT_OR && !GET_ALL_FLAGS(op) && FAST_IS_REG(dst) && dst == src) {
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 4 + 3);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 4 + 3);
FAIL_IF(!inst);
INC_SIZE(4 + 3);
/* Set low register to conditional flag. */
@@ -2706,7 +2706,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_flags(struct sljit_compiler *com
reg = (op == SLJIT_MOV && FAST_IS_REG(dst)) ? dst : TMP_REG1;
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 4 + 4);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 4 + 4);
FAIL_IF(!inst);
INC_SIZE(4 + 4);
/* Set low register to conditional flag. */
@@ -2735,7 +2735,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_flags(struct sljit_compiler *com
if (GET_OPCODE(op) < SLJIT_ADD && FAST_IS_REG(dst)) {
if (reg_map[dst] <= 4) {
/* Low byte is accessible. */
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 3 + 3);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 3 + 3);
FAIL_IF(!inst);
INC_SIZE(3 + 3);
/* Set low byte to conditional flag. */
@@ -2758,7 +2758,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_flags(struct sljit_compiler *com
/* a xor reg, reg operation would overwrite the flags. */
EMIT_MOV(compiler, dst, 0, SLJIT_IMM, 0);
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 3);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 3);
FAIL_IF(!inst);
INC_SIZE(3);
@@ -2769,7 +2769,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_flags(struct sljit_compiler *com
return SLJIT_SUCCESS;
}
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 1 + 3 + 3 + 1);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 1 + 3 + 3 + 1);
FAIL_IF(!inst);
INC_SIZE(1 + 3 + 3 + 1);
*inst++ = XCHG_EAX_r + reg_map[TMP_REG1];
@@ -2788,7 +2788,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_flags(struct sljit_compiler *com
if (GET_OPCODE(op) == SLJIT_OR && !GET_ALL_FLAGS(op) && FAST_IS_REG(dst) && dst == src && reg_map[dst] <= 4) {
SLJIT_COMPILE_ASSERT(reg_map[SLJIT_R0] == 0, scratch_reg1_must_be_eax);
if (dst != SLJIT_R0) {
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 1 + 3 + 2 + 1);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 1 + 3 + 2 + 1);
FAIL_IF(!inst);
INC_SIZE(1 + 3 + 2 + 1);
/* Set low register to conditional flag. */
@@ -2801,7 +2801,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_flags(struct sljit_compiler *com
*inst++ = XCHG_EAX_r + reg_map[TMP_REG1];
}
else {
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 2 + 3 + 2 + 2);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 2 + 3 + 2 + 2);
FAIL_IF(!inst);
INC_SIZE(2 + 3 + 2 + 2);
/* Set low register to conditional flag. */
@@ -2819,7 +2819,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_flags(struct sljit_compiler *com
}
/* Set TMP_REG1 to the bit. */
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 1 + 3 + 3 + 1);
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 1 + 3 + 3 + 1);
FAIL_IF(!inst);
INC_SIZE(1 + 3 + 3 + 1);
*inst++ = XCHG_EAX_r + reg_map[TMP_REG1];
@@ -2845,7 +2845,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_flags(struct sljit_compiler *com
#endif /* SLJIT_CONFIG_X86_64 */
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_get_local_base(struct sljit_compiler *compiler, sljit_si dst, sljit_sw dstw, sljit_sw offset)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_local_base(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw offset)
{
CHECK_ERROR();
CHECK(check_sljit_get_local_base(compiler, dst, dstw, offset));
@@ -2876,12 +2876,12 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_get_local_base(struct sljit_compiler *co
return emit_mov(compiler, dst, dstw, SLJIT_SP, 0);
}
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_si dst, sljit_sw dstw, sljit_sw init_value)
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw init_value)
{
- sljit_ub *inst;
+ sljit_u8 *inst;
struct sljit_const *const_;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- sljit_si reg;
+ sljit_s32 reg;
#endif
CHECK_ERROR_PTR();
@@ -2908,7 +2908,7 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compi
return NULL;
#endif
- inst = (sljit_ub*)ensure_buf(compiler, 2);
+ inst = (sljit_u8*)ensure_buf(compiler, 2);
PTR_FAIL_IF(!inst);
*inst++ = 0;
@@ -2937,7 +2937,7 @@ SLJIT_API_FUNC_ATTRIBUTE void sljit_set_const(sljit_uw addr, sljit_sw new_consta
*(sljit_sw*)addr = new_constant;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_x86_is_sse2_available(void)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_x86_is_sse2_available(void)
{
#if (defined SLJIT_DETECT_SSE2 && SLJIT_DETECT_SSE2)
if (cpu_has_sse2 == -1)
@@ -2948,34 +2948,34 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_x86_is_sse2_available(void)
#endif
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_x86_is_cmov_available(void)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_x86_is_cmov_available(void)
{
if (cpu_has_cmov == -1)
get_cpu_features();
return cpu_has_cmov;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_x86_emit_cmov(struct sljit_compiler *compiler,
- sljit_si type,
- sljit_si dst_reg,
- sljit_si src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_x86_emit_cmov(struct sljit_compiler *compiler,
+ sljit_s32 type,
+ sljit_s32 dst_reg,
+ sljit_s32 src, sljit_sw srcw)
{
- sljit_ub* inst;
+ sljit_u8* inst;
CHECK_ERROR();
#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
CHECK_ARGUMENT(sljit_x86_is_cmov_available());
- CHECK_ARGUMENT(!(type & ~(0xff | SLJIT_INT_OP)));
- CHECK_ARGUMENT((type & 0xff) >= SLJIT_EQUAL && (type & 0xff) <= SLJIT_D_ORDERED);
- CHECK_ARGUMENT(FUNCTION_CHECK_IS_REG(dst_reg & ~SLJIT_INT_OP));
+ CHECK_ARGUMENT(!(type & ~(0xff | SLJIT_I32_OP)));
+ CHECK_ARGUMENT((type & 0xff) >= SLJIT_EQUAL && (type & 0xff) <= SLJIT_ORDERED_F64);
+ CHECK_ARGUMENT(FUNCTION_CHECK_IS_REG(dst_reg & ~SLJIT_I32_OP));
FUNCTION_CHECK_SRC(src, srcw);
#endif
#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
if (SLJIT_UNLIKELY(!!compiler->verbose)) {
fprintf(compiler->verbose, " x86_cmov%s %s%s, ",
- !(dst_reg & SLJIT_INT_OP) ? "" : ".i",
- JUMP_PREFIX(type), jump_names[type & 0xff]);
- sljit_verbose_reg(compiler, dst_reg & ~SLJIT_INT_OP);
+ !(dst_reg & SLJIT_I32_OP) ? "" : ".i",
+ jump_names[type & 0xff], JUMP_POSTFIX(type));
+ sljit_verbose_reg(compiler, dst_reg & ~SLJIT_I32_OP);
fprintf(compiler->verbose, ", ");
sljit_verbose_param(compiler, src, srcw);
fprintf(compiler->verbose, "\n");
@@ -2986,9 +2986,9 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_x86_emit_cmov(struct sljit_compiler *com
CHECK_EXTRA_REGS(src, srcw, (void)0);
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- compiler->mode32 = dst_reg & SLJIT_INT_OP;
+ compiler->mode32 = dst_reg & SLJIT_I32_OP;
#endif
- dst_reg &= ~SLJIT_INT_OP;
+ dst_reg &= ~SLJIT_I32_OP;
if (SLJIT_UNLIKELY(src & SLJIT_IMM)) {
EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, srcw);
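The hunks above are a mechanical rename of sljit's integer typedefs and operation flags to fixed-width names: sljit_ub/sljit_si become sljit_u8/sljit_s32, SLJIT_INT_OP becomes SLJIT_I32_OP, SLJIT_MOV_UI becomes SLJIT_MOV_U32, and the double/single float opcodes (SLJIT_DADD, SLJIT_SINGLE_OP, ...) become the F64/F32 forms. A minimal sketch of the integer-type correspondence, assuming C99 <stdint.h> widths; the real definitions live in sljitConfigInternal.h and are chosen per target configuration:

    #include <stdint.h>

    /* Illustrative typedefs only; sljitConfigInternal.h picks the underlying
       types per target. This just records the old -> new naming. */
    typedef uint8_t  sljit_u8;   /* was sljit_ub */
    typedef int8_t   sljit_s8;   /* was sljit_sb */
    typedef uint16_t sljit_u16;  /* was sljit_uh */
    typedef int16_t  sljit_s16;  /* was sljit_sh */
    typedef uint32_t sljit_u32;  /* was sljit_ui */
    typedef int32_t  sljit_s32;  /* was sljit_si */
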
diff --git a/src/3rdparty/pcre/sljit/sljitUtils.c b/src/3rdparty/pcre/sljit/sljitUtils.c
index 5294b5f3f9..ec5c321194 100644
--- a/src/3rdparty/pcre/sljit/sljitUtils.c
+++ b/src/3rdparty/pcre/sljit/sljitUtils.c
@@ -163,11 +163,11 @@ SLJIT_API_FUNC_ATTRIBUTE void SLJIT_CALL sljit_release_lock(void)
#include <fcntl.h>
/* Some old systems do not have MAP_ANON. */
-static sljit_si dev_zero = -1;
+static sljit_s32 dev_zero = -1;
#if (defined SLJIT_SINGLE_THREADED && SLJIT_SINGLE_THREADED)
-static SLJIT_INLINE sljit_si open_dev_zero(void)
+static SLJIT_INLINE sljit_s32 open_dev_zero(void)
{
dev_zero = open("/dev/zero", O_RDWR);
return dev_zero < 0;
@@ -179,10 +179,13 @@ static SLJIT_INLINE sljit_si open_dev_zero(void)
static pthread_mutex_t dev_zero_mutex = PTHREAD_MUTEX_INITIALIZER;
-static SLJIT_INLINE sljit_si open_dev_zero(void)
+static SLJIT_INLINE sljit_s32 open_dev_zero(void)
{
pthread_mutex_lock(&dev_zero_mutex);
- dev_zero = open("/dev/zero", O_RDWR);
+ /* dev_zero might have been initialized by another thread while we waited for the lock. */
+ if (dev_zero < 0) {
+ dev_zero = open("/dev/zero", O_RDWR);
+ }
pthread_mutex_unlock(&dev_zero_mutex);
return dev_zero < 0;
}
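
With this change, the threaded open_dev_zero() re-checks dev_zero after acquiring the mutex, so only the first thread through the lock actually opens /dev/zero; later threads reuse the descriptor. Reconstructed from the hunk above (sketch only, surrounding declarations as in the file, requires <fcntl.h> and <pthread.h>):

    static pthread_mutex_t dev_zero_mutex = PTHREAD_MUTEX_INITIALIZER;

    static SLJIT_INLINE sljit_s32 open_dev_zero(void)
    {
        pthread_mutex_lock(&dev_zero_mutex);
        /* Another thread may have opened /dev/zero while we waited for the lock. */
        if (dev_zero < 0) {
            dev_zero = open("/dev/zero", O_RDWR);
        }
        pthread_mutex_unlock(&dev_zero_mutex);
        return dev_zero < 0;
    }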