Diffstat (limited to 'src/3rdparty/pcre2')
-rw-r--r--  src/3rdparty/pcre2/AUTHORS | 6
-rw-r--r--  src/3rdparty/pcre2/CMakeLists.txt | 1
-rw-r--r--  src/3rdparty/pcre2/LICENCE | 6
-rwxr-xr-x  src/3rdparty/pcre2/import_from_pcre2_tarball.sh | 16
-rw-r--r--  src/3rdparty/pcre2/qt_attribution.json | 14
-rw-r--r--  src/3rdparty/pcre2/src/config.h | 8
-rw-r--r--  src/3rdparty/pcre2/src/pcre2.h | 33
-rw-r--r--  src/3rdparty/pcre2/src/pcre2_auto_possess.c | 16
-rw-r--r--  src/3rdparty/pcre2/src/pcre2_chartables.c | 12
-rw-r--r--  src/3rdparty/pcre2/src/pcre2_chkdint.c | 96
-rw-r--r--  src/3rdparty/pcre2/src/pcre2_compile.c | 1298
-rw-r--r--  src/3rdparty/pcre2/src/pcre2_context.c | 54
-rw-r--r--  src/3rdparty/pcre2/src/pcre2_dfa_match.c | 167
-rw-r--r--  src/3rdparty/pcre2/src/pcre2_error.c | 10
-rw-r--r--  src/3rdparty/pcre2/src/pcre2_extuni.c | 28
-rw-r--r--  src/3rdparty/pcre2/src/pcre2_find_bracket.c | 10
-rw-r--r--  src/3rdparty/pcre2/src/pcre2_internal.h | 155
-rw-r--r--  src/3rdparty/pcre2/src/pcre2_intmodedep.h | 58
-rw-r--r--  src/3rdparty/pcre2/src/pcre2_jit_compile.c | 1429
-rw-r--r--  src/3rdparty/pcre2/src/pcre2_jit_match.c | 16
-rw-r--r--  src/3rdparty/pcre2/src/pcre2_jit_misc.c | 4
-rw-r--r--  src/3rdparty/pcre2/src/pcre2_jit_neon_inc.h | 53
-rw-r--r--  src/3rdparty/pcre2/src/pcre2_jit_simd_inc.h | 1061
-rw-r--r--  src/3rdparty/pcre2/src/pcre2_maketables.c | 8
-rw-r--r--  src/3rdparty/pcre2/src/pcre2_match.c | 469
-rw-r--r--  src/3rdparty/pcre2/src/pcre2_match_data.c | 12
-rw-r--r--  src/3rdparty/pcre2/src/pcre2_study.c | 104
-rw-r--r--  src/3rdparty/pcre2/src/pcre2_substring.c | 7
-rw-r--r--  src/3rdparty/pcre2/src/pcre2_tables.c | 12
-rw-r--r--  src/3rdparty/pcre2/src/pcre2_ucd.c | 5584
-rw-r--r--  src/3rdparty/pcre2/src/pcre2_ucp.h | 48
-rw-r--r--  src/3rdparty/pcre2/src/pcre2_ucptables.c | 529
-rw-r--r--  src/3rdparty/pcre2/src/pcre2_valid_utf.c | 48
-rw-r--r--  src/3rdparty/pcre2/src/pcre2_xclass.c | 53
-rw-r--r--  src/3rdparty/pcre2/src/sljit/allocator_src/sljitExecAllocatorApple.c | 137
-rw-r--r--  src/3rdparty/pcre2/src/sljit/allocator_src/sljitExecAllocatorCore.c (renamed from src/3rdparty/pcre2/src/sljit/sljitExecAllocator.c) | 254
-rw-r--r--  src/3rdparty/pcre2/src/sljit/allocator_src/sljitExecAllocatorFreeBSD.c | 89
-rw-r--r--  src/3rdparty/pcre2/src/sljit/allocator_src/sljitExecAllocatorPosix.c | 62
-rw-r--r--  src/3rdparty/pcre2/src/sljit/allocator_src/sljitExecAllocatorWindows.c | 40
-rw-r--r--  src/3rdparty/pcre2/src/sljit/allocator_src/sljitProtExecAllocatorNetBSD.c | 72
-rw-r--r--  src/3rdparty/pcre2/src/sljit/allocator_src/sljitProtExecAllocatorPosix.c | 172
-rw-r--r--  src/3rdparty/pcre2/src/sljit/allocator_src/sljitWXExecAllocatorPosix.c (renamed from src/3rdparty/pcre2/src/sljit/sljitWXExecAllocator.c) | 101
-rw-r--r--  src/3rdparty/pcre2/src/sljit/allocator_src/sljitWXExecAllocatorWindows.c | 102
-rw-r--r--  src/3rdparty/pcre2/src/sljit/sljitConfig.h | 26
-rw-r--r--  src/3rdparty/pcre2/src/sljit/sljitConfigCPU.h | 188
-rw-r--r--  src/3rdparty/pcre2/src/sljit/sljitConfigInternal.h | 388
-rw-r--r--  src/3rdparty/pcre2/src/sljit/sljitLir.c | 1919
-rw-r--r--  src/3rdparty/pcre2/src/sljit/sljitLir.h | 1157
-rw-r--r--  src/3rdparty/pcre2/src/sljit/sljitNativeARM_32.c | 2418
-rw-r--r--  src/3rdparty/pcre2/src/sljit/sljitNativeARM_64.c | 1738
-rw-r--r--  src/3rdparty/pcre2/src/sljit/sljitNativeARM_T2_32.c | 1758
-rw-r--r--  src/3rdparty/pcre2/src/sljit/sljitNativeLOONGARCH_64.c | 3765
-rw-r--r--  src/3rdparty/pcre2/src/sljit/sljitNativeMIPS_32.c | 178
-rw-r--r--  src/3rdparty/pcre2/src/sljit/sljitNativeMIPS_64.c | 84
-rw-r--r--  src/3rdparty/pcre2/src/sljit/sljitNativeMIPS_common.c | 1439
-rw-r--r--  src/3rdparty/pcre2/src/sljit/sljitNativePPC_32.c | 153
-rw-r--r--  src/3rdparty/pcre2/src/sljit/sljitNativePPC_64.c | 152
-rw-r--r--  src/3rdparty/pcre2/src/sljit/sljitNativePPC_common.c | 1318
-rw-r--r--  src/3rdparty/pcre2/src/sljit/sljitNativeRISCV_32.c | 71
-rw-r--r--  src/3rdparty/pcre2/src/sljit/sljitNativeRISCV_64.c | 43
-rw-r--r--  src/3rdparty/pcre2/src/sljit/sljitNativeRISCV_common.c | 1073
-rw-r--r--  src/3rdparty/pcre2/src/sljit/sljitNativeS390X.c | 1671
-rw-r--r--  src/3rdparty/pcre2/src/sljit/sljitNativeSPARC_32.c | 283
-rw-r--r--  src/3rdparty/pcre2/src/sljit/sljitNativeSPARC_common.c | 1673
-rw-r--r--  src/3rdparty/pcre2/src/sljit/sljitNativeX86_32.c | 549
-rw-r--r--  src/3rdparty/pcre2/src/sljit/sljitNativeX86_64.c | 582
-rw-r--r--  src/3rdparty/pcre2/src/sljit/sljitNativeX86_common.c | 3605
-rw-r--r--  src/3rdparty/pcre2/src/sljit/sljitProtExecAllocator.c | 474
-rw-r--r--  src/3rdparty/pcre2/src/sljit/sljitSerialize.c | 516
69 files changed, 26713 insertions, 12962 deletions
diff --git a/src/3rdparty/pcre2/AUTHORS b/src/3rdparty/pcre2/AUTHORS
index 11ef898b25..9669f7755a 100644
--- a/src/3rdparty/pcre2/AUTHORS
+++ b/src/3rdparty/pcre2/AUTHORS
@@ -8,7 +8,7 @@ Email domain: gmail.com
Retired from University of Cambridge Computing Service,
Cambridge, England.
-Copyright (c) 1997-2022 University of Cambridge
+Copyright (c) 1997-2024 University of Cambridge
All rights reserved
@@ -19,7 +19,7 @@ Written by: Zoltan Herczeg
Email local part: hzmester
Emain domain: freemail.hu
-Copyright(c) 2010-2022 Zoltan Herczeg
+Copyright(c) 2010-2024 Zoltan Herczeg
All rights reserved.
@@ -30,7 +30,7 @@ Written by: Zoltan Herczeg
Email local part: hzmester
Emain domain: freemail.hu
-Copyright(c) 2009-2022 Zoltan Herczeg
+Copyright(c) 2009-2024 Zoltan Herczeg
All rights reserved.
####
diff --git a/src/3rdparty/pcre2/CMakeLists.txt b/src/3rdparty/pcre2/CMakeLists.txt
index a688cf608a..22b90a57e1 100644
--- a/src/3rdparty/pcre2/CMakeLists.txt
+++ b/src/3rdparty/pcre2/CMakeLists.txt
@@ -11,6 +11,7 @@ qt_internal_add_3rdparty_library(BundledPcre2
src/pcre2.h
src/pcre2_auto_possess.c
src/pcre2_chartables.c
+ src/pcre2_chkdint.c
src/pcre2_compile.c
src/pcre2_config.c
src/pcre2_context.c
diff --git a/src/3rdparty/pcre2/LICENCE b/src/3rdparty/pcre2/LICENCE
index 2f3cd5cac5..3c1ef032de 100644
--- a/src/3rdparty/pcre2/LICENCE
+++ b/src/3rdparty/pcre2/LICENCE
@@ -26,7 +26,7 @@ Email domain: gmail.com
Retired from University of Cambridge Computing Service,
Cambridge, England.
-Copyright (c) 1997-2022 University of Cambridge
+Copyright (c) 1997-2024 University of Cambridge
All rights reserved.
@@ -37,7 +37,7 @@ Written by: Zoltan Herczeg
Email local part: hzmester
Email domain: freemail.hu
-Copyright(c) 2010-2022 Zoltan Herczeg
+Copyright(c) 2010-2024 Zoltan Herczeg
All rights reserved.
@@ -48,7 +48,7 @@ Written by: Zoltan Herczeg
Email local part: hzmester
Email domain: freemail.hu
-Copyright(c) 2009-2022 Zoltan Herczeg
+Copyright(c) 2009-2024 Zoltan Herczeg
All rights reserved.
diff --git a/src/3rdparty/pcre2/import_from_pcre2_tarball.sh b/src/3rdparty/pcre2/import_from_pcre2_tarball.sh
index 3a21416441..09f2ca9283 100755
--- a/src/3rdparty/pcre2/import_from_pcre2_tarball.sh
+++ b/src/3rdparty/pcre2/import_from_pcre2_tarball.sh
@@ -48,6 +48,7 @@ FILES="
LICENCE
src/pcre2_auto_possess.c
+ src/pcre2_chkdint.c
src/pcre2_compile.c
src/pcre2_config.c
src/pcre2_context.c
@@ -80,14 +81,15 @@ FILES="
src/pcre2_ucptables.c
src/pcre2_valid_utf.c
src/pcre2_xclass.c
+ src/sljit/sljitConfigCPU.h
src/sljit/sljitConfig.h
src/sljit/sljitConfigInternal.h
- src/sljit/sljitExecAllocator.c
src/sljit/sljitLir.c
src/sljit/sljitLir.h
src/sljit/sljitNativeARM_32.c
src/sljit/sljitNativeARM_64.c
src/sljit/sljitNativeARM_T2_32.c
+ src/sljit/sljitNativeLOONGARCH_64.c
src/sljit/sljitNativeMIPS_32.c
src/sljit/sljitNativeMIPS_64.c
src/sljit/sljitNativeMIPS_common.c
@@ -101,9 +103,17 @@ FILES="
src/sljit/sljitNativeX86_32.c
src/sljit/sljitNativeX86_64.c
src/sljit/sljitNativeX86_common.c
- src/sljit/sljitProtExecAllocator.c
+ src/sljit/sljitSerialize.c
src/sljit/sljitUtils.c
- src/sljit/sljitWXExecAllocator.c
+ src/sljit/allocator_src/sljitExecAllocatorPosix.c
+ src/sljit/allocator_src/sljitProtExecAllocatorPosix.c
+ src/sljit/allocator_src/sljitWXExecAllocatorPosix.c
+ src/sljit/allocator_src/sljitProtExecAllocatorNetBSD.c
+ src/sljit/allocator_src/sljitExecAllocatorWindows.c
+ src/sljit/allocator_src/sljitExecAllocatorFreeBSD.c
+ src/sljit/allocator_src/sljitExecAllocatorApple.c
+ src/sljit/allocator_src/sljitWXExecAllocatorWindows.c
+ src/sljit/allocator_src/sljitExecAllocatorCore.c
"
for i in $FILES; do
diff --git a/src/3rdparty/pcre2/qt_attribution.json b/src/3rdparty/pcre2/qt_attribution.json
index 5599dd389e..d9dc1ae697 100644
--- a/src/3rdparty/pcre2/qt_attribution.json
+++ b/src/3rdparty/pcre2/qt_attribution.json
@@ -8,13 +8,13 @@
"Description": "The PCRE library is a set of functions that implement regular expression pattern matching using the same syntax and semantics as Perl 5.",
"Homepage": "http://www.pcre.org/",
- "Version": "10.42",
- "DownloadLocation": "https://github.com/PCRE2Project/pcre2/releases/download/pcre2-10.42/pcre2-10.42.tar.bz2",
+ "Version": "10.44",
+ "DownloadLocation": "https://github.com/PCRE2Project/pcre2/releases/download/pcre2-10.44/pcre2-10.44.tar.bz2",
"License": "BSD 3-clause \"New\" or \"Revised\" License with PCRE2 binary-like Packages Exception",
"LicenseId": "LicenseRef-BSD-3-Clause-with-PCRE2-Binary-Like-Packages-Exception",
"LicenseFile": "LICENCE",
- "Copyright": "Copyright (c) 1997-2022 University of Cambridge
-Copyright (c) 2010-2022 Zoltan Herczeg"
+ "Copyright": ["Copyright (c) 1997-2024 University of Cambridge",
+ "Copyright (c) 2010-2024 Zoltan Herczeg"]
},
{
"Id": "pcre2-sljit",
@@ -25,11 +25,11 @@ Copyright (c) 2010-2022 Zoltan Herczeg"
"Path": "src/sljit",
"Description": "The PCRE library is a set of functions that implement regular expression pattern matching using the same syntax and semantics as Perl 5.",
"Homepage": "http://www.pcre.org/",
- "Version": "10.42",
- "DownloadLocation": "https://github.com/PCRE2Project/pcre2/releases/download/pcre2-10.42/pcre2-10.42.tar.bz2",
+ "Version": "10.44",
+ "DownloadLocation": "https://github.com/PCRE2Project/pcre2/releases/download/pcre2-10.44/pcre2-10.44.tar.bz2",
"License": "BSD 2-clause \"Simplified\" License",
"LicenseId": "BSD-2-Clause",
"LicenseFile": "LICENCE-SLJIT",
- "Copyright": "Copyright (c) 2009-2022 Zoltan Herczeg"
+ "Copyright": "Copyright (c) 2009-2024 Zoltan Herczeg"
}
]
diff --git a/src/3rdparty/pcre2/src/config.h b/src/3rdparty/pcre2/src/config.h
index eeade9d9ce..72518dca5f 100644
--- a/src/3rdparty/pcre2/src/config.h
+++ b/src/3rdparty/pcre2/src/config.h
@@ -14,13 +14,15 @@
#define MAX_NAME_SIZE 32
#define NEWLINE_DEFAULT 2
#define PARENS_NEST_LIMIT 250
+#define MAX_VARLOOKBEHIND 255
#define SUPPORT_UNICODE
+#define PCRE2_EXPORT
/*
man 3 pcre2jit for a list of supported platforms;
- as PCRE2 10.22, stable JIT support is available for:
- - ARM 32-bit (v5, v7, and Thumb2)
+ as PCRE2 10.43, stable JIT support is available for:
+ - ARM 32-bit (v7 and Thumb2)
- ARM 64-bit
- Intel x86 32-bit and 64-bit
- MIPS 32-bit and 64-bit
@@ -32,7 +34,7 @@
#if !defined(PCRE2_DISABLE_JIT) && (\
/* ARM */ \
(defined(__GNUC__) \
- && (defined(__arm__) || defined(__TARGET_ARCH_ARM) || defined(_M_ARM) || defined(__aarch64__))) \
+ && (defined(__ARM_ARCH_7__) || defined(__ARM_ARCH_7A__) || defined(__aarch64__))) \
/* x86 32/64 */ \
|| defined(__i386) || defined(__i386__) || defined(_M_IX86) \
|| defined(__x86_64) || defined(__x86_64__) || defined(__amd64) || defined(_M_X64) \
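
The guard above only decides whether JIT support is compiled into the bundled library. A minimal sketch (not part of this change) of checking the result at run time through PCRE2's documented pcre2_config() API; the 16-bit code-unit width is an assumption matching Qt's use of UTF-16 strings:

/* Sketch only: query whether the library was built with JIT support. */
#define PCRE2_CODE_UNIT_WIDTH 16
#include <pcre2.h>
#include <stdio.h>

int main(void)
{
    uint32_t have_jit = 0;
    /* pcre2_config() returns a negative error code on failure;
       on success have_jit is 1 when JIT is available, 0 otherwise. */
    if (pcre2_config(PCRE2_CONFIG_JIT, &have_jit) < 0)
        return 1;
    printf("JIT compiled in: %s\n", have_jit ? "yes" : "no");
    return 0;
}
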
diff --git a/src/3rdparty/pcre2/src/pcre2.h b/src/3rdparty/pcre2/src/pcre2.h
index 1cbecd0e86..a322d9f2d5 100644
--- a/src/3rdparty/pcre2/src/pcre2.h
+++ b/src/3rdparty/pcre2/src/pcre2.h
@@ -5,7 +5,7 @@
/* This is the public header file for the PCRE library, second API, to be
#included by applications that call PCRE2 functions.
- Copyright (c) 2016-2021 University of Cambridge
+ Copyright (c) 2016-2024 University of Cambridge
-----------------------------------------------------------------------------
Redistribution and use in source and binary forms, with or without
@@ -42,9 +42,9 @@ POSSIBILITY OF SUCH DAMAGE.
/* The current PCRE version information. */
#define PCRE2_MAJOR 10
-#define PCRE2_MINOR 42
+#define PCRE2_MINOR 44
#define PCRE2_PRERELEASE
-#define PCRE2_DATE 2022-12-11
+#define PCRE2_DATE 2024-06-07
/* When an application links to a PCRE DLL in Windows, the symbols that are
imported have to be identified as such. When building PCRE2, the appropriate
@@ -153,6 +153,12 @@ D is inspected during pcre2_dfa_match() execution
#define PCRE2_EXTRA_ESCAPED_CR_IS_LF 0x00000010u /* C */
#define PCRE2_EXTRA_ALT_BSUX 0x00000020u /* C */
#define PCRE2_EXTRA_ALLOW_LOOKAROUND_BSK 0x00000040u /* C */
+#define PCRE2_EXTRA_CASELESS_RESTRICT 0x00000080u /* C */
+#define PCRE2_EXTRA_ASCII_BSD 0x00000100u /* C */
+#define PCRE2_EXTRA_ASCII_BSS 0x00000200u /* C */
+#define PCRE2_EXTRA_ASCII_BSW 0x00000400u /* C */
+#define PCRE2_EXTRA_ASCII_POSIX 0x00000800u /* C */
+#define PCRE2_EXTRA_ASCII_DIGIT 0x00001000u /* C */
/* These are for pcre2_jit_compile(). */
@@ -180,11 +186,12 @@ pcre2_jit_match() ignores the latter since it bypasses all sanity checks). */
#define PCRE2_SUBSTITUTE_UNSET_EMPTY 0x00000400u /* pcre2_substitute() only */
#define PCRE2_SUBSTITUTE_UNKNOWN_UNSET 0x00000800u /* pcre2_substitute() only */
#define PCRE2_SUBSTITUTE_OVERFLOW_LENGTH 0x00001000u /* pcre2_substitute() only */
-#define PCRE2_NO_JIT 0x00002000u /* Not for pcre2_dfa_match() */
+#define PCRE2_NO_JIT 0x00002000u /* not for pcre2_dfa_match() */
#define PCRE2_COPY_MATCHED_SUBJECT 0x00004000u
#define PCRE2_SUBSTITUTE_LITERAL 0x00008000u /* pcre2_substitute() only */
#define PCRE2_SUBSTITUTE_MATCHED 0x00010000u /* pcre2_substitute() only */
#define PCRE2_SUBSTITUTE_REPLACEMENT_ONLY 0x00020000u /* pcre2_substitute() only */
+#define PCRE2_DISABLE_RECURSELOOP_CHECK 0x00040000u /* not for pcre2_dfa_match() or pcre2_jit_match() */
/* Options for pcre2_pattern_convert(). */
@@ -399,6 +406,7 @@ released, the numbers must not be changed. */
#define PCRE2_ERROR_CONVERT_SYNTAX (-64)
#define PCRE2_ERROR_INTERNAL_DUPMATCH (-65)
#define PCRE2_ERROR_DFA_UINVALID_UTF (-66)
+#define PCRE2_ERROR_INVALIDOFFSET (-67)
/* Request types for pcre2_pattern_info() */
@@ -575,7 +583,7 @@ PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION pcre2_config(uint32_t, void *);
PCRE2_EXP_DECL pcre2_general_context *PCRE2_CALL_CONVENTION \
pcre2_general_context_copy(pcre2_general_context *); \
PCRE2_EXP_DECL pcre2_general_context *PCRE2_CALL_CONVENTION \
- pcre2_general_context_create(void *(*)(PCRE2_SIZE, void *), \
+ pcre2_general_context_create(void *(*)(size_t, void *), \
void (*)(void *, void *), void *); \
PCRE2_EXP_DECL void PCRE2_CALL_CONVENTION \
pcre2_general_context_free(pcre2_general_context *);
@@ -596,6 +604,10 @@ PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
pcre2_set_max_pattern_length(pcre2_compile_context *, PCRE2_SIZE); \
PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
+ pcre2_set_max_pattern_compiled_length(pcre2_compile_context *, PCRE2_SIZE); \
+PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
+ pcre2_set_max_varlookbehind(pcre2_compile_context *, uint32_t); \
+PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
pcre2_set_newline(pcre2_compile_context *, uint32_t); \
PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
pcre2_set_parens_nest_limit(pcre2_compile_context *, uint32_t); \
@@ -628,7 +640,7 @@ PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
pcre2_set_recursion_limit(pcre2_match_context *, uint32_t); \
PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
pcre2_set_recursion_memory_management(pcre2_match_context *, \
- void *(*)(PCRE2_SIZE, void *), void (*)(void *, void *), void *);
+ void *(*)(size_t, void *), void (*)(void *, void *), void *);
#define PCRE2_CONVERT_CONTEXT_FUNCTIONS \
PCRE2_EXP_DECL pcre2_convert_context *PCRE2_CALL_CONVENTION \
@@ -687,6 +699,8 @@ PCRE2_EXP_DECL PCRE2_SPTR PCRE2_CALL_CONVENTION \
pcre2_get_mark(pcre2_match_data *); \
PCRE2_EXP_DECL PCRE2_SIZE PCRE2_CALL_CONVENTION \
pcre2_get_match_data_size(pcre2_match_data *); \
+PCRE2_EXP_DECL PCRE2_SIZE PCRE2_CALL_CONVENTION \
+ pcre2_get_match_data_heapframes_size(pcre2_match_data *); \
PCRE2_EXP_DECL uint32_t PCRE2_CALL_CONVENTION \
pcre2_get_ovector_count(pcre2_match_data *); \
PCRE2_EXP_DECL PCRE2_SIZE *PCRE2_CALL_CONVENTION \
@@ -722,7 +736,7 @@ PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
pcre2_substring_number_from_name(const pcre2_code *, PCRE2_SPTR); \
PCRE2_EXP_DECL void PCRE2_CALL_CONVENTION \
- pcre2_substring_list_free(PCRE2_SPTR *); \
+ pcre2_substring_list_free(PCRE2_UCHAR **); \
PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
pcre2_substring_list_get(pcre2_match_data *, PCRE2_UCHAR ***, PCRE2_SIZE **);
@@ -771,7 +785,7 @@ PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
PCRE2_EXP_DECL void PCRE2_CALL_CONVENTION \
pcre2_jit_free_unused_memory(pcre2_general_context *); \
PCRE2_EXP_DECL pcre2_jit_stack *PCRE2_CALL_CONVENTION \
- pcre2_jit_stack_create(PCRE2_SIZE, PCRE2_SIZE, pcre2_general_context *); \
+ pcre2_jit_stack_create(size_t, size_t, pcre2_general_context *); \
PCRE2_EXP_DECL void PCRE2_CALL_CONVENTION \
pcre2_jit_stack_assign(pcre2_match_context *, pcre2_jit_callback, void *); \
PCRE2_EXP_DECL void PCRE2_CALL_CONVENTION \
@@ -851,6 +865,7 @@ pcre2_compile are called by application code. */
#define pcre2_general_context_free PCRE2_SUFFIX(pcre2_general_context_free_)
#define pcre2_get_error_message PCRE2_SUFFIX(pcre2_get_error_message_)
#define pcre2_get_mark PCRE2_SUFFIX(pcre2_get_mark_)
+#define pcre2_get_match_data_heapframes_size PCRE2_SUFFIX(pcre2_get_match_data_heapframes_size_)
#define pcre2_get_match_data_size PCRE2_SUFFIX(pcre2_get_match_data_size_)
#define pcre2_get_ovector_pointer PCRE2_SUFFIX(pcre2_get_ovector_pointer_)
#define pcre2_get_ovector_count PCRE2_SUFFIX(pcre2_get_ovector_count_)
@@ -886,7 +901,9 @@ pcre2_compile are called by application code. */
#define pcre2_set_glob_separator PCRE2_SUFFIX(pcre2_set_glob_separator_)
#define pcre2_set_heap_limit PCRE2_SUFFIX(pcre2_set_heap_limit_)
#define pcre2_set_match_limit PCRE2_SUFFIX(pcre2_set_match_limit_)
+#define pcre2_set_max_varlookbehind PCRE2_SUFFIX(pcre2_set_max_varlookbehind_)
#define pcre2_set_max_pattern_length PCRE2_SUFFIX(pcre2_set_max_pattern_length_)
+#define pcre2_set_max_pattern_compiled_length PCRE2_SUFFIX(pcre2_set_max_pattern_compiled_length_)
#define pcre2_set_newline PCRE2_SUFFIX(pcre2_set_newline_)
#define pcre2_set_parens_nest_limit PCRE2_SUFFIX(pcre2_set_parens_nest_limit_)
#define pcre2_set_offset_limit PCRE2_SUFFIX(pcre2_set_offset_limit_)
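
The declarations added above are the new 10.43/10.44 entry points and extra-option bits. Below is a hedged sketch, not taken from this change, of how an application might exercise them; the helper name, the chosen limits, and the pattern are illustrative only:

/* Sketch only: configure the new 10.44 compile-context limits and
   ASCII-restriction options, then compile a variable-length lookbehind. */
#define PCRE2_CODE_UNIT_WIDTH 8
#include <pcre2.h>

static pcre2_code *compile_with_1044_limits(void)
{
    int errcode;
    PCRE2_SIZE erroffset;
    pcre2_compile_context *cc = pcre2_compile_context_create(NULL);
    if (cc == NULL) return NULL;

    pcre2_set_max_varlookbehind(cc, 64);               /* new in this update */
    pcre2_set_max_pattern_compiled_length(cc, 1 << 20);
    pcre2_set_compile_extra_options(cc, PCRE2_EXTRA_ASCII_BSD |
                                        PCRE2_EXTRA_ASCII_BSW);

    pcre2_code *re = pcre2_compile((PCRE2_SPTR)"(?<=a{1,8})\\w+",
                                   PCRE2_ZERO_TERMINATED,
                                   PCRE2_UTF | PCRE2_UCP,
                                   &errcode, &erroffset, cc);
    pcre2_compile_context_free(cc);
    return re;   /* NULL on error; caller frees with pcre2_code_free() */
}

The same header also adds pcre2_get_match_data_heapframes_size(), which reports the size of the heap-frames vector attached to a match data block after interpretive matching.
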
diff --git a/src/3rdparty/pcre2/src/pcre2_auto_possess.c b/src/3rdparty/pcre2/src/pcre2_auto_possess.c
index 419fd49001..210d13d37a 100644
--- a/src/3rdparty/pcre2/src/pcre2_auto_possess.c
+++ b/src/3rdparty/pcre2/src/pcre2_auto_possess.c
@@ -560,6 +560,8 @@ matches to an empty string (also represented by a non-zero value). */
for(;;)
{
+ PCRE2_SPTR bracode;
+
/* All operations move the code pointer forward.
Therefore infinite recursions are not possible. */
@@ -617,7 +619,8 @@ for(;;)
recursions. (This could be improved by keeping a list of group numbers that
are called by recursion.) */
- switch(*(code - GET(code, 1)))
+ bracode = code - GET(code, 1);
+ switch(*bracode)
{
case OP_CBRA:
case OP_SCBRA:
@@ -636,16 +639,19 @@ for(;;)
break;
/* Atomic sub-patterns and assertions can always auto-possessify their
- last iterator. However, if the group was entered as a result of checking
- a previous iterator, this is not possible. */
+ last iterator except for variable length lookbehinds. However, if the
+ group was entered as a result of checking a previous iterator, this is
+ not possible. */
case OP_ASSERT:
case OP_ASSERT_NOT:
- case OP_ASSERTBACK:
- case OP_ASSERTBACK_NOT:
case OP_ONCE:
return !entered_a_group;
+ case OP_ASSERTBACK:
+ case OP_ASSERTBACK_NOT:
+ return (bracode[1+LINK_SIZE] == OP_VREVERSE)? FALSE : !entered_a_group;
+
/* Non-atomic assertions - don't possessify last iterator. This needs
more thought. */
diff --git a/src/3rdparty/pcre2/src/pcre2_chartables.c b/src/3rdparty/pcre2/src/pcre2_chartables.c
index 861914d1ac..7362c3f234 100644
--- a/src/3rdparty/pcre2/src/pcre2_chartables.c
+++ b/src/3rdparty/pcre2/src/pcre2_chartables.c
@@ -5,7 +5,8 @@
/* This file was automatically written by the pcre2_dftables auxiliary
program. It contains character tables that are used when no external
tables are passed to PCRE2 by the application that calls it. The tables
-are used only for characters whose code values are less than 256. */
+are used only for characters whose code values are less than 256, and
+only relevant if not in UCP mode. */
/* This set of tables was written in the C locale. */
@@ -18,13 +19,6 @@ PCRE2 is configured with --enable-rebuild-chartables. However, you can run
pcre2_dftables manually with the -L option to build tables using the LC_ALL
locale. */
-/* The following #include is present because without it gcc 4.x may remove
-the array definition from the final binary if PCRE2 is built into a static
-library and dead code stripping is activated. This leads to link errors.
-Pulling in the header ensures that the array gets flagged as "someone
-outside this compilation unit might reference this" and so it will always
-be supplied to the linker. */
-
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
@@ -163,7 +157,7 @@ graph, print, punct, and cntrl. Other classes are built from combinations. */
0x02 letter
0x04 lower case letter
0x08 decimal digit
- 0x10 alphanumeric or '_'
+ 0x10 word (alphanumeric or '_')
*/
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, /* 0- 7 */
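
The comment above notes that these built-in tables are used only when the application supplies no external tables. For context, a brief sketch (not part of this change) of supplying locale-built tables instead via PCRE2's documented pcre2_maketables()/pcre2_set_character_tables() API; the helper name is illustrative:

/* Sketch only: build character tables for the current locale and attach
   them to a compile context instead of the defaults above. */
#define PCRE2_CODE_UNIT_WIDTH 8
#include <locale.h>
#include <pcre2.h>

static pcre2_code *compile_with_locale_tables(const char *pattern)
{
    int errcode;
    PCRE2_SIZE erroffset;

    setlocale(LC_CTYPE, "");                     /* tables reflect the user's locale */
    const uint8_t *tables = pcre2_maketables(NULL);

    pcre2_compile_context *cc = pcre2_compile_context_create(NULL);
    if (cc == NULL)
      {
      pcre2_maketables_free(NULL, tables);
      return NULL;
      }
    pcre2_set_character_tables(cc, tables);

    pcre2_code *re = pcre2_compile((PCRE2_SPTR)pattern, PCRE2_ZERO_TERMINATED,
                                   0, &errcode, &erroffset, cc);
    pcre2_compile_context_free(cc);
    /* 'tables' must outlive 're'; release with pcre2_maketables_free(NULL, tables)
       after pcre2_code_free(re). */
    return re;
}
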
diff --git a/src/3rdparty/pcre2/src/pcre2_chkdint.c b/src/3rdparty/pcre2/src/pcre2_chkdint.c
new file mode 100644
index 0000000000..d04f6f8cf1
--- /dev/null
+++ b/src/3rdparty/pcre2/src/pcre2_chkdint.c
@@ -0,0 +1,96 @@
+/*************************************************
+* Perl-Compatible Regular Expressions *
+*************************************************/
+
+/* PCRE is a library of functions to support regular expressions whose syntax
+and semantics are as close as possible to those of the Perl 5 language.
+
+ Written by Philip Hazel
+ Copyright (c) 2023 University of Cambridge
+
+-----------------------------------------------------------------------------
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+ * Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+ * Neither the name of the University of Cambridge nor the names of its
+ contributors may be used to endorse or promote products derived from
+ this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
+-----------------------------------------------------------------------------
+*/
+
+/* This file contains functions to implement checked integer operation */
+
+#ifndef PCRE2_PCRE2TEST
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "pcre2_internal.h"
+#endif
+
+/*************************************************
+* Checked Integer Multiplication *
+*************************************************/
+
+/*
+Arguments:
+ r A pointer to PCRE2_SIZE to store the answer
+ a, b Two integers
+
+Returns: Bool indicating if the operation overflows
+
+It is modeled after C23's <stdckdint.h> interface
+The INT64_OR_DOUBLE type is a 64-bit integer type when available,
+otherwise double. */
+
+BOOL
+PRIV(ckd_smul)(PCRE2_SIZE *r, int a, int b)
+{
+#ifdef HAVE_BUILTIN_MUL_OVERFLOW
+PCRE2_SIZE m;
+
+if (__builtin_mul_overflow(a, b, &m)) return TRUE;
+
+*r = m;
+#else
+INT64_OR_DOUBLE m;
+
+#ifdef PCRE2_DEBUG
+if (a < 0 || b < 0) abort();
+#endif
+
+m = (INT64_OR_DOUBLE)a * (INT64_OR_DOUBLE)b;
+
+#if defined INT64_MAX || defined int64_t
+if (sizeof(m) > sizeof(*r) && m > (INT64_OR_DOUBLE)PCRE2_SIZE_MAX) return TRUE;
+*r = (PCRE2_SIZE)m;
+#else
+if (m > PCRE2_SIZE_MAX) return TRUE;
+*r = m;
+#endif
+
+#endif
+
+return FALSE;
+}
+
+/* End of pcre_chkdint.c */
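
For illustration only (not part of this change): the kind of overflow guard that the new ckd_smul() helper provides, reduced to a standalone allocation check using the same GCC/Clang __builtin_mul_overflow branch shown above. The function below is a hypothetical caller, not PCRE2's internal API.

/* Sketch only: refuse a size computation that would wrap around SIZE_MAX. */
#include <stddef.h>
#include <stdlib.h>

static void *alloc_elements(size_t count, size_t elem_size)
{
    size_t total;
    /* __builtin_mul_overflow returns true if count * elem_size overflows. */
    if (__builtin_mul_overflow(count, elem_size, &total)) return NULL;
    return malloc(total);
}
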
diff --git a/src/3rdparty/pcre2/src/pcre2_compile.c b/src/3rdparty/pcre2/src/pcre2_compile.c
index edf7e82e6e..8e6787aba3 100644
--- a/src/3rdparty/pcre2/src/pcre2_compile.c
+++ b/src/3rdparty/pcre2/src/pcre2_compile.c
@@ -7,7 +7,7 @@ and semantics are as close as possible to those of the Perl 5 language.
Written by Philip Hazel
Original API code Copyright (c) 1997-2012 University of Cambridge
- New API code Copyright (c) 2016-2022 University of Cambridge
+ New API code Copyright (c) 2016-2024 University of Cambridge
-----------------------------------------------------------------------------
Redistribution and use in source and binary forms, with or without
@@ -118,17 +118,17 @@ them will be able to (i.e. assume a 64-bit world). */
#ifdef SUPPORT_UNICODE
static unsigned int
- add_list_to_class_internal(uint8_t *, PCRE2_UCHAR **, uint32_t,
+ add_list_to_class_internal(uint8_t *, PCRE2_UCHAR **, uint32_t, uint32_t,
compile_block *, const uint32_t *, unsigned int);
#endif
static int
- compile_regex(uint32_t, PCRE2_UCHAR **, uint32_t **, int *, uint32_t,
- uint32_t *, uint32_t *, uint32_t *, uint32_t *, branch_chain *,
- compile_block *, PCRE2_SIZE *);
+ compile_regex(uint32_t, uint32_t, PCRE2_UCHAR **, uint32_t **, int *,
+ uint32_t, uint32_t *, uint32_t *, uint32_t *, uint32_t *, branch_chain *,
+ open_capitem *, compile_block *, PCRE2_SIZE *);
static int
- get_branchlength(uint32_t **, int *, int *, parsed_recurse_check *,
+ get_branchlength(uint32_t **, int *, int *, int *, parsed_recurse_check *,
compile_block *);
static BOOL
@@ -694,8 +694,8 @@ static uint32_t chartypeoffset[] = {
now all in a single string, to reduce the number of relocations when a shared
library is dynamically loaded. The list of lengths is terminated by a zero
length entry. The first three must be alpha, lower, upper, as this is assumed
-for handling case independence. The indices for graph, print, and punct are
-needed, so identify them. */
+for handling case independence. The indices for several classes are needed, so
+identify them. */
static const char posix_names[] =
STRING_alpha0 STRING_lower0 STRING_upper0 STRING_alnum0
@@ -706,9 +706,11 @@ static const char posix_names[] =
static const uint8_t posix_name_lengths[] = {
5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 4, 6, 0 };
-#define PC_GRAPH 8
-#define PC_PRINT 9
-#define PC_PUNCT 10
+#define PC_DIGIT 7
+#define PC_GRAPH 8
+#define PC_PRINT 9
+#define PC_PUNCT 10
+#define PC_XDIGIT 13
/* Table of class bit maps for each POSIX class. Each class is formed from a
base map, with an optional addition or removal of another map. Then, for some
@@ -721,20 +723,20 @@ absolute value of the third field has these meanings: 0 => no tweaking, 1 =>
remove vertical space characters, 2 => remove underscore. */
static const int posix_class_maps[] = {
- cbit_word, cbit_digit, -2, /* alpha */
- cbit_lower, -1, 0, /* lower */
- cbit_upper, -1, 0, /* upper */
- cbit_word, -1, 2, /* alnum - word without underscore */
- cbit_print, cbit_cntrl, 0, /* ascii */
- cbit_space, -1, 1, /* blank - a GNU extension */
- cbit_cntrl, -1, 0, /* cntrl */
- cbit_digit, -1, 0, /* digit */
- cbit_graph, -1, 0, /* graph */
- cbit_print, -1, 0, /* print */
- cbit_punct, -1, 0, /* punct */
- cbit_space, -1, 0, /* space */
- cbit_word, -1, 0, /* word - a Perl extension */
- cbit_xdigit,-1, 0 /* xdigit */
+ cbit_word, cbit_digit, -2, /* alpha */
+ cbit_lower, -1, 0, /* lower */
+ cbit_upper, -1, 0, /* upper */
+ cbit_word, -1, 2, /* alnum - word without underscore */
+ cbit_print, cbit_cntrl, 0, /* ascii */
+ cbit_space, -1, 1, /* blank - a GNU extension */
+ cbit_cntrl, -1, 0, /* cntrl */
+ cbit_digit, -1, 0, /* digit */
+ cbit_graph, -1, 0, /* graph */
+ cbit_print, -1, 0, /* print */
+ cbit_punct, -1, 0, /* punct */
+ cbit_space, -1, 0, /* space */
+ cbit_word, -1, 0, /* word - a Perl extension */
+ cbit_xdigit, -1, 0 /* xdigit */
};
#ifdef SUPPORT_UNICODE
@@ -756,7 +758,7 @@ static int posix_substitutes[] = {
PT_PXPUNCT, 0, /* punct */
PT_PXSPACE, 0, /* space */ /* Xps is POSIX space, but from 8.34 */
PT_WORD, 0, /* word */ /* Perl and POSIX space are the same */
- -1, 0 /* xdigit, treat as non-UCP */
+ PT_PXXDIGIT, 0 /* xdigit */ /* Perl has additional hex digits */
};
#define POSIX_SUBSIZE (sizeof(posix_substitutes) / (2*sizeof(uint32_t)))
#endif /* SUPPORT_UNICODE */
@@ -779,13 +781,15 @@ are allowed. */
PCRE2_NO_DOTSTAR_ANCHOR|PCRE2_UCP|PCRE2_UNGREEDY)
#define PUBLIC_LITERAL_COMPILE_EXTRA_OPTIONS \
- (PCRE2_EXTRA_MATCH_LINE|PCRE2_EXTRA_MATCH_WORD)
+ (PCRE2_EXTRA_MATCH_LINE|PCRE2_EXTRA_MATCH_WORD|PCRE2_EXTRA_CASELESS_RESTRICT)
#define PUBLIC_COMPILE_EXTRA_OPTIONS \
(PUBLIC_LITERAL_COMPILE_EXTRA_OPTIONS| \
PCRE2_EXTRA_ALLOW_SURROGATE_ESCAPES|PCRE2_EXTRA_BAD_ESCAPE_IS_LITERAL| \
PCRE2_EXTRA_ESCAPED_CR_IS_LF|PCRE2_EXTRA_ALT_BSUX| \
- PCRE2_EXTRA_ALLOW_LOOKAROUND_BSK)
+ PCRE2_EXTRA_ALLOW_LOOKAROUND_BSK|PCRE2_EXTRA_ASCII_BSD| \
+ PCRE2_EXTRA_ASCII_BSS|PCRE2_EXTRA_ASCII_BSW|PCRE2_EXTRA_ASCII_POSIX| \
+ PCRE2_EXTRA_ASCII_DIGIT)
/* Compile time error code numbers. They are given names so that they can more
easily be tracked. When a new number is added, the tables called eint1 and
@@ -804,7 +808,8 @@ enum { ERR0 = COMPILE_ERROR_BASE,
ERR61, ERR62, ERR63, ERR64, ERR65, ERR66, ERR67, ERR68, ERR69, ERR70,
ERR71, ERR72, ERR73, ERR74, ERR75, ERR76, ERR77, ERR78, ERR79, ERR80,
ERR81, ERR82, ERR83, ERR84, ERR85, ERR86, ERR87, ERR88, ERR89, ERR90,
- ERR91, ERR92, ERR93, ERR94, ERR95, ERR96, ERR97, ERR98, ERR99 };
+ ERR91, ERR92, ERR93, ERR94, ERR95, ERR96, ERR97, ERR98, ERR99, ERR100,
+ ERR101 };
/* This is a table of start-of-pattern options such as (*UTF) and settings such
as (*LIMIT_MATCH=nnnn) and (*CRLF). For completeness and backward
@@ -817,7 +822,8 @@ enum { PSO_OPT, /* Value is an option bit */
PSO_BSR, /* Value is a \R type */
PSO_LIMH, /* Read integer value for heap limit */
PSO_LIMM, /* Read integer value for match limit */
- PSO_LIMD }; /* Read integer value for depth limit */
+ PSO_LIMD /* Read integer value for depth limit */
+ };
typedef struct pso {
const uint8_t *name;
@@ -828,7 +834,7 @@ typedef struct pso {
/* NB: STRING_UTFn_RIGHTPAR contains the length as well */
-static pso pso_list[] = {
+static const pso pso_list[] = {
{ (uint8_t *)STRING_UTFn_RIGHTPAR, PSO_OPT, PCRE2_UTF },
{ (uint8_t *)STRING_UTF_RIGHTPAR, 4, PSO_OPT, PCRE2_UTF },
{ (uint8_t *)STRING_UCP_RIGHTPAR, 4, PSO_OPT, PCRE2_UCP },
@@ -1059,24 +1065,24 @@ for (;;)
case META_SKIP: fprintf(stderr, "META (*SKIP)"); break;
case META_THEN: fprintf(stderr, "META (*THEN)"); break;
- case META_OPTIONS: fprintf(stderr, "META_OPTIONS 0x%02x", *pptr++); break;
+ case META_OPTIONS:
+ fprintf(stderr, "META_OPTIONS 0x%08x 0x%08x", pptr[0], pptr[1]);
+ pptr += 2;
+ break;
case META_LOOKBEHIND:
- fprintf(stderr, "META (?<= %d offset=", meta_arg);
- GETOFFSET(offset, pptr);
- fprintf(stderr, "%zd", offset);
+ fprintf(stderr, "META (?<= %d %d", meta_arg, *pptr);
+ pptr += 2;
break;
case META_LOOKBEHIND_NA:
- fprintf(stderr, "META (*naplb: %d offset=", meta_arg);
- GETOFFSET(offset, pptr);
- fprintf(stderr, "%zd", offset);
+ fprintf(stderr, "META (*naplb: %d %d", meta_arg, *pptr);
+ pptr += 2;
break;
case META_LOOKBEHINDNOT:
- fprintf(stderr, "META (?<! %d offset=", meta_arg);
- GETOFFSET(offset, pptr);
- fprintf(stderr, "%zd", offset);
+ fprintf(stderr, "META (?<! %d %d", meta_arg, *pptr);
+ pptr += 2;
break;
case META_CALLOUT_NUMBER:
@@ -1297,9 +1303,9 @@ if (code != NULL)
*************************************************/
/* This function is used to read numbers in the pattern. The initial pointer
-must be the sign or first digit of the number. When relative values (introduced
-by + or -) are allowed, they are relative group numbers, and the result must be
-greater than zero.
+must be at the sign or first digit of the number. When relative values
+(introduced by + or -) are allowed, they are relative group numbers, and the
+result must be greater than zero.
Arguments:
ptrptr points to the character pointer variable
@@ -1383,17 +1389,18 @@ return yield;
* Read repeat counts *
*************************************************/
-/* Read an item of the form {n,m} and return the values if non-NULL pointers
+/* Read an item of the form {n,m} and return the values when non-NULL pointers
are supplied. Repeat counts must be less than 65536 (MAX_REPEAT_COUNT); a
larger value is used for "unlimited". We have to use signed arguments for
-read_number() because it is capable of returning a signed value.
+read_number() because it is capable of returning a signed value. As of Perl
+5.34.0 either n or m may be absent, but not both. Perl also allows spaces and
+tabs after { and before } and between the numbers and the comma, so we do too.
Arguments:
- ptrptr points to pointer to character after'{'
+ ptrptr points to pointer to character after '{'
ptrend pointer to end of input
minp if not NULL, pointer to int for min
- maxp if not NULL, pointer to int for max (-1 if no max)
- returned as -1 if no max
+ maxp if not NULL, pointer to int for max
errorcodeptr points to error code variable
Returns: FALSE if not a repeat quantifier, errorcode set zero
@@ -1405,57 +1412,96 @@ static BOOL
read_repeat_counts(PCRE2_SPTR *ptrptr, PCRE2_SPTR ptrend, uint32_t *minp,
uint32_t *maxp, int *errorcodeptr)
{
-PCRE2_SPTR p;
+PCRE2_SPTR p = *ptrptr;
+PCRE2_SPTR pp;
BOOL yield = FALSE;
-BOOL had_comma = FALSE;
+BOOL had_minimum = FALSE;
int32_t min = 0;
int32_t max = REPEAT_UNLIMITED; /* This value is larger than MAX_REPEAT_COUNT */
-/* Check the syntax */
-
*errorcodeptr = 0;
-for (p = *ptrptr;; p++)
+while (p < ptrend && (*p == CHAR_SPACE || *p == CHAR_HT)) p++;
+
+/* Check the syntax before interpreting. Otherwise, a non-quantifier sequence
+such as "X{123456ABC" would incorrectly give a "number too big in quantifier"
+error. */
+
+pp = p;
+if (pp < ptrend && IS_DIGIT(*pp))
+ {
+ had_minimum = TRUE;
+ while (++pp < ptrend && IS_DIGIT(*pp)) {}
+ }
+
+while (pp < ptrend && (*pp == CHAR_SPACE || *pp == CHAR_HT)) pp++;
+if (pp >= ptrend) return FALSE;
+
+if (*pp == CHAR_RIGHT_CURLY_BRACKET)
+ {
+ if (!had_minimum) return FALSE;
+ }
+else
{
- uint32_t c;
- if (p >= ptrend) return FALSE;
- c = *p;
- if (IS_DIGIT(c)) continue;
- if (c == CHAR_RIGHT_CURLY_BRACKET) break;
- if (c == CHAR_COMMA)
+ if (*pp++ != CHAR_COMMA) return FALSE;
+ while (pp < ptrend && (*pp == CHAR_SPACE || *pp == CHAR_HT)) pp++;
+ if (pp >= ptrend) return FALSE;
+ if (IS_DIGIT(*pp))
{
- if (had_comma) return FALSE;
- had_comma = TRUE;
+ while (++pp < ptrend && IS_DIGIT(*pp)) {}
}
- else return FALSE;
+ else if (!had_minimum) return FALSE;
+ while (pp < ptrend && (*pp == CHAR_SPACE || *pp == CHAR_HT)) pp++;
+ if (pp >= ptrend || *pp != CHAR_RIGHT_CURLY_BRACKET) return FALSE;
}
-/* The only error from read_number() is for a number that is too big. */
+/* Now process the quantifier for real. We know it must be {n} or (n,} or {,m}
+or {n,m}. The only error that read_number() can return is for a number that is
+too big. If *errorcodeptr is returned as zero it means no number was found. */
-p = *ptrptr;
-if (!read_number(&p, ptrend, -1, MAX_REPEAT_COUNT, ERR5, &min, errorcodeptr))
- goto EXIT;
+/* Deal with {,m} or n too big. If we successfully read m there is no need to
+check m >= n because n defaults to zero. */
-if (*p == CHAR_RIGHT_CURLY_BRACKET)
+if (!read_number(&p, ptrend, -1, MAX_REPEAT_COUNT, ERR5, &min, errorcodeptr))
{
- p++;
- max = min;
+ if (*errorcodeptr != 0) goto EXIT; /* n too big */
+ p++; /* Skip comma and subsequent spaces */
+ while (p < ptrend && (*p == CHAR_SPACE || *p == CHAR_HT)) p++;
+ if (!read_number(&p, ptrend, -1, MAX_REPEAT_COUNT, ERR5, &max, errorcodeptr))
+ {
+ if (*errorcodeptr != 0) goto EXIT; /* m too big */
+ }
}
+
+/* Have read one number. Deal with {n} or {n,} or {n,m} */
+
else
{
- if (*(++p) != CHAR_RIGHT_CURLY_BRACKET)
+ while (p < ptrend && (*p == CHAR_SPACE || *p == CHAR_HT)) p++;
+ if (*p == CHAR_RIGHT_CURLY_BRACKET)
{
- if (!read_number(&p, ptrend, -1, MAX_REPEAT_COUNT, ERR5, &max,
- errorcodeptr))
- goto EXIT;
+ max = min;
+ }
+ else /* Handle {n,} or {n,m} */
+ {
+ p++; /* Skip comma and subsequent spaces */
+ while (p < ptrend && (*p == CHAR_SPACE || *p == CHAR_HT)) p++;
+ if (!read_number(&p, ptrend, -1, MAX_REPEAT_COUNT, ERR5, &max, errorcodeptr))
+ {
+ if (*errorcodeptr != 0) goto EXIT; /* m too big */
+ }
+
if (max < min)
{
*errorcodeptr = ERR4;
goto EXIT;
}
}
- p++;
}
+/* Valid quantifier exists */
+
+while (p < ptrend && (*p == CHAR_SPACE || *p == CHAR_HT)) p++;
+p++;
yield = TRUE;
if (minp != NULL) *minp = (uint32_t)min;
if (maxp != NULL) *maxp = (uint32_t)max;
@@ -1491,6 +1537,7 @@ Arguments:
chptr points to a returned data character
errorcodeptr points to the errorcode variable (containing zero)
options the current options bits
+ xoptions the current extra options bits
isclass TRUE if inside a character class
cb compile data block or NULL when called from pcre2_substitute()
@@ -1502,10 +1549,12 @@ Returns: zero => a data character
int
PRIV(check_escape)(PCRE2_SPTR *ptrptr, PCRE2_SPTR ptrend, uint32_t *chptr,
- int *errorcodeptr, uint32_t options, uint32_t extra_options, BOOL isclass,
+ int *errorcodeptr, uint32_t options, uint32_t xoptions, BOOL isclass,
compile_block *cb)
{
BOOL utf = (options & PCRE2_UTF) != 0;
+BOOL alt_bsux =
+ ((options & PCRE2_ALT_BSUX) | (xoptions & PCRE2_EXTRA_ALT_BSUX)) != 0;
PCRE2_SPTR ptr = *ptrptr;
uint32_t c, cc;
int escape = 0;
@@ -1539,7 +1588,7 @@ else if ((i = escapes[c - ESCAPES_FIRST]) != 0)
if (i > 0)
{
c = (uint32_t)i;
- if (c == CHAR_CR && (extra_options & PCRE2_EXTRA_ESCAPED_CR_IS_LF) != 0)
+ if (c == CHAR_CR && (xoptions & PCRE2_EXTRA_ESCAPED_CR_IS_LF) != 0)
c = CHAR_LF;
}
else /* Negative table entry */
@@ -1557,6 +1606,10 @@ else if ((i = escapes[c - ESCAPES_FIRST]) != 0)
{
PCRE2_SPTR p = ptr + 1;
+ /* Perl ignores spaces and tabs after { */
+
+ while (p < ptrend && (*p == CHAR_SPACE || *p == CHAR_HT)) p++;
+
/* \N{U+ can be handled by the \x{ code. However, this construction is
not valid in EBCDIC environments because it specifies a Unicode
character, not a codepoint in the local code. For example \N{U+0041}
@@ -1571,7 +1624,7 @@ else if ((i = escapes[c - ESCAPES_FIRST]) != 0)
#else
if (utf)
{
- ptr = p + 1;
+ ptr = p + 2;
escape = 0; /* Not a fancy escape after all */
goto COME_FROM_NU;
}
@@ -1602,8 +1655,6 @@ else
int s;
PCRE2_SPTR oldptr;
BOOL overflow;
- BOOL alt_bsux =
- ((options & PCRE2_ALT_BSUX) | (extra_options & PCRE2_EXTRA_ALT_BSUX)) != 0;
/* Filter calls from pcre2_substitute(). */
@@ -1632,7 +1683,9 @@ else
is set. Otherwise, \u must be followed by exactly four hex digits or, if
PCRE2_EXTRA_ALT_BSUX is set, by any number of hex digits in braces.
Otherwise it is a lowercase u letter. This gives some compatibility with
- ECMAScript (aka JavaScript). */
+ ECMAScript (aka JavaScript). Unlike other braced items, white space is NOT
+ allowed. When \u{ is not followed by hex digits, a special return is given
+ because otherwise \u{ 12} (for example) would be treated as u{12}. */
case CHAR_u:
if (!alt_bsux) *errorcodeptr = ERR37; else
@@ -1641,11 +1694,11 @@ else
if (ptr >= ptrend) break;
if (*ptr == CHAR_LEFT_CURLY_BRACKET &&
- (extra_options & PCRE2_EXTRA_ALT_BSUX) != 0)
+ (xoptions & PCRE2_EXTRA_ALT_BSUX) != 0)
{
PCRE2_SPTR hptr = ptr + 1;
- cc = 0;
+ cc = 0;
while (hptr < ptrend && (xc = XDIGIT(*hptr)) != 0xff)
{
if ((cc & 0xf0000000) != 0) /* Test for 32-bit overflow */
@@ -1661,7 +1714,11 @@ else
if (hptr == ptr + 1 || /* No hex digits */
hptr >= ptrend || /* Hit end of input */
*hptr != CHAR_RIGHT_CURLY_BRACKET) /* No } terminator */
- break; /* Hex escape not recognized */
+ {
+ escape = ESC_ub; /* Special return */
+ ptr++; /* Skip { */
+ break; /* Hex escape not recognized */
+ }
c = cc; /* Accept the code point */
ptr = hptr + 1;
@@ -1685,7 +1742,7 @@ else
if (c > 0x10ffffU) *errorcodeptr = ERR77;
else
if (c >= 0xd800 && c <= 0xdfff &&
- (extra_options & PCRE2_EXTRA_ALLOW_SURROGATE_ESCAPES) == 0)
+ (xoptions & PCRE2_EXTRA_ALLOW_SURROGATE_ESCAPES) == 0)
*errorcodeptr = ERR73;
}
else if (c > MAX_NON_UTF_CHAR) *errorcodeptr = ERR77;
@@ -1741,12 +1798,16 @@ else
if (*ptr == CHAR_LEFT_CURLY_BRACKET)
{
PCRE2_SPTR p = ptr + 1;
+
+ while (p < ptrend && (*p == CHAR_SPACE || *p == CHAR_HT)) p++;
if (!read_number(&p, ptrend, cb->bracount, MAX_GROUP_NUMBER, ERR61, &s,
errorcodeptr))
{
if (*errorcodeptr == 0) escape = ESC_k; /* No number found */
break;
}
+ while (p < ptrend && (*p == CHAR_SPACE || *p == CHAR_HT)) p++;
+
if (p >= ptrend || *p != CHAR_RIGHT_CURLY_BRACKET)
{
*errorcodeptr = ERR57;
@@ -1842,56 +1903,64 @@ else
break;
/* \o is a relatively new Perl feature, supporting a more general way of
- specifying character codes in octal. The only supported form is \o{ddd}. */
+ specifying character codes in octal. The only supported form is \o{ddd},
+ with optional spaces or tabs after { and before }. */
case CHAR_o:
if (ptr >= ptrend || *ptr++ != CHAR_LEFT_CURLY_BRACKET)
{
ptr--;
*errorcodeptr = ERR55;
+ break;
}
- else if (ptr >= ptrend || *ptr == CHAR_RIGHT_CURLY_BRACKET)
+
+ while (ptr < ptrend && (*ptr == CHAR_SPACE || *ptr == CHAR_HT)) ptr++;
+ if (ptr >= ptrend || *ptr == CHAR_RIGHT_CURLY_BRACKET)
+ {
*errorcodeptr = ERR78;
- else
+ break;
+ }
+
+ c = 0;
+ overflow = FALSE;
+ while (ptr < ptrend && *ptr >= CHAR_0 && *ptr <= CHAR_7)
{
- c = 0;
- overflow = FALSE;
- while (ptr < ptrend && *ptr >= CHAR_0 && *ptr <= CHAR_7)
- {
- cc = *ptr++;
- if (c == 0 && cc == CHAR_0) continue; /* Leading zeroes */
+ cc = *ptr++;
+ if (c == 0 && cc == CHAR_0) continue; /* Leading zeroes */
#if PCRE2_CODE_UNIT_WIDTH == 32
- if (c >= 0x20000000l) { overflow = TRUE; break; }
+ if (c >= 0x20000000l) { overflow = TRUE; break; }
#endif
- c = (c << 3) + (cc - CHAR_0);
+ c = (c << 3) + (cc - CHAR_0);
#if PCRE2_CODE_UNIT_WIDTH == 8
- if (c > (utf ? 0x10ffffU : 0xffU)) { overflow = TRUE; break; }
+ if (c > (utf ? 0x10ffffU : 0xffU)) { overflow = TRUE; break; }
#elif PCRE2_CODE_UNIT_WIDTH == 16
- if (c > (utf ? 0x10ffffU : 0xffffU)) { overflow = TRUE; break; }
+ if (c > (utf ? 0x10ffffU : 0xffffU)) { overflow = TRUE; break; }
#elif PCRE2_CODE_UNIT_WIDTH == 32
- if (utf && c > 0x10ffffU) { overflow = TRUE; break; }
+ if (utf && c > 0x10ffffU) { overflow = TRUE; break; }
#endif
- }
- if (overflow)
- {
- while (ptr < ptrend && *ptr >= CHAR_0 && *ptr <= CHAR_7) ptr++;
- *errorcodeptr = ERR34;
- }
- else if (ptr < ptrend && *ptr++ == CHAR_RIGHT_CURLY_BRACKET)
- {
- if (utf && c >= 0xd800 && c <= 0xdfff &&
- (extra_options & PCRE2_EXTRA_ALLOW_SURROGATE_ESCAPES) == 0)
- {
- ptr--;
- *errorcodeptr = ERR73;
- }
- }
- else
+ }
+
+ while (ptr < ptrend && (*ptr == CHAR_SPACE || *ptr == CHAR_HT)) ptr++;
+
+ if (overflow)
+ {
+ while (ptr < ptrend && *ptr >= CHAR_0 && *ptr <= CHAR_7) ptr++;
+ *errorcodeptr = ERR34;
+ }
+ else if (ptr < ptrend && *ptr++ == CHAR_RIGHT_CURLY_BRACKET)
+ {
+ if (utf && c >= 0xd800 && c <= 0xdfff &&
+ (xoptions & PCRE2_EXTRA_ALLOW_SURROGATE_ESCAPES) == 0)
{
ptr--;
- *errorcodeptr = ERR64;
+ *errorcodeptr = ERR73;
}
}
+ else
+ {
+ ptr--;
+ *errorcodeptr = ERR64;
+ }
break;
/* When PCRE2_ALT_BSUX or PCRE2_EXTRA_ALT_BSUX is set, \x must be followed
@@ -1919,10 +1988,13 @@ else
{
if (ptr < ptrend && *ptr == CHAR_LEFT_CURLY_BRACKET)
{
+ ptr++;
+ while (ptr < ptrend && (*ptr == CHAR_SPACE || *ptr == CHAR_HT)) ptr++;
+
#ifndef EBCDIC
COME_FROM_NU:
#endif
- if (++ptr >= ptrend || *ptr == CHAR_RIGHT_CURLY_BRACKET)
+ if (ptr >= ptrend || *ptr == CHAR_RIGHT_CURLY_BRACKET)
{
*errorcodeptr = ERR78;
break;
@@ -1945,6 +2017,12 @@ else
}
}
+ /* Perl ignores spaces and tabs before } */
+
+ while (ptr < ptrend && (*ptr == CHAR_SPACE || *ptr == CHAR_HT)) ptr++;
+
+ /* On overflow, skip remaining hex digits */
+
if (overflow)
{
while (ptr < ptrend && XDIGIT(*ptr) != 0xff) ptr++;
@@ -1953,17 +2031,17 @@ else
else if (ptr < ptrend && *ptr++ == CHAR_RIGHT_CURLY_BRACKET)
{
if (utf && c >= 0xd800 && c <= 0xdfff &&
- (extra_options & PCRE2_EXTRA_ALLOW_SURROGATE_ESCAPES) == 0)
+ (xoptions & PCRE2_EXTRA_ALLOW_SURROGATE_ESCAPES) == 0)
{
ptr--;
*errorcodeptr = ERR73;
}
}
- /* If the sequence of hex digits does not end with '}', give an error.
- We used just to recognize this construct and fall through to the normal
- \x handling, but nowadays Perl gives an error, which seems much more
- sensible, so we do too. */
+ /* If the sequence of hex digits (followed by optional space) does not
+ end with '}', give an error. We used just to recognize this construct
+ and fall through to the normal \x handling, but nowadays Perl gives an
+ error, which seems much more sensible, so we do too. */
else
{
@@ -2117,7 +2195,11 @@ if (c == CHAR_LEFT_CURLY_BRACKET)
{
if (ptr >= cb->end_pattern) goto ERROR_RETURN;
c = *ptr++;
+#if PCRE2_CODE_UNIT_WIDTH != 8
+ while (c == '_' || c == '-' || (c <= 0xff && isspace(c)))
+#else
while (c == '_' || c == '-' || isspace(c))
+#endif
{
if (ptr >= cb->end_pattern) goto ERROR_RETURN;
c = *ptr++;
@@ -2355,12 +2437,13 @@ return -1;
/* This function is called from parse_regex() below whenever it needs to read
the name of a subpattern or a (*VERB) or an (*alpha_assertion). The initial
-pointer must be to the character before the name. If that character is '*' we
-are reading a verb or alpha assertion name. The pointer is updated to point
-after the name, for a VERB or alpha assertion name, or after tha name's
-terminator for a subpattern name. Returning both the offset and the name
-pointer is redundant information, but some callers use one and some the other,
-so it is simplest just to return both.
+pointer must be to the preceding character. If that character is '*' we are
+reading a verb or alpha assertion name. The pointer is updated to point after
+the name, for a VERB or alpha assertion name, or after tha name's terminator
+for a subpattern name. Returning both the offset and the name pointer is
+redundant information, but some callers use one and some the other, so it is
+simplest just to return both. When the name is in braces, spaces and tabs are
+allowed (and ignored) at either end.
Arguments:
ptrptr points to the character pointer variable
@@ -2383,9 +2466,13 @@ read_name(PCRE2_SPTR *ptrptr, PCRE2_SPTR ptrend, BOOL utf, uint32_t terminator,
int *errorcodeptr, compile_block *cb)
{
PCRE2_SPTR ptr = *ptrptr;
-BOOL is_group = (*ptr != CHAR_ASTERISK);
+BOOL is_group = (*ptr++ != CHAR_ASTERISK);
+BOOL is_braced = terminator == CHAR_RIGHT_CURLY_BRACKET;
-if (++ptr >= ptrend) /* No characters in name */
+if (is_braced)
+ while (ptr < ptrend && (*ptr == CHAR_SPACE || *ptr == CHAR_HT)) ptr++;
+
+if (ptr >= ptrend) /* No characters in name */
{
*errorcodeptr = is_group? ERR62: /* Subpattern name expected */
ERR60; /* Verb not recognized or malformed */
@@ -2464,6 +2551,8 @@ if (is_group)
*errorcodeptr = ERR62; /* Subpattern name expected */
goto FAILED;
}
+ if (is_braced)
+ while (ptr < ptrend && (*ptr == CHAR_SPACE || *ptr == CHAR_HT)) ptr++;
if (ptr >= ptrend || *ptr != (PCRE2_UCHAR)terminator)
{
*errorcodeptr = ERR42;
@@ -2533,6 +2622,85 @@ return parsed_pattern;
/*************************************************
+* Handle \d, \D, \s, \S, \w, \W *
+*************************************************/
+
+/* This function is called from parse_regex() below, both for freestanding
+escapes, and those within classes, to handle those escapes that may change when
+Unicode property support is requested. Note that PCRE2_UCP will never be set
+without Unicode support because that is checked when pcre2_compile() is called.
+
+Arguments:
+ escape the ESC_... value
+ parsed_pattern where to add the code
+ options options bits
+ xoptions extra options bits
+
+Returns: updated value of parsed_pattern
+*/
+static uint32_t *
+handle_escdsw(int escape, uint32_t *parsed_pattern, uint32_t options,
+ uint32_t xoptions)
+{
+uint32_t ascii_option = 0;
+uint32_t prop = ESC_p;
+
+switch(escape)
+ {
+ case ESC_D:
+ prop = ESC_P;
+ /* Fall through */
+ case ESC_d:
+ ascii_option = PCRE2_EXTRA_ASCII_BSD;
+ break;
+
+ case ESC_S:
+ prop = ESC_P;
+ /* Fall through */
+ case ESC_s:
+ ascii_option = PCRE2_EXTRA_ASCII_BSS;
+ break;
+
+ case ESC_W:
+ prop = ESC_P;
+ /* Fall through */
+ case ESC_w:
+ ascii_option = PCRE2_EXTRA_ASCII_BSW;
+ break;
+ }
+
+if ((options & PCRE2_UCP) == 0 || (xoptions & ascii_option) != 0)
+ {
+ *parsed_pattern++ = META_ESCAPE + escape;
+ }
+else
+ {
+ *parsed_pattern++ = META_ESCAPE + prop;
+ switch(escape)
+ {
+ case ESC_d:
+ case ESC_D:
+ *parsed_pattern++ = (PT_PC << 16) | ucp_Nd;
+ break;
+
+ case ESC_s:
+ case ESC_S:
+ *parsed_pattern++ = PT_SPACE << 16;
+ break;
+
+ case ESC_w:
+ case ESC_W:
+ *parsed_pattern++ = PT_WORD << 16;
+ break;
+ }
+ }
+
+return parsed_pattern;
+}
+
+
+
+/*************************************************
* Parse regex and identify named groups *
*************************************************/
@@ -2560,6 +2728,7 @@ typedef struct nest_save {
uint16_t max_group;
uint16_t flags;
uint32_t options;
+ uint32_t xoptions;
} nest_save;
#define NSF_RESET 0x0001u
@@ -2575,6 +2744,10 @@ the main compiling phase. */
PCRE2_EXTENDED|PCRE2_EXTENDED_MORE|PCRE2_MULTILINE|PCRE2_NO_AUTO_CAPTURE| \
PCRE2_UNGREEDY)
+#define PARSE_TRACKED_EXTRA_OPTIONS (PCRE2_EXTRA_CASELESS_RESTRICT| \
+ PCRE2_EXTRA_ASCII_BSD|PCRE2_EXTRA_ASCII_BSS|PCRE2_EXTRA_ASCII_BSW| \
+ PCRE2_EXTRA_ASCII_DIGIT|PCRE2_EXTRA_ASCII_POSIX)
+
/* States used for analyzing ranges in character classes. The two OK values
must be last. */
@@ -2609,9 +2782,11 @@ uint32_t *verbstartptr = NULL;
uint32_t *previous_callout = NULL;
uint32_t *parsed_pattern = cb->parsed_pattern;
uint32_t *parsed_pattern_end = cb->parsed_pattern_end;
+uint32_t *this_parsed_item = NULL;
+uint32_t *prev_parsed_item = NULL;
uint32_t meta_quantifier = 0;
uint32_t add_after_mark = 0;
-uint32_t extra_options = cb->cx->extra_options;
+uint32_t xoptions = cb->cx->extra_options;
uint16_t nest_depth = 0;
int after_manual_callout = 0;
int expect_cond_assert = 0;
@@ -2635,12 +2810,12 @@ nest_save *top_nest, *end_nests;
/* Insert leading items for word and line matching (features provided for the
benefit of pcre2grep). */
-if ((extra_options & PCRE2_EXTRA_MATCH_LINE) != 0)
+if ((xoptions & PCRE2_EXTRA_MATCH_LINE) != 0)
{
*parsed_pattern++ = META_CIRCUMFLEX;
*parsed_pattern++ = META_NOCAPTURE;
}
-else if ((extra_options & PCRE2_EXTRA_MATCH_WORD) != 0)
+else if ((xoptions & PCRE2_EXTRA_MATCH_WORD) != 0)
{
*parsed_pattern++ = META_ESCAPE + ESC_b;
*parsed_pattern++ = META_NOCAPTURE;
@@ -2691,6 +2866,7 @@ while (ptr < ptrend)
int prev_expect_cond_assert;
uint32_t min_repeat = 0, max_repeat = 0;
uint32_t set, unset, *optset;
+ uint32_t xset, xunset, *xoptset;
uint32_t terminator;
uint32_t prev_meta_quantifier;
BOOL prev_okquantifier;
@@ -2709,6 +2885,17 @@ while (ptr < ptrend)
goto FAILED; /* Parentheses too deeply nested */
}
+ /* If the last time round this loop something was added, parsed_pattern will
+ no longer be equal to this_parsed_item. Remember where the previous item
+ started and reset for the next item. Note that sometimes round the loop,
+ nothing gets added (e.g. for ignored white space). */
+
+ if (this_parsed_item != parsed_pattern)
+ {
+ prev_parsed_item = this_parsed_item;
+ this_parsed_item = parsed_pattern;
+ }
+
/* Get next input character, save its position for callout handling. */
thisptr = ptr;
@@ -2817,7 +3004,7 @@ while (ptr < ptrend)
if ((options & PCRE2_ALT_VERBNAMES) != 0)
{
escape = PRIV(check_escape)(&ptr, ptrend, &c, &errorcode, options,
- cb->cx->extra_options, FALSE, cb);
+ xoptions, FALSE, cb);
if (errorcode != 0) goto FAILED;
}
else escape = 0; /* Treat all as literal */
@@ -2831,6 +3018,11 @@ while (ptr < ptrend)
*parsed_pattern++ = c;
break;
+ case ESC_ub:
+ *parsed_pattern++ = CHAR_u;
+ PARSED_LITERAL(CHAR_LEFT_CURLY_BRACKET, parsed_pattern);
+ break;
+
case ESC_Q:
inescq = TRUE;
break;
@@ -2917,8 +3109,11 @@ while (ptr < ptrend)
!read_repeat_counts(&tempptr, ptrend, NULL, NULL, &errorcode))))
{
if (after_manual_callout-- <= 0)
+ {
parsed_pattern = manage_callouts(thisptr, &previous_callout, auto_callout,
parsed_pattern, cb);
+ this_parsed_item = parsed_pattern; /* New start for current item */
+ }
}
/* If expect_cond_assert is 2, we have just passed (?( and are expecting an
@@ -2995,7 +3190,6 @@ while (ptr < ptrend)
continue; /* Next character in pattern */
}
-
/* Process the next item in the main part of a pattern. */
switch(c)
@@ -3010,11 +3204,11 @@ while (ptr < ptrend)
case CHAR_BACKSLASH:
tempptr = ptr;
escape = PRIV(check_escape)(&ptr, ptrend, &c, &errorcode, options,
- cb->cx->extra_options, FALSE, cb);
+ xoptions, FALSE, cb);
if (errorcode != 0)
{
ESCAPE_FAILED:
- if ((extra_options & PCRE2_EXTRA_BAD_ESCAPE_IS_LITERAL) == 0)
+ if ((xoptions & PCRE2_EXTRA_BAD_ESCAPE_IS_LITERAL) == 0)
goto FAILED;
ptr = tempptr;
if (ptr >= ptrend) c = CHAR_BACKSLASH; else
@@ -3088,6 +3282,16 @@ while (ptr < ptrend)
*parsed_pattern++ = META_ESCAPE + escape;
break;
+ /* This is a special return that happens only in EXTRA_ALT_BSUX mode,
+ when \u{ is not followed by hex digits and }. It requests two literal
+ characters, u and { and we need this, as otherwise \u{ 12} (for example)
+ would be treated as u{12} now that spaces are allowed in quantifiers. */
+
+ case ESC_ub:
+ *parsed_pattern++ = CHAR_u;
+ PARSED_LITERAL(CHAR_LEFT_CURLY_BRACKET, parsed_pattern);
+ break;
+
case ESC_X:
#ifndef SUPPORT_UNICODE
errorcode = ERR45; /* Supported only with Unicode support */
@@ -3107,9 +3311,7 @@ while (ptr < ptrend)
*parsed_pattern++ = META_ESCAPE + escape;
break;
- /* Escapes that change in UCP mode. Note that PCRE2_UCP will never be set
- without Unicode support because it is checked when pcre2_compile() is
- called. */
+ /* Escapes that may change in UCP mode. */
case ESC_d:
case ESC_D:
@@ -3118,33 +3320,8 @@ while (ptr < ptrend)
case ESC_w:
case ESC_W:
okquantifier = TRUE;
- if ((options & PCRE2_UCP) == 0)
- {
- *parsed_pattern++ = META_ESCAPE + escape;
- }
- else
- {
- *parsed_pattern++ = META_ESCAPE +
- ((escape == ESC_d || escape == ESC_s || escape == ESC_w)?
- ESC_p : ESC_P);
- switch(escape)
- {
- case ESC_d:
- case ESC_D:
- *parsed_pattern++ = (PT_PC << 16) | ucp_Nd;
- break;
-
- case ESC_s:
- case ESC_S:
- *parsed_pattern++ = PT_SPACE << 16;
- break;
-
- case ESC_w:
- case ESC_W:
- *parsed_pattern++ = PT_WORD << 16;
- break;
- }
- }
+ parsed_pattern = handle_escdsw(escape, parsed_pattern, options,
+ xoptions);
break;
/* Unicode property matching */
@@ -3206,7 +3383,8 @@ while (ptr < ptrend)
if (errorcode != 0) goto ESCAPE_FAILED;
}
- /* Not a numerical recursion */
+ /* Not a numerical recursion. Perl allows spaces and tabs after { and
+ before } but not for other delimiters. */
if (!read_name(&ptr, ptrend, utf, terminator, &offset, &name, &namelen,
&errorcode, cb)) goto ESCAPE_FAILED;
@@ -3273,7 +3451,8 @@ while (ptr < ptrend)
/* ---- Quantifier post-processing ---- */
- /* Check that a quantifier is allowed after the previous item. */
+ /* Check that a quantifier is allowed after the previous item. This
+ guarantees that there is a previous item. */
CHECK_QUANTIFIER:
if (!prev_okquantifier)
@@ -3288,7 +3467,7 @@ while (ptr < ptrend)
wrapping it in non-capturing brackets, but we have to allow for a preceding
(*MARK) for when (*ACCEPT) has an argument. */
- if (parsed_pattern[-1] == META_ACCEPT)
+ if (*prev_parsed_item == META_ACCEPT)
{
uint32_t *p;
for (p = parsed_pattern - 1; p >= verbstartptr; p--) p[1] = p[0];
@@ -3507,18 +3686,24 @@ while (ptr < ptrend)
class_range_state = RANGE_NO;
- /* When PCRE2_UCP is set, some of the POSIX classes are converted to
- use Unicode properties \p or \P or, in one case, \h or \H. The
- substitutes table has two values per class, containing the type and
- value of a \p or \P item. The special cases are specified with a
- negative type: a non-zero value causes \h or \H to be used, and a zero
- value falls through to behave like a non-UCP POSIX class. */
+ /* When PCRE2_UCP is set, unless PCRE2_EXTRA_ASCII_POSIX is set, some
+ of the POSIX classes are converted to use Unicode properties \p or \P
+ or, in one case, \h or \H. The substitutes table has two values per
+ class, containing the type and value of a \p or \P item. The special
+ cases are specified with a negative type: a non-zero value causes \h or
+ \H to be used, and a zero value falls through to behave like a non-UCP
+ POSIX class. There are now also some extra options that force ASCII for
+ some classes. */
#ifdef SUPPORT_UNICODE
- if ((options & PCRE2_UCP) != 0)
+ if ((options & PCRE2_UCP) != 0 &&
+ (xoptions & PCRE2_EXTRA_ASCII_POSIX) == 0 &&
+ !((xoptions & PCRE2_EXTRA_ASCII_DIGIT) != 0 &&
+ (posix_class == PC_DIGIT || posix_class == PC_XDIGIT)))
{
int ptype = posix_substitutes[2*posix_class];
int pvalue = posix_substitutes[2*posix_class + 1];
+
if (ptype >= 0)
{
*parsed_pattern++ = META_ESCAPE + (posix_negate? ESC_P : ESC_p);
@@ -3587,11 +3772,11 @@ while (ptr < ptrend)
{
tempptr = ptr;
escape = PRIV(check_escape)(&ptr, ptrend, &c, &errorcode, options,
- cb->cx->extra_options, TRUE, cb);
+ xoptions, TRUE, cb);
if (errorcode != 0)
{
- if ((extra_options & PCRE2_EXTRA_BAD_ESCAPE_IS_LITERAL) == 0)
+ if ((xoptions & PCRE2_EXTRA_BAD_ESCAPE_IS_LITERAL) == 0)
goto FAILED;
ptr = tempptr;
if (ptr >= ptrend) c = CHAR_BACKSLASH; else
@@ -3605,7 +3790,7 @@ while (ptr < ptrend)
{
case 0: /* Escaped character code point is in c */
char_is_literal = FALSE;
- goto CLASS_LITERAL;
+ goto CLASS_LITERAL; /* (a few lines above) */
case ESC_b:
c = CHAR_BS; /* \b is backspace in a class */
@@ -3656,7 +3841,7 @@ while (ptr < ptrend)
*parsed_pattern++ = META_ESCAPE + escape;
break;
- /* These escapes are converted to Unicode property tests when
+ /* These escapes may be converted to Unicode property tests when
PCRE2_UCP is set. */
case ESC_d:
@@ -3665,33 +3850,8 @@ while (ptr < ptrend)
case ESC_S:
case ESC_w:
case ESC_W:
- if ((options & PCRE2_UCP) == 0)
- {
- *parsed_pattern++ = META_ESCAPE + escape;
- }
- else
- {
- *parsed_pattern++ = META_ESCAPE +
- ((escape == ESC_d || escape == ESC_s || escape == ESC_w)?
- ESC_p : ESC_P);
- switch(escape)
- {
- case ESC_d:
- case ESC_D:
- *parsed_pattern++ = (PT_PC << 16) | ucp_Nd;
- break;
-
- case ESC_s:
- case ESC_S:
- *parsed_pattern++ = PT_SPACE << 16;
- break;
-
- case ESC_w:
- case ESC_W:
- *parsed_pattern++ = PT_WORD << 16;
- break;
- }
- }
+ parsed_pattern = handle_escdsw(escape, parsed_pattern, options,
+ xoptions);
break;
/* Explicit Unicode property matching */
@@ -3890,6 +4050,7 @@ while (ptr < ptrend)
top_nest->nest_depth = nest_depth;
top_nest->flags = NSF_ATOMICSR;
top_nest->options = options & PARSE_TRACKED_OPTIONS;
+ top_nest->xoptions = xoptions & PARSE_TRACKED_EXTRA_OPTIONS;
}
break;
#else /* SUPPORT_UNICODE */
@@ -4022,6 +4183,7 @@ while (ptr < ptrend)
top_nest->nest_depth = nest_depth;
top_nest->flags = 0;
top_nest->options = options & PARSE_TRACKED_OPTIONS;
+ top_nest->xoptions = xoptions & PARSE_TRACKED_EXTRA_OPTIONS;
/* Start of non-capturing group that resets the capture count for each
branch. */
@@ -4036,24 +4198,28 @@ while (ptr < ptrend)
ptr++;
}
- /* Scan for options imnsxJU to be set or unset. */
+ /* Scan for options imnrsxJU to be set or unset. */
else
{
BOOL hyphenok = TRUE;
uint32_t oldoptions = options;
+ uint32_t oldxoptions = xoptions;
top_nest->reset_group = 0;
top_nest->max_group = 0;
set = unset = 0;
optset = &set;
+ xset = xunset = 0;
+ xoptset = &xset;
- /* ^ at the start unsets imnsx and disables the subsequent use of - */
+ /* ^ at the start unsets irmnsx and disables the subsequent use of - */
if (ptr < ptrend && *ptr == CHAR_CIRCUMFLEX_ACCENT)
{
options &= ~(PCRE2_CASELESS|PCRE2_MULTILINE|PCRE2_NO_AUTO_CAPTURE|
PCRE2_DOTALL|PCRE2_EXTENDED|PCRE2_EXTENDED_MORE);
+ xoptions &= ~(PCRE2_EXTRA_CASELESS_RESTRICT);
hyphenok = FALSE;
ptr++;
}
@@ -4071,9 +4237,51 @@ while (ptr < ptrend)
goto FAILED;
}
optset = &unset;
+ xoptset = &xunset;
hyphenok = FALSE;
break;
+ /* There are some two-character sequences that start with 'a'. */
+
+ case CHAR_a:
+ if (ptr < ptrend)
+ {
+ if (*ptr == CHAR_D)
+ {
+ *xoptset |= PCRE2_EXTRA_ASCII_BSD;
+ ptr++;
+ break;
+ }
+ if (*ptr == CHAR_P)
+ {
+ *xoptset |= (PCRE2_EXTRA_ASCII_POSIX|PCRE2_EXTRA_ASCII_DIGIT);
+ ptr++;
+ break;
+ }
+ if (*ptr == CHAR_S)
+ {
+ *xoptset |= PCRE2_EXTRA_ASCII_BSS;
+ ptr++;
+ break;
+ }
+ if (*ptr == CHAR_T)
+ {
+ *xoptset |= PCRE2_EXTRA_ASCII_DIGIT;
+ ptr++;
+ break;
+ }
+ if (*ptr == CHAR_W)
+ {
+ *xoptset |= PCRE2_EXTRA_ASCII_BSW;
+ ptr++;
+ break;
+ }
+ }
+ *xoptset |= PCRE2_EXTRA_ASCII_BSD|PCRE2_EXTRA_ASCII_BSS|
+ PCRE2_EXTRA_ASCII_BSW|
+ PCRE2_EXTRA_ASCII_DIGIT|PCRE2_EXTRA_ASCII_POSIX;
+ break;
+
case CHAR_J: /* Record that it changed in the external options */
*optset |= PCRE2_DUPNAMES;
cb->external_flags |= PCRE2_JCHANGED;
@@ -4082,6 +4290,7 @@ while (ptr < ptrend)
case CHAR_i: *optset |= PCRE2_CASELESS; break;
case CHAR_m: *optset |= PCRE2_MULTILINE; break;
case CHAR_n: *optset |= PCRE2_NO_AUTO_CAPTURE; break;
+ case CHAR_r: *xoptset|= PCRE2_EXTRA_CASELESS_RESTRICT; break;
case CHAR_s: *optset |= PCRE2_DOTALL; break;
case CHAR_U: *optset |= PCRE2_UNGREEDY; break;
@@ -4112,6 +4321,7 @@ while (ptr < ptrend)
unset |= PCRE2_EXTENDED_MORE;
options = (options | set) & (~unset);
+ xoptions = (xoptions | xset) & (~xunset);
/* If the options ended with ')' this is not the start of a nested
group with option changes, so the options change at this level.
@@ -4132,10 +4342,11 @@ while (ptr < ptrend)
/* If nothing changed, no need to record. */
- if (options != oldoptions)
+ if (options != oldoptions || xoptions != oldxoptions)
{
*parsed_pattern++ = META_OPTIONS;
*parsed_pattern++ = options;
+ *parsed_pattern++ = xoptions;
}
} /* End options processing */
break; /* End default case after (? */
@@ -4605,6 +4816,7 @@ while (ptr < ptrend)
top_nest->nest_depth = nest_depth;
top_nest->flags = NSF_CONDASSERT;
top_nest->options = options & PARSE_TRACKED_OPTIONS;
+ top_nest->xoptions = xoptions & PARSE_TRACKED_EXTRA_OPTIONS;
}
break;
@@ -4738,6 +4950,7 @@ while (ptr < ptrend)
if (top_nest != NULL && top_nest->nest_depth == nest_depth)
{
options = (options & ~PARSE_TRACKED_OPTIONS) | top_nest->options;
+ xoptions = (xoptions & ~PARSE_TRACKED_EXTRA_OPTIONS) | top_nest->xoptions;
if ((top_nest->flags & NSF_RESET) != 0 &&
top_nest->max_group > cb->bracount)
cb->bracount = top_nest->max_group;
@@ -4780,12 +4993,12 @@ parsed_pattern = manage_callouts(ptr, &previous_callout, auto_callout,
/* Insert trailing items for word and line matching (features provided for the
benefit of pcre2grep). */
-if ((extra_options & PCRE2_EXTRA_MATCH_LINE) != 0)
+if ((xoptions & PCRE2_EXTRA_MATCH_LINE) != 0)
{
*parsed_pattern++ = META_KET;
*parsed_pattern++ = META_DOLLAR;
}
-else if ((extra_options & PCRE2_EXTRA_MATCH_WORD) != 0)
+else if ((xoptions & PCRE2_EXTRA_MATCH_WORD) != 0)
{
*parsed_pattern++ = META_KET;
*parsed_pattern++ = META_ESCAPE + ESC_b;
@@ -4862,6 +5075,8 @@ for (;;)
case OP_WORD_BOUNDARY:
case OP_NOT_WORD_BOUNDARY:
+ case OP_UCP_WORD_BOUNDARY:
+ case OP_NOT_UCP_WORD_BOUNDARY:
if (!skipassert) return code;
/* Fall through */
@@ -4913,7 +5128,8 @@ for (;;)
* Get othercase range *
*************************************************/
-/* This function is passed the start and end of a class range in UCP mode. It
+/* This function is passed the start and end of a class range in UCP mode. For
+single characters the range may be just one character long. The function
searches up the characters, looking for ranges of characters in the "other"
case. Each call returns the next one, updating the start address. A character
with multiple other cases is returned on its own with a special return value.
@@ -4923,31 +5139,44 @@ Arguments:
d end value
ocptr where to put start of othercase range
odptr where to put end of othercase range
+ restricted TRUE if caseless restriction applies
Yield: -1 when no more
0 when a range is returned
- >0 the CASESET offset for char with multiple other cases
- in this case, ocptr contains the original
+ >0 the CASESET offset for char with multiple other cases;
+ for this return, *ocptr contains the original
*/
static int
get_othercase_range(uint32_t *cptr, uint32_t d, uint32_t *ocptr,
- uint32_t *odptr)
+ uint32_t *odptr, BOOL restricted)
{
uint32_t c, othercase, next;
unsigned int co;
/* Find the first character that has an other case. If it has multiple other
-cases, return its case offset value. */
+cases, return its case offset value. When CASELESS_RESTRICT is set, ignore the
+multi-case entries that begin with ASCII values. In 32-bit mode, a value
+greater than the Unicode maximum ends the range. */
for (c = *cptr; c <= d; c++)
{
- if ((co = UCD_CASESET(c)) != 0)
+#if PCRE2_CODE_UNIT_WIDTH == 32
+ if (c > MAX_UTF_CODE_POINT) return -1;
+#endif
+ if ((co = UCD_CASESET(c)) != 0 &&
+ (!restricted || PRIV(ucd_caseless_sets)[co] > 127))
{
*ocptr = c++; /* Character that has the set */
*cptr = c; /* Rest of input range */
return (int)co;
}
+
+ /* This is not a valid multiple-case character. Check that the single other
+ case is different to the original. We don't need to check "restricted" here
+ because the non-ASCII characters with multiple cases that include an ASCII
+ character don't have a different "othercase". */
+
if ((othercase = UCD_OTHERCASE(c)) != c) break;
}
@@ -4988,7 +5217,8 @@ add_to_class().
Arguments:
classbits the bit map for characters < 256
uchardptr points to the pointer for extra data
- options the options word
+ options the options bits
+ xoptions the extra options bits
cb compile data
start start of range character
end end of range character
@@ -4999,7 +5229,8 @@ Returns: the number of < 256 characters added
static unsigned int
add_to_class_internal(uint8_t *classbits, PCRE2_UCHAR **uchardptr,
- uint32_t options, compile_block *cb, uint32_t start, uint32_t end)
+ uint32_t options, uint32_t xoptions, compile_block *cb, uint32_t start,
+ uint32_t end)
{
uint32_t c;
uint32_t classbits_end = (end <= 0xff ? end : 0xff);
@@ -5007,8 +5238,8 @@ unsigned int n8 = 0;
/* If caseless matching is required, scan the range and process alternate
cases. In Unicode, there are 8-bit characters that have alternate cases that
-are greater than 255 and vice-versa. Sometimes we can just extend the original
-range. */
+are greater than 255 and vice-versa (though these may be ignored if caseless
+restriction is in force). Sometimes we can just extend the original range. */
if ((options & PCRE2_CASELESS) != 0)
{
@@ -5021,20 +5252,23 @@ if ((options & PCRE2_CASELESS) != 0)
options &= ~PCRE2_CASELESS; /* Remove for recursive calls */
c = start;
- while ((rc = get_othercase_range(&c, end, &oc, &od)) >= 0)
+ while ((rc = get_othercase_range(&c, end, &oc, &od,
+ (xoptions & PCRE2_EXTRA_CASELESS_RESTRICT) != 0)) >= 0)
{
/* Handle a single character that has more than one other case. */
- if (rc > 0) n8 += add_list_to_class_internal(classbits, uchardptr, options, cb,
- PRIV(ucd_caseless_sets) + rc, oc);
+ if (rc > 0) n8 += add_list_to_class_internal(classbits, uchardptr,
+ options, xoptions, cb, PRIV(ucd_caseless_sets) + rc, oc);
/* Do nothing if the other case range is within the original range. */
- else if (oc >= cb->class_range_start && od <= cb->class_range_end) continue;
+ else if (oc >= cb->class_range_start && od <= cb->class_range_end)
+ continue;
- /* Extend the original range if there is overlap, noting that if oc < c, we
- can't have od > end because a subrange is always shorter than the basic
- range. Otherwise, use a recursive call to add the additional range. */
+ /* Extend the original range if there is overlap, noting that if oc < c,
+ we can't have od > end because a subrange is always shorter than the
+ basic range. Otherwise, use a recursive call to add the additional range.
+ */
else if (oc < start && od >= start - 1) start = oc; /* Extend downwards */
else if (od > end && oc <= end + 1)
@@ -5042,10 +5276,13 @@ if ((options & PCRE2_CASELESS) != 0)
end = od; /* Extend upwards */
if (end > classbits_end) classbits_end = (end <= 0xff ? end : 0xff);
}
- else n8 += add_to_class_internal(classbits, uchardptr, options, cb, oc, od);
+ else n8 += add_to_class_internal(classbits, uchardptr, options, xoptions,
+ cb, oc, od);
}
}
else
+#else
+ (void)xoptions; /* Avoid compiler warning */
#endif /* SUPPORT_UNICODE */
/* Not UTF mode */
@@ -5141,7 +5378,8 @@ add_to_class_internal(), with which it is mutually recursive.
Arguments:
classbits the bit map for characters < 256
uchardptr points to the pointer for extra data
- options the options word
+ options the options bits
+ xoptions the extra options bits
cb contains pointers to tables etc.
p points to row of 32-bit values, terminated by NOTACHAR
except character to omit; this is used when adding lists of
@@ -5154,7 +5392,8 @@ Returns: the number of < 256 characters added
static unsigned int
add_list_to_class_internal(uint8_t *classbits, PCRE2_UCHAR **uchardptr,
- uint32_t options, compile_block *cb, const uint32_t *p, unsigned int except)
+ uint32_t options, uint32_t xoptions, compile_block *cb, const uint32_t *p,
+ unsigned int except)
{
unsigned int n8 = 0;
while (p[0] < NOTACHAR)
@@ -5163,7 +5402,8 @@ while (p[0] < NOTACHAR)
if (p[0] != except)
{
while(p[n+1] == p[0] + n + 1) n++;
- n8 += add_to_class_internal(classbits, uchardptr, options, cb, p[0], p[n]);
+ n8 += add_to_class_internal(classbits, uchardptr, options, xoptions, cb,
+ p[0], p[n]);
}
p += n + 1;
}
@@ -5183,7 +5423,8 @@ to avoid duplication when handling case-independence.
Arguments:
classbits the bit map for characters < 256
uchardptr points to the pointer for extra data
- options the options word
+ options the options bits
+ xoptions the extra options bits
cb compile data
start start of range character
end end of range character
@@ -5194,11 +5435,12 @@ Returns: the number of < 256 characters added
static unsigned int
add_to_class(uint8_t *classbits, PCRE2_UCHAR **uchardptr, uint32_t options,
- compile_block *cb, uint32_t start, uint32_t end)
+ uint32_t xoptions, compile_block *cb, uint32_t start, uint32_t end)
{
cb->class_range_start = start;
cb->class_range_end = end;
-return add_to_class_internal(classbits, uchardptr, options, cb, start, end);
+return add_to_class_internal(classbits, uchardptr, options, xoptions, cb,
+ start, end);
}
@@ -5215,7 +5457,8 @@ case-independence.
Arguments:
classbits the bit map for characters < 256
uchardptr points to the pointer for extra data
- options the options word
+ options the options bits
+ xoptions the extra options bits
cb contains pointers to tables etc.
p points to row of 32-bit values, terminated by NOTACHAR
except character to omit; this is used when adding lists of
@@ -5228,7 +5471,7 @@ Returns: the number of < 256 characters added
static unsigned int
add_list_to_class(uint8_t *classbits, PCRE2_UCHAR **uchardptr, uint32_t options,
- compile_block *cb, const uint32_t *p, unsigned int except)
+ uint32_t xoptions, compile_block *cb, const uint32_t *p, unsigned int except)
{
unsigned int n8 = 0;
while (p[0] < NOTACHAR)
@@ -5239,7 +5482,8 @@ while (p[0] < NOTACHAR)
while(p[n+1] == p[0] + n + 1) n++;
cb->class_range_start = p[0];
cb->class_range_end = p[n];
- n8 += add_to_class_internal(classbits, uchardptr, options, cb, p[0], p[n]);
+ n8 += add_to_class_internal(classbits, uchardptr, options, xoptions, cb,
+ p[0], p[n]);
}
p += n + 1;
}
@@ -5258,7 +5502,8 @@ vertical whitespace to a class. The list must be in order.
Arguments:
classbits the bit map for characters < 256
uchardptr points to the pointer for extra data
- options the options word
+ options the options bits
+ xoptions the extra options bits
cb contains pointers to tables etc.
p points to row of 32-bit values, terminated by NOTACHAR
@@ -5268,16 +5513,16 @@ Returns: the number of < 256 characters added
static unsigned int
add_not_list_to_class(uint8_t *classbits, PCRE2_UCHAR **uchardptr,
- uint32_t options, compile_block *cb, const uint32_t *p)
+ uint32_t options, uint32_t xoptions, compile_block *cb, const uint32_t *p)
{
BOOL utf = (options & PCRE2_UTF) != 0;
unsigned int n8 = 0;
if (p[0] > 0)
- n8 += add_to_class(classbits, uchardptr, options, cb, 0, p[0] - 1);
+ n8 += add_to_class(classbits, uchardptr, options, xoptions, cb, 0, p[0] - 1);
while (p[0] < NOTACHAR)
{
while (p[1] == p[0] + 1) p++;
- n8 += add_to_class(classbits, uchardptr, options, cb, p[0] + 1,
+ n8 += add_to_class(classbits, uchardptr, options, xoptions, cb, p[0] + 1,
(p[1] == NOTACHAR) ? (utf ? 0x10ffffu : 0xffffffffu) : p[1] - 1);
p++;
}
@@ -5368,6 +5613,7 @@ real compile phase. The value of lengthptr distinguishes the two phases.
Arguments:
optionsptr pointer to the option bits
+ xoptionsptr pointer to the extra option bits
codeptr points to the pointer to the current code point
pptrptr points to the current parsed pattern pointer
errorcodeptr points to error code variable
@@ -5376,6 +5622,7 @@ Arguments:
reqcuptr place to put the last required code unit
reqcuflagsptr place to put the last required code unit flags
bcptr points to current branch chain
+ open_caps points to current capitem
cb contains pointers to tables etc.
lengthptr NULL during the real compile phase
points to length accumulator during pre-compile phase
@@ -5386,9 +5633,10 @@ Returns: 0 There's been an error, *errorcodeptr is non-zero
*/
static int
-compile_branch(uint32_t *optionsptr, PCRE2_UCHAR **codeptr, uint32_t **pptrptr,
- int *errorcodeptr, uint32_t *firstcuptr, uint32_t *firstcuflagsptr,
- uint32_t *reqcuptr, uint32_t *reqcuflagsptr, branch_chain *bcptr,
+compile_branch(uint32_t *optionsptr, uint32_t *xoptionsptr,
+ PCRE2_UCHAR **codeptr, uint32_t **pptrptr, int *errorcodeptr,
+ uint32_t *firstcuptr, uint32_t *firstcuflagsptr, uint32_t *reqcuptr,
+ uint32_t *reqcuflagsptr, branch_chain *bcptr, open_capitem *open_caps,
compile_block *cb, PCRE2_SIZE *lengthptr)
{
int bravalue = 0;
@@ -5398,6 +5646,7 @@ uint32_t repeat_min = 0, repeat_max = 0; /* To please picky compilers */
uint32_t greedy_default, greedy_non_default;
uint32_t repeat_type, op_type;
uint32_t options = *optionsptr; /* May change dynamically */
+uint32_t xoptions = *xoptionsptr; /* May change dynamically */
uint32_t firstcu, reqcu;
uint32_t zeroreqcu, zerofirstcu;
uint32_t escape;
@@ -5423,8 +5672,8 @@ const uint8_t *cbits = cb->cbits;
uint8_t classbits[32];
/* We can fish out the UTF setting once and for all into a BOOL, but we must
-not do this for other options (e.g. PCRE2_EXTENDED) because they may change
-dynamically as we process the pattern. */
+not do this for other options (e.g. PCRE2_EXTENDED) that may change dynamically
+as we process the pattern. */
#ifdef SUPPORT_UNICODE
BOOL utf = (options & PCRE2_UTF) != 0;
@@ -5633,8 +5882,8 @@ for (;; pptr++)
If the class contains characters outside the 0-255 range, a different
opcode is compiled. It may optionally have a bit map for characters < 256,
- but those above are are explicitly listed afterwards. A flag code unit
- tells whether the bitmap is present, and whether this is a negated class or
+ but those above are explicitly listed afterwards. A flag code unit tells
+ whether the bitmap is present, and whether this is a negated class or
not. */
case META_CLASS_NOT:
@@ -5675,11 +5924,14 @@ for (;; pptr++)
/* For caseless UTF or UCP mode, check whether this character has more
than one other case. If so, generate a special OP_NOTPROP item instead of
- OP_NOTI. */
+ OP_NOTI. When restricted by PCRE2_EXTRA_CASELESS_RESTRICT, ignore any
+ caseless set that starts with an ASCII character. */
#ifdef SUPPORT_UNICODE
if ((utf||ucp) && (options & PCRE2_CASELESS) != 0 &&
- (d = UCD_CASESET(c)) != 0)
+ (d = UCD_CASESET(c)) != 0 &&
+ ((xoptions & PCRE2_EXTRA_CASELESS_RESTRICT) == 0 ||
+ PRIV(ucd_caseless_sets)[d] > 127))
{
*code++ = OP_NOTPROP;
*code++ = PT_CLIST;
@@ -5687,7 +5939,7 @@ for (;; pptr++)
break; /* We are finished with this class */
}
#endif
- /* Char has only one other case, or UCP not available */
+ /* Char has only one other (usable) case, or UCP not available */
*code++ = ((options & PCRE2_CASELESS) != 0)? OP_NOTI: OP_NOT;
code += PUTCHAR(c, code);
@@ -5697,7 +5949,9 @@ for (;; pptr++)
/* Handle character classes that contain more than just one literal
character. If there are exactly two characters in a positive class, see if
they are case partners. This can be optimized to generate a caseless single
- character match (which also sets first/required code units if relevant). */
+ character match (which also sets first/required code units if relevant).
+ When casing restrictions apply, ignore a caseless set if both characters
+ are ASCII. */
if (meta == META_CLASS && pptr[1] < META_END && pptr[2] < META_END &&
pptr[3] == META_CLASS_END)
@@ -5705,7 +5959,9 @@ for (;; pptr++)
uint32_t c = pptr[1];
#ifdef SUPPORT_UNICODE
- if (UCD_CASESET(c) == 0)
+ if (UCD_CASESET(c) == 0 ||
+ ((xoptions & PCRE2_EXTRA_CASELESS_RESTRICT) != 0 &&
+ c < 128 && pptr[2] < 128))
#endif
{
uint32_t d;
@@ -5797,41 +6053,45 @@ for (;; pptr++)
XCL_PROP/XCL_NOTPROP directly, which is done here. */
#ifdef SUPPORT_UNICODE
- if ((options & PCRE2_UCP) != 0) switch(posix_class)
+ if ((options & PCRE2_UCP) != 0 &&
+ (xoptions & PCRE2_EXTRA_ASCII_POSIX) == 0)
{
- case PC_GRAPH:
- case PC_PRINT:
- case PC_PUNCT:
- *class_uchardata++ = local_negate? XCL_NOTPROP : XCL_PROP;
- *class_uchardata++ = (PCRE2_UCHAR)
- ((posix_class == PC_GRAPH)? PT_PXGRAPH :
- (posix_class == PC_PRINT)? PT_PXPRINT : PT_PXPUNCT);
- *class_uchardata++ = 0;
- xclass_has_prop = TRUE;
- goto CONTINUE_CLASS;
-
- /* For the other POSIX classes (ascii, xdigit) we are going to
- fall through to the non-UCP case and build a bit map for
- characters with code points less than 256. However, if we are in
- a negated POSIX class, characters with code points greater than
- 255 must either all match or all not match, depending on whether
- the whole class is not or is negated. For example, for
- [[:^ascii:]... they must all match, whereas for [^[:^xdigit:]...
- they must not.
-
- In the special case where there are no xclass items, this is
- automatically handled by the use of OP_CLASS or OP_NCLASS, but an
- explicit range is needed for OP_XCLASS. Setting a flag here
- causes the range to be generated later when it is known that
- OP_XCLASS is required. In the 8-bit library this is relevant only in
- utf mode, since no wide characters can exist otherwise. */
+ switch(posix_class)
+ {
+ case PC_GRAPH:
+ case PC_PRINT:
+ case PC_PUNCT:
+ *class_uchardata++ = local_negate? XCL_NOTPROP : XCL_PROP;
+ *class_uchardata++ = (PCRE2_UCHAR)
+ ((posix_class == PC_GRAPH)? PT_PXGRAPH :
+ (posix_class == PC_PRINT)? PT_PXPRINT : PT_PXPUNCT);
+ *class_uchardata++ = 0;
+ xclass_has_prop = TRUE;
+ goto CONTINUE_CLASS;
+
+ /* For the other POSIX classes (ex: ascii) we are going to
+ fall through to the non-UCP case and build a bit map for
+ characters with code points less than 256. However, if we are in
+ a negated POSIX class, characters with code points greater than
+ 255 must either all match or all not match, depending on whether
+ the whole class is not or is negated. For example, for
+ [[:^ascii:]... they must all match, whereas for [^[:^ascii:]...
+ they must not.
+
+ In the special case where there are no xclass items, this is
+ automatically handled by the use of OP_CLASS or OP_NCLASS, but an
+ explicit range is needed for OP_XCLASS. Setting a flag here
+ causes the range to be generated later when it is known that
+ OP_XCLASS is required. In the 8-bit library this is relevant only in
+ utf mode, since no wide characters can exist otherwise. */
- default:
+ default:
#if PCRE2_CODE_UNIT_WIDTH == 8
- if (utf)
+ if (utf)
#endif
- match_all_or_no_wide_chars |= local_negate;
- break;
+ match_all_or_no_wide_chars |= local_negate;
+ break;
+ }
}
#endif /* SUPPORT_UNICODE */
@@ -5957,22 +6217,24 @@ for (;; pptr++)
case ESC_h:
(void)add_list_to_class(classbits, &class_uchardata,
- options & ~PCRE2_CASELESS, cb, PRIV(hspace_list), NOTACHAR);
+ options & ~PCRE2_CASELESS, xoptions, cb, PRIV(hspace_list),
+ NOTACHAR);
break;
case ESC_H:
(void)add_not_list_to_class(classbits, &class_uchardata,
- options & ~PCRE2_CASELESS, cb, PRIV(hspace_list));
+ options & ~PCRE2_CASELESS, xoptions, cb, PRIV(hspace_list));
break;
case ESC_v:
(void)add_list_to_class(classbits, &class_uchardata,
- options & ~PCRE2_CASELESS, cb, PRIV(vspace_list), NOTACHAR);
+ options & ~PCRE2_CASELESS, xoptions, cb, PRIV(vspace_list),
+ NOTACHAR);
break;
case ESC_V:
(void)add_not_list_to_class(classbits, &class_uchardata,
- options & ~PCRE2_CASELESS, cb, PRIV(vspace_list));
+ options & ~PCRE2_CASELESS, xoptions, cb, PRIV(vspace_list));
break;
/* If Unicode is not supported, \P and \p are not allowed and are
@@ -6046,32 +6308,32 @@ for (;; pptr++)
if (C <= CHAR_i)
{
class_has_8bitchar +=
- add_to_class(classbits, &class_uchardata, options, cb, C + uc,
- ((D < CHAR_i)? D : CHAR_i) + uc);
+ add_to_class(classbits, &class_uchardata, options, xoptions,
+ cb, C + uc, ((D < CHAR_i)? D : CHAR_i) + uc);
C = CHAR_j;
}
if (C <= D && C <= CHAR_r)
{
class_has_8bitchar +=
- add_to_class(classbits, &class_uchardata, options, cb, C + uc,
- ((D < CHAR_r)? D : CHAR_r) + uc);
+ add_to_class(classbits, &class_uchardata, options, xoptions,
+ cb, C + uc, ((D < CHAR_r)? D : CHAR_r) + uc);
C = CHAR_s;
}
if (C <= D)
{
class_has_8bitchar +=
- add_to_class(classbits, &class_uchardata, options, cb, C + uc,
- D + uc);
+ add_to_class(classbits, &class_uchardata, options, xoptions,
+ cb, C + uc, D + uc);
}
}
else
#endif
/* Not an EBCDIC special range */
- class_has_8bitchar +=
- add_to_class(classbits, &class_uchardata, options, cb, c, d);
+ class_has_8bitchar += add_to_class(classbits, &class_uchardata,
+ options, xoptions, cb, c, d);
goto CONTINUE_CLASS; /* Go get the next char in the class */
} /* End of range handling */
@@ -6079,7 +6341,8 @@ for (;; pptr++)
/* Handle a single character. */
class_has_8bitchar +=
- add_to_class(classbits, &class_uchardata, options, cb, meta, meta);
+ add_to_class(classbits, &class_uchardata, options, xoptions, cb,
+ meta, meta);
}
/* Continue to the next item in the class. */
@@ -6124,11 +6387,11 @@ for (;; pptr++)
characters > 255 are in or not in the class, so any that were explicitly
given as well can be ignored.
- In the UCP case, if certain negated POSIX classes ([:^ascii:] or
- [^:xdigit:]) were present in a class, we either have to match or not match
- all wide characters (depending on whether the whole class is or is not
- negated). This requirement is indicated by match_all_or_no_wide_chars being
- true. We do this by including an explicit range, which works in both cases.
+ In the UCP case, if certain negated POSIX classes (ex: [:^ascii:]) were
+ present in a class, we either have to match or not match all wide
+ characters (depending on whether the whole class is or is not negated).
+ This requirement is indicated by match_all_or_no_wide_chars being true.
+ We do this by including an explicit range, which works in both cases.
This applies only in UTF and 16-bit and 32-bit non-UTF modes, since there
cannot be any wide characters in 8-bit non-UTF mode.
@@ -6232,7 +6495,7 @@ for (;; pptr++)
case META_ACCEPT:
cb->had_accept = had_accept = TRUE;
- for (oc = cb->open_caps;
+ for (oc = open_caps;
oc != NULL && oc->assert_depth >= cb->assert_depth;
oc = oc->next)
{
@@ -6317,6 +6580,7 @@ for (;; pptr++)
case META_OPTIONS:
*optionsptr = options = *(++pptr);
+ *xoptionsptr = xoptions = *(++pptr);
greedy_default = ((options & PCRE2_UNGREEDY) != 0);
greedy_non_default = greedy_default ^ 1;
req_caseopt = ((options & PCRE2_CASELESS) != 0)? REQ_CASELESS : 0;
@@ -6562,7 +6826,8 @@ for (;; pptr++)
if ((group_return =
compile_regex(
- options, /* The option state */
+ options, /* The options state */
+ xoptions, /* The extra options state */
&tempcode, /* Where to put code (updated) */
&pptr, /* Input pointer (updated) */
errorcodeptr, /* Where to put an error message */
@@ -6572,6 +6837,7 @@ for (;; pptr++)
&subreqcu, /* For possible last char */
&subreqcuflags,
bcptr, /* Current branch chain */
+ open_caps, /* Pointer to capture stack */
cb, /* Compile data block */
(lengthptr == NULL)? NULL : /* Actual compile phase */
&length_prevgroup /* Pre-compile phase */
@@ -7112,15 +7378,12 @@ for (;; pptr++)
/* In the pre-compile phase, we don't actually do the replication. We
just adjust the length as if we had. Do some paranoid checks for
- potential integer overflow. The INT64_OR_DOUBLE type is a 64-bit
- integer type when available, otherwise double. */
+ potential integer overflow. */
if (lengthptr != NULL)
{
- PCRE2_SIZE delta = replicate*(1 + LINK_SIZE);
- if ((INT64_OR_DOUBLE)replicate*
- (INT64_OR_DOUBLE)(1 + LINK_SIZE) >
- (INT64_OR_DOUBLE)INT_MAX ||
+ PCRE2_SIZE delta;
+ if (PRIV(ckd_smul)(&delta, replicate, 1 + LINK_SIZE) ||
OFLOW_MAX - *lengthptr < delta)
{
*errorcodeptr = ERR20;
@@ -7282,15 +7545,13 @@ for (;; pptr++)
{
/* In the pre-compile phase, we don't actually do the replication.
We just adjust the length as if we had. Do some paranoid checks for
- potential integer overflow. The INT64_OR_DOUBLE type is a 64-bit
- integer type when available, otherwise double. */
+ potential integer overflow. */
if (lengthptr != NULL)
{
- PCRE2_SIZE delta = (repeat_min - 1)*length_prevgroup;
- if ((INT64_OR_DOUBLE)(repeat_min - 1)*
- (INT64_OR_DOUBLE)length_prevgroup >
- (INT64_OR_DOUBLE)INT_MAX ||
+ PCRE2_SIZE delta;
+ if (PRIV(ckd_smul)(&delta, repeat_min - 1,
+ (int)length_prevgroup) ||
OFLOW_MAX - *lengthptr < delta)
{
*errorcodeptr = ERR20;
@@ -7334,21 +7595,19 @@ for (;; pptr++)
just adjust the length as if we had. For each repetition we must add
1 to the length for BRAZERO and for all but the last repetition we
must add 2 + 2*LINKSIZE to allow for the nesting that occurs. Do some
- paranoid checks to avoid integer overflow. The INT64_OR_DOUBLE type
- is a 64-bit integer type when available, otherwise double. */
+ paranoid checks to avoid integer overflow. */
if (lengthptr != NULL && repeat_max > 0)
{
- PCRE2_SIZE delta = repeat_max*(length_prevgroup + 1 + 2 + 2*LINK_SIZE) -
- 2 - 2*LINK_SIZE; /* Last one doesn't nest */
- if ((INT64_OR_DOUBLE)repeat_max *
- (INT64_OR_DOUBLE)(length_prevgroup + 1 + 2 + 2*LINK_SIZE)
- > (INT64_OR_DOUBLE)INT_MAX ||
- OFLOW_MAX - *lengthptr < delta)
+ PCRE2_SIZE delta;
+ if (PRIV(ckd_smul)(&delta, repeat_max,
+ (int)length_prevgroup + 1 + 2 + 2*LINK_SIZE) ||
+ OFLOW_MAX + (2 + 2*LINK_SIZE) - *lengthptr < delta)
{
*errorcodeptr = ERR20;
return 0;
}
+ delta -= (2 + 2*LINK_SIZE); /* Last one doesn't nest */
*lengthptr += delta;
}
@@ -7901,7 +8160,7 @@ for (;; pptr++)
done. However, there's an option, in case anyone was relying on it. */
if (cb->assert_depth > 0 && meta_arg == ESC_K &&
- (cb->cx->extra_options & PCRE2_EXTRA_ALLOW_LOOKAROUND_BSK) == 0)
+ (xoptions & PCRE2_EXTRA_ALLOW_LOOKAROUND_BSK) == 0)
{
*errorcodeptr = ERR99;
return 0;
@@ -7909,23 +8168,41 @@ for (;; pptr++)
/* For the rest (including \X when Unicode is supported - if not it's
faulted at parse time), the OP value is the escape value when PCRE2_UCP is
- not set; if it is set, these escapes do not show up here because they are
- converted into Unicode property tests in parse_regex(). Note that \b and \B
- do a one-character lookbehind, and \A also behaves as if it does. */
+ not set; if it is set, most of them do not show up here because they are
+ converted into Unicode property tests in parse_regex().
- if (meta_arg == ESC_C) cb->external_flags |= PCRE2_HASBKC; /* Record */
- if ((meta_arg == ESC_b || meta_arg == ESC_B || meta_arg == ESC_A) &&
- cb->max_lookbehind == 0)
- cb->max_lookbehind = 1;
+ In non-UTF mode, and for both 32-bit modes, we turn \C into OP_ALLANY
+ instead of OP_ANYBYTE so that it works in DFA mode and in lookbehinds.
+ There are special UCP codes for \B and \b which are used in UCP mode unless
+ "word" matching is being forced to ASCII.
- /* In non-UTF mode, and for both 32-bit modes, we turn \C into OP_ALLANY
- instead of OP_ANYBYTE so that it works in DFA mode and in lookbehinds. */
+ Note that \b and \B do a one-character lookbehind, and \A also behaves as
+ if it does. */
+ switch(meta_arg)
+ {
+ case ESC_C:
+ cb->external_flags |= PCRE2_HASBKC; /* Record */
#if PCRE2_CODE_UNIT_WIDTH == 32
- *code++ = (meta_arg == ESC_C)? OP_ALLANY : meta_arg;
+ meta_arg = OP_ALLANY;
#else
- *code++ = (!utf && meta_arg == ESC_C)? OP_ALLANY : meta_arg;
+ if (!utf) meta_arg = OP_ALLANY;
#endif
+ break;
+
+ case ESC_B:
+ case ESC_b:
+ if ((options & PCRE2_UCP) != 0 && (xoptions & PCRE2_EXTRA_ASCII_BSW) == 0)
+ meta_arg = (meta_arg == ESC_B)? OP_NOT_UCP_WORD_BOUNDARY :
+ OP_UCP_WORD_BOUNDARY;
+ /* Fall through */
+
+ case ESC_A:
+ if (cb->max_lookbehind == 0) cb->max_lookbehind = 1;
+ break;
+ }
+
+ *code++ = meta_arg;
break; /* End META_ESCAPE */
@@ -7953,13 +8230,16 @@ for (;; pptr++)
/* For caseless UTF or UCP mode, check whether this character has more than
one other case. If so, generate a special OP_PROP item instead of OP_CHARI.
- */
+ When casing restrictions apply, ignore caseless sets that start with an
+ ASCII character. */
#ifdef SUPPORT_UNICODE
if ((utf||ucp) && (options & PCRE2_CASELESS) != 0)
{
uint32_t caseset = UCD_CASESET(meta);
- if (caseset != 0)
+ if (caseset != 0 &&
+ ((xoptions & PCRE2_EXTRA_CASELESS_RESTRICT) == 0 ||
+ PRIV(ucd_caseless_sets)[caseset] > 127))
{
*code++ = OP_PROP;
*code++ = PT_CLIST;
@@ -8075,6 +8355,7 @@ the two phases.
Arguments:
options option bits, including any changes for this subpattern
+ xoptions extra option bits, ditto
codeptr -> the address of the current code pointer
pptrptr -> the address of the current parsed pattern pointer
errorcodeptr -> pointer to error code variable
@@ -8094,10 +8375,11 @@ Returns: 0 There has been an error
*/
static int
-compile_regex(uint32_t options, PCRE2_UCHAR **codeptr, uint32_t **pptrptr,
- int *errorcodeptr, uint32_t skipunits, uint32_t *firstcuptr,
- uint32_t *firstcuflagsptr, uint32_t *reqcuptr, uint32_t *reqcuflagsptr,
- branch_chain *bcptr, compile_block *cb, PCRE2_SIZE *lengthptr)
+compile_regex(uint32_t options, uint32_t xoptions, PCRE2_UCHAR **codeptr,
+ uint32_t **pptrptr, int *errorcodeptr, uint32_t skipunits,
+ uint32_t *firstcuptr, uint32_t *firstcuflagsptr, uint32_t *reqcuptr,
+ uint32_t *reqcuflagsptr, branch_chain *bcptr, open_capitem *open_caps,
+ compile_block *cb, PCRE2_SIZE *lengthptr)
{
PCRE2_UCHAR *code = *codeptr;
PCRE2_UCHAR *last_branch = code;
@@ -8109,6 +8391,7 @@ int okreturn = 1;
uint32_t *pptr = *pptrptr;
uint32_t firstcu, reqcu;
uint32_t lookbehindlength;
+uint32_t lookbehindminlength;
uint32_t firstcuflags, reqcuflags;
uint32_t branchfirstcu, branchreqcu;
uint32_t branchfirstcuflags, branchreqcuflags;
@@ -8151,9 +8434,10 @@ lookbehind = *code == OP_ASSERTBACK ||
if (lookbehind)
{
lookbehindlength = META_DATA(pptr[-1]);
+ lookbehindminlength = *pptr;
pptr += SIZEOFFSET;
}
-else lookbehindlength = 0;
+else lookbehindlength = lookbehindminlength = 0;
/* If this is a capturing subpattern, add to the chain of open capturing items
so that we can detect them if (*ACCEPT) is encountered. Note that only OP_CBRA
@@ -8164,9 +8448,9 @@ if (*code == OP_CBRA)
{
capnumber = GET2(code, 1 + LINK_SIZE);
capitem.number = capnumber;
- capitem.next = cb->open_caps;
+ capitem.next = open_caps;
capitem.assert_depth = cb->assert_depth;
- cb->open_caps = &capitem;
+ open_caps = &capitem;
}
/* Offset is set zero to mark that this bracket is still open */
@@ -8180,22 +8464,39 @@ for (;;)
{
int branch_return;
- /* Insert OP_REVERSE if this is as lookbehind assertion. */
+ /* Insert OP_REVERSE or OP_VREVERSE if this is a lookbehind assertion. There
+ is only a single minimum length for the whole assertion. When the minimum
+ length is LOOKBEHIND_MAX it means that all branches are of fixed length,
+ though not necessarily the same length. In this case, the original OP_REVERSE
+ can be used. It can also be used if a branch in a variable length lookbehind
+ has the same maximum and minimum. Otherwise, use OP_VREVERSE, which has both
+ maximum and minimum values. */
if (lookbehind && lookbehindlength > 0)
{
- *code++ = OP_REVERSE;
- PUTINC(code, 0, lookbehindlength);
- length += 1 + LINK_SIZE;
+ if (lookbehindminlength == LOOKBEHIND_MAX ||
+ lookbehindminlength == lookbehindlength)
+ {
+ *code++ = OP_REVERSE;
+ PUT2INC(code, 0, lookbehindlength);
+ length += 1 + IMM2_SIZE;
+ }
+ else
+ {
+ *code++ = OP_VREVERSE;
+ PUT2INC(code, 0, lookbehindminlength);
+ PUT2INC(code, 0, lookbehindlength);
+ length += 1 + 2*IMM2_SIZE;
+ }
}
/* Now compile the branch; in the pre-compile phase its length gets added
into the length. */
if ((branch_return =
- compile_branch(&options, &code, &pptr, errorcodeptr, &branchfirstcu,
- &branchfirstcuflags, &branchreqcu, &branchreqcuflags, &bc,
- cb, (lengthptr == NULL)? NULL : &length)) == 0)
+ compile_branch(&options, &xoptions, &code, &pptr, errorcodeptr,
+ &branchfirstcu, &branchfirstcuflags, &branchreqcu, &branchreqcuflags,
+ &bc, open_caps, cb, (lengthptr == NULL)? NULL : &length)) == 0)
return 0;
/* If a branch can match an empty string, so can the whole group. */
@@ -8293,10 +8594,6 @@ for (;;)
PUT(code, 1, (int)(code - start_bracket));
code += 1 + LINK_SIZE;
- /* If it was a capturing subpattern, remove the block from the chain. */
-
- if (capnumber > 0) cb->open_caps = cb->open_caps->next;
-
/* Set values to pass back */
*codeptr = code;
@@ -8339,8 +8636,8 @@ for (;;)
code += 1 + LINK_SIZE;
}
- /* Set the lookbehind length (if not in a lookbehind the value will be zero)
- and then advance past the vertical bar. */
+ /* Set the maximum lookbehind length for the next branch (if not in a
+ lookbehind the value will be zero) and then advance past the vertical bar. */
lookbehindlength = META_DATA(*pptr);
pptr++;
@@ -9051,13 +9348,13 @@ return pptr;
*************************************************/
/* This is called for nested groups within a branch of a lookbehind whose
-length is being computed. If all the branches in the nested group have the same
-length, that is OK. On entry, the pointer must be at the first element after
-the group initializing code. On exit it points to OP_KET. Caching is used to
-improve processing speed when the same capturing group occurs many times.
+length is being computed. On entry, the pointer must be at the first element
+after the group initializing code. On exit it points to OP_KET. Caching is used
+to improve processing speed when the same capturing group occurs many times.
Arguments:
pptrptr pointer to pointer in the parsed pattern
+ minptr where to return the minimum length
isinline FALSE if a reference or recursion; TRUE for inline group
errcodeptr pointer to the errorcode
lcptr pointer to the loop counter
@@ -9065,15 +9362,17 @@ Arguments:
recurses chain of recurse_check to catch mutual recursion
cb pointer to the compile data
-Returns: the group length or a negative number
+Returns: the maximum group length or a negative number
*/
static int
-get_grouplength(uint32_t **pptrptr, BOOL isinline, int *errcodeptr, int *lcptr,
- int group, parsed_recurse_check *recurses, compile_block *cb)
+get_grouplength(uint32_t **pptrptr, int *minptr, BOOL isinline, int *errcodeptr,
+ int *lcptr, int group, parsed_recurse_check *recurses, compile_block *cb)
{
-int branchlength;
+uint32_t *gi = cb->groupinfo + 2 * group;
+int branchlength, branchminlength;
int grouplength = -1;
+int groupminlength = INT_MAX;
/* The cache can be used only if there is no possibility of there being two
groups with the same number. We do not need to set the end pointer for a group
@@ -9082,11 +9381,12 @@ an inline group. */
if (group > 0 && (cb->external_flags & PCRE2_DUPCAPUSED) == 0)
{
- uint32_t groupinfo = cb->groupinfo[group];
+ uint32_t groupinfo = gi[0];
if ((groupinfo & GI_NOT_FIXED_LENGTH) != 0) return -1;
if ((groupinfo & GI_SET_FIXED_LENGTH) != 0)
{
if (isinline) *pptrptr = parsed_skip(*pptrptr, PSKIP_KET);
+ *minptr = gi[1];
return groupinfo & GI_FIXED_LENGTH_MASK;
}
}
@@ -9095,20 +9395,26 @@ if (group > 0 && (cb->external_flags & PCRE2_DUPCAPUSED) == 0)
for(;;)
{
- branchlength = get_branchlength(pptrptr, errcodeptr, lcptr, recurses, cb);
+ branchlength = get_branchlength(pptrptr, &branchminlength, errcodeptr, lcptr,
+ recurses, cb);
if (branchlength < 0) goto ISNOTFIXED;
- if (grouplength == -1) grouplength = branchlength;
- else if (grouplength != branchlength) goto ISNOTFIXED;
+ if (branchlength > grouplength) grouplength = branchlength;
+ if (branchminlength < groupminlength) groupminlength = branchminlength;
if (**pptrptr == META_KET) break;
*pptrptr += 1; /* Skip META_ALT */
}
if (group > 0)
- cb->groupinfo[group] |= (uint32_t)(GI_SET_FIXED_LENGTH | grouplength);
+ {
+ gi[0] |= (uint32_t)(GI_SET_FIXED_LENGTH | grouplength);
+ gi[1] = groupminlength;
+ }
+
+*minptr = groupminlength;
return grouplength;
ISNOTFIXED:
-if (group > 0) cb->groupinfo[group] |= GI_NOT_FIXED_LENGTH;
+if (group > 0) gi[0] |= GI_NOT_FIXED_LENGTH;
return -1;
}
@@ -9118,27 +9424,30 @@ return -1;
* Find length of a parsed branch *
*************************************************/
-/* Return a fixed length for a branch in a lookbehind, giving an error if the
-length is not fixed. On entry, *pptrptr points to the first element inside the
-branch. On exit it is set to point to the ALT or KET.
+/* Return fixed maximum and minimum lengths for a branch in a lookbehind,
+giving an error if the length is not limited. On entry, *pptrptr points to the
+first element inside the branch. On exit it is set to point to the ALT or KET.
Arguments:
pptrptr pointer to pointer in the parsed pattern
+ minptr where to return the minimum length
errcodeptr pointer to error code
lcptr pointer to loop counter
recurses chain of recurse_check to catch mutual recursion
cb pointer to compile block
-Returns: the length, or a negative value on error
+Returns: the maximum length, or a negative value on error
*/
static int
-get_branchlength(uint32_t **pptrptr, int *errcodeptr, int *lcptr,
+get_branchlength(uint32_t **pptrptr, int *minptr, int *errcodeptr, int *lcptr,
parsed_recurse_check *recurses, compile_block *cb)
{
int branchlength = 0;
-int grouplength;
+int branchminlength = 0;
+int grouplength, groupminlength;
uint32_t lastitemlength = 0;
+uint32_t lastitemminlength = 0;
uint32_t *pptr = *pptrptr;
PCRE2_SIZE offset;
parsed_recurse_check this_recurse;
@@ -9162,10 +9471,12 @@ for (;; pptr++)
uint32_t escape;
uint32_t group = 0;
uint32_t itemlength = 0;
+ uint32_t itemminlength = 0;
+ uint32_t min, max;
if (*pptr < META_END)
{
- itemlength = 1;
+ itemlength = itemminlength = 1;
}
else switch (META_CODE(*pptr))
@@ -9200,24 +9511,24 @@ for (;; pptr++)
break;
case META_OPTIONS:
- pptr += 1;
+ pptr += 2;
break;
case META_BIGVALUE:
- itemlength = 1;
+ itemlength = itemminlength = 1;
pptr += 1;
break;
case META_CLASS:
case META_CLASS_NOT:
- itemlength = 1;
+ itemlength = itemminlength = 1;
pptr = parsed_skip(pptr, PSKIP_CLASS);
if (pptr == NULL) goto PARSED_SKIP_FAILED;
break;
case META_CLASS_EMPTY_NOT:
case META_DOT:
- itemlength = 1;
+ itemlength = itemminlength = 1;
break;
case META_CALLOUT_NUMBER:
@@ -9228,14 +9539,19 @@ for (;; pptr++)
pptr += 3 + SIZEOFFSET;
break;
- /* Only some escapes consume a character. Of those, \R and \X are never
- allowed because they might match more than character. \C is allowed only in
- 32-bit and non-UTF 8/16-bit modes. */
+ /* Only some escapes consume a character. Of those, \R can match one or two
+ characters, but \X is never allowed because it matches an unknown number of
+ characters. \C is allowed only in 32-bit and non-UTF 8/16-bit modes. */
case META_ESCAPE:
escape = META_DATA(*pptr);
- if (escape == ESC_R || escape == ESC_X) return -1;
- if (escape > ESC_b && escape < ESC_Z)
+ if (escape == ESC_X) return -1;
+ if (escape == ESC_R)
+ {
+ itemminlength = 1;
+ itemlength = 2;
+ }
+ else if (escape > ESC_b && escape < ESC_Z)
{
#if PCRE2_CODE_UNIT_WIDTH != 32
if ((cb->external_options & PCRE2_UTF) != 0 && escape == ESC_C)
@@ -9244,7 +9560,7 @@ for (;; pptr++)
return -1;
}
#endif
- itemlength = 1;
+ itemlength = itemminlength = 1;
if (escape == ESC_p || escape == ESC_P) pptr++; /* Skip prop data */
}
break;
@@ -9400,14 +9716,15 @@ for (;; pptr++)
in the cache. */
gptr++;
- grouplength = get_grouplength(&gptr, FALSE, errcodeptr, lcptr, group,
- &this_recurse, cb);
+ grouplength = get_grouplength(&gptr, &groupminlength, FALSE, errcodeptr,
+ lcptr, group, &this_recurse, cb);
if (grouplength < 0)
{
if (*errcodeptr == 0) goto ISNOTFIXED;
return -1; /* Error already set */
}
itemlength = grouplength;
+ itemminlength = groupminlength;
break;
/* A (DEFINE) group is never obeyed inline and so it does not contribute to
@@ -9445,41 +9762,44 @@ for (;; pptr++)
case META_SCRIPT_RUN:
pptr++;
CHECK_GROUP:
- grouplength = get_grouplength(&pptr, TRUE, errcodeptr, lcptr, group,
- recurses, cb);
+ grouplength = get_grouplength(&pptr, &groupminlength, TRUE, errcodeptr,
+ lcptr, group, recurses, cb);
if (grouplength < 0) return -1;
itemlength = grouplength;
+ itemminlength = groupminlength;
break;
+ case META_QUERY:
+ case META_QUERY_PLUS:
+ case META_QUERY_QUERY:
+ min = 0;
+ max = 1;
+ goto REPETITION;
+
/* Exact repetition is OK; variable repetition is not. A repetition of zero
must subtract the length that has already been added. */
case META_MINMAX:
case META_MINMAX_PLUS:
case META_MINMAX_QUERY:
- if (pptr[1] == pptr[2])
+ min = pptr[1];
+ max = pptr[2];
+ pptr += 2;
+
+ REPETITION:
+ if (max != REPEAT_UNLIMITED)
{
- switch(pptr[1])
+ if (lastitemlength != 0 && /* Should not occur, but just in case */
+ max != 0 &&
+ (INT_MAX - branchlength)/lastitemlength < max - 1)
{
- case 0:
- branchlength -= lastitemlength;
- break;
-
- case 1:
- itemlength = 0;
- break;
-
- default: /* Check for integer overflow */
- if (lastitemlength != 0 && /* Should not occur, but just in case */
- INT_MAX/lastitemlength < pptr[1] - 1)
- {
- *errcodeptr = ERR87; /* Integer overflow; lookbehind too big */
- return -1;
- }
- itemlength = (pptr[1] - 1) * lastitemlength;
- break;
+ *errcodeptr = ERR87; /* Integer overflow; lookbehind too big */
+ return -1;
}
- pptr += 2;
+ if (min == 0) branchminlength -= lastitemminlength;
+ else itemminlength = (min - 1) * lastitemminlength;
+ if (max == 0) branchlength -= lastitemlength;
+ else itemlength = (max - 1) * lastitemlength;
break;
}
/* Fall through */
@@ -9493,7 +9813,9 @@ for (;; pptr++)
}
/* Add the item length to the branchlength, checking for integer overflow and
- for the branch length exceeding the limit. */
+ for the branch length exceeding the overall limit. Later, if there is at
+ least one variable-length branch in the group, there is a test for the
+ (smaller) variable-length branch length limit. */
if (INT_MAX - branchlength < (int)itemlength ||
(branchlength += itemlength) > LOOKBEHIND_MAX)
@@ -9502,13 +9824,17 @@ for (;; pptr++)
return -1;
}
+ branchminlength += itemminlength;
+
/* Save this item length for use if the next item is a quantifier. */
lastitemlength = itemlength;
+ lastitemminlength = itemminlength;
}
EXIT:
*pptrptr = pptr;
+*minptr = branchminlength;
return branchlength;
PARSED_SKIP_FAILED:
@@ -9523,9 +9849,9 @@ return -1;
*************************************************/
/* This function is called for each lookbehind, to set the lengths in its
-branches. An error occurs if any branch does not have a fixed length that is
-less than the maximum (65535). On exit, the pointer must be left on the final
-ket.
+branches. An error occurs if any branch does not have a limited maximum length
+that is less than the limit (65535). On exit, the pointer must be left on the
+final ket.
The function also maintains the max_lookbehind value. Any lookbehind branch
that contains a nested lookbehind may actually look further back than the
@@ -9548,16 +9874,27 @@ set_lookbehind_lengths(uint32_t **pptrptr, int *errcodeptr, int *lcptr,
parsed_recurse_check *recurses, compile_block *cb)
{
PCRE2_SIZE offset;
-int branchlength;
uint32_t *bptr = *pptrptr;
+uint32_t *gbptr = bptr;
+int maxlength = 0;
+int minlength = INT_MAX;
+BOOL variable = FALSE;
READPLUSOFFSET(offset, bptr); /* Offset for error messages */
*pptrptr += SIZEOFFSET;
+/* Each branch can have a different maximum length, but we can keep only a
+single minimum for the whole group, because there's nowhere to save individual
+values in the META_ALT item. */
+
do
{
+ int branchlength, branchminlength;
+
*pptrptr += 1;
- branchlength = get_branchlength(pptrptr, errcodeptr, lcptr, recurses, cb);
+ branchlength = get_branchlength(pptrptr, &branchminlength, errcodeptr, lcptr,
+ recurses, cb);
+
if (branchlength < 0)
{
/* The errorcode and offset may already be set from a nested lookbehind. */
@@ -9565,12 +9902,37 @@ do
if (cb->erroroffset == PCRE2_UNSET) cb->erroroffset = offset;
return FALSE;
}
+
+ if (branchlength != branchminlength) variable = TRUE;
+ if (branchminlength < minlength) minlength = branchminlength;
+ if (branchlength > maxlength) maxlength = branchlength;
if (branchlength > cb->max_lookbehind) cb->max_lookbehind = branchlength;
*bptr |= branchlength; /* branchlength never more than 65535 */
bptr = *pptrptr;
}
-while (*bptr == META_ALT);
+while (META_CODE(*bptr) == META_ALT);
+/* If any branch is of variable length, the whole lookbehind is of variable
+length. If the maximum length of any branch exceeds the maximum for variable
+lookbehinds, give an error. Otherwise, the minimum length is set in the word
+that follows the original group META value. For a fixed-length lookbehind, this
+is set to LOOKBEHIND_MAX, to indicate that each branch is of a fixed (but
+possibly different) length. */
+
+if (variable)
+ {
+ gbptr[1] = minlength;
+ if ((uint32_t)maxlength > cb->max_varlookbehind)
+ {
+ *errcodeptr = ERR100;
+ cb->erroroffset = offset;
+ return FALSE;
+ }
+ }
+else gbptr[1] = LOOKBEHIND_MAX;
+
return TRUE;
}
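
For example, to illustrate the new scheme: a lookbehind such as (?<=colou?r) has one branch with maximum length 6 and minimum length 5, so variable becomes TRUE, 5 is stored in the word that follows the group's META value, and compile_regex() later emits OP_VREVERSE carrying both 5 and 6 for that branch.
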
@@ -9703,7 +10065,6 @@ for (; *pptr != META_END; pptr++)
break;
case META_BIGVALUE:
- case META_OPTIONS:
case META_POSIX:
case META_POSIX_NEG:
pptr += 1;
@@ -9712,6 +10073,7 @@ for (; *pptr != META_END; pptr++)
case META_MINMAX:
case META_MINMAX_QUERY:
case META_MINMAX_PLUS:
+ case META_OPTIONS:
pptr += 2;
break;
@@ -9820,12 +10182,15 @@ if (errorptr == NULL || erroroffset == NULL) return NULL;
*errorptr = ERR0;
*erroroffset = 0;
-/* There must be a pattern! */
+/* There must be a pattern, but NULL is allowed with zero length. */
if (pattern == NULL)
{
- *errorptr = ERR16;
- return NULL;
+ if (patlen == 0) pattern = (PCRE2_SPTR)""; else
+ {
+ *errorptr = ERR16;
+ return NULL;
+ }
}
/* A NULL compile context means "use a default context" */
@@ -9890,13 +10255,13 @@ cb.external_options = options;
cb.groupinfo = stack_groupinfo;
cb.had_recurse = FALSE;
cb.lastcapture = 0;
-cb.max_lookbehind = 0;
+cb.max_lookbehind = 0; /* Max encountered */
+cb.max_varlookbehind = ccontext->max_varlookbehind; /* Limit */
cb.name_entry_size = 0;
cb.name_table = NULL;
cb.named_groups = named_groups;
cb.named_group_list_size = NAMED_GROUP_LIST_SIZE;
cb.names_found = 0;
-cb.open_caps = NULL;
cb.parens_depth = 0;
cb.parsed_pattern = stack_parsed_pattern;
cb.req_varyopt = 0;
@@ -9949,7 +10314,7 @@ if ((options & PCRE2_LITERAL) == 0)
for (i = 0; i < sizeof(pso_list)/sizeof(pso); i++)
{
uint32_t c, pp;
- pso *p = pso_list + i;
+ const pso *p = pso_list + i;
if (patlen - skipatstart - 2 >= p->length &&
PRIV(strncmp_c8)(ptr + skipatstart + 2, (char *)(p->name),
@@ -10158,39 +10523,36 @@ cb.parsed_pattern_end = cb.parsed_pattern + parsed_size_needed + 1;
errorcode = parse_regex(ptr, cb.external_options, &has_lookbehind, &cb);
if (errorcode != 0) goto HAD_CB_ERROR;
-/* Workspace is needed to remember information about numbered groups: whether a
-group can match an empty string and what its fixed length is. This is done to
-avoid the possibility of recursive references causing very long compile times
-when checking these features. Unnumbered groups do not have this exposure since
-they cannot be referenced. We use an indexed vector for this purpose. If there
-are sufficiently few groups, the default vector on the stack, as set up above,
-can be used. Otherwise we have to get/free a special vector. The vector must be
-initialized to zero. */
-
-if (cb.bracount >= GROUPINFO_DEFAULT_SIZE)
- {
- cb.groupinfo = ccontext->memctl.malloc(
- (cb.bracount + 1)*sizeof(uint32_t), ccontext->memctl.memory_data);
- if (cb.groupinfo == NULL)
- {
- errorcode = ERR21;
- cb.erroroffset = 0;
- goto HAD_CB_ERROR;
- }
- }
-memset(cb.groupinfo, 0, (cb.bracount + 1) * sizeof(uint32_t));
-
-/* If there were any lookbehinds, scan the parsed pattern to figure out their
-lengths. */
+/* If there are any lookbehinds, scan the parsed pattern to figure out their
+lengths. Workspace is needed to remember whether numbered groups are or are not
+of limited length, and if limited, what the minimum and maximum lengths are.
+This caching saves re-computing the length of any group that is referenced more
+than once, which is particularly relevant when recursion is involved.
+Unnumbered groups do not have this exposure because they cannot be referenced.
+If there are sufficiently few groups, the default index vector on the stack, as
+set up above, can be used. Otherwise we have to get/free some heap memory. The
+vector must be initialized to zero. */
if (has_lookbehind)
{
int loopcount = 0;
+ if (cb.bracount >= GROUPINFO_DEFAULT_SIZE/2)
+ {
+ cb.groupinfo = ccontext->memctl.malloc(
+ (2 * (cb.bracount + 1))*sizeof(uint32_t), ccontext->memctl.memory_data);
+ if (cb.groupinfo == NULL)
+ {
+ errorcode = ERR21;
+ cb.erroroffset = 0;
+ goto HAD_CB_ERROR;
+ }
+ }
+ memset(cb.groupinfo, 0, (2 * cb.bracount + 1) * sizeof(uint32_t));
errorcode = check_lookbehinds(cb.parsed_pattern, NULL, NULL, &cb, &loopcount);
if (errorcode != 0) goto HAD_CB_ERROR;
}
-/* For debugging, there is a function that shows the parsed data vector. */
+/* For debugging, there is a function that shows the parsed pattern vector. */
#ifdef DEBUG_SHOW_PARSED
fprintf(stderr, "+++ Pre-scan complete:\n");
@@ -10227,8 +10589,9 @@ pptr = cb.parsed_pattern;
code = cworkspace;
*code = OP_BRA;
-(void)compile_regex(cb.external_options, &code, &pptr, &errorcode, 0, &firstcu,
- &firstcuflags, &reqcu, &reqcuflags, NULL, &cb, &length);
+(void)compile_regex(cb.external_options, ccontext->extra_options, &code, &pptr,
+ &errorcode, 0, &firstcu, &firstcuflags, &reqcu, &reqcuflags, NULL, NULL,
+ &cb, &length);
if (errorcode != 0) goto HAD_CB_ERROR; /* Offset is in cb.erroroffset */
@@ -10240,14 +10603,21 @@ if (length > MAX_PATTERN_SIZE)
goto HAD_CB_ERROR;
}
-/* Compute the size of, and then get and initialize, the data block for storing
-the compiled pattern and names table. Integer overflow should no longer be
-possible because nowadays we limit the maximum value of cb.names_found and
-cb.name_entry_size. */
+/* Compute the size of, then, if not too large, get and initialize the data
+block for storing the compiled pattern and names table. Integer overflow should
+no longer be possible because nowadays we limit the maximum value of
+cb.names_found and cb.name_entry_size. */
re_blocksize = sizeof(pcre2_real_code) +
CU2BYTES(length +
(PCRE2_SIZE)cb.names_found * (PCRE2_SIZE)cb.name_entry_size);
+
+if (re_blocksize > ccontext->max_pattern_compiled_length)
+ {
+ errorcode = ERR101;
+ goto HAD_CB_ERROR;
+ }
+
re = (pcre2_real_code *)
ccontext->memctl.malloc(re_blocksize, ccontext->memctl.memory_data);
if (re == NULL)
@@ -10306,7 +10676,6 @@ cb.start_code = codestart;
cb.req_varyopt = 0;
cb.had_accept = FALSE;
cb.had_pruneorskip = FALSE;
-cb.open_caps = NULL;
/* If any named groups were found, create the name/number table from the list
created in the pre-pass. */
@@ -10325,8 +10694,9 @@ of the function here. */
pptr = cb.parsed_pattern;
code = (PCRE2_UCHAR *)codestart;
*code = OP_BRA;
-regexrc = compile_regex(re->overall_options, &code, &pptr, &errorcode, 0,
- &firstcu, &firstcuflags, &reqcu, &reqcuflags, NULL, &cb, NULL);
+regexrc = compile_regex(re->overall_options, ccontext->extra_options, &code,
+ &pptr, &errorcode, 0, &firstcu, &firstcuflags, &reqcu, &reqcuflags, NULL,
+ NULL, &cb, NULL);
if (regexrc < 0) re->flags |= PCRE2_MATCH_EMPTY;
re->top_bracket = cb.bracount;
re->top_backref = cb.top_backref;
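
Taken together, the changes above let a lookbehind whose branches have a bounded but variable length compile, where earlier releases rejected anything that was not fixed-length per branch. A minimal caller-side sketch, assuming a build against this updated PCRE2; the pattern and the error handling are illustrative only:

#define PCRE2_CODE_UNIT_WIDTH 8
#include <pcre2.h>

int main(void)
{
int errorcode;
PCRE2_SIZE erroroffset;

/* One lookbehind branch with minimum length 5 and maximum length 6.
Older releases rejected this as not being of fixed length; with
OP_VREVERSE it compiles, subject to the max_varlookbehind limit. */
pcre2_code *re = pcre2_compile((PCRE2_SPTR)"(?<=colou?r)\\w+",
  PCRE2_ZERO_TERMINATED, 0, &errorcode, &erroroffset, NULL);

if (re == NULL) return 1;
pcre2_code_free(re);
return 0;
}
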
diff --git a/src/3rdparty/pcre2/src/pcre2_context.c b/src/3rdparty/pcre2/src/pcre2_context.c
index 8e05ede50c..9edbd1b2ae 100644
--- a/src/3rdparty/pcre2/src/pcre2_context.c
+++ b/src/3rdparty/pcre2/src/pcre2_context.c
@@ -7,7 +7,7 @@ and semantics are as close as possible to those of the Perl 5 language.
Written by Philip Hazel
Original API code Copyright (c) 1997-2012 University of Cambridge
- New API code Copyright (c) 2016-2022 University of Cambridge
+ New API code Copyright (c) 2016-2024 University of Cambridge
-----------------------------------------------------------------------------
Redistribution and use in source and binary forms, with or without
@@ -136,10 +136,13 @@ const pcre2_compile_context PRIV(default_compile_context) = {
NULL, /* Stack guard data */
PRIV(default_tables), /* Character tables */
PCRE2_UNSET, /* Max pattern length */
+ PCRE2_UNSET, /* Max pattern compiled length */
BSR_DEFAULT, /* Backslash R default */
NEWLINE_DEFAULT, /* Newline convention */
PARENS_NEST_LIMIT, /* As it says */
- 0 }; /* Extra options */
+ 0, /* Extra options */
+ MAX_VARLOOKBEHIND /* As it says */
+ };
/* The create function copies the default into the new memory, but must
override the default memory handling functions if a gcontext was provided. */
@@ -228,49 +231,48 @@ return ccontext;
PCRE2_EXP_DEFN pcre2_general_context * PCRE2_CALL_CONVENTION
pcre2_general_context_copy(pcre2_general_context *gcontext)
{
-pcre2_general_context *new =
+pcre2_general_context *newcontext =
gcontext->memctl.malloc(sizeof(pcre2_real_general_context),
gcontext->memctl.memory_data);
-if (new == NULL) return NULL;
-memcpy(new, gcontext, sizeof(pcre2_real_general_context));
-return new;
+if (newcontext == NULL) return NULL;
+memcpy(newcontext, gcontext, sizeof(pcre2_real_general_context));
+return newcontext;
}
PCRE2_EXP_DEFN pcre2_compile_context * PCRE2_CALL_CONVENTION
pcre2_compile_context_copy(pcre2_compile_context *ccontext)
{
-pcre2_compile_context *new =
+pcre2_compile_context *newcontext =
ccontext->memctl.malloc(sizeof(pcre2_real_compile_context),
ccontext->memctl.memory_data);
-if (new == NULL) return NULL;
-memcpy(new, ccontext, sizeof(pcre2_real_compile_context));
-return new;
+if (newcontext == NULL) return NULL;
+memcpy(newcontext, ccontext, sizeof(pcre2_real_compile_context));
+return newcontext;
}
PCRE2_EXP_DEFN pcre2_match_context * PCRE2_CALL_CONVENTION
pcre2_match_context_copy(pcre2_match_context *mcontext)
{
-pcre2_match_context *new =
+pcre2_match_context *newcontext =
mcontext->memctl.malloc(sizeof(pcre2_real_match_context),
mcontext->memctl.memory_data);
-if (new == NULL) return NULL;
-memcpy(new, mcontext, sizeof(pcre2_real_match_context));
-return new;
+if (newcontext == NULL) return NULL;
+memcpy(newcontext, mcontext, sizeof(pcre2_real_match_context));
+return newcontext;
}
-
PCRE2_EXP_DEFN pcre2_convert_context * PCRE2_CALL_CONVENTION
pcre2_convert_context_copy(pcre2_convert_context *ccontext)
{
-pcre2_convert_context *new =
+pcre2_convert_context *newcontext =
ccontext->memctl.malloc(sizeof(pcre2_real_convert_context),
ccontext->memctl.memory_data);
-if (new == NULL) return NULL;
-memcpy(new, ccontext, sizeof(pcre2_real_convert_context));
-return new;
+if (newcontext == NULL) return NULL;
+memcpy(newcontext, ccontext, sizeof(pcre2_real_convert_context));
+return newcontext;
}
@@ -352,6 +354,13 @@ return 0;
}
PCRE2_EXP_DEFN int PCRE2_CALL_CONVENTION
+pcre2_set_max_pattern_compiled_length(pcre2_compile_context *ccontext, PCRE2_SIZE length)
+{
+ccontext->max_pattern_compiled_length = length;
+return 0;
+}
+
+PCRE2_EXP_DEFN int PCRE2_CALL_CONVENTION
pcre2_set_newline(pcre2_compile_context *ccontext, uint32_t newline)
{
switch(newline)
@@ -371,6 +380,13 @@ switch(newline)
}
PCRE2_EXP_DEFN int PCRE2_CALL_CONVENTION
+pcre2_set_max_varlookbehind(pcre2_compile_context *ccontext, uint32_t limit)
+{
+ccontext->max_varlookbehind = limit;
+return 0;
+}
+
+PCRE2_EXP_DEFN int PCRE2_CALL_CONVENTION
pcre2_set_parens_nest_limit(pcre2_compile_context *ccontext, uint32_t limit)
{
ccontext->parens_nest_limit = limit;
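
The two compile-context setters added above are plain field assignments; roughly, an application would drive them like this. Sketch only, not part of the patch: the pattern and the numeric limits are illustrative, and the compiled-length limit is assumed to be in bytes, matching the re_blocksize check in the pcre2_compile.c hunk above.

/* Sketch only: exercise pcre2_set_max_pattern_compiled_length() and
   pcre2_set_max_varlookbehind() before compiling. */
#define PCRE2_CODE_UNIT_WIDTH 8
#include <pcre2.h>
#include <stdio.h>

int main(void)
{
pcre2_compile_context *cctx = pcre2_compile_context_create(NULL);
int errorcode;
PCRE2_SIZE erroroffset;
pcre2_code *re;

/* Reject patterns whose compiled block would exceed 64 KiB (compile error 101). */
pcre2_set_max_pattern_compiled_length(cctx, 64 * 1024);

/* Allow variable-length lookbehind branches of up to 255 characters. */
pcre2_set_max_varlookbehind(cctx, 255);

re = pcre2_compile((PCRE2_SPTR)"(?<=ab+)c", PCRE2_ZERO_TERMINATED, 0,
                   &errorcode, &erroroffset, cctx);
if (re == NULL)
  printf("compile failed: error %d at offset %zu\n", errorcode, (size_t)erroroffset);
else
  pcre2_code_free(re);

pcre2_compile_context_free(cctx);
return 0;
}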
diff --git a/src/3rdparty/pcre2/src/pcre2_dfa_match.c b/src/3rdparty/pcre2/src/pcre2_dfa_match.c
index b16e594cc0..caae65248f 100644
--- a/src/3rdparty/pcre2/src/pcre2_dfa_match.c
+++ b/src/3rdparty/pcre2/src/pcre2_dfa_match.c
@@ -7,7 +7,7 @@ and semantics are as close as possible to those of the Perl 5 language.
Written by Philip Hazel
Original API code Copyright (c) 1997-2012 University of Cambridge
- New API code Copyright (c) 2016-2022 University of Cambridge
+ New API code Copyright (c) 2016-2023 University of Cambridge
-----------------------------------------------------------------------------
Redistribution and use in source and binary forms, with or without
@@ -168,7 +168,7 @@ static const uint8_t coptable[] = {
0, /* KetRmax */
0, /* KetRmin */
0, /* KetRpos */
- 0, /* Reverse */
+ 0, 0, /* Reverse, Vreverse */
0, /* Assert */
0, /* Assert not */
0, /* Assert behind */
@@ -187,7 +187,8 @@ static const uint8_t coptable[] = {
0, 0, 0, 0, /* SKIP, SKIP_ARG, THEN, THEN_ARG */
0, 0, /* COMMIT, COMMIT_ARG */
0, 0, 0, /* FAIL, ACCEPT, ASSERT_ACCEPT */
- 0, 0, 0 /* CLOSE, SKIPZERO, DEFINE */
+ 0, 0, 0, /* CLOSE, SKIPZERO, DEFINE */
+ 0, 0 /* \B and \b in UCP mode */
};
/* This table identifies those opcodes that inspect a character. It is used to
@@ -245,7 +246,7 @@ static const uint8_t poptable[] = {
0, /* KetRmax */
0, /* KetRmin */
0, /* KetRpos */
- 0, /* Reverse */
+ 0, 0, /* Reverse, Vreverse */
0, /* Assert */
0, /* Assert not */
0, /* Assert behind */
@@ -264,7 +265,8 @@ static const uint8_t poptable[] = {
0, 0, 0, 0, /* SKIP, SKIP_ARG, THEN, THEN_ARG */
0, 0, /* COMMIT, COMMIT_ARG */
0, 0, 0, /* FAIL, ACCEPT, ASSERT_ACCEPT */
- 0, 0, 0 /* CLOSE, SKIPZERO, DEFINE */
+ 0, 0, 0, /* CLOSE, SKIPZERO, DEFINE */
+ 1, 1 /* \B and \b in UCP mode */
};
/* These 2 tables allow for compact code for testing for \D, \d, \S, \s, \W,
@@ -426,7 +428,7 @@ overflow. */
else
{
- uint32_t newsize = (rws->size >= UINT32_MAX/2)? UINT32_MAX/2 : rws->size * 2;
+ uint32_t newsize = (rws->size >= UINT32_MAX/(sizeof(int)*2))? UINT32_MAX/sizeof(int) : rws->size * 2;
uint32_t newsizeK = newsize/(1024/sizeof(int));
if (newsizeK + mb->heap_used > mb->heap_limit)
@@ -589,7 +591,7 @@ if (*this_start_code == OP_ASSERTBACK || *this_start_code == OP_ASSERTBACK_NOT)
end_code = this_start_code;
do
{
- size_t back = (size_t)GET(end_code, 2+LINK_SIZE);
+ size_t back = (size_t)GET2(end_code, 2+LINK_SIZE);
if (back > max_back) max_back = back;
end_code += GET(end_code, 1);
}
@@ -633,8 +635,8 @@ if (*this_start_code == OP_ASSERTBACK || *this_start_code == OP_ASSERTBACK_NOT)
end_code = this_start_code;
do
{
- uint32_t revlen = (end_code[1+LINK_SIZE] == OP_REVERSE)? 1 + LINK_SIZE : 0;
- size_t back = (revlen == 0)? 0 : (size_t)GET(end_code, 2+LINK_SIZE);
+ uint32_t revlen = (end_code[1+LINK_SIZE] == OP_REVERSE)? 1 + IMM2_SIZE : 0;
+ size_t back = (revlen == 0)? 0 : (size_t)GET2(end_code, 2+LINK_SIZE);
if (back <= gone_back)
{
int bstate = (int)(end_code - start_code + 1 + LINK_SIZE + revlen);
@@ -1100,6 +1102,8 @@ for (;;)
/*-----------------------------------------------------------------*/
case OP_WORD_BOUNDARY:
case OP_NOT_WORD_BOUNDARY:
+ case OP_NOT_UCP_WORD_BOUNDARY:
+ case OP_UCP_WORD_BOUNDARY:
{
int left_word, right_word;
@@ -1112,13 +1116,13 @@ for (;;)
#endif
GETCHARTEST(d, temp);
#ifdef SUPPORT_UNICODE
- if ((mb->poptions & PCRE2_UCP) != 0)
+ if (codevalue == OP_UCP_WORD_BOUNDARY ||
+ codevalue == OP_NOT_UCP_WORD_BOUNDARY)
{
- if (d == '_') left_word = TRUE; else
- {
- uint32_t cat = UCD_CATEGORY(d);
- left_word = (cat == ucp_L || cat == ucp_N);
- }
+ int chartype = UCD_CHARTYPE(d);
+ int category = PRIV(ucp_gentype)[chartype];
+ left_word = (category == ucp_L || category == ucp_N ||
+ chartype == ucp_Mn || chartype == ucp_Pc);
}
else
#endif
@@ -1137,13 +1141,13 @@ for (;;)
mb->last_used_ptr = temp;
}
#ifdef SUPPORT_UNICODE
- if ((mb->poptions & PCRE2_UCP) != 0)
+ if (codevalue == OP_UCP_WORD_BOUNDARY ||
+ codevalue == OP_NOT_UCP_WORD_BOUNDARY)
{
- if (c == '_') right_word = TRUE; else
- {
- uint32_t cat = UCD_CATEGORY(c);
- right_word = (cat == ucp_L || cat == ucp_N);
- }
+ int chartype = UCD_CHARTYPE(c);
+ int category = PRIV(ucp_gentype)[chartype];
+ right_word = (category == ucp_L || category == ucp_N ||
+ chartype == ucp_Mn || chartype == ucp_Pc);
}
else
#endif
@@ -1151,7 +1155,9 @@ for (;;)
}
else right_word = FALSE;
- if ((left_word == right_word) == (codevalue == OP_NOT_WORD_BOUNDARY))
+ if ((left_word == right_word) ==
+ (codevalue == OP_NOT_WORD_BOUNDARY ||
+ codevalue == OP_NOT_UCP_WORD_BOUNDARY))
{ ADD_ACTIVE(state_offset + 1, 0); }
}
break;
@@ -1168,6 +1174,7 @@ for (;;)
if (clen > 0)
{
BOOL OK;
+ int chartype;
const uint32_t *cp;
const ucd_record * prop = GET_UCD(c);
switch(code[1])
@@ -1177,8 +1184,9 @@ for (;;)
break;
case PT_LAMP:
- OK = prop->chartype == ucp_Lu || prop->chartype == ucp_Ll ||
- prop->chartype == ucp_Lt;
+ chartype = prop->chartype;
+ OK = chartype == ucp_Lu || chartype == ucp_Ll ||
+ chartype == ucp_Lt;
break;
case PT_GC:
@@ -1201,8 +1209,9 @@ for (;;)
/* These are specials for combination cases. */
case PT_ALNUM:
- OK = PRIV(ucp_gentype)[prop->chartype] == ucp_L ||
- PRIV(ucp_gentype)[prop->chartype] == ucp_N;
+ chartype = prop->chartype;
+ OK = PRIV(ucp_gentype)[chartype] == ucp_L ||
+ PRIV(ucp_gentype)[chartype] == ucp_N;
break;
/* Perl space used to exclude VT, but from Perl 5.18 it is included,
@@ -1225,12 +1234,20 @@ for (;;)
break;
case PT_WORD:
- OK = PRIV(ucp_gentype)[prop->chartype] == ucp_L ||
- PRIV(ucp_gentype)[prop->chartype] == ucp_N ||
- c == CHAR_UNDERSCORE;
+ chartype = prop->chartype;
+ OK = PRIV(ucp_gentype)[chartype] == ucp_L ||
+ PRIV(ucp_gentype)[chartype] == ucp_N ||
+ chartype == ucp_Mn || chartype == ucp_Pc;
break;
case PT_CLIST:
+#if PCRE2_CODE_UNIT_WIDTH == 32
+ if (c > MAX_UTF_CODE_POINT)
+ {
+ OK = FALSE;
+ break;
+ }
+#endif
cp = PRIV(ucd_caseless_sets) + code[2];
for (;;)
{
@@ -1440,6 +1457,7 @@ for (;;)
if (clen > 0)
{
BOOL OK;
+ int chartype;
const uint32_t *cp;
const ucd_record * prop = GET_UCD(c);
switch(code[2])
@@ -1449,8 +1467,8 @@ for (;;)
break;
case PT_LAMP:
- OK = prop->chartype == ucp_Lu || prop->chartype == ucp_Ll ||
- prop->chartype == ucp_Lt;
+ chartype = prop->chartype;
+ OK = chartype == ucp_Lu || chartype == ucp_Ll || chartype == ucp_Lt;
break;
case PT_GC:
@@ -1473,8 +1491,9 @@ for (;;)
/* These are specials for combination cases. */
case PT_ALNUM:
- OK = PRIV(ucp_gentype)[prop->chartype] == ucp_L ||
- PRIV(ucp_gentype)[prop->chartype] == ucp_N;
+ chartype = prop->chartype;
+ OK = PRIV(ucp_gentype)[chartype] == ucp_L ||
+ PRIV(ucp_gentype)[chartype] == ucp_N;
break;
/* Perl space used to exclude VT, but from Perl 5.18 it is included,
@@ -1497,12 +1516,20 @@ for (;;)
break;
case PT_WORD:
- OK = PRIV(ucp_gentype)[prop->chartype] == ucp_L ||
- PRIV(ucp_gentype)[prop->chartype] == ucp_N ||
- c == CHAR_UNDERSCORE;
+ chartype = prop->chartype;
+ OK = PRIV(ucp_gentype)[chartype] == ucp_L ||
+ PRIV(ucp_gentype)[chartype] == ucp_N ||
+ chartype == ucp_Mn || chartype == ucp_Pc;
break;
case PT_CLIST:
+#if PCRE2_CODE_UNIT_WIDTH == 32
+ if (c > MAX_UTF_CODE_POINT)
+ {
+ OK = FALSE;
+ break;
+ }
+#endif
cp = PRIV(ucd_caseless_sets) + code[3];
for (;;)
{
@@ -1695,6 +1722,7 @@ for (;;)
if (clen > 0)
{
BOOL OK;
+ int chartype;
const uint32_t *cp;
const ucd_record * prop = GET_UCD(c);
switch(code[2])
@@ -1704,8 +1732,8 @@ for (;;)
break;
case PT_LAMP:
- OK = prop->chartype == ucp_Lu || prop->chartype == ucp_Ll ||
- prop->chartype == ucp_Lt;
+ chartype = prop->chartype;
+ OK = chartype == ucp_Lu || chartype == ucp_Ll || chartype == ucp_Lt;
break;
case PT_GC:
@@ -1728,8 +1756,9 @@ for (;;)
/* These are specials for combination cases. */
case PT_ALNUM:
- OK = PRIV(ucp_gentype)[prop->chartype] == ucp_L ||
- PRIV(ucp_gentype)[prop->chartype] == ucp_N;
+ chartype = prop->chartype;
+ OK = PRIV(ucp_gentype)[chartype] == ucp_L ||
+ PRIV(ucp_gentype)[chartype] == ucp_N;
break;
/* Perl space used to exclude VT, but from Perl 5.18 it is included,
@@ -1752,12 +1781,20 @@ for (;;)
break;
case PT_WORD:
- OK = PRIV(ucp_gentype)[prop->chartype] == ucp_L ||
- PRIV(ucp_gentype)[prop->chartype] == ucp_N ||
- c == CHAR_UNDERSCORE;
+ chartype = prop->chartype;
+ OK = PRIV(ucp_gentype)[chartype] == ucp_L ||
+ PRIV(ucp_gentype)[chartype] == ucp_N ||
+ chartype == ucp_Mn || chartype == ucp_Pc;
break;
case PT_CLIST:
+#if PCRE2_CODE_UNIT_WIDTH == 32
+ if (c > MAX_UTF_CODE_POINT)
+ {
+ OK = FALSE;
+ break;
+ }
+#endif
cp = PRIV(ucd_caseless_sets) + code[3];
for (;;)
{
@@ -1975,6 +2012,7 @@ for (;;)
if (clen > 0)
{
BOOL OK;
+ int chartype;
const uint32_t *cp;
const ucd_record * prop = GET_UCD(c);
switch(code[1 + IMM2_SIZE + 1])
@@ -1984,8 +2022,8 @@ for (;;)
break;
case PT_LAMP:
- OK = prop->chartype == ucp_Lu || prop->chartype == ucp_Ll ||
- prop->chartype == ucp_Lt;
+ chartype = prop->chartype;
+ OK = chartype == ucp_Lu || chartype == ucp_Ll || chartype == ucp_Lt;
break;
case PT_GC:
@@ -2009,8 +2047,9 @@ for (;;)
/* These are specials for combination cases. */
case PT_ALNUM:
- OK = PRIV(ucp_gentype)[prop->chartype] == ucp_L ||
- PRIV(ucp_gentype)[prop->chartype] == ucp_N;
+ chartype = prop->chartype;
+ OK = PRIV(ucp_gentype)[chartype] == ucp_L ||
+ PRIV(ucp_gentype)[chartype] == ucp_N;
break;
/* Perl space used to exclude VT, but from Perl 5.18 it is included,
@@ -2033,12 +2072,20 @@ for (;;)
break;
case PT_WORD:
- OK = PRIV(ucp_gentype)[prop->chartype] == ucp_L ||
- PRIV(ucp_gentype)[prop->chartype] == ucp_N ||
- c == CHAR_UNDERSCORE;
+ chartype = prop->chartype;
+ OK = PRIV(ucp_gentype)[chartype] == ucp_L ||
+ PRIV(ucp_gentype)[chartype] == ucp_N ||
+ chartype == ucp_Mn || chartype == ucp_Pc;
break;
case PT_CLIST:
+#if PCRE2_CODE_UNIT_WIDTH == 32
+ if (c > MAX_UTF_CODE_POINT)
+ {
+ OK = FALSE;
+ break;
+ }
+#endif
cp = PRIV(ucd_caseless_sets) + code[1 + IMM2_SIZE + 2];
for (;;)
{
@@ -2894,7 +2941,6 @@ for (;;)
int *local_workspace;
PCRE2_SIZE *local_offsets;
RWS_anchor *rws = (RWS_anchor *)RWS;
- dfa_recursion_info *ri;
PCRE2_SPTR callpat = start_code + GET(code, 1);
uint32_t recno = (callpat == mb->start_code)? 0 :
GET2(callpat, 1 + LINK_SIZE);
@@ -2911,18 +2957,24 @@ for (;;)
rws->free -= RWS_RSIZE + RWS_OVEC_RSIZE;
/* Check for repeating a recursion without advancing the subject
- pointer. This should catch convoluted mutual recursions. (Some simple
- cases are caught at compile time.) */
+ pointer or last used character. This should catch convoluted mutual
+ recursions. (Some simple cases are caught at compile time.) */
- for (ri = mb->recursive; ri != NULL; ri = ri->prevrec)
- if (recno == ri->group_num && ptr == ri->subject_position)
+ for (dfa_recursion_info *ri = mb->recursive;
+ ri != NULL;
+ ri = ri->prevrec)
+ {
+ if (recno == ri->group_num && ptr == ri->subject_position &&
+ mb->last_used_ptr == ri->last_used_ptr)
return PCRE2_ERROR_RECURSELOOP;
+ }
/* Remember this recursion and where we started it so as to
catch infinite loops. */
new_recursive.group_num = recno;
new_recursive.subject_position = ptr;
+ new_recursive.last_used_ptr = mb->last_used_ptr;
new_recursive.prevrec = mb->recursive;
mb->recursive = &new_recursive;
@@ -3424,7 +3476,7 @@ anchored = (options & (PCRE2_ANCHORED|PCRE2_DFA_RESTART)) != 0 ||
where to start. */
startline = (re->flags & PCRE2_STARTLINE) != 0;
-firstline = (re->overall_options & PCRE2_FIRSTLINE) != 0;
+firstline = !anchored && (re->overall_options & PCRE2_FIRSTLINE) != 0;
bumpalong_limit = end_subject;
/* Initialize and set up the fixed fields in the callout block, with a pointer
@@ -3994,8 +4046,9 @@ for (;;)
match_data->ovector[0] = (PCRE2_SIZE)(start_match - subject);
match_data->ovector[1] = (PCRE2_SIZE)(end_subject - subject);
}
+ match_data->subject_length = length;
match_data->leftchar = (PCRE2_SIZE)(mb->start_used_ptr - subject);
- match_data->rightchar = (PCRE2_SIZE)( mb->last_used_ptr - subject);
+ match_data->rightchar = (PCRE2_SIZE)(mb->last_used_ptr - subject);
match_data->startchar = (PCRE2_SIZE)(start_match - subject);
match_data->rc = rc;
diff --git a/src/3rdparty/pcre2/src/pcre2_error.c b/src/3rdparty/pcre2/src/pcre2_error.c
index 09904c03e3..7fa997aa95 100644
--- a/src/3rdparty/pcre2/src/pcre2_error.c
+++ b/src/3rdparty/pcre2/src/pcre2_error.c
@@ -7,7 +7,7 @@ and semantics are as close as possible to those of the Perl 5 language.
Written by Philip Hazel
Original API code Copyright (c) 1997-2012 University of Cambridge
- New API code Copyright (c) 2016-2021 University of Cambridge
+ New API code Copyright (c) 2016-2024 University of Cambridge
-----------------------------------------------------------------------------
Redistribution and use in source and binary forms, with or without
@@ -82,7 +82,7 @@ static const unsigned char compile_error_texts[] =
"missing closing parenthesis\0"
/* 15 */
"reference to non-existent subpattern\0"
- "pattern passed as NULL\0"
+ "pattern passed as NULL with non-zero length\0"
"unrecognised compile-time option bit(s)\0"
"missing ) after (?# comment\0"
"parentheses are too deeply nested\0"
@@ -93,7 +93,7 @@ static const unsigned char compile_error_texts[] =
"internal error: code overflow\0"
"missing closing parenthesis for condition\0"
/* 25 */
- "lookbehind assertion is not fixed length\0"
+ "length of lookbehind assertion is not limited\0"
"a relative value of zero is not allowed\0"
"conditional subpattern contains more than two branches\0"
"assertion expected after (?( or (?(?C)\0"
@@ -187,6 +187,9 @@ static const unsigned char compile_error_texts[] =
"too many capturing groups (maximum 65535)\0"
"atomic assertion expected after (?( or (?(?C)\0"
"\\K is not allowed in lookarounds (but see PCRE2_EXTRA_ALLOW_LOOKAROUND_BSK)\0"
+ /* 100 */
+ "branch too long in variable-length lookbehind assertion\0"
+ "compiled pattern would be longer than the limit set by the application\0"
;
/* Match-time and UTF error texts are in the same format. */
@@ -272,6 +275,7 @@ static const unsigned char match_error_texts[] =
/* 65 */
"internal error - duplicate substitution match\0"
"PCRE2_MATCH_INVALID_UTF is not supported for DFA matching\0"
+ "INTERNAL ERROR: invalid substring offset\0"
;
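
The new compile error texts (numbers 100 and 101 above) and the new match error text reach applications through pcre2_get_error_message() as usual; a minimal hedged sketch, assuming the 8-bit library (the numbering follows the table above, everything else is illustrative):

/* Sketch only: print the text for the new compile error numbers. */
#define PCRE2_CODE_UNIT_WIDTH 8
#include <pcre2.h>
#include <stdio.h>

static void show_error(int errorcode)
{
PCRE2_UCHAR buffer[128];
if (pcre2_get_error_message(errorcode, buffer, 128) > 0)
  printf("error %d: %s\n", errorcode, (char *)buffer);
}

int main(void)
{
show_error(100);  /* branch too long in variable-length lookbehind assertion */
show_error(101);  /* compiled pattern would be longer than the application's limit */
return 0;
}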
diff --git a/src/3rdparty/pcre2/src/pcre2_extuni.c b/src/3rdparty/pcre2/src/pcre2_extuni.c
index b23946b0d1..4ed9f00c55 100644
--- a/src/3rdparty/pcre2/src/pcre2_extuni.c
+++ b/src/3rdparty/pcre2/src/pcre2_extuni.c
@@ -7,7 +7,7 @@ and semantics are as close as possible to those of the Perl 5 language.
Written by Philip Hazel
Original API code Copyright (c) 1997-2012 University of Cambridge
- New API code Copyright (c) 2016-2021 University of Cambridge
+ New API code Copyright (c) 2016-2024 University of Cambridge
-----------------------------------------------------------------------------
Redistribution and use in source and binary forms, with or without
@@ -75,7 +75,11 @@ return NULL;
* Match an extended grapheme sequence *
*************************************************/
-/*
+/* NOTE: The logic contained in this function is replicated in three special-
+purpose functions in the pcre2_jit_compile.c module. If the logic below is
+changed, they must be kept in step so that the interpreter and the JIT have the
+same behaviour.
+
Arguments:
c the first character
eptr pointer to next character
@@ -92,6 +96,7 @@ PCRE2_SPTR
PRIV(extuni)(uint32_t c, PCRE2_SPTR eptr, PCRE2_SPTR start_subject,
PCRE2_SPTR end_subject, BOOL utf, int *xcount)
{
+BOOL was_ep_ZWJ = FALSE;
int lgb = UCD_GRAPHBREAK(c);
while (eptr < end_subject)
@@ -102,6 +107,12 @@ while (eptr < end_subject)
rgb = UCD_GRAPHBREAK(c);
if ((PRIV(ucp_gbtable)[lgb] & (1u << rgb)) == 0) break;
+ /* ZWJ followed by Extended Pictographic is allowed only if the ZWJ was
+ preceded by Extended Pictographic. */
+
+ if (lgb == ucp_gbZWJ && rgb == ucp_gbExtended_Pictographic && !was_ep_ZWJ)
+ break;
+
/* Not breaking between Regional Indicators is allowed only if there
are an even number of preceding RIs. */
@@ -129,12 +140,15 @@ while (eptr < end_subject)
if ((ricount & 1) != 0) break; /* Grapheme break required */
}
- /* If Extend or ZWJ follows Extended_Pictographic, do not update lgb; this
- allows any number of them before a following Extended_Pictographic. */
+ /* Set a flag when ZWJ follows Extended Pictographic (with optional Extend in
+ between; see next statement). */
+
+ was_ep_ZWJ = (lgb == ucp_gbExtended_Pictographic && rgb == ucp_gbZWJ);
+
+ /* If Extend follows Extended_Pictographic, do not update lgb; this allows
+ any number of them before a following ZWJ. */
- if ((rgb != ucp_gbExtend && rgb != ucp_gbZWJ) ||
- lgb != ucp_gbExtended_Pictographic)
- lgb = rgb;
+ if (rgb != ucp_gbExtend || lgb != ucp_gbExtended_Pictographic) lgb = rgb;
eptr += len;
if (xcount != NULL) *xcount += 1;
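
The ZWJ change above narrows when \X absorbs an Extended_Pictographic after a ZWJ; a small sketch of the intended behaviour, assuming an 8-bit UTF-8 build with Unicode support (the WOMAN + ZWJ + ROCKET sample sequence is illustrative only, not taken from the patch):

/* Sketch only: \X should take the whole WOMAN + ZWJ + ROCKET sequence as one
   grapheme, because the ZWJ is preceded by an Extended_Pictographic character. */
#define PCRE2_CODE_UNIT_WIDTH 8
#include <pcre2.h>
#include <stdio.h>
#include <string.h>

int main(void)
{
int errorcode;
PCRE2_SIZE erroroffset;
/* U+1F469 U+200D U+1F680 encoded as UTF-8 (11 bytes) */
PCRE2_SPTR subject = (PCRE2_SPTR)"\xF0\x9F\x91\xA9\xE2\x80\x8D\xF0\x9F\x9A\x80";
pcre2_code *re = pcre2_compile((PCRE2_SPTR)"\\X", PCRE2_ZERO_TERMINATED,
                               PCRE2_UTF, &errorcode, &erroroffset, NULL);
pcre2_match_data *md;

if (re == NULL) return 1;   /* e.g. no Unicode support compiled in */
md = pcre2_match_data_create_from_pattern(re, NULL);

if (pcre2_match(re, subject, strlen((const char *)subject), 0, 0, md, NULL) > 0)
  {
  PCRE2_SIZE *ov = pcre2_get_ovector_pointer(md);
  printf("\\X matched %zu code units\n", (size_t)(ov[1] - ov[0]));  /* expect 11 */
  }

pcre2_match_data_free(md);
pcre2_code_free(re);
return 0;
}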
diff --git a/src/3rdparty/pcre2/src/pcre2_find_bracket.c b/src/3rdparty/pcre2/src/pcre2_find_bracket.c
index 70baa1394f..1290c5e9de 100644
--- a/src/3rdparty/pcre2/src/pcre2_find_bracket.c
+++ b/src/3rdparty/pcre2/src/pcre2_find_bracket.c
@@ -7,7 +7,7 @@ and semantics are as close as possible to those of the Perl 5 language.
Written by Philip Hazel
Original API code Copyright (c) 1997-2012 University of Cambridge
- New API code Copyright (c) 2016-2018 University of Cambridge
+ New API code Copyright (c) 2016-2023 University of Cambridge
-----------------------------------------------------------------------------
Redistribution and use in source and binary forms, with or without
@@ -41,9 +41,9 @@ POSSIBILITY OF SUCH DAMAGE.
/* This module contains a single function that scans through a compiled pattern
until it finds a capturing bracket with the given number, or, if the number is
-negative, an instance of OP_REVERSE for a lookbehind. The function is called
-from pcre2_compile.c and also from pcre2_study.c when finding the minimum
-matching length. */
+negative, an instance of OP_REVERSE or OP_VREVERSE for a lookbehind. The
+function is called from pcre2_compile.c and also from pcre2_study.c when
+finding the minimum matching length. */
#ifdef HAVE_CONFIG_H
@@ -85,7 +85,7 @@ for (;;)
/* Handle lookbehind */
- else if (c == OP_REVERSE)
+ else if (c == OP_REVERSE || c == OP_VREVERSE)
{
if (number < 0) return (PCRE2_UCHAR *)code;
code += PRIV(OP_lengths)[c];
diff --git a/src/3rdparty/pcre2/src/pcre2_internal.h b/src/3rdparty/pcre2/src/pcre2_internal.h
index 92dd3138d4..e5808182e6 100644
--- a/src/3rdparty/pcre2/src/pcre2_internal.h
+++ b/src/3rdparty/pcre2/src/pcre2_internal.h
@@ -7,7 +7,7 @@ and semantics are as close as possible to those of the Perl 5 language.
Written by Philip Hazel
Original API code Copyright (c) 1997-2012 University of Cambridge
- New API code Copyright (c) 2016-2022 University of Cambridge
+ New API code Copyright (c) 2016-2023 University of Cambridge
-----------------------------------------------------------------------------
Redistribution and use in source and binary forms, with or without
@@ -51,6 +51,24 @@ pcre2test.c with CODE_UNIT_WIDTH == 0. */
#error The use of both EBCDIC and SUPPORT_UNICODE is not supported.
#endif
+/* When compiling one of the libraries, the value of PCRE2_CODE_UNIT_WIDTH must
+be 8, 16, or 32. AutoTools and CMake ensure that this is always the case, but
+other building methods may not, so here is a check. It is cut out when
+building pcre2test, because that sets the value to zero. No other source should
+be including this file. There is no explicit way of forcing a compile to be
+abandoned, but trying to include a non-existent file seems cleanest. Otherwise
+there will be many irrelevant consequential errors. */
+
+#if (!defined PCRE2_BUILDING_PCRE2TEST && !defined PCRE2_DFTABLES) && \
+ (!defined PCRE2_CODE_UNIT_WIDTH || \
+ (PCRE2_CODE_UNIT_WIDTH != 8 && \
+ PCRE2_CODE_UNIT_WIDTH != 16 && \
+ PCRE2_CODE_UNIT_WIDTH != 32))
+#error PCRE2_CODE_UNIT_WIDTH must be defined as 8, 16, or 32.
+#include <AbandonCompile>
+#endif
+
+
/* Standard C headers */
#include <ctype.h>
@@ -119,20 +137,20 @@ only if it is not already set. */
#ifndef PCRE2_EXP_DECL
# ifdef _WIN32
# ifndef PCRE2_STATIC
-# define PCRE2_EXP_DECL extern __declspec(dllexport)
-# define PCRE2_EXP_DEFN __declspec(dllexport)
+# define PCRE2_EXP_DECL extern __declspec(dllexport)
+# define PCRE2_EXP_DEFN __declspec(dllexport)
# else
-# define PCRE2_EXP_DECL extern
+# define PCRE2_EXP_DECL extern PCRE2_EXPORT
# define PCRE2_EXP_DEFN
# endif
# else
# ifdef __cplusplus
-# define PCRE2_EXP_DECL extern "C"
+# define PCRE2_EXP_DECL extern "C" PCRE2_EXPORT
# else
-# define PCRE2_EXP_DECL extern
+# define PCRE2_EXP_DECL extern PCRE2_EXPORT
# endif
# ifndef PCRE2_EXP_DEFN
-# define PCRE2_EXP_DEFN PCRE2_EXP_DECL
+# define PCRE2_EXP_DEFN PCRE2_EXP_DECL
# endif
# endif
#endif
@@ -156,8 +174,8 @@ pcre2_match() because of the way it backtracks. */
#define PCRE2_SPTR CUSTOM_SUBJECT_PTR
#endif
-/* When checking for integer overflow in pcre2_compile(), we need to handle
-large integers. If a 64-bit integer type is available, we can use that.
+/* When checking for integer overflow, we need to handle large integers.
+If a 64-bit integer type is available, we can use that.
Otherwise we have to cast to double, which of course requires floating point
arithmetic. Handle this by defining a macro for the appropriate type. */
@@ -1281,7 +1299,7 @@ match. */
#define PT_ALNUM 6 /* Alphanumeric - the union of L and N */
#define PT_SPACE 7 /* Perl space - general category Z plus 9,10,12,13 */
#define PT_PXSPACE 8 /* POSIX space - Z plus 9,10,11,12,13 */
-#define PT_WORD 9 /* Word - L plus N plus underscore */
+#define PT_WORD 9 /* Word - L, N, Mn, or Pc */
#define PT_CLIST 10 /* Pseudo-property: match character list */
#define PT_UCNC 11 /* Universal Character nameable character */
#define PT_BIDICL 12 /* Specified bidi class */
@@ -1297,6 +1315,7 @@ table. */
#define PT_PXGRAPH 14 /* [:graph:] - characters that mark the paper */
#define PT_PXPRINT 15 /* [:print:] - [:graph:] plus non-control spaces */
#define PT_PXPUNCT 16 /* [:punct:] - punctuation characters */
+#define PT_PXXDIGIT 17 /* [:xdigit:] - hex digits */
/* This value is used when parsing \p and \P escapes to indicate that neither
\p{script:...} nor \p{scx:...} has been encountered. */
@@ -1327,6 +1346,12 @@ mode rather than an escape sequence. It is also used for [^] in JavaScript
compatibility mode, and for \C in non-utf mode. In non-DOTALL mode, "." behaves
like \N.
+ESC_ub is a special return from check_escape() when, in BSUX mode, \u{ is not
+followed by hex digits and }, in which case it should mean a literal "u"
+followed by a literal "{". This hack is necessary for cases like \u{ 12}
+because without it, this is interpreted as u{12} now that spaces are allowed in
+quantifiers.
+
Negative numbers are used to encode a backreference (\1, \2, \3, etc.) in
check_escape(). There are tests in the code for an escape greater than ESC_b
and less than ESC_Z to detect the types that may be repeated. These are the
@@ -1336,7 +1361,7 @@ consume a character, that code will have to change. */
enum { ESC_A = 1, ESC_G, ESC_K, ESC_B, ESC_b, ESC_D, ESC_d, ESC_S, ESC_s,
ESC_W, ESC_w, ESC_N, ESC_dum, ESC_C, ESC_P, ESC_p, ESC_R, ESC_H,
ESC_h, ESC_V, ESC_v, ESC_X, ESC_Z, ESC_z,
- ESC_E, ESC_Q, ESC_g, ESC_k };
+ ESC_E, ESC_Q, ESC_g, ESC_k, ESC_ub };
/********************** Opcode definitions ******************/
@@ -1372,8 +1397,8 @@ enum {
OP_SOD, /* 1 Start of data: \A */
OP_SOM, /* 2 Start of match (subject + offset): \G */
OP_SET_SOM, /* 3 Set start of match (\K) */
- OP_NOT_WORD_BOUNDARY, /* 4 \B */
- OP_WORD_BOUNDARY, /* 5 \b */
+ OP_NOT_WORD_BOUNDARY, /* 4 \B -- see also OP_NOT_UCP_WORD_BOUNDARY */
+ OP_WORD_BOUNDARY, /* 5 \b -- see also OP_UCP_WORD_BOUNDARY */
OP_NOT_DIGIT, /* 6 \D */
OP_DIGIT, /* 7 \d */
OP_NOT_WHITESPACE, /* 8 \S */
@@ -1547,78 +1572,85 @@ enum {
/* The assertions must come before BRA, CBRA, ONCE, and COND. */
OP_REVERSE, /* 125 Move pointer back - used in lookbehind assertions */
- OP_ASSERT, /* 126 Positive lookahead */
- OP_ASSERT_NOT, /* 127 Negative lookahead */
- OP_ASSERTBACK, /* 128 Positive lookbehind */
- OP_ASSERTBACK_NOT, /* 129 Negative lookbehind */
- OP_ASSERT_NA, /* 130 Positive non-atomic lookahead */
- OP_ASSERTBACK_NA, /* 131 Positive non-atomic lookbehind */
+ OP_VREVERSE, /* 126 Move pointer back - variable */
+ OP_ASSERT, /* 127 Positive lookahead */
+ OP_ASSERT_NOT, /* 128 Negative lookahead */
+ OP_ASSERTBACK, /* 129 Positive lookbehind */
+ OP_ASSERTBACK_NOT, /* 130 Negative lookbehind */
+ OP_ASSERT_NA, /* 131 Positive non-atomic lookahead */
+ OP_ASSERTBACK_NA, /* 132 Positive non-atomic lookbehind */
/* ONCE, SCRIPT_RUN, BRA, BRAPOS, CBRA, CBRAPOS, and COND must come
immediately after the assertions, with ONCE first, as there's a test for >=
ONCE for a subpattern that isn't an assertion. The POS versions must
immediately follow the non-POS versions in each case. */
- OP_ONCE, /* 132 Atomic group, contains captures */
- OP_SCRIPT_RUN, /* 133 Non-capture, but check characters' scripts */
- OP_BRA, /* 134 Start of non-capturing bracket */
- OP_BRAPOS, /* 135 Ditto, with unlimited, possessive repeat */
- OP_CBRA, /* 136 Start of capturing bracket */
- OP_CBRAPOS, /* 137 Ditto, with unlimited, possessive repeat */
- OP_COND, /* 138 Conditional group */
+ OP_ONCE, /* 133 Atomic group, contains captures */
+ OP_SCRIPT_RUN, /* 134 Non-capture, but check characters' scripts */
+ OP_BRA, /* 135 Start of non-capturing bracket */
+ OP_BRAPOS, /* 136 Ditto, with unlimited, possessive repeat */
+ OP_CBRA, /* 137 Start of capturing bracket */
+ OP_CBRAPOS, /* 138 Ditto, with unlimited, possessive repeat */
+ OP_COND, /* 139 Conditional group */
/* These five must follow the previous five, in the same order. There's a
check for >= SBRA to distinguish the two sets. */
- OP_SBRA, /* 139 Start of non-capturing bracket, check empty */
- OP_SBRAPOS, /* 149 Ditto, with unlimited, possessive repeat */
- OP_SCBRA, /* 141 Start of capturing bracket, check empty */
- OP_SCBRAPOS, /* 142 Ditto, with unlimited, possessive repeat */
- OP_SCOND, /* 143 Conditional group, check empty */
+ OP_SBRA, /* 140 Start of non-capturing bracket, check empty */
+ OP_SBRAPOS, /* 141 Ditto, with unlimited, possessive repeat */
+ OP_SCBRA, /* 142 Start of capturing bracket, check empty */
+ OP_SCBRAPOS, /* 143 Ditto, with unlimited, possessive repeat */
+ OP_SCOND, /* 144 Conditional group, check empty */
/* The next two pairs must (respectively) be kept together. */
- OP_CREF, /* 144 Used to hold a capture number as condition */
- OP_DNCREF, /* 145 Used to point to duplicate names as a condition */
- OP_RREF, /* 146 Used to hold a recursion number as condition */
- OP_DNRREF, /* 147 Used to point to duplicate names as a condition */
- OP_FALSE, /* 148 Always false (used by DEFINE and VERSION) */
- OP_TRUE, /* 149 Always true (used by VERSION) */
+ OP_CREF, /* 145 Used to hold a capture number as condition */
+ OP_DNCREF, /* 146 Used to point to duplicate names as a condition */
+ OP_RREF, /* 147 Used to hold a recursion number as condition */
+ OP_DNRREF, /* 148 Used to point to duplicate names as a condition */
+ OP_FALSE, /* 149 Always false (used by DEFINE and VERSION) */
+ OP_TRUE, /* 150 Always true (used by VERSION) */
- OP_BRAZERO, /* 150 These two must remain together and in this */
- OP_BRAMINZERO, /* 151 order. */
- OP_BRAPOSZERO, /* 152 */
+ OP_BRAZERO, /* 151 These two must remain together and in this */
+ OP_BRAMINZERO, /* 152 order. */
+ OP_BRAPOSZERO, /* 153 */
/* These are backtracking control verbs */
- OP_MARK, /* 153 always has an argument */
- OP_PRUNE, /* 154 */
- OP_PRUNE_ARG, /* 155 same, but with argument */
- OP_SKIP, /* 156 */
- OP_SKIP_ARG, /* 157 same, but with argument */
- OP_THEN, /* 158 */
- OP_THEN_ARG, /* 159 same, but with argument */
- OP_COMMIT, /* 160 */
- OP_COMMIT_ARG, /* 161 same, but with argument */
+ OP_MARK, /* 154 always has an argument */
+ OP_PRUNE, /* 155 */
+ OP_PRUNE_ARG, /* 156 same, but with argument */
+ OP_SKIP, /* 157 */
+ OP_SKIP_ARG, /* 158 same, but with argument */
+ OP_THEN, /* 159 */
+ OP_THEN_ARG, /* 160 same, but with argument */
+ OP_COMMIT, /* 161 */
+ OP_COMMIT_ARG, /* 162 same, but with argument */
/* These are forced failure and success verbs. FAIL and ACCEPT do accept an
argument, but these cases can be compiled as, for example, (*MARK:X)(*FAIL)
without the need for a special opcode. */
- OP_FAIL, /* 162 */
- OP_ACCEPT, /* 163 */
- OP_ASSERT_ACCEPT, /* 164 Used inside assertions */
- OP_CLOSE, /* 165 Used before OP_ACCEPT to close open captures */
+ OP_FAIL, /* 163 */
+ OP_ACCEPT, /* 164 */
+ OP_ASSERT_ACCEPT, /* 165 Used inside assertions */
+ OP_CLOSE, /* 166 Used before OP_ACCEPT to close open captures */
/* This is used to skip a subpattern with a {0} quantifier */
- OP_SKIPZERO, /* 166 */
+ OP_SKIPZERO, /* 167 */
/* This is used to identify a DEFINE group during compilation so that it can
be checked for having only one branch. It is changed to OP_FALSE before
compilation finishes. */
- OP_DEFINE, /* 167 */
+ OP_DEFINE, /* 168 */
+
+ /* These opcodes replace their normal counterparts in UCP mode when
+ PCRE2_EXTRA_ASCII_BSW is not set. */
+
+ OP_NOT_UCP_WORD_BOUNDARY, /* 169 */
+ OP_UCP_WORD_BOUNDARY, /* 170 */
/* This is not an opcode, but is used to check that tables indexed by opcode
are the correct length, in order to catch updating errors - there have been
@@ -1664,7 +1696,7 @@ some cases doesn't actually use these names at all). */
"class", "nclass", "xclass", "Ref", "Refi", "DnRef", "DnRefi", \
"Recurse", "Callout", "CalloutStr", \
"Alt", "Ket", "KetRmax", "KetRmin", "KetRpos", \
- "Reverse", "Assert", "Assert not", \
+ "Reverse", "VReverse", "Assert", "Assert not", \
"Assert back", "Assert back not", \
"Non-atomic assert", "Non-atomic assert back", \
"Once", \
@@ -1679,7 +1711,7 @@ some cases doesn't actually use these names at all). */
"*MARK", "*PRUNE", "*PRUNE", "*SKIP", "*SKIP", \
"*THEN", "*THEN", "*COMMIT", "*COMMIT", "*FAIL", \
"*ACCEPT", "*ASSERT_ACCEPT", \
- "Close", "Skip zero", "Define"
+ "Close", "Skip zero", "Define", "\\B (ucp)", "\\b (ucp)"
/* This macro defines the length of fixed length operations in the compiled
@@ -1746,7 +1778,8 @@ in UTF-8 mode. The code that uses this table must know about such things. */
1+LINK_SIZE, /* KetRmax */ \
1+LINK_SIZE, /* KetRmin */ \
1+LINK_SIZE, /* KetRpos */ \
- 1+LINK_SIZE, /* Reverse */ \
+ 1+IMM2_SIZE, /* Reverse */ \
+ 1+2*IMM2_SIZE, /* VReverse */ \
1+LINK_SIZE, /* Assert */ \
1+LINK_SIZE, /* Assert not */ \
1+LINK_SIZE, /* Assert behind */ \
@@ -1775,7 +1808,8 @@ in UTF-8 mode. The code that uses this table must know about such things. */
1, 3, /* COMMIT, COMMIT_ARG */ \
1, 1, 1, /* FAIL, ACCEPT, ASSERT_ACCEPT */ \
1+IMM2_SIZE, 1, /* CLOSE, SKIPZERO */ \
- 1 /* DEFINE */
+ 1, /* DEFINE */ \
+ 1, 1 /* \B and \b in UCP mode */
/* A magic value for OP_RREF to indicate the "any recursion" condition. */
@@ -2042,6 +2076,9 @@ extern void * _pcre2_memmove(void *, const void *, size_t);
#endif
#endif /* PCRE2_CODE_UNIT_WIDTH */
+
+extern BOOL PRIV(ckd_smul)(PCRE2_SIZE *, int, int);
+
#endif /* PCRE2_INTERNAL_H_IDEMPOTENT_GUARD */
/* End of pcre2_internal.h */
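
The new OP_UCP_WORD_BOUNDARY / OP_NOT_UCP_WORD_BOUNDARY opcodes are selected at compile time from the option bits rather than consulting PCRE2_UCP at match time; a hedged sketch of the two compile paths an application can trigger (the pattern is illustrative, and the opcode choice is an expectation based on the comment above, not verified output):

/* Sketch only, not part of the patch. */
#define PCRE2_CODE_UNIT_WIDTH 8
#include <pcre2.h>

static pcre2_code *compile_word_boundary(int ascii_bsw)
{
pcre2_compile_context *cctx = pcre2_compile_context_create(NULL);
int errorcode;
PCRE2_SIZE erroroffset;
pcre2_code *re;

/* With PCRE2_UCP alone, \b is expected to become OP_UCP_WORD_BOUNDARY;
   PCRE2_EXTRA_ASCII_BSW keeps the traditional ASCII OP_WORD_BOUNDARY. */
if (ascii_bsw)
  pcre2_set_compile_extra_options(cctx, PCRE2_EXTRA_ASCII_BSW);

re = pcre2_compile((PCRE2_SPTR)"\\bword\\b", PCRE2_ZERO_TERMINATED,
                   PCRE2_UTF | PCRE2_UCP, &errorcode, &erroroffset, cctx);
pcre2_compile_context_free(cctx);
return re;   /* NULL on error */
}

int main(void)
{
pcre2_code *ucp_re = compile_word_boundary(0);
pcre2_code *ascii_re = compile_word_boundary(1);
if (ucp_re != NULL) pcre2_code_free(ucp_re);
if (ascii_re != NULL) pcre2_code_free(ascii_re);
return 0;
}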
diff --git a/src/3rdparty/pcre2/src/pcre2_intmodedep.h b/src/3rdparty/pcre2/src/pcre2_intmodedep.h
index 390e737a6e..9bd9e694a4 100644
--- a/src/3rdparty/pcre2/src/pcre2_intmodedep.h
+++ b/src/3rdparty/pcre2/src/pcre2_intmodedep.h
@@ -7,7 +7,7 @@ and semantics are as close as possible to those of the Perl 5 language.
Written by Philip Hazel
Original API code Copyright (c) 1997-2012 University of Cambridge
- New API code Copyright (c) 2016-2022 University of Cambridge
+ New API code Copyright (c) 2016-2024 University of Cambridge
-----------------------------------------------------------------------------
Redistribution and use in source and binary forms, with or without
@@ -568,10 +568,12 @@ typedef struct pcre2_real_compile_context {
void *stack_guard_data;
const uint8_t *tables;
PCRE2_SIZE max_pattern_length;
+ PCRE2_SIZE max_pattern_compiled_length;
uint16_t bsr_convention;
uint16_t newline_convention;
uint32_t parens_nest_limit;
uint32_t extra_options;
+ uint32_t max_varlookbehind;
} pcre2_real_compile_context;
/* The real match context structure. */
@@ -605,12 +607,12 @@ defined specially because it is required in pcre2_serialize_decode() when
copying the size from possibly unaligned memory into a variable of the same
type. Use a macro rather than a typedef to avoid compiler warnings when this
file is included multiple times by pcre2test. LOOKBEHIND_MAX specifies the
-largest lookbehind that is supported. (OP_REVERSE in a pattern has a 16-bit
-argument in 8-bit and 16-bit modes, so we need no more than a 16-bit field
-here.) */
+largest lookbehind that is supported. (OP_REVERSE and OP_VREVERSE in a pattern
+have 16-bit arguments in 8-bit and 16-bit modes, so we need no more than a
+16-bit field here.) */
#undef CODE_BLOCKSIZE_TYPE
-#define CODE_BLOCKSIZE_TYPE size_t
+#define CODE_BLOCKSIZE_TYPE PCRE2_SIZE
#undef LOOKBEHIND_MAX
#define LOOKBEHIND_MAX UINT16_MAX
@@ -658,6 +660,7 @@ typedef struct pcre2_real_match_data {
PCRE2_SPTR mark; /* Pointer to last mark */
struct heapframe *heapframes; /* Backtracking frames heap memory */
PCRE2_SIZE heapframes_size; /* Malloc-ed size */
+ PCRE2_SIZE subject_length; /* Subject length */
PCRE2_SIZE leftchar; /* Offset to leftmost code unit */
PCRE2_SIZE rightchar; /* Offset to rightmost code unit */
PCRE2_SIZE startchar; /* Offset to starting code unit */
@@ -675,8 +678,8 @@ typedef struct pcre2_real_match_data {
#ifndef PCRE2_PCRE2TEST
-/* Structures for checking for mutual recursion when scanning compiled or
-parsed code. */
+/* Structures for checking for mutual function recursion when scanning compiled
+or parsed code. */
typedef struct recurse_check {
struct recurse_check *prev;
@@ -688,7 +691,7 @@ typedef struct parsed_recurse_check {
uint32_t *groupptr;
} parsed_recurse_check;
-/* Structure for building a cache when filling in recursion offsets. */
+/* Structure for building a cache when filling in pattern recursion offsets. */
typedef struct recurse_cache {
PCRE2_SPTR group;
@@ -734,7 +737,6 @@ typedef struct compile_block {
uint16_t name_entry_size; /* Size of each entry */
uint16_t parens_depth; /* Depth of nested parentheses */
uint16_t assert_depth; /* Depth of nested assertions */
- open_capitem *open_caps; /* Chain of open capture items */
named_group *named_groups; /* Points to vector in pre-compile */
uint32_t named_group_list_size; /* Number of entries in the list */
uint32_t external_options; /* External (initial) options */
@@ -752,10 +754,11 @@ typedef struct compile_block {
uint32_t class_range_end; /* Overall class range end */
PCRE2_UCHAR nl[4]; /* Newline string when fixed length */
uint32_t req_varyopt; /* "After variable item" flag for reqbyte */
- int max_lookbehind; /* Maximum lookbehind (characters) */
+ uint32_t max_varlookbehind; /* Limit for variable lookbehinds */
+ int max_lookbehind; /* Maximum lookbehind encountered (characters) */
BOOL had_accept; /* (*ACCEPT) encountered */
BOOL had_pruneorskip; /* (*PRUNE) or (*SKIP) encountered */
- BOOL had_recurse; /* Had a recursion or subroutine call */
+ BOOL had_recurse; /* Had a pattern recursion or subroutine call */
BOOL dupnames; /* Duplicate names exist */
} compile_block;
@@ -773,6 +776,7 @@ call within the pattern when running pcre2_dfa_match(). */
typedef struct dfa_recursion_info {
struct dfa_recursion_info *prevrec;
PCRE2_SPTR subject_position;
+ PCRE2_SPTR last_used_ptr;
uint32_t group_num;
} dfa_recursion_info;
@@ -793,7 +797,7 @@ typedef struct heapframe {
PCRE2_SIZE length; /* Used for character, string, or code lengths */
PCRE2_SIZE back_frame; /* Amount to subtract on RRETURN */
PCRE2_SIZE temp_size; /* Used for short-term PCRE2_SIZE values */
- uint32_t rdepth; /* "Recursion" depth */
+ uint32_t rdepth; /* Function "recursion" depth within pcre2_match() */
uint32_t group_frame_type; /* Type information for group frames */
uint32_t temp_32[4]; /* Used for short-term 32-bit or BOOL values */
uint8_t return_id; /* Where to go on in internal "return" */
@@ -826,14 +830,15 @@ typedef struct heapframe {
allows for exactly the right size ovector for the number of capturing
parentheses. (See also the comment for pcre2_real_match_data above.) */
- PCRE2_SPTR eptr; /* MUST BE FIRST */
- PCRE2_SPTR start_match; /* Can be adjusted by \K */
- PCRE2_SPTR mark; /* Most recent mark on the success path */
- uint32_t current_recurse; /* Current (deepest) recursion number */
- uint32_t capture_last; /* Most recent capture */
- PCRE2_SIZE last_group_offset; /* Saved offset to most recent group frame */
- PCRE2_SIZE offset_top; /* Offset after highest capture */
- PCRE2_SIZE ovector[131072]; /* Must be last in the structure */
+ PCRE2_SPTR eptr; /* MUST BE FIRST */
+ PCRE2_SPTR start_match; /* Can be adjusted by \K */
+ PCRE2_SPTR mark; /* Most recent mark on the success path */
+ PCRE2_SPTR recurse_last_used; /* Last character used at time of pattern recursion */
+ uint32_t current_recurse; /* Group number of current (deepest) pattern recursion */
+ uint32_t capture_last; /* Most recent capture */
+ PCRE2_SIZE last_group_offset; /* Saved offset to most recent group frame */
+ PCRE2_SIZE offset_top; /* Offset after highest capture */
+ PCRE2_SIZE ovector[131072]; /* Must be last in the structure */
} heapframe;
/* This typedef is a check that the size of the heapframe structure is a
@@ -858,7 +863,7 @@ doing traditional NFA matching (pcre2_match() and friends). */
typedef struct match_block {
pcre2_memctl memctl; /* For general use */
- PCRE2_SIZE heap_limit; /* As it says */
+ uint32_t heap_limit; /* As it says */
uint32_t match_limit; /* As it says */
uint32_t match_limit_depth; /* As it says */
uint32_t match_call_count; /* Number of times a new frame is created */
@@ -875,10 +880,11 @@ typedef struct match_block {
uint16_t name_count; /* Number of names in name table */
uint16_t name_entry_size; /* Size of entry in names table */
PCRE2_SPTR name_table; /* Table of group names */
- PCRE2_SPTR start_code; /* For use when recursing */
+ PCRE2_SPTR start_code; /* For use in pattern recursion */
PCRE2_SPTR start_subject; /* Start of the subject string */
PCRE2_SPTR check_subject; /* Where UTF-checked from */
- PCRE2_SPTR end_subject; /* End of the subject string */
+ PCRE2_SPTR end_subject; /* Usable end of the subject string */
+ PCRE2_SPTR true_end_subject; /* Actual end of the subject string */
PCRE2_SPTR end_match_ptr; /* Subject position at end match */
PCRE2_SPTR start_used_ptr; /* Earliest consulted character */
PCRE2_SPTR last_used_ptr; /* Latest consulted character */
@@ -886,7 +892,7 @@ typedef struct match_block {
PCRE2_SPTR nomatch_mark; /* Mark pointer to pass back on failure */
PCRE2_SPTR verb_ecode_ptr; /* For passing back info */
PCRE2_SPTR verb_skip_ptr; /* For passing back a (*SKIP) name */
- uint32_t verb_current_recurse; /* Current recurse when (*VERB) happens */
+ uint32_t verb_current_recurse; /* Current recursion group when (*VERB) happens */
uint32_t moptions; /* Match options */
uint32_t poptions; /* Pattern options */
uint32_t skip_arg_count; /* For counting SKIP_ARGs */
@@ -911,7 +917,7 @@ typedef struct dfa_match_block {
PCRE2_SPTR last_used_ptr; /* Latest consulted character */
const uint8_t *tables; /* Character tables */
PCRE2_SIZE start_offset; /* The start offset value */
- PCRE2_SIZE heap_limit; /* As it says */
+ uint32_t heap_limit; /* As it says */
PCRE2_SIZE heap_used; /* As it says */
uint32_t match_limit; /* As it says */
uint32_t match_limit_depth; /* As it says */
@@ -926,7 +932,7 @@ typedef struct dfa_match_block {
pcre2_callout_block *cb; /* Points to a callout block */
void *callout_data; /* To pass back to callouts */
int (*callout)(pcre2_callout_block *,void *); /* Callout function or NULL */
- dfa_recursion_info *recursive; /* Linked list of recursion data */
+ dfa_recursion_info *recursive; /* Linked list of pattern recursion data */
} dfa_match_block;
#endif /* PCRE2_PCRE2TEST */
diff --git a/src/3rdparty/pcre2/src/pcre2_jit_compile.c b/src/3rdparty/pcre2/src/pcre2_jit_compile.c
index 0afd27c5ee..92f4fb858b 100644
--- a/src/3rdparty/pcre2/src/pcre2_jit_compile.c
+++ b/src/3rdparty/pcre2/src/pcre2_jit_compile.c
@@ -8,7 +8,7 @@ and semantics are as close as possible to those of the Perl 5 language.
Written by Philip Hazel
This module by Zoltan Herczeg
Original API code Copyright (c) 1997-2012 University of Cambridge
- New API code Copyright (c) 2016-2021 University of Cambridge
+ New API code Copyright (c) 2016-2024 University of Cambridge
-----------------------------------------------------------------------------
Redistribution and use in source and binary forms, with or without
@@ -43,6 +43,12 @@ POSSIBILITY OF SUCH DAMAGE.
#include "config.h"
#endif
+#if defined(__has_feature)
+#if __has_feature(memory_sanitizer)
+#include <sanitizer/msan_interface.h>
+#endif /* __has_feature(memory_sanitizer) */
+#endif /* defined(__has_feature) */
+
#include "pcre2_internal.h"
#ifdef SUPPORT_JIT
@@ -236,12 +242,21 @@ code generator. It is allocated by compile_matchingpath, and contains
the arguments for compile_backtrackingpath. Must be the first member
of its descendants. */
typedef struct backtrack_common {
- /* Concatenation stack. */
+ /* Backtracking path of an opcode, which falls back
+ to our opcode, if it cannot resume matching. */
struct backtrack_common *prev;
- jump_list *nextbacktracks;
- /* Internal stack (for component operators). */
+  /* Backtracks for opcodes without a backtracking path.
+ These opcodes are between 'prev' and the current
+ opcode, and they never resume the match. */
+ jump_list *simple_backtracks;
+ /* Internal backtracking list for block constructs
+ which contains other opcodes, such as brackets,
+ asserts, conditionals, etc. */
struct backtrack_common *top;
- jump_list *topbacktracks;
+  /* Backtracks used internally by the opcode. For component
+  opcodes, this list is also used by those opcodes without a
+  backtracking path that follow the 'top' backtrack. */
+ jump_list *own_backtracks;
/* Opcode pointer. */
PCRE2_SPTR cc;
} backtrack_common;
@@ -273,7 +288,7 @@ typedef struct bracket_backtrack {
/* For OP_ONCE. Less than 0 if not needed. */
int framesize;
/* For brackets with >3 alternatives. */
- struct sljit_put_label *matching_put_label;
+ struct sljit_jump *matching_mov_addr;
} u;
/* Points to our private memory word on the stack. */
int private_data_ptr;
@@ -338,6 +353,12 @@ typedef struct recurse_backtrack {
BOOL inlined_pattern;
} recurse_backtrack;
+typedef struct vreverse_backtrack {
+ backtrack_common common;
+ /* Return to the matching path. */
+ struct sljit_label *matchingpath;
+} vreverse_backtrack;
+
#define OP_THEN_TRAP OP_TABLE_LENGTH
typedef struct then_trap_backtrack {
@@ -404,7 +425,9 @@ typedef struct compiler_common {
sljit_s32 match_end_ptr;
/* Points to the marked string. */
sljit_s32 mark_ptr;
- /* Recursive control verb management chain. */
+ /* Head of the recursive control verb management chain.
+     Each item must have a previous offset and a type
+     (see control_types) value. See do_search_mark. */
sljit_s32 control_head_ptr;
/* Points to the last matched capture block index. */
sljit_s32 capture_last_ptr;
@@ -474,12 +497,15 @@ typedef struct compiler_common {
jump_list *stackalloc;
jump_list *revertframes;
jump_list *wordboundary;
+ jump_list *ucp_wordboundary;
jump_list *anynewline;
jump_list *hspace;
jump_list *vspace;
jump_list *casefulcmp;
jump_list *caselesscmp;
jump_list *reset_match;
+ /* Same as reset_match, but resets the STR_PTR as well. */
+ jump_list *restart_match;
BOOL unset_backref;
BOOL alt_circumflex;
#ifdef SUPPORT_UNICODE
@@ -636,8 +662,8 @@ the start pointers when the end of the capturing group has not yet reached. */
sljit_set_label(sljit_emit_cmp(compiler, (type), (src1), (src1w), (src2), (src2w)), (label))
#define OP_FLAGS(op, dst, dstw, type) \
sljit_emit_op_flags(compiler, (op), (dst), (dstw), (type))
-#define CMOV(type, dst_reg, src, srcw) \
- sljit_emit_cmov(compiler, (type), (dst_reg), (src), (srcw))
+#define SELECT(type, dst_reg, src1, src1w, src2_reg) \
+ sljit_emit_select(compiler, (type), (dst_reg), (src1), (src1w), (src2_reg))
#define GET_LOCAL_BASE(dst, dstw, offset) \
sljit_get_local_base(compiler, (dst), (dstw), (offset))
@@ -857,6 +883,21 @@ SLJIT_ASSERT(*cc >= OP_KET && *cc <= OP_KETRPOS);
return count;
}
+static BOOL find_vreverse(PCRE2_SPTR cc)
+{
+ SLJIT_ASSERT(*cc == OP_ASSERTBACK || *cc == OP_ASSERTBACK_NOT || *cc == OP_ASSERTBACK_NA);
+
+ do
+ {
+ if (cc[1 + LINK_SIZE] == OP_VREVERSE)
+ return TRUE;
+ cc += GET(cc, 1);
+ }
+ while (*cc == OP_ALT);
+
+ return FALSE;
+}
+
/* Functions whose might need modification for all new supported opcodes:
next_opcode
check_opcode_types
@@ -927,6 +968,7 @@ switch(*cc)
case OP_KETRMIN:
case OP_KETRPOS:
case OP_REVERSE:
+ case OP_VREVERSE:
case OP_ASSERT:
case OP_ASSERT_NOT:
case OP_ASSERTBACK:
@@ -963,6 +1005,8 @@ switch(*cc)
case OP_ASSERT_ACCEPT:
case OP_CLOSE:
case OP_SKIPZERO:
+ case OP_NOT_UCP_WORD_BOUNDARY:
+ case OP_UCP_WORD_BOUNDARY:
return cc + PRIV(OP_lengths)[*cc];
case OP_CHAR:
@@ -1231,34 +1275,37 @@ while (cc < ccend)
return TRUE;
}
-#define EARLY_FAIL_ENHANCE_MAX (1 + 3)
+#define EARLY_FAIL_ENHANCE_MAX (3 + 3)
/*
-start:
- 0 - skip / early fail allowed
- 1 - only early fail with range allowed
- >1 - (start - 1) early fail is processed
+  Start represents the number of allowed early fail enhancements
-return: current number of iterators enhanced with fast fail
+  The values 0-2 have a special meaning:
+ 0 - skip is allowed for all iterators
+ 1 - fail is allowed for all iterators
+ 2 - fail is allowed for greedy iterators
+ 3 - only ranged early fail is allowed
+ >3 - (start - 3) number of remaining ranged early fails allowed
+
+return: the updated value of start
*/
-static int detect_early_fail(compiler_common *common, PCRE2_SPTR cc, int *private_data_start,
- sljit_s32 depth, int start, BOOL fast_forward_allowed)
+static int detect_early_fail(compiler_common *common, PCRE2_SPTR cc,
+ int *private_data_start, sljit_s32 depth, int start)
{
PCRE2_SPTR begin = cc;
PCRE2_SPTR next_alt;
PCRE2_SPTR end;
PCRE2_SPTR accelerated_start;
-BOOL prev_fast_forward_allowed;
int result = 0;
-int count;
+int count, prev_count;
SLJIT_ASSERT(*cc == OP_ONCE || *cc == OP_BRA || *cc == OP_CBRA);
SLJIT_ASSERT(*cc != OP_CBRA || common->optimized_cbracket[GET2(cc, 1 + LINK_SIZE)] != 0);
SLJIT_ASSERT(start < EARLY_FAIL_ENHANCE_MAX);
next_alt = cc + GET(cc, 1);
-if (*next_alt == OP_ALT)
- fast_forward_allowed = FALSE;
+if (*next_alt == OP_ALT && start < 1)
+ start = 1;
do
{
@@ -1282,6 +1329,8 @@ do
case OP_CIRCM:
case OP_DOLL:
case OP_DOLLM:
+ case OP_NOT_UCP_WORD_BOUNDARY:
+ case OP_UCP_WORD_BOUNDARY:
/* Zero width assertions. */
cc++;
continue;
@@ -1299,21 +1348,22 @@ do
case OP_HSPACE:
case OP_NOT_VSPACE:
case OP_VSPACE:
- fast_forward_allowed = FALSE;
+ if (count < 1)
+ count = 1;
cc++;
continue;
case OP_ANYNL:
case OP_EXTUNI:
- fast_forward_allowed = FALSE;
- if (count == 0)
- count = 1;
+ if (count < 3)
+ count = 3;
cc++;
continue;
case OP_NOTPROP:
case OP_PROP:
- fast_forward_allowed = FALSE;
+ if (count < 1)
+ count = 1;
cc += 1 + 2;
continue;
@@ -1321,17 +1371,22 @@ do
case OP_CHARI:
case OP_NOT:
case OP_NOTI:
- fast_forward_allowed = FALSE;
+ if (count < 1)
+ count = 1;
cc += 2;
#ifdef SUPPORT_UNICODE
if (common->utf && HAS_EXTRALEN(cc[-1])) cc += GET_EXTRALEN(cc[-1]);
#endif
continue;
- case OP_TYPESTAR:
case OP_TYPEMINSTAR:
- case OP_TYPEPLUS:
case OP_TYPEMINPLUS:
+ if (count == 2)
+ count = 3;
+ /* Fall through */
+
+ case OP_TYPESTAR:
+ case OP_TYPEPLUS:
case OP_TYPEPOSSTAR:
case OP_TYPEPOSPLUS:
/* The type or prop opcode is skipped in the next iteration. */
@@ -1343,14 +1398,18 @@ do
break;
}
- if (count == 0)
+ if (count < 3)
+ count = 3;
+ continue;
+
+ case OP_TYPEEXACT:
+ if (count < 1)
count = 1;
- fast_forward_allowed = FALSE;
+ cc += 1 + IMM2_SIZE;
continue;
case OP_TYPEUPTO:
case OP_TYPEMINUPTO:
- case OP_TYPEEXACT:
case OP_TYPEPOSUPTO:
cc += IMM2_SIZE;
/* Fall through */
@@ -1359,37 +1418,40 @@ do
case OP_TYPEMINQUERY:
case OP_TYPEPOSQUERY:
/* The type or prop opcode is skipped in the next iteration. */
- fast_forward_allowed = FALSE;
- if (count == 0)
- count = 1;
+ if (count < 3)
+ count = 3;
cc += 1;
continue;
- case OP_STAR:
case OP_MINSTAR:
- case OP_PLUS:
case OP_MINPLUS:
+ case OP_MINSTARI:
+ case OP_MINPLUSI:
+ case OP_NOTMINSTAR:
+ case OP_NOTMINPLUS:
+ case OP_NOTMINSTARI:
+ case OP_NOTMINPLUSI:
+ if (count == 2)
+ count = 3;
+ /* Fall through */
+
+ case OP_STAR:
+ case OP_PLUS:
case OP_POSSTAR:
case OP_POSPLUS:
case OP_STARI:
- case OP_MINSTARI:
case OP_PLUSI:
- case OP_MINPLUSI:
case OP_POSSTARI:
case OP_POSPLUSI:
case OP_NOTSTAR:
- case OP_NOTMINSTAR:
case OP_NOTPLUS:
- case OP_NOTMINPLUS:
case OP_NOTPOSSTAR:
case OP_NOTPOSPLUS:
case OP_NOTSTARI:
- case OP_NOTMINSTARI:
case OP_NOTPLUSI:
- case OP_NOTMINPLUSI:
case OP_NOTPOSSTARI:
case OP_NOTPOSPLUSI:
accelerated_start = cc;
@@ -1399,9 +1461,17 @@ do
#endif
break;
+ case OP_EXACT:
+ if (count < 1)
+ count = 1;
+ cc += 2 + IMM2_SIZE;
+#ifdef SUPPORT_UNICODE
+ if (common->utf && HAS_EXTRALEN(cc[-1])) cc += GET_EXTRALEN(cc[-1]);
+#endif
+ continue;
+
case OP_UPTO:
case OP_MINUPTO:
- case OP_EXACT:
case OP_POSUPTO:
case OP_UPTOI:
case OP_MINUPTOI:
@@ -1430,9 +1500,8 @@ do
case OP_NOTQUERYI:
case OP_NOTMINQUERYI:
case OP_NOTPOSQUERYI:
- fast_forward_allowed = FALSE;
- if (count == 0)
- count = 1;
+ if (count < 3)
+ count = 3;
cc += 2;
#ifdef SUPPORT_UNICODE
if (common->utf && HAS_EXTRALEN(cc[-1])) cc += GET_EXTRALEN(cc[-1]);
@@ -1452,10 +1521,14 @@ do
switch (*cc)
{
- case OP_CRSTAR:
case OP_CRMINSTAR:
- case OP_CRPLUS:
case OP_CRMINPLUS:
+ if (count == 2)
+ count = 3;
+ /* Fall through */
+
+ case OP_CRSTAR:
+ case OP_CRPLUS:
case OP_CRPOSSTAR:
case OP_CRPOSPLUS:
cc++;
@@ -1464,44 +1537,60 @@ do
case OP_CRRANGE:
case OP_CRMINRANGE:
case OP_CRPOSRANGE:
+ if (GET2(cc, 1) == GET2(cc, 1 + IMM2_SIZE))
+ {
+ /* Exact repeat. */
+ cc += 1 + 2 * IMM2_SIZE;
+ if (count < 1)
+ count = 1;
+ continue;
+ }
+
cc += 2 * IMM2_SIZE;
/* Fall through */
case OP_CRQUERY:
case OP_CRMINQUERY:
case OP_CRPOSQUERY:
cc++;
- if (count == 0)
- count = 1;
- /* Fall through */
+ if (count < 3)
+ count = 3;
+ continue;
+
default:
- accelerated_start = NULL;
- fast_forward_allowed = FALSE;
+ /* No repeat. */
+ if (count < 1)
+ count = 1;
continue;
}
break;
- case OP_ONCE:
case OP_BRA:
case OP_CBRA:
- end = cc + GET(cc, 1);
+ prev_count = count;
+ if (count < 1)
+ count = 1;
- prev_fast_forward_allowed = fast_forward_allowed;
- fast_forward_allowed = FALSE;
if (depth >= 4)
break;
- end = bracketend(cc) - (1 + LINK_SIZE);
- if (*end != OP_KET || (*cc == OP_CBRA && common->optimized_cbracket[GET2(cc, 1 + LINK_SIZE)] == 0))
+ if (count < 3 && cc[GET(cc, 1)] == OP_ALT)
+ count = 3;
+
+ end = bracketend(cc);
+ if (end[-1 - LINK_SIZE] != OP_KET || (*cc == OP_CBRA && common->optimized_cbracket[GET2(cc, 1 + LINK_SIZE)] == 0))
break;
- count = detect_early_fail(common, cc, private_data_start, depth + 1, count, prev_fast_forward_allowed);
+ prev_count = detect_early_fail(common, cc, private_data_start, depth + 1, prev_count);
+
+ if (prev_count > count)
+ count = prev_count;
if (PRIVATE_DATA(cc) != 0)
common->private_data_ptrs[begin - common->start] = 1;
if (count < EARLY_FAIL_ENHANCE_MAX)
{
- cc = end + (1 + LINK_SIZE);
+ cc = end;
continue;
}
break;
@@ -1514,55 +1603,52 @@ do
continue;
}
- if (accelerated_start != NULL)
+ if (accelerated_start == NULL)
+ break;
+
+ if (count == 0)
{
- if (count == 0)
- {
- count++;
+ common->fast_forward_bc_ptr = accelerated_start;
+ common->private_data_ptrs[(accelerated_start + 1) - common->start] = ((*private_data_start) << 3) | type_skip;
+ *private_data_start += sizeof(sljit_sw);
+ count = 4;
+ }
+ else if (count < 3)
+ {
+ common->private_data_ptrs[(accelerated_start + 1) - common->start] = ((*private_data_start) << 3) | type_fail;
- if (fast_forward_allowed)
- {
- common->fast_forward_bc_ptr = accelerated_start;
- common->private_data_ptrs[(accelerated_start + 1) - common->start] = ((*private_data_start) << 3) | type_skip;
- *private_data_start += sizeof(sljit_sw);
- }
- else
- {
- common->private_data_ptrs[(accelerated_start + 1) - common->start] = ((*private_data_start) << 3) | type_fail;
+ if (common->early_fail_start_ptr == 0)
+ common->early_fail_start_ptr = *private_data_start;
- if (common->early_fail_start_ptr == 0)
- common->early_fail_start_ptr = *private_data_start;
+ *private_data_start += sizeof(sljit_sw);
+ common->early_fail_end_ptr = *private_data_start;
- *private_data_start += sizeof(sljit_sw);
- common->early_fail_end_ptr = *private_data_start;
+ if (*private_data_start > SLJIT_MAX_LOCAL_SIZE)
+ return EARLY_FAIL_ENHANCE_MAX;
- if (*private_data_start > SLJIT_MAX_LOCAL_SIZE)
- return EARLY_FAIL_ENHANCE_MAX;
- }
- }
- else
- {
- common->private_data_ptrs[(accelerated_start + 1) - common->start] = ((*private_data_start) << 3) | type_fail_range;
+ count = 4;
+ }
+ else
+ {
+ common->private_data_ptrs[(accelerated_start + 1) - common->start] = ((*private_data_start) << 3) | type_fail_range;
- if (common->early_fail_start_ptr == 0)
- common->early_fail_start_ptr = *private_data_start;
+ if (common->early_fail_start_ptr == 0)
+ common->early_fail_start_ptr = *private_data_start;
- *private_data_start += 2 * sizeof(sljit_sw);
- common->early_fail_end_ptr = *private_data_start;
+ *private_data_start += 2 * sizeof(sljit_sw);
+ common->early_fail_end_ptr = *private_data_start;
- if (*private_data_start > SLJIT_MAX_LOCAL_SIZE)
- return EARLY_FAIL_ENHANCE_MAX;
- }
+ if (*private_data_start > SLJIT_MAX_LOCAL_SIZE)
+ return EARLY_FAIL_ENHANCE_MAX;
- /* Cannot be part of a repeat. */
- common->private_data_ptrs[begin - common->start] = 1;
count++;
-
- if (count < EARLY_FAIL_ENHANCE_MAX)
- continue;
}
- break;
+ /* Cannot be part of a repeat. */
+ common->private_data_ptrs[begin - common->start] = 1;
+
+ if (count >= EARLY_FAIL_ENHANCE_MAX)
+ break;
}
if (*cc != OP_ALT && *cc != OP_KET)
@@ -1795,7 +1881,6 @@ while (cc < ccend)
case OP_ASSERTBACK:
case OP_ASSERTBACK_NOT:
case OP_ASSERT_NA:
- case OP_ASSERTBACK_NA:
case OP_ONCE:
case OP_SCRIPT_RUN:
case OP_BRAPOS:
@@ -1807,6 +1892,19 @@ while (cc < ccend)
bracketlen = 1 + LINK_SIZE;
break;
+ case OP_ASSERTBACK_NA:
+ common->private_data_ptrs[cc - common->start] = private_data_ptr;
+ private_data_ptr += sizeof(sljit_sw);
+
+ if (find_vreverse(cc))
+ {
+ common->private_data_ptrs[cc + 1 - common->start] = 1;
+ private_data_ptr += sizeof(sljit_sw);
+ }
+
+ bracketlen = 1 + LINK_SIZE;
+ break;
+
case OP_CBRAPOS:
case OP_SCBRAPOS:
common->private_data_ptrs[cc - common->start] = private_data_ptr;
@@ -2106,6 +2204,9 @@ while (cc < ccend)
case OP_CALLOUT:
case OP_CALLOUT_STR:
+ case OP_NOT_UCP_WORD_BOUNDARY:
+ case OP_UCP_WORD_BOUNDARY:
+
cc = next_opcode(common, cc);
SLJIT_ASSERT(cc != NULL);
break;
@@ -2261,7 +2362,7 @@ int i;
for (i = 0; i < RECURSE_TMP_REG_COUNT; i++)
{
SLJIT_ASSERT(status->tmp_regs[i] >= 0);
- SLJIT_ASSERT(sljit_get_register_index(status->saved_tmp_regs[i]) < 0 || status->tmp_regs[i] == status->saved_tmp_regs[i]);
+ SLJIT_ASSERT(sljit_get_register_index(SLJIT_GP_REGISTER, status->saved_tmp_regs[i]) < 0 || status->tmp_regs[i] == status->saved_tmp_regs[i]);
status->store_bases[i] = -1;
}
@@ -2281,7 +2382,7 @@ SLJIT_ASSERT(load_base > 0 && store_base > 0);
if (status->store_bases[next_tmp_reg] == -1)
{
/* Preserve virtual registers. */
- if (sljit_get_register_index(status->saved_tmp_regs[next_tmp_reg]) < 0)
+ if (sljit_get_register_index(SLJIT_GP_REGISTER, status->saved_tmp_regs[next_tmp_reg]) < 0)
OP1(SLJIT_MOV, status->saved_tmp_regs[next_tmp_reg], 0, tmp_reg, 0);
}
else
@@ -2310,7 +2411,7 @@ for (i = 0; i < RECURSE_TMP_REG_COUNT; i++)
OP1(SLJIT_MOV, SLJIT_MEM1(status->store_bases[next_tmp_reg]), status->store_offsets[next_tmp_reg], tmp_reg, 0);
/* Restore virtual registers. */
- if (sljit_get_register_index(saved_tmp_reg) < 0)
+ if (sljit_get_register_index(SLJIT_GP_REGISTER, saved_tmp_reg) < 0)
OP1(SLJIT_MOV, tmp_reg, 0, saved_tmp_reg, 0);
}
@@ -3047,8 +3148,16 @@ if (*cc == OP_COND || *cc == OP_SCOND)
has_alternatives = FALSE;
cc = next_opcode(common, cc);
+
if (has_alternatives)
+ {
+ if (*cc == OP_REVERSE)
+ cc += 1 + IMM2_SIZE;
+ else if (*cc == OP_VREVERSE)
+ cc += 1 + 2 * IMM2_SIZE;
+
current_offset = common->then_offsets + (cc - common->start);
+ }
while (cc < end)
{
@@ -3057,7 +3166,18 @@ while (cc < end)
else
{
if (*cc == OP_ALT && has_alternatives)
- current_offset = common->then_offsets + (cc + 1 + LINK_SIZE - common->start);
+ {
+ cc += 1 + LINK_SIZE;
+
+ if (*cc == OP_REVERSE)
+ cc += 1 + IMM2_SIZE;
+ else if (*cc == OP_VREVERSE)
+ cc += 1 + 2 * IMM2_SIZE;
+
+ current_offset = common->then_offsets + (cc - common->start);
+ continue;
+ }
+
if (*cc >= OP_THEN && *cc <= OP_THEN_ARG && current_offset != NULL)
*current_offset = 1;
cc = next_opcode(common, cc);
@@ -3081,7 +3201,7 @@ return (value & (value - 1)) == 0;
static SLJIT_INLINE void set_jumps(jump_list *list, struct sljit_label *label)
{
-while (list)
+while (list != NULL)
{
/* sljit_set_label is clever enough to do nothing
if either the jump or the label is NULL. */
@@ -3239,7 +3359,7 @@ if (size == sizeof(sljit_sw))
return;
}
-if (sljit_get_register_index(TMP3) >= 0 && !sljit_has_cpu_feature(SLJIT_HAS_ZERO_REGISTER))
+if (sljit_get_register_index(SLJIT_GP_REGISTER, TMP3) >= 0 && !sljit_has_cpu_feature(SLJIT_HAS_ZERO_REGISTER))
{
OP1(SLJIT_MOV, TMP3, 0, SLJIT_IMM, 0);
src = TMP3;
@@ -3818,9 +3938,9 @@ if (common->invalid_utf)
{
OP2(SLJIT_SUB, TMP2, 0, TMP1, 0, SLJIT_IMM, 0xd800);
OP2U(SLJIT_SUB | SLJIT_SET_GREATER_EQUAL, TMP1, 0, SLJIT_IMM, 0x110000);
- CMOV(SLJIT_GREATER_EQUAL, TMP1, SLJIT_IMM, INVALID_UTF_CHAR);
+ SELECT(SLJIT_GREATER_EQUAL, TMP1, SLJIT_IMM, INVALID_UTF_CHAR, TMP1);
OP2U(SLJIT_SUB | SLJIT_SET_LESS, TMP2, 0, SLJIT_IMM, 0xe000 - 0xd800);
- CMOV(SLJIT_LESS, TMP1, SLJIT_IMM, INVALID_UTF_CHAR);
+ SELECT(SLJIT_LESS, TMP1, SLJIT_IMM, INVALID_UTF_CHAR, TMP1);
}
}
#endif /* PCRE2_CODE_UNIT_WIDTH == [8|16|32] */
@@ -4058,9 +4178,9 @@ if (common->utf)
OP2(SLJIT_ADD, RETURN_ADDR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(1));
OP2U(SLJIT_SUB | SLJIT_SET_LESS, TMP2, 0, SLJIT_IMM, 0x400);
if (options & READ_CHAR_UPDATE_STR_PTR)
- CMOV(SLJIT_LESS, STR_PTR, RETURN_ADDR, 0);
+ SELECT(SLJIT_LESS, STR_PTR, RETURN_ADDR, 0, STR_PTR);
if (max >= 0xd800)
- CMOV(SLJIT_LESS, TMP1, SLJIT_IMM, 0x10000);
+ SELECT(SLJIT_LESS, TMP1, SLJIT_IMM, 0x10000, TMP1);
}
else
{
@@ -4085,15 +4205,46 @@ if (common->invalid_utf)
{
OP2(SLJIT_SUB, TMP2, 0, TMP1, 0, SLJIT_IMM, 0xd800);
OP2U(SLJIT_SUB | SLJIT_SET_GREATER_EQUAL, TMP1, 0, SLJIT_IMM, 0x110000);
- CMOV(SLJIT_GREATER_EQUAL, TMP1, SLJIT_IMM, INVALID_UTF_CHAR);
+ SELECT(SLJIT_GREATER_EQUAL, TMP1, SLJIT_IMM, INVALID_UTF_CHAR, TMP1);
OP2U(SLJIT_SUB | SLJIT_SET_LESS, TMP2, 0, SLJIT_IMM, 0xe000 - 0xd800);
- CMOV(SLJIT_LESS, TMP1, SLJIT_IMM, INVALID_UTF_CHAR);
+ SELECT(SLJIT_LESS, TMP1, SLJIT_IMM, INVALID_UTF_CHAR, TMP1);
}
}
#endif /* PCRE2_CODE_UNIT_WIDTH == [8|16|32] */
#endif /* SUPPORT_UNICODE */
}
+static void skip_valid_char(compiler_common *common)
+{
+DEFINE_COMPILER;
+#if (defined SUPPORT_UNICODE) && (PCRE2_CODE_UNIT_WIDTH == 8 || PCRE2_CODE_UNIT_WIDTH == 16)
+struct sljit_jump *jump;
+#endif
+
+#if (defined SUPPORT_UNICODE) && (PCRE2_CODE_UNIT_WIDTH == 8 || PCRE2_CODE_UNIT_WIDTH == 16)
+ if (common->utf)
+ {
+ OP1(MOV_UCHAR, TMP1, 0, SLJIT_MEM1(STR_PTR), 0);
+ OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(1));
+#if PCRE2_CODE_UNIT_WIDTH == 8
+ jump = CMP(SLJIT_LESS, TMP1, 0, SLJIT_IMM, 0xc0);
+ OP1(SLJIT_MOV_U8, TMP1, 0, SLJIT_MEM1(TMP1), (sljit_sw)PRIV(utf8_table4) - 0xc0);
+ OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, TMP1, 0);
+#elif PCRE2_CODE_UNIT_WIDTH == 16
+ jump = CMP(SLJIT_LESS, TMP1, 0, SLJIT_IMM, 0xd800);
+ OP2(SLJIT_AND, TMP1, 0, TMP1, 0, SLJIT_IMM, 0xfc00);
+ OP2U(SLJIT_SUB | SLJIT_SET_Z, TMP1, 0, SLJIT_IMM, 0xd800);
+ OP_FLAGS(SLJIT_MOV, TMP1, 0, SLJIT_EQUAL);
+ OP2(SLJIT_SHL, TMP1, 0, TMP1, 0, SLJIT_IMM, 1);
+ OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, TMP1, 0);
+#endif /* PCRE2_CODE_UNIT_WIDTH == 8 */
+ JUMPHERE(jump);
+ return;
+ }
+#endif /* SUPPORT_UNICODE && PCRE2_CODE_UNIT_WIDTH == [8|16] */
+ OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(1));
+}
+
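The new skip_valid_char() helper above advances STR_PTR over exactly one character when the input is already known to be valid UTF. A minimal C sketch of the 8-bit path (illustrative only; the helper name is invented, and it reuses the library's PRIV(utf8_table4) table of trailing-byte counts):

static PCRE2_SPTR skip_one_valid_utf8(PCRE2_SPTR p)
{
uint32_t lead = *p++;                      /* first code unit */
if (lead >= 0xc0)
  p += PRIV(utf8_table4)[lead - 0xc0];     /* number of trailing bytes */
return p;
}

For example, the lead byte 0xE2 of the three-byte sequence E2 82 AC (U+20AC) yields two trailing bytes, so the whole character is skipped in one step; the 16-bit path instead adds one extra code unit when the first unit is a high surrogate.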
#if defined SUPPORT_UNICODE && PCRE2_CODE_UNIT_WIDTH == 8
static BOOL is_char7_bitset(const sljit_u8 *bitset, BOOL nclass)
@@ -4135,6 +4286,7 @@ if (negated)
if (common->invalid_utf)
{
+ OP1(SLJIT_MOV, TMP1, 0, TMP2, 0);
add_jump(compiler, &common->utfreadchar_invalid, JUMP(SLJIT_FAST_CALL));
add_jump(compiler, backtracks, CMP(SLJIT_EQUAL, TMP1, 0, SLJIT_IMM, INVALID_UTF_CHAR));
OP1(SLJIT_MOV, TMP1, 0, SLJIT_IMM, 0);
@@ -4242,7 +4394,7 @@ if (common->utf && negated)
{
OP2(SLJIT_ADD, RETURN_ADDR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(1));
OP2U(SLJIT_SUB | SLJIT_SET_LESS, TMP2, 0, SLJIT_IMM, 0x400);
- CMOV(SLJIT_LESS, STR_PTR, RETURN_ADDR, 0);
+ SELECT(SLJIT_LESS, STR_PTR, RETURN_ADDR, 0, STR_PTR);
}
else
{
@@ -4399,7 +4551,7 @@ of the character (>= 0xc0). Return char value in TMP1. */
DEFINE_COMPILER;
struct sljit_jump *jump;
-sljit_emit_fast_enter(compiler, RETURN_ADDR, 0);
+sljit_emit_op_dst(compiler, SLJIT_FAST_ENTER, RETURN_ADDR, 0);
OP1(MOV_UCHAR, TMP2, 0, SLJIT_MEM1(STR_PTR), IN_UCHARS(0));
OP2(SLJIT_SHL, TMP1, 0, TMP1, 0, SLJIT_IMM, 6);
OP2(SLJIT_AND, TMP2, 0, TMP2, 0, SLJIT_IMM, 0x3f);
@@ -4445,7 +4597,7 @@ DEFINE_COMPILER;
struct sljit_jump *jump;
struct sljit_jump *compare;
-sljit_emit_fast_enter(compiler, RETURN_ADDR, 0);
+sljit_emit_op_dst(compiler, SLJIT_FAST_ENTER, RETURN_ADDR, 0);
OP2U(SLJIT_AND | SLJIT_SET_Z, TMP2, 0, SLJIT_IMM, 0x20);
jump = JUMP(SLJIT_NOT_ZERO);
@@ -4487,7 +4639,7 @@ struct sljit_label *three_byte_entry;
struct sljit_label *exit_invalid_label;
struct sljit_jump *exit_invalid[11];
-sljit_emit_fast_enter(compiler, RETURN_ADDR, 0);
+sljit_emit_op_dst(compiler, SLJIT_FAST_ENTER, RETURN_ADDR, 0);
OP2(SLJIT_SUB, TMP1, 0, TMP1, 0, SLJIT_IMM, 0xc2);
@@ -4522,7 +4674,7 @@ OP2(SLJIT_OR, TMP1, 0, TMP1, 0, TMP2, 0);
if (has_cmov)
{
OP2U(SLJIT_SUB | SLJIT_SET_GREATER_EQUAL, TMP2, 0, SLJIT_IMM, 0x40);
- CMOV(SLJIT_GREATER_EQUAL, TMP1, SLJIT_IMM, 0x20000);
+ SELECT(SLJIT_GREATER_EQUAL, TMP1, SLJIT_IMM, 0x20000, TMP1);
exit_invalid[2] = NULL;
}
else
@@ -4537,7 +4689,7 @@ OP2(SLJIT_SUB, TMP1, 0, TMP1, 0, SLJIT_IMM, 0x2d800);
if (has_cmov)
{
OP2U(SLJIT_SUB | SLJIT_SET_LESS, TMP1, 0, SLJIT_IMM, 0x800);
- CMOV(SLJIT_LESS, TMP1, SLJIT_IMM, INVALID_UTF_CHAR - 0xd800);
+ SELECT(SLJIT_LESS, TMP1, SLJIT_IMM, INVALID_UTF_CHAR - 0xd800, TMP1);
exit_invalid[3] = NULL;
}
else
@@ -4548,7 +4700,7 @@ OP2(SLJIT_SUB, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(1));
if (has_cmov)
{
OP2U(SLJIT_SUB | SLJIT_SET_LESS, TMP1, 0, SLJIT_IMM, 0x800);
- CMOV(SLJIT_LESS, TMP1, SLJIT_IMM, INVALID_UTF_CHAR);
+ SELECT(SLJIT_LESS, TMP1, SLJIT_IMM, INVALID_UTF_CHAR, TMP1);
exit_invalid[4] = NULL;
}
else
@@ -4565,7 +4717,7 @@ OP2(SLJIT_OR, TMP1, 0, TMP1, 0, TMP2, 0);
if (has_cmov)
{
OP2U(SLJIT_SUB | SLJIT_SET_GREATER_EQUAL, TMP2, 0, SLJIT_IMM, 0x40);
- CMOV(SLJIT_GREATER_EQUAL, TMP1, SLJIT_IMM, 0);
+ SELECT(SLJIT_GREATER_EQUAL, TMP1, SLJIT_IMM, 0, TMP1);
exit_invalid[5] = NULL;
}
else
@@ -4575,7 +4727,7 @@ OP2(SLJIT_SUB, TMP1, 0, TMP1, 0, SLJIT_IMM, 0xc10000);
if (has_cmov)
{
OP2U(SLJIT_SUB | SLJIT_SET_GREATER_EQUAL, TMP1, 0, SLJIT_IMM, 0x100000);
- CMOV(SLJIT_GREATER_EQUAL, TMP1, SLJIT_IMM, INVALID_UTF_CHAR - 0x10000);
+ SELECT(SLJIT_GREATER_EQUAL, TMP1, SLJIT_IMM, INVALID_UTF_CHAR - 0x10000, TMP1);
exit_invalid[6] = NULL;
}
else
@@ -4612,7 +4764,7 @@ OP2(SLJIT_OR, TMP1, 0, TMP1, 0, TMP2, 0);
if (has_cmov)
{
OP2U(SLJIT_SUB | SLJIT_SET_GREATER_EQUAL, TMP2, 0, SLJIT_IMM, 0x40);
- CMOV(SLJIT_GREATER_EQUAL, TMP1, SLJIT_IMM, INVALID_UTF_CHAR);
+ SELECT(SLJIT_GREATER_EQUAL, TMP1, SLJIT_IMM, INVALID_UTF_CHAR, TMP1);
exit_invalid[10] = NULL;
}
else
@@ -4643,7 +4795,7 @@ struct sljit_label *skip_start;
struct sljit_label *three_byte_exit;
struct sljit_jump *jump[5];
-sljit_emit_fast_enter(compiler, RETURN_ADDR, 0);
+sljit_emit_op_dst(compiler, SLJIT_FAST_ENTER, RETURN_ADDR, 0);
if (common->nltype != NLTYPE_ANY)
{
@@ -4734,7 +4886,7 @@ struct sljit_label *exit_ok_label;
struct sljit_label *exit_invalid_label;
struct sljit_jump *exit_invalid[7];
-sljit_emit_fast_enter(compiler, RETURN_ADDR, 0);
+sljit_emit_op_dst(compiler, SLJIT_FAST_ENTER, RETURN_ADDR, 0);
OP2(SLJIT_SUB, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(3));
exit_invalid[0] = CMP(SLJIT_GREATER_EQUAL, TMP1, 0, SLJIT_IMM, 0xc0);
@@ -4825,7 +4977,7 @@ static void do_utfpeakcharback(compiler_common *common)
DEFINE_COMPILER;
struct sljit_jump *jump[2];
-sljit_emit_fast_enter(compiler, RETURN_ADDR, 0);
+sljit_emit_op_dst(compiler, SLJIT_FAST_ENTER, RETURN_ADDR, 0);
OP1(MOV_UCHAR, TMP1, 0, SLJIT_MEM1(STR_PTR), IN_UCHARS(-2));
OP2(SLJIT_SUB, TMP1, 0, TMP1, 0, SLJIT_IMM, 0xc0);
@@ -4868,7 +5020,7 @@ struct sljit_label *three_byte_entry;
struct sljit_label *exit_invalid_label;
struct sljit_jump *exit_invalid[8];
-sljit_emit_fast_enter(compiler, RETURN_ADDR, 0);
+sljit_emit_op_dst(compiler, SLJIT_FAST_ENTER, RETURN_ADDR, 0);
OP2(SLJIT_ADD, TMP2, 0, TMP2, 0, SLJIT_IMM, IN_UCHARS(3));
exit_invalid[0] = CMP(SLJIT_GREATER_EQUAL, TMP1, 0, SLJIT_IMM, 0xc0);
@@ -4905,7 +5057,7 @@ OP2(SLJIT_SUB, TMP1, 0, TMP1, 0, SLJIT_IMM, 0xd800);
if (has_cmov)
{
OP2U(SLJIT_SUB | SLJIT_SET_LESS, TMP1, 0, SLJIT_IMM, 0x800);
- CMOV(SLJIT_LESS, TMP1, SLJIT_IMM, -0xd800);
+ SELECT(SLJIT_LESS, TMP1, SLJIT_IMM, -0xd800, TMP1);
exit_invalid[2] = NULL;
}
else
@@ -4915,7 +5067,7 @@ OP2(SLJIT_ADD, TMP1, 0, TMP1, 0, SLJIT_IMM, 0xd800);
if (has_cmov)
{
OP2U(SLJIT_SUB | SLJIT_SET_LESS, TMP1, 0, SLJIT_IMM, 0x800);
- CMOV(SLJIT_LESS, TMP1, SLJIT_IMM, INVALID_UTF_CHAR);
+ SELECT(SLJIT_LESS, TMP1, SLJIT_IMM, INVALID_UTF_CHAR, TMP1);
exit_invalid[3] = NULL;
}
else
@@ -4940,7 +5092,7 @@ OP2(SLJIT_ADD, TMP1, 0, TMP1, 0, TMP2, 0);
if (has_cmov)
{
OP2U(SLJIT_SUB | SLJIT_SET_GREATER_EQUAL, TMP1, 0, SLJIT_IMM, 0x100000);
- CMOV(SLJIT_GREATER_EQUAL, TMP1, SLJIT_IMM, INVALID_UTF_CHAR - 0x10000);
+ SELECT(SLJIT_GREATER_EQUAL, TMP1, SLJIT_IMM, INVALID_UTF_CHAR - 0x10000, TMP1);
exit_invalid[5] = NULL;
}
else
@@ -5000,7 +5152,7 @@ undefined for invalid characters. */
DEFINE_COMPILER;
struct sljit_jump *exit_invalid[3];
-sljit_emit_fast_enter(compiler, RETURN_ADDR, 0);
+sljit_emit_op_dst(compiler, SLJIT_FAST_ENTER, RETURN_ADDR, 0);
/* TMP2 contains the high surrogate. */
exit_invalid[0] = CMP(SLJIT_GREATER_EQUAL, TMP1, 0, SLJIT_IMM, 0xdc00);
@@ -5033,7 +5185,7 @@ char value in TMP1. */
DEFINE_COMPILER;
struct sljit_jump *exit_invalid[2];
-sljit_emit_fast_enter(compiler, RETURN_ADDR, 0);
+sljit_emit_op_dst(compiler, SLJIT_FAST_ENTER, RETURN_ADDR, 0);
/* TMP2 contains the high surrogate. */
exit_invalid[0] = CMP(SLJIT_GREATER_EQUAL, STR_PTR, 0, STR_END, 0);
@@ -5062,7 +5214,7 @@ static void do_utfmoveback_invalid(compiler_common *common)
DEFINE_COMPILER;
struct sljit_jump *exit_invalid[3];
-sljit_emit_fast_enter(compiler, RETURN_ADDR, 0);
+sljit_emit_op_dst(compiler, SLJIT_FAST_ENTER, RETURN_ADDR, 0);
exit_invalid[0] = CMP(SLJIT_LESS, TMP1, 0, SLJIT_IMM, 0x400);
exit_invalid[1] = CMP(SLJIT_GREATER_EQUAL, TMP2, 0, STR_PTR, 0);
@@ -5091,7 +5243,7 @@ DEFINE_COMPILER;
struct sljit_jump *jump;
struct sljit_jump *exit_invalid[3];
-sljit_emit_fast_enter(compiler, RETURN_ADDR, 0);
+sljit_emit_op_dst(compiler, SLJIT_FAST_ENTER, RETURN_ADDR, 0);
jump = CMP(SLJIT_GREATER_EQUAL, TMP1, 0, SLJIT_IMM, 0xe000);
OP2(SLJIT_ADD, TMP2, 0, TMP2, 0, SLJIT_IMM, IN_UCHARS(1));
@@ -5140,7 +5292,7 @@ SLJIT_ASSERT(record->caseset == 0 && record->other_case == 0);
SLJIT_ASSERT(UCD_BLOCK_SIZE == 128 && sizeof(ucd_record) == 12);
-sljit_emit_fast_enter(compiler, RETURN_ADDR, 0);
+sljit_emit_op_dst(compiler, SLJIT_FAST_ENTER, RETURN_ADDR, 0);
#if PCRE2_CODE_UNIT_WIDTH == 32
if (!common->utf)
@@ -5180,7 +5332,7 @@ SLJIT_ASSERT(record->caseset == 0 && record->other_case == 0);
SLJIT_ASSERT(UCD_BLOCK_SIZE == 128 && sizeof(ucd_record) == 12);
-sljit_emit_fast_enter(compiler, RETURN_ADDR, 0);
+sljit_emit_op_dst(compiler, SLJIT_FAST_ENTER, RETURN_ADDR, 0);
#if PCRE2_CODE_UNIT_WIDTH == 32
if (!common->utf)
@@ -5379,7 +5531,7 @@ else if (common->utf)
{
OP2(SLJIT_ADD, TMP2, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(1));
OP2U(SLJIT_SUB | SLJIT_SET_LESS, TMP1, 0, SLJIT_IMM, 0x400);
- CMOV(SLJIT_LESS, STR_PTR, TMP2, 0);
+ SELECT(SLJIT_LESS, STR_PTR, TMP2, 0, STR_PTR);
}
else
{
@@ -5486,6 +5638,8 @@ while (TRUE)
case OP_CIRCM:
case OP_DOLL:
case OP_DOLLM:
+ case OP_NOT_UCP_WORD_BOUNDARY:
+ case OP_UCP_WORD_BOUNDARY:
/* Zero width assertions. */
cc++;
continue;
@@ -5737,7 +5891,7 @@ while (TRUE)
chr++;
}
while (byte != 0);
- chr = (chr + 7) & ~7;
+ chr = (chr + 7) & (sljit_u32)(~7);
}
}
while (chars->count != 255 && bytes < bytes_end);
@@ -5797,7 +5951,10 @@ while (TRUE)
chr = *cc;
#ifdef SUPPORT_UNICODE
if (common->ucp && chr > 127)
- othercase[0] = UCD_OTHERCASE(chr);
+ {
+ chr = UCD_OTHERCASE(chr);
+ othercase[0] = (chr == (PCRE2_UCHAR)chr) ? chr : *cc;
+ }
else
#endif
othercase[0] = TABLE_GET(chr, common->fcc, chr);
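The guard a few lines above matters for the 8-bit and 16-bit libraries when PCRE2_UCP is set without UTF: UCD_OTHERCASE() can return a code point that does not fit into a PCRE2_UCHAR. An illustrative case (not taken from the patch):

/* 8-bit library, PCRE2_UCP set, UTF disabled: the other case of code unit
   0xB5 (MICRO SIGN) is a Greek letter above 0xFF. It cannot be stored in a
   single 8-bit code unit, so the comparison fails and othercase[0] keeps
   the original character instead of a silently truncated value. */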
@@ -5869,6 +6026,7 @@ static BOOL check_fast_forward_char_pair_simd(compiler_common *common, fast_forw
{
sljit_s32 i, j, max_i = 0, max_j = 0;
sljit_u32 max_pri = 0;
+ sljit_s32 max_offset = max_fast_forward_char_pair_offset();
PCRE2_UCHAR a1, a2, a_pri, b1, b2, b_pri;
for (i = max - 1; i >= 1; i--)
@@ -5879,7 +6037,7 @@ static BOOL check_fast_forward_char_pair_simd(compiler_common *common, fast_forw
a2 = chars[i].chars[1];
a_pri = chars[i].last_count;
- j = i - max_fast_forward_char_pair_offset();
+ j = i - max_offset;
if (j < 0)
j = 0;
@@ -5935,7 +6093,7 @@ if (has_match_end)
OP2(SLJIT_ADD, TMP1, 0, TMP1, 0, SLJIT_IMM, IN_UCHARS(offset + 1));
OP2U(SLJIT_SUB | SLJIT_SET_GREATER, STR_END, 0, TMP1, 0);
- CMOV(SLJIT_GREATER, STR_END, TMP1, 0);
+ SELECT(SLJIT_GREATER, STR_END, TMP1, 0, STR_END);
}
#ifdef JIT_HAS_FAST_FORWARD_CHAR_SIMD
@@ -6028,25 +6186,34 @@ if (max < 1)
/* Convert last_count to priority. */
for (i = 0; i < max; i++)
{
- SLJIT_ASSERT(chars[i].count > 0 && chars[i].last_count <= chars[i].count);
+ SLJIT_ASSERT(chars[i].last_count <= chars[i].count);
- if (chars[i].count == 1)
+ switch (chars[i].count)
{
+ case 0:
+ chars[i].count = 255;
+ chars[i].last_count = 0;
+ break;
+
+ case 1:
chars[i].last_count = (chars[i].last_count == 1) ? 7 : 5;
/* Simplifies algorithms later. */
chars[i].chars[1] = chars[i].chars[0];
- }
- else if (chars[i].count == 2)
- {
+ break;
+
+ case 2:
SLJIT_ASSERT(chars[i].chars[0] != chars[i].chars[1]);
if (is_powerof2(chars[i].chars[0] ^ chars[i].chars[1]))
chars[i].last_count = (chars[i].last_count == 2) ? 6 : 4;
else
chars[i].last_count = (chars[i].last_count == 2) ? 3 : 2;
- }
- else
+ break;
+
+ default:
chars[i].last_count = (chars[i].count == 255) ? 0 : 1;
+ break;
+ }
}
#ifdef JIT_HAS_FAST_FORWARD_CHAR_PAIR_SIMD
@@ -6138,7 +6305,7 @@ if (common->match_end_ptr != 0)
OP2(SLJIT_SUB | SLJIT_SET_LESS, STR_END, 0, STR_END, 0, SLJIT_IMM, IN_UCHARS(max));
add_jump(compiler, &common->failed_match, JUMP(SLJIT_LESS));
OP2U(SLJIT_SUB | SLJIT_SET_GREATER, STR_END, 0, TMP1, 0);
- CMOV(SLJIT_GREATER, STR_END, TMP1, 0);
+ SELECT(SLJIT_GREATER, STR_END, TMP1, 0, STR_END);
}
else
{
@@ -6368,7 +6535,7 @@ if (JIT_HAS_FAST_FORWARD_CHAR_SIMD && (common->nltype == NLTYPE_FIXED || common-
if (common->mode != PCRE2_JIT_COMPLETE)
{
OP2U(SLJIT_SUB | SLJIT_SET_GREATER, STR_PTR, 0, STR_END, 0);
- CMOV(SLJIT_GREATER, STR_PTR, STR_END, 0);
+ SELECT(SLJIT_GREATER, STR_PTR, STR_END, 0, STR_PTR);
}
}
}
@@ -6430,7 +6597,7 @@ if (common->match_end_ptr != 0)
OP1(SLJIT_MOV, RETURN_ADDR, 0, STR_END, 0);
OP2(SLJIT_ADD, TMP1, 0, TMP1, 0, SLJIT_IMM, IN_UCHARS(1));
OP2U(SLJIT_SUB | SLJIT_SET_GREATER, STR_END, 0, TMP1, 0);
- CMOV(SLJIT_GREATER, STR_END, TMP1, 0);
+ SELECT(SLJIT_GREATER, STR_END, TMP1, 0, STR_END);
}
start = LABEL();
@@ -6567,13 +6734,14 @@ DEFINE_COMPILER;
struct sljit_jump *jump;
struct sljit_label *mainloop;
-sljit_emit_fast_enter(compiler, RETURN_ADDR, 0);
+sljit_emit_op_dst(compiler, SLJIT_FAST_ENTER, RETURN_ADDR, 0);
GET_LOCAL_BASE(TMP1, 0, 0);
/* Drop frames until we reach STACK_TOP. */
mainloop = LABEL();
OP1(SLJIT_MOV, TMP2, 0, SLJIT_MEM1(STACK_TOP), -SSIZE_OF(sw));
-jump = CMP(SLJIT_SIG_LESS_EQUAL, TMP2, 0, SLJIT_IMM, 0);
+OP2U(SLJIT_SUB | SLJIT_SET_SIG_LESS_EQUAL | SLJIT_SET_Z, TMP2, 0, SLJIT_IMM, 0);
+jump = JUMP(SLJIT_SIG_LESS_EQUAL);
OP2(SLJIT_ADD, TMP2, 0, TMP2, 0, TMP1, 0);
if (HAS_VIRTUAL_REGISTERS)
@@ -6594,7 +6762,8 @@ else
JUMPTO(SLJIT_JUMP, mainloop);
JUMPHERE(jump);
-jump = CMP(SLJIT_NOT_ZERO /* SIG_LESS */, TMP2, 0, SLJIT_IMM, 0);
+sljit_set_current_flags(compiler, SLJIT_CURRENT_FLAGS_SUB | SLJIT_CURRENT_FLAGS_COMPARE | SLJIT_SET_SIG_LESS_EQUAL | SLJIT_SET_Z);
+jump = JUMP(SLJIT_NOT_ZERO /* SIG_LESS */);
/* End of reverting values. */
OP_SRC(SLJIT_FAST_RETURN, RETURN_ADDR, 0);
@@ -6615,7 +6784,17 @@ else
JUMPTO(SLJIT_JUMP, mainloop);
}
-static void check_wordboundary(compiler_common *common)
+#ifdef SUPPORT_UNICODE
+#define UCPCAT(bit) (1 << (bit))
+#define UCPCAT2(bit1, bit2) (UCPCAT(bit1) | UCPCAT(bit2))
+#define UCPCAT3(bit1, bit2, bit3) (UCPCAT(bit1) | UCPCAT(bit2) | UCPCAT(bit3))
+#define UCPCAT_RANGE(start, end) (((1 << ((end) + 1)) - 1) - ((1 << (start)) - 1))
+#define UCPCAT_L UCPCAT_RANGE(ucp_Ll, ucp_Lu)
+#define UCPCAT_N UCPCAT_RANGE(ucp_Nd, ucp_No)
+#define UCPCAT_ALL ((1 << (ucp_Zs + 1)) - 1)
+#endif
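These macros build a 32-bit mask with one bit per Unicode general category (the ucp_* values), so several category tests collapse into a single AND. A worked expansion with small, made-up bit numbers:

/* UCPCAT(4)          == 0x10                               (bit 4)        */
/* UCPCAT2(4, 6)      == 0x10 | 0x40 == 0x50                (bits 4 and 6) */
/* UCPCAT_RANGE(4, 6) == ((1 << 7) - 1) - ((1 << 4) - 1)
                      == 0x7f - 0x0f == 0x70                (bits 4..6)    */
/* UCPCAT_L and UCPCAT_N therefore cover all L* and N* categories, which
   works because those categories are contiguous in the ucp_* enumeration. */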
+
+static void check_wordboundary(compiler_common *common, BOOL ucp)
{
DEFINE_COMPILER;
struct sljit_jump *skipread;
@@ -6629,9 +6808,10 @@ jump_list *invalid_utf2 = NULL;
struct sljit_jump *jump;
#endif /* PCRE2_CODE_UNIT_WIDTH != 8 || SUPPORT_UNICODE */
+SLJIT_UNUSED_ARG(ucp);
SLJIT_COMPILE_ASSERT(ctype_word == 0x10, ctype_word_must_be_16);
-sljit_emit_fast_enter(compiler, SLJIT_MEM1(SLJIT_SP), LOCALS0);
+sljit_emit_op_dst(compiler, SLJIT_FAST_ENTER, SLJIT_MEM1(SLJIT_SP), LOCALS0);
/* Get type of the previous char, and put it to TMP3. */
OP1(SLJIT_MOV, TMP1, 0, ARGUMENTS, 0);
OP1(SLJIT_MOV, TMP2, 0, SLJIT_MEM1(TMP1), SLJIT_OFFSETOF(jit_arguments, begin));
@@ -6668,19 +6848,12 @@ else
/* Testing char type. */
#ifdef SUPPORT_UNICODE
-if (common->ucp)
+if (ucp)
{
- OP1(SLJIT_MOV, TMP2, 0, SLJIT_IMM, 1);
- jump = CMP(SLJIT_EQUAL, TMP1, 0, SLJIT_IMM, CHAR_UNDERSCORE);
add_jump(compiler, &common->getucdtype, JUMP(SLJIT_FAST_CALL));
- OP2(SLJIT_SUB, TMP1, 0, TMP1, 0, SLJIT_IMM, ucp_Ll);
- OP2U(SLJIT_SUB | SLJIT_SET_LESS_EQUAL, TMP1, 0, SLJIT_IMM, ucp_Lu - ucp_Ll);
- OP_FLAGS(SLJIT_MOV, TMP2, 0, SLJIT_LESS_EQUAL);
- OP2(SLJIT_SUB, TMP1, 0, TMP1, 0, SLJIT_IMM, ucp_Nd - ucp_Ll);
- OP2U(SLJIT_SUB | SLJIT_SET_LESS_EQUAL, TMP1, 0, SLJIT_IMM, ucp_No - ucp_Nd);
- OP_FLAGS(SLJIT_OR, TMP2, 0, SLJIT_LESS_EQUAL);
- JUMPHERE(jump);
- OP1(SLJIT_MOV, TMP3, 0, TMP2, 0);
+ OP2(SLJIT_SHL, TMP2, 0, SLJIT_IMM, 1, TMP1, 0);
+ OP2U(SLJIT_AND | SLJIT_SET_Z, TMP2, 0, SLJIT_IMM, UCPCAT2(ucp_Mn, ucp_Pc) | UCPCAT_L | UCPCAT_N);
+ OP_FLAGS(SLJIT_MOV, TMP3, 0, SLJIT_NOT_ZERO);
}
else
#endif /* SUPPORT_UNICODE */
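With these masks the UCP word-boundary test becomes one shift plus one AND instead of a chain of range compares. Roughly the scalar equivalent of the sequence emitted above (illustrative sketch; the helper name is invented):

static BOOL is_ucp_word_char(sljit_u32 chartype)
{
/* chartype is the ucp_* general category returned by getucdtype; "word"
   characters are letters, numbers, Mn and Pc (which includes underscore). */
return ((1u << chartype) &
        (UCPCAT2(ucp_Mn, ucp_Pc) | UCPCAT_L | UCPCAT_N)) != 0;
}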
@@ -6714,18 +6887,12 @@ peek_char(common, READ_CHAR_MAX, SLJIT_MEM1(SLJIT_SP), LOCALS1, &invalid_utf2);
valid_utf = LABEL();
-if (common->ucp)
+if (ucp)
{
- OP1(SLJIT_MOV, TMP2, 0, SLJIT_IMM, 1);
- jump = CMP(SLJIT_EQUAL, TMP1, 0, SLJIT_IMM, CHAR_UNDERSCORE);
add_jump(compiler, &common->getucdtype, JUMP(SLJIT_FAST_CALL));
- OP2(SLJIT_SUB, TMP1, 0, TMP1, 0, SLJIT_IMM, ucp_Ll);
- OP2U(SLJIT_SUB | SLJIT_SET_LESS_EQUAL, TMP1, 0, SLJIT_IMM, ucp_Lu - ucp_Ll);
- OP_FLAGS(SLJIT_MOV, TMP2, 0, SLJIT_LESS_EQUAL);
- OP2(SLJIT_SUB, TMP1, 0, TMP1, 0, SLJIT_IMM, ucp_Nd - ucp_Ll);
- OP2U(SLJIT_SUB | SLJIT_SET_LESS_EQUAL, TMP1, 0, SLJIT_IMM, ucp_No - ucp_Nd);
- OP_FLAGS(SLJIT_OR, TMP2, 0, SLJIT_LESS_EQUAL);
- JUMPHERE(jump);
+ OP2(SLJIT_SHL, TMP2, 0, SLJIT_IMM, 1, TMP1, 0);
+ OP2U(SLJIT_AND | SLJIT_SET_Z, TMP2, 0, SLJIT_IMM, UCPCAT2(ucp_Mn, ucp_Pc) | UCPCAT_L | UCPCAT_N);
+ OP_FLAGS(SLJIT_MOV, TMP2, 0, SLJIT_NOT_ZERO);
}
else
#endif /* SUPPORT_UNICODE */
@@ -6786,7 +6953,7 @@ int i, byte, length = 0;
bit = bits[0] & 0x1;
/* All bits will be zero or one (since bit is zero or one). */
-all = -bit;
+all = (sljit_u8)-bit;
for (i = 0; i < 256; )
{
@@ -6803,7 +6970,7 @@ for (i = 0; i < 256; )
ranges[length] = i;
length++;
bit = cbit;
- all = -cbit;
+ all = (sljit_u8)-cbit; /* 0x00 if cbit is 0, 0xff if it is 1 */
}
i++;
}
@@ -6947,7 +7114,7 @@ for (i = 0; i < 32; i++)
byte = bits[i];
if (nclass)
- byte = ~byte;
+ byte = (sljit_u8)~byte;
j = 0;
while (byte != 0)
@@ -7003,7 +7170,7 @@ while (i < len)
else
{
OP2U(SLJIT_SUB | SLJIT_SET_Z, TMP1, 0, SLJIT_IMM, char_list[i]);
- CMOV(SLJIT_ZERO, TMP2, TMP1, 0);
+ SELECT(SLJIT_ZERO, TMP2, TMP1, 0, TMP2);
}
i++;
}
@@ -7017,7 +7184,7 @@ if (j != 0)
{
j--;
OP2U(SLJIT_SUB | SLJIT_SET_Z, TMP1, 0, SLJIT_IMM, char_list[i] & 0xff);
- CMOV(SLJIT_ZERO, TMP2, TMP1, 0);
+ SELECT(SLJIT_ZERO, TMP2, TMP1, 0, TMP2);
}
}
@@ -7042,7 +7209,7 @@ static void check_anynewline(compiler_common *common)
/* Check whether TMP1 contains a newline character. TMP2 destroyed. */
DEFINE_COMPILER;
-sljit_emit_fast_enter(compiler, RETURN_ADDR, 0);
+sljit_emit_op_dst(compiler, SLJIT_FAST_ENTER, RETURN_ADDR, 0);
OP2(SLJIT_SUB, TMP1, 0, TMP1, 0, SLJIT_IMM, 0x0a);
OP2U(SLJIT_SUB | SLJIT_SET_LESS_EQUAL, TMP1, 0, SLJIT_IMM, 0x0d - 0x0a);
@@ -7069,7 +7236,7 @@ static void check_hspace(compiler_common *common)
/* Check whether TMP1 contains a newline character. TMP2 destroyed. */
DEFINE_COMPILER;
-sljit_emit_fast_enter(compiler, RETURN_ADDR, 0);
+sljit_emit_op_dst(compiler, SLJIT_FAST_ENTER, RETURN_ADDR, 0);
OP2U(SLJIT_SUB | SLJIT_SET_Z, TMP1, 0, SLJIT_IMM, 0x09);
OP_FLAGS(SLJIT_MOV, TMP2, 0, SLJIT_EQUAL);
@@ -7108,7 +7275,7 @@ static void check_vspace(compiler_common *common)
/* Check whether TMP1 contains a newline character. TMP2 destroyed. */
DEFINE_COMPILER;
-sljit_emit_fast_enter(compiler, RETURN_ADDR, 0);
+sljit_emit_op_dst(compiler, SLJIT_FAST_ENTER, RETURN_ADDR, 0);
OP2(SLJIT_SUB, TMP1, 0, TMP1, 0, SLJIT_IMM, 0x0a);
OP2U(SLJIT_SUB | SLJIT_SET_LESS_EQUAL, TMP1, 0, SLJIT_IMM, 0x0d - 0x0a);
@@ -7150,7 +7317,7 @@ else
char2_reg = RETURN_ADDR;
}
-sljit_emit_fast_enter(compiler, SLJIT_MEM1(SLJIT_SP), LOCALS0);
+sljit_emit_op_dst(compiler, SLJIT_FAST_ENTER, SLJIT_MEM1(SLJIT_SP), LOCALS0);
OP2(SLJIT_SUB, STR_PTR, 0, STR_PTR, 0, TMP2, 0);
if (char1_reg == STR_END)
@@ -7237,7 +7404,7 @@ if (sljit_emit_mem_update(compiler, MOV_UCHAR | SLJIT_MEM_SUPP | SLJIT_MEM_POST,
else if (sljit_emit_mem_update(compiler, MOV_UCHAR | SLJIT_MEM_SUPP | SLJIT_MEM_PRE, char1_reg, SLJIT_MEM1(TMP1), IN_UCHARS(1)) == SLJIT_SUCCESS)
opt_type = 2;
-sljit_emit_fast_enter(compiler, SLJIT_MEM1(SLJIT_SP), LOCALS0);
+sljit_emit_op_dst(compiler, SLJIT_FAST_ENTER, SLJIT_MEM1(SLJIT_SP), LOCALS0);
OP2(SLJIT_SUB, STR_PTR, 0, STR_PTR, 0, TMP2, 0);
OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), LOCALS1, char1_reg, 0);
@@ -7464,16 +7631,6 @@ return cc;
#if defined SUPPORT_UNICODE || PCRE2_CODE_UNIT_WIDTH != 8
-#define SET_TYPE_OFFSET(value) \
- if ((value) != typeoffset) \
- { \
- if ((value) < typeoffset) \
- OP2(SLJIT_ADD, typereg, 0, typereg, 0, SLJIT_IMM, typeoffset - (value)); \
- else \
- OP2(SLJIT_SUB, typereg, 0, typereg, 0, SLJIT_IMM, (value) - typeoffset); \
- } \
- typeoffset = (value);
-
#define SET_CHAR_OFFSET(value) \
if ((value) != charoffset) \
{ \
@@ -7498,7 +7655,6 @@ static PCRE2_SPTR compile_char1_matchingpath(compiler_common *common, PCRE2_UCHA
#define XCLASS_SCRIPT_EXTENSION_NOTPROP 0x080
#define XCLASS_SCRIPT_EXTENSION_RESTORE_RETURN_ADDR 0x100
#define XCLASS_SCRIPT_EXTENSION_RESTORE_LOCALS0 0x200
-
#endif /* SUPPORT_UNICODE */
static void compile_xclass_matchingpath(compiler_common *common, PCRE2_SPTR cc, jump_list **backtracks)
@@ -7516,9 +7672,10 @@ BOOL utf = common->utf;
#ifdef SUPPORT_UNICODE
sljit_u32 unicode_status = 0;
+sljit_u32 category_list = 0;
+sljit_u32 items;
int typereg = TMP1;
const sljit_u32 *other_cases;
-sljit_uw typeoffset;
#endif /* SUPPORT_UNICODE */
/* Scanning the necessary info. */
@@ -7535,6 +7692,7 @@ if (cc[-1] & XCL_MAP)
while (*cc != XCL_END)
{
compares++;
+
if (*cc == XCL_SINGLE)
{
cc ++;
@@ -7561,6 +7719,7 @@ while (*cc != XCL_END)
{
SLJIT_ASSERT(*cc == XCL_PROP || *cc == XCL_NOTPROP);
cc++;
+
if (*cc == PT_CLIST && cc[-1] == XCL_PROP)
{
other_cases = PRIV(ucd_caseless_sets) + cc[1];
@@ -7577,24 +7736,36 @@ while (*cc != XCL_END)
min = 0;
}
+ items = 0;
+
switch(*cc)
{
case PT_ANY:
/* Any either accepts everything or is ignored. */
if (cc[-1] == XCL_PROP)
- {
- compile_char1_matchingpath(common, OP_ALLANY, cc, backtracks, FALSE);
- if (list == backtracks)
- add_jump(compiler, backtracks, JUMP(SLJIT_JUMP));
- return;
- }
+ items = UCPCAT_ALL;
+ else
+ compares--;
break;
case PT_LAMP:
+ items = UCPCAT3(ucp_Lu, ucp_Ll, ucp_Lt);
+ break;
+
case PT_GC:
+ items = UCPCAT_RANGE(PRIV(ucp_typerange)[(int)cc[1] * 2], PRIV(ucp_typerange)[(int)cc[1] * 2 + 1]);
+ break;
+
case PT_PC:
+ items = UCPCAT(cc[1]);
+ break;
+
+ case PT_WORD:
+ items = UCPCAT2(ucp_Mn, ucp_Pc) | UCPCAT_L | UCPCAT_N;
+ break;
+
case PT_ALNUM:
- unicode_status |= XCLASS_HAS_TYPE;
+ items = UCPCAT_L | UCPCAT_N;
break;
case PT_SCX:
@@ -7613,7 +7784,6 @@ while (*cc != XCL_END)
case PT_SPACE:
case PT_PXSPACE:
- case PT_WORD:
case PT_PXGRAPH:
case PT_PXPRINT:
case PT_PXPUNCT:
@@ -7622,6 +7792,7 @@ while (*cc != XCL_END)
case PT_CLIST:
case PT_UCNC:
+ case PT_PXXDIGIT:
unicode_status |= XCLASS_SAVE_CHAR;
break;
@@ -7637,11 +7808,42 @@ while (*cc != XCL_END)
SLJIT_UNREACHABLE();
break;
}
+
+ if (items > 0)
+ {
+ if (cc[-1] == XCL_NOTPROP)
+ items ^= UCPCAT_ALL;
+ category_list |= items;
+ unicode_status |= XCLASS_HAS_TYPE;
+ compares--;
+ }
+
cc += 2;
}
#endif /* SUPPORT_UNICODE */
}
+
+#ifdef SUPPORT_UNICODE
+if (category_list == UCPCAT_ALL)
+ {
+ /* All characters are accepted, same as dotall. */
+ compile_char1_matchingpath(common, OP_ALLANY, cc, backtracks, FALSE);
+ if (list == backtracks)
+ add_jump(compiler, backtracks, JUMP(SLJIT_JUMP));
+ return;
+ }
+
+if (compares == 0 && category_list == 0)
+ {
+ /* No characters are accepted, same as (*F) or dotall. */
+ compile_char1_matchingpath(common, OP_ALLANY, cc, backtracks, FALSE);
+ if (list != backtracks)
+ add_jump(compiler, backtracks, JUMP(SLJIT_JUMP));
+ return;
+ }
+#else /* !SUPPORT_UNICODE */
SLJIT_ASSERT(compares > 0);
+#endif /* SUPPORT_UNICODE */
/* We are not necessarily in UTF mode, even in 8-bit mode. */
cc = ccbegin;
@@ -7742,6 +7944,9 @@ if (unicode_status & XCLASS_NEEDS_UCD)
ccbegin = cc;
+ if (category_list != 0)
+ compares++;
+
if (unicode_status & XCLASS_HAS_BIDICL)
{
OP1(SLJIT_MOV_U16, TMP1, 0, SLJIT_MEM1(TMP2), (sljit_sw)PRIV(ucd_records) + SLJIT_OFFSETOF(ucd_record, scriptx_bidiclass));
@@ -7810,7 +8015,7 @@ if (unicode_status & XCLASS_NEEDS_UCD)
if (cc[-1] == XCL_NOTPROP)
invertcmp ^= 0x1;
- OP2U(SLJIT_AND32 | SLJIT_SET_Z, SLJIT_MEM1(TMP1), (sljit_sw)(PRIV(ucd_boolprop_sets) + (cc[1] >> 5)), SLJIT_IMM, (sljit_sw)1 << (cc[1] & 0x1f));
+ OP2U(SLJIT_AND32 | SLJIT_SET_Z, SLJIT_MEM1(TMP1), (sljit_sw)(PRIV(ucd_boolprop_sets) + (cc[1] >> 5)), SLJIT_IMM, (sljit_sw)(1u << (cc[1] & 0x1f)));
add_jump(compiler, compares > 0 ? list : backtracks, JUMP(SLJIT_NOT_ZERO ^ invertcmp));
}
cc += 2;
@@ -7921,7 +8126,7 @@ if (unicode_status & XCLASS_NEEDS_UCD)
invertcmp ^= 0x1;
}
- OP2U(SLJIT_AND32 | SLJIT_SET_Z, SLJIT_MEM1(TMP1), (sljit_sw)(PRIV(ucd_script_sets) + (cc[1] >> 5)), SLJIT_IMM, (sljit_sw)1 << (cc[1] & 0x1f));
+ OP2U(SLJIT_AND32 | SLJIT_SET_Z, SLJIT_MEM1(TMP1), (sljit_sw)(PRIV(ucd_script_sets) + (cc[1] >> 5)), SLJIT_IMM, (sljit_sw)(1u << (cc[1] & 0x1f)));
add_jump(compiler, compares > 0 ? list : backtracks, JUMP(SLJIT_NOT_ZERO ^ invertcmp));
if (jump != NULL)
@@ -7946,7 +8151,16 @@ if (unicode_status & XCLASS_NEEDS_UCD)
if (unicode_status & XCLASS_SAVE_CHAR)
typereg = RETURN_ADDR;
- OP1(SLJIT_MOV_U8, typereg, 0, SLJIT_MEM1(TMP2), (sljit_sw)PRIV(ucd_records) + SLJIT_OFFSETOF(ucd_record, chartype));
+ OP1(SLJIT_MOV_U8, TMP2, 0, SLJIT_MEM1(TMP2), (sljit_sw)PRIV(ucd_records) + SLJIT_OFFSETOF(ucd_record, chartype));
+ OP2(SLJIT_SHL, typereg, 0, SLJIT_IMM, 1, TMP2, 0);
+
+ if (category_list > 0)
+ {
+ compares--;
+ invertcmp = (compares == 0 && list != backtracks);
+ OP2U(SLJIT_AND | SLJIT_SET_Z, typereg, 0, SLJIT_IMM, category_list);
+ add_jump(compiler, compares > 0 ? list : backtracks, JUMP(SLJIT_NOT_ZERO ^ invertcmp));
+ }
}
}
#endif /* SUPPORT_UNICODE */
@@ -7954,9 +8168,6 @@ if (unicode_status & XCLASS_NEEDS_UCD)
/* Generating code. */
charoffset = 0;
numberofcmps = 0;
-#ifdef SUPPORT_UNICODE
-typeoffset = 0;
-#endif /* SUPPORT_UNICODE */
while (*cc != XCL_END)
{
@@ -8024,36 +8235,17 @@ while (*cc != XCL_END)
switch(*cc)
{
case PT_ANY:
- if (!invertcmp)
- jump = JUMP(SLJIT_JUMP);
- break;
-
case PT_LAMP:
- OP2U(SLJIT_SUB | SLJIT_SET_Z, typereg, 0, SLJIT_IMM, ucp_Lu - typeoffset);
- OP_FLAGS(SLJIT_MOV, TMP2, 0, SLJIT_EQUAL);
- OP2U(SLJIT_SUB | SLJIT_SET_Z, typereg, 0, SLJIT_IMM, ucp_Ll - typeoffset);
- OP_FLAGS(SLJIT_OR, TMP2, 0, SLJIT_EQUAL);
- OP2U(SLJIT_SUB | SLJIT_SET_Z, typereg, 0, SLJIT_IMM, ucp_Lt - typeoffset);
- OP_FLAGS(SLJIT_OR | SLJIT_SET_Z, TMP2, 0, SLJIT_EQUAL);
- jump = JUMP(SLJIT_NOT_ZERO ^ invertcmp);
- break;
-
case PT_GC:
- c = PRIV(ucp_typerange)[(int)cc[1] * 2];
- SET_TYPE_OFFSET(c);
- jump = CMP(SLJIT_LESS_EQUAL ^ invertcmp, typereg, 0, SLJIT_IMM, PRIV(ucp_typerange)[(int)cc[1] * 2 + 1] - c);
- break;
-
case PT_PC:
- jump = CMP(SLJIT_EQUAL ^ invertcmp, typereg, 0, SLJIT_IMM, (int)cc[1] - typeoffset);
- break;
-
case PT_SC:
case PT_SCX:
case PT_BOOL:
case PT_BIDICL:
+ case PT_WORD:
+ case PT_ALNUM:
compares++;
- /* Do nothing. */
+ /* Already handled. */
break;
case PT_SPACE:
@@ -8068,24 +8260,8 @@ while (*cc != XCL_END)
OP2U(SLJIT_SUB | SLJIT_SET_Z, TMP1, 0, SLJIT_IMM, 0x180e - 0x9);
OP_FLAGS(SLJIT_OR, TMP2, 0, SLJIT_EQUAL);
- SET_TYPE_OFFSET(ucp_Zl);
- OP2U(SLJIT_SUB | SLJIT_SET_LESS_EQUAL, typereg, 0, SLJIT_IMM, ucp_Zs - ucp_Zl);
- OP_FLAGS(SLJIT_OR | SLJIT_SET_Z, TMP2, 0, SLJIT_LESS_EQUAL);
- jump = JUMP(SLJIT_NOT_ZERO ^ invertcmp);
- break;
-
- case PT_WORD:
- OP2U(SLJIT_SUB | SLJIT_SET_Z, TMP1, 0, SLJIT_IMM, (sljit_sw)(CHAR_UNDERSCORE - charoffset));
- OP_FLAGS(SLJIT_MOV, TMP2, 0, SLJIT_EQUAL);
- /* Fall through. */
-
- case PT_ALNUM:
- SET_TYPE_OFFSET(ucp_Ll);
- OP2U(SLJIT_SUB | SLJIT_SET_LESS_EQUAL, typereg, 0, SLJIT_IMM, ucp_Lu - ucp_Ll);
- OP_FLAGS((*cc == PT_ALNUM) ? SLJIT_MOV : SLJIT_OR, TMP2, 0, SLJIT_LESS_EQUAL);
- SET_TYPE_OFFSET(ucp_Nd);
- OP2U(SLJIT_SUB | SLJIT_SET_LESS_EQUAL, typereg, 0, SLJIT_IMM, ucp_No - ucp_Nd);
- OP_FLAGS(SLJIT_OR | SLJIT_SET_Z, TMP2, 0, SLJIT_LESS_EQUAL);
+ OP2U(SLJIT_AND | SLJIT_SET_Z, typereg, 0, SLJIT_IMM, UCPCAT_RANGE(ucp_Zl, ucp_Zs));
+ OP_FLAGS(SLJIT_OR | SLJIT_SET_Z, TMP2, 0, SLJIT_NOT_ZERO);
jump = JUMP(SLJIT_NOT_ZERO ^ invertcmp);
break;
@@ -8160,13 +8336,13 @@ while (*cc != XCL_END)
break;
case PT_PXGRAPH:
- /* C and Z groups are the farthest two groups. */
- SET_TYPE_OFFSET(ucp_Ll);
- OP2U(SLJIT_SUB | SLJIT_SET_GREATER, typereg, 0, SLJIT_IMM, ucp_So - ucp_Ll);
- OP_FLAGS(SLJIT_MOV, TMP2, 0, SLJIT_GREATER);
+ OP2U(SLJIT_AND | SLJIT_SET_Z, typereg, 0, SLJIT_IMM, UCPCAT_RANGE(ucp_Cc, ucp_Cs) | UCPCAT_RANGE(ucp_Zl, ucp_Zs));
+ OP_FLAGS(SLJIT_MOV, TMP2, 0, SLJIT_NOT_ZERO);
- jump = CMP(SLJIT_NOT_EQUAL, typereg, 0, SLJIT_IMM, ucp_Cf - ucp_Ll);
+ OP2U(SLJIT_AND | SLJIT_SET_Z, typereg, 0, SLJIT_IMM, UCPCAT(ucp_Cf));
+ jump = JUMP(SLJIT_ZERO);
+ c = charoffset;
/* In case of ucp_Cf, we overwrite the result. */
SET_CHAR_OFFSET(0x2066);
OP2U(SLJIT_SUB | SLJIT_SET_LESS_EQUAL, TMP1, 0, SLJIT_IMM, 0x2069 - 0x2066);
@@ -8178,21 +8354,21 @@ while (*cc != XCL_END)
OP2U(SLJIT_SUB | SLJIT_SET_Z, TMP1, 0, SLJIT_IMM, 0x180e - 0x2066);
OP_FLAGS(SLJIT_OR, TMP2, 0, SLJIT_EQUAL);
+ /* Restore charoffset. */
+ SET_CHAR_OFFSET(c);
+
JUMPHERE(jump);
jump = CMP(SLJIT_ZERO ^ invertcmp, TMP2, 0, SLJIT_IMM, 0);
break;
case PT_PXPRINT:
- /* C and Z groups are the farthest two groups. */
- SET_TYPE_OFFSET(ucp_Ll);
- OP2U(SLJIT_SUB | SLJIT_SET_GREATER, typereg, 0, SLJIT_IMM, ucp_So - ucp_Ll);
- OP_FLAGS(SLJIT_MOV, TMP2, 0, SLJIT_GREATER);
-
- OP2U(SLJIT_SUB | SLJIT_SET_Z, typereg, 0, SLJIT_IMM, ucp_Zs - ucp_Ll);
- OP_FLAGS(SLJIT_AND, TMP2, 0, SLJIT_NOT_EQUAL);
+ OP2U(SLJIT_AND | SLJIT_SET_Z, typereg, 0, SLJIT_IMM, UCPCAT_RANGE(ucp_Cc, ucp_Cs) | UCPCAT2(ucp_Zl, ucp_Zp));
+ OP_FLAGS(SLJIT_MOV, TMP2, 0, SLJIT_NOT_ZERO);
- jump = CMP(SLJIT_NOT_EQUAL, typereg, 0, SLJIT_IMM, ucp_Cf - ucp_Ll);
+ OP2U(SLJIT_AND | SLJIT_SET_Z, typereg, 0, SLJIT_IMM, UCPCAT(ucp_Cf));
+ jump = JUMP(SLJIT_ZERO);
+ c = charoffset;
/* In case of ucp_Cf, we overwrite the result. */
SET_CHAR_OFFSET(0x2066);
OP2U(SLJIT_SUB | SLJIT_SET_LESS_EQUAL, TMP1, 0, SLJIT_IMM, 0x2069 - 0x2066);
@@ -8201,22 +8377,54 @@ while (*cc != XCL_END)
OP2U(SLJIT_SUB | SLJIT_SET_Z, TMP1, 0, SLJIT_IMM, 0x061c - 0x2066);
OP_FLAGS(SLJIT_OR, TMP2, 0, SLJIT_EQUAL);
+ /* Restore charoffset. */
+ SET_CHAR_OFFSET(c);
+
JUMPHERE(jump);
jump = CMP(SLJIT_ZERO ^ invertcmp, TMP2, 0, SLJIT_IMM, 0);
break;
case PT_PXPUNCT:
- SET_TYPE_OFFSET(ucp_Sc);
- OP2U(SLJIT_SUB | SLJIT_SET_LESS_EQUAL, typereg, 0, SLJIT_IMM, ucp_So - ucp_Sc);
- OP_FLAGS(SLJIT_MOV, TMP2, 0, SLJIT_LESS_EQUAL);
+ OP2U(SLJIT_AND | SLJIT_SET_Z, typereg, 0, SLJIT_IMM, UCPCAT_RANGE(ucp_Sc, ucp_So));
+ OP_FLAGS(SLJIT_MOV, TMP2, 0, SLJIT_NOT_ZERO);
SET_CHAR_OFFSET(0);
OP2U(SLJIT_SUB | SLJIT_SET_LESS_EQUAL, TMP1, 0, SLJIT_IMM, 0x7f);
OP_FLAGS(SLJIT_AND, TMP2, 0, SLJIT_LESS_EQUAL);
- SET_TYPE_OFFSET(ucp_Pc);
- OP2U(SLJIT_SUB | SLJIT_SET_LESS_EQUAL, typereg, 0, SLJIT_IMM, ucp_Ps - ucp_Pc);
- OP_FLAGS(SLJIT_OR | SLJIT_SET_Z, TMP2, 0, SLJIT_LESS_EQUAL);
+ OP2U(SLJIT_AND | SLJIT_SET_Z, typereg, 0, SLJIT_IMM, UCPCAT_RANGE(ucp_Pc, ucp_Ps));
+ OP_FLAGS(SLJIT_OR | SLJIT_SET_Z, TMP2, 0, SLJIT_NOT_ZERO);
+ jump = JUMP(SLJIT_NOT_ZERO ^ invertcmp);
+ break;
+
+ case PT_PXXDIGIT:
+ SET_CHAR_OFFSET(CHAR_A);
+ OP2(SLJIT_AND, TMP2, 0, TMP1, 0, SLJIT_IMM, ~0x20);
+ OP2U(SLJIT_SUB | SLJIT_SET_LESS_EQUAL, TMP2, 0, SLJIT_IMM, CHAR_F - CHAR_A);
+ OP_FLAGS(SLJIT_MOV, TMP2, 0, SLJIT_LESS_EQUAL);
+
+ SET_CHAR_OFFSET(CHAR_0);
+ OP2U(SLJIT_SUB | SLJIT_SET_LESS_EQUAL, TMP1, 0, SLJIT_IMM, CHAR_9 - CHAR_0);
+ OP_FLAGS(SLJIT_OR, TMP2, 0, SLJIT_LESS_EQUAL);
+
+ SET_CHAR_OFFSET(0xff10);
+ jump = CMP(SLJIT_GREATER, TMP1, 0, SLJIT_IMM, 0xff46 - 0xff10);
+
+ OP2U(SLJIT_SUB | SLJIT_SET_LESS_EQUAL, TMP1, 0, SLJIT_IMM, 0xff19 - 0xff10);
+ OP_FLAGS(SLJIT_OR, TMP2, 0, SLJIT_LESS_EQUAL);
+
+ SET_CHAR_OFFSET(0xff21);
+ OP2U(SLJIT_SUB | SLJIT_SET_LESS_EQUAL, TMP1, 0, SLJIT_IMM, 0xff26 - 0xff21);
+ OP_FLAGS(SLJIT_OR, TMP2, 0, SLJIT_LESS_EQUAL);
+
+ SET_CHAR_OFFSET(0xff41);
+ OP2U(SLJIT_SUB | SLJIT_SET_LESS_EQUAL, TMP1, 0, SLJIT_IMM, 0xff46 - 0xff41);
+ OP_FLAGS(SLJIT_OR, TMP2, 0, SLJIT_LESS_EQUAL);
+
+ SET_CHAR_OFFSET(0xff10);
+
+ JUMPHERE(jump);
+ OP2U(SLJIT_SUB | SLJIT_SET_Z, TMP2, 0, SLJIT_IMM, 0);
jump = JUMP(SLJIT_NOT_ZERO ^ invertcmp);
break;
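Taken together, the PT_PXXDIGIT sequence above accepts the ASCII hex digits plus their fullwidth forms. An equivalent scalar check (illustrative only; ASCII character values assumed):

static BOOL is_ucp_xdigit(uint32_t c)
{
return (c >= '0' && c <= '9') ||
       (c >= 'A' && c <= 'F') || (c >= 'a' && c <= 'f') ||
       (c >= 0xff10 && c <= 0xff19) ||   /* fullwidth 0-9 */
       (c >= 0xff21 && c <= 0xff26) ||   /* fullwidth A-F */
       (c >= 0xff41 && c <= 0xff46);     /* fullwidth a-f */
}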
@@ -8232,6 +8440,7 @@ while (*cc != XCL_END)
add_jump(compiler, compares > 0 ? list : backtracks, jump);
}
+SLJIT_ASSERT(compares == 0);
if (found != NULL)
set_jumps(found, LABEL());
}
@@ -8244,11 +8453,7 @@ if (found != NULL)
static PCRE2_SPTR compile_simple_assertion_matchingpath(compiler_common *common, PCRE2_UCHAR type, PCRE2_SPTR cc, jump_list **backtracks)
{
DEFINE_COMPILER;
-int length;
struct sljit_jump *jump[4];
-#ifdef SUPPORT_UNICODE
-struct sljit_label *label;
-#endif /* SUPPORT_UNICODE */
switch(type)
{
@@ -8276,16 +8481,18 @@ switch(type)
case OP_NOT_WORD_BOUNDARY:
case OP_WORD_BOUNDARY:
- add_jump(compiler, &common->wordboundary, JUMP(SLJIT_FAST_CALL));
+ case OP_NOT_UCP_WORD_BOUNDARY:
+ case OP_UCP_WORD_BOUNDARY:
+ add_jump(compiler, (type == OP_NOT_WORD_BOUNDARY || type == OP_WORD_BOUNDARY) ? &common->wordboundary : &common->ucp_wordboundary, JUMP(SLJIT_FAST_CALL));
#ifdef SUPPORT_UNICODE
if (common->invalid_utf)
{
- add_jump(compiler, backtracks, CMP((type == OP_NOT_WORD_BOUNDARY) ? SLJIT_NOT_EQUAL : SLJIT_SIG_LESS_EQUAL, TMP2, 0, SLJIT_IMM, 0));
+ add_jump(compiler, backtracks, CMP((type == OP_NOT_WORD_BOUNDARY || type == OP_NOT_UCP_WORD_BOUNDARY) ? SLJIT_NOT_EQUAL : SLJIT_SIG_LESS_EQUAL, TMP2, 0, SLJIT_IMM, 0));
return cc;
}
#endif /* SUPPORT_UNICODE */
sljit_set_current_flags(compiler, SLJIT_SET_Z);
- add_jump(compiler, backtracks, JUMP(type == OP_NOT_WORD_BOUNDARY ? SLJIT_NOT_ZERO : SLJIT_ZERO));
+ add_jump(compiler, backtracks, JUMP((type == OP_NOT_WORD_BOUNDARY || type == OP_NOT_UCP_WORD_BOUNDARY) ? SLJIT_NOT_ZERO : SLJIT_ZERO));
return cc;
case OP_EODN:
@@ -8481,36 +8688,6 @@ switch(type)
}
JUMPHERE(jump[0]);
return cc;
-
- case OP_REVERSE:
- length = GET(cc, 0);
- if (length == 0)
- return cc + LINK_SIZE;
- if (HAS_VIRTUAL_REGISTERS)
- {
- OP1(SLJIT_MOV, TMP1, 0, ARGUMENTS, 0);
- OP1(SLJIT_MOV, TMP2, 0, SLJIT_MEM1(TMP1), SLJIT_OFFSETOF(jit_arguments, begin));
- }
- else
- OP1(SLJIT_MOV, TMP2, 0, SLJIT_MEM1(ARGUMENTS), SLJIT_OFFSETOF(jit_arguments, begin));
-#ifdef SUPPORT_UNICODE
- if (common->utf)
- {
- OP1(SLJIT_MOV, TMP3, 0, SLJIT_IMM, length);
- label = LABEL();
- add_jump(compiler, backtracks, CMP(SLJIT_LESS_EQUAL, STR_PTR, 0, TMP2, 0));
- move_back(common, backtracks, FALSE);
- OP2(SLJIT_SUB | SLJIT_SET_Z, TMP3, 0, TMP3, 0, SLJIT_IMM, 1);
- JUMPTO(SLJIT_NOT_ZERO, label);
- }
- else
-#endif
- {
- OP2(SLJIT_SUB, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(length));
- add_jump(compiler, backtracks, CMP(SLJIT_LESS, STR_PTR, 0, TMP2, 0));
- }
- check_start_used_ptr(common);
- return cc + LINK_SIZE;
}
SLJIT_UNREACHABLE();
return cc;
@@ -8520,6 +8697,10 @@ return cc;
#if PCRE2_CODE_UNIT_WIDTH != 32
+/* The code in this function copies the logic of the interpreter function that
+is defined in the pcre2_extuni.c source. If that code is updated, this
+function, and those below it, must be kept in step (note by PH, June 2024). */
+
static PCRE2_SPTR SLJIT_FUNC do_extuni_utf(jit_arguments *args, PCRE2_SPTR cc)
{
PCRE2_SPTR start_subject = args->begin;
@@ -8527,6 +8708,7 @@ PCRE2_SPTR end_subject = args->end;
int lgb, rgb, ricount;
PCRE2_SPTR prevcc, endcc, bptr;
BOOL first = TRUE;
+BOOL was_ep_ZWJ = FALSE;
uint32_t c;
prevcc = cc;
@@ -8547,6 +8729,12 @@ do
if ((PRIV(ucp_gbtable)[lgb] & (1 << rgb)) == 0)
break;
+ /* ZWJ followed by Extended Pictographic is allowed only if the ZWJ was
+ preceded by Extended Pictographic. */
+
+ if (lgb == ucp_gbZWJ && rgb == ucp_gbExtended_Pictographic && !was_ep_ZWJ)
+ break;
+
/* Not breaking between Regional Indicators is allowed only if there
are an even number of preceding RIs. */
@@ -8571,11 +8759,15 @@ do
if ((ricount & 1) != 0) break; /* Grapheme break required */
}
- /* If Extend or ZWJ follows Extended_Pictographic, do not update lgb; this
- allows any number of them before a following Extended_Pictographic. */
+ /* Set a flag when ZWJ follows Extended Pictographic (with optional Extend in
+ between; see next statement). */
+
+ was_ep_ZWJ = (lgb == ucp_gbExtended_Pictographic && rgb == ucp_gbZWJ);
+
+ /* If Extend follows Extended_Pictographic, do not update lgb; this allows
+ any number of them before a following ZWJ. */
- if ((rgb != ucp_gbExtend && rgb != ucp_gbZWJ) ||
- lgb != ucp_gbExtended_Pictographic)
+ if (rgb != ucp_gbExtend || lgb != ucp_gbExtended_Pictographic)
lgb = rgb;
prevcc = endcc;
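The was_ep_ZWJ flag implements the "Extended_Pictographic Extend* ZWJ x Extended_Pictographic" clause of the grapheme-cluster rules. For instance (illustrative):

/* U+1F469 U+200D U+1F680 (woman, ZWJ, rocket) stays a single cluster,
   because the ZWJ is preceded by an Extended_Pictographic character; the
   same U+200D U+1F680 pair at the start of a cluster breaks before the
   rocket, since was_ep_ZWJ is still FALSE at that point. */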
@@ -8588,6 +8780,10 @@ return endcc;
#endif /* PCRE2_CODE_UNIT_WIDTH != 32 */
+/* The code in this function copies the logic of the interpreter function that
+is defined in the pcre2_extuni.c source. If that code is updated, this
+function, and the one below it, must be kept in step (note by PH, June 2024). */
+
static PCRE2_SPTR SLJIT_FUNC do_extuni_utf_invalid(jit_arguments *args, PCRE2_SPTR cc)
{
PCRE2_SPTR start_subject = args->begin;
@@ -8595,6 +8791,7 @@ PCRE2_SPTR end_subject = args->end;
int lgb, rgb, ricount;
PCRE2_SPTR prevcc, endcc, bptr;
BOOL first = TRUE;
+BOOL was_ep_ZWJ = FALSE;
uint32_t c;
prevcc = cc;
@@ -8615,6 +8812,12 @@ do
if ((PRIV(ucp_gbtable)[lgb] & (1 << rgb)) == 0)
break;
+ /* ZWJ followed by Extended Pictographic is allowed only if the ZWJ was
+ preceded by Extended Pictographic. */
+
+ if (lgb == ucp_gbZWJ && rgb == ucp_gbExtended_Pictographic && !was_ep_ZWJ)
+ break;
+
/* Not breaking between Regional Indicators is allowed only if there
are an even number of preceding RIs. */
@@ -8638,11 +8841,15 @@ do
break; /* Grapheme break required */
}
- /* If Extend or ZWJ follows Extended_Pictographic, do not update lgb; this
- allows any number of them before a following Extended_Pictographic. */
+ /* Set a flag when ZWJ follows Extended Pictographic (with optional Extend in
+ between; see next statement). */
- if ((rgb != ucp_gbExtend && rgb != ucp_gbZWJ) ||
- lgb != ucp_gbExtended_Pictographic)
+ was_ep_ZWJ = (lgb == ucp_gbExtended_Pictographic && rgb == ucp_gbZWJ);
+
+ /* If Extend follows Extended_Pictographic, do not update lgb; this allows
+ any number of them before a following ZWJ. */
+
+ if (rgb != ucp_gbExtend || lgb != ucp_gbExtended_Pictographic)
lgb = rgb;
prevcc = endcc;
@@ -8653,6 +8860,10 @@ while (cc < end_subject);
return endcc;
}
+/* The code in this function copies the logic of the interpreter function that
+is defined in the pcre2_extuni.c source. If that code is updated, this
+function must be kept in step (note by PH, June 2024). */
+
static PCRE2_SPTR SLJIT_FUNC do_extuni_no_utf(jit_arguments *args, PCRE2_SPTR cc)
{
PCRE2_SPTR start_subject = args->begin;
@@ -8660,6 +8871,7 @@ PCRE2_SPTR end_subject = args->end;
int lgb, rgb, ricount;
PCRE2_SPTR bptr;
uint32_t c;
+BOOL was_ep_ZWJ = FALSE;
/* Patch by PH */
/* GETCHARINC(c, cc); */
@@ -8667,7 +8879,7 @@ c = *cc++;
#if PCRE2_CODE_UNIT_WIDTH == 32
if (c >= 0x110000)
- return NULL;
+ return cc;
#endif /* PCRE2_CODE_UNIT_WIDTH == 32 */
lgb = UCD_GRAPHBREAK(c);
@@ -8683,6 +8895,12 @@ while (cc < end_subject)
if ((PRIV(ucp_gbtable)[lgb] & (1 << rgb)) == 0)
break;
+ /* ZWJ followed by Extended Pictographic is allowed only if the ZWJ was
+ preceded by Extended Pictographic. */
+
+ if (lgb == ucp_gbZWJ && rgb == ucp_gbExtended_Pictographic && !was_ep_ZWJ)
+ break;
+
/* Not breaking between Regional Indicators is allowed only if there
are an even number of preceding RIs. */
@@ -8710,11 +8928,15 @@ while (cc < end_subject)
break; /* Grapheme break required */
}
- /* If Extend or ZWJ follows Extended_Pictographic, do not update lgb; this
- allows any number of them before a following Extended_Pictographic. */
+ /* Set a flag when ZWJ follows Extended Pictographic (with optional Extend in
+ between; see next statement). */
- if ((rgb != ucp_gbExtend && rgb != ucp_gbZWJ) ||
- lgb != ucp_gbExtended_Pictographic)
+ was_ep_ZWJ = (lgb == ucp_gbExtended_Pictographic && rgb == ucp_gbZWJ);
+
+ /* If Extend follows Extended_Pictographic, do not update lgb; this allows
+ any number of them before a following ZWJ. */
+
+ if (rgb != ucp_gbExtend || lgb != ucp_gbExtended_Pictographic)
lgb = rgb;
cc++;
@@ -8809,35 +9031,14 @@ switch(type)
if (check_str_ptr)
detect_partial_match(common, backtracks);
#ifdef SUPPORT_UNICODE
- if (common->utf)
+ if (common->utf && common->invalid_utf)
{
- if (common->invalid_utf)
- {
- read_char(common, 0, READ_CHAR_MAX, backtracks, READ_CHAR_UPDATE_STR_PTR);
- return cc;
- }
-
-#if PCRE2_CODE_UNIT_WIDTH == 8 || PCRE2_CODE_UNIT_WIDTH == 16
- OP1(MOV_UCHAR, TMP1, 0, SLJIT_MEM1(STR_PTR), 0);
- OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(1));
-#if PCRE2_CODE_UNIT_WIDTH == 8
- jump[0] = CMP(SLJIT_LESS, TMP1, 0, SLJIT_IMM, 0xc0);
- OP1(SLJIT_MOV_U8, TMP1, 0, SLJIT_MEM1(TMP1), (sljit_sw)PRIV(utf8_table4) - 0xc0);
- OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, TMP1, 0);
-#elif PCRE2_CODE_UNIT_WIDTH == 16
- jump[0] = CMP(SLJIT_LESS, TMP1, 0, SLJIT_IMM, 0xd800);
- OP2(SLJIT_AND, TMP1, 0, TMP1, 0, SLJIT_IMM, 0xfc00);
- OP2U(SLJIT_SUB | SLJIT_SET_Z, TMP1, 0, SLJIT_IMM, 0xd800);
- OP_FLAGS(SLJIT_MOV, TMP1, 0, SLJIT_EQUAL);
- OP2(SLJIT_SHL, TMP1, 0, TMP1, 0, SLJIT_IMM, 1);
- OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, TMP1, 0);
-#endif /* PCRE2_CODE_UNIT_WIDTH == 8 */
- JUMPHERE(jump[0]);
+ read_char(common, 0, READ_CHAR_MAX, backtracks, READ_CHAR_UPDATE_STR_PTR);
return cc;
-#endif /* PCRE2_CODE_UNIT_WIDTH == [8|16] */
}
#endif /* SUPPORT_UNICODE */
- OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(1));
+
+ skip_valid_char(common);
return cc;
case OP_ANYBYTE:
@@ -8928,7 +9129,7 @@ switch(type)
#else
sljit_emit_icall(compiler, SLJIT_CALL, SLJIT_ARGS2(W, W, W), SLJIT_IMM,
common->invalid_utf ? SLJIT_FUNC_ADDR(do_extuni_utf_invalid) : SLJIT_FUNC_ADDR(do_extuni_no_utf));
- if (!common->utf || common->invalid_utf)
+ if (common->invalid_utf)
add_jump(compiler, backtracks, CMP(SLJIT_EQUAL, SLJIT_RETURN_REG, 0, SLJIT_IMM, 0));
#endif
@@ -8990,7 +9191,7 @@ switch(type)
if (sljit_has_cpu_feature(SLJIT_HAS_CMOV))
{
OP2U(SLJIT_SUB | SLJIT_SET_Z, TMP1, 0, SLJIT_IMM, oc);
- CMOV(SLJIT_EQUAL, TMP1, SLJIT_IMM, c);
+ SELECT(SLJIT_EQUAL, TMP1, SLJIT_IMM, c, TMP1);
add_jump(compiler, backtracks, CMP(SLJIT_NOT_EQUAL, TMP1, 0, SLJIT_IMM, c));
}
else
@@ -9509,14 +9710,16 @@ if (!minimize)
if (ref)
OP1(SLJIT_MOV, TMP1, 0, SLJIT_MEM1(SLJIT_SP), OVECTOR(offset));
OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), STACK(0), SLJIT_IMM, 0);
+
if (ref)
{
- add_jump(compiler, &backtrack->topbacktracks, CMP(SLJIT_EQUAL, TMP1, 0, SLJIT_MEM1(SLJIT_SP), OVECTOR(1)));
+ if (!common->unset_backref)
+ add_jump(compiler, &backtrack->own_backtracks, CMP(SLJIT_EQUAL, TMP1, 0, SLJIT_MEM1(SLJIT_SP), OVECTOR(1)));
zerolength = CMP(SLJIT_EQUAL, TMP1, 0, SLJIT_MEM1(SLJIT_SP), OVECTOR(offset + 1));
}
else
{
- compile_dnref_search(common, ccbegin, &backtrack->topbacktracks);
+ compile_dnref_search(common, ccbegin, &backtrack->own_backtracks);
OP1(SLJIT_MOV, TMP1, 0, SLJIT_MEM1(TMP2), 0);
OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), POSSESSIVE1, TMP2, 0);
zerolength = CMP(SLJIT_EQUAL, TMP1, 0, SLJIT_MEM1(TMP2), sizeof(sljit_sw));
@@ -9529,7 +9732,7 @@ if (!minimize)
label = LABEL();
if (!ref)
OP1(SLJIT_MOV, TMP2, 0, SLJIT_MEM1(SLJIT_SP), POSSESSIVE1);
- compile_ref_matchingpath(common, ccbegin, &backtrack->topbacktracks, FALSE, FALSE);
+ compile_ref_matchingpath(common, ccbegin, &backtrack->own_backtracks, FALSE, FALSE);
if (min > 1 || max > 1)
{
@@ -9591,12 +9794,13 @@ else
{
if (ref)
{
- add_jump(compiler, &backtrack->topbacktracks, CMP(SLJIT_EQUAL, TMP1, 0, SLJIT_MEM1(SLJIT_SP), OVECTOR(1)));
+ if (!common->unset_backref)
+ add_jump(compiler, &backtrack->own_backtracks, CMP(SLJIT_EQUAL, TMP1, 0, SLJIT_MEM1(SLJIT_SP), OVECTOR(1)));
zerolength = CMP(SLJIT_EQUAL, TMP1, 0, SLJIT_MEM1(SLJIT_SP), OVECTOR(offset + 1));
}
else
{
- compile_dnref_search(common, ccbegin, &backtrack->topbacktracks);
+ compile_dnref_search(common, ccbegin, &backtrack->own_backtracks);
OP1(SLJIT_MOV, TMP1, 0, SLJIT_MEM1(TMP2), 0);
OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), STACK(2), TMP2, 0);
zerolength = CMP(SLJIT_EQUAL, TMP1, 0, SLJIT_MEM1(TMP2), sizeof(sljit_sw));
@@ -9605,11 +9809,11 @@ else
BACKTRACK_AS(ref_iterator_backtrack)->matchingpath = LABEL();
if (max > 0)
- add_jump(compiler, &backtrack->topbacktracks, CMP(SLJIT_GREATER_EQUAL, SLJIT_MEM1(STACK_TOP), STACK(1), SLJIT_IMM, max));
+ add_jump(compiler, &backtrack->own_backtracks, CMP(SLJIT_GREATER_EQUAL, SLJIT_MEM1(STACK_TOP), STACK(1), SLJIT_IMM, max));
if (!ref)
OP1(SLJIT_MOV, TMP2, 0, SLJIT_MEM1(STACK_TOP), STACK(2));
-compile_ref_matchingpath(common, ccbegin, &backtrack->topbacktracks, TRUE, TRUE);
+compile_ref_matchingpath(common, ccbegin, &backtrack->own_backtracks, TRUE, TRUE);
OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), STACK(0), STR_PTR, 0);
if (min > 1)
@@ -9684,7 +9888,7 @@ if (entry->entry_label == NULL)
else
JUMPTO(SLJIT_FAST_CALL, entry->entry_label);
/* Leave if the match failed. */
-add_jump(compiler, &backtrack->topbacktracks, CMP(SLJIT_EQUAL, TMP1, 0, SLJIT_IMM, 0));
+add_jump(compiler, &backtrack->own_backtracks, CMP(SLJIT_EQUAL, TMP1, 0, SLJIT_IMM, 0));
BACKTRACK_AS(recurse_backtrack)->matchingpath = LABEL();
return cc + 1 + LINK_SIZE;
}
@@ -9812,7 +10016,7 @@ free_stack(common, callout_arg_size);
/* Check return value. */
OP2U(SLJIT_SUB32 | SLJIT_SET_Z | SLJIT_SET_SIG_GREATER, SLJIT_RETURN_REG, 0, SLJIT_IMM, 0);
-add_jump(compiler, &backtrack->topbacktracks, JUMP(SLJIT_SIG_GREATER));
+add_jump(compiler, &backtrack->own_backtracks, JUMP(SLJIT_SIG_GREATER));
if (common->abort_label == NULL)
add_jump(compiler, &common->abort, JUMP(SLJIT_NOT_EQUAL) /* SIG_LESS */);
else
@@ -9823,6 +10027,106 @@ return cc + callout_length;
#undef CALLOUT_ARG_SIZE
#undef CALLOUT_ARG_OFFSET
+static PCRE2_SPTR compile_reverse_matchingpath(compiler_common *common, PCRE2_SPTR cc, backtrack_common *parent)
+{
+DEFINE_COMPILER;
+backtrack_common *backtrack = NULL;
+jump_list **reverse_failed;
+unsigned int lmin, lmax;
+#ifdef SUPPORT_UNICODE
+struct sljit_jump *jump;
+struct sljit_label *label;
+#endif
+
+SLJIT_ASSERT(parent->top == NULL);
+
+if (*cc == OP_REVERSE)
+ {
+ reverse_failed = &parent->own_backtracks;
+ lmin = GET2(cc, 1);
+ lmax = lmin;
+ cc += 1 + IMM2_SIZE;
+
+ SLJIT_ASSERT(lmin > 0);
+ }
+else
+ {
+ SLJIT_ASSERT(*cc == OP_VREVERSE);
+ PUSH_BACKTRACK(sizeof(vreverse_backtrack), cc, NULL);
+
+ reverse_failed = &backtrack->own_backtracks;
+ lmin = GET2(cc, 1);
+ lmax = GET2(cc, 1 + IMM2_SIZE);
+ cc += 1 + 2 * IMM2_SIZE;
+
+ SLJIT_ASSERT(lmin < lmax);
+ }
+
+if (HAS_VIRTUAL_REGISTERS)
+ {
+ OP1(SLJIT_MOV, TMP1, 0, ARGUMENTS, 0);
+ OP1(SLJIT_MOV, TMP2, 0, SLJIT_MEM1(TMP1), SLJIT_OFFSETOF(jit_arguments, begin));
+ }
+else
+ OP1(SLJIT_MOV, TMP2, 0, SLJIT_MEM1(ARGUMENTS), SLJIT_OFFSETOF(jit_arguments, begin));
+
+#ifdef SUPPORT_UNICODE
+if (common->utf)
+ {
+ if (lmin > 0)
+ {
+ OP1(SLJIT_MOV, TMP3, 0, SLJIT_IMM, lmin);
+ label = LABEL();
+ add_jump(compiler, reverse_failed, CMP(SLJIT_LESS_EQUAL, STR_PTR, 0, TMP2, 0));
+ move_back(common, reverse_failed, FALSE);
+ OP2(SLJIT_SUB | SLJIT_SET_Z, TMP3, 0, TMP3, 0, SLJIT_IMM, 1);
+ JUMPTO(SLJIT_NOT_ZERO, label);
+ }
+
+ if (lmin < lmax)
+ {
+ OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), STACK(3), STR_PTR, 0);
+
+ OP1(SLJIT_MOV, TMP3, 0, SLJIT_IMM, lmax - lmin);
+ label = LABEL();
+ jump = CMP(SLJIT_LESS_EQUAL, STR_PTR, 0, TMP2, 0);
+ move_back(common, reverse_failed, FALSE);
+ OP2(SLJIT_SUB | SLJIT_SET_Z, TMP3, 0, TMP3, 0, SLJIT_IMM, 1);
+ JUMPTO(SLJIT_NOT_ZERO, label);
+
+ JUMPHERE(jump);
+ OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), STACK(2), STR_PTR, 0);
+ }
+ }
+else
+#endif
+ {
+ if (lmin > 0)
+ {
+ OP2(SLJIT_SUB, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(lmin));
+ add_jump(compiler, reverse_failed, CMP(SLJIT_LESS, STR_PTR, 0, TMP2, 0));
+ }
+
+ if (lmin < lmax)
+ {
+ OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), STACK(3), STR_PTR, 0);
+
+ OP2(SLJIT_SUB, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(lmax - lmin));
+ OP2U(SLJIT_SUB | SLJIT_SET_LESS, STR_PTR, 0, TMP2, 0);
+ SELECT(SLJIT_LESS, STR_PTR, TMP2, 0, STR_PTR);
+
+ OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), STACK(2), STR_PTR, 0);
+ }
+ }
+
+check_start_used_ptr(common);
+
+if (lmin < lmax)
+ BACKTRACK_AS(vreverse_backtrack)->matchingpath = LABEL();
+
+return cc;
+}
+
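The new compile_reverse_matchingpath() above steps the subject pointer back before a lookbehind body runs: OP_REVERSE carries one fixed length, OP_VREVERSE a minimum and a maximum. A rough non-UTF sketch of the emitted logic (hypothetical names, not part of the patch):

str_ptr -= lmin;                     /* back over the minimum length */
if (str_ptr < begin)
  goto no_match;                     /* not enough characters before */

if (lmin < lmax)                     /* OP_VREVERSE only */
  {
  stack[3] = str_ptr;                /* upper bound of the start range */
  str_ptr -= lmax - lmin;
  if (str_ptr < begin)
    str_ptr = begin;                 /* clamp at the subject start */
  stack[2] = str_ptr;                /* lower bound; matching resumes here */
  }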
static SLJIT_INLINE BOOL assert_needs_str_ptr_saving(PCRE2_SPTR cc)
{
while (TRUE)
@@ -9841,6 +10145,8 @@ while (TRUE)
case OP_DOLLM:
case OP_CALLOUT:
case OP_ALT:
+ case OP_NOT_UCP_WORD_BOUNDARY:
+ case OP_UCP_WORD_BOUNDARY:
cc += PRIV(OP_lengths)[*cc];
break;
@@ -9860,13 +10166,15 @@ int framesize;
int extrasize;
BOOL local_quit_available = FALSE;
BOOL needs_control_head;
+BOOL end_block_size = 0;
+BOOL has_vreverse;
int private_data_ptr;
backtrack_common altbacktrack;
PCRE2_SPTR ccbegin;
PCRE2_UCHAR opcode;
PCRE2_UCHAR bra = OP_BRA;
jump_list *tmp = NULL;
-jump_list **target = (conditional) ? &backtrack->condfailed : &backtrack->common.topbacktracks;
+jump_list **target = (conditional) ? &backtrack->condfailed : &backtrack->common.own_backtracks;
jump_list **found;
/* Saving previous accept variables. */
BOOL save_local_quit_available = common->local_quit_available;
@@ -9889,6 +10197,7 @@ if (*cc == OP_BRAZERO || *cc == OP_BRAMINZERO)
bra = *cc;
cc++;
}
+
private_data_ptr = PRIVATE_DATA(cc);
SLJIT_ASSERT(private_data_ptr != 0);
framesize = get_framesize(common, cc, NULL, FALSE, &needs_control_head);
@@ -9908,12 +10217,17 @@ if (bra == OP_BRAMINZERO)
brajump = CMP(SLJIT_EQUAL, STR_PTR, 0, SLJIT_IMM, 0);
}
+if ((opcode == OP_ASSERTBACK || opcode == OP_ASSERTBACK_NOT) && find_vreverse(ccbegin))
+ end_block_size = 3;
+
if (framesize < 0)
{
extrasize = 1;
if (bra == OP_BRA && !assert_needs_str_ptr_saving(ccbegin + 1 + LINK_SIZE))
extrasize = 0;
+ extrasize += end_block_size;
+
if (needs_control_head)
extrasize++;
@@ -9931,18 +10245,19 @@ if (framesize < 0)
if (needs_control_head)
{
- SLJIT_ASSERT(extrasize == 2);
+ SLJIT_ASSERT(extrasize == end_block_size + 2);
OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), common->control_head_ptr, SLJIT_IMM, 0);
- OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), STACK(1), TMP1, 0);
+ OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), STACK(end_block_size + 1), TMP1, 0);
}
}
else
{
- extrasize = needs_control_head ? 3 : 2;
+ extrasize = (needs_control_head ? 3 : 2) + end_block_size;
+
+ OP1(SLJIT_MOV, TMP2, 0, STACK_TOP, 0);
allocate_stack(common, framesize + extrasize);
OP1(SLJIT_MOV, TMP1, 0, SLJIT_MEM1(SLJIT_SP), private_data_ptr);
- OP2(SLJIT_ADD, TMP2, 0, STACK_TOP, 0, SLJIT_IMM, (framesize + extrasize) * sizeof(sljit_sw));
OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), private_data_ptr, TMP2, 0);
if (needs_control_head)
OP1(SLJIT_MOV, TMP2, 0, SLJIT_MEM1(SLJIT_SP), common->control_head_ptr);
@@ -9950,16 +10265,22 @@ else
if (needs_control_head)
{
- OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), STACK(2), TMP1, 0);
- OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), STACK(1), TMP2, 0);
+ OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), STACK(end_block_size + 2), TMP1, 0);
+ OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), STACK(end_block_size + 1), TMP2, 0);
OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), common->control_head_ptr, SLJIT_IMM, 0);
}
else
- OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), STACK(1), TMP1, 0);
+ OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), STACK(end_block_size + 1), TMP1, 0);
init_frame(common, ccbegin, NULL, framesize + extrasize - 1, extrasize);
}
+if (end_block_size > 0)
+ {
+ OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), STACK(1), STR_END, 0);
+ OP1(SLJIT_MOV, STR_END, 0, STR_PTR, 0);
+ }
+
memset(&altbacktrack, 0, sizeof(backtrack_common));
if (conditional || (opcode == OP_ASSERT_NOT || opcode == OP_ASSERTBACK_NOT))
{
@@ -9978,13 +10299,19 @@ while (1)
common->accept_label = NULL;
common->accept = NULL;
altbacktrack.top = NULL;
- altbacktrack.topbacktracks = NULL;
+ altbacktrack.own_backtracks = NULL;
if (*ccbegin == OP_ALT && extrasize > 0)
OP1(SLJIT_MOV, STR_PTR, 0, SLJIT_MEM1(STACK_TOP), STACK(0));
altbacktrack.cc = ccbegin;
- compile_matchingpath(common, ccbegin + 1 + LINK_SIZE, cc, &altbacktrack);
+ ccbegin += 1 + LINK_SIZE;
+
+ has_vreverse = (*ccbegin == OP_VREVERSE);
+ if (*ccbegin == OP_REVERSE || has_vreverse)
+ ccbegin = compile_reverse_matchingpath(common, ccbegin, &altbacktrack);
+
+ compile_matchingpath(common, ccbegin, cc, &altbacktrack);
if (SLJIT_UNLIKELY(sljit_get_compiler_error(compiler)))
{
if (local_quit_available)
@@ -10000,6 +10327,13 @@ while (1)
common->accept = save_accept;
return NULL;
}
+
+ if (has_vreverse)
+ {
+ SLJIT_ASSERT(altbacktrack.top != NULL);
+ add_jump(compiler, &altbacktrack.top->simple_backtracks, CMP(SLJIT_LESS, STR_PTR, 0, STR_END, 0));
+ }
+
common->accept_label = LABEL();
if (common->accept != NULL)
set_jumps(common->accept, common->accept_label);
@@ -10012,6 +10346,9 @@ while (1)
else if (extrasize > 0)
free_stack(common, extrasize);
+ if (end_block_size > 0)
+ OP1(SLJIT_MOV, STR_END, 0, SLJIT_MEM1(STACK_TOP), STACK(-extrasize + 1));
+
if (needs_control_head)
OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), common->control_head_ptr, SLJIT_MEM1(STACK_TOP), STACK(-1));
}
@@ -10021,12 +10358,20 @@ while (1)
{
/* We don't need to keep the STR_PTR, only the previous private_data_ptr. */
OP2(SLJIT_SUB, STACK_TOP, 0, SLJIT_MEM1(SLJIT_SP), private_data_ptr, SLJIT_IMM, (framesize + 1) * sizeof(sljit_sw));
+
+ if (end_block_size > 0)
+ OP1(SLJIT_MOV, STR_END, 0, SLJIT_MEM1(STACK_TOP), STACK(-extrasize + 2));
+
if (needs_control_head)
OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), common->control_head_ptr, SLJIT_MEM1(STACK_TOP), STACK(-1));
}
else
{
OP1(SLJIT_MOV, STACK_TOP, 0, SLJIT_MEM1(SLJIT_SP), private_data_ptr);
+
+ if (end_block_size > 0)
+ OP1(SLJIT_MOV, STR_END, 0, SLJIT_MEM1(STACK_TOP), STACK(-framesize - extrasize + 1));
+
if (needs_control_head)
OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), common->control_head_ptr, SLJIT_MEM1(STACK_TOP), STACK(-framesize - 2));
add_jump(compiler, &common->revertframes, JUMP(SLJIT_FAST_CALL));
@@ -10040,7 +10385,7 @@ while (1)
if (conditional)
{
if (extrasize > 0)
- OP1(SLJIT_MOV, STR_PTR, 0, SLJIT_MEM1(STACK_TOP), needs_control_head ? STACK(-2) : STACK(-1));
+ OP1(SLJIT_MOV, STR_PTR, 0, SLJIT_MEM1(STACK_TOP), STACK(-end_block_size - (needs_control_head ? 2 : 1)));
}
else if (bra == OP_BRAZERO)
{
@@ -10079,7 +10424,7 @@ while (1)
common->accept = save_accept;
return NULL;
}
- set_jumps(altbacktrack.topbacktracks, LABEL());
+ set_jumps(altbacktrack.own_backtracks, LABEL());
if (*cc != OP_ALT)
break;
@@ -10112,8 +10457,11 @@ if (common->positive_assertion_quit != NULL)
JUMPHERE(jump);
}
+if (end_block_size > 0)
+ OP1(SLJIT_MOV, STR_END, 0, SLJIT_MEM1(STACK_TOP), STACK(1));
+
if (needs_control_head)
- OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), common->control_head_ptr, SLJIT_MEM1(STACK_TOP), STACK(1));
+ OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), common->control_head_ptr, SLJIT_MEM1(STACK_TOP), STACK(end_block_size + 1));
if (opcode == OP_ASSERT || opcode == OP_ASSERTBACK)
{
@@ -10126,8 +10474,8 @@ if (opcode == OP_ASSERT || opcode == OP_ASSERTBACK)
/* The topmost item should be 0. */
if (bra == OP_BRAZERO)
{
- if (extrasize == 2)
- free_stack(common, 1);
+ if (extrasize >= 2)
+ free_stack(common, extrasize - 1);
OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), STACK(0), SLJIT_IMM, 0);
}
else if (extrasize > 0)
@@ -10161,8 +10509,9 @@ if (opcode == OP_ASSERT || opcode == OP_ASSERTBACK)
/* Keep the STR_PTR on the top of the stack. */
if (bra == OP_BRAZERO)
{
+ /* This allocation is always successful. */
OP2(SLJIT_SUB, STACK_TOP, 0, STACK_TOP, 0, SLJIT_IMM, sizeof(sljit_sw));
- if (extrasize == 2)
+ if (extrasize >= 2)
OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), STACK(0), STR_PTR, 0);
}
else if (bra == OP_BRAMINZERO)
@@ -10182,8 +10531,9 @@ if (opcode == OP_ASSERT || opcode == OP_ASSERTBACK)
else
{
/* We don't need to keep the STR_PTR, only the previous private_data_ptr. */
- OP2(SLJIT_SUB, STACK_TOP, 0, SLJIT_MEM1(SLJIT_SP), private_data_ptr, SLJIT_IMM, (framesize + 2) * sizeof(sljit_sw));
- if (extrasize == 2)
+ OP2(SLJIT_SUB, STACK_TOP, 0, SLJIT_MEM1(SLJIT_SP), private_data_ptr, SLJIT_IMM, (framesize + end_block_size + 2) * sizeof(sljit_sw));
+
+ if (extrasize == 2 + end_block_size)
{
OP1(SLJIT_MOV, STR_PTR, 0, SLJIT_MEM1(STACK_TOP), STACK(0));
if (bra == OP_BRAMINZERO)
@@ -10191,7 +10541,7 @@ if (opcode == OP_ASSERT || opcode == OP_ASSERTBACK)
}
else
{
- SLJIT_ASSERT(extrasize == 3);
+ SLJIT_ASSERT(extrasize == 3 + end_block_size);
OP1(SLJIT_MOV, STR_PTR, 0, SLJIT_MEM1(STACK_TOP), STACK(-1));
OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), STACK(0), bra == OP_BRAZERO ? STR_PTR : SLJIT_IMM, 0);
}
@@ -10215,7 +10565,7 @@ if (opcode == OP_ASSERT || opcode == OP_ASSERTBACK)
OP2(SLJIT_ADD, STACK_TOP, 0, STACK_TOP, 0, SLJIT_IMM, (framesize - 1) * sizeof(sljit_sw));
OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), private_data_ptr, TMP1, 0);
}
- set_jumps(backtrack->common.topbacktracks, LABEL());
+ set_jumps(backtrack->common.own_backtracks, LABEL());
}
}
else
@@ -10228,8 +10578,8 @@ else
if (bra != OP_BRA)
{
- if (extrasize == 2)
- free_stack(common, 1);
+ if (extrasize >= 2)
+ free_stack(common, extrasize - 1);
OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), STACK(0), SLJIT_IMM, 0);
}
else if (extrasize > 0)
@@ -10260,9 +10610,9 @@ else
if (bra != OP_BRA)
{
- SLJIT_ASSERT(found == &backtrack->common.topbacktracks);
- set_jumps(backtrack->common.topbacktracks, LABEL());
- backtrack->common.topbacktracks = NULL;
+ SLJIT_ASSERT(found == &backtrack->common.own_backtracks);
+ set_jumps(backtrack->common.own_backtracks, LABEL());
+ backtrack->common.own_backtracks = NULL;
}
}
@@ -10371,7 +10721,7 @@ static PCRE2_SPTR SLJIT_FUNC do_script_run_utf(PCRE2_SPTR ptr, PCRE2_SPTR endptr
#endif /* SUPPORT_UNICODE */
-static SLJIT_INLINE void match_script_run_common(compiler_common *common, int private_data_ptr, backtrack_common *parent)
+static void match_script_run_common(compiler_common *common, int private_data_ptr, backtrack_common *parent)
{
DEFINE_COMPILER;
@@ -10386,7 +10736,7 @@ sljit_emit_icall(compiler, SLJIT_CALL, SLJIT_ARGS2(W, W, W), SLJIT_IMM, SLJIT_FU
#endif
OP1(SLJIT_MOV, STR_PTR, 0, SLJIT_RETURN_REG, 0);
-add_jump(compiler, parent->top != NULL ? &parent->top->nextbacktracks : &parent->topbacktracks, CMP(SLJIT_EQUAL, SLJIT_RETURN_REG, 0, SLJIT_IMM, 0));
+add_jump(compiler, parent->top != NULL ? &parent->top->simple_backtracks : &parent->own_backtracks, CMP(SLJIT_EQUAL, SLJIT_RETURN_REG, 0, SLJIT_IMM, 0));
}
/*
@@ -10460,6 +10810,7 @@ PCRE2_UCHAR ket;
assert_backtrack *assert;
BOOL has_alternatives;
BOOL needs_control_head = FALSE;
+BOOL has_vreverse = FALSE;
struct sljit_jump *jump;
struct sljit_jump *skip;
struct sljit_label *rmax_label = NULL;
@@ -10709,6 +11060,21 @@ else if (opcode == OP_CBRA || opcode == OP_SCBRA)
OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), STACK(0), TMP2, 0);
}
}
+else if (opcode == OP_ASSERTBACK_NA && PRIVATE_DATA(ccbegin + 1))
+ {
+ OP1(SLJIT_MOV, TMP2, 0, SLJIT_MEM1(SLJIT_SP), private_data_ptr);
+ allocate_stack(common, 4);
+ OP1(SLJIT_MOV, TMP1, 0, SLJIT_MEM1(SLJIT_SP), private_data_ptr + sizeof(sljit_sw));
+ OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), private_data_ptr, STR_PTR, 0);
+ OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), private_data_ptr + sizeof(sljit_sw), STR_END, 0);
+ OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), STACK(0), TMP2, 0);
+ OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), STACK(1), TMP1, 0);
+ OP1(SLJIT_MOV, STR_END, 0, STR_PTR, 0);
+
+ has_vreverse = (*matchingpath == OP_VREVERSE);
+ if (*matchingpath == OP_REVERSE || has_vreverse)
+ matchingpath = compile_reverse_matchingpath(common, matchingpath, backtrack);
+ }
else if (opcode == OP_ASSERT_NA || opcode == OP_ASSERTBACK_NA || opcode == OP_SCRIPT_RUN || opcode == OP_SBRA || opcode == OP_SCOND)
{
/* Saving the previous value. */
@@ -10716,6 +11082,9 @@ else if (opcode == OP_ASSERT_NA || opcode == OP_ASSERTBACK_NA || opcode == OP_SC
allocate_stack(common, 1);
OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), private_data_ptr, STR_PTR, 0);
OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), STACK(0), TMP2, 0);
+
+ if (*matchingpath == OP_REVERSE)
+ matchingpath = compile_reverse_matchingpath(common, matchingpath, backtrack);
}
else if (has_alternatives)
{
@@ -10835,14 +11204,28 @@ compile_matchingpath(common, matchingpath, cc, backtrack);
if (SLJIT_UNLIKELY(sljit_get_compiler_error(compiler)))
return NULL;
-if (opcode == OP_ASSERT_NA || opcode == OP_ASSERTBACK_NA)
- OP1(SLJIT_MOV, STR_PTR, 0, SLJIT_MEM1(SLJIT_SP), private_data_ptr);
-
-if (opcode == OP_ONCE)
- match_once_common(common, ket, BACKTRACK_AS(bracket_backtrack)->u.framesize, private_data_ptr, has_alternatives, needs_control_head);
+switch (opcode)
+ {
+ case OP_ASSERTBACK_NA:
+ if (has_vreverse)
+ {
+ SLJIT_ASSERT(backtrack->top != NULL && PRIVATE_DATA(ccbegin + 1));
+ add_jump(compiler, &backtrack->top->simple_backtracks, CMP(SLJIT_LESS, STR_PTR, 0, STR_END, 0));
+ }
-if (opcode == OP_SCRIPT_RUN)
- match_script_run_common(common, private_data_ptr, backtrack);
+ if (PRIVATE_DATA(ccbegin + 1))
+ OP1(SLJIT_MOV, STR_END, 0, SLJIT_MEM1(SLJIT_SP), private_data_ptr + sizeof(sljit_sw));
+ break;
+ case OP_ASSERT_NA:
+ OP1(SLJIT_MOV, STR_PTR, 0, SLJIT_MEM1(SLJIT_SP), private_data_ptr);
+ break;
+ case OP_ONCE:
+ match_once_common(common, ket, BACKTRACK_AS(bracket_backtrack)->u.framesize, private_data_ptr, has_alternatives, needs_control_head);
+ break;
+ case OP_SCRIPT_RUN:
+ match_script_run_common(common, private_data_ptr, backtrack);
+ break;
+ }
stacksize = 0;
if (repeat_type == OP_MINUPTO)
@@ -10901,7 +11284,7 @@ if (has_alternatives)
if (i <= 3)
OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), STACK(stacksize), SLJIT_IMM, 0);
else
- BACKTRACK_AS(bracket_backtrack)->u.matching_put_label = sljit_emit_put_label(compiler, SLJIT_MEM1(STACK_TOP), STACK(stacksize));
+ BACKTRACK_AS(bracket_backtrack)->u.matching_mov_addr = sljit_emit_mov_addr(compiler, SLJIT_MEM1(STACK_TOP), STACK(stacksize));
}
if (ket != OP_KETRMAX)
BACKTRACK_AS(bracket_backtrack)->alternative_matchingpath = LABEL();
@@ -10988,17 +11371,22 @@ if (bra == OP_BRAMINZERO)
/* Continue to the normal backtrack. */
}
-if ((ket != OP_KET && bra != OP_BRAMINZERO) || bra == OP_BRAZERO)
+if ((ket != OP_KET && bra != OP_BRAMINZERO) || bra == OP_BRAZERO || (has_alternatives && repeat_type != OP_EXACT))
count_match(common);
cc += 1 + LINK_SIZE;
if (opcode == OP_ONCE)
{
+ int data;
+ int framesize = BACKTRACK_AS(bracket_backtrack)->u.framesize;
+
+ SLJIT_ASSERT(SHRT_MIN <= framesize && framesize < SHRT_MAX/2);
/* We temporarily encode the needs_control_head in the lowest bit.
- Note: on the target architectures of SLJIT the ((x << 1) >> 1) returns
- the same value for small signed numbers (including negative numbers). */
- BACKTRACK_AS(bracket_backtrack)->u.framesize = (int)((unsigned)BACKTRACK_AS(bracket_backtrack)->u.framesize << 1) | (needs_control_head ? 1 : 0);
+ The real value should be short enough for this operation to work
+ without triggering Undefined Behaviour. */
+ data = (int)((short)((unsigned short)framesize << 1) | (needs_control_head ? 1 : 0));
+ BACKTRACK_AS(bracket_backtrack)->u.framesize = data;
}
return cc + repeat_length;
}
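/* Illustrative sketch (not part of the patch): the OP_ONCE hunk above packs
   needs_control_head into the lowest bit of the frame size and relies on the
   value fitting in a signed short, as the SLJIT_ASSERT requires. A minimal
   round trip, assuming arithmetic right shift for negative values, which holds
   on the targets SLJIT supports: */
#include <assert.h>
#include <limits.h>

static int pack_framesize(int framesize, int needs_control_head)
{
assert(SHRT_MIN <= framesize && framesize < SHRT_MAX / 2);
/* Shifting the value as unsigned short avoids signed-overflow UB; the result
   is then narrowed back to a (possibly negative) short. */
return (int)((short)((unsigned short)framesize << 1) | (needs_control_head ? 1 : 0));
}

int main(void)
{
int data = pack_framesize(-1, 1);   /* no frame, control head needed */
assert((data & 1) == 1);            /* flag recovered from the low bit */
assert((data >> 1) == -1);          /* frame size recovered by shifting back */
data = pack_framesize(5, 0);
assert((data & 1) == 0 && (data >> 1) == 5);
return 0;
}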
@@ -11041,7 +11429,7 @@ switch(opcode)
case OP_CBRAPOS:
case OP_SCBRAPOS:
offset = GET2(cc, 1 + LINK_SIZE);
- /* This case cannot be optimized in the same was as
+ /* This case cannot be optimized in the same way as
normal capturing brackets. */
SLJIT_ASSERT(common->optimized_cbracket[offset] == 0);
cbraprivptr = OVECTOR_PRIV(offset);
@@ -11158,7 +11546,7 @@ loop = LABEL();
while (*cc != OP_KETRPOS)
{
backtrack->top = NULL;
- backtrack->topbacktracks = NULL;
+ backtrack->own_backtracks = NULL;
cc += GET(cc, 1);
compile_matchingpath(common, ccbegin, cc, backtrack);
@@ -11239,7 +11627,7 @@ while (*cc != OP_KETRPOS)
compile_backtrackingpath(common, backtrack->top);
if (SLJIT_UNLIKELY(sljit_get_compiler_error(compiler)))
return NULL;
- set_jumps(backtrack->topbacktracks, LABEL());
+ set_jumps(backtrack->own_backtracks, LABEL());
if (framesize < 0)
{
@@ -11271,13 +11659,13 @@ while (*cc != OP_KETRPOS)
/* We don't have to restore the control head in case of a failed match. */
-backtrack->topbacktracks = NULL;
+backtrack->own_backtracks = NULL;
if (!zero)
{
if (framesize < 0)
- add_jump(compiler, &backtrack->topbacktracks, CMP(SLJIT_NOT_EQUAL, SLJIT_MEM1(STACK_TOP), STACK(stacksize - 1), SLJIT_IMM, 0));
+ add_jump(compiler, &backtrack->own_backtracks, CMP(SLJIT_NOT_EQUAL, SLJIT_MEM1(STACK_TOP), STACK(stacksize - 1), SLJIT_IMM, 0));
else /* TMP2 is set to [private_data_ptr] above. */
- add_jump(compiler, &backtrack->topbacktracks, CMP(SLJIT_NOT_EQUAL, SLJIT_MEM1(TMP2), STACK(-stacksize), SLJIT_IMM, 0));
+ add_jump(compiler, &backtrack->own_backtracks, CMP(SLJIT_NOT_EQUAL, SLJIT_MEM1(TMP2), STACK(-stacksize), SLJIT_IMM, 0));
}
/* None of them matched. */
@@ -11473,7 +11861,7 @@ SLJIT_ASSERT(common->fast_forward_bc_ptr != NULL || early_fail_ptr == 0
|| (early_fail_ptr >= common->early_fail_start_ptr && early_fail_ptr <= common->early_fail_end_ptr));
if (early_fail_type == type_fail)
- add_jump(compiler, &backtrack->topbacktracks, CMP(SLJIT_LESS_EQUAL, STR_PTR, 0, SLJIT_MEM1(SLJIT_SP), early_fail_ptr));
+ add_jump(compiler, &backtrack->own_backtracks, CMP(SLJIT_LESS_EQUAL, STR_PTR, 0, SLJIT_MEM1(SLJIT_SP), early_fail_ptr));
cc = get_iterator_parameters(common, cc, &opcode, &type, &max, &exact, &end);
@@ -11500,10 +11888,10 @@ if (exact > 1)
&& type != OP_ANYNL && type != OP_EXTUNI)
{
OP2(SLJIT_ADD, TMP1, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(exact));
- add_jump(compiler, &backtrack->topbacktracks, CMP(SLJIT_GREATER, TMP1, 0, STR_END, 0));
+ add_jump(compiler, &backtrack->own_backtracks, CMP(SLJIT_GREATER, TMP1, 0, STR_END, 0));
OP1(SLJIT_MOV, tmp_base, tmp_offset, SLJIT_IMM, exact);
label = LABEL();
- compile_char1_matchingpath(common, type, cc, &backtrack->topbacktracks, FALSE);
+ compile_char1_matchingpath(common, type, cc, &backtrack->own_backtracks, FALSE);
OP2(SLJIT_SUB | SLJIT_SET_Z, tmp_base, tmp_offset, tmp_base, tmp_offset, SLJIT_IMM, 1);
JUMPTO(SLJIT_NOT_ZERO, label);
}
@@ -11511,13 +11899,13 @@ if (exact > 1)
{
OP1(SLJIT_MOV, tmp_base, tmp_offset, SLJIT_IMM, exact);
label = LABEL();
- compile_char1_matchingpath(common, type, cc, &backtrack->topbacktracks, TRUE);
+ compile_char1_matchingpath(common, type, cc, &backtrack->own_backtracks, TRUE);
OP2(SLJIT_SUB | SLJIT_SET_Z, tmp_base, tmp_offset, tmp_base, tmp_offset, SLJIT_IMM, 1);
JUMPTO(SLJIT_NOT_ZERO, label);
}
}
else if (exact == 1)
- compile_char1_matchingpath(common, type, cc, &backtrack->topbacktracks, TRUE);
+ compile_char1_matchingpath(common, type, cc, &backtrack->own_backtracks, TRUE);
if (early_fail_type == type_fail_range)
{
@@ -11526,7 +11914,7 @@ if (early_fail_type == type_fail_range)
OP1(SLJIT_MOV, TMP2, 0, SLJIT_MEM1(SLJIT_SP), early_fail_ptr + SSIZE_OF(sw));
OP2(SLJIT_SUB, TMP1, 0, TMP1, 0, TMP2, 0);
OP2(SLJIT_SUB, TMP2, 0, STR_PTR, 0, TMP2, 0);
- add_jump(compiler, &backtrack->topbacktracks, CMP(SLJIT_LESS_EQUAL, TMP2, 0, TMP1, 0));
+ add_jump(compiler, &backtrack->own_backtracks, CMP(SLJIT_LESS_EQUAL, TMP2, 0, TMP1, 0));
OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), early_fail_ptr, STR_PTR, 0);
OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), early_fail_ptr + SSIZE_OF(sw), STR_PTR, 0);
@@ -11606,7 +11994,7 @@ switch(opcode)
if (common->mode == PCRE2_JIT_COMPLETE)
{
OP2U(SLJIT_SUB | SLJIT_SET_GREATER, STR_PTR, 0, STR_END, 0);
- CMOV(SLJIT_GREATER, STR_PTR, STR_END, 0);
+ SELECT(SLJIT_GREATER, STR_PTR, STR_END, 0, STR_PTR);
}
else
{
@@ -11674,14 +12062,14 @@ switch(opcode)
if (opcode == OP_UPTO)
{
OP2(SLJIT_SUB | SLJIT_SET_Z, tmp_base, tmp_offset, tmp_base, tmp_offset, SLJIT_IMM, 1);
- add_jump(compiler, &backtrack->topbacktracks, JUMP(SLJIT_ZERO));
+ add_jump(compiler, &backtrack->own_backtracks, JUMP(SLJIT_ZERO));
}
- compile_char1_matchingpath(common, type, cc, &backtrack->topbacktracks, FALSE);
+ compile_char1_matchingpath(common, type, cc, &backtrack->own_backtracks, FALSE);
if (early_fail_ptr != 0)
OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), early_fail_ptr, STR_PTR, 0);
JUMPHERE(jump);
- detect_partial_match(common, &backtrack->topbacktracks);
+ detect_partial_match(common, &backtrack->own_backtracks);
OP1(MOV_UCHAR, TMP1, 0, SLJIT_MEM1(STR_PTR), IN_UCHARS(0));
if (charpos_othercasebit != 0)
OP2(SLJIT_OR, TMP1, 0, TMP1, 0, SLJIT_IMM, charpos_othercasebit);
@@ -11835,7 +12223,7 @@ switch(opcode)
}
#if defined SUPPORT_UNICODE && PCRE2_CODE_UNIT_WIDTH != 32
- if (common->utf)
+ if (type == OP_EXTUNI || common->utf)
{
OP1(SLJIT_MOV, tmp_base, tmp_offset, STR_PTR, 0);
detect_partial_match(common, &no_match);
@@ -11899,7 +12287,7 @@ switch(opcode)
if (common->mode == PCRE2_JIT_COMPLETE)
{
OP2U(SLJIT_SUB | SLJIT_SET_GREATER, STR_PTR, 0, STR_END, 0);
- CMOV(SLJIT_GREATER, STR_PTR, STR_END, 0);
+ SELECT(SLJIT_GREATER, STR_PTR, STR_END, 0, STR_PTR);
}
else
{
@@ -11952,12 +12340,12 @@ PUSH_BACKTRACK(sizeof(backtrack_common), cc, NULL);
if (*cc == OP_FAIL)
{
- add_jump(compiler, &backtrack->topbacktracks, JUMP(SLJIT_JUMP));
+ add_jump(compiler, &backtrack->own_backtracks, JUMP(SLJIT_JUMP));
return cc + 1;
}
if (*cc == OP_ACCEPT && common->currententry == NULL && (common->re->overall_options & PCRE2_ENDANCHORED) != 0)
- add_jump(compiler, &common->reset_match, CMP(SLJIT_NOT_EQUAL, STR_PTR, 0, STR_END, 0));
+ add_jump(compiler, &common->restart_match, CMP(SLJIT_NOT_EQUAL, STR_PTR, 0, STR_END, 0));
if (*cc == OP_ASSERT_ACCEPT || common->currententry != NULL || !common->might_be_empty)
{
@@ -11983,7 +12371,7 @@ else
OP1(SLJIT_MOV_U32, TMP2, 0, SLJIT_MEM1(ARGUMENTS), SLJIT_OFFSETOF(jit_arguments, options));
OP2U(SLJIT_AND | SLJIT_SET_Z, TMP2, 0, SLJIT_IMM, PCRE2_NOTEMPTY);
-add_jump(compiler, &backtrack->topbacktracks, JUMP(SLJIT_NOT_ZERO));
+add_jump(compiler, &backtrack->own_backtracks, JUMP(SLJIT_NOT_ZERO));
OP2U(SLJIT_AND | SLJIT_SET_Z, TMP2, 0, SLJIT_IMM, PCRE2_NOTEMPTY_ATSTART);
if (common->accept_label == NULL)
add_jump(compiler, &common->accept, JUMP(SLJIT_ZERO));
@@ -11995,7 +12383,7 @@ if (common->accept_label == NULL)
add_jump(compiler, &common->accept, CMP(SLJIT_NOT_EQUAL, TMP2, 0, STR_PTR, 0));
else
CMPTO(SLJIT_NOT_EQUAL, TMP2, 0, STR_PTR, 0, common->accept_label);
-add_jump(compiler, &backtrack->topbacktracks, JUMP(SLJIT_JUMP));
+add_jump(compiler, &backtrack->own_backtracks, JUMP(SLJIT_JUMP));
return cc + 1;
}
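/* Illustrative sketch (not part of the patch): the OP_ACCEPT code above
   implements the PCRE2_NOTEMPTY / PCRE2_NOTEMPTY_ATSTART checks for patterns
   that may match the empty string. From the caller's side the option is simply
   passed to pcre2_match; build with -DPCRE2_CODE_UNIT_WIDTH=8 and libpcre2-8: */
#define PCRE2_CODE_UNIT_WIDTH 8
#include <pcre2.h>

static int match_nonempty(const pcre2_code *re, PCRE2_SPTR subject, PCRE2_SIZE length)
{
pcre2_match_data *md = pcre2_match_data_create_from_pattern(re, NULL);
if (md == NULL) return PCRE2_ERROR_NOMEMORY;
/* PCRE2_NOTEMPTY forces an empty match to fail and backtrack, which is the
   branch the generated accept code tests for. */
int rc = pcre2_match(re, subject, length, 0, PCRE2_NOTEMPTY, md, NULL);
pcre2_match_data_free(md);
return rc;
}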
@@ -12115,8 +12503,9 @@ while (cc < ccend)
case OP_DOLLM:
case OP_CIRC:
case OP_CIRCM:
- case OP_REVERSE:
- cc = compile_simple_assertion_matchingpath(common, *cc, cc + 1, parent->top != NULL ? &parent->top->nextbacktracks : &parent->topbacktracks);
+ case OP_NOT_UCP_WORD_BOUNDARY:
+ case OP_UCP_WORD_BOUNDARY:
+ cc = compile_simple_assertion_matchingpath(common, *cc, cc + 1, parent->top != NULL ? &parent->top->simple_backtracks : &parent->own_backtracks);
break;
case OP_NOT_DIGIT:
@@ -12138,7 +12527,7 @@ while (cc < ccend)
case OP_EXTUNI:
case OP_NOT:
case OP_NOTI:
- cc = compile_char1_matchingpath(common, *cc, cc + 1, parent->top != NULL ? &parent->top->nextbacktracks : &parent->topbacktracks, TRUE);
+ cc = compile_char1_matchingpath(common, *cc, cc + 1, parent->top != NULL ? &parent->top->simple_backtracks : &parent->own_backtracks, TRUE);
break;
case OP_SET_SOM:
@@ -12153,9 +12542,9 @@ while (cc < ccend)
case OP_CHAR:
case OP_CHARI:
if (common->mode == PCRE2_JIT_COMPLETE)
- cc = compile_charn_matchingpath(common, cc, ccend, parent->top != NULL ? &parent->top->nextbacktracks : &parent->topbacktracks);
+ cc = compile_charn_matchingpath(common, cc, ccend, parent->top != NULL ? &parent->top->simple_backtracks : &parent->own_backtracks);
else
- cc = compile_char1_matchingpath(common, *cc, cc + 1, parent->top != NULL ? &parent->top->nextbacktracks : &parent->topbacktracks, TRUE);
+ cc = compile_char1_matchingpath(common, *cc, cc + 1, parent->top != NULL ? &parent->top->simple_backtracks : &parent->own_backtracks, TRUE);
break;
case OP_STAR:
@@ -12231,7 +12620,7 @@ while (cc < ccend)
if (cc[1 + (32 / sizeof(PCRE2_UCHAR))] >= OP_CRSTAR && cc[1 + (32 / sizeof(PCRE2_UCHAR))] <= OP_CRPOSRANGE)
cc = compile_iterator_matchingpath(common, cc, parent);
else
- cc = compile_char1_matchingpath(common, *cc, cc + 1, parent->top != NULL ? &parent->top->nextbacktracks : &parent->topbacktracks, TRUE);
+ cc = compile_char1_matchingpath(common, *cc, cc + 1, parent->top != NULL ? &parent->top->simple_backtracks : &parent->own_backtracks, TRUE);
break;
#if defined SUPPORT_UNICODE || PCRE2_CODE_UNIT_WIDTH == 16 || PCRE2_CODE_UNIT_WIDTH == 32
@@ -12239,7 +12628,7 @@ while (cc < ccend)
if (*(cc + GET(cc, 1)) >= OP_CRSTAR && *(cc + GET(cc, 1)) <= OP_CRPOSRANGE)
cc = compile_iterator_matchingpath(common, cc, parent);
else
- cc = compile_char1_matchingpath(common, *cc, cc + 1, parent->top != NULL ? &parent->top->nextbacktracks : &parent->topbacktracks, TRUE);
+ cc = compile_char1_matchingpath(common, *cc, cc + 1, parent->top != NULL ? &parent->top->simple_backtracks : &parent->own_backtracks, TRUE);
break;
#endif
@@ -12249,7 +12638,7 @@ while (cc < ccend)
cc = compile_ref_iterator_matchingpath(common, cc, parent);
else
{
- compile_ref_matchingpath(common, cc, parent->top != NULL ? &parent->top->nextbacktracks : &parent->topbacktracks, TRUE, FALSE);
+ compile_ref_matchingpath(common, cc, parent->top != NULL ? &parent->top->simple_backtracks : &parent->own_backtracks, TRUE, FALSE);
cc += 1 + IMM2_SIZE;
}
break;
@@ -12260,8 +12649,8 @@ while (cc < ccend)
cc = compile_ref_iterator_matchingpath(common, cc, parent);
else
{
- compile_dnref_search(common, cc, parent->top != NULL ? &parent->top->nextbacktracks : &parent->topbacktracks);
- compile_ref_matchingpath(common, cc, parent->top != NULL ? &parent->top->nextbacktracks : &parent->topbacktracks, TRUE, FALSE);
+ compile_dnref_search(common, cc, parent->top != NULL ? &parent->top->simple_backtracks : &parent->own_backtracks);
+ compile_ref_matchingpath(common, cc, parent->top != NULL ? &parent->top->simple_backtracks : &parent->own_backtracks, TRUE, FALSE);
cc += 1 + 2 * IMM2_SIZE;
}
break;
@@ -12539,7 +12928,7 @@ switch(opcode)
break;
}
-set_jumps(current->topbacktracks, LABEL());
+set_jumps(current->own_backtracks, LABEL());
}
static SLJIT_INLINE void compile_ref_iterator_backtrackingpath(compiler_common *common, struct backtrack_common *current)
@@ -12554,7 +12943,7 @@ type = cc[ref ? 1 + IMM2_SIZE : 1 + 2 * IMM2_SIZE];
if ((type & 0x1) == 0)
{
/* Maximize case. */
- set_jumps(current->topbacktracks, LABEL());
+ set_jumps(current->own_backtracks, LABEL());
OP1(SLJIT_MOV, STR_PTR, 0, SLJIT_MEM1(STACK_TOP), STACK(0));
free_stack(common, 1);
CMPTO(SLJIT_NOT_EQUAL, STR_PTR, 0, SLJIT_IMM, 0, CURRENT_AS(ref_iterator_backtrack)->matchingpath);
@@ -12563,7 +12952,7 @@ if ((type & 0x1) == 0)
OP1(SLJIT_MOV, STR_PTR, 0, SLJIT_MEM1(STACK_TOP), STACK(0));
CMPTO(SLJIT_NOT_EQUAL, STR_PTR, 0, SLJIT_IMM, 0, CURRENT_AS(ref_iterator_backtrack)->matchingpath);
-set_jumps(current->topbacktracks, LABEL());
+set_jumps(current->own_backtracks, LABEL());
free_stack(common, ref ? 2 : 3);
}
@@ -12584,7 +12973,7 @@ if (!CURRENT_AS(recurse_backtrack)->inlined_pattern)
else
compile_backtrackingpath(common, current->top);
-set_jumps(current->topbacktracks, LABEL());
+set_jumps(current->own_backtracks, LABEL());
}
static void compile_assert_backtrackingpath(compiler_common *common, struct backtrack_common *current)
@@ -12603,13 +12992,13 @@ if (*cc == OP_BRAZERO)
if (bra == OP_BRAZERO)
{
- SLJIT_ASSERT(current->topbacktracks == NULL);
+ SLJIT_ASSERT(current->own_backtracks == NULL);
OP1(SLJIT_MOV, STR_PTR, 0, SLJIT_MEM1(STACK_TOP), STACK(0));
}
if (CURRENT_AS(assert_backtrack)->framesize < 0)
{
- set_jumps(current->topbacktracks, LABEL());
+ set_jumps(current->own_backtracks, LABEL());
if (bra == OP_BRAZERO)
{
@@ -12641,10 +13030,10 @@ if (*cc == OP_ASSERT || *cc == OP_ASSERTBACK)
OP2(SLJIT_ADD, STACK_TOP, 0, STACK_TOP, 0, SLJIT_IMM, (CURRENT_AS(assert_backtrack)->framesize - 1) * sizeof(sljit_sw));
OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), CURRENT_AS(assert_backtrack)->private_data_ptr, TMP1, 0);
- set_jumps(current->topbacktracks, LABEL());
+ set_jumps(current->own_backtracks, LABEL());
}
else
- set_jumps(current->topbacktracks, LABEL());
+ set_jumps(current->own_backtracks, LABEL());
if (bra == OP_BRAZERO)
{
@@ -12671,13 +13060,14 @@ PCRE2_UCHAR ket;
assert_backtrack *assert;
BOOL has_alternatives;
BOOL needs_control_head = FALSE;
+BOOL has_vreverse;
struct sljit_jump *brazero = NULL;
struct sljit_jump *next_alt = NULL;
struct sljit_jump *once = NULL;
struct sljit_jump *cond = NULL;
struct sljit_label *rmin_label = NULL;
struct sljit_label *exact_label = NULL;
-struct sljit_put_label *put_label = NULL;
+struct sljit_jump *mov_addr = NULL;
if (*cc == OP_BRAZERO || *cc == OP_BRAMINZERO)
{
@@ -12838,8 +13228,8 @@ else if (has_alternatives)
{
sljit_emit_ijump(compiler, SLJIT_JUMP, TMP1, 0);
- SLJIT_ASSERT(CURRENT_AS(bracket_backtrack)->u.matching_put_label);
- sljit_set_put_label(CURRENT_AS(bracket_backtrack)->u.matching_put_label, LABEL());
+ SLJIT_ASSERT(CURRENT_AS(bracket_backtrack)->u.matching_mov_addr);
+ sljit_set_label(CURRENT_AS(bracket_backtrack)->u.matching_mov_addr, LABEL());
sljit_emit_op0(compiler, SLJIT_ENDBR);
}
else
@@ -12847,8 +13237,8 @@ else if (has_alternatives)
}
COMPILE_BACKTRACKINGPATH(current->top);
-if (current->topbacktracks)
- set_jumps(current->topbacktracks, LABEL());
+if (current->own_backtracks)
+ set_jumps(current->own_backtracks, LABEL());
if (SLJIT_UNLIKELY(opcode == OP_COND) || SLJIT_UNLIKELY(opcode == OP_SCOND))
{
@@ -12884,14 +13274,25 @@ if (has_alternatives)
do
{
current->top = NULL;
- current->topbacktracks = NULL;
- current->nextbacktracks = NULL;
+ current->own_backtracks = NULL;
+ current->simple_backtracks = NULL;
/* Conditional blocks always have an additional alternative, even if it is empty. */
if (*cc == OP_ALT)
{
ccprev = cc + 1 + LINK_SIZE;
cc += GET(cc, 1);
- if (opcode != OP_COND && opcode != OP_SCOND)
+
+ has_vreverse = FALSE;
+ if (opcode == OP_ASSERTBACK || opcode == OP_ASSERTBACK_NA)
+ {
+ SLJIT_ASSERT(private_data_ptr != 0);
+ OP1(SLJIT_MOV, STR_PTR, 0, SLJIT_MEM1(SLJIT_SP), private_data_ptr);
+
+ has_vreverse = (*ccprev == OP_VREVERSE);
+ if (*ccprev == OP_REVERSE || has_vreverse)
+ ccprev = compile_reverse_matchingpath(common, ccprev, current);
+ }
+ else if (opcode != OP_COND && opcode != OP_SCOND)
{
if (opcode != OP_ONCE)
{
@@ -12903,15 +13304,30 @@ if (has_alternatives)
else
OP1(SLJIT_MOV, STR_PTR, 0, SLJIT_MEM1(STACK_TOP), STACK(needs_control_head ? 1 : 0));
}
+
compile_matchingpath(common, ccprev, cc, current);
if (SLJIT_UNLIKELY(sljit_get_compiler_error(compiler)))
return;
- if (opcode == OP_ASSERT_NA || opcode == OP_ASSERTBACK_NA)
- OP1(SLJIT_MOV, STR_PTR, 0, SLJIT_MEM1(SLJIT_SP), private_data_ptr);
+ switch (opcode)
+ {
+ case OP_ASSERTBACK_NA:
+ if (has_vreverse)
+ {
+ SLJIT_ASSERT(current->top != NULL && PRIVATE_DATA(ccbegin + 1));
+ add_jump(compiler, &current->top->simple_backtracks, CMP(SLJIT_LESS, STR_PTR, 0, STR_END, 0));
+ }
- if (opcode == OP_SCRIPT_RUN)
- match_script_run_common(common, private_data_ptr, current);
+ if (PRIVATE_DATA(ccbegin + 1))
+ OP1(SLJIT_MOV, STR_END, 0, SLJIT_MEM1(SLJIT_SP), private_data_ptr + sizeof(sljit_sw));
+ break;
+ case OP_ASSERT_NA:
+ OP1(SLJIT_MOV, STR_PTR, 0, SLJIT_MEM1(SLJIT_SP), private_data_ptr);
+ break;
+ case OP_SCRIPT_RUN:
+ match_script_run_common(common, private_data_ptr, current);
+ break;
+ }
}
/* Instructions after the current alternative is successfully matched. */
@@ -12966,7 +13382,7 @@ if (has_alternatives)
if (alt_max <= 3)
OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), STACK(stacksize), SLJIT_IMM, alt_count);
else
- put_label = sljit_emit_put_label(compiler, SLJIT_MEM1(STACK_TOP), STACK(stacksize));
+ mov_addr = sljit_emit_mov_addr(compiler, SLJIT_MEM1(STACK_TOP), STACK(stacksize));
}
if (offset != 0 && ket == OP_KETRMAX && common->optimized_cbracket[offset >> 1] != 0)
@@ -12992,15 +13408,15 @@ if (has_alternatives)
}
else
{
- sljit_set_put_label(put_label, LABEL());
+ sljit_set_label(mov_addr, LABEL());
sljit_emit_op0(compiler, SLJIT_ENDBR);
}
}
COMPILE_BACKTRACKINGPATH(current->top);
- if (current->topbacktracks)
- set_jumps(current->topbacktracks, LABEL());
- SLJIT_ASSERT(!current->nextbacktracks);
+ if (current->own_backtracks)
+ set_jumps(current->own_backtracks, LABEL());
+ SLJIT_ASSERT(!current->simple_backtracks);
}
while (*cc == OP_ALT);
@@ -13042,6 +13458,15 @@ if (offset != 0)
OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), private_data_ptr, TMP1, 0);
}
}
+else if (opcode == OP_ASSERTBACK_NA && PRIVATE_DATA(ccbegin + 1))
+ {
+ OP1(SLJIT_MOV, TMP1, 0, SLJIT_MEM1(STACK_TOP), STACK(0));
+ OP1(SLJIT_MOV, TMP2, 0, SLJIT_MEM1(STACK_TOP), STACK(1));
+ OP1(SLJIT_MOV, STR_END, 0, SLJIT_MEM1(SLJIT_SP), private_data_ptr + sizeof(sljit_sw));
+ OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), private_data_ptr, TMP1, 0);
+ OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), private_data_ptr + sizeof(sljit_sw), TMP2, 0);
+ free_stack(common, 4);
+ }
else if (opcode == OP_ASSERT_NA || opcode == OP_ASSERTBACK_NA || opcode == OP_SCRIPT_RUN || opcode == OP_SBRA || opcode == OP_SCOND)
{
OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), private_data_ptr, SLJIT_MEM1(STACK_TOP), STACK(0));
@@ -13128,12 +13553,19 @@ static SLJIT_INLINE void compile_bracketpos_backtrackingpath(compiler_common *co
DEFINE_COMPILER;
int offset;
struct sljit_jump *jump;
+PCRE2_SPTR cc;
+/* No retry on backtrack, just drop everything. */
if (CURRENT_AS(bracketpos_backtrack)->framesize < 0)
{
- if (*current->cc == OP_CBRAPOS || *current->cc == OP_SCBRAPOS)
+ cc = current->cc;
+
+ if (*cc == OP_BRAPOSZERO)
+ cc++;
+
+ if (*cc == OP_CBRAPOS || *cc == OP_SCBRAPOS)
{
- offset = (GET2(current->cc, 1 + LINK_SIZE)) << 1;
+ offset = (GET2(cc, 1 + LINK_SIZE)) << 1;
OP1(SLJIT_MOV, TMP1, 0, SLJIT_MEM1(STACK_TOP), STACK(0));
OP1(SLJIT_MOV, TMP2, 0, SLJIT_MEM1(STACK_TOP), STACK(1));
OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), OVECTOR(offset), TMP1, 0);
@@ -13143,7 +13575,7 @@ if (CURRENT_AS(bracketpos_backtrack)->framesize < 0)
if (common->capture_last_ptr != 0)
OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), common->capture_last_ptr, TMP1, 0);
}
- set_jumps(current->topbacktracks, LABEL());
+ set_jumps(current->own_backtracks, LABEL());
free_stack(common, CURRENT_AS(bracketpos_backtrack)->stacksize);
return;
}
@@ -13152,10 +13584,10 @@ OP1(SLJIT_MOV, STACK_TOP, 0, SLJIT_MEM1(SLJIT_SP), CURRENT_AS(bracketpos_backtra
add_jump(compiler, &common->revertframes, JUMP(SLJIT_FAST_CALL));
OP2(SLJIT_ADD, STACK_TOP, 0, STACK_TOP, 0, SLJIT_IMM, (CURRENT_AS(bracketpos_backtrack)->framesize - 1) * sizeof(sljit_sw));
-if (current->topbacktracks)
+if (current->own_backtracks)
{
jump = JUMP(SLJIT_JUMP);
- set_jumps(current->topbacktracks, LABEL());
+ set_jumps(current->own_backtracks, LABEL());
/* Drop the stack frame. */
free_stack(common, CURRENT_AS(bracketpos_backtrack)->stacksize);
JUMPHERE(jump);
@@ -13168,8 +13600,8 @@ static SLJIT_INLINE void compile_braminzero_backtrackingpath(compiler_common *co
assert_backtrack backtrack;
current->top = NULL;
-current->topbacktracks = NULL;
-current->nextbacktracks = NULL;
+current->own_backtracks = NULL;
+current->simple_backtracks = NULL;
if (current->cc[1] > OP_ASSERTBACK_NOT)
{
/* Manual call of compile_bracket_matchingpath and compile_bracket_backtrackingpath. */
@@ -13184,7 +13616,7 @@ else
/* Manual call of compile_assert_matchingpath. */
compile_assert_matchingpath(common, current->cc, &backtrack, FALSE);
}
-SLJIT_ASSERT(!current->nextbacktracks && !current->topbacktracks);
+SLJIT_ASSERT(!current->simple_backtracks && !current->own_backtracks);
}
static SLJIT_INLINE void compile_control_verb_backtrackingpath(compiler_common *common, struct backtrack_common *current)
@@ -13249,6 +13681,23 @@ else
add_jump(compiler, &common->reset_match, JUMP(SLJIT_JUMP));
}
+static SLJIT_INLINE void compile_vreverse_backtrackingpath(compiler_common *common, struct backtrack_common *current)
+{
+DEFINE_COMPILER;
+struct sljit_jump *jump;
+struct sljit_label *label;
+
+OP1(SLJIT_MOV, STR_PTR, 0, SLJIT_MEM1(STACK_TOP), STACK(2));
+jump = CMP(SLJIT_GREATER_EQUAL, STR_PTR, 0, SLJIT_MEM1(STACK_TOP), STACK(3));
+skip_valid_char(common);
+OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), STACK(2), STR_PTR, 0);
+JUMPTO(SLJIT_JUMP, CURRENT_AS(vreverse_backtrack)->matchingpath);
+
+label = LABEL();
+sljit_set_label(jump, label);
+set_jumps(current->own_backtracks, label);
+}
+
static SLJIT_INLINE void compile_then_trap_backtrackingpath(compiler_common *common, struct backtrack_common *current)
{
DEFINE_COMPILER;
@@ -13289,8 +13738,8 @@ then_trap_backtrack *save_then_trap = common->then_trap;
while (current)
{
- if (current->nextbacktracks != NULL)
- set_jumps(current->nextbacktracks, LABEL());
+ if (current->simple_backtracks != NULL)
+ set_jumps(current->simple_backtracks, LABEL());
switch(*current->cc)
{
case OP_SET_SOM:
@@ -13456,7 +13905,11 @@ while (current)
case OP_FAIL:
case OP_ACCEPT:
case OP_ASSERT_ACCEPT:
- set_jumps(current->topbacktracks, LABEL());
+ set_jumps(current->own_backtracks, LABEL());
+ break;
+
+ case OP_VREVERSE:
+ compile_vreverse_backtrackingpath(common, current);
break;
case OP_THEN_TRAP:
@@ -13487,7 +13940,7 @@ jump_list *match = NULL;
struct sljit_jump *next_alt = NULL;
struct sljit_jump *accept_exit = NULL;
struct sljit_label *quit;
-struct sljit_put_label *put_label = NULL;
+struct sljit_jump *mov_addr = NULL;
/* Recurse captures then. */
common->then_trap = NULL;
@@ -13502,7 +13955,7 @@ SLJIT_ASSERT(common->currententry->entry_label == NULL && common->recursive_head
common->currententry->entry_label = LABEL();
set_jumps(common->currententry->entry_calls, common->currententry->entry_label);
-sljit_emit_fast_enter(compiler, TMP2, 0);
+sljit_emit_op_dst(compiler, SLJIT_FAST_ENTER, TMP2, 0);
count_match(common);
local_size = (alt_max > 1) ? 2 : 1;
@@ -13535,7 +13988,7 @@ cc += GET(cc, 1);
while (1)
{
altbacktrack.top = NULL;
- altbacktrack.topbacktracks = NULL;
+ altbacktrack.own_backtracks = NULL;
if (altbacktrack.cc != ccbegin)
OP1(SLJIT_MOV, STR_PTR, 0, SLJIT_MEM1(STACK_TOP), STACK(0));
@@ -13550,7 +14003,7 @@ while (1)
if (alt_max > 1 || (recurse_flags & recurse_flag_accept_found))
{
if (alt_max > 3)
- put_label = sljit_emit_put_label(compiler, SLJIT_MEM1(STACK_TOP), STACK(1));
+ mov_addr = sljit_emit_mov_addr(compiler, SLJIT_MEM1(STACK_TOP), STACK(1));
else
OP1(SLJIT_MOV, SLJIT_MEM1(STACK_TOP), STACK(1), SLJIT_IMM, alt_count);
}
@@ -13564,7 +14017,7 @@ while (1)
common->currententry->backtrack_label = LABEL();
set_jumps(common->currententry->backtrack_calls, common->currententry->backtrack_label);
- sljit_emit_fast_enter(compiler, TMP1, 0);
+ sljit_emit_op_dst(compiler, SLJIT_FAST_ENTER, TMP1, 0);
if (recurse_flags & recurse_flag_accept_found)
accept_exit = CMP(SLJIT_EQUAL, SLJIT_MEM1(STACK_TOP), STACK(1), SLJIT_IMM, -1);
@@ -13583,7 +14036,7 @@ while (1)
if (alt_max > 3)
{
sljit_emit_ijump(compiler, SLJIT_JUMP, TMP1, 0);
- sljit_set_put_label(put_label, LABEL());
+ sljit_set_label(mov_addr, LABEL());
sljit_emit_op0(compiler, SLJIT_ENDBR);
}
else
@@ -13594,7 +14047,7 @@ while (1)
}
else if (alt_max > 3)
{
- sljit_set_put_label(put_label, LABEL());
+ sljit_set_label(mov_addr, LABEL());
sljit_emit_op0(compiler, SLJIT_ENDBR);
}
else
@@ -13612,7 +14065,7 @@ while (1)
compile_backtrackingpath(common, altbacktrack.top);
if (SLJIT_UNLIKELY(sljit_get_compiler_error(compiler)))
return;
- set_jumps(altbacktrack.topbacktracks, LABEL());
+ set_jumps(altbacktrack.own_backtracks, LABEL());
if (*cc != OP_ALT)
break;
@@ -13718,9 +14171,9 @@ jump_list *reqcu_not_found = NULL;
SLJIT_ASSERT(tables);
#if HAS_VIRTUAL_REGISTERS == 1
-SLJIT_ASSERT(sljit_get_register_index(TMP3) < 0 && sljit_get_register_index(ARGUMENTS) < 0 && sljit_get_register_index(RETURN_ADDR) < 0);
+SLJIT_ASSERT(sljit_get_register_index(SLJIT_GP_REGISTER, TMP3) < 0 && sljit_get_register_index(SLJIT_GP_REGISTER, ARGUMENTS) < 0 && sljit_get_register_index(SLJIT_GP_REGISTER, RETURN_ADDR) < 0);
#elif HAS_VIRTUAL_REGISTERS == 0
-SLJIT_ASSERT(sljit_get_register_index(TMP3) >= 0 && sljit_get_register_index(ARGUMENTS) >= 0 && sljit_get_register_index(RETURN_ADDR) >= 0);
+SLJIT_ASSERT(sljit_get_register_index(SLJIT_GP_REGISTER, TMP3) >= 0 && sljit_get_register_index(SLJIT_GP_REGISTER, ARGUMENTS) >= 0 && sljit_get_register_index(SLJIT_GP_REGISTER, RETURN_ADDR) >= 0);
#else
#error "Invalid value for HAS_VIRTUAL_REGISTERS"
#endif
@@ -13892,13 +14345,13 @@ memset(common->private_data_ptrs, 0, total_length * sizeof(sljit_s32));
private_data_size = common->cbra_ptr + (re->top_bracket + 1) * sizeof(sljit_sw);
if ((re->overall_options & PCRE2_ANCHORED) == 0 && (re->overall_options & PCRE2_NO_START_OPTIMIZE) == 0 && !common->has_skip_in_assert_back)
- detect_early_fail(common, common->start, &private_data_size, 0, 0, TRUE);
+ detect_early_fail(common, common->start, &private_data_size, 0, 0);
set_private_data_ptrs(common, &private_data_size, ccend);
SLJIT_ASSERT(common->early_fail_start_ptr <= common->early_fail_end_ptr);
-if (private_data_size > SLJIT_MAX_LOCAL_SIZE)
+if (private_data_size > 65536)
{
SLJIT_FREE(common->private_data_ptrs, allocator_data);
SLJIT_FREE(common->optimized_cbracket, allocator_data);
@@ -13912,7 +14365,7 @@ if (common->has_then)
set_then_offsets(common, common->start, NULL);
}
-compiler = sljit_create_compiler(allocator_data, NULL);
+compiler = sljit_create_compiler(allocator_data);
if (!compiler)
{
SLJIT_FREE(common->optimized_cbracket, allocator_data);
@@ -13923,7 +14376,7 @@ common->compiler = compiler;
/* Main pcre2_jit_exec entry. */
SLJIT_ASSERT((private_data_size & (sizeof(sljit_sw) - 1)) == 0);
-sljit_emit_enter(compiler, 0, SLJIT_ARGS1(W, W), 5, 5, 0, 0, private_data_size);
+sljit_emit_enter(compiler, 0, SLJIT_ARGS1(W, W), 5, 5, SLJIT_NUMBER_OF_SCRATCH_FLOAT_REGISTERS, 0, private_data_size);
/* Register init. */
reset_ovector(common, (re->top_bracket + 1) * 2);
@@ -14187,7 +14640,7 @@ common->quit_label = quit_label;
/* This is a (really) rare case. */
set_jumps(common->stackalloc, LABEL());
/* RETURN_ADDR is not a saved register. */
-sljit_emit_fast_enter(compiler, SLJIT_MEM1(SLJIT_SP), LOCALS0);
+sljit_emit_op_dst(compiler, SLJIT_FAST_ENTER, SLJIT_MEM1(SLJIT_SP), LOCALS0);
SLJIT_ASSERT(TMP1 == SLJIT_R0 && STR_PTR == SLJIT_R1);
@@ -14225,7 +14678,12 @@ if (common->revertframes != NULL)
if (common->wordboundary != NULL)
{
set_jumps(common->wordboundary, LABEL());
- check_wordboundary(common);
+ check_wordboundary(common, FALSE);
+ }
+if (common->ucp_wordboundary != NULL)
+ {
+ set_jumps(common->ucp_wordboundary, LABEL());
+ check_wordboundary(common, TRUE);
}
if (common->anynewline != NULL)
{
@@ -14252,10 +14710,17 @@ if (common->caselesscmp != NULL)
set_jumps(common->caselesscmp, LABEL());
do_caselesscmp(common);
}
-if (common->reset_match != NULL)
+if (common->reset_match != NULL || common->restart_match != NULL)
{
+ if (common->restart_match != NULL)
+ {
+ set_jumps(common->restart_match, LABEL());
+ OP1(SLJIT_MOV, STR_PTR, 0, SLJIT_MEM1(SLJIT_SP), common->start_ptr);
+ }
+
set_jumps(common->reset_match, LABEL());
do_reset_match(common, (re->top_bracket + 1) * 2);
+ /* The value of restart_match is in TMP1. */
CMPTO(SLJIT_GREATER, STR_PTR, 0, TMP1, 0, continue_match_label);
OP1(SLJIT_MOV, STR_PTR, 0, TMP1, 0);
JUMPTO(SLJIT_JUMP, reset_match_label);
@@ -14315,7 +14780,7 @@ if (common->getucdtype != NULL)
SLJIT_FREE(common->optimized_cbracket, allocator_data);
SLJIT_FREE(common->private_data_ptrs, allocator_data);
-executable_func = sljit_generate_code(compiler);
+executable_func = sljit_generate_code(compiler, 0, NULL);
executable_size = sljit_get_generated_code_size(compiler);
sljit_free_compiler(compiler);
diff --git a/src/3rdparty/pcre2/src/pcre2_jit_match.c b/src/3rdparty/pcre2/src/pcre2_jit_match.c
index 1ab3af073e..ae5903e202 100644
--- a/src/3rdparty/pcre2/src/pcre2_jit_match.c
+++ b/src/3rdparty/pcre2/src/pcre2_jit_match.c
@@ -7,7 +7,7 @@ and semantics are as close as possible to those of the Perl 5 language.
Written by Philip Hazel
Original API code Copyright (c) 1997-2012 University of Cambridge
- New API code Copyright (c) 2016-2018 University of Cambridge
+ New API code Copyright (c) 2016-2023 University of Cambridge
-----------------------------------------------------------------------------
Redistribution and use in source and binary forms, with or without
@@ -42,6 +42,12 @@ POSSIBILITY OF SUCH DAMAGE.
#error This file must be included from pcre2_jit_compile.c.
#endif
+#if defined(__has_feature)
+#if __has_feature(memory_sanitizer)
+#include <sanitizer/msan_interface.h>
+#endif /* __has_feature(memory_sanitizer) */
+#endif /* defined(__has_feature) */
+
#ifdef SUPPORT_JIT
static SLJIT_NOINLINE int jit_machine_stack_exec(jit_arguments *arguments, jit_function executable_func)
@@ -171,6 +177,7 @@ if (rc > (int)oveccount)
rc = 0;
match_data->code = re;
match_data->subject = (rc >= 0 || rc == PCRE2_ERROR_PARTIAL)? subject : NULL;
+match_data->subject_length = length;
match_data->rc = rc;
match_data->startchar = arguments.startchar_ptr - subject;
match_data->leftchar = 0;
@@ -178,6 +185,13 @@ match_data->rightchar = 0;
match_data->mark = arguments.mark_ptr;
match_data->matchedby = PCRE2_MATCHEDBY_JIT;
+#if defined(__has_feature)
+#if __has_feature(memory_sanitizer)
+if (rc > 0)
+ __msan_unpoison(match_data->ovector, 2 * rc * sizeof(match_data->ovector[0]));
+#endif /* __has_feature(memory_sanitizer) */
+#endif /* defined(__has_feature) */
+
return match_data->rc;
#endif /* SUPPORT_JIT */
diff --git a/src/3rdparty/pcre2/src/pcre2_jit_misc.c b/src/3rdparty/pcre2/src/pcre2_jit_misc.c
index bb6a5589cb..c3abc0b33b 100644
--- a/src/3rdparty/pcre2/src/pcre2_jit_misc.c
+++ b/src/3rdparty/pcre2/src/pcre2_jit_misc.c
@@ -141,8 +141,8 @@ if (startsize == 0 || maxsize == 0 || maxsize > SIZE_MAX - STACK_GROWTH_RATE)
return NULL;
if (startsize > maxsize)
startsize = maxsize;
-startsize = (startsize + STACK_GROWTH_RATE - 1) & ~(STACK_GROWTH_RATE - 1);
-maxsize = (maxsize + STACK_GROWTH_RATE - 1) & ~(STACK_GROWTH_RATE - 1);
+startsize = (startsize + STACK_GROWTH_RATE - 1) & (size_t)(~(STACK_GROWTH_RATE - 1));
+maxsize = (maxsize + STACK_GROWTH_RATE - 1) & (size_t)(~(STACK_GROWTH_RATE - 1));
jit_stack = PRIV(memctl_malloc)(sizeof(pcre2_real_jit_stack), (pcre2_memctl *)gcontext);
if (jit_stack == NULL) return NULL;
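/* Illustrative sketch (not part of the patch): the two lines changed above
   round the stack sizes up to a multiple of STACK_GROWTH_RATE; the new size_t
   cast only makes the widening of the inverted mask explicit (the arithmetic
   result is unchanged), presumably to silence implicit-conversion warnings.
   The idiom, with a hypothetical power-of-two rate: */
#include <assert.h>
#include <stddef.h>

#define GROWTH_RATE ((size_t)8192)   /* hypothetical value; must be a power of two */

static size_t round_up_to_rate(size_t size)
{
/* Add rate-1, then clear the low bits with the widened mask. */
return (size + GROWTH_RATE - 1) & ~(GROWTH_RATE - 1);
}

int main(void)
{
assert(round_up_to_rate(1) == 8192);
assert(round_up_to_rate(8192) == 8192);
assert(round_up_to_rate(8193) == 16384);
return 0;
}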
diff --git a/src/3rdparty/pcre2/src/pcre2_jit_neon_inc.h b/src/3rdparty/pcre2/src/pcre2_jit_neon_inc.h
index 165602edc0..4a718b67b7 100644
--- a/src/3rdparty/pcre2/src/pcre2_jit_neon_inc.h
+++ b/src/3rdparty/pcre2/src/pcre2_jit_neon_inc.h
@@ -82,7 +82,12 @@ POSSIBILITY OF SUCH DAMAGE.
# endif
# endif
-static sljit_u8* SLJIT_FUNC FF_FUN(sljit_u8 *str_end, sljit_u8 *str_ptr, sljit_uw offs1, sljit_uw offs2, sljit_uw chars)
+#if (defined(__GNUC__) && __SANITIZE_ADDRESS__) \
+ || (defined(__clang__) \
+ && ((__clang_major__ == 3 && __clang_minor__ >= 3) || (__clang_major__ > 3)))
+__attribute__((no_sanitize_address))
+#endif
+static sljit_u8* SLJIT_FUNC FF_FUN(sljit_u8 *str_end, sljit_u8 **str_ptr, sljit_uw offs1, sljit_uw offs2, sljit_uw chars)
#undef FF_FUN
{
quad_word qw;
@@ -171,7 +176,7 @@ else
}
# endif
-str_ptr += IN_UCHARS(offs1);
+*str_ptr += IN_UCHARS(offs1);
#endif
#if PCRE2_CODE_UNIT_WIDTH != 8
@@ -183,13 +188,13 @@ restart:;
#endif
#if defined(FFCPS)
-if (str_ptr >= str_end)
+if (*str_ptr >= str_end)
return NULL;
-sljit_u8 *p1 = str_ptr - diff;
+sljit_u8 *p1 = *str_ptr - diff;
#endif
-sljit_s32 align_offset = ((uint64_t)str_ptr & 0xf);
-str_ptr = (sljit_u8 *) ((uint64_t)str_ptr & ~0xf);
-vect_t data = VLD1Q(str_ptr);
+sljit_s32 align_offset = ((uint64_t)*str_ptr & 0xf);
+*str_ptr = (sljit_u8 *) ((uint64_t)*str_ptr & ~0xf);
+vect_t data = VLD1Q(*str_ptr);
#if PCRE2_CODE_UNIT_WIDTH != 8
data = VANDQ(data, char_mask);
#endif
@@ -212,9 +217,9 @@ vect_t prev_data = data;
# endif
vect_t data2;
-if (p1 < str_ptr)
+if (p1 < *str_ptr)
{
- data2 = VLD1Q(str_ptr - diff);
+ data2 = VLD1Q(*str_ptr - diff);
#if PCRE2_CODE_UNIT_WIDTH != 8
data2 = VANDQ(data2, char_mask);
#endif
@@ -242,12 +247,12 @@ if (align_offset < 8)
qw.dw[0] >>= align_offset * 8;
if (qw.dw[0])
{
- str_ptr += align_offset + __builtin_ctzll(qw.dw[0]) / 8;
+ *str_ptr += align_offset + __builtin_ctzll(qw.dw[0]) / 8;
goto match;
}
if (qw.dw[1])
{
- str_ptr += 8 + __builtin_ctzll(qw.dw[1]) / 8;
+ *str_ptr += 8 + __builtin_ctzll(qw.dw[1]) / 8;
goto match;
}
}
@@ -256,15 +261,15 @@ else
qw.dw[1] >>= (align_offset - 8) * 8;
if (qw.dw[1])
{
- str_ptr += align_offset + __builtin_ctzll(qw.dw[1]) / 8;
+ *str_ptr += align_offset + __builtin_ctzll(qw.dw[1]) / 8;
goto match;
}
}
-str_ptr += 16;
+*str_ptr += 16;
-while (str_ptr < str_end)
+while (*str_ptr < str_end)
{
- vect_t orig_data = VLD1Q(str_ptr);
+ vect_t orig_data = VLD1Q(*str_ptr);
#if PCRE2_CODE_UNIT_WIDTH != 8
orig_data = VANDQ(orig_data, char_mask);
#endif
@@ -287,7 +292,7 @@ while (str_ptr < str_end)
# if defined (FFCPS_DIFF1)
data2 = VEXTQ(prev_data, data, VECTOR_FACTOR - 1);
# else
- data2 = VLD1Q(str_ptr - diff);
+ data2 = VLD1Q(*str_ptr - diff);
# if PCRE2_CODE_UNIT_WIDTH != 8
data2 = VANDQ(data2, char_mask);
# endif
@@ -312,11 +317,11 @@ while (str_ptr < str_end)
VST1Q(qw.mem, eq);
if (qw.dw[0])
- str_ptr += __builtin_ctzll(qw.dw[0]) / 8;
+ *str_ptr += __builtin_ctzll(qw.dw[0]) / 8;
else if (qw.dw[1])
- str_ptr += 8 + __builtin_ctzll(qw.dw[1]) / 8;
+ *str_ptr += 8 + __builtin_ctzll(qw.dw[1]) / 8;
else {
- str_ptr += 16;
+ *str_ptr += 16;
#if defined (FFCPS_DIFF1)
prev_data = orig_data;
#endif
@@ -324,24 +329,24 @@ while (str_ptr < str_end)
}
match:;
- if (str_ptr >= str_end)
+ if (*str_ptr >= str_end)
/* Failed match. */
return NULL;
#if defined(FF_UTF)
- if (utf_continue((PCRE2_SPTR)str_ptr - offs1))
+ if (utf_continue((PCRE2_SPTR)*str_ptr - offs1))
{
/* Not a match. */
- str_ptr += IN_UCHARS(1);
+ *str_ptr += IN_UCHARS(1);
goto restart;
}
#endif
/* Match. */
#if defined (FFCPS)
- str_ptr -= IN_UCHARS(offs1);
+ *str_ptr -= IN_UCHARS(offs1);
#endif
- return str_ptr;
+ return *str_ptr;
}
/* Failed match. */
diff --git a/src/3rdparty/pcre2/src/pcre2_jit_simd_inc.h b/src/3rdparty/pcre2/src/pcre2_jit_simd_inc.h
index 1a5ce4ed09..502977fc32 100644
--- a/src/3rdparty/pcre2/src/pcre2_jit_simd_inc.h
+++ b/src/3rdparty/pcre2/src/pcre2_jit_simd_inc.h
@@ -42,7 +42,8 @@ POSSIBILITY OF SUCH DAMAGE.
#if !(defined SUPPORT_VALGRIND)
#if ((defined SLJIT_CONFIG_X86 && SLJIT_CONFIG_X86) \
- || (defined SLJIT_CONFIG_S390X && SLJIT_CONFIG_S390X))
+ || (defined SLJIT_CONFIG_S390X && SLJIT_CONFIG_S390X) \
+ || (defined SLJIT_CONFIG_LOONGARCH_64 && SLJIT_CONFIG_LOONGARCH_64))
typedef enum {
vector_compare_match1,
@@ -50,18 +51,39 @@ typedef enum {
vector_compare_match2,
} vector_compare_type;
-static SLJIT_INLINE sljit_u32 max_fast_forward_char_pair_offset(void)
+#if (defined SLJIT_CONFIG_X86 && SLJIT_CONFIG_X86)
+static SLJIT_INLINE sljit_s32 max_fast_forward_char_pair_offset(void)
{
#if PCRE2_CODE_UNIT_WIDTH == 8
+/* The AVX2 code path is currently disabled. */
+/* return sljit_has_cpu_feature(SLJIT_HAS_AVX2) ? 31 : 15; */
return 15;
#elif PCRE2_CODE_UNIT_WIDTH == 16
+/* The AVX2 code path is currently disabled. */
+/* return sljit_has_cpu_feature(SLJIT_HAS_AVX2) ? 15 : 7; */
return 7;
#elif PCRE2_CODE_UNIT_WIDTH == 32
+/* The AVX2 code path is currently disabled. */
+/* return sljit_has_cpu_feature(SLJIT_HAS_AVX2) ? 7 : 3; */
return 3;
#else
#error "Unsupported unit width"
#endif
}
+#else /* !SLJIT_CONFIG_X86 */
+static SLJIT_INLINE sljit_s32 max_fast_forward_char_pair_offset(void)
+{
+#if PCRE2_CODE_UNIT_WIDTH == 8
+return 15;
+#elif PCRE2_CODE_UNIT_WIDTH == 16
+return 7;
+#elif PCRE2_CODE_UNIT_WIDTH == 32
+return 3;
+#else
+#error "Unsupported unit width"
+#endif
+}
+#endif /* SLJIT_CONFIG_X86 */
#if defined SUPPORT_UNICODE && PCRE2_CODE_UNIT_WIDTH != 32
static struct sljit_jump *jump_if_utf_char_start(struct sljit_compiler *compiler, sljit_s32 reg)
@@ -86,49 +108,35 @@ static sljit_s32 character_to_int32(PCRE2_UCHAR chr)
{
sljit_u32 value = chr;
#if PCRE2_CODE_UNIT_WIDTH == 8
-#define SSE2_COMPARE_TYPE_INDEX 0
+#define SIMD_COMPARE_TYPE_INDEX 0
return (sljit_s32)((value << 24) | (value << 16) | (value << 8) | value);
#elif PCRE2_CODE_UNIT_WIDTH == 16
-#define SSE2_COMPARE_TYPE_INDEX 1
+#define SIMD_COMPARE_TYPE_INDEX 1
return (sljit_s32)((value << 16) | value);
#elif PCRE2_CODE_UNIT_WIDTH == 32
-#define SSE2_COMPARE_TYPE_INDEX 2
+#define SIMD_COMPARE_TYPE_INDEX 2
return (sljit_s32)(value);
#else
#error "Unsupported unit width"
#endif
}
-static void load_from_mem_sse2(struct sljit_compiler *compiler, sljit_s32 dst_xmm_reg, sljit_s32 src_general_reg, sljit_s8 offset)
+static void fast_forward_char_pair_sse2_compare(struct sljit_compiler *compiler, vector_compare_type compare_type,
+ sljit_s32 reg_type, int step, sljit_s32 dst_ind, sljit_s32 cmp1_ind, sljit_s32 cmp2_ind, sljit_s32 tmp_ind)
{
-sljit_u8 instruction[5];
-
-SLJIT_ASSERT(dst_xmm_reg < 8);
-SLJIT_ASSERT(src_general_reg < 8);
-
-/* MOVDQA xmm1, xmm2/m128 */
-instruction[0] = ((sljit_u8)offset & 0xf) == 0 ? 0x66 : 0xf3;
-instruction[1] = 0x0f;
-instruction[2] = 0x6f;
+sljit_u8 instruction[4];
-if (offset == 0)
+if (reg_type == SLJIT_SIMD_REG_128)
{
- instruction[3] = (dst_xmm_reg << 3) | src_general_reg;
- sljit_emit_op_custom(compiler, instruction, 4);
- return;
+ instruction[0] = 0x66;
+ instruction[1] = 0x0f;
+ }
+else
+ {
+ /* Two byte VEX prefix. */
+ instruction[0] = 0xc5;
+ instruction[1] = 0xfd;
}
-
-instruction[3] = 0x40 | (dst_xmm_reg << 3) | src_general_reg;
-instruction[4] = (sljit_u8)offset;
-sljit_emit_op_custom(compiler, instruction, 5);
-}
-
-static void fast_forward_char_pair_sse2_compare(struct sljit_compiler *compiler, vector_compare_type compare_type,
- int step, sljit_s32 dst_ind, sljit_s32 cmp1_ind, sljit_s32 cmp2_ind, sljit_s32 tmp_ind)
-{
-sljit_u8 instruction[4];
-instruction[0] = 0x66;
-instruction[1] = 0x0f;
SLJIT_ASSERT(step >= 0 && step <= 3);
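/* Illustrative sketch (not part of the patch): character_to_int32 above
   broadcasts one code unit across a 32-bit value so it can be replicated into
   a SIMD register, and the 128-bit offset limits returned earlier (15/7/3)
   are simply 16 / unit size - 1. A quick check of the 8-bit case: */
#include <assert.h>
#include <stdint.h>

int main(void)
{
uint32_t value = 'a';                           /* 0x61 */
int32_t broadcast = (int32_t)((value << 24) | (value << 16) | (value << 8) | value);
assert(broadcast == 0x61616161);                /* one byte replicated into every lane */
assert(16 / sizeof(uint8_t) - 1 == 15);         /* matches the 8-bit return value */
return 0;
}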
@@ -139,8 +147,10 @@ if (compare_type != vector_compare_match2)
if (compare_type == vector_compare_match1i)
{
/* POR xmm1, xmm2/m128 */
- /* instruction[0] = 0x66; */
- /* instruction[1] = 0x0f; */
+ if (reg_type == SLJIT_SIMD_REG_256)
+ instruction[1] ^= (dst_ind << 3);
+
+ /* Prefix is filled. */
instruction[2] = 0xeb;
instruction[3] = 0xc0 | (dst_ind << 3) | cmp2_ind;
sljit_emit_op_custom(compiler, instruction, 4);
@@ -152,20 +162,35 @@ if (compare_type != vector_compare_match2)
return;
/* PCMPEQB/W/D xmm1, xmm2/m128 */
- /* instruction[0] = 0x66; */
- /* instruction[1] = 0x0f; */
- instruction[2] = 0x74 + SSE2_COMPARE_TYPE_INDEX;
+ if (reg_type == SLJIT_SIMD_REG_256)
+ instruction[1] ^= (dst_ind << 3);
+
+ /* Prefix is filled. */
+ instruction[2] = 0x74 + SIMD_COMPARE_TYPE_INDEX;
instruction[3] = 0xc0 | (dst_ind << 3) | cmp1_ind;
sljit_emit_op_custom(compiler, instruction, 4);
return;
}
+if (reg_type == SLJIT_SIMD_REG_256)
+ {
+ if (step == 2)
+ return;
+
+ if (step == 0)
+ {
+ step = 2;
+ instruction[1] ^= (dst_ind << 3);
+ }
+ }
+
switch (step)
{
case 0:
+ SLJIT_ASSERT(reg_type == SLJIT_SIMD_REG_128);
+
/* MOVDQA xmm1, xmm2/m128 */
- /* instruction[0] = 0x66; */
- /* instruction[1] = 0x0f; */
+ /* Prefix is filled. */
instruction[2] = 0x6f;
instruction[3] = 0xc0 | (tmp_ind << 3) | dst_ind;
sljit_emit_op_custom(compiler, instruction, 4);
@@ -173,26 +198,29 @@ switch (step)
case 1:
/* PCMPEQB/W/D xmm1, xmm2/m128 */
- /* instruction[0] = 0x66; */
- /* instruction[1] = 0x0f; */
- instruction[2] = 0x74 + SSE2_COMPARE_TYPE_INDEX;
+ if (reg_type == SLJIT_SIMD_REG_256)
+ instruction[1] ^= (dst_ind << 3);
+
+ /* Prefix is filled. */
+ instruction[2] = 0x74 + SIMD_COMPARE_TYPE_INDEX;
instruction[3] = 0xc0 | (dst_ind << 3) | cmp1_ind;
sljit_emit_op_custom(compiler, instruction, 4);
return;
case 2:
/* PCMPEQB/W/D xmm1, xmm2/m128 */
- /* instruction[0] = 0x66; */
- /* instruction[1] = 0x0f; */
- instruction[2] = 0x74 + SSE2_COMPARE_TYPE_INDEX;
+ /* Prefix is filled. */
+ instruction[2] = 0x74 + SIMD_COMPARE_TYPE_INDEX;
instruction[3] = 0xc0 | (tmp_ind << 3) | cmp2_ind;
sljit_emit_op_custom(compiler, instruction, 4);
return;
case 3:
/* POR xmm1, xmm2/m128 */
- /* instruction[0] = 0x66; */
- /* instruction[1] = 0x0f; */
+ if (reg_type == SLJIT_SIMD_REG_256)
+ instruction[1] ^= (dst_ind << 3);
+
+ /* Prefix is filled. */
instruction[2] = 0xeb;
instruction[3] = 0xc0 | (dst_ind << 3) | tmp_ind;
sljit_emit_op_custom(compiler, instruction, 4);
@@ -200,12 +228,16 @@ switch (step)
}
}
-#define JIT_HAS_FAST_FORWARD_CHAR_SIMD (sljit_has_cpu_feature(SLJIT_HAS_SSE2))
+#define JIT_HAS_FAST_FORWARD_CHAR_SIMD (sljit_has_cpu_feature(SLJIT_HAS_SIMD))
static void fast_forward_char_simd(compiler_common *common, PCRE2_UCHAR char1, PCRE2_UCHAR char2, sljit_s32 offset)
{
DEFINE_COMPILER;
sljit_u8 instruction[8];
+/* The AVX2 code path is currently disabled. */
+/* sljit_s32 reg_type = sljit_has_cpu_feature(SLJIT_HAS_AVX2) ? SLJIT_SIMD_REG_256 : SLJIT_SIMD_REG_128; */
+sljit_s32 reg_type = SLJIT_SIMD_REG_128;
+sljit_s32 value;
struct sljit_label *start;
#if defined SUPPORT_UNICODE && PCRE2_CODE_UNIT_WIDTH != 32
struct sljit_label *restart;
@@ -213,12 +245,11 @@ struct sljit_label *restart;
struct sljit_jump *quit;
struct sljit_jump *partial_quit[2];
vector_compare_type compare_type = vector_compare_match1;
-sljit_s32 tmp1_reg_ind = sljit_get_register_index(TMP1);
-sljit_s32 str_ptr_reg_ind = sljit_get_register_index(STR_PTR);
-sljit_s32 data_ind = 0;
-sljit_s32 tmp_ind = 1;
-sljit_s32 cmp1_ind = 2;
-sljit_s32 cmp2_ind = 3;
+sljit_s32 tmp1_reg_ind = sljit_get_register_index(SLJIT_GP_REGISTER, TMP1);
+sljit_s32 data_ind = sljit_get_register_index(SLJIT_FLOAT_REGISTER, SLJIT_FR0);
+sljit_s32 cmp1_ind = sljit_get_register_index(SLJIT_FLOAT_REGISTER, SLJIT_FR1);
+sljit_s32 cmp2_ind = sljit_get_register_index(SLJIT_FLOAT_REGISTER, SLJIT_FR2);
+sljit_s32 tmp_ind = sljit_get_register_index(SLJIT_FLOAT_REGISTER, SLJIT_FR3);
sljit_u32 bit = 0;
int i;
@@ -241,61 +272,34 @@ if (common->mode == PCRE2_JIT_COMPLETE)
add_jump(compiler, &common->failed_match, partial_quit[0]);
/* First part (unaligned start) */
-
-OP1(SLJIT_MOV, TMP1, 0, SLJIT_IMM, character_to_int32(char1 | bit));
-
-SLJIT_ASSERT(tmp1_reg_ind < 8);
-
-/* MOVD xmm, r/m32 */
-instruction[0] = 0x66;
-instruction[1] = 0x0f;
-instruction[2] = 0x6e;
-instruction[3] = 0xc0 | (cmp1_ind << 3) | tmp1_reg_ind;
-sljit_emit_op_custom(compiler, instruction, 4);
+value = SLJIT_SIMD_REG_128 | SLJIT_SIMD_ELEM_32 | SLJIT_SIMD_LANE_ZERO;
+sljit_emit_simd_lane_mov(compiler, value, SLJIT_FR1, 0, SLJIT_IMM, character_to_int32(char1 | bit));
if (char1 != char2)
- {
- OP1(SLJIT_MOV, TMP1, 0, SLJIT_IMM, character_to_int32(bit != 0 ? bit : char2));
-
- /* MOVD xmm, r/m32 */
- instruction[3] = 0xc0 | (cmp2_ind << 3) | tmp1_reg_ind;
- sljit_emit_op_custom(compiler, instruction, 4);
- }
+ sljit_emit_simd_lane_mov(compiler, value, SLJIT_FR2, 0, SLJIT_IMM, character_to_int32(bit != 0 ? bit : char2));
OP1(SLJIT_MOV, TMP2, 0, STR_PTR, 0);
-/* PSHUFD xmm1, xmm2/m128, imm8 */
-/* instruction[0] = 0x66; */
-/* instruction[1] = 0x0f; */
-instruction[2] = 0x70;
-instruction[3] = 0xc0 | (cmp1_ind << 3) | cmp1_ind;
-instruction[4] = 0;
-sljit_emit_op_custom(compiler, instruction, 5);
+sljit_emit_simd_lane_replicate(compiler, reg_type | SLJIT_SIMD_ELEM_32, SLJIT_FR1, SLJIT_FR1, 0);
if (char1 != char2)
- {
- /* PSHUFD xmm1, xmm2/m128, imm8 */
- instruction[3] = 0xc0 | (cmp2_ind << 3) | cmp2_ind;
- sljit_emit_op_custom(compiler, instruction, 5);
- }
+ sljit_emit_simd_lane_replicate(compiler, reg_type | SLJIT_SIMD_ELEM_32, SLJIT_FR2, SLJIT_FR2, 0);
#if defined SUPPORT_UNICODE && PCRE2_CODE_UNIT_WIDTH != 32
restart = LABEL();
#endif
-OP2(SLJIT_AND, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, ~0xf);
-OP2(SLJIT_AND, TMP2, 0, TMP2, 0, SLJIT_IMM, 0xf);
-load_from_mem_sse2(compiler, data_ind, str_ptr_reg_ind, 0);
-for (i = 0; i < 4; i++)
- fast_forward_char_pair_sse2_compare(compiler, compare_type, i, data_ind, cmp1_ind, cmp2_ind, tmp_ind);
+value = (reg_type == SLJIT_SIMD_REG_256) ? 0x1f : 0xf;
+OP2(SLJIT_AND, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, ~value);
+OP2(SLJIT_AND, TMP2, 0, TMP2, 0, SLJIT_IMM, value);
-/* PMOVMSKB reg, xmm */
-/* instruction[0] = 0x66; */
-/* instruction[1] = 0x0f; */
-instruction[2] = 0xd7;
-instruction[3] = 0xc0 | (tmp1_reg_ind << 3) | data_ind;
-sljit_emit_op_custom(compiler, instruction, 4);
+value = (reg_type == SLJIT_SIMD_REG_256) ? SLJIT_SIMD_MEM_ALIGNED_256 : SLJIT_SIMD_MEM_ALIGNED_128;
+sljit_emit_simd_mov(compiler, reg_type | value, SLJIT_FR0, SLJIT_MEM1(STR_PTR), 0);
+for (i = 0; i < 4; i++)
+ fast_forward_char_pair_sse2_compare(compiler, compare_type, reg_type, i, data_ind, cmp1_ind, cmp2_ind, tmp_ind);
+
+sljit_emit_simd_sign(compiler, SLJIT_SIMD_STORE | reg_type | SLJIT_SIMD_ELEM_8, SLJIT_FR0, TMP1, 0);
OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, TMP2, 0);
OP2(SLJIT_LSHR, TMP1, 0, TMP1, 0, TMP2, 0);
@@ -306,27 +310,24 @@ OP2(SLJIT_SUB, STR_PTR, 0, STR_PTR, 0, TMP2, 0);
/* Second part (aligned) */
start = LABEL();
-OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, 16);
+value = (reg_type == SLJIT_SIMD_REG_256) ? 32 : 16;
+OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, value);
partial_quit[1] = CMP(SLJIT_GREATER_EQUAL, STR_PTR, 0, STR_END, 0);
if (common->mode == PCRE2_JIT_COMPLETE)
add_jump(compiler, &common->failed_match, partial_quit[1]);
-load_from_mem_sse2(compiler, data_ind, str_ptr_reg_ind, 0);
+value = (reg_type == SLJIT_SIMD_REG_256) ? SLJIT_SIMD_MEM_ALIGNED_256 : SLJIT_SIMD_MEM_ALIGNED_128;
+sljit_emit_simd_mov(compiler, reg_type | value, SLJIT_FR0, SLJIT_MEM1(STR_PTR), 0);
for (i = 0; i < 4; i++)
- fast_forward_char_pair_sse2_compare(compiler, compare_type, i, data_ind, cmp1_ind, cmp2_ind, tmp_ind);
-
-/* PMOVMSKB reg, xmm */
-/* instruction[0] = 0x66; */
-/* instruction[1] = 0x0f; */
-instruction[2] = 0xd7;
-instruction[3] = 0xc0 | (tmp1_reg_ind << 3) | data_ind;
-sljit_emit_op_custom(compiler, instruction, 4);
+ fast_forward_char_pair_sse2_compare(compiler, compare_type, reg_type, i, data_ind, cmp1_ind, cmp2_ind, tmp_ind);
+sljit_emit_simd_sign(compiler, SLJIT_SIMD_STORE | reg_type | SLJIT_SIMD_ELEM_8, SLJIT_FR0, TMP1, 0);
CMPTO(SLJIT_ZERO, TMP1, 0, SLJIT_IMM, 0, start);
JUMPHERE(quit);
+SLJIT_ASSERT(tmp1_reg_ind < 8);
/* BSF r32, r/m32 */
instruction[0] = 0x0f;
instruction[1] = 0xbc;
@@ -340,7 +341,7 @@ if (common->mode != PCRE2_JIT_COMPLETE)
JUMPHERE(partial_quit[0]);
JUMPHERE(partial_quit[1]);
OP2U(SLJIT_SUB | SLJIT_SET_GREATER, STR_PTR, 0, STR_END, 0);
- CMOV(SLJIT_GREATER, STR_PTR, STR_END, 0);
+ SELECT(SLJIT_GREATER, STR_PTR, STR_END, 0, STR_PTR);
}
else
add_jump(compiler, &common->failed_match, CMP(SLJIT_GREATER_EQUAL, STR_PTR, 0, STR_END, 0));
@@ -364,22 +365,25 @@ if (common->utf && offset > 0)
#endif
}
-#define JIT_HAS_FAST_REQUESTED_CHAR_SIMD (sljit_has_cpu_feature(SLJIT_HAS_SSE2))
+#define JIT_HAS_FAST_REQUESTED_CHAR_SIMD (sljit_has_cpu_feature(SLJIT_HAS_SIMD))
static jump_list *fast_requested_char_simd(compiler_common *common, PCRE2_UCHAR char1, PCRE2_UCHAR char2)
{
DEFINE_COMPILER;
sljit_u8 instruction[8];
+/* The AVX2 code path is currently disabled. */
+/* sljit_s32 reg_type = sljit_has_cpu_feature(SLJIT_HAS_AVX2) ? SLJIT_SIMD_REG_256 : SLJIT_SIMD_REG_128; */
+sljit_s32 reg_type = SLJIT_SIMD_REG_128;
+sljit_s32 value;
struct sljit_label *start;
struct sljit_jump *quit;
jump_list *not_found = NULL;
vector_compare_type compare_type = vector_compare_match1;
-sljit_s32 tmp1_reg_ind = sljit_get_register_index(TMP1);
-sljit_s32 str_ptr_reg_ind = sljit_get_register_index(STR_PTR);
-sljit_s32 data_ind = 0;
-sljit_s32 tmp_ind = 1;
-sljit_s32 cmp1_ind = 2;
-sljit_s32 cmp2_ind = 3;
+sljit_s32 tmp1_reg_ind = sljit_get_register_index(SLJIT_GP_REGISTER, TMP1);
+sljit_s32 data_ind = sljit_get_register_index(SLJIT_FLOAT_REGISTER, SLJIT_FR0);
+sljit_s32 cmp1_ind = sljit_get_register_index(SLJIT_FLOAT_REGISTER, SLJIT_FR1);
+sljit_s32 cmp2_ind = sljit_get_register_index(SLJIT_FLOAT_REGISTER, SLJIT_FR2);
+sljit_s32 tmp_ind = sljit_get_register_index(SLJIT_FLOAT_REGISTER, SLJIT_FR3);
sljit_u32 bit = 0;
int i;
@@ -401,57 +405,30 @@ OP1(SLJIT_MOV, TMP3, 0, STR_PTR, 0);
/* First part (unaligned start) */
-OP1(SLJIT_MOV, TMP1, 0, SLJIT_IMM, character_to_int32(char1 | bit));
-
-SLJIT_ASSERT(tmp1_reg_ind < 8);
-
-/* MOVD xmm, r/m32 */
-instruction[0] = 0x66;
-instruction[1] = 0x0f;
-instruction[2] = 0x6e;
-instruction[3] = 0xc0 | (cmp1_ind << 3) | tmp1_reg_ind;
-sljit_emit_op_custom(compiler, instruction, 4);
+value = SLJIT_SIMD_REG_128 | SLJIT_SIMD_ELEM_32 | SLJIT_SIMD_LANE_ZERO;
+sljit_emit_simd_lane_mov(compiler, value, SLJIT_FR1, 0, SLJIT_IMM, character_to_int32(char1 | bit));
if (char1 != char2)
- {
- OP1(SLJIT_MOV, TMP1, 0, SLJIT_IMM, character_to_int32(bit != 0 ? bit : char2));
-
- /* MOVD xmm, r/m32 */
- instruction[3] = 0xc0 | (cmp2_ind << 3) | tmp1_reg_ind;
- sljit_emit_op_custom(compiler, instruction, 4);
- }
+ sljit_emit_simd_lane_mov(compiler, value, SLJIT_FR2, 0, SLJIT_IMM, character_to_int32(bit != 0 ? bit : char2));
OP1(SLJIT_MOV, STR_PTR, 0, TMP2, 0);
-/* PSHUFD xmm1, xmm2/m128, imm8 */
-/* instruction[0] = 0x66; */
-/* instruction[1] = 0x0f; */
-instruction[2] = 0x70;
-instruction[3] = 0xc0 | (cmp1_ind << 3) | cmp1_ind;
-instruction[4] = 0;
-sljit_emit_op_custom(compiler, instruction, 5);
+sljit_emit_simd_lane_replicate(compiler, reg_type | SLJIT_SIMD_ELEM_32, SLJIT_FR1, SLJIT_FR1, 0);
if (char1 != char2)
- {
- /* PSHUFD xmm1, xmm2/m128, imm8 */
- instruction[3] = 0xc0 | (cmp2_ind << 3) | cmp2_ind;
- sljit_emit_op_custom(compiler, instruction, 5);
- }
+ sljit_emit_simd_lane_replicate(compiler, reg_type | SLJIT_SIMD_ELEM_32, SLJIT_FR2, SLJIT_FR2, 0);
-OP2(SLJIT_AND, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, ~0xf);
-OP2(SLJIT_AND, TMP2, 0, TMP2, 0, SLJIT_IMM, 0xf);
+value = (reg_type == SLJIT_SIMD_REG_256) ? 0x1f : 0xf;
+OP2(SLJIT_AND, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, ~value);
+OP2(SLJIT_AND, TMP2, 0, TMP2, 0, SLJIT_IMM, value);
-load_from_mem_sse2(compiler, data_ind, str_ptr_reg_ind, 0);
-for (i = 0; i < 4; i++)
- fast_forward_char_pair_sse2_compare(compiler, compare_type, i, data_ind, cmp1_ind, cmp2_ind, tmp_ind);
+value = (reg_type == SLJIT_SIMD_REG_256) ? SLJIT_SIMD_MEM_ALIGNED_256 : SLJIT_SIMD_MEM_ALIGNED_128;
+sljit_emit_simd_mov(compiler, reg_type | value, SLJIT_FR0, SLJIT_MEM1(STR_PTR), 0);
-/* PMOVMSKB reg, xmm */
-/* instruction[0] = 0x66; */
-/* instruction[1] = 0x0f; */
-instruction[2] = 0xd7;
-instruction[3] = 0xc0 | (tmp1_reg_ind << 3) | data_ind;
-sljit_emit_op_custom(compiler, instruction, 4);
+for (i = 0; i < 4; i++)
+ fast_forward_char_pair_sse2_compare(compiler, compare_type, reg_type, i, data_ind, cmp1_ind, cmp2_ind, tmp_ind);
+sljit_emit_simd_sign(compiler, SLJIT_SIMD_STORE | reg_type | SLJIT_SIMD_ELEM_8, SLJIT_FR0, TMP1, 0);
OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, TMP2, 0);
OP2(SLJIT_LSHR, TMP1, 0, TMP1, 0, TMP2, 0);
@@ -462,25 +439,23 @@ OP2(SLJIT_SUB, STR_PTR, 0, STR_PTR, 0, TMP2, 0);
/* Second part (aligned) */
start = LABEL();
-OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, 16);
+value = (reg_type == SLJIT_SIMD_REG_256) ? 32 : 16;
+OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, value);
add_jump(compiler, &not_found, CMP(SLJIT_GREATER_EQUAL, STR_PTR, 0, STR_END, 0));
-load_from_mem_sse2(compiler, data_ind, str_ptr_reg_ind, 0);
-for (i = 0; i < 4; i++)
- fast_forward_char_pair_sse2_compare(compiler, compare_type, i, data_ind, cmp1_ind, cmp2_ind, tmp_ind);
+value = (reg_type == SLJIT_SIMD_REG_256) ? SLJIT_SIMD_MEM_ALIGNED_256 : SLJIT_SIMD_MEM_ALIGNED_128;
+sljit_emit_simd_mov(compiler, reg_type | value, SLJIT_FR0, SLJIT_MEM1(STR_PTR), 0);
-/* PMOVMSKB reg, xmm */
-/* instruction[0] = 0x66; */
-/* instruction[1] = 0x0f; */
-instruction[2] = 0xd7;
-instruction[3] = 0xc0 | (tmp1_reg_ind << 3) | data_ind;
-sljit_emit_op_custom(compiler, instruction, 4);
+for (i = 0; i < 4; i++)
+ fast_forward_char_pair_sse2_compare(compiler, compare_type, reg_type, i, data_ind, cmp1_ind, cmp2_ind, tmp_ind);
+sljit_emit_simd_sign(compiler, SLJIT_SIMD_STORE | reg_type | SLJIT_SIMD_ELEM_8, SLJIT_FR0, TMP1, 0);
CMPTO(SLJIT_ZERO, TMP1, 0, SLJIT_IMM, 0, start);
JUMPHERE(quit);
+SLJIT_ASSERT(tmp1_reg_ind < 8);
/* BSF r32, r/m32 */
instruction[0] = 0x0f;
instruction[1] = 0xbc;
@@ -496,29 +471,31 @@ return not_found;
#ifndef _WIN64
-#define JIT_HAS_FAST_FORWARD_CHAR_PAIR_SIMD (sljit_has_cpu_feature(SLJIT_HAS_SSE2))
+#define JIT_HAS_FAST_FORWARD_CHAR_PAIR_SIMD (sljit_has_cpu_feature(SLJIT_HAS_SIMD))
static void fast_forward_char_pair_simd(compiler_common *common, sljit_s32 offs1,
PCRE2_UCHAR char1a, PCRE2_UCHAR char1b, sljit_s32 offs2, PCRE2_UCHAR char2a, PCRE2_UCHAR char2b)
{
DEFINE_COMPILER;
sljit_u8 instruction[8];
+/* The AVX2 code path is currently disabled. */
+/* sljit_s32 reg_type = sljit_has_cpu_feature(SLJIT_HAS_AVX2) ? SLJIT_SIMD_REG_256 : SLJIT_SIMD_REG_128; */
+sljit_s32 reg_type = SLJIT_SIMD_REG_128;
+sljit_s32 value;
vector_compare_type compare1_type = vector_compare_match1;
vector_compare_type compare2_type = vector_compare_match1;
sljit_u32 bit1 = 0;
sljit_u32 bit2 = 0;
sljit_u32 diff = IN_UCHARS(offs1 - offs2);
-sljit_s32 tmp1_reg_ind = sljit_get_register_index(TMP1);
-sljit_s32 tmp2_reg_ind = sljit_get_register_index(TMP2);
-sljit_s32 str_ptr_reg_ind = sljit_get_register_index(STR_PTR);
-sljit_s32 data1_ind = 0;
-sljit_s32 data2_ind = 1;
-sljit_s32 tmp1_ind = 2;
-sljit_s32 tmp2_ind = 3;
-sljit_s32 cmp1a_ind = 4;
-sljit_s32 cmp1b_ind = 5;
-sljit_s32 cmp2a_ind = 6;
-sljit_s32 cmp2b_ind = 7;
+sljit_s32 tmp1_reg_ind = sljit_get_register_index(SLJIT_GP_REGISTER, TMP1);
+sljit_s32 data1_ind = sljit_get_register_index(SLJIT_FLOAT_REGISTER, SLJIT_FR0);
+sljit_s32 data2_ind = sljit_get_register_index(SLJIT_FLOAT_REGISTER, SLJIT_FR1);
+sljit_s32 cmp1a_ind = sljit_get_register_index(SLJIT_FLOAT_REGISTER, SLJIT_FR2);
+sljit_s32 cmp2a_ind = sljit_get_register_index(SLJIT_FLOAT_REGISTER, SLJIT_FR3);
+sljit_s32 cmp1b_ind = sljit_get_register_index(SLJIT_FLOAT_REGISTER, SLJIT_FR4);
+sljit_s32 cmp2b_ind = sljit_get_register_index(SLJIT_FLOAT_REGISTER, SLJIT_FR5);
+sljit_s32 tmp1_ind = sljit_get_register_index(SLJIT_FLOAT_REGISTER, SLJIT_FR6);
+sljit_s32 tmp2_ind = sljit_get_register_index(SLJIT_FLOAT_REGISTER, SLJIT_TMP_FR0);
struct sljit_label *start;
#if defined SUPPORT_UNICODE && PCRE2_CODE_UNIT_WIDTH != 32
struct sljit_label *restart;
@@ -526,9 +503,8 @@ struct sljit_label *restart;
struct sljit_jump *jump[2];
int i;
-SLJIT_ASSERT(common->mode == PCRE2_JIT_COMPLETE && offs1 > offs2);
-SLJIT_ASSERT(diff <= IN_UCHARS(max_fast_forward_char_pair_offset()));
-SLJIT_ASSERT(tmp1_reg_ind < 8 && tmp2_reg_ind == 1);
+SLJIT_ASSERT(common->mode == PCRE2_JIT_COMPLETE && offs1 > offs2 && offs2 >= 0);
+SLJIT_ASSERT(diff <= (unsigned)IN_UCHARS(max_fast_forward_char_pair_offset()));
/* Initialize. */
if (common->match_end_ptr != 0)
@@ -538,17 +514,12 @@ if (common->match_end_ptr != 0)
OP2(SLJIT_ADD, TMP1, 0, TMP1, 0, SLJIT_IMM, IN_UCHARS(offs1 + 1));
OP2U(SLJIT_SUB | SLJIT_SET_LESS, TMP1, 0, STR_END, 0);
- CMOV(SLJIT_LESS, STR_END, TMP1, 0);
+ SELECT(SLJIT_LESS, STR_END, TMP1, 0, STR_END);
}
OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(offs1));
add_jump(compiler, &common->failed_match, CMP(SLJIT_GREATER_EQUAL, STR_PTR, 0, STR_END, 0));
-/* MOVD xmm, r/m32 */
-instruction[0] = 0x66;
-instruction[1] = 0x0f;
-instruction[2] = 0x6e;
-
if (char1a == char1b)
OP1(SLJIT_MOV, TMP1, 0, SLJIT_IMM, character_to_int32(char1a));
else
@@ -569,14 +540,11 @@ else
}
}
-instruction[3] = 0xc0 | (cmp1a_ind << 3) | tmp1_reg_ind;
-sljit_emit_op_custom(compiler, instruction, 4);
+value = SLJIT_SIMD_REG_128 | SLJIT_SIMD_ELEM_32 | SLJIT_SIMD_LANE_ZERO;
+sljit_emit_simd_lane_mov(compiler, value, SLJIT_FR2, 0, TMP1, 0);
if (char1a != char1b)
- {
- instruction[3] = 0xc0 | (cmp1b_ind << 3) | tmp2_reg_ind;
- sljit_emit_op_custom(compiler, instruction, 4);
- }
+ sljit_emit_simd_lane_mov(compiler, value, SLJIT_FR4, 0, TMP2, 0);
if (char2a == char2b)
OP1(SLJIT_MOV, TMP1, 0, SLJIT_IMM, character_to_int32(char2a));
@@ -598,38 +566,18 @@ else
}
}
-instruction[3] = 0xc0 | (cmp2a_ind << 3) | tmp1_reg_ind;
-sljit_emit_op_custom(compiler, instruction, 4);
+sljit_emit_simd_lane_mov(compiler, value, SLJIT_FR3, 0, TMP1, 0);
if (char2a != char2b)
- {
- instruction[3] = 0xc0 | (cmp2b_ind << 3) | tmp2_reg_ind;
- sljit_emit_op_custom(compiler, instruction, 4);
- }
-
-/* PSHUFD xmm1, xmm2/m128, imm8 */
-/* instruction[0] = 0x66; */
-/* instruction[1] = 0x0f; */
-instruction[2] = 0x70;
-instruction[4] = 0;
-
-instruction[3] = 0xc0 | (cmp1a_ind << 3) | cmp1a_ind;
-sljit_emit_op_custom(compiler, instruction, 5);
+ sljit_emit_simd_lane_mov(compiler, value, SLJIT_FR5, 0, TMP2, 0);
+sljit_emit_simd_lane_replicate(compiler, reg_type | SLJIT_SIMD_ELEM_32, SLJIT_FR2, SLJIT_FR2, 0);
if (char1a != char1b)
- {
- instruction[3] = 0xc0 | (cmp1b_ind << 3) | cmp1b_ind;
- sljit_emit_op_custom(compiler, instruction, 5);
- }
-
-instruction[3] = 0xc0 | (cmp2a_ind << 3) | cmp2a_ind;
-sljit_emit_op_custom(compiler, instruction, 5);
+ sljit_emit_simd_lane_replicate(compiler, reg_type | SLJIT_SIMD_ELEM_32, SLJIT_FR4, SLJIT_FR4, 0);
+sljit_emit_simd_lane_replicate(compiler, reg_type | SLJIT_SIMD_ELEM_32, SLJIT_FR3, SLJIT_FR3, 0);
if (char2a != char2b)
- {
- instruction[3] = 0xc0 | (cmp2b_ind << 3) | cmp2b_ind;
- sljit_emit_op_custom(compiler, instruction, 5);
- }
+ sljit_emit_simd_lane_replicate(compiler, reg_type | SLJIT_SIMD_ELEM_32, SLJIT_FR5, SLJIT_FR5, 0);
#if defined SUPPORT_UNICODE && PCRE2_CODE_UNIT_WIDTH != 32
restart = LABEL();
@@ -637,55 +585,91 @@ restart = LABEL();
OP2(SLJIT_SUB, TMP1, 0, STR_PTR, 0, SLJIT_IMM, diff);
OP1(SLJIT_MOV, TMP2, 0, STR_PTR, 0);
-OP2(SLJIT_AND, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, ~0xf);
+value = (reg_type == SLJIT_SIMD_REG_256) ? ~0x1f : ~0xf;
+OP2(SLJIT_AND, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, value);
-load_from_mem_sse2(compiler, data1_ind, str_ptr_reg_ind, 0);
+value = (reg_type == SLJIT_SIMD_REG_256) ? SLJIT_SIMD_MEM_ALIGNED_256 : SLJIT_SIMD_MEM_ALIGNED_128;
+sljit_emit_simd_mov(compiler, reg_type | value, SLJIT_FR0, SLJIT_MEM1(STR_PTR), 0);
jump[0] = CMP(SLJIT_GREATER_EQUAL, TMP1, 0, STR_PTR, 0);
-load_from_mem_sse2(compiler, data2_ind, str_ptr_reg_ind, -(sljit_s8)diff);
+sljit_emit_simd_mov(compiler, reg_type, SLJIT_FR1, SLJIT_MEM1(STR_PTR), -(sljit_sw)diff);
jump[1] = JUMP(SLJIT_JUMP);
JUMPHERE(jump[0]);
-/* MOVDQA xmm1, xmm2/m128 */
-/* instruction[0] = 0x66; */
-/* instruction[1] = 0x0f; */
-instruction[2] = 0x6f;
-instruction[3] = 0xc0 | (data2_ind << 3) | data1_ind;
-sljit_emit_op_custom(compiler, instruction, 4);
-
-/* PSLLDQ xmm1, imm8 */
-/* instruction[0] = 0x66; */
-/* instruction[1] = 0x0f; */
-instruction[2] = 0x73;
-instruction[3] = 0xc0 | (7 << 3) | data2_ind;
-instruction[4] = diff;
-sljit_emit_op_custom(compiler, instruction, 5);
+if (reg_type == SLJIT_SIMD_REG_256)
+ {
+ if (diff != 16)
+ {
+ /* PSLLDQ ymm1, ymm2, imm8 */
+ instruction[0] = 0xc5;
+ instruction[1] = (sljit_u8)(0xf9 ^ (data2_ind << 3));
+ instruction[2] = 0x73;
+ instruction[3] = 0xc0 | (7 << 3) | data1_ind;
+ instruction[4] = diff & 0xf;
+ sljit_emit_op_custom(compiler, instruction, 5);
+ }
+
+ instruction[0] = 0xc4;
+ instruction[1] = 0xe3;
+ if (diff < 16)
+ {
+ /* VINSERTI128 xmm1, xmm2, xmm3/m128 */
+ /* instruction[0] = 0xc4; */
+ /* instruction[1] = 0xe3; */
+ instruction[2] = (sljit_u8)(0x7d ^ (data2_ind << 3));
+ instruction[3] = 0x38;
+ SLJIT_ASSERT(sljit_get_register_index(SLJIT_GP_REGISTER, STR_PTR) <= 7);
+ instruction[4] = 0x40 | (data2_ind << 3) | sljit_get_register_index(SLJIT_GP_REGISTER, STR_PTR);
+ instruction[5] = (sljit_u8)(16 - diff);
+ instruction[6] = 1;
+ sljit_emit_op_custom(compiler, instruction, 7);
+ }
+ else
+ {
+ /* VPERM2I128 xmm1, xmm2, xmm3/m128 */
+ /* instruction[0] = 0xc4; */
+ /* instruction[1] = 0xe3; */
+ value = (diff == 16) ? data1_ind : data2_ind;
+ instruction[2] = (sljit_u8)(0x7d ^ (value << 3));
+ instruction[3] = 0x46;
+ instruction[4] = 0xc0 | (data2_ind << 3) | value;
+ instruction[5] = 0x08;
+ sljit_emit_op_custom(compiler, instruction, 6);
+ }
+ }
+else
+ {
+ /* MOVDQA xmm1, xmm2/m128 */
+ instruction[0] = 0x66;
+ instruction[1] = 0x0f;
+ instruction[2] = 0x6f;
+ instruction[3] = 0xc0 | (data2_ind << 3) | data1_ind;
+ sljit_emit_op_custom(compiler, instruction, 4);
+
+ /* PSLLDQ xmm1, imm8 */
+ /* instruction[0] = 0x66; */
+ /* instruction[1] = 0x0f; */
+ instruction[2] = 0x73;
+ instruction[3] = 0xc0 | (7 << 3) | data2_ind;
+ instruction[4] = diff;
+ sljit_emit_op_custom(compiler, instruction, 5);
+ }
JUMPHERE(jump[1]);
-OP2(SLJIT_AND, TMP2, 0, TMP2, 0, SLJIT_IMM, 0xf);
+value = (reg_type == SLJIT_SIMD_REG_256) ? 0x1f : 0xf;
+OP2(SLJIT_AND, TMP2, 0, TMP2, 0, SLJIT_IMM, value);
for (i = 0; i < 4; i++)
{
- fast_forward_char_pair_sse2_compare(compiler, compare2_type, i, data2_ind, cmp2a_ind, cmp2b_ind, tmp2_ind);
- fast_forward_char_pair_sse2_compare(compiler, compare1_type, i, data1_ind, cmp1a_ind, cmp1b_ind, tmp1_ind);
+ fast_forward_char_pair_sse2_compare(compiler, compare2_type, reg_type, i, data2_ind, cmp2a_ind, cmp2b_ind, tmp2_ind);
+ fast_forward_char_pair_sse2_compare(compiler, compare1_type, reg_type, i, data1_ind, cmp1a_ind, cmp1b_ind, tmp1_ind);
}
-/* PAND xmm1, xmm2/m128 */
-/* instruction[0] = 0x66; */
-/* instruction[1] = 0x0f; */
-instruction[2] = 0xdb;
-instruction[3] = 0xc0 | (data1_ind << 3) | data2_ind;
-sljit_emit_op_custom(compiler, instruction, 4);
-
-/* PMOVMSKB reg, xmm */
-/* instruction[0] = 0x66; */
-/* instruction[1] = 0x0f; */
-instruction[2] = 0xd7;
-instruction[3] = 0xc0 | (tmp1_reg_ind << 3) | 0;
-sljit_emit_op_custom(compiler, instruction, 4);
+sljit_emit_simd_op2(compiler, SLJIT_SIMD_OP2_AND | reg_type, SLJIT_FR0, SLJIT_FR0, SLJIT_FR1);
+sljit_emit_simd_sign(compiler, SLJIT_SIMD_STORE | reg_type | SLJIT_SIMD_ELEM_8, SLJIT_FR0, TMP1, 0);
/* Ignore matches before the first STR_PTR. */
OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, TMP2, 0);
@@ -698,36 +682,28 @@ OP2(SLJIT_SUB, STR_PTR, 0, STR_PTR, 0, TMP2, 0);
/* Main loop. */
start = LABEL();
-OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, 16);
+value = (reg_type == SLJIT_SIMD_REG_256) ? 32 : 16;
+OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, value);
add_jump(compiler, &common->failed_match, CMP(SLJIT_GREATER_EQUAL, STR_PTR, 0, STR_END, 0));
-load_from_mem_sse2(compiler, data1_ind, str_ptr_reg_ind, 0);
-load_from_mem_sse2(compiler, data2_ind, str_ptr_reg_ind, -(sljit_s8)diff);
+value = (reg_type == SLJIT_SIMD_REG_256) ? SLJIT_SIMD_MEM_ALIGNED_256 : SLJIT_SIMD_MEM_ALIGNED_128;
+sljit_emit_simd_mov(compiler, reg_type | value, SLJIT_FR0, SLJIT_MEM1(STR_PTR), 0);
+sljit_emit_simd_mov(compiler, reg_type, SLJIT_FR1, SLJIT_MEM1(STR_PTR), -(sljit_sw)diff);
for (i = 0; i < 4; i++)
{
- fast_forward_char_pair_sse2_compare(compiler, compare1_type, i, data1_ind, cmp1a_ind, cmp1b_ind, tmp2_ind);
- fast_forward_char_pair_sse2_compare(compiler, compare2_type, i, data2_ind, cmp2a_ind, cmp2b_ind, tmp1_ind);
+ fast_forward_char_pair_sse2_compare(compiler, compare1_type, reg_type, i, data1_ind, cmp1a_ind, cmp1b_ind, tmp2_ind);
+ fast_forward_char_pair_sse2_compare(compiler, compare2_type, reg_type, i, data2_ind, cmp2a_ind, cmp2b_ind, tmp1_ind);
}
-/* PAND xmm1, xmm2/m128 */
-/* instruction[0] = 0x66; */
-/* instruction[1] = 0x0f; */
-instruction[2] = 0xdb;
-instruction[3] = 0xc0 | (data1_ind << 3) | data2_ind;
-sljit_emit_op_custom(compiler, instruction, 4);
-
-/* PMOVMSKB reg, xmm */
-/* instruction[0] = 0x66; */
-/* instruction[1] = 0x0f; */
-instruction[2] = 0xd7;
-instruction[3] = 0xc0 | (tmp1_reg_ind << 3) | 0;
-sljit_emit_op_custom(compiler, instruction, 4);
+sljit_emit_simd_op2(compiler, SLJIT_SIMD_OP2_AND | reg_type, SLJIT_FR0, SLJIT_FR0, SLJIT_FR1);
+sljit_emit_simd_sign(compiler, SLJIT_SIMD_STORE | reg_type | SLJIT_SIMD_ELEM_8, SLJIT_FR0, TMP1, 0);
CMPTO(SLJIT_ZERO, TMP1, 0, SLJIT_IMM, 0, start);
JUMPHERE(jump[0]);
+SLJIT_ASSERT(tmp1_reg_ind < 8);
/* BSF r32, r/m32 */
instruction[0] = 0x0f;
instruction[1] = 0xbc;
@@ -762,7 +738,7 @@ if (common->match_end_ptr != 0)
#endif /* !_WIN64 */
-#undef SSE2_COMPARE_TYPE_INDEX
+#undef SIMD_COMPARE_TYPE_INDEX
#endif /* SLJIT_CONFIG_X86 */
@@ -865,14 +841,14 @@ static void fast_forward_char_simd(compiler_common *common, PCRE2_UCHAR char1, P
{
DEFINE_COMPILER;
int_char ic;
-struct sljit_jump *partial_quit;
+struct sljit_jump *partial_quit, *quit;
/* Save temporary registers. */
OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), LOCALS0, STR_PTR, 0);
OP1(SLJIT_MOV, SLJIT_MEM1(SLJIT_SP), LOCALS1, TMP3, 0);
/* Prepare function arguments */
OP1(SLJIT_MOV, SLJIT_R0, 0, STR_END, 0);
-OP1(SLJIT_MOV, SLJIT_R1, 0, STR_PTR, 0);
+GET_LOCAL_BASE(SLJIT_R1, 0, LOCALS0);
OP1(SLJIT_MOV, SLJIT_R2, 0, SLJIT_IMM, offset);
if (char1 == char2)
@@ -944,9 +920,14 @@ if (common->mode == PCRE2_JIT_COMPLETE)
/* Fast forward STR_PTR to the result of memchr. */
OP1(SLJIT_MOV, STR_PTR, 0, SLJIT_RETURN_REG, 0);
-
if (common->mode != PCRE2_JIT_COMPLETE)
+ {
+ quit = CMP(SLJIT_NOT_ZERO, SLJIT_RETURN_REG, 0, SLJIT_IMM, 0);
JUMPHERE(partial_quit);
+ OP2U(SLJIT_SUB | SLJIT_SET_GREATER, STR_PTR, 0, STR_END, 0);
+ SELECT(SLJIT_GREATER, STR_PTR, STR_END, 0, STR_PTR);
+ JUMPHERE(quit);
+ }
}
typedef enum {
@@ -1068,10 +1049,10 @@ else
OP2(SLJIT_ADD, SLJIT_R0, 0, SLJIT_R0, 0, SLJIT_IMM, IN_UCHARS(offs1 + 1));
OP2U(SLJIT_SUB | SLJIT_SET_LESS, STR_END, 0, SLJIT_R0, 0);
- CMOV(SLJIT_LESS, SLJIT_R0, STR_END, 0);
+ SELECT(SLJIT_LESS, SLJIT_R0, STR_END, 0, SLJIT_R0);
}
-OP1(SLJIT_MOV, SLJIT_R1, 0, STR_PTR, 0);
+GET_LOCAL_BASE(SLJIT_R1, 0, LOCALS0);
OP1(SLJIT_MOV_S32, SLJIT_R2, 0, SLJIT_IMM, offs1);
OP1(SLJIT_MOV_S32, SLJIT_R3, 0, SLJIT_IMM, offs2);
ic.c.c1 = char1a;
@@ -1177,7 +1158,7 @@ if (step == 0)
OP1(SLJIT_MOV, tmp_general_reg, 0, SLJIT_IMM, chr);
/* VLVG */
- instruction[0] = (sljit_u16)(0xe700 | (dst_vreg << 4) | sljit_get_register_index(tmp_general_reg));
+ instruction[0] = (sljit_u16)(0xe700 | (dst_vreg << 4) | sljit_get_register_index(SLJIT_GP_REGISTER, tmp_general_reg));
instruction[1] = 0;
instruction[2] = (sljit_u16)((VECTOR_ELEMENT_SIZE << 12) | (0x8 << 8) | 0x22);
sljit_emit_op_custom(compiler, instruction, 6);
@@ -1256,8 +1237,8 @@ struct sljit_label *restart;
struct sljit_jump *quit;
struct sljit_jump *partial_quit[2];
vector_compare_type compare_type = vector_compare_match1;
-sljit_s32 tmp1_reg_ind = sljit_get_register_index(TMP1);
-sljit_s32 str_ptr_reg_ind = sljit_get_register_index(STR_PTR);
+sljit_s32 tmp1_reg_ind = sljit_get_register_index(SLJIT_GP_REGISTER, TMP1);
+sljit_s32 str_ptr_reg_ind = sljit_get_register_index(SLJIT_GP_REGISTER, STR_PTR);
sljit_s32 data_ind = 0;
sljit_s32 tmp_ind = 1;
sljit_s32 cmp1_ind = 2;
@@ -1419,7 +1400,7 @@ if (common->mode != PCRE2_JIT_COMPLETE)
JUMPHERE(partial_quit[0]);
JUMPHERE(partial_quit[1]);
OP2U(SLJIT_SUB | SLJIT_SET_GREATER, STR_PTR, 0, STR_END, 0);
- CMOV(SLJIT_GREATER, STR_PTR, STR_END, 0);
+ SELECT(SLJIT_GREATER, STR_PTR, STR_END, 0, STR_PTR);
}
else
add_jump(compiler, &common->failed_match, CMP(SLJIT_GREATER_EQUAL, STR_PTR, 0, STR_END, 0));
@@ -1454,8 +1435,8 @@ struct sljit_label *start;
struct sljit_jump *quit;
jump_list *not_found = NULL;
vector_compare_type compare_type = vector_compare_match1;
-sljit_s32 tmp1_reg_ind = sljit_get_register_index(TMP1);
-sljit_s32 tmp3_reg_ind = sljit_get_register_index(TMP3);
+sljit_s32 tmp1_reg_ind = sljit_get_register_index(SLJIT_GP_REGISTER, TMP1);
+sljit_s32 tmp3_reg_ind = sljit_get_register_index(SLJIT_GP_REGISTER, TMP3);
sljit_s32 data_ind = 0;
sljit_s32 tmp_ind = 1;
sljit_s32 cmp1_ind = 2;
@@ -1624,9 +1605,9 @@ vector_compare_type compare2_type = vector_compare_match1;
sljit_u32 bit1 = 0;
sljit_u32 bit2 = 0;
sljit_s32 diff = IN_UCHARS(offs2 - offs1);
-sljit_s32 tmp1_reg_ind = sljit_get_register_index(TMP1);
-sljit_s32 tmp2_reg_ind = sljit_get_register_index(TMP2);
-sljit_s32 str_ptr_reg_ind = sljit_get_register_index(STR_PTR);
+sljit_s32 tmp1_reg_ind = sljit_get_register_index(SLJIT_GP_REGISTER, TMP1);
+sljit_s32 tmp2_reg_ind = sljit_get_register_index(SLJIT_GP_REGISTER, TMP2);
+sljit_s32 str_ptr_reg_ind = sljit_get_register_index(SLJIT_GP_REGISTER, STR_PTR);
sljit_s32 data1_ind = 0;
sljit_s32 data2_ind = 1;
sljit_s32 tmp1_ind = 2;
@@ -1674,7 +1655,7 @@ if (common->match_end_ptr != 0)
OP2(SLJIT_ADD, TMP1, 0, TMP1, 0, SLJIT_IMM, IN_UCHARS(offs1 + 1));
OP2U(SLJIT_SUB | SLJIT_SET_LESS, TMP1, 0, STR_END, 0);
- CMOV(SLJIT_LESS, STR_END, TMP1, 0);
+ SELECT(SLJIT_LESS, STR_END, TMP1, 0, STR_END);
}
OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(offs1));
@@ -1855,4 +1836,520 @@ if (common->match_end_ptr != 0)
#endif /* SLJIT_CONFIG_S390X */
+#if (defined SLJIT_CONFIG_LOONGARCH_64 && SLJIT_CONFIG_LOONGARCH_64)
+
+#ifdef __linux__
+/* Using getauxval(AT_HWCAP) under Linux for detecting whether LSX is available */
+#include <sys/auxv.h>
+#define LOONGARCH_HWCAP_LSX (1 << 4)
+#define HAS_LSX_SUPPORT ((getauxval(AT_HWCAP) & LOONGARCH_HWCAP_LSX) != 0)
+#else
+#define HAS_LSX_SUPPORT 0
+#endif
+
+typedef sljit_u32 sljit_ins;
+
+#define SI12_IMM_MASK 0x003ffc00
+#define UI5_IMM_MASK 0x00007c00
+#define UI2_IMM_MASK 0x00000c00
+
+#define VD(vd) ((sljit_ins)vd << 0)
+#define VJ(vj) ((sljit_ins)vj << 5)
+#define VK(vk) ((sljit_ins)vk << 10)
+#define RD_V(rd) ((sljit_ins)rd << 0)
+#define RJ_V(rj) ((sljit_ins)rj << 5)
+
+#define IMM_SI12(imm) (((sljit_ins)(imm) << 10) & SI12_IMM_MASK)
+#define IMM_UI5(imm) (((sljit_ins)(imm) << 10) & UI5_IMM_MASK)
+#define IMM_UI2(imm) (((sljit_ins)(imm) << 10) & UI2_IMM_MASK)
+
+/* LSX opcodes */
+#define VLD 0x2c000000
+#define VOR_V 0x71268000
+#define VAND_V 0x71260000
+#define VBSLL_V 0x728e0000
+#define VMSKLTZ_B 0x729c4000
+#define VPICKVE2GR_WU 0x72f3e000
+
+#if PCRE2_CODE_UNIT_WIDTH == 8
+#define VREPLGR2VR 0x729f0000
+#define VSEQ 0x70000000
+#elif PCRE2_CODE_UNIT_WIDTH == 16
+#define VREPLGR2VR 0x729f0400
+#define VSEQ 0x70008000
+#else
+#define VREPLGR2VR 0x729f0800
+#define VSEQ 0x70010000
+#endif
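As a side note on the encodings above: each LSX instruction word is built by OR-ing a base opcode with the register and immediate fields produced by the VD/VJ/VK/RJ_V and IMM_* macros. A minimal illustration in plain C, using arbitrary register numbers that are not taken from the patch:

/* Hypothetical example: encode "VLD vr0, r5, -16" with the macros above. */
sljit_ins ins = VLD | VD(0) | RJ_V(5) | IMM_SI12(-16);
/* push_inst(compiler, ins) would emit this 32-bit instruction word. */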
+
+static void fast_forward_char_pair_lsx_compare(struct sljit_compiler *compiler, vector_compare_type compare_type,
+ sljit_s32 dst_ind, sljit_s32 cmp1_ind, sljit_s32 cmp2_ind, sljit_s32 tmp_ind)
+{
+if (compare_type != vector_compare_match2)
+ {
+ if (compare_type == vector_compare_match1i)
+ {
+ /* VOR.V vd, vj, vk */
+ push_inst(compiler, VOR_V | VD(dst_ind) | VJ(cmp2_ind) | VK(dst_ind));
+ }
+
+ /* VSEQ.B/H/W vd, vj, vk */
+ push_inst(compiler, VSEQ | VD(dst_ind) | VJ(dst_ind) | VK(cmp1_ind));
+ return;
+ }
+
+/* VBSLL.V vd, vj, ui5 */
+push_inst(compiler, VBSLL_V | VD(tmp_ind) | VJ(dst_ind) | IMM_UI5(0));
+
+/* VSEQ.B/H/W vd, vj, vk */
+push_inst(compiler, VSEQ | VD(dst_ind) | VJ(dst_ind) | VK(cmp1_ind));
+
+/* VSEQ.B/H/W vd, vj, vk */
+push_inst(compiler, VSEQ | VD(tmp_ind) | VJ(tmp_ind) | VK(cmp2_ind));
+
+/* VOR vd, vj, vk */
+push_inst(compiler, VOR_V | VD(dst_ind) | VJ(tmp_ind) | VK(dst_ind));
+return;
+}
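The vector_compare_match1i case above assumes the caller loaded cmp1 with (char1 | bit) and cmp2 with the single bit in which the two characters differ, so OR-ing the data with cmp2 folds both characters onto one value before the equality test. A scalar sketch of that trick, illustrative only:

/* Scalar model of the "match1i" comparison: when char1 and char2 differ
   in exactly one bit, OR that bit into the input and compare against
   (char1 | bit); the test is then true for either character. */
static int matches_either(unsigned int c, unsigned int char1, unsigned int char2)
{
unsigned int bit = char1 ^ char2;     /* assumed to be a power of two */
return (c | bit) == (char1 | bit);    /* 1 for char1 or char2, 0 otherwise */
}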
+
+#define JIT_HAS_FAST_FORWARD_CHAR_SIMD HAS_LSX_SUPPORT
+
+static void fast_forward_char_simd(compiler_common *common, PCRE2_UCHAR char1, PCRE2_UCHAR char2, sljit_s32 offset)
+{
+DEFINE_COMPILER;
+struct sljit_label *start;
+#if defined SUPPORT_UNICODE && PCRE2_CODE_UNIT_WIDTH != 32
+struct sljit_label *restart;
+#endif
+struct sljit_jump *quit;
+struct sljit_jump *partial_quit[2];
+vector_compare_type compare_type = vector_compare_match1;
+sljit_s32 tmp1_reg_ind = sljit_get_register_index(SLJIT_GP_REGISTER, TMP1);
+sljit_s32 str_ptr_reg_ind = sljit_get_register_index(SLJIT_GP_REGISTER, STR_PTR);
+sljit_s32 data_ind = 0;
+sljit_s32 tmp_ind = 1;
+sljit_s32 cmp1_ind = 2;
+sljit_s32 cmp2_ind = 3;
+sljit_u32 bit = 0;
+
+SLJIT_UNUSED_ARG(offset);
+
+if (char1 != char2)
+ {
+ bit = char1 ^ char2;
+ compare_type = vector_compare_match1i;
+
+ if (!is_powerof2(bit))
+ {
+ bit = 0;
+ compare_type = vector_compare_match2;
+ }
+ }
+
+partial_quit[0] = CMP(SLJIT_GREATER_EQUAL, STR_PTR, 0, STR_END, 0);
+if (common->mode == PCRE2_JIT_COMPLETE)
+ add_jump(compiler, &common->failed_match, partial_quit[0]);
+
+/* First part (unaligned start) */
+
+OP1(SLJIT_MOV, TMP1, 0, SLJIT_IMM, char1 | bit);
+
+/* VREPLGR2VR.B/H/W vd, rj */
+push_inst(compiler, VREPLGR2VR | VD(cmp1_ind) | RJ_V(tmp1_reg_ind));
+
+if (char1 != char2)
+ {
+ OP1(SLJIT_MOV, TMP1, 0, SLJIT_IMM, bit != 0 ? bit : char2);
+
+ /* VREPLGR2VR.B/H/W vd, rj */
+ push_inst(compiler, VREPLGR2VR | VD(cmp2_ind) | RJ_V(tmp1_reg_ind));
+ }
+
+OP1(SLJIT_MOV, TMP2, 0, STR_PTR, 0);
+
+#if defined SUPPORT_UNICODE && PCRE2_CODE_UNIT_WIDTH != 32
+restart = LABEL();
+#endif
+
+OP2(SLJIT_AND, TMP2, 0, TMP2, 0, SLJIT_IMM, 0xf);
+OP2(SLJIT_SUB, STR_PTR, 0, STR_PTR, 0, TMP2, 0);
+
+/* VLD vd, rj, si12 */
+push_inst(compiler, VLD | VD(data_ind) | RJ_V(str_ptr_reg_ind) | IMM_SI12(0));
+fast_forward_char_pair_lsx_compare(compiler, compare_type, data_ind, cmp1_ind, cmp2_ind, tmp_ind);
+
+/* VMSKLTZ.B vd, vj */
+push_inst(compiler, VMSKLTZ_B | VD(tmp_ind) | VJ(data_ind));
+
+/* VPICKVE2GR.WU rd, vj, ui2 */
+push_inst(compiler, VPICKVE2GR_WU | RD_V(tmp1_reg_ind) | VJ(tmp_ind) | IMM_UI2(0));
+
+OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, TMP2, 0);
+OP2(SLJIT_LSHR, TMP1, 0, TMP1, 0, TMP2, 0);
+
+quit = CMP(SLJIT_NOT_ZERO, TMP1, 0, SLJIT_IMM, 0);
+
+OP2(SLJIT_SUB, STR_PTR, 0, STR_PTR, 0, TMP2, 0);
+
+/* Second part (aligned) */
+start = LABEL();
+
+OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, 16);
+
+partial_quit[1] = CMP(SLJIT_GREATER_EQUAL, STR_PTR, 0, STR_END, 0);
+if (common->mode == PCRE2_JIT_COMPLETE)
+ add_jump(compiler, &common->failed_match, partial_quit[1]);
+
+/* VLD vd, rj, si12 */
+push_inst(compiler, VLD | VD(data_ind) | RJ_V(str_ptr_reg_ind) | IMM_SI12(0));
+fast_forward_char_pair_lsx_compare(compiler, compare_type, data_ind, cmp1_ind, cmp2_ind, tmp_ind);
+
+/* VMSKLTZ.B vd, vj */
+push_inst(compiler, VMSKLTZ_B | VD(tmp_ind) | VJ(data_ind));
+
+/* VPICKVE2GR.WU rd, vj, ui2 */
+push_inst(compiler, VPICKVE2GR_WU | RD_V(tmp1_reg_ind) | VJ(tmp_ind) | IMM_UI2(0));
+
+CMPTO(SLJIT_ZERO, TMP1, 0, SLJIT_IMM, 0, start);
+
+JUMPHERE(quit);
+
+/* CTZ.W rd, rj */
+push_inst(compiler, CTZ_W | RD_V(tmp1_reg_ind) | RJ_V(tmp1_reg_ind));
+
+OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, TMP1, 0);
+
+if (common->mode != PCRE2_JIT_COMPLETE)
+ {
+ JUMPHERE(partial_quit[0]);
+ JUMPHERE(partial_quit[1]);
+ OP2U(SLJIT_SUB | SLJIT_SET_GREATER, STR_PTR, 0, STR_END, 0);
+ SELECT(SLJIT_GREATER, STR_PTR, STR_END, 0, STR_PTR);
+ }
+else
+ add_jump(compiler, &common->failed_match, CMP(SLJIT_GREATER_EQUAL, STR_PTR, 0, STR_END, 0));
+
+#if defined SUPPORT_UNICODE && PCRE2_CODE_UNIT_WIDTH != 32
+if (common->utf && offset > 0)
+ {
+ SLJIT_ASSERT(common->mode == PCRE2_JIT_COMPLETE);
+
+ OP1(MOV_UCHAR, TMP1, 0, SLJIT_MEM1(STR_PTR), IN_UCHARS(-offset));
+
+ quit = jump_if_utf_char_start(compiler, TMP1);
+
+ OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(1));
+ add_jump(compiler, &common->failed_match, CMP(SLJIT_GREATER_EQUAL, STR_PTR, 0, STR_END, 0));
+ OP1(SLJIT_MOV, TMP2, 0, STR_PTR, 0);
+ JUMPTO(SLJIT_JUMP, restart);
+
+ JUMPHERE(quit);
+ }
+#endif
+}
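The unaligned-start code in the function above follows a common pattern: round the pointer down to a 16-byte boundary, load and compare a full vector, extract a per-byte match mask, then shift the mask right by the misalignment so hits before the real starting position are ignored. A rough scalar model of the mask handling, assuming GCC's __builtin_ctz and a mask already produced by the comparison:

/* Illustrative only: 'mask' has one bit per byte of the aligned 16-byte
   block, 'misalign' is the original pointer modulo 16. Returns the offset
   of the first hit from the original (unaligned) pointer, or -1. */
static int first_match_offset(unsigned int mask, unsigned int misalign)
{
mask >>= misalign;                 /* discard hits before the real start */
if (mask == 0) return -1;          /* continue with the aligned loop */
return (int)__builtin_ctz(mask);   /* same role as CTZ.W in the JIT code */
}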
+
+#define JIT_HAS_FAST_REQUESTED_CHAR_SIMD HAS_LSX_SUPPORT
+
+static jump_list *fast_requested_char_simd(compiler_common *common, PCRE2_UCHAR char1, PCRE2_UCHAR char2)
+{
+DEFINE_COMPILER;
+struct sljit_label *start;
+struct sljit_jump *quit;
+jump_list *not_found = NULL;
+vector_compare_type compare_type = vector_compare_match1;
+sljit_s32 tmp1_reg_ind = sljit_get_register_index(SLJIT_GP_REGISTER, TMP1);
+sljit_s32 str_ptr_reg_ind = sljit_get_register_index(SLJIT_GP_REGISTER, STR_PTR);
+sljit_s32 data_ind = 0;
+sljit_s32 tmp_ind = 1;
+sljit_s32 cmp1_ind = 2;
+sljit_s32 cmp2_ind = 3;
+sljit_u32 bit = 0;
+
+if (char1 != char2)
+ {
+ bit = char1 ^ char2;
+ compare_type = vector_compare_match1i;
+
+ if (!is_powerof2(bit))
+ {
+ bit = 0;
+ compare_type = vector_compare_match2;
+ }
+ }
+
+add_jump(compiler, &not_found, CMP(SLJIT_GREATER_EQUAL, TMP1, 0, STR_END, 0));
+OP1(SLJIT_MOV, TMP2, 0, TMP1, 0);
+OP1(SLJIT_MOV, TMP3, 0, STR_PTR, 0);
+
+/* First part (unaligned start) */
+
+OP1(SLJIT_MOV, TMP1, 0, SLJIT_IMM, char1 | bit);
+
+/* VREPLGR2VR vd, rj */
+push_inst(compiler, VREPLGR2VR | VD(cmp1_ind) | RJ_V(tmp1_reg_ind));
+
+if (char1 != char2)
+ {
+ OP1(SLJIT_MOV, TMP1, 0, SLJIT_IMM, bit != 0 ? bit : char2);
+ /* VREPLGR2VR vd, rj */
+ push_inst(compiler, VREPLGR2VR | VD(cmp2_ind) | RJ_V(tmp1_reg_ind));
+ }
+
+OP1(SLJIT_MOV, STR_PTR, 0, TMP2, 0);
+OP2(SLJIT_AND, TMP2, 0, TMP2, 0, SLJIT_IMM, 0xf);
+OP2(SLJIT_SUB, STR_PTR, 0, STR_PTR, 0, TMP2, 0);
+
+/* VLD vd, rj, si12 */
+push_inst(compiler, VLD | VD(data_ind) | RJ_V(str_ptr_reg_ind) | IMM_SI12(0));
+fast_forward_char_pair_lsx_compare(compiler, compare_type, data_ind, cmp1_ind, cmp2_ind, tmp_ind);
+
+/* VMSKLTZ.B vd, vj */
+push_inst(compiler, VMSKLTZ_B | VD(tmp_ind) | VJ(data_ind));
+
+/* VPICKVE2GR.WU rd, vj, ui2 */
+push_inst(compiler, VPICKVE2GR_WU | RD_V(tmp1_reg_ind) | VJ(tmp_ind) | IMM_UI2(0));
+
+OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, TMP2, 0);
+OP2(SLJIT_LSHR, TMP1, 0, TMP1, 0, TMP2, 0);
+
+quit = CMP(SLJIT_NOT_ZERO, TMP1, 0, SLJIT_IMM, 0);
+
+OP2(SLJIT_SUB, STR_PTR, 0, STR_PTR, 0, TMP2, 0);
+
+/* Second part (aligned) */
+start = LABEL();
+
+OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, 16);
+
+add_jump(compiler, &not_found, CMP(SLJIT_GREATER_EQUAL, STR_PTR, 0, STR_END, 0));
+
+/* VLD vd, rj, si12 */
+push_inst(compiler, VLD | VD(data_ind) | RJ_V(str_ptr_reg_ind) | IMM_SI12(0));
+fast_forward_char_pair_lsx_compare(compiler, compare_type, data_ind, cmp1_ind, cmp2_ind, tmp_ind);
+
+/* VMSKLTZ.B vd, vj */
+push_inst(compiler, VMSKLTZ_B | VD(tmp_ind) | VJ(data_ind));
+
+/* VPICKVE2GR.WU rd, vj, ui2 */
+push_inst(compiler, VPICKVE2GR_WU | RD_V(tmp1_reg_ind) | VJ(tmp_ind) | IMM_UI2(0));
+
+CMPTO(SLJIT_ZERO, TMP1, 0, SLJIT_IMM, 0, start);
+
+JUMPHERE(quit);
+
+/* CTZ.W rd, rj */
+push_inst(compiler, CTZ_W | RD_V(tmp1_reg_ind) | RJ_V(tmp1_reg_ind));
+
+OP2(SLJIT_ADD, TMP1, 0, TMP1, 0, STR_PTR, 0);
+add_jump(compiler, &not_found, CMP(SLJIT_GREATER_EQUAL, TMP1, 0, STR_END, 0));
+
+OP1(SLJIT_MOV, STR_PTR, 0, TMP3, 0);
+return not_found;
+}
+
+#define JIT_HAS_FAST_FORWARD_CHAR_PAIR_SIMD HAS_LSX_SUPPORT
+
+static void fast_forward_char_pair_simd(compiler_common *common, sljit_s32 offs1,
+ PCRE2_UCHAR char1a, PCRE2_UCHAR char1b, sljit_s32 offs2, PCRE2_UCHAR char2a, PCRE2_UCHAR char2b)
+{
+DEFINE_COMPILER;
+vector_compare_type compare1_type = vector_compare_match1;
+vector_compare_type compare2_type = vector_compare_match1;
+sljit_u32 bit1 = 0;
+sljit_u32 bit2 = 0;
+sljit_u32 diff = IN_UCHARS(offs1 - offs2);
+sljit_s32 tmp1_reg_ind = sljit_get_register_index(SLJIT_GP_REGISTER, TMP1);
+sljit_s32 tmp2_reg_ind = sljit_get_register_index(SLJIT_GP_REGISTER, TMP2);
+sljit_s32 str_ptr_reg_ind = sljit_get_register_index(SLJIT_GP_REGISTER, STR_PTR);
+sljit_s32 data1_ind = 0;
+sljit_s32 data2_ind = 1;
+sljit_s32 tmp1_ind = 2;
+sljit_s32 tmp2_ind = 3;
+sljit_s32 cmp1a_ind = 4;
+sljit_s32 cmp1b_ind = 5;
+sljit_s32 cmp2a_ind = 6;
+sljit_s32 cmp2b_ind = 7;
+struct sljit_label *start;
+#if defined SUPPORT_UNICODE && PCRE2_CODE_UNIT_WIDTH != 32
+struct sljit_label *restart;
+#endif
+struct sljit_jump *jump[2];
+
+SLJIT_ASSERT(common->mode == PCRE2_JIT_COMPLETE && offs1 > offs2);
+SLJIT_ASSERT(diff <= (unsigned)IN_UCHARS(max_fast_forward_char_pair_offset()));
+
+/* Initialize. */
+if (common->match_end_ptr != 0)
+ {
+ OP1(SLJIT_MOV, TMP1, 0, SLJIT_MEM1(SLJIT_SP), common->match_end_ptr);
+ OP2(SLJIT_ADD, TMP1, 0, TMP1, 0, SLJIT_IMM, IN_UCHARS(offs1 + 1));
+ OP1(SLJIT_MOV, TMP3, 0, STR_END, 0);
+
+ OP2U(SLJIT_SUB | SLJIT_SET_LESS, TMP1, 0, STR_END, 0);
+ SELECT(SLJIT_LESS, STR_END, TMP1, 0, STR_END);
+ }
+
+OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(offs1));
+add_jump(compiler, &common->failed_match, CMP(SLJIT_GREATER_EQUAL, STR_PTR, 0, STR_END, 0));
+
+if (char1a == char1b)
+ OP1(SLJIT_MOV, TMP1, 0, SLJIT_IMM, char1a);
+else
+ {
+ bit1 = char1a ^ char1b;
+ if (is_powerof2(bit1))
+ {
+ compare1_type = vector_compare_match1i;
+ OP1(SLJIT_MOV, TMP1, 0, SLJIT_IMM, char1a | bit1);
+ OP1(SLJIT_MOV, TMP2, 0, SLJIT_IMM, bit1);
+ }
+ else
+ {
+ compare1_type = vector_compare_match2;
+ bit1 = 0;
+ OP1(SLJIT_MOV, TMP1, 0, SLJIT_IMM, char1a);
+ OP1(SLJIT_MOV, TMP2, 0, SLJIT_IMM, char1b);
+ }
+ }
+
+/* VREPLGR2VR vd, rj */
+push_inst(compiler, VREPLGR2VR | VD(cmp1a_ind) | RJ_V(tmp1_reg_ind));
+
+if (char1a != char1b)
+ {
+ /* VREPLGR2VR vd, rj */
+ push_inst(compiler, VREPLGR2VR | VD(cmp1b_ind) | RJ_V(tmp2_reg_ind));
+ }
+
+if (char2a == char2b)
+ OP1(SLJIT_MOV, TMP1, 0, SLJIT_IMM, char2a);
+else
+ {
+ bit2 = char2a ^ char2b;
+ if (is_powerof2(bit2))
+ {
+ compare2_type = vector_compare_match1i;
+ OP1(SLJIT_MOV, TMP1, 0, SLJIT_IMM, char2a | bit2);
+ OP1(SLJIT_MOV, TMP2, 0, SLJIT_IMM, bit2);
+ }
+ else
+ {
+ compare2_type = vector_compare_match2;
+ bit2 = 0;
+ OP1(SLJIT_MOV, TMP1, 0, SLJIT_IMM, char2a);
+ OP1(SLJIT_MOV, TMP2, 0, SLJIT_IMM, char2b);
+ }
+ }
+
+/* VREPLGR2VR vd, rj */
+push_inst(compiler, VREPLGR2VR | VD(cmp2a_ind) | RJ_V(tmp1_reg_ind));
+
+if (char2a != char2b)
+ {
+ /* VREPLGR2VR vd, rj */
+ push_inst(compiler, VREPLGR2VR | VD(cmp2b_ind) | RJ_V(tmp2_reg_ind));
+ }
+
+#if defined SUPPORT_UNICODE && PCRE2_CODE_UNIT_WIDTH != 32
+restart = LABEL();
+#endif
+
+OP2(SLJIT_SUB, TMP1, 0, STR_PTR, 0, SLJIT_IMM, diff);
+OP1(SLJIT_MOV, TMP2, 0, STR_PTR, 0);
+OP2(SLJIT_AND, TMP2, 0, TMP2, 0, SLJIT_IMM, 0xf);
+OP2(SLJIT_SUB, STR_PTR, 0, STR_PTR, 0, TMP2, 0);
+
+/* VLD vd, rj, si12 */
+push_inst(compiler, VLD | VD(data1_ind) | RJ_V(str_ptr_reg_ind) | IMM_SI12(0));
+
+jump[0] = CMP(SLJIT_GREATER_EQUAL, TMP1, 0, STR_PTR, 0);
+
+/* VLD vd, rj, si12 */
+push_inst(compiler, VLD | VD(data2_ind) | RJ_V(str_ptr_reg_ind) | IMM_SI12(-(sljit_s8)diff));
+jump[1] = JUMP(SLJIT_JUMP);
+
+JUMPHERE(jump[0]);
+
+/* VBSLL.V vd, vj, ui5 */
+push_inst(compiler, VBSLL_V | VD(data2_ind) | VJ(data1_ind) | IMM_UI5(diff));
+
+JUMPHERE(jump[1]);
+
+fast_forward_char_pair_lsx_compare(compiler, compare2_type, data2_ind, cmp2a_ind, cmp2b_ind, tmp2_ind);
+fast_forward_char_pair_lsx_compare(compiler, compare1_type, data1_ind, cmp1a_ind, cmp1b_ind, tmp1_ind);
+
+/* VAND.V vd, vj, vk */
+push_inst(compiler, VAND_V | VD(data1_ind) | VJ(data1_ind) | VK(data2_ind));
+
+/* VMSKLTZ.B vd, vj */
+push_inst(compiler, VMSKLTZ_B | VD(tmp1_ind) | VJ(data1_ind));
+
+/* VPICKVE2GR.WU rd, vj, ui2 */
+push_inst(compiler, VPICKVE2GR_WU | RD_V(tmp1_reg_ind) | VJ(tmp1_ind) | IMM_UI2(0));
+
+/* Ignore matches before the first STR_PTR. */
+OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, TMP2, 0);
+OP2(SLJIT_LSHR, TMP1, 0, TMP1, 0, TMP2, 0);
+
+jump[0] = CMP(SLJIT_NOT_ZERO, TMP1, 0, SLJIT_IMM, 0);
+
+OP2(SLJIT_SUB, STR_PTR, 0, STR_PTR, 0, TMP2, 0);
+
+/* Main loop. */
+start = LABEL();
+
+OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, 16);
+add_jump(compiler, &common->failed_match, CMP(SLJIT_GREATER_EQUAL, STR_PTR, 0, STR_END, 0));
+
+/* VLD vd, rj, si12 */
+push_inst(compiler, VLD | VD(data1_ind) | RJ_V(str_ptr_reg_ind) | IMM_SI12(0));
+push_inst(compiler, VLD | VD(data2_ind) | RJ_V(str_ptr_reg_ind) | IMM_SI12(-(sljit_s8)diff));
+
+fast_forward_char_pair_lsx_compare(compiler, compare1_type, data1_ind, cmp1a_ind, cmp1b_ind, tmp2_ind);
+fast_forward_char_pair_lsx_compare(compiler, compare2_type, data2_ind, cmp2a_ind, cmp2b_ind, tmp1_ind);
+
+/* VAND.V vd, vj, vk */
+push_inst(compiler, VAND_V | VD(data1_ind) | VJ(data1_ind) | VK(data2_ind));
+
+/* VMSKLTZ.B vd, vj */
+push_inst(compiler, VMSKLTZ_B | VD(tmp1_ind) | VJ(data1_ind));
+
+/* VPICKVE2GR.WU rd, vj, ui2 */
+push_inst(compiler, VPICKVE2GR_WU | RD_V(tmp1_reg_ind) | VJ(tmp1_ind) | IMM_UI2(0));
+
+CMPTO(SLJIT_ZERO, TMP1, 0, SLJIT_IMM, 0, start);
+
+JUMPHERE(jump[0]);
+
+/* CTZ.W rd, rj */
+push_inst(compiler, CTZ_W | RD_V(tmp1_reg_ind) | RJ_V(tmp1_reg_ind));
+
+OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, TMP1, 0);
+
+add_jump(compiler, &common->failed_match, CMP(SLJIT_GREATER_EQUAL, STR_PTR, 0, STR_END, 0));
+
+#if defined SUPPORT_UNICODE && PCRE2_CODE_UNIT_WIDTH != 32
+if (common->utf)
+ {
+ OP1(MOV_UCHAR, TMP1, 0, SLJIT_MEM1(STR_PTR), IN_UCHARS(-offs1));
+
+ jump[0] = jump_if_utf_char_start(compiler, TMP1);
+
+ OP2(SLJIT_ADD, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(1));
+ CMPTO(SLJIT_LESS, STR_PTR, 0, STR_END, 0, restart);
+
+ add_jump(compiler, &common->failed_match, JUMP(SLJIT_JUMP));
+
+ JUMPHERE(jump[0]);
+ }
+#endif
+
+OP2(SLJIT_SUB, STR_PTR, 0, STR_PTR, 0, SLJIT_IMM, IN_UCHARS(offs1));
+
+if (common->match_end_ptr != 0)
+ OP1(SLJIT_MOV, STR_END, 0, TMP3, 0);
+}
+
+#endif /* SLJIT_CONFIG_LOONGARCH_64 */
+
#endif /* !SUPPORT_VALGRIND */
diff --git a/src/3rdparty/pcre2/src/pcre2_maketables.c b/src/3rdparty/pcre2/src/pcre2_maketables.c
index 56d2494023..ac8b63b809 100644
--- a/src/3rdparty/pcre2/src/pcre2_maketables.c
+++ b/src/3rdparty/pcre2/src/pcre2_maketables.c
@@ -52,8 +52,6 @@ PCRE2_DFTABLES is defined. */
# include "pcre2_internal.h"
#endif
-
-
/*************************************************
* Create PCRE2 character tables *
*************************************************/
@@ -98,7 +96,11 @@ for (i = 0; i < 256; i++) *p++ = tolower(i);
/* Next the case-flipping table */
-for (i = 0; i < 256; i++) *p++ = islower(i)? toupper(i) : tolower(i);
+for (i = 0; i < 256; i++)
+ {
+ int c = islower(i)? toupper(i) : tolower(i);
+ *p++ = (c < 256)? c : i;
+ }
/* Then the character class tables. Don't try to be clever and save effort on
exclusive ones - in some locales things may be different.
diff --git a/src/3rdparty/pcre2/src/pcre2_match.c b/src/3rdparty/pcre2/src/pcre2_match.c
index 168b9fad01..6c422c2e5e 100644
--- a/src/3rdparty/pcre2/src/pcre2_match.c
+++ b/src/3rdparty/pcre2/src/pcre2_match.c
@@ -7,7 +7,7 @@ and semantics are as close as possible to those of the Perl 5 language.
Written by Philip Hazel
Original API code Copyright (c) 1997-2012 University of Cambridge
- New API code Copyright (c) 2015-2022 University of Cambridge
+ New API code Copyright (c) 2015-2024 University of Cambridge
-----------------------------------------------------------------------------
Redistribution and use in source and binary forms, with or without
@@ -43,6 +43,8 @@ POSSIBILITY OF SUCH DAMAGE.
#include "config.h"
#endif
+#include "pcre2_internal.h"
+
/* These defines enable debugging code */
/* #define DEBUG_FRAMES_DISPLAY */
@@ -53,6 +55,10 @@ POSSIBILITY OF SUCH DAMAGE.
#include <stdarg.h>
#endif
+#ifdef DEBUG_SHOW_OPS
+static const char *OP_names[] = { OP_NAME_LIST };
+#endif
+
/* These defines identify the name of the block containing "static"
information, and fields within it. */
@@ -60,8 +66,6 @@ information, and fields within it. */
#define PSSTART start_subject /* Field containing processed string start */
#define PSEND end_subject /* Field containing processed string end */
-#include "pcre2_internal.h"
-
#define RECURSE_UNSET 0xffffffffu /* Bigger than max group number */
/* Masks for identifying the public options that are permitted at match time. */
@@ -69,7 +73,8 @@ information, and fields within it. */
#define PUBLIC_MATCH_OPTIONS \
(PCRE2_ANCHORED|PCRE2_ENDANCHORED|PCRE2_NOTBOL|PCRE2_NOTEOL|PCRE2_NOTEMPTY| \
PCRE2_NOTEMPTY_ATSTART|PCRE2_NO_UTF_CHECK|PCRE2_PARTIAL_HARD| \
- PCRE2_PARTIAL_SOFT|PCRE2_NO_JIT|PCRE2_COPY_MATCHED_SUBJECT)
+ PCRE2_PARTIAL_SOFT|PCRE2_NO_JIT|PCRE2_COPY_MATCHED_SUBJECT| \
+ PCRE2_DISABLE_RECURSELOOP_CHECK)
#define PUBLIC_JIT_MATCH_OPTIONS \
(PCRE2_NO_UTF_CHECK|PCRE2_NOTBOL|PCRE2_NOTEOL|PCRE2_NOTEMPTY|\
@@ -150,7 +155,7 @@ changed, the code at RETURN_SWITCH below must be updated in sync. */
enum { RM1=1, RM2, RM3, RM4, RM5, RM6, RM7, RM8, RM9, RM10,
RM11, RM12, RM13, RM14, RM15, RM16, RM17, RM18, RM19, RM20,
RM21, RM22, RM23, RM24, RM25, RM26, RM27, RM28, RM29, RM30,
- RM31, RM32, RM33, RM34, RM35, RM36 };
+ RM31, RM32, RM33, RM34, RM35, RM36, RM37 };
#ifdef SUPPORT_WIDE_CHARS
enum { RM100=100, RM101 };
@@ -597,11 +602,12 @@ heapframe *P = NULL;
heapframe *frames_top; /* End of frames vector */
heapframe *assert_accept_frame = NULL; /* For passing back a frame with captures */
-PCRE2_SIZE heapframes_size; /* Usable size of frames vector */
PCRE2_SIZE frame_copy_size; /* Amount to copy when creating a new frame */
/* Local variables that do not need to be preserved over calls to RRMATCH(). */
+PCRE2_SPTR branch_end = NULL;
+PCRE2_SPTR branch_start;
PCRE2_SPTR bracode; /* Temp pointer to start of group */
PCRE2_SIZE offset; /* Used for group offsets */
PCRE2_SIZE length; /* Used for various length calculations */
@@ -635,13 +641,10 @@ copied when a new frame is created. */
frame_copy_size = frame_size - offsetof(heapframe, eptr);
-/* Set up the first frame and the end of the frames vector. We set the local
-heapframes_size to the usuable amount of the vector, that is, a whole number of
-frames. */
+/* Set up the first frame and the end of the frames vector. */
F = match_data->heapframes;
-heapframes_size = (match_data->heapframes_size / frame_size) * frame_size;
-frames_top = (heapframe *)((char *)F + heapframes_size);
+frames_top = (heapframe *)((char *)F + match_data->heapframes_size);
Frdepth = 0; /* "Recursion" depth */
Fcapture_last = 0; /* Number of most recent capture */
@@ -662,35 +665,54 @@ MATCH_RECURSE:
doubling the size, but constrained by the heap limit (which is in KiB). */
N = (heapframe *)((char *)F + frame_size);
-if (N >= frames_top)
+if ((heapframe *)((char *)N + frame_size) >= frames_top)
{
heapframe *new;
- PCRE2_SIZE newsize = match_data->heapframes_size * 2;
+ PCRE2_SIZE newsize;
+ PCRE2_SIZE usedsize = (char *)N - (char *)(match_data->heapframes);
- if (newsize > mb->heap_limit)
+ if (match_data->heapframes_size >= PCRE2_SIZE_MAX / 2)
{
- PCRE2_SIZE maxsize = (mb->heap_limit/frame_size) * frame_size;
- if (match_data->heapframes_size >= maxsize) return PCRE2_ERROR_HEAPLIMIT;
- newsize = maxsize;
+ if (match_data->heapframes_size == PCRE2_SIZE_MAX - 1)
+ return PCRE2_ERROR_NOMEMORY;
+ newsize = PCRE2_SIZE_MAX - 1;
}
+ else
+ newsize = match_data->heapframes_size * 2;
+ if (newsize / 1024 >= mb->heap_limit)
+ {
+ PCRE2_SIZE old_size = match_data->heapframes_size / 1024;
+ if (mb->heap_limit <= old_size)
+ return PCRE2_ERROR_HEAPLIMIT;
+ else
+ {
+ PCRE2_SIZE max_delta = 1024 * (mb->heap_limit - old_size);
+ int over_bytes = match_data->heapframes_size % 1024;
+ if (over_bytes) max_delta -= (1024 - over_bytes);
+ newsize = match_data->heapframes_size + max_delta;
+ }
+ }
+
+ /* With a heap limit set, the permitted additional size may not be enough for
+ another frame, so do a final check. */
+
+ if (newsize - usedsize < frame_size) return PCRE2_ERROR_HEAPLIMIT;
new = match_data->memctl.malloc(newsize, match_data->memctl.memory_data);
if (new == NULL) return PCRE2_ERROR_NOMEMORY;
- memcpy(new, match_data->heapframes, heapframes_size);
+ memcpy(new, match_data->heapframes, usedsize);
- F = (heapframe *)((char *)new + ((char *)F - (char *)match_data->heapframes));
- N = (heapframe *)((char *)F + frame_size);
+ N = (heapframe *)((char *)new + usedsize);
+ F = (heapframe *)((char *)N - frame_size);
match_data->memctl.free(match_data->heapframes, match_data->memctl.memory_data);
match_data->heapframes = new;
match_data->heapframes_size = newsize;
-
- heapframes_size = (newsize / frame_size) * frame_size;
- frames_top = (heapframe *)((char *)new + heapframes_size);
+ frames_top = (heapframe *)((char *)new + newsize);
}
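The growth logic above doubles the frame vector, but caps the new size so that it never exceeds mb->heap_limit when expressed in KiB; the over_bytes adjustment keeps the cap exact when the current size is not a multiple of 1024. A standalone sketch of that size calculation (not the actual PCRE2 code; the PCRE2_SIZE_MAX/NOMEMORY special case is omitted):

#include <stddef.h>
#include <stdint.h>

/* Illustrative only: next heap-frame vector size under a limit given in KiB. */
static size_t next_frames_size(size_t current, size_t heap_limit_kib)
{
size_t newsize = (current >= SIZE_MAX / 2)? SIZE_MAX - 1 : current * 2;
if (newsize / 1024 >= heap_limit_kib)
  {
  size_t old_kib = current / 1024;
  size_t over_bytes = current % 1024;
  size_t max_delta;
  if (heap_limit_kib <= old_kib) return 0;   /* caller reports HEAPLIMIT */
  max_delta = 1024 * (heap_limit_kib - old_kib);
  if (over_bytes != 0) max_delta -= 1024 - over_bytes;
  newsize = current + max_delta;
  }
return newsize;
}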
#ifdef DEBUG_SHOW_RMATCH
-fprintf(stderr, "++ RMATCH %2d frame=%d", Freturn_id, Frdepth + 1);
+fprintf(stderr, "++ RMATCH %d frame=%d", Freturn_id, Frdepth + 1);
if (group_frame_type != 0)
{
fprintf(stderr, " type=%x ", group_frame_type);
@@ -760,10 +782,16 @@ opcodes. */
if (mb->match_call_count++ >= mb->match_limit) return PCRE2_ERROR_MATCHLIMIT;
if (Frdepth >= mb->match_limit_depth) return PCRE2_ERROR_DEPTHLIMIT;
+#ifdef DEBUG_SHOW_OPS
+fprintf(stderr, "\n++ New frame: type=0x%x subject offset %ld\n",
+ GF_IDMASK(Fgroup_frame_type), Feptr - mb->start_subject);
+#endif
+
for (;;)
{
#ifdef DEBUG_SHOW_OPS
-fprintf(stderr, "++ op=%d\n", *Fecode);
+fprintf(stderr, "++ %2ld op=%3d %s\n", Fecode - mb->start_code, *Fecode,
+ OP_names[*Fecode]);
#endif
Fop = (uint8_t)(*Fecode); /* Cast needed for 16-bit and 32-bit modes */
@@ -811,15 +839,16 @@ fprintf(stderr, "++ op=%d\n", *Fecode);
assert_accept_frame = F;
RRETURN(MATCH_ACCEPT);
- /* If recursing, we have to find the most recent recursion. */
+ /* For ACCEPT within a recursion, we have to find the most recent
+ recursion. If not in a recursion, fall through to code that is common with
+ OP_END. */
case OP_ACCEPT:
- case OP_END:
-
- /* Handle end of a recursion. */
-
if (Fcurrent_recurse != RECURSE_UNSET)
{
+#ifdef DEBUG_SHOW_OPS
+ fprintf(stderr, "++ Accept within recursion\n");
+#endif
offset = Flast_group_offset;
for(;;)
{
@@ -842,27 +871,49 @@ fprintf(stderr, "++ op=%d\n", *Fecode);
Fecode += 1 + LINK_SIZE;
continue;
}
+ /* Fall through */
+
+ /* OP_END itself can never be reached within a recursion because that is
+ picked up when the OP_KET that always precedes OP_END is reached. */
+
+ case OP_END:
- /* Not a recursion. Fail for an empty string match if either PCRE2_NOTEMPTY
- is set, or if PCRE2_NOTEMPTY_ATSTART is set and we have matched at the
- start of the subject. In both cases, backtracking will then try other
- alternatives, if any. */
+ /* Fail for an empty string match if either PCRE2_NOTEMPTY is set, or if
+ PCRE2_NOTEMPTY_ATSTART is set and we have matched at the start of the
+ subject. In both cases, backtracking will then try other alternatives, if
+ any. */
if (Feptr == Fstart_match &&
((mb->moptions & PCRE2_NOTEMPTY) != 0 ||
((mb->moptions & PCRE2_NOTEMPTY_ATSTART) != 0 &&
Fstart_match == mb->start_subject + mb->start_offset)))
+ {
+#ifdef DEBUG_SHOW_OPS
+ fprintf(stderr, "++ Backtrack because empty string\n");
+#endif
RRETURN(MATCH_NOMATCH);
+ }
- /* Also fail if PCRE2_ENDANCHORED is set and the end of the match is not
+ /* Fail if PCRE2_ENDANCHORED is set and the end of the match is not
the end of the subject. After (*ACCEPT) we fail the entire match (at this
- position) but backtrack on reaching the end of the pattern. */
+ position) but backtrack if we've reached the end of the pattern. This
+ applies whether or not we are in a recursion. */
if (Feptr < mb->end_subject &&
((mb->moptions | mb->poptions) & PCRE2_ENDANCHORED) != 0)
{
- if (Fop == OP_END) RRETURN(MATCH_NOMATCH);
- return MATCH_NOMATCH;
+ if (Fop == OP_END)
+ {
+#ifdef DEBUG_SHOW_OPS
+ fprintf(stderr, "++ Backtrack because not at end (endanchored set)\n");
+#endif
+ RRETURN(MATCH_NOMATCH);
+ }
+
+#ifdef DEBUG_SHOW_OPS
+ fprintf(stderr, "++ Failed ACCEPT not at end (endanchnored set)\n");
+#endif
+ return MATCH_NOMATCH; /* (*ACCEPT) */
}
/* We have a successful match of the whole pattern. Record the result and
@@ -2435,6 +2486,7 @@ fprintf(stderr, "++ op=%d\n", *Fecode);
GETCHARINCTEST(fc, Feptr);
{
const uint32_t *cp;
+ uint32_t chartype;
const ucd_record *prop = GET_UCD(fc);
BOOL notmatch = Fop == OP_NOTPROP;
@@ -2445,9 +2497,10 @@ fprintf(stderr, "++ op=%d\n", *Fecode);
break;
case PT_LAMP:
- if ((prop->chartype == ucp_Lu ||
- prop->chartype == ucp_Ll ||
- prop->chartype == ucp_Lt) == notmatch)
+ chartype = prop->chartype;
+ if ((chartype == ucp_Lu ||
+ chartype == ucp_Ll ||
+ chartype == ucp_Lt) == notmatch)
RRETURN(MATCH_NOMATCH);
break;
@@ -2477,8 +2530,9 @@ fprintf(stderr, "++ op=%d\n", *Fecode);
/* These are specials */
case PT_ALNUM:
- if ((PRIV(ucp_gentype)[prop->chartype] == ucp_L ||
- PRIV(ucp_gentype)[prop->chartype] == ucp_N) == notmatch)
+ chartype = prop->chartype;
+ if ((PRIV(ucp_gentype)[chartype] == ucp_L ||
+ PRIV(ucp_gentype)[chartype] == ucp_N) == notmatch)
RRETURN(MATCH_NOMATCH);
break;
@@ -2503,13 +2557,22 @@ fprintf(stderr, "++ op=%d\n", *Fecode);
break;
case PT_WORD:
- if ((PRIV(ucp_gentype)[prop->chartype] == ucp_L ||
- PRIV(ucp_gentype)[prop->chartype] == ucp_N ||
- fc == CHAR_UNDERSCORE) == notmatch)
+ chartype = prop->chartype;
+ if ((PRIV(ucp_gentype)[chartype] == ucp_L ||
+ PRIV(ucp_gentype)[chartype] == ucp_N ||
+ chartype == ucp_Mn ||
+ chartype == ucp_Pc) == notmatch)
RRETURN(MATCH_NOMATCH);
break;
case PT_CLIST:
+#if PCRE2_CODE_UNIT_WIDTH == 32
+ if (fc > MAX_UTF_CODE_POINT)
+ {
+          if (notmatch) break;
+ RRETURN(MATCH_NOMATCH);
+ }
+#endif
cp = PRIV(ucd_caseless_sets) + Fecode[2];
for (;;)
{
@@ -2805,16 +2868,17 @@ fprintf(stderr, "++ op=%d\n", *Fecode);
case PT_WORD:
for (i = 1; i <= Lmin; i++)
{
- int category;
+ int chartype, category;
if (Feptr >= mb->end_subject)
{
SCHECK_PARTIAL();
RRETURN(MATCH_NOMATCH);
}
GETCHARINCTEST(fc, Feptr);
- category = UCD_CATEGORY(fc);
+ chartype = UCD_CHARTYPE(fc);
+ category = PRIV(ucp_gentype)[chartype];
if ((category == ucp_L || category == ucp_N ||
- fc == CHAR_UNDERSCORE) == notmatch)
+ chartype == ucp_Mn || chartype == ucp_Pc) == notmatch)
RRETURN(MATCH_NOMATCH);
}
break;
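This hunk (and the matching PT_WORD hunks later in the file) widens the Unicode definition of a word character: general category L or N, plus the specific character types Mn (non-spacing mark) and Pc (connector punctuation), instead of special-casing the underscore. A sketch of the resulting predicate, assuming UCD_CHARTYPE, PRIV(ucp_gentype) and the ucp_* constants from pcre2_internal.h:

/* Illustrative only: the updated \w-style test applied by PT_WORD. */
static BOOL is_ucp_word_char(uint32_t c)
{
int chartype = UCD_CHARTYPE(c);
int category = PRIV(ucp_gentype)[chartype];
return category == ucp_L || category == ucp_N ||
  chartype == ucp_Mn || chartype == ucp_Pc;
}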
@@ -2829,6 +2893,13 @@ fprintf(stderr, "++ op=%d\n", *Fecode);
RRETURN(MATCH_NOMATCH);
}
GETCHARINCTEST(fc, Feptr);
+#if PCRE2_CODE_UNIT_WIDTH == 32
+ if (fc > MAX_UTF_CODE_POINT)
+ {
+ if (notmatch) continue;
+ RRETURN(MATCH_NOMATCH);
+ }
+#endif
cp = PRIV(ucd_caseless_sets) + Lpropvalue;
for (;;)
{
@@ -3609,7 +3680,7 @@ fprintf(stderr, "++ op=%d\n", *Fecode);
case PT_WORD:
for (;;)
{
- int category;
+ int chartype, category;
RMATCH(Fecode, RM215);
if (rrc != MATCH_NOMATCH) RRETURN(rrc);
if (Lmin++ >= Lmax) RRETURN(MATCH_NOMATCH);
@@ -3619,10 +3690,12 @@ fprintf(stderr, "++ op=%d\n", *Fecode);
RRETURN(MATCH_NOMATCH);
}
GETCHARINCTEST(fc, Feptr);
- category = UCD_CATEGORY(fc);
+ chartype = UCD_CHARTYPE(fc);
+ category = PRIV(ucp_gentype)[chartype];
if ((category == ucp_L ||
category == ucp_N ||
- fc == CHAR_UNDERSCORE) == (Lctype == OP_NOTPROP))
+ chartype == ucp_Mn ||
+ chartype == ucp_Pc) == (Lctype == OP_NOTPROP))
RRETURN(MATCH_NOMATCH);
}
/* Control never gets here */
@@ -3640,6 +3713,13 @@ fprintf(stderr, "++ op=%d\n", *Fecode);
RRETURN(MATCH_NOMATCH);
}
GETCHARINCTEST(fc, Feptr);
+#if PCRE2_CODE_UNIT_WIDTH == 32
+ if (fc > MAX_UTF_CODE_POINT)
+ {
+ if (Lctype == OP_NOTPROP) continue;
+ RRETURN(MATCH_NOMATCH);
+ }
+#endif
cp = PRIV(ucd_caseless_sets) + Lpropvalue;
for (;;)
{
@@ -4190,7 +4270,7 @@ fprintf(stderr, "++ op=%d\n", *Fecode);
case PT_WORD:
for (i = Lmin; i < Lmax; i++)
{
- int category;
+ int chartype, category;
int len = 1;
if (Feptr >= mb->end_subject)
{
@@ -4198,9 +4278,12 @@ fprintf(stderr, "++ op=%d\n", *Fecode);
break;
}
GETCHARLENTEST(fc, Feptr, len);
- category = UCD_CATEGORY(fc);
- if ((category == ucp_L || category == ucp_N ||
- fc == CHAR_UNDERSCORE) == notmatch)
+ chartype = UCD_CHARTYPE(fc);
+ category = PRIV(ucp_gentype)[chartype];
+ if ((category == ucp_L ||
+ category == ucp_N ||
+ chartype == ucp_Mn ||
+ chartype == ucp_Pc) == notmatch)
break;
Feptr+= len;
}
@@ -4217,14 +4300,24 @@ fprintf(stderr, "++ op=%d\n", *Fecode);
break;
}
GETCHARLENTEST(fc, Feptr, len);
- cp = PRIV(ucd_caseless_sets) + Lpropvalue;
- for (;;)
+#if PCRE2_CODE_UNIT_WIDTH == 32
+ if (fc > MAX_UTF_CODE_POINT)
{
- if (fc < *cp)
- { if (notmatch) break; else goto GOT_MAX; }
- if (fc == *cp++)
- { if (notmatch) goto GOT_MAX; else break; }
+ if (!notmatch) goto GOT_MAX;
+ }
+ else
+#endif
+ {
+ cp = PRIV(ucd_caseless_sets) + Lpropvalue;
+ for (;;)
+ {
+ if (fc < *cp)
+ { if (notmatch) break; else goto GOT_MAX; }
+ if (fc == *cp++)
+ { if (notmatch) goto GOT_MAX; else break; }
+ }
}
+
Feptr += len;
}
GOT_MAX:
@@ -5322,9 +5415,11 @@ fprintf(stderr, "++ op=%d\n", *Fecode);
/* ===================================================================== */
- /* Recursion either matches the current regex, or some subexpression. The
- offset data is the offset to the starting bracket from the start of the
- whole pattern. (This is so that it works from duplicated subpatterns.) */
+ /* Pattern recursion either matches the current regex, or some
+ subexpression. The offset data is the offset to the starting bracket from
+ the start of the whole pattern. This is so that it works from duplicated
+ subpatterns. For a whole-pattern recursion, we have to infer the number
+ zero. */
#define Lframe_type F->temp_32[0]
#define Lstart_branch F->temp_sptr[0]
@@ -5333,9 +5428,12 @@ fprintf(stderr, "++ op=%d\n", *Fecode);
bracode = mb->start_code + GET(Fecode, 1);
number = (bracode == mb->start_code)? 0 : GET2(bracode, 1 + LINK_SIZE);
- /* If we are already in a recursion, check for repeating the same one
- without advancing the subject pointer. This should catch convoluted mutual
- recursions. (Some simple cases are caught at compile time.) */
+ /* If we are already in a pattern recursion, check for repeating the same
+ one without changing the subject pointer or the last referenced character
+ in the subject. This should catch convoluted mutual recursions; some
+ simple cases are caught at compile time. However, there are rare cases when
+ this check needs to be turned off. In this case, actual recursion loops
+ will be caught by the match or heap limits. */
if (Fcurrent_recurse != RECURSE_UNSET)
{
@@ -5346,15 +5444,19 @@ fprintf(stderr, "++ op=%d\n", *Fecode);
P = (heapframe *)((char *)N - frame_size);
if (N->group_frame_type == (GF_RECURSE | number))
{
- if (Feptr == P->eptr) return PCRE2_ERROR_RECURSELOOP;
+ if (Feptr == P->eptr && mb->last_used_ptr == P->recurse_last_used &&
+ (mb->moptions & PCRE2_DISABLE_RECURSELOOP_CHECK) == 0)
+ return PCRE2_ERROR_RECURSELOOP;
break;
}
offset = P->last_group_offset;
}
}
- /* Now run the recursion, branch by branch. */
+ /* Remember the current last referenced character and then run the
+ recursion branch by branch. */
+ F->recurse_last_used = mb->last_used_ptr;
Lstart_branch = bracode;
Lframe_type = GF_RECURSE | number;
@@ -5683,13 +5785,13 @@ fprintf(stderr, "++ op=%d\n", *Fecode);
/* ===================================================================== */
- /* Move the subject pointer back. This occurs only at the start of each
- branch of a lookbehind assertion. If we are too close to the start to move
- back, fail. When working with UTF-8 we move back a number of characters,
- not bytes. */
+ /* Move the subject pointer back by one fixed amount. This occurs at the
+ start of each branch that has a fixed length in a lookbehind assertion. If
+ we are too close to the start to move back, fail. When working with UTF-8
+ we move back a number of characters, not bytes. */
case OP_REVERSE:
- number = GET(Fecode, 1);
+ number = GET2(Fecode, 1);
#ifdef SUPPORT_UNICODE
if (utf)
{
@@ -5703,7 +5805,7 @@ fprintf(stderr, "++ op=%d\n", *Fecode);
else
#endif
- /* No UTF-8 support, or not in UTF-8 mode: count is code unit count */
+ /* No UTF support, or not in UTF mode: count is code unit count */
{
if ((ptrdiff_t)number > Feptr - mb->start_subject) RRETURN(MATCH_NOMATCH);
@@ -5713,15 +5815,84 @@ fprintf(stderr, "++ op=%d\n", *Fecode);
/* Save the earliest consulted character, then skip to next opcode */
if (Feptr < mb->start_used_ptr) mb->start_used_ptr = Feptr;
- Fecode += 1 + LINK_SIZE;
+ Fecode += 1 + IMM2_SIZE;
break;
/* ===================================================================== */
+ /* Move the subject pointer back by a variable amount. This occurs at the
+ start of each branch of a lookbehind assertion when the branch has a
+ variable, but limited, length. A loop is needed to try matching the branch
+ after moving back different numbers of characters. If we are too close to
+ the start to move back even the minimum amount, fail. When working with
+ UTF-8 we move back a number of characters, not bytes. */
+
+#define Lmin F->temp_32[0]
+#define Lmax F->temp_32[1]
+#define Leptr F->temp_sptr[0]
+
+ case OP_VREVERSE:
+ Lmin = GET2(Fecode, 1);
+ Lmax = GET2(Fecode, 1 + IMM2_SIZE);
+ Leptr = Feptr;
+
+ /* Move back by the maximum branch length and then work forwards. This
+ ensures that items such as \d{3,5} get the maximum length, which is
+ relevant for captures, and makes for Perl compatibility. */
+
+#ifdef SUPPORT_UNICODE
+ if (utf)
+ {
+ for (i = 0; i < Lmax; i++)
+ {
+ if (Feptr == mb->start_subject)
+ {
+ if (i < Lmin) RRETURN(MATCH_NOMATCH);
+ Lmax = i;
+ break;
+ }
+ Feptr--;
+ BACKCHAR(Feptr);
+ }
+ }
+ else
+#endif
+
+ /* No UTF support or not in UTF mode */
+
+ {
+ ptrdiff_t diff = Feptr - mb->start_subject;
+ uint32_t available = (diff > 65535)? 65535 : ((diff > 0)? (int)diff : 0);
+ if (Lmin > available) RRETURN(MATCH_NOMATCH);
+ if (Lmax > available) Lmax = available;
+ Feptr -= Lmax;
+ }
+
+ /* Now try matching, moving forward one character on failure, until we
+ reach the minimum back length. */
+
+ for (;;)
+ {
+ RMATCH(Fecode + 1 + 2 * IMM2_SIZE, RM37);
+ if (rrc != MATCH_NOMATCH) RRETURN(rrc);
+ if (Lmax-- <= Lmin) RRETURN(MATCH_NOMATCH);
+ Feptr++;
+#ifdef SUPPORT_UNICODE
+ if (utf) { FORWARDCHARTEST(Feptr, mb->end_subject); }
+#endif
+ }
+ /* Control never reaches here */
+
+#undef Lmin
+#undef Lmax
+#undef Leptr
+
+ /* ===================================================================== */
/* An alternation is the end of a branch; scan along to find the end of the
bracketed group. */
case OP_ALT:
+ branch_end = Fecode;
do Fecode += GET(Fecode,1); while (*Fecode == OP_ALT);
break;
@@ -5729,7 +5900,8 @@ fprintf(stderr, "++ op=%d\n", *Fecode);
/* ===================================================================== */
/* The end of a parenthesized group. For all but OP_BRA and OP_COND, the
starting frame was added to the chained frames in order to remember the
- starting subject position for the group. */
+ starting subject position for the group. (Not true for OP_BRA when it's a
+ whole pattern recursion, but that is handled separately below.) */
case OP_KET:
case OP_KETRMIN:
@@ -5738,8 +5910,14 @@ fprintf(stderr, "++ op=%d\n", *Fecode);
bracode = Fecode - GET(Fecode, 1);
- /* Point N to the frame at the start of the most recent group.
- Remember the subject pointer at the start of the group. */
+ if (branch_end == NULL) branch_end = Fecode;
+ branch_start = bracode;
+ while (branch_start + GET(branch_start, 1) != branch_end)
+ branch_start += GET(branch_start, 1);
+ branch_end = NULL;
+
+ /* Point N to the frame at the start of the most recent group, and P to its
+ predecessor. Remember the subject pointer at the start of the group. */
if (*bracode != OP_BRA && *bracode != OP_COND)
{
@@ -5775,27 +5953,64 @@ fprintf(stderr, "++ op=%d\n", *Fecode);
switch (*bracode)
{
- case OP_BRA: /* No need to do anything for these */
- case OP_COND:
+ /* Whole pattern recursion is handled as a recursion into group 0, but
+ the entire pattern is wrapped in OP_BRA/OP_KET rather than a capturing
+ group - a design mistake: it should perhaps have been capture group 0.
+ Anyway, that means the end of such recursion must be handled here. It is
+ detected by checking for an immediately following OP_END when we are
+ recursing in group 0. If this is not the end of a whole-pattern
+ recursion, there is nothing to be done. */
+
+ case OP_BRA:
+ if (Fcurrent_recurse != 0 || Fecode[1+LINK_SIZE] != OP_END) break;
+
+ /* It is the end of whole-pattern recursion. */
+
+ offset = Flast_group_offset;
+ if (offset == PCRE2_UNSET) return PCRE2_ERROR_INTERNAL;
+ N = (heapframe *)((char *)match_data->heapframes + offset);
+ P = (heapframe *)((char *)N - frame_size);
+ Flast_group_offset = P->last_group_offset;
+
+ /* Reinstate the previous set of captures and then carry on after the
+ recursion call. */
+
+ memcpy((char *)F + offsetof(heapframe, ovector), P->ovector,
+ Foffset_top * sizeof(PCRE2_SIZE));
+ Foffset_top = P->offset_top;
+ Fcapture_last = P->capture_last;
+ Fcurrent_recurse = P->current_recurse;
+ Fecode = P->ecode + 1 + LINK_SIZE;
+ continue; /* With next opcode */
+
+ case OP_COND: /* No need to do anything for these */
case OP_SCOND:
break;
/* Non-atomic positive assertions are like OP_BRA, except that the
subject pointer must be put back to where it was at the start of the
- assertion. */
+ assertion. For a variable lookbehind, check its end point. */
- case OP_ASSERT_NA:
case OP_ASSERTBACK_NA:
+ if (branch_start[1 + LINK_SIZE] == OP_VREVERSE && Feptr != P->eptr)
+ RRETURN(MATCH_NOMATCH);
+ /* Fall through */
+
+ case OP_ASSERT_NA:
if (Feptr > mb->last_used_ptr) mb->last_used_ptr = Feptr;
Feptr = P->eptr;
break;
/* Atomic positive assertions are like OP_ONCE, except that in addition
the subject pointer must be put back to where it was at the start of the
- assertion. */
+ assertion. For a variable lookbehind, check its end point. */
- case OP_ASSERT:
case OP_ASSERTBACK:
+ if (branch_start[1 + LINK_SIZE] == OP_VREVERSE && Feptr != P->eptr)
+ RRETURN(MATCH_NOMATCH);
+ /* Fall through */
+
+ case OP_ASSERT:
if (Feptr > mb->last_used_ptr) mb->last_used_ptr = Feptr;
Feptr = P->eptr;
/* Fall through */
@@ -5816,10 +6031,15 @@ fprintf(stderr, "++ op=%d\n", *Fecode);
break;
/* A matching negative assertion returns MATCH, which is turned into
- NOMATCH at the assertion level. */
+ NOMATCH at the assertion level. For a variable lookbehind, check its end
+ point. */
- case OP_ASSERT_NOT:
case OP_ASSERTBACK_NOT:
+ if (branch_start[1 + LINK_SIZE] == OP_VREVERSE && Feptr != P->eptr)
+ RRETURN(MATCH_NOMATCH);
+ /* Fall through */
+
+ case OP_ASSERT_NOT:
RRETURN(MATCH_MATCH);
/* At the end of a script run, apply the script-checking rules. This code
@@ -5830,9 +6050,8 @@ fprintf(stderr, "++ op=%d\n", *Fecode);
if (!PRIV(script_run)(P->eptr, Feptr, utf)) RRETURN(MATCH_NOMATCH);
break;
- /* Whole-pattern recursion is coded as a recurse into group 0, so it
- won't be picked up here. Instead, we catch it when the OP_END is reached.
- Other recursion is handled here. */
+ /* Whole-pattern recursion is coded as a recurse into group 0, and is
+ handled with OP_BRA above. Other recursion is handled here. */
case OP_CBRA:
case OP_CBRAPOS:
@@ -5847,7 +6066,7 @@ fprintf(stderr, "++ op=%d\n", *Fecode);
{
P = (heapframe *)((char *)N - frame_size);
memcpy((char *)F + offsetof(heapframe, ovector), P->ovector,
- P->offset_top * sizeof(PCRE2_SIZE));
+ Foffset_top * sizeof(PCRE2_SIZE));
Foffset_top = P->offset_top;
Fcapture_last = P->capture_last;
Fcurrent_recurse = P->current_recurse;
@@ -5930,10 +6149,10 @@ fprintf(stderr, "++ op=%d\n", *Fecode);
if ((mb->poptions & PCRE2_DOLLAR_ENDONLY) == 0) goto ASSERT_NL_OR_EOS;
/* Fall through */
- /* Unconditional end of subject assertion (\z) */
+ /* Unconditional end of subject assertion (\z). */
case OP_EOD:
- if (Feptr < mb->end_subject) RRETURN(MATCH_NOMATCH);
+ if (Feptr < mb->true_end_subject) RRETURN(MATCH_NOMATCH);
if (mb->partial != 0)
{
mb->hitend = TRUE;
@@ -6045,6 +6264,8 @@ fprintf(stderr, "++ op=%d\n", *Fecode);
case OP_NOT_WORD_BOUNDARY:
case OP_WORD_BOUNDARY:
+ case OP_NOT_UCP_WORD_BOUNDARY:
+ case OP_UCP_WORD_BOUNDARY:
if (Feptr == mb->check_subject) prev_is_word = FALSE; else
{
PCRE2_SPTR lastptr = Feptr - 1;
@@ -6059,13 +6280,12 @@ fprintf(stderr, "++ op=%d\n", *Fecode);
fc = *lastptr;
if (lastptr < mb->start_used_ptr) mb->start_used_ptr = lastptr;
#ifdef SUPPORT_UNICODE
- if ((mb->poptions & PCRE2_UCP) != 0)
+ if (Fop == OP_UCP_WORD_BOUNDARY || Fop == OP_NOT_UCP_WORD_BOUNDARY)
{
- if (fc == '_') prev_is_word = TRUE; else
- {
- int cat = UCD_CATEGORY(fc);
- prev_is_word = (cat == ucp_L || cat == ucp_N);
- }
+ int chartype = UCD_CHARTYPE(fc);
+ int category = PRIV(ucp_gentype)[chartype];
+ prev_is_word = (category == ucp_L || category == ucp_N ||
+ chartype == ucp_Mn || chartype == ucp_Pc);
}
else
#endif /* SUPPORT_UNICODE */
@@ -6093,13 +6313,12 @@ fprintf(stderr, "++ op=%d\n", *Fecode);
fc = *Feptr;
if (nextptr > mb->last_used_ptr) mb->last_used_ptr = nextptr;
#ifdef SUPPORT_UNICODE
- if ((mb->poptions & PCRE2_UCP) != 0)
+ if (Fop == OP_UCP_WORD_BOUNDARY || Fop == OP_NOT_UCP_WORD_BOUNDARY)
{
- if (fc == '_') cur_is_word = TRUE; else
- {
- int cat = UCD_CATEGORY(fc);
- cur_is_word = (cat == ucp_L || cat == ucp_N);
- }
+ int chartype = UCD_CHARTYPE(fc);
+ int category = PRIV(ucp_gentype)[chartype];
+ cur_is_word = (category == ucp_L || category == ucp_N ||
+ chartype == ucp_Mn || chartype == ucp_Pc);
}
else
#endif /* SUPPORT_UNICODE */
@@ -6108,7 +6327,7 @@ fprintf(stderr, "++ op=%d\n", *Fecode);
/* Now see if the situation is what we want */
- if ((*Fecode++ == OP_WORD_BOUNDARY)?
+ if ((*Fecode++ == OP_WORD_BOUNDARY || Fop == OP_UCP_WORD_BOUNDARY)?
cur_is_word == prev_is_word : cur_is_word != prev_is_word)
RRETURN(MATCH_NOMATCH);
break;
@@ -6254,7 +6473,7 @@ F = (heapframe *)((char *)F - Fback_frame); /* Backtrack */
mb->cb->callout_flags |= PCRE2_CALLOUT_BACKTRACK; /* Note for callouts */
#ifdef DEBUG_SHOW_RMATCH
-fprintf(stderr, "++ RETURN %d to %d\n", rrc, Freturn_id);
+fprintf(stderr, "++ RETURN %d to RM%d\n", rrc, Freturn_id);
#endif
switch (Freturn_id)
@@ -6263,7 +6482,7 @@ switch (Freturn_id)
LBL( 9) LBL(10) LBL(11) LBL(12) LBL(13) LBL(14) LBL(15) LBL(16)
LBL(17) LBL(18) LBL(19) LBL(20) LBL(21) LBL(22) LBL(23) LBL(24)
LBL(25) LBL(26) LBL(27) LBL(28) LBL(29) LBL(30) LBL(31) LBL(32)
- LBL(33) LBL(34) LBL(35) LBL(36)
+ LBL(33) LBL(34) LBL(35) LBL(36) LBL(37)
#ifdef SUPPORT_WIDE_CHARS
LBL(100) LBL(101)
@@ -6551,6 +6770,7 @@ if (use_jit)
match_data, mcontext);
if (rc != PCRE2_ERROR_JIT_BADOPTION)
{
+ match_data->subject_length = length;
if (rc >= 0 && (options & PCRE2_COPY_MATCHED_SUBJECT) != 0)
{
length = CU2BYTES(length + was_zero_terminated);
@@ -6719,7 +6939,7 @@ if (mcontext == NULL)
else mb->memctl = mcontext->memctl;
anchored = ((re->overall_options | options) & PCRE2_ANCHORED) != 0;
-firstline = (re->overall_options & PCRE2_FIRSTLINE) != 0;
+firstline = !anchored && (re->overall_options & PCRE2_FIRSTLINE) != 0;
startline = (re->flags & PCRE2_STARTLINE) != 0;
bumpalong_limit = (mcontext->offset_limit == PCRE2_UNSET)?
true_end_subject : subject + mcontext->offset_limit;
@@ -6742,6 +6962,7 @@ mb->callout_data = mcontext->callout_data;
mb->start_subject = subject;
mb->start_offset = start_offset;
mb->end_subject = end_subject;
+mb->true_end_subject = true_end_subject;
mb->hasthen = (re->flags & PCRE2_HASTHEN) != 0;
mb->allowemptypartial = (re->max_lookbehind > 0) ||
(re->flags & PCRE2_MATCH_EMPTY) != 0;
@@ -6801,7 +7022,7 @@ the pattern. It is not used at all if there are no capturing parentheses.
frame_size is the total size of each frame
match_data->heapframes is the pointer to the frames vector
- match_data->heapframes_size is the total size of the vector
+ match_data->heapframes_size is the allocated size of the vector
We must pad the frame_size for alignment to ensure subsequent frames are as
aligned as heapframe. Whilst ovector is word-aligned due to being a PCRE2_SIZE
@@ -6816,7 +7037,7 @@ frame_size = (offsetof(heapframe, ovector) +
smaller. */
mb->heap_limit = ((mcontext->heap_limit < re->limit_heap)?
- mcontext->heap_limit : re->limit_heap) * 1024;
+ mcontext->heap_limit : re->limit_heap);
mb->match_limit = (mcontext->match_limit < re->limit_match)?
mcontext->match_limit : re->limit_match;
@@ -6827,19 +7048,19 @@ mb->match_limit_depth = (mcontext->depth_limit < re->limit_depth)?
/* If a pattern has very many capturing parentheses, the frame size may be very
large. Set the initial frame vector size to ensure that there are at least 10
available frames, but enforce a minimum of START_FRAMES_SIZE. If this is
-greater than the heap limit, get as large a vector as possible. Always round
-the size to a multiple of the frame size. */
+greater than the heap limit, get as large a vector as possible. */
heapframes_size = frame_size * 10;
if (heapframes_size < START_FRAMES_SIZE) heapframes_size = START_FRAMES_SIZE;
-if (heapframes_size > mb->heap_limit)
+if (heapframes_size / 1024 > mb->heap_limit)
{
- if (frame_size > mb->heap_limit ) return PCRE2_ERROR_HEAPLIMIT;
- heapframes_size = mb->heap_limit;
+ PCRE2_SIZE max_size = 1024 * mb->heap_limit;
+ if (max_size < frame_size) return PCRE2_ERROR_HEAPLIMIT;
+ heapframes_size = max_size;
}
/* If an existing frame vector in the match_data block is large enough, we can
-use it.Otherwise, free any pre-existing vector and get a new one. */
+use it. Otherwise, free any pre-existing vector and get a new one. */
if (match_data->heapframes_size < heapframes_size)
{
@@ -7286,9 +7507,17 @@ for(;;)
mb->end_offset_top = 0;
mb->skip_arg_count = 0;
+#ifdef DEBUG_SHOW_OPS
+ fprintf(stderr, "++ Calling match()\n");
+#endif
+
rc = match(start_match, mb->start_code, re->top_bracket, frame_size,
match_data, mb);
+#ifdef DEBUG_SHOW_OPS
+ fprintf(stderr, "++ match() returned %d\n\n", rc);
+#endif
+
if (mb->hitend && start_partial == NULL)
{
start_partial = mb->start_used_ptr;
@@ -7436,6 +7665,7 @@ if (utf && end_subject != true_end_subject &&
if (start_match >= true_end_subject)
{
rc = MATCH_NOMATCH; /* In case it was partial */
+ match_partial = NULL;
break;
}
@@ -7485,6 +7715,7 @@ if (rc == MATCH_MATCH)
{
match_data->rc = ((int)mb->end_offset_top >= 2 * match_data->oveccount)?
0 : (int)mb->end_offset_top/2 + 1;
+ match_data->subject_length = length;
match_data->startchar = start_match - subject;
match_data->leftchar = mb->start_used_ptr - subject;
match_data->rightchar = ((mb->last_used_ptr > mb->end_match_ptr)?
@@ -7499,6 +7730,7 @@ if (rc == MATCH_MATCH)
match_data->flags |= PCRE2_MD_COPIED_SUBJECT;
}
else match_data->subject = subject;
+
return match_data->rc;
}
@@ -7520,6 +7752,7 @@ PCRE2_ERROR_PARTIAL. */
else if (match_partial != NULL)
{
match_data->subject = subject;
+ match_data->subject_length = length;
match_data->ovector[0] = match_partial - subject;
match_data->ovector[1] = end_subject - subject;
match_data->startchar = match_partial - subject;
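
A minimal usage sketch (not part of the patch, and assuming an 8-bit PCRE2 build) of the variable-length lookbehind support that the new OP_VREVERSE opcode implements above: the engine steps back by the maximum branch length first and then retries forward, so a pattern such as (?<=a{2,4})b prefers the longest lookbehind match, as the comments describe.

#define PCRE2_CODE_UNIT_WIDTH 8
#include <pcre2.h>
#include <stdio.h>

int main(void)
{
int errcode;
PCRE2_SIZE erroffset;
pcre2_code *re = pcre2_compile((PCRE2_SPTR)"(?<=a{2,4})b",
  PCRE2_ZERO_TERMINATED, 0, &errcode, &erroffset, NULL);
if (re == NULL) return 1;

pcre2_match_data *md = pcre2_match_data_create_from_pattern(re, NULL);
int rc = pcre2_match(re, (PCRE2_SPTR)"xaaab", 5, 0, 0, md, NULL);
printf("rc=%d\n", rc);   /* > 0 when "b" is preceded by 2 to 4 'a' characters */

pcre2_match_data_free(md);
pcre2_code_free(re);
return 0;
}
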
diff --git a/src/3rdparty/pcre2/src/pcre2_match_data.c b/src/3rdparty/pcre2/src/pcre2_match_data.c
index fa129b8bc5..757dab9df5 100644
--- a/src/3rdparty/pcre2/src/pcre2_match_data.c
+++ b/src/3rdparty/pcre2/src/pcre2_match_data.c
@@ -170,4 +170,16 @@ return offsetof(pcre2_match_data, ovector) +
2 * (match_data->oveccount) * sizeof(PCRE2_SIZE);
}
+
+
+/*************************************************
+* Get heapframes size *
+*************************************************/
+
+PCRE2_EXP_DEFN PCRE2_SIZE PCRE2_CALL_CONVENTION
+pcre2_get_match_data_heapframes_size(pcre2_match_data *match_data)
+{
+return match_data->heapframes_size;
+}
+
/* End of pcre2_match_data.c */
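
A small sketch of how the accessor added above might be used; md stands for any existing pcre2_match_data block that has already been through pcre2_match(), and <stdio.h> is assumed to be available.

/* Sketch only: report how large the backtracking frames vector has grown.
   This can help decide whether to keep reusing a match data block or to
   free it between matches. */
PCRE2_SIZE frames_bytes = pcre2_get_match_data_heapframes_size(md);
fprintf(stderr, "heapframes vector: %zu bytes\n", (size_t)frames_bytes);
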
diff --git a/src/3rdparty/pcre2/src/pcre2_study.c b/src/3rdparty/pcre2/src/pcre2_study.c
index 4db3ad1184..792e696dad 100644
--- a/src/3rdparty/pcre2/src/pcre2_study.c
+++ b/src/3rdparty/pcre2/src/pcre2_study.c
@@ -7,7 +7,7 @@ and semantics are as close as possible to those of the Perl 5 language.
Written by Philip Hazel
Original API code Copyright (c) 1997-2012 University of Cambridge
- New API code Copyright (c) 2016-2021 University of Cambridge
+ New API code Copyright (c) 2016-2023 University of Cambridge
-----------------------------------------------------------------------------
Redistribution and use in source and binary forms, with or without
@@ -256,6 +256,7 @@ for (;;)
/* Skip over things that don't match chars */
case OP_REVERSE:
+ case OP_VREVERSE:
case OP_CREF:
case OP_DNCREF:
case OP_RREF:
@@ -273,6 +274,8 @@ for (;;)
case OP_DOLLM:
case OP_NOT_WORD_BOUNDARY:
case OP_WORD_BOUNDARY:
+ case OP_NOT_UCP_WORD_BOUNDARY:
+ case OP_UCP_WORD_BOUNDARY:
cc += PRIV(OP_lengths)[*cc];
break;
@@ -976,6 +979,7 @@ do
while (try_next) /* Loop for items in this branch */
{
int rc;
+ PCRE2_SPTR ncode;
uint8_t *classmap = NULL;
#ifdef SUPPORT_WIDE_CHARS
PCRE2_UCHAR xclassflags;
@@ -1054,6 +1058,7 @@ do
case OP_REF:
case OP_REFI:
case OP_REVERSE:
+ case OP_VREVERSE:
case OP_RREF:
case OP_SCOND:
case OP_SET_SOM:
@@ -1101,13 +1106,100 @@ do
case OP_WORD_BOUNDARY:
case OP_NOT_WORD_BOUNDARY:
+ case OP_UCP_WORD_BOUNDARY:
+ case OP_NOT_UCP_WORD_BOUNDARY:
tcode++;
break;
- /* If we hit a bracket or a positive lookahead assertion, recurse to set
- bits from within the subpattern. If it can't find anything, we have to
- give up. If it finds some mandatory character(s), we are done for this
- branch. Otherwise, carry on scanning after the subpattern. */
+ /* For a positive lookahead assertion, inspect what immediately follows,
+ ignoring intermediate assertions and callouts. If the next item is one
+ that sets a mandatory character, skip this assertion. Otherwise, treat it
+ the same as other bracket groups. */
+
+ case OP_ASSERT:
+ case OP_ASSERT_NA:
+ ncode = tcode + GET(tcode, 1);
+ while (*ncode == OP_ALT) ncode += GET(ncode, 1);
+ ncode += 1 + LINK_SIZE;
+
+ /* Skip irrelevant items */
+
+ for (BOOL done = FALSE; !done;)
+ {
+ switch (*ncode)
+ {
+ case OP_ASSERT:
+ case OP_ASSERT_NOT:
+ case OP_ASSERTBACK:
+ case OP_ASSERTBACK_NOT:
+ case OP_ASSERT_NA:
+ case OP_ASSERTBACK_NA:
+ ncode += GET(ncode, 1);
+ while (*ncode == OP_ALT) ncode += GET(ncode, 1);
+ ncode += 1 + LINK_SIZE;
+ break;
+
+ case OP_WORD_BOUNDARY:
+ case OP_NOT_WORD_BOUNDARY:
+ case OP_UCP_WORD_BOUNDARY:
+ case OP_NOT_UCP_WORD_BOUNDARY:
+ ncode++;
+ break;
+
+ case OP_CALLOUT:
+ ncode += PRIV(OP_lengths)[OP_CALLOUT];
+ break;
+
+ case OP_CALLOUT_STR:
+ ncode += GET(ncode, 1 + 2*LINK_SIZE);
+ break;
+
+ default:
+ done = TRUE;
+ break;
+ }
+ }
+
+ /* Now check the next significant item. */
+
+ switch(*ncode)
+ {
+ default:
+ break;
+
+ case OP_PROP:
+ if (ncode[1] != PT_CLIST) break;
+ /* Fall through */
+ case OP_ANYNL:
+ case OP_CHAR:
+ case OP_CHARI:
+ case OP_EXACT:
+ case OP_EXACTI:
+ case OP_HSPACE:
+ case OP_MINPLUS:
+ case OP_MINPLUSI:
+ case OP_PLUS:
+ case OP_PLUSI:
+ case OP_POSPLUS:
+ case OP_POSPLUSI:
+ case OP_VSPACE:
+ /* Note that these types will only be present in non-UCP mode. */
+ case OP_DIGIT:
+ case OP_NOT_DIGIT:
+ case OP_WORDCHAR:
+ case OP_NOT_WORDCHAR:
+ case OP_WHITESPACE:
+ case OP_NOT_WHITESPACE:
+ tcode = ncode;
+ continue; /* With the following significant opcode */
+ }
+ /* Fall through */
+
+ /* For a group bracket or a positive assertion without an immediately
+ following mandatory setting, recurse to set bits from within the
+ subpattern. If it can't find anything, we have to give up. If it finds
+ some mandatory character(s), we are done for this branch. Otherwise,
+ carry on scanning after the subpattern. */
case OP_BRA:
case OP_SBRA:
@@ -1119,8 +1211,6 @@ do
case OP_SCBRAPOS:
case OP_ONCE:
case OP_SCRIPT_RUN:
- case OP_ASSERT:
- case OP_ASSERT_NA:
rc = set_start_bits(re, tcode, utf, ucp, depthptr);
if (rc == SSB_DONE)
{
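
A sketch (assumptions: an 8-bit build, a compiled pattern re, and <stdio.h> available) of how the start-of-match bitmap computed by set_start_bits() can be inspected through the public API; which bits end up set for patterns that begin with a lookahead now depends on the analysis added above.

static void show_first_code_units(const pcre2_code *re)
{
const uint8_t *bitmap = NULL;
if (pcre2_pattern_info(re, PCRE2_INFO_FIRSTBITMAP, &bitmap) != 0 || bitmap == NULL)
  return;                          /* no start bitmap was computed for this pattern */
for (int c = 0; c < 256; c++)
  if ((bitmap[c/8] & (1u << (c % 8))) != 0)
    printf("possible first code unit: %d\n", c);
}
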
diff --git a/src/3rdparty/pcre2/src/pcre2_substring.c b/src/3rdparty/pcre2/src/pcre2_substring.c
index ddf5774e15..14e919dce9 100644
--- a/src/3rdparty/pcre2/src/pcre2_substring.c
+++ b/src/3rdparty/pcre2/src/pcre2_substring.c
@@ -7,7 +7,7 @@ and semantics are as close as possible to those of the Perl 5 language.
Written by Philip Hazel
Original API code Copyright (c) 1997-2012 University of Cambridge
- New API code Copyright (c) 2016-2018 University of Cambridge
+ New API code Copyright (c) 2016-2023 University of Cambridge
-----------------------------------------------------------------------------
Redistribution and use in source and binary forms, with or without
@@ -309,6 +309,7 @@ Returns: if successful: 0
PCRE2_ERROR_NOSUBSTRING: no such substring
PCRE2_ERROR_UNAVAILABLE: ovector is too small
PCRE2_ERROR_UNSET: substring is not set
+ PCRE2_ERROR_INVALIDOFFSET: internal error, should not occur
*/
PCRE2_EXP_DEFN int PCRE2_CALL_CONVENTION
@@ -341,6 +342,8 @@ else /* Matched using pcre2_dfa_match() */
left = match_data->ovector[stringnumber*2];
right = match_data->ovector[stringnumber*2+1];
+if (left > match_data->subject_length || right > match_data->subject_length)
+ return PCRE2_ERROR_INVALIDOFFSET;
if (sizeptr != NULL) *sizeptr = (left > right)? 0 : right - left;
return 0;
}
@@ -442,7 +445,7 @@ Returns: nothing
*/
PCRE2_EXP_DEFN void PCRE2_CALL_CONVENTION
-pcre2_substring_list_free(PCRE2_SPTR *list)
+pcre2_substring_list_free(PCRE2_UCHAR **list)
{
if (list != NULL)
{
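
A hedged sketch of what a caller of pcre2_substring_length_bynumber() sees with the bounds check added above; md is assumed to hold the result of a successful match, and <stdio.h> to be available.

/* Sketch only: PCRE2_ERROR_INVALIDOFFSET is only expected if the ovector
   somehow holds offsets beyond the recorded subject length. */
PCRE2_SIZE len;
int rc = pcre2_substring_length_bynumber(md, 1, &len);
if (rc == 0) printf("capture 1 length: %zu\n", (size_t)len);
else if (rc == PCRE2_ERROR_INVALIDOFFSET) fprintf(stderr, "inconsistent ovector offsets\n");
else fprintf(stderr, "substring error %d\n", rc);
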
diff --git a/src/3rdparty/pcre2/src/pcre2_tables.c b/src/3rdparty/pcre2/src/pcre2_tables.c
index e00252f1eb..097a1acca8 100644
--- a/src/3rdparty/pcre2/src/pcre2_tables.c
+++ b/src/3rdparty/pcre2/src/pcre2_tables.c
@@ -7,7 +7,7 @@ and semantics are as close as possible to those of the Perl 5 language.
Written by Philip Hazel
Original API code Copyright (c) 1997-2012 University of Cambridge
- New API code Copyright (c) 2016-2021 University of Cambridge
+ New API code Copyright (c) 2016-2024 University of Cambridge
-----------------------------------------------------------------------------
Redistribution and use in source and binary forms, with or without
@@ -171,9 +171,9 @@ are implementing).
6. Do not break after Prepend characters.
7. Do not break within emoji modifier sequences or emoji zwj sequences. That
- is, do not break between characters with the Extended_Pictographic property.
- Extend and ZWJ characters are allowed between the characters; this cannot be
- represented in this table, the code has to deal with it.
+ is, do not break between characters with the Extended_Pictographic property
+ if a ZWJ intervenes. Extend characters are allowed between the characters;
+ this cannot be represented in this table, the code has to deal with it.
8. Do not break within emoji flag sequences. That is, do not break between
regional indicator (RI) symbols if there are an odd number of RI characters
@@ -203,8 +203,8 @@ const uint32_t PRIV(ucp_gbtable)[] = {
ESZ|(1u<<ucp_gbT), /* 10 LVT */
(1u<<ucp_gbRegional_Indicator), /* 11 Regional Indicator */
ESZ, /* 12 Other */
- ESZ, /* 13 ZWJ */
- ESZ|(1u<<ucp_gbExtended_Pictographic) /* 14 Extended Pictographic */
+ ESZ|(1u<<ucp_gbExtended_Pictographic), /* 13 ZWJ */
+ ESZ /* 14 Extended Pictographic */
};
#undef ESZ
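
A brief sketch of the observable effect of the grapheme-break table fix above, assuming a UTF-8 build: \X matches one extended grapheme cluster, and with the corrected table two Extended_Pictographic characters stay in the same cluster only when a ZWJ sits between them.

/* Sketch only: compile \X in UTF mode and iterate clusters in a subject.
   "<pictograph> ZWJ <pictograph>" should count as one cluster, while two
   adjacent pictographs without a ZWJ count as two. */
int errcode;
PCRE2_SIZE erroffset;
pcre2_code *re = pcre2_compile((PCRE2_SPTR)"\\X", PCRE2_ZERO_TERMINATED,
  PCRE2_UTF | PCRE2_UCP, &errcode, &erroffset, NULL);
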
diff --git a/src/3rdparty/pcre2/src/pcre2_ucd.c b/src/3rdparty/pcre2/src/pcre2_ucd.c
index 5e0fc37c35..97dbc8b26f 100644
--- a/src/3rdparty/pcre2/src/pcre2_ucd.c
+++ b/src/3rdparty/pcre2/src/pcre2_ucd.c
@@ -68,15 +68,15 @@ the tables when not needed. But don't leave a totally empty module because some
compilers barf at that. Instead, just supply some small dummy tables. */
#ifndef SUPPORT_UNICODE
-const ucd_record PRIV(ucd_records)[] = {{0,0,0,0,0,0,0 }};
+const ucd_record PRIV(ucd_records)[] = {{0,0,0,0,0,0,0}};
const uint16_t PRIV(ucd_stage1)[] = {0};
const uint16_t PRIV(ucd_stage2)[] = {0};
const uint32_t PRIV(ucd_caseless_sets)[] = {0};
#else
-/* Total size: 111116 bytes, block size: 128. */
+/* Total size: 112564 bytes, block size: 128. */
-const char *PRIV(unicode_version) = "14.0.0";
+const char *PRIV(unicode_version) = "15.0.0";
/* When recompiling tables with a new Unicode version, please check the types
in this structure definition with those in pcre2_internal.h (the actual field
@@ -152,16 +152,16 @@ decimal digits. It is used to ensure that all the digits in a script run come
from the same set. */
const uint32_t PRIV(ucd_digit_sets)[] = {
- 66, /* Number of subsequent values */
+ 68, /* Number of subsequent values */
0x00039, 0x00669, 0x006f9, 0x007c9, 0x0096f, 0x009ef, 0x00a6f, 0x00aef,
0x00b6f, 0x00bef, 0x00c6f, 0x00cef, 0x00d6f, 0x00def, 0x00e59, 0x00ed9,
0x00f29, 0x01049, 0x01099, 0x017e9, 0x01819, 0x0194f, 0x019d9, 0x01a89,
0x01a99, 0x01b59, 0x01bb9, 0x01c49, 0x01c59, 0x0a629, 0x0a8d9, 0x0a909,
0x0a9d9, 0x0a9f9, 0x0aa59, 0x0abf9, 0x0ff19, 0x104a9, 0x10d39, 0x1106f,
0x110f9, 0x1113f, 0x111d9, 0x112f9, 0x11459, 0x114d9, 0x11659, 0x116c9,
- 0x11739, 0x118e9, 0x11959, 0x11c59, 0x11d59, 0x11da9, 0x16a69, 0x16ac9,
- 0x16b59, 0x1d7d7, 0x1d7e1, 0x1d7eb, 0x1d7f5, 0x1d7ff, 0x1e149, 0x1e2f9,
- 0x1e959, 0x1fbf9,
+ 0x11739, 0x118e9, 0x11959, 0x11c59, 0x11d59, 0x11da9, 0x11f59, 0x16a69,
+ 0x16ac9, 0x16b59, 0x1d7d7, 0x1d7e1, 0x1d7eb, 0x1d7f5, 0x1d7ff, 0x1e149,
+ 0x1e2f9, 0x1e4f9, 0x1e959, 0x1fbf9,
};
/* This vector is a list of script bitsets for the Script Extension property.
@@ -323,6 +323,7 @@ const uint32_t PRIV(ucd_boolprop_sets)[] = {
0x21004024u, 0x00040000u,
0x20808004u, 0x00040000u,
0x60800944u, 0x000c0004u,
+ 0x60800064u, 0x000c0004u,
0x60802004u, 0x000c0000u,
0x60800344u, 0x000c8000u,
0x22808000u, 0x00040000u,
@@ -334,7 +335,6 @@ const uint32_t PRIV(ucd_boolprop_sets)[] = {
0x01008020u, 0x00000000u,
0x21408024u, 0x00040000u,
0x00808000u, 0x00000000u,
- 0x60800064u, 0x000c0004u,
0x60800044u, 0x000c1004u,
0x60800064u, 0x000c1004u,
0x01002020u, 0x00000001u,
@@ -424,7 +424,7 @@ offset to multichar other cases or zero (8 bits), offset to other case or zero
(32 bits, signed), bidi class (5 bits) and script extension (11 bits) packed
into a 16-bit field, and offset in binary properties table (16 bits). */
-const ucd_record PRIV(ucd_records)[] = { /* 16908 bytes, record size 12 */
+const ucd_record PRIV(ucd_records)[] = { /* 17076 bytes, record size 12 */
{ 69, 0, 2, 0, 0, 6144, 2, }, /* 0 */
{ 69, 0, 2, 0, 0, 43008, 4, }, /* 1 */
{ 69, 0, 1, 0, 0, 4096, 4, }, /* 2 */
@@ -498,7 +498,7 @@ const ucd_record PRIV(ucd_records)[] = { /* 16908 bytes, record size 12 */
{ 0, 5, 12, 0, 0, 18432, 60, }, /* 70 */
{ 0, 5, 12, 0, 0, 18432, 80, }, /* 71 */
{ 0, 9, 12, 0, -121, 18432, 74, }, /* 72 */
- { 0, 5, 12, 1, -268, 18432, 70, }, /* 73 */
+ { 0, 5, 12, 1, 0, 18432, 70, }, /* 73 */
{ 0, 5, 12, 0, 195, 18432, 76, }, /* 74 */
{ 0, 9, 12, 0, 210, 18432, 74, }, /* 75 */
{ 0, 9, 12, 0, 206, 18432, 74, }, /* 76 */
@@ -819,57 +819,57 @@ const ucd_record PRIV(ucd_records)[] = { /* 16908 bytes, record size 12 */
{ 11, 23, 12, 0, 0, 14336, 68, }, /* 391 */
{ 12, 12, 3, 0, 0, 26624, 130, }, /* 392 */
{ 12, 10, 5, 0, 0, 18432, 144, }, /* 393 */
- { 12, 12, 3, 0, 0, 26624, 102, }, /* 394 */
- { 12, 7, 12, 0, 0, 18432, 82, }, /* 395 */
- { 12, 12, 3, 0, 0, 26624, 96, }, /* 396 */
- { 12, 12, 3, 0, 0, 26624, 146, }, /* 397 */
- { 12, 13, 12, 0, 0, 18432, 138, }, /* 398 */
- { 12, 21, 12, 0, 0, 18432, 68, }, /* 399 */
- { 12, 15, 12, 0, 0, 28672, 68, }, /* 400 */
- { 12, 26, 12, 0, 0, 18432, 68, }, /* 401 */
- { 13, 7, 12, 0, 0, 18432, 82, }, /* 402 */
- { 13, 12, 3, 0, 0, 26624, 130, }, /* 403 */
- { 13, 10, 5, 0, 0, 18432, 144, }, /* 404 */
- { 13, 21, 12, 0, 0, 18432, 68, }, /* 405 */
- { 13, 12, 3, 0, 0, 26624, 96, }, /* 406 */
- { 13, 12, 3, 0, 0, 18432, 130, }, /* 407 */
- { 13, 10, 3, 0, 0, 18432, 148, }, /* 408 */
- { 13, 12, 3, 0, 0, 26624, 146, }, /* 409 */
- { 13, 13, 12, 0, 0, 18528, 138, }, /* 410 */
- { 14, 12, 3, 0, 0, 26624, 130, }, /* 411 */
- { 14, 10, 5, 0, 0, 18432, 144, }, /* 412 */
- { 14, 7, 12, 0, 0, 18432, 82, }, /* 413 */
- { 14, 12, 3, 0, 0, 26624, 146, }, /* 414 */
- { 14, 10, 3, 0, 0, 18432, 148, }, /* 415 */
- { 14, 7, 4, 0, 0, 18432, 82, }, /* 416 */
- { 14, 26, 12, 0, 0, 18432, 68, }, /* 417 */
- { 14, 15, 12, 0, 0, 18432, 68, }, /* 418 */
- { 14, 13, 12, 0, 0, 18432, 138, }, /* 419 */
- { 15, 12, 3, 0, 0, 26624, 130, }, /* 420 */
- { 15, 10, 5, 0, 0, 18432, 144, }, /* 421 */
- { 15, 7, 12, 0, 0, 18432, 82, }, /* 422 */
- { 15, 12, 3, 0, 0, 26624, 146, }, /* 423 */
- { 15, 10, 3, 0, 0, 18432, 148, }, /* 424 */
- { 15, 13, 12, 0, 0, 18432, 138, }, /* 425 */
- { 15, 21, 12, 0, 0, 18432, 68, }, /* 426 */
- { 72, 7, 12, 0, 0, 18432, 82, }, /* 427 */
- { 72, 12, 3, 0, 0, 26624, 130, }, /* 428 */
- { 72, 7, 5, 0, 0, 18432, 152, }, /* 429 */
- { 72, 12, 3, 0, 0, 26624, 154, }, /* 430 */
- { 69, 23, 12, 0, 0, 14336, 68, }, /* 431 */
- { 72, 7, 12, 0, 0, 18432, 156, }, /* 432 */
- { 72, 6, 12, 0, 0, 18432, 136, }, /* 433 */
- { 72, 12, 3, 0, 0, 26624, 96, }, /* 434 */
- { 72, 21, 12, 0, 0, 18432, 68, }, /* 435 */
- { 72, 13, 12, 0, 0, 18432, 138, }, /* 436 */
- { 72, 21, 12, 0, 0, 18432, 106, }, /* 437 */
- { 73, 7, 12, 0, 0, 18432, 82, }, /* 438 */
- { 73, 12, 3, 0, 0, 26624, 130, }, /* 439 */
- { 73, 7, 5, 0, 0, 18432, 152, }, /* 440 */
- { 73, 12, 3, 0, 0, 26624, 146, }, /* 441 */
- { 73, 7, 12, 0, 0, 18432, 156, }, /* 442 */
- { 73, 6, 12, 0, 0, 18432, 136, }, /* 443 */
- { 73, 12, 3, 0, 0, 26624, 96, }, /* 444 */
+ { 12, 7, 12, 0, 0, 18432, 82, }, /* 394 */
+ { 12, 12, 3, 0, 0, 26624, 96, }, /* 395 */
+ { 12, 12, 3, 0, 0, 26624, 146, }, /* 396 */
+ { 12, 13, 12, 0, 0, 18432, 138, }, /* 397 */
+ { 12, 21, 12, 0, 0, 18432, 68, }, /* 398 */
+ { 12, 15, 12, 0, 0, 28672, 68, }, /* 399 */
+ { 12, 26, 12, 0, 0, 18432, 68, }, /* 400 */
+ { 13, 7, 12, 0, 0, 18432, 82, }, /* 401 */
+ { 13, 12, 3, 0, 0, 26624, 130, }, /* 402 */
+ { 13, 10, 5, 0, 0, 18432, 144, }, /* 403 */
+ { 13, 21, 12, 0, 0, 18432, 68, }, /* 404 */
+ { 13, 12, 3, 0, 0, 26624, 96, }, /* 405 */
+ { 13, 12, 3, 0, 0, 18432, 130, }, /* 406 */
+ { 13, 10, 3, 0, 0, 18432, 148, }, /* 407 */
+ { 13, 12, 3, 0, 0, 26624, 146, }, /* 408 */
+ { 13, 13, 12, 0, 0, 18528, 138, }, /* 409 */
+ { 14, 12, 3, 0, 0, 26624, 130, }, /* 410 */
+ { 14, 10, 5, 0, 0, 18432, 144, }, /* 411 */
+ { 14, 7, 12, 0, 0, 18432, 82, }, /* 412 */
+ { 14, 12, 3, 0, 0, 26624, 146, }, /* 413 */
+ { 14, 10, 3, 0, 0, 18432, 148, }, /* 414 */
+ { 14, 7, 4, 0, 0, 18432, 82, }, /* 415 */
+ { 14, 26, 12, 0, 0, 18432, 68, }, /* 416 */
+ { 14, 15, 12, 0, 0, 18432, 68, }, /* 417 */
+ { 14, 13, 12, 0, 0, 18432, 138, }, /* 418 */
+ { 15, 12, 3, 0, 0, 26624, 130, }, /* 419 */
+ { 15, 10, 5, 0, 0, 18432, 144, }, /* 420 */
+ { 15, 7, 12, 0, 0, 18432, 82, }, /* 421 */
+ { 15, 12, 3, 0, 0, 26624, 146, }, /* 422 */
+ { 15, 10, 3, 0, 0, 18432, 148, }, /* 423 */
+ { 15, 13, 12, 0, 0, 18432, 138, }, /* 424 */
+ { 15, 21, 12, 0, 0, 18432, 68, }, /* 425 */
+ { 72, 7, 12, 0, 0, 18432, 82, }, /* 426 */
+ { 72, 12, 3, 0, 0, 26624, 130, }, /* 427 */
+ { 72, 7, 5, 0, 0, 18432, 152, }, /* 428 */
+ { 72, 12, 3, 0, 0, 26624, 154, }, /* 429 */
+ { 69, 23, 12, 0, 0, 14336, 68, }, /* 430 */
+ { 72, 7, 12, 0, 0, 18432, 156, }, /* 431 */
+ { 72, 6, 12, 0, 0, 18432, 136, }, /* 432 */
+ { 72, 12, 3, 0, 0, 26624, 96, }, /* 433 */
+ { 72, 21, 12, 0, 0, 18432, 68, }, /* 434 */
+ { 72, 13, 12, 0, 0, 18432, 138, }, /* 435 */
+ { 72, 21, 12, 0, 0, 18432, 106, }, /* 436 */
+ { 73, 7, 12, 0, 0, 18432, 82, }, /* 437 */
+ { 73, 12, 3, 0, 0, 26624, 130, }, /* 438 */
+ { 73, 7, 5, 0, 0, 18432, 152, }, /* 439 */
+ { 73, 12, 3, 0, 0, 26624, 146, }, /* 440 */
+ { 73, 7, 12, 0, 0, 18432, 156, }, /* 441 */
+ { 73, 6, 12, 0, 0, 18432, 136, }, /* 442 */
+ { 73, 12, 3, 0, 0, 26624, 96, }, /* 443 */
+ { 73, 12, 3, 0, 0, 26624, 102, }, /* 444 */
{ 73, 13, 12, 0, 0, 18432, 138, }, /* 445 */
{ 74, 7, 12, 0, 0, 18432, 82, }, /* 446 */
{ 74, 26, 12, 0, 0, 18432, 68, }, /* 447 */
@@ -884,431 +884,431 @@ const ucd_record PRIV(ucd_records)[] = { /* 16908 bytes, record size 12 */
{ 74, 12, 3, 0, 0, 26624, 130, }, /* 456 */
{ 74, 12, 3, 0, 0, 26624, 162, }, /* 457 */
{ 74, 10, 5, 0, 0, 18432, 144, }, /* 458 */
- { 74, 12, 3, 0, 0, 26624, 146, }, /* 459 */
- { 69, 26, 12, 0, 0, 18432, 68, }, /* 460 */
- { 16, 7, 12, 0, 0, 18432, 82, }, /* 461 */
- { 16, 10, 12, 0, 0, 18432, 144, }, /* 462 */
- { 16, 12, 3, 0, 0, 26624, 130, }, /* 463 */
- { 16, 10, 5, 0, 0, 18432, 144, }, /* 464 */
- { 16, 12, 3, 0, 0, 26624, 96, }, /* 465 */
- { 16, 12, 3, 0, 0, 26624, 146, }, /* 466 */
- { 16, 13, 12, 0, 0, 18549, 138, }, /* 467 */
- { 16, 21, 12, 0, 0, 18432, 124, }, /* 468 */
- { 16, 21, 12, 0, 0, 18432, 68, }, /* 469 */
- { 16, 10, 12, 0, 0, 18432, 164, }, /* 470 */
- { 16, 12, 3, 0, 0, 26624, 128, }, /* 471 */
- { 16, 13, 12, 0, 0, 18432, 138, }, /* 472 */
- { 16, 26, 12, 0, 0, 18432, 68, }, /* 473 */
- { 17, 9, 12, 0, 7264, 18432, 74, }, /* 474 */
- { 17, 5, 12, 0, 3008, 18432, 166, }, /* 475 */
- { 69, 21, 12, 0, 0, 18510, 68, }, /* 476 */
- { 17, 6, 12, 0, 0, 18432, 142, }, /* 477 */
- { 18, 7, 6, 0, 0, 18432, 82, }, /* 478 */
- { 18, 7, 6, 0, 0, 18432, 168, }, /* 479 */
- { 18, 7, 7, 0, 0, 18432, 168, }, /* 480 */
- { 18, 7, 7, 0, 0, 18432, 82, }, /* 481 */
- { 18, 7, 8, 0, 0, 18432, 82, }, /* 482 */
- { 75, 7, 12, 0, 0, 18432, 82, }, /* 483 */
- { 75, 12, 3, 0, 0, 26624, 96, }, /* 484 */
- { 75, 21, 12, 0, 0, 18432, 68, }, /* 485 */
- { 75, 21, 12, 0, 0, 18432, 106, }, /* 486 */
- { 75, 21, 12, 0, 0, 18432, 124, }, /* 487 */
- { 75, 15, 12, 0, 0, 18432, 138, }, /* 488 */
- { 75, 15, 12, 0, 0, 18432, 68, }, /* 489 */
- { 75, 26, 12, 0, 0, 28672, 68, }, /* 490 */
- { 76, 9, 12, 0, 38864, 18432, 170, }, /* 491 */
- { 76, 9, 12, 0, 8, 18432, 170, }, /* 492 */
- { 76, 5, 12, 0, -8, 18432, 70, }, /* 493 */
- { 77, 17, 12, 0, 0, 28672, 126, }, /* 494 */
- { 77, 7, 12, 0, 0, 18432, 82, }, /* 495 */
- { 77, 26, 12, 0, 0, 18432, 68, }, /* 496 */
- { 77, 21, 12, 0, 0, 18432, 124, }, /* 497 */
- { 78, 29, 12, 0, 0, 45056, 52, }, /* 498 */
- { 78, 7, 12, 0, 0, 18432, 82, }, /* 499 */
- { 78, 22, 12, 0, 0, 28672, 158, }, /* 500 */
- { 78, 18, 12, 0, 0, 28672, 158, }, /* 501 */
- { 79, 7, 12, 0, 0, 18432, 82, }, /* 502 */
- { 69, 21, 12, 0, 0, 18432, 106, }, /* 503 */
- { 79, 14, 12, 0, 0, 18432, 82, }, /* 504 */
- { 25, 7, 12, 0, 0, 18432, 82, }, /* 505 */
- { 25, 12, 3, 0, 0, 26624, 130, }, /* 506 */
- { 25, 12, 3, 0, 0, 26624, 146, }, /* 507 */
- { 25, 10, 5, 0, 0, 18432, 172, }, /* 508 */
- { 26, 7, 12, 0, 0, 18432, 82, }, /* 509 */
- { 26, 12, 3, 0, 0, 26624, 130, }, /* 510 */
- { 26, 10, 5, 0, 0, 18432, 174, }, /* 511 */
- { 69, 21, 12, 0, 0, 18573, 124, }, /* 512 */
- { 27, 7, 12, 0, 0, 18432, 82, }, /* 513 */
- { 27, 12, 3, 0, 0, 26624, 130, }, /* 514 */
- { 28, 7, 12, 0, 0, 18432, 82, }, /* 515 */
- { 28, 12, 3, 0, 0, 26624, 130, }, /* 516 */
- { 80, 7, 12, 0, 0, 18432, 82, }, /* 517 */
- { 80, 7, 12, 0, 0, 18432, 140, }, /* 518 */
- { 80, 12, 3, 0, 0, 26624, 100, }, /* 519 */
- { 80, 10, 5, 0, 0, 18432, 144, }, /* 520 */
- { 80, 12, 3, 0, 0, 26624, 130, }, /* 521 */
- { 80, 12, 3, 0, 0, 26624, 96, }, /* 522 */
- { 80, 12, 3, 0, 0, 26624, 146, }, /* 523 */
- { 80, 21, 12, 0, 0, 18432, 106, }, /* 524 */
- { 80, 6, 12, 0, 0, 18432, 142, }, /* 525 */
- { 80, 21, 12, 0, 0, 18432, 68, }, /* 526 */
- { 80, 23, 12, 0, 0, 14336, 68, }, /* 527 */
- { 80, 13, 12, 0, 0, 18432, 138, }, /* 528 */
- { 80, 15, 12, 0, 0, 28672, 68, }, /* 529 */
- { 19, 21, 12, 0, 0, 28672, 68, }, /* 530 */
- { 69, 21, 12, 0, 0, 28777, 106, }, /* 531 */
- { 69, 21, 12, 0, 0, 28777, 124, }, /* 532 */
- { 19, 21, 12, 0, 0, 28672, 106, }, /* 533 */
- { 19, 17, 12, 0, 0, 28672, 126, }, /* 534 */
- { 19, 21, 12, 0, 0, 28672, 124, }, /* 535 */
- { 19, 21, 12, 0, 0, 28672, 176, }, /* 536 */
- { 19, 12, 3, 0, 0, 26624, 178, }, /* 537 */
- { 19, 1, 2, 0, 0, 6144, 66, }, /* 538 */
- { 19, 13, 12, 0, 0, 18432, 138, }, /* 539 */
- { 19, 7, 12, 0, 0, 18432, 82, }, /* 540 */
- { 19, 6, 12, 0, 0, 18432, 136, }, /* 541 */
- { 19, 12, 3, 0, 0, 26624, 180, }, /* 542 */
- { 19, 12, 3, 0, 0, 26624, 130, }, /* 543 */
- { 29, 7, 12, 0, 0, 18432, 82, }, /* 544 */
- { 29, 12, 3, 0, 0, 26624, 130, }, /* 545 */
- { 29, 10, 5, 0, 0, 18432, 144, }, /* 546 */
- { 29, 12, 3, 0, 0, 26624, 96, }, /* 547 */
- { 29, 26, 12, 0, 0, 28672, 68, }, /* 548 */
- { 29, 21, 12, 0, 0, 28672, 124, }, /* 549 */
- { 29, 13, 12, 0, 0, 18432, 138, }, /* 550 */
- { 30, 7, 12, 0, 0, 18432, 82, }, /* 551 */
- { 89, 7, 12, 0, 0, 18432, 82, }, /* 552 */
- { 89, 7, 12, 0, 0, 18432, 156, }, /* 553 */
- { 89, 13, 12, 0, 0, 18432, 138, }, /* 554 */
- { 89, 15, 12, 0, 0, 18432, 138, }, /* 555 */
- { 89, 26, 12, 0, 0, 28672, 68, }, /* 556 */
- { 80, 26, 12, 0, 0, 28672, 68, }, /* 557 */
- { 33, 7, 12, 0, 0, 18432, 82, }, /* 558 */
- { 33, 12, 3, 0, 0, 26624, 130, }, /* 559 */
- { 33, 10, 5, 0, 0, 18432, 144, }, /* 560 */
- { 33, 21, 12, 0, 0, 18432, 68, }, /* 561 */
- { 106, 7, 12, 0, 0, 18432, 82, }, /* 562 */
- { 106, 10, 5, 0, 0, 18432, 144, }, /* 563 */
- { 106, 12, 3, 0, 0, 26624, 130, }, /* 564 */
- { 106, 12, 3, 0, 0, 26624, 182, }, /* 565 */
- { 106, 10, 12, 0, 0, 18432, 144, }, /* 566 */
- { 106, 12, 3, 0, 0, 26624, 96, }, /* 567 */
- { 106, 13, 12, 0, 0, 18432, 138, }, /* 568 */
- { 106, 21, 12, 0, 0, 18432, 68, }, /* 569 */
- { 106, 6, 12, 0, 0, 18432, 136, }, /* 570 */
- { 106, 21, 12, 0, 0, 18432, 124, }, /* 571 */
- { 84, 11, 3, 0, 0, 26624, 184, }, /* 572 */
- { 84, 12, 3, 0, 0, 26624, 130, }, /* 573 */
- { 93, 12, 3, 0, 0, 26624, 130, }, /* 574 */
- { 93, 10, 5, 0, 0, 18432, 144, }, /* 575 */
- { 93, 7, 12, 0, 0, 18432, 82, }, /* 576 */
- { 93, 12, 3, 0, 0, 26624, 96, }, /* 577 */
- { 93, 10, 3, 0, 0, 18432, 148, }, /* 578 */
- { 93, 10, 5, 0, 0, 18432, 172, }, /* 579 */
- { 93, 13, 12, 0, 0, 18432, 138, }, /* 580 */
- { 93, 21, 12, 0, 0, 18432, 124, }, /* 581 */
- { 93, 21, 12, 0, 0, 18432, 68, }, /* 582 */
- { 93, 21, 12, 0, 0, 18432, 106, }, /* 583 */
- { 93, 26, 12, 0, 0, 18432, 68, }, /* 584 */
- { 96, 12, 3, 0, 0, 26624, 130, }, /* 585 */
- { 96, 10, 5, 0, 0, 18432, 144, }, /* 586 */
- { 96, 7, 12, 0, 0, 18432, 82, }, /* 587 */
- { 96, 10, 5, 0, 0, 18432, 172, }, /* 588 */
- { 96, 12, 3, 0, 0, 26624, 146, }, /* 589 */
- { 96, 13, 12, 0, 0, 18432, 138, }, /* 590 */
- { 119, 7, 12, 0, 0, 18432, 82, }, /* 591 */
- { 119, 12, 3, 0, 0, 26624, 102, }, /* 592 */
- { 119, 10, 5, 0, 0, 18432, 144, }, /* 593 */
- { 119, 12, 3, 0, 0, 26624, 130, }, /* 594 */
- { 119, 10, 5, 0, 0, 18432, 174, }, /* 595 */
- { 119, 21, 12, 0, 0, 18432, 68, }, /* 596 */
- { 97, 7, 12, 0, 0, 18432, 82, }, /* 597 */
- { 97, 10, 5, 0, 0, 18432, 144, }, /* 598 */
- { 97, 12, 3, 0, 0, 26624, 130, }, /* 599 */
- { 97, 12, 3, 0, 0, 26624, 186, }, /* 600 */
- { 97, 12, 3, 0, 0, 26624, 96, }, /* 601 */
- { 97, 21, 12, 0, 0, 18432, 124, }, /* 602 */
- { 97, 21, 12, 0, 0, 18432, 106, }, /* 603 */
- { 97, 13, 12, 0, 0, 18432, 138, }, /* 604 */
- { 98, 13, 12, 0, 0, 18432, 138, }, /* 605 */
- { 98, 7, 12, 0, 0, 18432, 82, }, /* 606 */
- { 98, 6, 12, 0, 0, 18432, 92, }, /* 607 */
- { 98, 6, 12, 0, 0, 18432, 94, }, /* 608 */
- { 98, 21, 12, 0, 0, 18432, 124, }, /* 609 */
- { 2, 5, 12, 63, -6222, 18432, 70, }, /* 610 */
- { 2, 5, 12, 67, -6221, 18432, 70, }, /* 611 */
- { 2, 5, 12, 71, -6212, 18432, 70, }, /* 612 */
- { 2, 5, 12, 75, -6210, 18432, 70, }, /* 613 */
- { 2, 5, 12, 79, -6210, 18432, 70, }, /* 614 */
- { 2, 5, 12, 79, -6211, 18432, 70, }, /* 615 */
- { 2, 5, 12, 84, -6204, 18432, 70, }, /* 616 */
- { 2, 5, 12, 88, -6180, 18432, 70, }, /* 617 */
- { 2, 5, 12, 108, 35267, 18432, 70, }, /* 618 */
- { 17, 9, 12, 0, -3008, 18432, 74, }, /* 619 */
- { 96, 21, 12, 0, 0, 18432, 68, }, /* 620 */
- { 84, 12, 3, 0, 0, 26762, 96, }, /* 621 */
- { 84, 12, 3, 0, 0, 26630, 96, }, /* 622 */
- { 69, 21, 12, 0, 0, 18498, 188, }, /* 623 */
- { 84, 12, 3, 0, 0, 26666, 96, }, /* 624 */
- { 84, 12, 3, 0, 0, 26696, 96, }, /* 625 */
- { 84, 12, 3, 0, 0, 26780, 96, }, /* 626 */
- { 69, 10, 5, 0, 0, 18474, 160, }, /* 627 */
- { 69, 7, 12, 0, 0, 18501, 82, }, /* 628 */
- { 69, 7, 12, 0, 0, 18474, 82, }, /* 629 */
- { 69, 7, 12, 0, 0, 18438, 82, }, /* 630 */
- { 69, 7, 12, 0, 0, 18594, 82, }, /* 631 */
- { 69, 7, 12, 0, 0, 18498, 82, }, /* 632 */
- { 84, 12, 3, 0, 0, 26750, 96, }, /* 633 */
- { 69, 10, 5, 0, 0, 18435, 160, }, /* 634 */
- { 84, 12, 3, 0, 0, 26690, 96, }, /* 635 */
- { 69, 7, 12, 0, 0, 18453, 82, }, /* 636 */
- { 2, 5, 12, 0, 0, 18432, 60, }, /* 637 */
- { 1, 6, 12, 0, 0, 18432, 88, }, /* 638 */
- { 2, 6, 12, 0, 0, 18432, 190, }, /* 639 */
- { 0, 5, 12, 0, 35332, 18432, 76, }, /* 640 */
- { 0, 5, 12, 0, 3814, 18432, 76, }, /* 641 */
- { 0, 5, 12, 0, 35384, 18432, 76, }, /* 642 */
- { 0, 5, 12, 0, 0, 18432, 192, }, /* 643 */
- { 0, 6, 12, 0, 0, 18432, 190, }, /* 644 */
- { 0, 6, 12, 0, 0, 18432, 194, }, /* 645 */
- { 1, 6, 12, 0, 0, 18432, 190, }, /* 646 */
- { 84, 12, 3, 0, 0, 26636, 102, }, /* 647 */
- { 84, 12, 3, 0, 0, 26687, 96, }, /* 648 */
- { 84, 12, 3, 0, 0, 26648, 96, }, /* 649 */
- { 0, 9, 12, 92, 1, 18432, 74, }, /* 650 */
- { 0, 5, 12, 92, -1, 18432, 76, }, /* 651 */
- { 0, 5, 12, 0, 0, 18432, 70, }, /* 652 */
- { 0, 5, 12, 92, -58, 18432, 70, }, /* 653 */
- { 0, 9, 12, 0, -7615, 18432, 74, }, /* 654 */
- { 1, 5, 12, 0, 8, 18432, 76, }, /* 655 */
- { 1, 9, 12, 0, -8, 18432, 74, }, /* 656 */
- { 1, 5, 12, 0, 74, 18432, 76, }, /* 657 */
- { 1, 5, 12, 0, 86, 18432, 76, }, /* 658 */
- { 1, 5, 12, 0, 100, 18432, 76, }, /* 659 */
- { 1, 5, 12, 0, 128, 18432, 76, }, /* 660 */
- { 1, 5, 12, 0, 112, 18432, 76, }, /* 661 */
- { 1, 5, 12, 0, 126, 18432, 76, }, /* 662 */
- { 1, 5, 12, 0, 8, 18432, 70, }, /* 663 */
- { 1, 8, 12, 0, -8, 18432, 86, }, /* 664 */
- { 1, 5, 12, 0, 0, 18432, 70, }, /* 665 */
- { 1, 5, 12, 0, 9, 18432, 70, }, /* 666 */
- { 1, 9, 12, 0, -74, 18432, 74, }, /* 667 */
- { 1, 8, 12, 0, -9, 18432, 86, }, /* 668 */
- { 1, 5, 12, 21, -7173, 18432, 76, }, /* 669 */
- { 1, 9, 12, 0, -86, 18432, 74, }, /* 670 */
- { 1, 9, 12, 0, -100, 18432, 74, }, /* 671 */
- { 1, 9, 12, 0, -112, 18432, 74, }, /* 672 */
- { 1, 9, 12, 0, -128, 18432, 74, }, /* 673 */
- { 1, 9, 12, 0, -126, 18432, 74, }, /* 674 */
- { 69, 29, 12, 0, 0, 45056, 52, }, /* 675 */
- { 84, 1, 3, 0, 0, 6144, 196, }, /* 676 */
- { 84, 1, 13, 0, 0, 6144, 198, }, /* 677 */
- { 69, 1, 2, 0, 0, 18432, 200, }, /* 678 */
- { 69, 1, 2, 0, 0, 34816, 200, }, /* 679 */
- { 69, 17, 12, 0, 0, 28672, 202, }, /* 680 */
- { 69, 21, 12, 0, 0, 28672, 64, }, /* 681 */
- { 69, 20, 12, 0, 0, 28672, 204, }, /* 682 */
- { 69, 19, 12, 0, 0, 28672, 204, }, /* 683 */
- { 69, 22, 12, 0, 0, 28672, 206, }, /* 684 */
- { 69, 20, 12, 0, 0, 28672, 206, }, /* 685 */
- { 69, 19, 12, 0, 0, 28672, 206, }, /* 686 */
- { 69, 21, 12, 0, 0, 28672, 208, }, /* 687 */
- { 69, 27, 2, 0, 0, 45056, 50, }, /* 688 */
- { 69, 28, 2, 0, 0, 4096, 50, }, /* 689 */
- { 69, 1, 2, 0, 0, 20480, 134, }, /* 690 */
- { 69, 1, 2, 0, 0, 36864, 134, }, /* 691 */
- { 69, 1, 2, 0, 0, 30720, 134, }, /* 692 */
- { 69, 1, 2, 0, 0, 24576, 134, }, /* 693 */
- { 69, 1, 2, 0, 0, 40960, 134, }, /* 694 */
- { 69, 29, 12, 0, 0, 8291, 52, }, /* 695 */
- { 69, 21, 12, 0, 0, 14336, 54, }, /* 696 */
- { 69, 21, 12, 0, 0, 14336, 64, }, /* 697 */
- { 69, 21, 14, 0, 0, 28672, 210, }, /* 698 */
- { 69, 21, 12, 0, 0, 28672, 212, }, /* 699 */
- { 69, 16, 12, 0, 0, 28672, 138, }, /* 700 */
- { 69, 16, 12, 0, 0, 28672, 214, }, /* 701 */
- { 69, 25, 12, 0, 0, 8192, 64, }, /* 702 */
- { 69, 22, 12, 0, 0, 28672, 216, }, /* 703 */
- { 69, 18, 12, 0, 0, 28672, 216, }, /* 704 */
- { 69, 21, 12, 0, 0, 28672, 202, }, /* 705 */
- { 69, 1, 2, 0, 0, 6144, 218, }, /* 706 */
- { 68, 2, 2, 0, 0, 6144, 220, }, /* 707 */
- { 69, 1, 2, 0, 0, 22528, 134, }, /* 708 */
- { 69, 1, 2, 0, 0, 38912, 134, }, /* 709 */
- { 69, 1, 2, 0, 0, 16384, 134, }, /* 710 */
- { 69, 1, 2, 0, 0, 32768, 134, }, /* 711 */
- { 69, 1, 2, 0, 0, 6144, 222, }, /* 712 */
- { 69, 25, 12, 0, 0, 12288, 118, }, /* 713 */
- { 69, 25, 12, 0, 0, 12288, 224, }, /* 714 */
- { 69, 25, 12, 0, 0, 28672, 118, }, /* 715 */
- { 69, 22, 12, 0, 0, 28672, 226, }, /* 716 */
- { 69, 18, 12, 0, 0, 28672, 226, }, /* 717 */
- { 68, 2, 12, 0, 0, 14336, 0, }, /* 718 */
- { 84, 12, 3, 0, 0, 26624, 228, }, /* 719 */
- { 84, 11, 3, 0, 0, 26624, 120, }, /* 720 */
- { 84, 11, 3, 0, 0, 26624, 230, }, /* 721 */
- { 84, 12, 3, 0, 0, 26753, 102, }, /* 722 */
- { 69, 26, 12, 0, 0, 28672, 68, }, /* 723 */
- { 69, 9, 12, 0, 0, 18432, 112, }, /* 724 */
- { 69, 5, 12, 0, 0, 18432, 232, }, /* 725 */
- { 69, 25, 12, 0, 0, 28672, 234, }, /* 726 */
- { 69, 26, 14, 0, 0, 28672, 236, }, /* 727 */
- { 1, 9, 12, 96, -7517, 18432, 74, }, /* 728 */
- { 69, 26, 12, 0, 0, 28672, 118, }, /* 729 */
- { 0, 9, 12, 100, -8383, 18432, 74, }, /* 730 */
- { 0, 9, 12, 104, -8262, 18432, 74, }, /* 731 */
- { 69, 26, 12, 0, 0, 14336, 238, }, /* 732 */
- { 0, 9, 12, 0, 28, 18432, 74, }, /* 733 */
- { 69, 7, 12, 0, 0, 18432, 240, }, /* 734 */
- { 69, 5, 14, 0, 0, 18432, 242, }, /* 735 */
- { 69, 5, 12, 0, 0, 18432, 244, }, /* 736 */
- { 0, 5, 12, 0, -28, 18432, 76, }, /* 737 */
- { 0, 14, 12, 0, 16, 18432, 74, }, /* 738 */
- { 0, 14, 12, 0, -16, 18432, 76, }, /* 739 */
- { 0, 14, 12, 0, 0, 18432, 82, }, /* 740 */
- { 69, 25, 14, 0, 0, 28672, 246, }, /* 741 */
- { 69, 26, 14, 0, 0, 28672, 246, }, /* 742 */
- { 69, 26, 12, 0, 0, 28672, 64, }, /* 743 */
- { 69, 25, 12, 0, 0, 28672, 248, }, /* 744 */
- { 69, 25, 12, 0, 0, 12288, 250, }, /* 745 */
- { 69, 22, 12, 0, 0, 28672, 248, }, /* 746 */
- { 69, 18, 12, 0, 0, 28672, 248, }, /* 747 */
- { 69, 26, 14, 0, 0, 28672, 252, }, /* 748 */
- { 69, 22, 12, 0, 0, 28672, 254, }, /* 749 */
- { 69, 18, 12, 0, 0, 28672, 254, }, /* 750 */
- { 69, 26, 12, 0, 0, 18432, 54, }, /* 751 */
- { 69, 26, 14, 0, 0, 28672, 256, }, /* 752 */
- { 68, 2, 12, 0, 0, 18432, 258, }, /* 753 */
- { 69, 26, 12, 0, 26, 18432, 260, }, /* 754 */
- { 69, 26, 14, 0, 26, 18432, 262, }, /* 755 */
- { 69, 26, 12, 0, -26, 18432, 264, }, /* 756 */
- { 69, 25, 14, 0, 0, 28672, 266, }, /* 757 */
- { 69, 26, 14, 0, 0, 28672, 268, }, /* 758 */
- { 69, 26, 14, 0, 0, 28672, 270, }, /* 759 */
- { 69, 25, 14, 0, 0, 28672, 268, }, /* 760 */
- { 69, 26, 14, 0, 0, 18432, 256, }, /* 761 */
- { 69, 26, 14, 0, 0, 28672, 272, }, /* 762 */
- { 88, 26, 12, 0, 0, 18432, 54, }, /* 763 */
- { 69, 26, 12, 0, 0, 28672, 216, }, /* 764 */
- { 35, 9, 12, 0, 48, 18432, 74, }, /* 765 */
- { 35, 5, 12, 0, -48, 18432, 76, }, /* 766 */
- { 0, 9, 12, 0, -10743, 18432, 74, }, /* 767 */
- { 0, 9, 12, 0, -3814, 18432, 74, }, /* 768 */
- { 0, 9, 12, 0, -10727, 18432, 74, }, /* 769 */
- { 0, 5, 12, 0, -10795, 18432, 76, }, /* 770 */
- { 0, 5, 12, 0, -10792, 18432, 76, }, /* 771 */
- { 0, 9, 12, 0, -10780, 18432, 74, }, /* 772 */
- { 0, 9, 12, 0, -10749, 18432, 74, }, /* 773 */
- { 0, 9, 12, 0, -10783, 18432, 74, }, /* 774 */
- { 0, 9, 12, 0, -10782, 18432, 74, }, /* 775 */
- { 0, 9, 12, 0, -10815, 18432, 74, }, /* 776 */
- { 34, 5, 12, 0, 0, 18432, 60, }, /* 777 */
- { 34, 26, 12, 0, 0, 28672, 68, }, /* 778 */
- { 34, 12, 3, 0, 0, 26624, 96, }, /* 779 */
- { 34, 21, 12, 0, 0, 28672, 68, }, /* 780 */
- { 34, 15, 12, 0, 0, 28672, 68, }, /* 781 */
- { 17, 5, 12, 0, -7264, 18432, 76, }, /* 782 */
- { 90, 7, 12, 0, 0, 18432, 82, }, /* 783 */
- { 90, 6, 12, 0, 0, 18432, 142, }, /* 784 */
- { 90, 21, 12, 0, 0, 18432, 68, }, /* 785 */
- { 90, 12, 3, 0, 0, 26624, 182, }, /* 786 */
- { 2, 12, 3, 0, 0, 26624, 130, }, /* 787 */
- { 69, 20, 12, 0, 0, 28672, 216, }, /* 788 */
- { 69, 19, 12, 0, 0, 28672, 216, }, /* 789 */
- { 69, 6, 12, 0, 0, 28672, 274, }, /* 790 */
- { 69, 21, 12, 0, 0, 28672, 276, }, /* 791 */
- { 69, 21, 12, 0, 0, 28726, 54, }, /* 792 */
- { 23, 26, 12, 0, 0, 28672, 278, }, /* 793 */
- { 69, 26, 12, 0, 0, 28672, 280, }, /* 794 */
- { 69, 26, 12, 0, 0, 28672, 282, }, /* 795 */
- { 69, 21, 12, 0, 0, 28825, 276, }, /* 796 */
- { 69, 21, 12, 0, 0, 28825, 212, }, /* 797 */
- { 69, 21, 12, 0, 0, 28819, 54, }, /* 798 */
- { 23, 6, 12, 0, 0, 18432, 136, }, /* 799 */
- { 69, 7, 12, 0, 0, 18447, 284, }, /* 800 */
- { 23, 14, 12, 0, 0, 18432, 284, }, /* 801 */
- { 69, 22, 12, 0, 0, 28825, 216, }, /* 802 */
- { 69, 18, 12, 0, 0, 28825, 216, }, /* 803 */
- { 69, 22, 12, 0, 0, 28825, 62, }, /* 804 */
- { 69, 18, 12, 0, 0, 28825, 62, }, /* 805 */
- { 69, 26, 12, 0, 0, 28819, 54, }, /* 806 */
- { 69, 17, 12, 0, 0, 28819, 202, }, /* 807 */
- { 69, 22, 12, 0, 0, 28819, 206, }, /* 808 */
- { 69, 18, 12, 0, 0, 28819, 206, }, /* 809 */
- { 84, 12, 3, 0, 0, 26669, 96, }, /* 810 */
- { 18, 10, 3, 0, 0, 18432, 286, }, /* 811 */
- { 69, 17, 14, 0, 0, 28819, 288, }, /* 812 */
- { 69, 6, 12, 0, 0, 18525, 136, }, /* 813 */
- { 69, 26, 12, 0, 0, 28819, 68, }, /* 814 */
- { 23, 6, 12, 0, 0, 18432, 142, }, /* 815 */
- { 69, 7, 12, 0, 0, 18564, 82, }, /* 816 */
- { 69, 21, 14, 0, 0, 28804, 236, }, /* 817 */
- { 69, 26, 12, 0, 0, 28687, 68, }, /* 818 */
- { 20, 7, 12, 0, 0, 18432, 82, }, /* 819 */
- { 84, 12, 3, 0, 0, 26717, 96, }, /* 820 */
- { 69, 24, 12, 0, 0, 28765, 290, }, /* 821 */
- { 20, 6, 12, 0, 0, 18432, 136, }, /* 822 */
- { 69, 17, 12, 0, 0, 28765, 126, }, /* 823 */
- { 21, 7, 12, 0, 0, 18432, 82, }, /* 824 */
- { 69, 21, 12, 0, 0, 28825, 68, }, /* 825 */
- { 69, 6, 12, 0, 0, 18525, 94, }, /* 826 */
- { 21, 6, 12, 0, 0, 18432, 136, }, /* 827 */
- { 22, 7, 12, 0, 0, 18432, 82, }, /* 828 */
- { 18, 7, 12, 0, 0, 18432, 82, }, /* 829 */
- { 18, 7, 12, 0, 0, 18432, 168, }, /* 830 */
- { 69, 26, 12, 0, 0, 18447, 68, }, /* 831 */
- { 69, 15, 12, 0, 0, 18447, 68, }, /* 832 */
- { 18, 26, 12, 0, 0, 18432, 68, }, /* 833 */
- { 18, 26, 12, 0, 0, 28672, 68, }, /* 834 */
- { 69, 15, 12, 0, 0, 18432, 68, }, /* 835 */
- { 69, 26, 14, 0, 0, 18447, 236, }, /* 836 */
- { 21, 26, 12, 0, 0, 18432, 68, }, /* 837 */
- { 23, 7, 12, 0, 0, 18432, 292, }, /* 838 */
- { 24, 7, 12, 0, 0, 18432, 82, }, /* 839 */
- { 24, 6, 12, 0, 0, 18432, 136, }, /* 840 */
- { 24, 26, 12, 0, 0, 28672, 68, }, /* 841 */
- { 111, 7, 12, 0, 0, 18432, 82, }, /* 842 */
- { 111, 6, 12, 0, 0, 18432, 142, }, /* 843 */
- { 111, 21, 12, 0, 0, 18432, 106, }, /* 844 */
- { 111, 21, 12, 0, 0, 18432, 124, }, /* 845 */
- { 99, 7, 12, 0, 0, 18432, 82, }, /* 846 */
- { 99, 6, 12, 0, 0, 18432, 136, }, /* 847 */
- { 99, 21, 12, 0, 0, 28672, 106, }, /* 848 */
- { 99, 21, 12, 0, 0, 28672, 124, }, /* 849 */
- { 99, 13, 12, 0, 0, 18432, 138, }, /* 850 */
- { 2, 9, 12, 108, 1, 18432, 74, }, /* 851 */
- { 2, 5, 12, 108, -35267, 18432, 76, }, /* 852 */
- { 2, 7, 12, 0, 0, 18432, 82, }, /* 853 */
- { 2, 21, 12, 0, 0, 28672, 68, }, /* 854 */
- { 2, 12, 3, 0, 0, 26624, 96, }, /* 855 */
- { 2, 6, 12, 0, 0, 28672, 92, }, /* 856 */
- { 2, 6, 12, 0, 0, 18432, 88, }, /* 857 */
- { 112, 7, 12, 0, 0, 18432, 82, }, /* 858 */
- { 112, 14, 12, 0, 0, 18432, 82, }, /* 859 */
- { 112, 12, 3, 0, 0, 26624, 96, }, /* 860 */
- { 112, 21, 12, 0, 0, 18432, 68, }, /* 861 */
- { 112, 21, 12, 0, 0, 18432, 124, }, /* 862 */
- { 112, 21, 12, 0, 0, 18432, 106, }, /* 863 */
- { 69, 24, 12, 0, 0, 28762, 56, }, /* 864 */
- { 0, 9, 12, 0, -35332, 18432, 74, }, /* 865 */
- { 69, 24, 12, 0, 0, 18432, 56, }, /* 866 */
- { 0, 9, 12, 0, -42280, 18432, 74, }, /* 867 */
- { 0, 5, 12, 0, 48, 18432, 76, }, /* 868 */
- { 0, 9, 12, 0, -42308, 18432, 74, }, /* 869 */
- { 0, 9, 12, 0, -42319, 18432, 74, }, /* 870 */
- { 0, 9, 12, 0, -42315, 18432, 74, }, /* 871 */
- { 0, 9, 12, 0, -42305, 18432, 74, }, /* 872 */
- { 0, 9, 12, 0, -42258, 18432, 74, }, /* 873 */
- { 0, 9, 12, 0, -42282, 18432, 74, }, /* 874 */
- { 0, 9, 12, 0, -42261, 18432, 74, }, /* 875 */
- { 0, 9, 12, 0, 928, 18432, 74, }, /* 876 */
- { 0, 9, 12, 0, -48, 18432, 74, }, /* 877 */
- { 0, 9, 12, 0, -42307, 18432, 74, }, /* 878 */
- { 0, 9, 12, 0, -35384, 18432, 74, }, /* 879 */
- { 0, 6, 12, 0, 0, 18432, 142, }, /* 880 */
+ { 74, 12, 3, 0, 0, 26624, 128, }, /* 459 */
+ { 74, 12, 3, 0, 0, 26624, 146, }, /* 460 */
+ { 69, 26, 12, 0, 0, 18432, 68, }, /* 461 */
+ { 16, 7, 12, 0, 0, 18432, 82, }, /* 462 */
+ { 16, 10, 12, 0, 0, 18432, 144, }, /* 463 */
+ { 16, 12, 3, 0, 0, 26624, 130, }, /* 464 */
+ { 16, 10, 5, 0, 0, 18432, 144, }, /* 465 */
+ { 16, 12, 3, 0, 0, 26624, 96, }, /* 466 */
+ { 16, 12, 3, 0, 0, 26624, 146, }, /* 467 */
+ { 16, 13, 12, 0, 0, 18549, 138, }, /* 468 */
+ { 16, 21, 12, 0, 0, 18432, 124, }, /* 469 */
+ { 16, 21, 12, 0, 0, 18432, 68, }, /* 470 */
+ { 16, 10, 12, 0, 0, 18432, 164, }, /* 471 */
+ { 16, 12, 3, 0, 0, 26624, 128, }, /* 472 */
+ { 16, 13, 12, 0, 0, 18432, 138, }, /* 473 */
+ { 16, 26, 12, 0, 0, 18432, 68, }, /* 474 */
+ { 17, 9, 12, 0, 7264, 18432, 74, }, /* 475 */
+ { 17, 5, 12, 0, 3008, 18432, 166, }, /* 476 */
+ { 69, 21, 12, 0, 0, 18510, 68, }, /* 477 */
+ { 17, 6, 12, 0, 0, 18432, 168, }, /* 478 */
+ { 18, 7, 6, 0, 0, 18432, 82, }, /* 479 */
+ { 18, 7, 6, 0, 0, 18432, 170, }, /* 480 */
+ { 18, 7, 7, 0, 0, 18432, 170, }, /* 481 */
+ { 18, 7, 7, 0, 0, 18432, 82, }, /* 482 */
+ { 18, 7, 8, 0, 0, 18432, 82, }, /* 483 */
+ { 75, 7, 12, 0, 0, 18432, 82, }, /* 484 */
+ { 75, 12, 3, 0, 0, 26624, 96, }, /* 485 */
+ { 75, 21, 12, 0, 0, 18432, 68, }, /* 486 */
+ { 75, 21, 12, 0, 0, 18432, 106, }, /* 487 */
+ { 75, 21, 12, 0, 0, 18432, 124, }, /* 488 */
+ { 75, 15, 12, 0, 0, 18432, 138, }, /* 489 */
+ { 75, 15, 12, 0, 0, 18432, 68, }, /* 490 */
+ { 75, 26, 12, 0, 0, 28672, 68, }, /* 491 */
+ { 76, 9, 12, 0, 38864, 18432, 172, }, /* 492 */
+ { 76, 9, 12, 0, 8, 18432, 172, }, /* 493 */
+ { 76, 5, 12, 0, -8, 18432, 70, }, /* 494 */
+ { 77, 17, 12, 0, 0, 28672, 126, }, /* 495 */
+ { 77, 7, 12, 0, 0, 18432, 82, }, /* 496 */
+ { 77, 26, 12, 0, 0, 18432, 68, }, /* 497 */
+ { 77, 21, 12, 0, 0, 18432, 124, }, /* 498 */
+ { 78, 29, 12, 0, 0, 45056, 52, }, /* 499 */
+ { 78, 7, 12, 0, 0, 18432, 82, }, /* 500 */
+ { 78, 22, 12, 0, 0, 28672, 158, }, /* 501 */
+ { 78, 18, 12, 0, 0, 28672, 158, }, /* 502 */
+ { 79, 7, 12, 0, 0, 18432, 82, }, /* 503 */
+ { 69, 21, 12, 0, 0, 18432, 106, }, /* 504 */
+ { 79, 14, 12, 0, 0, 18432, 82, }, /* 505 */
+ { 25, 7, 12, 0, 0, 18432, 82, }, /* 506 */
+ { 25, 12, 3, 0, 0, 26624, 130, }, /* 507 */
+ { 25, 12, 3, 0, 0, 26624, 146, }, /* 508 */
+ { 25, 10, 5, 0, 0, 18432, 174, }, /* 509 */
+ { 26, 7, 12, 0, 0, 18432, 82, }, /* 510 */
+ { 26, 12, 3, 0, 0, 26624, 130, }, /* 511 */
+ { 26, 10, 5, 0, 0, 18432, 176, }, /* 512 */
+ { 69, 21, 12, 0, 0, 18573, 124, }, /* 513 */
+ { 27, 7, 12, 0, 0, 18432, 82, }, /* 514 */
+ { 27, 12, 3, 0, 0, 26624, 130, }, /* 515 */
+ { 28, 7, 12, 0, 0, 18432, 82, }, /* 516 */
+ { 28, 12, 3, 0, 0, 26624, 130, }, /* 517 */
+ { 80, 7, 12, 0, 0, 18432, 82, }, /* 518 */
+ { 80, 7, 12, 0, 0, 18432, 140, }, /* 519 */
+ { 80, 12, 3, 0, 0, 26624, 100, }, /* 520 */
+ { 80, 10, 5, 0, 0, 18432, 144, }, /* 521 */
+ { 80, 12, 3, 0, 0, 26624, 130, }, /* 522 */
+ { 80, 12, 3, 0, 0, 26624, 96, }, /* 523 */
+ { 80, 12, 3, 0, 0, 26624, 146, }, /* 524 */
+ { 80, 21, 12, 0, 0, 18432, 106, }, /* 525 */
+ { 80, 6, 12, 0, 0, 18432, 142, }, /* 526 */
+ { 80, 21, 12, 0, 0, 18432, 68, }, /* 527 */
+ { 80, 23, 12, 0, 0, 14336, 68, }, /* 528 */
+ { 80, 13, 12, 0, 0, 18432, 138, }, /* 529 */
+ { 80, 15, 12, 0, 0, 28672, 68, }, /* 530 */
+ { 19, 21, 12, 0, 0, 28672, 68, }, /* 531 */
+ { 69, 21, 12, 0, 0, 28777, 106, }, /* 532 */
+ { 69, 21, 12, 0, 0, 28777, 124, }, /* 533 */
+ { 19, 21, 12, 0, 0, 28672, 106, }, /* 534 */
+ { 19, 17, 12, 0, 0, 28672, 126, }, /* 535 */
+ { 19, 21, 12, 0, 0, 28672, 124, }, /* 536 */
+ { 19, 21, 12, 0, 0, 28672, 178, }, /* 537 */
+ { 19, 12, 3, 0, 0, 26624, 180, }, /* 538 */
+ { 19, 1, 2, 0, 0, 6144, 66, }, /* 539 */
+ { 19, 13, 12, 0, 0, 18432, 138, }, /* 540 */
+ { 19, 7, 12, 0, 0, 18432, 82, }, /* 541 */
+ { 19, 6, 12, 0, 0, 18432, 136, }, /* 542 */
+ { 19, 12, 3, 0, 0, 26624, 182, }, /* 543 */
+ { 19, 12, 3, 0, 0, 26624, 130, }, /* 544 */
+ { 29, 7, 12, 0, 0, 18432, 82, }, /* 545 */
+ { 29, 12, 3, 0, 0, 26624, 130, }, /* 546 */
+ { 29, 10, 5, 0, 0, 18432, 144, }, /* 547 */
+ { 29, 12, 3, 0, 0, 26624, 96, }, /* 548 */
+ { 29, 26, 12, 0, 0, 28672, 68, }, /* 549 */
+ { 29, 21, 12, 0, 0, 28672, 124, }, /* 550 */
+ { 29, 13, 12, 0, 0, 18432, 138, }, /* 551 */
+ { 30, 7, 12, 0, 0, 18432, 82, }, /* 552 */
+ { 89, 7, 12, 0, 0, 18432, 82, }, /* 553 */
+ { 89, 7, 12, 0, 0, 18432, 156, }, /* 554 */
+ { 89, 13, 12, 0, 0, 18432, 138, }, /* 555 */
+ { 89, 15, 12, 0, 0, 18432, 138, }, /* 556 */
+ { 89, 26, 12, 0, 0, 28672, 68, }, /* 557 */
+ { 80, 26, 12, 0, 0, 28672, 68, }, /* 558 */
+ { 33, 7, 12, 0, 0, 18432, 82, }, /* 559 */
+ { 33, 12, 3, 0, 0, 26624, 130, }, /* 560 */
+ { 33, 10, 5, 0, 0, 18432, 144, }, /* 561 */
+ { 33, 21, 12, 0, 0, 18432, 68, }, /* 562 */
+ { 106, 7, 12, 0, 0, 18432, 82, }, /* 563 */
+ { 106, 10, 5, 0, 0, 18432, 144, }, /* 564 */
+ { 106, 12, 3, 0, 0, 26624, 130, }, /* 565 */
+ { 106, 12, 3, 0, 0, 26624, 184, }, /* 566 */
+ { 106, 10, 12, 0, 0, 18432, 144, }, /* 567 */
+ { 106, 12, 3, 0, 0, 26624, 96, }, /* 568 */
+ { 106, 13, 12, 0, 0, 18432, 138, }, /* 569 */
+ { 106, 21, 12, 0, 0, 18432, 68, }, /* 570 */
+ { 106, 6, 12, 0, 0, 18432, 136, }, /* 571 */
+ { 106, 21, 12, 0, 0, 18432, 124, }, /* 572 */
+ { 84, 11, 3, 0, 0, 26624, 186, }, /* 573 */
+ { 84, 12, 3, 0, 0, 26624, 130, }, /* 574 */
+ { 93, 12, 3, 0, 0, 26624, 130, }, /* 575 */
+ { 93, 10, 5, 0, 0, 18432, 144, }, /* 576 */
+ { 93, 7, 12, 0, 0, 18432, 82, }, /* 577 */
+ { 93, 12, 3, 0, 0, 26624, 96, }, /* 578 */
+ { 93, 10, 3, 0, 0, 18432, 148, }, /* 579 */
+ { 93, 10, 5, 0, 0, 18432, 174, }, /* 580 */
+ { 93, 13, 12, 0, 0, 18432, 138, }, /* 581 */
+ { 93, 21, 12, 0, 0, 18432, 124, }, /* 582 */
+ { 93, 21, 12, 0, 0, 18432, 68, }, /* 583 */
+ { 93, 21, 12, 0, 0, 18432, 106, }, /* 584 */
+ { 93, 26, 12, 0, 0, 18432, 68, }, /* 585 */
+ { 96, 12, 3, 0, 0, 26624, 130, }, /* 586 */
+ { 96, 10, 5, 0, 0, 18432, 144, }, /* 587 */
+ { 96, 7, 12, 0, 0, 18432, 82, }, /* 588 */
+ { 96, 10, 5, 0, 0, 18432, 174, }, /* 589 */
+ { 96, 12, 3, 0, 0, 26624, 146, }, /* 590 */
+ { 96, 13, 12, 0, 0, 18432, 138, }, /* 591 */
+ { 119, 7, 12, 0, 0, 18432, 82, }, /* 592 */
+ { 119, 12, 3, 0, 0, 26624, 102, }, /* 593 */
+ { 119, 10, 5, 0, 0, 18432, 144, }, /* 594 */
+ { 119, 12, 3, 0, 0, 26624, 130, }, /* 595 */
+ { 119, 10, 5, 0, 0, 18432, 176, }, /* 596 */
+ { 119, 21, 12, 0, 0, 18432, 68, }, /* 597 */
+ { 97, 7, 12, 0, 0, 18432, 82, }, /* 598 */
+ { 97, 10, 5, 0, 0, 18432, 144, }, /* 599 */
+ { 97, 12, 3, 0, 0, 26624, 130, }, /* 600 */
+ { 97, 12, 3, 0, 0, 26624, 188, }, /* 601 */
+ { 97, 12, 3, 0, 0, 26624, 96, }, /* 602 */
+ { 97, 21, 12, 0, 0, 18432, 124, }, /* 603 */
+ { 97, 21, 12, 0, 0, 18432, 106, }, /* 604 */
+ { 97, 13, 12, 0, 0, 18432, 138, }, /* 605 */
+ { 98, 13, 12, 0, 0, 18432, 138, }, /* 606 */
+ { 98, 7, 12, 0, 0, 18432, 82, }, /* 607 */
+ { 98, 6, 12, 0, 0, 18432, 92, }, /* 608 */
+ { 98, 6, 12, 0, 0, 18432, 94, }, /* 609 */
+ { 98, 21, 12, 0, 0, 18432, 124, }, /* 610 */
+ { 2, 5, 12, 63, -6222, 18432, 70, }, /* 611 */
+ { 2, 5, 12, 67, -6221, 18432, 70, }, /* 612 */
+ { 2, 5, 12, 71, -6212, 18432, 70, }, /* 613 */
+ { 2, 5, 12, 75, -6210, 18432, 70, }, /* 614 */
+ { 2, 5, 12, 79, -6210, 18432, 70, }, /* 615 */
+ { 2, 5, 12, 79, -6211, 18432, 70, }, /* 616 */
+ { 2, 5, 12, 84, -6204, 18432, 70, }, /* 617 */
+ { 2, 5, 12, 88, -6180, 18432, 70, }, /* 618 */
+ { 2, 5, 12, 108, 35267, 18432, 70, }, /* 619 */
+ { 17, 9, 12, 0, -3008, 18432, 74, }, /* 620 */
+ { 96, 21, 12, 0, 0, 18432, 68, }, /* 621 */
+ { 84, 12, 3, 0, 0, 26762, 96, }, /* 622 */
+ { 84, 12, 3, 0, 0, 26630, 96, }, /* 623 */
+ { 69, 21, 12, 0, 0, 18498, 190, }, /* 624 */
+ { 84, 12, 3, 0, 0, 26666, 96, }, /* 625 */
+ { 84, 12, 3, 0, 0, 26696, 96, }, /* 626 */
+ { 84, 12, 3, 0, 0, 26780, 96, }, /* 627 */
+ { 69, 10, 5, 0, 0, 18474, 160, }, /* 628 */
+ { 69, 7, 12, 0, 0, 18501, 82, }, /* 629 */
+ { 69, 7, 12, 0, 0, 18474, 82, }, /* 630 */
+ { 69, 7, 12, 0, 0, 18438, 82, }, /* 631 */
+ { 69, 7, 12, 0, 0, 18594, 82, }, /* 632 */
+ { 69, 7, 12, 0, 0, 18498, 82, }, /* 633 */
+ { 84, 12, 3, 0, 0, 26750, 96, }, /* 634 */
+ { 69, 10, 5, 0, 0, 18435, 160, }, /* 635 */
+ { 84, 12, 3, 0, 0, 26690, 96, }, /* 636 */
+ { 69, 7, 12, 0, 0, 18453, 82, }, /* 637 */
+ { 2, 5, 12, 0, 0, 18432, 60, }, /* 638 */
+ { 1, 6, 12, 0, 0, 18432, 88, }, /* 639 */
+ { 2, 6, 12, 0, 0, 18432, 168, }, /* 640 */
+ { 0, 5, 12, 0, 35332, 18432, 76, }, /* 641 */
+ { 0, 5, 12, 0, 3814, 18432, 76, }, /* 642 */
+ { 0, 5, 12, 0, 35384, 18432, 76, }, /* 643 */
+ { 0, 5, 12, 0, 0, 18432, 192, }, /* 644 */
+ { 0, 6, 12, 0, 0, 18432, 168, }, /* 645 */
+ { 0, 6, 12, 0, 0, 18432, 194, }, /* 646 */
+ { 1, 6, 12, 0, 0, 18432, 168, }, /* 647 */
+ { 84, 12, 3, 0, 0, 26636, 102, }, /* 648 */
+ { 84, 12, 3, 0, 0, 26687, 96, }, /* 649 */
+ { 84, 12, 3, 0, 0, 26648, 96, }, /* 650 */
+ { 0, 9, 12, 92, 1, 18432, 74, }, /* 651 */
+ { 0, 5, 12, 92, -1, 18432, 76, }, /* 652 */
+ { 0, 5, 12, 0, 0, 18432, 70, }, /* 653 */
+ { 0, 5, 12, 92, -58, 18432, 70, }, /* 654 */
+ { 0, 9, 12, 0, -7615, 18432, 74, }, /* 655 */
+ { 1, 5, 12, 0, 8, 18432, 76, }, /* 656 */
+ { 1, 9, 12, 0, -8, 18432, 74, }, /* 657 */
+ { 1, 5, 12, 0, 74, 18432, 76, }, /* 658 */
+ { 1, 5, 12, 0, 86, 18432, 76, }, /* 659 */
+ { 1, 5, 12, 0, 100, 18432, 76, }, /* 660 */
+ { 1, 5, 12, 0, 128, 18432, 76, }, /* 661 */
+ { 1, 5, 12, 0, 112, 18432, 76, }, /* 662 */
+ { 1, 5, 12, 0, 126, 18432, 76, }, /* 663 */
+ { 1, 5, 12, 0, 8, 18432, 70, }, /* 664 */
+ { 1, 8, 12, 0, -8, 18432, 86, }, /* 665 */
+ { 1, 5, 12, 0, 0, 18432, 70, }, /* 666 */
+ { 1, 5, 12, 0, 9, 18432, 70, }, /* 667 */
+ { 1, 9, 12, 0, -74, 18432, 74, }, /* 668 */
+ { 1, 8, 12, 0, -9, 18432, 86, }, /* 669 */
+ { 1, 5, 12, 21, -7173, 18432, 76, }, /* 670 */
+ { 1, 9, 12, 0, -86, 18432, 74, }, /* 671 */
+ { 1, 9, 12, 0, -100, 18432, 74, }, /* 672 */
+ { 1, 9, 12, 0, -112, 18432, 74, }, /* 673 */
+ { 1, 9, 12, 0, -128, 18432, 74, }, /* 674 */
+ { 1, 9, 12, 0, -126, 18432, 74, }, /* 675 */
+ { 69, 29, 12, 0, 0, 45056, 52, }, /* 676 */
+ { 84, 1, 3, 0, 0, 6144, 196, }, /* 677 */
+ { 84, 1, 13, 0, 0, 6144, 198, }, /* 678 */
+ { 69, 1, 2, 0, 0, 18432, 200, }, /* 679 */
+ { 69, 1, 2, 0, 0, 34816, 200, }, /* 680 */
+ { 69, 17, 12, 0, 0, 28672, 202, }, /* 681 */
+ { 69, 21, 12, 0, 0, 28672, 64, }, /* 682 */
+ { 69, 20, 12, 0, 0, 28672, 204, }, /* 683 */
+ { 69, 19, 12, 0, 0, 28672, 204, }, /* 684 */
+ { 69, 22, 12, 0, 0, 28672, 206, }, /* 685 */
+ { 69, 20, 12, 0, 0, 28672, 206, }, /* 686 */
+ { 69, 19, 12, 0, 0, 28672, 206, }, /* 687 */
+ { 69, 21, 12, 0, 0, 28672, 208, }, /* 688 */
+ { 69, 27, 2, 0, 0, 45056, 50, }, /* 689 */
+ { 69, 28, 2, 0, 0, 4096, 50, }, /* 690 */
+ { 69, 1, 2, 0, 0, 20480, 134, }, /* 691 */
+ { 69, 1, 2, 0, 0, 36864, 134, }, /* 692 */
+ { 69, 1, 2, 0, 0, 30720, 134, }, /* 693 */
+ { 69, 1, 2, 0, 0, 24576, 134, }, /* 694 */
+ { 69, 1, 2, 0, 0, 40960, 134, }, /* 695 */
+ { 69, 29, 12, 0, 0, 8291, 52, }, /* 696 */
+ { 69, 21, 12, 0, 0, 14336, 54, }, /* 697 */
+ { 69, 21, 12, 0, 0, 14336, 64, }, /* 698 */
+ { 69, 21, 14, 0, 0, 28672, 210, }, /* 699 */
+ { 69, 21, 12, 0, 0, 28672, 212, }, /* 700 */
+ { 69, 16, 12, 0, 0, 28672, 138, }, /* 701 */
+ { 69, 16, 12, 0, 0, 28672, 214, }, /* 702 */
+ { 69, 25, 12, 0, 0, 8192, 64, }, /* 703 */
+ { 69, 22, 12, 0, 0, 28672, 216, }, /* 704 */
+ { 69, 18, 12, 0, 0, 28672, 216, }, /* 705 */
+ { 69, 21, 12, 0, 0, 28672, 202, }, /* 706 */
+ { 69, 1, 2, 0, 0, 6144, 218, }, /* 707 */
+ { 68, 2, 2, 0, 0, 6144, 220, }, /* 708 */
+ { 69, 1, 2, 0, 0, 22528, 134, }, /* 709 */
+ { 69, 1, 2, 0, 0, 38912, 134, }, /* 710 */
+ { 69, 1, 2, 0, 0, 16384, 134, }, /* 711 */
+ { 69, 1, 2, 0, 0, 32768, 134, }, /* 712 */
+ { 69, 1, 2, 0, 0, 6144, 222, }, /* 713 */
+ { 69, 25, 12, 0, 0, 12288, 118, }, /* 714 */
+ { 69, 25, 12, 0, 0, 12288, 224, }, /* 715 */
+ { 69, 25, 12, 0, 0, 28672, 118, }, /* 716 */
+ { 69, 22, 12, 0, 0, 28672, 226, }, /* 717 */
+ { 69, 18, 12, 0, 0, 28672, 226, }, /* 718 */
+ { 68, 2, 12, 0, 0, 14336, 0, }, /* 719 */
+ { 84, 12, 3, 0, 0, 26624, 228, }, /* 720 */
+ { 84, 11, 3, 0, 0, 26624, 120, }, /* 721 */
+ { 84, 11, 3, 0, 0, 26624, 230, }, /* 722 */
+ { 84, 12, 3, 0, 0, 26753, 102, }, /* 723 */
+ { 69, 26, 12, 0, 0, 28672, 68, }, /* 724 */
+ { 69, 9, 12, 0, 0, 18432, 112, }, /* 725 */
+ { 69, 5, 12, 0, 0, 18432, 232, }, /* 726 */
+ { 69, 25, 12, 0, 0, 28672, 234, }, /* 727 */
+ { 69, 26, 14, 0, 0, 28672, 236, }, /* 728 */
+ { 1, 9, 12, 96, -7517, 18432, 74, }, /* 729 */
+ { 69, 26, 12, 0, 0, 28672, 118, }, /* 730 */
+ { 0, 9, 12, 100, 0, 18432, 74, }, /* 731 */
+ { 0, 9, 12, 104, -8262, 18432, 74, }, /* 732 */
+ { 69, 26, 12, 0, 0, 14336, 238, }, /* 733 */
+ { 0, 9, 12, 0, 28, 18432, 74, }, /* 734 */
+ { 69, 7, 12, 0, 0, 18432, 240, }, /* 735 */
+ { 69, 5, 14, 0, 0, 18432, 242, }, /* 736 */
+ { 69, 5, 12, 0, 0, 18432, 244, }, /* 737 */
+ { 0, 5, 12, 0, -28, 18432, 76, }, /* 738 */
+ { 0, 14, 12, 0, 16, 18432, 74, }, /* 739 */
+ { 0, 14, 12, 0, -16, 18432, 76, }, /* 740 */
+ { 0, 14, 12, 0, 0, 18432, 82, }, /* 741 */
+ { 69, 25, 14, 0, 0, 28672, 246, }, /* 742 */
+ { 69, 26, 14, 0, 0, 28672, 246, }, /* 743 */
+ { 69, 26, 12, 0, 0, 28672, 64, }, /* 744 */
+ { 69, 25, 12, 0, 0, 28672, 248, }, /* 745 */
+ { 69, 25, 12, 0, 0, 12288, 250, }, /* 746 */
+ { 69, 22, 12, 0, 0, 28672, 248, }, /* 747 */
+ { 69, 18, 12, 0, 0, 28672, 248, }, /* 748 */
+ { 69, 26, 14, 0, 0, 28672, 252, }, /* 749 */
+ { 69, 22, 12, 0, 0, 28672, 254, }, /* 750 */
+ { 69, 18, 12, 0, 0, 28672, 254, }, /* 751 */
+ { 69, 26, 12, 0, 0, 18432, 54, }, /* 752 */
+ { 69, 26, 14, 0, 0, 28672, 256, }, /* 753 */
+ { 68, 2, 12, 0, 0, 18432, 258, }, /* 754 */
+ { 69, 26, 12, 0, 26, 18432, 260, }, /* 755 */
+ { 69, 26, 14, 0, 26, 18432, 262, }, /* 756 */
+ { 69, 26, 12, 0, -26, 18432, 264, }, /* 757 */
+ { 69, 25, 14, 0, 0, 28672, 266, }, /* 758 */
+ { 69, 26, 14, 0, 0, 28672, 268, }, /* 759 */
+ { 69, 26, 14, 0, 0, 28672, 270, }, /* 760 */
+ { 69, 25, 14, 0, 0, 28672, 268, }, /* 761 */
+ { 69, 26, 14, 0, 0, 18432, 256, }, /* 762 */
+ { 69, 26, 14, 0, 0, 28672, 272, }, /* 763 */
+ { 88, 26, 12, 0, 0, 18432, 54, }, /* 764 */
+ { 69, 26, 12, 0, 0, 28672, 216, }, /* 765 */
+ { 35, 9, 12, 0, 48, 18432, 74, }, /* 766 */
+ { 35, 5, 12, 0, -48, 18432, 76, }, /* 767 */
+ { 0, 9, 12, 0, -10743, 18432, 74, }, /* 768 */
+ { 0, 9, 12, 0, -3814, 18432, 74, }, /* 769 */
+ { 0, 9, 12, 0, -10727, 18432, 74, }, /* 770 */
+ { 0, 5, 12, 0, -10795, 18432, 76, }, /* 771 */
+ { 0, 5, 12, 0, -10792, 18432, 76, }, /* 772 */
+ { 0, 9, 12, 0, -10780, 18432, 74, }, /* 773 */
+ { 0, 9, 12, 0, -10749, 18432, 74, }, /* 774 */
+ { 0, 9, 12, 0, -10783, 18432, 74, }, /* 775 */
+ { 0, 9, 12, 0, -10782, 18432, 74, }, /* 776 */
+ { 0, 9, 12, 0, -10815, 18432, 74, }, /* 777 */
+ { 34, 5, 12, 0, 0, 18432, 60, }, /* 778 */
+ { 34, 26, 12, 0, 0, 28672, 68, }, /* 779 */
+ { 34, 12, 3, 0, 0, 26624, 96, }, /* 780 */
+ { 34, 21, 12, 0, 0, 28672, 68, }, /* 781 */
+ { 34, 15, 12, 0, 0, 28672, 68, }, /* 782 */
+ { 17, 5, 12, 0, -7264, 18432, 76, }, /* 783 */
+ { 90, 7, 12, 0, 0, 18432, 82, }, /* 784 */
+ { 90, 6, 12, 0, 0, 18432, 142, }, /* 785 */
+ { 90, 21, 12, 0, 0, 18432, 68, }, /* 786 */
+ { 90, 12, 3, 0, 0, 26624, 184, }, /* 787 */
+ { 2, 12, 3, 0, 0, 26624, 130, }, /* 788 */
+ { 69, 20, 12, 0, 0, 28672, 216, }, /* 789 */
+ { 69, 19, 12, 0, 0, 28672, 216, }, /* 790 */
+ { 69, 6, 12, 0, 0, 28672, 274, }, /* 791 */
+ { 69, 21, 12, 0, 0, 28672, 276, }, /* 792 */
+ { 69, 21, 12, 0, 0, 28726, 54, }, /* 793 */
+ { 23, 26, 12, 0, 0, 28672, 278, }, /* 794 */
+ { 69, 26, 12, 0, 0, 28672, 280, }, /* 795 */
+ { 69, 26, 12, 0, 0, 28672, 282, }, /* 796 */
+ { 69, 21, 12, 0, 0, 28825, 276, }, /* 797 */
+ { 69, 21, 12, 0, 0, 28825, 212, }, /* 798 */
+ { 69, 21, 12, 0, 0, 28819, 54, }, /* 799 */
+ { 23, 6, 12, 0, 0, 18432, 136, }, /* 800 */
+ { 69, 7, 12, 0, 0, 18447, 284, }, /* 801 */
+ { 23, 14, 12, 0, 0, 18432, 284, }, /* 802 */
+ { 69, 22, 12, 0, 0, 28825, 216, }, /* 803 */
+ { 69, 18, 12, 0, 0, 28825, 216, }, /* 804 */
+ { 69, 22, 12, 0, 0, 28825, 62, }, /* 805 */
+ { 69, 18, 12, 0, 0, 28825, 62, }, /* 806 */
+ { 69, 26, 12, 0, 0, 28819, 54, }, /* 807 */
+ { 69, 17, 12, 0, 0, 28819, 202, }, /* 808 */
+ { 69, 22, 12, 0, 0, 28819, 206, }, /* 809 */
+ { 69, 18, 12, 0, 0, 28819, 206, }, /* 810 */
+ { 84, 12, 3, 0, 0, 26669, 96, }, /* 811 */
+ { 18, 10, 3, 0, 0, 18432, 286, }, /* 812 */
+ { 69, 17, 14, 0, 0, 28819, 288, }, /* 813 */
+ { 69, 6, 12, 0, 0, 18525, 136, }, /* 814 */
+ { 69, 26, 12, 0, 0, 28819, 68, }, /* 815 */
+ { 23, 6, 12, 0, 0, 18432, 142, }, /* 816 */
+ { 69, 7, 12, 0, 0, 18564, 82, }, /* 817 */
+ { 69, 21, 14, 0, 0, 28804, 236, }, /* 818 */
+ { 69, 26, 12, 0, 0, 28687, 68, }, /* 819 */
+ { 20, 7, 12, 0, 0, 18432, 82, }, /* 820 */
+ { 84, 12, 3, 0, 0, 26717, 96, }, /* 821 */
+ { 69, 24, 12, 0, 0, 28765, 290, }, /* 822 */
+ { 20, 6, 12, 0, 0, 18432, 136, }, /* 823 */
+ { 69, 17, 12, 0, 0, 28765, 126, }, /* 824 */
+ { 21, 7, 12, 0, 0, 18432, 82, }, /* 825 */
+ { 69, 21, 12, 0, 0, 28825, 68, }, /* 826 */
+ { 69, 6, 12, 0, 0, 18525, 94, }, /* 827 */
+ { 21, 6, 12, 0, 0, 18432, 136, }, /* 828 */
+ { 22, 7, 12, 0, 0, 18432, 82, }, /* 829 */
+ { 18, 7, 12, 0, 0, 18432, 82, }, /* 830 */
+ { 18, 7, 12, 0, 0, 18432, 170, }, /* 831 */
+ { 69, 26, 12, 0, 0, 18447, 68, }, /* 832 */
+ { 69, 15, 12, 0, 0, 18447, 68, }, /* 833 */
+ { 18, 26, 12, 0, 0, 18432, 68, }, /* 834 */
+ { 18, 26, 12, 0, 0, 28672, 68, }, /* 835 */
+ { 69, 15, 12, 0, 0, 18432, 68, }, /* 836 */
+ { 69, 26, 14, 0, 0, 18447, 236, }, /* 837 */
+ { 21, 26, 12, 0, 0, 18432, 68, }, /* 838 */
+ { 23, 7, 12, 0, 0, 18432, 292, }, /* 839 */
+ { 24, 7, 12, 0, 0, 18432, 82, }, /* 840 */
+ { 24, 6, 12, 0, 0, 18432, 136, }, /* 841 */
+ { 24, 26, 12, 0, 0, 28672, 68, }, /* 842 */
+ { 111, 7, 12, 0, 0, 18432, 82, }, /* 843 */
+ { 111, 6, 12, 0, 0, 18432, 142, }, /* 844 */
+ { 111, 21, 12, 0, 0, 18432, 106, }, /* 845 */
+ { 111, 21, 12, 0, 0, 18432, 124, }, /* 846 */
+ { 99, 7, 12, 0, 0, 18432, 82, }, /* 847 */
+ { 99, 6, 12, 0, 0, 18432, 136, }, /* 848 */
+ { 99, 21, 12, 0, 0, 28672, 106, }, /* 849 */
+ { 99, 21, 12, 0, 0, 28672, 124, }, /* 850 */
+ { 99, 13, 12, 0, 0, 18432, 138, }, /* 851 */
+ { 2, 9, 12, 108, 1, 18432, 74, }, /* 852 */
+ { 2, 5, 12, 108, -35267, 18432, 76, }, /* 853 */
+ { 2, 7, 12, 0, 0, 18432, 82, }, /* 854 */
+ { 2, 21, 12, 0, 0, 28672, 68, }, /* 855 */
+ { 2, 12, 3, 0, 0, 26624, 96, }, /* 856 */
+ { 2, 6, 12, 0, 0, 28672, 92, }, /* 857 */
+ { 2, 6, 12, 0, 0, 18432, 88, }, /* 858 */
+ { 112, 7, 12, 0, 0, 18432, 82, }, /* 859 */
+ { 112, 14, 12, 0, 0, 18432, 82, }, /* 860 */
+ { 112, 12, 3, 0, 0, 26624, 96, }, /* 861 */
+ { 112, 21, 12, 0, 0, 18432, 68, }, /* 862 */
+ { 112, 21, 12, 0, 0, 18432, 124, }, /* 863 */
+ { 112, 21, 12, 0, 0, 18432, 106, }, /* 864 */
+ { 69, 24, 12, 0, 0, 28762, 56, }, /* 865 */
+ { 0, 9, 12, 0, -35332, 18432, 74, }, /* 866 */
+ { 69, 24, 12, 0, 0, 18432, 56, }, /* 867 */
+ { 0, 9, 12, 0, -42280, 18432, 74, }, /* 868 */
+ { 0, 5, 12, 0, 48, 18432, 76, }, /* 869 */
+ { 0, 9, 12, 0, -42308, 18432, 74, }, /* 870 */
+ { 0, 9, 12, 0, -42319, 18432, 74, }, /* 871 */
+ { 0, 9, 12, 0, -42315, 18432, 74, }, /* 872 */
+ { 0, 9, 12, 0, -42305, 18432, 74, }, /* 873 */
+ { 0, 9, 12, 0, -42258, 18432, 74, }, /* 874 */
+ { 0, 9, 12, 0, -42282, 18432, 74, }, /* 875 */
+ { 0, 9, 12, 0, -42261, 18432, 74, }, /* 876 */
+ { 0, 9, 12, 0, 928, 18432, 74, }, /* 877 */
+ { 0, 9, 12, 0, -48, 18432, 74, }, /* 878 */
+ { 0, 9, 12, 0, -42307, 18432, 74, }, /* 879 */
+ { 0, 9, 12, 0, -35384, 18432, 74, }, /* 880 */
{ 36, 7, 12, 0, 0, 18432, 82, }, /* 881 */
{ 36, 12, 3, 0, 0, 26624, 130, }, /* 882 */
- { 36, 12, 3, 0, 0, 26624, 182, }, /* 883 */
+ { 36, 12, 3, 0, 0, 26624, 184, }, /* 883 */
{ 36, 10, 5, 0, 0, 18432, 144, }, /* 884 */
{ 36, 26, 12, 0, 0, 28672, 68, }, /* 885 */
{ 69, 15, 12, 0, 0, 18612, 68, }, /* 886 */
@@ -1331,18 +1331,18 @@ const ucd_record PRIV(ucd_records)[] = { /* 16908 bytes, record size 12 */
{ 39, 7, 12, 0, 0, 18432, 82, }, /* 903 */
{ 39, 12, 3, 0, 0, 26624, 130, }, /* 904 */
{ 39, 12, 3, 0, 0, 26624, 96, }, /* 905 */
- { 69, 21, 12, 0, 0, 18567, 188, }, /* 906 */
+ { 69, 21, 12, 0, 0, 18567, 190, }, /* 906 */
{ 39, 21, 12, 0, 0, 18432, 124, }, /* 907 */
{ 101, 7, 12, 0, 0, 18432, 82, }, /* 908 */
{ 101, 12, 3, 0, 0, 26624, 130, }, /* 909 */
{ 101, 10, 5, 0, 0, 18432, 144, }, /* 910 */
- { 101, 10, 5, 0, 0, 18432, 172, }, /* 911 */
+ { 101, 10, 5, 0, 0, 18432, 174, }, /* 911 */
{ 101, 21, 12, 0, 0, 18432, 68, }, /* 912 */
{ 40, 12, 3, 0, 0, 26624, 130, }, /* 913 */
{ 40, 10, 5, 0, 0, 18432, 144, }, /* 914 */
{ 40, 7, 12, 0, 0, 18432, 82, }, /* 915 */
{ 40, 12, 3, 0, 0, 26624, 96, }, /* 916 */
- { 40, 10, 5, 0, 0, 18432, 172, }, /* 917 */
+ { 40, 10, 5, 0, 0, 18432, 174, }, /* 917 */
{ 40, 21, 12, 0, 0, 18432, 68, }, /* 918 */
{ 40, 21, 12, 0, 0, 18432, 106, }, /* 919 */
{ 40, 21, 12, 0, 0, 18432, 124, }, /* 920 */
@@ -1370,470 +1370,484 @@ const ucd_record PRIV(ucd_records)[] = { /* 16908 bytes, record size 12 */
{ 113, 6, 12, 0, 0, 18432, 136, }, /* 942 */
{ 113, 12, 3, 0, 0, 26624, 146, }, /* 943 */
{ 0, 5, 12, 0, -928, 18432, 76, }, /* 944 */
- { 0, 6, 12, 0, 0, 18432, 92, }, /* 945 */
- { 76, 5, 12, 0, -38864, 18432, 70, }, /* 946 */
- { 113, 10, 5, 0, 0, 18432, 160, }, /* 947 */
- { 113, 13, 12, 0, 0, 18432, 138, }, /* 948 */
- { 18, 7, 9, 0, 0, 18432, 82, }, /* 949 */
- { 18, 7, 10, 0, 0, 18432, 82, }, /* 950 */
- { 68, 4, 12, 0, 0, 18432, 0, }, /* 951 */
- { 68, 3, 12, 0, 0, 18432, 0, }, /* 952 */
- { 23, 7, 12, 0, 0, 18432, 284, }, /* 953 */
- { 71, 25, 12, 0, 0, 12288, 118, }, /* 954 */
- { 3, 7, 12, 0, 0, 0, 296, }, /* 955 */
- { 69, 18, 12, 0, 0, 28705, 54, }, /* 956 */
- { 69, 22, 12, 0, 0, 28705, 54, }, /* 957 */
- { 68, 2, 12, 0, 0, 6144, 298, }, /* 958 */
- { 3, 7, 12, 0, 0, 39, 82, }, /* 959 */
- { 3, 26, 12, 0, 0, 28711, 68, }, /* 960 */
- { 84, 12, 3, 0, 0, 26624, 178, }, /* 961 */
- { 84, 12, 3, 0, 0, 26624, 300, }, /* 962 */
- { 69, 21, 12, 0, 0, 28672, 68, }, /* 963 */
- { 69, 21, 12, 0, 0, 28672, 122, }, /* 964 */
- { 69, 22, 12, 0, 0, 28672, 68, }, /* 965 */
- { 69, 18, 12, 0, 0, 28672, 68, }, /* 966 */
- { 69, 17, 12, 0, 0, 28672, 126, }, /* 967 */
- { 69, 22, 12, 0, 0, 28672, 302, }, /* 968 */
- { 69, 18, 12, 0, 0, 28672, 302, }, /* 969 */
- { 69, 21, 12, 0, 0, 8192, 106, }, /* 970 */
- { 69, 21, 12, 0, 0, 8192, 304, }, /* 971 */
- { 69, 21, 12, 0, 0, 8192, 306, }, /* 972 */
- { 69, 21, 12, 0, 0, 28672, 124, }, /* 973 */
- { 69, 22, 12, 0, 0, 28672, 158, }, /* 974 */
- { 69, 18, 12, 0, 0, 28672, 158, }, /* 975 */
- { 69, 21, 12, 0, 0, 14336, 68, }, /* 976 */
- { 69, 21, 12, 0, 0, 28672, 118, }, /* 977 */
- { 69, 17, 12, 0, 0, 12288, 224, }, /* 978 */
- { 69, 25, 12, 0, 0, 28672, 226, }, /* 979 */
- { 69, 21, 12, 0, 0, 28672, 302, }, /* 980 */
- { 69, 21, 12, 0, 0, 28672, 308, }, /* 981 */
- { 69, 17, 12, 0, 0, 12288, 126, }, /* 982 */
- { 69, 21, 12, 0, 0, 8192, 68, }, /* 983 */
- { 69, 13, 12, 0, 0, 10240, 310, }, /* 984 */
- { 0, 9, 12, 0, 32, 18432, 312, }, /* 985 */
- { 69, 24, 12, 0, 0, 28672, 314, }, /* 986 */
- { 0, 5, 12, 0, -32, 18432, 316, }, /* 987 */
- { 69, 21, 12, 0, 0, 28825, 124, }, /* 988 */
- { 69, 22, 12, 0, 0, 28825, 318, }, /* 989 */
- { 69, 18, 12, 0, 0, 28825, 318, }, /* 990 */
- { 69, 21, 12, 0, 0, 28825, 106, }, /* 991 */
- { 69, 6, 3, 0, 0, 18525, 320, }, /* 992 */
- { 69, 1, 2, 0, 0, 28672, 322, }, /* 993 */
- { 31, 7, 12, 0, 0, 18432, 82, }, /* 994 */
- { 69, 21, 12, 0, 0, 18552, 68, }, /* 995 */
- { 69, 21, 12, 0, 0, 28792, 68, }, /* 996 */
- { 69, 21, 12, 0, 0, 18483, 68, }, /* 997 */
- { 69, 15, 12, 0, 0, 18555, 68, }, /* 998 */
- { 69, 26, 12, 0, 0, 18483, 68, }, /* 999 */
- { 1, 14, 12, 0, 0, 28672, 82, }, /* 1000 */
- { 1, 15, 12, 0, 0, 28672, 68, }, /* 1001 */
- { 1, 26, 12, 0, 0, 28672, 68, }, /* 1002 */
- { 1, 26, 12, 0, 0, 18432, 68, }, /* 1003 */
- { 102, 7, 12, 0, 0, 18432, 82, }, /* 1004 */
- { 103, 7, 12, 0, 0, 18432, 82, }, /* 1005 */
- { 84, 12, 3, 0, 0, 26651, 96, }, /* 1006 */
- { 69, 15, 12, 0, 0, 10267, 68, }, /* 1007 */
- { 81, 7, 12, 0, 0, 18432, 82, }, /* 1008 */
- { 81, 15, 12, 0, 0, 18432, 68, }, /* 1009 */
- { 82, 7, 12, 0, 0, 18432, 82, }, /* 1010 */
- { 82, 14, 12, 0, 0, 18432, 82, }, /* 1011 */
- { 53, 7, 12, 0, 0, 18432, 82, }, /* 1012 */
- { 53, 12, 3, 0, 0, 26624, 130, }, /* 1013 */
- { 85, 7, 12, 0, 0, 18432, 82, }, /* 1014 */
- { 85, 21, 12, 0, 0, 18432, 106, }, /* 1015 */
- { 91, 7, 12, 0, 0, 18432, 82, }, /* 1016 */
- { 91, 21, 12, 0, 0, 18432, 106, }, /* 1017 */
- { 91, 14, 12, 0, 0, 18432, 82, }, /* 1018 */
- { 83, 9, 12, 0, 40, 18432, 74, }, /* 1019 */
- { 83, 5, 12, 0, -40, 18432, 76, }, /* 1020 */
- { 86, 7, 12, 0, 0, 18432, 82, }, /* 1021 */
- { 87, 7, 12, 0, 0, 18432, 82, }, /* 1022 */
- { 87, 13, 12, 0, 0, 18432, 138, }, /* 1023 */
- { 145, 9, 12, 0, 40, 18432, 74, }, /* 1024 */
- { 145, 5, 12, 0, -40, 18432, 76, }, /* 1025 */
- { 127, 7, 12, 0, 0, 18432, 82, }, /* 1026 */
- { 125, 7, 12, 0, 0, 18432, 82, }, /* 1027 */
- { 125, 21, 12, 0, 0, 18432, 68, }, /* 1028 */
- { 161, 9, 12, 0, 39, 18432, 74, }, /* 1029 */
- { 161, 5, 12, 0, -39, 18432, 76, }, /* 1030 */
- { 49, 7, 12, 0, 0, 18432, 82, }, /* 1031 */
- { 0, 6, 12, 0, 0, 18432, 94, }, /* 1032 */
- { 32, 7, 12, 0, 0, 34816, 82, }, /* 1033 */
- { 114, 7, 12, 0, 0, 34816, 82, }, /* 1034 */
- { 114, 21, 12, 0, 0, 34816, 106, }, /* 1035 */
- { 114, 15, 12, 0, 0, 34816, 68, }, /* 1036 */
- { 133, 7, 12, 0, 0, 34816, 82, }, /* 1037 */
- { 133, 26, 12, 0, 0, 34816, 68, }, /* 1038 */
- { 133, 15, 12, 0, 0, 34816, 68, }, /* 1039 */
- { 132, 7, 12, 0, 0, 34816, 82, }, /* 1040 */
- { 132, 15, 12, 0, 0, 34816, 68, }, /* 1041 */
- { 139, 7, 12, 0, 0, 34816, 82, }, /* 1042 */
- { 139, 15, 12, 0, 0, 34816, 68, }, /* 1043 */
- { 95, 7, 12, 0, 0, 34816, 82, }, /* 1044 */
- { 95, 15, 12, 0, 0, 34816, 68, }, /* 1045 */
- { 95, 21, 12, 0, 0, 28672, 106, }, /* 1046 */
- { 104, 7, 12, 0, 0, 34816, 82, }, /* 1047 */
- { 104, 21, 12, 0, 0, 34816, 68, }, /* 1048 */
- { 122, 7, 12, 0, 0, 34816, 82, }, /* 1049 */
- { 121, 7, 12, 0, 0, 34816, 82, }, /* 1050 */
- { 121, 15, 12, 0, 0, 34816, 68, }, /* 1051 */
- { 92, 7, 12, 0, 0, 34816, 82, }, /* 1052 */
- { 92, 12, 3, 0, 0, 26624, 130, }, /* 1053 */
- { 92, 12, 3, 0, 0, 26624, 102, }, /* 1054 */
- { 92, 12, 3, 0, 0, 26624, 182, }, /* 1055 */
- { 92, 15, 12, 0, 0, 34816, 68, }, /* 1056 */
- { 92, 21, 12, 0, 0, 34816, 68, }, /* 1057 */
- { 92, 21, 12, 0, 0, 34816, 124, }, /* 1058 */
- { 115, 7, 12, 0, 0, 34816, 82, }, /* 1059 */
- { 115, 15, 12, 0, 0, 34816, 68, }, /* 1060 */
- { 115, 21, 12, 0, 0, 34816, 68, }, /* 1061 */
- { 131, 7, 12, 0, 0, 34816, 82, }, /* 1062 */
- { 131, 15, 12, 0, 0, 34816, 68, }, /* 1063 */
- { 51, 7, 12, 0, 0, 34816, 82, }, /* 1064 */
- { 51, 26, 12, 0, 0, 34816, 68, }, /* 1065 */
- { 51, 12, 3, 0, 0, 26624, 96, }, /* 1066 */
- { 51, 15, 12, 0, 0, 34816, 68, }, /* 1067 */
- { 51, 21, 12, 0, 0, 34816, 106, }, /* 1068 */
- { 51, 21, 12, 0, 0, 34918, 106, }, /* 1069 */
- { 51, 21, 12, 0, 0, 34816, 68, }, /* 1070 */
- { 108, 7, 12, 0, 0, 34816, 82, }, /* 1071 */
- { 108, 21, 12, 0, 0, 28672, 68, }, /* 1072 */
- { 108, 21, 12, 0, 0, 28672, 106, }, /* 1073 */
- { 116, 7, 12, 0, 0, 34816, 82, }, /* 1074 */
- { 116, 15, 12, 0, 0, 34816, 68, }, /* 1075 */
- { 117, 7, 12, 0, 0, 34816, 82, }, /* 1076 */
- { 117, 15, 12, 0, 0, 34816, 68, }, /* 1077 */
- { 54, 7, 12, 0, 0, 34816, 82, }, /* 1078 */
- { 54, 21, 12, 0, 0, 34816, 106, }, /* 1079 */
- { 54, 15, 12, 0, 0, 34816, 68, }, /* 1080 */
- { 118, 7, 12, 0, 0, 34816, 82, }, /* 1081 */
- { 140, 9, 12, 0, 64, 34816, 74, }, /* 1082 */
- { 140, 5, 12, 0, -64, 34816, 76, }, /* 1083 */
- { 140, 15, 12, 0, 0, 34816, 68, }, /* 1084 */
- { 62, 7, 12, 0, 0, 0, 82, }, /* 1085 */
- { 62, 7, 12, 0, 0, 0, 294, }, /* 1086 */
- { 62, 12, 3, 0, 0, 26624, 128, }, /* 1087 */
- { 62, 13, 12, 0, 0, 2048, 138, }, /* 1088 */
- { 3, 15, 12, 0, 0, 2048, 68, }, /* 1089 */
- { 65, 7, 12, 0, 0, 34816, 82, }, /* 1090 */
- { 65, 12, 3, 0, 0, 26624, 130, }, /* 1091 */
- { 65, 17, 12, 0, 0, 34816, 126, }, /* 1092 */
- { 152, 7, 12, 0, 0, 34816, 82, }, /* 1093 */
- { 152, 15, 12, 0, 0, 34816, 68, }, /* 1094 */
- { 63, 7, 12, 0, 0, 0, 82, }, /* 1095 */
- { 63, 12, 3, 0, 0, 26624, 96, }, /* 1096 */
- { 63, 15, 12, 0, 0, 0, 68, }, /* 1097 */
- { 63, 21, 12, 0, 0, 0, 124, }, /* 1098 */
- { 67, 7, 12, 0, 0, 34816, 82, }, /* 1099 */
- { 67, 12, 3, 0, 0, 26624, 96, }, /* 1100 */
- { 67, 21, 12, 0, 0, 34816, 124, }, /* 1101 */
- { 156, 7, 12, 0, 0, 34816, 82, }, /* 1102 */
- { 156, 15, 12, 0, 0, 34816, 68, }, /* 1103 */
- { 153, 7, 12, 0, 0, 34816, 82, }, /* 1104 */
- { 120, 10, 5, 0, 0, 18432, 144, }, /* 1105 */
- { 120, 12, 3, 0, 0, 26624, 130, }, /* 1106 */
- { 120, 7, 12, 0, 0, 18432, 82, }, /* 1107 */
- { 120, 12, 3, 0, 0, 26624, 146, }, /* 1108 */
- { 120, 21, 12, 0, 0, 18432, 124, }, /* 1109 */
- { 120, 21, 12, 0, 0, 18432, 106, }, /* 1110 */
- { 120, 15, 12, 0, 0, 28672, 68, }, /* 1111 */
- { 120, 13, 12, 0, 0, 18432, 138, }, /* 1112 */
- { 120, 12, 3, 0, 0, 26624, 182, }, /* 1113 */
- { 41, 12, 3, 0, 0, 26624, 102, }, /* 1114 */
- { 41, 10, 5, 0, 0, 18432, 144, }, /* 1115 */
- { 41, 7, 12, 0, 0, 18432, 82, }, /* 1116 */
- { 41, 12, 3, 0, 0, 26624, 130, }, /* 1117 */
- { 41, 12, 3, 0, 0, 26624, 146, }, /* 1118 */
- { 41, 12, 3, 0, 0, 26624, 96, }, /* 1119 */
- { 41, 21, 12, 0, 0, 18432, 68, }, /* 1120 */
- { 41, 1, 4, 0, 0, 18432, 132, }, /* 1121 */
- { 41, 21, 12, 0, 0, 18432, 124, }, /* 1122 */
- { 124, 7, 12, 0, 0, 18432, 82, }, /* 1123 */
- { 124, 13, 12, 0, 0, 18432, 138, }, /* 1124 */
- { 43, 12, 3, 0, 0, 26624, 130, }, /* 1125 */
- { 43, 7, 12, 0, 0, 18432, 82, }, /* 1126 */
- { 43, 10, 5, 0, 0, 18432, 144, }, /* 1127 */
- { 43, 12, 3, 0, 0, 26624, 146, }, /* 1128 */
- { 43, 13, 12, 0, 0, 18432, 138, }, /* 1129 */
- { 43, 21, 12, 0, 0, 18432, 68, }, /* 1130 */
- { 43, 21, 12, 0, 0, 18432, 124, }, /* 1131 */
- { 50, 7, 12, 0, 0, 18432, 82, }, /* 1132 */
- { 50, 12, 3, 0, 0, 26624, 96, }, /* 1133 */
- { 50, 21, 12, 0, 0, 18432, 68, }, /* 1134 */
- { 44, 12, 3, 0, 0, 26624, 130, }, /* 1135 */
- { 44, 10, 5, 0, 0, 18432, 144, }, /* 1136 */
- { 44, 7, 12, 0, 0, 18432, 82, }, /* 1137 */
- { 44, 10, 5, 0, 0, 18432, 172, }, /* 1138 */
- { 44, 7, 4, 0, 0, 18432, 82, }, /* 1139 */
- { 44, 21, 12, 0, 0, 18432, 124, }, /* 1140 */
- { 44, 21, 12, 0, 0, 18432, 68, }, /* 1141 */
- { 44, 12, 3, 0, 0, 26624, 102, }, /* 1142 */
- { 44, 12, 3, 0, 0, 26624, 96, }, /* 1143 */
- { 44, 13, 12, 0, 0, 18432, 138, }, /* 1144 */
- { 15, 15, 12, 0, 0, 18432, 68, }, /* 1145 */
- { 48, 7, 12, 0, 0, 18432, 82, }, /* 1146 */
- { 48, 10, 5, 0, 0, 18432, 144, }, /* 1147 */
- { 48, 12, 3, 0, 0, 26624, 130, }, /* 1148 */
- { 48, 10, 5, 0, 0, 18432, 172, }, /* 1149 */
- { 48, 12, 3, 0, 0, 26624, 96, }, /* 1150 */
- { 48, 21, 12, 0, 0, 18432, 124, }, /* 1151 */
- { 48, 21, 12, 0, 0, 18432, 106, }, /* 1152 */
- { 48, 21, 12, 0, 0, 18432, 68, }, /* 1153 */
- { 57, 7, 12, 0, 0, 18432, 82, }, /* 1154 */
- { 57, 21, 12, 0, 0, 18432, 124, }, /* 1155 */
- { 55, 7, 12, 0, 0, 18432, 82, }, /* 1156 */
- { 55, 12, 3, 0, 0, 26624, 130, }, /* 1157 */
- { 55, 10, 5, 0, 0, 18432, 144, }, /* 1158 */
- { 55, 12, 3, 0, 0, 26624, 96, }, /* 1159 */
- { 55, 12, 3, 0, 0, 26624, 146, }, /* 1160 */
- { 55, 13, 12, 0, 0, 18432, 138, }, /* 1161 */
- { 47, 12, 3, 0, 0, 26624, 130, }, /* 1162 */
- { 47, 12, 3, 0, 0, 26705, 130, }, /* 1163 */
- { 47, 10, 5, 0, 0, 18432, 144, }, /* 1164 */
- { 47, 10, 5, 0, 0, 18513, 144, }, /* 1165 */
- { 47, 7, 12, 0, 0, 18432, 82, }, /* 1166 */
- { 84, 12, 3, 0, 0, 26705, 102, }, /* 1167 */
- { 47, 12, 3, 0, 0, 26705, 96, }, /* 1168 */
- { 47, 10, 3, 0, 0, 18432, 148, }, /* 1169 */
- { 47, 10, 5, 0, 0, 18432, 172, }, /* 1170 */
- { 47, 7, 12, 0, 0, 18432, 324, }, /* 1171 */
- { 47, 12, 3, 0, 0, 26624, 96, }, /* 1172 */
- { 144, 7, 12, 0, 0, 18432, 82, }, /* 1173 */
- { 144, 10, 5, 0, 0, 18432, 144, }, /* 1174 */
- { 144, 12, 3, 0, 0, 26624, 130, }, /* 1175 */
- { 144, 12, 3, 0, 0, 26624, 146, }, /* 1176 */
- { 144, 12, 3, 0, 0, 26624, 96, }, /* 1177 */
- { 144, 21, 12, 0, 0, 18432, 124, }, /* 1178 */
- { 144, 21, 12, 0, 0, 18432, 106, }, /* 1179 */
- { 144, 21, 12, 0, 0, 18432, 68, }, /* 1180 */
- { 144, 13, 12, 0, 0, 18432, 138, }, /* 1181 */
- { 144, 12, 3, 0, 0, 26624, 102, }, /* 1182 */
- { 56, 7, 12, 0, 0, 18432, 82, }, /* 1183 */
- { 56, 10, 3, 0, 0, 18432, 148, }, /* 1184 */
- { 56, 10, 5, 0, 0, 18432, 144, }, /* 1185 */
- { 56, 12, 3, 0, 0, 26624, 130, }, /* 1186 */
- { 56, 12, 3, 0, 0, 26624, 146, }, /* 1187 */
- { 56, 12, 3, 0, 0, 26624, 96, }, /* 1188 */
- { 56, 21, 12, 0, 0, 18432, 68, }, /* 1189 */
- { 56, 13, 12, 0, 0, 18432, 138, }, /* 1190 */
- { 135, 7, 12, 0, 0, 18432, 82, }, /* 1191 */
- { 135, 10, 3, 0, 0, 18432, 148, }, /* 1192 */
- { 135, 10, 5, 0, 0, 18432, 144, }, /* 1193 */
- { 135, 12, 3, 0, 0, 26624, 130, }, /* 1194 */
- { 135, 12, 3, 0, 0, 26624, 146, }, /* 1195 */
- { 135, 12, 3, 0, 0, 26624, 96, }, /* 1196 */
- { 135, 21, 12, 0, 0, 18432, 68, }, /* 1197 */
- { 135, 21, 12, 0, 0, 18432, 124, }, /* 1198 */
- { 135, 21, 12, 0, 0, 18432, 106, }, /* 1199 */
- { 135, 21, 12, 0, 0, 18432, 176, }, /* 1200 */
- { 52, 7, 12, 0, 0, 18432, 82, }, /* 1201 */
- { 52, 10, 5, 0, 0, 18432, 144, }, /* 1202 */
- { 52, 12, 3, 0, 0, 26624, 130, }, /* 1203 */
- { 52, 12, 3, 0, 0, 26624, 146, }, /* 1204 */
- { 52, 21, 12, 0, 0, 18432, 124, }, /* 1205 */
- { 52, 21, 12, 0, 0, 18432, 68, }, /* 1206 */
- { 52, 13, 12, 0, 0, 18432, 138, }, /* 1207 */
- { 45, 7, 12, 0, 0, 18432, 82, }, /* 1208 */
- { 45, 12, 3, 0, 0, 26624, 130, }, /* 1209 */
- { 45, 10, 5, 0, 0, 18432, 144, }, /* 1210 */
- { 45, 10, 5, 0, 0, 18432, 172, }, /* 1211 */
- { 45, 12, 3, 0, 0, 26624, 96, }, /* 1212 */
- { 45, 21, 12, 0, 0, 18432, 68, }, /* 1213 */
- { 45, 13, 12, 0, 0, 18432, 138, }, /* 1214 */
- { 137, 7, 12, 0, 0, 18432, 82, }, /* 1215 */
- { 137, 12, 3, 0, 0, 26624, 130, }, /* 1216 */
- { 137, 10, 12, 0, 0, 18432, 144, }, /* 1217 */
- { 137, 10, 5, 0, 0, 18432, 144, }, /* 1218 */
- { 137, 12, 3, 0, 0, 26624, 146, }, /* 1219 */
- { 137, 13, 12, 0, 0, 18432, 138, }, /* 1220 */
- { 137, 15, 12, 0, 0, 18432, 68, }, /* 1221 */
- { 137, 21, 12, 0, 0, 18432, 124, }, /* 1222 */
- { 137, 26, 12, 0, 0, 18432, 68, }, /* 1223 */
- { 60, 7, 12, 0, 0, 18432, 82, }, /* 1224 */
- { 60, 10, 5, 0, 0, 18432, 144, }, /* 1225 */
- { 60, 12, 3, 0, 0, 26624, 130, }, /* 1226 */
- { 60, 12, 3, 0, 0, 26624, 146, }, /* 1227 */
- { 60, 12, 3, 0, 0, 26624, 96, }, /* 1228 */
- { 60, 21, 12, 0, 0, 18432, 68, }, /* 1229 */
- { 136, 9, 12, 0, 32, 18432, 74, }, /* 1230 */
- { 136, 5, 12, 0, -32, 18432, 76, }, /* 1231 */
- { 136, 13, 12, 0, 0, 18432, 138, }, /* 1232 */
- { 136, 15, 12, 0, 0, 18432, 68, }, /* 1233 */
- { 136, 7, 12, 0, 0, 18432, 82, }, /* 1234 */
- { 157, 7, 12, 0, 0, 18432, 82, }, /* 1235 */
- { 157, 10, 3, 0, 0, 18432, 148, }, /* 1236 */
- { 157, 10, 5, 0, 0, 18432, 144, }, /* 1237 */
- { 157, 12, 3, 0, 0, 26624, 130, }, /* 1238 */
- { 157, 10, 5, 0, 0, 18432, 172, }, /* 1239 */
- { 157, 12, 3, 0, 0, 26624, 146, }, /* 1240 */
- { 157, 7, 4, 0, 0, 18432, 82, }, /* 1241 */
- { 157, 12, 3, 0, 0, 26624, 96, }, /* 1242 */
- { 157, 21, 12, 0, 0, 18432, 124, }, /* 1243 */
- { 157, 21, 12, 0, 0, 18432, 68, }, /* 1244 */
- { 157, 13, 12, 0, 0, 18432, 138, }, /* 1245 */
- { 64, 7, 12, 0, 0, 18432, 82, }, /* 1246 */
- { 64, 10, 5, 0, 0, 18432, 144, }, /* 1247 */
- { 64, 12, 3, 0, 0, 26624, 130, }, /* 1248 */
- { 64, 12, 3, 0, 0, 26624, 146, }, /* 1249 */
- { 64, 21, 12, 0, 0, 18432, 68, }, /* 1250 */
- { 149, 7, 12, 0, 0, 18432, 82, }, /* 1251 */
- { 149, 12, 3, 0, 0, 26624, 130, }, /* 1252 */
- { 149, 12, 3, 0, 0, 18432, 130, }, /* 1253 */
- { 149, 12, 3, 0, 0, 26624, 102, }, /* 1254 */
- { 149, 12, 3, 0, 0, 26624, 146, }, /* 1255 */
- { 149, 10, 5, 0, 0, 18432, 144, }, /* 1256 */
- { 149, 7, 4, 0, 0, 18432, 82, }, /* 1257 */
- { 149, 21, 12, 0, 0, 18432, 68, }, /* 1258 */
- { 149, 21, 12, 0, 0, 18432, 124, }, /* 1259 */
- { 148, 7, 12, 0, 0, 18432, 82, }, /* 1260 */
- { 148, 12, 3, 0, 0, 26624, 130, }, /* 1261 */
- { 148, 10, 5, 0, 0, 18432, 144, }, /* 1262 */
- { 148, 7, 4, 0, 0, 18432, 82, }, /* 1263 */
- { 148, 12, 3, 0, 0, 26624, 326, }, /* 1264 */
- { 148, 12, 3, 0, 0, 26624, 146, }, /* 1265 */
- { 148, 21, 12, 0, 0, 18432, 68, }, /* 1266 */
- { 148, 21, 12, 0, 0, 18432, 124, }, /* 1267 */
- { 148, 21, 12, 0, 0, 18432, 106, }, /* 1268 */
- { 134, 7, 12, 0, 0, 18432, 82, }, /* 1269 */
- { 142, 7, 12, 0, 0, 18432, 82, }, /* 1270 */
- { 142, 10, 5, 0, 0, 18432, 144, }, /* 1271 */
- { 142, 12, 3, 0, 0, 26624, 130, }, /* 1272 */
- { 142, 12, 3, 0, 0, 18432, 146, }, /* 1273 */
- { 142, 21, 12, 0, 0, 18432, 124, }, /* 1274 */
- { 142, 21, 12, 0, 0, 18432, 106, }, /* 1275 */
- { 142, 21, 12, 0, 0, 18432, 68, }, /* 1276 */
- { 142, 13, 12, 0, 0, 18432, 138, }, /* 1277 */
- { 142, 15, 12, 0, 0, 18432, 68, }, /* 1278 */
- { 143, 21, 12, 0, 0, 18432, 68, }, /* 1279 */
- { 143, 21, 12, 0, 0, 18432, 106, }, /* 1280 */
- { 143, 7, 12, 0, 0, 18432, 82, }, /* 1281 */
- { 143, 12, 3, 0, 0, 26624, 130, }, /* 1282 */
- { 143, 10, 5, 0, 0, 18432, 144, }, /* 1283 */
- { 59, 7, 12, 0, 0, 18432, 82, }, /* 1284 */
- { 59, 12, 3, 0, 0, 26624, 130, }, /* 1285 */
- { 59, 12, 3, 0, 0, 26624, 96, }, /* 1286 */
- { 59, 12, 3, 0, 0, 26624, 146, }, /* 1287 */
- { 59, 7, 4, 0, 0, 18432, 82, }, /* 1288 */
- { 59, 13, 12, 0, 0, 18432, 138, }, /* 1289 */
- { 61, 7, 12, 0, 0, 18432, 82, }, /* 1290 */
- { 61, 10, 5, 0, 0, 18432, 144, }, /* 1291 */
- { 61, 12, 3, 0, 0, 26624, 130, }, /* 1292 */
- { 61, 12, 3, 0, 0, 26624, 146, }, /* 1293 */
- { 61, 13, 12, 0, 0, 18432, 138, }, /* 1294 */
- { 150, 7, 12, 0, 0, 18432, 82, }, /* 1295 */
- { 150, 12, 3, 0, 0, 26624, 130, }, /* 1296 */
- { 150, 10, 5, 0, 0, 18432, 144, }, /* 1297 */
- { 150, 21, 12, 0, 0, 18432, 124, }, /* 1298 */
- { 11, 15, 12, 0, 0, 18432, 68, }, /* 1299 */
- { 11, 21, 12, 0, 0, 18432, 68, }, /* 1300 */
- { 94, 7, 12, 0, 0, 18432, 82, }, /* 1301 */
- { 94, 14, 12, 0, 0, 18432, 82, }, /* 1302 */
- { 94, 21, 12, 0, 0, 18432, 106, }, /* 1303 */
- { 66, 7, 12, 0, 0, 18432, 82, }, /* 1304 */
- { 66, 21, 12, 0, 0, 18432, 68, }, /* 1305 */
- { 109, 7, 12, 0, 0, 18432, 82, }, /* 1306 */
- { 109, 1, 2, 0, 0, 18432, 322, }, /* 1307 */
- { 138, 7, 12, 0, 0, 18432, 82, }, /* 1308 */
- { 130, 7, 12, 0, 0, 18432, 82, }, /* 1309 */
- { 130, 13, 12, 0, 0, 18432, 138, }, /* 1310 */
- { 130, 21, 12, 0, 0, 18432, 124, }, /* 1311 */
- { 159, 7, 12, 0, 0, 18432, 82, }, /* 1312 */
- { 159, 13, 12, 0, 0, 18432, 138, }, /* 1313 */
- { 126, 7, 12, 0, 0, 18432, 82, }, /* 1314 */
- { 126, 12, 3, 0, 0, 26624, 96, }, /* 1315 */
- { 126, 21, 12, 0, 0, 18432, 124, }, /* 1316 */
- { 128, 7, 12, 0, 0, 18432, 82, }, /* 1317 */
- { 128, 12, 3, 0, 0, 26624, 96, }, /* 1318 */
- { 128, 21, 12, 0, 0, 18432, 124, }, /* 1319 */
- { 128, 21, 12, 0, 0, 18432, 106, }, /* 1320 */
- { 128, 21, 12, 0, 0, 18432, 68, }, /* 1321 */
- { 128, 26, 12, 0, 0, 18432, 68, }, /* 1322 */
- { 128, 6, 12, 0, 0, 18432, 142, }, /* 1323 */
- { 128, 6, 12, 0, 0, 18432, 136, }, /* 1324 */
- { 128, 13, 12, 0, 0, 18432, 138, }, /* 1325 */
- { 128, 15, 12, 0, 0, 18432, 68, }, /* 1326 */
- { 151, 9, 12, 0, 32, 18432, 74, }, /* 1327 */
- { 151, 5, 12, 0, -32, 18432, 76, }, /* 1328 */
- { 151, 15, 12, 0, 0, 18432, 68, }, /* 1329 */
- { 151, 21, 12, 0, 0, 18432, 106, }, /* 1330 */
- { 151, 21, 12, 0, 0, 18432, 124, }, /* 1331 */
- { 151, 21, 12, 0, 0, 18432, 68, }, /* 1332 */
- { 123, 7, 12, 0, 0, 18432, 82, }, /* 1333 */
- { 123, 12, 3, 0, 0, 26624, 130, }, /* 1334 */
- { 123, 10, 5, 0, 0, 18432, 144, }, /* 1335 */
- { 123, 12, 3, 0, 0, 26624, 128, }, /* 1336 */
- { 123, 6, 12, 0, 0, 18432, 92, }, /* 1337 */
- { 146, 6, 12, 0, 0, 18432, 136, }, /* 1338 */
- { 147, 6, 12, 0, 0, 18432, 136, }, /* 1339 */
- { 23, 21, 12, 0, 0, 28672, 68, }, /* 1340 */
- { 158, 12, 3, 0, 0, 26624, 328, }, /* 1341 */
- { 23, 10, 5, 0, 0, 18432, 164, }, /* 1342 */
- { 146, 7, 12, 0, 0, 18432, 284, }, /* 1343 */
- { 158, 7, 12, 0, 0, 18432, 284, }, /* 1344 */
- { 21, 6, 12, 0, 0, 18432, 92, }, /* 1345 */
- { 147, 7, 12, 0, 0, 18432, 284, }, /* 1346 */
- { 46, 7, 12, 0, 0, 18432, 82, }, /* 1347 */
- { 46, 26, 12, 0, 0, 18432, 68, }, /* 1348 */
- { 46, 12, 3, 0, 0, 26624, 102, }, /* 1349 */
- { 46, 12, 3, 0, 0, 26624, 130, }, /* 1350 */
- { 46, 21, 12, 0, 0, 18432, 124, }, /* 1351 */
- { 69, 1, 2, 0, 0, 6153, 66, }, /* 1352 */
- { 69, 10, 3, 0, 0, 18432, 330, }, /* 1353 */
- { 69, 10, 5, 0, 0, 18432, 138, }, /* 1354 */
- { 69, 10, 5, 0, 0, 18432, 160, }, /* 1355 */
- { 69, 10, 3, 0, 0, 18432, 286, }, /* 1356 */
- { 1, 12, 3, 0, 0, 26624, 102, }, /* 1357 */
- { 69, 25, 12, 0, 0, 18432, 118, }, /* 1358 */
- { 69, 13, 12, 0, 0, 10240, 214, }, /* 1359 */
- { 141, 26, 12, 0, 0, 18432, 68, }, /* 1360 */
- { 141, 12, 3, 0, 0, 26624, 102, }, /* 1361 */
- { 141, 21, 12, 0, 0, 18432, 106, }, /* 1362 */
- { 141, 21, 12, 0, 0, 18432, 124, }, /* 1363 */
- { 141, 21, 12, 0, 0, 18432, 68, }, /* 1364 */
- { 35, 12, 3, 0, 0, 26624, 130, }, /* 1365 */
- { 154, 7, 12, 0, 0, 18432, 82, }, /* 1366 */
- { 154, 12, 3, 0, 0, 26624, 96, }, /* 1367 */
- { 154, 6, 12, 0, 0, 18432, 142, }, /* 1368 */
- { 154, 6, 12, 0, 0, 18432, 136, }, /* 1369 */
- { 154, 13, 12, 0, 0, 18432, 138, }, /* 1370 */
- { 154, 26, 12, 0, 0, 18432, 68, }, /* 1371 */
- { 160, 7, 12, 0, 0, 18432, 82, }, /* 1372 */
- { 160, 12, 3, 0, 0, 26624, 96, }, /* 1373 */
- { 155, 7, 12, 0, 0, 18432, 82, }, /* 1374 */
- { 155, 12, 3, 0, 0, 26624, 96, }, /* 1375 */
- { 155, 13, 12, 0, 0, 18432, 138, }, /* 1376 */
- { 155, 23, 12, 0, 0, 14336, 68, }, /* 1377 */
- { 129, 7, 12, 0, 0, 34816, 82, }, /* 1378 */
- { 129, 15, 12, 0, 0, 34816, 68, }, /* 1379 */
- { 129, 12, 3, 0, 0, 26624, 96, }, /* 1380 */
- { 58, 9, 12, 0, 34, 34816, 74, }, /* 1381 */
- { 58, 5, 12, 0, -34, 34816, 76, }, /* 1382 */
- { 58, 12, 3, 0, 0, 26624, 150, }, /* 1383 */
- { 58, 12, 3, 0, 0, 26624, 130, }, /* 1384 */
- { 58, 12, 3, 0, 0, 26624, 96, }, /* 1385 */
- { 58, 6, 12, 0, 0, 34816, 142, }, /* 1386 */
- { 58, 13, 12, 0, 0, 34816, 138, }, /* 1387 */
- { 58, 21, 12, 0, 0, 34816, 68, }, /* 1388 */
- { 69, 15, 12, 0, 0, 0, 68, }, /* 1389 */
- { 69, 26, 12, 0, 0, 0, 68, }, /* 1390 */
- { 69, 23, 12, 0, 0, 0, 68, }, /* 1391 */
- { 3, 7, 12, 0, 0, 0, 240, }, /* 1392 */
- { 69, 26, 14, 0, 0, 28672, 332, }, /* 1393 */
- { 69, 26, 14, 0, 0, 28672, 334, }, /* 1394 */
- { 68, 2, 14, 0, 0, 18432, 336, }, /* 1395 */
- { 69, 26, 12, 0, 0, 18432, 338, }, /* 1396 */
- { 69, 26, 14, 0, 0, 18432, 340, }, /* 1397 */
- { 69, 26, 14, 0, 0, 18432, 334, }, /* 1398 */
- { 69, 26, 11, 0, 0, 18432, 342, }, /* 1399 */
- { 20, 26, 12, 0, 0, 18432, 68, }, /* 1400 */
- { 69, 26, 14, 0, 0, 18432, 236, }, /* 1401 */
- { 69, 26, 14, 0, 0, 18447, 334, }, /* 1402 */
- { 69, 26, 14, 0, 0, 28672, 344, }, /* 1403 */
- { 69, 26, 14, 0, 0, 28672, 346, }, /* 1404 */
- { 69, 24, 3, 0, 0, 28672, 348, }, /* 1405 */
- { 69, 26, 14, 0, 0, 28672, 350, }, /* 1406 */
- { 69, 13, 12, 0, 0, 10240, 138, }, /* 1407 */
- { 69, 1, 3, 0, 0, 6144, 352, }, /* 1408 */
+ { 76, 5, 12, 0, -38864, 18432, 70, }, /* 945 */
+ { 113, 10, 5, 0, 0, 18432, 160, }, /* 946 */
+ { 113, 13, 12, 0, 0, 18432, 138, }, /* 947 */
+ { 18, 7, 9, 0, 0, 18432, 82, }, /* 948 */
+ { 18, 7, 10, 0, 0, 18432, 82, }, /* 949 */
+ { 68, 4, 12, 0, 0, 18432, 0, }, /* 950 */
+ { 68, 3, 12, 0, 0, 18432, 0, }, /* 951 */
+ { 23, 7, 12, 0, 0, 18432, 284, }, /* 952 */
+ { 71, 25, 12, 0, 0, 12288, 118, }, /* 953 */
+ { 3, 7, 12, 0, 0, 0, 296, }, /* 954 */
+ { 69, 18, 12, 0, 0, 28705, 54, }, /* 955 */
+ { 69, 22, 12, 0, 0, 28705, 54, }, /* 956 */
+ { 68, 2, 12, 0, 0, 6144, 298, }, /* 957 */
+ { 3, 7, 12, 0, 0, 39, 82, }, /* 958 */
+ { 3, 26, 12, 0, 0, 28711, 68, }, /* 959 */
+ { 84, 12, 3, 0, 0, 26624, 180, }, /* 960 */
+ { 84, 12, 3, 0, 0, 26624, 300, }, /* 961 */
+ { 69, 21, 12, 0, 0, 28672, 68, }, /* 962 */
+ { 69, 21, 12, 0, 0, 28672, 122, }, /* 963 */
+ { 69, 22, 12, 0, 0, 28672, 68, }, /* 964 */
+ { 69, 18, 12, 0, 0, 28672, 68, }, /* 965 */
+ { 69, 17, 12, 0, 0, 28672, 126, }, /* 966 */
+ { 69, 22, 12, 0, 0, 28672, 302, }, /* 967 */
+ { 69, 18, 12, 0, 0, 28672, 302, }, /* 968 */
+ { 69, 21, 12, 0, 0, 8192, 106, }, /* 969 */
+ { 69, 21, 12, 0, 0, 8192, 304, }, /* 970 */
+ { 69, 21, 12, 0, 0, 8192, 306, }, /* 971 */
+ { 69, 21, 12, 0, 0, 28672, 124, }, /* 972 */
+ { 69, 22, 12, 0, 0, 28672, 158, }, /* 973 */
+ { 69, 18, 12, 0, 0, 28672, 158, }, /* 974 */
+ { 69, 21, 12, 0, 0, 14336, 68, }, /* 975 */
+ { 69, 21, 12, 0, 0, 28672, 118, }, /* 976 */
+ { 69, 17, 12, 0, 0, 12288, 224, }, /* 977 */
+ { 69, 25, 12, 0, 0, 28672, 226, }, /* 978 */
+ { 69, 21, 12, 0, 0, 28672, 302, }, /* 979 */
+ { 69, 21, 12, 0, 0, 28672, 308, }, /* 980 */
+ { 69, 17, 12, 0, 0, 12288, 126, }, /* 981 */
+ { 69, 21, 12, 0, 0, 8192, 68, }, /* 982 */
+ { 69, 13, 12, 0, 0, 10240, 310, }, /* 983 */
+ { 0, 9, 12, 0, 32, 18432, 312, }, /* 984 */
+ { 69, 24, 12, 0, 0, 28672, 314, }, /* 985 */
+ { 0, 5, 12, 0, -32, 18432, 316, }, /* 986 */
+ { 69, 21, 12, 0, 0, 28825, 124, }, /* 987 */
+ { 69, 22, 12, 0, 0, 28825, 318, }, /* 988 */
+ { 69, 18, 12, 0, 0, 28825, 318, }, /* 989 */
+ { 69, 21, 12, 0, 0, 28825, 106, }, /* 990 */
+ { 69, 6, 3, 0, 0, 18525, 320, }, /* 991 */
+ { 69, 1, 2, 0, 0, 28672, 322, }, /* 992 */
+ { 31, 7, 12, 0, 0, 18432, 82, }, /* 993 */
+ { 69, 21, 12, 0, 0, 18552, 68, }, /* 994 */
+ { 69, 21, 12, 0, 0, 28792, 68, }, /* 995 */
+ { 69, 21, 12, 0, 0, 18483, 68, }, /* 996 */
+ { 69, 15, 12, 0, 0, 18555, 68, }, /* 997 */
+ { 69, 26, 12, 0, 0, 18483, 68, }, /* 998 */
+ { 1, 14, 12, 0, 0, 28672, 82, }, /* 999 */
+ { 1, 15, 12, 0, 0, 28672, 68, }, /* 1000 */
+ { 1, 26, 12, 0, 0, 28672, 68, }, /* 1001 */
+ { 1, 26, 12, 0, 0, 18432, 68, }, /* 1002 */
+ { 102, 7, 12, 0, 0, 18432, 82, }, /* 1003 */
+ { 103, 7, 12, 0, 0, 18432, 82, }, /* 1004 */
+ { 84, 12, 3, 0, 0, 26651, 96, }, /* 1005 */
+ { 69, 15, 12, 0, 0, 10267, 68, }, /* 1006 */
+ { 81, 7, 12, 0, 0, 18432, 82, }, /* 1007 */
+ { 81, 15, 12, 0, 0, 18432, 68, }, /* 1008 */
+ { 82, 7, 12, 0, 0, 18432, 82, }, /* 1009 */
+ { 82, 14, 12, 0, 0, 18432, 82, }, /* 1010 */
+ { 53, 7, 12, 0, 0, 18432, 82, }, /* 1011 */
+ { 53, 12, 3, 0, 0, 26624, 130, }, /* 1012 */
+ { 85, 7, 12, 0, 0, 18432, 82, }, /* 1013 */
+ { 85, 21, 12, 0, 0, 18432, 106, }, /* 1014 */
+ { 91, 7, 12, 0, 0, 18432, 82, }, /* 1015 */
+ { 91, 21, 12, 0, 0, 18432, 106, }, /* 1016 */
+ { 91, 14, 12, 0, 0, 18432, 82, }, /* 1017 */
+ { 83, 9, 12, 0, 40, 18432, 74, }, /* 1018 */
+ { 83, 5, 12, 0, -40, 18432, 76, }, /* 1019 */
+ { 86, 7, 12, 0, 0, 18432, 82, }, /* 1020 */
+ { 87, 7, 12, 0, 0, 18432, 82, }, /* 1021 */
+ { 87, 13, 12, 0, 0, 18432, 138, }, /* 1022 */
+ { 145, 9, 12, 0, 40, 18432, 74, }, /* 1023 */
+ { 145, 5, 12, 0, -40, 18432, 76, }, /* 1024 */
+ { 127, 7, 12, 0, 0, 18432, 82, }, /* 1025 */
+ { 125, 7, 12, 0, 0, 18432, 82, }, /* 1026 */
+ { 125, 21, 12, 0, 0, 18432, 68, }, /* 1027 */
+ { 161, 9, 12, 0, 39, 18432, 74, }, /* 1028 */
+ { 161, 5, 12, 0, -39, 18432, 76, }, /* 1029 */
+ { 49, 7, 12, 0, 0, 18432, 82, }, /* 1030 */
+ { 0, 6, 12, 0, 0, 18432, 94, }, /* 1031 */
+ { 32, 7, 12, 0, 0, 34816, 82, }, /* 1032 */
+ { 114, 7, 12, 0, 0, 34816, 82, }, /* 1033 */
+ { 114, 21, 12, 0, 0, 34816, 106, }, /* 1034 */
+ { 114, 15, 12, 0, 0, 34816, 68, }, /* 1035 */
+ { 133, 7, 12, 0, 0, 34816, 82, }, /* 1036 */
+ { 133, 26, 12, 0, 0, 34816, 68, }, /* 1037 */
+ { 133, 15, 12, 0, 0, 34816, 68, }, /* 1038 */
+ { 132, 7, 12, 0, 0, 34816, 82, }, /* 1039 */
+ { 132, 15, 12, 0, 0, 34816, 68, }, /* 1040 */
+ { 139, 7, 12, 0, 0, 34816, 82, }, /* 1041 */
+ { 139, 15, 12, 0, 0, 34816, 68, }, /* 1042 */
+ { 95, 7, 12, 0, 0, 34816, 82, }, /* 1043 */
+ { 95, 15, 12, 0, 0, 34816, 68, }, /* 1044 */
+ { 95, 21, 12, 0, 0, 28672, 106, }, /* 1045 */
+ { 104, 7, 12, 0, 0, 34816, 82, }, /* 1046 */
+ { 104, 21, 12, 0, 0, 34816, 68, }, /* 1047 */
+ { 122, 7, 12, 0, 0, 34816, 82, }, /* 1048 */
+ { 121, 7, 12, 0, 0, 34816, 82, }, /* 1049 */
+ { 121, 15, 12, 0, 0, 34816, 68, }, /* 1050 */
+ { 92, 7, 12, 0, 0, 34816, 82, }, /* 1051 */
+ { 92, 12, 3, 0, 0, 26624, 130, }, /* 1052 */
+ { 92, 12, 3, 0, 0, 26624, 102, }, /* 1053 */
+ { 92, 12, 3, 0, 0, 26624, 184, }, /* 1054 */
+ { 92, 15, 12, 0, 0, 34816, 68, }, /* 1055 */
+ { 92, 21, 12, 0, 0, 34816, 68, }, /* 1056 */
+ { 92, 21, 12, 0, 0, 34816, 124, }, /* 1057 */
+ { 115, 7, 12, 0, 0, 34816, 82, }, /* 1058 */
+ { 115, 15, 12, 0, 0, 34816, 68, }, /* 1059 */
+ { 115, 21, 12, 0, 0, 34816, 68, }, /* 1060 */
+ { 131, 7, 12, 0, 0, 34816, 82, }, /* 1061 */
+ { 131, 15, 12, 0, 0, 34816, 68, }, /* 1062 */
+ { 51, 7, 12, 0, 0, 34816, 82, }, /* 1063 */
+ { 51, 26, 12, 0, 0, 34816, 68, }, /* 1064 */
+ { 51, 12, 3, 0, 0, 26624, 96, }, /* 1065 */
+ { 51, 15, 12, 0, 0, 34816, 68, }, /* 1066 */
+ { 51, 21, 12, 0, 0, 34816, 106, }, /* 1067 */
+ { 51, 21, 12, 0, 0, 34918, 106, }, /* 1068 */
+ { 51, 21, 12, 0, 0, 34816, 68, }, /* 1069 */
+ { 108, 7, 12, 0, 0, 34816, 82, }, /* 1070 */
+ { 108, 21, 12, 0, 0, 28672, 68, }, /* 1071 */
+ { 108, 21, 12, 0, 0, 28672, 106, }, /* 1072 */
+ { 116, 7, 12, 0, 0, 34816, 82, }, /* 1073 */
+ { 116, 15, 12, 0, 0, 34816, 68, }, /* 1074 */
+ { 117, 7, 12, 0, 0, 34816, 82, }, /* 1075 */
+ { 117, 15, 12, 0, 0, 34816, 68, }, /* 1076 */
+ { 54, 7, 12, 0, 0, 34816, 82, }, /* 1077 */
+ { 54, 21, 12, 0, 0, 34816, 106, }, /* 1078 */
+ { 54, 15, 12, 0, 0, 34816, 68, }, /* 1079 */
+ { 118, 7, 12, 0, 0, 34816, 82, }, /* 1080 */
+ { 140, 9, 12, 0, 64, 34816, 74, }, /* 1081 */
+ { 140, 5, 12, 0, -64, 34816, 76, }, /* 1082 */
+ { 140, 15, 12, 0, 0, 34816, 68, }, /* 1083 */
+ { 62, 7, 12, 0, 0, 0, 82, }, /* 1084 */
+ { 62, 7, 12, 0, 0, 0, 294, }, /* 1085 */
+ { 62, 12, 3, 0, 0, 26624, 128, }, /* 1086 */
+ { 62, 13, 12, 0, 0, 2048, 138, }, /* 1087 */
+ { 3, 15, 12, 0, 0, 2048, 68, }, /* 1088 */
+ { 65, 7, 12, 0, 0, 34816, 82, }, /* 1089 */
+ { 65, 12, 3, 0, 0, 26624, 130, }, /* 1090 */
+ { 65, 17, 12, 0, 0, 34816, 126, }, /* 1091 */
+ { 152, 7, 12, 0, 0, 34816, 82, }, /* 1092 */
+ { 152, 15, 12, 0, 0, 34816, 68, }, /* 1093 */
+ { 63, 7, 12, 0, 0, 0, 82, }, /* 1094 */
+ { 63, 12, 3, 0, 0, 26624, 96, }, /* 1095 */
+ { 63, 15, 12, 0, 0, 0, 68, }, /* 1096 */
+ { 63, 21, 12, 0, 0, 0, 124, }, /* 1097 */
+ { 67, 7, 12, 0, 0, 34816, 82, }, /* 1098 */
+ { 67, 12, 3, 0, 0, 26624, 96, }, /* 1099 */
+ { 67, 21, 12, 0, 0, 34816, 124, }, /* 1100 */
+ { 156, 7, 12, 0, 0, 34816, 82, }, /* 1101 */
+ { 156, 15, 12, 0, 0, 34816, 68, }, /* 1102 */
+ { 153, 7, 12, 0, 0, 34816, 82, }, /* 1103 */
+ { 120, 10, 5, 0, 0, 18432, 144, }, /* 1104 */
+ { 120, 12, 3, 0, 0, 26624, 130, }, /* 1105 */
+ { 120, 7, 12, 0, 0, 18432, 82, }, /* 1106 */
+ { 120, 12, 3, 0, 0, 26624, 146, }, /* 1107 */
+ { 120, 21, 12, 0, 0, 18432, 124, }, /* 1108 */
+ { 120, 21, 12, 0, 0, 18432, 106, }, /* 1109 */
+ { 120, 15, 12, 0, 0, 28672, 68, }, /* 1110 */
+ { 120, 13, 12, 0, 0, 18432, 138, }, /* 1111 */
+ { 120, 12, 3, 0, 0, 26624, 184, }, /* 1112 */
+ { 41, 12, 3, 0, 0, 26624, 130, }, /* 1113 */
+ { 41, 10, 5, 0, 0, 18432, 144, }, /* 1114 */
+ { 41, 7, 12, 0, 0, 18432, 82, }, /* 1115 */
+ { 41, 12, 3, 0, 0, 26624, 146, }, /* 1116 */
+ { 41, 12, 3, 0, 0, 26624, 96, }, /* 1117 */
+ { 41, 21, 12, 0, 0, 18432, 68, }, /* 1118 */
+ { 41, 1, 4, 0, 0, 18432, 132, }, /* 1119 */
+ { 41, 21, 12, 0, 0, 18432, 124, }, /* 1120 */
+ { 124, 7, 12, 0, 0, 18432, 82, }, /* 1121 */
+ { 124, 13, 12, 0, 0, 18432, 138, }, /* 1122 */
+ { 43, 12, 3, 0, 0, 26624, 130, }, /* 1123 */
+ { 43, 7, 12, 0, 0, 18432, 82, }, /* 1124 */
+ { 43, 10, 5, 0, 0, 18432, 144, }, /* 1125 */
+ { 43, 12, 3, 0, 0, 26624, 146, }, /* 1126 */
+ { 43, 13, 12, 0, 0, 18432, 138, }, /* 1127 */
+ { 43, 21, 12, 0, 0, 18432, 68, }, /* 1128 */
+ { 43, 21, 12, 0, 0, 18432, 124, }, /* 1129 */
+ { 50, 7, 12, 0, 0, 18432, 82, }, /* 1130 */
+ { 50, 12, 3, 0, 0, 26624, 96, }, /* 1131 */
+ { 50, 21, 12, 0, 0, 18432, 68, }, /* 1132 */
+ { 44, 12, 3, 0, 0, 26624, 130, }, /* 1133 */
+ { 44, 10, 5, 0, 0, 18432, 144, }, /* 1134 */
+ { 44, 7, 12, 0, 0, 18432, 82, }, /* 1135 */
+ { 44, 10, 5, 0, 0, 18432, 174, }, /* 1136 */
+ { 44, 7, 4, 0, 0, 18432, 82, }, /* 1137 */
+ { 44, 21, 12, 0, 0, 18432, 124, }, /* 1138 */
+ { 44, 21, 12, 0, 0, 18432, 68, }, /* 1139 */
+ { 44, 12, 3, 0, 0, 26624, 102, }, /* 1140 */
+ { 44, 12, 3, 0, 0, 26624, 96, }, /* 1141 */
+ { 44, 13, 12, 0, 0, 18432, 138, }, /* 1142 */
+ { 15, 15, 12, 0, 0, 18432, 68, }, /* 1143 */
+ { 48, 7, 12, 0, 0, 18432, 82, }, /* 1144 */
+ { 48, 10, 5, 0, 0, 18432, 144, }, /* 1145 */
+ { 48, 12, 3, 0, 0, 26624, 130, }, /* 1146 */
+ { 48, 10, 5, 0, 0, 18432, 174, }, /* 1147 */
+ { 48, 12, 3, 0, 0, 26624, 96, }, /* 1148 */
+ { 48, 21, 12, 0, 0, 18432, 124, }, /* 1149 */
+ { 48, 21, 12, 0, 0, 18432, 106, }, /* 1150 */
+ { 48, 21, 12, 0, 0, 18432, 68, }, /* 1151 */
+ { 57, 7, 12, 0, 0, 18432, 82, }, /* 1152 */
+ { 57, 21, 12, 0, 0, 18432, 124, }, /* 1153 */
+ { 55, 7, 12, 0, 0, 18432, 82, }, /* 1154 */
+ { 55, 12, 3, 0, 0, 26624, 130, }, /* 1155 */
+ { 55, 10, 5, 0, 0, 18432, 144, }, /* 1156 */
+ { 55, 12, 3, 0, 0, 26624, 96, }, /* 1157 */
+ { 55, 12, 3, 0, 0, 26624, 146, }, /* 1158 */
+ { 55, 13, 12, 0, 0, 18432, 138, }, /* 1159 */
+ { 47, 12, 3, 0, 0, 26624, 130, }, /* 1160 */
+ { 47, 12, 3, 0, 0, 26705, 130, }, /* 1161 */
+ { 47, 10, 5, 0, 0, 18432, 144, }, /* 1162 */
+ { 47, 10, 5, 0, 0, 18513, 144, }, /* 1163 */
+ { 47, 7, 12, 0, 0, 18432, 82, }, /* 1164 */
+ { 84, 12, 3, 0, 0, 26705, 102, }, /* 1165 */
+ { 47, 12, 3, 0, 0, 26705, 96, }, /* 1166 */
+ { 47, 10, 3, 0, 0, 18432, 148, }, /* 1167 */
+ { 47, 10, 5, 0, 0, 18432, 174, }, /* 1168 */
+ { 47, 7, 12, 0, 0, 18432, 324, }, /* 1169 */
+ { 47, 12, 3, 0, 0, 26624, 96, }, /* 1170 */
+ { 144, 7, 12, 0, 0, 18432, 82, }, /* 1171 */
+ { 144, 10, 5, 0, 0, 18432, 144, }, /* 1172 */
+ { 144, 12, 3, 0, 0, 26624, 130, }, /* 1173 */
+ { 144, 12, 3, 0, 0, 26624, 146, }, /* 1174 */
+ { 144, 12, 3, 0, 0, 26624, 96, }, /* 1175 */
+ { 144, 21, 12, 0, 0, 18432, 124, }, /* 1176 */
+ { 144, 21, 12, 0, 0, 18432, 106, }, /* 1177 */
+ { 144, 21, 12, 0, 0, 18432, 68, }, /* 1178 */
+ { 144, 13, 12, 0, 0, 18432, 138, }, /* 1179 */
+ { 144, 12, 3, 0, 0, 26624, 102, }, /* 1180 */
+ { 56, 7, 12, 0, 0, 18432, 82, }, /* 1181 */
+ { 56, 10, 3, 0, 0, 18432, 148, }, /* 1182 */
+ { 56, 10, 5, 0, 0, 18432, 144, }, /* 1183 */
+ { 56, 12, 3, 0, 0, 26624, 130, }, /* 1184 */
+ { 56, 12, 3, 0, 0, 26624, 146, }, /* 1185 */
+ { 56, 12, 3, 0, 0, 26624, 96, }, /* 1186 */
+ { 56, 21, 12, 0, 0, 18432, 68, }, /* 1187 */
+ { 56, 13, 12, 0, 0, 18432, 138, }, /* 1188 */
+ { 135, 7, 12, 0, 0, 18432, 82, }, /* 1189 */
+ { 135, 10, 3, 0, 0, 18432, 148, }, /* 1190 */
+ { 135, 10, 5, 0, 0, 18432, 144, }, /* 1191 */
+ { 135, 12, 3, 0, 0, 26624, 130, }, /* 1192 */
+ { 135, 12, 3, 0, 0, 26624, 146, }, /* 1193 */
+ { 135, 12, 3, 0, 0, 26624, 96, }, /* 1194 */
+ { 135, 21, 12, 0, 0, 18432, 68, }, /* 1195 */
+ { 135, 21, 12, 0, 0, 18432, 124, }, /* 1196 */
+ { 135, 21, 12, 0, 0, 18432, 106, }, /* 1197 */
+ { 135, 21, 12, 0, 0, 18432, 178, }, /* 1198 */
+ { 52, 7, 12, 0, 0, 18432, 82, }, /* 1199 */
+ { 52, 10, 5, 0, 0, 18432, 144, }, /* 1200 */
+ { 52, 12, 3, 0, 0, 26624, 130, }, /* 1201 */
+ { 52, 12, 3, 0, 0, 26624, 146, }, /* 1202 */
+ { 52, 21, 12, 0, 0, 18432, 124, }, /* 1203 */
+ { 52, 21, 12, 0, 0, 18432, 68, }, /* 1204 */
+ { 52, 13, 12, 0, 0, 18432, 138, }, /* 1205 */
+ { 45, 7, 12, 0, 0, 18432, 82, }, /* 1206 */
+ { 45, 12, 3, 0, 0, 26624, 130, }, /* 1207 */
+ { 45, 10, 5, 0, 0, 18432, 144, }, /* 1208 */
+ { 45, 10, 5, 0, 0, 18432, 174, }, /* 1209 */
+ { 45, 12, 3, 0, 0, 26624, 96, }, /* 1210 */
+ { 45, 21, 12, 0, 0, 18432, 68, }, /* 1211 */
+ { 45, 13, 12, 0, 0, 18432, 138, }, /* 1212 */
+ { 137, 7, 12, 0, 0, 18432, 82, }, /* 1213 */
+ { 137, 12, 3, 0, 0, 26624, 130, }, /* 1214 */
+ { 137, 10, 12, 0, 0, 18432, 144, }, /* 1215 */
+ { 137, 10, 5, 0, 0, 18432, 144, }, /* 1216 */
+ { 137, 12, 3, 0, 0, 26624, 146, }, /* 1217 */
+ { 137, 13, 12, 0, 0, 18432, 138, }, /* 1218 */
+ { 137, 15, 12, 0, 0, 18432, 68, }, /* 1219 */
+ { 137, 21, 12, 0, 0, 18432, 124, }, /* 1220 */
+ { 137, 26, 12, 0, 0, 18432, 68, }, /* 1221 */
+ { 60, 7, 12, 0, 0, 18432, 82, }, /* 1222 */
+ { 60, 10, 5, 0, 0, 18432, 144, }, /* 1223 */
+ { 60, 12, 3, 0, 0, 26624, 130, }, /* 1224 */
+ { 60, 12, 3, 0, 0, 26624, 146, }, /* 1225 */
+ { 60, 12, 3, 0, 0, 26624, 96, }, /* 1226 */
+ { 60, 21, 12, 0, 0, 18432, 68, }, /* 1227 */
+ { 136, 9, 12, 0, 32, 18432, 74, }, /* 1228 */
+ { 136, 5, 12, 0, -32, 18432, 76, }, /* 1229 */
+ { 136, 13, 12, 0, 0, 18432, 138, }, /* 1230 */
+ { 136, 15, 12, 0, 0, 18432, 68, }, /* 1231 */
+ { 136, 7, 12, 0, 0, 18432, 82, }, /* 1232 */
+ { 157, 7, 12, 0, 0, 18432, 82, }, /* 1233 */
+ { 157, 10, 3, 0, 0, 18432, 148, }, /* 1234 */
+ { 157, 10, 5, 0, 0, 18432, 144, }, /* 1235 */
+ { 157, 12, 3, 0, 0, 26624, 130, }, /* 1236 */
+ { 157, 10, 5, 0, 0, 18432, 174, }, /* 1237 */
+ { 157, 12, 3, 0, 0, 26624, 146, }, /* 1238 */
+ { 157, 7, 4, 0, 0, 18432, 82, }, /* 1239 */
+ { 157, 12, 3, 0, 0, 26624, 96, }, /* 1240 */
+ { 157, 21, 12, 0, 0, 18432, 124, }, /* 1241 */
+ { 157, 21, 12, 0, 0, 18432, 68, }, /* 1242 */
+ { 157, 13, 12, 0, 0, 18432, 138, }, /* 1243 */
+ { 64, 7, 12, 0, 0, 18432, 82, }, /* 1244 */
+ { 64, 10, 5, 0, 0, 18432, 144, }, /* 1245 */
+ { 64, 12, 3, 0, 0, 26624, 130, }, /* 1246 */
+ { 64, 12, 3, 0, 0, 26624, 146, }, /* 1247 */
+ { 64, 21, 12, 0, 0, 18432, 68, }, /* 1248 */
+ { 149, 7, 12, 0, 0, 18432, 82, }, /* 1249 */
+ { 149, 12, 3, 0, 0, 26624, 130, }, /* 1250 */
+ { 149, 12, 3, 0, 0, 18432, 130, }, /* 1251 */
+ { 149, 12, 3, 0, 0, 26624, 102, }, /* 1252 */
+ { 149, 12, 3, 0, 0, 26624, 146, }, /* 1253 */
+ { 149, 10, 5, 0, 0, 18432, 144, }, /* 1254 */
+ { 149, 7, 4, 0, 0, 18432, 82, }, /* 1255 */
+ { 149, 21, 12, 0, 0, 18432, 68, }, /* 1256 */
+ { 149, 21, 12, 0, 0, 18432, 124, }, /* 1257 */
+ { 148, 7, 12, 0, 0, 18432, 82, }, /* 1258 */
+ { 148, 12, 3, 0, 0, 26624, 130, }, /* 1259 */
+ { 148, 10, 5, 0, 0, 18432, 144, }, /* 1260 */
+ { 148, 7, 4, 0, 0, 18432, 82, }, /* 1261 */
+ { 148, 12, 3, 0, 0, 26624, 326, }, /* 1262 */
+ { 148, 12, 3, 0, 0, 26624, 146, }, /* 1263 */
+ { 148, 21, 12, 0, 0, 18432, 68, }, /* 1264 */
+ { 148, 21, 12, 0, 0, 18432, 124, }, /* 1265 */
+ { 148, 21, 12, 0, 0, 18432, 106, }, /* 1266 */
+ { 134, 7, 12, 0, 0, 18432, 82, }, /* 1267 */
+ { 142, 7, 12, 0, 0, 18432, 82, }, /* 1268 */
+ { 142, 10, 5, 0, 0, 18432, 144, }, /* 1269 */
+ { 142, 12, 3, 0, 0, 26624, 130, }, /* 1270 */
+ { 142, 12, 3, 0, 0, 18432, 146, }, /* 1271 */
+ { 142, 21, 12, 0, 0, 18432, 124, }, /* 1272 */
+ { 142, 21, 12, 0, 0, 18432, 106, }, /* 1273 */
+ { 142, 21, 12, 0, 0, 18432, 68, }, /* 1274 */
+ { 142, 13, 12, 0, 0, 18432, 138, }, /* 1275 */
+ { 142, 15, 12, 0, 0, 18432, 68, }, /* 1276 */
+ { 143, 21, 12, 0, 0, 18432, 68, }, /* 1277 */
+ { 143, 21, 12, 0, 0, 18432, 106, }, /* 1278 */
+ { 143, 7, 12, 0, 0, 18432, 82, }, /* 1279 */
+ { 143, 12, 3, 0, 0, 26624, 130, }, /* 1280 */
+ { 143, 10, 5, 0, 0, 18432, 144, }, /* 1281 */
+ { 59, 7, 12, 0, 0, 18432, 82, }, /* 1282 */
+ { 59, 12, 3, 0, 0, 26624, 130, }, /* 1283 */
+ { 59, 12, 3, 0, 0, 26624, 96, }, /* 1284 */
+ { 59, 12, 3, 0, 0, 26624, 146, }, /* 1285 */
+ { 59, 7, 4, 0, 0, 18432, 82, }, /* 1286 */
+ { 59, 13, 12, 0, 0, 18432, 138, }, /* 1287 */
+ { 61, 7, 12, 0, 0, 18432, 82, }, /* 1288 */
+ { 61, 10, 5, 0, 0, 18432, 144, }, /* 1289 */
+ { 61, 12, 3, 0, 0, 26624, 130, }, /* 1290 */
+ { 61, 12, 3, 0, 0, 26624, 146, }, /* 1291 */
+ { 61, 13, 12, 0, 0, 18432, 138, }, /* 1292 */
+ { 150, 7, 12, 0, 0, 18432, 82, }, /* 1293 */
+ { 150, 12, 3, 0, 0, 26624, 130, }, /* 1294 */
+ { 150, 10, 5, 0, 0, 18432, 144, }, /* 1295 */
+ { 150, 21, 12, 0, 0, 18432, 124, }, /* 1296 */
+ { 162, 12, 3, 0, 0, 26624, 130, }, /* 1297 */
+ { 162, 7, 4, 0, 0, 18432, 82, }, /* 1298 */
+ { 162, 10, 5, 0, 0, 18432, 144, }, /* 1299 */
+ { 162, 7, 12, 0, 0, 18432, 82, }, /* 1300 */
+ { 162, 10, 5, 0, 0, 18432, 176, }, /* 1301 */
+ { 162, 12, 3, 0, 0, 26624, 184, }, /* 1302 */
+ { 162, 21, 12, 0, 0, 18432, 124, }, /* 1303 */
+ { 162, 21, 12, 0, 0, 18432, 68, }, /* 1304 */
+ { 162, 13, 12, 0, 0, 18432, 138, }, /* 1305 */
+ { 11, 15, 12, 0, 0, 18432, 68, }, /* 1306 */
+ { 11, 21, 12, 0, 0, 18432, 68, }, /* 1307 */
+ { 94, 7, 12, 0, 0, 18432, 82, }, /* 1308 */
+ { 94, 14, 12, 0, 0, 18432, 82, }, /* 1309 */
+ { 94, 21, 12, 0, 0, 18432, 106, }, /* 1310 */
+ { 66, 7, 12, 0, 0, 18432, 82, }, /* 1311 */
+ { 66, 21, 12, 0, 0, 18432, 68, }, /* 1312 */
+ { 109, 7, 12, 0, 0, 18432, 82, }, /* 1313 */
+ { 109, 1, 2, 0, 0, 18432, 322, }, /* 1314 */
+ { 109, 12, 3, 0, 0, 26624, 102, }, /* 1315 */
+ { 109, 12, 3, 0, 0, 26624, 96, }, /* 1316 */
+ { 138, 7, 12, 0, 0, 18432, 82, }, /* 1317 */
+ { 130, 7, 12, 0, 0, 18432, 82, }, /* 1318 */
+ { 130, 13, 12, 0, 0, 18432, 138, }, /* 1319 */
+ { 130, 21, 12, 0, 0, 18432, 124, }, /* 1320 */
+ { 159, 7, 12, 0, 0, 18432, 82, }, /* 1321 */
+ { 159, 13, 12, 0, 0, 18432, 138, }, /* 1322 */
+ { 126, 7, 12, 0, 0, 18432, 82, }, /* 1323 */
+ { 126, 12, 3, 0, 0, 26624, 96, }, /* 1324 */
+ { 126, 21, 12, 0, 0, 18432, 124, }, /* 1325 */
+ { 128, 7, 12, 0, 0, 18432, 82, }, /* 1326 */
+ { 128, 12, 3, 0, 0, 26624, 96, }, /* 1327 */
+ { 128, 21, 12, 0, 0, 18432, 124, }, /* 1328 */
+ { 128, 21, 12, 0, 0, 18432, 106, }, /* 1329 */
+ { 128, 21, 12, 0, 0, 18432, 68, }, /* 1330 */
+ { 128, 26, 12, 0, 0, 18432, 68, }, /* 1331 */
+ { 128, 6, 12, 0, 0, 18432, 142, }, /* 1332 */
+ { 128, 6, 12, 0, 0, 18432, 136, }, /* 1333 */
+ { 128, 13, 12, 0, 0, 18432, 138, }, /* 1334 */
+ { 128, 15, 12, 0, 0, 18432, 68, }, /* 1335 */
+ { 151, 9, 12, 0, 32, 18432, 74, }, /* 1336 */
+ { 151, 5, 12, 0, -32, 18432, 76, }, /* 1337 */
+ { 151, 15, 12, 0, 0, 18432, 68, }, /* 1338 */
+ { 151, 21, 12, 0, 0, 18432, 106, }, /* 1339 */
+ { 151, 21, 12, 0, 0, 18432, 124, }, /* 1340 */
+ { 151, 21, 12, 0, 0, 18432, 68, }, /* 1341 */
+ { 123, 7, 12, 0, 0, 18432, 82, }, /* 1342 */
+ { 123, 12, 3, 0, 0, 26624, 130, }, /* 1343 */
+ { 123, 10, 5, 0, 0, 18432, 144, }, /* 1344 */
+ { 123, 12, 3, 0, 0, 26624, 128, }, /* 1345 */
+ { 123, 6, 12, 0, 0, 18432, 92, }, /* 1346 */
+ { 146, 6, 12, 0, 0, 18432, 136, }, /* 1347 */
+ { 147, 6, 12, 0, 0, 18432, 136, }, /* 1348 */
+ { 23, 21, 12, 0, 0, 28672, 68, }, /* 1349 */
+ { 158, 12, 3, 0, 0, 26624, 328, }, /* 1350 */
+ { 23, 10, 5, 0, 0, 18432, 164, }, /* 1351 */
+ { 146, 7, 12, 0, 0, 18432, 284, }, /* 1352 */
+ { 158, 7, 12, 0, 0, 18432, 284, }, /* 1353 */
+ { 21, 6, 12, 0, 0, 18432, 92, }, /* 1354 */
+ { 147, 7, 12, 0, 0, 18432, 284, }, /* 1355 */
+ { 46, 7, 12, 0, 0, 18432, 82, }, /* 1356 */
+ { 46, 26, 12, 0, 0, 18432, 68, }, /* 1357 */
+ { 46, 12, 3, 0, 0, 26624, 102, }, /* 1358 */
+ { 46, 12, 3, 0, 0, 26624, 130, }, /* 1359 */
+ { 46, 21, 12, 0, 0, 18432, 124, }, /* 1360 */
+ { 69, 1, 2, 0, 0, 6153, 66, }, /* 1361 */
+ { 69, 10, 3, 0, 0, 18432, 330, }, /* 1362 */
+ { 69, 10, 5, 0, 0, 18432, 138, }, /* 1363 */
+ { 69, 10, 5, 0, 0, 18432, 160, }, /* 1364 */
+ { 69, 10, 3, 0, 0, 18432, 286, }, /* 1365 */
+ { 1, 12, 3, 0, 0, 26624, 102, }, /* 1366 */
+ { 69, 25, 12, 0, 0, 18432, 118, }, /* 1367 */
+ { 69, 13, 12, 0, 0, 10240, 214, }, /* 1368 */
+ { 141, 26, 12, 0, 0, 18432, 68, }, /* 1369 */
+ { 141, 12, 3, 0, 0, 26624, 102, }, /* 1370 */
+ { 141, 21, 12, 0, 0, 18432, 106, }, /* 1371 */
+ { 141, 21, 12, 0, 0, 18432, 124, }, /* 1372 */
+ { 141, 21, 12, 0, 0, 18432, 68, }, /* 1373 */
+ { 35, 12, 3, 0, 0, 26624, 130, }, /* 1374 */
+ { 2, 6, 12, 0, 0, 18432, 90, }, /* 1375 */
+ { 154, 7, 12, 0, 0, 18432, 82, }, /* 1376 */
+ { 154, 12, 3, 0, 0, 26624, 96, }, /* 1377 */
+ { 154, 6, 12, 0, 0, 18432, 142, }, /* 1378 */
+ { 154, 6, 12, 0, 0, 18432, 136, }, /* 1379 */
+ { 154, 13, 12, 0, 0, 18432, 138, }, /* 1380 */
+ { 154, 26, 12, 0, 0, 18432, 68, }, /* 1381 */
+ { 160, 7, 12, 0, 0, 18432, 82, }, /* 1382 */
+ { 160, 12, 3, 0, 0, 26624, 96, }, /* 1383 */
+ { 155, 7, 12, 0, 0, 18432, 82, }, /* 1384 */
+ { 155, 12, 3, 0, 0, 26624, 96, }, /* 1385 */
+ { 155, 13, 12, 0, 0, 18432, 138, }, /* 1386 */
+ { 155, 23, 12, 0, 0, 14336, 68, }, /* 1387 */
+ { 163, 7, 12, 0, 0, 18432, 82, }, /* 1388 */
+ { 163, 6, 12, 0, 0, 18432, 142, }, /* 1389 */
+ { 163, 12, 3, 0, 0, 26624, 102, }, /* 1390 */
+ { 163, 13, 12, 0, 0, 18432, 138, }, /* 1391 */
+ { 129, 7, 12, 0, 0, 34816, 82, }, /* 1392 */
+ { 129, 15, 12, 0, 0, 34816, 68, }, /* 1393 */
+ { 129, 12, 3, 0, 0, 26624, 96, }, /* 1394 */
+ { 58, 9, 12, 0, 34, 34816, 74, }, /* 1395 */
+ { 58, 5, 12, 0, -34, 34816, 76, }, /* 1396 */
+ { 58, 12, 3, 0, 0, 26624, 150, }, /* 1397 */
+ { 58, 12, 3, 0, 0, 26624, 130, }, /* 1398 */
+ { 58, 12, 3, 0, 0, 26624, 96, }, /* 1399 */
+ { 58, 6, 12, 0, 0, 34816, 142, }, /* 1400 */
+ { 58, 13, 12, 0, 0, 34816, 138, }, /* 1401 */
+ { 58, 21, 12, 0, 0, 34816, 68, }, /* 1402 */
+ { 69, 15, 12, 0, 0, 0, 68, }, /* 1403 */
+ { 69, 26, 12, 0, 0, 0, 68, }, /* 1404 */
+ { 69, 23, 12, 0, 0, 0, 68, }, /* 1405 */
+ { 3, 7, 12, 0, 0, 0, 240, }, /* 1406 */
+ { 69, 26, 14, 0, 0, 28672, 332, }, /* 1407 */
+ { 69, 26, 14, 0, 0, 28672, 334, }, /* 1408 */
+ { 68, 2, 14, 0, 0, 18432, 336, }, /* 1409 */
+ { 69, 26, 12, 0, 0, 18432, 338, }, /* 1410 */
+ { 69, 26, 14, 0, 0, 18432, 340, }, /* 1411 */
+ { 69, 26, 14, 0, 0, 18432, 334, }, /* 1412 */
+ { 69, 26, 11, 0, 0, 18432, 342, }, /* 1413 */
+ { 20, 26, 12, 0, 0, 18432, 68, }, /* 1414 */
+ { 69, 26, 14, 0, 0, 18432, 236, }, /* 1415 */
+ { 69, 26, 14, 0, 0, 18447, 334, }, /* 1416 */
+ { 69, 26, 14, 0, 0, 28672, 344, }, /* 1417 */
+ { 69, 26, 14, 0, 0, 28672, 346, }, /* 1418 */
+ { 69, 24, 3, 0, 0, 28672, 348, }, /* 1419 */
+ { 69, 26, 14, 0, 0, 28672, 350, }, /* 1420 */
+ { 69, 13, 12, 0, 0, 10240, 138, }, /* 1421 */
+ { 69, 1, 3, 0, 0, 6144, 352, }, /* 1422 */
};
const uint16_t PRIV(ucd_stage1)[] = { /* 17408 bytes */
@@ -1872,35 +1886,35 @@ const uint16_t PRIV(ucd_stage1)[] = { /* 17408 bytes */
142,143,144,145,146,147,148,149,150,151,152,153,154,154,155,156, /* U+10000 */
157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172, /* U+10800 */
173,174,175,176,177,178,179,146,180,181,146,182,183,184,185,146, /* U+11000 */
-186,187,188,189,190,191,146,146,192,193,194,195,146,196,146,197, /* U+11800 */
-198,198,198,198,198,198,198,199,200,198,201,146,146,146,146,146, /* U+12000 */
-146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,202, /* U+12800 */
-203,203,203,203,203,203,203,203,204,146,146,146,146,146,146,146, /* U+13000 */
+186,187,188,189,190,191,192,146,193,194,195,196,146,197,198,199, /* U+11800 */
+200,200,200,200,200,200,200,201,202,200,203,146,146,146,146,146, /* U+12000 */
+146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,204, /* U+12800 */
+205,205,205,205,205,205,205,205,206,146,146,146,146,146,146,146, /* U+13000 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+13800 */
-146,146,146,146,146,146,146,146,205,205,205,205,206,146,146,146, /* U+14000 */
+146,146,146,146,146,146,146,146,207,207,207,207,208,146,146,146, /* U+14000 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+14800 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+15000 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+15800 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+16000 */
-207,207,207,207,208,209,210,211,146,146,146,146,212,213,214,215, /* U+16800 */
-216,216,216,216,216,216,216,216,216,216,216,216,216,216,216,216, /* U+17000 */
-216,216,216,216,216,216,216,216,216,216,216,216,216,216,216,216, /* U+17800 */
-216,216,216,216,216,216,216,216,216,216,216,216,216,216,216,217, /* U+18000 */
-216,216,216,216,216,216,218,218,218,219,220,146,146,146,146,146, /* U+18800 */
+209,209,209,209,210,211,212,213,146,146,146,146,214,215,216,217, /* U+16800 */
+218,218,218,218,218,218,218,218,218,218,218,218,218,218,218,218, /* U+17000 */
+218,218,218,218,218,218,218,218,218,218,218,218,218,218,218,218, /* U+17800 */
+218,218,218,218,218,218,218,218,218,218,218,218,218,218,218,219, /* U+18000 */
+218,218,218,218,218,218,220,220,220,221,222,146,146,146,146,146, /* U+18800 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+19000 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+19800 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+1A000 */
-146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,221, /* U+1A800 */
-222,223,224,225,225,226,146,146,146,146,146,146,146,146,146,146, /* U+1B000 */
-146,146,146,146,146,146,146,146,227,228,146,146,146,146,146,146, /* U+1B800 */
+146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,223, /* U+1A800 */
+224,225,226,227,227,228,146,146,146,146,146,146,146,146,146,146, /* U+1B000 */
+146,146,146,146,146,146,146,146,229,230,146,146,146,146,146,146, /* U+1B800 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+1C000 */
-146,146,146,146,146,146,146,146,146,146,146,146,146,146,229,230, /* U+1C800 */
-231,232,233,234,235,236,237,146,238,239,240,241,242,243,244,245, /* U+1D000 */
-246,246,246,246,247,248,146,146,146,146,146,146,146,146,249,146, /* U+1D800 */
-250,146,251,146,146,252,146,146,146,146,146,146,146,146,146,253, /* U+1E000 */
-254,255,256,168,168,168,168,168,257,258,259,168,260,261,168,168, /* U+1E800 */
-262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277, /* U+1F000 */
-278,279,280,281,282,283,284,285,267,267,267,267,267,267,267,286, /* U+1F800 */
+146,146,146,146,146,146,146,146,146,146,146,146,146,146,231,232, /* U+1C800 */
+233,234,235,236,237,238,239,146,240,241,242,243,244,245,246,247, /* U+1D000 */
+248,248,248,248,249,250,146,146,146,146,146,146,146,146,251,146, /* U+1D800 */
+252,253,254,146,146,255,146,146,146,256,146,146,146,146,146,257, /* U+1E000 */
+258,259,260,168,168,168,168,168,261,262,263,168,264,265,168,168, /* U+1E800 */
+266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281, /* U+1F000 */
+282,283,284,285,286,287,288,289,271,271,271,271,271,271,271,290, /* U+1F800 */
101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101, /* U+20000 */
101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101, /* U+20800 */
101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101, /* U+21000 */
@@ -1921,23 +1935,23 @@ const uint16_t PRIV(ucd_stage1)[] = { /* 17408 bytes */
101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101, /* U+28800 */
101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101, /* U+29000 */
101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101, /* U+29800 */
-101,101,101,101,101,101,101,101,101,101,101,101,101,287,101,101, /* U+2A000 */
+101,101,101,101,101,101,101,101,101,101,101,101,101,291,101,101, /* U+2A000 */
101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101, /* U+2A800 */
-101,101,101,101,101,101,101,101,101,101,101,101,101,101,288,101, /* U+2B000 */
-289,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101, /* U+2B800 */
+101,101,101,101,101,101,101,101,101,101,101,101,101,101,292,101, /* U+2B000 */
+293,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101, /* U+2B800 */
101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101, /* U+2C000 */
-101,101,101,101,101,101,101,101,101,101,101,101,101,290,101,101, /* U+2C800 */
+101,101,101,101,101,101,101,101,101,101,101,101,101,294,101,101, /* U+2C800 */
101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101, /* U+2D000 */
101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101, /* U+2D800 */
101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101, /* U+2E000 */
-101,101,101,101,101,101,101,291,146,146,146,146,146,146,146,146, /* U+2E800 */
+101,101,101,101,101,101,101,295,146,146,146,146,146,146,146,146, /* U+2E800 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+2F000 */
-129,129,129,129,292,146,146,146,146,146,146,146,146,146,146,293, /* U+2F800 */
+129,129,129,129,296,146,146,146,146,146,146,146,146,146,146,297, /* U+2F800 */
101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101, /* U+30000 */
101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101, /* U+30800 */
-101,101,101,101,101,101,294,146,146,146,146,146,146,146,146,146, /* U+31000 */
-146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+31800 */
-146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+32000 */
+101,101,101,101,101,101,298,101,101,101,101,101,101,101,101,101, /* U+31000 */
+101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101, /* U+31800 */
+101,101,101,101,101,101,101,299,146,146,146,146,146,146,146,146, /* U+32000 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+32800 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+33000 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+33800 */
@@ -1964,7 +1978,7 @@ const uint16_t PRIV(ucd_stage1)[] = { /* 17408 bytes */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+3E000 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+3E800 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+3F000 */
-146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,293, /* U+3F800 */
+146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,297, /* U+3F800 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+40000 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+40800 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+41000 */
@@ -1996,7 +2010,7 @@ const uint16_t PRIV(ucd_stage1)[] = { /* 17408 bytes */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+4E000 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+4E800 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+4F000 */
-146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,293, /* U+4F800 */
+146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,297, /* U+4F800 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+50000 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+50800 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+51000 */
@@ -2028,7 +2042,7 @@ const uint16_t PRIV(ucd_stage1)[] = { /* 17408 bytes */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+5E000 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+5E800 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+5F000 */
-146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,293, /* U+5F800 */
+146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,297, /* U+5F800 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+60000 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+60800 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+61000 */
@@ -2060,7 +2074,7 @@ const uint16_t PRIV(ucd_stage1)[] = { /* 17408 bytes */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+6E000 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+6E800 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+6F000 */
-146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,293, /* U+6F800 */
+146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,297, /* U+6F800 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+70000 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+70800 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+71000 */
@@ -2092,7 +2106,7 @@ const uint16_t PRIV(ucd_stage1)[] = { /* 17408 bytes */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+7E000 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+7E800 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+7F000 */
-146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,293, /* U+7F800 */
+146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,297, /* U+7F800 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+80000 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+80800 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+81000 */
@@ -2124,7 +2138,7 @@ const uint16_t PRIV(ucd_stage1)[] = { /* 17408 bytes */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+8E000 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+8E800 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+8F000 */
-146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,293, /* U+8F800 */
+146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,297, /* U+8F800 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+90000 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+90800 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+91000 */
@@ -2156,7 +2170,7 @@ const uint16_t PRIV(ucd_stage1)[] = { /* 17408 bytes */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+9E000 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+9E800 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+9F000 */
-146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,293, /* U+9F800 */
+146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,297, /* U+9F800 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+A0000 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+A0800 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+A1000 */
@@ -2188,7 +2202,7 @@ const uint16_t PRIV(ucd_stage1)[] = { /* 17408 bytes */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+AE000 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+AE800 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+AF000 */
-146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,293, /* U+AF800 */
+146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,297, /* U+AF800 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+B0000 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+B0800 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+B1000 */
@@ -2220,7 +2234,7 @@ const uint16_t PRIV(ucd_stage1)[] = { /* 17408 bytes */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+BE000 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+BE800 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+BF000 */
-146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,293, /* U+BF800 */
+146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,297, /* U+BF800 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+C0000 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+C0800 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+C1000 */
@@ -2252,7 +2266,7 @@ const uint16_t PRIV(ucd_stage1)[] = { /* 17408 bytes */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+CE000 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+CE800 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+CF000 */
-146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,293, /* U+CF800 */
+146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,297, /* U+CF800 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+D0000 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+D0800 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+D1000 */
@@ -2284,9 +2298,9 @@ const uint16_t PRIV(ucd_stage1)[] = { /* 17408 bytes */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+DE000 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+DE800 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+DF000 */
-146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,293, /* U+DF800 */
-295,296,297,298,296,296,296,296,296,296,296,296,296,296,296,296, /* U+E0000 */
-296,296,296,296,296,296,296,296,296,296,296,296,296,296,296,296, /* U+E0800 */
+146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,297, /* U+DF800 */
+300,301,302,303,301,301,301,301,301,301,301,301,301,301,301,301, /* U+E0000 */
+301,301,301,301,301,301,301,301,301,301,301,301,301,301,301,301, /* U+E0800 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+E1000 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+E1800 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+E2000 */
@@ -2316,7 +2330,7 @@ const uint16_t PRIV(ucd_stage1)[] = { /* 17408 bytes */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+EE000 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+EE800 */
146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146, /* U+EF000 */
-146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,293, /* U+EF800 */
+146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,297, /* U+EF800 */
128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128, /* U+F0000 */
128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128, /* U+F0800 */
128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128, /* U+F1000 */
@@ -2348,7 +2362,7 @@ const uint16_t PRIV(ucd_stage1)[] = { /* 17408 bytes */
128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128, /* U+FE000 */
128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128, /* U+FE800 */
128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128, /* U+FF000 */
-128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,299, /* U+FF800 */
+128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,304, /* U+FF800 */
128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128, /* U+100000 */
128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128, /* U+100800 */
128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128, /* U+101000 */
@@ -2380,10 +2394,10 @@ const uint16_t PRIV(ucd_stage1)[] = { /* 17408 bytes */
128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128, /* U+10E000 */
128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128, /* U+10E800 */
128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128, /* U+10F000 */
-128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,299, /* U+10F800 */
+128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,304, /* U+10F800 */
};
-const uint16_t PRIV(ucd_stage2)[] = { /* 76800 bytes, block = 128 */
+const uint16_t PRIV(ucd_stage2)[] = { /* 78080 bytes, block = 128 */
/* block 0 */
0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 1, 3, 4, 0, 0,
@@ -2626,62 +2640,62 @@ const uint16_t PRIV(ucd_stage2)[] = { /* 76800 bytes, block = 128 */
388,388,388,389,390,390,390,390,390,391,390,163,163,163,163,163,
/* block 24 */
-392,393,393,393,394,395,395,395,395,395,395,395,395,163,395,395,
-395,163,395,395,395,395,395,395,395,395,395,395,395,395,395,395,
-395,395,395,395,395,395,395,395,395,163,395,395,395,395,395,395,
-395,395,395,395,395,395,395,395,395,395,163,163,396,395,392,392,
-392,393,393,393,393,163,392,392,392,163,392,392,392,397,163,163,
-163,163,163,163,163,392,392,163,395,395,395,163,163,395,163,163,
-395,395,392,392,163,163,398,398,398,398,398,398,398,398,398,398,
-163,163,163,163,163,163,163,399,400,400,400,400,400,400,400,401,
+392,393,393,393,392,394,394,394,394,394,394,394,394,163,394,394,
+394,163,394,394,394,394,394,394,394,394,394,394,394,394,394,394,
+394,394,394,394,394,394,394,394,394,163,394,394,394,394,394,394,
+394,394,394,394,394,394,394,394,394,394,163,163,395,394,392,392,
+392,393,393,393,393,163,392,392,392,163,392,392,392,396,163,163,
+163,163,163,163,163,392,392,163,394,394,394,163,163,394,163,163,
+394,394,392,392,163,163,397,397,397,397,397,397,397,397,397,397,
+163,163,163,163,163,163,163,398,399,399,399,399,399,399,399,400,
/* block 25 */
-402,403,404,404,405,402,402,402,402,402,402,402,402,163,402,402,
-402,163,402,402,402,402,402,402,402,402,402,402,402,402,402,402,
-402,402,402,402,402,402,402,402,402,163,402,402,402,402,402,402,
-402,402,402,402,163,402,402,402,402,402,163,163,406,402,404,407,
-404,404,408,404,404,163,407,404,404,163,404,404,403,409,163,163,
-163,163,163,163,163,408,408,163,163,163,163,163,163,402,402,163,
-402,402,403,403,163,163,410,410,410,410,410,410,410,410,410,410,
-163,402,402,163,163,163,163,163,163,163,163,163,163,163,163,163,
+401,402,403,403,404,401,401,401,401,401,401,401,401,163,401,401,
+401,163,401,401,401,401,401,401,401,401,401,401,401,401,401,401,
+401,401,401,401,401,401,401,401,401,163,401,401,401,401,401,401,
+401,401,401,401,163,401,401,401,401,401,163,163,405,401,403,406,
+403,403,407,403,403,163,406,403,403,163,403,403,402,408,163,163,
+163,163,163,163,163,407,407,163,163,163,163,163,163,401,401,163,
+401,401,402,402,163,163,409,409,409,409,409,409,409,409,409,409,
+163,401,401,403,163,163,163,163,163,163,163,163,163,163,163,163,
/* block 26 */
-411,411,412,412,413,413,413,413,413,413,413,413,413,163,413,413,
-413,163,413,413,413,413,413,413,413,413,413,413,413,413,413,413,
-413,413,413,413,413,413,413,413,413,413,413,413,413,413,413,413,
-413,413,413,413,413,413,413,413,413,413,413,414,414,413,415,412,
-412,411,411,411,411,163,412,412,412,163,412,412,412,414,416,417,
-163,163,163,163,413,413,413,415,418,418,418,418,418,418,418,413,
-413,413,411,411,163,163,419,419,419,419,419,419,419,419,419,419,
-418,418,418,418,418,418,418,418,418,417,413,413,413,413,413,413,
+410,410,411,411,412,412,412,412,412,412,412,412,412,163,412,412,
+412,163,412,412,412,412,412,412,412,412,412,412,412,412,412,412,
+412,412,412,412,412,412,412,412,412,412,412,412,412,412,412,412,
+412,412,412,412,412,412,412,412,412,412,412,413,413,412,414,411,
+411,410,410,410,410,163,411,411,411,163,411,411,411,413,415,416,
+163,163,163,163,412,412,412,414,417,417,417,417,417,417,417,412,
+412,412,410,410,163,163,418,418,418,418,418,418,418,418,418,418,
+417,417,417,417,417,417,417,417,417,416,412,412,412,412,412,412,
/* block 27 */
-163,420,421,421,163,422,422,422,422,422,422,422,422,422,422,422,
-422,422,422,422,422,422,422,163,163,163,422,422,422,422,422,422,
-422,422,422,422,422,422,422,422,422,422,422,422,422,422,422,422,
-422,422,163,422,422,422,422,422,422,422,422,422,163,422,163,163,
-422,422,422,422,422,422,422,163,163,163,423,163,163,163,163,424,
-421,421,420,420,420,163,420,163,421,421,421,421,421,421,421,424,
-163,163,163,163,163,163,425,425,425,425,425,425,425,425,425,425,
-163,163,421,421,426,163,163,163,163,163,163,163,163,163,163,163,
+163,419,420,420,163,421,421,421,421,421,421,421,421,421,421,421,
+421,421,421,421,421,421,421,163,163,163,421,421,421,421,421,421,
+421,421,421,421,421,421,421,421,421,421,421,421,421,421,421,421,
+421,421,163,421,421,421,421,421,421,421,421,421,163,421,163,163,
+421,421,421,421,421,421,421,163,163,163,422,163,163,163,163,423,
+420,420,419,419,419,163,419,163,420,420,420,420,420,420,420,423,
+163,163,163,163,163,163,424,424,424,424,424,424,424,424,424,424,
+163,163,420,420,425,163,163,163,163,163,163,163,163,163,163,163,
/* block 28 */
-163,427,427,427,427,427,427,427,427,427,427,427,427,427,427,427,
-427,427,427,427,427,427,427,427,427,427,427,427,427,427,427,427,
-427,427,427,427,427,427,427,427,427,427,427,427,427,427,427,427,
-427,428,427,429,428,428,428,428,428,428,430,163,163,163,163,431,
-432,432,432,432,432,427,433,434,434,434,434,434,434,428,434,435,
-436,436,436,436,436,436,436,436,436,436,437,437,163,163,163,163,
+163,426,426,426,426,426,426,426,426,426,426,426,426,426,426,426,
+426,426,426,426,426,426,426,426,426,426,426,426,426,426,426,426,
+426,426,426,426,426,426,426,426,426,426,426,426,426,426,426,426,
+426,427,426,428,427,427,427,427,427,427,429,163,163,163,163,430,
+431,431,431,431,431,426,432,433,433,433,433,433,433,427,433,434,
+435,435,435,435,435,435,435,435,435,435,436,436,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
/* block 29 */
-163,438,438,163,438,163,438,438,438,438,438,163,438,438,438,438,
-438,438,438,438,438,438,438,438,438,438,438,438,438,438,438,438,
-438,438,438,438,163,438,163,438,438,438,438,438,438,438,438,438,
-438,439,438,440,439,439,439,439,439,439,441,439,439,438,163,163,
-442,442,442,442,442,163,443,163,444,444,444,444,444,439,163,163,
-445,445,445,445,445,445,445,445,445,445,163,163,438,438,438,438,
+163,437,437,163,437,163,437,437,437,437,437,163,437,437,437,437,
+437,437,437,437,437,437,437,437,437,437,437,437,437,437,437,437,
+437,437,437,437,163,437,163,437,437,437,437,437,437,437,437,437,
+437,438,437,439,438,438,438,438,438,438,440,438,438,437,163,163,
+441,441,441,441,441,163,442,163,443,443,443,443,443,438,444,163,
+445,445,445,445,445,445,445,445,445,445,163,163,437,437,437,437,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
@@ -2696,274 +2710,274 @@ const uint16_t PRIV(ucd_stage2)[] = { /* 76800 bytes, block = 128 */
163,456,456,456,456,456,456,457,456,457,456,456,456,456,456,458,
/* block 31 */
-456,456,450,450,459,448,450,450,446,446,446,446,446,456,456,456,
+456,456,459,459,460,448,450,450,446,446,446,446,446,456,456,456,
456,456,456,456,456,456,456,456,163,456,456,456,456,456,456,456,
456,456,456,456,456,456,456,456,456,456,456,456,456,456,456,456,
456,456,456,456,456,456,456,456,456,456,456,456,456,163,447,447,
447,447,447,447,447,447,450,447,447,447,447,447,447,163,447,447,
-448,448,448,448,448,460,460,460,460,448,448,163,163,163,163,163,
+448,448,448,448,448,461,461,461,461,448,448,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
/* block 32 */
-461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,
-461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,
-461,461,461,461,461,461,461,461,461,461,461,462,462,463,463,463,
-463,464,463,463,463,463,463,465,462,466,466,464,464,463,463,461,
-467,467,467,467,467,467,467,467,467,467,468,468,469,469,469,469,
-461,461,461,461,461,461,464,464,463,463,461,461,461,461,463,463,
-463,461,462,470,470,461,461,462,462,470,470,470,470,470,461,461,
-461,463,463,463,463,461,461,461,461,461,461,461,461,461,461,461,
+462,462,462,462,462,462,462,462,462,462,462,462,462,462,462,462,
+462,462,462,462,462,462,462,462,462,462,462,462,462,462,462,462,
+462,462,462,462,462,462,462,462,462,462,462,463,463,464,464,464,
+464,465,464,464,464,464,464,466,463,467,467,465,465,464,464,462,
+468,468,468,468,468,468,468,468,468,468,469,469,470,470,470,470,
+462,462,462,462,462,462,465,465,464,464,462,462,462,462,464,464,
+464,462,463,471,471,462,462,463,463,471,471,471,471,471,462,462,
+462,464,464,464,464,462,462,462,462,462,462,462,462,462,462,462,
/* block 33 */
-461,461,463,462,464,463,463,470,470,470,470,470,470,471,461,470,
-472,472,472,472,472,472,472,472,472,472,470,470,462,463,473,473,
-474,474,474,474,474,474,474,474,474,474,474,474,474,474,474,474,
-474,474,474,474,474,474,474,474,474,474,474,474,474,474,474,474,
-474,474,474,474,474,474,163,474,163,163,163,163,163,474,163,163,
+462,462,464,463,465,464,464,471,471,471,471,471,471,472,462,471,
+473,473,473,473,473,473,473,473,473,473,471,471,463,464,474,474,
475,475,475,475,475,475,475,475,475,475,475,475,475,475,475,475,
475,475,475,475,475,475,475,475,475,475,475,475,475,475,475,475,
-475,475,475,475,475,475,475,475,475,475,475,476,477,475,475,475,
+475,475,475,475,475,475,163,475,163,163,163,163,163,475,163,163,
+476,476,476,476,476,476,476,476,476,476,476,476,476,476,476,476,
+476,476,476,476,476,476,476,476,476,476,476,476,476,476,476,476,
+476,476,476,476,476,476,476,476,476,476,476,477,478,476,476,476,
/* block 34 */
-478,478,478,478,478,478,478,478,478,478,478,478,478,478,478,478,
-478,478,478,478,478,478,478,478,478,478,478,478,478,478,478,478,
-478,478,478,478,478,478,478,478,478,478,478,478,478,478,478,478,
-478,478,478,478,478,478,478,478,478,478,478,478,478,478,478,478,
-478,478,478,478,478,478,478,478,478,478,478,478,478,478,478,478,
-478,478,478,478,478,478,478,478,478,478,478,478,478,478,478,479,
-480,481,481,481,481,481,481,481,481,481,481,481,481,481,481,481,
-481,481,481,481,481,481,481,481,481,481,481,481,481,481,481,481,
+479,479,479,479,479,479,479,479,479,479,479,479,479,479,479,479,
+479,479,479,479,479,479,479,479,479,479,479,479,479,479,479,479,
+479,479,479,479,479,479,479,479,479,479,479,479,479,479,479,479,
+479,479,479,479,479,479,479,479,479,479,479,479,479,479,479,479,
+479,479,479,479,479,479,479,479,479,479,479,479,479,479,479,479,
+479,479,479,479,479,479,479,479,479,479,479,479,479,479,479,480,
+481,482,482,482,482,482,482,482,482,482,482,482,482,482,482,482,
+482,482,482,482,482,482,482,482,482,482,482,482,482,482,482,482,
/* block 35 */
-481,481,481,481,481,481,481,481,481,481,481,481,481,481,481,481,
-481,481,481,481,481,481,481,481,481,481,481,481,481,481,481,481,
-481,481,481,481,481,481,481,481,482,482,482,482,482,482,482,482,
-482,482,482,482,482,482,482,482,482,482,482,482,482,482,482,482,
482,482,482,482,482,482,482,482,482,482,482,482,482,482,482,482,
482,482,482,482,482,482,482,482,482,482,482,482,482,482,482,482,
-482,482,482,482,482,482,482,482,482,482,482,482,482,482,482,482,
-482,482,482,482,482,482,482,482,482,482,482,482,482,482,482,482,
-
-/* block 36 */
-483,483,483,483,483,483,483,483,483,483,483,483,483,483,483,483,
+482,482,482,482,482,482,482,482,483,483,483,483,483,483,483,483,
483,483,483,483,483,483,483,483,483,483,483,483,483,483,483,483,
483,483,483,483,483,483,483,483,483,483,483,483,483,483,483,483,
483,483,483,483,483,483,483,483,483,483,483,483,483,483,483,483,
-483,483,483,483,483,483,483,483,483,163,483,483,483,483,163,163,
-483,483,483,483,483,483,483,163,483,163,483,483,483,483,163,163,
483,483,483,483,483,483,483,483,483,483,483,483,483,483,483,483,
483,483,483,483,483,483,483,483,483,483,483,483,483,483,483,483,
+/* block 36 */
+484,484,484,484,484,484,484,484,484,484,484,484,484,484,484,484,
+484,484,484,484,484,484,484,484,484,484,484,484,484,484,484,484,
+484,484,484,484,484,484,484,484,484,484,484,484,484,484,484,484,
+484,484,484,484,484,484,484,484,484,484,484,484,484,484,484,484,
+484,484,484,484,484,484,484,484,484,163,484,484,484,484,163,163,
+484,484,484,484,484,484,484,163,484,163,484,484,484,484,163,163,
+484,484,484,484,484,484,484,484,484,484,484,484,484,484,484,484,
+484,484,484,484,484,484,484,484,484,484,484,484,484,484,484,484,
+
/* block 37 */
-483,483,483,483,483,483,483,483,483,163,483,483,483,483,163,163,
-483,483,483,483,483,483,483,483,483,483,483,483,483,483,483,483,
-483,483,483,483,483,483,483,483,483,483,483,483,483,483,483,483,
-483,163,483,483,483,483,163,163,483,483,483,483,483,483,483,163,
-483,163,483,483,483,483,163,163,483,483,483,483,483,483,483,483,
-483,483,483,483,483,483,483,163,483,483,483,483,483,483,483,483,
-483,483,483,483,483,483,483,483,483,483,483,483,483,483,483,483,
-483,483,483,483,483,483,483,483,483,483,483,483,483,483,483,483,
+484,484,484,484,484,484,484,484,484,163,484,484,484,484,163,163,
+484,484,484,484,484,484,484,484,484,484,484,484,484,484,484,484,
+484,484,484,484,484,484,484,484,484,484,484,484,484,484,484,484,
+484,163,484,484,484,484,163,163,484,484,484,484,484,484,484,163,
+484,163,484,484,484,484,163,163,484,484,484,484,484,484,484,484,
+484,484,484,484,484,484,484,163,484,484,484,484,484,484,484,484,
+484,484,484,484,484,484,484,484,484,484,484,484,484,484,484,484,
+484,484,484,484,484,484,484,484,484,484,484,484,484,484,484,484,
/* block 38 */
-483,483,483,483,483,483,483,483,483,483,483,483,483,483,483,483,
-483,163,483,483,483,483,163,163,483,483,483,483,483,483,483,483,
-483,483,483,483,483,483,483,483,483,483,483,483,483,483,483,483,
-483,483,483,483,483,483,483,483,483,483,483,483,483,483,483,483,
-483,483,483,483,483,483,483,483,483,483,483,483,483,483,483,483,
-483,483,483,483,483,483,483,483,483,483,483,163,163,484,484,484,
-485,486,487,486,486,486,486,487,487,488,488,488,488,488,488,488,
-488,488,489,489,489,489,489,489,489,489,489,489,489,163,163,163,
+484,484,484,484,484,484,484,484,484,484,484,484,484,484,484,484,
+484,163,484,484,484,484,163,163,484,484,484,484,484,484,484,484,
+484,484,484,484,484,484,484,484,484,484,484,484,484,484,484,484,
+484,484,484,484,484,484,484,484,484,484,484,484,484,484,484,484,
+484,484,484,484,484,484,484,484,484,484,484,484,484,484,484,484,
+484,484,484,484,484,484,484,484,484,484,484,163,163,485,485,485,
+486,487,488,487,487,487,487,488,488,489,489,489,489,489,489,489,
+489,489,490,490,490,490,490,490,490,490,490,490,490,163,163,163,
/* block 39 */
-483,483,483,483,483,483,483,483,483,483,483,483,483,483,483,483,
-490,490,490,490,490,490,490,490,490,490,163,163,163,163,163,163,
-491,491,491,491,491,491,491,491,491,491,491,491,491,491,491,491,
-491,491,491,491,491,491,491,491,491,491,491,491,491,491,491,491,
-491,491,491,491,491,491,491,491,491,491,491,491,491,491,491,491,
-491,491,491,491,491,491,491,491,491,491,491,491,491,491,491,491,
-491,491,491,491,491,491,491,491,491,491,491,491,491,491,491,491,
-492,492,492,492,492,492,163,163,493,493,493,493,493,493,163,163,
+484,484,484,484,484,484,484,484,484,484,484,484,484,484,484,484,
+491,491,491,491,491,491,491,491,491,491,163,163,163,163,163,163,
+492,492,492,492,492,492,492,492,492,492,492,492,492,492,492,492,
+492,492,492,492,492,492,492,492,492,492,492,492,492,492,492,492,
+492,492,492,492,492,492,492,492,492,492,492,492,492,492,492,492,
+492,492,492,492,492,492,492,492,492,492,492,492,492,492,492,492,
+492,492,492,492,492,492,492,492,492,492,492,492,492,492,492,492,
+493,493,493,493,493,493,163,163,494,494,494,494,494,494,163,163,
/* block 40 */
-494,495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,
-495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,
-495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,
-495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,
-495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,
-495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,
-495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,
-495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,
+495,496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,
+496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,
+496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,
+496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,
+496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,
+496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,
+496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,
+496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,
/* block 41 */
-495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,
-495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,
-495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,
-495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,
-495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,
-495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,
-495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,
-495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,
+496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,
+496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,
+496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,
+496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,
+496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,
+496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,
+496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,
+496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,
/* block 42 */
-495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,
-495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,
-495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,
-495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,
-495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,
-495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,
-495,495,495,495,495,495,495,495,495,495,495,495,495,496,497,495,
-495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,
+496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,
+496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,
+496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,
+496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,
+496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,
+496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,
+496,496,496,496,496,496,496,496,496,496,496,496,496,497,498,496,
+496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,
/* block 43 */
-498,499,499,499,499,499,499,499,499,499,499,499,499,499,499,499,
-499,499,499,499,499,499,499,499,499,499,499,500,501,163,163,163,
-502,502,502,502,502,502,502,502,502,502,502,502,502,502,502,502,
-502,502,502,502,502,502,502,502,502,502,502,502,502,502,502,502,
-502,502,502,502,502,502,502,502,502,502,502,502,502,502,502,502,
-502,502,502,502,502,502,502,502,502,502,502,502,502,502,502,502,
-502,502,502,502,502,502,502,502,502,502,502,503,503,503,504,504,
-504,502,502,502,502,502,502,502,502,163,163,163,163,163,163,163,
+499,500,500,500,500,500,500,500,500,500,500,500,500,500,500,500,
+500,500,500,500,500,500,500,500,500,500,500,501,502,163,163,163,
+503,503,503,503,503,503,503,503,503,503,503,503,503,503,503,503,
+503,503,503,503,503,503,503,503,503,503,503,503,503,503,503,503,
+503,503,503,503,503,503,503,503,503,503,503,503,503,503,503,503,
+503,503,503,503,503,503,503,503,503,503,503,503,503,503,503,503,
+503,503,503,503,503,503,503,503,503,503,503,504,504,504,505,505,
+505,503,503,503,503,503,503,503,503,163,163,163,163,163,163,163,
/* block 44 */
-505,505,505,505,505,505,505,505,505,505,505,505,505,505,505,505,
-505,505,506,506,507,508,163,163,163,163,163,163,163,163,163,505,
-509,509,509,509,509,509,509,509,509,509,509,509,509,509,509,509,
-509,509,510,510,511,512,512,163,163,163,163,163,163,163,163,163,
-513,513,513,513,513,513,513,513,513,513,513,513,513,513,513,513,
-513,513,514,514,163,163,163,163,163,163,163,163,163,163,163,163,
-515,515,515,515,515,515,515,515,515,515,515,515,515,163,515,515,
-515,163,516,516,163,163,163,163,163,163,163,163,163,163,163,163,
+506,506,506,506,506,506,506,506,506,506,506,506,506,506,506,506,
+506,506,507,507,508,509,163,163,163,163,163,163,163,163,163,506,
+510,510,510,510,510,510,510,510,510,510,510,510,510,510,510,510,
+510,510,511,511,512,513,513,163,163,163,163,163,163,163,163,163,
+514,514,514,514,514,514,514,514,514,514,514,514,514,514,514,514,
+514,514,515,515,163,163,163,163,163,163,163,163,163,163,163,163,
+516,516,516,516,516,516,516,516,516,516,516,516,516,163,516,516,
+516,163,517,517,163,163,163,163,163,163,163,163,163,163,163,163,
/* block 45 */
-517,517,517,517,517,517,517,517,517,517,517,517,517,517,517,517,
-517,517,517,517,517,517,517,517,517,517,517,517,517,517,517,517,
-517,517,517,518,518,517,517,517,517,517,517,517,517,517,517,517,
-517,517,517,517,519,519,520,521,521,521,521,521,521,521,520,520,
-520,520,520,520,520,520,521,520,520,522,522,522,522,522,522,522,
-522,522,523,522,524,524,524,525,526,526,524,527,517,522,163,163,
-528,528,528,528,528,528,528,528,528,528,163,163,163,163,163,163,
+518,518,518,518,518,518,518,518,518,518,518,518,518,518,518,518,
+518,518,518,518,518,518,518,518,518,518,518,518,518,518,518,518,
+518,518,518,519,519,518,518,518,518,518,518,518,518,518,518,518,
+518,518,518,518,520,520,521,522,522,522,522,522,522,522,521,521,
+521,521,521,521,521,521,522,521,521,523,523,523,523,523,523,523,
+523,523,524,523,525,525,525,526,527,527,525,528,518,523,163,163,
529,529,529,529,529,529,529,529,529,529,163,163,163,163,163,163,
+530,530,530,530,530,530,530,530,530,530,163,163,163,163,163,163,
/* block 46 */
-530,530,531,532,533,531,534,530,533,535,536,537,537,537,538,537,
-539,539,539,539,539,539,539,539,539,539,163,163,163,163,163,163,
-540,540,540,540,540,540,540,540,540,540,540,540,540,540,540,540,
-540,540,540,540,540,540,540,540,540,540,540,540,540,540,540,540,
-540,540,540,541,540,540,540,540,540,540,540,540,540,540,540,540,
-540,540,540,540,540,540,540,540,540,540,540,540,540,540,540,540,
-540,540,540,540,540,540,540,540,540,540,540,540,540,540,540,540,
-540,540,540,540,540,540,540,540,540,163,163,163,163,163,163,163,
+531,531,532,533,534,532,535,531,534,536,537,538,538,538,539,538,
+540,540,540,540,540,540,540,540,540,540,163,163,163,163,163,163,
+541,541,541,541,541,541,541,541,541,541,541,541,541,541,541,541,
+541,541,541,541,541,541,541,541,541,541,541,541,541,541,541,541,
+541,541,541,542,541,541,541,541,541,541,541,541,541,541,541,541,
+541,541,541,541,541,541,541,541,541,541,541,541,541,541,541,541,
+541,541,541,541,541,541,541,541,541,541,541,541,541,541,541,541,
+541,541,541,541,541,541,541,541,541,163,163,163,163,163,163,163,
/* block 47 */
-540,540,540,540,540,542,542,540,540,540,540,540,540,540,540,540,
-540,540,540,540,540,540,540,540,540,540,540,540,540,540,540,540,
-540,540,540,540,540,540,540,540,540,543,540,163,163,163,163,163,
-495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,
-495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,
-495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,
-495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,
-495,495,495,495,495,495,163,163,163,163,163,163,163,163,163,163,
+541,541,541,541,541,543,543,541,541,541,541,541,541,541,541,541,
+541,541,541,541,541,541,541,541,541,541,541,541,541,541,541,541,
+541,541,541,541,541,541,541,541,541,544,541,163,163,163,163,163,
+496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,
+496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,
+496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,
+496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,
+496,496,496,496,496,496,163,163,163,163,163,163,163,163,163,163,
/* block 48 */
-544,544,544,544,544,544,544,544,544,544,544,544,544,544,544,544,
-544,544,544,544,544,544,544,544,544,544,544,544,544,544,544,163,
-545,545,545,546,546,546,546,545,545,546,546,546,163,163,163,163,
-546,546,545,546,546,546,546,546,546,547,547,547,163,163,163,163,
-548,163,163,163,549,549,550,550,550,550,550,550,550,550,550,550,
-551,551,551,551,551,551,551,551,551,551,551,551,551,551,551,551,
-551,551,551,551,551,551,551,551,551,551,551,551,551,551,163,163,
-551,551,551,551,551,163,163,163,163,163,163,163,163,163,163,163,
+545,545,545,545,545,545,545,545,545,545,545,545,545,545,545,545,
+545,545,545,545,545,545,545,545,545,545,545,545,545,545,545,163,
+546,546,546,547,547,547,547,546,546,547,547,547,163,163,163,163,
+547,547,546,547,547,547,547,547,547,548,548,548,163,163,163,163,
+549,163,163,163,550,550,551,551,551,551,551,551,551,551,551,551,
+552,552,552,552,552,552,552,552,552,552,552,552,552,552,552,552,
+552,552,552,552,552,552,552,552,552,552,552,552,552,552,163,163,
+552,552,552,552,552,163,163,163,163,163,163,163,163,163,163,163,
/* block 49 */
-552,552,552,552,552,552,552,552,552,552,552,552,552,552,552,552,
-552,552,552,552,552,552,552,552,552,552,552,552,552,552,552,552,
-552,552,552,552,552,552,552,552,552,552,552,552,163,163,163,163,
-552,552,552,552,552,553,553,553,552,552,553,552,552,552,552,552,
-552,552,552,552,552,552,552,552,552,552,163,163,163,163,163,163,
-554,554,554,554,554,554,554,554,554,554,555,163,163,163,556,556,
-557,557,557,557,557,557,557,557,557,557,557,557,557,557,557,557,
-557,557,557,557,557,557,557,557,557,557,557,557,557,557,557,557,
+553,553,553,553,553,553,553,553,553,553,553,553,553,553,553,553,
+553,553,553,553,553,553,553,553,553,553,553,553,553,553,553,553,
+553,553,553,553,553,553,553,553,553,553,553,553,163,163,163,163,
+553,553,553,553,553,554,554,554,553,553,554,553,553,553,553,553,
+553,553,553,553,553,553,553,553,553,553,163,163,163,163,163,163,
+555,555,555,555,555,555,555,555,555,555,556,163,163,163,557,557,
+558,558,558,558,558,558,558,558,558,558,558,558,558,558,558,558,
+558,558,558,558,558,558,558,558,558,558,558,558,558,558,558,558,
/* block 50 */
-558,558,558,558,558,558,558,558,558,558,558,558,558,558,558,558,
-558,558,558,558,558,558,558,559,559,560,560,559,163,163,561,561,
-562,562,562,562,562,562,562,562,562,562,562,562,562,562,562,562,
-562,562,562,562,562,562,562,562,562,562,562,562,562,562,562,562,
-562,562,562,562,562,562,562,562,562,562,562,562,562,562,562,562,
-562,562,562,562,562,563,564,563,564,564,564,564,564,564,564,163,
-565,566,564,566,566,564,564,564,564,564,564,564,564,563,563,563,
-563,563,563,564,564,567,567,567,567,567,567,567,567,163,163,567,
+559,559,559,559,559,559,559,559,559,559,559,559,559,559,559,559,
+559,559,559,559,559,559,559,560,560,561,561,560,163,163,562,562,
+563,563,563,563,563,563,563,563,563,563,563,563,563,563,563,563,
+563,563,563,563,563,563,563,563,563,563,563,563,563,563,563,563,
+563,563,563,563,563,563,563,563,563,563,563,563,563,563,563,563,
+563,563,563,563,563,564,565,564,565,565,565,565,565,565,565,163,
+566,567,565,567,567,565,565,565,565,565,565,565,565,564,564,564,
+564,564,564,565,565,568,568,568,568,568,568,568,568,163,163,568,
/* block 51 */
-568,568,568,568,568,568,568,568,568,568,163,163,163,163,163,163,
-568,568,568,568,568,568,568,568,568,568,163,163,163,163,163,163,
-569,569,569,569,569,569,569,570,571,571,571,571,569,569,163,163,
-154,154,154,154,154,154,154,154,154,154,154,154,154,154,572,573,
-573,154,154,154,154,154,154,154,154,154,154,154,573,573,573,163,
+569,569,569,569,569,569,569,569,569,569,163,163,163,163,163,163,
+569,569,569,569,569,569,569,569,569,569,163,163,163,163,163,163,
+570,570,570,570,570,570,570,571,572,572,572,572,570,570,163,163,
+154,154,154,154,154,154,154,154,154,154,154,154,154,154,573,574,
+574,154,154,154,154,154,154,154,154,154,154,154,574,574,574,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
/* block 52 */
-574,574,574,574,575,576,576,576,576,576,576,576,576,576,576,576,
-576,576,576,576,576,576,576,576,576,576,576,576,576,576,576,576,
-576,576,576,576,576,576,576,576,576,576,576,576,576,576,576,576,
-576,576,576,576,577,578,574,574,574,574,574,575,574,575,575,575,
-575,575,574,575,579,576,576,576,576,576,576,576,576,163,163,163,
-580,580,580,580,580,580,580,580,580,580,581,581,582,583,581,581,
-582,584,584,584,584,584,584,584,584,584,584,577,577,577,577,577,
-577,577,577,577,584,584,584,584,584,584,584,584,584,581,581,163,
+575,575,575,575,576,577,577,577,577,577,577,577,577,577,577,577,
+577,577,577,577,577,577,577,577,577,577,577,577,577,577,577,577,
+577,577,577,577,577,577,577,577,577,577,577,577,577,577,577,577,
+577,577,577,577,578,579,575,575,575,575,575,576,575,576,576,576,
+576,576,575,576,580,577,577,577,577,577,577,577,577,163,163,163,
+581,581,581,581,581,581,581,581,581,581,582,582,583,584,582,582,
+583,585,585,585,585,585,585,585,585,585,585,578,578,578,578,578,
+578,578,578,578,585,585,585,585,585,585,585,585,585,582,582,163,
/* block 53 */
-585,585,586,587,587,587,587,587,587,587,587,587,587,587,587,587,
-587,587,587,587,587,587,587,587,587,587,587,587,587,587,587,587,
-587,586,585,585,585,585,586,586,585,585,588,589,585,585,587,587,
-590,590,590,590,590,590,590,590,590,590,587,587,587,587,587,587,
-591,591,591,591,591,591,591,591,591,591,591,591,591,591,591,591,
-591,591,591,591,591,591,591,591,591,591,591,591,591,591,591,591,
-591,591,591,591,591,591,592,593,594,594,593,593,593,594,593,594,
-594,594,595,595,163,163,163,163,163,163,163,163,596,596,596,596,
+586,586,587,588,588,588,588,588,588,588,588,588,588,588,588,588,
+588,588,588,588,588,588,588,588,588,588,588,588,588,588,588,588,
+588,587,586,586,586,586,587,587,586,586,589,590,586,586,588,588,
+591,591,591,591,591,591,591,591,591,591,588,588,588,588,588,588,
+592,592,592,592,592,592,592,592,592,592,592,592,592,592,592,592,
+592,592,592,592,592,592,592,592,592,592,592,592,592,592,592,592,
+592,592,592,592,592,592,593,594,595,595,594,594,594,595,594,595,
+595,595,596,596,163,163,163,163,163,163,163,163,597,597,597,597,
/* block 54 */
-597,597,597,597,597,597,597,597,597,597,597,597,597,597,597,597,
-597,597,597,597,597,597,597,597,597,597,597,597,597,597,597,597,
-597,597,597,597,598,598,598,598,598,598,598,598,599,599,599,599,
-599,599,599,599,598,598,600,601,163,163,163,602,602,603,603,603,
-604,604,604,604,604,604,604,604,604,604,163,163,163,597,597,597,
-605,605,605,605,605,605,605,605,605,605,606,606,606,606,606,606,
-606,606,606,606,606,606,606,606,606,606,606,606,606,606,606,606,
-606,606,606,606,606,606,606,606,607,607,607,608,607,607,609,609,
+598,598,598,598,598,598,598,598,598,598,598,598,598,598,598,598,
+598,598,598,598,598,598,598,598,598,598,598,598,598,598,598,598,
+598,598,598,598,599,599,599,599,599,599,599,599,600,600,600,600,
+600,600,600,600,599,599,601,602,163,163,163,603,603,604,604,604,
+605,605,605,605,605,605,605,605,605,605,163,163,163,598,598,598,
+606,606,606,606,606,606,606,606,606,606,607,607,607,607,607,607,
+607,607,607,607,607,607,607,607,607,607,607,607,607,607,607,607,
+607,607,607,607,607,607,607,607,608,608,608,609,608,608,610,610,
/* block 55 */
-610,611,612,613,614,615,616,617,618,163,163,163,163,163,163,163,
-619,619,619,619,619,619,619,619,619,619,619,619,619,619,619,619,
-619,619,619,619,619,619,619,619,619,619,619,619,619,619,619,619,
-619,619,619,619,619,619,619,619,619,619,619,163,163,619,619,619,
-620,620,620,620,620,620,620,620,163,163,163,163,163,163,163,163,
-621,622,621,623,622,624,624,625,624,625,626,622,625,625,622,622,
-625,627,622,622,622,622,622,622,622,628,629,630,630,624,630,630,
-630,630,631,632,633,629,629,634,635,635,636,163,163,163,163,163,
+611,612,613,614,615,616,617,618,619,163,163,163,163,163,163,163,
+620,620,620,620,620,620,620,620,620,620,620,620,620,620,620,620,
+620,620,620,620,620,620,620,620,620,620,620,620,620,620,620,620,
+620,620,620,620,620,620,620,620,620,620,620,163,163,620,620,620,
+621,621,621,621,621,621,621,621,163,163,163,163,163,163,163,163,
+622,623,622,624,623,625,625,626,625,626,627,623,626,626,623,623,
+626,628,623,623,623,623,623,623,623,629,630,631,631,625,631,631,
+631,631,632,633,634,630,630,635,636,636,637,163,163,163,163,163,
/* block 56 */
70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70,
70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70,
- 70, 70, 70, 70, 70, 70,221,221,221,221,221,637,147,147,147,147,
+ 70, 70, 70, 70, 70, 70,221,221,221,221,221,638,147,147,147,147,
147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,
147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,
-147,147,147,147,147,147,147,147,147,147,147,147,147,638,638,638,
-638,638,148,147,147,147,638,638,638,638,638, 70, 70, 70, 70, 70,
- 70, 70, 70, 70, 70, 70, 70, 70,639,640, 70, 70, 70,641, 70, 70,
+147,147,147,147,147,147,147,147,147,147,147,147,147,639,639,639,
+639,639,148,147,147,147,639,639,639,639,639, 70, 70, 70, 70, 70,
+ 70, 70, 70, 70, 70, 70, 70, 70,640,641, 70, 70, 70,642, 70, 70,
/* block 57 */
- 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70,642, 70,
- 70, 70, 70, 70, 70, 70,643, 70, 70, 70, 70,644,644,644,644,644,
-644,644,644,644,645,644,644,644,645,644,644,644,644,644,644,644,
-644,644,644,644,644,644,644,644,644,644,644,644,644,644,644,646,
-647,647,158,158,154,154,154,154,154,154,154,154,154,154,154,154,
+ 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70,643, 70,
+ 70, 70, 70, 70, 70, 70,644, 70, 70, 70, 70,645,645,645,645,645,
+645,645,645,645,646,645,645,645,646,645,645,645,645,645,645,645,
+645,645,645,645,645,645,645,645,645,645,645,645,645,645,645,647,
+648,648,158,158,154,154,154,154,154,154,154,154,154,154,154,154,
158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,
-158,158,158,158,158,158,158,573,573,573,573,573,573,573,573,573,
-573,573,573,573,573,154,154,154,648,154,649,154,154,154,154,154,
+158,158,158,158,158,158,158,574,574,574,574,574,574,574,574,574,
+574,574,574,574,574,154,154,154,649,154,650,154,154,154,154,154,
/* block 58 */
65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66,
@@ -2972,12 +2986,12 @@ const uint16_t PRIV(ucd_stage2)[] = { /* 76800 bytes, block = 128 */
65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66,
65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66,
65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66,
-650,651, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66,
+651,652, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66,
65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66,
/* block 59 */
65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66,
- 65, 66, 65, 66, 65, 66, 69, 69, 69, 69,652,653, 70, 70,654, 70,
+ 65, 66, 65, 66, 65, 66, 69, 69, 69, 69,653,654, 70, 70,655, 70,
65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66,
65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66,
65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 67, 65, 66, 65, 66,
@@ -2986,123 +3000,123 @@ const uint16_t PRIV(ucd_stage2)[] = { /* 76800 bytes, block = 128 */
65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66,
/* block 60 */
-655,655,655,655,655,655,655,655,656,656,656,656,656,656,656,656,
-655,655,655,655,655,655,163,163,656,656,656,656,656,656,163,163,
-655,655,655,655,655,655,655,655,656,656,656,656,656,656,656,656,
-655,655,655,655,655,655,655,655,656,656,656,656,656,656,656,656,
-655,655,655,655,655,655,163,163,656,656,656,656,656,656,163,163,
-173,655,173,655,173,655,173,655,163,656,163,656,163,656,163,656,
-655,655,655,655,655,655,655,655,656,656,656,656,656,656,656,656,
-657,657,658,658,658,658,659,659,660,660,661,661,662,662,163,163,
+656,656,656,656,656,656,656,656,657,657,657,657,657,657,657,657,
+656,656,656,656,656,656,163,163,657,657,657,657,657,657,163,163,
+656,656,656,656,656,656,656,656,657,657,657,657,657,657,657,657,
+656,656,656,656,656,656,656,656,657,657,657,657,657,657,657,657,
+656,656,656,656,656,656,163,163,657,657,657,657,657,657,163,163,
+173,656,173,656,173,656,173,656,163,657,163,657,163,657,163,657,
+656,656,656,656,656,656,656,656,657,657,657,657,657,657,657,657,
+658,658,659,659,659,659,660,660,661,661,662,662,663,663,163,163,
/* block 61 */
-663,663,663,663,663,663,663,663,664,664,664,664,664,664,664,664,
-663,663,663,663,663,663,663,663,664,664,664,664,664,664,664,664,
-663,663,663,663,663,663,663,663,664,664,664,664,664,664,664,664,
-655,655,665,666,665,163,173,665,656,656,667,667,668,162,669,162,
-162,162,665,666,665,163,173,665,670,670,670,670,668,162,162,162,
-655,655,173,173,163,163,173,173,656,656,671,671,163,162,162,162,
-655,655,173,173,173,215,173,173,656,656,672,672,220,162,162,162,
-163,163,665,666,665,163,173,665,673,673,674,674,668,162,162,163,
+664,664,664,664,664,664,664,664,665,665,665,665,665,665,665,665,
+664,664,664,664,664,664,664,664,665,665,665,665,665,665,665,665,
+664,664,664,664,664,664,664,664,665,665,665,665,665,665,665,665,
+656,656,666,667,666,163,173,666,657,657,668,668,669,162,670,162,
+162,162,666,667,666,163,173,666,671,671,671,671,669,162,162,162,
+656,656,173,173,163,163,173,173,657,657,672,672,163,162,162,162,
+656,656,173,173,173,215,173,173,657,657,673,673,220,162,162,162,
+163,163,666,667,666,163,173,666,674,674,675,675,669,162,162,163,
/* block 62 */
-675,675,675,675,675,675,675,675,675,675,675, 51,676,677,678,679,
-680,680,680,680,680,680,681, 43,682,683,684,685,685,686,684,685,
- 43, 43, 43, 43,687, 43, 43,687,688,689,690,691,692,693,694,695,
-696,696,697,697,697, 43, 43, 43, 43, 49, 57, 43,698,699, 43,700,
-701, 43, 43, 43,702,703,704,699,699,698, 43, 43, 43, 43, 43, 43,
- 43, 43, 50,705,700, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43,675,
- 51,706,706,706,706,707,708,709,710,711,712,712,712,712,712,712,
- 54,645,163,163, 54, 54, 54, 54, 54, 54,713,714,715,716,717,644,
+676,676,676,676,676,676,676,676,676,676,676, 51,677,678,679,680,
+681,681,681,681,681,681,682, 43,683,684,685,686,686,687,685,686,
+ 43, 43, 43, 43,688, 43, 43,688,689,690,691,692,693,694,695,696,
+697,697,698,698,698, 43, 43, 43, 43, 49, 57, 43,699,700, 43,701,
+702, 43, 43, 43,703,704,705,700,700,699, 43, 43, 43, 43, 43, 43,
+ 43, 43, 50,706,701, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43,676,
+ 51,707,707,707,707,708,709,710,711,712,713,713,713,713,713,713,
+ 54,646,163,163, 54, 54, 54, 54, 54, 54,714,715,716,717,718,645,
/* block 63 */
- 54, 54, 54, 54, 54, 54, 54, 54, 54, 54,713,714,715,716,717,163,
-644,644,644,644,644,644,644,644,644,644,644,644,644,163,163,163,
-431,431,431,431,431,431,431,431,431,431,431,431,431,431,431,431,
-431,431,431,431,431,431,431,431,431,431,431,431,431,431,431,431,
-431,718,718,718,718,718,718,718,718,718,718,718,718,718,718,718,
-719,719,719,719,719,719,719,719,719,719,719,719,719,720,720,720,
-720,719,720,721,720,719,719,158,158,158,158,719,719,719,719,719,
-722,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+ 54, 54, 54, 54, 54, 54, 54, 54, 54, 54,714,715,716,717,718,163,
+645,645,645,645,645,645,645,645,645,645,645,645,645,163,163,163,
+430,430,430,430,430,430,430,430,430,430,430,430,430,430,430,430,
+430,430,430,430,430,430,430,430,430,430,430,430,430,430,430,430,
+430,719,719,719,719,719,719,719,719,719,719,719,719,719,719,719,
+720,720,720,720,720,720,720,720,720,720,720,720,720,721,721,721,
+721,720,721,722,721,720,720,158,158,158,158,720,720,720,720,720,
+723,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
/* block 64 */
-723,723,724,723,723,723,723,724,723,723,725,724,724,724,725,725,
-724,724,724,725,723,724,723,723,726,724,724,724,724,724,723,723,
-723,723,727,723,724,723,728,723,724,729,730,731,724,724,732,725,
-724,724,733,724,725,734,734,734,734,735,723,723,725,725,724,724,
-715,715,715,715,715,724,725,725,736,736,723,715,723,723,737,460,
+724,724,725,724,724,724,724,725,724,724,726,725,725,725,726,726,
+725,725,725,726,724,725,724,724,727,725,725,725,725,725,724,724,
+724,724,728,724,725,724,729,724,725,730,731,732,725,725,733,726,
+725,725,734,725,726,735,735,735,735,736,724,724,726,726,725,725,
+716,716,716,716,716,725,726,726,737,737,724,716,724,724,738,461,
58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58,
-738,738,738,738,738,738,738,738,738,738,738,738,738,738,738,738,
739,739,739,739,739,739,739,739,739,739,739,739,739,739,739,739,
+740,740,740,740,740,740,740,740,740,740,740,740,740,740,740,740,
/* block 65 */
-740,740,740, 65, 66,740,740,740,740, 58,723,723,163,163,163,163,
- 50, 50, 50, 50,741,742,742,742,742,742, 50, 50,743,743,743,743,
- 50,743,743, 50,743,743, 50,743, 45,742,742,743,743,743, 50, 45,
-743,743, 45, 45, 45, 45,743,743, 45, 45, 45, 45,743,743,743,743,
-743,743,743,743,743,743,743,743,743,743,743,743,743,743, 50, 50,
-743,743, 50,743, 50,743,743,743,743,743,743,743, 45,743, 45, 45,
- 45, 45, 45, 45,743,743, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45,
+741,741,741, 65, 66,741,741,741,741, 58,724,724,163,163,163,163,
+ 50, 50, 50, 50,742,743,743,743,743,743, 50, 50,744,744,744,744,
+ 50,744,744, 50,744,744, 50,744, 45,743,743,744,744,744, 50, 45,
+744,744, 45, 45, 45, 45,744,744, 45, 45, 45, 45,744,744,744,744,
+744,744,744,744,744,744,744,744,744,744,744,744,744,744, 50, 50,
+744,744, 50,744, 50,744,744,744,744,744,744,744, 45,744, 45, 45,
+ 45, 45, 45, 45,744,744, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45,
45, 45, 45, 45, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50,
/* block 66 */
- 50, 50, 50, 50, 50, 50, 50, 50,744,744,744,744,744,744, 50, 50,
- 50, 50,745, 53, 50,744, 50, 50, 50, 50, 50, 50, 50, 50, 50,744,
-744,744,744, 50,744, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50,
- 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50,744,744, 50, 50,
- 50, 50, 50,744, 50,744, 50, 50, 50, 50, 50, 50,744, 50, 50, 50,
- 50, 50,744,744,744,744, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50,
- 50, 50, 50, 50,744,744,744,744,744,744,744,744, 50, 50,744,744,
-744,744,744,744,744,744,744,744,744,744,744,744,744,744,744,744,
+ 50, 50, 50, 50, 50, 50, 50, 50,745,745,745,745,745,745, 50, 50,
+ 50, 50,746, 53, 50,745, 50, 50, 50, 50, 50, 50, 50, 50, 50,745,
+745,745,745, 50,745, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50,
+ 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50,745,745, 50, 50,
+ 50, 50, 50,745, 50,745, 50, 50, 50, 50, 50, 50,745, 50, 50, 50,
+ 50, 50,745,745,745,745, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50,
+ 50, 50, 50, 50,745,745,745,745,745,745,745,745, 50, 50,745,745,
+745,745,745,745,745,745,745,745,745,745,745,745,745,745,745,745,
/* block 67 */
-744,744,744,744,744,744,744,744,744,744,744,744, 50, 50, 50,744,
-744,744,744, 50, 50, 50, 50, 50,744, 50, 50, 50, 50, 50, 50, 50,
- 50, 50,744,744, 50, 50,744, 50,744,744, 50,744, 50, 50, 50, 50,
-744,744,744,744,744,744,744,744,744, 50, 50, 50, 50, 50, 50, 50,
- 50, 50, 50, 50, 50, 50, 50, 50, 50,744,744,744,744,744, 50, 50,
-744,744, 50, 50, 50, 50,744,744,744,744,744,744,744,744,744,744,
-744,744,744,744,744,744,744,744,744,744,744,744,744,744, 50, 50,
-744,744,744,744,744, 50,744,744, 50, 50,744,744,744,744,744, 50,
+745,745,745,745,745,745,745,745,745,745,745,745, 50, 50, 50,745,
+745,745,745, 50, 50, 50, 50, 50,745, 50, 50, 50, 50, 50, 50, 50,
+ 50, 50,745,745, 50, 50,745, 50,745,745, 50,745, 50, 50, 50, 50,
+745,745,745,745,745,745,745,745,745, 50, 50, 50, 50, 50, 50, 50,
+ 50, 50, 50, 50, 50, 50, 50, 50, 50,745,745,745,745,745, 50, 50,
+745,745, 50, 50, 50, 50,745,745,745,745,745,745,745,745,745,745,
+745,745,745,745,745,745,745,745,745,745,745,745,745,745, 50, 50,
+745,745,745,745,745, 50,745,745, 50, 50,745,745,745,745,745, 50,
/* block 68 */
- 45, 45, 45, 45, 45, 45, 45, 45,746,747,746,747, 45, 45, 45, 45,
- 45, 45, 45, 45, 45, 45, 45, 45, 45, 45,748,748, 45, 45, 45, 45,
- 50, 50, 45, 45, 45, 45, 45, 45, 47,749,750, 45, 45, 45, 45, 45,
- 45, 45, 45, 45, 45, 45,751,751,751,751,751,751,751,751,751,751,
-751,751,751,751,751,751,751,751,751,751,751,751,751,751,751,751,
-751,751,751,751,751,751,751,751,751,751,751,751,751,751,751,751,
-751,751,751,751,751,751,751,751,751,751,751,751,751,751,751,751,
-751,751,751,751,751,751,751,751,751,751,751, 45, 50, 45, 45, 45,
+ 45, 45, 45, 45, 45, 45, 45, 45,747,748,747,748, 45, 45, 45, 45,
+ 45, 45, 45, 45, 45, 45, 45, 45, 45, 45,749,749, 45, 45, 45, 45,
+ 50, 50, 45, 45, 45, 45, 45, 45, 47,750,751, 45, 45, 45, 45, 45,
+ 45, 45, 45, 45, 45, 45,752,752,752,752,752,752,752,752,752,752,
+752,752,752,752,752,752,752,752,752,752,752,752,752,752,752,752,
+752,752,752,752,752,752,752,752,752,752,752,752,752,752,752,752,
+752,752,752,752,752,752,752,752,752,752,752,752,752,752,752,752,
+752,752,752,752,752,752,752,752,752,752,752, 45, 50, 45, 45, 45,
/* block 69 */
- 45, 45, 45, 45, 45, 45, 45, 45,752, 45, 45, 45, 45, 45, 45, 45,
- 45, 45, 45, 45, 45,751, 45, 45, 45, 45, 45, 50, 50, 50, 50, 50,
+ 45, 45, 45, 45, 45, 45, 45, 45,753, 45, 45, 45, 45, 45, 45, 45,
+ 45, 45, 45, 45, 45,752, 45, 45, 45, 45, 45, 50, 50, 50, 50, 50,
50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50,
- 50, 50, 50, 50,743,743, 45,743, 45, 45, 45, 45, 45, 45, 45, 45,
+ 50, 50, 50, 50,744,744, 45,744, 45, 45, 45, 45, 45, 45, 45, 45,
45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 47,
-743, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 50, 50, 50, 50,
- 50, 50,743, 45, 45, 45, 45, 45, 45,748,748,748,748, 47, 47, 47,
-748, 47, 47,748, 45, 45, 45, 45, 47, 47, 47, 45, 45, 45, 45, 45,
+744, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 50, 50, 50, 50,
+ 50, 50,744, 45, 45, 45, 45, 45, 45,749,749,749,749, 47, 47, 47,
+749, 47, 47,749, 45, 45, 45, 45, 47, 47, 47, 45, 45, 45, 45, 45,
/* block 70 */
45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45,
45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45,
- 45, 45, 45, 45, 45, 45, 45,753,753,753,753,753,753,753,753,753,
-753,753,753,753,753,753,753,753,753,753,753,753,753,753,753,753,
- 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45,753,753,753,753,753,
-753,753,753,753,753,753,753,753,753,753,753,753,753,753,753,753,
+ 45, 45, 45, 45, 45, 45, 45,754,754,754,754,754,754,754,754,754,
+754,754,754,754,754,754,754,754,754,754,754,754,754,754,754,754,
+ 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45,754,754,754,754,754,
+754,754,754,754,754,754,754,754,754,754,754,754,754,754,754,754,
58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58,
58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58,
/* block 71 */
58, 58, 58, 58, 58, 58, 58, 58, 54, 54, 54, 54, 54, 54, 54, 54,
- 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54,460,460,460,460,
-460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,
-460,460,460,460,460,460,754,754,754,754,754,754,754,754,754,754,
-754,754,755,754,754,754,754,754,754,754,754,754,754,754,754,754,
-756,756,756,756,756,756,756,756,756,756,756,756,756,756,756,756,
-756,756,756,756,756,756,756,756,756,756, 58, 58, 58, 58, 58, 58,
+ 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54,461,461,461,461,
+461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,
+461,461,461,461,461,461,755,755,755,755,755,755,755,755,755,755,
+755,755,756,755,755,755,755,755,755,755,755,755,755,755,755,755,
+757,757,757,757,757,757,757,757,757,757,757,757,757,757,757,757,
+757,757,757,757,757,757,757,757,757,757, 58, 58, 58, 58, 58, 58,
58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58,
/* block 72 */
@@ -3118,132 +3132,132 @@ const uint16_t PRIV(ucd_stage2)[] = { /* 76800 bytes, block = 128 */
/* block 73 */
45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45,
45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45,
-743,743, 45, 45, 45, 45, 45, 45, 45, 45, 47, 47, 45, 45,743,743,
-743,743,743,743,743,743,742, 50, 45, 45, 45, 45,743,743,743,743,
-742, 50, 45, 45, 45, 45,743,743, 45, 45,743,743, 45, 45, 45,743,
-743,743,743,743, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45,
- 45, 45,743, 45,743, 45, 45,743,743,743,743,743,743, 45, 45, 45,
- 45, 45, 45, 45, 45, 45, 45, 45, 50, 50, 50,741,741,757,757, 50,
+744,744, 45, 45, 45, 45, 45, 45, 45, 45, 47, 47, 45, 45,744,744,
+744,744,744,744,744,744,743, 50, 45, 45, 45, 45,744,744,744,744,
+743, 50, 45, 45, 45, 45,744,744, 45, 45,744,744, 45, 45, 45,744,
+744,744,744,744, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45,
+ 45, 45,744, 45,744, 45, 45,744,744,744,744,744,744, 45, 45, 45,
+ 45, 45, 45, 45, 45, 45, 45, 45, 50, 50, 50,742,742,758,758, 50,
/* block 74 */
- 47, 47, 47, 47, 47,758,743,752,752,752,752,752,752,752, 47,752,
-752, 47,752, 45,748,748,752,752, 47,752,752,752,752,759,752,752,
- 47,752, 47, 47,752,752, 47,752,752,752, 47,752,752,752, 47, 47,
-752,752,752,752,752,752,752,752, 47, 47, 47,752,752,752,752,752,
-742,752,742,752,752,752,752,752,748,748,748,748,748,748,748,748,
-748,748,748,748,752,752,752,752,752,752,752,752,752,752,752, 47,
-742,758,758,742,752, 47, 47,752, 47,752,752,752,752,758,758,760,
-752,752,752,752,752,752,752,752,752,752,752, 47,752,752, 47,748,
+ 47, 47, 47, 47, 47,759,744,753,753,753,753,753,753,753, 47,753,
+753, 47,753, 45,749,749,753,753, 47,753,753,753,753,760,753,753,
+ 47,753, 47, 47,753,753, 47,753,753,753, 47,753,753,753, 47, 47,
+753,753,753,753,753,753,753,753, 47, 47, 47,753,753,753,753,753,
+743,753,743,753,753,753,753,753,749,749,749,749,749,749,749,749,
+749,749,749,749,753,753,753,753,753,753,753,753,753,753,753, 47,
+743,759,759,743,753, 47, 47,753, 47,753,753,753,753,759,759,761,
+753,753,753,753,753,753,753,753,753,753,753, 47,753,753, 47,749,
/* block 75 */
-752,752,752,752,752,752, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45,
-752,752, 47,748, 47, 47, 47, 47,752, 47,752, 47, 47,752,752,752,
- 47,748,752,752,752,752,752, 47,752,752,748,748,761,752,752,752,
- 47, 47,752,752,752,752,752,752,752,752,752,752,752,748,748,752,
-752,752,752,752,748,748,752,752, 47,752,752,752,752,752,748, 47,
-752, 47,752, 47,748,752,752,752,752,752,752,752,752,752,752,752,
-752,752,752,752,752,752,752,752,752, 47,748,752,752,752,752,752,
- 47, 47,748,748, 47,748,752, 47, 47,759,748,752,752,748,752,752,
+753,753,753,753,753,753, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45,
+753,753, 47,749, 47, 47, 47, 47,753, 47,753, 47, 47,753,753,753,
+ 47,749,753,753,753,753,753, 47,753,753,749,749,762,753,753,753,
+ 47, 47,753,753,753,753,753,753,753,753,753,753,753,749,749,753,
+753,753,753,753,749,749,753,753, 47,753,753,753,753,753,749, 47,
+753, 47,753, 47,749,753,753,753,753,753,753,753,753,753,753,753,
+753,753,753,753,753,753,753,753,753, 47,749,753,753,753,753,753,
+ 47, 47,749,749, 47,749,753, 47, 47,760,749,753,753,749,753,753,
/* block 76 */
-752,752, 47,752,752,748, 45, 45, 47, 47,762,762,759,759,752, 47,
-752,752, 47, 45, 47, 45, 47, 45, 45, 45, 45, 45, 45, 47, 45, 45,
- 45, 47, 45, 45, 45, 45, 45, 45,748, 45, 45, 45, 45, 45, 45, 45,
+753,753, 47,753,753,749, 45, 45, 47, 47,763,763,760,760,753, 47,
+753,753, 47, 45, 47, 45, 47, 45, 45, 45, 45, 45, 45, 47, 45, 45,
+ 45, 47, 45, 45, 45, 45, 45, 45,749, 45, 45, 45, 45, 45, 45, 45,
45, 45, 45, 47, 47, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45,
- 45, 45, 45, 45, 47, 45, 45, 47, 45, 45, 45, 45,748, 45,748, 45,
- 45, 45, 45,748,748,748, 45,748, 45, 45, 45, 45, 45, 45, 45, 45,
- 45, 45, 45, 47, 47,752,752,752,703,704,703,704,703,704,703,704,
-703,704,703,704,703,704, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58,
+ 45, 45, 45, 45, 47, 45, 45, 47, 45, 45, 45, 45,749, 45,749, 45,
+ 45, 45, 45,749,749,749, 45,749, 45, 45, 45, 45, 45, 45, 45, 45,
+ 45, 45, 45, 47, 47,753,753,753,704,705,704,705,704,705,704,705,
+704,705,704,705,704,705, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58,
/* block 77 */
58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58,
- 58, 58, 58, 58, 45,748,748,748, 45, 45, 45, 45, 45, 45, 45, 45,
+ 58, 58, 58, 58, 45,749,749,749, 45, 45, 45, 45, 45, 45, 45, 45,
45, 47, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45,
-748, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45,748,
- 50, 50, 50,744,744,746,747, 50,744,744, 50,744, 50,744, 50, 50,
- 50, 50, 50, 50, 50,744,744, 50, 50, 50, 50, 50,744,744,744, 50,
- 50, 50,744,744,744,744,746,747,746,747,746,747,746,747,746,747,
+749, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45,749,
+ 50, 50, 50,745,745,747,748, 50,745,745, 50,745, 50,745, 50, 50,
+ 50, 50, 50, 50, 50,745,745, 50, 50, 50, 50, 50,745,745,745, 50,
+ 50, 50,745,745,745,745,747,748,747,748,747,748,747,748,747,748,
50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50,
/* block 78 */
-763,763,763,763,763,763,763,763,763,763,763,763,763,763,763,763,
-763,763,763,763,763,763,763,763,763,763,763,763,763,763,763,763,
-763,763,763,763,763,763,763,763,763,763,763,763,763,763,763,763,
-763,763,763,763,763,763,763,763,763,763,763,763,763,763,763,763,
-763,763,763,763,763,763,763,763,763,763,763,763,763,763,763,763,
-763,763,763,763,763,763,763,763,763,763,763,763,763,763,763,763,
-763,763,763,763,763,763,763,763,763,763,763,763,763,763,763,763,
-763,763,763,763,763,763,763,763,763,763,763,763,763,763,763,763,
+764,764,764,764,764,764,764,764,764,764,764,764,764,764,764,764,
+764,764,764,764,764,764,764,764,764,764,764,764,764,764,764,764,
+764,764,764,764,764,764,764,764,764,764,764,764,764,764,764,764,
+764,764,764,764,764,764,764,764,764,764,764,764,764,764,764,764,
+764,764,764,764,764,764,764,764,764,764,764,764,764,764,764,764,
+764,764,764,764,764,764,764,764,764,764,764,764,764,764,764,764,
+764,764,764,764,764,764,764,764,764,764,764,764,764,764,764,764,
+764,764,764,764,764,764,764,764,764,764,764,764,764,764,764,764,
/* block 79 */
50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50,
50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50,
50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50,
- 50, 50, 50, 50,741,741, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50,
+ 50, 50, 50, 50,742,742, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50,
50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50,
50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50,
50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50,
50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50,
/* block 80 */
- 50, 50, 50,746,747,746,747,746,747,746,747,746,747,746,747,746,
-747,746,747,746,747,746,747,746,747, 50, 50,744, 50, 50, 50, 50,
-744, 50, 50,744,744,744, 50, 50,744,744,744,744,744,744,744,744,
- 50, 50, 50, 50, 50, 50, 50, 50,744, 50, 50, 50, 50, 50, 50, 50,
-744,744, 50, 50,744,744, 50, 50, 50, 50, 50, 50, 50, 50, 50,744,
-744,744,744, 50,744,744, 50, 50,746,747,746,747, 50, 50, 50, 50,
- 50, 50, 50, 50, 50, 50, 50, 50,744,744, 50, 50, 50, 50, 50, 50,
- 50, 50, 50, 50, 50,744, 50, 50,744,744, 50, 50,746,747, 50, 50,
+ 50, 50, 50,747,748,747,748,747,748,747,748,747,748,747,748,747,
+748,747,748,747,748,747,748,747,748, 50, 50,745, 50, 50, 50, 50,
+745, 50, 50,745,745,745, 50, 50,745,745,745,745,745,745,745,745,
+ 50, 50, 50, 50, 50, 50, 50, 50,745, 50, 50, 50, 50, 50, 50, 50,
+745,745, 50, 50,745,745, 50, 50, 50, 50, 50, 50, 50, 50, 50,745,
+745,745,745, 50,745,745, 50, 50,747,748,747,748, 50, 50, 50, 50,
+ 50, 50, 50, 50, 50, 50, 50, 50,745,745, 50, 50, 50, 50, 50, 50,
+ 50, 50, 50, 50, 50,745, 50, 50,745,745, 50, 50,747,748, 50, 50,
/* block 81 */
50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50,
50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50,
- 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50,744,744,744,744, 50,
- 50, 50, 50, 50,744,744, 50, 50, 50, 50, 50, 50,744,744, 50, 50,
+ 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50,745,745,745,745, 50,
+ 50, 50, 50, 50,745,745, 50, 50, 50, 50, 50, 50,745,745, 50, 50,
50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50,
50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50,
- 50, 50, 50, 50,744,744, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50,
- 50, 50, 50, 50, 50, 50, 50, 50, 50,744,744,744,744,744,744,744,
+ 50, 50, 50, 50,745,745, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50,
+ 50, 50, 50, 50, 50, 50, 50, 50, 50,745,745,745,745,745,745,745,
/* block 82 */
-744,744,744,744,744,744,744,744,744,744,744,744,744,744,744,744,
-744,744,744,744,744,744,744,744,744,744,744,744,744,744,744,744,
-744,744,744, 50, 50, 50,744,744,744,744,744,744,744,744, 50,744,
-744,744,744,744,744,744,744,744,744,744,744,744,744,744,744,744,
-744,744,744,744,744,744,744,744,744,744,744,744,744,744,744,744,
-744,744,744,744,744,744,744, 50, 50, 50, 50, 50, 50, 50,744, 50,
- 50, 50, 50,744,744,744, 50, 50, 50, 50, 50, 50,744,744,744, 50,
- 50, 50, 50, 50, 50, 50, 50,744,744,744,744, 50, 50, 50, 50, 50,
+745,745,745,745,745,745,745,745,745,745,745,745,745,745,745,745,
+745,745,745,745,745,745,745,745,745,745,745,745,745,745,745,745,
+745,745,745, 50, 50, 50,745,745,745,745,745,745,745,745, 50,745,
+745,745,745,745,745,745,745,745,745,745,745,745,745,745,745,745,
+745,745,745,745,745,745,745,745,745,745,745,745,745,745,745,745,
+745,745,745,745,745,745,745, 50, 50, 50, 50, 50, 50, 50,745, 50,
+ 50, 50, 50,745,745,745, 50, 50, 50, 50, 50, 50,745,745,745, 50,
+ 50, 50, 50, 50, 50, 50, 50,745,745,745,745, 50, 50, 50, 50, 50,
/* block 83 */
45, 45, 45, 45, 45, 47, 47, 47, 45, 45, 45, 45, 45, 45, 45, 45,
- 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45,748,748, 45, 45, 45,
+ 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45,749,749, 45, 45, 45,
45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45,
50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50,
50, 50, 50, 50, 50, 45, 45, 50, 50, 50, 50, 50, 50, 45, 45, 45,
-748, 45, 45, 45, 45,748, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45,
+749, 45, 45, 45, 45,749, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45,
45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45,
- 45, 45, 45, 45,753,753, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45,
+ 45, 45, 45, 45,754,754, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45,
/* block 84 */
45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45,
- 45, 45, 45, 45, 45, 45,753, 45, 45, 45, 45, 45, 45, 45, 45, 45,
+ 45, 45, 45, 45, 45, 45,754, 45, 45, 45, 45, 45, 45, 45, 45, 45,
45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45,
45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45,
45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45,
45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45,
45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45,
- 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45,764, 45,
+ 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45,765, 45,
/* block 85 */
-765,765,765,765,765,765,765,765,765,765,765,765,765,765,765,765,
-765,765,765,765,765,765,765,765,765,765,765,765,765,765,765,765,
-765,765,765,765,765,765,765,765,765,765,765,765,765,765,765,765,
766,766,766,766,766,766,766,766,766,766,766,766,766,766,766,766,
766,766,766,766,766,766,766,766,766,766,766,766,766,766,766,766,
766,766,766,766,766,766,766,766,766,766,766,766,766,766,766,766,
- 65, 66,767,768,769,770,771, 65, 66, 65, 66, 65, 66,772,773,774,
-775, 70, 65, 66, 70, 65, 66, 70, 70, 70, 70, 70,645,644,776,776,
+767,767,767,767,767,767,767,767,767,767,767,767,767,767,767,767,
+767,767,767,767,767,767,767,767,767,767,767,767,767,767,767,767,
+767,767,767,767,767,767,767,767,767,767,767,767,767,767,767,767,
+ 65, 66,768,769,770,771,772, 65, 66, 65, 66, 65, 66,773,774,775,
+776, 70, 65, 66, 70, 65, 66, 70, 70, 70, 70, 70,646,645,777,777,
/* block 86 */
211,212,211,212,211,212,211,212,211,212,211,212,211,212,211,212,
@@ -3252,248 +3266,248 @@ const uint16_t PRIV(ucd_stage2)[] = { /* 76800 bytes, block = 128 */
211,212,211,212,211,212,211,212,211,212,211,212,211,212,211,212,
211,212,211,212,211,212,211,212,211,212,211,212,211,212,211,212,
211,212,211,212,211,212,211,212,211,212,211,212,211,212,211,212,
-211,212,211,212,777,778,778,778,778,778,778,211,212,211,212,779,
-779,779,211,212,163,163,163,163,163,780,780,780,780,781,780,780,
+211,212,211,212,778,779,779,779,779,779,779,211,212,211,212,780,
+780,780,211,212,163,163,163,163,163,781,781,781,781,782,781,781,
/* block 87 */
-782,782,782,782,782,782,782,782,782,782,782,782,782,782,782,782,
-782,782,782,782,782,782,782,782,782,782,782,782,782,782,782,782,
-782,782,782,782,782,782,163,782,163,163,163,163,163,782,163,163,
-783,783,783,783,783,783,783,783,783,783,783,783,783,783,783,783,
783,783,783,783,783,783,783,783,783,783,783,783,783,783,783,783,
783,783,783,783,783,783,783,783,783,783,783,783,783,783,783,783,
-783,783,783,783,783,783,783,783,163,163,163,163,163,163,163,784,
-785,163,163,163,163,163,163,163,163,163,163,163,163,163,163,786,
+783,783,783,783,783,783,163,783,163,163,163,163,163,783,163,163,
+784,784,784,784,784,784,784,784,784,784,784,784,784,784,784,784,
+784,784,784,784,784,784,784,784,784,784,784,784,784,784,784,784,
+784,784,784,784,784,784,784,784,784,784,784,784,784,784,784,784,
+784,784,784,784,784,784,784,784,163,163,163,163,163,163,163,785,
+786,163,163,163,163,163,163,163,163,163,163,163,163,163,163,787,
/* block 88 */
-483,483,483,483,483,483,483,483,483,483,483,483,483,483,483,483,
-483,483,483,483,483,483,483,163,163,163,163,163,163,163,163,163,
-483,483,483,483,483,483,483,163,483,483,483,483,483,483,483,163,
-483,483,483,483,483,483,483,163,483,483,483,483,483,483,483,163,
-483,483,483,483,483,483,483,163,483,483,483,483,483,483,483,163,
-483,483,483,483,483,483,483,163,483,483,483,483,483,483,483,163,
-787,787,787,787,787,787,787,787,787,787,787,787,787,787,787,787,
-787,787,787,787,787,787,787,787,787,787,787,787,787,787,787,787,
+484,484,484,484,484,484,484,484,484,484,484,484,484,484,484,484,
+484,484,484,484,484,484,484,163,163,163,163,163,163,163,163,163,
+484,484,484,484,484,484,484,163,484,484,484,484,484,484,484,163,
+484,484,484,484,484,484,484,163,484,484,484,484,484,484,484,163,
+484,484,484,484,484,484,484,163,484,484,484,484,484,484,484,163,
+484,484,484,484,484,484,484,163,484,484,484,484,484,484,484,163,
+788,788,788,788,788,788,788,788,788,788,788,788,788,788,788,788,
+788,788,788,788,788,788,788,788,788,788,788,788,788,788,788,788,
/* block 89 */
- 43, 43,788,789,788,789, 43, 43, 43,788,789, 43,788,789, 43, 43,
- 43, 43, 43, 43, 43, 43, 43,680, 43, 43,680, 43,788,789, 43, 43,
-788,789,703,704,703,704,703,704,703,704, 43, 43, 43, 43,699,790,
- 43, 43, 43, 43, 43, 43, 43, 43, 43, 43,680,680,699, 43, 43, 43,
-680,791,684,792, 43, 43, 43, 43, 43, 43, 43, 43,791, 43,791,791,
- 45, 45, 43,699,699,703,704,703,704,703,704,703,704,680,753,753,
-753,753,753,753,753,753,753,753,753,753,753,753,753,753,753,753,
-753,753,753,753,753,753,753,753,753,753,753,753,753,753,753,753,
+ 43, 43,789,790,789,790, 43, 43, 43,789,790, 43,789,790, 43, 43,
+ 43, 43, 43, 43, 43, 43, 43,681, 43, 43,681, 43,789,790, 43, 43,
+789,790,704,705,704,705,704,705,704,705, 43, 43, 43, 43,700,791,
+ 43, 43, 43, 43, 43, 43, 43, 43, 43, 43,681,681,700, 43, 43, 43,
+681,792,685,793, 43, 43, 43, 43, 43, 43, 43, 43,792, 43,792,792,
+ 45, 45, 43,700,700,704,705,704,705,704,705,704,705,681,754,754,
+754,754,754,754,754,754,754,754,754,754,754,754,754,754,754,754,
+754,754,754,754,754,754,754,754,754,754,754,754,754,754,754,754,
/* block 90 */
-793,793,793,793,793,793,793,793,793,793,793,793,793,793,793,793,
-793,793,793,793,793,793,793,793,793,793,163,793,793,793,793,793,
-793,793,793,793,793,793,793,793,793,793,793,793,793,793,793,793,
-793,793,793,793,793,793,793,793,793,793,793,793,793,793,793,793,
-793,793,793,793,793,793,793,793,793,793,793,793,793,793,793,793,
-793,793,793,793,793,793,793,793,793,793,793,793,793,793,793,793,
-793,793,793,793,793,793,793,793,793,793,793,793,793,793,793,793,
-793,793,793,793,163,163,163,163,163,163,163,163,163,163,163,163,
+794,794,794,794,794,794,794,794,794,794,794,794,794,794,794,794,
+794,794,794,794,794,794,794,794,794,794,163,794,794,794,794,794,
+794,794,794,794,794,794,794,794,794,794,794,794,794,794,794,794,
+794,794,794,794,794,794,794,794,794,794,794,794,794,794,794,794,
+794,794,794,794,794,794,794,794,794,794,794,794,794,794,794,794,
+794,794,794,794,794,794,794,794,794,794,794,794,794,794,794,794,
+794,794,794,794,794,794,794,794,794,794,794,794,794,794,794,794,
+794,794,794,794,163,163,163,163,163,163,163,163,163,163,163,163,
/* block 91 */
-793,793,793,793,793,793,793,793,793,793,793,793,793,793,793,793,
-793,793,793,793,793,793,793,793,793,793,793,793,793,793,793,793,
-793,793,793,793,793,793,793,793,793,793,793,793,793,793,793,793,
-793,793,793,793,793,793,793,793,793,793,793,793,793,793,793,793,
-793,793,793,793,793,793,793,793,793,793,793,793,793,793,793,793,
-793,793,793,793,793,793,793,793,793,793,793,793,793,793,793,793,
-793,793,793,793,793,793,793,793,793,793,793,793,793,793,793,793,
-793,793,793,793,793,793,793,793,793,793,793,793,793,793,793,793,
+794,794,794,794,794,794,794,794,794,794,794,794,794,794,794,794,
+794,794,794,794,794,794,794,794,794,794,794,794,794,794,794,794,
+794,794,794,794,794,794,794,794,794,794,794,794,794,794,794,794,
+794,794,794,794,794,794,794,794,794,794,794,794,794,794,794,794,
+794,794,794,794,794,794,794,794,794,794,794,794,794,794,794,794,
+794,794,794,794,794,794,794,794,794,794,794,794,794,794,794,794,
+794,794,794,794,794,794,794,794,794,794,794,794,794,794,794,794,
+794,794,794,794,794,794,794,794,794,794,794,794,794,794,794,794,
/* block 92 */
-793,793,793,793,793,793,793,793,793,793,793,793,793,793,793,793,
-793,793,793,793,793,793,793,793,793,793,793,793,793,793,793,793,
-793,793,793,793,793,793,793,793,793,793,793,793,793,793,793,793,
-793,793,793,793,793,793,793,793,793,793,793,793,793,793,793,793,
-793,793,793,793,793,793,793,793,793,793,793,793,793,793,793,793,
-793,793,793,793,793,793,163,163,163,163,163,163,163,163,163,163,
+794,794,794,794,794,794,794,794,794,794,794,794,794,794,794,794,
+794,794,794,794,794,794,794,794,794,794,794,794,794,794,794,794,
+794,794,794,794,794,794,794,794,794,794,794,794,794,794,794,794,
+794,794,794,794,794,794,794,794,794,794,794,794,794,794,794,794,
+794,794,794,794,794,794,794,794,794,794,794,794,794,794,794,794,
+794,794,794,794,794,794,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
-794,794,795,795,794,794,794,794,794,794,794,794,163,163,163,163,
+795,795,796,796,795,795,795,795,795,795,795,795,163,163,163,163,
/* block 93 */
-675,796,797,798,723,799,800,801,802,803,802,803,804,805,804,805,
-802,803, 45,806,802,803,802,803,802,803,802,803,807,808,809,809,
- 45,801,801,801,801,801,801,801,801,801,810,810,810,810,811,811,
-812,813,813,813,813,813,723,814,801,801,801,815,816,817,818,818,
-163,819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,
-819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,
-819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,
-819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,
+676,797,798,799,724,800,801,802,803,804,803,804,805,806,805,806,
+803,804, 45,807,803,804,803,804,803,804,803,804,808,809,810,810,
+ 45,802,802,802,802,802,802,802,802,802,811,811,811,811,812,812,
+813,814,814,814,814,814,724,815,802,802,802,816,817,818,819,819,
+163,820,820,820,820,820,820,820,820,820,820,820,820,820,820,820,
+820,820,820,820,820,820,820,820,820,820,820,820,820,820,820,820,
+820,820,820,820,820,820,820,820,820,820,820,820,820,820,820,820,
+820,820,820,820,820,820,820,820,820,820,820,820,820,820,820,820,
/* block 94 */
-819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,
-819,819,819,819,819,819,819,163,163,820,820,821,821,822,822,819,
-823,824,824,824,824,824,824,824,824,824,824,824,824,824,824,824,
-824,824,824,824,824,824,824,824,824,824,824,824,824,824,824,824,
-824,824,824,824,824,824,824,824,824,824,824,824,824,824,824,824,
-824,824,824,824,824,824,824,824,824,824,824,824,824,824,824,824,
-824,824,824,824,824,824,824,824,824,824,824,824,824,824,824,824,
-824,824,824,824,824,824,824,824,824,824,824,825,826,827,827,824,
+820,820,820,820,820,820,820,820,820,820,820,820,820,820,820,820,
+820,820,820,820,820,820,820,163,163,821,821,822,822,823,823,820,
+824,825,825,825,825,825,825,825,825,825,825,825,825,825,825,825,
+825,825,825,825,825,825,825,825,825,825,825,825,825,825,825,825,
+825,825,825,825,825,825,825,825,825,825,825,825,825,825,825,825,
+825,825,825,825,825,825,825,825,825,825,825,825,825,825,825,825,
+825,825,825,825,825,825,825,825,825,825,825,825,825,825,825,825,
+825,825,825,825,825,825,825,825,825,825,825,826,827,828,828,825,
/* block 95 */
-163,163,163,163,163,828,828,828,828,828,828,828,828,828,828,828,
-828,828,828,828,828,828,828,828,828,828,828,828,828,828,828,828,
-828,828,828,828,828,828,828,828,828,828,828,828,828,828,828,828,
-163,829,829,829,829,829,829,829,829,829,829,829,829,829,829,829,
-829,829,829,829,829,829,829,829,829,829,829,829,829,829,829,829,
+163,163,163,163,163,829,829,829,829,829,829,829,829,829,829,829,
829,829,829,829,829,829,829,829,829,829,829,829,829,829,829,829,
-829,829,829,829,830,829,829,829,829,829,829,829,829,829,829,829,
829,829,829,829,829,829,829,829,829,829,829,829,829,829,829,829,
+163,830,830,830,830,830,830,830,830,830,830,830,830,830,830,830,
+830,830,830,830,830,830,830,830,830,830,830,830,830,830,830,830,
+830,830,830,830,830,830,830,830,830,830,830,830,830,830,830,830,
+830,830,830,830,831,830,830,830,830,830,830,830,830,830,830,830,
+830,830,830,830,830,830,830,830,830,830,830,830,830,830,830,830,
/* block 96 */
-829,829,829,829,829,829,829,829,829,829,829,829,829,829,829,163,
-831,831,832,832,832,832,831,831,831,831,831,831,831,831,831,831,
-828,828,828,828,828,828,828,828,828,828,828,828,828,828,828,828,
-828,828,828,828,828,828,828,828,828,828,828,828,828,828,828,828,
-818,818,818,818,818,818,818,818,818,818,818,818,818,818,818,818,
-818,818,818,818,818,818,818,818,818,818,818,818,818,818,818,818,
-818,818,818,818,163,163,163,163,163,163,163,163,163,163,163,163,
-824,824,824,824,824,824,824,824,824,824,824,824,824,824,824,824,
+830,830,830,830,830,830,830,830,830,830,830,830,830,830,830,163,
+832,832,833,833,833,833,832,832,832,832,832,832,832,832,832,832,
+829,829,829,829,829,829,829,829,829,829,829,829,829,829,829,829,
+829,829,829,829,829,829,829,829,829,829,829,829,829,829,829,829,
+819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,
+819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,
+819,819,819,819,163,163,163,163,163,163,163,163,163,163,163,163,
+825,825,825,825,825,825,825,825,825,825,825,825,825,825,825,825,
/* block 97 */
-833,833,833,833,833,833,833,833,833,833,833,833,833,833,833,833,
-833,833,833,833,833,833,833,833,833,833,833,833,833,834,834,163,
-832,832,832,832,832,832,832,832,832,832,831,831,831,831,831,831,
-831,831,831,831,831,831,831,831,831,831,831,831,831,831,831,831,
-831,831,831,831,831,831,831,831,835,835,835,835,835,835,835,835,
-723, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58,
-833,833,833,833,833,833,833,833,833,833,833,833,833,833,833,833,
-833,833,833,833,833,833,833,833,833,833,833,833,834,834,834,460,
+834,834,834,834,834,834,834,834,834,834,834,834,834,834,834,834,
+834,834,834,834,834,834,834,834,834,834,834,834,834,835,835,163,
+833,833,833,833,833,833,833,833,833,833,832,832,832,832,832,832,
+832,832,832,832,832,832,832,832,832,832,832,832,832,832,832,832,
+832,832,832,832,832,832,832,832,836,836,836,836,836,836,836,836,
+724, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58,
+834,834,834,834,834,834,834,834,834,834,834,834,834,834,834,834,
+834,834,834,834,834,834,834,834,834,834,834,834,835,835,835,461,
/* block 98 */
-832,832,832,832,832,832,832,832,832,832,831,831,831,831,831,831,
-831,831,831,831,831,831,831,836,831,836,831,831,831,831,831,831,
-831,831,831,831,831,831,831,831,831,831,831,831,831,831,831,831,
-831, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58,
-831,831,831,831,831,831,831,831,831,831,831,831,723,723,723,723,
-837,837,837,837,837,837,837,837,837,837,837,837,837,837,837,837,
-837,837,837,837,837,837,837,837,837,837,837,837,837,837,837,837,
-837,837,837,837,837,837,837,837,837,837,837,837,837,837,837,831,
-
-/* block 99 */
-837,837,837,837,837,837,837,837,837,837,837,837,837,837,837,837,
-837,837,837,837,837,837,837,837,837,837,837,837,837,837,837,837,
-837,837,837,837,837,837,837,837,837,837,837,837,837,837,837,837,
-837,837,837,837,837,837,837,837,837,837,837,837,837,837,837,837,
-837,837,837,837,837,837,837,837,837,837,837,837,837,837,837,837,
-837,837,837,837,837,837,837,837,831,831,831,831,831,831,831,831,
-831,831,831,831,831,831,831,831,831,831,831,831,831,831,831,831,
-831,460,460,460,460,460,460,723,723,723,723,831,831,831,831,831,
-
-/* block 100 */
-460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,
-460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,
-460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,
-460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,
-460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,
-460,460,460,460,460,460,460,460,460,460,460,460,460,460,723,723,
-831,831,831,831,831,831,831,831,831,831,831,831,831,831,831,831,
-831,831,831,831,831,831,831,831,831,831,831,831,831,831,831,723,
-
-/* block 101 */
-838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
+833,833,833,833,833,833,833,833,833,833,832,832,832,832,832,832,
+832,832,832,832,832,832,832,837,832,837,832,832,832,832,832,832,
+832,832,832,832,832,832,832,832,832,832,832,832,832,832,832,832,
+832, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58,
+832,832,832,832,832,832,832,832,832,832,832,832,724,724,724,724,
838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
+838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,832,
+
+/* block 99 */
838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
+838,838,838,838,838,838,838,838,832,832,832,832,832,832,832,832,
+832,832,832,832,832,832,832,832,832,832,832,832,832,832,832,832,
+832,461,461,461,461,461,461,724,724,724,724,832,832,832,832,832,

-/* block 102 */
-838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
-838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
-838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
-838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
-723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,
-723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,
-723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,
-723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,
+/* block 100 */
+461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,
+461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,
+461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,
+461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,
+461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,
+461,461,461,461,461,461,461,461,461,461,461,461,461,461,724,724,
+832,832,832,832,832,832,832,832,832,832,832,832,832,832,832,832,
+832,832,832,832,832,832,832,832,832,832,832,832,832,832,832,724,

-/* block 103 */
-839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
-839,839,839,839,839,840,839,839,839,839,839,839,839,839,839,839,
-839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
-839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+/* block 101 */
839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
-
-/* block 104 */
839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+
+/* block 102 */
839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
+724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
+724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
+724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
+
+/* block 103 */
+840,840,840,840,840,840,840,840,840,840,840,840,840,840,840,840,
+840,840,840,840,840,841,840,840,840,840,840,840,840,840,840,840,
+840,840,840,840,840,840,840,840,840,840,840,840,840,840,840,840,
+840,840,840,840,840,840,840,840,840,840,840,840,840,840,840,840,
+840,840,840,840,840,840,840,840,840,840,840,840,840,840,840,840,
+840,840,840,840,840,840,840,840,840,840,840,840,840,840,840,840,
+840,840,840,840,840,840,840,840,840,840,840,840,840,840,840,840,
+840,840,840,840,840,840,840,840,840,840,840,840,840,840,840,840,
+
+/* block 104 */
+840,840,840,840,840,840,840,840,840,840,840,840,840,840,840,840,
+840,840,840,840,840,840,840,840,840,840,840,840,840,840,840,840,
+840,840,840,840,840,840,840,840,840,840,840,840,840,840,840,840,
+840,840,840,840,840,840,840,840,840,840,840,840,840,840,840,840,
+840,840,840,840,840,840,840,840,840,840,840,840,840,840,840,840,
+840,840,840,840,840,840,840,840,840,840,840,840,840,840,840,840,
+840,840,840,840,840,840,840,840,840,840,840,840,840,840,840,840,
+840,840,840,840,840,840,840,840,840,840,840,840,840,840,840,840,
/* block 105 */
-839,839,839,839,839,839,839,839,839,839,839,839,839,163,163,163,
-841,841,841,841,841,841,841,841,841,841,841,841,841,841,841,841,
-841,841,841,841,841,841,841,841,841,841,841,841,841,841,841,841,
-841,841,841,841,841,841,841,841,841,841,841,841,841,841,841,841,
-841,841,841,841,841,841,841,163,163,163,163,163,163,163,163,163,
+840,840,840,840,840,840,840,840,840,840,840,840,840,163,163,163,
+842,842,842,842,842,842,842,842,842,842,842,842,842,842,842,842,
842,842,842,842,842,842,842,842,842,842,842,842,842,842,842,842,
842,842,842,842,842,842,842,842,842,842,842,842,842,842,842,842,
-842,842,842,842,842,842,842,842,843,843,843,843,843,843,844,845,
+842,842,842,842,842,842,842,163,163,163,163,163,163,163,163,163,
+843,843,843,843,843,843,843,843,843,843,843,843,843,843,843,843,
+843,843,843,843,843,843,843,843,843,843,843,843,843,843,843,843,
+843,843,843,843,843,843,843,843,844,844,844,844,844,844,845,846,
/* block 106 */
-846,846,846,846,846,846,846,846,846,846,846,846,846,846,846,846,
-846,846,846,846,846,846,846,846,846,846,846,846,846,846,846,846,
-846,846,846,846,846,846,846,846,846,846,846,846,846,846,846,846,
-846,846,846,846,846,846,846,846,846,846,846,846,846,846,846,846,
-846,846,846,846,846,846,846,846,846,846,846,846,846,846,846,846,
-846,846,846,846,846,846,846,846,846,846,846,846,846,846,846,846,
-846,846,846,846,846,846,846,846,846,846,846,846,846,846,846,846,
-846,846,846,846,846,846,846,846,846,846,846,846,846,846,846,846,
+847,847,847,847,847,847,847,847,847,847,847,847,847,847,847,847,
+847,847,847,847,847,847,847,847,847,847,847,847,847,847,847,847,
+847,847,847,847,847,847,847,847,847,847,847,847,847,847,847,847,
+847,847,847,847,847,847,847,847,847,847,847,847,847,847,847,847,
+847,847,847,847,847,847,847,847,847,847,847,847,847,847,847,847,
+847,847,847,847,847,847,847,847,847,847,847,847,847,847,847,847,
+847,847,847,847,847,847,847,847,847,847,847,847,847,847,847,847,
+847,847,847,847,847,847,847,847,847,847,847,847,847,847,847,847,
/* block 107 */
-846,846,846,846,846,846,846,846,846,846,846,846,847,848,849,849,
-846,846,846,846,846,846,846,846,846,846,846,846,846,846,846,846,
-850,850,850,850,850,850,850,850,850,850,846,846,163,163,163,163,
+847,847,847,847,847,847,847,847,847,847,847,847,848,849,850,850,
+847,847,847,847,847,847,847,847,847,847,847,847,847,847,847,847,
+851,851,851,851,851,851,851,851,851,851,847,847,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
-240,241,240,241,240,241,240,241,240,241,851,852,240,241,240,241,
+240,241,240,241,240,241,240,241,240,241,852,853,240,241,240,241,
240,241,240,241,240,241,240,241,240,241,240,241,240,241,240,241,
-240,241,240,241,240,241,240,241,240,241,240,241,240,241,853,246,
-248,248,248,854,787,787,787,787,787,787,787,787,855,855,854,856,
+240,241,240,241,240,241,240,241,240,241,240,241,240,241,854,246,
+248,248,248,855,788,788,788,788,788,788,788,788,856,856,855,857,
/* block 108 */
240,241,240,241,240,241,240,241,240,241,240,241,240,241,240,241,
-240,241,240,241,240,241,240,241,240,241,240,241,857,857,787,787,
-858,858,858,858,858,858,858,858,858,858,858,858,858,858,858,858,
-858,858,858,858,858,858,858,858,858,858,858,858,858,858,858,858,
-858,858,858,858,858,858,858,858,858,858,858,858,858,858,858,858,
-858,858,858,858,858,858,858,858,858,858,858,858,858,858,858,858,
-858,858,858,858,858,858,859,859,859,859,859,859,859,859,859,859,
-860,860,861,862,863,863,863,862,163,163,163,163,163,163,163,163,
+240,241,240,241,240,241,240,241,240,241,240,241,858,858,788,788,
+859,859,859,859,859,859,859,859,859,859,859,859,859,859,859,859,
+859,859,859,859,859,859,859,859,859,859,859,859,859,859,859,859,
+859,859,859,859,859,859,859,859,859,859,859,859,859,859,859,859,
+859,859,859,859,859,859,859,859,859,859,859,859,859,859,859,859,
+859,859,859,859,859,859,860,860,860,860,860,860,860,860,860,860,
+861,861,862,863,864,864,864,863,163,163,163,163,163,163,163,163,
/* block 109 */
-864,864,864,864,864,864,864,864, 46, 46, 46, 46, 46, 46, 46, 46,
+865,865,865,865,865,865,865,865, 46, 46, 46, 46, 46, 46, 46, 46,
46, 46, 46, 46, 46, 46, 46,149,149,149,149,149,149,149,149,149,
46, 46, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66,
70, 70, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66,
65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66,
65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66,
65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66,
-644, 70, 70, 70, 70, 70, 70, 70, 70, 65, 66, 65, 66,865, 65, 66,
+645, 70, 70, 70, 70, 70, 70, 70, 70, 65, 66, 65, 66,866, 65, 66,
/* block 110 */
- 65, 66, 65, 66, 65, 66, 65, 66,149,866,866, 65, 66,867, 70, 92,
- 65, 66, 65, 66,868, 70, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66,
- 65, 66, 65, 66, 65, 66, 65, 66, 65, 66,869,870,871,872,869, 70,
-873,874,875,876, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66,
- 65, 66, 65, 66,877,878,879, 65, 66, 65, 66,163,163,163,163,163,
+ 65, 66, 65, 66, 65, 66, 65, 66,149,867,867, 65, 66,868, 70, 92,
+ 65, 66, 65, 66,869, 70, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66,
+ 65, 66, 65, 66, 65, 66, 65, 66, 65, 66,870,871,872,873,870, 70,
+874,875,876,877, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66, 65, 66,
+ 65, 66, 65, 66,878,879,880, 65, 66, 65, 66,163,163,163,163,163,
65, 66,163, 70,163, 70, 65, 66, 65, 66,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
-163,163,880,880,880, 65, 66, 92,147,147, 70, 92, 92, 92, 92, 92,
+163,163,645,645,645, 65, 66, 92,147,147, 70, 92, 92, 92, 92, 92,
/* block 111 */
881,881,882,881,881,881,883,881,881,881,881,882,881,881,881,881,
@@ -3522,8 +3536,8 @@ const uint16_t PRIV(ucd_stage2)[] = { /* 76800 bytes, block = 128 */
908,908,908,908,908,908,908,908,908,908,908,908,908,908,908,908,
908,908,908,908,908,908,908,909,909,909,909,909,909,909,909,909,
909,909,910,911,163,163,163,163,163,163,163,163,163,163,163,912,
-478,478,478,478,478,478,478,478,478,478,478,478,478,478,478,478,
-478,478,478,478,478,478,478,478,478,478,478,478,478,163,163,163,
+479,479,479,479,479,479,479,479,479,479,479,479,479,479,479,479,
+479,479,479,479,479,479,479,479,479,479,479,479,479,163,163,163,
/* block 114 */
913,913,913,914,915,915,915,915,915,915,915,915,915,915,915,915,
@@ -3532,8 +3546,8 @@ const uint16_t PRIV(ucd_stage2)[] = { /* 76800 bytes, block = 128 */
915,915,915,916,914,914,913,913,913,913,914,914,913,913,914,914,
917,918,918,918,918,918,918,919,920,920,918,918,918,918,163,921,
922,922,922,922,922,922,922,922,922,922,163,163,163,163,918,918,
-461,461,461,461,461,471,923,461,461,461,461,461,461,461,461,461,
-472,472,472,472,472,472,472,472,472,472,461,461,461,461,461,163,
+462,462,462,462,462,472,923,462,462,462,462,462,462,462,462,462,
+473,473,473,473,473,473,473,473,473,473,462,462,462,462,462,163,
/* block 115 */
924,924,924,924,924,924,924,924,924,924,924,924,924,924,924,924,
@@ -3542,8 +3556,8 @@ const uint16_t PRIV(ucd_stage2)[] = { /* 76800 bytes, block = 128 */
926,925,925,926,926,925,925,163,163,163,163,163,163,163,163,163,
924,924,924,925,924,924,924,924,924,924,924,924,925,926,163,163,
927,927,927,927,927,927,927,927,927,927,163,163,928,929,929,929,
-461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,
-923,461,461,461,461,461,461,473,473,473,461,470,471,470,461,461,
+462,462,462,462,462,462,462,462,462,462,462,462,462,462,462,462,
+923,462,462,462,462,462,462,474,474,474,462,471,472,471,462,462,
/* block 116 */
930,930,930,930,930,930,930,930,930,930,930,930,930,930,930,930,
@@ -3556,106 +3570,116 @@ const uint16_t PRIV(ucd_stage2)[] = { /* 76800 bytes, block = 128 */
941,941,938,942,942,939,943,163,163,163,163,163,163,163,163,163,
/* block 117 */
-163,483,483,483,483,483,483,163,163,483,483,483,483,483,483,163,
-163,483,483,483,483,483,483,163,163,163,163,163,163,163,163,163,
-483,483,483,483,483,483,483,163,483,483,483,483,483,483,483,163,
+163,484,484,484,484,484,484,163,163,484,484,484,484,484,484,163,
+163,484,484,484,484,484,484,163,163,163,163,163,163,163,163,163,
+484,484,484,484,484,484,484,163,484,484,484,484,484,484,484,163,
70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70,
70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70,
- 70, 70, 70,944, 70, 70, 70, 70, 70, 70, 70,866,147,147,147,147,
- 70, 70, 70, 70, 70,221, 70, 70, 70,945, 46, 46,163,163,163,163,
-946,946,946,946,946,946,946,946,946,946,946,946,946,946,946,946,
+ 70, 70, 70,944, 70, 70, 70, 70, 70, 70, 70,867,147,147,147,147,
+ 70, 70, 70, 70, 70,221, 70, 70, 70,147, 46, 46,163,163,163,163,
+945,945,945,945,945,945,945,945,945,945,945,945,945,945,945,945,
/* block 118 */
-946,946,946,946,946,946,946,946,946,946,946,946,946,946,946,946,
-946,946,946,946,946,946,946,946,946,946,946,946,946,946,946,946,
-946,946,946,946,946,946,946,946,946,946,946,946,946,946,946,946,
-946,946,946,946,946,946,946,946,946,946,946,946,946,946,946,946,
+945,945,945,945,945,945,945,945,945,945,945,945,945,945,945,945,
+945,945,945,945,945,945,945,945,945,945,945,945,945,945,945,945,
+945,945,945,945,945,945,945,945,945,945,945,945,945,945,945,945,
+945,945,945,945,945,945,945,945,945,945,945,945,945,945,945,945,
938,938,938,938,938,938,938,938,938,938,938,938,938,938,938,938,
938,938,938,938,938,938,938,938,938,938,938,938,938,938,938,938,
-938,938,938,939,939,940,939,939,940,939,939,941,947,943,163,163,
-948,948,948,948,948,948,948,948,948,948,163,163,163,163,163,163,
+938,938,938,939,939,940,939,939,940,939,939,941,946,943,163,163,
+947,947,947,947,947,947,947,947,947,947,163,163,163,163,163,163,
/* block 119 */
-949,950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,
-950,950,950,950,950,950,950,950,950,950,950,950,949,950,950,950,
-950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,
-950,950,950,950,950,950,950,950,949,950,950,950,950,950,950,950,
-950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,
-950,950,950,950,949,950,950,950,950,950,950,950,950,950,950,950,
-950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,
-949,950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,
+948,949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,
+949,949,949,949,949,949,949,949,949,949,949,949,948,949,949,949,
+949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,
+949,949,949,949,949,949,949,949,948,949,949,949,949,949,949,949,
+949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,
+949,949,949,949,948,949,949,949,949,949,949,949,949,949,949,949,
+949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,
+948,949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,
/* block 120 */
-950,950,950,950,950,950,950,950,950,950,950,950,949,950,950,950,
-950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,
-950,950,950,950,950,950,950,950,949,950,950,950,950,950,950,950,
-950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,
-950,950,950,950,949,950,950,950,950,950,950,950,950,950,950,950,
-950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,
-949,950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,
-950,950,950,950,950,950,950,950,950,950,950,950,949,950,950,950,
+949,949,949,949,949,949,949,949,949,949,949,949,948,949,949,949,
+949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,
+949,949,949,949,949,949,949,949,948,949,949,949,949,949,949,949,
+949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,
+949,949,949,949,948,949,949,949,949,949,949,949,949,949,949,949,
+949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,
+948,949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,
+949,949,949,949,949,949,949,949,949,949,949,949,948,949,949,949,
/* block 121 */
-950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,
-950,950,950,950,950,950,950,950,949,950,950,950,950,950,950,950,
-950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,
-950,950,950,950,949,950,950,950,950,950,950,950,950,950,950,950,
-950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,
-949,950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,
-950,950,950,950,950,950,950,950,950,950,950,950,949,950,950,950,
-950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,
+949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,
+949,949,949,949,949,949,949,949,948,949,949,949,949,949,949,949,
+949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,
+949,949,949,949,948,949,949,949,949,949,949,949,949,949,949,949,
+949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,
+948,949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,
+949,949,949,949,949,949,949,949,949,949,949,949,948,949,949,949,
+949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,
/* block 122 */
-950,950,950,950,950,950,950,950,949,950,950,950,950,950,950,950,
-950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,
-950,950,950,950,949,950,950,950,950,950,950,950,950,950,950,950,
-950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,
-949,950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,
-950,950,950,950,950,950,950,950,950,950,950,950,949,950,950,950,
-950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,
-950,950,950,950,950,950,950,950,949,950,950,950,950,950,950,950,
+949,949,949,949,949,949,949,949,948,949,949,949,949,949,949,949,
+949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,
+949,949,949,949,948,949,949,949,949,949,949,949,949,949,949,949,
+949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,
+948,949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,
+949,949,949,949,949,949,949,949,949,949,949,949,948,949,949,949,
+949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,
+949,949,949,949,949,949,949,949,948,949,949,949,949,949,949,949,
/* block 123 */
-950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,
-950,950,950,950,949,950,950,950,950,950,950,950,950,950,950,950,
-950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,
-949,950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,
-950,950,950,950,950,950,950,950,950,950,950,950,949,950,950,950,
-950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,
-950,950,950,950,950,950,950,950,949,950,950,950,950,950,950,950,
-950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,
+949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,
+949,949,949,949,948,949,949,949,949,949,949,949,949,949,949,949,
+949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,
+948,949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,
+949,949,949,949,949,949,949,949,949,949,949,949,948,949,949,949,
+949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,
+949,949,949,949,949,949,949,949,948,949,949,949,949,949,949,949,
+949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,
/* block 124 */
-950,950,950,950,949,950,950,950,950,950,950,950,950,950,950,950,
+949,949,949,949,948,949,949,949,949,949,949,949,949,949,949,949,
+949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,
+948,949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,
+949,949,949,949,949,949,949,949,949,949,949,949,948,949,949,949,
+949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,
+949,949,949,949,949,949,949,949,948,949,949,949,949,949,949,949,
+949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,
+949,949,949,949,948,949,949,949,949,949,949,949,949,949,949,949,
+
+/* block 125 */
+949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,
+948,949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,
+949,949,949,949,949,949,949,949,949,949,949,949,948,949,949,949,
+949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,
+949,949,949,949,949,949,949,949,948,949,949,949,949,949,949,949,
+949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,
+949,949,949,949,948,949,949,949,949,949,949,949,949,949,949,949,
+949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,
+
+/* block 126 */
+949,949,949,949,949,949,949,949,948,949,949,949,949,949,949,949,
+949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,949,
+949,949,949,949,163,163,163,163,163,163,163,163,163,163,163,163,
+482,482,482,482,482,482,482,482,482,482,482,482,482,482,482,482,
+482,482,482,482,482,482,482,163,163,163,163,483,483,483,483,483,
+483,483,483,483,483,483,483,483,483,483,483,483,483,483,483,483,
+483,483,483,483,483,483,483,483,483,483,483,483,483,483,483,483,
+483,483,483,483,483,483,483,483,483,483,483,483,163,163,163,163,
+
+/* block 127 */
950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,
-949,950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,
-950,950,950,950,950,950,950,950,950,950,950,950,949,950,950,950,
950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,
-950,950,950,950,950,950,950,950,949,950,950,950,950,950,950,950,
950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,
-950,950,950,950,949,950,950,950,950,950,950,950,950,950,950,950,
-
-/* block 125 */
950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,
-949,950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,
-950,950,950,950,950,950,950,950,950,950,950,950,949,950,950,950,
950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,
-950,950,950,950,950,950,950,950,949,950,950,950,950,950,950,950,
950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,
-950,950,950,950,949,950,950,950,950,950,950,950,950,950,950,950,
950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,
-
-/* block 126 */
-950,950,950,950,950,950,950,950,949,950,950,950,950,950,950,950,
950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,950,
-950,950,950,950,163,163,163,163,163,163,163,163,163,163,163,163,
-481,481,481,481,481,481,481,481,481,481,481,481,481,481,481,481,
-481,481,481,481,481,481,481,163,163,163,163,482,482,482,482,482,
-482,482,482,482,482,482,482,482,482,482,482,482,482,482,482,482,
-482,482,482,482,482,482,482,482,482,482,482,482,482,482,482,482,
-482,482,482,482,482,482,482,482,482,482,482,482,163,163,163,163,
-/* block 127 */
+/* block 128 */
951,951,951,951,951,951,951,951,951,951,951,951,951,951,951,951,
951,951,951,951,951,951,951,951,951,951,951,951,951,951,951,951,
951,951,951,951,951,951,951,951,951,951,951,951,951,951,951,951,
@@ -3665,7 +3689,7 @@ const uint16_t PRIV(ucd_stage2)[] = { /* 76800 bytes, block = 128 */
951,951,951,951,951,951,951,951,951,951,951,951,951,951,951,951,
951,951,951,951,951,951,951,951,951,951,951,951,951,951,951,951,
-/* block 128 */
+/* block 129 */
952,952,952,952,952,952,952,952,952,952,952,952,952,952,952,952,
952,952,952,952,952,952,952,952,952,952,952,952,952,952,952,952,
952,952,952,952,952,952,952,952,952,952,952,952,952,952,952,952,
@@ -3675,40 +3699,30 @@ const uint16_t PRIV(ucd_stage2)[] = { /* 76800 bytes, block = 128 */
952,952,952,952,952,952,952,952,952,952,952,952,952,952,952,952,
952,952,952,952,952,952,952,952,952,952,952,952,952,952,952,952,
-/* block 129 */
-953,953,953,953,953,953,953,953,953,953,953,953,953,953,953,953,
-953,953,953,953,953,953,953,953,953,953,953,953,953,953,953,953,
-953,953,953,953,953,953,953,953,953,953,953,953,953,953,953,953,
-953,953,953,953,953,953,953,953,953,953,953,953,953,953,953,953,
-953,953,953,953,953,953,953,953,953,953,953,953,953,953,953,953,
-953,953,953,953,953,953,953,953,953,953,953,953,953,953,953,953,
-953,953,953,953,953,953,953,953,953,953,953,953,953,953,953,953,
-953,953,953,953,953,953,953,953,953,953,953,953,953,953,953,953,
-
/* block 130 */
-953,953,953,953,953,953,953,953,953,953,953,953,953,953,838,838,
-953,838,953,838,838,953,953,953,953,953,953,953,953,953,953,838,
-953,838,953,838,838,953,953,838,838,838,953,953,953,953,953,953,
-953,953,953,953,953,953,953,953,953,953,953,953,953,953,953,953,
-953,953,953,953,953,953,953,953,953,953,953,953,953,953,953,953,
-953,953,953,953,953,953,953,953,953,953,953,953,953,953,953,953,
-953,953,953,953,953,953,953,953,953,953,953,953,953,953,163,163,
-953,953,953,953,953,953,953,953,953,953,953,953,953,953,953,953,
+952,952,952,952,952,952,952,952,952,952,952,952,952,952,839,839,
+952,839,952,839,839,952,952,952,952,952,952,952,952,952,952,839,
+952,839,952,839,839,952,952,839,839,839,952,952,952,952,952,952,
+952,952,952,952,952,952,952,952,952,952,952,952,952,952,952,952,
+952,952,952,952,952,952,952,952,952,952,952,952,952,952,952,952,
+952,952,952,952,952,952,952,952,952,952,952,952,952,952,952,952,
+952,952,952,952,952,952,952,952,952,952,952,952,952,952,163,163,
+952,952,952,952,952,952,952,952,952,952,952,952,952,952,952,952,
/* block 131 */
-953,953,953,953,953,953,953,953,953,953,953,953,953,953,953,953,
-953,953,953,953,953,953,953,953,953,953,953,953,953,953,953,953,
-953,953,953,953,953,953,953,953,953,953,953,953,953,953,953,953,
-953,953,953,953,953,953,953,953,953,953,953,953,953,953,953,953,
-953,953,953,953,953,953,953,953,953,953,953,953,953,953,953,953,
-953,953,953,953,953,953,953,953,953,953,163,163,163,163,163,163,
+952,952,952,952,952,952,952,952,952,952,952,952,952,952,952,952,
+952,952,952,952,952,952,952,952,952,952,952,952,952,952,952,952,
+952,952,952,952,952,952,952,952,952,952,952,952,952,952,952,952,
+952,952,952,952,952,952,952,952,952,952,952,952,952,952,952,952,
+952,952,952,952,952,952,952,952,952,952,952,952,952,952,952,952,
+952,952,952,952,952,952,952,952,952,952,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
/* block 132 */
-652,652,652,652,652,652,652,163,163,163,163,163,163,163,163,163,
+653,653,653,653,653,653,653,163,163,163,163,163,163,163,163,163,
163,163,163,257,257,257,257,257,163,163,163,163,163,270,265,270,
-270,270,270,270,270,270,270,270,270,954,270,270,270,270,270,270,
+270,270,270,270,270,270,270,270,270,953,270,270,270,270,270,270,
270,270,270,270,270,270,270,262,270,270,270,270,270,262,270,262,
270,270,262,270,270,262,270,270,270,270,270,270,270,270,270,270,
286,286,286,286,286,286,286,286,286,286,286,286,286,286,286,286,
@@ -3731,8 +3745,8 @@ const uint16_t PRIV(ucd_stage2)[] = { /* 76800 bytes, block = 128 */
286,286,286,286,286,286,286,286,286,286,286,286,286,286,286,286,
286,286,286,286,286,286,286,286,286,286,286,286,286,286,286,286,
286,286,286,286,286,286,286,286,286,286,286,286,286,286,286,286,
-286,286,286,286,286,286,286,286,286,286,286,286,286,286,955,955,
-955,955,955,955,286,286,286,286,286,286,286,286,286,286,286,286,
+286,286,286,286,286,286,286,286,286,286,286,286,286,286,954,954,
+954,954,954,954,286,286,286,286,286,286,286,286,286,286,286,286,
286,286,286,286,286,286,286,286,286,286,286,286,286,286,286,286,
/* block 135 */
@@ -3749,7 +3763,7 @@ const uint16_t PRIV(ucd_stage2)[] = { /* 76800 bytes, block = 128 */
286,286,286,286,286,286,286,286,286,286,286,286,286,286,286,286,
286,286,286,286,286,286,286,286,286,286,286,286,286,286,286,286,
286,286,286,286,286,286,286,286,286,286,286,286,286,286,286,286,
-286,286,286,286,286,286,286,286,286,286,286,286,286,286,956,957,
+286,286,286,286,286,286,286,286,286,286,286,286,286,286,955,956,
280,280,280,280,280,280,280,280,280,280,280,280,280,280,280,280,
286,286,286,286,286,286,286,286,286,286,286,286,286,286,286,286,
286,286,286,286,286,286,286,286,286,286,286,286,286,286,286,286,
@@ -3761,19 +3775,19 @@ const uint16_t PRIV(ucd_stage2)[] = { /* 76800 bytes, block = 128 */
286,286,286,286,286,286,286,286,286,286,286,286,286,286,286,286,
286,286,286,286,286,286,286,286,286,286,286,286,286,286,286,286,
286,286,286,286,286,286,286,286,302,302,302,302,302,302,302,280,
-958,958,958,958,958,958,958,958,958,958,958,958,958,958,958,958,
-958,958,958,958,958,958,958,958,958,958,958,958,958,958,958,958,
-286,286,959,286,286,286,286,286,286,286,955,955,277,960,280,280,
+957,957,957,957,957,957,957,957,957,957,957,957,957,957,957,957,
+957,957,957,957,957,957,957,957,957,957,957,957,957,957,957,957,
+286,286,958,286,286,286,286,286,286,286,954,954,277,959,280,280,
/* block 138 */
-961,961,961,961,961,961,961,961,961,961,961,961,961,961,961,962,
-963,963,963,964,963,963,963,965,966,963,163,163,163,163,163,163,
-154,154,154,154,154,154,154,154,154,154,154,154,154,154,855,855,
-963,967,967,700,700,965,966,965,966,965,966,965,966,965,966,965,
-966,968,969,968,969,798,798,965,966,963,963,963,963,700,700,700,
-970,166,971,163,166,972,973,973,967,974,975,974,975,974,975,976,
-963,977,713,978,979,979,715,163,977,431,976,963,163,163,163,163,
-955,286,955,286,955,302,955,286,955,286,955,286,955,286,955,286,
+960,960,960,960,960,960,960,960,960,960,960,960,960,960,960,961,
+962,962,962,963,962,962,962,964,965,962,163,163,163,163,163,163,
+154,154,154,154,154,154,154,154,154,154,154,154,154,154,856,856,
+962,966,966,701,701,964,965,964,965,964,965,964,965,964,965,964,
+965,967,968,967,968,799,799,964,965,962,962,962,962,701,701,701,
+969,166,970,163,166,971,972,972,966,973,974,973,974,973,974,975,
+962,976,714,977,978,978,716,163,976,430,975,962,163,163,163,163,
+954,286,954,286,954,302,954,286,954,286,954,286,954,286,954,286,
/* block 139 */
286,286,286,286,286,286,286,286,286,286,286,286,286,286,286,286,
@@ -3786,64 +3800,64 @@ const uint16_t PRIV(ucd_stage2)[] = { /* 76800 bytes, block = 128 */
286,286,286,286,286,286,286,286,286,286,286,286,286,302,302, 51,
/* block 140 */
-163,973,980,976,431,976,963,981,974,975,963,713,970,982,971,983,
-984,984,984,984,984,984,984,984,984,984,972,166,979,715,979,973,
-963,985,985,985,985,985,985, 59, 59, 59, 59, 59, 59, 59, 59, 59,
- 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59,974,977,975,986,700,
- 46,987,987,987,987,987,987, 62, 62, 62, 62, 62, 62, 62, 62, 62,
- 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62,974,715,975,715,974,
-975,988,989,990,991,825,824,824,824,824,824,824,824,824,824,824,
-826,824,824,824,824,824,824,824,824,824,824,824,824,824,824,824,
+163,972,979,975,430,975,962,980,973,974,962,714,969,981,970,982,
+983,983,983,983,983,983,983,983,983,983,971,166,978,716,978,972,
+962,984,984,984,984,984,984, 59, 59, 59, 59, 59, 59, 59, 59, 59,
+ 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59,973,976,974,985,701,
+ 46,986,986,986,986,986,986, 62, 62, 62, 62, 62, 62, 62, 62, 62,
+ 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62,973,716,974,716,973,
+974,987,988,989,990,826,825,825,825,825,825,825,825,825,825,825,
+827,825,825,825,825,825,825,825,825,825,825,825,825,825,825,825,
/* block 141 */
-824,824,824,824,824,824,824,824,824,824,824,824,824,824,824,824,
-824,824,824,824,824,824,824,824,824,824,824,824,824,824,992,992,
-830,829,829,829,829,829,829,829,829,829,829,829,829,829,829,829,
-829,829,829,829,829,829,829,829,829,829,829,829,829,829,829,163,
-163,163,829,829,829,829,829,829,163,163,829,829,829,829,829,829,
-163,163,829,829,829,829,829,829,163,163,829,829,829,163,163,163,
-431,431,715, 46,723,431,431,163,723,715,715,715,715,723,723,163,
-707,707,707,707,707,707,707,707,707,993,993,993,723,723,958,958,
+825,825,825,825,825,825,825,825,825,825,825,825,825,825,825,825,
+825,825,825,825,825,825,825,825,825,825,825,825,825,825,991,991,
+831,830,830,830,830,830,830,830,830,830,830,830,830,830,830,830,
+830,830,830,830,830,830,830,830,830,830,830,830,830,830,830,163,
+163,163,830,830,830,830,830,830,163,163,830,830,830,830,830,830,
+163,163,830,830,830,830,830,830,163,163,830,830,830,163,163,163,
+430,430,716, 46,724,430,430,163,724,716,716,716,716,724,724,163,
+708,708,708,708,708,708,708,708,708,992,992,992,724,724,957,957,
/* block 142 */
-994,994,994,994,994,994,994,994,994,994,994,994,163,994,994,994,
-994,994,994,994,994,994,994,994,994,994,994,994,994,994,994,994,
-994,994,994,994,994,994,994,163,994,994,994,994,994,994,994,994,
-994,994,994,994,994,994,994,994,994,994,994,163,994,994,163,994,
-994,994,994,994,994,994,994,994,994,994,994,994,994,994,163,163,
-994,994,994,994,994,994,994,994,994,994,994,994,994,994,163,163,
+993,993,993,993,993,993,993,993,993,993,993,993,163,993,993,993,
+993,993,993,993,993,993,993,993,993,993,993,993,993,993,993,993,
+993,993,993,993,993,993,993,163,993,993,993,993,993,993,993,993,
+993,993,993,993,993,993,993,993,993,993,993,163,993,993,163,993,
+993,993,993,993,993,993,993,993,993,993,993,993,993,993,163,163,
+993,993,993,993,993,993,993,993,993,993,993,993,993,993,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
/* block 143 */
-994,994,994,994,994,994,994,994,994,994,994,994,994,994,994,994,
-994,994,994,994,994,994,994,994,994,994,994,994,994,994,994,994,
-994,994,994,994,994,994,994,994,994,994,994,994,994,994,994,994,
-994,994,994,994,994,994,994,994,994,994,994,994,994,994,994,994,
-994,994,994,994,994,994,994,994,994,994,994,994,994,994,994,994,
-994,994,994,994,994,994,994,994,994,994,994,994,994,994,994,994,
-994,994,994,994,994,994,994,994,994,994,994,994,994,994,994,994,
-994,994,994,994,994,994,994,994,994,994,994,163,163,163,163,163,
+993,993,993,993,993,993,993,993,993,993,993,993,993,993,993,993,
+993,993,993,993,993,993,993,993,993,993,993,993,993,993,993,993,
+993,993,993,993,993,993,993,993,993,993,993,993,993,993,993,993,
+993,993,993,993,993,993,993,993,993,993,993,993,993,993,993,993,
+993,993,993,993,993,993,993,993,993,993,993,993,993,993,993,993,
+993,993,993,993,993,993,993,993,993,993,993,993,993,993,993,993,
+993,993,993,993,993,993,993,993,993,993,993,993,993,993,993,993,
+993,993,993,993,993,993,993,993,993,993,993,163,163,163,163,163,
/* block 144 */
-995,996,997,163,163,163,163,998,998,998,998,998,998,998,998,998,
-998,998,998,998,998,998,998,998,998,998,998,998,998,998,998,998,
-998,998,998,998,998,998,998,998,998,998,998,998,998,998,998,998,
-998,998,998,998,163,163,163,999,999,999,999,999,999,999,999,999,
-1000,1000,1000,1000,1000,1000,1000,1000,1000,1000,1000,1000,1000,1000,1000,1000,
-1000,1000,1000,1000,1000,1000,1000,1000,1000,1000,1000,1000,1000,1000,1000,1000,
-1000,1000,1000,1000,1000,1000,1000,1000,1000,1000,1000,1000,1000,1000,1000,1000,
-1000,1000,1000,1000,1000,1001,1001,1001,1001,1002,1002,1002,1002,1002,1002,1002,
+994,995,996,163,163,163,163,997,997,997,997,997,997,997,997,997,
+997,997,997,997,997,997,997,997,997,997,997,997,997,997,997,997,
+997,997,997,997,997,997,997,997,997,997,997,997,997,997,997,997,
+997,997,997,997,163,163,163,998,998,998,998,998,998,998,998,998,
+999,999,999,999,999,999,999,999,999,999,999,999,999,999,999,999,
+999,999,999,999,999,999,999,999,999,999,999,999,999,999,999,999,
+999,999,999,999,999,999,999,999,999,999,999,999,999,999,999,999,
+999,999,999,999,999,1000,1000,1000,1000,1001,1001,1001,1001,1001,1001,1001,
/* block 145 */
-1002,1002,1002,1002,1002,1002,1002,1002,1002,1002,1001,1001,1002,1003,1003,163,
-723,723,723,723,723,723,723,723,723,723,723,723,723,163,163,163,
-1002,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+1001,1001,1001,1001,1001,1001,1001,1001,1001,1001,1000,1000,1001,1002,1002,163,
+724,724,724,724,724,724,724,724,724,724,724,724,724,163,163,163,
+1001,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
-460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,
-460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,
-460,460,460,460,460,460,460,460,460,460,460,460,460,158,163,163,
+461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,
+461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,
+461,461,461,461,461,461,461,461,461,461,461,461,461,158,163,163,
/* block 146 */
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
@@ -3856,97 +3870,97 @@ const uint16_t PRIV(ucd_stage2)[] = { /* 76800 bytes, block = 128 */
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
/* block 147 */
+1003,1003,1003,1003,1003,1003,1003,1003,1003,1003,1003,1003,1003,1003,1003,1003,
+1003,1003,1003,1003,1003,1003,1003,1003,1003,1003,1003,1003,1003,163,163,163,
+1004,1004,1004,1004,1004,1004,1004,1004,1004,1004,1004,1004,1004,1004,1004,1004,
+1004,1004,1004,1004,1004,1004,1004,1004,1004,1004,1004,1004,1004,1004,1004,1004,
1004,1004,1004,1004,1004,1004,1004,1004,1004,1004,1004,1004,1004,1004,1004,1004,
-1004,1004,1004,1004,1004,1004,1004,1004,1004,1004,1004,1004,1004,163,163,163,
-1005,1005,1005,1005,1005,1005,1005,1005,1005,1005,1005,1005,1005,1005,1005,1005,
-1005,1005,1005,1005,1005,1005,1005,1005,1005,1005,1005,1005,1005,1005,1005,1005,
-1005,1005,1005,1005,1005,1005,1005,1005,1005,1005,1005,1005,1005,1005,1005,1005,
-1005,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
-1006,1007,1007,1007,1007,1007,1007,1007,1007,1007,1007,1007,1007,1007,1007,1007,
-1007,1007,1007,1007,1007,1007,1007,1007,1007,1007,1007,1007,163,163,163,163,
+1004,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+1005,1006,1006,1006,1006,1006,1006,1006,1006,1006,1006,1006,1006,1006,1006,1006,
+1006,1006,1006,1006,1006,1006,1006,1006,1006,1006,1006,1006,163,163,163,163,
/* block 148 */
-1008,1008,1008,1008,1008,1008,1008,1008,1008,1008,1008,1008,1008,1008,1008,1008,
-1008,1008,1008,1008,1008,1008,1008,1008,1008,1008,1008,1008,1008,1008,1008,1008,
-1009,1009,1009,1009,163,163,163,163,163,163,163,163,163,1008,1008,1008,
-1010,1010,1010,1010,1010,1010,1010,1010,1010,1010,1010,1010,1010,1010,1010,1010,
-1010,1011,1010,1010,1010,1010,1010,1010,1010,1010,1011,163,163,163,163,163,
-1012,1012,1012,1012,1012,1012,1012,1012,1012,1012,1012,1012,1012,1012,1012,1012,
-1012,1012,1012,1012,1012,1012,1012,1012,1012,1012,1012,1012,1012,1012,1012,1012,
-1012,1012,1012,1012,1012,1012,1013,1013,1013,1013,1013,163,163,163,163,163,
+1007,1007,1007,1007,1007,1007,1007,1007,1007,1007,1007,1007,1007,1007,1007,1007,
+1007,1007,1007,1007,1007,1007,1007,1007,1007,1007,1007,1007,1007,1007,1007,1007,
+1008,1008,1008,1008,163,163,163,163,163,163,163,163,163,1007,1007,1007,
+1009,1009,1009,1009,1009,1009,1009,1009,1009,1009,1009,1009,1009,1009,1009,1009,
+1009,1010,1009,1009,1009,1009,1009,1009,1009,1009,1010,163,163,163,163,163,
+1011,1011,1011,1011,1011,1011,1011,1011,1011,1011,1011,1011,1011,1011,1011,1011,
+1011,1011,1011,1011,1011,1011,1011,1011,1011,1011,1011,1011,1011,1011,1011,1011,
+1011,1011,1011,1011,1011,1011,1012,1012,1012,1012,1012,163,163,163,163,163,
/* block 149 */
-1014,1014,1014,1014,1014,1014,1014,1014,1014,1014,1014,1014,1014,1014,1014,1014,
-1014,1014,1014,1014,1014,1014,1014,1014,1014,1014,1014,1014,1014,1014,163,1015,
-1016,1016,1016,1016,1016,1016,1016,1016,1016,1016,1016,1016,1016,1016,1016,1016,
-1016,1016,1016,1016,1016,1016,1016,1016,1016,1016,1016,1016,1016,1016,1016,1016,
-1016,1016,1016,1016,163,163,163,163,1016,1016,1016,1016,1016,1016,1016,1016,
-1017,1018,1018,1018,1018,1018,163,163,163,163,163,163,163,163,163,163,
+1013,1013,1013,1013,1013,1013,1013,1013,1013,1013,1013,1013,1013,1013,1013,1013,
+1013,1013,1013,1013,1013,1013,1013,1013,1013,1013,1013,1013,1013,1013,163,1014,
+1015,1015,1015,1015,1015,1015,1015,1015,1015,1015,1015,1015,1015,1015,1015,1015,
+1015,1015,1015,1015,1015,1015,1015,1015,1015,1015,1015,1015,1015,1015,1015,1015,
+1015,1015,1015,1015,163,163,163,163,1015,1015,1015,1015,1015,1015,1015,1015,
+1016,1017,1017,1017,1017,1017,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
/* block 150 */
+1018,1018,1018,1018,1018,1018,1018,1018,1018,1018,1018,1018,1018,1018,1018,1018,
+1018,1018,1018,1018,1018,1018,1018,1018,1018,1018,1018,1018,1018,1018,1018,1018,
+1018,1018,1018,1018,1018,1018,1018,1018,1019,1019,1019,1019,1019,1019,1019,1019,
1019,1019,1019,1019,1019,1019,1019,1019,1019,1019,1019,1019,1019,1019,1019,1019,
1019,1019,1019,1019,1019,1019,1019,1019,1019,1019,1019,1019,1019,1019,1019,1019,
-1019,1019,1019,1019,1019,1019,1019,1019,1020,1020,1020,1020,1020,1020,1020,1020,
1020,1020,1020,1020,1020,1020,1020,1020,1020,1020,1020,1020,1020,1020,1020,1020,
1020,1020,1020,1020,1020,1020,1020,1020,1020,1020,1020,1020,1020,1020,1020,1020,
-1021,1021,1021,1021,1021,1021,1021,1021,1021,1021,1021,1021,1021,1021,1021,1021,
-1021,1021,1021,1021,1021,1021,1021,1021,1021,1021,1021,1021,1021,1021,1021,1021,
-1021,1021,1021,1021,1021,1021,1021,1021,1021,1021,1021,1021,1021,1021,1021,1021,
+1020,1020,1020,1020,1020,1020,1020,1020,1020,1020,1020,1020,1020,1020,1020,1020,
/* block 151 */
-1022,1022,1022,1022,1022,1022,1022,1022,1022,1022,1022,1022,1022,1022,1022,1022,
-1022,1022,1022,1022,1022,1022,1022,1022,1022,1022,1022,1022,1022,1022,163,163,
-1023,1023,1023,1023,1023,1023,1023,1023,1023,1023,163,163,163,163,163,163,
-1024,1024,1024,1024,1024,1024,1024,1024,1024,1024,1024,1024,1024,1024,1024,1024,
+1021,1021,1021,1021,1021,1021,1021,1021,1021,1021,1021,1021,1021,1021,1021,1021,
+1021,1021,1021,1021,1021,1021,1021,1021,1021,1021,1021,1021,1021,1021,163,163,
+1022,1022,1022,1022,1022,1022,1022,1022,1022,1022,163,163,163,163,163,163,
+1023,1023,1023,1023,1023,1023,1023,1023,1023,1023,1023,1023,1023,1023,1023,1023,
+1023,1023,1023,1023,1023,1023,1023,1023,1023,1023,1023,1023,1023,1023,1023,1023,
+1023,1023,1023,1023,163,163,163,163,1024,1024,1024,1024,1024,1024,1024,1024,
1024,1024,1024,1024,1024,1024,1024,1024,1024,1024,1024,1024,1024,1024,1024,1024,
-1024,1024,1024,1024,163,163,163,163,1025,1025,1025,1025,1025,1025,1025,1025,
-1025,1025,1025,1025,1025,1025,1025,1025,1025,1025,1025,1025,1025,1025,1025,1025,
-1025,1025,1025,1025,1025,1025,1025,1025,1025,1025,1025,1025,163,163,163,163,
+1024,1024,1024,1024,1024,1024,1024,1024,1024,1024,1024,1024,163,163,163,163,
/* block 152 */
+1025,1025,1025,1025,1025,1025,1025,1025,1025,1025,1025,1025,1025,1025,1025,1025,
+1025,1025,1025,1025,1025,1025,1025,1025,1025,1025,1025,1025,1025,1025,1025,1025,
+1025,1025,1025,1025,1025,1025,1025,1025,163,163,163,163,163,163,163,163,
1026,1026,1026,1026,1026,1026,1026,1026,1026,1026,1026,1026,1026,1026,1026,1026,
1026,1026,1026,1026,1026,1026,1026,1026,1026,1026,1026,1026,1026,1026,1026,1026,
-1026,1026,1026,1026,1026,1026,1026,1026,163,163,163,163,163,163,163,163,
-1027,1027,1027,1027,1027,1027,1027,1027,1027,1027,1027,1027,1027,1027,1027,1027,
-1027,1027,1027,1027,1027,1027,1027,1027,1027,1027,1027,1027,1027,1027,1027,1027,
-1027,1027,1027,1027,1027,1027,1027,1027,1027,1027,1027,1027,1027,1027,1027,1027,
-1027,1027,1027,1027,163,163,163,163,163,163,163,163,163,163,163,1028,
-1029,1029,1029,1029,1029,1029,1029,1029,1029,1029,1029,163,1029,1029,1029,1029,
+1026,1026,1026,1026,1026,1026,1026,1026,1026,1026,1026,1026,1026,1026,1026,1026,
+1026,1026,1026,1026,163,163,163,163,163,163,163,163,163,163,163,1027,
+1028,1028,1028,1028,1028,1028,1028,1028,1028,1028,1028,163,1028,1028,1028,1028,
/* block 153 */
-1029,1029,1029,1029,1029,1029,1029,1029,1029,1029,1029,163,1029,1029,1029,1029,
-1029,1029,1029,163,1029,1029,163,1030,1030,1030,1030,1030,1030,1030,1030,1030,
-1030,1030,163,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,
-1030,1030,163,1030,1030,1030,1030,1030,1030,1030,163,1030,1030,163,163,163,
+1028,1028,1028,1028,1028,1028,1028,1028,1028,1028,1028,163,1028,1028,1028,1028,
+1028,1028,1028,163,1028,1028,163,1029,1029,1029,1029,1029,1029,1029,1029,1029,
+1029,1029,163,1029,1029,1029,1029,1029,1029,1029,1029,1029,1029,1029,1029,1029,
+1029,1029,163,1029,1029,1029,1029,1029,1029,1029,163,1029,1029,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
/* block 154 */
-1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,
-1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,
-1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,
-1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,
-1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,
-1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,
-1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,
-1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,
+1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,
+1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,
+1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,
+1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,
+1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,
+1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,
+1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,
+1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,
/* block 155 */
-1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,
-1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,
-1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,
-1031,1031,1031,1031,1031,1031,1031,163,163,163,163,163,163,163,163,163,
-1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,1031,
-1031,1031,1031,1031,1031,1031,163,163,163,163,163,163,163,163,163,163,
-1031,1031,1031,1031,1031,1031,1031,1031,163,163,163,163,163,163,163,163,
+1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,
+1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,
+1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,
+1030,1030,1030,1030,1030,1030,1030,163,163,163,163,163,163,163,163,163,
+1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,1030,
+1030,1030,1030,1030,1030,1030,163,163,163,163,163,163,163,163,163,163,
+1030,1030,1030,1030,1030,1030,1030,1030,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
/* block 156 */
-147,1032,1032,147,147,147,163,147,147,147,147,147,147,147,147,147,
+147,1031,1031,147,147,147,163,147,147,147,147,147,147,147,147,147,
147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,
147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,
147,163,147,147,147,147,147,147,147,147,147,163,163,163,163,163,
@@ -3956,79 +3970,79 @@ const uint16_t PRIV(ucd_stage2)[] = { /* 76800 bytes, block = 128 */
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
/* block 157 */
-1033,1033,1033,1033,1033,1033,262,262,1033,262,1033,1033,1033,1033,1033,1033,
-1033,1033,1033,1033,1033,1033,1033,1033,1033,1033,1033,1033,1033,1033,1033,1033,
+1032,1032,1032,1032,1032,1032,262,262,1032,262,1032,1032,1032,1032,1032,1032,
+1032,1032,1032,1032,1032,1032,1032,1032,1032,1032,1032,1032,1032,1032,1032,1032,
+1032,1032,1032,1032,1032,1032,1032,1032,1032,1032,1032,1032,1032,1032,1032,1032,
+1032,1032,1032,1032,1032,1032,262,1032,1032,262,262,262,1032,262,262,1032,
1033,1033,1033,1033,1033,1033,1033,1033,1033,1033,1033,1033,1033,1033,1033,1033,
-1033,1033,1033,1033,1033,1033,262,1033,1033,262,262,262,1033,262,262,1033,
-1034,1034,1034,1034,1034,1034,1034,1034,1034,1034,1034,1034,1034,1034,1034,1034,
-1034,1034,1034,1034,1034,1034,262,1035,1036,1036,1036,1036,1036,1036,1036,1036,
-1037,1037,1037,1037,1037,1037,1037,1037,1037,1037,1037,1037,1037,1037,1037,1037,
-1037,1037,1037,1037,1037,1037,1037,1038,1038,1039,1039,1039,1039,1039,1039,1039,
+1033,1033,1033,1033,1033,1033,262,1034,1035,1035,1035,1035,1035,1035,1035,1035,
+1036,1036,1036,1036,1036,1036,1036,1036,1036,1036,1036,1036,1036,1036,1036,1036,
+1036,1036,1036,1036,1036,1036,1036,1037,1037,1038,1038,1038,1038,1038,1038,1038,
/* block 158 */
-1040,1040,1040,1040,1040,1040,1040,1040,1040,1040,1040,1040,1040,1040,1040,1040,
-1040,1040,1040,1040,1040,1040,1040,1040,1040,1040,1040,1040,1040,1040,1040,262,
-262,262,262,262,262,262,262,1041,1041,1041,1041,1041,1041,1041,1041,1041,
+1039,1039,1039,1039,1039,1039,1039,1039,1039,1039,1039,1039,1039,1039,1039,1039,
+1039,1039,1039,1039,1039,1039,1039,1039,1039,1039,1039,1039,1039,1039,1039,262,
+262,262,262,262,262,262,262,1040,1040,1040,1040,1040,1040,1040,1040,1040,
262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
-1042,1042,1042,1042,1042,1042,1042,1042,1042,1042,1042,1042,1042,1042,1042,1042,
-1042,1042,1042,262,1042,1042,262,262,262,262,262,1043,1043,1043,1043,1043,
+1041,1041,1041,1041,1041,1041,1041,1041,1041,1041,1041,1041,1041,1041,1041,1041,
+1041,1041,1041,262,1041,1041,262,262,262,262,262,1042,1042,1042,1042,1042,
/* block 159 */
-1044,1044,1044,1044,1044,1044,1044,1044,1044,1044,1044,1044,1044,1044,1044,1044,
-1044,1044,1044,1044,1044,1044,1045,1045,1045,1045,1045,1045,262,262,262,1046,
-1047,1047,1047,1047,1047,1047,1047,1047,1047,1047,1047,1047,1047,1047,1047,1047,
-1047,1047,1047,1047,1047,1047,1047,1047,1047,1047,262,262,262,262,262,1048,
+1043,1043,1043,1043,1043,1043,1043,1043,1043,1043,1043,1043,1043,1043,1043,1043,
+1043,1043,1043,1043,1043,1043,1044,1044,1044,1044,1044,1044,262,262,262,1045,
+1046,1046,1046,1046,1046,1046,1046,1046,1046,1046,1046,1046,1046,1046,1046,1046,
+1046,1046,1046,1046,1046,1046,1046,1046,1046,1046,262,262,262,262,262,1047,
262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
/* block 160 */
+1048,1048,1048,1048,1048,1048,1048,1048,1048,1048,1048,1048,1048,1048,1048,1048,
+1048,1048,1048,1048,1048,1048,1048,1048,1048,1048,1048,1048,1048,1048,1048,1048,
1049,1049,1049,1049,1049,1049,1049,1049,1049,1049,1049,1049,1049,1049,1049,1049,
-1049,1049,1049,1049,1049,1049,1049,1049,1049,1049,1049,1049,1049,1049,1049,1049,
+1049,1049,1049,1049,1049,1049,1049,1049,262,262,262,262,1050,1050,1049,1049,
+1050,1050,1050,1050,1050,1050,1050,1050,1050,1050,1050,1050,1050,1050,1050,1050,
+262,262,1050,1050,1050,1050,1050,1050,1050,1050,1050,1050,1050,1050,1050,1050,
+1050,1050,1050,1050,1050,1050,1050,1050,1050,1050,1050,1050,1050,1050,1050,1050,
1050,1050,1050,1050,1050,1050,1050,1050,1050,1050,1050,1050,1050,1050,1050,1050,
-1050,1050,1050,1050,1050,1050,1050,1050,262,262,262,262,1051,1051,1050,1050,
-1051,1051,1051,1051,1051,1051,1051,1051,1051,1051,1051,1051,1051,1051,1051,1051,
-262,262,1051,1051,1051,1051,1051,1051,1051,1051,1051,1051,1051,1051,1051,1051,
-1051,1051,1051,1051,1051,1051,1051,1051,1051,1051,1051,1051,1051,1051,1051,1051,
-1051,1051,1051,1051,1051,1051,1051,1051,1051,1051,1051,1051,1051,1051,1051,1051,
/* block 161 */
-1052,1053,1053,1053,262,1053,1053,262,262,262,262,262,1053,1053,1053,1053,
-1052,1052,1052,1052,262,1052,1052,1052,262,1052,1052,1052,1052,1052,1052,1052,
-1052,1052,1052,1052,1052,1052,1052,1052,1052,1052,1052,1052,1052,1052,1052,1052,
-1052,1052,1052,1052,1052,1052,262,262,1054,1054,1054,262,262,262,262,1055,
-1056,1056,1056,1056,1056,1056,1056,1056,1056,262,262,262,262,262,262,262,
-1057,1057,1057,1057,1057,1057,1058,1058,1057,262,262,262,262,262,262,262,
-1059,1059,1059,1059,1059,1059,1059,1059,1059,1059,1059,1059,1059,1059,1059,1059,
-1059,1059,1059,1059,1059,1059,1059,1059,1059,1059,1059,1059,1059,1060,1060,1061,
+1051,1052,1052,1052,262,1052,1052,262,262,262,262,262,1052,1052,1052,1052,
+1051,1051,1051,1051,262,1051,1051,1051,262,1051,1051,1051,1051,1051,1051,1051,
+1051,1051,1051,1051,1051,1051,1051,1051,1051,1051,1051,1051,1051,1051,1051,1051,
+1051,1051,1051,1051,1051,1051,262,262,1053,1053,1053,262,262,262,262,1054,
+1055,1055,1055,1055,1055,1055,1055,1055,1055,262,262,262,262,262,262,262,
+1056,1056,1056,1056,1056,1056,1057,1057,1056,262,262,262,262,262,262,262,
+1058,1058,1058,1058,1058,1058,1058,1058,1058,1058,1058,1058,1058,1058,1058,1058,
+1058,1058,1058,1058,1058,1058,1058,1058,1058,1058,1058,1058,1058,1059,1059,1060,
/* block 162 */
-1062,1062,1062,1062,1062,1062,1062,1062,1062,1062,1062,1062,1062,1062,1062,1062,
-1062,1062,1062,1062,1062,1062,1062,1062,1062,1062,1062,1062,1062,1063,1063,1063,
+1061,1061,1061,1061,1061,1061,1061,1061,1061,1061,1061,1061,1061,1061,1061,1061,
+1061,1061,1061,1061,1061,1061,1061,1061,1061,1061,1061,1061,1061,1062,1062,1062,
262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
-1064,1064,1064,1064,1064,1064,1064,1064,1065,1064,1064,1064,1064,1064,1064,1064,
-1064,1064,1064,1064,1064,1064,1064,1064,1064,1064,1064,1064,1064,1064,1064,1064,
-1064,1064,1064,1064,1064,1066,1066,262,262,262,262,1067,1067,1067,1067,1067,
-1068,1068,1069,1068,1068,1068,1070,262,262,262,262,262,262,262,262,262,
+1063,1063,1063,1063,1063,1063,1063,1063,1064,1063,1063,1063,1063,1063,1063,1063,
+1063,1063,1063,1063,1063,1063,1063,1063,1063,1063,1063,1063,1063,1063,1063,1063,
+1063,1063,1063,1063,1063,1065,1065,262,262,262,262,1066,1066,1066,1066,1066,
+1067,1067,1068,1067,1067,1067,1069,262,262,262,262,262,262,262,262,262,
/* block 163 */
-1071,1071,1071,1071,1071,1071,1071,1071,1071,1071,1071,1071,1071,1071,1071,1071,
-1071,1071,1071,1071,1071,1071,1071,1071,1071,1071,1071,1071,1071,1071,1071,1071,
-1071,1071,1071,1071,1071,1071,1071,1071,1071,1071,1071,1071,1071,1071,1071,1071,
-1071,1071,1071,1071,1071,1071,262,262,262,1072,1073,1073,1073,1073,1073,1073,
-1074,1074,1074,1074,1074,1074,1074,1074,1074,1074,1074,1074,1074,1074,1074,1074,
-1074,1074,1074,1074,1074,1074,262,262,1075,1075,1075,1075,1075,1075,1075,1075,
-1076,1076,1076,1076,1076,1076,1076,1076,1076,1076,1076,1076,1076,1076,1076,1076,
-1076,1076,1076,262,262,262,262,262,1077,1077,1077,1077,1077,1077,1077,1077,
+1070,1070,1070,1070,1070,1070,1070,1070,1070,1070,1070,1070,1070,1070,1070,1070,
+1070,1070,1070,1070,1070,1070,1070,1070,1070,1070,1070,1070,1070,1070,1070,1070,
+1070,1070,1070,1070,1070,1070,1070,1070,1070,1070,1070,1070,1070,1070,1070,1070,
+1070,1070,1070,1070,1070,1070,262,262,262,1071,1072,1072,1072,1072,1072,1072,
+1073,1073,1073,1073,1073,1073,1073,1073,1073,1073,1073,1073,1073,1073,1073,1073,
+1073,1073,1073,1073,1073,1073,262,262,1074,1074,1074,1074,1074,1074,1074,1074,
+1075,1075,1075,1075,1075,1075,1075,1075,1075,1075,1075,1075,1075,1075,1075,1075,
+1075,1075,1075,262,262,262,262,262,1076,1076,1076,1076,1076,1076,1076,1076,
/* block 164 */
-1078,1078,1078,1078,1078,1078,1078,1078,1078,1078,1078,1078,1078,1078,1078,1078,
-1078,1078,262,262,262,262,262,262,262,1079,1079,1079,1079,262,262,262,
-262,262,262,262,262,262,262,262,262,1080,1080,1080,1080,1080,1080,1080,
+1077,1077,1077,1077,1077,1077,1077,1077,1077,1077,1077,1077,1077,1077,1077,1077,
+1077,1077,262,262,262,262,262,262,262,1078,1078,1078,1078,262,262,262,
+262,262,262,262,262,262,262,262,262,1079,1079,1079,1079,1079,1079,1079,
262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
@@ -4036,30 +4050,30 @@ const uint16_t PRIV(ucd_stage2)[] = { /* 76800 bytes, block = 128 */
262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
/* block 165 */
-1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,
-1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,
-1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,
-1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,
-1081,1081,1081,1081,1081,1081,1081,1081,1081,262,262,262,262,262,262,262,
+1080,1080,1080,1080,1080,1080,1080,1080,1080,1080,1080,1080,1080,1080,1080,1080,
+1080,1080,1080,1080,1080,1080,1080,1080,1080,1080,1080,1080,1080,1080,1080,1080,
+1080,1080,1080,1080,1080,1080,1080,1080,1080,1080,1080,1080,1080,1080,1080,1080,
+1080,1080,1080,1080,1080,1080,1080,1080,1080,1080,1080,1080,1080,1080,1080,1080,
+1080,1080,1080,1080,1080,1080,1080,1080,1080,262,262,262,262,262,262,262,
262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
/* block 166 */
+1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,
+1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,
+1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,1081,
+1081,1081,1081,262,262,262,262,262,262,262,262,262,262,262,262,262,
1082,1082,1082,1082,1082,1082,1082,1082,1082,1082,1082,1082,1082,1082,1082,1082,
1082,1082,1082,1082,1082,1082,1082,1082,1082,1082,1082,1082,1082,1082,1082,1082,
1082,1082,1082,1082,1082,1082,1082,1082,1082,1082,1082,1082,1082,1082,1082,1082,
-1082,1082,1082,262,262,262,262,262,262,262,262,262,262,262,262,262,
-1083,1083,1083,1083,1083,1083,1083,1083,1083,1083,1083,1083,1083,1083,1083,1083,
-1083,1083,1083,1083,1083,1083,1083,1083,1083,1083,1083,1083,1083,1083,1083,1083,
-1083,1083,1083,1083,1083,1083,1083,1083,1083,1083,1083,1083,1083,1083,1083,1083,
-1083,1083,1083,262,262,262,262,262,262,262,1084,1084,1084,1084,1084,1084,
+1082,1082,1082,262,262,262,262,262,262,262,1083,1083,1083,1083,1083,1083,
/* block 167 */
-1085,1085,1085,1085,1085,1085,1085,1085,1085,1085,1085,1085,1085,1085,1085,1085,
-1085,1085,1085,1085,1085,1085,1085,1085,1085,1085,1085,1085,1085,1085,1085,1085,
-1085,1085,1086,1086,1087,1087,1087,1087,302,302,302,302,302,302,302,302,
-1088,1088,1088,1088,1088,1088,1088,1088,1088,1088,302,302,302,302,302,302,
+1084,1084,1084,1084,1084,1084,1084,1084,1084,1084,1084,1084,1084,1084,1084,1084,
+1084,1084,1084,1084,1084,1084,1084,1084,1084,1084,1084,1084,1084,1084,1084,1084,
+1084,1084,1085,1085,1086,1086,1086,1086,302,302,302,302,302,302,302,302,
+1087,1087,1087,1087,1087,1087,1087,1087,1087,1087,302,302,302,302,302,302,
262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
@@ -4082,174 +4096,174 @@ const uint16_t PRIV(ucd_stage2)[] = { /* 76800 bytes, block = 128 */
262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
-1089,1089,1089,1089,1089,1089,1089,1089,1089,1089,1089,1089,1089,1089,1089,1089,
-1089,1089,1089,1089,1089,1089,1089,1089,1089,1089,1089,1089,1089,1089,1089,262,
+1088,1088,1088,1088,1088,1088,1088,1088,1088,1088,1088,1088,1088,1088,1088,1088,
+1088,1088,1088,1088,1088,1088,1088,1088,1088,1088,1088,1088,1088,1088,1088,262,
/* block 170 */
-1090,1090,1090,1090,1090,1090,1090,1090,1090,1090,1090,1090,1090,1090,1090,1090,
-1090,1090,1090,1090,1090,1090,1090,1090,1090,1090,1090,1090,1090,1090,1090,1090,
-1090,1090,1090,1090,1090,1090,1090,1090,1090,1090,262,1091,1091,1092,262,262,
-1090,1090,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
-262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
-262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
-262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
-262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
+1089,1089,1089,1089,1089,1089,1089,1089,1089,1089,1089,1089,1089,1089,1089,1089,
+1089,1089,1089,1089,1089,1089,1089,1089,1089,1089,1089,1089,1089,1089,1089,1089,
+1089,1089,1089,1089,1089,1089,1089,1089,1089,1089,262,1090,1090,1091,262,262,
+1089,1089,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
+302,302,302,302,302,302,302,302,302,302,302,302,302,302,302,302,
+302,302,302,302,302,302,302,302,302,302,302,302,302,302,302,302,
+302,302,302,302,302,302,302,302,302,302,302,302,302,302,302,302,
+302,302,302,302,302,302,302,302,302,302,302,302,302,291,291,291,
/* block 171 */
-1093,1093,1093,1093,1093,1093,1093,1093,1093,1093,1093,1093,1093,1093,1093,1093,
-1093,1093,1093,1093,1093,1093,1093,1093,1093,1093,1093,1093,1093,1094,1094,1094,
-1094,1094,1094,1094,1094,1094,1094,1093,262,262,262,262,262,262,262,262,
-1095,1095,1095,1095,1095,1095,1095,1095,1095,1095,1095,1095,1095,1095,1095,1095,
-1095,1095,1095,1095,1095,1095,1096,1096,1096,1096,1096,1096,1096,1096,1096,1096,
-1096,1097,1097,1097,1097,1098,1098,1098,1098,1098,302,302,302,302,302,302,
+1092,1092,1092,1092,1092,1092,1092,1092,1092,1092,1092,1092,1092,1092,1092,1092,
+1092,1092,1092,1092,1092,1092,1092,1092,1092,1092,1092,1092,1092,1093,1093,1093,
+1093,1093,1093,1093,1093,1093,1093,1092,262,262,262,262,262,262,262,262,
+1094,1094,1094,1094,1094,1094,1094,1094,1094,1094,1094,1094,1094,1094,1094,1094,
+1094,1094,1094,1094,1094,1094,1095,1095,1095,1095,1095,1095,1095,1095,1095,1095,
+1095,1096,1096,1096,1096,1097,1097,1097,1097,1097,302,302,302,302,302,302,
302,302,302,302,302,302,302,302,302,302,302,302,302,302,302,302,
-1099,1099,1099,1099,1099,1099,1099,1099,1099,1099,1099,1099,1099,1099,1099,1099,
+1098,1098,1098,1098,1098,1098,1098,1098,1098,1098,1098,1098,1098,1098,1098,1098,
/* block 172 */
-1099,1099,1100,1100,1100,1100,1101,1101,1101,1101,262,262,262,262,262,262,
+1098,1098,1099,1099,1099,1099,1100,1100,1100,1100,262,262,262,262,262,262,
262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
-1102,1102,1102,1102,1102,1102,1102,1102,1102,1102,1102,1102,1102,1102,1102,1102,
-1102,1102,1102,1102,1102,1103,1103,1103,1103,1103,1103,1103,262,262,262,262,
+1101,1101,1101,1101,1101,1101,1101,1101,1101,1101,1101,1101,1101,1101,1101,1101,
+1101,1101,1101,1101,1101,1102,1102,1102,1102,1102,1102,1102,262,262,262,262,
262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
-1104,1104,1104,1104,1104,1104,1104,1104,1104,1104,1104,1104,1104,1104,1104,1104,
-1104,1104,1104,1104,1104,1104,1104,262,262,262,262,262,262,262,262,262,
+1103,1103,1103,1103,1103,1103,1103,1103,1103,1103,1103,1103,1103,1103,1103,1103,
+1103,1103,1103,1103,1103,1103,1103,262,262,262,262,262,262,262,262,262,
/* block 173 */
-1105,1106,1105,1107,1107,1107,1107,1107,1107,1107,1107,1107,1107,1107,1107,1107,
-1107,1107,1107,1107,1107,1107,1107,1107,1107,1107,1107,1107,1107,1107,1107,1107,
-1107,1107,1107,1107,1107,1107,1107,1107,1107,1107,1107,1107,1107,1107,1107,1107,
-1107,1107,1107,1107,1107,1107,1107,1107,1106,1106,1106,1106,1106,1106,1106,1106,
-1106,1106,1106,1106,1106,1106,1108,1109,1109,1110,1110,1110,1110,1110,163,163,
-163,163,1111,1111,1111,1111,1111,1111,1111,1111,1111,1111,1111,1111,1111,1111,
-1111,1111,1111,1111,1111,1111,1112,1112,1112,1112,1112,1112,1112,1112,1112,1112,
-1108,1107,1107,1106,1106,1107,163,163,163,163,163,163,163,163,163,1113,
+1104,1105,1104,1106,1106,1106,1106,1106,1106,1106,1106,1106,1106,1106,1106,1106,
+1106,1106,1106,1106,1106,1106,1106,1106,1106,1106,1106,1106,1106,1106,1106,1106,
+1106,1106,1106,1106,1106,1106,1106,1106,1106,1106,1106,1106,1106,1106,1106,1106,
+1106,1106,1106,1106,1106,1106,1106,1106,1105,1105,1105,1105,1105,1105,1105,1105,
+1105,1105,1105,1105,1105,1105,1107,1108,1108,1109,1109,1109,1109,1109,163,163,
+163,163,1110,1110,1110,1110,1110,1110,1110,1110,1110,1110,1110,1110,1110,1110,
+1110,1110,1110,1110,1110,1110,1111,1111,1111,1111,1111,1111,1111,1111,1111,1111,
+1107,1106,1106,1105,1105,1106,163,163,163,163,163,163,163,163,163,1112,
/* block 174 */
-1114,1114,1115,1116,1116,1116,1116,1116,1116,1116,1116,1116,1116,1116,1116,1116,
-1116,1116,1116,1116,1116,1116,1116,1116,1116,1116,1116,1116,1116,1116,1116,1116,
-1116,1116,1116,1116,1116,1116,1116,1116,1116,1116,1116,1116,1116,1116,1116,1116,
-1115,1115,1115,1117,1117,1117,1117,1115,1115,1118,1119,1120,1120,1121,1122,1122,
-1122,1122,1117,163,163,163,163,163,163,163,163,163,163,1121,163,163,
-1123,1123,1123,1123,1123,1123,1123,1123,1123,1123,1123,1123,1123,1123,1123,1123,
-1123,1123,1123,1123,1123,1123,1123,1123,1123,163,163,163,163,163,163,163,
-1124,1124,1124,1124,1124,1124,1124,1124,1124,1124,163,163,163,163,163,163,
+1113,1113,1114,1115,1115,1115,1115,1115,1115,1115,1115,1115,1115,1115,1115,1115,
+1115,1115,1115,1115,1115,1115,1115,1115,1115,1115,1115,1115,1115,1115,1115,1115,
+1115,1115,1115,1115,1115,1115,1115,1115,1115,1115,1115,1115,1115,1115,1115,1115,
+1114,1114,1114,1113,1113,1113,1113,1114,1114,1116,1117,1118,1118,1119,1120,1120,
+1120,1120,1113,163,163,163,163,163,163,163,163,163,163,1119,163,163,
+1121,1121,1121,1121,1121,1121,1121,1121,1121,1121,1121,1121,1121,1121,1121,1121,
+1121,1121,1121,1121,1121,1121,1121,1121,1121,163,163,163,163,163,163,163,
+1122,1122,1122,1122,1122,1122,1122,1122,1122,1122,163,163,163,163,163,163,
/* block 175 */
-1125,1125,1125,1126,1126,1126,1126,1126,1126,1126,1126,1126,1126,1126,1126,1126,
-1126,1126,1126,1126,1126,1126,1126,1126,1126,1126,1126,1126,1126,1126,1126,1126,
-1126,1126,1126,1126,1126,1126,1126,1125,1125,1125,1125,1125,1127,1125,1125,1125,
-1125,1125,1125,1128,1128,163,1129,1129,1129,1129,1129,1129,1129,1129,1129,1129,
-1130,1131,1131,1131,1126,1127,1127,1126,163,163,163,163,163,163,163,163,
-1132,1132,1132,1132,1132,1132,1132,1132,1132,1132,1132,1132,1132,1132,1132,1132,
-1132,1132,1132,1132,1132,1132,1132,1132,1132,1132,1132,1132,1132,1132,1132,1132,
-1132,1132,1132,1133,1134,1134,1132,163,163,163,163,163,163,163,163,163,
+1123,1123,1123,1124,1124,1124,1124,1124,1124,1124,1124,1124,1124,1124,1124,1124,
+1124,1124,1124,1124,1124,1124,1124,1124,1124,1124,1124,1124,1124,1124,1124,1124,
+1124,1124,1124,1124,1124,1124,1124,1123,1123,1123,1123,1123,1125,1123,1123,1123,
+1123,1123,1123,1126,1126,163,1127,1127,1127,1127,1127,1127,1127,1127,1127,1127,
+1128,1129,1129,1129,1124,1125,1125,1124,163,163,163,163,163,163,163,163,
+1130,1130,1130,1130,1130,1130,1130,1130,1130,1130,1130,1130,1130,1130,1130,1130,
+1130,1130,1130,1130,1130,1130,1130,1130,1130,1130,1130,1130,1130,1130,1130,1130,
+1130,1130,1130,1131,1132,1132,1130,163,163,163,163,163,163,163,163,163,
/* block 176 */
-1135,1135,1136,1137,1137,1137,1137,1137,1137,1137,1137,1137,1137,1137,1137,1137,
-1137,1137,1137,1137,1137,1137,1137,1137,1137,1137,1137,1137,1137,1137,1137,1137,
-1137,1137,1137,1137,1137,1137,1137,1137,1137,1137,1137,1137,1137,1137,1137,1137,
-1137,1137,1137,1136,1136,1136,1135,1135,1135,1135,1135,1135,1135,1135,1135,1136,
-1138,1137,1139,1139,1137,1140,1140,1141,1141,1142,1143,1143,1143,1140,1136,1135,
-1144,1144,1144,1144,1144,1144,1144,1144,1144,1144,1137,1141,1137,1141,1140,1140,
-163,1145,1145,1145,1145,1145,1145,1145,1145,1145,1145,1145,1145,1145,1145,1145,
-1145,1145,1145,1145,1145,163,163,163,163,163,163,163,163,163,163,163,
+1133,1133,1134,1135,1135,1135,1135,1135,1135,1135,1135,1135,1135,1135,1135,1135,
+1135,1135,1135,1135,1135,1135,1135,1135,1135,1135,1135,1135,1135,1135,1135,1135,
+1135,1135,1135,1135,1135,1135,1135,1135,1135,1135,1135,1135,1135,1135,1135,1135,
+1135,1135,1135,1134,1134,1134,1133,1133,1133,1133,1133,1133,1133,1133,1133,1134,
+1136,1135,1137,1137,1135,1138,1138,1139,1139,1140,1141,1141,1141,1138,1134,1133,
+1142,1142,1142,1142,1142,1142,1142,1142,1142,1142,1135,1139,1135,1139,1138,1138,
+163,1143,1143,1143,1143,1143,1143,1143,1143,1143,1143,1143,1143,1143,1143,1143,
+1143,1143,1143,1143,1143,163,163,163,163,163,163,163,163,163,163,163,
/* block 177 */
-1146,1146,1146,1146,1146,1146,1146,1146,1146,1146,1146,1146,1146,1146,1146,1146,
-1146,1146,163,1146,1146,1146,1146,1146,1146,1146,1146,1146,1146,1146,1146,1146,
-1146,1146,1146,1146,1146,1146,1146,1146,1146,1146,1146,1146,1147,1147,1147,1148,
-1148,1148,1147,1147,1148,1149,1150,1148,1151,1151,1152,1151,1151,1153,1148,163,
-163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+1144,1144,1144,1144,1144,1144,1144,1144,1144,1144,1144,1144,1144,1144,1144,1144,
+1144,1144,163,1144,1144,1144,1144,1144,1144,1144,1144,1144,1144,1144,1144,1144,
+1144,1144,1144,1144,1144,1144,1144,1144,1144,1144,1144,1144,1145,1145,1145,1146,
+1146,1146,1145,1145,1146,1147,1148,1146,1149,1149,1150,1149,1149,1151,1146,1144,
+1144,1146,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
/* block 178 */
-1154,1154,1154,1154,1154,1154,1154,163,1154,163,1154,1154,1154,1154,163,1154,
-1154,1154,1154,1154,1154,1154,1154,1154,1154,1154,1154,1154,1154,1154,163,1154,
-1154,1154,1154,1154,1154,1154,1154,1154,1154,1155,163,163,163,163,163,163,
-1156,1156,1156,1156,1156,1156,1156,1156,1156,1156,1156,1156,1156,1156,1156,1156,
-1156,1156,1156,1156,1156,1156,1156,1156,1156,1156,1156,1156,1156,1156,1156,1156,
-1156,1156,1156,1156,1156,1156,1156,1156,1156,1156,1156,1156,1156,1156,1156,1157,
-1158,1158,1158,1157,1157,1157,1157,1157,1157,1159,1160,163,163,163,163,163,
-1161,1161,1161,1161,1161,1161,1161,1161,1161,1161,163,163,163,163,163,163,
+1152,1152,1152,1152,1152,1152,1152,163,1152,163,1152,1152,1152,1152,163,1152,
+1152,1152,1152,1152,1152,1152,1152,1152,1152,1152,1152,1152,1152,1152,163,1152,
+1152,1152,1152,1152,1152,1152,1152,1152,1152,1153,163,163,163,163,163,163,
+1154,1154,1154,1154,1154,1154,1154,1154,1154,1154,1154,1154,1154,1154,1154,1154,
+1154,1154,1154,1154,1154,1154,1154,1154,1154,1154,1154,1154,1154,1154,1154,1154,
+1154,1154,1154,1154,1154,1154,1154,1154,1154,1154,1154,1154,1154,1154,1154,1155,
+1156,1156,1156,1155,1155,1155,1155,1155,1155,1157,1158,163,163,163,163,163,
+1159,1159,1159,1159,1159,1159,1159,1159,1159,1159,163,163,163,163,163,163,
/* block 179 */
-1162,1163,1164,1165,163,1166,1166,1166,1166,1166,1166,1166,1166,163,163,1166,
-1166,163,163,1166,1166,1166,1166,1166,1166,1166,1166,1166,1166,1166,1166,1166,
-1166,1166,1166,1166,1166,1166,1166,1166,1166,163,1166,1166,1166,1166,1166,1166,
-1166,163,1166,1166,163,1166,1166,1166,1166,1166,163,1167,1168,1166,1169,1164,
-1162,1164,1164,1164,1164,163,163,1164,1164,163,163,1164,1164,1170,163,163,
-1166,163,163,163,163,163,163,1169,163,163,163,163,163,1171,1166,1166,
-1166,1166,1164,1164,163,163,1172,1172,1172,1172,1172,1172,1172,163,163,163,
-1172,1172,1172,1172,1172,163,163,163,163,163,163,163,163,163,163,163,
+1160,1161,1162,1163,163,1164,1164,1164,1164,1164,1164,1164,1164,163,163,1164,
+1164,163,163,1164,1164,1164,1164,1164,1164,1164,1164,1164,1164,1164,1164,1164,
+1164,1164,1164,1164,1164,1164,1164,1164,1164,163,1164,1164,1164,1164,1164,1164,
+1164,163,1164,1164,163,1164,1164,1164,1164,1164,163,1165,1166,1164,1167,1162,
+1160,1162,1162,1162,1162,163,163,1162,1162,163,163,1162,1162,1168,163,163,
+1164,163,163,163,163,163,163,1167,163,163,163,163,163,1169,1164,1164,
+1164,1164,1162,1162,163,163,1170,1170,1170,1170,1170,1170,1170,163,163,163,
+1170,1170,1170,1170,1170,163,163,163,163,163,163,163,163,163,163,163,
/* block 180 */
-1173,1173,1173,1173,1173,1173,1173,1173,1173,1173,1173,1173,1173,1173,1173,1173,
-1173,1173,1173,1173,1173,1173,1173,1173,1173,1173,1173,1173,1173,1173,1173,1173,
-1173,1173,1173,1173,1173,1173,1173,1173,1173,1173,1173,1173,1173,1173,1173,1173,
-1173,1173,1173,1173,1173,1174,1174,1174,1175,1175,1175,1175,1175,1175,1175,1175,
-1174,1174,1176,1175,1175,1174,1177,1173,1173,1173,1173,1178,1178,1179,1180,1180,
-1181,1181,1181,1181,1181,1181,1181,1181,1181,1181,1179,1179,163,1180,1182,1173,
-1173,1173,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+1171,1171,1171,1171,1171,1171,1171,1171,1171,1171,1171,1171,1171,1171,1171,1171,
+1171,1171,1171,1171,1171,1171,1171,1171,1171,1171,1171,1171,1171,1171,1171,1171,
+1171,1171,1171,1171,1171,1171,1171,1171,1171,1171,1171,1171,1171,1171,1171,1171,
+1171,1171,1171,1171,1171,1172,1172,1172,1173,1173,1173,1173,1173,1173,1173,1173,
+1172,1172,1174,1173,1173,1172,1175,1171,1171,1171,1171,1176,1176,1177,1178,1178,
+1179,1179,1179,1179,1179,1179,1179,1179,1179,1179,1177,1177,163,1178,1180,1171,
+1171,1171,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
/* block 181 */
-1183,1183,1183,1183,1183,1183,1183,1183,1183,1183,1183,1183,1183,1183,1183,1183,
-1183,1183,1183,1183,1183,1183,1183,1183,1183,1183,1183,1183,1183,1183,1183,1183,
-1183,1183,1183,1183,1183,1183,1183,1183,1183,1183,1183,1183,1183,1183,1183,1183,
-1184,1185,1185,1186,1186,1186,1186,1186,1186,1185,1186,1185,1185,1184,1185,1186,
-1186,1185,1187,1188,1183,1183,1189,1183,163,163,163,163,163,163,163,163,
-1190,1190,1190,1190,1190,1190,1190,1190,1190,1190,163,163,163,163,163,163,
+1181,1181,1181,1181,1181,1181,1181,1181,1181,1181,1181,1181,1181,1181,1181,1181,
+1181,1181,1181,1181,1181,1181,1181,1181,1181,1181,1181,1181,1181,1181,1181,1181,
+1181,1181,1181,1181,1181,1181,1181,1181,1181,1181,1181,1181,1181,1181,1181,1181,
+1182,1183,1183,1184,1184,1184,1184,1184,1184,1183,1184,1183,1183,1182,1183,1184,
+1184,1183,1185,1186,1181,1181,1187,1181,163,163,163,163,163,163,163,163,
+1188,1188,1188,1188,1188,1188,1188,1188,1188,1188,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
/* block 182 */
-1191,1191,1191,1191,1191,1191,1191,1191,1191,1191,1191,1191,1191,1191,1191,1191,
-1191,1191,1191,1191,1191,1191,1191,1191,1191,1191,1191,1191,1191,1191,1191,1191,
-1191,1191,1191,1191,1191,1191,1191,1191,1191,1191,1191,1191,1191,1191,1191,1192,
-1193,1193,1194,1194,1194,1194,163,163,1193,1193,1193,1193,1194,1194,1193,1195,
-1196,1197,1198,1198,1199,1199,1200,1200,1200,1198,1198,1198,1198,1198,1198,1198,
-1198,1198,1198,1198,1198,1198,1198,1198,1191,1191,1191,1191,1194,1194,163,163,
+1189,1189,1189,1189,1189,1189,1189,1189,1189,1189,1189,1189,1189,1189,1189,1189,
+1189,1189,1189,1189,1189,1189,1189,1189,1189,1189,1189,1189,1189,1189,1189,1189,
+1189,1189,1189,1189,1189,1189,1189,1189,1189,1189,1189,1189,1189,1189,1189,1190,
+1191,1191,1192,1192,1192,1192,163,163,1191,1191,1191,1191,1192,1192,1191,1193,
+1194,1195,1196,1196,1197,1197,1198,1198,1198,1196,1196,1196,1196,1196,1196,1196,
+1196,1196,1196,1196,1196,1196,1196,1196,1189,1189,1189,1189,1192,1192,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
/* block 183 */
-1201,1201,1201,1201,1201,1201,1201,1201,1201,1201,1201,1201,1201,1201,1201,1201,
-1201,1201,1201,1201,1201,1201,1201,1201,1201,1201,1201,1201,1201,1201,1201,1201,
-1201,1201,1201,1201,1201,1201,1201,1201,1201,1201,1201,1201,1201,1201,1201,1201,
-1202,1202,1202,1203,1203,1203,1203,1203,1203,1203,1203,1202,1202,1203,1202,1204,
-1203,1205,1205,1206,1201,163,163,163,163,163,163,163,163,163,163,163,
-1207,1207,1207,1207,1207,1207,1207,1207,1207,1207,163,163,163,163,163,163,
-530,530,530,530,530,530,530,530,530,530,530,530,530,163,163,163,
+1199,1199,1199,1199,1199,1199,1199,1199,1199,1199,1199,1199,1199,1199,1199,1199,
+1199,1199,1199,1199,1199,1199,1199,1199,1199,1199,1199,1199,1199,1199,1199,1199,
+1199,1199,1199,1199,1199,1199,1199,1199,1199,1199,1199,1199,1199,1199,1199,1199,
+1200,1200,1200,1201,1201,1201,1201,1201,1201,1201,1201,1200,1200,1201,1200,1202,
+1201,1203,1203,1204,1199,163,163,163,163,163,163,163,163,163,163,163,
+1205,1205,1205,1205,1205,1205,1205,1205,1205,1205,163,163,163,163,163,163,
+531,531,531,531,531,531,531,531,531,531,531,531,531,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
/* block 184 */
-1208,1208,1208,1208,1208,1208,1208,1208,1208,1208,1208,1208,1208,1208,1208,1208,
-1208,1208,1208,1208,1208,1208,1208,1208,1208,1208,1208,1208,1208,1208,1208,1208,
-1208,1208,1208,1208,1208,1208,1208,1208,1208,1208,1208,1209,1210,1209,1210,1210,
-1209,1209,1209,1209,1209,1209,1211,1212,1208,1213,163,163,163,163,163,163,
-1214,1214,1214,1214,1214,1214,1214,1214,1214,1214,163,163,163,163,163,163,
+1206,1206,1206,1206,1206,1206,1206,1206,1206,1206,1206,1206,1206,1206,1206,1206,
+1206,1206,1206,1206,1206,1206,1206,1206,1206,1206,1206,1206,1206,1206,1206,1206,
+1206,1206,1206,1206,1206,1206,1206,1206,1206,1206,1206,1207,1208,1207,1208,1208,
+1207,1207,1207,1207,1207,1207,1209,1210,1206,1211,163,163,163,163,163,163,
+1212,1212,1212,1212,1212,1212,1212,1212,1212,1212,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
/* block 185 */
-1215,1215,1215,1215,1215,1215,1215,1215,1215,1215,1215,1215,1215,1215,1215,1215,
-1215,1215,1215,1215,1215,1215,1215,1215,1215,1215,1215,163,163,1216,1216,1216,
-1217,1217,1216,1216,1216,1216,1218,1216,1216,1216,1216,1219,163,163,163,163,
-1220,1220,1220,1220,1220,1220,1220,1220,1220,1220,1221,1221,1222,1222,1222,1223,
-1215,1215,1215,1215,1215,1215,1215,163,163,163,163,163,163,163,163,163,
+1213,1213,1213,1213,1213,1213,1213,1213,1213,1213,1213,1213,1213,1213,1213,1213,
+1213,1213,1213,1213,1213,1213,1213,1213,1213,1213,1213,163,163,1214,1214,1214,
+1215,1215,1214,1214,1214,1214,1216,1214,1214,1214,1214,1217,163,163,163,163,
+1218,1218,1218,1218,1218,1218,1218,1218,1218,1218,1219,1219,1220,1220,1220,1221,
+1213,1213,1213,1213,1213,1213,1213,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
/* block 186 */
-1224,1224,1224,1224,1224,1224,1224,1224,1224,1224,1224,1224,1224,1224,1224,1224,
-1224,1224,1224,1224,1224,1224,1224,1224,1224,1224,1224,1224,1224,1224,1224,1224,
-1224,1224,1224,1224,1224,1224,1224,1224,1224,1224,1224,1224,1225,1225,1225,1226,
-1226,1226,1226,1226,1226,1226,1226,1226,1225,1227,1228,1229,163,163,163,163,
+1222,1222,1222,1222,1222,1222,1222,1222,1222,1222,1222,1222,1222,1222,1222,1222,
+1222,1222,1222,1222,1222,1222,1222,1222,1222,1222,1222,1222,1222,1222,1222,1222,
+1222,1222,1222,1222,1222,1222,1222,1222,1222,1222,1222,1222,1223,1223,1223,1224,
+1224,1224,1224,1224,1224,1224,1224,1224,1223,1225,1226,1227,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
@@ -4258,245 +4272,265 @@ const uint16_t PRIV(ucd_stage2)[] = { /* 76800 bytes, block = 128 */
/* block 187 */
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
-1230,1230,1230,1230,1230,1230,1230,1230,1230,1230,1230,1230,1230,1230,1230,1230,
-1230,1230,1230,1230,1230,1230,1230,1230,1230,1230,1230,1230,1230,1230,1230,1230,
-1231,1231,1231,1231,1231,1231,1231,1231,1231,1231,1231,1231,1231,1231,1231,1231,
-1231,1231,1231,1231,1231,1231,1231,1231,1231,1231,1231,1231,1231,1231,1231,1231,
-1232,1232,1232,1232,1232,1232,1232,1232,1232,1232,1233,1233,1233,1233,1233,1233,
-1233,1233,1233,163,163,163,163,163,163,163,163,163,163,163,163,1234,
+1228,1228,1228,1228,1228,1228,1228,1228,1228,1228,1228,1228,1228,1228,1228,1228,
+1228,1228,1228,1228,1228,1228,1228,1228,1228,1228,1228,1228,1228,1228,1228,1228,
+1229,1229,1229,1229,1229,1229,1229,1229,1229,1229,1229,1229,1229,1229,1229,1229,
+1229,1229,1229,1229,1229,1229,1229,1229,1229,1229,1229,1229,1229,1229,1229,1229,
+1230,1230,1230,1230,1230,1230,1230,1230,1230,1230,1231,1231,1231,1231,1231,1231,
+1231,1231,1231,163,163,163,163,163,163,163,163,163,163,163,163,1232,
/* block 188 */
-1235,1235,1235,1235,1235,1235,1235,163,163,1235,163,163,1235,1235,1235,1235,
-1235,1235,1235,1235,163,1235,1235,163,1235,1235,1235,1235,1235,1235,1235,1235,
-1235,1235,1235,1235,1235,1235,1235,1235,1235,1235,1235,1235,1235,1235,1235,1235,
-1236,1237,1237,1237,1237,1237,163,1237,1237,163,163,1238,1238,1239,1240,1241,
-1237,1241,1237,1242,1243,1244,1243,163,163,163,163,163,163,163,163,163,
-1245,1245,1245,1245,1245,1245,1245,1245,1245,1245,163,163,163,163,163,163,
+1233,1233,1233,1233,1233,1233,1233,163,163,1233,163,163,1233,1233,1233,1233,
+1233,1233,1233,1233,163,1233,1233,163,1233,1233,1233,1233,1233,1233,1233,1233,
+1233,1233,1233,1233,1233,1233,1233,1233,1233,1233,1233,1233,1233,1233,1233,1233,
+1234,1235,1235,1235,1235,1235,163,1235,1235,163,163,1236,1236,1237,1238,1239,
+1235,1239,1235,1240,1241,1242,1241,163,163,163,163,163,163,163,163,163,
+1243,1243,1243,1243,1243,1243,1243,1243,1243,1243,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
/* block 189 */
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
-1246,1246,1246,1246,1246,1246,1246,1246,163,163,1246,1246,1246,1246,1246,1246,
-1246,1246,1246,1246,1246,1246,1246,1246,1246,1246,1246,1246,1246,1246,1246,1246,
-1246,1246,1246,1246,1246,1246,1246,1246,1246,1246,1246,1246,1246,1246,1246,1246,
-1246,1247,1247,1247,1248,1248,1248,1248,163,163,1248,1248,1247,1247,1247,1247,
-1249,1246,1250,1246,1247,163,163,163,163,163,163,163,163,163,163,163,
+1244,1244,1244,1244,1244,1244,1244,1244,163,163,1244,1244,1244,1244,1244,1244,
+1244,1244,1244,1244,1244,1244,1244,1244,1244,1244,1244,1244,1244,1244,1244,1244,
+1244,1244,1244,1244,1244,1244,1244,1244,1244,1244,1244,1244,1244,1244,1244,1244,
+1244,1245,1245,1245,1246,1246,1246,1246,163,163,1246,1246,1245,1245,1245,1245,
+1247,1244,1248,1244,1245,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
/* block 190 */
-1251,1252,1252,1252,1252,1252,1252,1253,1253,1252,1252,1251,1251,1251,1251,1251,
-1251,1251,1251,1251,1251,1251,1251,1251,1251,1251,1251,1251,1251,1251,1251,1251,
-1251,1251,1251,1251,1251,1251,1251,1251,1251,1251,1251,1251,1251,1251,1251,1251,
-1251,1251,1251,1254,1255,1252,1252,1252,1252,1256,1257,1252,1252,1252,1252,1258,
-1258,1258,1259,1259,1258,1258,1258,1255,163,163,163,163,163,163,163,163,
-1260,1261,1261,1261,1261,1261,1261,1262,1262,1261,1261,1261,1260,1260,1260,1260,
-1260,1260,1260,1260,1260,1260,1260,1260,1260,1260,1260,1260,1260,1260,1260,1260,
-1260,1260,1260,1260,1260,1260,1260,1260,1260,1260,1260,1260,1260,1260,1260,1260,
+1249,1250,1250,1250,1250,1250,1250,1251,1251,1250,1250,1249,1249,1249,1249,1249,
+1249,1249,1249,1249,1249,1249,1249,1249,1249,1249,1249,1249,1249,1249,1249,1249,
+1249,1249,1249,1249,1249,1249,1249,1249,1249,1249,1249,1249,1249,1249,1249,1249,
+1249,1249,1249,1252,1253,1250,1250,1250,1250,1254,1255,1250,1250,1250,1250,1256,
+1256,1256,1257,1257,1256,1256,1256,1253,163,163,163,163,163,163,163,163,
+1258,1259,1259,1259,1259,1259,1259,1260,1260,1259,1259,1259,1258,1258,1258,1258,
+1258,1258,1258,1258,1258,1258,1258,1258,1258,1258,1258,1258,1258,1258,1258,1258,
+1258,1258,1258,1258,1258,1258,1258,1258,1258,1258,1258,1258,1258,1258,1258,1258,
/* block 191 */
-1260,1260,1260,1260,1263,1263,1263,1263,1263,1263,1261,1261,1261,1261,1261,1261,
-1261,1261,1261,1261,1261,1261,1261,1262,1264,1265,1266,1267,1267,1260,1266,1266,
-1266,1268,1268,163,163,163,163,163,163,163,163,163,163,163,163,163,
-495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,495,
-1269,1269,1269,1269,1269,1269,1269,1269,1269,1269,1269,1269,1269,1269,1269,1269,
-1269,1269,1269,1269,1269,1269,1269,1269,1269,1269,1269,1269,1269,1269,1269,1269,
-1269,1269,1269,1269,1269,1269,1269,1269,1269,1269,1269,1269,1269,1269,1269,1269,
-1269,1269,1269,1269,1269,1269,1269,1269,1269,163,163,163,163,163,163,163,
+1258,1258,1258,1258,1261,1261,1261,1261,1261,1261,1259,1259,1259,1259,1259,1259,
+1259,1259,1259,1259,1259,1259,1259,1260,1262,1263,1264,1265,1265,1258,1264,1264,
+1264,1266,1266,163,163,163,163,163,163,163,163,163,163,163,163,163,
+496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,496,
+1267,1267,1267,1267,1267,1267,1267,1267,1267,1267,1267,1267,1267,1267,1267,1267,
+1267,1267,1267,1267,1267,1267,1267,1267,1267,1267,1267,1267,1267,1267,1267,1267,
+1267,1267,1267,1267,1267,1267,1267,1267,1267,1267,1267,1267,1267,1267,1267,1267,
+1267,1267,1267,1267,1267,1267,1267,1267,1267,163,163,163,163,163,163,163,
/* block 192 */
-1270,1270,1270,1270,1270,1270,1270,1270,1270,163,1270,1270,1270,1270,1270,1270,
-1270,1270,1270,1270,1270,1270,1270,1270,1270,1270,1270,1270,1270,1270,1270,1270,
-1270,1270,1270,1270,1270,1270,1270,1270,1270,1270,1270,1270,1270,1270,1270,1271,
-1272,1272,1272,1272,1272,1272,1272,163,1272,1272,1272,1272,1272,1272,1271,1273,
-1270,1274,1274,1275,1276,1276,163,163,163,163,163,163,163,163,163,163,
-1277,1277,1277,1277,1277,1277,1277,1277,1277,1277,1278,1278,1278,1278,1278,1278,
-1278,1278,1278,1278,1278,1278,1278,1278,1278,1278,1278,1278,1278,163,163,163,
-1279,1280,1281,1281,1281,1281,1281,1281,1281,1281,1281,1281,1281,1281,1281,1281,
-
-/* block 193 */
-1281,1281,1281,1281,1281,1281,1281,1281,1281,1281,1281,1281,1281,1281,1281,1281,
-163,163,1282,1282,1282,1282,1282,1282,1282,1282,1282,1282,1282,1282,1282,1282,
-1282,1282,1282,1282,1282,1282,1282,1282,163,1283,1282,1282,1282,1282,1282,1282,
-1282,1283,1282,1282,1283,1282,1282,163,163,163,163,163,163,163,163,163,
+343,343,343,343,343,343,343,343,343,343,163,163,163,163,163,163,
+163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
-/* block 194 */
-1284,1284,1284,1284,1284,1284,1284,163,1284,1284,163,1284,1284,1284,1284,1284,
-1284,1284,1284,1284,1284,1284,1284,1284,1284,1284,1284,1284,1284,1284,1284,1284,
-1284,1284,1284,1284,1284,1284,1284,1284,1284,1284,1284,1284,1284,1284,1284,1284,
-1284,1285,1285,1285,1285,1285,1285,163,163,163,1285,163,1285,1285,163,1285,
-1285,1285,1286,1285,1287,1287,1288,1285,163,163,163,163,163,163,163,163,
-1289,1289,1289,1289,1289,1289,1289,1289,1289,1289,163,163,163,163,163,163,
-1290,1290,1290,1290,1290,1290,163,1290,1290,163,1290,1290,1290,1290,1290,1290,
-1290,1290,1290,1290,1290,1290,1290,1290,1290,1290,1290,1290,1290,1290,1290,1290,
+/* block 193 */
+1268,1268,1268,1268,1268,1268,1268,1268,1268,163,1268,1268,1268,1268,1268,1268,
+1268,1268,1268,1268,1268,1268,1268,1268,1268,1268,1268,1268,1268,1268,1268,1268,
+1268,1268,1268,1268,1268,1268,1268,1268,1268,1268,1268,1268,1268,1268,1268,1269,
+1270,1270,1270,1270,1270,1270,1270,163,1270,1270,1270,1270,1270,1270,1269,1271,
+1268,1272,1272,1273,1274,1274,163,163,163,163,163,163,163,163,163,163,
+1275,1275,1275,1275,1275,1275,1275,1275,1275,1275,1276,1276,1276,1276,1276,1276,
+1276,1276,1276,1276,1276,1276,1276,1276,1276,1276,1276,1276,1276,163,163,163,
+1277,1278,1279,1279,1279,1279,1279,1279,1279,1279,1279,1279,1279,1279,1279,1279,
-/* block 195 */
-1290,1290,1290,1290,1290,1290,1290,1290,1290,1290,1291,1291,1291,1291,1291,163,
-1292,1292,163,1291,1291,1292,1291,1293,1290,163,163,163,163,163,163,163,
-1294,1294,1294,1294,1294,1294,1294,1294,1294,1294,163,163,163,163,163,163,
-163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+/* block 194 */
+1279,1279,1279,1279,1279,1279,1279,1279,1279,1279,1279,1279,1279,1279,1279,1279,
+163,163,1280,1280,1280,1280,1280,1280,1280,1280,1280,1280,1280,1280,1280,1280,
+1280,1280,1280,1280,1280,1280,1280,1280,163,1281,1280,1280,1280,1280,1280,1280,
+1280,1281,1280,1280,1281,1280,1280,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+/* block 195 */
+1282,1282,1282,1282,1282,1282,1282,163,1282,1282,163,1282,1282,1282,1282,1282,
+1282,1282,1282,1282,1282,1282,1282,1282,1282,1282,1282,1282,1282,1282,1282,1282,
+1282,1282,1282,1282,1282,1282,1282,1282,1282,1282,1282,1282,1282,1282,1282,1282,
+1282,1283,1283,1283,1283,1283,1283,163,163,163,1283,163,1283,1283,163,1283,
+1283,1283,1284,1283,1285,1285,1286,1283,163,163,163,163,163,163,163,163,
+1287,1287,1287,1287,1287,1287,1287,1287,1287,1287,163,163,163,163,163,163,
+1288,1288,1288,1288,1288,1288,163,1288,1288,163,1288,1288,1288,1288,1288,1288,
+1288,1288,1288,1288,1288,1288,1288,1288,1288,1288,1288,1288,1288,1288,1288,1288,
+
/* block 196 */
+1288,1288,1288,1288,1288,1288,1288,1288,1288,1288,1289,1289,1289,1289,1289,163,
+1290,1290,163,1289,1289,1290,1289,1291,1288,163,163,163,163,163,163,163,
+1292,1292,1292,1292,1292,1292,1292,1292,1292,1292,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
-163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
-1295,1295,1295,1295,1295,1295,1295,1295,1295,1295,1295,1295,1295,1295,1295,1295,
-1295,1295,1295,1296,1296,1297,1297,1298,1298,163,163,163,163,163,163,163,
/* block 197 */
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
-842,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
-1299,1299,1299,1299,1299,1299,1299,1299,1299,1299,1299,1299,1299,1299,1299,1299,
-388,388,1299,388,1299,390,390,390,390,390,390,390,390,391,391,391,
-391,390,390,390,390,390,390,390,390,390,390,390,390,390,390,390,
-390,390,163,163,163,163,163,163,163,163,163,163,163,163,163,1300,
+163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+1293,1293,1293,1293,1293,1293,1293,1293,1293,1293,1293,1293,1293,1293,1293,1293,
+1293,1293,1293,1294,1294,1295,1295,1296,1296,163,163,163,163,163,163,163,
/* block 198 */
-1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,
-1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,
-1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,
-1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,
-1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,
-1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,
-1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,
-1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,
-
-/* block 199 */
-1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,
-1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,163,163,163,163,163,163,
-163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+1297,1297,1298,1299,1300,1300,1300,1300,1300,1300,1300,1300,1300,1300,1300,1300,
+1300,163,1300,1300,1300,1300,1300,1300,1300,1300,1300,1300,1300,1300,1300,1300,
+1300,1300,1300,1300,1300,1300,1300,1300,1300,1300,1300,1300,1300,1300,1300,1300,
+1300,1300,1300,1300,1299,1299,1297,1297,1297,1297,1297,163,163,163,1299,1299,
+1297,1301,1302,1303,1303,1304,1304,1304,1304,1304,1304,1304,1304,1304,1304,1304,
+1305,1305,1305,1305,1305,1305,1305,1305,1305,1305,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+
+/* block 199 */
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+843,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,
+388,388,1306,388,1306,390,390,390,390,390,390,390,390,391,391,391,
+391,390,390,390,390,390,390,390,390,390,390,390,390,390,390,390,
+390,390,163,163,163,163,163,163,163,163,163,163,163,163,163,1307,
/* block 200 */
-1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,
-1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,
-1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,
-1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,
-1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,
-1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,
-1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,1302,163,
-1303,1303,1303,1303,1303,163,163,163,163,163,163,163,163,163,163,163,
+1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,
+1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,
+1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,
+1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,
+1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,
+1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,
+1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,
+1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,
/* block 201 */
-1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,
-1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,
-1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,
-1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,1301,
-1301,1301,1301,1301,163,163,163,163,163,163,163,163,163,163,163,163,
+1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,
+1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,163,163,163,163,163,163,
+163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
/* block 202 */
-163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
-1304,1304,1304,1304,1304,1304,1304,1304,1304,1304,1304,1304,1304,1304,1304,1304,
-1304,1304,1304,1304,1304,1304,1304,1304,1304,1304,1304,1304,1304,1304,1304,1304,
-1304,1304,1304,1304,1304,1304,1304,1304,1304,1304,1304,1304,1304,1304,1304,1304,
-1304,1304,1304,1304,1304,1304,1304,1304,1304,1304,1304,1304,1304,1304,1304,1304,
-1304,1304,1304,1304,1304,1304,1304,1304,1304,1304,1304,1304,1304,1304,1304,1304,
-1304,1304,1304,1304,1304,1304,1304,1304,1304,1304,1304,1304,1304,1304,1304,1304,
-1304,1305,1305,163,163,163,163,163,163,163,163,163,163,163,163,163,
+1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,
+1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,
+1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,
+1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,
+1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,
+1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,
+1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,163,
+1310,1310,1310,1310,1310,163,163,163,163,163,163,163,163,163,163,163,
/* block 203 */
-1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,
-1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,
-1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,
-1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,
-1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,
-1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,
-1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,
-1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,
-
-/* block 204 */
-1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,
-1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,
-1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,1306,163,
-1307,1307,1307,1307,1307,1307,1307,1307,1307,163,163,163,163,163,163,163,
+1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,
+1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,
+1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,
+1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,
+1308,1308,1308,1308,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+
+/* block 204 */
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+1311,1311,1311,1311,1311,1311,1311,1311,1311,1311,1311,1311,1311,1311,1311,1311,
+1311,1311,1311,1311,1311,1311,1311,1311,1311,1311,1311,1311,1311,1311,1311,1311,
+1311,1311,1311,1311,1311,1311,1311,1311,1311,1311,1311,1311,1311,1311,1311,1311,
+1311,1311,1311,1311,1311,1311,1311,1311,1311,1311,1311,1311,1311,1311,1311,1311,
+1311,1311,1311,1311,1311,1311,1311,1311,1311,1311,1311,1311,1311,1311,1311,1311,
+1311,1311,1311,1311,1311,1311,1311,1311,1311,1311,1311,1311,1311,1311,1311,1311,
+1311,1312,1312,163,163,163,163,163,163,163,163,163,163,163,163,163,
/* block 205 */
-1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,
-1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,
-1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,
-1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,
-1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,
-1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,
-1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,
-1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,
+1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,
+1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,
+1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,
+1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,
+1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,
+1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,
+1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,
+1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,
/* block 206 */
-1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,
-1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,
-1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,
-1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,1308,
-1308,1308,1308,1308,1308,1308,1308,163,163,163,163,163,163,163,163,163,
-163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,
+1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,
+1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,
+1314,1314,1314,1314,1314,1314,1314,1314,1314,1314,1314,1314,1314,1314,1314,1314,
+1315,1313,1313,1313,1313,1313,1313,1316,1316,1316,1316,1316,1316,1316,1316,1316,
+1316,1316,1316,1316,1316,1316,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
/* block 207 */
-858,858,858,858,858,858,858,858,858,858,858,858,858,858,858,858,
-858,858,858,858,858,858,858,858,858,858,858,858,858,858,858,858,
-858,858,858,858,858,858,858,858,858,858,858,858,858,858,858,858,
-858,858,858,858,858,858,858,858,858,858,858,858,858,858,858,858,
-858,858,858,858,858,858,858,858,858,858,858,858,858,858,858,858,
-858,858,858,858,858,858,858,858,858,858,858,858,858,858,858,858,
-858,858,858,858,858,858,858,858,858,858,858,858,858,858,858,858,
-858,858,858,858,858,858,858,858,858,858,858,858,858,858,858,858,
+1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,
+1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,
+1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,
+1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,
+1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,
+1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,
+1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,
+1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,
/* block 208 */
-858,858,858,858,858,858,858,858,858,858,858,858,858,858,858,858,
-858,858,858,858,858,858,858,858,858,858,858,858,858,858,858,858,
-858,858,858,858,858,858,858,858,858,858,858,858,858,858,858,858,
-858,858,858,858,858,858,858,858,858,163,163,163,163,163,163,163,
-1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,
-1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,1309,163,
-1310,1310,1310,1310,1310,1310,1310,1310,1310,1310,163,163,163,163,1311,1311,
-1312,1312,1312,1312,1312,1312,1312,1312,1312,1312,1312,1312,1312,1312,1312,1312,
+1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,
+1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,
+1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,
+1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,
+1317,1317,1317,1317,1317,1317,1317,163,163,163,163,163,163,163,163,163,
+163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
/* block 209 */
-1312,1312,1312,1312,1312,1312,1312,1312,1312,1312,1312,1312,1312,1312,1312,1312,
-1312,1312,1312,1312,1312,1312,1312,1312,1312,1312,1312,1312,1312,1312,1312,1312,
-1312,1312,1312,1312,1312,1312,1312,1312,1312,1312,1312,1312,1312,1312,1312,1312,
-1312,1312,1312,1312,1312,1312,1312,1312,1312,1312,1312,1312,1312,1312,1312,163,
-1313,1313,1313,1313,1313,1313,1313,1313,1313,1313,163,163,163,163,163,163,
-1314,1314,1314,1314,1314,1314,1314,1314,1314,1314,1314,1314,1314,1314,1314,1314,
-1314,1314,1314,1314,1314,1314,1314,1314,1314,1314,1314,1314,1314,1314,163,163,
-1315,1315,1315,1315,1315,1316,163,163,163,163,163,163,163,163,163,163,
+859,859,859,859,859,859,859,859,859,859,859,859,859,859,859,859,
+859,859,859,859,859,859,859,859,859,859,859,859,859,859,859,859,
+859,859,859,859,859,859,859,859,859,859,859,859,859,859,859,859,
+859,859,859,859,859,859,859,859,859,859,859,859,859,859,859,859,
+859,859,859,859,859,859,859,859,859,859,859,859,859,859,859,859,
+859,859,859,859,859,859,859,859,859,859,859,859,859,859,859,859,
+859,859,859,859,859,859,859,859,859,859,859,859,859,859,859,859,
+859,859,859,859,859,859,859,859,859,859,859,859,859,859,859,859,
/* block 210 */
-1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,
-1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,
-1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,
-1318,1318,1318,1318,1318,1318,1318,1319,1319,1320,1321,1321,1322,1322,1322,1322,
-1323,1323,1324,1324,1319,1322,163,163,163,163,163,163,163,163,163,163,
-1325,1325,1325,1325,1325,1325,1325,1325,1325,1325,163,1326,1326,1326,1326,1326,
-1326,1326,163,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,
-1317,1317,1317,1317,1317,1317,1317,1317,163,163,163,163,163,1317,1317,1317,
+859,859,859,859,859,859,859,859,859,859,859,859,859,859,859,859,
+859,859,859,859,859,859,859,859,859,859,859,859,859,859,859,859,
+859,859,859,859,859,859,859,859,859,859,859,859,859,859,859,859,
+859,859,859,859,859,859,859,859,859,163,163,163,163,163,163,163,
+1318,1318,1318,1318,1318,1318,1318,1318,1318,1318,1318,1318,1318,1318,1318,1318,
+1318,1318,1318,1318,1318,1318,1318,1318,1318,1318,1318,1318,1318,1318,1318,163,
+1319,1319,1319,1319,1319,1319,1319,1319,1319,1319,163,163,163,163,1320,1320,
+1321,1321,1321,1321,1321,1321,1321,1321,1321,1321,1321,1321,1321,1321,1321,1321,
/* block 211 */
-1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,1317,
+1321,1321,1321,1321,1321,1321,1321,1321,1321,1321,1321,1321,1321,1321,1321,1321,
+1321,1321,1321,1321,1321,1321,1321,1321,1321,1321,1321,1321,1321,1321,1321,1321,
+1321,1321,1321,1321,1321,1321,1321,1321,1321,1321,1321,1321,1321,1321,1321,1321,
+1321,1321,1321,1321,1321,1321,1321,1321,1321,1321,1321,1321,1321,1321,1321,163,
+1322,1322,1322,1322,1322,1322,1322,1322,1322,1322,163,163,163,163,163,163,
+1323,1323,1323,1323,1323,1323,1323,1323,1323,1323,1323,1323,1323,1323,1323,1323,
+1323,1323,1323,1323,1323,1323,1323,1323,1323,1323,1323,1323,1323,1323,163,163,
+1324,1324,1324,1324,1324,1325,163,163,163,163,163,163,163,163,163,163,
+
+/* block 212 */
+1326,1326,1326,1326,1326,1326,1326,1326,1326,1326,1326,1326,1326,1326,1326,1326,
+1326,1326,1326,1326,1326,1326,1326,1326,1326,1326,1326,1326,1326,1326,1326,1326,
+1326,1326,1326,1326,1326,1326,1326,1326,1326,1326,1326,1326,1326,1326,1326,1326,
+1327,1327,1327,1327,1327,1327,1327,1328,1328,1329,1330,1330,1331,1331,1331,1331,
+1332,1332,1333,1333,1328,1331,163,163,163,163,163,163,163,163,163,163,
+1334,1334,1334,1334,1334,1334,1334,1334,1334,1334,163,1335,1335,1335,1335,1335,
+1335,1335,163,1326,1326,1326,1326,1326,1326,1326,1326,1326,1326,1326,1326,1326,
+1326,1326,1326,1326,1326,1326,1326,1326,163,163,163,163,163,1326,1326,1326,
+
+/* block 213 */
+1326,1326,1326,1326,1326,1326,1326,1326,1326,1326,1326,1326,1326,1326,1326,1326,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
@@ -4505,19 +4539,19 @@ const uint16_t PRIV(ucd_stage2)[] = { /* 76800 bytes, block = 128 */
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
-/* block 212 */
+/* block 214 */
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
-1327,1327,1327,1327,1327,1327,1327,1327,1327,1327,1327,1327,1327,1327,1327,1327,
-1327,1327,1327,1327,1327,1327,1327,1327,1327,1327,1327,1327,1327,1327,1327,1327,
-1328,1328,1328,1328,1328,1328,1328,1328,1328,1328,1328,1328,1328,1328,1328,1328,
-1328,1328,1328,1328,1328,1328,1328,1328,1328,1328,1328,1328,1328,1328,1328,1328,
+1336,1336,1336,1336,1336,1336,1336,1336,1336,1336,1336,1336,1336,1336,1336,1336,
+1336,1336,1336,1336,1336,1336,1336,1336,1336,1336,1336,1336,1336,1336,1336,1336,
+1337,1337,1337,1337,1337,1337,1337,1337,1337,1337,1337,1337,1337,1337,1337,1337,
+1337,1337,1337,1337,1337,1337,1337,1337,1337,1337,1337,1337,1337,1337,1337,1337,
-/* block 213 */
-1329,1329,1329,1329,1329,1329,1329,1329,1329,1329,1329,1329,1329,1329,1329,1329,
-1329,1329,1329,1329,1329,1329,1329,1330,1331,1332,1332,163,163,163,163,163,
+/* block 215 */
+1338,1338,1338,1338,1338,1338,1338,1338,1338,1338,1338,1338,1338,1338,1338,1338,
+1338,1338,1338,1338,1338,1338,1338,1339,1340,1341,1341,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
@@ -4525,68 +4559,68 @@ const uint16_t PRIV(ucd_stage2)[] = { /* 76800 bytes, block = 128 */
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
-/* block 214 */
-1333,1333,1333,1333,1333,1333,1333,1333,1333,1333,1333,1333,1333,1333,1333,1333,
-1333,1333,1333,1333,1333,1333,1333,1333,1333,1333,1333,1333,1333,1333,1333,1333,
-1333,1333,1333,1333,1333,1333,1333,1333,1333,1333,1333,1333,1333,1333,1333,1333,
-1333,1333,1333,1333,1333,1333,1333,1333,1333,1333,1333,1333,1333,1333,1333,1333,
-1333,1333,1333,1333,1333,1333,1333,1333,1333,1333,1333,163,163,163,163,1334,
-1333,1335,1335,1335,1335,1335,1335,1335,1335,1335,1335,1335,1335,1335,1335,1335,
-1335,1335,1335,1335,1335,1335,1335,1335,1335,1335,1335,1335,1335,1335,1335,1335,
-1335,1335,1335,1335,1335,1335,1335,1335,1335,1335,1335,1335,1335,1335,1335,1335,
+/* block 216 */
+1342,1342,1342,1342,1342,1342,1342,1342,1342,1342,1342,1342,1342,1342,1342,1342,
+1342,1342,1342,1342,1342,1342,1342,1342,1342,1342,1342,1342,1342,1342,1342,1342,
+1342,1342,1342,1342,1342,1342,1342,1342,1342,1342,1342,1342,1342,1342,1342,1342,
+1342,1342,1342,1342,1342,1342,1342,1342,1342,1342,1342,1342,1342,1342,1342,1342,
+1342,1342,1342,1342,1342,1342,1342,1342,1342,1342,1342,163,163,163,163,1343,
+1342,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,
+1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,
+1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,
-/* block 215 */
-1335,1335,1335,1335,1335,1335,1335,1335,163,163,163,163,163,163,163,1336,
-1336,1336,1336,1337,1337,1337,1337,1337,1337,1337,1337,1337,1337,1337,1337,1337,
+/* block 217 */
+1344,1344,1344,1344,1344,1344,1344,1344,163,163,163,163,163,163,163,1345,
+1345,1345,1345,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
-1338,1339,1340,799,1341,163,163,163,163,163,163,163,163,163,163,163,
-1342,1342,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
-
-/* block 216 */
-1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,
-1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,
-1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,
-1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,
-1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,
-1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,
-1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,
-1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,
-
-/* block 217 */
-1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,
-1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,
-1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,
-1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,
-1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,
-1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,
-1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,1343,
-1343,1343,1343,1343,1343,1343,1343,1343,163,163,163,163,163,163,163,163,
+1347,1348,1349,800,1350,163,163,163,163,163,163,163,163,163,163,163,
+1351,1351,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
/* block 218 */
-1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,
-1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,
-1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,
-1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,
-1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,
-1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,
-1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,
-1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,
+1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,
+1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,
+1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,
+1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,
+1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,
+1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,
+1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,
+1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,
/* block 219 */
-1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,
-1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,
-1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,
-1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,
-1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,1344,
-1344,1344,1344,1344,1344,1344,163,163,163,163,163,163,163,163,163,163,
+1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,
+1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,
+1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,
+1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,
+1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,
+1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,
+1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,1352,
+1352,1352,1352,1352,1352,1352,1352,1352,163,163,163,163,163,163,163,163,
+
+/* block 220 */
+1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,
+1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,
+1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,
+1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,
+1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,
+1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,
+1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,
+1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,
+
+/* block 221 */
+1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,
+1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,
+1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,
+1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,
+1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,1353,
+1353,1353,1353,1353,1353,1353,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
-/* block 220 */
-1343,1343,1343,1343,1343,1343,1343,1343,1343,163,163,163,163,163,163,163,
+/* block 222 */
+1352,1352,1352,1352,1352,1352,1352,1352,1352,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
@@ -4595,7 +4629,7 @@ const uint16_t PRIV(ucd_stage2)[] = { /* 76800 bytes, block = 128 */
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
-/* block 221 */
+/* block 223 */
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
@@ -4603,359 +4637,379 @@ const uint16_t PRIV(ucd_stage2)[] = { /* 76800 bytes, block = 128 */
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
-1345,1345,1345,1345,163,1345,1345,1345,1345,1345,1345,1345,163,1345,1345,163,
-
-/* block 222 */
-824,819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,
-819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,
-819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,
-819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,
-819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,
-819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,
-819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,
-819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,
-
-/* block 223 */
-819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,
-819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,
-819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,
-819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,
-819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,
-819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,
-819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,
-819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,
+1354,1354,1354,1354,163,1354,1354,1354,1354,1354,1354,1354,163,1354,1354,163,
/* block 224 */
-819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,
-819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,819,
-824,824,824,163,163,163,163,163,163,163,163,163,163,163,163,163,
-163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
-163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
-819,819,819,163,163,163,163,163,163,163,163,163,163,163,163,163,
-163,163,163,163,824,824,824,824,163,163,163,163,163,163,163,163,
-1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,
+825,820,820,820,820,820,820,820,820,820,820,820,820,820,820,820,
+820,820,820,820,820,820,820,820,820,820,820,820,820,820,820,820,
+820,820,820,820,820,820,820,820,820,820,820,820,820,820,820,820,
+820,820,820,820,820,820,820,820,820,820,820,820,820,820,820,820,
+820,820,820,820,820,820,820,820,820,820,820,820,820,820,820,820,
+820,820,820,820,820,820,820,820,820,820,820,820,820,820,820,820,
+820,820,820,820,820,820,820,820,820,820,820,820,820,820,820,820,
+820,820,820,820,820,820,820,820,820,820,820,820,820,820,820,820,
/* block 225 */
-1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,
-1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,
-1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,
-1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,
-1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,
-1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,
-1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,
-1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,
+820,820,820,820,820,820,820,820,820,820,820,820,820,820,820,820,
+820,820,820,820,820,820,820,820,820,820,820,820,820,820,820,820,
+820,820,820,820,820,820,820,820,820,820,820,820,820,820,820,820,
+820,820,820,820,820,820,820,820,820,820,820,820,820,820,820,820,
+820,820,820,820,820,820,820,820,820,820,820,820,820,820,820,820,
+820,820,820,820,820,820,820,820,820,820,820,820,820,820,820,820,
+820,820,820,820,820,820,820,820,820,820,820,820,820,820,820,820,
+820,820,820,820,820,820,820,820,820,820,820,820,820,820,820,820,
/* block 226 */
-1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,
-1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,
-1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,
-1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,
-1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,
-1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,
-1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,
-1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,1346,163,163,163,163,
+820,820,820,820,820,820,820,820,820,820,820,820,820,820,820,820,
+820,820,820,820,820,820,820,820,820,820,820,820,820,820,820,820,
+825,825,825,163,163,163,163,163,163,163,163,163,163,163,163,163,
+163,163,820,163,163,163,163,163,163,163,163,163,163,163,163,163,
+163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+820,820,820,163,163,825,163,163,163,163,163,163,163,163,163,163,
+163,163,163,163,825,825,825,825,163,163,163,163,163,163,163,163,
+1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,
/* block 227 */
-1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,
-1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,
-1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,
-1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,
-1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,
-1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,
-1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,163,163,163,163,163,
-1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,163,163,163,
+1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,
+1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,
+1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,
+1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,
+1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,
+1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,
+1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,
+1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,
/* block 228 */
-1347,1347,1347,1347,1347,1347,1347,1347,1347,163,163,163,163,163,163,163,
-1347,1347,1347,1347,1347,1347,1347,1347,1347,1347,163,163,1348,1349,1350,1351,
-1352,1352,1352,1352,163,163,163,163,163,163,163,163,163,163,163,163,
+1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,
+1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,
+1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,
+1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,
+1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,
+1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,
+1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,
+1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,1355,163,163,163,163,
+
+/* block 229 */
+1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,
+1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,
+1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,
+1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,
+1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,
+1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,
+1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,163,163,163,163,163,
+1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,163,163,163,
+
+/* block 230 */
+1356,1356,1356,1356,1356,1356,1356,1356,1356,163,163,163,163,163,163,163,
+1356,1356,1356,1356,1356,1356,1356,1356,1356,1356,163,163,1357,1358,1359,1360,
+1361,1361,1361,1361,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
-/* block 229 */
+/* block 231 */
154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,
154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,
154,154,154,154,154,154,154,154,154,154,154,154,154,154,163,163,
154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,
154,154,154,154,154,154,154,163,163,163,163,163,163,163,163,163,
-460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,
-460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,
-460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,
+461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,
+461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,
+461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,
-/* block 230 */
-460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,
-460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,
-460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,
-460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,
-460,460,460,460,163,163,163,163,163,163,163,163,163,163,163,163,
+/* block 232 */
+461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,
+461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,
+461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,
+461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,
+461,461,461,461,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
-/* block 231 */
-460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,
-460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,
-460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,
-460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,
-460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,
-460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,
-460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,
-460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,
-
-/* block 232 */
-460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,
-460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,
-460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,
-460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,
-460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,
-460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,
-460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,
-460,460,460,460,460,460,163,163,163,163,163,163,163,163,163,163,
-
/* block 233 */
-460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,
-460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,
-460,460,460,460,460,460,460,163,163,460,460,460,460,460,460,460,
-460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,
-460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,
-460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,
-460,460,460,460,460,1353,1354,154,154,154,460,460,460,1355,1356,1356,
-1356,1356,1356, 51, 51, 51, 51, 51, 51, 51, 51,154,154,154,154,154,
+461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,
+461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,
+461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,
+461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,
+461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,
+461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,
+461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,
+461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,
/* block 234 */
-154,154,154,460,460,154,154,154,154,154,154,154,460,460,460,460,
-460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,
-460,460,460,460,460,460,460,460,460,460,154,154,154,154,460,460,
-460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,
-460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,
-460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,
-460,460,460,460,460,460,460,460,460,723,723,163,163,163,163,163,
-163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,
+461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,
+461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,
+461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,
+461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,
+461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,
+461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,
+461,461,461,461,461,461,163,163,163,163,163,163,163,163,163,163,
/* block 235 */
-1002,1002,1002,1002,1002,1002,1002,1002,1002,1002,1002,1002,1002,1002,1002,1002,
-1002,1002,1002,1002,1002,1002,1002,1002,1002,1002,1002,1002,1002,1002,1002,1002,
-1002,1002,1002,1002,1002,1002,1002,1002,1002,1002,1002,1002,1002,1002,1002,1002,
-1002,1002,1002,1002,1002,1002,1002,1002,1002,1002,1002,1002,1002,1002,1002,1002,
-1002,1002,1357,1357,1357,1002,163,163,163,163,163,163,163,163,163,163,
-163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
-163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
-163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,
+461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,
+461,461,461,461,461,461,461,163,163,461,461,461,461,461,461,461,
+461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,
+461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,
+461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,
+461,461,461,461,461,1362,1363,154,154,154,461,461,461,1364,1365,1365,
+1365,1365,1365, 51, 51, 51, 51, 51, 51, 51, 51,154,154,154,154,154,
/* block 236 */
+154,154,154,461,461,154,154,154,154,154,154,154,461,461,461,461,
+461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,
+461,461,461,461,461,461,461,461,461,461,154,154,154,154,461,461,
+461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,
+461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,
+461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,
+461,461,461,461,461,461,461,461,461,724,724,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+
+/* block 237 */
+1001,1001,1001,1001,1001,1001,1001,1001,1001,1001,1001,1001,1001,1001,1001,1001,
+1001,1001,1001,1001,1001,1001,1001,1001,1001,1001,1001,1001,1001,1001,1001,1001,
+1001,1001,1001,1001,1001,1001,1001,1001,1001,1001,1001,1001,1001,1001,1001,1001,
+1001,1001,1001,1001,1001,1001,1001,1001,1001,1001,1001,1001,1001,1001,1001,1001,
+1001,1001,1366,1366,1366,1001,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+
+/* block 238 */
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
-835,835,835,835,835,835,835,835,835,835,835,835,835,835,835,835,
-835,835,835,835,163,163,163,163,163,163,163,163,163,163,163,163,
-
-/* block 237 */
-723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,
-723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,
-723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,
-723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,
-723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,
-723,723,723,723,723,723,723,163,163,163,163,163,163,163,163,163,
-832,832,832,832,832,832,832,832,832,832,832,832,832,832,832,832,
-832,832,835,835,835,835,835,835,835,163,163,163,163,163,163,163,
+163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+836,836,836,836,836,836,836,836,836,836,836,836,836,836,836,836,
+836,836,836,836,163,163,163,163,163,163,163,163,163,163,163,163,
+836,836,836,836,836,836,836,836,836,836,836,836,836,836,836,836,
+836,836,836,836,163,163,163,163,163,163,163,163,163,163,163,163,
-/* block 238 */
+/* block 239 */
724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
-724,724,724,724,724,724,724,724,724,724,725,725,725,725,725,725,
-725,725,736,736,725,725,725,725,725,725,725,725,725,725,725,725,
-725,725,725,725,724,724,724,724,724,724,724,724,724,724,724,724,
-724,724,724,724,724,724,724,724,724,724,724,724,724,724,725,725,
-725,725,725,725,725,163,736,736,725,725,725,725,725,725,725,725,
-725,725,725,725,725,725,725,725,724,724,724,724,724,724,724,724,
724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
-
-/* block 239 */
-724,724,725,725,725,725,725,725,725,725,736,736,725,725,725,725,
-725,725,725,725,725,725,725,725,725,725,725,725,724,163,724,724,
-163,163,724,163,163,724,724,163,163,724,724,724,724,163,724,724,
-724,724,724,724,724,724,725,725,725,725,163,725,163,725,736,736,
-725,725,725,725,163,725,725,725,725,725,725,725,725,725,725,725,
724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
-724,724,724,724,724,724,724,724,724,724,725,725,725,725,725,725,
-725,725,736,736,725,725,725,725,725,725,725,725,725,725,725,725,
+724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
+724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
+724,724,724,724,724,724,724,163,163,163,163,163,163,163,163,163,
+833,833,833,833,833,833,833,833,833,833,833,833,833,833,833,833,
+833,833,836,836,836,836,836,836,836,163,163,163,163,163,163,163,
/* block 240 */
-725,725,725,725,724,724,163,724,724,724,724,163,163,724,724,724,
-724,724,724,724,724,163,724,724,724,724,724,724,724,163,725,725,
-725,725,725,725,725,725,736,736,725,725,725,725,725,725,725,725,
-725,725,725,725,725,725,725,725,724,724,163,724,724,724,724,163,
-724,724,724,724,724,163,724,163,163,163,724,724,724,724,724,724,
-724,163,725,725,725,725,725,725,725,725,736,736,725,725,725,725,
-725,725,725,725,725,725,725,725,725,725,725,725,724,724,724,724,
-724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
+725,725,725,725,725,725,725,725,725,725,725,725,725,725,725,725,
+725,725,725,725,725,725,725,725,725,725,726,726,726,726,726,726,
+726,726,737,737,726,726,726,726,726,726,726,726,726,726,726,726,
+726,726,726,726,725,725,725,725,725,725,725,725,725,725,725,725,
+725,725,725,725,725,725,725,725,725,725,725,725,725,725,726,726,
+726,726,726,726,726,163,737,737,726,726,726,726,726,726,726,726,
+726,726,726,726,726,726,726,726,725,725,725,725,725,725,725,725,
+725,725,725,725,725,725,725,725,725,725,725,725,725,725,725,725,
/* block 241 */
-724,724,724,724,724,724,725,725,725,725,725,725,725,725,736,736,
+725,725,726,726,726,726,726,726,726,726,737,737,726,726,726,726,
+726,726,726,726,726,726,726,726,726,726,726,726,725,163,725,725,
+163,163,725,163,163,725,725,163,163,725,725,725,725,163,725,725,
+725,725,725,725,725,725,726,726,726,726,163,726,163,726,737,737,
+726,726,726,726,163,726,726,726,726,726,726,726,726,726,726,726,
725,725,725,725,725,725,725,725,725,725,725,725,725,725,725,725,
-724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
-724,724,724,724,724,724,724,724,724,724,725,725,725,725,725,725,
-725,725,736,736,725,725,725,725,725,725,725,725,725,725,725,725,
-725,725,725,725,724,724,724,724,724,724,724,724,724,724,724,724,
-724,724,724,724,724,724,724,724,724,724,724,724,724,724,725,725,
-725,725,725,725,725,725,736,736,725,725,725,725,725,725,725,725,
+725,725,725,725,725,725,725,725,725,725,726,726,726,726,726,726,
+726,726,737,737,726,726,726,726,726,726,726,726,726,726,726,726,
/* block 242 */
-725,725,725,725,725,725,725,725,724,724,724,724,724,724,724,724,
-724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
-724,724,725,725,725,725,725,725,725,725,736,736,725,725,725,725,
-725,725,725,725,725,725,725,725,725,725,725,725,724,724,724,724,
-724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
-724,724,724,724,724,724,725,725,725,725,725,725,725,725,736,736,
+726,726,726,726,725,725,163,725,725,725,725,163,163,725,725,725,
+725,725,725,725,725,163,725,725,725,725,725,725,725,163,726,726,
+726,726,726,726,726,726,737,737,726,726,726,726,726,726,726,726,
+726,726,726,726,726,726,726,726,725,725,163,725,725,725,725,163,
+725,725,725,725,725,163,725,163,163,163,725,725,725,725,725,725,
+725,163,726,726,726,726,726,726,726,726,737,737,726,726,726,726,
+726,726,726,726,726,726,726,726,726,726,726,726,725,725,725,725,
725,725,725,725,725,725,725,725,725,725,725,725,725,725,725,725,
-724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
/* block 243 */
-724,724,724,724,724,724,724,724,724,724,725,725,725,725,725,725,
-725,725,736,736,725,725,725,725,725,725,725,725,725,725,725,725,
-725,725,725,725,725,725,163,163,724,724,724,724,724,724,724,724,
-724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
-724,1358,725,725,725,725,725,725,725,725,725,725,725,725,725,725,
-725,725,725,725,725,725,725,725,725,725,725,715,725,725,725,725,
-725,725,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
-724,724,724,724,724,724,724,724,724,724,724,1358,725,725,725,725,
+725,725,725,725,725,725,726,726,726,726,726,726,726,726,737,737,
+726,726,726,726,726,726,726,726,726,726,726,726,726,726,726,726,
+725,725,725,725,725,725,725,725,725,725,725,725,725,725,725,725,
+725,725,725,725,725,725,725,725,725,725,726,726,726,726,726,726,
+726,726,737,737,726,726,726,726,726,726,726,726,726,726,726,726,
+726,726,726,726,725,725,725,725,725,725,725,725,725,725,725,725,
+725,725,725,725,725,725,725,725,725,725,725,725,725,725,726,726,
+726,726,726,726,726,726,737,737,726,726,726,726,726,726,726,726,
/* block 244 */
+726,726,726,726,726,726,726,726,725,725,725,725,725,725,725,725,
725,725,725,725,725,725,725,725,725,725,725,725,725,725,725,725,
-725,725,725,725,725,715,725,725,725,725,725,725,724,724,724,724,
-724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
-724,724,724,724,724,1358,725,725,725,725,725,725,725,725,725,725,
-725,725,725,725,725,725,725,725,725,725,725,725,725,725,725,715,
-725,725,725,725,725,725,724,724,724,724,724,724,724,724,724,724,
-724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,1358,
+725,725,726,726,726,726,726,726,726,726,737,737,726,726,726,726,
+726,726,726,726,726,726,726,726,726,726,726,726,725,725,725,725,
+725,725,725,725,725,725,725,725,725,725,725,725,725,725,725,725,
+725,725,725,725,725,725,726,726,726,726,726,726,726,726,737,737,
+726,726,726,726,726,726,726,726,726,726,726,726,726,726,726,726,
725,725,725,725,725,725,725,725,725,725,725,725,725,725,725,725,
/* block 245 */
-725,725,725,725,725,725,725,725,725,715,725,725,725,725,725,725,
-724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
-724,724,724,724,724,724,724,724,724,1358,725,725,725,725,725,725,
+725,725,725,725,725,725,725,725,725,725,726,726,726,726,726,726,
+726,726,737,737,726,726,726,726,726,726,726,726,726,726,726,726,
+726,726,726,726,726,726,163,163,725,725,725,725,725,725,725,725,
725,725,725,725,725,725,725,725,725,725,725,725,725,725,725,725,
-725,725,725,715,725,725,725,725,725,725,724,725,163,163,1359,1359,
-1359,1359,1359,1359,1359,1359,1359,1359,1359,1359,1359,1359,1359,1359,1359,1359,
-1359,1359,1359,1359,1359,1359,1359,1359,1359,1359,1359,1359,1359,1359,1359,1359,
-1359,1359,1359,1359,1359,1359,1359,1359,1359,1359,1359,1359,1359,1359,1359,1359,
+725,1367,726,726,726,726,726,726,726,726,726,726,726,726,726,726,
+726,726,726,726,726,726,726,726,726,726,726,716,726,726,726,726,
+726,726,725,725,725,725,725,725,725,725,725,725,725,725,725,725,
+725,725,725,725,725,725,725,725,725,725,725,1367,726,726,726,726,
/* block 246 */
-1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,
-1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,
-1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,
-1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,
-1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,
-1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,
-1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,
-1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,
+726,726,726,726,726,726,726,726,726,726,726,726,726,726,726,726,
+726,726,726,726,726,716,726,726,726,726,726,726,725,725,725,725,
+725,725,725,725,725,725,725,725,725,725,725,725,725,725,725,725,
+725,725,725,725,725,1367,726,726,726,726,726,726,726,726,726,726,
+726,726,726,726,726,726,726,726,726,726,726,726,726,726,726,716,
+726,726,726,726,726,726,725,725,725,725,725,725,725,725,725,725,
+725,725,725,725,725,725,725,725,725,725,725,725,725,725,725,1367,
+726,726,726,726,726,726,726,726,726,726,726,726,726,726,726,726,
/* block 247 */
-1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,
-1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,
-1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,
-1361,1361,1361,1361,1361,1361,1361,1360,1360,1360,1360,1361,1361,1361,1361,1361,
-1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,
-1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,
-1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,1360,1360,1360,
-1360,1360,1360,1360,1360,1361,1360,1360,1360,1360,1360,1360,1360,1360,1360,1360,
+726,726,726,726,726,726,726,726,726,716,726,726,726,726,726,726,
+725,725,725,725,725,725,725,725,725,725,725,725,725,725,725,725,
+725,725,725,725,725,725,725,725,725,1367,726,726,726,726,726,726,
+726,726,726,726,726,726,726,726,726,726,726,726,726,726,726,726,
+726,726,726,716,726,726,726,726,726,726,725,726,163,163,1368,1368,
+1368,1368,1368,1368,1368,1368,1368,1368,1368,1368,1368,1368,1368,1368,1368,1368,
+1368,1368,1368,1368,1368,1368,1368,1368,1368,1368,1368,1368,1368,1368,1368,1368,
+1368,1368,1368,1368,1368,1368,1368,1368,1368,1368,1368,1368,1368,1368,1368,1368,
/* block 248 */
-1360,1360,1360,1360,1361,1360,1360,1362,1363,1362,1362,1364,163,163,163,163,
-163,163,163,163,163,163,163,163,163,163,163,1361,1361,1361,1361,1361,
-163,1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,1361,
+1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,
+1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,
+1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,
+1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,
+1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,
+1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,
+1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,
+1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,
+
+/* block 249 */
+1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,
+1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,
+1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,
+1370,1370,1370,1370,1370,1370,1370,1369,1369,1369,1369,1370,1370,1370,1370,1370,
+1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,
+1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,
+1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,1369,1369,1369,
+1369,1369,1369,1369,1369,1370,1369,1369,1369,1369,1369,1369,1369,1369,1369,1369,
+
+/* block 250 */
+1369,1369,1369,1369,1370,1369,1369,1371,1372,1371,1371,1373,163,163,163,163,
+163,163,163,163,163,163,163,163,163,163,163,1370,1370,1370,1370,1370,
+163,1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
-/* block 249 */
+/* block 251 */
70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 92, 70, 70, 70, 70, 70,
- 70, 70, 70, 70, 70, 70, 70, 70, 70, 70,643, 70, 70, 70, 70,163,
+ 70, 70, 70, 70, 70, 70, 70, 70, 70, 70,644, 70, 70, 70, 70,163,
+163,163,163,163,163, 70, 70, 70, 70, 70, 70,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+
+/* block 252 */
+1374,1374,1374,1374,1374,1374,1374,163,1374,1374,1374,1374,1374,1374,1374,1374,
+1374,1374,1374,1374,1374,1374,1374,1374,1374,163,163,1374,1374,1374,1374,1374,
+1374,1374,163,1374,1374,163,1374,1374,1374,1374,1374,163,163,163,163,163,
+858,858,858,858,858,858,858,858,858,858,858,858,858,858,858,858,
+858,858,858,858,858,858,858,858,858,858,858,858,1375,1375,858,858,
+858,858,858,858,858,858,858,858,858,858,858,858,858,858,858,858,
+858,858,858,858,858,858,858,858,1375,858,858,858,858,858,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
-/* block 250 */
-1365,1365,1365,1365,1365,1365,1365,163,1365,1365,1365,1365,1365,1365,1365,1365,
-1365,1365,1365,1365,1365,1365,1365,1365,1365,163,163,1365,1365,1365,1365,1365,
-1365,1365,163,1365,1365,163,1365,1365,1365,1365,1365,163,163,163,163,163,
+/* block 253 */
+163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,788,
+163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
-/* block 251 */
-1366,1366,1366,1366,1366,1366,1366,1366,1366,1366,1366,1366,1366,1366,1366,1366,
-1366,1366,1366,1366,1366,1366,1366,1366,1366,1366,1366,1366,1366,1366,1366,1366,
-1366,1366,1366,1366,1366,1366,1366,1366,1366,1366,1366,1366,1366,163,163,163,
-1367,1367,1367,1367,1367,1367,1367,1368,1368,1368,1368,1368,1369,1369,163,163,
-1370,1370,1370,1370,1370,1370,1370,1370,1370,1370,163,163,163,163,1366,1371,
+/* block 254 */
+1376,1376,1376,1376,1376,1376,1376,1376,1376,1376,1376,1376,1376,1376,1376,1376,
+1376,1376,1376,1376,1376,1376,1376,1376,1376,1376,1376,1376,1376,1376,1376,1376,
+1376,1376,1376,1376,1376,1376,1376,1376,1376,1376,1376,1376,1376,163,163,163,
+1377,1377,1377,1377,1377,1377,1377,1378,1378,1378,1378,1378,1379,1379,163,163,
+1380,1380,1380,1380,1380,1380,1380,1380,1380,1380,163,163,163,163,1376,1381,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
-/* block 252 */
+/* block 255 */
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
-1372,1372,1372,1372,1372,1372,1372,1372,1372,1372,1372,1372,1372,1372,1372,1372,
-1372,1372,1372,1372,1372,1372,1372,1372,1372,1372,1372,1372,1372,1372,1373,163,
+1382,1382,1382,1382,1382,1382,1382,1382,1382,1382,1382,1382,1382,1382,1382,1382,
+1382,1382,1382,1382,1382,1382,1382,1382,1382,1382,1382,1382,1382,1382,1383,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
-1374,1374,1374,1374,1374,1374,1374,1374,1374,1374,1374,1374,1374,1374,1374,1374,
-1374,1374,1374,1374,1374,1374,1374,1374,1374,1374,1374,1374,1374,1374,1374,1374,
-1374,1374,1374,1374,1374,1374,1374,1374,1374,1374,1374,1374,1375,1375,1375,1375,
-1376,1376,1376,1376,1376,1376,1376,1376,1376,1376,163,163,163,163,163,1377,
+1384,1384,1384,1384,1384,1384,1384,1384,1384,1384,1384,1384,1384,1384,1384,1384,
+1384,1384,1384,1384,1384,1384,1384,1384,1384,1384,1384,1384,1384,1384,1384,1384,
+1384,1384,1384,1384,1384,1384,1384,1384,1384,1384,1384,1384,1385,1385,1385,1385,
+1386,1386,1386,1386,1386,1386,1386,1386,1386,1386,163,163,163,163,163,1387,
-/* block 253 */
+/* block 256 */
+163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+1388,1388,1388,1388,1388,1388,1388,1388,1388,1388,1388,1388,1388,1388,1388,1388,
+1388,1388,1388,1388,1388,1388,1388,1388,1388,1388,1388,1389,1390,1390,1390,1390,
+1391,1391,1391,1391,1391,1391,1391,1391,1391,1391,163,163,163,163,163,163,
+
+/* block 257 */
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
-483,483,483,483,483,483,483,163,483,483,483,483,163,483,483,163,
-483,483,483,483,483,483,483,483,483,483,483,483,483,483,483,163,
+163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+484,484,484,484,484,484,484,163,484,484,484,484,163,484,484,163,
+484,484,484,484,484,484,484,484,484,484,484,484,484,484,484,163,
-/* block 254 */
-1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,
-1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,
-1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,
-1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,
-1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,
-1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,
-1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,
-1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,
+/* block 258 */
+1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,
+1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,
+1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,
+1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,
+1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,
+1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,
+1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,
+1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,
-/* block 255 */
-1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,
-1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,
-1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,
-1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,1378,
-1378,1378,1378,1378,1378,262,262,1379,1379,1379,1379,1379,1379,1379,1379,1379,
-1380,1380,1380,1380,1380,1380,1380,262,262,262,262,262,262,262,262,262,
+/* block 259 */
+1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,
+1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,
+1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,
+1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,
+1392,1392,1392,1392,1392,262,262,1393,1393,1393,1393,1393,1393,1393,1393,1393,
+1394,1394,1394,1394,1394,1394,1394,262,262,262,262,262,262,262,262,262,
262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
-/* block 256 */
-1381,1381,1381,1381,1381,1381,1381,1381,1381,1381,1381,1381,1381,1381,1381,1381,
-1381,1381,1381,1381,1381,1381,1381,1381,1381,1381,1381,1381,1381,1381,1381,1381,
-1381,1381,1382,1382,1382,1382,1382,1382,1382,1382,1382,1382,1382,1382,1382,1382,
-1382,1382,1382,1382,1382,1382,1382,1382,1382,1382,1382,1382,1382,1382,1382,1382,
-1382,1382,1382,1382,1383,1383,1383,1384,1385,1385,1385,1386,262,262,262,262,
-1387,1387,1387,1387,1387,1387,1387,1387,1387,1387,262,262,262,262,1388,1388,
+/* block 260 */
+1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,
+1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,
+1395,1395,1396,1396,1396,1396,1396,1396,1396,1396,1396,1396,1396,1396,1396,1396,
+1396,1396,1396,1396,1396,1396,1396,1396,1396,1396,1396,1396,1396,1396,1396,1396,
+1396,1396,1396,1396,1397,1397,1397,1398,1399,1399,1399,1400,262,262,262,262,
+1401,1401,1401,1401,1401,1401,1401,1401,1401,1401,262,262,262,262,1402,1402,
262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
-/* block 257 */
+/* block 261 */
262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
@@ -4963,351 +5017,351 @@ const uint16_t PRIV(ucd_stage2)[] = { /* 76800 bytes, block = 128 */
262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
-302,1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,
+302,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,
-/* block 258 */
-1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,
-1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,
-1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,1390,1389,1389,1389,
-1391,1389,1389,1389,1389,302,302,302,302,302,302,302,302,302,302,302,
+/* block 262 */
+1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,
+1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,
+1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1404,1403,1403,1403,
+1405,1403,1403,1403,1403,302,302,302,302,302,302,302,302,302,302,302,
262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
-/* block 259 */
-302,1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,
-1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,
-1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,1390,1389,
-1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,1389,302,302,
+/* block 263 */
+302,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,
+1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,
+1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1404,1403,
+1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,302,302,
302,302,302,302,302,302,302,302,302,302,302,302,302,302,302,302,
262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,262,
-/* block 260 */
-1392,1392,1392,1392,302,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,
-1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,
-302,1392,1392,302,1392,302,302,1392,302,1392,1392,1392,1392,1392,1392,1392,
-1392,1392,1392,302,1392,1392,1392,1392,302,1392,302,1392,302,302,302,302,
-302,302,1392,302,302,302,302,1392,302,1392,302,1392,302,1392,1392,1392,
-302,1392,1392,302,1392,302,302,1392,302,1392,302,1392,302,1392,302,1392,
-302,1392,1392,302,1392,302,302,1392,1392,1392,1392,302,1392,1392,1392,1392,
-1392,1392,1392,302,1392,1392,1392,1392,302,1392,1392,1392,1392,302,1392,302,
+/* block 264 */
+1406,1406,1406,1406,302,1406,1406,1406,1406,1406,1406,1406,1406,1406,1406,1406,
+1406,1406,1406,1406,1406,1406,1406,1406,1406,1406,1406,1406,1406,1406,1406,1406,
+302,1406,1406,302,1406,302,302,1406,302,1406,1406,1406,1406,1406,1406,1406,
+1406,1406,1406,302,1406,1406,1406,1406,302,1406,302,1406,302,302,302,302,
+302,302,1406,302,302,302,302,1406,302,1406,302,1406,302,1406,1406,1406,
+302,1406,1406,302,1406,302,302,1406,302,1406,302,1406,302,1406,302,1406,
+302,1406,1406,302,1406,302,302,1406,1406,1406,1406,302,1406,1406,1406,1406,
+1406,1406,1406,302,1406,1406,1406,1406,302,1406,1406,1406,1406,302,1406,302,
-/* block 261 */
-1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,302,1392,1392,1392,1392,1392,
-1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,302,302,302,302,
-302,1392,1392,1392,302,1392,1392,1392,1392,1392,302,1392,1392,1392,1392,1392,
-1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,1392,302,302,302,302,
+/* block 265 */
+1406,1406,1406,1406,1406,1406,1406,1406,1406,1406,302,1406,1406,1406,1406,1406,
+1406,1406,1406,1406,1406,1406,1406,1406,1406,1406,1406,1406,302,302,302,302,
+302,1406,1406,1406,302,1406,1406,1406,1406,1406,302,1406,1406,1406,1406,1406,
+1406,1406,1406,1406,1406,1406,1406,1406,1406,1406,1406,1406,302,302,302,302,
302,302,302,302,302,302,302,302,302,302,302,302,302,302,302,302,
302,302,302,302,302,302,302,302,302,302,302,302,302,302,302,302,
302,302,302,302,302,302,302,302,302,302,302,302,302,302,302,302,
274,274,302,302,302,302,302,302,302,302,302,302,302,302,302,302,
-/* block 262 */
-1393,1393,1393,1393,1394,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,
-1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,
-1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1395,1395,1395,1395,
-1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,
-1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,
-1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,
-1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,
-1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,
-
-/* block 263 */
-1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,
-1393,1393,1393,1393,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,
-1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1395,
-1395,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,
-1395,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1394,
-1395,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,
-1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,
-1393,1393,1393,1393,1393,1393,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,
-
-/* block 264 */
- 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 58, 58,1393,1393,1393,
-460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,
-460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,1393,
-1396,1396,1396,1396,1396,1396,1396,1396,1396,1396,1396,1396,1396,1396,1396,1396,
-1396,1396,1396,1396,1396,1396,1396,1396,1396,1396,460,460,460,460,460,460,
-1396,1396,1396,1396,1396,1396,1396,1396,1396,1396,1396,1396,1396,1396,1396,1396,
-1396,1396,1396,1396,1396,1396,1396,1396,1396,1396,723,723,1393,1393,1393,1393,
-1397,1397,1396,1396,1396,1396,1396,1396,1396,1396,1396,1396,1396,1396,1397,1397,
-
-/* block 265 */
-1396,1396,1396,1396,1396,1396,1396,1396,1396,1396,460,460,460,460,1398,460,
-460,1398,1398,1398,1398,1398,1398,1398,1398,1398,1398,460,460,460,460,460,
-460,460,460,460,460,460,460,460,460,460,460,460,460,1393,1395,1395,
-1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,
-1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,
-1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,
-1395,1395,1395,1395,1395,1395,1399,1399,1399,1399,1399,1399,1399,1399,1399,1399,
-1399,1399,1399,1399,1399,1399,1399,1399,1399,1399,1399,1399,1399,1399,1399,1399,
-
/* block 266 */
-1400,1398,1401,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,
-460,460,460,460,460,460,460,460,460,460,1398,460,460,460,460,460,
-460,460,460,460,460,460,460,460,460,460,460,460,460,460,460,1398,
-460,460,1398,1398,1398,1398,1398,1401,1398,1398,1398,460,1395,1395,1395,1395,
-460,460,460,460,460,460,460,460,460,1395,1395,1395,1395,1395,1395,1395,
-1402,1402,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,
-1393,1393,1393,1393,1393,1393,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,
-1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,
+1407,1407,1407,1407,1408,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,
+1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,
+1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1409,1409,1409,1409,
+1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,
+1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,
+1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,
+1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,
+1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,
/* block 267 */
-1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,
-1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,
-1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,
-1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,
-1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,
-1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,
-1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,
-1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,
+1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,
+1407,1407,1407,1407,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,
+1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1409,
+1409,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,
+1409,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1408,
+1409,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,
+1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,
+1407,1407,1407,1407,1407,1407,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,
/* block 268 */
-1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,
-1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,
-1394,727,1393,1393,727,727,727,727,727,727,727,727,727,1394,1394,1394,
-1394,1394,1394,1394,1394,1394,727,1394,1394,1394,1394,1394,1394,1394,1394,1394,
-1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,
-1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,
-1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,
-1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,727,1394,1394,
+ 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 58, 58,1407,1407,1407,
+461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,
+461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,1407,
+1410,1410,1410,1410,1410,1410,1410,1410,1410,1410,1410,1410,1410,1410,1410,1410,
+1410,1410,1410,1410,1410,1410,1410,1410,1410,1410,461,461,461,461,461,461,
+1410,1410,1410,1410,1410,1410,1410,1410,1410,1410,1410,1410,1410,1410,1410,1410,
+1410,1410,1410,1410,1410,1410,1410,1410,1410,1410,724,724,1407,1407,1407,1407,
+1411,1411,1410,1410,1410,1410,1410,1410,1410,1410,1410,1410,1410,1410,1411,1411,
/* block 269 */
-1394,1394,1394,1394,1394,1403,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,
-1394,1394,1394,1394,1393,1393,727,727,1393,727,727,727,1393,1393,727,727,
-1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,
-1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,
-1394,1394,1403,1403,1403,1394,1394,1403,1394,1394,1403,1404,1404,727,727,1394,
-1394,1394,1394,1394,727,727,727,727,727,727,727,727,727,727,727,727,
-1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,
-1394,1393,1393,727,1394,727,1393,727,1394,1394,1394,1405,1405,1405,1405,1405,
+1410,1410,1410,1410,1410,1410,1410,1410,1410,1410,461,461,461,461,1412,461,
+461,1412,1412,1412,1412,1412,1412,1412,1412,1412,1412,461,461,461,461,461,
+461,461,461,461,461,461,461,461,461,461,461,461,461,1407,1409,1409,
+1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,
+1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,
+1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,
+1409,1409,1409,1409,1409,1409,1413,1413,1413,1413,1413,1413,1413,1413,1413,1413,
+1413,1413,1413,1413,1413,1413,1413,1413,1413,1413,1413,1413,1413,1413,1413,1413,
/* block 270 */
-1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,
-1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,
-1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,
-1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,727,
-1394,727,1403,1403,1394,1394,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,
-1403,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,
-1394,1394,1394,1394,1394,1394,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,
-1403,1403,1403,1403,1403,1403,1403,1403,1403,1394,1394,1394,1403,1394,1394,1394,
+1414,1412,1415,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,
+461,461,461,461,461,461,461,461,461,461,1412,461,461,461,461,461,
+461,461,461,461,461,461,461,461,461,461,461,461,461,461,461,1412,
+461,461,1412,1412,1412,1412,1412,1415,1412,1412,1412,461,1409,1409,1409,1409,
+461,461,461,461,461,461,461,461,461,1409,1409,1409,1409,1409,1409,1409,
+1416,1416,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,
+1407,1407,1407,1407,1407,1407,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,
+1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,
/* block 271 */
-1394,1403,1403,1403,1394,1403,1403,1403,1394,1394,1394,1394,1394,1394,1394,1403,
-1394,1403,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,
-1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1403,1394,1394,1394,1394,1394,
-1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,
-1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,
-1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,
-1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,
-1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,727,1393,1394,
+1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,
+1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,
+1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,
+1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,
+1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,
+1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,
+1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,
+1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,
/* block 272 */
-1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,
-1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,
-1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,
-1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,723,723,
-723,723,723,723,723,723,1393,1393,1393,727,727,1394,1394,1394,1394,1393,
-1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,
-1394,1394,1394,1394,1394,1394,1394,1394,1393,1393,1393,1393,1393,1393,1393,727,
-727,1393,1393,727,1404,1404,727,727,727,727,1403,1393,1393,1393,1393,1393,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,
+1408,728,1407,1407,728,728,728,728,728,728,728,728,728,1408,1408,1408,
+1408,1408,1408,1408,1408,1408,728,1408,1408,1408,1408,1408,1408,1408,1408,1408,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,728,1408,1408,
/* block 273 */
-1393,1393,1393,1393,1393,1393,1393,727,1393,1393,727,727,727,727,1393,1393,
-1404,1393,1393,1393,1393,1403,1403,1393,1393,1393,1393,1393,1393,1393,1393,1393,
-1393,1393,1393,1393,1394,727,1393,1393,727,1393,1393,1393,1393,1393,1393,1393,
-1393,727,727,1393,1393,1393,1393,1393,1393,1393,1393,1393,727,1393,1393,1393,
-1393,1393,727,727,727,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,
-1393,727,727,727,1393,1393,1393,1393,1393,1393,1393,1393,727,727,727,1393,
-1393,727,1393,727,1393,1393,1393,1393,727,1393,1393,1393,1393,1393,1393,727,
-1393,1393,1393,727,1393,1393,1393,1393,1393,1393,727,1394,1394,1394,1394,1394,
+1408,1408,1408,1408,1408,1417,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,
+1408,1408,1408,1408,1407,1407,728,728,1407,728,728,728,1407,1407,728,728,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,
+1408,1408,1417,1417,1417,1408,1408,1417,1408,1408,1417,1418,1418,728,728,1408,
+1408,1408,1408,1408,728,728,728,728,728,728,728,728,728,728,728,728,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,
+1408,1407,1407,728,1408,728,1407,728,1408,1408,1408,1419,1419,1419,1419,1419,
/* block 274 */
-1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,
-1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,
-1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,
-1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,
-1394,1394,1394,1394,1394,1403,1403,1403,1394,1394,1394,1403,1403,1403,1403,1403,
-723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,
-723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,
-723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,728,
+1408,728,1417,1417,1408,1408,1417,1417,1417,1417,1417,1417,1417,1417,1417,1417,
+1417,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,
+1408,1408,1408,1408,1408,1408,1417,1417,1417,1417,1417,1417,1417,1417,1417,1417,
+1417,1417,1417,1417,1417,1417,1417,1417,1417,1408,1408,1408,1417,1408,1408,1408,
/* block 275 */
-1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,
-1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,
-1394,1394,1394,1403,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,
-1394,1394,1394,1394,1403,1403,1403,1394,1394,1394,1394,1394,1394,1394,1394,1394,
-1403,1394,1394,1394,1394,1394,1393,1393,1393,1393,1393,727,1403,727,727,727,
-1394,1394,1394,1393,1393,1394,1394,1394,1395,1395,1395,1395,1395,1394,1394,1394,
-727,727,727,727,727,727,1393,1393,1393,727,1393,1394,1394,1395,1395,1395,
-727,1393,1393,727,1394,1394,1394,1394,1394,1394,1394,1394,1394,1395,1395,1395,
+1408,1417,1417,1417,1408,1417,1417,1417,1408,1408,1408,1408,1408,1408,1408,1417,
+1408,1417,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1417,1408,1408,1408,1408,1408,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,728,1407,1408,
/* block 276 */
-723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,
-723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,
-723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,
-723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,
-723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,
-723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,
-723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,
-723,723,723,723,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,724,724,
+724,724,724,724,724,724,1407,1407,1407,728,728,1408,1408,1408,1408,1407,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,
+1408,1408,1408,1408,1408,1408,1408,1408,1407,1407,1407,1407,1407,1407,1407,728,
+728,1407,1407,728,1418,1418,728,728,728,728,1417,1407,1407,1407,1407,1407,
/* block 277 */
-723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,
-723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,
-723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,
-723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,
-723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,
-723,723,723,723,723,1393,1393,1393,1393,1395,1395,1395,1395,1395,1395,1395,
-1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1395,1395,1395,1395,
-1394,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,
+1407,1407,1407,1407,1407,1407,1407,728,1407,1407,728,728,728,728,1407,1407,
+1418,1407,1407,1407,1407,1417,1417,1407,1407,1407,1407,1407,1407,1407,1407,1407,
+1407,1407,1407,1407,1408,728,1407,1407,728,1407,1407,1407,1407,1407,1407,1407,
+1407,728,728,1407,1407,1407,1407,1407,1407,1407,1407,1407,728,1407,1407,1407,
+1407,1407,728,728,728,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,
+1407,728,728,728,1407,1407,1407,1407,1407,1407,1407,1407,728,728,728,1407,
+1407,728,1407,728,1407,1407,1407,1407,728,1407,1407,1407,1407,1407,1407,728,
+1407,1407,1407,728,1407,1407,1407,1407,1407,1407,728,1408,1408,1408,1408,1408,
/* block 278 */
-723,723,723,723,723,723,723,723,723,723,723,723,1395,1395,1395,1395,
-723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,
-723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,
-723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,
-723,723,723,723,723,723,723,723,1395,1395,1395,1395,1395,1395,1395,1395,
-723,723,723,723,723,723,723,723,723,723,1395,1395,1395,1395,1395,1395,
-723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,
-723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,
+1408,1408,1408,1408,1408,1417,1417,1417,1408,1408,1408,1417,1417,1417,1417,1417,
+724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
+724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
+724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
/* block 279 */
-723,723,723,723,723,723,723,723,1395,1395,1395,1395,1395,1395,1395,1395,
-723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,
-723,723,723,723,723,723,723,723,723,723,723,723,723,723,1395,1395,
-1393,1393,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,
-1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,
-1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,
-1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,
-1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,
+1408,1408,1408,1417,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,
+1408,1408,1408,1408,1417,1417,1417,1408,1408,1408,1408,1408,1408,1408,1408,1408,
+1417,1408,1408,1408,1408,1408,1407,1407,1407,1407,1407,728,1417,728,728,728,
+1408,1408,1408,1407,1407,1408,1408,1408,1409,1409,1409,1409,1408,1408,1408,1408,
+728,728,728,728,728,728,1407,1407,1407,728,1407,1408,1408,1409,1409,1409,
+728,1407,1407,728,1408,1408,1408,1408,1408,1408,1408,1408,1408,1409,1409,1409,
/* block 280 */
-723,723,723,723,723,723,723,723,723,723,723,723,1403,1394,1394,1403,
-1394,1394,1394,1394,1394,1394,1394,1394,1403,1403,1403,1403,1403,1403,1403,1403,
-1394,1394,1394,1394,1394,1394,1403,1394,1394,1394,1394,1394,1394,1394,1394,1394,
-1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1394,723,1403,1403,1403,1394,
-1394,1394,1394,1394,1394,1394,723,1394,1394,1394,1394,1394,1394,1394,1394,1394,
-1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,
-1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,
-1394,1394,1394,1394,1394,1394,1394,1403,1394,1394,1394,1394,1394,1394,1394,1394,
+724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
+724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
+724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
+724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
+724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
+724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
+724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
+724,724,724,724,1407,1407,1407,1409,1409,1409,1409,1407,1407,1407,1407,1407,
/* block 281 */
-1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,
-1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,
-1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,
-1406,1406,1406,1406,1394,1403,1403,1394,1403,1403,1394,1403,1394,1394,1394,1394,
-1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1403,1403,1403,
-1394,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1403,1394,1394,
-1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,
-1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,
+724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
+724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
+724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
+724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
+724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
+724,724,724,724,724,1407,1407,1407,1407,1407,1409,1409,1409,1409,1409,1409,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1409,1409,1409,1409,
+1408,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,
/* block 282 */
-1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,
-1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,
-1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,
-1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,
-1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,
-1393,1393,1393,1393,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,
-1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1393,1395,1395,
-1394,1394,1394,1394,1394,1395,1395,1395,1394,1394,1394,1394,1394,1395,1395,1395,
+724,724,724,724,724,724,724,724,724,724,724,724,1409,1409,1409,1409,
+724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
+724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
+724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
+724,724,724,724,724,724,724,724,1409,1409,1409,1409,1409,1409,1409,1409,
+724,724,724,724,724,724,724,724,724,724,1409,1409,1409,1409,1409,1409,
+724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
+724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
/* block 283 */
-1394,1394,1394,1394,1394,1394,1394,1395,1395,1395,1395,1395,1395,1395,1395,1395,
-1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,
-1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1395,1395,1395,
-1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1395,1395,1395,1395,1395,
-1394,1394,1394,1403,1403,1403,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,
-1394,1394,1394,1394,1394,1394,1394,1394,1394,1394,1395,1395,1395,1395,1395,1395,
-1394,1394,1394,1394,1394,1394,1394,1394,1395,1395,1395,1395,1395,1395,1395,1395,
-1403,1403,1403,1403,1403,1403,1403,1395,1395,1395,1395,1395,1395,1395,1395,1395,
+724,724,724,724,724,724,724,724,1409,1409,1409,1409,1409,1409,1409,1409,
+724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
+724,724,724,724,724,724,724,724,724,724,724,724,724,724,1409,1409,
+1407,1407,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,
+1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,
+1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,
+1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,
+1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,
/* block 284 */
-723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,
-723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,
-723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,
-723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,
-723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,
-723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,
-723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,
-723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,
+724,724,724,724,724,724,724,724,724,724,724,724,1417,1408,1408,1417,
+1408,1408,1408,1408,1408,1408,1408,1408,1417,1417,1417,1417,1417,1417,1417,1417,
+1408,1408,1408,1408,1408,1408,1417,1408,1408,1408,1408,1408,1408,1408,1408,1408,
+1417,1417,1417,1417,1417,1417,1417,1417,1417,1417,1408,724,1417,1417,1417,1408,
+1408,1408,1408,1408,1408,1408,724,1408,1408,1408,1408,1408,1408,1408,1408,1408,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,
+1408,1408,1408,1408,1408,1408,1408,1417,1408,1408,1408,1408,1408,1408,1408,1408,
/* block 285 */
-723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,
-723,723,723,163,723,723,723,723,723,723,723,723,723,723,723,723,
-723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,
-723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,723,
-723,723,723,723,723,723,723,723,723,723,723,163,163,163,163,163,
-163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
-163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
-1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,163,163,163,163,163,163,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,
+1420,1420,1420,1420,1408,1417,1417,1408,1417,1417,1408,1417,1408,1408,1408,1408,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1417,1417,1417,
+1408,1417,1417,1417,1417,1417,1417,1417,1417,1417,1417,1417,1417,1417,1408,1408,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,
/* block 286 */
-1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,
-1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,
-1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,
-1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,
-1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,
-1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,
-1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,
-1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,1395,958,958,
+1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,
+1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,
+1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,
+1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,
+1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,
+1407,1407,1407,1407,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,
+1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1407,1409,1409,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1409,1409,1409,
/* block 287 */
-838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
-838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
-838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
-838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
-838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
-838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
-163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
-163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1409,1409,1409,1409,1409,1409,1409,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1409,1408,
+1408,1408,1408,1417,1417,1417,1409,1409,1409,1409,1409,1409,1409,1409,1408,1408,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1409,1409,1409,1409,
+1408,1408,1408,1408,1408,1408,1408,1408,1408,1409,1409,1409,1409,1409,1409,1409,
+1417,1417,1417,1417,1417,1417,1417,1417,1417,1409,1409,1409,1409,1409,1409,1409,
/* block 288 */
-838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
-838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
-838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
-838,838,838,838,838,838,838,838,838,163,163,163,163,163,163,163,
-838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
-838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
-838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
-838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
+724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
+724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
+724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
+724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
+724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
+724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
+724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
+724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
/* block 289 */
-838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
-838,838,838,838,838,838,838,838,838,838,838,838,838,838,163,163,
-838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
-838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
-838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
-838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
-838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
-838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
+724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
+724,724,724,163,724,724,724,724,724,724,724,724,724,724,724,724,
+724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
+724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,724,
+724,724,724,724,724,724,724,724,724,724,724,163,163,163,163,163,
+163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+1421,1421,1421,1421,1421,1421,1421,1421,1421,1421,163,163,163,163,163,163,
/* block 290 */
-838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
-838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
-838,838,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
-838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
-838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
-838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
-838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
-838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
+1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,
+1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,
+1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,
+1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,
+1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,
+1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,
+1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,
+1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,1409,957,957,
/* block 291 */
-838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
-838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
-838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
-838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
-838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
-838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
-838,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
/* block 292 */
-953,953,953,953,953,953,953,953,953,953,953,953,953,953,953,953,
-953,953,953,953,953,953,953,953,953,953,953,953,953,953,163,163,
+839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+839,839,839,839,839,839,839,839,839,839,163,163,163,163,163,163,
+839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+
+/* block 293 */
+839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+839,839,839,839,839,839,839,839,839,839,839,839,839,839,163,163,
+839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+
+/* block 294 */
+839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+839,839,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+
+/* block 295 */
+839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+839,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+
+/* block 296 */
+952,952,952,952,952,952,952,952,952,952,952,952,952,952,952,952,
+952,952,952,952,952,952,952,952,952,952,952,952,952,952,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
@@ -5315,7 +5369,7 @@ const uint16_t PRIV(ucd_stage2)[] = { /* 76800 bytes, block = 128 */
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
-/* block 293 */
+/* block 297 */
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
@@ -5323,67 +5377,77 @@ const uint16_t PRIV(ucd_stage2)[] = { /* 76800 bytes, block = 128 */
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
-163,163,163,163,163,163,163,163,163,163,163,163,163,163,958,958,
+163,163,163,163,163,163,163,163,163,163,163,163,163,163,957,957,
-/* block 294 */
-838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
-838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
-838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
-838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,838,
-838,838,838,838,838,838,838,838,838,838,838,163,163,163,163,163,
+/* block 298 */
+839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+839,839,839,839,839,839,839,839,839,839,839,163,163,163,163,163,
+839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+
+/* block 299 */
+839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,839,
+163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
+163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,
-/* block 295 */
-707,712,707,707,707,707,707,707,707,707,707,707,707,707,707,707,
-707,707,707,707,707,707,707,707,707,707,707,707,707,707,707,707,
-1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,
-1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,
-1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,
-1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,
-1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,
-1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,1408,
-
-/* block 296 */
-707,707,707,707,707,707,707,707,707,707,707,707,707,707,707,707,
-707,707,707,707,707,707,707,707,707,707,707,707,707,707,707,707,
-707,707,707,707,707,707,707,707,707,707,707,707,707,707,707,707,
-707,707,707,707,707,707,707,707,707,707,707,707,707,707,707,707,
-707,707,707,707,707,707,707,707,707,707,707,707,707,707,707,707,
-707,707,707,707,707,707,707,707,707,707,707,707,707,707,707,707,
-707,707,707,707,707,707,707,707,707,707,707,707,707,707,707,707,
-707,707,707,707,707,707,707,707,707,707,707,707,707,707,707,707,
-
-/* block 297 */
-961,961,961,961,961,961,961,961,961,961,961,961,961,961,961,961,
-961,961,961,961,961,961,961,961,961,961,961,961,961,961,961,961,
-961,961,961,961,961,961,961,961,961,961,961,961,961,961,961,961,
-961,961,961,961,961,961,961,961,961,961,961,961,961,961,961,961,
-961,961,961,961,961,961,961,961,961,961,961,961,961,961,961,961,
-961,961,961,961,961,961,961,961,961,961,961,961,961,961,961,961,
-961,961,961,961,961,961,961,961,961,961,961,961,961,961,961,961,
-961,961,961,961,961,961,961,961,961,961,961,961,961,961,961,961,
-
-/* block 298 */
-961,961,961,961,961,961,961,961,961,961,961,961,961,961,961,961,
-961,961,961,961,961,961,961,961,961,961,961,961,961,961,961,961,
-961,961,961,961,961,961,961,961,961,961,961,961,961,961,961,961,
-961,961,961,961,961,961,961,961,961,961,961,961,961,961,961,961,
-961,961,961,961,961,961,961,961,961,961,961,961,961,961,961,961,
-961,961,961,961,961,961,961,961,961,961,961,961,961,961,961,961,
-961,961,961,961,961,961,961,961,961,961,961,961,961,961,961,961,
-707,707,707,707,707,707,707,707,707,707,707,707,707,707,707,707,
-
-/* block 299 */
-952,952,952,952,952,952,952,952,952,952,952,952,952,952,952,952,
-952,952,952,952,952,952,952,952,952,952,952,952,952,952,952,952,
-952,952,952,952,952,952,952,952,952,952,952,952,952,952,952,952,
-952,952,952,952,952,952,952,952,952,952,952,952,952,952,952,952,
-952,952,952,952,952,952,952,952,952,952,952,952,952,952,952,952,
-952,952,952,952,952,952,952,952,952,952,952,952,952,952,952,952,
-952,952,952,952,952,952,952,952,952,952,952,952,952,952,952,952,
-952,952,952,952,952,952,952,952,952,952,952,952,952,952,958,958,
+/* block 300 */
+708,713,708,708,708,708,708,708,708,708,708,708,708,708,708,708,
+708,708,708,708,708,708,708,708,708,708,708,708,708,708,708,708,
+1422,1422,1422,1422,1422,1422,1422,1422,1422,1422,1422,1422,1422,1422,1422,1422,
+1422,1422,1422,1422,1422,1422,1422,1422,1422,1422,1422,1422,1422,1422,1422,1422,
+1422,1422,1422,1422,1422,1422,1422,1422,1422,1422,1422,1422,1422,1422,1422,1422,
+1422,1422,1422,1422,1422,1422,1422,1422,1422,1422,1422,1422,1422,1422,1422,1422,
+1422,1422,1422,1422,1422,1422,1422,1422,1422,1422,1422,1422,1422,1422,1422,1422,
+1422,1422,1422,1422,1422,1422,1422,1422,1422,1422,1422,1422,1422,1422,1422,1422,
+
+/* block 301 */
+708,708,708,708,708,708,708,708,708,708,708,708,708,708,708,708,
+708,708,708,708,708,708,708,708,708,708,708,708,708,708,708,708,
+708,708,708,708,708,708,708,708,708,708,708,708,708,708,708,708,
+708,708,708,708,708,708,708,708,708,708,708,708,708,708,708,708,
+708,708,708,708,708,708,708,708,708,708,708,708,708,708,708,708,
+708,708,708,708,708,708,708,708,708,708,708,708,708,708,708,708,
+708,708,708,708,708,708,708,708,708,708,708,708,708,708,708,708,
+708,708,708,708,708,708,708,708,708,708,708,708,708,708,708,708,
+
+/* block 302 */
+960,960,960,960,960,960,960,960,960,960,960,960,960,960,960,960,
+960,960,960,960,960,960,960,960,960,960,960,960,960,960,960,960,
+960,960,960,960,960,960,960,960,960,960,960,960,960,960,960,960,
+960,960,960,960,960,960,960,960,960,960,960,960,960,960,960,960,
+960,960,960,960,960,960,960,960,960,960,960,960,960,960,960,960,
+960,960,960,960,960,960,960,960,960,960,960,960,960,960,960,960,
+960,960,960,960,960,960,960,960,960,960,960,960,960,960,960,960,
+960,960,960,960,960,960,960,960,960,960,960,960,960,960,960,960,
+
+/* block 303 */
+960,960,960,960,960,960,960,960,960,960,960,960,960,960,960,960,
+960,960,960,960,960,960,960,960,960,960,960,960,960,960,960,960,
+960,960,960,960,960,960,960,960,960,960,960,960,960,960,960,960,
+960,960,960,960,960,960,960,960,960,960,960,960,960,960,960,960,
+960,960,960,960,960,960,960,960,960,960,960,960,960,960,960,960,
+960,960,960,960,960,960,960,960,960,960,960,960,960,960,960,960,
+960,960,960,960,960,960,960,960,960,960,960,960,960,960,960,960,
+708,708,708,708,708,708,708,708,708,708,708,708,708,708,708,708,
+
+/* block 304 */
+951,951,951,951,951,951,951,951,951,951,951,951,951,951,951,951,
+951,951,951,951,951,951,951,951,951,951,951,951,951,951,951,951,
+951,951,951,951,951,951,951,951,951,951,951,951,951,951,951,951,
+951,951,951,951,951,951,951,951,951,951,951,951,951,951,951,951,
+951,951,951,951,951,951,951,951,951,951,951,951,951,951,951,951,
+951,951,951,951,951,951,951,951,951,951,951,951,951,951,951,951,
+951,951,951,951,951,951,951,951,951,951,951,951,951,951,951,951,
+951,951,951,951,951,951,951,951,951,951,951,951,951,951,957,957,
};
#if UCD_BLOCK_SIZE != 128
diff --git a/src/3rdparty/pcre2/src/pcre2_ucp.h b/src/3rdparty/pcre2/src/pcre2_ucp.h
index 282238982d..9ccc829750 100644
--- a/src/3rdparty/pcre2/src/pcre2_ucp.h
+++ b/src/3rdparty/pcre2/src/pcre2_ucp.h
@@ -166,29 +166,29 @@ enum {
/* These are the bidi class values. */
enum {
- ucp_bidiAL, /* Arabic letter */
- ucp_bidiAN, /* Arabic number */
- ucp_bidiB, /* Paragraph separator */
- ucp_bidiBN, /* Boundary neutral */
- ucp_bidiCS, /* Common separator */
- ucp_bidiEN, /* European number */
- ucp_bidiES, /* European separator */
- ucp_bidiET, /* European terminator */
- ucp_bidiFSI, /* First strong isolate */
- ucp_bidiL, /* Left to right */
- ucp_bidiLRE, /* Left to right embedding */
- ucp_bidiLRI, /* Left to right isolate */
- ucp_bidiLRO, /* Left to right override */
- ucp_bidiNSM, /* Non-spacing mark */
- ucp_bidiON, /* Other neutral */
- ucp_bidiPDF, /* Pop directional format */
- ucp_bidiPDI, /* Pop directional isolate */
- ucp_bidiR, /* Right to left */
- ucp_bidiRLE, /* Right to left embedding */
- ucp_bidiRLI, /* Right to left isolate */
- ucp_bidiRLO, /* Right to left override */
- ucp_bidiS, /* Segment separator */
- ucp_bidiWS, /* White space */
+ ucp_bidiAL, /* Arabic_Letter */
+ ucp_bidiAN, /* Arabic_Number */
+ ucp_bidiB, /* Paragraph_Separator */
+ ucp_bidiBN, /* Boundary_Neutral */
+ ucp_bidiCS, /* Common_Separator */
+ ucp_bidiEN, /* European_Number */
+ ucp_bidiES, /* European_Separator */
+ ucp_bidiET, /* European_Terminator */
+ ucp_bidiFSI, /* First_Strong_Isolate */
+ ucp_bidiL, /* Left_To_Right */
+ ucp_bidiLRE, /* Left_To_Right_Embedding */
+ ucp_bidiLRI, /* Left_To_Right_Isolate */
+ ucp_bidiLRO, /* Left_To_Right_Override */
+ ucp_bidiNSM, /* Nonspacing_Mark */
+ ucp_bidiON, /* Other_Neutral */
+ ucp_bidiPDF, /* Pop_Directional_Format */
+ ucp_bidiPDI, /* Pop_Directional_Isolate */
+ ucp_bidiR, /* Right_To_Left */
+ ucp_bidiRLE, /* Right_To_Left_Embedding */
+ ucp_bidiRLI, /* Right_To_Left_Isolate */
+ ucp_bidiRLO, /* Right_To_Left_Override */
+ ucp_bidiS, /* Segment_Separator */
+ ucp_bidiWS, /* White_Space */
};
/* These are grapheme break properties. The Extended Pictographic property
@@ -380,6 +380,8 @@ enum {
ucp_Tangsa,
ucp_Toto,
ucp_Vithkuqi,
+ ucp_Kawi,
+ ucp_Nag_Mundari,
/* This must be last */
ucp_Script_Count
diff --git a/src/3rdparty/pcre2/src/pcre2_ucptables.c b/src/3rdparty/pcre2/src/pcre2_ucptables.c
index bd1b67a9f1..2110014c29 100644
--- a/src/3rdparty/pcre2/src/pcre2_ucptables.c
+++ b/src/3rdparty/pcre2/src/pcre2_ucptables.c
@@ -265,6 +265,7 @@ the "loose matching" rules that Unicode advises and Perl uses. */
#define STRING_kana0 STR_k STR_a STR_n STR_a "\0"
#define STRING_kannada0 STR_k STR_a STR_n STR_n STR_a STR_d STR_a "\0"
#define STRING_katakana0 STR_k STR_a STR_t STR_a STR_k STR_a STR_n STR_a "\0"
+#define STRING_kawi0 STR_k STR_a STR_w STR_i "\0"
#define STRING_kayahli0 STR_k STR_a STR_y STR_a STR_h STR_l STR_i "\0"
#define STRING_khar0 STR_k STR_h STR_a STR_r "\0"
#define STRING_kharoshthi0 STR_k STR_h STR_a STR_r STR_o STR_s STR_h STR_t STR_h STR_i "\0"
@@ -347,6 +348,8 @@ the "loose matching" rules that Unicode advises and Perl uses. */
#define STRING_mymr0 STR_m STR_y STR_m STR_r "\0"
#define STRING_n0 STR_n "\0"
#define STRING_nabataean0 STR_n STR_a STR_b STR_a STR_t STR_a STR_e STR_a STR_n "\0"
+#define STRING_nagm0 STR_n STR_a STR_g STR_m "\0"
+#define STRING_nagmundari0 STR_n STR_a STR_g STR_m STR_u STR_n STR_d STR_a STR_r STR_i "\0"
#define STRING_nand0 STR_n STR_a STR_n STR_d "\0"
#define STRING_nandinagari0 STR_n STR_a STR_n STR_d STR_i STR_n STR_a STR_g STR_a STR_r STR_i "\0"
#define STRING_narb0 STR_n STR_a STR_r STR_b "\0"
@@ -753,6 +756,7 @@ const char PRIV(utt_names)[] =
STRING_kana0
STRING_kannada0
STRING_katakana0
+ STRING_kawi0
STRING_kayahli0
STRING_khar0
STRING_kharoshthi0
@@ -835,6 +839,8 @@ const char PRIV(utt_names)[] =
STRING_mymr0
STRING_n0
STRING_nabataean0
+ STRING_nagm0
+ STRING_nagmundari0
STRING_nand0
STRING_nandinagari0
STRING_narb0
@@ -1241,280 +1247,283 @@ const ucp_type_table PRIV(utt)[] = {
{ 1665, PT_SCX, ucp_Katakana },
{ 1670, PT_SCX, ucp_Kannada },
{ 1678, PT_SCX, ucp_Katakana },
- { 1687, PT_SCX, ucp_Kayah_Li },
- { 1695, PT_SC, ucp_Kharoshthi },
+ { 1687, PT_SC, ucp_Kawi },
+ { 1692, PT_SCX, ucp_Kayah_Li },
{ 1700, PT_SC, ucp_Kharoshthi },
- { 1711, PT_SC, ucp_Khitan_Small_Script },
- { 1729, PT_SC, ucp_Khmer },
- { 1735, PT_SC, ucp_Khmer },
- { 1740, PT_SCX, ucp_Khojki },
+ { 1705, PT_SC, ucp_Kharoshthi },
+ { 1716, PT_SC, ucp_Khitan_Small_Script },
+ { 1734, PT_SC, ucp_Khmer },
+ { 1740, PT_SC, ucp_Khmer },
{ 1745, PT_SCX, ucp_Khojki },
- { 1752, PT_SCX, ucp_Khudawadi },
- { 1762, PT_SC, ucp_Khitan_Small_Script },
- { 1767, PT_SCX, ucp_Kannada },
- { 1772, PT_SCX, ucp_Kaithi },
- { 1777, PT_GC, ucp_L },
- { 1779, PT_LAMP, 0 },
- { 1782, PT_SC, ucp_Tai_Tham },
- { 1787, PT_SC, ucp_Lao },
- { 1791, PT_SC, ucp_Lao },
- { 1796, PT_SCX, ucp_Latin },
- { 1802, PT_SCX, ucp_Latin },
- { 1807, PT_LAMP, 0 },
- { 1810, PT_SC, ucp_Lepcha },
+ { 1750, PT_SCX, ucp_Khojki },
+ { 1757, PT_SCX, ucp_Khudawadi },
+ { 1767, PT_SC, ucp_Khitan_Small_Script },
+ { 1772, PT_SCX, ucp_Kannada },
+ { 1777, PT_SCX, ucp_Kaithi },
+ { 1782, PT_GC, ucp_L },
+ { 1784, PT_LAMP, 0 },
+ { 1787, PT_SC, ucp_Tai_Tham },
+ { 1792, PT_SC, ucp_Lao },
+ { 1796, PT_SC, ucp_Lao },
+ { 1801, PT_SCX, ucp_Latin },
+ { 1807, PT_SCX, ucp_Latin },
+ { 1812, PT_LAMP, 0 },
{ 1815, PT_SC, ucp_Lepcha },
- { 1822, PT_SCX, ucp_Limbu },
+ { 1820, PT_SC, ucp_Lepcha },
{ 1827, PT_SCX, ucp_Limbu },
- { 1833, PT_SCX, ucp_Linear_A },
- { 1838, PT_SCX, ucp_Linear_B },
- { 1843, PT_SCX, ucp_Linear_A },
- { 1851, PT_SCX, ucp_Linear_B },
- { 1859, PT_SC, ucp_Lisu },
- { 1864, PT_PC, ucp_Ll },
- { 1867, PT_PC, ucp_Lm },
- { 1870, PT_PC, ucp_Lo },
- { 1873, PT_BOOL, ucp_Logical_Order_Exception },
- { 1877, PT_BOOL, ucp_Logical_Order_Exception },
- { 1899, PT_BOOL, ucp_Lowercase },
- { 1905, PT_BOOL, ucp_Lowercase },
- { 1915, PT_PC, ucp_Lt },
- { 1918, PT_PC, ucp_Lu },
- { 1921, PT_SC, ucp_Lycian },
+ { 1832, PT_SCX, ucp_Limbu },
+ { 1838, PT_SCX, ucp_Linear_A },
+ { 1843, PT_SCX, ucp_Linear_B },
+ { 1848, PT_SCX, ucp_Linear_A },
+ { 1856, PT_SCX, ucp_Linear_B },
+ { 1864, PT_SC, ucp_Lisu },
+ { 1869, PT_PC, ucp_Ll },
+ { 1872, PT_PC, ucp_Lm },
+ { 1875, PT_PC, ucp_Lo },
+ { 1878, PT_BOOL, ucp_Logical_Order_Exception },
+ { 1882, PT_BOOL, ucp_Logical_Order_Exception },
+ { 1904, PT_BOOL, ucp_Lowercase },
+ { 1910, PT_BOOL, ucp_Lowercase },
+ { 1920, PT_PC, ucp_Lt },
+ { 1923, PT_PC, ucp_Lu },
{ 1926, PT_SC, ucp_Lycian },
- { 1933, PT_SC, ucp_Lydian },
+ { 1931, PT_SC, ucp_Lycian },
{ 1938, PT_SC, ucp_Lydian },
- { 1945, PT_GC, ucp_M },
- { 1947, PT_SCX, ucp_Mahajani },
- { 1956, PT_SCX, ucp_Mahajani },
- { 1961, PT_SC, ucp_Makasar },
+ { 1943, PT_SC, ucp_Lydian },
+ { 1950, PT_GC, ucp_M },
+ { 1952, PT_SCX, ucp_Mahajani },
+ { 1961, PT_SCX, ucp_Mahajani },
{ 1966, PT_SC, ucp_Makasar },
- { 1974, PT_SCX, ucp_Malayalam },
- { 1984, PT_SCX, ucp_Mandaic },
+ { 1971, PT_SC, ucp_Makasar },
+ { 1979, PT_SCX, ucp_Malayalam },
{ 1989, PT_SCX, ucp_Mandaic },
- { 1997, PT_SCX, ucp_Manichaean },
+ { 1994, PT_SCX, ucp_Mandaic },
{ 2002, PT_SCX, ucp_Manichaean },
- { 2013, PT_SC, ucp_Marchen },
+ { 2007, PT_SCX, ucp_Manichaean },
{ 2018, PT_SC, ucp_Marchen },
- { 2026, PT_SCX, ucp_Masaram_Gondi },
- { 2039, PT_BOOL, ucp_Math },
- { 2044, PT_PC, ucp_Mc },
- { 2047, PT_PC, ucp_Me },
- { 2050, PT_SC, ucp_Medefaidrin },
- { 2062, PT_SC, ucp_Medefaidrin },
- { 2067, PT_SC, ucp_Meetei_Mayek },
- { 2079, PT_SC, ucp_Mende_Kikakui },
+ { 2023, PT_SC, ucp_Marchen },
+ { 2031, PT_SCX, ucp_Masaram_Gondi },
+ { 2044, PT_BOOL, ucp_Math },
+ { 2049, PT_PC, ucp_Mc },
+ { 2052, PT_PC, ucp_Me },
+ { 2055, PT_SC, ucp_Medefaidrin },
+ { 2067, PT_SC, ucp_Medefaidrin },
+ { 2072, PT_SC, ucp_Meetei_Mayek },
{ 2084, PT_SC, ucp_Mende_Kikakui },
- { 2097, PT_SC, ucp_Meroitic_Cursive },
- { 2102, PT_SC, ucp_Meroitic_Hieroglyphs },
- { 2107, PT_SC, ucp_Meroitic_Cursive },
- { 2123, PT_SC, ucp_Meroitic_Hieroglyphs },
- { 2143, PT_SC, ucp_Miao },
- { 2148, PT_SCX, ucp_Malayalam },
- { 2153, PT_PC, ucp_Mn },
- { 2156, PT_SCX, ucp_Modi },
- { 2161, PT_SCX, ucp_Mongolian },
+ { 2089, PT_SC, ucp_Mende_Kikakui },
+ { 2102, PT_SC, ucp_Meroitic_Cursive },
+ { 2107, PT_SC, ucp_Meroitic_Hieroglyphs },
+ { 2112, PT_SC, ucp_Meroitic_Cursive },
+ { 2128, PT_SC, ucp_Meroitic_Hieroglyphs },
+ { 2148, PT_SC, ucp_Miao },
+ { 2153, PT_SCX, ucp_Malayalam },
+ { 2158, PT_PC, ucp_Mn },
+ { 2161, PT_SCX, ucp_Modi },
{ 2166, PT_SCX, ucp_Mongolian },
- { 2176, PT_SC, ucp_Mro },
- { 2180, PT_SC, ucp_Mro },
- { 2185, PT_SC, ucp_Meetei_Mayek },
- { 2190, PT_SCX, ucp_Multani },
+ { 2171, PT_SCX, ucp_Mongolian },
+ { 2181, PT_SC, ucp_Mro },
+ { 2185, PT_SC, ucp_Mro },
+ { 2190, PT_SC, ucp_Meetei_Mayek },
{ 2195, PT_SCX, ucp_Multani },
- { 2203, PT_SCX, ucp_Myanmar },
- { 2211, PT_SCX, ucp_Myanmar },
- { 2216, PT_GC, ucp_N },
- { 2218, PT_SC, ucp_Nabataean },
- { 2228, PT_SCX, ucp_Nandinagari },
- { 2233, PT_SCX, ucp_Nandinagari },
- { 2245, PT_SC, ucp_Old_North_Arabian },
- { 2250, PT_SC, ucp_Nabataean },
- { 2255, PT_BOOL, ucp_Noncharacter_Code_Point },
- { 2261, PT_PC, ucp_Nd },
- { 2264, PT_SC, ucp_Newa },
- { 2269, PT_SC, ucp_New_Tai_Lue },
- { 2279, PT_SCX, ucp_Nko },
- { 2283, PT_SCX, ucp_Nko },
- { 2288, PT_PC, ucp_Nl },
- { 2291, PT_PC, ucp_No },
- { 2294, PT_BOOL, ucp_Noncharacter_Code_Point },
- { 2316, PT_SC, ucp_Nushu },
- { 2321, PT_SC, ucp_Nushu },
- { 2327, PT_SC, ucp_Nyiakeng_Puachue_Hmong },
- { 2348, PT_SC, ucp_Ogham },
- { 2353, PT_SC, ucp_Ogham },
- { 2359, PT_SC, ucp_Ol_Chiki },
- { 2367, PT_SC, ucp_Ol_Chiki },
- { 2372, PT_SC, ucp_Old_Hungarian },
- { 2385, PT_SC, ucp_Old_Italic },
- { 2395, PT_SC, ucp_Old_North_Arabian },
- { 2411, PT_SCX, ucp_Old_Permic },
- { 2421, PT_SC, ucp_Old_Persian },
- { 2432, PT_SC, ucp_Old_Sogdian },
- { 2443, PT_SC, ucp_Old_South_Arabian },
- { 2459, PT_SC, ucp_Old_Turkic },
- { 2469, PT_SCX, ucp_Old_Uyghur },
- { 2479, PT_SCX, ucp_Oriya },
- { 2485, PT_SC, ucp_Old_Turkic },
- { 2490, PT_SCX, ucp_Oriya },
- { 2495, PT_SC, ucp_Osage },
- { 2501, PT_SC, ucp_Osage },
- { 2506, PT_SC, ucp_Osmanya },
- { 2511, PT_SC, ucp_Osmanya },
- { 2519, PT_SCX, ucp_Old_Uyghur },
- { 2524, PT_GC, ucp_P },
- { 2526, PT_SC, ucp_Pahawh_Hmong },
- { 2538, PT_SC, ucp_Palmyrene },
- { 2543, PT_SC, ucp_Palmyrene },
- { 2553, PT_BOOL, ucp_Pattern_Syntax },
- { 2560, PT_BOOL, ucp_Pattern_Syntax },
- { 2574, PT_BOOL, ucp_Pattern_White_Space },
- { 2592, PT_BOOL, ucp_Pattern_White_Space },
- { 2598, PT_SC, ucp_Pau_Cin_Hau },
- { 2603, PT_SC, ucp_Pau_Cin_Hau },
- { 2613, PT_PC, ucp_Pc },
- { 2616, PT_BOOL, ucp_Prepended_Concatenation_Mark },
- { 2620, PT_PC, ucp_Pd },
- { 2623, PT_PC, ucp_Pe },
- { 2626, PT_SCX, ucp_Old_Permic },
- { 2631, PT_PC, ucp_Pf },
- { 2634, PT_SCX, ucp_Phags_Pa },
- { 2639, PT_SCX, ucp_Phags_Pa },
- { 2647, PT_SC, ucp_Inscriptional_Pahlavi },
- { 2652, PT_SCX, ucp_Psalter_Pahlavi },
- { 2657, PT_SC, ucp_Phoenician },
- { 2662, PT_SC, ucp_Phoenician },
- { 2673, PT_PC, ucp_Pi },
- { 2676, PT_SC, ucp_Miao },
- { 2681, PT_PC, ucp_Po },
- { 2684, PT_BOOL, ucp_Prepended_Concatenation_Mark },
- { 2711, PT_SC, ucp_Inscriptional_Parthian },
- { 2716, PT_PC, ucp_Ps },
- { 2719, PT_SCX, ucp_Psalter_Pahlavi },
- { 2734, PT_SCX, ucp_Coptic },
- { 2739, PT_SC, ucp_Inherited },
- { 2744, PT_BOOL, ucp_Quotation_Mark },
- { 2750, PT_BOOL, ucp_Quotation_Mark },
- { 2764, PT_BOOL, ucp_Radical },
- { 2772, PT_BOOL, ucp_Regional_Indicator },
- { 2790, PT_SC, ucp_Rejang },
- { 2797, PT_BOOL, ucp_Regional_Indicator },
- { 2800, PT_SC, ucp_Rejang },
- { 2805, PT_SCX, ucp_Hanifi_Rohingya },
- { 2810, PT_SC, ucp_Runic },
- { 2816, PT_SC, ucp_Runic },
- { 2821, PT_GC, ucp_S },
- { 2823, PT_SC, ucp_Samaritan },
- { 2833, PT_SC, ucp_Samaritan },
- { 2838, PT_SC, ucp_Old_South_Arabian },
- { 2843, PT_SC, ucp_Saurashtra },
- { 2848, PT_SC, ucp_Saurashtra },
- { 2859, PT_PC, ucp_Sc },
- { 2862, PT_BOOL, ucp_Soft_Dotted },
- { 2865, PT_BOOL, ucp_Sentence_Terminal },
- { 2882, PT_SC, ucp_SignWriting },
- { 2887, PT_SCX, ucp_Sharada },
- { 2895, PT_SC, ucp_Shavian },
- { 2903, PT_SC, ucp_Shavian },
+ { 2200, PT_SCX, ucp_Multani },
+ { 2208, PT_SCX, ucp_Myanmar },
+ { 2216, PT_SCX, ucp_Myanmar },
+ { 2221, PT_GC, ucp_N },
+ { 2223, PT_SC, ucp_Nabataean },
+ { 2233, PT_SC, ucp_Nag_Mundari },
+ { 2238, PT_SC, ucp_Nag_Mundari },
+ { 2249, PT_SCX, ucp_Nandinagari },
+ { 2254, PT_SCX, ucp_Nandinagari },
+ { 2266, PT_SC, ucp_Old_North_Arabian },
+ { 2271, PT_SC, ucp_Nabataean },
+ { 2276, PT_BOOL, ucp_Noncharacter_Code_Point },
+ { 2282, PT_PC, ucp_Nd },
+ { 2285, PT_SC, ucp_Newa },
+ { 2290, PT_SC, ucp_New_Tai_Lue },
+ { 2300, PT_SCX, ucp_Nko },
+ { 2304, PT_SCX, ucp_Nko },
+ { 2309, PT_PC, ucp_Nl },
+ { 2312, PT_PC, ucp_No },
+ { 2315, PT_BOOL, ucp_Noncharacter_Code_Point },
+ { 2337, PT_SC, ucp_Nushu },
+ { 2342, PT_SC, ucp_Nushu },
+ { 2348, PT_SC, ucp_Nyiakeng_Puachue_Hmong },
+ { 2369, PT_SC, ucp_Ogham },
+ { 2374, PT_SC, ucp_Ogham },
+ { 2380, PT_SC, ucp_Ol_Chiki },
+ { 2388, PT_SC, ucp_Ol_Chiki },
+ { 2393, PT_SC, ucp_Old_Hungarian },
+ { 2406, PT_SC, ucp_Old_Italic },
+ { 2416, PT_SC, ucp_Old_North_Arabian },
+ { 2432, PT_SCX, ucp_Old_Permic },
+ { 2442, PT_SC, ucp_Old_Persian },
+ { 2453, PT_SC, ucp_Old_Sogdian },
+ { 2464, PT_SC, ucp_Old_South_Arabian },
+ { 2480, PT_SC, ucp_Old_Turkic },
+ { 2490, PT_SCX, ucp_Old_Uyghur },
+ { 2500, PT_SCX, ucp_Oriya },
+ { 2506, PT_SC, ucp_Old_Turkic },
+ { 2511, PT_SCX, ucp_Oriya },
+ { 2516, PT_SC, ucp_Osage },
+ { 2522, PT_SC, ucp_Osage },
+ { 2527, PT_SC, ucp_Osmanya },
+ { 2532, PT_SC, ucp_Osmanya },
+ { 2540, PT_SCX, ucp_Old_Uyghur },
+ { 2545, PT_GC, ucp_P },
+ { 2547, PT_SC, ucp_Pahawh_Hmong },
+ { 2559, PT_SC, ucp_Palmyrene },
+ { 2564, PT_SC, ucp_Palmyrene },
+ { 2574, PT_BOOL, ucp_Pattern_Syntax },
+ { 2581, PT_BOOL, ucp_Pattern_Syntax },
+ { 2595, PT_BOOL, ucp_Pattern_White_Space },
+ { 2613, PT_BOOL, ucp_Pattern_White_Space },
+ { 2619, PT_SC, ucp_Pau_Cin_Hau },
+ { 2624, PT_SC, ucp_Pau_Cin_Hau },
+ { 2634, PT_PC, ucp_Pc },
+ { 2637, PT_BOOL, ucp_Prepended_Concatenation_Mark },
+ { 2641, PT_PC, ucp_Pd },
+ { 2644, PT_PC, ucp_Pe },
+ { 2647, PT_SCX, ucp_Old_Permic },
+ { 2652, PT_PC, ucp_Pf },
+ { 2655, PT_SCX, ucp_Phags_Pa },
+ { 2660, PT_SCX, ucp_Phags_Pa },
+ { 2668, PT_SC, ucp_Inscriptional_Pahlavi },
+ { 2673, PT_SCX, ucp_Psalter_Pahlavi },
+ { 2678, PT_SC, ucp_Phoenician },
+ { 2683, PT_SC, ucp_Phoenician },
+ { 2694, PT_PC, ucp_Pi },
+ { 2697, PT_SC, ucp_Miao },
+ { 2702, PT_PC, ucp_Po },
+ { 2705, PT_BOOL, ucp_Prepended_Concatenation_Mark },
+ { 2732, PT_SC, ucp_Inscriptional_Parthian },
+ { 2737, PT_PC, ucp_Ps },
+ { 2740, PT_SCX, ucp_Psalter_Pahlavi },
+ { 2755, PT_SCX, ucp_Coptic },
+ { 2760, PT_SC, ucp_Inherited },
+ { 2765, PT_BOOL, ucp_Quotation_Mark },
+ { 2771, PT_BOOL, ucp_Quotation_Mark },
+ { 2785, PT_BOOL, ucp_Radical },
+ { 2793, PT_BOOL, ucp_Regional_Indicator },
+ { 2811, PT_SC, ucp_Rejang },
+ { 2818, PT_BOOL, ucp_Regional_Indicator },
+ { 2821, PT_SC, ucp_Rejang },
+ { 2826, PT_SCX, ucp_Hanifi_Rohingya },
+ { 2831, PT_SC, ucp_Runic },
+ { 2837, PT_SC, ucp_Runic },
+ { 2842, PT_GC, ucp_S },
+ { 2844, PT_SC, ucp_Samaritan },
+ { 2854, PT_SC, ucp_Samaritan },
+ { 2859, PT_SC, ucp_Old_South_Arabian },
+ { 2864, PT_SC, ucp_Saurashtra },
+ { 2869, PT_SC, ucp_Saurashtra },
+ { 2880, PT_PC, ucp_Sc },
+ { 2883, PT_BOOL, ucp_Soft_Dotted },
+ { 2886, PT_BOOL, ucp_Sentence_Terminal },
+ { 2903, PT_SC, ucp_SignWriting },
{ 2908, PT_SCX, ucp_Sharada },
- { 2913, PT_SC, ucp_Siddham },
- { 2918, PT_SC, ucp_Siddham },
- { 2926, PT_SC, ucp_SignWriting },
- { 2938, PT_SCX, ucp_Khudawadi },
- { 2943, PT_SCX, ucp_Sinhala },
- { 2948, PT_SCX, ucp_Sinhala },
- { 2956, PT_PC, ucp_Sk },
- { 2959, PT_PC, ucp_Sm },
- { 2962, PT_PC, ucp_So },
- { 2965, PT_BOOL, ucp_Soft_Dotted },
- { 2976, PT_SCX, ucp_Sogdian },
- { 2981, PT_SCX, ucp_Sogdian },
- { 2989, PT_SC, ucp_Old_Sogdian },
- { 2994, PT_SC, ucp_Sora_Sompeng },
- { 2999, PT_SC, ucp_Sora_Sompeng },
- { 3011, PT_SC, ucp_Soyombo },
- { 3016, PT_SC, ucp_Soyombo },
- { 3024, PT_BOOL, ucp_White_Space },
- { 3030, PT_BOOL, ucp_Sentence_Terminal },
- { 3036, PT_SC, ucp_Sundanese },
- { 3041, PT_SC, ucp_Sundanese },
- { 3051, PT_SCX, ucp_Syloti_Nagri },
- { 3056, PT_SCX, ucp_Syloti_Nagri },
- { 3068, PT_SCX, ucp_Syriac },
- { 3073, PT_SCX, ucp_Syriac },
- { 3080, PT_SCX, ucp_Tagalog },
- { 3088, PT_SCX, ucp_Tagbanwa },
- { 3093, PT_SCX, ucp_Tagbanwa },
- { 3102, PT_SCX, ucp_Tai_Le },
- { 3108, PT_SC, ucp_Tai_Tham },
- { 3116, PT_SC, ucp_Tai_Viet },
- { 3124, PT_SCX, ucp_Takri },
- { 3129, PT_SCX, ucp_Takri },
- { 3135, PT_SCX, ucp_Tai_Le },
- { 3140, PT_SC, ucp_New_Tai_Lue },
- { 3145, PT_SCX, ucp_Tamil },
- { 3151, PT_SCX, ucp_Tamil },
- { 3156, PT_SC, ucp_Tangut },
- { 3161, PT_SC, ucp_Tangsa },
- { 3168, PT_SC, ucp_Tangut },
- { 3175, PT_SC, ucp_Tai_Viet },
- { 3180, PT_SCX, ucp_Telugu },
- { 3185, PT_SCX, ucp_Telugu },
- { 3192, PT_BOOL, ucp_Terminal_Punctuation },
- { 3197, PT_BOOL, ucp_Terminal_Punctuation },
- { 3217, PT_SC, ucp_Tifinagh },
- { 3222, PT_SCX, ucp_Tagalog },
- { 3227, PT_SCX, ucp_Thaana },
- { 3232, PT_SCX, ucp_Thaana },
- { 3239, PT_SC, ucp_Thai },
- { 3244, PT_SC, ucp_Tibetan },
- { 3252, PT_SC, ucp_Tibetan },
- { 3257, PT_SC, ucp_Tifinagh },
- { 3266, PT_SCX, ucp_Tirhuta },
- { 3271, PT_SCX, ucp_Tirhuta },
- { 3279, PT_SC, ucp_Tangsa },
- { 3284, PT_SC, ucp_Toto },
- { 3289, PT_SC, ucp_Ugaritic },
- { 3294, PT_SC, ucp_Ugaritic },
- { 3303, PT_BOOL, ucp_Unified_Ideograph },
- { 3309, PT_BOOL, ucp_Unified_Ideograph },
- { 3326, PT_SC, ucp_Unknown },
- { 3334, PT_BOOL, ucp_Uppercase },
- { 3340, PT_BOOL, ucp_Uppercase },
- { 3350, PT_SC, ucp_Vai },
- { 3354, PT_SC, ucp_Vai },
- { 3359, PT_BOOL, ucp_Variation_Selector },
- { 3377, PT_SC, ucp_Vithkuqi },
- { 3382, PT_SC, ucp_Vithkuqi },
- { 3391, PT_BOOL, ucp_Variation_Selector },
- { 3394, PT_SC, ucp_Wancho },
- { 3401, PT_SC, ucp_Warang_Citi },
- { 3406, PT_SC, ucp_Warang_Citi },
- { 3417, PT_SC, ucp_Wancho },
- { 3422, PT_BOOL, ucp_White_Space },
- { 3433, PT_BOOL, ucp_White_Space },
- { 3440, PT_ALNUM, 0 },
- { 3444, PT_BOOL, ucp_XID_Continue },
- { 3449, PT_BOOL, ucp_XID_Continue },
- { 3461, PT_BOOL, ucp_XID_Start },
- { 3466, PT_BOOL, ucp_XID_Start },
- { 3475, PT_SC, ucp_Old_Persian },
- { 3480, PT_PXSPACE, 0 },
- { 3484, PT_SPACE, 0 },
- { 3488, PT_SC, ucp_Cuneiform },
- { 3493, PT_UCNC, 0 },
- { 3497, PT_WORD, 0 },
- { 3501, PT_SCX, ucp_Yezidi },
- { 3506, PT_SCX, ucp_Yezidi },
- { 3513, PT_SCX, ucp_Yi },
- { 3516, PT_SCX, ucp_Yi },
- { 3521, PT_GC, ucp_Z },
- { 3523, PT_SC, ucp_Zanabazar_Square },
- { 3539, PT_SC, ucp_Zanabazar_Square },
- { 3544, PT_SC, ucp_Inherited },
- { 3549, PT_PC, ucp_Zl },
- { 3552, PT_PC, ucp_Zp },
- { 3555, PT_PC, ucp_Zs },
- { 3558, PT_SC, ucp_Common },
- { 3563, PT_SC, ucp_Unknown }
+ { 2916, PT_SC, ucp_Shavian },
+ { 2924, PT_SC, ucp_Shavian },
+ { 2929, PT_SCX, ucp_Sharada },
+ { 2934, PT_SC, ucp_Siddham },
+ { 2939, PT_SC, ucp_Siddham },
+ { 2947, PT_SC, ucp_SignWriting },
+ { 2959, PT_SCX, ucp_Khudawadi },
+ { 2964, PT_SCX, ucp_Sinhala },
+ { 2969, PT_SCX, ucp_Sinhala },
+ { 2977, PT_PC, ucp_Sk },
+ { 2980, PT_PC, ucp_Sm },
+ { 2983, PT_PC, ucp_So },
+ { 2986, PT_BOOL, ucp_Soft_Dotted },
+ { 2997, PT_SCX, ucp_Sogdian },
+ { 3002, PT_SCX, ucp_Sogdian },
+ { 3010, PT_SC, ucp_Old_Sogdian },
+ { 3015, PT_SC, ucp_Sora_Sompeng },
+ { 3020, PT_SC, ucp_Sora_Sompeng },
+ { 3032, PT_SC, ucp_Soyombo },
+ { 3037, PT_SC, ucp_Soyombo },
+ { 3045, PT_BOOL, ucp_White_Space },
+ { 3051, PT_BOOL, ucp_Sentence_Terminal },
+ { 3057, PT_SC, ucp_Sundanese },
+ { 3062, PT_SC, ucp_Sundanese },
+ { 3072, PT_SCX, ucp_Syloti_Nagri },
+ { 3077, PT_SCX, ucp_Syloti_Nagri },
+ { 3089, PT_SCX, ucp_Syriac },
+ { 3094, PT_SCX, ucp_Syriac },
+ { 3101, PT_SCX, ucp_Tagalog },
+ { 3109, PT_SCX, ucp_Tagbanwa },
+ { 3114, PT_SCX, ucp_Tagbanwa },
+ { 3123, PT_SCX, ucp_Tai_Le },
+ { 3129, PT_SC, ucp_Tai_Tham },
+ { 3137, PT_SC, ucp_Tai_Viet },
+ { 3145, PT_SCX, ucp_Takri },
+ { 3150, PT_SCX, ucp_Takri },
+ { 3156, PT_SCX, ucp_Tai_Le },
+ { 3161, PT_SC, ucp_New_Tai_Lue },
+ { 3166, PT_SCX, ucp_Tamil },
+ { 3172, PT_SCX, ucp_Tamil },
+ { 3177, PT_SC, ucp_Tangut },
+ { 3182, PT_SC, ucp_Tangsa },
+ { 3189, PT_SC, ucp_Tangut },
+ { 3196, PT_SC, ucp_Tai_Viet },
+ { 3201, PT_SCX, ucp_Telugu },
+ { 3206, PT_SCX, ucp_Telugu },
+ { 3213, PT_BOOL, ucp_Terminal_Punctuation },
+ { 3218, PT_BOOL, ucp_Terminal_Punctuation },
+ { 3238, PT_SC, ucp_Tifinagh },
+ { 3243, PT_SCX, ucp_Tagalog },
+ { 3248, PT_SCX, ucp_Thaana },
+ { 3253, PT_SCX, ucp_Thaana },
+ { 3260, PT_SC, ucp_Thai },
+ { 3265, PT_SC, ucp_Tibetan },
+ { 3273, PT_SC, ucp_Tibetan },
+ { 3278, PT_SC, ucp_Tifinagh },
+ { 3287, PT_SCX, ucp_Tirhuta },
+ { 3292, PT_SCX, ucp_Tirhuta },
+ { 3300, PT_SC, ucp_Tangsa },
+ { 3305, PT_SC, ucp_Toto },
+ { 3310, PT_SC, ucp_Ugaritic },
+ { 3315, PT_SC, ucp_Ugaritic },
+ { 3324, PT_BOOL, ucp_Unified_Ideograph },
+ { 3330, PT_BOOL, ucp_Unified_Ideograph },
+ { 3347, PT_SC, ucp_Unknown },
+ { 3355, PT_BOOL, ucp_Uppercase },
+ { 3361, PT_BOOL, ucp_Uppercase },
+ { 3371, PT_SC, ucp_Vai },
+ { 3375, PT_SC, ucp_Vai },
+ { 3380, PT_BOOL, ucp_Variation_Selector },
+ { 3398, PT_SC, ucp_Vithkuqi },
+ { 3403, PT_SC, ucp_Vithkuqi },
+ { 3412, PT_BOOL, ucp_Variation_Selector },
+ { 3415, PT_SC, ucp_Wancho },
+ { 3422, PT_SC, ucp_Warang_Citi },
+ { 3427, PT_SC, ucp_Warang_Citi },
+ { 3438, PT_SC, ucp_Wancho },
+ { 3443, PT_BOOL, ucp_White_Space },
+ { 3454, PT_BOOL, ucp_White_Space },
+ { 3461, PT_ALNUM, 0 },
+ { 3465, PT_BOOL, ucp_XID_Continue },
+ { 3470, PT_BOOL, ucp_XID_Continue },
+ { 3482, PT_BOOL, ucp_XID_Start },
+ { 3487, PT_BOOL, ucp_XID_Start },
+ { 3496, PT_SC, ucp_Old_Persian },
+ { 3501, PT_PXSPACE, 0 },
+ { 3505, PT_SPACE, 0 },
+ { 3509, PT_SC, ucp_Cuneiform },
+ { 3514, PT_UCNC, 0 },
+ { 3518, PT_WORD, 0 },
+ { 3522, PT_SCX, ucp_Yezidi },
+ { 3527, PT_SCX, ucp_Yezidi },
+ { 3534, PT_SCX, ucp_Yi },
+ { 3537, PT_SCX, ucp_Yi },
+ { 3542, PT_GC, ucp_Z },
+ { 3544, PT_SC, ucp_Zanabazar_Square },
+ { 3560, PT_SC, ucp_Zanabazar_Square },
+ { 3565, PT_SC, ucp_Inherited },
+ { 3570, PT_PC, ucp_Zl },
+ { 3573, PT_PC, ucp_Zp },
+ { 3576, PT_PC, ucp_Zs },
+ { 3579, PT_SC, ucp_Common },
+ { 3584, PT_SC, ucp_Unknown }
};
const size_t PRIV(utt_size) = sizeof(PRIV(utt)) / sizeof(ucp_type_table);
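
The wholesale renumbering in the table above follows from how PRIV(utt) works: PRIV(utt_names) is one block of NUL-terminated property names laid end to end, and each PRIV(utt) entry stores the byte offset of its name in that block. Adding "kawi" (5 bytes with its terminator) therefore pushes every later offset up by 5, and "nagm" plus "nagmundari" add another 16, which is the +21 shift visible from ucp_Old_North_Arabian onwards. A minimal stand-alone sketch of that layout — names, offsets and values here are invented purely for illustration, and the real table is searched in its alphabetical order rather than linearly:

    /* Illustrative only: a tiny packed name table in the style of
       PRIV(utt_names) / PRIV(utt).  Not part of the patch. */
    #include <stddef.h>
    #include <stdio.h>
    #include <string.h>

    static const char demo_names[] =
      "kana\0" "kannada\0" "katakana\0" "kawi\0" "kayahli\0";

    struct demo_entry { size_t name_offset; int value; };

    static const struct demo_entry demo_table[] = {
      {  0, 100 },  /* kana     */
      {  5, 101 },  /* kannada  */
      { 13, 102 },  /* katakana */
      { 22, 103 },  /* kawi -- inserting it shifts every later offset by 5 */
      { 27, 104 },  /* kayahli  */
    };

    static const struct demo_entry *demo_lookup(const char *name)
    {
      size_t i;
      for (i = 0; i < sizeof(demo_table) / sizeof(demo_table[0]); i++)
        if (strcmp(demo_names + demo_table[i].name_offset, name) == 0)
          return &demo_table[i];
      return NULL;
    }

    int main(void)
    {
      const struct demo_entry *e = demo_lookup("kawi");
      printf("kawi -> %d\n", e ? e->value : -1);
      return 0;
    }
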
diff --git a/src/3rdparty/pcre2/src/pcre2_valid_utf.c b/src/3rdparty/pcre2/src/pcre2_valid_utf.c
index e47ea78f16..de411b919e 100644
--- a/src/3rdparty/pcre2/src/pcre2_valid_utf.c
+++ b/src/3rdparty/pcre2/src/pcre2_valid_utf.c
@@ -171,7 +171,7 @@ for (p = string; length > 0; p++)
if (((d = *(++p)) & 0xc0) != 0x80)
{
- *erroroffset = (int)(p - string) - 1;
+ *erroroffset = (PCRE2_SIZE)(p - string) - 1;
return PCRE2_ERROR_UTF8_ERR6;
}
@@ -186,7 +186,7 @@ for (p = string; length > 0; p++)
case 1: if ((c & 0x3e) == 0)
{
- *erroroffset = (int)(p - string) - 1;
+ *erroroffset = (PCRE2_SIZE)(p - string) - 1;
return PCRE2_ERROR_UTF8_ERR15;
}
break;
@@ -198,17 +198,17 @@ for (p = string; length > 0; p++)
case 2:
if ((*(++p) & 0xc0) != 0x80) /* Third byte */
{
- *erroroffset = (int)(p - string) - 2;
+ *erroroffset = (PCRE2_SIZE)(p - string) - 2;
return PCRE2_ERROR_UTF8_ERR7;
}
if (c == 0xe0 && (d & 0x20) == 0)
{
- *erroroffset = (int)(p - string) - 2;
+ *erroroffset = (PCRE2_SIZE)(p - string) - 2;
return PCRE2_ERROR_UTF8_ERR16;
}
if (c == 0xed && d >= 0xa0)
{
- *erroroffset = (int)(p - string) - 2;
+ *erroroffset = (PCRE2_SIZE)(p - string) - 2;
return PCRE2_ERROR_UTF8_ERR14;
}
break;
@@ -220,22 +220,22 @@ for (p = string; length > 0; p++)
case 3:
if ((*(++p) & 0xc0) != 0x80) /* Third byte */
{
- *erroroffset = (int)(p - string) - 2;
+ *erroroffset = (PCRE2_SIZE)(p - string) - 2;
return PCRE2_ERROR_UTF8_ERR7;
}
if ((*(++p) & 0xc0) != 0x80) /* Fourth byte */
{
- *erroroffset = (int)(p - string) - 3;
+ *erroroffset = (PCRE2_SIZE)(p - string) - 3;
return PCRE2_ERROR_UTF8_ERR8;
}
if (c == 0xf0 && (d & 0x30) == 0)
{
- *erroroffset = (int)(p - string) - 3;
+ *erroroffset = (PCRE2_SIZE)(p - string) - 3;
return PCRE2_ERROR_UTF8_ERR17;
}
if (c > 0xf4 || (c == 0xf4 && d > 0x8f))
{
- *erroroffset = (int)(p - string) - 3;
+ *erroroffset = (PCRE2_SIZE)(p - string) - 3;
return PCRE2_ERROR_UTF8_ERR13;
}
break;
@@ -251,22 +251,22 @@ for (p = string; length > 0; p++)
case 4:
if ((*(++p) & 0xc0) != 0x80) /* Third byte */
{
- *erroroffset = (int)(p - string) - 2;
+ *erroroffset = (PCRE2_SIZE)(p - string) - 2;
return PCRE2_ERROR_UTF8_ERR7;
}
if ((*(++p) & 0xc0) != 0x80) /* Fourth byte */
{
- *erroroffset = (int)(p - string) - 3;
+ *erroroffset = (PCRE2_SIZE)(p - string) - 3;
return PCRE2_ERROR_UTF8_ERR8;
}
if ((*(++p) & 0xc0) != 0x80) /* Fifth byte */
{
- *erroroffset = (int)(p - string) - 4;
+ *erroroffset = (PCRE2_SIZE)(p - string) - 4;
return PCRE2_ERROR_UTF8_ERR9;
}
if (c == 0xf8 && (d & 0x38) == 0)
{
- *erroroffset = (int)(p - string) - 4;
+ *erroroffset = (PCRE2_SIZE)(p - string) - 4;
return PCRE2_ERROR_UTF8_ERR18;
}
break;
@@ -277,27 +277,27 @@ for (p = string; length > 0; p++)
case 5:
if ((*(++p) & 0xc0) != 0x80) /* Third byte */
{
- *erroroffset = (int)(p - string) - 2;
+ *erroroffset = (PCRE2_SIZE)(p - string) - 2;
return PCRE2_ERROR_UTF8_ERR7;
}
if ((*(++p) & 0xc0) != 0x80) /* Fourth byte */
{
- *erroroffset = (int)(p - string) - 3;
+ *erroroffset = (PCRE2_SIZE)(p - string) - 3;
return PCRE2_ERROR_UTF8_ERR8;
}
if ((*(++p) & 0xc0) != 0x80) /* Fifth byte */
{
- *erroroffset = (int)(p - string) - 4;
+ *erroroffset = (PCRE2_SIZE)(p - string) - 4;
return PCRE2_ERROR_UTF8_ERR9;
}
if ((*(++p) & 0xc0) != 0x80) /* Sixth byte */
{
- *erroroffset = (int)(p - string) - 5;
+ *erroroffset = (PCRE2_SIZE)(p - string) - 5;
return PCRE2_ERROR_UTF8_ERR10;
}
if (c == 0xfc && (d & 0x3c) == 0)
{
- *erroroffset = (int)(p - string) - 5;
+ *erroroffset = (PCRE2_SIZE)(p - string) - 5;
return PCRE2_ERROR_UTF8_ERR19;
}
break;
@@ -309,7 +309,7 @@ for (p = string; length > 0; p++)
if (ab > 3)
{
- *erroroffset = (int)(p - string) - ab;
+ *erroroffset = (PCRE2_SIZE)(p - string) - ab;
return (ab == 4)? PCRE2_ERROR_UTF8_ERR11 : PCRE2_ERROR_UTF8_ERR12;
}
}
@@ -340,21 +340,21 @@ for (p = string; length > 0; p++)
/* High surrogate. Must be followed by a low surrogate. */
if (length == 0)
{
- *erroroffset = p - string;
+ *erroroffset = (PCRE2_SIZE)(p - string);
return PCRE2_ERROR_UTF16_ERR1;
}
p++;
length--;
if ((*p & 0xfc00) != 0xdc00)
{
- *erroroffset = p - string - 1;
+ *erroroffset = (PCRE2_SIZE)(p - string) - 1;
return PCRE2_ERROR_UTF16_ERR2;
}
}
else
{
/* Isolated low surrogate. Always an error. */
- *erroroffset = p - string;
+ *erroroffset = (PCRE2_SIZE)(p - string);
return PCRE2_ERROR_UTF16_ERR3;
}
}
@@ -379,14 +379,14 @@ for (p = string; length > 0; length--, p++)
/* Normal UTF-32 code point. Neither high nor low surrogate. */
if (c > 0x10ffffu)
{
- *erroroffset = p - string;
+ *erroroffset = (PCRE2_SIZE)(p - string);
return PCRE2_ERROR_UTF32_ERR2;
}
}
else
{
/* A surrogate */
- *erroroffset = p - string;
+ *erroroffset = (PCRE2_SIZE)(p - string);
return PCRE2_ERROR_UTF32_ERR1;
}
}
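
Every hunk in this file makes the same change: the error offset, computed as a pointer difference, is now cast to PCRE2_SIZE — the unsigned type PCRE2 uses for string lengths and offsets — instead of being narrowed through int. A hedged sketch of how that offset surfaces through the public API; the pattern bytes are a made-up example of an invalid UTF-8 sequence, not something taken from the patch:

    #define PCRE2_CODE_UNIT_WIDTH 8
    #include <pcre2.h>
    #include <stdio.h>

    int main(void)
    {
      int errorcode;
      PCRE2_SIZE erroroffset;   /* same width the validity checker now casts to */
      /* 0xC3 starts a two-byte sequence but is followed by ')': invalid UTF-8. */
      PCRE2_SPTR pattern = (PCRE2_SPTR)"(a\xC3)";
      pcre2_code *re = pcre2_compile(pattern, PCRE2_ZERO_TERMINATED, PCRE2_UTF,
                                     &errorcode, &erroroffset, NULL);
      if (re == NULL)
        printf("compile failed at offset %lu (error %d)\n",
               (unsigned long)erroroffset, errorcode);
      else
        pcre2_code_free(re);
      return 0;
    }
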
diff --git a/src/3rdparty/pcre2/src/pcre2_xclass.c b/src/3rdparty/pcre2/src/pcre2_xclass.c
index bb57196449..5df25d2c8d 100644
--- a/src/3rdparty/pcre2/src/pcre2_xclass.c
+++ b/src/3rdparty/pcre2/src/pcre2_xclass.c
@@ -7,7 +7,7 @@ and semantics are as close as possible to those of the Perl 5 language.
Written by Philip Hazel
Original API code Copyright (c) 1997-2012 University of Cambridge
- New API code Copyright (c) 2016-2022 University of Cambridge
+ New API code Copyright (c) 2016-2023 University of Cambridge
-----------------------------------------------------------------------------
Redistribution and use in source and binary forms, with or without
@@ -133,6 +133,7 @@ while ((t = *data++) != XCL_END)
#ifdef SUPPORT_UNICODE
else /* XCL_PROP & XCL_NOTPROP */
{
+ int chartype;
const ucd_record *prop = GET_UCD(c);
BOOL isprop = t == XCL_PROP;
BOOL ok;
@@ -144,8 +145,9 @@ while ((t = *data++) != XCL_END)
break;
case PT_LAMP:
- if ((prop->chartype == ucp_Lu || prop->chartype == ucp_Ll ||
- prop->chartype == ucp_Lt) == isprop) return !negated;
+ chartype = prop->chartype;
+ if ((chartype == ucp_Lu || chartype == ucp_Ll ||
+ chartype == ucp_Lt) == isprop) return !negated;
break;
case PT_GC:
@@ -168,8 +170,9 @@ while ((t = *data++) != XCL_END)
break;
case PT_ALNUM:
- if ((PRIV(ucp_gentype)[prop->chartype] == ucp_L ||
- PRIV(ucp_gentype)[prop->chartype] == ucp_N) == isprop)
+ chartype = prop->chartype;
+ if ((PRIV(ucp_gentype)[chartype] == ucp_L ||
+ PRIV(ucp_gentype)[chartype] == ucp_N) == isprop)
return !negated;
break;
@@ -194,9 +197,10 @@ while ((t = *data++) != XCL_END)
break;
case PT_WORD:
- if ((PRIV(ucp_gentype)[prop->chartype] == ucp_L ||
- PRIV(ucp_gentype)[prop->chartype] == ucp_N || c == CHAR_UNDERSCORE)
- == isprop)
+ chartype = prop->chartype;
+ if ((PRIV(ucp_gentype)[chartype] == ucp_L ||
+ PRIV(ucp_gentype)[chartype] == ucp_N ||
+ chartype == ucp_Mn || chartype == ucp_Pc) == isprop)
return !negated;
break;
@@ -238,9 +242,10 @@ while ((t = *data++) != XCL_END)
*/
case PT_PXGRAPH:
- if ((PRIV(ucp_gentype)[prop->chartype] != ucp_Z &&
- (PRIV(ucp_gentype)[prop->chartype] != ucp_C ||
- (prop->chartype == ucp_Cf &&
+ chartype = prop->chartype;
+ if ((PRIV(ucp_gentype)[chartype] != ucp_Z &&
+ (PRIV(ucp_gentype)[chartype] != ucp_C ||
+ (chartype == ucp_Cf &&
c != 0x061c && c != 0x180e && (c < 0x2066 || c > 0x2069))
)) == isprop)
return !negated;
@@ -250,10 +255,11 @@ while ((t = *data++) != XCL_END)
not Zl and not Zp, and U+180E. */
case PT_PXPRINT:
- if ((prop->chartype != ucp_Zl &&
- prop->chartype != ucp_Zp &&
- (PRIV(ucp_gentype)[prop->chartype] != ucp_C ||
- (prop->chartype == ucp_Cf &&
+ chartype = prop->chartype;
+ if ((chartype != ucp_Zl &&
+ chartype != ucp_Zp &&
+ (PRIV(ucp_gentype)[chartype] != ucp_C ||
+ (chartype == ucp_Cf &&
c != 0x061c && (c < 0x2066 || c > 0x2069))
)) == isprop)
return !negated;
@@ -264,8 +270,21 @@ while ((t = *data++) != XCL_END)
compatibility (these are $+<=>^`|~). */
case PT_PXPUNCT:
- if ((PRIV(ucp_gentype)[prop->chartype] == ucp_P ||
- (c < 128 && PRIV(ucp_gentype)[prop->chartype] == ucp_S)) == isprop)
+ chartype = prop->chartype;
+ if ((PRIV(ucp_gentype)[chartype] == ucp_P ||
+ (c < 128 && PRIV(ucp_gentype)[chartype] == ucp_S)) == isprop)
+ return !negated;
+ break;
+
+ /* Perl has two sets of hex digits */
+
+ case PT_PXXDIGIT:
+ if (((c >= CHAR_0 && c <= CHAR_9) ||
+ (c >= CHAR_A && c <= CHAR_F) ||
+ (c >= CHAR_a && c <= CHAR_f) ||
+ (c >= 0xff10 && c <= 0xff19) || /* Fullwidth digits */
+ (c >= 0xff21 && c <= 0xff26) || /* Fullwidth letters */
+ (c >= 0xff41 && c <= 0xff46)) == isprop)
return !negated;
break;
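
The added PT_PXXDIGIT case treats the ASCII hex digits and their fullwidth forms (U+FF10–U+FF19, U+FF21–U+FF26, U+FF41–U+FF46) as one class. A speculative usage sketch, assuming the property is reached through a POSIX [[:xdigit:]] class compiled with PCRE2_UCP in the same way as the neighbouring PT_PX* cases; the subject is fullwidth 'A' (U+FF21) in UTF-8:

    #define PCRE2_CODE_UNIT_WIDTH 8
    #include <pcre2.h>
    #include <stdio.h>

    int main(void)
    {
      int errorcode, rc;
      PCRE2_SIZE erroroffset;
      PCRE2_SPTR subject = (PCRE2_SPTR)"\xEF\xBC\xA1";   /* U+FF21, fullwidth A */
      pcre2_code *re = pcre2_compile((PCRE2_SPTR)"^[[:xdigit:]]$",
                                     PCRE2_ZERO_TERMINATED, PCRE2_UTF | PCRE2_UCP,
                                     &errorcode, &erroroffset, NULL);
      if (re == NULL)
        return 1;
      pcre2_match_data *md = pcre2_match_data_create_from_pattern(re, NULL);
      rc = pcre2_match(re, subject, PCRE2_ZERO_TERMINATED, 0, 0, md, NULL);
      printf("fullwidth A %s as a hex digit\n",
             rc >= 0 ? "matches" : "does not match");
      pcre2_match_data_free(md);
      pcre2_code_free(re);
      return 0;
    }
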
diff --git a/src/3rdparty/pcre2/src/sljit/allocator_src/sljitExecAllocatorApple.c b/src/3rdparty/pcre2/src/sljit/allocator_src/sljitExecAllocatorApple.c
new file mode 100644
index 0000000000..9bd2094f46
--- /dev/null
+++ b/src/3rdparty/pcre2/src/sljit/allocator_src/sljitExecAllocatorApple.c
@@ -0,0 +1,137 @@
+/*
+ * Stack-less Just-In-Time compiler
+ *
+ * Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this list of
+ * conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice, this list
+ * of conditions and the following disclaimer in the documentation and/or other materials
+ * provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND CONTRIBUTORS ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
+ * SHALL THE COPYRIGHT HOLDER(S) OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+ * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+ * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <sys/types.h>
+#include <sys/mman.h>
+/*
+ On macOS systems, returns MAP_JIT if it is defined _and_ we're running on a
+ version where it's OK to have more than one JIT block or where MAP_JIT is
+ required.
+ On non-macOS systems, returns MAP_JIT if it is defined.
+*/
+#include <TargetConditionals.h>
+
+#if (defined(TARGET_OS_OSX) && TARGET_OS_OSX) || (TARGET_OS_MAC && !TARGET_OS_IPHONE)
+
+#if defined(SLJIT_CONFIG_X86) && SLJIT_CONFIG_X86
+
+#include <sys/utsname.h>
+#include <stdlib.h>
+
+#define SLJIT_UPDATE_WX_FLAGS(from, to, enable_exec)
+
+#ifdef MAP_JIT
+#define SLJIT_MAP_JIT (get_map_jit_flag())
+static SLJIT_INLINE int get_map_jit_flag(void)
+{
+ size_t page_size;
+ void *ptr;
+ struct utsname name;
+ static int map_jit_flag = -1;
+
+ if (map_jit_flag < 0) {
+ map_jit_flag = 0;
+ uname(&name);
+
+ /* Kernel version for 10.14.0 (Mojave) or later */
+ if (atoi(name.release) >= 18) {
+ page_size = get_page_alignment() + 1;
+ /* Only use MAP_JIT if a hardened runtime is used */
+ ptr = mmap(NULL, page_size, PROT_WRITE | PROT_EXEC,
+ MAP_PRIVATE | MAP_ANON, -1, 0);
+
+ if (ptr != MAP_FAILED)
+ munmap(ptr, page_size);
+ else
+ map_jit_flag = MAP_JIT;
+ }
+ }
+ return map_jit_flag;
+}
+#else /* !defined(MAP_JIT) */
+#define SLJIT_MAP_JIT (0)
+#endif
+
+#elif defined(SLJIT_CONFIG_ARM) && SLJIT_CONFIG_ARM
+
+#include <AvailabilityMacros.h>
+#include <pthread.h>
+
+#define SLJIT_MAP_JIT (MAP_JIT)
+#define SLJIT_UPDATE_WX_FLAGS(from, to, enable_exec) \
+ apple_update_wx_flags(enable_exec)
+
+static SLJIT_INLINE void apple_update_wx_flags(sljit_s32 enable_exec)
+{
+#if MAC_OS_X_VERSION_MIN_REQUIRED < 110000
+ if (__builtin_available(macos 11, *))
+#endif /* BigSur */
+ pthread_jit_write_protect_np(enable_exec);
+}
+
+#elif defined(SLJIT_CONFIG_PPC) && SLJIT_CONFIG_PPC
+
+#define SLJIT_MAP_JIT (0)
+#define SLJIT_UPDATE_WX_FLAGS(from, to, enable_exec)
+
+#else
+#error "Unsupported architecture"
+#endif /* SLJIT_CONFIG */
+
+#else /* !TARGET_OS_OSX */
+
+#ifdef MAP_JIT
+#define SLJIT_MAP_JIT (MAP_JIT)
+#else
+#define SLJIT_MAP_JIT (0)
+#endif
+
+#endif /* TARGET_OS_OSX */
+
+static SLJIT_INLINE void* alloc_chunk(sljit_uw size)
+{
+ void *retval;
+ int prot = PROT_READ | PROT_WRITE | PROT_EXEC;
+ int flags = MAP_PRIVATE;
+ int fd = -1;
+
+ flags |= MAP_ANON | SLJIT_MAP_JIT;
+
+ retval = mmap(NULL, size, prot, flags, fd, 0);
+ if (retval == MAP_FAILED)
+ return NULL;
+
+ SLJIT_UPDATE_WX_FLAGS(retval, (uint8_t *)retval + size, 0);
+
+ return retval;
+}
+
+static SLJIT_INLINE void free_chunk(void *chunk, sljit_uw size)
+{
+ munmap(chunk, size);
+}
+
+#include "sljitExecAllocatorCore.c"
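
The new allocator_src files follow the pattern visible above: define SLJIT_MAP_JIT and SLJIT_UPDATE_WX_FLAGS for the platform, provide alloc_chunk() and free_chunk(), then pull in the shared pool logic from sljitExecAllocatorCore.c. A rough sketch of the minimum a hypothetical POSIX-style backend would supply under that contract — only the two function names, the macros and the trailing include are taken from the diff; the file name and the rest are illustrative, and as in the real backends the sljit types are assumed to come from the including translation unit:

    /* hypothetical_exec_allocator.c -- illustration of the backend contract only */
    #include <sys/mman.h>

    #define SLJIT_MAP_JIT (0)
    #define SLJIT_UPDATE_WX_FLAGS(from, to, enable_exec)   /* no W^X toggling */

    static SLJIT_INLINE void* alloc_chunk(sljit_uw size)
    {
      void *p = mmap(NULL, size, PROT_READ | PROT_WRITE | PROT_EXEC,
                     MAP_PRIVATE | MAP_ANON, -1, 0);
      return (p == MAP_FAILED) ? NULL : p;
    }

    static SLJIT_INLINE void free_chunk(void *chunk, sljit_uw size)
    {
      munmap(chunk, size);
    }

    #include "sljitExecAllocatorCore.c"
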
diff --git a/src/3rdparty/pcre2/src/sljit/sljitExecAllocator.c b/src/3rdparty/pcre2/src/sljit/allocator_src/sljitExecAllocatorCore.c
index 92d940ddc2..4e1119bc40 100644
--- a/src/3rdparty/pcre2/src/sljit/sljitExecAllocator.c
+++ b/src/3rdparty/pcre2/src/sljit/allocator_src/sljitExecAllocatorCore.c
@@ -61,166 +61,42 @@
[ one big free block ]
*/
-/* --------------------------------------------------------------------- */
-/* System (OS) functions */
-/* --------------------------------------------------------------------- */
-
-/* 64 KByte. */
-#define CHUNK_SIZE (sljit_uw)0x10000u
-
-/*
- alloc_chunk / free_chunk :
- * allocate executable system memory chunks
- * the size is always divisible by CHUNK_SIZE
- SLJIT_ALLOCATOR_LOCK / SLJIT_ALLOCATOR_UNLOCK :
- * provided as part of sljitUtils
- * only the allocator requires this lock, sljit is fully thread safe
- as it only uses local variables
-*/
-
-#ifdef _WIN32
-#define SLJIT_UPDATE_WX_FLAGS(from, to, enable_exec)
-
-static SLJIT_INLINE void* alloc_chunk(sljit_uw size)
-{
- return VirtualAlloc(NULL, size, MEM_COMMIT | MEM_RESERVE, PAGE_EXECUTE_READWRITE);
-}
-
-static SLJIT_INLINE void free_chunk(void *chunk, sljit_uw size)
-{
- SLJIT_UNUSED_ARG(size);
- VirtualFree(chunk, 0, MEM_RELEASE);
-}
-
-#else /* POSIX */
-
-#if defined(__APPLE__) && defined(MAP_JIT)
-/*
- On macOS systems, returns MAP_JIT if it is defined _and_ we're running on a
- version where it's OK to have more than one JIT block or where MAP_JIT is
- required.
- On non-macOS systems, returns MAP_JIT if it is defined.
+/* Expected functions:
+ alloc_chunk / free_chunk :
+ * allocate executable system memory chunks
+ * the size is always divisible by CHUNK_SIZE
+ SLJIT_ALLOCATOR_LOCK / SLJIT_ALLOCATOR_UNLOCK :
+ * provided as part of sljitUtils
+ * only the allocator requires this lock, sljit is fully thread safe
+ as it only uses local variables
+
+ Supported defines:
+ SLJIT_HAS_CHUNK_HEADER - (optional) sljit_chunk_header is defined
+ SLJIT_HAS_EXECUTABLE_OFFSET - (optional) has executable offset data
+ SLJIT_UPDATE_WX_FLAGS - (optional) update WX flags
*/
-#include <TargetConditionals.h>
-#if TARGET_OS_OSX
-#if defined SLJIT_CONFIG_X86 && SLJIT_CONFIG_X86
-#ifdef MAP_ANON
-#include <sys/utsname.h>
-#include <stdlib.h>
-#define SLJIT_MAP_JIT (get_map_jit_flag())
+#ifdef SLJIT_HAS_CHUNK_HEADER
+#define CHUNK_HEADER_SIZE (sizeof(struct sljit_chunk_header))
+#else /* !SLJIT_HAS_CHUNK_HEADER */
+#define CHUNK_HEADER_SIZE 0
+#endif /* SLJIT_HAS_CHUNK_HEADER */
-static SLJIT_INLINE int get_map_jit_flag()
-{
- size_t page_size;
- void *ptr;
- struct utsname name;
- static int map_jit_flag = -1;
-
- if (map_jit_flag < 0) {
- map_jit_flag = 0;
- uname(&name);
-
- /* Kernel version for 10.14.0 (Mojave) or later */
- if (atoi(name.release) >= 18) {
- page_size = get_page_alignment() + 1;
- /* Only use MAP_JIT if a hardened runtime is used */
- ptr = mmap(NULL, page_size, PROT_WRITE | PROT_EXEC,
- MAP_PRIVATE | MAP_ANON, -1, 0);
-
- if (ptr != MAP_FAILED)
- munmap(ptr, page_size);
- else
- map_jit_flag = MAP_JIT;
- }
- }
- return map_jit_flag;
-}
-#endif /* MAP_ANON */
-#else /* !SLJIT_CONFIG_X86 */
-#if !(defined SLJIT_CONFIG_ARM && SLJIT_CONFIG_ARM)
-#error "Unsupported architecture"
-#endif /* SLJIT_CONFIG_ARM */
-#include <AvailabilityMacros.h>
-#include <pthread.h>
-
-#define SLJIT_MAP_JIT (MAP_JIT)
-#define SLJIT_UPDATE_WX_FLAGS(from, to, enable_exec) \
- apple_update_wx_flags(enable_exec)
-
-static SLJIT_INLINE void apple_update_wx_flags(sljit_s32 enable_exec)
-{
-#if MAC_OS_X_VERSION_MIN_REQUIRED >= 110000
- pthread_jit_write_protect_np(enable_exec);
-#else
-#error "Must target Big Sur or newer"
-#endif /* BigSur */
-}
-#endif /* SLJIT_CONFIG_X86 */
-#else /* !TARGET_OS_OSX */
-#define SLJIT_MAP_JIT (MAP_JIT)
-#endif /* TARGET_OS_OSX */
-#endif /* __APPLE__ && MAP_JIT */
#ifndef SLJIT_UPDATE_WX_FLAGS
#define SLJIT_UPDATE_WX_FLAGS(from, to, enable_exec)
-#endif /* !SLJIT_UPDATE_WX_FLAGS */
-#ifndef SLJIT_MAP_JIT
-#define SLJIT_MAP_JIT (0)
-#endif /* !SLJIT_MAP_JIT */
-
-static SLJIT_INLINE void* alloc_chunk(sljit_uw size)
-{
- void *retval;
- int prot = PROT_READ | PROT_WRITE | PROT_EXEC;
- int flags = MAP_PRIVATE;
- int fd = -1;
-
-#ifdef PROT_MAX
- prot |= PROT_MAX(prot);
-#endif
-
-#ifdef MAP_ANON
- flags |= MAP_ANON | SLJIT_MAP_JIT;
-#else /* !MAP_ANON */
- if (SLJIT_UNLIKELY((dev_zero < 0) && open_dev_zero()))
- return NULL;
-
- fd = dev_zero;
-#endif /* MAP_ANON */
-
- retval = mmap(NULL, size, prot, flags, fd, 0);
- if (retval == MAP_FAILED)
- return NULL;
-
-#ifdef __FreeBSD__
- /* HardenedBSD's mmap lies, so check permissions again */
- if (mprotect(retval, size, PROT_READ | PROT_WRITE | PROT_EXEC) < 0) {
- munmap(retval, size);
- return NULL;
- }
-#endif /* FreeBSD */
+#endif /* SLJIT_UPDATE_WX_FLAGS */
- SLJIT_UPDATE_WX_FLAGS(retval, (uint8_t *)retval + size, 0);
-
- return retval;
-}
-
-static SLJIT_INLINE void free_chunk(void *chunk, sljit_uw size)
-{
- munmap(chunk, size);
-}
-
-#endif /* windows */
-
-/* --------------------------------------------------------------------- */
-/* Common functions */
-/* --------------------------------------------------------------------- */
-
-#define CHUNK_MASK (~(CHUNK_SIZE - 1))
+#ifndef CHUNK_SIZE
+/* 64 KByte if not specified. */
+#define CHUNK_SIZE (sljit_uw)0x10000
+#endif /* CHUNK_SIZE */
struct block_header {
sljit_uw size;
sljit_uw prev_size;
+#ifdef SLJIT_HAS_EXECUTABLE_OFFSET
+ sljit_sw executable_offset;
+#endif /* SLJIT_HAS_EXECUTABLE_OFFSET */
};
struct free_block {
@@ -234,8 +110,10 @@ struct free_block {
((struct block_header*)(((sljit_u8*)base) + offset))
#define AS_FREE_BLOCK(base, offset) \
((struct free_block*)(((sljit_u8*)base) + offset))
-#define MEM_START(base) ((void*)(((sljit_u8*)base) + sizeof(struct block_header)))
+#define MEM_START(base) ((void*)((base) + 1))
+#define CHUNK_MASK (~(CHUNK_SIZE - 1))
#define ALIGN_SIZE(size) (((size) + sizeof(struct block_header) + 7u) & ~(sljit_uw)7)
+#define CHUNK_EXTRA_SIZE (sizeof(struct block_header) + CHUNK_HEADER_SIZE)
static struct free_block* free_blocks;
static sljit_uw allocated_size;
@@ -273,11 +151,21 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_malloc_exec(sljit_uw size)
struct free_block *free_block;
sljit_uw chunk_size;
- SLJIT_ALLOCATOR_LOCK();
+#ifdef SLJIT_HAS_CHUNK_HEADER
+ struct sljit_chunk_header *chunk_header;
+#else /* !SLJIT_HAS_CHUNK_HEADER */
+ void *chunk_header;
+#endif /* SLJIT_HAS_CHUNK_HEADER */
+
+#ifdef SLJIT_HAS_EXECUTABLE_OFFSET
+ sljit_sw executable_offset;
+#endif /* SLJIT_HAS_EXECUTABLE_OFFSET */
+
if (size < (64 - sizeof(struct block_header)))
size = (64 - sizeof(struct block_header));
size = ALIGN_SIZE(size);
+ SLJIT_ALLOCATOR_LOCK();
free_block = free_blocks;
while (free_block) {
if (free_block->size >= size) {
@@ -289,9 +177,11 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_malloc_exec(sljit_uw size)
free_block->size = chunk_size;
header = AS_BLOCK_HEADER(free_block, chunk_size);
header->prev_size = chunk_size;
+#ifdef SLJIT_HAS_EXECUTABLE_OFFSET
+ header->executable_offset = free_block->header.executable_offset;
+#endif /* SLJIT_HAS_EXECUTABLE_OFFSET */
AS_BLOCK_HEADER(header, size)->prev_size = size;
- }
- else {
+ } else {
sljit_remove_free_block(free_block);
header = (struct block_header*)free_block;
size = chunk_size;
@@ -304,17 +194,28 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_malloc_exec(sljit_uw size)
free_block = free_block->next;
}
- chunk_size = (size + sizeof(struct block_header) + CHUNK_SIZE - 1) & CHUNK_MASK;
- header = (struct block_header*)alloc_chunk(chunk_size);
- if (!header) {
+ chunk_size = (size + CHUNK_EXTRA_SIZE + CHUNK_SIZE - 1) & CHUNK_MASK;
+
+ chunk_header = alloc_chunk(chunk_size);
+ if (!chunk_header) {
SLJIT_ALLOCATOR_UNLOCK();
return NULL;
}
- chunk_size -= sizeof(struct block_header);
+#ifdef SLJIT_HAS_EXECUTABLE_OFFSET
+ executable_offset = (sljit_sw)((sljit_u8*)chunk_header->executable - (sljit_u8*)chunk_header);
+#endif /* SLJIT_HAS_EXECUTABLE_OFFSET */
+
+ chunk_size -= CHUNK_EXTRA_SIZE;
total_size += chunk_size;
+ header = (struct block_header*)(((sljit_u8*)chunk_header) + CHUNK_HEADER_SIZE);
+
header->prev_size = 0;
+#ifdef SLJIT_HAS_EXECUTABLE_OFFSET
+ header->executable_offset = executable_offset;
+#endif /* SLJIT_HAS_EXECUTABLE_OFFSET */
+
if (chunk_size > size + 64) {
/* Cut the allocated space into a free and a used block. */
allocated_size += size;
@@ -323,10 +224,12 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_malloc_exec(sljit_uw size)
free_block = AS_FREE_BLOCK(header, size);
free_block->header.prev_size = size;
+#ifdef SLJIT_HAS_EXECUTABLE_OFFSET
+ free_block->header.executable_offset = executable_offset;
+#endif /* SLJIT_HAS_EXECUTABLE_OFFSET */
sljit_insert_free_block(free_block, chunk_size);
next_header = AS_BLOCK_HEADER(free_block, chunk_size);
- }
- else {
+ } else {
/* All space belongs to this allocation. */
allocated_size += chunk_size;
header->size = chunk_size;
@@ -334,22 +237,29 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_malloc_exec(sljit_uw size)
}
next_header->size = 1;
next_header->prev_size = chunk_size;
+#ifdef SLJIT_HAS_EXECUTABLE_OFFSET
+ next_header->executable_offset = executable_offset;
+#endif /* SLJIT_HAS_EXECUTABLE_OFFSET */
SLJIT_ALLOCATOR_UNLOCK();
return MEM_START(header);
}
-SLJIT_API_FUNC_ATTRIBUTE void sljit_free_exec(void* ptr)
+SLJIT_API_FUNC_ATTRIBUTE void sljit_free_exec(void *ptr)
{
struct block_header *header;
- struct free_block* free_block;
+ struct free_block *free_block;
SLJIT_ALLOCATOR_LOCK();
header = AS_BLOCK_HEADER(ptr, -(sljit_sw)sizeof(struct block_header));
+#ifdef SLJIT_HAS_EXECUTABLE_OFFSET
+ header = AS_BLOCK_HEADER(header, -header->executable_offset);
+#endif /* SLJIT_HAS_EXECUTABLE_OFFSET */
allocated_size -= header->size;
- /* Connecting free blocks together if possible. */
SLJIT_UPDATE_WX_FLAGS(NULL, NULL, 0);
+ /* Connecting free blocks together if possible. */
+
/* If header->prev_size == 0, free_block will equal to header.
In this case, free_block->header.size will be > 0. */
free_block = AS_FREE_BLOCK(header, -(sljit_sw)header->prev_size);
@@ -357,8 +267,7 @@ SLJIT_API_FUNC_ATTRIBUTE void sljit_free_exec(void* ptr)
free_block->size += header->size;
header = AS_BLOCK_HEADER(free_block, free_block->size);
header->prev_size = free_block->size;
- }
- else {
+ } else {
free_block = (struct free_block*)header;
sljit_insert_free_block(free_block, header->size);
}
@@ -377,7 +286,7 @@ SLJIT_API_FUNC_ATTRIBUTE void sljit_free_exec(void* ptr)
if (total_size - free_block->size > (allocated_size * 3 / 2)) {
total_size -= free_block->size;
sljit_remove_free_block(free_block);
- free_chunk(free_block, free_block->size + sizeof(struct block_header));
+ free_chunk(free_block, free_block->size + CHUNK_EXTRA_SIZE);
}
}
@@ -396,16 +305,23 @@ SLJIT_API_FUNC_ATTRIBUTE void sljit_free_unused_memory_exec(void)
free_block = free_blocks;
while (free_block) {
next_free_block = free_block->next;
- if (!free_block->header.prev_size &&
+ if (!free_block->header.prev_size &&
AS_BLOCK_HEADER(free_block, free_block->size)->size == 1) {
total_size -= free_block->size;
sljit_remove_free_block(free_block);
- free_chunk(free_block, free_block->size + sizeof(struct block_header));
+ free_chunk(free_block, free_block->size + CHUNK_EXTRA_SIZE);
}
free_block = next_free_block;
}
- SLJIT_ASSERT((total_size && free_blocks) || (!total_size && !free_blocks));
+ SLJIT_ASSERT(total_size || (!total_size && !free_blocks));
SLJIT_UPDATE_WX_FLAGS(NULL, NULL, 1);
SLJIT_ALLOCATOR_UNLOCK();
}
+
+#ifdef SLJIT_HAS_EXECUTABLE_OFFSET
+SLJIT_API_FUNC_ATTRIBUTE sljit_sw sljit_exec_offset(void *code)
+{
+ return ((struct block_header*)SLJIT_CODE_TO_PTR(code))[-1].executable_offset;
+}
+#endif /* SLJIT_HAS_EXECUTABLE_OFFSET */
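
A minimal standalone sketch of the block layout the macros above imply — assuming a 64-bit build without SLJIT_HAS_EXECUTABLE_OFFSET, so sizeof(struct block_header) is 16; this is an illustration, not part of the patch:

    /* Illustration only: restates ALIGN_SIZE/MEM_START outside of sljit,
       assuming a 64-bit target and no executable_offset field. */
    #include <stdio.h>
    #include <stdint.h>

    struct block_header { uintptr_t size; uintptr_t prev_size; };

    #define ALIGN_SIZE(s) (((s) + sizeof(struct block_header) + 7u) & ~(uintptr_t)7)
    #define MEM_START(base) ((void*)((base) + 1))

    int main(void) {
        struct block_header h[2];
        /* A 100 byte request grows to a multiple of 8 that also covers its
           header: (100 + 16 + 7) & ~7 == 120. */
        printf("ALIGN_SIZE(100) = %zu\n", (size_t)ALIGN_SIZE(100));
        /* MEM_START(header) is the first usable byte after the header. */
        printf("payload offset  = %td\n",
               (char *)MEM_START(&h[0]) - (char *)&h[0]);
        return 0;
    }
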
diff --git a/src/3rdparty/pcre2/src/sljit/allocator_src/sljitExecAllocatorFreeBSD.c b/src/3rdparty/pcre2/src/sljit/allocator_src/sljitExecAllocatorFreeBSD.c
new file mode 100644
index 0000000000..3b93a4df76
--- /dev/null
+++ b/src/3rdparty/pcre2/src/sljit/allocator_src/sljitExecAllocatorFreeBSD.c
@@ -0,0 +1,89 @@
+/*
+ * Stack-less Just-In-Time compiler
+ *
+ * Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this list of
+ * conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice, this list
+ * of conditions and the following disclaimer in the documentation and/or other materials
+ * provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND CONTRIBUTORS ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
+ * SHALL THE COPYRIGHT HOLDER(S) OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+ * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+ * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <sys/mman.h>
+#include <sys/procctl.h>
+
+#ifdef PROC_WXMAP_CTL
+static SLJIT_INLINE int sljit_is_wx_block(void)
+{
+ static int wx_block = -1;
+ if (wx_block < 0) {
+ int sljit_wx_enable = PROC_WX_MAPPINGS_PERMIT;
+ wx_block = !!procctl(P_PID, 0, PROC_WXMAP_CTL, &sljit_wx_enable);
+ }
+ return wx_block;
+}
+
+#define SLJIT_IS_WX_BLOCK sljit_is_wx_block()
+#else /* !PROC_WXMAP_CTL */
+#define SLJIT_IS_WX_BLOCK (1)
+#endif /* PROC_WXMAP_CTL */
+
+static SLJIT_INLINE void* alloc_chunk(sljit_uw size)
+{
+ void *retval;
+ int prot = PROT_READ | PROT_WRITE | PROT_EXEC;
+ int flags = MAP_PRIVATE;
+ int fd = -1;
+
+#ifdef PROT_MAX
+ prot |= PROT_MAX(prot);
+#endif
+
+#ifdef MAP_ANON
+ flags |= MAP_ANON;
+#else /* !MAP_ANON */
+ if (SLJIT_UNLIKELY((dev_zero < 0) && open_dev_zero()))
+ return NULL;
+
+ fd = dev_zero;
+#endif /* MAP_ANON */
+
+retry:
+ retval = mmap(NULL, size, prot, flags, fd, 0);
+ if (retval == MAP_FAILED) {
+ if (!SLJIT_IS_WX_BLOCK)
+ goto retry;
+
+ return NULL;
+ }
+
+ /* HardenedBSD's mmap lies, so check permissions again. */
+ if (mprotect(retval, size, PROT_READ | PROT_WRITE | PROT_EXEC) < 0) {
+ munmap(retval, size);
+ return NULL;
+ }
+
+ return retval;
+}
+
+static SLJIT_INLINE void free_chunk(void *chunk, sljit_uw size)
+{
+ munmap(chunk, size);
+}
+
+#include "sljitExecAllocatorCore.c"
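
The PROC_WXMAP_CTL probe above decides whether the RWX mmap() is worth retrying; an application that wants to know up front whether HardenedBSD-style W^X enforcement will block the allocator can issue the same procctl() call itself. A hedged sketch, FreeBSD-specific and mirroring sljit_is_wx_block (not an sljit API):

    #include <sys/procctl.h>
    #include <stdio.h>

    int main(void) {
    #ifdef PROC_WXMAP_CTL
        int enable = PROC_WX_MAPPINGS_PERMIT;
        /* A non-zero return means the kernel policy refuses W+X mappings,
           so the RWX mmap() in alloc_chunk() is expected to keep failing. */
        if (procctl(P_PID, 0, PROC_WXMAP_CTL, &enable) != 0)
            printf("W^X mappings are blocked for this process\n");
        else
            printf("W^X mappings permitted\n");
    #endif
        return 0;
    }
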
diff --git a/src/3rdparty/pcre2/src/sljit/allocator_src/sljitExecAllocatorPosix.c b/src/3rdparty/pcre2/src/sljit/allocator_src/sljitExecAllocatorPosix.c
new file mode 100644
index 0000000000..a775f5629a
--- /dev/null
+++ b/src/3rdparty/pcre2/src/sljit/allocator_src/sljitExecAllocatorPosix.c
@@ -0,0 +1,62 @@
+/*
+ * Stack-less Just-In-Time compiler
+ *
+ * Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this list of
+ * conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice, this list
+ * of conditions and the following disclaimer in the documentation and/or other materials
+ * provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND CONTRIBUTORS ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
+ * SHALL THE COPYRIGHT HOLDER(S) OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+ * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+ * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <sys/types.h>
+#include <sys/mman.h>
+
+static SLJIT_INLINE void* alloc_chunk(sljit_uw size)
+{
+ void *retval;
+ int prot = PROT_READ | PROT_WRITE | PROT_EXEC;
+ int flags = MAP_PRIVATE;
+ int fd = -1;
+
+#ifdef PROT_MAX
+ prot |= PROT_MAX(prot);
+#endif
+
+#ifdef MAP_ANON
+ flags |= MAP_ANON;
+#else /* !MAP_ANON */
+ if (SLJIT_UNLIKELY((dev_zero < 0) && open_dev_zero()))
+ return NULL;
+
+ fd = dev_zero;
+#endif /* MAP_ANON */
+
+ retval = mmap(NULL, size, prot, flags, fd, 0);
+ if (retval == MAP_FAILED)
+ return NULL;
+
+ return retval;
+}
+
+static SLJIT_INLINE void free_chunk(void *chunk, sljit_uw size)
+{
+ munmap(chunk, size);
+}
+
+#include "sljitExecAllocatorCore.c"
diff --git a/src/3rdparty/pcre2/src/sljit/allocator_src/sljitExecAllocatorWindows.c b/src/3rdparty/pcre2/src/sljit/allocator_src/sljitExecAllocatorWindows.c
new file mode 100644
index 0000000000..f152a5a2cd
--- /dev/null
+++ b/src/3rdparty/pcre2/src/sljit/allocator_src/sljitExecAllocatorWindows.c
@@ -0,0 +1,40 @@
+/*
+ * Stack-less Just-In-Time compiler
+ *
+ * Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this list of
+ * conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice, this list
+ * of conditions and the following disclaimer in the documentation and/or other materials
+ * provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND CONTRIBUTORS ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
+ * SHALL THE COPYRIGHT HOLDER(S) OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+ * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+ * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#define SLJIT_UPDATE_WX_FLAGS(from, to, enable_exec)
+
+static SLJIT_INLINE void* alloc_chunk(sljit_uw size)
+{
+ return VirtualAlloc(NULL, size, MEM_COMMIT | MEM_RESERVE, PAGE_EXECUTE_READWRITE);
+}
+
+static SLJIT_INLINE void free_chunk(void *chunk, sljit_uw size)
+{
+ SLJIT_UNUSED_ARG(size);
+ VirtualFree(chunk, 0, MEM_RELEASE);
+}
+
+#include "sljitExecAllocatorCore.c"
diff --git a/src/3rdparty/pcre2/src/sljit/allocator_src/sljitProtExecAllocatorNetBSD.c b/src/3rdparty/pcre2/src/sljit/allocator_src/sljitProtExecAllocatorNetBSD.c
new file mode 100644
index 0000000000..0b7fd57787
--- /dev/null
+++ b/src/3rdparty/pcre2/src/sljit/allocator_src/sljitProtExecAllocatorNetBSD.c
@@ -0,0 +1,72 @@
+/*
+ * Stack-less Just-In-Time compiler
+ *
+ * Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this list of
+ * conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice, this list
+ * of conditions and the following disclaimer in the documentation and/or other materials
+ * provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND CONTRIBUTORS ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
+ * SHALL THE COPYRIGHT HOLDER(S) OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+ * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+ * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#define SLJIT_HAS_CHUNK_HEADER
+#define SLJIT_HAS_EXECUTABLE_OFFSET
+
+struct sljit_chunk_header {
+ void *executable;
+};
+
+/*
+ * MAP_REMAPDUP is a NetBSD extension available since 8.0; make sure to
+ * adjust your feature macros (ex: -D_NETBSD_SOURCE) as needed
+ */
+static SLJIT_INLINE struct sljit_chunk_header* alloc_chunk(sljit_uw size)
+{
+ struct sljit_chunk_header *retval;
+
+ retval = (struct sljit_chunk_header *)mmap(NULL, size,
+ PROT_READ | PROT_WRITE | PROT_MPROTECT(PROT_EXEC),
+ MAP_ANON | MAP_SHARED, -1, 0);
+
+ if (retval == MAP_FAILED)
+ return NULL;
+
+ retval->executable = mremap(retval, size, NULL, size, MAP_REMAPDUP);
+ if (retval->executable == MAP_FAILED) {
+ munmap((void *)retval, size);
+ return NULL;
+ }
+
+ if (mprotect(retval->executable, size, PROT_READ | PROT_EXEC) == -1) {
+ munmap(retval->executable, size);
+ munmap((void *)retval, size);
+ return NULL;
+ }
+
+ return retval;
+}
+
+static SLJIT_INLINE void free_chunk(void *chunk, sljit_uw size)
+{
+ struct sljit_chunk_header *header = ((struct sljit_chunk_header *)chunk) - 1;
+
+ munmap(header->executable, size);
+ munmap((void *)header, size);
+}
+
+#include "sljitExecAllocatorCore.c"
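
The dual mapping created here is what SLJIT_HAS_EXECUTABLE_OFFSET describes: the writable view and the executable view of the same pages differ by a fixed offset that is stored in every block header and exposed through sljit_exec_offset(). A rough, non-authoritative illustration of the relationship (inside sljit the offset handling is performed by the code generator itself):

    /* Assumes the sljit headers are included; illustration only. */
    static void *executable_view(void *rw_code)
    {
        /* SLJIT_EXEC_OFFSET() is 0 unless a "prot" allocator is in use;
           adding it to the writable pointer yields the read/execute view
           of the very same bytes. */
        return (void *)((sljit_u8 *)rw_code + SLJIT_EXEC_OFFSET(rw_code));
    }
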
diff --git a/src/3rdparty/pcre2/src/sljit/allocator_src/sljitProtExecAllocatorPosix.c b/src/3rdparty/pcre2/src/sljit/allocator_src/sljitProtExecAllocatorPosix.c
new file mode 100644
index 0000000000..f7cb6c5670
--- /dev/null
+++ b/src/3rdparty/pcre2/src/sljit/allocator_src/sljitProtExecAllocatorPosix.c
@@ -0,0 +1,172 @@
+/*
+ * Stack-less Just-In-Time compiler
+ *
+ * Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this list of
+ * conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice, this list
+ * of conditions and the following disclaimer in the documentation and/or other materials
+ * provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND CONTRIBUTORS ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
+ * SHALL THE COPYRIGHT HOLDER(S) OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+ * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+ * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#define SLJIT_HAS_CHUNK_HEADER
+#define SLJIT_HAS_EXECUTABLE_OFFSET
+
+struct sljit_chunk_header {
+ void *executable;
+};
+
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <stdio.h>
+#include <string.h>
+
+#ifndef O_NOATIME
+#define O_NOATIME 0
+#endif
+
+/* this is a linux extension available since kernel 3.11 */
+#ifndef O_TMPFILE
+#define O_TMPFILE 0x404000
+#endif
+
+#ifndef _GNU_SOURCE
+char *secure_getenv(const char *name);
+int mkostemp(char *template, int flags);
+#endif
+
+static SLJIT_INLINE int create_tempfile(void)
+{
+ int fd;
+ char tmp_name[256];
+ size_t tmp_name_len = 0;
+ char *dir;
+ struct stat st;
+#if defined(SLJIT_SINGLE_THREADED) && SLJIT_SINGLE_THREADED
+ mode_t mode;
+#endif
+
+#ifdef HAVE_MEMFD_CREATE
+ /* this is a GNU extension, make sure to use -D_GNU_SOURCE */
+ fd = memfd_create("sljit", MFD_CLOEXEC);
+ if (fd != -1) {
+ fchmod(fd, 0);
+ return fd;
+ }
+#endif
+
+ dir = secure_getenv("TMPDIR");
+
+ if (dir) {
+ size_t len = strlen(dir);
+ if (len > 0 && len < sizeof(tmp_name)) {
+ if ((stat(dir, &st) == 0) && S_ISDIR(st.st_mode)) {
+ memcpy(tmp_name, dir, len + 1);
+ tmp_name_len = len;
+ }
+ }
+ }
+
+#ifdef P_tmpdir
+ if (!tmp_name_len) {
+ tmp_name_len = strlen(P_tmpdir);
+ if (tmp_name_len > 0 && tmp_name_len < sizeof(tmp_name))
+ strcpy(tmp_name, P_tmpdir);
+ }
+#endif
+ if (!tmp_name_len) {
+ strcpy(tmp_name, "/tmp");
+ tmp_name_len = 4;
+ }
+
+ SLJIT_ASSERT(tmp_name_len > 0 && tmp_name_len < sizeof(tmp_name));
+
+ if (tmp_name_len > 1 && tmp_name[tmp_name_len - 1] == '/')
+ tmp_name[--tmp_name_len] = '\0';
+
+ fd = open(tmp_name, O_TMPFILE | O_EXCL | O_RDWR | O_NOATIME | O_CLOEXEC, 0);
+ if (fd != -1)
+ return fd;
+
+ if (tmp_name_len >= sizeof(tmp_name) - 7)
+ return -1;
+
+ strcpy(tmp_name + tmp_name_len, "/XXXXXX");
+#if defined(SLJIT_SINGLE_THREADED) && SLJIT_SINGLE_THREADED
+ mode = umask(0777);
+#endif
+ fd = mkostemp(tmp_name, O_CLOEXEC | O_NOATIME);
+#if defined(SLJIT_SINGLE_THREADED) && SLJIT_SINGLE_THREADED
+ umask(mode);
+#else
+ fchmod(fd, 0);
+#endif
+
+ if (fd == -1)
+ return -1;
+
+ if (unlink(tmp_name)) {
+ close(fd);
+ return -1;
+ }
+
+ return fd;
+}
+
+static SLJIT_INLINE struct sljit_chunk_header* alloc_chunk(sljit_uw size)
+{
+ struct sljit_chunk_header *retval;
+ int fd;
+
+ fd = create_tempfile();
+ if (fd == -1)
+ return NULL;
+
+ if (ftruncate(fd, (off_t)size)) {
+ close(fd);
+ return NULL;
+ }
+
+ retval = (struct sljit_chunk_header *)mmap(NULL, size, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0);
+
+ if (retval == MAP_FAILED) {
+ close(fd);
+ return NULL;
+ }
+
+ retval->executable = mmap(NULL, size, PROT_READ | PROT_EXEC, MAP_SHARED, fd, 0);
+
+ if (retval->executable == MAP_FAILED) {
+ munmap((void *)retval, size);
+ close(fd);
+ return NULL;
+ }
+
+ close(fd);
+ return retval;
+}
+
+static SLJIT_INLINE void free_chunk(void *chunk, sljit_uw size)
+{
+ struct sljit_chunk_header *header = ((struct sljit_chunk_header *)chunk) - 1;
+
+ munmap(header->executable, size);
+ munmap((void *)header, size);
+}
+
+#include "sljitExecAllocatorCore.c"
diff --git a/src/3rdparty/pcre2/src/sljit/sljitWXExecAllocator.c b/src/3rdparty/pcre2/src/sljit/allocator_src/sljitWXExecAllocatorPosix.c
index 6893813155..36d301434a 100644
--- a/src/3rdparty/pcre2/src/sljit/sljitWXExecAllocator.c
+++ b/src/3rdparty/pcre2/src/sljit/allocator_src/sljitWXExecAllocatorPosix.c
@@ -25,8 +25,7 @@
*/
/*
- This file contains a simple W^X executable memory allocator for POSIX
- like systems and Windows
+ This file contains a simple W^X executable memory allocator
In *NIX, MAP_ANON is required (that is considered a feature) so make
sure to set the right availability macros for your system or the code
@@ -51,55 +50,41 @@
not possible.
*/
-#define SLJIT_UPDATE_WX_FLAGS(from, to, enable_exec) \
- sljit_update_wx_flags((from), (to), (enable_exec))
-
-#ifndef _WIN32
#include <sys/types.h>
#include <sys/mman.h>
-#ifdef __NetBSD__
-#define SLJIT_PROT_WX PROT_MPROTECT(PROT_EXEC)
-#define check_se_protected(ptr, size) (0)
-#else /* POSIX */
+#define SLJIT_UPDATE_WX_FLAGS(from, to, enable_exec) \
+ sljit_update_wx_flags((from), (to), (enable_exec))
+
#if !(defined SLJIT_SINGLE_THREADED && SLJIT_SINGLE_THREADED)
#include <pthread.h>
#define SLJIT_SE_LOCK() pthread_mutex_lock(&se_lock)
#define SLJIT_SE_UNLOCK() pthread_mutex_unlock(&se_lock)
+#else
+#define SLJIT_SE_LOCK()
+#define SLJIT_SE_UNLOCK()
#endif /* !SLJIT_SINGLE_THREADED */
-#define check_se_protected(ptr, size) generic_se_protected(ptr, size)
+#define SLJIT_WX_IS_BLOCK(ptr, size) generic_check_is_wx_block(ptr, size)
-static SLJIT_INLINE int generic_se_protected(void *ptr, sljit_uw size)
+static SLJIT_INLINE int generic_check_is_wx_block(void *ptr, sljit_uw size)
{
if (SLJIT_LIKELY(!mprotect(ptr, size, PROT_EXEC)))
- return mprotect(ptr, size, PROT_READ | PROT_WRITE);
+ return !!mprotect(ptr, size, PROT_READ | PROT_WRITE);
- return -1;
+ return 1;
}
-#endif /* NetBSD */
-
-#ifndef SLJIT_SE_LOCK
-#define SLJIT_SE_LOCK()
-#endif
-#ifndef SLJIT_SE_UNLOCK
-#define SLJIT_SE_UNLOCK()
-#endif
-#ifndef SLJIT_PROT_WX
-#define SLJIT_PROT_WX 0
-#endif
SLJIT_API_FUNC_ATTRIBUTE void* sljit_malloc_exec(sljit_uw size)
{
-#if !(defined SLJIT_SINGLE_THREADED && SLJIT_SINGLE_THREADED) \
- && !defined(__NetBSD__)
+#if !(defined SLJIT_SINGLE_THREADED && SLJIT_SINGLE_THREADED)
static pthread_mutex_t se_lock = PTHREAD_MUTEX_INITIALIZER;
#endif
- static int se_protected = !SLJIT_PROT_WX;
- int prot = PROT_READ | PROT_WRITE | SLJIT_PROT_WX;
+ static int wx_block = -1;
+ int prot = PROT_READ | PROT_WRITE;
sljit_uw* ptr;
- if (SLJIT_UNLIKELY(se_protected < 0))
+ if (SLJIT_UNLIKELY(wx_block > 0))
return NULL;
#ifdef PROT_MAX
@@ -112,11 +97,11 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_malloc_exec(sljit_uw size)
if (ptr == MAP_FAILED)
return NULL;
- if (SLJIT_UNLIKELY(se_protected > 0)) {
+ if (SLJIT_UNLIKELY(wx_block < 0)) {
SLJIT_SE_LOCK();
- se_protected = check_se_protected(ptr, size);
+ wx_block = SLJIT_WX_IS_BLOCK(ptr, size);
SLJIT_SE_UNLOCK();
- if (SLJIT_UNLIKELY(se_protected < 0)) {
+ if (SLJIT_UNLIKELY(wx_block)) {
munmap((void *)ptr, size);
return NULL;
}
@@ -126,7 +111,6 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_malloc_exec(sljit_uw size)
return ptr;
}
-#undef SLJIT_PROT_WX
#undef SLJIT_SE_UNLOCK
#undef SLJIT_SE_LOCK
@@ -136,7 +120,7 @@ SLJIT_API_FUNC_ATTRIBUTE void sljit_free_exec(void* ptr)
munmap((void*)start_ptr, *start_ptr);
}
-static void sljit_update_wx_flags(void *from, void *to, sljit_s32 enable_exec)
+static void sljit_update_wx_flags(void *from, void *to, int enable_exec)
{
sljit_uw page_mask = (sljit_uw)get_page_alignment();
sljit_uw start = (sljit_uw)from;
@@ -151,53 +135,6 @@ static void sljit_update_wx_flags(void *from, void *to, sljit_s32 enable_exec)
mprotect((void*)start, end - start, prot);
}
-#else /* windows */
-
-SLJIT_API_FUNC_ATTRIBUTE void* sljit_malloc_exec(sljit_uw size)
-{
- sljit_uw *ptr;
-
- size += sizeof(sljit_uw);
- ptr = (sljit_uw*)VirtualAlloc(NULL, size,
- MEM_COMMIT | MEM_RESERVE, PAGE_READWRITE);
-
- if (!ptr)
- return NULL;
-
- *ptr++ = size;
-
- return ptr;
-}
-
-SLJIT_API_FUNC_ATTRIBUTE void sljit_free_exec(void* ptr)
-{
- sljit_uw start = (sljit_uw)ptr - sizeof(sljit_uw);
-#if defined(SLJIT_DEBUG) && SLJIT_DEBUG
- sljit_uw page_mask = (sljit_uw)get_page_alignment();
-
- SLJIT_ASSERT(!(start & page_mask));
-#endif
- VirtualFree((void*)start, 0, MEM_RELEASE);
-}
-
-static void sljit_update_wx_flags(void *from, void *to, sljit_s32 enable_exec)
-{
- DWORD oldprot;
- sljit_uw page_mask = (sljit_uw)get_page_alignment();
- sljit_uw start = (sljit_uw)from;
- sljit_uw end = (sljit_uw)to;
- DWORD prot = enable_exec ? PAGE_EXECUTE : PAGE_READWRITE;
-
- SLJIT_ASSERT(start < end);
-
- start &= ~page_mask;
- end = (end + page_mask) & ~page_mask;
-
- VirtualProtect((void*)start, end - start, prot, &oldprot);
-}
-
-#endif /* !windows */
-
SLJIT_API_FUNC_ATTRIBUTE void sljit_free_unused_memory_exec(void)
{
/* This allocator does not keep unused memory for future allocations. */
diff --git a/src/3rdparty/pcre2/src/sljit/allocator_src/sljitWXExecAllocatorWindows.c b/src/3rdparty/pcre2/src/sljit/allocator_src/sljitWXExecAllocatorWindows.c
new file mode 100644
index 0000000000..a9553bd7da
--- /dev/null
+++ b/src/3rdparty/pcre2/src/sljit/allocator_src/sljitWXExecAllocatorWindows.c
@@ -0,0 +1,102 @@
+/*
+ * Stack-less Just-In-Time compiler
+ *
+ * Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this list of
+ * conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice, this list
+ * of conditions and the following disclaimer in the documentation and/or other materials
+ * provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND CONTRIBUTORS ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
+ * SHALL THE COPYRIGHT HOLDER(S) OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+ * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+ * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+/*
+ This file contains a simple W^X executable memory allocator
+
+ In *NIX, MAP_ANON is required (that is considered a feature) so make
+ sure to set the right availability macros for your system or the code
+ will fail to build.
+
+ If your system doesn't support mapping of anonymous pages (ex: IRIX) it
+ is also likely that it doesn't need this allocator and should be using
+ the standard one instead.
+
+ It allocates a separate map for each code block and may waste a lot of
+ memory, because whatever was requested will be rounded up to the page
+ size (minimum 4KB, but could be even bigger).
+
+ It changes the page permissions (RW <-> RX) as needed and therefore, if you
+ will be updating the code after it has been generated, you need to make sure
+ to block any concurrent execution; otherwise it could result in a SIGBUS,
+ which could even manifest itself at a different address than the one that
+ was being modified.
+
+ Only use if you are unable to use the regular allocator because of security
+ restrictions and adding exceptions to your application or the system are
+ not possible.
+*/
+
+#define SLJIT_UPDATE_WX_FLAGS(from, to, enable_exec) \
+ sljit_update_wx_flags((from), (to), (enable_exec))
+
+SLJIT_API_FUNC_ATTRIBUTE void* sljit_malloc_exec(sljit_uw size)
+{
+ sljit_uw *ptr;
+
+ size += sizeof(sljit_uw);
+ ptr = (sljit_uw*)VirtualAlloc(NULL, size,
+ MEM_COMMIT | MEM_RESERVE, PAGE_READWRITE);
+
+ if (!ptr)
+ return NULL;
+
+ *ptr++ = size;
+
+ return ptr;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE void sljit_free_exec(void* ptr)
+{
+ sljit_uw start = (sljit_uw)ptr - sizeof(sljit_uw);
+#if defined(SLJIT_DEBUG) && SLJIT_DEBUG
+ sljit_uw page_mask = (sljit_uw)get_page_alignment();
+
+ SLJIT_ASSERT(!(start & page_mask));
+#endif
+ VirtualFree((void*)start, 0, MEM_RELEASE);
+}
+
+static void sljit_update_wx_flags(void *from, void *to, sljit_s32 enable_exec)
+{
+ DWORD oldprot;
+ sljit_uw page_mask = (sljit_uw)get_page_alignment();
+ sljit_uw start = (sljit_uw)from;
+ sljit_uw end = (sljit_uw)to;
+ DWORD prot = enable_exec ? PAGE_EXECUTE : PAGE_READWRITE;
+
+ SLJIT_ASSERT(start < end);
+
+ start &= ~page_mask;
+ end = (end + page_mask) & ~page_mask;
+
+ VirtualProtect((void*)start, end - start, prot, &oldprot);
+}
+
+SLJIT_API_FUNC_ATTRIBUTE void sljit_free_unused_memory_exec(void)
+{
+ /* This allocator does not keep unused memory for future allocations. */
+}
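
The contract spelled out in the comment above translates into a simple sequence for whoever drives the allocator: the block starts out writable, is flipped to executable once the code has been emitted, and must be flipped back (with no thread executing it) before it is modified again. A hedged sketch — generate_code, code_len and run_code are placeholders, and sljit's own compiler performs the equivalent steps internally:

    /* Assumes the sljit headers are included; illustrates the W^X sequence
       only, not a drop-in snippet. */
    void *code = sljit_malloc_exec(code_len);       /* RW on return */
    generate_code(code, code_len);                  /* emit machine code */
    SLJIT_UPDATE_WX_FLAGS(code, (sljit_u8 *)code + code_len, 1); /* RW -> RX */
    run_code(code);                                 /* safe to execute now */
    SLJIT_UPDATE_WX_FLAGS(code, (sljit_u8 *)code + code_len, 0); /* RX -> RW */
    sljit_free_exec(code);
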
diff --git a/src/3rdparty/pcre2/src/sljit/sljitConfig.h b/src/3rdparty/pcre2/src/sljit/sljitConfig.h
index 5fba7aa638..364c8bb788 100644
--- a/src/3rdparty/pcre2/src/sljit/sljitConfig.h
+++ b/src/3rdparty/pcre2/src/sljit/sljitConfig.h
@@ -39,28 +39,6 @@ extern "C" {
*/
/* --------------------------------------------------------------------- */
-/* Architecture */
-/* --------------------------------------------------------------------- */
-
-/* Architecture selection. */
-/* #define SLJIT_CONFIG_X86_32 1 */
-/* #define SLJIT_CONFIG_X86_64 1 */
-/* #define SLJIT_CONFIG_ARM_V5 1 */
-/* #define SLJIT_CONFIG_ARM_V7 1 */
-/* #define SLJIT_CONFIG_ARM_THUMB2 1 */
-/* #define SLJIT_CONFIG_ARM_64 1 */
-/* #define SLJIT_CONFIG_PPC_32 1 */
-/* #define SLJIT_CONFIG_PPC_64 1 */
-/* #define SLJIT_CONFIG_MIPS_32 1 */
-/* #define SLJIT_CONFIG_MIPS_64 1 */
-/* #define SLJIT_CONFIG_RISCV_32 1 */
-/* #define SLJIT_CONFIG_RISCV_64 1 */
-/* #define SLJIT_CONFIG_S390X 1 */
-
-/* #define SLJIT_CONFIG_AUTO 1 */
-/* #define SLJIT_CONFIG_UNSUPPORTED 1 */
-
-/* --------------------------------------------------------------------- */
/* Utilities */
/* --------------------------------------------------------------------- */
@@ -96,7 +74,9 @@ extern "C" {
/* Executable code allocation:
If SLJIT_EXECUTABLE_ALLOCATOR is not defined, the application should
- define SLJIT_MALLOC_EXEC, SLJIT_FREE_EXEC, and SLJIT_EXEC_OFFSET. */
+ define SLJIT_MALLOC_EXEC and SLJIT_FREE_EXEC.
+ Optionally, depending on the implementation used for the allocator,
+ SLJIT_EXEC_OFFSET and SLJIT_UPDATE_WX_FLAGS might also be needed. */
#ifndef SLJIT_EXECUTABLE_ALLOCATOR
/* Enabled by default. */
#define SLJIT_EXECUTABLE_ALLOCATOR 1
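
A hedged sketch of what such an application-provided replacement could look like — my_jit_alloc/my_jit_free are placeholders, and the exact macro parameter lists should be checked against sljitConfigInternal.h rather than taken from here:

    /* Application configuration, before the sljit headers are included.
       Leave SLJIT_EXECUTABLE_ALLOCATOR undefined to disable the built-in one. */
    #define SLJIT_MALLOC_EXEC(size, exec_allocator_data) my_jit_alloc(size)
    #define SLJIT_FREE_EXEC(ptr, exec_allocator_data)    my_jit_free(ptr)
    /* Only needed when the replacement keeps separate RW and RX views: */
    /* #define SLJIT_EXEC_OFFSET(ptr) my_jit_exec_offset(ptr) */
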
diff --git a/src/3rdparty/pcre2/src/sljit/sljitConfigCPU.h b/src/3rdparty/pcre2/src/sljit/sljitConfigCPU.h
new file mode 100644
index 0000000000..2720bdab0b
--- /dev/null
+++ b/src/3rdparty/pcre2/src/sljit/sljitConfigCPU.h
@@ -0,0 +1,188 @@
+/*
+ * Stack-less Just-In-Time compiler
+ *
+ * Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this list of
+ * conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice, this list
+ * of conditions and the following disclaimer in the documentation and/or other materials
+ * provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND CONTRIBUTORS ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
+ * SHALL THE COPYRIGHT HOLDER(S) OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+ * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+ * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef SLJIT_CONFIG_CPU_H_
+#define SLJIT_CONFIG_CPU_H_
+
+/* --------------------------------------------------------------------- */
+/* Architecture */
+/* --------------------------------------------------------------------- */
+
+/* Architecture selection. */
+/* #define SLJIT_CONFIG_X86_32 1 */
+/* #define SLJIT_CONFIG_X86_64 1 */
+/* #define SLJIT_CONFIG_ARM_V6 1 */
+/* #define SLJIT_CONFIG_ARM_V7 1 */
+/* #define SLJIT_CONFIG_ARM_THUMB2 1 */
+/* #define SLJIT_CONFIG_ARM_64 1 */
+/* #define SLJIT_CONFIG_PPC_32 1 */
+/* #define SLJIT_CONFIG_PPC_64 1 */
+/* #define SLJIT_CONFIG_MIPS_32 1 */
+/* #define SLJIT_CONFIG_MIPS_64 1 */
+/* #define SLJIT_CONFIG_RISCV_32 1 */
+/* #define SLJIT_CONFIG_RISCV_64 1 */
+/* #define SLJIT_CONFIG_S390X 1 */
+/* #define SLJIT_CONFIG_LOONGARCH_64 */
+
+/* #define SLJIT_CONFIG_AUTO 1 */
+/* #define SLJIT_CONFIG_UNSUPPORTED 1 */
+
+/*****************/
+/* Sanity check. */
+/*****************/
+
+#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32) \
+ + (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) \
+ + (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6) \
+ + (defined SLJIT_CONFIG_ARM_V7 && SLJIT_CONFIG_ARM_V7) \
+ + (defined SLJIT_CONFIG_ARM_THUMB2 && SLJIT_CONFIG_ARM_THUMB2) \
+ + (defined SLJIT_CONFIG_ARM_64 && SLJIT_CONFIG_ARM_64) \
+ + (defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32) \
+ + (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64) \
+ + (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32) \
+ + (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64) \
+ + (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32) \
+ + (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64) \
+ + (defined SLJIT_CONFIG_S390X && SLJIT_CONFIG_S390X) \
+ + (defined SLJIT_CONFIG_LOONGARCH_64 && SLJIT_CONFIG_LOONGARCH_64) \
+ + (defined SLJIT_CONFIG_AUTO && SLJIT_CONFIG_AUTO) \
+ + (defined SLJIT_CONFIG_UNSUPPORTED && SLJIT_CONFIG_UNSUPPORTED) >= 2
+#error "Multiple architectures are selected"
+#endif
+
+#if !(defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32) \
+ && !(defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) \
+ && !(defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6) \
+ && !(defined SLJIT_CONFIG_ARM_V7 && SLJIT_CONFIG_ARM_V7) \
+ && !(defined SLJIT_CONFIG_ARM_THUMB2 && SLJIT_CONFIG_ARM_THUMB2) \
+ && !(defined SLJIT_CONFIG_ARM_64 && SLJIT_CONFIG_ARM_64) \
+ && !(defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32) \
+ && !(defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64) \
+ && !(defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32) \
+ && !(defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64) \
+ && !(defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32) \
+ && !(defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64) \
+ && !(defined SLJIT_CONFIG_S390X && SLJIT_CONFIG_S390X) \
+ && !(defined SLJIT_CONFIG_LOONGARCH_64 && SLJIT_CONFIG_LOONGARCH_64) \
+ && !(defined SLJIT_CONFIG_UNSUPPORTED && SLJIT_CONFIG_UNSUPPORTED) \
+ && !(defined SLJIT_CONFIG_AUTO && SLJIT_CONFIG_AUTO)
+#if defined SLJIT_CONFIG_AUTO && !SLJIT_CONFIG_AUTO
+#error "An architecture must be selected"
+#else /* SLJIT_CONFIG_AUTO */
+#define SLJIT_CONFIG_AUTO 1
+#endif /* !SLJIT_CONFIG_AUTO */
+#endif /* !SLJIT_CONFIG */
+
+/********************************************************/
+/* Automatic CPU detection (requires compiler support). */
+/********************************************************/
+
+#if (defined SLJIT_CONFIG_AUTO && SLJIT_CONFIG_AUTO)
+#ifndef _WIN32
+
+#if defined(__i386__) || defined(__i386)
+#define SLJIT_CONFIG_X86_32 1
+#elif defined(__x86_64__)
+#define SLJIT_CONFIG_X86_64 1
+#elif defined(__aarch64__)
+#define SLJIT_CONFIG_ARM_64 1
+#elif defined(__thumb2__)
+#define SLJIT_CONFIG_ARM_THUMB2 1
+#elif (defined(__ARM_ARCH) && __ARM_ARCH >= 7) || \
+ ((defined(__ARM_ARCH_7__) || defined(__ARM_ARCH_7A__) || defined(__ARM_ARCH_7R__) || defined(__ARM_ARCH_7S__)) \
+ || (defined(__ARM_ARCH_8A__) || defined(__ARM_ARCH_8R__)) \
+ || (defined(__ARM_ARCH_9A__)))
+#define SLJIT_CONFIG_ARM_V7 1
+#elif defined(__arm__) || defined (__ARM__)
+#define SLJIT_CONFIG_ARM_V6 1
+#elif defined(__ppc64__) || defined(__powerpc64__) || (defined(_ARCH_PPC64) && defined(__64BIT__)) || (defined(_POWER) && defined(__64BIT__))
+#define SLJIT_CONFIG_PPC_64 1
+#elif defined(__ppc__) || defined(__powerpc__) || defined(_ARCH_PPC) || defined(_ARCH_PWR) || defined(_ARCH_PWR2) || defined(_POWER)
+#define SLJIT_CONFIG_PPC_32 1
+#elif defined(__mips__) && !defined(_LP64)
+#define SLJIT_CONFIG_MIPS_32 1
+#elif defined(__mips64)
+#define SLJIT_CONFIG_MIPS_64 1
+#elif defined (__riscv_xlen) && (__riscv_xlen == 32)
+#define SLJIT_CONFIG_RISCV_32 1
+#elif defined (__riscv_xlen) && (__riscv_xlen == 64)
+#define SLJIT_CONFIG_RISCV_64 1
+#elif defined (__loongarch_lp64)
+#define SLJIT_CONFIG_LOONGARCH_64 1
+#elif defined(__s390x__)
+#define SLJIT_CONFIG_S390X 1
+#else
+/* Unsupported architecture */
+#define SLJIT_CONFIG_UNSUPPORTED 1
+#endif
+
+#else /* _WIN32 */
+
+#if defined(_M_X64) || defined(__x86_64__)
+#define SLJIT_CONFIG_X86_64 1
+#elif (defined(_M_ARM) && _M_ARM >= 7 && defined(_M_ARMT)) || defined(__thumb2__)
+#define SLJIT_CONFIG_ARM_THUMB2 1
+#elif (defined(_M_ARM) && _M_ARM >= 7)
+#define SLJIT_CONFIG_ARM_V7 1
+#elif defined(_ARM_)
+#define SLJIT_CONFIG_ARM_V6 1
+#elif defined(_M_ARM64) || defined(__aarch64__)
+#define SLJIT_CONFIG_ARM_64 1
+#else
+#define SLJIT_CONFIG_X86_32 1
+#endif
+
+#endif /* !_WIN32 */
+#endif /* SLJIT_CONFIG_AUTO */
+
+#if (defined SLJIT_CONFIG_UNSUPPORTED && SLJIT_CONFIG_UNSUPPORTED)
+#undef SLJIT_EXECUTABLE_ALLOCATOR
+#endif /* SLJIT_CONFIG_UNSUPPORTED */
+
+/******************************/
+/* CPU family type detection. */
+/******************************/
+
+#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6) || (defined SLJIT_CONFIG_ARM_V7 && SLJIT_CONFIG_ARM_V7) \
+ || (defined SLJIT_CONFIG_ARM_THUMB2 && SLJIT_CONFIG_ARM_THUMB2)
+#define SLJIT_CONFIG_ARM_32 1
+#endif
+
+#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32) || (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
+#define SLJIT_CONFIG_X86 1
+#elif (defined SLJIT_CONFIG_ARM_32 && SLJIT_CONFIG_ARM_32) || (defined SLJIT_CONFIG_ARM_64 && SLJIT_CONFIG_ARM_64)
+#define SLJIT_CONFIG_ARM 1
+#elif (defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32) || (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
+#define SLJIT_CONFIG_PPC 1
+#elif (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32) || (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
+#define SLJIT_CONFIG_MIPS 1
+#elif (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32) || (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
+#define SLJIT_CONFIG_RISCV 1
+#elif (defined SLJIT_CONFIG_LOONGARCH_64 && SLJIT_CONFIG_LOONGARCH_64)
+#define SLJIT_CONFIG_LOONGARCH 1
+#endif
+
+#endif /* SLJIT_CONFIG_CPU_H_ */
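
When the automatic detection in this header is not wanted, one of the SLJIT_CONFIG_* macros listed at the top can simply be set to 1 before the header is seen; a minimal sketch:

    /* Either on the compiler command line ...      */
    /*     cc -DSLJIT_CONFIG_RISCV_64=1 ...         */
    /* ... or in code, ahead of the sljit includes: */
    #define SLJIT_CONFIG_RISCV_64 1
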
diff --git a/src/3rdparty/pcre2/src/sljit/sljitConfigInternal.h b/src/3rdparty/pcre2/src/sljit/sljitConfigInternal.h
index cd3ce69734..de06dd8e0c 100644
--- a/src/3rdparty/pcre2/src/sljit/sljitConfigInternal.h
+++ b/src/3rdparty/pcre2/src/sljit/sljitConfigInternal.h
@@ -49,8 +49,8 @@ extern "C" {
sljit_s16, sljit_u16 : signed and unsigned 16 bit integer type
sljit_s32, sljit_u32 : signed and unsigned 32 bit integer type
sljit_sw, sljit_uw : signed and unsigned machine word, enough to store a pointer
- sljit_p : unsgined pointer value (usually the same as sljit_uw, but
- some 64 bit ABIs may use 32 bit pointers)
+ sljit_sp, sljit_up : signed and unsigned pointer value (usually the same as
+ sljit_uw, but some 64 bit ABIs may use 32 bit pointers)
sljit_f32 : 32 bit single precision floating point value
sljit_f64 : 64 bit double precision floating point value
@@ -61,6 +61,8 @@ extern "C" {
SLJIT_BIG_ENDIAN : big endian architecture
SLJIT_UNALIGNED : unaligned memory accesses for non-fpu operations are supported
SLJIT_FPU_UNALIGNED : unaligned memory accesses for fpu operations are supported
+ SLJIT_MASKED_SHIFT : all word shifts are always masked
+ SLJIT_MASKED_SHIFT32 : all 32 bit shifts are always masked
SLJIT_INDIRECT_CALL : see SLJIT_FUNC_ADDR() for more information
Constants:
@@ -70,6 +72,8 @@ extern "C" {
SLJIT_NUMBER_OF_FLOAT_REGISTERS : number of available floating point registers
SLJIT_NUMBER_OF_SCRATCH_FLOAT_REGISTERS : number of available floating point scratch registers
SLJIT_NUMBER_OF_SAVED_FLOAT_REGISTERS : number of available floating point saved registers
+ SLJIT_NUMBER_OF_TEMPORARY_REGISTERS : number of available temporary registers
+ SLJIT_NUMBER_OF_TEMPORARY_FLOAT_REGISTERS : number of available temporary floating point registers
SLJIT_WORD_SHIFT : the shift required to apply when accessing a sljit_sw/sljit_uw array by index
SLJIT_F32_SHIFT : the shift required to apply when accessing
a single precision floating point array by index
@@ -79,141 +83,31 @@ extern "C" {
the scratch register index of ecx is stored in this variable
SLJIT_LOCALS_OFFSET : local space starting offset (SLJIT_SP + SLJIT_LOCALS_OFFSET)
SLJIT_RETURN_ADDRESS_OFFSET : a return instruction always adds this offset to the return address
+ SLJIT_CONV_MAX_FLOAT : result when a floating point value is converted to integer
+ and the floating point value is higher than the maximum integer value
+ (possible values: SLJIT_CONV_RESULT_MAX_INT or SLJIT_CONV_RESULT_MIN_INT)
+ SLJIT_CONV_MIN_FLOAT : result when a floating point value is converted to integer
+ and the floating point value is lower than the minimum integer value
+ (possible values: SLJIT_CONV_RESULT_MAX_INT or SLJIT_CONV_RESULT_MIN_INT)
+ SLJIT_CONV_NAN_FLOAT : result when a NaN floating point value is converted to integer
+ (possible values: SLJIT_CONV_RESULT_MAX_INT, SLJIT_CONV_RESULT_MIN_INT,
+ or SLJIT_CONV_RESULT_ZERO)
Other macros:
+ SLJIT_TMP_R0 .. R9 : accessing temporary registers
+ SLJIT_TMP_R(i) : accessing temporary registers
+ SLJIT_TMP_FR0 .. FR9 : accessing temporary floating point registers
+ SLJIT_TMP_FR(i) : accessing temporary floating point registers
+ SLJIT_TMP_DEST_REG : a temporary register for results
+ SLJIT_TMP_MEM_REG : a temporary base register for accessing memory
+ (can be the same as SLJIT_TMP_DEST_REG)
+ SLJIT_TMP_DEST_FREG : a temporary register for float results
SLJIT_FUNC : calling convention attribute for both calling JIT from C and C calling back from JIT
SLJIT_W(number) : defining 64 bit constants on 64 bit architectures (platform independent helper)
+ SLJIT_F64_SECOND(reg) : provides the register index of the second 32 bit part of a 64 bit
+ floating point register when SLJIT_HAS_F64_AS_F32_PAIR returns non-zero
*/
-/*****************/
-/* Sanity check. */
-/*****************/
-
-#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32) \
- + (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) \
- + (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5) \
- + (defined SLJIT_CONFIG_ARM_V7 && SLJIT_CONFIG_ARM_V7) \
- + (defined SLJIT_CONFIG_ARM_THUMB2 && SLJIT_CONFIG_ARM_THUMB2) \
- + (defined SLJIT_CONFIG_ARM_64 && SLJIT_CONFIG_ARM_64) \
- + (defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32) \
- + (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64) \
- + (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32) \
- + (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64) \
- + (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32) \
- + (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64) \
- + (defined SLJIT_CONFIG_S390X && SLJIT_CONFIG_S390X) \
- + (defined SLJIT_CONFIG_AUTO && SLJIT_CONFIG_AUTO) \
- + (defined SLJIT_CONFIG_UNSUPPORTED && SLJIT_CONFIG_UNSUPPORTED) >= 2
-#error "Multiple architectures are selected"
-#endif
-
-#if !(defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32) \
- && !(defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) \
- && !(defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5) \
- && !(defined SLJIT_CONFIG_ARM_V7 && SLJIT_CONFIG_ARM_V7) \
- && !(defined SLJIT_CONFIG_ARM_THUMB2 && SLJIT_CONFIG_ARM_THUMB2) \
- && !(defined SLJIT_CONFIG_ARM_64 && SLJIT_CONFIG_ARM_64) \
- && !(defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32) \
- && !(defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64) \
- && !(defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32) \
- && !(defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64) \
- && !(defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32) \
- && !(defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64) \
- && !(defined SLJIT_CONFIG_S390X && SLJIT_CONFIG_S390X) \
- && !(defined SLJIT_CONFIG_UNSUPPORTED && SLJIT_CONFIG_UNSUPPORTED) \
- && !(defined SLJIT_CONFIG_AUTO && SLJIT_CONFIG_AUTO)
-#if defined SLJIT_CONFIG_AUTO && !SLJIT_CONFIG_AUTO
-#error "An architecture must be selected"
-#else /* SLJIT_CONFIG_AUTO */
-#define SLJIT_CONFIG_AUTO 1
-#endif /* !SLJIT_CONFIG_AUTO */
-#endif /* !SLJIT_CONFIG */
-
-/********************************************************/
-/* Automatic CPU detection (requires compiler support). */
-/********************************************************/
-
-#if (defined SLJIT_CONFIG_AUTO && SLJIT_CONFIG_AUTO)
-
-#ifndef _WIN32
-
-#if defined(__i386__) || defined(__i386)
-#define SLJIT_CONFIG_X86_32 1
-#elif defined(__x86_64__)
-#define SLJIT_CONFIG_X86_64 1
-#elif defined(__arm__) || defined(__ARM__)
-#ifdef __thumb2__
-#define SLJIT_CONFIG_ARM_THUMB2 1
-#elif defined(__ARM_ARCH_7__) || defined(__ARM_ARCH_7A__) || defined(__ARM_ARCH_7R__)
-#define SLJIT_CONFIG_ARM_V7 1
-#else
-#define SLJIT_CONFIG_ARM_V5 1
-#endif
-#elif defined (__aarch64__)
-#define SLJIT_CONFIG_ARM_64 1
-#elif defined(__ppc64__) || defined(__powerpc64__) || (defined(_ARCH_PPC64) && defined(__64BIT__)) || (defined(_POWER) && defined(__64BIT__))
-#define SLJIT_CONFIG_PPC_64 1
-#elif defined(__ppc__) || defined(__powerpc__) || defined(_ARCH_PPC) || defined(_ARCH_PWR) || defined(_ARCH_PWR2) || defined(_POWER)
-#define SLJIT_CONFIG_PPC_32 1
-#elif defined(__mips__) && !defined(_LP64)
-#define SLJIT_CONFIG_MIPS_32 1
-#elif defined(__mips64)
-#define SLJIT_CONFIG_MIPS_64 1
-#elif defined (__riscv_xlen) && (__riscv_xlen == 32)
-#define SLJIT_CONFIG_RISCV_32 1
-#elif defined (__riscv_xlen) && (__riscv_xlen == 64)
-#define SLJIT_CONFIG_RISCV_64 1
-#elif defined(__s390x__)
-#define SLJIT_CONFIG_S390X 1
-#else
-/* Unsupported architecture */
-#define SLJIT_CONFIG_UNSUPPORTED 1
-#endif
-
-#else /* _WIN32 */
-
-#if defined(_M_X64) || defined(__x86_64__)
-#define SLJIT_CONFIG_X86_64 1
-#elif (defined(_M_ARM) && _M_ARM >= 7 && defined(_M_ARMT)) || defined(__thumb2__)
-#define SLJIT_CONFIG_ARM_THUMB2 1
-#elif (defined(_M_ARM) && _M_ARM >= 7)
-#define SLJIT_CONFIG_ARM_V7 1
-#elif defined(_ARM_)
-#define SLJIT_CONFIG_ARM_V5 1
-#elif defined(_M_ARM64) || defined(__aarch64__)
-#define SLJIT_CONFIG_ARM_64 1
-#else
-#define SLJIT_CONFIG_X86_32 1
-#endif
-
-#endif /* !_WIN32 */
-#endif /* SLJIT_CONFIG_AUTO */
-
-#if (defined SLJIT_CONFIG_UNSUPPORTED && SLJIT_CONFIG_UNSUPPORTED)
-#undef SLJIT_EXECUTABLE_ALLOCATOR
-#endif
-
-/******************************/
-/* CPU family type detection. */
-/******************************/
-
-#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5) || (defined SLJIT_CONFIG_ARM_V7 && SLJIT_CONFIG_ARM_V7) \
- || (defined SLJIT_CONFIG_ARM_THUMB2 && SLJIT_CONFIG_ARM_THUMB2)
-#define SLJIT_CONFIG_ARM_32 1
-#endif
-
-#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32) || (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
-#define SLJIT_CONFIG_X86 1
-#elif (defined SLJIT_CONFIG_ARM_32 && SLJIT_CONFIG_ARM_32) || (defined SLJIT_CONFIG_ARM_64 && SLJIT_CONFIG_ARM_64)
-#define SLJIT_CONFIG_ARM 1
-#elif (defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32) || (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
-#define SLJIT_CONFIG_PPC 1
-#elif (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32) || (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
-#define SLJIT_CONFIG_MIPS 1
-#elif (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32) || (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
-#define SLJIT_CONFIG_RISCV 1
-#endif
-
/***********************************************************/
/* Intel Control-flow Enforcement Technology (CET) support. */
/***********************************************************/
@@ -242,23 +136,23 @@ extern "C" {
*/
#ifndef SLJIT_MALLOC
-#define SLJIT_MALLOC(size, allocator_data) malloc(size)
+#define SLJIT_MALLOC(size, allocator_data) (malloc(size))
#endif
#ifndef SLJIT_FREE
-#define SLJIT_FREE(ptr, allocator_data) free(ptr)
+#define SLJIT_FREE(ptr, allocator_data) (free(ptr))
#endif
#ifndef SLJIT_MEMCPY
-#define SLJIT_MEMCPY(dest, src, len) memcpy(dest, src, len)
+#define SLJIT_MEMCPY(dest, src, len) (memcpy(dest, src, len))
#endif
#ifndef SLJIT_MEMMOVE
-#define SLJIT_MEMMOVE(dest, src, len) memmove(dest, src, len)
+#define SLJIT_MEMMOVE(dest, src, len) (memmove(dest, src, len))
#endif
#ifndef SLJIT_ZEROMEM
-#define SLJIT_ZEROMEM(dest, len) memset(dest, 0, len)
+#define SLJIT_ZEROMEM(dest, len) (memset(dest, 0, len))
#endif
/***************************/
@@ -308,7 +202,7 @@ extern "C" {
/* Type of public API functions. */
/*********************************/
-#ifndef SLJIT_API_FUNC_ATTRIBUTE
+#ifndef SLJIT_API_FUNC_ATTRIBUTE
#if (defined SLJIT_CONFIG_STATIC && SLJIT_CONFIG_STATIC)
/* Static ABI functions. For all-in-one programs. */
@@ -328,6 +222,10 @@ extern "C" {
/* Instruction cache flush. */
/****************************/
+#ifdef __APPLE__
+#include <AvailabilityMacros.h>
+#endif
+
/*
* TODO:
*
@@ -368,7 +266,7 @@ extern "C" {
/* Not required to implement on archs with unified caches. */
#define SLJIT_CACHE_FLUSH(from, to)
-#elif defined __APPLE__
+#elif defined(__APPLE__) && MAC_OS_X_VERSION_MIN_REQUIRED >= 1050
/* Supported by all macs since Mac OS 10.5.
However, it does not work on non-jailbroken iOS devices,
@@ -433,14 +331,15 @@ typedef signed int sljit_s32;
#if (defined SLJIT_CONFIG_UNSUPPORTED && SLJIT_CONFIG_UNSUPPORTED)
/* Just to have something. */
#define SLJIT_WORD_SHIFT 0
-typedef unsigned long int sljit_uw;
-typedef long int sljit_sw;
+typedef unsigned int sljit_uw;
+typedef int sljit_sw;
#elif !(defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) \
&& !(defined SLJIT_CONFIG_ARM_64 && SLJIT_CONFIG_ARM_64) \
&& !(defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64) \
&& !(defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64) \
&& !(defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64) \
- && !(defined SLJIT_CONFIG_S390X && SLJIT_CONFIG_S390X)
+ && !(defined SLJIT_CONFIG_S390X && SLJIT_CONFIG_S390X) \
+ && !(defined SLJIT_CONFIG_LOONGARCH_64 && SLJIT_CONFIG_LOONGARCH_64)
#define SLJIT_32BIT_ARCHITECTURE 1
#define SLJIT_WORD_SHIFT 2
typedef unsigned int sljit_uw;
@@ -463,7 +362,8 @@ typedef long int sljit_sw;
#endif /* _WIN32 */
#endif
-typedef sljit_uw sljit_p;
+typedef sljit_sw sljit_sp;
+typedef sljit_uw sljit_up;
/* Floating point types. */
typedef float sljit_f32;
@@ -476,12 +376,46 @@ typedef double sljit_f64;
#define SLJIT_F32_SHIFT 2
#define SLJIT_F64_SHIFT 3
+#define SLJIT_CONV_RESULT_MAX_INT 0
+#define SLJIT_CONV_RESULT_MIN_INT 1
+#define SLJIT_CONV_RESULT_ZERO 2
+
+#if (defined SLJIT_CONFIG_X86 && SLJIT_CONFIG_X86)
+#define SLJIT_CONV_MAX_FLOAT SLJIT_CONV_RESULT_MIN_INT
+#define SLJIT_CONV_MIN_FLOAT SLJIT_CONV_RESULT_MIN_INT
+#define SLJIT_CONV_NAN_FLOAT SLJIT_CONV_RESULT_MIN_INT
+#elif (defined SLJIT_CONFIG_ARM && SLJIT_CONFIG_ARM)
+#define SLJIT_CONV_MAX_FLOAT SLJIT_CONV_RESULT_MAX_INT
+#define SLJIT_CONV_MIN_FLOAT SLJIT_CONV_RESULT_MIN_INT
+#define SLJIT_CONV_NAN_FLOAT SLJIT_CONV_RESULT_ZERO
+#elif (defined SLJIT_CONFIG_MIPS && SLJIT_CONFIG_MIPS)
+#define SLJIT_CONV_MAX_FLOAT SLJIT_CONV_RESULT_MAX_INT
+#define SLJIT_CONV_MIN_FLOAT SLJIT_CONV_RESULT_MAX_INT
+#define SLJIT_CONV_NAN_FLOAT SLJIT_CONV_RESULT_MAX_INT
+#elif (defined SLJIT_CONFIG_PPC && SLJIT_CONFIG_PPC)
+#define SLJIT_CONV_MAX_FLOAT SLJIT_CONV_RESULT_MAX_INT
+#define SLJIT_CONV_MIN_FLOAT SLJIT_CONV_RESULT_MIN_INT
+#define SLJIT_CONV_NAN_FLOAT SLJIT_CONV_RESULT_MIN_INT
+#elif (defined SLJIT_CONFIG_RISCV && SLJIT_CONFIG_RISCV)
+#define SLJIT_CONV_MAX_FLOAT SLJIT_CONV_RESULT_MAX_INT
+#define SLJIT_CONV_MIN_FLOAT SLJIT_CONV_RESULT_MIN_INT
+#define SLJIT_CONV_NAN_FLOAT SLJIT_CONV_RESULT_MAX_INT
+#elif (defined SLJIT_CONFIG_S390X && SLJIT_CONFIG_S390X)
+#define SLJIT_CONV_MAX_FLOAT SLJIT_CONV_RESULT_MAX_INT
+#define SLJIT_CONV_MIN_FLOAT SLJIT_CONV_RESULT_MIN_INT
+#define SLJIT_CONV_NAN_FLOAT SLJIT_CONV_RESULT_MIN_INT
+#elif (defined SLJIT_CONFIG_LOONGARCH && SLJIT_CONFIG_LOONGARCH)
+#define SLJIT_CONV_MAX_FLOAT SLJIT_CONV_RESULT_MAX_INT
+#define SLJIT_CONV_MIN_FLOAT SLJIT_CONV_RESULT_MIN_INT
+#define SLJIT_CONV_NAN_FLOAT SLJIT_CONV_RESULT_ZERO
+#else
+#error "Result for float to integer conversion is not defined"
+#endif
+
#ifndef SLJIT_W
/* Defining long constants. */
-#if (defined SLJIT_CONFIG_UNSUPPORTED && SLJIT_CONFIG_UNSUPPORTED)
-#define SLJIT_W(w) (w##l)
-#elif (defined SLJIT_64BIT_ARCHITECTURE && SLJIT_64BIT_ARCHITECTURE)
+#if (defined SLJIT_64BIT_ARCHITECTURE && SLJIT_64BIT_ARCHITECTURE)
#ifdef _WIN64
#define SLJIT_W(w) (w##ll)
#else /* !windows */
@@ -521,9 +455,10 @@ typedef double sljit_f64;
/* Auto detecting mips revision. */
#if (defined __mips_isa_rev) && (__mips_isa_rev >= 6)
#define SLJIT_MIPS_REV 6
-#elif (defined __mips_isa_rev && __mips_isa_rev >= 1) \
- || (defined __clang__ && defined _MIPS_ARCH_OCTEON) \
- || (defined __clang__ && defined _MIPS_ARCH_P5600)
+#elif defined(__mips_isa_rev) && __mips_isa_rev >= 1
+#define SLJIT_MIPS_REV __mips_isa_rev
+#elif defined(__clang__) \
+ && (defined(_MIPS_ARCH_OCTEON) || defined(_MIPS_ARCH_P5600))
/* clang either forgets to define (clang-7) __mips_isa_rev at all
* or sets it to zero (clang-8,-9) for -march=octeon (MIPS64 R2+)
* and -march=p5600 (MIPS32 R5).
@@ -562,7 +497,8 @@ typedef double sljit_f64;
|| (defined SLJIT_CONFIG_ARM_64 && SLJIT_CONFIG_ARM_64) \
|| (defined SLJIT_CONFIG_PPC && SLJIT_CONFIG_PPC) \
|| (defined SLJIT_CONFIG_RISCV && SLJIT_CONFIG_RISCV) \
- || (defined SLJIT_CONFIG_S390X && SLJIT_CONFIG_S390X)
+ || (defined SLJIT_CONFIG_S390X && SLJIT_CONFIG_S390X) \
+ || (defined SLJIT_CONFIG_LOONGARCH && SLJIT_CONFIG_LOONGARCH)
#define SLJIT_UNALIGNED 1
#endif
@@ -574,7 +510,8 @@ typedef double sljit_f64;
|| (defined SLJIT_CONFIG_ARM_64 && SLJIT_CONFIG_ARM_64) \
|| (defined SLJIT_CONFIG_PPC && SLJIT_CONFIG_PPC) \
|| (defined SLJIT_CONFIG_RISCV && SLJIT_CONFIG_RISCV) \
- || (defined SLJIT_CONFIG_S390X && SLJIT_CONFIG_S390X)
+ || (defined SLJIT_CONFIG_S390X && SLJIT_CONFIG_S390X) \
+ || (defined SLJIT_CONFIG_LOONGARCH && SLJIT_CONFIG_LOONGARCH)
#define SLJIT_FPU_UNALIGNED 1
#endif
@@ -629,14 +566,16 @@ SLJIT_API_FUNC_ATTRIBUTE void sljit_free_unused_memory_exec(void);
#endif /* SLJIT_FREE_EXEC */
#if (defined SLJIT_PROT_EXECUTABLE_ALLOCATOR && SLJIT_PROT_EXECUTABLE_ALLOCATOR)
-SLJIT_API_FUNC_ATTRIBUTE sljit_sw sljit_exec_offset(void* ptr);
-#define SLJIT_EXEC_OFFSET(ptr) sljit_exec_offset(ptr)
-#else
-#define SLJIT_EXEC_OFFSET(ptr) 0
-#endif
+SLJIT_API_FUNC_ATTRIBUTE sljit_sw sljit_exec_offset(void *code);
+#define SLJIT_EXEC_OFFSET(code) sljit_exec_offset(code)
+#endif /* SLJIT_PROT_EXECUTABLE_ALLOCATOR */
#endif /* SLJIT_EXECUTABLE_ALLOCATOR */
+#ifndef SLJIT_EXEC_OFFSET
+#define SLJIT_EXEC_OFFSET(ptr) 0
+#endif
+
/**********************************************/
/* Registers and locals offset determination. */
/**********************************************/
@@ -645,15 +584,24 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_sw sljit_exec_offset(void* ptr);
#define SLJIT_NUMBER_OF_REGISTERS 12
#define SLJIT_NUMBER_OF_SAVED_REGISTERS 7
+#define SLJIT_NUMBER_OF_TEMPORARY_REGISTERS 1
#define SLJIT_NUMBER_OF_FLOAT_REGISTERS 7
#define SLJIT_NUMBER_OF_SAVED_FLOAT_REGISTERS 0
+#define SLJIT_NUMBER_OF_TEMPORARY_FLOAT_REGISTERS 1
+#define SLJIT_TMP_DEST_REG SLJIT_TMP_R0
+#define SLJIT_TMP_MEM_REG SLJIT_TMP_R0
+#define SLJIT_TMP_DEST_FREG SLJIT_TMP_FR0
#define SLJIT_LOCALS_OFFSET_BASE (8 * SSIZE_OF(sw))
#define SLJIT_PREF_SHIFT_REG SLJIT_R2
+#define SLJIT_MASKED_SHIFT 1
+#define SLJIT_MASKED_SHIFT32 1
#elif (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
#define SLJIT_NUMBER_OF_REGISTERS 13
+#define SLJIT_NUMBER_OF_TEMPORARY_REGISTERS 2
#define SLJIT_NUMBER_OF_FLOAT_REGISTERS 15
+#define SLJIT_NUMBER_OF_TEMPORARY_FLOAT_REGISTERS 1
#ifndef _WIN64
#define SLJIT_NUMBER_OF_SAVED_REGISTERS 6
#define SLJIT_NUMBER_OF_SAVED_FLOAT_REGISTERS 0
@@ -663,38 +611,52 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_sw sljit_exec_offset(void* ptr);
#define SLJIT_NUMBER_OF_SAVED_FLOAT_REGISTERS 10
#define SLJIT_LOCALS_OFFSET_BASE (4 * SSIZE_OF(sw))
#endif /* !_WIN64 */
+#define SLJIT_TMP_DEST_REG SLJIT_TMP_R0
+#define SLJIT_TMP_MEM_REG SLJIT_TMP_R0
+#define SLJIT_TMP_DEST_FREG SLJIT_TMP_FR0
#define SLJIT_PREF_SHIFT_REG SLJIT_R3
+#define SLJIT_MASKED_SHIFT 1
+#define SLJIT_MASKED_SHIFT32 1
-#elif (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5) || (defined SLJIT_CONFIG_ARM_V7 && SLJIT_CONFIG_ARM_V7)
-
-#define SLJIT_NUMBER_OF_REGISTERS 12
-#define SLJIT_NUMBER_OF_SAVED_REGISTERS 8
-#define SLJIT_NUMBER_OF_FLOAT_REGISTERS 14
-#define SLJIT_NUMBER_OF_SAVED_FLOAT_REGISTERS 8
-#define SLJIT_LOCALS_OFFSET_BASE 0
-
-#elif (defined SLJIT_CONFIG_ARM_THUMB2 && SLJIT_CONFIG_ARM_THUMB2)
+#elif (defined SLJIT_CONFIG_ARM_32 && SLJIT_CONFIG_ARM_32)
#define SLJIT_NUMBER_OF_REGISTERS 12
#define SLJIT_NUMBER_OF_SAVED_REGISTERS 8
+#define SLJIT_NUMBER_OF_TEMPORARY_REGISTERS 2
#define SLJIT_NUMBER_OF_FLOAT_REGISTERS 14
#define SLJIT_NUMBER_OF_SAVED_FLOAT_REGISTERS 8
+#define SLJIT_NUMBER_OF_TEMPORARY_FLOAT_REGISTERS 2
+#define SLJIT_TMP_DEST_REG SLJIT_TMP_R1
+#define SLJIT_TMP_MEM_REG SLJIT_TMP_R1
+#define SLJIT_TMP_DEST_FREG SLJIT_TMP_FR0
#define SLJIT_LOCALS_OFFSET_BASE 0
#elif (defined SLJIT_CONFIG_ARM_64 && SLJIT_CONFIG_ARM_64)
#define SLJIT_NUMBER_OF_REGISTERS 26
#define SLJIT_NUMBER_OF_SAVED_REGISTERS 10
+#define SLJIT_NUMBER_OF_TEMPORARY_REGISTERS 3
#define SLJIT_NUMBER_OF_FLOAT_REGISTERS 30
#define SLJIT_NUMBER_OF_SAVED_FLOAT_REGISTERS 8
+#define SLJIT_NUMBER_OF_TEMPORARY_FLOAT_REGISTERS 2
+#define SLJIT_TMP_DEST_REG SLJIT_TMP_R0
+#define SLJIT_TMP_MEM_REG SLJIT_TMP_R0
+#define SLJIT_TMP_DEST_FREG SLJIT_TMP_FR0
#define SLJIT_LOCALS_OFFSET_BASE (2 * (sljit_s32)sizeof(sljit_sw))
+#define SLJIT_MASKED_SHIFT 1
+#define SLJIT_MASKED_SHIFT32 1
#elif (defined SLJIT_CONFIG_PPC && SLJIT_CONFIG_PPC)
#define SLJIT_NUMBER_OF_REGISTERS 23
#define SLJIT_NUMBER_OF_SAVED_REGISTERS 17
+#define SLJIT_NUMBER_OF_TEMPORARY_REGISTERS 3
#define SLJIT_NUMBER_OF_FLOAT_REGISTERS 30
#define SLJIT_NUMBER_OF_SAVED_FLOAT_REGISTERS 18
+#define SLJIT_NUMBER_OF_TEMPORARY_FLOAT_REGISTERS 2
+#define SLJIT_TMP_DEST_REG SLJIT_TMP_R1
+#define SLJIT_TMP_MEM_REG SLJIT_TMP_R1
+#define SLJIT_TMP_DEST_FREG SLJIT_TMP_FR0
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64) || (defined _AIX)
#define SLJIT_LOCALS_OFFSET_BASE ((6 + 8) * (sljit_s32)sizeof(sljit_sw))
#elif (defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32)
@@ -717,14 +679,28 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_sw sljit_exec_offset(void* ptr);
#define SLJIT_NUMBER_OF_FLOAT_REGISTERS 29
#define SLJIT_NUMBER_OF_SAVED_FLOAT_REGISTERS 8
#endif
+#define SLJIT_NUMBER_OF_TEMPORARY_REGISTERS 5
+#define SLJIT_NUMBER_OF_TEMPORARY_FLOAT_REGISTERS 3
+#define SLJIT_TMP_DEST_REG SLJIT_TMP_R1
+#define SLJIT_TMP_MEM_REG SLJIT_TMP_R1
+#define SLJIT_TMP_DEST_FREG SLJIT_TMP_FR0
+#define SLJIT_MASKED_SHIFT 1
+#define SLJIT_MASKED_SHIFT32 1
#elif (defined SLJIT_CONFIG_RISCV && SLJIT_CONFIG_RISCV)
#define SLJIT_NUMBER_OF_REGISTERS 23
#define SLJIT_NUMBER_OF_SAVED_REGISTERS 12
-#define SLJIT_LOCALS_OFFSET_BASE 0
+#define SLJIT_NUMBER_OF_TEMPORARY_REGISTERS 5
#define SLJIT_NUMBER_OF_FLOAT_REGISTERS 30
#define SLJIT_NUMBER_OF_SAVED_FLOAT_REGISTERS 12
+#define SLJIT_NUMBER_OF_TEMPORARY_FLOAT_REGISTERS 2
+#define SLJIT_TMP_DEST_REG SLJIT_TMP_R1
+#define SLJIT_TMP_MEM_REG SLJIT_TMP_R1
+#define SLJIT_TMP_DEST_FREG SLJIT_TMP_FR0
+#define SLJIT_LOCALS_OFFSET_BASE 0
+#define SLJIT_MASKED_SHIFT 1
+#define SLJIT_MASKED_SHIFT32 1
#elif (defined SLJIT_CONFIG_S390X && SLJIT_CONFIG_S390X)
@@ -751,16 +727,43 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_sw sljit_exec_offset(void* ptr);
#define SLJIT_NUMBER_OF_REGISTERS 12
#define SLJIT_NUMBER_OF_SAVED_REGISTERS 8
+#define SLJIT_NUMBER_OF_TEMPORARY_REGISTERS 3
#define SLJIT_NUMBER_OF_FLOAT_REGISTERS 15
#define SLJIT_NUMBER_OF_SAVED_FLOAT_REGISTERS 8
+#define SLJIT_NUMBER_OF_TEMPORARY_FLOAT_REGISTERS 1
+#define SLJIT_TMP_DEST_REG SLJIT_TMP_R0
+#define SLJIT_TMP_MEM_REG SLJIT_TMP_R2
+#define SLJIT_TMP_DEST_FREG SLJIT_TMP_FR0
#define SLJIT_LOCALS_OFFSET_BASE SLJIT_S390X_DEFAULT_STACK_FRAME_SIZE
+#define SLJIT_MASKED_SHIFT 1
+
+#elif (defined SLJIT_CONFIG_LOONGARCH && SLJIT_CONFIG_LOONGARCH)
+
+#define SLJIT_NUMBER_OF_REGISTERS 23
+#define SLJIT_NUMBER_OF_SAVED_REGISTERS 10
+#define SLJIT_NUMBER_OF_TEMPORARY_REGISTERS 5
+#define SLJIT_NUMBER_OF_FLOAT_REGISTERS 30
+#define SLJIT_NUMBER_OF_SAVED_FLOAT_REGISTERS 12
+#define SLJIT_NUMBER_OF_TEMPORARY_FLOAT_REGISTERS 2
+#define SLJIT_TMP_DEST_REG SLJIT_TMP_R1
+#define SLJIT_TMP_MEM_REG SLJIT_TMP_R1
+#define SLJIT_TMP_DEST_FREG SLJIT_TMP_FR0
+#define SLJIT_LOCALS_OFFSET_BASE 0
+#define SLJIT_MASKED_SHIFT 1
+#define SLJIT_MASKED_SHIFT32 1
#elif (defined SLJIT_CONFIG_UNSUPPORTED && SLJIT_CONFIG_UNSUPPORTED)
+/* Just to have something. */
#define SLJIT_NUMBER_OF_REGISTERS 0
#define SLJIT_NUMBER_OF_SAVED_REGISTERS 0
+#define SLJIT_NUMBER_OF_TEMPORARY_REGISTERS 0
#define SLJIT_NUMBER_OF_FLOAT_REGISTERS 0
#define SLJIT_NUMBER_OF_SAVED_FLOAT_REGISTERS 0
+#define SLJIT_NUMBER_OF_TEMPORARY_FLOAT_REGISTERS 0
+#define SLJIT_TMP_DEST_REG 0
+#define SLJIT_TMP_MEM_REG 0
+#define SLJIT_TMP_DEST_FREG 0
#define SLJIT_LOCALS_OFFSET_BASE 0
#endif
@@ -773,6 +776,45 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_sw sljit_exec_offset(void* ptr);
#define SLJIT_NUMBER_OF_SCRATCH_FLOAT_REGISTERS \
(SLJIT_NUMBER_OF_FLOAT_REGISTERS - SLJIT_NUMBER_OF_SAVED_FLOAT_REGISTERS)
+/**********************************/
+/* Temporary register management. */
+/**********************************/
+
+#define SLJIT_TMP_REGISTER_BASE (SLJIT_NUMBER_OF_REGISTERS + 2)
+#define SLJIT_TMP_FREGISTER_BASE (SLJIT_NUMBER_OF_FLOAT_REGISTERS + 1)
+
+/* WARNING: Accessing temporary registers is not recommended, because they
+   are also used by the JIT compiler for various computations. Using them
+   may have unexpected side effects, including incorrect operations and
+   crashes, so use them at your own risk. The machine registers themselves
+   may have limitations, e.g. the r0 register on s390x / ppc cannot be used
+   as the base address of a memory operation. */
+
+/* Temporary registers */
+#define SLJIT_TMP_R0 (SLJIT_TMP_REGISTER_BASE + 0)
+#define SLJIT_TMP_R1 (SLJIT_TMP_REGISTER_BASE + 1)
+#define SLJIT_TMP_R2 (SLJIT_TMP_REGISTER_BASE + 2)
+#define SLJIT_TMP_R3 (SLJIT_TMP_REGISTER_BASE + 3)
+#define SLJIT_TMP_R4 (SLJIT_TMP_REGISTER_BASE + 4)
+#define SLJIT_TMP_R5 (SLJIT_TMP_REGISTER_BASE + 5)
+#define SLJIT_TMP_R6 (SLJIT_TMP_REGISTER_BASE + 6)
+#define SLJIT_TMP_R7 (SLJIT_TMP_REGISTER_BASE + 7)
+#define SLJIT_TMP_R8 (SLJIT_TMP_REGISTER_BASE + 8)
+#define SLJIT_TMP_R9 (SLJIT_TMP_REGISTER_BASE + 9)
+#define SLJIT_TMP_R(i) (SLJIT_TMP_REGISTER_BASE + (i))
+
+#define SLJIT_TMP_FR0 (SLJIT_TMP_FREGISTER_BASE + 0)
+#define SLJIT_TMP_FR1 (SLJIT_TMP_FREGISTER_BASE + 1)
+#define SLJIT_TMP_FR2 (SLJIT_TMP_FREGISTER_BASE + 2)
+#define SLJIT_TMP_FR3 (SLJIT_TMP_FREGISTER_BASE + 3)
+#define SLJIT_TMP_FR4 (SLJIT_TMP_FREGISTER_BASE + 4)
+#define SLJIT_TMP_FR5 (SLJIT_TMP_FREGISTER_BASE + 5)
+#define SLJIT_TMP_FR6 (SLJIT_TMP_FREGISTER_BASE + 6)
+#define SLJIT_TMP_FR7 (SLJIT_TMP_FREGISTER_BASE + 7)
+#define SLJIT_TMP_FR8 (SLJIT_TMP_FREGISTER_BASE + 8)
+#define SLJIT_TMP_FR9 (SLJIT_TMP_FREGISTER_BASE + 9)
+#define SLJIT_TMP_FR(i) (SLJIT_TMP_FREGISTER_BASE + (i))
+
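
A worked sketch of the numbering defined above, assuming the usual sljit layout (SLJIT_R0 == 1, SLJIT_SP == SLJIT_NUMBER_OF_REGISTERS + 1): on the x86-32 configuration listed earlier, SLJIT_NUMBER_OF_REGISTERS is 12, so

    SLJIT_TMP_REGISTER_BASE == 12 + 2 == 14
    SLJIT_TMP_R0 == SLJIT_TMP_R(0) == 14
    SLJIT_TMP_R1 == 15

and the float temporaries follow the same pattern starting at SLJIT_TMP_FREGISTER_BASE == SLJIT_NUMBER_OF_FLOAT_REGISTERS + 1.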
/********************************/
/* CPU status flags management. */
/********************************/
@@ -781,10 +823,24 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_sw sljit_exec_offset(void* ptr);
|| (defined SLJIT_CONFIG_PPC && SLJIT_CONFIG_PPC) \
|| (defined SLJIT_CONFIG_MIPS && SLJIT_CONFIG_MIPS) \
|| (defined SLJIT_CONFIG_RISCV && SLJIT_CONFIG_RISCV) \
- || (defined SLJIT_CONFIG_S390X && SLJIT_CONFIG_S390X)
+ || (defined SLJIT_CONFIG_S390X && SLJIT_CONFIG_S390X) \
+ || (defined SLJIT_CONFIG_LOONGARCH && SLJIT_CONFIG_LOONGARCH)
#define SLJIT_HAS_STATUS_FLAGS_STATE 1
#endif
+/***************************************/
+/* Floating point register management. */
+/***************************************/
+
+#if (defined SLJIT_CONFIG_ARM_32 && SLJIT_CONFIG_ARM_32) \
+ || (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
+#define SLJIT_F64_SECOND(reg) \
+ ((reg) + SLJIT_FS0 + SLJIT_NUMBER_OF_TEMPORARY_FLOAT_REGISTERS)
+#else /* !SLJIT_CONFIG_ARM_32 && !SLJIT_CONFIG_MIPS_32 */
+#define SLJIT_F64_SECOND(reg) \
+ (reg)
+#endif /* SLJIT_CONFIG_ARM_32 || SLJIT_CONFIG_MIPS_32 */
+
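The SLJIT_F64_SECOND macro above covers the 32-bit ARM and MIPS configurations, where the two 32-bit halves of a 64-bit float value can be addressed separately: it maps a float register operand to a distinct operand value, offset by SLJIT_FS0 plus the temporary float register count, which the verbose printer in sljitLir.c below renders with a '^' prefix; on every other target it is the identity. A hedged example, assuming SLJIT_FS0 == SLJIT_NUMBER_OF_FLOAT_REGISTERS: on ARM-32 (14 float registers, 2 temporaries), SLJIT_F64_SECOND(SLJIT_FR0) evaluates to 1 + 14 + 2 == 17.
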
/*************************************/
/* Debug and verbose related macros. */
/*************************************/
diff --git a/src/3rdparty/pcre2/src/sljit/sljitLir.c b/src/3rdparty/pcre2/src/sljit/sljitLir.c
index abafe1add9..2dca17cd6f 100644
--- a/src/3rdparty/pcre2/src/sljit/sljitLir.c
+++ b/src/3rdparty/pcre2/src/sljit/sljitLir.c
@@ -93,7 +93,8 @@
#define SSIZE_OF(type) ((sljit_s32)sizeof(sljit_ ## type))
#define VARIABLE_FLAG_SHIFT (10)
-#define VARIABLE_FLAG_MASK (0x3f << VARIABLE_FLAG_SHIFT)
+/* All variable flags are even. */
+#define VARIABLE_FLAG_MASK (0x3e << VARIABLE_FLAG_SHIFT)
#define GET_FLAG_TYPE(op) ((op) >> VARIABLE_FLAG_SHIFT)
#define GET_OPCODE(op) \
@@ -122,47 +123,71 @@
#endif
/* Parameter parsing. */
-#define REG_MASK 0x3f
+#define REG_MASK 0x7f
#define OFFS_REG(reg) (((reg) >> 8) & REG_MASK)
#define OFFS_REG_MASK (REG_MASK << 8)
#define TO_OFFS_REG(reg) ((reg) << 8)
-/* When reg cannot be unused. */
-#define FAST_IS_REG(reg) ((reg) <= REG_MASK)
+#define FAST_IS_REG(reg) ((reg) < REG_MASK)
/* Mask for argument types. */
#define SLJIT_ARG_MASK 0x7
#define SLJIT_ARG_FULL_MASK (SLJIT_ARG_MASK | SLJIT_ARG_TYPE_SCRATCH_REG)
-/* Mask for sljit_emit_mem. */
-#define REG_PAIR_MASK 0xff00
-#define REG_PAIR_FIRST(reg) ((reg) & 0xff)
+/* Mask for register pairs. */
+#define REG_PAIR_MASK 0x7f00
+#define REG_PAIR_FIRST(reg) ((reg) & 0x7f)
#define REG_PAIR_SECOND(reg) ((reg) >> 8)
/* Mask for sljit_emit_enter. */
#define SLJIT_KEPT_SAVEDS_COUNT(options) ((options) & 0x3)
+/* Getters for simd operations; the size getters return log2(size). */
+#define SLJIT_SIMD_GET_OPCODE(type) ((type) & 0xff)
+#define SLJIT_SIMD_GET_REG_SIZE(type) (((type) >> 12) & 0x3f)
+#define SLJIT_SIMD_GET_ELEM_SIZE(type) (((type) >> 18) & 0x3f)
+#define SLJIT_SIMD_GET_ELEM2_SIZE(type) (((type) >> 24) & 0x3f)
+
+#define SLJIT_SIMD_CHECK_REG(type) (((type) & 0x3f000) >= SLJIT_SIMD_REG_64 && ((type) & 0x3f000) <= SLJIT_SIMD_REG_512)
+#define SLJIT_SIMD_TYPE_MASK(m) ((sljit_s32)0xff000fff & ~(SLJIT_SIMD_FLOAT | SLJIT_SIMD_TEST | (m)))
+#define SLJIT_SIMD_TYPE_MASK2(m) ((sljit_s32)0xc0000fff & ~(SLJIT_SIMD_FLOAT | SLJIT_SIMD_TEST | (m)))
+
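Taken together, the getters above describe how a simd type word is packed: the opcode in the low 8 bits, log2 of the vector register size in bits 12..17, log2 of the element size in bits 18..23, and log2 of a second element size in bits 24..29. As a sketch inferred from these macros alone, a 128-bit vector of 32-bit elements would give SLJIT_SIMD_GET_REG_SIZE(type) == 4 (16 bytes) and SLJIT_SIMD_GET_ELEM_SIZE(type) == 2 (4 bytes), while SLJIT_SIMD_CHECK_REG accepts register-size fields from 3 to 6, i.e. SLJIT_SIMD_REG_64 through SLJIT_SIMD_REG_512 (8 to 64 bytes).
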
/* Jump flags. */
-#define JUMP_LABEL 0x1
-#define JUMP_ADDR 0x2
+#define JUMP_ADDR 0x1
+#define JUMP_MOV_ADDR 0x2
/* SLJIT_REWRITABLE_JUMP is 0x1000. */
#if (defined SLJIT_CONFIG_X86 && SLJIT_CONFIG_X86)
-# define PATCH_MB 0x4
-# define PATCH_MW 0x8
+# define PATCH_MB 0x04
+# define PATCH_MW 0x08
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
# define PATCH_MD 0x10
-#endif
+# define MOV_ADDR_HI 0x20
+# define JUMP_MAX_SIZE ((sljit_uw)(10 + 3))
+# define CJUMP_MAX_SIZE ((sljit_uw)(2 + 10 + 3))
+#else /* !SLJIT_CONFIG_X86_64 */
+# define JUMP_MAX_SIZE ((sljit_uw)5)
+# define CJUMP_MAX_SIZE ((sljit_uw)6)
+#endif /* SLJIT_CONFIG_X86_64 */
# define TYPE_SHIFT 13
+#if (defined SLJIT_DEBUG && SLJIT_DEBUG)
+/* Bits 7..12 hold the debug jump size; SLJIT_REWRITABLE_JUMP is 0x1000. */
+# define JUMP_SIZE_SHIFT 7
+#endif /* SLJIT_DEBUG */
#endif /* SLJIT_CONFIG_X86 */
-#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5) || (defined SLJIT_CONFIG_ARM_V7 && SLJIT_CONFIG_ARM_V7)
-# define IS_BL 0x4
-# define PATCH_B 0x8
-#endif /* SLJIT_CONFIG_ARM_V5 || SLJIT_CONFIG_ARM_V7 */
+#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6) || (defined SLJIT_CONFIG_ARM_V7 && SLJIT_CONFIG_ARM_V7)
+# define IS_BL 0x04
+# define PATCH_B 0x08
+#endif /* SLJIT_CONFIG_ARM_V6 || SLJIT_CONFIG_ARM_V7 */
-#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
+#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
# define CPOOL_SIZE 512
-#endif /* SLJIT_CONFIG_ARM_V5 */
+#endif /* SLJIT_CONFIG_ARM_V6 */
+
+#if (defined SLJIT_CONFIG_ARM_V7 && SLJIT_CONFIG_ARM_V7)
+# define JUMP_SIZE_SHIFT 26
+# define JUMP_MAX_SIZE ((sljit_uw)3)
+#endif /* SLJIT_CONFIG_ARM_V7 */
#if (defined SLJIT_CONFIG_ARM_THUMB2 && SLJIT_CONFIG_ARM_THUMB2)
# define IS_COND 0x04
@@ -171,25 +196,30 @@
# define PATCH_TYPE1 0x10
/* conditional + imm20 */
# define PATCH_TYPE2 0x20
- /* IT + imm24 */
-# define PATCH_TYPE3 0x30
/* imm11 */
-# define PATCH_TYPE4 0x40
+# define PATCH_TYPE3 0x30
/* imm24 */
-# define PATCH_TYPE5 0x50
+# define PATCH_TYPE4 0x40
/* BL + imm24 */
-# define PATCH_BL 0x60
+# define PATCH_TYPE5 0x50
+ /* addwi/subwi */
+# define PATCH_TYPE6 0x60
/* 0xf00 cc code for branches */
+# define JUMP_SIZE_SHIFT 26
+# define JUMP_MAX_SIZE ((sljit_uw)5)
#endif /* SLJIT_CONFIG_ARM_THUMB2 */
#if (defined SLJIT_CONFIG_ARM_64 && SLJIT_CONFIG_ARM_64)
# define IS_COND 0x004
# define IS_CBZ 0x008
# define IS_BL 0x010
-# define PATCH_B 0x020
-# define PATCH_COND 0x040
-# define PATCH_ABS48 0x080
-# define PATCH_ABS64 0x100
+# define PATCH_COND 0x020
+# define PATCH_B 0x040
+# define PATCH_B32 0x080
+# define PATCH_ABS48 0x100
+# define PATCH_ABS64 0x200
+# define JUMP_SIZE_SHIFT 58
+# define JUMP_MAX_SIZE ((sljit_uw)5)
#endif /* SLJIT_CONFIG_ARM_64 */
#if (defined SLJIT_CONFIG_PPC && SLJIT_CONFIG_PPC)
@@ -200,8 +230,12 @@
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
# define PATCH_ABS32 0x040
# define PATCH_ABS48 0x080
+# define JUMP_SIZE_SHIFT 58
+# define JUMP_MAX_SIZE ((sljit_uw)7)
+#else /* !SLJIT_CONFIG_PPC_64 */
+# define JUMP_SIZE_SHIFT 26
+# define JUMP_MAX_SIZE ((sljit_uw)4)
#endif /* SLJIT_CONFIG_PPC_64 */
-# define REMOVE_COND 0x100
#endif /* SLJIT_CONFIG_PPC */
#if (defined SLJIT_CONFIG_MIPS && SLJIT_CONFIG_MIPS)
@@ -243,20 +277,37 @@
# define PATCH_ABS32 0x080
# define PATCH_ABS44 0x100
# define PATCH_ABS52 0x200
+# define JUMP_SIZE_SHIFT 58
+# define JUMP_MAX_SIZE ((sljit_uw)6)
#else /* !SLJIT_CONFIG_RISCV_64 */
-# define PATCH_REL32 0x0
+# define JUMP_SIZE_SHIFT 26
+# define JUMP_MAX_SIZE ((sljit_uw)2)
#endif /* SLJIT_CONFIG_RISCV_64 */
#endif /* SLJIT_CONFIG_RISCV */
+#if (defined SLJIT_CONFIG_LOONGARCH && SLJIT_CONFIG_LOONGARCH)
+# define IS_COND 0x004
+# define IS_CALL 0x008
+
+# define PATCH_B 0x010
+# define PATCH_J 0x020
+
+# define PATCH_REL32 0x040
+# define PATCH_ABS32 0x080
+# define PATCH_ABS52 0x100
+# define JUMP_SIZE_SHIFT 58
+# define JUMP_MAX_SIZE ((sljit_uw)4)
+
+#endif /* SLJIT_CONFIG_LOONGARCH */
/* Stack management. */
#define GET_SAVED_REGISTERS_SIZE(scratches, saveds, extra) \
(((scratches < SLJIT_NUMBER_OF_SCRATCH_REGISTERS ? 0 : (scratches - SLJIT_NUMBER_OF_SCRATCH_REGISTERS)) + \
(saveds) + (sljit_s32)(extra)) * (sljit_s32)sizeof(sljit_sw))
-#define GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, size) \
+#define GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, type) \
(((fscratches < SLJIT_NUMBER_OF_SCRATCH_FLOAT_REGISTERS ? 0 : (fscratches - SLJIT_NUMBER_OF_SCRATCH_FLOAT_REGISTERS)) + \
- (fsaveds)) * (sljit_s32)(size))
+ (fsaveds)) * SSIZE_OF(type))
#define ADJUST_LOCAL_OFFSET(p, i) \
if ((p) == (SLJIT_MEM1(SLJIT_SP))) \
@@ -267,30 +318,62 @@
/* Utils can still be used even if SLJIT_CONFIG_UNSUPPORTED is set. */
#include "sljitUtils.c"
+#if (defined SLJIT_CONFIG_ARM_THUMB2 && SLJIT_CONFIG_ARM_THUMB2)
+#define SLJIT_CODE_TO_PTR(code) ((void*)((sljit_up)(code) & ~(sljit_up)0x1))
+#elif (defined SLJIT_INDIRECT_CALL && SLJIT_INDIRECT_CALL)
+#define SLJIT_CODE_TO_PTR(code) ((void*)(*(sljit_up*)code))
+#else /* !SLJIT_CONFIG_ARM_THUMB2 && !SLJIT_INDIRECT_CALL */
+#define SLJIT_CODE_TO_PTR(code) ((void*)(code))
+#endif /* SLJIT_CONFIG_ARM_THUMB2 || SLJIT_INDIRECT_CALL */
+
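SLJIT_CODE_TO_PTR centralizes the per-target conversion from a generated entry point back to the start of its allocation, which sljit_free_code further down in this patch used to open-code in three variants: Thumb-2 clears bit 0 (the Thumb state flag), SLJIT_INDIRECT_CALL targets dereference the function descriptor, and all other targets use the pointer unchanged. As a purely hypothetical illustration, a Thumb-2 entry point of 0x00402ac1 maps back to the allocation address 0x00402ac0.
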
#if !(defined SLJIT_CONFIG_UNSUPPORTED && SLJIT_CONFIG_UNSUPPORTED)
#if (defined SLJIT_EXECUTABLE_ALLOCATOR && SLJIT_EXECUTABLE_ALLOCATOR)
#if (defined SLJIT_PROT_EXECUTABLE_ALLOCATOR && SLJIT_PROT_EXECUTABLE_ALLOCATOR)
-#include "sljitProtExecAllocator.c"
-#elif (defined SLJIT_WX_EXECUTABLE_ALLOCATOR && SLJIT_WX_EXECUTABLE_ALLOCATOR)
-#include "sljitWXExecAllocator.c"
+
+#if defined(__NetBSD__)
+#include "allocator_src/sljitProtExecAllocatorNetBSD.c"
#else
-#include "sljitExecAllocator.c"
+#include "allocator_src/sljitProtExecAllocatorPosix.c"
#endif
+#elif (defined SLJIT_WX_EXECUTABLE_ALLOCATOR && SLJIT_WX_EXECUTABLE_ALLOCATOR)
+
+#if defined(_WIN32)
+#include "allocator_src/sljitWXExecAllocatorWindows.c"
+#else
+#include "allocator_src/sljitWXExecAllocatorPosix.c"
#endif
-#if (defined SLJIT_PROT_EXECUTABLE_ALLOCATOR && SLJIT_PROT_EXECUTABLE_ALLOCATOR)
-#define SLJIT_ADD_EXEC_OFFSET(ptr, exec_offset) ((sljit_u8 *)(ptr) + (exec_offset))
#else
-#define SLJIT_ADD_EXEC_OFFSET(ptr, exec_offset) ((sljit_u8 *)(ptr))
+
+#if defined(_WIN32)
+#include "allocator_src/sljitExecAllocatorWindows.c"
+#elif defined(__APPLE__)
+#include "allocator_src/sljitExecAllocatorApple.c"
+#elif defined(__FreeBSD__)
+#include "allocator_src/sljitExecAllocatorFreeBSD.c"
+#else
+#include "allocator_src/sljitExecAllocatorPosix.c"
#endif
+#endif
+
+#else /* !SLJIT_EXECUTABLE_ALLOCATOR */
+
#ifndef SLJIT_UPDATE_WX_FLAGS
#define SLJIT_UPDATE_WX_FLAGS(from, to, enable_exec)
#endif
+#endif /* SLJIT_EXECUTABLE_ALLOCATOR */
+
+#if (defined SLJIT_PROT_EXECUTABLE_ALLOCATOR && SLJIT_PROT_EXECUTABLE_ALLOCATOR)
+#define SLJIT_ADD_EXEC_OFFSET(ptr, exec_offset) ((sljit_u8 *)(ptr) + (exec_offset))
+#else
+#define SLJIT_ADD_EXEC_OFFSET(ptr, exec_offset) ((sljit_u8 *)(ptr))
+#endif
+
/* Argument checking features. */
#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
@@ -371,7 +454,7 @@ static sljit_s32 compiler_initialized = 0;
static void init_compiler(void);
#endif
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_compiler* sljit_create_compiler(void *allocator_data, void *exec_allocator_data)
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_compiler* sljit_create_compiler(void *allocator_data)
{
struct sljit_compiler *compiler = (struct sljit_compiler*)SLJIT_MALLOC(sizeof(struct sljit_compiler), allocator_data);
if (!compiler)
@@ -382,10 +465,11 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_compiler* sljit_create_compiler(void *allo
sizeof(sljit_s8) == 1 && sizeof(sljit_u8) == 1
&& sizeof(sljit_s16) == 2 && sizeof(sljit_u16) == 2
&& sizeof(sljit_s32) == 4 && sizeof(sljit_u32) == 4
- && (sizeof(sljit_p) == 4 || sizeof(sljit_p) == 8)
- && sizeof(sljit_p) <= sizeof(sljit_sw)
+ && (sizeof(sljit_up) == 4 || sizeof(sljit_up) == 8)
+ && sizeof(sljit_up) <= sizeof(sljit_sw)
+ && sizeof(sljit_up) == sizeof(sljit_sp)
&& (sizeof(sljit_sw) == 4 || sizeof(sljit_sw) == 8)
- && (sizeof(sljit_uw) == 4 || sizeof(sljit_uw) == 8),
+ && (sizeof(sljit_uw) == sizeof(sljit_sw)),
invalid_integer_types);
SLJIT_COMPILE_ASSERT(SLJIT_REWRITABLE_JUMP != SLJIT_32,
rewritable_jump_and_single_op_must_not_be_the_same);
@@ -396,7 +480,6 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_compiler* sljit_create_compiler(void *allo
compiler->error = SLJIT_SUCCESS;
compiler->allocator_data = allocator_data;
- compiler->exec_allocator_data = exec_allocator_data;
compiler->buf = (struct sljit_memory_fragment*)SLJIT_MALLOC(BUF_SIZE, allocator_data);
compiler->abuf = (struct sljit_memory_fragment*)SLJIT_MALLOC(ABUF_SIZE, allocator_data);
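
With the two hunks above, sljit_create_compiler no longer takes or stores an exec_allocator_data pointer; the executable-allocator data now travels with the operations that actually need it (see allocate_executable_memory and sljit_free_code elsewhere in this patch). A minimal calling sketch under that assumption, using the default system allocator:

    struct sljit_compiler *compiler = sljit_create_compiler(NULL);
    if (compiler == NULL)
        return;                      /* allocation failed */
    /* ... emit code, generate it, ... */
    sljit_free_compiler(compiler);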
@@ -422,9 +505,9 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_compiler* sljit_create_compiler(void *allo
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
compiler->args_size = -1;
-#endif
+#endif /* SLJIT_CONFIG_X86_32 */
-#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
+#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
compiler->cpool = (sljit_uw*)SLJIT_MALLOC(CPOOL_SIZE * sizeof(sljit_uw)
+ CPOOL_SIZE * sizeof(sljit_u8), allocator_data);
if (!compiler->cpool) {
@@ -435,18 +518,18 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_compiler* sljit_create_compiler(void *allo
}
compiler->cpool_unique = (sljit_u8*)(compiler->cpool + CPOOL_SIZE);
compiler->cpool_diff = 0xffffffff;
-#endif
+#endif /* SLJIT_CONFIG_ARM_V6 */
#if (defined SLJIT_CONFIG_MIPS && SLJIT_CONFIG_MIPS)
compiler->delay_slot = UNMOVABLE_INS;
-#endif
+#endif /* SLJIT_CONFIG_MIPS */
#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS) \
|| (defined SLJIT_DEBUG && SLJIT_DEBUG)
compiler->last_flags = 0;
compiler->last_return = -1;
compiler->logical_local_size = 0;
-#endif
+#endif /* SLJIT_ARGUMENT_CHECKS || SLJIT_DEBUG */
#if (defined SLJIT_NEEDS_COMPILER_INIT && SLJIT_NEEDS_COMPILER_INIT)
if (!compiler_initialized) {
@@ -479,7 +562,7 @@ SLJIT_API_FUNC_ATTRIBUTE void sljit_free_compiler(struct sljit_compiler *compile
SLJIT_FREE(curr, allocator_data);
}
-#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
+#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
SLJIT_FREE(compiler->cpool, allocator_data);
#endif
SLJIT_FREE(compiler, allocator_data);
@@ -491,37 +574,17 @@ SLJIT_API_FUNC_ATTRIBUTE void sljit_set_compiler_memory_error(struct sljit_compi
compiler->error = SLJIT_ERR_ALLOC_FAILED;
}
-#if (defined SLJIT_CONFIG_ARM_THUMB2 && SLJIT_CONFIG_ARM_THUMB2)
SLJIT_API_FUNC_ATTRIBUTE void sljit_free_code(void* code, void *exec_allocator_data)
{
SLJIT_UNUSED_ARG(exec_allocator_data);
- /* Remove thumb mode flag. */
- SLJIT_FREE_EXEC((void*)((sljit_uw)code & ~(sljit_uw)0x1), exec_allocator_data);
+ SLJIT_FREE_EXEC(SLJIT_CODE_TO_PTR(code), exec_allocator_data);
}
-#elif (defined SLJIT_INDIRECT_CALL && SLJIT_INDIRECT_CALL)
-SLJIT_API_FUNC_ATTRIBUTE void sljit_free_code(void* code, void *exec_allocator_data)
-{
- SLJIT_UNUSED_ARG(exec_allocator_data);
-
- /* Resolve indirection. */
- code = (void*)(*(sljit_uw*)code);
- SLJIT_FREE_EXEC(code, exec_allocator_data);
-}
-#else
-SLJIT_API_FUNC_ATTRIBUTE void sljit_free_code(void* code, void *exec_allocator_data)
-{
- SLJIT_UNUSED_ARG(exec_allocator_data);
-
- SLJIT_FREE_EXEC(code, exec_allocator_data);
-}
-#endif
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_label(struct sljit_jump *jump, struct sljit_label* label)
{
if (SLJIT_LIKELY(!!jump) && SLJIT_LIKELY(!!label)) {
jump->flags &= (sljit_uw)~JUMP_ADDR;
- jump->flags |= JUMP_LABEL;
jump->u.label = label;
}
}
@@ -529,18 +592,11 @@ SLJIT_API_FUNC_ATTRIBUTE void sljit_set_label(struct sljit_jump *jump, struct sl
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_target(struct sljit_jump *jump, sljit_uw target)
{
if (SLJIT_LIKELY(!!jump)) {
- jump->flags &= (sljit_uw)~JUMP_LABEL;
jump->flags |= JUMP_ADDR;
jump->u.target = target;
}
}
-SLJIT_API_FUNC_ATTRIBUTE void sljit_set_put_label(struct sljit_put_label *put_label, struct sljit_label *label)
-{
- if (SLJIT_LIKELY(!!put_label))
- put_label->label = label;
-}
-
#define SLJIT_CURRENT_FLAGS_ALL \
(SLJIT_CURRENT_FLAGS_32 | SLJIT_CURRENT_FLAGS_ADD | SLJIT_CURRENT_FLAGS_SUB | SLJIT_CURRENT_FLAGS_COMPARE)
@@ -635,31 +691,66 @@ static SLJIT_INLINE void reverse_buf(struct sljit_compiler *compiler)
compiler->buf = prev;
}
-/* Only used in RISC architectures where the instruction size is constant */
-#if !(defined SLJIT_CONFIG_X86 && SLJIT_CONFIG_X86) \
- && !(defined SLJIT_CONFIG_S390X && SLJIT_CONFIG_S390X)
-
-static SLJIT_INLINE sljit_uw compute_next_addr(struct sljit_label *label, struct sljit_jump *jump,
- struct sljit_const *const_, struct sljit_put_label *put_label)
+static SLJIT_INLINE void* allocate_executable_memory(sljit_uw size, sljit_s32 options,
+ void *exec_allocator_data, sljit_sw *executable_offset)
{
- sljit_uw result = ~(sljit_uw)0;
+ void *code;
+ struct sljit_generate_code_buffer *buffer;
+
+ if (SLJIT_LIKELY(!(options & SLJIT_GENERATE_CODE_BUFFER))) {
+ code = SLJIT_MALLOC_EXEC(size, exec_allocator_data);
+ *executable_offset = SLJIT_EXEC_OFFSET(code);
+ return code;
+ }
- if (label)
- result = label->size;
+ buffer = (struct sljit_generate_code_buffer*)exec_allocator_data;
- if (jump && jump->addr < result)
- result = jump->addr;
+ if (size <= buffer->size) {
+ *executable_offset = buffer->executable_offset;
+ return buffer->buffer;
+ }
+
+ return NULL;
+}
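
allocate_executable_memory above adds a second path: when the SLJIT_GENERATE_CODE_BUFFER option is set, exec_allocator_data is treated as a struct sljit_generate_code_buffer describing caller-provided memory instead of a request to the executable allocator. A hedged caller-side sketch, using only the fields the function reads and hypothetical names for the backing memory:

    struct sljit_generate_code_buffer buf;
    buf.buffer = preallocated_executable_memory;   /* hypothetical, already executable */
    buf.size = preallocated_size;                  /* must be at least the generated code size */
    buf.executable_offset = 0;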
- if (const_ && const_->addr < result)
- result = const_->addr;
+#define SLJIT_MAX_ADDRESS ~(sljit_uw)0
- if (put_label && put_label->addr < result)
- result = put_label->addr;
+#define SLJIT_GET_NEXT_SIZE(ptr) ((ptr) != NULL ? (ptr)->size : SLJIT_MAX_ADDRESS)
+#define SLJIT_GET_NEXT_ADDRESS(ptr) ((ptr) != NULL ? (ptr)->addr : SLJIT_MAX_ADDRESS)
+
+#if !(defined SLJIT_CONFIG_X86 && SLJIT_CONFIG_X86)
+
+#define SLJIT_NEXT_DEFINE_TYPES \
+ sljit_uw next_label_size; \
+ sljit_uw next_jump_addr; \
+ sljit_uw next_const_addr; \
+ sljit_uw next_min_addr
+
+#define SLJIT_NEXT_INIT_TYPES() \
+ next_label_size = SLJIT_GET_NEXT_SIZE(label); \
+ next_jump_addr = SLJIT_GET_NEXT_ADDRESS(jump); \
+ next_const_addr = SLJIT_GET_NEXT_ADDRESS(const_);
+
+#define SLJIT_GET_NEXT_MIN() \
+ next_min_addr = sljit_get_next_min(next_label_size, next_jump_addr, next_const_addr);
+
+static SLJIT_INLINE sljit_uw sljit_get_next_min(sljit_uw next_label_size,
+ sljit_uw next_jump_addr, sljit_uw next_const_addr)
+{
+ sljit_uw result = next_jump_addr;
+
+ SLJIT_ASSERT(result == SLJIT_MAX_ADDRESS || result != next_const_addr);
+
+ if (next_const_addr < result)
+ result = next_const_addr;
+
+ if (next_label_size < result)
+ result = next_label_size;
return result;
}
-#endif /* !SLJIT_CONFIG_X86 && !SLJIT_CONFIG_S390X */
+#endif /* !SLJIT_CONFIG_X86 */
static SLJIT_INLINE void set_emit_enter(struct sljit_compiler *compiler,
sljit_s32 options, sljit_s32 args, sljit_s32 scratches, sljit_s32 saveds,
@@ -700,8 +791,9 @@ static SLJIT_INLINE void set_set_context(struct sljit_compiler *compiler,
static SLJIT_INLINE void set_label(struct sljit_label *label, struct sljit_compiler *compiler)
{
label->next = NULL;
+ label->u.index = compiler->label_count++;
label->size = compiler->size;
- if (compiler->last_label)
+ if (compiler->last_label != NULL)
compiler->last_label->next = label;
else
compiler->labels = label;
@@ -712,7 +804,21 @@ static SLJIT_INLINE void set_jump(struct sljit_jump *jump, struct sljit_compiler
{
jump->next = NULL;
jump->flags = flags;
- if (compiler->last_jump)
+ jump->u.label = NULL;
+ if (compiler->last_jump != NULL)
+ compiler->last_jump->next = jump;
+ else
+ compiler->jumps = jump;
+ compiler->last_jump = jump;
+}
+
+static SLJIT_INLINE void set_mov_addr(struct sljit_jump *jump, struct sljit_compiler *compiler, sljit_uw offset)
+{
+ jump->next = NULL;
+ jump->addr = compiler->size - offset;
+ jump->flags = JUMP_MOV_ADDR;
+ jump->u.label = NULL;
+ if (compiler->last_jump != NULL)
compiler->last_jump->next = jump;
else
compiler->jumps = jump;
@@ -723,26 +829,13 @@ static SLJIT_INLINE void set_const(struct sljit_const *const_, struct sljit_comp
{
const_->next = NULL;
const_->addr = compiler->size;
- if (compiler->last_const)
+ if (compiler->last_const != NULL)
compiler->last_const->next = const_;
else
compiler->consts = const_;
compiler->last_const = const_;
}
-static SLJIT_INLINE void set_put_label(struct sljit_put_label *put_label, struct sljit_compiler *compiler, sljit_uw offset)
-{
- put_label->next = NULL;
- put_label->label = NULL;
- put_label->addr = compiler->size - offset;
- put_label->flags = 0;
- if (compiler->last_put_label)
- compiler->last_put_label->next = put_label;
- else
- compiler->put_labels = put_label;
- compiler->last_put_label = put_label;
-}
-
#define ADDRESSING_DEPENDS_ON(exp, reg) \
(((exp) & SLJIT_MEM) && (((exp) & REG_MASK) == reg || OFFS_REG(exp) == reg))
@@ -802,11 +895,8 @@ static sljit_s32 function_check_arguments(sljit_s32 arg_types, sljit_s32 scratch
#define FUNCTION_CHECK_IS_REG(r) \
(((r) >= SLJIT_R0 && (r) < (SLJIT_R0 + compiler->scratches)) \
- || ((r) > (SLJIT_S0 - compiler->saveds) && (r) <= SLJIT_S0))
-
-#define FUNCTION_CHECK_IS_FREG(fr) \
- (((fr) >= SLJIT_FR0 && (fr) < (SLJIT_FR0 + compiler->fscratches)) \
- || ((fr) > (SLJIT_FS0 - compiler->fsaveds) && (fr) <= SLJIT_FS0))
+ || ((r) > (SLJIT_S0 - compiler->saveds) && (r) <= SLJIT_S0) \
+ || ((r) >= SLJIT_TMP_REGISTER_BASE && (r) < (SLJIT_TMP_REGISTER_BASE + SLJIT_NUMBER_OF_TEMPORARY_REGISTERS)))
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
#define CHECK_IF_VIRTUAL_REGISTER(p) ((p) <= SLJIT_S3 && (p) >= SLJIT_S8)
@@ -816,7 +906,7 @@ static sljit_s32 function_check_arguments(sljit_s32 arg_types, sljit_s32 scratch
static sljit_s32 function_check_src_mem(struct sljit_compiler *compiler, sljit_s32 p, sljit_sw i)
{
- if (compiler->scratches == -1 || compiler->saveds == -1)
+ if (compiler->scratches == -1)
return 0;
if (!(p & SLJIT_MEM))
@@ -853,7 +943,7 @@ static sljit_s32 function_check_src_mem(struct sljit_compiler *compiler, sljit_s
static sljit_s32 function_check_src(struct sljit_compiler *compiler, sljit_s32 p, sljit_sw i)
{
- if (compiler->scratches == -1 || compiler->saveds == -1)
+ if (compiler->scratches == -1)
return 0;
if (FUNCTION_CHECK_IS_REG(p))
@@ -870,7 +960,7 @@ static sljit_s32 function_check_src(struct sljit_compiler *compiler, sljit_s32 p
static sljit_s32 function_check_dst(struct sljit_compiler *compiler, sljit_s32 p, sljit_sw i)
{
- if (compiler->scratches == -1 || compiler->saveds == -1)
+ if (compiler->scratches == -1)
return 0;
if (FUNCTION_CHECK_IS_REG(p))
@@ -882,19 +972,59 @@ static sljit_s32 function_check_dst(struct sljit_compiler *compiler, sljit_s32 p
#define FUNCTION_CHECK_DST(p, i) \
CHECK_ARGUMENT(function_check_dst(compiler, p, i));
+#if (defined SLJIT_CONFIG_ARM_32 && SLJIT_CONFIG_ARM_32) \
+ || (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
+
+#define FUNCTION_CHECK_IS_FREG(fr, is_32) \
+ function_check_is_freg(compiler, (fr), (is_32))
+
+static sljit_s32 function_check_is_freg(struct sljit_compiler *compiler, sljit_s32 fr, sljit_s32 is_32);
+
+#define FUNCTION_FCHECK(p, i, is_32) \
+ CHECK_ARGUMENT(function_fcheck(compiler, (p), (i), (is_32)));
+
+static sljit_s32 function_fcheck(struct sljit_compiler *compiler, sljit_s32 p, sljit_sw i, sljit_s32 is_32)
+{
+ if (compiler->scratches == -1)
+ return 0;
+
+ if (FUNCTION_CHECK_IS_FREG(p, is_32))
+ return (i == 0);
+
+ return function_check_src_mem(compiler, p, i);
+}
+
+#else /* !SLJIT_CONFIG_ARM_32 && !SLJIT_CONFIG_MIPS_32 */
+#define FUNCTION_CHECK_IS_FREG(fr, is_32) \
+ function_check_is_freg(compiler, (fr))
+
+static sljit_s32 function_check_is_freg(struct sljit_compiler *compiler, sljit_s32 fr)
+{
+ if (compiler->scratches == -1)
+ return 0;
+
+ return (fr >= SLJIT_FR0 && fr < (SLJIT_FR0 + compiler->fscratches))
+ || (fr > (SLJIT_FS0 - compiler->fsaveds) && fr <= SLJIT_FS0)
+ || (fr >= SLJIT_TMP_FREGISTER_BASE && fr < (SLJIT_TMP_FREGISTER_BASE + SLJIT_NUMBER_OF_TEMPORARY_FLOAT_REGISTERS));
+}
+
+#define FUNCTION_FCHECK(p, i, is_32) \
+ CHECK_ARGUMENT(function_fcheck(compiler, (p), (i)));
+
static sljit_s32 function_fcheck(struct sljit_compiler *compiler, sljit_s32 p, sljit_sw i)
{
- if (compiler->scratches == -1 || compiler->saveds == -1)
+ if (compiler->scratches == -1)
return 0;
- if (FUNCTION_CHECK_IS_FREG(p))
+ if ((p >= SLJIT_FR0 && p < (SLJIT_FR0 + compiler->fscratches))
+ || (p > (SLJIT_FS0 - compiler->fsaveds) && p <= SLJIT_FS0)
+ || (p >= SLJIT_TMP_FREGISTER_BASE && p < (SLJIT_TMP_FREGISTER_BASE + SLJIT_NUMBER_OF_TEMPORARY_FLOAT_REGISTERS)))
return (i == 0);
return function_check_src_mem(compiler, p, i);
}
-#define FUNCTION_FCHECK(p, i) \
- CHECK_ARGUMENT(function_fcheck(compiler, p, i));
+#endif /* SLJIT_CONFIG_ARM_32 || SLJIT_CONFIG_MIPS_32 */
#endif /* SLJIT_ARGUMENT_CHECKS */
@@ -923,23 +1053,35 @@ static void sljit_verbose_reg(struct sljit_compiler *compiler, sljit_s32 r)
{
if (r < (SLJIT_R0 + compiler->scratches))
fprintf(compiler->verbose, "r%d", r - SLJIT_R0);
- else if (r != SLJIT_SP)
+ else if (r < SLJIT_SP)
fprintf(compiler->verbose, "s%d", SLJIT_NUMBER_OF_REGISTERS - r);
- else
+ else if (r == SLJIT_SP)
fprintf(compiler->verbose, "sp");
+ else
+ fprintf(compiler->verbose, "t%d", r - SLJIT_TMP_REGISTER_BASE);
}
static void sljit_verbose_freg(struct sljit_compiler *compiler, sljit_s32 r)
{
+#if (defined SLJIT_CONFIG_ARM_32 && SLJIT_CONFIG_ARM_32) \
+ || (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
+ if (r >= SLJIT_F64_SECOND(SLJIT_FR0)) {
+ fprintf(compiler->verbose, "^");
+ r -= SLJIT_F64_SECOND(0);
+ }
+#endif /* SLJIT_CONFIG_ARM_32 || SLJIT_CONFIG_MIPS_32 */
+
if (r < (SLJIT_FR0 + compiler->fscratches))
fprintf(compiler->verbose, "fr%d", r - SLJIT_FR0);
- else
+ else if (r < SLJIT_TMP_FREGISTER_BASE)
fprintf(compiler->verbose, "fs%d", SLJIT_NUMBER_OF_FLOAT_REGISTERS - r);
+ else
+ fprintf(compiler->verbose, "ft%d", r - SLJIT_TMP_FREGISTER_BASE);
}
static void sljit_verbose_param(struct sljit_compiler *compiler, sljit_s32 p, sljit_sw i)
{
- if ((p) & SLJIT_IMM)
+ if ((p) == SLJIT_IMM)
fprintf(compiler->verbose, "#%" SLJIT_PRINT_D "d", (i));
else if ((p) & SLJIT_MEM) {
if ((p) & REG_MASK) {
@@ -991,9 +1133,17 @@ static const char* op0_names[] = {
};
static const char* op1_names[] = {
+ "mov", "mov", "mov", "mov",
+ "mov", "mov", "mov", "mov",
+ "mov", "clz", "ctz", "rev",
+ "rev", "rev", "rev", "rev"
+};
+
+static const char* op1_types[] = {
"", ".u8", ".s8", ".u16",
".s16", ".u32", ".s32", "32",
- ".p", "not", "clz", "ctz"
+ ".p", "", "", "",
+ ".u16", ".s16", ".u32", ".s32"
};
static const char* op2_names[] = {
@@ -1003,22 +1153,40 @@ static const char* op2_names[] = {
"ashr", "mashr", "rotl", "rotr"
};
-static const char* op_src_names[] = {
+static const char* op2r_names[] = {
+ "muladd"
+};
+
+static const char* op_src_dst_names[] = {
"fast_return", "skip_frames_before_fast_return",
"prefetch_l1", "prefetch_l2",
"prefetch_l3", "prefetch_once",
+ "fast_enter", "get_return_address"
};
static const char* fop1_names[] = {
"mov", "conv", "conv", "conv",
- "conv", "conv", "cmp", "neg",
- "abs",
+ "conv", "conv", "conv", "conv",
+ "cmp", "neg", "abs",
+};
+
+static const char* fop1_conv_types[] = {
+ "sw", "s32", "sw", "s32",
+ "uw", "u32"
};
static const char* fop2_names[] = {
"add", "sub", "mul", "div"
};
+static const char* fop2r_names[] = {
+ "copysign"
+};
+
+static const char* simd_op2_names[] = {
+ "and", "or", "xor"
+};
+
static const char* jump_names[] = {
"equal", "not_equal",
"less", "greater_equal",
@@ -1026,7 +1194,8 @@ static const char* jump_names[] = {
"sig_less", "sig_greater_equal",
"sig_greater", "sig_less_equal",
"overflow", "not_overflow",
- "carry", "",
+ "carry", "not_carry",
+ "atomic_stored", "atomic_not_stored",
"f_equal", "f_not_equal",
"f_less", "f_greater_equal",
"f_greater", "f_less_equal",
@@ -1069,13 +1238,19 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_generate_code(struct sljit_com
jump = compiler->jumps;
while (jump) {
/* All jumps have target. */
- CHECK_ARGUMENT(jump->flags & (JUMP_LABEL | JUMP_ADDR));
+ CHECK_ARGUMENT((jump->flags & JUMP_ADDR) || jump->u.label != NULL);
jump = jump->next;
}
#endif
CHECK_RETURN_OK;
}
+#if (defined SLJIT_CONFIG_X86 && SLJIT_CONFIG_X86)
+#define SLJIT_ENTER_CPU_SPECIFIC_OPTIONS (SLJIT_ENTER_USE_VEX)
+#else /* !SLJIT_CONFIG_X86 */
+#define SLJIT_ENTER_CPU_SPECIFIC_OPTIONS (0)
+#endif /* !SLJIT_CONFIG_X86 */
+
static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_enter(struct sljit_compiler *compiler,
sljit_s32 options, sljit_s32 arg_types, sljit_s32 scratches, sljit_s32 saveds,
sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
@@ -1084,9 +1259,9 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_enter(struct sljit_compil
#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
if (options & SLJIT_ENTER_REG_ARG) {
- CHECK_ARGUMENT(!(options & ~(0x3 | SLJIT_ENTER_REG_ARG)));
+ CHECK_ARGUMENT(!(options & ~(0x3 | SLJIT_ENTER_REG_ARG | SLJIT_ENTER_CPU_SPECIFIC_OPTIONS)));
} else {
- CHECK_ARGUMENT(options == 0);
+ CHECK_ARGUMENT((options & ~SLJIT_ENTER_CPU_SPECIFIC_OPTIONS) == 0);
}
CHECK_ARGUMENT(SLJIT_KEPT_SAVEDS_COUNT(options) <= 3 && SLJIT_KEPT_SAVEDS_COUNT(options) <= saveds);
CHECK_ARGUMENT(scratches >= 0 && scratches <= SLJIT_NUMBER_OF_REGISTERS);
@@ -1120,13 +1295,19 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_enter(struct sljit_compil
fprintf(compiler->verbose, "],");
if (options & SLJIT_ENTER_REG_ARG) {
- fprintf(compiler->verbose, " enter:reg_arg,");
-
if (SLJIT_KEPT_SAVEDS_COUNT(options) > 0)
- fprintf(compiler->verbose, " keep:%d,", SLJIT_KEPT_SAVEDS_COUNT(options));
+ fprintf(compiler->verbose, " opt:reg_arg(%d),", SLJIT_KEPT_SAVEDS_COUNT(options));
+ else
+ fprintf(compiler->verbose, " opt:reg_arg,");
}
- fprintf(compiler->verbose, "scratches:%d, saveds:%d, fscratches:%d, fsaveds:%d, local_size:%d\n",
+#if (defined SLJIT_CONFIG_X86 && SLJIT_CONFIG_X86)
+ if (options & SLJIT_ENTER_USE_VEX) {
+ fprintf(compiler->verbose, " opt:use_vex,");
+ }
+#endif /* SLJIT_CONFIG_X86 */
+
+ fprintf(compiler->verbose, " scratches:%d, saveds:%d, fscratches:%d, fsaveds:%d, local_size:%d\n",
scratches, saveds, fscratches, fsaveds, local_size);
}
#endif
@@ -1141,9 +1322,9 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_set_context(struct sljit_compi
#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
if (options & SLJIT_ENTER_REG_ARG) {
- CHECK_ARGUMENT(!(options & ~(0x3 | SLJIT_ENTER_REG_ARG)));
+ CHECK_ARGUMENT(!(options & ~(0x3 | SLJIT_ENTER_REG_ARG | SLJIT_ENTER_CPU_SPECIFIC_OPTIONS)));
} else {
- CHECK_ARGUMENT(options == 0);
+ CHECK_ARGUMENT((options & ~SLJIT_ENTER_CPU_SPECIFIC_OPTIONS) == 0);
}
CHECK_ARGUMENT(SLJIT_KEPT_SAVEDS_COUNT(options) <= 3 && SLJIT_KEPT_SAVEDS_COUNT(options) <= saveds);
CHECK_ARGUMENT(scratches >= 0 && scratches <= SLJIT_NUMBER_OF_REGISTERS);
@@ -1177,11 +1358,17 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_set_context(struct sljit_compi
fprintf(compiler->verbose, "],");
if (options & SLJIT_ENTER_REG_ARG) {
- fprintf(compiler->verbose, " enter:reg_arg,");
-
if (SLJIT_KEPT_SAVEDS_COUNT(options) > 0)
- fprintf(compiler->verbose, " keep:%d,", SLJIT_KEPT_SAVEDS_COUNT(options));
+ fprintf(compiler->verbose, " opt:reg_arg(%d),", SLJIT_KEPT_SAVEDS_COUNT(options));
+ else
+ fprintf(compiler->verbose, " opt:reg_arg,");
+ }
+
+#if (defined SLJIT_CONFIG_X86 && SLJIT_CONFIG_X86)
+ if (options & SLJIT_ENTER_USE_VEX) {
+ fprintf(compiler->verbose, " opt:use_vex,");
}
+#endif /* SLJIT_CONFIG_X86 */
fprintf(compiler->verbose, " scratches:%d, saveds:%d, fscratches:%d, fsaveds:%d, local_size:%d\n",
scratches, saveds, fscratches, fsaveds, local_size);
@@ -1190,6 +1377,8 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_set_context(struct sljit_compi
CHECK_RETURN_OK;
}
+#undef SLJIT_ENTER_CPU_SPECIFIC_OPTIONS
+
static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_return_void(struct sljit_compiler *compiler)
{
if (SLJIT_UNLIKELY(compiler->skip_checks)) {
@@ -1198,7 +1387,7 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_return_void(struct sljit_
}
#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
- CHECK_ARGUMENT(compiler->last_return == SLJIT_ARG_TYPE_VOID);
+ CHECK_ARGUMENT(compiler->last_return == SLJIT_ARG_TYPE_RET_VOID);
#endif
#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
@@ -1241,7 +1430,7 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_return(struct sljit_compi
if (GET_OPCODE(op) < SLJIT_MOV_F64) {
FUNCTION_CHECK_SRC(src, srcw);
} else {
- FUNCTION_FCHECK(src, srcw);
+ FUNCTION_FCHECK(src, srcw, op & SLJIT_32);
}
compiler->last_flags = 0;
#endif
@@ -1249,7 +1438,7 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_return(struct sljit_compi
if (SLJIT_UNLIKELY(!!compiler->verbose)) {
if (GET_OPCODE(op) < SLJIT_MOV_F64) {
fprintf(compiler->verbose, " return%s%s ", !(op & SLJIT_32) ? "" : "32",
- op1_names[GET_OPCODE(op) - SLJIT_OP1_BASE]);
+ op1_types[GET_OPCODE(op) - SLJIT_OP1_BASE]);
sljit_verbose_param(compiler, src, srcw);
} else {
fprintf(compiler->verbose, " return%s ", !(op & SLJIT_32) ? ".f64" : ".f32");
@@ -1277,22 +1466,6 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_return_to(struct sljit_co
CHECK_RETURN_OK;
}
-static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
-{
-#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
- FUNCTION_CHECK_DST(dst, dstw);
- compiler->last_flags = 0;
-#endif
-#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
- if (SLJIT_UNLIKELY(!!compiler->verbose)) {
- fprintf(compiler->verbose, " fast_enter ");
- sljit_verbose_param(compiler, dst, dstw);
- fprintf(compiler->verbose, "\n");
- }
-#endif
- CHECK_RETURN_OK;
-}
-
static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_op0(struct sljit_compiler *compiler, sljit_s32 op)
{
#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
@@ -1326,16 +1499,16 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_op1(struct sljit_compiler
}
#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
- CHECK_ARGUMENT(GET_OPCODE(op) >= SLJIT_MOV && GET_OPCODE(op) <= SLJIT_CTZ);
+ CHECK_ARGUMENT(GET_OPCODE(op) >= SLJIT_MOV && GET_OPCODE(op) <= SLJIT_REV_S32);
switch (GET_OPCODE(op)) {
- case SLJIT_NOT:
- /* Only SLJIT_32 and SLJIT_SET_Z are allowed. */
- CHECK_ARGUMENT(!(op & VARIABLE_FLAG_MASK));
- break;
case SLJIT_MOV:
case SLJIT_MOV_U32:
+ case SLJIT_MOV_S32:
+ case SLJIT_MOV32:
case SLJIT_MOV_P:
+ case SLJIT_REV_U32:
+ case SLJIT_REV_S32:
/* Nothing allowed */
CHECK_ARGUMENT(!(op & (SLJIT_32 | SLJIT_SET_Z | VARIABLE_FLAG_MASK)));
break;
@@ -1347,25 +1520,11 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_op1(struct sljit_compiler
FUNCTION_CHECK_DST(dst, dstw);
FUNCTION_CHECK_SRC(src, srcw);
-
- if (GET_OPCODE(op) >= SLJIT_NOT) {
- CHECK_ARGUMENT(src != SLJIT_IMM);
- compiler->last_flags = GET_FLAG_TYPE(op) | (op & (SLJIT_32 | SLJIT_SET_Z));
- }
#endif
#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
if (SLJIT_UNLIKELY(!!compiler->verbose)) {
- if (GET_OPCODE(op) <= SLJIT_MOV_P)
- {
- fprintf(compiler->verbose, " mov%s%s ", !(op & SLJIT_32) ? "" : "32",
- op1_names[GET_OPCODE(op) - SLJIT_OP1_BASE]);
- }
- else
- {
- fprintf(compiler->verbose, " %s%s%s%s%s ", op1_names[GET_OPCODE(op) - SLJIT_OP1_BASE], !(op & SLJIT_32) ? "" : "32",
- !(op & SLJIT_SET_Z) ? "" : ".z", !(op & VARIABLE_FLAG_MASK) ? "" : ".",
- !(op & VARIABLE_FLAG_MASK) ? "" : jump_names[GET_FLAG_TYPE(op)]);
- }
+ fprintf(compiler->verbose, " %s%s%s ", op1_names[GET_OPCODE(op) - SLJIT_OP1_BASE],
+ !(op & SLJIT_32) ? "" : "32", op1_types[GET_OPCODE(op) - SLJIT_OP1_BASE]);
sljit_verbose_param(compiler, dst, dstw);
fprintf(compiler->verbose, ", ");
@@ -1376,6 +1535,94 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_op1(struct sljit_compiler
CHECK_RETURN_OK;
}
+static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_atomic_load(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst_reg,
+ sljit_s32 mem_reg)
+{
+ if (SLJIT_UNLIKELY(compiler->skip_checks)) {
+ compiler->skip_checks = 0;
+ CHECK_RETURN_OK;
+ }
+
+#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
+ CHECK_ARGUMENT(sljit_has_cpu_feature(SLJIT_HAS_ATOMIC));
+ CHECK_ARGUMENT(GET_OPCODE(op) >= SLJIT_MOV && GET_OPCODE(op) <= SLJIT_MOV_P);
+ CHECK_ARGUMENT(GET_OPCODE(op) != SLJIT_MOV_S8 && GET_OPCODE(op) != SLJIT_MOV_S16 && GET_OPCODE(op) != SLJIT_MOV_S32);
+
+ /* All arguments must be valid registers. */
+ CHECK_ARGUMENT(FUNCTION_CHECK_IS_REG(dst_reg));
+ CHECK_ARGUMENT(FUNCTION_CHECK_IS_REG(mem_reg) && !CHECK_IF_VIRTUAL_REGISTER(mem_reg));
+
+ if (op == SLJIT_MOV32_U8 || op == SLJIT_MOV32_U16) {
+ /* Only SLJIT_32 is allowed. */
+ CHECK_ARGUMENT(!(op & (VARIABLE_FLAG_MASK | SLJIT_SET_Z)));
+ } else {
+ /* Nothing allowed. */
+ CHECK_ARGUMENT(!(op & (SLJIT_32 | SLJIT_SET_Z | VARIABLE_FLAG_MASK)));
+ }
+
+ compiler->last_flags = 0;
+#endif /* SLJIT_ARGUMENT_CHECKS */
+#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
+ if (SLJIT_UNLIKELY(!!compiler->verbose)) {
+ fprintf(compiler->verbose, " atomic_load%s%s ", !(op & SLJIT_32) ? "" : "32",
+ op1_types[GET_OPCODE(op) - SLJIT_OP1_BASE]);
+ sljit_verbose_reg(compiler, dst_reg);
+ fprintf(compiler->verbose, ", [");
+ sljit_verbose_reg(compiler, mem_reg);
+ fprintf(compiler->verbose, "]\n");
+ }
+#endif /* SLJIT_VERBOSE */
+ CHECK_RETURN_OK;
+}
+
+static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_atomic_store(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 src_reg,
+ sljit_s32 mem_reg,
+ sljit_s32 temp_reg)
+{
+ if (SLJIT_UNLIKELY(compiler->skip_checks)) {
+ compiler->skip_checks = 0;
+ CHECK_RETURN_OK;
+ }
+
+#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
+ CHECK_ARGUMENT(sljit_has_cpu_feature(SLJIT_HAS_ATOMIC));
+ CHECK_ARGUMENT(GET_OPCODE(op) >= SLJIT_MOV && GET_OPCODE(op) <= SLJIT_MOV_P);
+ CHECK_ARGUMENT(GET_OPCODE(op) != SLJIT_MOV_S8 && GET_OPCODE(op) != SLJIT_MOV_S16 && GET_OPCODE(op) != SLJIT_MOV_S32);
+
+ /* All arguments must be valid registers. */
+ CHECK_ARGUMENT(FUNCTION_CHECK_IS_REG(src_reg));
+ CHECK_ARGUMENT(FUNCTION_CHECK_IS_REG(mem_reg) && !CHECK_IF_VIRTUAL_REGISTER(mem_reg));
+ CHECK_ARGUMENT(FUNCTION_CHECK_IS_REG(temp_reg) && src_reg != temp_reg);
+
+ CHECK_ARGUMENT(!(op & VARIABLE_FLAG_MASK) || GET_FLAG_TYPE(op) == SLJIT_ATOMIC_STORED);
+
+ if (GET_OPCODE(op) == SLJIT_MOV_U8 || GET_OPCODE(op) == SLJIT_MOV_U16) {
+ /* Only SLJIT_32, SLJIT_ATOMIC_STORED are allowed. */
+ CHECK_ARGUMENT(!(op & SLJIT_SET_Z));
+ } else {
+ /* Only SLJIT_ATOMIC_STORED is allowed. */
+ CHECK_ARGUMENT(!(op & (SLJIT_32 | SLJIT_SET_Z)));
+ }
+
+ compiler->last_flags = GET_FLAG_TYPE(op) | (op & SLJIT_32);
+#endif /* SLJIT_ARGUMENT_CHECKS */
+#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
+ if (SLJIT_UNLIKELY(!!compiler->verbose)) {
+ fprintf(compiler->verbose, " atomic_store%s%s%s ", !(op & SLJIT_32) ? "" : "32",
+ op1_types[GET_OPCODE(op) - SLJIT_OP1_BASE], !(op & VARIABLE_FLAG_MASK) ? "" : ".stored");
+ sljit_verbose_reg(compiler, src_reg);
+ fprintf(compiler->verbose, ", [");
+ sljit_verbose_reg(compiler, mem_reg);
+ fprintf(compiler->verbose, "], ");
+ sljit_verbose_reg(compiler, temp_reg);
+ fprintf(compiler->verbose, "\n");
+ }
+#endif /* SLJIT_VERBOSE */
+ CHECK_RETURN_OK;
+}
+
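The two check functions above fix the shape of the new atomic primitives: every operand of sljit_emit_atomic_load and sljit_emit_atomic_store must be a plain register, sign-extending loads are excluded, and the store may only request the SLJIT_ATOMIC_STORED flag. A hedged sketch of the load-linked / store-conditional style retry loop these constraints imply (public sljit helpers; treat the exact sequence as illustrative only):

    /* atomically increment the 32-bit word at [R1]; R2 is a scratch register */
    struct sljit_label *retry = sljit_emit_label(compiler);
    sljit_emit_atomic_load(compiler, SLJIT_MOV32, SLJIT_R0, SLJIT_R1);
    sljit_emit_op2(compiler, SLJIT_ADD32, SLJIT_R0, 0, SLJIT_R0, 0, SLJIT_IMM, 1);
    sljit_emit_atomic_store(compiler, SLJIT_MOV32 | SLJIT_SET(SLJIT_ATOMIC_STORED),
        SLJIT_R0, SLJIT_R1, SLJIT_R2);
    sljit_set_label(sljit_emit_jump(compiler, SLJIT_ATOMIC_NOT_STORED), retry);
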
static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_op2(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 unset,
sljit_s32 dst, sljit_sw dstw,
sljit_s32 src1, sljit_sw src1w,
@@ -1460,29 +1707,61 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_op2(struct sljit_compiler
CHECK_RETURN_OK;
}
-static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_shift_into(struct sljit_compiler *compiler, sljit_s32 op,
- sljit_s32 src_dst,
+static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_op2r(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst_reg,
sljit_s32 src1, sljit_sw src1w,
sljit_s32 src2, sljit_sw src2w)
{
#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
+ CHECK_ARGUMENT((op | SLJIT_32) == SLJIT_MULADD32);
+ CHECK_ARGUMENT(FUNCTION_CHECK_IS_REG(dst_reg));
+ FUNCTION_CHECK_SRC(src1, src1w);
+ FUNCTION_CHECK_SRC(src2, src2w);
+ compiler->last_flags = 0;
+#endif
+#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
+ if (SLJIT_UNLIKELY(!!compiler->verbose)) {
+ fprintf(compiler->verbose, " %s%s ", op2r_names[GET_OPCODE(op) - SLJIT_OP2R_BASE], !(op & SLJIT_32) ? "" : "32");
+
+ sljit_verbose_reg(compiler, dst_reg);
+ fprintf(compiler->verbose, ", ");
+ sljit_verbose_param(compiler, src1, src1w);
+ fprintf(compiler->verbose, ", ");
+ sljit_verbose_param(compiler, src2, src2w);
+ fprintf(compiler->verbose, "\n");
+ }
+#endif
+ CHECK_RETURN_OK;
+}
+
+static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_shift_into(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst_reg,
+ sljit_s32 src1_reg,
+ sljit_s32 src2_reg,
+ sljit_s32 src3, sljit_sw src3w)
+{
+#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
CHECK_ARGUMENT(GET_OPCODE(op) == SLJIT_SHL || GET_OPCODE(op) == SLJIT_LSHR
|| GET_OPCODE(op) == SLJIT_MSHL || GET_OPCODE(op) == SLJIT_MLSHR);
CHECK_ARGUMENT((op & ~(0xff | SLJIT_32 | SLJIT_SHIFT_INTO_NON_ZERO)) == 0);
- CHECK_ARGUMENT(FUNCTION_CHECK_IS_REG(src_dst));
- FUNCTION_CHECK_SRC(src1, src1w);
- FUNCTION_CHECK_SRC(src2, src2w);
+ CHECK_ARGUMENT(FUNCTION_CHECK_IS_REG(dst_reg));
+ CHECK_ARGUMENT(FUNCTION_CHECK_IS_REG(src1_reg));
+ CHECK_ARGUMENT(FUNCTION_CHECK_IS_REG(src2_reg));
+ FUNCTION_CHECK_SRC(src3, src3w);
+ CHECK_ARGUMENT(dst_reg != src2_reg);
#endif
#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
if (SLJIT_UNLIKELY(!!compiler->verbose)) {
fprintf(compiler->verbose, " %s%s.into%s ", op2_names[GET_OPCODE(op) - SLJIT_OP2_BASE], !(op & SLJIT_32) ? "" : "32",
(op & SLJIT_SHIFT_INTO_NON_ZERO) ? ".nz" : "");
- sljit_verbose_reg(compiler, src_dst);
+ sljit_verbose_reg(compiler, dst_reg);
fprintf(compiler->verbose, ", ");
- sljit_verbose_param(compiler, src1, src1w);
+ sljit_verbose_reg(compiler, src1_reg);
fprintf(compiler->verbose, ", ");
- sljit_verbose_param(compiler, src2, src2w);
+ sljit_verbose_reg(compiler, src2_reg);
+ fprintf(compiler->verbose, ", ");
+ sljit_verbose_param(compiler, src3, src3w);
fprintf(compiler->verbose, "\n");
}
#endif
@@ -1496,19 +1775,16 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_op_src(struct sljit_compi
CHECK_ARGUMENT(op >= SLJIT_FAST_RETURN && op <= SLJIT_PREFETCH_ONCE);
FUNCTION_CHECK_SRC(src, srcw);
- if (op == SLJIT_FAST_RETURN || op == SLJIT_SKIP_FRAMES_BEFORE_FAST_RETURN)
- {
+ if (op == SLJIT_FAST_RETURN || op == SLJIT_SKIP_FRAMES_BEFORE_FAST_RETURN) {
CHECK_ARGUMENT(src != SLJIT_IMM);
compiler->last_flags = 0;
- }
- else if (op >= SLJIT_PREFETCH_L1 && op <= SLJIT_PREFETCH_ONCE)
- {
+ } else if (op >= SLJIT_PREFETCH_L1 && op <= SLJIT_PREFETCH_ONCE) {
CHECK_ARGUMENT(src & SLJIT_MEM);
}
#endif
#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
if (SLJIT_UNLIKELY(!!compiler->verbose)) {
- fprintf(compiler->verbose, " %s ", op_src_names[op - SLJIT_OP_SRC_BASE]);
+ fprintf(compiler->verbose, " %s ", op_src_dst_names[op - SLJIT_OP_SRC_DST_BASE]);
sljit_verbose_param(compiler, src, srcw);
fprintf(compiler->verbose, "\n");
}
@@ -1516,20 +1792,39 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_op_src(struct sljit_compi
CHECK_RETURN_OK;
}
-static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_get_register_index(sljit_s32 reg)
+static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_op_dst(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw)
{
- SLJIT_UNUSED_ARG(reg);
#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
- CHECK_ARGUMENT(reg > 0 && reg <= SLJIT_NUMBER_OF_REGISTERS);
+ CHECK_ARGUMENT(op >= SLJIT_FAST_ENTER && op <= SLJIT_GET_RETURN_ADDRESS);
+ FUNCTION_CHECK_DST(dst, dstw);
+
+ if (op == SLJIT_FAST_ENTER)
+ compiler->last_flags = 0;
+#endif
+#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
+ if (SLJIT_UNLIKELY(!!compiler->verbose)) {
+ fprintf(compiler->verbose, " %s ", op_src_dst_names[op - SLJIT_OP_SRC_DST_BASE]);
+ sljit_verbose_param(compiler, dst, dstw);
+ fprintf(compiler->verbose, "\n");
+ }
#endif
CHECK_RETURN_OK;
}
-static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_get_float_register_index(sljit_s32 reg)
+static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_get_register_index(sljit_s32 type, sljit_s32 reg)
{
+ SLJIT_UNUSED_ARG(type);
SLJIT_UNUSED_ARG(reg);
#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
- CHECK_ARGUMENT(reg > 0 && reg <= SLJIT_NUMBER_OF_FLOAT_REGISTERS);
+ if (type == SLJIT_GP_REGISTER) {
+ CHECK_ARGUMENT((reg > 0 && reg <= SLJIT_NUMBER_OF_REGISTERS)
+ || (reg >= SLJIT_TMP_REGISTER_BASE && reg < (SLJIT_TMP_REGISTER_BASE + SLJIT_NUMBER_OF_TEMPORARY_REGISTERS)));
+ } else {
+ CHECK_ARGUMENT(type == SLJIT_FLOAT_REGISTER || ((type >> 12) == 0 || ((type >> 12) >= 3 && (type >> 12) <= 6)));
+ CHECK_ARGUMENT((reg > 0 && reg <= SLJIT_NUMBER_OF_FLOAT_REGISTERS)
+ || (reg >= SLJIT_TMP_FREGISTER_BASE && reg < (SLJIT_TMP_FREGISTER_BASE + SLJIT_NUMBER_OF_TEMPORARY_FLOAT_REGISTERS)));
+ }
#endif
CHECK_RETURN_OK;
}
@@ -1583,8 +1878,8 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fop1(struct sljit_compile
CHECK_ARGUMENT(sljit_has_cpu_feature(SLJIT_HAS_FPU));
CHECK_ARGUMENT(GET_OPCODE(op) >= SLJIT_MOV_F64 && GET_OPCODE(op) <= SLJIT_ABS_F64);
CHECK_ARGUMENT(!(op & (SLJIT_SET_Z | VARIABLE_FLAG_MASK)));
- FUNCTION_FCHECK(src, srcw);
- FUNCTION_FCHECK(dst, dstw);
+ FUNCTION_FCHECK(src, srcw, op & SLJIT_32);
+ FUNCTION_FCHECK(dst, dstw, op & SLJIT_32);
#endif
#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
if (SLJIT_UNLIKELY(!!compiler->verbose)) {
@@ -1623,8 +1918,8 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fop1_cmp(struct sljit_com
CHECK_ARGUMENT(!(op & SLJIT_SET_Z));
CHECK_ARGUMENT((op & VARIABLE_FLAG_MASK)
|| (GET_FLAG_TYPE(op) >= SLJIT_F_EQUAL && GET_FLAG_TYPE(op) <= SLJIT_ORDERED_LESS_EQUAL));
- FUNCTION_FCHECK(src1, src1w);
- FUNCTION_FCHECK(src2, src2w);
+ FUNCTION_FCHECK(src1, src1w, op & SLJIT_32);
+ FUNCTION_FCHECK(src2, src2w, op & SLJIT_32);
#endif
#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
if (SLJIT_UNLIKELY(!!compiler->verbose)) {
@@ -1653,15 +1948,14 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fop1_conv_sw_from_f64(str
#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
CHECK_ARGUMENT(sljit_has_cpu_feature(SLJIT_HAS_FPU));
- CHECK_ARGUMENT(GET_OPCODE(op) >= SLJIT_CONV_SW_FROM_F64 && GET_OPCODE(op) <= SLJIT_CONV_S32_FROM_F64);
CHECK_ARGUMENT(!(op & (SLJIT_SET_Z | VARIABLE_FLAG_MASK)));
- FUNCTION_FCHECK(src, srcw);
+ FUNCTION_FCHECK(src, srcw, op & SLJIT_32);
FUNCTION_CHECK_DST(dst, dstw);
#endif
#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
if (SLJIT_UNLIKELY(!!compiler->verbose)) {
fprintf(compiler->verbose, " %s%s.from%s ", fop1_names[GET_OPCODE(op) - SLJIT_FOP1_BASE],
- (GET_OPCODE(op) == SLJIT_CONV_S32_FROM_F64) ? ".s32" : ".sw",
+ fop1_conv_types[GET_OPCODE(op) - SLJIT_CONV_SW_FROM_F64],
(op & SLJIT_32) ? ".f32" : ".f64");
sljit_verbose_param(compiler, dst, dstw);
fprintf(compiler->verbose, ", ");
@@ -1672,7 +1966,7 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fop1_conv_sw_from_f64(str
CHECK_RETURN_OK;
}
-static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fop1_conv_f64_from_sw(struct sljit_compiler *compiler, sljit_s32 op,
+static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fop1_conv_f64_from_w(struct sljit_compiler *compiler, sljit_s32 op,
sljit_s32 dst, sljit_sw dstw,
sljit_s32 src, sljit_sw srcw)
{
@@ -1683,16 +1977,15 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fop1_conv_f64_from_sw(str
#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
CHECK_ARGUMENT(sljit_has_cpu_feature(SLJIT_HAS_FPU));
- CHECK_ARGUMENT(GET_OPCODE(op) >= SLJIT_CONV_F64_FROM_SW && GET_OPCODE(op) <= SLJIT_CONV_F64_FROM_S32);
CHECK_ARGUMENT(!(op & (SLJIT_SET_Z | VARIABLE_FLAG_MASK)));
FUNCTION_CHECK_SRC(src, srcw);
- FUNCTION_FCHECK(dst, dstw);
+ FUNCTION_FCHECK(dst, dstw, op & SLJIT_32);
#endif
#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
if (SLJIT_UNLIKELY(!!compiler->verbose)) {
- fprintf(compiler->verbose, " %s%s.from%s ", fop1_names[GET_OPCODE(op) - SLJIT_FOP1_BASE],
+ fprintf(compiler->verbose, " %s%s.from.%s ", fop1_names[GET_OPCODE(op) - SLJIT_FOP1_BASE],
(op & SLJIT_32) ? ".f32" : ".f64",
- (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_S32) ? ".s32" : ".sw");
+ fop1_conv_types[GET_OPCODE(op) - SLJIT_CONV_SW_FROM_F64]);
sljit_verbose_fparam(compiler, dst, dstw);
fprintf(compiler->verbose, ", ");
sljit_verbose_param(compiler, src, srcw);
@@ -1707,13 +2000,18 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fop2(struct sljit_compile
sljit_s32 src1, sljit_sw src1w,
sljit_s32 src2, sljit_sw src2w)
{
+ if (SLJIT_UNLIKELY(compiler->skip_checks)) {
+ compiler->skip_checks = 0;
+ CHECK_RETURN_OK;
+ }
+
#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
CHECK_ARGUMENT(sljit_has_cpu_feature(SLJIT_HAS_FPU));
CHECK_ARGUMENT(GET_OPCODE(op) >= SLJIT_ADD_F64 && GET_OPCODE(op) <= SLJIT_DIV_F64);
CHECK_ARGUMENT(!(op & (SLJIT_SET_Z | VARIABLE_FLAG_MASK)));
- FUNCTION_FCHECK(src1, src1w);
- FUNCTION_FCHECK(src2, src2w);
- FUNCTION_FCHECK(dst, dstw);
+ FUNCTION_FCHECK(src1, src1w, op & SLJIT_32);
+ FUNCTION_FCHECK(src2, src2w, op & SLJIT_32);
+ FUNCTION_FCHECK(dst, dstw, op & SLJIT_32);
#endif
#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
if (SLJIT_UNLIKELY(!!compiler->verbose)) {
@@ -1729,6 +2027,138 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fop2(struct sljit_compile
CHECK_RETURN_OK;
}
+static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fop2r(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst_freg,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
+{
+#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
+ CHECK_ARGUMENT(sljit_has_cpu_feature(SLJIT_HAS_FPU));
+ CHECK_ARGUMENT(GET_OPCODE(op) == SLJIT_COPYSIGN_F64);
+ FUNCTION_FCHECK(src1, src1w, op & SLJIT_32);
+ FUNCTION_FCHECK(src2, src2w, op & SLJIT_32);
+ CHECK_ARGUMENT(FUNCTION_CHECK_IS_FREG(dst_freg, op & SLJIT_32));
+#endif
+#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
+ if (SLJIT_UNLIKELY(!!compiler->verbose)) {
+ fprintf(compiler->verbose, " %s%s ", fop2r_names[GET_OPCODE(op) - SLJIT_FOP2R_BASE], (op & SLJIT_32) ? ".f32" : ".f64");
+ sljit_verbose_freg(compiler, dst_freg);
+ fprintf(compiler->verbose, ", ");
+ sljit_verbose_fparam(compiler, src1, src1w);
+ fprintf(compiler->verbose, ", ");
+ sljit_verbose_fparam(compiler, src2, src2w);
+ fprintf(compiler->verbose, "\n");
+ }
+#endif
+ CHECK_RETURN_OK;
+}
+
+static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fset32(struct sljit_compiler *compiler,
+ sljit_s32 freg, sljit_f32 value)
+{
+ SLJIT_UNUSED_ARG(value);
+
+ if (SLJIT_UNLIKELY(compiler->skip_checks)) {
+ compiler->skip_checks = 0;
+ CHECK_RETURN_OK;
+ }
+
+#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
+ CHECK_ARGUMENT(sljit_has_cpu_feature(SLJIT_HAS_FPU));
+ CHECK_ARGUMENT(FUNCTION_CHECK_IS_FREG(freg, 1));
+#endif
+#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
+ if (SLJIT_UNLIKELY(!!compiler->verbose)) {
+ fprintf(compiler->verbose, " fset32 ");
+ sljit_verbose_freg(compiler, freg);
+ fprintf(compiler->verbose, ", %f\n", value);
+ }
+#endif
+ CHECK_RETURN_OK;
+}
+
+static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fset64(struct sljit_compiler *compiler,
+ sljit_s32 freg, sljit_f64 value)
+{
+ SLJIT_UNUSED_ARG(value);
+
+ if (SLJIT_UNLIKELY(compiler->skip_checks)) {
+ compiler->skip_checks = 0;
+ CHECK_RETURN_OK;
+ }
+
+#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
+ CHECK_ARGUMENT(sljit_has_cpu_feature(SLJIT_HAS_FPU));
+ CHECK_ARGUMENT(FUNCTION_CHECK_IS_FREG(freg, 0));
+#endif
+#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
+ if (SLJIT_UNLIKELY(!!compiler->verbose)) {
+ fprintf(compiler->verbose, " fset64 ");
+ sljit_verbose_freg(compiler, freg);
+ fprintf(compiler->verbose, ", %f\n", value);
+ }
+#endif
+ CHECK_RETURN_OK;
+}
+
+static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fcopy(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 freg, sljit_s32 reg)
+{
+#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
+ CHECK_ARGUMENT(sljit_has_cpu_feature(SLJIT_HAS_FPU));
+ CHECK_ARGUMENT(GET_OPCODE(op) >= SLJIT_COPY_TO_F64 && GET_OPCODE(op) <= SLJIT_COPY_FROM_F64);
+ CHECK_ARGUMENT(!(op & (SLJIT_SET_Z | VARIABLE_FLAG_MASK)));
+ CHECK_ARGUMENT(FUNCTION_CHECK_IS_FREG(freg, op & SLJIT_32));
+
+#if (defined SLJIT_64BIT_ARCHITECTURE && SLJIT_64BIT_ARCHITECTURE)
+ CHECK_ARGUMENT(FUNCTION_CHECK_IS_REG(reg));
+#else /* !SLJIT_64BIT_ARCHITECTURE */
+ switch (op) {
+ case SLJIT_COPY32_TO_F32:
+ case SLJIT_COPY32_FROM_F32:
+ CHECK_ARGUMENT(FUNCTION_CHECK_IS_REG(reg));
+ break;
+ case SLJIT_COPY_TO_F64:
+ case SLJIT_COPY_FROM_F64:
+ if (reg & REG_PAIR_MASK) {
+ CHECK_ARGUMENT(FUNCTION_CHECK_IS_REG(REG_PAIR_FIRST(reg)));
+ CHECK_ARGUMENT(FUNCTION_CHECK_IS_REG(REG_PAIR_SECOND(reg)));
+
+ if (op == SLJIT_COPY_TO_F64)
+ break;
+
+ CHECK_ARGUMENT(REG_PAIR_FIRST(reg) != REG_PAIR_SECOND(reg));
+ break;
+ }
+
+ CHECK_ARGUMENT(FUNCTION_CHECK_IS_REG(reg));
+ break;
+ }
+#endif /* SLJIT_64BIT_ARCHITECTURE */
+#endif
+#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
+ if (SLJIT_UNLIKELY(!!compiler->verbose)) {
+ fprintf(compiler->verbose, " copy%s_%s_f%s ", (op & SLJIT_32) ? "32" : "",
+ GET_OPCODE(op) == SLJIT_COPY_TO_F64 ? "to" : "from", (op & SLJIT_32) ? "32" : "64");
+
+ sljit_verbose_freg(compiler, freg);
+
+ if (reg & REG_PAIR_MASK) {
+ fprintf(compiler->verbose, ", {");
+ sljit_verbose_reg(compiler, REG_PAIR_FIRST(reg));
+ fprintf(compiler->verbose, ", ");
+ sljit_verbose_reg(compiler, REG_PAIR_SECOND(reg));
+ fprintf(compiler->verbose, "}\n");
+ } else {
+ fprintf(compiler->verbose, ", ");
+ sljit_verbose_reg(compiler, reg);
+ fprintf(compiler->verbose, "\n");
+ }
+ }
+#endif
+ CHECK_RETURN_OK;
+}
+
static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_label(struct sljit_compiler *compiler)
{
SLJIT_UNUSED_ARG(compiler);
@@ -1753,7 +2183,7 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_label(struct sljit_compil
#if (defined SLJIT_CONFIG_X86 && SLJIT_CONFIG_X86) \
|| (defined SLJIT_CONFIG_ARM && SLJIT_CONFIG_ARM)
#define CHECK_UNORDERED(type, last_flags) \
- ((((type) & 0xff) == SLJIT_UNORDERED || ((type) & 0xff) == SLJIT_ORDERED) && \
+ ((((type) & 0xfe) == SLJIT_ORDERED) && \
((last_flags) & 0xff) >= SLJIT_UNORDERED && ((last_flags) & 0xff) <= SLJIT_ORDERED_LESS_EQUAL)
#else
#define CHECK_UNORDERED(type, last_flags) 0
@@ -1775,11 +2205,10 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_jump(struct sljit_compile
if ((type & 0xff) <= SLJIT_NOT_ZERO)
CHECK_ARGUMENT(compiler->last_flags & SLJIT_SET_Z);
else if ((compiler->last_flags & 0xff) == SLJIT_CARRY) {
- CHECK_ARGUMENT((type & 0xff) == SLJIT_CARRY || (type & 0xff) == SLJIT_NOT_CARRY);
+ CHECK_ARGUMENT((type & 0xfe) == SLJIT_CARRY);
compiler->last_flags = 0;
} else
- CHECK_ARGUMENT((type & 0xff) == (compiler->last_flags & 0xff)
- || ((type & 0xff) == SLJIT_NOT_OVERFLOW && (compiler->last_flags & 0xff) == SLJIT_OVERFLOW)
+ CHECK_ARGUMENT((type & 0xfe) == (compiler->last_flags & 0xff)
|| CHECK_UNORDERED(type, compiler->last_flags));
}
#endif
@@ -1863,10 +2292,9 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fcmp(struct sljit_compile
#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
CHECK_ARGUMENT(sljit_has_cpu_feature(SLJIT_HAS_FPU));
CHECK_ARGUMENT(!(type & ~(0xff | SLJIT_REWRITABLE_JUMP | SLJIT_32)));
- CHECK_ARGUMENT((type & 0xff) >= SLJIT_F_EQUAL && (type & 0xff) <= SLJIT_ORDERED_LESS_EQUAL
- && ((type & 0xff) <= SLJIT_ORDERED || sljit_cmp_info(type & 0xff)));
- FUNCTION_FCHECK(src1, src1w);
- FUNCTION_FCHECK(src2, src2w);
+ CHECK_ARGUMENT((type & 0xff) >= SLJIT_F_EQUAL && (type & 0xff) <= SLJIT_ORDERED_LESS_EQUAL);
+ FUNCTION_FCHECK(src1, src1w, type & SLJIT_32);
+ FUNCTION_FCHECK(src2, src2w, type & SLJIT_32);
compiler->last_flags = 0;
#endif
#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
@@ -1961,9 +2389,7 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_op_flags(struct sljit_com
if (type <= SLJIT_NOT_ZERO)
CHECK_ARGUMENT(compiler->last_flags & SLJIT_SET_Z);
else
- CHECK_ARGUMENT(type == (compiler->last_flags & 0xff)
- || (type == SLJIT_NOT_CARRY && (compiler->last_flags & 0xff) == SLJIT_CARRY)
- || (type == SLJIT_NOT_OVERFLOW && (compiler->last_flags & 0xff) == SLJIT_OVERFLOW)
+ CHECK_ARGUMENT((type & 0xfe) == (compiler->last_flags & 0xff)
|| CHECK_UNORDERED(type, compiler->last_flags));
FUNCTION_CHECK_DST(dst, dstw);
@@ -1975,7 +2401,7 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_op_flags(struct sljit_com
if (SLJIT_UNLIKELY(!!compiler->verbose)) {
fprintf(compiler->verbose, " flags.%s%s%s ",
GET_OPCODE(op) < SLJIT_OP2_BASE ? "mov" : op2_names[GET_OPCODE(op) - SLJIT_OP2_BASE],
- GET_OPCODE(op) < SLJIT_OP2_BASE ? op1_names[GET_OPCODE(op) - SLJIT_OP1_BASE] : ((op & SLJIT_32) ? "32" : ""),
+ GET_OPCODE(op) < SLJIT_OP2_BASE ? op1_types[GET_OPCODE(op) - SLJIT_OP1_BASE] : ((op & SLJIT_32) ? "32" : ""),
!(op & SLJIT_SET_Z) ? "" : ".z");
sljit_verbose_param(compiler, dst, dstw);
fprintf(compiler->verbose, ", %s\n", jump_names[type]);
@@ -1984,9 +2410,10 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_op_flags(struct sljit_com
CHECK_RETURN_OK;
}
-static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_cmov(struct sljit_compiler *compiler, sljit_s32 type,
+static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_select(struct sljit_compiler *compiler, sljit_s32 type,
sljit_s32 dst_reg,
- sljit_s32 src, sljit_sw srcw)
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2_reg)
{
#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
sljit_s32 cond = type & ~SLJIT_32;
@@ -1995,27 +2422,68 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_cmov(struct sljit_compile
CHECK_ARGUMENT(compiler->scratches != -1 && compiler->saveds != -1);
CHECK_ARGUMENT(FUNCTION_CHECK_IS_REG(dst_reg));
- if (src != SLJIT_IMM) {
- CHECK_ARGUMENT(FUNCTION_CHECK_IS_REG(src));
- CHECK_ARGUMENT(srcw == 0);
- }
+ FUNCTION_CHECK_SRC(src1, src1w);
+ CHECK_ARGUMENT(FUNCTION_CHECK_IS_REG(src2_reg));
if (cond <= SLJIT_NOT_ZERO)
CHECK_ARGUMENT(compiler->last_flags & SLJIT_SET_Z);
- else
- CHECK_ARGUMENT(cond == (compiler->last_flags & 0xff)
- || (cond == SLJIT_NOT_CARRY && (compiler->last_flags & 0xff) == SLJIT_CARRY)
- || (cond == SLJIT_NOT_OVERFLOW && (compiler->last_flags & 0xff) == SLJIT_OVERFLOW)
+ else if ((compiler->last_flags & 0xff) == SLJIT_CARRY) {
+ CHECK_ARGUMENT((type & 0xfe) == SLJIT_CARRY);
+ compiler->last_flags = 0;
+ } else
+ CHECK_ARGUMENT((cond & 0xfe) == (compiler->last_flags & 0xff)
|| CHECK_UNORDERED(cond, compiler->last_flags));
#endif
#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
if (SLJIT_UNLIKELY(!!compiler->verbose)) {
- fprintf(compiler->verbose, " cmov%s %s, ",
+ fprintf(compiler->verbose, " select%s %s, ",
!(type & SLJIT_32) ? "" : "32",
jump_names[type & ~SLJIT_32]);
sljit_verbose_reg(compiler, dst_reg);
fprintf(compiler->verbose, ", ");
- sljit_verbose_param(compiler, src, srcw);
+ sljit_verbose_param(compiler, src1, src1w);
+ fprintf(compiler->verbose, ", ");
+ sljit_verbose_reg(compiler, src2_reg);
+ fprintf(compiler->verbose, "\n");
+ }
+#endif
+ CHECK_RETURN_OK;
+}
+
+static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fselect(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 dst_freg,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2_freg)
+{
+#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
+ sljit_s32 cond = type & ~SLJIT_32;
+
+ CHECK_ARGUMENT(cond >= SLJIT_EQUAL && cond <= SLJIT_ORDERED_LESS_EQUAL);
+
+ CHECK_ARGUMENT(compiler->fscratches != -1 && compiler->fsaveds != -1);
+ CHECK_ARGUMENT(FUNCTION_CHECK_IS_FREG(dst_freg, type & SLJIT_32));
+ FUNCTION_FCHECK(src1, src1w, type & SLJIT_32);
+ CHECK_ARGUMENT(FUNCTION_CHECK_IS_FREG(src2_freg, type & SLJIT_32));
+
+ if (cond <= SLJIT_NOT_ZERO)
+ CHECK_ARGUMENT(compiler->last_flags & SLJIT_SET_Z);
+ else if ((compiler->last_flags & 0xff) == SLJIT_CARRY) {
+ CHECK_ARGUMENT((type & 0xfe) == SLJIT_CARRY);
+ compiler->last_flags = 0;
+ } else
+ CHECK_ARGUMENT((cond & 0xfe) == (compiler->last_flags & 0xff)
+ || CHECK_UNORDERED(cond, compiler->last_flags));
+#endif
+#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
+ if (SLJIT_UNLIKELY(!!compiler->verbose)) {
+ fprintf(compiler->verbose, " fselect%s %s, ",
+ !(type & SLJIT_32) ? "" : "32",
+ jump_names[type & ~SLJIT_32]);
+ sljit_verbose_freg(compiler, dst_freg);
+ fprintf(compiler->verbose, ", ");
+ sljit_verbose_fparam(compiler, src1, src1w);
+ fprintf(compiler->verbose, ", ");
+ sljit_verbose_freg(compiler, src2_freg);
fprintf(compiler->verbose, "\n");
}
#endif
@@ -2026,33 +2494,35 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_mem(struct sljit_compiler
sljit_s32 reg,
sljit_s32 mem, sljit_sw memw)
{
+#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
+ sljit_s32 allowed_flags;
+#endif /* SLJIT_ARGUMENT_CHECKS */
+
if (SLJIT_UNLIKELY(compiler->skip_checks)) {
compiler->skip_checks = 0;
CHECK_RETURN_OK;
}
#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
- sljit_s32 allowed_flags;
-
if (type & SLJIT_MEM_UNALIGNED) {
- CHECK_ARGUMENT(!(type & (SLJIT_MEM_UNALIGNED_16 | SLJIT_MEM_UNALIGNED_32)));
- } else if (type & SLJIT_MEM_UNALIGNED_16) {
- CHECK_ARGUMENT(!(type & SLJIT_MEM_UNALIGNED_32));
+ CHECK_ARGUMENT(!(type & (SLJIT_MEM_ALIGNED_16 | SLJIT_MEM_ALIGNED_32)));
+ } else if (type & SLJIT_MEM_ALIGNED_16) {
+ CHECK_ARGUMENT(!(type & SLJIT_MEM_ALIGNED_32));
} else {
- CHECK_ARGUMENT((reg & REG_PAIR_MASK) || (type & SLJIT_MEM_UNALIGNED_32));
+ CHECK_ARGUMENT((reg & REG_PAIR_MASK) || (type & SLJIT_MEM_ALIGNED_32));
}
allowed_flags = SLJIT_MEM_UNALIGNED;
switch (type & 0xff) {
+ case SLJIT_MOV_P:
+ case SLJIT_MOV:
+ allowed_flags |= SLJIT_MEM_ALIGNED_32;
+ /* fallthrough */
case SLJIT_MOV_U32:
case SLJIT_MOV_S32:
case SLJIT_MOV32:
- allowed_flags = SLJIT_MEM_UNALIGNED | SLJIT_MEM_UNALIGNED_16;
- break;
- case SLJIT_MOV:
- case SLJIT_MOV_P:
- allowed_flags = SLJIT_MEM_UNALIGNED | SLJIT_MEM_UNALIGNED_16 | SLJIT_MEM_UNALIGNED_32;
+ allowed_flags |= SLJIT_MEM_ALIGNED_16;
break;
}
@@ -2079,15 +2549,14 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_mem(struct sljit_compiler
else
fprintf(compiler->verbose, " %s%s%s",
(type & SLJIT_MEM_STORE) ? "store" : "load",
- !(type & SLJIT_32) ? "" : "32",
- op1_names[(type & 0xff) - SLJIT_OP1_BASE]);
+ !(type & SLJIT_32) ? "" : "32", op1_types[(type & 0xff) - SLJIT_OP1_BASE]);
if (type & SLJIT_MEM_UNALIGNED)
- printf(".un");
- else if (type & SLJIT_MEM_UNALIGNED_16)
- printf(".un16");
- else if (type & SLJIT_MEM_UNALIGNED_32)
- printf(".un32");
+ printf(".unal");
+ else if (type & SLJIT_MEM_ALIGNED_16)
+ printf(".al16");
+ else if (type & SLJIT_MEM_ALIGNED_32)
+ printf(".al32");
if (reg & REG_PAIR_MASK) {
fprintf(compiler->verbose, " {");
@@ -2140,7 +2609,7 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_mem_update(struct sljit_c
fprintf(compiler->verbose, " %s%s%s.%s ",
(type & SLJIT_MEM_STORE) ? "store" : "load",
!(type & SLJIT_32) ? "" : "32",
- op1_names[(type & 0xff) - SLJIT_OP1_BASE],
+ op1_types[(type & 0xff) - SLJIT_OP1_BASE],
(type & SLJIT_MEM_POST) ? "post" : "pre");
sljit_verbose_reg(compiler, reg);
@@ -2157,19 +2626,20 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fmem(struct sljit_compile
sljit_s32 mem, sljit_sw memw)
{
#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
+ CHECK_ARGUMENT(sljit_has_cpu_feature(SLJIT_HAS_FPU));
CHECK_ARGUMENT((type & 0xff) == SLJIT_MOV_F64);
if (type & SLJIT_MEM_UNALIGNED) {
- CHECK_ARGUMENT(!(type & (SLJIT_MEM_UNALIGNED_16 | SLJIT_MEM_UNALIGNED_32)));
- } else if (type & SLJIT_MEM_UNALIGNED_16) {
- CHECK_ARGUMENT(!(type & SLJIT_MEM_UNALIGNED_32));
+ CHECK_ARGUMENT(!(type & (SLJIT_MEM_ALIGNED_16 | SLJIT_MEM_ALIGNED_32)));
+ } else if (type & SLJIT_MEM_ALIGNED_16) {
+ CHECK_ARGUMENT(!(type & SLJIT_MEM_ALIGNED_32));
} else {
- CHECK_ARGUMENT(type & SLJIT_MEM_UNALIGNED_32);
+ CHECK_ARGUMENT(type & SLJIT_MEM_ALIGNED_32);
CHECK_ARGUMENT(!(type & SLJIT_32));
}
- CHECK_ARGUMENT(!(type & ~(0xff | SLJIT_32 | SLJIT_MEM_STORE | SLJIT_MEM_UNALIGNED | SLJIT_MEM_UNALIGNED_16 | SLJIT_MEM_UNALIGNED_32)));
- CHECK_ARGUMENT(FUNCTION_CHECK_IS_FREG(freg));
+ CHECK_ARGUMENT(!(type & ~(0xff | SLJIT_32 | SLJIT_MEM_STORE | SLJIT_MEM_UNALIGNED | SLJIT_MEM_ALIGNED_16 | SLJIT_MEM_ALIGNED_32)));
+ CHECK_ARGUMENT(FUNCTION_CHECK_IS_FREG(freg, type & SLJIT_32));
FUNCTION_CHECK_SRC_MEM(mem, memw);
#endif
#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
@@ -2179,11 +2649,11 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fmem(struct sljit_compile
!(type & SLJIT_32) ? "f64" : "f32");
if (type & SLJIT_MEM_UNALIGNED)
- printf(".un");
- else if (type & SLJIT_MEM_UNALIGNED_16)
- printf(".un16");
- else if (type & SLJIT_MEM_UNALIGNED_32)
- printf(".un32");
+ printf(".unal");
+ else if (type & SLJIT_MEM_ALIGNED_16)
+ printf(".al16");
+ else if (type & SLJIT_MEM_ALIGNED_32)
+ printf(".al32");
fprintf(compiler->verbose, " ");
sljit_verbose_freg(compiler, freg);
@@ -2200,10 +2670,11 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fmem_update(struct sljit_
sljit_s32 mem, sljit_sw memw)
{
#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
+ CHECK_ARGUMENT(sljit_has_cpu_feature(SLJIT_HAS_FPU));
CHECK_ARGUMENT((type & 0xff) == SLJIT_MOV_F64);
CHECK_ARGUMENT((type & ~(0xff | SLJIT_32 | SLJIT_MEM_STORE | SLJIT_MEM_SUPP | SLJIT_MEM_POST)) == 0);
FUNCTION_CHECK_SRC_MEM(mem, memw);
- CHECK_ARGUMENT(FUNCTION_CHECK_IS_FREG(freg));
+ CHECK_ARGUMENT(FUNCTION_CHECK_IS_FREG(freg, type & SLJIT_32));
#endif
#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
if (SLJIT_UNLIKELY(!!compiler->verbose)) {
@@ -2226,7 +2697,297 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fmem_update(struct sljit_
}
#endif
CHECK_RETURN_OK;
+}
+static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_simd_mov(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
+ sljit_s32 srcdst, sljit_sw srcdstw)
+{
+#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
+ CHECK_ARGUMENT(sljit_has_cpu_feature(SLJIT_HAS_SIMD));
+ CHECK_ARGUMENT((type & SLJIT_SIMD_TYPE_MASK2(SLJIT_SIMD_STORE)) == 0);
+ CHECK_ARGUMENT(SLJIT_SIMD_CHECK_REG(type));
+ CHECK_ARGUMENT(SLJIT_SIMD_GET_ELEM_SIZE(type) <= SLJIT_SIMD_GET_REG_SIZE(type));
+ CHECK_ARGUMENT(SLJIT_SIMD_GET_ELEM2_SIZE(type) <= (srcdst & SLJIT_MEM) ? SLJIT_SIMD_GET_REG_SIZE(type) : 0);
+ CHECK_ARGUMENT(FUNCTION_CHECK_IS_FREG(freg, 0));
+ FUNCTION_FCHECK(srcdst, srcdstw, 0);
+#endif
+#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
+ if (SLJIT_UNLIKELY(!!compiler->verbose)) {
+ if (type & SLJIT_SIMD_TEST)
+ CHECK_RETURN_OK;
+ if (sljit_emit_simd_mov(compiler, type | SLJIT_SIMD_TEST, freg, srcdst, srcdstw) == SLJIT_ERR_UNSUPPORTED) {
+ fprintf(compiler->verbose, " # simd_mem: unsupported form, no instructions are emitted\n");
+ CHECK_RETURN_OK;
+ }
+
+ fprintf(compiler->verbose, " simd_%s.%d.%s%d",
+ (type & SLJIT_SIMD_STORE) ? "store" : "load",
+ (8 << SLJIT_SIMD_GET_REG_SIZE(type)),
+ (type & SLJIT_SIMD_FLOAT) ? "f" : "",
+ (8 << SLJIT_SIMD_GET_ELEM_SIZE(type)));
+
+ if ((type & 0x3f000000) == SLJIT_SIMD_MEM_UNALIGNED)
+ fprintf(compiler->verbose, ".unal ");
+ else
+ fprintf(compiler->verbose, ".al%d ", (8 << SLJIT_SIMD_GET_ELEM2_SIZE(type)));
+
+ sljit_verbose_freg(compiler, freg);
+ fprintf(compiler->verbose, ", ");
+ sljit_verbose_fparam(compiler, srcdst, srcdstw);
+ fprintf(compiler->verbose, "\n");
+ }
+#endif
+ CHECK_RETURN_OK;
+}
+
+static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_simd_replicate(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
+ sljit_s32 src, sljit_sw srcw)
+{
+#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
+ CHECK_ARGUMENT(sljit_has_cpu_feature(SLJIT_HAS_SIMD));
+ CHECK_ARGUMENT((type & SLJIT_SIMD_TYPE_MASK(0)) == 0);
+ CHECK_ARGUMENT(SLJIT_SIMD_CHECK_REG(type));
+ CHECK_ARGUMENT(SLJIT_SIMD_GET_ELEM_SIZE(type) < SLJIT_SIMD_GET_REG_SIZE(type));
+ CHECK_ARGUMENT(FUNCTION_CHECK_IS_FREG(freg, 0));
+
+ if (type & SLJIT_SIMD_FLOAT) {
+ if (src == SLJIT_IMM) {
+ CHECK_ARGUMENT(srcw == 0);
+ } else {
+ FUNCTION_FCHECK(src, srcw, SLJIT_SIMD_GET_ELEM_SIZE(type) == 2);
+ }
+ } else if (src != SLJIT_IMM) {
+ FUNCTION_CHECK_DST(src, srcw);
+ }
+#endif
+#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
+ if (SLJIT_UNLIKELY(!!compiler->verbose)) {
+ if (type & SLJIT_SIMD_TEST)
+ CHECK_RETURN_OK;
+ if (sljit_emit_simd_replicate(compiler, type | SLJIT_SIMD_TEST, freg, src, srcw) == SLJIT_ERR_UNSUPPORTED) {
+ fprintf(compiler->verbose, " # simd_dup: unsupported form, no instructions are emitted\n");
+ CHECK_RETURN_OK;
+ }
+
+ fprintf(compiler->verbose, " simd_replicate.%d.%s%d ",
+ (8 << SLJIT_SIMD_GET_REG_SIZE(type)),
+ (type & SLJIT_SIMD_FLOAT) ? "f" : "",
+ (8 << SLJIT_SIMD_GET_ELEM_SIZE(type)));
+
+ sljit_verbose_freg(compiler, freg);
+ fprintf(compiler->verbose, ", ");
+ if (type & SLJIT_SIMD_FLOAT)
+ sljit_verbose_fparam(compiler, src, srcw);
+ else
+ sljit_verbose_param(compiler, src, srcw);
+ fprintf(compiler->verbose, "\n");
+ }
+#endif
+ CHECK_RETURN_OK;
+}
+
+static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_simd_lane_mov(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg, sljit_s32 lane_index,
+ sljit_s32 srcdst, sljit_sw srcdstw)
+{
+#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
+ CHECK_ARGUMENT(sljit_has_cpu_feature(SLJIT_HAS_SIMD));
+ CHECK_ARGUMENT((type & SLJIT_SIMD_TYPE_MASK(SLJIT_SIMD_STORE | SLJIT_SIMD_LANE_ZERO | SLJIT_SIMD_LANE_SIGNED | SLJIT_32)) == 0);
+ CHECK_ARGUMENT((type & (SLJIT_SIMD_STORE | SLJIT_SIMD_LANE_ZERO)) != (SLJIT_SIMD_STORE | SLJIT_SIMD_LANE_ZERO));
+ CHECK_ARGUMENT((type & (SLJIT_SIMD_STORE | SLJIT_SIMD_LANE_SIGNED)) != SLJIT_SIMD_LANE_SIGNED);
+ CHECK_ARGUMENT(!(type & SLJIT_SIMD_FLOAT) || !(type & (SLJIT_SIMD_LANE_SIGNED | SLJIT_32)));
+ CHECK_ARGUMENT(SLJIT_SIMD_CHECK_REG(type));
+ CHECK_ARGUMENT(SLJIT_SIMD_GET_ELEM_SIZE(type) < SLJIT_SIMD_GET_REG_SIZE(type));
+ CHECK_ARGUMENT(!(type & SLJIT_32) || SLJIT_SIMD_GET_ELEM_SIZE(type) <= 2);
+ CHECK_ARGUMENT(FUNCTION_CHECK_IS_FREG(freg, 0));
+ CHECK_ARGUMENT(lane_index >= 0 && lane_index < (1 << (SLJIT_SIMD_GET_REG_SIZE(type) - SLJIT_SIMD_GET_ELEM_SIZE(type))));
+
+ if (type & SLJIT_SIMD_FLOAT) {
+ FUNCTION_FCHECK(srcdst, srcdstw, SLJIT_SIMD_GET_ELEM_SIZE(type) == 2);
+ } else if ((type & SLJIT_SIMD_STORE) || srcdst != SLJIT_IMM) {
+ FUNCTION_CHECK_DST(srcdst, srcdstw);
+ }
+#endif
+#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
+ if (SLJIT_UNLIKELY(!!compiler->verbose)) {
+ if (type & SLJIT_SIMD_TEST)
+ CHECK_RETURN_OK;
+ if (sljit_emit_simd_lane_mov(compiler, type | SLJIT_SIMD_TEST, freg, lane_index, srcdst, srcdstw) == SLJIT_ERR_UNSUPPORTED) {
+ fprintf(compiler->verbose, " # simd_move_lane: unsupported form, no instructions are emitted\n");
+ CHECK_RETURN_OK;
+ }
+
+ fprintf(compiler->verbose, " simd_%s_lane%s%s%s.%d.%s%d ",
+ (type & SLJIT_SIMD_STORE) ? "store" : "load",
+ (type & SLJIT_32) ? "32" : "",
+ (type & SLJIT_SIMD_LANE_ZERO) ? "_z" : "",
+ (type & SLJIT_SIMD_LANE_SIGNED) ? "_s" : "",
+ (8 << SLJIT_SIMD_GET_REG_SIZE(type)),
+ (type & SLJIT_SIMD_FLOAT) ? "f" : "",
+ (8 << SLJIT_SIMD_GET_ELEM_SIZE(type)));
+
+ sljit_verbose_freg(compiler, freg);
+ fprintf(compiler->verbose, "[%d], ", lane_index);
+ if (type & SLJIT_SIMD_FLOAT)
+ sljit_verbose_fparam(compiler, srcdst, srcdstw);
+ else
+ sljit_verbose_param(compiler, srcdst, srcdstw);
+ fprintf(compiler->verbose, "\n");
+ }
+#endif
+ CHECK_RETURN_OK;
+}
+
+static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_simd_lane_replicate(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
+ sljit_s32 src, sljit_s32 src_lane_index)
+{
+#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
+ CHECK_ARGUMENT(sljit_has_cpu_feature(SLJIT_HAS_SIMD));
+ CHECK_ARGUMENT((type & SLJIT_SIMD_TYPE_MASK(0)) == 0);
+ CHECK_ARGUMENT(SLJIT_SIMD_CHECK_REG(type));
+ CHECK_ARGUMENT(SLJIT_SIMD_GET_ELEM_SIZE(type) < SLJIT_SIMD_GET_REG_SIZE(type));
+ CHECK_ARGUMENT(FUNCTION_CHECK_IS_FREG(freg, 0));
+ CHECK_ARGUMENT(FUNCTION_CHECK_IS_FREG(src, 0));
+ CHECK_ARGUMENT(src_lane_index >= 0 && src_lane_index < (1 << (SLJIT_SIMD_GET_REG_SIZE(type) - SLJIT_SIMD_GET_ELEM_SIZE(type))));
+#endif
+#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
+ if (SLJIT_UNLIKELY(!!compiler->verbose)) {
+ if (type & SLJIT_SIMD_TEST)
+ CHECK_RETURN_OK;
+ if (sljit_emit_simd_lane_replicate(compiler, type | SLJIT_SIMD_TEST, freg, src, src_lane_index) == SLJIT_ERR_UNSUPPORTED) {
+ fprintf(compiler->verbose, " # simd_lane_replicate: unsupported form, no instructions are emitted\n");
+ CHECK_RETURN_OK;
+ }
+
+ fprintf(compiler->verbose, " simd_lane_replicate.%d.%s%d ",
+ (8 << SLJIT_SIMD_GET_REG_SIZE(type)),
+ (type & SLJIT_SIMD_FLOAT) ? "f" : "",
+ (8 << SLJIT_SIMD_GET_ELEM_SIZE(type)));
+
+ sljit_verbose_freg(compiler, freg);
+ fprintf(compiler->verbose, ", ");
+ sljit_verbose_freg(compiler, src);
+ fprintf(compiler->verbose, "[%d]\n", src_lane_index);
+ }
+#endif
+ CHECK_RETURN_OK;
+}
+
+static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_simd_extend(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
+ sljit_s32 src, sljit_sw srcw)
+{
+#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
+ CHECK_ARGUMENT(sljit_has_cpu_feature(SLJIT_HAS_SIMD));
+ CHECK_ARGUMENT((type & SLJIT_SIMD_TYPE_MASK2(SLJIT_SIMD_EXTEND_SIGNED)) == 0);
+ CHECK_ARGUMENT((type & (SLJIT_SIMD_EXTEND_SIGNED | SLJIT_SIMD_FLOAT)) != (SLJIT_SIMD_EXTEND_SIGNED | SLJIT_SIMD_FLOAT));
+ CHECK_ARGUMENT(SLJIT_SIMD_CHECK_REG(type));
+ CHECK_ARGUMENT(SLJIT_SIMD_GET_ELEM2_SIZE(type) < SLJIT_SIMD_GET_REG_SIZE(type));
+ CHECK_ARGUMENT(SLJIT_SIMD_GET_ELEM_SIZE(type) < SLJIT_SIMD_GET_ELEM2_SIZE(type));
+ CHECK_ARGUMENT(FUNCTION_CHECK_IS_FREG(freg, 0));
+ FUNCTION_FCHECK(src, srcw, SLJIT_SIMD_GET_ELEM_SIZE(type) == 2);
+#endif
+#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
+ if (SLJIT_UNLIKELY(!!compiler->verbose)) {
+ if (type & SLJIT_SIMD_TEST)
+ CHECK_RETURN_OK;
+ if (sljit_emit_simd_extend(compiler, type | SLJIT_SIMD_TEST, freg, src, srcw) == SLJIT_ERR_UNSUPPORTED) {
+ fprintf(compiler->verbose, " # simd_extend: unsupported form, no instructions are emitted\n");
+ CHECK_RETURN_OK;
+ }
+
+ fprintf(compiler->verbose, " simd_load_extend%s.%d.%s%d.%s%d ",
+ (type & SLJIT_SIMD_EXTEND_SIGNED) ? "_s" : "",
+ (8 << SLJIT_SIMD_GET_REG_SIZE(type)),
+ (type & SLJIT_SIMD_FLOAT) ? "f" : "",
+ (8 << SLJIT_SIMD_GET_ELEM2_SIZE(type)),
+ (type & SLJIT_SIMD_FLOAT) ? "f" : "",
+ (8 << SLJIT_SIMD_GET_ELEM_SIZE(type)));
+
+ sljit_verbose_freg(compiler, freg);
+ fprintf(compiler->verbose, ", ");
+ sljit_verbose_fparam(compiler, src, srcw);
+ fprintf(compiler->verbose, "\n");
+ }
+#endif
+ CHECK_RETURN_OK;
+}
+
+static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_simd_sign(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
+ sljit_s32 dst, sljit_sw dstw)
+{
+#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
+ CHECK_ARGUMENT(sljit_has_cpu_feature(SLJIT_HAS_SIMD));
+ CHECK_ARGUMENT((type & SLJIT_SIMD_TYPE_MASK(SLJIT_32)) == SLJIT_SIMD_STORE);
+ CHECK_ARGUMENT(SLJIT_SIMD_CHECK_REG(type));
+ CHECK_ARGUMENT(SLJIT_SIMD_GET_ELEM_SIZE(type) < SLJIT_SIMD_GET_REG_SIZE(type));
+ CHECK_ARGUMENT(FUNCTION_CHECK_IS_FREG(freg, 0));
+ FUNCTION_CHECK_DST(dst, dstw);
+#endif
+#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
+ if (SLJIT_UNLIKELY(!!compiler->verbose)) {
+ if (type & SLJIT_SIMD_TEST)
+ CHECK_RETURN_OK;
+ if (sljit_emit_simd_sign(compiler, type | SLJIT_SIMD_TEST, freg, dst, dstw) == SLJIT_ERR_UNSUPPORTED) {
+ fprintf(compiler->verbose, " # simd_sign: unsupported form, no instructions are emitted\n");
+ CHECK_RETURN_OK;
+ }
+
+ fprintf(compiler->verbose, " simd_store_sign%s.%d.%s%d ",
+ (type & SLJIT_32) ? "32" : "",
+ (8 << SLJIT_SIMD_GET_REG_SIZE(type)),
+ (type & SLJIT_SIMD_FLOAT) ? "f" : "",
+ (8 << SLJIT_SIMD_GET_ELEM_SIZE(type)));
+
+ sljit_verbose_freg(compiler, freg);
+ fprintf(compiler->verbose, ", ");
+ sljit_verbose_param(compiler, dst, dstw);
+ fprintf(compiler->verbose, "\n");
+ }
+#endif
+ CHECK_RETURN_OK;
+}
+
+static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_simd_op2(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 dst_freg, sljit_s32 src1_freg, sljit_s32 src2_freg)
+{
+#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
+ CHECK_ARGUMENT(sljit_has_cpu_feature(SLJIT_HAS_SIMD));
+ CHECK_ARGUMENT((type & SLJIT_SIMD_TYPE_MASK(0)) >= SLJIT_SIMD_OP2_AND && (type & SLJIT_SIMD_TYPE_MASK(0)) <= SLJIT_SIMD_OP2_XOR);
+ CHECK_ARGUMENT(SLJIT_SIMD_CHECK_REG(type));
+ CHECK_ARGUMENT(SLJIT_SIMD_GET_ELEM_SIZE(type) <= SLJIT_SIMD_GET_REG_SIZE(type));
+ CHECK_ARGUMENT(FUNCTION_CHECK_IS_FREG(dst_freg, 0));
+ CHECK_ARGUMENT(FUNCTION_CHECK_IS_FREG(src1_freg, 0));
+ CHECK_ARGUMENT(FUNCTION_CHECK_IS_FREG(src2_freg, 0));
+#endif
+#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
+ if (SLJIT_UNLIKELY(!!compiler->verbose)) {
+ if (type & SLJIT_SIMD_TEST)
+ CHECK_RETURN_OK;
+ if (sljit_emit_simd_op2(compiler, type | SLJIT_SIMD_TEST, dst_freg, src1_freg, src2_freg) == SLJIT_ERR_UNSUPPORTED) {
+ fprintf(compiler->verbose, " # simd_op2: unsupported form, no instructions are emitted\n");
+ CHECK_RETURN_OK;
+ }
+
+ fprintf(compiler->verbose, " simd_%s.%d.%s%d ",
+ simd_op2_names[SLJIT_SIMD_GET_OPCODE(type) - 1],
+ (8 << SLJIT_SIMD_GET_REG_SIZE(type)),
+ (type & SLJIT_SIMD_FLOAT) ? "f" : "",
+ (8 << SLJIT_SIMD_GET_ELEM_SIZE(type)));
+
+ sljit_verbose_freg(compiler, dst_freg);
+ fprintf(compiler->verbose, ", ");
+ sljit_verbose_freg(compiler, src1_freg);
+ fprintf(compiler->verbose, ", ");
+ sljit_verbose_freg(compiler, src2_freg);
+ fprintf(compiler->verbose, "\n");
+ }
+#endif
+ CHECK_RETURN_OK;
}
static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_get_local_base(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw offset)
@@ -2264,14 +3025,14 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_const(struct sljit_compil
CHECK_RETURN_OK;
}
-static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_put_label(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
+static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_mov_addr(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
{
#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
FUNCTION_CHECK_DST(dst, dstw);
#endif
#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
if (SLJIT_UNLIKELY(!!compiler->verbose)) {
- fprintf(compiler->verbose, " put_label ");
+ fprintf(compiler->verbose, " mov_addr ");
sljit_verbose_param(compiler, dst, dstw);
fprintf(compiler->verbose, "\n");
}
@@ -2286,7 +3047,7 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_put_label(struct sljit_co
#endif /* SLJIT_ARGUMENT_CHECKS || SLJIT_VERBOSE */
#define SELECT_FOP1_OPERATION_WITH_CHECKS(compiler, op, dst, dstw, src, srcw) \
- SLJIT_COMPILE_ASSERT(!(SLJIT_CONV_SW_FROM_F64 & 0x1) && !(SLJIT_CONV_F64_FROM_SW & 0x1), \
+ SLJIT_COMPILE_ASSERT(!(SLJIT_CONV_SW_FROM_F64 & 0x1) && !(SLJIT_CONV_F64_FROM_SW & 0x1) && !(SLJIT_CONV_F64_FROM_UW & 0x1), \
invalid_float_opcodes); \
if (GET_OPCODE(op) >= SLJIT_CONV_SW_FROM_F64 && GET_OPCODE(op) <= SLJIT_CMP_F64) { \
if (GET_OPCODE(op) == SLJIT_CMP_F64) { \
@@ -2301,48 +3062,22 @@ static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_put_label(struct sljit_co
ADJUST_LOCAL_OFFSET(src, srcw); \
return sljit_emit_fop1_conv_sw_from_f64(compiler, op, dst, dstw, src, srcw); \
} \
- CHECK(check_sljit_emit_fop1_conv_f64_from_sw(compiler, op, dst, dstw, src, srcw)); \
+ if ((GET_OPCODE(op) | 0x1) == SLJIT_CONV_F64_FROM_S32) { \
+ CHECK(check_sljit_emit_fop1_conv_f64_from_w(compiler, op, dst, dstw, src, srcw)); \
+ ADJUST_LOCAL_OFFSET(dst, dstw); \
+ ADJUST_LOCAL_OFFSET(src, srcw); \
+ return sljit_emit_fop1_conv_f64_from_sw(compiler, op, dst, dstw, src, srcw); \
+ } \
+ CHECK(check_sljit_emit_fop1_conv_f64_from_w(compiler, op, dst, dstw, src, srcw)); \
ADJUST_LOCAL_OFFSET(dst, dstw); \
ADJUST_LOCAL_OFFSET(src, srcw); \
- return sljit_emit_fop1_conv_f64_from_sw(compiler, op, dst, dstw, src, srcw); \
+ return sljit_emit_fop1_conv_f64_from_uw(compiler, op, dst, dstw, src, srcw); \
} \
CHECK(check_sljit_emit_fop1(compiler, op, dst, dstw, src, srcw)); \
ADJUST_LOCAL_OFFSET(dst, dstw); \
ADJUST_LOCAL_OFFSET(src, srcw);
-#if (defined SLJIT_CONFIG_X86 && SLJIT_CONFIG_X86) \
- || (defined SLJIT_CONFIG_PPC && SLJIT_CONFIG_PPC) \
- || ((defined SLJIT_CONFIG_MIPS && SLJIT_CONFIG_MIPS) && !(defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 1 && SLJIT_MIPS_REV < 6)) \
- || (defined SLJIT_CONFIG_RISCV && SLJIT_CONFIG_RISCV) \
- || (defined SLJIT_CONFIG_S390X && SLJIT_CONFIG_S390X)
-
-static SLJIT_INLINE sljit_s32 sljit_emit_cmov_generic(struct sljit_compiler *compiler, sljit_s32 type,
- sljit_s32 dst_reg,
- sljit_s32 src, sljit_sw srcw)
-{
- struct sljit_label *label;
- struct sljit_jump *jump;
- sljit_s32 op = (type & SLJIT_32) ? SLJIT_MOV32 : SLJIT_MOV;
-
- SLJIT_SKIP_CHECKS(compiler);
- jump = sljit_emit_jump(compiler, (type & ~SLJIT_32) ^ 0x1);
- FAIL_IF(!jump);
-
- SLJIT_SKIP_CHECKS(compiler);
- FAIL_IF(sljit_emit_op1(compiler, op, dst_reg, 0, src, srcw));
-
- SLJIT_SKIP_CHECKS(compiler);
- label = sljit_emit_label(compiler);
- FAIL_IF(!label);
-
- sljit_set_label(jump, label);
- return SLJIT_SUCCESS;
-}
-
-#endif
-
-#if (!(defined SLJIT_CONFIG_MIPS && SLJIT_CONFIG_MIPS) || (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 6)) \
- && !(defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
+#if (!(defined SLJIT_CONFIG_MIPS && SLJIT_CONFIG_MIPS) || (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 6))
static sljit_s32 sljit_emit_mem_unaligned(struct sljit_compiler *compiler, sljit_s32 type,
sljit_s32 reg,
@@ -2355,7 +3090,7 @@ static sljit_s32 sljit_emit_mem_unaligned(struct sljit_compiler *compiler, sljit
return sljit_emit_op1(compiler, type & (0xff | SLJIT_32), reg, 0, mem, memw);
}
-#endif /* (!SLJIT_CONFIG_MIPS || SLJIT_MIPS_REV >= 6) && !SLJIT_CONFIG_ARM_V5 */
+#endif /* (!SLJIT_CONFIG_MIPS || SLJIT_MIPS_REV >= 6) */
#if (!(defined SLJIT_CONFIG_MIPS && SLJIT_CONFIG_MIPS) || (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 6)) \
&& !(defined SLJIT_CONFIG_ARM_32 && SLJIT_CONFIG_ARM_32)
@@ -2401,7 +3136,7 @@ static sljit_s32 sljit_emit_fmem_unaligned(struct sljit_compiler *compiler, slji
#if (defined SLJIT_CONFIG_X86 && SLJIT_CONFIG_X86)
# include "sljitNativeX86_common.c"
-#elif (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
+#elif (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
# include "sljitNativeARM_32.c"
#elif (defined SLJIT_CONFIG_ARM_V7 && SLJIT_CONFIG_ARM_V7)
# include "sljitNativeARM_32.c"
@@ -2417,8 +3152,12 @@ static sljit_s32 sljit_emit_fmem_unaligned(struct sljit_compiler *compiler, slji
# include "sljitNativeRISCV_common.c"
#elif (defined SLJIT_CONFIG_S390X && SLJIT_CONFIG_S390X)
# include "sljitNativeS390X.c"
+#elif (defined SLJIT_CONFIG_LOONGARCH && SLJIT_CONFIG_LOONGARCH)
+# include "sljitNativeLOONGARCH_64.c"
#endif
+#include "sljitSerialize.c"
+
static SLJIT_INLINE sljit_s32 emit_mov_before_return(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 src, sljit_sw srcw)
{
#if (defined SLJIT_64BIT_ARCHITECTURE && SLJIT_64BIT_ARCHITECTURE)
@@ -2463,8 +3202,29 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return(struct sljit_compiler *comp
return sljit_emit_return_void(compiler);
}
+#if !(defined SLJIT_CONFIG_X86 && SLJIT_CONFIG_X86) \
+ && !(defined SLJIT_CONFIG_S390X && SLJIT_CONFIG_S390X) \
+ && !(defined(SLJIT_CONFIG_LOONGARCH_64) && SLJIT_CONFIG_LOONGARCH_64)
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2r(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst_freg,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
+{
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_fop2r(compiler, op, dst_freg, src1, src1w, src2, src2w));
+ ADJUST_LOCAL_OFFSET(src1, src1w);
+ ADJUST_LOCAL_OFFSET(src2, src2w);
+
+ SLJIT_SKIP_CHECKS(compiler);
+ return sljit_emit_fop2(compiler, op, dst_freg, 0, src1, src1w, src2, src2w);
+}
+
+#endif /* !SLJIT_CONFIG_X86 && !SLJIT_CONFIG_S390X && !SLJIT_CONFIG_LOONGARCH_64 */
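/* A minimal usage sketch, not part of this patch: SLJIT_COPYSIGN_F64 is the only
   fop2r opcode accepted by the check added earlier in this patch, and fop2r differs
   from fop2 in taking a register-only destination. It assumes "compiler" is a valid
   struct sljit_compiler * and the register choices are arbitrary examples. */
sljit_emit_fop2r(compiler, SLJIT_COPYSIGN_F64, SLJIT_FR0,
	SLJIT_FR1, 0, SLJIT_FR2, 0);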
+
#if !(defined SLJIT_CONFIG_MIPS && SLJIT_CONFIG_MIPS) \
- && !(defined SLJIT_CONFIG_RISCV && SLJIT_CONFIG_RISCV)
+ && !(defined SLJIT_CONFIG_RISCV && SLJIT_CONFIG_RISCV) \
+ && !(defined SLJIT_CONFIG_LOONGARCH && SLJIT_CONFIG_LOONGARCH)
SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_cmp(struct sljit_compiler *compiler, sljit_s32 type,
sljit_s32 src1, sljit_sw src1w,
@@ -2480,18 +3240,18 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_cmp(struct sljit_compiler
condition = type & 0xff;
#if (defined SLJIT_CONFIG_ARM_64 && SLJIT_CONFIG_ARM_64)
if ((condition == SLJIT_EQUAL || condition == SLJIT_NOT_EQUAL)) {
- if ((src1 & SLJIT_IMM) && !src1w) {
+ if (src1 == SLJIT_IMM && !src1w) {
src1 = src2;
src1w = src2w;
src2 = SLJIT_IMM;
src2w = 0;
}
- if ((src2 & SLJIT_IMM) && !src2w)
+ if (src2 == SLJIT_IMM && !src2w)
return emit_cmp_to0(compiler, type, src1, src1w);
}
#endif
- if (SLJIT_UNLIKELY((src1 & SLJIT_IMM) && !(src2 & SLJIT_IMM))) {
+ if (SLJIT_UNLIKELY(src1 == SLJIT_IMM && src2 != SLJIT_IMM)) {
/* Immediate is preferred as second argument by most architectures. */
switch (condition) {
case SLJIT_LESS:
@@ -2532,7 +3292,7 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_cmp(struct sljit_compiler
if (condition <= SLJIT_NOT_ZERO)
flags = SLJIT_SET_Z;
else
- flags = condition << VARIABLE_FLAG_SHIFT;
+ flags = (condition & 0xfe) << VARIABLE_FLAG_SHIFT;
SLJIT_SKIP_CHECKS(compiler);
PTR_FAIL_IF(sljit_emit_op2u(compiler,
@@ -2544,20 +3304,17 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_cmp(struct sljit_compiler
#endif /* !SLJIT_CONFIG_MIPS */
-#if (defined SLJIT_CONFIG_ARM && SLJIT_CONFIG_ARM)
+#if (defined SLJIT_CONFIG_ARM_32 && SLJIT_CONFIG_ARM_32)
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_cmp_info(sljit_s32 type)
{
- if (type < SLJIT_UNORDERED || type > SLJIT_ORDERED_LESS_EQUAL)
- return 0;
-
switch (type) {
case SLJIT_UNORDERED_OR_EQUAL:
case SLJIT_ORDERED_NOT_EQUAL:
- return 0;
+ return 1;
}
- return 1;
+ return 0;
}
#endif /* SLJIT_CONFIG_ARM */
@@ -2570,7 +3327,7 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_fcmp(struct sljit_compile
CHECK_PTR(check_sljit_emit_fcmp(compiler, type, src1, src1w, src2, src2w));
SLJIT_SKIP_CHECKS(compiler);
- sljit_emit_fop1(compiler, SLJIT_CMP_F64 | ((type & 0xff) << VARIABLE_FLAG_SHIFT) | (type & SLJIT_32), src1, src1w, src2, src2w);
+ sljit_emit_fop1(compiler, SLJIT_CMP_F64 | ((type & 0xfe) << VARIABLE_FLAG_SHIFT) | (type & SLJIT_32), src1, src1w, src2, src2w);
SLJIT_SKIP_CHECKS(compiler);
return sljit_emit_jump(compiler, type);
@@ -2630,507 +3387,175 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fmem_update(struct sljit_compiler
#endif /* !SLJIT_CONFIG_ARM_64 && !SLJIT_CONFIG_PPC */
#if !(defined SLJIT_CONFIG_X86 && SLJIT_CONFIG_X86) \
- && !(defined SLJIT_CONFIG_ARM_64 && SLJIT_CONFIG_ARM_64)
+ && !(defined SLJIT_CONFIG_ARM && SLJIT_CONFIG_ARM) \
+ && !(defined SLJIT_CONFIG_S390X && SLJIT_CONFIG_S390X) \
+ && !(defined SLJIT_CONFIG_LOONGARCH && SLJIT_CONFIG_LOONGARCH)
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_local_base(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw offset)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_mov(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
+ sljit_s32 srcdst, sljit_sw srcdstw)
{
CHECK_ERROR();
- CHECK(check_sljit_get_local_base(compiler, dst, dstw, offset));
-
- ADJUST_LOCAL_OFFSET(SLJIT_MEM1(SLJIT_SP), offset);
-
- SLJIT_SKIP_CHECKS(compiler);
-
- if (offset != 0)
- return sljit_emit_op2(compiler, SLJIT_ADD, dst, dstw, SLJIT_SP, 0, SLJIT_IMM, offset);
- return sljit_emit_op1(compiler, SLJIT_MOV, dst, dstw, SLJIT_SP, 0);
-}
-
-#endif
-
-#else /* SLJIT_CONFIG_UNSUPPORTED */
-
-/* Empty function bodies for those machines, which are not (yet) supported. */
-
-SLJIT_API_FUNC_ATTRIBUTE const char* sljit_get_platform_name(void)
-{
- return "unsupported";
-}
-
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_compiler* sljit_create_compiler(void *allocator_data, void *exec_allocator_data)
-{
- SLJIT_UNUSED_ARG(allocator_data);
- SLJIT_UNUSED_ARG(exec_allocator_data);
- SLJIT_UNREACHABLE();
- return NULL;
-}
-
-SLJIT_API_FUNC_ATTRIBUTE void sljit_free_compiler(struct sljit_compiler *compiler)
-{
- SLJIT_UNUSED_ARG(compiler);
- SLJIT_UNREACHABLE();
-}
-
-SLJIT_API_FUNC_ATTRIBUTE void sljit_set_compiler_memory_error(struct sljit_compiler *compiler)
-{
+ CHECK(check_sljit_emit_simd_mov(compiler, type, freg, srcdst, srcdstw));
SLJIT_UNUSED_ARG(compiler);
- SLJIT_UNREACHABLE();
-}
-
-SLJIT_API_FUNC_ATTRIBUTE void* sljit_alloc_memory(struct sljit_compiler *compiler, sljit_s32 size)
-{
- SLJIT_UNUSED_ARG(compiler);
- SLJIT_UNUSED_ARG(size);
- SLJIT_UNREACHABLE();
- return NULL;
-}
-
-#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
-SLJIT_API_FUNC_ATTRIBUTE void sljit_compiler_verbose(struct sljit_compiler *compiler, FILE* verbose)
-{
- SLJIT_UNUSED_ARG(compiler);
- SLJIT_UNUSED_ARG(verbose);
- SLJIT_UNREACHABLE();
-}
-#endif
-
-SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compiler)
-{
- SLJIT_UNUSED_ARG(compiler);
- SLJIT_UNREACHABLE();
- return NULL;
-}
-
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_has_cpu_feature(sljit_s32 feature_type)
-{
- SLJIT_UNUSED_ARG(feature_type);
- SLJIT_UNREACHABLE();
- return 0;
-}
-
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_cmp_info(sljit_s32 type)
-{
SLJIT_UNUSED_ARG(type);
- SLJIT_UNREACHABLE();
- return 0;
-}
-
-SLJIT_API_FUNC_ATTRIBUTE void sljit_free_code(void* code, void *exec_allocator_data)
-{
- SLJIT_UNUSED_ARG(code);
- SLJIT_UNUSED_ARG(exec_allocator_data);
- SLJIT_UNREACHABLE();
-}
-
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compiler,
- sljit_s32 options, sljit_s32 arg_types, sljit_s32 scratches, sljit_s32 saveds,
- sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
-{
- SLJIT_UNUSED_ARG(compiler);
- SLJIT_UNUSED_ARG(options);
- SLJIT_UNUSED_ARG(arg_types);
- SLJIT_UNUSED_ARG(scratches);
- SLJIT_UNUSED_ARG(saveds);
- SLJIT_UNUSED_ARG(fscratches);
- SLJIT_UNUSED_ARG(fsaveds);
- SLJIT_UNUSED_ARG(local_size);
- SLJIT_UNREACHABLE();
- return SLJIT_ERR_UNSUPPORTED;
-}
-
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_set_context(struct sljit_compiler *compiler,
- sljit_s32 options, sljit_s32 arg_types, sljit_s32 scratches, sljit_s32 saveds,
- sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
-{
- SLJIT_UNUSED_ARG(compiler);
- SLJIT_UNUSED_ARG(options);
- SLJIT_UNUSED_ARG(arg_types);
- SLJIT_UNUSED_ARG(scratches);
- SLJIT_UNUSED_ARG(saveds);
- SLJIT_UNUSED_ARG(fscratches);
- SLJIT_UNUSED_ARG(fsaveds);
- SLJIT_UNUSED_ARG(local_size);
- SLJIT_UNREACHABLE();
- return SLJIT_ERR_UNSUPPORTED;
-}
+ SLJIT_UNUSED_ARG(freg);
+ SLJIT_UNUSED_ARG(srcdst);
+ SLJIT_UNUSED_ARG(srcdstw);
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return_void(struct sljit_compiler *compiler)
-{
- SLJIT_UNUSED_ARG(compiler);
- SLJIT_UNREACHABLE();
return SLJIT_ERR_UNSUPPORTED;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_replicate(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
+ sljit_s32 src, sljit_sw srcw)
{
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_replicate(compiler, type, freg, src, srcw));
SLJIT_UNUSED_ARG(compiler);
- SLJIT_UNUSED_ARG(op);
+ SLJIT_UNUSED_ARG(type);
+ SLJIT_UNUSED_ARG(freg);
SLJIT_UNUSED_ARG(src);
SLJIT_UNUSED_ARG(srcw);
- SLJIT_UNREACHABLE();
- return SLJIT_ERR_UNSUPPORTED;
-}
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return_to(struct sljit_compiler *compiler, sljit_s32 src, sljit_sw srcw)
-{
- SLJIT_UNUSED_ARG(compiler);
- SLJIT_UNUSED_ARG(src);
- SLJIT_UNUSED_ARG(srcw);
- SLJIT_UNREACHABLE();
return SLJIT_ERR_UNSUPPORTED;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_lane_mov(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg, sljit_s32 lane_index,
+ sljit_s32 srcdst, sljit_sw srcdstw)
{
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_lane_mov(compiler, type, freg, lane_index, srcdst, srcdstw));
SLJIT_UNUSED_ARG(compiler);
- SLJIT_UNUSED_ARG(dst);
- SLJIT_UNUSED_ARG(dstw);
- SLJIT_UNREACHABLE();
- return SLJIT_ERR_UNSUPPORTED;
-}
+ SLJIT_UNUSED_ARG(type);
+ SLJIT_UNUSED_ARG(freg);
+ SLJIT_UNUSED_ARG(lane_index);
+ SLJIT_UNUSED_ARG(srcdst);
+ SLJIT_UNUSED_ARG(srcdstw);
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op0(struct sljit_compiler *compiler, sljit_s32 op)
-{
- SLJIT_UNUSED_ARG(compiler);
- SLJIT_UNUSED_ARG(op);
- SLJIT_UNREACHABLE();
return SLJIT_ERR_UNSUPPORTED;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compiler, sljit_s32 op,
- sljit_s32 dst, sljit_sw dstw,
- sljit_s32 src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_lane_replicate(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
+ sljit_s32 src, sljit_s32 src_lane_index)
{
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_lane_replicate(compiler, type, freg, src, src_lane_index));
SLJIT_UNUSED_ARG(compiler);
- SLJIT_UNUSED_ARG(op);
- SLJIT_UNUSED_ARG(dst);
- SLJIT_UNUSED_ARG(dstw);
+ SLJIT_UNUSED_ARG(type);
+ SLJIT_UNUSED_ARG(freg);
SLJIT_UNUSED_ARG(src);
- SLJIT_UNUSED_ARG(srcw);
- SLJIT_UNREACHABLE();
- return SLJIT_ERR_UNSUPPORTED;
-}
+ SLJIT_UNUSED_ARG(src_lane_index);
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compiler, sljit_s32 op,
- sljit_s32 dst, sljit_sw dstw,
- sljit_s32 src1, sljit_sw src1w,
- sljit_s32 src2, sljit_sw src2w)
-{
- SLJIT_UNUSED_ARG(compiler);
- SLJIT_UNUSED_ARG(op);
- SLJIT_UNUSED_ARG(dst);
- SLJIT_UNUSED_ARG(dstw);
- SLJIT_UNUSED_ARG(src1);
- SLJIT_UNUSED_ARG(src1w);
- SLJIT_UNUSED_ARG(src2);
- SLJIT_UNUSED_ARG(src2w);
- SLJIT_UNREACHABLE();
return SLJIT_ERR_UNSUPPORTED;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2u(struct sljit_compiler *compiler, sljit_s32 op,
- sljit_s32 src1, sljit_sw src1w,
- sljit_s32 src2, sljit_sw src2w)
-{
- SLJIT_UNUSED_ARG(compiler);
- SLJIT_UNUSED_ARG(op);
- SLJIT_UNUSED_ARG(src1);
- SLJIT_UNUSED_ARG(src1w);
- SLJIT_UNUSED_ARG(src2);
- SLJIT_UNUSED_ARG(src2w);
- SLJIT_UNREACHABLE();
- return SLJIT_ERR_UNSUPPORTED;
-}
-
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_shift_into(struct sljit_compiler *compiler, sljit_s32 op,
- sljit_s32 src_dst,
- sljit_s32 src1, sljit_sw src1w,
- sljit_s32 src2, sljit_sw src2w)
-{
- SLJIT_UNUSED_ARG(compiler);
- SLJIT_UNUSED_ARG(op);
- SLJIT_UNUSED_ARG(src_dst);
- SLJIT_UNUSED_ARG(src1);
- SLJIT_UNUSED_ARG(src1w);
- SLJIT_UNUSED_ARG(src2);
- SLJIT_UNUSED_ARG(src2w);
- SLJIT_UNREACHABLE();
- return SLJIT_ERR_UNSUPPORTED;
-}
-
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_src(struct sljit_compiler *compiler, sljit_s32 op,
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_extend(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
sljit_s32 src, sljit_sw srcw)
{
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_extend(compiler, type, freg, src, srcw));
SLJIT_UNUSED_ARG(compiler);
- SLJIT_UNUSED_ARG(op);
+ SLJIT_UNUSED_ARG(type);
+ SLJIT_UNUSED_ARG(freg);
SLJIT_UNUSED_ARG(src);
SLJIT_UNUSED_ARG(srcw);
- SLJIT_UNREACHABLE();
- return SLJIT_ERR_UNSUPPORTED;
-}
-
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 reg)
-{
- SLJIT_UNREACHABLE();
- return reg;
-}
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *compiler,
- void *instruction, sljit_u32 size)
-{
- SLJIT_UNUSED_ARG(compiler);
- SLJIT_UNUSED_ARG(instruction);
- SLJIT_UNUSED_ARG(size);
- SLJIT_UNREACHABLE();
return SLJIT_ERR_UNSUPPORTED;
}
-SLJIT_API_FUNC_ATTRIBUTE void sljit_set_current_flags(struct sljit_compiler *compiler, sljit_s32 current_flags)
-{
- SLJIT_UNUSED_ARG(compiler);
- SLJIT_UNUSED_ARG(current_flags);
-}
-
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compiler, sljit_s32 op,
- sljit_s32 dst, sljit_sw dstw,
- sljit_s32 src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_sign(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
+ sljit_s32 dst, sljit_sw dstw)
{
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_sign(compiler, type, freg, dst, dstw));
SLJIT_UNUSED_ARG(compiler);
- SLJIT_UNUSED_ARG(op);
+ SLJIT_UNUSED_ARG(type);
+ SLJIT_UNUSED_ARG(freg);
SLJIT_UNUSED_ARG(dst);
SLJIT_UNUSED_ARG(dstw);
- SLJIT_UNUSED_ARG(src);
- SLJIT_UNUSED_ARG(srcw);
- SLJIT_UNREACHABLE();
- return SLJIT_ERR_UNSUPPORTED;
-}
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compiler, sljit_s32 op,
- sljit_s32 dst, sljit_sw dstw,
- sljit_s32 src1, sljit_sw src1w,
- sljit_s32 src2, sljit_sw src2w)
-{
- SLJIT_UNUSED_ARG(compiler);
- SLJIT_UNUSED_ARG(op);
- SLJIT_UNUSED_ARG(dst);
- SLJIT_UNUSED_ARG(dstw);
- SLJIT_UNUSED_ARG(src1);
- SLJIT_UNUSED_ARG(src1w);
- SLJIT_UNUSED_ARG(src2);
- SLJIT_UNUSED_ARG(src2w);
- SLJIT_UNREACHABLE();
return SLJIT_ERR_UNSUPPORTED;
}
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emit_label(struct sljit_compiler *compiler)
-{
- SLJIT_UNUSED_ARG(compiler);
- SLJIT_UNREACHABLE();
- return NULL;
-}
-
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, sljit_s32 type)
-{
- SLJIT_UNUSED_ARG(compiler);
- SLJIT_UNUSED_ARG(type);
- SLJIT_UNREACHABLE();
- return NULL;
-}
-
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_call(struct sljit_compiler *compiler, sljit_s32 type,
- sljit_s32 arg_types)
-{
- SLJIT_UNUSED_ARG(compiler);
- SLJIT_UNUSED_ARG(type);
- SLJIT_UNUSED_ARG(arg_types);
- SLJIT_UNREACHABLE();
- return NULL;
-}
-
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_cmp(struct sljit_compiler *compiler, sljit_s32 type,
- sljit_s32 src1, sljit_sw src1w,
- sljit_s32 src2, sljit_sw src2w)
-{
- SLJIT_UNUSED_ARG(compiler);
- SLJIT_UNUSED_ARG(type);
- SLJIT_UNUSED_ARG(src1);
- SLJIT_UNUSED_ARG(src1w);
- SLJIT_UNUSED_ARG(src2);
- SLJIT_UNUSED_ARG(src2w);
- SLJIT_UNREACHABLE();
- return NULL;
-}
-
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_fcmp(struct sljit_compiler *compiler, sljit_s32 type,
- sljit_s32 src1, sljit_sw src1w,
- sljit_s32 src2, sljit_sw src2w)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_op2(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 dst_freg, sljit_s32 src1_freg, sljit_s32 src2_freg)
{
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_op2(compiler, type, dst_freg, src1_freg, src2_freg));
SLJIT_UNUSED_ARG(compiler);
SLJIT_UNUSED_ARG(type);
- SLJIT_UNUSED_ARG(src1);
- SLJIT_UNUSED_ARG(src1w);
- SLJIT_UNUSED_ARG(src2);
- SLJIT_UNUSED_ARG(src2w);
- SLJIT_UNREACHABLE();
- return NULL;
-}
+ SLJIT_UNUSED_ARG(dst_freg);
+ SLJIT_UNUSED_ARG(src1_freg);
+ SLJIT_UNUSED_ARG(src2_freg);
-SLJIT_API_FUNC_ATTRIBUTE void sljit_set_label(struct sljit_jump *jump, struct sljit_label* label)
-{
- SLJIT_UNUSED_ARG(jump);
- SLJIT_UNUSED_ARG(label);
- SLJIT_UNREACHABLE();
-}
-
-SLJIT_API_FUNC_ATTRIBUTE void sljit_set_target(struct sljit_jump *jump, sljit_uw target)
-{
- SLJIT_UNUSED_ARG(jump);
- SLJIT_UNUSED_ARG(target);
- SLJIT_UNREACHABLE();
-}
-
-SLJIT_API_FUNC_ATTRIBUTE void sljit_set_put_label(struct sljit_put_label *put_label, struct sljit_label *label)
-{
- SLJIT_UNUSED_ARG(put_label);
- SLJIT_UNUSED_ARG(label);
- SLJIT_UNREACHABLE();
-}
-
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 src, sljit_sw srcw)
-{
- SLJIT_UNUSED_ARG(compiler);
- SLJIT_UNUSED_ARG(type);
- SLJIT_UNUSED_ARG(src);
- SLJIT_UNUSED_ARG(srcw);
- SLJIT_UNREACHABLE();
return SLJIT_ERR_UNSUPPORTED;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_icall(struct sljit_compiler *compiler, sljit_s32 type,
- sljit_s32 arg_types,
- sljit_s32 src, sljit_sw srcw)
-{
- SLJIT_UNUSED_ARG(compiler);
- SLJIT_UNUSED_ARG(type);
- SLJIT_UNUSED_ARG(arg_types);
- SLJIT_UNUSED_ARG(src);
- SLJIT_UNUSED_ARG(srcw);
- SLJIT_UNREACHABLE();
- return SLJIT_ERR_UNSUPPORTED;
-}
+#endif /* !SLJIT_CONFIG_X86 && !SLJIT_CONFIG_ARM */
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_s32 op,
- sljit_s32 dst, sljit_sw dstw,
- sljit_s32 type)
-{
- SLJIT_UNUSED_ARG(compiler);
- SLJIT_UNUSED_ARG(op);
- SLJIT_UNUSED_ARG(dst);
- SLJIT_UNUSED_ARG(dstw);
- SLJIT_UNUSED_ARG(type);
- SLJIT_UNREACHABLE();
- return SLJIT_ERR_UNSUPPORTED;
-}
+#if !(defined(SLJIT_CONFIG_X86) && SLJIT_CONFIG_X86) \
+ && !(defined(SLJIT_CONFIG_ARM) && SLJIT_CONFIG_ARM) \
+ && !(defined(SLJIT_CONFIG_S390X) && SLJIT_CONFIG_S390X) \
+ && !(defined(SLJIT_CONFIG_LOONGARCH) && SLJIT_CONFIG_LOONGARCH)
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_cmov(struct sljit_compiler *compiler, sljit_s32 type,
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_atomic_load(struct sljit_compiler *compiler,
+ sljit_s32 op,
sljit_s32 dst_reg,
- sljit_s32 src, sljit_sw srcw)
+ sljit_s32 mem_reg)
{
SLJIT_UNUSED_ARG(compiler);
- SLJIT_UNUSED_ARG(type);
+ SLJIT_UNUSED_ARG(op);
SLJIT_UNUSED_ARG(dst_reg);
- SLJIT_UNUSED_ARG(src);
- SLJIT_UNUSED_ARG(srcw);
- SLJIT_UNREACHABLE();
- return SLJIT_ERR_UNSUPPORTED;
-}
+ SLJIT_UNUSED_ARG(mem_reg);
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_mem(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 reg, sljit_s32 mem, sljit_sw memw)
-{
- SLJIT_UNUSED_ARG(compiler);
- SLJIT_UNUSED_ARG(type);
- SLJIT_UNUSED_ARG(reg);
- SLJIT_UNUSED_ARG(mem);
- SLJIT_UNUSED_ARG(memw);
- SLJIT_UNREACHABLE();
- return SLJIT_ERR_UNSUPPORTED;
-}
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_atomic_load(compiler, op, dst_reg, mem_reg));
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_mem_update(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 reg, sljit_s32 mem, sljit_sw memw)
-{
- SLJIT_UNUSED_ARG(compiler);
- SLJIT_UNUSED_ARG(type);
- SLJIT_UNUSED_ARG(reg);
- SLJIT_UNUSED_ARG(mem);
- SLJIT_UNUSED_ARG(memw);
- SLJIT_UNREACHABLE();
return SLJIT_ERR_UNSUPPORTED;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fmem(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 freg, sljit_s32 mem, sljit_sw memw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_atomic_store(struct sljit_compiler *compiler,
+ sljit_s32 op,
+ sljit_s32 src_reg,
+ sljit_s32 mem_reg,
+ sljit_s32 temp_reg)
{
SLJIT_UNUSED_ARG(compiler);
- SLJIT_UNUSED_ARG(type);
- SLJIT_UNUSED_ARG(freg);
- SLJIT_UNUSED_ARG(mem);
- SLJIT_UNUSED_ARG(memw);
- SLJIT_UNREACHABLE();
- return SLJIT_ERR_UNSUPPORTED;
-}
+ SLJIT_UNUSED_ARG(op);
+ SLJIT_UNUSED_ARG(src_reg);
+ SLJIT_UNUSED_ARG(mem_reg);
+ SLJIT_UNUSED_ARG(temp_reg);
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_atomic_store(compiler, op, src_reg, mem_reg, temp_reg));
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fmem_update(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 freg, sljit_s32 mem, sljit_sw memw)
-{
- SLJIT_UNUSED_ARG(compiler);
- SLJIT_UNUSED_ARG(type);
- SLJIT_UNUSED_ARG(freg);
- SLJIT_UNUSED_ARG(mem);
- SLJIT_UNUSED_ARG(memw);
- SLJIT_UNREACHABLE();
return SLJIT_ERR_UNSUPPORTED;
}
+#endif /* !SLJIT_CONFIG_X86 && !SLJIT_CONFIG_ARM && !SLJIT_CONFIG_S390X && !SLJIT_CONFIG_LOONGARCH */
+
+#if !(defined SLJIT_CONFIG_X86 && SLJIT_CONFIG_X86) \
+ && !(defined SLJIT_CONFIG_ARM_64 && SLJIT_CONFIG_ARM_64)
+
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_local_base(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw offset)
{
- SLJIT_UNUSED_ARG(compiler);
- SLJIT_UNUSED_ARG(dst);
- SLJIT_UNUSED_ARG(dstw);
- SLJIT_UNUSED_ARG(offset);
- SLJIT_UNREACHABLE();
- return SLJIT_ERR_UNSUPPORTED;
-}
+ CHECK_ERROR();
+ CHECK(check_sljit_get_local_base(compiler, dst, dstw, offset));
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw initval)
-{
- SLJIT_UNUSED_ARG(compiler);
- SLJIT_UNUSED_ARG(dst);
- SLJIT_UNUSED_ARG(dstw);
- SLJIT_UNUSED_ARG(initval);
- SLJIT_UNREACHABLE();
- return NULL;
-}
+ ADJUST_LOCAL_OFFSET(SLJIT_MEM1(SLJIT_SP), offset);
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_put_label* sljit_emit_put_label(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
-{
- SLJIT_UNUSED_ARG(compiler);
- SLJIT_UNUSED_ARG(dst);
- SLJIT_UNUSED_ARG(dstw);
- return NULL;
-}
+ SLJIT_SKIP_CHECKS(compiler);
-SLJIT_API_FUNC_ATTRIBUTE void sljit_set_jump_addr(sljit_uw addr, sljit_uw new_target, sljit_sw executable_offset)
-{
- SLJIT_UNUSED_ARG(addr);
- SLJIT_UNUSED_ARG(new_target);
- SLJIT_UNUSED_ARG(executable_offset);
- SLJIT_UNREACHABLE();
+ if (offset != 0)
+ return sljit_emit_op2(compiler, SLJIT_ADD, dst, dstw, SLJIT_SP, 0, SLJIT_IMM, offset);
+ return sljit_emit_op1(compiler, SLJIT_MOV, dst, dstw, SLJIT_SP, 0);
}
-SLJIT_API_FUNC_ATTRIBUTE void sljit_set_const(sljit_uw addr, sljit_sw new_constant, sljit_sw executable_offset)
-{
- SLJIT_UNUSED_ARG(addr);
- SLJIT_UNUSED_ARG(new_constant);
- SLJIT_UNUSED_ARG(executable_offset);
- SLJIT_UNREACHABLE();
-}
+#endif /* !SLJIT_CONFIG_X86 && !SLJIT_CONFIG_ARM_64 */
#endif /* !SLJIT_CONFIG_UNSUPPORTED */
diff --git a/src/3rdparty/pcre2/src/sljit/sljitLir.h b/src/3rdparty/pcre2/src/sljit/sljitLir.h
index c6a0832ef8..8b6fa69a0a 100644
--- a/src/3rdparty/pcre2/src/sljit/sljitLir.h
+++ b/src/3rdparty/pcre2/src/sljit/sljitLir.h
@@ -72,6 +72,7 @@
#include "sljitConfigPre.h"
#endif /* SLJIT_HAVE_CONFIG_PRE */
+#include "sljitConfigCPU.h"
#include "sljitConfig.h"
/* The following header file defines useful macros for fine tuning
@@ -107,9 +108,9 @@ extern "C" {
/* Cannot allocate executable memory.
Only sljit_generate_code() returns with this error code. */
#define SLJIT_ERR_EX_ALLOC_FAILED 3
-/* Return value for SLJIT_CONFIG_UNSUPPORTED placeholder architecture. */
+/* Unsupported instruction form. */
#define SLJIT_ERR_UNSUPPORTED 4
-/* An ivalid argument is passed to any SLJIT function. */
+/* An invalid argument is passed to any SLJIT function. */
#define SLJIT_ERR_BAD_ARGUMENT 5
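/* A minimal sketch, not part of this patch: with the new meaning of
   SLJIT_ERR_UNSUPPORTED, a caller can probe an instruction form before emitting it,
   for example the SIMD forms whose checks appear earlier in this patch, using the
   SLJIT_SIMD_TEST flag. The helper name and its arguments are illustrative only. */
static sljit_s32 emit_simd_load_or_report(struct sljit_compiler *compiler,
	sljit_s32 type, sljit_s32 freg, sljit_s32 mem, sljit_sw memw)
{
	/* With SLJIT_SIMD_TEST set, no instructions are emitted. */
	if (sljit_emit_simd_mov(compiler, type | SLJIT_SIMD_TEST, freg, mem, memw)
			== SLJIT_ERR_UNSUPPORTED)
		return SLJIT_ERR_UNSUPPORTED; /* the caller picks a scalar fallback */
	return sljit_emit_simd_mov(compiler, type, freg, mem, memw);
}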
/* --------------------------------------------------------------------- */
@@ -127,40 +128,40 @@ extern "C" {
is the first saved register, the one before the last is the second saved
register, and so on.
- If an architecture provides two scratch and three saved registers,
- its scratch and saved register sets are the following:
+ For example, in an architecture with only five registers (A-E), if two
+ are scratch and three saved registers, they will be defined as follows:
- R0 | | R0 is always a scratch register
- R1 | | R1 is always a scratch register
- [R2] | S2 | R2 and S2 represent the same physical register
- [R3] | S1 | R3 and S1 represent the same physical register
- [R4] | S0 | R4 and S0 represent the same physical register
+ A | R0 | | R0 always represents scratch register A
+ B | R1 | | R1 always represents scratch register B
+ C | [R2] | S2 | R2 and S2 represent the same physical register C
+ D | [R3] | S1 | R3 and S1 represent the same physical register D
+ E | [R4] | S0 | R4 and S0 represent the same physical register E
- Note: SLJIT_NUMBER_OF_SCRATCH_REGISTERS would be 2 and
- SLJIT_NUMBER_OF_SAVED_REGISTERS would be 3 for this architecture.
+ Note: SLJIT_NUMBER_OF_SCRATCH_REGISTERS will be 2 and
+ SLJIT_NUMBER_OF_SAVED_REGISTERS will be 3.
- Note: On all supported architectures SLJIT_NUMBER_OF_REGISTERS >= 12
+ Note: For all supported architectures SLJIT_NUMBER_OF_REGISTERS >= 12
and SLJIT_NUMBER_OF_SAVED_REGISTERS >= 6. However, 6 registers
are virtual on x86-32. See below.
The purpose of this definition is convenience: saved registers can
- be used as extra scratch registers. For example four registers can
- be specified as scratch registers and the fifth one as saved register
- on the CPU above and any user code which requires four scratch
- registers can run unmodified. The SLJIT compiler automatically saves
- the content of the two extra scratch register on the stack. Scratch
- registers can also be preserved by saving their value on the stack
- but this needs to be done manually.
+ be used as extra scratch registers. For example, building on the
+ previous example, four registers can be specified as scratch registers
+ and the fifth one as a saved register, allowing any user code which requires
+ four scratch registers to run unmodified. The SLJIT compiler automatically
+ saves the content of the two extra scratch registers on the stack. Scratch
+ registers can also be preserved by saving their value on the stack but
+ that needs to be done manually.
Note: To emphasize that registers assigned to R2-R4 are saved
registers, they are enclosed by square brackets.
- Note: sljit_emit_enter and sljit_set_context defines whether a register
- is S or R register. E.g: when 3 scratches and 1 saved is mapped
- by sljit_emit_enter, the allowed register set will be: R0-R2 and
- S0. Although S2 is mapped to the same position as R2, it does not
- available in the current configuration. Furthermore the S1 register
- is not available at all.
+ Note: sljit_emit_enter and sljit_set_context define whether a register
+ is S or R register. E.g: if in the previous example 3 scratches and
+ 1 saved are mapped by sljit_emit_enter, the allowed register set
+ will be: R0-R2 and S0. Although S2 is mapped to the same register
+ as R2, it is not available in that configuration. Furthermore
+ the S1 register cannot be used at all.
*/
/* Scratch registers. */
@@ -209,7 +210,7 @@ extern "C" {
/* The SLJIT_SP provides direct access to the linear stack space allocated by
sljit_emit_enter. It can only be used in the following form: SLJIT_MEM1(SLJIT_SP).
The immediate offset is extended by the relative stack offset automatically.
- The sljit_get_local_base can be used to obtain the real address of a value. */
+ sljit_get_local_base can be used to obtain the real address of a value. */
#define SLJIT_SP (SLJIT_NUMBER_OF_REGISTERS + 1)
/* Return with machine word. */
@@ -221,7 +222,7 @@ extern "C" {
/* --------------------------------------------------------------------- */
/* Each floating point register can store a 32 or a 64 bit precision
- value. The FR and FS register sets are overlap in the same way as R
+ value. The FR and FS register sets overlap in the same way as R
and S register sets. See above. */
/* Floating point scratch registers. */
@@ -231,6 +232,10 @@ extern "C" {
#define SLJIT_FR3 4
#define SLJIT_FR4 5
#define SLJIT_FR5 6
+#define SLJIT_FR6 7
+#define SLJIT_FR7 8
+#define SLJIT_FR8 9
+#define SLJIT_FR9 10
/* All FR registers provided by the architecture can be accessed by SLJIT_FR(i)
The i parameter must be >= 0 and < SLJIT_NUMBER_OF_FLOAT_REGISTERS. */
#define SLJIT_FR(i) (1 + (i))
@@ -242,6 +247,10 @@ extern "C" {
#define SLJIT_FS3 (SLJIT_NUMBER_OF_FLOAT_REGISTERS - 3)
#define SLJIT_FS4 (SLJIT_NUMBER_OF_FLOAT_REGISTERS - 4)
#define SLJIT_FS5 (SLJIT_NUMBER_OF_FLOAT_REGISTERS - 5)
+#define SLJIT_FS6 (SLJIT_NUMBER_OF_FLOAT_REGISTERS - 6)
+#define SLJIT_FS7 (SLJIT_NUMBER_OF_FLOAT_REGISTERS - 7)
+#define SLJIT_FS8 (SLJIT_NUMBER_OF_FLOAT_REGISTERS - 8)
+#define SLJIT_FS9 (SLJIT_NUMBER_OF_FLOAT_REGISTERS - 9)
/* All S registers provided by the architecture can be accessed by SLJIT_FS(i)
The i parameter must be >= 0 and < SLJIT_NUMBER_OF_SAVED_FLOAT_REGISTERS. */
#define SLJIT_FS(i) (SLJIT_NUMBER_OF_FLOAT_REGISTERS - (i))
@@ -260,23 +269,39 @@ extern "C" {
/* The following argument type definitions are used by sljit_emit_enter,
sljit_set_context, sljit_emit_call, and sljit_emit_icall functions.
- As for sljit_emit_call and sljit_emit_icall, the first integer argument
+ For sljit_emit_call and sljit_emit_icall, the first integer argument
must be placed into SLJIT_R0, the second one into SLJIT_R1, and so on.
Similarly the first floating point argument must be placed into SLJIT_FR0,
the second one into SLJIT_FR1, and so on.
- As for sljit_emit_enter, the integer arguments can be stored in scratch
- or saved registers. The first integer argument without _R postfix is
- stored in SLJIT_S0, the next one in SLJIT_S1, and so on. The integer
- arguments with _R postfix are placed into scratch registers. The index
- of the scratch register is the count of the previous integer arguments
- starting from SLJIT_R0. The floating point arguments are always placed
- into SLJIT_FR0, SLJIT_FR1, and so on.
+ For sljit_emit_enter, the integer arguments can be stored in scratch
+ or saved registers. Scratch registers are identified by a _R suffix.
- Note: if a function is called by sljit_emit_call/sljit_emit_icall and
- an argument is stored in a scratch register by sljit_emit_enter,
- that argument uses the same scratch register index for both
- integer and floating point arguments.
+ If only saved registers are used, then the allocation mirrors what is
+ done for the "call" functions but using saved registers, meaning that
+ the first integer argument goes to SLJIT_S0, the second one goes into
+ SLJIT_S1, and so on.
+
+ If scratch registers are used, then the way the integer registers are
+ allocated changes so that SLJIT_S0, SLJIT_S1, etc. will be assigned
+ only for the arguments not using scratch registers, while SLJIT_R<n>
+ will be used for the ones using scratch registers.
+
+ Furthermore, the index (shown as "n" above) that will be used for the
+ scratch register depends on how many previous integer registers
+ (scratch or saved) were used already, starting with SLJIT_R0.
+ Even though some indexes will likely be skipped, they still need to be
+ accounted for in the scratches parameter of sljit_emit_enter. See below
+ for some examples.
+
+ The floating point arguments always use scratch registers (without the
+ _R suffix used by integer arguments) and must use SLJIT_FR0, SLJIT_FR1,
+ just like in the "call" functions.
+
+ Note: the mapping for scratch registers is part of the compiler context
+ and therefore a new context after sljit_emit_call/sljit_emit_icall
+ could remove access to some scratch registers that were used as
+ arguments.
Example function definition:
sljit_f32 SLJIT_FUNC example_c_callback(void *arg_a,
@@ -288,29 +313,33 @@ extern "C" {
| SLJIT_ARG_VALUE(SLJIT_ARG_TYPE_32, 3) | SLJIT_ARG_VALUE(SLJIT_ARG_TYPE_F32, 4)
Short form of argument type definition:
- SLJIT_ARGS4(32, P, F64, 32, F32)
+ SLJIT_ARGS4(F32, P, F64, 32, F32)
Argument passing:
arg_a must be placed in SLJIT_R0
- arg_c must be placed in SLJIT_R1
arg_b must be placed in SLJIT_FR0
+ arg_c must be placed in SLJIT_R1
arg_d must be placed in SLJIT_FR1
Examples for argument processing by sljit_emit_enter:
- SLJIT_ARGS4(VOID, P, 32_R, F32, W)
+ SLJIT_ARGS4V(P, 32_R, F32, W)
Arguments are placed into: SLJIT_S0, SLJIT_R1, SLJIT_FR0, SLJIT_S1
+ The type of the result is void.
- SLJIT_ARGS4(VOID, W, W_R, W, W_R)
+ SLJIT_ARGS4(F32, W, W_R, W, W_R)
Arguments are placed into: SLJIT_S0, SLJIT_R1, SLJIT_S1, SLJIT_R3
+ The type of the result is sljit_f32.
- SLJIT_ARGS4(VOID, F64, W, F32, W_R)
+ SLJIT_ARGS4(P, F64, W, F32, P_R)
Arguments are placed into: SLJIT_FR0, SLJIT_S0, SLJIT_FR1, SLJIT_R1
+ The type of the result is pointer.
Note: it is recommended to pass the scratch arguments first
followed by the saved arguments:
- SLJIT_ARGS4(VOID, W_R, W_R, W, W)
+ SLJIT_ARGS4(W, W_R, W_R, W, W)
Arguments are placed into: SLJIT_R0, SLJIT_R1, SLJIT_S0, SLJIT_S1
+ The type of the result is sljit_sw / sljit_uw.
*/
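To make the short forms above concrete, here is a small sketch building the descriptor for example_c_callback both ways; the two expressions are expected to evaluate to the same value, assuming SLJIT_ARG_VALUE places each type at its argument slot exactly as in the long form quoted above.

#include "sljitLir.h"

/* Sketch: long and short forms of the same argument type descriptor. */
static sljit_s32 example_arg_types(void)
{
	sljit_s32 long_form =
		SLJIT_ARG_RETURN(SLJIT_ARG_TYPE_F32)
		| SLJIT_ARG_VALUE(SLJIT_ARG_TYPE_P, 1)
		| SLJIT_ARG_VALUE(SLJIT_ARG_TYPE_F64, 2)
		| SLJIT_ARG_VALUE(SLJIT_ARG_TYPE_32, 3)
		| SLJIT_ARG_VALUE(SLJIT_ARG_TYPE_F32, 4);
	sljit_s32 short_form = SLJIT_ARGS4(F32, P, F64, 32, F32);

	return (long_form == short_form) ? short_form : -1;
}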
/* The following flag is only allowed for the integer arguments of
@@ -318,21 +347,21 @@ extern "C" {
stored in a scratch register instead of a saved register. */
#define SLJIT_ARG_TYPE_SCRATCH_REG 0x8
-/* Void result, can only be used by SLJIT_ARG_RETURN. */
-#define SLJIT_ARG_TYPE_VOID 0
+/* No return value, only supported by SLJIT_ARG_RETURN. */
+#define SLJIT_ARG_TYPE_RET_VOID 0
/* Machine word sized integer argument or result. */
-#define SLJIT_ARG_TYPE_W 1
+#define SLJIT_ARG_TYPE_W 1
#define SLJIT_ARG_TYPE_W_R (SLJIT_ARG_TYPE_W | SLJIT_ARG_TYPE_SCRATCH_REG)
/* 32 bit integer argument or result. */
-#define SLJIT_ARG_TYPE_32 2
+#define SLJIT_ARG_TYPE_32 2
#define SLJIT_ARG_TYPE_32_R (SLJIT_ARG_TYPE_32 | SLJIT_ARG_TYPE_SCRATCH_REG)
/* Pointer sized integer argument or result. */
-#define SLJIT_ARG_TYPE_P 3
+#define SLJIT_ARG_TYPE_P 3
#define SLJIT_ARG_TYPE_P_R (SLJIT_ARG_TYPE_P | SLJIT_ARG_TYPE_SCRATCH_REG)
/* 64 bit floating point argument or result. */
-#define SLJIT_ARG_TYPE_F64 4
+#define SLJIT_ARG_TYPE_F64 4
/* 32 bit floating point argument or result. */
-#define SLJIT_ARG_TYPE_F32 5
+#define SLJIT_ARG_TYPE_F32 5
#define SLJIT_ARG_SHIFT 4
#define SLJIT_ARG_RETURN(type) (type)
@@ -345,24 +374,40 @@ extern "C" {
can be shortened to:
SLJIT_ARGS1(W, F32)
+
+ Another example where no value is returned:
+ SLJIT_ARG_RETURN(SLJIT_ARG_TYPE_RET_VOID) | SLJIT_ARG_VALUE(SLJIT_ARG_TYPE_W_R, 1)
+
+ can be shortened to:
+ SLJIT_ARGS1V(W_R)
*/
#define SLJIT_ARG_TO_TYPE(type) SLJIT_ARG_TYPE_ ## type
#define SLJIT_ARGS0(ret) \
SLJIT_ARG_RETURN(SLJIT_ARG_TO_TYPE(ret))
+#define SLJIT_ARGS0V() \
+ SLJIT_ARG_RETURN(SLJIT_ARG_TYPE_RET_VOID)
#define SLJIT_ARGS1(ret, arg1) \
(SLJIT_ARGS0(ret) | SLJIT_ARG_VALUE(SLJIT_ARG_TO_TYPE(arg1), 1))
+#define SLJIT_ARGS1V(arg1) \
+ (SLJIT_ARGS0V() | SLJIT_ARG_VALUE(SLJIT_ARG_TO_TYPE(arg1), 1))
#define SLJIT_ARGS2(ret, arg1, arg2) \
(SLJIT_ARGS1(ret, arg1) | SLJIT_ARG_VALUE(SLJIT_ARG_TO_TYPE(arg2), 2))
+#define SLJIT_ARGS2V(arg1, arg2) \
+ (SLJIT_ARGS1V(arg1) | SLJIT_ARG_VALUE(SLJIT_ARG_TO_TYPE(arg2), 2))
#define SLJIT_ARGS3(ret, arg1, arg2, arg3) \
(SLJIT_ARGS2(ret, arg1, arg2) | SLJIT_ARG_VALUE(SLJIT_ARG_TO_TYPE(arg3), 3))
+#define SLJIT_ARGS3V(arg1, arg2, arg3) \
+ (SLJIT_ARGS2V(arg1, arg2) | SLJIT_ARG_VALUE(SLJIT_ARG_TO_TYPE(arg3), 3))
#define SLJIT_ARGS4(ret, arg1, arg2, arg3, arg4) \
(SLJIT_ARGS3(ret, arg1, arg2, arg3) | SLJIT_ARG_VALUE(SLJIT_ARG_TO_TYPE(arg4), 4))
+#define SLJIT_ARGS4V(arg1, arg2, arg3, arg4) \
+ (SLJIT_ARGS3V(arg1, arg2, arg3) | SLJIT_ARG_VALUE(SLJIT_ARG_TO_TYPE(arg4), 4))
/* --------------------------------------------------------------------- */
/* Main structures and functions */
@@ -382,7 +427,10 @@ struct sljit_memory_fragment {
struct sljit_label {
struct sljit_label *next;
- sljit_uw addr;
+ union {
+ sljit_uw index;
+ sljit_uw addr;
+ } u;
/* The maximum size difference. */
sljit_uw size;
};
@@ -398,36 +446,35 @@ struct sljit_jump {
} u;
};
-struct sljit_put_label {
- struct sljit_put_label *next;
- struct sljit_label *label;
- sljit_uw addr;
- sljit_uw flags;
-};
-
struct sljit_const {
struct sljit_const *next;
sljit_uw addr;
};
+struct sljit_generate_code_buffer {
+ void *buffer;
+ sljit_uw size;
+ sljit_sw executable_offset;
+};
+
struct sljit_compiler {
sljit_s32 error;
sljit_s32 options;
struct sljit_label *labels;
struct sljit_jump *jumps;
- struct sljit_put_label *put_labels;
struct sljit_const *consts;
struct sljit_label *last_label;
struct sljit_jump *last_jump;
struct sljit_const *last_const;
- struct sljit_put_label *last_put_label;
void *allocator_data;
- void *exec_allocator_data;
+ void *user_data;
struct sljit_memory_fragment *buf;
struct sljit_memory_fragment *abuf;
+ /* Number of labels created by the compiler. */
+ sljit_uw label_count;
/* Available scratch registers. */
sljit_s32 scratches;
/* Available saved registers. */
@@ -447,17 +494,18 @@ struct sljit_compiler {
#if (defined SLJIT_HAS_STATUS_FLAGS_STATE && SLJIT_HAS_STATUS_FLAGS_STATE)
sljit_s32 status_flags_state;
-#endif
+#endif /* SLJIT_HAS_STATUS_FLAGS_STATE */
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
sljit_s32 args_size;
-#endif
+#endif /* SLJIT_CONFIG_X86_32 */
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
+ /* Temporary fields. */
sljit_s32 mode32;
-#endif
+#endif /* SLJIT_CONFIG_X86_64 */
-#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
+#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
/* Constant pool handling. */
sljit_uw *cpool;
sljit_u8 *cpool_unique;
@@ -466,44 +514,54 @@ struct sljit_compiler {
/* Other members. */
/* Contains pointer, "ldr pc, [...]" pairs. */
sljit_uw patches;
-#endif
+#endif /* SLJIT_CONFIG_ARM_V6 */
-#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5) || (defined SLJIT_CONFIG_ARM_V7 && SLJIT_CONFIG_ARM_V7)
+#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6) || (defined SLJIT_CONFIG_ARM_V7 && SLJIT_CONFIG_ARM_V7)
/* Temporary fields. */
sljit_uw shift_imm;
-#endif /* SLJIT_CONFIG_ARM_V5 || SLJIT_CONFIG_ARM_V7 */
+#endif /* SLJIT_CONFIG_ARM_V6 || SLJIT_CONFIG_ARM_V7 */
#if (defined SLJIT_CONFIG_ARM_32 && SLJIT_CONFIG_ARM_32) && (defined __SOFTFP__)
sljit_uw args_size;
-#endif
+#endif /* SLJIT_CONFIG_ARM_32 && __SOFTFP__ */
#if (defined SLJIT_CONFIG_PPC && SLJIT_CONFIG_PPC)
+ /* Temporary fields. */
sljit_u32 imm;
-#endif
+#endif /* SLJIT_CONFIG_PPC */
#if (defined SLJIT_CONFIG_MIPS && SLJIT_CONFIG_MIPS)
sljit_s32 delay_slot;
+ /* Temporary fields. */
sljit_s32 cache_arg;
sljit_sw cache_argw;
-#endif
+#endif /* SLJIT_CONFIG_MIPS */
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
sljit_uw args_size;
-#endif
+#endif /* SLJIT_CONFIG_MIPS_32 */
#if (defined SLJIT_CONFIG_RISCV && SLJIT_CONFIG_RISCV)
+ /* Temporary fields. */
sljit_s32 cache_arg;
sljit_sw cache_argw;
-#endif
+#endif /* SLJIT_CONFIG_RISCV */
#if (defined SLJIT_CONFIG_S390X && SLJIT_CONFIG_S390X)
/* Need to allocate register save area to make calls. */
+ /* Temporary fields. */
sljit_s32 mode;
-#endif
+#endif /* SLJIT_CONFIG_S390X */
+
+#if (defined SLJIT_CONFIG_LOONGARCH && SLJIT_CONFIG_LOONGARCH)
+ /* Temporary fields. */
+ sljit_s32 cache_arg;
+ sljit_sw cache_argw;
+#endif /* SLJIT_CONFIG_LOONGARCH */
#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
FILE* verbose;
-#endif
+#endif /* SLJIT_VERBOSE */
#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS) \
|| (defined SLJIT_DEBUG && SLJIT_DEBUG)
@@ -514,7 +572,7 @@ struct sljit_compiler {
sljit_s32 last_return;
/* Local size passed to entry functions. */
sljit_s32 logical_local_size;
-#endif
+#endif /* SLJIT_ARGUMENT_CHECKS || SLJIT_DEBUG */
#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS) \
|| (defined SLJIT_DEBUG && SLJIT_DEBUG) \
@@ -522,7 +580,7 @@ struct sljit_compiler {
/* Trust arguments when an API function is called.
Used internally for calling API functions. */
sljit_s32 skip_checks;
-#endif
+#endif /* SLJIT_ARGUMENT_CHECKS || SLJIT_DEBUG || SLJIT_VERBOSE */
};
/* --------------------------------------------------------------------- */
@@ -533,12 +591,10 @@ struct sljit_compiler {
custom memory managers. This pointer is passed to SLJIT_MALLOC
and SLJIT_FREE macros. Most allocators (including the default
one) ignores this value, and it is recommended to pass NULL
- as a dummy value for allocator_data. The exec_allocator_data
- has the same purpose but this one is passed to SLJIT_MALLOC_EXEC /
- SLJIT_MALLOC_FREE functions.
+ as a dummy value for allocator_data.
Returns NULL if failed. */
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_compiler* sljit_create_compiler(void *allocator_data, void *exec_allocator_data);
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_compiler* sljit_create_compiler(void *allocator_data);
/* Frees everything except the compiled machine code. */
SLJIT_API_FUNC_ATTRIBUTE void sljit_free_compiler(struct sljit_compiler *compiler);
@@ -558,8 +614,7 @@ static SLJIT_INLINE sljit_s32 sljit_get_compiler_error(struct sljit_compiler *co
after the code is compiled. */
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_compiler_memory_error(struct sljit_compiler *compiler);
-/*
- Allocate a small amount of memory. The size must be <= 64 bytes on 32 bit,
+/* Allocate a small amount of memory. The size must be <= 64 bytes on 32 bit,
and <= 128 bytes on 64 bit architectures. The memory area is owned by the
compiler, and freed by sljit_free_compiler. The returned pointer is
sizeof(sljit_sw) aligned. Excellent for allocating small blocks during
@@ -567,28 +622,40 @@ SLJIT_API_FUNC_ATTRIBUTE void sljit_set_compiler_memory_error(struct sljit_compi
to contain at most 16 pointers. If the size is outside of the range,
the function will return with NULL. However, this return value does not
indicate that there is no more memory (does not set the current error code
- of the compiler to out-of-memory status).
-*/
+ of the compiler to out-of-memory status). */
SLJIT_API_FUNC_ATTRIBUTE void* sljit_alloc_memory(struct sljit_compiler *compiler, sljit_s32 size);
+/* Returns the allocator data passed to sljit_create_compiler. */
+static SLJIT_INLINE void* sljit_compiler_get_allocator_data(struct sljit_compiler *compiler) { return compiler->allocator_data; }
+/* Sets/gets the user data for a compiler. */
+static SLJIT_INLINE void sljit_compiler_set_user_data(struct sljit_compiler *compiler, void *user_data) { compiler->user_data = user_data; }
+static SLJIT_INLINE void* sljit_compiler_get_user_data(struct sljit_compiler *compiler) { return compiler->user_data; }
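A minimal sketch of the new accessors, assuming a caller-defined context struct (my_jit_ctx is hypothetical):

#include "sljitLir.h"

struct my_jit_ctx { int pass; }; /* hypothetical caller-defined state */

static void *attach_user_data_example(struct my_jit_ctx *ctx)
{
	struct sljit_compiler *compiler = sljit_create_compiler(NULL);

	if (compiler == NULL)
		return NULL;

	/* Store caller state on the compiler and read it back later,
	   e.g. from code that only receives the compiler pointer. */
	sljit_compiler_set_user_data(compiler, ctx);
	return sljit_compiler_get_user_data(compiler);
}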
+
#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE)
/* Passing NULL disables verbose. */
SLJIT_API_FUNC_ATTRIBUTE void sljit_compiler_verbose(struct sljit_compiler *compiler, FILE* verbose);
#endif
-/*
- Create executable code from the instruction stream. This is the final step
- of the code generation so no more instructions can be emitted after this call.
-*/
+/* Option bits for sljit_generate_code. */
+
+/* The exec_allocator_data points to a pre-allocated
+ buffer whose type is sljit_generate_code_buffer. */
+#define SLJIT_GENERATE_CODE_BUFFER 0x1
+
+/* Create executable code from the instruction stream. This is the final step
+ of the code generation, and no more instructions can be emitted after this call.
+
+ options is the combination of SLJIT_GENERATE_CODE_* bits
+ exec_allocator_data is passed to SLJIT_MALLOC_EXEC and
+ SLJIT_MALLOC_FREE functions */
-SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compiler);
+SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compiler, sljit_s32 options, void *exec_allocator_data);
/* Free executable code. */
SLJIT_API_FUNC_ATTRIBUTE void sljit_free_code(void* code, void *exec_allocator_data);
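A sketch of the two ways sljit_generate_code can now be driven. The pre-allocated buffer path assumes the caller supplies memory that is (or will be made) executable; how that mapping is obtained is outside the scope of this sketch.

#include "sljitLir.h"

static void *generate_into_buffer_example(struct sljit_compiler *compiler,
	void *code_area, sljit_uw code_area_size)
{
	struct sljit_generate_code_buffer buf;

	buf.buffer = code_area;
	buf.size = code_area_size;
	buf.executable_offset = 0;

	/* With SLJIT_GENERATE_CODE_BUFFER, exec_allocator_data points to the
	   sljit_generate_code_buffer describing the pre-allocated area. */
	return sljit_generate_code(compiler, SLJIT_GENERATE_CODE_BUFFER, &buf);
}

static void *generate_default_example(struct sljit_compiler *compiler)
{
	/* Default path: sljit allocates executable memory itself; the result
	   is later released with sljit_free_code(code, NULL). */
	return sljit_generate_code(compiler, 0, NULL);
}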
-/*
- When the protected executable allocator is used the JIT code is mapped
+/* When the protected executable allocator is used the JIT code is mapped
twice. The first mapping has read/write and the second mapping has read/exec
permissions. This function returns with the relative offset of the executable
mapping using the writable mapping as the base after the machine code is
@@ -596,16 +663,13 @@ SLJIT_API_FUNC_ATTRIBUTE void sljit_free_code(void* code, void *exec_allocator_d
allocator, since it uses only one mapping with read/write/exec permissions.
Dynamic code modifications requires this value.
- Before a successful code generation, this function returns with 0.
-*/
+ Before a successful code generation, this function returns with 0. */
static SLJIT_INLINE sljit_sw sljit_get_executable_offset(struct sljit_compiler *compiler) { return compiler->executable_offset; }
-/*
- The executable memory consumption of the generated code can be retrieved by
+/* The executable memory consumption of the generated code can be retrieved by
this function. The returned value can be used for statistical purposes.
- Before a successful code generation, this function returns with 0.
-*/
+ Before a successful code generation, this function returns with 0. */
static SLJIT_INLINE sljit_uw sljit_get_generated_code_size(struct sljit_compiler *compiler) { return compiler->executable_size; }
/* Returns with non-zero if the feature or limitation type passed as its
@@ -628,30 +692,54 @@ static SLJIT_INLINE sljit_uw sljit_get_generated_code_size(struct sljit_compiler
#define SLJIT_HAS_CLZ 3
/* [Emulated] Count trailing zero is supported. */
#define SLJIT_HAS_CTZ 4
+/* [Emulated] Reverse the order of bytes is supported. */
+#define SLJIT_HAS_REV 5
/* [Emulated] Rotate left/right is supported. */
-#define SLJIT_HAS_ROT 5
+#define SLJIT_HAS_ROT 6
/* [Emulated] Conditional move is supported. */
-#define SLJIT_HAS_CMOV 6
+#define SLJIT_HAS_CMOV 7
/* [Emulated] Prefetch instruction is available (emulated as a nop). */
-#define SLJIT_HAS_PREFETCH 7
+#define SLJIT_HAS_PREFETCH 8
+/* [Emulated] Copy from/to f32 operation is available (see sljit_emit_fcopy). */
+#define SLJIT_HAS_COPY_F32 9
+/* [Emulated] Copy from/to f64 operation is available (see sljit_emit_fcopy). */
+#define SLJIT_HAS_COPY_F64 10
+/* [Not emulated] The 64 bit floating point registers can be used as
+ two separate 32 bit floating point registers (e.g. ARM32). The
+ second 32 bit part can be accessed by SLJIT_F64_SECOND. */
+#define SLJIT_HAS_F64_AS_F32_PAIR 11
+/* [Not emulated] Some SIMD operations are supported by the compiler. */
+#define SLJIT_HAS_SIMD 12
+/* [Not emulated] SIMD registers are mapped to a pair of double precision
+ floating point registers. E.g. passing either SLJIT_FR0 or SLJIT_FR1 to
+ a simd operation represents the same 128 bit register, and both SLJIT_FR0
+ and SLJIT_FR1 are overwritten. */
+#define SLJIT_SIMD_REGS_ARE_PAIRS 13
+/* [Not emulated] Atomic support is available (fine-grained). */
+#define SLJIT_HAS_ATOMIC 14
#if (defined SLJIT_CONFIG_X86 && SLJIT_CONFIG_X86)
-/* [Not emulated] SSE2 support is available on x86. */
-#define SLJIT_HAS_SSE2 100
+/* [Not emulated] AVX support is available on x86. */
+#define SLJIT_HAS_AVX 100
+/* [Not emulated] AVX2 support is available on x86. */
+#define SLJIT_HAS_AVX2 101
+#endif
+
+#if (defined SLJIT_CONFIG_LOONGARCH)
+/* [Not emulated] LASX support is available on LoongArch */
+#define SLJIT_HAS_LASX 201
#endif
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_has_cpu_feature(sljit_s32 feature_type);
/* If type is between SLJIT_ORDERED_EQUAL and SLJIT_ORDERED_LESS_EQUAL,
- sljit_cmp_info returns one, if the cpu supports the passed floating
- point comparison type.
+ sljit_cmp_info returns with:
+ zero - if the cpu supports the floating point comparison type
+ one - if the comparison requires two machine instructions
+ two - if the comparison requires more than two machine instructions
- If type is SLJIT_UNORDERED or SLJIT_ORDERED, sljit_cmp_info returns
- one, if the cpu supports checking the unordered comparison result
- regardless of the comparison type passed to the comparison instruction.
- The returned value is always one, if there is at least one type between
- SLJIT_ORDERED_EQUAL and SLJIT_ORDERED_LESS_EQUAL where sljit_cmp_info
- returns with a zero value.
+ When the result is non-zero, it is recommended to avoid
+ using the specified comparison type if it is easy to do so.
Otherwise it returns zero. */
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_cmp_info(sljit_s32 type);
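A short sketch of how these queries are typically combined when deciding which instruction forms to emit:

#include "sljitLir.h"

static void feature_check_example(struct sljit_compiler *compiler)
{
	/* SLJIT_CTZ may be emulated; only emit it when the backend reports it. */
	if (sljit_has_cpu_feature(SLJIT_HAS_CTZ))
		sljit_emit_op1(compiler, SLJIT_CTZ, SLJIT_R0, 0, SLJIT_R0, 0);

	/* A non-zero result means SLJIT_ORDERED_LESS takes two or more
	   instructions on this target, so another comparison type may be
	   preferable when the choice is easy. */
	if (sljit_cmp_info(SLJIT_ORDERED_LESS) != 0) {
		/* pick a cheaper ordered/unordered comparison instead */
	}
}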
@@ -662,7 +750,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_cmp_info(sljit_s32 type);
/*
The executable code is a function from the viewpoint of the C
- language. The function calls must obey to the ABI (Application
+ language. The function calls must conform to the ABI (Application
Binary Interface) of the platform, which specify the purpose of
machine registers and stack handling among other things. The
sljit_emit_enter function emits the necessary instructions for
@@ -711,17 +799,22 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_cmp_info(sljit_s32 type);
global / local context pointers) across function calls. The
value of n must be between 1 and 3. This option is only
supported by SLJIT_ENTER_REG_ARG calling convention. */
-#define SLJIT_ENTER_KEEP(n) (n)
+#define SLJIT_ENTER_KEEP(n) (n)
/* The compiled function uses an SLJIT specific register argument
calling convention. This is a lightweight function call type where
both the caller and the called functions must be compiled by
SLJIT. The type argument of the call must be SLJIT_CALL_REG_ARG
and all arguments must be stored in scratch registers. */
-#define SLJIT_ENTER_REG_ARG 0x00000004
+#define SLJIT_ENTER_REG_ARG 0x00000004
/* The local_size must be >= 0 and <= SLJIT_MAX_LOCAL_SIZE. */
-#define SLJIT_MAX_LOCAL_SIZE 65536
+#define SLJIT_MAX_LOCAL_SIZE 1048576
+
+#if (defined SLJIT_CONFIG_X86 && SLJIT_CONFIG_X86)
+/* Use VEX prefix for all SIMD operations on x86. */
+#define SLJIT_ENTER_USE_VEX 0x00010000
+#endif /* SLJIT_CONFIG_X86 */
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compiler,
sljit_s32 options, sljit_s32 arg_types, sljit_s32 scratches, sljit_s32 saveds,
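A sketch of a typical prologue using the options above. Only the first part of the declaration is visible here; the trailing float-register counts and local_size are assumed to follow the prototype as in prior releases.

#include "sljitLir.h"

static void emit_enter_example(struct sljit_compiler *compiler)
{
	/* Function taking (sljit_sw, sljit_sw) and returning a word, using two
	   scratch and two saved integer registers, no float registers, and
	   16 bytes of local area (assumed parameter order). */
	sljit_emit_enter(compiler, 0, SLJIT_ARGS2(W, W, W),
		2, 2, 0, 0, 16);
}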
@@ -732,9 +825,9 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compi
by sljit_emit_enter. Several functions (such as sljit_emit_return)
requires this context to be able to generate the appropriate code.
However, some code fragments (compiled separately) may have no
- normal entry point so their context is unknown for the compiler.
+ normal entry point so their context is unknown to the compiler.
- The sljit_set_context and sljit_emit_enter have the same arguments,
+ sljit_set_context and sljit_emit_enter have the same arguments,
but sljit_set_context does not generate any machine code.
Note: every call of sljit_emit_enter and sljit_set_context overwrites
@@ -767,28 +860,6 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return(struct sljit_compiler *comp
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return_to(struct sljit_compiler *compiler,
sljit_s32 src, sljit_sw srcw);
-/* Generating entry and exit points for fast call functions (see SLJIT_FAST_CALL).
- Both sljit_emit_fast_enter and SLJIT_FAST_RETURN operations preserve the
- values of all registers and stack frame. The return address is stored in the
- dst argument of sljit_emit_fast_enter, and this return address can be passed
- to SLJIT_FAST_RETURN to continue the execution after the fast call.
-
- Fast calls are cheap operations (usually only a single call instruction is
- emitted) but they do not preserve any registers. However the callee function
- can freely use / update any registers and the local area which can be
- efficiently exploited by various optimizations. Registers can be saved
- and restored manually if needed.
-
- Although returning to different address by SLJIT_FAST_RETURN is possible,
- this address usually cannot be predicted by the return address predictor of
- modern CPUs which may reduce performance. Furthermore certain security
- enhancement technologies such as Intel Control-flow Enforcement Technology
- (CET) may disallow returning to a different address.
-
- Flags: - (does not modify flags). */
-
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw);
-
/*
Source and destination operands for arithmetical instructions
imm - a simple immediate value (cannot be used as a destination)
@@ -811,12 +882,12 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_enter(struct sljit_compiler *
int | 4 byte (physical_address & 0x3 == 0)
word | 4 byte if SLJIT_32BIT_ARCHITECTURE is defined and its value is 1
| 8 byte if SLJIT_64BIT_ARCHITECTURE is defined and its value is 1
- pointer | size of sljit_p type (4 byte on 32 bit machines, 4 or 8 byte
+ pointer | size of sljit_up type (4 byte on 32 bit machines, 4 or 8 byte
| on 64 bit machines)
Note: Different architectures have different addressing limitations.
A single instruction is enough for the following addressing
- modes. Other adrressing modes are emulated by instruction
+ modes. Other addressing modes are emulated by instruction
sequences. This information could help to improve those code
generators which focuses only a few architectures.
@@ -847,6 +918,9 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_enter(struct sljit_compiler *
s390x: [reg+imm], -2^19 <= imm < 2^19
[reg+reg] is supported
Write-back is not supported
+ loongarch: [reg+imm], -2048 <= imm <= 2047
+ [reg+reg] is supported
+ Write-back is not supported
*/
/* Macros for specifying operand types. */
@@ -854,9 +928,25 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_enter(struct sljit_compiler *
#define SLJIT_MEM0() (SLJIT_MEM)
#define SLJIT_MEM1(r1) (SLJIT_MEM | (r1))
#define SLJIT_MEM2(r1, r2) (SLJIT_MEM | (r1) | ((r2) << 8))
-#define SLJIT_IMM 0x40
+#define SLJIT_IMM 0x7f
#define SLJIT_REG_PAIR(r1, r2) ((r1) | ((r2) << 8))
+/* Macros for checking operand types (only for valid arguments). */
+#define SLJIT_IS_REG(arg) ((arg) > 0 && (arg) < SLJIT_IMM)
+#define SLJIT_IS_MEM(arg) ((arg) & SLJIT_MEM)
+#define SLJIT_IS_MEM0(arg) ((arg) == SLJIT_MEM)
+#define SLJIT_IS_MEM1(arg) ((arg) > SLJIT_MEM && (arg) < (SLJIT_MEM << 1))
+#define SLJIT_IS_MEM2(arg) (((arg) & SLJIT_MEM) && (arg) >= (SLJIT_MEM << 1))
+#define SLJIT_IS_IMM(arg) ((arg) == SLJIT_IMM)
+#define SLJIT_IS_REG_PAIR(arg) (!((arg) & SLJIT_MEM) && (arg) >= (SLJIT_MEM << 1))
+
+/* Macros for extracting registers from operands. */
+/* Support operands which contain a single register or are
+ constructed using SLJIT_MEM1, SLJIT_MEM2, or SLJIT_REG_PAIR. */
+#define SLJIT_EXTRACT_REG(arg) ((arg) & 0x7f)
+/* Support operands which are constructed using SLJIT_MEM2 or SLJIT_REG_PAIR. */
+#define SLJIT_EXTRACT_SECOND_REG(arg) ((arg) >> 8)
+
/* Sets 32 bit operation mode on 64 bit CPUs. This option is ignored on
32 bit CPUs. When this option is set for an arithmetic operation, only
the lower 32 bits of the input registers are used, and the CPU status
@@ -1028,7 +1118,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op0(struct sljit_compiler *compile
S16 - signed 16 bit data transfer
U32 - unsigned int (32 bit) data transfer
S32 - signed int (32 bit) data transfer
- P - pointer (sljit_p) data transfer
+ P - pointer (sljit_up) data transfer
*/
/* Flags: - (does not modify flags) */
@@ -1057,27 +1147,57 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op0(struct sljit_compiler *compile
Note: loads a pointer sized data, useful on x32 mode (a 64 bit mode
on x86-64 which uses 32 bit pointers) or similar compiling modes */
#define SLJIT_MOV_P (SLJIT_OP1_BASE + 8)
-/* Flags: Z
- Note: immediate source argument is not supported */
-#define SLJIT_NOT (SLJIT_OP1_BASE + 9)
-#define SLJIT_NOT32 (SLJIT_NOT | SLJIT_32)
/* Count leading zeroes
Flags: - (may destroy flags)
Note: immediate source argument is not supported */
-#define SLJIT_CLZ (SLJIT_OP1_BASE + 10)
+#define SLJIT_CLZ (SLJIT_OP1_BASE + 9)
#define SLJIT_CLZ32 (SLJIT_CLZ | SLJIT_32)
/* Count trailing zeroes
Flags: - (may destroy flags)
Note: immediate source argument is not supported */
-#define SLJIT_CTZ (SLJIT_OP1_BASE + 11)
+#define SLJIT_CTZ (SLJIT_OP1_BASE + 10)
#define SLJIT_CTZ32 (SLJIT_CTZ | SLJIT_32)
+/* Reverse the order of bytes
+ Flags: - (may destroy flags)
+ Note: converts between little and big endian formats
+ Note: immediate source argument is not supported */
+#define SLJIT_REV (SLJIT_OP1_BASE + 11)
+#define SLJIT_REV32 (SLJIT_REV | SLJIT_32)
+/* Reverse the order of bytes in the lower 16 bit and extend as unsigned
+ Flags: - (may destroy flags)
+ Note: converts between little and big endian formats
+ Note: immediate source argument is not supported */
+#define SLJIT_REV_U16 (SLJIT_OP1_BASE + 12)
+#define SLJIT_REV32_U16 (SLJIT_REV_U16 | SLJIT_32)
+/* Reverse the order of bytes in the lower 16 bit and extend as signed
+ Flags: - (may destroy flags)
+ Note: converts between little and big endian formats
+ Note: immediate source argument is not supported */
+#define SLJIT_REV_S16 (SLJIT_OP1_BASE + 13)
+#define SLJIT_REV32_S16 (SLJIT_REV_S16 | SLJIT_32)
+/* Reverse the order of bytes in the lower 32 bit and extend as unsigned
+ Flags: - (may destroy flags)
+ Note: converts between little and big endian formats
+ Note: immediate source argument is not supported */
+#define SLJIT_REV_U32 (SLJIT_OP1_BASE + 14)
+/* Reverse the order of bytes in the lower 32 bit and extend as signed
+ Flags: - (may destroy flags)
+ Note: converts between little and big endian formats
+ Note: immediate source argument is not supported */
+#define SLJIT_REV_S32 (SLJIT_OP1_BASE + 15)
+
+/* The following unary operations are supported by using sljit_emit_op2:
+ - binary not: SLJIT_XOR with immediate -1 as src1 or src2
+ - negate: SLJIT_SUB with immediate 0 as src1
+ Note: these operations are optimized by the compiler if the
+ target CPU has specialized instruction forms for them. */
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compiler, sljit_s32 op,
sljit_s32 dst, sljit_sw dstw,
sljit_s32 src, sljit_sw srcw);
/* Starting index of opcodes for sljit_emit_op2. */
-#define SLJIT_OP2_BASE 96
+#define SLJIT_OP2_BASE 64
/* Flags: Z | OVERFLOW | CARRY */
#define SLJIT_ADD (SLJIT_OP2_BASE + 0)
@@ -1165,6 +1285,19 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2u(struct sljit_compiler *compil
sljit_s32 src1, sljit_sw src1w,
sljit_s32 src2, sljit_sw src2w);
+/* Starting index of opcodes for sljit_emit_op2r. */
+#define SLJIT_OP2R_BASE 96
+
+/* Flags: - (may destroy flags) */
+#define SLJIT_MULADD (SLJIT_OP2R_BASE + 0)
+#define SLJIT_MULADD32 (SLJIT_MULADD | SLJIT_32)
+
+/* Similar to sljit_emit_op2, except the destination is always a register. */
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2r(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst_reg,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w);
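A sketch of the new reduced form; SLJIT_MULADD is assumed to accumulate the product of the two sources into dst_reg.

#include "sljitLir.h"

static void emit_muladd_example(struct sljit_compiler *compiler)
{
	/* Assumed semantics: R0 = R0 + (R1 * R2); flags may be destroyed. */
	sljit_emit_op2r(compiler, SLJIT_MULADD, SLJIT_R0,
		SLJIT_R1, 0, SLJIT_R2, 0);
}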
+
/* Emit a left or right shift operation, where the bits shifted
in comes from a separate source operand. All operands are
interpreted as unsigned integers.
@@ -1174,80 +1307,97 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2u(struct sljit_compiler *compil
op must be one of the following operations:
SLJIT_SHL or SLJIT_SHL32:
- src_dst <<= src2
- src_dst |= ((src1 >> 1) >> (src2 ^ value_mask))
+ dst_reg = src1_reg << src3_reg
+ dst_reg |= ((src2_reg >> 1) >> (src3 ^ value_mask))
SLJIT_MSHL or SLJIT_MSHL32:
- src2 &= value_mask
+ src3 &= value_mask
perform the SLJIT_SHL or SLJIT_SHL32 operation
SLJIT_LSHR or SLJIT_LSHR32:
- src_dst >>= src2
- src_dst |= ((src1 << 1) << (src2 ^ value_mask))
+ dst_reg = src1_reg >> src3_reg
+ dst_reg |= ((src2_reg << 1) << (src3 ^ value_mask))
SLJIT_MLSHR or SLJIT_MLSHR32:
- src2 &= value_mask
+ src3 &= value_mask
perform the SLJIT_LSHR or SLJIT_LSHR32 operation
op can be combined (or'ed) with SLJIT_SHIFT_INTO_NON_ZERO
- src_dst must be a register which content is updated after
- the operation is completed
- src1 / src1w contains the bits which shifted into src_dst
- src2 / src2w contains the shift amount
+ dst_reg specifies the destination register, where dst_reg
+ and src2_reg cannot be the same registers
+ src1_reg specifies the source register
+ src2_reg specifies the register which is shifted into src1_reg
+ src3 / src3w contains the shift amount
- Note: a rotate operation can be performed if src_dst and
- src1 are set to the same register
+ Note: a rotate operation is performed if src1_reg and
+ src2_reg are the same registers
Flags: - (may destroy flags) */
-/* The src2 contains a non-zero value. Improves the generated
- code on certain architectures, which provides a small
- performance improvement. */
+/* The src3 operand contains a non-zero value. Improves
+ the generated code on certain architectures, which
+ provides a small performance improvement. */
#define SLJIT_SHIFT_INTO_NON_ZERO 0x200
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_shift_into(struct sljit_compiler *compiler, sljit_s32 op,
- sljit_s32 src_dst,
- sljit_s32 src1, sljit_sw src1w,
- sljit_s32 src2, sljit_sw src2w);
+ sljit_s32 dst_reg,
+ sljit_s32 src1_reg,
+ sljit_s32 src2_reg,
+ sljit_s32 src3, sljit_sw src3w);
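A sketch of the rotate idiom described above: passing the same register as src1_reg and src2_reg turns the funnel shift into a rotate. SLJIT_MSHL is used so the shift amount in R2 is masked to the word size first.

#include "sljitLir.h"

static void emit_rotate_left_example(struct sljit_compiler *compiler)
{
	/* R1 = R0 rotated left by (R2 & value_mask); dst_reg and src2_reg
	   must differ, so the result lands in R1 rather than R0. */
	sljit_emit_shift_into(compiler, SLJIT_MSHL, SLJIT_R1,
		SLJIT_R0, SLJIT_R0, SLJIT_R2, 0);
}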
-/* Starting index of opcodes for sljit_emit_op2. */
-#define SLJIT_OP_SRC_BASE 128
+/* Starting index of opcodes for sljit_emit_op_src
+ and sljit_emit_op_dst. */
+#define SLJIT_OP_SRC_DST_BASE 112
-/* Note: src cannot be an immedate value
+/* Fast return, see SLJIT_FAST_CALL for more details.
+ Note: src cannot be an immediate value
Flags: - (does not modify flags) */
-#define SLJIT_FAST_RETURN (SLJIT_OP_SRC_BASE + 0)
+#define SLJIT_FAST_RETURN (SLJIT_OP_SRC_DST_BASE + 0)
/* Skip stack frames before fast return.
Note: src cannot be an immedate value
Flags: may destroy flags. */
-#define SLJIT_SKIP_FRAMES_BEFORE_FAST_RETURN (SLJIT_OP_SRC_BASE + 1)
+#define SLJIT_SKIP_FRAMES_BEFORE_FAST_RETURN (SLJIT_OP_SRC_DST_BASE + 1)
/* Prefetch value into the level 1 data cache
Note: if the target CPU does not support data prefetch,
no instructions are emitted.
Note: this instruction never fails, even if the memory address is invalid.
Flags: - (does not modify flags) */
-#define SLJIT_PREFETCH_L1 (SLJIT_OP_SRC_BASE + 2)
+#define SLJIT_PREFETCH_L1 (SLJIT_OP_SRC_DST_BASE + 2)
/* Prefetch value into the level 2 data cache
Note: same as SLJIT_PREFETCH_L1 if the target CPU
does not support this instruction form.
Note: this instruction never fails, even if the memory address is invalid.
Flags: - (does not modify flags) */
-#define SLJIT_PREFETCH_L2 (SLJIT_OP_SRC_BASE + 3)
+#define SLJIT_PREFETCH_L2 (SLJIT_OP_SRC_DST_BASE + 3)
/* Prefetch value into the level 3 data cache
Note: same as SLJIT_PREFETCH_L2 if the target CPU
does not support this instruction form.
Note: this instruction never fails, even if the memory address is invalid.
Flags: - (does not modify flags) */
-#define SLJIT_PREFETCH_L3 (SLJIT_OP_SRC_BASE + 4)
+#define SLJIT_PREFETCH_L3 (SLJIT_OP_SRC_DST_BASE + 4)
/* Prefetch a value which is only used once (and can be discarded afterwards)
Note: same as SLJIT_PREFETCH_L1 if the target CPU
does not support this instruction form.
Note: this instruction never fails, even if the memory address is invalid.
Flags: - (does not modify flags) */
-#define SLJIT_PREFETCH_ONCE (SLJIT_OP_SRC_BASE + 5)
+#define SLJIT_PREFETCH_ONCE (SLJIT_OP_SRC_DST_BASE + 5)
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_src(struct sljit_compiler *compiler, sljit_s32 op,
sljit_s32 src, sljit_sw srcw);
+/* Fast enter, see SLJIT_FAST_CALL for more details.
+ Flags: - (does not modify flags) */
+#define SLJIT_FAST_ENTER (SLJIT_OP_SRC_DST_BASE + 6)
+
+/* Copies the return address into dst. The return address is the
+ address where the execution continues after the called function
+ returns (see: sljit_emit_return / sljit_emit_return_void).
+ Flags: - (does not modify flags) */
+#define SLJIT_GET_RETURN_ADDRESS (SLJIT_OP_SRC_DST_BASE + 7)
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_dst(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw);
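A sketch of the relocated fast-call entry/exit pair (SLJIT_FAST_ENTER replaces the removed sljit_emit_fast_enter; see the SLJIT_FAST_CALL notes further below):

#include "sljitLir.h"

static void emit_fast_callee_example(struct sljit_compiler *compiler)
{
	/* Save the fast-call return address into S0 on entry. */
	sljit_emit_op_dst(compiler, SLJIT_FAST_ENTER, SLJIT_S0, 0);

	/* callee body goes here; registers are not preserved by the call */

	/* Return through the saved address. */
	sljit_emit_op_src(compiler, SLJIT_FAST_RETURN, SLJIT_S0, 0);
}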
+
/* Starting index of opcodes for sljit_emit_fop1. */
-#define SLJIT_FOP1_BASE 160
+#define SLJIT_FOP1_BASE 144
/* Flags: - (does not modify flags) */
#define SLJIT_MOV_F64 (SLJIT_FOP1_BASE + 0)
@@ -1270,15 +1420,21 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_src(struct sljit_compiler *comp
/* Flags: - (may destroy flags) */
#define SLJIT_CONV_F64_FROM_S32 (SLJIT_FOP1_BASE + 5)
#define SLJIT_CONV_F32_FROM_S32 (SLJIT_CONV_F64_FROM_S32 | SLJIT_32)
+/* Flags: - (may destroy flags) */
+#define SLJIT_CONV_F64_FROM_UW (SLJIT_FOP1_BASE + 6)
+#define SLJIT_CONV_F32_FROM_UW (SLJIT_CONV_F64_FROM_UW | SLJIT_32)
+/* Flags: - (may destroy flags) */
+#define SLJIT_CONV_F64_FROM_U32 (SLJIT_FOP1_BASE + 7)
+#define SLJIT_CONV_F32_FROM_U32 (SLJIT_CONV_F64_FROM_U32 | SLJIT_32)
/* Note: dst is the left and src is the right operand for SLJIT_CMP_F32/64.
Flags: EQUAL_F | LESS_F | GREATER_EQUAL_F | GREATER_F | LESS_EQUAL_F */
-#define SLJIT_CMP_F64 (SLJIT_FOP1_BASE + 6)
+#define SLJIT_CMP_F64 (SLJIT_FOP1_BASE + 8)
#define SLJIT_CMP_F32 (SLJIT_CMP_F64 | SLJIT_32)
/* Flags: - (may destroy flags) */
-#define SLJIT_NEG_F64 (SLJIT_FOP1_BASE + 7)
+#define SLJIT_NEG_F64 (SLJIT_FOP1_BASE + 9)
#define SLJIT_NEG_F32 (SLJIT_NEG_F64 | SLJIT_32)
/* Flags: - (may destroy flags) */
-#define SLJIT_ABS_F64 (SLJIT_FOP1_BASE + 8)
+#define SLJIT_ABS_F64 (SLJIT_FOP1_BASE + 10)
#define SLJIT_ABS_F32 (SLJIT_ABS_F64 | SLJIT_32)
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compiler, sljit_s32 op,
@@ -1286,7 +1442,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compil
sljit_s32 src, sljit_sw srcw);
/* Starting index of opcodes for sljit_emit_fop2. */
-#define SLJIT_FOP2_BASE 192
+#define SLJIT_FOP2_BASE 176
/* Flags: - (may destroy flags) */
#define SLJIT_ADD_F64 (SLJIT_FOP2_BASE + 0)
@@ -1306,10 +1462,90 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compil
sljit_s32 src1, sljit_sw src1w,
sljit_s32 src2, sljit_sw src2w);
+/* Starting index of opcodes for sljit_emit_fop2r. */
+#define SLJIT_FOP2R_BASE 192
+
+/* Flags: - (may destroy flags) */
+#define SLJIT_COPYSIGN_F64 (SLJIT_FOP2R_BASE + 0)
+#define SLJIT_COPYSIGN_F32 (SLJIT_COPYSIGN_F64 | SLJIT_32)
+
+/* Similar to sljit_emit_fop2, except the destination is always a register. */
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2r(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst_freg,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w);
+
+/* Sets a floating point register to an immediate value. */
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fset32(struct sljit_compiler *compiler,
+ sljit_s32 freg, sljit_f32 value);
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fset64(struct sljit_compiler *compiler,
+ sljit_s32 freg, sljit_f64 value);
+
+/* The following opcodes are used by sljit_emit_fcopy(). */
+
+/* 64 bit: copy a 64 bit value from an integer register into a
+ 64 bit floating point register without any modifications.
+ 32 bit: copy a 32 bit register or register pair into a 64 bit
+ floating point register without any modifications. The
+ register, or the first register of the register pair
+ replaces the high order 32 bit of the floating point
+ register. If a register pair is passed, the low
+ order 32 bit is replaced by the second register.
+ Otherwise, the low order 32 bit is unchanged. */
+#define SLJIT_COPY_TO_F64 1
+/* Copy a 32 bit value from an integer register into a 32 bit
+ floating point register without any modifications. */
+#define SLJIT_COPY32_TO_F32 (SLJIT_COPY_TO_F64 | SLJIT_32)
+/* 64 bit: copy the value of a 64 bit floating point register into
+ an integer register without any modifications.
+ 32 bit: copy a 64 bit floating point register into a 32 bit register
+ or a 32 bit register pair without any modifications. The
+ high order 32 bit of the floating point register is copied
+ into the register, or the first register of the register
+ pair. If a register pair is passed, the low order 32 bit
+ is copied into the second register. */
+#define SLJIT_COPY_FROM_F64 2
+/* Copy the value of a 32 bit floating point register into an integer
+ register without any modifications. The register should be processed
+ with 32 bit operations later. */
+#define SLJIT_COPY32_FROM_F32 (SLJIT_COPY_FROM_F64 | SLJIT_32)
+
+/* Special data copy which involves floating point registers.
+
+ op must be between SLJIT_COPY_TO_F64 and SLJIT_COPY32_FROM_F32
+ freg must be a floating point register
+ reg must be a register or register pair */
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fcopy(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 freg, sljit_s32 reg);
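A sketch of a raw bit-pattern move with the new fcopy operation, assuming a 64 bit target and that SLJIT_HAS_COPY_F64 has been checked beforehand:

#include "sljitLir.h"

static void emit_fcopy_example(struct sljit_compiler *compiler)
{
	/* Copy the unmodified bits of FR0 into R0, e.g. to test the sign bit
	   of a double without going through memory. */
	sljit_emit_fcopy(compiler, SLJIT_COPY_FROM_F64, SLJIT_FR0, SLJIT_R0);
}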
+
/* Label and jump instructions. */
SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emit_label(struct sljit_compiler *compiler);
+/* The SLJIT_FAST_CALL is a calling method for creating lightweight function
+ calls. This type of call preserves the values of all registers and the stack
+ frame. Unlike normal function calls, the enter and return operations must
+ be performed by the SLJIT_FAST_ENTER and SLJIT_FAST_RETURN operations
+ respectively. The return address is stored in the dst argument of the
+ SLJIT_FAST_ENTER operation, and this return address should be passed as
+ the src argument for the SLJIT_FAST_RETURN operation to return from the
+ called function.
+
+ Fast calls are cheap operations (usually only a single call instruction is
+ emitted) but they do not preserve any registers. However the callee function
+ can freely use / update any registers and the locals area which can be
+ efficiently exploited by various optimizations. Registers can be saved
+ and restored manually if needed.
+
+ Although returning to a different address by SLJIT_FAST_RETURN is possible,
+ this address usually cannot be predicted by the return address predictor of
+ modern CPUs which may reduce performance. Furthermore certain security
+ enhancement technologies such as Intel Control-flow Enforcement Technology
+ (CET) may disallow returning to a different address (indirect jumps
+ can be used instead, see SLJIT_SKIP_FRAMES_BEFORE_FAST_RETURN). */
+
/* Invert (negate) conditional type: xor (^) with 0x1 */
/* Integer comparison types. */
@@ -1321,19 +1557,19 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emit_label(struct sljit_compi
#define SLJIT_LESS 2
#define SLJIT_SET_LESS SLJIT_SET(SLJIT_LESS)
#define SLJIT_GREATER_EQUAL 3
-#define SLJIT_SET_GREATER_EQUAL SLJIT_SET(SLJIT_GREATER_EQUAL)
+#define SLJIT_SET_GREATER_EQUAL SLJIT_SET(SLJIT_LESS)
#define SLJIT_GREATER 4
#define SLJIT_SET_GREATER SLJIT_SET(SLJIT_GREATER)
#define SLJIT_LESS_EQUAL 5
-#define SLJIT_SET_LESS_EQUAL SLJIT_SET(SLJIT_LESS_EQUAL)
+#define SLJIT_SET_LESS_EQUAL SLJIT_SET(SLJIT_GREATER)
#define SLJIT_SIG_LESS 6
#define SLJIT_SET_SIG_LESS SLJIT_SET(SLJIT_SIG_LESS)
#define SLJIT_SIG_GREATER_EQUAL 7
-#define SLJIT_SET_SIG_GREATER_EQUAL SLJIT_SET(SLJIT_SIG_GREATER_EQUAL)
+#define SLJIT_SET_SIG_GREATER_EQUAL SLJIT_SET(SLJIT_SIG_LESS)
#define SLJIT_SIG_GREATER 8
#define SLJIT_SET_SIG_GREATER SLJIT_SET(SLJIT_SIG_GREATER)
#define SLJIT_SIG_LESS_EQUAL 9
-#define SLJIT_SET_SIG_LESS_EQUAL SLJIT_SET(SLJIT_SIG_LESS_EQUAL)
+#define SLJIT_SET_SIG_LESS_EQUAL SLJIT_SET(SLJIT_SIG_GREATER)
#define SLJIT_OVERFLOW 10
#define SLJIT_SET_OVERFLOW SLJIT_SET(SLJIT_OVERFLOW)
@@ -1344,70 +1580,74 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emit_label(struct sljit_compi
#define SLJIT_SET_CARRY SLJIT_SET(SLJIT_CARRY)
#define SLJIT_NOT_CARRY 13
+#define SLJIT_ATOMIC_STORED 14
+#define SLJIT_SET_ATOMIC_STORED SLJIT_SET(SLJIT_ATOMIC_STORED)
+#define SLJIT_ATOMIC_NOT_STORED 15
+
/* Basic floating point comparison types.
Note: when the comparison result is unordered, their behaviour is unspecified. */
-#define SLJIT_F_EQUAL 14
+#define SLJIT_F_EQUAL 16
#define SLJIT_SET_F_EQUAL SLJIT_SET(SLJIT_F_EQUAL)
-#define SLJIT_F_NOT_EQUAL 15
-#define SLJIT_SET_F_NOT_EQUAL SLJIT_SET(SLJIT_F_NOT_EQUAL)
-#define SLJIT_F_LESS 16
+#define SLJIT_F_NOT_EQUAL 17
+#define SLJIT_SET_F_NOT_EQUAL SLJIT_SET(SLJIT_F_EQUAL)
+#define SLJIT_F_LESS 18
#define SLJIT_SET_F_LESS SLJIT_SET(SLJIT_F_LESS)
-#define SLJIT_F_GREATER_EQUAL 17
-#define SLJIT_SET_F_GREATER_EQUAL SLJIT_SET(SLJIT_F_GREATER_EQUAL)
-#define SLJIT_F_GREATER 18
+#define SLJIT_F_GREATER_EQUAL 19
+#define SLJIT_SET_F_GREATER_EQUAL SLJIT_SET(SLJIT_F_LESS)
+#define SLJIT_F_GREATER 20
#define SLJIT_SET_F_GREATER SLJIT_SET(SLJIT_F_GREATER)
-#define SLJIT_F_LESS_EQUAL 19
-#define SLJIT_SET_F_LESS_EQUAL SLJIT_SET(SLJIT_F_LESS_EQUAL)
+#define SLJIT_F_LESS_EQUAL 21
+#define SLJIT_SET_F_LESS_EQUAL SLJIT_SET(SLJIT_F_GREATER)
/* Jumps when either argument contains a NaN value. */
-#define SLJIT_UNORDERED 20
+#define SLJIT_UNORDERED 22
#define SLJIT_SET_UNORDERED SLJIT_SET(SLJIT_UNORDERED)
/* Jumps when neither argument contains a NaN value. */
-#define SLJIT_ORDERED 21
-#define SLJIT_SET_ORDERED SLJIT_SET(SLJIT_ORDERED)
+#define SLJIT_ORDERED 23
+#define SLJIT_SET_ORDERED SLJIT_SET(SLJIT_UNORDERED)
/* Ordered / unordered floating point comparison types.
Note: each comparison type has an ordered and unordered form. Some
architectures supports only either of them (see: sljit_cmp_info). */
-#define SLJIT_ORDERED_EQUAL 22
+#define SLJIT_ORDERED_EQUAL 24
#define SLJIT_SET_ORDERED_EQUAL SLJIT_SET(SLJIT_ORDERED_EQUAL)
-#define SLJIT_UNORDERED_OR_NOT_EQUAL 23
-#define SLJIT_SET_UNORDERED_OR_NOT_EQUAL SLJIT_SET(SLJIT_UNORDERED_OR_NOT_EQUAL)
-#define SLJIT_ORDERED_LESS 24
+#define SLJIT_UNORDERED_OR_NOT_EQUAL 25
+#define SLJIT_SET_UNORDERED_OR_NOT_EQUAL SLJIT_SET(SLJIT_ORDERED_EQUAL)
+#define SLJIT_ORDERED_LESS 26
#define SLJIT_SET_ORDERED_LESS SLJIT_SET(SLJIT_ORDERED_LESS)
-#define SLJIT_UNORDERED_OR_GREATER_EQUAL 25
-#define SLJIT_SET_UNORDERED_OR_GREATER_EQUAL SLJIT_SET(SLJIT_UNORDERED_OR_GREATER_EQUAL)
-#define SLJIT_ORDERED_GREATER 26
+#define SLJIT_UNORDERED_OR_GREATER_EQUAL 27
+#define SLJIT_SET_UNORDERED_OR_GREATER_EQUAL SLJIT_SET(SLJIT_ORDERED_LESS)
+#define SLJIT_ORDERED_GREATER 28
#define SLJIT_SET_ORDERED_GREATER SLJIT_SET(SLJIT_ORDERED_GREATER)
-#define SLJIT_UNORDERED_OR_LESS_EQUAL 27
-#define SLJIT_SET_UNORDERED_OR_LESS_EQUAL SLJIT_SET(SLJIT_UNORDERED_OR_LESS_EQUAL)
+#define SLJIT_UNORDERED_OR_LESS_EQUAL 29
+#define SLJIT_SET_UNORDERED_OR_LESS_EQUAL SLJIT_SET(SLJIT_ORDERED_GREATER)
-#define SLJIT_UNORDERED_OR_EQUAL 28
+#define SLJIT_UNORDERED_OR_EQUAL 30
#define SLJIT_SET_UNORDERED_OR_EQUAL SLJIT_SET(SLJIT_UNORDERED_OR_EQUAL)
-#define SLJIT_ORDERED_NOT_EQUAL 29
-#define SLJIT_SET_ORDERED_NOT_EQUAL SLJIT_SET(SLJIT_ORDERED_NOT_EQUAL)
-#define SLJIT_UNORDERED_OR_LESS 30
+#define SLJIT_ORDERED_NOT_EQUAL 31
+#define SLJIT_SET_ORDERED_NOT_EQUAL SLJIT_SET(SLJIT_UNORDERED_OR_EQUAL)
+#define SLJIT_UNORDERED_OR_LESS 32
#define SLJIT_SET_UNORDERED_OR_LESS SLJIT_SET(SLJIT_UNORDERED_OR_LESS)
-#define SLJIT_ORDERED_GREATER_EQUAL 31
-#define SLJIT_SET_ORDERED_GREATER_EQUAL SLJIT_SET(SLJIT_ORDERED_GREATER_EQUAL)
-#define SLJIT_UNORDERED_OR_GREATER 32
+#define SLJIT_ORDERED_GREATER_EQUAL 33
+#define SLJIT_SET_ORDERED_GREATER_EQUAL SLJIT_SET(SLJIT_UNORDERED_OR_LESS)
+#define SLJIT_UNORDERED_OR_GREATER 34
#define SLJIT_SET_UNORDERED_OR_GREATER SLJIT_SET(SLJIT_UNORDERED_OR_GREATER)
-#define SLJIT_ORDERED_LESS_EQUAL 33
-#define SLJIT_SET_ORDERED_LESS_EQUAL SLJIT_SET(SLJIT_ORDERED_LESS_EQUAL)
+#define SLJIT_ORDERED_LESS_EQUAL 35
+#define SLJIT_SET_ORDERED_LESS_EQUAL SLJIT_SET(SLJIT_UNORDERED_OR_GREATER)
/* Unconditional jump types. */
-#define SLJIT_JUMP 34
-/* Fast calling method. See sljit_emit_fast_enter / SLJIT_FAST_RETURN. */
-#define SLJIT_FAST_CALL 35
+#define SLJIT_JUMP 36
+/* Fast calling method. See the description above. */
+#define SLJIT_FAST_CALL 37
/* Default C calling convention. */
-#define SLJIT_CALL 36
+#define SLJIT_CALL 38
/* Called function must be compiled by SLJIT.
See SLJIT_ENTER_REG_ARG option. */
-#define SLJIT_CALL_REG_ARG 37
+#define SLJIT_CALL_REG_ARG 39
/* The target can be changed during runtime (see: sljit_set_jump_addr). */
#define SLJIT_REWRITABLE_JUMP 0x1000
@@ -1497,19 +1737,42 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *co
sljit_s32 dst, sljit_sw dstw,
sljit_s32 type);
-/* Emit a conditional mov instruction which moves source to destination,
- if the condition is satisfied. Unlike other arithmetic operations this
- instruction does not support memory access.
+/* Emit a conditional select instruction which moves src1 to dst_reg,
+ if the condition is satisfied, or src2_reg to dst_reg otherwise.
type must be between SLJIT_EQUAL and SLJIT_ORDERED_LESS_EQUAL
- type can be combined (or'ed) with SLJIT_32
- dst_reg must be a valid register
- src must be a valid register or immediate (SLJIT_IMM)
+ type can be combined (or'ed) with SLJIT_32 to move 32 bit
+ register values instead of word sized ones
+ dst_reg and src2_reg must be valid registers
+ src1 must be a valid operand
+
+ Note: if src1 is a memory operand, its value
+ might be loaded even if the condition is false.
Flags: - (does not modify flags) */
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_cmov(struct sljit_compiler *compiler, sljit_s32 type,
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_select(struct sljit_compiler *compiler, sljit_s32 type,
sljit_s32 dst_reg,
- sljit_s32 src, sljit_sw srcw);
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2_reg);
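A sketch of the conditional select that replaces sljit_emit_cmov, assuming the flags were set by a preceding operation emitted with SLJIT_SET_LESS:

#include "sljitLir.h"

static void emit_select_example(struct sljit_compiler *compiler)
{
	/* R0 = (LESS was satisfied) ? 1 : R0; src2_reg supplies the
	   "condition false" value and must be a register. */
	sljit_emit_select(compiler, SLJIT_LESS, SLJIT_R0,
		SLJIT_IMM, 1, SLJIT_R0);
}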
+
+/* Emit a conditional floating point select instruction which moves
+ src1 to dst_reg, if the condition is satisfied, or src2_reg to
+ dst_reg otherwise.
+
+ type must be between SLJIT_EQUAL and SLJIT_ORDERED_LESS_EQUAL
+ type can be combined (or'ed) with SLJIT_32 to move 32 bit
+ floating point values instead of 64 bit ones
+ dst_freg and src2_freg must be valid floating point registers
+ src1 must be a valid operand
+
+ Note: if src1 is a memory operand, its value
+ might be loaded even if the condition is false.
+
+ Flags: - (does not modify flags) */
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fselect(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 dst_freg,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2_freg);
/* The following flags are used by sljit_emit_mem(), sljit_emit_mem_update(),
sljit_emit_fmem(), and sljit_emit_fmem_update(). */
@@ -1524,9 +1787,9 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_cmov(struct sljit_compiler *compil
/* Load or stora data from an unaligned (byte aligned) address. */
#define SLJIT_MEM_UNALIGNED 0x000400
/* Load or stora data from a 16 bit aligned address. */
-#define SLJIT_MEM_UNALIGNED_16 0x000800
+#define SLJIT_MEM_ALIGNED_16 0x000800
/* Load or stora data from a 32 bit aligned address. */
-#define SLJIT_MEM_UNALIGNED_32 0x001000
+#define SLJIT_MEM_ALIGNED_32 0x001000
/* The following flags are used by sljit_emit_mem_update(),
and sljit_emit_fmem_update(). */
@@ -1544,8 +1807,8 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_cmov(struct sljit_compiler *compil
/* The sljit_emit_mem emits instructions for various memory operations:
- When SLJIT_MEM_UNALIGNED / SLJIT_MEM_UNALIGNED_16 /
- SLJIT_MEM_UNALIGNED_32 is set in type argument:
+ When SLJIT_MEM_UNALIGNED / SLJIT_MEM_ALIGNED_16 /
+ SLJIT_MEM_ALIGNED_32 is set in type argument:
Emit instructions for unaligned memory loads or stores. When
SLJIT_UNALIGNED is not defined, the only way to access unaligned
memory data is using sljit_emit_mem. Otherwise all operations (e.g.
@@ -1560,8 +1823,8 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_cmov(struct sljit_compiler *compil
location specified by the mem/memw arguments, and the end address
of this operation is the starting address of the data transfer
between the second register and memory. The type argument must
- be SLJIT_MOV. The SLJIT_MEM_UNALIGNED* options are allowed for
- this operation.
+ be SLJIT_MOV. The SLJIT_MEM_UNALIGNED / SLJIT_MEM_ALIGNED_*
+ options are allowed for this operation.
type must be between SLJIT_MOV and SLJIT_MOV_P and can be
combined (or'ed) with SLJIT_MEM_* flags
@@ -1625,6 +1888,286 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fmem_update(struct sljit_compiler
sljit_s32 freg,
sljit_s32 mem, sljit_sw memw);
+/* The following options are used by several simd operations. */
+
+/* Load data into a simd register, this is the default */
+#define SLJIT_SIMD_LOAD 0x000000
+/* Store data from a simd register */
+#define SLJIT_SIMD_STORE 0x000001
+/* The simd register contains floating point values */
+#define SLJIT_SIMD_FLOAT 0x000400
+/* Tests whether the operation is available */
+#define SLJIT_SIMD_TEST 0x000800
+/* Move data to/from a 64 bit (8 byte) long SIMD register */
+#define SLJIT_SIMD_REG_64 (3 << 12)
+/* Move data to/from a 128 bit (16 byte) long SIMD register */
+#define SLJIT_SIMD_REG_128 (4 << 12)
+/* Move data to/from a 256 bit (32 byte) long SIMD register */
+#define SLJIT_SIMD_REG_256 (5 << 12)
+/* Move data to/from a 512 bit (64 byte) long SIMD register */
+#define SLJIT_SIMD_REG_512 (6 << 12)
+/* Element size is 8 bit long (this is the default), usually cannot be combined with SLJIT_SIMD_FLOAT */
+#define SLJIT_SIMD_ELEM_8 (0 << 18)
+/* Element size is 16 bit long, usually cannot be combined with SLJIT_SIMD_FLOAT */
+#define SLJIT_SIMD_ELEM_16 (1 << 18)
+/* Element size is 32 bit long */
+#define SLJIT_SIMD_ELEM_32 (2 << 18)
+/* Element size is 64 bit long */
+#define SLJIT_SIMD_ELEM_64 (3 << 18)
+/* Element size is 128 bit long */
+#define SLJIT_SIMD_ELEM_128 (4 << 18)
+/* Element size is 256 bit long */
+#define SLJIT_SIMD_ELEM_256 (5 << 18)
+
+/* The following options are used by sljit_emit_simd_mov(). */
+
+/* Memory address is unaligned (this is the default) */
+#define SLJIT_SIMD_MEM_UNALIGNED (0 << 24)
+/* Memory address is 16 bit aligned */
+#define SLJIT_SIMD_MEM_ALIGNED_16 (1 << 24)
+/* Memory address is 32 bit aligned */
+#define SLJIT_SIMD_MEM_ALIGNED_32 (2 << 24)
+/* Memory address is 64 bit aligned */
+#define SLJIT_SIMD_MEM_ALIGNED_64 (3 << 24)
+/* Memory address is 128 bit aligned */
+#define SLJIT_SIMD_MEM_ALIGNED_128 (4 << 24)
+/* Memory address is 256 bit aligned */
+#define SLJIT_SIMD_MEM_ALIGNED_256 (5 << 24)
+/* Memory address is 512 bit aligned */
+#define SLJIT_SIMD_MEM_ALIGNED_512 (6 << 24)
+
+/* Moves data between a simd register and memory.
+
+ If the operation is not supported, it returns with
+ SLJIT_ERR_UNSUPPORTED. If SLJIT_SIMD_TEST is passed,
+ it does not emit any instructions.
+
+ type must be a combination of SLJIT_SIMD_* and
+ SLJIT_SIMD_MEM_* options
+ freg is the source or destination simd register
+ of the operation
+ srcdst must be a memory operand or a simd register
+
+ Note:
+ The alignment and element size must be
+ less than or equal to the simd register size.
+
+ Flags: - (does not modify flags) */
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_mov(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
+ sljit_s32 srcdst, sljit_sw srcdstw);
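A hedged sketch of the probe-then-emit pattern described above (SLJIT_FR0, SLJIT_R0 and SLJIT_MEM1 are the usual sljit macros):

    sljit_s32 type = SLJIT_SIMD_LOAD | SLJIT_SIMD_REG_128 | SLJIT_SIMD_ELEM_8;

    /* With SLJIT_SIMD_TEST nothing is emitted; only support is checked. */
    if (sljit_emit_simd_mov(compiler, type | SLJIT_SIMD_TEST,
            SLJIT_FR0, SLJIT_MEM1(SLJIT_R0), 0) != SLJIT_ERR_UNSUPPORTED)
        sljit_emit_simd_mov(compiler, type, SLJIT_FR0, SLJIT_MEM1(SLJIT_R0), 0);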
+
+/* Replicates a scalar value to all lanes of a simd
+ register.
+
+ If the operation is not supported, it returns with
+ SLJIT_ERR_UNSUPPORTED. If SLJIT_SIMD_TEST is passed,
+ it does not emit any instructions.
+
+ type must be a combination of SLJIT_SIMD_* options
+ except SLJIT_SIMD_STORE.
+ freg is the destination simd register of the operation
+ src is the value which is replicated
+
+ Note:
+ The src == SLJIT_IMM and srcw == 0 can be used to
+ clear a register even when SLJIT_SIMD_FLOAT is set.
+
+ Flags: - (does not modify flags) */
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_replicate(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
+ sljit_s32 src, sljit_sw srcw);
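For example (a sketch, not mandated usage), broadcasting a general purpose register and clearing a float vector could be written as:

    /* Broadcast the low 8 bits of R1 into every byte lane of FR2. */
    sljit_emit_simd_replicate(compiler,
        SLJIT_SIMD_REG_128 | SLJIT_SIMD_ELEM_8, SLJIT_FR2, SLJIT_R1, 0);

    /* Clear FR3 via the immediate zero special case noted above. */
    sljit_emit_simd_replicate(compiler,
        SLJIT_SIMD_REG_128 | SLJIT_SIMD_ELEM_32 | SLJIT_SIMD_FLOAT,
        SLJIT_FR3, SLJIT_IMM, 0);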
+
+/* The following options are used by sljit_emit_simd_lane_mov(). */
+
+/* Clear all bits of the simd register before loading the lane. */
+#define SLJIT_SIMD_LANE_ZERO 0x000002
+/* Sign extend the integer value when it is stored from the lane. */
+#define SLJIT_SIMD_LANE_SIGNED 0x000004
+
+/* Moves data between a simd register lane and a register or
+ memory. If the srcdst argument is a register, it must be
+ a floating point register when SLJIT_SIMD_FLOAT is specified,
+ or a general purpose register otherwise.
+
+ If the operation is not supported, it returns with
+ SLJIT_ERR_UNSUPPORTED. If SLJIT_SIMD_TEST is passed,
+ it does not emit any instructions.
+
+ type must be a combination of SLJIT_SIMD_* options
+ Further options:
+ SLJIT_32 - when SLJIT_SIMD_FLOAT is not set
+ SLJIT_SIMD_LANE_SIGNED - when SLJIT_SIMD_STORE
+ is set and SLJIT_SIMD_FLOAT is not set
+ SLJIT_SIMD_LANE_ZERO - when SLJIT_SIMD_LOAD
+ is specified
+ freg is the source or destination simd register
+ of the operation
+ lane_index is the index of the lane
+ srcdst is the destination operand for loads, and
+ source operand for stores
+
+ Note:
+ The element size must be smaller than the register size.
+
+ Flags: - (does not modify flags) */
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_lane_mov(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg, sljit_s32 lane_index,
+ sljit_s32 srcdst, sljit_sw srcdstw);
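A sketch of both directions, assuming 128 bit registers are available:

    /* Load a 32 bit value from R1 into lane 0 and zero the other lanes. */
    sljit_emit_simd_lane_mov(compiler,
        SLJIT_SIMD_LOAD | SLJIT_SIMD_REG_128 | SLJIT_SIMD_ELEM_32 | SLJIT_SIMD_LANE_ZERO,
        SLJIT_FR0, 0, SLJIT_R1, 0);

    /* Store lane 3 into R2, sign extending the 16 bit element. */
    sljit_emit_simd_lane_mov(compiler,
        SLJIT_SIMD_STORE | SLJIT_SIMD_REG_128 | SLJIT_SIMD_ELEM_16 | SLJIT_SIMD_LANE_SIGNED,
        SLJIT_FR0, 3, SLJIT_R2, 0);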
+
+/* Replicates a scalar value from a lane to all lanes
+ of a simd register.
+
+ If the operation is not supported, it returns with
+ SLJIT_ERR_UNSUPPORTED. If SLJIT_SIMD_TEST is passed,
+ it does not emit any instructions.
+
+ type must be a combination of SLJIT_SIMD_* options
+ except SLJIT_SIMD_STORE.
+ freg is the destination simd register of the operation
+ src is the simd register whose lane is replicated
+ src_lane_index is the lane index of the src register
+
+ Flags: - (does not modify flags) */
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_lane_replicate(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
+ sljit_s32 src, sljit_s32 src_lane_index);
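For instance (sketch only):

    /* Broadcast lane 2 of FR1 into every 32 bit lane of FR0. */
    sljit_emit_simd_lane_replicate(compiler,
        SLJIT_SIMD_REG_128 | SLJIT_SIMD_ELEM_32, SLJIT_FR0, SLJIT_FR1, 2);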
+
+/* The following options are used by sljit_emit_simd_load_extend(). */
+
+/* Sign extend the integer elements */
+#define SLJIT_SIMD_EXTEND_SIGNED 0x000002
+/* Extend data to 16 bit */
+#define SLJIT_SIMD_EXTEND_16 (1 << 24)
+/* Extend data to 32 bit */
+#define SLJIT_SIMD_EXTEND_32 (2 << 24)
+/* Extend data to 64 bit */
+#define SLJIT_SIMD_EXTEND_64 (3 << 24)
+
+/* Extends elements and stores them in a simd register.
+ The extension operation increases the size of the
+ elements (e.g. from 16 bit to 64 bit). For integer
+ values, the extension can be signed or unsigned.
+
+ If the operation is not supported, it returns with
+ SLJIT_ERR_UNSUPPORTED. If SLJIT_SIMD_TEST is passed,
+ it does not emit any instructions.
+
+ type must be a combination of SLJIT_SIMD_*, and
+ SLJIT_SIMD_EXTEND_* options except SLJIT_SIMD_STORE
+ freg is the destination simd register of the operation
+ src must be a memory operand or a simd register.
+ In the latter case, the source elements are stored
+ in the lower half of the register.
+
+ Flags: - (does not modify flags) */
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_extend(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
+ sljit_s32 src, sljit_sw srcw);
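A sketch, assuming SLJIT_SIMD_ELEM_* names the source element size and SLJIT_SIMD_EXTEND_* the widened size:

    /* Sign extend four 16 bit integers from memory into four 32 bit lanes. */
    sljit_emit_simd_extend(compiler,
        SLJIT_SIMD_LOAD | SLJIT_SIMD_REG_128 | SLJIT_SIMD_ELEM_16
            | SLJIT_SIMD_EXTEND_32 | SLJIT_SIMD_EXTEND_SIGNED,
        SLJIT_FR0, SLJIT_MEM1(SLJIT_R0), 0);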
+
+/* Extracts the highest bit (usually the sign bit) from
+   each element of a vector.
+
+ If the operation is not supported, it returns with
+ SLJIT_ERR_UNSUPPORTED. If SLJIT_SIMD_TEST is passed,
+ it does not emit any instructions.
+
+ type must be a combination of SLJIT_SIMD_* and SLJIT_32
+ options except SLJIT_SIMD_LOAD
+ freg is the source simd register of the operation
+ dst is the destination operand
+
+ Flags: - (does not modify flags) */
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_sign(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
+ sljit_s32 dst, sljit_sw dstw);
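A sketch of a typical use (a byte-wise movemask-style operation):

    /* Collect the top bit of each of the 16 byte lanes of FR0 into R0. */
    sljit_emit_simd_sign(compiler,
        SLJIT_SIMD_STORE | SLJIT_SIMD_REG_128 | SLJIT_SIMD_ELEM_8 | SLJIT_32,
        SLJIT_FR0, SLJIT_R0, 0);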
+
+/* The following options are used by sljit_emit_simd_op2(). */
+
+/* Binary 'and' operation */
+#define SLJIT_SIMD_OP2_AND 0x000001
+/* Binary 'or' operation */
+#define SLJIT_SIMD_OP2_OR 0x000002
+/* Binary 'xor' operation */
+#define SLJIT_SIMD_OP2_XOR 0x000003
+
+/* Perform simd operations using simd registers.
+
+ If the operation is not supported, it returns with
+ SLJIT_ERR_UNSUPPORTED. If SLJIT_SIMD_TEST is passed,
+ it does not emit any instructions.
+
+ type must be a combination of SLJIT_SIMD_* and SLJIT_SIMD_OP2_*
+ options except SLJIT_SIMD_LOAD and SLJIT_SIMD_STORE
+ dst_freg is the destination register of the operation
+ src1_freg is the first source register of the operation
+ src2_freg is the second source register of the operation
+
+ Flags: - (does not modify flags) */
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_op2(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 dst_freg, sljit_s32 src1_freg, sljit_s32 src2_freg);
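A sketch:

    /* FR0 = FR1 ^ FR2 over full 128 bit registers. */
    sljit_emit_simd_op2(compiler,
        SLJIT_SIMD_OP2_XOR | SLJIT_SIMD_REG_128 | SLJIT_SIMD_ELEM_8,
        SLJIT_FR0, SLJIT_FR1, SLJIT_FR2);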
+
+/* The sljit_emit_atomic_load and sljit_emit_atomic_store operation pair
+ can perform an atomic read-modify-write operation. First, an unsigned
+ value must be loaded from memory using sljit_emit_atomic_load. Then,
+ the updated value must be written back to the same memory location by
+ sljit_emit_atomic_store. A thread can only perform a single atomic
+ operation at a time.
+
+ Note: atomic operations are experimental, and not implemented
+ for all cpus.
+
+ The following conditions must be satisfied, or the operation
+ is undefined:
+ - the address provided in mem_reg must be divisible by the size of
+ the value (only naturally aligned updates are supported)
+ - no memory writes are allowed between the load and store operations
+ regardless of their target addresses (currently read operations are
+ allowed, but this might change in the future)
+ - the memory operation (op) and the base address (stored in mem_reg)
+ passed to the load/store operations must be the same (the mem_reg
+ can be a different register, only its value must be the same)
+ - a store must always follow a load for the same transaction.
+
+ op must be between SLJIT_MOV and SLJIT_MOV_P, excluding all
+ signed loads such as SLJIT_MOV32_S16
+ dst_reg is the register where the data will be loaded into
+ mem_reg is the base address of the memory load (it cannot be
+ SLJIT_SP or a virtual register on x86-32)
+
+ Flags: - (does not modify flags) */
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_atomic_load(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst_reg,
+ sljit_s32 mem_reg);
+
+/* The sljit_emit_atomic_load and sljit_emit_atomic_store operations
+ allow performing an atomic read-modify-write operation. See the
+ description of sljit_emit_atomic_load.
+
+ op must be between SLJIT_MOV and SLJIT_MOV_P, excluding all signed
+ loads such as SLJIT_MOV32_S16
+ src_reg is the register whose value is stored into the memory
+ mem_reg is the base address of the memory store (it cannot be
+ SLJIT_SP or a virtual register on x86-32)
+ temp_reg is a non-preserved scratch register, which must be
+ initialized with the value loaded into the dst_reg during the
+ corresponding sljit_emit_atomic_load operation, or the operation
+ is undefined
+
+ Flags: ATOMIC_STORED is set if the operation is successful,
+ otherwise the memory remains unchanged. */
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_atomic_store(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 src_reg,
+ sljit_s32 mem_reg,
+ sljit_s32 temp_reg);
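Putting the pair together, a hedged sketch of an atomic word increment (the SLJIT_SET_ATOMIC_STORED flag name follows the usual SLJIT_SET_* convention and is an assumption here; the retry-loop jumps are omitted):

    /* Load the current value at [R0] into R1. */
    sljit_emit_atomic_load(compiler, SLJIT_MOV, SLJIT_R1, SLJIT_R0);
    /* temp_reg must hold the value that was loaded, so keep a copy in R2. */
    sljit_emit_op1(compiler, SLJIT_MOV, SLJIT_R2, 0, SLJIT_R1, 0);
    sljit_emit_op2(compiler, SLJIT_ADD, SLJIT_R1, 0, SLJIT_R1, 0, SLJIT_IMM, 1);
    sljit_emit_atomic_store(compiler, SLJIT_MOV | SLJIT_SET_ATOMIC_STORED, /* assumed flag name */
        SLJIT_R1, SLJIT_R0, SLJIT_R2);
    /* If ATOMIC_STORED is not set, the caller would normally branch back
       to the load and retry. */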
+
/* Copies the base address of SLJIT_SP + offset to dst. The offset can
represent the starting address of a value in the local data (stack).
The offset is not limited by the local data limits, it can be any value.
@@ -1642,17 +2185,15 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_local_base(struct sljit_compiler *c
Flags: - (does not modify flags) */
SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw init_value);
-/* Store the value of a label (see: sljit_set_put_label)
+/* Store the value of a label (see: sljit_set_label / sljit_set_target)
Flags: - (does not modify flags) */
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_put_label* sljit_emit_put_label(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw);
-
-/* Set the value stored by put_label to this label. */
-SLJIT_API_FUNC_ATTRIBUTE void sljit_set_put_label(struct sljit_put_label *put_label, struct sljit_label *label);
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_mov_addr(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw);
-/* After the code generation the address for label, jump and const instructions
- are computed. Since these structures are freed by sljit_free_compiler, the
- addresses must be preserved by the user program elsewere. */
-static SLJIT_INLINE sljit_uw sljit_get_label_addr(struct sljit_label *label) { return label->addr; }
+/* Provides the address of label, jump and const instructions after sljit_generate_code
+ is called. The returned value is unspecified before the sljit_generate_code call.
+ Since these structures are freed by sljit_free_compiler, the addresses must be
+ preserved by the user program elsewhere. */
+static SLJIT_INLINE sljit_uw sljit_get_label_addr(struct sljit_label *label) { return label->u.addr; }
static SLJIT_INLINE sljit_uw sljit_get_jump_addr(struct sljit_jump *jump) { return jump->addr; }
static SLJIT_INLINE sljit_uw sljit_get_const_addr(struct sljit_const *const_) { return const_->addr; }
@@ -1665,30 +2206,39 @@ SLJIT_API_FUNC_ATTRIBUTE void sljit_set_const(sljit_uw addr, sljit_sw new_consta
/* CPU specific functions */
/* --------------------------------------------------------------------- */
+/* Types for sljit_get_register_index */
+
+/* General purpose (integer) registers. */
+#define SLJIT_GP_REGISTER 0
+/* Floating point registers. */
+#define SLJIT_FLOAT_REGISTER 1
+
/* The following function is a helper function for sljit_emit_op_custom.
- It returns with the real machine register index ( >=0 ) of any SLJIT_R,
- SLJIT_S and SLJIT_SP registers.
+ It returns with the real machine register index ( >=0 ) of any register.
- Note: it returns with -1 for virtual registers (only on x86-32). */
+ When type is SLJIT_GP_REGISTER:
+ reg must be an SLJIT_R(i), SLJIT_S(i), or SLJIT_SP register
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 reg);
+ When type is SLJIT_FLOAT_REGISTER:
+ reg must be an SLJIT_FR(i) or SLJIT_FS(i) register
-/* The following function is a helper function for sljit_emit_op_custom.
- It returns with the real machine register ( >= 0 ) index of any SLJIT_FR,
- and SLJIT_FS register.
+ When type is SLJIT_SIMD_REG_64 / 128 / 256 / 512 :
+ reg must be an SLJIT_FR(i) or SLJIT_FS(i) register
- Note: the index is always an even number on ARM-32, MIPS. */
+ Note: it returns with -1 for unknown registers, such as virtual
+ registers on x86-32 or unsupported simd registers. */
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_float_register_index(sljit_s32 reg);
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 type, sljit_s32 reg);
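For example (sketch), resolving the hardware register behind SLJIT_R0 before building a custom instruction:

    sljit_s32 hw_reg = sljit_get_register_index(SLJIT_GP_REGISTER, SLJIT_R0);
    if (hw_reg >= 0) {
        /* encode hw_reg into a machine instruction passed to
           sljit_emit_op_custom() */
    }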
/* Any instruction can be inserted into the instruction stream by
sljit_emit_op_custom. It has a similar purpose as inline assembly.
The size parameter must match to the instruction size of the target
architecture:
- x86: 0 < size <= 15. The instruction argument can be byte aligned.
+ x86: 0 < size <= 15, the instruction argument can be byte aligned.
Thumb2: if size == 2, the instruction argument must be 2 byte aligned.
if size == 4, the instruction argument must be 4 byte aligned.
+ s390x: size can be 2, 4, or 6, the instruction argument can be byte aligned.
Otherwise: size must be 4 and instruction argument must be 4 byte aligned. */
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *compiler,
@@ -1717,6 +2267,98 @@ SLJIT_API_FUNC_ATTRIBUTE void sljit_set_current_flags(struct sljit_compiler *com
sljit_s32 current_flags);
/* --------------------------------------------------------------------- */
+/* Serialization functions */
+/* --------------------------------------------------------------------- */
+
+/* Label/jump/const enumeration functions. The items in each group
+ are enumerated in creation order. Serialization / deserialization
+ preserves this order for each group. For example the fifth label
+ after deserialization refers to the same machine code location as
+ the fifth label before the serialization. */
+static SLJIT_INLINE struct sljit_label *sljit_get_first_label(struct sljit_compiler *compiler) { return compiler->labels; }
+static SLJIT_INLINE struct sljit_jump *sljit_get_first_jump(struct sljit_compiler *compiler) { return compiler->jumps; }
+static SLJIT_INLINE struct sljit_const *sljit_get_first_const(struct sljit_compiler *compiler) { return compiler->consts; }
+
+static SLJIT_INLINE struct sljit_label *sljit_get_next_label(struct sljit_label *label) { return label->next; }
+static SLJIT_INLINE struct sljit_jump *sljit_get_next_jump(struct sljit_jump *jump) { return jump->next; }
+static SLJIT_INLINE struct sljit_const *sljit_get_next_const(struct sljit_const *const_) { return const_->next; }
+
+/* A number starting from 0 is assigned to each label, which
+represents its creation index. The first label created by the
+compiler has index 0, the second has index 1, the third has
+index 2, and so on. The returned value is unspecified after
+sljit_generate_code() is called. */
+static SLJIT_INLINE sljit_uw sljit_get_label_index(struct sljit_label *label) { return label->u.index; }
+
+/* The sljit_jump_has_label() and sljit_jump_has_target() functions
+return a non-zero value if a label or target is set for the jump
+respectively. Both may return with a zero value. The other two
+functions return the value assigned to the jump. */
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_jump_has_label(struct sljit_jump *jump);
+static SLJIT_INLINE struct sljit_label *sljit_jump_get_label(struct sljit_jump *jump) { return jump->u.label; }
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_jump_has_target(struct sljit_jump *jump);
+static SLJIT_INLINE sljit_uw sljit_jump_get_target(struct sljit_jump *jump) { return jump->u.target; }
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_jump_is_mov_addr(struct sljit_jump *jump);
+
+/* Option bits for sljit_serialize_compiler. */
+
+/* When debugging is enabled, the serialized buffer contains
+debugging information unless this option is specified. */
+#define SLJIT_SERIALIZE_IGNORE_DEBUG 0x1
+
+/* Serialize the internal structure of the compiler into a buffer.
+If the serialization is successful, the returned value is a newly
+allocated buffer, created by the memory allocator assigned
+to the compiler. Otherwise the returned value is NULL. Unlike
+sljit_generate_code(), serialization does not modify the internal
+state of the compiler, so the code generation can be continued.
+
+ options must be the combination of SLJIT_SERIALIZE_* option bits
+ size is an output argument, which is set to the byte size of
+ the result buffer if the operation is successful
+
+Notes:
+ - This function is useful for ahead-of-time compilation (AOT).
+ - The returned buffer must be freed later by the caller.
+ The SLJIT_FREE() macro is suitable for this purpose:
+ SLJIT_FREE(returned_buffer, sljit_get_allocator_data(compiler))
+ - Memory allocated by sljit_alloc_memory() is not serialized.
+ - The type of the returned buffer is sljit_uw* to emphasize that
+ the buffer is word aligned. However, the 'size' output argument
+ contains the byte size, so this value is always divisible by
+ sizeof(sljit_uw).
+*/
+SLJIT_API_FUNC_ATTRIBUTE sljit_uw* sljit_serialize_compiler(struct sljit_compiler *compiler,
+ sljit_s32 options, sljit_uw *size);
+
+/* Construct a new compiler instance from a buffer produced by
+sljit_serialize_compiler(). If the operation is successful, the new
+compiler instance is returned. Otherwise the returned value is NULL.
+
+ buffer points to a word aligned memory data which was
+ created by sljit_serialize_compiler()
+ size is the byte size of the buffer
+ options must be 0
+ allocator_data specifies allocator specific data, see
+ sljit_create_compiler() for further details
+
+Notes:
+ - Labels assigned to jumps are restored with their
+ corresponding label in the label set created by
+ the deserializer. Target addresses assigned to
+ jumps are also restored. Uninitialized jumps
+ remain uninitialized.
+ - After the deserialization, sljit_generate_code() does
+ not need to be the next operation on the returned
+ compiler, the code generation can be continued.
+ Even sljit_serialize_compiler() can be called again.
+ - When debugging is enabled, a buffer without debug
+ information cannot be deserialized.
+*/
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_compiler *sljit_deserialize_compiler(sljit_uw* buffer, sljit_uw size,
+ sljit_s32 options, void *allocator_data);
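A sketch of a serialize / restore round trip (error handling shortened):

    sljit_uw size;
    sljit_uw *buffer = sljit_serialize_compiler(compiler, 0, &size);

    if (buffer != NULL) {
        struct sljit_compiler *restored =
            sljit_deserialize_compiler(buffer, size, 0, NULL);
        SLJIT_FREE(buffer, sljit_get_allocator_data(compiler));
        /* code generation can continue on 'restored', or
           sljit_generate_code() can be called on it directly */
    }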
+
+/* --------------------------------------------------------------------- */
/* Miscellaneous utility functions */
/* --------------------------------------------------------------------- */
@@ -1725,7 +2367,8 @@ SLJIT_API_FUNC_ATTRIBUTE void sljit_set_current_flags(struct sljit_compiler *com
to know the type of the code generator. */
SLJIT_API_FUNC_ATTRIBUTE const char* sljit_get_platform_name(void);
-/* Portable helper function to get an offset of a member. */
+/* Portable helper function to get an offset of a member.
+ Same as the offsetof() macro defined in stddef.h */
#define SLJIT_OFFSETOF(base, member) ((sljit_sw)(&((base*)0x10)->member) - 0x10)
#if (defined SLJIT_UTIL_STACK && SLJIT_UTIL_STACK)
diff --git a/src/3rdparty/pcre2/src/sljit/sljitNativeARM_32.c b/src/3rdparty/pcre2/src/sljit/sljitNativeARM_32.c
index 54b8ade063..a253c06f01 100644
--- a/src/3rdparty/pcre2/src/sljit/sljitNativeARM_32.c
+++ b/src/3rdparty/pcre2/src/sljit/sljitNativeARM_32.c
@@ -34,13 +34,16 @@ SLJIT_API_FUNC_ATTRIBUTE const char* sljit_get_platform_name(void)
{
#if (defined SLJIT_CONFIG_ARM_V7 && SLJIT_CONFIG_ARM_V7)
return "ARMv7" SLJIT_CPUINFO ARM_ABI_INFO;
-#elif (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
- return "ARMv5" SLJIT_CPUINFO ARM_ABI_INFO;
+#elif (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
+ return "ARMv6" SLJIT_CPUINFO ARM_ABI_INFO;
#else
#error "Internal error: Unknown ARM architecture"
#endif
}
+/* Length of an instruction word. */
+typedef sljit_u32 sljit_ins;
+
/* Last register + 1. */
#define TMP_REG1 (SLJIT_NUMBER_OF_REGISTERS + 2)
#define TMP_REG2 (SLJIT_NUMBER_OF_REGISTERS + 3)
@@ -55,27 +58,39 @@ SLJIT_API_FUNC_ATTRIBUTE const char* sljit_get_platform_name(void)
#define CONST_POOL_EMPTY 0xffffffff
#define ALIGN_INSTRUCTION(ptr) \
- (sljit_uw*)(((sljit_uw)(ptr) + (CONST_POOL_ALIGNMENT * sizeof(sljit_uw)) - 1) & ~((CONST_POOL_ALIGNMENT * sizeof(sljit_uw)) - 1))
+ (sljit_ins*)(((sljit_ins)(ptr) + (CONST_POOL_ALIGNMENT * sizeof(sljit_ins)) - 1) & ~((CONST_POOL_ALIGNMENT * sizeof(sljit_ins)) - 1))
#define MAX_DIFFERENCE(max_diff) \
- (((max_diff) / (sljit_s32)sizeof(sljit_uw)) - (CONST_POOL_ALIGNMENT - 1))
+ (((max_diff) / (sljit_s32)sizeof(sljit_ins)) - (CONST_POOL_ALIGNMENT - 1))
/* See sljit_emit_enter and sljit_emit_op0 if you want to change them. */
static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 5] = {
0, 0, 1, 2, 3, 11, 10, 9, 8, 7, 6, 5, 4, 13, 12, 14, 15
};
-static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 3] = {
- 0, 0, 1, 2, 3, 4, 5, 15, 14, 13, 12, 11, 10, 9, 8, 6, 7
+static const sljit_u8 freg_map[((SLJIT_NUMBER_OF_FLOAT_REGISTERS + 2) << 1) + 1] = {
+ 0,
+ 0, 1, 2, 3, 4, 5, 15, 14, 13, 12, 11, 10, 9, 8,
+ 7, 6,
+ 0, 1, 2, 3, 4, 5, 15, 14, 13, 12, 11, 10, 9, 8,
+ 7, 6
+};
+
+static const sljit_u8 freg_ebit_map[((SLJIT_NUMBER_OF_FLOAT_REGISTERS + 2) << 1) + 1] = {
+ 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1
};
-#define RM(rm) ((sljit_uw)reg_map[rm])
-#define RM8(rm) ((sljit_uw)reg_map[rm] << 8)
-#define RD(rd) ((sljit_uw)reg_map[rd] << 12)
-#define RN(rn) ((sljit_uw)reg_map[rn] << 16)
+#define RM(rm) ((sljit_ins)reg_map[rm])
+#define RM8(rm) ((sljit_ins)reg_map[rm] << 8)
+#define RD(rd) ((sljit_ins)reg_map[rd] << 12)
+#define RN(rn) ((sljit_ins)reg_map[rn] << 16)
-#define VM(rm) ((sljit_uw)freg_map[rm])
-#define VD(rd) ((sljit_uw)freg_map[rd] << 12)
-#define VN(rn) ((sljit_uw)freg_map[rn] << 16)
+#define VM(vm) (((sljit_ins)freg_map[vm]) | ((sljit_ins)freg_ebit_map[vm] << 5))
+#define VD(vd) (((sljit_ins)freg_map[vd] << 12) | ((sljit_ins)freg_ebit_map[vd] << 22))
+#define VN(vn) (((sljit_ins)freg_map[vn] << 16) | ((sljit_ins)freg_ebit_map[vn] << 7))
/* --------------------------------------------------------------------- */
/* Instruction forms */
@@ -92,16 +107,20 @@ static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 3] = {
#define AND 0xe0000000
#define B 0xea000000
#define BIC 0xe1c00000
+#define BKPT 0xe1200070
#define BL 0xeb000000
#define BLX 0xe12fff30
#define BX 0xe12fff10
#define CLZ 0xe16f0f10
#define CMN 0xe1600000
#define CMP 0xe1400000
-#define BKPT 0xe1200070
#define EOR 0xe0200000
#define LDR 0xe5100000
#define LDR_POST 0xe4100000
+#define LDREX 0xe1900f9f
+#define LDREXB 0xe1d00f9f
+#define LDREXH 0xe1f00f9f
+#define MLA 0xe0200090
#define MOV 0xe1a00000
#define MUL 0xe0000090
#define MVN 0xe1e00000
@@ -109,50 +128,89 @@ static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 3] = {
#define ORR 0xe1800000
#define PUSH 0xe92d0000
#define POP 0xe8bd0000
-#define RBIT 0xe6ff0f30
+#define REV 0xe6bf0f30
+#define REV16 0xe6bf0fb0
#define RSB 0xe0600000
#define RSC 0xe0e00000
#define SBC 0xe0c00000
#define SMULL 0xe0c00090
#define STR 0xe5000000
+#define STREX 0xe1800f90
+#define STREXB 0xe1c00f90
+#define STREXH 0xe1e00f90
#define SUB 0xe0400000
+#define SXTB 0xe6af0070
+#define SXTH 0xe6bf0070
#define TST 0xe1000000
#define UMULL 0xe0800090
+#define UXTB 0xe6ef0070
+#define UXTH 0xe6ff0070
#define VABS_F32 0xeeb00ac0
#define VADD_F32 0xee300a00
+#define VAND 0xf2000110
#define VCMP_F32 0xeeb40a40
#define VCVT_F32_S32 0xeeb80ac0
+#define VCVT_F32_U32 0xeeb80a40
#define VCVT_F64_F32 0xeeb70ac0
#define VCVT_S32_F32 0xeebd0ac0
#define VDIV_F32 0xee800a00
+#define VDUP 0xee800b10
+#define VDUP_s 0xf3b00c00
+#define VEOR 0xf3000110
+#define VLD1 0xf4200000
+#define VLD1_r 0xf4a00c00
+#define VLD1_s 0xf4a00000
#define VLDR_F32 0xed100a00
#define VMOV_F32 0xeeb00a40
#define VMOV 0xee000a10
#define VMOV2 0xec400a10
+#define VMOV_i 0xf2800010
+#define VMOV_s 0xee000b10
+#define VMOVN 0xf3b20200
#define VMRS 0xeef1fa10
#define VMUL_F32 0xee200a00
#define VNEG_F32 0xeeb10a40
+#define VORR 0xf2200110
#define VPOP 0xecbd0b00
#define VPUSH 0xed2d0b00
+#define VSHLL 0xf2800a10
+#define VSHR 0xf2800010
+#define VSRA 0xf2800110
+#define VST1 0xf4000000
+#define VST1_s 0xf4800000
#define VSTR_F32 0xed000a00
#define VSUB_F32 0xee300a40
#if (defined SLJIT_CONFIG_ARM_V7 && SLJIT_CONFIG_ARM_V7)
/* Arm v7 specific instructions. */
-#define MOVW 0xe3000000
#define MOVT 0xe3400000
-#define SXTB 0xe6af0070
-#define SXTH 0xe6bf0070
-#define UXTB 0xe6ef0070
-#define UXTH 0xe6ff0070
+#define MOVW 0xe3000000
+#define RBIT 0xe6ff0f30
#endif
-#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
+#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
+
+static sljit_s32 function_check_is_freg(struct sljit_compiler *compiler, sljit_s32 fr, sljit_s32 is_32)
+{
+ if (compiler->scratches == -1)
+ return 0;
+
+ if (is_32 && fr >= SLJIT_F64_SECOND(SLJIT_FR0))
+ fr -= SLJIT_F64_SECOND(0);
+
+ return (fr >= SLJIT_FR0 && fr < (SLJIT_FR0 + compiler->fscratches))
+ || (fr > (SLJIT_FS0 - compiler->fsaveds) && fr <= SLJIT_FS0)
+ || (fr >= SLJIT_TMP_FREGISTER_BASE && fr < (SLJIT_TMP_FREGISTER_BASE + SLJIT_NUMBER_OF_TEMPORARY_FLOAT_REGISTERS));
+}
+
+#endif /* SLJIT_ARGUMENT_CHECKS */
+
+#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
static sljit_s32 push_cpool(struct sljit_compiler *compiler)
{
/* Pushing the constant pool into the instruction stream. */
- sljit_uw* inst;
+ sljit_ins* inst;
sljit_uw* cpool_ptr;
sljit_uw* cpool_end;
sljit_s32 i;
@@ -162,13 +220,13 @@ static sljit_s32 push_cpool(struct sljit_compiler *compiler)
compiler->last_label->size += compiler->cpool_fill + (CONST_POOL_ALIGNMENT - 1) + 1;
SLJIT_ASSERT(compiler->cpool_fill > 0 && compiler->cpool_fill <= CPOOL_SIZE);
- inst = (sljit_uw*)ensure_buf(compiler, sizeof(sljit_uw));
+ inst = (sljit_ins*)ensure_buf(compiler, sizeof(sljit_ins));
FAIL_IF(!inst);
compiler->size++;
*inst = 0xff000000 | compiler->cpool_fill;
for (i = 0; i < CONST_POOL_ALIGNMENT - 1; i++) {
- inst = (sljit_uw*)ensure_buf(compiler, sizeof(sljit_uw));
+ inst = (sljit_ins*)ensure_buf(compiler, sizeof(sljit_ins));
FAIL_IF(!inst);
compiler->size++;
*inst = 0;
@@ -177,7 +235,7 @@ static sljit_s32 push_cpool(struct sljit_compiler *compiler)
cpool_ptr = compiler->cpool;
cpool_end = cpool_ptr + compiler->cpool_fill;
while (cpool_ptr < cpool_end) {
- inst = (sljit_uw*)ensure_buf(compiler, sizeof(sljit_uw));
+ inst = (sljit_ins*)ensure_buf(compiler, sizeof(sljit_ins));
FAIL_IF(!inst);
compiler->size++;
*inst = *cpool_ptr++;
@@ -187,23 +245,23 @@ static sljit_s32 push_cpool(struct sljit_compiler *compiler)
return SLJIT_SUCCESS;
}
-static sljit_s32 push_inst(struct sljit_compiler *compiler, sljit_uw inst)
+static sljit_s32 push_inst(struct sljit_compiler *compiler, sljit_ins inst)
{
- sljit_uw* ptr;
+ sljit_ins* ptr;
if (SLJIT_UNLIKELY(compiler->cpool_diff != CONST_POOL_EMPTY && compiler->size - compiler->cpool_diff >= MAX_DIFFERENCE(4092)))
FAIL_IF(push_cpool(compiler));
- ptr = (sljit_uw*)ensure_buf(compiler, sizeof(sljit_uw));
+ ptr = (sljit_ins*)ensure_buf(compiler, sizeof(sljit_ins));
FAIL_IF(!ptr);
compiler->size++;
*ptr = inst;
return SLJIT_SUCCESS;
}
-static sljit_s32 push_inst_with_literal(struct sljit_compiler *compiler, sljit_uw inst, sljit_uw literal)
+static sljit_s32 push_inst_with_literal(struct sljit_compiler *compiler, sljit_ins inst, sljit_uw literal)
{
- sljit_uw* ptr;
+ sljit_ins* ptr;
sljit_uw cpool_index = CPOOL_SIZE;
sljit_uw* cpool_ptr;
sljit_uw* cpool_end;
@@ -239,7 +297,7 @@ static sljit_s32 push_inst_with_literal(struct sljit_compiler *compiler, sljit_u
}
SLJIT_ASSERT((inst & 0xfff) == 0);
- ptr = (sljit_uw*)ensure_buf(compiler, sizeof(sljit_uw));
+ ptr = (sljit_ins*)ensure_buf(compiler, sizeof(sljit_ins));
FAIL_IF(!ptr);
compiler->size++;
*ptr = inst | cpool_index;
@@ -251,14 +309,15 @@ static sljit_s32 push_inst_with_literal(struct sljit_compiler *compiler, sljit_u
return SLJIT_SUCCESS;
}
-static sljit_s32 push_inst_with_unique_literal(struct sljit_compiler *compiler, sljit_uw inst, sljit_uw literal)
+static sljit_s32 push_inst_with_unique_literal(struct sljit_compiler *compiler, sljit_ins inst, sljit_uw literal)
{
- sljit_uw* ptr;
+ sljit_ins* ptr;
+
if (SLJIT_UNLIKELY((compiler->cpool_diff != CONST_POOL_EMPTY && compiler->size - compiler->cpool_diff >= MAX_DIFFERENCE(4092)) || compiler->cpool_fill >= CPOOL_SIZE))
FAIL_IF(push_cpool(compiler));
SLJIT_ASSERT(compiler->cpool_fill < CPOOL_SIZE && (inst & 0xfff) == 0);
- ptr = (sljit_uw*)ensure_buf(compiler, sizeof(sljit_uw));
+ ptr = (sljit_ins*)ensure_buf(compiler, sizeof(sljit_ins));
FAIL_IF(!ptr);
compiler->size++;
*ptr = inst | compiler->cpool_fill;
@@ -305,7 +364,7 @@ static sljit_uw patch_pc_relative_loads(sljit_uw *last_pc_patch, sljit_uw *code_
while (last_pc_patch < code_ptr) {
/* Data transfer instruction with Rn == r15. */
- if ((*last_pc_patch & 0x0c0f0000) == 0x040f0000) {
+ if ((*last_pc_patch & 0x0e0f0000) == 0x040f0000) {
diff = (sljit_uw)(const_pool - last_pc_patch);
ind = (*last_pc_patch) & 0xfff;
@@ -395,11 +454,11 @@ static sljit_s32 resolve_const_pool_index(struct sljit_compiler *compiler, struc
#else
-static sljit_s32 push_inst(struct sljit_compiler *compiler, sljit_uw inst)
+static sljit_s32 push_inst(struct sljit_compiler *compiler, sljit_ins inst)
{
- sljit_uw* ptr;
+ sljit_ins* ptr;
- ptr = (sljit_uw*)ensure_buf(compiler, sizeof(sljit_uw));
+ ptr = (sljit_ins*)ensure_buf(compiler, sizeof(sljit_ins));
FAIL_IF(!ptr);
compiler->size++;
*ptr = inst;
@@ -421,14 +480,15 @@ static SLJIT_INLINE sljit_s32 detect_jump_type(struct sljit_jump *jump, sljit_uw
if (jump->flags & SLJIT_REWRITABLE_JUMP)
return 0;
-#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
+#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
if (jump->flags & IS_BL)
code_ptr--;
+#endif /* SLJIT_CONFIG_ARM_V6 */
if (jump->flags & JUMP_ADDR)
diff = ((sljit_sw)jump->u.target - (sljit_sw)(code_ptr + 2) - executable_offset);
else {
- SLJIT_ASSERT(jump->flags & JUMP_LABEL);
+ SLJIT_ASSERT(jump->u.label != NULL);
diff = ((sljit_sw)(code + jump->u.label->size) - (sljit_sw)(code_ptr + 2));
}
@@ -436,6 +496,7 @@ static SLJIT_INLINE sljit_s32 detect_jump_type(struct sljit_jump *jump, sljit_uw
if (diff & 0x3)
return 0;
+#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
if (jump->flags & IS_BL) {
if (diff <= 0x01ffffff && diff >= -0x02000000) {
*code_ptr = (BL - CONDITIONAL) | (*(code_ptr + 1) & COND_MASK);
@@ -449,34 +510,22 @@ static SLJIT_INLINE sljit_s32 detect_jump_type(struct sljit_jump *jump, sljit_uw
jump->flags |= PATCH_B;
}
}
-#else
- if (jump->flags & JUMP_ADDR)
- diff = ((sljit_sw)jump->u.target - (sljit_sw)code_ptr - executable_offset);
- else {
- SLJIT_ASSERT(jump->flags & JUMP_LABEL);
- diff = ((sljit_sw)(code + jump->u.label->size) - (sljit_sw)code_ptr);
- }
-
- /* Branch to Thumb code has not been optimized yet. */
- if (diff & 0x3)
- return 0;
-
+#else /* !SLJIT_CONFIG_ARM_V6 */
if (diff <= 0x01ffffff && diff >= -0x02000000) {
- code_ptr -= 2;
- *code_ptr = ((jump->flags & IS_BL) ? (BL - CONDITIONAL) : (B - CONDITIONAL)) | (code_ptr[2] & COND_MASK);
+ *code_ptr = ((jump->flags & IS_BL) ? (BL - CONDITIONAL) : (B - CONDITIONAL)) | (*code_ptr & COND_MASK);
jump->flags |= PATCH_B;
return 1;
}
-#endif
+#endif /* SLJIT_CONFIG_ARM_V6 */
return 0;
}
-static SLJIT_INLINE void inline_set_jump_addr(sljit_uw jump_ptr, sljit_sw executable_offset, sljit_uw new_addr, sljit_s32 flush_cache)
+static void set_jump_addr(sljit_uw jump_ptr, sljit_sw executable_offset, sljit_uw new_addr, sljit_s32 flush_cache)
{
-#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
- sljit_uw *ptr = (sljit_uw *)jump_ptr;
- sljit_uw *inst = (sljit_uw *)ptr[0];
- sljit_uw mov_pc = ptr[1];
+#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
+ sljit_ins *ptr = (sljit_ins*)jump_ptr;
+ sljit_ins *inst = (sljit_ins*)ptr[0];
+ sljit_ins mov_pc = ptr[1];
sljit_s32 bl = (mov_pc & 0x0000f000) != RD(TMP_PC);
sljit_sw diff = (sljit_sw)(((sljit_sw)new_addr - (sljit_sw)(inst + 2) - executable_offset) >> 2);
@@ -491,7 +540,7 @@ static SLJIT_INLINE void inline_set_jump_addr(sljit_uw jump_ptr, sljit_sw execut
inst[0] = (mov_pc & COND_MASK) | (B - CONDITIONAL) | (diff & 0xffffff);
if (flush_cache) {
SLJIT_UPDATE_WX_FLAGS(inst, inst + 1, 1);
- inst = (sljit_uw *)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
+ inst = (sljit_ins*)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
SLJIT_CACHE_FLUSH(inst, inst + 1);
}
} else {
@@ -502,7 +551,7 @@ static SLJIT_INLINE void inline_set_jump_addr(sljit_uw jump_ptr, sljit_sw execut
inst[1] = NOP;
if (flush_cache) {
SLJIT_UPDATE_WX_FLAGS(inst, inst + 2, 1);
- inst = (sljit_uw *)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
+ inst = (sljit_ins*)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
SLJIT_CACHE_FLUSH(inst, inst + 2);
}
}
@@ -521,14 +570,14 @@ static SLJIT_INLINE void inline_set_jump_addr(sljit_uw jump_ptr, sljit_sw execut
if (!bl) {
if (flush_cache) {
SLJIT_UPDATE_WX_FLAGS(inst, inst + 1, 1);
- inst = (sljit_uw *)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
+ inst = (sljit_ins*)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
SLJIT_CACHE_FLUSH(inst, inst + 1);
}
} else {
inst[1] = BLX | RM(TMP_REG1);
if (flush_cache) {
SLJIT_UPDATE_WX_FLAGS(inst, inst + 2, 1);
- inst = (sljit_uw *)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
+ inst = (sljit_ins*)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
SLJIT_CACHE_FLUSH(inst, inst + 2);
}
}
@@ -544,8 +593,8 @@ static SLJIT_INLINE void inline_set_jump_addr(sljit_uw jump_ptr, sljit_sw execut
SLJIT_UPDATE_WX_FLAGS(ptr, ptr + 1, 1);
}
}
-#else
- sljit_uw *inst = (sljit_uw*)jump_ptr;
+#else /* !SLJIT_CONFIG_ARM_V6 */
+ sljit_ins *inst = (sljit_ins*)jump_ptr;
SLJIT_UNUSED_ARG(executable_offset);
@@ -560,21 +609,21 @@ static SLJIT_INLINE void inline_set_jump_addr(sljit_uw jump_ptr, sljit_sw execut
if (flush_cache) {
SLJIT_UPDATE_WX_FLAGS(inst, inst + 2, 1);
- inst = (sljit_uw *)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
+ inst = (sljit_ins*)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
SLJIT_CACHE_FLUSH(inst, inst + 2);
}
-#endif
+#endif /* SLJIT_CONFIG_ARM_V6 */
}
static sljit_uw get_imm(sljit_uw imm);
static sljit_s32 load_immediate(struct sljit_compiler *compiler, sljit_s32 reg, sljit_uw imm);
static sljit_s32 emit_op_mem(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg, sljit_s32 arg, sljit_sw argw, sljit_s32 tmp_reg);
-static SLJIT_INLINE void inline_set_const(sljit_uw addr, sljit_sw executable_offset, sljit_uw new_constant, sljit_s32 flush_cache)
+static void set_const_value(sljit_uw addr, sljit_sw executable_offset, sljit_uw new_constant, sljit_s32 flush_cache)
{
-#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
- sljit_uw *ptr = (sljit_uw*)addr;
- sljit_uw *inst = (sljit_uw*)ptr[0];
+#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
+ sljit_ins *ptr = (sljit_ins*)addr;
+ sljit_ins *inst = (sljit_ins*)ptr[0];
sljit_uw ldr_literal = ptr[1];
sljit_uw src2;
@@ -590,7 +639,7 @@ static SLJIT_INLINE void inline_set_const(sljit_uw addr, sljit_sw executable_off
if (flush_cache) {
SLJIT_UPDATE_WX_FLAGS(inst, inst + 1, 1);
- inst = (sljit_uw *)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
+ inst = (sljit_ins*)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
SLJIT_CACHE_FLUSH(inst, inst + 1);
}
return;
@@ -606,7 +655,7 @@ static SLJIT_INLINE void inline_set_const(sljit_uw addr, sljit_sw executable_off
if (flush_cache) {
SLJIT_UPDATE_WX_FLAGS(inst, inst + 1, 1);
- inst = (sljit_uw *)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
+ inst = (sljit_ins*)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
SLJIT_CACHE_FLUSH(inst, inst + 1);
}
return;
@@ -626,7 +675,7 @@ static SLJIT_INLINE void inline_set_const(sljit_uw addr, sljit_sw executable_off
if (flush_cache) {
SLJIT_UPDATE_WX_FLAGS(inst, inst + 1, 1);
- inst = (sljit_uw *)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
+ inst = (sljit_ins*)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
SLJIT_CACHE_FLUSH(inst, inst + 1);
}
}
@@ -640,8 +689,8 @@ static SLJIT_INLINE void inline_set_const(sljit_uw addr, sljit_sw executable_off
if (flush_cache) {
SLJIT_UPDATE_WX_FLAGS(ptr, ptr + 1, 1);
}
-#else
- sljit_uw *inst = (sljit_uw*)addr;
+#else /* !SLJIT_CONFIG_ARM_V6 */
+ sljit_ins *inst = (sljit_ins*)addr;
SLJIT_UNUSED_ARG(executable_offset);
@@ -656,92 +705,185 @@ static SLJIT_INLINE void inline_set_const(sljit_uw addr, sljit_sw executable_off
if (flush_cache) {
SLJIT_UPDATE_WX_FLAGS(inst, inst + 2, 1);
- inst = (sljit_uw *)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
+ inst = (sljit_ins*)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
SLJIT_CACHE_FLUSH(inst, inst + 2);
}
-#endif
+#endif /* SLJIT_CONFIG_ARM_V6 */
}
-SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compiler)
+static SLJIT_INLINE sljit_sw mov_addr_get_length(struct sljit_jump *jump, sljit_ins *code_ptr, sljit_ins *code, sljit_sw executable_offset)
+{
+ sljit_uw addr;
+ sljit_sw diff;
+ SLJIT_UNUSED_ARG(executable_offset);
+
+ if (jump->flags & JUMP_ADDR)
+ addr = jump->u.target;
+ else
+ addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code + jump->u.label->size, executable_offset);
+
+ /* The pc+8 offset is represented by the 2 * SSIZE_OF(ins) below. */
+ diff = (sljit_sw)addr - (sljit_sw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
+
+ if ((diff & 0x3) == 0 && diff <= (0x3fc + 2 * SSIZE_OF(ins)) && diff >= (-0x3fc + 2 * SSIZE_OF(ins))) {
+ jump->flags |= PATCH_B;
+ return 0;
+ }
+
+#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
+ return 0;
+#else /* !SLJIT_CONFIG_ARM_V6 */
+ return 1;
+#endif /* SLJIT_CONFIG_ARM_V6 */
+}
+
+#if (defined SLJIT_CONFIG_ARM_V7 && SLJIT_CONFIG_ARM_V7)
+
+static void reduce_code_size(struct sljit_compiler *compiler)
+{
+ struct sljit_label *label;
+ struct sljit_jump *jump;
+ struct sljit_const *const_;
+ SLJIT_NEXT_DEFINE_TYPES;
+ sljit_uw total_size;
+ sljit_uw size_reduce = 0;
+ sljit_sw diff;
+
+ label = compiler->labels;
+ jump = compiler->jumps;
+ const_ = compiler->consts;
+ SLJIT_NEXT_INIT_TYPES();
+
+ while (1) {
+ SLJIT_GET_NEXT_MIN();
+
+ if (next_min_addr == SLJIT_MAX_ADDRESS)
+ break;
+
+ if (next_min_addr == next_label_size) {
+ label->size -= size_reduce;
+
+ label = label->next;
+ next_label_size = SLJIT_GET_NEXT_SIZE(label);
+ }
+
+ if (next_min_addr == next_const_addr) {
+ const_->addr -= size_reduce;
+ const_ = const_->next;
+ next_const_addr = SLJIT_GET_NEXT_ADDRESS(const_);
+ continue;
+ }
+
+ if (next_min_addr != next_jump_addr)
+ continue;
+
+ jump->addr -= size_reduce;
+ if (!(jump->flags & JUMP_MOV_ADDR)) {
+ total_size = JUMP_MAX_SIZE - 1;
+
+ if (!(jump->flags & (SLJIT_REWRITABLE_JUMP | JUMP_ADDR))) {
+ /* Unit size: instruction. */
+ diff = (sljit_sw)jump->u.label->size - (sljit_sw)jump->addr - 2;
+
+ if (diff <= (0x01ffffff / SSIZE_OF(ins)) && diff >= (-0x02000000 / SSIZE_OF(ins)))
+ total_size = 1 - 1;
+ }
+
+ size_reduce += JUMP_MAX_SIZE - 1 - total_size;
+ } else {
+ /* Real size minus 1. Unit size: instruction. */
+ total_size = 1;
+
+ if (!(jump->flags & JUMP_ADDR)) {
+ diff = (sljit_sw)jump->u.label->size - (sljit_sw)jump->addr;
+ if (diff <= 0xff + 2 && diff >= -0xff + 2)
+ total_size = 0;
+ }
+
+ size_reduce += 1 - total_size;
+ }
+
+ jump->flags |= total_size << JUMP_SIZE_SHIFT;
+ jump = jump->next;
+ next_jump_addr = SLJIT_GET_NEXT_ADDRESS(jump);
+ }
+
+ compiler->size -= size_reduce;
+}
+
+#endif /* SLJIT_CONFIG_ARM_V7 */
+
+SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compiler, sljit_s32 options, void *exec_allocator_data)
{
struct sljit_memory_fragment *buf;
- sljit_uw *code;
- sljit_uw *code_ptr;
- sljit_uw *buf_ptr;
- sljit_uw *buf_end;
- sljit_uw size;
+ sljit_ins *code;
+ sljit_ins *code_ptr;
+ sljit_ins *buf_ptr;
+ sljit_ins *buf_end;
sljit_uw word_count;
- sljit_uw next_addr;
+ SLJIT_NEXT_DEFINE_TYPES;
sljit_sw executable_offset;
sljit_uw addr;
-#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
+ sljit_sw diff;
+#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
sljit_uw cpool_size;
sljit_uw cpool_skip_alignment;
sljit_uw cpool_current_index;
- sljit_uw *cpool_start_address;
- sljit_uw *last_pc_patch;
+ sljit_ins *cpool_start_address;
+ sljit_ins *last_pc_patch;
struct future_patch *first_patch;
#endif
struct sljit_label *label;
struct sljit_jump *jump;
struct sljit_const *const_;
- struct sljit_put_label *put_label;
CHECK_ERROR_PTR();
CHECK_PTR(check_sljit_generate_code(compiler));
- reverse_buf(compiler);
/* Second code generation pass. */
-#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
- size = compiler->size + (compiler->patches << 1);
+#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
+ compiler->size += (compiler->patches << 1);
if (compiler->cpool_fill > 0)
- size += compiler->cpool_fill + CONST_POOL_ALIGNMENT - 1;
-#else
- size = compiler->size;
-#endif
- code = (sljit_uw*)SLJIT_MALLOC_EXEC(size * sizeof(sljit_uw), compiler->exec_allocator_data);
+ compiler->size += compiler->cpool_fill + CONST_POOL_ALIGNMENT - 1;
+#else /* !SLJIT_CONFIG_ARM_V6 */
+ reduce_code_size(compiler);
+#endif /* SLJIT_CONFIG_ARM_V6 */
+ code = (sljit_ins*)allocate_executable_memory(compiler->size * sizeof(sljit_ins), options, exec_allocator_data, &executable_offset);
PTR_FAIL_WITH_EXEC_IF(code);
+
+ reverse_buf(compiler);
buf = compiler->buf;
-#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
+#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
cpool_size = 0;
cpool_skip_alignment = 0;
cpool_current_index = 0;
cpool_start_address = NULL;
first_patch = NULL;
last_pc_patch = code;
-#endif
+#endif /* SLJIT_CONFIG_ARM_V6 */
code_ptr = code;
word_count = 0;
- next_addr = 1;
- executable_offset = SLJIT_EXEC_OFFSET(code);
-
label = compiler->labels;
jump = compiler->jumps;
const_ = compiler->consts;
- put_label = compiler->put_labels;
-
- if (label && label->size == 0) {
- label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code, executable_offset);
- label = label->next;
- }
+ SLJIT_NEXT_INIT_TYPES();
+ SLJIT_GET_NEXT_MIN();
do {
- buf_ptr = (sljit_uw*)buf->memory;
+ buf_ptr = (sljit_ins*)buf->memory;
buf_end = buf_ptr + (buf->used_size >> 2);
do {
- word_count++;
-#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
+#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
if (cpool_size > 0) {
if (cpool_skip_alignment > 0) {
buf_ptr++;
cpool_skip_alignment--;
- }
- else {
+ } else {
if (SLJIT_UNLIKELY(resolve_const_pool_index(compiler, &first_patch, cpool_current_index, cpool_start_address, buf_ptr))) {
- SLJIT_FREE_EXEC(code, compiler->exec_allocator_data);
+ SLJIT_FREE_EXEC(code, exec_allocator_data);
compiler->error = SLJIT_ERR_ALLOC_FAILED;
return NULL;
}
@@ -749,64 +891,63 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
if (++cpool_current_index >= cpool_size) {
SLJIT_ASSERT(!first_patch);
cpool_size = 0;
- if (label && label->size == word_count) {
- /* Points after the current instruction. */
- label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
- label->size = (sljit_uw)(code_ptr - code);
- label = label->next;
-
- next_addr = compute_next_addr(label, jump, const_, put_label);
- }
}
}
- }
- else if ((*buf_ptr & 0xff000000) != PUSH_POOL) {
-#endif
+ } else if ((*buf_ptr & 0xff000000) != PUSH_POOL) {
+#endif /* SLJIT_CONFIG_ARM_V6 */
*code_ptr = *buf_ptr++;
- if (next_addr == word_count) {
+ if (next_min_addr == word_count) {
SLJIT_ASSERT(!label || label->size >= word_count);
SLJIT_ASSERT(!jump || jump->addr >= word_count);
SLJIT_ASSERT(!const_ || const_->addr >= word_count);
- SLJIT_ASSERT(!put_label || put_label->addr >= word_count);
-
- /* These structures are ordered by their address. */
- if (jump && jump->addr == word_count) {
-#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
- if (detect_jump_type(jump, code_ptr, code, executable_offset))
- code_ptr--;
- jump->addr = (sljit_uw)code_ptr;
-#else
- jump->addr = (sljit_uw)(code_ptr - 2);
- if (detect_jump_type(jump, code_ptr, code, executable_offset))
- code_ptr -= 2;
-#endif
- jump = jump->next;
- }
- if (label && label->size == word_count) {
- /* code_ptr can be affected above. */
- label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr + 1, executable_offset);
- label->size = (sljit_uw)((code_ptr + 1) - code);
+
+ if (next_min_addr == next_label_size) {
+ label->u.addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
+ label->size = (sljit_uw)(code_ptr - code);
label = label->next;
+ next_label_size = SLJIT_GET_NEXT_SIZE(label);
}
- if (const_ && const_->addr == word_count) {
-#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
+
+ /* These structures are ordered by their address. */
+ if (next_min_addr == next_jump_addr) {
+ if (!(jump->flags & JUMP_MOV_ADDR)) {
+#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
+ if (detect_jump_type(jump, code_ptr, code, executable_offset))
+ code_ptr--;
+ jump->addr = (sljit_uw)code_ptr;
+#else /* !SLJIT_CONFIG_ARM_V6 */
+ word_count += jump->flags >> JUMP_SIZE_SHIFT;
+ jump->addr = (sljit_uw)code_ptr;
+ if (!detect_jump_type(jump, code_ptr, code, executable_offset)) {
+ code_ptr[2] = code_ptr[0];
+ addr = ((code_ptr[0] & 0xf) << 12);
+ code_ptr[0] = MOVW | addr;
+ code_ptr[1] = MOVT | addr;
+ code_ptr += 2;
+ }
+ SLJIT_ASSERT((sljit_uw)code_ptr - jump->addr <= (jump->flags >> JUMP_SIZE_SHIFT) * sizeof(sljit_ins));
+#endif /* SLJIT_CONFIG_ARM_V6 */
+ } else {
+#if (defined SLJIT_CONFIG_ARM_V7 && SLJIT_CONFIG_ARM_V7)
+ word_count += jump->flags >> JUMP_SIZE_SHIFT;
+#endif /* SLJIT_CONFIG_ARM_V7 */
+ addr = (sljit_uw)code_ptr;
+ code_ptr += mov_addr_get_length(jump, code_ptr, code, executable_offset);
+ jump->addr = addr;
+ }
+ jump = jump->next;
+ next_jump_addr = SLJIT_GET_NEXT_ADDRESS(jump);
+ } else if (next_min_addr == next_const_addr) {
const_->addr = (sljit_uw)code_ptr;
-#else
- const_->addr = (sljit_uw)(code_ptr - 1);
-#endif
const_ = const_->next;
+ next_const_addr = SLJIT_GET_NEXT_ADDRESS(const_);
}
- if (put_label && put_label->addr == word_count) {
- SLJIT_ASSERT(put_label->label);
- put_label->addr = (sljit_uw)code_ptr;
- put_label = put_label->next;
- }
- next_addr = compute_next_addr(label, jump, const_, put_label);
+
+ SLJIT_GET_NEXT_MIN();
}
code_ptr++;
-#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
- }
- else {
+#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
+ } else {
/* Fortunately, no need to shift. */
cpool_size = *buf_ptr++ & ~PUSH_POOL;
SLJIT_ASSERT(cpool_size > 0);
@@ -814,37 +955,43 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
cpool_current_index = patch_pc_relative_loads(last_pc_patch, code_ptr, cpool_start_address, cpool_size);
if (cpool_current_index > 0) {
/* Unconditional branch. */
- *code_ptr = B | (((sljit_uw)(cpool_start_address - code_ptr) + cpool_current_index - 2) & ~PUSH_POOL);
- code_ptr = (sljit_uw*)(cpool_start_address + cpool_current_index);
+ *code_ptr = B | (((sljit_ins)(cpool_start_address - code_ptr) + cpool_current_index - 2) & ~PUSH_POOL);
+ code_ptr = (sljit_ins*)(cpool_start_address + cpool_current_index);
}
cpool_skip_alignment = CONST_POOL_ALIGNMENT - 1;
cpool_current_index = 0;
last_pc_patch = code_ptr;
}
-#endif
+#endif /* SLJIT_CONFIG_ARM_V6 */
+ word_count++;
} while (buf_ptr < buf_end);
buf = buf->next;
} while (buf);
+ if (label && label->size == word_count) {
+ label->u.addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
+ label->size = (sljit_uw)(code_ptr - code);
+ label = label->next;
+ }
+
SLJIT_ASSERT(!label);
SLJIT_ASSERT(!jump);
SLJIT_ASSERT(!const_);
- SLJIT_ASSERT(!put_label);
-#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
+#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
SLJIT_ASSERT(cpool_size == 0);
if (compiler->cpool_fill > 0) {
cpool_start_address = ALIGN_INSTRUCTION(code_ptr);
cpool_current_index = patch_pc_relative_loads(last_pc_patch, code_ptr, cpool_start_address, compiler->cpool_fill);
if (cpool_current_index > 0)
- code_ptr = (sljit_uw*)(cpool_start_address + cpool_current_index);
+ code_ptr = (sljit_ins*)(cpool_start_address + cpool_current_index);
buf_ptr = compiler->cpool;
buf_end = buf_ptr + compiler->cpool_fill;
cpool_current_index = 0;
while (buf_ptr < buf_end) {
if (SLJIT_UNLIKELY(resolve_const_pool_index(compiler, &first_patch, cpool_current_index, cpool_start_address, buf_ptr))) {
- SLJIT_FREE_EXEC(code, compiler->exec_allocator_data);
+ SLJIT_FREE_EXEC(code, exec_allocator_data);
compiler->error = SLJIT_ERR_ALLOC_FAILED;
return NULL;
}
@@ -857,91 +1004,95 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
jump = compiler->jumps;
while (jump) {
- buf_ptr = (sljit_uw *)jump->addr;
+ addr = (jump->flags & JUMP_ADDR) ? jump->u.target : jump->u.label->u.addr;
+ buf_ptr = (sljit_ins*)jump->addr;
+
+ if (jump->flags & JUMP_MOV_ADDR) {
+#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
+ SLJIT_ASSERT((buf_ptr[0] & (sljit_ins)0xffff0000) == 0xe59f0000);
+#else /* !SLJIT_CONFIG_ARM_V6 */
+ SLJIT_ASSERT((buf_ptr[0] & ~(sljit_ins)0xf000) == 0);
+#endif /* SLJIT_CONFIG_ARM_V6 */
+
+ if (jump->flags & PATCH_B) {
+ SLJIT_ASSERT((((sljit_sw)addr - (sljit_sw)SLJIT_ADD_EXEC_OFFSET(buf_ptr + 2, executable_offset)) & 0x3) == 0);
+ diff = ((sljit_sw)addr - (sljit_sw)SLJIT_ADD_EXEC_OFFSET(buf_ptr + 2, executable_offset)) >> 2;
+
+ SLJIT_ASSERT(diff <= 0xff && diff >= -0xff);
+
+ addr = ADD;
+ if (diff < 0) {
+ diff = -diff;
+ addr = SUB;
+ }
- if (jump->flags & PATCH_B) {
- addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(buf_ptr + 2, executable_offset);
- if (!(jump->flags & JUMP_ADDR)) {
- SLJIT_ASSERT(jump->flags & JUMP_LABEL);
- SLJIT_ASSERT((sljit_sw)(jump->u.label->addr - addr) <= 0x01ffffff && (sljit_sw)(jump->u.label->addr - addr) >= -0x02000000);
- *buf_ptr |= ((jump->u.label->addr - addr) >> 2) & 0x00ffffff;
- }
- else {
- SLJIT_ASSERT((sljit_sw)(jump->u.target - addr) <= 0x01ffffff && (sljit_sw)(jump->u.target - addr) >= -0x02000000);
- *buf_ptr |= ((jump->u.target - addr) >> 2) & 0x00ffffff;
+ buf_ptr[0] = addr | (buf_ptr[0] & 0xf000) | RN(TMP_PC) | (1 << 25) | (0xf << 8) | (sljit_ins)(diff & 0xff);
+ } else {
+#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
+ buf_ptr[((buf_ptr[0] & 0xfff) >> 2) + 2] = addr;
+#else /* !SLJIT_CONFIG_ARM_V6 */
+ buf_ptr[1] = MOVT | buf_ptr[0] | ((addr >> 12) & 0xf0000) | ((addr >> 16) & 0xfff);
+ buf_ptr[0] = MOVW | buf_ptr[0] | ((addr << 4) & 0xf0000) | (addr & 0xfff);
+#endif /* SLJIT_CONFIG_ARM_V6 */
}
- }
- else if (jump->flags & SLJIT_REWRITABLE_JUMP) {
-#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
- jump->addr = (sljit_uw)code_ptr;
- code_ptr[0] = (sljit_uw)buf_ptr;
- code_ptr[1] = *buf_ptr;
- inline_set_jump_addr((sljit_uw)code_ptr, executable_offset, (jump->flags & JUMP_LABEL) ? jump->u.label->addr : jump->u.target, 0);
- code_ptr += 2;
-#else
- inline_set_jump_addr((sljit_uw)buf_ptr, executable_offset, (jump->flags & JUMP_LABEL) ? jump->u.label->addr : jump->u.target, 0);
-#endif
- }
- else {
-#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
+ } else if (jump->flags & PATCH_B) {
+ diff = (sljit_sw)addr - (sljit_sw)SLJIT_ADD_EXEC_OFFSET(buf_ptr + 2, executable_offset);
+ SLJIT_ASSERT(diff <= 0x01ffffff && diff >= -0x02000000);
+ *buf_ptr |= (diff >> 2) & 0x00ffffff;
+ } else {
+#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
if (jump->flags & IS_BL)
buf_ptr--;
- if (*buf_ptr & (1 << 23))
- buf_ptr += ((*buf_ptr & 0xfff) >> 2) + 2;
- else
- buf_ptr += 1;
- *buf_ptr = (jump->flags & JUMP_LABEL) ? jump->u.label->addr : jump->u.target;
-#else
- inline_set_jump_addr((sljit_uw)buf_ptr, executable_offset, (jump->flags & JUMP_LABEL) ? jump->u.label->addr : jump->u.target, 0);
-#endif
+
+ if (jump->flags & SLJIT_REWRITABLE_JUMP) {
+ jump->addr = (sljit_uw)code_ptr;
+ code_ptr[0] = (sljit_ins)buf_ptr;
+ code_ptr[1] = *buf_ptr;
+ set_jump_addr((sljit_uw)code_ptr, executable_offset, addr, 0);
+ code_ptr += 2;
+ } else {
+ if (*buf_ptr & (1 << 23))
+ buf_ptr += ((*buf_ptr & 0xfff) >> 2) + 2;
+ else
+ buf_ptr += 1;
+ *buf_ptr = addr;
+ }
+#else /* !SLJIT_CONFIG_ARM_V6 */
+ set_jump_addr((sljit_uw)buf_ptr, executable_offset, addr, 0);
+#endif /* SLJIT_CONFIG_ARM_V6 */
}
+
jump = jump->next;
}
-#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
+#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
const_ = compiler->consts;
while (const_) {
- buf_ptr = (sljit_uw*)const_->addr;
+ buf_ptr = (sljit_ins*)const_->addr;
const_->addr = (sljit_uw)code_ptr;
- code_ptr[0] = (sljit_uw)buf_ptr;
+ code_ptr[0] = (sljit_ins)buf_ptr;
code_ptr[1] = *buf_ptr;
if (*buf_ptr & (1 << 23))
buf_ptr += ((*buf_ptr & 0xfff) >> 2) + 2;
else
buf_ptr += 1;
/* Set the value again (can be a simple constant). */
- inline_set_const((sljit_uw)code_ptr, executable_offset, *buf_ptr, 0);
+ set_const_value((sljit_uw)code_ptr, executable_offset, *buf_ptr, 0);
code_ptr += 2;
const_ = const_->next;
}
-#endif
+#endif /* SLJIT_CONFIG_ARM_V6 */
- put_label = compiler->put_labels;
- while (put_label) {
- addr = put_label->label->addr;
- buf_ptr = (sljit_uw*)put_label->addr;
-
-#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
- SLJIT_ASSERT((buf_ptr[0] & 0xffff0000) == 0xe59f0000);
- buf_ptr[((buf_ptr[0] & 0xfff) >> 2) + 2] = addr;
-#else
- SLJIT_ASSERT((buf_ptr[-1] & 0xfff00000) == MOVW && (buf_ptr[0] & 0xfff00000) == MOVT);
- buf_ptr[-1] |= ((addr << 4) & 0xf0000) | (addr & 0xfff);
- buf_ptr[0] |= ((addr >> 12) & 0xf0000) | ((addr >> 16) & 0xfff);
-#endif
- put_label = put_label->next;
- }
-
- SLJIT_ASSERT(code_ptr - code <= (sljit_s32)size);
+ SLJIT_ASSERT(code_ptr - code <= (sljit_s32)compiler->size);
compiler->error = SLJIT_ERR_COMPILED;
compiler->executable_offset = executable_offset;
compiler->executable_size = (sljit_uw)(code_ptr - code) * sizeof(sljit_uw);
- code = (sljit_uw *)SLJIT_ADD_EXEC_OFFSET(code, executable_offset);
- code_ptr = (sljit_uw *)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
+ code = (sljit_ins*)SLJIT_ADD_EXEC_OFFSET(code, executable_offset);
+ code_ptr = (sljit_ins*)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
SLJIT_CACHE_FLUSH(code, code_ptr);
SLJIT_UPDATE_WX_FLAGS(code, code_ptr, 1);
@@ -952,26 +1103,42 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_has_cpu_feature(sljit_s32 feature_type)
{
switch (feature_type) {
case SLJIT_HAS_FPU:
+ case SLJIT_HAS_F64_AS_F32_PAIR:
#ifdef SLJIT_IS_FPU_AVAILABLE
- return SLJIT_IS_FPU_AVAILABLE;
+ return (SLJIT_IS_FPU_AVAILABLE) != 0;
#else
/* Available by default. */
return 1;
-#endif
+#endif /* SLJIT_IS_FPU_AVAILABLE */
+ case SLJIT_HAS_SIMD:
+#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
+ return 0;
+#else
+#ifdef SLJIT_IS_FPU_AVAILABLE
+ return (SLJIT_IS_FPU_AVAILABLE) != 0;
+#else
+ /* Available by default. */
+ return 1;
+#endif /* SLJIT_IS_FPU_AVAILABLE */
+#endif /* SLJIT_CONFIG_ARM_V6 */
+ case SLJIT_SIMD_REGS_ARE_PAIRS:
case SLJIT_HAS_CLZ:
case SLJIT_HAS_ROT:
case SLJIT_HAS_CMOV:
-#if (defined SLJIT_CONFIG_ARM_V7 && SLJIT_CONFIG_ARM_V7)
- case SLJIT_HAS_CTZ:
+ case SLJIT_HAS_REV:
case SLJIT_HAS_PREFETCH:
-#endif
+ case SLJIT_HAS_COPY_F32:
+ case SLJIT_HAS_COPY_F64:
+ case SLJIT_HAS_ATOMIC:
return 1;
-#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
case SLJIT_HAS_CTZ:
+#if defined(SLJIT_CONFIG_ARM_V6) && SLJIT_CONFIG_ARM_V6
return 2;
-#endif
+#else
+ return 1;
+#endif /* SLJIT_CONFIG_ARM_V6 */
default:
return 0;
@@ -991,17 +1158,18 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_has_cpu_feature(sljit_s32 feature_type)
#define LOAD_DATA 0x08
/* Flag bits for emit_op. */
-#define ALLOW_IMM 0x10
-#define ALLOW_INV_IMM 0x20
-#define ALLOW_ANY_IMM (ALLOW_IMM | ALLOW_INV_IMM)
-#define ALLOW_NEG_IMM 0x40
+#define ALLOW_IMM 0x10
+#define ALLOW_INV_IMM 0x20
+#define ALLOW_ANY_IMM (ALLOW_IMM | ALLOW_INV_IMM)
+#define ALLOW_NEG_IMM 0x40
+#define ALLOW_DOUBLE_IMM 0x80
/* s/l - store/load (1 bit)
u/s - signed/unsigned (1 bit)
w/b/h/N - word/byte/half/NOT allowed (2 bits)
Storing signed and unsigned values is the same operation. */
-static const sljit_uw data_transfer_insts[16] = {
+static const sljit_ins data_transfer_insts[16] = {
/* s u w */ 0xe5000000 /* str */,
/* s u b */ 0xe5400000 /* strb */,
/* s u h */ 0xe10000b0 /* strh */,
@@ -1022,7 +1190,7 @@ static const sljit_uw data_transfer_insts[16] = {
};
#define EMIT_DATA_TRANSFER(type, add, target_reg, base_reg, arg) \
- (data_transfer_insts[(type) & 0xf] | ((add) << 23) | RD(target_reg) | RN(base_reg) | (sljit_uw)(arg))
+ (data_transfer_insts[(type) & 0xf] | ((add) << 23) | RD(target_reg) | RN(base_reg) | (sljit_ins)(arg))
/* Normal ldr/str instruction.
Type2: ldrsb, ldrh, ldrsh */
@@ -1032,7 +1200,7 @@ static const sljit_uw data_transfer_insts[16] = {
(((imm) & 0xf) | (((imm) & 0xf0) << 4) | (1 << 22))
#define EMIT_FPU_OPERATION(opcode, mode, dst, src1, src2) \
- ((sljit_uw)(opcode) | (sljit_uw)(mode) | VD(dst) | VM(src1) | VN(src2))
+ ((sljit_ins)(opcode) | (sljit_ins)(mode) | VD(dst) | VM(src1) | VN(src2))
/* Flags for emit_op: */
/* Arguments are swapped. */
@@ -1040,7 +1208,7 @@ static const sljit_uw data_transfer_insts[16] = {
/* Inverted immediate. */
#define INV_IMM 0x02
/* Source and destination is register. */
-#define MOVE_REG_CONV 0x04
+#define REGISTER_OP 0x04
/* Unused return value. */
#define UNUSED_RETURN 0x08
/* SET_FLAGS must be (1 << 20) as it is also the value of S bit (can be used for optimization). */
@@ -1104,12 +1272,12 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compi
}
if (fsaveds + fscratches >= SLJIT_NUMBER_OF_FLOAT_REGISTERS) {
- FAIL_IF(push_inst(compiler, VPUSH | VD(SLJIT_FS0) | ((sljit_uw)SLJIT_NUMBER_OF_SAVED_FLOAT_REGISTERS << 1)));
+ FAIL_IF(push_inst(compiler, VPUSH | VD(SLJIT_FS0) | ((sljit_ins)SLJIT_NUMBER_OF_SAVED_FLOAT_REGISTERS << 1)));
} else {
if (fsaveds > 0)
- FAIL_IF(push_inst(compiler, VPUSH | VD(SLJIT_FS0) | ((sljit_uw)fsaveds << 1)));
+ FAIL_IF(push_inst(compiler, VPUSH | VD(SLJIT_FS0) | ((sljit_ins)fsaveds << 1)));
if (fscratches >= SLJIT_FIRST_SAVED_FLOAT_REG)
- FAIL_IF(push_inst(compiler, VPUSH | VD(fscratches) | ((sljit_uw)(fscratches - (SLJIT_FIRST_SAVED_FLOAT_REG - 1)) << 1)));
+ FAIL_IF(push_inst(compiler, VPUSH | VD(fscratches) | ((sljit_ins)(fscratches - (SLJIT_FIRST_SAVED_FLOAT_REG - 1)) << 1)));
}
}
@@ -1138,7 +1306,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compi
FAIL_IF(push_inst(compiler, VMOV2 | (offset << 10) | ((offset + sizeof(sljit_sw)) << 14) | float_arg_count));
else
FAIL_IF(push_inst(compiler, VLDR_F32 | 0x800100 | RN(SLJIT_SP)
- | (float_arg_count << 12) | ((offset + (sljit_uw)size - 4 * sizeof(sljit_sw)) >> 2)));
+ | (float_arg_count << 12) | ((offset + (sljit_ins)size - 4 * sizeof(sljit_sw)) >> 2)));
float_arg_count++;
offset += sizeof(sljit_f64) - sizeof(sljit_sw);
break;
@@ -1147,7 +1315,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compi
FAIL_IF(push_inst(compiler, VMOV | (float_arg_count << 16) | (offset << 10)));
else
FAIL_IF(push_inst(compiler, VLDR_F32 | 0x800000 | RN(SLJIT_SP)
- | (float_arg_count << 12) | ((offset + (sljit_uw)size - 4 * sizeof(sljit_sw)) >> 2)));
+ | (float_arg_count << 12) | ((offset + (sljit_ins)size - 4 * sizeof(sljit_sw)) >> 2)));
float_arg_count++;
break;
default:
@@ -1164,7 +1332,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compi
if (offset < 4 * sizeof(sljit_sw))
FAIL_IF(push_inst(compiler, MOV | RD(tmp) | (offset >> 2)));
else
- FAIL_IF(push_inst(compiler, LDR | 0x800000 | RN(SLJIT_SP) | RD(tmp) | (offset + (sljit_uw)size - 4 * sizeof(sljit_sw))));
+ FAIL_IF(push_inst(compiler, LDR | 0x800000 | RN(SLJIT_SP) | RD(tmp) | (offset + (sljit_ins)size - 4 * sizeof(sljit_sw))));
break;
}
@@ -1217,7 +1385,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compi
#endif
if (local_size > 0)
- FAIL_IF(emit_op(compiler, SLJIT_SUB, ALLOW_IMM, SLJIT_SP, 0, SLJIT_SP, 0, SLJIT_IMM, local_size));
+ FAIL_IF(emit_op(compiler, SLJIT_SUB, ALLOW_IMM | ALLOW_DOUBLE_IMM, SLJIT_SP, 0, SLJIT_SP, 0, SLJIT_IMM, local_size));
return SLJIT_SUCCESS;
}
@@ -1234,6 +1402,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_set_context(struct sljit_compiler *comp
size = GET_SAVED_REGISTERS_SIZE(scratches, saveds - SLJIT_KEPT_SAVEDS_COUNT(options), 1);
+ /* Doubles are saved, so alignment is unaffected. */
if ((size & SSIZE_OF(sw)) != 0 && (fsaveds > 0 || fscratches >= SLJIT_FIRST_SAVED_FLOAT_REG))
size += SSIZE_OF(sw);
@@ -1245,13 +1414,8 @@ static sljit_s32 emit_add_sp(struct sljit_compiler *compiler, sljit_uw imm)
{
sljit_uw imm2 = get_imm(imm);
- if (imm2 == 0) {
- imm2 = (imm & ~(sljit_uw)0x3ff) >> 10;
- imm = (imm & 0x3ff) >> 2;
-
- FAIL_IF(push_inst(compiler, ADD | SRC2_IMM | RD(SLJIT_SP) | RN(SLJIT_SP) | 0xb00 | imm2));
- return push_inst(compiler, ADD | SRC2_IMM | RD(SLJIT_SP) | RN(SLJIT_SP) | 0xf00 | (imm & 0xff));
- }
+ if (imm2 == 0)
+ return emit_op(compiler, SLJIT_ADD, ALLOW_IMM | ALLOW_DOUBLE_IMM, SLJIT_SP, 0, SLJIT_SP, 0, SLJIT_IMM, (sljit_sw)imm);
return push_inst(compiler, ADD | RD(SLJIT_SP) | RN(SLJIT_SP) | imm2);
}
@@ -1274,12 +1438,12 @@ static sljit_s32 emit_stack_frame_release(struct sljit_compiler *compiler, sljit
FAIL_IF(emit_add_sp(compiler, (sljit_uw)local_size));
if (fsaveds + fscratches >= SLJIT_NUMBER_OF_FLOAT_REGISTERS) {
- FAIL_IF(push_inst(compiler, VPOP | VD(SLJIT_FS0) | ((sljit_uw)SLJIT_NUMBER_OF_SAVED_FLOAT_REGISTERS << 1)));
+ FAIL_IF(push_inst(compiler, VPOP | VD(SLJIT_FS0) | ((sljit_ins)SLJIT_NUMBER_OF_SAVED_FLOAT_REGISTERS << 1)));
} else {
if (fscratches >= SLJIT_FIRST_SAVED_FLOAT_REG)
- FAIL_IF(push_inst(compiler, VPOP | VD(fscratches) | ((sljit_uw)(fscratches - (SLJIT_FIRST_SAVED_FLOAT_REG - 1)) << 1)));
+ FAIL_IF(push_inst(compiler, VPOP | VD(fscratches) | ((sljit_ins)(fscratches - (SLJIT_FIRST_SAVED_FLOAT_REG - 1)) << 1)));
if (fsaveds > 0)
- FAIL_IF(push_inst(compiler, VPOP | VD(SLJIT_FS0) | ((sljit_uw)fsaveds << 1)));
+ FAIL_IF(push_inst(compiler, VPOP | VD(SLJIT_FS0) | ((sljit_ins)fsaveds << 1)));
}
local_size = GET_SAVED_REGISTERS_SIZE(compiler->scratches, compiler->saveds, 1) & 0x7;
@@ -1330,10 +1494,10 @@ static sljit_s32 emit_stack_frame_release(struct sljit_compiler *compiler, sljit
if (frame_size == 0)
return push_inst(compiler, LDR_POST | RN(SLJIT_SP) | RD(restored_reg) | 0x800008);
if (frame_size > 2 * SSIZE_OF(sw))
- return push_inst(compiler, LDR_POST | RN(SLJIT_SP) | RD(restored_reg) | (sljit_uw)(frame_size - (2 * SSIZE_OF(sw))));
+ return push_inst(compiler, LDR_POST | RN(SLJIT_SP) | RD(restored_reg) | (sljit_ins)(frame_size - (2 * SSIZE_OF(sw))));
}
- FAIL_IF(push_inst(compiler, LDR | 0x800000 | RN(SLJIT_SP) | RD(restored_reg) | (sljit_uw)local_size));
+ FAIL_IF(push_inst(compiler, LDR | 0x800000 | RN(SLJIT_SP) | RD(restored_reg) | (sljit_ins)local_size));
tmp = 1;
} else if (frame_size == 0) {
frame_size = (restored_reg == TMP_REG2) ? SSIZE_OF(sw) : 2 * SSIZE_OF(sw);
@@ -1349,7 +1513,7 @@ static sljit_s32 emit_stack_frame_release(struct sljit_compiler *compiler, sljit
local_size += SSIZE_OF(sw);
if (frame_size > local_size)
- FAIL_IF(push_inst(compiler, SUB | RD(SLJIT_SP) | RN(SLJIT_SP) | (1 << 25) | (sljit_uw)(frame_size - local_size)));
+ FAIL_IF(push_inst(compiler, SUB | RD(SLJIT_SP) | RN(SLJIT_SP) | (1 << 25) | (sljit_ins)(frame_size - local_size)));
else if (frame_size < local_size)
FAIL_IF(emit_add_sp(compiler, (sljit_uw)(local_size - frame_size)));
@@ -1361,11 +1525,11 @@ static sljit_s32 emit_stack_frame_release(struct sljit_compiler *compiler, sljit
if (restored_reg != TMP_REG2)
frame_size -= SSIZE_OF(sw);
- return push_inst(compiler, LDR | 0x800000 | RN(SLJIT_SP) | RD(restored_reg) | (sljit_uw)frame_size);
+ return push_inst(compiler, LDR | 0x800000 | RN(SLJIT_SP) | RD(restored_reg) | (sljit_ins)frame_size);
}
tmp = (restored_reg == TMP_REG2) ? 0x800004 : 0x800008;
- return push_inst(compiler, LDR_POST | RN(SLJIT_SP) | RD(restored_reg) | (sljit_uw)tmp);
+ return push_inst(compiler, LDR_POST | RN(SLJIT_SP) | RD(restored_reg) | (sljit_ins)tmp);
}
if (local_size > 0)
@@ -1384,7 +1548,7 @@ static sljit_s32 emit_stack_frame_release(struct sljit_compiler *compiler, sljit
FAIL_IF(push_inst(compiler, POP | reg_list));
if (frame_size > 0)
- return push_inst(compiler, SUB | RD(SLJIT_SP) | RN(SLJIT_SP) | (1 << 25) | ((sljit_uw)frame_size - sizeof(sljit_sw)));
+ return push_inst(compiler, SUB | RD(SLJIT_SP) | RN(SLJIT_SP) | (1 << 25) | ((sljit_ins)frame_size - sizeof(sljit_sw)));
if (lr_dst != 0)
return SLJIT_SUCCESS;
@@ -1429,10 +1593,10 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return_to(struct sljit_compiler *c
static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 flags,
sljit_uw dst, sljit_uw src1, sljit_uw src2)
{
- sljit_s32 is_masked;
+ sljit_s32 reg, is_masked;
sljit_uw shift_type;
- switch (GET_OPCODE(op)) {
+ switch (op) {
case SLJIT_MOV:
SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & ARGS_SWAPPED));
if (dst != src2) {
@@ -1446,17 +1610,10 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
case SLJIT_MOV_U8:
case SLJIT_MOV_S8:
SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & ARGS_SWAPPED));
- if (flags & MOVE_REG_CONV) {
-#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
- if (op == SLJIT_MOV_U8)
- return push_inst(compiler, AND | RD(dst) | RN(src2) | SRC2_IMM | 0xff);
- FAIL_IF(push_inst(compiler, MOV | RD(dst) | (24 << 7) | RM(src2)));
- return push_inst(compiler, MOV | RD(dst) | (24 << 7) | (op == SLJIT_MOV_U8 ? 0x20 : 0x40) | RM(dst));
-#else
+ if (flags & REGISTER_OP)
return push_inst(compiler, (op == SLJIT_MOV_U8 ? UXTB : SXTB) | RD(dst) | RM(src2));
-#endif
- }
- else if (dst != src2) {
+
+ if (dst != src2) {
SLJIT_ASSERT(src2 & SRC2_IMM);
return push_inst(compiler, ((flags & INV_IMM) ? MVN : MOV) | RD(dst) | src2);
}
@@ -1465,26 +1622,15 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
case SLJIT_MOV_U16:
case SLJIT_MOV_S16:
SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & ARGS_SWAPPED));
- if (flags & MOVE_REG_CONV) {
-#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
- FAIL_IF(push_inst(compiler, MOV | RD(dst) | (16 << 7) | RM(src2)));
- return push_inst(compiler, MOV | RD(dst) | (16 << 7) | (op == SLJIT_MOV_U16 ? 0x20 : 0x40) | RM(dst));
-#else
+ if (flags & REGISTER_OP)
return push_inst(compiler, (op == SLJIT_MOV_U16 ? UXTH : SXTH) | RD(dst) | RM(src2));
-#endif
- }
- else if (dst != src2) {
+
+ if (dst != src2) {
SLJIT_ASSERT(src2 & SRC2_IMM);
return push_inst(compiler, ((flags & INV_IMM) ? MVN : MOV) | RD(dst) | src2);
}
return SLJIT_SUCCESS;
- case SLJIT_NOT:
- if (src2 & SRC2_IMM)
- return push_inst(compiler, ((flags & INV_IMM) ? MOV : MVN) | (flags & SET_FLAGS) | RD(dst) | src2);
-
- return push_inst(compiler, MVN | (flags & SET_FLAGS) | RD(dst) | RM(src2));
-
case SLJIT_CLZ:
SLJIT_ASSERT(!(flags & INV_IMM) && !(src2 & SRC2_IMM));
FAIL_IF(push_inst(compiler, CLZ | RD(dst) | RM(src2)));
@@ -1492,18 +1638,31 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
case SLJIT_CTZ:
SLJIT_ASSERT(!(flags & INV_IMM) && !(src2 & SRC2_IMM));
- SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & ARGS_SWAPPED));
-#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
- FAIL_IF(push_inst(compiler, RSB | SRC2_IMM | RD(TMP_REG1) | RN(src2) | 0));
- FAIL_IF(push_inst(compiler, AND | RD(TMP_REG2) | RN(src2) | RM(TMP_REG1)));
- FAIL_IF(push_inst(compiler, CLZ | RD(dst) | RM(TMP_REG2)));
+ SLJIT_ASSERT(src1 == TMP_REG1 && src2 != TMP_REG2 && !(flags & ARGS_SWAPPED));
+#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
+ FAIL_IF(push_inst(compiler, RSB | SRC2_IMM | RD(TMP_REG2) | RN(src2) | 0));
+ FAIL_IF(push_inst(compiler, AND | RD(TMP_REG1) | RN(src2) | RM(TMP_REG2)));
+ FAIL_IF(push_inst(compiler, CLZ | RD(dst) | RM(TMP_REG1)));
FAIL_IF(push_inst(compiler, CMP | SET_FLAGS | SRC2_IMM | RN(dst) | 32));
return push_inst(compiler, (EOR ^ 0xf0000000) | SRC2_IMM | RD(dst) | RN(dst) | 0x1f);
-#else /* !SLJIT_CONFIG_ARM_V5 */
+#else /* !SLJIT_CONFIG_ARM_V6 */
FAIL_IF(push_inst(compiler, RBIT | RD(dst) | RM(src2)));
return push_inst(compiler, CLZ | RD(dst) | RM(dst));
-#endif /* SLJIT_CONFIG_ARM_V5 */
+#endif /* SLJIT_CONFIG_ARM_V6 */
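
/* Illustrative sketch (not part of the patch): the ARMv6 fallback above has
   no RBIT, so CTZ is emulated by isolating the lowest set bit with RSB+AND,
   locating it with CLZ, and flipping the index with EOR #31; the CMP #32
   keeps the all-zero input at 32. The helper name is hypothetical and the
   loop stands in for the CLZ instruction. */
#include <stdint.h>

static unsigned ctz_via_clz(uint32_t x)
{
	uint32_t lowest = x & (0u - x);           /* RSB #0 + AND          */
	unsigned lead = 0;                        /* CLZ                   */
	while (lead < 32 && !(lowest & (0x80000000u >> lead)))
		lead++;
	if (lead == 32)                           /* CMP dst, #32          */
		return 32;                        /* x == 0: result is 32  */
	return lead ^ 0x1f;                       /* EORne dst, dst, #31   */
}
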
+
+ case SLJIT_REV:
+ case SLJIT_REV_U32:
+ case SLJIT_REV_S32:
+ SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & ARGS_SWAPPED));
+ return push_inst(compiler, REV | RD(dst) | RM(src2));
+ case SLJIT_REV_U16:
+ case SLJIT_REV_S16:
+ SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & ARGS_SWAPPED));
+ FAIL_IF(push_inst(compiler, REV16 | RD(dst) | RM(src2)));
+ if (!(flags & REGISTER_OP))
+ return SLJIT_SUCCESS;
+ return push_inst(compiler, (op == SLJIT_REV_U16 ? UXTH : SXTH) | RD(dst) | RM(dst));
case SLJIT_ADD:
SLJIT_ASSERT(!(flags & INV_IMM));
@@ -1534,13 +1693,14 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
SLJIT_ASSERT(!(src2 & SRC2_IMM));
compiler->status_flags_state = 0;
- if (!HAS_FLAGS(op))
+ if (!(flags & SET_FLAGS))
return push_inst(compiler, MUL | RN(dst) | RM8(src2) | RM(src1));
- FAIL_IF(push_inst(compiler, SMULL | RN(TMP_REG1) | RD(dst) | RM8(src2) | RM(src1)));
+ reg = dst == TMP_REG1 ? TMP_REG2 : TMP_REG1;
+ FAIL_IF(push_inst(compiler, SMULL | RN(reg) | RD(dst) | RM8(src2) | RM(src1)));
/* cmp TMP_REG1, dst asr #31. */
- return push_inst(compiler, CMP | SET_FLAGS | RN(TMP_REG1) | RM(dst) | 0xfc0);
+ return push_inst(compiler, CMP | SET_FLAGS | RN(reg) | RM(dst) | 0xfc0);
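
/* Illustrative sketch (not part of the patch): signed multiply overflow is
   detected by widening with SMULL and comparing the high word against the
   low word shifted right arithmetically by 31 (dst ASR #31); the two agree
   exactly when the product fits in 32 bits. Hypothetical helper, assuming
   arithmetic right shift of negative values. */
#include <stdint.h>

static int smull_overflows(int32_t a, int32_t b)
{
	int64_t wide = (int64_t)a * (int64_t)b;   /* SMULL: reg:dst = a * b */
	int32_t lo = (int32_t)wide;
	int32_t hi = (int32_t)(wide >> 32);
	return hi != (lo >> 31);                  /* CMP reg, dst ASR #31   */
}
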
case SLJIT_AND:
if ((flags & (UNUSED_RETURN | INV_IMM)) == UNUSED_RETURN)
@@ -1553,25 +1713,28 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
return push_inst(compiler, ORR | (flags & SET_FLAGS) | RD(dst) | RN(src1) | ((src2 & SRC2_IMM) ? src2 : RM(src2)));
case SLJIT_XOR:
- SLJIT_ASSERT(!(flags & INV_IMM));
+ if (flags & INV_IMM) {
+ SLJIT_ASSERT(src2 == SRC2_IMM);
+ return push_inst(compiler, MVN | (flags & SET_FLAGS) | RD(dst) | RM(src1));
+ }
return push_inst(compiler, EOR | (flags & SET_FLAGS) | RD(dst) | RN(src1) | ((src2 & SRC2_IMM) ? src2 : RM(src2)));
case SLJIT_SHL:
case SLJIT_MSHL:
shift_type = 0;
- is_masked = GET_OPCODE(op) == SLJIT_MSHL;
+ is_masked = op == SLJIT_MSHL;
break;
case SLJIT_LSHR:
case SLJIT_MLSHR:
shift_type = 1;
- is_masked = GET_OPCODE(op) == SLJIT_MLSHR;
+ is_masked = op == SLJIT_MLSHR;
break;
case SLJIT_ASHR:
case SLJIT_MASHR:
shift_type = 2;
- is_masked = GET_OPCODE(op) == SLJIT_MASHR;
+ is_masked = op == SLJIT_MASHR;
break;
case SLJIT_ROTL:
@@ -1587,6 +1750,9 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
is_masked = 0;
break;
+ case SLJIT_MULADD:
+ return push_inst(compiler, MLA | RN(dst) | RD(dst) | RM8(src2) | RM(src1));
+
default:
SLJIT_UNREACHABLE();
return SLJIT_SUCCESS;
@@ -1611,7 +1777,7 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
}
return push_inst(compiler, MOV | (flags & SET_FLAGS) | RD(dst)
- | RM8(src2) | (sljit_uw)(shift_type << 5) | 0x10 | RM(src1));
+ | RM8(src2) | (sljit_ins)(shift_type << 5) | 0x10 | RM(src1));
}
#undef EMIT_SHIFT_INS_AND_RETURN
@@ -1628,8 +1794,7 @@ static sljit_uw get_imm(sljit_uw imm)
if (!(imm & 0xff000000)) {
imm <<= 8;
rol = 8;
- }
- else {
+ } else {
imm = (imm << 24) | (imm >> 8);
rol = 0;
}
@@ -1651,22 +1816,19 @@ static sljit_uw get_imm(sljit_uw imm)
if (!(imm & 0x00ffffff))
return SRC2_IMM | (imm >> 24) | (rol << 8);
- else
- return 0;
+ return 0;
}
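
/* Illustrative sketch (not part of the patch): get_imm() above answers
   whether a 32-bit constant fits ARM's data-processing immediate form, an
   8-bit value rotated right by an even amount. A brute-force model with a
   hypothetical name: */
#include <stdint.h>

static int fits_rotated_imm8(uint32_t v)
{
	for (unsigned ror = 0; ror < 32; ror += 2) {
		/* Rotating v left by ror undoes a right rotation of ror. */
		uint32_t u = ror ? ((v << ror) | (v >> (32 - ror))) : v;
		if (u <= 0xff)
			return 1;
	}
	return 0;
}
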
-#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
-static sljit_s32 generate_int(struct sljit_compiler *compiler, sljit_s32 reg, sljit_uw imm, sljit_s32 positive)
+static sljit_uw compute_imm(sljit_uw imm, sljit_uw* imm2)
{
sljit_uw mask;
sljit_uw imm1;
- sljit_uw imm2;
sljit_uw rol;
/* Step 1: Search for a zero byte (8 continuous zero bits). */
mask = 0xff000000;
rol = 8;
- while(1) {
+ while (1) {
if (!(imm & mask)) {
/* Rol imm by rol. */
imm = (imm << rol) | (imm >> (32 - rol));
@@ -1674,6 +1836,7 @@ static sljit_s32 generate_int(struct sljit_compiler *compiler, sljit_s32 reg, sl
rol = 4 + (rol >> 1);
break;
}
+
rol += 2;
mask >>= 2;
if (mask & 0x3) {
@@ -1703,9 +1866,8 @@ static sljit_s32 generate_int(struct sljit_compiler *compiler, sljit_s32 reg, sl
if (!(imm & 0xff000000)) {
imm1 = SRC2_IMM | ((imm >> 16) & 0xff) | (((rol + 4) & 0xf) << 8);
- imm2 = SRC2_IMM | ((imm >> 8) & 0xff) | (((rol + 8) & 0xf) << 8);
- }
- else if (imm & 0xc0000000) {
+ *imm2 = SRC2_IMM | ((imm >> 8) & 0xff) | (((rol + 8) & 0xf) << 8);
+ } else if (imm & 0xc0000000) {
imm1 = SRC2_IMM | ((imm >> 24) & 0xff) | ((rol & 0xf) << 8);
imm <<= 8;
rol += 4;
@@ -1726,11 +1888,10 @@ static sljit_s32 generate_int(struct sljit_compiler *compiler, sljit_s32 reg, sl
}
if (!(imm & 0x00ffffff))
- imm2 = SRC2_IMM | (imm >> 24) | ((rol & 0xf) << 8);
+ *imm2 = SRC2_IMM | (imm >> 24) | ((rol & 0xf) << 8);
else
return 0;
- }
- else {
+ } else {
if (!(imm & 0xf0000000)) {
imm <<= 4;
rol += 2;
@@ -1756,25 +1917,23 @@ static sljit_s32 generate_int(struct sljit_compiler *compiler, sljit_s32 reg, sl
}
if (!(imm & 0x00ffffff))
- imm2 = SRC2_IMM | (imm >> 24) | ((rol & 0xf) << 8);
+ *imm2 = SRC2_IMM | (imm >> 24) | ((rol & 0xf) << 8);
else
return 0;
}
- FAIL_IF(push_inst(compiler, (positive ? MOV : MVN) | RD(reg) | imm1));
- FAIL_IF(push_inst(compiler, (positive ? ORR : BIC) | RD(reg) | RN(reg) | imm2));
- return 1;
+ return imm1;
}
-#endif
static sljit_s32 load_immediate(struct sljit_compiler *compiler, sljit_s32 reg, sljit_uw imm)
{
sljit_uw tmp;
-
-#if (defined SLJIT_CONFIG_ARM_V7 && SLJIT_CONFIG_ARM_V7)
+#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
+ sljit_uw imm1, imm2;
+#else /* !SLJIT_CONFIG_ARM_V6 */
if (!(imm & ~(sljit_uw)0xffff))
return push_inst(compiler, MOVW | RD(reg) | ((imm << 4) & 0xf0000) | (imm & 0xfff));
-#endif
+#endif /* SLJIT_CONFIG_ARM_V6 */
/* Create imm by 1 inst. */
tmp = get_imm(imm);
@@ -1785,19 +1944,28 @@ static sljit_s32 load_immediate(struct sljit_compiler *compiler, sljit_s32 reg,
if (tmp)
return push_inst(compiler, MVN | RD(reg) | tmp);
-#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
+#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
/* Create imm by 2 inst. */
- FAIL_IF(generate_int(compiler, reg, imm, 1));
- FAIL_IF(generate_int(compiler, reg, ~imm, 0));
+ imm1 = compute_imm(imm, &imm2);
+ if (imm1 != 0) {
+ FAIL_IF(push_inst(compiler, MOV | RD(reg) | imm1));
+ return push_inst(compiler, ORR | RD(reg) | RN(reg) | imm2);
+ }
+
+ imm1 = compute_imm(~imm, &imm2);
+ if (imm1 != 0) {
+ FAIL_IF(push_inst(compiler, MVN | RD(reg) | imm1));
+ return push_inst(compiler, BIC | RD(reg) | RN(reg) | imm2);
+ }
/* Load integer. */
return push_inst_with_literal(compiler, EMIT_DATA_TRANSFER(WORD_SIZE | LOAD_DATA, 1, reg, TMP_PC, 0), imm);
-#else
+#else /* !SLJIT_CONFIG_ARM_V6 */
FAIL_IF(push_inst(compiler, MOVW | RD(reg) | ((imm << 4) & 0xf0000) | (imm & 0xfff)));
if (imm <= 0xffff)
return SLJIT_SUCCESS;
return push_inst(compiler, MOVT | RD(reg) | ((imm >> 12) & 0xf0000) | ((imm >> 16) & 0xfff));
-#endif
+#endif /* SLJIT_CONFIG_ARM_V6 */
}
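
/* Illustrative example (not part of the patch): compute_imm() lets
   load_immediate() build a constant from two rotated 8-bit chunks when one
   chunk is not enough, e.g. MOV rd, #0x12000000 ; ORR rd, rd, #0x00340000
   for 0x12340000 (11 significant bits, so no single immediate). The
   MVN/BIC pair does the same for the complement. Standalone check: */
#include <assert.h>
#include <stdint.h>

static uint32_t ror32(uint32_t v, unsigned r)
{
	return r ? ((v >> r) | (v << (32 - r))) : v;
}

int main(void)
{
	uint32_t imm = 0x12340000u;
	uint32_t chunk1 = ror32(0x12, 8);     /* 0x12000000, first MOV  */
	uint32_t chunk2 = ror32(0x34, 16);    /* 0x00340000, second ORR */
	assert((chunk1 | chunk2) == imm);
	return 0;
}
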
static sljit_s32 emit_op_mem(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg,
@@ -1834,13 +2002,13 @@ static sljit_s32 emit_op_mem(struct sljit_compiler *compiler, sljit_s32 flags, s
argw &= 0x3;
if (argw != 0 && (mask == 0xff)) {
- FAIL_IF(push_inst(compiler, ADD | RD(tmp_reg) | RN(arg) | RM(offset_reg) | ((sljit_uw)argw << 7)));
+ FAIL_IF(push_inst(compiler, ADD | RD(tmp_reg) | RN(arg) | RM(offset_reg) | ((sljit_ins)argw << 7)));
return push_inst(compiler, EMIT_DATA_TRANSFER(flags, 1, reg, tmp_reg, TYPE2_TRANSFER_IMM(0)));
}
/* Bit 25: RM is offset. */
return push_inst(compiler, EMIT_DATA_TRANSFER(flags, 1, reg, arg,
- RM(offset_reg) | (mask == 0xff ? 0 : (1 << 25)) | ((sljit_uw)argw << 7)));
+ RM(offset_reg) | (mask == 0xff ? 0 : (1 << 25)) | ((sljit_ins)argw << 7)));
}
arg &= REG_MASK;
@@ -1902,18 +2070,25 @@ static sljit_s32 emit_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s3
/* We prefer registers and simple constants. */
sljit_s32 dst_reg;
- sljit_s32 src1_reg;
+ sljit_s32 src1_reg = 0;
sljit_s32 src2_reg = 0;
+ sljit_s32 src2_tmp_reg = 0;
sljit_s32 flags = HAS_FLAGS(op) ? SET_FLAGS : 0;
sljit_s32 neg_op = 0;
+ sljit_u32 imm2;
+
+ op = GET_OPCODE(op);
- if (dst == TMP_REG2)
+ if (flags & SET_FLAGS)
+ inp_flags &= ~ALLOW_DOUBLE_IMM;
+
+ if (dst == TMP_REG1)
flags |= UNUSED_RETURN;
SLJIT_ASSERT(!(inp_flags & ALLOW_INV_IMM) || (inp_flags & ALLOW_IMM));
if (inp_flags & ALLOW_NEG_IMM) {
- switch (GET_OPCODE(op)) {
+ switch (op) {
case SLJIT_ADD:
compiler->status_flags_state = SLJIT_CURRENT_FLAGS_ADD;
neg_op = SLJIT_SUB;
@@ -1937,10 +2112,11 @@ static sljit_s32 emit_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s3
if (!(inp_flags & ALLOW_IMM))
break;
- if (src2 & SLJIT_IMM) {
+ if (src2 == SLJIT_IMM) {
src2_reg = (sljit_s32)get_imm((sljit_uw)src2w);
if (src2_reg)
break;
+
if (inp_flags & ALLOW_INV_IMM) {
src2_reg = (sljit_s32)get_imm(~(sljit_uw)src2w);
if (src2_reg) {
@@ -1948,8 +2124,9 @@ static sljit_s32 emit_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s3
break;
}
}
+
if (neg_op != 0) {
- src2_reg = (sljit_s32)get_imm((sljit_uw)-src2w);
+ src2_reg = (sljit_s32)get_imm((neg_op == SLJIT_ADD || neg_op == SLJIT_SUB) ? (sljit_uw)-src2w : ~(sljit_uw)src2w);
if (src2_reg) {
op = neg_op | GET_ALL_FLAGS(op);
break;
@@ -1957,7 +2134,7 @@ static sljit_s32 emit_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s3
}
}
- if (src1 & SLJIT_IMM) {
+ if (src1 == SLJIT_IMM) {
src2_reg = (sljit_s32)get_imm((sljit_uw)src1w);
if (src2_reg) {
flags |= ARGS_SWAPPED;
@@ -1965,6 +2142,7 @@ static sljit_s32 emit_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s3
src1w = src2w;
break;
}
+
if (inp_flags & ALLOW_INV_IMM) {
src2_reg = (sljit_s32)get_imm(~(sljit_uw)src1w);
if (src2_reg) {
@@ -1974,8 +2152,11 @@ static sljit_s32 emit_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s3
break;
}
}
+
if (neg_op >= SLJIT_SUB) {
/* Note: additive operation (commutative). */
+ SLJIT_ASSERT(op == SLJIT_ADD || op == SLJIT_ADDC);
+
src2_reg = (sljit_s32)get_imm((sljit_uw)-src1w);
if (src2_reg) {
src1 = src2;
@@ -1987,18 +2168,6 @@ static sljit_s32 emit_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s3
}
} while(0);
- /* Source 1. */
- if (FAST_IS_REG(src1))
- src1_reg = src1;
- else if (src1 & SLJIT_MEM) {
- FAIL_IF(emit_op_mem(compiler, inp_flags | LOAD_DATA, TMP_REG1, src1, src1w, TMP_REG1));
- src1_reg = TMP_REG1;
- }
- else {
- FAIL_IF(load_immediate(compiler, TMP_REG1, (sljit_uw)src1w));
- src1_reg = TMP_REG1;
- }
-
/* Destination. */
dst_reg = FAST_IS_REG(dst) ? dst : TMP_REG2;
@@ -2008,23 +2177,100 @@ static sljit_s32 emit_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s3
inp_flags &= ~SIGNED;
if (FAST_IS_REG(src2))
- return emit_op_mem(compiler, inp_flags, src2, dst, dstw, TMP_REG2);
+ return emit_op_mem(compiler, inp_flags, src2, dst, dstw, TMP_REG1);
}
if (FAST_IS_REG(src2) && dst_reg != TMP_REG2)
- flags |= MOVE_REG_CONV;
+ flags |= REGISTER_OP;
+
+ src2_tmp_reg = dst_reg;
+ } else {
+ if (op == SLJIT_REV_U16 || op == SLJIT_REV_S16) {
+ if (!(dst & SLJIT_MEM) && (!(src2 & SLJIT_MEM) || op == SLJIT_REV_S16))
+ flags |= REGISTER_OP;
+ }
+
+ src2_tmp_reg = FAST_IS_REG(src1) ? TMP_REG1 : TMP_REG2;
+ }
+
+ if (src2_reg == 0 && (src2 & SLJIT_MEM)) {
+ src2_reg = src2_tmp_reg;
+ FAIL_IF(emit_op_mem(compiler, inp_flags | LOAD_DATA, src2_reg, src2, src2w, TMP_REG1));
+ }
+
+ /* Source 1. */
+ if (FAST_IS_REG(src1))
+ src1_reg = src1;
+ else if (src1 & SLJIT_MEM) {
+ FAIL_IF(emit_op_mem(compiler, inp_flags | LOAD_DATA, TMP_REG1, src1, src1w, TMP_REG1));
+ src1_reg = TMP_REG1;
+ } else if (!(inp_flags & ALLOW_DOUBLE_IMM) || src2_reg != 0 || op == SLJIT_SUB || op == SLJIT_SUBC) {
+ FAIL_IF(load_immediate(compiler, TMP_REG1, (sljit_uw)src1w));
+ src1_reg = TMP_REG1;
}
/* Source 2. */
if (src2_reg == 0) {
- src2_reg = (op <= SLJIT_MOV_P) ? dst_reg : TMP_REG2;
+ src2_reg = src2_tmp_reg;
if (FAST_IS_REG(src2))
src2_reg = src2;
- else if (src2 & SLJIT_MEM)
- FAIL_IF(emit_op_mem(compiler, inp_flags | LOAD_DATA, src2_reg, src2, src2w, TMP_REG2));
- else
+ else if (!(inp_flags & ALLOW_DOUBLE_IMM))
FAIL_IF(load_immediate(compiler, src2_reg, (sljit_uw)src2w));
+ else {
+ SLJIT_ASSERT(!(flags & SET_FLAGS));
+
+ if (src1_reg == 0) {
+ FAIL_IF(load_immediate(compiler, TMP_REG1, (sljit_uw)src1w));
+ src1_reg = TMP_REG1;
+ }
+
+ src2_reg = (sljit_s32)compute_imm((sljit_uw)src2w, &imm2);
+
+ if (src2_reg == 0 && neg_op != 0) {
+ src2_reg = (sljit_s32)compute_imm((sljit_uw)-src2w, &imm2);
+ if (src2_reg != 0)
+ op = neg_op;
+ }
+
+ if (src2_reg == 0) {
+ FAIL_IF(load_immediate(compiler, src2_tmp_reg, (sljit_uw)src2w));
+ src2_reg = src2_tmp_reg;
+ } else {
+ FAIL_IF(emit_single_op(compiler, op, flags, (sljit_uw)dst_reg, (sljit_uw)src1_reg, (sljit_uw)src2_reg));
+ src1_reg = dst_reg;
+ src2_reg = (sljit_s32)imm2;
+
+ if (op == SLJIT_ADDC)
+ op = SLJIT_ADD;
+ else if (op == SLJIT_SUBC)
+ op = SLJIT_SUB;
+ }
+ }
+ }
+
+ if (src1_reg == 0) {
+ SLJIT_ASSERT((inp_flags & ALLOW_DOUBLE_IMM) && !(flags & SET_FLAGS));
+
+ src1_reg = (sljit_s32)compute_imm((sljit_uw)src1w, &imm2);
+
+ if (src1_reg == 0 && neg_op != 0) {
+ src1_reg = (sljit_s32)compute_imm((sljit_uw)-src1w, &imm2);
+ if (src1_reg != 0)
+ op = neg_op;
+ }
+
+ if (src1_reg == 0) {
+ FAIL_IF(load_immediate(compiler, TMP_REG1, (sljit_uw)src1w));
+ src1_reg = TMP_REG1;
+ } else {
+ FAIL_IF(emit_single_op(compiler, op, flags, (sljit_uw)dst_reg, (sljit_uw)src2_reg, (sljit_uw)src1_reg));
+ src1_reg = dst_reg;
+ src2_reg = (sljit_s32)imm2;
+
+ if (op == SLJIT_ADDC)
+ op = SLJIT_ADD;
+ }
}
FAIL_IF(emit_single_op(compiler, op, flags, (sljit_uw)dst_reg, (sljit_uw)src1_reg, (sljit_uw)src2_reg));
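
/* Illustrative sketch (not part of the patch): ALLOW_DOUBLE_IMM applies the
   two-chunk idea directly to the operation when no flags are requested:
   OP rd, rn, #imm becomes OP rd, rn, #chunk1 ; OP2 rd, rd, #chunk2, where
   OP2 degrades ADDC/SUBC to plain ADD/SUB because any carry is consumed by
   the first half. The values below are arbitrary. */
#include <assert.h>
#include <stdint.h>

int main(void)
{
	uint32_t rn = 0xdeadbeefu;
	uint32_t one_step  = rn + 0x12340000u;
	uint32_t two_steps = (rn + 0x12000000u) + 0x00340000u;   /* ADD ; ADD */
	assert(one_step == two_steps);
	return 0;
}
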
@@ -2114,7 +2360,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op0(struct sljit_compiler *compile
SLJIT_ASSERT(saved_reg_list[1] < 8);
FAIL_IF(push_inst(compiler, LDR | 0x8d0004 | (saved_reg_list[1] << 12) /* ldr rX, [sp, #4] */));
}
- return push_inst(compiler, (LDR ^ (1 << 24)) | 0x8d0000 | (sljit_uw)(saved_reg_count >= 3 ? 16 : 8)
+ return push_inst(compiler, (LDR ^ (1 << 24)) | 0x8d0000 | (sljit_ins)(saved_reg_count >= 3 ? 16 : 8)
| (saved_reg_list[0] << 12) /* ldr rX, [sp], #8/16 */);
}
return SLJIT_SUCCESS;
@@ -2144,23 +2390,27 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compile
return emit_op(compiler, SLJIT_MOV, ALLOW_ANY_IMM, dst, dstw, TMP_REG1, 0, src, srcw);
case SLJIT_MOV_U8:
- return emit_op(compiler, SLJIT_MOV_U8, ALLOW_ANY_IMM | BYTE_SIZE, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_u8)srcw : srcw);
+ return emit_op(compiler, SLJIT_MOV_U8, ALLOW_ANY_IMM | BYTE_SIZE, dst, dstw, TMP_REG1, 0, src, (src == SLJIT_IMM) ? (sljit_u8)srcw : srcw);
case SLJIT_MOV_S8:
- return emit_op(compiler, SLJIT_MOV_S8, ALLOW_ANY_IMM | SIGNED | BYTE_SIZE, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s8)srcw : srcw);
+ return emit_op(compiler, SLJIT_MOV_S8, ALLOW_ANY_IMM | SIGNED | BYTE_SIZE, dst, dstw, TMP_REG1, 0, src, (src == SLJIT_IMM) ? (sljit_s8)srcw : srcw);
case SLJIT_MOV_U16:
- return emit_op(compiler, SLJIT_MOV_U16, ALLOW_ANY_IMM | HALF_SIZE, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_u16)srcw : srcw);
+ return emit_op(compiler, SLJIT_MOV_U16, ALLOW_ANY_IMM | HALF_SIZE, dst, dstw, TMP_REG1, 0, src, (src == SLJIT_IMM) ? (sljit_u16)srcw : srcw);
case SLJIT_MOV_S16:
- return emit_op(compiler, SLJIT_MOV_S16, ALLOW_ANY_IMM | SIGNED | HALF_SIZE, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s16)srcw : srcw);
-
- case SLJIT_NOT:
- return emit_op(compiler, op, ALLOW_ANY_IMM, dst, dstw, TMP_REG1, 0, src, srcw);
+ return emit_op(compiler, SLJIT_MOV_S16, ALLOW_ANY_IMM | SIGNED | HALF_SIZE, dst, dstw, TMP_REG1, 0, src, (src == SLJIT_IMM) ? (sljit_s16)srcw : srcw);
case SLJIT_CLZ:
case SLJIT_CTZ:
+ case SLJIT_REV:
+ case SLJIT_REV_U32:
+ case SLJIT_REV_S32:
return emit_op(compiler, op, 0, dst, dstw, TMP_REG1, 0, src, srcw);
+
+ case SLJIT_REV_U16:
+ case SLJIT_REV_S16:
+ return emit_op(compiler, op, HALF_SIZE, dst, dstw, TMP_REG1, 0, src, srcw);
}
return SLJIT_SUCCESS;
@@ -2171,6 +2421,8 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compile
sljit_s32 src1, sljit_sw src1w,
sljit_s32 src2, sljit_sw src2w)
{
+ sljit_s32 inp_flags;
+
CHECK_ERROR();
CHECK(check_sljit_emit_op2(compiler, op, 0, dst, dstw, src1, src1w, src2, src2w));
ADJUST_LOCAL_OFFSET(dst, dstw);
@@ -2182,11 +2434,17 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compile
case SLJIT_ADDC:
case SLJIT_SUB:
case SLJIT_SUBC:
- return emit_op(compiler, op, ALLOW_IMM | ALLOW_NEG_IMM, dst, dstw, src1, src1w, src2, src2w);
+ return emit_op(compiler, op, ALLOW_IMM | ALLOW_NEG_IMM | ALLOW_DOUBLE_IMM, dst, dstw, src1, src1w, src2, src2w);
case SLJIT_OR:
+ return emit_op(compiler, op, ALLOW_IMM | ALLOW_DOUBLE_IMM, dst, dstw, src1, src1w, src2, src2w);
+
case SLJIT_XOR:
- return emit_op(compiler, op, ALLOW_IMM, dst, dstw, src1, src1w, src2, src2w);
+ inp_flags = ALLOW_IMM | ALLOW_DOUBLE_IMM;
+ if ((src1 == SLJIT_IMM && src1w == -1) || (src2 == SLJIT_IMM && src2w == -1)) {
+ inp_flags |= ALLOW_INV_IMM;
+ }
+ return emit_op(compiler, op, inp_flags, dst, dstw, src1, src1w, src2, src2w);
case SLJIT_MUL:
return emit_op(compiler, op, 0, dst, dstw, src1, src1w, src2, src2w);
@@ -2202,7 +2460,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compile
case SLJIT_MASHR:
case SLJIT_ROTL:
case SLJIT_ROTR:
- if (src2 & SLJIT_IMM) {
+ if (src2 == SLJIT_IMM) {
compiler->shift_imm = src2w & 0x1f;
return emit_op(compiler, op, 0, dst, dstw, TMP_REG1, 0, src1, src1w);
} else {
@@ -2222,64 +2480,74 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2u(struct sljit_compiler *compil
CHECK(check_sljit_emit_op2(compiler, op, 1, 0, 0, src1, src1w, src2, src2w));
SLJIT_SKIP_CHECKS(compiler);
- return sljit_emit_op2(compiler, op, TMP_REG2, 0, src1, src1w, src2, src2w);
+ return sljit_emit_op2(compiler, op, TMP_REG1, 0, src1, src1w, src2, src2w);
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_shift_into(struct sljit_compiler *compiler, sljit_s32 op,
- sljit_s32 src_dst,
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2r(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst_reg,
sljit_s32 src1, sljit_sw src1w,
sljit_s32 src2, sljit_sw src2w)
{
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_op2r(compiler, op, dst_reg, src1, src1w, src2, src2w));
+ ADJUST_LOCAL_OFFSET(src1, src1w);
+ ADJUST_LOCAL_OFFSET(src2, src2w);
+
+ switch (GET_OPCODE(op)) {
+ case SLJIT_MULADD:
+ return emit_op(compiler, op, 0, dst_reg, 0, src1, src1w, src2, src2w);
+ }
+
+ return SLJIT_SUCCESS;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_shift_into(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst_reg,
+ sljit_s32 src1_reg,
+ sljit_s32 src2_reg,
+ sljit_s32 src3, sljit_sw src3w)
+{
sljit_s32 is_left;
CHECK_ERROR();
- CHECK(check_sljit_emit_shift_into(compiler, op, src_dst, src1, src1w, src2, src2w));
+ CHECK(check_sljit_emit_shift_into(compiler, op, dst_reg, src1_reg, src2_reg, src3, src3w));
op = GET_OPCODE(op);
is_left = (op == SLJIT_SHL || op == SLJIT_MSHL);
- if (src_dst == src1) {
+ if (src1_reg == src2_reg) {
SLJIT_SKIP_CHECKS(compiler);
- return sljit_emit_op2(compiler, is_left ? SLJIT_ROTL : SLJIT_ROTR, src_dst, 0, src_dst, 0, src2, src2w);
+ return sljit_emit_op2(compiler, is_left ? SLJIT_ROTL : SLJIT_ROTR, dst_reg, 0, src1_reg, 0, src3, src3w);
}
- ADJUST_LOCAL_OFFSET(src1, src1w);
- ADJUST_LOCAL_OFFSET(src2, src2w);
+ ADJUST_LOCAL_OFFSET(src3, src3w);
/* Shift type of ROR is 3. */
- if (src2 & SLJIT_IMM) {
- src2w &= 0x1f;
+ if (src3 == SLJIT_IMM) {
+ src3w &= 0x1f;
- if (src2w == 0)
+ if (src3w == 0)
return SLJIT_SUCCESS;
- } else if (src2 & SLJIT_MEM) {
- FAIL_IF(emit_op_mem(compiler, WORD_SIZE | LOAD_DATA, TMP_REG2, src2, src2w, TMP_REG2));
- src2 = TMP_REG2;
- }
- if (src1 & SLJIT_MEM) {
- FAIL_IF(emit_op_mem(compiler, WORD_SIZE | LOAD_DATA, TMP_REG1, src1, src1w, TMP_REG1));
- src1 = TMP_REG1;
- } else if (src1 & SLJIT_IMM) {
- FAIL_IF(load_immediate(compiler, TMP_REG1, (sljit_uw)src1w));
- src1 = TMP_REG1;
+ FAIL_IF(push_inst(compiler, MOV | RD(dst_reg) | RM(src1_reg) | ((sljit_ins)(is_left ? 0 : 1) << 5) | ((sljit_ins)src3w << 7)));
+ src3w = (src3w ^ 0x1f) + 1;
+ return push_inst(compiler, ORR | RD(dst_reg) | RN(dst_reg) | RM(src2_reg) | ((sljit_ins)(is_left ? 1 : 0) << 5) | ((sljit_ins)src3w << 7));
}
- if (src2 & SLJIT_IMM) {
- FAIL_IF(push_inst(compiler, MOV | RD(src_dst) | RM(src_dst) | ((sljit_uw)(is_left ? 0 : 1) << 5) | ((sljit_uw)src2w << 7)));
- src2w = (src2w ^ 0x1f) + 1;
- return push_inst(compiler, ORR | RD(src_dst) | RN(src_dst) | RM(src1) | ((sljit_uw)(is_left ? 1 : 0) << 5) | ((sljit_uw)src2w << 7));
+ if (src3 & SLJIT_MEM) {
+ FAIL_IF(emit_op_mem(compiler, WORD_SIZE | LOAD_DATA, TMP_REG2, src3, src3w, TMP_REG2));
+ src3 = TMP_REG2;
}
- if (op == SLJIT_MSHL || op == SLJIT_MLSHR) {
- FAIL_IF(push_inst(compiler, AND | SRC2_IMM | RD(TMP_REG2) | RN(src2) | 0x1f));
- src2 = TMP_REG2;
+ if (op == SLJIT_MSHL || op == SLJIT_MLSHR || dst_reg == src3) {
+ FAIL_IF(push_inst(compiler, AND | SRC2_IMM | RD(TMP_REG2) | RN(src3) | 0x1f));
+ src3 = TMP_REG2;
}
- FAIL_IF(push_inst(compiler, MOV | RD(src_dst) | RM8(src2) | ((sljit_uw)(is_left ? 0 : 1) << 5) | 0x10 | RM(src_dst)));
- FAIL_IF(push_inst(compiler, MOV | RD(TMP_REG1) | RM(src1) | ((sljit_uw)(is_left ? 1 : 0) << 5) | (1 << 7)));
- FAIL_IF(push_inst(compiler, EOR | SRC2_IMM | RD(TMP_REG2) | RN(src2) | 0x1f));
- return push_inst(compiler, ORR | RD(src_dst) | RN(src_dst) | RM(TMP_REG1) | ((sljit_uw)(is_left ? 1 : 0) << 5) | 0x10 | RM8(TMP_REG2));
+ FAIL_IF(push_inst(compiler, MOV | RD(dst_reg) | RM8(src3) | ((sljit_ins)(is_left ? 0 : 1) << 5) | 0x10 | RM(src1_reg)));
+ FAIL_IF(push_inst(compiler, MOV | RD(TMP_REG1) | RM(src2_reg) | ((sljit_ins)(is_left ? 1 : 0) << 5) | (1 << 7)));
+ FAIL_IF(push_inst(compiler, EOR | SRC2_IMM | RD(TMP_REG2) | RN(src3) | 0x1f));
+ return push_inst(compiler, ORR | RD(dst_reg) | RN(dst_reg) | RM8(TMP_REG2) | ((sljit_ins)(is_left ? 1 : 0) << 5) | 0x10 | RM(TMP_REG1));
}
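
/* Illustrative sketch (not part of the patch): the register path of
   sljit_emit_shift_into() above computes a funnel shift, for the left case
   dst = (src1 << n) | (src2 >> (32 - n)), without ever shifting by 32:
   src2 is pre-shifted by one and then shifted by n ^ 31, which equals
   32 - n - 1. Hypothetical helper modelling the MOV/EOR/ORR sequence for a
   masked shift amount: */
#include <assert.h>
#include <stdint.h>

static uint32_t funnel_shift_left(uint32_t src1, uint32_t src2, unsigned n)
{
	n &= 0x1f;
	return (src1 << n) | ((src2 >> 1) >> (n ^ 0x1f));
}

int main(void)
{
	assert(funnel_shift_left(0x00000001u, 0x80000000u, 4) == 0x00000018u);
	assert(funnel_shift_left(0x12345678u, 0xffffffffu, 0) == 0x12345678u);
	return 0;
}
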
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_src(struct sljit_compiler *compiler, sljit_s32 op,
@@ -2305,27 +2573,67 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_src(struct sljit_compiler *comp
case SLJIT_PREFETCH_L2:
case SLJIT_PREFETCH_L3:
case SLJIT_PREFETCH_ONCE:
-#if (defined SLJIT_CONFIG_ARM_V7 && SLJIT_CONFIG_ARM_V7)
SLJIT_ASSERT(src & SLJIT_MEM);
return emit_op_mem(compiler, PRELOAD | LOAD_DATA, TMP_PC, src, srcw, TMP_REG1);
-#else /* !SLJIT_CONFIG_ARM_V7 */
- return SLJIT_SUCCESS;
-#endif /* SLJIT_CONFIG_ARM_V7 */
}
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 reg)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_dst(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw)
{
- CHECK_REG_INDEX(check_sljit_get_register_index(reg));
- return reg_map[reg];
+ sljit_s32 size, dst_r;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_op_dst(compiler, op, dst, dstw));
+ ADJUST_LOCAL_OFFSET(dst, dstw);
+
+ switch (op) {
+ case SLJIT_FAST_ENTER:
+ SLJIT_ASSERT(reg_map[TMP_REG2] == 14);
+
+ if (FAST_IS_REG(dst))
+ return push_inst(compiler, MOV | RD(dst) | RM(TMP_REG2));
+ break;
+ case SLJIT_GET_RETURN_ADDRESS:
+ size = GET_SAVED_REGISTERS_SIZE(compiler->scratches, compiler->saveds - SLJIT_KEPT_SAVEDS_COUNT(compiler->options), 0);
+
+ if (compiler->fsaveds > 0 || compiler->fscratches >= SLJIT_FIRST_SAVED_FLOAT_REG) {
+ /* The size of pc is not added above. */
+ if ((size & SSIZE_OF(sw)) == 0)
+ size += SSIZE_OF(sw);
+
+ size += GET_SAVED_FLOAT_REGISTERS_SIZE(compiler->fscratches, compiler->fsaveds, f64);
+ }
+
+ SLJIT_ASSERT(((compiler->local_size + size + SSIZE_OF(sw)) & 0x7) == 0);
+
+ dst_r = FAST_IS_REG(dst) ? dst : TMP_REG2;
+ FAIL_IF(emit_op_mem(compiler, WORD_SIZE | LOAD_DATA, dst_r, SLJIT_MEM1(SLJIT_SP), compiler->local_size + size, TMP_REG1));
+ break;
+ }
+
+ if (dst & SLJIT_MEM)
+ return emit_op_mem(compiler, WORD_SIZE, TMP_REG2, dst, dstw, TMP_REG1);
+
+ return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_float_register_index(sljit_s32 reg)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 type, sljit_s32 reg)
{
- CHECK_REG_INDEX(check_sljit_get_float_register_index(reg));
- return (freg_map[reg] << 1);
+ CHECK_REG_INDEX(check_sljit_get_register_index(type, reg));
+
+ if (type == SLJIT_GP_REGISTER)
+ return reg_map[reg];
+
+ if (type == SLJIT_FLOAT_REGISTER || type == SLJIT_SIMD_REG_64)
+ return freg_map[reg];
+
+ if (type != SLJIT_SIMD_REG_128)
+ return freg_map[reg] & ~0x1;
+
+ return -1;
}
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *compiler,
@@ -2335,7 +2643,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *c
CHECK_ERROR();
CHECK(check_sljit_emit_op_custom(compiler, instruction, size));
- return push_inst(compiler, *(sljit_uw*)instruction);
+ return push_inst(compiler, *(sljit_ins*)instruction);
}
/* --------------------------------------------------------------------- */
@@ -2344,19 +2652,19 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *c
#define FPU_LOAD (1 << 20)
#define EMIT_FPU_DATA_TRANSFER(inst, add, base, freg, offs) \
- ((inst) | (sljit_uw)((add) << 23) | RN(base) | VD(freg) | (sljit_uw)(offs))
+ ((inst) | (sljit_ins)((add) << 23) | RN(base) | VD(freg) | (sljit_ins)(offs))
static sljit_s32 emit_fop_mem(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg, sljit_s32 arg, sljit_sw argw)
{
sljit_uw imm;
- sljit_uw inst = VSTR_F32 | (flags & (SLJIT_32 | FPU_LOAD));
+ sljit_ins inst = VSTR_F32 | (flags & (SLJIT_32 | FPU_LOAD));
SLJIT_ASSERT(arg & SLJIT_MEM);
arg &= ~SLJIT_MEM;
if (SLJIT_UNLIKELY(arg & OFFS_REG_MASK)) {
- FAIL_IF(push_inst(compiler, ADD | RD(TMP_REG2) | RN(arg & REG_MASK) | RM(OFFS_REG(arg)) | (((sljit_uw)argw & 0x3) << 7)));
- arg = TMP_REG2;
+ FAIL_IF(push_inst(compiler, ADD | RD(TMP_REG1) | RN(arg & REG_MASK) | RM(OFFS_REG(arg)) | (((sljit_ins)argw & 0x3) << 7)));
+ arg = TMP_REG1;
argw = 0;
}
@@ -2369,25 +2677,25 @@ static sljit_s32 emit_fop_mem(struct sljit_compiler *compiler, sljit_s32 flags,
imm = get_imm((sljit_uw)argw & ~(sljit_uw)0x3fc);
if (imm) {
- FAIL_IF(push_inst(compiler, ADD | RD(TMP_REG2) | RN(arg & REG_MASK) | imm));
- return push_inst(compiler, EMIT_FPU_DATA_TRANSFER(inst, 1, TMP_REG2, reg, (argw & 0x3fc) >> 2));
+ FAIL_IF(push_inst(compiler, ADD | RD(TMP_REG1) | RN(arg & REG_MASK) | imm));
+ return push_inst(compiler, EMIT_FPU_DATA_TRANSFER(inst, 1, TMP_REG1, reg, (argw & 0x3fc) >> 2));
}
imm = get_imm((sljit_uw)-argw & ~(sljit_uw)0x3fc);
if (imm) {
argw = -argw;
- FAIL_IF(push_inst(compiler, SUB | RD(TMP_REG2) | RN(arg & REG_MASK) | imm));
- return push_inst(compiler, EMIT_FPU_DATA_TRANSFER(inst, 0, TMP_REG2, reg, (argw & 0x3fc) >> 2));
+ FAIL_IF(push_inst(compiler, SUB | RD(TMP_REG1) | RN(arg & REG_MASK) | imm));
+ return push_inst(compiler, EMIT_FPU_DATA_TRANSFER(inst, 0, TMP_REG1, reg, (argw & 0x3fc) >> 2));
}
}
if (arg) {
- FAIL_IF(load_immediate(compiler, TMP_REG2, (sljit_uw)argw));
- FAIL_IF(push_inst(compiler, ADD | RD(TMP_REG2) | RN(arg & REG_MASK) | RM(TMP_REG2)));
+ FAIL_IF(load_immediate(compiler, TMP_REG1, (sljit_uw)argw));
+ FAIL_IF(push_inst(compiler, ADD | RD(TMP_REG1) | RN(arg & REG_MASK) | RM(TMP_REG1)));
}
else
- FAIL_IF(load_immediate(compiler, TMP_REG2, (sljit_uw)argw));
+ FAIL_IF(load_immediate(compiler, TMP_REG1, (sljit_uw)argw));
- return push_inst(compiler, EMIT_FPU_DATA_TRANSFER(inst, 1, TMP_REG2, reg, 0));
+ return push_inst(compiler, EMIT_FPU_DATA_TRANSFER(inst, 1, TMP_REG1, reg, 0));
}
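
/* Illustrative sketch (not part of the patch): VLDR/VSTR only take an 8-bit
   word offset (a multiple of 4 up to 0x3fc), so emit_fop_mem() above folds
   the out-of-range part of argw into an ADD/SUB on a temporary base and
   keeps the rest as the instruction offset. Standalone check of that split
   for an aligned, positive argw: */
#include <assert.h>
#include <stdint.h>

int main(void)
{
	uint32_t argw = 0x1234;
	uint32_t base_add = argw & ~(uint32_t)0x3fc;   /* ADD tmp, base, #... */
	uint32_t word_off = (argw & 0x3fc) >> 2;       /* VLDR/VSTR imm8      */
	assert(base_add + (word_off << 2) == argw);
	return 0;
}
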
static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_compiler *compiler, sljit_s32 op,
@@ -2410,14 +2718,12 @@ static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_comp
return emit_fop_mem(compiler, 0, TMP_FREG1, dst, dstw);
}
-static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_sw(struct sljit_compiler *compiler, sljit_s32 op,
+static sljit_s32 sljit_emit_fop1_conv_f64_from_w(struct sljit_compiler *compiler, sljit_ins ins,
sljit_s32 dst, sljit_sw dstw,
sljit_s32 src, sljit_sw srcw)
{
sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
- op ^= SLJIT_32;
-
if (FAST_IS_REG(src))
FAIL_IF(push_inst(compiler, VMOV | RD(src) | VN(TMP_FREG1)));
else if (src & SLJIT_MEM) {
@@ -2429,13 +2735,27 @@ static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_sw(struct sljit_comp
FAIL_IF(push_inst(compiler, VMOV | RD(TMP_REG1) | VN(TMP_FREG1)));
}
- FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VCVT_F32_S32, op & SLJIT_32, dst_r, TMP_FREG1, 0)));
+ FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(ins, ins & SLJIT_32, dst_r, TMP_FREG1, 0)));
if (dst & SLJIT_MEM)
- return emit_fop_mem(compiler, (op & SLJIT_32), TMP_FREG1, dst, dstw);
+ return emit_fop_mem(compiler, (ins & SLJIT_32), TMP_FREG1, dst, dstw);
return SLJIT_SUCCESS;
}
+static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_sw(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
+{
+ return sljit_emit_fop1_conv_f64_from_w(compiler, VCVT_F32_S32 | (~op & SLJIT_32), dst, dstw, src, srcw);
+}
+
+static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_uw(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
+{
+ return sljit_emit_fop1_conv_f64_from_w(compiler, VCVT_F32_U32 | (~op & SLJIT_32), dst, dstw, src, srcw);
+}
+
static SLJIT_INLINE sljit_s32 sljit_emit_fop1_cmp(struct sljit_compiler *compiler, sljit_s32 op,
sljit_s32 src1, sljit_sw src1w,
sljit_s32 src2, sljit_sw src2w)
@@ -2453,7 +2773,12 @@ static SLJIT_INLINE sljit_s32 sljit_emit_fop1_cmp(struct sljit_compiler *compile
}
FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VCMP_F32, op & SLJIT_32, src1, src2, 0)));
- return push_inst(compiler, VMRS);
+ FAIL_IF(push_inst(compiler, VMRS));
+
+ if (GET_FLAG_TYPE(op) != SLJIT_UNORDERED_OR_EQUAL)
+ return SLJIT_SUCCESS;
+
+ return push_inst(compiler, (CMP - CONDITIONAL) | (0x60000000 /* VS */) | SET_FLAGS | RN(TMP_REG1) | RM(TMP_REG1));
}
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compiler, sljit_s32 op,
@@ -2480,7 +2805,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compil
switch (GET_OPCODE(op)) {
case SLJIT_MOV_F64:
if (src != dst_r) {
- if (dst_r != TMP_FREG1)
+ if (!(dst & SLJIT_MEM))
FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VMOV_F32, op & SLJIT_32, dst_r, src, 0)));
else
dst_r = src;
@@ -2534,21 +2859,23 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compil
case SLJIT_ADD_F64:
FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VADD_F32, op & SLJIT_32, dst_r, src2, src1)));
break;
-
case SLJIT_SUB_F64:
FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VSUB_F32, op & SLJIT_32, dst_r, src2, src1)));
break;
-
case SLJIT_MUL_F64:
FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VMUL_F32, op & SLJIT_32, dst_r, src2, src1)));
break;
-
case SLJIT_DIV_F64:
FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VDIV_F32, op & SLJIT_32, dst_r, src2, src1)));
break;
+ case SLJIT_COPYSIGN_F64:
+ FAIL_IF(push_inst(compiler, VMOV | (1 << 20) | VN(src2) | RD(TMP_REG1) | ((op & SLJIT_32) ? (1 << 7) : 0)));
+ FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VABS_F32, op & SLJIT_32, dst_r, src1, 0)));
+ FAIL_IF(push_inst(compiler, CMP | SET_FLAGS | RN(TMP_REG1) | SRC2_IMM | 0));
+ return push_inst(compiler, EMIT_FPU_OPERATION((VNEG_F32 & ~COND_MASK) | 0xb0000000, op & SLJIT_32, dst_r, dst_r, 0));
}
- if (dst_r == TMP_FREG1)
+ if (dst_r != dst)
FAIL_IF(emit_fop_mem(compiler, (op & SLJIT_32), TMP_FREG1, dst, dstw));
return SLJIT_SUCCESS;
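
/* Illustrative sketch (not part of the patch): the SLJIT_COPYSIGN_F64 case
   added above moves the sign-carrying word of src2 into a core register,
   takes VABS of src1, and negates the result only when that word is
   negative (the conditional VNEG after CMP #0). A plain-C model of the
   intent, assuming the usual IEEE-754 layout: */
#include <math.h>
#include <stdint.h>
#include <string.h>

static double copysign_model(double src1, double src2)
{
	uint64_t bits2;
	memcpy(&bits2, &src2, sizeof(bits2));
	int32_t sign_word = (int32_t)(bits2 >> 32);   /* VMOV sign word of src2 */
	double result = fabs(src1);                   /* VABS                   */
	if (sign_word < 0)                            /* CMP #0 ; VNEG if minus */
		result = -result;
	return result;
}
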
@@ -2556,42 +2883,120 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compil
#undef EMIT_FPU_DATA_TRANSFER
-/* --------------------------------------------------------------------- */
-/* Other instructions */
-/* --------------------------------------------------------------------- */
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fset32(struct sljit_compiler *compiler,
+ sljit_s32 freg, sljit_f32 value)
+{
+#if defined(__ARM_NEON) && __ARM_NEON
+ sljit_u32 exp;
+ sljit_ins ins;
+#endif /* NEON */
+ union {
+ sljit_u32 imm;
+ sljit_f32 value;
+ } u;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_fset32(compiler, freg, value));
+
+ u.value = value;
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
+#if defined(__ARM_NEON) && __ARM_NEON
+ if ((u.imm << (32 - 19)) == 0) {
+ exp = (u.imm >> (23 + 2)) & 0x3f;
+
+ if (exp == 0x20 || exp == 0x1f) {
+ ins = ((u.imm >> 24) & 0x80) | ((u.imm >> 19) & 0x7f);
+ return push_inst(compiler, (VMOV_F32 ^ (1 << 6)) | ((ins & 0xf0) << 12) | VD(freg) | (ins & 0xf));
+ }
+ }
+#endif /* NEON */
+
+ FAIL_IF(load_immediate(compiler, TMP_REG1, u.imm));
+ return push_inst(compiler, VMOV | VN(freg) | RD(TMP_REG1));
+}
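
/* Illustrative sketch (not part of the patch): the __ARM_NEON branch of
   sljit_emit_fset32() above tests whether the value fits VMOV's 8-bit float
   immediate, which expands "abcdefgh" to a : NOT(b) : bbbbb : cdefgh
   followed by 19 zero bits. The same test with a hypothetical name; values
   outside this set fall back to a core register load plus VMOV. */
#include <stdint.h>
#include <string.h>

static int fits_vmov_f32_imm(float value)
{
	uint32_t bits;
	memcpy(&bits, &value, sizeof(bits));
	if ((bits << 13) != 0)                   /* low 19 bits must be zero */
		return 0;
	uint32_t field = (bits >> 25) & 0x3f;    /* bits 30..25              */
	return field == 0x20 || field == 0x1f;   /* NOT(b) b b b b b         */
}
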
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fset64(struct sljit_compiler *compiler,
+ sljit_s32 freg, sljit_f64 value)
{
+#if defined(__ARM_NEON) && __ARM_NEON
+ sljit_u32 exp;
+ sljit_ins ins;
+#endif /* NEON */
+ union {
+ sljit_u32 imm[2];
+ sljit_f64 value;
+ } u;
+
CHECK_ERROR();
- CHECK(check_sljit_emit_fast_enter(compiler, dst, dstw));
- ADJUST_LOCAL_OFFSET(dst, dstw);
+ CHECK(check_sljit_emit_fset64(compiler, freg, value));
- SLJIT_ASSERT(reg_map[TMP_REG2] == 14);
+ u.value = value;
- if (FAST_IS_REG(dst))
- return push_inst(compiler, MOV | RD(dst) | RM(TMP_REG2));
+#if defined(__ARM_NEON) && __ARM_NEON
+ if (u.imm[0] == 0 && (u.imm[1] << (64 - 48)) == 0) {
+ exp = (u.imm[1] >> ((52 - 32) + 2)) & 0x1ff;
+
+ if (exp == 0x100 || exp == 0xff) {
+ ins = ((u.imm[1] >> (56 - 32)) & 0x80) | ((u.imm[1] >> (48 - 32)) & 0x7f);
+ return push_inst(compiler, (VMOV_F32 ^ (1 << 6)) | (1 << 8) | ((ins & 0xf0) << 12) | VD(freg) | (ins & 0xf));
+ }
+ }
+#endif /* NEON */
+
+ FAIL_IF(load_immediate(compiler, TMP_REG1, u.imm[0]));
+ if (u.imm[0] == u.imm[1])
+ return push_inst(compiler, VMOV2 | RN(TMP_REG1) | RD(TMP_REG1) | VM(freg));
- /* Memory. */
- return emit_op_mem(compiler, WORD_SIZE, TMP_REG2, dst, dstw, TMP_REG1);
+ FAIL_IF(load_immediate(compiler, TMP_REG2, u.imm[1]));
+ return push_inst(compiler, VMOV2 | RN(TMP_REG2) | RD(TMP_REG1) | VM(freg));
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fcopy(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 freg, sljit_s32 reg)
+{
+ sljit_s32 reg2;
+ sljit_ins inst;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_fcopy(compiler, op, freg, reg));
+
+ if (reg & REG_PAIR_MASK) {
+ reg2 = REG_PAIR_SECOND(reg);
+ reg = REG_PAIR_FIRST(reg);
+
+ inst = VMOV2 | RN(reg) | RD(reg2) | VM(freg);
+ } else {
+ inst = VMOV | VN(freg) | RD(reg);
+
+ if (!(op & SLJIT_32))
+ inst |= 1 << 7;
+ }
+
+ if (GET_OPCODE(op) == SLJIT_COPY_FROM_F64)
+ inst |= 1 << 20;
+
+ return push_inst(compiler, inst);
}
/* --------------------------------------------------------------------- */
/* Conditional instructions */
/* --------------------------------------------------------------------- */
-static sljit_uw get_cc(struct sljit_compiler *compiler, sljit_s32 type)
+static sljit_ins get_cc(struct sljit_compiler *compiler, sljit_s32 type)
{
switch (type) {
case SLJIT_EQUAL:
+ case SLJIT_ATOMIC_STORED:
case SLJIT_F_EQUAL:
case SLJIT_ORDERED_EQUAL:
- case SLJIT_UNORDERED_OR_EQUAL: /* Not supported. */
+ case SLJIT_UNORDERED_OR_EQUAL:
return 0x00000000;
case SLJIT_NOT_EQUAL:
+ case SLJIT_ATOMIC_NOT_STORED:
case SLJIT_F_NOT_EQUAL:
case SLJIT_UNORDERED_OR_NOT_EQUAL:
- case SLJIT_ORDERED_NOT_EQUAL: /* Not supported. */
+ case SLJIT_ORDERED_NOT_EQUAL:
return 0x10000000;
case SLJIT_CARRY:
@@ -2696,31 +3101,29 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compile
SLJIT_ASSERT(reg_map[TMP_REG1] != 14);
-#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
+#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
if (type >= SLJIT_FAST_CALL)
PTR_FAIL_IF(prepare_blx(compiler));
+
+ jump->addr = compiler->size;
PTR_FAIL_IF(push_inst_with_unique_literal(compiler, ((EMIT_DATA_TRANSFER(WORD_SIZE | LOAD_DATA, 1,
type <= SLJIT_JUMP ? TMP_PC : TMP_REG1, TMP_PC, 0)) & ~COND_MASK) | get_cc(compiler, type), 0));
- if (jump->flags & SLJIT_REWRITABLE_JUMP) {
- jump->addr = compiler->size;
+ if (jump->flags & SLJIT_REWRITABLE_JUMP)
compiler->patches++;
- }
if (type >= SLJIT_FAST_CALL) {
jump->flags |= IS_BL;
+ jump->addr = compiler->size;
PTR_FAIL_IF(emit_blx(compiler));
}
-
- if (!(jump->flags & SLJIT_REWRITABLE_JUMP))
- jump->addr = compiler->size;
-#else
+#else /* !SLJIT_CONFIG_ARM_V6 */
+ jump->addr = compiler->size;
if (type >= SLJIT_FAST_CALL)
jump->flags |= IS_BL;
- PTR_FAIL_IF(emit_imm(compiler, TMP_REG1, 0));
PTR_FAIL_IF(push_inst(compiler, (((type <= SLJIT_JUMP ? BX : BLX) | RM(TMP_REG1)) & ~COND_MASK) | get_cc(compiler, type)));
- jump->addr = compiler->size;
-#endif
+ compiler->size += JUMP_MAX_SIZE - 1;
+#endif /* SLJIT_CONFIG_ARM_V6 */
return jump;
}
@@ -2738,7 +3141,7 @@ static sljit_s32 softfloat_call_with_args(struct sljit_compiler *compiler, sljit
sljit_u8 *offset_ptr = offsets;
if (src && FAST_IS_REG(*src))
- src_offset = (sljit_uw)reg_map[*src] * sizeof(sljit_sw);
+ src_offset = (sljit_u32)reg_map[*src] * sizeof(sljit_sw);
arg_types >>= SLJIT_ARG_SHIFT;
@@ -2773,7 +3176,7 @@ static sljit_s32 softfloat_call_with_args(struct sljit_compiler *compiler, sljit
if (is_tail_call)
offset += sizeof(sljit_sw);
- offset = ((offset - 4 * sizeof(sljit_sw)) + 0x7) & ~(sljit_uw)0x7;
+ offset = ((offset - 4 * sizeof(sljit_sw)) + 0x7) & ~(sljit_u32)0x7;
*extra_space = offset;
@@ -2903,8 +3306,6 @@ static sljit_s32 hardfloat_call_with_args(struct sljit_compiler *compiler, sljit
#endif /* __SOFTFP__ */
-#undef EMIT_FPU_OPERATION
-
SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_call(struct sljit_compiler *compiler, sljit_s32 type,
sljit_s32 arg_types)
{
@@ -2971,7 +3372,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compi
SLJIT_ASSERT(reg_map[TMP_REG1] != 14);
- if (!(src & SLJIT_IMM)) {
+ if (src != SLJIT_IMM) {
if (FAST_IS_REG(src)) {
SLJIT_ASSERT(reg_map[src] != 14);
return push_inst(compiler, (type <= SLJIT_JUMP ? BX : BLX) | RM(src));
@@ -2988,17 +3389,20 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compi
set_jump(jump, compiler, JUMP_ADDR | ((type >= SLJIT_FAST_CALL) ? IS_BL : 0));
jump->u.target = (sljit_uw)srcw;
-#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
+#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
if (type >= SLJIT_FAST_CALL)
FAIL_IF(prepare_blx(compiler));
+ jump->addr = compiler->size;
FAIL_IF(push_inst_with_unique_literal(compiler, EMIT_DATA_TRANSFER(WORD_SIZE | LOAD_DATA, 1, type <= SLJIT_JUMP ? TMP_PC : TMP_REG1, TMP_PC, 0), 0));
- if (type >= SLJIT_FAST_CALL)
+ if (type >= SLJIT_FAST_CALL) {
+ jump->addr = compiler->size;
FAIL_IF(emit_blx(compiler));
-#else
- FAIL_IF(emit_imm(compiler, TMP_REG1, 0));
- FAIL_IF(push_inst(compiler, (type <= SLJIT_JUMP ? BX : BLX) | RM(TMP_REG1)));
-#endif
+ }
+#else /* !SLJIT_CONFIG_ARM_V6 */
jump->addr = compiler->size;
+ FAIL_IF(push_inst(compiler, (type <= SLJIT_JUMP ? BX : BLX) | RM(TMP_REG1)));
+ compiler->size += JUMP_MAX_SIZE - 1;
+#endif /* SLJIT_CONFIG_ARM_V6 */
return SLJIT_SUCCESS;
}
@@ -3096,7 +3500,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *co
sljit_s32 type)
{
sljit_s32 dst_reg, flags = GET_ALL_FLAGS(op);
- sljit_uw cc, ins;
+ sljit_ins cc, ins;
CHECK_ERROR();
CHECK(check_sljit_emit_op_flags(compiler, op, dst, dstw, type));
@@ -3132,61 +3536,114 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *co
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_cmov(struct sljit_compiler *compiler, sljit_s32 type,
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_select(struct sljit_compiler *compiler, sljit_s32 type,
sljit_s32 dst_reg,
- sljit_s32 src, sljit_sw srcw)
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2_reg)
{
- sljit_uw cc, tmp;
+ sljit_ins cc, tmp;
CHECK_ERROR();
- CHECK(check_sljit_emit_cmov(compiler, type, dst_reg, src, srcw));
+ CHECK(check_sljit_emit_select(compiler, type, dst_reg, src1, src1w, src2_reg));
+
+ ADJUST_LOCAL_OFFSET(src1, src1w);
+
+ if (src2_reg != dst_reg && src1 == dst_reg) {
+ src1 = src2_reg;
+ src1w = 0;
+ src2_reg = dst_reg;
+ type ^= 0x1;
+ }
+
+ if (src1 & SLJIT_MEM) {
+ FAIL_IF(emit_op_mem(compiler, WORD_SIZE | LOAD_DATA, (src2_reg != dst_reg) ? dst_reg : TMP_REG1, src1, src1w, TMP_REG1));
+
+ if (src2_reg != dst_reg) {
+ src1 = src2_reg;
+ src1w = 0;
+ type ^= 0x1;
+ } else {
+ src1 = TMP_REG1;
+ src1w = 0;
+ }
+ } else if (dst_reg != src2_reg)
+ FAIL_IF(push_inst(compiler, MOV | RD(dst_reg) | RM(src2_reg)));
cc = get_cc(compiler, type & ~SLJIT_32);
- if (SLJIT_UNLIKELY(src & SLJIT_IMM)) {
- tmp = get_imm((sljit_uw)srcw);
+ if (SLJIT_UNLIKELY(src1 == SLJIT_IMM)) {
+ tmp = get_imm((sljit_uw)src1w);
if (tmp)
return push_inst(compiler, ((MOV | RD(dst_reg) | tmp) & ~COND_MASK) | cc);
- tmp = get_imm(~(sljit_uw)srcw);
+ tmp = get_imm(~(sljit_uw)src1w);
if (tmp)
return push_inst(compiler, ((MVN | RD(dst_reg) | tmp) & ~COND_MASK) | cc);
#if (defined SLJIT_CONFIG_ARM_V7 && SLJIT_CONFIG_ARM_V7)
- tmp = (sljit_uw)srcw;
+ tmp = (sljit_ins)src1w;
FAIL_IF(push_inst(compiler, (MOVW & ~COND_MASK) | cc | RD(dst_reg) | ((tmp << 4) & 0xf0000) | (tmp & 0xfff)));
if (tmp <= 0xffff)
return SLJIT_SUCCESS;
return push_inst(compiler, (MOVT & ~COND_MASK) | cc | RD(dst_reg) | ((tmp >> 12) & 0xf0000) | ((tmp >> 16) & 0xfff));
-#else
- FAIL_IF(load_immediate(compiler, TMP_REG1, (sljit_uw)srcw));
- src = TMP_REG1;
-#endif
+#else /* !SLJIT_CONFIG_ARM_V7 */
+ FAIL_IF(load_immediate(compiler, TMP_REG1, (sljit_uw)src1w));
+ src1 = TMP_REG1;
+#endif /* SLJIT_CONFIG_ARM_V7 */
}
- return push_inst(compiler, ((MOV | RD(dst_reg) | RM(src)) & ~COND_MASK) | cc);
+ return push_inst(compiler, ((MOV | RD(dst_reg) | RM(src1)) & ~COND_MASK) | cc);
}
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fselect(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 dst_freg,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2_freg)
+{
+ sljit_ins cc;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_fselect(compiler, type, dst_freg, src1, src1w, src2_freg));
+
+ ADJUST_LOCAL_OFFSET(src1, src1w);
+
+ type ^= SLJIT_32;
+
+ if (dst_freg != src2_freg) {
+ if (dst_freg == src1) {
+ src1 = src2_freg;
+ src1w = 0;
+ type ^= 0x1;
+ } else
+ FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VMOV_F32, (type & SLJIT_32), dst_freg, src2_freg, 0)));
+ }
+
+ if (src1 & SLJIT_MEM) {
+ FAIL_IF(emit_fop_mem(compiler, (type & SLJIT_32) | FPU_LOAD, TMP_FREG2, src1, src1w));
+ src1 = TMP_FREG2;
+ }
+
+ cc = get_cc(compiler, type & ~SLJIT_32);
+ return push_inst(compiler, EMIT_FPU_OPERATION((VMOV_F32 & ~COND_MASK) | cc, (type & SLJIT_32), dst_freg, src1, 0));
+}
+
+#undef EMIT_FPU_OPERATION
+
static sljit_s32 update_mem_addr(struct sljit_compiler *compiler, sljit_s32 *mem, sljit_sw *memw, sljit_s32 max_offset)
{
sljit_s32 arg = *mem;
sljit_sw argw = *memw;
sljit_uw imm, tmp;
-#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
- sljit_sw mask = max_offset >= 0xf00 ? 0xfff : 0xff;
- sljit_sw sign = max_offset >= 0xf00 ? 0x1000 : 0x100;
-#else /* !SLJIT_CONFIG_ARM_V5 */
sljit_sw mask = 0xfff;
sljit_sw sign = 0x1000;
SLJIT_ASSERT(max_offset >= 0xf00);
-#endif /* SLJIT_CONFIG_ARM_V5 */
*mem = TMP_REG1;
if (SLJIT_UNLIKELY(arg & OFFS_REG_MASK)) {
*memw = 0;
- return push_inst(compiler, ADD | RD(TMP_REG1) | RN(arg & REG_MASK) | RM(OFFS_REG(arg)) | ((sljit_uw)(argw & 0x3) << 7));
+ return push_inst(compiler, ADD | RD(TMP_REG1) | RN(arg & REG_MASK) | RM(OFFS_REG(arg)) | ((sljit_ins)(argw & 0x3) << 7));
}
arg &= REG_MASK;
@@ -3234,158 +3691,6 @@ static sljit_s32 update_mem_addr(struct sljit_compiler *compiler, sljit_s32 *mem
return push_inst(compiler, ADD | RD(TMP_REG1) | RN(TMP_REG1) | RM(arg));
}
-#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
-
-static sljit_s32 sljit_emit_mem_unaligned(struct sljit_compiler *compiler, sljit_s32 type,
- sljit_s32 reg,
- sljit_s32 mem, sljit_sw memw)
-{
- sljit_s32 flags, steps, tmp_reg;
- sljit_uw add, shift;
-
- switch (type & 0xff) {
- case SLJIT_MOV_U8:
- case SLJIT_MOV_S8:
- flags = BYTE_SIZE;
- if (!(type & SLJIT_MEM_STORE))
- flags |= LOAD_DATA;
- if ((type & 0xff) == SLJIT_MOV_S8)
- flags |= SIGNED;
-
- return emit_op_mem(compiler, flags, reg, mem, memw, TMP_REG1);
-
- case SLJIT_MOV_U16:
- FAIL_IF(update_mem_addr(compiler, &mem, &memw, 0xfff - 1));
- flags = BYTE_SIZE;
- steps = 1;
- break;
-
- case SLJIT_MOV_S16:
- FAIL_IF(update_mem_addr(compiler, &mem, &memw, 0xff - 1));
- flags = BYTE_SIZE | SIGNED;
- steps = 1;
- break;
-
- default:
- if (type & SLJIT_MEM_UNALIGNED_32) {
- flags = WORD_SIZE;
- if (!(type & SLJIT_MEM_STORE))
- flags |= LOAD_DATA;
-
- return emit_op_mem(compiler, flags, reg, mem, memw, TMP_REG1);
- }
-
- if (!(type & SLJIT_MEM_UNALIGNED_16)) {
- FAIL_IF(update_mem_addr(compiler, &mem, &memw, 0xfff - 3));
- flags = BYTE_SIZE;
- steps = 3;
- break;
- }
-
- FAIL_IF(update_mem_addr(compiler, &mem, &memw, 0xff - 2));
-
- add = 1;
- if (memw < 0) {
- add = 0;
- memw = -memw;
- }
-
- tmp_reg = reg;
-
- if (type & SLJIT_MEM_STORE) {
- FAIL_IF(push_inst(compiler, EMIT_DATA_TRANSFER(HALF_SIZE, add, reg, mem, TYPE2_TRANSFER_IMM(memw))));
- FAIL_IF(push_inst(compiler, MOV | RD(TMP_REG2) | RM(reg) | (16 << 7) | (2 << 4)));
- } else {
- if (reg == mem) {
- SLJIT_ASSERT(reg != TMP_REG1);
- tmp_reg = TMP_REG1;
- }
-
- FAIL_IF(push_inst(compiler, EMIT_DATA_TRANSFER(HALF_SIZE | LOAD_DATA, add, tmp_reg, mem, TYPE2_TRANSFER_IMM(memw))));
- }
-
- if (!add) {
- memw -= 2;
- if (memw <= 0) {
- memw = -memw;
- add = 1;
- }
- } else
- memw += 2;
-
- if (type & SLJIT_MEM_STORE)
- return push_inst(compiler, EMIT_DATA_TRANSFER(HALF_SIZE, add, TMP_REG2, mem, TYPE2_TRANSFER_IMM(memw)));
-
- FAIL_IF(push_inst(compiler, EMIT_DATA_TRANSFER(HALF_SIZE | LOAD_DATA, add, TMP_REG2, mem, TYPE2_TRANSFER_IMM(memw))));
- return push_inst(compiler, ORR | RD(reg) | RN(tmp_reg) | RM(TMP_REG2) | (16 << 7));
- }
-
- SLJIT_ASSERT(steps > 0);
-
- add = 1;
- if (memw < 0) {
- add = 0;
- memw = -memw;
- }
-
- if (type & SLJIT_MEM_STORE) {
- FAIL_IF(push_inst(compiler, EMIT_DATA_TRANSFER(BYTE_SIZE, add, reg, mem, memw)));
- FAIL_IF(push_inst(compiler, MOV | RD(TMP_REG2) | RM(reg) | (8 << 7) | (2 << 4)));
-
- while (1) {
- if (!add) {
- memw -= 1;
- if (memw == 0)
- add = 1;
- } else
- memw += 1;
-
- FAIL_IF(push_inst(compiler, EMIT_DATA_TRANSFER(BYTE_SIZE, add, TMP_REG2, mem, memw)));
-
- if (--steps == 0)
- return SLJIT_SUCCESS;
-
- FAIL_IF(push_inst(compiler, MOV | RD(TMP_REG2) | RM(TMP_REG2) | (8 << 7) | (2 << 4)));
- }
- }
-
- tmp_reg = reg;
-
- if (reg == mem) {
- SLJIT_ASSERT(reg != TMP_REG1);
- tmp_reg = TMP_REG1;
- }
-
- shift = 8;
- FAIL_IF(push_inst(compiler, EMIT_DATA_TRANSFER(BYTE_SIZE | LOAD_DATA, add, tmp_reg, mem, memw)));
-
- do {
- if (!add) {
- memw -= 1;
- if (memw == 0)
- add = 1;
- } else
- memw += 1;
-
- if (steps > 1) {
- FAIL_IF(push_inst(compiler, EMIT_DATA_TRANSFER(BYTE_SIZE | LOAD_DATA, add, TMP_REG2, mem, memw)));
- FAIL_IF(push_inst(compiler, ORR | RD(tmp_reg) | RN(tmp_reg) | RM(TMP_REG2) | (shift << 7)));
- shift += 8;
- }
- } while (--steps != 0);
-
- flags |= LOAD_DATA;
-
- if (flags & SIGNED)
- FAIL_IF(push_inst(compiler, EMIT_DATA_TRANSFER(flags, add, TMP_REG2, mem, TYPE2_TRANSFER_IMM(memw))));
- else
- FAIL_IF(push_inst(compiler, EMIT_DATA_TRANSFER(flags, add, TMP_REG2, mem, memw)));
-
- return push_inst(compiler, ORR | RD(reg) | RN(tmp_reg) | RM(TMP_REG2) | (shift << 7));
-}
-
-#endif /* SLJIT_CONFIG_ARM_V5 */
-
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_mem(struct sljit_compiler *compiler, sljit_s32 type,
sljit_s32 reg,
sljit_s32 mem, sljit_sw memw)
@@ -3395,30 +3700,11 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_mem(struct sljit_compiler *compile
CHECK_ERROR();
CHECK(check_sljit_emit_mem(compiler, type, reg, mem, memw));
- if (!(reg & REG_PAIR_MASK)) {
-#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
- ADJUST_LOCAL_OFFSET(mem, memw);
-#endif /* SLJIT_CONFIG_ARM_V5 */
-
+ if (!(reg & REG_PAIR_MASK))
return sljit_emit_mem_unaligned(compiler, type, reg, mem, memw);
- }
ADJUST_LOCAL_OFFSET(mem, memw);
-#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
- if (type & (SLJIT_MEM_UNALIGNED | SLJIT_MEM_UNALIGNED_16)) {
- FAIL_IF(update_mem_addr(compiler, &mem, &memw, (type & SLJIT_MEM_UNALIGNED_16) ? 0xfff - 6 : 0xfff - 7));
-
- if (!(type & SLJIT_MEM_STORE) && REG_PAIR_FIRST(reg) == (mem & REG_MASK)) {
- FAIL_IF(sljit_emit_mem_unaligned(compiler, type, REG_PAIR_SECOND(reg), SLJIT_MEM1(mem), memw + SSIZE_OF(sw)));
- return sljit_emit_mem_unaligned(compiler, type, REG_PAIR_FIRST(reg), SLJIT_MEM1(mem), memw);
- }
-
- FAIL_IF(sljit_emit_mem_unaligned(compiler, type, REG_PAIR_FIRST(reg), SLJIT_MEM1(mem), memw));
- return sljit_emit_mem_unaligned(compiler, type, REG_PAIR_SECOND(reg), SLJIT_MEM1(mem), memw + SSIZE_OF(sw));
- }
-#endif /* SLJIT_CONFIG_ARM_V5 */
-
FAIL_IF(update_mem_addr(compiler, &mem, &memw, 0xfff - 4));
flags = WORD_SIZE;
@@ -3441,7 +3727,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_mem_update(struct sljit_compiler *
sljit_s32 mem, sljit_sw memw)
{
sljit_s32 flags;
- sljit_uw is_type1_transfer, inst;
+ sljit_ins is_type1_transfer, inst;
CHECK_ERROR();
CHECK(check_sljit_emit_mem_update(compiler, type, reg, mem, memw));
@@ -3500,7 +3786,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_mem_update(struct sljit_compiler *
if (SLJIT_UNLIKELY(mem & OFFS_REG_MASK)) {
memw &= 0x3;
- inst = EMIT_DATA_TRANSFER(flags, 1, reg, mem & REG_MASK, RM(OFFS_REG(mem)) | ((sljit_uw)memw << 7));
+ inst = EMIT_DATA_TRANSFER(flags, 1, reg, mem & REG_MASK, RM(OFFS_REG(mem)) | ((sljit_ins)memw << 7));
if (is_type1_transfer)
inst |= (1 << 25);
@@ -3526,7 +3812,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_mem_update(struct sljit_compiler *
else
memw = -memw;
- return push_inst(compiler, inst | (sljit_uw)memw);
+ return push_inst(compiler, inst | (sljit_ins)memw);
}
if (memw >= 0)
@@ -3534,106 +3820,752 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_mem_update(struct sljit_compiler *
else
memw = -memw;
- return push_inst(compiler, inst | TYPE2_TRANSFER_IMM((sljit_uw)memw));
+ return push_inst(compiler, inst | TYPE2_TRANSFER_IMM((sljit_ins)memw));
}
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fmem(struct sljit_compiler *compiler, sljit_s32 type,
sljit_s32 freg,
sljit_s32 mem, sljit_sw memw)
{
-#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
- sljit_s32 max_offset;
- sljit_s32 dst;
-#endif /* SLJIT_CONFIG_ARM_V5 */
-
CHECK_ERROR();
CHECK(check_sljit_emit_fmem(compiler, type, freg, mem, memw));
- if (type & SLJIT_MEM_UNALIGNED_32)
+ if (type & SLJIT_MEM_ALIGNED_32)
return emit_fop_mem(compiler, ((type ^ SLJIT_32) & SLJIT_32) | ((type & SLJIT_MEM_STORE) ? 0 : FPU_LOAD), freg, mem, memw);
-#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
if (type & SLJIT_MEM_STORE) {
FAIL_IF(push_inst(compiler, VMOV | (1 << 20) | VN(freg) | RD(TMP_REG2)));
if (type & SLJIT_32)
- return sljit_emit_mem_unaligned(compiler, SLJIT_MOV | SLJIT_MEM_STORE | (type & SLJIT_MEM_UNALIGNED_16), TMP_REG2, mem, memw);
-
- max_offset = 0xfff - 7;
- if (type & SLJIT_MEM_UNALIGNED_16)
- max_offset++;
+ return emit_op_mem(compiler, WORD_SIZE, TMP_REG2, mem, memw, TMP_REG1);
- FAIL_IF(update_mem_addr(compiler, &mem, &memw, max_offset));
+ FAIL_IF(update_mem_addr(compiler, &mem, &memw, 0xfff - 4));
mem |= SLJIT_MEM;
- FAIL_IF(sljit_emit_mem_unaligned(compiler, SLJIT_MOV | SLJIT_MEM_STORE | (type & SLJIT_MEM_UNALIGNED_16), TMP_REG2, mem, memw));
-
+ FAIL_IF(emit_op_mem(compiler, WORD_SIZE, TMP_REG2, mem, memw, TMP_REG1));
FAIL_IF(push_inst(compiler, VMOV | (1 << 20) | VN(freg) | 0x80 | RD(TMP_REG2)));
- return sljit_emit_mem_unaligned(compiler, SLJIT_MOV | SLJIT_MEM_STORE | (type & SLJIT_MEM_UNALIGNED_16), TMP_REG2, mem, memw + 4);
+ return emit_op_mem(compiler, WORD_SIZE, TMP_REG2, mem, memw + 4, TMP_REG1);
}
- max_offset = (type & SLJIT_32) ? 0xfff - 3 : 0xfff - 7;
- if (type & SLJIT_MEM_UNALIGNED_16)
- max_offset++;
+ if (type & SLJIT_32) {
+ FAIL_IF(emit_op_mem(compiler, WORD_SIZE | LOAD_DATA, TMP_REG2, mem, memw, TMP_REG1));
+ return push_inst(compiler, VMOV | VN(freg) | RD(TMP_REG2));
+ }
- FAIL_IF(update_mem_addr(compiler, &mem, &memw, max_offset));
+ FAIL_IF(update_mem_addr(compiler, &mem, &memw, 0xfff - 4));
+ mem |= SLJIT_MEM;
- dst = TMP_REG1;
+ FAIL_IF(emit_op_mem(compiler, WORD_SIZE | LOAD_DATA, TMP_REG2, mem, memw, TMP_REG1));
+ FAIL_IF(emit_op_mem(compiler, WORD_SIZE | LOAD_DATA, TMP_REG1, mem, memw + 4, TMP_REG1));
+ return push_inst(compiler, VMOV2 | VM(freg) | RD(TMP_REG2) | RN(TMP_REG1));
+}
- /* Stack offset adjustment is not needed because dst
- is not stored on the stack when mem is SLJIT_SP. */
+static sljit_s32 sljit_emit_simd_mem_offset(struct sljit_compiler *compiler, sljit_s32 *mem_ptr, sljit_sw memw)
+{
+ sljit_s32 mem = *mem_ptr;
+ sljit_uw imm;
- if (mem == TMP_REG1) {
- dst = SLJIT_R3;
+ if (SLJIT_UNLIKELY(mem & OFFS_REG_MASK)) {
+ *mem_ptr = TMP_REG1;
+ return push_inst(compiler, ADD | RD(TMP_REG1) | RN(mem & REG_MASK) | RM(OFFS_REG(mem)) | ((sljit_ins)(memw & 0x3) << 7));
+ }
- if (compiler->scratches >= 4)
- FAIL_IF(push_inst(compiler, STR | (1 << 21) | RN(SLJIT_SP) | RD(SLJIT_R3) | 8));
+ if (SLJIT_UNLIKELY(!(mem & REG_MASK))) {
+ *mem_ptr = TMP_REG1;
+ return load_immediate(compiler, TMP_REG1, (sljit_uw)memw);
}
- mem |= SLJIT_MEM;
+ mem &= REG_MASK;
+
+ if (memw == 0) {
+ *mem_ptr = mem;
+ return SLJIT_SUCCESS;
+ }
+
+ *mem_ptr = TMP_REG1;
+ imm = get_imm((sljit_uw)(memw < 0 ? -memw : memw));
+
+ if (imm != 0)
+ return push_inst(compiler, ((memw < 0) ? SUB : ADD) | RD(TMP_REG1) | RN(mem) | imm);
+
+ FAIL_IF(load_immediate(compiler, TMP_REG1, (sljit_uw)memw));
+ return push_inst(compiler, ADD | RD(TMP_REG1) | RN(TMP_REG1) | RM(mem));
+}
- FAIL_IF(sljit_emit_mem_unaligned(compiler, SLJIT_MOV | (type & SLJIT_MEM_UNALIGNED_16), dst, mem, memw));
- FAIL_IF(push_inst(compiler, VMOV | VN(freg) | RD(dst)));
+static SLJIT_INLINE sljit_s32 simd_get_quad_reg_index(sljit_s32 freg)
+{
+ freg += freg & 0x1;
+
+ SLJIT_ASSERT((freg_map[freg] & 0x1) == (freg <= SLJIT_NUMBER_OF_SCRATCH_FLOAT_REGISTERS));
+
+ if (freg <= SLJIT_NUMBER_OF_SCRATCH_FLOAT_REGISTERS)
+ freg--;
+
+ return freg;
+}
+
+#define SLJIT_QUAD_OTHER_HALF(freg) ((((freg) & 0x1) << 1) - 1)
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_mov(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
+ sljit_s32 srcdst, sljit_sw srcdstw)
+{
+ sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
+ sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
+ sljit_s32 alignment = SLJIT_SIMD_GET_ELEM2_SIZE(type);
+ sljit_ins ins;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_mov(compiler, type, freg, srcdst, srcdstw));
+
+ ADJUST_LOCAL_OFFSET(srcdst, srcdstw);
+
+ if (reg_size != 3 && reg_size != 4)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if ((type & SLJIT_SIMD_FLOAT) && (elem_size < 2 || elem_size > 3))
+ return SLJIT_ERR_UNSUPPORTED;
- if (!(type & SLJIT_32)) {
- FAIL_IF(sljit_emit_mem_unaligned(compiler, SLJIT_MOV | (type & SLJIT_MEM_UNALIGNED_16), dst, mem, memw + 4));
- FAIL_IF(push_inst(compiler, VMOV | VN(freg) | 0x80 | RD(dst)));
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ if (reg_size == 4)
+ freg = simd_get_quad_reg_index(freg);
+
+ if (!(srcdst & SLJIT_MEM)) {
+ if (reg_size == 4)
+ srcdst = simd_get_quad_reg_index(srcdst);
+
+ if (type & SLJIT_SIMD_STORE)
+ ins = VD(srcdst) | VN(freg) | VM(freg);
+ else
+ ins = VD(freg) | VN(srcdst) | VM(srcdst);
+
+ if (reg_size == 4)
+ ins |= (sljit_ins)1 << 6;
+
+ return push_inst(compiler, VORR | ins);
}
- if (dst == SLJIT_R3 && compiler->scratches >= 4)
- FAIL_IF(push_inst(compiler, (LDR ^ (0x1 << 24)) | (0x1 << 23) | RN(SLJIT_SP) | RD(SLJIT_R3) | 8));
- return SLJIT_SUCCESS;
-#else /* !SLJIT_CONFIG_ARM_V5 */
- if (type & SLJIT_MEM_STORE) {
- FAIL_IF(push_inst(compiler, VMOV | (1 << 20) | VN(freg) | RD(TMP_REG2)));
+ FAIL_IF(sljit_emit_simd_mem_offset(compiler, &srcdst, srcdstw));
- if (type & SLJIT_32)
- return emit_op_mem(compiler, WORD_SIZE, TMP_REG2, mem, memw, TMP_REG1);
+ if (elem_size > 3)
+ elem_size = 3;
- FAIL_IF(update_mem_addr(compiler, &mem, &memw, 0xfff - 4));
- mem |= SLJIT_MEM;
+ ins = ((type & SLJIT_SIMD_STORE) ? VST1 : VLD1) | VD(freg)
+ | (sljit_ins)((reg_size == 3) ? (0x7 << 8) : (0xa << 8));
- FAIL_IF(emit_op_mem(compiler, WORD_SIZE, TMP_REG2, mem, memw, TMP_REG1));
- FAIL_IF(push_inst(compiler, VMOV | (1 << 20) | VN(freg) | 0x80 | RD(TMP_REG2)));
- return emit_op_mem(compiler, WORD_SIZE, TMP_REG2, mem, memw + 4, TMP_REG1);
+ SLJIT_ASSERT(reg_size >= alignment);
+
+ if (alignment == 3)
+ ins |= 0x10;
+ else if (alignment >= 3)
+ ins |= 0x20;
+
+ return push_inst(compiler, ins | RN(srcdst) | ((sljit_ins)elem_size) << 6 | 0xf);
+}
+
+static sljit_ins simd_get_imm(sljit_s32 elem_size, sljit_uw value)
+{
+ sljit_ins result;
+
+ if (elem_size > 1 && (sljit_u16)value == (value >> 16)) {
+ elem_size = 1;
+ value = (sljit_u16)value;
}
- if (type & SLJIT_32) {
- FAIL_IF(emit_op_mem(compiler, WORD_SIZE | LOAD_DATA, TMP_REG2, mem, memw, TMP_REG1));
- return push_inst(compiler, VMOV | VN(freg) | RD(TMP_REG2));
+ if (elem_size == 1 && (sljit_u8)value == (value >> 8)) {
+ elem_size = 0;
+ value = (sljit_u8)value;
}
- FAIL_IF(update_mem_addr(compiler, &mem, &memw, 0xfff - 4));
- mem |= SLJIT_MEM;
+ switch (elem_size) {
+ case 0:
+ SLJIT_ASSERT(value <= 0xff);
+ result = 0xe00;
+ break;
+ case 1:
+ SLJIT_ASSERT(value <= 0xffff);
+ result = 0;
- FAIL_IF(emit_op_mem(compiler, WORD_SIZE | LOAD_DATA, TMP_REG2, mem, memw, TMP_REG1));
- FAIL_IF(emit_op_mem(compiler, WORD_SIZE | LOAD_DATA, TMP_REG1, mem, memw + 4, TMP_REG1));
- return push_inst(compiler, VMOV2 | VM(freg) | RD(TMP_REG2) | RN(TMP_REG1));
-#endif /* SLJIT_CONFIG_ARM_V5 */
+ while (1) {
+ if (value <= 0xff) {
+ result |= 0x800;
+ break;
+ }
+
+ if ((value & 0xff) == 0) {
+ value >>= 8;
+ result |= 0xa00;
+ break;
+ }
+
+ if (result != 0)
+ return ~(sljit_ins)0;
+
+ value ^= (sljit_uw)0xffff;
+ result = (1 << 5);
+ }
+ break;
+ default:
+ SLJIT_ASSERT(value <= 0xffffffff);
+ result = 0;
+
+ while (1) {
+ if (value <= 0xff) {
+ result |= 0x000;
+ break;
+ }
+
+ if ((value & ~(sljit_uw)0xff00) == 0) {
+ value >>= 8;
+ result |= 0x200;
+ break;
+ }
+
+ if ((value & ~(sljit_uw)0xff0000) == 0) {
+ value >>= 16;
+ result |= 0x400;
+ break;
+ }
+
+ if ((value & ~(sljit_uw)0xff000000) == 0) {
+ value >>= 24;
+ result |= 0x600;
+ break;
+ }
+
+ if ((value & (sljit_uw)0xff) == 0xff && (value >> 16) == 0) {
+ value >>= 8;
+ result |= 0xc00;
+ break;
+ }
+
+ if ((value & (sljit_uw)0xffff) == 0xffff && (value >> 24) == 0) {
+ value >>= 16;
+ result |= 0xd00;
+ break;
+ }
+
+ if (result != 0)
+ return ~(sljit_ins)0;
+
+ value = ~value;
+ result = (1 << 5);
+ }
+ break;
+ }
+
+ return ((sljit_ins)value & 0xf) | (((sljit_ins)value & 0x70) << 12) | (((sljit_ins)value & 0x80) << 17) | result;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_replicate(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
+ sljit_s32 src, sljit_sw srcw)
+{
+ sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
+ sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
+ sljit_ins ins, imm;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_replicate(compiler, type, freg, src, srcw));
+
+ ADJUST_LOCAL_OFFSET(src, srcw);
+
+ if (reg_size != 3 && reg_size != 4)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if ((type & SLJIT_SIMD_FLOAT) ? (elem_size < 2 || elem_size > 3) : (elem_size > 2))
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ if (reg_size == 4)
+ freg = simd_get_quad_reg_index(freg);
+
+ if (src == SLJIT_IMM && srcw == 0)
+ return push_inst(compiler, VMOV_i | ((reg_size == 4) ? (1 << 6) : 0) | VD(freg));
+
+ if (SLJIT_UNLIKELY(elem_size == 3)) {
+ SLJIT_ASSERT(type & SLJIT_SIMD_FLOAT);
+
+ if (src & SLJIT_MEM) {
+ FAIL_IF(emit_fop_mem(compiler, FPU_LOAD | SLJIT_32, freg, src, srcw));
+ src = freg;
+ } else if (freg != src)
+ FAIL_IF(push_inst(compiler, VORR | VD(freg) | VN(src) | VM(src)));
+
+ freg += SLJIT_QUAD_OTHER_HALF(freg);
+
+ if (freg != src)
+ return push_inst(compiler, VORR | VD(freg) | VN(src) | VM(src));
+ return SLJIT_SUCCESS;
+ }
+
+ if (src & SLJIT_MEM) {
+ FAIL_IF(sljit_emit_simd_mem_offset(compiler, &src, srcw));
+
+ ins = (sljit_ins)(elem_size << 6);
+
+ if (reg_size == 4)
+ ins |= (sljit_ins)1 << 5;
+
+ return push_inst(compiler, VLD1_r | ins | VD(freg) | RN(src) | 0xf);
+ }
+
+ if (type & SLJIT_SIMD_FLOAT) {
+ SLJIT_ASSERT(elem_size == 2);
+ ins = ((sljit_ins)freg_ebit_map[src] << (16 + 2 + 1)) | ((sljit_ins)1 << (16 + 2));
+
+ if (reg_size == 4)
+ ins |= (sljit_ins)1 << 6;
+
+ return push_inst(compiler, VDUP_s | ins | VD(freg) | (sljit_ins)freg_map[src]);
+ }
+
+ if (src == SLJIT_IMM) {
+ if (elem_size < 2)
+ srcw &= ((sljit_sw)1 << (((sljit_sw)1 << elem_size) << 3)) - 1;
+
+ imm = simd_get_imm(elem_size, (sljit_uw)srcw);
+
+ if (imm != ~(sljit_ins)0) {
+ if (reg_size == 4)
+ imm |= (sljit_ins)1 << 6;
+
+ return push_inst(compiler, VMOV_i | imm | VD(freg));
+ }
+
+ FAIL_IF(load_immediate(compiler, TMP_REG1, (sljit_uw)srcw));
+ src = TMP_REG1;
+ }
+
+ switch (elem_size) {
+ case 0:
+ ins = 1 << 22;
+ break;
+ case 1:
+ ins = 1 << 5;
+ break;
+ default:
+ ins = 0;
+ break;
+ }
+
+ if (reg_size == 4)
+ ins |= (sljit_ins)1 << 21;
+
+ return push_inst(compiler, VDUP | ins | VN(freg) | RD(src));
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_lane_mov(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg, sljit_s32 lane_index,
+ sljit_s32 srcdst, sljit_sw srcdstw)
+{
+ sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
+ sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
+ sljit_ins ins;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_lane_mov(compiler, type, freg, lane_index, srcdst, srcdstw));
+
+ ADJUST_LOCAL_OFFSET(srcdst, srcdstw);
+
+ if (reg_size != 3 && reg_size != 4)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if ((type & SLJIT_SIMD_FLOAT) ? (elem_size < 2 || elem_size > 3) : (elem_size > 2))
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ if (reg_size == 4)
+ freg = simd_get_quad_reg_index(freg);
+
+ if (type & SLJIT_SIMD_LANE_ZERO) {
+ ins = (reg_size == 3) ? 0 : ((sljit_ins)1 << 6);
+
+ if (type & SLJIT_SIMD_FLOAT) {
+ if (elem_size == 3 && !(srcdst & SLJIT_MEM)) {
+ if (lane_index == 1)
+ freg += SLJIT_QUAD_OTHER_HALF(freg);
+
+ if (srcdst != freg)
+ FAIL_IF(push_inst(compiler, VORR | VD(freg) | VN(srcdst) | VM(srcdst)));
+
+ freg += SLJIT_QUAD_OTHER_HALF(freg);
+ return push_inst(compiler, VMOV_i | VD(freg));
+ }
+
+ if (srcdst == freg || (elem_size == 3 && srcdst == (freg + SLJIT_QUAD_OTHER_HALF(freg)))) {
+ FAIL_IF(push_inst(compiler, VORR | ins | VD(TMP_FREG2) | VN(freg) | VM(freg)));
+ srcdst = TMP_FREG2;
+ srcdstw = 0;
+ }
+ }
+
+ FAIL_IF(push_inst(compiler, VMOV_i | ins | VD(freg)));
+ }
+
+ if (reg_size == 4 && lane_index >= (0x8 >> elem_size)) {
+ lane_index -= (0x8 >> elem_size);
+ freg += SLJIT_QUAD_OTHER_HALF(freg);
+ }
+
+ if (srcdst & SLJIT_MEM) {
+ if (elem_size == 3)
+ return emit_fop_mem(compiler, ((type & SLJIT_SIMD_STORE) ? 0 : FPU_LOAD) | SLJIT_32, freg, srcdst, srcdstw);
+
+ FAIL_IF(sljit_emit_simd_mem_offset(compiler, &srcdst, srcdstw));
+
+ lane_index = lane_index << elem_size;
+ ins = (sljit_ins)((elem_size << 10) | (lane_index << 5));
+ return push_inst(compiler, ((type & SLJIT_SIMD_STORE) ? VST1_s : VLD1_s) | ins | VD(freg) | RN(srcdst) | 0xf);
+ }
+
+ if (type & SLJIT_SIMD_FLOAT) {
+ if (elem_size == 3) {
+ if (type & SLJIT_SIMD_STORE)
+ return push_inst(compiler, VORR | VD(srcdst) | VN(freg) | VM(freg));
+ return push_inst(compiler, VMOV_F32 | SLJIT_32 | VD(freg) | VM(srcdst));
+ }
+
+ if (type & SLJIT_SIMD_STORE) {
+ if (freg_ebit_map[freg] == 0) {
+ if (lane_index == 1)
+ freg = SLJIT_F64_SECOND(freg);
+
+ return push_inst(compiler, VMOV_F32 | VD(srcdst) | VM(freg));
+ }
+
+ FAIL_IF(push_inst(compiler, VMOV_s | (1 << 20) | ((sljit_ins)lane_index << 21) | VN(freg) | RD(TMP_REG1)));
+ return push_inst(compiler, VMOV | VN(srcdst) | RD(TMP_REG1));
+ }
+
+ FAIL_IF(push_inst(compiler, VMOV | (1 << 20) | VN(srcdst) | RD(TMP_REG1)));
+ return push_inst(compiler, VMOV_s | ((sljit_ins)lane_index << 21) | VN(freg) | RD(TMP_REG1));
+ }
+
+ if (srcdst == SLJIT_IMM) {
+ if (elem_size < 2)
+ srcdstw &= ((sljit_sw)1 << (((sljit_sw)1 << elem_size) << 3)) - 1;
+
+ FAIL_IF(load_immediate(compiler, TMP_REG1, (sljit_uw)srcdstw));
+ srcdst = TMP_REG1;
+ }
+
+ if (elem_size == 0)
+ ins = 0x400000;
+ else if (elem_size == 1)
+ ins = 0x20;
+ else
+ ins = 0;
+
+ lane_index = lane_index << elem_size;
+ ins |= (sljit_ins)(((lane_index & 0x4) << 19) | ((lane_index & 0x3) << 5));
+
+ if (type & SLJIT_SIMD_STORE) {
+ ins |= (1 << 20);
+
+ if (elem_size < 2 && !(type & SLJIT_SIMD_LANE_SIGNED))
+ ins |= (1 << 23);
+ }
+
+ return push_inst(compiler, VMOV_s | ins | VN(freg) | RD(srcdst));
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_lane_replicate(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
+ sljit_s32 src, sljit_s32 src_lane_index)
+{
+ sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
+ sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
+ sljit_ins ins;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_lane_replicate(compiler, type, freg, src, src_lane_index));
+
+ if (reg_size != 3 && reg_size != 4)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if ((type & SLJIT_SIMD_FLOAT) && (elem_size < 2 || elem_size > 3))
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ if (reg_size == 4) {
+ freg = simd_get_quad_reg_index(freg);
+ src = simd_get_quad_reg_index(src);
+
+ if (src_lane_index >= (0x8 >> elem_size)) {
+ src_lane_index -= (0x8 >> elem_size);
+ src += SLJIT_QUAD_OTHER_HALF(src);
+ }
+ }
+
+ if (elem_size == 3) {
+ if (freg != src)
+ FAIL_IF(push_inst(compiler, VORR | VD(freg) | VN(src) | VM(src)));
+
+ freg += SLJIT_QUAD_OTHER_HALF(freg);
+
+ if (freg != src)
+ return push_inst(compiler, VORR | VD(freg) | VN(src) | VM(src));
+ return SLJIT_SUCCESS;
+ }
+
+ ins = ((((sljit_ins)src_lane_index << 1) | 1) << (16 + elem_size));
+
+ if (reg_size == 4)
+ ins |= (sljit_ins)1 << 6;
+
+ return push_inst(compiler, VDUP_s | ins | VD(freg) | VM(src));
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_extend(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
+ sljit_s32 src, sljit_sw srcw)
+{
+ sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
+ sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
+ sljit_s32 elem2_size = SLJIT_SIMD_GET_ELEM2_SIZE(type);
+ sljit_s32 dst_reg;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_extend(compiler, type, freg, src, srcw));
+
+ ADJUST_LOCAL_OFFSET(src, srcw);
+
+ if (reg_size != 3 && reg_size != 4)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if ((type & SLJIT_SIMD_FLOAT) && (elem_size != 2 || elem2_size != 3))
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ if (reg_size == 4)
+ freg = simd_get_quad_reg_index(freg);
+
+ if (src & SLJIT_MEM) {
+ FAIL_IF(sljit_emit_simd_mem_offset(compiler, &src, srcw));
+ if (reg_size == 4 && elem2_size - elem_size == 1)
+ FAIL_IF(push_inst(compiler, VLD1 | (0x7 << 8) | VD(freg) | RN(src) | 0xf));
+ else
+ FAIL_IF(push_inst(compiler, VLD1_s | (sljit_ins)((reg_size - elem2_size + elem_size) << 10) | VD(freg) | RN(src) | 0xf));
+ src = freg;
+ } else if (reg_size == 4)
+ src = simd_get_quad_reg_index(src);
+
+ if (!(type & SLJIT_SIMD_FLOAT)) {
+ dst_reg = (reg_size == 4) ? freg : TMP_FREG2;
+
+ do {
+ FAIL_IF(push_inst(compiler, VSHLL | ((type & SLJIT_SIMD_EXTEND_SIGNED) ? 0 : (1 << 24))
+ | ((sljit_ins)1 << (19 + elem_size)) | VD(dst_reg) | VM(src)));
+ src = dst_reg;
+ } while (++elem_size < elem2_size);
+
+ if (dst_reg == TMP_FREG2)
+ return push_inst(compiler, VORR | VD(freg) | VN(TMP_FREG2) | VM(TMP_FREG2));
+ return SLJIT_SUCCESS;
+ }
+
+ /* No SIMD variant, must use VFP instead. */
+ SLJIT_ASSERT(reg_size == 4);
+
+ if (freg == src) {
+ freg += SLJIT_QUAD_OTHER_HALF(freg);
+ FAIL_IF(push_inst(compiler, VCVT_F64_F32 | VD(freg) | VM(src) | 0x20));
+ freg += SLJIT_QUAD_OTHER_HALF(freg);
+ return push_inst(compiler, VCVT_F64_F32 | VD(freg) | VM(src));
+ }
+
+ FAIL_IF(push_inst(compiler, VCVT_F64_F32 | VD(freg) | VM(src)));
+ freg += SLJIT_QUAD_OTHER_HALF(freg);
+ return push_inst(compiler, VCVT_F64_F32 | VD(freg) | VM(src) | 0x20);
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_sign(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
+ sljit_s32 dst, sljit_sw dstw)
+{
+ sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
+ sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
+ sljit_ins ins, imms;
+ sljit_s32 dst_r;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_sign(compiler, type, freg, dst, dstw));
+
+ ADJUST_LOCAL_OFFSET(dst, dstw);
+
+ if (reg_size != 3 && reg_size != 4)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if ((type & SLJIT_SIMD_FLOAT) && (elem_size < 2 || elem_size > 3))
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ switch (elem_size) {
+ case 0:
+ imms = 0x243219;
+ ins = VSHR | (1 << 24) | (0x9 << 16);
+ break;
+ case 1:
+ imms = (reg_size == 4) ? 0x243219 : 0x2231;
+ ins = VSHR | (1 << 24) | (0x11 << 16);
+ break;
+ case 2:
+ imms = (reg_size == 4) ? 0x2231 : 0x21;
+ ins = VSHR | (1 << 24) | (0x21 << 16);
+ break;
+ default:
+ imms = 0x21;
+ ins = VSHR | (1 << 24) | (0x1 << 16) | (1 << 7);
+ break;
+ }
+
+ if (reg_size == 4) {
+ freg = simd_get_quad_reg_index(freg);
+ ins |= (sljit_ins)1 << 6;
+ }
+
+ SLJIT_ASSERT((freg_map[TMP_FREG2] & 0x1) == 0);
+ FAIL_IF(push_inst(compiler, ins | VD(TMP_FREG2) | VM(freg)));
+
+ if (reg_size == 4 && elem_size > 0)
+ FAIL_IF(push_inst(compiler, VMOVN | ((sljit_ins)(elem_size - 1) << 18) | VD(TMP_FREG2) | VM(TMP_FREG2)));
+
+ ins = (reg_size == 4 && elem_size == 0) ? (1 << 6) : 0;
+
+ while (imms >= 0x100) {
+ FAIL_IF(push_inst(compiler, VSRA | (1 << 24) | ins | ((imms & 0xff) << 16) | VD(TMP_FREG2) | VM(TMP_FREG2)));
+ imms >>= 8;
+ }
+
+ FAIL_IF(push_inst(compiler, VSRA | (1 << 24) | ins | (1 << 7) | (imms << 16) | VD(TMP_FREG2) | VM(TMP_FREG2)));
+
+ dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
+ FAIL_IF(push_inst(compiler, VMOV_s | (1 << 20) | (1 << 23) | (0x2 << 21) | RD(dst_r) | VN(TMP_FREG2)));
+
+ if (reg_size == 4 && elem_size == 0) {
+ SLJIT_ASSERT(freg_map[TMP_FREG2] + 1 == freg_map[TMP_FREG1]);
+ FAIL_IF(push_inst(compiler, VMOV_s | (1 << 20) | (1 << 23) | (0x2 << 21) | RD(TMP_REG2) | VN(TMP_FREG1)));
+ FAIL_IF(push_inst(compiler, ORR | RD(dst_r) | RN(dst_r) | RM(TMP_REG2) | (0x8 << 7)));
+ }
+
+ if (dst_r == TMP_REG1)
+ return emit_op_mem(compiler, WORD_SIZE, TMP_REG1, dst, dstw, TMP_REG2);
+
+ return SLJIT_SUCCESS;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_op2(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 dst_freg, sljit_s32 src1_freg, sljit_s32 src2_freg)
+{
+ sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
+ sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
+ sljit_ins ins = 0;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_op2(compiler, type, dst_freg, src1_freg, src2_freg));
+
+ if (reg_size != 3 && reg_size != 4)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if ((type & SLJIT_SIMD_FLOAT) && (elem_size < 2 || elem_size > 3))
+ return SLJIT_ERR_UNSUPPORTED;
+
+ switch (SLJIT_SIMD_GET_OPCODE(type)) {
+ case SLJIT_SIMD_OP2_AND:
+ ins = VAND;
+ break;
+ case SLJIT_SIMD_OP2_OR:
+ ins = VORR;
+ break;
+ case SLJIT_SIMD_OP2_XOR:
+ ins = VEOR;
+ break;
+ }
+
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ if (reg_size == 4) {
+ dst_freg = simd_get_quad_reg_index(dst_freg);
+ src1_freg = simd_get_quad_reg_index(src1_freg);
+ src2_freg = simd_get_quad_reg_index(src2_freg);
+ ins |= (sljit_ins)1 << 6;
+ }
+
+ return push_inst(compiler, ins | VD(dst_freg) | VN(src1_freg) | VM(src2_freg));
}
#undef FPU_LOAD
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_atomic_load(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst_reg,
+ sljit_s32 mem_reg)
+{
+ sljit_u32 ins;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_atomic_load(compiler, op, dst_reg, mem_reg));
+
+ switch (GET_OPCODE(op)) {
+ case SLJIT_MOV_U8:
+ ins = LDREXB;
+ break;
+ case SLJIT_MOV_U16:
+ ins = LDREXH;
+ break;
+ default:
+ ins = LDREX;
+ break;
+ }
+
+ return push_inst(compiler, ins | RN(mem_reg) | RD(dst_reg));
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_atomic_store(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 src_reg,
+ sljit_s32 mem_reg,
+ sljit_s32 temp_reg)
+{
+ sljit_u32 ins;
+
+ /* temp_reg == mem_reg is undefined so use another temp register */
+ SLJIT_UNUSED_ARG(temp_reg);
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_atomic_store(compiler, op, src_reg, mem_reg, temp_reg));
+
+ switch (GET_OPCODE(op)) {
+ case SLJIT_MOV_U8:
+ ins = STREXB;
+ break;
+ case SLJIT_MOV_U16:
+ ins = STREXH;
+ break;
+ default:
+ ins = STREX;
+ break;
+ }
+
+ FAIL_IF(push_inst(compiler, ins | RN(mem_reg) | RD(TMP_REG1) | RM(src_reg)));
+ if (op & SLJIT_SET_ATOMIC_STORED)
+ return push_inst(compiler, CMP | SET_FLAGS | SRC2_IMM | RN(TMP_REG1));
+
+ return SLJIT_SUCCESS;
+}
+
SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw init_value)
{
struct sljit_const *const_;
@@ -3643,58 +4575,62 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compi
CHECK_PTR(check_sljit_emit_const(compiler, dst, dstw, init_value));
ADJUST_LOCAL_OFFSET(dst, dstw);
+ const_ = (struct sljit_const*)ensure_abuf(compiler, sizeof(struct sljit_const));
+ PTR_FAIL_IF(!const_);
+ set_const(const_, compiler);
+
dst_r = FAST_IS_REG(dst) ? dst : TMP_REG2;
-#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
+#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
PTR_FAIL_IF(push_inst_with_unique_literal(compiler,
- EMIT_DATA_TRANSFER(WORD_SIZE | LOAD_DATA, 1, dst_r, TMP_PC, 0), (sljit_uw)init_value));
+ EMIT_DATA_TRANSFER(WORD_SIZE | LOAD_DATA, 1, dst_r, TMP_PC, 0), (sljit_ins)init_value));
compiler->patches++;
-#else
+#else /* !SLJIT_CONFIG_ARM_V6 */
PTR_FAIL_IF(emit_imm(compiler, dst_r, init_value));
-#endif
-
- const_ = (struct sljit_const*)ensure_abuf(compiler, sizeof(struct sljit_const));
- PTR_FAIL_IF(!const_);
- set_const(const_, compiler);
+#endif /* SLJIT_CONFIG_ARM_V6 */
if (dst & SLJIT_MEM)
PTR_FAIL_IF(emit_op_mem(compiler, WORD_SIZE, TMP_REG2, dst, dstw, TMP_REG1));
return const_;
}
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_put_label* sljit_emit_put_label(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_mov_addr(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
{
- struct sljit_put_label *put_label;
+ struct sljit_jump *jump;
sljit_s32 dst_r;
CHECK_ERROR_PTR();
- CHECK_PTR(check_sljit_emit_put_label(compiler, dst, dstw));
+ CHECK_PTR(check_sljit_emit_mov_addr(compiler, dst, dstw));
ADJUST_LOCAL_OFFSET(dst, dstw);
dst_r = FAST_IS_REG(dst) ? dst : TMP_REG2;
-#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
+#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
PTR_FAIL_IF(push_inst_with_unique_literal(compiler, EMIT_DATA_TRANSFER(WORD_SIZE | LOAD_DATA, 1, dst_r, TMP_PC, 0), 0));
compiler->patches++;
-#else
- PTR_FAIL_IF(emit_imm(compiler, dst_r, 0));
-#endif
+#else /* !SLJIT_CONFIG_ARM_V6 */
+ PTR_FAIL_IF(push_inst(compiler, RD(dst_r)));
+#endif /* SLJIT_CONFIG_ARM_V6 */
- put_label = (struct sljit_put_label*)ensure_abuf(compiler, sizeof(struct sljit_put_label));
- PTR_FAIL_IF(!put_label);
- set_put_label(put_label, compiler, 0);
+ jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
+ PTR_FAIL_IF(!jump);
+ set_mov_addr(jump, compiler, 1);
+
+#if (defined SLJIT_CONFIG_ARM_V7 && SLJIT_CONFIG_ARM_V7)
+ compiler->size += 1;
+#endif /* SLJIT_CONFIG_ARM_V7 */
if (dst & SLJIT_MEM)
PTR_FAIL_IF(emit_op_mem(compiler, WORD_SIZE, TMP_REG2, dst, dstw, TMP_REG1));
- return put_label;
+ return jump;
}
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_jump_addr(sljit_uw addr, sljit_uw new_target, sljit_sw executable_offset)
{
- inline_set_jump_addr(addr, executable_offset, new_target, 1);
+ set_jump_addr(addr, executable_offset, new_target, 1);
}
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_const(sljit_uw addr, sljit_sw new_constant, sljit_sw executable_offset)
{
- inline_set_const(addr, executable_offset, (sljit_uw)new_constant, 1);
+ set_const_value(addr, executable_offset, (sljit_uw)new_constant, 1);
}
diff --git a/src/3rdparty/pcre2/src/sljit/sljitNativeARM_64.c b/src/3rdparty/pcre2/src/sljit/sljitNativeARM_64.c
index 89f747e7c8..5331ebdf42 100644
--- a/src/3rdparty/pcre2/src/sljit/sljitNativeARM_64.c
+++ b/src/3rdparty/pcre2/src/sljit/sljitNativeARM_64.c
@@ -67,79 +67,125 @@ static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 3] = {
/* Instruction forms */
/* --------------------------------------------------------------------- */
-#define ADC 0x9a000000
-#define ADD 0x8b000000
-#define ADDE 0x8b200000
-#define ADDI 0x91000000
-#define AND 0x8a000000
-#define ANDI 0x92000000
-#define ASRV 0x9ac02800
-#define B 0x14000000
-#define B_CC 0x54000000
-#define BL 0x94000000
-#define BLR 0xd63f0000
-#define BR 0xd61f0000
-#define BRK 0xd4200000
-#define CBZ 0xb4000000
-#define CLZ 0xdac01000
-#define CSEL 0x9a800000
-#define CSINC 0x9a800400
-#define EOR 0xca000000
-#define EORI 0xd2000000
-#define EXTR 0x93c00000
-#define FABS 0x1e60c000
-#define FADD 0x1e602800
-#define FCMP 0x1e602000
-#define FCVT 0x1e224000
-#define FCVTZS 0x9e780000
-#define FDIV 0x1e601800
-#define FMOV 0x1e604000
-#define FMUL 0x1e600800
-#define FNEG 0x1e614000
-#define FSUB 0x1e603800
-#define LDRI 0xf9400000
-#define LDRI_F64 0xfd400000
-#define LDRI_POST 0xf8400400
-#define LDP 0xa9400000
-#define LDP_F64 0x6d400000
-#define LDP_POST 0xa8c00000
-#define LDR_PRE 0xf8400c00
-#define LSLV 0x9ac02000
-#define LSRV 0x9ac02400
-#define MADD 0x9b000000
-#define MOVK 0xf2800000
-#define MOVN 0x92800000
-#define MOVZ 0xd2800000
-#define NOP 0xd503201f
-#define ORN 0xaa200000
-#define ORR 0xaa000000
-#define ORRI 0xb2000000
-#define RBIT 0xdac00000
-#define RET 0xd65f0000
-#define RORV 0x9ac02c00
-#define SBC 0xda000000
-#define SBFM 0x93000000
-#define SCVTF 0x9e620000
-#define SDIV 0x9ac00c00
-#define SMADDL 0x9b200000
-#define SMULH 0x9b403c00
-#define STP 0xa9000000
-#define STP_F64 0x6d000000
-#define STP_PRE 0xa9800000
-#define STRB 0x38206800
-#define STRBI 0x39000000
-#define STRI 0xf9000000
-#define STRI_F64 0xfd000000
-#define STR_FI 0x3d000000
-#define STR_FR 0x3c206800
-#define STUR_FI 0x3c000000
-#define STURBI 0x38000000
-#define SUB 0xcb000000
-#define SUBI 0xd1000000
-#define SUBS 0xeb000000
-#define UBFM 0xd3000000
-#define UDIV 0x9ac00800
-#define UMULH 0x9bc03c00
+#define ADC 0x9a000000
+#define ADD 0x8b000000
+#define ADDE 0x8b200000
+#define ADDI 0x91000000
+#define ADR 0x10000000
+#define ADRP 0x90000000
+#define AND 0x8a000000
+#define ANDI 0x92000000
+#define AND_v 0x0e201c00
+#define ASRV 0x9ac02800
+#define B 0x14000000
+#define B_CC 0x54000000
+#define BL 0x94000000
+#define BLR 0xd63f0000
+#define BR 0xd61f0000
+#define BRK 0xd4200000
+#define CAS 0xc8a07c00
+#define CASB 0x08a07c00
+#define CASH 0x48a07c00
+#define CBZ 0xb4000000
+#define CCMPI 0xfa400800
+#define CLZ 0xdac01000
+#define CSEL 0x9a800000
+#define CSINC 0x9a800400
+#define DUP_e 0x0e000400
+#define DUP_g 0x0e000c00
+#define EOR 0xca000000
+#define EOR_v 0x2e201c00
+#define EORI 0xd2000000
+#define EXTR 0x93c00000
+#define FABS 0x1e60c000
+#define FADD 0x1e602800
+#define FCMP 0x1e602000
+#define FCSEL 0x1e600c00
+#define FCVT 0x1e224000
+#define FCVTL 0x0e217800
+#define FCVTZS 0x9e780000
+#define FDIV 0x1e601800
+#define FMOV 0x1e604000
+#define FMOV_R 0x9e660000
+#define FMOV_I 0x1e601000
+#define FMUL 0x1e600800
+#define FNEG 0x1e614000
+#define FSUB 0x1e603800
+#define INS 0x4e001c00
+#define INS_e 0x6e000400
+#define LD1 0x0c407000
+#define LD1_s 0x0d400000
+#define LD1R 0x0d40c000
+#define LDRI 0xf9400000
+#define LDRI_F64 0xfd400000
+#define LDRI_POST 0xf8400400
+#define LDP 0xa9400000
+#define LDP_F64 0x6d400000
+#define LDP_POST 0xa8c00000
+#define LDR_PRE 0xf8400c00
+#define LDXR 0xc85f7c00
+#define LDXRB 0x085f7c00
+#define LDXRH 0x485f7c00
+#define LSLV 0x9ac02000
+#define LSRV 0x9ac02400
+#define MADD 0x9b000000
+#define MOVI 0x0f000400
+#define MOVK 0xf2800000
+#define MOVN 0x92800000
+#define MOVZ 0xd2800000
+#define NOP 0xd503201f
+#define ORN 0xaa200000
+#define ORR 0xaa000000
+#define ORR_v 0x0ea01c00
+#define ORRI 0xb2000000
+#define RBIT 0xdac00000
+#define RET 0xd65f0000
+#define REV 0xdac00c00
+#define REV16 0xdac00400
+#define RORV 0x9ac02c00
+#define SBC 0xda000000
+#define SBFM 0x93400000
+#define SCVTF 0x9e620000
+#define SDIV 0x9ac00c00
+#define SMADDL 0x9b200000
+#define SMOV 0x0e002c00
+#define SMULH 0x9b403c00
+#define SSHLL 0x0f00a400
+#define ST1 0x0c007000
+#define ST1_s 0x0d000000
+#define STP 0xa9000000
+#define STP_F64 0x6d000000
+#define STP_PRE 0xa9800000
+#define STRB 0x38206800
+#define STRBI 0x39000000
+#define STRI 0xf9000000
+#define STRI_F64 0xfd000000
+#define STR_FI 0x3d000000
+#define STR_FR 0x3c206800
+#define STUR_FI 0x3c000000
+#define STURBI 0x38000000
+#define STXR 0xc8007c00
+#define STXRB 0x8007c00
+#define STXRH 0x48007c00
+#define SUB 0xcb000000
+#define SUBI 0xd1000000
+#define SUBS 0xeb000000
+#define TBZ 0x36000000
+#define UBFM 0xd3400000
+#define UCVTF 0x9e630000
+#define UDIV 0x9ac00800
+#define UMOV 0x0e003c00
+#define UMULH 0x9bc03c00
+#define USHLL 0x2f00a400
+#define USHR 0x2f000400
+#define USRA 0x2f001400
+#define XTN 0x0e212800
+
+#define CSET (CSINC | RM(TMP_ZERO) | RN(TMP_ZERO))
+#define LDR (STRI | (1 << 22))
+#define LDRB (STRBI | (1 << 22))
+#define LDRH (LDRB | (1 << 30))
+#define MOV (ORR | RN(TMP_ZERO))
static sljit_s32 push_inst(struct sljit_compiler *compiler, sljit_ins ins)
{
@@ -158,77 +204,263 @@ static SLJIT_INLINE sljit_s32 emit_imm64_const(struct sljit_compiler *compiler,
return push_inst(compiler, MOVK | RD(dst) | ((sljit_ins)(imm >> 48) << 5) | (3 << 21));
}
-static SLJIT_INLINE sljit_sw detect_jump_type(struct sljit_jump *jump, sljit_ins *code_ptr, sljit_ins *code, sljit_sw executable_offset)
+static SLJIT_INLINE sljit_ins* detect_jump_type(struct sljit_jump *jump, sljit_ins *code_ptr, sljit_ins *code, sljit_sw executable_offset)
{
sljit_sw diff;
sljit_uw target_addr;
- if (jump->flags & SLJIT_REWRITABLE_JUMP) {
- jump->flags |= PATCH_ABS64;
- return 0;
- }
+ if (jump->flags & SLJIT_REWRITABLE_JUMP)
+ goto exit;
if (jump->flags & JUMP_ADDR)
target_addr = jump->u.target;
else {
- SLJIT_ASSERT(jump->flags & JUMP_LABEL);
+ SLJIT_ASSERT(jump->u.label != NULL);
target_addr = (sljit_uw)(code + jump->u.label->size) + (sljit_uw)executable_offset;
}
- diff = (sljit_sw)target_addr - (sljit_sw)(code_ptr + 4) - executable_offset;
+ diff = (sljit_sw)target_addr - (sljit_sw)code_ptr - executable_offset;
if (jump->flags & IS_COND) {
diff += SSIZE_OF(ins);
if (diff <= 0xfffff && diff >= -0x100000) {
- code_ptr[-5] ^= (jump->flags & IS_CBZ) ? (0x1 << 24) : 0x1;
- jump->addr -= sizeof(sljit_ins);
+ *(--code_ptr) ^= (jump->flags & IS_CBZ) ? (0x1 << 24) : 0x1;
jump->flags |= PATCH_COND;
- return 5;
+ jump->addr -= sizeof(sljit_ins);
+ return code_ptr;
}
diff -= SSIZE_OF(ins);
}
if (diff <= 0x7ffffff && diff >= -0x8000000) {
+ if (jump->flags & IS_COND)
+ code_ptr[-1] -= (4 << 5);
jump->flags |= PATCH_B;
- return 4;
+ return code_ptr;
}
if (target_addr < 0x100000000l) {
if (jump->flags & IS_COND)
- code_ptr[-5] -= (2 << 5);
- code_ptr[-2] = code_ptr[0];
- return 2;
+ code_ptr[-1] -= (2 << 5);
+ code_ptr[2] = code_ptr[0];
+ return code_ptr + 2;
+ }
+
+ if (diff <= 0xfffff000l && diff >= -0x100000000l) {
+ if (jump->flags & IS_COND)
+ code_ptr[-1] -= (2 << 5);
+ jump->flags |= PATCH_B32;
+ code_ptr[2] = code_ptr[0];
+ return code_ptr + 2;
}
if (target_addr < 0x1000000000000l) {
if (jump->flags & IS_COND)
- code_ptr[-5] -= (1 << 5);
+ code_ptr[-1] -= (1 << 5);
jump->flags |= PATCH_ABS48;
- code_ptr[-1] = code_ptr[0];
- return 1;
+ code_ptr[3] = code_ptr[0];
+ return code_ptr + 3;
}
+exit:
jump->flags |= PATCH_ABS64;
- return 0;
+ code_ptr[4] = code_ptr[0];
+ return code_ptr + 4;
}
-static SLJIT_INLINE sljit_sw put_label_get_length(struct sljit_put_label *put_label, sljit_uw max_label)
+static SLJIT_INLINE sljit_sw mov_addr_get_length(struct sljit_jump *jump, sljit_ins *code_ptr, sljit_ins *code, sljit_sw executable_offset)
{
- if (max_label < 0x100000000l) {
- put_label->flags = 0;
- return 2;
+ sljit_uw addr;
+ sljit_sw diff;
+ SLJIT_UNUSED_ARG(executable_offset);
+
+ SLJIT_ASSERT(jump->flags < ((sljit_uw)4 << JUMP_SIZE_SHIFT));
+ if (jump->flags & JUMP_ADDR)
+ addr = jump->u.target;
+ else
+ addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code + jump->u.label->size, executable_offset);
+
+ diff = (sljit_sw)addr - (sljit_sw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
+
+ if (diff <= 0xfffff && diff >= -0x100000) {
+ jump->flags |= PATCH_B;
+ return 0;
}
- if (max_label < 0x1000000000000l) {
- put_label->flags = 1;
+ if (diff <= 0xfffff000l && diff >= -0x100000000l) {
+ SLJIT_ASSERT(jump->flags >= ((sljit_uw)1 << JUMP_SIZE_SHIFT));
+ jump->flags |= PATCH_B32;
return 1;
}
- put_label->flags = 2;
- return 0;
+ if (addr < 0x100000000l) {
+ SLJIT_ASSERT(jump->flags >= ((sljit_uw)1 << JUMP_SIZE_SHIFT));
+ return 1;
+ }
+
+ if (addr < 0x1000000000000l) {
+ SLJIT_ASSERT(jump->flags >= ((sljit_uw)2 << JUMP_SIZE_SHIFT));
+ jump->flags |= PATCH_ABS48;
+ return 2;
+ }
+
+ SLJIT_ASSERT(jump->flags >= ((sljit_uw)3 << JUMP_SIZE_SHIFT));
+ jump->flags |= PATCH_ABS64;
+ return 3;
+}
+
+static SLJIT_INLINE void generate_jump_or_mov_addr(struct sljit_jump *jump, sljit_sw executable_offset)
+{
+ sljit_sw addr = (sljit_sw)((jump->flags & JUMP_ADDR) ? jump->u.target : jump->u.label->u.addr);
+ sljit_ins* buf_ptr = (sljit_ins*)jump->addr;
+ sljit_u32 dst;
+ SLJIT_UNUSED_ARG(executable_offset);
+
+ if (!(jump->flags & JUMP_MOV_ADDR)) {
+ if (jump->flags & PATCH_COND) {
+ addr = (addr - (sljit_sw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset)) >> 2;
+ SLJIT_ASSERT(addr <= 0x3ffff && addr >= -0x40000);
+ buf_ptr[0] = (buf_ptr[0] & ~(sljit_ins)0xffffe0) | (sljit_ins)((addr & 0x7ffff) << 5);
+ return;
+ }
+
+ if (jump->flags & PATCH_B) {
+ addr = (addr - (sljit_sw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset)) >> 2;
+ SLJIT_ASSERT(addr <= 0x1ffffff && addr >= -0x2000000);
+ buf_ptr[0] = ((jump->flags & IS_BL) ? BL : B) | (sljit_ins)(addr & 0x3ffffff);
+ return;
+ }
+
+ dst = (buf_ptr[0] >> 5) & 0x1f;
+
+ if (jump->flags & PATCH_B32) {
+ addr -= (sljit_sw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset) & ~(sljit_sw)0xfff;
+ SLJIT_ASSERT(addr <= 0xfffff000l && addr >= -0x100000000l);
+ buf_ptr[0] = ADRP | (((sljit_ins)(addr >> 12) & 0x3) << 29) | (((sljit_ins)(addr >> 14) & 0x7ffff) << 5) | dst;
+ buf_ptr[1] = ADDI | dst | (dst << 5) | ((sljit_ins)(addr & 0xfff) << 10);
+ return;
+ }
+ } else {
+ dst = *buf_ptr;
+
+ if (jump->flags & PATCH_B) {
+ addr -= (sljit_sw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset);
+ SLJIT_ASSERT(addr <= 0xfffff && addr >= -0x100000);
+ buf_ptr[0] = ADR | (((sljit_ins)addr & 0x3) << 29) | (((sljit_ins)(addr >> 2) & 0x7ffff) << 5) | dst;
+ return;
+ }
+
+ if (jump->flags & PATCH_B32) {
+ addr -= ((sljit_sw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset)) & ~(sljit_sw)0xfff;
+ SLJIT_ASSERT(addr <= 0xffffffffl && addr >= -0x100000000l);
+ buf_ptr[0] = ADRP | (((sljit_ins)(addr >> 12) & 0x3) << 29) | (((sljit_ins)(addr >> 14) & 0x7ffff) << 5) | dst;
+ buf_ptr[1] = ADDI | dst | (dst << 5) | ((sljit_ins)(addr & 0xfff) << 10);
+ return;
+ }
+ }
+
+ SLJIT_ASSERT((jump->flags & (PATCH_ABS48 | PATCH_ABS64)) || (sljit_uw)addr <= (sljit_uw)0xffffffff);
+ SLJIT_ASSERT((jump->flags & PATCH_ABS64) || (sljit_uw)addr <= (sljit_uw)0xffffffffffff);
+
+ buf_ptr[0] = MOVZ | (((sljit_ins)addr & 0xffff) << 5) | dst;
+ buf_ptr[1] = MOVK | (((sljit_ins)(addr >> 16) & 0xffff) << 5) | (1 << 21) | dst;
+ if (jump->flags & (PATCH_ABS48 | PATCH_ABS64))
+ buf_ptr[2] = MOVK | (((sljit_ins)(addr >> 32) & 0xffff) << 5) | (2 << 21) | dst;
+
+ if (jump->flags & PATCH_ABS64)
+ buf_ptr[3] = MOVK | ((sljit_ins)((sljit_uw)addr >> 48) << 5) | (3 << 21) | dst;
}
-SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compiler)
+static void reduce_code_size(struct sljit_compiler *compiler)
+{
+ struct sljit_label *label;
+ struct sljit_jump *jump;
+ struct sljit_const *const_;
+ SLJIT_NEXT_DEFINE_TYPES;
+ sljit_uw total_size;
+ sljit_uw size_reduce = 0;
+ sljit_sw diff;
+
+ label = compiler->labels;
+ jump = compiler->jumps;
+ const_ = compiler->consts;
+ SLJIT_NEXT_INIT_TYPES();
+
+ while (1) {
+ SLJIT_GET_NEXT_MIN();
+
+ if (next_min_addr == SLJIT_MAX_ADDRESS)
+ break;
+
+ if (next_min_addr == next_label_size) {
+ label->size -= size_reduce;
+
+ label = label->next;
+ next_label_size = SLJIT_GET_NEXT_SIZE(label);
+ }
+
+ if (next_min_addr == next_const_addr) {
+ const_->addr -= size_reduce;
+ const_ = const_->next;
+ next_const_addr = SLJIT_GET_NEXT_ADDRESS(const_);
+ continue;
+ }
+
+ if (next_min_addr != next_jump_addr)
+ continue;
+
+ jump->addr -= size_reduce;
+ if (!(jump->flags & JUMP_MOV_ADDR)) {
+ total_size = JUMP_MAX_SIZE;
+
+ if (!(jump->flags & SLJIT_REWRITABLE_JUMP)) {
+ if (jump->flags & JUMP_ADDR) {
+ if (jump->u.target < 0x100000000l)
+ total_size = 3;
+ else if (jump->u.target < 0x1000000000000l)
+ total_size = 4;
+ } else {
+ /* Unit size: instruction. */
+ diff = (sljit_sw)jump->u.label->size - (sljit_sw)jump->addr;
+
+ if ((jump->flags & IS_COND) && (diff + 1) <= (0xfffff / SSIZE_OF(ins)) && (diff + 1) >= (-0x100000 / SSIZE_OF(ins)))
+ total_size = 0;
+ else if (diff <= (0x7ffffff / SSIZE_OF(ins)) && diff >= (-0x8000000 / SSIZE_OF(ins)))
+ total_size = 1;
+ else if (diff <= (0xfffff000l / SSIZE_OF(ins)) && diff >= (-0x100000000l / SSIZE_OF(ins)))
+ total_size = 3;
+ }
+ }
+
+ size_reduce += JUMP_MAX_SIZE - total_size;
+ } else {
+ /* Real size minus 1. Unit size: instruction. */
+ total_size = 3;
+
+ if (!(jump->flags & JUMP_ADDR)) {
+ diff = (sljit_sw)jump->u.label->size - (sljit_sw)jump->addr;
+
+ if (diff <= (0xfffff / SSIZE_OF(ins)) && diff >= (-0x100000 / SSIZE_OF(ins)))
+ total_size = 0;
+ else if (diff <= (0xfffff000l / SSIZE_OF(ins)) && diff >= (-0x100000000l / SSIZE_OF(ins)))
+ total_size = 1;
+ } else if (jump->u.target < 0x100000000l)
+ total_size = 1;
+ else if (jump->u.target < 0x1000000000000l)
+ total_size = 2;
+
+ size_reduce += 3 - total_size;
+ }
+
+ jump->flags |= total_size << JUMP_SIZE_SHIFT;
+ jump = jump->next;
+ next_jump_addr = SLJIT_GET_NEXT_ADDRESS(jump);
+ }
+
+ compiler->size -= size_reduce;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compiler, sljit_s32 options, void *exec_allocator_data)
{
struct sljit_memory_fragment *buf;
sljit_ins *code;
@@ -236,67 +468,73 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
sljit_ins *buf_ptr;
sljit_ins *buf_end;
sljit_uw word_count;
- sljit_uw next_addr;
+ SLJIT_NEXT_DEFINE_TYPES;
sljit_sw executable_offset;
sljit_sw addr;
- sljit_u32 dst;
struct sljit_label *label;
struct sljit_jump *jump;
struct sljit_const *const_;
- struct sljit_put_label *put_label;
CHECK_ERROR_PTR();
CHECK_PTR(check_sljit_generate_code(compiler));
- reverse_buf(compiler);
- code = (sljit_ins*)SLJIT_MALLOC_EXEC(compiler->size * sizeof(sljit_ins), compiler->exec_allocator_data);
+ reduce_code_size(compiler);
+
+ code = (sljit_ins*)allocate_executable_memory(compiler->size * sizeof(sljit_ins), options, exec_allocator_data, &executable_offset);
PTR_FAIL_WITH_EXEC_IF(code);
+
+ reverse_buf(compiler);
buf = compiler->buf;
code_ptr = code;
word_count = 0;
- next_addr = 0;
- executable_offset = SLJIT_EXEC_OFFSET(code);
-
label = compiler->labels;
jump = compiler->jumps;
const_ = compiler->consts;
- put_label = compiler->put_labels;
+ SLJIT_NEXT_INIT_TYPES();
+ SLJIT_GET_NEXT_MIN();
do {
buf_ptr = (sljit_ins*)buf->memory;
buf_end = buf_ptr + (buf->used_size >> 2);
do {
*code_ptr = *buf_ptr++;
- if (next_addr == word_count) {
+ if (next_min_addr == word_count) {
SLJIT_ASSERT(!label || label->size >= word_count);
SLJIT_ASSERT(!jump || jump->addr >= word_count);
SLJIT_ASSERT(!const_ || const_->addr >= word_count);
- SLJIT_ASSERT(!put_label || put_label->addr >= word_count);
/* These structures are ordered by their address. */
- if (label && label->size == word_count) {
- label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
+ if (next_min_addr == next_label_size) {
+ label->u.addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
label->size = (sljit_uw)(code_ptr - code);
label = label->next;
+ next_label_size = SLJIT_GET_NEXT_SIZE(label);
}
- if (jump && jump->addr == word_count) {
- jump->addr = (sljit_uw)(code_ptr - 4);
- code_ptr -= detect_jump_type(jump, code_ptr, code, executable_offset);
- jump = jump->next;
- }
- if (const_ && const_->addr == word_count) {
+
+ if (next_min_addr == next_jump_addr) {
+ if (!(jump->flags & JUMP_MOV_ADDR)) {
+ word_count = word_count - 1 + (jump->flags >> JUMP_SIZE_SHIFT);
+ jump->addr = (sljit_uw)code_ptr;
+ code_ptr = detect_jump_type(jump, code_ptr, code, executable_offset);
+ SLJIT_ASSERT((jump->flags & PATCH_COND) || ((sljit_uw)code_ptr - jump->addr < (jump->flags >> JUMP_SIZE_SHIFT) * sizeof(sljit_ins)));
+ } else {
+ word_count += jump->flags >> JUMP_SIZE_SHIFT;
+ addr = (sljit_sw)code_ptr;
+ code_ptr += mov_addr_get_length(jump, code_ptr, code, executable_offset);
+ jump->addr = (sljit_uw)addr;
+ }
+
+ jump = jump->next;
+ next_jump_addr = SLJIT_GET_NEXT_ADDRESS(jump);
+ } else if (next_min_addr == next_const_addr) {
const_->addr = (sljit_uw)code_ptr;
const_ = const_->next;
+ next_const_addr = SLJIT_GET_NEXT_ADDRESS(const_);
}
- if (put_label && put_label->addr == word_count) {
- SLJIT_ASSERT(put_label->label);
- put_label->addr = (sljit_uw)(code_ptr - 3);
- code_ptr -= put_label_get_length(put_label, (sljit_uw)(SLJIT_ADD_EXEC_OFFSET(code, executable_offset) + put_label->label->size));
- put_label = put_label->next;
- }
- next_addr = compute_next_addr(label, jump, const_, put_label);
+
+ SLJIT_GET_NEXT_MIN();
}
code_ptr++;
word_count++;
@@ -306,7 +544,7 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
} while (buf);
if (label && label->size == word_count) {
- label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
+ label->u.addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
label->size = (sljit_uw)(code_ptr - code);
label = label->next;
}
@@ -314,61 +552,14 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
SLJIT_ASSERT(!label);
SLJIT_ASSERT(!jump);
SLJIT_ASSERT(!const_);
- SLJIT_ASSERT(!put_label);
SLJIT_ASSERT(code_ptr - code <= (sljit_sw)compiler->size);
jump = compiler->jumps;
while (jump) {
- do {
- addr = (sljit_sw)((jump->flags & JUMP_LABEL) ? jump->u.label->addr : jump->u.target);
- buf_ptr = (sljit_ins *)jump->addr;
-
- if (jump->flags & PATCH_B) {
- addr = (addr - (sljit_sw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset)) >> 2;
- SLJIT_ASSERT(addr <= 0x1ffffff && addr >= -0x2000000);
- buf_ptr[0] = ((jump->flags & IS_BL) ? BL : B) | (sljit_ins)(addr & 0x3ffffff);
- if (jump->flags & IS_COND)
- buf_ptr[-1] -= (4 << 5);
- break;
- }
- if (jump->flags & PATCH_COND) {
- addr = (addr - (sljit_sw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset)) >> 2;
- SLJIT_ASSERT(addr <= 0x3ffff && addr >= -0x40000);
- buf_ptr[0] = (buf_ptr[0] & ~(sljit_ins)0xffffe0) | (sljit_ins)((addr & 0x7ffff) << 5);
- break;
- }
-
- SLJIT_ASSERT((jump->flags & (PATCH_ABS48 | PATCH_ABS64)) || (sljit_uw)addr <= (sljit_uw)0xffffffff);
- SLJIT_ASSERT((jump->flags & PATCH_ABS64) || (sljit_uw)addr <= (sljit_uw)0xffffffffffff);
-
- dst = buf_ptr[0] & 0x1f;
- buf_ptr[0] = MOVZ | dst | (((sljit_ins)addr & 0xffff) << 5);
- buf_ptr[1] = MOVK | dst | (((sljit_ins)(addr >> 16) & 0xffff) << 5) | (1 << 21);
- if (jump->flags & (PATCH_ABS48 | PATCH_ABS64))
- buf_ptr[2] = MOVK | dst | (((sljit_ins)(addr >> 32) & 0xffff) << 5) | (2 << 21);
- if (jump->flags & PATCH_ABS64)
- buf_ptr[3] = MOVK | dst | ((sljit_ins)(addr >> 48) << 5) | (3 << 21);
- } while (0);
+ generate_jump_or_mov_addr(jump, executable_offset);
jump = jump->next;
}
- put_label = compiler->put_labels;
- while (put_label) {
- addr = (sljit_sw)put_label->label->addr;
- buf_ptr = (sljit_ins*)put_label->addr;
-
- buf_ptr[0] |= ((sljit_ins)addr & 0xffff) << 5;
- buf_ptr[1] |= ((sljit_ins)(addr >> 16) & 0xffff) << 5;
-
- if (put_label->flags >= 1)
- buf_ptr[2] |= ((sljit_ins)(addr >> 32) & 0xffff) << 5;
-
- if (put_label->flags >= 2)
- buf_ptr[3] |= (sljit_ins)(addr >> 48) << 5;
-
- put_label = put_label->next;
- }
-
compiler->error = SLJIT_ERR_COMPILED;
compiler->executable_offset = executable_offset;
compiler->executable_size = (sljit_uw)(code_ptr - code) * sizeof(sljit_ins);
@@ -385,8 +576,9 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_has_cpu_feature(sljit_s32 feature_type)
{
switch (feature_type) {
case SLJIT_HAS_FPU:
+ case SLJIT_HAS_SIMD:
#ifdef SLJIT_IS_FPU_AVAILABLE
- return SLJIT_IS_FPU_AVAILABLE;
+ return (SLJIT_IS_FPU_AVAILABLE) != 0;
#else
/* Available by default. */
return 1;
@@ -394,9 +586,13 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_has_cpu_feature(sljit_s32 feature_type)
case SLJIT_HAS_CLZ:
case SLJIT_HAS_CTZ:
+ case SLJIT_HAS_REV:
case SLJIT_HAS_ROT:
case SLJIT_HAS_CMOV:
case SLJIT_HAS_PREFETCH:
+ case SLJIT_HAS_COPY_F32:
+ case SLJIT_HAS_COPY_F64:
+ case SLJIT_HAS_ATOMIC:
return 1;
default:
@@ -404,6 +600,17 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_has_cpu_feature(sljit_s32 feature_type)
}
}
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_cmp_info(sljit_s32 type)
+{
+ switch (type) {
+ case SLJIT_UNORDERED_OR_EQUAL:
+ case SLJIT_ORDERED_NOT_EQUAL:
+ return 2;
+ }
+
+ return 0;
+}
+
/* --------------------------------------------------------------------- */
/* Core code generator functions. */
/* --------------------------------------------------------------------- */
@@ -633,21 +840,23 @@ static sljit_s32 emit_op_imm(struct sljit_compiler *compiler, sljit_s32 flags, s
imm = (flags & ARG2_IMM) ? arg2 : arg1;
switch (op) {
- case SLJIT_MUL:
case SLJIT_CLZ:
case SLJIT_CTZ:
+ case SLJIT_REV:
+ case SLJIT_REV_U16:
+ case SLJIT_REV_S16:
+ case SLJIT_REV_U32:
+ case SLJIT_REV_S32:
case SLJIT_ADDC:
case SLJIT_SUBC:
+ case SLJIT_MUL:
+ case SLJIT_MULADD:
/* No form with immediate operand (except imm 0, which
is represented by a ZERO register). */
break;
case SLJIT_MOV:
SLJIT_ASSERT(!(flags & SET_FLAGS) && (flags & ARG2_IMM) && arg1 == TMP_REG1);
return load_immediate(compiler, dst, imm);
- case SLJIT_NOT:
- SLJIT_ASSERT(flags & ARG2_IMM);
- FAIL_IF(load_immediate(compiler, dst, (flags & INT_OP) ? (~imm & 0xffffffff) : ~imm));
- goto set_flags;
case SLJIT_SUB:
compiler->status_flags_state = SLJIT_CURRENT_FLAGS_SUB;
if (flags & ARG1_IMM)
@@ -694,8 +903,13 @@ static sljit_s32 emit_op_imm(struct sljit_compiler *compiler, sljit_s32 flags, s
break;
CHECK_FLAGS(3 << 29);
return push_inst(compiler, (ANDI ^ inv_bits) | RD(dst) | RN(reg) | inst_bits);
- case SLJIT_OR:
case SLJIT_XOR:
+ if (imm == -1) {
+ FAIL_IF(push_inst(compiler, (ORN ^ inv_bits) | RD(dst) | RN(TMP_ZERO) | RM(reg)));
+ goto set_flags;
+ }
+ /* fallthrough */
+ case SLJIT_OR:
inst_bits = logical_imm(imm, LOGICAL_IMM_CHECK | ((flags & INT_OP) ? 16 : 32));
if (!inst_bits)
break;
@@ -718,6 +932,7 @@ static sljit_s32 emit_op_imm(struct sljit_compiler *compiler, sljit_s32 flags, s
inst_bits = ((sljit_ins)1 << 22) | (((sljit_ins)-imm & 0x3f) << 16) | ((63 - (sljit_ins)imm) << 10);
}
+ inv_bits |= inv_bits >> 9;
FAIL_IF(push_inst(compiler, (UBFM ^ inv_bits) | RD(dst) | RN(arg1) | inst_bits));
goto set_flags;
case SLJIT_LSHR:
@@ -727,6 +942,7 @@ static sljit_s32 emit_op_imm(struct sljit_compiler *compiler, sljit_s32 flags, s
if (flags & ARG1_IMM)
break;
+ inv_bits |= inv_bits >> 9;
if (op >= SLJIT_ASHR)
inv_bits |= 1 << 30;
@@ -780,22 +996,22 @@ static sljit_s32 emit_op_imm(struct sljit_compiler *compiler, sljit_s32 flags, s
SLJIT_ASSERT(!(flags & SET_FLAGS) && arg1 == TMP_REG1);
if (dst == arg2)
return SLJIT_SUCCESS;
- return push_inst(compiler, ORR | RD(dst) | RN(TMP_ZERO) | RM(arg2));
+ return push_inst(compiler, MOV | RD(dst) | RM(arg2));
case SLJIT_MOV_U8:
SLJIT_ASSERT(!(flags & SET_FLAGS) && arg1 == TMP_REG1);
- return push_inst(compiler, (UBFM ^ W_OP) | RD(dst) | RN(arg2) | (7 << 10));
+ inv_bits |= inv_bits >> 9;
+ return push_inst(compiler, (UBFM ^ inv_bits) | RD(dst) | RN(arg2) | (7 << 10));
case SLJIT_MOV_S8:
SLJIT_ASSERT(!(flags & SET_FLAGS) && arg1 == TMP_REG1);
- if (!(flags & INT_OP))
- inv_bits |= 1 << 22;
+ inv_bits |= inv_bits >> 9;
return push_inst(compiler, (SBFM ^ inv_bits) | RD(dst) | RN(arg2) | (7 << 10));
case SLJIT_MOV_U16:
SLJIT_ASSERT(!(flags & SET_FLAGS) && arg1 == TMP_REG1);
- return push_inst(compiler, (UBFM ^ W_OP) | RD(dst) | RN(arg2) | (15 << 10));
+ inv_bits |= inv_bits >> 9;
+ return push_inst(compiler, (UBFM ^ inv_bits) | RD(dst) | RN(arg2) | (15 << 10));
case SLJIT_MOV_S16:
SLJIT_ASSERT(!(flags & SET_FLAGS) && arg1 == TMP_REG1);
- if (!(flags & INT_OP))
- inv_bits |= 1 << 22;
+ inv_bits |= inv_bits >> 9;
return push_inst(compiler, (SBFM ^ inv_bits) | RD(dst) | RN(arg2) | (15 << 10));
case SLJIT_MOV32:
SLJIT_ASSERT(!(flags & SET_FLAGS) && arg1 == TMP_REG1);
@@ -804,14 +1020,10 @@ static sljit_s32 emit_op_imm(struct sljit_compiler *compiler, sljit_s32 flags, s
/* fallthrough */
case SLJIT_MOV_U32:
SLJIT_ASSERT(!(flags & SET_FLAGS) && arg1 == TMP_REG1);
- return push_inst(compiler, (ORR ^ W_OP) | RD(dst) | RN(TMP_ZERO) | RM(arg2));
+ return push_inst(compiler, (MOV ^ W_OP) | RD(dst) | RM(arg2));
case SLJIT_MOV_S32:
SLJIT_ASSERT(!(flags & SET_FLAGS) && arg1 == TMP_REG1);
return push_inst(compiler, SBFM | (1 << 22) | RD(dst) | RN(arg2) | (31 << 10));
- case SLJIT_NOT:
- SLJIT_ASSERT(arg1 == TMP_REG1);
- FAIL_IF(push_inst(compiler, (ORN ^ inv_bits) | RD(dst) | RN(TMP_ZERO) | RM(arg2)));
- break; /* Set flags. */
case SLJIT_CLZ:
SLJIT_ASSERT(arg1 == TMP_REG1);
return push_inst(compiler, (CLZ ^ inv_bits) | RD(dst) | RN(arg2));
@@ -819,6 +1031,25 @@ static sljit_s32 emit_op_imm(struct sljit_compiler *compiler, sljit_s32 flags, s
SLJIT_ASSERT(arg1 == TMP_REG1);
FAIL_IF(push_inst(compiler, (RBIT ^ inv_bits) | RD(dst) | RN(arg2)));
return push_inst(compiler, (CLZ ^ inv_bits) | RD(dst) | RN(dst));
+ case SLJIT_REV:
+ SLJIT_ASSERT(arg1 == TMP_REG1);
+ inv_bits |= inv_bits >> 21;
+ return push_inst(compiler, (REV ^ inv_bits) | RD(dst) | RN(arg2));
+ case SLJIT_REV_U16:
+ case SLJIT_REV_S16:
+ SLJIT_ASSERT(arg1 == TMP_REG1 && dst != TMP_REG2);
+ FAIL_IF(push_inst(compiler, (REV16 ^ (sljit_ins)0x80000000) | RD(dst) | RN(arg2)));
+ if (dst == TMP_REG1 || (arg2 == TMP_REG2 && op == SLJIT_REV_U16))
+ return SLJIT_SUCCESS;
+ inv_bits |= inv_bits >> 9;
+ return push_inst(compiler, ((op == SLJIT_REV_U16 ? UBFM : SBFM) ^ inv_bits) | RD(dst) | RN(dst) | (15 << 10));
+ case SLJIT_REV_U32:
+ case SLJIT_REV_S32:
+ SLJIT_ASSERT(arg1 == TMP_REG1 && dst != TMP_REG2);
+ FAIL_IF(push_inst(compiler, (REV ^ (sljit_ins)0x80000400) | RD(dst) | RN(arg2)));
+ if (op == SLJIT_REV_U32 || dst == TMP_REG1)
+ return SLJIT_SUCCESS;
+ return push_inst(compiler, SBFM | (1 << 22) | RD(dst) | RN(dst) | (31 << 10));
case SLJIT_ADD:
compiler->status_flags_state = SLJIT_CURRENT_FLAGS_ADD;
CHECK_FLAGS(1 << 29);
@@ -874,6 +1105,9 @@ static sljit_s32 emit_op_imm(struct sljit_compiler *compiler, sljit_s32 flags, s
/* fallthrough */
case SLJIT_ROTR:
return push_inst(compiler, (RORV ^ inv_bits) | RD(dst) | RN(arg1) | RM(arg2));
+ case SLJIT_MULADD:
+ compiler->status_flags_state = 0;
+ return push_inst(compiler, (MADD ^ inv_bits) | RD(dst) | RN(arg1) | RM(arg2) | RT2(dst));
default:
SLJIT_UNREACHABLE();
return SLJIT_SUCCESS;
@@ -948,14 +1182,20 @@ static sljit_s32 emit_op_mem(struct sljit_compiler *compiler, sljit_s32 flags, s
if (argw <= 0xff && argw >= -0x100)
return push_inst(compiler, STURBI | type | RT(reg) | RN(arg) | (((sljit_ins)argw & 0x1ff) << 12));
- if (argw >= 0) {
- if (argw <= 0xfff0ff && ((argw + 0x100) & 0xfff) <= 0x1ff) {
+ if (((argw + 0x100) & 0xfff) <= 0x1ff && argw <= 0xfff0ff && argw >= -0xfff100) {
+ if (argw >= 0) {
+ if (argw & 0x100)
+ argw += 0x1000;
+
FAIL_IF(push_inst(compiler, ADDI | (1 << 22) | RD(tmp_reg) | RN(arg) | (((sljit_ins)argw >> 12) << 10)));
return push_inst(compiler, STURBI | type | RT(reg) | RN(tmp_reg) | (((sljit_ins)argw & 0x1ff) << 12));
+ } else {
+ if (!(argw & 0x100))
+ argw -= 0x1000;
+
+ FAIL_IF(push_inst(compiler, SUBI | (1 << 22) | RD(tmp_reg) | RN(arg) | (((sljit_ins)-argw >> 12) << 10)));
+ return push_inst(compiler, STURBI | type | RT(reg) | RN(tmp_reg) | (((sljit_ins)argw & 0x1ff) << 12));
}
- } else if (argw >= -0xfff100 && ((-argw + 0xff) & 0xfff) <= 0x1ff) {
- FAIL_IF(push_inst(compiler, SUBI | (1 << 22) | RD(tmp_reg) | RN(arg) | (((sljit_ins)-argw >> 12) << 10)));
- return push_inst(compiler, STURBI | type | RT(reg) | RN(tmp_reg) | (((sljit_ins)argw & 0x1ff) << 12));
}
FAIL_IF(load_immediate(compiler, tmp_reg, argw));
@@ -980,7 +1220,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compi
set_emit_enter(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size);
saved_regs_size = GET_SAVED_REGISTERS_SIZE(scratches, saveds - saved_arg_count, 2);
- saved_regs_size += GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, SSIZE_OF(f64));
+ saved_regs_size += GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, f64);
local_size = (local_size + saved_regs_size + 0xf) & ~0xf;
compiler->local_size = local_size;
@@ -1065,7 +1305,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compi
while (arg_types) {
if ((arg_types & SLJIT_ARG_MASK) < SLJIT_ARG_TYPE_F64) {
if (!(arg_types & SLJIT_ARG_TYPE_SCRATCH_REG)) {
- FAIL_IF(push_inst(compiler, ORR | RD(SLJIT_S0 - saved_arg_count) | RN(TMP_ZERO) | RM(tmp)));
+ FAIL_IF(push_inst(compiler, MOV | RD(SLJIT_S0 - saved_arg_count) | RM(tmp)));
saved_arg_count++;
}
tmp++;
@@ -1153,7 +1393,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_set_context(struct sljit_compiler *comp
set_set_context(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size);
saved_regs_size = GET_SAVED_REGISTERS_SIZE(scratches, saveds - SLJIT_KEPT_SAVEDS_COUNT(options), 2);
- saved_regs_size += GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, SSIZE_OF(f64));
+ saved_regs_size += GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, f64);
compiler->local_size = (local_size + saved_regs_size + 0xf) & ~0xf;
return SLJIT_SUCCESS;
@@ -1272,7 +1512,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return_to(struct sljit_compiler *c
src = TMP_REG1;
srcw = 0;
} else if (src >= SLJIT_FIRST_SAVED_REG && src <= (SLJIT_S0 - SLJIT_KEPT_SAVEDS_COUNT(compiler->options))) {
- FAIL_IF(push_inst(compiler, ORR | RD(TMP_REG1) | RN(TMP_ZERO) | RM(src)));
+ FAIL_IF(push_inst(compiler, MOV | RD(TMP_REG1) | RM(src)));
src = TMP_REG1;
srcw = 0;
}
@@ -1302,12 +1542,12 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op0(struct sljit_compiler *compile
return push_inst(compiler, NOP);
case SLJIT_LMUL_UW:
case SLJIT_LMUL_SW:
- FAIL_IF(push_inst(compiler, ORR | RD(TMP_REG1) | RN(TMP_ZERO) | RM(SLJIT_R0)));
+ FAIL_IF(push_inst(compiler, MOV | RD(TMP_REG1) | RM(SLJIT_R0)));
FAIL_IF(push_inst(compiler, MADD | RD(SLJIT_R0) | RN(SLJIT_R0) | RM(SLJIT_R1) | RT2(TMP_ZERO)));
return push_inst(compiler, (op == SLJIT_LMUL_UW ? UMULH : SMULH) | RD(SLJIT_R1) | RN(TMP_REG1) | RM(SLJIT_R1));
case SLJIT_DIVMOD_UW:
case SLJIT_DIVMOD_SW:
- FAIL_IF(push_inst(compiler, (ORR ^ inv_bits) | RD(TMP_REG1) | RN(TMP_ZERO) | RM(SLJIT_R0)));
+ FAIL_IF(push_inst(compiler, (MOV ^ inv_bits) | RD(TMP_REG1) | RM(SLJIT_R0)));
FAIL_IF(push_inst(compiler, ((op == SLJIT_DIVMOD_UW ? UDIV : SDIV) ^ inv_bits) | RD(SLJIT_R0) | RN(SLJIT_R0) | RM(SLJIT_R1)));
FAIL_IF(push_inst(compiler, (MADD ^ inv_bits) | RD(SLJIT_R1) | RN(SLJIT_R0) | RM(SLJIT_R1) | RT2(TMP_ZERO)));
return push_inst(compiler, (SUB ^ inv_bits) | RD(SLJIT_R1) | RN(TMP_REG1) | RM(SLJIT_R1));
@@ -1339,7 +1579,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compile
op = GET_OPCODE(op);
if (op >= SLJIT_MOV && op <= SLJIT_MOV_P) {
/* Both operands are registers. */
- if (dst_r != TMP_REG1 && FAST_IS_REG(src))
+ if (FAST_IS_REG(dst) && FAST_IS_REG(src))
return emit_op_imm(compiler, op | ((op_flags & SLJIT_32) ? INT_OP : 0), dst_r, TMP_REG1, src);
switch (op) {
@@ -1349,33 +1589,33 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compile
break;
case SLJIT_MOV_U8:
mem_flags = BYTE_SIZE;
- if (src & SLJIT_IMM)
+ if (src == SLJIT_IMM)
srcw = (sljit_u8)srcw;
break;
case SLJIT_MOV_S8:
mem_flags = BYTE_SIZE | SIGNED;
- if (src & SLJIT_IMM)
+ if (src == SLJIT_IMM)
srcw = (sljit_s8)srcw;
break;
case SLJIT_MOV_U16:
mem_flags = HALF_SIZE;
- if (src & SLJIT_IMM)
+ if (src == SLJIT_IMM)
srcw = (sljit_u16)srcw;
break;
case SLJIT_MOV_S16:
mem_flags = HALF_SIZE | SIGNED;
- if (src & SLJIT_IMM)
+ if (src == SLJIT_IMM)
srcw = (sljit_s16)srcw;
break;
case SLJIT_MOV_U32:
mem_flags = INT_SIZE;
- if (src & SLJIT_IMM)
+ if (src == SLJIT_IMM)
srcw = (sljit_u32)srcw;
break;
case SLJIT_MOV_S32:
case SLJIT_MOV32:
mem_flags = INT_SIZE | SIGNED;
- if (src & SLJIT_IMM)
+ if (src == SLJIT_IMM)
srcw = (sljit_s32)srcw;
break;
default:
@@ -1384,12 +1624,12 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compile
break;
}
- if (src & SLJIT_IMM)
+ if (src == SLJIT_IMM)
FAIL_IF(emit_op_imm(compiler, SLJIT_MOV | ARG2_IMM, dst_r, TMP_REG1, srcw));
else if (!(src & SLJIT_MEM))
dst_r = src;
else
- FAIL_IF(emit_op_mem(compiler, mem_flags, dst_r, src, srcw, TMP_REG1));
+ FAIL_IF(emit_op_mem(compiler, mem_flags, dst_r, src, srcw, TMP_REG2));
if (dst & SLJIT_MEM)
return emit_op_mem(compiler, mem_flags | STORE, dst_r, dst, dstw, TMP_REG2);
@@ -1397,11 +1637,24 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compile
}
flags = HAS_FLAGS(op_flags) ? SET_FLAGS : 0;
- mem_flags = WORD_SIZE;
- if (op_flags & SLJIT_32) {
- flags |= INT_OP;
+ switch (op) {
+ case SLJIT_REV_U16:
+ case SLJIT_REV_S16:
+ mem_flags = HALF_SIZE;
+ break;
+ case SLJIT_REV_U32:
+ case SLJIT_REV_S32:
mem_flags = INT_SIZE;
+ break;
+ default:
+ mem_flags = WORD_SIZE;
+
+ if (op_flags & SLJIT_32) {
+ flags |= INT_OP;
+ mem_flags = INT_SIZE;
+ }
+ break;
}
if (src & SLJIT_MEM) {
@@ -1438,7 +1691,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compile
mem_flags = INT_SIZE;
}
- if (dst == TMP_REG1)
+ if (dst == TMP_REG2)
flags |= UNUSED_RETURN;
if (src1 & SLJIT_MEM) {
@@ -1451,12 +1704,12 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compile
src2 = TMP_REG2;
}
- if (src1 & SLJIT_IMM)
+ if (src1 == SLJIT_IMM)
flags |= ARG1_IMM;
else
src1w = src1;
- if (src2 & SLJIT_IMM)
+ if (src2 == SLJIT_IMM)
flags |= ARG2_IMM;
else
src2w = src2;
@@ -1476,61 +1729,73 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2u(struct sljit_compiler *compil
CHECK(check_sljit_emit_op2(compiler, op, 1, 0, 0, src1, src1w, src2, src2w));
SLJIT_SKIP_CHECKS(compiler);
- return sljit_emit_op2(compiler, op, TMP_REG1, 0, src1, src1w, src2, src2w);
+ return sljit_emit_op2(compiler, op, TMP_REG2, 0, src1, src1w, src2, src2w);
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_shift_into(struct sljit_compiler *compiler, sljit_s32 op,
- sljit_s32 src_dst,
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2r(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst_reg,
sljit_s32 src1, sljit_sw src1w,
sljit_s32 src2, sljit_sw src2w)
{
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_op2r(compiler, op, dst_reg, src1, src1w, src2, src2w));
+
+ switch (GET_OPCODE(op)) {
+ case SLJIT_MULADD:
+ SLJIT_SKIP_CHECKS(compiler);
+ return sljit_emit_op2(compiler, op, dst_reg, 0, src1, src1w, src2, src2w);
+ }
+
+ return SLJIT_SUCCESS;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_shift_into(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst_reg,
+ sljit_s32 src1_reg,
+ sljit_s32 src2_reg,
+ sljit_s32 src3, sljit_sw src3w)
+{
sljit_ins inv_bits, imm;
sljit_s32 is_left;
sljit_sw mask;
CHECK_ERROR();
- CHECK(check_sljit_emit_shift_into(compiler, op, src_dst, src1, src1w, src2, src2w));
+ CHECK(check_sljit_emit_shift_into(compiler, op, dst_reg, src1_reg, src2_reg, src3, src3w));
is_left = (GET_OPCODE(op) == SLJIT_SHL || GET_OPCODE(op) == SLJIT_MSHL);
- if (src_dst == src1) {
+ if (src1_reg == src2_reg) {
SLJIT_SKIP_CHECKS(compiler);
- return sljit_emit_op2(compiler, (is_left ? SLJIT_ROTL : SLJIT_ROTR) | (op & SLJIT_32), src_dst, 0, src_dst, 0, src2, src2w);
+ return sljit_emit_op2(compiler, (is_left ? SLJIT_ROTL : SLJIT_ROTR) | (op & SLJIT_32), dst_reg, 0, src1_reg, 0, src3, src3w);
}
- ADJUST_LOCAL_OFFSET(src1, src1w);
- ADJUST_LOCAL_OFFSET(src2, src2w);
+ ADJUST_LOCAL_OFFSET(src3, src3w);
inv_bits = (op & SLJIT_32) ? W_OP : 0;
- mask = inv_bits ? 0x1f : 0x3f;
- if (src2 & SLJIT_IMM) {
- src2w &= mask;
+ if (src3 == SLJIT_IMM) {
+ mask = inv_bits ? 0x1f : 0x3f;
+ src3w &= mask;
- if (src2w == 0)
+ if (src3w == 0)
return SLJIT_SUCCESS;
- } else if (src2 & SLJIT_MEM) {
- FAIL_IF(emit_op_mem(compiler, inv_bits ? INT_SIZE : WORD_SIZE, TMP_REG2, src2, src2w, TMP_REG2));
- src2 = TMP_REG2;
- }
- if (src1 & SLJIT_MEM) {
- FAIL_IF(emit_op_mem(compiler, inv_bits ? INT_SIZE : WORD_SIZE, TMP_REG1, src1, src1w, TMP_REG1));
- src1 = TMP_REG1;
- } else if (src1 & SLJIT_IMM) {
- FAIL_IF(load_immediate(compiler, TMP_REG1, src1w));
- src1 = TMP_REG1;
- }
-
- if (src2 & SLJIT_IMM) {
if (is_left)
- src2w = (src2w ^ mask) + 1;
+ src3w = (src3w ^ mask) + 1;
+
+ return push_inst(compiler, (EXTR ^ (inv_bits | (inv_bits >> 9))) | RD(dst_reg)
+ | RN(is_left ? src1_reg : src2_reg) | RM(is_left ? src2_reg : src1_reg) | ((sljit_ins)src3w << 10));
+ }
- return push_inst(compiler, (EXTR ^ (inv_bits | (inv_bits >> 9))) | RD(src_dst)
- | RN(is_left ? src_dst : src1) | RM(is_left ? src1 : src_dst) | ((sljit_ins)src2w << 10));
+ if (src3 & SLJIT_MEM) {
+ FAIL_IF(emit_op_mem(compiler, inv_bits ? INT_SIZE : WORD_SIZE, TMP_REG2, src3, src3w, TMP_REG2));
+ src3 = TMP_REG2;
+ } else if (dst_reg == src3) {
+ FAIL_IF(push_inst(compiler, MOV | RD(TMP_REG2) | RM(src3)));
+ src3 = TMP_REG2;
}
- FAIL_IF(push_inst(compiler, ((is_left ? LSLV : LSRV) ^ inv_bits) | RD(src_dst) | RN(src_dst) | RM(src2)));
+ FAIL_IF(push_inst(compiler, ((is_left ? LSLV : LSRV) ^ inv_bits) | RD(dst_reg) | RN(src1_reg) | RM(src3)));
if (!(op & SLJIT_SHIFT_INTO_NON_ZERO)) {
/* Shift left/right by 1. */
@@ -1539,18 +1804,18 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_shift_into(struct sljit_compiler *
else
imm = (sljit_ins)(inv_bits ? ((31 << 16) | (30 << 10)) : ((63 << 16) | (62 << 10) | (1 << 22)));
- FAIL_IF(push_inst(compiler, (UBFM ^ inv_bits) | RD(TMP_REG1) | RN(src1) | imm));
+ FAIL_IF(push_inst(compiler, (UBFM ^ (inv_bits | (inv_bits >> 9))) | RD(TMP_REG1) | RN(src2_reg) | imm));
/* Set imm to mask. */
imm = (sljit_ins)(inv_bits ? (4 << 10) : ((5 << 10) | (1 << 22)));
- FAIL_IF(push_inst(compiler, (EORI ^ inv_bits) | RD(TMP_REG2) | RN(src2) | imm));
+ FAIL_IF(push_inst(compiler, (EORI ^ inv_bits) | RD(TMP_REG2) | RN(src3) | imm));
- src1 = TMP_REG1;
+ src2_reg = TMP_REG1;
} else
- FAIL_IF(push_inst(compiler, (SUB ^ inv_bits) | RD(TMP_REG2) | RN(TMP_ZERO) | RM(src2)));
+ FAIL_IF(push_inst(compiler, (SUB ^ inv_bits) | RD(TMP_REG2) | RN(TMP_ZERO) | RM(src3)));
- FAIL_IF(push_inst(compiler, ((is_left ? LSRV : LSLV) ^ inv_bits) | RD(TMP_REG1) | RN(src1) | RM(TMP_REG2)));
- return push_inst(compiler, (ORR ^ inv_bits) | RD(src_dst) | RN(src_dst) | RM(TMP_REG1));
+ FAIL_IF(push_inst(compiler, ((is_left ? LSRV : LSLV) ^ inv_bits) | RD(TMP_REG1) | RN(src2_reg) | RM(TMP_REG2)));
+ return push_inst(compiler, (ORR ^ inv_bits) | RD(dst_reg) | RN(dst_reg) | RM(TMP_REG1));
}
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_src(struct sljit_compiler *compiler, sljit_s32 op,
@@ -1563,7 +1828,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_src(struct sljit_compiler *comp
switch (op) {
case SLJIT_FAST_RETURN:
if (FAST_IS_REG(src))
- FAIL_IF(push_inst(compiler, ORR | RD(TMP_LR) | RN(TMP_ZERO) | RM(src)));
+ FAIL_IF(push_inst(compiler, MOV | RD(TMP_LR) | RM(src)));
else
FAIL_IF(emit_op_mem(compiler, WORD_SIZE, TMP_LR, src, srcw, TMP_REG1));
@@ -1593,15 +1858,42 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_src(struct sljit_compiler *comp
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 reg)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_dst(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw)
{
- CHECK_REG_INDEX(check_sljit_get_register_index(reg));
- return reg_map[reg];
+ sljit_s32 dst_r = TMP_LR;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_op_dst(compiler, op, dst, dstw));
+ ADJUST_LOCAL_OFFSET(dst, dstw);
+
+ switch (op) {
+ case SLJIT_FAST_ENTER:
+ if (FAST_IS_REG(dst))
+ return push_inst(compiler, MOV | RD(dst) | RM(TMP_LR));
+ break;
+ case SLJIT_GET_RETURN_ADDRESS:
+ dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
+ FAIL_IF(emit_op_mem(compiler, WORD_SIZE, dst_r, SLJIT_MEM1(SLJIT_SP), 0x8, TMP_REG2));
+ break;
+ }
+
+ if (dst & SLJIT_MEM)
+ return emit_op_mem(compiler, WORD_SIZE | STORE, dst_r, dst, dstw, TMP_REG2);
+
+ return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_float_register_index(sljit_s32 reg)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 type, sljit_s32 reg)
{
- CHECK_REG_INDEX(check_sljit_get_float_register_index(reg));
+ CHECK_REG_INDEX(check_sljit_get_register_index(type, reg));
+
+ if (type == SLJIT_GP_REGISTER)
+ return reg_map[reg];
+
+ if (type != SLJIT_FLOAT_REGISTER && type != SLJIT_SIMD_REG_64 && type != SLJIT_SIMD_REG_128)
+ return -1;
+
return freg_map[reg];
}
@@ -1635,18 +1927,18 @@ static sljit_s32 emit_fop_mem(struct sljit_compiler *compiler, sljit_s32 flags,
return push_inst(compiler, STR_FR | type | VT(reg)
| RN(arg & REG_MASK) | RM(OFFS_REG(arg)) | (argw ? (1 << 12) : 0));
- FAIL_IF(push_inst(compiler, ADD | RD(TMP_REG1) | RN(arg & REG_MASK) | RM(OFFS_REG(arg)) | ((sljit_ins)argw << 10)));
- return push_inst(compiler, STR_FI | type | VT(reg) | RN(TMP_REG1));
+ FAIL_IF(push_inst(compiler, ADD | RD(TMP_REG2) | RN(arg & REG_MASK) | RM(OFFS_REG(arg)) | ((sljit_ins)argw << 10)));
+ return push_inst(compiler, STR_FI | type | VT(reg) | RN(TMP_REG2));
}
arg &= REG_MASK;
if (!arg) {
- FAIL_IF(load_immediate(compiler, TMP_REG1, argw & ~(0xfff << shift)));
+ FAIL_IF(load_immediate(compiler, TMP_REG2, argw & ~(0xfff << shift)));
argw = (argw >> shift) & 0xfff;
- return push_inst(compiler, STR_FI | type | VT(reg) | RN(TMP_REG1) | ((sljit_ins)argw << 10));
+ return push_inst(compiler, STR_FI | type | VT(reg) | RN(TMP_REG2) | ((sljit_ins)argw << 10));
}
if (argw >= 0 && (argw & ((1 << shift) - 1)) == 0) {
@@ -1654,18 +1946,18 @@ static sljit_s32 emit_fop_mem(struct sljit_compiler *compiler, sljit_s32 flags,
return push_inst(compiler, STR_FI | type | VT(reg) | RN(arg) | ((sljit_ins)argw << (10 - shift)));
if (argw <= 0xffffff) {
- FAIL_IF(push_inst(compiler, ADDI | (1 << 22) | RD(TMP_REG1) | RN(arg) | (((sljit_ins)argw >> 12) << 10)));
+ FAIL_IF(push_inst(compiler, ADDI | (1 << 22) | RD(TMP_REG2) | RN(arg) | (((sljit_ins)argw >> 12) << 10)));
argw = ((argw & 0xfff) >> shift);
- return push_inst(compiler, STR_FI | type | VT(reg) | RN(TMP_REG1) | ((sljit_ins)argw << 10));
+ return push_inst(compiler, STR_FI | type | VT(reg) | RN(TMP_REG2) | ((sljit_ins)argw << 10));
}
}
if (argw <= 255 && argw >= -256)
return push_inst(compiler, STUR_FI | type | VT(reg) | RN(arg) | (((sljit_ins)argw & 0x1ff) << 12));
- FAIL_IF(load_immediate(compiler, TMP_REG1, argw));
- return push_inst(compiler, STR_FR | type | VT(reg) | RN(arg) | RM(TMP_REG1));
+ FAIL_IF(load_immediate(compiler, TMP_REG2, argw));
+ return push_inst(compiler, STR_FR | type | VT(reg) | RN(arg) | RM(TMP_REG2));
}
static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_compiler *compiler, sljit_s32 op,
@@ -1679,7 +1971,7 @@ static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_comp
inv_bits |= W_OP;
if (src & SLJIT_MEM) {
- emit_fop_mem(compiler, (op & SLJIT_32) ? INT_SIZE : WORD_SIZE, TMP_FREG1, src, srcw);
+ FAIL_IF(emit_fop_mem(compiler, (op & SLJIT_32) ? INT_SIZE : WORD_SIZE, TMP_FREG1, src, srcw));
src = TMP_FREG1;
}
@@ -1690,34 +1982,59 @@ static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_comp
return SLJIT_SUCCESS;
}
-static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_sw(struct sljit_compiler *compiler, sljit_s32 op,
+static sljit_s32 sljit_emit_fop1_conv_f64_from_w(struct sljit_compiler *compiler, sljit_ins ins,
sljit_s32 dst, sljit_sw dstw,
sljit_s32 src, sljit_sw srcw)
{
sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
- sljit_ins inv_bits = (op & SLJIT_32) ? (1 << 22) : 0;
-
- if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_S32)
- inv_bits |= W_OP;
if (src & SLJIT_MEM) {
- emit_op_mem(compiler, ((GET_OPCODE(op) == SLJIT_CONV_F64_FROM_S32) ? INT_SIZE : WORD_SIZE), TMP_REG1, src, srcw, TMP_REG1);
+ emit_op_mem(compiler, (ins & W_OP) ? WORD_SIZE : INT_SIZE, TMP_REG1, src, srcw, TMP_REG1);
src = TMP_REG1;
- } else if (src & SLJIT_IMM) {
- if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_S32)
- srcw = (sljit_s32)srcw;
-
+ } else if (src == SLJIT_IMM) {
FAIL_IF(load_immediate(compiler, TMP_REG1, srcw));
src = TMP_REG1;
}
- FAIL_IF(push_inst(compiler, (SCVTF ^ inv_bits) | VD(dst_r) | RN(src)));
+ FAIL_IF(push_inst(compiler, ins | VD(dst_r) | RN(src)));
if (dst & SLJIT_MEM)
- return emit_fop_mem(compiler, ((op & SLJIT_32) ? INT_SIZE : WORD_SIZE) | STORE, TMP_FREG1, dst, dstw);
+ return emit_fop_mem(compiler, ((ins & (1 << 22)) ? WORD_SIZE : INT_SIZE) | STORE, TMP_FREG1, dst, dstw);
return SLJIT_SUCCESS;
}
+static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_sw(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
+{
+ sljit_ins inv_bits = (op & SLJIT_32) ? (1 << 22) : 0;
+
+ if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_S32) {
+ inv_bits |= W_OP;
+
+ if (src == SLJIT_IMM)
+ srcw = (sljit_s32)srcw;
+ }
+
+ return sljit_emit_fop1_conv_f64_from_w(compiler, SCVTF ^ inv_bits, dst, dstw, src, srcw);
+}
+
+static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_uw(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
+{
+ sljit_ins inv_bits = (op & SLJIT_32) ? (1 << 22) : 0;
+
+ if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_U32) {
+ inv_bits |= W_OP;
+
+ if (src == SLJIT_IMM)
+ srcw = (sljit_u32)srcw;
+ }
+
+ return sljit_emit_fop1_conv_f64_from_w(compiler, UCVTF ^ inv_bits, dst, dstw, src, srcw);
+}
+
static SLJIT_INLINE sljit_s32 sljit_emit_fop1_cmp(struct sljit_compiler *compiler, sljit_s32 op,
sljit_s32 src1, sljit_sw src1w,
sljit_s32 src2, sljit_sw src2w)
@@ -1726,16 +2043,22 @@ static SLJIT_INLINE sljit_s32 sljit_emit_fop1_cmp(struct sljit_compiler *compile
sljit_ins inv_bits = (op & SLJIT_32) ? (1 << 22) : 0;
if (src1 & SLJIT_MEM) {
- emit_fop_mem(compiler, mem_flags, TMP_FREG1, src1, src1w);
+ FAIL_IF(emit_fop_mem(compiler, mem_flags, TMP_FREG1, src1, src1w));
src1 = TMP_FREG1;
}
if (src2 & SLJIT_MEM) {
- emit_fop_mem(compiler, mem_flags, TMP_FREG2, src2, src2w);
+ FAIL_IF(emit_fop_mem(compiler, mem_flags, TMP_FREG2, src2, src2w));
src2 = TMP_FREG2;
}
- return push_inst(compiler, (FCMP ^ inv_bits) | VN(src1) | VM(src2));
+ FAIL_IF(push_inst(compiler, (FCMP ^ inv_bits) | VN(src1) | VM(src2)));
+
+ if (GET_FLAG_TYPE(op) != SLJIT_UNORDERED_OR_EQUAL)
+ return SLJIT_SUCCESS;
+
+ FAIL_IF(push_inst(compiler, CSINC | (0x0 << 12) | RD(TMP_REG1) | RN(TMP_ZERO) | RM(TMP_ZERO)));
+ return push_inst(compiler, CCMPI | (0x0 << 16) | (0x7 << 12) | RN(TMP_REG1) | 0x4);
}
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compiler, sljit_s32 op,
@@ -1754,14 +2077,14 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compil
dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
if (src & SLJIT_MEM) {
- emit_fop_mem(compiler, (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_F32) ? (mem_flags ^ 0x1) : mem_flags, dst_r, src, srcw);
+ FAIL_IF(emit_fop_mem(compiler, (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_F32) ? (mem_flags ^ 0x1) : mem_flags, dst_r, src, srcw));
src = dst_r;
}
switch (GET_OPCODE(op)) {
case SLJIT_MOV_F64:
if (src != dst_r) {
- if (dst_r != TMP_FREG1)
+ if (!(dst & SLJIT_MEM))
FAIL_IF(push_inst(compiler, (FMOV ^ inv_bits) | VD(dst_r) | VN(src)));
else
dst_r = src;
@@ -1799,11 +2122,11 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compil
dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
if (src1 & SLJIT_MEM) {
- emit_fop_mem(compiler, mem_flags, TMP_FREG1, src1, src1w);
+ FAIL_IF(emit_fop_mem(compiler, mem_flags, TMP_FREG1, src1, src1w));
src1 = TMP_FREG1;
}
if (src2 & SLJIT_MEM) {
- emit_fop_mem(compiler, mem_flags, TMP_FREG2, src2, src2w);
+ FAIL_IF(emit_fop_mem(compiler, mem_flags, TMP_FREG2, src2, src2w));
src2 = TMP_FREG2;
}
@@ -1820,6 +2143,11 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compil
case SLJIT_DIV_F64:
FAIL_IF(push_inst(compiler, (FDIV ^ inv_bits) | VD(dst_r) | VN(src1) | VM(src2)));
break;
+ case SLJIT_COPYSIGN_F64:
+ FAIL_IF(push_inst(compiler, (FMOV_R ^ ((op & SLJIT_32) ? (W_OP | (1 << 22)) : 0)) | VN(src2) | RD(TMP_REG1)));
+ FAIL_IF(push_inst(compiler, (FABS ^ inv_bits) | VD(dst_r) | VN(src1)));
+ FAIL_IF(push_inst(compiler, TBZ | ((op & SLJIT_32) ? 0 : ((sljit_ins)1 << 31)) | (0x1f << 19) | (2 << 5) | RT(TMP_REG1)));
+ return push_inst(compiler, (FNEG ^ inv_bits) | VD(dst_r) | VN(dst_r));
}
if (!(dst & SLJIT_MEM))
@@ -1827,21 +2155,79 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compil
return emit_fop_mem(compiler, mem_flags | STORE, TMP_FREG1, dst, dstw);
}
-/* --------------------------------------------------------------------- */
-/* Other instructions */
-/* --------------------------------------------------------------------- */
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fset32(struct sljit_compiler *compiler,
+ sljit_s32 freg, sljit_f32 value)
+{
+ sljit_u32 exp;
+ union {
+ sljit_u32 imm;
+ sljit_f32 value;
+ } u;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_fset32(compiler, freg, value));
+
+ u.value = value;
+
+ if (u.imm == 0)
+ return push_inst(compiler, (FMOV_R ^ (W_OP | (1 << 22))) | RN(TMP_ZERO) | VD(freg) | (1 << 16));
+
+ if ((u.imm << (32 - 19)) == 0) {
+ exp = (u.imm >> (23 + 2)) & 0x3f;
+
+ if (exp == 0x20 || exp == 0x1f)
+ return push_inst(compiler, (FMOV_I ^ (1 << 22)) | (sljit_ins)((((u.imm >> 24) & 0x80) | ((u.imm >> 19) & 0x7f)) << 13) | VD(freg));
+ }
+
+ FAIL_IF(load_immediate(compiler, TMP_REG1, (sljit_s32)u.imm));
+ return push_inst(compiler, (FMOV_R ^ (W_OP | (1 << 22))) | RN(TMP_REG1) | VD(freg) | (1 << 16));
+}
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fset64(struct sljit_compiler *compiler,
+ sljit_s32 freg, sljit_f64 value)
{
+ sljit_uw exp;
+ union {
+ sljit_uw imm;
+ sljit_f64 value;
+ } u;
+
CHECK_ERROR();
- CHECK(check_sljit_emit_fast_enter(compiler, dst, dstw));
- ADJUST_LOCAL_OFFSET(dst, dstw);
+ CHECK(check_sljit_emit_fset64(compiler, freg, value));
+
+ u.value = value;
+
+ if (u.imm == 0)
+ return push_inst(compiler, FMOV_R | RN(TMP_ZERO) | VD(freg) | (sljit_ins)1 << 16);
+
+ if ((u.imm << (64 - 48)) == 0) {
+ exp = (u.imm >> (52 + 2)) & 0x1ff;
- if (FAST_IS_REG(dst))
- return push_inst(compiler, ORR | RD(dst) | RN(TMP_ZERO) | RM(TMP_LR));
+ if (exp == 0x100 || exp == 0xff)
+ return push_inst(compiler, FMOV_I | (sljit_ins)((((u.imm >> 56) & 0x80) | ((u.imm >> 48) & 0x7f)) << 13) | VD(freg));
+ }
- /* Memory. */
- return emit_op_mem(compiler, WORD_SIZE | STORE, TMP_LR, dst, dstw, TMP_REG1);
+ FAIL_IF(load_immediate(compiler, TMP_REG1, (sljit_sw)u.imm));
+ return push_inst(compiler, FMOV_R | RN(TMP_REG1) | VD(freg) | (1 << 16));
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fcopy(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 freg, sljit_s32 reg)
+{
+ sljit_ins inst;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_fcopy(compiler, op, freg, reg));
+
+ if (GET_OPCODE(op) == SLJIT_COPY_TO_F64)
+ inst = FMOV_R | RN(reg) | VD(freg) | (1 << 16);
+ else
+ inst = FMOV_R | VN(freg) | RD(reg);
+
+ if (op & SLJIT_32)
+ inst ^= W_OP | (1 << 22);
+
+ return push_inst(compiler, inst);
}
/* --------------------------------------------------------------------- */
@@ -1852,15 +2238,17 @@ static sljit_ins get_cc(struct sljit_compiler *compiler, sljit_s32 type)
{
switch (type) {
case SLJIT_EQUAL:
+ case SLJIT_ATOMIC_STORED:
case SLJIT_F_EQUAL:
case SLJIT_ORDERED_EQUAL:
- case SLJIT_UNORDERED_OR_EQUAL: /* Not supported. */
+ case SLJIT_UNORDERED_OR_EQUAL:
return 0x1;
case SLJIT_NOT_EQUAL:
+ case SLJIT_ATOMIC_NOT_STORED:
case SLJIT_F_NOT_EQUAL:
case SLJIT_UNORDERED_OR_NOT_EQUAL:
- case SLJIT_ORDERED_NOT_EQUAL: /* Not supported. */
+ case SLJIT_ORDERED_NOT_EQUAL:
return 0x0;
case SLJIT_CARRY:
@@ -1966,14 +2354,14 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compile
if (type < SLJIT_JUMP) {
jump->flags |= IS_COND;
PTR_FAIL_IF(push_inst(compiler, B_CC | (6 << 5) | get_cc(compiler, type)));
- }
- else if (type >= SLJIT_FAST_CALL)
+ } else if (type >= SLJIT_FAST_CALL)
jump->flags |= IS_BL;
- PTR_FAIL_IF(emit_imm64_const(compiler, TMP_REG1, 0));
jump->addr = compiler->size;
- PTR_FAIL_IF(push_inst(compiler, ((type >= SLJIT_FAST_CALL) ? BLR : BR) | RN(TMP_REG1)));
+ PTR_FAIL_IF(push_inst(compiler, ((type >= SLJIT_FAST_CALL) ? BLR : BR) | RN(TMP_REG2)));
+ /* Maximum number of instructions required for generating a constant. */
+ compiler->size += JUMP_MAX_SIZE - 1;
return jump;
}
@@ -2011,7 +2399,7 @@ static SLJIT_INLINE struct sljit_jump* emit_cmp_to0(struct sljit_compiler *compi
PTR_FAIL_IF(emit_op_mem(compiler, inv_bits ? INT_SIZE : WORD_SIZE, TMP_REG1, src, srcw, TMP_REG1));
src = TMP_REG1;
}
- else if (src & SLJIT_IMM) {
+ else if (src == SLJIT_IMM) {
PTR_FAIL_IF(load_immediate(compiler, TMP_REG1, srcw));
src = TMP_REG1;
}
@@ -2022,9 +2410,11 @@ static SLJIT_INLINE struct sljit_jump* emit_cmp_to0(struct sljit_compiler *compi
inv_bits |= 1 << 24;
PTR_FAIL_IF(push_inst(compiler, (CBZ ^ inv_bits) | (6 << 5) | RT(src)));
- PTR_FAIL_IF(emit_imm64_const(compiler, TMP_REG1, 0));
jump->addr = compiler->size;
- PTR_FAIL_IF(push_inst(compiler, BR | RN(TMP_REG1)));
+ PTR_FAIL_IF(push_inst(compiler, BR | RN(TMP_REG2)));
+
+ /* Maximum number of instructions required for generating a constant. */
+ compiler->size += JUMP_MAX_SIZE - 1;
return jump;
}
@@ -2035,11 +2425,11 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compi
CHECK_ERROR();
CHECK(check_sljit_emit_ijump(compiler, type, src, srcw));
- if (!(src & SLJIT_IMM)) {
+ if (src != SLJIT_IMM) {
if (src & SLJIT_MEM) {
ADJUST_LOCAL_OFFSET(src, srcw);
- FAIL_IF(emit_op_mem(compiler, WORD_SIZE, TMP_REG1, src, srcw, TMP_REG1));
- src = TMP_REG1;
+ FAIL_IF(emit_op_mem(compiler, WORD_SIZE, TMP_REG2, src, srcw, TMP_REG2));
+ src = TMP_REG2;
}
return push_inst(compiler, ((type >= SLJIT_FAST_CALL) ? BLR : BR) | RN(src));
}
@@ -2050,9 +2440,10 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compi
set_jump(jump, compiler, JUMP_ADDR | ((type >= SLJIT_FAST_CALL) ? IS_BL : 0));
jump->u.target = (sljit_uw)srcw;
- FAIL_IF(emit_imm64_const(compiler, TMP_REG1, 0));
jump->addr = compiler->size;
- return push_inst(compiler, ((type >= SLJIT_FAST_CALL) ? BLR : BR) | RN(TMP_REG1));
+ /* Maximum number of instructions required for generating a constant. */
+ compiler->size += JUMP_MAX_SIZE - 1;
+ return push_inst(compiler, ((type >= SLJIT_FAST_CALL) ? BLR : BR) | RN(TMP_REG2));
}
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_icall(struct sljit_compiler *compiler, sljit_s32 type,
@@ -2071,7 +2462,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_icall(struct sljit_compiler *compi
if (type & SLJIT_CALL_RETURN) {
if (src >= SLJIT_FIRST_SAVED_REG && src <= (SLJIT_S0 - SLJIT_KEPT_SAVEDS_COUNT(compiler->options))) {
- FAIL_IF(push_inst(compiler, ORR | RD(TMP_REG1) | RN(TMP_ZERO) | RM(src)));
+ FAIL_IF(push_inst(compiler, MOV | RD(TMP_REG1) | RM(src)));
src = TMP_REG1;
}
@@ -2100,7 +2491,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *co
if (GET_OPCODE(op) < SLJIT_ADD) {
FAIL_IF(push_inst(compiler, CSINC | (cc << 12) | RD(dst_r) | RN(TMP_ZERO) | RM(TMP_ZERO)));
- if (dst_r == TMP_REG1) {
+ if (dst & SLJIT_MEM) {
mem_flags = (GET_OPCODE(op) == SLJIT_MOV ? WORD_SIZE : INT_SIZE) | STORE;
return emit_op_mem(compiler, mem_flags, TMP_REG1, dst, dstw, TMP_REG2);
}
@@ -2131,27 +2522,53 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *co
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_cmov(struct sljit_compiler *compiler, sljit_s32 type,
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_select(struct sljit_compiler *compiler, sljit_s32 type,
sljit_s32 dst_reg,
- sljit_s32 src, sljit_sw srcw)
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2_reg)
{
sljit_ins inv_bits = (type & SLJIT_32) ? W_OP : 0;
sljit_ins cc;
CHECK_ERROR();
- CHECK(check_sljit_emit_cmov(compiler, type, dst_reg, src, srcw));
+ CHECK(check_sljit_emit_select(compiler, type, dst_reg, src1, src1w, src2_reg));
+
+ ADJUST_LOCAL_OFFSET(src1, src1w);
- if (SLJIT_UNLIKELY(src & SLJIT_IMM)) {
+ if (src1 == SLJIT_IMM) {
if (type & SLJIT_32)
- srcw = (sljit_s32)srcw;
- FAIL_IF(load_immediate(compiler, TMP_REG1, srcw));
- src = TMP_REG1;
- srcw = 0;
+ src1w = (sljit_s32)src1w;
+ FAIL_IF(load_immediate(compiler, TMP_REG2, src1w));
+ src1 = TMP_REG2;
+ } else if (src1 & SLJIT_MEM) {
+ FAIL_IF(emit_op_mem(compiler, WORD_SIZE, TMP_REG2, src1, src1w, TMP_REG2));
+ src1 = TMP_REG2;
}
cc = get_cc(compiler, type & ~SLJIT_32);
+ return push_inst(compiler, (CSEL ^ inv_bits) | (cc << 12) | RD(dst_reg) | RN(src2_reg) | RM(src1));
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fselect(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 dst_freg,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2_freg)
+{
+ sljit_ins inv_bits = (type & SLJIT_32) ? (1 << 22) : 0;
+ sljit_ins cc;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_fselect(compiler, type, dst_freg, src1, src1w, src2_freg));
+
+ ADJUST_LOCAL_OFFSET(src1, src1w);
+
+ if (src1 & SLJIT_MEM) {
+ FAIL_IF(emit_fop_mem(compiler, (type & SLJIT_32) ? INT_SIZE : WORD_SIZE, TMP_FREG2, src1, src1w));
+ src1 = TMP_FREG2;
+ }
- return push_inst(compiler, (CSEL ^ inv_bits) | (cc << 12) | RD(dst_reg) | RN(dst_reg) | RM(src));
+ cc = get_cc(compiler, type & ~SLJIT_32);
+ return push_inst(compiler, (FCSEL ^ inv_bits) | (cc << 12) | VD(dst_freg) | VN(src2_freg) | VM(src1));
}
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_mem(struct sljit_compiler *compiler, sljit_s32 type,
@@ -2308,6 +2725,661 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fmem_update(struct sljit_compiler
return push_inst(compiler, inst | VT(freg) | RN(mem & REG_MASK) | (sljit_ins)((memw & 0x1ff) << 12));
}
+static sljit_s32 sljit_emit_simd_mem_offset(struct sljit_compiler *compiler, sljit_s32 *mem_ptr, sljit_sw memw)
+{
+ sljit_ins ins;
+ sljit_s32 mem = *mem_ptr;
+
+ if (SLJIT_UNLIKELY(mem & OFFS_REG_MASK)) {
+ *mem_ptr = TMP_REG2;
+ return push_inst(compiler, ADD | RD(TMP_REG2) | RN(mem & REG_MASK) | RM(OFFS_REG(mem)) | ((sljit_ins)(memw & 0x3) << 10));
+ }
+
+ if (!(mem & REG_MASK)) {
+ *mem_ptr = TMP_REG2;
+ return load_immediate(compiler, TMP_REG2, memw);
+ }
+
+ mem &= REG_MASK;
+
+ if (memw == 0) {
+ *mem_ptr = mem;
+ return SLJIT_SUCCESS;
+ }
+
+ *mem_ptr = TMP_REG2;
+
+ if (memw < -0xffffff || memw > 0xffffff) {
+ FAIL_IF(load_immediate(compiler, TMP_REG2, memw));
+ return push_inst(compiler, ADD | RD(TMP_REG2) | RN(TMP_REG2) | RM(mem));
+ }
+
+ ins = ADDI;
+
+ if (memw < 0) {
+ memw = -memw;
+ ins = SUBI;
+ }
+
+ if (memw > 0xfff) {
+ FAIL_IF(push_inst(compiler, ins | (1 << 22) | RD(TMP_REG2) | RN(mem) | ((sljit_ins)(memw >> 12) << 10)));
+
+ memw &= 0xfff;
+ if (memw == 0)
+ return SLJIT_SUCCESS;
+
+ mem = TMP_REG2;
+ }
+
+ return push_inst(compiler, ins | RD(TMP_REG2) | RN(mem) | ((sljit_ins)memw << 10));
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_mov(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
+ sljit_s32 srcdst, sljit_sw srcdstw)
+{
+ sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
+ sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
+ sljit_ins ins;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_mov(compiler, type, freg, srcdst, srcdstw));
+
+ ADJUST_LOCAL_OFFSET(srcdst, srcdstw);
+
+ if (reg_size != 3 && reg_size != 4)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if ((type & SLJIT_SIMD_FLOAT) && (elem_size < 2 || elem_size > 3))
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ if (!(srcdst & SLJIT_MEM)) {
+ if (type & SLJIT_SIMD_STORE)
+ ins = VD(srcdst) | VN(freg) | VM(freg);
+ else
+ ins = VD(freg) | VN(srcdst) | VM(srcdst);
+
+ if (reg_size == 4)
+ ins |= (1 << 30);
+
+ return push_inst(compiler, ORR_v | ins);
+ }
+
+ FAIL_IF(sljit_emit_simd_mem_offset(compiler, &srcdst, srcdstw));
+
+ if (elem_size > 3)
+ elem_size = 3;
+
+ ins = (type & SLJIT_SIMD_STORE) ? ST1 : LD1;
+
+ if (reg_size == 4)
+ ins |= (1 << 30);
+
+ return push_inst(compiler, ins | ((sljit_ins)elem_size << 10) | RN(srcdst) | VT(freg));
+}
+
+static sljit_ins simd_get_imm(sljit_s32 elem_size, sljit_uw value)
+{
+ sljit_ins result;
+
+ if (elem_size > 2 && (sljit_u32)value == (value >> 32)) {
+ elem_size = 2;
+ value = (sljit_u32)value;
+ }
+
+ if (elem_size == 2 && (sljit_u16)value == (value >> 16)) {
+ elem_size = 1;
+ value = (sljit_u16)value;
+ }
+
+ if (elem_size == 1 && (sljit_u8)value == (value >> 8)) {
+ elem_size = 0;
+ value = (sljit_u8)value;
+ }
+
+ switch (elem_size) {
+ case 0:
+ SLJIT_ASSERT(value <= 0xff);
+ result = 0xe000;
+ break;
+ case 1:
+ SLJIT_ASSERT(value <= 0xffff);
+ result = 0;
+
+ while (1) {
+ if (value <= 0xff) {
+ result |= 0x8000;
+ break;
+ }
+
+ if ((value & 0xff) == 0) {
+ value >>= 8;
+ result |= 0xa000;
+ break;
+ }
+
+ if (result != 0)
+ return ~(sljit_ins)0;
+
+ value ^= (sljit_uw)0xffff;
+ result = (1 << 29);
+ }
+ break;
+ case 2:
+ SLJIT_ASSERT(value <= 0xffffffff);
+ result = 0;
+
+ while (1) {
+ if (value <= 0xff) {
+ result |= 0x0000;
+ break;
+ }
+
+ if ((value & ~(sljit_uw)0xff00) == 0) {
+ value >>= 8;
+ result |= 0x2000;
+ break;
+ }
+
+ if ((value & ~(sljit_uw)0xff0000) == 0) {
+ value >>= 16;
+ result |= 0x4000;
+ break;
+ }
+
+ if ((value & ~(sljit_uw)0xff000000) == 0) {
+ value >>= 24;
+ result |= 0x6000;
+ break;
+ }
+
+ if ((value & (sljit_uw)0xff) == 0xff && (value >> 16) == 0) {
+ value >>= 8;
+ result |= 0xc000;
+ break;
+ }
+
+ if ((value & (sljit_uw)0xffff) == 0xffff && (value >> 24) == 0) {
+ value >>= 16;
+ result |= 0xd000;
+ break;
+ }
+
+ if (result != 0)
+ return ~(sljit_ins)0;
+
+ value ^= (sljit_uw)0xffffffff;
+ result = (1 << 29);
+ }
+ break;
+ default:
+ return ~(sljit_ins)0;
+ }
+
+ return (((sljit_ins)value & 0x1f) << 5) | (((sljit_ins)value & 0xe0) << 11) | result;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_replicate(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
+ sljit_s32 src, sljit_sw srcw)
+{
+ sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
+ sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
+ sljit_ins ins, imm;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_replicate(compiler, type, freg, src, srcw));
+
+ ADJUST_LOCAL_OFFSET(src, srcw);
+
+ if (reg_size != 3 && reg_size != 4)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if ((type & SLJIT_SIMD_FLOAT) && (elem_size < 2 || elem_size > 3))
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ if (src & SLJIT_MEM) {
+ FAIL_IF(sljit_emit_simd_mem_offset(compiler, &src, srcw));
+
+ ins = (sljit_ins)elem_size << 10;
+
+ if (reg_size == 4)
+ ins |= (sljit_ins)1 << 30;
+
+ return push_inst(compiler, LD1R | ins | RN(src) | VT(freg));
+ }
+
+ ins = (sljit_ins)1 << (16 + elem_size);
+
+ if (reg_size == 4)
+ ins |= (sljit_ins)1 << 30;
+
+ if (type & SLJIT_SIMD_FLOAT) {
+ if (src == SLJIT_IMM)
+ return push_inst(compiler, MOVI | (ins & ((sljit_ins)1 << 30)) | VD(freg));
+
+ return push_inst(compiler, DUP_e | ins | VD(freg) | VN(src));
+ }
+
+ if (src == SLJIT_IMM) {
+ if (elem_size < 3)
+ srcw &= ((sljit_sw)1 << (((sljit_sw)1 << elem_size) << 3)) - 1;
+
+ imm = simd_get_imm(elem_size, (sljit_uw)srcw);
+
+ if (imm != ~(sljit_ins)0) {
+ imm |= ins & ((sljit_ins)1 << 30);
+
+ return push_inst(compiler, MOVI | imm | VD(freg));
+ }
+
+ FAIL_IF(load_immediate(compiler, TMP_REG2, srcw));
+ src = TMP_REG2;
+ }
+
+ return push_inst(compiler, DUP_g | ins | VD(freg) | RN(src));
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_lane_mov(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg, sljit_s32 lane_index,
+ sljit_s32 srcdst, sljit_sw srcdstw)
+{
+ sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
+ sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
+ sljit_ins ins;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_lane_mov(compiler, type, freg, lane_index, srcdst, srcdstw));
+
+ ADJUST_LOCAL_OFFSET(srcdst, srcdstw);
+
+ if (reg_size != 3 && reg_size != 4)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if ((type & SLJIT_SIMD_FLOAT) && (elem_size < 2 || elem_size > 3))
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ if (type & SLJIT_SIMD_LANE_ZERO) {
+ ins = (reg_size == 3) ? 0 : ((sljit_ins)1 << 30);
+
+ if ((type & SLJIT_SIMD_FLOAT) && freg == srcdst) {
+ FAIL_IF(push_inst(compiler, ORR_v | ins | VD(TMP_FREG1) | VN(freg) | VM(freg)));
+ srcdst = TMP_FREG1;
+ srcdstw = 0;
+ }
+
+ FAIL_IF(push_inst(compiler, MOVI | ins | VD(freg)));
+ }
+
+ if (srcdst & SLJIT_MEM) {
+ FAIL_IF(sljit_emit_simd_mem_offset(compiler, &srcdst, srcdstw));
+
+ if (elem_size == 3)
+ ins = 0x8400;
+ else if (elem_size == 0)
+ ins = 0;
+ else
+ ins = (sljit_ins)0x2000 << elem_size;
+
+ lane_index = lane_index << elem_size;
+ ins |= (sljit_ins)(((lane_index & 0x8) << 27) | ((lane_index & 0x7) << 10));
+
+ return push_inst(compiler, ((type & SLJIT_SIMD_STORE) ? ST1_s : LD1_s) | ins | RN(srcdst) | VT(freg));
+ }
+
+ if (type & SLJIT_SIMD_FLOAT) {
+ if (type & SLJIT_SIMD_STORE)
+ ins = INS_e | ((sljit_ins)1 << (16 + elem_size)) | ((sljit_ins)lane_index << (11 + elem_size)) | VD(srcdst) | VN(freg);
+ else
+ ins = INS_e | ((((sljit_ins)lane_index << 1) | 1) << (16 + elem_size)) | VD(freg) | VN(srcdst);
+
+ return push_inst(compiler, ins);
+ }
+
+ if (srcdst == SLJIT_IMM) {
+ if (elem_size < 3)
+ srcdstw &= ((sljit_sw)1 << (((sljit_sw)1 << elem_size) << 3)) - 1;
+
+ FAIL_IF(load_immediate(compiler, TMP_REG2, srcdstw));
+ srcdst = TMP_REG2;
+ }
+
+ if (type & SLJIT_SIMD_STORE) {
+ ins = RD(srcdst) | VN(freg);
+
+ if ((type & SLJIT_SIMD_LANE_SIGNED) && (elem_size < 2 || (elem_size == 2 && !(type & SLJIT_32)))) {
+ ins |= SMOV;
+
+ if (!(type & SLJIT_32))
+ ins |= (sljit_ins)1 << 30;
+ } else
+ ins |= UMOV;
+ } else
+ ins = INS | VD(freg) | RN(srcdst);
+
+ if (elem_size == 3)
+ ins |= (sljit_ins)1 << 30;
+
+ return push_inst(compiler, ins | ((((sljit_ins)lane_index << 1) | 1) << (16 + elem_size)));
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_lane_replicate(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
+ sljit_s32 src, sljit_s32 src_lane_index)
+{
+ sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
+ sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
+ sljit_ins ins;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_lane_replicate(compiler, type, freg, src, src_lane_index));
+
+ if (reg_size != 3 && reg_size != 4)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if ((type & SLJIT_SIMD_FLOAT) && (elem_size < 2 || elem_size > 3))
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ ins = (((sljit_ins)src_lane_index << 1) | 1) << (16 + elem_size);
+
+ if (reg_size == 4)
+ ins |= (sljit_ins)1 << 30;
+
+ return push_inst(compiler, DUP_e | ins | VD(freg) | VN(src));
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_extend(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
+ sljit_s32 src, sljit_sw srcw)
+{
+ sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
+ sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
+ sljit_s32 elem2_size = SLJIT_SIMD_GET_ELEM2_SIZE(type);
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_extend(compiler, type, freg, src, srcw));
+
+ ADJUST_LOCAL_OFFSET(src, srcw);
+
+ if (reg_size != 3 && reg_size != 4)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if ((type & SLJIT_SIMD_FLOAT) && (elem_size != 2 || elem2_size != 3))
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ if (src & SLJIT_MEM) {
+ FAIL_IF(sljit_emit_simd_mem_offset(compiler, &src, srcw));
+
+ if (reg_size == 4 && elem2_size - elem_size == 1)
+ FAIL_IF(push_inst(compiler, LD1 | ((sljit_ins)elem_size << 10) | RN(src) | VT(freg)));
+ else
+ FAIL_IF(push_inst(compiler, LD1_s | ((sljit_ins)0x2000 << (reg_size - elem2_size + elem_size)) | RN(src) | VT(freg)));
+ src = freg;
+ }
+
+ if (type & SLJIT_SIMD_FLOAT) {
+ SLJIT_ASSERT(reg_size == 4);
+ return push_inst(compiler, FCVTL | (1 << 22) | VD(freg) | VN(src));
+ }
+
+ do {
+ FAIL_IF(push_inst(compiler, ((type & SLJIT_SIMD_EXTEND_SIGNED) ? SSHLL : USHLL)
+ | ((sljit_ins)1 << (19 + elem_size)) | VD(freg) | VN(src)));
+ src = freg;
+ } while (++elem_size < elem2_size);
+
+ return SLJIT_SUCCESS;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_sign(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
+ sljit_s32 dst, sljit_sw dstw)
+{
+ sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
+ sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
+ sljit_ins ins, imms;
+ sljit_s32 dst_r;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_sign(compiler, type, freg, dst, dstw));
+
+ ADJUST_LOCAL_OFFSET(dst, dstw);
+
+ if (reg_size != 3 && reg_size != 4)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if ((type & SLJIT_SIMD_FLOAT) && (elem_size < 2 || elem_size > 3))
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ switch (elem_size) {
+ case 0:
+ imms = 0x643219;
+ ins = USHR | (0x9 << 16);
+ break;
+ case 1:
+ imms = (reg_size == 4) ? 0x643219 : 0x6231;
+ ins = USHR | (0x11 << 16);
+ break;
+ case 2:
+ imms = (reg_size == 4) ? 0x6231 : 0x61;
+ ins = USHR | (0x21 << 16);
+ break;
+ default:
+ imms = 0x61;
+ ins = USHR | (0x41 << 16);
+ break;
+ }
+
+ if (reg_size == 4)
+ ins |= (1 << 30);
+
+ FAIL_IF(push_inst(compiler, ins | VD(TMP_FREG1) | VN(freg)));
+
+ if (reg_size == 4 && elem_size > 0)
+ FAIL_IF(push_inst(compiler, XTN | ((sljit_ins)(elem_size - 1) << 22) | VD(TMP_FREG1) | VN(TMP_FREG1)));
+
+ if (imms >= 0x100) {
+ ins = (reg_size == 4 && elem_size == 0) ? (1 << 30) : 0;
+
+ do {
+ FAIL_IF(push_inst(compiler, USRA | ins | ((imms & 0xff) << 16) | VD(TMP_FREG1) | VN(TMP_FREG1)));
+ imms >>= 8;
+ } while (imms >= 0x100);
+ }
+
+ FAIL_IF(push_inst(compiler, USRA | (1 << 30) | (imms << 16) | VD(TMP_FREG1) | VN(TMP_FREG1)));
+
+ dst_r = FAST_IS_REG(dst) ? dst : TMP_REG2;
+ ins = (0x1 << 16);
+
+ if (reg_size == 4 && elem_size == 0) {
+ FAIL_IF(push_inst(compiler, INS_e | (0x3 << 16) | (0x8 << 11) | VD(TMP_FREG1) | VN(TMP_FREG1)));
+ ins = (0x2 << 16);
+ }
+
+ FAIL_IF(push_inst(compiler, UMOV | ins | RD(dst_r) | VN(TMP_FREG1)));
+
+ if (dst_r == TMP_REG2)
+ return emit_op_mem(compiler, STORE | ((type & SLJIT_32) ? INT_SIZE : WORD_SIZE), TMP_REG2, dst, dstw, TMP_REG1);
+
+ return SLJIT_SUCCESS;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_op2(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 dst_freg, sljit_s32 src1_freg, sljit_s32 src2_freg)
+{
+ sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
+ sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
+ sljit_ins ins = 0;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_op2(compiler, type, dst_freg, src1_freg, src2_freg));
+
+ if (reg_size != 3 && reg_size != 4)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if ((type & SLJIT_SIMD_FLOAT) && (elem_size < 2 || elem_size > 3))
+ return SLJIT_ERR_UNSUPPORTED;
+
+ switch (SLJIT_SIMD_GET_OPCODE(type)) {
+ case SLJIT_SIMD_OP2_AND:
+ ins = AND_v;
+ break;
+ case SLJIT_SIMD_OP2_OR:
+ ins = ORR_v;
+ break;
+ case SLJIT_SIMD_OP2_XOR:
+ ins = EOR_v;
+ break;
+ }
+
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ if (reg_size == 4)
+ ins |= (sljit_ins)1 << 30;
+
+ return push_inst(compiler, ins | VD(dst_freg) | VN(src1_freg) | VM(src2_freg));
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_atomic_load(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst_reg,
+ sljit_s32 mem_reg)
+{
+ sljit_ins ins;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_atomic_load(compiler, op, dst_reg, mem_reg));
+
+#ifdef __ARM_FEATURE_ATOMICS
+ switch (GET_OPCODE(op)) {
+ case SLJIT_MOV32:
+ case SLJIT_MOV_U32:
+ ins = LDR ^ (1 << 30);
+ break;
+ case SLJIT_MOV_U16:
+ ins = LDRH;
+ break;
+ case SLJIT_MOV_U8:
+ ins = LDRB;
+ break;
+ default:
+ ins = LDR;
+ break;
+ }
+#else /* !__ARM_FEATURE_ATOMICS */
+ switch (GET_OPCODE(op)) {
+ case SLJIT_MOV32:
+ case SLJIT_MOV_U32:
+ ins = LDXR ^ (1 << 30);
+ break;
+ case SLJIT_MOV_U8:
+ ins = LDXRB;
+ break;
+ case SLJIT_MOV_U16:
+ ins = LDXRH;
+ break;
+ default:
+ ins = LDXR;
+ break;
+ }
+#endif /* __ARM_FEATURE_ATOMICS */
+ return push_inst(compiler, ins | RN(mem_reg) | RT(dst_reg));
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_atomic_store(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 src_reg,
+ sljit_s32 mem_reg,
+ sljit_s32 temp_reg)
+{
+ sljit_ins ins;
+ sljit_s32 tmp = temp_reg;
+ sljit_ins cmp = 0;
+ sljit_ins inv_bits = W_OP;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_atomic_store(compiler, op, src_reg, mem_reg, temp_reg));
+
+#ifdef __ARM_FEATURE_ATOMICS
+ if (op & SLJIT_SET_ATOMIC_STORED)
+ cmp = (SUBS ^ W_OP) | RD(TMP_ZERO);
+
+ switch (GET_OPCODE(op)) {
+ case SLJIT_MOV32:
+ case SLJIT_MOV_U32:
+ ins = CAS ^ (1 << 30);
+ break;
+ case SLJIT_MOV_U16:
+ ins = CASH;
+ break;
+ case SLJIT_MOV_U8:
+ ins = CASB;
+ break;
+ default:
+ ins = CAS;
+ inv_bits = 0;
+ if (cmp)
+ cmp ^= W_OP;
+ break;
+ }
+
+ if (cmp) {
+ FAIL_IF(push_inst(compiler, (MOV ^ inv_bits) | RM(temp_reg) | RD(TMP_REG1)));
+ tmp = TMP_REG1;
+ }
+ FAIL_IF(push_inst(compiler, ins | RM(tmp) | RN(mem_reg) | RD(src_reg)));
+ if (!cmp)
+ return SLJIT_SUCCESS;
+
+ FAIL_IF(push_inst(compiler, cmp | RM(tmp) | RN(temp_reg)));
+ FAIL_IF(push_inst(compiler, (CSET ^ inv_bits) | RD(tmp)));
+ return push_inst(compiler, cmp | RM(tmp) | RN(TMP_ZERO));
+#else /* !__ARM_FEATURE_ATOMICS */
+ SLJIT_UNUSED_ARG(tmp);
+ SLJIT_UNUSED_ARG(inv_bits);
+
+ if (op & SLJIT_SET_ATOMIC_STORED)
+ cmp = (SUBI ^ W_OP) | (1 << 29);
+
+ switch (GET_OPCODE(op)) {
+ case SLJIT_MOV32:
+ case SLJIT_MOV_U32:
+ ins = STXR ^ (1 << 30);
+ break;
+ case SLJIT_MOV_U8:
+ ins = STXRB;
+ break;
+ case SLJIT_MOV_U16:
+ ins = STXRH;
+ break;
+ default:
+ ins = STXR;
+ break;
+ }
+
+ FAIL_IF(push_inst(compiler, ins | RM(TMP_REG1) | RN(mem_reg) | RT(src_reg)));
+ return cmp ? push_inst(compiler, cmp | RD(TMP_ZERO) | RN(TMP_REG1)) : SLJIT_SUCCESS;
+#endif /* __ARM_FEATURE_ATOMICS */
+}
+
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_local_base(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw offset)
{
sljit_s32 dst_reg;
@@ -2369,26 +3441,28 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compi
return const_;
}
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_put_label* sljit_emit_put_label(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_mov_addr(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
{
- struct sljit_put_label *put_label;
+ struct sljit_jump *jump;
sljit_s32 dst_r;
CHECK_ERROR_PTR();
- CHECK_PTR(check_sljit_emit_put_label(compiler, dst, dstw));
+ CHECK_PTR(check_sljit_emit_mov_addr(compiler, dst, dstw));
ADJUST_LOCAL_OFFSET(dst, dstw);
dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
- PTR_FAIL_IF(emit_imm64_const(compiler, dst_r, 0));
+ PTR_FAIL_IF(push_inst(compiler, RD(dst_r)));
- put_label = (struct sljit_put_label*)ensure_abuf(compiler, sizeof(struct sljit_put_label));
- PTR_FAIL_IF(!put_label);
- set_put_label(put_label, compiler, 1);
+ jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
+ PTR_FAIL_IF(!jump);
+ set_mov_addr(jump, compiler, 1);
+
+ compiler->size += 3;
if (dst & SLJIT_MEM)
PTR_FAIL_IF(emit_op_mem(compiler, WORD_SIZE | STORE, dst_r, dst, dstw, TMP_REG2));
- return put_label;
+ return jump;
}
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_jump_addr(sljit_uw addr, sljit_uw new_target, sljit_sw executable_offset)
diff --git a/src/3rdparty/pcre2/src/sljit/sljitNativeARM_T2_32.c b/src/3rdparty/pcre2/src/sljit/sljitNativeARM_T2_32.c
index 7d6bac077e..799954a859 100644
--- a/src/3rdparty/pcre2/src/sljit/sljitNativeARM_T2_32.c
+++ b/src/3rdparty/pcre2/src/sljit/sljitNativeARM_T2_32.c
@@ -49,8 +49,20 @@ static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 5] = {
0, 0, 1, 2, 3, 11, 10, 9, 8, 7, 6, 5, 4, 13, 12, 14, 15
};
-static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 3] = {
- 0, 0, 1, 2, 3, 4, 5, 15, 14, 13, 12, 11, 10, 9, 8, 6, 7
+static const sljit_u8 freg_map[((SLJIT_NUMBER_OF_FLOAT_REGISTERS + 2) << 1) + 1] = {
+ 0,
+ 0, 1, 2, 3, 4, 5, 15, 14, 13, 12, 11, 10, 9, 8,
+ 7, 6,
+ 0, 1, 2, 3, 4, 5, 15, 14, 13, 12, 11, 10, 9, 8,
+ 7, 6
+};
+
+static const sljit_u8 freg_ebit_map[((SLJIT_NUMBER_OF_FLOAT_REGISTERS + 2) << 1) + 1] = {
+ 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1
};
#define COPY_BITS(src, from, to, bits) \
@@ -75,13 +87,15 @@ static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 3] = {
(reg_map[reg1] <= 7 && reg_map[reg2] <= 7 && reg_map[reg3] <= 7)
/* Thumb32 encodings. */
-#define RD4(rd) ((sljit_ins)reg_map[rd] << 8)
-#define RN4(rn) ((sljit_ins)reg_map[rn] << 16)
#define RM4(rm) ((sljit_ins)reg_map[rm])
+#define RD4(rd) ((sljit_ins)reg_map[rd] << 8)
#define RT4(rt) ((sljit_ins)reg_map[rt] << 12)
-#define DD4(dd) ((sljit_ins)freg_map[dd] << 12)
-#define DN4(dn) ((sljit_ins)freg_map[dn] << 16)
-#define DM4(dm) ((sljit_ins)freg_map[dm])
+#define RN4(rn) ((sljit_ins)reg_map[rn] << 16)
+
+#define VM4(vm) (((sljit_ins)freg_map[vm]) | ((sljit_ins)freg_ebit_map[vm] << 5))
+#define VD4(vd) (((sljit_ins)freg_map[vd] << 12) | ((sljit_ins)freg_ebit_map[vd] << 22))
+#define VN4(vn) (((sljit_ins)freg_map[vn] << 16) | ((sljit_ins)freg_ebit_map[vn] << 7))
+
#define IMM5(imm) \
(COPY_BITS(imm, 2, 12, 3) | (((sljit_ins)imm & 0x3) << 6))
#define IMM12(imm) \
@@ -128,9 +142,12 @@ static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 3] = {
#define EORS 0x4040
#define EOR_W 0xea800000
#define IT 0xbf00
-#define LDR_SP 0x9800
#define LDR 0xf8d00000
+#define LDR_SP 0x9800
#define LDRD 0xe9500000
+#define LDREX 0xe8500f00
+#define LDREXB 0xe8d00f4f
+#define LDREXH 0xe8d00f5f
#define LDRI 0xf8500800
#define LSLS 0x4080
#define LSLSI 0x0000
@@ -140,6 +157,7 @@ static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 3] = {
#define LSRSI 0x0800
#define LSR_W 0xfa20f000
#define LSR_WI 0xea4f0010
+#define MLA 0xfb000000
#define MOV 0x4600
#define MOVS 0x0000
#define MOVSI 0x2000
@@ -160,6 +178,10 @@ static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 3] = {
#define POP_W 0xe8bd0000
#define PUSH 0xb400
#define PUSH_W 0xe92d0000
+#define REV 0xba00
+#define REV_W 0xfa90f080
+#define REV16 0xba40
+#define REV16_W 0xfa90f090
#define RBIT 0xfa90f0a0
#define RORS 0x41c0
#define ROR_W 0xfa60f000
@@ -171,8 +193,11 @@ static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 3] = {
#define SBC_W 0xeb600000
#define SDIV 0xfb90f0f0
#define SMULL 0xfb800000
-#define STRD 0xe9400000
#define STR_SP 0x9000
+#define STRD 0xe9400000
+#define STREX 0xe8400000
+#define STREXB 0xe8c00f40
+#define STREXH 0xe8c00f50
#define SUBS 0x1a00
#define SUBSI3 0x1e00
#define SUBSI8 0x3800
@@ -195,23 +220,57 @@ static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 3] = {
#define UXTH_W 0xfa1ff080
#define VABS_F32 0xeeb00ac0
#define VADD_F32 0xee300a00
+#define VAND 0xef000110
#define VCMP_F32 0xeeb40a40
#define VCVT_F32_S32 0xeeb80ac0
+#define VCVT_F32_U32 0xeeb80a40
#define VCVT_F64_F32 0xeeb70ac0
#define VCVT_S32_F32 0xeebd0ac0
#define VDIV_F32 0xee800a00
+#define VDUP 0xee800b10
+#define VDUP_s 0xffb00c00
+#define VEOR 0xff000110
+#define VLD1 0xf9200000
+#define VLD1_r 0xf9a00c00
+#define VLD1_s 0xf9a00000
#define VLDR_F32 0xed100a00
#define VMOV_F32 0xeeb00a40
#define VMOV 0xee000a10
#define VMOV2 0xec400a10
+#define VMOV_i 0xef800010
+#define VMOV_s 0xee000b10
+#define VMOVN 0xffb20200
#define VMRS 0xeef1fa10
#define VMUL_F32 0xee200a00
#define VNEG_F32 0xeeb10a40
+#define VORR 0xef200110
#define VPOP 0xecbd0b00
#define VPUSH 0xed2d0b00
+#define VSHLL 0xef800a10
+#define VSHR 0xef800010
+#define VSRA 0xef800110
+#define VST1 0xf9000000
+#define VST1_s 0xf9800000
#define VSTR_F32 0xed000a00
#define VSUB_F32 0xee300a40
+#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
+
+static sljit_s32 function_check_is_freg(struct sljit_compiler *compiler, sljit_s32 fr, sljit_s32 is_32)
+{
+ if (compiler->scratches == -1)
+ return 0;
+
+ if (is_32 && fr >= SLJIT_F64_SECOND(SLJIT_FR0))
+ fr -= SLJIT_F64_SECOND(0);
+
+ return (fr >= SLJIT_FR0 && fr < (SLJIT_FR0 + compiler->fscratches))
+ || (fr > (SLJIT_FS0 - compiler->fsaveds) && fr <= SLJIT_FS0)
+ || (fr >= SLJIT_TMP_FREGISTER_BASE && fr < (SLJIT_TMP_FREGISTER_BASE + SLJIT_NUMBER_OF_TEMPORARY_FLOAT_REGISTERS));
+}
+
+#endif /* SLJIT_ARGUMENT_CHECKS */
+
static sljit_s32 push_inst16(struct sljit_compiler *compiler, sljit_ins inst)
{
sljit_u16 *ptr;
@@ -234,7 +293,7 @@ static sljit_s32 push_inst32(struct sljit_compiler *compiler, sljit_ins inst)
return SLJIT_SUCCESS;
}
-static SLJIT_INLINE sljit_s32 emit_imm32_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_uw imm)
+static sljit_s32 emit_imm32_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_uw imm)
{
FAIL_IF(push_inst32(compiler, MOVW | RD4(dst)
| COPY_BITS(imm, 12, 16, 4) | COPY_BITS(imm, 11, 26, 1) | COPY_BITS(imm, 8, 12, 3) | (imm & 0xff)));
@@ -242,137 +301,262 @@ static SLJIT_INLINE sljit_s32 emit_imm32_const(struct sljit_compiler *compiler,
| COPY_BITS(imm, 12 + 16, 16, 4) | COPY_BITS(imm, 11 + 16, 26, 1) | COPY_BITS(imm, 8 + 16, 12, 3) | ((imm & 0xff0000) >> 16));
}
-static SLJIT_INLINE void modify_imm32_const(sljit_u16 *inst, sljit_uw new_imm)
+/* Dst must be in bits [11:8]. */
+static void set_imm32_const(sljit_u16 *inst, sljit_ins dst, sljit_uw new_imm)
{
- sljit_ins dst = inst[1] & 0x0f00;
- SLJIT_ASSERT(((inst[0] & 0xfbf0) == (MOVW >> 16)) && ((inst[2] & 0xfbf0) == (MOVT >> 16)) && dst == (inst[3] & 0x0f00));
inst[0] = (sljit_u16)((MOVW >> 16) | COPY_BITS(new_imm, 12, 0, 4) | COPY_BITS(new_imm, 11, 10, 1));
inst[1] = (sljit_u16)(dst | COPY_BITS(new_imm, 8, 12, 3) | (new_imm & 0xff));
inst[2] = (sljit_u16)((MOVT >> 16) | COPY_BITS(new_imm, 12 + 16, 0, 4) | COPY_BITS(new_imm, 11 + 16, 10, 1));
inst[3] = (sljit_u16)(dst | COPY_BITS(new_imm, 8 + 16, 12, 3) | ((new_imm & 0xff0000) >> 16));
}
-static SLJIT_INLINE sljit_s32 detect_jump_type(struct sljit_jump *jump, sljit_u16 *code_ptr, sljit_u16 *code, sljit_sw executable_offset)
+static SLJIT_INLINE void modify_imm32_const(sljit_u16 *inst, sljit_uw new_imm)
+{
+ sljit_ins dst = inst[1] & 0x0f00;
+ SLJIT_ASSERT(((inst[0] & 0xfbf0) == (MOVW >> 16)) && ((inst[2] & 0xfbf0) == (MOVT >> 16)) && dst == (inst[3] & 0x0f00));
+ set_imm32_const(inst, dst, new_imm);
+}
+
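+/* Selects the shortest branch encoding that reaches the target, records the
+   patch type in jump->flags and returns the adjusted code_ptr so that only
+   the space actually needed is kept. On the fall-through (exit) path the full
+   MOVW/MOVT constant form is used: the already copied branch (and the IT
+   prefix of a conditional jump) is moved behind the four halfwords reserved
+   for the immediate. */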
+static SLJIT_INLINE sljit_u16* detect_jump_type(struct sljit_jump *jump, sljit_u16 *code_ptr, sljit_u16 *code, sljit_sw executable_offset)
{
sljit_sw diff;
if (jump->flags & SLJIT_REWRITABLE_JUMP)
- return 0;
+ goto exit;
if (jump->flags & JUMP_ADDR) {
/* Branch to ARM code is not optimized yet. */
if (!(jump->u.target & 0x1))
- return 0;
- diff = ((sljit_sw)jump->u.target - (sljit_sw)(code_ptr + 2) - executable_offset) >> 1;
- }
- else {
- SLJIT_ASSERT(jump->flags & JUMP_LABEL);
- diff = ((sljit_sw)(code + jump->u.label->size) - (sljit_sw)(code_ptr + 2)) >> 1;
+ goto exit;
+ diff = (sljit_sw)jump->u.target - (sljit_sw)(code_ptr + 2) - executable_offset;
+ } else {
+ SLJIT_ASSERT(jump->u.label != NULL);
+ diff = (sljit_sw)(code + jump->u.label->size) - (sljit_sw)(code_ptr + 2);
}
if (jump->flags & IS_COND) {
SLJIT_ASSERT(!(jump->flags & IS_BL));
- if (diff <= 127 && diff >= -128) {
+ /* Size of the prefix IT instruction. */
+ diff += SSIZE_OF(u16);
+ if (diff <= 0xff && diff >= -0x100) {
jump->flags |= PATCH_TYPE1;
- return 5;
+ jump->addr = (sljit_uw)(code_ptr - 1);
+ return code_ptr - 1;
}
- if (diff <= 524287 && diff >= -524288) {
+ if (diff <= 0xfffff && diff >= -0x100000) {
jump->flags |= PATCH_TYPE2;
- return 4;
+ jump->addr = (sljit_uw)(code_ptr - 1);
+ return code_ptr;
}
- /* +1 comes from the prefix IT instruction. */
- diff--;
- if (diff <= 8388607 && diff >= -8388608) {
- jump->flags |= PATCH_TYPE3;
- return 3;
+ diff -= SSIZE_OF(u16);
+ } else if (jump->flags & IS_BL) {
+ /* Branch and link. */
+ if (diff <= 0xffffff && diff >= -0x1000000) {
+ jump->flags |= PATCH_TYPE5;
+ return code_ptr + 1;
}
+ goto exit;
+ } else if (diff <= 0x7ff && diff >= -0x800) {
+ jump->flags |= PATCH_TYPE3;
+ return code_ptr;
}
- else if (jump->flags & IS_BL) {
- if (diff <= 8388607 && diff >= -8388608) {
- jump->flags |= PATCH_BL;
- return 3;
- }
+
+ if (diff <= 0xffffff && diff >= -0x1000000) {
+ jump->flags |= PATCH_TYPE4;
+ return code_ptr + 1;
}
- else {
- if (diff <= 1023 && diff >= -1024) {
- jump->flags |= PATCH_TYPE4;
- return 4;
- }
- if (diff <= 8388607 && diff >= -8388608) {
- jump->flags |= PATCH_TYPE5;
- return 3;
- }
+
+exit:
+ code_ptr[4] = code_ptr[0];
+
+ if (jump->flags & IS_COND) {
+ code_ptr[3] = code_ptr[-1];
+ jump->addr = (sljit_uw)(code_ptr - 1);
+ }
+
+ return code_ptr + 4;
+}
+
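+/* Returns the number of extra halfwords (in addition to the one already
+   copied) needed to load a label/target address: 1 when a PC-relative
+   ADDW/SUBW fits (PATCH_TYPE6), otherwise 3 for a full MOVW/MOVT pair. */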
+static SLJIT_INLINE sljit_sw mov_addr_get_length(struct sljit_jump *jump, sljit_u16 *code_ptr, sljit_u16 *code, sljit_sw executable_offset)
+{
+ sljit_uw addr;
+ sljit_sw diff;
+ SLJIT_UNUSED_ARG(executable_offset);
+
+ if (jump->flags & JUMP_ADDR)
+ addr = jump->u.target;
+ else
+ addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code + jump->u.label->size, executable_offset);
+
+	/* The pc+4 offset is represented by the 2 * SSIZE_OF(u16) below. */
+ diff = (sljit_sw)addr - (sljit_sw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
+
+ /* Note: ADR with imm8 does not set the last bit (Thumb2 flag). */
+
+ if (diff <= 0xffd + 2 * SSIZE_OF(u16) && diff >= -0xfff + 2 * SSIZE_OF(u16)) {
+ jump->flags |= PATCH_TYPE6;
+ return 1;
}
- return 0;
+ return 3;
}
-static SLJIT_INLINE void set_jump_instruction(struct sljit_jump *jump, sljit_sw executable_offset)
+static SLJIT_INLINE void generate_jump_or_mov_addr(struct sljit_jump *jump, sljit_sw executable_offset)
{
sljit_s32 type = (jump->flags >> 4) & 0xf;
+ sljit_u16 *jump_inst = (sljit_u16*)jump->addr;
sljit_sw diff;
- sljit_u16 *jump_inst;
- sljit_s32 s, j1, j2;
+ sljit_ins ins;
+
+ diff = (sljit_sw)((jump->flags & JUMP_ADDR) ? jump->u.target : jump->u.label->u.addr);
if (SLJIT_UNLIKELY(type == 0)) {
- modify_imm32_const((sljit_u16*)jump->addr, (jump->flags & JUMP_LABEL) ? jump->u.label->addr : jump->u.target);
+ ins = (jump->flags & JUMP_MOV_ADDR) ? *jump_inst : RDN3(TMP_REG1);
+ set_imm32_const((sljit_u16*)jump->addr, ins, (sljit_uw)diff);
return;
}
- if (jump->flags & JUMP_ADDR) {
- SLJIT_ASSERT(jump->u.target & 0x1);
- diff = ((sljit_sw)jump->u.target - (sljit_sw)(jump->addr + sizeof(sljit_u32)) - executable_offset) >> 1;
- }
- else {
- SLJIT_ASSERT(jump->u.label->addr & 0x1);
- diff = ((sljit_sw)(jump->u.label->addr) - (sljit_sw)(jump->addr + sizeof(sljit_u32)) - executable_offset) >> 1;
+ if (SLJIT_UNLIKELY(type == 6)) {
+ SLJIT_ASSERT(jump->flags & JUMP_MOV_ADDR);
+ diff -= (sljit_sw)SLJIT_ADD_EXEC_OFFSET(jump_inst + 2, executable_offset) & ~(sljit_sw)0x3;
+
+ SLJIT_ASSERT(diff <= 0xfff && diff >= -0xfff);
+
+ ins = ADDWI >> 16;
+ if (diff <= 0) {
+ diff = -diff;
+ ins = SUBWI >> 16;
+ }
+
+ jump_inst[1] = (sljit_u16)(jump_inst[0] | COPY_BITS(diff, 8, 12, 3) | (diff & 0xff));
+ jump_inst[0] = (sljit_u16)(ins | 0xf | COPY_BITS(diff, 11, 10, 1));
+ return;
}
- jump_inst = (sljit_u16*)jump->addr;
+
+ SLJIT_ASSERT((diff & 0x1) != 0 && !(jump->flags & JUMP_MOV_ADDR));
+ diff = (diff - (sljit_sw)(jump->addr + sizeof(sljit_u32)) - executable_offset) >> 1;
switch (type) {
case 1:
/* Encoding T1 of 'B' instruction */
- SLJIT_ASSERT(diff <= 127 && diff >= -128 && (jump->flags & IS_COND));
+ SLJIT_ASSERT(diff <= 0x7f && diff >= -0x80 && (jump->flags & IS_COND));
jump_inst[0] = (sljit_u16)(0xd000 | (jump->flags & 0xf00) | ((sljit_ins)diff & 0xff));
return;
case 2:
/* Encoding T3 of 'B' instruction */
- SLJIT_ASSERT(diff <= 524287 && diff >= -524288 && (jump->flags & IS_COND));
+ SLJIT_ASSERT(diff <= 0x7ffff && diff >= -0x80000 && (jump->flags & IS_COND));
jump_inst[0] = (sljit_u16)(0xf000 | COPY_BITS(jump->flags, 8, 6, 4) | COPY_BITS(diff, 11, 0, 6) | COPY_BITS(diff, 19, 10, 1));
jump_inst[1] = (sljit_u16)(0x8000 | COPY_BITS(diff, 17, 13, 1) | COPY_BITS(diff, 18, 11, 1) | ((sljit_ins)diff & 0x7ff));
return;
case 3:
- SLJIT_ASSERT(jump->flags & IS_COND);
- *jump_inst++ = (sljit_u16)(IT | ((jump->flags >> 4) & 0xf0) | 0x8);
- diff--;
- type = 5;
- break;
- case 4:
/* Encoding T2 of 'B' instruction */
- SLJIT_ASSERT(diff <= 1023 && diff >= -1024 && !(jump->flags & IS_COND));
+ SLJIT_ASSERT(diff <= 0x3ff && diff >= -0x400 && !(jump->flags & IS_COND));
jump_inst[0] = (sljit_u16)(0xe000 | (diff & 0x7ff));
return;
}
- SLJIT_ASSERT(diff <= 8388607 && diff >= -8388608);
+ SLJIT_ASSERT(diff <= 0x7fffff && diff >= -0x800000);
+
+ /* Really complex instruction form for branches. Negate with sign bit. */
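+	/* The shift replicates the sign bit S into bits 21-22; xoring with it and
+	   inverting turns I1/I2 into the J1/J2 fields (J = NOT(I XOR S)) used by
+	   the T4 branch and BL encodings. */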
+ diff ^= ((diff >> 2) & 0x600000) ^ 0x600000;
- /* Really complex instruction form for branches. */
- s = (diff >> 23) & 0x1;
- j1 = (~(diff >> 22) ^ s) & 0x1;
- j2 = (~(diff >> 21) ^ s) & 0x1;
- jump_inst[0] = (sljit_u16)(0xf000 | ((sljit_ins)s << 10) | COPY_BITS(diff, 11, 0, 10));
- jump_inst[1] = (sljit_u16)((j1 << 13) | (j2 << 11) | (diff & 0x7ff));
+ jump_inst[0] = (sljit_u16)(0xf000 | COPY_BITS(diff, 11, 0, 10) | COPY_BITS(diff, 23, 10, 1));
+ jump_inst[1] = (sljit_u16)((diff & 0x7ff) | COPY_BITS(diff, 22, 13, 1) | COPY_BITS(diff, 21, 11, 1));
+
+ SLJIT_ASSERT(type == 4 || type == 5);
/* The others have a common form. */
- if (type == 5) /* Encoding T4 of 'B' instruction */
+ if (type == 4) /* Encoding T4 of 'B' instruction */
jump_inst[1] |= 0x9000;
- else if (type == 6) /* Encoding T1 of 'BL' instruction */
+ else /* Encoding T1 of 'BL' instruction */
jump_inst[1] |= 0xd000;
- else
- SLJIT_UNREACHABLE();
}
-SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compiler)
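+/* Pre-pass over labels, jumps and constants in address order: estimates how
+   many halfwords each jump / mov_addr really needs (stored in jump->flags
+   above JUMP_SIZE_SHIFT), shifts the recorded addresses accordingly and
+   shrinks compiler->size by the total saving before code generation. */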
+static void reduce_code_size(struct sljit_compiler *compiler)
+{
+ struct sljit_label *label;
+ struct sljit_jump *jump;
+ struct sljit_const *const_;
+ SLJIT_NEXT_DEFINE_TYPES;
+ sljit_uw total_size;
+ sljit_uw size_reduce = 0;
+ sljit_sw diff;
+
+ label = compiler->labels;
+ jump = compiler->jumps;
+ const_ = compiler->consts;
+ SLJIT_NEXT_INIT_TYPES();
+
+ while (1) {
+ SLJIT_GET_NEXT_MIN();
+
+ if (next_min_addr == SLJIT_MAX_ADDRESS)
+ break;
+
+ if (next_min_addr == next_label_size) {
+ label->size -= size_reduce;
+
+ label = label->next;
+ next_label_size = SLJIT_GET_NEXT_SIZE(label);
+ }
+
+ if (next_min_addr == next_const_addr) {
+ const_->addr -= size_reduce;
+ const_ = const_->next;
+ next_const_addr = SLJIT_GET_NEXT_ADDRESS(const_);
+ continue;
+ }
+
+ if (next_min_addr != next_jump_addr)
+ continue;
+
+ jump->addr -= size_reduce;
+ if (!(jump->flags & JUMP_MOV_ADDR)) {
+ total_size = JUMP_MAX_SIZE;
+
+ if (!(jump->flags & (SLJIT_REWRITABLE_JUMP | JUMP_ADDR))) {
+ /* Unit size: instruction. */
+ diff = (sljit_sw)jump->u.label->size - (sljit_sw)jump->addr - 2;
+
+ if (jump->flags & IS_COND) {
+ diff++;
+
+ if (diff <= (0xff / SSIZE_OF(u16)) && diff >= (-0x100 / SSIZE_OF(u16)))
+ total_size = 0;
+ else if (diff <= (0xfffff / SSIZE_OF(u16)) && diff >= (-0x100000 / SSIZE_OF(u16)))
+ total_size = 1;
+ diff--;
+ } else if (!(jump->flags & IS_BL) && diff <= (0x7ff / SSIZE_OF(u16)) && diff >= (-0x800 / SSIZE_OF(u16)))
+ total_size = 1;
+
+ if (total_size == JUMP_MAX_SIZE && diff <= (0xffffff / SSIZE_OF(u16)) && diff >= (-0x1000000 / SSIZE_OF(u16)))
+ total_size = 2;
+ }
+
+ size_reduce += JUMP_MAX_SIZE - total_size;
+ } else {
+ /* Real size minus 1. Unit size: instruction. */
+ total_size = 3;
+
+ if (!(jump->flags & JUMP_ADDR)) {
+ diff = (sljit_sw)jump->u.label->size - (sljit_sw)jump->addr;
+
+ if (diff <= (0xffd / SSIZE_OF(u16)) && diff >= (-0xfff / SSIZE_OF(u16)))
+ total_size = 1;
+ }
+
+ size_reduce += 3 - total_size;
+ }
+
+ jump->flags |= total_size << JUMP_SIZE_SHIFT;
+ jump = jump->next;
+ next_jump_addr = SLJIT_GET_NEXT_ADDRESS(jump);
+ }
+
+ compiler->size -= size_reduce;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compiler, sljit_s32 options, void *exec_allocator_data)
{
struct sljit_memory_fragment *buf;
sljit_u16 *code;
@@ -380,64 +564,74 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
sljit_u16 *buf_ptr;
sljit_u16 *buf_end;
sljit_uw half_count;
- sljit_uw next_addr;
+ SLJIT_NEXT_DEFINE_TYPES;
+ sljit_sw addr;
sljit_sw executable_offset;
struct sljit_label *label;
struct sljit_jump *jump;
struct sljit_const *const_;
- struct sljit_put_label *put_label;
CHECK_ERROR_PTR();
CHECK_PTR(check_sljit_generate_code(compiler));
- reverse_buf(compiler);
- code = (sljit_u16*)SLJIT_MALLOC_EXEC(compiler->size * sizeof(sljit_u16), compiler->exec_allocator_data);
+ reduce_code_size(compiler);
+
+ code = (sljit_u16*)allocate_executable_memory(compiler->size * sizeof(sljit_u16), options, exec_allocator_data, &executable_offset);
PTR_FAIL_WITH_EXEC_IF(code);
+
+ reverse_buf(compiler);
buf = compiler->buf;
code_ptr = code;
half_count = 0;
- next_addr = 0;
- executable_offset = SLJIT_EXEC_OFFSET(code);
-
label = compiler->labels;
jump = compiler->jumps;
const_ = compiler->consts;
- put_label = compiler->put_labels;
+ SLJIT_NEXT_INIT_TYPES();
+ SLJIT_GET_NEXT_MIN();
do {
buf_ptr = (sljit_u16*)buf->memory;
buf_end = buf_ptr + (buf->used_size >> 1);
do {
*code_ptr = *buf_ptr++;
- if (next_addr == half_count) {
+ if (next_min_addr == half_count) {
SLJIT_ASSERT(!label || label->size >= half_count);
SLJIT_ASSERT(!jump || jump->addr >= half_count);
SLJIT_ASSERT(!const_ || const_->addr >= half_count);
- SLJIT_ASSERT(!put_label || put_label->addr >= half_count);
/* These structures are ordered by their address. */
- if (label && label->size == half_count) {
- label->addr = ((sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset)) | 0x1;
+ if (next_min_addr == next_label_size) {
+ label->u.addr = ((sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset)) | 0x1;
label->size = (sljit_uw)(code_ptr - code);
label = label->next;
+ next_label_size = SLJIT_GET_NEXT_SIZE(label);
}
- if (jump && jump->addr == half_count) {
- jump->addr = (sljit_uw)code_ptr - ((jump->flags & IS_COND) ? 10 : 8);
- code_ptr -= detect_jump_type(jump, code_ptr, code, executable_offset);
- jump = jump->next;
- }
- if (const_ && const_->addr == half_count) {
+
+ if (next_min_addr == next_jump_addr) {
+ if (!(jump->flags & JUMP_MOV_ADDR)) {
+ half_count = half_count - 1 + (jump->flags >> JUMP_SIZE_SHIFT);
+ jump->addr = (sljit_uw)code_ptr;
+ code_ptr = detect_jump_type(jump, code_ptr, code, executable_offset);
+ SLJIT_ASSERT((sljit_uw)code_ptr - jump->addr <
+ ((jump->flags >> JUMP_SIZE_SHIFT) + ((jump->flags & 0xf0) <= PATCH_TYPE2)) * sizeof(sljit_u16));
+ } else {
+ half_count += jump->flags >> JUMP_SIZE_SHIFT;
+ addr = (sljit_sw)code_ptr;
+ code_ptr += mov_addr_get_length(jump, code_ptr, code, executable_offset);
+ jump->addr = (sljit_uw)addr;
+ }
+
+ jump = jump->next;
+ next_jump_addr = SLJIT_GET_NEXT_ADDRESS(jump);
+ } else if (next_min_addr == next_const_addr) {
const_->addr = (sljit_uw)code_ptr;
const_ = const_->next;
+ next_const_addr = SLJIT_GET_NEXT_ADDRESS(const_);
}
- if (put_label && put_label->addr == half_count) {
- SLJIT_ASSERT(put_label->label);
- put_label->addr = (sljit_uw)code_ptr;
- put_label = put_label->next;
- }
- next_addr = compute_next_addr(label, jump, const_, put_label);
+
+ SLJIT_GET_NEXT_MIN();
}
code_ptr++;
half_count++;
@@ -447,7 +641,7 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
} while (buf);
if (label && label->size == half_count) {
- label->addr = ((sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset)) | 0x1;
+ label->u.addr = ((sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset)) | 0x1;
label->size = (sljit_uw)(code_ptr - code);
label = label->next;
}
@@ -455,21 +649,14 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
SLJIT_ASSERT(!label);
SLJIT_ASSERT(!jump);
SLJIT_ASSERT(!const_);
- SLJIT_ASSERT(!put_label);
SLJIT_ASSERT(code_ptr - code <= (sljit_sw)compiler->size);
jump = compiler->jumps;
while (jump) {
- set_jump_instruction(jump, executable_offset);
+ generate_jump_or_mov_addr(jump, executable_offset);
jump = jump->next;
}
- put_label = compiler->put_labels;
- while (put_label) {
- modify_imm32_const((sljit_u16 *)put_label->addr, put_label->label->addr);
- put_label = put_label->next;
- }
-
compiler->error = SLJIT_ERR_COMPILED;
compiler->executable_offset = executable_offset;
compiler->executable_size = (sljit_uw)(code_ptr - code) * sizeof(sljit_u16);
@@ -488,18 +675,25 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_has_cpu_feature(sljit_s32 feature_type)
{
switch (feature_type) {
case SLJIT_HAS_FPU:
+ case SLJIT_HAS_F64_AS_F32_PAIR:
+ case SLJIT_HAS_SIMD:
#ifdef SLJIT_IS_FPU_AVAILABLE
- return SLJIT_IS_FPU_AVAILABLE;
+ return (SLJIT_IS_FPU_AVAILABLE) != 0;
#else
/* Available by default. */
return 1;
#endif
+ case SLJIT_SIMD_REGS_ARE_PAIRS:
case SLJIT_HAS_CLZ:
case SLJIT_HAS_CTZ:
+ case SLJIT_HAS_REV:
case SLJIT_HAS_ROT:
case SLJIT_HAS_CMOV:
case SLJIT_HAS_PREFETCH:
+ case SLJIT_HAS_COPY_F32:
+ case SLJIT_HAS_COPY_F64:
+ case SLJIT_HAS_ATOMIC:
return 1;
default:
@@ -592,10 +786,11 @@ static sljit_s32 load_immediate(struct sljit_compiler *compiler, sljit_s32 dst,
/* SET_FLAGS must be 0x100000 as it is also the value of S bit (can be used for optimization). */
#define SET_FLAGS 0x0100000
#define UNUSED_RETURN 0x0200000
+#define REGISTER_OP 0x0400000
static sljit_s32 emit_op_imm(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 dst, sljit_uw arg1, sljit_uw arg2)
{
- /* dst must be register, TMP_REG1
+ /* dst must be register
arg1 must be register, imm
arg2 must be register, imm */
sljit_s32 reg;
@@ -615,18 +810,18 @@ static sljit_s32 emit_op_imm(struct sljit_compiler *compiler, sljit_s32 flags, s
switch (flags & 0xffff) {
case SLJIT_CLZ:
case SLJIT_CTZ:
+ case SLJIT_REV:
+ case SLJIT_REV_U16:
+ case SLJIT_REV_S16:
+ case SLJIT_REV_U32:
+ case SLJIT_REV_S32:
case SLJIT_MUL:
+ case SLJIT_MULADD:
/* No form with immediate operand. */
break;
case SLJIT_MOV:
SLJIT_ASSERT(!(flags & SET_FLAGS) && (flags & ARG2_IMM) && arg1 == TMP_REG2);
return load_immediate(compiler, dst, imm);
- case SLJIT_NOT:
- if (!(flags & SET_FLAGS))
- return load_immediate(compiler, dst, ~imm);
- /* Since the flags should be set, we just fallback to the register mode.
- Although some clever things could be done here, "NOT IMM" does not worth the efforts. */
- break;
case SLJIT_ADD:
compiler->status_flags_state = SLJIT_CURRENT_FLAGS_ADD;
imm2 = NEGATE(imm);
@@ -657,9 +852,14 @@ static sljit_s32 emit_op_imm(struct sljit_compiler *compiler, sljit_s32 flags, s
break;
case SLJIT_ADDC:
compiler->status_flags_state = SLJIT_CURRENT_FLAGS_ADD;
- imm = get_imm(imm);
- if (imm != INVALID_IMM)
- return push_inst32(compiler, ADCI | (flags & SET_FLAGS) | RD4(dst) | RN4(reg) | imm);
+ imm2 = get_imm(imm);
+ if (imm2 != INVALID_IMM)
+ return push_inst32(compiler, ADCI | (flags & SET_FLAGS) | RD4(dst) | RN4(reg) | imm2);
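+		/* ADC rd, rn, #imm adds imm + carry, which equals SBC rd, rn, #~imm,
+		   so the complemented constant is tried as well. */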
+ if (flags & ARG2_IMM) {
+ imm = get_imm(~imm);
+ if (imm != INVALID_IMM)
+ return push_inst32(compiler, SBCI | (flags & SET_FLAGS) | RD4(dst) | RN4(reg) | imm);
+ }
break;
case SLJIT_SUB:
compiler->status_flags_state = SLJIT_CURRENT_FLAGS_SUB;
@@ -712,9 +912,12 @@ static sljit_s32 emit_op_imm(struct sljit_compiler *compiler, sljit_s32 flags, s
compiler->status_flags_state = SLJIT_CURRENT_FLAGS_SUB;
if (flags & ARG1_IMM)
break;
- imm = get_imm(imm);
+ imm2 = get_imm(imm);
+ if (imm2 != INVALID_IMM)
+ return push_inst32(compiler, SBCI | (flags & SET_FLAGS) | RD4(dst) | RN4(reg) | imm2);
+ imm = get_imm(~imm);
if (imm != INVALID_IMM)
- return push_inst32(compiler, SBCI | (flags & SET_FLAGS) | RD4(dst) | RN4(reg) | imm);
+ return push_inst32(compiler, ADCI | (flags & SET_FLAGS) | RD4(dst) | RN4(reg) | imm);
break;
case SLJIT_AND:
imm2 = get_imm(imm);
@@ -733,6 +936,11 @@ static sljit_s32 emit_op_imm(struct sljit_compiler *compiler, sljit_s32 flags, s
return push_inst32(compiler, ORNI | (flags & SET_FLAGS) | RD4(dst) | RN4(reg) | imm);
break;
case SLJIT_XOR:
+ if (imm == (sljit_uw)-1) {
+ if (IS_2_LO_REGS(dst, reg))
+ return push_inst16(compiler, MVNS | RD3(dst) | RN3(reg));
+ return push_inst32(compiler, MVN_W | (flags & SET_FLAGS) | RD4(dst) | RM4(reg));
+ }
imm = get_imm(imm);
if (imm != INVALID_IMM)
return push_inst32(compiler, EORI | (flags & SET_FLAGS) | RD4(dst) | RN4(reg) | imm);
@@ -788,8 +996,7 @@ static sljit_s32 emit_op_imm(struct sljit_compiler *compiler, sljit_s32 flags, s
imm = arg2;
arg2 = (arg1 == TMP_REG1) ? TMP_REG2 : TMP_REG1;
FAIL_IF(load_immediate(compiler, (sljit_s32)arg2, imm));
- }
- else {
+ } else {
imm = arg1;
arg1 = (arg2 == TMP_REG1) ? TMP_REG2 : TMP_REG1;
FAIL_IF(load_immediate(compiler, (sljit_s32)arg1, imm));
@@ -829,11 +1036,6 @@ static sljit_s32 emit_op_imm(struct sljit_compiler *compiler, sljit_s32 flags, s
if (IS_2_LO_REGS(dst, arg2))
return push_inst16(compiler, SXTH | RD3(dst) | RN3(arg2));
return push_inst32(compiler, SXTH_W | RD4(dst) | RM4(arg2));
- case SLJIT_NOT:
- SLJIT_ASSERT(arg1 == TMP_REG2);
- if (IS_2_LO_REGS(dst, arg2))
- return push_inst16(compiler, MVNS | RD3(dst) | RN3(arg2));
- return push_inst32(compiler, MVN_W | (flags & SET_FLAGS) | RD4(dst) | RM4(arg2));
case SLJIT_CLZ:
SLJIT_ASSERT(arg1 == TMP_REG2);
return push_inst32(compiler, CLZ | RN4(arg2) | RD4(dst) | RM4(arg2));
@@ -841,6 +1043,29 @@ static sljit_s32 emit_op_imm(struct sljit_compiler *compiler, sljit_s32 flags, s
SLJIT_ASSERT(arg1 == TMP_REG2);
FAIL_IF(push_inst32(compiler, RBIT | RN4(arg2) | RD4(dst) | RM4(arg2)));
return push_inst32(compiler, CLZ | RN4(dst) | RD4(dst) | RM4(dst));
+ case SLJIT_REV:
+ case SLJIT_REV_U32:
+ case SLJIT_REV_S32:
+ SLJIT_ASSERT(arg1 == TMP_REG2);
+ if (IS_2_LO_REGS(dst, arg2))
+ return push_inst16(compiler, REV | RD3(dst) | RN3(arg2));
+ return push_inst32(compiler, REV_W | RN4(arg2) | RD4(dst) | RM4(arg2));
+ case SLJIT_REV_U16:
+ case SLJIT_REV_S16:
+ SLJIT_ASSERT(arg1 == TMP_REG2);
+
+ if (IS_2_LO_REGS(dst, arg2))
+ FAIL_IF(push_inst16(compiler, REV16 | RD3(dst) | RN3(arg2)));
+ else
+ FAIL_IF(push_inst32(compiler, REV16_W | RN4(arg2) | RD4(dst) | RM4(arg2)));
+
+ if (!(flags & REGISTER_OP))
+ return SLJIT_SUCCESS;
+
+ flags &= 0xffff;
+ if (reg_map[dst] <= 7)
+ return push_inst16(compiler, (flags == SLJIT_REV_U16 ? UXTH : SXTH) | RD3(dst) | RN3(dst));
+ return push_inst32(compiler, (flags == SLJIT_REV_U16 ? UXTH_W : SXTH_W) | RD4(dst) | RM4(dst));
case SLJIT_ADD:
compiler->status_flags_state = SLJIT_CURRENT_FLAGS_ADD;
if (IS_3_LO_REGS(dst, arg1, arg2))
@@ -872,10 +1097,10 @@ static sljit_s32 emit_op_imm(struct sljit_compiler *compiler, sljit_s32 flags, s
compiler->status_flags_state = 0;
if (!(flags & SET_FLAGS))
return push_inst32(compiler, MUL | RD4(dst) | RN4(arg1) | RM4(arg2));
- SLJIT_ASSERT(dst != TMP_REG2);
- FAIL_IF(push_inst32(compiler, SMULL | RT4(dst) | RD4(TMP_REG2) | RN4(arg1) | RM4(arg2)));
+ reg = (dst == TMP_REG2) ? TMP_REG1 : TMP_REG2;
+ FAIL_IF(push_inst32(compiler, SMULL | RT4(dst) | RD4(reg) | RN4(arg1) | RM4(arg2)));
/* cmp TMP_REG2, dst asr #31. */
- return push_inst32(compiler, CMP_W | RN4(TMP_REG2) | 0x70e0 | RM4(dst));
+ return push_inst32(compiler, CMP_W | RN4(reg) | 0x70e0 | RM4(dst));
case SLJIT_AND:
if (dst == (sljit_s32)arg1 && IS_2_LO_REGS(dst, arg2))
return push_inst16(compiler, ANDS | RD3(dst) | RN3(arg2));
@@ -891,37 +1116,44 @@ static sljit_s32 emit_op_imm(struct sljit_compiler *compiler, sljit_s32 flags, s
return push_inst16(compiler, EORS | RD3(dst) | RN3(arg2));
return push_inst32(compiler, EOR_W | (flags & SET_FLAGS) | RD4(dst) | RN4(arg1) | RM4(arg2));
case SLJIT_MSHL:
- FAIL_IF(push_inst32(compiler, ANDI | RD4(TMP_REG2) | RN4(arg2) | 0x1f));
- arg2 = TMP_REG2;
+ reg = (arg2 == TMP_REG1) ? TMP_REG1 : TMP_REG2;
+ FAIL_IF(push_inst32(compiler, ANDI | RD4(reg) | RN4(arg2) | 0x1f));
+ arg2 = (sljit_uw)reg;
/* fallthrough */
case SLJIT_SHL:
if (dst == (sljit_s32)arg1 && IS_2_LO_REGS(dst, arg2))
return push_inst16(compiler, LSLS | RD3(dst) | RN3(arg2));
return push_inst32(compiler, LSL_W | (flags & SET_FLAGS) | RD4(dst) | RN4(arg1) | RM4(arg2));
case SLJIT_MLSHR:
- FAIL_IF(push_inst32(compiler, ANDI | RD4(TMP_REG2) | RN4(arg2) | 0x1f));
- arg2 = TMP_REG2;
+ reg = (arg2 == TMP_REG1) ? TMP_REG1 : TMP_REG2;
+ FAIL_IF(push_inst32(compiler, ANDI | RD4(reg) | RN4(arg2) | 0x1f));
+ arg2 = (sljit_uw)reg;
/* fallthrough */
case SLJIT_LSHR:
if (dst == (sljit_s32)arg1 && IS_2_LO_REGS(dst, arg2))
return push_inst16(compiler, LSRS | RD3(dst) | RN3(arg2));
return push_inst32(compiler, LSR_W | (flags & SET_FLAGS) | RD4(dst) | RN4(arg1) | RM4(arg2));
case SLJIT_MASHR:
- FAIL_IF(push_inst32(compiler, ANDI | RD4(TMP_REG2) | RN4(arg2) | 0x1f));
- arg2 = TMP_REG2;
+ reg = (arg2 == TMP_REG1) ? TMP_REG1 : TMP_REG2;
+ FAIL_IF(push_inst32(compiler, ANDI | RD4(reg) | RN4(arg2) | 0x1f));
+ arg2 = (sljit_uw)reg;
/* fallthrough */
case SLJIT_ASHR:
if (dst == (sljit_s32)arg1 && IS_2_LO_REGS(dst, arg2))
return push_inst16(compiler, ASRS | RD3(dst) | RN3(arg2));
return push_inst32(compiler, ASR_W | (flags & SET_FLAGS) | RD4(dst) | RN4(arg1) | RM4(arg2));
case SLJIT_ROTL:
- FAIL_IF(push_inst32(compiler, RSB_WI | RD4(TMP_REG2) | RN4(arg2) | 0));
- arg2 = TMP_REG2;
+ reg = (arg2 == TMP_REG1) ? TMP_REG1 : TMP_REG2;
+ FAIL_IF(push_inst32(compiler, RSB_WI | RD4(reg) | RN4(arg2) | 0));
+ arg2 = (sljit_uw)reg;
/* fallthrough */
case SLJIT_ROTR:
if (dst == (sljit_s32)arg1 && IS_2_LO_REGS(dst, arg2))
return push_inst16(compiler, RORS | RD3(dst) | RN3(arg2));
return push_inst32(compiler, ROR_W | RD4(dst) | RN4(arg1) | RM4(arg2));
+ case SLJIT_MULADD:
+ compiler->status_flags_state = 0;
+ return push_inst32(compiler, MLA | RD4(dst) | RN4(arg1) | RM4(arg2) | RT4(dst));
}
SLJIT_UNREACHABLE();
@@ -1176,12 +1408,12 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compi
}
if (fsaveds + fscratches >= SLJIT_NUMBER_OF_FLOAT_REGISTERS) {
- FAIL_IF(push_inst32(compiler, VPUSH | DD4(SLJIT_FS0) | ((sljit_uw)SLJIT_NUMBER_OF_SAVED_FLOAT_REGISTERS << 1)));
+ FAIL_IF(push_inst32(compiler, VPUSH | VD4(SLJIT_FS0) | ((sljit_uw)SLJIT_NUMBER_OF_SAVED_FLOAT_REGISTERS << 1)));
} else {
if (fsaveds > 0)
- FAIL_IF(push_inst32(compiler, VPUSH | DD4(SLJIT_FS0) | ((sljit_uw)fsaveds << 1)));
+ FAIL_IF(push_inst32(compiler, VPUSH | VD4(SLJIT_FS0) | ((sljit_uw)fsaveds << 1)));
if (fscratches >= SLJIT_FIRST_SAVED_FLOAT_REG)
- FAIL_IF(push_inst32(compiler, VPUSH | DD4(fscratches) | ((sljit_uw)(fscratches - (SLJIT_FIRST_SAVED_FLOAT_REG - 1)) << 1)));
+ FAIL_IF(push_inst32(compiler, VPUSH | VD4(fscratches) | ((sljit_uw)(fscratches - (SLJIT_FIRST_SAVED_FLOAT_REG - 1)) << 1)));
}
}
@@ -1258,17 +1490,17 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compi
switch (arg_types & SLJIT_ARG_MASK) {
case SLJIT_ARG_TYPE_F64:
if (offset != old_offset)
- *remap_ptr++ = VMOV_F32 | SLJIT_32 | DD4(offset) | DM4(old_offset);
+ *remap_ptr++ = VMOV_F32 | SLJIT_32 | VD4(offset) | VM4(old_offset);
old_offset++;
offset++;
break;
case SLJIT_ARG_TYPE_F32:
if (f32_offset != 0) {
- *remap_ptr++ = VMOV_F32 | 0x20 | DD4(offset) | DM4(f32_offset);
+ *remap_ptr++ = VMOV_F32 | 0x20 | VD4(offset) | VM4(f32_offset);
f32_offset = 0;
} else {
if (offset != old_offset)
- *remap_ptr++ = VMOV_F32 | DD4(offset) | DM4(old_offset);
+ *remap_ptr++ = VMOV_F32 | VD4(offset) | VM4(old_offset);
f32_offset = old_offset;
old_offset++;
}
@@ -1356,6 +1588,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_set_context(struct sljit_compiler *comp
size = GET_SAVED_REGISTERS_SIZE(scratches, saveds - SLJIT_KEPT_SAVEDS_COUNT(options), 1);
+ /* Doubles are saved, so alignment is unaffected. */
if ((size & SSIZE_OF(sw)) != 0 && (fsaveds > 0 || fscratches >= SLJIT_FIRST_SAVED_FLOAT_REG))
size += SSIZE_OF(sw);
@@ -1401,12 +1634,12 @@ static sljit_s32 emit_stack_frame_release(struct sljit_compiler *compiler, sljit
FAIL_IF(emit_add_sp(compiler, (sljit_uw)local_size));
if (fsaveds + fscratches >= SLJIT_NUMBER_OF_FLOAT_REGISTERS) {
- FAIL_IF(push_inst32(compiler, VPOP | DD4(SLJIT_FS0) | ((sljit_uw)SLJIT_NUMBER_OF_SAVED_FLOAT_REGISTERS << 1)));
+ FAIL_IF(push_inst32(compiler, VPOP | VD4(SLJIT_FS0) | ((sljit_uw)SLJIT_NUMBER_OF_SAVED_FLOAT_REGISTERS << 1)));
} else {
if (fscratches >= SLJIT_FIRST_SAVED_FLOAT_REG)
- FAIL_IF(push_inst32(compiler, VPOP | DD4(fscratches) | ((sljit_uw)(fscratches - (SLJIT_FIRST_SAVED_FLOAT_REG - 1)) << 1)));
+ FAIL_IF(push_inst32(compiler, VPOP | VD4(fscratches) | ((sljit_uw)(fscratches - (SLJIT_FIRST_SAVED_FLOAT_REG - 1)) << 1)));
if (fsaveds > 0)
- FAIL_IF(push_inst32(compiler, VPOP | DD4(SLJIT_FS0) | ((sljit_uw)fsaveds << 1)));
+ FAIL_IF(push_inst32(compiler, VPOP | VD4(SLJIT_FS0) | ((sljit_uw)fsaveds << 1)));
}
local_size = GET_SAVED_REGISTERS_SIZE(compiler->scratches, compiler->saveds, 1) & 0x7;
@@ -1684,14 +1917,13 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compile
sljit_s32 src, sljit_sw srcw)
{
sljit_s32 dst_r, flags;
- sljit_s32 op_flags = GET_ALL_FLAGS(op);
CHECK_ERROR();
CHECK(check_sljit_emit_op1(compiler, op, dst, dstw, src, srcw));
ADJUST_LOCAL_OFFSET(dst, dstw);
ADJUST_LOCAL_OFFSET(src, srcw);
- dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
+ dst_r = FAST_IS_REG(dst) ? dst : TMP_REG2;
op = GET_OPCODE(op);
if (op >= SLJIT_MOV && op <= SLJIT_MOV_P) {
@@ -1705,22 +1937,22 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compile
break;
case SLJIT_MOV_U8:
flags = BYTE_SIZE;
- if (src & SLJIT_IMM)
+ if (src == SLJIT_IMM)
srcw = (sljit_u8)srcw;
break;
case SLJIT_MOV_S8:
flags = BYTE_SIZE | SIGNED;
- if (src & SLJIT_IMM)
+ if (src == SLJIT_IMM)
srcw = (sljit_s8)srcw;
break;
case SLJIT_MOV_U16:
flags = HALF_SIZE;
- if (src & SLJIT_IMM)
+ if (src == SLJIT_IMM)
srcw = (sljit_u16)srcw;
break;
case SLJIT_MOV_S16:
flags = HALF_SIZE | SIGNED;
- if (src & SLJIT_IMM)
+ if (src == SLJIT_IMM)
srcw = (sljit_s16)srcw;
break;
default:
@@ -1729,33 +1961,39 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compile
break;
}
- if (src & SLJIT_IMM)
+ if (src == SLJIT_IMM)
FAIL_IF(emit_op_imm(compiler, SLJIT_MOV | ARG2_IMM, dst_r, TMP_REG2, (sljit_uw)srcw));
- else if (src & SLJIT_MEM) {
+ else if (src & SLJIT_MEM)
FAIL_IF(emit_op_mem(compiler, flags, dst_r, src, srcw, TMP_REG1));
- } else {
- if (dst_r != TMP_REG1)
- return emit_op_imm(compiler, op, dst_r, TMP_REG2, (sljit_uw)src);
+ else if (FAST_IS_REG(dst))
+ return emit_op_imm(compiler, op, dst_r, TMP_REG2, (sljit_uw)src);
+ else
dst_r = src;
- }
if (!(dst & SLJIT_MEM))
return SLJIT_SUCCESS;
- return emit_op_mem(compiler, flags | STORE, dst_r, dst, dstw, TMP_REG2);
+ return emit_op_mem(compiler, flags | STORE, dst_r, dst, dstw, TMP_REG1);
}
- flags = HAS_FLAGS(op_flags) ? SET_FLAGS : 0;
+ SLJIT_COMPILE_ASSERT(WORD_SIZE == 0, word_size_must_be_0);
+ flags = WORD_SIZE;
+
+ if (op == SLJIT_REV_U16 || op == SLJIT_REV_S16) {
+ if (!(dst & SLJIT_MEM) && (!(src & SLJIT_MEM) || op == SLJIT_REV_S16))
+ op |= REGISTER_OP;
+ flags |= HALF_SIZE;
+ }
if (src & SLJIT_MEM) {
- FAIL_IF(emit_op_mem(compiler, WORD_SIZE, TMP_REG1, src, srcw, TMP_REG1));
+ FAIL_IF(emit_op_mem(compiler, flags, TMP_REG1, src, srcw, TMP_REG1));
src = TMP_REG1;
}
- emit_op_imm(compiler, flags | op, dst_r, TMP_REG2, (sljit_uw)src);
+ emit_op_imm(compiler, op, dst_r, TMP_REG2, (sljit_uw)src);
if (SLJIT_UNLIKELY(dst & SLJIT_MEM))
- return emit_op_mem(compiler, flags | STORE, dst_r, dst, dstw, TMP_REG2);
+ return emit_op_mem(compiler, flags | STORE, dst_r, dst, dstw, TMP_REG1);
return SLJIT_SUCCESS;
}
@@ -1764,7 +2002,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compile
sljit_s32 src1, sljit_sw src1w,
sljit_s32 src2, sljit_sw src2w)
{
- sljit_s32 dst_reg, flags, src2_reg;
+ sljit_s32 dst_reg, src2_tmp_reg, flags;
CHECK_ERROR();
CHECK(check_sljit_emit_op2(compiler, op, 0, dst, dstw, src1, src1w, src2, src2w));
@@ -1772,36 +2010,34 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compile
ADJUST_LOCAL_OFFSET(src1, src1w);
ADJUST_LOCAL_OFFSET(src2, src2w);
- dst_reg = FAST_IS_REG(dst) ? dst : TMP_REG1;
+ dst_reg = FAST_IS_REG(dst) ? dst : TMP_REG2;
flags = HAS_FLAGS(op) ? SET_FLAGS : 0;
if (dst == TMP_REG1)
flags |= UNUSED_RETURN;
- if (src1 & SLJIT_IMM)
+ if (src2 == SLJIT_IMM)
+ flags |= ARG2_IMM;
+ else if (src2 & SLJIT_MEM) {
+ src2_tmp_reg = FAST_IS_REG(src1) ? TMP_REG1 : TMP_REG2;
+ emit_op_mem(compiler, WORD_SIZE, src2_tmp_reg, src2, src2w, TMP_REG1);
+ src2w = src2_tmp_reg;
+ } else
+ src2w = src2;
+
+ if (src1 == SLJIT_IMM)
flags |= ARG1_IMM;
else if (src1 & SLJIT_MEM) {
emit_op_mem(compiler, WORD_SIZE, TMP_REG1, src1, src1w, TMP_REG1);
src1w = TMP_REG1;
- }
- else
+ } else
src1w = src1;
- if (src2 & SLJIT_IMM)
- flags |= ARG2_IMM;
- else if (src2 & SLJIT_MEM) {
- src2_reg = (!(flags & ARG1_IMM) && (src1w == TMP_REG1)) ? TMP_REG2 : TMP_REG1;
- emit_op_mem(compiler, WORD_SIZE, src2_reg, src2, src2w, src2_reg);
- src2w = src2_reg;
- }
- else
- src2w = src2;
-
emit_op_imm(compiler, flags | GET_OPCODE(op), dst_reg, (sljit_uw)src1w, (sljit_uw)src2w);
if (!(dst & SLJIT_MEM))
return SLJIT_SUCCESS;
- return emit_op_mem(compiler, WORD_SIZE | STORE, dst_reg, dst, dstw, TMP_REG2);
+ return emit_op_mem(compiler, WORD_SIZE | STORE, dst_reg, dst, dstw, TMP_REG1);
}
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2u(struct sljit_compiler *compiler, sljit_s32 op,
@@ -1815,69 +2051,78 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2u(struct sljit_compiler *compil
return sljit_emit_op2(compiler, op, TMP_REG1, 0, src1, src1w, src2, src2w);
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_shift_into(struct sljit_compiler *compiler, sljit_s32 op,
- sljit_s32 src_dst,
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2r(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst_reg,
sljit_s32 src1, sljit_sw src1w,
sljit_s32 src2, sljit_sw src2w)
{
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_op2r(compiler, op, dst_reg, src1, src1w, src2, src2w));
+
+ switch (GET_OPCODE(op)) {
+ case SLJIT_MULADD:
+ SLJIT_SKIP_CHECKS(compiler);
+ return sljit_emit_op2(compiler, op, dst_reg, 0, src1, src1w, src2, src2w);
+ }
+
+ return SLJIT_SUCCESS;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_shift_into(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst_reg,
+ sljit_s32 src1_reg,
+ sljit_s32 src2_reg,
+ sljit_s32 src3, sljit_sw src3w)
+{
sljit_s32 is_left;
CHECK_ERROR();
- CHECK(check_sljit_emit_shift_into(compiler, op, src_dst, src1, src1w, src2, src2w));
+ CHECK(check_sljit_emit_shift_into(compiler, op, dst_reg, src1_reg, src2_reg, src3, src3w));
op = GET_OPCODE(op);
is_left = (op == SLJIT_SHL || op == SLJIT_MSHL);
- if (src_dst == src1) {
+ if (src1_reg == src2_reg) {
SLJIT_SKIP_CHECKS(compiler);
- return sljit_emit_op2(compiler, is_left ? SLJIT_ROTL : SLJIT_ROTR, src_dst, 0, src_dst, 0, src2, src2w);
+ return sljit_emit_op2(compiler, is_left ? SLJIT_ROTL : SLJIT_ROTR, dst_reg, 0, src1_reg, 0, src3, src3w);
}
- ADJUST_LOCAL_OFFSET(src1, src1w);
- ADJUST_LOCAL_OFFSET(src2, src2w);
+ ADJUST_LOCAL_OFFSET(src3, src3w);
- if (src2 & SLJIT_IMM) {
- src2w &= 0x1f;
+ if (src3 == SLJIT_IMM) {
+ src3w &= 0x1f;
- if (src2w == 0)
+ if (src3w == 0)
return SLJIT_SUCCESS;
- } else if (src2 & SLJIT_MEM) {
- FAIL_IF(emit_op_mem(compiler, WORD_SIZE, TMP_REG2, src2, src2w, TMP_REG2));
- src2 = TMP_REG2;
- }
- if (src1 & SLJIT_MEM) {
- FAIL_IF(emit_op_mem(compiler, WORD_SIZE, TMP_REG1, src1, src1w, TMP_REG1));
- src1 = TMP_REG1;
- } else if (src1 & SLJIT_IMM) {
- FAIL_IF(load_immediate(compiler, TMP_REG1, (sljit_uw)src1w));
- src1 = TMP_REG1;
- }
-
- if (src2 & SLJIT_IMM) {
- if (reg_map[src_dst] <= 7)
- FAIL_IF(push_inst16(compiler, (is_left ? LSLSI : LSRSI) | RD3(src_dst) | RN3(src_dst) | ((sljit_ins)src2w << 6)));
+ if (IS_2_LO_REGS(dst_reg, src1_reg))
+ FAIL_IF(push_inst16(compiler, (is_left ? LSLSI : LSRSI) | RD3(dst_reg) | RN3(src1_reg) | ((sljit_ins)src3w << 6)));
else
- FAIL_IF(push_inst32(compiler, (is_left ? LSL_WI : LSR_WI) | RD4(src_dst) | RM4(src_dst) | IMM5(src2w)));
+ FAIL_IF(push_inst32(compiler, (is_left ? LSL_WI : LSR_WI) | RD4(dst_reg) | RM4(src1_reg) | IMM5(src3w)));
+
+ src3w = (src3w ^ 0x1f) + 1;
+ return push_inst32(compiler, ORR_W | RD4(dst_reg) | RN4(dst_reg) | RM4(src2_reg) | (is_left ? 0x10 : 0x0) | IMM5(src3w));
+ }
- src2w = (src2w ^ 0x1f) + 1;
- return push_inst32(compiler, ORR_W | RD4(src_dst) | RN4(src_dst) | RM4(src1) | (is_left ? 0x10 : 0x0) | IMM5(src2w));
+ if (src3 & SLJIT_MEM) {
+ FAIL_IF(emit_op_mem(compiler, WORD_SIZE, TMP_REG2, src3, src3w, TMP_REG2));
+ src3 = TMP_REG2;
}
- if (op == SLJIT_MSHL || op == SLJIT_MLSHR) {
- FAIL_IF(push_inst32(compiler, ANDI | RD4(TMP_REG2) | RN4(src2) | 0x1f));
- src2 = TMP_REG2;
+ if (op == SLJIT_MSHL || op == SLJIT_MLSHR || dst_reg == src3) {
+ FAIL_IF(push_inst32(compiler, ANDI | RD4(TMP_REG2) | RN4(src3) | 0x1f));
+ src3 = TMP_REG2;
}
- if (IS_2_LO_REGS(src_dst, src2))
- FAIL_IF(push_inst16(compiler, (is_left ? LSLS : LSRS) | RD3(src_dst) | RN3(src2)));
+ if (dst_reg == src1_reg && IS_2_LO_REGS(dst_reg, src3))
+ FAIL_IF(push_inst16(compiler, (is_left ? LSLS : LSRS) | RD3(dst_reg) | RN3(src3)));
else
- FAIL_IF(push_inst32(compiler, (is_left ? LSL_W : LSR_W) | RD4(src_dst) | RN4(src_dst) | RM4(src2)));
+ FAIL_IF(push_inst32(compiler, (is_left ? LSL_W : LSR_W) | RD4(dst_reg) | RN4(src1_reg) | RM4(src3)));
- FAIL_IF(push_inst32(compiler, (is_left ? LSR_WI : LSL_WI) | RD4(TMP_REG1) | RM4(src1) | (1 << 6)));
- FAIL_IF(push_inst32(compiler, EORI | RD4(TMP_REG2) | RN4(src2) | 0x1f));
+ FAIL_IF(push_inst32(compiler, (is_left ? LSR_WI : LSL_WI) | RD4(TMP_REG1) | RM4(src2_reg) | (1 << 6)));
+ FAIL_IF(push_inst32(compiler, EORI | RD4(TMP_REG2) | RN4(src3) | 0x1f));
FAIL_IF(push_inst32(compiler, (is_left ? LSR_W : LSL_W) | RD4(TMP_REG1) | RN4(TMP_REG1) | RM4(TMP_REG2)));
- return push_inst32(compiler, ORR_W | RD4(src_dst) | RN4(src_dst) | RM4(TMP_REG1));
+ return push_inst32(compiler, ORR_W | RD4(dst_reg) | RN4(dst_reg) | RM4(TMP_REG1));
}
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_src(struct sljit_compiler *compiler, sljit_s32 op,
@@ -1909,16 +2154,60 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_src(struct sljit_compiler *comp
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 reg)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_dst(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw)
{
- CHECK_REG_INDEX(check_sljit_get_register_index(reg));
- return reg_map[reg];
+ sljit_s32 size, dst_r;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_op_dst(compiler, op, dst, dstw));
+ ADJUST_LOCAL_OFFSET(dst, dstw);
+
+ switch (op) {
+ case SLJIT_FAST_ENTER:
+ SLJIT_ASSERT(reg_map[TMP_REG2] == 14);
+
+ if (FAST_IS_REG(dst))
+ return push_inst16(compiler, MOV | SET_REGS44(dst, TMP_REG2));
+ break;
+ case SLJIT_GET_RETURN_ADDRESS:
+ size = GET_SAVED_REGISTERS_SIZE(compiler->scratches, compiler->saveds - SLJIT_KEPT_SAVEDS_COUNT(compiler->options), 0);
+
+ if (compiler->fsaveds > 0 || compiler->fscratches >= SLJIT_FIRST_SAVED_FLOAT_REG) {
+ /* The size of pc is not added above. */
+ if ((size & SSIZE_OF(sw)) == 0)
+ size += SSIZE_OF(sw);
+
+ size += GET_SAVED_FLOAT_REGISTERS_SIZE(compiler->fscratches, compiler->fsaveds, f64);
+ }
+
+ SLJIT_ASSERT(((compiler->local_size + size + SSIZE_OF(sw)) & 0x7) == 0);
+
+ dst_r = FAST_IS_REG(dst) ? dst : TMP_REG2;
+ FAIL_IF(emit_op_mem(compiler, WORD_SIZE, dst_r, SLJIT_MEM1(SLJIT_SP), compiler->local_size + size, TMP_REG1));
+ break;
+ }
+
+ if (dst & SLJIT_MEM)
+ return emit_op_mem(compiler, WORD_SIZE | STORE, TMP_REG2, dst, dstw, TMP_REG1);
+
+ return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_float_register_index(sljit_s32 reg)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 type, sljit_s32 reg)
{
- CHECK_REG_INDEX(check_sljit_get_float_register_index(reg));
- return (freg_map[reg] << 1);
+ CHECK_REG_INDEX(check_sljit_get_register_index(type, reg));
+
+ if (type == SLJIT_GP_REGISTER)
+ return reg_map[reg];
+
+ if (type == SLJIT_FLOAT_REGISTER || type == SLJIT_SIMD_REG_64)
+ return freg_map[reg];
+
+ if (type != SLJIT_SIMD_REG_128)
+ return freg_map[reg] & ~0x1;
+
+ return -1;
}
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *compiler,
@@ -1954,35 +2243,35 @@ static sljit_s32 emit_fop_mem(struct sljit_compiler *compiler, sljit_s32 flags,
if ((arg & REG_MASK) && (argw & 0x3) == 0) {
if (!(argw & ~0x3fc))
- return push_inst32(compiler, inst | 0x800000 | RN4(arg & REG_MASK) | DD4(reg) | ((sljit_uw)argw >> 2));
+ return push_inst32(compiler, inst | 0x800000 | RN4(arg & REG_MASK) | VD4(reg) | ((sljit_uw)argw >> 2));
if (!(-argw & ~0x3fc))
- return push_inst32(compiler, inst | RN4(arg & REG_MASK) | DD4(reg) | ((sljit_uw)-argw >> 2));
+ return push_inst32(compiler, inst | RN4(arg & REG_MASK) | VD4(reg) | ((sljit_uw)-argw >> 2));
}
if (arg & REG_MASK) {
if (emit_set_delta(compiler, TMP_REG1, arg & REG_MASK, argw) != SLJIT_ERR_UNSUPPORTED) {
FAIL_IF(compiler->error);
- return push_inst32(compiler, inst | 0x800000 | RN4(TMP_REG1) | DD4(reg));
+ return push_inst32(compiler, inst | 0x800000 | RN4(TMP_REG1) | VD4(reg));
}
imm = get_imm((sljit_uw)argw & ~(sljit_uw)0x3fc);
if (imm != INVALID_IMM) {
FAIL_IF(push_inst32(compiler, ADD_WI | RD4(TMP_REG1) | RN4(arg & REG_MASK) | imm));
- return push_inst32(compiler, inst | 0x800000 | RN4(TMP_REG1) | DD4(reg) | (((sljit_uw)argw & 0x3fc) >> 2));
+ return push_inst32(compiler, inst | 0x800000 | RN4(TMP_REG1) | VD4(reg) | (((sljit_uw)argw & 0x3fc) >> 2));
}
imm = get_imm((sljit_uw)-argw & ~(sljit_uw)0x3fc);
if (imm != INVALID_IMM) {
argw = -argw;
FAIL_IF(push_inst32(compiler, SUB_WI | RD4(TMP_REG1) | RN4(arg & REG_MASK) | imm));
- return push_inst32(compiler, inst | RN4(TMP_REG1) | DD4(reg) | (((sljit_uw)argw & 0x3fc) >> 2));
+ return push_inst32(compiler, inst | RN4(TMP_REG1) | VD4(reg) | (((sljit_uw)argw & 0x3fc) >> 2));
}
}
FAIL_IF(load_immediate(compiler, TMP_REG1, (sljit_uw)argw));
if (arg & REG_MASK)
FAIL_IF(push_inst16(compiler, ADD | SET_REGS44(TMP_REG1, (arg & REG_MASK))));
- return push_inst32(compiler, inst | 0x800000 | RN4(TMP_REG1) | DD4(reg));
+ return push_inst32(compiler, inst | 0x800000 | RN4(TMP_REG1) | VD4(reg));
}
static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_compiler *compiler, sljit_s32 op,
@@ -1996,41 +2285,53 @@ static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_comp
src = TMP_FREG1;
}
- FAIL_IF(push_inst32(compiler, VCVT_S32_F32 | (op & SLJIT_32) | DD4(TMP_FREG1) | DM4(src)));
+ FAIL_IF(push_inst32(compiler, VCVT_S32_F32 | (op & SLJIT_32) | VD4(TMP_FREG1) | VM4(src)));
if (FAST_IS_REG(dst))
- return push_inst32(compiler, VMOV | (1 << 20) | RT4(dst) | DN4(TMP_FREG1));
+ return push_inst32(compiler, VMOV | (1 << 20) | RT4(dst) | VN4(TMP_FREG1));
/* Store the integer value from a VFP register. */
return emit_fop_mem(compiler, 0, TMP_FREG1, dst, dstw);
}
-static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_sw(struct sljit_compiler *compiler, sljit_s32 op,
+static sljit_s32 sljit_emit_fop1_conv_f64_from_w(struct sljit_compiler *compiler, sljit_ins ins,
sljit_s32 dst, sljit_sw dstw,
sljit_s32 src, sljit_sw srcw)
{
sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
- op ^= SLJIT_32;
-
if (FAST_IS_REG(src))
- FAIL_IF(push_inst32(compiler, VMOV | RT4(src) | DN4(TMP_FREG1)));
+ FAIL_IF(push_inst32(compiler, VMOV | RT4(src) | VN4(TMP_FREG1)));
else if (src & SLJIT_MEM) {
/* Load the integer value into a VFP register. */
FAIL_IF(emit_fop_mem(compiler, FPU_LOAD, TMP_FREG1, src, srcw));
}
else {
FAIL_IF(load_immediate(compiler, TMP_REG1, (sljit_uw)srcw));
- FAIL_IF(push_inst32(compiler, VMOV | RT4(TMP_REG1) | DN4(TMP_FREG1)));
+ FAIL_IF(push_inst32(compiler, VMOV | RT4(TMP_REG1) | VN4(TMP_FREG1)));
}
- FAIL_IF(push_inst32(compiler, VCVT_F32_S32 | (op & SLJIT_32) | DD4(dst_r) | DM4(TMP_FREG1)));
+ FAIL_IF(push_inst32(compiler, ins | VD4(dst_r) | VM4(TMP_FREG1)));
if (dst & SLJIT_MEM)
- return emit_fop_mem(compiler, (op & SLJIT_32), TMP_FREG1, dst, dstw);
+ return emit_fop_mem(compiler, (ins & SLJIT_32), TMP_FREG1, dst, dstw);
return SLJIT_SUCCESS;
}
+static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_sw(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
+{
+ return sljit_emit_fop1_conv_f64_from_w(compiler, VCVT_F32_S32 | (~op & SLJIT_32), dst, dstw, src, srcw);
+}
+
+static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_uw(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
+{
+ return sljit_emit_fop1_conv_f64_from_w(compiler, VCVT_F32_U32 | (~op & SLJIT_32), dst, dstw, src, srcw);
+}
+
static SLJIT_INLINE sljit_s32 sljit_emit_fop1_cmp(struct sljit_compiler *compiler, sljit_s32 op,
sljit_s32 src1, sljit_sw src1w,
sljit_s32 src2, sljit_sw src2w)
@@ -2038,17 +2339,23 @@ static SLJIT_INLINE sljit_s32 sljit_emit_fop1_cmp(struct sljit_compiler *compile
op ^= SLJIT_32;
if (src1 & SLJIT_MEM) {
- emit_fop_mem(compiler, (op & SLJIT_32) | FPU_LOAD, TMP_FREG1, src1, src1w);
+ FAIL_IF(emit_fop_mem(compiler, (op & SLJIT_32) | FPU_LOAD, TMP_FREG1, src1, src1w));
src1 = TMP_FREG1;
}
if (src2 & SLJIT_MEM) {
- emit_fop_mem(compiler, (op & SLJIT_32) | FPU_LOAD, TMP_FREG2, src2, src2w);
+ FAIL_IF(emit_fop_mem(compiler, (op & SLJIT_32) | FPU_LOAD, TMP_FREG2, src2, src2w));
src2 = TMP_FREG2;
}
- FAIL_IF(push_inst32(compiler, VCMP_F32 | (op & SLJIT_32) | DD4(src1) | DM4(src2)));
- return push_inst32(compiler, VMRS);
+ FAIL_IF(push_inst32(compiler, VCMP_F32 | (op & SLJIT_32) | VD4(src1) | VM4(src2)));
+ FAIL_IF(push_inst32(compiler, VMRS));
+
+ if (GET_FLAG_TYPE(op) != SLJIT_UNORDERED_OR_EQUAL)
+ return SLJIT_SUCCESS;
+
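+	/* For SLJIT_UNORDERED_OR_EQUAL: an unordered compare leaves the V flag set
+	   after VMRS, so a conditional CMP r0, r0 forces Z and the equal test
+	   succeeds as well. */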
+ FAIL_IF(push_inst16(compiler, IT | (0x6 << 4) | 0x8));
+ return push_inst16(compiler, CMP /* Rm, Rn = r0 */);
}
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compiler, sljit_s32 op,
@@ -2068,27 +2375,27 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compil
op ^= SLJIT_32;
if (src & SLJIT_MEM) {
- emit_fop_mem(compiler, (op & SLJIT_32) | FPU_LOAD, dst_r, src, srcw);
+ FAIL_IF(emit_fop_mem(compiler, (op & SLJIT_32) | FPU_LOAD, dst_r, src, srcw));
src = dst_r;
}
switch (GET_OPCODE(op)) {
case SLJIT_MOV_F64:
if (src != dst_r) {
- if (dst_r != TMP_FREG1)
- FAIL_IF(push_inst32(compiler, VMOV_F32 | (op & SLJIT_32) | DD4(dst_r) | DM4(src)));
+ if (!(dst & SLJIT_MEM))
+ FAIL_IF(push_inst32(compiler, VMOV_F32 | (op & SLJIT_32) | VD4(dst_r) | VM4(src)));
else
dst_r = src;
}
break;
case SLJIT_NEG_F64:
- FAIL_IF(push_inst32(compiler, VNEG_F32 | (op & SLJIT_32) | DD4(dst_r) | DM4(src)));
+ FAIL_IF(push_inst32(compiler, VNEG_F32 | (op & SLJIT_32) | VD4(dst_r) | VM4(src)));
break;
case SLJIT_ABS_F64:
- FAIL_IF(push_inst32(compiler, VABS_F32 | (op & SLJIT_32) | DD4(dst_r) | DM4(src)));
+ FAIL_IF(push_inst32(compiler, VABS_F32 | (op & SLJIT_32) | VD4(dst_r) | VM4(src)));
break;
case SLJIT_CONV_F64_FROM_F32:
- FAIL_IF(push_inst32(compiler, VCVT_F64_F32 | (op & SLJIT_32) | DD4(dst_r) | DM4(src)));
+ FAIL_IF(push_inst32(compiler, VCVT_F64_F32 | (op & SLJIT_32) | VD4(dst_r) | VM4(src)));
op ^= SLJIT_32;
break;
}
@@ -2115,27 +2422,33 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compil
dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
if (src1 & SLJIT_MEM) {
- emit_fop_mem(compiler, (op & SLJIT_32) | FPU_LOAD, TMP_FREG1, src1, src1w);
+ FAIL_IF(emit_fop_mem(compiler, (op & SLJIT_32) | FPU_LOAD, TMP_FREG1, src1, src1w));
src1 = TMP_FREG1;
}
if (src2 & SLJIT_MEM) {
- emit_fop_mem(compiler, (op & SLJIT_32) | FPU_LOAD, TMP_FREG2, src2, src2w);
+ FAIL_IF(emit_fop_mem(compiler, (op & SLJIT_32) | FPU_LOAD, TMP_FREG2, src2, src2w));
src2 = TMP_FREG2;
}
switch (GET_OPCODE(op)) {
case SLJIT_ADD_F64:
- FAIL_IF(push_inst32(compiler, VADD_F32 | (op & SLJIT_32) | DD4(dst_r) | DN4(src1) | DM4(src2)));
+ FAIL_IF(push_inst32(compiler, VADD_F32 | (op & SLJIT_32) | VD4(dst_r) | VN4(src1) | VM4(src2)));
break;
case SLJIT_SUB_F64:
- FAIL_IF(push_inst32(compiler, VSUB_F32 | (op & SLJIT_32) | DD4(dst_r) | DN4(src1) | DM4(src2)));
+ FAIL_IF(push_inst32(compiler, VSUB_F32 | (op & SLJIT_32) | VD4(dst_r) | VN4(src1) | VM4(src2)));
break;
case SLJIT_MUL_F64:
- FAIL_IF(push_inst32(compiler, VMUL_F32 | (op & SLJIT_32) | DD4(dst_r) | DN4(src1) | DM4(src2)));
+ FAIL_IF(push_inst32(compiler, VMUL_F32 | (op & SLJIT_32) | VD4(dst_r) | VN4(src1) | VM4(src2)));
break;
case SLJIT_DIV_F64:
- FAIL_IF(push_inst32(compiler, VDIV_F32 | (op & SLJIT_32) | DD4(dst_r) | DN4(src1) | DM4(src2)));
+ FAIL_IF(push_inst32(compiler, VDIV_F32 | (op & SLJIT_32) | VD4(dst_r) | VN4(src1) | VM4(src2)));
break;
+ case SLJIT_COPYSIGN_F64:
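+		/* Copy the word holding src2's sign into TMP_REG1, take the absolute
+		   value of src1, then negate the result when that word is negative. */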
+ FAIL_IF(push_inst32(compiler, VMOV | (1 << 20) | VN4(src2) | RT4(TMP_REG1) | ((op & SLJIT_32) ? (1 << 7) : 0)));
+ FAIL_IF(push_inst32(compiler, VABS_F32 | (op & SLJIT_32) | VD4(dst_r) | VM4(src1)));
+ FAIL_IF(push_inst32(compiler, CMPI_W | RN4(TMP_REG1) | 0));
+ FAIL_IF(push_inst16(compiler, IT | (0xb << 4) | 0x8));
+ return push_inst32(compiler, VNEG_F32 | (op & SLJIT_32) | VD4(dst_r) | VM4(dst_r));
}
if (!(dst & SLJIT_MEM))
@@ -2143,23 +2456,99 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compil
return emit_fop_mem(compiler, (op & SLJIT_32), TMP_FREG1, dst, dstw);
}
-/* --------------------------------------------------------------------- */
-/* Other instructions */
-/* --------------------------------------------------------------------- */
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fset32(struct sljit_compiler *compiler,
+ sljit_s32 freg, sljit_f32 value)
+{
+#if defined(__ARM_NEON) && __ARM_NEON
+ sljit_u32 exp;
+ sljit_ins ins;
+#endif /* NEON */
+ union {
+ sljit_u32 imm;
+ sljit_f32 value;
+ } u;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_fset32(compiler, freg, value));
+
+ u.value = value;
+
+#if defined(__ARM_NEON) && __ARM_NEON
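+	/* A single VMOV (immediate) can load the value when only the sign bit, a
+	   small exponent range and the top four fraction bits are used, i.e. the
+	   low 19 mantissa bits are zero. */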
+ if ((u.imm << (32 - 19)) == 0) {
+ exp = (u.imm >> (23 + 2)) & 0x3f;
+
+ if (exp == 0x20 || exp == 0x1f) {
+ ins = ((u.imm >> 24) & 0x80) | ((u.imm >> 19) & 0x7f);
+ return push_inst32(compiler, (VMOV_F32 ^ (1 << 6)) | ((ins & 0xf0) << 12) | VD4(freg) | (ins & 0xf));
+ }
+ }
+#endif /* NEON */
+
+ FAIL_IF(load_immediate(compiler, TMP_REG1, u.imm));
+ return push_inst32(compiler, VMOV | VN4(freg) | RT4(TMP_REG1));
+}
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fset64(struct sljit_compiler *compiler,
+ sljit_s32 freg, sljit_f64 value)
{
+#if defined(__ARM_NEON) && __ARM_NEON
+ sljit_u32 exp;
+ sljit_ins ins;
+#endif /* NEON */
+ union {
+ sljit_u32 imm[2];
+ sljit_f64 value;
+ } u;
+
CHECK_ERROR();
- CHECK(check_sljit_emit_fast_enter(compiler, dst, dstw));
- ADJUST_LOCAL_OFFSET(dst, dstw);
+ CHECK(check_sljit_emit_fset64(compiler, freg, value));
- SLJIT_ASSERT(reg_map[TMP_REG2] == 14);
+ u.value = value;
- if (FAST_IS_REG(dst))
- return push_inst16(compiler, MOV | SET_REGS44(dst, TMP_REG2));
+#if defined(__ARM_NEON) && __ARM_NEON
+ if (u.imm[0] == 0 && (u.imm[1] << (64 - 48)) == 0) {
+ exp = (u.imm[1] >> ((52 - 32) + 2)) & 0x1ff;
+
+ if (exp == 0x100 || exp == 0xff) {
+ ins = ((u.imm[1] >> (56 - 32)) & 0x80) | ((u.imm[1] >> (48 - 32)) & 0x7f);
+ return push_inst32(compiler, (VMOV_F32 ^ (1 << 6)) | (1 << 8) | ((ins & 0xf0) << 12) | VD4(freg) | (ins & 0xf));
+ }
+ }
+#endif /* NEON */
+
+ FAIL_IF(load_immediate(compiler, TMP_REG1, u.imm[0]));
+ if (u.imm[0] == u.imm[1])
+ return push_inst32(compiler, VMOV2 | RN4(TMP_REG1) | RT4(TMP_REG1) | VM4(freg));
+
+ FAIL_IF(load_immediate(compiler, TMP_REG2, u.imm[1]));
+ return push_inst32(compiler, VMOV2 | RN4(TMP_REG2) | RT4(TMP_REG1) | VM4(freg));
+}
+
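+/* Raw bit copy between core and float registers: VMOV2 moves a core register
+   pair to/from a double register, VMOV a single word; bit 20 turns the
+   transfer into the float-to-core direction (SLJIT_COPY_FROM_F64). */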
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fcopy(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 freg, sljit_s32 reg)
+{
+ sljit_s32 reg2;
+ sljit_ins inst;
- /* Memory. */
- return emit_op_mem(compiler, WORD_SIZE | STORE, TMP_REG2, dst, dstw, TMP_REG1);
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_fcopy(compiler, op, freg, reg));
+
+ if (reg & REG_PAIR_MASK) {
+ reg2 = REG_PAIR_SECOND(reg);
+ reg = REG_PAIR_FIRST(reg);
+
+ inst = VMOV2 | RN4(reg) | RT4(reg2) | VM4(freg);
+ } else {
+ inst = VMOV | VN4(freg) | RT4(reg);
+
+ if (!(op & SLJIT_32))
+ inst |= 1 << 7;
+ }
+
+ if (GET_OPCODE(op) == SLJIT_COPY_FROM_F64)
+ inst |= 1 << 20;
+
+ return push_inst32(compiler, inst);
}
/* --------------------------------------------------------------------- */
@@ -2170,15 +2559,17 @@ static sljit_uw get_cc(struct sljit_compiler *compiler, sljit_s32 type)
{
switch (type) {
case SLJIT_EQUAL:
+ case SLJIT_ATOMIC_STORED:
case SLJIT_F_EQUAL:
case SLJIT_ORDERED_EQUAL:
- case SLJIT_UNORDERED_OR_EQUAL: /* Not supported. */
+ case SLJIT_UNORDERED_OR_EQUAL:
return 0x0;
case SLJIT_NOT_EQUAL:
+ case SLJIT_ATOMIC_NOT_STORED:
case SLJIT_F_NOT_EQUAL:
case SLJIT_UNORDERED_OR_NOT_EQUAL:
- case SLJIT_ORDERED_NOT_EQUAL: /* Not supported. */
+ case SLJIT_ORDERED_NOT_EQUAL:
return 0x1;
case SLJIT_CARRY:
@@ -2282,7 +2673,6 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compile
set_jump(jump, compiler, type & SLJIT_REWRITABLE_JUMP);
type &= 0xff;
- PTR_FAIL_IF(emit_imm32_const(compiler, TMP_REG1, 0));
if (type < SLJIT_JUMP) {
jump->flags |= IS_COND;
cc = get_cc(compiler, type);
@@ -2298,6 +2688,8 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compile
PTR_FAIL_IF(push_inst16(compiler, BLX | RN3(TMP_REG1)));
}
+ /* Maximum number of instructions required for generating a constant. */
+ compiler->size += JUMP_MAX_SIZE - 1;
return jump;
}
@@ -2453,18 +2845,18 @@ static sljit_s32 hardfloat_call_with_args(struct sljit_compiler *compiler, sljit
switch (arg_types & SLJIT_ARG_MASK) {
case SLJIT_ARG_TYPE_F64:
if (offset != new_offset)
- FAIL_IF(push_inst32(compiler, VMOV_F32 | SLJIT_32 | DD4(new_offset) | DM4(offset)));
+ FAIL_IF(push_inst32(compiler, VMOV_F32 | SLJIT_32 | VD4(new_offset) | VM4(offset)));
new_offset++;
offset++;
break;
case SLJIT_ARG_TYPE_F32:
if (f32_offset != 0) {
- FAIL_IF(push_inst32(compiler, VMOV_F32 | 0x400000 | DD4(f32_offset) | DM4(offset)));
+ FAIL_IF(push_inst32(compiler, VMOV_F32 | 0x400000 | VD4(f32_offset) | VM4(offset)));
f32_offset = 0;
} else {
if (offset != new_offset)
- FAIL_IF(push_inst32(compiler, VMOV_F32 | 0x400000 | DD4(new_offset) | DM4(offset)));
+ FAIL_IF(push_inst32(compiler, VMOV_F32 | 0x400000 | VD4(new_offset) | VM4(offset)));
f32_offset = new_offset;
new_offset++;
}
@@ -2546,7 +2938,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compi
SLJIT_ASSERT(reg_map[TMP_REG1] != 14);
- if (!(src & SLJIT_IMM)) {
+ if (src != SLJIT_IMM) {
if (FAST_IS_REG(src)) {
SLJIT_ASSERT(reg_map[src] != 14);
return push_inst16(compiler, (type <= SLJIT_JUMP ? BX : BLX) | RN3(src));
@@ -2563,8 +2955,9 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compi
set_jump(jump, compiler, JUMP_ADDR | ((type >= SLJIT_FAST_CALL) ? IS_BL : 0));
jump->u.target = (sljit_uw)srcw;
- FAIL_IF(emit_imm32_const(compiler, TMP_REG1, 0));
jump->addr = compiler->size;
+ /* Maximum number of instructions required for generating a constant. */
+ compiler->size += JUMP_MAX_SIZE - 1;
return push_inst16(compiler, (type <= SLJIT_JUMP ? BX : BLX) | RN3(TMP_REG1));
}
@@ -2645,8 +3038,8 @@ static SLJIT_INLINE sljit_s32 emit_fmov_before_return(struct sljit_compiler *com
if (FAST_IS_REG(src)) {
if (op & SLJIT_32)
- return push_inst32(compiler, VMOV | (1 << 20) | DN4(src) | RT4(SLJIT_R0));
- return push_inst32(compiler, VMOV2 | (1 << 20) | DM4(src) | RT4(SLJIT_R0) | RN4(SLJIT_R1));
+ return push_inst32(compiler, VMOV | (1 << 20) | VN4(src) | RT4(SLJIT_R0));
+ return push_inst32(compiler, VMOV2 | (1 << 20) | VM4(src) | RT4(SLJIT_R0) | RN4(SLJIT_R1));
}
SLJIT_SKIP_CHECKS(compiler);
@@ -2711,23 +3104,47 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *co
return push_inst32(compiler, MOV_W | SET_FLAGS | RD4(TMP_REG1) | RM4(dst_r));
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_cmov(struct sljit_compiler *compiler, sljit_s32 type,
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_select(struct sljit_compiler *compiler, sljit_s32 type,
sljit_s32 dst_reg,
- sljit_s32 src, sljit_sw srcw)
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2_reg)
{
sljit_uw cc, tmp;
CHECK_ERROR();
- CHECK(check_sljit_emit_cmov(compiler, type, dst_reg, src, srcw));
+ CHECK(check_sljit_emit_select(compiler, type, dst_reg, src1, src1w, src2_reg));
+
+ ADJUST_LOCAL_OFFSET(src1, src1w);
+
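+	/* Make sure src2_reg (the value kept when the condition is false) ends up
+	   in dst_reg first; if src1 already occupies dst_reg, swap the operands
+	   and invert the condition instead of moving it. */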
+ if (src2_reg != dst_reg && src1 == dst_reg) {
+ src1 = src2_reg;
+ src1w = 0;
+ src2_reg = dst_reg;
+ type ^= 0x1;
+ }
+
+ if (src1 & SLJIT_MEM) {
+ FAIL_IF(emit_op_mem(compiler, WORD_SIZE, (src2_reg != dst_reg) ? dst_reg : TMP_REG1, src1, src1w, TMP_REG1));
+
+ if (src2_reg != dst_reg) {
+ src1 = src2_reg;
+ src1w = 0;
+ type ^= 0x1;
+ } else {
+ src1 = TMP_REG1;
+ src1w = 0;
+ }
+ } else if (dst_reg != src2_reg)
+ FAIL_IF(push_inst16(compiler, MOV | SET_REGS44(dst_reg, src2_reg)));
cc = get_cc(compiler, type & ~SLJIT_32);
- if (!(src & SLJIT_IMM)) {
+ if (src1 != SLJIT_IMM) {
FAIL_IF(push_inst16(compiler, IT | (cc << 4) | 0x8));
- return push_inst16(compiler, MOV | SET_REGS44(dst_reg, src));
+ return push_inst16(compiler, MOV | SET_REGS44(dst_reg, src1));
}
- tmp = (sljit_uw) srcw;
+ tmp = (sljit_uw)src1w;
if (tmp < 0x10000) {
/* set low 16 bits, set hi 16 bits to 0. */
@@ -2736,13 +3153,13 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_cmov(struct sljit_compiler *compil
| COPY_BITS(tmp, 12, 16, 4) | COPY_BITS(tmp, 11, 26, 1) | COPY_BITS(tmp, 8, 12, 3) | (tmp & 0xff));
}
- tmp = get_imm((sljit_uw)srcw);
+ tmp = get_imm((sljit_uw)src1w);
if (tmp != INVALID_IMM) {
FAIL_IF(push_inst16(compiler, IT | (cc << 4) | 0x8));
return push_inst32(compiler, MOV_WI | RD4(dst_reg) | tmp);
}
- tmp = get_imm(~(sljit_uw)srcw);
+ tmp = get_imm(~(sljit_uw)src1w);
if (tmp != INVALID_IMM) {
FAIL_IF(push_inst16(compiler, IT | (cc << 4) | 0x8));
return push_inst32(compiler, MVN_WI | RD4(dst_reg) | tmp);
@@ -2750,13 +3167,43 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_cmov(struct sljit_compiler *compil
FAIL_IF(push_inst16(compiler, IT | (cc << 4) | ((cc & 0x1) << 3) | 0x4));
- tmp = (sljit_uw) srcw;
+ tmp = (sljit_uw)src1w;
FAIL_IF(push_inst32(compiler, MOVW | RD4(dst_reg)
| COPY_BITS(tmp, 12, 16, 4) | COPY_BITS(tmp, 11, 26, 1) | COPY_BITS(tmp, 8, 12, 3) | (tmp & 0xff)));
return push_inst32(compiler, MOVT | RD4(dst_reg)
| COPY_BITS(tmp, 12 + 16, 16, 4) | COPY_BITS(tmp, 11 + 16, 26, 1) | COPY_BITS(tmp, 8 + 16, 12, 3) | ((tmp & 0xff0000) >> 16));
}
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fselect(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 dst_freg,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2_freg)
+{
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_fselect(compiler, type, dst_freg, src1, src1w, src2_freg));
+
+ ADJUST_LOCAL_OFFSET(src1, src1w);
+
+ type ^= SLJIT_32;
+
+ if (dst_freg != src2_freg) {
+ if (dst_freg == src1) {
+ src1 = src2_freg;
+ src1w = 0;
+ type ^= 0x1;
+ } else
+ FAIL_IF(push_inst32(compiler, VMOV_F32 | (type & SLJIT_32) | VD4(dst_freg) | VM4(src2_freg)));
+ }
+
+ if (src1 & SLJIT_MEM) {
+ FAIL_IF(emit_fop_mem(compiler, (type & SLJIT_32) | FPU_LOAD, TMP_FREG2, src1, src1w));
+ src1 = TMP_FREG2;
+ }
+
+ FAIL_IF(push_inst16(compiler, IT | (get_cc(compiler, type & ~SLJIT_32) << 4) | 0x8));
+ return push_inst32(compiler, VMOV_F32 | (type & SLJIT_32) | VD4(dst_freg) | VM4(src1));
+}
+
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_mem(struct sljit_compiler *compiler, sljit_s32 type,
sljit_s32 reg,
sljit_s32 mem, sljit_sw memw)
@@ -2770,7 +3217,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_mem(struct sljit_compiler *compile
if (!(reg & REG_PAIR_MASK))
return sljit_emit_mem_unaligned(compiler, type, reg, mem, memw);
- if (type & (SLJIT_MEM_UNALIGNED | SLJIT_MEM_UNALIGNED_16 | SLJIT_MEM_UNALIGNED_32)) {
+ if (type & (SLJIT_MEM_UNALIGNED | SLJIT_MEM_ALIGNED_16 | SLJIT_MEM_ALIGNED_32)) {
if ((mem & REG_MASK) == 0) {
if ((memw & 0xfff) >= (0x1000 - SSIZE_OF(sw))) {
imm = get_imm((sljit_uw)((memw + 0x1000) & ~0xfff));
@@ -2781,7 +3228,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_mem(struct sljit_compiler *compile
imm = get_imm((sljit_uw)(memw & ~0xfff));
if (imm != INVALID_IMM)
- memw &= 0xff;
+ memw &= 0xfff;
}
if (imm == INVALID_IMM) {
@@ -3058,11 +3505,11 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fmem(struct sljit_compiler *compil
CHECK_ERROR();
CHECK(check_sljit_emit_fmem(compiler, type, freg, mem, memw));
- if (type & SLJIT_MEM_UNALIGNED_32)
+ if (type & SLJIT_MEM_ALIGNED_32)
return emit_fop_mem(compiler, ((type ^ SLJIT_32) & SLJIT_32) | ((type & SLJIT_MEM_STORE) ? 0 : FPU_LOAD), freg, mem, memw);
if (type & SLJIT_MEM_STORE) {
- FAIL_IF(push_inst32(compiler, VMOV | (1 << 20) | DN4(freg) | RT4(TMP_REG2)));
+ FAIL_IF(push_inst32(compiler, VMOV | (1 << 20) | VN4(freg) | RT4(TMP_REG2)));
if (type & SLJIT_32)
return emit_op_mem(compiler, WORD_SIZE | STORE, TMP_REG2, mem, memw, TMP_REG1);
@@ -3071,13 +3518,13 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fmem(struct sljit_compiler *compil
mem |= SLJIT_MEM;
FAIL_IF(emit_op_mem(compiler, WORD_SIZE | STORE, TMP_REG2, mem, memw, TMP_REG1));
- FAIL_IF(push_inst32(compiler, VMOV | (1 << 20) | DN4(freg) | 0x80 | RT4(TMP_REG2)));
+ FAIL_IF(push_inst32(compiler, VMOV | (1 << 20) | VN4(freg) | 0x80 | RT4(TMP_REG2)));
return emit_op_mem(compiler, WORD_SIZE | STORE, TMP_REG2, mem, memw + 4, TMP_REG1);
}
if (type & SLJIT_32) {
FAIL_IF(emit_op_mem(compiler, WORD_SIZE, TMP_REG2, mem, memw, TMP_REG1));
- return push_inst32(compiler, VMOV | DN4(freg) | RT4(TMP_REG2));
+ return push_inst32(compiler, VMOV | VN4(freg) | RT4(TMP_REG2));
}
FAIL_IF(update_mem_addr(compiler, &mem, &memw, 0xfff - 4));
@@ -3085,11 +3532,715 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fmem(struct sljit_compiler *compil
FAIL_IF(emit_op_mem(compiler, WORD_SIZE, TMP_REG2, mem, memw, TMP_REG1));
FAIL_IF(emit_op_mem(compiler, WORD_SIZE, TMP_REG1, mem, memw + 4, TMP_REG1));
- return push_inst32(compiler, VMOV2 | DM4(freg) | RT4(TMP_REG2) | RN4(TMP_REG1));
+ return push_inst32(compiler, VMOV2 | VM4(freg) | RT4(TMP_REG2) | RN4(TMP_REG1));
+}
+
+static sljit_s32 sljit_emit_simd_mem_offset(struct sljit_compiler *compiler, sljit_s32 *mem_ptr, sljit_sw memw)
+{
+ sljit_uw imm;
+ sljit_s32 mem = *mem_ptr;
+
+ if (SLJIT_UNLIKELY(mem & OFFS_REG_MASK)) {
+ *mem_ptr = TMP_REG1;
+ return push_inst32(compiler, ADD_W | RD4(TMP_REG1) | RN4(mem & REG_MASK) | RM4(OFFS_REG(mem)) | ((sljit_uw)(memw & 0x3) << 6));
+ }
+
+ if (SLJIT_UNLIKELY(!(mem & REG_MASK))) {
+ *mem_ptr = TMP_REG1;
+ return load_immediate(compiler, TMP_REG1, (sljit_uw)memw);
+ }
+
+ mem &= REG_MASK;
+
+ if (memw == 0) {
+ *mem_ptr = mem;
+ return SLJIT_SUCCESS;
+ }
+
+ *mem_ptr = TMP_REG1;
+ imm = get_imm((sljit_uw)(memw < 0 ? -memw : memw));
+
+ if (imm != INVALID_IMM)
+ return push_inst32(compiler, ((memw < 0) ? SUB_WI : ADD_WI) | RD4(TMP_REG1) | RN4(mem) | imm);
+
+ FAIL_IF(load_immediate(compiler, TMP_REG1, (sljit_uw)memw));
+ return push_inst16(compiler, ADD | SET_REGS44(TMP_REG1, mem));
+}
+
+static SLJIT_INLINE sljit_s32 simd_get_quad_reg_index(sljit_s32 freg)
+{
+ freg += freg & 0x1;
+
+ SLJIT_ASSERT((freg_map[freg] & 0x1) == (freg <= SLJIT_NUMBER_OF_SCRATCH_FLOAT_REGISTERS));
+
+ if (freg <= SLJIT_NUMBER_OF_SCRATCH_FLOAT_REGISTERS)
+ freg--;
+
+ return freg;
+}
+
+#define SLJIT_QUAD_OTHER_HALF(freg) ((((freg) & 0x1) << 1) - 1)
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_mov(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
+ sljit_s32 srcdst, sljit_sw srcdstw)
+{
+ sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
+ sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
+ sljit_s32 alignment = SLJIT_SIMD_GET_ELEM2_SIZE(type);
+ sljit_ins ins;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_mov(compiler, type, freg, srcdst, srcdstw));
+
+ ADJUST_LOCAL_OFFSET(srcdst, srcdstw);
+
+ if (reg_size != 3 && reg_size != 4)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if ((type & SLJIT_SIMD_FLOAT) && (elem_size < 2 || elem_size > 3))
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ if (reg_size == 4)
+ freg = simd_get_quad_reg_index(freg);
+
+ if (!(srcdst & SLJIT_MEM)) {
+ if (reg_size == 4)
+ srcdst = simd_get_quad_reg_index(srcdst);
+
+ if (type & SLJIT_SIMD_STORE)
+ ins = VD4(srcdst) | VN4(freg) | VM4(freg);
+ else
+ ins = VD4(freg) | VN4(srcdst) | VM4(srcdst);
+
+ if (reg_size == 4)
+ ins |= (sljit_ins)1 << 6;
+
+ return push_inst32(compiler, VORR | ins);
+ }
+
+ FAIL_IF(sljit_emit_simd_mem_offset(compiler, &srcdst, srcdstw));
+
+ if (elem_size > 3)
+ elem_size = 3;
+
+ ins = ((type & SLJIT_SIMD_STORE) ? VST1 : VLD1) | VD4(freg)
+ | (sljit_ins)((reg_size == 3) ? (0x7 << 8) : (0xa << 8));
+
+ SLJIT_ASSERT(reg_size >= alignment);
+
+ if (alignment == 3)
+ ins |= 0x10;
+ else if (alignment >= 4)
+ ins |= 0x20;
+
+ return push_inst32(compiler, ins | RN4(srcdst) | ((sljit_ins)elem_size) << 6 | 0xf);
+}
+
+static sljit_ins simd_get_imm(sljit_s32 elem_size, sljit_uw value)
+{
+ sljit_ins result;
+
+ if (elem_size > 1 && (sljit_u16)value == (value >> 16)) {
+ elem_size = 1;
+ value = (sljit_u16)value;
+ }
+
+ if (elem_size == 1 && (sljit_u8)value == (value >> 8)) {
+ elem_size = 0;
+ value = (sljit_u8)value;
+ }
+
+ switch (elem_size) {
+ case 0:
+ SLJIT_ASSERT(value <= 0xff);
+ result = 0xe00;
+ break;
+ case 1:
+ SLJIT_ASSERT(value <= 0xffff);
+ result = 0;
+
+ while (1) {
+ if (value <= 0xff) {
+ result |= 0x800;
+ break;
+ }
+
+ if ((value & 0xff) == 0) {
+ value >>= 8;
+ result |= 0xa00;
+ break;
+ }
+
+ if (result != 0)
+ return ~(sljit_ins)0;
+
+ value ^= (sljit_uw)0xffff;
+ result = (1 << 5);
+ }
+ break;
+ default:
+ SLJIT_ASSERT(value <= 0xffffffff);
+ result = 0;
+
+ while (1) {
+ if (value <= 0xff) {
+ result |= 0x000;
+ break;
+ }
+
+ if ((value & ~(sljit_uw)0xff00) == 0) {
+ value >>= 8;
+ result |= 0x200;
+ break;
+ }
+
+ if ((value & ~(sljit_uw)0xff0000) == 0) {
+ value >>= 16;
+ result |= 0x400;
+ break;
+ }
+
+ if ((value & ~(sljit_uw)0xff000000) == 0) {
+ value >>= 24;
+ result |= 0x600;
+ break;
+ }
+
+ if ((value & (sljit_uw)0xff) == 0xff && (value >> 16) == 0) {
+ value >>= 8;
+ result |= 0xc00;
+ break;
+ }
+
+ if ((value & (sljit_uw)0xffff) == 0xffff && (value >> 24) == 0) {
+ value >>= 16;
+ result |= 0xd00;
+ break;
+ }
+
+ if (result != 0)
+ return ~(sljit_ins)0;
+
+ value = ~value;
+ result = (1 << 5);
+ }
+ break;
+ }
+
+ return ((sljit_ins)value & 0xf) | (((sljit_ins)value & 0x70) << 12) | (((sljit_ins)value & 0x80) << 21) | result;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_replicate(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
+ sljit_s32 src, sljit_sw srcw)
+{
+ sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
+ sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
+ sljit_ins ins, imm;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_replicate(compiler, type, freg, src, srcw));
+
+ ADJUST_LOCAL_OFFSET(src, srcw);
+
+ if (reg_size != 3 && reg_size != 4)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if ((type & SLJIT_SIMD_FLOAT) ? (elem_size < 2 || elem_size > 3) : (elem_size > 2))
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ if (reg_size == 4)
+ freg = simd_get_quad_reg_index(freg);
+
+ if (src == SLJIT_IMM && srcw == 0)
+ return push_inst32(compiler, VMOV_i | ((reg_size == 4) ? (1 << 6) : 0) | VD4(freg));
+
+ if (SLJIT_UNLIKELY(elem_size == 3)) {
+ SLJIT_ASSERT(type & SLJIT_SIMD_FLOAT);
+
+ if (src & SLJIT_MEM) {
+ FAIL_IF(emit_fop_mem(compiler, FPU_LOAD | SLJIT_32, freg, src, srcw));
+ src = freg;
+ } else if (freg != src)
+ FAIL_IF(push_inst32(compiler, VORR | VD4(freg) | VN4(src) | VM4(src)));
+
+ freg += SLJIT_QUAD_OTHER_HALF(freg);
+
+ if (freg != src)
+ return push_inst32(compiler, VORR | VD4(freg) | VN4(src) | VM4(src));
+ return SLJIT_SUCCESS;
+ }
+
+ if (src & SLJIT_MEM) {
+ FAIL_IF(sljit_emit_simd_mem_offset(compiler, &src, srcw));
+
+ ins = (sljit_ins)(elem_size << 6);
+
+ if (reg_size == 4)
+ ins |= 1 << 5;
+
+ return push_inst32(compiler, VLD1_r | ins | VD4(freg) | RN4(src) | 0xf);
+ }
+
+ if (type & SLJIT_SIMD_FLOAT) {
+ SLJIT_ASSERT(elem_size == 2);
+ ins = ((sljit_ins)freg_ebit_map[src] << (16 + 2 + 1)) | ((sljit_ins)1 << (16 + 2));
+
+ if (reg_size == 4)
+ ins |= (sljit_ins)1 << 6;
+
+ return push_inst32(compiler, VDUP_s | ins | VD4(freg) | (sljit_ins)freg_map[src]);
+ }
+
+ if (src == SLJIT_IMM) {
+ if (elem_size < 2)
+ srcw &= ((sljit_sw)1 << (((sljit_sw)1 << elem_size) << 3)) - 1;
+
+ imm = simd_get_imm(elem_size, (sljit_uw)srcw);
+
+ if (imm != ~(sljit_ins)0) {
+ if (reg_size == 4)
+ imm |= (sljit_ins)1 << 6;
+
+ return push_inst32(compiler, VMOV_i | imm | VD4(freg));
+ }
+
+ FAIL_IF(load_immediate(compiler, TMP_REG1, (sljit_uw)srcw));
+ src = TMP_REG1;
+ }
+
+ switch (elem_size) {
+ case 0:
+ ins = 1 << 22;
+ break;
+ case 1:
+ ins = 1 << 5;
+ break;
+ default:
+ ins = 0;
+ break;
+ }
+
+ if (reg_size == 4)
+ ins |= (sljit_ins)1 << 21;
+
+ return push_inst32(compiler, VDUP | ins | VN4(freg) | RT4(src));
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_lane_mov(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg, sljit_s32 lane_index,
+ sljit_s32 srcdst, sljit_sw srcdstw)
+{
+ sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
+ sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
+ sljit_ins ins;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_lane_mov(compiler, type, freg, lane_index, srcdst, srcdstw));
+
+ ADJUST_LOCAL_OFFSET(srcdst, srcdstw);
+
+ if (reg_size != 3 && reg_size != 4)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if ((type & SLJIT_SIMD_FLOAT) ? (elem_size < 2 || elem_size > 3) : (elem_size > 2))
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ if (reg_size == 4)
+ freg = simd_get_quad_reg_index(freg);
+
+ if (type & SLJIT_SIMD_LANE_ZERO) {
+ ins = (reg_size == 3) ? 0 : ((sljit_ins)1 << 6);
+
+ if (type & SLJIT_SIMD_FLOAT) {
+ if (elem_size == 3 && !(srcdst & SLJIT_MEM)) {
+ if (lane_index == 1)
+ freg += SLJIT_QUAD_OTHER_HALF(freg);
+
+ if (srcdst != freg)
+ FAIL_IF(push_inst32(compiler, VORR | VD4(freg) | VN4(srcdst) | VM4(srcdst)));
+
+ freg += SLJIT_QUAD_OTHER_HALF(freg);
+ return push_inst32(compiler, VMOV_i | VD4(freg));
+ }
+
+ if (srcdst == freg || (elem_size == 3 && srcdst == (freg + SLJIT_QUAD_OTHER_HALF(freg)))) {
+ FAIL_IF(push_inst32(compiler, VORR | ins | VD4(TMP_FREG2) | VN4(freg) | VM4(freg)));
+ srcdst = TMP_FREG2;
+ srcdstw = 0;
+ }
+ }
+
+ FAIL_IF(push_inst32(compiler, VMOV_i | ins | VD4(freg)));
+ }
+
+ if (reg_size == 4 && lane_index >= (0x8 >> elem_size)) {
+ lane_index -= (0x8 >> elem_size);
+ freg += SLJIT_QUAD_OTHER_HALF(freg);
+ }
+
+ if (srcdst & SLJIT_MEM) {
+ if (elem_size == 3)
+ return emit_fop_mem(compiler, ((type & SLJIT_SIMD_STORE) ? 0 : FPU_LOAD) | SLJIT_32, freg, srcdst, srcdstw);
+
+ FAIL_IF(sljit_emit_simd_mem_offset(compiler, &srcdst, srcdstw));
+
+ lane_index = lane_index << elem_size;
+ ins = (sljit_ins)((elem_size << 10) | (lane_index << 5));
+ return push_inst32(compiler, ((type & SLJIT_SIMD_STORE) ? VST1_s : VLD1_s) | ins | VD4(freg) | RN4(srcdst) | 0xf);
+ }
+
+ if (type & SLJIT_SIMD_FLOAT) {
+ if (elem_size == 3) {
+ if (type & SLJIT_SIMD_STORE)
+ return push_inst32(compiler, VORR | VD4(srcdst) | VN4(freg) | VM4(freg));
+ return push_inst32(compiler, VMOV_F32 | SLJIT_32 | VD4(freg) | VM4(srcdst));
+ }
+
+ if (type & SLJIT_SIMD_STORE) {
+ if (freg_ebit_map[freg] == 0) {
+ if (lane_index == 1)
+ freg = SLJIT_F64_SECOND(freg);
+
+ return push_inst32(compiler, VMOV_F32 | VD4(srcdst) | VM4(freg));
+ }
+
+ FAIL_IF(push_inst32(compiler, VMOV_s | (1 << 20) | ((sljit_ins)lane_index << 21) | VN4(freg) | RT4(TMP_REG1)));
+ return push_inst32(compiler, VMOV | VN4(srcdst) | RT4(TMP_REG1));
+ }
+
+ FAIL_IF(push_inst32(compiler, VMOV | (1 << 20) | VN4(srcdst) | RT4(TMP_REG1)));
+ return push_inst32(compiler, VMOV_s | ((sljit_ins)lane_index << 21) | VN4(freg) | RT4(TMP_REG1));
+ }
+
+ if (srcdst == SLJIT_IMM) {
+ if (elem_size < 2)
+ srcdstw &= ((sljit_sw)1 << (((sljit_sw)1 << elem_size) << 3)) - 1;
+
+ FAIL_IF(load_immediate(compiler, TMP_REG1, (sljit_uw)srcdstw));
+ srcdst = TMP_REG1;
+ }
+
+ if (elem_size == 0)
+ ins = 0x400000;
+ else if (elem_size == 1)
+ ins = 0x20;
+ else
+ ins = 0;
+
+ lane_index = lane_index << elem_size;
+ ins |= (sljit_ins)(((lane_index & 0x4) << 19) | ((lane_index & 0x3) << 5));
+
+ if (type & SLJIT_SIMD_STORE) {
+ ins |= (1 << 20);
+
+ if (elem_size < 2 && !(type & SLJIT_SIMD_LANE_SIGNED))
+ ins |= (1 << 23);
+ }
+
+ return push_inst32(compiler, VMOV_s | ins | VN4(freg) | RT4(srcdst));
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_lane_replicate(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
+ sljit_s32 src, sljit_s32 src_lane_index)
+{
+ sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
+ sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
+ sljit_ins ins;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_lane_replicate(compiler, type, freg, src, src_lane_index));
+
+ if (reg_size != 3 && reg_size != 4)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if ((type & SLJIT_SIMD_FLOAT) && (elem_size < 2 || elem_size > 3))
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ if (reg_size == 4) {
+ freg = simd_get_quad_reg_index(freg);
+ src = simd_get_quad_reg_index(src);
+
+ if (src_lane_index >= (0x8 >> elem_size)) {
+ src_lane_index -= (0x8 >> elem_size);
+ src += SLJIT_QUAD_OTHER_HALF(src);
+ }
+ }
+
+ if (elem_size == 3) {
+ if (freg != src)
+ FAIL_IF(push_inst32(compiler, VORR | VD4(freg) | VN4(src) | VM4(src)));
+
+ freg += SLJIT_QUAD_OTHER_HALF(freg);
+
+ if (freg != src)
+ return push_inst32(compiler, VORR | VD4(freg) | VN4(src) | VM4(src));
+ return SLJIT_SUCCESS;
+ }
+
+ ins = ((((sljit_ins)src_lane_index << 1) | 1) << (16 + elem_size));
+
+ if (reg_size == 4)
+ ins |= (sljit_ins)1 << 6;
+
+ return push_inst32(compiler, VDUP_s | ins | VD4(freg) | VM4(src));
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_extend(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
+ sljit_s32 src, sljit_sw srcw)
+{
+ sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
+ sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
+ sljit_s32 elem2_size = SLJIT_SIMD_GET_ELEM2_SIZE(type);
+ sljit_s32 dst_reg;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_extend(compiler, type, freg, src, srcw));
+
+ ADJUST_LOCAL_OFFSET(src, srcw);
+
+ if (reg_size != 3 && reg_size != 4)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if ((type & SLJIT_SIMD_FLOAT) && (elem_size != 2 || elem2_size != 3))
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ if (reg_size == 4)
+ freg = simd_get_quad_reg_index(freg);
+
+ if (src & SLJIT_MEM) {
+ FAIL_IF(sljit_emit_simd_mem_offset(compiler, &src, srcw));
+ if (reg_size == 4 && elem2_size - elem_size == 1)
+ FAIL_IF(push_inst32(compiler, VLD1 | (0x7 << 8) | VD4(freg) | RN4(src) | 0xf));
+ else
+ FAIL_IF(push_inst32(compiler, VLD1_s | (sljit_ins)((reg_size - elem2_size + elem_size) << 10) | VD4(freg) | RN4(src) | 0xf));
+ src = freg;
+ } else if (reg_size == 4)
+ src = simd_get_quad_reg_index(src);
+
+ if (!(type & SLJIT_SIMD_FLOAT)) {
+ dst_reg = (reg_size == 4) ? freg : TMP_FREG2;
+
+ do {
+ FAIL_IF(push_inst32(compiler, VSHLL | ((type & SLJIT_SIMD_EXTEND_SIGNED) ? 0 : (1 << 28))
+ | ((sljit_ins)1 << (19 + elem_size)) | VD4(dst_reg) | VM4(src)));
+ src = dst_reg;
+ } while (++elem_size < elem2_size);
+
+ if (dst_reg == TMP_FREG2)
+ return push_inst32(compiler, VORR | VD4(freg) | VN4(TMP_FREG2) | VM4(TMP_FREG2));
+ return SLJIT_SUCCESS;
+ }
+
+ /* No SIMD variant, must use VFP instead. */
+ SLJIT_ASSERT(reg_size == 4);
+
+ if (freg == src) {
+ freg += SLJIT_QUAD_OTHER_HALF(freg);
+ FAIL_IF(push_inst32(compiler, VCVT_F64_F32 | VD4(freg) | VM4(src) | 0x20));
+ freg += SLJIT_QUAD_OTHER_HALF(freg);
+ return push_inst32(compiler, VCVT_F64_F32 | VD4(freg) | VM4(src));
+ }
+
+ FAIL_IF(push_inst32(compiler, VCVT_F64_F32 | VD4(freg) | VM4(src)));
+ freg += SLJIT_QUAD_OTHER_HALF(freg);
+ return push_inst32(compiler, VCVT_F64_F32 | VD4(freg) | VM4(src) | 0x20);
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_sign(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
+ sljit_s32 dst, sljit_sw dstw)
+{
+ sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
+ sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
+ sljit_ins ins, imms;
+ sljit_s32 dst_r;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_sign(compiler, type, freg, dst, dstw));
+
+ ADJUST_LOCAL_OFFSET(dst, dstw);
+
+ if (reg_size != 3 && reg_size != 4)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if ((type & SLJIT_SIMD_FLOAT) && (elem_size < 2 || elem_size > 3))
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ switch (elem_size) {
+ case 0:
+ imms = 0x243219;
+ ins = VSHR | (1 << 28) | (0x9 << 16);
+ break;
+ case 1:
+ imms = (reg_size == 4) ? 0x243219 : 0x2231;
+ ins = VSHR | (1 << 28) | (0x11 << 16);
+ break;
+ case 2:
+ imms = (reg_size == 4) ? 0x2231 : 0x21;
+ ins = VSHR | (1 << 28) | (0x21 << 16);
+ break;
+ default:
+ imms = 0x21;
+ ins = VSHR | (1 << 28) | (0x1 << 16) | (1 << 7);
+ break;
+ }
+
+ if (reg_size == 4) {
+ freg = simd_get_quad_reg_index(freg);
+ ins |= (sljit_ins)1 << 6;
+ }
+
+ SLJIT_ASSERT((freg_map[TMP_FREG2] & 0x1) == 0);
+ FAIL_IF(push_inst32(compiler, ins | VD4(TMP_FREG2) | VM4(freg)));
+
+ if (reg_size == 4 && elem_size > 0)
+ FAIL_IF(push_inst32(compiler, VMOVN | ((sljit_ins)(elem_size - 1) << 18) | VD4(TMP_FREG2) | VM4(TMP_FREG2)));
+
+ ins = (reg_size == 4 && elem_size == 0) ? (1 << 6) : 0;
+
+ while (imms >= 0x100) {
+ FAIL_IF(push_inst32(compiler, VSRA | (1 << 28) | ins | ((imms & 0xff) << 16) | VD4(TMP_FREG2) | VM4(TMP_FREG2)));
+ imms >>= 8;
+ }
+
+ FAIL_IF(push_inst32(compiler, VSRA | (1 << 28) | ins | (1 << 7) | (imms << 16) | VD4(TMP_FREG2) | VM4(TMP_FREG2)));
+
+ dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
+ FAIL_IF(push_inst32(compiler, VMOV_s | (1 << 20) | (1 << 23) | (0x2 << 21) | RT4(dst_r) | VN4(TMP_FREG2)));
+
+ if (reg_size == 4 && elem_size == 0) {
+ SLJIT_ASSERT(freg_map[TMP_FREG2] + 1 == freg_map[TMP_FREG1]);
+ FAIL_IF(push_inst32(compiler, VMOV_s | (1 << 20) | (1 << 23) | (0x2 << 21) | RT4(TMP_REG2)| VN4(TMP_FREG1)));
+ FAIL_IF(push_inst32(compiler, ORR_W | RD4(dst_r) | RN4(dst_r) | RM4(TMP_REG2) | (0x2 << 12)));
+ }
+
+ if (dst_r == TMP_REG1)
+ return emit_op_mem(compiler, STORE | WORD_SIZE, TMP_REG1, dst, dstw, TMP_REG2);
+
+ return SLJIT_SUCCESS;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_op2(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 dst_freg, sljit_s32 src1_freg, sljit_s32 src2_freg)
+{
+ sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
+ sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
+ sljit_ins ins = 0;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_op2(compiler, type, dst_freg, src1_freg, src2_freg));
+
+ if (reg_size != 3 && reg_size != 4)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if ((type & SLJIT_SIMD_FLOAT) && (elem_size < 2 || elem_size > 3))
+ return SLJIT_ERR_UNSUPPORTED;
+
+ switch (SLJIT_SIMD_GET_OPCODE(type)) {
+ case SLJIT_SIMD_OP2_AND:
+ ins = VAND;
+ break;
+ case SLJIT_SIMD_OP2_OR:
+ ins = VORR;
+ break;
+ case SLJIT_SIMD_OP2_XOR:
+ ins = VEOR;
+ break;
+ }
+
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ if (reg_size == 4) {
+ dst_freg = simd_get_quad_reg_index(dst_freg);
+ src1_freg = simd_get_quad_reg_index(src1_freg);
+ src2_freg = simd_get_quad_reg_index(src2_freg);
+ ins |= (sljit_ins)1 << 6;
+ }
+
+ return push_inst32(compiler, ins | VD4(dst_freg) | VN4(src1_freg) | VM4(src2_freg));
}
#undef FPU_LOAD
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_atomic_load(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst_reg,
+ sljit_s32 mem_reg)
+{
+ sljit_ins ins;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_atomic_load(compiler, op, dst_reg, mem_reg));
+
+ switch (GET_OPCODE(op)) {
+ case SLJIT_MOV_U8:
+ ins = LDREXB;
+ break;
+ case SLJIT_MOV_U16:
+ ins = LDREXH;
+ break;
+ default:
+ ins = LDREX;
+ break;
+ }
+
+ return push_inst32(compiler, ins | RN4(mem_reg) | RT4(dst_reg));
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_atomic_store(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 src_reg,
+ sljit_s32 mem_reg,
+ sljit_s32 temp_reg)
+{
+ sljit_ins ins;
+
+ /* temp_reg == mem_reg is undefined so use another temp register */
+ SLJIT_UNUSED_ARG(temp_reg);
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_atomic_store(compiler, op, src_reg, mem_reg, temp_reg));
+
+ switch (GET_OPCODE(op)) {
+ case SLJIT_MOV_U8:
+ ins = STREXB | RM4(TMP_REG1);
+ break;
+ case SLJIT_MOV_U16:
+ ins = STREXH | RM4(TMP_REG1);
+ break;
+ default:
+ ins = STREX | RD4(TMP_REG1);
+ break;
+ }
+
+ FAIL_IF(push_inst32(compiler, ins | RN4(mem_reg) | RT4(src_reg)));
+ if (op & SLJIT_SET_ATOMIC_STORED)
+ return push_inst32(compiler, CMPI_W | RN4(TMP_REG1));
+
+ return SLJIT_SUCCESS;
+}
+
SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw init_value)
{
struct sljit_const *const_;
@@ -3111,25 +4262,26 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compi
return const_;
}
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_put_label* sljit_emit_put_label(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_mov_addr(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
{
- struct sljit_put_label *put_label;
+ struct sljit_jump *jump;
sljit_s32 dst_r;
CHECK_ERROR_PTR();
- CHECK_PTR(check_sljit_emit_put_label(compiler, dst, dstw));
+ CHECK_PTR(check_sljit_emit_mov_addr(compiler, dst, dstw));
ADJUST_LOCAL_OFFSET(dst, dstw);
- put_label = (struct sljit_put_label*)ensure_abuf(compiler, sizeof(struct sljit_put_label));
- PTR_FAIL_IF(!put_label);
- set_put_label(put_label, compiler, 0);
+ jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
+ PTR_FAIL_IF(!jump);
+ set_mov_addr(jump, compiler, 0);
dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
- PTR_FAIL_IF(emit_imm32_const(compiler, dst_r, 0));
+ PTR_FAIL_IF(push_inst16(compiler, RDN3(dst_r)));
+ compiler->size += 3;
if (dst & SLJIT_MEM)
PTR_FAIL_IF(emit_op_mem(compiler, WORD_SIZE | STORE, dst_r, dst, dstw, TMP_REG2));
- return put_label;
+ return jump;
}
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_jump_addr(sljit_uw addr, sljit_uw new_target, sljit_sw executable_offset)
diff --git a/src/3rdparty/pcre2/src/sljit/sljitNativeLOONGARCH_64.c b/src/3rdparty/pcre2/src/sljit/sljitNativeLOONGARCH_64.c
new file mode 100644
index 0000000000..2e1d742aee
--- /dev/null
+++ b/src/3rdparty/pcre2/src/sljit/sljitNativeLOONGARCH_64.c
@@ -0,0 +1,3765 @@
+/*
+ * Stack-less Just-In-Time compiler
+ *
+ * Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this list of
+ * conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice, this list
+ * of conditions and the following disclaimer in the documentation and/or other materials
+ * provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND CONTRIBUTORS ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
+ * SHALL THE COPYRIGHT HOLDER(S) OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+ * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+ * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+SLJIT_API_FUNC_ATTRIBUTE const char* sljit_get_platform_name(void)
+{
+ return "LOONGARCH" SLJIT_CPUINFO;
+}
+
+typedef sljit_u32 sljit_ins;
+
+#define TMP_REG1 (SLJIT_NUMBER_OF_REGISTERS + 2)
+#define TMP_REG2 (SLJIT_NUMBER_OF_REGISTERS + 3)
+#define TMP_REG3 (SLJIT_NUMBER_OF_REGISTERS + 4)
+#define TMP_ZERO 0
+
+/* Flags are kept in volatile registers. */
+#define EQUAL_FLAG (SLJIT_NUMBER_OF_REGISTERS + 5)
+#define RETURN_ADDR_REG TMP_REG2
+#define OTHER_FLAG (SLJIT_NUMBER_OF_REGISTERS + 6)
+
+#define TMP_FREG1 (SLJIT_NUMBER_OF_FLOAT_REGISTERS + 1)
+#define TMP_FREG2 (SLJIT_NUMBER_OF_FLOAT_REGISTERS + 2)
+
+static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 7] = {
+ 0, 4, 5, 6, 7, 8, 9, 10, 11, 16, 17, 18, 19, 20, 22, 31, 30, 29, 28, 27, 26, 25, 24, 23, 3, 13, 1, 14, 12, 15
+};
+
+static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 3] = {
+ 0, 0, 1, 2, 3, 4, 5, 6, 7, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 31, 30, 29, 28, 27, 26, 25, 24, 8, 9
+};
+
+/* --------------------------------------------------------------------- */
+/* Instruction forms */
+/* --------------------------------------------------------------------- */
+
+/*
+LoongArch instructions are 32 bits wide, belonging to 9 basic instruction formats (and variants of them):
+
+| Format name | Composition |
+| 2R | Opcode + Rj + Rd |
+| 3R | Opcode + Rk + Rj + Rd |
+| 4R | Opcode + Ra + Rk + Rj + Rd |
+| 2RI8 | Opcode + I8 + Rj + Rd |
+| 2RI12 | Opcode + I12 + Rj + Rd |
+| 2RI14 | Opcode + I14 + Rj + Rd |
+| 2RI16 | Opcode + I16 + Rj + Rd |
+| 1RI21 | Opcode + I21L + Rj + I21H |
+| I26 | Opcode + I26L + I26H |
+
+Rd is the destination register operand, while Rj, Rk and Ra (“a” stands for “additional”) are the source register operands.
+I8/I12/I14/I16/I21/I26 are immediate operands of respective width. The longer I21 and I26 are stored in separate higher and
+lower parts in the instruction word, denoted by the “L” and “H” suffixes. */
+
+#define RD(rd) ((sljit_ins)reg_map[rd])
+#define RJ(rj) ((sljit_ins)reg_map[rj] << 5)
+#define RK(rk) ((sljit_ins)reg_map[rk] << 10)
+#define RA(ra) ((sljit_ins)reg_map[ra] << 15)
+
+#define FD(fd) ((sljit_ins)reg_map[fd])
+#define FRD(fd) ((sljit_ins)freg_map[fd])
+#define FRJ(fj) ((sljit_ins)freg_map[fj] << 5)
+#define FRK(fk) ((sljit_ins)freg_map[fk] << 10)
+#define FRA(fa) ((sljit_ins)freg_map[fa] << 15)
+
+#define IMM_V(imm) ((sljit_ins)(imm) << 10)
+#define IMM_I8(imm) (((sljit_ins)(imm)&0xff) << 10)
+#define IMM_I12(imm) (((sljit_ins)(imm)&0xfff) << 10)
+#define IMM_I14(imm) (((sljit_ins)(imm)&0x3fff) << 10)
+#define IMM_I16(imm) (((sljit_ins)(imm)&0xffff) << 10)
+#define IMM_I20(imm) (((sljit_ins)(imm)&0xffffffff) >> 12 << 5)
+#define IMM_I21(imm) ((((sljit_ins)(imm)&0xffff) << 10) | (((sljit_ins)(imm) >> 16) & 0x1f))
+#define IMM_I26(imm) ((((sljit_ins)(imm)&0xffff) << 10) | (((sljit_ins)(imm) >> 16) & 0x3ff))
+
+#define OPC_I26(opc) ((sljit_ins)(opc) << 26)
+#define OPC_1RI21(opc) ((sljit_ins)(opc) << 26)
+#define OPC_2RI16(opc) ((sljit_ins)(opc) << 26)
+#define OPC_2RI14(opc) ((sljit_ins)(opc) << 24)
+#define OPC_2RI12(opc) ((sljit_ins)(opc) << 22)
+#define OPC_2RI8(opc) ((sljit_ins)(opc) << 18)
+#define OPC_4R(opc) ((sljit_ins)(opc) << 20)
+#define OPC_3R(opc) ((sljit_ins)(opc) << 15)
+#define OPC_2R(opc) ((sljit_ins)(opc) << 10)
+#define OPC_1RI20(opc) ((sljit_ins)(opc) << 25)
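+
+/* For illustration: an instruction word is assembled by OR-ing an opcode macro with the
+   operand field macros above. Using the 2RI12 form ADDI_D (defined below) with placeholder
+   operands dst, src and imm, the emitted encoding would be built roughly as
+
+     push_inst(compiler, ADDI_D | RD(dst) | RJ(src) | IMM_I12(imm));
+
+   i.e. the opcode occupies the top bits, the 12-bit immediate bits [21:10], Rj bits [9:5]
+   and Rd bits [4:0], matching the 2RI12 layout in the table above. */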
+
+/* Arithmetic operation instructions */
+#define ADD_W OPC_3R(0x20)
+#define ADD_D OPC_3R(0x21)
+#define SUB_W OPC_3R(0x22)
+#define SUB_D OPC_3R(0x23)
+#define ADDI_W OPC_2RI12(0xa)
+#define ADDI_D OPC_2RI12(0xb)
+#define ANDI OPC_2RI12(0xd)
+#define ORI OPC_2RI12(0xe)
+#define XORI OPC_2RI12(0xf)
+#define ADDU16I_D OPC_2RI16(0x4)
+#define LU12I_W OPC_1RI20(0xa)
+#define LU32I_D OPC_1RI20(0xb)
+#define LU52I_D OPC_2RI12(0xc)
+#define SLT OPC_3R(0x24)
+#define SLTU OPC_3R(0x25)
+#define SLTI OPC_2RI12(0x8)
+#define SLTUI OPC_2RI12(0x9)
+#define PCADDI OPC_1RI20(0xc)
+#define PCALAU12I OPC_1RI20(0xd)
+#define PCADDU12I OPC_1RI20(0xe)
+#define PCADDU18I OPC_1RI20(0xf)
+#define NOR OPC_3R(0x28)
+#define AND OPC_3R(0x29)
+#define OR OPC_3R(0x2a)
+#define XOR OPC_3R(0x2b)
+#define ORN OPC_3R(0x2c)
+#define ANDN OPC_3R(0x2d)
+#define MUL_W OPC_3R(0x38)
+#define MULH_W OPC_3R(0x39)
+#define MULH_WU OPC_3R(0x3a)
+#define MUL_D OPC_3R(0x3b)
+#define MULH_D OPC_3R(0x3c)
+#define MULH_DU OPC_3R(0x3d)
+#define MULW_D_W OPC_3R(0x3e)
+#define MULW_D_WU OPC_3R(0x3f)
+#define DIV_W OPC_3R(0x40)
+#define MOD_W OPC_3R(0x41)
+#define DIV_WU OPC_3R(0x42)
+#define MOD_WU OPC_3R(0x43)
+#define DIV_D OPC_3R(0x44)
+#define MOD_D OPC_3R(0x45)
+#define DIV_DU OPC_3R(0x46)
+#define MOD_DU OPC_3R(0x47)
+
+/* Bit-shift instructions */
+#define SLL_W OPC_3R(0x2e)
+#define SRL_W OPC_3R(0x2f)
+#define SRA_W OPC_3R(0x30)
+#define SLL_D OPC_3R(0x31)
+#define SRL_D OPC_3R(0x32)
+#define SRA_D OPC_3R(0x33)
+#define ROTR_W OPC_3R(0x36)
+#define ROTR_D OPC_3R(0x37)
+#define SLLI_W OPC_3R(0x81)
+#define SLLI_D ((sljit_ins)(0x41) << 16)
+#define SRLI_W OPC_3R(0x89)
+#define SRLI_D ((sljit_ins)(0x45) << 16)
+#define SRAI_W OPC_3R(0x91)
+#define SRAI_D ((sljit_ins)(0x49) << 16)
+#define ROTRI_W OPC_3R(0x99)
+#define ROTRI_D ((sljit_ins)(0x4d) << 16)
+
+/* Bit-manipulation instructions */
+#define CLO_W OPC_2R(0x4)
+#define CLZ_W OPC_2R(0x5)
+#define CTO_W OPC_2R(0x6)
+#define CTZ_W OPC_2R(0x7)
+#define CLO_D OPC_2R(0x8)
+#define CLZ_D OPC_2R(0x9)
+#define CTO_D OPC_2R(0xa)
+#define CTZ_D OPC_2R(0xb)
+#define REVB_2H OPC_2R(0xc)
+#define REVB_4H OPC_2R(0xd)
+#define REVB_2W OPC_2R(0xe)
+#define REVB_D OPC_2R(0xf)
+#define REVH_2W OPC_2R(0x10)
+#define REVH_D OPC_2R(0x11)
+#define BITREV_4B OPC_2R(0x12)
+#define BITREV_8B OPC_2R(0x13)
+#define BITREV_W OPC_2R(0x14)
+#define BITREV_D OPC_2R(0x15)
+#define EXT_W_H OPC_2R(0x16)
+#define EXT_W_B OPC_2R(0x17)
+#define BSTRINS_W (0x1 << 22 | 1 << 21)
+#define BSTRPICK_W (0x1 << 22 | 1 << 21 | 1 << 15)
+#define BSTRINS_D (0x2 << 22)
+#define BSTRPICK_D (0x3 << 22)
+
+/* Branch instructions */
+#define BEQZ OPC_1RI21(0x10)
+#define BNEZ OPC_1RI21(0x11)
+#define JIRL OPC_2RI16(0x13)
+#define B OPC_I26(0x14)
+#define BL OPC_I26(0x15)
+#define BEQ OPC_2RI16(0x16)
+#define BNE OPC_2RI16(0x17)
+#define BLT OPC_2RI16(0x18)
+#define BGE OPC_2RI16(0x19)
+#define BLTU OPC_2RI16(0x1a)
+#define BGEU OPC_2RI16(0x1b)
+
+/* Memory access instructions */
+#define LD_B OPC_2RI12(0xa0)
+#define LD_H OPC_2RI12(0xa1)
+#define LD_W OPC_2RI12(0xa2)
+#define LD_D OPC_2RI12(0xa3)
+
+#define ST_B OPC_2RI12(0xa4)
+#define ST_H OPC_2RI12(0xa5)
+#define ST_W OPC_2RI12(0xa6)
+#define ST_D OPC_2RI12(0xa7)
+
+#define LD_BU OPC_2RI12(0xa8)
+#define LD_HU OPC_2RI12(0xa9)
+#define LD_WU OPC_2RI12(0xaa)
+
+#define LDX_B OPC_3R(0x7000)
+#define LDX_H OPC_3R(0x7008)
+#define LDX_W OPC_3R(0x7010)
+#define LDX_D OPC_3R(0x7018)
+
+#define STX_B OPC_3R(0x7020)
+#define STX_H OPC_3R(0x7028)
+#define STX_W OPC_3R(0x7030)
+#define STX_D OPC_3R(0x7038)
+
+#define LDX_BU OPC_3R(0x7040)
+#define LDX_HU OPC_3R(0x7048)
+#define LDX_WU OPC_3R(0x7050)
+
+#define PRELD OPC_2RI12(0xab)
+
+/* Atomic memory access instructions */
+#define LL_W OPC_2RI14(0x20)
+#define SC_W OPC_2RI14(0x21)
+#define LL_D OPC_2RI14(0x22)
+#define SC_D OPC_2RI14(0x23)
+
+/* LoongArch V1.10 Instructions */
+#define AMCAS_B OPC_3R(0x70B0)
+#define AMCAS_H OPC_3R(0x70B1)
+#define AMCAS_W OPC_3R(0x70B2)
+#define AMCAS_D OPC_3R(0x70B3)
+
+/* Other instructions */
+#define BREAK OPC_3R(0x54)
+#define DBGCALL OPC_3R(0x55)
+#define SYSCALL OPC_3R(0x56)
+
+/* Basic Floating-Point Instructions */
+/* Floating-Point Arithmetic Operation Instructions */
+#define FADD_S OPC_3R(0x201)
+#define FADD_D OPC_3R(0x202)
+#define FSUB_S OPC_3R(0x205)
+#define FSUB_D OPC_3R(0x206)
+#define FMUL_S OPC_3R(0x209)
+#define FMUL_D OPC_3R(0x20a)
+#define FDIV_S OPC_3R(0x20d)
+#define FDIV_D OPC_3R(0x20e)
+#define FCMP_COND_S OPC_4R(0xc1)
+#define FCMP_COND_D OPC_4R(0xc2)
+#define FCOPYSIGN_S OPC_3R(0x225)
+#define FCOPYSIGN_D OPC_3R(0x226)
+#define FSEL OPC_4R(0xd0)
+#define FABS_S OPC_2R(0x4501)
+#define FABS_D OPC_2R(0x4502)
+#define FNEG_S OPC_2R(0x4505)
+#define FNEG_D OPC_2R(0x4506)
+#define FMOV_S OPC_2R(0x4525)
+#define FMOV_D OPC_2R(0x4526)
+
+/* Floating-Point Conversion Instructions */
+#define FCVT_S_D OPC_2R(0x4646)
+#define FCVT_D_S OPC_2R(0x4649)
+#define FTINTRZ_W_S OPC_2R(0x46a1)
+#define FTINTRZ_W_D OPC_2R(0x46a2)
+#define FTINTRZ_L_S OPC_2R(0x46a9)
+#define FTINTRZ_L_D OPC_2R(0x46aa)
+#define FFINT_S_W OPC_2R(0x4744)
+#define FFINT_S_L OPC_2R(0x4746)
+#define FFINT_D_W OPC_2R(0x4748)
+#define FFINT_D_L OPC_2R(0x474a)
+
+/* Floating-Point Move Instructions */
+#define FMOV_S OPC_2R(0x4525)
+#define FMOV_D OPC_2R(0x4526)
+#define MOVGR2FR_W OPC_2R(0x4529)
+#define MOVGR2FR_D OPC_2R(0x452a)
+#define MOVGR2FRH_W OPC_2R(0x452b)
+#define MOVFR2GR_S OPC_2R(0x452d)
+#define MOVFR2GR_D OPC_2R(0x452e)
+#define MOVFRH2GR_S OPC_2R(0x452f)
+#define MOVGR2FCSR OPC_2R(0x4530)
+#define MOVFCSR2GR OPC_2R(0x4532)
+#define MOVFR2CF OPC_2R(0x4534)
+#define MOVCF2FR OPC_2R(0x4535)
+#define MOVGR2CF OPC_2R(0x4536)
+#define MOVCF2GR OPC_2R(0x4537)
+
+/* Floating-Point Branch Instructions */
+#define BCEQZ OPC_I26(0x12)
+#define BCNEZ OPC_I26(0x12)
+
+/* Floating-Point Common Memory Access Instructions */
+#define FLD_S OPC_2RI12(0xac)
+#define FLD_D OPC_2RI12(0xae)
+#define FST_S OPC_2RI12(0xad)
+#define FST_D OPC_2RI12(0xaf)
+
+#define FLDX_S OPC_3R(0x7060)
+#define FLDX_D OPC_3R(0x7068)
+#define FSTX_S OPC_3R(0x7070)
+#define FSTX_D OPC_3R(0x7078)
+
+/* Vector Instructions */
+
+/* Vector Arithmetic Instructions */
+#define VOR_V OPC_3R(0xe24d)
+#define VXOR_V OPC_3R(0xe24e)
+#define VAND_V OPC_3R(0xe24c)
+#define VMSKLTZ OPC_2R(0x1ca710)
+
+/* Vector Memory Access Instructions */
+#define VLD OPC_2RI12(0xb0)
+#define VST OPC_2RI12(0xb1)
+#define XVLD OPC_2RI12(0xb2)
+#define XVST OPC_2RI12(0xb3)
+#define VSTELM OPC_2RI8(0xc40)
+
+/* Vector Float Conversion Instructions */
+#define VFCVTL_D_S OPC_2R(0x1ca77c)
+
+/* Vector Bit Manipulate Instructions */
+#define VSLLWIL OPC_2R(0x1cc200)
+
+/* Vector Move And Shuffle Instructions */
+#define VLDREPL OPC_2R(0xc0000)
+#define VINSGR2VR OPC_2R(0x1cbac0)
+#define VPICKVE2GR_U OPC_2R(0x1cbce0)
+#define VREPLGR2VR OPC_2R(0x1ca7c0)
+#define VREPLVE OPC_3R(0xe244)
+#define VREPLVEI OPC_2R(0x1cbde0)
+#define XVPERMI OPC_2RI8(0x1dfa)
+
+#define I12_MAX (0x7ff)
+#define I12_MIN (-0x800)
+#define BRANCH16_MAX (0x7fff << 2)
+#define BRANCH16_MIN (-(0x8000 << 2))
+#define BRANCH21_MAX (0xfffff << 2)
+#define BRANCH21_MIN (-(0x100000 << 2))
+#define JUMP_MAX (0x1ffffff << 2)
+#define JUMP_MIN (-(0x2000000 << 2))
+#define JIRL_MAX (0x7fff << 2)
+#define JIRL_MIN (-(0x8000 << 2))
+
+#define S32_MAX (0x7fffffffl)
+#define S32_MIN (-0x80000000l)
+#define S52_MAX (0x7ffffffffffffl)
+
+#define INST(inst, type) ((sljit_ins)((type & SLJIT_32) ? inst##_W : inst##_D))
+
+/* LoongArch CPUCFG register for feature detection */
+#define LOONGARCH_CFG2 0x02
+#define LOONGARCH_CFG2_LAMCAS (1 << 28)
+
+static sljit_u32 cfg2_feature_list = 0;
+
+/* According to the Software Development and Build Convention for LoongArch Architectures,
+   the status of the LSX and LASX extensions must be checked through HWCAP. */
+#include <sys/auxv.h>
+
+#define LOONGARCH_HWCAP_LSX (1 << 4)
+#define LOONGARCH_HWCAP_LASX (1 << 5)
+
+static sljit_u32 hwcap_feature_list = 0;
+
+/* Feature type */
+#define GET_CFG2 0
+#define GET_HWCAP 1
+
+static SLJIT_INLINE sljit_u32 get_cpu_features(sljit_u32 feature_type)
+{
+ if (cfg2_feature_list == 0)
+ __asm__ ("cpucfg %0, %1" : "+&r"(cfg2_feature_list) : "r"(LOONGARCH_CFG2));
+ if (hwcap_feature_list == 0)
+ hwcap_feature_list = (sljit_u32)getauxval(AT_HWCAP);
+
+ return feature_type ? hwcap_feature_list : cfg2_feature_list;
+}
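+
+/* For illustration: callers query one bit of the cached feature words, e.g. a sketch of an
+   LSX availability test (mirroring sljit_has_cpu_feature below) would be
+   (get_cpu_features(GET_HWCAP) & LOONGARCH_HWCAP_LSX) != 0, while atomic CAS support is
+   tested with (get_cpu_features(GET_CFG2) & LOONGARCH_CFG2_LAMCAS) != 0. */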
+
+static sljit_s32 push_inst(struct sljit_compiler *compiler, sljit_ins ins)
+{
+ sljit_ins *ptr = (sljit_ins*)ensure_buf(compiler, sizeof(sljit_ins));
+ FAIL_IF(!ptr);
+ *ptr = ins;
+ compiler->size++;
+ return SLJIT_SUCCESS;
+}
+
+static SLJIT_INLINE sljit_ins* detect_jump_type(struct sljit_jump *jump, sljit_ins *code, sljit_sw executable_offset)
+{
+ sljit_sw diff;
+ sljit_uw target_addr;
+ sljit_ins *inst;
+
+ inst = (sljit_ins *)jump->addr;
+
+ if (jump->flags & SLJIT_REWRITABLE_JUMP)
+ goto exit;
+
+ if (jump->flags & JUMP_ADDR)
+ target_addr = jump->u.target;
+ else {
+ SLJIT_ASSERT(jump->u.label != NULL);
+ target_addr = (sljit_uw)(code + jump->u.label->size) + (sljit_uw)executable_offset;
+ }
+
+ diff = (sljit_sw)target_addr - (sljit_sw)inst - executable_offset;
+
+ if (jump->flags & IS_COND) {
+ diff += SSIZE_OF(ins);
+
+ if (diff >= BRANCH16_MIN && diff <= BRANCH16_MAX) {
+ inst--;
+ inst[0] = (inst[0] & 0xfc0003ff) ^ 0x4000000;
+ jump->flags |= PATCH_B;
+ jump->addr = (sljit_uw)inst;
+ return inst;
+ }
+
+ diff -= SSIZE_OF(ins);
+ }
+
+ if (diff >= JUMP_MIN && diff <= JUMP_MAX) {
+ if (jump->flags & IS_COND) {
+ inst[-1] |= (sljit_ins)IMM_I16(2);
+ }
+
+ jump->flags |= PATCH_J;
+ return inst;
+ }
+
+ if (diff >= S32_MIN && diff <= S32_MAX) {
+ if (jump->flags & IS_COND)
+ inst[-1] |= (sljit_ins)IMM_I16(3);
+
+ jump->flags |= PATCH_REL32;
+ inst[1] = inst[0];
+ return inst + 1;
+ }
+
+ if (target_addr <= (sljit_uw)S32_MAX) {
+ if (jump->flags & IS_COND)
+ inst[-1] |= (sljit_ins)IMM_I16(3);
+
+ jump->flags |= PATCH_ABS32;
+ inst[1] = inst[0];
+ return inst + 1;
+ }
+
+ if (target_addr <= S52_MAX) {
+ if (jump->flags & IS_COND)
+ inst[-1] |= (sljit_ins)IMM_I16(4);
+
+ jump->flags |= PATCH_ABS52;
+ inst[2] = inst[0];
+ return inst + 2;
+ }
+
+exit:
+ if (jump->flags & IS_COND)
+ inst[-1] |= (sljit_ins)IMM_I16(5);
+ inst[3] = inst[0];
+ return inst + 3;
+}
+
+static SLJIT_INLINE sljit_sw mov_addr_get_length(struct sljit_jump *jump, sljit_ins *code_ptr, sljit_ins *code, sljit_sw executable_offset)
+{
+ sljit_uw addr;
+ sljit_sw diff;
+ SLJIT_UNUSED_ARG(executable_offset);
+
+ SLJIT_ASSERT(jump->flags < ((sljit_uw)6 << JUMP_SIZE_SHIFT));
+ if (jump->flags & JUMP_ADDR)
+ addr = jump->u.target;
+ else
+ addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code + jump->u.label->size, executable_offset);
+
+ diff = (sljit_sw)addr - (sljit_sw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
+
+ if (diff >= S32_MIN && diff <= S32_MAX) {
+ SLJIT_ASSERT(jump->flags >= ((sljit_uw)1 << JUMP_SIZE_SHIFT));
+ jump->flags |= PATCH_REL32;
+ return 1;
+ }
+
+ if (addr <= S32_MAX) {
+ SLJIT_ASSERT(jump->flags >= ((sljit_uw)1 << JUMP_SIZE_SHIFT));
+ jump->flags |= PATCH_ABS32;
+ return 1;
+ }
+
+ if (addr <= S52_MAX) {
+ SLJIT_ASSERT(jump->flags >= ((sljit_uw)2 << JUMP_SIZE_SHIFT));
+ jump->flags |= PATCH_ABS52;
+ return 2;
+ }
+
+ SLJIT_ASSERT(jump->flags >= ((sljit_uw)3 << JUMP_SIZE_SHIFT));
+ return 3;
+}
+
+static SLJIT_INLINE void load_addr_to_reg(struct sljit_jump *jump, sljit_sw executable_offset)
+{
+ sljit_uw flags = jump->flags;
+ sljit_uw addr = (flags & JUMP_ADDR) ? jump->u.target : jump->u.label->u.addr;
+ sljit_ins *ins = (sljit_ins*)jump->addr;
+ sljit_u32 reg = (flags & JUMP_MOV_ADDR) ? *ins : TMP_REG1;
+ SLJIT_UNUSED_ARG(executable_offset);
+
+ if (flags & PATCH_REL32) {
+ addr -= (sljit_uw)SLJIT_ADD_EXEC_OFFSET(ins, executable_offset);
+
+ SLJIT_ASSERT((sljit_sw)addr >= S32_MIN && (sljit_sw)addr <= S32_MAX);
+
+ if ((addr & 0x800) != 0)
+ addr += 0x1000;
+
+ ins[0] = PCADDU12I | RD(reg) | IMM_I20(addr);
+
+ if (!(flags & JUMP_MOV_ADDR)) {
+ SLJIT_ASSERT((ins[1] & OPC_2RI16(0x3f)) == JIRL);
+ ins[1] = (ins[1] & (OPC_2RI16(0x3f) | 0x3ff)) | IMM_I16((addr & 0xfff) >> 2);
+ } else
+ ins[1] = ADDI_D | RD(reg) | RJ(reg) | IMM_I12(addr);
+ return;
+ }
+
+ if (flags & PATCH_ABS32) {
+ SLJIT_ASSERT(addr <= S32_MAX);
+ ins[0] = LU12I_W | RD(reg) | (sljit_ins)(((addr & 0xffffffff) >> 12) << 5);
+ } else if (flags & PATCH_ABS52) {
+ ins[0] = LU12I_W | RD(reg) | (sljit_ins)(((addr & 0xffffffff) >> 12) << 5);
+ ins[1] = LU32I_D | RD(reg) | (sljit_ins)(((addr >> 32) & 0xfffff) << 5);
+ ins += 1;
+ } else {
+ ins[0] = LU12I_W | RD(reg) | (sljit_ins)(((addr & 0xffffffff) >> 12) << 5);
+ ins[1] = LU32I_D | RD(reg) | (sljit_ins)(((addr >> 32) & 0xfffff) << 5);
+ ins[2] = LU52I_D | RD(reg) | RJ(reg) | IMM_I12(addr >> 52);
+ ins += 2;
+ }
+
+ if (!(flags & JUMP_MOV_ADDR)) {
+ SLJIT_ASSERT((ins[1] & OPC_2RI16(0x3f)) == JIRL);
+ ins[1] = (ins[1] & (OPC_2RI16(0x3f) | 0x3ff)) | IMM_I16((addr & 0xfff) >> 2);
+ } else
+ ins[1] = ORI | RD(reg) | RJ(reg) | IMM_I12(addr);
+}
+
+static void reduce_code_size(struct sljit_compiler *compiler)
+{
+ struct sljit_label *label;
+ struct sljit_jump *jump;
+ struct sljit_const *const_;
+ SLJIT_NEXT_DEFINE_TYPES;
+ sljit_uw total_size;
+ sljit_uw size_reduce = 0;
+ sljit_sw diff;
+
+ label = compiler->labels;
+ jump = compiler->jumps;
+ const_ = compiler->consts;
+
+ SLJIT_NEXT_INIT_TYPES();
+
+ while (1) {
+ SLJIT_GET_NEXT_MIN();
+
+ if (next_min_addr == SLJIT_MAX_ADDRESS)
+ break;
+
+ if (next_min_addr == next_label_size) {
+ label->size -= size_reduce;
+
+ label = label->next;
+ next_label_size = SLJIT_GET_NEXT_SIZE(label);
+ }
+
+ if (next_min_addr == next_const_addr) {
+ const_->addr -= size_reduce;
+ const_ = const_->next;
+ next_const_addr = SLJIT_GET_NEXT_ADDRESS(const_);
+ continue;
+ }
+
+ if (next_min_addr != next_jump_addr)
+ continue;
+
+ jump->addr -= size_reduce;
+ if (!(jump->flags & JUMP_MOV_ADDR)) {
+ total_size = JUMP_MAX_SIZE;
+
+ if (!(jump->flags & SLJIT_REWRITABLE_JUMP)) {
+ if (jump->flags & JUMP_ADDR) {
+ if (jump->u.target <= S32_MAX)
+ total_size = 2;
+ else if (jump->u.target <= S52_MAX)
+ total_size = 3;
+ } else {
+ /* Unit size: instruction. */
+ diff = (sljit_sw)jump->u.label->size - (sljit_sw)jump->addr;
+
+ if ((jump->flags & IS_COND) && (diff + 1) <= (BRANCH16_MAX / SSIZE_OF(ins)) && (diff + 1) >= (BRANCH16_MIN / SSIZE_OF(ins)))
+ total_size = 0;
+ else if (diff >= (JUMP_MIN / SSIZE_OF(ins)) && diff <= (JUMP_MAX / SSIZE_OF(ins)))
+ total_size = 1;
+ else if (diff >= (S32_MIN / SSIZE_OF(ins)) && diff <= (S32_MAX / SSIZE_OF(ins)))
+ total_size = 2;
+ }
+ }
+
+ size_reduce += JUMP_MAX_SIZE - total_size;
+ jump->flags |= total_size << JUMP_SIZE_SHIFT;
+ } else {
+ total_size = 3;
+
+ if (!(jump->flags & JUMP_ADDR)) {
+ /* Real size minus 1. Unit size: instruction. */
+ diff = (sljit_sw)jump->u.label->size - (sljit_sw)jump->addr;
+
+ if (diff >= (S32_MIN / SSIZE_OF(ins)) && diff <= (S32_MAX / SSIZE_OF(ins)))
+ total_size = 1;
+ } else if (jump->u.target < S32_MAX)
+ total_size = 1;
+ else if (jump->u.target <= S52_MAX)
+ total_size = 2;
+
+ size_reduce += 3 - total_size;
+ jump->flags |= total_size << JUMP_SIZE_SHIFT;
+ }
+
+ jump = jump->next;
+ next_jump_addr = SLJIT_GET_NEXT_ADDRESS(jump);
+ }
+
+ compiler->size -= size_reduce;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compiler, sljit_s32 options, void *exec_allocator_data)
+{
+ struct sljit_memory_fragment *buf;
+ sljit_ins *code;
+ sljit_ins *code_ptr;
+ sljit_ins *buf_ptr;
+ sljit_ins *buf_end;
+ sljit_uw word_count;
+ SLJIT_NEXT_DEFINE_TYPES;
+ sljit_sw executable_offset;
+ sljit_uw addr;
+
+ struct sljit_label *label;
+ struct sljit_jump *jump;
+ struct sljit_const *const_;
+
+ CHECK_ERROR_PTR();
+ CHECK_PTR(check_sljit_generate_code(compiler));
+
+ reduce_code_size(compiler);
+
+ code = (sljit_ins*)allocate_executable_memory(compiler->size * sizeof(sljit_ins), options, exec_allocator_data, &executable_offset);
+ PTR_FAIL_WITH_EXEC_IF(code);
+
+ reverse_buf(compiler);
+ buf = compiler->buf;
+
+ code_ptr = code;
+ word_count = 0;
+ label = compiler->labels;
+ jump = compiler->jumps;
+ const_ = compiler->consts;
+ SLJIT_NEXT_INIT_TYPES();
+ SLJIT_GET_NEXT_MIN();
+
+ do {
+ buf_ptr = (sljit_ins*)buf->memory;
+ buf_end = buf_ptr + (buf->used_size >> 2);
+ do {
+ *code_ptr = *buf_ptr++;
+ if (next_min_addr == word_count) {
+ SLJIT_ASSERT(!label || label->size >= word_count);
+ SLJIT_ASSERT(!jump || jump->addr >= word_count);
+ SLJIT_ASSERT(!const_ || const_->addr >= word_count);
+
+ /* These structures are ordered by their address. */
+ if (next_min_addr == next_label_size) {
+ label->u.addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
+ label->size = (sljit_uw)(code_ptr - code);
+ label = label->next;
+ next_label_size = SLJIT_GET_NEXT_SIZE(label);
+ }
+
+ if (next_min_addr == next_jump_addr) {
+ if (!(jump->flags & JUMP_MOV_ADDR)) {
+ word_count = word_count - 1 + (jump->flags >> JUMP_SIZE_SHIFT);
+ jump->addr = (sljit_uw)code_ptr;
+ code_ptr = detect_jump_type(jump, code, executable_offset);
+ SLJIT_ASSERT((jump->flags & PATCH_B) || ((sljit_uw)code_ptr - jump->addr < (jump->flags >> JUMP_SIZE_SHIFT) * sizeof(sljit_ins)));
+ } else {
+ word_count += jump->flags >> JUMP_SIZE_SHIFT;
+ addr = (sljit_uw)code_ptr;
+ code_ptr += mov_addr_get_length(jump, code_ptr, code, executable_offset);
+ jump->addr = addr;
+ }
+ jump = jump->next;
+ next_jump_addr = SLJIT_GET_NEXT_ADDRESS(jump);
+ } else if (next_min_addr == next_const_addr) {
+ const_->addr = (sljit_uw)code_ptr;
+ const_ = const_->next;
+ next_const_addr = SLJIT_GET_NEXT_ADDRESS(const_);
+ }
+
+ SLJIT_GET_NEXT_MIN();
+ }
+ code_ptr++;
+ word_count++;
+ } while (buf_ptr < buf_end);
+
+ buf = buf->next;
+ } while (buf);
+
+ if (label && label->size == word_count) {
+ label->u.addr = (sljit_uw)code_ptr;
+ label->size = (sljit_uw)(code_ptr - code);
+ label = label->next;
+ }
+
+ SLJIT_ASSERT(!label);
+ SLJIT_ASSERT(!jump);
+ SLJIT_ASSERT(!const_);
+ SLJIT_ASSERT(code_ptr - code <= (sljit_sw)compiler->size);
+
+ jump = compiler->jumps;
+ while (jump) {
+ do {
+ if (!(jump->flags & (PATCH_B | PATCH_J)) || (jump->flags & JUMP_MOV_ADDR)) {
+ load_addr_to_reg(jump, executable_offset);
+ break;
+ }
+
+ addr = (jump->flags & JUMP_ADDR) ? jump->u.target : jump->u.label->u.addr;
+ buf_ptr = (sljit_ins *)jump->addr;
+ addr -= (sljit_uw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset);
+
+ if (jump->flags & PATCH_B) {
+ SLJIT_ASSERT((sljit_sw)addr >= BRANCH16_MIN && (sljit_sw)addr <= BRANCH16_MAX);
+ buf_ptr[0] |= (sljit_ins)IMM_I16(addr >> 2);
+ break;
+ }
+
+ SLJIT_ASSERT((sljit_sw)addr >= JUMP_MIN && (sljit_sw)addr <= JUMP_MAX);
+ if (jump->flags & IS_CALL)
+ buf_ptr[0] = BL | (sljit_ins)IMM_I26(addr >> 2);
+ else
+ buf_ptr[0] = B | (sljit_ins)IMM_I26(addr >> 2);
+ } while (0);
+ jump = jump->next;
+ }
+
+ compiler->error = SLJIT_ERR_COMPILED;
+ compiler->executable_offset = executable_offset;
+ compiler->executable_size = (sljit_uw)(code_ptr - code) * sizeof(sljit_ins);
+
+ code = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(code, executable_offset);
+ code_ptr = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
+
+ SLJIT_CACHE_FLUSH(code, code_ptr);
+ SLJIT_UPDATE_WX_FLAGS(code, code_ptr, 1);
+ return code;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_has_cpu_feature(sljit_s32 feature_type)
+{
+ switch (feature_type)
+ {
+ case SLJIT_HAS_FPU:
+#ifdef SLJIT_IS_FPU_AVAILABLE
+ return (SLJIT_IS_FPU_AVAILABLE) != 0;
+#else
+ /* Available by default. */
+ return 1;
+#endif
+
+ case SLJIT_HAS_LASX:
+ return (LOONGARCH_HWCAP_LASX & get_cpu_features(GET_HWCAP));
+
+ case SLJIT_HAS_SIMD:
+ return (LOONGARCH_HWCAP_LSX & get_cpu_features(GET_HWCAP));
+
+ case SLJIT_HAS_ATOMIC:
+ return (LOONGARCH_CFG2_LAMCAS & get_cpu_features(GET_CFG2));
+
+ case SLJIT_HAS_CLZ:
+ case SLJIT_HAS_CTZ:
+ case SLJIT_HAS_REV:
+ case SLJIT_HAS_ROT:
+ case SLJIT_HAS_PREFETCH:
+ case SLJIT_HAS_COPY_F32:
+ case SLJIT_HAS_COPY_F64:
+ return 1;
+
+ default:
+ return 0;
+ }
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_cmp_info(sljit_s32 type)
+{
+ SLJIT_UNUSED_ARG(type);
+
+ return 0;
+}
+
+/* --------------------------------------------------------------------- */
+/* Entry, exit */
+/* --------------------------------------------------------------------- */
+
+/* Creates an index in data_transfer_insts array. */
+#define LOAD_DATA 0x01
+#define WORD_DATA 0x00
+#define BYTE_DATA 0x02
+#define HALF_DATA 0x04
+#define INT_DATA 0x06
+#define SIGNED_DATA 0x08
+/* Separates integer and floating point registers */
+#define GPR_REG 0x0f
+#define DOUBLE_DATA 0x10
+#define SINGLE_DATA 0x12
+
+#define MEM_MASK 0x1f
+
+#define ARG_TEST 0x00020
+#define ALT_KEEP_CACHE 0x00040
+#define CUMULATIVE_OP 0x00080
+#define IMM_OP 0x00100
+#define MOVE_OP 0x00200
+#define SRC2_IMM 0x00400
+
+#define UNUSED_DEST 0x00800
+#define REG_DEST 0x01000
+#define REG1_SOURCE 0x02000
+#define REG2_SOURCE 0x04000
+#define SLOW_SRC1 0x08000
+#define SLOW_SRC2 0x10000
+#define SLOW_DEST 0x20000
+#define MEM_USE_TMP2 0x40000
+
+#define STACK_STORE ST_D
+#define STACK_LOAD LD_D
+
+static sljit_s32 load_immediate(struct sljit_compiler *compiler, sljit_s32 dst_r, sljit_sw imm)
+{
+ if (imm <= I12_MAX && imm >= I12_MIN)
+ return push_inst(compiler, ADDI_D | RD(dst_r) | RJ(TMP_ZERO) | IMM_I12(imm));
+
+ if (imm <= 0x7fffffffl && imm >= -0x80000000l) {
+ FAIL_IF(push_inst(compiler, LU12I_W | RD(dst_r) | (sljit_ins)(((imm & 0xffffffff) >> 12) << 5)));
+ return push_inst(compiler, ORI | RD(dst_r) | RJ(dst_r) | IMM_I12(imm));
+ } else if (imm <= 0x7ffffffffffffl && imm >= -0x8000000000000l) {
+ FAIL_IF(push_inst(compiler, LU12I_W | RD(dst_r) | (sljit_ins)(((imm & 0xffffffff) >> 12) << 5)));
+ FAIL_IF(push_inst(compiler, ORI | RD(dst_r) | RJ(dst_r) | IMM_I12(imm)));
+ return push_inst(compiler, LU32I_D | RD(dst_r) | (sljit_ins)(((imm >> 32) & 0xfffff) << 5));
+ }
+ FAIL_IF(push_inst(compiler, LU12I_W | RD(dst_r) | (sljit_ins)(((imm & 0xffffffff) >> 12) << 5)));
+ FAIL_IF(push_inst(compiler, ORI | RD(dst_r) | RJ(dst_r) | IMM_I12(imm)));
+ FAIL_IF(push_inst(compiler, LU32I_D | RD(dst_r) | (sljit_ins)(((imm >> 32) & 0xfffff) << 5)));
+ return push_inst(compiler, LU52I_D | RD(dst_r) | RJ(dst_r) | IMM_I12(imm >> 52));
+}
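+
+/* For illustration: in the general (full 64-bit) case above, the constant is assembled in
+   slices: LU12I_W sets bits [31:12] (sign-extending into the upper word), ORI fills
+   bits [11:0], LU32I_D overwrites bits [51:32] and LU52I_D finally sets bits [63:52],
+   so at most four instructions are needed for any immediate. */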
+
+#define STACK_MAX_DISTANCE (-I12_MIN)
+
+static sljit_s32 emit_op_mem(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg, sljit_s32 arg, sljit_sw argw);
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compiler,
+ sljit_s32 options, sljit_s32 arg_types, sljit_s32 scratches, sljit_s32 saveds,
+ sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
+{
+ sljit_s32 i, tmp, offset;
+ sljit_s32 saved_arg_count = SLJIT_KEPT_SAVEDS_COUNT(options);
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_enter(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size));
+ set_emit_enter(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size);
+
+ local_size += GET_SAVED_REGISTERS_SIZE(scratches, saveds - saved_arg_count, 1);
+ local_size += GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, f64);
+
+ local_size = (local_size + SLJIT_LOCALS_OFFSET + 15) & ~0xf;
+ compiler->local_size = local_size;
+
+ if (local_size <= STACK_MAX_DISTANCE) {
+ /* Frequent case. */
+ FAIL_IF(push_inst(compiler, ADDI_D | RD(SLJIT_SP) | RJ(SLJIT_SP) | IMM_I12(-local_size)));
+ offset = local_size - SSIZE_OF(sw);
+ local_size = 0;
+ } else {
+ FAIL_IF(push_inst(compiler, ADDI_D | RD(SLJIT_SP) | RJ(SLJIT_SP) | IMM_I12(STACK_MAX_DISTANCE)));
+ local_size -= STACK_MAX_DISTANCE;
+
+ if (local_size > STACK_MAX_DISTANCE)
+ FAIL_IF(load_immediate(compiler, TMP_REG1, local_size));
+ offset = STACK_MAX_DISTANCE - SSIZE_OF(sw);
+ }
+
+ FAIL_IF(push_inst(compiler, STACK_STORE | RD(RETURN_ADDR_REG) | RJ(SLJIT_SP) | IMM_I12(offset)));
+
+ tmp = SLJIT_S0 - saveds;
+ for (i = SLJIT_S0 - saved_arg_count; i > tmp; i--) {
+ offset -= SSIZE_OF(sw);
+ FAIL_IF(push_inst(compiler, STACK_STORE | RD(i) | RJ(SLJIT_SP) | IMM_I12(offset)));
+ }
+
+ for (i = scratches; i >= SLJIT_FIRST_SAVED_REG; i--) {
+ offset -= SSIZE_OF(sw);
+ FAIL_IF(push_inst(compiler, STACK_STORE | RD(i) | RJ(SLJIT_SP) | IMM_I12(offset)));
+ }
+
+ tmp = SLJIT_FS0 - fsaveds;
+ for (i = SLJIT_FS0; i > tmp; i--) {
+ offset -= SSIZE_OF(f64);
+ FAIL_IF(push_inst(compiler, FST_D | FRD(i) | RJ(SLJIT_SP) | IMM_I12(offset)));
+ }
+
+ for (i = fscratches; i >= SLJIT_FIRST_SAVED_FLOAT_REG; i--) {
+ offset -= SSIZE_OF(f64);
+ FAIL_IF(push_inst(compiler, FST_D | FRD(i) | RJ(SLJIT_SP) | IMM_I12(offset)));
+ }
+
+ if (local_size > STACK_MAX_DISTANCE)
+ FAIL_IF(push_inst(compiler, SUB_D | RD(SLJIT_SP) | RJ(SLJIT_SP) | RK(TMP_REG1)));
+ else if (local_size > 0)
+ FAIL_IF(push_inst(compiler, ADDI_D | RD(SLJIT_SP) | RJ(SLJIT_SP) | IMM_I12(-local_size)));
+
+ if (options & SLJIT_ENTER_REG_ARG)
+ return SLJIT_SUCCESS;
+
+ arg_types >>= SLJIT_ARG_SHIFT;
+ saved_arg_count = 0;
+ tmp = SLJIT_R0;
+
+ while (arg_types > 0) {
+ if ((arg_types & SLJIT_ARG_MASK) < SLJIT_ARG_TYPE_F64) {
+ if (!(arg_types & SLJIT_ARG_TYPE_SCRATCH_REG)) {
+ FAIL_IF(push_inst(compiler, ADDI_D | RD(SLJIT_S0 - saved_arg_count) | RJ(tmp) | IMM_I12(0)));
+ saved_arg_count++;
+ }
+ tmp++;
+ }
+
+ arg_types >>= SLJIT_ARG_SHIFT;
+ }
+
+ return SLJIT_SUCCESS;
+}
+
+#undef STACK_MAX_DISTANCE
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_set_context(struct sljit_compiler *compiler,
+ sljit_s32 options, sljit_s32 arg_types, sljit_s32 scratches, sljit_s32 saveds,
+ sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
+{
+ CHECK_ERROR();
+ CHECK(check_sljit_set_context(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size));
+ set_set_context(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size);
+
+ local_size += GET_SAVED_REGISTERS_SIZE(scratches, saveds - SLJIT_KEPT_SAVEDS_COUNT(options), 1);
+ local_size += GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, f64);
+
+ compiler->local_size = (local_size + SLJIT_LOCALS_OFFSET + 15) & ~0xf;
+
+ return SLJIT_SUCCESS;
+}
+
+#define STACK_MAX_DISTANCE (-I12_MIN - 16)
+
+static sljit_s32 emit_stack_frame_release(struct sljit_compiler *compiler, sljit_s32 is_return_to)
+{
+ sljit_s32 i, tmp, offset;
+ sljit_s32 local_size = compiler->local_size;
+
+ if (local_size > STACK_MAX_DISTANCE) {
+ local_size -= STACK_MAX_DISTANCE;
+
+ if (local_size > STACK_MAX_DISTANCE) {
+ FAIL_IF(load_immediate(compiler, TMP_REG2, local_size));
+ FAIL_IF(push_inst(compiler, ADD_D | RD(SLJIT_SP) | RJ(SLJIT_SP) | RK(TMP_REG2)));
+ } else
+ FAIL_IF(push_inst(compiler, ADDI_D | RD(SLJIT_SP) | RJ(SLJIT_SP) | IMM_I12(local_size)));
+
+ local_size = STACK_MAX_DISTANCE;
+ }
+
+ SLJIT_ASSERT(local_size > 0);
+
+ offset = local_size - SSIZE_OF(sw);
+ if (!is_return_to)
+ FAIL_IF(push_inst(compiler, STACK_LOAD | RD(RETURN_ADDR_REG) | RJ(SLJIT_SP) | IMM_I12(offset)));
+
+ tmp = SLJIT_S0 - compiler->saveds;
+ for (i = SLJIT_S0 - SLJIT_KEPT_SAVEDS_COUNT(compiler->options); i > tmp; i--) {
+ offset -= SSIZE_OF(sw);
+ FAIL_IF(push_inst(compiler, STACK_LOAD | RD(i) | RJ(SLJIT_SP) | IMM_I12(offset)));
+ }
+
+ for (i = compiler->scratches; i >= SLJIT_FIRST_SAVED_REG; i--) {
+ offset -= SSIZE_OF(sw);
+ FAIL_IF(push_inst(compiler, STACK_LOAD | RD(i) | RJ(SLJIT_SP) | IMM_I12(offset)));
+ }
+
+ tmp = SLJIT_FS0 - compiler->fsaveds;
+ for (i = SLJIT_FS0; i > tmp; i--) {
+ offset -= SSIZE_OF(f64);
+ FAIL_IF(push_inst(compiler, FLD_D | FRD(i) | RJ(SLJIT_SP) | IMM_I12(offset)));
+ }
+
+ for (i = compiler->fscratches; i >= SLJIT_FIRST_SAVED_FLOAT_REG; i--) {
+ offset -= SSIZE_OF(f64);
+ FAIL_IF(push_inst(compiler, FLD_D | FRD(i) | RJ(SLJIT_SP) | IMM_I12(offset)));
+ }
+
+ return push_inst(compiler, ADDI_D | RD(SLJIT_SP) | RJ(SLJIT_SP) | IMM_I12(local_size));
+}
+
+#undef STACK_MAX_DISTANCE
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return_void(struct sljit_compiler *compiler)
+{
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_return_void(compiler));
+
+ FAIL_IF(emit_stack_frame_release(compiler, 0));
+ return push_inst(compiler, JIRL | RD(TMP_ZERO) | RJ(RETURN_ADDR_REG) | IMM_I12(0));
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return_to(struct sljit_compiler *compiler,
+ sljit_s32 src, sljit_sw srcw)
+{
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_return_to(compiler, src, srcw));
+
+ if (src & SLJIT_MEM) {
+ ADJUST_LOCAL_OFFSET(src, srcw);
+ FAIL_IF(emit_op_mem(compiler, WORD_DATA | LOAD_DATA, TMP_REG1, src, srcw));
+ src = TMP_REG1;
+ srcw = 0;
+ } else if (src >= SLJIT_FIRST_SAVED_REG && src <= (SLJIT_S0 - SLJIT_KEPT_SAVEDS_COUNT(compiler->options))) {
+ FAIL_IF(push_inst(compiler, ADDI_D | RD(TMP_REG1) | RJ(src) | IMM_I12(0)));
+ src = TMP_REG1;
+ srcw = 0;
+ }
+
+ FAIL_IF(emit_stack_frame_release(compiler, 1));
+
+ SLJIT_SKIP_CHECKS(compiler);
+ return sljit_emit_ijump(compiler, SLJIT_JUMP, src, srcw);
+}
+
+/* --------------------------------------------------------------------- */
+/* Operators */
+/* --------------------------------------------------------------------- */
+
+static const sljit_ins data_transfer_insts[16 + 4] = {
+/* u w s */ ST_D /* st.d */,
+/* u w l */ LD_D /* ld.d */,
+/* u b s */ ST_B /* st.b */,
+/* u b l */ LD_BU /* ld.bu */,
+/* u h s */ ST_H /* st.h */,
+/* u h l */ LD_HU /* ld.hu */,
+/* u i s */ ST_W /* st.w */,
+/* u i l */ LD_WU /* ld.wu */,
+
+/* s w s */ ST_D /* st.d */,
+/* s w l */ LD_D /* ld.d */,
+/* s b s */ ST_B /* st.b */,
+/* s b l */ LD_B /* ld.b */,
+/* s h s */ ST_H /* st.h */,
+/* s h l */ LD_H /* ld.h */,
+/* s i s */ ST_W /* st.w */,
+/* s i l */ LD_W /* ld.w */,
+
+/* d s */ FST_D /* fst.d */,
+/* d l */ FLD_D /* fld.d */,
+/* s s */ FST_S /* fst.s */,
+/* s l */ FLD_S /* fld.s */,
+};
+
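+/* Register-indexed (ldx/stx/fldx/fstx) forms of the table above, used when the address has a register offset. */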
+static const sljit_ins data_transfer_insts_x[16 + 4] = {
+/* u w s */ STX_D /* stx.d */,
+/* u w l */ LDX_D /* ldx.d */,
+/* u b s */ STX_B /* stx.b */,
+/* u b l */ LDX_BU /* ldx.bu */,
+/* u h s */ STX_H /* stx.h */,
+/* u h l */ LDX_HU /* ldx.hu */,
+/* u i s */ STX_W /* stx.w */,
+/* u i l */ LDX_WU /* ldx.wu */,
+
+/* s w s */ STX_D /* stx.d */,
+/* s w l */ LDX_D /* ldx.d */,
+/* s b s */ STX_B /* stx.b */,
+/* s b l */ LDX_B /* ldx.b */,
+/* s h s */ STX_H /* stx.h */,
+/* s h l */ LDX_H /* ldx.h */,
+/* s i s */ STX_W /* stx.w */,
+/* s i l */ LDX_W /* ldx.w */,
+
+/* d s */ FSTX_D /* fstx.d */,
+/* d l */ FLDX_D /* fldx.d */,
+/* s s */ FSTX_S /* fstx.s */,
+/* s l */ FLDX_S /* fldx.s */,
+};
+
+static sljit_s32 push_mem_inst(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg, sljit_s32 arg, sljit_sw argw)
+{
+ sljit_ins ins;
+ sljit_s32 base = arg & REG_MASK;
+
+ SLJIT_ASSERT(arg & SLJIT_MEM);
+
+ if (arg & OFFS_REG_MASK) {
+ sljit_s32 offs = OFFS_REG(arg);
+
+ SLJIT_ASSERT(!argw);
+ ins = data_transfer_insts_x[flags & MEM_MASK] |
+ ((flags & MEM_MASK) <= GPR_REG ? RD(reg) : FRD(reg)) |
+ RJ(base) | RK(offs);
+ } else {
+ SLJIT_ASSERT(argw <= 0xfff && argw >= I12_MIN);
+
+ ins = data_transfer_insts[flags & MEM_MASK] |
+ ((flags & MEM_MASK) <= GPR_REG ? RD(reg) : FRD(reg)) |
+ RJ(base) | IMM_I12(argw);
+ }
+ return push_inst(compiler, ins);
+}
+
+/* Can perform an operation using at most 1 instruction. */
+static sljit_s32 getput_arg_fast(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg, sljit_s32 arg, sljit_sw argw)
+{
+ SLJIT_ASSERT(arg & SLJIT_MEM);
+
+ /* argw == 0 (ldx/stx rd, rj, rk) can be used.
+ * argw in [-2048, 2047] (ld/st rd, rj, imm) can be used. */
+ if (!argw || (!(arg & OFFS_REG_MASK) && (argw <= I12_MAX && argw >= I12_MIN))) {
+ /* Works for both absolute and relative addresses. */
+ if (SLJIT_UNLIKELY(flags & ARG_TEST))
+ return 1;
+
+ FAIL_IF(push_mem_inst(compiler, flags, reg, arg, argw));
+ return -1;
+ }
+ return 0;
+}
+
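+/* Rounds argw to the nearest 0x1000 boundary so that the remaining offset (argw - TO_ARGW_HI(argw)) fits into a signed 12-bit immediate. */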
+#define TO_ARGW_HI(argw) (((argw) & ~0xfff) + (((argw) & 0x800) ? 0x1000 : 0))
+
+/* See getput_arg below.
+ Note: can_cache is called only for binary operators. */
+static sljit_s32 can_cache(sljit_s32 arg, sljit_sw argw, sljit_s32 next_arg, sljit_sw next_argw)
+{
+ SLJIT_ASSERT((arg & SLJIT_MEM) && (next_arg & SLJIT_MEM));
+
+ if (arg & OFFS_REG_MASK)
+ return 0;
+
+ if (arg == next_arg) {
+ if (((next_argw - argw) <= I12_MAX && (next_argw - argw) >= I12_MIN)
+ || TO_ARGW_HI(argw) == TO_ARGW_HI(next_argw))
+ return 1;
+ return 0;
+ }
+
+ return 0;
+}
+
+/* Emit the necessary instructions. See can_cache above. */
+static sljit_s32 getput_arg(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg, sljit_s32 arg, sljit_sw argw, sljit_s32 next_arg, sljit_sw next_argw)
+{
+ sljit_s32 base = arg & REG_MASK;
+ sljit_s32 tmp_r = (flags & MEM_USE_TMP2) ? TMP_REG2 : TMP_REG1;
+ sljit_sw offset;
+
+ SLJIT_ASSERT(arg & SLJIT_MEM);
+ if (!(next_arg & SLJIT_MEM)) {
+ next_arg = 0;
+ next_argw = 0;
+ }
+
+ if (SLJIT_UNLIKELY(arg & OFFS_REG_MASK)) {
+ argw &= 0x3;
+
+ if (SLJIT_UNLIKELY(argw))
+ FAIL_IF(push_inst(compiler, SLLI_D | RD(TMP_REG3) | RJ(OFFS_REG(arg)) | IMM_I12(argw)));
+ return push_mem_inst(compiler, flags, reg, SLJIT_MEM2(base, TMP_REG3), 0);
+ }
+
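+ /* Reuse the address cached in TMP_REG3 when the new offset is within 12-bit immediate range. */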
+ if (compiler->cache_arg == arg && argw - compiler->cache_argw <= I12_MAX && argw - compiler->cache_argw >= I12_MIN)
+ return push_mem_inst(compiler, flags, reg, SLJIT_MEM1(TMP_REG3), argw - compiler->cache_argw);
+
+ if (compiler->cache_arg == SLJIT_MEM && (argw - compiler->cache_argw <= I12_MAX) && (argw - compiler->cache_argw >= I12_MIN)) {
+ offset = argw - compiler->cache_argw;
+ } else {
+ sljit_sw argw_hi = TO_ARGW_HI(argw);
+ compiler->cache_arg = SLJIT_MEM;
+
+ if (next_arg && next_argw - argw <= I12_MAX && next_argw - argw >= I12_MIN && argw_hi != TO_ARGW_HI(next_argw)) {
+ FAIL_IF(load_immediate(compiler, TMP_REG3, argw));
+ compiler->cache_argw = argw;
+ offset = 0;
+ } else {
+ FAIL_IF(load_immediate(compiler, TMP_REG3, argw_hi));
+ compiler->cache_argw = argw_hi;
+ offset = argw & 0xfff;
+ argw = argw_hi;
+ }
+ }
+
+ if (!base)
+ return push_mem_inst(compiler, flags, reg, SLJIT_MEM1(TMP_REG3), offset);
+
+ if (arg == next_arg && next_argw - argw <= I12_MAX && next_argw - argw >= I12_MIN) {
+ compiler->cache_arg = arg;
+ FAIL_IF(push_inst(compiler, ADD_D | RD(TMP_REG3) | RJ(TMP_REG3) | RK(base)));
+ return push_mem_inst(compiler, flags, reg, SLJIT_MEM1(TMP_REG3), offset);
+ }
+
+ if (!offset)
+ return push_mem_inst(compiler, flags, reg, SLJIT_MEM2(base, TMP_REG3), 0);
+
+ FAIL_IF(push_inst(compiler, ADD_D | RD(tmp_r) | RJ(TMP_REG3) | RK(base)));
+ return push_mem_inst(compiler, flags, reg, SLJIT_MEM1(tmp_r), offset);
+}
+
+static sljit_s32 emit_op_mem(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg, sljit_s32 arg, sljit_sw argw)
+{
+ sljit_s32 base = arg & REG_MASK;
+ sljit_s32 tmp_r = TMP_REG1;
+
+ if (getput_arg_fast(compiler, flags, reg, arg, argw))
+ return compiler->error;
+
+ if ((flags & MEM_MASK) <= GPR_REG && (flags & LOAD_DATA))
+ tmp_r = reg;
+
+ if (SLJIT_UNLIKELY(arg & OFFS_REG_MASK)) {
+ argw &= 0x3;
+
+ if (SLJIT_UNLIKELY(argw))
+ FAIL_IF(push_inst(compiler, SLLI_D | RD(tmp_r) | RJ(OFFS_REG(arg)) | IMM_I12(argw)));
+ return push_mem_inst(compiler, flags, reg, SLJIT_MEM2(base, tmp_r), 0);
+ } else {
+ FAIL_IF(load_immediate(compiler, tmp_r, argw));
+
+ if (base != 0)
+ return push_mem_inst(compiler, flags, reg, SLJIT_MEM2(base, tmp_r), 0);
+ return push_mem_inst(compiler, flags, reg, SLJIT_MEM1(tmp_r), 0);
+ }
+}
+
+static SLJIT_INLINE sljit_s32 emit_op_mem2(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg, sljit_s32 arg1, sljit_sw arg1w, sljit_s32 arg2, sljit_sw arg2w)
+{
+ if (getput_arg_fast(compiler, flags, reg, arg1, arg1w))
+ return compiler->error;
+ return getput_arg(compiler, flags, reg, arg1, arg1w, arg2, arg2w);
+}
+
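+/* Selects the shift amount that isolates the sign bit: v for 32-bit operations, 32 + v for 64-bit ones (IMM_EXTEND(31) gives 31 or 63). */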
+#define IMM_EXTEND(v) (IMM_I12((op & SLJIT_32) ? (v) : (32 + (v))))
+
+/* andi/ori/xori zero-extend their immediate, so negative immediates are first loaded into a register. */
+#define EMIT_LOGICAL(op_imm, op_reg) \
+ if (flags & SRC2_IMM) { \
+ if (op & SLJIT_SET_Z) {\
+ FAIL_IF(push_inst(compiler, ADDI_D | RD(EQUAL_FLAG) | RJ(TMP_ZERO) | IMM_I12(src2))); \
+ FAIL_IF(push_inst(compiler, op_reg | RD(EQUAL_FLAG) | RJ(src1) | RK(EQUAL_FLAG))); \
+ } \
+ if (!(flags & UNUSED_DEST)) { \
+ if (dst == src1) { \
+ FAIL_IF(push_inst(compiler, ADDI_D | RD(TMP_REG1) | RJ(TMP_ZERO) | IMM_I12(src2))); \
+ FAIL_IF(push_inst(compiler, op_reg | RD(dst) | RJ(src1) | RK(TMP_REG1))); \
+ } else { \
+ FAIL_IF(push_inst(compiler, ADDI_D | RD(dst) | RJ(TMP_ZERO) | IMM_I12(src2))); \
+ FAIL_IF(push_inst(compiler, op_reg | RD(dst) | RJ(src1) | RK(dst))); \
+ } \
+ } \
+ } else { \
+ if (op & SLJIT_SET_Z) \
+ FAIL_IF(push_inst(compiler, op_reg | RD(EQUAL_FLAG) | RJ(src1) | RK(src2))); \
+ if (!(flags & UNUSED_DEST)) \
+ FAIL_IF(push_inst(compiler, op_reg | RD(dst) | RJ(src1) | RK(src2))); \
+ } \
+ while (0)
+
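+/* Records the immediate and register forms of a shift; the common code after the switch in emit_single_op emits the selected instruction. */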
+#define EMIT_SHIFT(imm, reg) \
+ op_imm = (imm); \
+ op_reg = (reg)
+
+static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 flags,
+ sljit_s32 dst, sljit_s32 src1, sljit_sw src2)
+{
+ sljit_s32 is_overflow, is_carry, carry_src_r, is_handled, reg;
+ sljit_ins op_imm, op_reg;
+ sljit_ins word_size = ((op & SLJIT_32) ? 32 : 64);
+
+ switch (GET_OPCODE(op)) {
+ case SLJIT_MOV:
+ SLJIT_ASSERT(src1 == TMP_ZERO && !(flags & SRC2_IMM));
+ if (dst != src2)
+ return push_inst(compiler, INST(ADD, op) | RD(dst) | RJ(src2) | IMM_I12(0));
+ return SLJIT_SUCCESS;
+
+ case SLJIT_MOV_U8:
+ SLJIT_ASSERT(src1 == TMP_ZERO && !(flags & SRC2_IMM));
+ if ((flags & (REG_DEST | REG2_SOURCE)) == (REG_DEST | REG2_SOURCE))
+ return push_inst(compiler, ANDI | RD(dst) | RJ(src2) | IMM_I12(0xff));
+ SLJIT_ASSERT(dst == src2);
+ return SLJIT_SUCCESS;
+
+ case SLJIT_MOV_S8:
+ SLJIT_ASSERT(src1 == TMP_ZERO && !(flags & SRC2_IMM));
+ if ((flags & (REG_DEST | REG2_SOURCE)) == (REG_DEST | REG2_SOURCE))
+ return push_inst(compiler, EXT_W_B | RD(dst) | RJ(src2));
+ SLJIT_ASSERT(dst == src2);
+ return SLJIT_SUCCESS;
+
+ case SLJIT_MOV_U16:
+ SLJIT_ASSERT(src1 == TMP_ZERO && !(flags & SRC2_IMM));
+ if ((flags & (REG_DEST | REG2_SOURCE)) == (REG_DEST | REG2_SOURCE))
+ return push_inst(compiler, INST(BSTRPICK, op) | RD(dst) | RJ(src2) | (15 << 16));
+ SLJIT_ASSERT(dst == src2);
+ return SLJIT_SUCCESS;
+
+ case SLJIT_MOV_S16:
+ SLJIT_ASSERT(src1 == TMP_ZERO && !(flags & SRC2_IMM));
+ if ((flags & (REG_DEST | REG2_SOURCE)) == (REG_DEST | REG2_SOURCE))
+ return push_inst(compiler, EXT_W_H | RD(dst) | RJ(src2));
+ SLJIT_ASSERT(dst == src2);
+ return SLJIT_SUCCESS;
+
+ case SLJIT_MOV_U32:
+ SLJIT_ASSERT(src1 == TMP_ZERO && !(flags & SRC2_IMM));
+ if ((flags & (REG_DEST | REG2_SOURCE)) == (REG_DEST | REG2_SOURCE))
+ return push_inst(compiler, BSTRPICK_D | RD(dst) | RJ(src2) | (31 << 16));
+ SLJIT_ASSERT(dst == src2);
+ return SLJIT_SUCCESS;
+
+ case SLJIT_MOV_S32:
+ SLJIT_ASSERT(src1 == TMP_ZERO && !(flags & SRC2_IMM));
+ if ((flags & (REG_DEST | REG2_SOURCE)) == (REG_DEST | REG2_SOURCE))
+ return push_inst(compiler, SLLI_W | RD(dst) | RJ(src2) | IMM_I12(0));
+ SLJIT_ASSERT(dst == src2);
+ return SLJIT_SUCCESS;
+
+ case SLJIT_CLZ:
+ SLJIT_ASSERT(src1 == TMP_ZERO && !(flags & SRC2_IMM));
+ return push_inst(compiler, INST(CLZ, op) | RD(dst) | RJ(src2));
+
+ case SLJIT_CTZ:
+ SLJIT_ASSERT(src1 == TMP_ZERO && !(flags & SRC2_IMM));
+ return push_inst(compiler, INST(CTZ, op) | RD(dst) | RJ(src2));
+
+ case SLJIT_REV:
+ SLJIT_ASSERT(src1 == TMP_ZERO && !(flags & SRC2_IMM));
+ return push_inst(compiler, ((op & SLJIT_32) ? REVB_2W : REVB_D) | RD(dst) | RJ(src2));
+
+ case SLJIT_REV_S16:
+ SLJIT_ASSERT(src1 == TMP_ZERO && !(flags & SRC2_IMM));
+ FAIL_IF(push_inst(compiler, REVB_2H | RD(dst) | RJ(src2)));
+ return push_inst(compiler, EXT_W_H | RD(dst) | RJ(dst));
+
+ case SLJIT_REV_U16:
+ SLJIT_ASSERT(src1 == TMP_ZERO && !(flags & SRC2_IMM));
+ FAIL_IF(push_inst(compiler, REVB_2H | RD(dst) | RJ(src2)));
+ return push_inst(compiler, INST(BSTRPICK, op) | RD(dst) | RJ(dst) | (15 << 16));
+
+ case SLJIT_REV_S32:
+ SLJIT_ASSERT(src1 == TMP_ZERO && !(flags & SRC2_IMM) && dst != TMP_REG1);
+ FAIL_IF(push_inst(compiler, REVB_2W | RD(dst) | RJ(src2)));
+ return push_inst(compiler, SLLI_W | RD(dst) | RJ(dst) | IMM_I12(0));
+
+ case SLJIT_REV_U32:
+ SLJIT_ASSERT(src1 == TMP_ZERO && !(flags & SRC2_IMM) && dst != TMP_REG1);
+ FAIL_IF(push_inst(compiler, REVB_2W | RD(dst) | RJ(src2)));
+ return push_inst(compiler, BSTRPICK_D | RD(dst) | RJ(dst) | (31 << 16));
+
+ case SLJIT_ADD:
+ /* Overflow computation (both add and sub): overflow = src1_sign ^ src2_sign ^ result_sign ^ carry_flag */
+ is_overflow = GET_FLAG_TYPE(op) == SLJIT_OVERFLOW;
+ carry_src_r = GET_FLAG_TYPE(op) == SLJIT_CARRY;
+
+ if (flags & SRC2_IMM) {
+ if (is_overflow) {
+ if (src2 >= 0)
+ FAIL_IF(push_inst(compiler, INST(ADDI, op) | RD(EQUAL_FLAG) | RJ(src1) | IMM_I12(0)));
+ else {
+ FAIL_IF(push_inst(compiler, INST(ADDI, op) | RD(EQUAL_FLAG) | RJ(TMP_ZERO) | IMM_I12(-1)));
+ FAIL_IF(push_inst(compiler, XOR | RD(EQUAL_FLAG) | RJ(src1) | RK(EQUAL_FLAG)));
+ }
+ } else if (op & SLJIT_SET_Z)
+ FAIL_IF(push_inst(compiler, INST(ADDI, op) | RD(EQUAL_FLAG) | RJ(src1) | IMM_I12(src2)));
+
+ /* Only the zero flag is needed. */
+ if (!(flags & UNUSED_DEST) || (op & VARIABLE_FLAG_MASK))
+ FAIL_IF(push_inst(compiler, INST(ADDI, op) | RD(dst) | RJ(src1) | IMM_I12(src2)));
+ } else {
+ if (is_overflow)
+ FAIL_IF(push_inst(compiler, XOR | RD(EQUAL_FLAG) | RJ(src1) | RK(src2)));
+ else if (op & SLJIT_SET_Z)
+ FAIL_IF(push_inst(compiler, INST(ADD, op) | RD(EQUAL_FLAG) | RJ(src1) | RK(src2)));
+
+ if (is_overflow || carry_src_r != 0) {
+ if (src1 != dst)
+ carry_src_r = (sljit_s32)src1;
+ else if (src2 != dst)
+ carry_src_r = (sljit_s32)src2;
+ else {
+ FAIL_IF(push_inst(compiler, INST(ADDI, op) | RD(OTHER_FLAG) | RJ(src1) | IMM_I12(0)));
+ carry_src_r = OTHER_FLAG;
+ }
+ }
+
+ /* Only the zero flag is needed. */
+ if (!(flags & UNUSED_DEST) || (op & VARIABLE_FLAG_MASK))
+ FAIL_IF(push_inst(compiler, INST(ADD, op) | RD(dst) | RJ(src1) | RK(src2)));
+ }
+
+ /* Carry is zero if a + b >= a or a + b >= b, otherwise it is 1. */
+ if (is_overflow || carry_src_r != 0) {
+ if (flags & SRC2_IMM)
+ FAIL_IF(push_inst(compiler, SLTUI | RD(OTHER_FLAG) | RJ(dst) | IMM_I12(src2)));
+ else
+ FAIL_IF(push_inst(compiler, SLTU | RD(OTHER_FLAG) | RJ(dst) | RK(carry_src_r)));
+ }
+
+ if (!is_overflow)
+ return SLJIT_SUCCESS;
+
+ FAIL_IF(push_inst(compiler, XOR | RD(TMP_REG1) | RJ(dst) | RK(EQUAL_FLAG)));
+ if (op & SLJIT_SET_Z)
+ FAIL_IF(push_inst(compiler, INST(ADD, op) | RD(EQUAL_FLAG) | RJ(dst) | IMM_I12(0)));
+ FAIL_IF(push_inst(compiler, INST(SRLI, op) | RD(TMP_REG1) | RJ(TMP_REG1) | IMM_EXTEND(31)));
+ return push_inst(compiler, XOR | RD(OTHER_FLAG) | RJ(TMP_REG1) | RK(OTHER_FLAG));
+
+ case SLJIT_ADDC:
+ carry_src_r = GET_FLAG_TYPE(op) == SLJIT_CARRY;
+
+ if (flags & SRC2_IMM) {
+ FAIL_IF(push_inst(compiler, ADDI_D | RD(dst) | RJ(src1) | IMM_I12(src2)));
+ } else {
+ if (carry_src_r != 0) {
+ if (src1 != dst)
+ carry_src_r = (sljit_s32)src1;
+ else if (src2 != dst)
+ carry_src_r = (sljit_s32)src2;
+ else {
+ FAIL_IF(push_inst(compiler, ADDI_D | RD(EQUAL_FLAG) | RJ(src1) | IMM_I12(0)));
+ carry_src_r = EQUAL_FLAG;
+ }
+ }
+
+ FAIL_IF(push_inst(compiler, ADD_D | RD(dst) | RJ(src1) | RK(src2)));
+ }
+
+ /* Carry is zero if a + b >= a or a + b >= b, otherwise it is 1. */
+ if (carry_src_r != 0) {
+ if (flags & SRC2_IMM)
+ FAIL_IF(push_inst(compiler, SLTUI | RD(EQUAL_FLAG) | RJ(dst) | IMM_I12(src2)));
+ else
+ FAIL_IF(push_inst(compiler, SLTU | RD(EQUAL_FLAG) | RJ(dst) | RK(carry_src_r)));
+ }
+
+ FAIL_IF(push_inst(compiler, ADD_D | RD(dst) | RJ(dst) | RK(OTHER_FLAG)));
+
+ if (carry_src_r == 0)
+ return SLJIT_SUCCESS;
+
+ /* OTHER_FLAG becomes 1 only when (dst == 0) && (OTHER_FLAG == 1). */
+ FAIL_IF(push_inst(compiler, SLTU | RD(OTHER_FLAG) | RJ(dst) | RK(OTHER_FLAG)));
+ /* Set carry flag. */
+ return push_inst(compiler, OR | RD(OTHER_FLAG) | RJ(OTHER_FLAG) | RK(EQUAL_FLAG));
+
+ case SLJIT_SUB:
+ if ((flags & SRC2_IMM) && src2 == I12_MIN) {
+ FAIL_IF(push_inst(compiler, ADDI_D | RD(TMP_REG2) | RJ(TMP_ZERO) | IMM_I12(src2)));
+ src2 = TMP_REG2;
+ flags &= ~SRC2_IMM;
+ }
+
+ is_handled = 0;
+
+ if (flags & SRC2_IMM) {
+ if (GET_FLAG_TYPE(op) == SLJIT_LESS) {
+ FAIL_IF(push_inst(compiler, SLTUI | RD(OTHER_FLAG) | RJ(src1) | IMM_I12(src2)));
+ is_handled = 1;
+ } else if (GET_FLAG_TYPE(op) == SLJIT_SIG_LESS) {
+ FAIL_IF(push_inst(compiler, SLTI | RD(OTHER_FLAG) | RJ(src1) | IMM_I12(src2)));
+ is_handled = 1;
+ }
+ }
+
+ if (!is_handled && GET_FLAG_TYPE(op) >= SLJIT_LESS && GET_FLAG_TYPE(op) <= SLJIT_SIG_LESS_EQUAL) {
+ is_handled = 1;
+
+ if (flags & SRC2_IMM) {
+ reg = (src1 == TMP_REG1) ? TMP_REG2 : TMP_REG1;
+ FAIL_IF(push_inst(compiler, ADDI_D | RD(reg) | RJ(TMP_ZERO) | IMM_I12(src2)));
+ src2 = reg;
+ flags &= ~SRC2_IMM;
+ }
+
+ switch (GET_FLAG_TYPE(op)) {
+ case SLJIT_LESS:
+ FAIL_IF(push_inst(compiler, SLTU | RD(OTHER_FLAG) | RJ(src1) | RK(src2)));
+ break;
+ case SLJIT_GREATER:
+ FAIL_IF(push_inst(compiler, SLTU | RD(OTHER_FLAG) | RJ(src2) | RK(src1)));
+ break;
+ case SLJIT_SIG_LESS:
+ FAIL_IF(push_inst(compiler, SLT | RD(OTHER_FLAG) | RJ(src1) | RK(src2)));
+ break;
+ case SLJIT_SIG_GREATER:
+ FAIL_IF(push_inst(compiler, SLT | RD(OTHER_FLAG) | RJ(src2) | RK(src1)));
+ break;
+ }
+ }
+
+ if (is_handled) {
+ if (flags & SRC2_IMM) {
+ if (op & SLJIT_SET_Z)
+ FAIL_IF(push_inst(compiler, INST(ADDI, op) | RD(EQUAL_FLAG) | RJ(src1) | IMM_I12(-src2)));
+ if (!(flags & UNUSED_DEST))
+ return push_inst(compiler, INST(ADDI, op) | RD(dst) | RJ(src1) | IMM_I12(-src2));
+ } else {
+ if (op & SLJIT_SET_Z)
+ FAIL_IF(push_inst(compiler, INST(SUB, op) | RD(EQUAL_FLAG) | RJ(src1) | RK(src2)));
+ if (!(flags & UNUSED_DEST))
+ return push_inst(compiler, INST(SUB, op) | RD(dst) | RJ(src1) | RK(src2));
+ }
+ return SLJIT_SUCCESS;
+ }
+
+ is_overflow = GET_FLAG_TYPE(op) == SLJIT_OVERFLOW;
+ is_carry = GET_FLAG_TYPE(op) == SLJIT_CARRY;
+
+ if (flags & SRC2_IMM) {
+ if (is_overflow) {
+ if (src2 >= 0)
+ FAIL_IF(push_inst(compiler, INST(ADDI, op) | RD(EQUAL_FLAG) | RJ(src1) | IMM_I12(0)));
+ else {
+ FAIL_IF(push_inst(compiler, INST(ADDI, op) | RD(EQUAL_FLAG) | RJ(src1) | IMM_I12(-1)));
+ FAIL_IF(push_inst(compiler, XOR | RD(EQUAL_FLAG) | RJ(src1) | RK(EQUAL_FLAG)));
+ }
+ } else if (op & SLJIT_SET_Z)
+ FAIL_IF(push_inst(compiler, INST(ADDI, op) | RD(EQUAL_FLAG) | RJ(src1) | IMM_I12(-src2)));
+
+ if (is_overflow || is_carry)
+ FAIL_IF(push_inst(compiler, SLTUI | RD(OTHER_FLAG) | RJ(src1) | IMM_I12(src2)));
+
+ /* Only the zero flag is needed. */
+ if (!(flags & UNUSED_DEST) || (op & VARIABLE_FLAG_MASK))
+ FAIL_IF(push_inst(compiler, INST(ADDI, op) | RD(dst) | RJ(src1) | IMM_I12(-src2)));
+ } else {
+ if (is_overflow)
+ FAIL_IF(push_inst(compiler, XOR | RD(EQUAL_FLAG) | RJ(src1) | RK(src2)));
+ else if (op & SLJIT_SET_Z)
+ FAIL_IF(push_inst(compiler, INST(SUB, op) | RD(EQUAL_FLAG) | RJ(src1) | RK(src2)));
+
+ if (is_overflow || is_carry)
+ FAIL_IF(push_inst(compiler, SLTU | RD(OTHER_FLAG) | RJ(src1) | RK(src2)));
+
+ /* Only the zero flag is needed. */
+ if (!(flags & UNUSED_DEST) || (op & VARIABLE_FLAG_MASK))
+ FAIL_IF(push_inst(compiler, INST(SUB, op) | RD(dst) | RJ(src1) | RK(src2)));
+ }
+
+ if (!is_overflow)
+ return SLJIT_SUCCESS;
+
+ FAIL_IF(push_inst(compiler, XOR | RD(TMP_REG1) | RJ(dst) | RK(EQUAL_FLAG)));
+ if (op & SLJIT_SET_Z)
+ FAIL_IF(push_inst(compiler, INST(ADDI, op) | RD(EQUAL_FLAG) | RJ(dst) | IMM_I12(0)));
+ FAIL_IF(push_inst(compiler, INST(SRLI, op) | RD(TMP_REG1) | RJ(TMP_REG1) | IMM_EXTEND(31)));
+ return push_inst(compiler, XOR | RD(OTHER_FLAG) | RJ(TMP_REG1) | RK(OTHER_FLAG));
+
+ case SLJIT_SUBC:
+ if ((flags & SRC2_IMM) && src2 == I12_MIN) {
+ FAIL_IF(push_inst(compiler, INST(ADDI, op) | RD(TMP_REG2) | RJ(TMP_ZERO) | IMM_I12(src2)));
+ src2 = TMP_REG2;
+ flags &= ~SRC2_IMM;
+ }
+
+ is_carry = GET_FLAG_TYPE(op) == SLJIT_CARRY;
+
+ if (flags & SRC2_IMM) {
+ if (is_carry)
+ FAIL_IF(push_inst(compiler, SLTUI | RD(EQUAL_FLAG) | RJ(src1) | IMM_I12(src2)));
+
+ FAIL_IF(push_inst(compiler, INST(ADDI, op) | RD(dst) | RJ(src1) | IMM_I12(-src2)));
+ } else {
+ if (is_carry)
+ FAIL_IF(push_inst(compiler, SLTU | RD(EQUAL_FLAG) | RJ(src1) | RK(src2)));
+
+ FAIL_IF(push_inst(compiler, INST(SUB, op) | RD(dst) | RJ(src1) | RK(src2)));
+ }
+
+ if (is_carry)
+ FAIL_IF(push_inst(compiler, SLTU | RD(TMP_REG1) | RJ(dst) | RK(OTHER_FLAG)));
+
+ FAIL_IF(push_inst(compiler, INST(SUB, op) | RD(dst) | RJ(dst) | RK(OTHER_FLAG)));
+
+ if (!is_carry)
+ return SLJIT_SUCCESS;
+
+ return push_inst(compiler, OR | RD(OTHER_FLAG) | RJ(EQUAL_FLAG) | RK(TMP_REG1));
+
+ case SLJIT_MUL:
+ SLJIT_ASSERT(!(flags & SRC2_IMM));
+
+ if (GET_FLAG_TYPE(op) != SLJIT_OVERFLOW)
+ return push_inst(compiler, INST(MUL, op) | RD(dst) | RJ(src1) | RK(src2));
+
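+ /* Overflow check: compare the upper part of the full product with the sign extension of the truncated result; OTHER_FLAG is non-zero on signed overflow. */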
+ if (op & SLJIT_32) {
+ FAIL_IF(push_inst(compiler, MUL_D | RD(OTHER_FLAG) | RJ(src1) | RK(src2)));
+ FAIL_IF(push_inst(compiler, MUL_W | RD(dst) | RJ(src1) | RK(src2)));
+ return push_inst(compiler, SUB_D | RD(OTHER_FLAG) | RJ(dst) | RK(OTHER_FLAG));
+ }
+
+ FAIL_IF(push_inst(compiler, MULH_D | RD(EQUAL_FLAG) | RJ(src1) | RK(src2)));
+ FAIL_IF(push_inst(compiler, MUL_D | RD(dst) | RJ(src1) | RK(src2)));
+ FAIL_IF(push_inst(compiler, SRAI_D | RD(OTHER_FLAG) | RJ(dst) | IMM_I12((63))));
+ return push_inst(compiler, SUB_D | RD(OTHER_FLAG) | RJ(EQUAL_FLAG) | RK(OTHER_FLAG));
+
+ case SLJIT_AND:
+ EMIT_LOGICAL(ANDI, AND);
+ return SLJIT_SUCCESS;
+
+ case SLJIT_OR:
+ EMIT_LOGICAL(ORI, OR);
+ return SLJIT_SUCCESS;
+
+ case SLJIT_XOR:
+ EMIT_LOGICAL(XORI, XOR);
+ return SLJIT_SUCCESS;
+
+ case SLJIT_SHL:
+ case SLJIT_MSHL:
+ if (op & SLJIT_32) {
+ EMIT_SHIFT(SLLI_W, SLL_W);
+ } else {
+ EMIT_SHIFT(SLLI_D, SLL_D);
+ }
+ break;
+
+ case SLJIT_LSHR:
+ case SLJIT_MLSHR:
+ if (op & SLJIT_32) {
+ EMIT_SHIFT(SRLI_W, SRL_W);
+ } else {
+ EMIT_SHIFT(SRLI_D, SRL_D);
+ }
+ break;
+
+ case SLJIT_ASHR:
+ case SLJIT_MASHR:
+ if (op & SLJIT_32) {
+ EMIT_SHIFT(SRAI_W, SRA_W);
+ } else {
+ EMIT_SHIFT(SRAI_D, SRA_D);
+ }
+ break;
+
+ case SLJIT_ROTL:
+ case SLJIT_ROTR:
+ if (flags & SRC2_IMM) {
+ SLJIT_ASSERT(src2 != 0);
+
+ if (GET_OPCODE(op) == SLJIT_ROTL)
+ src2 = word_size - src2;
+ return push_inst(compiler, INST(ROTRI, op) | RD(dst) | RJ(src1) | IMM_I12(src2));
+ }
+
+ if (src2 == TMP_ZERO) {
+ if (dst != src1)
+ return push_inst(compiler, INST(ADDI, op) | RD(dst) | RJ(src1) | IMM_I12(0));
+ return SLJIT_SUCCESS;
+ }
+
+ if (GET_OPCODE(op) == SLJIT_ROTL) {
+ FAIL_IF(push_inst(compiler, INST(SUB, op) | RD(OTHER_FLAG) | RJ(TMP_ZERO) | RK(src2)));
+ src2 = OTHER_FLAG;
+ }
+ return push_inst(compiler, INST(ROTR, op) | RD(dst) | RJ(src1) | RK(src2));
+
+ default:
+ SLJIT_UNREACHABLE();
+ return SLJIT_SUCCESS;
+ }
+
+ if (flags & SRC2_IMM) {
+ if (op & SLJIT_SET_Z)
+ FAIL_IF(push_inst(compiler, op_imm | RD(EQUAL_FLAG) | RJ(src1) | IMM_I12(src2)));
+
+ if (flags & UNUSED_DEST)
+ return SLJIT_SUCCESS;
+ return push_inst(compiler, op_imm | RD(dst) | RJ(src1) | IMM_I12(src2));
+ }
+
+ if (op & SLJIT_SET_Z)
+ FAIL_IF(push_inst(compiler, op_reg | RD(EQUAL_FLAG) | RJ(src1) | RK(src2)));
+
+ if (flags & UNUSED_DEST)
+ return SLJIT_SUCCESS;
+ return push_inst(compiler, op_reg | RD(dst) | RJ(src1) | RK(src2));
+}
+
+#undef IMM_EXTEND
+
+static sljit_s32 emit_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 flags,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
+{
+ /* arg1 goes to TMP_REG1 or src reg
+ arg2 goes to TMP_REG2, imm or src reg
+ TMP_REG3 can be used for caching
+ result goes to TMP_REG2, so storing the result can use TMP_REG1 and TMP_REG3. */
+ sljit_s32 dst_r = TMP_REG2;
+ sljit_s32 src1_r;
+ sljit_sw src2_r = 0;
+ sljit_s32 src2_tmp_reg = (GET_OPCODE(op) >= SLJIT_OP2_BASE && FAST_IS_REG(src1)) ? TMP_REG1 : TMP_REG2;
+
+ if (!(flags & ALT_KEEP_CACHE)) {
+ compiler->cache_arg = 0;
+ compiler->cache_argw = 0;
+ }
+
+ if (dst == 0) {
+ SLJIT_ASSERT(HAS_FLAGS(op));
+ flags |= UNUSED_DEST;
+ dst = TMP_REG2;
+ } else if (FAST_IS_REG(dst)) {
+ dst_r = dst;
+ flags |= REG_DEST;
+ if (flags & MOVE_OP)
+ src2_tmp_reg = dst_r;
+ } else if ((dst & SLJIT_MEM) && !getput_arg_fast(compiler, flags | ARG_TEST, TMP_REG1, dst, dstw))
+ flags |= SLOW_DEST;
+
+ if (flags & IMM_OP) {
+ if (src2 == SLJIT_IMM && src2w != 0 && src2w <= I12_MAX && src2w >= I12_MIN) {
+ flags |= SRC2_IMM;
+ src2_r = src2w;
+ } else if ((flags & CUMULATIVE_OP) && src1 == SLJIT_IMM && src1w != 0 && src1w <= I12_MAX && src1w >= I12_MIN) {
+ flags |= SRC2_IMM;
+ src2_r = src1w;
+
+ /* And swap arguments. */
+ src1 = src2;
+ src1w = src2w;
+ src2 = SLJIT_IMM;
+ /* src2w = src2_r unneeded. */
+ }
+ }
+
+ /* Source 1. */
+ if (FAST_IS_REG(src1)) {
+ src1_r = src1;
+ flags |= REG1_SOURCE;
+ } else if (src1 == SLJIT_IMM) {
+ if (src1w) {
+ FAIL_IF(load_immediate(compiler, TMP_REG1, src1w));
+ src1_r = TMP_REG1;
+ }
+ else
+ src1_r = TMP_ZERO;
+ } else {
+ if (getput_arg_fast(compiler, flags | LOAD_DATA, TMP_REG1, src1, src1w))
+ FAIL_IF(compiler->error);
+ else
+ flags |= SLOW_SRC1;
+ src1_r = TMP_REG1;
+ }
+
+ /* Source 2. */
+ if (FAST_IS_REG(src2)) {
+ src2_r = src2;
+ flags |= REG2_SOURCE;
+ if ((flags & (REG_DEST | MOVE_OP)) == MOVE_OP)
+ dst_r = (sljit_s32)src2_r;
+ } else if (src2 == SLJIT_IMM) {
+ if (!(flags & SRC2_IMM)) {
+ if (src2w) {
+ FAIL_IF(load_immediate(compiler, src2_tmp_reg, src2w));
+ src2_r = src2_tmp_reg;
+ } else {
+ src2_r = TMP_ZERO;
+ if (flags & MOVE_OP) {
+ if (dst & SLJIT_MEM)
+ dst_r = 0;
+ else
+ op = SLJIT_MOV;
+ }
+ }
+ }
+ } else {
+ if (getput_arg_fast(compiler, flags | LOAD_DATA, src2_tmp_reg, src2, src2w))
+ FAIL_IF(compiler->error);
+ else
+ flags |= SLOW_SRC2;
+
+ src2_r = src2_tmp_reg;
+ }
+
+ if ((flags & (SLOW_SRC1 | SLOW_SRC2)) == (SLOW_SRC1 | SLOW_SRC2)) {
+ SLJIT_ASSERT(src2_r == TMP_REG2);
+ if ((flags & SLOW_DEST) && !can_cache(src2, src2w, src1, src1w) && can_cache(src2, src2w, dst, dstw)) {
+ FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, TMP_REG1, src1, src1w, src2, src2w));
+ FAIL_IF(getput_arg(compiler, flags | LOAD_DATA | MEM_USE_TMP2, TMP_REG2, src2, src2w, dst, dstw));
+ } else {
+ FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, TMP_REG2, src2, src2w, src1, src1w));
+ FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, TMP_REG1, src1, src1w, dst, dstw));
+ }
+ }
+ else if (flags & SLOW_SRC1)
+ FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, TMP_REG1, src1, src1w, dst, dstw));
+ else if (flags & SLOW_SRC2)
+ FAIL_IF(getput_arg(compiler, flags | LOAD_DATA | ((src1_r == TMP_REG1) ? MEM_USE_TMP2 : 0), src2_tmp_reg, src2, src2w, dst, dstw));
+
+ FAIL_IF(emit_single_op(compiler, op, flags, dst_r, src1_r, src2_r));
+
+ if (dst & SLJIT_MEM) {
+ if (!(flags & SLOW_DEST)) {
+ getput_arg_fast(compiler, flags, dst_r, dst, dstw);
+ return compiler->error;
+ }
+ return getput_arg(compiler, flags, dst_r, dst, dstw, 0, 0);
+ }
+
+ return SLJIT_SUCCESS;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op0(struct sljit_compiler *compiler, sljit_s32 op)
+{
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_op0(compiler, op));
+
+ switch (GET_OPCODE(op)) {
+ case SLJIT_BREAKPOINT:
+ return push_inst(compiler, BREAK);
+ case SLJIT_NOP:
+ return push_inst(compiler, ANDI | RD(TMP_ZERO) | RJ(TMP_ZERO) | IMM_I12(0));
+ case SLJIT_LMUL_UW:
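+ /* Full 128-bit product: R1 receives the high half and R0 the low half; R1 is copied to TMP_REG1 first because computing the high half overwrites it. */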
+ FAIL_IF(push_inst(compiler, ADDI_D | RD(TMP_REG1) | RJ(SLJIT_R1) | IMM_I12(0)));
+ FAIL_IF(push_inst(compiler, MULH_DU | RD(SLJIT_R1) | RJ(SLJIT_R0) | RK(SLJIT_R1)));
+ return push_inst(compiler, MUL_D | RD(SLJIT_R0) | RJ(SLJIT_R0) | RK(TMP_REG1));
+ case SLJIT_LMUL_SW:
+ FAIL_IF(push_inst(compiler, ADDI_D | RD(TMP_REG1) | RJ(SLJIT_R1) | IMM_I12(0)));
+ FAIL_IF(push_inst(compiler, MULH_D | RD(SLJIT_R1) | RJ(SLJIT_R0) | RK(SLJIT_R1)));
+ return push_inst(compiler, MUL_D | RD(SLJIT_R0) | RJ(SLJIT_R0) | RK(TMP_REG1));
+ case SLJIT_DIVMOD_UW:
+ FAIL_IF(push_inst(compiler, INST(ADDI, op) | RD(TMP_REG1) | RJ(SLJIT_R0) | IMM_I12(0)));
+ FAIL_IF(push_inst(compiler, ((op & SLJIT_32) ? DIV_WU : DIV_DU) | RD(SLJIT_R0) | RJ(SLJIT_R0) | RK(SLJIT_R1)));
+ return push_inst(compiler, ((op & SLJIT_32) ? MOD_WU : MOD_DU) | RD(SLJIT_R1) | RJ(TMP_REG1) | RK(SLJIT_R1));
+ case SLJIT_DIVMOD_SW:
+ FAIL_IF(push_inst(compiler, INST(ADDI, op) | RD(TMP_REG1) | RJ(SLJIT_R0) | IMM_I12(0)));
+ FAIL_IF(push_inst(compiler, INST(DIV, op) | RD(SLJIT_R0) | RJ(SLJIT_R0) | RK(SLJIT_R1)));
+ return push_inst(compiler, INST(MOD, op) | RD(SLJIT_R1) | RJ(TMP_REG1) | RK(SLJIT_R1));
+ case SLJIT_DIV_UW:
+ return push_inst(compiler, ((op & SLJIT_32) ? DIV_WU : DIV_DU) | RD(SLJIT_R0) | RJ(SLJIT_R0) | RK(SLJIT_R1));
+ case SLJIT_DIV_SW:
+ return push_inst(compiler, INST(DIV, op) | RD(SLJIT_R0) | RJ(SLJIT_R0) | RK(SLJIT_R1));
+ case SLJIT_ENDBR:
+ case SLJIT_SKIP_FRAMES_BEFORE_RETURN:
+ return SLJIT_SUCCESS;
+ }
+
+ SLJIT_UNREACHABLE();
+ return SLJIT_ERR_UNSUPPORTED;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
+{
+ sljit_s32 flags = 0;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_op1(compiler, op, dst, dstw, src, srcw));
+ ADJUST_LOCAL_OFFSET(dst, dstw);
+ ADJUST_LOCAL_OFFSET(src, srcw);
+
+ if (op & SLJIT_32)
+ flags = INT_DATA | SIGNED_DATA;
+
+ switch (GET_OPCODE(op)) {
+ case SLJIT_MOV:
+ case SLJIT_MOV_P:
+ return emit_op(compiler, SLJIT_MOV, WORD_DATA | MOVE_OP, dst, dstw, TMP_ZERO, 0, src, srcw);
+
+ case SLJIT_MOV_U32:
+ return emit_op(compiler, SLJIT_MOV_U32, INT_DATA | MOVE_OP, dst, dstw, TMP_ZERO, 0, src, (src == SLJIT_IMM) ? (sljit_u32)srcw : srcw);
+
+ case SLJIT_MOV_S32:
+ /* Logical operators have no W variant, so sign-extended input is necessary for them. */
+ case SLJIT_MOV32:
+ return emit_op(compiler, SLJIT_MOV_S32, INT_DATA | SIGNED_DATA | MOVE_OP, dst, dstw, TMP_ZERO, 0, src, (src == SLJIT_IMM) ? (sljit_s32)srcw : srcw);
+
+ case SLJIT_MOV_U8:
+ return emit_op(compiler, op, BYTE_DATA | MOVE_OP, dst, dstw, TMP_ZERO, 0, src, (src == SLJIT_IMM) ? (sljit_u8)srcw : srcw);
+
+ case SLJIT_MOV_S8:
+ return emit_op(compiler, op, BYTE_DATA | SIGNED_DATA | MOVE_OP, dst, dstw, TMP_ZERO, 0, src, (src == SLJIT_IMM) ? (sljit_s8)srcw : srcw);
+
+ case SLJIT_MOV_U16:
+ return emit_op(compiler, op, HALF_DATA | MOVE_OP, dst, dstw, TMP_ZERO, 0, src, (src == SLJIT_IMM) ? (sljit_u16)srcw : srcw);
+
+ case SLJIT_MOV_S16:
+ return emit_op(compiler, op, HALF_DATA | SIGNED_DATA | MOVE_OP, dst, dstw, TMP_ZERO, 0, src, (src == SLJIT_IMM) ? (sljit_s16)srcw : srcw);
+
+ case SLJIT_CLZ:
+ case SLJIT_CTZ:
+ case SLJIT_REV:
+ return emit_op(compiler, op, flags, dst, dstw, TMP_ZERO, 0, src, srcw);
+
+ case SLJIT_REV_U16:
+ case SLJIT_REV_S16:
+ return emit_op(compiler, op, HALF_DATA, dst, dstw, TMP_ZERO, 0, src, srcw);
+
+ case SLJIT_REV_U32:
+ case SLJIT_REV_S32:
+ return emit_op(compiler, op | SLJIT_32, INT_DATA, dst, dstw, TMP_ZERO, 0, src, srcw);
+ }
+
+ SLJIT_UNREACHABLE();
+ return SLJIT_SUCCESS;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
+{
+ sljit_s32 flags = 0;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_op2(compiler, op, 0, dst, dstw, src1, src1w, src2, src2w));
+ ADJUST_LOCAL_OFFSET(dst, dstw);
+ ADJUST_LOCAL_OFFSET(src1, src1w);
+ ADJUST_LOCAL_OFFSET(src2, src2w);
+
+ if (op & SLJIT_32) {
+ flags |= INT_DATA | SIGNED_DATA;
+ if (src1 == SLJIT_IMM)
+ src1w = (sljit_s32)src1w;
+ if (src2 == SLJIT_IMM)
+ src2w = (sljit_s32)src2w;
+ }
+
+ switch (GET_OPCODE(op)) {
+ case SLJIT_ADD:
+ case SLJIT_ADDC:
+ compiler->status_flags_state = SLJIT_CURRENT_FLAGS_ADD;
+ return emit_op(compiler, op, flags | CUMULATIVE_OP | IMM_OP, dst, dstw, src1, src1w, src2, src2w);
+
+ case SLJIT_SUB:
+ case SLJIT_SUBC:
+ compiler->status_flags_state = SLJIT_CURRENT_FLAGS_SUB;
+ return emit_op(compiler, op, flags | IMM_OP, dst, dstw, src1, src1w, src2, src2w);
+
+ case SLJIT_MUL:
+ compiler->status_flags_state = 0;
+ return emit_op(compiler, op, flags | CUMULATIVE_OP, dst, dstw, src1, src1w, src2, src2w);
+
+ case SLJIT_AND:
+ case SLJIT_OR:
+ case SLJIT_XOR:
+ return emit_op(compiler, op, flags | CUMULATIVE_OP | IMM_OP, dst, dstw, src1, src1w, src2, src2w);
+
+ case SLJIT_SHL:
+ case SLJIT_MSHL:
+ case SLJIT_LSHR:
+ case SLJIT_MLSHR:
+ case SLJIT_ASHR:
+ case SLJIT_MASHR:
+ case SLJIT_ROTL:
+ case SLJIT_ROTR:
+ if (src2 == SLJIT_IMM) {
+ if (op & SLJIT_32)
+ src2w &= 0x1f;
+ else
+ src2w &= 0x3f;
+ }
+
+ return emit_op(compiler, op, flags | IMM_OP, dst, dstw, src1, src1w, src2, src2w);
+ }
+
+ SLJIT_UNREACHABLE();
+ return SLJIT_SUCCESS;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2u(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
+{
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_op2(compiler, op, 1, 0, 0, src1, src1w, src2, src2w));
+
+ SLJIT_SKIP_CHECKS(compiler);
+ return sljit_emit_op2(compiler, op, 0, 0, src1, src1w, src2, src2w);
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2r(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst_reg,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
+{
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_op2r(compiler, op, dst_reg, src1, src1w, src2, src2w));
+
+ switch (GET_OPCODE(op)) {
+ case SLJIT_MULADD:
+ SLJIT_SKIP_CHECKS(compiler);
+ FAIL_IF(sljit_emit_op2(compiler, SLJIT_MUL | (op & SLJIT_32), TMP_REG2, 0, src1, src1w, src2, src2w));
+ return push_inst(compiler, ADD_D | RD(dst_reg) | RJ(dst_reg) | RK(TMP_REG2));
+ }
+
+ return SLJIT_SUCCESS;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_shift_into(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst_reg,
+ sljit_s32 src1_reg,
+ sljit_s32 src2_reg,
+ sljit_s32 src3, sljit_sw src3w)
+{
+ sljit_s32 is_left;
+ sljit_ins ins1, ins2, ins3;
+ sljit_s32 inp_flags = ((op & SLJIT_32) ? INT_DATA : WORD_DATA) | LOAD_DATA;
+ sljit_sw bit_length = (op & SLJIT_32) ? 32 : 64;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_shift_into(compiler, op, dst_reg, src1_reg, src2_reg, src3, src3w));
+
+ is_left = (GET_OPCODE(op) == SLJIT_SHL || GET_OPCODE(op) == SLJIT_MSHL);
+
+ if (src1_reg == src2_reg) {
+ SLJIT_SKIP_CHECKS(compiler);
+ return sljit_emit_op2(compiler, (is_left ? SLJIT_ROTL : SLJIT_ROTR) | (op & SLJIT_32), dst_reg, 0, src1_reg, 0, src3, src3w);
+ }
+
+ ADJUST_LOCAL_OFFSET(src3, src3w);
+
+ if (src3 == SLJIT_IMM) {
+ src3w &= bit_length - 1;
+
+ if (src3w == 0)
+ return SLJIT_SUCCESS;
+
+ if (is_left) {
+ ins1 = INST(SLLI, op) | IMM_I12(src3w);
+ src3w = bit_length - src3w;
+ ins2 = INST(SRLI, op) | IMM_I12(src3w);
+ } else {
+ ins1 = INST(SRLI, op) | IMM_I12(src3w);
+ src3w = bit_length - src3w;
+ ins2 = INST(SLLI, op) | IMM_I12(src3w);
+ }
+
+ FAIL_IF(push_inst(compiler, ins1 | RD(dst_reg) | RJ(src1_reg)));
+ FAIL_IF(push_inst(compiler, ins2 | RD(TMP_REG1) | RJ(src2_reg)));
+ return push_inst(compiler, OR | RD(dst_reg) | RJ(dst_reg) | RK(TMP_REG1));
+ }
+
+ if (src3 & SLJIT_MEM) {
+ FAIL_IF(emit_op_mem(compiler, inp_flags, TMP_REG2, src3, src3w));
+ src3 = TMP_REG2;
+ } else if (dst_reg == src3) {
+ push_inst(compiler, INST(ADDI, op) | RD(TMP_REG2) | RJ(src3) | IMM_I12(0));
+ src3 = TMP_REG2;
+ }
+
+ if (is_left) {
+ ins1 = INST(SLL, op);
+ ins2 = INST(SRLI, op);
+ ins3 = INST(SRL, op);
+ } else {
+ ins1 = INST(SRL, op);
+ ins2 = INST(SLLI, op);
+ ins3 = INST(SLL, op);
+ }
+
+ FAIL_IF(push_inst(compiler, ins1 | RD(dst_reg) | RJ(src1_reg) | RK(src3)));
+
+ if (!(op & SLJIT_SHIFT_INTO_NON_ZERO)) {
+ FAIL_IF(push_inst(compiler, ins2 | RD(TMP_REG1) | RJ(src2_reg) | IMM_I12(1)));
+ FAIL_IF(push_inst(compiler, XORI | RD(TMP_REG2) | RJ(src3) | IMM_I12((sljit_ins)bit_length - 1)));
+ src2_reg = TMP_REG1;
+ } else
+ FAIL_IF(push_inst(compiler, INST(SUB, op) | RD(TMP_REG2) | RJ(TMP_ZERO) | RK(src3)));
+
+ FAIL_IF(push_inst(compiler, ins3 | RD(TMP_REG1) | RJ(src2_reg) | RK(TMP_REG2)));
+ return push_inst(compiler, OR | RD(dst_reg) | RJ(dst_reg) | RK(TMP_REG1));
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_src(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 src, sljit_sw srcw)
+{
+ sljit_s32 base = src & REG_MASK;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_op_src(compiler, op, src, srcw));
+ ADJUST_LOCAL_OFFSET(src, srcw);
+
+ switch (op) {
+ case SLJIT_FAST_RETURN:
+ if (FAST_IS_REG(src))
+ FAIL_IF(push_inst(compiler, ADDI_D | RD(RETURN_ADDR_REG) | RJ(src) | IMM_I12(0)));
+ else
+ FAIL_IF(emit_op_mem(compiler, WORD_DATA | LOAD_DATA, RETURN_ADDR_REG, src, srcw));
+
+ return push_inst(compiler, JIRL | RD(TMP_ZERO) | RJ(RETURN_ADDR_REG) | IMM_I12(0));
+ case SLJIT_SKIP_FRAMES_BEFORE_FAST_RETURN:
+ return SLJIT_SUCCESS;
+ case SLJIT_PREFETCH_L1:
+ case SLJIT_PREFETCH_L2:
+ case SLJIT_PREFETCH_L3:
+ case SLJIT_PREFETCH_ONCE:
+ if (SLJIT_UNLIKELY(src & OFFS_REG_MASK)) {
+ srcw &= 0x3;
+ if (SLJIT_UNLIKELY(srcw))
+ FAIL_IF(push_inst(compiler, SLLI_D | RD(TMP_REG1) | RJ(OFFS_REG(src)) | IMM_I12(srcw)));
+ FAIL_IF(push_inst(compiler, ADD_D | RD(TMP_REG1) | RJ(base) | RK(TMP_REG1)));
+ } else {
+ if (base && srcw <= I12_MAX && srcw >= I12_MIN)
+ return push_inst(compiler, PRELD | RJ(base) | IMM_I12(srcw));
+
+ FAIL_IF(load_immediate(compiler, TMP_REG1, srcw));
+ if (base != 0)
+ FAIL_IF(push_inst(compiler, ADD_D | RD(TMP_REG1) | RJ(base) | RK(TMP_REG1)));
+ }
+ return push_inst(compiler, PRELD | RD(0) | RJ(TMP_REG1));
+ }
+ return SLJIT_SUCCESS;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_dst(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw)
+{
+ sljit_s32 dst_r;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_op_dst(compiler, op, dst, dstw));
+ ADJUST_LOCAL_OFFSET(dst, dstw);
+
+ switch (op) {
+ case SLJIT_FAST_ENTER:
+ if (FAST_IS_REG(dst))
+ return push_inst(compiler, ADDI_D | RD(dst) | RJ(RETURN_ADDR_REG) | IMM_I12(0));
+
+ SLJIT_ASSERT(RETURN_ADDR_REG == TMP_REG2);
+ break;
+ case SLJIT_GET_RETURN_ADDRESS:
+ dst_r = FAST_IS_REG(dst) ? dst : TMP_REG2;
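+ /* The return address was stored just below the top of the frame by the function prologue. */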
+ FAIL_IF(emit_op_mem(compiler, WORD_DATA | LOAD_DATA, dst_r, SLJIT_MEM1(SLJIT_SP), compiler->local_size - SSIZE_OF(sw)));
+ break;
+ }
+
+ if (dst & SLJIT_MEM)
+ return emit_op_mem(compiler, WORD_DATA, TMP_REG2, dst, dstw);
+
+ return SLJIT_SUCCESS;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 type, sljit_s32 reg)
+{
+ CHECK_REG_INDEX(check_sljit_get_register_index(type, reg));
+
+ if (type == SLJIT_GP_REGISTER)
+ return reg_map[reg];
+
+ if (type != SLJIT_FLOAT_REGISTER && type != SLJIT_SIMD_REG_128 && type != SLJIT_SIMD_REG_256)
+ return -1;
+
+ return freg_map[reg];
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *compiler,
+ void *instruction, sljit_u32 size)
+{
+ SLJIT_UNUSED_ARG(size);
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_op_custom(compiler, instruction, size));
+
+ return push_inst(compiler, *(sljit_ins*)instruction);
+}
+
+/* --------------------------------------------------------------------- */
+/* Floating point operators */
+/* --------------------------------------------------------------------- */
+#define SET_COND(cond) (sljit_ins)(cond << 15)
+
+#define COND_CUN SET_COND(0x8) /* UN */
+#define COND_CEQ SET_COND(0x4) /* EQ */
+#define COND_CUEQ SET_COND(0xc) /* UN EQ */
+#define COND_CLT SET_COND(0x2) /* LT */
+#define COND_CULT SET_COND(0xa) /* UN LT */
+#define COND_CLE SET_COND(0x6) /* LT EQ */
+#define COND_CULE SET_COND(0xe) /* UN LT EQ */
+#define COND_CNE SET_COND(0x10) /* GT LT */
+#define COND_CUNE SET_COND(0x18) /* UN GT LT */
+#define COND_COR SET_COND(0x14) /* GT LT EQ */
+
+#define FINST(inst, type) (sljit_ins)((type & SLJIT_32) ? inst##_S : inst##_D)
+#define FCD(cd) (sljit_ins)(cd & 0x7)
+#define FCJ(cj) (sljit_ins)((cj & 0x7) << 5)
+#define FCA(ca) (sljit_ins)((ca & 0x7) << 15)
+#define F_OTHER_FLAG 1
+
+#define FLOAT_DATA(op) (DOUBLE_DATA | ((op & SLJIT_32) >> 7))
+
+/* Convert to integer, rounding toward zero. */
+static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
+{
+ sljit_ins inst;
+ sljit_u32 word_data = 0;
+ sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_REG2;
+
+ switch (GET_OPCODE(op))
+ {
+ case SLJIT_CONV_SW_FROM_F64:
+ word_data = 1;
+ inst = FINST(FTINTRZ_L, op);
+ break;
+ case SLJIT_CONV_S32_FROM_F64:
+ inst = FINST(FTINTRZ_W, op);
+ break;
+ default:
+ inst = BREAK;
+ SLJIT_UNREACHABLE();
+ }
+
+ if (src & SLJIT_MEM) {
+ FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src, srcw, dst, dstw));
+ src = TMP_FREG1;
+ }
+
+ FAIL_IF(push_inst(compiler, inst | FRD(TMP_FREG1) | FRJ(src)));
+ FAIL_IF(push_inst(compiler, FINST(MOVFR2GR, word_data) | RD(dst_r) | FRJ(TMP_FREG1)));
+
+ if (dst & SLJIT_MEM)
+ return emit_op_mem2(compiler, word_data ? WORD_DATA : INT_DATA, TMP_REG2, dst, dstw, 0, 0);
+ return SLJIT_SUCCESS;
+}
+
+static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_w(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
+{
+ sljit_ins inst;
+ sljit_u32 word_data = 0;
+ sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
+
+ switch (GET_OPCODE(op))
+ {
+ case SLJIT_CONV_F64_FROM_SW:
+ word_data = 1;
+ inst = (sljit_ins)((op & SLJIT_32) ? FFINT_S_L : FFINT_D_L);
+ break;
+ case SLJIT_CONV_F64_FROM_S32:
+ inst = (sljit_ins)((op & SLJIT_32) ? FFINT_S_W : FFINT_D_W);
+ break;
+ default:
+ inst = BREAK;
+ SLJIT_UNREACHABLE();
+ }
+
+ if (src & SLJIT_MEM) {
+ FAIL_IF(emit_op_mem2(compiler, (word_data ? WORD_DATA : INT_DATA) | LOAD_DATA, TMP_REG1, src, srcw, dst, dstw));
+ src = TMP_REG1;
+ } else if (src == SLJIT_IMM) {
+ if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_S32)
+ srcw = (sljit_s32)srcw;
+
+ FAIL_IF(load_immediate(compiler, TMP_REG1, srcw));
+ src = TMP_REG1;
+ }
+ FAIL_IF(push_inst(compiler, (word_data ? MOVGR2FR_D : MOVGR2FR_W) | FRD(dst_r) | RJ(src)));
+ FAIL_IF(push_inst(compiler, inst | FRD(dst_r) | FRJ(dst_r)));
+
+ if (dst & SLJIT_MEM)
+ return emit_op_mem2(compiler, FLOAT_DATA(op), TMP_FREG1, dst, dstw, 0, 0);
+ return SLJIT_SUCCESS;
+}
+
+static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_sw(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
+{
+ return sljit_emit_fop1_conv_f64_from_w(compiler, op, dst, dstw, src, srcw);
+}
+
+static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_uw(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
+{
+ sljit_ins inst;
+ sljit_u32 word_data = 0;
+ sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
+
+ switch (GET_OPCODE(op))
+ {
+ case SLJIT_CONV_F64_FROM_UW:
+ word_data = 1;
+ inst = (sljit_ins)((op & SLJIT_32) ? FFINT_S_L : FFINT_D_L);
+ break;
+ case SLJIT_CONV_F64_FROM_U32:
+ inst = (sljit_ins)((op & SLJIT_32) ? FFINT_S_W : FFINT_D_W);
+ break;
+ default:
+ inst = BREAK;
+ SLJIT_UNREACHABLE();
+ }
+
+ if (src & SLJIT_MEM) {
+ FAIL_IF(emit_op_mem2(compiler, (word_data ? WORD_DATA : INT_DATA) | LOAD_DATA, TMP_REG1, src, srcw, dst, dstw));
+ src = TMP_REG1;
+ } else if (src == SLJIT_IMM) {
+ if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_U32)
+ srcw = (sljit_u32)srcw;
+
+ FAIL_IF(load_immediate(compiler, TMP_REG1, srcw));
+ src = TMP_REG1;
+ }
+
+ if (!word_data)
+ FAIL_IF(push_inst(compiler, SRLI_W | RD(src) | RJ(src) | IMM_I12(0)));
+
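+ /* Non-negative values are converted directly as signed integers; values with the top bit set are halved (keeping the dropped bit for rounding), converted, and then doubled with an FP add. */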
+ FAIL_IF(push_inst(compiler, BLT | RJ(src) | RD(TMP_ZERO) | IMM_I16(4)));
+
+ FAIL_IF(push_inst(compiler, (word_data ? MOVGR2FR_D : MOVGR2FR_W) | FRD(dst_r) | RJ(src)));
+ FAIL_IF(push_inst(compiler, inst | FRD(dst_r) | FRJ(dst_r)));
+ FAIL_IF(push_inst(compiler, B | IMM_I26(7)));
+
+ FAIL_IF(push_inst(compiler, ANDI | RD(TMP_REG2) | RJ(src) | IMM_I12(1)));
+ FAIL_IF(push_inst(compiler, (word_data ? SRLI_D : SRLI_W) | RD(TMP_REG1) | RJ(src) | IMM_I12(1)));
+ FAIL_IF(push_inst(compiler, OR | RD(TMP_REG1) | RJ(TMP_REG1) | RK(TMP_REG2)));
+ FAIL_IF(push_inst(compiler, INST(MOVGR2FR, (!word_data)) | FRD(dst_r) | RJ(TMP_REG1)));
+ FAIL_IF(push_inst(compiler, inst | FRD(dst_r) | FRJ(dst_r)));
+ FAIL_IF(push_inst(compiler, FINST(FADD, op) | FRD(dst_r) | FRJ(dst_r) | FRK(dst_r)));
+
+ if (dst & SLJIT_MEM)
+ return emit_op_mem2(compiler, FLOAT_DATA(op), TMP_FREG1, dst, dstw, 0, 0);
+ return SLJIT_SUCCESS;
+}
+
+static SLJIT_INLINE sljit_s32 sljit_emit_fop1_cmp(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
+{
+ if (src1 & SLJIT_MEM) {
+ FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src1, src1w, src2, src2w));
+ src1 = TMP_FREG1;
+ }
+
+ if (src2 & SLJIT_MEM) {
+ FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG2, src2, src2w, 0, 0));
+ src2 = TMP_FREG2;
+ }
+
+ FAIL_IF(push_inst(compiler, XOR | RD(OTHER_FLAG) | RJ(OTHER_FLAG) | RK(OTHER_FLAG)));
+
+ switch (GET_FLAG_TYPE(op)) {
+ case SLJIT_F_EQUAL:
+ case SLJIT_ORDERED_EQUAL:
+ FAIL_IF(push_inst(compiler, FINST(FCMP_COND, op) | COND_CEQ | FCD(F_OTHER_FLAG) | FRJ(src1) | FRK(src2)));
+ break;
+ case SLJIT_F_LESS:
+ case SLJIT_ORDERED_LESS:
+ FAIL_IF(push_inst(compiler, FINST(FCMP_COND, op) | COND_CLT | FCD(F_OTHER_FLAG) | FRJ(src1) | FRK(src2)));
+ break;
+ case SLJIT_F_GREATER:
+ case SLJIT_ORDERED_GREATER:
+ FAIL_IF(push_inst(compiler, FINST(FCMP_COND, op) | COND_CLT | FCD(F_OTHER_FLAG) | FRJ(src2) | FRK(src1)));
+ break;
+ case SLJIT_UNORDERED_OR_GREATER:
+ FAIL_IF(push_inst(compiler, FINST(FCMP_COND, op) | COND_CULT | FCD(F_OTHER_FLAG) | FRJ(src2) | FRK(src1)));
+ break;
+ case SLJIT_UNORDERED_OR_LESS:
+ FAIL_IF(push_inst(compiler, FINST(FCMP_COND, op) | COND_CULT | FCD(F_OTHER_FLAG) | FRJ(src1) | FRK(src2)));
+ break;
+ case SLJIT_UNORDERED_OR_EQUAL:
+ FAIL_IF(push_inst(compiler, FINST(FCMP_COND, op) | COND_CUEQ | FCD(F_OTHER_FLAG) | FRJ(src1) | FRK(src2)));
+ break;
+ default: /* SLJIT_UNORDERED */
+ FAIL_IF(push_inst(compiler, FINST(FCMP_COND, op) | COND_CUN | FCD(F_OTHER_FLAG) | FRJ(src1) | FRK(src2)));
+ }
+ return push_inst(compiler, MOVCF2GR | RD(OTHER_FLAG) | FCJ(F_OTHER_FLAG));
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
+{
+ sljit_s32 dst_r;
+
+ CHECK_ERROR();
+ compiler->cache_arg = 0;
+ compiler->cache_argw = 0;
+
+ SLJIT_COMPILE_ASSERT((SLJIT_32 == 0x100) && !(DOUBLE_DATA & 0x2), float_transfer_bit_error);
+ SELECT_FOP1_OPERATION_WITH_CHECKS(compiler, op, dst, dstw, src, srcw);
+
+ if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_F32)
+ op ^= SLJIT_32;
+
+ dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
+
+ if (src & SLJIT_MEM) {
+ FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op) | LOAD_DATA, dst_r, src, srcw, dst, dstw));
+ src = dst_r;
+ }
+
+ switch (GET_OPCODE(op)) {
+ case SLJIT_MOV_F64:
+ if (src != dst_r) {
+ if (!(dst & SLJIT_MEM))
+ FAIL_IF(push_inst(compiler, FINST(FMOV, op) | FRD(dst_r) | FRJ(src)));
+ else
+ dst_r = src;
+ }
+ break;
+ case SLJIT_NEG_F64:
+ FAIL_IF(push_inst(compiler, FINST(FNEG, op) | FRD(dst_r) | FRJ(src)));
+ break;
+ case SLJIT_ABS_F64:
+ FAIL_IF(push_inst(compiler, FINST(FABS, op) | FRD(dst_r) | FRJ(src)));
+ break;
+ case SLJIT_CONV_F64_FROM_F32:
+ /* The SLJIT_32 bit is inverted because sljit_f32 needs to be loaded from memory. */
+ FAIL_IF(push_inst(compiler, ((op & SLJIT_32) ? FCVT_D_S : FCVT_S_D) | FRD(dst_r) | FRJ(src)));
+ op ^= SLJIT_32;
+ break;
+ }
+
+ if (dst & SLJIT_MEM)
+ return emit_op_mem2(compiler, FLOAT_DATA(op), dst_r, dst, dstw, 0, 0);
+ return SLJIT_SUCCESS;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
+{
+ sljit_s32 dst_r, flags = 0;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_fop2(compiler, op, dst, dstw, src1, src1w, src2, src2w));
+ ADJUST_LOCAL_OFFSET(dst, dstw);
+ ADJUST_LOCAL_OFFSET(src1, src1w);
+ ADJUST_LOCAL_OFFSET(src2, src2w);
+
+ compiler->cache_arg = 0;
+ compiler->cache_argw = 0;
+
+ dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG2;
+
+ if (src1 & SLJIT_MEM) {
+ if (getput_arg_fast(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src1, src1w)) {
+ FAIL_IF(compiler->error);
+ src1 = TMP_FREG1;
+ } else
+ flags |= SLOW_SRC1;
+ }
+
+ if (src2 & SLJIT_MEM) {
+ if (getput_arg_fast(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG2, src2, src2w)) {
+ FAIL_IF(compiler->error);
+ src2 = TMP_FREG2;
+ } else
+ flags |= SLOW_SRC2;
+ }
+
+ if ((flags & (SLOW_SRC1 | SLOW_SRC2)) == (SLOW_SRC1 | SLOW_SRC2)) {
+ if ((dst & SLJIT_MEM) && !can_cache(src1, src1w, src2, src2w) && can_cache(src1, src1w, dst, dstw)) {
+ FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG2, src2, src2w, src1, src1w));
+ FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src1, src1w, dst, dstw));
+ } else {
+ FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src1, src1w, src2, src2w));
+ FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG2, src2, src2w, dst, dstw));
+ }
+ }
+ else if (flags & SLOW_SRC1)
+ FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src1, src1w, dst, dstw));
+ else if (flags & SLOW_SRC2)
+ FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG2, src2, src2w, dst, dstw));
+
+ if (flags & SLOW_SRC1)
+ src1 = TMP_FREG1;
+ if (flags & SLOW_SRC2)
+ src2 = TMP_FREG2;
+
+ switch (GET_OPCODE(op)) {
+ case SLJIT_ADD_F64:
+ FAIL_IF(push_inst(compiler, FINST(FADD, op) | FRD(dst_r) | FRJ(src1) | FRK(src2)));
+ break;
+ case SLJIT_SUB_F64:
+ FAIL_IF(push_inst(compiler, FINST(FSUB, op) | FRD(dst_r) | FRJ(src1) | FRK(src2)));
+ break;
+ case SLJIT_MUL_F64:
+ FAIL_IF(push_inst(compiler, FINST(FMUL, op) | FRD(dst_r) | FRJ(src1) | FRK(src2)));
+ break;
+ case SLJIT_DIV_F64:
+ FAIL_IF(push_inst(compiler, FINST(FDIV, op) | FRD(dst_r) | FRJ(src1) | FRK(src2)));
+ break;
+ }
+
+ if (dst_r != dst)
+ FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op), TMP_FREG2, dst, dstw, 0, 0));
+ return SLJIT_SUCCESS;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2r(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst_freg,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
+{
+ sljit_s32 reg;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_fop2r(compiler, op, dst_freg, src1, src1w, src2, src2w));
+ ADJUST_LOCAL_OFFSET(src1, src1w);
+ ADJUST_LOCAL_OFFSET(src2, src2w);
+
+ if (src2 & SLJIT_MEM) {
+ FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src2, src2w, 0, 0));
+ src2 = TMP_FREG1;
+ }
+
+ if (src1 & SLJIT_MEM) {
+ reg = (dst_freg == src2) ? TMP_FREG1 : dst_freg;
+ FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op) | LOAD_DATA, reg, src1, src1w, 0, 0));
+ src1 = reg;
+ }
+
+ return push_inst(compiler, FINST(FCOPYSIGN, op) | FRD(dst_freg) | FRJ(src1) | FRK(src2));
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fset32(struct sljit_compiler *compiler,
+ sljit_s32 freg, sljit_f32 value)
+{
+ union {
+ sljit_s32 imm;
+ sljit_f32 value;
+ } u;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_fset32(compiler, freg, value));
+
+ u.value = value;
+
+ if (u.imm == 0)
+ return push_inst(compiler, MOVGR2FR_W | RJ(TMP_ZERO) | FRD(freg));
+
+ FAIL_IF(load_immediate(compiler, TMP_REG1, u.imm));
+ return push_inst(compiler, MOVGR2FR_W | RJ(TMP_REG1) | FRD(freg));
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fset64(struct sljit_compiler *compiler,
+ sljit_s32 freg, sljit_f64 value)
+{
+ union {
+ sljit_sw imm;
+ sljit_f64 value;
+ } u;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_fset64(compiler, freg, value));
+
+ u.value = value;
+
+ if (u.imm == 0)
+ return push_inst(compiler, MOVGR2FR_D | RJ(TMP_ZERO) | FRD(freg));
+
+ FAIL_IF(load_immediate(compiler, TMP_REG1, u.imm));
+ return push_inst(compiler, MOVGR2FR_D | RJ(TMP_REG1) | FRD(freg));
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fcopy(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 freg, sljit_s32 reg)
+{
+ sljit_ins inst;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_fcopy(compiler, op, freg, reg));
+
+ if (GET_OPCODE(op) == SLJIT_COPY_TO_F64)
+ inst = ((op & SLJIT_32) ? MOVGR2FR_W : MOVGR2FR_D) | FRD(freg) | RJ(reg);
+ else
+ inst = ((op & SLJIT_32) ? MOVFR2GR_S : MOVFR2GR_D) | RD(reg) | FRJ(freg);
+ return push_inst(compiler, inst);
+}
+
+/* --------------------------------------------------------------------- */
+/* Conditional instructions */
+/* --------------------------------------------------------------------- */
+
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emit_label(struct sljit_compiler *compiler)
+{
+ struct sljit_label *label;
+
+ CHECK_ERROR_PTR();
+ CHECK_PTR(check_sljit_emit_label(compiler));
+
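+ /* Reuse the previous label when no instruction has been emitted since it was created. */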
+ if (compiler->last_label && compiler->last_label->size == compiler->size)
+ return compiler->last_label;
+
+ label = (struct sljit_label*)ensure_abuf(compiler, sizeof(struct sljit_label));
+ PTR_FAIL_IF(!label);
+ set_label(label, compiler);
+ return label;
+}
+
+static sljit_ins get_jump_instruction(sljit_s32 type)
+{
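+ /* The condition is reversed: the emitted branch skips the unconditional jump that follows when the SLJIT condition does not hold. */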
+ switch (type) {
+ case SLJIT_EQUAL:
+ case SLJIT_ATOMIC_NOT_STORED:
+ return BNE | RJ(EQUAL_FLAG) | RD(TMP_ZERO);
+ case SLJIT_NOT_EQUAL:
+ case SLJIT_ATOMIC_STORED:
+ return BEQ | RJ(EQUAL_FLAG) | RD(TMP_ZERO);
+ case SLJIT_LESS:
+ case SLJIT_GREATER:
+ case SLJIT_SIG_LESS:
+ case SLJIT_SIG_GREATER:
+ case SLJIT_OVERFLOW:
+ case SLJIT_CARRY:
+ return BEQ | RJ(OTHER_FLAG) | RD(TMP_ZERO);
+ case SLJIT_GREATER_EQUAL:
+ case SLJIT_LESS_EQUAL:
+ case SLJIT_SIG_GREATER_EQUAL:
+ case SLJIT_SIG_LESS_EQUAL:
+ case SLJIT_NOT_OVERFLOW:
+ case SLJIT_NOT_CARRY:
+ return BNE | RJ(OTHER_FLAG) | RD(TMP_ZERO);
+ case SLJIT_F_EQUAL:
+ case SLJIT_ORDERED_EQUAL:
+ case SLJIT_F_LESS:
+ case SLJIT_ORDERED_LESS:
+ case SLJIT_ORDERED_GREATER:
+ case SLJIT_UNORDERED_OR_GREATER:
+ case SLJIT_F_GREATER:
+ case SLJIT_UNORDERED_OR_LESS:
+ case SLJIT_UNORDERED_OR_EQUAL:
+ case SLJIT_UNORDERED:
+ return BEQ | RJ(OTHER_FLAG) | RD(TMP_ZERO);
+ case SLJIT_ORDERED_NOT_EQUAL:
+ case SLJIT_ORDERED_LESS_EQUAL:
+ case SLJIT_ORDERED_GREATER_EQUAL:
+ case SLJIT_F_NOT_EQUAL:
+ case SLJIT_UNORDERED_OR_NOT_EQUAL:
+ case SLJIT_UNORDERED_OR_GREATER_EQUAL:
+ case SLJIT_UNORDERED_OR_LESS_EQUAL:
+ case SLJIT_F_LESS_EQUAL:
+ case SLJIT_F_GREATER_EQUAL:
+ case SLJIT_ORDERED:
+ return BNE | RJ(OTHER_FLAG) | RD(TMP_ZERO);
+ default:
+ /* Not conditional branch. */
+ return 0;
+ }
+}
+
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, sljit_s32 type)
+{
+ struct sljit_jump *jump;
+ sljit_ins inst;
+
+ CHECK_ERROR_PTR();
+ CHECK_PTR(check_sljit_emit_jump(compiler, type));
+
+ jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
+ PTR_FAIL_IF(!jump);
+ set_jump(jump, compiler, type & SLJIT_REWRITABLE_JUMP);
+ type &= 0xff;
+
+ inst = get_jump_instruction(type);
+
+ if (inst != 0) {
+ PTR_FAIL_IF(push_inst(compiler, inst));
+ jump->flags |= IS_COND;
+ }
+
+ jump->addr = compiler->size;
+ inst = JIRL | RJ(TMP_REG1) | IMM_I16(0);
+
+ if (type >= SLJIT_FAST_CALL) {
+ jump->flags |= IS_CALL;
+ inst |= RD(RETURN_ADDR_REG);
+ }
+
+ PTR_FAIL_IF(push_inst(compiler, inst));
+
+ /* Reserve space for the maximum number of instructions required to generate the target address constant. */
+ compiler->size += JUMP_MAX_SIZE - 1;
+ return jump;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_call(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 arg_types)
+{
+ SLJIT_UNUSED_ARG(arg_types);
+ CHECK_ERROR_PTR();
+ CHECK_PTR(check_sljit_emit_call(compiler, type, arg_types));
+
+ if (type & SLJIT_CALL_RETURN) {
+ PTR_FAIL_IF(emit_stack_frame_release(compiler, 0));
+ type = SLJIT_JUMP | (type & SLJIT_REWRITABLE_JUMP);
+ }
+
+ SLJIT_SKIP_CHECKS(compiler);
+ return sljit_emit_jump(compiler, type);
+}
+
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_cmp(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
+{
+ struct sljit_jump *jump;
+ sljit_s32 flags;
+ sljit_ins inst;
+ sljit_s32 src2_tmp_reg = FAST_IS_REG(src1) ? TMP_REG1 : TMP_REG2;
+
+ CHECK_ERROR_PTR();
+ CHECK_PTR(check_sljit_emit_cmp(compiler, type, src1, src1w, src2, src2w));
+ ADJUST_LOCAL_OFFSET(src1, src1w);
+ ADJUST_LOCAL_OFFSET(src2, src2w);
+
+ compiler->cache_arg = 0;
+ compiler->cache_argw = 0;
+
+ flags = ((type & SLJIT_32) ? INT_DATA : WORD_DATA) | LOAD_DATA;
+
+ if (src1 & SLJIT_MEM) {
+ PTR_FAIL_IF(emit_op_mem2(compiler, flags, TMP_REG1, src1, src1w, src2, src2w));
+ src1 = TMP_REG1;
+ }
+
+ if (src2 & SLJIT_MEM) {
+ PTR_FAIL_IF(emit_op_mem2(compiler, flags, src2_tmp_reg, src2, src2w, 0, 0));
+ src2 = src2_tmp_reg;
+ }
+
+ if (src1 == SLJIT_IMM) {
+ if (src1w != 0) {
+ PTR_FAIL_IF(load_immediate(compiler, TMP_REG1, src1w));
+ src1 = TMP_REG1;
+ }
+ else
+ src1 = TMP_ZERO;
+ }
+
+ if (src2 == SLJIT_IMM) {
+ if (src2w != 0) {
+ PTR_FAIL_IF(load_immediate(compiler, src2_tmp_reg, src2w));
+ src2 = src2_tmp_reg;
+ }
+ else
+ src2 = TMP_ZERO;
+ }
+
+ jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
+ PTR_FAIL_IF(!jump);
+ set_jump(jump, compiler, (sljit_u32)((type & SLJIT_REWRITABLE_JUMP) | IS_COND));
+ type &= 0xff;
+
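+ /* As in get_jump_instruction(), the branch emitted below tests the opposite
+ condition, so it skips the register-indirect jump that follows when the
+ comparison does not hold. */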
+ switch (type) {
+ case SLJIT_EQUAL:
+ inst = BNE | RJ(src1) | RD(src2);
+ break;
+ case SLJIT_NOT_EQUAL:
+ inst = BEQ | RJ(src1) | RD(src2);
+ break;
+ case SLJIT_LESS:
+ inst = BGEU | RJ(src1) | RD(src2);
+ break;
+ case SLJIT_GREATER_EQUAL:
+ inst = BLTU | RJ(src1) | RD(src2);
+ break;
+ case SLJIT_GREATER:
+ inst = BGEU | RJ(src2) | RD(src1);
+ break;
+ case SLJIT_LESS_EQUAL:
+ inst = BLTU | RJ(src2) | RD(src1);
+ break;
+ case SLJIT_SIG_LESS:
+ inst = BGE | RJ(src1) | RD(src2);
+ break;
+ case SLJIT_SIG_GREATER_EQUAL:
+ inst = BLT | RJ(src1) | RD(src2);
+ break;
+ case SLJIT_SIG_GREATER:
+ inst = BGE | RJ(src2) | RD(src1);
+ break;
+ case SLJIT_SIG_LESS_EQUAL:
+ inst = BLT | RJ(src2) | RD(src1);
+ break;
+ default:
+ inst = BREAK;
+ SLJIT_UNREACHABLE();
+ }
+
+ PTR_FAIL_IF(push_inst(compiler, inst));
+
+ jump->addr = compiler->size;
+ PTR_FAIL_IF(push_inst(compiler, JIRL | RD(TMP_ZERO) | RJ(TMP_REG1) | IMM_I12(0)));
+
+ /* Maximum number of instructions required for generating a constant. */
+ compiler->size += JUMP_MAX_SIZE - 1;
+
+ return jump;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 src, sljit_sw srcw)
+{
+ struct sljit_jump *jump;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_ijump(compiler, type, src, srcw));
+
+ if (src != SLJIT_IMM) {
+ if (src & SLJIT_MEM) {
+ ADJUST_LOCAL_OFFSET(src, srcw);
+ FAIL_IF(emit_op_mem(compiler, WORD_DATA | LOAD_DATA, TMP_REG1, src, srcw));
+ src = TMP_REG1;
+ }
+ return push_inst(compiler, JIRL | RD((type >= SLJIT_FAST_CALL) ? RETURN_ADDR_REG : TMP_ZERO) | RJ(src) | IMM_I12(0));
+ }
+
+ /* These jumps are converted to jump/call instructions when possible. */
+ jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
+ FAIL_IF(!jump);
+ set_jump(jump, compiler, JUMP_ADDR | ((type >= SLJIT_FAST_CALL) ? IS_CALL : 0));
+ jump->u.target = (sljit_uw)srcw;
+
+ jump->addr = compiler->size;
+ FAIL_IF(push_inst(compiler, JIRL | RD((type >= SLJIT_FAST_CALL) ? RETURN_ADDR_REG : TMP_ZERO) | RJ(TMP_REG1) | IMM_I12(0)));
+
+ /* Maximum number of instructions required for generating a constant. */
+ compiler->size += JUMP_MAX_SIZE - 1;
+
+ return SLJIT_SUCCESS;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_icall(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 arg_types,
+ sljit_s32 src, sljit_sw srcw)
+{
+ SLJIT_UNUSED_ARG(arg_types);
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_icall(compiler, type, arg_types, src, srcw));
+
+ if (src & SLJIT_MEM) {
+ ADJUST_LOCAL_OFFSET(src, srcw);
+ FAIL_IF(emit_op_mem(compiler, WORD_DATA | LOAD_DATA, TMP_REG1, src, srcw));
+ src = TMP_REG1;
+ }
+
+ if (type & SLJIT_CALL_RETURN) {
+ if (src >= SLJIT_FIRST_SAVED_REG && src <= (SLJIT_S0 - SLJIT_KEPT_SAVEDS_COUNT(compiler->options))) {
+ FAIL_IF(push_inst(compiler, ADDI_D | RD(TMP_REG1) | RJ(src) | IMM_I12(0)));
+ src = TMP_REG1;
+ }
+
+ FAIL_IF(emit_stack_frame_release(compiler, 0));
+ type = SLJIT_JUMP;
+ }
+
+ SLJIT_SKIP_CHECKS(compiler);
+ return sljit_emit_ijump(compiler, type, src, srcw);
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 type)
+{
+ sljit_s32 src_r, dst_r, invert;
+ sljit_s32 saved_op = op;
+ sljit_s32 mem_type = ((op & SLJIT_32) || op == SLJIT_MOV32) ? (INT_DATA | SIGNED_DATA) : WORD_DATA;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_op_flags(compiler, op, dst, dstw, type));
+ ADJUST_LOCAL_OFFSET(dst, dstw);
+
+ op = GET_OPCODE(op);
+ dst_r = (op < SLJIT_ADD && FAST_IS_REG(dst)) ? dst : TMP_REG2;
+
+ compiler->cache_arg = 0;
+ compiler->cache_argw = 0;
+
+ if (op >= SLJIT_ADD && (dst & SLJIT_MEM))
+ FAIL_IF(emit_op_mem2(compiler, mem_type | LOAD_DATA, TMP_REG1, dst, dstw, dst, dstw));
+
+ if (type < SLJIT_F_EQUAL) {
+ src_r = OTHER_FLAG;
+ invert = type & 0x1;
+
+ switch (type) {
+ case SLJIT_EQUAL:
+ case SLJIT_NOT_EQUAL:
+ FAIL_IF(push_inst(compiler, SLTUI | RD(dst_r) | RJ(EQUAL_FLAG) | IMM_I12(1)));
+ src_r = dst_r;
+ break;
+ case SLJIT_ATOMIC_STORED:
+ case SLJIT_ATOMIC_NOT_STORED:
+ FAIL_IF(push_inst(compiler, SLTUI | RD(dst_r) | RJ(EQUAL_FLAG) | IMM_I12(1)));
+ src_r = dst_r;
+ invert ^= 0x1;
+ break;
+ case SLJIT_OVERFLOW:
+ case SLJIT_NOT_OVERFLOW:
+ if (compiler->status_flags_state & (SLJIT_CURRENT_FLAGS_ADD | SLJIT_CURRENT_FLAGS_SUB)) {
+ src_r = OTHER_FLAG;
+ break;
+ }
+ FAIL_IF(push_inst(compiler, SLTUI | RD(dst_r) | RJ(OTHER_FLAG) | IMM_I12(1)));
+ src_r = dst_r;
+ invert ^= 0x1;
+ break;
+ }
+ } else {
+ invert = 0;
+ src_r = OTHER_FLAG;
+
+ switch (type) {
+ case SLJIT_ORDERED_NOT_EQUAL:
+ case SLJIT_ORDERED_LESS_EQUAL:
+ case SLJIT_ORDERED_GREATER_EQUAL:
+ case SLJIT_F_NOT_EQUAL:
+ case SLJIT_UNORDERED_OR_NOT_EQUAL:
+ case SLJIT_UNORDERED_OR_GREATER_EQUAL:
+ case SLJIT_UNORDERED_OR_LESS_EQUAL:
+ case SLJIT_F_LESS_EQUAL:
+ case SLJIT_F_GREATER_EQUAL:
+ case SLJIT_ORDERED:
+ invert = 1;
+ break;
+ }
+ }
+
+ if (invert) {
+ FAIL_IF(push_inst(compiler, XORI | RD(dst_r) | RJ(src_r) | IMM_I12(1)));
+ src_r = dst_r;
+ }
+
+ if (op < SLJIT_ADD) {
+ if (dst & SLJIT_MEM)
+ return emit_op_mem(compiler, mem_type, src_r, dst, dstw);
+
+ if (src_r != dst_r)
+ return push_inst(compiler, ADDI_D | RD(dst_r) | RJ(src_r) | IMM_I12(0));
+ return SLJIT_SUCCESS;
+ }
+
+ mem_type |= CUMULATIVE_OP | IMM_OP | ALT_KEEP_CACHE;
+
+ if (dst & SLJIT_MEM)
+ return emit_op(compiler, saved_op, mem_type, dst, dstw, TMP_REG1, 0, src_r, 0);
+ return emit_op(compiler, saved_op, mem_type, dst, dstw, dst, dstw, src_r, 0);
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_select(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 dst_reg,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2_reg)
+{
+ sljit_ins *ptr;
+ sljit_uw size;
+ sljit_s32 inp_flags = ((type & SLJIT_32) ? INT_DATA : WORD_DATA) | LOAD_DATA;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_select(compiler, type, dst_reg, src1, src1w, src2_reg));
+ ADJUST_LOCAL_OFFSET(src1, src1w);
+
+ if (dst_reg != src2_reg) {
+ if (dst_reg == src1) {
+ src1 = src2_reg;
+ src1w = 0;
+ type ^= 0x1;
+ } else {
+ if (ADDRESSING_DEPENDS_ON(src1, dst_reg)) {
+ FAIL_IF(push_inst(compiler, ADDI_D | RD(TMP_REG1) | RJ(dst_reg) | IMM_I12(0)));
+
+ if ((src1 & REG_MASK) == dst_reg)
+ src1 = (src1 & ~REG_MASK) | TMP_REG1;
+
+ if (OFFS_REG(src1) == dst_reg)
+ src1 = (src1 & ~OFFS_REG_MASK) | TO_OFFS_REG(TMP_REG1);
+ }
+
+ FAIL_IF(push_inst(compiler, ADDI_D | RD(dst_reg) | RJ(src2_reg) | IMM_I12(0)));
+ }
+ }
+
+ size = compiler->size;
+
+ ptr = (sljit_ins*)ensure_buf(compiler, sizeof(sljit_ins));
+ FAIL_IF(!ptr);
+ compiler->size++;
+
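+ /* The instruction slot reserved above is filled at the end of this function
+ with an inverted conditional branch (see get_jump_instruction()) that jumps
+ past the src1 load, leaving the src2 value in dst_reg when the condition
+ does not hold. */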
+ if (src1 & SLJIT_MEM) {
+ FAIL_IF(emit_op_mem(compiler, inp_flags, dst_reg, src1, src1w));
+ } else if (src1 == SLJIT_IMM) {
+ if (type & SLJIT_32)
+ src1w = (sljit_s32)src1w;
+ FAIL_IF(load_immediate(compiler, dst_reg, src1w));
+ } else
+ FAIL_IF(push_inst(compiler, ADDI_D | RD(dst_reg) | RJ(src1) | IMM_I12(0)));
+
+ *ptr = get_jump_instruction(type & ~SLJIT_32) | IMM_I16(compiler->size - size);
+ return SLJIT_SUCCESS;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fselect(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 dst_freg,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2_freg)
+{
+ sljit_s32 invert = 0;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_fselect(compiler, type, dst_freg, src1, src1w, src2_freg));
+
+ ADJUST_LOCAL_OFFSET(src1, src1w);
+
+ if ((type & ~SLJIT_32) == SLJIT_EQUAL || (type & ~SLJIT_32) == SLJIT_NOT_EQUAL) {
+ if ((type & ~SLJIT_32) == SLJIT_EQUAL)
+ invert = 1;
+ FAIL_IF(push_inst(compiler, MOVGR2CF | FCD(F_OTHER_FLAG) | RJ(EQUAL_FLAG)));
+ } else {
+ if (get_jump_instruction(type & ~SLJIT_32) == (BNE | RJ(OTHER_FLAG) | RD(TMP_ZERO)))
+ invert = 1;
+ FAIL_IF(push_inst(compiler, MOVGR2CF | FCD(F_OTHER_FLAG) | RJ(OTHER_FLAG)));
+ }
+
+ if (src1 & SLJIT_MEM) {
+ FAIL_IF(emit_op_mem(compiler, FLOAT_DATA(type) | LOAD_DATA, TMP_FREG2, src1, src1w));
+ if (invert)
+ return push_inst(compiler, FSEL | FRD(dst_freg) | FRJ(TMP_FREG2) | FRK(src2_freg) | FCA(F_OTHER_FLAG));
+ return push_inst(compiler, FSEL | FRD(dst_freg) | FRJ(src2_freg) | FRK(TMP_FREG2) | FCA(F_OTHER_FLAG));
+ } else {
+ if (invert)
+ return push_inst(compiler, FSEL | FRD(dst_freg) | FRJ(src1) | FRK(src2_freg) | FCA(F_OTHER_FLAG));
+ return push_inst(compiler, FSEL | FRD(dst_freg) | FRJ(src2_freg) | FRK(src1) | FCA(F_OTHER_FLAG));
+ }
+}
+
+#undef FLOAT_DATA
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_mem(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 reg,
+ sljit_s32 mem, sljit_sw memw)
+{
+ sljit_s32 flags;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_mem(compiler, type, reg, mem, memw));
+
+ if (!(reg & REG_PAIR_MASK))
+ return sljit_emit_mem_unaligned(compiler, type, reg, mem, memw);
+
+ if (SLJIT_UNLIKELY(mem & OFFS_REG_MASK)) {
+ memw &= 0x3;
+
+ if (SLJIT_UNLIKELY(memw != 0)) {
+ FAIL_IF(push_inst(compiler, SLLI_D | RD(TMP_REG1) | RJ(OFFS_REG(mem)) | IMM_I12(memw)));
+ FAIL_IF(push_inst(compiler, ADD_D | RD(TMP_REG1) | RJ(TMP_REG1) | RK(mem & REG_MASK)));
+ } else
+ FAIL_IF(push_inst(compiler, ADD_D | RD(TMP_REG1) | RJ(mem & REG_MASK) | RK(OFFS_REG(mem))));
+
+ mem = TMP_REG1;
+ memw = 0;
+ } else if (memw > I12_MAX - SSIZE_OF(sw) || memw < I12_MIN) {
+ if (((memw + 0x800) & 0xfff) <= 0xfff - SSIZE_OF(sw)) {
+ FAIL_IF(load_immediate(compiler, TMP_REG1, TO_ARGW_HI(memw)));
+ memw &= 0xfff;
+ } else {
+ FAIL_IF(load_immediate(compiler, TMP_REG1, memw));
+ memw = 0;
+ }
+
+ if (mem & REG_MASK)
+ FAIL_IF(push_inst(compiler, ADD_D | RD(TMP_REG1) | RJ(TMP_REG1) | RK(mem & REG_MASK)));
+
+ mem = TMP_REG1;
+ } else {
+ mem &= REG_MASK;
+ memw &= 0xfff;
+ }
+
+ SLJIT_ASSERT((memw >= 0 && memw <= I12_MAX - SSIZE_OF(sw)) || (memw > I12_MAX && memw <= 0xfff));
+
+ if (!(type & SLJIT_MEM_STORE) && mem == REG_PAIR_FIRST(reg)) {
+ FAIL_IF(push_mem_inst(compiler, WORD_DATA | LOAD_DATA, REG_PAIR_SECOND(reg), SLJIT_MEM1(mem), (memw + SSIZE_OF(sw)) & 0xfff));
+ return push_mem_inst(compiler, WORD_DATA | LOAD_DATA, REG_PAIR_FIRST(reg), SLJIT_MEM1(mem), memw);
+ }
+
+ flags = WORD_DATA | (!(type & SLJIT_MEM_STORE) ? LOAD_DATA : 0);
+
+ FAIL_IF(push_mem_inst(compiler, flags, REG_PAIR_FIRST(reg), SLJIT_MEM1(mem), memw));
+ return push_mem_inst(compiler, flags, REG_PAIR_SECOND(reg), SLJIT_MEM1(mem), (memw + SSIZE_OF(sw)) & 0xfff);
+}
+
+#undef TO_ARGW_HI
+
+static sljit_s32 sljit_emit_simd_mem_offset(struct sljit_compiler *compiler, sljit_s32 *mem_ptr, sljit_sw memw)
+{
+ sljit_s32 mem = *mem_ptr;
+
+ if (SLJIT_UNLIKELY(mem & OFFS_REG_MASK)) {
+ *mem_ptr = TMP_REG3;
+ FAIL_IF(push_inst(compiler, SLLI_D | RD(TMP_REG3) | RJ(OFFS_REG(mem)) | IMM_I12(memw & 0x3)));
+ return push_inst(compiler, ADD_D | RD(TMP_REG3) | RJ(TMP_REG3) | RK(mem & REG_MASK));
+ }
+
+ if (!(mem & REG_MASK)) {
+ *mem_ptr = TMP_REG3;
+ return load_immediate(compiler, TMP_REG3, memw);
+ }
+
+ mem &= REG_MASK;
+
+ if (memw == 0) {
+ *mem_ptr = mem;
+ return SLJIT_SUCCESS;
+ }
+
+ *mem_ptr = TMP_REG3;
+
+ FAIL_IF(load_immediate(compiler, TMP_REG3, memw));
+ return push_inst(compiler, ADD_D | RD(TMP_REG3) | RJ(TMP_REG3) | RK(mem));
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_mov(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
+ sljit_s32 srcdst, sljit_sw srcdstw)
+{
+ sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
+ sljit_ins ins = 0;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_mov(compiler, type, freg, srcdst, srcdstw));
+
+ ADJUST_LOCAL_OFFSET(srcdst, srcdstw);
+
+ if (reg_size != 5 && reg_size != 4)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if (reg_size == 5 && !(get_cpu_features(GET_HWCAP) & LOONGARCH_HWCAP_LASX))
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ if (!(srcdst & SLJIT_MEM)) {
+ if (type & SLJIT_SIMD_STORE)
+ ins = FRD(srcdst) | FRJ(freg) | FRK(freg);
+ else
+ ins = FRD(freg) | FRJ(srcdst) | FRK(srcdst);
+
+ if (reg_size == 5)
+ ins |= VOR_V | (sljit_ins)1 << 26;
+ else
+ ins |= VOR_V;
+
+ return push_inst(compiler, ins);
+ }
+
+ ins = (type & SLJIT_SIMD_STORE) ? VST : VLD;
+
+ if (reg_size == 5)
+ ins = (type & SLJIT_SIMD_STORE) ? XVST : XVLD;
+
+ if (FAST_IS_REG(srcdst) && srcdst >= 0 && (srcdstw >= I12_MIN && srcdstw <= I12_MAX))
+ return push_inst(compiler, ins | FRD(freg) | RJ((sljit_u8)srcdst) | IMM_I12(srcdstw));
+ else {
+ FAIL_IF(sljit_emit_simd_mem_offset(compiler, &srcdst, srcdstw));
+ return push_inst(compiler, ins | FRD(freg) | RJ(srcdst) | IMM_I12(0));
+ }
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_replicate(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
+ sljit_s32 src, sljit_sw srcw)
+{
+ sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
+ sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
+ sljit_ins ins = 0;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_replicate(compiler, type, freg, src, srcw));
+
+ ADJUST_LOCAL_OFFSET(src, srcw);
+
+ if (reg_size != 5 && reg_size != 4)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if (reg_size == 5 && !(get_cpu_features(GET_HWCAP) & LOONGARCH_HWCAP_LASX))
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ if (src & SLJIT_MEM) {
+ FAIL_IF(sljit_emit_simd_mem_offset(compiler, &src, srcw));
+
+ if (reg_size == 5)
+ ins = (sljit_ins)1 << 25;
+
+ return push_inst(compiler, VLDREPL | ins | FRD(freg) | RJ(src) | (sljit_ins)1 << (23 - elem_size));
+ }
+
+ if (reg_size == 5)
+ ins = (sljit_ins)1 << 26;
+
+ if (type & SLJIT_SIMD_FLOAT) {
+ if (src == SLJIT_IMM)
+ return push_inst(compiler, VREPLGR2VR | ins | FRD(freg) | RJ(TMP_ZERO) | (sljit_ins)elem_size << 10);
+
+ FAIL_IF(push_inst(compiler, VREPLVE | ins | FRD(freg) | FRJ(src) | RK(TMP_ZERO) | (sljit_ins)elem_size << 15));
+
+ if (reg_size == 5) {
+ ins = (sljit_ins)(0x44 << 10);
+ return push_inst(compiler, XVPERMI | ins | FRD(freg) | FRJ(freg));
+ }
+
+ return SLJIT_SUCCESS;
+ }
+
+ ins |= VREPLGR2VR | (sljit_ins)elem_size << 10;
+
+ if (src == SLJIT_IMM) {
+ FAIL_IF(load_immediate(compiler, TMP_REG2, srcw));
+ src = TMP_REG2;
+ }
+
+ return push_inst(compiler, ins | FRD(freg) | RJ(src));
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_lane_mov(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg, sljit_s32 lane_index,
+ sljit_s32 srcdst, sljit_sw srcdstw)
+{
+ sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
+ sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
+ sljit_ins ins = 0;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_lane_mov(compiler, type, freg, lane_index, srcdst, srcdstw));
+
+ ADJUST_LOCAL_OFFSET(srcdst, srcdstw);
+
+ if (reg_size != 5 && reg_size != 4)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if (reg_size == 5 && !(get_cpu_features(GET_HWCAP) & LOONGARCH_HWCAP_LASX))
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if ((type & SLJIT_SIMD_FLOAT) && (elem_size < 2 || elem_size > 3))
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ if (type & SLJIT_SIMD_LANE_ZERO) {
+ ins = (reg_size == 5) ? ((sljit_ins)1 << 26) : 0;
+
+ if ((type & SLJIT_SIMD_FLOAT) && freg == srcdst) {
+ FAIL_IF(push_inst(compiler, VOR_V | ins | FRD(TMP_FREG1) | FRJ(freg) | FRK(freg)));
+ srcdst = TMP_FREG1;
+ srcdstw = 0;
+ }
+
+ FAIL_IF(push_inst(compiler, VXOR_V | ins | FRD(freg) | FRJ(freg) | FRK(freg)));
+ }
+
+ if (srcdst & SLJIT_MEM) {
+ FAIL_IF(sljit_emit_simd_mem_offset(compiler, &srcdst, srcdstw));
+
+ if (reg_size == 5)
+ ins = (sljit_ins)1 << 25;
+
+ if (type & SLJIT_SIMD_STORE) {
+ ins |= (sljit_ins)lane_index << 18 | (sljit_ins)(1 << (23 - elem_size));
+ return push_inst(compiler, VSTELM | ins | FRD(freg) | RJ(srcdst));
+ } else {
+ FAIL_IF(emit_op_mem(compiler, (elem_size == 3 ? WORD_DATA : (elem_size == 2 ? INT_DATA : (elem_size == 1 ? HALF_DATA : BYTE_DATA))) | LOAD_DATA, TMP_REG1, srcdst | SLJIT_MEM, 0));
+ srcdst = TMP_REG1;
+ ins = (sljit_ins)(0x3f ^ (0x1f >> elem_size)) << 10;
+
+ if (reg_size == 5) {
+ if (elem_size < 2) {
+ FAIL_IF(push_inst(compiler, VOR_V | (sljit_ins)1 << 26 | FRD(TMP_FREG1) | FRJ(freg) | FRK(freg)));
+ if (lane_index >= (2 << (3 - elem_size))) {
+ FAIL_IF(push_inst(compiler, XVPERMI | (sljit_ins)1 << 18 | FRD(TMP_FREG1) | FRJ(freg) | IMM_I8(1)));
+ FAIL_IF(push_inst(compiler, VINSGR2VR | ins | FRD(TMP_FREG1) | RJ(srcdst) | IMM_V(lane_index % (2 << (3 - elem_size)))));
+ return push_inst(compiler, XVPERMI | (sljit_ins)1 << 18 | FRD(freg) | FRJ(TMP_FREG1) | IMM_I8(2));
+ } else {
+ FAIL_IF(push_inst(compiler, VINSGR2VR | ins | FRD(freg) | RJ(srcdst) | IMM_V(lane_index)));
+ return push_inst(compiler, XVPERMI | (sljit_ins)1 << 18 | FRD(freg) | FRJ(TMP_FREG1) | IMM_I8(18));
+ }
+ } else
+ ins = (sljit_ins)(0x3f ^ (0x3f >> elem_size)) << 10 | (sljit_ins)1 << 26;
+ }
+
+ return push_inst(compiler, VINSGR2VR | ins | FRD(freg) | RJ(srcdst) | IMM_V(lane_index));
+ }
+ }
+
+ if (type & SLJIT_SIMD_FLOAT) {
+ ins = (reg_size == 5) ? (sljit_ins)(0x3f ^ (0x3f >> elem_size)) << 10 | (sljit_ins)1 << 26 : (sljit_ins)(0x3f ^ (0x1f >> elem_size)) << 10;
+
+ if (type & SLJIT_SIMD_STORE) {
+ FAIL_IF(push_inst(compiler, VPICKVE2GR_U | ins | RD(TMP_REG1) | FRJ(freg) | IMM_V(lane_index)));
+ return push_inst(compiler, VINSGR2VR | ins | FRD(srcdst) | RJ(TMP_REG1) | IMM_V(0));
+ } else {
+ FAIL_IF(push_inst(compiler, VPICKVE2GR_U | ins | RD(TMP_REG1) | FRJ(srcdst) | IMM_V(0)));
+ return push_inst(compiler, VINSGR2VR | ins | FRD(freg) | RJ(TMP_REG1) | IMM_V(lane_index));
+ }
+ }
+
+ if (srcdst == SLJIT_IMM) {
+ FAIL_IF(load_immediate(compiler, TMP_REG1, srcdstw));
+ srcdst = TMP_REG1;
+ }
+
+ if (type & SLJIT_SIMD_STORE) {
+ ins = (sljit_ins)(0x3f ^ (0x1f >> elem_size)) << 10;
+
+ if (type & SLJIT_SIMD_LANE_SIGNED)
+ ins |= (sljit_ins)(VPICKVE2GR_U ^ (0x7 << 18));
+ else
+ ins |= VPICKVE2GR_U;
+
+ if (reg_size == 5) {
+ if (elem_size < 2) {
+ if (lane_index >= (2 << (3 - elem_size))) {
+ if (type & SLJIT_SIMD_LANE_SIGNED)
+ ins |= (sljit_ins)(VPICKVE2GR_U ^ (0x7 << 18));
+ else
+ ins |= VPICKVE2GR_U;
+
+ FAIL_IF(push_inst(compiler, VOR_V | (sljit_ins)1 << 26 | FRD(TMP_FREG1) | FRJ(freg) | FRK(freg)));
+ FAIL_IF(push_inst(compiler, XVPERMI | (sljit_ins)1 << 18 | FRD(TMP_FREG1) | FRJ(freg) | IMM_I8(1)));
+ return push_inst(compiler, ins | RD(srcdst) | FRJ(TMP_FREG1) | IMM_V(lane_index % (2 << (3 - elem_size))));
+ }
+ } else {
+ ins ^= (sljit_ins)1 << (15 - elem_size);
+ ins |= (sljit_ins)1 << 26;
+ }
+ }
+
+ return push_inst(compiler, ins | RD(srcdst) | FRJ(freg) | IMM_V(lane_index));
+ } else {
+ ins = (sljit_ins)(0x3f ^ (0x1f >> elem_size)) << 10;
+
+ if (reg_size == 5) {
+ if (elem_size < 2) {
+ FAIL_IF(push_inst(compiler, VOR_V | (sljit_ins)1 << 26 | FRD(TMP_FREG1) | FRJ(freg) | FRK(freg)));
+ if (lane_index >= (2 << (3 - elem_size))) {
+ FAIL_IF(push_inst(compiler, XVPERMI | (sljit_ins)1 << 18 | FRD(TMP_FREG1) | FRJ(freg) | IMM_I8(1)));
+ FAIL_IF(push_inst(compiler, VINSGR2VR | ins | FRD(TMP_FREG1) | RJ(srcdst) | IMM_V(lane_index % (2 << (3 - elem_size)))));
+ return push_inst(compiler, XVPERMI | (sljit_ins)1 << 18 | FRD(freg) | FRJ(TMP_FREG1) | IMM_I8(2));
+ } else {
+ FAIL_IF(push_inst(compiler, VINSGR2VR | ins | FRD(freg) | RJ(srcdst) | IMM_V(lane_index)));
+ return push_inst(compiler, XVPERMI | (sljit_ins)1 << 18 | FRD(freg) | FRJ(TMP_FREG1) | IMM_I8(18));
+ }
+ } else
+ ins = (sljit_ins)(0x3f ^ (0x3f >> elem_size)) << 10 | (sljit_ins)1 << 26;
+ }
+
+ return push_inst(compiler, VINSGR2VR | ins | FRD(freg) | RJ(srcdst) | IMM_V(lane_index));
+ }
+
+ return SLJIT_ERR_UNSUPPORTED;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_lane_replicate(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
+ sljit_s32 src, sljit_s32 src_lane_index)
+{
+ sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
+ sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
+ sljit_ins ins = 0;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_lane_replicate(compiler, type, freg, src, src_lane_index));
+
+ if (reg_size != 5 && reg_size != 4)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if (reg_size == 5 && !(get_cpu_features(GET_HWCAP) & LOONGARCH_HWCAP_LASX))
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ ins = (sljit_ins)(0x3f ^ (0x1f >> elem_size)) << 10;
+
+ if (reg_size == 5) {
+ FAIL_IF(push_inst(compiler, VREPLVEI | (sljit_ins)1 << 26 | ins | FRD(freg) | FRJ(src) | IMM_V(src_lane_index % (2 << (3 - elem_size)))));
+
+ ins = (src_lane_index < (2 << (3 - elem_size))) ? (sljit_ins)(0x44 << 10) : (sljit_ins)(0xee << 10);
+
+ return push_inst(compiler, XVPERMI | ins | FRD(freg) | FRJ(freg));
+ }
+
+ return push_inst(compiler, VREPLVEI | ins | FRD(freg) | FRJ(src) | IMM_V(src_lane_index));
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_extend(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
+ sljit_s32 src, sljit_sw srcw)
+{
+ sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
+ sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
+ sljit_s32 elem2_size = SLJIT_SIMD_GET_ELEM2_SIZE(type);
+ sljit_ins ins = 0;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_extend(compiler, type, freg, src, srcw));
+
+ ADJUST_LOCAL_OFFSET(src, srcw);
+
+ if (reg_size != 5 && reg_size != 4)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if (reg_size == 5 && !(get_cpu_features(GET_HWCAP) & LOONGARCH_HWCAP_LASX))
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ if (src & SLJIT_MEM) {
+ ins = (type & SLJIT_SIMD_STORE) ? VST : VLD;
+
+ if (reg_size == 5)
+ ins = (type & SLJIT_SIMD_STORE) ? XVST : XVLD;
+
+ if (FAST_IS_REG(src) && src >= 0 && (srcw >= I12_MIN && srcw <= I12_MAX))
+ FAIL_IF(push_inst(compiler, ins | FRD(freg) | RJ(src) | IMM_I12(srcw)));
+ else {
+ FAIL_IF(sljit_emit_simd_mem_offset(compiler, &src, srcw));
+ FAIL_IF(push_inst(compiler, ins | FRD(freg) | RJ(src) | IMM_I12(0)));
+ }
+ src = freg;
+ }
+
+ if (type & SLJIT_SIMD_FLOAT) {
+ if (elem_size != 2 || elem2_size != 3)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ ins = 0;
+ if (reg_size == 5) {
+ ins = (sljit_ins)1 << 26;
+ FAIL_IF(push_inst(compiler, XVPERMI | FRD(src) | FRJ(src) | IMM_I8(16)));
+ }
+
+ return push_inst(compiler, VFCVTL_D_S | ins | FRD(freg) | FRJ(src));
+ }
+
+ ins = (type & SLJIT_SIMD_EXTEND_SIGNED) ? VSLLWIL : (VSLLWIL | (sljit_ins)1 << 18);
+
+ if (reg_size == 5)
+ ins |= (sljit_ins)1 << 26;
+
+ do {
+ if (reg_size == 5)
+ FAIL_IF(push_inst(compiler, XVPERMI | FRD(src) | FRJ(src) | IMM_I8(16)));
+
+ FAIL_IF(push_inst(compiler, ins | ((sljit_ins)1 << (13 + elem_size)) | FRD(freg) | FRJ(src)));
+ src = freg;
+ } while (++elem_size < elem2_size);
+
+ return SLJIT_SUCCESS;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_sign(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
+ sljit_s32 dst, sljit_sw dstw)
+{
+ sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
+ sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
+ sljit_ins ins = 0;
+ sljit_s32 dst_r;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_sign(compiler, type, freg, dst, dstw));
+
+ ADJUST_LOCAL_OFFSET(dst, dstw);
+
+ if (reg_size != 5 && reg_size != 4)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if (reg_size == 5 && !(get_cpu_features(GET_HWCAP) & LOONGARCH_HWCAP_LASX))
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if (elem_size > 3 || ((type & SLJIT_SIMD_FLOAT) && elem_size < 2))
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ dst_r = FAST_IS_REG(dst) ? dst : TMP_REG2;
+
+ if (reg_size == 5)
+ ins = (sljit_ins)1 << 26;
+
+ FAIL_IF(push_inst(compiler, VMSKLTZ | ins | (sljit_ins)(elem_size << 10) | FRD(TMP_FREG1) | FRJ(freg)));
+
+ FAIL_IF(push_inst(compiler, VPICKVE2GR_U | (sljit_ins)(0x3c << 10) | RD(dst_r) | FRJ(TMP_FREG1)));
+
+ if (reg_size == 5) {
+ FAIL_IF(push_inst(compiler, VPICKVE2GR_U | (sljit_ins)(0x38 << 10) | ins | RD(TMP_REG3) | FRJ(TMP_FREG1) | IMM_V(2)));
+ FAIL_IF(push_inst(compiler, SLLI_W | RD(TMP_REG3) | RJ(TMP_REG3) | IMM_I12(2 << (3 - elem_size))));
+ FAIL_IF(push_inst(compiler, OR | RD(dst_r) | RJ(dst_r) | RK(TMP_REG3)));
+ }
+
+ if (dst_r == TMP_REG2)
+ return emit_op_mem(compiler, ((type & SLJIT_32) ? INT_DATA : WORD_DATA), TMP_REG2, dst, dstw);
+
+ return SLJIT_SUCCESS;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_op2(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 dst_freg, sljit_s32 src1_freg, sljit_s32 src2_freg)
+{
+ sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
+ sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
+ sljit_ins ins = 0;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_op2(compiler, type, dst_freg, src1_freg, src2_freg));
+
+ if (reg_size != 5 && reg_size != 4)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if (reg_size == 5 && !(get_cpu_features(GET_HWCAP) & LOONGARCH_HWCAP_LASX))
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if ((type & SLJIT_SIMD_FLOAT) && (elem_size < 2 || elem_size > 3))
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ switch (SLJIT_SIMD_GET_OPCODE(type)) {
+ case SLJIT_SIMD_OP2_AND:
+ ins = VAND_V;
+ break;
+ case SLJIT_SIMD_OP2_OR:
+ ins = VOR_V;
+ break;
+ case SLJIT_SIMD_OP2_XOR:
+ ins = VXOR_V;
+ break;
+ }
+
+ if (reg_size == 5)
+ ins |= (sljit_ins)1 << 26;
+
+ return push_inst(compiler, ins | FRD(dst_freg) | FRJ(src1_freg) | FRK(src2_freg));
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_atomic_load(struct sljit_compiler *compiler,
+ sljit_s32 op,
+ sljit_s32 dst_reg,
+ sljit_s32 mem_reg)
+{
+ sljit_ins ins;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_atomic_load(compiler, op, dst_reg, mem_reg));
+
+ switch(GET_OPCODE(op)) {
+ case SLJIT_MOV_U8:
+ ins = LD_BU;
+ break;
+ case SLJIT_MOV_U16:
+ ins = LD_HU;
+ break;
+ case SLJIT_MOV32:
+ ins = LD_W;
+ break;
+ case SLJIT_MOV_U32:
+ ins = LD_WU;
+ break;
+ default:
+ ins = LD_D;
+ break;
+ }
+
+ return push_inst(compiler, ins | RD(dst_reg) | RJ(mem_reg) | IMM_I12(0));
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_atomic_store(struct sljit_compiler *compiler,
+ sljit_s32 op,
+ sljit_s32 src_reg,
+ sljit_s32 mem_reg,
+ sljit_s32 temp_reg)
+{
+ sljit_ins ins = 0;
+ sljit_ins unsign = 0;
+ sljit_s32 tmp = temp_reg;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_atomic_store(compiler, op, src_reg, mem_reg, temp_reg));
+
+ switch (GET_OPCODE(op)) {
+ case SLJIT_MOV_U8:
+ ins = AMCAS_B;
+ unsign = BSTRPICK_D | (7 << 16);
+ break;
+ case SLJIT_MOV_U16:
+ ins = AMCAS_H;
+ unsign = BSTRPICK_D | (15 << 16);
+ break;
+ case SLJIT_MOV32:
+ ins = AMCAS_W;
+ break;
+ case SLJIT_MOV_U32:
+ ins = AMCAS_W;
+ unsign = BSTRPICK_D | (31 << 16);
+ break;
+ default:
+ ins = AMCAS_D;
+ break;
+ }
+
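+ /* AMCAS compares against and then overwrites its rd operand, so the expected
+ value in temp_reg is first duplicated into TMP_REG1 (XOR with the zero
+ register acts as a move). The XOR/SLTUI pair below then sets EQUAL_FLAG to 1
+ when the old value matched, i.e. the store took place. */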
+ if (op & SLJIT_SET_ATOMIC_STORED) {
+ FAIL_IF(push_inst(compiler, XOR | RD(TMP_REG1) | RJ(temp_reg) | RK(TMP_ZERO)));
+ tmp = TMP_REG1;
+ }
+ FAIL_IF(push_inst(compiler, ins | RD(tmp) | RJ(mem_reg) | RK(src_reg)));
+ if (!(op & SLJIT_SET_ATOMIC_STORED))
+ return SLJIT_SUCCESS;
+
+ if (unsign)
+ FAIL_IF(push_inst(compiler, unsign | RD(tmp) | RJ(tmp)));
+
+ FAIL_IF(push_inst(compiler, XOR | RD(EQUAL_FLAG) | RJ(tmp) | RK(temp_reg)));
+ return push_inst(compiler, SLTUI | RD(EQUAL_FLAG) | RJ(EQUAL_FLAG) | IMM_I12(1));
+}
+
+static SLJIT_INLINE sljit_s32 emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw init_value, sljit_ins last_ins)
+{
+ SLJIT_UNUSED_ARG(last_ins);
+
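+ /* A fixed four-instruction sequence (LU12I_W, LU32I_D, LU52I_D, ORI)
+ materializes the 64-bit constant, so sljit_set_jump_addr() can later patch
+ each bit field in place. */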
+ FAIL_IF(push_inst(compiler, LU12I_W | RD(dst) | (sljit_ins)(((init_value & 0xffffffff) >> 12) << 5)));
+ FAIL_IF(push_inst(compiler, LU32I_D | RD(dst) | (sljit_ins)(((init_value >> 32) & 0xfffff) << 5)));
+ FAIL_IF(push_inst(compiler, LU52I_D | RD(dst) | RJ(dst) | (sljit_ins)(IMM_I12(init_value >> 52))));
+ return push_inst(compiler, ORI | RD(dst) | RJ(dst) | IMM_I12(init_value));
+}
+
+SLJIT_API_FUNC_ATTRIBUTE void sljit_set_jump_addr(sljit_uw addr, sljit_uw new_target, sljit_sw executable_offset)
+{
+ sljit_ins *inst = (sljit_ins*)addr;
+ SLJIT_UNUSED_ARG(executable_offset);
+
+ SLJIT_UPDATE_WX_FLAGS(inst, inst + 4, 0);
+
+ SLJIT_ASSERT((inst[0] & OPC_1RI20(0x7f)) == LU12I_W);
+ inst[0] = (inst[0] & (OPC_1RI20(0x7f) | 0x1f)) | (sljit_ins)(((new_target & 0xffffffff) >> 12) << 5);
+
+ SLJIT_ASSERT((inst[1] & OPC_1RI20(0x7f)) == LU32I_D);
+ inst[1] = (inst[1] & (OPC_1RI20(0x7f) | 0x1f)) | (sljit_ins)(((new_target >> 32) & 0xfffff) << 5);
+
+ SLJIT_ASSERT((inst[2] & OPC_2RI12(0x3ff)) == LU52I_D);
+ inst[2] = (inst[2] & (OPC_2RI12(0x3ff) | 0x3ff)) | IMM_I12(new_target >> 52);
+
+ SLJIT_ASSERT((inst[3] & OPC_2RI12(0x3ff)) == ORI || (inst[3] & OPC_2RI16(0x3f)) == JIRL);
+ if ((inst[3] & OPC_2RI12(0x3ff)) == ORI)
+ inst[3] = (inst[3] & (OPC_2RI12(0x3ff) | 0x3ff)) | IMM_I12(new_target);
+ else
+ inst[3] = (inst[3] & (OPC_2RI16(0x3f) | 0x3ff)) | IMM_I12((new_target & 0xfff) >> 2);
+
+ SLJIT_UPDATE_WX_FLAGS(inst, inst + 4, 1);
+
+ inst = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
+ SLJIT_CACHE_FLUSH(inst, inst + 4);
+}
+
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw init_value)
+{
+ struct sljit_const *const_;
+ sljit_s32 dst_r;
+
+ CHECK_ERROR_PTR();
+ CHECK_PTR(check_sljit_emit_const(compiler, dst, dstw, init_value));
+ ADJUST_LOCAL_OFFSET(dst, dstw);
+
+ const_ = (struct sljit_const*)ensure_abuf(compiler, sizeof(struct sljit_const));
+ PTR_FAIL_IF(!const_);
+ set_const(const_, compiler);
+
+ dst_r = FAST_IS_REG(dst) ? dst : TMP_REG2;
+ PTR_FAIL_IF(emit_const(compiler, dst_r, init_value, 0));
+
+ if (dst & SLJIT_MEM)
+ PTR_FAIL_IF(emit_op_mem(compiler, WORD_DATA, TMP_REG2, dst, dstw));
+
+ return const_;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_mov_addr(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
+{
+ struct sljit_jump *jump;
+ sljit_s32 dst_r;
+
+ CHECK_ERROR_PTR();
+ CHECK_PTR(check_sljit_emit_mov_addr(compiler, dst, dstw));
+ ADJUST_LOCAL_OFFSET(dst, dstw);
+
+ jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
+ PTR_FAIL_IF(!jump);
+ set_mov_addr(jump, compiler, 0);
+
+ dst_r = FAST_IS_REG(dst) ? dst : TMP_REG2;
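+ /* The word pushed here only records the destination register; it is
+ rewritten into a full address-loading sequence during code generation,
+ which is why JUMP_MAX_SIZE - 1 extra words are reserved below. */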
+ PTR_FAIL_IF(push_inst(compiler, (sljit_ins)dst_r));
+
+ compiler->size += JUMP_MAX_SIZE - 1;
+
+ if (dst & SLJIT_MEM)
+ PTR_FAIL_IF(emit_op_mem(compiler, WORD_DATA, TMP_REG2, dst, dstw));
+
+ return jump;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE void sljit_set_const(sljit_uw addr, sljit_sw new_constant, sljit_sw executable_offset)
+{
+ sljit_set_jump_addr(addr, (sljit_uw)new_constant, executable_offset);
+}
diff --git a/src/3rdparty/pcre2/src/sljit/sljitNativeMIPS_32.c b/src/3rdparty/pcre2/src/sljit/sljitNativeMIPS_32.c
index e6853c98f6..91153e5f25 100644
--- a/src/3rdparty/pcre2/src/sljit/sljitNativeMIPS_32.c
+++ b/src/3rdparty/pcre2/src/sljit/sljitNativeMIPS_32.c
@@ -26,6 +26,49 @@
/* mips 32-bit arch dependent functions. */
+static sljit_s32 emit_copysign(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_sw src1, sljit_sw src2, sljit_sw dst)
+{
+ int is_32 = (op & SLJIT_32);
+ sljit_ins mfhc = MFC1, mthc = MTC1;
+ sljit_ins src1_r = FS(src1), src2_r = FS(src2), dst_r = FS(dst);
+
+ if (!is_32) {
+ switch (cpu_feature_list & CPU_FEATURE_FR) {
+#if defined(SLJIT_MIPS_REV) && SLJIT_MIPS_REV >= 2
+ case CPU_FEATURE_FR:
+ mfhc = MFHC1;
+ mthc = MTHC1;
+ break;
+#endif /* SLJIT_MIPS_REV >= 2 */
+ default:
+ src1_r |= (1 << 11);
+ src2_r |= (1 << 11);
+ dst_r |= (1 << 11);
+ break;
+ }
+ }
+
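+ /* Move the high words of src1/src2 into GPRs (the odd register of the pair
+ is selected via the 1 << 11 bit when MFHC1/MTHC1 cannot be used), combine
+ the sign bits with the XOR/shift sequence, then write the adjusted high
+ word back into dst. */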
+ FAIL_IF(push_inst(compiler, mfhc | T(TMP_REG1) | src1_r, DR(TMP_REG1)));
+ FAIL_IF(push_inst(compiler, mfhc | T(TMP_REG2) | src2_r, DR(TMP_REG2)));
+ if (!is_32 && src1 != dst)
+ FAIL_IF(push_inst(compiler, MOV_fmt(FMT_S) | FS(src1) | FD(dst), MOVABLE_INS));
+#if !defined(SLJIT_MIPS_REV) || SLJIT_MIPS_REV <= 1
+ else
+ FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));
+#endif /* MIPS III */
+ FAIL_IF(push_inst(compiler, XOR | T(TMP_REG1) | D(TMP_REG2) | S(TMP_REG2), DR(TMP_REG2)));
+ FAIL_IF(push_inst(compiler, SRL | T(TMP_REG2) | D(TMP_REG2) | SH_IMM(31), DR(TMP_REG2)));
+ FAIL_IF(push_inst(compiler, SLL | T(TMP_REG2) | D(TMP_REG2) | SH_IMM(31), DR(TMP_REG2)));
+ FAIL_IF(push_inst(compiler, XOR | T(TMP_REG2) | D(TMP_REG1) | S(TMP_REG1), DR(TMP_REG1)));
+ FAIL_IF(push_inst(compiler, mthc | T(TMP_REG1) | dst_r, MOVABLE_INS));
+#if !defined(SLJIT_MIPS_REV) || SLJIT_MIPS_REV <= 1
+ if (mthc == MTC1)
+ return push_inst(compiler, NOP, UNMOVABLE_INS);
+#endif /* MIPS III */
+ return SLJIT_SUCCESS;
+}
+
static sljit_s32 load_immediate(struct sljit_compiler *compiler, sljit_s32 dst_ar, sljit_sw imm)
{
if (!(imm & ~0xffff))
@@ -44,6 +87,108 @@ static SLJIT_INLINE sljit_s32 emit_const(struct sljit_compiler *compiler, sljit_
return push_inst(compiler, ORI | S(dst) | T(dst) | IMM(init_value), DR(dst));
}
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fset64(struct sljit_compiler *compiler,
+ sljit_s32 freg, sljit_f64 value)
+{
+ union {
+ struct {
+#if defined(SLJIT_LITTLE_ENDIAN) && SLJIT_LITTLE_ENDIAN
+ sljit_s32 lo;
+ sljit_s32 hi;
+#else /* !SLJIT_LITTLE_ENDIAN */
+ sljit_s32 hi;
+ sljit_s32 lo;
+#endif /* SLJIT_LITTLE_ENDIAN */
+ } bin;
+ sljit_f64 value;
+ } u;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_fset64(compiler, freg, value));
+
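+ /* The union splits the double into two 32-bit halves: each non-zero half is
+ loaded into a GPR and moved into the FPU register with MTC1 (low word) and
+ MTHC1 or the paired register (high word); zero halves use $0 directly. */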
+ u.value = value;
+
+ if (u.bin.lo != 0)
+ FAIL_IF(load_immediate(compiler, DR(TMP_REG1), u.bin.lo));
+ if (u.bin.hi != 0)
+ FAIL_IF(load_immediate(compiler, DR(TMP_REG2), u.bin.hi));
+
+ FAIL_IF(push_inst(compiler, MTC1 | (u.bin.lo != 0 ? T(TMP_REG1) : TA(0)) | FS(freg), MOVABLE_INS));
+ switch (cpu_feature_list & CPU_FEATURE_FR) {
+#if defined(SLJIT_MIPS_REV) && SLJIT_MIPS_REV >= 2
+ case CPU_FEATURE_FR:
+ return push_inst(compiler, MTHC1 | (u.bin.hi != 0 ? T(TMP_REG2) : TA(0)) | FS(freg), MOVABLE_INS);
+#endif /* SLJIT_MIPS_REV >= 2 */
+ default:
+ FAIL_IF(push_inst(compiler, MTC1 | (u.bin.hi != 0 ? T(TMP_REG2) : TA(0)) | FS(freg) | (1 << 11), MOVABLE_INS));
+ break;
+ }
+#if !defined(SLJIT_MIPS_REV) || SLJIT_MIPS_REV <= 1
+ FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));
+#endif /* MIPS III */
+ return SLJIT_SUCCESS;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fcopy(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 freg, sljit_s32 reg)
+{
+ sljit_s32 reg2 = 0;
+ sljit_ins inst = FS(freg);
+ sljit_ins mthc = MTC1, mfhc = MFC1;
+ int is_32 = (op & SLJIT_32);
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_fcopy(compiler, op, freg, reg));
+
+ op = GET_OPCODE(op);
+ if (reg & REG_PAIR_MASK) {
+ reg2 = REG_PAIR_SECOND(reg);
+ reg = REG_PAIR_FIRST(reg);
+
+ inst |= T(reg2);
+
+ if (op == SLJIT_COPY_TO_F64)
+ FAIL_IF(push_inst(compiler, MTC1 | inst, MOVABLE_INS));
+ else
+ FAIL_IF(push_inst(compiler, MFC1 | inst, DR(reg2)));
+
+ inst = FS(freg) | (1 << 11);
+#if defined(SLJIT_MIPS_REV) && SLJIT_MIPS_REV >= 2
+ if (cpu_feature_list & CPU_FEATURE_FR) {
+ mthc = MTHC1;
+ mfhc = MFHC1;
+ inst = FS(freg);
+ }
+#endif /* SLJIT_MIPS_REV >= 2 */
+ }
+
+ inst |= T(reg);
+ if (!is_32 && !reg2) {
+ switch (cpu_feature_list & CPU_FEATURE_FR) {
+#if defined(SLJIT_MIPS_REV) && SLJIT_MIPS_REV >= 2
+ case CPU_FEATURE_FR:
+ mthc = MTHC1;
+ mfhc = MFHC1;
+ break;
+#endif /* SLJIT_MIPS_REV >= 2 */
+ default:
+ inst |= (1 << 11);
+ break;
+ }
+ }
+
+ if (op == SLJIT_COPY_TO_F64)
+ FAIL_IF(push_inst(compiler, mthc | inst, MOVABLE_INS));
+ else
+ FAIL_IF(push_inst(compiler, mfhc | inst, DR(reg)));
+
+#if !defined(SLJIT_MIPS_REV) || SLJIT_MIPS_REV <= 1
+ if (mthc == MTC1 || mfhc == MFC1)
+ return push_inst(compiler, NOP, UNMOVABLE_INS);
+#endif /* MIPS III */
+ return SLJIT_SUCCESS;
+}
+
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_jump_addr(sljit_uw addr, sljit_uw new_target, sljit_sw executable_offset)
{
sljit_ins *inst = (sljit_ins *)addr;
@@ -74,8 +219,13 @@ static sljit_s32 call_with_args(struct sljit_compiler *compiler, sljit_s32 arg_t
sljit_ins ins = NOP;
sljit_u8 offsets[4];
sljit_u8 *offsets_ptr = offsets;
+#if defined(SLJIT_LITTLE_ENDIAN) && SLJIT_LITTLE_ENDIAN
+ sljit_ins f64_hi = TA(7), f64_lo = TA(6);
+#else
+ sljit_ins f64_hi = TA(6), f64_lo = TA(7);
+#endif /* SLJIT_LITTLE_ENDIAN */
- SLJIT_ASSERT(reg_map[TMP_REG1] == 4 && freg_map[TMP_FREG1] == 12);
+ SLJIT_ASSERT(reg_map[TMP_REG2] == 4 && freg_map[TMP_FREG1] == 12);
arg_types >>= SLJIT_ARG_SHIFT;
@@ -138,20 +288,28 @@ static sljit_s32 call_with_args(struct sljit_compiler *compiler, sljit_s32 arg_t
switch (types & SLJIT_ARG_MASK) {
case SLJIT_ARG_TYPE_F64:
- if (*offsets_ptr < 4 * sizeof (sljit_sw)) {
+ if (*offsets_ptr < 4 * sizeof(sljit_sw)) {
if (prev_ins != NOP)
FAIL_IF(push_inst(compiler, prev_ins, MOVABLE_INS));
/* Must be preceded by at least one other argument,
* and its starting offset must be 8 because of alignment. */
SLJIT_ASSERT((*offsets_ptr >> 2) == 2);
-
- prev_ins = MFC1 | TA(6) | FS(float_arg_count) | (1 << 11);
- ins = MFC1 | TA(7) | FS(float_arg_count);
+ switch (cpu_feature_list & CPU_FEATURE_FR) {
+#if defined(SLJIT_MIPS_REV) && SLJIT_MIPS_REV >= 2
+ case CPU_FEATURE_FR:
+ prev_ins = MFHC1 | f64_hi | FS(float_arg_count);
+ break;
+#endif /* SLJIT_MIPS_REV >= 2 */
+ default:
+ prev_ins = MFC1 | f64_hi | FS(float_arg_count) | (1 << 11);
+ break;
+ }
+ ins = MFC1 | f64_lo | FS(float_arg_count);
} else if (*offsets_ptr < 254)
ins = SDC1 | S(SLJIT_SP) | FT(float_arg_count) | IMM(*offsets_ptr);
else if (*offsets_ptr == 254)
- ins = MOV_S | FMT_D | FS(SLJIT_FR0) | FD(TMP_FREG1);
+ ins = MOV_fmt(FMT_D) | FS(SLJIT_FR0) | FD(TMP_FREG1);
float_arg_count--;
break;
@@ -161,7 +319,7 @@ static sljit_s32 call_with_args(struct sljit_compiler *compiler, sljit_s32 arg_t
else if (*offsets_ptr < 254)
ins = SWC1 | S(SLJIT_SP) | FT(float_arg_count) | IMM(*offsets_ptr);
else if (*offsets_ptr == 254)
- ins = MOV_S | FMT_S | FS(SLJIT_FR0) | FD(TMP_FREG1);
+ ins = MOV_fmt(FMT_S) | FS(SLJIT_FR0) | FD(TMP_FREG1);
float_arg_count--;
break;
@@ -212,7 +370,7 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_call(struct sljit_compile
} else if (type & SLJIT_CALL_RETURN)
PTR_FAIL_IF(emit_stack_frame_release(compiler, 0, &ins));
- SLJIT_ASSERT(DR(PIC_ADDR_REG) == 25 && PIC_ADDR_REG == TMP_REG2);
+ SLJIT_ASSERT(DR(PIC_ADDR_REG) == 25);
if (ins == NOP && compiler->delay_slot != UNMOVABLE_INS)
jump->flags |= IS_MOVABLE;
@@ -283,9 +441,9 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_icall(struct sljit_compiler *compi
return sljit_emit_ijump(compiler, type, src, srcw);
}
- SLJIT_ASSERT(DR(PIC_ADDR_REG) == 25 && PIC_ADDR_REG == TMP_REG2);
+ SLJIT_ASSERT(DR(PIC_ADDR_REG) == 25);
- if (src & SLJIT_IMM)
+ if (src == SLJIT_IMM)
FAIL_IF(load_immediate(compiler, DR(PIC_ADDR_REG), srcw));
else if (src != PIC_ADDR_REG)
FAIL_IF(push_inst(compiler, ADDU | S(src) | TA(0) | D(PIC_ADDR_REG), DR(PIC_ADDR_REG)));
diff --git a/src/3rdparty/pcre2/src/sljit/sljitNativeMIPS_64.c b/src/3rdparty/pcre2/src/sljit/sljitNativeMIPS_64.c
index d2a5924f8e..b9f03a7bd2 100644
--- a/src/3rdparty/pcre2/src/sljit/sljitNativeMIPS_64.c
+++ b/src/3rdparty/pcre2/src/sljit/sljitNativeMIPS_64.c
@@ -26,6 +26,23 @@
/* mips 64-bit arch dependent functions. */
+static sljit_s32 emit_copysign(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 src1, sljit_s32 src2, sljit_s32 dst)
+{
+ FAIL_IF(push_inst(compiler, SELECT_OP(DMFC1, MFC1) | T(TMP_REG1) | FS(src1), DR(TMP_REG1)));
+ FAIL_IF(push_inst(compiler, SELECT_OP(DMFC1, MFC1) | T(TMP_REG2) | FS(src2), DR(TMP_REG2)));
+ FAIL_IF(push_inst(compiler, XOR | S(TMP_REG2) | T(TMP_REG1) | D(TMP_REG2), DR(TMP_REG2)));
+ FAIL_IF(push_inst(compiler, SELECT_OP(DSRL32, SRL) | T(TMP_REG2) | D(TMP_REG2) | SH_IMM(31), DR(TMP_REG2)));
+ FAIL_IF(push_inst(compiler, SELECT_OP(DSLL32, SLL) | T(TMP_REG2) | D(TMP_REG2) | SH_IMM(31), DR(TMP_REG2)));
+ FAIL_IF(push_inst(compiler, XOR | S(TMP_REG1) | T(TMP_REG2) | D(TMP_REG1), DR(TMP_REG1)));
+ FAIL_IF(push_inst(compiler, SELECT_OP(DMTC1, MTC1) | T(TMP_REG1) | FS(dst), MOVABLE_INS));
+#if !defined(SLJIT_MIPS_REV) || SLJIT_MIPS_REV <= 1
+ if (!(op & SLJIT_32))
+ return push_inst(compiler, NOP, UNMOVABLE_INS);
+#endif /* MIPS III */
+ return SLJIT_SUCCESS;
+}
+
static sljit_s32 load_immediate(struct sljit_compiler *compiler, sljit_s32 dst_ar, sljit_sw imm)
{
sljit_s32 shift = 32;
@@ -128,6 +145,57 @@ static SLJIT_INLINE sljit_s32 emit_const(struct sljit_compiler *compiler, sljit_
return push_inst(compiler, ORI | S(dst) | T(dst) | IMM(init_value), DR(dst));
}
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fset64(struct sljit_compiler *compiler,
+ sljit_s32 freg, sljit_f64 value)
+{
+ union {
+ sljit_sw imm;
+ sljit_f64 value;
+ } u;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_fset64(compiler, freg, value));
+
+ u.value = value;
+
+ if (u.imm == 0) {
+ FAIL_IF(push_inst(compiler, DMTC1 | TA(0) | FS(freg), MOVABLE_INS));
+#if !defined(SLJIT_MIPS_REV) || SLJIT_MIPS_REV <= 1
+ FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));
+#endif /* MIPS III */
+ return SLJIT_SUCCESS;
+ }
+
+ FAIL_IF(load_immediate(compiler, DR(TMP_REG1), u.imm));
+ FAIL_IF(push_inst(compiler, DMTC1 | T(TMP_REG1) | FS(freg), MOVABLE_INS));
+#if !defined(SLJIT_MIPS_REV) || SLJIT_MIPS_REV <= 1
+ FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));
+#endif /* MIPS III */
+ return SLJIT_SUCCESS;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fcopy(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 freg, sljit_s32 reg)
+{
+ sljit_ins inst;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_fcopy(compiler, op, freg, reg));
+
+ inst = T(reg) | FS(freg);
+
+ if (GET_OPCODE(op) == SLJIT_COPY_TO_F64)
+ FAIL_IF(push_inst(compiler, SELECT_OP(DMTC1, MTC1) | inst, MOVABLE_INS));
+ else
+ FAIL_IF(push_inst(compiler, SELECT_OP(DMFC1, MFC1) | inst, DR(reg)));
+
+#if !defined(SLJIT_MIPS_REV) || SLJIT_MIPS_REV <= 1
+ if (!(op & SLJIT_32))
+ return push_inst(compiler, NOP, UNMOVABLE_INS);
+#endif /* MIPS III */
+ return SLJIT_SUCCESS;
+}
+
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_jump_addr(sljit_uw addr, sljit_uw new_target, sljit_sw executable_offset)
{
sljit_ins *inst = (sljit_ins *)addr;
@@ -157,7 +225,7 @@ static sljit_s32 call_with_args(struct sljit_compiler *compiler, sljit_s32 arg_t
sljit_ins prev_ins = *ins_ptr;
sljit_ins ins = NOP;
- SLJIT_ASSERT(reg_map[TMP_REG1] == 4 && freg_map[TMP_FREG1] == 12);
+ SLJIT_ASSERT(reg_map[TMP_REG2] == 4 && freg_map[TMP_FREG1] == 12);
arg_types >>= SLJIT_ARG_SHIFT;
@@ -183,17 +251,17 @@ static sljit_s32 call_with_args(struct sljit_compiler *compiler, sljit_s32 arg_t
switch (types & SLJIT_ARG_MASK) {
case SLJIT_ARG_TYPE_F64:
if (arg_count != float_arg_count)
- ins = MOV_S | FMT_D | FS(float_arg_count) | FD(arg_count);
+ ins = MOV_fmt(FMT_D) | FS(float_arg_count) | FD(arg_count);
else if (arg_count == 1)
- ins = MOV_S | FMT_D | FS(SLJIT_FR0) | FD(TMP_FREG1);
+ ins = MOV_fmt(FMT_D) | FS(SLJIT_FR0) | FD(TMP_FREG1);
arg_count--;
float_arg_count--;
break;
case SLJIT_ARG_TYPE_F32:
if (arg_count != float_arg_count)
- ins = MOV_S | FMT_S | FS(float_arg_count) | FD(arg_count);
+ ins = MOV_fmt(FMT_S) | FS(float_arg_count) | FD(arg_count);
else if (arg_count == 1)
- ins = MOV_S | FMT_S | FS(SLJIT_FR0) | FD(TMP_FREG1);
+ ins = MOV_fmt(FMT_S) | FS(SLJIT_FR0) | FD(TMP_FREG1);
arg_count--;
float_arg_count--;
break;
@@ -241,7 +309,7 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_call(struct sljit_compile
if ((type & 0xff) != SLJIT_CALL_REG_ARG)
PTR_FAIL_IF(call_with_args(compiler, arg_types, &ins));
- SLJIT_ASSERT(DR(PIC_ADDR_REG) == 25 && PIC_ADDR_REG == TMP_REG2);
+ SLJIT_ASSERT(DR(PIC_ADDR_REG) == 25);
if (ins == NOP && compiler->delay_slot != UNMOVABLE_INS)
jump->flags |= IS_MOVABLE;
@@ -298,9 +366,9 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_icall(struct sljit_compiler *compi
return sljit_emit_ijump(compiler, type, src, srcw);
}
- SLJIT_ASSERT(DR(PIC_ADDR_REG) == 25 && PIC_ADDR_REG == TMP_REG2);
+ SLJIT_ASSERT(DR(PIC_ADDR_REG) == 25 && PIC_ADDR_REG == TMP_REG1);
- if (src & SLJIT_IMM)
+ if (src == SLJIT_IMM)
FAIL_IF(load_immediate(compiler, DR(PIC_ADDR_REG), srcw));
else if (src != PIC_ADDR_REG)
FAIL_IF(push_inst(compiler, DADDU | S(src) | TA(0) | D(PIC_ADDR_REG), DR(PIC_ADDR_REG)));
diff --git a/src/3rdparty/pcre2/src/sljit/sljitNativeMIPS_common.c b/src/3rdparty/pcre2/src/sljit/sljitNativeMIPS_common.c
index 9afe901c38..88eb30b7f1 100644
--- a/src/3rdparty/pcre2/src/sljit/sljitNativeMIPS_common.c
+++ b/src/3rdparty/pcre2/src/sljit/sljitNativeMIPS_common.c
@@ -26,9 +26,12 @@
/* Latest MIPS architecture. */
-#ifndef __mips_hard_float
+#ifdef HAVE_PRCTL
+#include <sys/prctl.h>
+#endif
+
+#if !defined(__mips_hard_float) || defined(__mips_single_float)
/* Disable automatic detection, covers both -msoft-float and -mno-float */
-#undef SLJIT_IS_FPU_AVAILABLE
#define SLJIT_IS_FPU_AVAILABLE 0
#endif
@@ -42,6 +45,14 @@ SLJIT_API_FUNC_ATTRIBUTE const char* sljit_get_platform_name(void)
return "MIPS64-R6" SLJIT_CPUINFO;
#endif /* SLJIT_CONFIG_MIPS_32 */
+#elif (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 5)
+
+#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
+ return "MIPS32-R5" SLJIT_CPUINFO;
+#else /* !SLJIT_CONFIG_MIPS_32 */
+ return "MIPS64-R5" SLJIT_CPUINFO;
+#endif /* SLJIT_CONFIG_MIPS_32 */
+
#elif (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 2)
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
@@ -72,7 +83,7 @@ typedef sljit_u32 sljit_ins;
#define TMP_REG3 (SLJIT_NUMBER_OF_REGISTERS + 4)
/* For position independent code, t9 must contain the function address. */
-#define PIC_ADDR_REG TMP_REG2
+#define PIC_ADDR_REG TMP_REG1
/* Floating point status register. */
#define FCSR_REG 31
@@ -83,27 +94,31 @@ typedef sljit_u32 sljit_ins;
#define EQUAL_FLAG 3
#define OTHER_FLAG 1
+static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 7] = {
+ 0, 2, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 24, 23, 22, 21, 20, 19, 18, 17, 16, 29, 25, 4, 31, 3, 1
+};
+
#define TMP_FREG1 (SLJIT_NUMBER_OF_FLOAT_REGISTERS + 1)
#define TMP_FREG2 (SLJIT_NUMBER_OF_FLOAT_REGISTERS + 2)
#define TMP_FREG3 (SLJIT_NUMBER_OF_FLOAT_REGISTERS + 3)
-static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 5] = {
- 0, 2, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 24, 23, 22, 21, 20, 19, 18, 17, 16, 29, 4, 25, 31
-};
-
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
-static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 4] = {
- 0, 0, 14, 2, 4, 6, 8, 18, 30, 28, 26, 24, 22, 20, 12, 10, 16
+static const sljit_u8 freg_map[((SLJIT_NUMBER_OF_FLOAT_REGISTERS + 3) << 1) + 1] = {
+ 0,
+ 0, 14, 2, 4, 6, 8, 18, 30, 28, 26, 24, 22, 20,
+ 12, 10, 16,
+ 1, 15, 3, 5, 7, 9, 19, 31, 29, 27, 25, 23, 21,
+ 13, 11, 17
};
-#else
+#else /* !SLJIT_CONFIG_MIPS_32 */
static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 4] = {
0, 0, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 1, 2, 3, 4, 5, 6, 7, 8, 9, 31, 30, 29, 28, 27, 26, 25, 24, 12, 11, 10
};
-#endif
+#endif /* SLJIT_CONFIG_MIPS_32 */
/* --------------------------------------------------------------------- */
/* Instruction forms */
@@ -200,10 +215,18 @@ static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 4] = {
#define DMULTU (HI(0) | LO(29))
#endif /* SLJIT_MIPS_REV >= 6 */
#define DIV_S (HI(17) | FMT_S | LO(3))
+#if defined(SLJIT_MIPS_REV) && SLJIT_MIPS_REV >= 2
#define DINSU (HI(31) | LO(6))
+#endif /* SLJIT_MIPS_REV >= 2 */
+#define DMFC1 (HI(17) | (1 << 21))
+#define DMTC1 (HI(17) | (5 << 21))
+#if defined(SLJIT_MIPS_REV) && SLJIT_MIPS_REV >= 2
#define DROTR (HI(0) | (1 << 21) | LO(58))
#define DROTR32 (HI(0) | (1 << 21) | LO(62))
#define DROTRV (HI(0) | (1 << 6) | LO(22))
+#define DSBH (HI(31) | (2 << 6) | LO(36))
+#define DSHD (HI(31) | (5 << 6) | LO(36))
+#endif /* SLJIT_MIPS_REV >= 2 */
#define DSLL (HI(0) | LO(56))
#define DSLL32 (HI(0) | LO(60))
#define DSLLV (HI(0) | LO(20))
@@ -232,6 +255,9 @@ static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 4] = {
#define LWR (HI(38))
#define LWC1 (HI(49))
#define MFC1 (HI(17))
+#if defined(SLJIT_MIPS_REV) && SLJIT_MIPS_REV >= 2
+#define MFHC1 (HI(17) | (3 << 21))
+#endif /* SLJIT_MIPS_REV >= 2 */
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 6)
#define MOD (HI(0) | (3 << 6) | LO(26))
#define MODU (HI(0) | (3 << 6) | LO(27))
@@ -239,8 +265,10 @@ static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 4] = {
#define MFHI (HI(0) | LO(16))
#define MFLO (HI(0) | LO(18))
#endif /* SLJIT_MIPS_REV >= 6 */
-#define MOV_S (HI(17) | FMT_S | LO(6))
#define MTC1 (HI(17) | (4 << 21))
+#if defined(SLJIT_MIPS_REV) && SLJIT_MIPS_REV >= 2
+#define MTHC1 (HI(17) | (7 << 21))
+#endif /* SLJIT_MIPS_REV >= 2 */
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 6)
#define MUH (HI(0) | (3 << 6) | LO(24))
#define MUHU (HI(0) | (3 << 6) | LO(25))
@@ -256,8 +284,10 @@ static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 4] = {
#define NOR (HI(0) | LO(39))
#define OR (HI(0) | LO(37))
#define ORI (HI(13))
+#if defined(SLJIT_MIPS_REV) && SLJIT_MIPS_REV >= 2
#define ROTR (HI(0) | (1 << 21) | LO(2))
#define ROTRV (HI(0) | (1 << 6) | LO(6))
+#endif /* SLJIT_MIPS_REV >= 2 */
#define SD (HI(63))
#define SDL (HI(44))
#define SDR (HI(45))
@@ -279,6 +309,9 @@ static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 4] = {
#define SWR (HI(46))
#define SWC1 (HI(57))
#define TRUNC_W_S (HI(17) | FMT_S | LO(13))
+#if defined(SLJIT_MIPS_REV) && SLJIT_MIPS_REV >= 2
+#define WSBH (HI(31) | (2 << 6) | LO(32))
+#endif /* SLJIT_MIPS_REV >= 2 */
#define XOR (HI(0) | LO(38))
#define XORI (HI(14))
@@ -289,15 +322,21 @@ static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 4] = {
#else /* SLJIT_MIPS_REV < 6 */
#define DCLZ (HI(28) | LO(36))
#define MOVF (HI(0) | (0 << 16) | LO(1))
+#define MOVF_S (HI(17) | FMT_S | (0 << 16) | LO(17))
#define MOVN (HI(0) | LO(11))
+#define MOVN_S (HI(17) | FMT_S | LO(19))
#define MOVT (HI(0) | (1 << 16) | LO(1))
+#define MOVT_S (HI(17) | FMT_S | (1 << 16) | LO(17))
#define MOVZ (HI(0) | LO(10))
+#define MOVZ_S (HI(17) | FMT_S | LO(18))
#define MUL (HI(28) | LO(2))
#endif /* SLJIT_MIPS_REV >= 6 */
#define PREF (HI(51))
#define PREFX (HI(19) | LO(15))
+#if defined(SLJIT_MIPS_REV) && SLJIT_MIPS_REV >= 2
#define SEB (HI(31) | (16 << 6) | LO(32))
#define SEH (HI(31) | (24 << 6) | LO(32))
+#endif /* SLJIT_MIPS_REV >= 2 */
#endif /* SLJIT_MIPS_REV >= 1 */
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
@@ -318,10 +357,107 @@ static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 4] = {
#define LOAD_W LD
#endif
+#define MOV_fmt(f) (HI(17) | f | LO(6))
+
#define SIMM_MAX (0x7fff)
#define SIMM_MIN (-0x8000)
#define UIMM_MAX (0xffff)
+#define CPU_FEATURE_DETECTED (1 << 0)
+#define CPU_FEATURE_FPU (1 << 1)
+#define CPU_FEATURE_FP64 (1 << 2)
+#define CPU_FEATURE_FR (1 << 3)
+
+static sljit_u32 cpu_feature_list = 0;
+
+#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32) \
+ && (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
+
+static sljit_s32 function_check_is_freg(struct sljit_compiler *compiler, sljit_s32 fr, sljit_s32 is_32)
+{
+ if (compiler->scratches == -1)
+ return 0;
+
+ if (is_32 && fr >= SLJIT_F64_SECOND(SLJIT_FR0))
+ fr -= SLJIT_F64_SECOND(0);
+
+ return (fr >= SLJIT_FR0 && fr < (SLJIT_FR0 + compiler->fscratches))
+ || (fr > (SLJIT_FS0 - compiler->fsaveds) && fr <= SLJIT_FS0)
+ || (fr >= SLJIT_TMP_FREGISTER_BASE && fr < (SLJIT_TMP_FREGISTER_BASE + SLJIT_NUMBER_OF_TEMPORARY_FLOAT_REGISTERS));
+}
+
+#endif /* SLJIT_CONFIG_MIPS_32 && SLJIT_ARGUMENT_CHECKS */
+
+static void get_cpu_features(void)
+{
+#if !defined(SLJIT_IS_FPU_AVAILABLE) && defined(__GNUC__)
+ sljit_u32 fir = 0;
+#endif /* !SLJIT_IS_FPU_AVAILABLE && __GNUC__ */
+ sljit_u32 feature_list = CPU_FEATURE_DETECTED;
+
+#if defined(SLJIT_IS_FPU_AVAILABLE)
+#if SLJIT_IS_FPU_AVAILABLE
+ feature_list |= CPU_FEATURE_FPU;
+#if SLJIT_IS_FPU_AVAILABLE == 64
+ feature_list |= CPU_FEATURE_FP64;
+#endif /* SLJIT_IS_FPU_AVAILABLE == 64 */
+#endif /* SLJIT_IS_FPU_AVAILABLE */
+#elif defined(__GNUC__)
+ __asm__ ("cfc1 %0, $0" : "=r"(fir));
+ if ((fir & (0x3 << 16)) == (0x3 << 16))
+ feature_list |= CPU_FEATURE_FPU;
+
+#if (defined(SLJIT_CONFIG_MIPS_64) && SLJIT_CONFIG_MIPS_64) \
+ && (!defined(SLJIT_MIPS_REV) || SLJIT_MIPS_REV < 2)
+ if ((feature_list & CPU_FEATURE_FPU))
+ feature_list |= CPU_FEATURE_FP64;
+#else /* SLJIT_CONFIG_MIPS32 || SLJIT_MIPS_REV >= 2 */
+ if ((fir & (1 << 22)))
+ feature_list |= CPU_FEATURE_FP64;
+#endif /* SLJIT_CONFIG_MIPS_64 && SLJIT_MIPS_REV < 2 */
+#endif /* SLJIT_IS_FPU_AVAILABLE */
+
+ if ((feature_list & CPU_FEATURE_FPU) && (feature_list & CPU_FEATURE_FP64)) {
+#if defined(SLJIT_CONFIG_MIPS_32) && SLJIT_CONFIG_MIPS_32
+#if defined(SLJIT_MIPS_REV) && SLJIT_MIPS_REV >= 6
+ feature_list |= CPU_FEATURE_FR;
+#elif defined(SLJIT_DETECT_FR) && SLJIT_DETECT_FR == 0
+#if defined(SLJIT_MIPS_REV) && SLJIT_MIPS_REV >= 5
+ feature_list |= CPU_FEATURE_FR;
+#endif /* SLJIT_MIPS_REV >= 5 */
+#else
+ sljit_s32 flag = -1;
+#ifndef PR_GET_FP_MODE
+ sljit_f64 zero = 0.0;
+#else /* PR_GET_FP_MODE */
+ flag = prctl(PR_GET_FP_MODE);
+
+ if (flag > 0)
+ feature_list |= CPU_FEATURE_FR;
+#endif /* PR_GET_FP_MODE */
+#if ((defined(SLJIT_DETECT_FR) && SLJIT_DETECT_FR == 2) \
+ || (!defined(PR_GET_FP_MODE) && (!defined(SLJIT_DETECT_FR) || SLJIT_DETECT_FR >= 1))) \
+ && (defined(__GNUC__) && (defined(__mips) && __mips >= 2))
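+ /* Runtime probe: $f17 is preloaded with a non-zero word and a 64-bit load
+ into $f16 follows; with FR=0 the pair aliases one double and zero is read
+ back, while with FR=1 the odd register keeps its value, so a non-zero
+ result means separate 64-bit registers. */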
+ if (flag < 0) {
+ __asm__ (".set oddspreg\n"
+ "lwc1 $f17, %0\n"
+ "ldc1 $f16, %1\n"
+ "swc1 $f17, %0\n"
+ : "+m" (flag) : "m" (zero) : "$f16", "$f17");
+ if (flag)
+ feature_list |= CPU_FEATURE_FR;
+ }
+#endif /* (!PR_GET_FP_MODE || (PR_GET_FP_MODE && SLJIT_DETECT_FR == 2)) && __GNUC__ */
+#endif /* SLJIT_MIPS_REV >= 6 */
+#else /* !SLJIT_CONFIG_MIPS_32 */
+ /* StatusFR=1 is the only mode supported by the code in MIPS64 */
+ feature_list |= CPU_FEATURE_FR;
+#endif /* SLJIT_CONFIG_MIPS_32 */
+ }
+
+ cpu_feature_list = feature_list;
+}
+
/* dest_reg is the absolute name of the register
Useful for reordering instructions in the delay slot. */
static sljit_s32 push_inst(struct sljit_compiler *compiler, sljit_ins ins, sljit_s32 delay_slot)
@@ -368,7 +504,7 @@ static SLJIT_INLINE sljit_ins* detect_jump_type(struct sljit_jump *jump, sljit_i
if (jump->flags & JUMP_ADDR)
target_addr = jump->u.target;
else {
- SLJIT_ASSERT(jump->flags & JUMP_LABEL);
+ SLJIT_ASSERT(jump->u.label != NULL);
target_addr = (sljit_uw)(code + jump->u.label->size) + (sljit_uw)executable_offset;
}
@@ -499,75 +635,66 @@ static __attribute__ ((noinline)) void sljit_cache_flush(void* code, void* code_
#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
-static SLJIT_INLINE sljit_sw put_label_get_length(struct sljit_put_label *put_label, sljit_uw max_label)
+static SLJIT_INLINE sljit_sw mov_addr_get_length(struct sljit_jump *jump, sljit_ins *code, sljit_sw executable_offset)
{
- if (max_label < 0x80000000l) {
- put_label->flags = PATCH_ABS32;
+ sljit_uw addr;
+ SLJIT_UNUSED_ARG(executable_offset);
+
+ if (jump->flags & JUMP_ADDR)
+ addr = jump->u.target;
+ else
+ addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code + jump->u.label->size, executable_offset);
+
+ if (addr < 0x80000000l) {
+ jump->flags |= PATCH_ABS32;
return 1;
}
- if (max_label < 0x800000000000l) {
- put_label->flags = PATCH_ABS48;
+ if (addr < 0x800000000000l) {
+ jump->flags |= PATCH_ABS48;
return 3;
}
- put_label->flags = 0;
return 5;
}
#endif /* SLJIT_CONFIG_MIPS_64 */
-static SLJIT_INLINE void load_addr_to_reg(void *dst, sljit_u32 reg)
+static SLJIT_INLINE void load_addr_to_reg(struct sljit_jump *jump)
{
- struct sljit_jump *jump;
- struct sljit_put_label *put_label;
- sljit_uw flags;
- sljit_ins *inst;
- sljit_uw addr;
-
- if (reg != 0) {
- jump = (struct sljit_jump*)dst;
- flags = jump->flags;
- inst = (sljit_ins*)jump->addr;
- addr = (flags & JUMP_LABEL) ? jump->u.label->addr : jump->u.target;
- } else {
- put_label = (struct sljit_put_label*)dst;
-#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
- flags = put_label->flags;
-#endif
- inst = (sljit_ins*)put_label->addr;
- addr = put_label->label->addr;
- reg = *inst;
- }
+ sljit_uw flags = jump->flags;
+ sljit_ins *ins = (sljit_ins*)jump->addr;
+ sljit_uw addr = (flags & JUMP_ADDR) ? jump->u.target : jump->u.label->u.addr;
+ sljit_u32 reg = (flags & JUMP_MOV_ADDR) ? *ins : PIC_ADDR_REG;
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
- inst[0] = LUI | T(reg) | IMM(addr >> 16);
+ ins[0] = LUI | T(reg) | IMM(addr >> 16);
#else /* !SLJIT_CONFIG_MIPS_32 */
if (flags & PATCH_ABS32) {
SLJIT_ASSERT(addr < 0x80000000l);
- inst[0] = LUI | T(reg) | IMM(addr >> 16);
+ ins[0] = LUI | T(reg) | IMM(addr >> 16);
}
else if (flags & PATCH_ABS48) {
SLJIT_ASSERT(addr < 0x800000000000l);
- inst[0] = LUI | T(reg) | IMM(addr >> 32);
- inst[1] = ORI | S(reg) | T(reg) | IMM((addr >> 16) & 0xffff);
- inst[2] = DSLL | T(reg) | D(reg) | SH_IMM(16);
- inst += 2;
+ ins[0] = LUI | T(reg) | IMM(addr >> 32);
+ ins[1] = ORI | S(reg) | T(reg) | IMM((addr >> 16) & 0xffff);
+ ins[2] = DSLL | T(reg) | D(reg) | SH_IMM(16);
+ ins += 2;
}
else {
- inst[0] = LUI | T(reg) | IMM(addr >> 48);
- inst[1] = ORI | S(reg) | T(reg) | IMM((addr >> 32) & 0xffff);
- inst[2] = DSLL | T(reg) | D(reg) | SH_IMM(16);
- inst[3] = ORI | S(reg) | T(reg) | IMM((addr >> 16) & 0xffff);
- inst[4] = DSLL | T(reg) | D(reg) | SH_IMM(16);
- inst += 4;
+ ins[0] = LUI | T(reg) | IMM(addr >> 48);
+ ins[1] = ORI | S(reg) | T(reg) | IMM((addr >> 32) & 0xffff);
+ ins[2] = DSLL | T(reg) | D(reg) | SH_IMM(16);
+ ins[3] = ORI | S(reg) | T(reg) | IMM((addr >> 16) & 0xffff);
+ ins[4] = DSLL | T(reg) | D(reg) | SH_IMM(16);
+ ins += 4;
}
#endif /* SLJIT_CONFIG_MIPS_32 */
- inst[1] = ORI | S(reg) | T(reg) | IMM(addr & 0xffff);
+ ins[1] = ORI | S(reg) | T(reg) | IMM(addr & 0xffff);
}
-SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compiler)
+SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compiler, sljit_s32 options, void *exec_allocator_data)
{
struct sljit_memory_fragment *buf;
sljit_ins *code;
@@ -575,77 +702,76 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
sljit_ins *buf_ptr;
sljit_ins *buf_end;
sljit_uw word_count;
- sljit_uw next_addr;
+ SLJIT_NEXT_DEFINE_TYPES;
sljit_sw executable_offset;
sljit_uw addr;
-
struct sljit_label *label;
struct sljit_jump *jump;
struct sljit_const *const_;
- struct sljit_put_label *put_label;
CHECK_ERROR_PTR();
CHECK_PTR(check_sljit_generate_code(compiler));
reverse_buf(compiler);
- code = (sljit_ins*)SLJIT_MALLOC_EXEC(compiler->size * sizeof(sljit_ins), compiler->exec_allocator_data);
+ code = (sljit_ins*)allocate_executable_memory(compiler->size * sizeof(sljit_ins), options, exec_allocator_data, &executable_offset);
PTR_FAIL_WITH_EXEC_IF(code);
buf = compiler->buf;
code_ptr = code;
word_count = 0;
- next_addr = 0;
- executable_offset = SLJIT_EXEC_OFFSET(code);
-
label = compiler->labels;
jump = compiler->jumps;
const_ = compiler->consts;
- put_label = compiler->put_labels;
+ SLJIT_NEXT_INIT_TYPES();
+ SLJIT_GET_NEXT_MIN();
do {
buf_ptr = (sljit_ins*)buf->memory;
buf_end = buf_ptr + (buf->used_size >> 2);
do {
*code_ptr = *buf_ptr++;
- if (next_addr == word_count) {
+ if (next_min_addr == word_count) {
SLJIT_ASSERT(!label || label->size >= word_count);
SLJIT_ASSERT(!jump || jump->addr >= word_count);
SLJIT_ASSERT(!const_ || const_->addr >= word_count);
- SLJIT_ASSERT(!put_label || put_label->addr >= word_count);
/* These structures are ordered by their address. */
- if (label && label->size == word_count) {
- label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
+ if (next_min_addr == next_label_size) {
+ label->u.addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
label->size = (sljit_uw)(code_ptr - code);
label = label->next;
+ next_label_size = SLJIT_GET_NEXT_SIZE(label);
}
- if (jump && jump->addr == word_count) {
+
+ if (next_min_addr == next_jump_addr) {
+ if (!(jump->flags & JUMP_MOV_ADDR)) {
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
- word_count += 2;
-#else
- word_count += 6;
-#endif
- jump->addr = (sljit_uw)(code_ptr - 1);
- code_ptr = detect_jump_type(jump, code, executable_offset);
+ word_count += 2;
+#else /* !SLJIT_CONFIG_MIPS_32 */
+ word_count += 6;
+#endif /* SLJIT_CONFIG_MIPS_32 */
+ jump->addr = (sljit_uw)(code_ptr - 1);
+ code_ptr = detect_jump_type(jump, code, executable_offset);
+ } else {
+ jump->addr = (sljit_uw)code_ptr;
+#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
+ code_ptr += 1;
+ word_count += 1;
+#else /* !SLJIT_CONFIG_MIPS_32 */
+ code_ptr += mov_addr_get_length(jump, code, executable_offset);
+ word_count += 5;
+#endif /* SLJIT_CONFIG_MIPS_32 */
+ }
+
jump = jump->next;
- }
- if (const_ && const_->addr == word_count) {
+ next_jump_addr = SLJIT_GET_NEXT_ADDRESS(jump);
+ } else if (next_min_addr == next_const_addr) {
const_->addr = (sljit_uw)code_ptr;
const_ = const_->next;
+ next_const_addr = SLJIT_GET_NEXT_ADDRESS(const_);
}
- if (put_label && put_label->addr == word_count) {
- SLJIT_ASSERT(put_label->label);
- put_label->addr = (sljit_uw)code_ptr;
-#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
- code_ptr += 1;
- word_count += 1;
-#else
- code_ptr += put_label_get_length(put_label, (sljit_uw)(SLJIT_ADD_EXEC_OFFSET(code, executable_offset) + put_label->label->size));
- word_count += 5;
-#endif
- put_label = put_label->next;
- }
- next_addr = compute_next_addr(label, jump, const_, put_label);
+
+ SLJIT_GET_NEXT_MIN();
}
code_ptr++;
word_count++;
@@ -655,7 +781,7 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
} while (buf);
if (label && label->size == word_count) {
- label->addr = (sljit_uw)code_ptr;
+ label->u.addr = (sljit_uw)code_ptr;
label->size = (sljit_uw)(code_ptr - code);
label = label->next;
}
@@ -663,13 +789,12 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
SLJIT_ASSERT(!label);
SLJIT_ASSERT(!jump);
SLJIT_ASSERT(!const_);
- SLJIT_ASSERT(!put_label);
SLJIT_ASSERT(code_ptr - code <= (sljit_sw)compiler->size);
jump = compiler->jumps;
while (jump) {
do {
- addr = (jump->flags & JUMP_LABEL) ? jump->u.label->addr : jump->u.target;
+ addr = (jump->flags & JUMP_ADDR) ? jump->u.target : jump->u.label->u.addr;
buf_ptr = (sljit_ins *)jump->addr;
if (jump->flags & PATCH_B) {
@@ -685,15 +810,10 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
break;
}
- load_addr_to_reg(jump, PIC_ADDR_REG);
+ load_addr_to_reg(jump);
} while (0);
- jump = jump->next;
- }
- put_label = compiler->put_labels;
- while (put_label) {
- load_addr_to_reg(put_label, 0);
- put_label = put_label->next;
+ jump = jump->next;
}
compiler->error = SLJIT_ERR_COMPILED;
@@ -715,21 +835,23 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_has_cpu_feature(sljit_s32 feature_type)
{
-#if defined(__GNUC__) && !defined(SLJIT_IS_FPU_AVAILABLE)
- sljit_sw fir = 0;
-#endif /* __GNUC__ && !SLJIT_IS_FPU_AVAILABLE */
-
switch (feature_type) {
+#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32) \
+ && (!defined(SLJIT_IS_FPU_AVAILABLE) || SLJIT_IS_FPU_AVAILABLE)
+ case SLJIT_HAS_F64_AS_F32_PAIR:
+ if (!cpu_feature_list)
+ get_cpu_features();
+
+ return (cpu_feature_list & CPU_FEATURE_FR) != 0;
+#endif /* SLJIT_CONFIG_MIPS_32 && SLJIT_IS_FPU_AVAILABLE */
case SLJIT_HAS_FPU:
-#ifdef SLJIT_IS_FPU_AVAILABLE
- return SLJIT_IS_FPU_AVAILABLE;
-#elif defined(__GNUC__)
- __asm__ ("cfc1 %0, $0" : "=r"(fir));
- return (fir >> 22) & 0x1;
-#else
-#error "FIR check is not implemented for this architecture"
-#endif
+ if (!cpu_feature_list)
+ get_cpu_features();
+
+ return (cpu_feature_list & CPU_FEATURE_FPU) != 0;
case SLJIT_HAS_ZERO_REGISTER:
+ case SLJIT_HAS_COPY_F32:
+ case SLJIT_HAS_COPY_F64:
return 1;
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 1)
case SLJIT_HAS_CLZ:
@@ -741,6 +863,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_has_cpu_feature(sljit_s32 feature_type)
return 2;
#endif /* SLJIT_MIPS_REV >= 1 */
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 2)
+ case SLJIT_HAS_REV:
case SLJIT_HAS_ROT:
return 1;
#endif /* SLJIT_MIPS_REV >= 2 */
@@ -751,7 +874,8 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_has_cpu_feature(sljit_s32 feature_type)
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_cmp_info(sljit_s32 type)
{
- return (type >= SLJIT_ORDERED_EQUAL && type <= SLJIT_ORDERED_LESS_EQUAL);
+ SLJIT_UNUSED_ARG(type);
+ return 0;
}
/* --------------------------------------------------------------------- */
@@ -792,6 +916,12 @@ static sljit_s32 emit_op_mem(struct sljit_compiler *compiler, sljit_s32 flags, s
static sljit_s32 emit_stack_frame_release(struct sljit_compiler *compiler, sljit_s32 frame_size, sljit_ins *ins_ptr);
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
+#define SELECT_OP(d, w) (w)
+#else
+#define SELECT_OP(d, w) (!(op & SLJIT_32) ? (d) : (w))
+#endif
+
+#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
#include "sljitNativeMIPS_32.c"
#else
#include "sljitNativeMIPS_64.c"
@@ -815,12 +945,12 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compi
if (fsaveds > 0 || fscratches >= SLJIT_FIRST_SAVED_FLOAT_REG) {
if ((local_size & SSIZE_OF(sw)) != 0)
local_size += SSIZE_OF(sw);
- local_size += GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, sizeof(sljit_f64));
+ local_size += GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, f64);
}
local_size = (local_size + SLJIT_LOCALS_OFFSET + 15) & ~0xf;
#else
- local_size += GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, sizeof(sljit_f64));
+ local_size += GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, f64);
local_size = (local_size + SLJIT_LOCALS_OFFSET + 31) & ~0x1f;
#endif
compiler->local_size = local_size;
@@ -855,9 +985,9 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compi
offset = local_size - SSIZE_OF(sw);
} else {
FAIL_IF(load_immediate(compiler, OTHER_FLAG, local_size));
- FAIL_IF(push_inst(compiler, ADDU_W | S(SLJIT_SP) | TA(0) | D(TMP_REG2), DR(TMP_REG2)));
+ FAIL_IF(push_inst(compiler, ADDU_W | S(SLJIT_SP) | TA(0) | D(TMP_REG1), DR(TMP_REG1)));
FAIL_IF(push_inst(compiler, SUBU_W | S(SLJIT_SP) | TA(OTHER_FLAG) | D(SLJIT_SP), DR(SLJIT_SP)));
- base = S(TMP_REG2);
+ base = S(TMP_REG1);
offset = -SSIZE_OF(sw);
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
local_size = 0;
@@ -918,10 +1048,19 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compi
if (word_arg_count == 0 && float_arg_count <= 2) {
if (float_arg_count == 1)
- FAIL_IF(push_inst(compiler, MOV_S | FMT_D | FS(TMP_FREG1) | FD(SLJIT_FR0), MOVABLE_INS));
+ FAIL_IF(push_inst(compiler, MOV_fmt(FMT_D) | FS(TMP_FREG1) | FD(SLJIT_FR0), MOVABLE_INS));
} else if (arg_count < 4) {
FAIL_IF(push_inst(compiler, MTC1 | TA(4 + arg_count) | FS(float_arg_count), MOVABLE_INS));
- FAIL_IF(push_inst(compiler, MTC1 | TA(5 + arg_count) | FS(float_arg_count) | (1 << 11), MOVABLE_INS));
+ switch (cpu_feature_list & CPU_FEATURE_FR) {
+#if defined(SLJIT_MIPS_REV) && SLJIT_MIPS_REV >= 2
+ case CPU_FEATURE_FR:
+ FAIL_IF(push_inst(compiler, MTHC1 | TA(5 + arg_count) | FS(float_arg_count), MOVABLE_INS));
+ break;
+#endif /* SLJIT_MIPS_REV >= 2 */
+ default:
+ FAIL_IF(push_inst(compiler, MTC1 | TA(5 + arg_count) | FS(float_arg_count) | (1 << 11), MOVABLE_INS));
+ break;
+ }
} else
FAIL_IF(push_inst(compiler, LDC1 | base | FT(float_arg_count) | IMM(local_size + (arg_count << 2)), MOVABLE_INS));
arg_count++;
@@ -931,7 +1070,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compi
if (word_arg_count == 0 && float_arg_count <= 2) {
if (float_arg_count == 1)
- FAIL_IF(push_inst(compiler, MOV_S | FMT_S | FS(TMP_FREG1) | FD(SLJIT_FR0), MOVABLE_INS));
+ FAIL_IF(push_inst(compiler, MOV_fmt(FMT_S) | FS(TMP_FREG1) | FD(SLJIT_FR0), MOVABLE_INS));
} else if (arg_count < 4)
FAIL_IF(push_inst(compiler, MTC1 | TA(4 + arg_count) | FS(float_arg_count), MOVABLE_INS));
else
@@ -966,16 +1105,16 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compi
case SLJIT_ARG_TYPE_F64:
float_arg_count++;
if (arg_count != float_arg_count)
- FAIL_IF(push_inst(compiler, MOV_S | FMT_D | FS(arg_count) | FD(float_arg_count), MOVABLE_INS));
+ FAIL_IF(push_inst(compiler, MOV_fmt(FMT_D) | FS(arg_count) | FD(float_arg_count), MOVABLE_INS));
else if (arg_count == 1)
- FAIL_IF(push_inst(compiler, MOV_S | FMT_D | FS(TMP_FREG1) | FD(SLJIT_FR0), MOVABLE_INS));
+ FAIL_IF(push_inst(compiler, MOV_fmt(FMT_D) | FS(TMP_FREG1) | FD(SLJIT_FR0), MOVABLE_INS));
break;
case SLJIT_ARG_TYPE_F32:
float_arg_count++;
if (arg_count != float_arg_count)
- FAIL_IF(push_inst(compiler, MOV_S | FMT_S | FS(arg_count) | FD(float_arg_count), MOVABLE_INS));
+ FAIL_IF(push_inst(compiler, MOV_fmt(FMT_S) | FS(arg_count) | FD(float_arg_count), MOVABLE_INS));
else if (arg_count == 1)
- FAIL_IF(push_inst(compiler, MOV_S | FMT_S | FS(TMP_FREG1) | FD(SLJIT_FR0), MOVABLE_INS));
+ FAIL_IF(push_inst(compiler, MOV_fmt(FMT_S) | FS(TMP_FREG1) | FD(SLJIT_FR0), MOVABLE_INS));
break;
default:
word_arg_count++;
@@ -1011,12 +1150,12 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_set_context(struct sljit_compiler *comp
if (fsaveds > 0 || fscratches >= SLJIT_FIRST_SAVED_FLOAT_REG) {
if ((local_size & SSIZE_OF(sw)) != 0)
local_size += SSIZE_OF(sw);
- local_size += GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, sizeof(sljit_f64));
+ local_size += GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, f64);
}
compiler->local_size = (local_size + SLJIT_LOCALS_OFFSET + 15) & ~0xf;
#else
- local_size += GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, sizeof(sljit_f64));
+ local_size += GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, f64);
compiler->local_size = (local_size + SLJIT_LOCALS_OFFSET + 31) & ~0x1f;
#endif
return SLJIT_SUCCESS;
@@ -1042,10 +1181,10 @@ static sljit_s32 emit_stack_frame_release(struct sljit_compiler *compiler, sljit
if (fsaveds > 0 || fscratches >= SLJIT_FIRST_SAVED_FLOAT_REG) {
if ((tmp & SSIZE_OF(sw)) != 0)
tmp += SSIZE_OF(sw);
- tmp += GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, sizeof(sljit_f64));
+ tmp += GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, f64);
}
#else
- tmp += GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, sizeof(sljit_f64));
+ tmp += GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, f64);
#endif
if (local_size <= SIMM_MAX) {
@@ -1057,8 +1196,8 @@ static sljit_s32 emit_stack_frame_release(struct sljit_compiler *compiler, sljit
if (tmp < frame_size)
tmp = frame_size;
- FAIL_IF(load_immediate(compiler, DR(TMP_REG1), local_size - tmp));
- FAIL_IF(push_inst(compiler, ADDU_W | S(SLJIT_SP) | T(TMP_REG1) | D(SLJIT_SP), DR(SLJIT_SP)));
+ FAIL_IF(load_immediate(compiler, DR(TMP_REG2), local_size - tmp));
+ FAIL_IF(push_inst(compiler, ADDU_W | S(SLJIT_SP) | T(TMP_REG2) | D(SLJIT_SP), DR(SLJIT_SP)));
local_size = tmp;
}
@@ -1138,7 +1277,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return_to(struct sljit_compiler *c
FAIL_IF(emit_stack_frame_release(compiler, 1, &ins));
- if (!(src & SLJIT_IMM)) {
+ if (src != SLJIT_IMM) {
FAIL_IF(push_inst(compiler, JR | S(src), UNMOVABLE_INS));
return push_inst(compiler, ins, UNMOVABLE_INS);
}
@@ -1388,16 +1527,12 @@ static SLJIT_INLINE sljit_s32 emit_op_mem2(struct sljit_compiler *compiler, slji
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
-#define SELECT_OP(a, b) (b)
-
#define EMIT_SHIFT(dimm, dimm32, imm, dv, v) \
op_imm = (imm); \
op_v = (v);
#else /* !SLJIT_CONFIG_MIPS_32 */
-#define SELECT_OP(a, b) \
- (!(op & SLJIT_32) ? a : b)
#define EMIT_SHIFT(dimm, dimm32, imm, dv, v) \
op_dimm = (dimm); \
@@ -1414,10 +1549,10 @@ static sljit_s32 emit_clz_ctz(struct sljit_compiler *compiler, sljit_s32 op, slj
{
sljit_s32 is_clz = (GET_OPCODE(op) == SLJIT_CLZ);
#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
- sljit_ins max = (op & SLJIT_32) ? 32 : 64;
-#else /* !SLJIT_CONFIG_RISCV_64 */
- sljit_ins max = 32;
-#endif /* SLJIT_CONFIG_RISCV_64 */
+ sljit_ins word_size = (op & SLJIT_32) ? 32 : 64;
+#else /* !SLJIT_CONFIG_MIPS_64 */
+ sljit_ins word_size = 32;
+#endif /* SLJIT_CONFIG_MIPS_64 */
/* The TMP_REG2 is the next value. */
if (src != TMP_REG2)
@@ -1425,7 +1560,7 @@ static sljit_s32 emit_clz_ctz(struct sljit_compiler *compiler, sljit_s32 op, slj
FAIL_IF(push_inst(compiler, BEQ | S(TMP_REG2) | TA(0) | IMM(is_clz ? 13 : 14), UNMOVABLE_INS));
/* The OTHER_FLAG is the counter. Delay slot. */
- FAIL_IF(push_inst(compiler, SELECT_OP(DADDIU, ADDIU) | SA(0) | TA(OTHER_FLAG) | IMM(max), OTHER_FLAG));
+ FAIL_IF(push_inst(compiler, SELECT_OP(DADDIU, ADDIU) | SA(0) | TA(OTHER_FLAG) | IMM(word_size), OTHER_FLAG));
if (!is_clz) {
FAIL_IF(push_inst(compiler, ANDI | S(TMP_REG2) | T(TMP_REG1) | IMM(1), DR(TMP_REG1)));
@@ -1437,7 +1572,7 @@ static sljit_s32 emit_clz_ctz(struct sljit_compiler *compiler, sljit_s32 op, slj
FAIL_IF(push_inst(compiler, SELECT_OP(DADDIU, ADDIU) | SA(0) | TA(OTHER_FLAG) | IMM(0), OTHER_FLAG));
/* The TMP_REG1 is the next shift. */
- FAIL_IF(push_inst(compiler, SELECT_OP(DADDIU, ADDIU) | SA(0) | T(TMP_REG1) | IMM(max), DR(TMP_REG1)));
+ FAIL_IF(push_inst(compiler, SELECT_OP(DADDIU, ADDIU) | SA(0) | T(TMP_REG1) | IMM(word_size), DR(TMP_REG1)));
FAIL_IF(push_inst(compiler, SELECT_OP(DADDU, ADDU) | S(TMP_REG2) | TA(0) | DA(EQUAL_FLAG), EQUAL_FLAG));
FAIL_IF(push_inst(compiler, SELECT_OP(DSRL, SRL) | T(TMP_REG1) | D(TMP_REG1) | SH_IMM(1), DR(TMP_REG1)));
@@ -1459,10 +1594,108 @@ static sljit_s32 emit_clz_ctz(struct sljit_compiler *compiler, sljit_s32 op, slj
#endif /* SLJIT_MIPS_REV < 1 */
+static sljit_s32 emit_rev(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 dst, sljit_sw src)
+{
+#if defined(SLJIT_CONFIG_MIPS_64) && SLJIT_CONFIG_MIPS_64
+ int is_32 = (op & SLJIT_32);
+#endif /* SLJIT_CONFIG_MIPS_64 */
+
+ op = GET_OPCODE(op);
+#if defined(SLJIT_MIPS_REV) && SLJIT_MIPS_REV >= 2
+#if defined(SLJIT_CONFIG_MIPS_64) && SLJIT_CONFIG_MIPS_64
+ if (!is_32 && (op == SLJIT_REV)) {
+ FAIL_IF(push_inst(compiler, DSBH | T(src) | D(dst), DR(dst)));
+ return push_inst(compiler, DSHD | T(dst) | D(dst), DR(dst));
+ }
+ if (op != SLJIT_REV && src != TMP_REG2) {
+ FAIL_IF(push_inst(compiler, SLL | T(src) | D(TMP_REG1), DR(TMP_REG1)));
+ src = TMP_REG1;
+ }
+#endif /* SLJIT_CONFIG_MIPS_64 */
+ FAIL_IF(push_inst(compiler, WSBH | T(src) | D(dst), DR(dst)));
+ FAIL_IF(push_inst(compiler, ROTR | T(dst) | D(dst) | SH_IMM(16), DR(dst)));
+#if defined(SLJIT_CONFIG_MIPS_64) && SLJIT_CONFIG_MIPS_64
+ if (op == SLJIT_REV_U32 && dst != TMP_REG2 && dst != TMP_REG3)
+ FAIL_IF(push_inst(compiler, DINSU | T(dst) | SA(0) | (31 << 11), DR(dst)));
+#endif /* SLJIT_CONFIG_MIPS_64 */
+#else /* SLJIT_MIPS_REV < 2 */
+#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
+ if (!is_32) {
+ FAIL_IF(push_inst(compiler, DSRL32 | T(src) | D(TMP_REG1) | SH_IMM(0), DR(TMP_REG1)));
+ FAIL_IF(push_inst(compiler, ORI | SA(0) | TA(OTHER_FLAG) | 0xffff, OTHER_FLAG));
+ FAIL_IF(push_inst(compiler, DSLL32 | T(src) | D(dst) | SH_IMM(0), DR(dst)));
+ FAIL_IF(push_inst(compiler, DSLL32 | TA(OTHER_FLAG) | DA(OTHER_FLAG) | SH_IMM(0), OTHER_FLAG));
+ FAIL_IF(push_inst(compiler, OR | S(dst) | T(TMP_REG1) | D(dst), DR(dst)));
+
+ FAIL_IF(push_inst(compiler, DSRL | T(dst) | D(TMP_REG1) | SH_IMM(16), DR(TMP_REG1)));
+ FAIL_IF(push_inst(compiler, ORI | SA(OTHER_FLAG) | TA(OTHER_FLAG) | 0xffff, OTHER_FLAG));
+ FAIL_IF(push_inst(compiler, AND | S(dst) | TA(OTHER_FLAG) | D(dst), DR(dst)));
+ FAIL_IF(push_inst(compiler, AND | S(TMP_REG1) | TA(OTHER_FLAG) | D(TMP_REG1), DR(TMP_REG1)));
+ FAIL_IF(push_inst(compiler, DSLL | TA(OTHER_FLAG) | DA(EQUAL_FLAG) | SH_IMM(8), EQUAL_FLAG));
+ FAIL_IF(push_inst(compiler, DSLL | T(dst) | D(dst) | SH_IMM(16), DR(dst)));
+ FAIL_IF(push_inst(compiler, XOR | SA(OTHER_FLAG) | TA(EQUAL_FLAG) | DA(OTHER_FLAG), OTHER_FLAG));
+ FAIL_IF(push_inst(compiler, OR | S(dst) | T(TMP_REG1) | D(dst), DR(dst)));
+
+ FAIL_IF(push_inst(compiler, DSRL | T(dst) | D(TMP_REG1) | SH_IMM(8), DR(TMP_REG1)));
+ FAIL_IF(push_inst(compiler, AND | S(dst) | TA(OTHER_FLAG) | D(dst), DR(dst)));
+ FAIL_IF(push_inst(compiler, AND | S(TMP_REG1) | TA(OTHER_FLAG) | D(TMP_REG1), DR(TMP_REG1)));
+ FAIL_IF(push_inst(compiler, DSLL | T(dst) | D(dst) | SH_IMM(8), DR(dst)));
+ return push_inst(compiler, OR | S(dst) | T(TMP_REG1) | D(dst), DR(dst));
+ }
+
+ if (op != SLJIT_REV && src != TMP_REG2) {
+ FAIL_IF(push_inst(compiler, SLL | T(src) | D(TMP_REG2) | SH_IMM(0), DR(TMP_REG2)));
+ src = TMP_REG2;
+ }
+#endif /* SLJIT_CONFIG_MIPS_64 */
+
+ FAIL_IF(push_inst(compiler, SRL | T(src) | D(TMP_REG1) | SH_IMM(16), DR(TMP_REG1)));
+ FAIL_IF(push_inst(compiler, LUI | TA(OTHER_FLAG) | 0xff, OTHER_FLAG));
+ FAIL_IF(push_inst(compiler, SLL | T(src) | D(dst) | SH_IMM(16), DR(dst)));
+ FAIL_IF(push_inst(compiler, ORI | SA(OTHER_FLAG) | TA(OTHER_FLAG) | 0xff, OTHER_FLAG));
+ FAIL_IF(push_inst(compiler, OR | S(dst) | T(TMP_REG1) | D(dst), DR(dst)));
+
+ FAIL_IF(push_inst(compiler, SRL | T(dst) | D(TMP_REG1) | SH_IMM(8), DR(TMP_REG1)));
+ FAIL_IF(push_inst(compiler, AND | S(dst) | TA(OTHER_FLAG) | D(dst), DR(dst)));
+ FAIL_IF(push_inst(compiler, AND | S(TMP_REG1) | TA(OTHER_FLAG) | D(TMP_REG1), DR(TMP_REG1)));
+ FAIL_IF(push_inst(compiler, SLL | T(dst) | D(dst) | SH_IMM(8), DR(dst)));
+ FAIL_IF(push_inst(compiler, OR | S(dst) | T(TMP_REG1) | D(dst), DR(dst)));
+
+#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
+ if (op == SLJIT_REV_U32 && dst != TMP_REG2 && dst != TMP_REG3) {
+ FAIL_IF(push_inst(compiler, DSLL32 | T(dst) | D(dst) | SH_IMM(0), DR(dst)));
+ FAIL_IF(push_inst(compiler, DSRL32 | T(dst) | D(dst) | SH_IMM(0), DR(dst)));
+ }
+#endif /* SLJIT_CONFIG_MIPS_64 */
+#endif /* SLJIT_MIPS_REV >= 2 */
+ return SLJIT_SUCCESS;
+}
+
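The sequences emitted by emit_rev above all compute a full 32-bit byte reversal; a portable sketch of the same result (rev32_sketch is an illustrative name):

/* Swap the bytes inside each halfword, then swap the two halfwords
   (WSBH + ROTR 16, or the shift/mask fallback; the two steps commute). */
static unsigned int rev32_sketch(unsigned int x)
{
	x = ((x >> 8) & 0x00ff00ffu) | ((x & 0x00ff00ffu) << 8);
	return (x >> 16) | (x << 16);
}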
+static sljit_s32 emit_rev16(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 dst, sljit_sw src)
+{
+#if defined(SLJIT_MIPS_REV) && SLJIT_MIPS_REV >= 2
+#if defined(SLJIT_CONFIG_MIPS_32) && SLJIT_CONFIG_MIPS_32
+ FAIL_IF(push_inst(compiler, WSBH | T(src) | D(dst), DR(dst)));
+#else /* !SLJIT_CONFIG_MIPS_32 */
+ FAIL_IF(push_inst(compiler, DSBH | T(src) | D(dst), DR(dst)));
+#endif /* SLJIT_CONFIG_MIPS_32 */
+ if (GET_OPCODE(op) == SLJIT_REV_U16)
+ return push_inst(compiler, ANDI | S(dst) | T(dst) | 0xffff, DR(dst));
+ else
+ return push_inst(compiler, SEH | T(dst) | D(dst), DR(dst));
+#else /* SLJIT_MIPS_REV < 2 */
+ FAIL_IF(push_inst(compiler, SELECT_OP(DSRL, SRL) | T(src) | D(TMP_REG1) | SH_IMM(8), DR(TMP_REG1)));
+ FAIL_IF(push_inst(compiler, SELECT_OP(DSLL32, SLL) | T(src) | D(dst) | SH_IMM(24), DR(dst)));
+ FAIL_IF(push_inst(compiler, ANDI | S(TMP_REG1) | T(TMP_REG1) | 0xff, DR(TMP_REG1)));
+ FAIL_IF(push_inst(compiler, (GET_OPCODE(op) == SLJIT_REV_U16 ? SELECT_OP(DSRL32, SRL) : SELECT_OP(DSRA32, SRA)) | T(dst) | D(dst) | SH_IMM(16), DR(dst)));
+ return push_inst(compiler, OR | S(dst) | T(TMP_REG1) | D(dst), DR(dst));
+#endif /* SLJIT_MIPS_REV >= 2 */
+}
+
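emit_rev16 above byte-swaps the low halfword and then zero- or sign-extends it; the equivalent in portable C (rev16_sketch is an illustrative name):

static int rev16_sketch(int x, int is_unsigned)
{
	unsigned int v = (((unsigned int)x >> 8) & 0xff) | (((unsigned int)x & 0xff) << 8);
	return is_unsigned ? (int)(v & 0xffff) : (int)(short)v;
}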
static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 flags,
sljit_s32 dst, sljit_s32 src1, sljit_sw src2)
{
- sljit_s32 is_overflow, is_carry, carry_src_ar, is_handled;
+ sljit_s32 is_overflow, is_carry, carry_src_ar, is_handled, reg;
sljit_ins op_imm, op_v;
#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
sljit_ins ins, op_dimm, op_dimm32, op_dv;
@@ -1486,17 +1719,17 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
if ((flags & (REG_DEST | REG2_SOURCE)) == (REG_DEST | REG2_SOURCE)) {
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
-#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 1)
+#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 2)
return push_inst(compiler, SEB | T(src2) | D(dst), DR(dst));
-#else /* SLJIT_MIPS_REV < 1 */
+#else /* SLJIT_MIPS_REV < 2 */
FAIL_IF(push_inst(compiler, SLL | T(src2) | D(dst) | SH_IMM(24), DR(dst)));
return push_inst(compiler, SRA | T(dst) | D(dst) | SH_IMM(24), DR(dst));
-#endif /* SLJIT_MIPS_REV >= 1 */
+#endif /* SLJIT_MIPS_REV >= 2 */
#else /* !SLJIT_CONFIG_MIPS_32 */
-#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 1)
+#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 2)
if (op & SLJIT_32)
return push_inst(compiler, SEB | T(src2) | D(dst), DR(dst));
-#endif /* SLJIT_MIPS_REV >= 1 */
+#endif /* SLJIT_MIPS_REV >= 2 */
FAIL_IF(push_inst(compiler, DSLL32 | T(src2) | D(dst) | SH_IMM(24), DR(dst)));
return push_inst(compiler, DSRA32 | T(dst) | D(dst) | SH_IMM(24), DR(dst));
#endif /* SLJIT_CONFIG_MIPS_32 */
@@ -1515,17 +1748,17 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
if ((flags & (REG_DEST | REG2_SOURCE)) == (REG_DEST | REG2_SOURCE)) {
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
-#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 1)
+#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 2)
return push_inst(compiler, SEH | T(src2) | D(dst), DR(dst));
-#else /* SLJIT_MIPS_REV < 1 */
+#else /* SLJIT_MIPS_REV < 2 */
FAIL_IF(push_inst(compiler, SLL | T(src2) | D(dst) | SH_IMM(16), DR(dst)));
return push_inst(compiler, SRA | T(dst) | D(dst) | SH_IMM(16), DR(dst));
-#endif /* SLJIT_MIPS_REV >= 1 */
+#endif /* SLJIT_MIPS_REV >= 2 */
#else /* !SLJIT_CONFIG_MIPS_32 */
-#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 1)
+#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 2)
if (op & SLJIT_32)
return push_inst(compiler, SEH | T(src2) | D(dst), DR(dst));
-#endif /* SLJIT_MIPS_REV >= 1 */
+#endif /* SLJIT_MIPS_REV >= 2 */
FAIL_IF(push_inst(compiler, DSLL32 | T(src2) | D(dst) | SH_IMM(16), DR(dst)));
return push_inst(compiler, DSRA32 | T(dst) | D(dst) | SH_IMM(16), DR(dst));
#endif /* SLJIT_CONFIG_MIPS_32 */
@@ -1539,7 +1772,7 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
if ((flags & (REG_DEST | REG2_SOURCE)) == (REG_DEST | REG2_SOURCE)) {
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 2)
if (dst == src2)
- return push_inst(compiler, DINSU | T(src2) | SA(0) | (31 << 11) | (0 << 11), DR(dst));
+ return push_inst(compiler, DINSU | T(src2) | SA(0) | (31 << 11), DR(dst));
#endif /* SLJIT_MIPS_REV >= 2 */
FAIL_IF(push_inst(compiler, DSLL32 | T(src2) | D(dst) | SH_IMM(0), DR(dst)));
return push_inst(compiler, DSRL32 | T(dst) | D(dst) | SH_IMM(0), DR(dst));
@@ -1556,14 +1789,6 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
return SLJIT_SUCCESS;
#endif /* SLJIT_CONFIG_MIPS_64 */
- case SLJIT_NOT:
- SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
- if (op & SLJIT_SET_Z)
- FAIL_IF(push_inst(compiler, NOR | S(src2) | T(src2) | DA(EQUAL_FLAG), EQUAL_FLAG));
- if (!(flags & UNUSED_DEST))
- FAIL_IF(push_inst(compiler, NOR | S(src2) | T(src2) | D(dst), DR(dst)));
- return SLJIT_SUCCESS;
-
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 1)
case SLJIT_CLZ:
SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
@@ -1591,10 +1816,21 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
return emit_clz_ctz(compiler, op, dst, src2);
#endif /* SLJIT_MIPS_REV >= 1 */
+ case SLJIT_REV:
+ case SLJIT_REV_U32:
+ case SLJIT_REV_S32:
+ SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM) && src2 != TMP_REG1 && dst != TMP_REG1);
+ return emit_rev(compiler, op, dst, src2);
+
+ case SLJIT_REV_U16:
+ case SLJIT_REV_S16:
+ SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
+ return emit_rev16(compiler, op, dst, src2);
+
case SLJIT_ADD:
/* Overflow computation (both add and sub): overflow = src1_sign ^ src2_sign ^ result_sign ^ carry_flag */
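/* Example (32-bit): 0x7fffffff + 1 = 0x80000000 gives src1_sign = 0, src2_sign = 0,
   result_sign = 1, carry = 0, so overflow = 0 ^ 0 ^ 1 ^ 0 = 1 (signed overflow). */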
is_overflow = GET_FLAG_TYPE(op) == SLJIT_OVERFLOW;
- carry_src_ar = GET_FLAG_TYPE(op) == GET_FLAG_TYPE(SLJIT_SET_CARRY);
+ carry_src_ar = GET_FLAG_TYPE(op) == SLJIT_CARRY;
if (flags & SRC2_IMM) {
if (is_overflow) {
@@ -1650,7 +1886,7 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
return push_inst(compiler, XOR | S(TMP_REG1) | TA(OTHER_FLAG) | DA(OTHER_FLAG), OTHER_FLAG);
case SLJIT_ADDC:
- carry_src_ar = GET_FLAG_TYPE(op) == GET_FLAG_TYPE(SLJIT_SET_CARRY);
+ carry_src_ar = GET_FLAG_TYPE(op) == SLJIT_CARRY;
if (flags & SRC2_IMM) {
FAIL_IF(push_inst(compiler, SELECT_OP(DADDIU, ADDIU) | S(src1) | T(dst) | IMM(src2), DR(dst)));
@@ -1697,11 +1933,11 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
is_handled = 0;
if (flags & SRC2_IMM) {
- if (GET_FLAG_TYPE(op) == SLJIT_LESS || GET_FLAG_TYPE(op) == SLJIT_GREATER_EQUAL) {
+ if (GET_FLAG_TYPE(op) == SLJIT_LESS) {
FAIL_IF(push_inst(compiler, SLTIU | S(src1) | TA(OTHER_FLAG) | IMM(src2), OTHER_FLAG));
is_handled = 1;
}
- else if (GET_FLAG_TYPE(op) == SLJIT_SIG_LESS || GET_FLAG_TYPE(op) == SLJIT_SIG_GREATER_EQUAL) {
+ else if (GET_FLAG_TYPE(op) == SLJIT_SIG_LESS) {
FAIL_IF(push_inst(compiler, SLTI | S(src1) | TA(OTHER_FLAG) | IMM(src2), OTHER_FLAG));
is_handled = 1;
}
@@ -1711,26 +1947,23 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
is_handled = 1;
if (flags & SRC2_IMM) {
- FAIL_IF(push_inst(compiler, ADDIU | SA(0) | T(TMP_REG2) | IMM(src2), DR(TMP_REG2)));
- src2 = TMP_REG2;
+ reg = (src1 == TMP_REG1) ? TMP_REG2 : TMP_REG1;
+ FAIL_IF(push_inst(compiler, ADDIU | SA(0) | T(reg) | IMM(src2), DR(reg)));
+ src2 = reg;
flags &= ~SRC2_IMM;
}
switch (GET_FLAG_TYPE(op)) {
case SLJIT_LESS:
- case SLJIT_GREATER_EQUAL:
FAIL_IF(push_inst(compiler, SLTU | S(src1) | T(src2) | DA(OTHER_FLAG), OTHER_FLAG));
break;
case SLJIT_GREATER:
- case SLJIT_LESS_EQUAL:
FAIL_IF(push_inst(compiler, SLTU | S(src2) | T(src1) | DA(OTHER_FLAG), OTHER_FLAG));
break;
case SLJIT_SIG_LESS:
- case SLJIT_SIG_GREATER_EQUAL:
FAIL_IF(push_inst(compiler, SLT | S(src1) | T(src2) | DA(OTHER_FLAG), OTHER_FLAG));
break;
case SLJIT_SIG_GREATER:
- case SLJIT_SIG_LESS_EQUAL:
FAIL_IF(push_inst(compiler, SLT | S(src2) | T(src1) | DA(OTHER_FLAG), OTHER_FLAG));
break;
}
@@ -1753,7 +1986,7 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
}
is_overflow = GET_FLAG_TYPE(op) == SLJIT_OVERFLOW;
- is_carry = GET_FLAG_TYPE(op) == GET_FLAG_TYPE(SLJIT_SET_CARRY);
+ is_carry = GET_FLAG_TYPE(op) == SLJIT_CARRY;
if (flags & SRC2_IMM) {
if (is_overflow) {
@@ -1802,7 +2035,7 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
flags &= ~SRC2_IMM;
}
- is_carry = GET_FLAG_TYPE(op) == GET_FLAG_TYPE(SLJIT_SET_CARRY);
+ is_carry = GET_FLAG_TYPE(op) == SLJIT_CARRY;
if (flags & SRC2_IMM) {
if (is_carry)
@@ -1868,6 +2101,14 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
return SLJIT_SUCCESS;
case SLJIT_XOR:
+ if (!(flags & LOGICAL_OP)) {
+ SLJIT_ASSERT((flags & SRC2_IMM) && src2 == -1);
+ if (op & SLJIT_SET_Z)
+ FAIL_IF(push_inst(compiler, NOR | S(src1) | T(src1) | DA(EQUAL_FLAG), EQUAL_FLAG));
+ if (!(flags & UNUSED_DEST))
+ FAIL_IF(push_inst(compiler, NOR | S(src1) | T(src1) | D(dst), DR(dst)));
+ return SLJIT_SUCCESS;
+ }
EMIT_LOGICAL(XORI, XOR);
return SLJIT_SUCCESS;
@@ -2027,31 +2268,32 @@ static sljit_s32 emit_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s3
sljit_s32 dst_r = TMP_REG2;
sljit_s32 src1_r;
sljit_sw src2_r = 0;
- sljit_s32 sugg_src2_r = TMP_REG2;
+ sljit_s32 src2_tmp_reg = (GET_OPCODE(op) >= SLJIT_OP2_BASE && FAST_IS_REG(src1)) ? TMP_REG1 : TMP_REG2;
if (!(flags & ALT_KEEP_CACHE)) {
compiler->cache_arg = 0;
compiler->cache_argw = 0;
}
- if (dst == TMP_REG2) {
+ if (dst == 0) {
SLJIT_ASSERT(HAS_FLAGS(op));
flags |= UNUSED_DEST;
+ dst = TMP_REG2;
}
else if (FAST_IS_REG(dst)) {
dst_r = dst;
flags |= REG_DEST;
if (flags & MOVE_OP)
- sugg_src2_r = dst_r;
+ src2_tmp_reg = dst_r;
}
else if ((dst & SLJIT_MEM) && !getput_arg_fast(compiler, flags | ARG_TEST, DR(TMP_REG1), dst, dstw))
flags |= SLOW_DEST;
if (flags & IMM_OP) {
- if ((src2 & SLJIT_IMM) && src2w != 0 && CHECK_IMM(flags, src2w)) {
+ if (src2 == SLJIT_IMM && src2w != 0 && CHECK_IMM(flags, src2w)) {
flags |= SRC2_IMM;
src2_r = src2w;
- } else if ((flags & CUMULATIVE_OP) && (src1 & SLJIT_IMM) && src1w != 0 && CHECK_IMM(flags, src1w)) {
+ } else if ((flags & CUMULATIVE_OP) && src1 == SLJIT_IMM && src1w != 0 && CHECK_IMM(flags, src1w)) {
flags |= SRC2_IMM;
src2_r = src1w;
@@ -2068,7 +2310,7 @@ static sljit_s32 emit_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s3
src1_r = src1;
flags |= REG1_SOURCE;
}
- else if (src1 & SLJIT_IMM) {
+ else if (src1 == SLJIT_IMM) {
if (src1w) {
FAIL_IF(load_immediate(compiler, DR(TMP_REG1), src1w));
src1_r = TMP_REG1;
@@ -2091,11 +2333,11 @@ static sljit_s32 emit_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s3
if ((flags & (REG_DEST | MOVE_OP)) == MOVE_OP)
dst_r = (sljit_s32)src2_r;
}
- else if (src2 & SLJIT_IMM) {
+ else if (src2 == SLJIT_IMM) {
if (!(flags & SRC2_IMM)) {
if (src2w) {
- FAIL_IF(load_immediate(compiler, DR(sugg_src2_r), src2w));
- src2_r = sugg_src2_r;
+ FAIL_IF(load_immediate(compiler, DR(src2_tmp_reg), src2w));
+ src2_r = src2_tmp_reg;
}
else {
src2_r = 0;
@@ -2109,16 +2351,16 @@ static sljit_s32 emit_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s3
}
}
else {
- if (getput_arg_fast(compiler, flags | LOAD_DATA, DR(sugg_src2_r), src2, src2w))
+ if (getput_arg_fast(compiler, flags | LOAD_DATA, DR(src2_tmp_reg), src2, src2w))
FAIL_IF(compiler->error);
else
flags |= SLOW_SRC2;
- src2_r = sugg_src2_r;
+ src2_r = src2_tmp_reg;
}
if ((flags & (SLOW_SRC1 | SLOW_SRC2)) == (SLOW_SRC1 | SLOW_SRC2)) {
SLJIT_ASSERT(src2_r == TMP_REG2);
- if (!can_cache(src1, src1w, src2, src2w) && can_cache(src1, src1w, dst, dstw)) {
+ if ((flags & SLOW_DEST) && !can_cache(src1, src1w, src2, src2w) && can_cache(src1, src1w, dst, dstw)) {
FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, DR(TMP_REG2), src2, src2w, src1, src1w));
FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, DR(TMP_REG1), src1, src1w, dst, dstw));
}
@@ -2130,7 +2372,7 @@ static sljit_s32 emit_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s3
else if (flags & SLOW_SRC1)
FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, DR(TMP_REG1), src1, src1w, dst, dstw));
else if (flags & SLOW_SRC2)
- FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, DR(sugg_src2_r), src2, src2w, dst, dstw));
+ FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, DR(src2_tmp_reg), src2, src2w, dst, dstw));
FAIL_IF(emit_single_op(compiler, op, flags, dst_r, src1_r, src2_r));
@@ -2279,31 +2521,37 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compile
#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
case SLJIT_MOV_U32:
- return emit_op(compiler, SLJIT_MOV_U32, INT_DATA | MOVE_OP, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_u32)srcw : srcw);
+ return emit_op(compiler, SLJIT_MOV_U32, INT_DATA | MOVE_OP, dst, dstw, TMP_REG1, 0, src, (src == SLJIT_IMM) ? (sljit_u32)srcw : srcw);
case SLJIT_MOV_S32:
case SLJIT_MOV32:
- return emit_op(compiler, SLJIT_MOV_S32, INT_DATA | SIGNED_DATA | MOVE_OP, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s32)srcw : srcw);
+ return emit_op(compiler, SLJIT_MOV_S32, INT_DATA | SIGNED_DATA | MOVE_OP, dst, dstw, TMP_REG1, 0, src, (src == SLJIT_IMM) ? (sljit_s32)srcw : srcw);
#endif
case SLJIT_MOV_U8:
- return emit_op(compiler, op, BYTE_DATA | MOVE_OP, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_u8)srcw : srcw);
+ return emit_op(compiler, op, BYTE_DATA | MOVE_OP, dst, dstw, TMP_REG1, 0, src, (src == SLJIT_IMM) ? (sljit_u8)srcw : srcw);
case SLJIT_MOV_S8:
- return emit_op(compiler, op, BYTE_DATA | SIGNED_DATA | MOVE_OP, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s8)srcw : srcw);
+ return emit_op(compiler, op, BYTE_DATA | SIGNED_DATA | MOVE_OP, dst, dstw, TMP_REG1, 0, src, (src == SLJIT_IMM) ? (sljit_s8)srcw : srcw);
case SLJIT_MOV_U16:
- return emit_op(compiler, op, HALF_DATA | MOVE_OP, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_u16)srcw : srcw);
+ return emit_op(compiler, op, HALF_DATA | MOVE_OP, dst, dstw, TMP_REG1, 0, src, (src == SLJIT_IMM) ? (sljit_u16)srcw : srcw);
case SLJIT_MOV_S16:
- return emit_op(compiler, op, HALF_DATA | SIGNED_DATA | MOVE_OP, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s16)srcw : srcw);
-
- case SLJIT_NOT:
- return emit_op(compiler, op, flags, dst, dstw, TMP_REG1, 0, src, srcw);
+ return emit_op(compiler, op, HALF_DATA | SIGNED_DATA | MOVE_OP, dst, dstw, TMP_REG1, 0, src, (src == SLJIT_IMM) ? (sljit_s16)srcw : srcw);
case SLJIT_CLZ:
case SLJIT_CTZ:
+ case SLJIT_REV:
return emit_op(compiler, op, flags, dst, dstw, TMP_REG1, 0, src, srcw);
+
+ case SLJIT_REV_U16:
+ case SLJIT_REV_S16:
+ return emit_op(compiler, op, HALF_DATA, dst, dstw, TMP_REG1, 0, src, srcw);
+
+ case SLJIT_REV_U32:
+ case SLJIT_REV_S32:
+ return emit_op(compiler, op | SLJIT_32, INT_DATA, dst, dstw, TMP_REG1, 0, src, srcw);
}
SLJIT_UNREACHABLE();
@@ -2326,9 +2574,9 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compile
#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
if (op & SLJIT_32) {
flags |= INT_DATA | SIGNED_DATA;
- if (src1 & SLJIT_IMM)
+ if (src1 == SLJIT_IMM)
src1w = (sljit_s32)src1w;
- if (src2 & SLJIT_IMM)
+ if (src2 == SLJIT_IMM)
src2w = (sljit_s32)src2w;
}
#endif
@@ -2348,9 +2596,13 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compile
compiler->status_flags_state = 0;
return emit_op(compiler, op, flags | CUMULATIVE_OP, dst, dstw, src1, src1w, src2, src2w);
+ case SLJIT_XOR:
+ if ((src1 == SLJIT_IMM && src1w == -1) || (src2 == SLJIT_IMM && src2w == -1)) {
+ return emit_op(compiler, op, flags | CUMULATIVE_OP | IMM_OP, dst, dstw, src1, src1w, src2, src2w);
+ }
+ /* fallthrough */
case SLJIT_AND:
case SLJIT_OR:
- case SLJIT_XOR:
return emit_op(compiler, op, flags | CUMULATIVE_OP | LOGICAL_OP | IMM_OP, dst, dstw, src1, src1w, src2, src2w);
case SLJIT_SHL:
@@ -2362,10 +2614,10 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compile
case SLJIT_ROTL:
case SLJIT_ROTR:
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
- if (src2 & SLJIT_IMM)
+ if (src2 == SLJIT_IMM)
src2w &= 0x1f;
#else
- if (src2 & SLJIT_IMM) {
+ if (src2 == SLJIT_IMM) {
if (op & SLJIT_32)
src2w &= 0x1f;
else
@@ -2387,22 +2639,39 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2u(struct sljit_compiler *compil
CHECK(check_sljit_emit_op2(compiler, op, 1, 0, 0, src1, src1w, src2, src2w));
SLJIT_SKIP_CHECKS(compiler);
- return sljit_emit_op2(compiler, op, TMP_REG2, 0, src1, src1w, src2, src2w);
+ return sljit_emit_op2(compiler, op, 0, 0, src1, src1w, src2, src2w);
}
#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
#define SELECT_OP3(op, src2w, D, D32, W) (((op & SLJIT_32) ? (W) : ((src2w) < 32) ? (D) : (D32)) | (((sljit_ins)src2w & 0x1f) << 6))
-#define SELECT_OP2(op, D, W) ((op & SLJIT_32) ? (W) : (D))
#else /* !SLJIT_CONFIG_MIPS_64 */
#define SELECT_OP3(op, src2w, D, D32, W) ((W) | ((sljit_ins)(src2w) << 6))
-#define SELECT_OP2(op, D, W) (W)
#endif /* SLJIT_CONFIG_MIPS_64 */
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_shift_into(struct sljit_compiler *compiler, sljit_s32 op,
- sljit_s32 src_dst,
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2r(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst_reg,
sljit_s32 src1, sljit_sw src1w,
sljit_s32 src2, sljit_sw src2w)
{
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_op2r(compiler, op, dst_reg, src1, src1w, src2, src2w));
+
+ switch (GET_OPCODE(op)) {
+ case SLJIT_MULADD:
+ SLJIT_SKIP_CHECKS(compiler);
+ FAIL_IF(sljit_emit_op2(compiler, SLJIT_MUL | (op & SLJIT_32), TMP_REG2, 0, src1, src1w, src2, src2w));
+ return push_inst(compiler, SELECT_OP(DADDU, ADDU) | S(dst_reg) | T(TMP_REG2) | D(dst_reg), DR(dst_reg));
+ }
+
+ return SLJIT_SUCCESS;
+}
+
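SLJIT_MULADD above is lowered as a multiply into TMP_REG2 followed by an accumulate into dst_reg; in plain C terms (muladd_sketch is an illustrative name, not an sljit API):

static long muladd_sketch(long dst, long a, long b)
{
	return dst + a * b; /* dst_reg += src1 * src2, truncated to the operating word size */
}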
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_shift_into(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst_reg,
+ sljit_s32 src1_reg,
+ sljit_s32 src2_reg,
+ sljit_s32 src3, sljit_sw src3w)
+{
sljit_s32 is_left;
sljit_ins ins1, ins2, ins3;
#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
@@ -2414,77 +2683,70 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_shift_into(struct sljit_compiler *
#endif /* SLJIT_CONFIG_MIPS_64 */
CHECK_ERROR();
- CHECK(check_sljit_emit_shift_into(compiler, op, src_dst, src1, src1w, src2, src2w));
+ CHECK(check_sljit_emit_shift_into(compiler, op, dst_reg, src1_reg, src2_reg, src3, src3w));
is_left = (GET_OPCODE(op) == SLJIT_SHL || GET_OPCODE(op) == SLJIT_MSHL);
- if (src_dst == src1) {
+ if (src1_reg == src2_reg) {
SLJIT_SKIP_CHECKS(compiler);
- return sljit_emit_op2(compiler, (is_left ? SLJIT_ROTL : SLJIT_ROTR) | (op & SLJIT_32), src_dst, 0, src_dst, 0, src2, src2w);
+ return sljit_emit_op2(compiler, (is_left ? SLJIT_ROTL : SLJIT_ROTR) | (op & SLJIT_32), dst_reg, 0, src1_reg, 0, src3, src3w);
}
- ADJUST_LOCAL_OFFSET(src1, src1w);
- ADJUST_LOCAL_OFFSET(src2, src2w);
+ ADJUST_LOCAL_OFFSET(src3, src3w);
- if (src2 & SLJIT_IMM) {
- src2w &= bit_length - 1;
+ if (src3 == SLJIT_IMM) {
+ src3w &= bit_length - 1;
- if (src2w == 0)
+ if (src3w == 0)
return SLJIT_SUCCESS;
- } else if (src2 & SLJIT_MEM) {
- FAIL_IF(emit_op_mem(compiler, inp_flags, DR(TMP_REG2), src2, src2w));
- src2 = TMP_REG2;
- }
-
- if (src1 & SLJIT_MEM) {
- FAIL_IF(emit_op_mem(compiler, inp_flags, DR(TMP_REG1), src1, src1w));
- src1 = TMP_REG1;
- } else if (src1 & SLJIT_IMM) {
- FAIL_IF(load_immediate(compiler, DR(TMP_REG1), src1w));
- src1 = TMP_REG1;
- }
- if (src2 & SLJIT_IMM) {
if (is_left) {
- ins1 = SELECT_OP3(op, src2w, DSLL, DSLL32, SLL);
- src2w = bit_length - src2w;
- ins2 = SELECT_OP3(op, src2w, DSRL, DSRL32, SRL);
+ ins1 = SELECT_OP3(op, src3w, DSLL, DSLL32, SLL);
+ src3w = bit_length - src3w;
+ ins2 = SELECT_OP3(op, src3w, DSRL, DSRL32, SRL);
} else {
- ins1 = SELECT_OP3(op, src2w, DSRL, DSRL32, SRL);
- src2w = bit_length - src2w;
- ins2 = SELECT_OP3(op, src2w, DSLL, DSLL32, SLL);
+ ins1 = SELECT_OP3(op, src3w, DSRL, DSRL32, SRL);
+ src3w = bit_length - src3w;
+ ins2 = SELECT_OP3(op, src3w, DSLL, DSLL32, SLL);
}
- FAIL_IF(push_inst(compiler, ins1 | T(src_dst) | D(src_dst), DR(src_dst)));
- FAIL_IF(push_inst(compiler, ins2 | T(src1) | D(TMP_REG1), DR(TMP_REG1)));
- return push_inst(compiler, OR | S(src_dst) | T(TMP_REG1) | D(src_dst), DR(src_dst));
+ FAIL_IF(push_inst(compiler, ins1 | T(src1_reg) | D(dst_reg), DR(dst_reg)));
+ FAIL_IF(push_inst(compiler, ins2 | T(src2_reg) | D(TMP_REG1), DR(TMP_REG1)));
+ return push_inst(compiler, OR | S(dst_reg) | T(TMP_REG1) | D(dst_reg), DR(dst_reg));
+ }
+
+ if (src3 & SLJIT_MEM) {
+ FAIL_IF(emit_op_mem(compiler, inp_flags, DR(TMP_REG2), src3, src3w));
+ src3 = TMP_REG2;
+ } else if (dst_reg == src3) {
+ FAIL_IF(push_inst(compiler, SELECT_OP(DADDU, ADDU) | S(src3) | TA(0) | D(TMP_REG2), DR(TMP_REG2)));
+ src3 = TMP_REG2;
}
if (is_left) {
- ins1 = SELECT_OP2(op, DSRL, SRL);
- ins2 = SELECT_OP2(op, DSLLV, SLLV);
- ins3 = SELECT_OP2(op, DSRLV, SRLV);
+ ins1 = SELECT_OP(DSRL, SRL);
+ ins2 = SELECT_OP(DSLLV, SLLV);
+ ins3 = SELECT_OP(DSRLV, SRLV);
} else {
- ins1 = SELECT_OP2(op, DSLL, SLL);
- ins2 = SELECT_OP2(op, DSRLV, SRLV);
- ins3 = SELECT_OP2(op, DSLLV, SLLV);
+ ins1 = SELECT_OP(DSLL, SLL);
+ ins2 = SELECT_OP(DSRLV, SRLV);
+ ins3 = SELECT_OP(DSLLV, SLLV);
}
- FAIL_IF(push_inst(compiler, ins2 | S(src2) | T(src_dst) | D(src_dst), DR(src_dst)));
+ FAIL_IF(push_inst(compiler, ins2 | S(src3) | T(src1_reg) | D(dst_reg), DR(dst_reg)));
if (!(op & SLJIT_SHIFT_INTO_NON_ZERO)) {
- FAIL_IF(push_inst(compiler, ins1 | T(src1) | D(TMP_REG1) | (1 << 6), DR(TMP_REG1)));
- FAIL_IF(push_inst(compiler, XORI | S(src2) | T(TMP_REG2) | ((sljit_ins)bit_length - 1), DR(TMP_REG2)));
- src1 = TMP_REG1;
+ FAIL_IF(push_inst(compiler, ins1 | T(src2_reg) | D(TMP_REG1) | (1 << 6), DR(TMP_REG1)));
+ FAIL_IF(push_inst(compiler, XORI | S(src3) | T(TMP_REG2) | ((sljit_ins)bit_length - 1), DR(TMP_REG2)));
+ src2_reg = TMP_REG1;
} else
- FAIL_IF(push_inst(compiler, SELECT_OP2(op, DSUBU, SUBU) | SA(0) | T(src2) | D(TMP_REG2), DR(TMP_REG2)));
+ FAIL_IF(push_inst(compiler, SELECT_OP(DSUBU, SUBU) | SA(0) | T(src3) | D(TMP_REG2), DR(TMP_REG2)));
- FAIL_IF(push_inst(compiler, ins3 | S(TMP_REG2) | T(src1) | D(TMP_REG1), DR(TMP_REG1)));
- return push_inst(compiler, OR | S(src_dst) | T(TMP_REG1) | D(src_dst), DR(src_dst));
+ FAIL_IF(push_inst(compiler, ins3 | S(TMP_REG2) | T(src2_reg) | D(TMP_REG1), DR(TMP_REG1)));
+ return push_inst(compiler, OR | S(dst_reg) | T(TMP_REG1) | D(dst_reg), DR(dst_reg));
}
#undef SELECT_OP3
-#undef SELECT_OP2
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_src(struct sljit_compiler *compiler, sljit_s32 op,
sljit_s32 src, sljit_sw srcw)
@@ -2518,21 +2780,54 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_src(struct sljit_compiler *comp
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 reg)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_dst(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw)
{
- CHECK_REG_INDEX(check_sljit_get_register_index(reg));
- return reg_map[reg];
+ sljit_s32 dst_ar = RETURN_ADDR_REG;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_op_dst(compiler, op, dst, dstw));
+ ADJUST_LOCAL_OFFSET(dst, dstw);
+
+ switch (op) {
+ case SLJIT_FAST_ENTER:
+ if (FAST_IS_REG(dst))
+ return push_inst(compiler, ADDU_W | SA(RETURN_ADDR_REG) | TA(0) | D(dst), UNMOVABLE_INS);
+ break;
+ case SLJIT_GET_RETURN_ADDRESS:
+ dst_ar = DR(FAST_IS_REG(dst) ? dst : TMP_REG2);
+ FAIL_IF(emit_op_mem(compiler, WORD_DATA | LOAD_DATA, dst_ar, SLJIT_MEM1(SLJIT_SP), compiler->local_size - SSIZE_OF(sw)));
+ break;
+ }
+
+ if (dst & SLJIT_MEM) {
+ FAIL_IF(emit_op_mem(compiler, WORD_DATA, dst_ar, dst, dstw));
+
+ if (op == SLJIT_FAST_ENTER)
+ compiler->delay_slot = UNMOVABLE_INS;
+ }
+
+ return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_float_register_index(sljit_s32 reg)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 type, sljit_s32 reg)
{
- CHECK_REG_INDEX(check_sljit_get_float_register_index(reg));
+ CHECK_REG_INDEX(check_sljit_get_register_index(type, reg));
+
+ if (type == SLJIT_GP_REGISTER)
+ return reg_map[reg];
+
+ if (type != SLJIT_FLOAT_REGISTER)
+ return -1;
+
return FR(reg);
}
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *compiler,
void *instruction, sljit_u32 size)
{
+ SLJIT_UNUSED_ARG(size);
+
CHECK_ERROR();
CHECK(check_sljit_emit_op_custom(compiler, instruction, size));
@@ -2544,14 +2839,14 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *c
/* --------------------------------------------------------------------- */
#define FLOAT_DATA(op) (DOUBLE_DATA | ((op & SLJIT_32) >> 7))
-#define FMT(op) ((((sljit_ins)op & SLJIT_32) ^ SLJIT_32) << (21 - 8))
+#define FMT(op) (FMT_S | (~(sljit_ins)op & SLJIT_32) << (21 - (5 + 3)))
static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_compiler *compiler, sljit_s32 op,
sljit_s32 dst, sljit_sw dstw,
sljit_s32 src, sljit_sw srcw)
{
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
-# define flags (sljit_u32)0
+ sljit_u32 flags = 0;
#else
sljit_u32 flags = ((sljit_u32)(GET_OPCODE(op) == SLJIT_CONV_SW_FROM_F64)) << 21;
#endif
@@ -2565,18 +2860,13 @@ static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_comp
if (FAST_IS_REG(dst)) {
FAIL_IF(push_inst(compiler, MFC1 | flags | T(dst) | FS(TMP_FREG1), MOVABLE_INS));
-#if (!defined SLJIT_MIPS_REV || SLJIT_MIPS_REV <= 3)
+#if !defined(SLJIT_MIPS_REV) || (SLJIT_CONFIG_MIPS_32 && SLJIT_MIPS_REV <= 1)
FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));
-#endif
+#endif /* MIPS III */
return SLJIT_SUCCESS;
}
- /* Store the integer value from a VFP register. */
return emit_op_mem2(compiler, flags ? DOUBLE_DATA : SINGLE_DATA, FR(TMP_FREG1), dst, dstw, 0, 0);
-
-#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
-# undef flags
-#endif
}
static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_sw(struct sljit_compiler *compiler, sljit_s32 op,
@@ -2584,43 +2874,158 @@ static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_sw(struct sljit_comp
sljit_s32 src, sljit_sw srcw)
{
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
-# define flags (sljit_u32)0
+ sljit_u32 flags = 0;
#else
sljit_u32 flags = ((sljit_u32)(GET_OPCODE(op) == SLJIT_CONV_F64_FROM_SW)) << 21;
#endif
-
sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
- if (FAST_IS_REG(src)) {
- FAIL_IF(push_inst(compiler, MTC1 | flags | T(src) | FS(TMP_FREG1), MOVABLE_INS));
-#if (!defined SLJIT_MIPS_REV || SLJIT_MIPS_REV <= 3)
- FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));
-#endif
- } else if (src & SLJIT_MEM) {
- /* Load the integer value into a VFP register. */
+ if (src & SLJIT_MEM)
FAIL_IF(emit_op_mem2(compiler, (flags ? DOUBLE_DATA : SINGLE_DATA) | LOAD_DATA, FR(TMP_FREG1), src, srcw, dst, dstw));
- }
else {
+ if (src == SLJIT_IMM) {
#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
- if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_S32)
- srcw = (sljit_s32)srcw;
+ if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_S32)
+ srcw = (sljit_s32)srcw;
#endif
- FAIL_IF(load_immediate(compiler, DR(TMP_REG1), srcw));
- FAIL_IF(push_inst(compiler, MTC1 | flags | T(TMP_REG1) | FS(TMP_FREG1), MOVABLE_INS));
-#if (!defined SLJIT_MIPS_REV || SLJIT_MIPS_REV <= 3)
+ FAIL_IF(load_immediate(compiler, DR(TMP_REG1), srcw));
+ src = TMP_REG1;
+ }
+
+ FAIL_IF(push_inst(compiler, MTC1 | flags | T(src) | FS(TMP_FREG1), MOVABLE_INS));
+#if !defined(SLJIT_MIPS_REV) || (SLJIT_CONFIG_MIPS_32 && SLJIT_MIPS_REV <= 1)
FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));
-#endif
+#endif /* MIPS III */
}
- FAIL_IF(push_inst(compiler, CVT_S_S | flags | (4 << 21) | ((((sljit_ins)op & SLJIT_32) ^ SLJIT_32) >> 8) | FS(TMP_FREG1) | FD(dst_r), MOVABLE_INS));
+ FAIL_IF(push_inst(compiler, CVT_S_S | flags | (4 << 21) | ((~(sljit_ins)op & SLJIT_32) >> 8) | FS(TMP_FREG1) | FD(dst_r), MOVABLE_INS));
if (dst & SLJIT_MEM)
return emit_op_mem2(compiler, FLOAT_DATA(op), FR(TMP_FREG1), dst, dstw, 0, 0);
return SLJIT_SUCCESS;
+}
+static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_uw(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
+{
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
-# undef flags
+ sljit_u32 flags = 0;
+#else
+ sljit_u32 flags = 1 << 21;
#endif
+ sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
+
+ if (src & SLJIT_MEM) {
+ FAIL_IF(emit_op_mem2(compiler, (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_UW ? WORD_DATA : INT_DATA) | LOAD_DATA, DR(TMP_REG1), src, srcw, dst, dstw));
+ src = TMP_REG1;
+ } else if (src == SLJIT_IMM) {
+#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
+ if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_U32)
+ srcw = (sljit_u32)srcw;
+#endif
+ FAIL_IF(load_immediate(compiler, DR(TMP_REG1), srcw));
+ src = TMP_REG1;
+ }
+
+#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
+ if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_U32) {
+ if (src != TMP_REG1) {
+ FAIL_IF(push_inst(compiler, DSLL32 | T(src) | D(TMP_REG1) | SH_IMM(0), DR(TMP_REG1)));
+ FAIL_IF(push_inst(compiler, DSRL32 | T(TMP_REG1) | D(TMP_REG1) | SH_IMM(0), DR(TMP_REG1)));
+ }
+
+ FAIL_IF(push_inst(compiler, MTC1 | flags | T(TMP_REG1) | FS(TMP_FREG1), MOVABLE_INS));
+#if !defined(SLJIT_MIPS_REV)
+ FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));
+#endif /* MIPS III */
+
+ FAIL_IF(push_inst(compiler, CVT_S_S | flags | (4 << 21) | ((~(sljit_ins)op & SLJIT_32) >> 8) | FS(TMP_FREG1) | FD(dst_r), MOVABLE_INS));
+
+ if (dst & SLJIT_MEM)
+ return emit_op_mem2(compiler, FLOAT_DATA(op), FR(TMP_FREG1), dst, dstw, 0, 0);
+ return SLJIT_SUCCESS;
+ }
+#else /* !SLJIT_CONFIG_MIPS_64 */
+ if (!(op & SLJIT_32)) {
+ FAIL_IF(push_inst(compiler, SLL | T(src) | D(TMP_REG2) | SH_IMM(1), DR(TMP_REG2)));
+ FAIL_IF(push_inst(compiler, SRL | T(TMP_REG2) | D(TMP_REG2) | SH_IMM(1), DR(TMP_REG2)));
+
+ FAIL_IF(push_inst(compiler, MTC1 | flags | T(TMP_REG2) | FS(TMP_FREG1), MOVABLE_INS));
+#if !defined(SLJIT_MIPS_REV) || SLJIT_MIPS_REV <= 1
+ FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));
+#endif /* MIPS III */
+
+ FAIL_IF(push_inst(compiler, CVT_S_S | flags | (4 << 21) | 1 | FS(TMP_FREG1) | FD(dst_r), MOVABLE_INS));
+
+#if (!defined SLJIT_MIPS_REV || SLJIT_MIPS_REV <= 1)
+ FAIL_IF(push_inst(compiler, BGEZ | S(src) | 5, UNMOVABLE_INS));
+#else /* SLJIT_MIPS_REV >= 1 */
+ FAIL_IF(push_inst(compiler, BGEZ | S(src) | 4, UNMOVABLE_INS));
+#endif /* SLJIT_MIPS_REV < 1 */
+
+ FAIL_IF(push_inst(compiler, LUI | T(TMP_REG2) | IMM(0x41e0), UNMOVABLE_INS));
+ FAIL_IF(push_inst(compiler, MTC1 | TA(0) | FS(TMP_FREG2), UNMOVABLE_INS));
+ switch (cpu_feature_list & CPU_FEATURE_FR) {
+#if defined(SLJIT_MIPS_REV) && SLJIT_MIPS_REV >= 2
+ case CPU_FEATURE_FR:
+ FAIL_IF(push_inst(compiler, MTHC1 | T(TMP_REG2) | FS(TMP_FREG2), UNMOVABLE_INS));
+ break;
+#endif /* SLJIT_MIPS_REV >= 2 */
+ default:
+ FAIL_IF(push_inst(compiler, MTC1 | T(TMP_REG2) | FS(TMP_FREG2) | (1 << 11), UNMOVABLE_INS));
+#if !defined(SLJIT_MIPS_REV) || SLJIT_MIPS_REV <= 1
+ FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));
+#endif /* MIPS III */
+ break;
+ }
+ FAIL_IF(push_inst(compiler, ADD_S | FMT(op) | FT(TMP_FREG2) | FS(dst_r) | FD(dst_r), UNMOVABLE_INS));
+
+ if (dst & SLJIT_MEM)
+ return emit_op_mem2(compiler, FLOAT_DATA(op), FR(TMP_FREG1), dst, dstw, 0, 0);
+ return SLJIT_SUCCESS;
+ }
+#endif /* SLJIT_CONFIG_MIPS_64 */
+
+#if (!defined SLJIT_MIPS_REV || SLJIT_MIPS_REV <= 1)
+ FAIL_IF(push_inst(compiler, BLTZ | S(src) | 5, UNMOVABLE_INS));
+#else /* SLJIT_MIPS_REV >= 1 */
+ FAIL_IF(push_inst(compiler, BLTZ | S(src) | 4, UNMOVABLE_INS));
+#endif /* SLJIT_MIPS_REV < 1 */
+ FAIL_IF(push_inst(compiler, ANDI | S(src) | T(TMP_REG2) | IMM(1), DR(TMP_REG2)));
+
+ FAIL_IF(push_inst(compiler, MTC1 | flags | T(src) | FS(TMP_FREG1), MOVABLE_INS));
+#if !defined(SLJIT_MIPS_REV)
+ FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));
+#endif /* !SLJIT_MIPS_REV */
+
+ FAIL_IF(push_inst(compiler, CVT_S_S | flags | (4 << 21) | ((~(sljit_ins)op & SLJIT_32) >> 8) | FS(TMP_FREG1) | FD(dst_r), MOVABLE_INS));
+
+#if (!defined SLJIT_MIPS_REV || SLJIT_MIPS_REV <= 1)
+ FAIL_IF(push_inst(compiler, BEQ | 6, UNMOVABLE_INS));
+#else /* SLJIT_MIPS_REV >= 1 */
+ FAIL_IF(push_inst(compiler, BEQ | 5, UNMOVABLE_INS));
+#endif /* SLJIT_MIPS_REV < 1 */
+
+#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
+ FAIL_IF(push_inst(compiler, DSRL | T(src) | D(TMP_REG1) | SH_IMM(1), DR(TMP_REG1)));
+#else /* !SLJIT_CONFIG_MIPS_64 */
+ FAIL_IF(push_inst(compiler, SRL | T(src) | D(TMP_REG1) | SH_IMM(1), DR(TMP_REG1)));
+#endif /* SLJIT_CONFIG_MIPS_64 */
+
+ FAIL_IF(push_inst(compiler, OR | S(TMP_REG1) | T(TMP_REG2) | D(TMP_REG1), DR(TMP_REG1)));
+
+ FAIL_IF(push_inst(compiler, MTC1 | flags | T(TMP_REG1) | FS(TMP_FREG1), MOVABLE_INS));
+#if !defined(SLJIT_MIPS_REV)
+ FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));
+#endif /* !SLJIT_MIPS_REV */
+
+ FAIL_IF(push_inst(compiler, CVT_S_S | flags | (4 << 21) | ((~(sljit_ins)op & SLJIT_32) >> 8) | FS(TMP_FREG1) | FD(dst_r), MOVABLE_INS));
+ FAIL_IF(push_inst(compiler, ADD_S | FMT(op) | FT(dst_r) | FS(dst_r) | FD(dst_r), UNMOVABLE_INS));
+
+ if (dst & SLJIT_MEM)
+ return emit_op_mem2(compiler, FLOAT_DATA(op), FR(TMP_FREG1), dst, dstw, 0, 0);
+ return SLJIT_SUCCESS;
}
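The negative-input path above applies the usual halve/round-to-odd trick before doubling the converted value; a portable C sketch of the 64-bit case (u64_to_double_sketch is an illustrative name):

static double u64_to_double_sketch(unsigned long long x)
{
	double half;

	if ((long long)x >= 0)
		return (double)(long long)x; /* fits a signed conversion */

	/* keep the dropped low bit so the final rounding is unchanged, then double */
	half = (double)(long long)((x >> 1) | (x & 1));
	return half + half;
}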
static SLJIT_INLINE sljit_s32 sljit_emit_fop1_cmp(struct sljit_compiler *compiler, sljit_s32 op,
@@ -2642,36 +3047,30 @@ static SLJIT_INLINE sljit_s32 sljit_emit_fop1_cmp(struct sljit_compiler *compile
switch (GET_FLAG_TYPE(op)) {
case SLJIT_F_EQUAL:
case SLJIT_ORDERED_EQUAL:
- case SLJIT_UNORDERED_OR_NOT_EQUAL:
inst = C_EQ_S;
break;
case SLJIT_F_NOT_EQUAL:
case SLJIT_UNORDERED_OR_EQUAL:
- case SLJIT_ORDERED_NOT_EQUAL:
inst = C_UEQ_S;
break;
case SLJIT_F_LESS:
case SLJIT_ORDERED_LESS:
- case SLJIT_UNORDERED_OR_GREATER_EQUAL:
inst = C_OLT_S;
break;
case SLJIT_F_GREATER_EQUAL:
case SLJIT_UNORDERED_OR_LESS:
- case SLJIT_ORDERED_GREATER_EQUAL:
inst = C_ULT_S;
break;
case SLJIT_F_GREATER:
case SLJIT_ORDERED_GREATER:
- case SLJIT_UNORDERED_OR_LESS_EQUAL:
inst = C_ULE_S;
break;
case SLJIT_F_LESS_EQUAL:
case SLJIT_UNORDERED_OR_GREATER:
- case SLJIT_ORDERED_LESS_EQUAL:
inst = C_OLE_S;
break;
default:
- SLJIT_ASSERT(GET_FLAG_TYPE(op) == SLJIT_UNORDERED || GET_FLAG_TYPE(op) == SLJIT_ORDERED);
+ SLJIT_ASSERT(GET_FLAG_TYPE(op) == SLJIT_UNORDERED);
inst = C_UN_S;
break;
}
@@ -2704,8 +3103,8 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compil
switch (GET_OPCODE(op)) {
case SLJIT_MOV_F64:
if (src != dst_r) {
- if (dst_r != TMP_FREG1)
- FAIL_IF(push_inst(compiler, MOV_S | FMT(op) | FS(src) | FD(dst_r), MOVABLE_INS));
+ if (!(dst & SLJIT_MEM))
+ FAIL_IF(push_inst(compiler, MOV_fmt(FMT(op)) | FS(src) | FD(dst_r), MOVABLE_INS));
else
dst_r = src;
}
@@ -2763,11 +3162,10 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compil
}
if ((flags & (SLOW_SRC1 | SLOW_SRC2)) == (SLOW_SRC1 | SLOW_SRC2)) {
- if (!can_cache(src1, src1w, src2, src2w) && can_cache(src1, src1w, dst, dstw)) {
+ if ((dst & SLJIT_MEM) && !can_cache(src1, src1w, src2, src2w) && can_cache(src1, src1w, dst, dstw)) {
FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, FR(TMP_FREG2), src2, src2w, src1, src1w));
FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, FR(TMP_FREG1), src1, src1w, dst, dstw));
- }
- else {
+ } else {
FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, FR(TMP_FREG1), src1, src1w, src2, src2w));
FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, FR(TMP_FREG2), src2, src2w, dst, dstw));
}
@@ -2786,18 +3184,17 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compil
case SLJIT_ADD_F64:
FAIL_IF(push_inst(compiler, ADD_S | FMT(op) | FT(src2) | FS(src1) | FD(dst_r), MOVABLE_INS));
break;
-
case SLJIT_SUB_F64:
FAIL_IF(push_inst(compiler, SUB_S | FMT(op) | FT(src2) | FS(src1) | FD(dst_r), MOVABLE_INS));
break;
-
case SLJIT_MUL_F64:
FAIL_IF(push_inst(compiler, MUL_S | FMT(op) | FT(src2) | FS(src1) | FD(dst_r), MOVABLE_INS));
break;
-
case SLJIT_DIV_F64:
FAIL_IF(push_inst(compiler, DIV_S | FMT(op) | FT(src2) | FS(src1) | FD(dst_r), MOVABLE_INS));
break;
+ case SLJIT_COPYSIGN_F64:
+ return emit_copysign(compiler, op, src1, src2, dst_r);
}
if (dst_r == TMP_FREG2)
@@ -2806,26 +3203,24 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compil
return SLJIT_SUCCESS;
}
-#undef FLOAT_DATA
-#undef FMT
-
-/* --------------------------------------------------------------------- */
-/* Other instructions */
-/* --------------------------------------------------------------------- */
-
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fset32(struct sljit_compiler *compiler,
+ sljit_s32 freg, sljit_f32 value)
{
+ union {
+ sljit_s32 imm;
+ sljit_f32 value;
+ } u;
+
CHECK_ERROR();
- CHECK(check_sljit_emit_fast_enter(compiler, dst, dstw));
- ADJUST_LOCAL_OFFSET(dst, dstw);
+ CHECK(check_sljit_emit_fset32(compiler, freg, value));
- if (FAST_IS_REG(dst))
- return push_inst(compiler, ADDU_W | SA(RETURN_ADDR_REG) | TA(0) | D(dst), UNMOVABLE_INS);
+ u.value = value;
- /* Memory. */
- FAIL_IF(emit_op_mem(compiler, WORD_DATA, RETURN_ADDR_REG, dst, dstw));
- compiler->delay_slot = UNMOVABLE_INS;
- return SLJIT_SUCCESS;
+ if (u.imm == 0)
+ return push_inst(compiler, MTC1 | TA(0) | FS(freg), MOVABLE_INS);
+
+ FAIL_IF(load_immediate(compiler, DR(TMP_REG1), u.imm));
+ return push_inst(compiler, MTC1 | T(TMP_REG1) | FS(freg), MOVABLE_INS);
}
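sljit_emit_fset32 materializes a float immediate by reinterpreting its bits as a 32-bit integer, loading that into a GPR and moving it across with MTC1; only +0.0f has an all-zero bit pattern, which is why just that value can be fed straight from the zero register. A small host-side sketch of the bit reinterpretation (the helper name is illustrative):

#include <stdint.h>
#include <string.h>

static int32_t f32_bits(float value)
{
    int32_t imm;
    memcpy(&imm, &value, sizeof(imm));   /* same effect as the union above */
    return imm;                          /* 1.5f -> 0x3FC00000, -0.0f -> 0x80000000 */
}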
/* --------------------------------------------------------------------- */
@@ -2965,10 +3360,10 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compile
PTR_FAIL_IF(push_inst(compiler, inst, UNMOVABLE_INS));
if (type <= SLJIT_JUMP)
- PTR_FAIL_IF(push_inst(compiler, JR | S(TMP_REG2), UNMOVABLE_INS));
+ PTR_FAIL_IF(push_inst(compiler, JR | S(PIC_ADDR_REG), UNMOVABLE_INS));
else {
jump->flags |= IS_JAL;
- PTR_FAIL_IF(push_inst(compiler, JALR | S(TMP_REG2) | DA(RETURN_ADDR_REG), UNMOVABLE_INS));
+ PTR_FAIL_IF(push_inst(compiler, JALR | S(PIC_ADDR_REG) | DA(RETURN_ADDR_REG), UNMOVABLE_INS));
}
jump->addr = compiler->size;
@@ -2984,7 +3379,7 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compile
}
#define RESOLVE_IMM1() \
- if (src1 & SLJIT_IMM) { \
+ if (src1 == SLJIT_IMM) { \
if (src1w) { \
PTR_FAIL_IF(load_immediate(compiler, DR(TMP_REG1), src1w)); \
src1 = TMP_REG1; \
@@ -2994,10 +3389,10 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compile
}
#define RESOLVE_IMM2() \
- if (src2 & SLJIT_IMM) { \
+ if (src2 == SLJIT_IMM) { \
if (src2w) { \
- PTR_FAIL_IF(load_immediate(compiler, DR(TMP_REG2), src2w)); \
- src2 = TMP_REG2; \
+ PTR_FAIL_IF(load_immediate(compiler, DR(src2_tmp_reg), src2w)); \
+ src2 = src2_tmp_reg; \
} \
else \
src2 = 0; \
@@ -3010,6 +3405,7 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_cmp(struct sljit_compiler
struct sljit_jump *jump;
sljit_s32 flags;
sljit_ins inst;
+ sljit_s32 src2_tmp_reg = FAST_IS_REG(src1) ? TMP_REG1 : TMP_REG2;
CHECK_ERROR_PTR();
CHECK_PTR(check_sljit_emit_cmp(compiler, type, src1, src1w, src2, src2w));
@@ -3030,8 +3426,8 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_cmp(struct sljit_compiler
}
if (src2 & SLJIT_MEM) {
- PTR_FAIL_IF(emit_op_mem2(compiler, flags, DR(TMP_REG2), src2, src2w, 0, 0));
- src2 = TMP_REG2;
+ PTR_FAIL_IF(emit_op_mem2(compiler, flags, DR(src2_tmp_reg), src2, src2w, 0, 0));
+ src2 = src2_tmp_reg;
}
jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
@@ -3046,10 +3442,9 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_cmp(struct sljit_compiler
if (compiler->delay_slot == MOVABLE_INS || (compiler->delay_slot != UNMOVABLE_INS && compiler->delay_slot != DR(src1) && compiler->delay_slot != DR(src2)))
jump->flags |= IS_MOVABLE;
PTR_FAIL_IF(push_inst(compiler, (type == SLJIT_EQUAL ? BNE : BEQ) | S(src1) | T(src2) | BRANCH_LENGTH, UNMOVABLE_INS));
- }
- else if (type >= SLJIT_SIG_LESS && (((src1 & SLJIT_IMM) && (src1w == 0)) || ((src2 & SLJIT_IMM) && (src2w == 0)))) {
+ } else if (type >= SLJIT_SIG_LESS && ((src1 == SLJIT_IMM && src1w == 0) || (src2 == SLJIT_IMM && src2w == 0))) {
inst = NOP;
- if ((src1 & SLJIT_IMM) && (src1w == 0)) {
+ if (src1 == SLJIT_IMM && src1w == 0) {
RESOLVE_IMM2();
switch (type) {
case SLJIT_SIG_LESS:
@@ -3097,7 +3492,7 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_cmp(struct sljit_compiler
else {
if (type == SLJIT_LESS || type == SLJIT_GREATER_EQUAL || type == SLJIT_SIG_LESS || type == SLJIT_SIG_GREATER_EQUAL) {
RESOLVE_IMM1();
- if ((src2 & SLJIT_IMM) && src2w <= SIMM_MAX && src2w >= SIMM_MIN)
+ if (src2 == SLJIT_IMM && src2w <= SIMM_MAX && src2w >= SIMM_MIN)
PTR_FAIL_IF(push_inst(compiler, (type <= SLJIT_LESS_EQUAL ? SLTIU : SLTI) | S(src1) | T(TMP_REG1) | IMM(src2w), DR(TMP_REG1)));
else {
RESOLVE_IMM2();
@@ -3107,7 +3502,7 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_cmp(struct sljit_compiler
}
else {
RESOLVE_IMM2();
- if ((src1 & SLJIT_IMM) && src1w <= SIMM_MAX && src1w >= SIMM_MIN)
+ if (src1 == SLJIT_IMM && src1w <= SIMM_MAX && src1w >= SIMM_MIN)
PTR_FAIL_IF(push_inst(compiler, (type <= SLJIT_LESS_EQUAL ? SLTIU : SLTI) | S(src2) | T(TMP_REG1) | IMM(src1w), DR(TMP_REG1)));
else {
RESOLVE_IMM1();
@@ -3120,7 +3515,7 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_cmp(struct sljit_compiler
PTR_FAIL_IF(push_inst(compiler, (type == SLJIT_EQUAL ? BNE : BEQ) | S(TMP_REG1) | TA(0) | BRANCH_LENGTH, UNMOVABLE_INS));
}
- PTR_FAIL_IF(push_inst(compiler, JR | S(TMP_REG2), UNMOVABLE_INS));
+ PTR_FAIL_IF(push_inst(compiler, JR | S(PIC_ADDR_REG), UNMOVABLE_INS));
jump->addr = compiler->size;
PTR_FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));
@@ -3142,9 +3537,6 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_cmp(struct sljit_compiler
#undef BR_T
#undef BR_F
-#undef FLOAT_DATA
-#undef FMT
-
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 src, sljit_sw srcw)
{
struct sljit_jump *jump = NULL;
@@ -3152,7 +3544,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compi
CHECK_ERROR();
CHECK(check_sljit_emit_ijump(compiler, type, src, srcw));
- if (src & SLJIT_IMM) {
+ if (src == SLJIT_IMM) {
jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
FAIL_IF(!jump);
set_jump(jump, compiler, JUMP_ADDR | ((type >= SLJIT_FAST_CALL) ? IS_JAL : 0));
@@ -3161,11 +3553,11 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compi
if (compiler->delay_slot != UNMOVABLE_INS)
jump->flags |= IS_MOVABLE;
- src = TMP_REG2;
+ src = PIC_ADDR_REG;
} else if (src & SLJIT_MEM) {
ADJUST_LOCAL_OFFSET(src, srcw);
- FAIL_IF(emit_op_mem(compiler, WORD_DATA | LOAD_DATA, DR(TMP_REG2), src, srcw));
- src = TMP_REG2;
+ FAIL_IF(emit_op_mem(compiler, WORD_DATA | LOAD_DATA, DR(PIC_ADDR_REG), src, srcw));
+ src = PIC_ADDR_REG;
}
if (type <= SLJIT_JUMP)
@@ -3184,8 +3576,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compi
#endif
}
- FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));
- return SLJIT_SUCCESS;
+ return push_inst(compiler, NOP, UNMOVABLE_INS);
}
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_s32 op,
@@ -3287,50 +3678,29 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *co
return emit_op(compiler, saved_op, mem_type, dst, dstw, dst, dstw, TMP_REG2, 0);
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_cmov(struct sljit_compiler *compiler, sljit_s32 type,
- sljit_s32 dst_reg,
- sljit_s32 src, sljit_sw srcw)
-{
-#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 1 && SLJIT_MIPS_REV < 6)
- sljit_ins ins;
-#endif /* SLJIT_MIPS_REV >= 1 && SLJIT_MIPS_REV < 6 */
-
- CHECK_ERROR();
- CHECK(check_sljit_emit_cmov(compiler, type, dst_reg, src, srcw));
-
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 1 && SLJIT_MIPS_REV < 6)
- if (SLJIT_UNLIKELY(src & SLJIT_IMM)) {
-#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
- if (type & SLJIT_32)
- srcw = (sljit_s32)srcw;
-#endif
- FAIL_IF(load_immediate(compiler, DR(TMP_REG1), srcw));
- src = TMP_REG1;
- srcw = 0;
- }
-
+static sljit_ins get_select_cc(sljit_s32 type, sljit_s32 is_float)
+{
switch (type & ~SLJIT_32) {
case SLJIT_EQUAL:
- ins = MOVZ | TA(EQUAL_FLAG);
- break;
+ return (is_float ? MOVZ_S : MOVZ) | TA(EQUAL_FLAG);
case SLJIT_NOT_EQUAL:
- ins = MOVN | TA(EQUAL_FLAG);
- break;
+ return (is_float ? MOVN_S : MOVN) | TA(EQUAL_FLAG);
case SLJIT_LESS:
case SLJIT_GREATER:
case SLJIT_SIG_LESS:
case SLJIT_SIG_GREATER:
case SLJIT_OVERFLOW:
- ins = MOVN | TA(OTHER_FLAG);
- break;
+ case SLJIT_CARRY:
+ return (is_float ? MOVN_S : MOVN) | TA(OTHER_FLAG);
case SLJIT_GREATER_EQUAL:
case SLJIT_LESS_EQUAL:
case SLJIT_SIG_GREATER_EQUAL:
case SLJIT_SIG_LESS_EQUAL:
case SLJIT_NOT_OVERFLOW:
- ins = MOVZ | TA(OTHER_FLAG);
- break;
+ case SLJIT_NOT_CARRY:
+ return (is_float ? MOVZ_S : MOVZ) | TA(OTHER_FLAG);
case SLJIT_F_EQUAL:
case SLJIT_F_LESS:
case SLJIT_F_LESS_EQUAL:
@@ -3341,8 +3711,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_cmov(struct sljit_compiler *compil
case SLJIT_UNORDERED_OR_LESS_EQUAL:
case SLJIT_ORDERED_LESS_EQUAL:
case SLJIT_UNORDERED:
- ins = MOVT;
- break;
+ return is_float ? MOVT_S : MOVT;
case SLJIT_F_NOT_EQUAL:
case SLJIT_F_GREATER_EQUAL:
case SLJIT_F_GREATER:
@@ -3353,21 +3722,159 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_cmov(struct sljit_compiler *compil
case SLJIT_ORDERED_GREATER:
case SLJIT_UNORDERED_OR_GREATER:
case SLJIT_ORDERED:
- ins = MOVF;
- break;
+ return is_float ? MOVF_S : MOVF;
default:
- ins = MOVZ | TA(OTHER_FLAG);
SLJIT_UNREACHABLE();
- break;
+ return (is_float ? MOVZ_S : MOVZ) | TA(OTHER_FLAG);
}
+}
+
+#endif /* SLJIT_MIPS_REV >= 1 */
- return push_inst(compiler, ins | S(src) | D(dst_reg), DR(dst_reg));
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_select(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 dst_reg,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2_reg)
+{
+#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
+ sljit_s32 inp_flags = ((type & SLJIT_32) ? INT_DATA : WORD_DATA) | LOAD_DATA;
+ sljit_ins mov_ins = (type & SLJIT_32) ? ADDU : DADDU;
+#else /* !SLJIT_CONFIG_MIPS_64 */
+ sljit_s32 inp_flags = WORD_DATA | LOAD_DATA;
+ sljit_ins mov_ins = ADDU;
+#endif /* SLJIT_CONFIG_MIPS_64 */
+
+#if !(defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 1 && SLJIT_MIPS_REV < 6)
+ struct sljit_label *label;
+ struct sljit_jump *jump;
+#endif /* !(SLJIT_MIPS_REV >= 1 && SLJIT_MIPS_REV < 6) */
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_select(compiler, type, dst_reg, src1, src1w, src2_reg));
+ ADJUST_LOCAL_OFFSET(src1, src1w);
+
+#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 1 && SLJIT_MIPS_REV < 6)
+ if (src1 & SLJIT_MEM) {
+ FAIL_IF(emit_op_mem(compiler, inp_flags, DR(TMP_REG1), src1, src1w));
+ src1 = TMP_REG1;
+ } else if (src1 == SLJIT_IMM) {
+#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
+ if (type & SLJIT_32)
+ src1w = (sljit_s32)src1w;
+#endif
+ FAIL_IF(load_immediate(compiler, DR(TMP_REG1), src1w));
+ src1 = TMP_REG1;
+ }
+
+ if (dst_reg != src2_reg) {
+ if (dst_reg == src1) {
+ src1 = src2_reg;
+ type ^= 0x1;
+ } else
+ FAIL_IF(push_inst(compiler, mov_ins | S(src2_reg) | TA(0) | D(dst_reg), DR(dst_reg)));
+ }
+
+ return push_inst(compiler, get_select_cc(type, 0) | S(src1) | D(dst_reg), DR(dst_reg));
#else /* SLJIT_MIPS_REV < 1 || SLJIT_MIPS_REV >= 6 */
- return sljit_emit_cmov_generic(compiler, type, dst_reg, src, srcw);
+ if (dst_reg != src2_reg) {
+ if (dst_reg == src1) {
+ src1 = src2_reg;
+ src1w = 0;
+ type ^= 0x1;
+ } else {
+ if (ADDRESSING_DEPENDS_ON(src1, dst_reg)) {
+ FAIL_IF(push_inst(compiler, ADDU_W | S(dst_reg) | TA(0) | D(TMP_REG1), DR(TMP_REG1)));
+
+ if ((src1 & REG_MASK) == dst_reg)
+ src1 = (src1 & ~REG_MASK) | TMP_REG1;
+
+ if (OFFS_REG(src1) == dst_reg)
+ src1 = (src1 & ~OFFS_REG_MASK) | TO_OFFS_REG(TMP_REG1);
+ }
+
+ FAIL_IF(push_inst(compiler, mov_ins | S(src2_reg) | TA(0) | D(dst_reg), DR(dst_reg)));
+ }
+ }
+
+ SLJIT_SKIP_CHECKS(compiler);
+ jump = sljit_emit_jump(compiler, (type & ~SLJIT_32) ^ 0x1);
+ FAIL_IF(!jump);
+
+ if (src1 & SLJIT_MEM) {
+ FAIL_IF(emit_op_mem(compiler, inp_flags, DR(dst_reg), src1, src1w));
+ } else if (src1 == SLJIT_IMM) {
+#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
+ if (type & SLJIT_32)
+ src1w = (sljit_s32)src1w;
+#endif /* SLJIT_CONFIG_MIPS_64 */
+ FAIL_IF(load_immediate(compiler, DR(dst_reg), src1w));
+ } else
+ FAIL_IF(push_inst(compiler, mov_ins | S(src1) | TA(0) | D(dst_reg), DR(dst_reg)));
+
+ SLJIT_SKIP_CHECKS(compiler);
+ label = sljit_emit_label(compiler);
+ FAIL_IF(!label);
+
+ sljit_set_label(jump, label);
+ return SLJIT_SUCCESS;
#endif /* SLJIT_MIPS_REV >= 1 */
}
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fselect(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 dst_freg,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2_freg)
+{
+#if !(defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 1 && SLJIT_MIPS_REV < 6)
+ struct sljit_label *label;
+ struct sljit_jump *jump;
+#endif /* !(SLJIT_MIPS_REV >= 1 && SLJIT_MIPS_REV < 6) */
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_fselect(compiler, type, dst_freg, src1, src1w, src2_freg));
+
+ ADJUST_LOCAL_OFFSET(src1, src1w);
+
+ if (dst_freg != src2_freg) {
+ if (dst_freg == src1) {
+ src1 = src2_freg;
+ src1w = 0;
+ type ^= 0x1;
+ } else
+ FAIL_IF(push_inst(compiler, MOV_fmt(FMT(type)) | FS(src2_freg) | FD(dst_freg), MOVABLE_INS));
+ }
+
+#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 1 && SLJIT_MIPS_REV < 6)
+ if (src1 & SLJIT_MEM) {
+ FAIL_IF(emit_op_mem(compiler, FLOAT_DATA(type) | LOAD_DATA, FR(TMP_FREG2), src1, src1w));
+ src1 = TMP_FREG2;
+ }
+
+ return push_inst(compiler, get_select_cc(type, 1) | FMT(type) | FS(src1) | FD(dst_freg), MOVABLE_INS);
+
+#else /* SLJIT_MIPS_REV < 1 || SLJIT_MIPS_REV >= 6 */
+ SLJIT_SKIP_CHECKS(compiler);
+ jump = sljit_emit_jump(compiler, (type & ~SLJIT_32) ^ 0x1);
+ FAIL_IF(!jump);
+
+ if (src1 & SLJIT_MEM)
+ FAIL_IF(emit_op_mem(compiler, FLOAT_DATA(type) | LOAD_DATA, FR(dst_freg), src1, src1w));
+ else
+ FAIL_IF(push_inst(compiler, MOV_fmt(FMT(type)) | FS(src1) | FD(dst_freg), MOVABLE_INS));
+
+ SLJIT_SKIP_CHECKS(compiler);
+ label = sljit_emit_label(compiler);
+ FAIL_IF(!label);
+
+ sljit_set_label(jump, label);
+ return SLJIT_SUCCESS;
+#endif /* SLJIT_MIPS_REV >= 1 */
+}
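When the conditional-move forms (MOVZ/MOVN/MOVT and their floating-point variants) are unavailable, both sljit_emit_select and sljit_emit_fselect fall back to a short branch: the destination is first loaded with the second source, and a jump on the inverted condition skips the load of the first source. A conceptual C model of that fallback (names are illustrative only):

static double select_model(int condition, double src1, double src2)
{
    double dst = src2;      /* destination pre-loaded with src2              */
    if (condition)          /* the emitted jump tests the inverted condition */
        dst = src1;         /* overwritten only when the condition holds     */
    return dst;
}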
+
+#undef FLOAT_DATA
+#undef FMT
+
static sljit_s32 update_mem_addr(struct sljit_compiler *compiler, sljit_s32 *mem, sljit_sw *memw, sljit_s16 max_offset)
{
sljit_s32 arg = *mem;
@@ -3410,21 +3917,33 @@ static sljit_s32 update_mem_addr(struct sljit_compiler *compiler, sljit_s32 *mem
}
#if (defined SLJIT_LITTLE_ENDIAN && SLJIT_LITTLE_ENDIAN)
-#define MEM16_IMM_FIRST(memw) IMM((memw) + 1)
-#define MEM16_IMM_SECOND(memw) IMM(memw)
-#define MEMF64_FS_FIRST(freg) FS(freg)
-#define MEMF64_FS_SECOND(freg) (FS(freg) | ((sljit_ins)1 << 11))
+#define IMM_LEFT(memw) IMM((memw) + SSIZE_OF(sw) - 1)
+#define IMM_RIGHT(memw) IMM(memw)
+#define IMM_32_LEFT(memw) IMM((memw) + SSIZE_OF(s32) - 1)
+#define IMM_32_RIGHT(memw) IMM(memw)
+#define IMM_F64_FIRST_LEFT(memw) IMM((memw) + SSIZE_OF(s32) - 1)
+#define IMM_F64_FIRST_RIGHT(memw) IMM(memw)
+#define IMM_F64_SECOND_LEFT(memw) IMM((memw) + SSIZE_OF(f64) - 1)
+#define IMM_F64_SECOND_RIGHT(memw) IMM((memw) + SSIZE_OF(s32))
+#define IMM_16_FIRST(memw) IMM((memw) + 1)
+#define IMM_16_SECOND(memw) IMM(memw)
#else /* !SLJIT_LITTLE_ENDIAN */
-#define MEM16_IMM_FIRST(memw) IMM(memw)
-#define MEM16_IMM_SECOND(memw) IMM((memw) + 1)
-#define MEMF64_FS_FIRST(freg) (FS(freg) | ((sljit_ins)1 << 11))
-#define MEMF64_FS_SECOND(freg) FS(freg)
+#define IMM_LEFT(memw) IMM(memw)
+#define IMM_RIGHT(memw) IMM((memw) + SSIZE_OF(sw) - 1)
+#define IMM_32_LEFT(memw) IMM(memw)
+#define IMM_32_RIGHT(memw) IMM((memw) + SSIZE_OF(s32) - 1)
+#define IMM_F64_FIRST_LEFT(memw) IMM((memw) + SSIZE_OF(s32))
+#define IMM_F64_FIRST_RIGHT(memw) IMM((memw) + SSIZE_OF(f64) - 1)
+#define IMM_F64_SECOND_LEFT(memw) IMM(memw)
+#define IMM_F64_SECOND_RIGHT(memw) IMM((memw) + SSIZE_OF(s32) - 1)
+#define IMM_16_FIRST(memw) IMM(memw)
+#define IMM_16_SECOND(memw) IMM((memw) + 1)
#endif /* SLJIT_LITTLE_ENDIAN */
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
-#define MEM_CHECK_UNALIGNED(type) ((type) & (SLJIT_MEM_UNALIGNED | SLJIT_MEM_UNALIGNED_16))
+#define MEM_CHECK_UNALIGNED(type) ((type) & (SLJIT_MEM_UNALIGNED | SLJIT_MEM_ALIGNED_16))
#else /* !SLJIT_CONFIG_MIPS_32 */
-#define MEM_CHECK_UNALIGNED(type) ((type) & (SLJIT_MEM_UNALIGNED | SLJIT_MEM_UNALIGNED_16 | SLJIT_MEM_UNALIGNED_32))
+#define MEM_CHECK_UNALIGNED(type) ((type) & (SLJIT_MEM_UNALIGNED | SLJIT_MEM_ALIGNED_16 | SLJIT_MEM_ALIGNED_32))
#endif /* SLJIT_CONFIG_MIPS_32 */
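The renamed IMM_*LEFT/IMM_*RIGHT helpers hand the unaligned-access pairs their byte offsets: the "left" instruction (LWL/SWL/LDL/SDL) always addresses the most-significant byte of the datum and the "right" instruction (LWR/SWR/LDR/SDR) the least-significant one, so the two offsets swap with endianness. As a worked example (offset 5 chosen purely for illustration), a 32-bit access at memw == 5 covers bytes 5..8: on a little-endian target IMM_32_LEFT(5) expands to IMM(8) and IMM_32_RIGHT(5) to IMM(5), while on a big-endian target the same pair expands to IMM(5) and IMM(8).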
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_mem(struct sljit_compiler *compiler, sljit_s32 type,
@@ -3461,10 +3980,10 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_mem(struct sljit_compiler *compile
ins_right = ((type & SLJIT_MEM_STORE) ? SDR : LDR) | S(mem);
#endif /* SLJIT_CONFIG_MIPS_32 */
- FAIL_IF(push_inst(compiler, ins | T(REG_PAIR_FIRST(reg)) | IMM(memw), DR(REG_PAIR_FIRST(reg))));
- FAIL_IF(push_inst(compiler, ins_right | T(REG_PAIR_FIRST(reg)) | IMM(memw + (SSIZE_OF(sw) - 1)), DR(REG_PAIR_FIRST(reg))));
- FAIL_IF(push_inst(compiler, ins | T(REG_PAIR_SECOND(reg)) | IMM(memw + SSIZE_OF(sw)), DR(REG_PAIR_SECOND(reg))));
- return push_inst(compiler, ins_right | T(REG_PAIR_SECOND(reg)) | IMM((memw + 2 * SSIZE_OF(sw) - 1)), DR(REG_PAIR_SECOND(reg)));
+ FAIL_IF(push_inst(compiler, ins | T(REG_PAIR_FIRST(reg)) | IMM_LEFT(memw), DR(REG_PAIR_FIRST(reg))));
+ FAIL_IF(push_inst(compiler, ins_right | T(REG_PAIR_FIRST(reg)) | IMM_RIGHT(memw), DR(REG_PAIR_FIRST(reg))));
+ FAIL_IF(push_inst(compiler, ins | T(REG_PAIR_SECOND(reg)) | IMM_LEFT(memw + SSIZE_OF(sw)), DR(REG_PAIR_SECOND(reg))));
+ return push_inst(compiler, ins_right | T(REG_PAIR_SECOND(reg)) | IMM_RIGHT(memw + SSIZE_OF(sw)), DR(REG_PAIR_SECOND(reg)));
}
#endif /* !(SLJIT_MIPS_REV >= 6) */
@@ -3505,8 +4024,8 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_mem(struct sljit_compiler *compile
if (type & SLJIT_MEM_STORE) {
FAIL_IF(push_inst(compiler, SRA_W | T(reg) | D(TMP_REG2) | SH_IMM(8), DR(TMP_REG2)));
- FAIL_IF(push_inst(compiler, data_transfer_insts[BYTE_DATA] | S(mem) | T(TMP_REG2) | MEM16_IMM_FIRST(memw), MOVABLE_INS));
- return push_inst(compiler, data_transfer_insts[BYTE_DATA] | S(mem) | T(reg) | MEM16_IMM_SECOND(memw), MOVABLE_INS);
+ FAIL_IF(push_inst(compiler, data_transfer_insts[BYTE_DATA] | S(mem) | T(TMP_REG2) | IMM_16_FIRST(memw), MOVABLE_INS));
+ return push_inst(compiler, data_transfer_insts[BYTE_DATA] | S(mem) | T(reg) | IMM_16_SECOND(memw), MOVABLE_INS);
}
flags = BYTE_DATA | LOAD_DATA;
@@ -3514,15 +4033,15 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_mem(struct sljit_compiler *compile
if (op == SLJIT_MOV_S16)
flags |= SIGNED_DATA;
- FAIL_IF(push_inst(compiler, data_transfer_insts[flags] | S(mem) | T(TMP_REG2) | MEM16_IMM_FIRST(memw), DR(TMP_REG2)));
- FAIL_IF(push_inst(compiler, data_transfer_insts[BYTE_DATA | LOAD_DATA] | S(mem) | T(reg) | MEM16_IMM_SECOND(memw), DR(reg)));
+ FAIL_IF(push_inst(compiler, data_transfer_insts[flags] | S(mem) | T(TMP_REG2) | IMM_16_FIRST(memw), DR(TMP_REG2)));
+ FAIL_IF(push_inst(compiler, data_transfer_insts[BYTE_DATA | LOAD_DATA] | S(mem) | T(reg) | IMM_16_SECOND(memw), DR(reg)));
FAIL_IF(push_inst(compiler, SLL_W | T(TMP_REG2) | D(TMP_REG2) | SH_IMM(8), DR(TMP_REG2)));
return push_inst(compiler, OR | S(reg) | T(TMP_REG2) | D(reg), DR(reg));
case SLJIT_MOV:
case SLJIT_MOV_P:
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
- if (type & SLJIT_MEM_UNALIGNED_32) {
+ if (type & SLJIT_MEM_ALIGNED_32) {
flags = WORD_DATA;
if (!(type & SLJIT_MEM_STORE))
flags |= LOAD_DATA;
@@ -3534,8 +4053,8 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_mem(struct sljit_compiler *compile
SLJIT_ASSERT(FAST_IS_REG(mem) && mem != TMP_REG2);
if (type & SLJIT_MEM_STORE) {
- FAIL_IF(push_inst(compiler, SDL | S(mem) | T(reg) | IMM(memw), MOVABLE_INS));
- return push_inst(compiler, SDR | S(mem) | T(reg) | IMM(memw + 7), MOVABLE_INS);
+ FAIL_IF(push_inst(compiler, SDL | S(mem) | T(reg) | IMM_LEFT(memw), MOVABLE_INS));
+ return push_inst(compiler, SDR | S(mem) | T(reg) | IMM_RIGHT(memw), MOVABLE_INS);
}
if (mem == reg) {
@@ -3543,8 +4062,8 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_mem(struct sljit_compiler *compile
mem = TMP_REG1;
}
- FAIL_IF(push_inst(compiler, LDL | S(mem) | T(reg) | IMM(memw), DR(reg)));
- return push_inst(compiler, LDR | S(mem) | T(reg) | IMM(memw + 7), DR(reg));
+ FAIL_IF(push_inst(compiler, LDL | S(mem) | T(reg) | IMM_LEFT(memw), DR(reg)));
+ return push_inst(compiler, LDR | S(mem) | T(reg) | IMM_RIGHT(memw), DR(reg));
#endif /* SLJIT_CONFIG_MIPS_32 */
}
@@ -3552,8 +4071,8 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_mem(struct sljit_compiler *compile
SLJIT_ASSERT(FAST_IS_REG(mem) && mem != TMP_REG2);
if (type & SLJIT_MEM_STORE) {
- FAIL_IF(push_inst(compiler, SWL | S(mem) | T(reg) | IMM(memw), MOVABLE_INS));
- return push_inst(compiler, SWR | S(mem) | T(reg) | IMM(memw + 3), MOVABLE_INS);
+ FAIL_IF(push_inst(compiler, SWL | S(mem) | T(reg) | IMM_32_LEFT(memw), MOVABLE_INS));
+ return push_inst(compiler, SWR | S(mem) | T(reg) | IMM_32_RIGHT(memw), MOVABLE_INS);
}
if (mem == reg) {
@@ -3561,18 +4080,18 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_mem(struct sljit_compiler *compile
mem = TMP_REG1;
}
- FAIL_IF(push_inst(compiler, LWL | S(mem) | T(reg) | IMM(memw), DR(reg)));
+ FAIL_IF(push_inst(compiler, LWL | S(mem) | T(reg) | IMM_32_LEFT(memw), DR(reg)));
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
- return push_inst(compiler, LWR | S(mem) | T(reg) | IMM(memw + 3), DR(reg));
+ return push_inst(compiler, LWR | S(mem) | T(reg) | IMM_32_RIGHT(memw), DR(reg));
#else /* !SLJIT_CONFIG_MIPS_32 */
- FAIL_IF(push_inst(compiler, LWR | S(mem) | T(reg) | IMM(memw + 3), DR(reg)));
+ FAIL_IF(push_inst(compiler, LWR | S(mem) | T(reg) | IMM_32_RIGHT(memw), DR(reg)));
if (op != SLJIT_MOV_U32)
return SLJIT_SUCCESS;
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 2)
- return push_inst(compiler, DINSU | T(reg) | SA(0) | (31 << 11) | (0 << 11), DR(reg));
-#else /* SLJIT_MIPS_REV < 1 */
+ return push_inst(compiler, DINSU | T(reg) | SA(0) | (31 << 11), DR(reg));
+#else /* SLJIT_MIPS_REV < 2 */
FAIL_IF(push_inst(compiler, DSLL32 | T(reg) | D(reg) | SH_IMM(0), DR(reg)));
return push_inst(compiler, DSRL32 | T(reg) | D(reg) | SH_IMM(0), DR(reg));
#endif /* SLJIT_MIPS_REV >= 2 */
@@ -3595,77 +4114,97 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fmem(struct sljit_compiler *compil
if (type & SLJIT_MEM_STORE) {
if (type & SLJIT_32) {
FAIL_IF(push_inst(compiler, MFC1 | T(TMP_REG2) | FS(freg), DR(TMP_REG2)));
-#if (!defined SLJIT_MIPS_REV || SLJIT_MIPS_REV <= 3)
+#if !defined(SLJIT_MIPS_REV) || (SLJIT_CONFIG_MIPS_32 && SLJIT_MIPS_REV <= 1)
FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));
-#endif
- FAIL_IF(push_inst(compiler, SWL | S(mem) | T(TMP_REG2) | IMM(memw), MOVABLE_INS));
- return push_inst(compiler, SWR | S(mem) | T(TMP_REG2) | IMM(memw + 3), MOVABLE_INS);
+#endif /* MIPS III */
+ FAIL_IF(push_inst(compiler, SWL | S(mem) | T(TMP_REG2) | IMM_32_LEFT(memw), MOVABLE_INS));
+ return push_inst(compiler, SWR | S(mem) | T(TMP_REG2) | IMM_32_RIGHT(memw), MOVABLE_INS);
}
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
- FAIL_IF(push_inst(compiler, MFC1 | T(TMP_REG2) | MEMF64_FS_FIRST(freg), DR(TMP_REG2)));
-#if (!defined SLJIT_MIPS_REV || SLJIT_MIPS_REV <= 3)
+ FAIL_IF(push_inst(compiler, MFC1 | T(TMP_REG2) | FS(freg), DR(TMP_REG2)));
+#if !defined(SLJIT_MIPS_REV) || SLJIT_MIPS_REV <= 1
FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));
+#endif /* MIPS III */
+ FAIL_IF(push_inst(compiler, SWL | S(mem) | T(TMP_REG2) | IMM_F64_FIRST_LEFT(memw), MOVABLE_INS));
+ FAIL_IF(push_inst(compiler, SWR | S(mem) | T(TMP_REG2) | IMM_F64_FIRST_RIGHT(memw), MOVABLE_INS));
+ switch (cpu_feature_list & CPU_FEATURE_FR) {
+#if defined(SLJIT_MIPS_REV) && SLJIT_MIPS_REV >= 2
+ case CPU_FEATURE_FR:
+ FAIL_IF(push_inst(compiler, MFHC1 | T(TMP_REG2) | FS(freg), DR(TMP_REG2)));
+ break;
+#endif /* SLJIT_MIPS_REV >= 2 */
+ default:
+ FAIL_IF(push_inst(compiler, MFC1 | T(TMP_REG2) | FS(freg) | (1 << 11), DR(TMP_REG2)));
+#if !defined(SLJIT_MIPS_REV) || SLJIT_MIPS_REV <= 1
+ FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));
#endif
- FAIL_IF(push_inst(compiler, SWL | S(mem) | T(TMP_REG2) | IMM(memw), MOVABLE_INS));
- FAIL_IF(push_inst(compiler, SWR | S(mem) | T(TMP_REG2) | IMM(memw + 3), MOVABLE_INS));
+ break;
+ }
- FAIL_IF(push_inst(compiler, MFC1 | T(TMP_REG2) | MEMF64_FS_SECOND(freg), DR(TMP_REG2)));
-#if (!defined SLJIT_MIPS_REV || SLJIT_MIPS_REV <= 3)
- FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));
-#endif
- FAIL_IF(push_inst(compiler, SWL | S(mem) | T(TMP_REG2) | IMM(memw + 4), MOVABLE_INS));
- return push_inst(compiler, SWR | S(mem) | T(TMP_REG2) | IMM(memw + 7), MOVABLE_INS);
+ FAIL_IF(push_inst(compiler, SWL | S(mem) | T(TMP_REG2) | IMM_F64_SECOND_LEFT(memw), MOVABLE_INS));
+ return push_inst(compiler, SWR | S(mem) | T(TMP_REG2) | IMM_F64_SECOND_RIGHT(memw), MOVABLE_INS);
#else /* !SLJIT_CONFIG_MIPS_32 */
- FAIL_IF(push_inst(compiler, MFC1 | (1 << 21) | T(TMP_REG2) | FS(freg), DR(TMP_REG2)));
-#if (!defined SLJIT_MIPS_REV || SLJIT_MIPS_REV <= 3)
+ FAIL_IF(push_inst(compiler, DMFC1 | T(TMP_REG2) | FS(freg), DR(TMP_REG2)));
+#if !defined(SLJIT_MIPS_REV) || SLJIT_MIPS_REV <= 1
FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));
-#endif
- FAIL_IF(push_inst(compiler, SDL | S(mem) | T(TMP_REG2) | IMM(memw), MOVABLE_INS));
- return push_inst(compiler, SDR | S(mem) | T(TMP_REG2) | IMM(memw + 7), MOVABLE_INS);
+#endif /* MIPS III */
+ FAIL_IF(push_inst(compiler, SDL | S(mem) | T(TMP_REG2) | IMM_LEFT(memw), MOVABLE_INS));
+ return push_inst(compiler, SDR | S(mem) | T(TMP_REG2) | IMM_RIGHT(memw), MOVABLE_INS);
#endif /* SLJIT_CONFIG_MIPS_32 */
}
if (type & SLJIT_32) {
- FAIL_IF(push_inst(compiler, LWL | S(mem) | T(TMP_REG2) | IMM(memw), DR(TMP_REG2)));
- FAIL_IF(push_inst(compiler, LWR | S(mem) | T(TMP_REG2) | IMM(memw + 3), DR(TMP_REG2)));
+ FAIL_IF(push_inst(compiler, LWL | S(mem) | T(TMP_REG2) | IMM_32_LEFT(memw), DR(TMP_REG2)));
+ FAIL_IF(push_inst(compiler, LWR | S(mem) | T(TMP_REG2) | IMM_32_RIGHT(memw), DR(TMP_REG2)));
FAIL_IF(push_inst(compiler, MTC1 | T(TMP_REG2) | FS(freg), MOVABLE_INS));
-#if (!defined SLJIT_MIPS_REV || SLJIT_MIPS_REV <= 3)
+#if !defined(SLJIT_MIPS_REV) || (SLJIT_CONFIG_MIPS_32 && SLJIT_MIPS_REV <= 1)
FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));
-#endif
+#endif /* MIPS III */
return SLJIT_SUCCESS;
}
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
- FAIL_IF(push_inst(compiler, LWL | S(mem) | T(TMP_REG2) | IMM(memw), DR(TMP_REG2)));
- FAIL_IF(push_inst(compiler, LWR | S(mem) | T(TMP_REG2) | IMM(memw + 3), DR(TMP_REG2)));
- FAIL_IF(push_inst(compiler, MTC1 | T(TMP_REG2) | MEMF64_FS_FIRST(freg), MOVABLE_INS));
-
- FAIL_IF(push_inst(compiler, LWL | S(mem) | T(TMP_REG2) | IMM(memw + 4), DR(TMP_REG2)));
- FAIL_IF(push_inst(compiler, LWR | S(mem) | T(TMP_REG2) | IMM(memw + 7), DR(TMP_REG2)));
- FAIL_IF(push_inst(compiler, MTC1 | T(TMP_REG2) | MEMF64_FS_SECOND(freg), MOVABLE_INS));
-#if (!defined SLJIT_MIPS_REV || SLJIT_MIPS_REV <= 3)
- FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));
-#endif
+ FAIL_IF(push_inst(compiler, LWL | S(mem) | T(TMP_REG2) | IMM_F64_FIRST_LEFT(memw), DR(TMP_REG2)));
+ FAIL_IF(push_inst(compiler, LWR | S(mem) | T(TMP_REG2) | IMM_F64_FIRST_RIGHT(memw), DR(TMP_REG2)));
+ FAIL_IF(push_inst(compiler, MTC1 | T(TMP_REG2) | FS(freg), MOVABLE_INS));
+
+ FAIL_IF(push_inst(compiler, LWL | S(mem) | T(TMP_REG2) | IMM_F64_SECOND_LEFT(memw), DR(TMP_REG2)));
+ FAIL_IF(push_inst(compiler, LWR | S(mem) | T(TMP_REG2) | IMM_F64_SECOND_RIGHT(memw), DR(TMP_REG2)));
+ switch (cpu_feature_list & CPU_FEATURE_FR) {
+#if defined(SLJIT_MIPS_REV) && SLJIT_MIPS_REV >= 2
+ case CPU_FEATURE_FR:
+ return push_inst(compiler, MTHC1 | T(TMP_REG2) | FS(freg), MOVABLE_INS);
+#endif /* SLJIT_MIPS_REV >= 2 */
+ default:
+ FAIL_IF(push_inst(compiler, MTC1 | T(TMP_REG2) | FS(freg) | (1 << 11), MOVABLE_INS));
+ break;
+ }
#else /* !SLJIT_CONFIG_MIPS_32 */
- FAIL_IF(push_inst(compiler, LDL | S(mem) | T(TMP_REG2) | IMM(memw), DR(TMP_REG2)));
- FAIL_IF(push_inst(compiler, LDR | S(mem) | T(TMP_REG2) | IMM(memw + 7), DR(TMP_REG2)));
+ FAIL_IF(push_inst(compiler, LDL | S(mem) | T(TMP_REG2) | IMM_LEFT(memw), DR(TMP_REG2)));
+ FAIL_IF(push_inst(compiler, LDR | S(mem) | T(TMP_REG2) | IMM_RIGHT(memw), DR(TMP_REG2)));
- FAIL_IF(push_inst(compiler, MTC1 | (1 << 21) | T(TMP_REG2) | FS(freg), MOVABLE_INS));
-#if (!defined SLJIT_MIPS_REV || SLJIT_MIPS_REV <= 3)
- FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));
-#endif
+ FAIL_IF(push_inst(compiler, DMTC1 | T(TMP_REG2) | FS(freg), MOVABLE_INS));
#endif /* SLJIT_CONFIG_MIPS_32 */
+#if !defined(SLJIT_MIPS_REV) || SLJIT_MIPS_REV <= 1
+ FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));
+#endif /* MIPS III */
return SLJIT_SUCCESS;
}
#endif /* !SLJIT_MIPS_REV || SLJIT_MIPS_REV < 6 */
-#undef MEM16_IMM_FIRST
-#undef MEM16_IMM_SECOND
-#undef MEMF64_FS_FIRST
-#undef MEMF64_FS_SECOND
+#undef IMM_16_SECOND
+#undef IMM_16_FIRST
+#undef IMM_F64_SECOND_RIGHT
+#undef IMM_F64_SECOND_LEFT
+#undef IMM_F64_FIRST_RIGHT
+#undef IMM_F64_FIRST_LEFT
+#undef IMM_32_RIGHT
+#undef IMM_32_LEFT
+#undef IMM_RIGHT
+#undef IMM_LEFT
#undef MEM_CHECK_UNALIGNED
#undef TO_ARGW_HI
@@ -3692,18 +4231,18 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compi
return const_;
}
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_put_label* sljit_emit_put_label(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_mov_addr(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
{
- struct sljit_put_label *put_label;
+ struct sljit_jump *jump;
sljit_s32 dst_r;
CHECK_ERROR_PTR();
- CHECK_PTR(check_sljit_emit_put_label(compiler, dst, dstw));
+ CHECK_PTR(check_sljit_emit_mov_addr(compiler, dst, dstw));
ADJUST_LOCAL_OFFSET(dst, dstw);
- put_label = (struct sljit_put_label*)ensure_abuf(compiler, sizeof(struct sljit_put_label));
- PTR_FAIL_IF(!put_label);
- set_put_label(put_label, compiler, 0);
+ jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
+ PTR_FAIL_IF(!jump);
+ set_mov_addr(jump, compiler, 0);
dst_r = FAST_IS_REG(dst) ? dst : TMP_REG2;
PTR_FAIL_IF(push_inst(compiler, (sljit_ins)dst_r, UNMOVABLE_INS));
@@ -3716,5 +4255,5 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_put_label* sljit_emit_put_label(struct slj
if (dst & SLJIT_MEM)
PTR_FAIL_IF(emit_op_mem(compiler, WORD_DATA, DR(TMP_REG2), dst, dstw));
- return put_label;
+ return jump;
}
diff --git a/src/3rdparty/pcre2/src/sljit/sljitNativePPC_32.c b/src/3rdparty/pcre2/src/sljit/sljitNativePPC_32.c
index 9449e4b9d7..2352fad5d4 100644
--- a/src/3rdparty/pcre2/src/sljit/sljitNativePPC_32.c
+++ b/src/3rdparty/pcre2/src/sljit/sljitNativePPC_32.c
@@ -85,10 +85,6 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
}
return SLJIT_SUCCESS;
- case SLJIT_NOT:
- SLJIT_ASSERT(src1 == TMP_REG1);
- return push_inst(compiler, NOR | RC(flags) | S(src2) | A(dst) | B(src2));
-
case SLJIT_CLZ:
SLJIT_ASSERT(src1 == TMP_REG1);
return push_inst(compiler, CNTLZW | S(src2) | A(dst));
@@ -246,6 +242,10 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
FAIL_IF(push_inst(compiler, XORI | S(src1) | A(dst) | IMM(imm)));
return push_inst(compiler, XORIS | S(dst) | A(dst) | IMM(imm >> 16));
}
+ if (flags & ALT_FORM4) {
+ SLJIT_ASSERT(src1 == TMP_REG1);
+ return push_inst(compiler, NOR | RC(flags) | S(src2) | A(dst) | B(src2));
+ }
return push_inst(compiler, XOR | RC(flags) | S(src1) | A(dst) | B(src2));
case SLJIT_SHL:
@@ -325,6 +325,151 @@ static SLJIT_INLINE sljit_s32 emit_const(struct sljit_compiler *compiler, sljit_
return push_inst(compiler, ORI | S(reg) | A(reg) | IMM(init_value));
}
+static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_sw(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
+{
+ sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
+ sljit_s32 invert_sign = 1;
+
+ if (src == SLJIT_IMM) {
+ FAIL_IF(load_immediate(compiler, TMP_REG1, srcw ^ (sljit_sw)0x80000000));
+ src = TMP_REG1;
+ invert_sign = 0;
+ } else if (!FAST_IS_REG(src)) {
+ FAIL_IF(emit_op_mem(compiler, WORD_DATA | SIGNED_DATA | LOAD_DATA, TMP_REG1, src, srcw, TMP_REG1));
+ src = TMP_REG1;
+ }
+
+ /* First, a special double precision floating point value is constructed:
+ (2^53 + (src xor (2^31)))
+ The upper 32 bits of this number are a constant, and the lower 32 bits
+ are simply the value of the source argument. The xor 2^31 operation adds
+ 0x80000000 to the source argument, which moves it into the 0 - 0xffffffff
+ range. Finally we subtract 2^53 + 2^31 to get the converted value. */
+ FAIL_IF(push_inst(compiler, ADDIS | D(TMP_REG2) | A(0) | 0x4330));
+ if (invert_sign)
+ FAIL_IF(push_inst(compiler, XORIS | S(src) | A(TMP_REG1) | 0x8000));
+ FAIL_IF(push_inst(compiler, STW | S(TMP_REG2) | A(SLJIT_SP) | TMP_MEM_OFFSET_HI));
+ FAIL_IF(push_inst(compiler, STW | S(TMP_REG1) | A(SLJIT_SP) | TMP_MEM_OFFSET_LO));
+ FAIL_IF(push_inst(compiler, ADDIS | D(TMP_REG1) | A(0) | 0x8000));
+ FAIL_IF(push_inst(compiler, LFD | FS(TMP_FREG1) | A(SLJIT_SP) | TMP_MEM_OFFSET));
+ FAIL_IF(push_inst(compiler, STW | S(TMP_REG1) | A(SLJIT_SP) | TMP_MEM_OFFSET_LO));
+ FAIL_IF(push_inst(compiler, LFD | FS(TMP_FREG2) | A(SLJIT_SP) | TMP_MEM_OFFSET));
+
+ FAIL_IF(push_inst(compiler, FSUB | FD(dst_r) | FA(TMP_FREG1) | FB(TMP_FREG2)));
+
+ if (op & SLJIT_32)
+ FAIL_IF(push_inst(compiler, FRSP | FD(dst_r) | FB(dst_r)));
+
+ if (dst & SLJIT_MEM)
+ return emit_op_mem(compiler, FLOAT_DATA(op), TMP_FREG1, dst, dstw, TMP_REG1);
+ return SLJIT_SUCCESS;
+}
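The conversion above builds the double by hand in the stack scratch area. A hypothetical scalar model of the same bias trick is sketched below (the hex constants mirror the emitted ADDIS 0x4330, XORIS 0x8000 and second ADDIS 0x8000 words; names are illustrative). The unsigned variant that follows uses the same construction, just without the xor and with a bias whose low word is zero.

#include <stdint.h>
#include <string.h>

static double s32_to_double(int32_t x)
{
    uint64_t num_bits  = 0x4330000000000000ull | ((uint32_t)x ^ 0x80000000u);
    uint64_t bias_bits = 0x4330000080000000ull;
    double num, bias;
    memcpy(&num, &num_bits, sizeof(num));     /* first STW pair + LFD  */
    memcpy(&bias, &bias_bits, sizeof(bias));  /* second STW pair + LFD */
    return num - bias;                        /* exact: equals (double)x */
}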
+
+static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_uw(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
+{
+ sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
+
+ if (src == SLJIT_IMM) {
+ FAIL_IF(load_immediate(compiler, TMP_REG1, srcw));
+ src = TMP_REG1;
+ } else if (!FAST_IS_REG(src)) {
+ FAIL_IF(emit_op_mem(compiler, WORD_DATA | SIGNED_DATA | LOAD_DATA, TMP_REG1, src, srcw, TMP_REG1));
+ src = TMP_REG1;
+ }
+
+ /* First, a special double precision floating point value is constructed:
+ (2^53 + src)
+ The upper 32 bits of this number are a constant, and the lower 32 bits
+ are simply the value of the source argument. Finally we subtract 2^53
+ to get the converted value. */
+ FAIL_IF(push_inst(compiler, ADDIS | D(TMP_REG2) | A(0) | 0x4330));
+ FAIL_IF(push_inst(compiler, STW | S(src) | A(SLJIT_SP) | TMP_MEM_OFFSET_LO));
+ FAIL_IF(push_inst(compiler, STW | S(TMP_REG2) | A(SLJIT_SP) | TMP_MEM_OFFSET_HI));
+
+ FAIL_IF(push_inst(compiler, LFD | FS(TMP_FREG1) | A(SLJIT_SP) | TMP_MEM_OFFSET));
+ FAIL_IF(push_inst(compiler, STW | S(TMP_ZERO) | A(SLJIT_SP) | TMP_MEM_OFFSET_LO));
+ FAIL_IF(push_inst(compiler, LFD | FS(TMP_FREG2) | A(SLJIT_SP) | TMP_MEM_OFFSET));
+
+ FAIL_IF(push_inst(compiler, FSUB | FD(dst_r) | FA(TMP_FREG1) | FB(TMP_FREG2)));
+
+ if (op & SLJIT_32)
+ FAIL_IF(push_inst(compiler, FRSP | FD(dst_r) | FB(dst_r)));
+
+ if (dst & SLJIT_MEM)
+ return emit_op_mem(compiler, FLOAT_DATA(op), TMP_FREG1, dst, dstw, TMP_REG1);
+ return SLJIT_SUCCESS;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fset64(struct sljit_compiler *compiler,
+ sljit_s32 freg, sljit_f64 value)
+{
+ union {
+ sljit_s32 imm[2];
+ sljit_f64 value;
+ } u;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_fset64(compiler, freg, value));
+
+ u.value = value;
+
+ if (u.imm[0] != 0)
+ FAIL_IF(load_immediate(compiler, TMP_REG1, u.imm[0]));
+ if (u.imm[1] != 0)
+ FAIL_IF(load_immediate(compiler, TMP_REG2, u.imm[1]));
+
+ /* Saved in the same endianness. */
+ FAIL_IF(push_inst(compiler, STW | S(u.imm[0] != 0 ? TMP_REG1 : TMP_ZERO) | A(SLJIT_SP) | TMP_MEM_OFFSET));
+ FAIL_IF(push_inst(compiler, STW | S(u.imm[1] != 0 ? TMP_REG2 : TMP_ZERO) | A(SLJIT_SP) | (TMP_MEM_OFFSET + sizeof(sljit_s32))));
+ return push_inst(compiler, LFD | FS(freg) | A(SLJIT_SP) | TMP_MEM_OFFSET);
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fcopy(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 freg, sljit_s32 reg)
+{
+ sljit_s32 reg2 = 0;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_fcopy(compiler, op, freg, reg));
+
+ if (op & SLJIT_32) {
+ if (op == SLJIT_COPY32_TO_F32) {
+ FAIL_IF(push_inst(compiler, STW | S(reg) | A(SLJIT_SP) | TMP_MEM_OFFSET));
+ return push_inst(compiler, LFS | FS(freg) | A(SLJIT_SP) | TMP_MEM_OFFSET);
+ }
+
+ FAIL_IF(push_inst(compiler, STFS | FS(freg) | A(SLJIT_SP) | TMP_MEM_OFFSET));
+ return push_inst(compiler, LWZ | S(reg) | A(SLJIT_SP) | TMP_MEM_OFFSET);
+ }
+
+ if (reg & REG_PAIR_MASK) {
+ reg2 = REG_PAIR_SECOND(reg);
+ reg = REG_PAIR_FIRST(reg);
+ }
+
+ if (op == SLJIT_COPY_TO_F64) {
+ FAIL_IF(push_inst(compiler, STW | S(reg) | A(SLJIT_SP) | TMP_MEM_OFFSET_HI));
+
+ if (reg2 != 0)
+ FAIL_IF(push_inst(compiler, STW | S(reg2) | A(SLJIT_SP) | TMP_MEM_OFFSET_LO));
+ else
+ FAIL_IF(push_inst(compiler, STFD | FS(freg) | A(SLJIT_SP) | TMP_MEM_OFFSET_LO));
+
+ return push_inst(compiler, LFD | FS(freg) | A(SLJIT_SP) | TMP_MEM_OFFSET);
+ }
+
+ FAIL_IF(push_inst(compiler, STFD | FS(freg) | A(SLJIT_SP) | TMP_MEM_OFFSET));
+
+ if (reg2 != 0)
+ FAIL_IF(push_inst(compiler, LWZ | S(reg2) | A(SLJIT_SP) | TMP_MEM_OFFSET_LO));
+
+ return push_inst(compiler, LWZ | S(reg) | A(SLJIT_SP) | TMP_MEM_OFFSET_HI);
+}
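sljit_emit_fcopy has no direct GPR-to-FPR move on 32-bit PowerPC, so every variant bounces the bits through the TMP_MEM_OFFSET scratch slot with a store/load pair. A rough host-side model of SLJIT_COPY_TO_F64 for the case where both halves of a register pair are supplied (function and parameter names are illustrative):

#include <stdint.h>
#include <string.h>

static double copy_to_f64(uint32_t hi_word, uint32_t lo_word)
{
    uint64_t bits = ((uint64_t)hi_word << 32) | lo_word;  /* REG_PAIR_FIRST : REG_PAIR_SECOND */
    double d;
    memcpy(&d, &bits, sizeof(d));   /* stands in for the two STW plus the LFD */
    return d;
}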
+
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_jump_addr(sljit_uw addr, sljit_uw new_target, sljit_sw executable_offset)
{
sljit_ins *inst = (sljit_ins *)addr;
diff --git a/src/3rdparty/pcre2/src/sljit/sljitNativePPC_64.c b/src/3rdparty/pcre2/src/sljit/sljitNativePPC_64.c
index 80549108bf..b3cf9d074d 100644
--- a/src/3rdparty/pcre2/src/sljit/sljitNativePPC_64.c
+++ b/src/3rdparty/pcre2/src/sljit/sljitNativePPC_64.c
@@ -49,7 +49,7 @@ static sljit_s32 load_immediate(struct sljit_compiler *compiler, sljit_s32 reg,
if (imm <= SIMM_MAX && imm >= SIMM_MIN)
return push_inst(compiler, ADDI | D(reg) | A(0) | IMM(imm));
- if (!(imm & ~0xffff))
+ if (((sljit_uw)imm >> 16) == 0)
return push_inst(compiler, ORI | S(TMP_ZERO) | A(reg) | IMM(imm));
if (imm <= 0x7fffffffl && imm >= -0x80000000l) {
@@ -57,6 +57,11 @@ static sljit_s32 load_immediate(struct sljit_compiler *compiler, sljit_s32 reg,
return (imm & 0xffff) ? push_inst(compiler, ORI | S(reg) | A(reg) | IMM(imm)) : SLJIT_SUCCESS;
}
+ if (((sljit_uw)imm >> 32) == 0) {
+ FAIL_IF(push_inst(compiler, ORIS | S(TMP_ZERO) | A(reg) | IMM(imm >> 16)));
+ return (imm & 0xffff) ? push_inst(compiler, ORI | S(reg) | A(reg) | IMM(imm)) : SLJIT_SUCCESS;
+ }
+
/* Count leading zeroes. */
tmp = (sljit_uw)((imm >= 0) ? imm : ~imm);
ASM_SLJIT_CLZ(tmp, shift);
@@ -198,11 +203,6 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
}
return SLJIT_SUCCESS;
- case SLJIT_NOT:
- SLJIT_ASSERT(src1 == TMP_REG1);
- UN_EXTS();
- return push_inst(compiler, NOR | RC(flags) | S(src2) | A(dst) | B(src2));
-
case SLJIT_CLZ:
SLJIT_ASSERT(src1 == TMP_REG1);
return push_inst(compiler, ((flags & ALT_FORM1) ? CNTLZW : CNTLZD) | S(src2) | A(dst));
@@ -399,6 +399,11 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
FAIL_IF(push_inst(compiler, XORI | S(src1) | A(dst) | IMM(imm)));
return push_inst(compiler, XORIS | S(dst) | A(dst) | IMM(imm >> 16));
}
+ if (flags & ALT_FORM4) {
+ SLJIT_ASSERT(src1 == TMP_REG1);
+ UN_EXTS();
+ return push_inst(compiler, NOR | RC(flags) | S(src2) | A(dst) | B(src2));
+ }
return push_inst(compiler, XOR | RC(flags) | S(src1) | A(dst) | B(src2));
case SLJIT_SHL:
@@ -563,6 +568,141 @@ static SLJIT_INLINE sljit_s32 emit_const(struct sljit_compiler *compiler, sljit_
return push_inst(compiler, ORI | S(reg) | A(reg) | IMM(init_value));
}
+static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_sw(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
+{
+ sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
+
+ if (src == SLJIT_IMM) {
+ if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_S32)
+ srcw = (sljit_s32)srcw;
+
+ FAIL_IF(load_immediate(compiler, TMP_REG1, srcw));
+ src = TMP_REG1;
+ } else if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_S32) {
+ if (FAST_IS_REG(src))
+ FAIL_IF(push_inst(compiler, EXTSW | S(src) | A(TMP_REG1)));
+ else
+ FAIL_IF(emit_op_mem(compiler, INT_DATA | SIGNED_DATA | LOAD_DATA, TMP_REG1, src, srcw, TMP_REG1));
+ src = TMP_REG1;
+ }
+
+ if (FAST_IS_REG(src)) {
+ FAIL_IF(push_inst(compiler, STD | S(src) | A(SLJIT_SP) | TMP_MEM_OFFSET));
+ FAIL_IF(push_inst(compiler, LFD | FS(TMP_FREG1) | A(SLJIT_SP) | TMP_MEM_OFFSET));
+ } else
+ FAIL_IF(emit_op_mem(compiler, DOUBLE_DATA | LOAD_DATA, TMP_FREG1, src, srcw, TMP_REG1));
+
+ FAIL_IF(push_inst(compiler, FCFID | FD(dst_r) | FB(TMP_FREG1)));
+
+ if (op & SLJIT_32)
+ FAIL_IF(push_inst(compiler, FRSP | FD(dst_r) | FB(dst_r)));
+
+ if (dst & SLJIT_MEM)
+ return emit_op_mem(compiler, FLOAT_DATA(op), TMP_FREG1, dst, dstw, TMP_REG1);
+ return SLJIT_SUCCESS;
+}
+
+static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_uw(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
+{
+ sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
+
+ if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_U32) {
+ if (src == SLJIT_IMM) {
+ FAIL_IF(load_immediate(compiler, TMP_REG1, (sljit_u32)srcw));
+ src = TMP_REG1;
+ } else {
+ if (FAST_IS_REG(src))
+ FAIL_IF(push_inst(compiler, CLRLDI(TMP_REG1, src, 32)));
+ else
+ FAIL_IF(emit_op_mem(compiler, INT_DATA | LOAD_DATA, TMP_REG1, src, srcw, TMP_REG1));
+ src = TMP_REG1;
+ }
+
+ FAIL_IF(push_inst(compiler, STD | S(src) | A(SLJIT_SP) | TMP_MEM_OFFSET));
+ FAIL_IF(push_inst(compiler, LFD | FS(TMP_FREG1) | A(SLJIT_SP) | TMP_MEM_OFFSET));
+ FAIL_IF(push_inst(compiler, FCFID | FD(dst_r) | FB(TMP_FREG1)));
+ } else {
+ if (src == SLJIT_IMM) {
+ FAIL_IF(load_immediate(compiler, TMP_REG1, srcw));
+ src = TMP_REG1;
+ } else if (src & SLJIT_MEM) {
+ FAIL_IF(emit_op_mem(compiler, WORD_DATA | LOAD_DATA, TMP_REG1, src, srcw, TMP_REG1));
+ src = TMP_REG1;
+ }
+
+ FAIL_IF(push_inst(compiler, CMPI | CRD(0 | 1) | A(src) | 0));
+ FAIL_IF(push_inst(compiler, BCx | (12 << 21) | (0 << 16) | 20));
+ FAIL_IF(push_inst(compiler, STD | S(src) | A(SLJIT_SP) | TMP_MEM_OFFSET));
+ FAIL_IF(push_inst(compiler, LFD | FS(TMP_FREG1) | A(SLJIT_SP) | TMP_MEM_OFFSET));
+ FAIL_IF(push_inst(compiler, FCFID | FD(dst_r) | FB(TMP_FREG1)));
+ FAIL_IF(push_inst(compiler, Bx | ((op & SLJIT_32) ? 36 : 32)));
+
+ if (op & SLJIT_32)
+ FAIL_IF(push_inst(compiler, RLWINM | S(src) | A(TMP_REG2) | RLWI_SH(10) | RLWI_MBE(10, 21)));
+ else
+ FAIL_IF(push_inst(compiler, ANDI | S(src) | A(TMP_REG2) | 0x1));
+
+ /* Shift right. */
+ FAIL_IF(push_inst(compiler, RLDICL | S(src) | A(TMP_REG1) | RLDI_SH(63) | RLDI_MB(1)));
+
+ if (op & SLJIT_32)
+ FAIL_IF(push_inst(compiler, RLDICR | S(TMP_REG1) | A(TMP_REG1) | RLDI_SH(0) | RLDI_ME(53)));
+
+ FAIL_IF(push_inst(compiler, OR | S(TMP_REG1) | A(TMP_REG1) | B(TMP_REG2)));
+
+ FAIL_IF(push_inst(compiler, STD | S(TMP_REG1) | A(SLJIT_SP) | TMP_MEM_OFFSET));
+ FAIL_IF(push_inst(compiler, LFD | FS(TMP_FREG1) | A(SLJIT_SP) | TMP_MEM_OFFSET));
+ FAIL_IF(push_inst(compiler, FCFID | FD(dst_r) | FB(TMP_FREG1)));
+ FAIL_IF(push_inst(compiler, FADD | FD(dst_r) | FA(dst_r) | FB(dst_r)));
+ }
+
+ if (op & SLJIT_32)
+ FAIL_IF(push_inst(compiler, FRSP | FD(dst_r) | FB(dst_r)));
+
+ if (dst & SLJIT_MEM)
+ return emit_op_mem(compiler, FLOAT_DATA(op), TMP_FREG1, dst, dstw, TMP_REG1);
+ return SLJIT_SUCCESS;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fset64(struct sljit_compiler *compiler,
+ sljit_s32 freg, sljit_f64 value)
+{
+ union {
+ sljit_sw imm;
+ sljit_f64 value;
+ } u;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_fset64(compiler, freg, value));
+
+ u.value = value;
+
+ if (u.imm != 0)
+ FAIL_IF(load_immediate(compiler, TMP_REG1, u.imm));
+
+ FAIL_IF(push_inst(compiler, STD | S(u.imm != 0 ? TMP_REG1 : TMP_ZERO) | A(SLJIT_SP) | TMP_MEM_OFFSET));
+ return push_inst(compiler, LFD | FS(freg) | A(SLJIT_SP) | TMP_MEM_OFFSET);
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fcopy(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 freg, sljit_s32 reg)
+{
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_fcopy(compiler, op, freg, reg));
+
+ if (GET_OPCODE(op) == SLJIT_COPY_TO_F64) {
+ FAIL_IF(push_inst(compiler, ((op & SLJIT_32) ? STW : STD) | S(reg) | A(SLJIT_SP) | TMP_MEM_OFFSET));
+ return push_inst(compiler, ((op & SLJIT_32) ? LFS : LFD) | FS(freg) | A(SLJIT_SP) | TMP_MEM_OFFSET);
+ }
+
+ FAIL_IF(push_inst(compiler, ((op & SLJIT_32) ? STFS : STFD) | FS(freg) | A(SLJIT_SP) | TMP_MEM_OFFSET));
+ return push_inst(compiler, ((op & SLJIT_32) ? LWZ : LD) | S(reg) | A(SLJIT_SP) | TMP_MEM_OFFSET);
+}
+
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_jump_addr(sljit_uw addr, sljit_uw new_target, sljit_sw executable_offset)
{
sljit_ins *inst = (sljit_ins*)addr;
diff --git a/src/3rdparty/pcre2/src/sljit/sljitNativePPC_common.c b/src/3rdparty/pcre2/src/sljit/sljitNativePPC_common.c
index f387114733..1f17d90423 100644
--- a/src/3rdparty/pcre2/src/sljit/sljitNativePPC_common.c
+++ b/src/3rdparty/pcre2/src/sljit/sljitNativePPC_common.c
@@ -98,7 +98,7 @@ static void ppc_cache_flush(sljit_ins *from, sljit_ins *to)
#if (defined SLJIT_PASS_ENTRY_ADDR_TO_CALL && SLJIT_PASS_ENTRY_ADDR_TO_CALL)
#define TMP_CALL_REG (SLJIT_NUMBER_OF_REGISTERS + 5)
#else
-#define TMP_CALL_REG TMP_REG2
+#define TMP_CALL_REG TMP_REG1
#endif
#define TMP_FREG1 (SLJIT_NUMBER_OF_FLOAT_REGISTERS + 1)
@@ -132,7 +132,7 @@ static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 3] = {
OE and Rc flag (see ALT_SET_FLAGS). */
#define OE(flags) ((flags) & ALT_SET_FLAGS)
/* Rc flag (see ALT_SET_FLAGS). */
-#define RC(flags) (((flags) & ALT_SET_FLAGS) >> 10)
+#define RC(flags) ((sljit_ins)((flags) & ALT_SET_FLAGS) >> 10)
#define HI(opcode) ((sljit_ins)(opcode) << 26)
#define LO(opcode) ((sljit_ins)(opcode) << 1)
@@ -150,6 +150,9 @@ static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 3] = {
#define BCx (HI(16))
#define BCCTR (HI(19) | LO(528) | (3 << 11))
#define BLR (HI(19) | LO(16) | (0x14 << 21))
+#if defined(_ARCH_PWR10) && _ARCH_PWR10
+#define BRD (HI(31) | LO(187))
+#endif /* POWER10 */
#define CNTLZD (HI(31) | LO(58))
#define CNTLZW (HI(31) | LO(26))
#define CMP (HI(31) | LO(0))
@@ -183,6 +186,12 @@ static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 3] = {
#define FSUBS (HI(59) | LO(20))
#define LD (HI(58) | 0)
#define LFD (HI(50))
+#define LFS (HI(48))
+#if defined(_ARCH_PWR7) && _ARCH_PWR7
+#define LDBRX (HI(31) | LO(532))
+#endif /* POWER7 */
+#define LHBRX (HI(31) | LO(790))
+#define LWBRX (HI(31) | LO(534))
#define LWZ (HI(32))
#define MFCR (HI(31) | LO(19))
#define MFLR (HI(31) | LO(339) | 0x80000)
@@ -219,11 +228,17 @@ static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 3] = {
#define SRD (HI(31) | LO(539))
#define SRW (HI(31) | LO(536))
#define STD (HI(62) | 0)
+#if defined(_ARCH_PWR7) && _ARCH_PWR7
+#define STDBRX (HI(31) | LO(660))
+#endif /* POWER7 */
#define STDU (HI(62) | 1)
#define STDUX (HI(31) | LO(181))
#define STFD (HI(54))
#define STFIWX (HI(31) | LO(983))
+#define STFS (HI(52))
+#define STHBRX (HI(31) | LO(918))
#define STW (HI(36))
+#define STWBRX (HI(31) | LO(662))
#define STWU (HI(37))
#define STWUX (HI(31) | LO(183))
#define SUBF (HI(31) | LO(40))
@@ -253,10 +268,24 @@ static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 3] = {
#if (defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32)
#define SLWI_W(shift) SLWI(shift)
+#define TMP_MEM_OFFSET (2 * sizeof(sljit_sw))
#else /* !SLJIT_CONFIG_PPC_32 */
#define SLWI_W(shift) SLDI(shift)
+#define TMP_MEM_OFFSET (6 * sizeof(sljit_sw))
#endif /* SLJIT_CONFIG_PPC_32 */
+#if (defined SLJIT_LITTLE_ENDIAN && SLJIT_LITTLE_ENDIAN)
+#define TMP_MEM_OFFSET_LO (TMP_MEM_OFFSET)
+#define TMP_MEM_OFFSET_HI (TMP_MEM_OFFSET + sizeof(sljit_s32))
+#define LWBRX_FIRST_REG S(TMP_REG1)
+#define LWBRX_SECOND_REG S(dst)
+#else /* !SLJIT_LITTLE_ENDIAN */
+#define TMP_MEM_OFFSET_LO (TMP_MEM_OFFSET + sizeof(sljit_s32))
+#define TMP_MEM_OFFSET_HI (TMP_MEM_OFFSET)
+#define LWBRX_FIRST_REG S(dst)
+#define LWBRX_SECOND_REG S(TMP_REG1)
+#endif /* SLJIT_LITTLE_ENDIAN */
+
#if (defined SLJIT_INDIRECT_CALL && SLJIT_INDIRECT_CALL)
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_function_context(void** func_ptr, struct sljit_function_context* context, sljit_uw addr, void* func)
{
@@ -281,24 +310,23 @@ static sljit_s32 push_inst(struct sljit_compiler *compiler, sljit_ins ins)
return SLJIT_SUCCESS;
}
-static SLJIT_INLINE sljit_s32 detect_jump_type(struct sljit_jump *jump, sljit_ins *code_ptr, sljit_ins *code, sljit_sw executable_offset)
+static SLJIT_INLINE sljit_ins* detect_jump_type(struct sljit_jump *jump, sljit_ins *code_ptr, sljit_ins *code, sljit_sw executable_offset)
{
sljit_sw diff;
sljit_uw target_addr;
- sljit_uw extra_jump_flags;
#if (defined SLJIT_PASS_ENTRY_ADDR_TO_CALL && SLJIT_PASS_ENTRY_ADDR_TO_CALL) && (defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32)
if (jump->flags & (SLJIT_REWRITABLE_JUMP | IS_CALL))
- return 0;
+ goto exit;
#else
if (jump->flags & SLJIT_REWRITABLE_JUMP)
- return 0;
+ goto exit;
#endif
if (jump->flags & JUMP_ADDR)
target_addr = jump->u.target;
else {
- SLJIT_ASSERT(jump->flags & JUMP_LABEL);
+ SLJIT_ASSERT(jump->u.label != NULL);
target_addr = (sljit_uw)(code + jump->u.label->size) + (sljit_uw)executable_offset;
}
@@ -307,101 +335,256 @@ static SLJIT_INLINE sljit_s32 detect_jump_type(struct sljit_jump *jump, sljit_in
goto keep_address;
#endif
- diff = ((sljit_sw)target_addr - (sljit_sw)(code_ptr) - executable_offset) & ~0x3l;
+ diff = (sljit_sw)target_addr - (sljit_sw)code_ptr - executable_offset;
- extra_jump_flags = 0;
if (jump->flags & IS_COND) {
if (diff <= 0x7fff && diff >= -0x8000) {
jump->flags |= PATCH_B;
- return 1;
+ return code_ptr;
}
if (target_addr <= 0xffff) {
jump->flags |= PATCH_B | PATCH_ABS_B;
- return 1;
+ return code_ptr;
}
- extra_jump_flags = REMOVE_COND;
diff -= SSIZE_OF(ins);
}
if (diff <= 0x01ffffff && diff >= -0x02000000) {
- jump->flags |= PATCH_B | extra_jump_flags;
- return 1;
+ jump->flags |= PATCH_B;
+ } else if (target_addr <= 0x01ffffff) {
+ jump->flags |= PATCH_B | PATCH_ABS_B;
}
- if (target_addr <= 0x03ffffff) {
- jump->flags |= PATCH_B | PATCH_ABS_B | extra_jump_flags;
- return 1;
+ if (jump->flags & PATCH_B) {
+ if (!(jump->flags & IS_COND))
+ return code_ptr;
+
+ code_ptr[0] = BCx | (2 << 2) | ((code_ptr[0] ^ (8 << 21)) & 0x03ff0001);
+ code_ptr[1] = Bx;
+ jump->addr += sizeof(sljit_ins);
+ jump->flags -= IS_COND;
+ return code_ptr + 1;
}
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
#if (defined SLJIT_PASS_ENTRY_ADDR_TO_CALL && SLJIT_PASS_ENTRY_ADDR_TO_CALL)
keep_address:
-#endif
- if (target_addr <= 0x7fffffff) {
+#endif /* SLJIT_PASS_ENTRY_ADDR_TO_CALL */
+ if (target_addr < 0x80000000l) {
jump->flags |= PATCH_ABS32;
- return 1;
+ code_ptr[2] = MTCTR | S(TMP_CALL_REG);
+ code_ptr[3] = code_ptr[0];
+ return code_ptr + 3;
}
- if (target_addr <= 0x7fffffffffffl) {
+ if (target_addr < 0x800000000000l) {
jump->flags |= PATCH_ABS48;
- return 1;
+ code_ptr[4] = MTCTR | S(TMP_CALL_REG);
+ code_ptr[5] = code_ptr[0];
+ return code_ptr + 5;
}
-#endif
+#endif /* SLJIT_CONFIG_PPC_64 */
- return 0;
+exit:
+#if (defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32)
+ code_ptr[2] = MTCTR | S(TMP_CALL_REG);
+ code_ptr[3] = code_ptr[0];
+#else /* !SLJIT_CONFIG_PPC_32 */
+ code_ptr[5] = MTCTR | S(TMP_CALL_REG);
+ code_ptr[6] = code_ptr[0];
+#endif /* SLJIT_CONFIG_PPC_32 */
+ return code_ptr + JUMP_MAX_SIZE - 1;
}
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
-static SLJIT_INLINE sljit_sw put_label_get_length(struct sljit_put_label *put_label, sljit_uw max_label)
+static SLJIT_INLINE sljit_sw mov_addr_get_length(struct sljit_jump *jump, sljit_ins *code, sljit_sw executable_offset)
{
- if (max_label < 0x100000000l) {
- put_label->flags = 0;
+ sljit_uw addr;
+ SLJIT_UNUSED_ARG(executable_offset);
+
+ SLJIT_ASSERT(jump->flags < ((sljit_uw)5 << JUMP_SIZE_SHIFT));
+ if (jump->flags & JUMP_ADDR)
+ addr = jump->u.target;
+ else
+ addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code + jump->u.label->size, executable_offset);
+
+ if (addr < 0x80000000l) {
+ SLJIT_ASSERT(jump->flags >= ((sljit_uw)1 << JUMP_SIZE_SHIFT));
+ jump->flags |= PATCH_ABS32;
return 1;
}
- if (max_label < 0x1000000000000l) {
- put_label->flags = 1;
+ if (addr < 0x800000000000l) {
+ SLJIT_ASSERT(jump->flags >= ((sljit_uw)3 << JUMP_SIZE_SHIFT));
+ jump->flags |= PATCH_ABS48;
return 3;
}
- put_label->flags = 2;
+ SLJIT_ASSERT(jump->flags >= ((sljit_uw)4 << JUMP_SIZE_SHIFT));
return 4;
}
-static SLJIT_INLINE void put_label_set(struct sljit_put_label *put_label)
-{
- sljit_uw addr = put_label->label->addr;
- sljit_ins *inst = (sljit_ins *)put_label->addr;
- sljit_u32 reg = *inst;
+#endif /* SLJIT_CONFIG_PPC_64 */
- if (put_label->flags == 0) {
- SLJIT_ASSERT(addr < 0x100000000l);
- inst[0] = ORIS | S(TMP_ZERO) | A(reg) | IMM(addr >> 16);
- }
- else {
- if (put_label->flags == 1) {
- SLJIT_ASSERT(addr < 0x1000000000000l);
- inst[0] = ORI | S(TMP_ZERO) | A(reg) | IMM(addr >> 32);
+static void generate_jump_or_mov_addr(struct sljit_jump *jump, sljit_sw executable_offset)
+{
+ sljit_uw flags = jump->flags;
+ sljit_uw addr = (flags & JUMP_ADDR) ? jump->u.target : jump->u.label->u.addr;
+ sljit_ins *ins = (sljit_ins*)jump->addr;
+ sljit_s32 reg;
+ SLJIT_UNUSED_ARG(executable_offset);
+
+ if (flags & PATCH_B) {
+ if (flags & IS_COND) {
+ if (!(flags & PATCH_ABS_B)) {
+ addr -= (sljit_uw)SLJIT_ADD_EXEC_OFFSET(ins, executable_offset);
+ SLJIT_ASSERT((sljit_sw)addr <= 0x7fff && (sljit_sw)addr >= -0x8000);
+ ins[0] = BCx | ((sljit_ins)addr & 0xfffc) | (ins[0] & 0x03ff0001);
+ } else {
+ SLJIT_ASSERT(addr <= 0xffff);
+ ins[0] = BCx | ((sljit_ins)addr & 0xfffc) | 0x2 | ((*ins) & 0x03ff0001);
+ }
+ return;
}
- else {
- inst[0] = ORIS | S(TMP_ZERO) | A(reg) | IMM(addr >> 48);
- inst[1] = ORI | S(reg) | A(reg) | IMM((addr >> 32) & 0xffff);
- inst++;
+
+ if (!(flags & PATCH_ABS_B)) {
+ addr -= (sljit_uw)SLJIT_ADD_EXEC_OFFSET(ins, executable_offset);
+ SLJIT_ASSERT((sljit_sw)addr <= 0x01ffffff && (sljit_sw)addr >= -0x02000000);
+ ins[0] = Bx | ((sljit_ins)addr & 0x03fffffc) | (ins[0] & 0x1);
+ } else {
+ SLJIT_ASSERT(addr <= 0x03ffffff);
+ ins[0] = Bx | ((sljit_ins)addr & 0x03fffffc) | 0x2 | (ins[0] & 0x1);
}
+ return;
+ }
+
+ reg = (flags & JUMP_MOV_ADDR) ? (sljit_s32)ins[0] : TMP_CALL_REG;
+
+#if (defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32)
+ ins[0] = ADDIS | D(reg) | A(0) | IMM(addr >> 16);
+ ins[1] = ORI | S(reg) | A(reg) | IMM(addr);
+#else /* !SLJIT_CONFIG_PPC_32 */
+
+ /* The TMP_ZERO cannot be used because it is restored for tail calls. */
+ if (flags & PATCH_ABS32) {
+ SLJIT_ASSERT(addr < 0x80000000l);
+ ins[0] = ADDIS | D(reg) | A(0) | IMM(addr >> 16);
+ ins[1] = ORI | S(reg) | A(reg) | IMM(addr);
+ return;
+ }
- inst[1] = SLDI(32) | S(reg) | A(reg);
- inst[2] = ORIS | S(reg) | A(reg) | IMM((addr >> 16) & 0xffff);
- inst += 2;
+ if (flags & PATCH_ABS48) {
+ SLJIT_ASSERT(addr < 0x800000000000l);
+ ins[0] = ADDIS | D(reg) | A(0) | IMM(addr >> 32);
+ ins[1] = ORI | S(reg) | A(reg) | IMM(addr >> 16);
+ ins[2] = SLDI(16) | S(reg) | A(reg);
+ ins[3] = ORI | S(reg) | A(reg) | IMM(addr);
+ return;
}
- inst[1] = ORI | S(reg) | A(reg) | IMM(addr & 0xffff);
+ ins[0] = ADDIS | D(reg) | A(0) | IMM(addr >> 48);
+ ins[1] = ORI | S(reg) | A(reg) | IMM(addr >> 32);
+ ins[2] = SLDI(32) | S(reg) | A(reg);
+ ins[3] = ORIS | S(reg) | A(reg) | IMM(addr >> 16);
+ ins[4] = ORI | S(reg) | A(reg) | IMM(addr);
+#endif /* SLJIT_CONFIG_PPC_32 */
}
+static void reduce_code_size(struct sljit_compiler *compiler)
+{
+ struct sljit_label *label;
+ struct sljit_jump *jump;
+ struct sljit_const *const_;
+ SLJIT_NEXT_DEFINE_TYPES;
+ sljit_uw total_size;
+ sljit_uw size_reduce = 0;
+ sljit_sw diff;
+
+ label = compiler->labels;
+ jump = compiler->jumps;
+ const_ = compiler->consts;
+ SLJIT_NEXT_INIT_TYPES();
+
+ while (1) {
+ SLJIT_GET_NEXT_MIN();
+
+ if (next_min_addr == SLJIT_MAX_ADDRESS)
+ break;
+
+ if (next_min_addr == next_label_size) {
+ label->size -= size_reduce;
+
+ label = label->next;
+ next_label_size = SLJIT_GET_NEXT_SIZE(label);
+ }
+
+ if (next_min_addr == next_const_addr) {
+ const_->addr -= size_reduce;
+ const_ = const_->next;
+ next_const_addr = SLJIT_GET_NEXT_ADDRESS(const_);
+ continue;
+ }
+
+ if (next_min_addr != next_jump_addr)
+ continue;
+
+ jump->addr -= size_reduce;
+ if (!(jump->flags & JUMP_MOV_ADDR)) {
+ total_size = JUMP_MAX_SIZE - 1;
+
+ if (!(jump->flags & SLJIT_REWRITABLE_JUMP)) {
+ if (jump->flags & JUMP_ADDR) {
+ if (jump->u.target <= 0x01ffffff)
+ total_size = 1 - 1;
+#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
+ else if (jump->u.target < 0x80000000l)
+ total_size = 4 - 1;
+ else if (jump->u.target < 0x800000000000l)
+ total_size = 6 - 1;
+#endif /* SLJIT_CONFIG_PPC_64 */
+ } else {
+ /* Unit size: instruction. */
+ diff = (sljit_sw)jump->u.label->size - (sljit_sw)jump->addr;
+
+ if (jump->flags & IS_COND) {
+ if (diff <= (0x7fff / SSIZE_OF(ins)) && diff >= (-0x8000 / SSIZE_OF(ins)))
+ total_size = 1 - 1;
+ else if ((diff - 1) <= (0x01ffffff / SSIZE_OF(ins)) && (diff - 1) >= (-0x02000000 / SSIZE_OF(ins)))
+ total_size = 2 - 1;
+ } else if (diff <= (0x01ffffff / SSIZE_OF(ins)) && diff >= (-0x02000000 / SSIZE_OF(ins)))
+ total_size = 1 - 1;
+ }
+ }
+
+ size_reduce += (JUMP_MAX_SIZE - 1) - total_size;
+ jump->flags |= total_size << JUMP_SIZE_SHIFT;
+#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
+ } else {
+ total_size = (sljit_uw)4 << JUMP_SIZE_SHIFT;
+
+ if (jump->flags & JUMP_ADDR) {
+ if (jump->u.target < 0x80000000l) {
+ total_size = (sljit_uw)1 << JUMP_SIZE_SHIFT;
+ size_reduce += 3;
+ } else if (jump->u.target < 0x800000000000l) {
+ total_size = (sljit_uw)3 << JUMP_SIZE_SHIFT;
+ size_reduce += 1;
+ }
+ }
+ jump->flags |= total_size;
#endif /* SLJIT_CONFIG_PPC_64 */
+ }
-SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compiler)
+ jump = jump->next;
+ next_jump_addr = SLJIT_GET_NEXT_ADDRESS(jump);
+ }
+
+ compiler->size -= size_reduce;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compiler, sljit_s32 options, void *exec_allocator_data)
{
struct sljit_memory_fragment *buf;
sljit_ins *code;
@@ -409,113 +592,84 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
sljit_ins *buf_ptr;
sljit_ins *buf_end;
sljit_uw word_count;
- sljit_uw next_addr;
+ SLJIT_NEXT_DEFINE_TYPES;
sljit_sw executable_offset;
- sljit_uw addr;
struct sljit_label *label;
struct sljit_jump *jump;
struct sljit_const *const_;
- struct sljit_put_label *put_label;
CHECK_ERROR_PTR();
CHECK_PTR(check_sljit_generate_code(compiler));
- reverse_buf(compiler);
+
+ reduce_code_size(compiler);
#if (defined SLJIT_INDIRECT_CALL && SLJIT_INDIRECT_CALL)
+ /* Add extra instruction space to compiler->size to hold the trampoline and padding. */
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
compiler->size += (compiler->size & 0x1) + (sizeof(struct sljit_function_context) / sizeof(sljit_ins));
#else
compiler->size += (sizeof(struct sljit_function_context) / sizeof(sljit_ins));
#endif
#endif
- code = (sljit_ins*)SLJIT_MALLOC_EXEC(compiler->size * sizeof(sljit_ins), compiler->exec_allocator_data);
+ code = (sljit_ins*)allocate_executable_memory(compiler->size * sizeof(sljit_ins), options, exec_allocator_data, &executable_offset);
PTR_FAIL_WITH_EXEC_IF(code);
+
+ reverse_buf(compiler);
buf = compiler->buf;
code_ptr = code;
word_count = 0;
- next_addr = 0;
- executable_offset = SLJIT_EXEC_OFFSET(code);
-
label = compiler->labels;
jump = compiler->jumps;
const_ = compiler->consts;
- put_label = compiler->put_labels;
+ SLJIT_NEXT_INIT_TYPES();
+ SLJIT_GET_NEXT_MIN();
do {
buf_ptr = (sljit_ins*)buf->memory;
buf_end = buf_ptr + (buf->used_size >> 2);
do {
*code_ptr = *buf_ptr++;
- if (next_addr == word_count) {
+ if (next_min_addr == word_count) {
SLJIT_ASSERT(!label || label->size >= word_count);
SLJIT_ASSERT(!jump || jump->addr >= word_count);
SLJIT_ASSERT(!const_ || const_->addr >= word_count);
- SLJIT_ASSERT(!put_label || put_label->addr >= word_count);
/* These structures are ordered by their address. */
- if (label && label->size == word_count) {
+ if (next_min_addr == next_label_size) {
/* Just recording the address. */
- label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
+ label->u.addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
label->size = (sljit_uw)(code_ptr - code);
label = label->next;
+ next_label_size = SLJIT_GET_NEXT_SIZE(label);
}
- if (jump && jump->addr == word_count) {
-#if (defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32)
- jump->addr = (sljit_uw)(code_ptr - 3);
-#else
- jump->addr = (sljit_uw)(code_ptr - 6);
-#endif
- if (detect_jump_type(jump, code_ptr, code, executable_offset)) {
-#if (defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32)
- code_ptr[-3] = code_ptr[0];
- code_ptr -= 3;
-#else
- if (jump->flags & PATCH_ABS32) {
- code_ptr -= 3;
- code_ptr[-1] = code_ptr[2];
- code_ptr[0] = code_ptr[3];
- }
- else if (jump->flags & PATCH_ABS48) {
- code_ptr--;
- code_ptr[-1] = code_ptr[0];
- code_ptr[0] = code_ptr[1];
- /* rldicr rX,rX,32,31 -> rX,rX,16,47 */
- SLJIT_ASSERT((code_ptr[-3] & 0xfc00ffff) == 0x780007c6);
- code_ptr[-3] ^= 0x8422;
- /* oris -> ori */
- code_ptr[-2] ^= 0x4000000;
- }
- else {
- code_ptr[-6] = code_ptr[0];
- code_ptr -= 6;
- }
-#endif
- if (jump->flags & REMOVE_COND) {
- code_ptr[0] = BCx | (2 << 2) | ((code_ptr[0] ^ (8 << 21)) & 0x03ff0001);
- code_ptr++;
- jump->addr += sizeof(sljit_ins);
- code_ptr[0] = Bx;
- jump->flags -= IS_COND;
- }
+
+ if (next_min_addr == next_jump_addr) {
+ if (!(jump->flags & JUMP_MOV_ADDR)) {
+ word_count += jump->flags >> JUMP_SIZE_SHIFT;
+ jump->addr = (sljit_uw)code_ptr;
+ code_ptr = detect_jump_type(jump, code_ptr, code, executable_offset);
+ SLJIT_ASSERT(((sljit_uw)code_ptr - jump->addr <= (jump->flags >> JUMP_SIZE_SHIFT) * sizeof(sljit_ins)));
+ } else {
+ jump->addr = (sljit_uw)code_ptr;
+#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
+ word_count += jump->flags >> JUMP_SIZE_SHIFT;
+ code_ptr += mov_addr_get_length(jump, code, executable_offset);
+#else /* !SLJIT_CONFIG_PPC_64 */
+ word_count++;
+ code_ptr++;
+#endif /* SLJIT_CONFIG_PPC_64 */
}
jump = jump->next;
- }
- if (const_ && const_->addr == word_count) {
+ next_jump_addr = SLJIT_GET_NEXT_ADDRESS(jump);
+ } else if (next_min_addr == next_const_addr) {
const_->addr = (sljit_uw)code_ptr;
const_ = const_->next;
+ next_const_addr = SLJIT_GET_NEXT_ADDRESS(const_);
}
- if (put_label && put_label->addr == word_count) {
- SLJIT_ASSERT(put_label->label);
- put_label->addr = (sljit_uw)code_ptr;
-#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
- code_ptr += put_label_get_length(put_label, (sljit_uw)(SLJIT_ADD_EXEC_OFFSET(code, executable_offset) + put_label->label->size));
- word_count += 4;
-#endif
- put_label = put_label->next;
- }
- next_addr = compute_next_addr(label, jump, const_, put_label);
+
+ SLJIT_GET_NEXT_MIN();
}
code_ptr++;
word_count++;
@@ -525,7 +679,7 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
} while (buf);
if (label && label->size == word_count) {
- label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
+ label->u.addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
label->size = (sljit_uw)(code_ptr - code);
label = label->next;
}
@@ -533,7 +687,6 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
SLJIT_ASSERT(!label);
SLJIT_ASSERT(!jump);
SLJIT_ASSERT(!const_);
- SLJIT_ASSERT(!put_label);
#if (defined SLJIT_INDIRECT_CALL && SLJIT_INDIRECT_CALL)
SLJIT_ASSERT(code_ptr - code <= (sljit_sw)(compiler->size - (sizeof(struct sljit_function_context) / sizeof(sljit_ins))));
@@ -543,87 +696,12 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
jump = compiler->jumps;
while (jump) {
- do {
- addr = (jump->flags & JUMP_LABEL) ? jump->u.label->addr : jump->u.target;
- buf_ptr = (sljit_ins *)jump->addr;
-
- if (jump->flags & PATCH_B) {
- if (jump->flags & IS_COND) {
- if (!(jump->flags & PATCH_ABS_B)) {
- addr -= (sljit_uw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset);
- SLJIT_ASSERT((sljit_sw)addr <= 0x7fff && (sljit_sw)addr >= -0x8000);
- *buf_ptr = BCx | ((sljit_ins)addr & 0xfffc) | ((*buf_ptr) & 0x03ff0001);
- }
- else {
- SLJIT_ASSERT(addr <= 0xffff);
- *buf_ptr = BCx | ((sljit_ins)addr & 0xfffc) | 0x2 | ((*buf_ptr) & 0x03ff0001);
- }
- }
- else {
- if (!(jump->flags & PATCH_ABS_B)) {
- addr -= (sljit_uw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset);
- SLJIT_ASSERT((sljit_sw)addr <= 0x01ffffff && (sljit_sw)addr >= -0x02000000);
- *buf_ptr = Bx | ((sljit_ins)addr & 0x03fffffc) | ((*buf_ptr) & 0x1);
- }
- else {
- SLJIT_ASSERT(addr <= 0x03ffffff);
- *buf_ptr = Bx | ((sljit_ins)addr & 0x03fffffc) | 0x2 | ((*buf_ptr) & 0x1);
- }
- }
- break;
- }
-
- /* Set the fields of immediate loads. */
-#if (defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32)
- SLJIT_ASSERT(((buf_ptr[0] | buf_ptr[1]) & 0xffff) == 0);
- buf_ptr[0] |= (sljit_ins)(addr >> 16) & 0xffff;
- buf_ptr[1] |= (sljit_ins)addr & 0xffff;
-#else
- if (jump->flags & PATCH_ABS32) {
- SLJIT_ASSERT(addr <= 0x7fffffff);
- SLJIT_ASSERT(((buf_ptr[0] | buf_ptr[1]) & 0xffff) == 0);
- buf_ptr[0] |= (sljit_ins)(addr >> 16) & 0xffff;
- buf_ptr[1] |= (sljit_ins)addr & 0xffff;
- break;
- }
-
- if (jump->flags & PATCH_ABS48) {
- SLJIT_ASSERT(addr <= 0x7fffffffffff);
- SLJIT_ASSERT(((buf_ptr[0] | buf_ptr[1] | buf_ptr[3]) & 0xffff) == 0);
- buf_ptr[0] |= (sljit_ins)(addr >> 32) & 0xffff;
- buf_ptr[1] |= (sljit_ins)(addr >> 16) & 0xffff;
- buf_ptr[3] |= (sljit_ins)addr & 0xffff;
- break;
- }
-
- SLJIT_ASSERT(((buf_ptr[0] | buf_ptr[1] | buf_ptr[3] | buf_ptr[4]) & 0xffff) == 0);
- buf_ptr[0] |= (sljit_ins)(addr >> 48) & 0xffff;
- buf_ptr[1] |= (sljit_ins)(addr >> 32) & 0xffff;
- buf_ptr[3] |= (sljit_ins)(addr >> 16) & 0xffff;
- buf_ptr[4] |= (sljit_ins)addr & 0xffff;
-#endif
- } while (0);
+ generate_jump_or_mov_addr(jump, executable_offset);
jump = jump->next;
}
- put_label = compiler->put_labels;
- while (put_label) {
-#if (defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32)
- addr = put_label->label->addr;
- buf_ptr = (sljit_ins *)put_label->addr;
-
- SLJIT_ASSERT((buf_ptr[0] & 0xfc1f0000) == ADDIS && (buf_ptr[1] & 0xfc000000) == ORI);
- buf_ptr[0] |= (addr >> 16) & 0xffff;
- buf_ptr[1] |= addr & 0xffff;
-#else
- put_label_set(put_label);
-#endif
- put_label = put_label->next;
- }
-
compiler->error = SLJIT_ERR_COMPILED;
compiler->executable_offset = executable_offset;
- compiler->executable_size = (sljit_uw)(code_ptr - code) * sizeof(sljit_ins);
code = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(code, executable_offset);
@@ -641,8 +719,10 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
SLJIT_UPDATE_WX_FLAGS(code, code_ptr, 1);
#if (defined SLJIT_INDIRECT_CALL && SLJIT_INDIRECT_CALL)
+ compiler->executable_size = (sljit_uw)(code_ptr - code) * sizeof(sljit_ins) + sizeof(struct sljit_function_context);
return code_ptr;
#else
+ compiler->executable_size = (sljit_uw)(code_ptr - code) * sizeof(sljit_ins);
return code;
#endif
}
@@ -652,12 +732,17 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_has_cpu_feature(sljit_s32 feature_type)
switch (feature_type) {
case SLJIT_HAS_FPU:
#ifdef SLJIT_IS_FPU_AVAILABLE
- return SLJIT_IS_FPU_AVAILABLE;
+ return (SLJIT_IS_FPU_AVAILABLE) != 0;
#else
/* Available by default. */
return 1;
#endif
-
+ case SLJIT_HAS_REV:
+#if defined(_ARCH_PWR10) && _ARCH_PWR10
+ return 1;
+#else /* !POWER10 */
+ return 2;
+#endif /* POWER10 */
/* A saved register is set to a zero value. */
case SLJIT_HAS_ZERO_REGISTER:
case SLJIT_HAS_CLZ:
@@ -675,7 +760,17 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_has_cpu_feature(sljit_s32 feature_type)
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_cmp_info(sljit_s32 type)
{
- return (type >= SLJIT_UNORDERED && type <= SLJIT_ORDERED_LESS_EQUAL);
+ switch (type) {
+ case SLJIT_UNORDERED_OR_EQUAL:
+ case SLJIT_ORDERED_NOT_EQUAL:
+ case SLJIT_UNORDERED_OR_LESS:
+ case SLJIT_ORDERED_GREATER_EQUAL:
+ case SLJIT_UNORDERED_OR_GREATER:
+ case SLJIT_ORDERED_LESS_EQUAL:
+ return 1;
+ }
+
+ return 0;
}
/* --------------------------------------------------------------------- */
@@ -699,6 +794,8 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_cmp_info(sljit_s32 type)
#define MEM_MASK 0x7f
+#define FLOAT_DATA(op) (DOUBLE_DATA | ((op & SLJIT_32) >> 6))
+
/* Other inp_flags. */
/* Integer operation and set flags -> requires exts on 64 bit systems. */
@@ -722,6 +819,9 @@ ALT_FORM1 0x001000
...
ALT_FORM5 0x010000 */
+static sljit_s32 emit_op_mem(struct sljit_compiler *compiler, sljit_s32 inp_flags, sljit_s32 reg,
+ sljit_s32 arg, sljit_sw argw, sljit_s32 tmp_reg);
+
#if (defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32)
#include "sljitNativePPC_32.c"
#else
@@ -737,16 +837,13 @@ ALT_FORM5 0x010000 */
#endif
#if (defined SLJIT_PPC_STACK_FRAME_V2 && SLJIT_PPC_STACK_FRAME_V2)
-#define LR_SAVE_OFFSET 2 * SSIZE_OF(sw)
+#define LR_SAVE_OFFSET (2 * SSIZE_OF(sw))
#else
#define LR_SAVE_OFFSET SSIZE_OF(sw)
#endif
#define STACK_MAX_DISTANCE (0x8000 - SSIZE_OF(sw) - LR_SAVE_OFFSET)
-static sljit_s32 emit_op_mem(struct sljit_compiler *compiler, sljit_s32 inp_flags, sljit_s32 reg,
- sljit_s32 arg, sljit_sw argw, sljit_s32 tmp_reg);
-
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compiler,
sljit_s32 options, sljit_s32 arg_types, sljit_s32 scratches, sljit_s32 saveds,
sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
@@ -763,7 +860,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compi
set_emit_enter(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size);
local_size += GET_SAVED_REGISTERS_SIZE(scratches, saveds - saved_arg_count, 0)
- + GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, sizeof(sljit_f64));
+ + GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, f64);
if (!(options & SLJIT_ENTER_REG_ARG))
local_size += SSIZE_OF(sw);
@@ -873,7 +970,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_set_context(struct sljit_compiler *comp
set_set_context(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size);
local_size += GET_SAVED_REGISTERS_SIZE(scratches, saveds - SLJIT_KEPT_SAVEDS_COUNT(options), 0)
- + GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, sizeof(sljit_f64));
+ + GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, f64);
if (!(options & SLJIT_ENTER_REG_ARG))
local_size += SSIZE_OF(sw);
@@ -887,14 +984,16 @@ static sljit_s32 emit_stack_frame_release(struct sljit_compiler *compiler, sljit
sljit_s32 i, tmp, base, offset;
sljit_s32 local_size = compiler->local_size;
+ SLJIT_ASSERT(TMP_CALL_REG != TMP_REG2);
+
base = SLJIT_SP;
if (local_size > STACK_MAX_DISTANCE) {
- base = TMP_REG1;
+ base = TMP_REG2;
if (local_size > 2 * STACK_MAX_DISTANCE + LR_SAVE_OFFSET) {
FAIL_IF(push_inst(compiler, STACK_LOAD | D(base) | A(SLJIT_SP) | IMM(0)));
local_size = 0;
} else {
- FAIL_IF(push_inst(compiler, ADDI | D(TMP_REG1) | A(SLJIT_SP) | IMM(local_size - STACK_MAX_DISTANCE)));
+ FAIL_IF(push_inst(compiler, ADDI | D(TMP_REG2) | A(SLJIT_SP) | IMM(local_size - STACK_MAX_DISTANCE)));
local_size = STACK_MAX_DISTANCE;
}
}
@@ -936,7 +1035,7 @@ static sljit_s32 emit_stack_frame_release(struct sljit_compiler *compiler, sljit
if (local_size > 0)
return push_inst(compiler, ADDI | D(SLJIT_SP) | A(base) | IMM(local_size));
- SLJIT_ASSERT(base == TMP_REG1);
+ SLJIT_ASSERT(base == TMP_REG2);
return push_inst(compiler, OR | S(base) | A(SLJIT_SP) | B(base));
}
@@ -1203,7 +1302,7 @@ static sljit_s32 emit_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s3
sljit_s32 dst_r = TMP_REG2;
sljit_s32 src1_r;
sljit_s32 src2_r;
- sljit_s32 sugg_src2_r = TMP_REG2;
+ sljit_s32 src2_tmp_reg = (!(input_flags & ALT_SIGN_EXT) && GET_OPCODE(op) >= SLJIT_OP2_BASE && FAST_IS_REG(src1)) ? TMP_REG1 : TMP_REG2;
sljit_s32 flags = input_flags & (ALT_FORM1 | ALT_FORM2 | ALT_FORM3 | ALT_FORM4 | ALT_FORM5 | ALT_SIGN_EXT | ALT_SET_FLAGS);
/* Destination check. */
@@ -1214,24 +1313,7 @@ static sljit_s32 emit_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s3
flags |= REG_DEST;
if (op >= SLJIT_MOV && op <= SLJIT_MOV_P)
- sugg_src2_r = dst_r;
- }
-
- /* Source 1. */
- if (FAST_IS_REG(src1)) {
- src1_r = src1;
- flags |= REG1_SOURCE;
- }
- else if (src1 & SLJIT_IMM) {
- src1_r = TMP_ZERO;
- if (src1w != 0) {
- FAIL_IF(load_immediate(compiler, TMP_REG1, src1w));
- src1_r = TMP_REG1;
- }
- }
- else {
- FAIL_IF(emit_op_mem(compiler, input_flags | LOAD_DATA, TMP_REG1, src1, src1w, TMP_REG1));
- src1_r = TMP_REG1;
+ src2_tmp_reg = dst_r;
}
/* Source 2. */
@@ -1241,17 +1323,30 @@ static sljit_s32 emit_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s3
if (!(flags & REG_DEST) && op >= SLJIT_MOV && op <= SLJIT_MOV_P)
dst_r = src2_r;
- }
- else if (src2 & SLJIT_IMM) {
+ } else if (src2 == SLJIT_IMM) {
src2_r = TMP_ZERO;
if (src2w != 0) {
- FAIL_IF(load_immediate(compiler, sugg_src2_r, src2w));
- src2_r = sugg_src2_r;
+ FAIL_IF(load_immediate(compiler, src2_tmp_reg, src2w));
+ src2_r = src2_tmp_reg;
}
+ } else {
+ FAIL_IF(emit_op_mem(compiler, input_flags | LOAD_DATA, src2_tmp_reg, src2, src2w, TMP_REG1));
+ src2_r = src2_tmp_reg;
}
- else {
- FAIL_IF(emit_op_mem(compiler, input_flags | LOAD_DATA, sugg_src2_r, src2, src2w, TMP_REG2));
- src2_r = sugg_src2_r;
+
+ /* Source 1. */
+ if (FAST_IS_REG(src1)) {
+ src1_r = src1;
+ flags |= REG1_SOURCE;
+ } else if (src1 == SLJIT_IMM) {
+ src1_r = TMP_ZERO;
+ if (src1w != 0) {
+ FAIL_IF(load_immediate(compiler, TMP_REG1, src1w));
+ src1_r = TMP_REG1;
+ }
+ } else {
+ FAIL_IF(emit_op_mem(compiler, input_flags | LOAD_DATA, TMP_REG1, src1, src1w, TMP_REG1));
+ src1_r = TMP_REG1;
}
FAIL_IF(emit_single_op(compiler, op, flags, dst_r, src1_r, src2_r));
@@ -1312,29 +1407,161 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op0(struct sljit_compiler *compile
return SLJIT_SUCCESS;
}
-static sljit_s32 emit_prefetch(struct sljit_compiler *compiler,
- sljit_s32 src, sljit_sw srcw)
+static sljit_s32 emit_rev(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
{
- if (!(src & OFFS_REG_MASK)) {
- if (srcw == 0 && (src & REG_MASK))
- return push_inst(compiler, DCBT | A(0) | B(src & REG_MASK));
+ sljit_s32 mem, offs_reg, inp_flags;
+ sljit_sw memw;
+#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
+ sljit_s32 is_32 = op & SLJIT_32;
- FAIL_IF(load_immediate(compiler, TMP_REG1, srcw));
- /* Works with SLJIT_MEM0() case as well. */
- return push_inst(compiler, DCBT | A(src & REG_MASK) | B(TMP_REG1));
+ op = GET_OPCODE(op);
+#endif /* SLJIT_CONFIG_PPC_64 */
+
+ if (!((dst | src) & SLJIT_MEM)) {
+ /* Both are registers. */
+ if (op == SLJIT_REV_U16 || op == SLJIT_REV_S16) {
+ if (src == dst) {
+ FAIL_IF(push_inst(compiler, RLWIMI | S(dst) | A(dst) | RLWI_SH(16) | RLWI_MBE(8, 15)));
+ FAIL_IF(push_inst(compiler, RLWINM | S(dst) | A(dst) | RLWI_SH(24) | RLWI_MBE(16, 31)));
+ } else {
+ FAIL_IF(push_inst(compiler, RLWINM | S(src) | A(dst) | RLWI_SH(8) | RLWI_MBE(16, 23)));
+ FAIL_IF(push_inst(compiler, RLWIMI | S(src) | A(dst) | RLWI_SH(24) | RLWI_MBE(24, 31)));
+ }
+
+ if (op == SLJIT_REV_U16)
+ return SLJIT_SUCCESS;
+ return push_inst(compiler, EXTSH | S(dst) | A(dst));
+ }
+
+#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
+ if (!is_32) {
+#if defined(_ARCH_PWR10) && _ARCH_PWR10
+ return push_inst(compiler, BRD | S(src) | A(dst));
+#else /* !POWER10 */
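+ /* No BRD below POWER10: byte-swap each 32-bit half with STWBRX into the opposite word of the
+    stack temp (low half into the high word slot, high half into the low word slot), then reload
+    the whole doubleword to get the fully byte-reversed value. */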
+ FAIL_IF(push_inst(compiler, ADDI | D(TMP_REG2) | A(0) | IMM(TMP_MEM_OFFSET_HI)));
+ FAIL_IF(push_inst(compiler, RLDICL | S(src) | A(TMP_REG1) | RLDI_SH(32) | RLDI_MB(32)));
+ FAIL_IF(push_inst(compiler, STWBRX | S(src) | A(SLJIT_SP) | B(TMP_REG2)));
+ FAIL_IF(push_inst(compiler, ADDI | D(TMP_REG2) | A(0) | IMM(TMP_MEM_OFFSET_LO)));
+ FAIL_IF(push_inst(compiler, STWBRX | S(TMP_REG1) | A(SLJIT_SP) | B(TMP_REG2)));
+ return push_inst(compiler, LD | D(dst) | A(SLJIT_SP) | TMP_MEM_OFFSET);
+#endif /* POWER10 */
+ }
+#endif /* SLJIT_CONFIG_PPC_64 */
+
+ FAIL_IF(push_inst(compiler, ADDI | D(TMP_REG2) | A(0) | IMM(TMP_MEM_OFFSET)));
+ FAIL_IF(push_inst(compiler, STWBRX | S(src) | A(SLJIT_SP) | B(TMP_REG2)));
+ FAIL_IF(push_inst(compiler, LWZ | D(dst) | A(SLJIT_SP) | TMP_MEM_OFFSET));
+
+#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
+ if (op == SLJIT_REV_S32)
+ return push_inst(compiler, EXTSW | S(dst) | A(dst));
+#endif /* SLJIT_CONFIG_PPC_64 */
+ return SLJIT_SUCCESS;
}
- srcw &= 0x3;
+ mem = src;
+ memw = srcw;
- if (srcw == 0)
- return push_inst(compiler, DCBT | A(src & REG_MASK) | B(OFFS_REG(src)));
+ if (dst & SLJIT_MEM) {
+ mem = dst;
+ memw = dstw;
- FAIL_IF(push_inst(compiler, SLWI_W(srcw) | S(OFFS_REG(src)) | A(TMP_REG1)));
- return push_inst(compiler, DCBT | A(src & REG_MASK) | B(TMP_REG1));
+ if (src & SLJIT_MEM) {
+ inp_flags = HALF_DATA | LOAD_DATA;
+
+ if (op != SLJIT_REV_U16 && op != SLJIT_REV_S16) {
+#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
+ inp_flags = (is_32 ? INT_DATA : WORD_DATA) | LOAD_DATA;
+#else /* !SLJIT_CONFIG_PPC_64 */
+ inp_flags = WORD_DATA | LOAD_DATA;
+#endif /* SLJIT_CONFIG_PPC_64 */
+ }
+
+ FAIL_IF(emit_op_mem(compiler, inp_flags, TMP_REG1, src, srcw, TMP_REG2));
+ src = TMP_REG1;
+ }
+ }
+
+ if (SLJIT_UNLIKELY(mem & OFFS_REG_MASK)) {
+ offs_reg = OFFS_REG(mem);
+ mem &= REG_MASK;
+ memw &= 0x3;
+
+ if (memw != 0) {
+ FAIL_IF(push_inst(compiler, SLWI_W(memw) | S(offs_reg) | A(TMP_REG2)));
+ offs_reg = TMP_REG2;
+ }
+#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
+ } else if (memw > 0x7fff7fffl || memw < -0x80000000l) {
+ FAIL_IF(load_immediate(compiler, TMP_REG2, memw));
+ offs_reg = TMP_REG2;
+ mem &= REG_MASK;
+#endif /* SLJIT_CONFIG_PPC_64 */
+ } else {
+ FAIL_IF(push_inst(compiler, ADDI | D(TMP_REG2) | A(mem & REG_MASK) | IMM(memw)));
+ if (memw > SIMM_MAX || memw < SIMM_MIN)
+ FAIL_IF(push_inst(compiler, ADDIS | D(TMP_REG2) | A(TMP_REG2) | IMM((memw + 0x8000) >> 16)));
+
+ mem = 0;
+ offs_reg = TMP_REG2;
+ }
+
+ if (op == SLJIT_REV_U16 || op == SLJIT_REV_S16) {
+ if (dst & SLJIT_MEM)
+ return push_inst(compiler, STHBRX | S(src) | A(mem) | B(offs_reg));
+
+ FAIL_IF(push_inst(compiler, LHBRX | S(dst) | A(mem) | B(offs_reg)));
+
+ if (op == SLJIT_REV_U16)
+ return SLJIT_SUCCESS;
+ return push_inst(compiler, EXTSH | S(dst) | A(dst));
+ }
+
+#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
+ if (!is_32) {
+ if (dst & SLJIT_MEM) {
+#if defined(_ARCH_PWR7) && _ARCH_PWR7
+ return push_inst(compiler, STDBRX | S(src) | A(mem) | B(offs_reg));
+#else /* !POWER7 */
+#if defined(SLJIT_LITTLE_ENDIAN) && SLJIT_LITTLE_ENDIAN
+ FAIL_IF(push_inst(compiler, RLDICL | S(src) | A(TMP_REG1) | RLDI_SH(32) | RLDI_MB(32)));
+ FAIL_IF(push_inst(compiler, STWBRX | S(TMP_REG1) | A(mem) | B(offs_reg)));
+ FAIL_IF(push_inst(compiler, ADDI | D(TMP_REG2) | A(offs_reg) | IMM(SSIZE_OF(s32))));
+ return push_inst(compiler, STWBRX | S(src) | A(mem) | B(TMP_REG2));
+#else /* !SLJIT_LITTLE_ENDIAN */
+ FAIL_IF(push_inst(compiler, STWBRX | S(src) | A(mem) | B(offs_reg)));
+ FAIL_IF(push_inst(compiler, RLDICL | S(src) | A(TMP_REG1) | RLDI_SH(32) | RLDI_MB(32)));
+ FAIL_IF(push_inst(compiler, ADDI | D(TMP_REG2) | A(offs_reg) | IMM(SSIZE_OF(s32))));
+ return push_inst(compiler, STWBRX | S(TMP_REG1) | A(mem) | B(TMP_REG2));
+#endif /* SLJIT_LITTLE_ENDIAN */
+#endif /* POWER7 */
+ }
+#if defined(_ARCH_PWR7) && _ARCH_PWR7
+ return push_inst(compiler, LDBRX | S(dst) | A(mem) | B(offs_reg));
+#else /* !POWER7 */
+ FAIL_IF(push_inst(compiler, LWBRX | LWBRX_FIRST_REG | A(mem) | B(offs_reg)));
+ FAIL_IF(push_inst(compiler, ADDI | D(TMP_REG2) | A(offs_reg) | IMM(SSIZE_OF(s32))));
+ FAIL_IF(push_inst(compiler, LWBRX | LWBRX_SECOND_REG | A(mem) | B(TMP_REG2)));
+ return push_inst(compiler, RLDIMI | S(TMP_REG1) | A(dst) | RLDI_SH(32) | RLDI_MB(0));
+#endif /* POWER7 */
+ }
+#endif /* SLJIT_CONFIG_PPC_64 */
+
+ if (dst & SLJIT_MEM)
+ return push_inst(compiler, STWBRX | S(src) | A(mem) | B(offs_reg));
+
+ FAIL_IF(push_inst(compiler, LWBRX | S(dst) | A(mem) | B(offs_reg)));
+#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
+ if (op == SLJIT_REV_S32)
+ return push_inst(compiler, EXTSW | S(dst) | A(dst));
+#endif /* SLJIT_CONFIG_PPC_64 */
+ return SLJIT_SUCCESS;
}
#define EMIT_MOV(type, type_flags, type_cast) \
- emit_op(compiler, (src & SLJIT_IMM) ? SLJIT_MOV : type, flags | (type_flags), dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? type_cast srcw : srcw)
+ emit_op(compiler, (src == SLJIT_IMM) ? SLJIT_MOV : type, flags | (type_flags), dst, dstw, TMP_REG1, 0, src, (src == SLJIT_IMM) ? type_cast srcw : srcw)
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compiler, sljit_s32 op,
sljit_s32 dst, sljit_sw dstw,
@@ -1353,19 +1580,19 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compile
if (GET_FLAG_TYPE(op_flags) == SLJIT_OVERFLOW)
FAIL_IF(push_inst(compiler, MTXER | S(TMP_ZERO)));
- if (op < SLJIT_NOT && FAST_IS_REG(src) && src == dst) {
+ if (op <= SLJIT_MOV_P && FAST_IS_REG(src) && src == dst) {
if (!TYPE_CAST_NEEDED(op))
return SLJIT_SUCCESS;
}
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
if (op_flags & SLJIT_32) {
- if (op < SLJIT_NOT) {
+ if (op <= SLJIT_MOV_P) {
if (src & SLJIT_MEM) {
if (op == SLJIT_MOV_S32)
op = SLJIT_MOV_U32;
}
- else if (src & SLJIT_IMM) {
+ else if (src == SLJIT_IMM) {
if (op == SLJIT_MOV_U32)
op = SLJIT_MOV_S32;
}
@@ -1410,16 +1637,26 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compile
case SLJIT_MOV_S16:
return EMIT_MOV(SLJIT_MOV_S16, HALF_DATA | SIGNED_DATA, (sljit_s16));
- case SLJIT_NOT:
- return emit_op(compiler, SLJIT_NOT, flags, dst, dstw, TMP_REG1, 0, src, srcw);
-
case SLJIT_CLZ:
case SLJIT_CTZ:
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
- return emit_op(compiler, op, flags | (!(op_flags & SLJIT_32) ? 0 : ALT_FORM1), dst, dstw, TMP_REG1, 0, src, srcw);
-#else
+ if (op_flags & SLJIT_32)
+ flags |= ALT_FORM1;
+#endif /* SLJIT_CONFIG_PPC_64 */
return emit_op(compiler, op, flags, dst, dstw, TMP_REG1, 0, src, srcw);
-#endif
+ case SLJIT_REV_U32:
+ case SLJIT_REV_S32:
+#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
+ op |= SLJIT_32;
+#endif /* SLJIT_CONFIG_PPC_64 */
+ /* fallthrough */
+ case SLJIT_REV:
+ case SLJIT_REV_U16:
+ case SLJIT_REV_S16:
+#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
+ op |= (op_flags & SLJIT_32);
+#endif /* SLJIT_CONFIG_PPC_64 */
+ return emit_rev(compiler, op, dst, dstw, src, srcw);
}
return SLJIT_SUCCESS;
@@ -1427,40 +1664,22 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compile
#undef EMIT_MOV
+/* Macros for checking different operand types / values. */
#define TEST_SL_IMM(src, srcw) \
- (((src) & SLJIT_IMM) && (srcw) <= SIMM_MAX && (srcw) >= SIMM_MIN)
-
+ ((src) == SLJIT_IMM && (srcw) <= SIMM_MAX && (srcw) >= SIMM_MIN)
#define TEST_UL_IMM(src, srcw) \
- (((src) & SLJIT_IMM) && !((srcw) & ~0xffff))
-
-#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
-#define TEST_SH_IMM(src, srcw) \
- (((src) & SLJIT_IMM) && !((srcw) & 0xffff) && (srcw) <= 0x7fffffffl && (srcw) >= -0x80000000l)
-#else
-#define TEST_SH_IMM(src, srcw) \
- (((src) & SLJIT_IMM) && !((srcw) & 0xffff))
-#endif
-
+ ((src) == SLJIT_IMM && !((srcw) & ~0xffff))
#define TEST_UH_IMM(src, srcw) \
- (((src) & SLJIT_IMM) && !((srcw) & ~(sljit_sw)0xffff0000))
+ ((src) == SLJIT_IMM && !((srcw) & ~(sljit_sw)0xffff0000))
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
+#define TEST_SH_IMM(src, srcw) \
+ ((src) == SLJIT_IMM && !((srcw) & 0xffff) && (srcw) <= 0x7fffffffl && (srcw) >= -0x80000000l)
#define TEST_ADD_IMM(src, srcw) \
- (((src) & SLJIT_IMM) && (srcw) <= 0x7fff7fffl && (srcw) >= -0x80000000l)
-#else
-#define TEST_ADD_IMM(src, srcw) \
- ((src) & SLJIT_IMM)
-#endif
-
-#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
+ ((src) == SLJIT_IMM && (srcw) <= 0x7fff7fffl && (srcw) >= -0x80000000l)
#define TEST_UI_IMM(src, srcw) \
- (((src) & SLJIT_IMM) && !((srcw) & ~0xffffffff))
-#else
-#define TEST_UI_IMM(src, srcw) \
- ((src) & SLJIT_IMM)
-#endif
+ ((src) == SLJIT_IMM && !((srcw) & ~0xffffffff))
-#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
#define TEST_ADD_FORM1(op) \
(GET_FLAG_TYPE(op) == SLJIT_OVERFLOW \
|| (op & (SLJIT_32 | SLJIT_SET_Z | VARIABLE_FLAG_MASK)) == (SLJIT_32 | SLJIT_SET_Z | SLJIT_SET_CARRY))
@@ -1470,14 +1689,22 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compile
#define TEST_SUB_FORM3(op) \
(GET_FLAG_TYPE(op) == SLJIT_OVERFLOW \
|| (op & (SLJIT_32 | SLJIT_SET_Z)) == (SLJIT_32 | SLJIT_SET_Z))
-#else
+
+#else /* !SLJIT_CONFIG_PPC_64 */
+#define TEST_SH_IMM(src, srcw) \
+ ((src) == SLJIT_IMM && !((srcw) & 0xffff))
+#define TEST_ADD_IMM(src, srcw) \
+ ((src) == SLJIT_IMM)
+#define TEST_UI_IMM(src, srcw) \
+ ((src) == SLJIT_IMM)
+
#define TEST_ADD_FORM1(op) \
(GET_FLAG_TYPE(op) == SLJIT_OVERFLOW)
#define TEST_SUB_FORM2(op) \
(GET_FLAG_TYPE(op) >= SLJIT_SIG_LESS && GET_FLAG_TYPE(op) <= SLJIT_SIG_LESS_EQUAL)
#define TEST_SUB_FORM3(op) \
(GET_FLAG_TYPE(op) == SLJIT_OVERFLOW)
-#endif
+#endif /* SLJIT_CONFIG_PPC_64 */
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compiler, sljit_s32 op,
sljit_s32 dst, sljit_sw dstw,
@@ -1496,9 +1723,9 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compile
if (op & SLJIT_32) {
/* Most operations expect sign extended arguments. */
flags |= INT_DATA | SIGNED_DATA;
- if (src1 & SLJIT_IMM)
+ if (src1 == SLJIT_IMM)
src1w = (sljit_s32)(src1w);
- if (src2 & SLJIT_IMM)
+ if (src2 == SLJIT_IMM)
src2w = (sljit_s32)(src2w);
if (HAS_FLAGS(op))
flags |= ALT_SIGN_EXT;
@@ -1514,7 +1741,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compile
if (TEST_ADD_FORM1(op))
return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM1, dst, dstw, src1, src1w, src2, src2w);
- if (!HAS_FLAGS(op) && ((src1 | src2) & SLJIT_IMM)) {
+ if (!HAS_FLAGS(op) && (src1 == SLJIT_IMM || src2 == SLJIT_IMM)) {
if (TEST_SL_IMM(src2, src2w)) {
compiler->imm = (sljit_ins)src2w & 0xffff;
return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM2, dst, dstw, src1, src1w, TMP_REG2, 0);
@@ -1565,7 +1792,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compile
return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM3, dst, dstw, src2, src2w, TMP_REG2, 0);
}
}
- return emit_op(compiler, SLJIT_ADD, flags | ((GET_FLAG_TYPE(op) == GET_FLAG_TYPE(SLJIT_SET_CARRY)) ? ALT_FORM5 : 0), dst, dstw, src1, src1w, src2, src2w);
+ return emit_op(compiler, SLJIT_ADD, flags | ((GET_FLAG_TYPE(op) == SLJIT_CARRY) ? ALT_FORM5 : 0), dst, dstw, src1, src1w, src2, src2w);
case SLJIT_ADDC:
compiler->status_flags_state = SLJIT_CURRENT_FLAGS_ADD;
@@ -1575,7 +1802,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compile
compiler->status_flags_state = SLJIT_CURRENT_FLAGS_SUB;
if (GET_FLAG_TYPE(op) >= SLJIT_LESS && GET_FLAG_TYPE(op) <= SLJIT_LESS_EQUAL) {
- if (dst == TMP_REG2) {
+ if (dst == TMP_REG1) {
if (TEST_UL_IMM(src2, src2w)) {
compiler->imm = (sljit_ins)src2w & 0xffff;
return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM1 | ALT_FORM2, dst, dstw, src1, src1w, TMP_REG2, 0);
@@ -1583,14 +1810,14 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compile
return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM1, dst, dstw, src1, src1w, src2, src2w);
}
- if ((src2 & SLJIT_IMM) && src2w >= 0 && src2w <= (SIMM_MAX + 1)) {
+ if (src2 == SLJIT_IMM && src2w >= 0 && src2w <= (SIMM_MAX + 1)) {
compiler->imm = (sljit_ins)src2w;
return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM1 | ALT_FORM2 | ALT_FORM3, dst, dstw, src1, src1w, TMP_REG2, 0);
}
return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM1 | ALT_FORM3, dst, dstw, src1, src1w, src2, src2w);
}
- if (dst == TMP_REG2 && GET_FLAG_TYPE(op) <= SLJIT_SIG_LESS_EQUAL) {
+ if (dst == TMP_REG1 && GET_FLAG_TYPE(op) <= SLJIT_SIG_LESS_EQUAL) {
if (TEST_SL_IMM(src2, src2w)) {
compiler->imm = (sljit_ins)src2w & 0xffff;
return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM2 | ALT_FORM3, dst, dstw, src1, src1w, TMP_REG2, 0);
@@ -1599,7 +1826,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compile
}
if (TEST_SUB_FORM2(op)) {
- if ((src2 & SLJIT_IMM) && src2w >= -SIMM_MAX && src2w <= SIMM_MAX) {
+ if (src2 == SLJIT_IMM && src2w >= -SIMM_MAX && src2w <= SIMM_MAX) {
compiler->imm = (sljit_ins)src2w & 0xffff;
return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM2 | ALT_FORM3 | ALT_FORM4, dst, dstw, src1, src1w, TMP_REG2, 0);
}
@@ -1632,7 +1859,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compile
}
/* We know ALT_SIGN_EXT is set if it is an SLJIT_32 on 64 bit systems. */
- return emit_op(compiler, SLJIT_SUB, flags | ((GET_FLAG_TYPE(op) == GET_FLAG_TYPE(SLJIT_SET_CARRY)) ? ALT_FORM5 : 0), dst, dstw, src1, src1w, src2, src2w);
+ return emit_op(compiler, SLJIT_SUB, flags | ((GET_FLAG_TYPE(op) == SLJIT_CARRY) ? ALT_FORM5 : 0), dst, dstw, src1, src1w, src2, src2w);
case SLJIT_SUBC:
compiler->status_flags_state = SLJIT_CURRENT_FLAGS_SUB;
@@ -1657,9 +1884,16 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compile
FAIL_IF(push_inst(compiler, MTXER | S(TMP_ZERO)));
return emit_op(compiler, SLJIT_MUL, flags, dst, dstw, src1, src1w, src2, src2w);
+ case SLJIT_XOR:
+ if (src2 == SLJIT_IMM && src2w == -1) {
+ return emit_op(compiler, GET_OPCODE(op), flags | ALT_FORM4, dst, dstw, TMP_REG1, 0, src1, src1w);
+ }
+ if (src1 == SLJIT_IMM && src1w == -1) {
+ return emit_op(compiler, GET_OPCODE(op), flags | ALT_FORM4, dst, dstw, TMP_REG1, 0, src2, src2w);
+ }
+ /* fallthrough */
case SLJIT_AND:
case SLJIT_OR:
- case SLJIT_XOR:
/* Commutative unsigned operations. */
if (!HAS_FLAGS(op) || GET_OPCODE(op) == SLJIT_AND) {
if (TEST_UL_IMM(src2, src2w)) {
@@ -1704,7 +1938,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compile
if (op & SLJIT_32)
flags |= ALT_FORM2;
#endif
- if (src2 & SLJIT_IMM) {
+ if (src2 == SLJIT_IMM) {
compiler->imm = (sljit_ins)src2w;
return emit_op(compiler, GET_OPCODE(op), flags | ALT_FORM1, dst, dstw, src1, src1w, TMP_REG2, 0);
}
@@ -1722,18 +1956,37 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2u(struct sljit_compiler *compil
CHECK(check_sljit_emit_op2(compiler, op, 1, 0, 0, src1, src1w, src2, src2w));
SLJIT_SKIP_CHECKS(compiler);
- return sljit_emit_op2(compiler, op, TMP_REG2, 0, src1, src1w, src2, src2w);
+ return sljit_emit_op2(compiler, op, TMP_REG1, 0, src1, src1w, src2, src2w);
}
#undef TEST_ADD_FORM1
#undef TEST_SUB_FORM2
#undef TEST_SUB_FORM3
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_shift_into(struct sljit_compiler *compiler, sljit_s32 op,
- sljit_s32 src_dst,
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2r(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst_reg,
sljit_s32 src1, sljit_sw src1w,
sljit_s32 src2, sljit_sw src2w)
{
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_op2r(compiler, op, dst_reg, src1, src1w, src2, src2w));
+
+ switch (GET_OPCODE(op)) {
+ case SLJIT_MULADD:
+ SLJIT_SKIP_CHECKS(compiler);
+ FAIL_IF(sljit_emit_op2(compiler, SLJIT_MUL | (op & SLJIT_32), TMP_REG2, 0, src1, src1w, src2, src2w));
+ return push_inst(compiler, ADD | D(dst_reg) | A(dst_reg) | B(TMP_REG2));
+ }
+
+ return SLJIT_SUCCESS;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_shift_into(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst_reg,
+ sljit_s32 src1_reg,
+ sljit_s32 src2_reg,
+ sljit_s32 src3, sljit_sw src3w)
+{
sljit_s32 is_right;
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
sljit_s32 inp_flags = ((op & SLJIT_32) ? INT_DATA : WORD_DATA) | LOAD_DATA;
@@ -1744,85 +1997,97 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_shift_into(struct sljit_compiler *
#endif /* SLJIT_CONFIG_PPC_64 */
CHECK_ERROR();
- CHECK(check_sljit_emit_shift_into(compiler, op, src_dst, src1, src1w, src2, src2w));
+ CHECK(check_sljit_emit_shift_into(compiler, op, dst_reg, src1_reg, src2_reg, src3, src3w));
is_right = (GET_OPCODE(op) == SLJIT_LSHR || GET_OPCODE(op) == SLJIT_MLSHR);
- if (src_dst == src1) {
+ if (src1_reg == src2_reg) {
SLJIT_SKIP_CHECKS(compiler);
- return sljit_emit_op2(compiler, (is_right ? SLJIT_ROTR : SLJIT_ROTL) | (op & SLJIT_32), src_dst, 0, src_dst, 0, src2, src2w);
+ return sljit_emit_op2(compiler, (is_right ? SLJIT_ROTR : SLJIT_ROTL) | (op & SLJIT_32), dst_reg, 0, src1_reg, 0, src3, src3w);
}
- ADJUST_LOCAL_OFFSET(src1, src1w);
- ADJUST_LOCAL_OFFSET(src2, src2w);
+ ADJUST_LOCAL_OFFSET(src3, src3w);
- if (src2 & SLJIT_IMM) {
- src2w &= bit_length - 1;
+ if (src3 == SLJIT_IMM) {
+ src3w &= bit_length - 1;
- if (src2w == 0)
+ if (src3w == 0)
return SLJIT_SUCCESS;
- } else if (src2 & SLJIT_MEM) {
- FAIL_IF(emit_op_mem(compiler, inp_flags, TMP_REG2, src2, src2w, TMP_REG2));
- src2 = TMP_REG2;
- }
-
- if (src1 & SLJIT_MEM) {
- FAIL_IF(emit_op_mem(compiler, inp_flags, TMP_REG1, src1, src1w, TMP_REG1));
- src1 = TMP_REG1;
- } else if (src1 & SLJIT_IMM) {
- FAIL_IF(load_immediate(compiler, TMP_REG1, src1w));
- src1 = TMP_REG1;
- }
- if (src2 & SLJIT_IMM) {
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
if (!(op & SLJIT_32)) {
if (is_right) {
- FAIL_IF(push_inst(compiler, SRDI(src2w) | S(src_dst) | A(src_dst)));
- return push_inst(compiler, RLDIMI | S(src1) | A(src_dst) | RLDI_SH(64 - src2w) | RLDI_MB(0));
+ FAIL_IF(push_inst(compiler, SRDI(src3w) | S(src1_reg) | A(dst_reg)));
+ return push_inst(compiler, RLDIMI | S(src2_reg) | A(dst_reg) | RLDI_SH(64 - src3w) | RLDI_MB(0));
}
- FAIL_IF(push_inst(compiler, SLDI(src2w) | S(src_dst) | A(src_dst)));
+ FAIL_IF(push_inst(compiler, SLDI(src3w) | S(src1_reg) | A(dst_reg)));
/* Computes SRDI(64 - src3w). */
- FAIL_IF(push_inst(compiler, RLDICL | S(src1) | A(TMP_REG1) | RLDI_SH(src2w) | RLDI_MB(64 - src2w)));
- return push_inst(compiler, OR | S(src_dst) | A(src_dst) | B(TMP_REG1));
+ FAIL_IF(push_inst(compiler, RLDICL | S(src2_reg) | A(TMP_REG1) | RLDI_SH(src3w) | RLDI_MB(64 - src3w)));
+ return push_inst(compiler, OR | S(dst_reg) | A(dst_reg) | B(TMP_REG1));
}
#endif /* SLJIT_CONFIG_PPC_64 */
if (is_right) {
- FAIL_IF(push_inst(compiler, SRWI(src2w) | S(src_dst) | A(src_dst)));
- return push_inst(compiler, RLWIMI | S(src1) | A(src_dst) | RLWI_SH(32 - src2w) | RLWI_MBE(0, src2w - 1));
+ FAIL_IF(push_inst(compiler, SRWI(src3w) | S(src1_reg) | A(dst_reg)));
+ return push_inst(compiler, RLWIMI | S(src2_reg) | A(dst_reg) | RLWI_SH(32 - src3w) | RLWI_MBE(0, src3w - 1));
}
- FAIL_IF(push_inst(compiler, SLWI(src2w) | S(src_dst) | A(src_dst)));
- return push_inst(compiler, RLWIMI | S(src1) | A(src_dst) | RLWI_SH(src2w) | RLWI_MBE(32 - src2w, 31));
+ FAIL_IF(push_inst(compiler, SLWI(src3w) | S(src1_reg) | A(dst_reg)));
+ return push_inst(compiler, RLWIMI | S(src2_reg) | A(dst_reg) | RLWI_SH(src3w) | RLWI_MBE(32 - src3w, 31));
+ }
+
+ if (src3 & SLJIT_MEM) {
+ FAIL_IF(emit_op_mem(compiler, inp_flags, TMP_REG2, src3, src3w, TMP_REG2));
+ src3 = TMP_REG2;
}
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
if (!(op & SLJIT_32)) {
- if (GET_OPCODE(op) == SLJIT_MSHL || GET_OPCODE(op) == SLJIT_MLSHR) {
- FAIL_IF(push_inst(compiler, ANDI | S(src2) | A(TMP_REG2) | 0x3f));
- src2 = TMP_REG2;
+ if (GET_OPCODE(op) == SLJIT_MSHL || GET_OPCODE(op) == SLJIT_MLSHR || dst_reg == src3) {
+ FAIL_IF(push_inst(compiler, ANDI | S(src3) | A(TMP_REG2) | 0x3f));
+ src3 = TMP_REG2;
}
- FAIL_IF(push_inst(compiler, (is_right ? SRD : SLD) | S(src_dst) | A(src_dst) | B(src2)));
- FAIL_IF(push_inst(compiler, (is_right ? SLDI(1) : SRDI(1)) | S(src1) | A(TMP_REG1)));
- FAIL_IF(push_inst(compiler, XORI | S(src2) | A(TMP_REG2) | 0x3f));
+ FAIL_IF(push_inst(compiler, (is_right ? SRD : SLD) | S(src1_reg) | A(dst_reg) | B(src3)));
+ FAIL_IF(push_inst(compiler, (is_right ? SLDI(1) : SRDI(1)) | S(src2_reg) | A(TMP_REG1)));
+ FAIL_IF(push_inst(compiler, XORI | S(src3) | A(TMP_REG2) | 0x3f));
FAIL_IF(push_inst(compiler, (is_right ? SLD : SRD) | S(TMP_REG1) | A(TMP_REG1) | B(TMP_REG2)));
- return push_inst(compiler, OR | S(src_dst) | A(src_dst) | B(TMP_REG1));
+ return push_inst(compiler, OR | S(dst_reg) | A(dst_reg) | B(TMP_REG1));
}
#endif /* SLJIT_CONFIG_PPC_64 */
- if (GET_OPCODE(op) == SLJIT_MSHL || GET_OPCODE(op) == SLJIT_MLSHR) {
- FAIL_IF(push_inst(compiler, ANDI | S(src2) | A(TMP_REG2) | 0x1f));
- src2 = TMP_REG2;
+ if (GET_OPCODE(op) == SLJIT_MSHL || GET_OPCODE(op) == SLJIT_MLSHR || dst_reg == src3) {
+ FAIL_IF(push_inst(compiler, ANDI | S(src3) | A(TMP_REG2) | 0x1f));
+ src3 = TMP_REG2;
}
- FAIL_IF(push_inst(compiler, (is_right ? SRW : SLW) | S(src_dst) | A(src_dst) | B(src2)));
- FAIL_IF(push_inst(compiler, (is_right ? SLWI(1) : SRWI(1)) | S(src1) | A(TMP_REG1)));
- FAIL_IF(push_inst(compiler, XORI | S(src2) | A(TMP_REG2) | 0x1f));
+ FAIL_IF(push_inst(compiler, (is_right ? SRW : SLW) | S(src1_reg) | A(dst_reg) | B(src3)));
+ FAIL_IF(push_inst(compiler, (is_right ? SLWI(1) : SRWI(1)) | S(src2_reg) | A(TMP_REG1)));
+ FAIL_IF(push_inst(compiler, XORI | S(src3) | A(TMP_REG2) | 0x1f));
FAIL_IF(push_inst(compiler, (is_right ? SLW : SRW) | S(TMP_REG1) | A(TMP_REG1) | B(TMP_REG2)));
- return push_inst(compiler, OR | S(src_dst) | A(src_dst) | B(TMP_REG1));
+ return push_inst(compiler, OR | S(dst_reg) | A(dst_reg) | B(TMP_REG1));
+}
+
+static sljit_s32 emit_prefetch(struct sljit_compiler *compiler,
+ sljit_s32 src, sljit_sw srcw)
+{
+ if (!(src & OFFS_REG_MASK)) {
+ if (srcw == 0 && (src & REG_MASK))
+ return push_inst(compiler, DCBT | A(0) | B(src & REG_MASK));
+
+ FAIL_IF(load_immediate(compiler, TMP_REG1, srcw));
+ /* Works with SLJIT_MEM0() case as well. */
+ return push_inst(compiler, DCBT | A(src & REG_MASK) | B(TMP_REG1));
+ }
+
+ srcw &= 0x3;
+
+ if (srcw == 0)
+ return push_inst(compiler, DCBT | A(src & REG_MASK) | B(OFFS_REG(src)));
+
+ FAIL_IF(push_inst(compiler, SLWI_W(srcw) | S(OFFS_REG(src)) | A(TMP_REG1)));
+ return push_inst(compiler, DCBT | A(src & REG_MASK) | B(TMP_REG1));
}
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_src(struct sljit_compiler *compiler, sljit_s32 op,
@@ -1854,21 +2119,52 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_src(struct sljit_compiler *comp
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 reg)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_dst(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw)
{
- CHECK_REG_INDEX(check_sljit_get_register_index(reg));
- return reg_map[reg];
+ sljit_s32 dst_r;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_op_dst(compiler, op, dst, dstw));
+ ADJUST_LOCAL_OFFSET(dst, dstw);
+
+ switch (op) {
+ case SLJIT_FAST_ENTER:
+ if (FAST_IS_REG(dst))
+ return push_inst(compiler, MFLR | D(dst));
+
+ FAIL_IF(push_inst(compiler, MFLR | D(TMP_REG1)));
+ break;
+ case SLJIT_GET_RETURN_ADDRESS:
+ dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
+ FAIL_IF(emit_op_mem(compiler, WORD_DATA | LOAD_DATA, dst_r, SLJIT_MEM1(SLJIT_SP), compiler->local_size + LR_SAVE_OFFSET, TMP_REG2));
+ break;
+ }
+
+ if (dst & SLJIT_MEM)
+ return emit_op_mem(compiler, WORD_DATA, TMP_REG1, dst, dstw, TMP_REG2);
+
+ return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_float_register_index(sljit_s32 reg)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 type, sljit_s32 reg)
{
- CHECK_REG_INDEX(check_sljit_get_float_register_index(reg));
+ CHECK_REG_INDEX(check_sljit_get_register_index(type, reg));
+
+ if (type == SLJIT_GP_REGISTER)
+ return reg_map[reg];
+
+ if (type != SLJIT_FLOAT_REGISTER)
+ return -1;
+
return freg_map[reg];
}
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *compiler,
void *instruction, sljit_u32 size)
{
+ SLJIT_UNUSED_ARG(size);
+
CHECK_ERROR();
CHECK(check_sljit_emit_op_custom(compiler, instruction, size));
@@ -1879,24 +2175,8 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *c
/* Floating point operators */
/* --------------------------------------------------------------------- */
-#define FLOAT_DATA(op) (DOUBLE_DATA | ((op & SLJIT_32) >> 6))
#define SELECT_FOP(op, single, double) ((sljit_ins)((op & SLJIT_32) ? single : double))
-#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
-#define FLOAT_TMP_MEM_OFFSET (6 * sizeof(sljit_sw))
-#else
-#define FLOAT_TMP_MEM_OFFSET (2 * sizeof(sljit_sw))
-
-#if (defined SLJIT_LITTLE_ENDIAN && SLJIT_LITTLE_ENDIAN)
-#define FLOAT_TMP_MEM_OFFSET_LOW (2 * sizeof(sljit_sw))
-#define FLOAT_TMP_MEM_OFFSET_HI (3 * sizeof(sljit_sw))
-#else
-#define FLOAT_TMP_MEM_OFFSET_LOW (3 * sizeof(sljit_sw))
-#define FLOAT_TMP_MEM_OFFSET_HI (2 * sizeof(sljit_sw))
-#endif
-
-#endif /* SLJIT_CONFIG_PPC_64 */
-
static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_compiler *compiler, sljit_s32 op,
sljit_s32 dst, sljit_sw dstw,
sljit_s32 src, sljit_sw srcw)
@@ -1913,19 +2193,19 @@ static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_comp
if (op == SLJIT_CONV_SW_FROM_F64) {
if (FAST_IS_REG(dst)) {
- FAIL_IF(emit_op_mem(compiler, DOUBLE_DATA, TMP_FREG1, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET, TMP_REG1));
- return emit_op_mem(compiler, WORD_DATA | LOAD_DATA, dst, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET, TMP_REG1);
+ FAIL_IF(push_inst(compiler, STFD | FS(TMP_FREG1) | A(SLJIT_SP) | TMP_MEM_OFFSET));
+ return push_inst(compiler, LD | S(dst) | A(SLJIT_SP) | TMP_MEM_OFFSET);
}
return emit_op_mem(compiler, DOUBLE_DATA, TMP_FREG1, dst, dstw, TMP_REG1);
}
-#else
+#else /* !SLJIT_CONFIG_PPC_64 */
FAIL_IF(push_inst(compiler, FCTIWZ | FD(TMP_FREG1) | FB(src)));
-#endif
+#endif /* SLJIT_CONFIG_PPC_64 */
if (FAST_IS_REG(dst)) {
- FAIL_IF(load_immediate(compiler, TMP_REG1, FLOAT_TMP_MEM_OFFSET));
+ FAIL_IF(load_immediate(compiler, TMP_REG1, TMP_MEM_OFFSET));
FAIL_IF(push_inst(compiler, STFIWX | FS(TMP_FREG1) | A(SLJIT_SP) | B(TMP_REG1)));
- return emit_op_mem(compiler, INT_DATA | LOAD_DATA, dst, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET, TMP_REG1);
+ return push_inst(compiler, LWZ | S(dst) | A(SLJIT_SP) | TMP_MEM_OFFSET);
}
SLJIT_ASSERT(dst & SLJIT_MEM);
@@ -1935,16 +2215,14 @@ static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_comp
if (dstw) {
FAIL_IF(push_inst(compiler, SLWI_W(dstw) | S(OFFS_REG(dst)) | A(TMP_REG1)));
dstw = TMP_REG1;
- }
- else
+ } else
dstw = OFFS_REG(dst);
}
else {
if ((dst & REG_MASK) && !dstw) {
dstw = dst & REG_MASK;
dst = 0;
- }
- else {
+ } else {
/* This works regardless of whether we have SLJIT_MEM1 or SLJIT_MEM0. */
FAIL_IF(load_immediate(compiler, TMP_REG1, dstw));
dstw = TMP_REG1;
@@ -1954,85 +2232,6 @@ static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_comp
return push_inst(compiler, STFIWX | FS(TMP_FREG1) | A(dst & REG_MASK) | B(dstw));
}
-static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_sw(struct sljit_compiler *compiler, sljit_s32 op,
- sljit_s32 dst, sljit_sw dstw,
- sljit_s32 src, sljit_sw srcw)
-{
-#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
-
- sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
-
- if (src & SLJIT_IMM) {
- if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_S32)
- srcw = (sljit_s32)srcw;
-
- FAIL_IF(load_immediate(compiler, TMP_REG1, srcw));
- src = TMP_REG1;
- }
- else if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_S32) {
- if (FAST_IS_REG(src))
- FAIL_IF(push_inst(compiler, EXTSW | S(src) | A(TMP_REG1)));
- else
- FAIL_IF(emit_op_mem(compiler, INT_DATA | SIGNED_DATA | LOAD_DATA, TMP_REG1, src, srcw, TMP_REG1));
- src = TMP_REG1;
- }
-
- if (FAST_IS_REG(src)) {
- FAIL_IF(emit_op_mem(compiler, WORD_DATA, src, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET, TMP_REG1));
- FAIL_IF(emit_op_mem(compiler, DOUBLE_DATA | LOAD_DATA, TMP_FREG1, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET, TMP_REG1));
- }
- else
- FAIL_IF(emit_op_mem(compiler, DOUBLE_DATA | LOAD_DATA, TMP_FREG1, src, srcw, TMP_REG1));
-
- FAIL_IF(push_inst(compiler, FCFID | FD(dst_r) | FB(TMP_FREG1)));
-
- if (dst & SLJIT_MEM)
- return emit_op_mem(compiler, FLOAT_DATA(op), TMP_FREG1, dst, dstw, TMP_REG1);
- if (op & SLJIT_32)
- return push_inst(compiler, FRSP | FD(dst_r) | FB(dst_r));
- return SLJIT_SUCCESS;
-
-#else
-
- sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
- sljit_s32 invert_sign = 1;
-
- if (src & SLJIT_IMM) {
- FAIL_IF(load_immediate(compiler, TMP_REG1, srcw ^ (sljit_sw)0x80000000));
- src = TMP_REG1;
- invert_sign = 0;
- }
- else if (!FAST_IS_REG(src)) {
- FAIL_IF(emit_op_mem(compiler, WORD_DATA | SIGNED_DATA | LOAD_DATA, TMP_REG1, src, srcw, TMP_REG1));
- src = TMP_REG1;
- }
-
- /* First, a special double precision floating point value is constructed: 2^52 + (input xor 2^31).
-    The double precision format has 53 bits of precision, so the lower 32 bits of the mantissa hold
-    the biased input exactly. The xor with 2^31 is the same as adding 0x80000000 to the input,
-    which shifts it into the 0 - 0xffffffff range. To get the converted floating point value,
-    we need to subtract 2^52 + 2^31 from the constructed value. */
- FAIL_IF(push_inst(compiler, ADDIS | D(TMP_REG2) | A(0) | 0x4330));
- if (invert_sign)
- FAIL_IF(push_inst(compiler, XORIS | S(src) | A(TMP_REG1) | 0x8000));
- FAIL_IF(emit_op_mem(compiler, WORD_DATA, TMP_REG2, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET_HI, TMP_REG1));
- FAIL_IF(emit_op_mem(compiler, WORD_DATA, TMP_REG1, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET_LOW, TMP_REG2));
- FAIL_IF(push_inst(compiler, ADDIS | D(TMP_REG1) | A(0) | 0x8000));
- FAIL_IF(emit_op_mem(compiler, DOUBLE_DATA | LOAD_DATA, TMP_FREG1, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET, TMP_REG1));
- FAIL_IF(emit_op_mem(compiler, WORD_DATA, TMP_REG1, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET_LOW, TMP_REG2));
- FAIL_IF(emit_op_mem(compiler, DOUBLE_DATA | LOAD_DATA, TMP_FREG2, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET, TMP_REG1));
-
- FAIL_IF(push_inst(compiler, FSUB | FD(dst_r) | FA(TMP_FREG1) | FB(TMP_FREG2)));
-
- if (dst & SLJIT_MEM)
- return emit_op_mem(compiler, FLOAT_DATA(op), TMP_FREG1, dst, dstw, TMP_REG1);
- if (op & SLJIT_32)
- return push_inst(compiler, FRSP | FD(dst_r) | FB(dst_r));
- return SLJIT_SUCCESS;
-
-#endif
-}
-
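/* The removed 32-bit path above relies on the classic IEEE-754 magic-number conversion.
   A minimal, standalone C sketch of the same arithmetic (not sljit code; the helper name
   is illustrative, and IEEE-754 doubles are assumed): */
#include <stdint.h>
#include <string.h>

static double s32_to_double_via_bits(int32_t x)
{
	uint64_t bits;
	double value, magic;

	/* Bias x into the unsigned range by flipping the sign bit, then place it in the
	   low 32 bits of a double whose high word is 0x43300000 (i.e. 2^52). */
	bits = ((uint64_t)0x43300000u << 32) | ((uint32_t)x ^ 0x80000000u);
	memcpy(&value, &bits, sizeof(value));	/* value == 2^52 + (x + 2^31) */

	bits = ((uint64_t)0x43300000u << 32) | 0x80000000u;
	memcpy(&magic, &bits, sizeof(magic));	/* magic == 2^52 + 2^31 */

	return value - magic;	/* exact: the difference is the original x */
}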
static SLJIT_INLINE sljit_s32 sljit_emit_fop1_cmp(struct sljit_compiler *compiler, sljit_s32 op,
sljit_s32 src1, sljit_sw src1w,
sljit_s32 src2, sljit_sw src2w)
@@ -2051,13 +2250,10 @@ static SLJIT_INLINE sljit_s32 sljit_emit_fop1_cmp(struct sljit_compiler *compile
switch (GET_FLAG_TYPE(op)) {
case SLJIT_UNORDERED_OR_EQUAL:
- case SLJIT_ORDERED_NOT_EQUAL:
return push_inst(compiler, CROR | ((4 + 2) << 21) | ((4 + 2) << 16) | ((4 + 3) << 11));
case SLJIT_UNORDERED_OR_LESS:
- case SLJIT_ORDERED_GREATER_EQUAL:
return push_inst(compiler, CROR | ((4 + 0) << 21) | ((4 + 0) << 16) | ((4 + 3) << 11));
case SLJIT_UNORDERED_OR_GREATER:
- case SLJIT_ORDERED_LESS_EQUAL:
return push_inst(compiler, CROR | ((4 + 1) << 21) | ((4 + 1) << 16) | ((4 + 3) << 11));
}
@@ -2095,7 +2291,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compil
/* Fall through. */
case SLJIT_MOV_F64:
if (src != dst_r) {
- if (dst_r != TMP_FREG1)
+ if (!(dst & SLJIT_MEM))
FAIL_IF(push_inst(compiler, FMR | FD(dst_r) | FB(src)));
else
dst_r = src;
@@ -2135,7 +2331,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compil
}
if (src2 & SLJIT_MEM) {
- FAIL_IF(emit_op_mem(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG2, src2, src2w, TMP_REG2));
+ FAIL_IF(emit_op_mem(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG2, src2, src2w, TMP_REG1));
src2 = TMP_FREG2;
}
@@ -2143,18 +2339,30 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compil
case SLJIT_ADD_F64:
FAIL_IF(push_inst(compiler, SELECT_FOP(op, FADDS, FADD) | FD(dst_r) | FA(src1) | FB(src2)));
break;
-
case SLJIT_SUB_F64:
FAIL_IF(push_inst(compiler, SELECT_FOP(op, FSUBS, FSUB) | FD(dst_r) | FA(src1) | FB(src2)));
break;
-
case SLJIT_MUL_F64:
FAIL_IF(push_inst(compiler, SELECT_FOP(op, FMULS, FMUL) | FD(dst_r) | FA(src1) | FC(src2) /* FMUL uses FC as src2 */));
break;
-
case SLJIT_DIV_F64:
FAIL_IF(push_inst(compiler, SELECT_FOP(op, FDIVS, FDIV) | FD(dst_r) | FA(src1) | FB(src2)));
break;
+ case SLJIT_COPYSIGN_F64:
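+ /* Copy the magnitude of src1 with the sign of src2: spill src2 to the stack temp, reload
+    the word holding its sign bit into TMP_REG1, take fabs(src1), and negate the result only
+    when that word is negative (the conditional branch skips the FNEG otherwise). */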
+ FAIL_IF(push_inst(compiler, ((op & SLJIT_32) ? STFS : STFD) | FS(src2) | A(SLJIT_SP) | TMP_MEM_OFFSET));
+#if (defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32)
+ FAIL_IF(push_inst(compiler, LWZ | S(TMP_REG1) | A(SLJIT_SP) | ((op & SLJIT_32) ? TMP_MEM_OFFSET : TMP_MEM_OFFSET_HI)));
+#else /* !SLJIT_CONFIG_PPC_32 */
+ FAIL_IF(push_inst(compiler, ((op & SLJIT_32) ? LWZ : LD) | S(TMP_REG1) | A(SLJIT_SP) | TMP_MEM_OFFSET));
+#endif /* SLJIT_CONFIG_PPC_32 */
+ FAIL_IF(push_inst(compiler, FABS | FD(dst_r) | FB(src1)));
+#if (defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32)
+ FAIL_IF(push_inst(compiler, CMPI | CRD(0) | A(TMP_REG1) | 0));
+#else /* !SLJIT_CONFIG_PPC_32 */
+ FAIL_IF(push_inst(compiler, CMPI | CRD(0 | ((op & SLJIT_32) ? 0 : 1)) | A(TMP_REG1) | 0));
+#endif /* SLJIT_CONFIG_PPC_32 */
+ FAIL_IF(push_inst(compiler, BCx | (4 << 21) | (0 << 16) | 8));
+ return push_inst(compiler, FNEG | FD(dst_r) | FB(dst_r));
}
if (dst & SLJIT_MEM)
@@ -2165,22 +2373,24 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compil
#undef SELECT_FOP
-/* --------------------------------------------------------------------- */
-/* Other instructions */
-/* --------------------------------------------------------------------- */
-
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fset32(struct sljit_compiler *compiler,
+ sljit_s32 freg, sljit_f32 value)
{
+ union {
+ sljit_s32 imm;
+ sljit_f32 value;
+ } u;
+
CHECK_ERROR();
- CHECK(check_sljit_emit_fast_enter(compiler, dst, dstw));
- ADJUST_LOCAL_OFFSET(dst, dstw);
+ CHECK(check_sljit_emit_fset32(compiler, freg, value));
- if (FAST_IS_REG(dst))
- return push_inst(compiler, MFLR | D(dst));
+ u.value = value;
- /* Memory. */
- FAIL_IF(push_inst(compiler, MFLR | D(TMP_REG2)));
- return emit_op(compiler, SLJIT_MOV, WORD_DATA, dst, dstw, TMP_REG1, 0, TMP_REG2, 0);
+ if (u.imm != 0)
+ FAIL_IF(load_immediate(compiler, TMP_REG1, u.imm));
+
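+ /* Bounce the IEEE-754 bit pattern through the stack temp: store it from an integer
+    register (TMP_ZERO when the constant is +0.0f), then reload it as a single precision float. */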
+ FAIL_IF(push_inst(compiler, STW | S(u.imm != 0 ? TMP_REG1 : TMP_ZERO) | A(SLJIT_SP) | TMP_MEM_OFFSET));
+ return push_inst(compiler, LFS | FS(freg) | A(SLJIT_SP) | TMP_MEM_OFFSET);
}
/* --------------------------------------------------------------------- */
@@ -2303,8 +2513,8 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compile
set_jump(jump, compiler, (sljit_u32)type & SLJIT_REWRITABLE_JUMP);
type &= 0xff;
- if (type == SLJIT_CARRY || type == SLJIT_NOT_CARRY)
- PTR_FAIL_IF(push_inst(compiler, ADDE | RC(ALT_SET_FLAGS) | D(TMP_REG1) | A(TMP_ZERO) | B(TMP_ZERO)));
+ if ((type | 0x1) == SLJIT_NOT_CARRY)
+ PTR_FAIL_IF(push_inst(compiler, ADDE | RC(ALT_SET_FLAGS) | D(TMP_REG2) | A(TMP_ZERO) | B(TMP_ZERO)));
/* In PPC, we don't need to touch the arguments. */
if (type < SLJIT_JUMP)
@@ -2314,16 +2524,19 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compile
jump->flags |= IS_CALL;
#endif
- PTR_FAIL_IF(emit_const(compiler, TMP_CALL_REG, 0));
- PTR_FAIL_IF(push_inst(compiler, MTCTR | S(TMP_CALL_REG)));
jump->addr = compiler->size;
PTR_FAIL_IF(push_inst(compiler, BCCTR | bo_bi_flags | (type >= SLJIT_FAST_CALL ? 1 : 0)));
+
+ /* Maximum number of instructions required for generating a constant. */
+ compiler->size += JUMP_MAX_SIZE - 1;
return jump;
}
SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_call(struct sljit_compiler *compiler, sljit_s32 type,
sljit_s32 arg_types)
{
+ SLJIT_UNUSED_ARG(arg_types);
+
CHECK_ERROR_PTR();
CHECK_PTR(check_sljit_emit_call(compiler, type, arg_types));
@@ -2349,18 +2562,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compi
CHECK_ERROR();
CHECK(check_sljit_emit_ijump(compiler, type, src, srcw));
- if (FAST_IS_REG(src)) {
-#if (defined SLJIT_PASS_ENTRY_ADDR_TO_CALL && SLJIT_PASS_ENTRY_ADDR_TO_CALL)
- if (type >= SLJIT_CALL && src != TMP_CALL_REG) {
- FAIL_IF(push_inst(compiler, OR | S(src) | A(TMP_CALL_REG) | B(src)));
- src_r = TMP_CALL_REG;
- }
- else
- src_r = src;
-#else /* SLJIT_PASS_ENTRY_ADDR_TO_CALL */
- src_r = src;
-#endif /* SLJIT_PASS_ENTRY_ADDR_TO_CALL */
- } else if (src & SLJIT_IMM) {
+ if (src == SLJIT_IMM) {
/* These jumps are converted to jump/call instructions when possible. */
jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
FAIL_IF(!jump);
@@ -2372,8 +2574,24 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compi
jump->flags |= IS_CALL;
#endif /* SLJIT_PASS_ENTRY_ADDR_TO_CALL */
- FAIL_IF(emit_const(compiler, TMP_CALL_REG, 0));
- src_r = TMP_CALL_REG;
+ jump->addr = compiler->size;
+ FAIL_IF(push_inst(compiler, BCCTR | (20 << 21) | (type >= SLJIT_FAST_CALL ? 1 : 0)));
+
+ /* Maximum number of instructions required for generating a constant. */
+ compiler->size += JUMP_MAX_SIZE - 1;
+ return SLJIT_SUCCESS;
+ }
+
+ if (FAST_IS_REG(src)) {
+#if (defined SLJIT_PASS_ENTRY_ADDR_TO_CALL && SLJIT_PASS_ENTRY_ADDR_TO_CALL)
+ if (type >= SLJIT_CALL && src != TMP_CALL_REG) {
+ FAIL_IF(push_inst(compiler, OR | S(src) | A(TMP_CALL_REG) | B(src)));
+ src_r = TMP_CALL_REG;
+ } else
+ src_r = src;
+#else /* SLJIT_PASS_ENTRY_ADDR_TO_CALL */
+ src_r = src;
+#endif /* SLJIT_PASS_ENTRY_ADDR_TO_CALL */
} else {
ADJUST_LOCAL_OFFSET(src, srcw);
FAIL_IF(emit_op_mem(compiler, WORD_DATA | LOAD_DATA, TMP_CALL_REG, src, srcw, TMP_CALL_REG));
@@ -2381,8 +2599,6 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compi
}
FAIL_IF(push_inst(compiler, MTCTR | S(src_r)));
- if (jump)
- jump->addr = compiler->size;
return push_inst(compiler, BCCTR | (20 << 21) | (type >= SLJIT_FAST_CALL ? 1 : 0));
}
@@ -2390,6 +2606,8 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_icall(struct sljit_compiler *compi
sljit_s32 arg_types,
sljit_s32 src, sljit_sw srcw)
{
+ SLJIT_UNUSED_ARG(arg_types);
+
CHECK_ERROR();
CHECK(check_sljit_emit_icall(compiler, type, arg_types, src, srcw));
@@ -2572,14 +2790,106 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *co
return sljit_emit_op2(compiler, saved_op, dst, 0, dst, 0, TMP_REG2, 0);
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_cmov(struct sljit_compiler *compiler, sljit_s32 type,
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_select(struct sljit_compiler *compiler, sljit_s32 type,
sljit_s32 dst_reg,
- sljit_s32 src, sljit_sw srcw)
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2_reg)
+{
+ sljit_ins *ptr;
+ sljit_uw size;
+#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
+ sljit_s32 inp_flags = ((type & SLJIT_32) ? INT_DATA : WORD_DATA) | LOAD_DATA;
+#else /* !SLJIT_CONFIG_PPC_64 */
+ sljit_s32 inp_flags = WORD_DATA | LOAD_DATA;
+#endif /* SLJIT_CONFIG_PPC_64 */
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_select(compiler, type, dst_reg, src1, src1w, src2_reg));
+
+ ADJUST_LOCAL_OFFSET(src1, src1w);
+
+ if (dst_reg != src2_reg) {
+ if (dst_reg == src1) {
+ src1 = src2_reg;
+ src1w = 0;
+ type ^= 0x1;
+ } else {
+ if (ADDRESSING_DEPENDS_ON(src1, dst_reg)) {
+ FAIL_IF(push_inst(compiler, OR | S(dst_reg) | A(TMP_REG1) | B(dst_reg)));
+
+ if ((src1 & REG_MASK) == dst_reg)
+ src1 = (src1 & ~REG_MASK) | TMP_REG1;
+
+ if (OFFS_REG(src1) == dst_reg)
+ src1 = (src1 & ~OFFS_REG_MASK) | TO_OFFS_REG(TMP_REG1);
+ }
+
+ FAIL_IF(push_inst(compiler, OR | S(src2_reg) | A(dst_reg) | B(src2_reg)));
+ }
+ }
+
+ if (((type & ~SLJIT_32) | 0x1) == SLJIT_NOT_CARRY)
+ FAIL_IF(push_inst(compiler, ADDE | RC(ALT_SET_FLAGS) | D(TMP_REG1) | A(TMP_ZERO) | B(TMP_ZERO)));
+
+ size = compiler->size;
+
+ ptr = (sljit_ins*)ensure_buf(compiler, sizeof(sljit_ins));
+ FAIL_IF(!ptr);
+ compiler->size++;
+
+ if (src1 & SLJIT_MEM) {
+ FAIL_IF(emit_op_mem(compiler, inp_flags, dst_reg, src1, src1w, TMP_REG1));
+ } else if (src1 == SLJIT_IMM) {
+#if (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
+ if (type & SLJIT_32)
+ src1w = (sljit_s32)src1w;
+#endif /* SLJIT_CONFIG_RISCV_64 */
+ FAIL_IF(load_immediate(compiler, dst_reg, src1w));
+ } else
+ FAIL_IF(push_inst(compiler, OR | S(src1) | A(dst_reg) | B(src1)));
+
+ *ptr = BCx | get_bo_bi_flags(compiler, (type ^ 0x1) & ~SLJIT_32) | (sljit_ins)((compiler->size - size) << 2);
+ return SLJIT_SUCCESS;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fselect(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 dst_freg,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2_freg)
{
+ sljit_ins *ptr;
+ sljit_uw size;
+
CHECK_ERROR();
- CHECK(check_sljit_emit_cmov(compiler, type, dst_reg, src, srcw));
+ CHECK(check_sljit_emit_fselect(compiler, type, dst_freg, src1, src1w, src2_freg));
+
+ ADJUST_LOCAL_OFFSET(src1, src1w);
+
+ if (dst_freg != src2_freg) {
+ if (dst_freg == src1) {
+ src1 = src2_freg;
+ src1w = 0;
+ type ^= 0x1;
+ } else
+ FAIL_IF(push_inst(compiler, FMR | FD(dst_freg) | FB(src2_freg)));
+ }
+
+ if (((type & ~SLJIT_32) | 0x1) == SLJIT_NOT_CARRY)
+ FAIL_IF(push_inst(compiler, ADDE | RC(ALT_SET_FLAGS) | D(TMP_REG1) | A(TMP_ZERO) | B(TMP_ZERO)));
- return sljit_emit_cmov_generic(compiler, type, dst_reg, src, srcw);;
+ size = compiler->size;
+
+ ptr = (sljit_ins*)ensure_buf(compiler, sizeof(sljit_ins));
+ FAIL_IF(!ptr);
+ compiler->size++;
+
+ if (src1 & SLJIT_MEM)
+ FAIL_IF(emit_op_mem(compiler, FLOAT_DATA(type) | LOAD_DATA, dst_freg, src1, src1w, TMP_REG1));
+ else
+ FAIL_IF(push_inst(compiler, FMR | FD(dst_freg) | FB(src1)));
+
+ *ptr = BCx | get_bo_bi_flags(compiler, (type ^ 0x1) & ~SLJIT_32) | (sljit_ins)((compiler->size - size) << 2);
+ return SLJIT_SUCCESS;
}
#if (defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32)
@@ -2813,36 +3123,36 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compi
PTR_FAIL_IF(emit_const(compiler, dst_r, init_value));
if (dst & SLJIT_MEM)
- PTR_FAIL_IF(emit_op(compiler, SLJIT_MOV, WORD_DATA, dst, dstw, TMP_REG1, 0, TMP_REG2, 0));
+ PTR_FAIL_IF(emit_op_mem(compiler, WORD_DATA, dst_r, dst, dstw, TMP_REG1));
return const_;
}
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_put_label* sljit_emit_put_label(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_mov_addr(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
{
- struct sljit_put_label *put_label;
+ struct sljit_jump *jump;
sljit_s32 dst_r;
CHECK_ERROR_PTR();
- CHECK_PTR(check_sljit_emit_put_label(compiler, dst, dstw));
+ CHECK_PTR(check_sljit_emit_mov_addr(compiler, dst, dstw));
ADJUST_LOCAL_OFFSET(dst, dstw);
- put_label = (struct sljit_put_label*)ensure_abuf(compiler, sizeof(struct sljit_put_label));
- PTR_FAIL_IF(!put_label);
- set_put_label(put_label, compiler, 0);
+ jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
+ PTR_FAIL_IF(!jump);
+ set_mov_addr(jump, compiler, 0);
dst_r = FAST_IS_REG(dst) ? dst : TMP_REG2;
+ PTR_FAIL_IF(push_inst(compiler, (sljit_ins)dst_r));
#if (defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32)
- PTR_FAIL_IF(emit_const(compiler, dst_r, 0));
+ compiler->size++;
#else
- PTR_FAIL_IF(push_inst(compiler, (sljit_ins)dst_r));
compiler->size += 4;
#endif
if (dst & SLJIT_MEM)
PTR_FAIL_IF(emit_op(compiler, SLJIT_MOV, WORD_DATA, dst, dstw, TMP_REG1, 0, TMP_REG2, 0));
- return put_label;
+ return jump;
}
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_const(sljit_uw addr, sljit_sw new_constant, sljit_sw executable_offset)
diff --git a/src/3rdparty/pcre2/src/sljit/sljitNativeRISCV_32.c b/src/3rdparty/pcre2/src/sljit/sljitNativeRISCV_32.c
index b38e6924c8..396c956c19 100644
--- a/src/3rdparty/pcre2/src/sljit/sljitNativeRISCV_32.c
+++ b/src/3rdparty/pcre2/src/sljit/sljitNativeRISCV_32.c
@@ -27,7 +27,6 @@
static sljit_s32 load_immediate(struct sljit_compiler *compiler, sljit_s32 dst_r, sljit_sw imm, sljit_s32 tmp_r)
{
SLJIT_UNUSED_ARG(tmp_r);
- SLJIT_ASSERT(dst_r != tmp_r);
if (imm <= SIMM_MAX && imm >= SIMM_MIN)
return push_inst(compiler, ADDI | RD(dst_r) | RS1(TMP_ZERO) | IMM_I(imm));
@@ -43,6 +42,76 @@ static sljit_s32 load_immediate(struct sljit_compiler *compiler, sljit_s32 dst_r
return push_inst(compiler, ADDI | RD(dst_r) | RS1(dst_r) | IMM_I(imm));
}
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fset64(struct sljit_compiler *compiler,
+ sljit_s32 freg, sljit_f64 value)
+{
+ union {
+ sljit_s32 imm[2];
+ sljit_f64 value;
+ } u;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_fset64(compiler, freg, value));
+
+ u.value = value;
+
+ if (u.imm[0] != 0)
+ FAIL_IF(load_immediate(compiler, TMP_REG1, u.imm[0], TMP_REG3));
+ if (u.imm[1] != 0)
+ FAIL_IF(load_immediate(compiler, TMP_REG2, u.imm[1], TMP_REG3));
+
+ FAIL_IF(push_inst(compiler, ADDI | RD(SLJIT_SP) | RS1(SLJIT_SP) | IMM_I(-16)));
+ FAIL_IF(push_inst(compiler, SW | RS1(SLJIT_SP) | RS2(u.imm[0] != 0 ? TMP_REG1 : TMP_ZERO) | (8 << 7)));
+ FAIL_IF(push_inst(compiler, SW | RS1(SLJIT_SP) | RS2(u.imm[1] != 0 ? TMP_REG2 : TMP_ZERO) | (12 << 7)));
+ FAIL_IF(push_inst(compiler, FLD | FRD(freg) | RS1(SLJIT_SP) | IMM_I(8)));
+ return push_inst(compiler, ADDI | RD(SLJIT_SP) | RS1(SLJIT_SP) | IMM_I(16));
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fcopy(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 freg, sljit_s32 reg)
+{
+ sljit_ins inst;
+ sljit_s32 reg2 = 0;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_fcopy(compiler, op, freg, reg));
+
+ if (op & SLJIT_32) {
+ if (op == SLJIT_COPY32_TO_F32)
+ inst = FMV_W_X | RS1(reg) | FRD(freg);
+ else
+ inst = FMV_X_W | FRS1(freg) | RD(reg);
+
+ return push_inst(compiler, inst);
+ }
+
+ FAIL_IF(push_inst(compiler, ADDI | RD(SLJIT_SP) | RS1(SLJIT_SP) | IMM_I(-16)));
+
+ if (reg & REG_PAIR_MASK) {
+ reg2 = REG_PAIR_SECOND(reg);
+ reg = REG_PAIR_FIRST(reg);
+ }
+
+ if (op == SLJIT_COPY_TO_F64) {
+ if (reg2 != 0)
+ FAIL_IF(push_inst(compiler, SW | RS1(SLJIT_SP) | RS2(reg2) | (8 << 7)));
+ else
+ FAIL_IF(push_inst(compiler, FSW | RS1(SLJIT_SP) | FRS2(freg) | (8 << 7)));
+
+ FAIL_IF(push_inst(compiler, SW | RS1(SLJIT_SP) | RS2(reg) | (12 << 7)));
+ FAIL_IF(push_inst(compiler, FLD | FRD(freg) | RS1(SLJIT_SP) | IMM_I(8)));
+ } else {
+ FAIL_IF(push_inst(compiler, FSD | RS1(SLJIT_SP) | FRS2(freg) | (8 << 7)));
+
+ if (reg2 != 0)
+ FAIL_IF(push_inst(compiler, FMV_X_W | FRS1(freg) | RD(reg2)));
+
+ FAIL_IF(push_inst(compiler, LW | RD(reg) | RS1(SLJIT_SP) | IMM_I(12)));
+ }
+
+ return push_inst(compiler, ADDI | RD(SLJIT_SP) | RS1(SLJIT_SP) | IMM_I(16));
+}
+
static SLJIT_INLINE sljit_s32 emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw init_value, sljit_ins last_ins)
{
if ((init_value & 0x800) != 0)
diff --git a/src/3rdparty/pcre2/src/sljit/sljitNativeRISCV_64.c b/src/3rdparty/pcre2/src/sljit/sljitNativeRISCV_64.c
index 32cec7848d..7fcf2c5273 100644
--- a/src/3rdparty/pcre2/src/sljit/sljitNativeRISCV_64.c
+++ b/src/3rdparty/pcre2/src/sljit/sljitNativeRISCV_64.c
@@ -28,8 +28,6 @@ static sljit_s32 load_immediate(struct sljit_compiler *compiler, sljit_s32 dst_r
{
sljit_sw high;
- SLJIT_ASSERT(dst_r != tmp_r);
-
if (imm <= SIMM_MAX && imm >= SIMM_MIN)
return push_inst(compiler, ADDI | RD(dst_r) | RS1(TMP_ZERO) | IMM_I(imm));
@@ -81,6 +79,8 @@ static sljit_s32 load_immediate(struct sljit_compiler *compiler, sljit_s32 dst_r
return SLJIT_SUCCESS;
}
+ SLJIT_ASSERT(dst_r != tmp_r);
+
high = imm >> 32;
imm = (sljit_s32)imm;
@@ -126,6 +126,45 @@ static sljit_s32 load_immediate(struct sljit_compiler *compiler, sljit_s32 dst_r
return push_inst(compiler, XOR | RD(dst_r) | RS1(dst_r) | RS2(tmp_r));
}
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fset64(struct sljit_compiler *compiler,
+ sljit_s32 freg, sljit_f64 value)
+{
+ union {
+ sljit_sw imm;
+ sljit_f64 value;
+ } u;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_fset64(compiler, freg, value));
+
+ u.value = value;
+
+ if (u.imm == 0)
+ return push_inst(compiler, FMV_W_X | (1 << 25) | RS1(TMP_ZERO) | FRD(freg));
+
+ FAIL_IF(load_immediate(compiler, TMP_REG1, u.imm, TMP_REG3));
+ return push_inst(compiler, FMV_W_X | (1 << 25) | RS1(TMP_REG1) | FRD(freg));
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fcopy(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 freg, sljit_s32 reg)
+{
+ sljit_ins inst;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_fcopy(compiler, op, freg, reg));
+
+ if (GET_OPCODE(op) == SLJIT_COPY_TO_F64)
+ inst = FMV_W_X | RS1(reg) | FRD(freg);
+ else
+ inst = FMV_X_W | FRS1(freg) | RD(reg);
+
+ if (!(op & SLJIT_32))
+ inst |= (sljit_ins)1 << 25;
+
+ return push_inst(compiler, inst);
+}
+
static SLJIT_INLINE sljit_s32 emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw init_value, sljit_ins last_ins)
{
sljit_sw high;
diff --git a/src/3rdparty/pcre2/src/sljit/sljitNativeRISCV_common.c b/src/3rdparty/pcre2/src/sljit/sljitNativeRISCV_common.c
index 58a48c649c..d86100a80c 100644
--- a/src/3rdparty/pcre2/src/sljit/sljitNativeRISCV_common.c
+++ b/src/3rdparty/pcre2/src/sljit/sljitNativeRISCV_common.c
@@ -97,16 +97,20 @@ static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 3] = {
#define FLD (F3(0x3) | OPC(0x7))
#define FLE_S (F7(0x50) | F3(0x0) | OPC(0x53))
#define FLT_S (F7(0x50) | F3(0x1) | OPC(0x53))
-#define FSD (F3(0x3) | OPC(0x27))
/* These conversion opcodes are partly defined. */
#define FCVT_S_D (F7(0x20) | OPC(0x53))
#define FCVT_S_W (F7(0x68) | OPC(0x53))
+#define FCVT_S_WU (F7(0x68) | F12(0x1) | OPC(0x53))
#define FCVT_W_S (F7(0x60) | F3(0x1) | OPC(0x53))
#define FMUL_S (F7(0x8) | F3(0x7) | OPC(0x53))
+#define FMV_X_W (F7(0x70) | F3(0x0) | OPC(0x53))
+#define FMV_W_X (F7(0x78) | F3(0x0) | OPC(0x53))
+#define FSD (F3(0x3) | OPC(0x27))
#define FSGNJ_S (F7(0x10) | F3(0x0) | OPC(0x53))
#define FSGNJN_S (F7(0x10) | F3(0x1) | OPC(0x53))
#define FSGNJX_S (F7(0x10) | F3(0x2) | OPC(0x53))
#define FSUB_S (F7(0x4) | F3(0x7) | OPC(0x53))
+#define FSW (F3(0x2) | OPC(0x27))
#define JAL (OPC(0x6f))
#define JALR (F3(0x0) | OPC(0x67))
#define LD (F3(0x3) | OPC(0x3))
@@ -177,24 +181,23 @@ static SLJIT_INLINE sljit_ins* detect_jump_type(struct sljit_jump *jump, sljit_i
if (jump->flags & JUMP_ADDR)
target_addr = jump->u.target;
else {
- SLJIT_ASSERT(jump->flags & JUMP_LABEL);
+ SLJIT_ASSERT(jump->u.label != NULL);
target_addr = (sljit_uw)(code + jump->u.label->size) + (sljit_uw)executable_offset;
}
diff = (sljit_sw)target_addr - (sljit_sw)inst - executable_offset;
if (jump->flags & IS_COND) {
- inst--;
diff += SSIZE_OF(ins);
if (diff >= BRANCH_MIN && diff <= BRANCH_MAX) {
- jump->flags |= PATCH_B;
+ inst--;
inst[0] = (inst[0] & 0x1fff07f) ^ 0x1000;
+ jump->flags |= PATCH_B;
jump->addr = (sljit_uw)inst;
return inst;
}
- inst++;
diff -= SSIZE_OF(ins);
}
@@ -261,116 +264,239 @@ exit:
#if (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
-static SLJIT_INLINE sljit_sw put_label_get_length(struct sljit_put_label *put_label, sljit_uw max_label)
+static SLJIT_INLINE sljit_sw mov_addr_get_length(struct sljit_jump *jump, sljit_ins *code_ptr, sljit_ins *code, sljit_sw executable_offset)
{
- if (max_label <= (sljit_uw)S32_MAX) {
- put_label->flags = PATCH_ABS32;
+ sljit_uw addr;
+ sljit_sw diff;
+ SLJIT_UNUSED_ARG(executable_offset);
+
+ SLJIT_ASSERT(jump->flags < ((sljit_uw)6 << JUMP_SIZE_SHIFT));
+ if (jump->flags & JUMP_ADDR)
+ addr = jump->u.target;
+ else
+ addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code + jump->u.label->size, executable_offset);
+
+ diff = (sljit_sw)addr - (sljit_sw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
+
+ if (diff >= S32_MIN && diff <= S32_MAX) {
+ SLJIT_ASSERT(jump->flags >= ((sljit_uw)1 << JUMP_SIZE_SHIFT));
+ jump->flags |= PATCH_REL32;
+ return 1;
+ }
+
+ if (addr <= S32_MAX) {
+ SLJIT_ASSERT(jump->flags >= ((sljit_uw)1 << JUMP_SIZE_SHIFT));
+ jump->flags |= PATCH_ABS32;
return 1;
}
- if (max_label <= S44_MAX) {
- put_label->flags = PATCH_ABS44;
+ if (addr <= S44_MAX) {
+ SLJIT_ASSERT(jump->flags >= ((sljit_uw)3 << JUMP_SIZE_SHIFT));
+ jump->flags |= PATCH_ABS44;
return 3;
}
- if (max_label <= S52_MAX) {
- put_label->flags = PATCH_ABS52;
+ if (addr <= S52_MAX) {
+ SLJIT_ASSERT(jump->flags >= ((sljit_uw)4 << JUMP_SIZE_SHIFT));
+ jump->flags |= PATCH_ABS52;
return 4;
}
- put_label->flags = 0;
+ SLJIT_ASSERT(jump->flags >= ((sljit_uw)5 << JUMP_SIZE_SHIFT));
return 5;
}
#endif /* SLJIT_CONFIG_RISCV_64 */
-static SLJIT_INLINE void load_addr_to_reg(void *dst, sljit_u32 reg)
+static SLJIT_INLINE void load_addr_to_reg(struct sljit_jump *jump, sljit_sw executable_offset)
{
- struct sljit_jump *jump = NULL;
- struct sljit_put_label *put_label;
- sljit_uw flags;
- sljit_ins *inst;
+ sljit_uw flags = jump->flags;
+ sljit_uw addr = (flags & JUMP_ADDR) ? jump->u.target : jump->u.label->u.addr;
+ sljit_ins *ins = (sljit_ins*)jump->addr;
+ sljit_u32 reg = (flags & JUMP_MOV_ADDR) ? *ins : TMP_REG1;
#if (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
sljit_sw high;
#endif
- sljit_uw addr;
+ SLJIT_UNUSED_ARG(executable_offset);
- if (reg != 0) {
- jump = (struct sljit_jump*)dst;
- flags = jump->flags;
- inst = (sljit_ins*)jump->addr;
- addr = (flags & JUMP_LABEL) ? jump->u.label->addr : jump->u.target;
- } else {
- put_label = (struct sljit_put_label*)dst;
#if (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
- flags = put_label->flags;
-#endif
- inst = (sljit_ins*)put_label->addr;
- addr = put_label->label->addr;
- reg = *inst;
+ if (flags & PATCH_REL32) {
+ addr -= (sljit_uw)SLJIT_ADD_EXEC_OFFSET(ins, executable_offset);
+
+ SLJIT_ASSERT((sljit_sw)addr >= S32_MIN && (sljit_sw)addr <= S32_MAX);
+
+ if ((addr & 0x800) != 0)
+ addr += 0x1000;
+
+ ins[0] = AUIPC | RD(reg) | (sljit_ins)((sljit_sw)addr & ~0xfff);
+
+ if (!(flags & JUMP_MOV_ADDR)) {
+ SLJIT_ASSERT((ins[1] & 0x707f) == JALR);
+ ins[1] = (ins[1] & 0xfffff) | IMM_I(addr);
+ } else
+ ins[1] = ADDI | RD(reg) | RS1(reg) | IMM_I(addr);
+ return;
}
+#endif
if ((addr & 0x800) != 0)
addr += 0x1000;
#if (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32)
- inst[0] = LUI | RD(reg) | (sljit_ins)((sljit_sw)addr & ~0xfff);
+ ins[0] = LUI | RD(reg) | (sljit_ins)((sljit_sw)addr & ~0xfff);
#else /* !SLJIT_CONFIG_RISCV_32 */
if (flags & PATCH_ABS32) {
SLJIT_ASSERT(addr <= S32_MAX);
- inst[0] = LUI | RD(reg) | (sljit_ins)((sljit_sw)addr & ~0xfff);
+ ins[0] = LUI | RD(reg) | (sljit_ins)((sljit_sw)addr & ~0xfff);
} else if (flags & PATCH_ABS44) {
high = (sljit_sw)addr >> 12;
SLJIT_ASSERT((sljit_uw)high <= 0x7fffffff);
if (high > S32_MAX) {
SLJIT_ASSERT((high & 0x800) != 0);
- inst[0] = LUI | RD(reg) | (sljit_ins)0x80000000u;
- inst[1] = XORI | RD(reg) | RS1(reg) | IMM_I(high);
+ ins[0] = LUI | RD(reg) | (sljit_ins)0x80000000u;
+ ins[1] = XORI | RD(reg) | RS1(reg) | IMM_I(high);
} else {
if ((high & 0x800) != 0)
high += 0x1000;
- inst[0] = LUI | RD(reg) | (sljit_ins)(high & ~0xfff);
- inst[1] = ADDI | RD(reg) | RS1(reg) | IMM_I(high);
+ ins[0] = LUI | RD(reg) | (sljit_ins)(high & ~0xfff);
+ ins[1] = ADDI | RD(reg) | RS1(reg) | IMM_I(high);
}
- inst[2] = SLLI | RD(reg) | RS1(reg) | IMM_I(12);
- inst += 2;
+ ins[2] = SLLI | RD(reg) | RS1(reg) | IMM_I(12);
+ ins += 2;
} else {
high = (sljit_sw)addr >> 32;
if ((addr & 0x80000000l) != 0)
high = ~high;
- if ((high & 0x800) != 0)
- high += 0x1000;
-
if (flags & PATCH_ABS52) {
SLJIT_ASSERT(addr <= S52_MAX);
- inst[0] = LUI | RD(TMP_REG3) | (sljit_ins)(high << 12);
+ ins[0] = LUI | RD(TMP_REG3) | (sljit_ins)(high << 12);
} else {
- inst[0] = LUI | RD(TMP_REG3) | (sljit_ins)(high & ~0xfff);
- inst[1] = ADDI | RD(TMP_REG3) | RS1(TMP_REG3) | IMM_I(high);
- inst++;
+ if ((high & 0x800) != 0)
+ high += 0x1000;
+ ins[0] = LUI | RD(TMP_REG3) | (sljit_ins)(high & ~0xfff);
+ ins[1] = ADDI | RD(TMP_REG3) | RS1(TMP_REG3) | IMM_I(high);
+ ins++;
}
- inst[1] = LUI | RD(reg) | (sljit_ins)((sljit_sw)addr & ~0xfff);
- inst[2] = SLLI | RD(TMP_REG3) | RS1(TMP_REG3) | IMM_I((flags & PATCH_ABS52) ? 20 : 32);
- inst[3] = XOR | RD(reg) | RS1(reg) | RS2(TMP_REG3);
- inst += 3;
+ ins[1] = LUI | RD(reg) | (sljit_ins)((sljit_sw)addr & ~0xfff);
+ ins[2] = SLLI | RD(TMP_REG3) | RS1(TMP_REG3) | IMM_I((flags & PATCH_ABS52) ? 20 : 32);
+ ins[3] = XOR | RD(reg) | RS1(reg) | RS2(TMP_REG3);
+ ins += 3;
}
#endif /* !SLJIT_CONFIG_RISCV_32 */
- if (jump != NULL) {
- SLJIT_ASSERT((inst[1] & 0x707f) == JALR);
- inst[1] = (inst[1] & 0xfffff) | IMM_I(addr);
+ if (!(flags & JUMP_MOV_ADDR)) {
+ SLJIT_ASSERT((ins[1] & 0x707f) == JALR);
+ ins[1] = (ins[1] & 0xfffff) | IMM_I(addr);
} else
- inst[1] = ADDI | RD(reg) | RS1(reg) | IMM_I(addr);
+ ins[1] = ADDI | RD(reg) | RS1(reg) | IMM_I(addr);
+}
+
+static void reduce_code_size(struct sljit_compiler *compiler)
+{
+ struct sljit_label *label;
+ struct sljit_jump *jump;
+ struct sljit_const *const_;
+ SLJIT_NEXT_DEFINE_TYPES;
+ sljit_uw total_size;
+ sljit_uw size_reduce = 0;
+ sljit_sw diff;
+
+ label = compiler->labels;
+ jump = compiler->jumps;
+ const_ = compiler->consts;
+ SLJIT_NEXT_INIT_TYPES();
+
+ while (1) {
+ SLJIT_GET_NEXT_MIN();
+
+ if (next_min_addr == SLJIT_MAX_ADDRESS)
+ break;
+
+ if (next_min_addr == next_label_size) {
+ label->size -= size_reduce;
+
+ label = label->next;
+ next_label_size = SLJIT_GET_NEXT_SIZE(label);
+ }
+
+ if (next_min_addr == next_const_addr) {
+ const_->addr -= size_reduce;
+ const_ = const_->next;
+ next_const_addr = SLJIT_GET_NEXT_ADDRESS(const_);
+ continue;
+ }
+
+ if (next_min_addr != next_jump_addr)
+ continue;
+
+ jump->addr -= size_reduce;
+ if (!(jump->flags & JUMP_MOV_ADDR)) {
+ total_size = JUMP_MAX_SIZE;
+
+ if (!(jump->flags & SLJIT_REWRITABLE_JUMP)) {
+ if (jump->flags & JUMP_ADDR) {
+#if (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
+ if (jump->u.target <= S32_MAX)
+ total_size = 2;
+ else if (jump->u.target <= S44_MAX)
+ total_size = 4;
+ else if (jump->u.target <= S52_MAX)
+ total_size = 5;
+#endif /* SLJIT_CONFIG_RISCV_64 */
+ } else {
+ /* Unit size: instruction. */
+ diff = (sljit_sw)jump->u.label->size - (sljit_sw)jump->addr;
+
+ if ((jump->flags & IS_COND) && (diff + 1) <= (BRANCH_MAX / SSIZE_OF(ins)) && (diff + 1) >= (BRANCH_MIN / SSIZE_OF(ins)))
+ total_size = 0;
+ else if (diff >= (JUMP_MIN / SSIZE_OF(ins)) && diff <= (JUMP_MAX / SSIZE_OF(ins)))
+ total_size = 1;
+#if (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
+ else if (diff >= (S32_MIN / SSIZE_OF(ins)) && diff <= (S32_MAX / SSIZE_OF(ins)))
+ total_size = 2;
+#endif /* SLJIT_CONFIG_RISCV_64 */
+ }
+ }
+
+ size_reduce += JUMP_MAX_SIZE - total_size;
+ jump->flags |= total_size << JUMP_SIZE_SHIFT;
+#if (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
+ } else {
+ total_size = 5;
+
+ if (!(jump->flags & JUMP_ADDR)) {
+ /* Real size minus 1. Unit size: instruction. */
+ diff = (sljit_sw)jump->u.label->size - (sljit_sw)jump->addr;
+
+ if (diff >= (S32_MIN / SSIZE_OF(ins)) && diff <= (S32_MAX / SSIZE_OF(ins)))
+ total_size = 1;
+ } else if (jump->u.target < S32_MAX)
+ total_size = 1;
+ else if (jump->u.target < S44_MAX)
+ total_size = 3;
+ else if (jump->u.target <= S52_MAX)
+ total_size = 4;
+
+ size_reduce += 5 - total_size;
+ jump->flags |= total_size << JUMP_SIZE_SHIFT;
+#endif /* SLJIT_CONFIG_RISCV_64 */
+ }
+
+ jump = jump->next;
+ next_jump_addr = SLJIT_GET_NEXT_ADDRESS(jump);
+ }
+
+ compiler->size -= size_reduce;
}
-SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compiler)
+SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compiler, sljit_s32 options, void *exec_allocator_data)
{
struct sljit_memory_fragment *buf;
sljit_ins *code;
@@ -378,77 +504,78 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
sljit_ins *buf_ptr;
sljit_ins *buf_end;
sljit_uw word_count;
- sljit_uw next_addr;
+ SLJIT_NEXT_DEFINE_TYPES;
sljit_sw executable_offset;
sljit_uw addr;
struct sljit_label *label;
struct sljit_jump *jump;
struct sljit_const *const_;
- struct sljit_put_label *put_label;
CHECK_ERROR_PTR();
CHECK_PTR(check_sljit_generate_code(compiler));
- reverse_buf(compiler);
- code = (sljit_ins*)SLJIT_MALLOC_EXEC(compiler->size * sizeof(sljit_ins), compiler->exec_allocator_data);
+ reduce_code_size(compiler);
+
+ code = (sljit_ins*)allocate_executable_memory(compiler->size * sizeof(sljit_ins), options, exec_allocator_data, &executable_offset);
PTR_FAIL_WITH_EXEC_IF(code);
+
+ reverse_buf(compiler);
buf = compiler->buf;
code_ptr = code;
word_count = 0;
- next_addr = 0;
- executable_offset = SLJIT_EXEC_OFFSET(code);
-
label = compiler->labels;
jump = compiler->jumps;
const_ = compiler->consts;
- put_label = compiler->put_labels;
+ SLJIT_NEXT_INIT_TYPES();
+ SLJIT_GET_NEXT_MIN();
do {
buf_ptr = (sljit_ins*)buf->memory;
buf_end = buf_ptr + (buf->used_size >> 2);
do {
*code_ptr = *buf_ptr++;
- if (next_addr == word_count) {
+ if (next_min_addr == word_count) {
SLJIT_ASSERT(!label || label->size >= word_count);
SLJIT_ASSERT(!jump || jump->addr >= word_count);
SLJIT_ASSERT(!const_ || const_->addr >= word_count);
- SLJIT_ASSERT(!put_label || put_label->addr >= word_count);
/* These structures are ordered by their address. */
- if (label && label->size == word_count) {
- label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
+ if (next_min_addr == next_label_size) {
+ label->u.addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
label->size = (sljit_uw)(code_ptr - code);
label = label->next;
+ next_label_size = SLJIT_GET_NEXT_SIZE(label);
}
- if (jump && jump->addr == word_count) {
+
+ if (next_min_addr == next_jump_addr) {
+ if (!(jump->flags & JUMP_MOV_ADDR)) {
+ word_count = word_count - 1 + (jump->flags >> JUMP_SIZE_SHIFT);
+ jump->addr = (sljit_uw)code_ptr;
+ code_ptr = detect_jump_type(jump, code, executable_offset);
+ SLJIT_ASSERT((jump->flags & PATCH_B) || ((sljit_uw)code_ptr - jump->addr < (jump->flags >> JUMP_SIZE_SHIFT) * sizeof(sljit_ins)));
+ } else {
#if (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32)
- word_count += 1;
-#else
- word_count += 5;
-#endif
- jump->addr = (sljit_uw)code_ptr;
- code_ptr = detect_jump_type(jump, code, executable_offset);
+ word_count += 1;
+ jump->addr = (sljit_uw)code_ptr;
+ code_ptr += 1;
+#else /* !SLJIT_CONFIG_RISCV_32 */
+ word_count += jump->flags >> JUMP_SIZE_SHIFT;
+ addr = (sljit_uw)code_ptr;
+ code_ptr += mov_addr_get_length(jump, code_ptr, code, executable_offset);
+ jump->addr = addr;
+#endif /* SLJIT_CONFIG_RISCV_32 */
+ }
jump = jump->next;
- }
- if (const_ && const_->addr == word_count) {
+ next_jump_addr = SLJIT_GET_NEXT_ADDRESS(jump);
+ } else if (next_min_addr == next_const_addr) {
const_->addr = (sljit_uw)code_ptr;
const_ = const_->next;
+ next_const_addr = SLJIT_GET_NEXT_ADDRESS(const_);
}
- if (put_label && put_label->addr == word_count) {
- SLJIT_ASSERT(put_label->label);
- put_label->addr = (sljit_uw)code_ptr;
-#if (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32)
- code_ptr += 1;
- word_count += 1;
-#else
- code_ptr += put_label_get_length(put_label, (sljit_uw)(SLJIT_ADD_EXEC_OFFSET(code, executable_offset) + put_label->label->size));
- word_count += 5;
-#endif
- put_label = put_label->next;
- }
- next_addr = compute_next_addr(label, jump, const_, put_label);
+
+ SLJIT_GET_NEXT_MIN();
}
code_ptr++;
word_count++;
@@ -458,7 +585,7 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
} while (buf);
if (label && label->size == word_count) {
- label->addr = (sljit_uw)code_ptr;
+ label->u.addr = (sljit_uw)code_ptr;
label->size = (sljit_uw)(code_ptr - code);
label = label->next;
}
@@ -466,18 +593,17 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
SLJIT_ASSERT(!label);
SLJIT_ASSERT(!jump);
SLJIT_ASSERT(!const_);
- SLJIT_ASSERT(!put_label);
SLJIT_ASSERT(code_ptr - code <= (sljit_sw)compiler->size);
jump = compiler->jumps;
while (jump) {
do {
- if (!(jump->flags & (PATCH_B | PATCH_J | PATCH_REL32))) {
- load_addr_to_reg(jump, TMP_REG1);
+ if (!(jump->flags & (PATCH_B | PATCH_J)) || (jump->flags & JUMP_MOV_ADDR)) {
+ load_addr_to_reg(jump, executable_offset);
break;
}
- addr = (jump->flags & JUMP_LABEL) ? jump->u.label->addr : jump->u.target;
+ addr = (jump->flags & JUMP_ADDR) ? jump->u.target : jump->u.label->u.addr;
buf_ptr = (sljit_ins *)jump->addr;
addr -= (sljit_uw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset);
@@ -488,31 +614,12 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
break;
}
-#if (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
- if (jump->flags & PATCH_REL32) {
- SLJIT_ASSERT((sljit_sw)addr >= S32_MIN && (sljit_sw)addr <= S32_MAX);
-
- if ((addr & 0x800) != 0)
- addr += 0x1000;
-
- buf_ptr[0] = AUIPC | RD(TMP_REG1) | (sljit_ins)((sljit_sw)addr & ~0xfff);
- SLJIT_ASSERT((buf_ptr[1] & 0x707f) == JALR);
- buf_ptr[1] |= IMM_I(addr);
- break;
- }
-#endif
-
SLJIT_ASSERT((sljit_sw)addr >= JUMP_MIN && (sljit_sw)addr <= JUMP_MAX);
addr = (addr & 0xff000) | ((addr & 0x800) << 9) | ((addr & 0x7fe) << 20) | ((addr & 0x100000) << 11);
buf_ptr[0] = JAL | RD((jump->flags & IS_CALL) ? RETURN_ADDR_REG : TMP_ZERO) | (sljit_ins)addr;
} while (0);
- jump = jump->next;
- }
- put_label = compiler->put_labels;
- while (put_label) {
- load_addr_to_reg(put_label, 0);
- put_label = put_label->next;
+ jump = jump->next;
}
compiler->error = SLJIT_ERR_COMPILED;
@@ -531,7 +638,18 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_has_cpu_feature(sljit_s32 feature_type)
{
switch (feature_type) {
case SLJIT_HAS_FPU:
+#ifdef SLJIT_IS_FPU_AVAILABLE
+ return (SLJIT_IS_FPU_AVAILABLE) != 0;
+#elif defined(__riscv_float_abi_soft)
+ return 0;
+#else
+ return 1;
+#endif /* SLJIT_IS_FPU_AVAILABLE */
case SLJIT_HAS_ZERO_REGISTER:
+ case SLJIT_HAS_COPY_F32:
+#if (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
+ case SLJIT_HAS_COPY_F64:
+#endif /* SLJIT_CONFIG_RISCV_64 */
return 1;
default:
return 0;
@@ -540,7 +658,17 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_has_cpu_feature(sljit_s32 feature_type)
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_cmp_info(sljit_s32 type)
{
- return (type >= SLJIT_ORDERED_EQUAL && type <= SLJIT_ORDERED_LESS_EQUAL);
+ switch (type) {
+ case SLJIT_UNORDERED_OR_EQUAL:
+ case SLJIT_ORDERED_NOT_EQUAL:
+ return 2;
+
+ case SLJIT_UNORDERED:
+ case SLJIT_ORDERED:
+ return 1;
+ }
+
+ return 0;
}
/* --------------------------------------------------------------------- */
@@ -575,6 +703,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_cmp_info(sljit_s32 type)
#define SLOW_SRC1 0x08000
#define SLOW_SRC2 0x10000
#define SLOW_DEST 0x20000
+#define MEM_USE_TMP2 0x40000
#if (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32)
#define STACK_STORE SW
@@ -610,10 +739,10 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compi
if (fsaveds > 0 || fscratches >= SLJIT_FIRST_SAVED_FLOAT_REG) {
if ((local_size & SSIZE_OF(sw)) != 0)
local_size += SSIZE_OF(sw);
- local_size += GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, sizeof(sljit_f64));
+ local_size += GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, f64);
}
#else
- local_size += GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, sizeof(sljit_f64));
+ local_size += GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, f64);
#endif
local_size = (local_size + SLJIT_LOCALS_OFFSET + 15) & ~0xf;
compiler->local_size = local_size;
@@ -704,10 +833,10 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_set_context(struct sljit_compiler *comp
if (fsaveds > 0 || fscratches >= SLJIT_FIRST_SAVED_FLOAT_REG) {
if ((local_size & SSIZE_OF(sw)) != 0)
local_size += SSIZE_OF(sw);
- local_size += GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, sizeof(sljit_f64));
+ local_size += GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, f64);
}
#else
- local_size += GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, sizeof(sljit_f64));
+ local_size += GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, f64);
#endif
compiler->local_size = (local_size + SLJIT_LOCALS_OFFSET + 15) & ~0xf;
@@ -859,7 +988,6 @@ static sljit_s32 push_mem_inst(struct sljit_compiler *compiler, sljit_s32 flags,
/* Can perform an operation using at most 1 instruction. */
static sljit_s32 getput_arg_fast(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg, sljit_s32 arg, sljit_sw argw)
{
-
SLJIT_ASSERT(arg & SLJIT_MEM);
if (!(arg & OFFS_REG_MASK) && argw <= SIMM_MAX && argw >= SIMM_MIN) {
@@ -904,7 +1032,7 @@ static sljit_s32 can_cache(sljit_s32 arg, sljit_sw argw, sljit_s32 next_arg, slj
static sljit_s32 getput_arg(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg, sljit_s32 arg, sljit_sw argw, sljit_s32 next_arg, sljit_sw next_argw)
{
sljit_s32 base = arg & REG_MASK;
- sljit_s32 tmp_r = TMP_REG1;
+ sljit_s32 tmp_r = (flags & MEM_USE_TMP2) ? TMP_REG2 : TMP_REG1;
sljit_sw offset, argw_hi;
SLJIT_ASSERT(arg & SLJIT_MEM);
@@ -913,11 +1041,6 @@ static sljit_s32 getput_arg(struct sljit_compiler *compiler, sljit_s32 flags, sl
next_argw = 0;
}
- /* Since tmp can be the same as base or offset registers,
- * these might be unavailable after modifying tmp. */
- if ((flags & MEM_MASK) <= GPR_REG && (flags & LOAD_DATA))
- tmp_r = reg;
-
if (SLJIT_UNLIKELY(arg & OFFS_REG_MASK)) {
argw &= 0x3;
@@ -1031,9 +1154,11 @@ static SLJIT_INLINE sljit_s32 emit_op_mem2(struct sljit_compiler *compiler, slji
#if (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32)
#define WORD 0
+#define WORD_32 0
#define IMM_EXTEND(v) (IMM_I(v))
#else /* !SLJIT_CONFIG_RISCV_32 */
#define WORD word
+#define WORD_32 0x08
#define IMM_EXTEND(v) (IMM_I((op & SLJIT_32) ? (v) : (32 + (v))))
#endif /* SLJIT_CONFIG_RISCV_32 */
@@ -1041,16 +1166,16 @@ static sljit_s32 emit_clz_ctz(struct sljit_compiler *compiler, sljit_s32 op, slj
{
sljit_s32 is_clz = (GET_OPCODE(op) == SLJIT_CLZ);
#if (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
- sljit_ins word = (op & SLJIT_32) >> 5;
- sljit_ins max = (op & SLJIT_32) ? 32 : 64;
+ sljit_ins word = (sljit_ins)(op & SLJIT_32) >> 5;
+ sljit_ins word_size = (op & SLJIT_32) ? 32 : 64;
#else /* !SLJIT_CONFIG_RISCV_64 */
- sljit_ins max = 32;
+ sljit_ins word_size = 32;
#endif /* SLJIT_CONFIG_RISCV_64 */
SLJIT_ASSERT(WORD == 0 || WORD == 0x8);
/* The OTHER_FLAG is the counter. */
- FAIL_IF(push_inst(compiler, ADDI | WORD | RD(OTHER_FLAG) | RS1(TMP_ZERO) | IMM_I(max)));
+ FAIL_IF(push_inst(compiler, ADDI | WORD | RD(OTHER_FLAG) | RS1(TMP_ZERO) | IMM_I(word_size)));
/* The TMP_REG2 is the next value. */
if (src != TMP_REG2)
@@ -1066,7 +1191,7 @@ static sljit_s32 emit_clz_ctz(struct sljit_compiler *compiler, sljit_s32 op, slj
FAIL_IF(push_inst(compiler, BLT | RS1(TMP_REG2) | RS2(TMP_ZERO) | ((sljit_ins)(2 * SSIZE_OF(ins)) << 7) | ((sljit_ins)(8 * SSIZE_OF(ins)) << 20)));
/* The TMP_REG1 is the next shift. */
- FAIL_IF(push_inst(compiler, ADDI | WORD | RD(TMP_REG1) | RS1(TMP_ZERO) | IMM_I(max)));
+ FAIL_IF(push_inst(compiler, ADDI | WORD | RD(TMP_REG1) | RS1(TMP_ZERO) | IMM_I(word_size)));
FAIL_IF(push_inst(compiler, ADDI | WORD | RD(EQUAL_FLAG) | RS1(TMP_REG2) | IMM_I(0)));
FAIL_IF(push_inst(compiler, SRLI | WORD | RD(TMP_REG1) | RS1(TMP_REG1) | IMM_I(1)));
@@ -1081,6 +1206,65 @@ static sljit_s32 emit_clz_ctz(struct sljit_compiler *compiler, sljit_s32 op, slj
return push_inst(compiler, ADDI | WORD | RD(dst) | RS1(OTHER_FLAG) | IMM_I(0));
}
+static sljit_s32 emit_rev(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 dst, sljit_sw src)
+{
+ SLJIT_UNUSED_ARG(op);
+
+#if (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
+ if (!(op & SLJIT_32)) {
+ FAIL_IF(push_inst(compiler, LUI | RD(OTHER_FLAG) | 0x10000));
+ FAIL_IF(push_inst(compiler, SRLI | RD(TMP_REG1) | RS1(src) | IMM_I(32)));
+ FAIL_IF(push_inst(compiler, ADDI | RD(OTHER_FLAG) | RS1(OTHER_FLAG) | IMM_I(0xfff)));
+ FAIL_IF(push_inst(compiler, SLLI | RD(dst) | RS1(src) | IMM_I(32)));
+ FAIL_IF(push_inst(compiler, SLLI | RD(EQUAL_FLAG) | RS1(OTHER_FLAG) | IMM_I(32)));
+ FAIL_IF(push_inst(compiler, OR | RD(dst) | RS1(dst) | RS2(TMP_REG1)));
+ FAIL_IF(push_inst(compiler, OR | RD(OTHER_FLAG) | RS1(OTHER_FLAG) | RS2(EQUAL_FLAG)));
+
+ FAIL_IF(push_inst(compiler, SRLI | RD(TMP_REG1) | RS1(dst) | IMM_I(16)));
+ FAIL_IF(push_inst(compiler, AND | RD(dst) | RS1(dst) | RS2(OTHER_FLAG)));
+ FAIL_IF(push_inst(compiler, AND | RD(TMP_REG1) | RS1(TMP_REG1) | RS2(OTHER_FLAG)));
+ FAIL_IF(push_inst(compiler, SLLI | RD(EQUAL_FLAG) | RS1(OTHER_FLAG) | IMM_I(8)));
+ FAIL_IF(push_inst(compiler, SLLI | RD(dst) | RS1(dst) | IMM_I(16)));
+ FAIL_IF(push_inst(compiler, XOR | RD(OTHER_FLAG) | RS1(OTHER_FLAG) | RS2(EQUAL_FLAG)));
+ FAIL_IF(push_inst(compiler, OR | RD(dst) | RS1(dst) | RS2(TMP_REG1)));
+
+ FAIL_IF(push_inst(compiler, SRLI | RD(TMP_REG1) | RS1(dst) | IMM_I(8)));
+ FAIL_IF(push_inst(compiler, AND | RD(dst) | RS1(dst) | RS2(OTHER_FLAG)));
+ FAIL_IF(push_inst(compiler, AND | RD(TMP_REG1) | RS1(TMP_REG1) | RS2(OTHER_FLAG)));
+ FAIL_IF(push_inst(compiler, SLLI | RD(dst) | RS1(dst) | IMM_I(8)));
+ return push_inst(compiler, OR | RD(dst) | RS1(dst) | RS2(TMP_REG1));
+ }
+#endif /* SLJIT_CONFIG_RISCV_64 */
+
+ FAIL_IF(push_inst(compiler, SRLI | WORD_32 | RD(TMP_REG1) | RS1(src) | IMM_I(16)));
+ FAIL_IF(push_inst(compiler, LUI | RD(OTHER_FLAG) | 0xff0000));
+ FAIL_IF(push_inst(compiler, SLLI | WORD_32 | RD(dst) | RS1(src) | IMM_I(16)));
+ FAIL_IF(push_inst(compiler, ORI | RD(OTHER_FLAG) | RS1(OTHER_FLAG) | IMM_I(0xff)));
+ FAIL_IF(push_inst(compiler, OR | RD(dst) | RS1(dst) | RS2(TMP_REG1)));
+
+ FAIL_IF(push_inst(compiler, SRLI | WORD_32 | RD(TMP_REG1) | RS1(dst) | IMM_I(8)));
+ FAIL_IF(push_inst(compiler, AND | RD(dst) | RS1(dst) | RS2(OTHER_FLAG)));
+ FAIL_IF(push_inst(compiler, AND | RD(TMP_REG1) | RS1(TMP_REG1) | RS2(OTHER_FLAG)));
+ FAIL_IF(push_inst(compiler, SLLI | WORD_32 | RD(dst) | RS1(dst) | IMM_I(8)));
+ return push_inst(compiler, OR | RD(dst) | RS1(dst) | RS2(TMP_REG1));
+}
+
+static sljit_s32 emit_rev16(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 dst, sljit_sw src)
+{
+#if (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
+ sljit_ins word = (sljit_ins)(op & SLJIT_32) >> 5;
+ sljit_ins word_size = (op & SLJIT_32) ? 32 : 64;
+#else /* !SLJIT_CONFIG_RISCV_64 */
+ sljit_ins word_size = 32;
+#endif /* SLJIT_CONFIG_RISCV_64 */
+
+ FAIL_IF(push_inst(compiler, SRLI | WORD | RD(TMP_REG1) | RS1(src) | IMM_I(8)));
+ FAIL_IF(push_inst(compiler, SLLI | WORD | RD(dst) | RS1(src) | IMM_I(word_size - 8)));
+ FAIL_IF(push_inst(compiler, ANDI | RD(TMP_REG1) | RS1(TMP_REG1) | IMM_I(0xff)));
+ FAIL_IF(push_inst(compiler, (GET_OPCODE(op) == SLJIT_REV_U16 ? SRLI : SRAI) | WORD | RD(dst) | RS1(dst) | IMM_I(word_size - 16)));
+ return push_inst(compiler, OR | RD(dst) | RS1(dst) | RS2(TMP_REG1));
+}
+
#define EMIT_LOGICAL(op_imm, op_reg) \
if (flags & SRC2_IMM) { \
if (op & SLJIT_SET_Z) \
@@ -1102,30 +1286,30 @@ static sljit_s32 emit_clz_ctz(struct sljit_compiler *compiler, sljit_s32 op, slj
static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 flags,
sljit_s32 dst, sljit_s32 src1, sljit_sw src2)
{
- sljit_s32 is_overflow, is_carry, carry_src_r, is_handled;
+ sljit_s32 is_overflow, is_carry, carry_src_r, is_handled, reg;
sljit_ins op_imm, op_reg;
#if (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
- sljit_ins word = (op & SLJIT_32) >> 5;
+ sljit_ins word = (sljit_ins)(op & SLJIT_32) >> 5;
#endif /* SLJIT_CONFIG_RISCV_64 */
SLJIT_ASSERT(WORD == 0 || WORD == 0x8);
switch (GET_OPCODE(op)) {
case SLJIT_MOV:
- SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
+ SLJIT_ASSERT(src1 == TMP_ZERO && !(flags & SRC2_IMM));
if (dst != src2)
return push_inst(compiler, ADDI | RD(dst) | RS1(src2) | IMM_I(0));
return SLJIT_SUCCESS;
case SLJIT_MOV_U8:
- SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
+ SLJIT_ASSERT(src1 == TMP_ZERO && !(flags & SRC2_IMM));
if ((flags & (REG_DEST | REG2_SOURCE)) == (REG_DEST | REG2_SOURCE))
return push_inst(compiler, ANDI | RD(dst) | RS1(src2) | IMM_I(0xff));
SLJIT_ASSERT(dst == src2);
return SLJIT_SUCCESS;
case SLJIT_MOV_S8:
- SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
+ SLJIT_ASSERT(src1 == TMP_ZERO && !(flags & SRC2_IMM));
if ((flags & (REG_DEST | REG2_SOURCE)) == (REG_DEST | REG2_SOURCE)) {
FAIL_IF(push_inst(compiler, SLLI | WORD | RD(dst) | RS1(src2) | IMM_EXTEND(24)));
return push_inst(compiler, SRAI | WORD | RD(dst) | RS1(dst) | IMM_EXTEND(24));
@@ -1134,7 +1318,7 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
return SLJIT_SUCCESS;
case SLJIT_MOV_U16:
- SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
+ SLJIT_ASSERT(src1 == TMP_ZERO && !(flags & SRC2_IMM));
if ((flags & (REG_DEST | REG2_SOURCE)) == (REG_DEST | REG2_SOURCE)) {
FAIL_IF(push_inst(compiler, SLLI | WORD | RD(dst) | RS1(src2) | IMM_EXTEND(16)));
return push_inst(compiler, SRLI | WORD | RD(dst) | RS1(dst) | IMM_EXTEND(16));
@@ -1143,7 +1327,7 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
return SLJIT_SUCCESS;
case SLJIT_MOV_S16:
- SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
+ SLJIT_ASSERT(src1 == TMP_ZERO && !(flags & SRC2_IMM));
if ((flags & (REG_DEST | REG2_SOURCE)) == (REG_DEST | REG2_SOURCE)) {
FAIL_IF(push_inst(compiler, SLLI | WORD | RD(dst) | RS1(src2) | IMM_EXTEND(16)));
return push_inst(compiler, SRAI | WORD | RD(dst) | RS1(dst) | IMM_EXTEND(16));
@@ -1153,7 +1337,7 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
#if (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
case SLJIT_MOV_U32:
- SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
+ SLJIT_ASSERT(src1 == TMP_ZERO && !(flags & SRC2_IMM));
if ((flags & (REG_DEST | REG2_SOURCE)) == (REG_DEST | REG2_SOURCE)) {
FAIL_IF(push_inst(compiler, SLLI | RD(dst) | RS1(src2) | IMM_I(32)));
return push_inst(compiler, SRLI | RD(dst) | RS1(dst) | IMM_I(32));
@@ -1162,7 +1346,7 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
return SLJIT_SUCCESS;
case SLJIT_MOV_S32:
- SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
+ SLJIT_ASSERT(src1 == TMP_ZERO && !(flags & SRC2_IMM));
if ((flags & (REG_DEST | REG2_SOURCE)) == (REG_DEST | REG2_SOURCE))
return push_inst(compiler, ADDI | 0x8 | RD(dst) | RS1(src2) | IMM_I(0));
SLJIT_ASSERT(dst == src2);
@@ -1171,13 +1355,36 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
case SLJIT_CLZ:
case SLJIT_CTZ:
- SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
+ SLJIT_ASSERT(src1 == TMP_ZERO && !(flags & SRC2_IMM));
return emit_clz_ctz(compiler, op, dst, src2);
+ case SLJIT_REV:
+ case SLJIT_REV_S32:
+#if (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32)
+ case SLJIT_REV_U32:
+#endif /* SLJIT_CONFIG_RISCV_32 */
+ SLJIT_ASSERT(src1 == TMP_ZERO && !(flags & SRC2_IMM));
+ return emit_rev(compiler, op, dst, src2);
+
+ case SLJIT_REV_U16:
+ case SLJIT_REV_S16:
+ SLJIT_ASSERT(src1 == TMP_ZERO && !(flags & SRC2_IMM));
+ return emit_rev16(compiler, op, dst, src2);
+
+#if (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
+ case SLJIT_REV_U32:
+ SLJIT_ASSERT(src1 == TMP_ZERO && !(flags & SRC2_IMM) && dst != TMP_REG1);
+ FAIL_IF(emit_rev(compiler, op, dst, src2));
+ if (dst == TMP_REG2)
+ return SLJIT_SUCCESS;
+ FAIL_IF(push_inst(compiler, SLLI | RD(dst) | RS1(dst) | IMM_I(32)));
+ return push_inst(compiler, SRLI | RD(dst) | RS1(dst) | IMM_I(32));
+#endif /* SLJIT_CONFIG_RISCV_64 */
+
case SLJIT_ADD:
/* Overflow computation (both add and sub): overflow = src1_sign ^ src2_sign ^ result_sign ^ carry_flag */
is_overflow = GET_FLAG_TYPE(op) == SLJIT_OVERFLOW;
- carry_src_r = GET_FLAG_TYPE(op) == GET_FLAG_TYPE(SLJIT_SET_CARRY);
+ carry_src_r = GET_FLAG_TYPE(op) == SLJIT_CARRY;
if (flags & SRC2_IMM) {
if (is_overflow) {
@@ -1233,7 +1440,7 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
return push_inst(compiler, XOR | RD(OTHER_FLAG) | RS1(TMP_REG1) | RS2(OTHER_FLAG));
case SLJIT_ADDC:
- carry_src_r = GET_FLAG_TYPE(op) == GET_FLAG_TYPE(SLJIT_SET_CARRY);
+ carry_src_r = GET_FLAG_TYPE(op) == SLJIT_CARRY;
if (flags & SRC2_IMM) {
FAIL_IF(push_inst(compiler, ADDI | WORD | RD(dst) | RS1(src1) | IMM_I(src2)));
@@ -1280,11 +1487,11 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
is_handled = 0;
if (flags & SRC2_IMM) {
- if (GET_FLAG_TYPE(op) == SLJIT_LESS || GET_FLAG_TYPE(op) == SLJIT_GREATER_EQUAL) {
+ if (GET_FLAG_TYPE(op) == SLJIT_LESS) {
FAIL_IF(push_inst(compiler, SLTUI | RD(OTHER_FLAG) | RS1(src1) | IMM_I(src2)));
is_handled = 1;
}
- else if (GET_FLAG_TYPE(op) == SLJIT_SIG_LESS || GET_FLAG_TYPE(op) == SLJIT_SIG_GREATER_EQUAL) {
+ else if (GET_FLAG_TYPE(op) == SLJIT_SIG_LESS) {
FAIL_IF(push_inst(compiler, SLTI | RD(OTHER_FLAG) | RS1(src1) | IMM_I(src2)));
is_handled = 1;
}
@@ -1294,26 +1501,23 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
is_handled = 1;
if (flags & SRC2_IMM) {
- FAIL_IF(push_inst(compiler, ADDI | RD(TMP_REG2) | RS1(TMP_ZERO) | IMM_I(src2)));
- src2 = TMP_REG2;
+ reg = (src1 == TMP_REG1) ? TMP_REG2 : TMP_REG1;
+ FAIL_IF(push_inst(compiler, ADDI | RD(reg) | RS1(TMP_ZERO) | IMM_I(src2)));
+ src2 = reg;
flags &= ~SRC2_IMM;
}
switch (GET_FLAG_TYPE(op)) {
case SLJIT_LESS:
- case SLJIT_GREATER_EQUAL:
FAIL_IF(push_inst(compiler, SLTU | RD(OTHER_FLAG) | RS1(src1) | RS2(src2)));
break;
case SLJIT_GREATER:
- case SLJIT_LESS_EQUAL:
FAIL_IF(push_inst(compiler, SLTU | RD(OTHER_FLAG) | RS1(src2) | RS2(src1)));
break;
case SLJIT_SIG_LESS:
- case SLJIT_SIG_GREATER_EQUAL:
FAIL_IF(push_inst(compiler, SLT | RD(OTHER_FLAG) | RS1(src1) | RS2(src2)));
break;
case SLJIT_SIG_GREATER:
- case SLJIT_SIG_LESS_EQUAL:
FAIL_IF(push_inst(compiler, SLT | RD(OTHER_FLAG) | RS1(src2) | RS2(src1)));
break;
}
@@ -1336,7 +1540,7 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
}
is_overflow = GET_FLAG_TYPE(op) == SLJIT_OVERFLOW;
- is_carry = GET_FLAG_TYPE(op) == GET_FLAG_TYPE(SLJIT_SET_CARRY);
+ is_carry = GET_FLAG_TYPE(op) == SLJIT_CARRY;
if (flags & SRC2_IMM) {
if (is_overflow) {
@@ -1385,7 +1589,7 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
flags &= ~SRC2_IMM;
}
- is_carry = GET_FLAG_TYPE(op) == GET_FLAG_TYPE(SLJIT_SET_CARRY);
+ is_carry = GET_FLAG_TYPE(op) == SLJIT_CARRY;
if (flags & SRC2_IMM) {
if (is_carry)
@@ -1527,32 +1731,33 @@ static sljit_s32 emit_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s3
sljit_s32 dst_r = TMP_REG2;
sljit_s32 src1_r;
sljit_sw src2_r = 0;
- sljit_s32 sugg_src2_r = TMP_REG2;
+ sljit_s32 src2_tmp_reg = (GET_OPCODE(op) >= SLJIT_OP2_BASE && FAST_IS_REG(src1)) ? TMP_REG1 : TMP_REG2;
if (!(flags & ALT_KEEP_CACHE)) {
compiler->cache_arg = 0;
compiler->cache_argw = 0;
}
- if (dst == TMP_REG2) {
+ if (dst == 0) {
SLJIT_ASSERT(HAS_FLAGS(op));
flags |= UNUSED_DEST;
+ dst = TMP_REG2;
}
else if (FAST_IS_REG(dst)) {
dst_r = dst;
flags |= REG_DEST;
if (flags & MOVE_OP)
- sugg_src2_r = dst_r;
+ src2_tmp_reg = dst_r;
}
else if ((dst & SLJIT_MEM) && !getput_arg_fast(compiler, flags | ARG_TEST, TMP_REG1, dst, dstw))
flags |= SLOW_DEST;
if (flags & IMM_OP) {
- if ((src2 & SLJIT_IMM) && src2w != 0 && src2w <= SIMM_MAX && src2w >= SIMM_MIN) {
+ if (src2 == SLJIT_IMM && src2w != 0 && src2w <= SIMM_MAX && src2w >= SIMM_MIN) {
flags |= SRC2_IMM;
src2_r = src2w;
}
- else if ((flags & CUMULATIVE_OP) && (src1 & SLJIT_IMM) && src1w != 0 && src1w <= SIMM_MAX && src1w >= SIMM_MIN) {
+ else if ((flags & CUMULATIVE_OP) && src1 == SLJIT_IMM && src1w != 0 && src1w <= SIMM_MAX && src1w >= SIMM_MIN) {
flags |= SRC2_IMM;
src2_r = src1w;
@@ -1568,16 +1773,14 @@ static sljit_s32 emit_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s3
if (FAST_IS_REG(src1)) {
src1_r = src1;
flags |= REG1_SOURCE;
- }
- else if (src1 & SLJIT_IMM) {
+ } else if (src1 == SLJIT_IMM) {
if (src1w) {
FAIL_IF(load_immediate(compiler, TMP_REG1, src1w, TMP_REG3));
src1_r = TMP_REG1;
}
else
src1_r = TMP_ZERO;
- }
- else {
+ } else {
if (getput_arg_fast(compiler, flags | LOAD_DATA, TMP_REG1, src1, src1w))
FAIL_IF(compiler->error);
else
@@ -1591,14 +1794,12 @@ static sljit_s32 emit_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s3
flags |= REG2_SOURCE;
if ((flags & (REG_DEST | MOVE_OP)) == MOVE_OP)
dst_r = (sljit_s32)src2_r;
- }
- else if (src2 & SLJIT_IMM) {
+ } else if (src2 == SLJIT_IMM) {
if (!(flags & SRC2_IMM)) {
if (src2w) {
- FAIL_IF(load_immediate(compiler, sugg_src2_r, src2w, TMP_REG3));
- src2_r = sugg_src2_r;
- }
- else {
+ FAIL_IF(load_immediate(compiler, src2_tmp_reg, src2w, TMP_REG3));
+ src2_r = src2_tmp_reg;
+ } else {
src2_r = TMP_ZERO;
if (flags & MOVE_OP) {
if (dst & SLJIT_MEM)
@@ -1608,30 +1809,28 @@ static sljit_s32 emit_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s3
}
}
}
- }
- else {
- if (getput_arg_fast(compiler, flags | LOAD_DATA, sugg_src2_r, src2, src2w))
+ } else {
+ if (getput_arg_fast(compiler, flags | LOAD_DATA, src2_tmp_reg, src2, src2w))
FAIL_IF(compiler->error);
else
flags |= SLOW_SRC2;
- src2_r = sugg_src2_r;
+ src2_r = src2_tmp_reg;
}
if ((flags & (SLOW_SRC1 | SLOW_SRC2)) == (SLOW_SRC1 | SLOW_SRC2)) {
SLJIT_ASSERT(src2_r == TMP_REG2);
- if (!can_cache(src1, src1w, src2, src2w) && can_cache(src1, src1w, dst, dstw)) {
+ if ((flags & SLOW_DEST) && !can_cache(src2, src2w, src1, src1w) && can_cache(src2, src2w, dst, dstw)) {
+ FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, TMP_REG1, src1, src1w, src2, src2w));
+ FAIL_IF(getput_arg(compiler, flags | LOAD_DATA | MEM_USE_TMP2, TMP_REG2, src2, src2w, dst, dstw));
+ } else {
FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, TMP_REG2, src2, src2w, src1, src1w));
FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, TMP_REG1, src1, src1w, dst, dstw));
}
- else {
- FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, TMP_REG1, src1, src1w, src2, src2w));
- FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, TMP_REG2, src2, src2w, dst, dstw));
- }
}
else if (flags & SLOW_SRC1)
FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, TMP_REG1, src1, src1w, dst, dstw));
else if (flags & SLOW_SRC2)
- FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, sugg_src2_r, src2, src2w, dst, dstw));
+ FAIL_IF(getput_arg(compiler, flags | LOAD_DATA | ((src1_r == TMP_REG1) ? MEM_USE_TMP2 : 0), src2_tmp_reg, src2, src2w, dst, dstw));
FAIL_IF(emit_single_op(compiler, op, flags, dst_r, src1_r, src2_r));
@@ -1649,7 +1848,7 @@ static sljit_s32 emit_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s3
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op0(struct sljit_compiler *compiler, sljit_s32 op)
{
#if (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
- sljit_ins word = (op & SLJIT_32) >> 5;
+ sljit_ins word = (sljit_ins)(op & SLJIT_32) >> 5;
SLJIT_ASSERT(word == 0 || word == 0x8);
#endif /* SLJIT_CONFIG_RISCV_64 */
@@ -1714,36 +1913,42 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compile
case SLJIT_MOV32:
#endif
case SLJIT_MOV_P:
- return emit_op(compiler, SLJIT_MOV, WORD_DATA | MOVE_OP, dst, dstw, TMP_REG1, 0, src, srcw);
+ return emit_op(compiler, SLJIT_MOV, WORD_DATA | MOVE_OP, dst, dstw, TMP_ZERO, 0, src, srcw);
#if (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
case SLJIT_MOV_U32:
- return emit_op(compiler, SLJIT_MOV_U32, INT_DATA | MOVE_OP, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_u32)srcw : srcw);
+ return emit_op(compiler, SLJIT_MOV_U32, INT_DATA | MOVE_OP, dst, dstw, TMP_ZERO, 0, src, (src == SLJIT_IMM) ? (sljit_u32)srcw : srcw);
case SLJIT_MOV_S32:
/* Logical operators have no W variant, so sign extended input is necessary for them. */
case SLJIT_MOV32:
- return emit_op(compiler, SLJIT_MOV_S32, INT_DATA | SIGNED_DATA | MOVE_OP, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s32)srcw : srcw);
+ return emit_op(compiler, SLJIT_MOV_S32, INT_DATA | SIGNED_DATA | MOVE_OP, dst, dstw, TMP_ZERO, 0, src, (src == SLJIT_IMM) ? (sljit_s32)srcw : srcw);
#endif
case SLJIT_MOV_U8:
- return emit_op(compiler, op, BYTE_DATA | MOVE_OP, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_u8)srcw : srcw);
+ return emit_op(compiler, op, BYTE_DATA | MOVE_OP, dst, dstw, TMP_ZERO, 0, src, (src == SLJIT_IMM) ? (sljit_u8)srcw : srcw);
case SLJIT_MOV_S8:
- return emit_op(compiler, op, BYTE_DATA | SIGNED_DATA | MOVE_OP, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s8)srcw : srcw);
+ return emit_op(compiler, op, BYTE_DATA | SIGNED_DATA | MOVE_OP, dst, dstw, TMP_ZERO, 0, src, (src == SLJIT_IMM) ? (sljit_s8)srcw : srcw);
case SLJIT_MOV_U16:
- return emit_op(compiler, op, HALF_DATA | MOVE_OP, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_u16)srcw : srcw);
+ return emit_op(compiler, op, HALF_DATA | MOVE_OP, dst, dstw, TMP_ZERO, 0, src, (src == SLJIT_IMM) ? (sljit_u16)srcw : srcw);
case SLJIT_MOV_S16:
- return emit_op(compiler, op, HALF_DATA | SIGNED_DATA | MOVE_OP, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s16)srcw : srcw);
-
- case SLJIT_NOT:
- return emit_op(compiler, SLJIT_XOR | (op & (SLJIT_32 | SLJIT_SET_Z)), flags, dst, dstw, src, srcw, SLJIT_IMM, -1);
+ return emit_op(compiler, op, HALF_DATA | SIGNED_DATA | MOVE_OP, dst, dstw, TMP_ZERO, 0, src, (src == SLJIT_IMM) ? (sljit_s16)srcw : srcw);
case SLJIT_CLZ:
case SLJIT_CTZ:
- return emit_op(compiler, op, flags, dst, dstw, TMP_REG1, 0, src, srcw);
+ case SLJIT_REV:
+ return emit_op(compiler, op, flags, dst, dstw, TMP_ZERO, 0, src, srcw);
+
+ case SLJIT_REV_U16:
+ case SLJIT_REV_S16:
+ return emit_op(compiler, op, HALF_DATA, dst, dstw, TMP_ZERO, 0, src, srcw);
+
+ case SLJIT_REV_U32:
+ case SLJIT_REV_S32:
+ return emit_op(compiler, op | SLJIT_32, INT_DATA, dst, dstw, TMP_ZERO, 0, src, srcw);
}
SLJIT_UNREACHABLE();
@@ -1766,9 +1971,9 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compile
#if (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
if (op & SLJIT_32) {
flags |= INT_DATA | SIGNED_DATA;
- if (src1 & SLJIT_IMM)
+ if (src1 == SLJIT_IMM)
src1w = (sljit_s32)src1w;
- if (src2 & SLJIT_IMM)
+ if (src2 == SLJIT_IMM)
src2w = (sljit_s32)src2w;
}
#endif
@@ -1801,7 +2006,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compile
case SLJIT_MASHR:
case SLJIT_ROTL:
case SLJIT_ROTR:
- if (src2 & SLJIT_IMM) {
+ if (src2 == SLJIT_IMM) {
#if (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32)
src2w &= 0x1f;
#else /* !SLJIT_CONFIG_RISCV_32 */
@@ -1827,18 +2032,43 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2u(struct sljit_compiler *compil
CHECK(check_sljit_emit_op2(compiler, op, 1, 0, 0, src1, src1w, src2, src2w));
SLJIT_SKIP_CHECKS(compiler);
- return sljit_emit_op2(compiler, op, TMP_REG2, 0, src1, src1w, src2, src2w);
+ return sljit_emit_op2(compiler, op, 0, 0, src1, src1w, src2, src2w);
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_shift_into(struct sljit_compiler *compiler, sljit_s32 op,
- sljit_s32 src_dst,
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2r(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst_reg,
sljit_s32 src1, sljit_sw src1w,
sljit_s32 src2, sljit_sw src2w)
{
+#if (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
+ sljit_ins word = (sljit_ins)(op & SLJIT_32) >> 5;
+#endif /* SLJIT_CONFIG_RISCV_64 */
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_op2r(compiler, op, dst_reg, src1, src1w, src2, src2w));
+
+ SLJIT_ASSERT(WORD == 0 || WORD == 0x8);
+
+ switch (GET_OPCODE(op)) {
+ case SLJIT_MULADD:
+ SLJIT_SKIP_CHECKS(compiler);
+ FAIL_IF(sljit_emit_op2(compiler, SLJIT_MUL | (op & SLJIT_32), TMP_REG2, 0, src1, src1w, src2, src2w));
+ return push_inst(compiler, ADD | WORD | RD(dst_reg) | RS1(dst_reg) | RS2(TMP_REG2));
+ }
+
+ return SLJIT_SUCCESS;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_shift_into(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst_reg,
+ sljit_s32 src1_reg,
+ sljit_s32 src2_reg,
+ sljit_s32 src3, sljit_sw src3w)
+{
sljit_s32 is_left;
sljit_ins ins1, ins2, ins3;
#if (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
- sljit_ins word = (op & SLJIT_32) >> 5;
+ sljit_ins word = (sljit_ins)(op & SLJIT_32) >> 5;
sljit_s32 inp_flags = ((op & SLJIT_32) ? INT_DATA : WORD_DATA) | LOAD_DATA;
sljit_sw bit_length = (op & SLJIT_32) ? 32 : 64;
#else /* !SLJIT_CONFIG_RISCV_64 */
@@ -1849,50 +2079,44 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_shift_into(struct sljit_compiler *
SLJIT_ASSERT(WORD == 0 || WORD == 0x8);
CHECK_ERROR();
- CHECK(check_sljit_emit_shift_into(compiler, op, src_dst, src1, src1w, src2, src2w));
+ CHECK(check_sljit_emit_shift_into(compiler, op, dst_reg, src1_reg, src2_reg, src3, src3w));
is_left = (GET_OPCODE(op) == SLJIT_SHL || GET_OPCODE(op) == SLJIT_MSHL);
- if (src_dst == src1) {
+ if (src1_reg == src2_reg) {
SLJIT_SKIP_CHECKS(compiler);
- return sljit_emit_op2(compiler, (is_left ? SLJIT_ROTL : SLJIT_ROTR) | (op & SLJIT_32), src_dst, 0, src_dst, 0, src2, src2w);
+ return sljit_emit_op2(compiler, (is_left ? SLJIT_ROTL : SLJIT_ROTR) | (op & SLJIT_32), dst_reg, 0, src1_reg, 0, src3, src3w);
}
- ADJUST_LOCAL_OFFSET(src1, src1w);
- ADJUST_LOCAL_OFFSET(src2, src2w);
+ ADJUST_LOCAL_OFFSET(src3, src3w);
- if (src2 & SLJIT_IMM) {
- src2w &= bit_length - 1;
+ if (src3 == SLJIT_IMM) {
+ src3w &= bit_length - 1;
- if (src2w == 0)
+ if (src3w == 0)
return SLJIT_SUCCESS;
- } else if (src2 & SLJIT_MEM) {
- FAIL_IF(emit_op_mem(compiler, inp_flags, TMP_REG2, src2, src2w));
- src2 = TMP_REG2;
- }
- if (src1 & SLJIT_MEM) {
- FAIL_IF(emit_op_mem(compiler, inp_flags, TMP_REG1, src1, src1w));
- src1 = TMP_REG1;
- } else if (src1 & SLJIT_IMM) {
- FAIL_IF(load_immediate(compiler, TMP_REG1, src1w, TMP_REG3));
- src1 = TMP_REG1;
- }
-
- if (src2 & SLJIT_IMM) {
if (is_left) {
- ins1 = SLLI | WORD | IMM_I(src2w);
- src2w = bit_length - src2w;
- ins2 = SRLI | WORD | IMM_I(src2w);
+ ins1 = SLLI | WORD | IMM_I(src3w);
+ src3w = bit_length - src3w;
+ ins2 = SRLI | WORD | IMM_I(src3w);
} else {
- ins1 = SRLI | WORD | IMM_I(src2w);
- src2w = bit_length - src2w;
- ins2 = SLLI | WORD | IMM_I(src2w);
+ ins1 = SRLI | WORD | IMM_I(src3w);
+ src3w = bit_length - src3w;
+ ins2 = SLLI | WORD | IMM_I(src3w);
}
- FAIL_IF(push_inst(compiler, ins1 | RD(src_dst) | RS1(src_dst)));
- FAIL_IF(push_inst(compiler, ins2 | RD(TMP_REG1) | RS1(src1)));
- return push_inst(compiler, OR | RD(src_dst) | RS1(src_dst) | RS2(TMP_REG1));
+ FAIL_IF(push_inst(compiler, ins1 | RD(dst_reg) | RS1(src1_reg)));
+ FAIL_IF(push_inst(compiler, ins2 | RD(TMP_REG1) | RS1(src2_reg)));
+ return push_inst(compiler, OR | RD(dst_reg) | RS1(dst_reg) | RS2(TMP_REG1));
+ }
+
+ if (src3 & SLJIT_MEM) {
+ FAIL_IF(emit_op_mem(compiler, inp_flags, TMP_REG2, src3, src3w));
+ src3 = TMP_REG2;
+ } else if (dst_reg == src3) {
+ push_inst(compiler, ADDI | WORD | RD(TMP_REG2) | RS1(src3) | IMM_I(0));
+ src3 = TMP_REG2;
}
if (is_left) {
@@ -1905,21 +2129,19 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_shift_into(struct sljit_compiler *
ins3 = SLL;
}
- FAIL_IF(push_inst(compiler, ins1 | WORD | RD(src_dst) | RS1(src_dst) | RS2(src2)));
+ FAIL_IF(push_inst(compiler, ins1 | WORD | RD(dst_reg) | RS1(src1_reg) | RS2(src3)));
if (!(op & SLJIT_SHIFT_INTO_NON_ZERO)) {
- FAIL_IF(push_inst(compiler, ins2 | WORD | RD(TMP_REG1) | RS1(src1) | IMM_I(1)));
- FAIL_IF(push_inst(compiler, XORI | RD(TMP_REG2) | RS1(src2) | IMM_I((sljit_ins)bit_length - 1)));
- src1 = TMP_REG1;
+ FAIL_IF(push_inst(compiler, ins2 | WORD | RD(TMP_REG1) | RS1(src2_reg) | IMM_I(1)));
+ FAIL_IF(push_inst(compiler, XORI | RD(TMP_REG2) | RS1(src3) | IMM_I((sljit_ins)bit_length - 1)));
+ src2_reg = TMP_REG1;
} else
- FAIL_IF(push_inst(compiler, SUB | WORD | RD(TMP_REG2) | RS1(TMP_ZERO) | RS2(src2)));
+ FAIL_IF(push_inst(compiler, SUB | WORD | RD(TMP_REG2) | RS1(TMP_ZERO) | RS2(src3)));
- FAIL_IF(push_inst(compiler, ins3 | WORD | RD(TMP_REG1) | RS1(src1) | RS2(TMP_REG2)));
- return push_inst(compiler, OR | RD(src_dst) | RS1(src_dst) | RS2(TMP_REG1));
+ FAIL_IF(push_inst(compiler, ins3 | WORD | RD(TMP_REG1) | RS1(src2_reg) | RS2(TMP_REG2)));
+ return push_inst(compiler, OR | RD(dst_reg) | RS1(dst_reg) | RS2(TMP_REG1));
}
-#undef WORD
-
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_src(struct sljit_compiler *compiler, sljit_s32 op,
sljit_s32 src, sljit_sw srcw)
{
@@ -1947,21 +2169,52 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_src(struct sljit_compiler *comp
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 reg)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_dst(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw)
{
- CHECK_REG_INDEX(check_sljit_get_register_index(reg));
- return reg_map[reg];
+ sljit_s32 dst_r;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_op_dst(compiler, op, dst, dstw));
+ ADJUST_LOCAL_OFFSET(dst, dstw);
+
+ switch (op) {
+ case SLJIT_FAST_ENTER:
+ if (FAST_IS_REG(dst))
+ return push_inst(compiler, ADDI | RD(dst) | RS1(RETURN_ADDR_REG) | IMM_I(0));
+
+ SLJIT_ASSERT(RETURN_ADDR_REG == TMP_REG2);
+ break;
+ case SLJIT_GET_RETURN_ADDRESS:
+ dst_r = FAST_IS_REG(dst) ? dst : TMP_REG2;
+ FAIL_IF(emit_op_mem(compiler, WORD_DATA | LOAD_DATA, dst_r, SLJIT_MEM1(SLJIT_SP), compiler->local_size - SSIZE_OF(sw)));
+ break;
+ }
+
+ if (dst & SLJIT_MEM)
+ return emit_op_mem(compiler, WORD_DATA, TMP_REG2, dst, dstw);
+
+ return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_float_register_index(sljit_s32 reg)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 type, sljit_s32 reg)
{
- CHECK_REG_INDEX(check_sljit_get_float_register_index(reg));
+ CHECK_REG_INDEX(check_sljit_get_register_index(type, reg));
+
+ if (type == SLJIT_GP_REGISTER)
+ return reg_map[reg];
+
+ if (type != SLJIT_FLOAT_REGISTER)
+ return -1;
+
return freg_map[reg];
}
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *compiler,
void *instruction, sljit_u32 size)
{
+ SLJIT_UNUSED_ARG(size);
+
CHECK_ERROR();
CHECK(check_sljit_emit_op_custom(compiler, instruction, size));
@@ -2008,51 +2261,73 @@ static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_comp
#endif
}
-static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_sw(struct sljit_compiler *compiler, sljit_s32 op,
+static sljit_s32 sljit_emit_fop1_conv_f64_from_w(struct sljit_compiler *compiler, sljit_ins ins,
sljit_s32 dst, sljit_sw dstw,
sljit_s32 src, sljit_sw srcw)
{
- sljit_ins inst;
-#if (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
- sljit_u32 flags = ((sljit_u32)(GET_OPCODE(op) == SLJIT_CONV_F64_FROM_SW)) << 21;
-#endif
-
sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
if (src & SLJIT_MEM) {
#if (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32)
FAIL_IF(emit_op_mem2(compiler, WORD_DATA | LOAD_DATA, TMP_REG1, src, srcw, dst, dstw));
-#else
- FAIL_IF(emit_op_mem2(compiler, (flags ? WORD_DATA : INT_DATA) | LOAD_DATA, TMP_REG1, src, srcw, dst, dstw));
-#endif
+#else /* SLJIT_CONFIG_RISCV_32 */
+ FAIL_IF(emit_op_mem2(compiler, ((ins & (1 << 21)) ? WORD_DATA : INT_DATA) | LOAD_DATA, TMP_REG1, src, srcw, dst, dstw));
+#endif /* !SLJIT_CONFIG_RISCV_32 */
src = TMP_REG1;
- } else if (src & SLJIT_IMM) {
-#if (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
- if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_S32)
- srcw = (sljit_s32)srcw;
-#endif
-
+ } else if (src == SLJIT_IMM) {
FAIL_IF(load_immediate(compiler, TMP_REG1, srcw, TMP_REG3));
src = TMP_REG1;
}
- inst = FCVT_S_W | FMT(op) | FRD(dst_r) | RS1(src);
+ FAIL_IF(push_inst(compiler, ins | FRD(dst_r) | RS1(src)));
+
+ if (dst & SLJIT_MEM)
+ return emit_op_mem2(compiler, DOUBLE_DATA | ((sljit_s32)(~ins >> 24) & 0x2), TMP_FREG1, dst, dstw, 0, 0);
+ return SLJIT_SUCCESS;
+}
+
+static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_sw(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
+{
+ sljit_ins ins = FCVT_S_W | FMT(op);
#if (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32)
if (op & SLJIT_32)
- inst |= F3(0x7);
-#else
- inst |= flags;
+ ins |= F3(0x7);
+#else /* !SLJIT_CONFIG_RISCV_32 */
+ if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_SW)
+ ins |= (1 << 21);
+ else if (src == SLJIT_IMM)
+ srcw = (sljit_s32)srcw;
if (op != SLJIT_CONV_F64_FROM_S32)
- inst |= F3(0x7);
-#endif
+ ins |= F3(0x7);
+#endif /* SLJIT_CONFIG_RISCV_32 */
- FAIL_IF(push_inst(compiler, inst));
+ return sljit_emit_fop1_conv_f64_from_w(compiler, ins, dst, dstw, src, srcw);
+}
- if (dst & SLJIT_MEM)
- return emit_op_mem2(compiler, FLOAT_DATA(op), TMP_FREG1, dst, dstw, 0, 0);
- return SLJIT_SUCCESS;
+static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_uw(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
+{
+ sljit_ins ins = FCVT_S_WU | FMT(op);
+
+#if (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32)
+ if (op & SLJIT_32)
+ ins |= F3(0x7);
+#else /* !SLJIT_CONFIG_RISCV_32 */
+ if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_UW)
+ ins |= (1 << 21);
+ else if (src == SLJIT_IMM)
+ srcw = (sljit_u32)srcw;
+
+ if (op != SLJIT_CONV_F64_FROM_S32)
+ ins |= F3(0x7);
+#endif /* SLJIT_CONFIG_RISCV_32 */
+
+ return sljit_emit_fop1_conv_f64_from_w(compiler, ins, dst, dstw, src, srcw);
}
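On RV64 both converters above funnel into sljit_emit_fop1_conv_f64_from_w and differ only in the FCVT template and the (1 << 21) adjustment. If I read the RISC-V encoding right, bits 24:20 of FCVT select the integer source (0 = W, 1 = WU, 2 = L, 3 = LU), so setting bit 21 simply widens the source from 32 to 64 bits; a hedged sketch under that assumption (helper name is mine):

#include <stdint.h>

/* Assumed rs2 sub-field layout: W=0, WU=1, L=2, LU=3 in bits 24:20.
   Bit 21 turns FCVT.x.W into FCVT.x.L and FCVT.x.WU into FCVT.x.LU. */
static uint32_t fcvt_use_64bit_source(uint32_t fcvt_template)
{
    return fcvt_template | (1u << 21);
}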
static SLJIT_INLINE sljit_s32 sljit_emit_fop1_cmp(struct sljit_compiler *compiler, sljit_s32 op,
@@ -2073,40 +2348,36 @@ static SLJIT_INLINE sljit_s32 sljit_emit_fop1_cmp(struct sljit_compiler *compile
switch (GET_FLAG_TYPE(op)) {
case SLJIT_F_EQUAL:
- case SLJIT_F_NOT_EQUAL:
case SLJIT_ORDERED_EQUAL:
- case SLJIT_UNORDERED_OR_NOT_EQUAL:
inst = FEQ_S | FMT(op) | RD(OTHER_FLAG) | FRS1(src1) | FRS2(src2);
break;
case SLJIT_F_LESS:
- case SLJIT_F_GREATER_EQUAL:
case SLJIT_ORDERED_LESS:
- case SLJIT_UNORDERED_OR_GREATER_EQUAL:
inst = FLT_S | FMT(op) | RD(OTHER_FLAG) | FRS1(src1) | FRS2(src2);
break;
case SLJIT_ORDERED_GREATER:
- case SLJIT_UNORDERED_OR_LESS_EQUAL:
inst = FLT_S | FMT(op) | RD(OTHER_FLAG) | FRS1(src2) | FRS2(src1);
break;
case SLJIT_F_GREATER:
- case SLJIT_F_LESS_EQUAL:
case SLJIT_UNORDERED_OR_GREATER:
- case SLJIT_ORDERED_LESS_EQUAL:
inst = FLE_S | FMT(op) | RD(OTHER_FLAG) | FRS1(src1) | FRS2(src2);
break;
case SLJIT_UNORDERED_OR_LESS:
- case SLJIT_ORDERED_GREATER_EQUAL:
inst = FLE_S | FMT(op) | RD(OTHER_FLAG) | FRS1(src2) | FRS2(src1);
break;
- case SLJIT_UNORDERED_OR_EQUAL: /* Not supported. */
- case SLJIT_ORDERED_NOT_EQUAL: /* Not supported. */
+ case SLJIT_UNORDERED_OR_EQUAL:
FAIL_IF(push_inst(compiler, FLT_S | FMT(op) | RD(OTHER_FLAG) | FRS1(src1) | FRS2(src2)));
FAIL_IF(push_inst(compiler, FLT_S | FMT(op) | RD(TMP_REG1) | FRS1(src2) | FRS2(src1)));
inst = OR | RD(OTHER_FLAG) | RS1(OTHER_FLAG) | RS2(TMP_REG1);
break;
- default: /* SLJIT_UNORDERED, SLJIT_ORDERED */
- FAIL_IF(push_inst(compiler, FADD_S | FMT(op) | FRD(TMP_FREG1) | FRS1(src1) | FRS2(src2)));
- inst = FEQ_S | FMT(op) | RD(OTHER_FLAG) | FRS1(TMP_FREG1) | FRS2(TMP_FREG1);
+ default: /* SLJIT_UNORDERED */
+ if (src1 == src2) {
+ inst = FEQ_S | FMT(op) | RD(OTHER_FLAG) | FRS1(src1) | FRS2(src1);
+ break;
+ }
+ FAIL_IF(push_inst(compiler, FEQ_S | FMT(op) | RD(OTHER_FLAG) | FRS1(src1) | FRS2(src1)));
+ FAIL_IF(push_inst(compiler, FEQ_S | FMT(op) | RD(TMP_REG1) | FRS1(src2) | FRS2(src2)));
+ inst = AND | RD(OTHER_FLAG) | RS1(OTHER_FLAG) | RS2(TMP_REG1);
break;
}
@@ -2139,7 +2410,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compil
switch (GET_OPCODE(op)) {
case SLJIT_MOV_F64:
if (src != dst_r) {
- if (dst_r != TMP_FREG1)
+ if (!(dst & SLJIT_MEM))
FAIL_IF(push_inst(compiler, FSGNJ_S | FMT(op) | FRD(dst_r) | FRS1(src) | FRS2(src)));
else
dst_r = src;
@@ -2198,11 +2469,10 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compil
}
if ((flags & (SLOW_SRC1 | SLOW_SRC2)) == (SLOW_SRC1 | SLOW_SRC2)) {
- if (!can_cache(src1, src1w, src2, src2w) && can_cache(src1, src1w, dst, dstw)) {
+ if ((dst & SLJIT_MEM) && !can_cache(src1, src1w, src2, src2w) && can_cache(src1, src1w, dst, dstw)) {
FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG2, src2, src2w, src1, src1w));
FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src1, src1w, dst, dstw));
- }
- else {
+ } else {
FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src1, src1w, src2, src2w));
FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG2, src2, src2w, dst, dstw));
}
@@ -2233,32 +2503,35 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compil
case SLJIT_DIV_F64:
FAIL_IF(push_inst(compiler, FDIV_S | FMT(op) | FRD(dst_r) | FRS1(src1) | FRS2(src2)));
break;
+
+ case SLJIT_COPYSIGN_F64:
+ return push_inst(compiler, FSGNJ_S | FMT(op) | FRD(dst_r) | FRS1(src1) | FRS2(src2));
}
- if (dst_r == TMP_FREG2)
+ if (dst_r != dst)
FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op), TMP_FREG2, dst, dstw, 0, 0));
return SLJIT_SUCCESS;
}
-#undef FLOAT_DATA
-#undef FMT
-
-/* --------------------------------------------------------------------- */
-/* Other instructions */
-/* --------------------------------------------------------------------- */
-
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fset32(struct sljit_compiler *compiler,
+ sljit_s32 freg, sljit_f32 value)
{
+ union {
+ sljit_s32 imm;
+ sljit_f32 value;
+ } u;
+
CHECK_ERROR();
- CHECK(check_sljit_emit_fast_enter(compiler, dst, dstw));
- ADJUST_LOCAL_OFFSET(dst, dstw);
+ CHECK(check_sljit_emit_fset32(compiler, freg, value));
- if (FAST_IS_REG(dst))
- return push_inst(compiler, ADDI | RD(dst) | RS1(RETURN_ADDR_REG) | IMM_I(0));
+ u.value = value;
- /* Memory. */
- return emit_op_mem(compiler, WORD_DATA, RETURN_ADDR_REG, dst, dstw);
+ if (u.imm == 0)
+ return push_inst(compiler, FMV_W_X | RS1(TMP_ZERO) | FRD(freg));
+
+ FAIL_IF(load_immediate(compiler, TMP_REG1, u.imm, TMP_REG3));
+ return push_inst(compiler, FMV_W_X | RS1(TMP_REG1) | FRD(freg));
}
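sljit_emit_fset32 moves the float's raw bit pattern through an integer register with FMV.W.X, short-circuiting to the hard-wired zero register when the pattern is all zeroes (+0.0f). A small sketch of the type pun the union performs; memcpy is just the portable spelling of the same thing:

#include <stdint.h>
#include <string.h>

static int32_t f32_bits(float value)
{
    int32_t bits;
    memcpy(&bits, &value, sizeof(bits));
    return bits;   /* 0 for +0.0f, so TMP_ZERO can feed FMV_W_X directly */
}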
/* --------------------------------------------------------------------- */
@@ -2287,26 +2560,13 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emit_label(struct sljit_compi
#define BRANCH_LENGTH ((sljit_ins)(7 * sizeof(sljit_ins)) << 7)
#endif
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, sljit_s32 type)
+static sljit_ins get_jump_instruction(sljit_s32 type)
{
- struct sljit_jump *jump;
- sljit_ins inst;
-
- CHECK_ERROR_PTR();
- CHECK_PTR(check_sljit_emit_jump(compiler, type));
-
- jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
- PTR_FAIL_IF(!jump);
- set_jump(jump, compiler, type & SLJIT_REWRITABLE_JUMP);
- type &= 0xff;
-
switch (type) {
case SLJIT_EQUAL:
- inst = BNE | RS1(EQUAL_FLAG) | RS2(TMP_ZERO) | BRANCH_LENGTH;
- break;
+ return BNE | RS1(EQUAL_FLAG) | RS2(TMP_ZERO);
case SLJIT_NOT_EQUAL:
- inst = BEQ | RS1(EQUAL_FLAG) | RS2(TMP_ZERO) | BRANCH_LENGTH;
- break;
+ return BEQ | RS1(EQUAL_FLAG) | RS2(TMP_ZERO);
case SLJIT_LESS:
case SLJIT_GREATER:
case SLJIT_SIG_LESS:
@@ -2315,7 +2575,7 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compile
case SLJIT_CARRY:
case SLJIT_F_EQUAL:
case SLJIT_ORDERED_EQUAL:
- case SLJIT_ORDERED_NOT_EQUAL: /* Not supported. */
+ case SLJIT_ORDERED_NOT_EQUAL:
case SLJIT_F_LESS:
case SLJIT_ORDERED_LESS:
case SLJIT_ORDERED_GREATER:
@@ -2323,7 +2583,7 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compile
case SLJIT_ORDERED_LESS_EQUAL:
case SLJIT_ORDERED_GREATER_EQUAL:
case SLJIT_ORDERED:
- inst = BEQ | RS1(OTHER_FLAG) | RS2(TMP_ZERO) | BRANCH_LENGTH;
- break;
+ return BEQ | RS1(OTHER_FLAG) | RS2(TMP_ZERO);
case SLJIT_GREATER_EQUAL:
case SLJIT_LESS_EQUAL:
@@ -2333,7 +2593,7 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compile
case SLJIT_NOT_CARRY:
case SLJIT_F_NOT_EQUAL:
case SLJIT_UNORDERED_OR_NOT_EQUAL:
- case SLJIT_UNORDERED_OR_EQUAL: /* Not supported. */
+ case SLJIT_UNORDERED_OR_EQUAL:
case SLJIT_F_GREATER_EQUAL:
case SLJIT_UNORDERED_OR_GREATER_EQUAL:
case SLJIT_UNORDERED_OR_LESS_EQUAL:
@@ -2341,16 +2601,30 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compile
case SLJIT_UNORDERED_OR_GREATER:
case SLJIT_UNORDERED_OR_LESS:
case SLJIT_UNORDERED:
- inst = BNE | RS1(OTHER_FLAG) | RS2(TMP_ZERO) | BRANCH_LENGTH;
- break;
+ return BNE | RS1(OTHER_FLAG) | RS2(TMP_ZERO);
default:
/* Not conditional branch. */
- inst = 0;
- break;
+ return 0;
}
+}
+
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, sljit_s32 type)
+{
+ struct sljit_jump *jump;
+ sljit_ins inst;
+
+ CHECK_ERROR_PTR();
+ CHECK_PTR(check_sljit_emit_jump(compiler, type));
+
+ jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
+ PTR_FAIL_IF(!jump);
+ set_jump(jump, compiler, type & SLJIT_REWRITABLE_JUMP);
+ type &= 0xff;
+
+ inst = get_jump_instruction(type);
if (inst != 0) {
- PTR_FAIL_IF(push_inst(compiler, inst));
+ PTR_FAIL_IF(push_inst(compiler, inst | BRANCH_LENGTH));
jump->flags |= IS_COND;
}
@@ -2365,11 +2639,7 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compile
PTR_FAIL_IF(push_inst(compiler, inst));
/* Maximum number of instructions required for generating a constant. */
-#if (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32)
- compiler->size += 1;
-#else
- compiler->size += 5;
-#endif
+ compiler->size += JUMP_MAX_SIZE - 1;
return jump;
}
@@ -2396,6 +2666,7 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_cmp(struct sljit_compiler
struct sljit_jump *jump;
sljit_s32 flags;
sljit_ins inst;
+ sljit_s32 src2_tmp_reg = FAST_IS_REG(src1) ? TMP_REG1 : TMP_REG2;
CHECK_ERROR_PTR();
CHECK_PTR(check_sljit_emit_cmp(compiler, type, src1, src1w, src2, src2w));
@@ -2416,11 +2687,11 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_cmp(struct sljit_compiler
}
if (src2 & SLJIT_MEM) {
- PTR_FAIL_IF(emit_op_mem2(compiler, flags, TMP_REG2, src2, src2w, 0, 0));
- src2 = TMP_REG2;
+ PTR_FAIL_IF(emit_op_mem2(compiler, flags, src2_tmp_reg, src2, src2w, 0, 0));
+ src2 = src2_tmp_reg;
}
- if (src1 & SLJIT_IMM) {
+ if (src1 == SLJIT_IMM) {
if (src1w != 0) {
PTR_FAIL_IF(load_immediate(compiler, TMP_REG1, src1w, TMP_REG3));
src1 = TMP_REG1;
@@ -2429,10 +2700,10 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_cmp(struct sljit_compiler
src1 = TMP_ZERO;
}
- if (src2 & SLJIT_IMM) {
+ if (src2 == SLJIT_IMM) {
if (src2w != 0) {
- PTR_FAIL_IF(load_immediate(compiler, TMP_REG2, src2w, TMP_REG3));
- src2 = TMP_REG2;
+ PTR_FAIL_IF(load_immediate(compiler, src2_tmp_reg, src2w, TMP_REG3));
+ src2 = src2_tmp_reg;
}
else
src2 = TMP_ZERO;
@@ -2482,11 +2753,7 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_cmp(struct sljit_compiler
PTR_FAIL_IF(push_inst(compiler, JALR | RD(TMP_ZERO) | RS1(TMP_REG1) | IMM_I(0)));
/* Maximum number of instructions required for generating a constant. */
-#if (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32)
- compiler->size += 1;
-#else
- compiler->size += 5;
-#endif
+ compiler->size += JUMP_MAX_SIZE - 1;
return jump;
}
@@ -2499,7 +2766,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compi
CHECK_ERROR();
CHECK(check_sljit_emit_ijump(compiler, type, src, srcw));
- if (!(src & SLJIT_IMM)) {
+ if (src != SLJIT_IMM) {
if (src & SLJIT_MEM) {
ADJUST_LOCAL_OFFSET(src, srcw);
FAIL_IF(emit_op_mem(compiler, WORD_DATA | LOAD_DATA, TMP_REG1, src, srcw));
@@ -2518,11 +2785,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compi
FAIL_IF(push_inst(compiler, JALR | RD((type >= SLJIT_FAST_CALL) ? RETURN_ADDR_REG : TMP_ZERO) | RS1(TMP_REG1) | IMM_I(0)));
/* Maximum number of instructions required for generating a constant. */
-#if (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32)
- compiler->size += 1;
-#else
- compiler->size += 5;
-#endif
+ compiler->size += JUMP_MAX_SIZE - 1;
return SLJIT_SUCCESS;
}
@@ -2641,16 +2904,112 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *co
return emit_op(compiler, saved_op, mem_type, dst, dstw, dst, dstw, src_r, 0);
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_cmov(struct sljit_compiler *compiler, sljit_s32 type,
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_select(struct sljit_compiler *compiler, sljit_s32 type,
sljit_s32 dst_reg,
- sljit_s32 src, sljit_sw srcw)
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2_reg)
{
+ sljit_ins *ptr;
+ sljit_uw size;
+#if (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
+ sljit_ins word = (sljit_ins)(type & SLJIT_32) >> 5;
+ sljit_s32 inp_flags = ((type & SLJIT_32) ? INT_DATA : WORD_DATA) | LOAD_DATA;
+#else /* !SLJIT_CONFIG_RISCV_64 */
+ sljit_s32 inp_flags = WORD_DATA | LOAD_DATA;
+#endif /* SLJIT_CONFIG_RISCV_64 */
+
+ SLJIT_ASSERT(WORD == 0 || WORD == 0x8);
+
CHECK_ERROR();
- CHECK(check_sljit_emit_cmov(compiler, type, dst_reg, src, srcw));
+ CHECK(check_sljit_emit_select(compiler, type, dst_reg, src1, src1w, src2_reg));
+
+ ADJUST_LOCAL_OFFSET(src1, src1w);
+
+ if (dst_reg != src2_reg) {
+ if (dst_reg == src1) {
+ src1 = src2_reg;
+ src1w = 0;
+ type ^= 0x1;
+ } else {
+ if (ADDRESSING_DEPENDS_ON(src1, dst_reg)) {
+ FAIL_IF(push_inst(compiler, ADDI | RD(TMP_REG1) | RS1(dst_reg) | IMM_I(0)));
+
+ if ((src1 & REG_MASK) == dst_reg)
+ src1 = (src1 & ~REG_MASK) | TMP_REG1;
+
+ if (OFFS_REG(src1) == dst_reg)
+ src1 = (src1 & ~OFFS_REG_MASK) | TO_OFFS_REG(TMP_REG1);
+ }
- return sljit_emit_cmov_generic(compiler, type, dst_reg, src, srcw);;
+ FAIL_IF(push_inst(compiler, ADDI | WORD | RD(dst_reg) | RS1(src2_reg) | IMM_I(0)));
+ }
+ }
+
+ size = compiler->size;
+
+ ptr = (sljit_ins*)ensure_buf(compiler, sizeof(sljit_ins));
+ FAIL_IF(!ptr);
+ compiler->size++;
+
+ if (src1 & SLJIT_MEM) {
+ FAIL_IF(emit_op_mem(compiler, inp_flags, dst_reg, src1, src1w));
+ } else if (src1 == SLJIT_IMM) {
+#if (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
+ if (word)
+ src1w = (sljit_s32)src1w;
+#endif /* SLJIT_CONFIG_RISCV_64 */
+ FAIL_IF(load_immediate(compiler, dst_reg, src1w, TMP_REG1));
+ } else
+ FAIL_IF(push_inst(compiler, ADDI | WORD | RD(dst_reg) | RS1(src1) | IMM_I(0)));
+
+ size = compiler->size - size;
+ *ptr = get_jump_instruction(type & ~SLJIT_32) | (sljit_ins)((size & 0x7) << 9) | (sljit_ins)((size >> 3) << 25);
+ return SLJIT_SUCCESS;
+}
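In sljit_emit_select the reserved slot at *ptr is filled in afterwards with an inverted conditional branch that appears to skip the "load src1 into dst_reg" sequence whenever src2_reg should win; size counts the branch itself plus the emitted move instructions. The two shifted fields drop that count straight into the B-type immediate (bits 11:8 hold imm[4:1], bits 31:25 hold imm[12|10:5]), giving a byte offset of 4 * size; the same trick is reused by sljit_emit_fselect below. A sketch under that reading (names are mine):

#include <stdint.h>
#include <assert.h>

/* Forward skip of `size` 32-bit instructions in a B-type branch:
   imm[4:2] = size & 7, imm[10:5] = size >> 3, target = branch + 4*size. */
static uint32_t encode_forward_skip(uint32_t branch_template, uint32_t size)
{
    assert(size < 0x200);   /* must fit in imm[10:2] */
    return branch_template | ((size & 0x7u) << 9) | ((size >> 3) << 25);
}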
+
+#undef WORD
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fselect(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 dst_freg,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2_freg)
+{
+ sljit_ins *ptr;
+ sljit_uw size;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_fselect(compiler, type, dst_freg, src1, src1w, src2_freg));
+
+ ADJUST_LOCAL_OFFSET(src1, src1w);
+
+ if (dst_freg != src2_freg) {
+ if (dst_freg == src1) {
+ src1 = src2_freg;
+ src1w = 0;
+ type ^= 0x1;
+ } else
+ FAIL_IF(push_inst(compiler, FSGNJ_S | FMT(type) | FRD(dst_freg) | FRS1(src2_freg) | FRS2(src2_freg)));
+ }
+
+ size = compiler->size;
+
+ ptr = (sljit_ins*)ensure_buf(compiler, sizeof(sljit_ins));
+ FAIL_IF(!ptr);
+ compiler->size++;
+
+ if (src1 & SLJIT_MEM)
+ FAIL_IF(emit_op_mem(compiler, FLOAT_DATA(type) | LOAD_DATA, dst_freg, src1, src1w));
+ else
+ FAIL_IF(push_inst(compiler, FSGNJ_S | FMT(type) | FRD(dst_freg) | FRS1(src1) | FRS2(src1)));
+
+ size = compiler->size - size;
+ *ptr = get_jump_instruction(type & ~SLJIT_32) | (sljit_ins)((size & 0x7) << 9) | (sljit_ins)((size >> 3) << 25);
+ return SLJIT_SUCCESS;
}
+#undef FLOAT_DATA
+#undef FMT
+
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_mem(struct sljit_compiler *compiler, sljit_s32 type,
sljit_s32 reg,
sljit_s32 mem, sljit_sw memw)
@@ -2729,31 +3088,31 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compi
return const_;
}
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_put_label* sljit_emit_put_label(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_mov_addr(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
{
- struct sljit_put_label *put_label;
+ struct sljit_jump *jump;
sljit_s32 dst_r;
CHECK_ERROR_PTR();
- CHECK_PTR(check_sljit_emit_put_label(compiler, dst, dstw));
+ CHECK_PTR(check_sljit_emit_mov_addr(compiler, dst, dstw));
ADJUST_LOCAL_OFFSET(dst, dstw);
- put_label = (struct sljit_put_label*)ensure_abuf(compiler, sizeof(struct sljit_put_label));
- PTR_FAIL_IF(!put_label);
- set_put_label(put_label, compiler, 0);
+ jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
+ PTR_FAIL_IF(!jump);
+ set_mov_addr(jump, compiler, 0);
dst_r = FAST_IS_REG(dst) ? dst : TMP_REG2;
PTR_FAIL_IF(push_inst(compiler, (sljit_ins)dst_r));
#if (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32)
compiler->size += 1;
-#else
+#else /* !SLJIT_CONFIG_RISCV_32 */
compiler->size += 5;
-#endif
+#endif /* SLJIT_CONFIG_RISCV_32 */
if (dst & SLJIT_MEM)
PTR_FAIL_IF(emit_op_mem(compiler, WORD_DATA, TMP_REG2, dst, dstw));
- return put_label;
+ return jump;
}
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_const(sljit_uw addr, sljit_sw new_constant, sljit_sw executable_offset)
diff --git a/src/3rdparty/pcre2/src/sljit/sljitNativeS390X.c b/src/3rdparty/pcre2/src/sljit/sljitNativeS390X.c
index 8b51bad9bc..99e846350f 100644
--- a/src/3rdparty/pcre2/src/sljit/sljitNativeS390X.c
+++ b/src/3rdparty/pcre2/src/sljit/sljitNativeS390X.c
@@ -38,17 +38,14 @@ SLJIT_API_FUNC_ATTRIBUTE const char* sljit_get_platform_name(void)
return "s390x" SLJIT_CPUINFO;
}
-/* Instructions. */
+/* Instructions are stored as 64 bit values regardless of their size. */
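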
typedef sljit_uw sljit_ins;
-/* Instruction tags (most significant halfword). */
-static const sljit_ins sljit_ins_const = (sljit_ins)1 << 48;
-
#define TMP_REG1 (SLJIT_NUMBER_OF_REGISTERS + 2)
#define TMP_REG2 (SLJIT_NUMBER_OF_REGISTERS + 3)
-static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 4] = {
- 0, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 15, 0, 1
+static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 5] = {
+ 0, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 15, 0, 1, 14
};
/* there are also a[2-15] available, but they are slower to access and
@@ -83,7 +80,7 @@ static const sljit_gpr r10 = 10; /* reg_map[9] */
static const sljit_gpr r11 = 11; /* reg_map[10] */
static const sljit_gpr r12 = 12; /* reg_map[11]: GOT */
static const sljit_gpr r13 = 13; /* reg_map[12]: Literal Pool pointer */
-static const sljit_gpr r14 = 14; /* reg_map[0]: return address and flag register */
+static const sljit_gpr r14 = 14; /* reg_map[0]: return address */
static const sljit_gpr r15 = 15; /* reg_map[SLJIT_NUMBER_OF_REGISTERS + 1]: stack pointer */
/* WARNING: r12 and r13 shouldn't be used as per ABI recommendation */
@@ -96,20 +93,16 @@ static const sljit_gpr r15 = 15; /* reg_map[SLJIT_NUMBER_OF_REGISTERS + 1]: stac
#define tmp0 r0
#define tmp1 r1
-/* TODO(carenas): flags should move to a different register so that
- * link register doesn't need to change
- */
-
/* When reg cannot be unused. */
#define IS_GPR_REG(reg) ((reg > 0) && (reg) <= SLJIT_SP)
/* Link register. */
static const sljit_gpr link_r = 14; /* r14 */
-#define TMP_FREG1 (0)
+#define TMP_FREG1 (SLJIT_NUMBER_OF_FLOAT_REGISTERS + 1)
-static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 1] = {
- 1, 0, 2, 4, 6, 3, 5, 7, 15, 14, 13, 12, 11, 10, 9, 8,
+static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 2] = {
+ 0, 0, 2, 4, 6, 3, 5, 7, 15, 14, 13, 12, 11, 10, 9, 8, 1
};
#define R0A(r) (r)
@@ -126,7 +119,10 @@ static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 1] = {
#define F0(r) ((sljit_ins)freg_map[r])
#define F4(r) (R4A((sljit_ins)freg_map[r]))
+#define F12(r) (R12A((sljit_ins)freg_map[r]))
#define F20(r) (R20A((sljit_ins)freg_map[r]))
+#define F28(r) (R28A((sljit_ins)freg_map[r]))
+#define F32(r) (R32A((sljit_ins)freg_map[r]))
#define F36(r) (R36A((sljit_ins)freg_map[r]))
struct sljit_s390x_const {
@@ -141,56 +137,21 @@ static SLJIT_INLINE sljit_gpr gpr(sljit_s32 r)
return reg_map[r];
}
-static SLJIT_INLINE sljit_gpr fgpr(sljit_s32 r)
-{
- SLJIT_ASSERT(r >= 0 && r < (sljit_s32)(sizeof(freg_map) / sizeof(freg_map[0])));
- return freg_map[r];
-}
-
-/* Size of instruction in bytes. Tags must already be cleared. */
-static SLJIT_INLINE sljit_uw sizeof_ins(sljit_ins ins)
-{
- /* keep faulting instructions */
- if (ins == 0)
- return 2;
-
- if ((ins & 0x00000000ffffL) == ins)
- return 2;
- if ((ins & 0x0000ffffffffL) == ins)
- return 4;
- if ((ins & 0xffffffffffffL) == ins)
- return 6;
-
- SLJIT_UNREACHABLE();
- return (sljit_uw)-1;
-}
-
static sljit_s32 push_inst(struct sljit_compiler *compiler, sljit_ins ins)
{
sljit_ins *ibuf = (sljit_ins *)ensure_buf(compiler, sizeof(sljit_ins));
FAIL_IF(!ibuf);
*ibuf = ins;
+
+ SLJIT_ASSERT(ins <= 0xffffffffffffL);
+
compiler->size++;
- return SLJIT_SUCCESS;
-}
+ if (ins & 0xffff00000000L)
+ compiler->size++;
-static sljit_s32 encode_inst(void **ptr, sljit_ins ins)
-{
- sljit_u16 *ibuf = (sljit_u16 *)*ptr;
- sljit_uw size = sizeof_ins(ins);
+ if (ins & 0xffffffff0000L)
+ compiler->size++;
- SLJIT_ASSERT((size & 6) == size);
- switch (size) {
- case 6:
- *ibuf++ = (sljit_u16)(ins >> 32);
- /* fallthrough */
- case 4:
- *ibuf++ = (sljit_u16)(ins >> 16);
- /* fallthrough */
- case 2:
- *ibuf++ = (sljit_u16)(ins);
- }
- *ptr = (void*)ibuf;
return SLJIT_SUCCESS;
}
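push_inst now keeps every s390x instruction in a single sljit_uw (at most 48 bits) and advances compiler->size by the instruction's length in 16-bit halfwords, so the later bookkeeping (labels, jumps, ins_size) is all in halfword units. A sketch of the same length test as a standalone helper (name is mine):

#include <stdint.h>

static unsigned ins_halfwords(uint64_t ins)
{
    unsigned halfwords = 1;          /* 2-byte format */
    if (ins & 0xffff00000000ULL)
        halfwords++;                 /* only 6-byte formats have these bits */
    if (ins & 0xffffffff0000ULL)
        halfwords++;                 /* 4- and 6-byte formats */
    return halfwords;                /* 1, 2 or 3 halfwords */
}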
@@ -217,6 +178,7 @@ static SLJIT_INLINE sljit_u8 get_cc(struct sljit_compiler *compiler, sljit_s32 t
}
/* fallthrough */
+ case SLJIT_ATOMIC_STORED:
case SLJIT_F_EQUAL:
case SLJIT_ORDERED_EQUAL:
return cc0;
@@ -236,6 +198,7 @@ static SLJIT_INLINE sljit_u8 get_cc(struct sljit_compiler *compiler, sljit_s32 t
return (cc1 | cc2 | cc3);
case SLJIT_LESS:
+ case SLJIT_ATOMIC_NOT_STORED:
return cc1;
case SLJIT_GREATER_EQUAL:
@@ -454,10 +417,12 @@ HAVE_FACILITY(have_misc2, MISCELLANEOUS_INSTRUCTION_EXTENSIONS_2_FACILITY)
static SLJIT_INLINE sljit_ins disp_s20(sljit_s32 d)
{
+ sljit_uw dh, dl;
+
SLJIT_ASSERT(is_s20(d));
- sljit_uw dh = (d >> 12) & 0xff;
- sljit_uw dl = (d << 8) & 0xfff00;
+ dh = (d >> 12) & 0xff;
+ dl = ((sljit_uw)d << 8) & 0xfff00;
return (dh | dl) << 8;
}
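disp_s20 splits a signed 20-bit displacement into the DL2 (low 12 bits) and DH2 (high 8 bits) fields of the long-displacement instruction formats, already shifted to their positions in the 48-bit instruction word. A sketch of the same split with the field positions spelled out (helper name is mine):

#include <stdint.h>

/* DL2 = d[11:0] at bits 27:16, DH2 = d[19:12] at bits 15:8,
   matching (dh | dl) << 8 above. */
static uint64_t disp_s20_fields(int32_t d)
{
    uint64_t dl = ((uint64_t)d & 0xfff) << 16;
    uint64_t dh = ((uint64_t)(d >> 12) & 0xff) << 8;
    return dl | dh;
}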
@@ -899,23 +864,17 @@ static sljit_s32 push_load_imm_inst(struct sljit_compiler *compiler, sljit_gpr t
if (((sljit_uw)v & ~(sljit_uw)0xffff000000000000) == 0)
return push_inst(compiler, llihh(target, (sljit_u16)(v >> 48)));
- /* 6 byte instructions (requires extended immediate facility) */
- if (have_eimm()) {
- if (is_s32(v))
- return push_inst(compiler, lgfi(target, (sljit_s32)v));
+ if (is_s32(v))
+ return push_inst(compiler, lgfi(target, (sljit_s32)v));
- if (((sljit_uw)v >> 32) == 0)
- return push_inst(compiler, llilf(target, (sljit_u32)v));
+ if (((sljit_uw)v >> 32) == 0)
+ return push_inst(compiler, llilf(target, (sljit_u32)v));
- if (((sljit_uw)v << 32) == 0)
- return push_inst(compiler, llihf(target, (sljit_u32)((sljit_uw)v >> 32)));
+ if (((sljit_uw)v << 32) == 0)
+ return push_inst(compiler, llihf(target, (sljit_u32)((sljit_uw)v >> 32)));
- FAIL_IF(push_inst(compiler, llilf(target, (sljit_u32)v)));
- return push_inst(compiler, iihf(target, (sljit_u32)(v >> 32)));
- }
-
- /* TODO(mundaym): instruction sequences that don't use extended immediates */
- abort();
+ FAIL_IF(push_inst(compiler, llilf(target, (sljit_u32)v)));
+ return push_inst(compiler, iihf(target, (sljit_u32)(v >> 32)));
}
struct addr {
@@ -995,24 +954,47 @@ static sljit_s32 make_addr_bx(struct sljit_compiler *compiler,
(cond) ? EVAL(i1, r, addr) : EVAL(i2, r, addr)
/* May clobber tmp1. */
-static sljit_s32 load_word(struct sljit_compiler *compiler, sljit_gpr dst_r,
- sljit_s32 src, sljit_sw srcw,
- sljit_s32 is_32bit)
+static sljit_s32 load_store_op(struct sljit_compiler *compiler, sljit_gpr reg,
+ sljit_s32 mem, sljit_sw memw,
+ sljit_s32 is_32bit, const sljit_ins* forms)
{
struct addr addr;
- sljit_ins ins;
- SLJIT_ASSERT(src & SLJIT_MEM);
+ SLJIT_ASSERT(mem & SLJIT_MEM);
- if (is_32bit && ((src & OFFS_REG_MASK) || is_u12(srcw) || !is_s20(srcw))) {
- FAIL_IF(make_addr_bx(compiler, &addr, src, srcw, tmp1));
- return push_inst(compiler, 0x58000000 /* l */ | R20A(dst_r) | R16A(addr.index) | R12A(addr.base) | (sljit_ins)addr.offset);
+ if (is_32bit && ((mem & OFFS_REG_MASK) || is_u12(memw) || !is_s20(memw))) {
+ FAIL_IF(make_addr_bx(compiler, &addr, mem, memw, tmp1));
+ return push_inst(compiler, forms[0] | R20A(reg) | R16A(addr.index) | R12A(addr.base) | (sljit_ins)addr.offset);
}
- FAIL_IF(make_addr_bxy(compiler, &addr, src, srcw, tmp1));
+ FAIL_IF(make_addr_bxy(compiler, &addr, mem, memw, tmp1));
+ return push_inst(compiler, (is_32bit ? forms[1] : forms[2]) | R36A(reg) | R32A(addr.index) | R28A(addr.base) | disp_s20(addr.offset));
+}
- ins = is_32bit ? 0xe30000000058 /* ly */ : 0xe30000000004 /* lg */;
- return push_inst(compiler, ins | R36A(dst_r) | R32A(addr.index) | R28A(addr.base) | disp_s20(addr.offset));
+static const sljit_ins load_forms[3] = {
+ 0x58000000 /* l */,
+ 0xe30000000058 /* ly */,
+ 0xe30000000004 /* lg */
+};
+
+static const sljit_ins store_forms[3] = {
+ 0x50000000 /* st */,
+ 0xe30000000050 /* sty */,
+ 0xe30000000024 /* stg */
+};
+
+static const sljit_ins load_halfword_forms[3] = {
+ 0x48000000 /* lh */,
+ 0xe30000000078 /* lhy */,
+ 0xe30000000015 /* lgh */
+};
+
+/* May clobber tmp1. */
+static SLJIT_INLINE sljit_s32 load_word(struct sljit_compiler *compiler, sljit_gpr dst_r,
+ sljit_s32 src, sljit_sw srcw,
+ sljit_s32 is_32bit)
+{
+ return load_store_op(compiler, dst_r, src, srcw, is_32bit, load_forms);
}
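The word and halfword loads and stores are now table driven: each forms[] triple holds the short RX encoding, its RXY long-displacement variant and the 64-bit variant, and load_store_op picks one of the three. A simplified sketch of that selection (the real helper also routes index registers and out-of-range displacements through make_addr_bx before using the short form):

#include <stdint.h>

static uint64_t pick_form(const uint64_t forms[3], int is_32bit, int short_form_ok)
{
    if (is_32bit && short_form_ok)
        return forms[0];             /* e.g. l / st / lh */
    return is_32bit ? forms[1]       /* e.g. ly / sty / lhy */
                    : forms[2];      /* e.g. lg / stg / lgh */
}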
/* May clobber tmp1. */
@@ -1032,24 +1014,11 @@ static sljit_s32 load_unsigned_word(struct sljit_compiler *compiler, sljit_gpr d
}
/* May clobber tmp1. */
-static sljit_s32 store_word(struct sljit_compiler *compiler, sljit_gpr src_r,
+static SLJIT_INLINE sljit_s32 store_word(struct sljit_compiler *compiler, sljit_gpr src_r,
sljit_s32 dst, sljit_sw dstw,
sljit_s32 is_32bit)
{
- struct addr addr;
- sljit_ins ins;
-
- SLJIT_ASSERT(dst & SLJIT_MEM);
-
- if (is_32bit && ((dst & OFFS_REG_MASK) || is_u12(dstw) || !is_s20(dstw))) {
- FAIL_IF(make_addr_bx(compiler, &addr, dst, dstw, tmp1));
- return push_inst(compiler, 0x50000000 /* st */ | R20A(src_r) | R16A(addr.index) | R12A(addr.base) | (sljit_ins)addr.offset);
- }
-
- FAIL_IF(make_addr_bxy(compiler, &addr, dst, dstw, tmp1));
-
- ins = is_32bit ? 0xe30000000050 /* sty */ : 0xe30000000024 /* stg */;
- return push_inst(compiler, ins | R36A(src_r) | R32A(addr.index) | R28A(addr.base) | disp_s20(addr.offset));
+ return load_store_op(compiler, src_r, dst, dstw, is_32bit, store_forms);
}
#undef WHEN
@@ -1058,15 +1027,17 @@ static sljit_s32 emit_move(struct sljit_compiler *compiler,
sljit_gpr dst_r,
sljit_s32 src, sljit_sw srcw)
{
+ sljit_gpr src_r;
+
SLJIT_ASSERT(!IS_GPR_REG(src) || dst_r != gpr(src & REG_MASK));
- if (src & SLJIT_IMM)
+ if (src == SLJIT_IMM)
return push_load_imm_inst(compiler, dst_r, srcw);
if (src & SLJIT_MEM)
return load_word(compiler, dst_r, src, srcw, (compiler->mode & SLJIT_32) != 0);
- sljit_gpr src_r = gpr(src & REG_MASK);
+ src_r = gpr(src & REG_MASK);
return push_inst(compiler, (compiler->mode & SLJIT_32) ? lr(dst_r, src_r) : lgr(dst_r, src_r));
}
@@ -1259,10 +1230,10 @@ static sljit_s32 emit_siy(struct sljit_compiler *compiler, sljit_ins ins,
sljit_s32 dst, sljit_sw dstw,
sljit_sw srcw)
{
- SLJIT_ASSERT(dst & SLJIT_MEM);
-
sljit_gpr dst_r = tmp1;
+ SLJIT_ASSERT(dst & SLJIT_MEM);
+
if (dst & OFFS_REG_MASK) {
sljit_gpr index = tmp1;
@@ -1421,97 +1392,60 @@ static sljit_s32 emit_non_commutative(struct sljit_compiler *compiler, const str
return emit_rrf(compiler, ins, dst, src1, src1w, src2, src2w);
}
-SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compiler)
+SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compiler, sljit_s32 options, void *exec_allocator_data)
{
struct sljit_label *label;
struct sljit_jump *jump;
- struct sljit_s390x_const *const_;
- struct sljit_put_label *put_label;
+ struct sljit_const *const_;
sljit_sw executable_offset;
- sljit_uw ins_size = 0; /* instructions */
+ sljit_uw ins_size = compiler->size << 1;
sljit_uw pool_size = 0; /* literal pool */
sljit_uw pad_size;
- sljit_uw i, j = 0;
+ sljit_uw half_count;
+ SLJIT_NEXT_DEFINE_TYPES;
struct sljit_memory_fragment *buf;
- void *code, *code_ptr;
+ sljit_ins *buf_ptr;
+ sljit_ins *buf_end;
+ sljit_u16 *code;
+ sljit_u16 *code_ptr;
sljit_uw *pool, *pool_ptr;
- sljit_sw source, offset; /* TODO(carenas): only need 32 bit */
+ sljit_ins ins;
+ sljit_sw source, offset;
CHECK_ERROR_PTR();
CHECK_PTR(check_sljit_generate_code(compiler));
reverse_buf(compiler);
- /* branch handling */
- label = compiler->labels;
jump = compiler->jumps;
- put_label = compiler->put_labels;
-
- /* TODO(carenas): compiler->executable_size could be calculated
- * before to avoid the following loop (except for
- * pool_size)
- */
- /* calculate the size of the code */
- for (buf = compiler->buf; buf != NULL; buf = buf->next) {
- sljit_uw len = buf->used_size / sizeof(sljit_ins);
- sljit_ins *ibuf = (sljit_ins *)buf->memory;
- for (i = 0; i < len; ++i, ++j) {
- sljit_ins ins = ibuf[i];
-
- /* TODO(carenas): instruction tag vs size/addr == j
- * using instruction tags for const is creative
- * but unlike all other architectures, and is not
- * done consistently for all other objects.
- * This might need reviewing later.
- */
- if (ins & sljit_ins_const) {
- pool_size += sizeof(*pool);
- ins &= ~sljit_ins_const;
- }
- if (label && label->size == j) {
- label->size = ins_size;
- label = label->next;
- }
- if (jump && jump->addr == j) {
- if ((jump->flags & SLJIT_REWRITABLE_JUMP) || (jump->flags & JUMP_ADDR)) {
- /* encoded: */
- /* brasl %r14, <rel_addr> (or brcl <mask>, <rel_addr>) */
- /* replace with: */
- /* lgrl %r1, <pool_addr> */
- /* bras %r14, %r1 (or bcr <mask>, %r1) */
- pool_size += sizeof(*pool);
- ins_size += 2;
- }
- jump = jump->next;
- }
- if (put_label && put_label->addr == j) {
- pool_size += sizeof(*pool);
- put_label = put_label->next;
- }
- ins_size += sizeof_ins(ins);
+ while (jump != NULL) {
+ if (jump->flags & (SLJIT_REWRITABLE_JUMP | JUMP_ADDR | JUMP_MOV_ADDR)) {
+ /* encoded: */
+ /* brasl %r14, <rel_addr> (or brcl <mask>, <rel_addr>) */
+ /* replace with: */
+ /* lgrl %r1, <pool_addr> */
+ /* bras %r14, %r1 (or bcr <mask>, %r1) */
+ pool_size += sizeof(*pool);
+ if (!(jump->flags & JUMP_MOV_ADDR))
+ ins_size += 2;
}
+ jump = jump->next;
}
- /* emit trailing label */
- if (label && label->size == j) {
- label->size = ins_size;
- label = label->next;
+ const_ = compiler->consts;
+ while (const_) {
+ pool_size += sizeof(*pool);
+ const_ = const_->next;
}
- SLJIT_ASSERT(!label);
- SLJIT_ASSERT(!jump);
- SLJIT_ASSERT(!put_label);
-
/* pad code size to 8 bytes so is accessible with half word offsets */
/* the literal pool needs to be doubleword aligned */
pad_size = ((ins_size + 7UL) & ~7UL) - ins_size;
SLJIT_ASSERT(pad_size < 8UL);
/* allocate target buffer */
- code = SLJIT_MALLOC_EXEC(ins_size + pad_size + pool_size,
- compiler->exec_allocator_data);
+ code = (sljit_u16*)allocate_executable_memory(ins_size + pad_size + pool_size, options, exec_allocator_data, &executable_offset);
PTR_FAIL_WITH_EXEC_IF(code);
code_ptr = code;
- executable_offset = SLJIT_EXEC_OFFSET(code);
/* TODO(carenas): pool is optional, and the ABI recommends it to
* be created before the function code, instead of
@@ -1520,126 +1454,166 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
*/
pool = (sljit_uw *)((sljit_uw)code + ins_size + pad_size);
pool_ptr = pool;
- const_ = (struct sljit_s390x_const *)compiler->consts;
+ buf = compiler->buf;
+ half_count = 0;
- /* update label addresses */
label = compiler->labels;
- while (label) {
- label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(
- (sljit_uw)code_ptr + label->size, executable_offset);
- label = label->next;
- }
-
- /* reset jumps */
jump = compiler->jumps;
- put_label = compiler->put_labels;
+ const_ = compiler->consts;
+ SLJIT_NEXT_INIT_TYPES();
+ SLJIT_GET_NEXT_MIN();
- /* emit the code */
- j = 0;
- for (buf = compiler->buf; buf != NULL; buf = buf->next) {
- sljit_uw len = buf->used_size / sizeof(sljit_ins);
- sljit_ins *ibuf = (sljit_ins *)buf->memory;
- for (i = 0; i < len; ++i, ++j) {
- sljit_ins ins = ibuf[i];
- if (ins & sljit_ins_const) {
- /* clear the const tag */
- ins &= ~sljit_ins_const;
+ do {
+ buf_ptr = (sljit_ins*)buf->memory;
+ buf_end = buf_ptr + (buf->used_size >> 3);
+ do {
+ ins = *buf_ptr++;
+
+ if (next_min_addr == half_count) {
+ SLJIT_ASSERT(!label || label->size >= half_count);
+ SLJIT_ASSERT(!jump || jump->addr >= half_count);
+ SLJIT_ASSERT(!const_ || const_->addr >= half_count);
+
+ if (next_min_addr == next_label_size) {
+ label->u.addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
+ label = label->next;
+ next_label_size = SLJIT_GET_NEXT_SIZE(label);
+ }
- /* update instruction with relative address of constant */
- source = (sljit_sw)code_ptr;
- offset = (sljit_sw)pool_ptr - source;
+ if (next_min_addr == next_jump_addr) {
+ if (SLJIT_UNLIKELY(jump->flags & JUMP_MOV_ADDR)) {
+ source = (sljit_sw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
+
+ jump->addr = (sljit_uw)pool_ptr;
+
+ /* store target into pool */
+ offset = (sljit_sw)SLJIT_ADD_EXEC_OFFSET(pool_ptr, executable_offset) - source;
+ pool_ptr++;
+
+ SLJIT_ASSERT(!(offset & 1));
+ offset >>= 1;
+ SLJIT_ASSERT(is_s32(offset));
+ ins |= (sljit_ins)offset & 0xffffffff;
+ } else if (jump->flags & (SLJIT_REWRITABLE_JUMP | JUMP_ADDR)) {
+ sljit_ins arg;
+
+ jump->addr = (sljit_uw)pool_ptr;
+
+ /* load address into tmp1 */
+ source = (sljit_sw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
+ offset = (sljit_sw)SLJIT_ADD_EXEC_OFFSET(pool_ptr, executable_offset) - source;
+
+ SLJIT_ASSERT(!(offset & 1));
+ offset >>= 1;
+ SLJIT_ASSERT(is_s32(offset));
+
+ code_ptr[0] = (sljit_u16)(0xc408 | R4A(tmp1) /* lgrl */);
+ code_ptr[1] = (sljit_u16)(offset >> 16);
+ code_ptr[2] = (sljit_u16)offset;
+ code_ptr += 3;
+ pool_ptr++;
+
+ /* branch to tmp1 */
+ arg = (ins >> 36) & 0xf;
+ if (((ins >> 32) & 0xf) == 4) {
+ /* brcl -> bcr */
+ ins = bcr(arg, tmp1);
+ } else {
+ SLJIT_ASSERT(((ins >> 32) & 0xf) == 5);
+ /* brasl -> basr */
+ ins = basr(arg, tmp1);
+ }
+
+ /* Adjust half_count. */
+ half_count += 2;
+ } else
+ jump->addr = (sljit_uw)code_ptr;
+
+ jump = jump->next;
+ next_jump_addr = SLJIT_GET_NEXT_ADDRESS(jump);
+ } else if (next_min_addr == next_const_addr) {
+ /* update instruction with relative address of constant */
+ source = (sljit_sw)code_ptr;
+ offset = (sljit_sw)pool_ptr - source;
+
+ SLJIT_ASSERT(!(offset & 0x1));
+ offset >>= 1; /* halfword (not byte) offset */
+ SLJIT_ASSERT(is_s32(offset));
- SLJIT_ASSERT(!(offset & 1));
- offset >>= 1; /* halfword (not byte) offset */
- SLJIT_ASSERT(is_s32(offset));
+ ins |= (sljit_ins)offset & 0xffffffff;
- ins |= (sljit_ins)offset & 0xffffffff;
+ /* update address */
+ const_->addr = (sljit_uw)pool_ptr;
- /* update address */
- const_->const_.addr = (sljit_uw)pool_ptr;
+ /* store initial value into pool and update pool address */
+ *(pool_ptr++) = (sljit_uw)(((struct sljit_s390x_const*)const_)->init_value);
- /* store initial value into pool and update pool address */
- *(pool_ptr++) = (sljit_uw)const_->init_value;
+ /* move to next constant */
+ const_ = const_->next;
+ next_const_addr = SLJIT_GET_NEXT_ADDRESS(const_);
+ }
- /* move to next constant */
- const_ = (struct sljit_s390x_const *)const_->const_.next;
+ SLJIT_GET_NEXT_MIN();
}
- if (jump && jump->addr == j) {
- sljit_sw target = (sljit_sw)((jump->flags & JUMP_LABEL) ? jump->u.label->addr : jump->u.target);
- if ((jump->flags & SLJIT_REWRITABLE_JUMP) || (jump->flags & JUMP_ADDR)) {
- jump->addr = (sljit_uw)pool_ptr;
- /* load address into tmp1 */
- source = (sljit_sw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
- offset = (sljit_sw)SLJIT_ADD_EXEC_OFFSET(pool_ptr, executable_offset) - source;
+ if (ins & 0xffff00000000L) {
+ *code_ptr++ = (sljit_u16)(ins >> 32);
+ half_count++;
+ }
- SLJIT_ASSERT(!(offset & 1));
- offset >>= 1;
- SLJIT_ASSERT(is_s32(offset));
+ if (ins & 0xffffffff0000L) {
+ *code_ptr++ = (sljit_u16)(ins >> 16);
+ half_count++;
+ }
- encode_inst(&code_ptr, lgrl(tmp1, offset & 0xffffffff));
-
- /* store jump target into pool and update pool address */
- *(pool_ptr++) = (sljit_uw)target;
-
- /* branch to tmp1 */
- sljit_ins op = (ins >> 32) & 0xf;
- sljit_ins arg = (ins >> 36) & 0xf;
- switch (op) {
- case 4: /* brcl -> bcr */
- ins = bcr(arg, tmp1);
- break;
- case 5: /* brasl -> basr */
- ins = basr(arg, tmp1);
- break;
- default:
- abort();
- }
- }
- else {
- jump->addr = (sljit_uw)code_ptr + 2;
- source = (sljit_sw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
- offset = target - source;
+ *code_ptr++ = (sljit_u16)ins;
+ half_count++;
+ } while (buf_ptr < buf_end);
- /* offset must be halfword aligned */
- SLJIT_ASSERT(!(offset & 1));
- offset >>= 1;
- SLJIT_ASSERT(is_s32(offset)); /* TODO(mundaym): handle arbitrary offsets */
+ buf = buf->next;
+ } while (buf);
- /* patch jump target */
- ins |= (sljit_ins)offset & 0xffffffff;
- }
- jump = jump->next;
- }
- if (put_label && put_label->addr == j) {
- source = (sljit_sw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
+ if (next_label_size == half_count) {
+ label->u.addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
+ label = label->next;
+ }
- SLJIT_ASSERT(put_label->label);
- put_label->addr = (sljit_uw)code_ptr;
+ SLJIT_ASSERT(!label);
+ SLJIT_ASSERT(!jump);
+ SLJIT_ASSERT(!const_);
+ SLJIT_ASSERT(code + (ins_size >> 1) == code_ptr);
+ SLJIT_ASSERT((sljit_u8 *)pool + pool_size == (sljit_u8 *)pool_ptr);
- /* store target into pool */
- *pool_ptr = put_label->label->addr;
- offset = (sljit_sw)SLJIT_ADD_EXEC_OFFSET(pool_ptr, executable_offset) - source;
- pool_ptr++;
+ jump = compiler->jumps;
+ while (jump != NULL) {
+ offset = (sljit_sw)((jump->flags & JUMP_ADDR) ? jump->u.target : jump->u.label->u.addr);
- SLJIT_ASSERT(!(offset & 1));
- offset >>= 1;
- SLJIT_ASSERT(is_s32(offset));
- ins |= (sljit_ins)offset & 0xffffffff;
+ if (jump->flags & (SLJIT_REWRITABLE_JUMP | JUMP_ADDR | JUMP_MOV_ADDR)) {
+ /* Store jump target into pool. */
+ *(sljit_uw*)(jump->addr) = (sljit_uw)offset;
+ } else {
+ code_ptr = (sljit_u16*)jump->addr;
+ offset -= (sljit_sw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
- put_label = put_label->next;
- }
- encode_inst(&code_ptr, ins);
+ /* offset must be halfword aligned */
+ SLJIT_ASSERT(!(offset & 1));
+ offset >>= 1;
+ SLJIT_ASSERT(is_s32(offset)); /* TODO(mundaym): handle arbitrary offsets */
+
+ code_ptr[1] = (sljit_u16)(offset >> 16);
+ code_ptr[2] = (sljit_u16)offset;
}
+ jump = jump->next;
}
- SLJIT_ASSERT((sljit_u8 *)code + ins_size == code_ptr);
- SLJIT_ASSERT((sljit_u8 *)pool + pool_size == (sljit_u8 *)pool_ptr);
compiler->error = SLJIT_ERR_COMPILED;
compiler->executable_offset = executable_offset;
compiler->executable_size = ins_size;
- code = SLJIT_ADD_EXEC_OFFSET(code, executable_offset);
- code_ptr = SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
+ if (pool_size)
+ compiler->executable_size += (pad_size + pool_size);
+
+ code = (sljit_u16 *)SLJIT_ADD_EXEC_OFFSET(code, executable_offset);
+ code_ptr = (sljit_u16 *)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
SLJIT_CACHE_FLUSH(code, code_ptr);
SLJIT_UPDATE_WX_FLAGS(code, code_ptr, 1);
return code;
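The rewritten generate_code walks the instruction buffer once, emitting 16-bit units and resolving labels, constants and pool-based jumps on the fly; a second pass over the jumps then writes absolute targets into the literal pool and patches the remaining relative brasl/brcl branches in place. Those instructions carry a 32-bit halfword-scaled offset in their second and third 16-bit units, which is what the final loop writes; a sketch of that patch step (names are mine):

#include <stdint.h>

static void patch_rel_branch(uint16_t *code_ptr, int64_t byte_offset)
{
    int64_t halfwords = byte_offset >> 1;     /* offsets are halfword scaled */
    code_ptr[1] = (uint16_t)(halfwords >> 16);
    code_ptr[2] = (uint16_t)halfwords;
}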
@@ -1650,12 +1624,25 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_has_cpu_feature(sljit_s32 feature_type)
/* TODO(mundaym): implement all */
switch (feature_type) {
case SLJIT_HAS_FPU:
+#ifdef SLJIT_IS_FPU_AVAILABLE
+ return (SLJIT_IS_FPU_AVAILABLE) != 0;
+#else
+ return 1;
+#endif /* SLJIT_IS_FPU_AVAILABLE */
+
case SLJIT_HAS_CLZ:
+ case SLJIT_HAS_REV:
case SLJIT_HAS_ROT:
case SLJIT_HAS_PREFETCH:
+ case SLJIT_HAS_COPY_F32:
+ case SLJIT_HAS_COPY_F64:
+ case SLJIT_HAS_SIMD:
+ case SLJIT_HAS_ATOMIC:
return 1;
+
case SLJIT_HAS_CTZ:
return 2;
+
case SLJIT_HAS_CMOV:
return have_lscond1() ? 1 : 0;
}
@@ -1664,7 +1651,8 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_has_cpu_feature(sljit_s32 feature_type)
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_cmp_info(sljit_s32 type)
{
- return (type >= SLJIT_UNORDERED && type <= SLJIT_ORDERED_LESS_EQUAL);
+ SLJIT_UNUSED_ARG(type);
+ return 0;
}
/* --------------------------------------------------------------------- */
@@ -1741,7 +1729,10 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compi
local_size = (local_size + SLJIT_S390X_DEFAULT_STACK_FRAME_SIZE + 0xf) & ~0xf;
compiler->local_size = local_size;
- FAIL_IF(push_inst(compiler, 0xe30000000071 /* lay */ | R36A(r15) | R28A(r15) | disp_s20(-local_size)));
+ if (is_s20(-local_size))
+ FAIL_IF(push_inst(compiler, 0xe30000000071 /* lay */ | R36A(r15) | R28A(r15) | disp_s20(-local_size)));
+ else
+ FAIL_IF(push_inst(compiler, 0xc20400000000 /* slgfi */ | R36A(r15) | (sljit_ins)local_size));
if (options & SLJIT_ENTER_REG_ARG)
return SLJIT_SUCCESS;
@@ -1786,8 +1777,10 @@ static sljit_s32 emit_stack_frame_release(struct sljit_compiler *compiler, sljit
if (is_u12(local_size))
FAIL_IF(push_inst(compiler, 0x41000000 /* ly */ | R20A(r15) | R12A(r15) | (sljit_ins)local_size));
- else
+ else if (is_s20(local_size))
FAIL_IF(push_inst(compiler, 0xe30000000071 /* lay */ | R36A(r15) | R28A(r15) | disp_s20(local_size)));
+ else
+ FAIL_IF(push_inst(compiler, 0xc20a00000000 /* algfi */ | R36A(r15) | (sljit_ins)local_size));
offset = 2 * SSIZE_OF(sw);
if (saveds + scratches >= SLJIT_NUMBER_OF_REGISTERS) {
@@ -2011,12 +2004,85 @@ static sljit_s32 sljit_emit_clz_ctz(struct sljit_compiler *compiler, sljit_s32 o
return push_inst(compiler, ((op & SLJIT_32) ? 0x1800 /* lr */ : 0xb9040000 /* lgr */) | R4A(dst_r) | R0A(tmp0));
}
+static sljit_s32 sljit_emit_rev(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
+{
+ struct addr addr;
+ sljit_gpr reg;
+ sljit_ins ins;
+ sljit_s32 opcode = GET_OPCODE(op);
+ sljit_s32 is_16bit = (opcode == SLJIT_REV_U16 || opcode == SLJIT_REV_S16);
+
+ if (dst & SLJIT_MEM) {
+ if (src & SLJIT_MEM) {
+ FAIL_IF(load_store_op(compiler, tmp0, src, srcw, op & SLJIT_32, is_16bit ? load_halfword_forms : load_forms));
+ reg = tmp0;
+ } else
+ reg = gpr(src);
+
+ FAIL_IF(make_addr_bxy(compiler, &addr, dst, dstw, tmp1));
+
+ if (is_16bit)
+ ins = 0xe3000000003f /* strvh */;
+ else
+ ins = (op & SLJIT_32) ? 0xe3000000003e /* strv */ : 0xe3000000002f /* strvg */;
+
+ return push_inst(compiler, ins | R36A(reg) | R32A(addr.index) | R28A(addr.base) | disp_s20(addr.offset));
+ }
+
+ reg = gpr(dst);
+
+ if (src & SLJIT_MEM) {
+ FAIL_IF(make_addr_bxy(compiler, &addr, src, srcw, tmp1));
+
+ if (is_16bit)
+ ins = 0xe3000000001f /* lrvh */;
+ else
+ ins = (op & SLJIT_32) ? 0xe3000000001e /* lrv */ : 0xe3000000000f /* lrvg */;
+
+ FAIL_IF(push_inst(compiler, ins | R36A(reg) | R32A(addr.index) | R28A(addr.base) | disp_s20(addr.offset)));
+
+ if (opcode == SLJIT_REV)
+ return SLJIT_SUCCESS;
+
+ if (is_16bit) {
+ if (op & SLJIT_32)
+ ins = (opcode == SLJIT_REV_U16) ? 0xb9950000 /* llhr */ : 0xb9270000 /* lhr */;
+ else
+ ins = (opcode == SLJIT_REV_U16) ? 0xb9850000 /* llghr */ : 0xb9070000 /* lghr */;
+ } else
+ ins = (opcode == SLJIT_REV_U32) ? 0xb9160000 /* llgfr */ : 0xb9140000 /* lgfr */;
+
+ return push_inst(compiler, ins | R4A(reg) | R0A(reg));
+ }
+
+ ins = (op & SLJIT_32) ? 0xb91f0000 /* lrvr */ : 0xb90f0000 /* lrvgr */;
+ FAIL_IF(push_inst(compiler, ins | R4A(reg) | R0A(gpr(src))));
+
+ if (opcode == SLJIT_REV)
+ return SLJIT_SUCCESS;
+
+ if (!is_16bit) {
+ ins = (opcode == SLJIT_REV_U32) ? 0xb9160000 /* llgfr */ : 0xb9140000 /* lgfr */;
+ return push_inst(compiler, ins | R4A(reg) | R0A(reg));
+ }
+
+ if (op & SLJIT_32) {
+ ins = (opcode == SLJIT_REV_U16) ? 0x88000000 /* srl */ : 0x8a000000 /* sra */;
+ return push_inst(compiler, ins | R20A(reg) | 16);
+ }
+
+ ins = (opcode == SLJIT_REV_U16) ? 0xeb000000000c /* srlg */ : 0xeb000000000a /* srag */;
+ return push_inst(compiler, ins | R36A(reg) | R32A(reg) | (48 << 16));
+}
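sljit_emit_rev leans on the lrv/lrvr family for the byte swap and then re-extends the 16- and 32-bit variants, since after a 32-bit swap the interesting halfword sits in the upper 16 bits. A sketch of what the lrvr plus srl/sra pair computes for the 32-bit REV_U16/REV_S16 forms (uses the GCC/Clang bswap builtin):

#include <stdint.h>

static uint32_t rev_u16_value(uint32_t value)
{
    return __builtin_bswap32(value) >> 16;            /* lrvr; srl ...,16 */
}

static int32_t rev_s16_value(uint32_t value)
{
    return (int32_t)__builtin_bswap32(value) >> 16;   /* lrvr; sra ...,16 */
}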
+
/* LEVAL will be defined later with different parameters as needed */
#define WHEN2(cond, i1, i2) (cond) ? LEVAL(i1) : LEVAL(i2)
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compiler, sljit_s32 op,
- sljit_s32 dst, sljit_sw dstw,
- sljit_s32 src, sljit_sw srcw)
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
{
sljit_ins ins;
struct addr mem;
@@ -2087,7 +2153,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compile
return SLJIT_SUCCESS;
}
/* LOAD IMMEDIATE */
- if (FAST_IS_REG(dst) && (src & SLJIT_IMM)) {
+ if (FAST_IS_REG(dst) && src == SLJIT_IMM) {
switch (opcode) {
case SLJIT_MOV_U8:
srcw = (sljit_sw)((sljit_u8)(srcw));
@@ -2166,14 +2232,14 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compile
return SLJIT_SUCCESS;
}
/* STORE and STORE IMMEDIATE */
- if ((dst & SLJIT_MEM)
- && (FAST_IS_REG(src) || (src & SLJIT_IMM))) {
+ if ((dst & SLJIT_MEM) && (FAST_IS_REG(src) || src == SLJIT_IMM)) {
+ struct addr mem;
sljit_gpr reg = FAST_IS_REG(src) ? gpr(src) : tmp0;
- if (src & SLJIT_IMM) {
+
+ if (src == SLJIT_IMM) {
/* TODO(mundaym): MOVE IMMEDIATE? */
FAIL_IF(push_load_imm_inst(compiler, reg, srcw));
}
- struct addr mem;
FAIL_IF(make_addr_bxy(compiler, &mem, dst, dstw, tmp1));
switch (opcode) {
case SLJIT_MOV_U8:
@@ -2240,39 +2306,15 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compile
SLJIT_UNREACHABLE();
}
- SLJIT_ASSERT((src & SLJIT_IMM) == 0); /* no immediates */
+ SLJIT_ASSERT(src != SLJIT_IMM);
- dst_r = FAST_IS_REG(dst) ? gpr(REG_MASK & dst) : tmp0;
- src_r = FAST_IS_REG(src) ? gpr(REG_MASK & src) : tmp0;
+ dst_r = FAST_IS_REG(dst) ? gpr(dst) : tmp0;
+ src_r = FAST_IS_REG(src) ? gpr(src) : tmp0;
compiler->status_flags_state = op & (VARIABLE_FLAG_MASK | SLJIT_SET_Z);
/* TODO(mundaym): optimize loads and stores */
switch (opcode) {
- case SLJIT_NOT:
- if (src & SLJIT_MEM)
- FAIL_IF(load_word(compiler, src_r, src, srcw, op & SLJIT_32));
-
- /* emulate ~x with x^-1 */
- if (!(op & SLJIT_32)) {
- FAIL_IF(push_load_imm_inst(compiler, tmp1, -1));
- if (src_r != dst_r)
- FAIL_IF(push_inst(compiler, lgr(dst_r, src_r)));
-
- FAIL_IF(push_inst(compiler, xgr(dst_r, tmp1)));
- break;
- }
-
- if (have_eimm())
- FAIL_IF(push_inst(compiler, xilf(dst_r, 0xffffffff)));
- else {
- FAIL_IF(push_load_imm_inst(compiler, tmp1, -1));
- if (src_r != dst_r)
- FAIL_IF(push_inst(compiler, lr(dst_r, src_r)));
-
- FAIL_IF(push_inst(compiler, xr(dst_r, tmp1)));
- }
- break;
case SLJIT_CLZ:
case SLJIT_CTZ:
if (src & SLJIT_MEM)
@@ -2280,13 +2322,18 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compile
FAIL_IF(sljit_emit_clz_ctz(compiler, op, dst_r, src_r));
break;
+ case SLJIT_REV_U32:
+ case SLJIT_REV_S32:
+ op |= SLJIT_32;
+ /* fallthrough */
+ case SLJIT_REV:
+ case SLJIT_REV_U16:
+ case SLJIT_REV_S16:
+ return sljit_emit_rev(compiler, op, dst, dstw, src, srcw);
default:
SLJIT_UNREACHABLE();
}
- if ((op & (SLJIT_SET_Z | VARIABLE_FLAG_MASK)) == (SLJIT_SET_Z | SLJIT_SET_OVERFLOW))
- FAIL_IF(update_zero_overflow(compiler, op, dst_r));
-
if (dst & SLJIT_MEM)
return store_word(compiler, dst_r, dst, dstw, op & SLJIT_32);
@@ -2337,7 +2384,7 @@ static sljit_s32 sljit_emit_add(struct sljit_compiler *compiler, sljit_s32 op,
const struct ins_forms *forms;
sljit_ins ins;
- if (src2 & SLJIT_IMM) {
+ if (src2 == SLJIT_IMM) {
if (!sets_zero_overflow && is_s8(src2w) && (src1 & SLJIT_MEM) && (dst == src1 && dstw == src1w)) {
if (sets_overflow)
ins = (op & SLJIT_32) ? 0xeb000000006a /* asi */ : 0xeb000000007a /* agsi */;
@@ -2417,14 +2464,13 @@ static sljit_s32 sljit_emit_sub(struct sljit_compiler *compiler, sljit_s32 op,
const struct ins_forms *forms;
sljit_ins ins;
- if (dst == (sljit_s32)tmp0 && flag_type <= SLJIT_SIG_LESS_EQUAL) {
+ if (dst == TMP_REG2 && flag_type <= SLJIT_SIG_LESS_EQUAL) {
int compare_signed = flag_type >= SLJIT_SIG_LESS;
compiler->status_flags_state |= SLJIT_CURRENT_FLAGS_COMPARE;
- if (src2 & SLJIT_IMM) {
- if (compare_signed || ((op & VARIABLE_FLAG_MASK) == 0 && is_s32(src2w)))
- {
+ if (src2 == SLJIT_IMM) {
+ if (compare_signed || ((op & VARIABLE_FLAG_MASK) == 0 && is_s32(src2w))) {
if ((op & SLJIT_32) || is_s32(src2w)) {
ins = (op & SLJIT_32) ? 0xc20d00000000 /* cfi */ : 0xc20c00000000 /* cgfi */;
return emit_ri(compiler, ins, src1, src1, src1w, src2w, RIL_A);
@@ -2465,7 +2511,7 @@ static sljit_s32 sljit_emit_sub(struct sljit_compiler *compiler, sljit_s32 op,
goto done;
}
- if (src2 & SLJIT_IMM) {
+ if (src2 == SLJIT_IMM) {
sljit_sw neg_src2w = -src2w;
if (sets_signed || neg_src2w != 0 || (op & (SLJIT_SET_Z | VARIABLE_FLAG_MASK)) == 0) {
@@ -2518,7 +2564,7 @@ done:
- the first operand is less if the sign bit of the result is not set
The -result operation sets the correct sign, because the result cannot be zero.
The overflow is considered greater, since the result must be equal to INT_MIN so its sign bit is set. */
- FAIL_IF(push_inst(compiler, brc(0xe, 2 + 2)));
+ FAIL_IF(push_inst(compiler, brc(0xe, (op & SLJIT_32) ? (2 + 1) : (2 + 2))));
FAIL_IF(push_inst(compiler, (op & SLJIT_32) ? lcr(tmp1, dst_r) : lcgr(tmp1, dst_r)));
}
else if (op & SLJIT_SET_Z)
@@ -2573,7 +2619,7 @@ static sljit_s32 sljit_emit_multiply(struct sljit_compiler *compiler, sljit_s32
return emit_commutative(compiler, &multiply_overflow_forms, dst, src1, src1w, src2, src2w);
}
- if (src2 & SLJIT_IMM) {
+ if (src2 == SLJIT_IMM) {
if (is_s16(src2w)) {
ins = (op & SLJIT_32) ? 0xa70c0000 /* mhi */ : 0xa70d0000 /* mghi */;
return emit_ri(compiler, ins, dst, src1, src1w, src2w, RI_A);
@@ -2680,7 +2726,7 @@ static sljit_s32 sljit_emit_bitwise(struct sljit_compiler *compiler, sljit_s32 o
sljit_s32 type = GET_OPCODE(op);
const struct ins_forms *forms;
- if ((src2 & SLJIT_IMM) && (!(op & SLJIT_SET_Z) || (type == SLJIT_AND && dst == (sljit_s32)tmp0))) {
+ if (src2 == SLJIT_IMM && (!(op & SLJIT_SET_Z) || (type == SLJIT_AND && dst == TMP_REG2))) {
sljit_s32 count16 = 0;
sljit_uw imm = (sljit_uw)src2w;
@@ -2696,21 +2742,21 @@ static sljit_s32 sljit_emit_bitwise(struct sljit_compiler *compiler, sljit_s32 o
if ((imm & 0xffff000000000000ull) != 0)
count16++;
- if (type == SLJIT_AND && dst == (sljit_s32)tmp0 && count16 == 1) {
- sljit_gpr src_r = tmp0;
+ if (type == SLJIT_AND && dst == TMP_REG2 && count16 == 1) {
+ sljit_gpr src_r = tmp1;
if (FAST_IS_REG(src1))
src_r = gpr(src1 & REG_MASK);
else
- FAIL_IF(emit_move(compiler, tmp0, src1, src1w));
+ FAIL_IF(emit_move(compiler, tmp1, src1, src1w));
if ((imm & 0x000000000000ffffull) != 0 || imm == 0)
- return push_inst(compiler, 0xa7010000 | R20A(src_r) | imm);
+ return push_inst(compiler, 0xa7010000 /* tmll */ | R20A(src_r) | imm);
if ((imm & 0x00000000ffff0000ull) != 0)
- return push_inst(compiler, 0xa7000000 | R20A(src_r) | (imm >> 16));
+ return push_inst(compiler, 0xa7000000 /* tmlh */ | R20A(src_r) | (imm >> 16));
if ((imm & 0x0000ffff00000000ull) != 0)
- return push_inst(compiler, 0xa7030000 | R20A(src_r) | (imm >> 32));
- return push_inst(compiler, 0xa7020000 | R20A(src_r) | (imm >> 48));
+ return push_inst(compiler, 0xa7030000 /* tmhl */ | R20A(src_r) | (imm >> 32));
+ return push_inst(compiler, 0xa7020000 /* tmhh */ | R20A(src_r) | (imm >> 48));
}
if (!(op & SLJIT_SET_Z))
@@ -2744,7 +2790,7 @@ static sljit_s32 sljit_emit_shift(struct sljit_compiler *compiler, sljit_s32 op,
else
FAIL_IF(emit_move(compiler, tmp0, src1, src1w));
- if (!(src2 & SLJIT_IMM)) {
+ if (src2 != SLJIT_IMM) {
if (FAST_IS_REG(src2))
base_r = gpr(src2);
else {
@@ -2804,7 +2850,7 @@ static sljit_s32 sljit_emit_rotate(struct sljit_compiler *compiler, sljit_s32 op
else
FAIL_IF(emit_move(compiler, tmp0, src1, src1w));
- if (!(src2 & SLJIT_IMM)) {
+ if (src2 != SLJIT_IMM) {
if (FAST_IS_REG(src2))
base_r = gpr(src2);
else {
@@ -2814,7 +2860,7 @@ static sljit_s32 sljit_emit_rotate(struct sljit_compiler *compiler, sljit_s32 op
}
if (GET_OPCODE(op) == SLJIT_ROTR) {
- if (!(src2 & SLJIT_IMM)) {
+ if (src2 != SLJIT_IMM) {
ins = (op & SLJIT_32) ? 0x1300 /* lcr */ : 0xb9030000 /* lcgr */;
FAIL_IF(push_inst(compiler, ins | R4A(tmp1) | R0A(base_r)));
base_r = tmp1;
@@ -2822,7 +2868,7 @@ static sljit_s32 sljit_emit_rotate(struct sljit_compiler *compiler, sljit_s32 op
src2w = -src2w;
}
- if (src2 & SLJIT_IMM)
+ if (src2 == SLJIT_IMM)
imm = (sljit_ins)(src2w & ((op & SLJIT_32) ? 0x1f : 0x3f));
ins = (op & SLJIT_32) ? 0xeb000000001d /* rll */ : 0xeb000000001c /* rllg */;
@@ -2863,7 +2909,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compile
compiler->mode = op & SLJIT_32;
compiler->status_flags_state = op & (VARIABLE_FLAG_MASK | SLJIT_SET_Z);
- if (is_commutative(op) && (src1 & SLJIT_IMM) && !(src2 & SLJIT_IMM)) {
+ if (is_commutative(op) && src1 == SLJIT_IMM && src2 != SLJIT_IMM) {
src1 ^= src2;
src2 ^= src1;
src1 ^= src2;
@@ -2923,130 +2969,153 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2u(struct sljit_compiler *compil
sljit_s32 src1, sljit_sw src1w,
sljit_s32 src2, sljit_sw src2w)
{
+ sljit_s32 dst_reg = (GET_OPCODE(op) == SLJIT_SUB || GET_OPCODE(op) == SLJIT_AND) ? TMP_REG2 : TMP_REG1;
+
CHECK_ERROR();
CHECK(check_sljit_emit_op2(compiler, op, 1, 0, 0, src1, src1w, src2, src2w));
SLJIT_SKIP_CHECKS(compiler);
- return sljit_emit_op2(compiler, op, (sljit_s32)tmp0, 0, src1, src1w, src2, src2w);
+ return sljit_emit_op2(compiler, op, dst_reg, 0, src1, src1w, src2, src2w);
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_shift_into(struct sljit_compiler *compiler, sljit_s32 op,
- sljit_s32 src_dst,
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2r(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst_reg,
sljit_s32 src1, sljit_sw src1w,
sljit_s32 src2, sljit_sw src2w)
{
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_op2r(compiler, op, dst_reg, src1, src1w, src2, src2w));
+
+ switch (GET_OPCODE(op)) {
+ case SLJIT_MULADD:
+ SLJIT_SKIP_CHECKS(compiler);
+ FAIL_IF(sljit_emit_op2(compiler, SLJIT_MUL | (op & SLJIT_32), 0 /* tmp0 */, 0, src1, src1w, src2, src2w));
+ return push_inst(compiler, ((op & SLJIT_32) ? 0x1a00 /* ar */ : 0xb9080000 /* agr */) | R4A(gpr(dst_reg)) | R0A(tmp0));
+ }
+
+ return SLJIT_SUCCESS;
+}
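/* Editor's note (not part of the patch): a minimal C sketch of the behaviour
   the SLJIT_MULADD path above is expected to have, namely
   dst_reg = dst_reg + src1 * src2, with the product kept in a scratch
   register before the trailing ar/agr.  The helper name is illustrative and
   not part of the sljit API. */
#include <stdint.h>

static inline int64_t muladd_ref(int64_t dst, int64_t src1, int64_t src2)
{
	int64_t product = src1 * src2;	/* models the SLJIT_MUL into tmp0 */
	return dst + product;		/* models the ar/agr into dst_reg */
}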
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_shift_into(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst_reg,
+ sljit_s32 src1_reg,
+ sljit_s32 src2_reg,
+ sljit_s32 src3, sljit_sw src3w)
+{
sljit_s32 is_right;
sljit_sw bit_length = (op & SLJIT_32) ? 32 : 64;
- sljit_gpr src_dst_r = gpr(src_dst);
- sljit_gpr src1_r = tmp0;
- sljit_gpr src2_r = tmp1;
+ sljit_gpr dst_r = gpr(dst_reg);
+ sljit_gpr src1_r = gpr(src1_reg);
+ sljit_gpr src2_r = gpr(src2_reg);
+ sljit_gpr src3_r = tmp1;
sljit_ins ins;
CHECK_ERROR();
- CHECK(check_sljit_emit_shift_into(compiler, op, src_dst, src1, src1w, src2, src2w));
+ CHECK(check_sljit_emit_shift_into(compiler, op, dst_reg, src1_reg, src2_reg, src3, src3w));
is_right = (GET_OPCODE(op) == SLJIT_LSHR || GET_OPCODE(op) == SLJIT_MLSHR);
- if (src_dst == src1) {
+ if (src1_reg == src2_reg) {
SLJIT_SKIP_CHECKS(compiler);
- return sljit_emit_op2(compiler, (is_right ? SLJIT_ROTR : SLJIT_ROTL) | (op & SLJIT_32), src_dst, 0, src_dst, 0, src2, src2w);
+ return sljit_emit_op2(compiler, (is_right ? SLJIT_ROTR : SLJIT_ROTL) | (op & SLJIT_32), dst_reg, 0, src1_reg, 0, src3, src3w);
}
- ADJUST_LOCAL_OFFSET(src1, src1w);
- ADJUST_LOCAL_OFFSET(src2, src2w);
+ ADJUST_LOCAL_OFFSET(src3, src3w);
- if (src1 & SLJIT_MEM)
- FAIL_IF(load_word(compiler, tmp0, src1, src1w, op & SLJIT_32));
- else if (src1 & SLJIT_IMM)
- FAIL_IF(push_load_imm_inst(compiler, tmp0, src1w));
- else
- src1_r = gpr(src1);
+ if (src3 == SLJIT_IMM) {
+ src3w &= bit_length - 1;
- if (src2 & SLJIT_IMM) {
- src2w &= bit_length - 1;
-
- if (src2w == 0)
+ if (src3w == 0)
return SLJIT_SUCCESS;
- } else if (!(src2 & SLJIT_MEM))
- src2_r = gpr(src2);
- else
- FAIL_IF(load_word(compiler, tmp1, src2, src2w, op & SLJIT_32));
- if (src2 & SLJIT_IMM) {
if (op & SLJIT_32) {
- ins = is_right ? 0x88000000 /* srl */ : 0x89000000 /* sll */;
- FAIL_IF(push_inst(compiler, ins | R20A(src_dst_r) | (sljit_ins)src2w));
+ if (dst_r == src1_r) {
+ ins = is_right ? 0x88000000 /* srl */ : 0x89000000 /* sll */;
+ FAIL_IF(push_inst(compiler, ins | R20A(dst_r) | (sljit_ins)src3w));
+ } else {
+ ins = is_right ? 0xeb00000000de /* srlk */ : 0xeb00000000df /* sllk */;
+ FAIL_IF(push_inst(compiler, ins | R36A(dst_r) | R32A(src1_r) | ((sljit_ins)src3w << 16)));
+ }
} else {
ins = is_right ? 0xeb000000000c /* srlg */ : 0xeb000000000d /* sllg */;
- FAIL_IF(push_inst(compiler, ins | R36A(src_dst_r) | R32A(src_dst_r) | ((sljit_ins)src2w << 16)));
+ FAIL_IF(push_inst(compiler, ins | R36A(dst_r) | R32A(src1_r) | ((sljit_ins)src3w << 16)));
}
ins = 0xec0000000055 /* risbg */;
if (is_right) {
- src2w = bit_length - src2w;
- ins |= ((sljit_ins)(64 - bit_length) << 24) | ((sljit_ins)(63 - src2w) << 16) | ((sljit_ins)src2w << 8);
+ src3w = bit_length - src3w;
+ ins |= ((sljit_ins)(64 - bit_length) << 24) | ((sljit_ins)(63 - src3w) << 16) | ((sljit_ins)src3w << 8);
} else
- ins |= ((sljit_ins)(64 - src2w) << 24) | ((sljit_ins)63 << 16) | ((sljit_ins)src2w << 8);
+ ins |= ((sljit_ins)(64 - src3w) << 24) | ((sljit_ins)63 << 16) | ((sljit_ins)(src3w + 64 - bit_length) << 8);
- return push_inst(compiler, ins | R36A(src_dst_r) | R32A(src1_r));
+ return push_inst(compiler, ins | R36A(dst_r) | R32A(src2_r));
}
+ if (!(src3 & SLJIT_MEM)) {
+ src3_r = gpr(src3);
+
+ if (dst_r == src3_r) {
+ FAIL_IF(push_inst(compiler, 0x1800 /* lr */ | R4A(tmp1) | R0A(src3_r)));
+ src3_r = tmp1;
+ }
+ } else
+ FAIL_IF(load_word(compiler, tmp1, src3, src3w, op & SLJIT_32));
+
if (op & SLJIT_32) {
if (GET_OPCODE(op) == SLJIT_MSHL || GET_OPCODE(op) == SLJIT_MLSHR) {
- if (src2_r != tmp1) {
- FAIL_IF(push_inst(compiler, 0xec0000000055 /* risbg */ | R36A(tmp1) | R32A(src2_r) | (59 << 24) | (1 << 23) | (63 << 16)));
- src2_r = tmp1;
+ if (src3_r != tmp1) {
+ FAIL_IF(push_inst(compiler, 0xec0000000055 /* risbg */ | R36A(tmp1) | R32A(src3_r) | (59 << 24) | (1 << 23) | (63 << 16)));
+ src3_r = tmp1;
} else
FAIL_IF(push_inst(compiler, 0xa5070000 /* nill */ | R20A(tmp1) | 0x1f));
}
- ins = is_right ? 0x88000000 /* srl */ : 0x89000000 /* sll */;
- FAIL_IF(push_inst(compiler, ins | R20A(src_dst_r) | R12A(src2_r)));
+ if (dst_r == src1_r) {
+ ins = is_right ? 0x88000000 /* srl */ : 0x89000000 /* sll */;
+ FAIL_IF(push_inst(compiler, ins | R20A(dst_r) | R12A(src3_r)));
+ } else {
+ ins = is_right ? 0xeb00000000de /* srlk */ : 0xeb00000000df /* sllk */;
+ FAIL_IF(push_inst(compiler, ins | R36A(dst_r) | R32A(src1_r) | R28A(src3_r)));
+ }
- if (src2_r != tmp1) {
+ if (src3_r != tmp1) {
FAIL_IF(push_inst(compiler, 0xa50f0000 /* llill */ | R20A(tmp1) | 0x1f));
- FAIL_IF(push_inst(compiler, 0x1700 /* xr */ | R4A(tmp1) | R0A(src2_r)));
+ FAIL_IF(push_inst(compiler, 0x1700 /* xr */ | R4A(tmp1) | R0A(src3_r)));
} else
FAIL_IF(push_inst(compiler, 0xc00700000000 /* xilf */ | R36A(tmp1) | 0x1f));
- if (src1_r == tmp0) {
- ins = is_right ? 0x89000000 /* sll */ : 0x88000000 /* srl */;
- FAIL_IF(push_inst(compiler, ins | R20A(tmp0) | R12A(tmp1) | 0x1));
- } else {
- ins = is_right ? 0xeb00000000df /* sllk */ : 0xeb00000000de /* srlk */;
- FAIL_IF(push_inst(compiler, ins | R36A(tmp0) | R32A(src1_r) | R28A(tmp1) | (0x1 << 16)));
- }
+ ins = is_right ? 0xeb00000000df /* sllk */ : 0xeb00000000de /* srlk */;
+ FAIL_IF(push_inst(compiler, ins | R36A(tmp0) | R32A(src2_r) | R28A(tmp1) | (0x1 << 16)));
- return push_inst(compiler, 0x1600 /* or */ | R4A(src_dst_r) | R0A(tmp0));
+ return push_inst(compiler, 0x1600 /* or */ | R4A(dst_r) | R0A(tmp0));
}
ins = is_right ? 0xeb000000000c /* srlg */ : 0xeb000000000d /* sllg */;
- FAIL_IF(push_inst(compiler, ins | R36A(src_dst_r) | R32A(src_dst_r) | R28A(src2_r)));
+ FAIL_IF(push_inst(compiler, ins | R36A(dst_r) | R32A(src1_r) | R28A(src3_r)));
ins = is_right ? 0xeb000000000d /* sllg */ : 0xeb000000000c /* srlg */;
if (!(op & SLJIT_SHIFT_INTO_NON_ZERO)) {
- if (src2_r != tmp1)
+ if (src3_r != tmp1)
FAIL_IF(push_inst(compiler, 0xa50f0000 /* llill */ | R20A(tmp1) | 0x3f));
- FAIL_IF(push_inst(compiler, ins | R36A(tmp0) | R32A(src1_r) | (0x1 << 16)));
- src1_r = tmp0;
+ FAIL_IF(push_inst(compiler, ins | R36A(tmp0) | R32A(src2_r) | (0x1 << 16)));
+ src2_r = tmp0;
- if (src2_r != tmp1)
- FAIL_IF(push_inst(compiler, 0xb9820000 /* xgr */ | R4A(tmp1) | R0A(src2_r)));
+ if (src3_r != tmp1)
+ FAIL_IF(push_inst(compiler, 0xb9820000 /* xgr */ | R4A(tmp1) | R0A(src3_r)));
else
FAIL_IF(push_inst(compiler, 0xc00700000000 /* xilf */ | R36A(tmp1) | 0x3f));
} else
- FAIL_IF(push_inst(compiler, 0xb9030000 /* lcgr */ | R4A(tmp1) | R0A(src2_r)));
+ FAIL_IF(push_inst(compiler, 0xb9030000 /* lcgr */ | R4A(tmp1) | R0A(src3_r)));
- FAIL_IF(push_inst(compiler, ins | R36A(tmp0) | R32A(src1_r) | R28A(tmp1)));
- return push_inst(compiler, 0xb9810000 /* ogr */ | R4A(src_dst_r) | R0A(tmp0));
+ FAIL_IF(push_inst(compiler, ins | R36A(tmp0) | R32A(src2_r) | R28A(tmp1)));
+ return push_inst(compiler, 0xb9810000 /* ogr */ | R4A(dst_r) | R0A(tmp0));
}
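/* Editor's note (not part of the patch): a hedged reference model of the
   funnel shift emitted above, shown for the 64-bit left-shift case; dst
   receives src1 shifted by the (masked) amount, with the vacated bits
   filled from the opposite end of src2.  The right-shift case is symmetric.
   The helper below is illustrative only. */
#include <stdint.h>

static inline uint64_t shift_into_left_ref(uint64_t src1, uint64_t src2, unsigned n)
{
	n &= 63;	/* the shift amount is taken modulo the bit width */
	if (n == 0)
		return src1;
	return (src1 << n) | (src2 >> (64 - n));	/* sllg + srlg + ogr sequence */
}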
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_src(
- struct sljit_compiler *compiler,
- sljit_s32 op, sljit_s32 src, sljit_sw srcw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_src(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 src, sljit_sw srcw)
{
sljit_gpr src_r;
struct addr addr;
@@ -3077,16 +3146,46 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_src(
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 reg)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_dst(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw)
{
- CHECK_REG_INDEX(check_sljit_get_register_index(reg));
- return (sljit_s32)gpr(reg);
+ sljit_gpr dst_r = link_r;
+ sljit_s32 size;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_op_dst(compiler, op, dst, dstw));
+ ADJUST_LOCAL_OFFSET(dst, dstw);
+
+ switch (op) {
+ case SLJIT_FAST_ENTER:
+ if (FAST_IS_REG(dst))
+ return push_inst(compiler, lgr(gpr(dst), link_r));
+ break;
+ case SLJIT_GET_RETURN_ADDRESS:
+ dst_r = FAST_IS_REG(dst) ? gpr(dst) : tmp0;
+
+ size = GET_SAVED_REGISTERS_SIZE(compiler->scratches, compiler->saveds - SLJIT_KEPT_SAVEDS_COUNT(compiler->options), 2);
+ FAIL_IF(load_word(compiler, dst_r, SLJIT_MEM1(SLJIT_SP), compiler->local_size + size, 0));
+ break;
+ }
+
+ if (dst & SLJIT_MEM)
+ return store_word(compiler, dst_r, dst, dstw, 0);
+
+ return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_float_register_index(sljit_s32 reg)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 type, sljit_s32 reg)
{
- CHECK_REG_INDEX(check_sljit_get_float_register_index(reg));
- return (sljit_s32)fgpr(reg);
+ CHECK_REG_INDEX(check_sljit_get_register_index(type, reg));
+
+ if (type == SLJIT_GP_REGISTER)
+ return (sljit_s32)gpr(reg);
+
+ if (type != SLJIT_FLOAT_REGISTER)
+ return -1;
+
+ return (sljit_s32)freg_map[reg];
}
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *compiler,
@@ -3177,33 +3276,61 @@ static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_comp
return SLJIT_SUCCESS;
}
-static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_sw(struct sljit_compiler *compiler, sljit_s32 op,
+static sljit_s32 sljit_emit_fop1_conv_f64_from_w(struct sljit_compiler *compiler, sljit_ins ins,
sljit_s32 dst, sljit_sw dstw,
sljit_s32 src, sljit_sw srcw)
{
sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
- sljit_ins ins;
- if (src & SLJIT_IMM) {
+ if (src == SLJIT_IMM) {
FAIL_IF(push_load_imm_inst(compiler, tmp0, srcw));
src = (sljit_s32)tmp0;
}
else if (src & SLJIT_MEM) {
- FAIL_IF(load_word(compiler, tmp0, src, srcw, GET_OPCODE(op) >= SLJIT_CONV_F64_FROM_S32));
+ FAIL_IF(load_word(compiler, tmp0, src, srcw, ins & 0x100000));
src = (sljit_s32)tmp0;
}
+ FAIL_IF(push_inst(compiler, ins | F4(dst_r) | R0(src)));
+
+ if (dst & SLJIT_MEM)
+ return float_mem(compiler, FLOAT_STORE | ((ins & 0x10000) ? 0 : SLJIT_32), TMP_FREG1, dst, dstw);
+
+ return SLJIT_SUCCESS;
+}
+
+static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_sw(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
+{
+ sljit_ins ins;
+
+ if (src == SLJIT_IMM && GET_OPCODE(op) == SLJIT_CONV_F64_FROM_S32)
+ srcw = (sljit_s32)srcw;
+
if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_SW)
ins = (op & SLJIT_32) ? 0xb3a40000 /* cegbr */ : 0xb3a50000 /* cdgbr */;
else
ins = (op & SLJIT_32) ? 0xb3940000 /* cefbr */ : 0xb3950000 /* cdfbr */;
- FAIL_IF(push_inst(compiler, ins | F4(dst_r) | R0(src)));
+ return sljit_emit_fop1_conv_f64_from_w(compiler, ins, dst, dstw, src, srcw);
+}
- if (dst & SLJIT_MEM)
- return float_mem(compiler, FLOAT_STORE | (op & SLJIT_32), TMP_FREG1, dst, dstw);
+static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_uw(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
+{
+ sljit_ins ins;
- return SLJIT_SUCCESS;
+ if (src == SLJIT_IMM && GET_OPCODE(op) == SLJIT_CONV_F64_FROM_U32)
+ srcw = (sljit_u32)srcw;
+
+ if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_UW)
+ ins = (op & SLJIT_32) ? 0xb3a00000 /* celgbr */ : 0xb3a10000 /* cdlgbr */;
+ else
+ ins = (op & SLJIT_32) ? 0xb3900000 /* celfbr */ : 0xb3910000 /* cdlfbr */;
+
+ return sljit_emit_fop1_conv_f64_from_w(compiler, ins, dst, dstw, src, srcw);
}
static SLJIT_INLINE sljit_s32 sljit_emit_fop1_cmp(struct sljit_compiler *compiler, sljit_s32 op,
@@ -3275,12 +3402,10 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compil
FAIL_IF(push_inst(compiler, ins | F4(dst_r) | F0(src)));
}
- if (!(dst & SLJIT_MEM))
- return SLJIT_SUCCESS;
-
- SLJIT_ASSERT(dst_r == TMP_FREG1);
+ if (dst & SLJIT_MEM)
+ return float_mem(compiler, FLOAT_STORE | (op & SLJIT_32), TMP_FREG1, dst, dstw);
- return float_mem(compiler, FLOAT_STORE | (op & SLJIT_32), TMP_FREG1, dst, dstw);
+ return SLJIT_SUCCESS;
}
#define FLOAT_MOV(op, dst_r, src_r) \
@@ -3351,25 +3476,94 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compil
if (dst & SLJIT_MEM)
return float_mem(compiler, FLOAT_STORE | (op & SLJIT_32), TMP_FREG1, dst, dstw);
- SLJIT_ASSERT(dst_r != TMP_FREG1);
return SLJIT_SUCCESS;
}
-/* --------------------------------------------------------------------- */
-/* Other instructions */
-/* --------------------------------------------------------------------- */
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2r(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst_freg,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
+{
+ sljit_s32 reg;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_fop2r(compiler, op, dst_freg, src1, src1w, src2, src2w));
+ ADJUST_LOCAL_OFFSET(src1, src1w);
+ ADJUST_LOCAL_OFFSET(src2, src2w);
+
+ if (src2 & SLJIT_MEM) {
+ FAIL_IF(float_mem(compiler, FLOAT_LOAD | (op & SLJIT_32), TMP_FREG1, src2, src2w));
+ src2 = TMP_FREG1;
+ }
+
+ if (src1 & SLJIT_MEM) {
+ reg = (dst_freg == src2) ? TMP_FREG1 : dst_freg;
+ FAIL_IF(float_mem(compiler, FLOAT_LOAD | (op & SLJIT_32), reg, src1, src1w));
+ src1 = reg;
+ }
+
+ return push_inst(compiler, 0xb3720000 /* cpsdr */ | F12(src2) | F4(dst_freg) | F0(src1));
+}
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fset32(struct sljit_compiler *compiler,
+ sljit_s32 freg, sljit_f32 value)
{
+ union {
+ sljit_s32 imm;
+ sljit_f32 value;
+ } u;
+
CHECK_ERROR();
- CHECK(check_sljit_emit_fast_enter(compiler, dst, dstw));
- ADJUST_LOCAL_OFFSET(dst, dstw);
+ CHECK(check_sljit_emit_fset32(compiler, freg, value));
+
+ u.value = value;
+
+ FAIL_IF(push_load_imm_inst(compiler, tmp1, (sljit_sw)(((sljit_uw)u.imm << 32))));
+ return push_inst(compiler, 0xb3c10000 /* ldgr */ | F4(freg) | R0A(tmp1));
+}
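/* Editor's note (not part of the patch): sljit_emit_fset32 above routes the
   float immediate through a general register and ldgr.  On s390x a short
   (32-bit) floating-point operand occupies the leftmost 32 bits of the FPR,
   which is why the bit pattern is shifted left by 32 before the transfer.
   A hedged host-side sketch of that type-punning step (helper name is
   illustrative): */
#include <stdint.h>
#include <string.h>

static inline uint64_t f32_to_fpr_bits(float value)
{
	uint32_t bits;

	memcpy(&bits, &value, sizeof(bits));	/* same effect as the union above */
	return (uint64_t)bits << 32;		/* f32 lives in the FPR's high half */
}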
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fset64(struct sljit_compiler *compiler,
+ sljit_s32 freg, sljit_f64 value)
+{
+ union {
+ sljit_sw imm;
+ sljit_f64 value;
+ } u;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_fset64(compiler, freg, value));
+
+ u.value = value;
+
+ FAIL_IF(push_load_imm_inst(compiler, tmp1, (sljit_sw)u.imm));
+ return push_inst(compiler, 0xb3c10000 /* ldgr */ | F4(freg) | R0A(tmp1));
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fcopy(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 freg, sljit_s32 reg)
+{
+ sljit_gpr gen_r;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_fcopy(compiler, op, freg, reg));
+
+ gen_r = gpr(reg);
+
+ if (GET_OPCODE(op) == SLJIT_COPY_TO_F64) {
+ if (op & SLJIT_32) {
+ FAIL_IF(push_inst(compiler, 0xeb000000000d /* sllg */ | R36A(tmp0) | R32A(gen_r) | (32 << 16)));
+ gen_r = tmp0;
+ }
+
+ return push_inst(compiler, 0xb3c10000 /* ldgr */ | F4(freg) | R0A(gen_r));
+ }
+
+ FAIL_IF(push_inst(compiler, 0xb3cd0000 /* lgdr */ | R4A(gen_r) | F0(freg)));
- if (FAST_IS_REG(dst))
- return push_inst(compiler, lgr(gpr(dst), link_r));
+ if (!(op & SLJIT_32))
+ return SLJIT_SUCCESS;
- /* memory */
- return store_word(compiler, link_r, dst, dstw, 0);
+ return push_inst(compiler, 0xeb000000000c /* srlg */ | R36A(gen_r) | R32A(gen_r) | (32 << 16));
}
/* --------------------------------------------------------------------- */
@@ -3394,14 +3588,14 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emit_label(struct sljit_compi
SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, sljit_s32 type)
{
+ struct sljit_jump *jump;
sljit_u8 mask = ((type & 0xff) < SLJIT_JUMP) ? get_cc(compiler, type & 0xff) : 0xf;
CHECK_ERROR_PTR();
CHECK_PTR(check_sljit_emit_jump(compiler, type));
/* record jump */
- struct sljit_jump *jump = (struct sljit_jump *)
- ensure_abuf(compiler, sizeof(struct sljit_jump));
+ jump = (struct sljit_jump *)ensure_abuf(compiler, sizeof(struct sljit_jump));
PTR_FAIL_IF(!jump);
set_jump(jump, compiler, type & SLJIT_REWRITABLE_JUMP);
jump->addr = compiler->size;
@@ -3439,7 +3633,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compi
CHECK_ERROR();
CHECK(check_sljit_emit_ijump(compiler, type, src, srcw));
- if (src & SLJIT_IMM) {
+ if (src == SLJIT_IMM) {
SLJIT_ASSERT(!(srcw & 1)); /* target address must be even */
FAIL_IF(push_load_imm_inst(compiler, src_r, srcw));
}
@@ -3459,6 +3653,8 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_icall(struct sljit_compiler *compi
sljit_s32 arg_types,
sljit_s32 src, sljit_sw srcw)
{
+ SLJIT_UNUSED_ARG(arg_types);
+
CHECK_ERROR();
CHECK(check_sljit_emit_icall(compiler, type, arg_types, src, srcw));
@@ -3490,13 +3686,13 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *co
sljit_s32 dst, sljit_sw dstw,
sljit_s32 type)
{
+ sljit_gpr dst_r = FAST_IS_REG(dst) ? gpr(dst & REG_MASK) : tmp0;
+ sljit_gpr loc_r = tmp1;
sljit_u8 mask = get_cc(compiler, type);
CHECK_ERROR();
CHECK(check_sljit_emit_op_flags(compiler, op, dst, dstw, type));
- sljit_gpr dst_r = FAST_IS_REG(dst) ? gpr(dst & REG_MASK) : tmp0;
- sljit_gpr loc_r = tmp1;
switch (GET_OPCODE(op)) {
case SLJIT_AND:
case SLJIT_OR:
@@ -3526,8 +3722,9 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *co
FAIL_IF(push_inst(compiler,
WHEN2(op & SLJIT_32, lochi, locghi)));
} else {
- /* TODO(mundaym): no load/store-on-condition 2 facility (ipm? branch-and-set?) */
- abort();
+ FAIL_IF(push_load_imm_inst(compiler, loc_r, 1));
+ FAIL_IF(push_inst(compiler, brc(mask, 2 + 2)));
+ FAIL_IF(push_load_imm_inst(compiler, loc_r, 0));
}
#undef LEVAL
@@ -3556,37 +3753,125 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *co
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_cmov(struct sljit_compiler *compiler, sljit_s32 type,
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_select(struct sljit_compiler *compiler, sljit_s32 type,
sljit_s32 dst_reg,
- sljit_s32 src, sljit_sw srcw)
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2_reg)
{
- sljit_ins mask = get_cc(compiler, type & ~SLJIT_32);
+ sljit_ins mask;
sljit_gpr src_r;
+ sljit_gpr dst_r = gpr(dst_reg);
sljit_ins ins;
CHECK_ERROR();
- CHECK(check_sljit_emit_cmov(compiler, type, dst_reg, src, srcw));
+ CHECK(check_sljit_emit_select(compiler, type, dst_reg, src1, src1w, src2_reg));
- if (type & SLJIT_32)
- srcw = (sljit_s32)srcw;
+ ADJUST_LOCAL_OFFSET(src1, src1w);
- if (have_lscond2() && (src & SLJIT_IMM) && is_s16(srcw)) {
- ins = (type & SLJIT_32) ? 0xec0000000042 /* lochi */ : 0xec0000000046 /* locghi */;
- return push_inst(compiler, ins | R36A(gpr(dst_reg)) | (mask << 32) | (sljit_ins)(srcw & 0xffff) << 16);
+ if (dst_reg != src2_reg) {
+ if (src1 == dst_reg) {
+ src1 = src2_reg;
+ src1w = 0;
+ type ^= 0x1;
+ } else {
+ if (ADDRESSING_DEPENDS_ON(src1, dst_reg)) {
+ FAIL_IF(load_word(compiler, dst_r, src1, src1w, type & SLJIT_32));
+ src1 = src2_reg;
+ src1w = 0;
+ type ^= 0x1;
+ } else
+ FAIL_IF(push_inst(compiler, ((type & SLJIT_32) ? 0x1800 /* lr */ : 0xb9040000 /* lgr */) | R4A(dst_r) | R0A(gpr(src2_reg))));
+ }
}
- if (src & SLJIT_IMM) {
- FAIL_IF(push_load_imm_inst(compiler, tmp0, srcw));
- src_r = tmp0;
+ mask = get_cc(compiler, type & ~SLJIT_32);
+
+ if (src1 & SLJIT_MEM) {
+ if (src1 & OFFS_REG_MASK) {
+ src_r = gpr(OFFS_REG(src1));
+
+ if (src1w != 0) {
+ FAIL_IF(push_inst(compiler, 0xeb000000000d /* sllg */ | R36A(tmp1) | R32A(src_r) | ((sljit_ins)(src1w & 0x3) << 16)));
+ src_r = tmp1;
+ }
+
+ FAIL_IF(push_inst(compiler, 0xb9e80000 /* agrk */ | R12A(src_r) | R4A(tmp1) | R0A(gpr(src1 & REG_MASK))));
+ src_r = tmp1;
+ src1w = 0;
+ } else if (!is_s20(src1w)) {
+ FAIL_IF(push_load_imm_inst(compiler, tmp1, src1w));
+
+ if (src1 & REG_MASK)
+ FAIL_IF(push_inst(compiler, 0xb9e80000 /* agrk */ | R12A(tmp1) | R4A(tmp1) | R0A(gpr(src1 & REG_MASK))));
+
+ src_r = tmp1;
+ src1w = 0;
+ } else
+ src_r = gpr(src1 & REG_MASK);
+
+ ins = (type & SLJIT_32) ? 0xeb00000000f2 /* loc */ : 0xeb00000000e2 /* locg */;
+ return push_inst(compiler, ins | R36A(dst_r) | (mask << 32) | R28A(src_r) | disp_s20((sljit_s32)src1w));
+ }
+
+ if (src1 == SLJIT_IMM) {
+ if (type & SLJIT_32)
+ src1w = (sljit_s32)src1w;
+
+ if (have_lscond2() && is_s16(src1w)) {
+ ins = (type & SLJIT_32) ? 0xec0000000042 /* lochi */ : 0xec0000000046 /* locghi */;
+ return push_inst(compiler, ins | R36A(dst_r) | (mask << 32) | (sljit_ins)(src1w & 0xffff) << 16);
+ }
+
+ FAIL_IF(push_load_imm_inst(compiler, tmp1, src1w));
+ src_r = tmp1;
} else
- src_r = gpr(src);
+ src_r = gpr(src1);
+
+ ins = (type & SLJIT_32) ? 0xb9f20000 /* locr */ : 0xb9e20000 /* locgr */;
+ return push_inst(compiler, ins | (mask << 12) | R4A(dst_r) | R0A(src_r));
+}
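/* Editor's note (not part of the patch): the locr/locgr based sequence above
   is a branch-free conditional move; src2_reg supplies the value used when
   the condition is false, which is why it is copied into dst_reg before the
   conditional load.  A hedged scalar model with an illustrative helper name: */
#include <stdint.h>

static inline int64_t select_ref(int cond_true, int64_t src1, int64_t src2_reg)
{
	int64_t dst = src2_reg;	/* load-on-condition leaves dst untouched otherwise */

	if (cond_true)
		dst = src1;	/* models the locr/locgr write */
	return dst;
}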
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fselect(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 dst_freg,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2_freg)
+{
+ sljit_ins ins;
+ struct sljit_label *label;
+ struct sljit_jump *jump;
- if (have_lscond1()) {
- ins = (type & SLJIT_32) ? 0xb9f20000 /* locr */ : 0xb9e20000 /* locgr */;
- return push_inst(compiler, ins | (mask << 12) | R4A(gpr(dst_reg)) | R0A(src_r));
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_fselect(compiler, type, dst_freg, src1, src1w, src2_freg));
+
+ ADJUST_LOCAL_OFFSET(src1, src1w);
+
+ if (dst_freg != src2_freg) {
+ if (dst_freg == src1) {
+ src1 = src2_freg;
+ src1w = 0;
+ type ^= 0x1;
+ } else {
+ ins = (type & SLJIT_32) ? 0x3800 /* ler */ : 0x2800 /* ldr */;
+ FAIL_IF(push_inst(compiler, ins | F4(dst_freg) | F0(src2_freg)));
+ }
}
- return sljit_emit_cmov_generic(compiler, type, dst_reg, src, srcw);
+ SLJIT_SKIP_CHECKS(compiler);
+ jump = sljit_emit_jump(compiler, (type & ~SLJIT_32) ^ 0x1);
+ FAIL_IF(!jump);
+
+ if (!(src1 & SLJIT_MEM)) {
+ ins = (type & SLJIT_32) ? 0x3800 /* ler */ : 0x2800 /* ldr */;
+ FAIL_IF(push_inst(compiler, ins | F4(dst_freg) | F0(src1)));
+ } else
+ FAIL_IF(float_mem(compiler, FLOAT_LOAD | (type & SLJIT_32), dst_freg, src1, src1w));
+
+ SLJIT_SKIP_CHECKS(compiler);
+ label = sljit_emit_label(compiler);
+ FAIL_IF(!label);
+
+ sljit_set_label(jump, label);
+ return SLJIT_SUCCESS;
}
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_mem(struct sljit_compiler *compiler, sljit_s32 type,
@@ -3648,6 +3933,502 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_mem(struct sljit_compiler *compile
return push_inst(compiler, ins | R36A(reg2) | disp_s20((sljit_s32)memw + SSIZE_OF(sw)));
}
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_mov(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
+ sljit_s32 srcdst, sljit_sw srcdstw)
+{
+ sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
+ sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
+ sljit_s32 alignment = SLJIT_SIMD_GET_ELEM2_SIZE(type);
+ struct addr addr;
+ sljit_ins ins;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_mov(compiler, type, freg, srcdst, srcdstw));
+
+ ADJUST_LOCAL_OFFSET(srcdst, srcdstw);
+
+ if (reg_size != 4)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if ((type & SLJIT_SIMD_FLOAT) && (elem_size < 2 || elem_size > 3))
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ if (!(srcdst & SLJIT_MEM)) {
+ if (type & SLJIT_SIMD_STORE)
+ ins = F36(srcdst) | F32(freg);
+ else
+ ins = F36(freg) | F32(srcdst);
+
+ return push_inst(compiler, 0xe70000000056 /* vlr */ | ins);
+ }
+
+ FAIL_IF(make_addr_bx(compiler, &addr, srcdst, srcdstw, tmp1));
+ ins = F36(freg) | R32A(addr.index) | R28A(addr.base) | disp_s20(addr.offset);
+
+ if (alignment >= 4)
+ ins |= 4 << 12;
+ else if (alignment == 3)
+ ins |= 3 << 12;
+
+ return push_inst(compiler, ((type & SLJIT_SIMD_STORE) ? 0xe7000000000e /* vst */ : 0xe70000000006 /* vl */) | ins);
+}
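/* Editor's note (not part of the patch): in the vl/vst encoding built above
   the field at bits 12-15 carries an alignment hint; the values used here
   (3 for an 8-byte aligned and 4 for a 16-byte aligned operand) appear to
   follow the usual z/Architecture convention.  A hedged helper mirroring
   that mapping: */
static inline unsigned vl_alignment_hint(unsigned log2_alignment)
{
	if (log2_alignment >= 4)
		return 4;	/* quadword aligned */
	return log2_alignment == 3 ? 3 : 0;	/* doubleword aligned or no hint */
}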
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_replicate(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
+ sljit_s32 src, sljit_sw srcw)
+{
+ sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
+ sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
+ struct addr addr;
+ sljit_gpr reg;
+ sljit_sw sign_ext;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_replicate(compiler, type, freg, src, srcw));
+
+ ADJUST_LOCAL_OFFSET(src, srcw);
+
+ if (reg_size != 4)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if ((type & SLJIT_SIMD_FLOAT) && elem_size < 2)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ if (src & SLJIT_MEM) {
+ FAIL_IF(make_addr_bx(compiler, &addr, src, srcw, tmp1));
+ return push_inst(compiler, 0xe70000000005 /* vlrep */ | F36(freg)
+ | R32A(addr.index) | R28A(addr.base) | disp_s20(addr.offset) | ((sljit_ins)elem_size << 12));
+ }
+
+ if (type & SLJIT_SIMD_FLOAT) {
+ if (src == SLJIT_IMM)
+ return push_inst(compiler, 0xe70000000044 /* vgbm */ | F36(freg));
+
+ return push_inst(compiler, 0xe7000000004d /* vrep */ | F36(freg) | F32(src) | ((sljit_ins)elem_size << 12));
+ }
+
+ if (src == SLJIT_IMM) {
+ sign_ext = 0x10000;
+
+ switch (elem_size) {
+ case 0:
+ srcw &= 0xff;
+ sign_ext = (sljit_s8)srcw;
+ break;
+ case 1:
+ srcw &= 0xffff;
+ sign_ext = (sljit_s16)srcw;
+ break;
+ case 2:
+ if ((sljit_s32)srcw == (sljit_s16)srcw) {
+ srcw &= 0xffff;
+ sign_ext = (sljit_s16)srcw;
+ } else
+ srcw &= 0xffffffff;
+ break;
+ default:
+ if (srcw == (sljit_s16)srcw) {
+ srcw &= 0xffff;
+ sign_ext = (sljit_s16)srcw;
+ }
+ break;
+ }
+
+ if (sign_ext != 0x10000) {
+ if (sign_ext == 0 || sign_ext == -1)
+ return push_inst(compiler, 0xe70000000044 /* vgbm */ | F36(freg)
+ | (sign_ext == 0 ? 0 : ((sljit_ins)0xffff << 16)));
+
+ return push_inst(compiler, 0xe70000000045 /* vrepi */ | F36(freg)
+ | ((sljit_ins)srcw << 16) | ((sljit_ins)elem_size << 12));
+ }
+
+ push_load_imm_inst(compiler, tmp0, srcw);
+ reg = tmp0;
+ } else
+ reg = gpr(src);
+
+ FAIL_IF(push_inst(compiler, 0xe70000000022 /* vlvg */ | F36(freg) | R32A(reg) | ((sljit_ins)elem_size << 12)));
+ return push_inst(compiler, 0xe7000000004d /* vrep */ | F36(freg) | F32(freg) | ((sljit_ins)elem_size << 12));
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_lane_mov(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg, sljit_s32 lane_index,
+ sljit_s32 srcdst, sljit_sw srcdstw)
+{
+ sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
+ sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
+ struct addr addr;
+ sljit_gpr reg;
+ sljit_ins ins = 0;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_lane_mov(compiler, type, freg, lane_index, srcdst, srcdstw));
+
+ ADJUST_LOCAL_OFFSET(srcdst, srcdstw);
+
+ if (reg_size != 4)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if ((type & SLJIT_SIMD_FLOAT) && elem_size < 2)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ if (srcdst & SLJIT_MEM) {
+ FAIL_IF(make_addr_bx(compiler, &addr, srcdst, srcdstw, tmp1));
+ ins = F36(freg) | R32A(addr.index) | R28A(addr.base) | disp_s20(addr.offset);
+ }
+
+ if (type & SLJIT_SIMD_LANE_ZERO) {
+ if ((srcdst & SLJIT_MEM) && lane_index == ((1 << (3 - elem_size)) - 1))
+ return push_inst(compiler, 0xe70000000004 /* vllez */ | ins | ((sljit_ins)elem_size << 12));
+
+ if ((type & SLJIT_SIMD_FLOAT) && freg == srcdst) {
+ FAIL_IF(push_inst(compiler, 0xe70000000056 /* vlr */ | F36(TMP_FREG1) | F32(freg)));
+ srcdst = TMP_FREG1;
+ srcdstw = 0;
+ }
+
+ FAIL_IF(push_inst(compiler, 0xe70000000044 /* vgbm */ | F36(freg)));
+ }
+
+ if (srcdst & SLJIT_MEM) {
+ switch (elem_size) {
+ case 0:
+ ins |= 0xe70000000000 /* vleb */;
+ break;
+ case 1:
+ ins |= 0xe70000000001 /* vleh */;
+ break;
+ case 2:
+ ins |= 0xe70000000003 /* vlef */;
+ break;
+ default:
+ ins |= 0xe70000000002 /* vleg */;
+ break;
+ }
+
+ /* Convert to vsteb - vsteg */
+ if (type & SLJIT_SIMD_STORE)
+ ins |= 0x8;
+
+ return push_inst(compiler, ins | ((sljit_ins)lane_index << 12));
+ }
+
+ if (type & SLJIT_SIMD_FLOAT) {
+ if (type & SLJIT_SIMD_STORE)
+ return push_inst(compiler, 0xe7000000004d /* vrep */ | F36(srcdst) | F32(freg) | ((sljit_ins)lane_index << 16) | ((sljit_ins)elem_size << 12));
+
+ if (elem_size == 3) {
+ if (lane_index == 0)
+ ins = F32(srcdst) | F28(freg) | (1 << 12);
+ else
+ ins = F32(freg) | F28(srcdst);
+
+ return push_inst(compiler, 0xe70000000084 /* vpdi */ | F36(freg) | ins);
+ }
+
+ FAIL_IF(push_inst(compiler, 0xe70000000021 /* vlgv */ | R36A(tmp0) | F32(srcdst) | ((sljit_ins)2 << 12)));
+ return push_inst(compiler, 0xe70000000022 /* vlvg */ | F36(freg) | R32A(tmp0) | ((sljit_ins)lane_index << 16) | ((sljit_ins)2 << 12));
+ }
+
+ if (srcdst == SLJIT_IMM) {
+ switch (elem_size) {
+ case 0:
+ ins = 0xe70000000040 /* vleib */;
+ srcdstw &= 0xff;
+ break;
+ case 1:
+ ins = 0xe70000000041 /* vleih */;
+ srcdstw &= 0xffff;
+ break;
+ case 2:
+ if ((sljit_s32)srcdstw == (sljit_s16)srcdstw) {
+ srcdstw &= 0xffff;
+ ins = 0xe70000000043 /* vleif */;
+ } else
+ srcdstw &= 0xffffffff;
+ break;
+ default:
+ if (srcdstw == (sljit_s16)srcdstw) {
+ srcdstw &= 0xffff;
+ ins = 0xe70000000042 /* vleig */;
+ }
+ break;
+ }
+
+ if (ins != 0)
+ return push_inst(compiler, ins | F36(freg) | ((sljit_ins)srcdstw << 16) | ((sljit_ins)lane_index << 12));
+
+ push_load_imm_inst(compiler, tmp0, srcdstw);
+ reg = tmp0;
+ } else
+ reg = gpr(srcdst);
+
+ ins = ((sljit_ins)lane_index << 16) | ((sljit_ins)elem_size << 12);
+
+ if (!(type & SLJIT_SIMD_STORE))
+ return push_inst(compiler, 0xe70000000022 /* vlvg */ | F36(freg) | R32A(reg) | ins);
+
+ FAIL_IF(push_inst(compiler, 0xe70000000021 /* vlgv */ | R36A(reg) | F32(freg) | ins));
+
+ if (!(type & SLJIT_SIMD_LANE_SIGNED) || elem_size >= 3)
+ return SLJIT_SUCCESS;
+
+ switch (elem_size) {
+ case 0:
+ ins = 0xb9060000 /* lgbr */;
+ break;
+ case 1:
+ ins = 0xb9070000 /* lghr */;
+ break;
+ default:
+ ins = 0xb9140000 /* lgfr */;
+ break;
+ }
+
+ return push_inst(compiler, ins | R4A(reg) | R0A(reg));
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_lane_replicate(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
+ sljit_s32 src, sljit_s32 src_lane_index)
+{
+ sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
+ sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_lane_replicate(compiler, type, freg, src, src_lane_index));
+
+ if (reg_size != 4)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if ((type & SLJIT_SIMD_FLOAT) && elem_size < 2)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ return push_inst(compiler, 0xe7000000004d /* vrep */ | F36(freg) | F32(src)
+ | ((sljit_ins)src_lane_index << 16) | ((sljit_ins)elem_size << 12));
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_extend(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
+ sljit_s32 src, sljit_sw srcw)
+{
+ sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
+ sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
+ sljit_s32 elem2_size = SLJIT_SIMD_GET_ELEM2_SIZE(type);
+ struct addr addr;
+ sljit_ins ins;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_extend(compiler, type, freg, src, srcw));
+
+ ADJUST_LOCAL_OFFSET(src, srcw);
+
+ if (reg_size != 4)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if ((type & SLJIT_SIMD_FLOAT) && elem_size < 2)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ if (src & SLJIT_MEM) {
+ FAIL_IF(make_addr_bx(compiler, &addr, src, srcw, tmp1));
+ ins = F36(freg) | R32A(addr.index) | R28A(addr.base) | disp_s20(addr.offset);
+
+ switch (elem2_size - elem_size) {
+ case 1:
+ ins |= 0xe70000000002 /* vleg */;
+ break;
+ case 2:
+ ins |= 0xe70000000003 /* vlef */;
+ break;
+ default:
+ ins |= 0xe70000000001 /* vleh */;
+ break;
+ }
+
+ FAIL_IF(push_inst(compiler, ins));
+ src = freg;
+ }
+
+ if (type & SLJIT_SIMD_FLOAT) {
+ FAIL_IF(push_inst(compiler, 0xe700000000d5 /* vuplh */ | F36(freg) | F32(src) | (2 << 12)));
+ FAIL_IF(push_inst(compiler, 0xe70000000030 /* vesl */ | F36(freg) | F32(freg) | (32 << 16) | (3 << 12)));
+ return push_inst(compiler, 0xe700000000c4 /* vfll */ | F36(freg) | F32(freg) | (2 << 12));
+ }
+
+ ins = ((type & SLJIT_SIMD_EXTEND_SIGNED) ? 0xe700000000d7 /* vuph */ : 0xe700000000d5 /* vuplh */) | F36(freg);
+
+ do {
+ FAIL_IF(push_inst(compiler, ins | F32(src) | ((sljit_ins)elem_size << 12)));
+ src = freg;
+ } while (++elem_size < elem2_size);
+
+ return SLJIT_SUCCESS;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_sign(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
+ sljit_s32 dst, sljit_sw dstw)
+{
+ sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
+ sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
+ sljit_gpr dst_r;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_sign(compiler, type, freg, dst, dstw));
+
+ ADJUST_LOCAL_OFFSET(dst, dstw);
+
+ if (reg_size != 4)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if ((type & SLJIT_SIMD_FLOAT) && elem_size < 2)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ switch (elem_size) {
+ case 0:
+ push_load_imm_inst(compiler, tmp0, (sljit_sw)0x4048505860687078);
+ push_load_imm_inst(compiler, tmp1, (sljit_sw)0x0008101820283038);
+ FAIL_IF(push_inst(compiler, 0xe70000000062 /* vlvgp */ | F36(TMP_FREG1) | R32A(tmp1) | R28A(tmp0)));
+ break;
+ case 1:
+ push_load_imm_inst(compiler, tmp0, (sljit_sw)0x0010203040506070);
+ break;
+ case 2:
+ push_load_imm_inst(compiler, tmp0, (sljit_sw)0x8080808000204060);
+ break;
+ default:
+ push_load_imm_inst(compiler, tmp0, (sljit_sw)0x8080808080800040);
+ break;
+ }
+
+ if (elem_size != 0)
+ FAIL_IF(push_inst(compiler, 0xe70000000022 /* vlvg */ | F36(TMP_FREG1) | R32A(tmp0) | (1 << 16) | (3 << 12)));
+
+ FAIL_IF(push_inst(compiler, 0xe70000000085 /* vbperm */ | F36(TMP_FREG1) | F32(freg) | F28(TMP_FREG1)));
+
+ dst_r = FAST_IS_REG(dst) ? gpr(dst) : tmp0;
+ FAIL_IF(push_inst(compiler, 0xe70000000021 /* vlgv */ | R36A(dst_r) | F32(TMP_FREG1)
+ | (elem_size == 0 ? ((3 << 16) | (1 << 12)) : (7 << 16))));
+
+ if (dst_r == tmp0)
+ return store_word(compiler, tmp0, dst, dstw, type & SLJIT_32);
+
+ return SLJIT_SUCCESS;
+}
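/* Editor's note (not part of the patch): a scalar reference for what the
   vbperm sequence above is meant to compute - a "movemask"-style value in
   which bit i of the destination is the sign bit of lane i (shown here for
   sixteen byte-sized lanes; the loaded constants select the matching bit
   positions for vbperm).  Illustrative only. */
#include <stdint.h>

static inline unsigned simd_sign_bytes_ref(const uint8_t lanes[16])
{
	unsigned mask = 0;
	int i;

	for (i = 0; i < 16; i++)
		mask |= (unsigned)(lanes[i] >> 7) << i;	/* collect each sign bit */
	return mask;
}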
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_op2(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 dst_freg, sljit_s32 src1_freg, sljit_s32 src2_freg)
+{
+ sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
+ sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
+ sljit_ins ins = 0;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_op2(compiler, type, dst_freg, src1_freg, src2_freg));
+
+ if (reg_size != 4)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if ((type & SLJIT_SIMD_FLOAT) && (elem_size < 2 || elem_size > 3))
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ switch (SLJIT_SIMD_GET_OPCODE(type)) {
+ case SLJIT_SIMD_OP2_AND:
+ ins = 0xe70000000068 /* vn */;
+ break;
+ case SLJIT_SIMD_OP2_OR:
+ ins = 0xe7000000006a /* vo */;
+ break;
+ case SLJIT_SIMD_OP2_XOR:
+ ins = 0xe7000000006d /* vx */;
+ break;
+ }
+
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ return push_inst(compiler, ins | F36(dst_freg) | F32(src1_freg) | F28(src2_freg));
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_atomic_load(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst_reg,
+ sljit_s32 mem_reg)
+{
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_atomic_load(compiler, op, dst_reg, mem_reg));
+
+ SLJIT_SKIP_CHECKS(compiler);
+ return sljit_emit_op1(compiler, op, dst_reg, 0, SLJIT_MEM1(mem_reg), 0);
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_atomic_store(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 src_reg,
+ sljit_s32 mem_reg,
+ sljit_s32 temp_reg)
+{
+ sljit_ins mask;
+ sljit_gpr tmp_r = gpr(temp_reg);
+ sljit_gpr mem_r = gpr(mem_reg);
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_atomic_store(compiler, op, src_reg, mem_reg, temp_reg));
+
+ switch (GET_OPCODE(op)) {
+ case SLJIT_MOV32:
+ case SLJIT_MOV_U32:
+ return push_inst(compiler, 0xba000000 /* cs */ | R20A(tmp_r) | R16A(gpr(src_reg)) | R12A(mem_r));
+ case SLJIT_MOV_U8:
+ mask = 0xff;
+ break;
+ case SLJIT_MOV_U16:
+ mask = 0xffff;
+ break;
+ default:
+ return push_inst(compiler, 0xeb0000000030 /* csg */ | R36A(tmp_r) | R32A(gpr(src_reg)) | R28A(mem_r));
+ }
+
+ /* tmp0 = (src_reg ^ tmp_r) & mask */
+ FAIL_IF(push_inst(compiler, 0xa50f0000 /* llill */ | R20A(tmp1) | mask));
+ FAIL_IF(push_inst(compiler, 0xb9e70000 /* xgrk */ | R4A(tmp0) | R0A(gpr(src_reg)) | R12A(tmp_r)));
+ FAIL_IF(push_inst(compiler, 0xa7090000 /* lghi */ | R20A(tmp_r) | 0xfffc));
+ FAIL_IF(push_inst(compiler, 0xb9800000 /* ngr */ | R4A(tmp0) | R0A(tmp1)));
+
+ /* tmp0 = tmp0 << (((mem_r ^ 0x3) & 0x3) << 3) */
+ FAIL_IF(push_inst(compiler, 0xa50f0000 /* llill */ | R20A(tmp1) | (sljit_ins)((mask == 0xff) ? 0x18 : 0x10)));
+ FAIL_IF(push_inst(compiler, 0xb9800000 /* ngr */ | R4A(tmp_r) | R0A(mem_r)));
+ FAIL_IF(push_inst(compiler, 0xec0000000057 /* rxsbg */ | R36A(tmp1) | R32A(mem_r) | (59 << 24) | (60 << 16) | (3 << 8)));
+ FAIL_IF(push_inst(compiler, 0xeb000000000d /* sllg */ | R36A(tmp0) | R32A(tmp0) | R28A(tmp1)));
+
+ /* Already computed: tmp_r = mem_r & ~0x3 */
+
+ FAIL_IF(push_inst(compiler, 0x58000000 /* l */ | R20A(tmp1) | R12A(tmp_r)));
+ FAIL_IF(push_inst(compiler, 0x1700 /* x */ | R4A(tmp0) | R0A(tmp1)));
+ return push_inst(compiler, 0xba000000 /* cs */ | R20A(tmp1) | R16A(tmp0) | R12A(tmp_r));
+}
+
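/* Editor's note (not part of the patch): the U8/U16 branch above emulates a
   sub-word compare-and-swap on top of the word-sized cs instruction.  Two
   pieces of arithmetic it relies on are sketched below: the shift amount of
   a byte inside its big-endian word, and the xor splice that swaps the old
   byte for the new one while leaving the rest of the word alone.  Hedged
   illustration only; in the emitted code cs performs the atomic compare on
   the containing word. */
#include <stdint.h>

static inline unsigned be_byte_shift(uintptr_t addr)
{
	/* mirrors the "(((mem_r ^ 0x3) & 0x3) << 3)" comment above */
	return ((unsigned)(addr ^ 0x3u) & 0x3u) << 3;
}

static inline uint32_t splice_byte(uint32_t word, uint8_t old_byte, uint8_t new_byte, unsigned shift)
{
	/* xor of the masked difference turns old_byte into new_byte in place,
	   provided the field currently holds old_byte */
	return word ^ ((uint32_t)((old_byte ^ new_byte) & 0xffu) << shift);
}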
/* --------------------------------------------------------------------- */
/* Other instructions */
/* --------------------------------------------------------------------- */
@@ -3678,9 +4459,9 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compi
dst_r = FAST_IS_REG(dst) ? gpr(dst & REG_MASK) : tmp0;
if (have_genext())
- PTR_FAIL_IF(push_inst(compiler, sljit_ins_const | lgrl(dst_r, 0)));
+ PTR_FAIL_IF(push_inst(compiler, lgrl(dst_r, 0)));
else {
- PTR_FAIL_IF(push_inst(compiler, sljit_ins_const | larl(tmp1, 0)));
+ PTR_FAIL_IF(push_inst(compiler, larl(tmp1, 0)));
PTR_FAIL_IF(push_inst(compiler, lg(dst_r, 0, r0, tmp1)));
}
@@ -3707,20 +4488,18 @@ SLJIT_API_FUNC_ATTRIBUTE void sljit_set_const(sljit_uw addr, sljit_sw new_consta
sljit_set_jump_addr(addr, (sljit_uw)new_constant, executable_offset);
}
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_put_label *sljit_emit_put_label(
- struct sljit_compiler *compiler,
- sljit_s32 dst, sljit_sw dstw)
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_mov_addr(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
{
- struct sljit_put_label *put_label;
+ struct sljit_jump *jump;
sljit_gpr dst_r;
CHECK_ERROR_PTR();
- CHECK_PTR(check_sljit_emit_put_label(compiler, dst, dstw));
+ CHECK_PTR(check_sljit_emit_mov_addr(compiler, dst, dstw));
ADJUST_LOCAL_OFFSET(dst, dstw);
- put_label = (struct sljit_put_label*)ensure_abuf(compiler, sizeof(struct sljit_put_label));
- PTR_FAIL_IF(!put_label);
- set_put_label(put_label, compiler, 0);
+ jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
+ PTR_FAIL_IF(!jump);
+ set_mov_addr(jump, compiler, 0);
dst_r = FAST_IS_REG(dst) ? gpr(dst & REG_MASK) : tmp0;
@@ -3734,7 +4513,7 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_put_label *sljit_emit_put_label(
if (dst & SLJIT_MEM)
PTR_FAIL_IF(store_word(compiler, dst_r, dst, dstw, 0));
- return put_label;
+ return jump;
}
/* TODO(carenas): EVAL probably should move up or be refactored */
diff --git a/src/3rdparty/pcre2/src/sljit/sljitNativeSPARC_32.c b/src/3rdparty/pcre2/src/sljit/sljitNativeSPARC_32.c
deleted file mode 100644
index 218992b355..0000000000
--- a/src/3rdparty/pcre2/src/sljit/sljitNativeSPARC_32.c
+++ /dev/null
@@ -1,283 +0,0 @@
-/*
- * Stack-less Just-In-Time compiler
- *
- * Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without modification, are
- * permitted provided that the following conditions are met:
- *
- * 1. Redistributions of source code must retain the above copyright notice, this list of
- * conditions and the following disclaimer.
- *
- * 2. Redistributions in binary form must reproduce the above copyright notice, this list
- * of conditions and the following disclaimer in the documentation and/or other materials
- * provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND CONTRIBUTORS ``AS IS'' AND ANY
- * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
- * SHALL THE COPYRIGHT HOLDER(S) OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
- * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
- * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
- * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
- * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
- * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-static sljit_s32 load_immediate(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw imm)
-{
- if (imm <= SIMM_MAX && imm >= SIMM_MIN)
- return push_inst(compiler, OR | D(dst) | S1(0) | IMM(imm), DR(dst));
-
- FAIL_IF(push_inst(compiler, SETHI | D(dst) | ((imm >> 10) & 0x3fffff), DR(dst)));
- return (imm & 0x3ff) ? push_inst(compiler, OR | D(dst) | S1(dst) | IMM_ARG | (imm & 0x3ff), DR(dst)) : SLJIT_SUCCESS;
-}
-
-#define ARG2(flags, src2) ((flags & SRC2_IMM) ? IMM(src2) : S2(src2))
-
-static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_u32 flags,
- sljit_s32 dst, sljit_s32 src1, sljit_sw src2)
-{
- SLJIT_COMPILE_ASSERT(ICC_IS_SET == SET_FLAGS, icc_is_set_and_set_flags_must_be_the_same);
-
- switch (op) {
- case SLJIT_MOV:
- SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
- if (dst != src2)
- return push_inst(compiler, OR | D(dst) | S1(0) | S2(src2), DR(dst));
- return SLJIT_SUCCESS;
-
- case SLJIT_MOV_U8:
- case SLJIT_MOV_S8:
- SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
- if ((flags & (REG_DEST | REG2_SOURCE)) == (REG_DEST | REG2_SOURCE)) {
- if (op == SLJIT_MOV_U8)
- return push_inst(compiler, AND | D(dst) | S1(src2) | IMM(0xff), DR(dst));
- FAIL_IF(push_inst(compiler, SLL | D(dst) | S1(src2) | IMM(24), DR(dst)));
- return push_inst(compiler, SRA | D(dst) | S1(dst) | IMM(24), DR(dst));
- }
- SLJIT_ASSERT(dst == src2);
- return SLJIT_SUCCESS;
-
- case SLJIT_MOV_U16:
- case SLJIT_MOV_S16:
- SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
- if ((flags & (REG_DEST | REG2_SOURCE)) == (REG_DEST | REG2_SOURCE)) {
- FAIL_IF(push_inst(compiler, SLL | D(dst) | S1(src2) | IMM(16), DR(dst)));
- return push_inst(compiler, (op == SLJIT_MOV_S16 ? SRA : SRL) | D(dst) | S1(dst) | IMM(16), DR(dst));
- }
- SLJIT_ASSERT(dst == src2);
- return SLJIT_SUCCESS;
-
- case SLJIT_NOT:
- SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
- return push_inst(compiler, XNOR | (flags & SET_FLAGS) | D(dst) | S1(0) | S2(src2), DRF(dst, flags));
-
- case SLJIT_CLZ:
- SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
- FAIL_IF(push_inst(compiler, SUB | SET_FLAGS | D(0) | S1(src2) | S2(0), SET_FLAGS));
- FAIL_IF(push_inst(compiler, OR | D(TMP_REG1) | S1(0) | S2(src2), DR(TMP_REG1)));
- FAIL_IF(push_inst(compiler, BICC | DA(0x1) | (7 & DISP_MASK), UNMOVABLE_INS));
- FAIL_IF(push_inst(compiler, OR | D(dst) | S1(0) | IMM(32), UNMOVABLE_INS));
- FAIL_IF(push_inst(compiler, OR | D(dst) | S1(0) | IMM(-1), DR(dst)));
-
- /* Loop. */
- FAIL_IF(push_inst(compiler, SUB | SET_FLAGS | D(0) | S1(TMP_REG1) | S2(0), SET_FLAGS));
- FAIL_IF(push_inst(compiler, SLL | D(TMP_REG1) | S1(TMP_REG1) | IMM(1), DR(TMP_REG1)));
- FAIL_IF(push_inst(compiler, BICC | DA(0xe) | ((sljit_ins)-2 & DISP_MASK), UNMOVABLE_INS));
- return push_inst(compiler, ADD | D(dst) | S1(dst) | IMM(1), UNMOVABLE_INS);
-
- case SLJIT_ADD:
- compiler->status_flags_state = SLJIT_CURRENT_FLAGS_ADD;
- return push_inst(compiler, ADD | (flags & SET_FLAGS) | D(dst) | S1(src1) | ARG2(flags, src2), DRF(dst, flags));
-
- case SLJIT_ADDC:
- compiler->status_flags_state = SLJIT_CURRENT_FLAGS_ADD;
- return push_inst(compiler, ADDC | (flags & SET_FLAGS) | D(dst) | S1(src1) | ARG2(flags, src2), DRF(dst, flags));
-
- case SLJIT_SUB:
- compiler->status_flags_state = SLJIT_CURRENT_FLAGS_SUB;
- return push_inst(compiler, SUB | (flags & SET_FLAGS) | D(dst) | S1(src1) | ARG2(flags, src2), DRF(dst, flags));
-
- case SLJIT_SUBC:
- compiler->status_flags_state = SLJIT_CURRENT_FLAGS_SUB;
- return push_inst(compiler, SUBC | (flags & SET_FLAGS) | D(dst) | S1(src1) | ARG2(flags, src2), DRF(dst, flags));
-
- case SLJIT_MUL:
- compiler->status_flags_state = 0;
- FAIL_IF(push_inst(compiler, SMUL | D(dst) | S1(src1) | ARG2(flags, src2), DR(dst)));
- if (!(flags & SET_FLAGS))
- return SLJIT_SUCCESS;
- FAIL_IF(push_inst(compiler, SRA | D(TMP_REG1) | S1(dst) | IMM(31), DR(TMP_REG1)));
- FAIL_IF(push_inst(compiler, RDY | D(TMP_LINK), DR(TMP_LINK)));
- return push_inst(compiler, SUB | SET_FLAGS | D(0) | S1(TMP_REG1) | S2(TMP_LINK), MOVABLE_INS | SET_FLAGS);
-
- case SLJIT_AND:
- return push_inst(compiler, AND | (flags & SET_FLAGS) | D(dst) | S1(src1) | ARG2(flags, src2), DRF(dst, flags));
-
- case SLJIT_OR:
- return push_inst(compiler, OR | (flags & SET_FLAGS) | D(dst) | S1(src1) | ARG2(flags, src2), DRF(dst, flags));
-
- case SLJIT_XOR:
- return push_inst(compiler, XOR | (flags & SET_FLAGS) | D(dst) | S1(src1) | ARG2(flags, src2), DRF(dst, flags));
-
- case SLJIT_SHL:
- FAIL_IF(push_inst(compiler, SLL | D(dst) | S1(src1) | ARG2(flags, src2), DR(dst)));
- return !(flags & SET_FLAGS) ? SLJIT_SUCCESS : push_inst(compiler, SUB | SET_FLAGS | D(0) | S1(dst) | S2(0), SET_FLAGS);
-
- case SLJIT_LSHR:
- FAIL_IF(push_inst(compiler, SRL | D(dst) | S1(src1) | ARG2(flags, src2), DR(dst)));
- return !(flags & SET_FLAGS) ? SLJIT_SUCCESS : push_inst(compiler, SUB | SET_FLAGS | D(0) | S1(dst) | S2(0), SET_FLAGS);
-
- case SLJIT_ASHR:
- FAIL_IF(push_inst(compiler, SRA | D(dst) | S1(src1) | ARG2(flags, src2), DR(dst)));
- return !(flags & SET_FLAGS) ? SLJIT_SUCCESS : push_inst(compiler, SUB | SET_FLAGS | D(0) | S1(dst) | S2(0), SET_FLAGS);
- }
-
- SLJIT_UNREACHABLE();
- return SLJIT_SUCCESS;
-}
-
-static sljit_s32 call_with_args(struct sljit_compiler *compiler, sljit_s32 arg_types, sljit_s32 *src)
-{
- sljit_s32 reg_index = 8;
- sljit_s32 word_reg_index = 8;
- sljit_s32 float_arg_index = 1;
- sljit_s32 double_arg_count = 0;
- sljit_u32 float_offset = (16 + 6) * sizeof(sljit_sw);
- sljit_s32 types = 0;
- sljit_s32 reg = 0;
- sljit_s32 move_to_tmp2 = 0;
-
- if (src)
- reg = reg_map[*src & REG_MASK];
-
- arg_types >>= SLJIT_ARG_SHIFT;
-
- while (arg_types) {
- types = (types << SLJIT_ARG_SHIFT) | (arg_types & SLJIT_ARG_MASK);
-
- switch (arg_types & SLJIT_ARG_MASK) {
- case SLJIT_ARG_TYPE_F64:
- float_arg_index++;
- double_arg_count++;
- if (reg_index == reg || reg_index + 1 == reg)
- move_to_tmp2 = 1;
- reg_index += 2;
- break;
- case SLJIT_ARG_TYPE_F32:
- float_arg_index++;
- if (reg_index == reg)
- move_to_tmp2 = 1;
- reg_index++;
- break;
- default:
- if (reg_index != word_reg_index && reg_index == reg)
- move_to_tmp2 = 1;
- reg_index++;
- word_reg_index++;
- break;
- }
-
- arg_types >>= SLJIT_ARG_SHIFT;
- }
-
- if (move_to_tmp2) {
- if (reg < 14)
- FAIL_IF(push_inst(compiler, OR | D(TMP_REG1) | S1(0) | S2A(reg), DR(TMP_REG1)));
- *src = TMP_REG1;
- }
-
- arg_types = types;
-
- while (arg_types) {
- switch (arg_types & SLJIT_ARG_MASK) {
- case SLJIT_ARG_TYPE_F64:
- float_arg_index--;
- if (float_arg_index == 4 && double_arg_count == 4) {
- /* The address is not doubleword aligned, so two instructions are required to store the double. */
- FAIL_IF(push_inst(compiler, STF | FD(float_arg_index) | S1(SLJIT_SP) | IMM((16 + 7) * sizeof(sljit_sw)), MOVABLE_INS));
- FAIL_IF(push_inst(compiler, STF | FD(float_arg_index) | (1 << 25) | S1(SLJIT_SP) | IMM((16 + 8) * sizeof(sljit_sw)), MOVABLE_INS));
- }
- else
- FAIL_IF(push_inst(compiler, STDF | FD(float_arg_index) | S1(SLJIT_SP) | IMM(float_offset), MOVABLE_INS));
- float_offset -= sizeof(sljit_f64);
- break;
- case SLJIT_ARG_TYPE_F32:
- float_arg_index--;
- FAIL_IF(push_inst(compiler, STF | FD(float_arg_index) | S1(SLJIT_SP) | IMM(float_offset), MOVABLE_INS));
- float_offset -= sizeof(sljit_f64);
- break;
- default:
- break;
- }
-
- arg_types >>= SLJIT_ARG_SHIFT;
- }
-
- float_offset = (16 + 6) * sizeof(sljit_sw);
-
- while (types) {
- switch (types & SLJIT_ARG_MASK) {
- case SLJIT_ARG_TYPE_F64:
- reg_index -= 2;
- if (reg_index < 14) {
- if ((reg_index & 0x1) != 0) {
- FAIL_IF(push_inst(compiler, LDUW | DA(reg_index) | S1(SLJIT_SP) | IMM(float_offset), reg_index));
- if (reg_index < 8 + 6 - 1)
- FAIL_IF(push_inst(compiler, LDUW | DA(reg_index + 1) | S1(SLJIT_SP) | IMM(float_offset + sizeof(sljit_sw)), reg_index + 1));
- }
- else
- FAIL_IF(push_inst(compiler, LDD | DA(reg_index) | S1(SLJIT_SP) | IMM(float_offset), reg_index));
- }
- float_offset -= sizeof(sljit_f64);
- break;
- case SLJIT_ARG_TYPE_F32:
- reg_index--;
- if (reg_index < 8 + 6)
- FAIL_IF(push_inst(compiler, LDUW | DA(reg_index) | S1(SLJIT_SP) | IMM(float_offset), reg_index));
- float_offset -= sizeof(sljit_f64);
- break;
- default:
- reg_index--;
- word_reg_index--;
-
- if (reg_index != word_reg_index) {
- if (reg_index < 14)
- FAIL_IF(push_inst(compiler, OR | DA(reg_index) | S1(0) | S2A(word_reg_index), reg_index));
- else
- FAIL_IF(push_inst(compiler, STW | DA(word_reg_index) | S1(SLJIT_SP) | IMM(92), word_reg_index));
- }
- break;
- }
-
- types >>= SLJIT_ARG_SHIFT;
- }
-
- return SLJIT_SUCCESS;
-}
-
-static SLJIT_INLINE sljit_s32 emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw init_value)
-{
- FAIL_IF(push_inst(compiler, SETHI | D(dst) | ((init_value >> 10) & 0x3fffff), DR(dst)));
- return push_inst(compiler, OR | D(dst) | S1(dst) | IMM_ARG | (init_value & 0x3ff), DR(dst));
-}
-
-SLJIT_API_FUNC_ATTRIBUTE void sljit_set_jump_addr(sljit_uw addr, sljit_uw new_target, sljit_sw executable_offset)
-{
- sljit_ins *inst = (sljit_ins *)addr;
- SLJIT_UNUSED_ARG(executable_offset);
-
- SLJIT_UPDATE_WX_FLAGS(inst, inst + 2, 0);
- SLJIT_ASSERT(((inst[0] & 0xc1c00000) == 0x01000000) && ((inst[1] & 0xc1f82000) == 0x80102000));
- inst[0] = (inst[0] & 0xffc00000) | ((new_target >> 10) & 0x3fffff);
- inst[1] = (inst[1] & 0xfffffc00) | (new_target & 0x3ff);
- SLJIT_UPDATE_WX_FLAGS(inst, inst + 2, 1);
- inst = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
- SLJIT_CACHE_FLUSH(inst, inst + 2);
-}
-
-SLJIT_API_FUNC_ATTRIBUTE void sljit_set_const(sljit_uw addr, sljit_sw new_constant, sljit_sw executable_offset)
-{
- sljit_set_jump_addr(addr, (sljit_uw)new_constant, executable_offset);
-}
diff --git a/src/3rdparty/pcre2/src/sljit/sljitNativeSPARC_common.c b/src/3rdparty/pcre2/src/sljit/sljitNativeSPARC_common.c
deleted file mode 100644
index c8d19e16c6..0000000000
--- a/src/3rdparty/pcre2/src/sljit/sljitNativeSPARC_common.c
+++ /dev/null
@@ -1,1673 +0,0 @@
-/*
- * Stack-less Just-In-Time compiler
- *
- * Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without modification, are
- * permitted provided that the following conditions are met:
- *
- * 1. Redistributions of source code must retain the above copyright notice, this list of
- * conditions and the following disclaimer.
- *
- * 2. Redistributions in binary form must reproduce the above copyright notice, this list
- * of conditions and the following disclaimer in the documentation and/or other materials
- * provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND CONTRIBUTORS ``AS IS'' AND ANY
- * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
- * SHALL THE COPYRIGHT HOLDER(S) OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
- * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
- * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
- * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
- * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
- * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-SLJIT_API_FUNC_ATTRIBUTE const char* sljit_get_platform_name(void)
-{
- return "SPARC" SLJIT_CPUINFO;
-}
-
-/* Length of an instruction word
- Both for sparc-32 and sparc-64 */
-typedef sljit_u32 sljit_ins;
-
-#if (defined SLJIT_CACHE_FLUSH_OWN_IMPL && SLJIT_CACHE_FLUSH_OWN_IMPL)
-
-static void sparc_cache_flush(sljit_ins *from, sljit_ins *to)
-{
-#if defined(__SUNPRO_C) && __SUNPRO_C < 0x590
- __asm (
- /* if (from == to) return */
- "cmp %i0, %i1\n"
- "be .leave\n"
- "nop\n"
-
- /* loop until from >= to */
- ".mainloop:\n"
- "flush %i0\n"
- "add %i0, 8, %i0\n"
- "cmp %i0, %i1\n"
- "bcs .mainloop\n"
- "nop\n"
-
- /* The comparison was done above. */
- "bne .leave\n"
- /* nop is not necessary here, since the
- sub operation has no side effect. */
- "sub %i0, 4, %i0\n"
- "flush %i0\n"
- ".leave:"
- );
-#else
- if (SLJIT_UNLIKELY(from == to))
- return;
-
- do {
- __asm__ volatile (
- "flush %0\n"
- : : "r"(from)
- );
- /* Operates at least on doubleword. */
- from += 2;
- } while (from < to);
-
- if (from == to) {
- /* Flush the last word. */
- from --;
- __asm__ volatile (
- "flush %0\n"
- : : "r"(from)
- );
- }
-#endif
-}
-
-#endif /* (defined SLJIT_CACHE_FLUSH_OWN_IMPL && SLJIT_CACHE_FLUSH_OWN_IMPL) */
-
-/* TMP_REG2 is not used by getput_arg */
-#define TMP_REG1 (SLJIT_NUMBER_OF_REGISTERS + 2)
-#define TMP_REG2 (SLJIT_NUMBER_OF_REGISTERS + 3)
-#define TMP_REG3 (SLJIT_NUMBER_OF_REGISTERS + 4)
-/* This register is modified by calls, which affects the instruction
- in the delay slot if it is used as a source register. */
-#define TMP_LINK (SLJIT_NUMBER_OF_REGISTERS + 5)
-
-#define TMP_FREG1 (SLJIT_NUMBER_OF_FLOAT_REGISTERS + 1)
-#define TMP_FREG2 (SLJIT_NUMBER_OF_FLOAT_REGISTERS + 2)
-
-static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 6] = {
- 0, 8, 9, 10, 11, 23, 22, 21, 20, 19, 18, 17, 16, 29, 28, 27, 26, 25, 24, 14, 1, 12, 13, 15
-};
-
-static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 3] = {
- 0, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30
-};
-
-/* --------------------------------------------------------------------- */
-/* Instruction forms */
-/* --------------------------------------------------------------------- */
-
-#define D(d) ((sljit_ins)reg_map[d] << 25)
-#define FD(d) ((sljit_ins)freg_map[d] << 25)
-#define FDN(d) (((sljit_ins)freg_map[d] | 0x1) << 25)
-#define DA(d) ((sljit_ins)(d) << 25)
-#define S1(s1) ((sljit_ins)reg_map[s1] << 14)
-#define FS1(s1) ((sljit_ins)freg_map[s1] << 14)
-#define S1A(s1) ((sljit_ins)(s1) << 14)
-#define S2(s2) ((sljit_ins)reg_map[s2])
-#define FS2(s2) ((sljit_ins)freg_map[s2])
-#define FS2N(s2) ((sljit_ins)freg_map[s2] | 0x1)
-#define S2A(s2) ((sljit_ins)(s2))
-#define IMM_ARG 0x2000
-#define DOP(op) ((sljit_ins)(op) << 5)
-#define IMM(imm) (((sljit_ins)(imm) & 0x1fff) | IMM_ARG)
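-/* IMM_ARG is the i bit of format 3 instructions: it selects the 13 bit signed immediate (simm13) form encoded by IMM(). */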
-
-#define DR(dr) (reg_map[dr])
-#define DRF(dr, flags) ((sljit_s32)(reg_map[dr] | ((flags) & SET_FLAGS)))
-#define OPC1(opcode) ((sljit_ins)(opcode) << 30)
-#define OPC2(opcode) ((sljit_ins)(opcode) << 22)
-#define OPC3(opcode) ((sljit_ins)(opcode) << 19)
-#define SET_FLAGS OPC3(0x10)
-
-#define ADD (OPC1(0x2) | OPC3(0x00))
-#define ADDC (OPC1(0x2) | OPC3(0x08))
-#define AND (OPC1(0x2) | OPC3(0x01))
-#define ANDN (OPC1(0x2) | OPC3(0x05))
-#define CALL (OPC1(0x1))
-#define FABSS (OPC1(0x2) | OPC3(0x34) | DOP(0x09))
-#define FADDD (OPC1(0x2) | OPC3(0x34) | DOP(0x42))
-#define FADDS (OPC1(0x2) | OPC3(0x34) | DOP(0x41))
-#define FCMPD (OPC1(0x2) | OPC3(0x35) | DOP(0x52))
-#define FCMPS (OPC1(0x2) | OPC3(0x35) | DOP(0x51))
-#define FDIVD (OPC1(0x2) | OPC3(0x34) | DOP(0x4e))
-#define FDIVS (OPC1(0x2) | OPC3(0x34) | DOP(0x4d))
-#define FDTOI (OPC1(0x2) | OPC3(0x34) | DOP(0xd2))
-#define FDTOS (OPC1(0x2) | OPC3(0x34) | DOP(0xc6))
-#define FITOD (OPC1(0x2) | OPC3(0x34) | DOP(0xc8))
-#define FITOS (OPC1(0x2) | OPC3(0x34) | DOP(0xc4))
-#define FMOVS (OPC1(0x2) | OPC3(0x34) | DOP(0x01))
-#define FMULD (OPC1(0x2) | OPC3(0x34) | DOP(0x4a))
-#define FMULS (OPC1(0x2) | OPC3(0x34) | DOP(0x49))
-#define FNEGS (OPC1(0x2) | OPC3(0x34) | DOP(0x05))
-#define FSTOD (OPC1(0x2) | OPC3(0x34) | DOP(0xc9))
-#define FSTOI (OPC1(0x2) | OPC3(0x34) | DOP(0xd1))
-#define FSUBD (OPC1(0x2) | OPC3(0x34) | DOP(0x46))
-#define FSUBS (OPC1(0x2) | OPC3(0x34) | DOP(0x45))
-#define JMPL (OPC1(0x2) | OPC3(0x38))
-#define LDD (OPC1(0x3) | OPC3(0x03))
-#define LDDF (OPC1(0x3) | OPC3(0x23))
-#define LDF (OPC1(0x3) | OPC3(0x20))
-#define LDUW (OPC1(0x3) | OPC3(0x00))
-#define NOP (OPC1(0x0) | OPC2(0x04))
-#define OR (OPC1(0x2) | OPC3(0x02))
-#define ORN (OPC1(0x2) | OPC3(0x06))
-#define RDY (OPC1(0x2) | OPC3(0x28) | S1A(0))
-#define RESTORE (OPC1(0x2) | OPC3(0x3d))
-#define SAVE (OPC1(0x2) | OPC3(0x3c))
-#define SETHI (OPC1(0x0) | OPC2(0x04))
-#define SLL (OPC1(0x2) | OPC3(0x25))
-#define SLLX (OPC1(0x2) | OPC3(0x25) | (1 << 12))
-#define SRA (OPC1(0x2) | OPC3(0x27))
-#define SRAX (OPC1(0x2) | OPC3(0x27) | (1 << 12))
-#define SRL (OPC1(0x2) | OPC3(0x26))
-#define SRLX (OPC1(0x2) | OPC3(0x26) | (1 << 12))
-#define STD (OPC1(0x3) | OPC3(0x07))
-#define STDF (OPC1(0x3) | OPC3(0x27))
-#define STF (OPC1(0x3) | OPC3(0x24))
-#define STW (OPC1(0x3) | OPC3(0x04))
-#define SUB (OPC1(0x2) | OPC3(0x04))
-#define SUBC (OPC1(0x2) | OPC3(0x0c))
-#define TA (OPC1(0x2) | OPC3(0x3a) | (8 << 25))
-#define WRY (OPC1(0x2) | OPC3(0x30) | DA(0))
-#define XOR (OPC1(0x2) | OPC3(0x03))
-#define XNOR (OPC1(0x2) | OPC3(0x07))
-
-#if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32)
-#define MAX_DISP (0x1fffff)
-#define MIN_DISP (-0x200000)
-#define DISP_MASK ((sljit_ins)0x3fffff)
-
-#define BICC (OPC1(0x0) | OPC2(0x2))
-#define FBFCC (OPC1(0x0) | OPC2(0x6))
-#define SLL_W SLL
-#define SDIV (OPC1(0x2) | OPC3(0x0f))
-#define SMUL (OPC1(0x2) | OPC3(0x0b))
-#define UDIV (OPC1(0x2) | OPC3(0x0e))
-#define UMUL (OPC1(0x2) | OPC3(0x0a))
-#else
-#define SLL_W SLLX
-#endif
-
-#define SIMM_MAX (0x0fff)
-#define SIMM_MIN (-0x1000)
-
-/* dest_reg is the absolute name of the register.
- Useful for reordering instructions in the delay slot. */
-static sljit_s32 push_inst(struct sljit_compiler *compiler, sljit_ins ins, sljit_s32 delay_slot)
-{
- sljit_ins *ptr;
- SLJIT_ASSERT((delay_slot & DST_INS_MASK) == UNMOVABLE_INS
- || (delay_slot & DST_INS_MASK) == MOVABLE_INS
- || (delay_slot & DST_INS_MASK) == ((ins >> 25) & 0x1f));
- ptr = (sljit_ins*)ensure_buf(compiler, sizeof(sljit_ins));
- FAIL_IF(!ptr);
- *ptr = ins;
- compiler->size++;
- compiler->delay_slot = delay_slot;
- return SLJIT_SUCCESS;
-}
-
-static SLJIT_INLINE sljit_ins* detect_jump_type(struct sljit_jump *jump, sljit_ins *code_ptr, sljit_ins *code, sljit_sw executable_offset)
-{
- sljit_sw diff;
- sljit_uw target_addr;
- sljit_ins *inst;
- sljit_ins saved_inst;
-
- if (jump->flags & SLJIT_REWRITABLE_JUMP)
- return code_ptr;
-
- if (jump->flags & JUMP_ADDR)
- target_addr = jump->u.target;
- else {
- SLJIT_ASSERT(jump->flags & JUMP_LABEL);
- target_addr = (sljit_uw)(code + jump->u.label->size) + (sljit_uw)executable_offset;
- }
- inst = (sljit_ins*)jump->addr;
-
-#if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32)
- if (jump->flags & IS_CALL) {
- /* Call is always patchable on sparc 32. */
- jump->flags |= PATCH_CALL;
- if (jump->flags & IS_MOVABLE) {
- inst[0] = inst[-1];
- inst[-1] = CALL;
- jump->addr -= sizeof(sljit_ins);
- return inst;
- }
- inst[0] = CALL;
- inst[1] = NOP;
- return inst + 1;
- }
-#else
- /* Neither calls nor BPr instructions should reach this point. */
-#error "Implementation required"
-#endif
-
- if (jump->flags & IS_COND)
- inst--;
-
- diff = ((sljit_sw)target_addr - (sljit_sw)(inst - 1) - executable_offset) >> 2;
-
- if (jump->flags & IS_MOVABLE) {
- if (diff <= MAX_DISP && diff >= MIN_DISP) {
- jump->flags |= PATCH_B;
- inst--;
- if (jump->flags & IS_COND) {
- saved_inst = inst[0];
- inst[0] = inst[1] ^ (1 << 28);
- inst[1] = saved_inst;
- } else {
- inst[1] = inst[0];
- inst[0] = BICC | DA(0x8);
- }
- jump->addr = (sljit_uw)inst;
- return inst + 1;
- }
- }
-
- diff += SSIZE_OF(ins);
-
- if (diff <= MAX_DISP && diff >= MIN_DISP) {
- jump->flags |= PATCH_B;
- if (jump->flags & IS_COND)
- inst[0] ^= (1 << 28);
- else
- inst[0] = BICC | DA(0x8);
- inst[1] = NOP;
- jump->addr = (sljit_uw)inst;
- return inst + 1;
- }
-
- return code_ptr;
-}
-
-SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compiler)
-{
- struct sljit_memory_fragment *buf;
- sljit_ins *code;
- sljit_ins *code_ptr;
- sljit_ins *buf_ptr;
- sljit_ins *buf_end;
- sljit_uw word_count;
- sljit_uw next_addr;
- sljit_sw executable_offset;
- sljit_sw addr;
-
- struct sljit_label *label;
- struct sljit_jump *jump;
- struct sljit_const *const_;
- struct sljit_put_label *put_label;
-
- CHECK_ERROR_PTR();
- CHECK_PTR(check_sljit_generate_code(compiler));
- reverse_buf(compiler);
-
- code = (sljit_ins*)SLJIT_MALLOC_EXEC(compiler->size * sizeof(sljit_ins), compiler->exec_allocator_data);
- PTR_FAIL_WITH_EXEC_IF(code);
- buf = compiler->buf;
-
- code_ptr = code;
- word_count = 0;
- next_addr = 0;
- executable_offset = SLJIT_EXEC_OFFSET(code);
-
- label = compiler->labels;
- jump = compiler->jumps;
- const_ = compiler->consts;
- put_label = compiler->put_labels;
-
- do {
- buf_ptr = (sljit_ins*)buf->memory;
- buf_end = buf_ptr + (buf->used_size >> 2);
- do {
- *code_ptr = *buf_ptr++;
- if (next_addr == word_count) {
- SLJIT_ASSERT(!label || label->size >= word_count);
- SLJIT_ASSERT(!jump || jump->addr >= word_count);
- SLJIT_ASSERT(!const_ || const_->addr >= word_count);
- SLJIT_ASSERT(!put_label || put_label->addr >= word_count);
-
- /* These structures are ordered by their address. */
- if (label && label->size == word_count) {
- /* Just recording the address. */
- label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
- label->size = (sljit_uw)(code_ptr - code);
- label = label->next;
- }
- if (jump && jump->addr == word_count) {
-#if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32)
- jump->addr = (sljit_uw)(code_ptr - 3);
-#else
- jump->addr = (sljit_uw)(code_ptr - 6);
-#endif
- code_ptr = detect_jump_type(jump, code_ptr, code, executable_offset);
- jump = jump->next;
- }
- if (const_ && const_->addr == word_count) {
- /* Just recording the address. */
- const_->addr = (sljit_uw)code_ptr;
- const_ = const_->next;
- }
- if (put_label && put_label->addr == word_count) {
- SLJIT_ASSERT(put_label->label);
- put_label->addr = (sljit_uw)code_ptr;
- put_label = put_label->next;
- }
- next_addr = compute_next_addr(label, jump, const_, put_label);
- }
- code_ptr ++;
- word_count ++;
- } while (buf_ptr < buf_end);
-
- buf = buf->next;
- } while (buf);
-
- if (label && label->size == word_count) {
- label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
- label->size = (sljit_uw)(code_ptr - code);
- label = label->next;
- }
-
- SLJIT_ASSERT(!label);
- SLJIT_ASSERT(!jump);
- SLJIT_ASSERT(!const_);
- SLJIT_ASSERT(!put_label);
- SLJIT_ASSERT(code_ptr - code <= (sljit_s32)compiler->size);
-
- jump = compiler->jumps;
- while (jump) {
- do {
- addr = (sljit_sw)((jump->flags & JUMP_LABEL) ? jump->u.label->addr : jump->u.target);
- buf_ptr = (sljit_ins *)jump->addr;
-
- if (jump->flags & PATCH_CALL) {
- addr = (addr - (sljit_sw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset)) >> 2;
- SLJIT_ASSERT(addr <= 0x1fffffff && addr >= -0x20000000);
- buf_ptr[0] = CALL | ((sljit_ins)addr & 0x3fffffff);
- break;
- }
- if (jump->flags & PATCH_B) {
- addr = (addr - (sljit_sw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset)) >> 2;
- SLJIT_ASSERT(addr <= MAX_DISP && addr >= MIN_DISP);
- buf_ptr[0] = (buf_ptr[0] & ~DISP_MASK) | ((sljit_ins)addr & DISP_MASK);
- break;
- }
-
- /* Set the fields of immediate loads. */
-#if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32)
- SLJIT_ASSERT(((buf_ptr[0] & 0xc1cfffff) == 0x01000000) && ((buf_ptr[1] & 0xc1f83fff) == 0x80102000));
- buf_ptr[0] |= (sljit_ins)(addr >> 10) & 0x3fffff;
- buf_ptr[1] |= (sljit_ins)addr & 0x3ff;
-#else
-#error "Implementation required"
-#endif
- } while (0);
- jump = jump->next;
- }
-
- put_label = compiler->put_labels;
- while (put_label) {
- addr = (sljit_sw)put_label->label->addr;
- buf_ptr = (sljit_ins *)put_label->addr;
-
-#if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32)
- SLJIT_ASSERT(((buf_ptr[0] & 0xc1cfffff) == 0x01000000) && ((buf_ptr[1] & 0xc1f83fff) == 0x80102000));
- buf_ptr[0] |= (addr >> 10) & 0x3fffff;
- buf_ptr[1] |= addr & 0x3ff;
-#else
-#error "Implementation required"
-#endif
- put_label = put_label->next;
- }
-
- compiler->error = SLJIT_ERR_COMPILED;
- compiler->executable_offset = executable_offset;
- compiler->executable_size = (sljit_uw)(code_ptr - code) * sizeof(sljit_ins);
-
- code = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(code, executable_offset);
- code_ptr = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
-
- SLJIT_CACHE_FLUSH(code, code_ptr);
- SLJIT_UPDATE_WX_FLAGS(code, code_ptr, 1);
- return code;
-}
-
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_has_cpu_feature(sljit_s32 feature_type)
-{
- switch (feature_type) {
- case SLJIT_HAS_FPU:
-#ifdef SLJIT_IS_FPU_AVAILABLE
- return SLJIT_IS_FPU_AVAILABLE;
-#else
- /* Available by default. */
- return 1;
-#endif
-
- case SLJIT_HAS_ZERO_REGISTER:
- return 1;
-
-#if (defined SLJIT_CONFIG_SPARC_64 && SLJIT_CONFIG_SPARC_64)
- case SLJIT_HAS_CMOV:
- return 1;
-#endif
-
- default:
- return 0;
- }
-}
-
-/* --------------------------------------------------------------------- */
-/* Entry, exit */
-/* --------------------------------------------------------------------- */
-
-/* Creates an index into the data_transfer_insts array. */
-#define LOAD_DATA 0x01
-#define WORD_DATA 0x00
-#define BYTE_DATA 0x02
-#define HALF_DATA 0x04
-#define INT_DATA 0x06
-#define SIGNED_DATA 0x08
-/* Separates integer and floating point registers */
-#define GPR_REG 0x0f
-#define DOUBLE_DATA 0x10
-#define SINGLE_DATA 0x12
-
-#define MEM_MASK 0x1f
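-/* For example, (HALF_DATA | SIGNED_DATA | LOAD_DATA) selects the signed halfword load (ldsh) entry of data_transfer_insts. */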
-
-#define ARG_TEST 0x00020
-#define ALT_KEEP_CACHE 0x00040
-#define CUMULATIVE_OP 0x00080
-#define IMM_OP 0x00100
-#define MOVE_OP 0x00200
-#define SRC2_IMM 0x00400
-
-#define REG_DEST 0x00800
-#define REG2_SOURCE 0x01000
-#define SLOW_SRC1 0x02000
-#define SLOW_SRC2 0x04000
-#define SLOW_DEST 0x08000
-
-/* SET_FLAGS (0x10 << 19) also belongs here! */
-
-#if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32)
-#include "sljitNativeSPARC_32.c"
-#else
-#include "sljitNativeSPARC_64.c"
-#endif
-
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compiler,
- sljit_s32 options, sljit_s32 arg_types, sljit_s32 scratches, sljit_s32 saveds,
- sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
-{
- sljit_s32 reg_index, types, tmp;
- sljit_u32 float_offset, args_offset;
- sljit_s32 saved_arg_index, scratch_arg_index, float_arg_index;
-
- CHECK_ERROR();
- CHECK(check_sljit_emit_enter(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size));
- set_emit_enter(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size);
-
- local_size = (local_size + SLJIT_LOCALS_OFFSET + 7) & ~0x7;
- compiler->local_size = local_size;
-
- if (local_size <= -SIMM_MIN) {
- FAIL_IF(push_inst(compiler, SAVE | D(SLJIT_SP) | S1(SLJIT_SP) | IMM(-local_size), UNMOVABLE_INS));
- }
- else {
- FAIL_IF(load_immediate(compiler, TMP_REG1, -local_size));
- FAIL_IF(push_inst(compiler, SAVE | D(SLJIT_SP) | S1(SLJIT_SP) | S2(TMP_REG1), UNMOVABLE_INS));
- }
-
- arg_types >>= SLJIT_ARG_SHIFT;
-
- types = arg_types;
- float_offset = 16 * sizeof(sljit_sw);
- reg_index = 24;
-
- while (types && reg_index < 24 + 6) {
- switch (types & SLJIT_ARG_MASK) {
- case SLJIT_ARG_TYPE_F64:
- if (reg_index & 0x1) {
- FAIL_IF(push_inst(compiler, STW | DA(reg_index) | S1(SLJIT_SP) | IMM(float_offset), MOVABLE_INS));
- if (reg_index >= 24 + 6 - 1)
- break;
- FAIL_IF(push_inst(compiler, STW | DA(reg_index + 1) | S1(SLJIT_SP) | IMM(float_offset + sizeof(sljit_sw)), MOVABLE_INS));
- } else
- FAIL_IF(push_inst(compiler, STD | DA(reg_index) | S1(SLJIT_SP) | IMM(float_offset), MOVABLE_INS));
-
- float_offset += sizeof(sljit_f64);
- reg_index++;
- break;
- case SLJIT_ARG_TYPE_F32:
- FAIL_IF(push_inst(compiler, STW | DA(reg_index) | S1(SLJIT_SP) | IMM(float_offset), MOVABLE_INS));
- float_offset += sizeof(sljit_f64);
- break;
- }
-
- reg_index++;
- types >>= SLJIT_ARG_SHIFT;
- }
-
- args_offset = (16 + 1 + 6) * sizeof(sljit_sw);
- float_offset = 16 * sizeof(sljit_sw);
- reg_index = 24;
- saved_arg_index = 24;
- scratch_arg_index = 8 - 1;
- float_arg_index = 1;
-
- while (arg_types) {
- switch (arg_types & SLJIT_ARG_MASK) {
- case SLJIT_ARG_TYPE_F64:
- if (reg_index < 24 + 6 - 1) {
- FAIL_IF(push_inst(compiler, LDDF | FD(float_arg_index) | S1(SLJIT_SP) | IMM(float_offset), MOVABLE_INS));
- } else if (reg_index < 24 + 6) {
- FAIL_IF(push_inst(compiler, LDF | FD(float_arg_index) | S1(SLJIT_SP) | IMM(float_offset), MOVABLE_INS));
- FAIL_IF(push_inst(compiler, LDF | FD(float_arg_index) | (1 << 25) | S1A(30) | IMM(args_offset), MOVABLE_INS));
- } else {
- FAIL_IF(push_inst(compiler, LDF | FD(float_arg_index) | S1A(30) | IMM(args_offset), MOVABLE_INS));
- FAIL_IF(push_inst(compiler, LDF | FD(float_arg_index) | (1 << 25) | S1A(30) | IMM(args_offset + sizeof(sljit_sw)), MOVABLE_INS));
- }
-
- float_arg_index++;
- float_offset += sizeof(sljit_f64);
- reg_index++;
- break;
- case SLJIT_ARG_TYPE_F32:
- if (reg_index < 24 + 6)
- FAIL_IF(push_inst(compiler, LDF | FD(float_arg_index) | S1(SLJIT_SP) | IMM(float_offset), MOVABLE_INS));
- else
- FAIL_IF(push_inst(compiler, LDF | FD(float_arg_index) | S1A(30) | IMM(args_offset), MOVABLE_INS));
- float_arg_index++;
- float_offset += sizeof(sljit_f64);
- break;
- default:
- scratch_arg_index++;
-
- if (!(arg_types & SLJIT_ARG_TYPE_SCRATCH_REG)) {
- tmp = saved_arg_index++;
- if (tmp == reg_index)
- break;
- } else
- tmp = scratch_arg_index;
-
- if (reg_index < 24 + 6)
- FAIL_IF(push_inst(compiler, OR | DA(tmp) | S1(0) | S2A(reg_index), tmp));
- else
- FAIL_IF(push_inst(compiler, LDUW | DA(tmp) | S1A(30) | IMM(args_offset), tmp));
- break;
- }
-
- reg_index++;
- arg_types >>= SLJIT_ARG_SHIFT;
- }
-
- return SLJIT_SUCCESS;
-}
-
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_set_context(struct sljit_compiler *compiler,
- sljit_s32 options, sljit_s32 arg_types, sljit_s32 scratches, sljit_s32 saveds,
- sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
-{
- CHECK_ERROR();
- CHECK(check_sljit_set_context(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size));
- set_set_context(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size);
-
- compiler->local_size = (local_size + SLJIT_LOCALS_OFFSET + 7) & ~0x7;
- return SLJIT_SUCCESS;
-}
-
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return_void(struct sljit_compiler *compiler)
-{
- CHECK_ERROR();
- CHECK(check_sljit_emit_return_void(compiler));
-
- FAIL_IF(push_inst(compiler, JMPL | D(0) | S1A(31) | IMM(8), UNMOVABLE_INS));
- return push_inst(compiler, RESTORE | D(SLJIT_R0) | S1(SLJIT_R0) | S2(0), UNMOVABLE_INS);
-}
-
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 src, sljit_sw srcw)
-{
- CHECK_ERROR();
- CHECK(check_sljit_emit_return(compiler, op, src, srcw));
-
- if (TYPE_CAST_NEEDED(op) || !FAST_IS_REG(src)) {
- FAIL_IF(emit_mov_before_return(compiler, op, src, srcw));
- src = SLJIT_R0;
- }
-
- FAIL_IF(push_inst(compiler, JMPL | D(0) | S1A(31) | IMM(8), UNMOVABLE_INS));
- return push_inst(compiler, RESTORE | D(SLJIT_R0) | S1(src) | S2(0), UNMOVABLE_INS);
-}
-
-/* --------------------------------------------------------------------- */
-/* Operators */
-/* --------------------------------------------------------------------- */
-
-#if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32)
-#define ARCH_32_64(a, b) a
-#else
-#define ARCH_32_64(a, b) b
-#endif
-
-static const sljit_ins data_transfer_insts[16 + 4] = {
-/* u w s */ ARCH_32_64(OPC1(3) | OPC3(0x04) /* stw */, OPC1(3) | OPC3(0x0e) /* stx */),
-/* u w l */ ARCH_32_64(OPC1(3) | OPC3(0x00) /* lduw */, OPC1(3) | OPC3(0x0b) /* ldx */),
-/* u b s */ OPC1(3) | OPC3(0x05) /* stb */,
-/* u b l */ OPC1(3) | OPC3(0x01) /* ldub */,
-/* u h s */ OPC1(3) | OPC3(0x06) /* sth */,
-/* u h l */ OPC1(3) | OPC3(0x02) /* lduh */,
-/* u i s */ OPC1(3) | OPC3(0x04) /* stw */,
-/* u i l */ OPC1(3) | OPC3(0x00) /* lduw */,
-
-/* s w s */ ARCH_32_64(OPC1(3) | OPC3(0x04) /* stw */, OPC1(3) | OPC3(0x0e) /* stx */),
-/* s w l */ ARCH_32_64(OPC1(3) | OPC3(0x00) /* lduw */, OPC1(3) | OPC3(0x0b) /* ldx */),
-/* s b s */ OPC1(3) | OPC3(0x05) /* stb */,
-/* s b l */ OPC1(3) | OPC3(0x09) /* ldsb */,
-/* s h s */ OPC1(3) | OPC3(0x06) /* sth */,
-/* s h l */ OPC1(3) | OPC3(0x0a) /* ldsh */,
-/* s i s */ OPC1(3) | OPC3(0x04) /* stw */,
-/* s i l */ ARCH_32_64(OPC1(3) | OPC3(0x00) /* lduw */, OPC1(3) | OPC3(0x08) /* ldsw */),
-
-/* d s */ OPC1(3) | OPC3(0x27),
-/* d l */ OPC1(3) | OPC3(0x23),
-/* s s */ OPC1(3) | OPC3(0x24),
-/* s l */ OPC1(3) | OPC3(0x20),
-};
-
-#undef ARCH_32_64
-
-/* Can perform an operation using at most 1 instruction. */
-static sljit_s32 getput_arg_fast(struct sljit_compiler *compiler, sljit_u32 flags, sljit_s32 reg, sljit_s32 arg, sljit_sw argw)
-{
- SLJIT_ASSERT(arg & SLJIT_MEM);
-
- if ((!(arg & OFFS_REG_MASK) && argw <= SIMM_MAX && argw >= SIMM_MIN)
- || ((arg & OFFS_REG_MASK) && (argw & 0x3) == 0)) {
- /* Works for both absolute and relative addresses (immediate case). */
- if (SLJIT_UNLIKELY(flags & ARG_TEST))
- return 1;
- FAIL_IF(push_inst(compiler, data_transfer_insts[flags & MEM_MASK]
- | ((flags & MEM_MASK) <= GPR_REG ? D(reg) : FD(reg))
- | S1(arg & REG_MASK) | ((arg & OFFS_REG_MASK) ? S2(OFFS_REG(arg)) : IMM(argw)),
- ((flags & MEM_MASK) <= GPR_REG && (flags & LOAD_DATA)) ? DR(reg) : MOVABLE_INS));
- return -1;
- }
- return 0;
-}
-
-/* See getput_arg below.
- Note: can_cache is called only for binary operators. Those
- operators always use word arguments without write back. */
-static sljit_s32 can_cache(sljit_s32 arg, sljit_sw argw, sljit_s32 next_arg, sljit_sw next_argw)
-{
- SLJIT_ASSERT((arg & SLJIT_MEM) && (next_arg & SLJIT_MEM));
-
- /* Simple operation except for updates. */
- if (arg & OFFS_REG_MASK) {
- argw &= 0x3;
- SLJIT_ASSERT(argw);
- next_argw &= 0x3;
- if ((arg & OFFS_REG_MASK) == (next_arg & OFFS_REG_MASK) && argw == next_argw)
- return 1;
- return 0;
- }
-
- if (((next_argw - argw) <= SIMM_MAX && (next_argw - argw) >= SIMM_MIN))
- return 1;
- return 0;
-}
-
-/* Emit the necessary instructions. See can_cache above. */
-static sljit_s32 getput_arg(struct sljit_compiler *compiler, sljit_u32 flags, sljit_s32 reg, sljit_s32 arg, sljit_sw argw, sljit_s32 next_arg, sljit_sw next_argw)
-{
- sljit_s32 base, arg2, delay_slot;
- sljit_ins dest;
-
- SLJIT_ASSERT(arg & SLJIT_MEM);
- if (!(next_arg & SLJIT_MEM)) {
- next_arg = 0;
- next_argw = 0;
- }
-
- base = arg & REG_MASK;
- if (SLJIT_UNLIKELY(arg & OFFS_REG_MASK)) {
- argw &= 0x3;
-
- /* Using the cache. */
- if (((SLJIT_MEM | (arg & OFFS_REG_MASK)) == compiler->cache_arg) && (argw == compiler->cache_argw))
- arg2 = TMP_REG3;
- else {
- if ((arg & OFFS_REG_MASK) == (next_arg & OFFS_REG_MASK) && argw == (next_argw & 0x3)) {
- compiler->cache_arg = SLJIT_MEM | (arg & OFFS_REG_MASK);
- compiler->cache_argw = argw;
- arg2 = TMP_REG3;
- }
- else if ((flags & LOAD_DATA) && ((flags & MEM_MASK) <= GPR_REG) && reg != base && reg != OFFS_REG(arg))
- arg2 = reg;
- else /* It must be a mov operation, so tmp1 must be free to use. */
- arg2 = TMP_REG1;
- FAIL_IF(push_inst(compiler, SLL_W | D(arg2) | S1(OFFS_REG(arg)) | IMM_ARG | (sljit_ins)argw, DR(arg2)));
- }
- }
- else {
- /* Using the cache. */
- if ((compiler->cache_arg == SLJIT_MEM) && (argw - compiler->cache_argw) <= SIMM_MAX && (argw - compiler->cache_argw) >= SIMM_MIN) {
- if (argw != compiler->cache_argw) {
- FAIL_IF(push_inst(compiler, ADD | D(TMP_REG3) | S1(TMP_REG3) | IMM(argw - compiler->cache_argw), DR(TMP_REG3)));
- compiler->cache_argw = argw;
- }
- arg2 = TMP_REG3;
- } else {
- if ((next_argw - argw) <= SIMM_MAX && (next_argw - argw) >= SIMM_MIN) {
- compiler->cache_arg = SLJIT_MEM;
- compiler->cache_argw = argw;
- arg2 = TMP_REG3;
- }
- else if ((flags & LOAD_DATA) && ((flags & MEM_MASK) <= GPR_REG) && reg != base)
- arg2 = reg;
- else /* It must be a mov operation, so tmp1 must be free to use. */
- arg2 = TMP_REG1;
- FAIL_IF(load_immediate(compiler, arg2, argw));
- }
- }
-
- dest = ((flags & MEM_MASK) <= GPR_REG ? D(reg) : FD(reg));
- delay_slot = ((flags & MEM_MASK) <= GPR_REG && (flags & LOAD_DATA)) ? DR(reg) : MOVABLE_INS;
- if (!base)
- return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | dest | S1(arg2) | IMM(0), delay_slot);
- return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | dest | S1(base) | S2(arg2), delay_slot);
-}
-
-static SLJIT_INLINE sljit_s32 emit_op_mem(struct sljit_compiler *compiler, sljit_u32 flags, sljit_s32 reg, sljit_s32 arg, sljit_sw argw)
-{
- if (getput_arg_fast(compiler, flags, reg, arg, argw))
- return compiler->error;
- compiler->cache_arg = 0;
- compiler->cache_argw = 0;
- return getput_arg(compiler, flags, reg, arg, argw, 0, 0);
-}
-
-static SLJIT_INLINE sljit_s32 emit_op_mem2(struct sljit_compiler *compiler, sljit_u32 flags, sljit_s32 reg, sljit_s32 arg1, sljit_sw arg1w, sljit_s32 arg2, sljit_sw arg2w)
-{
- if (getput_arg_fast(compiler, flags, reg, arg1, arg1w))
- return compiler->error;
- return getput_arg(compiler, flags, reg, arg1, arg1w, arg2, arg2w);
-}
-
-static sljit_s32 emit_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_u32 flags,
- sljit_s32 dst, sljit_sw dstw,
- sljit_s32 src1, sljit_sw src1w,
- sljit_s32 src2, sljit_sw src2w)
-{
- /* arg1 goes to TMP_REG1 or src reg
- arg2 goes to TMP_REG2, imm or src reg
- TMP_REG3 can be used for caching
- result goes to TMP_REG2, so put result can use TMP_REG1 and TMP_REG3. */
- sljit_s32 dst_r = TMP_REG2;
- sljit_s32 src1_r;
- sljit_sw src2_r = 0;
- sljit_s32 sugg_src2_r = TMP_REG2;
-
- if (!(flags & ALT_KEEP_CACHE)) {
- compiler->cache_arg = 0;
- compiler->cache_argw = 0;
- }
-
- if (dst != TMP_REG2) {
- if (FAST_IS_REG(dst)) {
- dst_r = dst;
- flags |= REG_DEST;
- if (flags & MOVE_OP)
- sugg_src2_r = dst_r;
- }
- else if ((dst & SLJIT_MEM) && !getput_arg_fast(compiler, flags | ARG_TEST, TMP_REG1, dst, dstw))
- flags |= SLOW_DEST;
- }
-
- if (flags & IMM_OP) {
- if ((src2 & SLJIT_IMM) && src2w) {
- if (src2w <= SIMM_MAX && src2w >= SIMM_MIN) {
- flags |= SRC2_IMM;
- src2_r = src2w;
- }
- }
- if (!(flags & SRC2_IMM) && (flags & CUMULATIVE_OP) && (src1 & SLJIT_IMM) && src1w) {
- if (src1w <= SIMM_MAX && src1w >= SIMM_MIN) {
- flags |= SRC2_IMM;
- src2_r = src1w;
-
- /* And swap arguments. */
- src1 = src2;
- src1w = src2w;
- src2 = SLJIT_IMM;
- /* src2w = src2_r unneeded. */
- }
- }
- }
-
- /* Source 1. */
- if (FAST_IS_REG(src1))
- src1_r = src1;
- else if (src1 & SLJIT_IMM) {
- if (src1w) {
- FAIL_IF(load_immediate(compiler, TMP_REG1, src1w));
- src1_r = TMP_REG1;
- }
- else
- src1_r = 0;
- }
- else {
- if (getput_arg_fast(compiler, flags | LOAD_DATA, TMP_REG1, src1, src1w))
- FAIL_IF(compiler->error);
- else
- flags |= SLOW_SRC1;
- src1_r = TMP_REG1;
- }
-
- /* Source 2. */
- if (FAST_IS_REG(src2)) {
- src2_r = src2;
- flags |= REG2_SOURCE;
- if ((flags & (REG_DEST | MOVE_OP)) == MOVE_OP)
- dst_r = src2_r;
- }
- else if (src2 & SLJIT_IMM) {
- if (!(flags & SRC2_IMM)) {
- if (src2w) {
- FAIL_IF(load_immediate(compiler, sugg_src2_r, src2w));
- src2_r = sugg_src2_r;
- }
- else {
- src2_r = 0;
- if (flags & MOVE_OP) {
- if (dst & SLJIT_MEM)
- dst_r = 0;
- else
- op = SLJIT_MOV;
- }
- }
- }
- }
- else {
- if (getput_arg_fast(compiler, flags | LOAD_DATA, sugg_src2_r, src2, src2w))
- FAIL_IF(compiler->error);
- else
- flags |= SLOW_SRC2;
- src2_r = sugg_src2_r;
- }
-
- if ((flags & (SLOW_SRC1 | SLOW_SRC2)) == (SLOW_SRC1 | SLOW_SRC2)) {
- SLJIT_ASSERT(src2_r == TMP_REG2);
- if (!can_cache(src1, src1w, src2, src2w) && can_cache(src1, src1w, dst, dstw)) {
- FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, TMP_REG2, src2, src2w, src1, src1w));
- FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, TMP_REG1, src1, src1w, dst, dstw));
- }
- else {
- FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, TMP_REG1, src1, src1w, src2, src2w));
- FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, TMP_REG2, src2, src2w, dst, dstw));
- }
- }
- else if (flags & SLOW_SRC1)
- FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, TMP_REG1, src1, src1w, dst, dstw));
- else if (flags & SLOW_SRC2)
- FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, sugg_src2_r, src2, src2w, dst, dstw));
-
- FAIL_IF(emit_single_op(compiler, op, flags, dst_r, src1_r, src2_r));
-
- if (dst & SLJIT_MEM) {
- if (!(flags & SLOW_DEST)) {
- getput_arg_fast(compiler, flags, dst_r, dst, dstw);
- return compiler->error;
- }
- return getput_arg(compiler, flags, dst_r, dst, dstw, 0, 0);
- }
-
- return SLJIT_SUCCESS;
-}
-
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op0(struct sljit_compiler *compiler, sljit_s32 op)
-{
- CHECK_ERROR();
- CHECK(check_sljit_emit_op0(compiler, op));
-
- op = GET_OPCODE(op);
- switch (op) {
- case SLJIT_BREAKPOINT:
- return push_inst(compiler, TA, UNMOVABLE_INS);
- case SLJIT_NOP:
- return push_inst(compiler, NOP, UNMOVABLE_INS);
- case SLJIT_LMUL_UW:
- case SLJIT_LMUL_SW:
-#if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32)
- FAIL_IF(push_inst(compiler, (op == SLJIT_LMUL_UW ? UMUL : SMUL) | D(SLJIT_R0) | S1(SLJIT_R0) | S2(SLJIT_R1), DR(SLJIT_R0)));
- return push_inst(compiler, RDY | D(SLJIT_R1), DR(SLJIT_R1));
-#else
-#error "Implementation required"
-#endif
- case SLJIT_DIVMOD_UW:
- case SLJIT_DIVMOD_SW:
- case SLJIT_DIV_UW:
- case SLJIT_DIV_SW:
- SLJIT_COMPILE_ASSERT((SLJIT_DIVMOD_UW & 0x2) == 0 && SLJIT_DIV_UW - 0x2 == SLJIT_DIVMOD_UW, bad_div_opcode_assignments);
-#if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32)
- if ((op | 0x2) == SLJIT_DIV_UW)
- FAIL_IF(push_inst(compiler, WRY | S1(0), MOVABLE_INS));
- else {
- FAIL_IF(push_inst(compiler, SRA | D(TMP_REG1) | S1(SLJIT_R0) | IMM(31), DR(TMP_REG1)));
- FAIL_IF(push_inst(compiler, WRY | S1(TMP_REG1), MOVABLE_INS));
- }
- if (op <= SLJIT_DIVMOD_SW)
- FAIL_IF(push_inst(compiler, OR | D(TMP_REG2) | S1(0) | S2(SLJIT_R0), DR(TMP_REG2)));
- FAIL_IF(push_inst(compiler, ((op | 0x2) == SLJIT_DIV_UW ? UDIV : SDIV) | D(SLJIT_R0) | S1(SLJIT_R0) | S2(SLJIT_R1), DR(SLJIT_R0)));
- if (op >= SLJIT_DIV_UW)
- return SLJIT_SUCCESS;
- FAIL_IF(push_inst(compiler, SMUL | D(SLJIT_R1) | S1(SLJIT_R0) | S2(SLJIT_R1), DR(SLJIT_R1)));
- return push_inst(compiler, SUB | D(SLJIT_R1) | S1(TMP_REG2) | S2(SLJIT_R1), DR(SLJIT_R1));
-#else
-#error "Implementation required"
-#endif
- case SLJIT_ENDBR:
- case SLJIT_SKIP_FRAMES_BEFORE_RETURN:
- return SLJIT_SUCCESS;
- }
-
- return SLJIT_SUCCESS;
-}
-
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compiler, sljit_s32 op,
- sljit_s32 dst, sljit_sw dstw,
- sljit_s32 src, sljit_sw srcw)
-{
- sljit_u32 flags = HAS_FLAGS(op) ? SET_FLAGS : 0;
-
- CHECK_ERROR();
- CHECK(check_sljit_emit_op1(compiler, op, dst, dstw, src, srcw));
- ADJUST_LOCAL_OFFSET(dst, dstw);
- ADJUST_LOCAL_OFFSET(src, srcw);
-
- op = GET_OPCODE(op);
- switch (op) {
- case SLJIT_MOV:
-#if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32)
- case SLJIT_MOV_U32:
- case SLJIT_MOV_S32:
- case SLJIT_MOV32:
-#endif
- case SLJIT_MOV_P:
- return emit_op(compiler, SLJIT_MOV, flags | WORD_DATA | MOVE_OP, dst, dstw, TMP_REG1, 0, src, srcw);
-
- case SLJIT_MOV_U8:
- return emit_op(compiler, SLJIT_MOV_U8, flags | BYTE_DATA | MOVE_OP, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_u8)srcw : srcw);
-
- case SLJIT_MOV_S8:
- return emit_op(compiler, SLJIT_MOV_S8, flags | BYTE_DATA | SIGNED_DATA | MOVE_OP, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s8)srcw : srcw);
-
- case SLJIT_MOV_U16:
- return emit_op(compiler, SLJIT_MOV_U16, flags | HALF_DATA | MOVE_OP, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_u16)srcw : srcw);
-
- case SLJIT_MOV_S16:
- return emit_op(compiler, SLJIT_MOV_S16, flags | HALF_DATA | SIGNED_DATA | MOVE_OP, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s16)srcw : srcw);
-
- case SLJIT_NOT:
- case SLJIT_CLZ:
- return emit_op(compiler, op, flags, dst, dstw, TMP_REG1, 0, src, srcw);
- }
-
- return SLJIT_SUCCESS;
-}
-
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compiler, sljit_s32 op,
- sljit_s32 dst, sljit_sw dstw,
- sljit_s32 src1, sljit_sw src1w,
- sljit_s32 src2, sljit_sw src2w)
-{
- sljit_u32 flags = HAS_FLAGS(op) ? SET_FLAGS : 0;
-
- CHECK_ERROR();
- CHECK(check_sljit_emit_op2(compiler, op, 0, dst, dstw, src1, src1w, src2, src2w));
- ADJUST_LOCAL_OFFSET(dst, dstw);
- ADJUST_LOCAL_OFFSET(src1, src1w);
- ADJUST_LOCAL_OFFSET(src2, src2w);
-
- op = GET_OPCODE(op);
- switch (op) {
- case SLJIT_ADD:
- case SLJIT_ADDC:
- case SLJIT_MUL:
- case SLJIT_AND:
- case SLJIT_OR:
- case SLJIT_XOR:
- return emit_op(compiler, op, flags | CUMULATIVE_OP | IMM_OP, dst, dstw, src1, src1w, src2, src2w);
-
- case SLJIT_SUB:
- case SLJIT_SUBC:
- return emit_op(compiler, op, flags | IMM_OP, dst, dstw, src1, src1w, src2, src2w);
-
- case SLJIT_SHL:
- case SLJIT_LSHR:
- case SLJIT_ASHR:
-#if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32)
- if (src2 & SLJIT_IMM)
- src2w &= 0x1f;
-#else
- SLJIT_UNREACHABLE();
-#endif
- return emit_op(compiler, op, flags | IMM_OP, dst, dstw, src1, src1w, src2, src2w);
- }
-
- return SLJIT_SUCCESS;
-}
-
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2u(struct sljit_compiler *compiler, sljit_s32 op,
- sljit_s32 src1, sljit_sw src1w,
- sljit_s32 src2, sljit_sw src2w)
-{
- CHECK_ERROR();
- CHECK(check_sljit_emit_op2(compiler, op, 1, 0, 0, src1, src1w, src2, src2w));
-
-#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) \
- || (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
- compiler->skip_checks = 1;
-#endif
- return sljit_emit_op2(compiler, op, TMP_REG2, 0, src1, src1w, src2, src2w);
-}
-
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_src(struct sljit_compiler *compiler, sljit_s32 op,
- sljit_s32 src, sljit_sw srcw)
-{
- CHECK_ERROR();
- CHECK(check_sljit_emit_op_src(compiler, op, src, srcw));
- ADJUST_LOCAL_OFFSET(src, srcw);
-
- switch (op) {
- case SLJIT_FAST_RETURN:
- if (FAST_IS_REG(src))
- FAIL_IF(push_inst(compiler, OR | D(TMP_LINK) | S1(0) | S2(src), DR(TMP_LINK)));
- else
- FAIL_IF(emit_op_mem(compiler, WORD_DATA | LOAD_DATA, TMP_LINK, src, srcw));
-
- FAIL_IF(push_inst(compiler, JMPL | D(0) | S1(TMP_LINK) | IMM(8), UNMOVABLE_INS));
- return push_inst(compiler, NOP, UNMOVABLE_INS);
- case SLJIT_SKIP_FRAMES_BEFORE_FAST_RETURN:
- case SLJIT_PREFETCH_L1:
- case SLJIT_PREFETCH_L2:
- case SLJIT_PREFETCH_L3:
- case SLJIT_PREFETCH_ONCE:
- return SLJIT_SUCCESS;
- }
-
- return SLJIT_SUCCESS;
-}
-
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 reg)
-{
- CHECK_REG_INDEX(check_sljit_get_register_index(reg));
- return reg_map[reg];
-}
-
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_float_register_index(sljit_s32 reg)
-{
- CHECK_REG_INDEX(check_sljit_get_float_register_index(reg));
- return freg_map[reg];
-}
-
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *compiler,
- void *instruction, sljit_u32 size)
-{
- CHECK_ERROR();
- CHECK(check_sljit_emit_op_custom(compiler, instruction, size));
-
- return push_inst(compiler, *(sljit_ins*)instruction, UNMOVABLE_INS);
-}
-
-/* --------------------------------------------------------------------- */
-/* Floating point operators */
-/* --------------------------------------------------------------------- */
-
-#define FLOAT_DATA(op) ((sljit_ins)DOUBLE_DATA | (((sljit_ins)(op) & SLJIT_32) >> 7))
-#define SELECT_FOP(op, single, double) ((op & SLJIT_32) ? single : double)
-#define FLOAT_TMP_MEM_OFFSET (22 * sizeof(sljit_sw))
-
-static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_compiler *compiler, sljit_s32 op,
- sljit_s32 dst, sljit_sw dstw,
- sljit_s32 src, sljit_sw srcw)
-{
- if (src & SLJIT_MEM) {
- FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src, srcw, dst, dstw));
- src = TMP_FREG1;
- }
-
- FAIL_IF(push_inst(compiler, SELECT_FOP(op, FSTOI, FDTOI) | FD(TMP_FREG1) | FS2(src), MOVABLE_INS));
-
- if (FAST_IS_REG(dst)) {
- FAIL_IF(emit_op_mem2(compiler, SINGLE_DATA, TMP_FREG1, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET));
- return emit_op_mem2(compiler, WORD_DATA | LOAD_DATA, dst, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET);
- }
-
- /* Store the integer value from a floating point register. */
- return emit_op_mem2(compiler, SINGLE_DATA, TMP_FREG1, dst, dstw, 0, 0);
-}
-
-static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_sw(struct sljit_compiler *compiler, sljit_s32 op,
- sljit_s32 dst, sljit_sw dstw,
- sljit_s32 src, sljit_sw srcw)
-{
- sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
-
- if (src & SLJIT_IMM) {
-#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_S32)
- srcw = (sljit_s32)srcw;
-#endif
- FAIL_IF(load_immediate(compiler, TMP_REG1, srcw));
- src = TMP_REG1;
- srcw = 0;
- }
-
- if (FAST_IS_REG(src)) {
- FAIL_IF(emit_op_mem2(compiler, WORD_DATA, src, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET));
- src = SLJIT_MEM1(SLJIT_SP);
- srcw = FLOAT_TMP_MEM_OFFSET;
- }
-
- FAIL_IF(emit_op_mem2(compiler, SINGLE_DATA | LOAD_DATA, TMP_FREG1, src, srcw, dst, dstw));
- FAIL_IF(push_inst(compiler, SELECT_FOP(op, FITOS, FITOD) | FD(dst_r) | FS2(TMP_FREG1), MOVABLE_INS));
-
- if (dst & SLJIT_MEM)
- return emit_op_mem2(compiler, FLOAT_DATA(op), TMP_FREG1, dst, dstw, 0, 0);
- return SLJIT_SUCCESS;
-}
-
-static SLJIT_INLINE sljit_s32 sljit_emit_fop1_cmp(struct sljit_compiler *compiler, sljit_s32 op,
- sljit_s32 src1, sljit_sw src1w,
- sljit_s32 src2, sljit_sw src2w)
-{
- if (src1 & SLJIT_MEM) {
- FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src1, src1w, src2, src2w));
- src1 = TMP_FREG1;
- }
-
- if (src2 & SLJIT_MEM) {
- FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG2, src2, src2w, 0, 0));
- src2 = TMP_FREG2;
- }
-
- return push_inst(compiler, SELECT_FOP(op, FCMPS, FCMPD) | FS1(src1) | FS2(src2), FCC_IS_SET | MOVABLE_INS);
-}
-
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compiler, sljit_s32 op,
- sljit_s32 dst, sljit_sw dstw,
- sljit_s32 src, sljit_sw srcw)
-{
- sljit_s32 dst_r;
-
- CHECK_ERROR();
- compiler->cache_arg = 0;
- compiler->cache_argw = 0;
-
- SLJIT_COMPILE_ASSERT((SLJIT_32 == 0x100) && !(DOUBLE_DATA & 0x2), float_transfer_bit_error);
- SELECT_FOP1_OPERATION_WITH_CHECKS(compiler, op, dst, dstw, src, srcw);
-
- if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_F32)
- op ^= SLJIT_32;
-
- dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
-
- if (src & SLJIT_MEM) {
- FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op) | LOAD_DATA, dst_r, src, srcw, dst, dstw));
- src = dst_r;
- }
-
- switch (GET_OPCODE(op)) {
- case SLJIT_MOV_F64:
- if (src != dst_r) {
- if (dst_r != TMP_FREG1) {
- FAIL_IF(push_inst(compiler, FMOVS | FD(dst_r) | FS2(src), MOVABLE_INS));
- if (!(op & SLJIT_32))
- FAIL_IF(push_inst(compiler, FMOVS | FDN(dst_r) | FS2N(src), MOVABLE_INS));
- }
- else
- dst_r = src;
- }
- break;
- case SLJIT_NEG_F64:
- FAIL_IF(push_inst(compiler, FNEGS | FD(dst_r) | FS2(src), MOVABLE_INS));
- if (dst_r != src && !(op & SLJIT_32))
- FAIL_IF(push_inst(compiler, FMOVS | FDN(dst_r) | FS2N(src), MOVABLE_INS));
- break;
- case SLJIT_ABS_F64:
- FAIL_IF(push_inst(compiler, FABSS | FD(dst_r) | FS2(src), MOVABLE_INS));
- if (dst_r != src && !(op & SLJIT_32))
- FAIL_IF(push_inst(compiler, FMOVS | FDN(dst_r) | FS2N(src), MOVABLE_INS));
- break;
- case SLJIT_CONV_F64_FROM_F32:
- FAIL_IF(push_inst(compiler, SELECT_FOP(op, FSTOD, FDTOS) | FD(dst_r) | FS2(src), MOVABLE_INS));
- op ^= SLJIT_32;
- break;
- }
-
- if (dst & SLJIT_MEM)
- FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op), dst_r, dst, dstw, 0, 0));
- return SLJIT_SUCCESS;
-}
-
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compiler, sljit_s32 op,
- sljit_s32 dst, sljit_sw dstw,
- sljit_s32 src1, sljit_sw src1w,
- sljit_s32 src2, sljit_sw src2w)
-{
- sljit_s32 dst_r, flags = 0;
-
- CHECK_ERROR();
- CHECK(check_sljit_emit_fop2(compiler, op, dst, dstw, src1, src1w, src2, src2w));
- ADJUST_LOCAL_OFFSET(dst, dstw);
- ADJUST_LOCAL_OFFSET(src1, src1w);
- ADJUST_LOCAL_OFFSET(src2, src2w);
-
- compiler->cache_arg = 0;
- compiler->cache_argw = 0;
-
- dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG2;
-
- if (src1 & SLJIT_MEM) {
- if (getput_arg_fast(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src1, src1w)) {
- FAIL_IF(compiler->error);
- src1 = TMP_FREG1;
- } else
- flags |= SLOW_SRC1;
- }
-
- if (src2 & SLJIT_MEM) {
- if (getput_arg_fast(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG2, src2, src2w)) {
- FAIL_IF(compiler->error);
- src2 = TMP_FREG2;
- } else
- flags |= SLOW_SRC2;
- }
-
- if ((flags & (SLOW_SRC1 | SLOW_SRC2)) == (SLOW_SRC1 | SLOW_SRC2)) {
- if (!can_cache(src1, src1w, src2, src2w) && can_cache(src1, src1w, dst, dstw)) {
- FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG2, src2, src2w, src1, src1w));
- FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src1, src1w, dst, dstw));
- }
- else {
- FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src1, src1w, src2, src2w));
- FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG2, src2, src2w, dst, dstw));
- }
- }
- else if (flags & SLOW_SRC1)
- FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src1, src1w, dst, dstw));
- else if (flags & SLOW_SRC2)
- FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG2, src2, src2w, dst, dstw));
-
- if (flags & SLOW_SRC1)
- src1 = TMP_FREG1;
- if (flags & SLOW_SRC2)
- src2 = TMP_FREG2;
-
- switch (GET_OPCODE(op)) {
- case SLJIT_ADD_F64:
- FAIL_IF(push_inst(compiler, SELECT_FOP(op, FADDS, FADDD) | FD(dst_r) | FS1(src1) | FS2(src2), MOVABLE_INS));
- break;
-
- case SLJIT_SUB_F64:
- FAIL_IF(push_inst(compiler, SELECT_FOP(op, FSUBS, FSUBD) | FD(dst_r) | FS1(src1) | FS2(src2), MOVABLE_INS));
- break;
-
- case SLJIT_MUL_F64:
- FAIL_IF(push_inst(compiler, SELECT_FOP(op, FMULS, FMULD) | FD(dst_r) | FS1(src1) | FS2(src2), MOVABLE_INS));
- break;
-
- case SLJIT_DIV_F64:
- FAIL_IF(push_inst(compiler, SELECT_FOP(op, FDIVS, FDIVD) | FD(dst_r) | FS1(src1) | FS2(src2), MOVABLE_INS));
- break;
- }
-
- if (dst_r == TMP_FREG2)
- FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op), TMP_FREG2, dst, dstw, 0, 0));
-
- return SLJIT_SUCCESS;
-}
-
-#undef FLOAT_DATA
-#undef SELECT_FOP
-
-/* --------------------------------------------------------------------- */
-/* Other instructions */
-/* --------------------------------------------------------------------- */
-
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
-{
- CHECK_ERROR();
- CHECK(check_sljit_emit_fast_enter(compiler, dst, dstw));
- ADJUST_LOCAL_OFFSET(dst, dstw);
-
- if (FAST_IS_REG(dst))
- return push_inst(compiler, OR | D(dst) | S1(0) | S2(TMP_LINK), UNMOVABLE_INS);
-
- /* Memory. */
- FAIL_IF(emit_op_mem(compiler, WORD_DATA, TMP_LINK, dst, dstw));
- compiler->delay_slot = UNMOVABLE_INS;
- return SLJIT_SUCCESS;
-}
-
-/* --------------------------------------------------------------------- */
-/* Conditional instructions */
-/* --------------------------------------------------------------------- */
-
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emit_label(struct sljit_compiler *compiler)
-{
- struct sljit_label *label;
-
- CHECK_ERROR_PTR();
- CHECK_PTR(check_sljit_emit_label(compiler));
-
- if (compiler->last_label && compiler->last_label->size == compiler->size)
- return compiler->last_label;
-
- label = (struct sljit_label*)ensure_abuf(compiler, sizeof(struct sljit_label));
- PTR_FAIL_IF(!label);
- set_label(label, compiler);
- compiler->delay_slot = UNMOVABLE_INS;
- return label;
-}
-
-static sljit_ins get_cc(struct sljit_compiler *compiler, sljit_s32 type)
-{
- switch (type) {
- case SLJIT_EQUAL:
- case SLJIT_NOT_EQUAL_F64: /* Unordered. */
- return DA(0x1);
-
- case SLJIT_NOT_EQUAL:
- case SLJIT_EQUAL_F64:
- return DA(0x9);
-
- case SLJIT_LESS:
- case SLJIT_GREATER_F64: /* Unordered. */
- case SLJIT_CARRY:
- return DA(0x5);
-
- case SLJIT_GREATER_EQUAL:
- case SLJIT_LESS_EQUAL_F64:
- case SLJIT_NOT_CARRY:
- return DA(0xd);
-
- case SLJIT_GREATER:
- case SLJIT_GREATER_EQUAL_F64: /* Unordered. */
- return DA(0xc);
-
- case SLJIT_LESS_EQUAL:
- case SLJIT_LESS_F64:
- return DA(0x4);
-
- case SLJIT_SIG_LESS:
- return DA(0x3);
-
- case SLJIT_SIG_GREATER_EQUAL:
- return DA(0xb);
-
- case SLJIT_SIG_GREATER:
- return DA(0xa);
-
- case SLJIT_SIG_LESS_EQUAL:
- return DA(0x2);
-
- case SLJIT_OVERFLOW:
- if (!(compiler->status_flags_state & (SLJIT_CURRENT_FLAGS_ADD | SLJIT_CURRENT_FLAGS_SUB)))
- return DA(0x9);
- /* fallthrough */
-
- case SLJIT_UNORDERED_F64:
- return DA(0x7);
-
- case SLJIT_NOT_OVERFLOW:
- if (!(compiler->status_flags_state & (SLJIT_CURRENT_FLAGS_ADD | SLJIT_CURRENT_FLAGS_SUB)))
- return DA(0x1);
- /* fallthrough */
-
- case SLJIT_ORDERED_F64:
- return DA(0xf);
-
- default:
- SLJIT_UNREACHABLE();
- return DA(0x8);
- }
-}
-
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, sljit_s32 type)
-{
- struct sljit_jump *jump;
-
- CHECK_ERROR_PTR();
- CHECK_PTR(check_sljit_emit_jump(compiler, type));
-
- jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
- PTR_FAIL_IF(!jump);
- set_jump(jump, compiler, type & SLJIT_REWRITABLE_JUMP);
- type &= 0xff;
-
- if (type < SLJIT_EQUAL_F64) {
- jump->flags |= IS_COND;
- if (((compiler->delay_slot & DST_INS_MASK) != UNMOVABLE_INS) && !(compiler->delay_slot & ICC_IS_SET))
- jump->flags |= IS_MOVABLE;
-#if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32)
- PTR_FAIL_IF(push_inst(compiler, BICC | get_cc(compiler, type ^ 1) | 5, UNMOVABLE_INS));
-#else
-#error "Implementation required"
-#endif
- }
- else if (type < SLJIT_JUMP) {
- jump->flags |= IS_COND;
- if (((compiler->delay_slot & DST_INS_MASK) != UNMOVABLE_INS) && !(compiler->delay_slot & FCC_IS_SET))
- jump->flags |= IS_MOVABLE;
-#if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32)
- PTR_FAIL_IF(push_inst(compiler, FBFCC | get_cc(compiler, type ^ 1) | 5, UNMOVABLE_INS));
-#else
-#error "Implementation required"
-#endif
- }
- else {
- if ((compiler->delay_slot & DST_INS_MASK) != UNMOVABLE_INS)
- jump->flags |= IS_MOVABLE;
- if (type >= SLJIT_FAST_CALL)
- jump->flags |= IS_CALL;
- }
-
- PTR_FAIL_IF(emit_const(compiler, TMP_REG1, 0));
- PTR_FAIL_IF(push_inst(compiler, JMPL | D(type >= SLJIT_FAST_CALL ? TMP_LINK : 0) | S1(TMP_REG1) | IMM(0), UNMOVABLE_INS));
- jump->addr = compiler->size;
- PTR_FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));
-
- return jump;
-}
-
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_call(struct sljit_compiler *compiler, sljit_s32 type,
- sljit_s32 arg_types)
-{
- CHECK_ERROR_PTR();
- CHECK_PTR(check_sljit_emit_call(compiler, type, arg_types));
-
- PTR_FAIL_IF(call_with_args(compiler, arg_types, NULL));
-
-#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) \
- || (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
- compiler->skip_checks = 1;
-#endif
-
- return sljit_emit_jump(compiler, type);
-}
-
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 src, sljit_sw srcw)
-{
- struct sljit_jump *jump = NULL;
- sljit_s32 src_r;
-
- CHECK_ERROR();
- CHECK(check_sljit_emit_ijump(compiler, type, src, srcw));
- ADJUST_LOCAL_OFFSET(src, srcw);
-
- if (FAST_IS_REG(src))
- src_r = src;
- else if (src & SLJIT_IMM) {
- jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
- FAIL_IF(!jump);
- set_jump(jump, compiler, JUMP_ADDR);
- jump->u.target = (sljit_uw)srcw;
-
- if ((compiler->delay_slot & DST_INS_MASK) != UNMOVABLE_INS)
- jump->flags |= IS_MOVABLE;
- if (type >= SLJIT_FAST_CALL)
- jump->flags |= IS_CALL;
-
- FAIL_IF(emit_const(compiler, TMP_REG1, 0));
- src_r = TMP_REG1;
- }
- else {
- FAIL_IF(emit_op_mem(compiler, WORD_DATA | LOAD_DATA, TMP_REG1, src, srcw));
- src_r = TMP_REG1;
- }
-
- FAIL_IF(push_inst(compiler, JMPL | D(type >= SLJIT_FAST_CALL ? TMP_LINK : 0) | S1(src_r) | IMM(0), UNMOVABLE_INS));
- if (jump)
- jump->addr = compiler->size;
- return push_inst(compiler, NOP, UNMOVABLE_INS);
-}
-
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_icall(struct sljit_compiler *compiler, sljit_s32 type,
- sljit_s32 arg_types,
- sljit_s32 src, sljit_sw srcw)
-{
- CHECK_ERROR();
- CHECK(check_sljit_emit_icall(compiler, type, arg_types, src, srcw));
-
- if (src & SLJIT_MEM) {
- ADJUST_LOCAL_OFFSET(src, srcw);
- FAIL_IF(emit_op_mem(compiler, WORD_DATA | LOAD_DATA, TMP_REG1, src, srcw));
- src = TMP_REG1;
- }
-
- FAIL_IF(call_with_args(compiler, arg_types, &src));
-
-#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) \
- || (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
- compiler->skip_checks = 1;
-#endif
-
- return sljit_emit_ijump(compiler, type, src, srcw);
-}
-
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_s32 op,
- sljit_s32 dst, sljit_sw dstw,
- sljit_s32 type)
-{
- sljit_s32 reg;
- sljit_u32 flags = HAS_FLAGS(op) ? SET_FLAGS : 0;
-
- CHECK_ERROR();
- CHECK(check_sljit_emit_op_flags(compiler, op, dst, dstw, type));
- ADJUST_LOCAL_OFFSET(dst, dstw);
-
-#if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32)
- op = GET_OPCODE(op);
- reg = (op < SLJIT_ADD && FAST_IS_REG(dst)) ? dst : TMP_REG2;
-
- compiler->cache_arg = 0;
- compiler->cache_argw = 0;
-
- if (op >= SLJIT_ADD && (dst & SLJIT_MEM))
- FAIL_IF(emit_op_mem2(compiler, WORD_DATA | LOAD_DATA, TMP_REG1, dst, dstw, dst, dstw));
-
- type &= 0xff;
- if (type < SLJIT_EQUAL_F64)
- FAIL_IF(push_inst(compiler, BICC | get_cc(compiler, type) | 3, UNMOVABLE_INS));
- else
- FAIL_IF(push_inst(compiler, FBFCC | get_cc(compiler, type) | 3, UNMOVABLE_INS));
-
- FAIL_IF(push_inst(compiler, OR | D(reg) | S1(0) | IMM(1), UNMOVABLE_INS));
- FAIL_IF(push_inst(compiler, OR | D(reg) | S1(0) | IMM(0), UNMOVABLE_INS));
-
- if (op >= SLJIT_ADD) {
- flags |= CUMULATIVE_OP | IMM_OP | ALT_KEEP_CACHE;
- if (dst & SLJIT_MEM)
- return emit_op(compiler, op, flags, dst, dstw, TMP_REG1, 0, TMP_REG2, 0);
- return emit_op(compiler, op, flags, dst, 0, dst, 0, TMP_REG2, 0);
- }
-
- if (!(dst & SLJIT_MEM))
- return SLJIT_SUCCESS;
-
- return emit_op_mem(compiler, WORD_DATA, TMP_REG2, dst, dstw);
-#else
-#error "Implementation required"
-#endif
-}
-
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_cmov(struct sljit_compiler *compiler, sljit_s32 type,
- sljit_s32 dst_reg,
- sljit_s32 src, sljit_sw srcw)
-{
- CHECK_ERROR();
- CHECK(check_sljit_emit_cmov(compiler, type, dst_reg, src, srcw));
-
-#if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32)
- return sljit_emit_cmov_generic(compiler, type, dst_reg, src, srcw);
-#else
-#error "Implementation required"
-#endif
-}
-
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw init_value)
-{
- struct sljit_const *const_;
- sljit_s32 dst_r;
-
- CHECK_ERROR_PTR();
- CHECK_PTR(check_sljit_emit_const(compiler, dst, dstw, init_value));
- ADJUST_LOCAL_OFFSET(dst, dstw);
-
- const_ = (struct sljit_const*)ensure_abuf(compiler, sizeof(struct sljit_const));
- PTR_FAIL_IF(!const_);
- set_const(const_, compiler);
-
- dst_r = FAST_IS_REG(dst) ? dst : TMP_REG2;
- PTR_FAIL_IF(emit_const(compiler, dst_r, init_value));
-
- if (dst & SLJIT_MEM)
- PTR_FAIL_IF(emit_op_mem(compiler, WORD_DATA, TMP_REG2, dst, dstw));
- return const_;
-}
-
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_put_label* sljit_emit_put_label(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
-{
- struct sljit_put_label *put_label;
- sljit_s32 dst_r;
-
- CHECK_ERROR_PTR();
- CHECK_PTR(check_sljit_emit_put_label(compiler, dst, dstw));
- ADJUST_LOCAL_OFFSET(dst, dstw);
-
- put_label = (struct sljit_put_label*)ensure_abuf(compiler, sizeof(struct sljit_put_label));
- PTR_FAIL_IF(!put_label);
- set_put_label(put_label, compiler, 0);
-
- dst_r = FAST_IS_REG(dst) ? dst : TMP_REG2;
- PTR_FAIL_IF(emit_const(compiler, dst_r, 0));
-
- if (dst & SLJIT_MEM)
- PTR_FAIL_IF(emit_op_mem(compiler, WORD_DATA, TMP_REG2, dst, dstw));
- return put_label;
-}
diff --git a/src/3rdparty/pcre2/src/sljit/sljitNativeX86_32.c b/src/3rdparty/pcre2/src/sljit/sljitNativeX86_32.c
index 08da03026d..59ea04a5c8 100644
--- a/src/3rdparty/pcre2/src/sljit/sljitNativeX86_32.c
+++ b/src/3rdparty/pcre2/src/sljit/sljitNativeX86_32.c
@@ -62,21 +62,19 @@ static sljit_u8* emit_x86_instruction(struct sljit_compiler *compiler, sljit_uw
/* Both size flags cannot be switched on. */
SLJIT_ASSERT((flags & (EX86_BYTE_ARG | EX86_HALF_ARG)) != (EX86_BYTE_ARG | EX86_HALF_ARG));
/* SSE2 and immediate is not possible. */
- SLJIT_ASSERT(!(a & SLJIT_IMM) || !(flags & EX86_SSE2));
- SLJIT_ASSERT((flags & (EX86_PREF_F2 | EX86_PREF_F3)) != (EX86_PREF_F2 | EX86_PREF_F3)
- && (flags & (EX86_PREF_F2 | EX86_PREF_66)) != (EX86_PREF_F2 | EX86_PREF_66)
- && (flags & (EX86_PREF_F3 | EX86_PREF_66)) != (EX86_PREF_F3 | EX86_PREF_66));
+ SLJIT_ASSERT(a != SLJIT_IMM || !(flags & EX86_SSE2));
+ SLJIT_ASSERT(((flags & (EX86_PREF_F2 | EX86_PREF_F3 | EX86_PREF_66))
+ & ((flags & (EX86_PREF_F2 | EX86_PREF_F3 | EX86_PREF_66)) - 1)) == 0);
+ SLJIT_ASSERT((flags & (EX86_VEX_EXT | EX86_REX)) != EX86_VEX_EXT);
size &= 0xf;
- inst_size = size;
+ /* The mod r/m byte is always present. */
+ inst_size = size + 1;
- if (flags & (EX86_PREF_F2 | EX86_PREF_F3))
- inst_size++;
- if (flags & EX86_PREF_66)
+ if (flags & (EX86_PREF_F2 | EX86_PREF_F3 | EX86_PREF_66))
inst_size++;
/* Calculate size of b. */
- inst_size += 1; /* mod r/m byte. */
if (b & SLJIT_MEM) {
if (!(b & REG_MASK))
inst_size += sizeof(sljit_sw);
@@ -87,8 +85,7 @@ static sljit_u8* emit_x86_instruction(struct sljit_compiler *compiler, sljit_uw
inst_size += sizeof(sljit_s8);
else
inst_size += sizeof(sljit_sw);
- }
- else if (reg_map[b & REG_MASK] == 5) {
+ } else if (reg_map[b & REG_MASK] == 5) {
/* Swap registers if possible. */
if ((b & OFFS_REG_MASK) && (immb & 0x3) == 0 && reg_map[OFFS_REG(b)] != 5)
b = SLJIT_MEM | OFFS_REG(b) | TO_OFFS_REG(b & REG_MASK);
@@ -105,15 +102,14 @@ static sljit_u8* emit_x86_instruction(struct sljit_compiler *compiler, sljit_uw
}
/* Calculate size of a. */
- if (a & SLJIT_IMM) {
+ if (a == SLJIT_IMM) {
if (flags & EX86_BIN_INS) {
if (imma <= 127 && imma >= -128) {
inst_size += 1;
flags |= EX86_BYTE_ARG;
} else
inst_size += 4;
- }
- else if (flags & EX86_SHIFT_INS) {
+ } else if (flags & EX86_SHIFT_INS) {
SLJIT_ASSERT(imma <= 0x1f);
if (imma != 1) {
inst_size++;
@@ -125,8 +121,7 @@ static sljit_u8* emit_x86_instruction(struct sljit_compiler *compiler, sljit_uw
inst_size += sizeof(short);
else
inst_size += sizeof(sljit_sw);
- }
- else
+ } else
SLJIT_ASSERT(!(flags & EX86_SHIFT_INS) || a == SLJIT_PREF_SHIFT_REG);
inst = (sljit_u8*)ensure_buf(compiler, 1 + inst_size);
@@ -136,27 +131,26 @@ static sljit_u8* emit_x86_instruction(struct sljit_compiler *compiler, sljit_uw
INC_SIZE(inst_size);
if (flags & EX86_PREF_F2)
*inst++ = 0xf2;
- if (flags & EX86_PREF_F3)
+ else if (flags & EX86_PREF_F3)
*inst++ = 0xf3;
- if (flags & EX86_PREF_66)
+ else if (flags & EX86_PREF_66)
*inst++ = 0x66;
buf_ptr = inst + size;
/* Encode mod/rm byte. */
if (!(flags & EX86_SHIFT_INS)) {
- if ((flags & EX86_BIN_INS) && (a & SLJIT_IMM))
+ if ((flags & EX86_BIN_INS) && a == SLJIT_IMM)
*inst = (flags & EX86_BYTE_ARG) ? GROUP_BINARY_83 : GROUP_BINARY_81;
- if (a & SLJIT_IMM)
+ if (a == SLJIT_IMM)
*buf_ptr = 0;
else if (!(flags & EX86_SSE2_OP1))
*buf_ptr = U8(reg_map[a] << 3);
else
- *buf_ptr = U8(a << 3);
- }
- else {
- if (a & SLJIT_IMM) {
+ *buf_ptr = U8(freg_map[a] << 3);
+ } else {
+ if (a == SLJIT_IMM) {
if (imma == 1)
*inst = GROUP_SHIFT_1;
else
@@ -167,7 +161,7 @@ static sljit_u8* emit_x86_instruction(struct sljit_compiler *compiler, sljit_uw
}
if (!(b & SLJIT_MEM)) {
- *buf_ptr = U8(*buf_ptr | MOD_REG | (!(flags & EX86_SSE2_OP2) ? reg_map[b] : b));
+ *buf_ptr = U8(*buf_ptr | MOD_REG | (!(flags & EX86_SSE2_OP2) ? reg_map[b] : freg_map[b]));
buf_ptr++;
} else if (b & REG_MASK) {
reg_map_b = reg_map[b & REG_MASK];
@@ -183,8 +177,9 @@ static sljit_u8* emit_x86_instruction(struct sljit_compiler *compiler, sljit_uw
if (!(b & OFFS_REG_MASK))
*buf_ptr++ |= reg_map_b;
else {
- *buf_ptr++ |= 0x04;
- *buf_ptr++ = U8(reg_map_b | (reg_map[OFFS_REG(b)] << 3));
+ buf_ptr[0] |= 0x04;
+ buf_ptr[1] = U8(reg_map_b | (reg_map[OFFS_REG(b)] << 3));
+ buf_ptr += 2;
}
if (immb != 0 || reg_map_b == 5) {
@@ -195,25 +190,24 @@ static sljit_u8* emit_x86_instruction(struct sljit_compiler *compiler, sljit_uw
buf_ptr += sizeof(sljit_sw);
}
}
- }
- else {
+ } else {
if (reg_map_b == 5)
*buf_ptr |= 0x40;
- *buf_ptr++ |= 0x04;
- *buf_ptr++ = U8(reg_map_b | (reg_map[OFFS_REG(b)] << 3) | (immb << 6));
+ buf_ptr[0] |= 0x04;
+ buf_ptr[1] = U8(reg_map_b | (reg_map[OFFS_REG(b)] << 3) | (immb << 6));
+ buf_ptr += 2;
if (reg_map_b == 5)
*buf_ptr++ = 0;
}
- }
- else {
+ } else {
*buf_ptr++ |= 0x05;
sljit_unaligned_store_sw(buf_ptr, immb); /* 32 bit displacement. */
buf_ptr += sizeof(sljit_sw);
}
- if (a & SLJIT_IMM) {
+ if (a == SLJIT_IMM) {
if (flags & EX86_BYTE_ARG)
*buf_ptr = U8(imma);
else if (flags & EX86_HALF_ARG)
@@ -222,35 +216,92 @@ static sljit_u8* emit_x86_instruction(struct sljit_compiler *compiler, sljit_uw
sljit_unaligned_store_sw(buf_ptr, imma);
}
- return !(flags & EX86_SHIFT_INS) ? inst : (inst + 1);
+ return inst;
+}
+
+static sljit_s32 emit_vex_instruction(struct sljit_compiler *compiler, sljit_uw op,
+ /* The first and second register operands. */
+ sljit_s32 a, sljit_s32 v,
+ /* The general operand (not immediate). */
+ sljit_s32 b, sljit_sw immb)
+{
+ sljit_u8 *inst;
+ sljit_u8 vex = 0;
+ sljit_u8 vex_m = 0;
+ sljit_uw size;
+
+ SLJIT_ASSERT(((op & (EX86_PREF_F2 | EX86_PREF_F3 | EX86_PREF_66))
+ & ((op & (EX86_PREF_F2 | EX86_PREF_F3 | EX86_PREF_66)) - 1)) == 0);
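+ /* Sketch note (editorial comment): (x & (x - 1)) == 0 verifies that at most one of the mandatory prefix flags is set. */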
+
+ if (op & VEX_OP_0F38)
+ vex_m = 0x2;
+ else if (op & VEX_OP_0F3A)
+ vex_m = 0x3;
+
+ if (op & VEX_W) {
+ if (vex_m == 0)
+ vex_m = 0x1;
+
+ vex |= 0x80;
+ }
+
+ if (op & EX86_PREF_66)
+ vex |= 0x1;
+ else if (op & EX86_PREF_F2)
+ vex |= 0x3;
+ else if (op & EX86_PREF_F3)
+ vex |= 0x2;
+
+ op &= ~(EX86_PREF_66 | EX86_PREF_F2 | EX86_PREF_F3);
+
+ if (op & VEX_256)
+ vex |= 0x4;
+
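+ /* The second source register (vvvv) is stored inverted in bits 6:3 of the VEX byte. */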
+ vex = U8(vex | ((((op & VEX_SSE2_OPV) ? freg_map[v] : reg_map[v]) ^ 0xf) << 3));
+
+ size = op & ~(sljit_uw)0xff;
+ size |= (vex_m == 0) ? 3 : 4;
+
+ inst = emit_x86_instruction(compiler, size, a, 0, b, immb);
+ FAIL_IF(!inst);
+
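+ /* The two byte 0xc5 form implies the 0F opcode map; any other map requires the three byte 0xc4 form. */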
+ if (vex_m == 0) {
+ inst[0] = 0xc5;
+ inst[1] = U8(vex | 0x80);
+ inst[2] = U8(op);
+ return SLJIT_SUCCESS;
+ }
+
+ inst[0] = 0xc4;
+ inst[1] = U8(vex_m | 0xe0);
+ inst[2] = vex;
+ inst[3] = U8(op);
+ return SLJIT_SUCCESS;
}
/* --------------------------------------------------------------------- */
/* Enter / return */
/* --------------------------------------------------------------------- */
-static sljit_u8* generate_far_jump_code(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_sw executable_offset)
+static sljit_u8* detect_far_jump_type(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_sw executable_offset)
{
sljit_uw type = jump->flags >> TYPE_SHIFT;
if (type == SLJIT_JUMP) {
*code_ptr++ = JMP_i32;
- jump->addr++;
- }
- else if (type >= SLJIT_FAST_CALL) {
+ } else if (type >= SLJIT_FAST_CALL) {
*code_ptr++ = CALL_i32;
- jump->addr++;
- }
- else {
+ } else {
*code_ptr++ = GROUP_0F;
*code_ptr++ = get_jump_code(type);
- jump->addr += 2;
}
- if (jump->flags & JUMP_LABEL)
- jump->flags |= PATCH_MW;
- else
+ jump->addr = (sljit_uw)code_ptr;
+
+ if (jump->flags & JUMP_ADDR)
sljit_unaligned_store_sw(code_ptr, (sljit_sw)(jump->u.target - (jump->addr + 4) - (sljit_uw)executable_offset));
+ else
+ jump->flags |= PATCH_MW;
code_ptr += 4;
return code_ptr;
@@ -578,8 +629,6 @@ static sljit_s32 emit_stack_frame_release(struct sljit_compiler *compiler, sljit
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return_void(struct sljit_compiler *compiler)
{
- sljit_u8 *inst;
-
CHECK_ERROR();
CHECK(check_sljit_emit_return_void(compiler));
@@ -588,11 +637,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return_void(struct sljit_compiler
FAIL_IF(emit_stack_frame_release(compiler, 0));
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
- FAIL_IF(!inst);
- INC_SIZE(1);
- RET();
- return SLJIT_SUCCESS;
+ return emit_byte(compiler, RET_near);
}
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return_to(struct sljit_compiler *compiler,
@@ -782,7 +827,7 @@ static sljit_s32 tail_call_with_args(struct sljit_compiler *compiler,
offset = stack_size + compiler->local_size;
- if (!(src & SLJIT_IMM) && src != SLJIT_R0) {
+ if (src != SLJIT_IMM && src != SLJIT_R0) {
if (word_arg_count >= 1) {
EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), 0, SLJIT_R0, 0);
r2_offset = sizeof(sljit_sw);
@@ -836,7 +881,7 @@ static sljit_s32 tail_call_with_args(struct sljit_compiler *compiler,
stack_size = args_size + SSIZE_OF(sw);
- if (word_arg_count >= 1 && !(src & SLJIT_IMM) && src != SLJIT_R0) {
+ if (word_arg_count >= 1 && src != SLJIT_IMM && src != SLJIT_R0) {
r2_offset = SSIZE_OF(sw);
stack_size += SSIZE_OF(sw);
}
@@ -865,7 +910,7 @@ static sljit_s32 tail_call_with_args(struct sljit_compiler *compiler,
EMIT_MOV(compiler, SLJIT_R2, 0, SLJIT_MEM1(SLJIT_SP), word_arg4_offset);
}
- if (!(src & SLJIT_IMM) && src != SLJIT_R0) {
+ if (src != SLJIT_IMM && src != SLJIT_R0) {
if (word_arg_count >= 1) {
SLJIT_ASSERT(r2_offset == sizeof(sljit_sw));
EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), 0, SLJIT_R0, 0);
@@ -952,13 +997,7 @@ static sljit_s32 emit_tail_call_end(struct sljit_compiler *compiler, sljit_s32 e
sljit_u8 *inst;
BINARY_IMM32(ADD, extra_space, SLJIT_SP, 0);
-
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
- FAIL_IF(!inst);
- INC_SIZE(1);
- RET();
-
- return SLJIT_SUCCESS;
+ return emit_byte(compiler, RET_near);
}
static sljit_s32 tail_call_reg_arg_with_args(struct sljit_compiler *compiler, sljit_s32 arg_types)
@@ -1075,7 +1114,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_icall(struct sljit_compiler *compi
stack_size = type;
FAIL_IF(tail_call_with_args(compiler, &stack_size, arg_types, src, srcw));
- if (!(src & SLJIT_IMM)) {
+ if (src != SLJIT_IMM) {
src = SLJIT_R0;
srcw = 0;
}
@@ -1142,30 +1181,20 @@ static SLJIT_INLINE sljit_s32 emit_fmov_before_return(struct sljit_compiler *com
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
+static sljit_s32 emit_fast_enter(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
{
sljit_u8 *inst;
- CHECK_ERROR();
- CHECK(check_sljit_emit_fast_enter(compiler, dst, dstw));
- ADJUST_LOCAL_OFFSET(dst, dstw);
-
CHECK_EXTRA_REGS(dst, dstw, (void)0);
- if (FAST_IS_REG(dst)) {
- /* Unused dest is possible here. */
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
- FAIL_IF(!inst);
-
- INC_SIZE(1);
- POP_REG(reg_map[dst]);
- return SLJIT_SUCCESS;
- }
+ /* Unused dest is possible here. */
+ if (FAST_IS_REG(dst))
+ return emit_byte(compiler, U8(POP_r + reg_map[dst]));
/* Memory. */
inst = emit_x86_instruction(compiler, 1, 0, 0, dst, dstw);
FAIL_IF(!inst);
- *inst++ = POP_rm;
+ *inst = POP_rm;
return SLJIT_SUCCESS;
}
@@ -1185,8 +1214,8 @@ static sljit_s32 emit_fast_return(struct sljit_compiler *compiler, sljit_s32 src
else {
inst = emit_x86_instruction(compiler, 1, 0, 0, src, srcw);
FAIL_IF(!inst);
- *inst++ = GROUP_FF;
- *inst |= PUSH_rm;
+ inst[0] = GROUP_FF;
+ inst[1] |= PUSH_rm;
inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
FAIL_IF(!inst);
@@ -1197,10 +1226,88 @@ static sljit_s32 emit_fast_return(struct sljit_compiler *compiler, sljit_s32 src
return SLJIT_SUCCESS;
}
+static sljit_s32 sljit_emit_get_return_address(struct sljit_compiler *compiler,
+ sljit_s32 dst, sljit_sw dstw)
+{
+ sljit_s32 options = compiler->options;
+ sljit_s32 saveds = compiler->saveds;
+ sljit_s32 scratches = compiler->scratches;
+
+ saveds = ((scratches > 9 ? (scratches - 9) : 0) + (saveds <= 3 ? saveds : 3) - SLJIT_KEPT_SAVEDS_COUNT(options)) * SSIZE_OF(sw);
+
+ /* Saving ebp. */
+ if (!(options & SLJIT_ENTER_REG_ARG))
+ saveds += SSIZE_OF(sw);
+
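+ /* The return address is stored just above the locals and the saved register area. */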
+ return emit_mov(compiler, dst, dstw, SLJIT_MEM1(SLJIT_SP), compiler->local_size + saveds);
+}
+
/* --------------------------------------------------------------------- */
/* Other operations */
/* --------------------------------------------------------------------- */
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_select(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 dst_reg,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2_reg)
+{
+ sljit_s32 dst = dst_reg;
+ sljit_sw dstw = 0;
+ sljit_sw src2w = 0;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_select(compiler, type, dst_reg, src1, src1w, src2_reg));
+
+ ADJUST_LOCAL_OFFSET(src1, src1w);
+
+ CHECK_EXTRA_REGS(dst, dstw, (void)0);
+ CHECK_EXTRA_REGS(src1, src1w, (void)0);
+ CHECK_EXTRA_REGS(src2_reg, src2w, (void)0);
+
+ type &= ~SLJIT_32;
+
+ if (dst & SLJIT_MEM) {
+ if (src1 == SLJIT_IMM || (!(src1 & SLJIT_MEM) && (src2_reg & SLJIT_MEM))) {
+ EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
+ src1 = src2_reg;
+ src1w = src2w;
+ type ^= 0x1;
+ } else
+ EMIT_MOV(compiler, TMP_REG1, 0, src2_reg, src2w);
+
+ dst_reg = TMP_REG1;
+ } else {
+ if (dst_reg != src2_reg) {
+ if (dst_reg == src1) {
+ src1 = src2_reg;
+ src1w = src2w;
+ type ^= 0x1;
+ } else if (ADDRESSING_DEPENDS_ON(src1, dst_reg)) {
+ EMIT_MOV(compiler, dst_reg, 0, src1, src1w);
+ src1 = src2_reg;
+ src1w = src2w;
+ type ^= 0x1;
+ } else
+ EMIT_MOV(compiler, dst_reg, 0, src2_reg, src2w);
+ }
+ }
+
+ if (sljit_has_cpu_feature(SLJIT_HAS_CMOV) && (src1 != SLJIT_IMM || dst_reg != TMP_REG1)) {
+ if (SLJIT_UNLIKELY(src1 == SLJIT_IMM)) {
+ EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
+ src1 = TMP_REG1;
+ src1w = 0;
+ }
+
+ FAIL_IF(emit_groupf(compiler, U8(get_jump_code((sljit_uw)type) - 0x40), dst_reg, src1, src1w));
+ } else
+ FAIL_IF(emit_cmov_generic(compiler, type, dst_reg, src1, src1w));
+
+ if (dst & SLJIT_MEM)
+ return emit_mov(compiler, dst, dstw, TMP_REG1, 0);
+ return SLJIT_SUCCESS;
+}
+
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_mem(struct sljit_compiler *compiler, sljit_s32 type,
sljit_s32 reg,
sljit_s32 mem, sljit_sw memw)
@@ -1279,6 +1386,286 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_mem(struct sljit_compiler *compile
return SLJIT_SUCCESS;
}
+static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_uw(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
+{
+ sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG;
+ sljit_u8 *inst, *jump_inst1, *jump_inst2;
+ sljit_uw size1, size2;
+
+ /* The value 0x80000000 (2^31) as a double. */
+ static const sljit_f64 f64_high_bit = (sljit_f64)0x80000000ul;
+
+ CHECK_EXTRA_REGS(src, srcw, (void)0);
+
+ if (!(op & SLJIT_32)) {
+ EMIT_MOV(compiler, TMP_REG1, 0, src, srcw);
+
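+ /* Rotate the sign bit into bit 0, then shift it back out so the carry flag saves bit 31 and the register becomes non-negative. */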
+ inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_IMM, 1, TMP_REG1, 0);
+ FAIL_IF(!inst);
+ inst[1] |= ROL;
+
+ inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_IMM, 1, TMP_REG1, 0);
+ FAIL_IF(!inst);
+ inst[1] |= SHR;
+
+ FAIL_IF(emit_groupf(compiler, CVTSI2SD_x_rm | EX86_PREF_F2 | EX86_SSE2_OP1, dst_r, TMP_REG1, 0));
+
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
+ FAIL_IF(!inst);
+ INC_SIZE(2);
+ inst[0] = U8(get_jump_code(SLJIT_NOT_CARRY) - 0x10);
+
+ size1 = compiler->size;
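+ /* The carry flag still holds the original bit 31: if it was set, add 2^31 back as a double. */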
+ FAIL_IF(emit_groupf(compiler, ADDSD_x_xm | EX86_PREF_F2 | EX86_SSE2, dst_r, SLJIT_MEM0(), (sljit_sw)&f64_high_bit));
+
+ inst[1] = U8(compiler->size - size1);
+
+ if (dst_r == TMP_FREG)
+ return emit_sse2_store(compiler, 0, dst, dstw, TMP_FREG);
+ return SLJIT_SUCCESS;
+ }
+
+ if (!FAST_IS_REG(src)) {
+ EMIT_MOV(compiler, TMP_REG1, 0, src, srcw);
+ src = TMP_REG1;
+ }
+
+ BINARY_IMM32(CMP, 0, src, 0);
+
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
+ FAIL_IF(!inst);
+ INC_SIZE(2);
+ inst[0] = JL_i8;
+ jump_inst1 = inst;
+
+ size1 = compiler->size;
+
+ FAIL_IF(emit_groupf(compiler, CVTSI2SD_x_rm | EX86_SELECT_F2_F3(op) | EX86_SSE2_OP1, dst_r, src, 0));
+
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
+ FAIL_IF(!inst);
+ INC_SIZE(2);
+ inst[0] = JMP_i8;
+ jump_inst2 = inst;
+
+ size2 = compiler->size;
+
+ jump_inst1[1] = U8(size2 - size1);
+
+ if (src != TMP_REG1)
+ EMIT_MOV(compiler, TMP_REG1, 0, src, 0);
+
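+ /* The value has its top bit set: halve it, keep the dropped low bit (round to odd), convert, then double the result. */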
+ inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_IMM, 1, TMP_REG1, 0);
+ FAIL_IF(!inst);
+ inst[1] |= SHR;
+
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
+ FAIL_IF(!inst);
+ INC_SIZE(2);
+ inst[0] = JNC_i8;
+ jump_inst1 = inst;
+
+ size1 = compiler->size;
+
+ BINARY_IMM32(OR, 1, TMP_REG1, 0);
+ jump_inst1[1] = U8(compiler->size - size1);
+
+ FAIL_IF(emit_groupf(compiler, CVTSI2SD_x_rm | EX86_SELECT_F2_F3(op) | EX86_SSE2_OP1, dst_r, TMP_REG1, 0));
+ FAIL_IF(emit_groupf(compiler, ADDSD_x_xm | EX86_SELECT_F2_F3(op) | EX86_SSE2, dst_r, dst_r, 0));
+
+ jump_inst2[1] = U8(compiler->size - size2);
+
+ if (dst_r == TMP_FREG)
+ return emit_sse2_store(compiler, op & SLJIT_32, dst, dstw, TMP_FREG);
+ return SLJIT_SUCCESS;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fset32(struct sljit_compiler *compiler,
+ sljit_s32 freg, sljit_f32 value)
+{
+ sljit_u8 *inst;
+ union {
+ sljit_s32 imm;
+ sljit_f32 value;
+ } u;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_fset32(compiler, freg, value));
+
+ u.value = value;
+
+ if (u.imm != 0)
+ EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, u.imm);
+
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
+ FAIL_IF(!inst);
+ INC_SIZE(4);
+
+ inst[0] = GROUP_66;
+ inst[1] = GROUP_0F;
+
+ if (u.imm == 0) {
+ inst[2] = PXOR_x_xm;
+ inst[3] = U8(freg_map[freg] | (freg_map[freg] << 3) | MOD_REG);
+ } else {
+ inst[2] = MOVD_x_rm;
+ inst[3] = U8(reg_map[TMP_REG1] | (freg_map[freg] << 3) | MOD_REG);
+ }
+
+ return SLJIT_SUCCESS;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fset64(struct sljit_compiler *compiler,
+ sljit_s32 freg, sljit_f64 value)
+{
+ sljit_u8 *inst;
+ union {
+ sljit_s32 imm[2];
+ sljit_f64 value;
+ } u;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_fset64(compiler, freg, value));
+
+ u.value = value;
+
+ if (u.imm[0] == 0) {
+ if (u.imm[1] == 0)
+ return emit_groupf(compiler, PXOR_x_xm | EX86_PREF_66 | EX86_SSE2, freg, freg, 0);
+
+ EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, u.imm[1]);
+ } else {
+ SLJIT_ASSERT(cpu_feature_list != 0);
+
+ if (!(cpu_feature_list & CPU_FEATURE_SSE41) && u.imm[1] != 0 && u.imm[0] != u.imm[1]) {
+ EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), 0, SLJIT_IMM, u.imm[0]);
+ EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), sizeof(sljit_sw), SLJIT_IMM, u.imm[1]);
+
+ return emit_groupf(compiler, MOVLPD_x_m | EX86_SSE2, freg, SLJIT_MEM1(SLJIT_SP), 0);
+ }
+
+ EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, u.imm[0]);
+ }
+
+ FAIL_IF(emit_groupf(compiler, MOVD_x_rm | EX86_PREF_66 | EX86_SSE2_OP1, freg, TMP_REG1, 0));
+
+ if (u.imm[1] == 0)
+ return SLJIT_SUCCESS;
+
+ if (u.imm[0] == 0) {
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
+ FAIL_IF(!inst);
+ INC_SIZE(4);
+
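+ /* Only the high 32 bits are non-zero: shufps with imm 0x51 moves the loaded word into the second lane and leaves the first lane zero. */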
+ inst[0] = GROUP_0F;
+ inst[1] = SHUFPS_x_xm;
+ inst[2] = U8(MOD_REG | (freg_map[freg] << 3) | freg_map[freg]);
+ inst[3] = 0x51;
+ return SLJIT_SUCCESS;
+ }
+
+ if (u.imm[0] != u.imm[1]) {
+ SLJIT_ASSERT(cpu_feature_list & CPU_FEATURE_SSE41);
+ EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, u.imm[1]);
+
+ FAIL_IF(emit_groupf_ext(compiler, PINSRD_x_rm_i8 | EX86_PREF_66 | VEX_OP_0F3A | EX86_SSE2_OP1, freg, TMP_REG1, 0));
+ return emit_byte(compiler, 1);
+ }
+
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 3);
+ FAIL_IF(!inst);
+ INC_SIZE(3);
+
+ inst[0] = GROUP_0F;
+ inst[1] = UNPCKLPS_x_xm;
+ inst[2] = U8(MOD_REG | (freg_map[freg] << 3) | freg_map[freg]);
+ return SLJIT_SUCCESS;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fcopy(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 freg, sljit_s32 reg)
+{
+ sljit_u8 *inst;
+ sljit_s32 reg2;
+ sljit_sw regw, reg2w;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_fcopy(compiler, op, freg, reg));
+
+ regw = 0;
+ reg2 = 0;
+ reg2w = 0;
+
+ SLJIT_ASSERT(cpu_feature_list != 0);
+
+ if (!(op & SLJIT_32) && (cpu_feature_list & CPU_FEATURE_SSE41)) {
+ if (reg & REG_PAIR_MASK) {
+ reg2 = REG_PAIR_FIRST(reg);
+ reg = REG_PAIR_SECOND(reg);
+
+ CHECK_EXTRA_REGS(reg, regw, (void)0);
+
+ FAIL_IF(emit_groupf(compiler, (GET_OPCODE(op) == SLJIT_COPY_TO_F64 ? MOVD_x_rm : MOVD_rm_x)
+ | EX86_PREF_66 | EX86_SSE2_OP1, freg, reg, regw));
+ } else
+ reg2 = reg;
+
+ CHECK_EXTRA_REGS(reg2, reg2w, (void)0);
+
+ FAIL_IF(emit_groupf_ext(compiler, (GET_OPCODE(op) == SLJIT_COPY_TO_F64 ? PINSRD_x_rm_i8 : PEXTRD_rm_x_i8)
+ | EX86_PREF_66 | VEX_OP_0F3A | EX86_SSE2_OP1, freg, reg2, reg2w));
+ return emit_byte(compiler, 1);
+ }
+
+ if (reg & REG_PAIR_MASK) {
+ reg2 = REG_PAIR_SECOND(reg);
+ reg = REG_PAIR_FIRST(reg);
+
+ if (reg == reg2)
+ reg = 0;
+
+ CHECK_EXTRA_REGS(reg2, reg2w, (void)0);
+ }
+
+ CHECK_EXTRA_REGS(reg, regw, (void)0);
+
+ if (op & SLJIT_32)
+ return emit_groupf(compiler, (GET_OPCODE(op) == SLJIT_COPY_TO_F64 ? MOVD_x_rm : MOVD_rm_x)
+ | EX86_PREF_66 | EX86_SSE2_OP1, freg, reg, regw);
+
+ if (op == SLJIT_COPY_FROM_F64) {
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 5);
+ FAIL_IF(!inst);
+ INC_SIZE(5);
+
+ inst[0] = GROUP_66;
+ inst[1] = GROUP_0F;
+ inst[2] = PSHUFD_x_xm;
+ inst[3] = U8(MOD_REG | (TMP_FREG << 3) | freg_map[freg]);
+ inst[4] = 1;
+ } else if (reg != 0)
+ FAIL_IF(emit_groupf(compiler, MOVD_x_rm | EX86_PREF_66 | EX86_SSE2_OP1, TMP_FREG, reg, regw));
+
+ if (reg2 != 0)
+ FAIL_IF(emit_groupf(compiler, (GET_OPCODE(op) == SLJIT_COPY_TO_F64 ? MOVD_x_rm : MOVD_rm_x)
+ | EX86_PREF_66 | EX86_SSE2_OP1, freg, reg2, reg2w));
+
+ if (GET_OPCODE(op) == SLJIT_COPY_TO_F64) {
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 3);
+ FAIL_IF(!inst);
+ INC_SIZE(3);
+
+ inst[0] = GROUP_0F;
+ inst[1] = UNPCKLPS_x_xm;
+ inst[2] = U8(MOD_REG | (freg_map[freg] << 3) | freg_map[reg == 0 ? freg : TMP_FREG]);
+ } else
+ FAIL_IF(emit_groupf(compiler, MOVD_rm_x | EX86_PREF_66 | EX86_SSE2_OP1, TMP_FREG, reg, regw));
+
+ return SLJIT_SUCCESS;
+}
+
static sljit_s32 skip_frames_before_return(struct sljit_compiler *compiler)
{
sljit_sw size;
diff --git a/src/3rdparty/pcre2/src/sljit/sljitNativeX86_64.c b/src/3rdparty/pcre2/src/sljit/sljitNativeX86_64.c
index 4e938ffcf3..1ab79293c7 100644
--- a/src/3rdparty/pcre2/src/sljit/sljitNativeX86_64.c
+++ b/src/3rdparty/pcre2/src/sljit/sljitNativeX86_64.c
@@ -37,9 +37,9 @@ static sljit_s32 emit_load_imm64(struct sljit_compiler *compiler, sljit_s32 reg,
inst = (sljit_u8*)ensure_buf(compiler, 1 + 2 + sizeof(sljit_sw));
FAIL_IF(!inst);
INC_SIZE(2 + sizeof(sljit_sw));
- *inst++ = REX_W | ((reg_map[reg] <= 7) ? 0 : REX_B);
- *inst++ = U8(MOV_r_i32 | (reg_map[reg] & 0x7));
- sljit_unaligned_store_sw(inst, imm);
+ inst[0] = REX_W | ((reg_map[reg] <= 7) ? 0 : REX_B);
+ inst[1] = U8(MOV_r_i32 | reg_lmap[reg]);
+ sljit_unaligned_store_sw(inst + 2, imm);
return SLJIT_SUCCESS;
}
@@ -72,7 +72,7 @@ static sljit_u8* emit_x86_instruction(struct sljit_compiler *compiler, sljit_uw
sljit_uw inst_size;
/* The immediate operand must be 32 bit. */
- SLJIT_ASSERT(!(a & SLJIT_IMM) || compiler->mode32 || IS_HALFWORD(imma));
+ SLJIT_ASSERT(a != SLJIT_IMM || compiler->mode32 || IS_HALFWORD(imma));
/* Both cannot be switched on. */
SLJIT_ASSERT((flags & (EX86_BIN_INS | EX86_SHIFT_INS)) != (EX86_BIN_INS | EX86_SHIFT_INS));
/* Size flags not allowed for typed instructions. */
@@ -80,26 +80,24 @@ static sljit_u8* emit_x86_instruction(struct sljit_compiler *compiler, sljit_uw
/* Both size flags cannot be switched on. */
SLJIT_ASSERT((flags & (EX86_BYTE_ARG | EX86_HALF_ARG)) != (EX86_BYTE_ARG | EX86_HALF_ARG));
/* SSE2 and immediate is not possible. */
- SLJIT_ASSERT(!(a & SLJIT_IMM) || !(flags & EX86_SSE2));
- SLJIT_ASSERT((flags & (EX86_PREF_F2 | EX86_PREF_F3)) != (EX86_PREF_F2 | EX86_PREF_F3)
- && (flags & (EX86_PREF_F2 | EX86_PREF_66)) != (EX86_PREF_F2 | EX86_PREF_66)
- && (flags & (EX86_PREF_F3 | EX86_PREF_66)) != (EX86_PREF_F3 | EX86_PREF_66));
+ SLJIT_ASSERT(a != SLJIT_IMM || !(flags & EX86_SSE2));
+ SLJIT_ASSERT(((flags & (EX86_PREF_F2 | EX86_PREF_F3 | EX86_PREF_66))
+ & ((flags & (EX86_PREF_F2 | EX86_PREF_F3 | EX86_PREF_66)) - 1)) == 0);
+ SLJIT_ASSERT((flags & (EX86_VEX_EXT | EX86_REX)) != EX86_VEX_EXT);
size &= 0xf;
- inst_size = size;
+ /* The mod r/m byte is always present. */
+ inst_size = size + 1;
if (!compiler->mode32 && !(flags & EX86_NO_REXW))
rex |= REX_W;
else if (flags & EX86_REX)
rex |= REX;
- if (flags & (EX86_PREF_F2 | EX86_PREF_F3))
- inst_size++;
- if (flags & EX86_PREF_66)
+ if (flags & (EX86_PREF_F2 | EX86_PREF_F3 | EX86_PREF_66))
inst_size++;
/* Calculate size of b. */
- inst_size += 1; /* mod r/m byte. */
if (b & SLJIT_MEM) {
if (!(b & OFFS_REG_MASK) && NOT_HALFWORD(immb)) {
PTR_FAIL_IF(emit_load_imm64(compiler, TMP_REG2, immb));
@@ -119,8 +117,7 @@ static sljit_u8* emit_x86_instruction(struct sljit_compiler *compiler, sljit_uw
inst_size += sizeof(sljit_s8);
else
inst_size += sizeof(sljit_s32);
- }
- else if (reg_lmap[b & REG_MASK] == 5) {
+ } else if (reg_lmap[b & REG_MASK] == 5) {
/* Swap registers if possible. */
if ((b & OFFS_REG_MASK) && (immb & 0x3) == 0 && reg_lmap[OFFS_REG(b)] != 5)
b = SLJIT_MEM | OFFS_REG(b) | TO_OFFS_REG(b & REG_MASK);
@@ -140,23 +137,26 @@ static sljit_u8* emit_x86_instruction(struct sljit_compiler *compiler, sljit_uw
rex |= REX_X;
}
}
- }
- else if (!(flags & EX86_SSE2_OP2)) {
+ } else if (!(flags & EX86_SSE2_OP2)) {
if (reg_map[b] >= 8)
rex |= REX_B;
- }
- else if (freg_map[b] >= 8)
+ } else if (freg_map[b] >= 8)
rex |= REX_B;
- if (a & SLJIT_IMM) {
+ if ((flags & EX86_VEX_EXT) && (rex & 0x3)) {
+ SLJIT_ASSERT(size == 2);
+ size++;
+ inst_size++;
+ }
+
+ if (a == SLJIT_IMM) {
if (flags & EX86_BIN_INS) {
if (imma <= 127 && imma >= -128) {
inst_size += 1;
flags |= EX86_BYTE_ARG;
} else
inst_size += 4;
- }
- else if (flags & EX86_SHIFT_INS) {
+ } else if (flags & EX86_SHIFT_INS) {
SLJIT_ASSERT(imma <= (compiler->mode32 ? 0x1f : 0x3f));
if (imma != 1) {
inst_size++;
@@ -168,8 +168,7 @@ static sljit_u8* emit_x86_instruction(struct sljit_compiler *compiler, sljit_uw
inst_size += sizeof(short);
else
inst_size += sizeof(sljit_s32);
- }
- else {
+ } else {
SLJIT_ASSERT(!(flags & EX86_SHIFT_INS) || a == SLJIT_PREF_SHIFT_REG);
/* reg_map[SLJIT_PREF_SHIFT_REG] is less than 8. */
if (!(flags & EX86_SSE2_OP1)) {
@@ -186,32 +185,34 @@ static sljit_u8* emit_x86_instruction(struct sljit_compiler *compiler, sljit_uw
inst = (sljit_u8*)ensure_buf(compiler, 1 + inst_size);
PTR_FAIL_IF(!inst);
- /* Encoding the byte. */
+ /* Encoding prefixes. */
INC_SIZE(inst_size);
if (flags & EX86_PREF_F2)
*inst++ = 0xf2;
- if (flags & EX86_PREF_F3)
+ else if (flags & EX86_PREF_F3)
*inst++ = 0xf3;
- if (flags & EX86_PREF_66)
+ else if (flags & EX86_PREF_66)
*inst++ = 0x66;
+
+ /* The REX prefix is always emitted last, right before the opcode. */
if (rex)
*inst++ = rex;
+
buf_ptr = inst + size;
/* Encode mod/rm byte. */
if (!(flags & EX86_SHIFT_INS)) {
- if ((flags & EX86_BIN_INS) && (a & SLJIT_IMM))
+ if ((flags & EX86_BIN_INS) && a == SLJIT_IMM)
*inst = (flags & EX86_BYTE_ARG) ? GROUP_BINARY_83 : GROUP_BINARY_81;
- if (a & SLJIT_IMM)
+ if (a == SLJIT_IMM)
*buf_ptr = 0;
else if (!(flags & EX86_SSE2_OP1))
*buf_ptr = U8(reg_lmap[a] << 3);
else
*buf_ptr = U8(freg_lmap[a] << 3);
- }
- else {
- if (a & SLJIT_IMM) {
+ } else {
+ if (a == SLJIT_IMM) {
if (imma == 1)
*inst = GROUP_SHIFT_1;
else
@@ -238,8 +239,9 @@ static sljit_u8* emit_x86_instruction(struct sljit_compiler *compiler, sljit_uw
if (!(b & OFFS_REG_MASK))
*buf_ptr++ |= reg_lmap_b;
else {
- *buf_ptr++ |= 0x04;
- *buf_ptr++ = U8(reg_lmap_b | (reg_lmap[OFFS_REG(b)] << 3));
+ buf_ptr[0] |= 0x04;
+ buf_ptr[1] = U8(reg_lmap_b | (reg_lmap[OFFS_REG(b)] << 3));
+ buf_ptr += 2;
}
if (immb != 0 || reg_lmap_b == 5) {
@@ -250,26 +252,26 @@ static sljit_u8* emit_x86_instruction(struct sljit_compiler *compiler, sljit_uw
buf_ptr += sizeof(sljit_s32);
}
}
- }
- else {
+ } else {
if (reg_lmap_b == 5)
*buf_ptr |= 0x40;
- *buf_ptr++ |= 0x04;
- *buf_ptr++ = U8(reg_lmap_b | (reg_lmap[OFFS_REG(b)] << 3) | (immb << 6));
+ buf_ptr[0] |= 0x04;
+ buf_ptr[1] = U8(reg_lmap_b | (reg_lmap[OFFS_REG(b)] << 3) | (immb << 6));
+ buf_ptr += 2;
if (reg_lmap_b == 5)
*buf_ptr++ = 0;
}
- }
- else {
- *buf_ptr++ |= 0x04;
- *buf_ptr++ = 0x25;
+ } else {
+ buf_ptr[0] |= 0x04;
+ buf_ptr[1] = 0x25;
+ buf_ptr += 2;
sljit_unaligned_store_s32(buf_ptr, (sljit_s32)immb); /* 32 bit displacement. */
buf_ptr += sizeof(sljit_s32);
}
- if (a & SLJIT_IMM) {
+ if (a == SLJIT_IMM) {
if (flags & EX86_BYTE_ARG)
*buf_ptr = U8(imma);
else if (flags & EX86_HALF_ARG)
@@ -278,33 +280,106 @@ static sljit_u8* emit_x86_instruction(struct sljit_compiler *compiler, sljit_uw
sljit_unaligned_store_s32(buf_ptr, (sljit_s32)imma);
}
- return !(flags & EX86_SHIFT_INS) ? inst : (inst + 1);
+ return inst;
+}
+
+static sljit_s32 emit_vex_instruction(struct sljit_compiler *compiler, sljit_uw op,
+ /* The first and second register operands. */
+ sljit_s32 a, sljit_s32 v,
+ /* The general operand (not immediate). */
+ sljit_s32 b, sljit_sw immb)
+{
+ sljit_u8 *inst;
+ sljit_u8 vex = 0;
+ sljit_u8 vex_m = 0;
+ sljit_uw size;
+
+ SLJIT_ASSERT(((op & (EX86_PREF_F2 | EX86_PREF_F3 | EX86_PREF_66))
+ & ((op & (EX86_PREF_F2 | EX86_PREF_F3 | EX86_PREF_66)) - 1)) == 0);
+
+ op |= EX86_REX;
+
+ if (op & VEX_OP_0F38)
+ vex_m = 0x2;
+ else if (op & VEX_OP_0F3A)
+ vex_m = 0x3;
+
+ if ((op & VEX_W) || ((op & VEX_AUTO_W) && !compiler->mode32)) {
+ if (vex_m == 0)
+ vex_m = 0x1;
+
+ vex |= 0x80;
+ }
+
+ if (op & EX86_PREF_66)
+ vex |= 0x1;
+ else if (op & EX86_PREF_F2)
+ vex |= 0x3;
+ else if (op & EX86_PREF_F3)
+ vex |= 0x2;
+
+ op &= ~(EX86_PREF_66 | EX86_PREF_F2 | EX86_PREF_F3);
+
+ if (op & VEX_256)
+ vex |= 0x4;
+
+ vex = U8(vex | ((((op & VEX_SSE2_OPV) ? freg_map[v] : reg_map[v]) ^ 0xf) << 3));
+
+ size = op & ~(sljit_uw)0xff;
+ size |= (vex_m == 0) ? (EX86_VEX_EXT | 2) : 3;
+
+ inst = emit_x86_instruction(compiler, size, a, 0, b, immb);
+ FAIL_IF(!inst);
+
+ SLJIT_ASSERT((inst[-1] & 0xf0) == REX);
+
+ /* If the X or B bit is set in the REX prefix, the two byte VEX form cannot encode it. */
+ if (vex_m == 0 && inst[-1] & 0x3)
+ vex_m = 0x1;
+
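+ /* Two byte VEX: bit 7 holds the inverted REX.R bit taken from the REX byte emitted just before. */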
+ if (vex_m == 0) {
+ vex |= U8(((inst[-1] >> 2) ^ 0x1) << 7);
+
+ inst[-1] = 0xc5;
+ inst[0] = vex;
+ inst[1] = U8(op);
+ return SLJIT_SUCCESS;
+ }
+
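+ /* Three byte VEX: bits 7:5 of the second byte hold the inverted REX R, X and B bits. */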
+ vex_m |= U8((inst[-1] ^ 0x7) << 5);
+ inst[-1] = 0xc4;
+ inst[0] = vex_m;
+ inst[1] = vex;
+ inst[2] = U8(op);
+ return SLJIT_SUCCESS;
}
/* --------------------------------------------------------------------- */
/* Enter / return */
/* --------------------------------------------------------------------- */
-static sljit_u8* generate_far_jump_code(struct sljit_jump *jump, sljit_u8 *code_ptr)
+static sljit_u8* detect_far_jump_type(struct sljit_jump *jump, sljit_u8 *code_ptr)
{
sljit_uw type = jump->flags >> TYPE_SHIFT;
- int short_addr = !(jump->flags & SLJIT_REWRITABLE_JUMP) && !(jump->flags & JUMP_LABEL) && (jump->u.target <= 0xffffffff);
+ int short_addr = !(jump->flags & SLJIT_REWRITABLE_JUMP) && (jump->flags & JUMP_ADDR) && (jump->u.target <= 0xffffffff);
/* The relative jump below is specialized for this case. */
- SLJIT_ASSERT(reg_map[TMP_REG2] >= 8);
+ SLJIT_ASSERT(reg_map[TMP_REG2] >= 8 && TMP_REG2 != SLJIT_TMP_DEST_REG);
if (type < SLJIT_JUMP) {
/* Invert type. */
- *code_ptr++ = U8(get_jump_code(type ^ 0x1) - 0x10);
- *code_ptr++ = short_addr ? (6 + 3) : (10 + 3);
+ code_ptr[0] = U8(get_jump_code(type ^ 0x1) - 0x10);
+ code_ptr[1] = short_addr ? (6 + 3) : (10 + 3);
+ code_ptr += 2;
}
- *code_ptr++ = short_addr ? REX_B : (REX_W | REX_B);
- *code_ptr++ = MOV_r_i32 | reg_lmap[TMP_REG2];
+ code_ptr[0] = short_addr ? REX_B : (REX_W | REX_B);
+ code_ptr[1] = MOV_r_i32 | reg_lmap[TMP_REG2];
+ code_ptr += 2;
jump->addr = (sljit_uw)code_ptr;
- if (jump->flags & JUMP_LABEL)
+ if (!(jump->flags & JUMP_ADDR))
jump->flags |= PATCH_MD;
else if (short_addr)
sljit_unaligned_store_s32(code_ptr, (sljit_s32)jump->u.target);
@@ -313,63 +388,71 @@ static sljit_u8* generate_far_jump_code(struct sljit_jump *jump, sljit_u8 *code_
code_ptr += short_addr ? sizeof(sljit_s32) : sizeof(sljit_sw);
- *code_ptr++ = REX_B;
- *code_ptr++ = GROUP_FF;
- *code_ptr++ = U8(MOD_REG | (type >= SLJIT_FAST_CALL ? CALL_rm : JMP_rm) | reg_lmap[TMP_REG2]);
+ code_ptr[0] = REX_B;
+ code_ptr[1] = GROUP_FF;
+ code_ptr[2] = U8(MOD_REG | (type >= SLJIT_FAST_CALL ? CALL_rm : JMP_rm) | reg_lmap[TMP_REG2]);
- return code_ptr;
+ return code_ptr + 3;
}
-static sljit_u8* generate_put_label_code(struct sljit_put_label *put_label, sljit_u8 *code_ptr, sljit_uw max_label)
+static sljit_u8* generate_mov_addr_code(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_u8 *code, sljit_sw executable_offset)
{
- if (max_label > HALFWORD_MAX) {
- put_label->addr -= put_label->flags;
- put_label->flags = PATCH_MD;
- return code_ptr;
- }
+ sljit_uw addr;
+ sljit_sw diff;
+ SLJIT_UNUSED_ARG(executable_offset);
+
+ SLJIT_ASSERT(((jump->flags >> JUMP_SIZE_SHIFT) & 0x1f) <= 10);
+ if (jump->flags & JUMP_ADDR)
+ addr = jump->u.target;
+ else
+ addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code, executable_offset) + jump->u.label->size;
- if (put_label->flags == 0) {
- /* Destination is register. */
- code_ptr = (sljit_u8*)put_label->addr - 2 - sizeof(sljit_uw);
+ if (addr > 0xffffffffl) {
+ diff = (sljit_sw)addr - (sljit_sw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
- SLJIT_ASSERT((code_ptr[0] & 0xf8) == REX_W);
- SLJIT_ASSERT((code_ptr[1] & 0xf8) == MOV_r_i32);
+ if (diff <= HALFWORD_MAX && diff >= HALFWORD_MIN) {
+ SLJIT_ASSERT(((jump->flags >> JUMP_SIZE_SHIFT) & 0x1f) >= 7);
+ code_ptr -= SSIZE_OF(s32) - 1;
- if ((code_ptr[0] & 0x07) != 0) {
- code_ptr[0] = U8(code_ptr[0] & ~0x08);
- code_ptr += 2 + sizeof(sljit_s32);
- }
- else {
- code_ptr[0] = code_ptr[1];
- code_ptr += 1 + sizeof(sljit_s32);
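+ /* The target fits in a signed 32 bit displacement: rewrite the 10 byte mov reg, imm64 as a 7 byte rip-relative lea. */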
+ SLJIT_ASSERT((code_ptr[-3 - SSIZE_OF(s32)] & 0xf8) == REX_W);
+ SLJIT_ASSERT((code_ptr[-2 - SSIZE_OF(s32)] & 0xf8) == MOV_r_i32);
+
+ code_ptr[-3 - SSIZE_OF(s32)] = U8(REX_W | ((code_ptr[-3 - SSIZE_OF(s32)] & 0x1) << 2));
+ code_ptr[-1 - SSIZE_OF(s32)] = U8(((code_ptr[-2 - SSIZE_OF(s32)] & 0x7) << 3) | 0x5);
+ code_ptr[-2 - SSIZE_OF(s32)] = LEA_r_m;
+
+ jump->flags |= PATCH_MW;
+ return code_ptr;
}
- put_label->addr = (sljit_uw)code_ptr;
+ jump->flags |= PATCH_MD;
return code_ptr;
}
- code_ptr -= put_label->flags + (2 + sizeof(sljit_uw));
- SLJIT_MEMMOVE(code_ptr, code_ptr + (2 + sizeof(sljit_uw)), put_label->flags);
+ code_ptr -= 2 + sizeof(sljit_uw);
SLJIT_ASSERT((code_ptr[0] & 0xf8) == REX_W);
-
- if ((code_ptr[1] & 0xf8) == MOV_r_i32) {
- code_ptr += 2 + sizeof(sljit_uw);
- SLJIT_ASSERT((code_ptr[0] & 0xf8) == REX_W);
+ SLJIT_ASSERT((code_ptr[1] & 0xf8) == MOV_r_i32);
+
+ if ((code_ptr[0] & 0x07) != 0) {
+ SLJIT_ASSERT(((jump->flags >> JUMP_SIZE_SHIFT) & 0x1f) >= 6);
+ code_ptr[0] = U8(code_ptr[0] & ~0x08);
+ code_ptr += 2 + sizeof(sljit_s32);
+ } else {
+ SLJIT_ASSERT(((jump->flags >> JUMP_SIZE_SHIFT) & 0x1f) >= 5);
+ code_ptr[0] = code_ptr[1];
+ code_ptr += 1 + sizeof(sljit_s32);
}
- SLJIT_ASSERT(code_ptr[1] == MOV_rm_r);
-
- code_ptr[0] = U8(code_ptr[0] & ~0x4);
- code_ptr[1] = MOV_rm_i32;
- code_ptr[2] = U8(code_ptr[2] & ~(0x7 << 3));
-
- code_ptr = (sljit_u8*)(put_label->addr - (2 + sizeof(sljit_uw)) + sizeof(sljit_s32));
- put_label->addr = (sljit_uw)code_ptr;
- put_label->flags = 0;
return code_ptr;
}
+#ifdef _WIN64
+typedef struct {
+ sljit_sw regs[2];
+} sljit_sse2_reg;
+#endif /* _WIN64 */
+
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compiler,
sljit_s32 options, sljit_s32 arg_types, sljit_s32 scratches, sljit_s32 saveds,
sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
@@ -423,7 +506,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compi
#ifdef _WIN64
local_size += SLJIT_LOCALS_OFFSET;
- saved_float_regs_size = GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, 16);
+ saved_float_regs_size = GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, sse2_reg);
if (saved_float_regs_size > 0) {
saved_float_regs_offset = ((local_size + 0xf) & ~0xf);
@@ -532,16 +615,12 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compi
tmp = SLJIT_FS0 - fsaveds;
for (i = SLJIT_FS0; i > tmp; i--) {
- inst = emit_x86_instruction(compiler, 2 | EX86_SSE2, i, 0, SLJIT_MEM1(SLJIT_SP), saved_float_regs_offset);
- *inst++ = GROUP_0F;
- *inst = MOVAPS_xm_x;
+ FAIL_IF(emit_groupf(compiler, MOVAPS_xm_x | EX86_SSE2, i, SLJIT_MEM1(SLJIT_SP), saved_float_regs_offset));
saved_float_regs_offset += 16;
}
for (i = fscratches; i >= SLJIT_FIRST_SAVED_FLOAT_REG; i--) {
- inst = emit_x86_instruction(compiler, 2 | EX86_SSE2, i, 0, SLJIT_MEM1(SLJIT_SP), saved_float_regs_offset);
- *inst++ = GROUP_0F;
- *inst = MOVAPS_xm_x;
+ FAIL_IF(emit_groupf(compiler, MOVAPS_xm_x | EX86_SSE2, i, SLJIT_MEM1(SLJIT_SP), saved_float_regs_offset));
saved_float_regs_offset += 16;
}
}
@@ -565,7 +644,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_set_context(struct sljit_compiler *comp
#ifdef _WIN64
local_size += SLJIT_LOCALS_OFFSET;
- saved_float_regs_size = GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, 16);
+ saved_float_regs_size = GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, sse2_reg);
if (saved_float_regs_size > 0)
local_size = ((local_size + 0xf) & ~0xf) + saved_float_regs_size;
@@ -591,7 +670,7 @@ static sljit_s32 emit_stack_frame_release(struct sljit_compiler *compiler, sljit
#endif /* _WIN64 */
#ifdef _WIN64
- saved_float_regs_offset = GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, 16);
+ saved_float_regs_offset = GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, sse2_reg);
if (saved_float_regs_offset > 0) {
compiler->mode32 = 1;
@@ -599,16 +678,12 @@ static sljit_s32 emit_stack_frame_release(struct sljit_compiler *compiler, sljit
tmp = SLJIT_FS0 - fsaveds;
for (i = SLJIT_FS0; i > tmp; i--) {
- inst = emit_x86_instruction(compiler, 2 | EX86_SSE2, i, 0, SLJIT_MEM1(SLJIT_SP), saved_float_regs_offset);
- *inst++ = GROUP_0F;
- *inst = MOVAPS_x_xm;
+ FAIL_IF(emit_groupf(compiler, MOVAPS_x_xm | EX86_SSE2, i, SLJIT_MEM1(SLJIT_SP), saved_float_regs_offset));
saved_float_regs_offset += 16;
}
for (i = fscratches; i >= SLJIT_FIRST_SAVED_FLOAT_REG; i--) {
- inst = emit_x86_instruction(compiler, 2 | EX86_SSE2, i, 0, SLJIT_MEM1(SLJIT_SP), saved_float_regs_offset);
- *inst++ = GROUP_0F;
- *inst = MOVAPS_x_xm;
+ FAIL_IF(emit_groupf(compiler, MOVAPS_x_xm | EX86_SSE2, i, SLJIT_MEM1(SLJIT_SP), saved_float_regs_offset));
saved_float_regs_offset += 16;
}
@@ -656,20 +731,13 @@ static sljit_s32 emit_stack_frame_release(struct sljit_compiler *compiler, sljit
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return_void(struct sljit_compiler *compiler)
{
- sljit_u8 *inst;
-
CHECK_ERROR();
CHECK(check_sljit_emit_return_void(compiler));
compiler->mode32 = 0;
FAIL_IF(emit_stack_frame_release(compiler, 0));
-
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
- FAIL_IF(!inst);
- INC_SIZE(1);
- RET();
- return SLJIT_SUCCESS;
+ return emit_byte(compiler, RET_near);
}
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return_to(struct sljit_compiler *compiler,
@@ -863,22 +931,13 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_icall(struct sljit_compiler *compi
return sljit_emit_ijump(compiler, type, src, srcw);
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
+static sljit_s32 emit_fast_enter(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
{
sljit_u8 *inst;
- CHECK_ERROR();
- CHECK(check_sljit_emit_fast_enter(compiler, dst, dstw));
- ADJUST_LOCAL_OFFSET(dst, dstw);
-
if (FAST_IS_REG(dst)) {
- if (reg_map[dst] < 8) {
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
- FAIL_IF(!inst);
- INC_SIZE(1);
- POP_REG(reg_lmap[dst]);
- return SLJIT_SUCCESS;
- }
+ if (reg_map[dst] < 8)
+ return emit_byte(compiler, U8(POP_r + reg_lmap[dst]));
inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
FAIL_IF(!inst);
@@ -892,7 +951,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_enter(struct sljit_compiler *
compiler->mode32 = 1;
inst = emit_x86_instruction(compiler, 1, 0, 0, dst, dstw);
FAIL_IF(!inst);
- *inst++ = POP_rm;
+ *inst = POP_rm;
return SLJIT_SUCCESS;
}
@@ -922,8 +981,8 @@ static sljit_s32 emit_fast_return(struct sljit_compiler *compiler, sljit_s32 src
compiler->mode32 = 1;
inst = emit_x86_instruction(compiler, 1, 0, 0, src, srcw);
FAIL_IF(!inst);
- *inst++ = GROUP_FF;
- *inst |= PUSH_rm;
+ inst[0] = GROUP_FF;
+ inst[1] |= PUSH_rm;
inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
FAIL_IF(!inst);
@@ -934,10 +993,60 @@ static sljit_s32 emit_fast_return(struct sljit_compiler *compiler, sljit_s32 src
return SLJIT_SUCCESS;
}
+static sljit_s32 sljit_emit_get_return_address(struct sljit_compiler *compiler,
+ sljit_s32 dst, sljit_sw dstw)
+{
+ sljit_s32 saved_regs_size;
+
+ compiler->mode32 = 0;
+ saved_regs_size = GET_SAVED_REGISTERS_SIZE(compiler->scratches, compiler->saveds - SLJIT_KEPT_SAVEDS_COUNT(compiler->options), 0);
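+ /* The return address is stored just above the locals and the saved register area. */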
+ return emit_mov(compiler, dst, dstw, SLJIT_MEM1(SLJIT_SP), compiler->local_size + saved_regs_size);
+}
+
/* --------------------------------------------------------------------- */
/* Other operations */
/* --------------------------------------------------------------------- */
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_select(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 dst_reg,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2_reg)
+{
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_select(compiler, type, dst_reg, src1, src1w, src2_reg));
+
+ ADJUST_LOCAL_OFFSET(src1, src1w);
+
+ compiler->mode32 = type & SLJIT_32;
+ type &= ~SLJIT_32;
+
+ if (dst_reg != src2_reg) {
+ if (dst_reg == src1) {
+ src1 = src2_reg;
+ src1w = 0;
+ type ^= 0x1;
+ } else if (ADDRESSING_DEPENDS_ON(src1, dst_reg)) {
+ EMIT_MOV(compiler, dst_reg, 0, src1, src1w);
+ src1 = src2_reg;
+ src1w = 0;
+ type ^= 0x1;
+ } else
+ EMIT_MOV(compiler, dst_reg, 0, src2_reg, 0);
+ }
+
+ if (sljit_has_cpu_feature(SLJIT_HAS_CMOV)) {
+ if (SLJIT_UNLIKELY(src1 == SLJIT_IMM)) {
+ EMIT_MOV(compiler, TMP_REG2, 0, src1, src1w);
+ src1 = TMP_REG2;
+ src1w = 0;
+ }
+
+ return emit_groupf(compiler, U8(get_jump_code((sljit_uw)type) - 0x40), dst_reg, src1, src1w);
+ }
+
+ return emit_cmov_generic(compiler, type, dst_reg, src1, src1w);
+}
+
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_mem(struct sljit_compiler *compiler, sljit_s32 type,
sljit_s32 reg,
sljit_s32 mem, sljit_sw memw)
@@ -1027,15 +1136,15 @@ static sljit_s32 emit_mov_int(struct sljit_compiler *compiler, sljit_s32 sign,
compiler->mode32 = 0;
- if (src & SLJIT_IMM) {
+ if (src == SLJIT_IMM) {
if (FAST_IS_REG(dst)) {
- if (sign || ((sljit_uw)srcw <= 0x7fffffff)) {
- inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, (sljit_sw)(sljit_s32)srcw, dst, dstw);
- FAIL_IF(!inst);
- *inst = MOV_rm_i32;
- return SLJIT_SUCCESS;
- }
- return emit_load_imm64(compiler, dst, srcw);
+ if (!sign || ((sljit_u32)srcw <= 0x7fffffff))
+ return emit_do_imm32(compiler, reg_map[dst] <= 7 ? 0 : REX_B, U8(MOV_r_i32 | reg_lmap[dst]), srcw);
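+ /* mov r32, imm32 zero extends to 64 bits, which also matches sign extension for non-negative values. */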
+
+ inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, (sljit_sw)(sljit_s32)srcw, dst, dstw);
+ FAIL_IF(!inst);
+ *inst = MOV_rm_i32;
+ return SLJIT_SUCCESS;
}
compiler->mode32 = 1;
inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, (sljit_sw)(sljit_s32)srcw, dst, dstw);
@@ -1053,10 +1162,10 @@ static sljit_s32 emit_mov_int(struct sljit_compiler *compiler, sljit_s32 sign,
if (sign) {
inst = emit_x86_instruction(compiler, 1, dst_r, 0, src, srcw);
FAIL_IF(!inst);
- *inst++ = MOVSXD_r_rm;
+ *inst = MOVSXD_r_rm;
} else {
compiler->mode32 = 1;
- FAIL_IF(emit_mov(compiler, dst_r, 0, src, srcw));
+ EMIT_MOV(compiler, dst_r, 0, src, srcw);
compiler->mode32 = 0;
}
}
@@ -1072,6 +1181,203 @@ static sljit_s32 emit_mov_int(struct sljit_compiler *compiler, sljit_s32 sign,
return SLJIT_SUCCESS;
}
+static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_uw(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
+{
+ sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG;
+ sljit_u8 *inst, *jump_inst1, *jump_inst2;
+ sljit_uw size1, size2;
+
+ compiler->mode32 = 0;
+
+ if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_U32) {
+ if (src != SLJIT_IMM) {
+ compiler->mode32 = 1;
+ EMIT_MOV(compiler, TMP_REG1, 0, src, srcw);
+ compiler->mode32 = 0;
+ } else
+ FAIL_IF(emit_do_imm32(compiler, reg_map[TMP_REG1] <= 7 ? 0 : REX_B, U8(MOV_r_i32 | reg_lmap[TMP_REG1]), srcw));
+
+ FAIL_IF(emit_groupf(compiler, CVTSI2SD_x_rm | EX86_SELECT_F2_F3(op) | EX86_SSE2_OP1, dst_r, TMP_REG1, 0));
+
+ compiler->mode32 = 1;
+
+ if (dst_r == TMP_FREG)
+ return emit_sse2_store(compiler, op & SLJIT_32, dst, dstw, TMP_FREG);
+ return SLJIT_SUCCESS;
+ }
+
+ if (!FAST_IS_REG(src)) {
+ EMIT_MOV(compiler, TMP_REG1, 0, src, srcw);
+ src = TMP_REG1;
+ }
+
+ BINARY_IMM32(CMP, 0, src, 0);
+
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
+ FAIL_IF(!inst);
+ INC_SIZE(2);
+ inst[0] = JL_i8;
+ jump_inst1 = inst;
+
+ size1 = compiler->size;
+
+ compiler->mode32 = 0;
+ FAIL_IF(emit_groupf(compiler, CVTSI2SD_x_rm | EX86_SELECT_F2_F3(op) | EX86_SSE2_OP1, dst_r, src, 0));
+
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
+ FAIL_IF(!inst);
+ INC_SIZE(2);
+ inst[0] = JMP_i8;
+ jump_inst2 = inst;
+
+ size2 = compiler->size;
+
+ jump_inst1[1] = U8(size2 - size1);
+
+ if (src != TMP_REG1)
+ EMIT_MOV(compiler, TMP_REG1, 0, src, 0);
+
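+ /* The top bit is set: halve the value, keep the dropped low bit (round to odd), convert, then double the result. */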
+ EMIT_MOV(compiler, TMP_REG2, 0, src, 0);
+
+ inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_IMM, 1, TMP_REG1, 0);
+ FAIL_IF(!inst);
+ inst[1] |= SHR;
+
+ compiler->mode32 = 1;
+ BINARY_IMM32(AND, 1, TMP_REG2, 0);
+
+ compiler->mode32 = 0;
+ inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, TMP_REG2, 0);
+ FAIL_IF(!inst);
+ inst[0] = OR_r_rm;
+
+ FAIL_IF(emit_groupf(compiler, CVTSI2SD_x_rm | EX86_SELECT_F2_F3(op) | EX86_SSE2_OP1, dst_r, TMP_REG1, 0));
+ compiler->mode32 = 1;
+ FAIL_IF(emit_groupf(compiler, ADDSD_x_xm | EX86_SELECT_F2_F3(op) | EX86_SSE2, dst_r, dst_r, 0));
+
+ jump_inst2[1] = U8(compiler->size - size2);
+
+ if (dst_r == TMP_FREG)
+ return emit_sse2_store(compiler, op & SLJIT_32, dst, dstw, TMP_FREG);
+ return SLJIT_SUCCESS;
+}
+
+static sljit_s32 sljit_emit_fset(struct sljit_compiler *compiler,
+ sljit_s32 freg, sljit_u8 rex, sljit_s32 is_zero)
+{
+ sljit_u8 *inst;
+ sljit_u32 size;
+
+ if (is_zero) {
+ rex = freg_map[freg] >= 8 ? (REX_R | REX_B) : 0;
+ } else {
+ if (freg_map[freg] >= 8)
+ rex |= REX_R;
+ if (reg_map[TMP_REG1] >= 8)
+ rex |= REX_B;
+ }
+
+ size = (rex != 0) ? 5 : 4;
+
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
+ FAIL_IF(!inst);
+ INC_SIZE(size);
+
+ *inst++ = GROUP_66;
+ if (rex != 0)
+ *inst++ = rex;
+ inst[0] = GROUP_0F;
+
+ if (is_zero) {
+ inst[1] = PXOR_x_xm;
+ inst[2] = U8(freg_lmap[freg] | (freg_lmap[freg] << 3) | MOD_REG);
+ } else {
+ inst[1] = MOVD_x_rm;
+ inst[2] = U8(reg_lmap[TMP_REG1] | (freg_lmap[freg] << 3) | MOD_REG);
+ }
+
+ return SLJIT_SUCCESS;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fset32(struct sljit_compiler *compiler,
+ sljit_s32 freg, sljit_f32 value)
+{
+ union {
+ sljit_s32 imm;
+ sljit_f32 value;
+ } u;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_fset32(compiler, freg, value));
+
+ u.value = value;
+
+ if (u.imm != 0) {
+ compiler->mode32 = 1;
+ EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, u.imm);
+ }
+
+ return sljit_emit_fset(compiler, freg, 0, u.imm == 0);
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fset64(struct sljit_compiler *compiler,
+ sljit_s32 freg, sljit_f64 value)
+{
+ union {
+ sljit_sw imm;
+ sljit_f64 value;
+ } u;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_fset64(compiler, freg, value));
+
+ u.value = value;
+
+ if (u.imm != 0) {
+ compiler->mode32 = 0;
+ EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, u.imm);
+ }
+
+ return sljit_emit_fset(compiler, freg, REX_W, u.imm == 0);
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fcopy(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 freg, sljit_s32 reg)
+{
+ sljit_u8 *inst;
+ sljit_u32 size;
+ sljit_u8 rex = 0;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_fcopy(compiler, op, freg, reg));
+
+ if (!(op & SLJIT_32))
+ rex = REX_W;
+
+ if (freg_map[freg] >= 8)
+ rex |= REX_R;
+
+ if (reg_map[reg] >= 8)
+ rex |= REX_B;
+
+ size = (rex != 0) ? 5 : 4;
+
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
+ FAIL_IF(!inst);
+ INC_SIZE(size);
+
+ *inst++ = GROUP_66;
+ if (rex != 0)
+ *inst++ = rex;
+ inst[0] = GROUP_0F;
+ inst[1] = GET_OPCODE(op) == SLJIT_COPY_TO_F64 ? MOVD_x_rm : MOVD_rm_x;
+ inst[2] = U8(reg_lmap[reg] | (freg_lmap[freg] << 3) | MOD_REG);
+
+ return SLJIT_SUCCESS;
+}
+
static sljit_s32 skip_frames_before_return(struct sljit_compiler *compiler)
{
sljit_s32 tmp, size;
diff --git a/src/3rdparty/pcre2/src/sljit/sljitNativeX86_common.c b/src/3rdparty/pcre2/src/sljit/sljitNativeX86_common.c
index 651942be80..ecb7e9be3b 100644
--- a/src/3rdparty/pcre2/src/sljit/sljitNativeX86_common.c
+++ b/src/3rdparty/pcre2/src/sljit/sljitNativeX86_common.c
@@ -61,15 +61,17 @@ SLJIT_API_FUNC_ATTRIBUTE const char* sljit_get_platform_name(void)
15 - R15
*/
-#define TMP_FREG (0)
+#define TMP_REG1 (SLJIT_NUMBER_OF_REGISTERS + 2)
+#define TMP_FREG (SLJIT_NUMBER_OF_FLOAT_REGISTERS + 1)
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
-/* Last register + 1. */
-#define TMP_REG1 (SLJIT_NUMBER_OF_REGISTERS + 2)
-
static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 3] = {
- 0, 0, 2, 1, 0, 0, 0, 0, 0, 0, 7, 6, 3, 4, 5
+ 0, 0, 2, 1, 0, 0, 0, 0, 0, 0, 5, 7, 6, 4, 3
+};
+
+static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 2] = {
+ 0, 1, 2, 3, 4, 5, 6, 7, 0
};
#define CHECK_EXTRA_REGS(p, w, do) \
@@ -81,12 +83,10 @@ static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 3] = {
#else /* SLJIT_CONFIG_X86_32 */
-/* Last register + 1. */
-#define TMP_REG1 (SLJIT_NUMBER_OF_REGISTERS + 2)
#define TMP_REG2 (SLJIT_NUMBER_OF_REGISTERS + 3)
/* Note: r12 & 0x7 == 0b100, which is decoded as a SIB byte being present
- Note: avoid to use r12 and r13 for memory addessing
+ Note: avoid using r12 and r13 for memory addressing,
therefore it is better for r12 to be a higher saved register. */
#ifndef _WIN64
/* Args: rdi(=7), rsi(=6), rdx(=2), rcx(=1), r8, r9. Scratches: rax(=0), r10, r11 */
@@ -95,7 +95,7 @@ static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 4] = {
};
/* low-map. reg_map & 0x7. */
static const sljit_u8 reg_lmap[SLJIT_NUMBER_OF_REGISTERS + 4] = {
- 0, 0, 6, 7, 1, 0, 3, 2, 4, 5, 5, 6, 7, 3, 4, 2, 1
+ 0, 0, 6, 7, 1, 0, 3, 2, 4, 5, 5, 6, 7, 3, 4, 2, 1
};
#else
/* Args: rcx(=1), rdx(=2), r8, r9. Scratches: rax(=0), r10, r11 */
@@ -109,12 +109,12 @@ static const sljit_u8 reg_lmap[SLJIT_NUMBER_OF_REGISTERS + 4] = {
#endif
/* Args: xmm0-xmm3 */
-static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 1] = {
- 4, 0, 1, 2, 3, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15
+static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 2] = {
+ 0, 0, 1, 2, 3, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 4
};
/* low-map. freg_map & 0x7. */
-static const sljit_u8 freg_lmap[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 1] = {
- 4, 0, 1, 2, 3, 5, 6, 7, 0, 1, 2, 3, 4, 5, 6, 7
+static const sljit_u8 freg_lmap[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 2] = {
+ 0, 0, 1, 2, 3, 5, 6, 7, 0, 1, 2, 3, 4, 5, 6, 7, 4
};
#define REX_W 0x48
@@ -140,155 +140,242 @@ static const sljit_u8 freg_lmap[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 1] = {
#define U8(v) ((sljit_u8)(v))
-
/* Size flags for emit_x86_instruction: */
-#define EX86_BIN_INS 0x0010
-#define EX86_SHIFT_INS 0x0020
-#define EX86_REX 0x0040
-#define EX86_NO_REXW 0x0080
-#define EX86_BYTE_ARG 0x0100
-#define EX86_HALF_ARG 0x0200
-#define EX86_PREF_66 0x0400
-#define EX86_PREF_F2 0x0800
-#define EX86_PREF_F3 0x1000
-#define EX86_SSE2_OP1 0x2000
-#define EX86_SSE2_OP2 0x4000
+#define EX86_BIN_INS ((sljit_uw)0x000010)
+#define EX86_SHIFT_INS ((sljit_uw)0x000020)
+#define EX86_BYTE_ARG ((sljit_uw)0x000040)
+#define EX86_HALF_ARG ((sljit_uw)0x000080)
+/* Size flags for both emit_x86_instruction and emit_vex_instruction: */
+#define EX86_REX ((sljit_uw)0x000100)
+#define EX86_NO_REXW ((sljit_uw)0x000200)
+#define EX86_PREF_66 ((sljit_uw)0x000400)
+#define EX86_PREF_F2 ((sljit_uw)0x000800)
+#define EX86_PREF_F3 ((sljit_uw)0x001000)
+#define EX86_SSE2_OP1 ((sljit_uw)0x002000)
+#define EX86_SSE2_OP2 ((sljit_uw)0x004000)
#define EX86_SSE2 (EX86_SSE2_OP1 | EX86_SSE2_OP2)
+#define EX86_VEX_EXT ((sljit_uw)0x008000)
+/* Op flags for emit_vex_instruction: */
+#define VEX_OP_0F38 ((sljit_uw)0x010000)
+#define VEX_OP_0F3A ((sljit_uw)0x020000)
+#define VEX_SSE2_OPV ((sljit_uw)0x040000)
+#define VEX_AUTO_W ((sljit_uw)0x080000)
+#define VEX_W ((sljit_uw)0x100000)
+#define VEX_256 ((sljit_uw)0x200000)
+
+#define EX86_SELECT_66(op) (((op) & SLJIT_32) ? 0 : EX86_PREF_66)
+#define EX86_SELECT_F2_F3(op) (((op) & SLJIT_32) ? EX86_PREF_F3 : EX86_PREF_F2)
/* --------------------------------------------------------------------- */
-/* Instrucion forms */
+/* Instruction forms */
/* --------------------------------------------------------------------- */
-#define ADD (/* BINARY */ 0 << 3)
-#define ADD_EAX_i32 0x05
-#define ADD_r_rm 0x03
-#define ADD_rm_r 0x01
-#define ADDSD_x_xm 0x58
-#define ADC (/* BINARY */ 2 << 3)
-#define ADC_EAX_i32 0x15
-#define ADC_r_rm 0x13
-#define ADC_rm_r 0x11
-#define AND (/* BINARY */ 4 << 3)
-#define AND_EAX_i32 0x25
-#define AND_r_rm 0x23
-#define AND_rm_r 0x21
-#define ANDPD_x_xm 0x54
-#define BSR_r_rm (/* GROUP_0F */ 0xbd)
-#define BSF_r_rm (/* GROUP_0F */ 0xbc)
-#define CALL_i32 0xe8
-#define CALL_rm (/* GROUP_FF */ 2 << 3)
-#define CDQ 0x99
-#define CMOVE_r_rm (/* GROUP_0F */ 0x44)
-#define CMP (/* BINARY */ 7 << 3)
-#define CMP_EAX_i32 0x3d
-#define CMP_r_rm 0x3b
-#define CMP_rm_r 0x39
-#define CVTPD2PS_x_xm 0x5a
-#define CVTSI2SD_x_rm 0x2a
-#define CVTTSD2SI_r_xm 0x2c
-#define DIV (/* GROUP_F7 */ 6 << 3)
-#define DIVSD_x_xm 0x5e
-#define FLDS 0xd9
-#define FLDL 0xdd
-#define FSTPS 0xd9
-#define FSTPD 0xdd
-#define INT3 0xcc
-#define IDIV (/* GROUP_F7 */ 7 << 3)
-#define IMUL (/* GROUP_F7 */ 5 << 3)
-#define IMUL_r_rm (/* GROUP_0F */ 0xaf)
-#define IMUL_r_rm_i8 0x6b
-#define IMUL_r_rm_i32 0x69
-#define JE_i8 0x74
-#define JNE_i8 0x75
-#define JMP_i8 0xeb
-#define JMP_i32 0xe9
-#define JMP_rm (/* GROUP_FF */ 4 << 3)
-#define LEA_r_m 0x8d
-#define LOOP_i8 0xe2
-#define LZCNT_r_rm (/* GROUP_F3 */ /* GROUP_0F */ 0xbd)
-#define MOV_r_rm 0x8b
-#define MOV_r_i32 0xb8
-#define MOV_rm_r 0x89
-#define MOV_rm_i32 0xc7
-#define MOV_rm8_i8 0xc6
-#define MOV_rm8_r8 0x88
-#define MOVAPS_x_xm 0x28
-#define MOVAPS_xm_x 0x29
-#define MOVSD_x_xm 0x10
-#define MOVSD_xm_x 0x11
-#define MOVSXD_r_rm 0x63
-#define MOVSX_r_rm8 (/* GROUP_0F */ 0xbe)
-#define MOVSX_r_rm16 (/* GROUP_0F */ 0xbf)
-#define MOVZX_r_rm8 (/* GROUP_0F */ 0xb6)
-#define MOVZX_r_rm16 (/* GROUP_0F */ 0xb7)
-#define MUL (/* GROUP_F7 */ 4 << 3)
-#define MULSD_x_xm 0x59
-#define NEG_rm (/* GROUP_F7 */ 3 << 3)
-#define NOP 0x90
-#define NOT_rm (/* GROUP_F7 */ 2 << 3)
-#define OR (/* BINARY */ 1 << 3)
-#define OR_r_rm 0x0b
-#define OR_EAX_i32 0x0d
-#define OR_rm_r 0x09
-#define OR_rm8_r8 0x08
-#define POP_r 0x58
-#define POP_rm 0x8f
-#define POPF 0x9d
-#define PREFETCH 0x18
-#define PUSH_i32 0x68
-#define PUSH_r 0x50
-#define PUSH_rm (/* GROUP_FF */ 6 << 3)
-#define PUSHF 0x9c
-#define ROL (/* SHIFT */ 0 << 3)
-#define ROR (/* SHIFT */ 1 << 3)
-#define RET_near 0xc3
-#define RET_i16 0xc2
-#define SBB (/* BINARY */ 3 << 3)
-#define SBB_EAX_i32 0x1d
-#define SBB_r_rm 0x1b
-#define SBB_rm_r 0x19
-#define SAR (/* SHIFT */ 7 << 3)
-#define SHL (/* SHIFT */ 4 << 3)
-#define SHLD (/* GROUP_0F */ 0xa5)
-#define SHRD (/* GROUP_0F */ 0xad)
-#define SHR (/* SHIFT */ 5 << 3)
-#define SUB (/* BINARY */ 5 << 3)
-#define SUB_EAX_i32 0x2d
-#define SUB_r_rm 0x2b
-#define SUB_rm_r 0x29
-#define SUBSD_x_xm 0x5c
-#define TEST_EAX_i32 0xa9
-#define TEST_rm_r 0x85
-#define TZCNT_r_rm (/* GROUP_F3 */ /* GROUP_0F */ 0xbc)
-#define UCOMISD_x_xm 0x2e
-#define UNPCKLPD_x_xm 0x14
-#define XCHG_EAX_r 0x90
-#define XCHG_r_rm 0x87
-#define XOR (/* BINARY */ 6 << 3)
-#define XOR_EAX_i32 0x35
-#define XOR_r_rm 0x33
-#define XOR_rm_r 0x31
-#define XORPD_x_xm 0x57
-
-#define GROUP_0F 0x0f
-#define GROUP_F3 0xf3
-#define GROUP_F7 0xf7
-#define GROUP_FF 0xff
-#define GROUP_BINARY_81 0x81
-#define GROUP_BINARY_83 0x83
-#define GROUP_SHIFT_1 0xd1
-#define GROUP_SHIFT_N 0xc1
-#define GROUP_SHIFT_CL 0xd3
-
-#define MOD_REG 0xc0
-#define MOD_DISP8 0x40
-
-#define INC_SIZE(s) (*inst++ = U8(s), compiler->size += (s))
-
-#define PUSH_REG(r) (*inst++ = U8(PUSH_r + (r)))
-#define POP_REG(r) (*inst++ = U8(POP_r + (r)))
-#define RET() (*inst++ = RET_near)
-#define RET_I16(n) (*inst++ = RET_i16, *inst++ = U8(n), *inst++ = 0)
+#define ADD (/* BINARY */ 0 << 3)
+#define ADD_EAX_i32 0x05
+#define ADD_r_rm 0x03
+#define ADD_rm_r 0x01
+#define ADDSD_x_xm 0x58
+#define ADC (/* BINARY */ 2 << 3)
+#define ADC_EAX_i32 0x15
+#define ADC_r_rm 0x13
+#define ADC_rm_r 0x11
+#define AND (/* BINARY */ 4 << 3)
+#define AND_EAX_i32 0x25
+#define AND_r_rm 0x23
+#define AND_rm_r 0x21
+#define ANDPD_x_xm 0x54
+#define BSR_r_rm (/* GROUP_0F */ 0xbd)
+#define BSF_r_rm (/* GROUP_0F */ 0xbc)
+#define BSWAP_r (/* GROUP_0F */ 0xc8)
+#define CALL_i32 0xe8
+#define CALL_rm (/* GROUP_FF */ 2 << 3)
+#define CDQ 0x99
+#define CMOVE_r_rm (/* GROUP_0F */ 0x44)
+#define CMP (/* BINARY */ 7 << 3)
+#define CMP_EAX_i32 0x3d
+#define CMP_r_rm 0x3b
+#define CMP_rm_r 0x39
+#define CMPS_x_xm 0xc2
+#define CMPXCHG_rm_r 0xb1
+#define CMPXCHG_rm8_r 0xb0
+#define CVTPD2PS_x_xm 0x5a
+#define CVTPS2PD_x_xm 0x5a
+#define CVTSI2SD_x_rm 0x2a
+#define CVTTSD2SI_r_xm 0x2c
+#define DIV (/* GROUP_F7 */ 6 << 3)
+#define DIVSD_x_xm 0x5e
+#define EXTRACTPS_x_xm 0x17
+#define FLDS 0xd9
+#define FLDL 0xdd
+#define FSTPS 0xd9
+#define FSTPD 0xdd
+#define INSERTPS_x_xm 0x21
+#define INT3 0xcc
+#define IDIV (/* GROUP_F7 */ 7 << 3)
+#define IMUL (/* GROUP_F7 */ 5 << 3)
+#define IMUL_r_rm (/* GROUP_0F */ 0xaf)
+#define IMUL_r_rm_i8 0x6b
+#define IMUL_r_rm_i32 0x69
+#define JL_i8 0x7c
+#define JE_i8 0x74
+#define JNC_i8 0x73
+#define JNE_i8 0x75
+#define JMP_i8 0xeb
+#define JMP_i32 0xe9
+#define JMP_rm (/* GROUP_FF */ 4 << 3)
+#define LEA_r_m 0x8d
+#define LOOP_i8 0xe2
+#define LZCNT_r_rm (/* GROUP_F3 */ /* GROUP_0F */ 0xbd)
+#define MOV_r_rm 0x8b
+#define MOV_r_i32 0xb8
+#define MOV_rm_r 0x89
+#define MOV_rm_i32 0xc7
+#define MOV_rm8_i8 0xc6
+#define MOV_rm8_r8 0x88
+#define MOVAPS_x_xm 0x28
+#define MOVAPS_xm_x 0x29
+#define MOVD_x_rm 0x6e
+#define MOVD_rm_x 0x7e
+#define MOVDDUP_x_xm 0x12
+#define MOVDQA_x_xm 0x6f
+#define MOVDQA_xm_x 0x7f
+#define MOVHLPS_x_x 0x12
+#define MOVHPD_m_x 0x17
+#define MOVHPD_x_m 0x16
+#define MOVLHPS_x_x 0x16
+#define MOVLPD_m_x 0x13
+#define MOVLPD_x_m 0x12
+#define MOVMSKPS_r_x (/* GROUP_0F */ 0x50)
+#define MOVQ_x_xm (/* GROUP_0F */ 0x7e)
+#define MOVSD_x_xm 0x10
+#define MOVSD_xm_x 0x11
+#define MOVSHDUP_x_xm 0x16
+#define MOVSXD_r_rm 0x63
+#define MOVSX_r_rm8 (/* GROUP_0F */ 0xbe)
+#define MOVSX_r_rm16 (/* GROUP_0F */ 0xbf)
+#define MOVUPS_x_xm 0x10
+#define MOVZX_r_rm8 (/* GROUP_0F */ 0xb6)
+#define MOVZX_r_rm16 (/* GROUP_0F */ 0xb7)
+#define MUL (/* GROUP_F7 */ 4 << 3)
+#define MULSD_x_xm 0x59
+#define NEG_rm (/* GROUP_F7 */ 3 << 3)
+#define NOP 0x90
+#define NOT_rm (/* GROUP_F7 */ 2 << 3)
+#define OR (/* BINARY */ 1 << 3)
+#define OR_r_rm 0x0b
+#define OR_EAX_i32 0x0d
+#define OR_rm_r 0x09
+#define OR_rm8_r8 0x08
+#define ORPD_x_xm 0x56
+#define PACKSSWB_x_xm (/* GROUP_0F */ 0x63)
+#define PAND_x_xm 0xdb
+#define PCMPEQD_x_xm 0x76
+#define PINSRB_x_rm_i8 0x20
+#define PINSRW_x_rm_i8 0xc4
+#define PINSRD_x_rm_i8 0x22
+#define PEXTRB_rm_x_i8 0x14
+#define PEXTRW_rm_x_i8 0x15
+#define PEXTRD_rm_x_i8 0x16
+#define PMOVMSKB_r_x (/* GROUP_0F */ 0xd7)
+#define PMOVSXBD_x_xm 0x21
+#define PMOVSXBQ_x_xm 0x22
+#define PMOVSXBW_x_xm 0x20
+#define PMOVSXDQ_x_xm 0x25
+#define PMOVSXWD_x_xm 0x23
+#define PMOVSXWQ_x_xm 0x24
+#define PMOVZXBD_x_xm 0x31
+#define PMOVZXBQ_x_xm 0x32
+#define PMOVZXBW_x_xm 0x30
+#define PMOVZXDQ_x_xm 0x35
+#define PMOVZXWD_x_xm 0x33
+#define PMOVZXWQ_x_xm 0x34
+#define POP_r 0x58
+#define POP_rm 0x8f
+#define POPF 0x9d
+#define POR_x_xm 0xeb
+#define PREFETCH 0x18
+#define PSHUFB_x_xm 0x00
+#define PSHUFD_x_xm 0x70
+#define PSHUFLW_x_xm 0x70
+#define PSRLDQ_x 0x73
+#define PSLLD_x_i8 0x72
+#define PSLLQ_x_i8 0x73
+#define PUSH_i32 0x68
+#define PUSH_r 0x50
+#define PUSH_rm (/* GROUP_FF */ 6 << 3)
+#define PUSHF 0x9c
+#define PXOR_x_xm 0xef
+#define ROL (/* SHIFT */ 0 << 3)
+#define ROR (/* SHIFT */ 1 << 3)
+#define RET_near 0xc3
+#define RET_i16 0xc2
+#define SBB (/* BINARY */ 3 << 3)
+#define SBB_EAX_i32 0x1d
+#define SBB_r_rm 0x1b
+#define SBB_rm_r 0x19
+#define SAR (/* SHIFT */ 7 << 3)
+#define SHL (/* SHIFT */ 4 << 3)
+#define SHLD (/* GROUP_0F */ 0xa5)
+#define SHRD (/* GROUP_0F */ 0xad)
+#define SHR (/* SHIFT */ 5 << 3)
+#define SHUFPS_x_xm 0xc6
+#define SUB (/* BINARY */ 5 << 3)
+#define SUB_EAX_i32 0x2d
+#define SUB_r_rm 0x2b
+#define SUB_rm_r 0x29
+#define SUBSD_x_xm 0x5c
+#define TEST_EAX_i32 0xa9
+#define TEST_rm_r 0x85
+#define TZCNT_r_rm (/* GROUP_F3 */ /* GROUP_0F */ 0xbc)
+#define UCOMISD_x_xm 0x2e
+#define UNPCKLPD_x_xm 0x14
+#define UNPCKLPS_x_xm 0x14
+#define VBROADCASTSD_x_xm 0x19
+#define VBROADCASTSS_x_xm 0x18
+#define VEXTRACTF128_x_ym 0x19
+#define VEXTRACTI128_x_ym 0x39
+#define VINSERTF128_y_y_xm 0x18
+#define VINSERTI128_y_y_xm 0x38
+#define VPBROADCASTB_x_xm 0x78
+#define VPBROADCASTD_x_xm 0x58
+#define VPBROADCASTQ_x_xm 0x59
+#define VPBROADCASTW_x_xm 0x79
+#define VPERMPD_y_ym 0x01
+#define VPERMQ_y_ym 0x00
+#define XCHG_EAX_r 0x90
+#define XCHG_r_rm 0x87
+#define XOR (/* BINARY */ 6 << 3)
+#define XOR_EAX_i32 0x35
+#define XOR_r_rm 0x33
+#define XOR_rm_r 0x31
+#define XORPD_x_xm 0x57
+
+#define GROUP_0F 0x0f
+#define GROUP_66 0x66
+#define GROUP_F3 0xf3
+#define GROUP_F7 0xf7
+#define GROUP_FF 0xff
+#define GROUP_BINARY_81 0x81
+#define GROUP_BINARY_83 0x83
+#define GROUP_SHIFT_1 0xd1
+#define GROUP_SHIFT_N 0xc1
+#define GROUP_SHIFT_CL 0xd3
+#define GROUP_LOCK 0xf0
+
+#define MOD_REG 0xc0
+#define MOD_DISP8 0x40
+
+#define INC_SIZE(s) (*inst++ = U8(s), compiler->size += (s))
+
+#define PUSH_REG(r) (*inst++ = U8(PUSH_r + (r)))
+#define POP_REG(r) (*inst++ = U8(POP_r + (r)))
+#define RET() (*inst++ = RET_near)
+#define RET_I16(n) (*inst++ = RET_i16, *inst++ = U8(n), *inst++ = 0)
+
+#define SLJIT_INST_LABEL 255
+#define SLJIT_INST_JUMP 254
+#define SLJIT_INST_MOV_ADDR 253
+#define SLJIT_INST_CONST 252
/* Multithreading does not affect these static variables, since they store
built-in CPU features. Therefore they can be overwritten by different threads
@@ -297,9 +384,13 @@ static const sljit_u8 freg_lmap[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 1] = {
#if (defined SLJIT_DETECT_SSE2 && SLJIT_DETECT_SSE2)
#define CPU_FEATURE_SSE2 0x002
#endif
-#define CPU_FEATURE_LZCNT 0x004
-#define CPU_FEATURE_TZCNT 0x008
-#define CPU_FEATURE_CMOV 0x010
+#define CPU_FEATURE_SSE41 0x004
+#define CPU_FEATURE_LZCNT 0x008
+#define CPU_FEATURE_TZCNT 0x010
+#define CPU_FEATURE_CMOV 0x020
+#define CPU_FEATURE_AVX 0x040
+#define CPU_FEATURE_AVX2 0x080
+#define CPU_FEATURE_OSXSAVE 0x100
static sljit_u32 cpu_feature_list = 0;
@@ -332,124 +423,156 @@ static SLJIT_INLINE void sljit_unaligned_store_sw(void *addr, sljit_sw value)
/* Utility functions */
/******************************************************/
-static void get_cpu_features(void)
+static void execute_cpu_id(sljit_u32 info[4])
{
- sljit_u32 feature_list = CPU_FEATURE_DETECTED;
- sljit_u32 value;
-
#if defined(_MSC_VER) && _MSC_VER >= 1400
- int CPUInfo[4];
+ __cpuidex((int*)info, (int)info[0], (int)info[2]);
- __cpuid(CPUInfo, 0);
- if (CPUInfo[0] >= 7) {
- __cpuidex(CPUInfo, 7, 0);
- if (CPUInfo[1] & 0x8)
- feature_list |= CPU_FEATURE_TZCNT;
- }
-
- __cpuid(CPUInfo, (int)0x80000001);
- if (CPUInfo[2] & 0x20)
- feature_list |= CPU_FEATURE_LZCNT;
-
- __cpuid(CPUInfo, 1);
- value = (sljit_u32)CPUInfo[3];
-
-#elif defined(__GNUC__) || defined(__INTEL_COMPILER) || defined(__SUNPRO_C)
+#elif defined(__GNUC__) || defined(__INTEL_COMPILER) || defined(__SUNPRO_C) || defined(__TINYC__)
/* AT&T syntax. */
__asm__ (
- "movl $0x0, %%eax\n"
- "lzcnt %%eax, %%eax\n"
- "setnz %%al\n"
- "movl %%eax, %0\n"
- : "=g" (value)
+#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
+ "movl %0, %%esi\n"
+ "movl (%%esi), %%eax\n"
+ "movl 8(%%esi), %%ecx\n"
+ "pushl %%ebx\n"
+ "cpuid\n"
+ "movl %%eax, (%%esi)\n"
+ "movl %%ebx, 4(%%esi)\n"
+ "popl %%ebx\n"
+ "movl %%ecx, 8(%%esi)\n"
+ "movl %%edx, 12(%%esi)\n"
+#else /* !SLJIT_CONFIG_X86_32 */
+ "movq %0, %%rsi\n"
+ "movl (%%rsi), %%eax\n"
+ "movl 8(%%rsi), %%ecx\n"
+ "cpuid\n"
+ "movl %%eax, (%%rsi)\n"
+ "movl %%ebx, 4(%%rsi)\n"
+ "movl %%ecx, 8(%%rsi)\n"
+ "movl %%edx, 12(%%rsi)\n"
+#endif /* SLJIT_CONFIG_X86_32 */
:
+ : "r" (info)
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- : "eax"
-#else
- : "rax"
-#endif
+ : "memory", "eax", "ecx", "edx", "esi"
+#else /* !SLJIT_CONFIG_X86_32 */
+ : "memory", "rax", "rbx", "rcx", "rdx", "rsi"
+#endif /* SLJIT_CONFIG_X86_32 */
);
- if (value & 0x1)
- feature_list |= CPU_FEATURE_LZCNT;
+#else /* _MSC_VER < 1400 */
- __asm__ (
- "movl $0x0, %%eax\n"
- "tzcnt %%eax, %%eax\n"
- "setnz %%al\n"
- "movl %%eax, %0\n"
- : "=g" (value)
- :
+ /* Intel syntax. */
+ __asm {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- : "eax"
-#else
- : "rax"
-#endif
- );
+ mov esi, info
+ mov eax, [esi]
+ mov ecx, [esi + 8]
+ cpuid
+ mov [esi], eax
+ mov [esi + 4], ebx
+ mov [esi + 8], ecx
+ mov [esi + 12], edx
+#else /* !SLJIT_CONFIG_X86_32 */
+ mov rsi, info
+ mov eax, [rsi]
+ mov ecx, [rsi + 8]
+ cpuid
+ mov [rsi], eax
+ mov [rsi + 4], ebx
+ mov [rsi + 8], ecx
+ mov [rsi + 12], edx
+#endif /* SLJIT_CONFIG_X86_32 */
+ }
+
+#endif /* _MSC_VER && _MSC_VER >= 1400 */
+}
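
A rough usage sketch for this helper (not part of the imported sources; sketch_has_tzcnt is a made-up name): the caller preloads info[0] with the CPUID leaf and info[2] with the sub-leaf, then reads EAX..EDX back from info[0]..info[3], exactly as get_cpu_features() does below.

static int sketch_has_tzcnt(void)
{
	sljit_u32 info[4] = { 0 };

	execute_cpu_id(info);          /* leaf 0: info[0] returns the highest supported leaf */
	if (info[0] < 7)
		return 0;

	info[0] = 7;                   /* leaf 7, sub-leaf 0: structured extended features */
	info[2] = 0;
	execute_cpu_id(info);
	return (info[1] & 0x8) != 0;   /* EBX bit 3: BMI1, which provides TZCNT */
}
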
+
+static sljit_u32 execute_get_xcr0_low(void)
+{
+ sljit_u32 xcr0;
+
+#if defined(_MSC_VER) && _MSC_VER >= 1400
+
+ xcr0 = (sljit_u32)_xgetbv(0);
- if (value & 0x1)
- feature_list |= CPU_FEATURE_TZCNT;
+#elif defined(__GNUC__) || defined(__INTEL_COMPILER) || defined(__SUNPRO_C) || defined(__TINYC__)
+ /* AT&T syntax. */
__asm__ (
- "movl $0x1, %%eax\n"
-#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- /* On x86-32, there is no red zone, so this
- should work (no need for a local variable). */
- "push %%ebx\n"
-#endif
- "cpuid\n"
-#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- "pop %%ebx\n"
-#endif
- "movl %%edx, %0\n"
- : "=g" (value)
+ "xorl %%ecx, %%ecx\n"
+ "xgetbv\n"
+ : "=a" (xcr0)
:
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- : "%eax", "%ecx", "%edx"
-#else
- : "%rax", "%rbx", "%rcx", "%rdx"
-#endif
+ : "ecx", "edx"
+#else /* !SLJIT_CONFIG_X86_32 */
+ : "rcx", "rdx"
+#endif /* SLJIT_CONFIG_X86_32 */
);
-#else /* _MSC_VER && _MSC_VER >= 1400 */
+#else /* _MSC_VER < 1400 */
/* Intel syntax. */
__asm {
- mov eax, 0
- lzcnt eax, eax
- setnz al
- mov value, eax
+ mov ecx, 0
+ xgetbv
+ mov xcr0, eax
}
- if (value & 0x1)
- feature_list |= CPU_FEATURE_LZCNT;
+#endif /* _MSC_VER && _MSC_VER >= 1400 */
+ return xcr0;
+}
- __asm {
- mov eax, 0
- tzcnt eax, eax
- setnz al
- mov value, eax
- }
+static void get_cpu_features(void)
+{
+ sljit_u32 feature_list = CPU_FEATURE_DETECTED;
+ sljit_u32 info[4] = {0};
+ sljit_u32 max_id;
- if (value & 0x1)
- feature_list |= CPU_FEATURE_TZCNT;
+ execute_cpu_id(info);
+ max_id = info[0];
- __asm {
- mov eax, 1
- cpuid
- mov value, edx
+ if (max_id >= 7) {
+ info[0] = 7;
+ info[2] = 0;
+ execute_cpu_id(info);
+
+ if (info[1] & 0x8)
+ feature_list |= CPU_FEATURE_TZCNT;
+ if (info[1] & 0x20)
+ feature_list |= CPU_FEATURE_AVX2;
}
-#endif /* _MSC_VER && _MSC_VER >= 1400 */
+ if (max_id >= 1) {
+ info[0] = 1;
+ execute_cpu_id(info);
+ if (info[2] & 0x80000)
+ feature_list |= CPU_FEATURE_SSE41;
+ if (info[2] & 0x8000000)
+ feature_list |= CPU_FEATURE_OSXSAVE;
+ if (info[2] & 0x10000000)
+ feature_list |= CPU_FEATURE_AVX;
#if (defined SLJIT_DETECT_SSE2 && SLJIT_DETECT_SSE2)
- if (value & 0x4000000)
- feature_list |= CPU_FEATURE_SSE2;
+ if (info[3] & 0x4000000)
+ feature_list |= CPU_FEATURE_SSE2;
#endif
- if (value & 0x8000)
- feature_list |= CPU_FEATURE_CMOV;
+ if (info[3] & 0x8000)
+ feature_list |= CPU_FEATURE_CMOV;
+ }
+
+ info[0] = 0x80000001;
+ execute_cpu_id(info);
+
+ if (info[2] & 0x20)
+ feature_list |= CPU_FEATURE_LZCNT;
+
+ if ((feature_list & CPU_FEATURE_OSXSAVE) && (execute_get_xcr0_low() & 0x4) == 0)
+ feature_list &= ~(sljit_u32)(CPU_FEATURE_AVX | CPU_FEATURE_AVX2);
cpu_feature_list = feature_list;
}
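
The final step above guards against CPUs that report AVX while the operating system does not save the YMM registers: XGETBV is only legal when OSXSAVE (CPUID leaf 1, ECX bit 27) is set, and XCR0 bit 2 indicates that YMM state is enabled. A standalone restatement of that gate, as a hedged sketch (sketch_avx_usable is a made-up name):

static int sketch_avx_usable(sljit_u32 cpuid_leaf1_ecx, sljit_u32 xcr0_low)
{
	int osxsave = (cpuid_leaf1_ecx & 0x8000000) != 0;   /* ECX bit 27: XGETBV is usable */
	int avx     = (cpuid_leaf1_ecx & 0x10000000) != 0;  /* ECX bit 28: AVX instructions */

	return avx && osxsave && (xcr0_low & 0x4) != 0;     /* XCR0 bit 2: YMM state enabled */
}
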
@@ -458,15 +581,15 @@ static sljit_u8 get_jump_code(sljit_uw type)
{
switch (type) {
case SLJIT_EQUAL:
+ case SLJIT_ATOMIC_STORED:
case SLJIT_F_EQUAL:
case SLJIT_UNORDERED_OR_EQUAL:
- case SLJIT_ORDERED_EQUAL: /* Not supported. */
return 0x84 /* je */;
case SLJIT_NOT_EQUAL:
+ case SLJIT_ATOMIC_NOT_STORED:
case SLJIT_F_NOT_EQUAL:
case SLJIT_ORDERED_NOT_EQUAL:
- case SLJIT_UNORDERED_OR_NOT_EQUAL: /* Not supported. */
return 0x85 /* jne */;
case SLJIT_LESS:
@@ -514,61 +637,58 @@ static sljit_u8 get_jump_code(sljit_uw type)
return 0x81 /* jno */;
case SLJIT_UNORDERED:
+ case SLJIT_ORDERED_EQUAL: /* NaN. */
return 0x8a /* jp */;
case SLJIT_ORDERED:
+ case SLJIT_UNORDERED_OR_NOT_EQUAL: /* Not NaN. */
return 0x8b /* jpo */;
}
return 0;
}
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
-static sljit_u8* generate_far_jump_code(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_sw executable_offset);
-#else
-static sljit_u8* generate_far_jump_code(struct sljit_jump *jump, sljit_u8 *code_ptr);
-static sljit_u8* generate_put_label_code(struct sljit_put_label *put_label, sljit_u8 *code_ptr, sljit_uw max_label);
-#endif
+static sljit_u8* detect_far_jump_type(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_sw executable_offset);
+#else /* !SLJIT_CONFIG_X86_32 */
+static sljit_u8* detect_far_jump_type(struct sljit_jump *jump, sljit_u8 *code_ptr);
+static sljit_u8* generate_mov_addr_code(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_u8 *code, sljit_sw executable_offset);
+#endif /* SLJIT_CONFIG_X86_32 */
-static sljit_u8* generate_near_jump_code(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_u8 *code, sljit_sw executable_offset)
+static sljit_u8* detect_near_jump_type(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_u8 *code, sljit_sw executable_offset)
{
sljit_uw type = jump->flags >> TYPE_SHIFT;
sljit_s32 short_jump;
sljit_uw label_addr;
- if (jump->flags & JUMP_LABEL)
- label_addr = (sljit_uw)(code + jump->u.label->size);
- else
+ if (jump->flags & JUMP_ADDR)
label_addr = jump->u.target - (sljit_uw)executable_offset;
+ else
+ label_addr = (sljit_uw)(code + jump->u.label->size);
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- if ((sljit_sw)(label_addr - (jump->addr + 1)) > HALFWORD_MAX || (sljit_sw)(label_addr - (jump->addr + 1)) < HALFWORD_MIN)
- return generate_far_jump_code(jump, code_ptr);
-#endif
+ if ((sljit_sw)(label_addr - (sljit_uw)(code_ptr + 6)) > HALFWORD_MAX || (sljit_sw)(label_addr - (sljit_uw)(code_ptr + 5)) < HALFWORD_MIN)
+ return detect_far_jump_type(jump, code_ptr);
+#endif /* SLJIT_CONFIG_X86_64 */
- short_jump = (sljit_sw)(label_addr - (jump->addr + 2)) >= -128 && (sljit_sw)(label_addr - (jump->addr + 2)) <= 127;
+ short_jump = (sljit_sw)(label_addr - (sljit_uw)(code_ptr + 2)) >= -0x80 && (sljit_sw)(label_addr - (sljit_uw)(code_ptr + 2)) <= 0x7f;
if (type == SLJIT_JUMP) {
if (short_jump)
*code_ptr++ = JMP_i8;
else
*code_ptr++ = JMP_i32;
- jump->addr++;
- }
- else if (type >= SLJIT_FAST_CALL) {
+ } else if (type > SLJIT_JUMP) {
short_jump = 0;
*code_ptr++ = CALL_i32;
- jump->addr++;
- }
- else if (short_jump) {
+ } else if (short_jump) {
*code_ptr++ = U8(get_jump_code(type) - 0x10);
- jump->addr++;
- }
- else {
+ } else {
*code_ptr++ = GROUP_0F;
*code_ptr++ = get_jump_code(type);
- jump->addr += 2;
}
+ jump->addr = (sljit_uw)code_ptr;
+
if (short_jump) {
jump->flags |= PATCH_MB;
code_ptr += sizeof(sljit_s8);
@@ -580,7 +700,172 @@ static sljit_u8* generate_near_jump_code(struct sljit_jump *jump, sljit_u8 *code
return code_ptr;
}
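
For reference, the encodings this function chooses between, as a sketch (not part of the imported sources; sketch_jump_size is a made-up helper): the short forms use a signed 8-bit displacement measured from the end of the instruction, which is why the range test above offsets the target by code_ptr + 2.

static sljit_uw sketch_jump_size(sljit_uw type, int is_short)
{
	if (type == SLJIT_JUMP)
		return is_short ? 2 : 5;   /* EB rel8 / E9 rel32 */
	if (type > SLJIT_JUMP)
		return 5;                  /* E8 rel32: calls have no short form */
	return is_short ? 2 : 6;       /* 7x rel8 / 0F 8x rel32 conditional jumps */
}
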
-SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compiler)
+static void generate_jump_or_mov_addr(struct sljit_jump *jump, sljit_sw executable_offset)
+{
+ sljit_uw flags = jump->flags;
+ sljit_uw addr = (flags & JUMP_ADDR) ? jump->u.target : jump->u.label->u.addr;
+ sljit_uw jump_addr = jump->addr;
+ SLJIT_UNUSED_ARG(executable_offset);
+
+ if (SLJIT_UNLIKELY(flags & JUMP_MOV_ADDR)) {
+#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
+ sljit_unaligned_store_sw((void*)(jump_addr - sizeof(sljit_sw)), (sljit_sw)addr);
+#else /* SLJIT_CONFIG_X86_32 */
+ if (flags & PATCH_MD) {
+ SLJIT_ASSERT(addr > HALFWORD_MAX);
+ sljit_unaligned_store_sw((void*)(jump_addr - sizeof(sljit_sw)), (sljit_sw)addr);
+ return;
+ }
+
+ if (flags & PATCH_MW) {
+ addr -= (sljit_uw)SLJIT_ADD_EXEC_OFFSET((sljit_u8*)jump_addr, executable_offset);
+ SLJIT_ASSERT((sljit_sw)addr <= HALFWORD_MAX && (sljit_sw)addr >= HALFWORD_MIN);
+ } else {
+ SLJIT_ASSERT(addr <= HALFWORD_MAX);
+ }
+ sljit_unaligned_store_s32((void*)(jump_addr - sizeof(sljit_s32)), (sljit_s32)addr);
+#endif /* !SLJIT_CONFIG_X86_32 */
+ return;
+ }
+
+#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
+ if (SLJIT_UNLIKELY(flags & PATCH_MD)) {
+ SLJIT_ASSERT(!(flags & JUMP_ADDR));
+ sljit_unaligned_store_sw((void*)jump_addr, (sljit_sw)addr);
+ return;
+ }
+#endif /* SLJIT_CONFIG_X86_64 */
+
+ addr -= (sljit_uw)SLJIT_ADD_EXEC_OFFSET((sljit_u8*)jump_addr, executable_offset);
+
+ if (flags & PATCH_MB) {
+ addr -= sizeof(sljit_s8);
+ SLJIT_ASSERT((sljit_sw)addr <= 0x7f && (sljit_sw)addr >= -0x80);
+ *(sljit_u8*)jump_addr = U8(addr);
+ return;
+ } else if (flags & PATCH_MW) {
+ addr -= sizeof(sljit_s32);
+#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
+ sljit_unaligned_store_sw((void*)jump_addr, (sljit_sw)addr);
+#else /* !SLJIT_CONFIG_X86_32 */
+ SLJIT_ASSERT((sljit_sw)addr <= HALFWORD_MAX && (sljit_sw)addr >= HALFWORD_MIN);
+ sljit_unaligned_store_s32((void*)jump_addr, (sljit_s32)addr);
+#endif /* SLJIT_CONFIG_X86_32 */
+ }
+}
+
+static void reduce_code_size(struct sljit_compiler *compiler)
+{
+ struct sljit_label *label;
+ struct sljit_jump *jump;
+ sljit_uw next_label_size;
+ sljit_uw next_jump_addr;
+ sljit_uw next_min_addr;
+ sljit_uw size_reduce = 0;
+ sljit_sw diff;
+ sljit_uw type;
+#if (defined SLJIT_DEBUG && SLJIT_DEBUG)
+ sljit_uw size_reduce_max;
+#endif /* SLJIT_DEBUG */
+
+ label = compiler->labels;
+ jump = compiler->jumps;
+
+ next_label_size = SLJIT_GET_NEXT_SIZE(label);
+ next_jump_addr = SLJIT_GET_NEXT_ADDRESS(jump);
+
+ while (1) {
+ next_min_addr = next_label_size;
+ if (next_jump_addr < next_min_addr)
+ next_min_addr = next_jump_addr;
+
+ if (next_min_addr == SLJIT_MAX_ADDRESS)
+ break;
+
+ if (next_min_addr == next_label_size) {
+ label->size -= size_reduce;
+
+ label = label->next;
+ next_label_size = SLJIT_GET_NEXT_SIZE(label);
+ }
+
+ if (next_min_addr != next_jump_addr)
+ continue;
+
+ if (!(jump->flags & JUMP_MOV_ADDR)) {
+#if (defined SLJIT_DEBUG && SLJIT_DEBUG)
+ size_reduce_max = size_reduce + (((jump->flags >> TYPE_SHIFT) < SLJIT_JUMP) ? CJUMP_MAX_SIZE : JUMP_MAX_SIZE);
+#endif /* SLJIT_DEBUG */
+
+ if (!(jump->flags & SLJIT_REWRITABLE_JUMP)) {
+ if (jump->flags & JUMP_ADDR) {
+#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
+ if (jump->u.target <= 0xffffffffl)
+ size_reduce += sizeof(sljit_s32);
+#endif /* SLJIT_CONFIG_X86_64 */
+ } else {
+ /* Unit size: instruction. */
+ diff = (sljit_sw)jump->u.label->size - (sljit_sw)(jump->addr - size_reduce);
+ type = jump->flags >> TYPE_SHIFT;
+
+#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
+ if (type == SLJIT_JUMP) {
+ if (diff <= 0x7f + 2 && diff >= -0x80 + 2)
+ size_reduce += JUMP_MAX_SIZE - 2;
+ else if (diff <= HALFWORD_MAX + 5 && diff >= HALFWORD_MIN + 5)
+ size_reduce += JUMP_MAX_SIZE - 5;
+ } else if (type < SLJIT_JUMP) {
+ if (diff <= 0x7f + 2 && diff >= -0x80 + 2)
+ size_reduce += CJUMP_MAX_SIZE - 2;
+ else if (diff <= HALFWORD_MAX + 6 && diff >= HALFWORD_MIN + 6)
+ size_reduce += CJUMP_MAX_SIZE - 6;
+ } else {
+ if (diff <= HALFWORD_MAX + 5 && diff >= HALFWORD_MIN + 5)
+ size_reduce += JUMP_MAX_SIZE - 5;
+ }
+#else /* !SLJIT_CONFIG_X86_64 */
+ if (type == SLJIT_JUMP) {
+ if (diff <= 0x7f + 2 && diff >= -0x80 + 2)
+ size_reduce += JUMP_MAX_SIZE - 2;
+ } else if (type < SLJIT_JUMP) {
+ if (diff <= 0x7f + 2 && diff >= -0x80 + 2)
+ size_reduce += CJUMP_MAX_SIZE - 2;
+ }
+#endif /* SLJIT_CONFIG_X86_64 */
+ }
+ }
+
+#if (defined SLJIT_DEBUG && SLJIT_DEBUG)
+ jump->flags |= (size_reduce_max - size_reduce) << JUMP_SIZE_SHIFT;
+#endif /* SLJIT_DEBUG */
+#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
+ } else {
+#if (defined SLJIT_DEBUG && SLJIT_DEBUG)
+ size_reduce_max = size_reduce + 10;
+#endif /* SLJIT_DEBUG */
+
+ if (!(jump->flags & JUMP_ADDR)) {
+ diff = (sljit_sw)jump->u.label->size - (sljit_sw)(jump->addr - size_reduce - 3);
+
+ if (diff <= HALFWORD_MAX && diff >= HALFWORD_MIN)
+ size_reduce += 3;
+ } else if (jump->u.target <= 0xffffffffl)
+ size_reduce += (jump->flags & MOV_ADDR_HI) ? 4 : 5;
+
+#if (defined SLJIT_DEBUG && SLJIT_DEBUG)
+ jump->flags |= (size_reduce_max - size_reduce) << JUMP_SIZE_SHIFT;
+#endif /* SLJIT_DEBUG */
+#endif /* SLJIT_CONFIG_X86_64 */
+ }
+
+ jump = jump->next;
+ next_jump_addr = SLJIT_GET_NEXT_ADDRESS(jump);
+ }
+
+ compiler->size -= size_reduce;
+}
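
Code size is first computed with every jump reserved at its maximum encoding; this pass walks labels and jumps in address order, reclaims the bytes saved whenever a target is already known to fit a rel8 or rel32 displacement, and shifts label sizes accordingly. A hedged restatement of the displacement windows tested above (sketch_fits is a made-up helper):

static int sketch_fits(sljit_sw diff, sljit_sw ins_size, sljit_sw range)
{
	/* The "+ ins_size" terms appear because the displacement is relative
	   to the end of the short or near encoding, which is ins_size bytes long. */
	return diff <= range + ins_size && diff >= -range - 1 + ins_size;
}
/* rel8 window:  sketch_fits(diff, 2, 0x7f)
   rel32 window: sketch_fits(diff, 5, HALFWORD_MAX) for jmp/call,
                 sketch_fits(diff, 6, HALFWORD_MAX) for conditional jumps (x86-64 only) */
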
+
+SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compiler, sljit_s32 options, void *exec_allocator_data)
{
struct sljit_memory_fragment *buf;
sljit_u8 *code;
@@ -589,77 +874,82 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
sljit_u8 *buf_end;
sljit_u8 len;
sljit_sw executable_offset;
- sljit_uw jump_addr;
+#if (defined SLJIT_DEBUG && SLJIT_DEBUG)
+ sljit_uw addr;
+#endif /* SLJIT_DEBUG */
struct sljit_label *label;
struct sljit_jump *jump;
struct sljit_const *const_;
- struct sljit_put_label *put_label;
CHECK_ERROR_PTR();
CHECK_PTR(check_sljit_generate_code(compiler));
- reverse_buf(compiler);
+
+ reduce_code_size(compiler);
/* Second code generation pass. */
- code = (sljit_u8*)SLJIT_MALLOC_EXEC(compiler->size, compiler->exec_allocator_data);
+ code = (sljit_u8*)allocate_executable_memory(compiler->size, options, exec_allocator_data, &executable_offset);
PTR_FAIL_WITH_EXEC_IF(code);
+
+ reverse_buf(compiler);
buf = compiler->buf;
code_ptr = code;
label = compiler->labels;
jump = compiler->jumps;
const_ = compiler->consts;
- put_label = compiler->put_labels;
- executable_offset = SLJIT_EXEC_OFFSET(code);
do {
buf_ptr = buf->memory;
buf_end = buf_ptr + buf->used_size;
do {
len = *buf_ptr++;
- if (len > 0) {
+ SLJIT_ASSERT(len > 0);
+ if (len < SLJIT_INST_CONST) {
/* The code is already generated. */
SLJIT_MEMCPY(code_ptr, buf_ptr, len);
code_ptr += len;
buf_ptr += len;
- }
- else {
- switch (*buf_ptr) {
- case 0:
- label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
+ } else {
+ switch (len) {
+ case SLJIT_INST_LABEL:
+ label->u.addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
label->size = (sljit_uw)(code_ptr - code);
label = label->next;
break;
- case 1:
- jump->addr = (sljit_uw)code_ptr;
+ case SLJIT_INST_JUMP:
+#if (defined SLJIT_DEBUG && SLJIT_DEBUG)
+ addr = (sljit_uw)code_ptr;
+#endif /* SLJIT_DEBUG */
if (!(jump->flags & SLJIT_REWRITABLE_JUMP))
- code_ptr = generate_near_jump_code(jump, code_ptr, code, executable_offset);
+ code_ptr = detect_near_jump_type(jump, code_ptr, code, executable_offset);
else {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- code_ptr = generate_far_jump_code(jump, code_ptr, executable_offset);
-#else
- code_ptr = generate_far_jump_code(jump, code_ptr);
-#endif
+ code_ptr = detect_far_jump_type(jump, code_ptr, executable_offset);
+#else /* !SLJIT_CONFIG_X86_32 */
+ code_ptr = detect_far_jump_type(jump, code_ptr);
+#endif /* SLJIT_CONFIG_X86_32 */
}
+
+ SLJIT_ASSERT((sljit_uw)code_ptr - addr <= ((jump->flags >> JUMP_SIZE_SHIFT) & 0x1f));
jump = jump->next;
break;
- case 2:
- const_->addr = ((sljit_uw)code_ptr) - sizeof(sljit_sw);
- const_ = const_->next;
+ case SLJIT_INST_MOV_ADDR:
+#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
+ code_ptr = generate_mov_addr_code(jump, code_ptr, code, executable_offset);
+#endif /* SLJIT_CONFIG_X86_64 */
+ jump->addr = (sljit_uw)code_ptr;
+ jump = jump->next;
break;
default:
- SLJIT_ASSERT(*buf_ptr == 3);
- SLJIT_ASSERT(put_label->label);
- put_label->addr = (sljit_uw)code_ptr;
-#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- code_ptr = generate_put_label_code(put_label, code_ptr, (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code, executable_offset) + put_label->label->size);
-#endif
- put_label = put_label->next;
+ SLJIT_ASSERT(len == SLJIT_INST_CONST);
+ const_->addr = ((sljit_uw)code_ptr) - sizeof(sljit_sw);
+ const_ = const_->next;
break;
}
- buf_ptr++;
}
} while (buf_ptr < buf_end);
+
SLJIT_ASSERT(buf_ptr == buf_end);
buf = buf->next;
} while (buf);
@@ -667,61 +957,14 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
SLJIT_ASSERT(!label);
SLJIT_ASSERT(!jump);
SLJIT_ASSERT(!const_);
- SLJIT_ASSERT(!put_label);
SLJIT_ASSERT(code_ptr <= code + compiler->size);
jump = compiler->jumps;
while (jump) {
- if (jump->flags & (PATCH_MB | PATCH_MW)) {
- if (jump->flags & JUMP_LABEL)
- jump_addr = jump->u.label->addr;
- else
- jump_addr = jump->u.target;
-
- jump_addr -= jump->addr + (sljit_uw)executable_offset;
-
- if (jump->flags & PATCH_MB) {
- jump_addr -= sizeof(sljit_s8);
- SLJIT_ASSERT((sljit_sw)jump_addr >= -128 && (sljit_sw)jump_addr <= 127);
- *(sljit_u8*)jump->addr = U8(jump_addr);
- } else {
- jump_addr -= sizeof(sljit_s32);
-#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- sljit_unaligned_store_sw((void*)jump->addr, (sljit_sw)jump_addr);
-#else
- SLJIT_ASSERT((sljit_sw)jump_addr >= HALFWORD_MIN && (sljit_sw)jump_addr <= HALFWORD_MAX);
- sljit_unaligned_store_s32((void*)jump->addr, (sljit_s32)jump_addr);
-#endif
- }
- }
-#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- else if (jump->flags & PATCH_MD) {
- SLJIT_ASSERT(jump->flags & JUMP_LABEL);
- sljit_unaligned_store_sw((void*)jump->addr, (sljit_sw)jump->u.label->addr);
- }
-#endif
-
+ generate_jump_or_mov_addr(jump, executable_offset);
jump = jump->next;
}
- put_label = compiler->put_labels;
- while (put_label) {
-#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- sljit_unaligned_store_sw((void*)(put_label->addr - sizeof(sljit_sw)), (sljit_sw)put_label->label->addr);
-#else
- if (put_label->flags & PATCH_MD) {
- SLJIT_ASSERT(put_label->label->addr > HALFWORD_MAX);
- sljit_unaligned_store_sw((void*)(put_label->addr - sizeof(sljit_sw)), (sljit_sw)put_label->label->addr);
- }
- else {
- SLJIT_ASSERT(put_label->label->addr <= HALFWORD_MAX);
- sljit_unaligned_store_s32((void*)(put_label->addr - sizeof(sljit_s32)), (sljit_s32)put_label->label->addr);
- }
-#endif
-
- put_label = put_label->next;
- }
-
compiler->error = SLJIT_ERR_COMPILED;
compiler->executable_offset = executable_offset;
compiler->executable_size = (sljit_uw)(code_ptr - code);
@@ -737,7 +980,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_has_cpu_feature(sljit_s32 feature_type)
switch (feature_type) {
case SLJIT_HAS_FPU:
#ifdef SLJIT_IS_FPU_AVAILABLE
- return SLJIT_IS_FPU_AVAILABLE;
+ return (SLJIT_IS_FPU_AVAILABLE) != 0;
#elif (defined SLJIT_DETECT_SSE2 && SLJIT_DETECT_SSE2)
if (cpu_feature_list == 0)
get_cpu_features();
@@ -768,19 +1011,28 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_has_cpu_feature(sljit_s32 feature_type)
get_cpu_features();
return (cpu_feature_list & CPU_FEATURE_CMOV) != 0;
+ case SLJIT_HAS_REV:
case SLJIT_HAS_ROT:
case SLJIT_HAS_PREFETCH:
+ case SLJIT_HAS_COPY_F32:
+ case SLJIT_HAS_COPY_F64:
+ case SLJIT_HAS_ATOMIC:
return 1;
- case SLJIT_HAS_SSE2:
-#if (defined SLJIT_DETECT_SSE2 && SLJIT_DETECT_SSE2)
+#if !(defined SLJIT_IS_FPU_AVAILABLE) || SLJIT_IS_FPU_AVAILABLE
+ case SLJIT_HAS_AVX:
if (cpu_feature_list == 0)
get_cpu_features();
- return (cpu_feature_list & CPU_FEATURE_SSE2) != 0;
-#else /* !SLJIT_DETECT_SSE2 */
- return 1;
-#endif /* SLJIT_DETECT_SSE2 */
-
+ return (cpu_feature_list & CPU_FEATURE_AVX) != 0;
+ case SLJIT_HAS_AVX2:
+ if (cpu_feature_list == 0)
+ get_cpu_features();
+ return (cpu_feature_list & CPU_FEATURE_AVX2) != 0;
+ case SLJIT_HAS_SIMD:
+ if (cpu_feature_list == 0)
+ get_cpu_features();
+ return (cpu_feature_list & CPU_FEATURE_SSE41) != 0;
+#endif /* SLJIT_IS_FPU_AVAILABLE */
default:
return 0;
}
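
A sketch of how a client of the public API is expected to consume these answers (not part of the imported sources; sketch_pick_vector_width and its return convention are made up): on this backend SLJIT_HAS_SIMD is answered with the SSE4.1 bit, and SLJIT_HAS_AVX/AVX2 additionally depend on the XCR0 check performed in get_cpu_features().

static int sketch_pick_vector_width(void)
{
	if (!sljit_has_cpu_feature(SLJIT_HAS_SIMD))
		return 0;                     /* no SSE4.1: avoid vector code paths */
	if (sljit_has_cpu_feature(SLJIT_HAS_AVX2))
		return 32;                    /* 256-bit (YMM) registers are usable */
	return 16;                        /* 128-bit (XMM) registers only */
}
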
@@ -788,16 +1040,13 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_has_cpu_feature(sljit_s32 feature_type)
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_cmp_info(sljit_s32 type)
{
- if (type < SLJIT_UNORDERED || type > SLJIT_ORDERED_LESS_EQUAL)
- return 0;
-
switch (type) {
case SLJIT_ORDERED_EQUAL:
case SLJIT_UNORDERED_OR_NOT_EQUAL:
- return 0;
+ return 2;
}
- return 1;
+ return 0;
}
/* --------------------------------------------------------------------- */
@@ -821,8 +1070,8 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_cmp_info(sljit_s32 type)
BINARY_IMM32(op_imm, immw, arg, argw); \
} \
else { \
- FAIL_IF(emit_load_imm64(compiler, (arg == TMP_REG1) ? TMP_REG2 : TMP_REG1, immw)); \
- inst = emit_x86_instruction(compiler, 1, (arg == TMP_REG1) ? TMP_REG2 : TMP_REG1, 0, arg, argw); \
+ FAIL_IF(emit_load_imm64(compiler, FAST_IS_REG(arg) ? TMP_REG2 : TMP_REG1, immw)); \
+ inst = emit_x86_instruction(compiler, 1, FAST_IS_REG(arg) ? TMP_REG2 : TMP_REG1, 0, arg, argw); \
FAIL_IF(!inst); \
*inst = (op_mr); \
} \
@@ -841,6 +1090,15 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_cmp_info(sljit_s32 type)
#endif /* SLJIT_CONFIG_X86_64 */
+static sljit_s32 emit_byte(struct sljit_compiler *compiler, sljit_u8 byte)
+{
+ sljit_u8 *inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
+ FAIL_IF(!inst);
+ INC_SIZE(1);
+ *inst = byte;
+ return SLJIT_SUCCESS;
+}
+
static sljit_s32 emit_mov(struct sljit_compiler *compiler,
sljit_s32 dst, sljit_sw dstw,
sljit_s32 src, sljit_sw srcw);
@@ -848,6 +1106,14 @@ static sljit_s32 emit_mov(struct sljit_compiler *compiler,
#define EMIT_MOV(compiler, dst, dstw, src, srcw) \
FAIL_IF(emit_mov(compiler, dst, dstw, src, srcw));
+static sljit_s32 emit_groupf(struct sljit_compiler *compiler,
+ sljit_uw op,
+ sljit_s32 dst, sljit_s32 src, sljit_sw srcw);
+
+static sljit_s32 emit_groupf_ext(struct sljit_compiler *compiler,
+ sljit_uw op,
+ sljit_s32 dst, sljit_s32 src, sljit_sw srcw);
+
static SLJIT_INLINE sljit_s32 emit_sse2_store(struct sljit_compiler *compiler,
sljit_s32 single, sljit_s32 dst, sljit_sw dstw, sljit_s32 src);
@@ -858,6 +1124,10 @@ static sljit_s32 emit_cmp_binary(struct sljit_compiler *compiler,
sljit_s32 src1, sljit_sw src1w,
sljit_s32 src2, sljit_sw src2w);
+static sljit_s32 emit_cmov_generic(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 dst_reg,
+ sljit_s32 src, sljit_sw srcw);
+
static SLJIT_INLINE sljit_s32 emit_endbranch(struct sljit_compiler *compiler)
{
#if (defined SLJIT_CONFIG_X86_CET && SLJIT_CONFIG_X86_CET)
@@ -866,14 +1136,14 @@ static SLJIT_INLINE sljit_s32 emit_endbranch(struct sljit_compiler *compiler)
inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
FAIL_IF(!inst);
INC_SIZE(4);
- *inst++ = 0xf3;
- *inst++ = 0x0f;
- *inst++ = 0x1e;
+ inst[0] = GROUP_F3;
+ inst[1] = GROUP_0F;
+ inst[2] = 0x1e;
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- *inst = 0xfb;
-#else
- *inst = 0xfa;
-#endif
+ inst[3] = 0xfb;
+#else /* !SLJIT_CONFIG_X86_32 */
+ inst[3] = 0xfa;
+#endif /* SLJIT_CONFIG_X86_32 */
#else /* !SLJIT_CONFIG_X86_CET */
SLJIT_UNUSED_ARG(compiler);
#endif /* SLJIT_CONFIG_X86_CET */
@@ -896,13 +1166,17 @@ static SLJIT_INLINE sljit_s32 emit_rdssp(struct sljit_compiler *compiler, sljit_
inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
FAIL_IF(!inst);
INC_SIZE(size);
- *inst++ = 0xf3;
+ *inst++ = GROUP_F3;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
*inst++ = REX_W | (reg_map[reg] <= 7 ? 0 : REX_B);
#endif
- *inst++ = 0x0f;
- *inst++ = 0x1e;
- *inst = (0x3 << 6) | (0x1 << 3) | (reg_map[reg] & 0x7);
+ inst[0] = GROUP_0F;
+ inst[1] = 0x1e;
+#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
+ inst[2] = U8(MOD_REG | (0x1 << 3) | reg_lmap[reg]);
+#else
+ inst[2] = U8(MOD_REG | (0x1 << 3) | reg_map[reg]);
+#endif
return SLJIT_SUCCESS;
}
@@ -920,13 +1194,13 @@ static SLJIT_INLINE sljit_s32 emit_incssp(struct sljit_compiler *compiler, sljit
inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
FAIL_IF(!inst);
INC_SIZE(size);
- *inst++ = 0xf3;
+ *inst++ = GROUP_F3;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
*inst++ = REX_W | (reg_map[reg] <= 7 ? 0 : REX_B);
#endif
- *inst++ = 0x0f;
- *inst++ = 0xae;
- *inst = (0x3 << 6) | (0x5 << 3) | (reg_map[reg] & 0x7);
+ inst[0] = GROUP_0F;
+ inst[1] = 0xae;
+ inst[2] = (0x3 << 6) | (0x5 << 3) | (reg_map[reg] & 0x7);
return SLJIT_SUCCESS;
}
@@ -954,19 +1228,7 @@ static SLJIT_INLINE sljit_s32 adjust_shadow_stack(struct sljit_compiler *compile
FAIL_IF(emit_rdssp(compiler, TMP_REG1));
/* Load return address on shadow stack into TMP_REG1. */
-#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- SLJIT_ASSERT(reg_map[TMP_REG1] == 5);
-
- /* Hand code unsupported "mov 0x0(%ebp),%ebp". */
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 3);
- FAIL_IF(!inst);
- INC_SIZE(3);
- *inst++ = 0x8b;
- *inst++ = 0x6d;
- *inst = 0;
-#else /* !SLJIT_CONFIG_X86_32 */
EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_MEM1(TMP_REG1), 0);
-#endif /* SLJIT_CONFIG_X86_32 */
/* Compare return address against TMP_REG1. */
FAIL_IF(emit_cmp_binary (compiler, TMP_REG1, 0, src, srcw));
@@ -994,8 +1256,8 @@ static SLJIT_INLINE sljit_s32 adjust_shadow_stack(struct sljit_compiler *compile
inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
FAIL_IF(!inst);
INC_SIZE(2);
- *inst++ = JMP_i8;
- *inst = size_before_rdssp_inst - compiler->size;
+ inst[0] = JMP_i8;
+ inst[1] = size_before_rdssp_inst - compiler->size;
*jz_after_cmp_inst = compiler->size - size_jz_after_cmp_inst;
#else /* !SLJIT_CONFIG_X86_CET || !__SHSTK__ */
@@ -1024,7 +1286,8 @@ static sljit_s32 emit_mov(struct sljit_compiler *compiler,
*inst = MOV_rm_r;
return SLJIT_SUCCESS;
}
- if (src & SLJIT_IMM) {
+
+ if (src == SLJIT_IMM) {
if (FAST_IS_REG(dst)) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
return emit_do_imm(compiler, MOV_r_i32 | reg_map[dst], srcw);
@@ -1071,6 +1334,27 @@ static sljit_s32 emit_mov(struct sljit_compiler *compiler,
return SLJIT_SUCCESS;
}
+static sljit_s32 emit_cmov_generic(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 dst_reg,
+ sljit_s32 src, sljit_sw srcw)
+{
+ sljit_u8* inst;
+ sljit_uw size;
+
+ SLJIT_ASSERT(type >= SLJIT_EQUAL && type <= SLJIT_ORDERED_LESS_EQUAL);
+
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
+ FAIL_IF(!inst);
+ INC_SIZE(2);
+ inst[0] = U8(get_jump_code((sljit_uw)type ^ 0x1) - 0x10);
+
+ size = compiler->size;
+ EMIT_MOV(compiler, dst_reg, 0, src, srcw);
+
+ inst[1] = U8(compiler->size - size);
+ return SLJIT_SUCCESS;
+}
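
When CMOV is unavailable, a conditional move is emulated with a two-byte short jump on the inverted condition (type ^ 0x1 flips between the paired sljit condition codes, and subtracting 0x10 turns the near Jcc opcode into its short form); inst[1] is then patched with the size of the emitted move. A rough sketch of the resulting shape (not part of the imported sources):

/*
 *     j<not type>  .skip     ; 2 bytes: get_jump_code(type ^ 0x1) - 0x10, rel8
 *     mov          dst, src  ; whatever form emit_mov() selects
 * .skip:                     ; the rel8 byte equals the size of the mov
 */
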
+
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op0(struct sljit_compiler *compiler, sljit_s32 op)
{
sljit_u8 *inst;
@@ -1083,17 +1367,9 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op0(struct sljit_compiler *compile
switch (GET_OPCODE(op)) {
case SLJIT_BREAKPOINT:
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
- FAIL_IF(!inst);
- INC_SIZE(1);
- *inst = INT3;
- break;
+ return emit_byte(compiler, INT3);
case SLJIT_NOP:
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
- FAIL_IF(!inst);
- INC_SIZE(1);
- *inst = NOP;
- break;
+ return emit_byte(compiler, NOP);
case SLJIT_LMUL_UW:
case SLJIT_LMUL_SW:
case SLJIT_DIVMOD_UW:
@@ -1134,23 +1410,16 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op0(struct sljit_compiler *compile
#endif
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
- FAIL_IF(!inst);
- INC_SIZE(1);
- *inst = CDQ;
+ FAIL_IF(emit_byte(compiler, CDQ));
#else
- if (compiler->mode32) {
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
- FAIL_IF(!inst);
- INC_SIZE(1);
- *inst = CDQ;
- } else {
+ if (!compiler->mode32) {
inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
FAIL_IF(!inst);
INC_SIZE(2);
- *inst++ = REX_W;
- *inst = CDQ;
- }
+ inst[0] = REX_W;
+ inst[1] = CDQ;
+ } else
+ FAIL_IF(emit_byte(compiler, CDQ));
#endif
}
@@ -1158,14 +1427,14 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op0(struct sljit_compiler *compile
inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
FAIL_IF(!inst);
INC_SIZE(2);
- *inst++ = GROUP_F7;
- *inst = MOD_REG | ((op >= SLJIT_DIVMOD_UW) ? reg_map[TMP_REG1] : reg_map[SLJIT_R1]);
-#else
+ inst[0] = GROUP_F7;
+ inst[1] = MOD_REG | ((op >= SLJIT_DIVMOD_UW) ? reg_map[TMP_REG1] : reg_map[SLJIT_R1]);
+#else /* !SLJIT_CONFIG_X86_32 */
#ifdef _WIN64
size = (!compiler->mode32 || op >= SLJIT_DIVMOD_UW) ? 3 : 2;
-#else
+#else /* !_WIN64 */
size = (!compiler->mode32) ? 3 : 2;
-#endif
+#endif /* _WIN64 */
inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
FAIL_IF(!inst);
INC_SIZE(size);
@@ -1174,29 +1443,29 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op0(struct sljit_compiler *compile
*inst++ = REX_W | ((op >= SLJIT_DIVMOD_UW) ? REX_B : 0);
else if (op >= SLJIT_DIVMOD_UW)
*inst++ = REX_B;
- *inst++ = GROUP_F7;
- *inst = MOD_REG | ((op >= SLJIT_DIVMOD_UW) ? reg_lmap[TMP_REG1] : reg_lmap[SLJIT_R1]);
-#else
+ inst[0] = GROUP_F7;
+ inst[1] = MOD_REG | ((op >= SLJIT_DIVMOD_UW) ? reg_lmap[TMP_REG1] : reg_lmap[SLJIT_R1]);
+#else /* !_WIN64 */
if (!compiler->mode32)
*inst++ = REX_W;
- *inst++ = GROUP_F7;
- *inst = MOD_REG | reg_map[SLJIT_R1];
-#endif
-#endif
+ inst[0] = GROUP_F7;
+ inst[1] = MOD_REG | reg_map[SLJIT_R1];
+#endif /* _WIN64 */
+#endif /* SLJIT_CONFIG_X86_32 */
switch (op) {
case SLJIT_LMUL_UW:
- *inst |= MUL;
+ inst[1] |= MUL;
break;
case SLJIT_LMUL_SW:
- *inst |= IMUL;
+ inst[1] |= IMUL;
break;
case SLJIT_DIVMOD_UW:
case SLJIT_DIV_UW:
- *inst |= DIV;
+ inst[1] |= DIV;
break;
case SLJIT_DIVMOD_SW:
case SLJIT_DIV_SW:
- *inst |= IDIV;
+ inst[1] |= IDIV;
break;
}
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) && !defined(_WIN64)
@@ -1216,29 +1485,18 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op0(struct sljit_compiler *compile
return SLJIT_SUCCESS;
}
-#define ENCODE_PREFIX(prefix) \
- do { \
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 1); \
- FAIL_IF(!inst); \
- INC_SIZE(1); \
- *inst = U8(prefix); \
- } while (0)
-
static sljit_s32 emit_mov_byte(struct sljit_compiler *compiler, sljit_s32 sign,
sljit_s32 dst, sljit_sw dstw,
sljit_s32 src, sljit_sw srcw)
{
sljit_u8* inst;
sljit_s32 dst_r;
-#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- sljit_s32 work_r;
-#endif
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
compiler->mode32 = 0;
#endif
- if (src & SLJIT_IMM) {
+ if (src == SLJIT_IMM) {
if (FAST_IS_REG(dst)) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
return emit_do_imm(compiler, MOV_r_i32 | reg_map[dst], srcw);
@@ -1267,100 +1525,33 @@ static sljit_s32 emit_mov_byte(struct sljit_compiler *compiler, sljit_s32 sign,
#else
dst_r = src;
#endif
- }
+ } else {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- else if (FAST_IS_REG(src) && reg_map[src] >= 4) {
- /* src, dst are registers. */
- SLJIT_ASSERT(FAST_IS_REG(dst));
- if (reg_map[dst] < 4) {
- if (dst != src)
- EMIT_MOV(compiler, dst, 0, src, 0);
- inst = emit_x86_instruction(compiler, 2, dst, 0, dst, 0);
- FAIL_IF(!inst);
- *inst++ = GROUP_0F;
- *inst = sign ? MOVSX_r_rm8 : MOVZX_r_rm8;
- }
- else {
- if (dst != src)
- EMIT_MOV(compiler, dst, 0, src, 0);
- if (sign) {
- /* shl reg, 24 */
- inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_IMM, 24, dst, 0);
- FAIL_IF(!inst);
- *inst |= SHL;
- /* sar reg, 24 */
- inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_IMM, 24, dst, 0);
- FAIL_IF(!inst);
- *inst |= SAR;
- }
- else {
+ if (FAST_IS_REG(src) && reg_map[src] >= 4) {
+ /* Both src and dst are registers. */
+ SLJIT_ASSERT(FAST_IS_REG(dst));
+
+ if (src == dst && !sign) {
inst = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, 0xff, dst, 0);
FAIL_IF(!inst);
*(inst + 1) |= AND;
+ return SLJIT_SUCCESS;
}
+
+ EMIT_MOV(compiler, TMP_REG1, 0, src, 0);
+ src = TMP_REG1;
+ srcw = 0;
}
- return SLJIT_SUCCESS;
- }
-#endif
- else {
+#endif /* !SLJIT_CONFIG_X86_32 */
+
/* src can be memory addr or reg_map[src] < 4 on x86_32 architectures. */
- inst = emit_x86_instruction(compiler, 2, dst_r, 0, src, srcw);
- FAIL_IF(!inst);
- *inst++ = GROUP_0F;
- *inst = sign ? MOVSX_r_rm8 : MOVZX_r_rm8;
+ FAIL_IF(emit_groupf(compiler, sign ? MOVSX_r_rm8 : MOVZX_r_rm8, dst_r, src, srcw));
}
if (dst & SLJIT_MEM) {
-#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- if (dst_r == TMP_REG1) {
- /* Find a non-used register, whose reg_map[src] < 4. */
- if ((dst & REG_MASK) == SLJIT_R0) {
- if ((dst & OFFS_REG_MASK) == TO_OFFS_REG(SLJIT_R1))
- work_r = SLJIT_R2;
- else
- work_r = SLJIT_R1;
- }
- else {
- if ((dst & OFFS_REG_MASK) != TO_OFFS_REG(SLJIT_R0))
- work_r = SLJIT_R0;
- else if ((dst & REG_MASK) == SLJIT_R1)
- work_r = SLJIT_R2;
- else
- work_r = SLJIT_R1;
- }
-
- if (work_r == SLJIT_R0) {
- ENCODE_PREFIX(XCHG_EAX_r | reg_map[TMP_REG1]);
- }
- else {
- inst = emit_x86_instruction(compiler, 1, work_r, 0, dst_r, 0);
- FAIL_IF(!inst);
- *inst = XCHG_r_rm;
- }
-
- inst = emit_x86_instruction(compiler, 1, work_r, 0, dst, dstw);
- FAIL_IF(!inst);
- *inst = MOV_rm8_r8;
-
- if (work_r == SLJIT_R0) {
- ENCODE_PREFIX(XCHG_EAX_r | reg_map[TMP_REG1]);
- }
- else {
- inst = emit_x86_instruction(compiler, 1, work_r, 0, dst_r, 0);
- FAIL_IF(!inst);
- *inst = XCHG_r_rm;
- }
- }
- else {
- inst = emit_x86_instruction(compiler, 1, dst_r, 0, dst, dstw);
- FAIL_IF(!inst);
- *inst = MOV_rm8_r8;
- }
-#else
inst = emit_x86_instruction(compiler, 1 | EX86_REX | EX86_NO_REXW, dst_r, 0, dst, dstw);
FAIL_IF(!inst);
*inst = MOV_rm8_r8;
-#endif
}
return SLJIT_SUCCESS;
@@ -1377,15 +1568,15 @@ static sljit_s32 emit_prefetch(struct sljit_compiler *compiler, sljit_s32 op,
inst = emit_x86_instruction(compiler, 2, 0, 0, src, srcw);
FAIL_IF(!inst);
- *inst++ = GROUP_0F;
- *inst++ = PREFETCH;
+ inst[0] = GROUP_0F;
+ inst[1] = PREFETCH;
if (op == SLJIT_PREFETCH_L1)
- *inst |= (1 << 3);
+ inst[2] |= (1 << 3);
else if (op == SLJIT_PREFETCH_L2)
- *inst |= (2 << 3);
+ inst[2] |= (2 << 3);
else if (op == SLJIT_PREFETCH_L3)
- *inst |= (3 << 3);
+ inst[2] |= (3 << 3);
return SLJIT_SUCCESS;
}
@@ -1401,7 +1592,7 @@ static sljit_s32 emit_mov_half(struct sljit_compiler *compiler, sljit_s32 sign,
compiler->mode32 = 0;
#endif
- if (src & SLJIT_IMM) {
+ if (src == SLJIT_IMM) {
if (FAST_IS_REG(dst)) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
return emit_do_imm(compiler, MOV_r_i32 | reg_map[dst], srcw);
@@ -1422,12 +1613,8 @@ static sljit_s32 emit_mov_half(struct sljit_compiler *compiler, sljit_s32 sign,
if ((dst & SLJIT_MEM) && FAST_IS_REG(src))
dst_r = src;
- else {
- inst = emit_x86_instruction(compiler, 2, dst_r, 0, src, srcw);
- FAIL_IF(!inst);
- *inst++ = GROUP_0F;
- *inst = sign ? MOVSX_r_rm16 : MOVZX_r_rm16;
- }
+ else
+ FAIL_IF(emit_groupf(compiler, sign ? MOVSX_r_rm16 : MOVZX_r_rm16, dst_r, src, srcw));
if (dst & SLJIT_MEM) {
inst = emit_x86_instruction(compiler, 1 | EX86_NO_REXW | EX86_PREF_66, dst_r, 0, dst, dstw);
@@ -1448,55 +1635,25 @@ static sljit_s32 emit_unary(struct sljit_compiler *compiler, sljit_u8 opcode,
/* Same input and output */
inst = emit_x86_instruction(compiler, 1, 0, 0, dst, dstw);
FAIL_IF(!inst);
- *inst++ = GROUP_F7;
- *inst |= opcode;
- return SLJIT_SUCCESS;
- }
-
- if (FAST_IS_REG(dst)) {
- EMIT_MOV(compiler, dst, 0, src, srcw);
- inst = emit_x86_instruction(compiler, 1, 0, 0, dst, 0);
- FAIL_IF(!inst);
- *inst++ = GROUP_F7;
- *inst |= opcode;
+ inst[0] = GROUP_F7;
+ inst[1] |= opcode;
return SLJIT_SUCCESS;
}
- EMIT_MOV(compiler, TMP_REG1, 0, src, srcw);
- inst = emit_x86_instruction(compiler, 1, 0, 0, TMP_REG1, 0);
- FAIL_IF(!inst);
- *inst++ = GROUP_F7;
- *inst |= opcode;
- EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
- return SLJIT_SUCCESS;
-}
-
-static sljit_s32 emit_not_with_flags(struct sljit_compiler *compiler,
- sljit_s32 dst, sljit_sw dstw,
- sljit_s32 src, sljit_sw srcw)
-{
- sljit_u8* inst;
-
if (FAST_IS_REG(dst)) {
EMIT_MOV(compiler, dst, 0, src, srcw);
inst = emit_x86_instruction(compiler, 1, 0, 0, dst, 0);
FAIL_IF(!inst);
- *inst++ = GROUP_F7;
- *inst |= NOT_rm;
- inst = emit_x86_instruction(compiler, 1, dst, 0, dst, 0);
- FAIL_IF(!inst);
- *inst = OR_r_rm;
+ inst[0] = GROUP_F7;
+ inst[1] |= opcode;
return SLJIT_SUCCESS;
}
EMIT_MOV(compiler, TMP_REG1, 0, src, srcw);
inst = emit_x86_instruction(compiler, 1, 0, 0, TMP_REG1, 0);
FAIL_IF(!inst);
- *inst++ = GROUP_F7;
- *inst |= NOT_rm;
- inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, TMP_REG1, 0);
- FAIL_IF(!inst);
- *inst = OR_r_rm;
+ inst[0] = GROUP_F7;
+ inst[1] |= opcode;
EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
return SLJIT_SUCCESS;
}
@@ -1514,32 +1671,19 @@ static sljit_s32 emit_clz_ctz(struct sljit_compiler *compiler, sljit_s32 is_clz,
sljit_s32 dst_r;
sljit_sw max;
- if (cpu_feature_list == 0)
- get_cpu_features();
+ SLJIT_ASSERT(cpu_feature_list != 0);
dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
if (is_clz ? (cpu_feature_list & CPU_FEATURE_LZCNT) : (cpu_feature_list & CPU_FEATURE_TZCNT)) {
- /* Group prefix added separately. */
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
- FAIL_IF(!inst);
- INC_SIZE(1);
- *inst++ = GROUP_F3;
-
- inst = emit_x86_instruction(compiler, 2, dst_r, 0, src, srcw);
- FAIL_IF(!inst);
- *inst++ = GROUP_0F;
- *inst = is_clz ? LZCNT_r_rm : TZCNT_r_rm;
+ FAIL_IF(emit_groupf(compiler, (is_clz ? LZCNT_r_rm : TZCNT_r_rm) | EX86_PREF_F3, dst_r, src, srcw));
if (dst & SLJIT_MEM)
EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
return SLJIT_SUCCESS;
}
- inst = emit_x86_instruction(compiler, 2, dst_r, 0, src, srcw);
- FAIL_IF(!inst);
- *inst++ = GROUP_0F;
- *inst = is_clz ? BSR_r_rm : BSF_r_rm;
+ FAIL_IF(emit_groupf(compiler, is_clz ? BSR_r_rm : BSF_r_rm, dst_r, src, srcw));
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
max = is_clz ? (32 + 31) : 32;
@@ -1553,11 +1697,11 @@ static sljit_s32 emit_clz_ctz(struct sljit_compiler *compiler, sljit_s32 is_clz,
inst = emit_x86_instruction(compiler, 2, dst_r, 0, SLJIT_MEM0(), is_clz ? (sljit_sw)&emit_clz_arg : (sljit_sw)&emit_ctz_arg);
FAIL_IF(!inst);
- *inst++ = GROUP_0F;
- *inst = CMOVE_r_rm;
+ inst[0] = GROUP_0F;
+ inst[1] = CMOVE_r_rm;
}
else
- FAIL_IF(sljit_emit_cmov_generic(compiler, SLJIT_EQUAL, dst_r, SLJIT_IMM, max));
+ FAIL_IF(emit_cmov_generic(compiler, SLJIT_EQUAL, dst_r, SLJIT_IMM, max));
if (is_clz) {
inst = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, 31, dst_r, 0);
@@ -1572,14 +1716,9 @@ static sljit_s32 emit_clz_ctz(struct sljit_compiler *compiler, sljit_s32 is_clz,
if (cpu_feature_list & CPU_FEATURE_CMOV) {
EMIT_MOV(compiler, TMP_REG2, 0, SLJIT_IMM, max);
-
- inst = emit_x86_instruction(compiler, 2, dst_r, 0, TMP_REG2, 0);
- FAIL_IF(!inst);
- *inst++ = GROUP_0F;
- *inst = CMOVE_r_rm;
- }
- else
- FAIL_IF(sljit_emit_cmov_generic(compiler, SLJIT_EQUAL, dst_r, SLJIT_IMM, max));
+ FAIL_IF(emit_groupf(compiler, CMOVE_r_rm, dst_r, TMP_REG2, 0));
+ } else
+ FAIL_IF(emit_cmov_generic(compiler, SLJIT_EQUAL, dst_r, SLJIT_IMM, max));
if (is_clz) {
inst = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, max >> 1, dst_r, 0);
@@ -1593,14 +1732,109 @@ static sljit_s32 emit_clz_ctz(struct sljit_compiler *compiler, sljit_s32 is_clz,
return SLJIT_SUCCESS;
}
+static sljit_s32 emit_bswap(struct sljit_compiler *compiler,
+ sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw,
+ sljit_s32 src, sljit_sw srcw)
+{
+ sljit_u8 *inst;
+ sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
+ sljit_uw size;
+#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
+ sljit_u8 rex = 0;
+#else /* !SLJIT_CONFIG_X86_64 */
+ sljit_s32 dst_is_ereg = op & SLJIT_32;
+#endif /* SLJIT_CONFIG_X86_64 */
+
+#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
+ if (op == SLJIT_REV_U32 || op == SLJIT_REV_S32)
+ compiler->mode32 = 1;
+#else /* !SLJIT_CONFIG_X86_64 */
+ op &= ~SLJIT_32;
+#endif /* SLJIT_CONFIG_X86_64 */
+
+ if (src != dst_r) {
+ /* Only the lower 16 bit is read for eregs. */
+ if (op == SLJIT_REV_U16 || op == SLJIT_REV_S16)
+ FAIL_IF(emit_mov_half(compiler, 0, dst_r, 0, src, srcw));
+ else
+ EMIT_MOV(compiler, dst_r, 0, src, srcw);
+ }
+
+ size = 2;
+#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
+ if (!compiler->mode32)
+ rex = REX_W;
+
+ if (reg_map[dst_r] >= 8)
+ rex |= REX_B;
+
+ if (rex != 0)
+ size++;
+#endif /* SLJIT_CONFIG_X86_64 */
+
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
+ FAIL_IF(!inst);
+ INC_SIZE(size);
+
+#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
+ if (rex != 0)
+ *inst++ = rex;
+
+ inst[0] = GROUP_0F;
+ inst[1] = BSWAP_r | reg_lmap[dst_r];
+#else /* !SLJIT_CONFIG_X86_64 */
+ inst[0] = GROUP_0F;
+ inst[1] = BSWAP_r | reg_map[dst_r];
+#endif /* SLJIT_CONFIG_X86_64 */
+
+ if (op == SLJIT_REV_U16 || op == SLJIT_REV_S16) {
+#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
+ size = compiler->mode32 ? 16 : 48;
+#else /* !SLJIT_CONFIG_X86_64 */
+ size = 16;
+#endif /* SLJIT_CONFIG_X86_64 */
+
+ inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_IMM, (sljit_sw)size, dst_r, 0);
+ FAIL_IF(!inst);
+ if (op == SLJIT_REV_U16)
+ inst[1] |= SHR;
+ else
+ inst[1] |= SAR;
+ }
+
+ if (dst & SLJIT_MEM) {
+#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
+ if (dst_is_ereg)
+ op = SLJIT_REV;
+#endif /* SLJIT_CONFIG_X86_32 */
+ if (op == SLJIT_REV_U16 || op == SLJIT_REV_S16)
+ return emit_mov_half(compiler, 0, dst, dstw, TMP_REG1, 0);
+
+ return emit_mov(compiler, dst, dstw, TMP_REG1, 0);
+ }
+
+#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
+ if (op == SLJIT_REV_S32) {
+ compiler->mode32 = 0;
+ inst = emit_x86_instruction(compiler, 1, dst, 0, dst, 0);
+ FAIL_IF(!inst);
+ *inst = MOVSXD_r_rm;
+ }
+#endif /* SLJIT_CONFIG_X86_64 */
+
+ return SLJIT_SUCCESS;
+}
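
SLJIT_REV_* is implemented with BSWAP on the full register; for the 16-bit variants the two interesting bytes end up in the upper half, so a shift right by 16 (48 when operating on a 64-bit register) brings them back, with SHR zero-extending for REV_U16 and SAR sign-extending for REV_S16, and REV_S32 on x86-64 re-sign-extends via MOVSXD. A plain-C model of the unsigned 16-bit case (a sketch; sketch_rev_u16 is a made-up name):

static sljit_u32 sketch_rev_u16(sljit_u32 value)
{
	/* value is assumed to be a zero-extended 16-bit quantity, as loaded
	   by emit_mov_half() above. */
	sljit_u32 swapped = (value >> 24) | ((value >> 8) & 0xff00)
		| ((value << 8) & 0xff0000) | (value << 24);     /* BSWAP */
	return swapped >> 16;                                /* SHR 16: zero-extends */
}
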
+
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compiler, sljit_s32 op,
sljit_s32 dst, sljit_sw dstw,
sljit_s32 src, sljit_sw srcw)
{
- sljit_s32 op_flags = GET_ALL_FLAGS(op);
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
sljit_s32 dst_is_ereg = 0;
-#endif
+#else /* !SLJIT_CONFIG_X86_32 */
+ sljit_s32 op_flags = GET_ALL_FLAGS(op);
+#endif /* SLJIT_CONFIG_X86_32 */
CHECK_ERROR();
CHECK(check_sljit_emit_op1(compiler, op, dst, dstw, src, srcw));
@@ -1611,14 +1845,14 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compile
CHECK_EXTRA_REGS(src, srcw, (void)0);
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
compiler->mode32 = op_flags & SLJIT_32;
-#endif
+#endif /* SLJIT_CONFIG_X86_64 */
op = GET_OPCODE(op);
if (op >= SLJIT_MOV && op <= SLJIT_MOV_P) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
compiler->mode32 = 0;
-#endif
+#endif /* SLJIT_CONFIG_X86_64 */
if (FAST_IS_REG(src) && src == dst) {
if (!TYPE_CAST_NEEDED(op))
@@ -1631,14 +1865,14 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compile
if (op == SLJIT_MOV_S32)
op = SLJIT_MOV_U32;
}
- else if (src & SLJIT_IMM) {
+ else if (src == SLJIT_IMM) {
if (op == SLJIT_MOV_U32)
op = SLJIT_MOV_S32;
}
}
-#endif
+#endif /* SLJIT_CONFIG_X86_64 */
- if (src & SLJIT_IMM) {
+ if (src == SLJIT_IMM) {
switch (op) {
case SLJIT_MOV_U8:
srcw = (sljit_u8)srcw;
@@ -1659,12 +1893,12 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compile
case SLJIT_MOV_S32:
srcw = (sljit_s32)srcw;
break;
-#endif
+#endif /* SLJIT_CONFIG_X86_64 */
}
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
if (SLJIT_UNLIKELY(dst_is_ereg))
return emit_mov(compiler, dst, dstw, src, srcw);
-#endif
+#endif /* SLJIT_CONFIG_X86_32 */
}
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
@@ -1672,7 +1906,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compile
SLJIT_ASSERT(dst == SLJIT_MEM1(SLJIT_SP));
dst = TMP_REG1;
}
-#endif
+#endif /* SLJIT_CONFIG_X86_32 */
switch (op) {
case SLJIT_MOV:
@@ -1681,7 +1915,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compile
case SLJIT_MOV_U32:
case SLJIT_MOV_S32:
case SLJIT_MOV32:
-#endif
+#endif /* SLJIT_CONFIG_X86_32 */
EMIT_MOV(compiler, dst, dstw, src, srcw);
break;
case SLJIT_MOV_U8:
@@ -1708,25 +1942,30 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compile
EMIT_MOV(compiler, dst, dstw, src, srcw);
compiler->mode32 = 0;
break;
-#endif
+#endif /* SLJIT_CONFIG_X86_64 */
}
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
if (SLJIT_UNLIKELY(dst_is_ereg) && dst == TMP_REG1)
return emit_mov(compiler, SLJIT_MEM1(SLJIT_SP), dstw, TMP_REG1, 0);
-#endif
+#endif /* SLJIT_CONFIG_X86_32 */
return SLJIT_SUCCESS;
}
switch (op) {
- case SLJIT_NOT:
- if (SLJIT_UNLIKELY(op_flags & SLJIT_SET_Z))
- return emit_not_with_flags(compiler, dst, dstw, src, srcw);
- return emit_unary(compiler, NOT_rm, dst, dstw, src, srcw);
-
case SLJIT_CLZ:
case SLJIT_CTZ:
return emit_clz_ctz(compiler, (op == SLJIT_CLZ), dst, dstw, src, srcw);
+ case SLJIT_REV:
+ case SLJIT_REV_U16:
+ case SLJIT_REV_S16:
+ case SLJIT_REV_U32:
+ case SLJIT_REV_S32:
+#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
+ if (dst_is_ereg)
+ op |= SLJIT_32;
+#endif /* SLJIT_CONFIG_X86_32 */
+ return emit_bswap(compiler, op, dst, dstw, src, srcw);
}
return SLJIT_SUCCESS;
@@ -1745,7 +1984,7 @@ static sljit_s32 emit_cum_binary(struct sljit_compiler *compiler,
sljit_u8 op_imm = U8(op_types & 0xff);
if (dst == src1 && dstw == src1w) {
- if (src2 & SLJIT_IMM) {
+ if (src2 == SLJIT_IMM) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
if ((dst == SLJIT_R0) && (src2w > 127 || src2w < -128) && (compiler->mode32 || IS_HALFWORD(src2w))) {
#else
@@ -1779,7 +2018,7 @@ static sljit_s32 emit_cum_binary(struct sljit_compiler *compiler,
/* Only for cumulative operations. */
if (dst == src2 && dstw == src2w) {
- if (src1 & SLJIT_IMM) {
+ if (src1 == SLJIT_IMM) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
if ((dst == SLJIT_R0) && (src1w > 127 || src1w < -128) && (compiler->mode32 || IS_HALFWORD(src1w))) {
#else
@@ -1813,7 +2052,7 @@ static sljit_s32 emit_cum_binary(struct sljit_compiler *compiler,
/* General version. */
if (FAST_IS_REG(dst)) {
EMIT_MOV(compiler, dst, 0, src1, src1w);
- if (src2 & SLJIT_IMM) {
+ if (src2 == SLJIT_IMM) {
BINARY_IMM(op_imm, op_mr, src2w, dst, 0);
}
else {
@@ -1825,7 +2064,7 @@ static sljit_s32 emit_cum_binary(struct sljit_compiler *compiler,
else {
/* This version requires less memory writing. */
EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
- if (src2 & SLJIT_IMM) {
+ if (src2 == SLJIT_IMM) {
BINARY_IMM(op_imm, op_mr, src2w, TMP_REG1, 0);
}
else {
@@ -1852,7 +2091,7 @@ static sljit_s32 emit_non_cum_binary(struct sljit_compiler *compiler,
sljit_u8 op_imm = U8(op_types & 0xff);
if (dst == src1 && dstw == src1w) {
- if (src2 & SLJIT_IMM) {
+ if (src2 == SLJIT_IMM) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
if ((dst == SLJIT_R0) && (src2w > 127 || src2w < -128) && (compiler->mode32 || IS_HALFWORD(src2w))) {
#else
@@ -1886,7 +2125,7 @@ static sljit_s32 emit_non_cum_binary(struct sljit_compiler *compiler,
/* General version. */
if (FAST_IS_REG(dst) && dst != src2) {
EMIT_MOV(compiler, dst, 0, src1, src1w);
- if (src2 & SLJIT_IMM) {
+ if (src2 == SLJIT_IMM) {
BINARY_IMM(op_imm, op_mr, src2w, dst, 0);
}
else {
@@ -1898,7 +2137,7 @@ static sljit_s32 emit_non_cum_binary(struct sljit_compiler *compiler,
else {
/* This version requires less memory writing. */
EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
- if (src2 & SLJIT_IMM) {
+ if (src2 == SLJIT_IMM) {
BINARY_IMM(op_imm, op_mr, src2w, TMP_REG1, 0);
}
else {
@@ -1921,20 +2160,12 @@ static sljit_s32 emit_mul(struct sljit_compiler *compiler,
sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
/* Register destination. */
- if (dst_r == src1 && !(src2 & SLJIT_IMM)) {
- inst = emit_x86_instruction(compiler, 2, dst_r, 0, src2, src2w);
- FAIL_IF(!inst);
- *inst++ = GROUP_0F;
- *inst = IMUL_r_rm;
- }
- else if (dst_r == src2 && !(src1 & SLJIT_IMM)) {
- inst = emit_x86_instruction(compiler, 2, dst_r, 0, src1, src1w);
- FAIL_IF(!inst);
- *inst++ = GROUP_0F;
- *inst = IMUL_r_rm;
- }
- else if (src1 & SLJIT_IMM) {
- if (src2 & SLJIT_IMM) {
+ if (dst_r == src1 && src2 != SLJIT_IMM) {
+ FAIL_IF(emit_groupf(compiler, IMUL_r_rm, dst_r, src2, src2w));
+ } else if (dst_r == src2 && src1 != SLJIT_IMM) {
+ FAIL_IF(emit_groupf(compiler, IMUL_r_rm, dst_r, src1, src1w));
+ } else if (src1 == SLJIT_IMM) {
+ if (src2 == SLJIT_IMM) {
EMIT_MOV(compiler, dst_r, 0, SLJIT_IMM, src2w);
src2 = dst_r;
src2w = 0;
@@ -1944,10 +2175,8 @@ static sljit_s32 emit_mul(struct sljit_compiler *compiler,
inst = emit_x86_instruction(compiler, 1, dst_r, 0, src2, src2w);
FAIL_IF(!inst);
*inst = IMUL_r_rm_i8;
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
- FAIL_IF(!inst);
- INC_SIZE(1);
- *inst = U8(src1w);
+
+ FAIL_IF(emit_byte(compiler, U8(src1w)));
}
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
else {
@@ -1973,30 +2202,26 @@ static sljit_s32 emit_mul(struct sljit_compiler *compiler,
if (dst_r != src2)
EMIT_MOV(compiler, dst_r, 0, src2, src2w);
FAIL_IF(emit_load_imm64(compiler, TMP_REG2, src1w));
- inst = emit_x86_instruction(compiler, 2, dst_r, 0, TMP_REG2, 0);
- FAIL_IF(!inst);
- *inst++ = GROUP_0F;
- *inst = IMUL_r_rm;
+ FAIL_IF(emit_groupf(compiler, IMUL_r_rm, dst_r, TMP_REG2, 0));
}
#endif
}
- else if (src2 & SLJIT_IMM) {
+ else if (src2 == SLJIT_IMM) {
/* Note: src1 is NOT immediate. */
if (src2w <= 127 && src2w >= -128) {
inst = emit_x86_instruction(compiler, 1, dst_r, 0, src1, src1w);
FAIL_IF(!inst);
*inst = IMUL_r_rm_i8;
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
- FAIL_IF(!inst);
- INC_SIZE(1);
- *inst = U8(src2w);
+
+ FAIL_IF(emit_byte(compiler, U8(src2w)));
}
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
else {
inst = emit_x86_instruction(compiler, 1, dst_r, 0, src1, src1w);
FAIL_IF(!inst);
*inst = IMUL_r_rm_i32;
+
inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
FAIL_IF(!inst);
INC_SIZE(4);
@@ -2007,31 +2232,24 @@ static sljit_s32 emit_mul(struct sljit_compiler *compiler,
inst = emit_x86_instruction(compiler, 1, dst_r, 0, src1, src1w);
FAIL_IF(!inst);
*inst = IMUL_r_rm_i32;
+
inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
FAIL_IF(!inst);
INC_SIZE(4);
sljit_unaligned_store_s32(inst, (sljit_s32)src2w);
- }
- else {
+ } else {
if (dst_r != src1)
EMIT_MOV(compiler, dst_r, 0, src1, src1w);
FAIL_IF(emit_load_imm64(compiler, TMP_REG2, src2w));
- inst = emit_x86_instruction(compiler, 2, dst_r, 0, TMP_REG2, 0);
- FAIL_IF(!inst);
- *inst++ = GROUP_0F;
- *inst = IMUL_r_rm;
+ FAIL_IF(emit_groupf(compiler, IMUL_r_rm, dst_r, TMP_REG2, 0));
}
#endif
- }
- else {
+ } else {
/* Neither argument is immediate. */
if (ADDRESSING_DEPENDS_ON(src2, dst_r))
dst_r = TMP_REG1;
EMIT_MOV(compiler, dst_r, 0, src1, src1w);
- inst = emit_x86_instruction(compiler, 2, dst_r, 0, src2, src2w);
- FAIL_IF(!inst);
- *inst++ = GROUP_0F;
- *inst = IMUL_r_rm;
+ FAIL_IF(emit_groupf(compiler, IMUL_r_rm, dst_r, src2, src2w));
}
if (dst & SLJIT_MEM)
@@ -2064,10 +2282,10 @@ static sljit_s32 emit_lea_binary(struct sljit_compiler *compiler,
done = 1;
}
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- if ((src2 & SLJIT_IMM) && (compiler->mode32 || IS_HALFWORD(src2w))) {
+ if (src2 == SLJIT_IMM && (compiler->mode32 || IS_HALFWORD(src2w))) {
inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM1(src1), (sljit_s32)src2w);
#else
- if (src2 & SLJIT_IMM) {
+ if (src2 == SLJIT_IMM) {
inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM1(src1), src2w);
#endif
FAIL_IF(!inst);
@@ -2077,10 +2295,10 @@ static sljit_s32 emit_lea_binary(struct sljit_compiler *compiler,
}
else if (FAST_IS_REG(src2)) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- if ((src1 & SLJIT_IMM) && (compiler->mode32 || IS_HALFWORD(src1w))) {
+ if (src1 == SLJIT_IMM && (compiler->mode32 || IS_HALFWORD(src1w))) {
inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM1(src2), (sljit_s32)src1w);
#else
- if (src1 & SLJIT_IMM) {
+ if (src1 == SLJIT_IMM) {
inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM1(src2), src1w);
#endif
FAIL_IF(!inst);
@@ -2104,16 +2322,16 @@ static sljit_s32 emit_cmp_binary(struct sljit_compiler *compiler,
sljit_u8* inst;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- if (src1 == SLJIT_R0 && (src2 & SLJIT_IMM) && (src2w > 127 || src2w < -128) && (compiler->mode32 || IS_HALFWORD(src2w))) {
+ if (src1 == SLJIT_R0 && src2 == SLJIT_IMM && (src2w > 127 || src2w < -128) && (compiler->mode32 || IS_HALFWORD(src2w))) {
#else
- if (src1 == SLJIT_R0 && (src2 & SLJIT_IMM) && (src2w > 127 || src2w < -128)) {
+ if (src1 == SLJIT_R0 && src2 == SLJIT_IMM && (src2w > 127 || src2w < -128)) {
#endif
BINARY_EAX_IMM(CMP_EAX_i32, src2w);
return SLJIT_SUCCESS;
}
if (FAST_IS_REG(src1)) {
- if (src2 & SLJIT_IMM) {
+ if (src2 == SLJIT_IMM) {
BINARY_IMM(CMP, CMP_rm_r, src2w, src1, 0);
}
else {
@@ -2124,15 +2342,15 @@ static sljit_s32 emit_cmp_binary(struct sljit_compiler *compiler,
return SLJIT_SUCCESS;
}
- if (FAST_IS_REG(src2) && !(src1 & SLJIT_IMM)) {
+ if (FAST_IS_REG(src2) && src1 != SLJIT_IMM) {
inst = emit_x86_instruction(compiler, 1, src2, 0, src1, src1w);
FAIL_IF(!inst);
*inst = CMP_rm_r;
return SLJIT_SUCCESS;
}
- if (src2 & SLJIT_IMM) {
- if (src1 & SLJIT_IMM) {
+ if (src2 == SLJIT_IMM) {
+ if (src1 == SLJIT_IMM) {
EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
src1 = TMP_REG1;
src1w = 0;
@@ -2155,34 +2373,33 @@ static sljit_s32 emit_test_binary(struct sljit_compiler *compiler,
sljit_u8* inst;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- if (src1 == SLJIT_R0 && (src2 & SLJIT_IMM) && (src2w > 127 || src2w < -128) && (compiler->mode32 || IS_HALFWORD(src2w))) {
+ if (src1 == SLJIT_R0 && src2 == SLJIT_IMM && (src2w > 127 || src2w < -128) && (compiler->mode32 || IS_HALFWORD(src2w))) {
#else
- if (src1 == SLJIT_R0 && (src2 & SLJIT_IMM) && (src2w > 127 || src2w < -128)) {
+ if (src1 == SLJIT_R0 && src2 == SLJIT_IMM && (src2w > 127 || src2w < -128)) {
#endif
BINARY_EAX_IMM(TEST_EAX_i32, src2w);
return SLJIT_SUCCESS;
}
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- if (src2 == SLJIT_R0 && (src1 & SLJIT_IMM) && (src1w > 127 || src1w < -128) && (compiler->mode32 || IS_HALFWORD(src1w))) {
+ if (src2 == SLJIT_R0 && src1 == SLJIT_IMM && (src1w > 127 || src1w < -128) && (compiler->mode32 || IS_HALFWORD(src1w))) {
#else
- if (src2 == SLJIT_R0 && (src1 & SLJIT_IMM) && (src1w > 127 || src1w < -128)) {
+ if (src2 == SLJIT_R0 && src1 == SLJIT_IMM && (src1w > 127 || src1w < -128)) {
#endif
BINARY_EAX_IMM(TEST_EAX_i32, src1w);
return SLJIT_SUCCESS;
}
- if (!(src1 & SLJIT_IMM)) {
- if (src2 & SLJIT_IMM) {
+ if (src1 != SLJIT_IMM) {
+ if (src2 == SLJIT_IMM) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
if (IS_HALFWORD(src2w) || compiler->mode32) {
inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, src2w, src1, src1w);
FAIL_IF(!inst);
*inst = GROUP_F7;
- }
- else {
- FAIL_IF(emit_load_imm64(compiler, TMP_REG1, src2w));
- inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src1, src1w);
+ } else {
+ FAIL_IF(emit_load_imm64(compiler, FAST_IS_REG(src1) ? TMP_REG2 : TMP_REG1, src2w));
+ inst = emit_x86_instruction(compiler, 1, FAST_IS_REG(src1) ? TMP_REG2 : TMP_REG1, 0, src1, src1w);
FAIL_IF(!inst);
*inst = TEST_rm_r;
}
@@ -2201,8 +2418,8 @@ static sljit_s32 emit_test_binary(struct sljit_compiler *compiler,
}
}
- if (!(src2 & SLJIT_IMM)) {
- if (src1 & SLJIT_IMM) {
+ if (src2 != SLJIT_IMM) {
+ if (src1 == SLJIT_IMM) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
if (IS_HALFWORD(src1w) || compiler->mode32) {
inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, src1w, src2, src2w);
@@ -2231,7 +2448,7 @@ static sljit_s32 emit_test_binary(struct sljit_compiler *compiler,
}
EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
- if (src2 & SLJIT_IMM) {
+ if (src2 == SLJIT_IMM) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
if (IS_HALFWORD(src2w) || compiler->mode32) {
inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, src2w, TMP_REG1, 0);
@@ -2269,18 +2486,18 @@ static sljit_s32 emit_shift(struct sljit_compiler *compiler,
#endif
sljit_u8* inst;
- if ((src2 & SLJIT_IMM) || (src2 == SLJIT_PREF_SHIFT_REG)) {
+ if (src2 == SLJIT_IMM || src2 == SLJIT_PREF_SHIFT_REG) {
if (dst == src1 && dstw == src1w) {
inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, src2, src2w, dst, dstw);
FAIL_IF(!inst);
- *inst |= mode;
+ inst[1] |= mode;
return SLJIT_SUCCESS;
}
if (dst == SLJIT_PREF_SHIFT_REG && src2 == SLJIT_PREF_SHIFT_REG) {
EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
FAIL_IF(!inst);
- *inst |= mode;
+ inst[1] |= mode;
EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
return SLJIT_SUCCESS;
}
@@ -2288,14 +2505,14 @@ static sljit_s32 emit_shift(struct sljit_compiler *compiler,
EMIT_MOV(compiler, dst, 0, src1, src1w);
inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, src2, src2w, dst, 0);
FAIL_IF(!inst);
- *inst |= mode;
+ inst[1] |= mode;
return SLJIT_SUCCESS;
}
EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, src2, src2w, TMP_REG1, 0);
FAIL_IF(!inst);
- *inst |= mode;
+ inst[1] |= mode;
EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
return SLJIT_SUCCESS;
}
@@ -2305,7 +2522,7 @@ static sljit_s32 emit_shift(struct sljit_compiler *compiler,
EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, src2, src2w);
inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
FAIL_IF(!inst);
- *inst |= mode;
+ inst[1] |= mode;
return emit_mov(compiler, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
}
@@ -2323,7 +2540,7 @@ static sljit_s32 emit_shift(struct sljit_compiler *compiler,
EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, src2, src2w);
inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_PREF_SHIFT_REG, 0, dst, 0);
FAIL_IF(!inst);
- *inst |= mode;
+ inst[1] |= mode;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
compiler->mode32 = 0;
#endif
@@ -2349,7 +2566,7 @@ static sljit_s32 emit_shift(struct sljit_compiler *compiler,
EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, src2, src2w);
inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
FAIL_IF(!inst);
- *inst |= mode;
+ inst[1] |= mode;
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, SLJIT_MEM1(SLJIT_SP), 0);
@@ -2372,7 +2589,7 @@ static sljit_s32 emit_shift_with_flags(struct sljit_compiler *compiler,
sljit_s32 src2, sljit_sw src2w)
{
/* The CPU does not set flags if the shift count is 0. */
- if (src2 & SLJIT_IMM) {
+ if (src2 == SLJIT_IMM) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
src2w &= compiler->mode32 ? 0x1f : 0x3f;
#else /* !SLJIT_CONFIG_X86_64 */
@@ -2419,8 +2636,6 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compile
compiler->mode32 = op & SLJIT_32;
#endif
- SLJIT_ASSERT(dst != TMP_REG1 || HAS_FLAGS(op));
-
switch (GET_OPCODE(op)) {
case SLJIT_ADD:
if (!HAS_FLAGS(op)) {
@@ -2437,7 +2652,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compile
return emit_unary(compiler, NEG_rm, dst, dstw, src2, src2w);
if (!HAS_FLAGS(op)) {
- if ((src2 & SLJIT_IMM) && emit_lea_binary(compiler, dst, dstw, src1, src1w, SLJIT_IMM, -src2w) != SLJIT_ERR_UNSUPPORTED)
+ if (src2 == SLJIT_IMM && emit_lea_binary(compiler, dst, dstw, src1, src1w, SLJIT_IMM, -src2w) != SLJIT_ERR_UNSUPPORTED)
return compiler->error;
if (FAST_IS_REG(dst) && src2 == dst) {
FAIL_IF(emit_non_cum_binary(compiler, BINARY_OPCODE(SUB), dst, 0, dst, 0, src1, src1w));
@@ -2459,6 +2674,13 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compile
return emit_cum_binary(compiler, BINARY_OPCODE(OR),
dst, dstw, src1, src1w, src2, src2w);
case SLJIT_XOR:
+ if (!HAS_FLAGS(op)) {
+ if (src2 == SLJIT_IMM && src2w == -1)
+ return emit_unary(compiler, NOT_rm, dst, dstw, src1, src1w);
+ if (src1 == SLJIT_IMM && src1w == -1)
+ return emit_unary(compiler, NOT_rm, dst, dstw, src2, src2w);
+ }
+
return emit_cum_binary(compiler, BINARY_OPCODE(XOR),
dst, dstw, src1, src1w, src2, src2w);
case SLJIT_SHL:
@@ -2507,124 +2729,231 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2u(struct sljit_compiler *compil
compiler->mode32 = op & SLJIT_32;
#endif
- if (opcode == SLJIT_SUB) {
+ if (opcode == SLJIT_SUB)
return emit_cmp_binary(compiler, src1, src1w, src2, src2w);
- }
+
return emit_test_binary(compiler, src1, src1w, src2, src2w);
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_shift_into(struct sljit_compiler *compiler, sljit_s32 op,
- sljit_s32 src_dst,
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2r(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst_reg,
sljit_s32 src1, sljit_sw src1w,
sljit_s32 src2, sljit_sw src2w)
{
- sljit_s32 restore_ecx = 0;
- sljit_s32 is_rotate, is_left;
sljit_u8* inst;
sljit_sw dstw = 0;
-#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- sljit_s32 tmp2 = SLJIT_MEM1(SLJIT_SP);
-#else /* !SLJIT_CONFIG_X86_32 */
- sljit_s32 tmp2 = TMP_REG2;
-#endif /* SLJIT_CONFIG_X86_32 */
CHECK_ERROR();
- CHECK(check_sljit_emit_shift_into(compiler, op, src_dst, src1, src1w, src2, src2w));
+ CHECK(check_sljit_emit_op2r(compiler, op, dst_reg, src1, src1w, src2, src2w));
ADJUST_LOCAL_OFFSET(src1, src1w);
ADJUST_LOCAL_OFFSET(src2, src2w);
+ CHECK_EXTRA_REGS(dst_reg, dstw, (void)0);
CHECK_EXTRA_REGS(src1, src1w, (void)0);
CHECK_EXTRA_REGS(src2, src2w, (void)0);
-
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
compiler->mode32 = op & SLJIT_32;
#endif
- if (src2 & SLJIT_IMM) {
+ switch (GET_OPCODE(op)) {
+ case SLJIT_MULADD:
+ FAIL_IF(emit_mul(compiler, TMP_REG1, 0, src1, src1w, src2, src2w));
+ inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, dst_reg, dstw);
+ FAIL_IF(!inst);
+ *inst = ADD_rm_r;
+ return SLJIT_SUCCESS;
+ }
+
+ return SLJIT_SUCCESS;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_shift_into(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst_reg,
+ sljit_s32 src1_reg,
+ sljit_s32 src2_reg,
+ sljit_s32 src3, sljit_sw src3w)
+{
+ sljit_s32 is_rotate, is_left, move_src1;
+ sljit_u8* inst;
+ sljit_sw src1w = 0;
+ sljit_sw dstw = 0;
+ /* The whole register must be saved even for 32 bit operations. */
+ sljit_u8 restore_ecx = 0;
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- src2w &= 0x1f;
+ sljit_sw src2w = 0;
+ sljit_s32 restore_sp4 = 0;
+#endif /* SLJIT_CONFIG_X86_32 */
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_shift_into(compiler, op, dst_reg, src1_reg, src2_reg, src3, src3w));
+ ADJUST_LOCAL_OFFSET(src3, src3w);
+
+ CHECK_EXTRA_REGS(dst_reg, dstw, (void)0);
+ CHECK_EXTRA_REGS(src3, src3w, (void)0);
+
+#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
+ compiler->mode32 = op & SLJIT_32;
+#endif /* SLJIT_CONFIG_X86_64 */
+
+ if (src3 == SLJIT_IMM) {
+#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
+ src3w &= 0x1f;
#else /* !SLJIT_CONFIG_X86_32 */
- src2w &= (op & SLJIT_32) ? 0x1f : 0x3f;
+ src3w &= (op & SLJIT_32) ? 0x1f : 0x3f;
#endif /* SLJIT_CONFIG_X86_32 */
- if (src2w == 0)
+ if (src3w == 0)
return SLJIT_SUCCESS;
}
is_left = (GET_OPCODE(op) == SLJIT_SHL || GET_OPCODE(op) == SLJIT_MSHL);
- is_rotate = (src_dst == src1);
- CHECK_EXTRA_REGS(src_dst, dstw, (void)0);
+ is_rotate = (src1_reg == src2_reg);
+ CHECK_EXTRA_REGS(src1_reg, src1w, (void)0);
+ CHECK_EXTRA_REGS(src2_reg, src2w, (void)0);
if (is_rotate)
- return emit_shift(compiler, is_left ? ROL : ROR, src_dst, dstw, src1, src1w, src2, src2w);
+ return emit_shift(compiler, is_left ? ROL : ROR, dst_reg, dstw, src1_reg, src1w, src3, src3w);
- if ((src2 & SLJIT_IMM) || src2 == SLJIT_PREF_SHIFT_REG) {
- if (!FAST_IS_REG(src1)) {
- EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
- src1 = TMP_REG1;
- }
- } else if (FAST_IS_REG(src1)) {
-#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- compiler->mode32 = 0;
-#endif
- EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_PREF_SHIFT_REG, 0);
-#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- compiler->mode32 = op & SLJIT_32;
-#endif
- EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, src2, src2w);
-
- if (src1 == SLJIT_PREF_SHIFT_REG)
- src1 = TMP_REG1;
+#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
+ if (src2_reg & SLJIT_MEM) {
+ EMIT_MOV(compiler, TMP_REG1, 0, src2_reg, src2w);
+ src2_reg = TMP_REG1;
+ }
+#endif /* SLJIT_CONFIG_X86_32 */
- if (src_dst == SLJIT_PREF_SHIFT_REG)
- src_dst = TMP_REG1;
+ if (dst_reg == SLJIT_PREF_SHIFT_REG && src3 != SLJIT_IMM && (src3 != SLJIT_PREF_SHIFT_REG || src1_reg != SLJIT_PREF_SHIFT_REG)) {
+#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
+ EMIT_MOV(compiler, TMP_REG1, 0, src1_reg, src1w);
+ src1_reg = TMP_REG1;
+ src1w = 0;
+#else /* !SLJIT_CONFIG_X86_64 */
+ if (src2_reg != TMP_REG1) {
+ EMIT_MOV(compiler, TMP_REG1, 0, src1_reg, src1w);
+ src1_reg = TMP_REG1;
+ src1w = 0;
+ } else if ((src1_reg & SLJIT_MEM) || src1_reg == SLJIT_PREF_SHIFT_REG) {
+ restore_sp4 = (src3 == SLJIT_R0) ? SLJIT_R1 : SLJIT_R0;
+ EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), sizeof(sljit_s32), restore_sp4, 0);
+ EMIT_MOV(compiler, restore_sp4, 0, src1_reg, src1w);
+ src1_reg = restore_sp4;
+ src1w = 0;
+ } else {
+ EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), sizeof(sljit_s32), src1_reg, 0);
+ restore_sp4 = src1_reg;
+ }
+#endif /* SLJIT_CONFIG_X86_64 */
- restore_ecx = 1;
+ if (src3 != SLJIT_PREF_SHIFT_REG)
+ EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, src3, src3w);
} else {
- EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
+ if (src2_reg == SLJIT_PREF_SHIFT_REG && src3 != SLJIT_IMM && src3 != SLJIT_PREF_SHIFT_REG) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- compiler->mode32 = 0;
-#endif
- EMIT_MOV(compiler, tmp2, 0, SLJIT_PREF_SHIFT_REG, 0);
+ compiler->mode32 = 0;
+#endif /* SLJIT_CONFIG_X86_64 */
+ EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_PREF_SHIFT_REG, 0);
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- compiler->mode32 = op & SLJIT_32;
-#endif
- EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, src2, src2w);
+ compiler->mode32 = op & SLJIT_32;
+#endif /* SLJIT_CONFIG_X86_64 */
+ src2_reg = TMP_REG1;
+ restore_ecx = 1;
+ }
- src1 = TMP_REG1;
+ move_src1 = 0;
+#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
+ if (dst_reg != src1_reg) {
+ if (dst_reg != src3) {
+ EMIT_MOV(compiler, dst_reg, 0, src1_reg, src1w);
+ src1_reg = dst_reg;
+ src1w = 0;
+ } else
+ move_src1 = 1;
+ }
+#else /* !SLJIT_CONFIG_X86_64 */
+ if (dst_reg & SLJIT_MEM) {
+ if (src2_reg != TMP_REG1) {
+ EMIT_MOV(compiler, TMP_REG1, 0, src1_reg, src1w);
+ src1_reg = TMP_REG1;
+ src1w = 0;
+ } else if ((src1_reg & SLJIT_MEM) || src1_reg == SLJIT_PREF_SHIFT_REG) {
+ restore_sp4 = (src3 == SLJIT_R0) ? SLJIT_R1 : SLJIT_R0;
+ EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), sizeof(sljit_s32), restore_sp4, 0);
+ EMIT_MOV(compiler, restore_sp4, 0, src1_reg, src1w);
+ src1_reg = restore_sp4;
+ src1w = 0;
+ } else {
+ EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), sizeof(sljit_s32), src1_reg, 0);
+ restore_sp4 = src1_reg;
+ }
+ } else if (dst_reg != src1_reg) {
+ if (dst_reg != src3) {
+ EMIT_MOV(compiler, dst_reg, 0, src1_reg, src1w);
+ src1_reg = dst_reg;
+ src1w = 0;
+ } else
+ move_src1 = 1;
+ }
+#endif /* SLJIT_CONFIG_X86_64 */
- if (src_dst == SLJIT_PREF_SHIFT_REG) {
- src_dst = tmp2;
- SLJIT_ASSERT(dstw == 0);
+ if (src3 != SLJIT_IMM && src3 != SLJIT_PREF_SHIFT_REG) {
+ if (!restore_ecx) {
+#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
+ compiler->mode32 = 0;
+ EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_PREF_SHIFT_REG, 0);
+ compiler->mode32 = op & SLJIT_32;
+ restore_ecx = 1;
+#else /* !SLJIT_CONFIG_X86_64 */
+ if (src1_reg != TMP_REG1 && src2_reg != TMP_REG1) {
+ EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_PREF_SHIFT_REG, 0);
+ restore_ecx = 1;
+ } else {
+ EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), 0, SLJIT_PREF_SHIFT_REG, 0);
+ restore_ecx = 2;
+ }
+#endif /* SLJIT_CONFIG_X86_64 */
+ }
+ EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, src3, src3w);
}
- restore_ecx = 2;
+ if (move_src1) {
+ EMIT_MOV(compiler, dst_reg, 0, src1_reg, src1w);
+ src1_reg = dst_reg;
+ src1w = 0;
+ }
}
- inst = emit_x86_instruction(compiler, 2, src1, 0, src_dst, dstw);
+ inst = emit_x86_instruction(compiler, 2, src2_reg, 0, src1_reg, src1w);
FAIL_IF(!inst);
inst[0] = GROUP_0F;
- if (src2 & SLJIT_IMM) {
+ if (src3 == SLJIT_IMM) {
inst[1] = U8((is_left ? SHLD : SHRD) - 1);
- /* Immedate argument is added separately. */
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
- FAIL_IF(!inst);
- INC_SIZE(1);
- *inst = U8(src2w);
+ /* Immediate argument is added separately. */
+ FAIL_IF(emit_byte(compiler, U8(src3w)));
} else
inst[1] = U8(is_left ? SHLD : SHRD);
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- compiler->mode32 = 0;
-#endif
+ if (restore_ecx) {
+ compiler->mode32 = 0;
+ EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
+ }
- if (restore_ecx == 1)
- return emit_mov(compiler, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
- if (restore_ecx == 2)
- return emit_mov(compiler, SLJIT_PREF_SHIFT_REG, 0, tmp2, 0);
+ if (src1_reg != dst_reg) {
+ compiler->mode32 = op & SLJIT_32;
+ return emit_mov(compiler, dst_reg, dstw, src1_reg, 0);
+ }
+#else /* !SLJIT_CONFIG_X86_64 */
+ if (restore_ecx)
+ EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, restore_ecx == 1 ? TMP_REG1 : SLJIT_MEM1(SLJIT_SP), 0);
+
+ if (src1_reg != dst_reg)
+ EMIT_MOV(compiler, dst_reg, dstw, src1_reg, 0);
+
+ if (restore_sp4)
+ return emit_mov(compiler, restore_sp4, 0, SLJIT_MEM1(SLJIT_SP), sizeof(sljit_s32));
+#endif /* SLJIT_CONFIG_X86_32 */
return SLJIT_SUCCESS;
}
@@ -2656,24 +2985,41 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_src(struct sljit_compiler *comp
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 reg)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_dst(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst, sljit_sw dstw)
{
- CHECK_REG_INDEX(check_sljit_get_register_index(reg));
-#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- if (reg >= SLJIT_R3 && reg <= SLJIT_R8)
- return -1;
-#endif
- return reg_map[reg];
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_op_dst(compiler, op, dst, dstw));
+ ADJUST_LOCAL_OFFSET(dst, dstw);
+
+ CHECK_EXTRA_REGS(dst, dstw, (void)0);
+
+ switch (op) {
+ case SLJIT_FAST_ENTER:
+ return emit_fast_enter(compiler, dst, dstw);
+ case SLJIT_GET_RETURN_ADDRESS:
+ return sljit_emit_get_return_address(compiler, dst, dstw);
+ }
+
+ return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_float_register_index(sljit_s32 reg)
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 type, sljit_s32 reg)
{
- CHECK_REG_INDEX(check_sljit_get_float_register_index(reg));
+ CHECK_REG_INDEX(check_sljit_get_register_index(type, reg));
+
+ if (type == SLJIT_GP_REGISTER) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- return reg;
-#else
+ if (reg >= SLJIT_R3 && reg <= SLJIT_R8)
+ return -1;
+#endif /* SLJIT_CONFIG_X86_32 */
+ return reg_map[reg];
+ }
+
+ if (type != SLJIT_FLOAT_REGISTER && type != SLJIT_SIMD_REG_128 && type != SLJIT_SIMD_REG_256 && type != SLJIT_SIMD_REG_512)
+ return -1;
+
return freg_map[reg];
-#endif
}
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *compiler,
@@ -2701,6 +3047,8 @@ static sljit_u32 *sse2_buffer;
static void init_compiler(void)
{
+ get_cpu_features();
+
/* Align to 16 bytes. */
sse2_buffer = (sljit_u32*)(((sljit_uw)sse2_data + 15) & ~(sljit_uw)0xf);
@@ -2714,58 +3062,60 @@ static void init_compiler(void)
sse2_buffer[13] = 0x7fffffff;
}
-static sljit_s32 emit_sse2(struct sljit_compiler *compiler, sljit_u8 opcode,
- sljit_s32 single, sljit_s32 xmm1, sljit_s32 xmm2, sljit_sw xmm2w)
+static sljit_s32 emit_groupf(struct sljit_compiler *compiler,
+ sljit_uw op,
+ sljit_s32 dst, sljit_s32 src, sljit_sw srcw)
{
- sljit_u8 *inst;
-
- inst = emit_x86_instruction(compiler, 2 | (single ? EX86_PREF_F3 : EX86_PREF_F2) | EX86_SSE2, xmm1, 0, xmm2, xmm2w);
+ sljit_u8 *inst = emit_x86_instruction(compiler, 2 | (op & ~(sljit_uw)0xff), dst, 0, src, srcw);
FAIL_IF(!inst);
- *inst++ = GROUP_0F;
- *inst = opcode;
+ inst[0] = GROUP_0F;
+ inst[1] = op & 0xff;
return SLJIT_SUCCESS;
}
-static sljit_s32 emit_sse2_logic(struct sljit_compiler *compiler, sljit_u8 opcode,
- sljit_s32 pref66, sljit_s32 xmm1, sljit_s32 xmm2, sljit_sw xmm2w)
+static sljit_s32 emit_groupf_ext(struct sljit_compiler *compiler,
+ sljit_uw op,
+ sljit_s32 dst, sljit_s32 src, sljit_sw srcw)
{
sljit_u8 *inst;
- inst = emit_x86_instruction(compiler, 2 | (pref66 ? EX86_PREF_66 : 0) | EX86_SSE2, xmm1, 0, xmm2, xmm2w);
+ SLJIT_ASSERT((op & EX86_SSE2) && ((op & VEX_OP_0F38) || (op & VEX_OP_0F3A)));
+
+ inst = emit_x86_instruction(compiler, 3 | (op & ~((sljit_uw)0xff | VEX_OP_0F38 | VEX_OP_0F3A)), dst, 0, src, srcw);
FAIL_IF(!inst);
- *inst++ = GROUP_0F;
- *inst = opcode;
+ inst[0] = GROUP_0F;
+ inst[1] = U8((op & VEX_OP_0F38) ? 0x38 : 0x3A);
+ inst[2] = op & 0xff;
return SLJIT_SUCCESS;
}
static SLJIT_INLINE sljit_s32 emit_sse2_load(struct sljit_compiler *compiler,
sljit_s32 single, sljit_s32 dst, sljit_s32 src, sljit_sw srcw)
{
- return emit_sse2(compiler, MOVSD_x_xm, single, dst, src, srcw);
+ return emit_groupf(compiler, MOVSD_x_xm | (single ? EX86_PREF_F3 : EX86_PREF_F2) | EX86_SSE2, dst, src, srcw);
}
static SLJIT_INLINE sljit_s32 emit_sse2_store(struct sljit_compiler *compiler,
sljit_s32 single, sljit_s32 dst, sljit_sw dstw, sljit_s32 src)
{
- return emit_sse2(compiler, MOVSD_xm_x, single, src, dst, dstw);
+ return emit_groupf(compiler, MOVSD_xm_x | (single ? EX86_PREF_F3 : EX86_PREF_F2) | EX86_SSE2, src, dst, dstw);
}
static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_compiler *compiler, sljit_s32 op,
sljit_s32 dst, sljit_sw dstw,
sljit_s32 src, sljit_sw srcw)
{
- sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
- sljit_u8 *inst;
+ sljit_s32 dst_r;
+
+ CHECK_EXTRA_REGS(dst, dstw, (void)0);
+ dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
if (GET_OPCODE(op) == SLJIT_CONV_SW_FROM_F64)
compiler->mode32 = 0;
#endif
- inst = emit_x86_instruction(compiler, 2 | ((op & SLJIT_32) ? EX86_PREF_F3 : EX86_PREF_F2) | EX86_SSE2_OP2, dst_r, 0, src, srcw);
- FAIL_IF(!inst);
- *inst++ = GROUP_0F;
- *inst = CVTTSD2SI_r_xm;
+ FAIL_IF(emit_groupf(compiler, CVTTSD2SI_r_xm | EX86_SELECT_F2_F3(op) | EX86_SSE2_OP2, dst_r, src, srcw));
if (dst & SLJIT_MEM)
return emit_mov(compiler, dst, dstw, TMP_REG1, 0);
@@ -2777,14 +3127,15 @@ static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_sw(struct sljit_comp
sljit_s32 src, sljit_sw srcw)
{
sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG;
- sljit_u8 *inst;
+
+ CHECK_EXTRA_REGS(src, srcw, (void)0);
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_SW)
compiler->mode32 = 0;
#endif
- if (src & SLJIT_IMM) {
+ if (src == SLJIT_IMM) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_S32)
srcw = (sljit_s32)srcw;
@@ -2794,10 +3145,7 @@ static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_sw(struct sljit_comp
srcw = 0;
}
- inst = emit_x86_instruction(compiler, 2 | ((op & SLJIT_32) ? EX86_PREF_F3 : EX86_PREF_F2) | EX86_SSE2_OP1, dst_r, 0, src, srcw);
- FAIL_IF(!inst);
- *inst++ = GROUP_0F;
- *inst = CVTSI2SD_x_rm;
+ FAIL_IF(emit_groupf(compiler, CVTSI2SD_x_rm | EX86_SELECT_F2_F3(op) | EX86_SSE2_OP1, dst_r, src, srcw));
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
compiler->mode32 = 1;
@@ -2812,16 +3160,28 @@ static SLJIT_INLINE sljit_s32 sljit_emit_fop1_cmp(struct sljit_compiler *compile
sljit_s32 src2, sljit_sw src2w)
{
switch (GET_FLAG_TYPE(op)) {
+ case SLJIT_ORDERED_EQUAL:
+ /* Also: SLJIT_UNORDERED_OR_NOT_EQUAL */
+ FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, TMP_FREG, src1, src1w));
+ FAIL_IF(emit_groupf(compiler, CMPS_x_xm | EX86_SELECT_F2_F3(op) | EX86_SSE2, TMP_FREG, src2, src2w));
+
+ /* EQ */
+ FAIL_IF(emit_byte(compiler, 0));
+
+ src1 = TMP_FREG;
+ src2 = TMP_FREG;
+ src2w = 0;
+ break;
+
case SLJIT_ORDERED_LESS:
- case SLJIT_UNORDERED_OR_GREATER_EQUAL:
case SLJIT_UNORDERED_OR_GREATER:
- case SLJIT_ORDERED_LESS_EQUAL:
+ /* Also: SLJIT_UNORDERED_OR_GREATER_EQUAL, SLJIT_ORDERED_LESS_EQUAL */
if (!FAST_IS_REG(src2)) {
FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, TMP_FREG, src2, src2w));
src2 = TMP_FREG;
}
- return emit_sse2_logic(compiler, UCOMISD_x_xm, !(op & SLJIT_32), src2, src1, src1w);
+ return emit_groupf(compiler, UCOMISD_x_xm | EX86_SELECT_66(op) | EX86_SSE2, src2, src1, src1w);
}
if (!FAST_IS_REG(src1)) {
@@ -2829,7 +3189,7 @@ static SLJIT_INLINE sljit_s32 sljit_emit_fop1_cmp(struct sljit_compiler *compile
src1 = TMP_FREG;
}
- return emit_sse2_logic(compiler, UCOMISD_x_xm, !(op & SLJIT_32), src1, src2, src2w);
+ return emit_groupf(compiler, UCOMISD_x_xm | EX86_SELECT_66(op) | EX86_SSE2, src1, src2, src2w);
}
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compiler, sljit_s32 op,
@@ -2837,6 +3197,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compil
sljit_s32 src, sljit_sw srcw)
{
sljit_s32 dst_r;
+ sljit_u8 *inst;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
compiler->mode32 = 1;
@@ -2860,42 +3221,57 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compil
/* We overwrite the high bits of source. From SLJIT point of view,
this is not an issue.
Note: In SSE3, we could also use MOVDDUP and MOVSLDUP. */
- FAIL_IF(emit_sse2_logic(compiler, UNPCKLPD_x_xm, op & SLJIT_32, src, src, 0));
- }
- else {
+ FAIL_IF(emit_groupf(compiler, UNPCKLPD_x_xm | ((op & SLJIT_32) ? EX86_PREF_66 : 0) | EX86_SSE2, src, src, 0));
+ } else {
FAIL_IF(emit_sse2_load(compiler, !(op & SLJIT_32), TMP_FREG, src, srcw));
src = TMP_FREG;
}
- FAIL_IF(emit_sse2_logic(compiler, CVTPD2PS_x_xm, op & SLJIT_32, dst_r, src, 0));
+ FAIL_IF(emit_groupf(compiler, CVTPD2PS_x_xm | ((op & SLJIT_32) ? EX86_PREF_66 : 0) | EX86_SSE2, dst_r, src, 0));
if (dst_r == TMP_FREG)
return emit_sse2_store(compiler, op & SLJIT_32, dst, dstw, TMP_FREG);
return SLJIT_SUCCESS;
}
if (FAST_IS_REG(dst)) {
- dst_r = dst;
- if (dst != src)
- FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, dst_r, src, srcw));
- }
- else {
- dst_r = TMP_FREG;
- FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, dst_r, src, srcw));
+ dst_r = (dst == src) ? TMP_FREG : dst;
+
+ if (src & SLJIT_MEM)
+ FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, TMP_FREG, src, srcw));
+
+ FAIL_IF(emit_groupf(compiler, PCMPEQD_x_xm | EX86_PREF_66 | EX86_SSE2, dst_r, dst_r, 0));
+
+ inst = emit_x86_instruction(compiler, 2 | EX86_PREF_66 | EX86_SSE2_OP2, 0, 0, dst_r, 0);
+ inst[0] = GROUP_0F;
+ /* Same as PSRLD_x / PSRLQ_x */
+ inst[1] = (op & SLJIT_32) ? PSLLD_x_i8 : PSLLQ_x_i8;
+
+ if (GET_OPCODE(op) == SLJIT_ABS_F64) {
+ inst[2] |= 2 << 3;
+ FAIL_IF(emit_byte(compiler, 1));
+ } else {
+ inst[2] |= 6 << 3;
+ FAIL_IF(emit_byte(compiler, ((op & SLJIT_32) ? 31 : 63)));
+ }
+
+ if (dst_r != TMP_FREG)
+ dst_r = (src & SLJIT_MEM) ? TMP_FREG : src;
+ return emit_groupf(compiler, (GET_OPCODE(op) == SLJIT_NEG_F64 ? XORPD_x_xm : ANDPD_x_xm) | EX86_SSE2, dst, dst_r, 0);
}
+ FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, TMP_FREG, src, srcw));
+
switch (GET_OPCODE(op)) {
case SLJIT_NEG_F64:
- FAIL_IF(emit_sse2_logic(compiler, XORPD_x_xm, 1, dst_r, SLJIT_MEM0(), (sljit_sw)(op & SLJIT_32 ? sse2_buffer : sse2_buffer + 8)));
+ FAIL_IF(emit_groupf(compiler, XORPD_x_xm | EX86_SELECT_66(op) | EX86_SSE2, TMP_FREG, SLJIT_MEM0(), (sljit_sw)((op & SLJIT_32) ? sse2_buffer : sse2_buffer + 8)));
break;
case SLJIT_ABS_F64:
- FAIL_IF(emit_sse2_logic(compiler, ANDPD_x_xm, 1, dst_r, SLJIT_MEM0(), (sljit_sw)(op & SLJIT_32 ? sse2_buffer + 4 : sse2_buffer + 12)));
+ FAIL_IF(emit_groupf(compiler, ANDPD_x_xm | EX86_SELECT_66(op) | EX86_SSE2, TMP_FREG, SLJIT_MEM0(), (sljit_sw)((op & SLJIT_32) ? sse2_buffer + 4 : sse2_buffer + 12)));
break;
}
- if (dst_r == TMP_FREG)
- return emit_sse2_store(compiler, op & SLJIT_32, dst, dstw, TMP_FREG);
- return SLJIT_SUCCESS;
+ return emit_sse2_store(compiler, op & SLJIT_32, dst, dstw, TMP_FREG);
}
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compiler, sljit_s32 op,
@@ -2919,46 +3295,83 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compil
dst_r = dst;
if (dst == src1)
; /* Do nothing here. */
- else if (dst == src2 && (op == SLJIT_ADD_F64 || op == SLJIT_MUL_F64)) {
+ else if (dst == src2 && (GET_OPCODE(op) == SLJIT_ADD_F64 || GET_OPCODE(op) == SLJIT_MUL_F64)) {
/* Swap arguments. */
src2 = src1;
src2w = src1w;
- }
- else if (dst != src2)
+ } else if (dst != src2)
FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, dst_r, src1, src1w));
else {
dst_r = TMP_FREG;
FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, TMP_FREG, src1, src1w));
}
- }
- else {
+ } else {
dst_r = TMP_FREG;
FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, TMP_FREG, src1, src1w));
}
switch (GET_OPCODE(op)) {
case SLJIT_ADD_F64:
- FAIL_IF(emit_sse2(compiler, ADDSD_x_xm, op & SLJIT_32, dst_r, src2, src2w));
+ FAIL_IF(emit_groupf(compiler, ADDSD_x_xm | EX86_SELECT_F2_F3(op) | EX86_SSE2, dst_r, src2, src2w));
break;
case SLJIT_SUB_F64:
- FAIL_IF(emit_sse2(compiler, SUBSD_x_xm, op & SLJIT_32, dst_r, src2, src2w));
+ FAIL_IF(emit_groupf(compiler, SUBSD_x_xm | EX86_SELECT_F2_F3(op) | EX86_SSE2, dst_r, src2, src2w));
break;
case SLJIT_MUL_F64:
- FAIL_IF(emit_sse2(compiler, MULSD_x_xm, op & SLJIT_32, dst_r, src2, src2w));
+ FAIL_IF(emit_groupf(compiler, MULSD_x_xm | EX86_SELECT_F2_F3(op) | EX86_SSE2, dst_r, src2, src2w));
break;
case SLJIT_DIV_F64:
- FAIL_IF(emit_sse2(compiler, DIVSD_x_xm, op & SLJIT_32, dst_r, src2, src2w));
+ FAIL_IF(emit_groupf(compiler, DIVSD_x_xm | EX86_SELECT_F2_F3(op) | EX86_SSE2, dst_r, src2, src2w));
break;
}
- if (dst_r == TMP_FREG)
+ if (dst_r != dst)
return emit_sse2_store(compiler, op & SLJIT_32, dst, dstw, TMP_FREG);
return SLJIT_SUCCESS;
}
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2r(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 dst_freg,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2, sljit_sw src2w)
+{
+ sljit_uw pref;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_fop2r(compiler, op, dst_freg, src1, src1w, src2, src2w));
+ ADJUST_LOCAL_OFFSET(src1, src1w);
+ ADJUST_LOCAL_OFFSET(src2, src2w);
+
+#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
+ compiler->mode32 = 1;
+#endif
+
+ if (dst_freg == src1) {
+ FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, TMP_FREG, src2, src2w));
+ pref = EX86_SELECT_66(op) | EX86_SSE2;
+ FAIL_IF(emit_groupf(compiler, XORPD_x_xm | pref, TMP_FREG, src1, src1w));
+ FAIL_IF(emit_groupf(compiler, ANDPD_x_xm | pref, TMP_FREG, SLJIT_MEM0(), (sljit_sw)((op & SLJIT_32) ? sse2_buffer : sse2_buffer + 8)));
+ return emit_groupf(compiler, XORPD_x_xm | pref, dst_freg, TMP_FREG, 0);
+ }
+
+ if (src1 & SLJIT_MEM) {
+ FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, TMP_FREG, src1, src1w));
+ src1 = TMP_FREG;
+ src1w = 0;
+ }
+
+ if (dst_freg != src2)
+ FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, dst_freg, src2, src2w));
+
+ pref = EX86_SELECT_66(op) | EX86_SSE2;
+ FAIL_IF(emit_groupf(compiler, XORPD_x_xm | pref, dst_freg, src1, src1w));
+ FAIL_IF(emit_groupf(compiler, ANDPD_x_xm | pref, dst_freg, SLJIT_MEM0(), (sljit_sw)((op & SLJIT_32) ? sse2_buffer : sse2_buffer + 8)));
+ return emit_groupf(compiler, XORPD_x_xm | pref, dst_freg, src1, src1w);
+}
+
/* --------------------------------------------------------------------- */
/* Conditional instructions */
/* --------------------------------------------------------------------- */
@@ -2978,11 +3391,9 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emit_label(struct sljit_compi
PTR_FAIL_IF(!label);
set_label(label, compiler);
- inst = (sljit_u8*)ensure_buf(compiler, 2);
+ inst = (sljit_u8*)ensure_buf(compiler, 1);
PTR_FAIL_IF(!inst);
-
- *inst++ = 0;
- *inst++ = 0;
+ inst[0] = SLJIT_INST_LABEL;
return label;
}
@@ -3000,18 +3411,13 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compile
set_jump(jump, compiler, (sljit_u32)((type & SLJIT_REWRITABLE_JUMP) | ((type & 0xff) << TYPE_SHIFT)));
type &= 0xff;
+ jump->addr = compiler->size;
/* Worst case size. */
-#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- compiler->size += (type >= SLJIT_JUMP) ? 5 : 6;
-#else
- compiler->size += (type >= SLJIT_JUMP) ? (10 + 3) : (2 + 10 + 3);
-#endif
-
- inst = (sljit_u8*)ensure_buf(compiler, 2);
+ compiler->size += (type >= SLJIT_JUMP) ? JUMP_MAX_SIZE : CJUMP_MAX_SIZE;
+ inst = (sljit_u8*)ensure_buf(compiler, 1);
PTR_FAIL_IF_NULL(inst);
- *inst++ = 0;
- *inst++ = 1;
+ inst[0] = SLJIT_INST_JUMP;
return jump;
}
@@ -3032,28 +3438,22 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compi
set_jump(jump, compiler, (sljit_u32)(JUMP_ADDR | (type << TYPE_SHIFT)));
jump->u.target = (sljit_uw)srcw;
+ jump->addr = compiler->size;
/* Worst case size. */
-#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- compiler->size += 5;
-#else
- compiler->size += 10 + 3;
-#endif
-
- inst = (sljit_u8*)ensure_buf(compiler, 2);
+ compiler->size += JUMP_MAX_SIZE;
+ inst = (sljit_u8*)ensure_buf(compiler, 1);
FAIL_IF_NULL(inst);
- *inst++ = 0;
- *inst++ = 1;
- }
- else {
+ inst[0] = SLJIT_INST_JUMP;
+ } else {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
/* REX_W is not necessary (src is not immediate). */
compiler->mode32 = 1;
#endif
inst = emit_x86_instruction(compiler, 1, 0, 0, src, srcw);
FAIL_IF(!inst);
- *inst++ = GROUP_FF;
- *inst = U8(*inst | ((type >= SLJIT_FAST_CALL) ? CALL_rm : JMP_rm));
+ inst[0] = GROUP_FF;
+ inst[1] = U8(inst[1] | ((type >= SLJIT_FAST_CALL) ? CALL_rm : JMP_rm));
}
return SLJIT_SUCCESS;
}
@@ -3063,10 +3463,10 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *co
sljit_s32 type)
{
sljit_u8 *inst;
- sljit_u8 cond_set = 0;
+ sljit_u8 cond_set;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
sljit_s32 reg;
-#endif
+#endif /* !SLJIT_CONFIG_X86_64 */
/* ADJUST_LOCAL_OFFSET and CHECK_EXTRA_REGS might overwrite these values. */
sljit_s32 dst_save = dst;
sljit_sw dstw_save = dstw;
@@ -3086,13 +3486,13 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *co
FAIL_IF(!inst);
INC_SIZE(4 + 3);
/* Set low register to conditional flag. */
- *inst++ = (reg_map[TMP_REG1] <= 7) ? REX : REX_B;
- *inst++ = GROUP_0F;
- *inst++ = cond_set;
- *inst++ = MOD_REG | reg_lmap[TMP_REG1];
- *inst++ = U8(REX | (reg_map[TMP_REG1] <= 7 ? 0 : REX_R) | (reg_map[dst] <= 7 ? 0 : REX_B));
- *inst++ = OR_rm8_r8;
- *inst++ = U8(MOD_REG | (reg_lmap[TMP_REG1] << 3) | reg_lmap[dst]);
+ inst[0] = (reg_map[TMP_REG1] <= 7) ? REX : REX_B;
+ inst[1] = GROUP_0F;
+ inst[2] = cond_set;
+ inst[3] = MOD_REG | reg_lmap[TMP_REG1];
+ inst[4] = U8(REX | (reg_map[TMP_REG1] <= 7 ? 0 : REX_R) | (reg_map[dst] <= 7 ? 0 : REX_B));
+ inst[5] = OR_rm8_r8;
+ inst[6] = U8(MOD_REG | (reg_lmap[TMP_REG1] << 3) | reg_lmap[dst]);
return SLJIT_SUCCESS;
}
@@ -3102,15 +3502,15 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *co
FAIL_IF(!inst);
INC_SIZE(4 + 4);
/* Set low register to conditional flag. */
- *inst++ = (reg_map[reg] <= 7) ? REX : REX_B;
- *inst++ = GROUP_0F;
- *inst++ = cond_set;
- *inst++ = MOD_REG | reg_lmap[reg];
- *inst++ = REX_W | (reg_map[reg] <= 7 ? 0 : (REX_B | REX_R));
+ inst[0] = (reg_map[reg] <= 7) ? REX : REX_B;
+ inst[1] = GROUP_0F;
+ inst[2] = cond_set;
+ inst[3] = MOD_REG | reg_lmap[reg];
+ inst[4] = REX_W | (reg_map[reg] <= 7 ? 0 : (REX_B | REX_R));
/* The movzx instruction does not affect flags. */
- *inst++ = GROUP_0F;
- *inst++ = MOVZX_r_rm8;
- *inst = U8(MOD_REG | (reg_lmap[reg] << 3) | reg_lmap[reg]);
+ inst[5] = GROUP_0F;
+ inst[6] = MOVZX_r_rm8;
+ inst[7] = U8(MOD_REG | (reg_lmap[reg] << 3) | reg_lmap[reg]);
if (reg != TMP_REG1)
return SLJIT_SUCCESS;
@@ -3123,156 +3523,1340 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *co
SLJIT_SKIP_CHECKS(compiler);
return sljit_emit_op2(compiler, op, dst_save, dstw_save, dst_save, dstw_save, TMP_REG1, 0);
-#else
+#else /* !SLJIT_CONFIG_X86_64 */
+ SLJIT_ASSERT(reg_map[TMP_REG1] < 4);
+
/* The SLJIT_CONFIG_X86_32 code path starts here. */
- if (GET_OPCODE(op) < SLJIT_ADD && FAST_IS_REG(dst)) {
- if (reg_map[dst] <= 4) {
- /* Low byte is accessible. */
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 3 + 3);
- FAIL_IF(!inst);
- INC_SIZE(3 + 3);
- /* Set low byte to conditional flag. */
- *inst++ = GROUP_0F;
- *inst++ = cond_set;
- *inst++ = U8(MOD_REG | reg_map[dst]);
-
- *inst++ = GROUP_0F;
- *inst++ = MOVZX_r_rm8;
- *inst = U8(MOD_REG | (reg_map[dst] << 3) | reg_map[dst]);
- return SLJIT_SUCCESS;
+ if (GET_OPCODE(op) < SLJIT_ADD && FAST_IS_REG(dst) && reg_map[dst] <= 4) {
+ /* Low byte is accessible. */
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 3 + 3);
+ FAIL_IF(!inst);
+ INC_SIZE(3 + 3);
+ /* Set low byte to conditional flag. */
+ inst[0] = GROUP_0F;
+ inst[1] = cond_set;
+ inst[2] = U8(MOD_REG | reg_map[dst]);
+
+ inst[3] = GROUP_0F;
+ inst[4] = MOVZX_r_rm8;
+ inst[5] = U8(MOD_REG | (reg_map[dst] << 3) | reg_map[dst]);
+ return SLJIT_SUCCESS;
+ }
+
+ if (GET_OPCODE(op) == SLJIT_OR && !GET_ALL_FLAGS(op) && FAST_IS_REG(dst) && reg_map[dst] <= 4) {
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 3 + 2);
+ FAIL_IF(!inst);
+ INC_SIZE(3 + 2);
+
+ /* Set low byte to conditional flag. */
+ inst[0] = GROUP_0F;
+ inst[1] = cond_set;
+ inst[2] = U8(MOD_REG | reg_map[TMP_REG1]);
+
+ inst[3] = OR_rm8_r8;
+ inst[4] = U8(MOD_REG | (reg_map[TMP_REG1] << 3) | reg_map[dst]);
+ return SLJIT_SUCCESS;
+ }
+
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 3 + 3);
+ FAIL_IF(!inst);
+ INC_SIZE(3 + 3);
+ /* Set low byte to conditional flag. */
+ inst[0] = GROUP_0F;
+ inst[1] = cond_set;
+ inst[2] = U8(MOD_REG | reg_map[TMP_REG1]);
+
+ inst[3] = GROUP_0F;
+ inst[4] = MOVZX_r_rm8;
+ inst[5] = U8(MOD_REG | (reg_map[TMP_REG1] << 3) | reg_map[TMP_REG1]);
+
+ if (GET_OPCODE(op) < SLJIT_ADD)
+ return emit_mov(compiler, dst, dstw, TMP_REG1, 0);
+
+ SLJIT_SKIP_CHECKS(compiler);
+ return sljit_emit_op2(compiler, op, dst_save, dstw_save, dst_save, dstw_save, TMP_REG1, 0);
+#endif /* SLJIT_CONFIG_X86_64 */
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fselect(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 dst_freg,
+ sljit_s32 src1, sljit_sw src1w,
+ sljit_s32 src2_freg)
+{
+ sljit_u8* inst;
+ sljit_uw size;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_fselect(compiler, type, dst_freg, src1, src1w, src2_freg));
+
+ ADJUST_LOCAL_OFFSET(src1, src1w);
+
+#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
+ compiler->mode32 = 1;
+#endif /* SLJIT_CONFIG_X86_64 */
+
+ if (dst_freg != src2_freg) {
+ if (dst_freg == src1) {
+ src1 = src2_freg;
+ src1w = 0;
+ type ^= 0x1;
+ } else
+ FAIL_IF(emit_sse2_load(compiler, type & SLJIT_32, dst_freg, src2_freg, 0));
+ }
+
+ inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
+ FAIL_IF(!inst);
+ INC_SIZE(2);
+ inst[0] = U8(get_jump_code((sljit_uw)(type & ~SLJIT_32) ^ 0x1) - 0x10);
+
+ size = compiler->size;
+ FAIL_IF(emit_sse2_load(compiler, type & SLJIT_32, dst_freg, src1, src1w));
+
+ inst[1] = U8(compiler->size - size);
+ return SLJIT_SUCCESS;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_mov(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
+ sljit_s32 srcdst, sljit_sw srcdstw)
+{
+ sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
+ sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
+ sljit_s32 alignment = SLJIT_SIMD_GET_ELEM2_SIZE(type);
+ sljit_uw op;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_mov(compiler, type, freg, srcdst, srcdstw));
+
+ ADJUST_LOCAL_OFFSET(srcdst, srcdstw);
+
+#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
+ compiler->mode32 = 1;
+#endif /* SLJIT_CONFIG_X86_64 */
+
+ switch (reg_size) {
+ case 4:
+ op = EX86_SSE2;
+ break;
+ case 5:
+ if (!(cpu_feature_list & CPU_FEATURE_AVX2))
+ return SLJIT_ERR_UNSUPPORTED;
+ op = EX86_SSE2 | VEX_256;
+ break;
+ default:
+ return SLJIT_ERR_UNSUPPORTED;
+ }
+
+ if (!(srcdst & SLJIT_MEM))
+ alignment = reg_size;
+
+ if (type & SLJIT_SIMD_FLOAT) {
+ if (elem_size == 2 || elem_size == 3) {
+ op |= alignment >= reg_size ? MOVAPS_x_xm : MOVUPS_x_xm;
+
+ if (elem_size == 3)
+ op |= EX86_PREF_66;
+
+ if (type & SLJIT_SIMD_STORE)
+ op += 1;
+ } else
+ return SLJIT_ERR_UNSUPPORTED;
+ } else {
+ op |= ((type & SLJIT_SIMD_STORE) ? MOVDQA_xm_x : MOVDQA_x_xm)
+ | (alignment >= reg_size ? EX86_PREF_66 : EX86_PREF_F3);
+ }
+
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ if ((op & VEX_256) || ((cpu_feature_list & CPU_FEATURE_AVX) && (compiler->options & SLJIT_ENTER_USE_VEX)))
+ return emit_vex_instruction(compiler, op, freg, 0, srcdst, srcdstw);
+
+ return emit_groupf(compiler, op, freg, srcdst, srcdstw);
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_replicate(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
+ sljit_s32 src, sljit_sw srcw)
+{
+ sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
+ sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
+ sljit_s32 use_vex = (cpu_feature_list & CPU_FEATURE_AVX) && (compiler->options & SLJIT_ENTER_USE_VEX);
+ sljit_u8 *inst;
+ sljit_u8 opcode = 0;
+ sljit_uw op;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_replicate(compiler, type, freg, src, srcw));
+
+ ADJUST_LOCAL_OFFSET(src, srcw);
+
+ if (!(type & SLJIT_SIMD_FLOAT)) {
+ CHECK_EXTRA_REGS(src, srcw, (void)0);
+ }
+
+#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
+ if ((type & SLJIT_SIMD_FLOAT) ? (elem_size < 2 || elem_size > 3) : (elem_size > 2))
+ return SLJIT_ERR_UNSUPPORTED;
+#else /* !SLJIT_CONFIG_X86_32 */
+ compiler->mode32 = 1;
+
+ if (elem_size > 3 || ((type & SLJIT_SIMD_FLOAT) && elem_size < 2))
+ return SLJIT_ERR_UNSUPPORTED;
+#endif /* SLJIT_CONFIG_X86_32 */
+
+ if (reg_size != 4 && (reg_size != 5 || !(cpu_feature_list & CPU_FEATURE_AVX2)))
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ if (reg_size == 5)
+ use_vex = 1;
+
+ if (use_vex && src != SLJIT_IMM) {
+ op = 0;
+
+ switch (elem_size) {
+ case 0:
+ if (cpu_feature_list & CPU_FEATURE_AVX2)
+ op = VPBROADCASTB_x_xm | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2;
+ break;
+ case 1:
+ if (cpu_feature_list & CPU_FEATURE_AVX2)
+ op = VPBROADCASTW_x_xm | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2;
+ break;
+ case 2:
+ if (type & SLJIT_SIMD_FLOAT) {
+ if ((cpu_feature_list & CPU_FEATURE_AVX2) || ((cpu_feature_list & CPU_FEATURE_AVX) && (src & SLJIT_MEM)))
+ op = VBROADCASTSS_x_xm | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2;
+ } else if (cpu_feature_list & CPU_FEATURE_AVX2)
+ op = VPBROADCASTD_x_xm | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2;
+ break;
+ default:
+#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
+ if (!(type & SLJIT_SIMD_FLOAT)) {
+ if (cpu_feature_list & CPU_FEATURE_AVX2)
+ op = VPBROADCASTQ_x_xm | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2;
+ break;
+ }
+#endif /* SLJIT_CONFIG_X86_64 */
+
+ if (reg_size == 5)
+ op = VBROADCASTSD_x_xm | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2;
+ break;
}
- /* Low byte is not accessible. */
- if (cpu_feature_list == 0)
- get_cpu_features();
+ if (op != 0) {
+ if (!(src & SLJIT_MEM) && !(type & SLJIT_SIMD_FLOAT)) {
+#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
+ if (elem_size >= 3)
+ compiler->mode32 = 0;
+#endif /* SLJIT_CONFIG_X86_64 */
+ FAIL_IF(emit_vex_instruction(compiler, MOVD_x_rm | VEX_AUTO_W | EX86_PREF_66 | EX86_SSE2_OP1, freg, 0, src, srcw));
+#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
+ compiler->mode32 = 1;
+#endif /* SLJIT_CONFIG_X86_64 */
+ src = freg;
+ srcw = 0;
+ }
- if (cpu_feature_list & CPU_FEATURE_CMOV) {
- EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, 1);
- /* a xor reg, reg operation would overwrite the flags. */
- EMIT_MOV(compiler, dst, 0, SLJIT_IMM, 0);
+ if (reg_size == 5)
+ op |= VEX_256;
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 3);
- FAIL_IF(!inst);
- INC_SIZE(3);
+ return emit_vex_instruction(compiler, op, freg, 0, src, srcw);
+ }
+ }
+
+ if (type & SLJIT_SIMD_FLOAT) {
+ if (src == SLJIT_IMM) {
+ if (use_vex)
+ return emit_vex_instruction(compiler, XORPD_x_xm | (reg_size == 5 ? VEX_256 : 0) | (elem_size == 3 ? EX86_PREF_66 : 0) | EX86_SSE2 | VEX_SSE2_OPV, freg, freg, freg, 0);
+
+ return emit_groupf(compiler, XORPD_x_xm | (elem_size == 3 ? EX86_PREF_66 : 0) | EX86_SSE2, freg, freg, 0);
+ }
+
+ SLJIT_ASSERT(reg_size == 4);
+
+ if (use_vex) {
+ if (elem_size == 3)
+ return emit_vex_instruction(compiler, MOVDDUP_x_xm | EX86_PREF_F2 | EX86_SSE2, freg, 0, src, srcw);
+
+ SLJIT_ASSERT(!(src & SLJIT_MEM));
+ FAIL_IF(emit_vex_instruction(compiler, SHUFPS_x_xm | EX86_SSE2 | VEX_SSE2_OPV, freg, src, src, 0));
+ return emit_byte(compiler, 0);
+ }
+
+ if (elem_size == 2 && freg != src) {
+ FAIL_IF(emit_sse2_load(compiler, 1, freg, src, srcw));
+ src = freg;
+ srcw = 0;
+ }
+
+ op = (elem_size == 2 ? SHUFPS_x_xm : MOVDDUP_x_xm) | (elem_size == 2 ? 0 : EX86_PREF_F2) | EX86_SSE2;
+ FAIL_IF(emit_groupf(compiler, op, freg, src, srcw));
+
+ if (elem_size == 2)
+ return emit_byte(compiler, 0);
+ return SLJIT_SUCCESS;
+ }
+
+ if (src == SLJIT_IMM) {
+ if (elem_size == 0) {
+ srcw = (sljit_u8)srcw;
+ srcw |= srcw << 8;
+ srcw |= srcw << 16;
+ elem_size = 2;
+ } else if (elem_size == 1) {
+ srcw = (sljit_u16)srcw;
+ srcw |= srcw << 16;
+ elem_size = 2;
+ }
+
+#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
+ if (elem_size == 2 && (sljit_s32)srcw == -1)
+ srcw = -1;
+#endif /* SLJIT_CONFIG_X86_64 */
+
+ if (srcw == 0 || srcw == -1) {
+ if (use_vex)
+ return emit_vex_instruction(compiler, (srcw == 0 ? PXOR_x_xm : PCMPEQD_x_xm) | (reg_size == 5 ? VEX_256 : 0) | EX86_PREF_66 | EX86_SSE2 | VEX_SSE2_OPV, freg, freg, freg, 0);
+
+ return emit_groupf(compiler, (srcw == 0 ? PXOR_x_xm : PCMPEQD_x_xm) | EX86_PREF_66 | EX86_SSE2, freg, freg, 0);
+ }
+
+#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
+ if (elem_size == 3)
+ FAIL_IF(emit_load_imm64(compiler, TMP_REG1, srcw));
+ else
+#endif /* SLJIT_CONFIG_X86_64 */
+ EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, srcw);
+
+ src = TMP_REG1;
+ srcw = 0;
+
+ }
+
+ op = 2;
+ opcode = MOVD_x_rm;
+
+ switch (elem_size) {
+ case 0:
+ if (!FAST_IS_REG(src)) {
+ opcode = 0x3a /* Prefix of PINSRB_x_rm_i8. */;
+ op = 3;
+ }
+ break;
+ case 1:
+ if (!FAST_IS_REG(src))
+ opcode = PINSRW_x_rm_i8;
+ break;
+ case 2:
+ break;
+#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
+ case 3:
+ /* MOVQ */
+ compiler->mode32 = 0;
+ break;
+#endif /* SLJIT_CONFIG_X86_64 */
+ }
+
+ if (use_vex) {
+ if (opcode != MOVD_x_rm) {
+ op = (opcode == 0x3a) ? (PINSRB_x_rm_i8 | VEX_OP_0F3A) : opcode;
+ FAIL_IF(emit_vex_instruction(compiler, op | EX86_PREF_66 | EX86_SSE2_OP1 | VEX_SSE2_OPV, freg, freg, src, srcw));
+ } else
+ FAIL_IF(emit_vex_instruction(compiler, MOVD_x_rm | VEX_AUTO_W | EX86_PREF_66 | EX86_SSE2_OP1, freg, 0, src, srcw));
+ } else {
+ inst = emit_x86_instruction(compiler, op | EX86_PREF_66 | EX86_SSE2_OP1, freg, 0, src, srcw);
+ FAIL_IF(!inst);
+ inst[0] = GROUP_0F;
+ inst[1] = opcode;
+
+ if (op == 3) {
+ SLJIT_ASSERT(opcode == 0x3a);
+ inst[2] = PINSRB_x_rm_i8;
+ }
+ }
+
+ if (use_vex && elem_size >= 2) {
+#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
+ op = VPBROADCASTD_x_xm;
+#else /* !SLJIT_CONFIG_X86_32 */
+ op = (elem_size == 3) ? VPBROADCASTQ_x_xm : VPBROADCASTD_x_xm;
+#endif /* SLJIT_CONFIG_X86_32 */
+ return emit_vex_instruction(compiler, op | ((reg_size == 5) ? VEX_256 : 0) | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2, freg, 0, freg, 0);
+ }
+
+ SLJIT_ASSERT(reg_size == 4);
+
+ if (opcode != MOVD_x_rm)
+ FAIL_IF(emit_byte(compiler, 0));
+
+ switch (elem_size) {
+ case 0:
+ if (use_vex) {
+ FAIL_IF(emit_vex_instruction(compiler, PXOR_x_xm | EX86_PREF_66 | EX86_SSE2 | VEX_SSE2_OPV, TMP_FREG, TMP_FREG, TMP_FREG, 0));
+ return emit_vex_instruction(compiler, PSHUFB_x_xm | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2 | VEX_SSE2_OPV, freg, freg, TMP_FREG, 0);
+ }
+ FAIL_IF(emit_groupf(compiler, PXOR_x_xm | EX86_PREF_66 | EX86_SSE2, TMP_FREG, TMP_FREG, 0));
+ return emit_groupf_ext(compiler, PSHUFB_x_xm | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2, freg, TMP_FREG, 0);
+ case 1:
+ if (use_vex)
+ FAIL_IF(emit_vex_instruction(compiler, PSHUFLW_x_xm | EX86_PREF_F2 | EX86_SSE2, freg, 0, freg, 0));
+ else
+ FAIL_IF(emit_groupf(compiler, PSHUFLW_x_xm | EX86_PREF_F2 | EX86_SSE2, freg, freg, 0));
+ FAIL_IF(emit_byte(compiler, 0));
+ /* fallthrough */
+ default:
+ if (use_vex)
+ FAIL_IF(emit_vex_instruction(compiler, PSHUFD_x_xm | EX86_PREF_66 | EX86_SSE2, freg, 0, freg, 0));
+ else
+ FAIL_IF(emit_groupf(compiler, PSHUFD_x_xm | EX86_PREF_66 | EX86_SSE2, freg, freg, 0));
+ return emit_byte(compiler, 0);
+#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
+ case 3:
+ compiler->mode32 = 1;
+ if (use_vex)
+ FAIL_IF(emit_vex_instruction(compiler, PSHUFD_x_xm | EX86_PREF_66 | EX86_SSE2, freg, 0, freg, 0));
+ else
+ FAIL_IF(emit_groupf(compiler, PSHUFD_x_xm | EX86_PREF_66 | EX86_SSE2, freg, freg, 0));
+ return emit_byte(compiler, 0x44);
+#endif /* SLJIT_CONFIG_X86_64 */
+ }
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_lane_mov(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg, sljit_s32 lane_index,
+ sljit_s32 srcdst, sljit_sw srcdstw)
+{
+ sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
+ sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
+ sljit_s32 use_vex = (cpu_feature_list & CPU_FEATURE_AVX) && (compiler->options & SLJIT_ENTER_USE_VEX);
+ sljit_u8 *inst;
+ sljit_u8 opcode = 0;
+ sljit_uw op;
+ sljit_s32 freg_orig = freg;
+#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
+ sljit_s32 srcdst_is_ereg = 0;
+ sljit_s32 srcdst_orig = 0;
+ sljit_sw srcdstw_orig = 0;
+#endif /* SLJIT_CONFIG_X86_32 */
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_lane_mov(compiler, type, freg, lane_index, srcdst, srcdstw));
+
+ ADJUST_LOCAL_OFFSET(srcdst, srcdstw);
+
+ if (reg_size == 5) {
+ if (!(cpu_feature_list & CPU_FEATURE_AVX2))
+ return SLJIT_ERR_UNSUPPORTED;
+ use_vex = 1;
+ } else if (reg_size != 4)
+ return SLJIT_ERR_UNSUPPORTED;
+
+#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
+ if ((type & SLJIT_SIMD_FLOAT) ? (elem_size < 2 || elem_size > 3) : elem_size > 2)
+ return SLJIT_ERR_UNSUPPORTED;
+#else /* SLJIT_CONFIG_X86_32 */
+ if (elem_size > 3 || ((type & SLJIT_SIMD_FLOAT) && elem_size < 2))
+ return SLJIT_ERR_UNSUPPORTED;
+#endif /* SLJIT_CONFIG_X86_32 */
+
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
+ compiler->mode32 = 1;
+#else /* !SLJIT_CONFIG_X86_64 */
+ if (!(type & SLJIT_SIMD_FLOAT)) {
+ CHECK_EXTRA_REGS(srcdst, srcdstw, srcdst_is_ereg = 1);
+
+ if ((type & SLJIT_SIMD_STORE) && ((srcdst_is_ereg && elem_size < 2) || (elem_size == 0 && (type & SLJIT_SIMD_LANE_SIGNED) && FAST_IS_REG(srcdst) && reg_map[srcdst] >= 4))) {
+ srcdst_orig = srcdst;
+ srcdstw_orig = srcdstw;
+ srcdst = TMP_REG1;
+ srcdstw = 0;
+ }
+ }
+#endif /* SLJIT_CONFIG_X86_64 */
+
+ if (type & SLJIT_SIMD_LANE_ZERO) {
+ if (lane_index == 0) {
+ if (!(type & SLJIT_SIMD_FLOAT)) {
+#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
+ if (elem_size == 3) {
+ compiler->mode32 = 0;
+ elem_size = 2;
+ }
+#endif /* SLJIT_CONFIG_X86_64 */
+ if (srcdst == SLJIT_IMM) {
+ if (elem_size == 0)
+ srcdstw = (sljit_u8)srcdstw;
+ else if (elem_size == 1)
+ srcdstw = (sljit_u16)srcdstw;
+
+ EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, srcdstw);
+ srcdst = TMP_REG1;
+ srcdstw = 0;
+ elem_size = 2;
+ }
+
+ if (elem_size == 2) {
+ if (use_vex)
+ return emit_vex_instruction(compiler, MOVD_x_rm | VEX_AUTO_W | EX86_PREF_66 | EX86_SSE2_OP1, freg, 0, srcdst, srcdstw);
+ return emit_groupf(compiler, MOVD_x_rm | EX86_PREF_66 | EX86_SSE2_OP1, freg, srcdst, srcdstw);
+ }
+ } else if (srcdst & SLJIT_MEM) {
+ SLJIT_ASSERT(elem_size == 2 || elem_size == 3);
+
+ if (use_vex)
+ return emit_vex_instruction(compiler, MOVSD_x_xm | (elem_size == 2 ? EX86_PREF_F3 : EX86_PREF_F2) | EX86_SSE2, freg, 0, srcdst, srcdstw);
+ return emit_groupf(compiler, MOVSD_x_xm | (elem_size == 2 ? EX86_PREF_F3 : EX86_PREF_F2) | EX86_SSE2, freg, srcdst, srcdstw);
+ } else if (elem_size == 3) {
+ if (use_vex)
+ return emit_vex_instruction(compiler, MOVQ_x_xm | EX86_PREF_F3 | EX86_SSE2, freg, 0, srcdst, 0);
+ return emit_groupf(compiler, MOVQ_x_xm | EX86_PREF_F3 | EX86_SSE2, freg, srcdst, 0);
+ } else if (use_vex) {
+ FAIL_IF(emit_vex_instruction(compiler, XORPD_x_xm | EX86_SSE2 | VEX_SSE2_OPV, TMP_FREG, TMP_FREG, TMP_FREG, 0));
+ return emit_vex_instruction(compiler, MOVSD_x_xm | EX86_PREF_F3 | EX86_SSE2 | VEX_SSE2_OPV, freg, TMP_FREG, srcdst, 0);
+ }
+ }
+
+ if (reg_size == 5 && lane_index >= (1 << (4 - elem_size))) {
+ freg = TMP_FREG;
+ lane_index -= (1 << (4 - elem_size));
+ } else if ((type & SLJIT_SIMD_FLOAT) && freg == srcdst) {
+ if (use_vex)
+ FAIL_IF(emit_vex_instruction(compiler, MOVSD_x_xm | (elem_size == 2 ? EX86_PREF_F3 : EX86_PREF_F2) | EX86_SSE2 | VEX_SSE2_OPV, TMP_FREG, TMP_FREG, srcdst, srcdstw));
+ else
+ FAIL_IF(emit_sse2_load(compiler, elem_size == 2, TMP_FREG, srcdst, srcdstw));
+ srcdst = TMP_FREG;
+ srcdstw = 0;
+ }
+
+ op = ((!(type & SLJIT_SIMD_FLOAT) || elem_size != 2) ? EX86_PREF_66 : 0)
+ | ((type & SLJIT_SIMD_FLOAT) ? XORPD_x_xm : PXOR_x_xm) | EX86_SSE2;
+
+ if (use_vex)
+ FAIL_IF(emit_vex_instruction(compiler, op | (reg_size == 5 ? VEX_256 : 0) | VEX_SSE2_OPV, freg, freg, freg, 0));
+ else
+ FAIL_IF(emit_groupf(compiler, op, freg, freg, 0));
+ } else if (reg_size == 5 && lane_index >= (1 << (4 - elem_size))) {
+ FAIL_IF(emit_vex_instruction(compiler, ((type & SLJIT_SIMD_FLOAT) ? VEXTRACTF128_x_ym : VEXTRACTI128_x_ym) | VEX_256 | EX86_PREF_66 | VEX_OP_0F3A | EX86_SSE2, freg, 0, TMP_FREG, 0));
+ FAIL_IF(emit_byte(compiler, 1));
+
+ freg = TMP_FREG;
+ lane_index -= (1 << (4 - elem_size));
+ }
+
+ if (type & SLJIT_SIMD_FLOAT) {
+ if (elem_size == 3) {
+ if (srcdst & SLJIT_MEM) {
+ if (type & SLJIT_SIMD_STORE)
+ op = lane_index == 0 ? MOVLPD_m_x : MOVHPD_m_x;
+ else
+ op = lane_index == 0 ? MOVLPD_x_m : MOVHPD_x_m;
+
+ /* VEX prefix clears upper bits of the target register. */
+ if (use_vex && ((type & SLJIT_SIMD_STORE) || reg_size == 4 || freg == TMP_FREG))
+ FAIL_IF(emit_vex_instruction(compiler, op | EX86_PREF_66 | EX86_SSE2
+ | ((type & SLJIT_SIMD_STORE) ? 0 : VEX_SSE2_OPV), freg, (type & SLJIT_SIMD_STORE) ? 0 : freg, srcdst, srcdstw));
+ else
+ FAIL_IF(emit_groupf(compiler, op | EX86_PREF_66 | EX86_SSE2, freg, srcdst, srcdstw));
+
+ /* In case of store, freg is not TMP_FREG. */
+ } else if (type & SLJIT_SIMD_STORE) {
+ if (lane_index == 1) {
+ if (use_vex)
+ return emit_vex_instruction(compiler, MOVHLPS_x_x | EX86_SSE2 | VEX_SSE2_OPV, srcdst, srcdst, freg, 0);
+ return emit_groupf(compiler, MOVHLPS_x_x | EX86_SSE2, srcdst, freg, 0);
+ }
+ if (use_vex)
+ return emit_vex_instruction(compiler, MOVSD_x_xm | EX86_PREF_F2 | EX86_SSE2 | VEX_SSE2_OPV, srcdst, srcdst, freg, 0);
+ return emit_sse2_load(compiler, 0, srcdst, freg, 0);
+ } else if (use_vex && (reg_size == 4 || freg == TMP_FREG)) {
+ if (lane_index == 1)
+ FAIL_IF(emit_vex_instruction(compiler, MOVLHPS_x_x | EX86_SSE2 | VEX_SSE2_OPV, freg, freg, srcdst, 0));
+ else
+ FAIL_IF(emit_vex_instruction(compiler, MOVSD_x_xm | EX86_PREF_F2 | EX86_SSE2 | VEX_SSE2_OPV, freg, freg, srcdst, 0));
+ } else {
+ if (lane_index == 1)
+ FAIL_IF(emit_groupf(compiler, MOVLHPS_x_x | EX86_SSE2, freg, srcdst, 0));
+ else
+ FAIL_IF(emit_sse2_load(compiler, 0, freg, srcdst, 0));
+ }
+ } else if (type & SLJIT_SIMD_STORE) {
+ if (lane_index == 0) {
+ if (use_vex)
+ return emit_vex_instruction(compiler, ((srcdst & SLJIT_MEM) ? MOVSD_xm_x : MOVSD_x_xm) | EX86_PREF_F3 | EX86_SSE2
+ | ((srcdst & SLJIT_MEM) ? 0 : VEX_SSE2_OPV), freg, ((srcdst & SLJIT_MEM) ? 0 : freg), srcdst, srcdstw);
+ return emit_sse2_store(compiler, 1, srcdst, srcdstw, freg);
+ }
+
+ if (srcdst & SLJIT_MEM) {
+ if (use_vex)
+ FAIL_IF(emit_vex_instruction(compiler, EXTRACTPS_x_xm | EX86_PREF_66 | VEX_OP_0F3A | EX86_SSE2, freg, 0, srcdst, srcdstw));
+ else
+ FAIL_IF(emit_groupf_ext(compiler, EXTRACTPS_x_xm | EX86_PREF_66 | VEX_OP_0F3A | EX86_SSE2, freg, srcdst, srcdstw));
+ return emit_byte(compiler, U8(lane_index));
+ }
+
+ if (use_vex) {
+ FAIL_IF(emit_vex_instruction(compiler, SHUFPS_x_xm | EX86_SSE2 | VEX_SSE2_OPV, srcdst, freg, freg, 0));
+ return emit_byte(compiler, U8(lane_index));
+ }
+
+ if (srcdst == freg)
+ op = SHUFPS_x_xm | EX86_SSE2;
+ else {
+ switch (lane_index) {
+ case 1:
+ op = MOVSHDUP_x_xm | EX86_PREF_F3 | EX86_SSE2;
+ break;
+ case 2:
+ op = MOVHLPS_x_x | EX86_SSE2;
+ break;
+ default:
+ SLJIT_ASSERT(lane_index == 3);
+ op = PSHUFD_x_xm | EX86_PREF_66 | EX86_SSE2;
+ break;
+ }
+ }
+
+ FAIL_IF(emit_groupf(compiler, op, srcdst, freg, 0));
+
+ op &= 0xff;
+ if (op == SHUFPS_x_xm || op == PSHUFD_x_xm)
+ return emit_byte(compiler, U8(lane_index));
- *inst++ = GROUP_0F;
- /* cmovcc = setcc - 0x50. */
- *inst++ = U8(cond_set - 0x50);
- *inst++ = U8(MOD_REG | (reg_map[dst] << 3) | reg_map[TMP_REG1]);
return SLJIT_SUCCESS;
+ } else {
+ if (lane_index != 0 || (srcdst & SLJIT_MEM)) {
+ FAIL_IF(emit_groupf_ext(compiler, INSERTPS_x_xm | EX86_PREF_66 | VEX_OP_0F3A | EX86_SSE2, freg, srcdst, srcdstw));
+ FAIL_IF(emit_byte(compiler, U8(lane_index << 4)));
+ } else
+ FAIL_IF(emit_sse2_store(compiler, 1, freg, 0, srcdst));
+ }
+
+ if (freg != TMP_FREG || (type & SLJIT_SIMD_STORE))
+ return SLJIT_SUCCESS;
+
+ SLJIT_ASSERT(reg_size == 5);
+
+ if (type & SLJIT_SIMD_LANE_ZERO) {
+ FAIL_IF(emit_vex_instruction(compiler, VPERMPD_y_ym | VEX_256 | EX86_PREF_66 | VEX_OP_0F3A | VEX_W | EX86_SSE2, freg_orig, 0, TMP_FREG, 0));
+ return emit_byte(compiler, 0x4e);
}
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 1 + 3 + 3 + 1);
+ FAIL_IF(emit_vex_instruction(compiler, VINSERTF128_y_y_xm | VEX_256 | EX86_PREF_66 | VEX_OP_0F3A | EX86_SSE2 | VEX_SSE2_OPV, freg_orig, freg_orig, TMP_FREG, 0));
+ return emit_byte(compiler, 1);
+ }
+
+ if (srcdst == SLJIT_IMM) {
+ EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, srcdstw);
+ srcdst = TMP_REG1;
+ srcdstw = 0;
+ }
+
+ op = 3;
+
+ switch (elem_size) {
+ case 0:
+ opcode = (type & SLJIT_SIMD_STORE) ? PEXTRB_rm_x_i8 : PINSRB_x_rm_i8;
+ break;
+ case 1:
+ if (!(type & SLJIT_SIMD_STORE)) {
+ op = 2;
+ opcode = PINSRW_x_rm_i8;
+ } else
+ opcode = PEXTRW_rm_x_i8;
+ break;
+ case 2:
+ opcode = (type & SLJIT_SIMD_STORE) ? PEXTRD_rm_x_i8 : PINSRD_x_rm_i8;
+ break;
+#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
+ case 3:
+ /* PINSRQ / PEXTRQ */
+ opcode = (type & SLJIT_SIMD_STORE) ? PEXTRD_rm_x_i8 : PINSRD_x_rm_i8;
+ compiler->mode32 = 0;
+ break;
+#endif /* SLJIT_CONFIG_X86_64 */
+ }
+
+ if (use_vex && (type & SLJIT_SIMD_STORE)) {
+ op = opcode | ((op == 3) ? VEX_OP_0F3A : 0);
+ FAIL_IF(emit_vex_instruction(compiler, op | EX86_PREF_66 | VEX_AUTO_W | EX86_SSE2_OP1 | VEX_SSE2_OPV, freg, 0, srcdst, srcdstw));
+ } else {
+ inst = emit_x86_instruction(compiler, op | EX86_PREF_66 | EX86_SSE2_OP1, freg, 0, srcdst, srcdstw);
FAIL_IF(!inst);
- INC_SIZE(1 + 3 + 3 + 1);
- *inst++ = U8(XCHG_EAX_r | reg_map[TMP_REG1]);
- /* Set al to conditional flag. */
- *inst++ = GROUP_0F;
- *inst++ = cond_set;
- *inst++ = MOD_REG | 0 /* eax */;
-
- *inst++ = GROUP_0F;
- *inst++ = MOVZX_r_rm8;
- *inst++ = U8(MOD_REG | (reg_map[dst] << 3) | 0 /* eax */);
- *inst++ = U8(XCHG_EAX_r | reg_map[TMP_REG1]);
+ inst[0] = GROUP_0F;
+
+ if (op == 3) {
+ inst[1] = 0x3a;
+ inst[2] = opcode;
+ } else
+ inst[1] = opcode;
+ }
+
+ FAIL_IF(emit_byte(compiler, U8(lane_index)));
+
+ if (!(type & SLJIT_SIMD_LANE_SIGNED) || (srcdst & SLJIT_MEM)) {
+ if (freg == TMP_FREG && !(type & SLJIT_SIMD_STORE)) {
+ SLJIT_ASSERT(reg_size == 5);
+
+ if (type & SLJIT_SIMD_LANE_ZERO) {
+ FAIL_IF(emit_vex_instruction(compiler, VPERMQ_y_ym | VEX_256 | EX86_PREF_66 | VEX_OP_0F3A | VEX_W | EX86_SSE2, freg_orig, 0, TMP_FREG, 0));
+ return emit_byte(compiler, 0x4e);
+ }
+
+ FAIL_IF(emit_vex_instruction(compiler, VINSERTI128_y_y_xm | VEX_256 | EX86_PREF_66 | VEX_OP_0F3A | EX86_SSE2 | VEX_SSE2_OPV, freg_orig, freg_orig, TMP_FREG, 0));
+ return emit_byte(compiler, 1);
+ }
+
+#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
+ if (srcdst_orig & SLJIT_MEM)
+ return emit_mov(compiler, srcdst_orig, srcdstw_orig, TMP_REG1, 0);
+#endif /* SLJIT_CONFIG_X86_32 */
return SLJIT_SUCCESS;
}
- if (GET_OPCODE(op) == SLJIT_OR && !GET_ALL_FLAGS(op) && FAST_IS_REG(dst) && reg_map[dst] <= 4) {
- SLJIT_ASSERT(reg_map[SLJIT_R0] == 0);
+#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
+ if (elem_size >= 3)
+ return SLJIT_SUCCESS;
- if (dst != SLJIT_R0) {
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 1 + 3 + 2 + 1);
- FAIL_IF(!inst);
- INC_SIZE(1 + 3 + 2 + 1);
- /* Set low register to conditional flag. */
- *inst++ = U8(XCHG_EAX_r | reg_map[TMP_REG1]);
- *inst++ = GROUP_0F;
- *inst++ = cond_set;
- *inst++ = MOD_REG | 0 /* eax */;
- *inst++ = OR_rm8_r8;
- *inst++ = MOD_REG | (0 /* eax */ << 3) | reg_map[dst];
- *inst++ = U8(XCHG_EAX_r | reg_map[TMP_REG1]);
+ compiler->mode32 = (type & SLJIT_32);
+
+ op = 2;
+
+ if (elem_size == 0)
+ op |= EX86_REX;
+
+ if (elem_size == 2) {
+ if (type & SLJIT_32)
+ return SLJIT_SUCCESS;
+
+ SLJIT_ASSERT(!(compiler->mode32));
+ op = 1;
+ }
+
+ inst = emit_x86_instruction(compiler, op, srcdst, 0, srcdst, 0);
+ FAIL_IF(!inst);
+
+ if (op != 1) {
+ inst[0] = GROUP_0F;
+ inst[1] = U8((elem_size == 0) ? MOVSX_r_rm8 : MOVSX_r_rm16);
+ } else
+ inst[0] = MOVSXD_r_rm;
+#else /* !SLJIT_CONFIG_X86_64 */
+ if (elem_size >= 2)
+ return SLJIT_SUCCESS;
+
+ FAIL_IF(emit_groupf(compiler, (elem_size == 0) ? MOVSX_r_rm8 : MOVSX_r_rm16,
+ (srcdst_orig != 0 && FAST_IS_REG(srcdst_orig)) ? srcdst_orig : srcdst, srcdst, 0));
+
+ if (srcdst_orig & SLJIT_MEM)
+ return emit_mov(compiler, srcdst_orig, srcdstw_orig, TMP_REG1, 0);
+#endif /* SLJIT_CONFIG_X86_64 */
+ return SLJIT_SUCCESS;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_lane_replicate(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
+ sljit_s32 src, sljit_s32 src_lane_index)
+{
+ sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
+ sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
+ sljit_s32 use_vex = (cpu_feature_list & CPU_FEATURE_AVX) && (compiler->options & SLJIT_ENTER_USE_VEX);
+ sljit_uw pref;
+ sljit_u8 byte;
+#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
+ sljit_s32 opcode3 = TMP_REG1;
+#else /* !SLJIT_CONFIG_X86_32 */
+ sljit_s32 opcode3 = SLJIT_S0;
+#endif /* SLJIT_CONFIG_X86_32 */
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_lane_replicate(compiler, type, freg, src, src_lane_index));
+
+#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
+ compiler->mode32 = 1;
+#endif /* SLJIT_CONFIG_X86_64 */
+ SLJIT_ASSERT(reg_map[opcode3] == 3);
+
+ if (reg_size == 5) {
+ if (!(cpu_feature_list & CPU_FEATURE_AVX2))
+ return SLJIT_ERR_UNSUPPORTED;
+ use_vex = 1;
+ } else if (reg_size != 4)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if (type & SLJIT_SIMD_FLOAT) {
+ pref = 0;
+ byte = U8(src_lane_index);
+
+ if (elem_size == 3) {
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ if (reg_size == 5) {
+ if (src_lane_index == 0)
+ return emit_vex_instruction(compiler, VBROADCASTSD_x_xm | VEX_256 | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2, freg, 0, src, 0);
+
+ FAIL_IF(emit_vex_instruction(compiler, VPERMPD_y_ym | VEX_256 | EX86_PREF_66 | VEX_OP_0F3A | VEX_W | EX86_SSE2, freg, 0, src, 0));
+
+ byte = U8(byte | (byte << 2));
+ return emit_byte(compiler, U8(byte | (byte << 4)));
+ }
+
+ if (src_lane_index == 0) {
+ if (use_vex)
+ return emit_vex_instruction(compiler, MOVDDUP_x_xm | EX86_PREF_F2 | EX86_SSE2, freg, 0, src, 0);
+ return emit_groupf(compiler, MOVDDUP_x_xm | EX86_PREF_F2 | EX86_SSE2, freg, src, 0);
+ }
+
+ /* Changes it to SHUFPD_x_xm. */
+ pref = EX86_PREF_66;
+ } else if (elem_size != 2)
+ return SLJIT_ERR_UNSUPPORTED;
+ else if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ if (reg_size == 5) {
+ SLJIT_ASSERT(elem_size == 2);
+
+ if (src_lane_index == 0)
+ return emit_vex_instruction(compiler, VBROADCASTSS_x_xm | VEX_256 | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2, freg, 0, src, 0);
+
+ FAIL_IF(emit_vex_instruction(compiler, VPERMPD_y_ym | VEX_256 | EX86_PREF_66 | VEX_OP_0F3A | VEX_W | EX86_SSE2, freg, 0, src, 0));
+
+ byte = 0x44;
+ if (src_lane_index >= 4) {
+ byte = 0xee;
+ src_lane_index -= 4;
+ }
+
+ FAIL_IF(emit_byte(compiler, byte));
+ FAIL_IF(emit_vex_instruction(compiler, SHUFPS_x_xm | VEX_256 | pref | EX86_SSE2 | VEX_SSE2_OPV, freg, freg, freg, 0));
+ byte = U8(src_lane_index);
+ } else if (use_vex) {
+ FAIL_IF(emit_vex_instruction(compiler, SHUFPS_x_xm | pref | EX86_SSE2 | VEX_SSE2_OPV, freg, src, src, 0));
+ } else {
+ if (freg != src)
+ FAIL_IF(emit_groupf(compiler, MOVAPS_x_xm | pref | EX86_SSE2, freg, src, 0));
+
+ FAIL_IF(emit_groupf(compiler, SHUFPS_x_xm | pref | EX86_SSE2, freg, freg, 0));
}
- else {
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 2 + 3 + 2 + 2);
+
+ if (elem_size == 2) {
+ byte = U8(byte | (byte << 2));
+ byte = U8(byte | (byte << 4));
+ } else
+ byte = U8(byte | (byte << 1));
+
+ return emit_byte(compiler, U8(byte));
+ }
+
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ if (elem_size == 0) {
+ if (reg_size == 5 && src_lane_index >= 16) {
+ FAIL_IF(emit_vex_instruction(compiler, VPERMQ_y_ym | VEX_256 | EX86_PREF_66 | VEX_OP_0F3A | VEX_W | EX86_SSE2, freg, 0, src, 0));
+ FAIL_IF(emit_byte(compiler, src_lane_index >= 24 ? 0xff : 0xaa));
+ src_lane_index &= 0x7;
+ src = freg;
+ }
+
+ if (src_lane_index != 0 || (freg != src && (!(cpu_feature_list & CPU_FEATURE_AVX2) || !use_vex))) {
+ pref = 0;
+
+ if ((src_lane_index & 0x3) == 0) {
+ pref = EX86_PREF_66;
+ byte = U8(src_lane_index >> 2);
+ } else if (src_lane_index < 8 && (src_lane_index & 0x1) == 0) {
+ pref = EX86_PREF_F2;
+ byte = U8(src_lane_index >> 1);
+ } else {
+ if (!use_vex) {
+ if (freg != src)
+ FAIL_IF(emit_groupf(compiler, MOVDQA_x_xm | EX86_PREF_66 | EX86_SSE2, freg, src, 0));
+
+ FAIL_IF(emit_groupf(compiler, PSRLDQ_x | EX86_PREF_66 | EX86_SSE2_OP2, opcode3, freg, 0));
+ } else
+ FAIL_IF(emit_vex_instruction(compiler, PSRLDQ_x | EX86_PREF_66 | EX86_SSE2_OP2 | VEX_SSE2_OPV, opcode3, freg, src, 0));
+
+ FAIL_IF(emit_byte(compiler, U8(src_lane_index)));
+ }
+
+ if (pref != 0) {
+ if (use_vex)
+ FAIL_IF(emit_vex_instruction(compiler, PSHUFLW_x_xm | pref | EX86_SSE2, freg, 0, src, 0));
+ else
+ FAIL_IF(emit_groupf(compiler, PSHUFLW_x_xm | pref | EX86_SSE2, freg, src, 0));
+ FAIL_IF(emit_byte(compiler, byte));
+ }
+
+ src = freg;
+ }
+
+ if (use_vex && (cpu_feature_list & CPU_FEATURE_AVX2))
+ return emit_vex_instruction(compiler, VPBROADCASTB_x_xm | (reg_size == 5 ? VEX_256 : 0) | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2, freg, 0, src, 0);
+
+ SLJIT_ASSERT(reg_size == 4);
+ FAIL_IF(emit_groupf(compiler, PXOR_x_xm | EX86_PREF_66 | EX86_SSE2, TMP_FREG, TMP_FREG, 0));
+ return emit_groupf_ext(compiler, PSHUFB_x_xm | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2, freg, TMP_FREG, 0);
+ }
+
+ if ((cpu_feature_list & CPU_FEATURE_AVX2) && use_vex && src_lane_index == 0 && elem_size <= 3) {
+ switch (elem_size) {
+ case 1:
+ pref = VPBROADCASTW_x_xm | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2;
+ break;
+ case 2:
+ pref = VPBROADCASTD_x_xm | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2;
+ break;
+ default:
+ pref = VPBROADCASTQ_x_xm | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2;
+ break;
+ }
+
+ if (reg_size == 5)
+ pref |= VEX_256;
+
+ return emit_vex_instruction(compiler, pref, freg, 0, src, 0);
+ }
+
+ if (reg_size == 5) {
+ switch (elem_size) {
+ case 1:
+ byte = U8(src_lane_index & 0x3);
+ src_lane_index >>= 2;
+ pref = PSHUFLW_x_xm | VEX_256 | ((src_lane_index & 1) == 0 ? EX86_PREF_F2 : EX86_PREF_F3) | EX86_SSE2;
+ break;
+ case 2:
+ byte = U8(src_lane_index & 0x3);
+ src_lane_index >>= 1;
+ pref = PSHUFD_x_xm | VEX_256 | EX86_PREF_66 | EX86_SSE2;
+ break;
+ case 3:
+ pref = 0;
+ break;
+ default:
+ FAIL_IF(emit_vex_instruction(compiler, VPERMQ_y_ym | VEX_256 | EX86_PREF_66 | VEX_OP_0F3A | VEX_W | EX86_SSE2, freg, 0, src, 0));
+ return emit_byte(compiler, U8(src_lane_index == 0 ? 0x44 : 0xee));
+ }
+
+ if (pref != 0) {
+ FAIL_IF(emit_vex_instruction(compiler, pref, freg, 0, src, 0));
+ byte = U8(byte | (byte << 2));
+ FAIL_IF(emit_byte(compiler, U8(byte | (byte << 4))));
+
+ if (src_lane_index == 0)
+ return emit_vex_instruction(compiler, VPBROADCASTQ_x_xm | VEX_256 | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2, freg, 0, freg, 0);
+
+ src = freg;
+ }
+
+ FAIL_IF(emit_vex_instruction(compiler, VPERMQ_y_ym | VEX_256 | EX86_PREF_66 | VEX_OP_0F3A | VEX_W | EX86_SSE2, freg, 0, src, 0));
+ byte = U8(src_lane_index);
+ byte = U8(byte | (byte << 2));
+ return emit_byte(compiler, U8(byte | (byte << 4)));
+ }
+
+ switch (elem_size) {
+ case 1:
+ byte = U8(src_lane_index & 0x3);
+ src_lane_index >>= 1;
+ pref = (src_lane_index & 2) == 0 ? EX86_PREF_F2 : EX86_PREF_F3;
+
+ if (use_vex)
+ FAIL_IF(emit_vex_instruction(compiler, PSHUFLW_x_xm | pref | EX86_SSE2, freg, 0, src, 0));
+ else
+ FAIL_IF(emit_groupf(compiler, PSHUFLW_x_xm | pref | EX86_SSE2, freg, src, 0));
+ byte = U8(byte | (byte << 2));
+ FAIL_IF(emit_byte(compiler, U8(byte | (byte << 4))));
+
+ if ((cpu_feature_list & CPU_FEATURE_AVX2) && use_vex && pref == EX86_PREF_F2)
+ return emit_vex_instruction(compiler, VPBROADCASTD_x_xm | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2, freg, 0, freg, 0);
+
+ src = freg;
+ /* fallthrough */
+ case 2:
+ byte = U8(src_lane_index);
+ byte = U8(byte | (byte << 2));
+ break;
+ default:
+ byte = U8(src_lane_index << 1);
+ byte = U8(byte | (byte << 2) | 0x4);
+ break;
+ }
+
+ if (use_vex)
+ FAIL_IF(emit_vex_instruction(compiler, PSHUFD_x_xm | EX86_PREF_66 | EX86_SSE2, freg, 0, src, 0));
+ else
+ FAIL_IF(emit_groupf(compiler, PSHUFD_x_xm | EX86_PREF_66 | EX86_SSE2, freg, src, 0));
+ return emit_byte(compiler, U8(byte | (byte << 4)));
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_extend(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
+ sljit_s32 src, sljit_sw srcw)
+{
+ sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
+ sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
+ sljit_s32 elem2_size = SLJIT_SIMD_GET_ELEM2_SIZE(type);
+ sljit_s32 use_vex = (cpu_feature_list & CPU_FEATURE_AVX) && (compiler->options & SLJIT_ENTER_USE_VEX);
+ sljit_u8 opcode;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_extend(compiler, type, freg, src, srcw));
+
+ ADJUST_LOCAL_OFFSET(src, srcw);
+
+#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
+ compiler->mode32 = 1;
+#endif /* SLJIT_CONFIG_X86_64 */
+
+ if (reg_size == 5) {
+ if (!(cpu_feature_list & CPU_FEATURE_AVX2))
+ return SLJIT_ERR_UNSUPPORTED;
+ use_vex = 1;
+ } else if (reg_size != 4)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if (type & SLJIT_SIMD_FLOAT) {
+ if (elem_size != 2 || elem2_size != 3)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ if (use_vex)
+ return emit_vex_instruction(compiler, CVTPS2PD_x_xm | ((reg_size == 5) ? VEX_256 : 0) | EX86_SSE2, freg, 0, src, srcw);
+ return emit_groupf(compiler, CVTPS2PD_x_xm | EX86_SSE2, freg, src, srcw);
+ }
+
+ switch (elem_size) {
+ case 0:
+ if (elem2_size == 1)
+ opcode = (type & SLJIT_SIMD_EXTEND_SIGNED) ? PMOVSXBW_x_xm : PMOVZXBW_x_xm;
+ else if (elem2_size == 2)
+ opcode = (type & SLJIT_SIMD_EXTEND_SIGNED) ? PMOVSXBD_x_xm : PMOVZXBD_x_xm;
+ else if (elem2_size == 3)
+ opcode = (type & SLJIT_SIMD_EXTEND_SIGNED) ? PMOVSXBQ_x_xm : PMOVZXBQ_x_xm;
+ else
+ return SLJIT_ERR_UNSUPPORTED;
+ break;
+ case 1:
+ if (elem2_size == 2)
+ opcode = (type & SLJIT_SIMD_EXTEND_SIGNED) ? PMOVSXWD_x_xm : PMOVZXWD_x_xm;
+ else if (elem2_size == 3)
+ opcode = (type & SLJIT_SIMD_EXTEND_SIGNED) ? PMOVSXWQ_x_xm : PMOVZXWQ_x_xm;
+ else
+ return SLJIT_ERR_UNSUPPORTED;
+ break;
+ case 2:
+ if (elem2_size == 3)
+ opcode = (type & SLJIT_SIMD_EXTEND_SIGNED) ? PMOVSXDQ_x_xm : PMOVZXDQ_x_xm;
+ else
+ return SLJIT_ERR_UNSUPPORTED;
+ break;
+ default:
+ return SLJIT_ERR_UNSUPPORTED;
+ }
+
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ if (use_vex)
+ return emit_vex_instruction(compiler, opcode | ((reg_size == 5) ? VEX_256 : 0) | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2, freg, 0, src, srcw);
+ return emit_groupf_ext(compiler, opcode | EX86_PREF_66 | VEX_OP_0F38 | EX86_SSE2, freg, src, srcw);
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_sign(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 freg,
+ sljit_s32 dst, sljit_sw dstw)
+{
+ sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
+ sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
+ sljit_s32 use_vex = (cpu_feature_list & CPU_FEATURE_AVX) && (compiler->options & SLJIT_ENTER_USE_VEX);
+ sljit_s32 dst_r;
+ sljit_uw op;
+ sljit_u8 *inst;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_sign(compiler, type, freg, dst, dstw));
+
+ ADJUST_LOCAL_OFFSET(dst, dstw);
+
+ CHECK_EXTRA_REGS(dst, dstw, (void)0);
+#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
+ compiler->mode32 = 1;
+#endif /* SLJIT_CONFIG_X86_64 */
+
+ if (elem_size > 3 || ((type & SLJIT_SIMD_FLOAT) && elem_size < 2))
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if (reg_size == 4) {
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ op = EX86_PREF_66 | EX86_SSE2_OP2;
+
+ switch (elem_size) {
+ case 1:
+ if (use_vex)
+ FAIL_IF(emit_vex_instruction(compiler, PACKSSWB_x_xm | EX86_PREF_66 | EX86_SSE2 | VEX_SSE2_OPV, TMP_FREG, freg, freg, 0));
+ else
+ FAIL_IF(emit_groupf(compiler, PACKSSWB_x_xm | EX86_PREF_66 | EX86_SSE2, TMP_FREG, freg, 0));
+ freg = TMP_FREG;
+ break;
+ case 2:
+ op = EX86_SSE2_OP2;
+ break;
+ }
+
+ dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
+ op |= (elem_size < 2) ? PMOVMSKB_r_x : MOVMSKPS_r_x;
+
+ if (use_vex)
+ FAIL_IF(emit_vex_instruction(compiler, op, dst_r, 0, freg, 0));
+ else
+ FAIL_IF(emit_groupf(compiler, op, dst_r, freg, 0));
+
+#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
+ compiler->mode32 = type & SLJIT_32;
+#endif /* SLJIT_CONFIG_X86_64 */
+
+ if (elem_size == 1) {
+ inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_IMM, 8, dst_r, 0);
FAIL_IF(!inst);
- INC_SIZE(2 + 3 + 2 + 2);
- /* Set low register to conditional flag. */
- *inst++ = XCHG_r_rm;
- *inst++ = U8(MOD_REG | (1 /* ecx */ << 3) | reg_map[TMP_REG1]);
- *inst++ = GROUP_0F;
- *inst++ = cond_set;
- *inst++ = MOD_REG | 1 /* ecx */;
- *inst++ = OR_rm8_r8;
- *inst++ = MOD_REG | (1 /* ecx */ << 3) | 0 /* eax */;
- *inst++ = XCHG_r_rm;
- *inst++ = U8(MOD_REG | (1 /* ecx */ << 3) | reg_map[TMP_REG1]);
+ inst[1] |= SHR;
}
+
+ if (dst_r == TMP_REG1)
+ return emit_mov(compiler, dst, dstw, TMP_REG1, 0);
+
return SLJIT_SUCCESS;
}
- /* Set TMP_REG1 to the bit. */
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 1 + 3 + 3 + 1);
- FAIL_IF(!inst);
- INC_SIZE(1 + 3 + 3 + 1);
- *inst++ = U8(XCHG_EAX_r | reg_map[TMP_REG1]);
- /* Set al to conditional flag. */
- *inst++ = GROUP_0F;
- *inst++ = cond_set;
- *inst++ = MOD_REG | 0 /* eax */;
+ if (reg_size != 5 || !(cpu_feature_list & CPU_FEATURE_AVX2))
+ return SLJIT_ERR_UNSUPPORTED;
- *inst++ = GROUP_0F;
- *inst++ = MOVZX_r_rm8;
- *inst++ = MOD_REG | (0 << 3) /* eax */ | 0 /* eax */;
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
- *inst++ = U8(XCHG_EAX_r | reg_map[TMP_REG1]);
+ dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
- if (GET_OPCODE(op) < SLJIT_ADD)
+ if (elem_size == 1) {
+ FAIL_IF(emit_vex_instruction(compiler, VEXTRACTI128_x_ym | VEX_256 | EX86_PREF_66 | VEX_OP_0F3A | EX86_SSE2, freg, 0, TMP_FREG, 0));
+ FAIL_IF(emit_byte(compiler, 1));
+ FAIL_IF(emit_vex_instruction(compiler, PACKSSWB_x_xm | VEX_256 | EX86_PREF_66 | EX86_SSE2 | VEX_SSE2_OPV, TMP_FREG, freg, TMP_FREG, 0));
+ FAIL_IF(emit_groupf(compiler, PMOVMSKB_r_x | EX86_PREF_66 | EX86_SSE2_OP2, dst_r, TMP_FREG, 0));
+ } else {
+ op = MOVMSKPS_r_x | VEX_256 | EX86_SSE2_OP2;
+
+ if (elem_size == 0)
+ op = PMOVMSKB_r_x | VEX_256 | EX86_PREF_66 | EX86_SSE2_OP2;
+ else if (elem_size == 3)
+ op |= EX86_PREF_66;
+
+ FAIL_IF(emit_vex_instruction(compiler, op, dst_r, 0, freg, 0));
+ }
+
+ if (dst_r == TMP_REG1) {
+#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
+ compiler->mode32 = type & SLJIT_32;
+#endif /* SLJIT_CONFIG_X86_64 */
return emit_mov(compiler, dst, dstw, TMP_REG1, 0);
+ }
- SLJIT_SKIP_CHECKS(compiler);
- return sljit_emit_op2(compiler, op, dst_save, dstw_save, dst_save, dstw_save, TMP_REG1, 0);
+ return SLJIT_SUCCESS;
+}
+
+static sljit_s32 emit_simd_mov(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 dst_freg, sljit_s32 src_freg)
+{
+ sljit_uw op = ((type & SLJIT_SIMD_FLOAT) ? MOVAPS_x_xm : MOVDQA_x_xm) | EX86_SSE2;
+
+ SLJIT_ASSERT(SLJIT_SIMD_GET_REG_SIZE(type) == 4);
+
+ if (!(type & SLJIT_SIMD_FLOAT) || SLJIT_SIMD_GET_ELEM_SIZE(type) == 3)
+ op |= EX86_PREF_66;
+
+ return emit_groupf(compiler, op, dst_freg, src_freg, 0);
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_op2(struct sljit_compiler *compiler, sljit_s32 type,
+ sljit_s32 dst_freg, sljit_s32 src1_freg, sljit_s32 src2_freg)
+{
+ sljit_s32 reg_size = SLJIT_SIMD_GET_REG_SIZE(type);
+ sljit_s32 elem_size = SLJIT_SIMD_GET_ELEM_SIZE(type);
+ sljit_uw op = 0;
+
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_simd_op2(compiler, type, dst_freg, src1_freg, src2_freg));
+
+#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
+ compiler->mode32 = 1;
#endif /* SLJIT_CONFIG_X86_64 */
+
+ if (reg_size == 5) {
+ if (!(cpu_feature_list & CPU_FEATURE_AVX2))
+ return SLJIT_ERR_UNSUPPORTED;
+ } else if (reg_size != 4)
+ return SLJIT_ERR_UNSUPPORTED;
+
+ if ((type & SLJIT_SIMD_FLOAT) && (elem_size < 2 || elem_size > 3))
+ return SLJIT_ERR_UNSUPPORTED;
+
+ switch (SLJIT_SIMD_GET_OPCODE(type)) {
+ case SLJIT_SIMD_OP2_AND:
+ op = (type & SLJIT_SIMD_FLOAT) ? ANDPD_x_xm : PAND_x_xm;
+
+ if (!(type & SLJIT_SIMD_FLOAT) || elem_size == 3)
+ op |= EX86_PREF_66;
+ break;
+ case SLJIT_SIMD_OP2_OR:
+ op = (type & SLJIT_SIMD_FLOAT) ? ORPD_x_xm : POR_x_xm;
+
+ if (!(type & SLJIT_SIMD_FLOAT) || elem_size == 3)
+ op |= EX86_PREF_66;
+ break;
+ case SLJIT_SIMD_OP2_XOR:
+ op = (type & SLJIT_SIMD_FLOAT) ? XORPD_x_xm : PXOR_x_xm;
+
+ if (!(type & SLJIT_SIMD_FLOAT) || elem_size == 3)
+ op |= EX86_PREF_66;
+ break;
+ }
+
+ if (type & SLJIT_SIMD_TEST)
+ return SLJIT_SUCCESS;
+
+ if (reg_size == 5 || ((cpu_feature_list & CPU_FEATURE_AVX) && (compiler->options & SLJIT_ENTER_USE_VEX))) {
+ if (reg_size == 5)
+ op |= VEX_256;
+
+ return emit_vex_instruction(compiler, op | EX86_SSE2 | VEX_SSE2_OPV, dst_freg, src1_freg, src2_freg, 0);
+ }
+
+ if (dst_freg != src1_freg) {
+ if (dst_freg == src2_freg)
+ src2_freg = src1_freg;
+ else
+ FAIL_IF(emit_simd_mov(compiler, type, dst_freg, src1_freg));
+ }
+
+ FAIL_IF(emit_groupf(compiler, op | EX86_SSE2, dst_freg, src2_freg, 0));
+ return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_cmov(struct sljit_compiler *compiler, sljit_s32 type,
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_atomic_load(struct sljit_compiler *compiler, sljit_s32 op,
sljit_s32 dst_reg,
- sljit_s32 src, sljit_sw srcw)
+ sljit_s32 mem_reg)
{
- sljit_u8* inst;
+ CHECK_ERROR();
+ CHECK(check_sljit_emit_atomic_load(compiler, op, dst_reg, mem_reg));
+
+ SLJIT_SKIP_CHECKS(compiler);
+ return sljit_emit_op1(compiler, op, dst_reg, 0, SLJIT_MEM1(mem_reg), 0);
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_atomic_store(struct sljit_compiler *compiler, sljit_s32 op,
+ sljit_s32 src_reg,
+ sljit_s32 mem_reg,
+ sljit_s32 temp_reg)
+{
+ sljit_uw pref;
+ sljit_s32 free_reg = TMP_REG1;
+#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
+ sljit_sw srcw = 0;
+ sljit_sw tempw = 0;
+#endif /* SLJIT_CONFIG_X86_32 */
CHECK_ERROR();
- CHECK(check_sljit_emit_cmov(compiler, type, dst_reg, src, srcw));
+ CHECK(check_sljit_emit_atomic_store(compiler, op, src_reg, mem_reg, temp_reg));
+ CHECK_EXTRA_REGS(src_reg, srcw, (void)0);
+ CHECK_EXTRA_REGS(temp_reg, tempw, (void)0);
+
+ SLJIT_ASSERT(FAST_IS_REG(src_reg) || src_reg == SLJIT_MEM1(SLJIT_SP));
+ SLJIT_ASSERT(FAST_IS_REG(temp_reg) || temp_reg == SLJIT_MEM1(SLJIT_SP));
+ op = GET_OPCODE(op);
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- type &= ~SLJIT_32;
+ if ((src_reg & SLJIT_MEM) || (op == SLJIT_MOV_U8 && reg_map[src_reg] >= 4)) {
+ /* Src is virtual register or its low byte is not accessible. */
+ SLJIT_ASSERT(src_reg != SLJIT_R1);
+ free_reg = src_reg;
- if (!sljit_has_cpu_feature(SLJIT_HAS_CMOV) || (dst_reg >= SLJIT_R3 && dst_reg <= SLJIT_S3))
- return sljit_emit_cmov_generic(compiler, type, dst_reg, src, srcw);
-#else
- if (!sljit_has_cpu_feature(SLJIT_HAS_CMOV))
- return sljit_emit_cmov_generic(compiler, type, dst_reg, src, srcw);
-#endif
+ EMIT_MOV(compiler, TMP_REG1, 0, src_reg, srcw);
+ src_reg = TMP_REG1;
- /* ADJUST_LOCAL_OFFSET is not needed. */
- CHECK_EXTRA_REGS(src, srcw, (void)0);
+ if (mem_reg == src_reg)
+ mem_reg = TMP_REG1;
+ }
+#endif /* SLJIT_CONFIG_X86_32 */
+ if (temp_reg != SLJIT_R0) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- compiler->mode32 = type & SLJIT_32;
- type &= ~SLJIT_32;
-#endif
+ compiler->mode32 = 0;
- if (SLJIT_UNLIKELY(src & SLJIT_IMM)) {
- EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, srcw);
- src = TMP_REG1;
- srcw = 0;
+ EMIT_MOV(compiler, free_reg, 0, SLJIT_R0, 0);
+ EMIT_MOV(compiler, SLJIT_R0, 0, temp_reg, 0);
+
+ if (src_reg == SLJIT_R0)
+ src_reg = free_reg;
+ if (mem_reg == SLJIT_R0)
+ mem_reg = free_reg;
+#else /* !SLJIT_CONFIG_X86_64 */
+ if (src_reg == TMP_REG1 && mem_reg == SLJIT_R0 && (free_reg & SLJIT_MEM)) {
+ EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), 0, SLJIT_R1, 0);
+ EMIT_MOV(compiler, SLJIT_R1, 0, SLJIT_R0, 0);
+ EMIT_MOV(compiler, SLJIT_R0, 0, temp_reg, tempw);
+
+ mem_reg = SLJIT_R1;
+ free_reg = SLJIT_R1;
+ } else {
+ EMIT_MOV(compiler, free_reg, 0, SLJIT_R0, 0);
+ EMIT_MOV(compiler, SLJIT_R0, 0, temp_reg, tempw);
+
+ if (src_reg == SLJIT_R0)
+ src_reg = free_reg;
+ if (mem_reg == SLJIT_R0)
+ mem_reg = free_reg;
+ }
+#endif /* SLJIT_CONFIG_X86_64 */
}
- inst = emit_x86_instruction(compiler, 2, dst_reg, 0, src, srcw);
- FAIL_IF(!inst);
- *inst++ = GROUP_0F;
- *inst = U8(get_jump_code((sljit_uw)type) - 0x40);
+#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
+ compiler->mode32 = op != SLJIT_MOV && op != SLJIT_MOV_P;
+#endif /* SLJIT_CONFIG_X86_64 */
+
+ /* Lock prefix. */
+ FAIL_IF(emit_byte(compiler, GROUP_LOCK));
+
+ pref = 0;
+ if (op == SLJIT_MOV_U16)
+ pref = EX86_HALF_ARG | EX86_PREF_66;
+#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
+ if (op == SLJIT_MOV_U8)
+ pref = EX86_REX;
+#endif /* SLJIT_CONFIG_X86_64 */
+
+ FAIL_IF(emit_groupf(compiler, (op == SLJIT_MOV_U8 ? CMPXCHG_rm8_r : CMPXCHG_rm_r) | pref, src_reg, SLJIT_MEM1(mem_reg), 0));
+
+ if (temp_reg != SLJIT_R0) {
+#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
+ compiler->mode32 = 0;
+ return emit_mov(compiler, SLJIT_R0, 0, TMP_REG1, 0);
+#else /* !SLJIT_CONFIG_X86_64 */
+ EMIT_MOV(compiler, SLJIT_R0, 0, free_reg, 0);
+ if (free_reg != TMP_REG1)
+ return emit_mov(compiler, free_reg, 0, (free_reg == SLJIT_R1) ? SLJIT_MEM1(SLJIT_SP) : TMP_REG1, 0);
+#endif /* SLJIT_CONFIG_X86_64 */
+ }
return SLJIT_SUCCESS;
}
@@ -3336,11 +4920,10 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compi
return NULL;
#endif
- inst = (sljit_u8*)ensure_buf(compiler, 2);
+ inst = (sljit_u8*)ensure_buf(compiler, 1);
PTR_FAIL_IF(!inst);
- *inst++ = 0;
- *inst++ = 2;
+ inst[0] = SLJIT_INST_CONST;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
if (dst & SLJIT_MEM)
@@ -3351,52 +4934,48 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compi
return const_;
}
-SLJIT_API_FUNC_ATTRIBUTE struct sljit_put_label* sljit_emit_put_label(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_mov_addr(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
{
- struct sljit_put_label *put_label;
+ struct sljit_jump *jump;
sljit_u8 *inst;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
sljit_s32 reg;
- sljit_uw start_size;
-#endif
+#endif /* SLJIT_CONFIG_X86_64 */
CHECK_ERROR_PTR();
- CHECK_PTR(check_sljit_emit_put_label(compiler, dst, dstw));
+ CHECK_PTR(check_sljit_emit_mov_addr(compiler, dst, dstw));
ADJUST_LOCAL_OFFSET(dst, dstw);
CHECK_EXTRA_REGS(dst, dstw, (void)0);
- put_label = (struct sljit_put_label*)ensure_abuf(compiler, sizeof(struct sljit_put_label));
- PTR_FAIL_IF(!put_label);
- set_put_label(put_label, compiler, 0);
+ jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
+ PTR_FAIL_IF(!jump);
+ set_mov_addr(jump, compiler, 0);
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
compiler->mode32 = 0;
reg = FAST_IS_REG(dst) ? dst : TMP_REG1;
- if (emit_load_imm64(compiler, reg, 0))
- return NULL;
-#else
- if (emit_mov(compiler, dst, dstw, SLJIT_IMM, 0))
- return NULL;
-#endif
+ PTR_FAIL_IF(emit_load_imm64(compiler, reg, 0));
+ jump->addr = compiler->size;
-#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- if (dst & SLJIT_MEM) {
- start_size = compiler->size;
- if (emit_mov(compiler, dst, dstw, TMP_REG1, 0))
- return NULL;
- put_label->flags = compiler->size - start_size;
- }
-#endif
+ if (reg_map[reg] >= 8)
+ jump->flags |= MOV_ADDR_HI;
+#else /* !SLJIT_CONFIG_X86_64 */
+ PTR_FAIL_IF(emit_mov(compiler, dst, dstw, SLJIT_IMM, 0));
+#endif /* SLJIT_CONFIG_X86_64 */
- inst = (sljit_u8*)ensure_buf(compiler, 2);
+ inst = (sljit_u8*)ensure_buf(compiler, 1);
PTR_FAIL_IF(!inst);
- *inst++ = 0;
- *inst++ = 3;
+ inst[0] = SLJIT_INST_MOV_ADDR;
- return put_label;
+#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
+ if (dst & SLJIT_MEM)
+ PTR_FAIL_IF(emit_mov(compiler, dst, dstw, TMP_REG1, 0));
+#endif /* SLJIT_CONFIG_X86_64 */
+
+ return jump;
}
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_jump_addr(sljit_uw addr, sljit_uw new_target, sljit_sw executable_offset)
diff --git a/src/3rdparty/pcre2/src/sljit/sljitProtExecAllocator.c b/src/3rdparty/pcre2/src/sljit/sljitProtExecAllocator.c
deleted file mode 100644
index 915411fbed..0000000000
--- a/src/3rdparty/pcre2/src/sljit/sljitProtExecAllocator.c
+++ /dev/null
@@ -1,474 +0,0 @@
-/*
- * Stack-less Just-In-Time compiler
- *
- * Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without modification, are
- * permitted provided that the following conditions are met:
- *
- * 1. Redistributions of source code must retain the above copyright notice, this list of
- * conditions and the following disclaimer.
- *
- * 2. Redistributions in binary form must reproduce the above copyright notice, this list
- * of conditions and the following disclaimer in the documentation and/or other materials
- * provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND CONTRIBUTORS ``AS IS'' AND ANY
- * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
- * SHALL THE COPYRIGHT HOLDER(S) OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
- * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
- * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
- * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
- * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
- * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-/*
- This file contains a simple executable memory allocator
-
- It is assumed that executable code blocks are usually medium (or sometimes
- large) memory blocks, and the allocator is not too frequently called (less
- optimized than other allocators). Thus, using it as a generic allocator is
- not suggested.
-
- How does it work:
- Memory is allocated in continuous memory areas called chunks by alloc_chunk()
- Chunk format:
- [ block ][ block ] ... [ block ][ block terminator ]
-
- All blocks and the block terminator start with a block_header. The block
- header contains the size of the previous and the next block. These sizes
- can also contain special values.
- Block size:
- 0 - The block is a free_block, with a different size member.
- 1 - The block is a block terminator.
- n - The block is used at the moment, and the value contains its size.
- Previous block size:
- 0 - This is the first block of the memory chunk.
- n - The size of the previous block.
-
- Using these size values we can go forward or backward on the block chain.
- The unused blocks are stored in a chain list pointed to by free_blocks. This
- list is useful if we need to find a suitable memory area when the allocator
- is called.
-
- When a block is freed, the new free block is connected to its adjacent free
- blocks if possible.
-
- For example, if the chunk contains [ free block ][ used block ][ free block ]
- and the "used block" is freed, the three blocks are connected together:
- [ one big free block ]
-*/
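
The size / prev_size pairs described above make each chunk a doubly linked sequence of blocks. A minimal sketch of the traversal they enable (simplified; the helper names are invented for illustration, the real struct block_header appears further down in this file):

static struct block_header *walk_next(struct block_header *h)
{
	/* h->size is the byte size of this block; 1 marks the block terminator,
	   0 marks a free block (whose real size lives in struct free_block). */
	return (struct block_header*)((sljit_u8*)h + h->size);
}

static struct block_header *walk_prev(struct block_header *h)
{
	/* h->prev_size == 0 means h is the first block of the chunk. */
	return (struct block_header*)((sljit_u8*)h - h->prev_size);
}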
-
-/* --------------------------------------------------------------------- */
-/* System (OS) functions */
-/* --------------------------------------------------------------------- */
-
-/* 64 KByte. */
-#define CHUNK_SIZE (sljit_uw)0x10000
-
-struct chunk_header {
- void *executable;
-};
-
-/*
- alloc_chunk / free_chunk :
- * allocate executable system memory chunks
- * the size is always divisible by CHUNK_SIZE
- SLJIT_ALLOCATOR_LOCK / SLJIT_ALLOCATOR_UNLOCK :
- * provided as part of sljitUtils
- * only the allocator requires this lock, sljit is fully thread safe
- as it only uses local variables
-*/
-
-#ifndef __NetBSD__
-#include <sys/stat.h>
-#include <fcntl.h>
-#include <stdio.h>
-#include <string.h>
-
-#ifndef O_NOATIME
-#define O_NOATIME 0
-#endif
-
-/* this is a linux extension available since kernel 3.11 */
-#ifndef O_TMPFILE
-#define O_TMPFILE 020200000
-#endif
-
-#ifndef _GNU_SOURCE
-char *secure_getenv(const char *name);
-int mkostemp(char *template, int flags);
-#endif
-
-static SLJIT_INLINE int create_tempfile(void)
-{
- int fd;
- char tmp_name[256];
- size_t tmp_name_len = 0;
- char *dir;
- struct stat st;
-#if defined(SLJIT_SINGLE_THREADED) && SLJIT_SINGLE_THREADED
- mode_t mode;
-#endif
-
-#ifdef HAVE_MEMFD_CREATE
- /* this is a GNU extension, make sure to use -D_GNU_SOURCE */
- fd = memfd_create("sljit", MFD_CLOEXEC);
- if (fd != -1) {
- fchmod(fd, 0);
- return fd;
- }
-#endif
-
- dir = secure_getenv("TMPDIR");
-
- if (dir) {
- tmp_name_len = strlen(dir);
- if (tmp_name_len > 0 && tmp_name_len < sizeof(tmp_name)) {
- if ((stat(dir, &st) == 0) && S_ISDIR(st.st_mode))
- strcpy(tmp_name, dir);
- }
- }
-
-#ifdef P_tmpdir
- if (!tmp_name_len) {
- tmp_name_len = strlen(P_tmpdir);
- if (tmp_name_len > 0 && tmp_name_len < sizeof(tmp_name))
- strcpy(tmp_name, P_tmpdir);
- }
-#endif
- if (!tmp_name_len) {
- strcpy(tmp_name, "/tmp");
- tmp_name_len = 4;
- }
-
- SLJIT_ASSERT(tmp_name_len > 0 && tmp_name_len < sizeof(tmp_name));
-
- if (tmp_name[tmp_name_len - 1] == '/')
- tmp_name[--tmp_name_len] = '\0';
-
-#ifdef __linux__
- /*
- * the previous trimming might have left an empty string if TMPDIR="/"
- * so work around the problem below
- */
- fd = open(tmp_name_len ? tmp_name : "/",
- O_TMPFILE | O_EXCL | O_RDWR | O_NOATIME | O_CLOEXEC, 0);
- if (fd != -1)
- return fd;
-#endif
-
- if (tmp_name_len + 7 >= sizeof(tmp_name))
- return -1;
-
- strcpy(tmp_name + tmp_name_len, "/XXXXXX");
-#if defined(SLJIT_SINGLE_THREADED) && SLJIT_SINGLE_THREADED
- mode = umask(0777);
-#endif
- fd = mkostemp(tmp_name, O_CLOEXEC | O_NOATIME);
-#if defined(SLJIT_SINGLE_THREADED) && SLJIT_SINGLE_THREADED
- umask(mode);
-#else
- fchmod(fd, 0);
-#endif
-
- if (fd == -1)
- return -1;
-
- if (unlink(tmp_name)) {
- close(fd);
- return -1;
- }
-
- return fd;
-}
-
-static SLJIT_INLINE struct chunk_header* alloc_chunk(sljit_uw size)
-{
- struct chunk_header *retval;
- int fd;
-
- fd = create_tempfile();
- if (fd == -1)
- return NULL;
-
- if (ftruncate(fd, (off_t)size)) {
- close(fd);
- return NULL;
- }
-
- retval = (struct chunk_header *)mmap(NULL, size, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0);
-
- if (retval == MAP_FAILED) {
- close(fd);
- return NULL;
- }
-
- retval->executable = mmap(NULL, size, PROT_READ | PROT_EXEC, MAP_SHARED, fd, 0);
-
- if (retval->executable == MAP_FAILED) {
- munmap((void *)retval, size);
- close(fd);
- return NULL;
- }
-
- close(fd);
- return retval;
-}
-#else
-/*
- * MAP_REMAPDUP is a NetBSD extension available since 8.0, make sure to
- * adjust your feature macros (ex: -D_NETBSD_SOURCE) as needed
- */
-static SLJIT_INLINE struct chunk_header* alloc_chunk(sljit_uw size)
-{
- struct chunk_header *retval;
-
- retval = (struct chunk_header *)mmap(NULL, size,
- PROT_READ | PROT_WRITE | PROT_MPROTECT(PROT_EXEC),
- MAP_ANON | MAP_SHARED, -1, 0);
-
- if (retval == MAP_FAILED)
- return NULL;
-
- retval->executable = mremap(retval, size, NULL, size, MAP_REMAPDUP);
- if (retval->executable == MAP_FAILED) {
- munmap((void *)retval, size);
- return NULL;
- }
-
- if (mprotect(retval->executable, size, PROT_READ | PROT_EXEC) == -1) {
- munmap(retval->executable, size);
- munmap((void *)retval, size);
- return NULL;
- }
-
- return retval;
-}
-#endif /* NetBSD */
-
-static SLJIT_INLINE void free_chunk(void *chunk, sljit_uw size)
-{
- struct chunk_header *header = ((struct chunk_header *)chunk) - 1;
-
- munmap(header->executable, size);
- munmap((void *)header, size);
-}
-
-/* --------------------------------------------------------------------- */
-/* Common functions */
-/* --------------------------------------------------------------------- */
-
-#define CHUNK_MASK (~(CHUNK_SIZE - 1))
-
-struct block_header {
- sljit_uw size;
- sljit_uw prev_size;
- sljit_sw executable_offset;
-};
-
-struct free_block {
- struct block_header header;
- struct free_block *next;
- struct free_block *prev;
- sljit_uw size;
-};
-
-#define AS_BLOCK_HEADER(base, offset) \
- ((struct block_header*)(((sljit_u8*)base) + offset))
-#define AS_FREE_BLOCK(base, offset) \
- ((struct free_block*)(((sljit_u8*)base) + offset))
-#define MEM_START(base) ((void*)((base) + 1))
-#define ALIGN_SIZE(size) (((size) + sizeof(struct block_header) + 7u) & ~(sljit_uw)7)
-
-static struct free_block* free_blocks;
-static sljit_uw allocated_size;
-static sljit_uw total_size;
-
-static SLJIT_INLINE void sljit_insert_free_block(struct free_block *free_block, sljit_uw size)
-{
- free_block->header.size = 0;
- free_block->size = size;
-
- free_block->next = free_blocks;
- free_block->prev = NULL;
- if (free_blocks)
- free_blocks->prev = free_block;
- free_blocks = free_block;
-}
-
-static SLJIT_INLINE void sljit_remove_free_block(struct free_block *free_block)
-{
- if (free_block->next)
- free_block->next->prev = free_block->prev;
-
- if (free_block->prev)
- free_block->prev->next = free_block->next;
- else {
- SLJIT_ASSERT(free_blocks == free_block);
- free_blocks = free_block->next;
- }
-}
-
-SLJIT_API_FUNC_ATTRIBUTE void* sljit_malloc_exec(sljit_uw size)
-{
- struct chunk_header *chunk_header;
- struct block_header *header;
- struct block_header *next_header;
- struct free_block *free_block;
- sljit_uw chunk_size;
- sljit_sw executable_offset;
-
- SLJIT_ALLOCATOR_LOCK();
- if (size < (64 - sizeof(struct block_header)))
- size = (64 - sizeof(struct block_header));
- size = ALIGN_SIZE(size);
-
- free_block = free_blocks;
- while (free_block) {
- if (free_block->size >= size) {
- chunk_size = free_block->size;
- if (chunk_size > size + 64) {
- /* We just cut a block from the end of the free block. */
- chunk_size -= size;
- free_block->size = chunk_size;
- header = AS_BLOCK_HEADER(free_block, chunk_size);
- header->prev_size = chunk_size;
- header->executable_offset = free_block->header.executable_offset;
- AS_BLOCK_HEADER(header, size)->prev_size = size;
- }
- else {
- sljit_remove_free_block(free_block);
- header = (struct block_header*)free_block;
- size = chunk_size;
- }
- allocated_size += size;
- header->size = size;
- SLJIT_ALLOCATOR_UNLOCK();
- return MEM_START(header);
- }
- free_block = free_block->next;
- }
-
- chunk_size = sizeof(struct chunk_header) + sizeof(struct block_header);
- chunk_size = (chunk_size + size + CHUNK_SIZE - 1) & CHUNK_MASK;
-
- chunk_header = alloc_chunk(chunk_size);
- if (!chunk_header) {
- SLJIT_ALLOCATOR_UNLOCK();
- return NULL;
- }
-
- executable_offset = (sljit_sw)((sljit_u8*)chunk_header->executable - (sljit_u8*)chunk_header);
-
- chunk_size -= sizeof(struct chunk_header) + sizeof(struct block_header);
- total_size += chunk_size;
-
- header = (struct block_header *)(chunk_header + 1);
-
- header->prev_size = 0;
- header->executable_offset = executable_offset;
- if (chunk_size > size + 64) {
- /* Cut the allocated space into a free and a used block. */
- allocated_size += size;
- header->size = size;
- chunk_size -= size;
-
- free_block = AS_FREE_BLOCK(header, size);
- free_block->header.prev_size = size;
- free_block->header.executable_offset = executable_offset;
- sljit_insert_free_block(free_block, chunk_size);
- next_header = AS_BLOCK_HEADER(free_block, chunk_size);
- }
- else {
- /* All space belongs to this allocation. */
- allocated_size += chunk_size;
- header->size = chunk_size;
- next_header = AS_BLOCK_HEADER(header, chunk_size);
- }
- next_header->size = 1;
- next_header->prev_size = chunk_size;
- next_header->executable_offset = executable_offset;
- SLJIT_ALLOCATOR_UNLOCK();
- return MEM_START(header);
-}
-
-SLJIT_API_FUNC_ATTRIBUTE void sljit_free_exec(void* ptr)
-{
- struct block_header *header;
- struct free_block* free_block;
-
- SLJIT_ALLOCATOR_LOCK();
- header = AS_BLOCK_HEADER(ptr, -(sljit_sw)sizeof(struct block_header));
- header = AS_BLOCK_HEADER(header, -header->executable_offset);
- allocated_size -= header->size;
-
- /* Connecting free blocks together if possible. */
-
- /* If header->prev_size == 0, free_block will be equal to header.
- In this case, free_block->header.size will be > 0. */
- free_block = AS_FREE_BLOCK(header, -(sljit_sw)header->prev_size);
- if (SLJIT_UNLIKELY(!free_block->header.size)) {
- free_block->size += header->size;
- header = AS_BLOCK_HEADER(free_block, free_block->size);
- header->prev_size = free_block->size;
- }
- else {
- free_block = (struct free_block*)header;
- sljit_insert_free_block(free_block, header->size);
- }
-
- header = AS_BLOCK_HEADER(free_block, free_block->size);
- if (SLJIT_UNLIKELY(!header->size)) {
- free_block->size += ((struct free_block*)header)->size;
- sljit_remove_free_block((struct free_block*)header);
- header = AS_BLOCK_HEADER(free_block, free_block->size);
- header->prev_size = free_block->size;
- }
-
- /* The whole chunk is free. */
- if (SLJIT_UNLIKELY(!free_block->header.prev_size && header->size == 1)) {
- /* If this block is freed, we still have (allocated_size / 2) free space. */
- if (total_size - free_block->size > (allocated_size * 3 / 2)) {
- total_size -= free_block->size;
- sljit_remove_free_block(free_block);
- free_chunk(free_block, free_block->size +
- sizeof(struct chunk_header) +
- sizeof(struct block_header));
- }
- }
-
- SLJIT_ALLOCATOR_UNLOCK();
-}
-
-SLJIT_API_FUNC_ATTRIBUTE void sljit_free_unused_memory_exec(void)
-{
- struct free_block* free_block;
- struct free_block* next_free_block;
-
- SLJIT_ALLOCATOR_LOCK();
-
- free_block = free_blocks;
- while (free_block) {
- next_free_block = free_block->next;
- if (!free_block->header.prev_size &&
- AS_BLOCK_HEADER(free_block, free_block->size)->size == 1) {
- total_size -= free_block->size;
- sljit_remove_free_block(free_block);
- free_chunk(free_block, free_block->size +
- sizeof(struct chunk_header) +
- sizeof(struct block_header));
- }
- free_block = next_free_block;
- }
-
- SLJIT_ASSERT((total_size && free_blocks) || (!total_size && !free_blocks));
- SLJIT_ALLOCATOR_UNLOCK();
-}
-
-SLJIT_API_FUNC_ATTRIBUTE sljit_sw sljit_exec_offset(void* ptr)
-{
- return ((struct block_header *)(ptr))[-1].executable_offset;
-}
diff --git a/src/3rdparty/pcre2/src/sljit/sljitSerialize.c b/src/3rdparty/pcre2/src/sljit/sljitSerialize.c
new file mode 100644
index 0000000000..6ef161fd49
--- /dev/null
+++ b/src/3rdparty/pcre2/src/sljit/sljitSerialize.c
@@ -0,0 +1,516 @@
+/*
+ * Stack-less Just-In-Time compiler
+ *
+ * Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this list of
+ * conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice, this list
+ * of conditions and the following disclaimer in the documentation and/or other materials
+ * provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND CONTRIBUTORS ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
+ * SHALL THE COPYRIGHT HOLDER(S) OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+ * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+ * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_jump_has_label(struct sljit_jump *jump)
+{
+ return !(jump->flags & JUMP_ADDR) && (jump->u.label != NULL);
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_jump_has_target(struct sljit_jump *jump)
+{
+ return (jump->flags & JUMP_ADDR) != 0;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_jump_is_mov_addr(struct sljit_jump *jump)
+{
+ return (jump->flags & JUMP_MOV_ADDR) != 0;
+}
+
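
These predicates are mainly useful after deserialization (see sljit_deserialize_compiler below), where a jump whose label was never set comes back unresolved. A minimal sketch of how a caller might patch such a jump (assumed usage; jump and my_stub_address are placeholders):

	if (!sljit_jump_is_mov_addr(jump)
			&& !sljit_jump_has_label(jump) && !sljit_jump_has_target(jump)) {
		/* Neither a label nor an absolute target: bind it now. */
		sljit_set_target(jump, (sljit_uw)my_stub_address);
	}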
+#define SLJIT_SERIALIZE_DEBUG ((sljit_u16)0x1)
+
+struct sljit_serialized_compiler {
+ sljit_u32 signature;
+ sljit_u16 version;
+ sljit_u16 cpu_type;
+
+ sljit_uw buf_segment_count;
+ sljit_uw label_count;
+ sljit_uw jump_count;
+ sljit_uw const_count;
+
+ sljit_s32 options;
+ sljit_s32 scratches;
+ sljit_s32 saveds;
+ sljit_s32 fscratches;
+ sljit_s32 fsaveds;
+ sljit_s32 local_size;
+ sljit_uw size;
+
+#if (defined SLJIT_HAS_STATUS_FLAGS_STATE && SLJIT_HAS_STATUS_FLAGS_STATE)
+ sljit_s32 status_flags_state;
+#endif /* SLJIT_HAS_STATUS_FLAGS_STATE */
+
+#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
+ sljit_s32 args_size;
+#endif /* SLJIT_CONFIG_X86_32 */
+
+#if ((defined SLJIT_CONFIG_ARM_32 && SLJIT_CONFIG_ARM_32) && (defined __SOFTFP__)) \
+ || (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
+ sljit_uw args_size;
+#endif /* (SLJIT_CONFIG_ARM_32 && __SOFTFP__) || SLJIT_CONFIG_MIPS_32 */
+
+#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
+ sljit_uw cpool_diff;
+ sljit_uw cpool_fill;
+ sljit_uw patches;
+#endif /* SLJIT_CONFIG_ARM_V6 */
+
+#if (defined SLJIT_CONFIG_MIPS && SLJIT_CONFIG_MIPS)
+ sljit_s32 delay_slot;
+#endif /* SLJIT_CONFIG_MIPS */
+
+};
+
+struct sljit_serialized_debug_info {
+ sljit_sw last_flags;
+ sljit_s32 last_return;
+ sljit_s32 logical_local_size;
+};
+
+struct sljit_serialized_label {
+ sljit_uw size;
+};
+
+struct sljit_serialized_jump {
+ sljit_uw addr;
+ sljit_uw flags;
+ sljit_uw value;
+};
+
+struct sljit_serialized_const {
+ sljit_uw addr;
+};
+
+#define SLJIT_SERIALIZE_ALIGN(v) (((v) + sizeof(sljit_uw) - 1) & ~(sljit_uw)(sizeof(sljit_uw) - 1))
+#if (defined SLJIT_LITTLE_ENDIAN && SLJIT_LITTLE_ENDIAN)
+#define SLJIT_SERIALIZE_SIGNATURE 0x534c4a54
+#else /* !SLJIT_LITTLE_ENDIAN */
+#define SLJIT_SERIALIZE_SIGNATURE 0x544a4c53
+#endif /* SLJIT_LITTLE_ENDIAN */
+#define SLJIT_SERIALIZE_VERSION 1
+
+SLJIT_API_FUNC_ATTRIBUTE sljit_uw* sljit_serialize_compiler(struct sljit_compiler *compiler,
+ sljit_s32 options, sljit_uw *size)
+{
+ sljit_uw serialized_size = sizeof(struct sljit_serialized_compiler);
+ struct sljit_memory_fragment *buf;
+ struct sljit_label *label;
+ struct sljit_jump *jump;
+ struct sljit_const *const_;
+ struct sljit_serialized_compiler *serialized_compiler;
+ struct sljit_serialized_label *serialized_label;
+ struct sljit_serialized_jump *serialized_jump;
+ struct sljit_serialized_const *serialized_const;
+#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS) \
+ || (defined SLJIT_DEBUG && SLJIT_DEBUG)
+ struct sljit_serialized_debug_info *serialized_debug_info;
+#endif /* SLJIT_ARGUMENT_CHECKS || SLJIT_DEBUG */
+ sljit_uw counter, used_size;
+ sljit_u8 *result;
+ sljit_u8 *ptr;
+ SLJIT_UNUSED_ARG(options);
+
+ if (size != NULL)
+ *size = 0;
+
+ PTR_FAIL_IF(compiler->error);
+
+#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS) \
+ || (defined SLJIT_DEBUG && SLJIT_DEBUG)
+ if (!(options & SLJIT_SERIALIZE_IGNORE_DEBUG))
+ serialized_size += sizeof(struct sljit_serialized_debug_info);
+#endif /* SLJIT_ARGUMENT_CHECKS || SLJIT_DEBUG */
+
+#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
+ serialized_size += SLJIT_SERIALIZE_ALIGN(compiler->cpool_fill * (sizeof(sljit_uw) + 1));
+#endif /* SLJIT_CONFIG_ARM_V6 */
+
+ /* Compute the size of the data. */
+ buf = compiler->buf;
+ while (buf != NULL) {
+ serialized_size += sizeof(sljit_uw) + SLJIT_SERIALIZE_ALIGN(buf->used_size);
+ buf = buf->next;
+ }
+
+ serialized_size += compiler->label_count * sizeof(struct sljit_serialized_label);
+
+ jump = compiler->jumps;
+ while (jump != NULL) {
+ serialized_size += sizeof(struct sljit_serialized_jump);
+ jump = jump->next;
+ }
+
+ const_ = compiler->consts;
+ while (const_ != NULL) {
+ serialized_size += sizeof(struct sljit_serialized_const);
+ const_ = const_->next;
+ }
+
+ result = (sljit_u8*)SLJIT_MALLOC(serialized_size, compiler->allocator_data);
+ PTR_FAIL_IF_NULL(result);
+
+ if (size != NULL)
+ *size = serialized_size;
+
+ ptr = result;
+ serialized_compiler = (struct sljit_serialized_compiler*)ptr;
+ ptr += sizeof(struct sljit_serialized_compiler);
+
+ serialized_compiler->signature = SLJIT_SERIALIZE_SIGNATURE;
+ serialized_compiler->version = SLJIT_SERIALIZE_VERSION;
+ serialized_compiler->cpu_type = 0;
+ serialized_compiler->label_count = compiler->label_count;
+ serialized_compiler->options = compiler->options;
+ serialized_compiler->scratches = compiler->scratches;
+ serialized_compiler->saveds = compiler->saveds;
+ serialized_compiler->fscratches = compiler->fscratches;
+ serialized_compiler->fsaveds = compiler->fsaveds;
+ serialized_compiler->local_size = compiler->local_size;
+ serialized_compiler->size = compiler->size;
+
+#if (defined SLJIT_HAS_STATUS_FLAGS_STATE && SLJIT_HAS_STATUS_FLAGS_STATE)
+ serialized_compiler->status_flags_state = compiler->status_flags_state;
+#endif /* SLJIT_HAS_STATUS_FLAGS_STATE */
+
+#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32) \
+ || ((defined SLJIT_CONFIG_ARM_32 && SLJIT_CONFIG_ARM_32) && (defined __SOFTFP__)) \
+ || (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
+ serialized_compiler->args_size = compiler->args_size;
+#endif /* SLJIT_CONFIG_X86_32 || (SLJIT_CONFIG_ARM_32 && __SOFTFP__) || SLJIT_CONFIG_MIPS_32 */
+
+#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
+ serialized_compiler->cpool_diff = compiler->cpool_diff;
+ serialized_compiler->cpool_fill = compiler->cpool_fill;
+ serialized_compiler->patches = compiler->patches;
+
+ SLJIT_MEMCPY(ptr, compiler->cpool, compiler->cpool_fill * sizeof(sljit_uw));
+ SLJIT_MEMCPY(ptr + compiler->cpool_fill * sizeof(sljit_uw), compiler->cpool_unique, compiler->cpool_fill);
+ ptr += SLJIT_SERIALIZE_ALIGN(compiler->cpool_fill * (sizeof(sljit_uw) + 1));
+#endif /* SLJIT_CONFIG_ARM_V6 */
+
+#if (defined SLJIT_CONFIG_MIPS && SLJIT_CONFIG_MIPS)
+ serialized_compiler->delay_slot = compiler->delay_slot;
+#endif /* SLJIT_CONFIG_MIPS */
+
+ buf = compiler->buf;
+ counter = 0;
+ while (buf != NULL) {
+ used_size = buf->used_size;
+ *(sljit_uw*)ptr = used_size;
+ ptr += sizeof(sljit_uw);
+ SLJIT_MEMCPY(ptr, buf->memory, used_size);
+ ptr += SLJIT_SERIALIZE_ALIGN(used_size);
+ buf = buf->next;
+ counter++;
+ }
+ serialized_compiler->buf_segment_count = counter;
+
+ label = compiler->labels;
+ while (label != NULL) {
+ serialized_label = (struct sljit_serialized_label*)ptr;
+ serialized_label->size = label->size;
+ ptr += sizeof(struct sljit_serialized_label);
+ label = label->next;
+ }
+
+ jump = compiler->jumps;
+ counter = 0;
+ while (jump != NULL) {
+ serialized_jump = (struct sljit_serialized_jump*)ptr;
+ serialized_jump->addr = jump->addr;
+ serialized_jump->flags = jump->flags;
+
+ if (jump->flags & JUMP_ADDR)
+ serialized_jump->value = jump->u.target;
+ else if (jump->u.label != NULL)
+ serialized_jump->value = jump->u.label->u.index;
+ else
+ serialized_jump->value = SLJIT_MAX_ADDRESS;
+
+ ptr += sizeof(struct sljit_serialized_jump);
+ jump = jump->next;
+ counter++;
+ }
+ serialized_compiler->jump_count = counter;
+
+ const_ = compiler->consts;
+ counter = 0;
+ while (const_ != NULL) {
+ serialized_const = (struct sljit_serialized_const*)ptr;
+ serialized_const->addr = const_->addr;
+ ptr += sizeof(struct sljit_serialized_const);
+ const_ = const_->next;
+ counter++;
+ }
+ serialized_compiler->const_count = counter;
+
+#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS) \
+ || (defined SLJIT_DEBUG && SLJIT_DEBUG)
+ if (!(options & SLJIT_SERIALIZE_IGNORE_DEBUG)) {
+ serialized_debug_info = (struct sljit_serialized_debug_info*)ptr;
+ serialized_debug_info->last_flags = compiler->last_flags;
+ serialized_debug_info->last_return = compiler->last_return;
+ serialized_debug_info->logical_local_size = compiler->logical_local_size;
+ serialized_compiler->cpu_type |= SLJIT_SERIALIZE_DEBUG;
+#if (defined SLJIT_DEBUG && SLJIT_DEBUG)
+ ptr += sizeof(struct sljit_serialized_debug_info);
+#endif /* SLJIT_DEBUG */
+ }
+#endif /* SLJIT_ARGUMENT_CHECKS || SLJIT_DEBUG */
+
+ SLJIT_ASSERT((sljit_uw)(ptr - result) == serialized_size);
+ return (sljit_uw*)result;
+}
+
+SLJIT_API_FUNC_ATTRIBUTE struct sljit_compiler *sljit_deserialize_compiler(sljit_uw* buffer, sljit_uw size,
+ sljit_s32 options, void *allocator_data)
+{
+ struct sljit_compiler *compiler;
+ struct sljit_serialized_compiler *serialized_compiler;
+ struct sljit_serialized_label *serialized_label;
+ struct sljit_serialized_jump *serialized_jump;
+ struct sljit_serialized_const *serialized_const;
+#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS) \
+ || (defined SLJIT_DEBUG && SLJIT_DEBUG)
+ struct sljit_serialized_debug_info *serialized_debug_info;
+#endif /* SLJIT_ARGUMENT_CHECKS || SLJIT_DEBUG */
+ struct sljit_memory_fragment *buf;
+ struct sljit_memory_fragment *last_buf;
+ struct sljit_label *label;
+ struct sljit_label *last_label;
+ struct sljit_label **label_list = NULL;
+ struct sljit_jump *jump;
+ struct sljit_jump *last_jump;
+ struct sljit_const *const_;
+ struct sljit_const *last_const;
+ sljit_u8 *ptr = (sljit_u8*)buffer;
+ sljit_u8 *end = ptr + size;
+ sljit_uw i, used_size, aligned_size, label_count;
+ SLJIT_UNUSED_ARG(options);
+
+ if (size < sizeof(struct sljit_serialized_compiler) || (size & (sizeof(sljit_uw) - 1)) != 0)
+ return NULL;
+
+ serialized_compiler = (struct sljit_serialized_compiler*)ptr;
+
+ if (serialized_compiler->signature != SLJIT_SERIALIZE_SIGNATURE || serialized_compiler->version != SLJIT_SERIALIZE_VERSION)
+ return NULL;
+
+ compiler = sljit_create_compiler(allocator_data);
+ PTR_FAIL_IF(compiler == NULL);
+
+ compiler->label_count = serialized_compiler->label_count;
+ compiler->options = serialized_compiler->options;
+ compiler->scratches = serialized_compiler->scratches;
+ compiler->saveds = serialized_compiler->saveds;
+ compiler->fscratches = serialized_compiler->fscratches;
+ compiler->fsaveds = serialized_compiler->fsaveds;
+ compiler->local_size = serialized_compiler->local_size;
+ compiler->size = serialized_compiler->size;
+
+#if (defined SLJIT_HAS_STATUS_FLAGS_STATE && SLJIT_HAS_STATUS_FLAGS_STATE)
+ compiler->status_flags_state = serialized_compiler->status_flags_state;
+#endif /* SLJIT_HAS_STATUS_FLAGS_STATE */
+
+#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32) \
+ || ((defined SLJIT_CONFIG_ARM_32 && SLJIT_CONFIG_ARM_32) && (defined __SOFTFP__)) \
+ || (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
+ compiler->args_size = serialized_compiler->args_size;
+#endif /* SLJIT_CONFIG_X86_32 || (SLJIT_CONFIG_ARM_32 && __SOFTFP__) || SLJIT_CONFIG_MIPS_32 */
+
+#if (defined SLJIT_CONFIG_ARM_V6 && SLJIT_CONFIG_ARM_V6)
+ used_size = serialized_compiler->cpool_fill;
+ aligned_size = SLJIT_SERIALIZE_ALIGN(used_size * (sizeof(sljit_uw) + 1));
+ compiler->cpool_diff = serialized_compiler->cpool_diff;
+ compiler->cpool_fill = used_size;
+ compiler->patches = serialized_compiler->patches;
+
+ if ((sljit_uw)(end - ptr) < aligned_size)
+ goto error;
+
+ SLJIT_MEMCPY(compiler->cpool, ptr, used_size * sizeof(sljit_uw));
+ SLJIT_MEMCPY(compiler->cpool_unique, ptr + used_size * sizeof(sljit_uw), used_size);
+ ptr += aligned_size;
+#endif /* SLJIT_CONFIG_ARM_V6 */
+
+#if (defined SLJIT_CONFIG_MIPS && SLJIT_CONFIG_MIPS)
+ compiler->delay_slot = serialized_compiler->delay_slot;
+#endif /* SLJIT_CONFIG_MIPS */
+
+#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS) \
+ || (defined SLJIT_DEBUG && SLJIT_DEBUG)
+ if (!(serialized_compiler->cpu_type & SLJIT_SERIALIZE_DEBUG))
+ goto error;
+#endif /* SLJIT_ARGUMENT_CHECKS || SLJIT_DEBUG */
+
+ ptr += sizeof(struct sljit_serialized_compiler);
+ i = serialized_compiler->buf_segment_count;
+ last_buf = NULL;
+ while (i > 0) {
+ if ((sljit_uw)(end - ptr) < sizeof(sljit_uw))
+ goto error;
+
+ used_size = *(sljit_uw*)ptr;
+ aligned_size = SLJIT_SERIALIZE_ALIGN(used_size);
+ ptr += sizeof(sljit_uw);
+
+ if ((sljit_uw)(end - ptr) < aligned_size)
+ goto error;
+
+ if (last_buf == NULL) {
+ SLJIT_ASSERT(compiler->buf != NULL && compiler->buf->next == NULL);
+ buf = compiler->buf;
+ } else {
+ buf = (struct sljit_memory_fragment*)SLJIT_MALLOC(BUF_SIZE, allocator_data);
+ if (!buf)
+ goto error;
+ buf->next = NULL;
+ }
+
+ buf->used_size = used_size;
+ SLJIT_MEMCPY(buf->memory, ptr, used_size);
+
+ if (last_buf != NULL)
+ last_buf->next = buf;
+ last_buf = buf;
+
+ ptr += aligned_size;
+ i--;
+ }
+
+ last_label = NULL;
+ label_count = serialized_compiler->label_count;
+ if ((sljit_uw)(end - ptr) < label_count * sizeof(struct sljit_serialized_label))
+ goto error;
+
+ label_list = (struct sljit_label **)SLJIT_MALLOC(label_count * sizeof(struct sljit_label*), allocator_data);
+ if (label_list == NULL)
+ goto error;
+
+ for (i = 0; i < label_count; i++) {
+ label = (struct sljit_label*)ensure_abuf(compiler, sizeof(struct sljit_label));
+ if (label == NULL)
+ goto error;
+
+ serialized_label = (struct sljit_serialized_label*)ptr;
+ label->next = NULL;
+ label->u.index = i;
+ label->size = serialized_label->size;
+
+ if (last_label != NULL)
+ last_label->next = label;
+ else
+ compiler->labels = label;
+ last_label = label;
+
+ label_list[i] = label;
+ ptr += sizeof(struct sljit_serialized_label);
+ }
+ compiler->last_label = last_label;
+
+ last_jump = NULL;
+ i = serialized_compiler->jump_count;
+ if ((sljit_uw)(end - ptr) < i * sizeof(struct sljit_serialized_jump))
+ goto error;
+
+ while (i > 0) {
+ jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
+ if (jump == NULL)
+ goto error;
+
+ serialized_jump = (struct sljit_serialized_jump*)ptr;
+ jump->next = NULL;
+ jump->addr = serialized_jump->addr;
+ jump->flags = serialized_jump->flags;
+
+ if (!(serialized_jump->flags & JUMP_ADDR)) {
+ if (serialized_jump->value != SLJIT_MAX_ADDRESS) {
+ if (serialized_jump->value >= label_count)
+ goto error;
+ jump->u.label = label_list[serialized_jump->value];
+ } else
+ jump->u.label = NULL;
+ } else
+ jump->u.target = serialized_jump->value;
+
+ if (last_jump != NULL)
+ last_jump->next = jump;
+ else
+ compiler->jumps = jump;
+ last_jump = jump;
+
+ ptr += sizeof(struct sljit_serialized_jump);
+ i--;
+ }
+ compiler->last_jump = last_jump;
+
+ SLJIT_FREE(label_list, allocator_data);
+ label_list = NULL;
+
+ last_const = NULL;
+ i = serialized_compiler->const_count;
+ if ((sljit_uw)(end - ptr) < i * sizeof(struct sljit_serialized_const))
+ goto error;
+
+ while (i > 0) {
+ const_ = (struct sljit_const*)ensure_abuf(compiler, sizeof(struct sljit_const));
+ if (const_ == NULL)
+ goto error;
+
+ serialized_const = (struct sljit_serialized_const*)ptr;
+ const_->next = NULL;
+ const_->addr = serialized_const->addr;
+
+ if (last_const != NULL)
+ last_const->next = const_;
+ else
+ compiler->consts = const_;
+ last_const = const_;
+
+ ptr += sizeof(struct sljit_serialized_const);
+ i--;
+ }
+ compiler->last_const = last_const;
+
+#if (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS) \
+ || (defined SLJIT_DEBUG && SLJIT_DEBUG)
+ if ((sljit_uw)(end - ptr) < sizeof(struct sljit_serialized_debug_info))
+ goto error;
+
+ serialized_debug_info = (struct sljit_serialized_debug_info*)ptr;
+ compiler->last_flags = (sljit_s32)serialized_debug_info->last_flags;
+ compiler->last_return = serialized_debug_info->last_return;
+ compiler->logical_local_size = serialized_debug_info->logical_local_size;
+#endif /* SLJIT_ARGUMENT_CHECKS || SLJIT_DEBUG */
+
+ return compiler;
+
+error:
+ sljit_free_compiler(compiler);
+ if (label_list != NULL)
+ SLJIT_FREE(label_list, allocator_data);
+ return NULL;
+}
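
Taken together, the two entry points above let a partially built compiler be snapshotted and resumed later. A minimal round-trip sketch (assumed usage; compiler is an existing struct sljit_compiler*, error handling shortened):

	sljit_uw serialized_size;
	sljit_uw *buffer = sljit_serialize_compiler(compiler, 0, &serialized_size);

	if (buffer != NULL) {
		/* The buffer was obtained with SLJIT_MALLOC, so it can be stored or
		   copied, and released with SLJIT_FREE using the matching
		   allocator_data (NULL assumed here). */
		struct sljit_compiler *restored = sljit_deserialize_compiler(buffer, serialized_size, 0, NULL);
		SLJIT_FREE(buffer, NULL);

		if (restored != NULL) {
			/* Continue emitting instructions on 'restored', resolve any
			   unbound jumps, then generate code as usual. */
			sljit_free_compiler(restored);
		}
	}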