Modified: tomcat/jk/trunk/native/iis/pcre/sljit/sljitNativePPC_common.c URL: http://svn.apache.org/viewvc/tomcat/jk/trunk/native/iis/pcre/sljit/sljitNativePPC_common.c?rev=1815927&r1=1815926&r2=1815927&view=diff ============================================================================== --- tomcat/jk/trunk/native/iis/pcre/sljit/sljitNativePPC_common.c (original) +++ tomcat/jk/trunk/native/iis/pcre/sljit/sljitNativePPC_common.c Tue Nov 21 14:37:37 2017 @@ -1,7 +1,7 @@ /* * Stack-less Just-In-Time compiler * - * Copyright 2009-2012 Zoltan Herczeg (hzmes...@freemail.hu). All rights reserved. + * Copyright Zoltan Herczeg (hzmes...@freemail.hu). All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, are * permitted provided that the following conditions are met: @@ -127,9 +127,9 @@ static const sljit_u8 reg_map[SLJIT_NUMB /* Instruction bit sections. OE and Rc flag (see ALT_SET_FLAGS). */ -#define OERC(flags) (((flags & ALT_SET_FLAGS) >> 10) | (flags & ALT_SET_FLAGS)) +#define OE(flags) ((flags) & ALT_SET_FLAGS) /* Rc flag (see ALT_SET_FLAGS). */ -#define RC(flags) ((flags & ALT_SET_FLAGS) >> 10) +#define RC(flags) (((flags) & ALT_SET_FLAGS) >> 10) #define HI(opcode) ((opcode) << 26) #define LO(opcode) ((opcode) << 1) @@ -154,6 +154,7 @@ static const sljit_u8 reg_map[SLJIT_NUMB #define CMPL (HI(31) | LO(32)) #define CMPLI (HI(10)) #define CROR (HI(19) | LO(449)) +#define DCBT (HI(31) | LO(278)) #define DIVD (HI(31) | LO(489)) #define DIVDU (HI(31) | LO(457)) #define DIVW (HI(31) | LO(491)) @@ -249,7 +250,7 @@ static sljit_s32 push_inst(struct sljit_ return SLJIT_SUCCESS; } -static SLJIT_INLINE sljit_s32 detect_jump_type(struct sljit_jump *jump, sljit_ins *code_ptr, sljit_ins *code) +static SLJIT_INLINE sljit_s32 detect_jump_type(struct sljit_jump *jump, sljit_ins *code_ptr, sljit_ins *code, sljit_sw executable_offset) { sljit_sw diff; sljit_uw target_addr; @@ -267,7 +268,7 @@ static SLJIT_INLINE sljit_s32 detect_jum target_addr = jump->u.target; else { SLJIT_ASSERT(jump->flags & JUMP_LABEL); - target_addr = (sljit_uw)(code + jump->u.label->size); + target_addr = (sljit_uw)(code + jump->u.label->size) + (sljit_uw)executable_offset; } #if (defined SLJIT_PASS_ENTRY_ADDR_TO_CALL && SLJIT_PASS_ENTRY_ADDR_TO_CALL) && (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64) @@ -275,7 +276,7 @@ static SLJIT_INLINE sljit_s32 detect_jum goto keep_address; #endif - diff = ((sljit_sw)target_addr - (sljit_sw)(code_ptr)) & ~0x3l; + diff = ((sljit_sw)target_addr - (sljit_sw)(code_ptr) - executable_offset) & ~0x3l; extra_jump_flags = 0; if (jump->flags & IS_COND) { @@ -296,6 +297,7 @@ static SLJIT_INLINE sljit_s32 detect_jum jump->flags |= PATCH_B | extra_jump_flags; return 1; } + if (target_addr <= 0x03ffffff) { jump->flags |= PATCH_B | PATCH_ABS_B | extra_jump_flags; return 1; @@ -309,6 +311,7 @@ keep_address: jump->flags |= PATCH_ABS32; return 1; } + if (target_addr <= 0x7fffffffffffl) { jump->flags |= PATCH_ABS48; return 1; @@ -326,6 +329,7 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_gen sljit_ins *buf_ptr; sljit_ins *buf_end; sljit_uw word_count; + sljit_sw executable_offset; sljit_uw addr; struct sljit_label *label; @@ -349,9 +353,12 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_gen code_ptr = code; word_count = 0; + executable_offset = SLJIT_EXEC_OFFSET(code); + label = compiler->labels; jump = compiler->jumps; const_ = compiler->consts; + do { buf_ptr = (sljit_ins*)buf->memory; buf_end = buf_ptr + (buf->used_size >> 2); @@ -363,7 +370,7 @@ 
SLJIT_API_FUNC_ATTRIBUTE void* sljit_gen /* These structures are ordered by their address. */ if (label && label->size == word_count) { /* Just recording the address. */ - label->addr = (sljit_uw)code_ptr; + label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset); label->size = code_ptr - code; label = label->next; } @@ -373,7 +380,7 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_gen #else jump->addr = (sljit_uw)(code_ptr - 6); #endif - if (detect_jump_type(jump, code_ptr, code)) { + if (detect_jump_type(jump, code_ptr, code, executable_offset)) { #if (defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32) code_ptr[-3] = code_ptr[0]; code_ptr -= 3; @@ -420,7 +427,7 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_gen } while (buf); if (label && label->size == word_count) { - label->addr = (sljit_uw)code_ptr; + label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset); label->size = code_ptr - code; label = label->next; } @@ -438,11 +445,12 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_gen while (jump) { do { addr = (jump->flags & JUMP_LABEL) ? jump->u.label->addr : jump->u.target; - buf_ptr = (sljit_ins*)jump->addr; + buf_ptr = (sljit_ins *)jump->addr; + if (jump->flags & PATCH_B) { if (jump->flags & IS_COND) { if (!(jump->flags & PATCH_ABS_B)) { - addr = addr - jump->addr; + addr -= (sljit_uw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset); SLJIT_ASSERT((sljit_sw)addr <= 0x7fff && (sljit_sw)addr >= -0x8000); *buf_ptr = BCx | (addr & 0xfffc) | ((*buf_ptr) & 0x03ff0001); } @@ -453,7 +461,7 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_gen } else { if (!(jump->flags & PATCH_ABS_B)) { - addr = addr - jump->addr; + addr -= (sljit_uw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset); SLJIT_ASSERT((sljit_sw)addr <= 0x01ffffff && (sljit_sw)addr >= -0x02000000); *buf_ptr = Bx | (addr & 0x03fffffc) | ((*buf_ptr) & 0x1); } @@ -464,6 +472,7 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_gen } break; } + /* Set the fields of immediate loads. */ #if (defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32) buf_ptr[0] = (buf_ptr[0] & 0xffff0000) | ((addr >> 16) & 0xffff); @@ -492,22 +501,48 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_gen } compiler->error = SLJIT_ERR_COMPILED; + compiler->executable_offset = executable_offset; compiler->executable_size = (code_ptr - code) * sizeof(sljit_ins); - SLJIT_CACHE_FLUSH(code, code_ptr); + + code = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(code, executable_offset); #if (defined SLJIT_INDIRECT_CALL && SLJIT_INDIRECT_CALL) #if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64) if (((sljit_sw)code_ptr) & 0x4) code_ptr++; +#endif sljit_set_function_context(NULL, (struct sljit_function_context*)code_ptr, (sljit_sw)code, (void*)sljit_generate_code); +#endif + + code_ptr = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset); + + SLJIT_CACHE_FLUSH(code, code_ptr); + +#if (defined SLJIT_INDIRECT_CALL && SLJIT_INDIRECT_CALL) return code_ptr; #else - sljit_set_function_context(NULL, (struct sljit_function_context*)code_ptr, (sljit_sw)code, (void*)sljit_generate_code); - return code_ptr; + return code; #endif +} + +SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_has_cpu_feature(sljit_s32 feature_type) +{ + switch (feature_type) { + case SLJIT_HAS_FPU: +#ifdef SLJIT_IS_FPU_AVAILABLE + return SLJIT_IS_FPU_AVAILABLE; #else - return code; + /* Available by default. 
*/ + return 1; #endif + + case SLJIT_HAS_PRE_UPDATE: + case SLJIT_HAS_CLZ: + return 1; + + default: + return 0; + } } /* --------------------------------------------------------------------- */ @@ -544,7 +579,6 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_gen #define ALT_FORM3 0x040000 #define ALT_FORM4 0x080000 #define ALT_FORM5 0x100000 -#define ALT_FORM6 0x200000 /* Source and destination is register. */ #define REG_DEST 0x000001 @@ -559,7 +593,7 @@ ALT_SIGN_EXT 0x000200 ALT_SET_FLAGS 0x000400 ALT_FORM1 0x010000 ... -ALT_FORM6 0x200000 */ +ALT_FORM5 0x100000 */ #if (defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32) #include "sljitNativePPC_32.c" @@ -850,7 +884,7 @@ static sljit_s32 getput_arg_fast(struct sljit_ins inst; /* Should work when (arg & REG_MASK) == 0. */ - SLJIT_COMPILE_ASSERT(A(0) == 0, a0_must_be_0); + SLJIT_ASSERT(A(0) == 0); SLJIT_ASSERT(arg & SLJIT_MEM); if (arg & OFFS_REG_MASK) { @@ -1005,10 +1039,6 @@ static sljit_s32 getput_arg(struct sljit #endif if (inp_flags & WRITE_BACK) { - if (arg == reg) { - FAIL_IF(push_inst(compiler, OR | S(reg) | A(tmp_r) | B(reg))); - reg = tmp_r; - } tmp_r = arg; FAIL_IF(push_inst(compiler, ADDIS | D(arg) | A(arg) | IMM(high_short >> 16))); } @@ -1131,7 +1161,7 @@ static sljit_s32 emit_op(struct sljit_co sljit_s32 src1_r; sljit_s32 src2_r; sljit_s32 sugg_src2_r = TMP_REG2; - sljit_s32 flags = input_flags & (ALT_FORM1 | ALT_FORM2 | ALT_FORM3 | ALT_FORM4 | ALT_FORM5 | ALT_FORM6 | ALT_SIGN_EXT | ALT_SET_FLAGS); + sljit_s32 flags = input_flags & (ALT_FORM1 | ALT_FORM2 | ALT_FORM3 | ALT_FORM4 | ALT_FORM5 | ALT_SIGN_EXT | ALT_SET_FLAGS); if (!(input_flags & ALT_KEEP_CACHE)) { compiler->cache_arg = 0; @@ -1140,8 +1170,6 @@ static sljit_s32 emit_op(struct sljit_co /* Destination check. */ if (SLJIT_UNLIKELY(dst == SLJIT_UNUSED)) { - if (op >= SLJIT_MOV && op <= SLJIT_MOVU_S32 && !(src2 & SLJIT_MEM)) - return SLJIT_SUCCESS; dst_r = TMP_REG2; } else if (FAST_IS_REG(dst)) { @@ -1294,6 +1322,31 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit return SLJIT_SUCCESS; } +static sljit_s32 emit_prefetch(struct sljit_compiler *compiler, + sljit_s32 src, sljit_sw srcw) +{ + if (!(src & OFFS_REG_MASK)) { + if (srcw == 0 && (src & REG_MASK) != SLJIT_UNUSED) + return push_inst(compiler, DCBT | A(0) | B(src & REG_MASK)); + + FAIL_IF(load_immediate(compiler, TMP_REG1, srcw)); + /* Works with SLJIT_MEM0() case as well. */ + return push_inst(compiler, DCBT | A(src & REG_MASK) | B(TMP_REG1)); + } + + srcw &= 0x3; + + if (srcw == 0) + return push_inst(compiler, DCBT | A(src & REG_MASK) | B(OFFS_REG(src))); + +#if (defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32) + FAIL_IF(push_inst(compiler, RLWINM | S(OFFS_REG(src)) | A(TMP_REG1) | (srcw << 11) | ((31 - srcw) << 1))); +#else + FAIL_IF(push_inst(compiler, RLDI(TMP_REG1, OFFS_REG(src), srcw, 63 - srcw, 1))); +#endif + return push_inst(compiler, DCBT | A(src & REG_MASK) | B(TMP_REG1)); +} + #define EMIT_MOV(type, type_flags, type_cast) \ emit_op(compiler, (src & SLJIT_IMM) ? SLJIT_MOV : type, flags | (type_flags), dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? type_cast srcw : srcw) @@ -1301,7 +1354,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit sljit_s32 dst, sljit_sw dstw, sljit_s32 src, sljit_sw srcw) { - sljit_s32 flags = GET_FLAGS(op) ? ALT_SET_FLAGS : 0; + sljit_s32 flags = HAS_FLAGS(op) ? 
ALT_SET_FLAGS : 0; sljit_s32 op_flags = GET_ALL_FLAGS(op); CHECK_ERROR(); @@ -1309,11 +1362,18 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit ADJUST_LOCAL_OFFSET(dst, dstw); ADJUST_LOCAL_OFFSET(src, srcw); + if (dst == SLJIT_UNUSED && !HAS_FLAGS(op)) { + if (op <= SLJIT_MOV_P && (src & SLJIT_MEM)) + return emit_prefetch(compiler, src, srcw); + + return SLJIT_SUCCESS; + } + op = GET_OPCODE(op); if ((src & SLJIT_IMM) && srcw == 0) src = TMP_ZERO; - if (op_flags & SLJIT_SET_O) + if (GET_FLAG_TYPE(op_flags) == SLJIT_OVERFLOW) FAIL_IF(push_inst(compiler, MTXER | S(TMP_ZERO))); if (op_flags & SLJIT_I32_OP) { @@ -1339,6 +1399,8 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit flags |= INT_DATA | SIGNED_DATA; if (src & SLJIT_IMM) srcw = (sljit_s32)srcw; + if (HAS_FLAGS(op_flags)) + flags |= ALT_SIGN_EXT; } #endif } @@ -1404,7 +1466,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit return emit_op(compiler, SLJIT_NOT, flags, dst, dstw, TMP_REG1, 0, src, srcw); case SLJIT_NEG: - return emit_op(compiler, SLJIT_NEG, flags, dst, dstw, TMP_REG1, 0, src, srcw); + return emit_op(compiler, SLJIT_NEG, flags | (GET_FLAG_TYPE(op_flags) ? ALT_FORM1 : 0), dst, dstw, TMP_REG1, 0, src, srcw); case SLJIT_CLZ: #if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64) @@ -1457,7 +1519,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit sljit_s32 src1, sljit_sw src1w, sljit_s32 src2, sljit_sw src2w) { - sljit_s32 flags = GET_FLAGS(op) ? ALT_SET_FLAGS : 0; + sljit_s32 flags = HAS_FLAGS(op) ? ALT_SET_FLAGS : 0; CHECK_ERROR(); CHECK(check_sljit_emit_op2(compiler, op, dst, dstw, src1, src1w, src2, src2w)); @@ -1465,6 +1527,9 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit ADJUST_LOCAL_OFFSET(src1, src1w); ADJUST_LOCAL_OFFSET(src2, src2w); + if (dst == SLJIT_UNUSED && !HAS_FLAGS(op)) + return SLJIT_SUCCESS; + if ((src1 & SLJIT_IMM) && src1w == 0) src1 = TMP_ZERO; if ((src2 & SLJIT_IMM) && src2w == 0) @@ -1478,45 +1543,48 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit src1w = (sljit_s32)(src1w); if (src2 & SLJIT_IMM) src2w = (sljit_s32)(src2w); - if (GET_FLAGS(op)) + if (HAS_FLAGS(op)) flags |= ALT_SIGN_EXT; } #endif - if (op & SLJIT_SET_O) + if (GET_FLAG_TYPE(op) == SLJIT_OVERFLOW) FAIL_IF(push_inst(compiler, MTXER | S(TMP_ZERO))); if (src2 == TMP_REG2) flags |= ALT_KEEP_CACHE; switch (GET_OPCODE(op)) { case SLJIT_ADD: - if (!GET_FLAGS(op) && ((src1 | src2) & SLJIT_IMM)) { + if (GET_FLAG_TYPE(op) == SLJIT_OVERFLOW) + return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM1, dst, dstw, src1, src1w, src2, src2w); + + if (!HAS_FLAGS(op) && ((src1 | src2) & SLJIT_IMM)) { if (TEST_SL_IMM(src2, src2w)) { compiler->imm = src2w & 0xffff; - return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM1, dst, dstw, src1, src1w, TMP_REG2, 0); + return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM2, dst, dstw, src1, src1w, TMP_REG2, 0); } if (TEST_SL_IMM(src1, src1w)) { compiler->imm = src1w & 0xffff; - return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM1, dst, dstw, src2, src2w, TMP_REG2, 0); + return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM2, dst, dstw, src2, src2w, TMP_REG2, 0); } if (TEST_SH_IMM(src2, src2w)) { compiler->imm = (src2w >> 16) & 0xffff; - return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM2, dst, dstw, src1, src1w, TMP_REG2, 0); + return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM2 | ALT_FORM3, dst, dstw, src1, src1w, TMP_REG2, 0); } if (TEST_SH_IMM(src1, src1w)) { compiler->imm = (src1w >> 16) & 0xffff; - return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM2, dst, dstw, src2, src2w, TMP_REG2, 0); + return emit_op(compiler, SLJIT_ADD, flags | 
ALT_FORM2 | ALT_FORM3, dst, dstw, src2, src2w, TMP_REG2, 0); } /* Range between -1 and -32768 is covered above. */ if (TEST_ADD_IMM(src2, src2w)) { compiler->imm = src2w & 0xffffffff; - return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM4, dst, dstw, src1, src1w, TMP_REG2, 0); + return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM2 | ALT_FORM4, dst, dstw, src1, src1w, TMP_REG2, 0); } if (TEST_ADD_IMM(src1, src1w)) { compiler->imm = src1w & 0xffffffff; - return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM4, dst, dstw, src2, src2w, TMP_REG2, 0); + return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM2 | ALT_FORM4, dst, dstw, src2, src2w, TMP_REG2, 0); } } - if (!(GET_FLAGS(op) & (SLJIT_SET_E | SLJIT_SET_O))) { + if (HAS_FLAGS(op)) { if (TEST_SL_IMM(src2, src2w)) { compiler->imm = src2w & 0xffff; return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM3, dst, dstw, src1, src1w, TMP_REG2, 0); @@ -1526,75 +1594,75 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM3, dst, dstw, src2, src2w, TMP_REG2, 0); } } - return emit_op(compiler, SLJIT_ADD, flags, dst, dstw, src1, src1w, src2, src2w); + return emit_op(compiler, SLJIT_ADD, flags | ((GET_FLAG_TYPE(op) == GET_FLAG_TYPE(SLJIT_SET_CARRY)) ? ALT_FORM4 : 0), dst, dstw, src1, src1w, src2, src2w); case SLJIT_ADDC: - return emit_op(compiler, SLJIT_ADDC, flags | (!(op & SLJIT_KEEP_FLAGS) ? 0 : ALT_FORM1), dst, dstw, src1, src1w, src2, src2w); + return emit_op(compiler, SLJIT_ADDC, flags, dst, dstw, src1, src1w, src2, src2w); case SLJIT_SUB: - if (!GET_FLAGS(op) && ((src1 | src2) & SLJIT_IMM)) { + if (GET_FLAG_TYPE(op) >= SLJIT_LESS && GET_FLAG_TYPE(op) <= SLJIT_LESS_EQUAL) { + if (dst == SLJIT_UNUSED) { + if (TEST_UL_IMM(src2, src2w)) { + compiler->imm = src2w & 0xffff; + return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM1 | ALT_FORM2, dst, dstw, src1, src1w, TMP_REG2, 0); + } + return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM1, dst, dstw, src1, src1w, src2, src2w); + } + + if ((src2 & SLJIT_IMM) && src2w >= 0 && src2w <= (SIMM_MAX + 1)) { + compiler->imm = src2w; + return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM1 | ALT_FORM2 | ALT_FORM3, dst, dstw, src1, src1w, TMP_REG2, 0); + } + return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM1 | ALT_FORM3, dst, dstw, src1, src1w, src2, src2w); + } + + if (GET_FLAG_TYPE(op) == SLJIT_OVERFLOW) + return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM2, dst, dstw, src1, src1w, src2, src2w); + + if (!HAS_FLAGS(op) && ((src1 | src2) & SLJIT_IMM)) { if (TEST_SL_IMM(src2, -src2w)) { compiler->imm = (-src2w) & 0xffff; - return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM1, dst, dstw, src1, src1w, TMP_REG2, 0); + return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM2, dst, dstw, src1, src1w, TMP_REG2, 0); } if (TEST_SL_IMM(src1, src1w)) { compiler->imm = src1w & 0xffff; - return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM1, dst, dstw, src2, src2w, TMP_REG2, 0); + return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM3, dst, dstw, src2, src2w, TMP_REG2, 0); } if (TEST_SH_IMM(src2, -src2w)) { compiler->imm = ((-src2w) >> 16) & 0xffff; - return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM2, dst, dstw, src1, src1w, TMP_REG2, 0); + return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM2 | ALT_FORM3, dst, dstw, src1, src1w, TMP_REG2, 0); } /* Range between -1 and -32768 is covered above. 
*/ if (TEST_ADD_IMM(src2, -src2w)) { compiler->imm = -src2w & 0xffffffff; - return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM4, dst, dstw, src1, src1w, TMP_REG2, 0); + return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM2 | ALT_FORM4, dst, dstw, src1, src1w, TMP_REG2, 0); } } - if (dst == SLJIT_UNUSED && (op & (SLJIT_SET_E | SLJIT_SET_U | SLJIT_SET_S)) && !(op & (SLJIT_SET_O | SLJIT_SET_C))) { - if (!(op & SLJIT_SET_U)) { - /* We know ALT_SIGN_EXT is set if it is an SLJIT_I32_OP on 64 bit systems. */ - if (TEST_SL_IMM(src2, src2w)) { - compiler->imm = src2w & 0xffff; - return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM2, dst, dstw, src1, src1w, TMP_REG2, 0); - } - if (GET_FLAGS(op) == SLJIT_SET_E && TEST_SL_IMM(src1, src1w)) { - compiler->imm = src1w & 0xffff; - return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM2, dst, dstw, src2, src2w, TMP_REG2, 0); - } - } - if (!(op & (SLJIT_SET_E | SLJIT_SET_S))) { - /* We know ALT_SIGN_EXT is set if it is an SLJIT_I32_OP on 64 bit systems. */ - if (TEST_UL_IMM(src2, src2w)) { - compiler->imm = src2w & 0xffff; - return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM3, dst, dstw, src1, src1w, TMP_REG2, 0); - } - return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM4, dst, dstw, src1, src1w, src2, src2w); - } - if ((src2 & SLJIT_IMM) && src2w >= 0 && src2w <= 0x7fff) { - compiler->imm = src2w; - return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM2 | ALT_FORM3, dst, dstw, src1, src1w, TMP_REG2, 0); + + if (dst == SLJIT_UNUSED && GET_FLAG_TYPE(op) != GET_FLAG_TYPE(SLJIT_SET_CARRY)) { + if (TEST_SL_IMM(src2, src2w)) { + compiler->imm = src2w & 0xffff; + return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM4 | ALT_FORM5, dst, dstw, src1, src1w, TMP_REG2, 0); } - return emit_op(compiler, SLJIT_SUB, flags | ((op & SLJIT_SET_U) ? ALT_FORM4 : 0) | ((op & (SLJIT_SET_E | SLJIT_SET_S)) ? ALT_FORM5 : 0), dst, dstw, src1, src1w, src2, src2w); + return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM4, dst, dstw, src1, src1w, src2, src2w); } - if (!(op & (SLJIT_SET_E | SLJIT_SET_U | SLJIT_SET_S | SLJIT_SET_O))) { - if (TEST_SL_IMM(src2, -src2w)) { - compiler->imm = (-src2w) & 0xffff; - return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM3, dst, dstw, src1, src1w, TMP_REG2, 0); - } + + if (TEST_SL_IMM(src2, -src2w)) { + compiler->imm = (-src2w) & 0xffff; + return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM3, dst, dstw, src1, src1w, TMP_REG2, 0); } /* We know ALT_SIGN_EXT is set if it is an SLJIT_I32_OP on 64 bit systems. */ - return emit_op(compiler, SLJIT_SUB, flags | (!(op & SLJIT_SET_U) ? 0 : ALT_FORM6), dst, dstw, src1, src1w, src2, src2w); + return emit_op(compiler, SLJIT_SUB, flags | ((GET_FLAG_TYPE(op) == GET_FLAG_TYPE(SLJIT_SET_CARRY)) ? ALT_FORM5 : 0), dst, dstw, src1, src1w, src2, src2w); case SLJIT_SUBC: - return emit_op(compiler, SLJIT_SUBC, flags | (!(op & SLJIT_KEEP_FLAGS) ? 
0 : ALT_FORM1), dst, dstw, src1, src1w, src2, src2w); + return emit_op(compiler, SLJIT_SUBC, flags, dst, dstw, src1, src1w, src2, src2w); case SLJIT_MUL: #if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64) if (op & SLJIT_I32_OP) flags |= ALT_FORM2; #endif - if (!GET_FLAGS(op)) { + if (!HAS_FLAGS(op)) { if (TEST_SL_IMM(src2, src2w)) { compiler->imm = src2w & 0xffff; return emit_op(compiler, SLJIT_MUL, flags | ALT_FORM1, dst, dstw, src1, src1w, TMP_REG2, 0); @@ -1604,13 +1672,15 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit return emit_op(compiler, SLJIT_MUL, flags | ALT_FORM1, dst, dstw, src2, src2w, TMP_REG2, 0); } } + else + FAIL_IF(push_inst(compiler, MTXER | S(TMP_ZERO))); return emit_op(compiler, SLJIT_MUL, flags, dst, dstw, src1, src1w, src2, src2w); case SLJIT_AND: case SLJIT_OR: case SLJIT_XOR: /* Commutative unsigned operations. */ - if (!GET_FLAGS(op) || GET_OPCODE(op) == SLJIT_AND) { + if (!HAS_FLAGS(op) || GET_OPCODE(op) == SLJIT_AND) { if (TEST_UL_IMM(src2, src2w)) { compiler->imm = src2w; return emit_op(compiler, GET_OPCODE(op), flags | ALT_FORM1, dst, dstw, src1, src1w, TMP_REG2, 0); @@ -1628,7 +1698,8 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit return emit_op(compiler, GET_OPCODE(op), flags | ALT_FORM2, dst, dstw, src2, src2w, TMP_REG2, 0); } } - if (!GET_FLAGS(op) && GET_OPCODE(op) != SLJIT_AND) { + if (GET_OPCODE(op) != SLJIT_AND && GET_OPCODE(op) != SLJIT_AND) { + /* Unlike or and xor, and resets unwanted bits as well. */ if (TEST_UI_IMM(src2, src2w)) { compiler->imm = src2w; return emit_op(compiler, GET_OPCODE(op), flags | ALT_FORM3, dst, dstw, src1, src1w, TMP_REG2, 0); @@ -1640,12 +1711,9 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit } return emit_op(compiler, GET_OPCODE(op), flags, dst, dstw, src1, src1w, src2, src2w); - case SLJIT_ASHR: - if (op & SLJIT_KEEP_FLAGS) - flags |= ALT_FORM3; - /* Fall through. */ case SLJIT_SHL: case SLJIT_LSHR: + case SLJIT_ASHR: #if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64) if (op & SLJIT_I32_OP) flags |= ALT_FORM2; @@ -1685,16 +1753,6 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit /* Floating point operators */ /* --------------------------------------------------------------------- */ -SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_is_fpu_available(void) -{ -#ifdef SLJIT_IS_FPU_AVAILABLE - return SLJIT_IS_FPU_AVAILABLE; -#else - /* Available by default. */ - return 1; -#endif -} - #define FLOAT_DATA(op) (DOUBLE_DATA | ((op & SLJIT_F32_OP) >> 6)) #define SELECT_FOP(op, single, double) ((op & SLJIT_F32_OP) ? single : double) @@ -1727,9 +1785,6 @@ static SLJIT_INLINE sljit_s32 sljit_emit op = GET_OPCODE(op); FAIL_IF(push_inst(compiler, (op == SLJIT_CONV_S32_FROM_F64 ? FCTIWZ : FCTIDZ) | FD(TMP_FREG1) | FB(src))); - if (dst == SLJIT_UNUSED) - return SLJIT_SUCCESS; - if (op == SLJIT_CONV_SW_FROM_F64) { if (FAST_IS_REG(dst)) { FAIL_IF(emit_op_mem2(compiler, DOUBLE_DATA, TMP_FREG1, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET, 0, 0)); @@ -1737,12 +1792,8 @@ static SLJIT_INLINE sljit_s32 sljit_emit } return emit_op_mem2(compiler, DOUBLE_DATA, TMP_FREG1, dst, dstw, 0, 0); } - #else FAIL_IF(push_inst(compiler, FCTIWZ | FD(TMP_FREG1) | FB(src))); - - if (dst == SLJIT_UNUSED) - return SLJIT_SUCCESS; #endif if (FAST_IS_REG(dst)) { @@ -2019,10 +2070,6 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit CHECK(check_sljit_emit_fast_enter(compiler, dst, dstw)); ADJUST_LOCAL_OFFSET(dst, dstw); - /* For UNUSED dst. Uncommon, but possible. 
*/ - if (dst == SLJIT_UNUSED) - return SLJIT_SUCCESS; - if (FAST_IS_REG(dst)) return push_inst(compiler, MFLR | D(dst)); @@ -2079,33 +2126,33 @@ static sljit_ins get_bo_bi_flags(sljit_s return (4 << 21) | (2 << 16); case SLJIT_LESS: - case SLJIT_LESS_F64: - return (12 << 21) | ((4 + 0) << 16); - - case SLJIT_GREATER_EQUAL: - case SLJIT_GREATER_EQUAL_F64: - return (4 << 21) | ((4 + 0) << 16); - - case SLJIT_GREATER: - case SLJIT_GREATER_F64: - return (12 << 21) | ((4 + 1) << 16); - - case SLJIT_LESS_EQUAL: - case SLJIT_LESS_EQUAL_F64: - return (4 << 21) | ((4 + 1) << 16); - case SLJIT_SIG_LESS: return (12 << 21) | (0 << 16); + case SLJIT_GREATER_EQUAL: case SLJIT_SIG_GREATER_EQUAL: return (4 << 21) | (0 << 16); + case SLJIT_GREATER: case SLJIT_SIG_GREATER: return (12 << 21) | (1 << 16); + case SLJIT_LESS_EQUAL: case SLJIT_SIG_LESS_EQUAL: return (4 << 21) | (1 << 16); + case SLJIT_LESS_F64: + return (12 << 21) | ((4 + 0) << 16); + + case SLJIT_GREATER_EQUAL_F64: + return (4 << 21) | ((4 + 0) << 16); + + case SLJIT_GREATER_F64: + return (12 << 21) | ((4 + 1) << 16); + + case SLJIT_LESS_EQUAL_F64: + return (4 << 21) | ((4 + 1) << 16); + case SLJIT_OVERFLOW: case SLJIT_MUL_OVERFLOW: return (12 << 21) | (3 << 16); @@ -2207,153 +2254,148 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit return push_inst(compiler, BCCTR | (20 << 21) | (type >= SLJIT_FAST_CALL ? 1 : 0)); } -/* Get a bit from CR, all other bits are zeroed. */ -#define GET_CR_BIT(bit, dst) \ - FAIL_IF(push_inst(compiler, MFCR | D(dst))); \ - FAIL_IF(push_inst(compiler, RLWINM | S(dst) | A(dst) | ((1 + (bit)) << 11) | (31 << 6) | (31 << 1))); - -#define INVERT_BIT(dst) \ - FAIL_IF(push_inst(compiler, XORI | S(dst) | A(dst) | 0x1)); - SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 dst, sljit_sw dstw, - sljit_s32 src, sljit_sw srcw, sljit_s32 type) { - sljit_s32 reg, input_flags; - sljit_s32 flags = GET_ALL_FLAGS(op); - sljit_sw original_dstw = dstw; + sljit_s32 reg, input_flags, cr_bit, invert; + sljit_s32 saved_op = op; + sljit_sw saved_dstw = dstw; CHECK_ERROR(); - CHECK(check_sljit_emit_op_flags(compiler, op, dst, dstw, src, srcw, type)); + CHECK(check_sljit_emit_op_flags(compiler, op, dst, dstw, type)); ADJUST_LOCAL_OFFSET(dst, dstw); - if (dst == SLJIT_UNUSED) - return SLJIT_SUCCESS; +#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64) + input_flags = (op & SLJIT_I32_OP) ? INT_DATA : WORD_DATA; +#else + input_flags = WORD_DATA; +#endif op = GET_OPCODE(op); reg = (op < SLJIT_ADD && FAST_IS_REG(dst)) ? dst : TMP_REG2; compiler->cache_arg = 0; compiler->cache_argw = 0; - if (op >= SLJIT_ADD && (src & SLJIT_MEM)) { - ADJUST_LOCAL_OFFSET(src, srcw); -#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64) - input_flags = (flags & SLJIT_I32_OP) ? 
INT_DATA : WORD_DATA; -#else - input_flags = WORD_DATA; -#endif - FAIL_IF(emit_op_mem2(compiler, input_flags | LOAD_DATA, TMP_REG1, src, srcw, dst, dstw)); - src = TMP_REG1; - srcw = 0; - } - switch (type & 0xff) { - case SLJIT_EQUAL: - GET_CR_BIT(2, reg); - break; + if (op >= SLJIT_ADD && (dst & SLJIT_MEM)) + FAIL_IF(emit_op_mem2(compiler, input_flags | LOAD_DATA, TMP_REG1, dst, dstw, dst, dstw)); - case SLJIT_NOT_EQUAL: - GET_CR_BIT(2, reg); - INVERT_BIT(reg); - break; + invert = 0; + switch (type & 0xff) { case SLJIT_LESS: - case SLJIT_LESS_F64: - GET_CR_BIT(4 + 0, reg); + case SLJIT_SIG_LESS: + cr_bit = 0; break; case SLJIT_GREATER_EQUAL: - case SLJIT_GREATER_EQUAL_F64: - GET_CR_BIT(4 + 0, reg); - INVERT_BIT(reg); + case SLJIT_SIG_GREATER_EQUAL: + cr_bit = 0; + invert = 1; break; case SLJIT_GREATER: - case SLJIT_GREATER_F64: - GET_CR_BIT(4 + 1, reg); + case SLJIT_SIG_GREATER: + cr_bit = 1; break; case SLJIT_LESS_EQUAL: - case SLJIT_LESS_EQUAL_F64: - GET_CR_BIT(4 + 1, reg); - INVERT_BIT(reg); - break; - - case SLJIT_SIG_LESS: - GET_CR_BIT(0, reg); - break; - - case SLJIT_SIG_GREATER_EQUAL: - GET_CR_BIT(0, reg); - INVERT_BIT(reg); + case SLJIT_SIG_LESS_EQUAL: + cr_bit = 1; + invert = 1; break; - case SLJIT_SIG_GREATER: - GET_CR_BIT(1, reg); + case SLJIT_EQUAL: + cr_bit = 2; break; - case SLJIT_SIG_LESS_EQUAL: - GET_CR_BIT(1, reg); - INVERT_BIT(reg); + case SLJIT_NOT_EQUAL: + cr_bit = 2; + invert = 1; break; case SLJIT_OVERFLOW: case SLJIT_MUL_OVERFLOW: - GET_CR_BIT(3, reg); + cr_bit = 3; break; case SLJIT_NOT_OVERFLOW: case SLJIT_MUL_NOT_OVERFLOW: - GET_CR_BIT(3, reg); - INVERT_BIT(reg); + cr_bit = 3; + invert = 1; + break; + + case SLJIT_LESS_F64: + cr_bit = 4 + 0; + break; + + case SLJIT_GREATER_EQUAL_F64: + cr_bit = 4 + 0; + invert = 1; + break; + + case SLJIT_GREATER_F64: + cr_bit = 4 + 1; + break; + + case SLJIT_LESS_EQUAL_F64: + cr_bit = 4 + 1; + invert = 1; break; case SLJIT_EQUAL_F64: - GET_CR_BIT(4 + 2, reg); + cr_bit = 4 + 2; break; case SLJIT_NOT_EQUAL_F64: - GET_CR_BIT(4 + 2, reg); - INVERT_BIT(reg); + cr_bit = 4 + 2; + invert = 1; break; case SLJIT_UNORDERED_F64: - GET_CR_BIT(4 + 3, reg); + cr_bit = 4 + 3; break; case SLJIT_ORDERED_F64: - GET_CR_BIT(4 + 3, reg); - INVERT_BIT(reg); + cr_bit = 4 + 3; + invert = 1; break; default: - SLJIT_ASSERT_STOP(); + SLJIT_UNREACHABLE(); break; } + FAIL_IF(push_inst(compiler, MFCR | D(reg))); + FAIL_IF(push_inst(compiler, RLWINM | S(reg) | A(reg) | ((1 + (cr_bit)) << 11) | (31 << 6) | (31 << 1))); + + if (invert) + FAIL_IF(push_inst(compiler, XORI | S(reg) | A(reg) | 0x1)); + if (op < SLJIT_ADD) { -#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64) - if (op == SLJIT_MOV) - input_flags = WORD_DATA; - else { - op = SLJIT_MOV_U32; - input_flags = INT_DATA; - } -#else - op = SLJIT_MOV; - input_flags = WORD_DATA; -#endif - if (reg != TMP_REG2) + if (!(dst & SLJIT_MEM)) return SLJIT_SUCCESS; - return emit_op(compiler, op, input_flags, dst, dstw, TMP_REG1, 0, TMP_REG2, 0); + return emit_op_mem2(compiler, input_flags, reg, dst, dstw, reg, 0); } #if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) \ || (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS) compiler->skip_checks = 1; #endif - return sljit_emit_op2(compiler, op | flags, dst, original_dstw, src, srcw, TMP_REG2, 0); + if (dst & SLJIT_MEM) + return sljit_emit_op2(compiler, saved_op, dst, saved_dstw, TMP_REG1, 0, TMP_REG2, 0); + return sljit_emit_op2(compiler, saved_op, dst, 0, dst, 0, TMP_REG2, 0); +} + +SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_cmov(struct sljit_compiler *compiler, 
sljit_s32 type, + sljit_s32 dst_reg, + sljit_s32 src, sljit_sw srcw) +{ + CHECK_ERROR(); + CHECK(check_sljit_emit_cmov(compiler, type, dst_reg, src, srcw)); + + return sljit_emit_cmov_generic(compiler, type, dst_reg, src, srcw);; } SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw init_value) @@ -2369,7 +2411,7 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_co PTR_FAIL_IF(!const_); set_const(const_, compiler); - reg = SLOW_IS_REG(dst) ? dst : TMP_REG2; + reg = FAST_IS_REG(dst) ? dst : TMP_REG2; PTR_FAIL_IF(emit_const(compiler, reg, init_value));
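
For context on the executable_offset plumbing added throughout the hunks above: it supports allocators that write code through one mapping and execute it at a different address, with SLJIT_EXEC_OFFSET()/SLJIT_ADD_EXEC_OFFSET() translating between the two views. A minimal sketch of that translation, assuming the macro's conventional byte-pointer definition (an assumption, not copied from this commit):

#include <stdint.h>

/* rw_ptr is where sljit writes instructions; the same bytes are executed
 * at rw_ptr + exec_offset, so label addresses, jump displacements and
 * cache flushes must be computed against the executable view. */
static void *add_exec_offset(void *rw_ptr, intptr_t exec_offset)
{
	return (uint8_t *)rw_ptr + exec_offset;
}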
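
Also new above is sljit_has_cpu_feature(), which subsumes the removed sljit_is_fpu_available(). A minimal caller-side sketch (illustrative only, not part of this commit) querying the feature ids the PPC backend handles:

#include <stdio.h>
#include "sljitLir.h"

/* Prints 1/0 for each optional feature; ids a backend does not handle
 * fall through to the default case and report 0. */
static void report_cpu_features(void)
{
	printf("FPU:        %d\n", (int)sljit_has_cpu_feature(SLJIT_HAS_FPU));
	printf("CLZ:        %d\n", (int)sljit_has_cpu_feature(SLJIT_HAS_CLZ));
	printf("PRE_UPDATE: %d\n", (int)sljit_has_cpu_feature(SLJIT_HAS_PRE_UPDATE));
}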
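
The new emit_prefetch() above also gives a MOV with an unused destination a meaning on PPC: instead of being dropped, a memory source is lowered to a DCBT data-cache prefetch (other backends in this commit, e.g. SPARC below, still discard it). A hedged front-end fragment, assuming an already-created compiler and using SLJIT_R0 purely as an example base register:

#include "sljitLir.h"

/* Hint that the word at R0 + 64 will be needed soon; with this commit the
 * PPC backend emits dcbt for this instead of a dead load. */
static sljit_s32 prefetch_hint(struct sljit_compiler *compiler)
{
	return sljit_emit_op1(compiler, SLJIT_MOV, SLJIT_UNUSED, 0,
		SLJIT_MEM1(SLJIT_R0), 64);
}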
Modified: tomcat/jk/trunk/native/iis/pcre/sljit/sljitNativeSPARC_32.c URL: http://svn.apache.org/viewvc/tomcat/jk/trunk/native/iis/pcre/sljit/sljitNativeSPARC_32.c?rev=1815927&r1=1815926&r2=1815927&view=diff ============================================================================== --- tomcat/jk/trunk/native/iis/pcre/sljit/sljitNativeSPARC_32.c (original) +++ tomcat/jk/trunk/native/iis/pcre/sljit/sljitNativeSPARC_32.c Tue Nov 21 14:37:37 2017 @@ -1,7 +1,7 @@ /* * Stack-less Just-In-Time compiler * - * Copyright 2009-2012 Zoltan Herczeg (hzmes...@freemail.hu). All rights reserved. + * Copyright Zoltan Herczeg (hzmes...@freemail.hu). All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, are * permitted provided that the following conditions are met: @@ -60,7 +60,7 @@ static SLJIT_INLINE sljit_s32 emit_singl return push_inst(compiler, SRA | D(dst) | S1(dst) | IMM(24), DR(dst)); } else if (dst != src2) - SLJIT_ASSERT_STOP(); + SLJIT_UNREACHABLE(); return SLJIT_SUCCESS; case SLJIT_MOV_U16: @@ -71,7 +71,7 @@ static SLJIT_INLINE sljit_s32 emit_singl return push_inst(compiler, (op == SLJIT_MOV_S16 ? SRA : SRL) | D(dst) | S1(dst) | IMM(16), DR(dst)); } else if (dst != src2) - SLJIT_ASSERT_STOP(); + SLJIT_UNREACHABLE(); return SLJIT_SUCCESS; case SLJIT_NOT: @@ -80,18 +80,17 @@ static SLJIT_INLINE sljit_s32 emit_singl case SLJIT_CLZ: SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM)); - /* sparc 32 does not support SLJIT_KEEP_FLAGS. Not sure I can fix this. */ FAIL_IF(push_inst(compiler, SUB | SET_FLAGS | D(0) | S1(src2) | S2(0), SET_FLAGS)); FAIL_IF(push_inst(compiler, OR | D(TMP_REG1) | S1(0) | S2(src2), DR(TMP_REG1))); FAIL_IF(push_inst(compiler, BICC | DA(0x1) | (7 & DISP_MASK), UNMOVABLE_INS)); - FAIL_IF(push_inst(compiler, OR | (flags & SET_FLAGS) | D(dst) | S1(0) | IMM(32), UNMOVABLE_INS | (flags & SET_FLAGS))); + FAIL_IF(push_inst(compiler, OR | D(dst) | S1(0) | IMM(32), UNMOVABLE_INS)); FAIL_IF(push_inst(compiler, OR | D(dst) | S1(0) | IMM(-1), DR(dst))); /* Loop. */ FAIL_IF(push_inst(compiler, SUB | SET_FLAGS | D(0) | S1(TMP_REG1) | S2(0), SET_FLAGS)); FAIL_IF(push_inst(compiler, SLL | D(TMP_REG1) | S1(TMP_REG1) | IMM(1), DR(TMP_REG1))); FAIL_IF(push_inst(compiler, BICC | DA(0xe) | (-2 & DISP_MASK), UNMOVABLE_INS)); - return push_inst(compiler, ADD | (flags & SET_FLAGS) | D(dst) | S1(dst) | IMM(1), UNMOVABLE_INS | (flags & SET_FLAGS)); + return push_inst(compiler, ADD | D(dst) | S1(dst) | IMM(1), UNMOVABLE_INS); case SLJIT_ADD: return push_inst(compiler, ADD | (flags & SET_FLAGS) | D(dst) | S1(src1) | ARG2(flags, src2), DR(dst) | (flags & SET_FLAGS)); @@ -135,7 +134,7 @@ static SLJIT_INLINE sljit_s32 emit_singl return !(flags & SET_FLAGS) ? 
SLJIT_SUCCESS : push_inst(compiler, SUB | SET_FLAGS | D(0) | S1(dst) | S2(0), SET_FLAGS); } - SLJIT_ASSERT_STOP(); + SLJIT_UNREACHABLE(); return SLJIT_SUCCESS; } @@ -145,20 +144,22 @@ static SLJIT_INLINE sljit_s32 emit_const return push_inst(compiler, OR | D(dst) | S1(dst) | IMM_ARG | (init_value & 0x3ff), DR(dst)); } -SLJIT_API_FUNC_ATTRIBUTE void sljit_set_jump_addr(sljit_uw addr, sljit_uw new_addr) +SLJIT_API_FUNC_ATTRIBUTE void sljit_set_jump_addr(sljit_uw addr, sljit_uw new_target, sljit_sw executable_offset) { - sljit_ins *inst = (sljit_ins*)addr; + sljit_ins *inst = (sljit_ins *)addr; - inst[0] = (inst[0] & 0xffc00000) | ((new_addr >> 10) & 0x3fffff); - inst[1] = (inst[1] & 0xfffffc00) | (new_addr & 0x3ff); + inst[0] = (inst[0] & 0xffc00000) | ((new_target >> 10) & 0x3fffff); + inst[1] = (inst[1] & 0xfffffc00) | (new_target & 0x3ff); + inst = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset); SLJIT_CACHE_FLUSH(inst, inst + 2); } -SLJIT_API_FUNC_ATTRIBUTE void sljit_set_const(sljit_uw addr, sljit_sw new_constant) +SLJIT_API_FUNC_ATTRIBUTE void sljit_set_const(sljit_uw addr, sljit_sw new_constant, sljit_sw executable_offset) { - sljit_ins *inst = (sljit_ins*)addr; + sljit_ins *inst = (sljit_ins *)addr; inst[0] = (inst[0] & 0xffc00000) | ((new_constant >> 10) & 0x3fffff); inst[1] = (inst[1] & 0xfffffc00) | (new_constant & 0x3ff); + inst = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset); SLJIT_CACHE_FLUSH(inst, inst + 2); } Modified: tomcat/jk/trunk/native/iis/pcre/sljit/sljitNativeSPARC_common.c URL: http://svn.apache.org/viewvc/tomcat/jk/trunk/native/iis/pcre/sljit/sljitNativeSPARC_common.c?rev=1815927&r1=1815926&r2=1815927&view=diff ============================================================================== --- tomcat/jk/trunk/native/iis/pcre/sljit/sljitNativeSPARC_common.c (original) +++ tomcat/jk/trunk/native/iis/pcre/sljit/sljitNativeSPARC_common.c Tue Nov 21 14:37:37 2017 @@ -1,7 +1,7 @@ /* * Stack-less Just-In-Time compiler * - * Copyright 2009-2012 Zoltan Herczeg (hzmes...@freemail.hu). All rights reserved. + * Copyright Zoltan Herczeg (hzmes...@freemail.hu). All rights reserved. 
* * Redistribution and use in source and binary forms, with or without modification, are * permitted provided that the following conditions are met: @@ -199,7 +199,7 @@ static sljit_s32 push_inst(struct sljit_ return SLJIT_SUCCESS; } -static SLJIT_INLINE sljit_ins* detect_jump_type(struct sljit_jump *jump, sljit_ins *code_ptr, sljit_ins *code) +static SLJIT_INLINE sljit_ins* detect_jump_type(struct sljit_jump *jump, sljit_ins *code_ptr, sljit_ins *code, sljit_sw executable_offset) { sljit_sw diff; sljit_uw target_addr; @@ -213,7 +213,7 @@ static SLJIT_INLINE sljit_ins* detect_ju target_addr = jump->u.target; else { SLJIT_ASSERT(jump->flags & JUMP_LABEL); - target_addr = (sljit_uw)(code + jump->u.label->size); + target_addr = (sljit_uw)(code + jump->u.label->size) + (sljit_uw)executable_offset; } inst = (sljit_ins*)jump->addr; @@ -239,8 +239,9 @@ static SLJIT_INLINE sljit_ins* detect_ju if (jump->flags & IS_COND) inst--; + diff = ((sljit_sw)target_addr - (sljit_sw)(inst - 1) - executable_offset) >> 2; + if (jump->flags & IS_MOVABLE) { - diff = ((sljit_sw)target_addr - (sljit_sw)(inst - 1)) >> 2; if (diff <= MAX_DISP && diff >= MIN_DISP) { jump->flags |= PATCH_B; inst--; @@ -257,7 +258,8 @@ static SLJIT_INLINE sljit_ins* detect_ju } } - diff = ((sljit_sw)target_addr - (sljit_sw)(inst)) >> 2; + diff += sizeof(sljit_ins); + if (diff <= MAX_DISP && diff >= MIN_DISP) { jump->flags |= PATCH_B; if (jump->flags & IS_COND) @@ -280,6 +282,7 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_gen sljit_ins *buf_ptr; sljit_ins *buf_end; sljit_uw word_count; + sljit_sw executable_offset; sljit_uw addr; struct sljit_label *label; @@ -296,9 +299,12 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_gen code_ptr = code; word_count = 0; + executable_offset = SLJIT_EXEC_OFFSET(code); + label = compiler->labels; jump = compiler->jumps; const_ = compiler->consts; + do { buf_ptr = (sljit_ins*)buf->memory; buf_end = buf_ptr + (buf->used_size >> 2); @@ -310,7 +316,7 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_gen /* These structures are ordered by their address. */ if (label && label->size == word_count) { /* Just recording the address. */ - label->addr = (sljit_uw)code_ptr; + label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset); label->size = code_ptr - code; label = label->next; } @@ -320,7 +326,7 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_gen #else jump->addr = (sljit_uw)(code_ptr - 6); #endif - code_ptr = detect_jump_type(jump, code_ptr, code); + code_ptr = detect_jump_type(jump, code_ptr, code, executable_offset); jump = jump->next; } if (const_ && const_->addr == word_count) { @@ -336,7 +342,7 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_gen } while (buf); if (label && label->size == word_count) { - label->addr = (sljit_uw)code_ptr; + label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset); label->size = code_ptr - code; label = label->next; } @@ -350,16 +356,16 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_gen while (jump) { do { addr = (jump->flags & JUMP_LABEL) ? 
jump->u.label->addr : jump->u.target; - buf_ptr = (sljit_ins*)jump->addr; + buf_ptr = (sljit_ins *)jump->addr; if (jump->flags & PATCH_CALL) { - addr = (sljit_sw)(addr - jump->addr) >> 2; + addr = (sljit_sw)(addr - (sljit_uw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset)) >> 2; SLJIT_ASSERT((sljit_sw)addr <= 0x1fffffff && (sljit_sw)addr >= -0x20000000); buf_ptr[0] = CALL | (addr & 0x3fffffff); break; } if (jump->flags & PATCH_B) { - addr = (sljit_sw)(addr - jump->addr) >> 2; + addr = (sljit_sw)(addr - (sljit_uw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset)) >> 2; SLJIT_ASSERT((sljit_sw)addr <= MAX_DISP && (sljit_sw)addr >= MIN_DISP); buf_ptr[0] = (buf_ptr[0] & ~DISP_MASK) | (addr & DISP_MASK); break; @@ -378,11 +384,37 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_gen compiler->error = SLJIT_ERR_COMPILED; + compiler->executable_offset = executable_offset; compiler->executable_size = (code_ptr - code) * sizeof(sljit_ins); + + code = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(code, executable_offset); + code_ptr = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset); + SLJIT_CACHE_FLUSH(code, code_ptr); return code; } +SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_has_cpu_feature(sljit_s32 feature_type) +{ + switch (feature_type) { + case SLJIT_HAS_FPU: +#ifdef SLJIT_IS_FPU_AVAILABLE + return SLJIT_IS_FPU_AVAILABLE; +#else + /* Available by default. */ + return 1; +#endif + +#if (defined SLJIT_CONFIG_SPARC_64 && SLJIT_CONFIG_SPARC_64) + case SLJIT_HAS_CMOV: + return 1; +#endif + + default: + return 0; + } +} + /* --------------------------------------------------------------------- */ /* Entry, exit */ /* --------------------------------------------------------------------- */ @@ -567,7 +599,6 @@ static sljit_s32 getput_arg(struct sljit base = arg & REG_MASK; if (SLJIT_UNLIKELY(arg & OFFS_REG_MASK)) { argw &= 0x3; - SLJIT_ASSERT(argw != 0); /* Using the cache. */ if (((SLJIT_MEM | (arg & OFFS_REG_MASK)) == compiler->cache_arg) && (argw == compiler->cache_argw)) @@ -652,18 +683,16 @@ static sljit_s32 emit_op(struct sljit_co compiler->cache_argw = 0; } - if (SLJIT_UNLIKELY(dst == SLJIT_UNUSED)) { - if (op >= SLJIT_MOV && op <= SLJIT_MOVU_S32 && !(src2 & SLJIT_MEM)) - return SLJIT_SUCCESS; - } - else if (FAST_IS_REG(dst)) { - dst_r = dst; - flags |= REG_DEST; - if (op >= SLJIT_MOV && op <= SLJIT_MOVU_S32) - sugg_src2_r = dst_r; + if (dst != SLJIT_UNUSED) { + if (FAST_IS_REG(dst)) { + dst_r = dst; + flags |= REG_DEST; + if (op >= SLJIT_MOV && op <= SLJIT_MOVU_S32) + sugg_src2_r = dst_r; + } + else if ((dst & SLJIT_MEM) && !getput_arg_fast(compiler, flags | ARG_TEST, TMP_REG1, dst, dstw)) + flags |= SLOW_DEST; } - else if ((dst & SLJIT_MEM) && !getput_arg_fast(compiler, flags | ARG_TEST, TMP_REG1, dst, dstw)) - flags |= SLOW_DEST; if (flags & IMM_OP) { if ((src2 & SLJIT_IMM) && src2w) { @@ -812,13 +841,16 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit sljit_s32 dst, sljit_sw dstw, sljit_s32 src, sljit_sw srcw) { - sljit_s32 flags = GET_FLAGS(op) ? SET_FLAGS : 0; + sljit_s32 flags = HAS_FLAGS(op) ? SET_FLAGS : 0; CHECK_ERROR(); CHECK(check_sljit_emit_op1(compiler, op, dst, dstw, src, srcw)); ADJUST_LOCAL_OFFSET(dst, dstw); ADJUST_LOCAL_OFFSET(src, srcw); + if (dst == SLJIT_UNUSED && !HAS_FLAGS(op)) + return SLJIT_SUCCESS; + op = GET_OPCODE(op); switch (op) { case SLJIT_MOV: @@ -881,7 +913,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit sljit_s32 src1, sljit_sw src1w, sljit_s32 src2, sljit_sw src2w) { - sljit_s32 flags = GET_FLAGS(op) ? SET_FLAGS : 0; + sljit_s32 flags = HAS_FLAGS(op) ? 
SET_FLAGS : 0; CHECK_ERROR(); CHECK(check_sljit_emit_op2(compiler, op, dst, dstw, src1, src1w, src2, src2w)); @@ -889,6 +921,9 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit ADJUST_LOCAL_OFFSET(src1, src1w); ADJUST_LOCAL_OFFSET(src2, src2w); + if (dst == SLJIT_UNUSED && !HAS_FLAGS(op)) + return SLJIT_SUCCESS; + op = GET_OPCODE(op); switch (op) { case SLJIT_ADD: @@ -910,7 +945,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit if (src2 & SLJIT_IMM) src2w &= 0x1f; #else - SLJIT_ASSERT_STOP(); + SLJIT_UNREACHABLE(); #endif return emit_op(compiler, op, flags | IMM_OP, dst, dstw, src1, src1w, src2, src2w); } @@ -943,16 +978,6 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit /* Floating point operators */ /* --------------------------------------------------------------------- */ -SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_is_fpu_available(void) -{ -#ifdef SLJIT_IS_FPU_AVAILABLE - return SLJIT_IS_FPU_AVAILABLE; -#else - /* Available by default. */ - return 1; -#endif -} - #define FLOAT_DATA(op) (DOUBLE_DATA | ((op & SLJIT_F32_OP) >> 7)) #define SELECT_FOP(op, single, double) ((op & SLJIT_F32_OP) ? single : double) #define FLOAT_TMP_MEM_OFFSET (22 * sizeof(sljit_sw)) @@ -970,9 +995,6 @@ static SLJIT_INLINE sljit_s32 sljit_emit FAIL_IF(push_inst(compiler, SELECT_FOP(op, FSTOI, FDTOI) | DA(TMP_FREG1) | S2A(src), MOVABLE_INS)); - if (dst == SLJIT_UNUSED) - return SLJIT_SUCCESS; - if (FAST_IS_REG(dst)) { FAIL_IF(emit_op_mem2(compiler, SINGLE_DATA, TMP_FREG1, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET)); return emit_op_mem2(compiler, WORD_DATA | LOAD_DATA, dst, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET); @@ -1186,10 +1208,6 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit CHECK(check_sljit_emit_fast_enter(compiler, dst, dstw)); ADJUST_LOCAL_OFFSET(dst, dstw); - /* For UNUSED dst. Uncommon, but possible. */ - if (dst == SLJIT_UNUSED) - return SLJIT_SUCCESS; - if (FAST_IS_REG(dst)) return push_inst(compiler, OR | D(dst) | S1(0) | S2(TMP_LINK), DR(dst)); @@ -1285,7 +1303,7 @@ static sljit_ins get_cc(sljit_s32 type) return DA(0xf); default: - SLJIT_ASSERT_STOP(); + SLJIT_UNREACHABLE(); return DA(0x8); } } @@ -1373,30 +1391,23 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 dst, sljit_sw dstw, - sljit_s32 src, sljit_sw srcw, sljit_s32 type) { - sljit_s32 reg, flags = (GET_FLAGS(op) ? SET_FLAGS : 0); + sljit_s32 reg, flags = HAS_FLAGS(op) ? SET_FLAGS : 0; CHECK_ERROR(); - CHECK(check_sljit_emit_op_flags(compiler, op, dst, dstw, src, srcw, type)); + CHECK(check_sljit_emit_op_flags(compiler, op, dst, dstw, type)); ADJUST_LOCAL_OFFSET(dst, dstw); - if (dst == SLJIT_UNUSED) - return SLJIT_SUCCESS; - #if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32) op = GET_OPCODE(op); reg = (op < SLJIT_ADD && FAST_IS_REG(dst)) ? 
dst : TMP_REG2; compiler->cache_arg = 0; compiler->cache_argw = 0; - if (op >= SLJIT_ADD && (src & SLJIT_MEM)) { - ADJUST_LOCAL_OFFSET(src, srcw); - FAIL_IF(emit_op_mem2(compiler, WORD_DATA | LOAD_DATA, TMP_REG1, src, srcw, dst, dstw)); - src = TMP_REG1; - srcw = 0; - } + + if (op >= SLJIT_ADD && (dst & SLJIT_MEM)) + FAIL_IF(emit_op_mem2(compiler, WORD_DATA | LOAD_DATA, TMP_REG1, dst, dstw, dst, dstw)); type &= 0xff; if (type < SLJIT_EQUAL_F64) @@ -1407,10 +1418,31 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit FAIL_IF(push_inst(compiler, OR | D(reg) | S1(0) | IMM(1), UNMOVABLE_INS)); FAIL_IF(push_inst(compiler, OR | D(reg) | S1(0) | IMM(0), UNMOVABLE_INS)); - if (op >= SLJIT_ADD) - return emit_op(compiler, op, flags | CUMULATIVE_OP | IMM_OP | ALT_KEEP_CACHE, dst, dstw, src, srcw, TMP_REG2, 0); + if (op >= SLJIT_ADD) { + flags |= CUMULATIVE_OP | IMM_OP | ALT_KEEP_CACHE; + if (dst & SLJIT_MEM) + return emit_op(compiler, op, flags, dst, dstw, TMP_REG1, 0, TMP_REG2, 0); + return emit_op(compiler, op, flags, dst, 0, dst, 0, TMP_REG2, 0); + } + + if (!(dst & SLJIT_MEM)) + return SLJIT_SUCCESS; + + return emit_op_mem(compiler, WORD_DATA, TMP_REG2, dst, dstw); +#else +#error "Implementation required" +#endif +} + +SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_cmov(struct sljit_compiler *compiler, sljit_s32 type, + sljit_s32 dst_reg, + sljit_s32 src, sljit_sw srcw) +{ + CHECK_ERROR(); + CHECK(check_sljit_emit_cmov(compiler, type, dst_reg, src, srcw)); - return (reg == TMP_REG2) ? emit_op_mem(compiler, WORD_DATA, TMP_REG2, dst, dstw) : SLJIT_SUCCESS; +#if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32) + return sljit_emit_cmov_generic(compiler, type, dst_reg, src, srcw);; #else #error "Implementation required" #endif @@ -1429,7 +1461,7 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_co PTR_FAIL_IF(!const_); set_const(const_, compiler); - reg = SLOW_IS_REG(dst) ? dst : TMP_REG2; + reg = FAST_IS_REG(dst) ? dst : TMP_REG2; PTR_FAIL_IF(emit_const(compiler, reg, init_value)); Modified: tomcat/jk/trunk/native/iis/pcre/sljit/sljitNativeTILEGX-encoder.c URL: http://svn.apache.org/viewvc/tomcat/jk/trunk/native/iis/pcre/sljit/sljitNativeTILEGX-encoder.c?rev=1815927&r1=1815926&r2=1815927&view=diff ============================================================================== --- tomcat/jk/trunk/native/iis/pcre/sljit/sljitNativeTILEGX-encoder.c (original) +++ tomcat/jk/trunk/native/iis/pcre/sljit/sljitNativeTILEGX-encoder.c Tue Nov 21 14:37:37 2017 @@ -2,7 +2,7 @@ * Stack-less Just-In-Time compiler * * Copyright 2013-2013 Tilera Corporation(jiw...@tilera.com). All rights reserved. - * Copyright 2009-2012 Zoltan Herczeg (hzmes...@freemail.hu). All rights reserved. + * Copyright Zoltan Herczeg (hzmes...@freemail.hu). All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, are * permitted provided that the following conditions are met: Modified: tomcat/jk/trunk/native/iis/pcre/sljit/sljitNativeTILEGX_64.c URL: http://svn.apache.org/viewvc/tomcat/jk/trunk/native/iis/pcre/sljit/sljitNativeTILEGX_64.c?rev=1815927&r1=1815926&r2=1815927&view=diff ============================================================================== --- tomcat/jk/trunk/native/iis/pcre/sljit/sljitNativeTILEGX_64.c (original) +++ tomcat/jk/trunk/native/iis/pcre/sljit/sljitNativeTILEGX_64.c Tue Nov 21 14:37:37 2017 @@ -2,7 +2,7 @@ * Stack-less Just-In-Time compiler * * Copyright 2013-2013 Tilera Corporation(jiw...@tilera.com). All rights reserved. 
- * Copyright 2009-2012 Zoltan Herczeg (hzmes...@freemail.hu). All rights reserved. + * Copyright Zoltan Herczeg (hzmes...@freemail.hu). All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, are * permitted provided that the following conditions are met: @@ -687,7 +687,7 @@ static sljit_s32 update_buffer(struct sl inst_buf[0] = inst1; inst_buf_index = 1; } else - SLJIT_ASSERT_STOP(); + SLJIT_UNREACHABLE(); #ifdef TILEGX_JIT_DEBUG return push_inst_nodebug(compiler, bits); @@ -727,10 +727,10 @@ static sljit_s32 update_buffer(struct sl return push_inst(compiler, bits); #endif } else - SLJIT_ASSERT_STOP(); + SLJIT_UNREACHABLE(); } - SLJIT_ASSERT_STOP(); + SLJIT_UNREACHABLE(); } static sljit_s32 flush_buffer(struct sljit_compiler *compiler) @@ -814,7 +814,7 @@ static sljit_s32 push_3_buffer(struct sl break; default: printf("unrecoginzed opc: %s\n", opcode->name); - SLJIT_ASSERT_STOP(); + SLJIT_UNREACHABLE(); } inst_buf_index++; @@ -859,7 +859,7 @@ static sljit_s32 push_2_buffer(struct sl break; default: printf("unrecoginzed opc: %s\n", opcode->name); - SLJIT_ASSERT_STOP(); + SLJIT_UNREACHABLE(); } inst_buf_index++; @@ -1952,7 +1952,7 @@ static SLJIT_INLINE sljit_s32 emit_singl return SLJIT_SUCCESS; } - SLJIT_ASSERT_STOP(); + SLJIT_UNREACHABLE(); return SLJIT_SUCCESS; } @@ -2092,9 +2092,6 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit CHECK(check_sljit_emit_op_flags(compiler, op, dst, dstw, src, srcw, type)); ADJUST_LOCAL_OFFSET(dst, dstw); - if (dst == SLJIT_UNUSED) - return SLJIT_SUCCESS; - op = GET_OPCODE(op); if (op == SLJIT_MOV_S32 || op == SLJIT_MOV_U32) mem_type = INT_DATA | SIGNED_DATA; @@ -2143,7 +2140,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit break; default: - SLJIT_ASSERT_STOP(); + SLJIT_UNREACHABLE(); dst_ar = sugg_dst_ar; break; } @@ -2186,7 +2183,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit case SLJIT_DIVMOD_SW: case SLJIT_DIV_UW: case SLJIT_DIV_SW: - SLJIT_ASSERT_STOP(); + SLJIT_UNREACHABLE(); } return SLJIT_SUCCESS; @@ -2487,19 +2484,14 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_ju return jump; } -SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_is_fpu_available(void) -{ - return 0; -} - SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 dst, sljit_sw dstw, sljit_s32 src, sljit_sw srcw) { - SLJIT_ASSERT_STOP(); + SLJIT_UNREACHABLE(); } SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 dst, sljit_sw dstw, sljit_s32 src1, sljit_sw src1w, sljit_s32 src2, sljit_sw src2w) { - SLJIT_ASSERT_STOP(); + SLJIT_UNREACHABLE(); } SLJIT_API_FUNC_ATTRIBUTE struct sljit_const * sljit_emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw init_value) @@ -2526,13 +2518,13 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_co return const_; } -SLJIT_API_FUNC_ATTRIBUTE void sljit_set_jump_addr(sljit_uw addr, sljit_uw new_addr) +SLJIT_API_FUNC_ATTRIBUTE void sljit_set_jump_addr(sljit_uw addr, sljit_uw new_target) { sljit_ins *inst = (sljit_ins *)addr; - inst[0] = (inst[0] & ~(0xFFFFL << 43)) | (((new_addr >> 32) & 0xffff) << 43); - inst[1] = (inst[1] & ~(0xFFFFL << 43)) | (((new_addr >> 16) & 0xffff) << 43); - inst[2] = (inst[2] & ~(0xFFFFL << 43)) | ((new_addr & 0xffff) << 43); + inst[0] = (inst[0] & ~(0xFFFFL << 43)) | (((new_target >> 32) & 0xffff) << 43); + inst[1] = (inst[1] & ~(0xFFFFL << 43)) | (((new_target >> 16) & 0xffff) << 43); + inst[2] = (inst[2] & ~(0xFFFFL << 43)) | ((new_target & 0xffff) << 43); 
SLJIT_CACHE_FLUSH(inst, inst + 3); } Modified: tomcat/jk/trunk/native/iis/pcre/sljit/sljitNativeX86_32.c URL: http://svn.apache.org/viewvc/tomcat/jk/trunk/native/iis/pcre/sljit/sljitNativeX86_32.c?rev=1815927&r1=1815926&r2=1815927&view=diff ============================================================================== --- tomcat/jk/trunk/native/iis/pcre/sljit/sljitNativeX86_32.c (original) +++ tomcat/jk/trunk/native/iis/pcre/sljit/sljitNativeX86_32.c Tue Nov 21 14:37:37 2017 @@ -1,7 +1,7 @@ /* * Stack-less Just-In-Time compiler * - * Copyright 2009-2012 Zoltan Herczeg (hzmes...@freemail.hu). All rights reserved. + * Copyright Zoltan Herczeg (hzmes...@freemail.hu). All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, are * permitted provided that the following conditions are met: @@ -34,11 +34,11 @@ static sljit_s32 emit_do_imm(struct slji FAIL_IF(!inst); INC_SIZE(1 + sizeof(sljit_sw)); *inst++ = opcode; - *(sljit_sw*)inst = imm; + sljit_unaligned_store_sw(inst, imm); return SLJIT_SUCCESS; } -static sljit_u8* generate_far_jump_code(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_s32 type) +static sljit_u8* generate_far_jump_code(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_s32 type, sljit_sw executable_offset) { if (type == SLJIT_JUMP) { *code_ptr++ = JMP_i32; @@ -57,7 +57,7 @@ static sljit_u8* generate_far_jump_code( if (jump->flags & JUMP_LABEL) jump->flags |= PATCH_MW; else - *(sljit_sw*)code_ptr = jump->u.target - (jump->addr + 4); + sljit_unaligned_store_sw(code_ptr, jump->u.target - (jump->addr + 4) - (sljit_uw)executable_offset); code_ptr += 4; return code_ptr; @@ -75,9 +75,30 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit set_emit_enter(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size); compiler->args = args; - compiler->flags_saved = 0; - size = 1 + (scratches > 7 ? (scratches - 7) : 0) + (saveds <= 3 ? saveds : 3); +#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL) + /* [esp+0] for saving temporaries and third argument for calls. */ + compiler->saveds_offset = 1 * sizeof(sljit_sw); +#else + /* [esp+0] for saving temporaries and space for maximum three arguments. */ + if (scratches <= 1) + compiler->saveds_offset = 1 * sizeof(sljit_sw); + else + compiler->saveds_offset = ((scratches == 2) ? 2 : 3) * sizeof(sljit_sw); +#endif + + if (scratches > 3) + compiler->saveds_offset += ((scratches > (3 + 6)) ? 6 : (scratches - 3)) * sizeof(sljit_sw); + + compiler->locals_offset = compiler->saveds_offset; + + if (saveds > 3) + compiler->locals_offset += (saveds - 3) * sizeof(sljit_sw); + + if (options & SLJIT_F64_ALIGNMENT) + compiler->locals_offset = (compiler->locals_offset + sizeof(sljit_f64) - 1) & ~(sizeof(sljit_f64) - 1); + + size = 1 + (scratches > 9 ? (scratches - 9) : 0) + (saveds <= 3 ? saveds : 3); #if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL) size += (args > 0 ? (args * 2) : 0) + (args > 2 ? 
2 : 0); #else @@ -94,11 +115,11 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit *inst++ = MOD_REG | (reg_map[TMP_REG1] << 3) | 0x4 /* esp */; } #endif - if (saveds > 2 || scratches > 7) + if (saveds > 2 || scratches > 9) PUSH_REG(reg_map[SLJIT_S2]); - if (saveds > 1 || scratches > 8) + if (saveds > 1 || scratches > 10) PUSH_REG(reg_map[SLJIT_S1]); - if (saveds > 0 || scratches > 9) + if (saveds > 0 || scratches > 11) PUSH_REG(reg_map[SLJIT_S0]); #if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL) @@ -134,51 +155,64 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit } #endif - SLJIT_COMPILE_ASSERT(SLJIT_LOCALS_OFFSET >= (2 + 4) * sizeof(sljit_uw), require_at_least_two_words); + SLJIT_ASSERT(SLJIT_LOCALS_OFFSET > 0); + #if defined(__APPLE__) /* Ignore pushed registers and SLJIT_LOCALS_OFFSET when computing the aligned local size. */ - saveds = (2 + (scratches > 7 ? (scratches - 7) : 0) + (saveds <= 3 ? saveds : 3)) * sizeof(sljit_uw); + saveds = (2 + (scratches > 9 ? (scratches - 9) : 0) + (saveds <= 3 ? saveds : 3)) * sizeof(sljit_uw); local_size = ((SLJIT_LOCALS_OFFSET + saveds + local_size + 15) & ~15) - saveds; #else - if (options & SLJIT_DOUBLE_ALIGNMENT) { - local_size = SLJIT_LOCALS_OFFSET + ((local_size + 7) & ~7); - - inst = (sljit_u8*)ensure_buf(compiler, 1 + 17); - FAIL_IF(!inst); - - INC_SIZE(17); - inst[0] = MOV_r_rm; - inst[1] = MOD_REG | (reg_map[TMP_REG1] << 3) | reg_map[SLJIT_SP]; - inst[2] = GROUP_F7; - inst[3] = MOD_REG | (0 << 3) | reg_map[SLJIT_SP]; - *(sljit_sw*)(inst + 4) = 0x4; - inst[8] = JNE_i8; - inst[9] = 6; - inst[10] = GROUP_BINARY_81; - inst[11] = MOD_REG | (5 << 3) | reg_map[SLJIT_SP]; - *(sljit_sw*)(inst + 12) = 0x4; - inst[16] = PUSH_r + reg_map[TMP_REG1]; - } + if (options & SLJIT_F64_ALIGNMENT) + local_size = SLJIT_LOCALS_OFFSET + ((local_size + sizeof(sljit_f64) - 1) & ~(sizeof(sljit_f64) - 1)); else - local_size = SLJIT_LOCALS_OFFSET + ((local_size + 3) & ~3); + local_size = SLJIT_LOCALS_OFFSET + ((local_size + sizeof(sljit_sw) - 1) & ~(sizeof(sljit_sw) - 1)); #endif compiler->local_size = local_size; + #ifdef _WIN32 if (local_size > 1024) { #if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL) FAIL_IF(emit_do_imm(compiler, MOV_r_i32 + reg_map[SLJIT_R0], local_size)); #else - local_size -= SLJIT_LOCALS_OFFSET; + /* Space for a single argument. This amount is excluded when the stack is allocated below. */ + local_size -= sizeof(sljit_sw); FAIL_IF(emit_do_imm(compiler, MOV_r_i32 + reg_map[SLJIT_R0], local_size)); FAIL_IF(emit_non_cum_binary(compiler, SUB_r_rm, SUB_rm_r, SUB, SUB_EAX_i32, - SLJIT_SP, 0, SLJIT_SP, 0, SLJIT_IMM, SLJIT_LOCALS_OFFSET)); + SLJIT_SP, 0, SLJIT_SP, 0, SLJIT_IMM, sizeof(sljit_sw))); #endif FAIL_IF(sljit_emit_ijump(compiler, SLJIT_CALL1, SLJIT_IMM, SLJIT_FUNC_OFFSET(sljit_grow_stack))); } #endif SLJIT_ASSERT(local_size > 0); + +#if !defined(__APPLE__) + if (options & SLJIT_F64_ALIGNMENT) { + EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_SP, 0); + + /* Some space might allocated during sljit_grow_stack() above on WIN32. 
*/ + FAIL_IF(emit_non_cum_binary(compiler, SUB_r_rm, SUB_rm_r, SUB, SUB_EAX_i32, + SLJIT_SP, 0, SLJIT_SP, 0, SLJIT_IMM, local_size + sizeof(sljit_sw))); + +#if defined _WIN32 && !(defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL) + if (compiler->local_size > 1024) + FAIL_IF(emit_cum_binary(compiler, ADD_r_rm, ADD_rm_r, ADD, ADD_EAX_i32, + TMP_REG1, 0, TMP_REG1, 0, SLJIT_IMM, sizeof(sljit_sw))); +#endif + + inst = (sljit_u8*)ensure_buf(compiler, 1 + 6); + FAIL_IF(!inst); + + INC_SIZE(6); + inst[0] = GROUP_BINARY_81; + inst[1] = MOD_REG | AND | reg_map[SLJIT_SP]; + sljit_unaligned_store_sw(inst + 2, ~(sizeof(sljit_f64) - 1)); + + /* The real local size must be used. */ + return emit_mov(compiler, SLJIT_MEM1(SLJIT_SP), compiler->local_size, TMP_REG1, 0); + } +#endif return emit_non_cum_binary(compiler, SUB_r_rm, SUB_rm_r, SUB, SUB_EAX_i32, SLJIT_SP, 0, SLJIT_SP, 0, SLJIT_IMM, local_size); } @@ -193,14 +227,36 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit compiler->args = args; +#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL) + /* [esp+0] for saving temporaries and third argument for calls. */ + compiler->saveds_offset = 1 * sizeof(sljit_sw); +#else + /* [esp+0] for saving temporaries and space for maximum three arguments. */ + if (scratches <= 1) + compiler->saveds_offset = 1 * sizeof(sljit_sw); + else + compiler->saveds_offset = ((scratches == 2) ? 2 : 3) * sizeof(sljit_sw); +#endif + + if (scratches > 3) + compiler->saveds_offset += ((scratches > (3 + 6)) ? 6 : (scratches - 3)) * sizeof(sljit_sw); + + compiler->locals_offset = compiler->saveds_offset; + + if (saveds > 3) + compiler->locals_offset += (saveds - 3) * sizeof(sljit_sw); + + if (options & SLJIT_F64_ALIGNMENT) + compiler->locals_offset = (compiler->locals_offset + sizeof(sljit_f64) - 1) & ~(sizeof(sljit_f64) - 1); + #if defined(__APPLE__) - saveds = (2 + (scratches > 7 ? (scratches - 7) : 0) + (saveds <= 3 ? saveds : 3)) * sizeof(sljit_uw); + saveds = (2 + (scratches > 9 ? (scratches - 9) : 0) + (saveds <= 3 ? 
saveds : 3)) * sizeof(sljit_uw); compiler->local_size = ((SLJIT_LOCALS_OFFSET + saveds + local_size + 15) & ~15) - saveds; #else - if (options & SLJIT_DOUBLE_ALIGNMENT) - compiler->local_size = SLJIT_LOCALS_OFFSET + ((local_size + 7) & ~7); + if (options & SLJIT_F64_ALIGNMENT) + compiler->local_size = SLJIT_LOCALS_OFFSET + ((local_size + sizeof(sljit_f64) - 1) & ~(sizeof(sljit_f64) - 1)); else - compiler->local_size = SLJIT_LOCALS_OFFSET + ((local_size + 3) & ~3); + compiler->local_size = SLJIT_LOCALS_OFFSET + ((local_size + sizeof(sljit_sw) - 1) & ~(sizeof(sljit_sw) - 1)); #endif return SLJIT_SUCCESS; } @@ -214,23 +270,19 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit CHECK(check_sljit_emit_return(compiler, op, src, srcw)); SLJIT_ASSERT(compiler->args >= 0); - compiler->flags_saved = 0; FAIL_IF(emit_mov_before_return(compiler, op, src, srcw)); SLJIT_ASSERT(compiler->local_size > 0); - FAIL_IF(emit_cum_binary(compiler, ADD_r_rm, ADD_rm_r, ADD, ADD_EAX_i32, - SLJIT_SP, 0, SLJIT_SP, 0, SLJIT_IMM, compiler->local_size)); #if !defined(__APPLE__) - if (compiler->options & SLJIT_DOUBLE_ALIGNMENT) { - inst = (sljit_u8*)ensure_buf(compiler, 1 + 3); - FAIL_IF(!inst); - - INC_SIZE(3); - inst[0] = MOV_r_rm; - inst[1] = (reg_map[SLJIT_SP] << 3) | 0x4 /* SIB */; - inst[2] = (4 << 3) | reg_map[SLJIT_SP]; - } + if (compiler->options & SLJIT_F64_ALIGNMENT) + EMIT_MOV(compiler, SLJIT_SP, 0, SLJIT_MEM1(SLJIT_SP), compiler->local_size) + else + FAIL_IF(emit_cum_binary(compiler, ADD_r_rm, ADD_rm_r, ADD, ADD_EAX_i32, + SLJIT_SP, 0, SLJIT_SP, 0, SLJIT_IMM, compiler->local_size)); +#else + FAIL_IF(emit_cum_binary(compiler, ADD_r_rm, ADD_rm_r, ADD, ADD_EAX_i32, + SLJIT_SP, 0, SLJIT_SP, 0, SLJIT_IMM, compiler->local_size)); #endif size = 2 + (compiler->scratches > 7 ? (compiler->scratches - 7) : 0) + @@ -247,11 +299,11 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit INC_SIZE(size); - if (compiler->saveds > 0 || compiler->scratches > 9) + if (compiler->saveds > 0 || compiler->scratches > 11) POP_REG(reg_map[SLJIT_S0]); - if (compiler->saveds > 1 || compiler->scratches > 8) + if (compiler->saveds > 1 || compiler->scratches > 10) POP_REG(reg_map[SLJIT_S1]); - if (compiler->saveds > 2 || compiler->scratches > 7) + if (compiler->saveds > 2 || compiler->scratches > 9) POP_REG(reg_map[SLJIT_S2]); POP_REG(reg_map[TMP_REG1]); #if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL) @@ -406,7 +458,7 @@ static sljit_u8* emit_x86_instruction(st if (immb <= 127 && immb >= -128) *buf_ptr++ = immb; /* 8 bit displacement. */ else { - *(sljit_sw*)buf_ptr = immb; /* 32 bit displacement. */ + sljit_unaligned_store_sw(buf_ptr, immb); /* 32 bit displacement. */ buf_ptr += sizeof(sljit_sw); } } @@ -418,7 +470,7 @@ static sljit_u8* emit_x86_instruction(st } else { *buf_ptr++ |= 0x05; - *(sljit_sw*)buf_ptr = immb; /* 32 bit displacement. */ + sljit_unaligned_store_sw(buf_ptr, immb); /* 32 bit displacement. */ buf_ptr += sizeof(sljit_sw); } @@ -426,9 +478,9 @@ static sljit_u8* emit_x86_instruction(st if (flags & EX86_BYTE_ARG) *buf_ptr = imma; else if (flags & EX86_HALF_ARG) - *(short*)buf_ptr = imma; + sljit_unaligned_store_s16(buf_ptr, imma); else if (!(flags & EX86_SHIFT_INS)) - *(sljit_sw*)buf_ptr = imma; + sljit_unaligned_store_sw(buf_ptr, imma); } return !(flags & EX86_SHIFT_INS) ? 
inst : (inst + 1); @@ -541,7 +593,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit INC_SIZE(5 + 1); *inst++ = PUSH_i32; - *(sljit_sw*)inst = srcw; + sljit_unaligned_store_sw(inst, srcw); inst += sizeof(sljit_sw); } Modified: tomcat/jk/trunk/native/iis/pcre/sljit/sljitNativeX86_64.c URL: http://svn.apache.org/viewvc/tomcat/jk/trunk/native/iis/pcre/sljit/sljitNativeX86_64.c?rev=1815927&r1=1815926&r2=1815927&view=diff ============================================================================== --- tomcat/jk/trunk/native/iis/pcre/sljit/sljitNativeX86_64.c (original) +++ tomcat/jk/trunk/native/iis/pcre/sljit/sljitNativeX86_64.c Tue Nov 21 14:37:37 2017 @@ -1,7 +1,7 @@ /* * Stack-less Just-In-Time compiler * - * Copyright 2009-2012 Zoltan Herczeg (hzmes...@freemail.hu). All rights reserved. + * Copyright Zoltan Herczeg (hzmes...@freemail.hu). All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, are * permitted provided that the following conditions are met: @@ -35,7 +35,7 @@ static sljit_s32 emit_load_imm64(struct INC_SIZE(2 + sizeof(sljit_sw)); *inst++ = REX_W | ((reg_map[reg] <= 7) ? 0 : REX_B); *inst++ = MOV_r_i32 + (reg_map[reg] & 0x7); - *(sljit_sw*)inst = imm; + sljit_unaligned_store_sw(inst, imm); return SLJIT_SUCCESS; } @@ -47,42 +47,20 @@ static sljit_u8* generate_far_jump_code( *code_ptr++ = 10 + 3; } - SLJIT_COMPILE_ASSERT(reg_map[TMP_REG3] == 9, tmp3_is_9_first); - *code_ptr++ = REX_W | REX_B; - *code_ptr++ = MOV_r_i32 + 1; + *code_ptr++ = REX_W | ((reg_map[TMP_REG2] <= 7) ? 0 : REX_B); + *code_ptr++ = MOV_r_i32 | reg_lmap[TMP_REG2]; jump->addr = (sljit_uw)code_ptr; if (jump->flags & JUMP_LABEL) jump->flags |= PATCH_MD; else - *(sljit_sw*)code_ptr = jump->u.target; + sljit_unaligned_store_sw(code_ptr, jump->u.target); code_ptr += sizeof(sljit_sw); - *code_ptr++ = REX_B; - *code_ptr++ = GROUP_FF; - *code_ptr++ = (type >= SLJIT_FAST_CALL) ? (MOD_REG | CALL_rm | 1) : (MOD_REG | JMP_rm | 1); - - return code_ptr; -} - -static sljit_u8* generate_fixed_jump(sljit_u8 *code_ptr, sljit_sw addr, sljit_s32 type) -{ - sljit_sw delta = addr - ((sljit_sw)code_ptr + 1 + sizeof(sljit_s32)); - - if (delta <= HALFWORD_MAX && delta >= HALFWORD_MIN) { - *code_ptr++ = (type == 2) ? CALL_i32 : JMP_i32; - *(sljit_sw*)code_ptr = delta; - } - else { - SLJIT_COMPILE_ASSERT(reg_map[TMP_REG3] == 9, tmp3_is_9_second); - *code_ptr++ = REX_W | REX_B; - *code_ptr++ = MOV_r_i32 + 1; - *(sljit_sw*)code_ptr = addr; - code_ptr += sizeof(sljit_sw); + if (reg_map[TMP_REG2] >= 8) *code_ptr++ = REX_B; - *code_ptr++ = GROUP_FF; - *code_ptr++ = (type == 2) ? (MOD_REG | CALL_rm | 1) : (MOD_REG | JMP_rm | 1); - } + *code_ptr++ = GROUP_FF; + *code_ptr++ = MOD_REG | (type >= SLJIT_FAST_CALL ? CALL_rm : JMP_rm) | reg_lmap[TMP_REG2]; return code_ptr; } @@ -98,7 +76,13 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit CHECK(check_sljit_emit_enter(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size)); set_emit_enter(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size); - compiler->flags_saved = 0; +#ifdef _WIN64 + /* Two/four register slots for parameters plus space for xmm6 register if needed. */ + if (fscratches >= 6 || fsaveds >= 1) + compiler->locals_offset = 6 * sizeof(sljit_sw); + else + compiler->locals_offset = ((scratches > 2) ? 4 : 2) * sizeof(sljit_sw); +#endif /* Including the return address saved by the call instruction. 
*/ saved_register_size = GET_SAVED_REGISTERS_SIZE(scratches, saveds, 1); @@ -177,7 +161,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit INC_SIZE(4 + (3 + sizeof(sljit_s32))); *inst++ = REX_W; *inst++ = GROUP_BINARY_83; - *inst++ = MOD_REG | SUB | 4; + *inst++ = MOD_REG | SUB | reg_map[SLJIT_SP]; /* Allocated size for registers must be divisible by 8. */ SLJIT_ASSERT(!(saved_register_size & 0x7)); /* Aligned to 16 byte. */ @@ -189,11 +173,11 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit local_size -= 4 * sizeof(sljit_sw); } /* Second instruction */ - SLJIT_COMPILE_ASSERT(reg_map[SLJIT_R0] < 8, temporary_reg1_is_loreg); + SLJIT_ASSERT(reg_map[SLJIT_R0] < 8); *inst++ = REX_W; *inst++ = MOV_rm_i32; *inst++ = MOD_REG | reg_lmap[SLJIT_R0]; - *(sljit_s32*)inst = local_size; + sljit_unaligned_store_s32(inst, local_size); #if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) \ || (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS) compiler->skip_checks = 1; @@ -202,25 +186,26 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit } #endif - SLJIT_ASSERT(local_size > 0); - if (local_size <= 127) { - inst = (sljit_u8*)ensure_buf(compiler, 1 + 4); - FAIL_IF(!inst); - INC_SIZE(4); - *inst++ = REX_W; - *inst++ = GROUP_BINARY_83; - *inst++ = MOD_REG | SUB | 4; - *inst++ = local_size; - } - else { - inst = (sljit_u8*)ensure_buf(compiler, 1 + 7); - FAIL_IF(!inst); - INC_SIZE(7); - *inst++ = REX_W; - *inst++ = GROUP_BINARY_81; - *inst++ = MOD_REG | SUB | 4; - *(sljit_s32*)inst = local_size; - inst += sizeof(sljit_s32); + if (local_size > 0) { + if (local_size <= 127) { + inst = (sljit_u8*)ensure_buf(compiler, 1 + 4); + FAIL_IF(!inst); + INC_SIZE(4); + *inst++ = REX_W; + *inst++ = GROUP_BINARY_83; + *inst++ = MOD_REG | SUB | reg_map[SLJIT_SP]; + *inst++ = local_size; + } + else { + inst = (sljit_u8*)ensure_buf(compiler, 1 + 7); + FAIL_IF(!inst); + INC_SIZE(7); + *inst++ = REX_W; + *inst++ = GROUP_BINARY_81; + *inst++ = MOD_REG | SUB | reg_map[SLJIT_SP]; + sljit_unaligned_store_s32(inst, local_size); + inst += sizeof(sljit_s32); + } } #ifdef _WIN64 @@ -230,7 +215,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit FAIL_IF(!inst); INC_SIZE(5); *inst++ = GROUP_0F; - *(sljit_s32*)inst = 0x20247429; + sljit_unaligned_store_s32(inst, 0x20247429); } #endif @@ -247,6 +232,14 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit CHECK(check_sljit_set_context(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size)); set_set_context(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size); +#ifdef _WIN64 + /* Two/four register slots for parameters plus space for xmm6 register if needed. */ + if (fscratches >= 6 || fsaveds >= 1) + compiler->locals_offset = 6 * sizeof(sljit_sw); + else + compiler->locals_offset = ((scratches > 2) ? 4 : 2) * sizeof(sljit_sw); +#endif + /* Including the return address saved by the call instruction. 
*/ saved_register_size = GET_SAVED_REGISTERS_SIZE(scratches, saveds, 1); compiler->local_size = ((local_size + SLJIT_LOCALS_OFFSET + saved_register_size + 15) & ~15) - saved_register_size; @@ -261,7 +254,6 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit CHECK_ERROR(); CHECK(check_sljit_emit_return(compiler, op, src, srcw)); - compiler->flags_saved = 0; FAIL_IF(emit_mov_before_return(compiler, op, src, srcw)); #ifdef _WIN64 @@ -271,28 +263,29 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit FAIL_IF(!inst); INC_SIZE(5); *inst++ = GROUP_0F; - *(sljit_s32*)inst = 0x20247428; + sljit_unaligned_store_s32(inst, 0x20247428); } #endif - SLJIT_ASSERT(compiler->local_size > 0); - if (compiler->local_size <= 127) { - inst = (sljit_u8*)ensure_buf(compiler, 1 + 4); - FAIL_IF(!inst); - INC_SIZE(4); - *inst++ = REX_W; - *inst++ = GROUP_BINARY_83; - *inst++ = MOD_REG | ADD | 4; - *inst = compiler->local_size; - } - else { - inst = (sljit_u8*)ensure_buf(compiler, 1 + 7); - FAIL_IF(!inst); - INC_SIZE(7); - *inst++ = REX_W; - *inst++ = GROUP_BINARY_81; - *inst++ = MOD_REG | ADD | 4; - *(sljit_s32*)inst = compiler->local_size; + if (compiler->local_size > 0) { + if (compiler->local_size <= 127) { + inst = (sljit_u8*)ensure_buf(compiler, 1 + 4); + FAIL_IF(!inst); + INC_SIZE(4); + *inst++ = REX_W; + *inst++ = GROUP_BINARY_83; + *inst++ = MOD_REG | ADD | 4; + *inst = compiler->local_size; + } + else { + inst = (sljit_u8*)ensure_buf(compiler, 1 + 7); + FAIL_IF(!inst); + INC_SIZE(7); + *inst++ = REX_W; + *inst++ = GROUP_BINARY_81; + *inst++ = MOD_REG | ADD | 4; + sljit_unaligned_store_s32(inst, compiler->local_size); + } } tmp = compiler->scratches; @@ -339,7 +332,7 @@ static sljit_s32 emit_do_imm32(struct sl if (rex) *inst++ = rex; *inst++ = opcode; - *(sljit_s32*)inst = imm; + sljit_unaligned_store_s32(inst, imm); return SLJIT_SUCCESS; } @@ -387,13 +380,12 @@ static sljit_u8* emit_x86_instruction(st if (b & SLJIT_MEM) { if (!(b & OFFS_REG_MASK)) { if (NOT_HALFWORD(immb)) { - if (emit_load_imm64(compiler, TMP_REG3, immb)) - return NULL; + PTR_FAIL_IF(emit_load_imm64(compiler, TMP_REG2, immb)); immb = 0; if (b & REG_MASK) - b |= TO_OFFS_REG(TMP_REG3); + b |= TO_OFFS_REG(TMP_REG2); else - b |= TMP_REG3; + b |= TMP_REG2; } else if (reg_lmap[b & REG_MASK] == 4) b |= TO_OFFS_REG(SLJIT_SP); @@ -516,7 +508,7 @@ static sljit_u8* emit_x86_instruction(st if (immb <= 127 && immb >= -128) *buf_ptr++ = immb; /* 8 bit displacement. */ else { - *(sljit_s32*)buf_ptr = immb; /* 32 bit displacement. */ + sljit_unaligned_store_s32(buf_ptr, immb); /* 32 bit displacement. */ buf_ptr += sizeof(sljit_s32); } } @@ -533,7 +525,7 @@ static sljit_u8* emit_x86_instruction(st else { *buf_ptr++ |= 0x04; *buf_ptr++ = 0x25; - *(sljit_s32*)buf_ptr = immb; /* 32 bit displacement. */ + sljit_unaligned_store_s32(buf_ptr, immb); /* 32 bit displacement. */ buf_ptr += sizeof(sljit_s32); } @@ -541,9 +533,9 @@ static sljit_u8* emit_x86_instruction(st if (flags & EX86_BYTE_ARG) *buf_ptr = imma; else if (flags & EX86_HALF_ARG) - *(short*)buf_ptr = imma; + sljit_unaligned_store_s16(buf_ptr, imma); else if (!(flags & EX86_SHIFT_INS)) - *(sljit_s32*)buf_ptr = imma; + sljit_unaligned_store_s32(buf_ptr, imma); } return !(flags & EX86_SHIFT_INS) ? 
inst : (inst + 1); @@ -553,17 +545,19 @@ static sljit_u8* emit_x86_instruction(st /* Call / return instructions */ /* --------------------------------------------------------------------- */ -static SLJIT_INLINE sljit_s32 call_with_args(struct sljit_compiler *compiler, sljit_s32 type) +static sljit_s32 call_with_args(struct sljit_compiler *compiler, sljit_s32 type) { sljit_u8 *inst; + /* After any change update IS_REG_CHANGED_BY_CALL as well. */ #ifndef _WIN64 - SLJIT_COMPILE_ASSERT(reg_map[SLJIT_R1] == 6 && reg_map[SLJIT_R0] < 8 && reg_map[SLJIT_R2] < 8, args_registers); + SLJIT_ASSERT(reg_map[SLJIT_R1] == 6 && reg_map[SLJIT_R0] < 8 && reg_map[SLJIT_R2] < 8 && reg_map[TMP_REG1] == 2); inst = (sljit_u8*)ensure_buf(compiler, 1 + ((type < SLJIT_CALL3) ? 3 : 6)); FAIL_IF(!inst); INC_SIZE((type < SLJIT_CALL3) ? 3 : 6); if (type >= SLJIT_CALL3) { + /* Move third argument to TMP_REG1. */ *inst++ = REX_W; *inst++ = MOV_r_rm; *inst++ = MOD_REG | (0x2 /* rdx */ << 3) | reg_lmap[SLJIT_R2]; @@ -572,12 +566,13 @@ static SLJIT_INLINE sljit_s32 call_with_ *inst++ = MOV_r_rm; *inst++ = MOD_REG | (0x7 /* rdi */ << 3) | reg_lmap[SLJIT_R0]; #else - SLJIT_COMPILE_ASSERT(reg_map[SLJIT_R1] == 2 && reg_map[SLJIT_R0] < 8 && reg_map[SLJIT_R2] < 8, args_registers); + SLJIT_ASSERT(reg_map[SLJIT_R1] == 2 && reg_map[SLJIT_R0] < 8 && reg_map[SLJIT_R2] < 8 && reg_map[TMP_REG1] == 8); inst = (sljit_u8*)ensure_buf(compiler, 1 + ((type < SLJIT_CALL3) ? 3 : 6)); FAIL_IF(!inst); INC_SIZE((type < SLJIT_CALL3) ? 3 : 6); if (type >= SLJIT_CALL3) { + /* Move third argument to TMP_REG1. */ *inst++ = REX_W | REX_R; *inst++ = MOV_r_rm; *inst++ = MOD_REG | (0x0 /* r8 */ << 3) | reg_lmap[SLJIT_R2]; @@ -676,7 +671,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit INC_SIZE(5 + 1); *inst++ = PUSH_i32; - *(sljit_s32*)inst = srcw; + sljit_unaligned_store_s32(inst, srcw); inst += sizeof(sljit_s32); }
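
A note on two idioms that recur in the x86 hunks above: direct type-punned stores such as *(sljit_sw*)inst = imm are replaced with the sljit_unaligned_store_sw()/sljit_unaligned_store_s32() helpers, and the hard-coded "(local_size + 7) & ~7" rounding is replaced with masks derived from sizeof(sljit_f64). The sketch below shows the general shape of both patterns; it is a minimal illustration written for this summary, with invented names (store_sw_unaligned, round_up_pow2), not the actual sljit definitions.

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

typedef intptr_t sw_t; /* stand-in for sljit_sw */

/* Store a word to a possibly misaligned address. memcpy is well defined
   for any alignment and is normally compiled down to a plain store on
   targets that allow unaligned access. */
static void store_sw_unaligned(void *dst, sw_t value)
{
    memcpy(dst, &value, sizeof(value));
}

/* Round size up to the next multiple of align, where align is a power
   of two. This has the same shape as the sizeof(sljit_f64) masks used
   for SLJIT_F64_ALIGNMENT in the hunks above. */
static size_t round_up_pow2(size_t size, size_t align)
{
    return (size + align - 1) & ~(align - 1);
}

int main(void)
{
    unsigned char buf[16];

    store_sw_unaligned(buf + 1, (sw_t)0x12345678); /* deliberately misaligned */
    printf("%zu %zu\n",
           round_up_pow2(20, sizeof(double)),
           round_up_pow2(24, sizeof(double))); /* prints: 24 24 */
    return 0;
}

On common x86 compilers a fixed-size memcpy like this is typically lowered to a single store, so the helper-based form costs nothing while avoiding misaligned type-punned writes, and the power-of-two mask ties the stack rounding to the actual sljit_f64 size instead of a literal constant.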