diff --git a/gcc/config/aarch64/aarch64.c b/gcc/config/aarch64/aarch64.c
index aed035a..f42cb75 100644
--- a/gcc/config/aarch64/aarch64.c
+++ b/gcc/config/aarch64/aarch64.c
@@ -109,6 +109,7 @@ enum aarch64_code_model aarch64_cmodel;
 #define TARGET_HAVE_TLS 1
 #endif
 
+static bool aarch64_lra_p (void);
 static bool aarch64_composite_type_p (const_tree, enum machine_mode);
 static bool aarch64_vfp_is_call_or_return_candidate (enum machine_mode,
 						     const_tree,
@@ -6083,6 +6084,13 @@ aapcs_vfp_sub_candidate (const_tree type, enum machine_mode *modep)
   return -1;
 }
 
+/* Return true if we use LRA instead of the reload pass.  */
+static bool
+aarch64_lra_p (void)
+{
+  return aarch64_lra_flag;
+}
+
 /* Return TRUE if the type, as described by TYPE and MODE, is a composite
    type as described in AAPCS64 \S 4.3.  This includes aggregate, union and
    array types.  The C99 floating-point complex types are also considered
@@ -8208,6 +8216,9 @@ aarch64_vectorize_vec_perm_const_ok (enum machine_mode vmode,
 #undef TARGET_LIBGCC_CMP_RETURN_MODE
 #define TARGET_LIBGCC_CMP_RETURN_MODE aarch64_libgcc_cmp_return_mode
 
+#undef TARGET_LRA_P
+#define TARGET_LRA_P aarch64_lra_p
+
 #undef TARGET_MANGLE_TYPE
 #define TARGET_MANGLE_TYPE aarch64_mangle_type
 
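Note: TARGET_LRA_P is the hook the register allocator queries when
choosing between LRA and the old reload pass; ira.c does roughly
"ira_use_lra_p = targetm.lra_p ();".  The standalone sketch below
models only that dispatch pattern; all of its names (example_lra_p,
target_model, and so on) are invented for illustration and none of
this is GCC code.

#include <stdbool.h>
#include <stdio.h>

static int example_lra_flag = 1; /* models aarch64_lra_flag / arm_lra_flag */

/* Models aarch64_lra_p above: gate LRA on the target flag.  */
static bool
example_lra_p (void)
{
  return example_lra_flag;
}

struct target_model
{
  bool (*lra_p) (void); /* stand-in for the targetm.lra_p hook slot */
};

static const struct target_model targetm_model = { example_lra_p };

int
main (void)
{
  /* The allocator's decision point, modelled on ira ().  */
  if (targetm_model.lra_p ())
    puts ("register allocation uses LRA");
  else
    puts ("register allocation uses reload");
  return 0;
}
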
diff --git a/gcc/config/aarch64/aarch64.opt b/gcc/config/aarch64/aarch64.opt
index 8ff6ca1..0c31312 100644
--- a/gcc/config/aarch64/aarch64.opt
+++ b/gcc/config/aarch64/aarch64.opt
@@ -103,6 +103,10 @@ mabi=
 Target RejectNegative Joined Enum(aarch64_abi) Var(aarch64_abi) Init(AARCH64_ABI_DEFAULT)
 -mabi=ABI	Generate code that conforms to the specified ABI
 
+mlra
+Target Report Var(aarch64_lra_flag) Init(1) Save
+Use LRA instead of reload
+
 Enum
 Name(aarch64_abi) Type(int)
 Known AArch64 ABIs (for use with the -mabi= option):
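Note: GCC's .opt machinery expands the mlra stanza above into a flag
variable: Var(aarch64_lra_flag) declares it, Init(1) turns LRA on by
default, and the automatic negative form -mno-lra clears it (the
arm.opt stanza further below behaves identically).  Here is a
self-contained model of that generated state; the struct and macro
shape loosely follow GCC's Var() convention, and every _model name is
invented.

#include <stdio.h>

struct gcc_options_model
{
  int x_aarch64_lra_flag; /* from Var(aarch64_lra_flag) */
};

/* Init(1): LRA is enabled unless -mno-lra is given.  */
static struct gcc_options_model global_options_model = { 1 };

#define aarch64_lra_flag (global_options_model.x_aarch64_lra_flag)

int
main (void)
{
  printf ("default (-mlra): %d\n", aarch64_lra_flag);
  aarch64_lra_flag = 0; /* the effect of parsing -mno-lra */
  printf ("after -mno-lra: %d\n", aarch64_lra_flag);
  return 0;
}
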
diff --git a/gcc/config/arm/arm.c b/gcc/config/arm/arm.c
index f731bb6..182b009 100644
--- a/gcc/config/arm/arm.c
+++ b/gcc/config/arm/arm.c
@@ -68,6 +68,7 @@ struct four_ints
 };
 
 /* Forward function declarations.  */
+static bool arm_lra_p (void);
 static bool arm_needs_doubleword_align (enum machine_mode, const_tree);
 static int arm_compute_static_chain_stack_bytes (void);
 static arm_stack_offsets *arm_get_frame_offsets (void);
@@ -338,6 +339,9 @@ static const struct attribute_spec arm_attribute_table[] =
 #undef TARGET_LEGITIMIZE_ADDRESS
 #define TARGET_LEGITIMIZE_ADDRESS arm_legitimize_address
 
+#undef TARGET_LRA_P
+#define TARGET_LRA_P arm_lra_p
+
 #undef  TARGET_ATTRIBUTE_TABLE
 #define TARGET_ATTRIBUTE_TABLE arm_attribute_table
 
@@ -4971,6 +4975,12 @@ arm_init_cumulative_args (CUMULATIVE_ARGS *pcum, tree fntype,
     }
 }
 
+/* Return true if we use LRA instead of the reload pass.  */
+static bool
+arm_lra_p (void)
+{
+  return arm_lra_flag;
+}
 
 /* Return true if mode/type need doubleword alignment.  */
 static bool
diff --git a/gcc/config/arm/arm.h b/gcc/config/arm/arm.h
index 1781b75..05a271e 100644
--- a/gcc/config/arm/arm.h
+++ b/gcc/config/arm/arm.h
@@ -1266,11 +1266,12 @@ enum reg_class
 
 /* Must leave BASE_REGS reloads alone */
 #define THUMB_SECONDARY_INPUT_RELOAD_CLASS(CLASS, MODE, X)		\
+  (lra_in_progress ? NO_REGS :						\
   ((CLASS) != LO_REGS && (CLASS) != BASE_REGS				\
    ? ((true_regnum (X) == -1 ? LO_REGS					\
        : (true_regnum (X) + HARD_REGNO_NREGS (0, MODE) > 8) ? LO_REGS	\
        : NO_REGS)) 							\
-   : NO_REGS)
+   : NO_REGS))
 
 #define THUMB_SECONDARY_OUTPUT_RELOAD_CLASS(CLASS, MODE, X)		\
   ((CLASS) != LO_REGS && (CLASS) != BASE_REGS				\
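Note: with the lra_in_progress guard added above, Thumb-1 asks for no
secondary input reload class while LRA runs; LRA expresses the LO_REGS
restriction through constraints instead.  A standalone model of the
guarded macro's behaviour follows; the enum values and _model names
are invented, and the old-reload branch is reduced to the class test
alone.

#include <stdbool.h>
#include <stdio.h>

enum reg_class_model { NO_REGS, LO_REGS, BASE_REGS, HI_REGS };

static bool lra_in_progress_model;

/* Shape of THUMB_SECONDARY_INPUT_RELOAD_CLASS after the patch.  */
static enum reg_class_model
thumb_secondary_input_class (enum reg_class_model rclass)
{
  if (lra_in_progress_model)
    return NO_REGS;
  return (rclass != LO_REGS && rclass != BASE_REGS) ? LO_REGS : NO_REGS;
}

int
main (void)
{
  lra_in_progress_model = true;
  printf ("under LRA: %d\n", thumb_secondary_input_class (HI_REGS));
  lra_in_progress_model = false;
  printf ("under reload: %d\n", thumb_secondary_input_class (HI_REGS));
  return 0;
}
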
diff --git a/gcc/config/arm/arm.opt b/gcc/config/arm/arm.opt
index b9ae2b0..7c9ea36 100644
--- a/gcc/config/arm/arm.opt
+++ b/gcc/config/arm/arm.opt
@@ -109,6 +109,10 @@ mfloat-abi=
 Target RejectNegative Joined Enum(float_abi_type) Var(arm_float_abi) Init(TARGET_DEFAULT_FLOAT_ABI)
 Specify if floating point hardware should be used
 
+mlra
+Target Report Var(arm_lra_flag) Init(1) Save
+Use LRA instead of reload
+
 Enum
 Name(float_abi_type) Type(enum float_abi_type)
 Known floating-point ABIs (for use with the -mfloat-abi= option):
diff --git a/gcc/rtlanal.c b/gcc/rtlanal.c
index 95a314f..0959ff5 100644
--- a/gcc/rtlanal.c
+++ b/gcc/rtlanal.c
@@ -5437,12 +5437,29 @@ split_double (rtx value, rtx *first, rtx *second)
     }
 }
 
+/* Return true if X is a sign_extract or zero_extract from the least
+   significant bit.  */
+
+static bool
+lsb_bitfield_op_p (rtx x)
+{
+  if (GET_RTX_CLASS (GET_CODE (x)) == RTX_BITFIELD_OPS)
+    {
+      enum machine_mode mode = GET_MODE (XEXP (x, 0));
+      HOST_WIDE_INT len = INTVAL (XEXP (x, 1));
+      HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
+
+      return (pos == (BITS_BIG_ENDIAN ? GET_MODE_PRECISION (mode) - len : 0));
+    }
+  return false;
+}
+
 /* Strip outer address "mutations" from LOC and return a pointer to the
    inner value.  If OUTER_CODE is nonnull, store the code of the innermost
    stripped expression there.
 
    "Mutations" either convert between modes or apply some kind of
-   alignment.  */
+   extension, truncation or alignment.  */
 
 rtx *
 strip_address_mutations (rtx *loc, enum rtx_code *outer_code)
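Note: the new lsb_bitfield_op_p reduces to one comparison on the
extract's position operand.  The sketch below restates that arithmetic
outside of RTL; lsb_extract_p is an invented name and all RTX plumbing
is omitted.

#include <stdbool.h>
#include <stdio.h>

/* An extract of LEN bits at position POS starts at the least
   significant bit when POS is 0 (little-endian bit numbering) or
   PRECISION - LEN (BITS_BIG_ENDIAN), as in lsb_bitfield_op_p.  */
static bool
lsb_extract_p (int precision, int len, int pos, bool bits_big_endian)
{
  return pos == (bits_big_endian ? precision - len : 0);
}

int
main (void)
{
  /* (zero_extract:SI (reg:SI r) (const_int 8) (const_int 0)).  */
  printf ("%d\n", lsb_extract_p (32, 8, 0, false));  /* 1 */
  /* The same 8-bit field under big-endian numbering sits at 24.  */
  printf ("%d\n", lsb_extract_p (32, 8, 24, true));  /* 1 */
  return 0;
}
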
@@ -5454,6 +5471,10 @@ strip_address_mutations (rtx *loc, enum rtx_code *outer_code)
 	/* Things like SIGN_EXTEND, ZERO_EXTEND and TRUNCATE can be
 	   used to convert between pointer sizes.  */
 	loc = &XEXP (*loc, 0);
+      else if (lsb_bitfield_op_p (*loc))
+	/* A [SIGN|ZERO]_EXTRACT from the least significant bit can
+	   likewise be used to convert between modes.  */
+	loc = &XEXP (*loc, 0);
       else if (code == AND && CONST_INT_P (XEXP (*loc, 1)))
 	/* (and ... (const_int -X)) is used to align to X bytes.  */
 	loc = &XEXP (*loc, 0);
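Note: the loop in strip_address_mutations descends through wrapper
codes until it reaches the inner value, and the patch adds LSB
extracts to that wrapper set.  A toy restatement with a minimal tagged
node type in place of RTL; every name here is invented.

#include <stdio.h>

enum wrap_code { REG, SIGN_EXTEND, TRUNCATE, LSB_EXTRACT, ALIGN_AND };

struct expr_model
{
  enum wrap_code code;
  struct expr_model *op0;
};

/* Descend through conversions, truncations, LSB extracts and
   alignment masks, as strip_address_mutations does.  */
static struct expr_model *
strip_mutations (struct expr_model *e)
{
  while (e->code == SIGN_EXTEND || e->code == TRUNCATE
	 || e->code == LSB_EXTRACT || e->code == ALIGN_AND)
    e = e->op0;
  return e;
}

int
main (void)
{
  struct expr_model reg = { REG, 0 };
  struct expr_model ext = { LSB_EXTRACT, &reg }; /* the new case */
  struct expr_model conv = { TRUNCATE, &ext };
  printf ("%d\n", strip_mutations (&conv)->code == REG); /* 1 */
  return 0;
}
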
@@ -5470,6 +5491,18 @@ strip_address_mutations (rtx *loc, enum rtx_code *outer_code)
     }
 }
 
+/* Return true if X is a shift or rotate operation.  */
+
+static bool
+shift_code_p (rtx x)
+{
+  return (GET_CODE (x) == ASHIFT
+	  || GET_CODE (x) == ASHIFTRT
+	  || GET_CODE (x) == LSHIFTRT
+	  || GET_CODE (x) == ROTATE
+	  || GET_CODE (x) == ROTATERT);
+}
+
 /* Return true if X must be a base rather than an index.  */
 
 static bool
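Note: widening must_be_index_p matters for register-offset addresses
such as ARM's "ldr r0, [r1, r2, lsl #2]", whose offset is roughly
(ashift (reg r2) (const_int 2)) in RTL; after the patch, right shifts
and rotates classify as indexes as well.  A standalone restatement of
the test, with invented enum values standing in for rtx codes.

#include <stdbool.h>
#include <stdio.h>

enum op_code { REG, MULT, ASHIFT, ASHIFTRT, LSHIFTRT, ROTATE, ROTATERT };

/* Mirrors shift_code_p.  */
static bool
shift_code (enum op_code c)
{
  return (c == ASHIFT || c == ASHIFTRT || c == LSHIFTRT
	  || c == ROTATE || c == ROTATERT);
}

/* Mirrors the widened must_be_index_p: any scaling operation marks
   its operand as the index part of an address.  */
static bool
must_be_index (enum op_code c)
{
  return c == MULT || shift_code (c);
}

int
main (void)
{
  printf ("%d %d %d\n", must_be_index (MULT), must_be_index (ROTATERT),
	  must_be_index (REG)); /* 1 1 0 */
  return 0;
}
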
@@ -5483,7 +5516,7 @@ must_be_base_p (rtx x)
 static bool
 must_be_index_p (rtx x)
 {
-  return GET_CODE (x) == MULT || GET_CODE (x) == ASHIFT;
+  return GET_CODE (x) == MULT || shift_code_p (x);
 }
 
 /* Set the segment part of address INFO to LOC, given that INNER is the
@@ -5522,7 +5555,7 @@ set_address_base (struct address_info *info, rtx *loc, rtx *inner)
 static void
 set_address_index (struct address_info *info, rtx *loc, rtx *inner)
 {
-  if ((GET_CODE (*inner) == MULT || GET_CODE (*inner) == ASHIFT)
+  if ((GET_CODE (*inner) == MULT || shift_code_p (*inner))
       && CONSTANT_P (XEXP (*inner, 1)))
     inner = strip_address_mutations (&XEXP (*inner, 0));
   gcc_checking_assert (REG_P (*inner)
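Note: in set_address_index, an index expression that scales by a
constant records the operand under the MULT or shift as the index
term.  A toy version of that stripping step; the node type and names
are invented, and the shift set is reduced to ASHIFT for brevity.

#include <stdio.h>

enum idx_code { REG, CONST_INT, MULT, ASHIFT };

struct node_model
{
  enum idx_code code;
  struct node_model *op0, *op1;
  int val;
};

/* Strip (mult X c) or (ashift X c), as set_address_index does before
   asserting that a register remains.  */
static struct node_model *
index_term (struct node_model *n)
{
  if ((n->code == MULT || n->code == ASHIFT)
      && n->op1->code == CONST_INT)
    n = n->op0;
  return n;
}

int
main (void)
{
  struct node_model r2 = { REG, 0, 0, 2 };
  struct node_model two = { CONST_INT, 0, 0, 2 };
  struct node_model scaled = { ASHIFT, &r2, &two, 0 }; /* r2 << 2 */
  printf ("index register: r%d\n", index_term (&scaled)->val);
  return 0;
}
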
