---
 .../cpu/aarch64/aarch64-context-validate.S    |  40 +++---
 .../cpu/aarch64/aarch64-exception-default.S   |  38 +++---
 .../cpu/aarch64/aarch64-exception-interrupt.S | 116 +++++++++---------
 cpukit/score/cpu/aarch64/cpu_asm.S            |  12 +-
 .../cpu/aarch64/include/rtems/score/cpu.h     |  26 ++--
 5 files changed, 116 insertions(+), 116 deletions(-)

diff --git a/cpukit/score/cpu/aarch64/aarch64-context-validate.S b/cpukit/score/cpu/aarch64/aarch64-context-validate.S
index 31c8d5571c..57f634934b 100644
--- a/cpukit/score/cpu/aarch64/aarch64-context-validate.S
+++ b/cpukit/score/cpu/aarch64/aarch64-context-validate.S
@@ -43,29 +43,29 @@
 #include <rtems/asm.h>
 #include <rtems/score/cpu.h>
 
-#define FRAME_OFFSET_X4 0
-#define FRAME_OFFSET_X5 8
-#define FRAME_OFFSET_X6 16
-#define FRAME_OFFSET_X7 24
-#define FRAME_OFFSET_X8 32
-#define FRAME_OFFSET_X9 40
-#define FRAME_OFFSET_X10 48
-#define FRAME_OFFSET_X11 56
-#define FRAME_OFFSET_LR 64
+#define FRAME_OFFSET_X4  0x00
+#define FRAME_OFFSET_X5  0x08
+#define FRAME_OFFSET_X6  0x10
+#define FRAME_OFFSET_X7  0x18
+#define FRAME_OFFSET_X8  0x20
+#define FRAME_OFFSET_X9  0x28
+#define FRAME_OFFSET_X10 0x30
+#define FRAME_OFFSET_X11 0x38
+#define FRAME_OFFSET_LR  0x40
 
 #ifdef AARCH64_MULTILIB_VFP
-  #define FRAME_OFFSET_V8 72
-  #define FRAME_OFFSET_V9 88
-  #define FRAME_OFFSET_V10 104
-  #define FRAME_OFFSET_V11 120
-  #define FRAME_OFFSET_V12 136
-  #define FRAME_OFFSET_V13 152
-  #define FRAME_OFFSET_V14 168
-  #define FRAME_OFFSET_V15 184
-
-  #define FRAME_SIZE (FRAME_OFFSET_V15 + 16)
+  #define FRAME_OFFSET_V8  0x48
+  #define FRAME_OFFSET_V9  0x58
+  #define FRAME_OFFSET_V10 0x68
+  #define FRAME_OFFSET_V11 0x78
+  #define FRAME_OFFSET_V12 0x88
+  #define FRAME_OFFSET_V13 0x98
+  #define FRAME_OFFSET_V14 0xA8
+  #define FRAME_OFFSET_V15 0xB8
+
+  #define FRAME_SIZE (FRAME_OFFSET_V15 + 0x10)
 #else
-  #define FRAME_SIZE (FRAME_OFFSET_LR + 8)
+  #define FRAME_SIZE (FRAME_OFFSET_LR + 0x08)
 #endif
 
        .section        .text
diff --git a/cpukit/score/cpu/aarch64/aarch64-exception-default.S b/cpukit/score/cpu/aarch64/aarch64-exception-default.S
index 3d311df280..81aa82558e 100644
--- a/cpukit/score/cpu/aarch64/aarch64-exception-default.S
+++ b/cpukit/score/cpu/aarch64/aarch64-exception-default.S
@@ -74,7 +74,7 @@
  */
        blr x0
 /* Pop x0,lr from stack */
-       ldp x0, lr,     [sp],   #16
+       ldp x0, lr,     [sp],   #0x10
 /* Return from exception */
        eret
        nop
@@ -129,7 +129,7 @@ Vector_table_el3:
  * using SP0.
  */
 curr_el_sp0_sync:
-       stp x0, lr,     [sp, #-16]!     /* Push x0,lr on to the stack */
+       stp x0, lr,     [sp, #-0x10]!   /* Push x0,lr on to the stack */
        bl curr_el_sp0_sync_get_pc      /* Get current execution address */
 curr_el_sp0_sync_get_pc:               /* The current PC is now in LR */
        JUMP_HANDLER
@@ -137,7 +137,7 @@ curr_el_sp0_sync_get_pc:            /* The current PC is now in LR */
 .balign 0x80
 /* The exception handler for IRQ exceptions from the current EL using SP0. */
 curr_el_sp0_irq:
-       stp x0, lr,     [sp, #-16]!     /* Push x0,lr on to the stack */
+       stp x0, lr,     [sp, #-0x10]!   /* Push x0,lr on to the stack */
        bl curr_el_sp0_irq_get_pc       /* Get current execution address */
 curr_el_sp0_irq_get_pc:                        /* The current PC is now in LR */
        JUMP_HANDLER
@@ -145,7 +145,7 @@ curr_el_sp0_irq_get_pc:                     /* The current PC is now in LR */
 .balign 0x80
 /* The exception handler for FIQ exceptions from the current EL using SP0. */
 curr_el_sp0_fiq:
-       stp x0, lr,     [sp, #-16]!     /* Push x0,lr on to the stack */
+       stp x0, lr,     [sp, #-0x10]!   /* Push x0,lr on to the stack */
        bl curr_el_sp0_fiq_get_pc       /* Get current execution address */
 curr_el_sp0_fiq_get_pc:                        /* The current PC is now in LR */
        JUMP_HANDLER
@@ -156,7 +156,7 @@ curr_el_sp0_fiq_get_pc:                     /* The current PC is now in LR */
  * SP0.
  */
 curr_el_sp0_serror:
-       stp x0, lr,     [sp, #-16]!     /* Push x0,lr on to the stack */
+       stp x0, lr,     [sp, #-0x10]!   /* Push x0,lr on to the stack */
        bl curr_el_sp0_serror_get_pc    /* Get current execution address */
 curr_el_sp0_serror_get_pc:             /* The current PC is now in LR */
        JUMP_HANDLER
@@ -167,7 +167,7 @@ curr_el_sp0_serror_get_pc:          /* The current PC is now in LR */
  * the current SP.
  */
 curr_el_spx_sync:
-       stp x0, lr,     [sp, #-16]!     /* Push x0,lr on to the stack */
+       stp x0, lr,     [sp, #-0x10]!   /* Push x0,lr on to the stack */
        bl curr_el_spx_sync_get_pc      /* Get current execution address */
 curr_el_spx_sync_get_pc:               /* The current PC is now in LR */
        JUMP_HANDLER
@@ -178,7 +178,7 @@ curr_el_spx_sync_get_pc:            /* The current PC is now in LR */
  * current SP.
  */
 curr_el_spx_irq:
-       stp x0, lr,     [sp, #-16]!     /* Push x0,lr on to the stack */
+       stp x0, lr,     [sp, #-0x10]!   /* Push x0,lr on to the stack */
        bl curr_el_spx_irq_get_pc       /* Get current execution address */
 curr_el_spx_irq_get_pc:                        /* The current PC is now in LR */
        JUMP_HANDLER
@@ -189,7 +189,7 @@ curr_el_spx_irq_get_pc:                     /* The current PC is now in LR */
  * current SP.
  */
 curr_el_spx_fiq:
-       stp x0, lr,     [sp, #-16]!     /* Push x0,lr on to the stack */
+       stp x0, lr,     [sp, #-0x10]!   /* Push x0,lr on to the stack */
        bl curr_el_spx_fiq_get_pc       /* Get current execution address */
 curr_el_spx_fiq_get_pc:                        /* The current PC is now in LR */
        JUMP_HANDLER
@@ -200,7 +200,7 @@ curr_el_spx_fiq_get_pc:                     /* The current PC is now in LR */
  * the current SP.
  */
 curr_el_spx_serror:
-       stp x0, lr,     [sp, #-16]!     /* Push x0,lr on to the stack */
+       stp x0, lr,     [sp, #-0x10]!   /* Push x0,lr on to the stack */
        bl curr_el_spx_serror_get_pc    /* Get current execution address */
 curr_el_spx_serror_get_pc:             /* The current PC is now in LR */
        JUMP_HANDLER
@@ -210,7 +210,7 @@ curr_el_spx_serror_get_pc:          /* The current PC is now in LR */
  * The exception handler for synchronous exceptions from a lower EL (AArch64).
  */
 lower_el_aarch64_sync:
-       stp x0, lr,     [sp, #-16]!     /* Push x0,lr on to the stack */
+       stp x0, lr,     [sp, #-0x10]!   /* Push x0,lr on to the stack */
        bl lower_el_aarch64_sync_get_pc /* Get current execution address */
 lower_el_aarch64_sync_get_pc:          /* The current PC is now in LR */
        JUMP_HANDLER
@@ -218,7 +218,7 @@ lower_el_aarch64_sync_get_pc:               /* The current PC is now in LR */
 .balign 0x80
 /* The exception handler for IRQ exceptions from a lower EL (AArch64). */
 lower_el_aarch64_irq:
-       stp x0, lr,     [sp, #-16]!     /* Push x0,lr on to the stack */
+       stp x0, lr,     [sp, #-0x10]!   /* Push x0,lr on to the stack */
        bl lower_el_aarch64_irq_get_pc  /* Get current execution address */
 lower_el_aarch64_irq_get_pc:           /* The current PC is now in LR */
        JUMP_HANDLER
@@ -226,7 +226,7 @@ lower_el_aarch64_irq_get_pc:                /* The current PC is now in LR */
 .balign 0x80
 /* The exception handler for FIQ exceptions from a lower EL (AArch64). */
 lower_el_aarch64_fiq:
-       stp x0, lr,     [sp, #-16]!     /* Push x0,lr on to the stack */
+       stp x0, lr,     [sp, #-0x10]!   /* Push x0,lr on to the stack */
        bl lower_el_aarch64_fiq_get_pc  /* Get current execution address */
 lower_el_aarch64_fiq_get_pc:           /* The current PC is now in LR */
        JUMP_HANDLER
@@ -237,7 +237,7 @@ lower_el_aarch64_fiq_get_pc:                /* The current PC is now in LR */
  */
 lower_el_aarch64_serror:
 /* Push x0,lr on to the stack */
-       stp x0, lr,     [sp, #-16]!
+       stp x0, lr,     [sp, #-0x10]!
 /* Get current execution address */
        bl lower_el_aarch64_serror_get_pc
 lower_el_aarch64_serror_get_pc:                /* The current PC is now in LR */
@@ -248,7 +248,7 @@ lower_el_aarch64_serror_get_pc:             /* The current PC is now in LR */
  * The exception handler for the synchronous exception from a lower EL(AArch32).
  */
 lower_el_aarch32_sync:
-       stp x0, lr,     [sp, #-16]!     /* Push x0,lr on to the stack */
+       stp x0, lr,     [sp, #-0x10]!   /* Push x0,lr on to the stack */
        bl lower_el_aarch32_sync_get_pc /* Get current execution address */
 lower_el_aarch32_sync_get_pc:          /* The current PC is now in LR */
        JUMP_HANDLER
@@ -256,7 +256,7 @@ lower_el_aarch32_sync_get_pc:               /* The current PC is now in LR */
 .balign 0x80
 /* The exception handler for the IRQ exception from a lower EL (AArch32). */
 lower_el_aarch32_irq:
-       stp x0, lr,     [sp, #-16]!     /* Push x0,lr on to the stack */
+       stp x0, lr,     [sp, #-0x10]!   /* Push x0,lr on to the stack */
        bl lower_el_aarch32_irq_get_pc  /* Get current execution address */
 lower_el_aarch32_irq_get_pc:           /* The current PC is now in LR */
        JUMP_HANDLER
@@ -264,7 +264,7 @@ lower_el_aarch32_irq_get_pc:                /* The current PC is now in LR */
 .balign 0x80
 /* The exception handler for the FIQ exception from a lower EL (AArch32). */
 lower_el_aarch32_fiq:
-       stp x0, lr,     [sp, #-16]!     /* Push x0,lr on to the stack */
+       stp x0, lr,     [sp, #-0x10]!   /* Push x0,lr on to the stack */
        bl lower_el_aarch32_fiq_get_pc  /* Get current execution address */
 lower_el_aarch32_fiq_get_pc:           /* The current PC is now in LR */
        JUMP_HANDLER
@@ -276,7 +276,7 @@ lower_el_aarch32_fiq_get_pc:                /* The current PC is now in LR */
  */
 lower_el_aarch32_serror:
 /* Push x0,lr on to the stack */
-       stp x0, lr,     [sp, #-16]!
+       stp x0, lr,     [sp, #-0x10]!
 /* Get current execution address */
        bl lower_el_aarch32_serror_get_pc
 lower_el_aarch32_serror_get_pc :               /* The current PC is now in LR */
@@ -311,7 +311,7 @@ bsp_start_vector_table_end:
  * safe because this will never return
  */
        msr spsel, #1
-       ldp x0, lr, [sp], #16
+       ldp x0, lr, [sp], #0x10
        msr spsel, #0
 /* Save LR */
        str lr, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_LR_OFFSET]
@@ -349,7 +349,7 @@ bsp_start_vector_table_end:
 /* Push the start of the context */
        bl .push_exception_context_start
 /* Save original sp in x0 for .push_exception_context_finish */
-       add x0, sp, #(AARCH64_EXCEPTION_FRAME_SIZE + 16)
+       add x0, sp, #(AARCH64_EXCEPTION_FRAME_SIZE + 0x10)
 /* Push the remainder of the context */
        bl .push_exception_context_finish
 /* Save sp (exception frame) into x0 for handler */
diff --git a/cpukit/score/cpu/aarch64/aarch64-exception-interrupt.S b/cpukit/score/cpu/aarch64/aarch64-exception-interrupt.S
index fc04af6987..f534a526b3 100644
--- a/cpukit/score/cpu/aarch64/aarch64-exception-interrupt.S
+++ b/cpukit/score/cpu/aarch64/aarch64-exception-interrupt.S
@@ -161,90 +161,90 @@
  * Push x1-x21 on to the stack, need 19-21 because they're modified without
  * obeying PCS
  */
-       stp lr,         x1,     [sp, #-16]!
-       stp x2,         x3,     [sp, #-16]!
-       stp x4,         x5,     [sp, #-16]!
-       stp x6,         x7,     [sp, #-16]!
-       stp x8,         x9,     [sp, #-16]!
-       stp x10,        x11,    [sp, #-16]!
-       stp x12,        x13,    [sp, #-16]!
-       stp x14,        x15,    [sp, #-16]!
-       stp x16,        x17,    [sp, #-16]!
-       stp x18,        x19,    [sp, #-16]!
-       stp x20,        x21,    [sp, #-16]!
+       stp lr,         x1,     [sp, #-0x10]!
+       stp x2,         x3,     [sp, #-0x10]!
+       stp x4,         x5,     [sp, #-0x10]!
+       stp x6,         x7,     [sp, #-0x10]!
+       stp x8,         x9,     [sp, #-0x10]!
+       stp x10,        x11,    [sp, #-0x10]!
+       stp x12,        x13,    [sp, #-0x10]!
+       stp x14,        x15,    [sp, #-0x10]!
+       stp x16,        x17,    [sp, #-0x10]!
+       stp x18,        x19,    [sp, #-0x10]!
+       stp x20,        x21,    [sp, #-0x10]!
 /*
  * Push q0-q31 on to the stack, need everything because parts of every register
  * are volatile/corruptible
  */
-       stp q0,         q1,     [sp, #-32]!
-       stp q2,         q3,     [sp, #-32]!
-       stp q4,         q5,     [sp, #-32]!
-       stp q6,         q7,     [sp, #-32]!
-       stp q8,         q9,     [sp, #-32]!
-       stp q10,        q11,    [sp, #-32]!
-       stp q12,        q13,    [sp, #-32]!
-       stp q14,        q15,    [sp, #-32]!
-       stp q16,        q17,    [sp, #-32]!
-       stp q18,        q19,    [sp, #-32]!
-       stp q20,        q21,    [sp, #-32]!
-       stp q22,        q23,    [sp, #-32]!
-       stp q24,        q25,    [sp, #-32]!
-       stp q26,        q27,    [sp, #-32]!
-       stp q28,        q29,    [sp, #-32]!
-       stp q30,        q31,    [sp, #-32]!
+       stp q0,         q1,     [sp, #-0x20]!
+       stp q2,         q3,     [sp, #-0x20]!
+       stp q4,         q5,     [sp, #-0x20]!
+       stp q6,         q7,     [sp, #-0x20]!
+       stp q8,         q9,     [sp, #-0x20]!
+       stp q10,        q11,    [sp, #-0x20]!
+       stp q12,        q13,    [sp, #-0x20]!
+       stp q14,        q15,    [sp, #-0x20]!
+       stp q16,        q17,    [sp, #-0x20]!
+       stp q18,        q19,    [sp, #-0x20]!
+       stp q20,        q21,    [sp, #-0x20]!
+       stp q22,        q23,    [sp, #-0x20]!
+       stp q24,        q25,    [sp, #-0x20]!
+       stp q26,        q27,    [sp, #-0x20]!
+       stp q28,        q29,    [sp, #-0x20]!
+       stp q30,        q31,    [sp, #-0x20]!
 /* Get exception LR for PC and spsr */
        mrs x0, ELR_EL1
        mrs x1, SPSR_EL1
 /* Push pc and spsr */
-       stp x0,         x1,     [sp, #-16]!
+       stp x0,         x1,     [sp, #-0x10]!
 /* Get fpcr and fpsr */
        mrs x0, FPSR
        mrs x1, FPCR
 /* Push fpcr and fpsr */
-       stp x0,         x1,     [sp, #-16]!
+       stp x0,         x1,     [sp, #-0x10]!
 .endm
 
 /* Must match inverse order of .push_interrupt_context */
 .macro pop_interrupt_context
 /* Pop fpcr and fpsr */
-       ldp x0,         x1,     [sp], #16
+       ldp x0,         x1,     [sp], #0x10
 /* Restore fpcr and fpsr */
        msr FPCR, x1
        msr FPSR, x0
 /* Pop pc and spsr */
-       ldp x0,         x1,     [sp], #16
+       ldp x0,         x1,     [sp], #0x10
 /* Restore exception LR for PC and spsr */
        msr SPSR_EL1, x1
        msr ELR_EL1, x0
 /* Pop q0-q31 */
-       ldp q30,        q31,    [sp], #32
-       ldp q28,        q29,    [sp], #32
-       ldp q26,        q27,    [sp], #32
-       ldp q24,        q25,    [sp], #32
-       ldp q22,        q23,    [sp], #32
-       ldp q20,        q21,    [sp], #32
-       ldp q18,        q19,    [sp], #32
-       ldp q16,        q17,    [sp], #32
-       ldp q14,        q15,    [sp], #32
-       ldp q12,        q13,    [sp], #32
-       ldp q10,        q11,    [sp], #32
-       ldp q8,         q9,     [sp], #32
-       ldp q6,         q7,     [sp], #32
-       ldp q4,         q5,     [sp], #32
-       ldp q2,         q3,     [sp], #32
-       ldp q0,         q1,     [sp], #32
+       ldp q30,        q31,    [sp], #0x20
+       ldp q28,        q29,    [sp], #0x20
+       ldp q26,        q27,    [sp], #0x20
+       ldp q24,        q25,    [sp], #0x20
+       ldp q22,        q23,    [sp], #0x20
+       ldp q20,        q21,    [sp], #0x20
+       ldp q18,        q19,    [sp], #0x20
+       ldp q16,        q17,    [sp], #0x20
+       ldp q14,        q15,    [sp], #0x20
+       ldp q12,        q13,    [sp], #0x20
+       ldp q10,        q11,    [sp], #0x20
+       ldp q8,         q9,     [sp], #0x20
+       ldp q6,         q7,     [sp], #0x20
+       ldp q4,         q5,     [sp], #0x20
+       ldp q2,         q3,     [sp], #0x20
+       ldp q0,         q1,     [sp], #0x20
 /* Pop x1-x21 */
-       ldp x20,        x21,    [sp], #16
-       ldp x18,        x19,    [sp], #16
-       ldp x16,        x17,    [sp], #16
-       ldp x14,        x15,    [sp], #16
-       ldp x12,        x13,    [sp], #16
-       ldp x10,        x11,    [sp], #16
-       ldp x8,         x9,     [sp], #16
-       ldp x6,         x7,     [sp], #16
-       ldp x4,         x5,     [sp], #16
-       ldp x2,         x3,     [sp], #16
-       ldp lr,         x1,     [sp], #16
+       ldp x20,        x21,    [sp], #0x10
+       ldp x18,        x19,    [sp], #0x10
+       ldp x16,        x17,    [sp], #0x10
+       ldp x14,        x15,    [sp], #0x10
+       ldp x12,        x13,    [sp], #0x10
+       ldp x10,        x11,    [sp], #0x10
+       ldp x8,         x9,     [sp], #0x10
+       ldp x6,         x7,     [sp], #0x10
+       ldp x4,         x5,     [sp], #0x10
+       ldp x2,         x3,     [sp], #0x10
+       ldp lr,         x1,     [sp], #0x10
 /* Must clear reservations here to ensure consistency with atomic operations */
        clrex
 .endm
diff --git a/cpukit/score/cpu/aarch64/cpu_asm.S b/cpukit/score/cpu/aarch64/cpu_asm.S
index 0e1b803610..6c4da04628 100644
--- a/cpukit/score/cpu/aarch64/cpu_asm.S
+++ b/cpukit/score/cpu/aarch64/cpu_asm.S
@@ -78,9 +78,9 @@ DEFINE_FUNCTION_AARCH64(_CPU_Context_switch)
 #ifdef AARCH64_MULTILIB_VFP
        add     x5, x0, #AARCH64_CONTEXT_CONTROL_D8_OFFSET
        stp d8,  d9,  [x5]
-       stp d10, d11, [x5, #16]
-       stp d12, d13, [x5, #32]
-       stp d14, d15, [x5, #48]
+       stp d10, d11, [x5, #0x10]
+       stp d12, d13, [x5, #0x20]
+       stp d14, d15, [x5, #0x30]
 #endif
 
        str     x3, [x0, #AARCH64_CONTEXT_CONTROL_ISR_DISPATCH_DISABLE]
@@ -102,9 +102,9 @@ DEFINE_FUNCTION_AARCH64(_CPU_Context_switch)
 #ifdef AARCH64_MULTILIB_VFP
        add     x5, x1, #AARCH64_CONTEXT_CONTROL_D8_OFFSET
        ldp d8,  d9,  [x5]
-       ldp d10, d11, [x5, #16]
-       ldp d12, d13, [x5, #32]
-       ldp d14, d15, [x5, #48]
+       ldp d10, d11, [x5, #0x10]
+       ldp d12, d13, [x5, #0x20]
+       ldp d14, d15, [x5, #0x30]
 #endif
 
        msr     TPIDR_EL0, x3
diff --git a/cpukit/score/cpu/aarch64/include/rtems/score/cpu.h b/cpukit/score/cpu/aarch64/include/rtems/score/cpu.h
index d86543b12a..380d1380fb 100644
--- a/cpukit/score/cpu/aarch64/include/rtems/score/cpu.h
+++ b/cpukit/score/cpu/aarch64/include/rtems/score/cpu.h
@@ -124,31 +124,31 @@
 
 #define CPU_MAXIMUM_PROCESSORS 32
 
-#define AARCH64_CONTEXT_CONTROL_THREAD_ID_OFFSET 112
+#define AARCH64_CONTEXT_CONTROL_THREAD_ID_OFFSET 0x70
 
 #ifdef AARCH64_MULTILIB_VFP
-  #define AARCH64_CONTEXT_CONTROL_D8_OFFSET 120
+  #define AARCH64_CONTEXT_CONTROL_D8_OFFSET 0x78
 #endif
 
-#define AARCH64_CONTEXT_CONTROL_ISR_DISPATCH_DISABLE 104
+#define AARCH64_CONTEXT_CONTROL_ISR_DISPATCH_DISABLE 0x68
 
 #ifdef RTEMS_SMP
   #if defined(AARCH64_MULTILIB_VFP)
-    #define AARCH64_CONTEXT_CONTROL_IS_EXECUTING_OFFSET 112
+    #define AARCH64_CONTEXT_CONTROL_IS_EXECUTING_OFFSET 0x70
   #else
-    #define AARCH64_CONTEXT_CONTROL_IS_EXECUTING_OFFSET 48
+    #define AARCH64_CONTEXT_CONTROL_IS_EXECUTING_OFFSET 0x30
   #endif
 #endif
 
-#define AARCH64_EXCEPTION_FRAME_SIZE 848
+#define AARCH64_EXCEPTION_FRAME_SIZE 0x350
 
-#define AARCH64_EXCEPTION_FRAME_REGISTER_SP_OFFSET 248
-#define AARCH64_EXCEPTION_FRAME_REGISTER_LR_OFFSET 240
-#define AARCH64_EXCEPTION_FRAME_REGISTER_DAIF_OFFSET 264
-#define AARCH64_EXCEPTION_FRAME_REGISTER_SYNDROME_OFFSET 280
-#define AARCH64_EXCEPTION_FRAME_REGISTER_VECTOR_OFFSET 296
-#define AARCH64_EXCEPTION_FRAME_REGISTER_FPSR_OFFSET 312
-#define AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET 336
+#define AARCH64_EXCEPTION_FRAME_REGISTER_SP_OFFSET 0xF8
+#define AARCH64_EXCEPTION_FRAME_REGISTER_LR_OFFSET 0xF0
+#define AARCH64_EXCEPTION_FRAME_REGISTER_DAIF_OFFSET 0x108
+#define AARCH64_EXCEPTION_FRAME_REGISTER_SYNDROME_OFFSET 0x118
+#define AARCH64_EXCEPTION_FRAME_REGISTER_VECTOR_OFFSET 0x128
+#define AARCH64_EXCEPTION_FRAME_REGISTER_FPSR_OFFSET 0x138
+#define AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET 0x150
 
 #ifndef ASM
 
-- 
2.20.1

_______________________________________________
devel mailing list
devel@rtems.org
http://lists.rtems.org/mailman/listinfo/devel

Reply via email to