aboutsummaryrefslogtreecommitdiffstats
path: root/roms/edk2/ArmPkg/Library/ArmLib
diff options
context:
space:
mode:
authorAngelos Mouzakitis <a.mouzakitis@virtualopensystems.com>2023-10-10 14:33:42 +0000
committerAngelos Mouzakitis <a.mouzakitis@virtualopensystems.com>2023-10-10 14:33:42 +0000
commitaf1a266670d040d2f4083ff309d732d648afba2a (patch)
tree2fc46203448ddcc6f81546d379abfaeb323575e9 /roms/edk2/ArmPkg/Library/ArmLib
parente02cda008591317b1625707ff8e115a4841aa889 (diff)
Add submodule dependency filesHEADmaster
Change-Id: Iaf8d18082d3991dec7c0ebbea540f092188eb4ec
Diffstat (limited to 'roms/edk2/ArmPkg/Library/ArmLib')
-rw-r--r--roms/edk2/ArmPkg/Library/ArmLib/AArch64/AArch64ArchTimerSupport.S113
-rw-r--r--roms/edk2/ArmPkg/Library/ArmLib/AArch64/AArch64Lib.c73
-rw-r--r--roms/edk2/ArmPkg/Library/ArmLib/AArch64/AArch64Lib.h39
-rw-r--r--roms/edk2/ArmPkg/Library/ArmLib/AArch64/AArch64Support.S489
-rw-r--r--roms/edk2/ArmPkg/Library/ArmLib/AArch64/ArmLibSupport.S217
-rw-r--r--roms/edk2/ArmPkg/Library/ArmLib/AArch64/ArmLibSupportV8.S106
-rw-r--r--roms/edk2/ArmPkg/Library/ArmLib/Arm/ArmLibSupport.S170
-rw-r--r--roms/edk2/ArmPkg/Library/ArmLib/Arm/ArmLibSupport.asm174
-rw-r--r--roms/edk2/ArmPkg/Library/ArmLib/Arm/ArmLibSupportV7.S89
-rw-r--r--roms/edk2/ArmPkg/Library/ArmLib/Arm/ArmLibSupportV7.asm93
-rw-r--r--roms/edk2/ArmPkg/Library/ArmLib/Arm/ArmV7ArchTimerSupport.S92
-rw-r--r--roms/edk2/ArmPkg/Library/ArmLib/Arm/ArmV7ArchTimerSupport.asm93
-rw-r--r--roms/edk2/ArmPkg/Library/ArmLib/Arm/ArmV7Lib.c73
-rw-r--r--roms/edk2/ArmPkg/Library/ArmLib/Arm/ArmV7Lib.h52
-rw-r--r--roms/edk2/ArmPkg/Library/ArmLib/Arm/ArmV7Support.S301
-rw-r--r--roms/edk2/ArmPkg/Library/ArmLib/Arm/ArmV7Support.asm292
-rw-r--r--roms/edk2/ArmPkg/Library/ArmLib/ArmBaseLib.inf55
-rw-r--r--roms/edk2/ArmPkg/Library/ArmLib/ArmLib.c99
-rw-r--r--roms/edk2/ArmPkg/Library/ArmLib/ArmLibPrivate.h74
19 files changed, 2694 insertions, 0 deletions
diff --git a/roms/edk2/ArmPkg/Library/ArmLib/AArch64/AArch64ArchTimerSupport.S b/roms/edk2/ArmPkg/Library/ArmLib/AArch64/AArch64ArchTimerSupport.S
new file mode 100644
index 000000000..574e0d593
--- /dev/null
+++ b/roms/edk2/ArmPkg/Library/ArmLib/AArch64/AArch64ArchTimerSupport.S
@@ -0,0 +1,113 @@
+#------------------------------------------------------------------------------
+#
+# Copyright (c) 2011 - 2013, ARM Limited. All rights reserved.
+# Copyright (c) 2016, Linaro Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+#------------------------------------------------------------------------------
+
+#include <AsmMacroIoLibV8.h>
+
+ASM_FUNC(ArmReadCntFrq)
+ mrs x0, cntfrq_el0 // Read CNTFRQ
+ ret
+
+
+# NOTE - Can only write while at highest implemented EL level (EL3 on model). Else ReadOnly (EL2, EL1, EL0)
+ASM_FUNC(ArmWriteCntFrq)
+ msr cntfrq_el0, x0 // Write to CNTFRQ
+ ret
+
+
+ASM_FUNC(ArmReadCntPct)
+ mrs x0, cntpct_el0 // Read CNTPCT (Physical counter register)
+ ret
+
+
+ASM_FUNC(ArmReadCntkCtl)
+ mrs x0, cntkctl_el1 // Read CNTK_CTL (Timer PL1 Control Register)
+ ret
+
+
+ASM_FUNC(ArmWriteCntkCtl)
+ msr cntkctl_el1, x0 // Write to CNTK_CTL (Timer PL1 Control Register)
+ ret
+
+
+ASM_FUNC(ArmReadCntpTval)
+ mrs x0, cntp_tval_el0 // Read CNTP_TVAL (PL1 physical timer value register)
+ ret
+
+
+ASM_FUNC(ArmWriteCntpTval)
+ msr cntp_tval_el0, x0 // Write to CNTP_TVAL (PL1 physical timer value register)
+ ret
+
+
+ASM_FUNC(ArmReadCntpCtl)
+ mrs x0, cntp_ctl_el0 // Read CNTP_CTL (PL1 Physical Timer Control Register)
+ ret
+
+
+ASM_FUNC(ArmWriteCntpCtl)
+ msr cntp_ctl_el0, x0 // Write to CNTP_CTL (PL1 Physical Timer Control Register)
+ ret
+
+
+ASM_FUNC(ArmReadCntvTval)
+ mrs x0, cntv_tval_el0 // Read CNTV_TVAL (Virtual Timer Value register)
+ ret
+
+
+ASM_FUNC(ArmWriteCntvTval)
+ msr cntv_tval_el0, x0 // Write to CNTV_TVAL (Virtual Timer Value register)
+ ret
+
+
+ASM_FUNC(ArmReadCntvCtl)
+ mrs x0, cntv_ctl_el0 // Read CNTV_CTL (Virtual Timer Control Register)
+ ret
+
+
+ASM_FUNC(ArmWriteCntvCtl)
+ msr cntv_ctl_el0, x0 // Write to CNTV_CTL (Virtual Timer Control Register)
+ ret
+
+
+ASM_FUNC(ArmReadCntvCt)
+ mrs x0, cntvct_el0 // Read CNTVCT (Virtual Count Register)
+ ret
+
+
+ASM_FUNC(ArmReadCntpCval)
+ mrs x0, cntp_cval_el0 // Read CNTP_CTVAL (Physical Timer Compare Value Register)
+ ret
+
+
+ASM_FUNC(ArmWriteCntpCval)
+ msr cntp_cval_el0, x0 // Write to CNTP_CTVAL (Physical Timer Compare Value Register)
+ ret
+
+
+ASM_FUNC(ArmReadCntvCval)
+ mrs x0, cntv_cval_el0 // Read CNTV_CTVAL (Virtual Timer Compare Value Register)
+ ret
+
+
+ASM_FUNC(ArmWriteCntvCval)
+ msr cntv_cval_el0, x0 // write to CNTV_CTVAL (Virtual Timer Compare Value Register)
+ ret
+
+
+ASM_FUNC(ArmReadCntvOff)
+ mrs x0, cntvoff_el2 // Read CNTVOFF (virtual Offset register)
+ ret
+
+
+ASM_FUNC(ArmWriteCntvOff)
+ msr cntvoff_el2, x0 // Write to CNTVOFF (Virtual Offset register)
+ ret
+
+
+ASM_FUNCTION_REMOVE_IF_UNREFERENCED
diff --git a/roms/edk2/ArmPkg/Library/ArmLib/AArch64/AArch64Lib.c b/roms/edk2/ArmPkg/Library/ArmLib/AArch64/AArch64Lib.c
new file mode 100644
index 000000000..3fbd59119
--- /dev/null
+++ b/roms/edk2/ArmPkg/Library/ArmLib/AArch64/AArch64Lib.c
@@ -0,0 +1,73 @@
+/** @file
+
+ Copyright (c) 2008 - 2009, Apple Inc. All rights reserved.<BR>
+ Portions copyright (c) 2011 - 2014, ARM Ltd. All rights reserved.<BR>
+
+ SPDX-License-Identifier: BSD-2-Clause-Patent
+
+**/
+
+#include <Base.h>
+
+#include <Library/ArmLib.h>
+#include <Library/DebugLib.h>
+
+#include <Chipset/AArch64.h>
+
+#include "AArch64Lib.h"
+#include "ArmLibPrivate.h"
+
+VOID
+AArch64DataCacheOperation (
+ IN AARCH64_CACHE_OPERATION DataCacheOperation
+ )
+{
+ UINTN SavedInterruptState;
+
+ SavedInterruptState = ArmGetInterruptState ();
+ ArmDisableInterrupts();
+
+ AArch64AllDataCachesOperation (DataCacheOperation);
+
+ ArmDataSynchronizationBarrier ();
+
+ if (SavedInterruptState) {
+ ArmEnableInterrupts ();
+ }
+}
+
+VOID
+EFIAPI
+ArmInvalidateDataCache (
+ VOID
+ )
+{
+ ASSERT (!ArmMmuEnabled ());
+
+ ArmDataSynchronizationBarrier ();
+ AArch64DataCacheOperation (ArmInvalidateDataCacheEntryBySetWay);
+}
+
+VOID
+EFIAPI
+ArmCleanInvalidateDataCache (
+ VOID
+ )
+{
+ ASSERT (!ArmMmuEnabled ());
+
+ ArmDataSynchronizationBarrier ();
+ AArch64DataCacheOperation (ArmCleanInvalidateDataCacheEntryBySetWay);
+}
+
+VOID
+EFIAPI
+ArmCleanDataCache (
+ VOID
+ )
+{
+ ASSERT (!ArmMmuEnabled ());
+
+ ArmDataSynchronizationBarrier ();
+ AArch64DataCacheOperation (ArmCleanDataCacheEntryBySetWay);
+}
diff --git a/roms/edk2/ArmPkg/Library/ArmLib/AArch64/AArch64Lib.h b/roms/edk2/ArmPkg/Library/ArmLib/AArch64/AArch64Lib.h
new file mode 100644
index 000000000..b2c8a8ea0
--- /dev/null
+++ b/roms/edk2/ArmPkg/Library/ArmLib/AArch64/AArch64Lib.h
@@ -0,0 +1,39 @@
+/** @file
+
+ Copyright (c) 2008 - 2009, Apple Inc. All rights reserved.<BR>
+ Portions Copyright (c) 2011 - 2013, ARM Ltd. All rights reserved.<BR>
+
+ SPDX-License-Identifier: BSD-2-Clause-Patent
+
+**/
+
+#ifndef __AARCH64_LIB_H__
+#define __AARCH64_LIB_H__
+
+typedef VOID (*AARCH64_CACHE_OPERATION)(UINTN);
+
+VOID
+AArch64AllDataCachesOperation (
+ IN AARCH64_CACHE_OPERATION DataCacheOperation
+ );
+
+VOID
+EFIAPI
+ArmInvalidateDataCacheEntryBySetWay (
+ IN UINTN SetWayFormat
+ );
+
+VOID
+EFIAPI
+ArmCleanDataCacheEntryBySetWay (
+ IN UINTN SetWayFormat
+ );
+
+VOID
+EFIAPI
+ArmCleanInvalidateDataCacheEntryBySetWay (
+ IN UINTN SetWayFormat
+ );
+
+#endif // __AARCH64_LIB_H__
+
diff --git a/roms/edk2/ArmPkg/Library/ArmLib/AArch64/AArch64Support.S b/roms/edk2/ArmPkg/Library/ArmLib/AArch64/AArch64Support.S
new file mode 100644
index 000000000..199374ff5
--- /dev/null
+++ b/roms/edk2/ArmPkg/Library/ArmLib/AArch64/AArch64Support.S
@@ -0,0 +1,489 @@
+#------------------------------------------------------------------------------
+#
+# Copyright (c) 2008 - 2010, Apple Inc. All rights reserved.<BR>
+# Copyright (c) 2011 - 2017, ARM Limited. All rights reserved.
+# Copyright (c) 2016, Linaro Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+#------------------------------------------------------------------------------
+
+#include <Chipset/AArch64.h>
+#include <AsmMacroIoLibV8.h>
+
+.set CTRL_M_BIT, (1 << 0)
+.set CTRL_A_BIT, (1 << 1)
+.set CTRL_C_BIT, (1 << 2)
+.set CTRL_SA_BIT, (1 << 3)
+.set CTRL_I_BIT, (1 << 12)
+.set CTRL_V_BIT, (1 << 12)
+.set CPACR_VFP_BITS, (3 << 20)
+
+ASM_FUNC(ArmInvalidateDataCacheEntryByMVA)
+ dc ivac, x0 // Invalidate single data cache line
+ ret
+
+
+ASM_FUNC(ArmCleanDataCacheEntryByMVA)
+ dc cvac, x0 // Clean single data cache line
+ ret
+
+
+ASM_FUNC(ArmCleanDataCacheEntryToPoUByMVA)
+ dc cvau, x0 // Clean single data cache line to PoU
+ ret
+
+ASM_FUNC(ArmInvalidateInstructionCacheEntryToPoUByMVA)
+ ic ivau, x0 // Invalidate single instruction cache line to PoU
+ ret
+
+
+ASM_FUNC(ArmCleanInvalidateDataCacheEntryByMVA)
+ dc civac, x0 // Clean and invalidate single data cache line
+ ret
+
+
+ASM_FUNC(ArmInvalidateDataCacheEntryBySetWay)
+ dc isw, x0 // Invalidate this line
+ ret
+
+
+ASM_FUNC(ArmCleanInvalidateDataCacheEntryBySetWay)
+ dc cisw, x0 // Clean and Invalidate this line
+ ret
+
+
+ASM_FUNC(ArmCleanDataCacheEntryBySetWay)
+ dc csw, x0 // Clean this line
+ ret
+
+
+ASM_FUNC(ArmInvalidateInstructionCache)
+ ic iallu // Invalidate entire instruction cache
+ dsb sy
+ isb
+ ret
+
+
+ASM_FUNC(ArmEnableMmu)
+ EL1_OR_EL2_OR_EL3(x1)
+1: mrs x0, sctlr_el1 // Read System control register EL1
+ b 4f
+2: mrs x0, sctlr_el2 // Read System control register EL2
+ b 4f
+3: mrs x0, sctlr_el3 // Read System control register EL3
+4: orr x0, x0, #CTRL_M_BIT // Set MMU enable bit
+ EL1_OR_EL2_OR_EL3(x1)
+1: tlbi vmalle1
+ dsb nsh
+ isb
+ msr sctlr_el1, x0 // Write back
+ b 4f
+2: tlbi alle2
+ dsb nsh
+ isb
+ msr sctlr_el2, x0 // Write back
+ b 4f
+3: tlbi alle3
+ dsb nsh
+ isb
+ msr sctlr_el3, x0 // Write back
+4: isb
+ ret
+
+
+ASM_FUNC(ArmDisableMmu)
+ EL1_OR_EL2_OR_EL3(x1)
+1: mrs x0, sctlr_el1 // Read System Control Register EL1
+ b 4f
+2: mrs x0, sctlr_el2 // Read System Control Register EL2
+ b 4f
+3: mrs x0, sctlr_el3 // Read System Control Register EL3
+4: and x0, x0, #~CTRL_M_BIT // Clear MMU enable bit
+ EL1_OR_EL2_OR_EL3(x1)
+1: msr sctlr_el1, x0 // Write back
+ tlbi vmalle1
+ b 4f
+2: msr sctlr_el2, x0 // Write back
+ tlbi alle2
+ b 4f
+3: msr sctlr_el3, x0 // Write back
+ tlbi alle3
+4: dsb sy
+ isb
+ ret
+
+
+ASM_FUNC(ArmDisableCachesAndMmu)
+ EL1_OR_EL2_OR_EL3(x1)
+1: mrs x0, sctlr_el1 // Get control register EL1
+ b 4f
+2: mrs x0, sctlr_el2 // Get control register EL2
+ b 4f
+3: mrs x0, sctlr_el3 // Get control register EL3
+4: mov x1, #~(CTRL_M_BIT | CTRL_C_BIT | CTRL_I_BIT) // Disable MMU, D & I caches
+ and x0, x0, x1
+ EL1_OR_EL2_OR_EL3(x1)
+1: msr sctlr_el1, x0 // Write back control register
+ b 4f
+2: msr sctlr_el2, x0 // Write back control register
+ b 4f
+3: msr sctlr_el3, x0 // Write back control register
+4: dsb sy
+ isb
+ ret
+
+
+ASM_FUNC(ArmMmuEnabled)
+ EL1_OR_EL2_OR_EL3(x1)
+1: mrs x0, sctlr_el1 // Get control register EL1
+ b 4f
+2: mrs x0, sctlr_el2 // Get control register EL2
+ b 4f
+3: mrs x0, sctlr_el3 // Get control register EL3
+4: and x0, x0, #CTRL_M_BIT
+ ret
+
+
+ASM_FUNC(ArmEnableDataCache)
+ EL1_OR_EL2_OR_EL3(x1)
+1: mrs x0, sctlr_el1 // Get control register EL1
+ b 4f
+2: mrs x0, sctlr_el2 // Get control register EL2
+ b 4f
+3: mrs x0, sctlr_el3 // Get control register EL3
+4: orr x0, x0, #CTRL_C_BIT // Set C bit
+ EL1_OR_EL2_OR_EL3(x1)
+1: msr sctlr_el1, x0 // Write back control register
+ b 4f
+2: msr sctlr_el2, x0 // Write back control register
+ b 4f
+3: msr sctlr_el3, x0 // Write back control register
+4: dsb sy
+ isb
+ ret
+
+
+ASM_FUNC(ArmDisableDataCache)
+ EL1_OR_EL2_OR_EL3(x1)
+1: mrs x0, sctlr_el1 // Get control register EL1
+ b 4f
+2: mrs x0, sctlr_el2 // Get control register EL2
+ b 4f
+3: mrs x0, sctlr_el3 // Get control register EL3
+4: and x0, x0, #~CTRL_C_BIT // Clear C bit
+ EL1_OR_EL2_OR_EL3(x1)
+1: msr sctlr_el1, x0 // Write back control register
+ b 4f
+2: msr sctlr_el2, x0 // Write back control register
+ b 4f
+3: msr sctlr_el3, x0 // Write back control register
+4: dsb sy
+ isb
+ ret
+
+
+ASM_FUNC(ArmEnableInstructionCache)
+ EL1_OR_EL2_OR_EL3(x1)
+1: mrs x0, sctlr_el1 // Get control register EL1
+ b 4f
+2: mrs x0, sctlr_el2 // Get control register EL2
+ b 4f
+3: mrs x0, sctlr_el3 // Get control register EL3
+4: orr x0, x0, #CTRL_I_BIT // Set I bit
+ EL1_OR_EL2_OR_EL3(x1)
+1: msr sctlr_el1, x0 // Write back control register
+ b 4f
+2: msr sctlr_el2, x0 // Write back control register
+ b 4f
+3: msr sctlr_el3, x0 // Write back control register
+4: dsb sy
+ isb
+ ret
+
+
+ASM_FUNC(ArmDisableInstructionCache)
+ EL1_OR_EL2_OR_EL3(x1)
+1: mrs x0, sctlr_el1 // Get control register EL1
+ b 4f
+2: mrs x0, sctlr_el2 // Get control register EL2
+ b 4f
+3: mrs x0, sctlr_el3 // Get control register EL3
+4: and x0, x0, #~CTRL_I_BIT // Clear I bit
+ EL1_OR_EL2_OR_EL3(x1)
+1: msr sctlr_el1, x0 // Write back control register
+ b 4f
+2: msr sctlr_el2, x0 // Write back control register
+ b 4f
+3: msr sctlr_el3, x0 // Write back control register
+4: dsb sy
+ isb
+ ret
+
+
+ASM_FUNC(ArmEnableAlignmentCheck)
+ EL1_OR_EL2(x1)
+1: mrs x0, sctlr_el1 // Get control register EL1
+ b 3f
+2: mrs x0, sctlr_el2 // Get control register EL2
+3: orr x0, x0, #CTRL_A_BIT // Set A (alignment check) bit
+ EL1_OR_EL2(x1)
+1: msr sctlr_el1, x0 // Write back control register
+ b 3f
+2: msr sctlr_el2, x0 // Write back control register
+3: dsb sy
+ isb
+ ret
+
+
+ASM_FUNC(ArmDisableAlignmentCheck)
+ EL1_OR_EL2_OR_EL3(x1)
+1: mrs x0, sctlr_el1 // Get control register EL1
+ b 4f
+2: mrs x0, sctlr_el2 // Get control register EL2
+ b 4f
+3: mrs x0, sctlr_el3 // Get control register EL3
+4: and x0, x0, #~CTRL_A_BIT // Clear A (alignment check) bit
+ EL1_OR_EL2_OR_EL3(x1)
+1: msr sctlr_el1, x0 // Write back control register
+ b 4f
+2: msr sctlr_el2, x0 // Write back control register
+ b 4f
+3: msr sctlr_el3, x0 // Write back control register
+4: dsb sy
+ isb
+ ret
+
+ASM_FUNC(ArmEnableStackAlignmentCheck)
+ EL1_OR_EL2(x1)
+1: mrs x0, sctlr_el1 // Get control register EL1
+ b 3f
+2: mrs x0, sctlr_el2 // Get control register EL2
+3: orr x0, x0, #CTRL_SA_BIT // Set SA (stack alignment check) bit
+ EL1_OR_EL2(x1)
+1: msr sctlr_el1, x0 // Write back control register
+ b 3f
+2: msr sctlr_el2, x0 // Write back control register
+3: dsb sy
+ isb
+ ret
+
+
+ASM_FUNC(ArmDisableStackAlignmentCheck)
+ EL1_OR_EL2_OR_EL3(x1)
+1: mrs x0, sctlr_el1 // Get control register EL1
+ b 4f
+2: mrs x0, sctlr_el2 // Get control register EL2
+ b 4f
+3: mrs x0, sctlr_el3 // Get control register EL3
+4: bic x0, x0, #CTRL_SA_BIT // Clear SA (stack alignment check) bit
+ EL1_OR_EL2_OR_EL3(x1)
+1: msr sctlr_el1, x0 // Write back control register
+ b 4f
+2: msr sctlr_el2, x0 // Write back control register
+ b 4f
+3: msr sctlr_el3, x0 // Write back control register
+4: dsb sy
+ isb
+ ret
+
+
+// Always turned on in AArch64. Else implementation specific. Leave in for C compatibility for now
+ASM_FUNC(ArmEnableBranchPrediction)
+ ret
+
+
+// Always turned on in AArch64. Else implementation specific. Leave in for C compatibility for now.
+ASM_FUNC(ArmDisableBranchPrediction)
+ ret
+
+
+ASM_FUNC(AArch64AllDataCachesOperation)
+// We can use regs 0-7 and 9-15 without having to save/restore.
+// Save our link register on the stack. - The stack must always be quad-word aligned
+ stp x29, x30, [sp, #-16]!
+ mov x29, sp
+ mov x1, x0 // Save Function call in x1
+ mrs x6, clidr_el1 // Read EL1 CLIDR
+ and x3, x6, #0x7000000 // Mask out all but Level of Coherency (LoC)
+ lsr x3, x3, #23 // Left align cache level value - the level is shifted by 1 to the
+ // right to ease the access to CSSELR and the Set/Way operation.
+ cbz x3, L_Finished // No need to clean if LoC is 0
+ mov x10, #0 // Start clean at cache level 0
+
+Loop1:
+ add x2, x10, x10, lsr #1 // Work out 3x cachelevel for cache info
+ lsr x12, x6, x2 // bottom 3 bits are the Cache type for this level
+ and x12, x12, #7 // get those 3 bits alone
+ cmp x12, #2 // what cache at this level?
+ b.lt L_Skip // no cache or only instruction cache at this level
+ msr csselr_el1, x10 // write the Cache Size selection register with current level (CSSELR)
+ isb // isb to sync the change to the CacheSizeID reg
+ mrs x12, ccsidr_el1 // reads current Cache Size ID register (CCSIDR)
+ and x2, x12, #0x7 // extract the line length field
+ add x2, x2, #4 // add 4 for the line length offset (log2 16 bytes)
+ mov x4, #0x400
+ sub x4, x4, #1
+ and x4, x4, x12, lsr #3 // x4 is the max number on the way size (right aligned)
+ clz w5, w4 // w5 is the bit position of the way size increment
+ mov x7, #0x00008000
+ sub x7, x7, #1
+ and x7, x7, x12, lsr #13 // x7 is the max number of the index size (right aligned)
+
+Loop2:
+ mov x9, x4 // x9 working copy of the max way size (right aligned)
+
+Loop3:
+ lsl x11, x9, x5
+ orr x0, x10, x11 // factor in the way number and cache number
+ lsl x11, x7, x2
+ orr x0, x0, x11 // factor in the index number
+
+ blr x1 // Goto requested cache operation
+
+ subs x9, x9, #1 // decrement the way number
+ b.ge Loop3
+ subs x7, x7, #1 // decrement the index
+ b.ge Loop2
+L_Skip:
+ add x10, x10, #2 // increment the cache number
+ cmp x3, x10
+ b.gt Loop1
+
+L_Finished:
+ dsb sy
+ isb
+ ldp x29, x30, [sp], #0x10
+ ret
+
+
+ASM_FUNC(ArmDataMemoryBarrier)
+ dmb sy
+ ret
+
+
+ASM_FUNC(ArmDataSynchronizationBarrier)
+ dsb sy
+ ret
+
+
+ASM_FUNC(ArmInstructionSynchronizationBarrier)
+ isb
+ ret
+
+
+ASM_FUNC(ArmWriteVBar)
+ EL1_OR_EL2_OR_EL3(x1)
+1: msr vbar_el1, x0 // Set the Address of the EL1 Vector Table in the VBAR register
+ b 4f
+2: msr vbar_el2, x0 // Set the Address of the EL2 Vector Table in the VBAR register
+ b 4f
+3: msr vbar_el3, x0 // Set the Address of the EL3 Vector Table in the VBAR register
+4: isb
+ ret
+
+ASM_FUNC(ArmReadVBar)
+ EL1_OR_EL2_OR_EL3(x1)
+1: mrs x0, vbar_el1 // Set the Address of the EL1 Vector Table in the VBAR register
+ ret
+2: mrs x0, vbar_el2 // Set the Address of the EL2 Vector Table in the VBAR register
+ ret
+3: mrs x0, vbar_el3 // Set the Address of the EL3 Vector Table in the VBAR register
+ ret
+
+
+ASM_FUNC(ArmEnableVFP)
+ // Check whether floating-point is implemented in the processor.
+ mov x1, x30 // Save LR
+ bl ArmReadIdPfr0 // Read EL1 Processor Feature Register (PFR0)
+ mov x30, x1 // Restore LR
+ ubfx x0, x0, #16, #4 // Extract the FP bits 16:19
+ cmp x0, #0xF // Check if FP bits are '1111b',
+ // i.e. Floating Point not implemented
+ b.eq 4f // Exit when VFP is not implemented.
+
+ // FVP is implemented.
+ // Make sure VFP exceptions are not trapped (to any exception level).
+ mrs x0, cpacr_el1 // Read EL1 Coprocessor Access Control Register (CPACR)
+ orr x0, x0, #CPACR_VFP_BITS // Disable FVP traps to EL1
+ msr cpacr_el1, x0 // Write back EL1 Coprocessor Access Control Register (CPACR)
+ mov x1, #AARCH64_CPTR_TFP // TFP Bit for trapping VFP Exceptions
+ EL1_OR_EL2_OR_EL3(x2)
+1:ret // Not configurable in EL1
+2:mrs x0, cptr_el2 // Disable VFP traps to EL2
+ bic x0, x0, x1
+ msr cptr_el2, x0
+ ret
+3:mrs x0, cptr_el3 // Disable VFP traps to EL3
+ bic x0, x0, x1
+ msr cptr_el3, x0
+4:ret
+
+
+ASM_FUNC(ArmCallWFI)
+ wfi
+ ret
+
+
+ASM_FUNC(ArmReadMpidr)
+ mrs x0, mpidr_el1 // read EL1 MPIDR
+ ret
+
+
+// Keep old function names for C compatibility for now. Change later?
+ASM_FUNC(ArmReadTpidrurw)
+ mrs x0, tpidr_el0 // read tpidr_el0 (v7 TPIDRURW) -> (v8 TPIDR_EL0)
+ ret
+
+
+// Keep old function names for C compatibility for now. Change later?
+ASM_FUNC(ArmWriteTpidrurw)
+ msr tpidr_el0, x0 // write tpidr_el0 (v7 TPIDRURW) -> (v8 TPIDR_EL0)
+ ret
+
+
+// Arch timers are mandatory on AArch64
+ASM_FUNC(ArmIsArchTimerImplemented)
+ mov x0, #1
+ ret
+
+
+ASM_FUNC(ArmReadIdPfr0)
+ mrs x0, id_aa64pfr0_el1 // Read ID_AA64PFR0 Register
+ ret
+
+
+// Q: id_aa64pfr1_el1 not defined yet. What does this function want to access?
+// A: used to setup arch timer. Check if we have security extensions, permissions to set stuff.
+// See: ArmPkg/Library/ArmArchTimerLib/AArch64/ArmArchTimerLib.c
+// Not defined yet, but stick in here for now, should read all zeros.
+ASM_FUNC(ArmReadIdPfr1)
+ mrs x0, id_aa64pfr1_el1 // Read ID_PFR1 Register
+ ret
+
+// VOID ArmWriteHcr(UINTN Hcr)
+ASM_FUNC(ArmWriteHcr)
+ msr hcr_el2, x0 // Write the passed HCR value
+ ret
+
+// UINTN ArmReadHcr(VOID)
+ASM_FUNC(ArmReadHcr)
+ mrs x0, hcr_el2
+ ret
+
+// UINTN ArmReadCurrentEL(VOID)
+ASM_FUNC(ArmReadCurrentEL)
+ mrs x0, CurrentEL
+ ret
+
+// UINT32 ArmReadCntHctl(VOID)
+ASM_FUNC(ArmReadCntHctl)
+ mrs x0, cnthctl_el2
+ ret
+
+// VOID ArmWriteCntHctl(UINT32 CntHctl)
+ASM_FUNC(ArmWriteCntHctl)
+ msr cnthctl_el2, x0
+ ret
+
+ASM_FUNCTION_REMOVE_IF_UNREFERENCED
diff --git a/roms/edk2/ArmPkg/Library/ArmLib/AArch64/ArmLibSupport.S b/roms/edk2/ArmPkg/Library/ArmLib/AArch64/ArmLibSupport.S
new file mode 100644
index 000000000..ba0ec5682
--- /dev/null
+++ b/roms/edk2/ArmPkg/Library/ArmLib/AArch64/ArmLibSupport.S
@@ -0,0 +1,217 @@
+#------------------------------------------------------------------------------
+#
+# Copyright (c) 2008 - 2009, Apple Inc. All rights reserved.<BR>
+# Copyright (c) 2011 - 2016, ARM Limited. All rights reserved.
+# Copyright (c) 2016, Linaro Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+#------------------------------------------------------------------------------
+
+#include <AsmMacroIoLibV8.h>
+
+.set DAIF_RD_FIQ_BIT, (1 << 6)
+.set DAIF_RD_IRQ_BIT, (1 << 7)
+
+.set SCTLR_ELx_M_BIT_POS, (0)
+
+ASM_FUNC(ArmReadMidr)
+ mrs x0, midr_el1 // Read from Main ID Register (MIDR)
+ ret
+
+ASM_FUNC(ArmCacheInfo)
+ mrs x0, ctr_el0 // Read from Cache Type Regiter (CTR)
+ ret
+
+ASM_FUNC(ArmGetInterruptState)
+ mrs x0, daif
+ tst w0, #DAIF_RD_IRQ_BIT // Check if IRQ is enabled. Enabled if 0 (Z=1)
+ cset w0, eq // if Z=1 return 1, else 0
+ ret
+
+ASM_FUNC(ArmGetFiqState)
+ mrs x0, daif
+ tst w0, #DAIF_RD_FIQ_BIT // Check if FIQ is enabled. Enabled if 0 (Z=1)
+ cset w0, eq // if Z=1 return 1, else 0
+ ret
+
+ASM_FUNC(ArmWriteCpacr)
+ msr cpacr_el1, x0 // Coprocessor Access Control Reg (CPACR)
+ ret
+
+ASM_FUNC(ArmWriteAuxCr)
+ EL1_OR_EL2(x1)
+1:msr actlr_el1, x0 // Aux Control Reg (ACTLR) at EL1. Also available in EL2 and EL3
+ ret
+2:msr actlr_el2, x0 // Aux Control Reg (ACTLR) at EL1. Also available in EL2 and EL3
+ ret
+
+ASM_FUNC(ArmReadAuxCr)
+ EL1_OR_EL2(x1)
+1:mrs x0, actlr_el1 // Aux Control Reg (ACTLR) at EL1. Also available in EL2 and EL3
+ ret
+2:mrs x0, actlr_el2 // Aux Control Reg (ACTLR) at EL1. Also available in EL2 and EL3
+ ret
+
+ASM_FUNC(ArmSetTTBR0)
+ EL1_OR_EL2_OR_EL3(x1)
+1:msr ttbr0_el1, x0 // Translation Table Base Reg 0 (TTBR0)
+ b 4f
+2:msr ttbr0_el2, x0 // Translation Table Base Reg 0 (TTBR0)
+ b 4f
+3:msr ttbr0_el3, x0 // Translation Table Base Reg 0 (TTBR0)
+4:isb
+ ret
+
+ASM_FUNC(ArmGetTTBR0BaseAddress)
+ EL1_OR_EL2(x1)
+1:mrs x0, ttbr0_el1
+ b 3f
+2:mrs x0, ttbr0_el2
+3:and x0, x0, 0xFFFFFFFFFFFF /* Look at bottom 48 bits */
+ isb
+ ret
+
+ASM_FUNC(ArmGetTCR)
+ EL1_OR_EL2_OR_EL3(x1)
+1:mrs x0, tcr_el1
+ b 4f
+2:mrs x0, tcr_el2
+ b 4f
+3:mrs x0, tcr_el3
+4:isb
+ ret
+
+ASM_FUNC(ArmSetTCR)
+ EL1_OR_EL2_OR_EL3(x1)
+1:msr tcr_el1, x0
+ b 4f
+2:msr tcr_el2, x0
+ b 4f
+3:msr tcr_el3, x0
+4:isb
+ ret
+
+ASM_FUNC(ArmGetMAIR)
+ EL1_OR_EL2_OR_EL3(x1)
+1:mrs x0, mair_el1
+ b 4f
+2:mrs x0, mair_el2
+ b 4f
+3:mrs x0, mair_el3
+4:isb
+ ret
+
+ASM_FUNC(ArmSetMAIR)
+ EL1_OR_EL2_OR_EL3(x1)
+1:msr mair_el1, x0
+ b 4f
+2:msr mair_el2, x0
+ b 4f
+3:msr mair_el3, x0
+4:isb
+ ret
+
+
+//
+//VOID
+//ArmUpdateTranslationTableEntry (
+// IN VOID *TranslationTableEntry // X0
+// IN VOID *MVA // X1
+// );
+ASM_FUNC(ArmUpdateTranslationTableEntry)
+ dsb nshst
+ lsr x1, x1, #12
+ EL1_OR_EL2_OR_EL3(x2)
+1: tlbi vaae1, x1 // TLB Invalidate VA , EL1
+ mrs x2, sctlr_el1
+ b 4f
+2: tlbi vae2, x1 // TLB Invalidate VA , EL2
+ mrs x2, sctlr_el2
+ b 4f
+3: tlbi vae3, x1 // TLB Invalidate VA , EL3
+ mrs x2, sctlr_el3
+4: tbnz x2, SCTLR_ELx_M_BIT_POS, 5f
+ dc ivac, x0 // invalidate in Dcache if MMU is still off
+5: dsb nsh
+ isb
+ ret
+
+ASM_FUNC(ArmInvalidateTlb)
+ EL1_OR_EL2_OR_EL3(x0)
+1: tlbi vmalle1
+ b 4f
+2: tlbi alle2
+ b 4f
+3: tlbi alle3
+4: dsb sy
+ isb
+ ret
+
+ASM_FUNC(ArmWriteCptr)
+ msr cptr_el3, x0 // EL3 Coprocessor Trap Reg (CPTR)
+ ret
+
+ASM_FUNC(ArmWriteScr)
+ msr scr_el3, x0 // Secure configuration register EL3
+ isb
+ ret
+
+ASM_FUNC(ArmWriteMVBar)
+ msr vbar_el3, x0 // Exception Vector Base address for Monitor on EL3
+ ret
+
+ASM_FUNC(ArmCallWFE)
+ wfe
+ ret
+
+ASM_FUNC(ArmCallSEV)
+ sev
+ ret
+
+ASM_FUNC(ArmReadCpuActlr)
+ mrs x0, S3_1_c15_c2_0
+ ret
+
+ASM_FUNC(ArmWriteCpuActlr)
+ msr S3_1_c15_c2_0, x0
+ dsb sy
+ isb
+ ret
+
+ASM_FUNC(ArmReadSctlr)
+ EL1_OR_EL2_OR_EL3(x1)
+1:mrs x0, sctlr_el1
+ ret
+2:mrs x0, sctlr_el2
+ ret
+3:mrs x0, sctlr_el3
+4:ret
+
+ASM_FUNC(ArmWriteSctlr)
+ EL1_OR_EL2_OR_EL3(x1)
+1:msr sctlr_el1, x0
+ ret
+2:msr sctlr_el2, x0
+ ret
+3:msr sctlr_el3, x0
+4:ret
+
+ASM_FUNC(ArmGetPhysicalAddressBits)
+ mrs x0, id_aa64mmfr0_el1
+ adr x1, .LPARanges
+ and x0, x0, #0xf
+ ldrb w0, [x1, x0]
+ ret
+
+//
+// Bits 0..3 of the AA64MFR0_EL1 system register encode the size of the
+// physical address space support on this CPU:
+// 0 == 32 bits, 1 == 36 bits, etc etc
+// 7 and up are reserved
+//
+.LPARanges:
+ .byte 32, 36, 40, 42, 44, 48, 52, 0
+ .byte 0, 0, 0, 0, 0, 0, 0, 0
+
+ASM_FUNCTION_REMOVE_IF_UNREFERENCED
diff --git a/roms/edk2/ArmPkg/Library/ArmLib/AArch64/ArmLibSupportV8.S b/roms/edk2/ArmPkg/Library/ArmLib/AArch64/ArmLibSupportV8.S
new file mode 100644
index 000000000..0e8d21e22
--- /dev/null
+++ b/roms/edk2/ArmPkg/Library/ArmLib/AArch64/ArmLibSupportV8.S
@@ -0,0 +1,106 @@
+#------------------------------------------------------------------------------
+#
+# Copyright (c) 2008 - 2009, Apple Inc. All rights reserved.<BR>
+# Copyright (c) 2011 - 2014, ARM Limited. All rights reserved.
+# Copyright (c) 2016, Linaro Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+#------------------------------------------------------------------------------
+
+#include <AsmMacroIoLibV8.h>
+
+.set MPIDR_U_BIT, (30)
+.set MPIDR_U_MASK, (1 << MPIDR_U_BIT)
+
+// DAIF bit definitions for writing through msr daifclr/sr daifset
+.set DAIF_WR_FIQ_BIT, (1 << 0)
+.set DAIF_WR_IRQ_BIT, (1 << 1)
+.set DAIF_WR_ABORT_BIT, (1 << 2)
+.set DAIF_WR_DEBUG_BIT, (1 << 3)
+.set DAIF_WR_INT_BITS, (DAIF_WR_FIQ_BIT | DAIF_WR_IRQ_BIT)
+.set DAIF_WR_ALL, (DAIF_WR_DEBUG_BIT | DAIF_WR_ABORT_BIT | DAIF_WR_INT_BITS)
+
+
+ASM_FUNC(ArmIsMpCore)
+ mrs x0, mpidr_el1 // Read EL1 Multiprocessor Affinty Reg (MPIDR)
+ and x0, x0, #MPIDR_U_MASK // U Bit clear, the processor is part of a multiprocessor system
+ lsr x0, x0, #MPIDR_U_BIT
+ eor x0, x0, #1
+ ret
+
+
+ASM_FUNC(ArmEnableAsynchronousAbort)
+ msr daifclr, #DAIF_WR_ABORT_BIT
+ isb
+ ret
+
+
+ASM_FUNC(ArmDisableAsynchronousAbort)
+ msr daifset, #DAIF_WR_ABORT_BIT
+ isb
+ ret
+
+
+ASM_FUNC(ArmEnableIrq)
+ msr daifclr, #DAIF_WR_IRQ_BIT
+ isb
+ ret
+
+
+ASM_FUNC(ArmDisableIrq)
+ msr daifset, #DAIF_WR_IRQ_BIT
+ isb
+ ret
+
+
+ASM_FUNC(ArmEnableFiq)
+ msr daifclr, #DAIF_WR_FIQ_BIT
+ isb
+ ret
+
+
+ASM_FUNC(ArmDisableFiq)
+ msr daifset, #DAIF_WR_FIQ_BIT
+ isb
+ ret
+
+
+ASM_FUNC(ArmEnableInterrupts)
+ msr daifclr, #DAIF_WR_INT_BITS
+ isb
+ ret
+
+
+ASM_FUNC(ArmDisableInterrupts)
+ msr daifset, #DAIF_WR_INT_BITS
+ isb
+ ret
+
+
+ASM_FUNC(ArmDisableAllExceptions)
+ msr daifset, #DAIF_WR_ALL
+ isb
+ ret
+
+
+// UINT32
+// ReadCCSIDR (
+// IN UINT32 CSSELR
+// )
+ASM_FUNC(ReadCCSIDR)
+ msr csselr_el1, x0 // Write Cache Size Selection Register (CSSELR)
+ isb
+ mrs x0, ccsidr_el1 // Read current Cache Size ID Register (CCSIDR)
+ ret
+
+
+// UINT32
+// ReadCLIDR (
+// IN UINT32 CSSELR
+// )
+ASM_FUNC(ReadCLIDR)
+ mrs x0, clidr_el1 // Read Cache Level ID Register
+ ret
+
+ASM_FUNCTION_REMOVE_IF_UNREFERENCED
diff --git a/roms/edk2/ArmPkg/Library/ArmLib/Arm/ArmLibSupport.S b/roms/edk2/ArmPkg/Library/ArmLib/Arm/ArmLibSupport.S
new file mode 100644
index 000000000..0856740e3
--- /dev/null
+++ b/roms/edk2/ArmPkg/Library/ArmLib/Arm/ArmLibSupport.S
@@ -0,0 +1,170 @@
+#------------------------------------------------------------------------------
+#
+# Copyright (c) 2008 - 2009, Apple Inc. All rights reserved.<BR>
+# Copyright (c) 2011 - 2016, ARM Limited. All rights reserved.
+# Copyright (c) 2016, Linaro Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+#------------------------------------------------------------------------------
+
+#include <AsmMacroIoLib.h>
+
+ASM_FUNC(ArmReadMidr)
+ mrc p15,0,R0,c0,c0,0
+ bx LR
+
+ASM_FUNC(ArmCacheInfo)
+ mrc p15,0,R0,c0,c0,1
+ bx LR
+
+ASM_FUNC(ArmGetInterruptState)
+ mrs R0,CPSR
+ tst R0,#0x80 @Check if IRQ is enabled.
+ moveq R0,#1
+ movne R0,#0
+ bx LR
+
+ASM_FUNC(ArmGetFiqState)
+ mrs R0,CPSR
+ tst R0,#0x40 @Check if FIQ is enabled.
+ moveq R0,#1
+ movne R0,#0
+ bx LR
+
+ASM_FUNC(ArmSetDomainAccessControl)
+ mcr p15,0,r0,c3,c0,0
+ bx lr
+
+ASM_FUNC(CPSRMaskInsert) @ on entry, r0 is the mask and r1 is the field to insert
+ stmfd sp!, {r4-r12, lr} @ save all the banked registers
+ mov r3, sp @ copy the stack pointer into a non-banked register
+ mrs r2, cpsr @ read the cpsr
+ bic r2, r2, r0 @ clear mask in the cpsr
+ and r1, r1, r0 @ clear bits outside the mask in the input
+ orr r2, r2, r1 @ set field
+ msr cpsr_cxsf, r2 @ write back cpsr (may have caused a mode switch)
+ isb
+ mov sp, r3 @ restore stack pointer
+ ldmfd sp!, {r4-r12, lr} @ restore registers
+ bx lr @ return (hopefully thumb-safe!)
+
+ASM_FUNC(CPSRRead)
+ mrs r0, cpsr
+ bx lr
+
+ASM_FUNC(ArmReadCpacr)
+ mrc p15, 0, r0, c1, c0, 2
+ bx lr
+
+ASM_FUNC(ArmWriteCpacr)
+ mcr p15, 0, r0, c1, c0, 2
+ isb
+ bx lr
+
+ASM_FUNC(ArmWriteAuxCr)
+ mcr p15, 0, r0, c1, c0, 1
+ bx lr
+
+ASM_FUNC(ArmReadAuxCr)
+ mrc p15, 0, r0, c1, c0, 1
+ bx lr
+
+ASM_FUNC(ArmSetTTBR0)
+ mcr p15,0,r0,c2,c0,0
+ isb
+ bx lr
+
+ASM_FUNC(ArmSetTTBCR)
+ mcr p15, 0, r0, c2, c0, 2
+ isb
+ bx lr
+
+ASM_FUNC(ArmGetTTBR0BaseAddress)
+ mrc p15,0,r0,c2,c0,0
+ MOV32 (r1, 0xFFFFC000)
+ and r0, r0, r1
+ isb
+ bx lr
+
+//
+//VOID
+//ArmUpdateTranslationTableEntry (
+// IN VOID *TranslationTableEntry // R0
+// IN VOID *MVA // R1
+// );
+ASM_FUNC(ArmUpdateTranslationTableEntry)
+ mcr p15,0,R1,c8,c7,1 @ TLBIMVA TLB Invalidate MVA
+ mcr p15,0,R9,c7,c5,6 @ BPIALL Invalidate Branch predictor array. R9 == NoOp
+ dsb
+ isb
+ bx lr
+
+ASM_FUNC(ArmInvalidateTlb)
+ mov r0,#0
+ mcr p15,0,r0,c8,c7,0
+ mcr p15,0,R9,c7,c5,6 @ BPIALL Invalidate Branch predictor array. R9 == NoOp
+ dsb
+ isb
+ bx lr
+
+ASM_FUNC(ArmReadScr)
+ mrc p15, 0, r0, c1, c1, 0
+ bx lr
+
+ASM_FUNC(ArmWriteScr)
+ mcr p15, 0, r0, c1, c1, 0
+ isb
+ bx lr
+
+ASM_FUNC(ArmReadHVBar)
+ mrc p15, 4, r0, c12, c0, 0
+ bx lr
+
+ASM_FUNC(ArmWriteHVBar)
+ mcr p15, 4, r0, c12, c0, 0
+ bx lr
+
+ASM_FUNC(ArmReadMVBar)
+ mrc p15, 0, r0, c12, c0, 1
+ bx lr
+
+ASM_FUNC(ArmWriteMVBar)
+ mcr p15, 0, r0, c12, c0, 1
+ bx lr
+
+ASM_FUNC(ArmCallWFE)
+ wfe
+ bx lr
+
+ASM_FUNC(ArmCallSEV)
+ sev
+ bx lr
+
+ASM_FUNC(ArmReadSctlr)
+ mrc p15, 0, r0, c1, c0, 0 @ Read SCTLR into R0 (Read control register configuration data)
+ bx lr
+
+ASM_FUNC(ArmWriteSctlr)
+ mcr p15, 0, r0, c1, c0, 0
+ bx lr
+
+ASM_FUNC(ArmReadCpuActlr)
+ mrc p15, 0, r0, c1, c0, 1
+ bx lr
+
+ASM_FUNC(ArmWriteCpuActlr)
+ mcr p15, 0, r0, c1, c0, 1
+ dsb
+ isb
+ bx lr
+
+ASM_FUNC (ArmGetPhysicalAddressBits)
+ mrc p15, 0, r0, c0, c1, 4 // MMFR0
+ and r0, r0, #0xf // VMSA [3:0]
+ cmp r0, #5 // >= 5 implies LPAE support
+ movlt r0, #32 // 32 bits if no LPAE
+ movge r0, #40 // 40 bits if LPAE
+ bx lr
+
+ASM_FUNCTION_REMOVE_IF_UNREFERENCED
diff --git a/roms/edk2/ArmPkg/Library/ArmLib/Arm/ArmLibSupport.asm b/roms/edk2/ArmPkg/Library/ArmLib/Arm/ArmLibSupport.asm
new file mode 100644
index 000000000..1265dddea
--- /dev/null
+++ b/roms/edk2/ArmPkg/Library/ArmLib/Arm/ArmLibSupport.asm
@@ -0,0 +1,174 @@
+//------------------------------------------------------------------------------
+//
+// Copyright (c) 2008 - 2009, Apple Inc. All rights reserved.<BR>
+// Copyright (c) 2011 - 2016, ARM Limited. All rights reserved.
+//
+// SPDX-License-Identifier: BSD-2-Clause-Patent
+//
+//------------------------------------------------------------------------------
+
+ INCLUDE AsmMacroIoLib.inc
+
+
+ INCLUDE AsmMacroExport.inc
+
+ RVCT_ASM_EXPORT ArmReadMidr
+ mrc p15,0,R0,c0,c0,0
+ bx LR
+
+ RVCT_ASM_EXPORT ArmCacheInfo
+ mrc p15,0,R0,c0,c0,1
+ bx LR
+
+ RVCT_ASM_EXPORT ArmGetInterruptState
+ mrs R0,CPSR
+ tst R0,#0x80 // Check if IRQ is enabled.
+ moveq R0,#1
+ movne R0,#0
+ bx LR
+
+ RVCT_ASM_EXPORT ArmGetFiqState
+ mrs R0,CPSR
+ tst R0,#0x40 // Check if FIQ is enabled.
+ moveq R0,#1
+ movne R0,#0
+ bx LR
+
+ RVCT_ASM_EXPORT ArmSetDomainAccessControl
+ mcr p15,0,r0,c3,c0,0
+ bx lr
+
+ RVCT_ASM_EXPORT CPSRMaskInsert
+ stmfd sp!, {r4-r12, lr} // save all the banked registers
+ mov r3, sp // copy the stack pointer into a non-banked register
+ mrs r2, cpsr // read the cpsr
+ bic r2, r2, r0 // clear mask in the cpsr
+ and r1, r1, r0 // clear bits outside the mask in the input
+ orr r2, r2, r1 // set field
+ msr cpsr_cxsf, r2 // write back cpsr (may have caused a mode switch)
+ isb
+ mov sp, r3 // restore stack pointer
+ ldmfd sp!, {r4-r12, lr} // restore registers
+  bx   lr                  // return (hopefully thumb-safe!)
+
+ RVCT_ASM_EXPORT CPSRRead
+ mrs r0, cpsr
+ bx lr
+
+ RVCT_ASM_EXPORT ArmReadCpacr
+ mrc p15, 0, r0, c1, c0, 2
+ bx lr
+
+ RVCT_ASM_EXPORT ArmWriteCpacr
+ mcr p15, 0, r0, c1, c0, 2
+ isb
+ bx lr
+
+ RVCT_ASM_EXPORT ArmWriteAuxCr
+ mcr p15, 0, r0, c1, c0, 1
+ bx lr
+
+ RVCT_ASM_EXPORT ArmReadAuxCr
+ mrc p15, 0, r0, c1, c0, 1
+ bx lr
+
+ RVCT_ASM_EXPORT ArmSetTTBR0
+ mcr p15,0,r0,c2,c0,0
+ isb
+ bx lr
+
+ RVCT_ASM_EXPORT ArmSetTTBCR
+ mcr p15, 0, r0, c2, c0, 2
+ isb
+ bx lr
+
+ RVCT_ASM_EXPORT ArmGetTTBR0BaseAddress
+ mrc p15,0,r0,c2,c0,0
+ MOV32 r1, 0xFFFFC000
+ and r0, r0, r1
+ isb
+ bx lr
+
+//
+//VOID
+//ArmUpdateTranslationTableEntry (
+// IN VOID *TranslationTableEntry // R0
+// IN VOID *MVA // R1
+// );
+ RVCT_ASM_EXPORT ArmUpdateTranslationTableEntry
+ mcr p15,0,R0,c7,c14,1 // DCCIMVAC Clean data cache by MVA
+ dsb
+ mcr p15,0,R1,c8,c7,1 // TLBIMVA TLB Invalidate MVA
+ mcr p15,0,R9,c7,c5,6 // BPIALL Invalidate Branch predictor array. R9 == NoOp
+ dsb
+ isb
+ bx lr
+
+ RVCT_ASM_EXPORT ArmInvalidateTlb
+ mov r0,#0
+ mcr p15,0,r0,c8,c7,0
+ mcr p15,0,R9,c7,c5,6 // BPIALL Invalidate Branch predictor array. R9 == NoOp
+ dsb
+ isb
+ bx lr
+
+ RVCT_ASM_EXPORT ArmReadScr
+ mrc p15, 0, r0, c1, c1, 0
+ bx lr
+
+ RVCT_ASM_EXPORT ArmWriteScr
+ mcr p15, 0, r0, c1, c1, 0
+ isb
+ bx lr
+
+ RVCT_ASM_EXPORT ArmReadHVBar
+ mrc p15, 4, r0, c12, c0, 0
+ bx lr
+
+ RVCT_ASM_EXPORT ArmWriteHVBar
+ mcr p15, 4, r0, c12, c0, 0
+ bx lr
+
+ RVCT_ASM_EXPORT ArmReadMVBar
+ mrc p15, 0, r0, c12, c0, 1
+ bx lr
+
+ RVCT_ASM_EXPORT ArmWriteMVBar
+ mcr p15, 0, r0, c12, c0, 1
+ bx lr
+
+ RVCT_ASM_EXPORT ArmCallWFE
+ wfe
+ bx lr
+
+ RVCT_ASM_EXPORT ArmCallSEV
+ sev
+ bx lr
+
+ RVCT_ASM_EXPORT ArmReadSctlr
+ mrc p15, 0, r0, c1, c0, 0 // Read SCTLR into R0 (Read control register configuration data)
+ bx lr
+
+ RVCT_ASM_EXPORT ArmWriteSctlr
+ mcr p15, 0, r0, c1, c0, 0
+ bx lr
+
+ RVCT_ASM_EXPORT ArmReadCpuActlr
+ mrc p15, 0, r0, c1, c0, 1
+ bx lr
+
+ RVCT_ASM_EXPORT ArmWriteCpuActlr
+ mcr p15, 0, r0, c1, c0, 1
+ dsb
+ isb
+ bx lr
+
+ RVCT_ASM_EXPORT ArmGetPhysicalAddressBits
+ mrc p15, 0, r0, c0, c1, 4 ; MMFR0
+ and r0, r0, #0xf ; VMSA [3:0]
+ cmp r0, #5 ; >= 5 implies LPAE support
+ movlt r0, #32 ; 32 bits if no LPAE
+ movge r0, #40 ; 40 bits if LPAE
+ bx lr
+
+ END
diff --git a/roms/edk2/ArmPkg/Library/ArmLib/Arm/ArmLibSupportV7.S b/roms/edk2/ArmPkg/Library/ArmLib/Arm/ArmLibSupportV7.S
new file mode 100644
index 000000000..01c91b10f
--- /dev/null
+++ b/roms/edk2/ArmPkg/Library/ArmLib/Arm/ArmLibSupportV7.S
@@ -0,0 +1,89 @@
+#------------------------------------------------------------------------------
+#
+# Copyright (c) 2008 - 2009, Apple Inc. All rights reserved.<BR>
+# Copyright (c) 2011-2013, ARM Limited. All rights reserved.
+# Copyright (c) 2016, Linaro Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+#------------------------------------------------------------------------------
+
+#include <AsmMacroIoLib.h>
+
+ASM_FUNC(ArmIsMpCore)
+ mrc p15,0,R0,c0,c0,5
+ // Get Multiprocessing extension (bit31) & U bit (bit30)
+ and R0, R0, #0xC0000000
+ // if (bit31 == 1) && (bit30 == 0) then the processor is part of a multiprocessor system
+ cmp R0, #0x80000000
+ moveq R0, #1
+ movne R0, #0
+ bx LR
+
+ASM_FUNC(ArmEnableAsynchronousAbort)
+ cpsie a
+ isb
+ bx LR
+
+ASM_FUNC(ArmDisableAsynchronousAbort)
+ cpsid a
+ isb
+ bx LR
+
+ASM_FUNC(ArmEnableIrq)
+ cpsie i
+ isb
+ bx LR
+
+ASM_FUNC(ArmDisableIrq)
+ cpsid i
+ isb
+ bx LR
+
+ASM_FUNC(ArmEnableFiq)
+ cpsie f
+ isb
+ bx LR
+
+ASM_FUNC(ArmDisableFiq)
+ cpsid f
+ isb
+ bx LR
+
+ASM_FUNC(ArmEnableInterrupts)
+ cpsie if
+ isb
+ bx LR
+
+ASM_FUNC(ArmDisableInterrupts)
+ cpsid if
+ isb
+ bx LR
+
+// UINT32
+// ReadCCSIDR (
+// IN UINT32 CSSELR
+// )
+ASM_FUNC(ReadCCSIDR)
+ mcr p15,2,r0,c0,c0,0 @ Write Cache Size Selection Register (CSSELR)
+ isb
+ mrc p15,1,r0,c0,c0,0 @ Read current CP15 Cache Size ID Register (CCSIDR)
+ bx lr
+
+// UINT32
+// ReadCLIDR (
+// IN UINT32 CSSELR
+// )
+ASM_FUNC(ReadCLIDR)
+ mrc p15,1,r0,c0,c0,1 @ Read CP15 Cache Level ID Register
+ bx lr
+
+ASM_FUNC(ArmReadNsacr)
+ mrc p15, 0, r0, c1, c1, 2
+ bx lr
+
+ASM_FUNC(ArmWriteNsacr)
+ mcr p15, 0, r0, c1, c1, 2
+ bx lr
+
+ASM_FUNCTION_REMOVE_IF_UNREFERENCED
diff --git a/roms/edk2/ArmPkg/Library/ArmLib/Arm/ArmLibSupportV7.asm b/roms/edk2/ArmPkg/Library/ArmLib/Arm/ArmLibSupportV7.asm
new file mode 100644
index 000000000..26ffa331b
--- /dev/null
+++ b/roms/edk2/ArmPkg/Library/ArmLib/Arm/ArmLibSupportV7.asm
@@ -0,0 +1,93 @@
+//------------------------------------------------------------------------------
+//
+// Copyright (c) 2008 - 2010, Apple Inc. All rights reserved.<BR>
+// Copyright (c) 2011-2013, ARM Limited. All rights reserved.
+//
+// SPDX-License-Identifier: BSD-2-Clause-Patent
+//
+//------------------------------------------------------------------------------
+
+
+
+ INCLUDE AsmMacroExport.inc
+
+
+//------------------------------------------------------------------------------
+
+ RVCT_ASM_EXPORT ArmIsMpCore
+ mrc p15,0,R0,c0,c0,5
+ // Get Multiprocessing extension (bit31) & U bit (bit30)
+ and R0, R0, #0xC0000000
+ // if (bit31 == 1) && (bit30 == 0) then the processor is part of a multiprocessor system
+ cmp R0, #0x80000000
+ moveq R0, #1
+ movne R0, #0
+ bx LR
+
+ RVCT_ASM_EXPORT ArmEnableAsynchronousAbort
+ cpsie a
+ isb
+ bx LR
+
+ RVCT_ASM_EXPORT ArmDisableAsynchronousAbort
+ cpsid a
+ isb
+ bx LR
+
+ RVCT_ASM_EXPORT ArmEnableIrq
+ cpsie i
+ isb
+ bx LR
+
+ RVCT_ASM_EXPORT ArmDisableIrq
+ cpsid i
+ isb
+ bx LR
+
+ RVCT_ASM_EXPORT ArmEnableFiq
+ cpsie f
+ isb
+ bx LR
+
+ RVCT_ASM_EXPORT ArmDisableFiq
+ cpsid f
+ isb
+ bx LR
+
+ RVCT_ASM_EXPORT ArmEnableInterrupts
+ cpsie if
+ isb
+ bx LR
+
+ RVCT_ASM_EXPORT ArmDisableInterrupts
+ cpsid if
+ isb
+ bx LR
+
+// UINT32
+// ReadCCSIDR (
+// IN UINT32 CSSELR
+// )
+ RVCT_ASM_EXPORT ReadCCSIDR
+ mcr p15,2,r0,c0,c0,0 ; Write Cache Size Selection Register (CSSELR)
+ isb
+ mrc p15,1,r0,c0,c0,0 ; Read current CP15 Cache Size ID Register (CCSIDR)
+ bx lr
+
+// UINT32
+// ReadCLIDR (
+// IN UINT32 CSSELR
+// )
+ RVCT_ASM_EXPORT ReadCLIDR
+ mrc p15,1,r0,c0,c0,1 ; Read CP15 Cache Level ID Register
+ bx lr
+
+ RVCT_ASM_EXPORT ArmReadNsacr
+ mrc p15, 0, r0, c1, c1, 2
+ bx lr
+
+ RVCT_ASM_EXPORT ArmWriteNsacr
+ mcr p15, 0, r0, c1, c1, 2
+ bx lr
+
+ END
diff --git a/roms/edk2/ArmPkg/Library/ArmLib/Arm/ArmV7ArchTimerSupport.S b/roms/edk2/ArmPkg/Library/ArmLib/Arm/ArmV7ArchTimerSupport.S
new file mode 100644
index 000000000..7abaa7963
--- /dev/null
+++ b/roms/edk2/ArmPkg/Library/ArmLib/Arm/ArmV7ArchTimerSupport.S
@@ -0,0 +1,92 @@
+#------------------------------------------------------------------------------
+#
+# Copyright (c) 2011, ARM Limited. All rights reserved.
+# Copyright (c) 2016, Linaro Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+#------------------------------------------------------------------------------
+
+#include <AsmMacroIoLib.h>
+
+ASM_FUNC(ArmReadCntFrq)
+ mrc p15, 0, r0, c14, c0, 0 @ Read CNTFRQ
+ bx lr
+
+ASM_FUNC(ArmWriteCntFrq)
+ mcr p15, 0, r0, c14, c0, 0 @ Write to CNTFRQ
+ bx lr
+
+ASM_FUNC(ArmReadCntPct)
+  mrrc   p15, 0, r0, r1, c14    @ Read CNTPCT (Physical Count register)
+ bx lr
+
+ASM_FUNC(ArmReadCntkCtl)
+ mrc p15, 0, r0, c14, c1, 0 @ Read CNTK_CTL (Timer PL1 Control Register)
+ bx lr
+
+ASM_FUNC(ArmWriteCntkCtl)
+ mcr p15, 0, r0, c14, c1, 0 @ Write to CNTK_CTL (Timer PL1 Control Register)
+ bx lr
+
+ASM_FUNC(ArmReadCntpTval)
+ mrc p15, 0, r0, c14, c2, 0 @ Read CNTP_TVAL (PL1 physical timer value register)
+ bx lr
+
+ASM_FUNC(ArmWriteCntpTval)
+ mcr p15, 0, r0, c14, c2, 0 @ Write to CNTP_TVAL (PL1 physical timer value register)
+ bx lr
+
+ASM_FUNC(ArmReadCntpCtl)
+ mrc p15, 0, r0, c14, c2, 1 @ Read CNTP_CTL (PL1 Physical Timer Control Register)
+ bx lr
+
+ASM_FUNC(ArmWriteCntpCtl)
+ mcr p15, 0, r0, c14, c2, 1 @ Write to CNTP_CTL (PL1 Physical Timer Control Register)
+ bx lr
+
+ASM_FUNC(ArmReadCntvTval)
+ mrc p15, 0, r0, c14, c3, 0 @ Read CNTV_TVAL (Virtual Timer Value register)
+ bx lr
+
+ASM_FUNC(ArmWriteCntvTval)
+ mcr p15, 0, r0, c14, c3, 0 @ Write to CNTV_TVAL (Virtual Timer Value register)
+ bx lr
+
+ASM_FUNC(ArmReadCntvCtl)
+ mrc p15, 0, r0, c14, c3, 1 @ Read CNTV_CTL (Virtual Timer Control Register)
+ bx lr
+
+ASM_FUNC(ArmWriteCntvCtl)
+ mcr p15, 0, r0, c14, c3, 1 @ Write to CNTV_CTL (Virtual Timer Control Register)
+ bx lr
+
+ASM_FUNC(ArmReadCntvCt)
+ mrrc p15, 1, r0, r1, c14 @ Read CNTVCT (Virtual Count Register)
+ bx lr
+
+ASM_FUNC(ArmReadCntpCval)
+  mrrc   p15, 2, r0, r1, c14    @ Read CNTP_CVAL (Physical Timer Compare Value Register)
+ bx lr
+
+ASM_FUNC(ArmWriteCntpCval)
+  mcrr   p15, 2, r0, r1, c14    @ Write to CNTP_CVAL (Physical Timer Compare Value Register)
+ bx lr
+
+ASM_FUNC(ArmReadCntvCval)
+  mrrc   p15, 3, r0, r1, c14    @ Read CNTV_CVAL (Virtual Timer Compare Value Register)
+ bx lr
+
+ASM_FUNC(ArmWriteCntvCval)
+  mcrr   p15, 3, r0, r1, c14    @ Write to CNTV_CVAL (Virtual Timer Compare Value Register)
+ bx lr
+
+ASM_FUNC(ArmReadCntvOff)
+ mrrc p15, 4, r0, r1, c14 @ Read CNTVOFF (virtual Offset register)
+ bx lr
+
+ASM_FUNC(ArmWriteCntvOff)
+ mcrr p15, 4, r0, r1, c14 @ Write to CNTVOFF (Virtual Offset register)
+ bx lr
+
+ASM_FUNCTION_REMOVE_IF_UNREFERENCED
diff --git a/roms/edk2/ArmPkg/Library/ArmLib/Arm/ArmV7ArchTimerSupport.asm b/roms/edk2/ArmPkg/Library/ArmLib/Arm/ArmV7ArchTimerSupport.asm
new file mode 100644
index 000000000..6896c1be2
--- /dev/null
+++ b/roms/edk2/ArmPkg/Library/ArmLib/Arm/ArmV7ArchTimerSupport.asm
@@ -0,0 +1,93 @@
+//------------------------------------------------------------------------------
+//
+// Copyright (c) 2011, ARM Limited. All rights reserved.
+//
+// SPDX-License-Identifier: BSD-2-Clause-Patent
+//
+//------------------------------------------------------------------------------
+
+
+ INCLUDE AsmMacroExport.inc
+ PRESERVE8
+
+ RVCT_ASM_EXPORT ArmReadCntFrq
+ mrc p15, 0, r0, c14, c0, 0 ; Read CNTFRQ
+ bx lr
+
+ RVCT_ASM_EXPORT ArmWriteCntFrq
+ mcr p15, 0, r0, c14, c0, 0 ; Write to CNTFRQ
+ bx lr
+
+ RVCT_ASM_EXPORT ArmReadCntPct
+  mrrc   p15, 0, r0, r1, c14    ; Read CNTPCT (Physical Count register)
+ bx lr
+
+ RVCT_ASM_EXPORT ArmReadCntkCtl
+ mrc p15, 0, r0, c14, c1, 0 ; Read CNTK_CTL (Timer PL1 Control Register)
+ bx lr
+
+ RVCT_ASM_EXPORT ArmWriteCntkCtl
+ mcr p15, 0, r0, c14, c1, 0 ; Write to CNTK_CTL (Timer PL1 Control Register)
+ bx lr
+
+ RVCT_ASM_EXPORT ArmReadCntpTval
+ mrc p15, 0, r0, c14, c2, 0 ; Read CNTP_TVAL (PL1 physical timer value register)
+ bx lr
+
+ RVCT_ASM_EXPORT ArmWriteCntpTval
+ mcr p15, 0, r0, c14, c2, 0 ; Write to CNTP_TVAL (PL1 physical timer value register)
+ bx lr
+
+ RVCT_ASM_EXPORT ArmReadCntpCtl
+ mrc p15, 0, r0, c14, c2, 1 ; Read CNTP_CTL (PL1 Physical Timer Control Register)
+ bx lr
+
+ RVCT_ASM_EXPORT ArmWriteCntpCtl
+ mcr p15, 0, r0, c14, c2, 1 ; Write to CNTP_CTL (PL1 Physical Timer Control Register)
+ bx lr
+
+ RVCT_ASM_EXPORT ArmReadCntvTval
+ mrc p15, 0, r0, c14, c3, 0 ; Read CNTV_TVAL (Virtual Timer Value register)
+ bx lr
+
+ RVCT_ASM_EXPORT ArmWriteCntvTval
+ mcr p15, 0, r0, c14, c3, 0 ; Write to CNTV_TVAL (Virtual Timer Value register)
+ bx lr
+
+ RVCT_ASM_EXPORT ArmReadCntvCtl
+ mrc p15, 0, r0, c14, c3, 1 ; Read CNTV_CTL (Virtual Timer Control Register)
+ bx lr
+
+ RVCT_ASM_EXPORT ArmWriteCntvCtl
+ mcr p15, 0, r0, c14, c3, 1 ; Write to CNTV_CTL (Virtual Timer Control Register)
+ bx lr
+
+ RVCT_ASM_EXPORT ArmReadCntvCt
+ mrrc p15, 1, r0, r1, c14 ; Read CNTVCT (Virtual Count Register)
+ bx lr
+
+ RVCT_ASM_EXPORT ArmReadCntpCval
+  mrrc   p15, 2, r0, r1, c14    ; Read CNTP_CVAL (Physical Timer Compare Value Register)
+ bx lr
+
+ RVCT_ASM_EXPORT ArmWriteCntpCval
+  mcrr   p15, 2, r0, r1, c14    ; Write to CNTP_CVAL (Physical Timer Compare Value Register)
+ bx lr
+
+ RVCT_ASM_EXPORT ArmReadCntvCval
+  mrrc   p15, 3, r0, r1, c14    ; Read CNTV_CVAL (Virtual Timer Compare Value Register)
+ bx lr
+
+ RVCT_ASM_EXPORT ArmWriteCntvCval
+  mcrr   p15, 3, r0, r1, c14    ; Write to CNTV_CVAL (Virtual Timer Compare Value Register)
+ bx lr
+
+ RVCT_ASM_EXPORT ArmReadCntvOff
+ mrrc p15, 4, r0, r1, c14 ; Read CNTVOFF (virtual Offset register)
+ bx lr
+
+ RVCT_ASM_EXPORT ArmWriteCntvOff
+ mcrr p15, 4, r0, r1, c14 ; Write to CNTVOFF (Virtual Offset register)
+ bx lr
+
+ END
diff --git a/roms/edk2/ArmPkg/Library/ArmLib/Arm/ArmV7Lib.c b/roms/edk2/ArmPkg/Library/ArmLib/Arm/ArmV7Lib.c
new file mode 100644
index 000000000..2c4a23e1a
--- /dev/null
+++ b/roms/edk2/ArmPkg/Library/ArmLib/Arm/ArmV7Lib.c
@@ -0,0 +1,73 @@
+/** @file
+
+ Copyright (c) 2008 - 2009, Apple Inc. All rights reserved.<BR>
+ Copyright (c) 2011 - 2014, ARM Limited. All rights reserved.
+
+ SPDX-License-Identifier: BSD-2-Clause-Patent
+
+**/
+
+#include <Base.h>
+
+#include <Library/ArmLib.h>
+#include <Library/DebugLib.h>
+
+#include <Chipset/ArmV7.h>
+
+#include "ArmV7Lib.h"
+#include "ArmLibPrivate.h"
+
+VOID
+ArmV7DataCacheOperation (
+ IN ARM_V7_CACHE_OPERATION DataCacheOperation
+ )
+{
+ UINTN SavedInterruptState;
+
+ SavedInterruptState = ArmGetInterruptState ();
+ ArmDisableInterrupts ();
+
+ ArmV7AllDataCachesOperation (DataCacheOperation);
+
+ ArmDataSynchronizationBarrier ();
+
+ if (SavedInterruptState) {
+ ArmEnableInterrupts ();
+ }
+}
+
+VOID
+EFIAPI
+ArmInvalidateDataCache (
+ VOID
+ )
+{
+ ASSERT (!ArmMmuEnabled ());
+
+ ArmDataSynchronizationBarrier ();
+ ArmV7DataCacheOperation (ArmInvalidateDataCacheEntryBySetWay);
+}
+
+VOID
+EFIAPI
+ArmCleanInvalidateDataCache (
+ VOID
+ )
+{
+ ASSERT (!ArmMmuEnabled ());
+
+ ArmDataSynchronizationBarrier ();
+ ArmV7DataCacheOperation (ArmCleanInvalidateDataCacheEntryBySetWay);
+}
+
+VOID
+EFIAPI
+ArmCleanDataCache (
+ VOID
+ )
+{
+ ASSERT (!ArmMmuEnabled ());
+
+ ArmDataSynchronizationBarrier ();
+ ArmV7DataCacheOperation (ArmCleanDataCacheEntryBySetWay);
+}
diff --git a/roms/edk2/ArmPkg/Library/ArmLib/Arm/ArmV7Lib.h b/roms/edk2/ArmPkg/Library/ArmLib/Arm/ArmV7Lib.h
new file mode 100644
index 000000000..93183e672
--- /dev/null
+++ b/roms/edk2/ArmPkg/Library/ArmLib/Arm/ArmV7Lib.h
@@ -0,0 +1,52 @@
+/** @file
+
+ Copyright (c) 2008 - 2009, Apple Inc. All rights reserved.<BR>
+
+ SPDX-License-Identifier: BSD-2-Clause-Patent
+
+**/
+
+#ifndef __ARM_V7_LIB_H__
+#define __ARM_V7_LIB_H__
+
+#define ID_MMFR0_SHARELVL_SHIFT 12
+#define ID_MMFR0_SHARELVL_MASK 0xf
+#define ID_MMFR0_SHARELVL_ONE 0
+#define ID_MMFR0_SHARELVL_TWO 1
+
+#define ID_MMFR0_INNERSHR_SHIFT 28
+#define ID_MMFR0_INNERSHR_MASK 0xf
+#define ID_MMFR0_OUTERSHR_SHIFT 8
+#define ID_MMFR0_OUTERSHR_MASK 0xf
+
+#define ID_MMFR0_SHR_IMP_UNCACHED 0
+#define ID_MMFR0_SHR_IMP_HW_COHERENT 1
+#define ID_MMFR0_SHR_IGNORED 0xf
+
+typedef VOID (*ARM_V7_CACHE_OPERATION)(UINT32);
+
+VOID
+ArmV7AllDataCachesOperation (
+ IN ARM_V7_CACHE_OPERATION DataCacheOperation
+ );
+
+VOID
+EFIAPI
+ArmInvalidateDataCacheEntryBySetWay (
+ IN UINTN SetWayFormat
+ );
+
+VOID
+EFIAPI
+ArmCleanDataCacheEntryBySetWay (
+ IN UINTN SetWayFormat
+ );
+
+VOID
+EFIAPI
+ArmCleanInvalidateDataCacheEntryBySetWay (
+ IN UINTN SetWayFormat
+ );
+
+#endif // __ARM_V7_LIB_H__
+
diff --git a/roms/edk2/ArmPkg/Library/ArmLib/Arm/ArmV7Support.S b/roms/edk2/ArmPkg/Library/ArmLib/Arm/ArmV7Support.S
new file mode 100644
index 000000000..4925f6628
--- /dev/null
+++ b/roms/edk2/ArmPkg/Library/ArmLib/Arm/ArmV7Support.S
@@ -0,0 +1,301 @@
+#------------------------------------------------------------------------------
+#
+# Copyright (c) 2008 - 2010, Apple Inc. All rights reserved.<BR>
+# Copyright (c) 2011 - 2014, ARM Limited. All rights reserved.
+# Copyright (c) 2016, Linaro Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+#------------------------------------------------------------------------------
+
+#include <AsmMacroIoLib.h>
+
+.set DC_ON, (0x1<<2)
+.set IC_ON, (0x1<<12)
+.set CTRL_M_BIT, (1 << 0)
+.set CTRL_C_BIT, (1 << 2)
+.set CTRL_B_BIT, (1 << 7)
+.set CTRL_I_BIT, (1 << 12)
+
+
+ASM_FUNC(ArmInvalidateDataCacheEntryByMVA)
+ mcr p15, 0, r0, c7, c6, 1 @invalidate single data cache line
+ bx lr
+
+ASM_FUNC(ArmCleanDataCacheEntryByMVA)
+ mcr p15, 0, r0, c7, c10, 1 @clean single data cache line
+ bx lr
+
+
+ASM_FUNC(ArmCleanDataCacheEntryToPoUByMVA)
+ mcr p15, 0, r0, c7, c11, 1 @clean single data cache line to PoU
+ bx lr
+
+ASM_FUNC(ArmInvalidateInstructionCacheEntryToPoUByMVA)
+ mcr p15, 0, r0, c7, c5, 1 @Invalidate single instruction cache line to PoU
+ mcr p15, 0, r0, c7, c5, 7 @Invalidate branch predictor
+ bx lr
+
+ASM_FUNC(ArmCleanInvalidateDataCacheEntryByMVA)
+ mcr p15, 0, r0, c7, c14, 1 @clean and invalidate single data cache line
+ bx lr
+
+
+ASM_FUNC(ArmInvalidateDataCacheEntryBySetWay)
+ mcr p15, 0, r0, c7, c6, 2 @ Invalidate this line
+ bx lr
+
+
+ASM_FUNC(ArmCleanInvalidateDataCacheEntryBySetWay)
+ mcr p15, 0, r0, c7, c14, 2 @ Clean and Invalidate this line
+ bx lr
+
+
+ASM_FUNC(ArmCleanDataCacheEntryBySetWay)
+ mcr p15, 0, r0, c7, c10, 2 @ Clean this line
+ bx lr
+
+ASM_FUNC(ArmInvalidateInstructionCache)
+ mcr p15,0,R0,c7,c5,0 @Invalidate entire instruction cache
+ dsb
+ isb
+ bx LR
+
+ASM_FUNC(ArmEnableMmu)
+ mrc p15,0,R0,c1,c0,0
+ orr R0,R0,#1
+ mcr p15,0,R0,c1,c0,0
+ dsb
+ isb
+ bx LR
+
+
+ASM_FUNC(ArmDisableMmu)
+ mrc p15,0,R0,c1,c0,0
+ bic R0,R0,#1
+ mcr p15,0,R0,c1,c0,0 @Disable MMU
+
+ mcr p15,0,R0,c8,c7,0 @Invalidate TLB
+ mcr p15,0,R0,c7,c5,6 @Invalidate Branch predictor array
+ dsb
+ isb
+ bx LR
+
+ASM_FUNC(ArmDisableCachesAndMmu)
+ mrc p15, 0, r0, c1, c0, 0 @ Get control register
+ bic r0, r0, #CTRL_M_BIT @ Disable MMU
+ bic r0, r0, #CTRL_C_BIT @ Disable D Cache
+ bic r0, r0, #CTRL_I_BIT @ Disable I Cache
+ mcr p15, 0, r0, c1, c0, 0 @ Write control register
+ dsb
+ isb
+ bx LR
+
+ASM_FUNC(ArmMmuEnabled)
+ mrc p15,0,R0,c1,c0,0
+ and R0,R0,#1
+ bx LR
+
+ASM_FUNC(ArmEnableDataCache)
+ ldr R1,=DC_ON
+ mrc p15,0,R0,c1,c0,0 @Read control register configuration data
+ orr R0,R0,R1 @Set C bit
+ mcr p15,0,r0,c1,c0,0 @Write control register configuration data
+ dsb
+ isb
+ bx LR
+
+ASM_FUNC(ArmDisableDataCache)
+ ldr R1,=DC_ON
+ mrc p15,0,R0,c1,c0,0 @Read control register configuration data
+ bic R0,R0,R1 @Clear C bit
+ mcr p15,0,r0,c1,c0,0 @Write control register configuration data
+ dsb
+ isb
+ bx LR
+
+ASM_FUNC(ArmEnableInstructionCache)
+ ldr R1,=IC_ON
+ mrc p15,0,R0,c1,c0,0 @Read control register configuration data
+ orr R0,R0,R1 @Set I bit
+ mcr p15,0,r0,c1,c0,0 @Write control register configuration data
+ dsb
+ isb
+ bx LR
+
+ASM_FUNC(ArmDisableInstructionCache)
+ ldr R1,=IC_ON
+ mrc p15,0,R0,c1,c0,0 @Read control register configuration data
+ bic R0,R0,R1 @Clear I bit.
+ mcr p15,0,r0,c1,c0,0 @Write control register configuration data
+ dsb
+ isb
+ bx LR
+
+ASM_FUNC(ArmEnableSWPInstruction)
+ mrc p15, 0, r0, c1, c0, 0
+ orr r0, r0, #0x00000400
+ mcr p15, 0, r0, c1, c0, 0
+ isb
+ bx LR
+
+ASM_FUNC(ArmEnableBranchPrediction)
+ mrc p15, 0, r0, c1, c0, 0
+ orr r0, r0, #0x00000800
+ mcr p15, 0, r0, c1, c0, 0
+ dsb
+ isb
+ bx LR
+
+ASM_FUNC(ArmDisableBranchPrediction)
+ mrc p15, 0, r0, c1, c0, 0
+ bic r0, r0, #0x00000800
+ mcr p15, 0, r0, c1, c0, 0
+ dsb
+ isb
+ bx LR
+
+ASM_FUNC(ArmSetLowVectors)
+ mrc p15, 0, r0, c1, c0, 0 @ Read SCTLR into R0 (Read control register configuration data)
+ bic r0, r0, #0x00002000 @ clear V bit
+ mcr p15, 0, r0, c1, c0, 0 @ Write R0 into SCTLR (Write control register configuration data)
+ isb
+ bx LR
+
+ASM_FUNC(ArmSetHighVectors)
+ mrc p15, 0, r0, c1, c0, 0 @ Read SCTLR into R0 (Read control register configuration data)
+ orr r0, r0, #0x00002000 @ Set V bit
+ mcr p15, 0, r0, c1, c0, 0 @ Write R0 into SCTLR (Write control register configuration data)
+ isb
+ bx LR
+
+ASM_FUNC(ArmV7AllDataCachesOperation)
+ stmfd SP!,{r4-r12, LR}
+ mov R1, R0 @ Save Function call in R1
+ mrc p15, 1, R6, c0, c0, 1 @ Read CLIDR
+ ands R3, R6, #0x7000000 @ Mask out all but Level of Coherency (LoC)
+ mov R3, R3, LSR #23 @ Cache level value (naturally aligned)
+ beq L_Finished
+ mov R10, #0
+
+Loop1:
+ add R2, R10, R10, LSR #1 @ Work out 3xcachelevel
+ mov R12, R6, LSR R2 @ bottom 3 bits are the Cache type for this level
+ and R12, R12, #7 @ get those 3 bits alone
+ cmp R12, #2
+ blt L_Skip @ no cache or only instruction cache at this level
+ mcr p15, 2, R10, c0, c0, 0 @ write the Cache Size selection register (CSSELR) // OR in 1 for Instruction
+ isb @ isb to sync the change to the CacheSizeID reg
+ mrc p15, 1, R12, c0, c0, 0 @ reads current Cache Size ID register (CCSIDR)
+ and R2, R12, #0x7 @ extract the line length field
+ add R2, R2, #4 @ add 4 for the line length offset (log2 16 bytes)
+@ ldr R4, =0x3FF
+ mov R4, #0x400
+ sub R4, R4, #1
+ ands R4, R4, R12, LSR #3 @ R4 is the max number on the way size (right aligned)
+ clz R5, R4 @ R5 is the bit position of the way size increment
+@ ldr R7, =0x00007FFF
+ mov R7, #0x00008000
+ sub R7, R7, #1
+ ands R7, R7, R12, LSR #13 @ R7 is the max number of the index size (right aligned)
+
+Loop2:
+ mov R9, R4 @ R9 working copy of the max way size (right aligned)
+
+Loop3:
+ orr R0, R10, R9, LSL R5 @ factor in the way number and cache number into R11
+ orr R0, R0, R7, LSL R2 @ factor in the index number
+
+ blx R1
+
+ subs R9, R9, #1 @ decrement the way number
+ bge Loop3
+ subs R7, R7, #1 @ decrement the index
+ bge Loop2
+L_Skip:
+ add R10, R10, #2 @ increment the cache number
+ cmp R3, R10
+ bgt Loop1
+
+L_Finished:
+ dsb
+ ldmfd SP!, {r4-r12, lr}
+ bx LR
+
+ASM_FUNC(ArmDataMemoryBarrier)
+ dmb
+ bx LR
+
+ASM_FUNC(ArmDataSynchronizationBarrier)
+ dsb
+ bx LR
+
+ASM_FUNC(ArmInstructionSynchronizationBarrier)
+ isb
+ bx LR
+
+ASM_FUNC(ArmReadVBar)
+ # Set the Address of the Vector Table in the VBAR register
+ mrc p15, 0, r0, c12, c0, 0
+ bx lr
+
+ASM_FUNC(ArmWriteVBar)
+ # Set the Address of the Vector Table in the VBAR register
+ mcr p15, 0, r0, c12, c0, 0
+ # Ensure the SCTLR.V bit is clear
+ mrc p15, 0, r0, c1, c0, 0 @ Read SCTLR into R0 (Read control register configuration data)
+ bic r0, r0, #0x00002000 @ clear V bit
+ mcr p15, 0, r0, c1, c0, 0 @ Write R0 into SCTLR (Write control register configuration data)
+ isb
+ bx lr
+
+ASM_FUNC(ArmEnableVFP)
+ # Read CPACR (Coprocessor Access Control Register)
+ mrc p15, 0, r0, c1, c0, 2
+  # Enable VFP access (Full Access to CP10, CP11) (V* instructions)
+ orr r0, r0, #0x00f00000
+ # Write back CPACR (Coprocessor Access Control Register)
+ mcr p15, 0, r0, c1, c0, 2
+ isb
+ # Set EN bit in FPEXC. The Advanced SIMD and VFP extensions are enabled and operate normally.
+ mov r0, #0x40000000
+#ifndef __clang__
+ mcr p10,#0x7,r0,c8,c0,#0
+#else
+ # Set the FPU model so Clang does not choke on the next instruction
+ .fpu neon
+ vmsr fpexc, r0
+#endif
+ bx lr
+
+ASM_FUNC(ArmCallWFI)
+ wfi
+ bx lr
+
+#Note: Return 0 in Uniprocessor implementation
+ASM_FUNC(ArmReadCbar)
+ mrc p15, 4, r0, c15, c0, 0 @ Read Configuration Base Address Register
+ bx lr
+
+ASM_FUNC(ArmReadMpidr)
+ mrc p15, 0, r0, c0, c0, 5 @ read MPIDR
+ bx lr
+
+ASM_FUNC(ArmReadTpidrurw)
+ mrc p15, 0, r0, c13, c0, 2 @ read TPIDRURW
+ bx lr
+
+ASM_FUNC(ArmWriteTpidrurw)
+ mcr p15, 0, r0, c13, c0, 2 @ write TPIDRURW
+ bx lr
+
+ASM_FUNC(ArmIsArchTimerImplemented)
+ mrc p15, 0, r0, c0, c1, 1 @ Read ID_PFR1
+ and r0, r0, #0x000F0000
+ bx lr
+
+ASM_FUNC(ArmReadIdPfr1)
+ mrc p15, 0, r0, c0, c1, 1 @ Read ID_PFR1 Register
+ bx lr
+
+ASM_FUNCTION_REMOVE_IF_UNREFERENCED
diff --git a/roms/edk2/ArmPkg/Library/ArmLib/Arm/ArmV7Support.asm b/roms/edk2/ArmPkg/Library/ArmLib/Arm/ArmV7Support.asm
new file mode 100644
index 000000000..3146c2b52
--- /dev/null
+++ b/roms/edk2/ArmPkg/Library/ArmLib/Arm/ArmV7Support.asm
@@ -0,0 +1,292 @@
+//------------------------------------------------------------------------------
+//
+// Copyright (c) 2008 - 2010, Apple Inc. All rights reserved.<BR>
+// Copyright (c) 2011 - 2014, ARM Limited. All rights reserved.
+//
+// SPDX-License-Identifier: BSD-2-Clause-Patent
+//
+//------------------------------------------------------------------------------
+
+
+ INCLUDE AsmMacroExport.inc
+ PRESERVE8
+
+DC_ON EQU ( 0x1:SHL:2 )
+IC_ON EQU ( 0x1:SHL:12 )
+CTRL_M_BIT EQU (1 << 0)
+CTRL_C_BIT EQU (1 << 2)
+CTRL_B_BIT EQU (1 << 7)
+CTRL_I_BIT EQU (1 << 12)
+
+
+ RVCT_ASM_EXPORT ArmInvalidateDataCacheEntryByMVA
+ mcr p15, 0, r0, c7, c6, 1 ; invalidate single data cache line
+ bx lr
+
+ RVCT_ASM_EXPORT ArmCleanDataCacheEntryByMVA
+ mcr p15, 0, r0, c7, c10, 1 ; clean single data cache line
+ bx lr
+
+
+ RVCT_ASM_EXPORT ArmInvalidateInstructionCacheEntryToPoUByMVA
+ mcr p15, 0, r0, c7, c5, 1 ; invalidate single instruction cache line to PoU
+ mcr p15, 0, r0, c7, c5, 7 ; invalidate branch predictor
+ bx lr
+
+
+ RVCT_ASM_EXPORT ArmCleanDataCacheEntryToPoUByMVA
+ mcr p15, 0, r0, c7, c11, 1 ; clean single data cache line to PoU
+ bx lr
+
+
+ RVCT_ASM_EXPORT ArmCleanInvalidateDataCacheEntryByMVA
+ mcr p15, 0, r0, c7, c14, 1 ; clean and invalidate single data cache line
+ bx lr
+
+
+ RVCT_ASM_EXPORT ArmInvalidateDataCacheEntryBySetWay
+ mcr p15, 0, r0, c7, c6, 2 ; Invalidate this line
+ bx lr
+
+
+ RVCT_ASM_EXPORT ArmCleanInvalidateDataCacheEntryBySetWay
+ mcr p15, 0, r0, c7, c14, 2 ; Clean and Invalidate this line
+ bx lr
+
+
+ RVCT_ASM_EXPORT ArmCleanDataCacheEntryBySetWay
+ mcr p15, 0, r0, c7, c10, 2 ; Clean this line
+ bx lr
+
+
+ RVCT_ASM_EXPORT ArmInvalidateInstructionCache
+ mcr p15,0,R0,c7,c5,0 ;Invalidate entire instruction cache
+ isb
+ bx LR
+
+ RVCT_ASM_EXPORT ArmEnableMmu
+ mrc p15,0,R0,c1,c0,0 ; Read SCTLR into R0 (Read control register configuration data)
+ orr R0,R0,#1 ; Set SCTLR.M bit : Enable MMU
+ mcr p15,0,R0,c1,c0,0 ; Write R0 into SCTLR (Write control register configuration data)
+ dsb
+ isb
+ bx LR
+
+ RVCT_ASM_EXPORT ArmDisableMmu
+ mrc p15,0,R0,c1,c0,0 ; Read SCTLR into R0 (Read control register configuration data)
+ bic R0,R0,#1 ; Clear SCTLR.M bit : Disable MMU
+ mcr p15,0,R0,c1,c0,0 ; Write R0 into SCTLR (Write control register configuration data)
+
+ mcr p15,0,R0,c8,c7,0 ; TLBIALL : Invalidate unified TLB
+ mcr p15,0,R0,c7,c5,6 ; BPIALL : Invalidate entire branch predictor array
+ dsb
+ isb
+ bx LR
+
+ RVCT_ASM_EXPORT ArmDisableCachesAndMmu
+ mrc p15, 0, r0, c1, c0, 0 ; Get control register
+ bic r0, r0, #CTRL_M_BIT ; Disable MMU
+ bic r0, r0, #CTRL_C_BIT ; Disable D Cache
+ bic r0, r0, #CTRL_I_BIT ; Disable I Cache
+ mcr p15, 0, r0, c1, c0, 0 ; Write control register
+ dsb
+ isb
+ bx LR
+
+ RVCT_ASM_EXPORT ArmMmuEnabled
+ mrc p15,0,R0,c1,c0,0 ; Read SCTLR into R0 (Read control register configuration data)
+ and R0,R0,#1
+ bx LR
+
+ RVCT_ASM_EXPORT ArmEnableDataCache
+ ldr R1,=DC_ON ; Specify SCTLR.C bit : (Data) Cache enable bit
+ mrc p15,0,R0,c1,c0,0 ; Read SCTLR into R0 (Read control register configuration data)
+ orr R0,R0,R1 ; Set SCTLR.C bit : Data and unified caches enabled
+ mcr p15,0,R0,c1,c0,0 ; Write R0 into SCTLR (Write control register configuration data)
+ dsb
+ isb
+ bx LR
+
+ RVCT_ASM_EXPORT ArmDisableDataCache
+ ldr R1,=DC_ON ; Specify SCTLR.C bit : (Data) Cache enable bit
+ mrc p15,0,R0,c1,c0,0 ; Read SCTLR into R0 (Read control register configuration data)
+ bic R0,R0,R1 ; Clear SCTLR.C bit : Data and unified caches disabled
+ mcr p15,0,R0,c1,c0,0 ; Write R0 into SCTLR (Write control register configuration data)
+ dsb
+ isb
+ bx LR
+
+ RVCT_ASM_EXPORT ArmEnableInstructionCache
+ ldr R1,=IC_ON ; Specify SCTLR.I bit : Instruction cache enable bit
+ mrc p15,0,R0,c1,c0,0 ; Read SCTLR into R0 (Read control register configuration data)
+ orr R0,R0,R1 ; Set SCTLR.I bit : Instruction caches enabled
+ mcr p15,0,R0,c1,c0,0 ; Write R0 into SCTLR (Write control register configuration data)
+ dsb
+ isb
+ bx LR
+
+ RVCT_ASM_EXPORT ArmDisableInstructionCache
+ ldr R1,=IC_ON ; Specify SCTLR.I bit : Instruction cache enable bit
+ mrc p15,0,R0,c1,c0,0 ; Read SCTLR into R0 (Read control register configuration data)
+ BIC R0,R0,R1 ; Clear SCTLR.I bit : Instruction caches disabled
+ mcr p15,0,R0,c1,c0,0 ; Write R0 into SCTLR (Write control register configuration data)
+ isb
+ bx LR
+
+ RVCT_ASM_EXPORT ArmEnableSWPInstruction
+ mrc p15, 0, r0, c1, c0, 0
+ orr r0, r0, #0x00000400
+ mcr p15, 0, r0, c1, c0, 0
+ isb
+ bx LR
+
+ RVCT_ASM_EXPORT ArmEnableBranchPrediction
+ mrc p15, 0, r0, c1, c0, 0 ; Read SCTLR into R0 (Read control register configuration data)
+ orr r0, r0, #0x00000800 ;
+ mcr p15, 0, r0, c1, c0, 0 ; Write R0 into SCTLR (Write control register configuration data)
+ dsb
+ isb
+ bx LR
+
+ RVCT_ASM_EXPORT ArmDisableBranchPrediction
+ mrc p15, 0, r0, c1, c0, 0 ; Read SCTLR into R0 (Read control register configuration data)
+ bic r0, r0, #0x00000800 ;
+ mcr p15, 0, r0, c1, c0, 0 ; Write R0 into SCTLR (Write control register configuration data)
+ dsb
+ isb
+ bx LR
+
+ RVCT_ASM_EXPORT ArmSetLowVectors
+ mrc p15, 0, r0, c1, c0, 0 ; Read SCTLR into R0 (Read control register configuration data)
+ bic r0, r0, #0x00002000 ; clear V bit
+ mcr p15, 0, r0, c1, c0, 0 ; Write R0 into SCTLR (Write control register configuration data)
+ isb
+ bx LR
+
+ RVCT_ASM_EXPORT ArmSetHighVectors
+ mrc p15, 0, r0, c1, c0, 0 ; Read SCTLR into R0 (Read control register configuration data)
+ orr r0, r0, #0x00002000 ; Set V bit
+ mcr p15, 0, r0, c1, c0, 0 ; Write R0 into SCTLR (Write control register configuration data)
+ isb
+ bx LR
+
+ RVCT_ASM_EXPORT ArmV7AllDataCachesOperation
+ stmfd SP!,{r4-r12, LR}
+ mov R1, R0 ; Save Function call in R1
+ mrc p15, 1, R6, c0, c0, 1 ; Read CLIDR
+ ands R3, R6, #&7000000 ; Mask out all but Level of Coherency (LoC)
+ mov R3, R3, LSR #23 ; Cache level value (naturally aligned)
+ beq Finished
+ mov R10, #0
+
+Loop1
+ add R2, R10, R10, LSR #1 ; Work out 3xcachelevel
+ mov R12, R6, LSR R2 ; bottom 3 bits are the Cache type for this level
+ and R12, R12, #7 ; get those 3 bits alone
+ cmp R12, #2
+ blt Skip ; no cache or only instruction cache at this level
+ mcr p15, 2, R10, c0, c0, 0 ; write the Cache Size selection register (CSSELR) // OR in 1 for Instruction
+ isb ; isb to sync the change to the CacheSizeID reg
+ mrc p15, 1, R12, c0, c0, 0 ; reads current Cache Size ID register (CCSIDR)
+ and R2, R12, #&7 ; extract the line length field
+ add R2, R2, #4 ; add 4 for the line length offset (log2 16 bytes)
+ ldr R4, =0x3FF
+ ands R4, R4, R12, LSR #3 ; R4 is the max number on the way size (right aligned)
+ clz R5, R4 ; R5 is the bit position of the way size increment
+ ldr R7, =0x00007FFF
+ ands R7, R7, R12, LSR #13 ; R7 is the max number of the index size (right aligned)
+
+Loop2
+ mov R9, R4 ; R9 working copy of the max way size (right aligned)
+
+Loop3
+ orr R0, R10, R9, LSL R5 ; factor in the way number and cache number into R11
+ orr R0, R0, R7, LSL R2 ; factor in the index number
+
+ blx R1
+
+ subs R9, R9, #1 ; decrement the way number
+ bge Loop3
+ subs R7, R7, #1 ; decrement the index
+ bge Loop2
+Skip
+ add R10, R10, #2 ; increment the cache number
+ cmp R3, R10
+ bgt Loop1
+
+Finished
+ dsb
+ ldmfd SP!, {r4-r12, lr}
+ bx LR
+
+ RVCT_ASM_EXPORT ArmDataMemoryBarrier
+ dmb
+ bx LR
+
+ RVCT_ASM_EXPORT ArmDataSynchronizationBarrier
+ dsb
+ bx LR
+
+ RVCT_ASM_EXPORT ArmInstructionSynchronizationBarrier
+ isb
+ bx LR
+
+ RVCT_ASM_EXPORT ArmReadVBar
+ // Set the Address of the Vector Table in the VBAR register
+ mrc p15, 0, r0, c12, c0, 0
+ bx lr
+
+ RVCT_ASM_EXPORT ArmWriteVBar
+ // Set the Address of the Vector Table in the VBAR register
+ mcr p15, 0, r0, c12, c0, 0
+ // Ensure the SCTLR.V bit is clear
+ mrc p15, 0, r0, c1, c0, 0 ; Read SCTLR into R0 (Read control register configuration data)
+ bic r0, r0, #0x00002000 ; clear V bit
+ mcr p15, 0, r0, c1, c0, 0 ; Write R0 into SCTLR (Write control register configuration data)
+ isb
+ bx lr
+
+ RVCT_ASM_EXPORT ArmEnableVFP
+ // Read CPACR (Coprocessor Access Control Register)
+ mrc p15, 0, r0, c1, c0, 2
+    // Enable VFP access (Full Access to CP10, CP11) (V* instructions)
+ orr r0, r0, #0x00f00000
+ // Write back CPACR (Coprocessor Access Control Register)
+ mcr p15, 0, r0, c1, c0, 2
+ isb
+ // Set EN bit in FPEXC. The Advanced SIMD and VFP extensions are enabled and operate normally.
+ mov r0, #0x40000000
+ mcr p10,#0x7,r0,c8,c0,#0
+ bx lr
+
+ RVCT_ASM_EXPORT ArmCallWFI
+ wfi
+ bx lr
+
+//Note: Return 0 in Uniprocessor implementation
+ RVCT_ASM_EXPORT ArmReadCbar
+ mrc p15, 4, r0, c15, c0, 0 //Read Configuration Base Address Register
+ bx lr
+
+ RVCT_ASM_EXPORT ArmReadMpidr
+ mrc p15, 0, r0, c0, c0, 5 ; read MPIDR
+ bx lr
+
+ RVCT_ASM_EXPORT ArmReadTpidrurw
+ mrc p15, 0, r0, c13, c0, 2 ; read TPIDRURW
+ bx lr
+
+ RVCT_ASM_EXPORT ArmWriteTpidrurw
+ mcr p15, 0, r0, c13, c0, 2 ; write TPIDRURW
+ bx lr
+
+ RVCT_ASM_EXPORT ArmIsArchTimerImplemented
+ mrc p15, 0, r0, c0, c1, 1 ; Read ID_PFR1
+ and r0, r0, #0x000F0000
+ bx lr
+
+ RVCT_ASM_EXPORT ArmReadIdPfr1
+ mrc p15, 0, r0, c0, c1, 1 ; Read ID_PFR1 Register
+ bx lr
+
+ END
diff --git a/roms/edk2/ArmPkg/Library/ArmLib/ArmBaseLib.inf b/roms/edk2/ArmPkg/Library/ArmLib/ArmBaseLib.inf
new file mode 100644
index 000000000..f61c71b67
--- /dev/null
+++ b/roms/edk2/ArmPkg/Library/ArmLib/ArmBaseLib.inf
@@ -0,0 +1,55 @@
+#/** @file
+#
+# Copyright (c) 2008 - 2010, Apple Inc. All rights reserved.<BR>
+# Portions copyright (c) 2011 - 2014, ARM Limited. All rights reserved.
+# Copyright (c) 2016, Linaro Ltd. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+#
+#**/
+
+[Defines]
+ INF_VERSION = 0x00010005
+ BASE_NAME = ArmBaseLib
+ FILE_GUID = f1d943b6-99c5-46d5-af5a-66ec67662700
+ MODULE_TYPE = BASE
+ VERSION_STRING = 1.0
+ LIBRARY_CLASS = ArmLib
+
+# Sources common to all architectures.
+[Sources]
+ ArmLibPrivate.h
+ ArmLib.c
+
+# 32-bit ARM: the "| GCC" / "| RVCT" qualifiers restrict each assembly
+# source to the matching toolchain family (GNU vs. ARM RVCT syntax).
+[Sources.ARM]
+ Arm/ArmV7Lib.h
+ Arm/ArmV7Lib.c
+
+ Arm/ArmLibSupport.S | GCC
+ Arm/ArmLibSupportV7.S | GCC
+ Arm/ArmV7Support.S | GCC
+ Arm/ArmV7ArchTimerSupport.S | GCC
+
+ Arm/ArmLibSupport.asm | RVCT
+ Arm/ArmLibSupportV7.asm | RVCT
+ Arm/ArmV7Support.asm | RVCT
+ Arm/ArmV7ArchTimerSupport.asm | RVCT
+
+# AArch64 sources carry no toolchain qualifier (GNU-syntax only).
+[Sources.AARCH64]
+ AArch64/AArch64Lib.h
+ AArch64/AArch64Lib.c
+
+ AArch64/ArmLibSupport.S
+ AArch64/ArmLibSupportV8.S
+ AArch64/AArch64Support.S
+ AArch64/AArch64ArchTimerSupport.S
+
+[LibraryClasses]
+ DebugLib
+
+[Packages]
+ ArmPkg/ArmPkg.dec
+ MdePkg/MdePkg.dec
+
+[FeaturePcd.ARM]
+ gArmTokenSpaceGuid.PcdNormalMemoryNonshareableOverride
diff --git a/roms/edk2/ArmPkg/Library/ArmLib/ArmLib.c b/roms/edk2/ArmPkg/Library/ArmLib/ArmLib.c
new file mode 100644
index 000000000..3905d02c5
--- /dev/null
+++ b/roms/edk2/ArmPkg/Library/ArmLib/ArmLib.c
@@ -0,0 +1,99 @@
+/** @file
+
+ Copyright (c) 2008 - 2009, Apple Inc. All rights reserved.<BR>
+ Copyright (c) 2011 - 2014, ARM Ltd. All rights reserved.<BR>
+
+ SPDX-License-Identifier: BSD-2-Clause-Patent
+
+**/
+
+#include <Base.h>
+
+#include <Library/ArmLib.h>
+
+#include "ArmLibPrivate.h"
+
+/**
+  Set (OR in) the given bits in the Auxiliary Control Register.
+
+  Read-modify-write via ArmReadAuxCr/ArmWriteAuxCr (implemented in
+  per-architecture assembly); all other bits are preserved.
+
+  @param[in] Bits   Bit mask to set in the register.
+**/
+VOID
+EFIAPI
+ArmSetAuxCrBit (
+ IN UINT32 Bits
+ )
+{
+ UINT32 val = ArmReadAuxCr();
+ val |= Bits;
+ ArmWriteAuxCr(val);
+}
+
+/**
+  Clear (AND out) the given bits in the Auxiliary Control Register.
+
+  Read-modify-write via ArmReadAuxCr/ArmWriteAuxCr (implemented in
+  per-architecture assembly); all other bits are preserved.
+
+  @param[in] Bits   Bit mask to clear in the register.
+**/
+VOID
+EFIAPI
+ArmUnsetAuxCrBit (
+ IN UINT32 Bits
+ )
+{
+ UINT32 val = ArmReadAuxCr();
+ val &= ~Bits;
+ ArmWriteAuxCr(val);
+}
+
+//
+// Helper functions for accessing CPUACTLR
+//
+
+/**
+  Set (OR in) the given bits in the CPU ACTLR (CPUACTLR).
+
+  Read-modify-write via ArmReadCpuActlr/ArmWriteCpuActlr; all other
+  bits are preserved.
+
+  @param[in] Bits   Bit mask to set in the register.
+**/
+VOID
+EFIAPI
+ArmSetCpuActlrBit (
+ IN UINTN Bits
+ )
+{
+ UINTN Value;
+ Value = ArmReadCpuActlr ();
+ Value |= Bits;
+ ArmWriteCpuActlr (Value);
+}
+
+/**
+  Clear (AND out) the given bits in the CPU ACTLR (CPUACTLR).
+
+  Read-modify-write via ArmReadCpuActlr/ArmWriteCpuActlr; all other
+  bits are preserved.
+
+  @param[in] Bits   Bit mask to clear in the register.
+**/
+VOID
+EFIAPI
+ArmUnsetCpuActlrBit (
+ IN UINTN Bits
+ )
+{
+ UINTN Value;
+ Value = ArmReadCpuActlr ();
+ Value &= ~Bits;
+ ArmWriteCpuActlr (Value);
+}
+
+/**
+  Return the smallest data cache line length, in bytes.
+
+  The DminLine field of the Cache Type Register (bits [19:16]) encodes
+  log2 of the line size in 4-byte words, hence bytes = 4 << DminLine.
+
+  @return Minimum data cache line length in bytes.
+**/
+UINTN
+EFIAPI
+ArmDataCacheLineLength (
+ VOID
+ )
+{
+ return 4 << ((ArmCacheInfo () >> 16) & 0xf); // CTR_EL0.DminLine
+}
+
+/**
+  Return the smallest instruction cache line length, in bytes.
+
+  The IminLine field of the Cache Type Register (bits [3:0]) encodes
+  log2 of the line size in 4-byte words, hence bytes = 4 << IminLine.
+
+  @return Minimum instruction cache line length in bytes.
+**/
+UINTN
+EFIAPI
+ArmInstructionCacheLineLength (
+ VOID
+ )
+{
+ return 4 << (ArmCacheInfo () & 0xf); // CTR_EL0.IminLine
+}
+
+/**
+  Return the Cache Writeback Granule, in bytes: the maximum size of a
+  memory region that can be written back by the cache as a single unit.
+
+  The CWG field (CTR bits [27:24]) encodes log2 of the granule in
+  4-byte words; a value of 0 means the register does not report it, in
+  which case the architectural maximum of 2 KB is assumed.
+
+  @return Cache writeback granule in bytes.
+**/
+UINTN
+EFIAPI
+ArmCacheWritebackGranule (
+ VOID
+ )
+{
+ UINTN CWG;
+
+ CWG = (ArmCacheInfo () >> 24) & 0xf; // CTR_EL0.CWG
+
+ if (CWG == 0) {
+ return SIZE_2KB;
+ }
+
+ return 4 << CWG;
+}
diff --git a/roms/edk2/ArmPkg/Library/ArmLib/ArmLibPrivate.h b/roms/edk2/ArmPkg/Library/ArmLib/ArmLibPrivate.h
new file mode 100644
index 000000000..2e90739eb
--- /dev/null
+++ b/roms/edk2/ArmPkg/Library/ArmLib/ArmLibPrivate.h
@@ -0,0 +1,74 @@
+/** @file
+  Private declarations shared by the ArmLib implementation files.
+
+ Copyright (c) 2008 - 2009, Apple Inc. All rights reserved.<BR>
+
+ SPDX-License-Identifier: BSD-2-Clause-Patent
+
+**/
+
+#ifndef __ARM_LIB_PRIVATE_H__
+#define __ARM_LIB_PRIVATE_H__
+
+//
+// Field encodings for the legacy (pre-ARMv7) Cache Type Register layout.
+// NOTE(review): field positions appear to match the ARMv5/ARMv6 CTR
+// format — confirm against the ARM Architecture Reference Manual.
+//
+#define CACHE_SIZE_4_KB (3UL)
+#define CACHE_SIZE_8_KB (4UL)
+#define CACHE_SIZE_16_KB (5UL)
+#define CACHE_SIZE_32_KB (6UL)
+#define CACHE_SIZE_64_KB (7UL)
+#define CACHE_SIZE_128_KB (8UL)
+
+#define CACHE_ASSOCIATIVITY_DIRECT (0UL)
+#define CACHE_ASSOCIATIVITY_4_WAY (2UL)
+#define CACHE_ASSOCIATIVITY_8_WAY (3UL)
+
+#define CACHE_PRESENT (0UL)
+#define CACHE_NOT_PRESENT (1UL)
+
+#define CACHE_LINE_LENGTH_32_BYTES (2UL)
+
+// Extract sub-fields from a 12-bit cache "size field".
+#define SIZE_FIELD_TO_CACHE_SIZE(x) (((x) >> 6) & 0x0F)
+#define SIZE_FIELD_TO_CACHE_ASSOCIATIVITY(x) (((x) >> 3) & 0x07)
+#define SIZE_FIELD_TO_CACHE_PRESENCE(x) (((x) >> 2) & 0x01)
+#define SIZE_FIELD_TO_CACHE_LINE_LENGTH(x) (((x) >> 0) & 0x03)
+
+// Split the register into the data (bits [23:12]) and instruction
+// (bits [11:0]) size fields.
+#define DATA_CACHE_SIZE_FIELD(x) (((x) >> 12) & 0x0FFF)
+#define INSTRUCTION_CACHE_SIZE_FIELD(x) (((x) >> 0) & 0x0FFF)
+
+#define DATA_CACHE_SIZE(x) (SIZE_FIELD_TO_CACHE_SIZE(DATA_CACHE_SIZE_FIELD(x)))
+#define DATA_CACHE_ASSOCIATIVITY(x) (SIZE_FIELD_TO_CACHE_ASSOCIATIVITY(DATA_CACHE_SIZE_FIELD(x)))
+#define DATA_CACHE_PRESENT(x) (SIZE_FIELD_TO_CACHE_PRESENCE(DATA_CACHE_SIZE_FIELD(x)))
+#define DATA_CACHE_LINE_LENGTH(x) (SIZE_FIELD_TO_CACHE_LINE_LENGTH(DATA_CACHE_SIZE_FIELD(x)))
+
+#define INSTRUCTION_CACHE_SIZE(x) (SIZE_FIELD_TO_CACHE_SIZE(INSTRUCTION_CACHE_SIZE_FIELD(x)))
+#define INSTRUCTION_CACHE_ASSOCIATIVITY(x) (SIZE_FIELD_TO_CACHE_ASSOCIATIVITY(INSTRUCTION_CACHE_SIZE_FIELD(x)))
+#define INSTRUCTION_CACHE_PRESENT(x) (SIZE_FIELD_TO_CACHE_PRESENCE(INSTRUCTION_CACHE_SIZE_FIELD(x)))
+#define INSTRUCTION_CACHE_LINE_LENGTH(x) (SIZE_FIELD_TO_CACHE_LINE_LENGTH(INSTRUCTION_CACHE_SIZE_FIELD(x)))
+
+#define CACHE_TYPE(x) (((x) >> 25) & 0x0F)
+#define CACHE_TYPE_WRITE_BACK (0x0EUL)
+
+#define CACHE_ARCHITECTURE(x) (((x) >> 24) & 0x01)
+#define CACHE_ARCHITECTURE_UNIFIED (0UL)
+#define CACHE_ARCHITECTURE_SEPARATE (1UL)
+
+// Insert Value into the CPSR under Mask.
+// NOTE(review): implemented in per-architecture assembly — confirm
+// semantics (bits outside Mask presumably preserved) in that source.
+VOID
+CPSRMaskInsert (
+ IN UINT32 Mask,
+ IN UINT32 Value
+ );
+
+// Return the current CPSR value (assembly implementation).
+UINT32
+CPSRRead (
+ VOID
+ );
+
+// Read the Cache Size ID Register for the cache selected by the given
+// CSSELR (Cache Size Selection Register) value; assembly implementation.
+UINT32
+ReadCCSIDR (
+ IN UINT32 CSSELR
+ );
+
+// Read the Cache Level ID Register (assembly implementation).
+UINT32
+ReadCLIDR (
+ VOID
+ );
+
+#endif // __ARM_LIB_PRIVATE_H__