author		Vince Leung <vincentl@codeaurora.org>	2015-05-19 13:27:50 -0700
committer	Rohit Vaswani <rvaswani@codeaurora.org>	2016-03-01 12:22:14 -0800
commit		a1efe43c52eb79045b1b47b8f4d109aa143f6108 (patch)
tree		9e757ff63498a8f74c3be3cd0a0f0ab5846c7a16
parent		486155f53e2180d5fac41e85bb12cf8755e12241 (diff)
arm64: entry: add support for CONFIG_ARM64_REG_REBALANCE_ON_CTX_SW
Add support for re-balancing register rename pools on context switches,
for a potential performance boost on some ARM64 targets.

Change-Id: I7577c11fac566ae91a210787c70b23591bfbd693
Signed-off-by: Sanrio Alvares <salvares@codeaurora.org>
-rw-r--r--	arch/arm64/Kconfig		7
-rw-r--r--	arch/arm64/kernel/entry.S	27
2 files changed, 34 insertions(+), 0 deletions(-)
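Note on the mechanism: the entry.S additions below are self-moves, instructions that rewrite a register with its own value. Architecturally they are no-ops (ORR xN, xN, xN for general-purpose registers, MOV vN.16b, vN.16b for SIMD registers), but they still pass through the pipeline's rename stage, which is what gives the core's renamer an opportunity to re-balance its physical register pools. The user-space C sketch below illustrates the same idiom via inline assembly; it is a hypothetical stand-alone illustration, not part of the patch, and the function name rebalance_hint is invented for the example.

/*
 * Hypothetical user-space illustration of the self-move idiom used by
 * this patch; this is NOT the kernel code path. Each instruction
 * rewrites a register with its own value, so architectural state is
 * unchanged, but the instructions still occupy rename-stage resources.
 */
#include <stdio.h>

static inline void rebalance_hint(void)
{
#if defined(__aarch64__)
	asm volatile(
		/* General-purpose self-moves, as in the entry.S hunk. */
		"orr x13, x13, x13\n\t"
		"orr x14, x14, x14\n\t"
		/* SIMD self-moves: mov vN.16b, vN.16b is an alias of
		 * orr vN.16b, vN.16b, vN.16b. */
		"mov v0.16b, v0.16b\n\t"
		"mov v1.16b, v1.16b"
		: /* no outputs */
		: /* no inputs */
		: "x13", "x14", "v0", "v1");
#endif
}

int main(void)
{
	rebalance_hint();
	puts("executed self-move sequence; architectural state unchanged");
	return 0;
}

Build with an AArch64 toolchain (e.g. aarch64-linux-gnu-gcc); on other architectures the asm block compiles away.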
diff --git a/arch/arm64/Kconfig b/arch/arm64/Kconfig
index 579ab688312d..beea70ba7995 100644
--- a/arch/arm64/Kconfig
+++ b/arch/arm64/Kconfig
@@ -559,6 +559,13 @@ config HW_PERF_EVENTS
 	def_bool y
 	depends on ARM_PMU
 
+config ARM64_REG_REBALANCE_ON_CTX_SW
+	bool "Rebalance registers during context switches."
+	def_bool ARCH_MSM8996
+	help
+	  Forcefully re-balance register rename pools on context switches for
+	  improved performance on some devices.
+
 config SYS_SUPPORTS_HUGETLBFS
 	def_bool y
diff --git a/arch/arm64/kernel/entry.S b/arch/arm64/kernel/entry.S
index 7ed3d75f6304..77827e89ea80 100644
--- a/arch/arm64/kernel/entry.S
+++ b/arch/arm64/kernel/entry.S
@@ -598,6 +598,33 @@ ENTRY(cpu_switch_to)
 	ldp x27, x28, [x8], #16
 	ldp x29, x9, [x8], #16
 	ldr lr, [x8]
+#ifdef CONFIG_ARM64_REG_REBALANCE_ON_CTX_SW
+	ORR x13, x13, x13
+	ORR x14, x14, x14
+	ORR x15, x15, x15
+	ORR x16, x16, x16
+	ORR x17, x17, x17
+	ORR x18, x18, x18
+	ORR x19, x19, x19
+	ORR x20, x20, x20
+	ORR x21, x21, x21
+	MOV V0.16B, V0.16B
+	MOV V1.16B, V1.16B
+	MOV V2.16B, V2.16B
+	MOV V3.16B, V3.16B
+	MOV V4.16B, V4.16B
+	MOV V5.16B, V5.16B
+	MOV V6.16B, V6.16B
+	MOV V7.16B, V7.16B
+	MOV V8.16B, V8.16B
+	MOV V9.16B, V9.16B
+	MOV V10.16B, V10.16B
+	MOV V11.16B, V11.16B
+	MOV V12.16B, V12.16B
+	MOV V13.16B, V13.16B
+	MOV V14.16B, V14.16B
+	MOV V15.16B, V15.16B
+#endif
 	mov sp, x9
 	ret
 ENDPROC(cpu_switch_to)
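For context, the rebalancing sequence sits in cpu_switch_to after the incoming task's callee-saved registers have been reloaded and immediately before the stack-pointer switch, so it executes exactly once per context switch. With def_bool ARCH_MSM8996, the option defaults to enabled when building for MSM8996 and stays off elsewhere unless selected explicitly.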