Diffstat (limited to 'arch/arm64/include/asm/memory.h')
-rw-r--r--	arch/arm64/include/asm/memory.h	23
1 file changed, 22 insertions(+), 1 deletion(-)
diff --git a/arch/arm64/include/asm/memory.h b/arch/arm64/include/asm/memory.h
index 7fa6ad48d574..c5cd2c599b24 100644
--- a/arch/arm64/include/asm/memory.h
+++ b/arch/arm64/include/asm/memory.h
@@ -102,7 +102,17 @@
#define KASAN_SHADOW_SIZE (0)
#endif
-#define THREAD_SHIFT 14
+#define MIN_THREAD_SHIFT 14
+
+/*
+ * VMAP'd stacks are allocated at page granularity, so we must ensure that such
+ * stacks are a multiple of page size.
+ */
+#if defined(CONFIG_VMAP_STACK) && (MIN_THREAD_SHIFT < PAGE_SHIFT)
+#define THREAD_SHIFT PAGE_SHIFT
+#else
+#define THREAD_SHIFT MIN_THREAD_SHIFT
+#endif
#if THREAD_SHIFT >= PAGE_SHIFT
#define THREAD_SIZE_ORDER (THREAD_SHIFT - PAGE_SHIFT)
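
As a worked illustration of the THREAD_SHIFT selection above (a standalone sketch, not part of this patch): with 4K pages (PAGE_SHIFT == 12) the minimum shift of 14 already spans whole pages, so THREAD_SHIFT stays 14 and stacks are 16K (order 2); with 64K pages (PAGE_SHIFT == 16) under CONFIG_VMAP_STACK, THREAD_SHIFT is raised to 16 so each stack is exactly one page. The C program below mirrors the preprocessor logic at runtime; the thread_shift() helper and the vmap_stack flag are hypothetical names used only for the example.

#include <stdio.h>

#define MIN_THREAD_SHIFT 14

/* Sketch of the header's THREAD_SHIFT selection, evaluated at runtime. */
static unsigned int thread_shift(unsigned int page_shift, int vmap_stack)
{
	/* VMAP'd stacks are page-granular, so never pick a size below one page. */
	if (vmap_stack && MIN_THREAD_SHIFT < page_shift)
		return page_shift;
	return MIN_THREAD_SHIFT;
}

int main(void)
{
	unsigned int page_shifts[] = { 12, 16 };	/* 4K and 64K pages */

	for (unsigned int i = 0; i < 2; i++) {
		unsigned int ps = page_shifts[i];
		unsigned int ts = thread_shift(ps, 1);	/* CONFIG_VMAP_STACK=y */

		printf("PAGE_SHIFT=%u: THREAD_SHIFT=%u, THREAD_SIZE=%luK, THREAD_SIZE_ORDER=%u\n",
		       ps, ts, (1UL << ts) / 1024, ts - ps);
	}
	return 0;
}
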
@@ -110,6 +120,17 @@
#define THREAD_SIZE (UL(1) << THREAD_SHIFT)
+/*
+ * By aligning VMAP'd stacks to 2 * THREAD_SIZE, we can detect overflow by
+ * checking sp & (1 << THREAD_SHIFT), which we can do cheaply in the entry
+ * assembly.
+ */
+#ifdef CONFIG_VMAP_STACK
+#define THREAD_ALIGN (2 * THREAD_SIZE)
+#else
+#define THREAD_ALIGN THREAD_SIZE
+#endif
+
#define IRQ_STACK_SIZE THREAD_SIZE
/*
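
Regarding the THREAD_ALIGN comment above, a minimal sketch of why a single bit suffices (assuming 4K pages and an invented stack base; this is not the kernel's entry code): with a THREAD_SIZE stack placed at a base aligned to 2 * THREAD_SIZE, every in-bounds stack pointer has bit THREAD_SHIFT clear, while a pointer that has run up to THREAD_SIZE below the base has that bit set.

#include <stdio.h>
#include <stdint.h>

#define THREAD_SHIFT	14			/* 4K pages: MIN_THREAD_SHIFT wins */
#define THREAD_SIZE	(1UL << THREAD_SHIFT)
#define THREAD_ALIGN	(2 * THREAD_SIZE)

/* In-bounds stack pointers have bit THREAD_SHIFT clear; overflowed ones have it set. */
static int sp_overflowed(uint64_t sp)
{
	return (sp & (UINT64_C(1) << THREAD_SHIFT)) != 0;
}

int main(void)
{
	/* Invented stack base, aligned to 2 * THREAD_SIZE as THREAD_ALIGN requires. */
	uint64_t base = UINT64_C(0xffff000010000000) & ~(uint64_t)(THREAD_ALIGN - 1);

	uint64_t in_bounds  = base + THREAD_SIZE - 256;	/* somewhere within the stack */
	uint64_t overflowed = base - 64;		/* ran past the base into the guard */

	printf("in-bounds sp:  overflowed=%d\n", sp_overflowed(in_bounds));
	printf("overflowed sp: overflowed=%d\n", sp_overflowed(overflowed));
	return 0;
}

A full lower/upper bounds comparison would cost extra registers and instructions on the exception entry path, which is why the patch comment describes the single-bit test as cheap.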