author     Dan Williams <dan.j.williams@intel.com>   2017-11-15 16:56:11 -0800
committer  Dan Williams <dan.j.williams@intel.com>   2017-11-15 16:56:11 -0800
commit     4247f24c23589bcc3bc3490515ef8c9497e9ae55 (patch)
tree       89726a0e171c443a3e8def2992b56dbd8a21df21 /arch/arm64/include/asm/memory.h
parent     79ab67ede21f536851a99ea68ee6fc1f5435e055 (diff)
parent     9f586fff6574f6ecbf323f92d44ffaf0d96225fe (diff)

Merge branch 'for-4.15/dax' into libnvdimm-for-next

Diffstat (limited to 'arch/arm64/include/asm/memory.h')
 -rw-r--r--  arch/arm64/include/asm/memory.h | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)
diff --git a/arch/arm64/include/asm/memory.h b/arch/arm64/include/asm/memory.h
index 3585a5e26151..f7c4d2146aed 100644
--- a/arch/arm64/include/asm/memory.h
+++ b/arch/arm64/include/asm/memory.h
@@ -95,16 +95,19 @@
 #define KERNEL_END		_end
 
 /*
- * The size of the KASAN shadow region. This should be 1/8th of the
- * size of the entire kernel virtual address space.
+ * KASAN requires 1/8th of the kernel virtual address space for the shadow
+ * region. KASAN can bloat the stack significantly, so double the (minimum)
+ * stack size when KASAN is in use.
  */
 #ifdef CONFIG_KASAN
 #define KASAN_SHADOW_SIZE	(UL(1) << (VA_BITS - 3))
+#define KASAN_THREAD_SHIFT	1
 #else
 #define KASAN_SHADOW_SIZE	(0)
+#define KASAN_THREAD_SHIFT	0
 #endif
 
-#define MIN_THREAD_SHIFT	14
+#define MIN_THREAD_SHIFT	(14 + KASAN_THREAD_SHIFT)
 
 /*
  * VMAP'd stacks are allocated at page granularity, so we must ensure that such
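To make the numbers concrete, below is a small user-space C sketch of the sizing this hunk sets up. It is not kernel code: VA_BITS = 48 and PAGE_SHIFT = 12 are assumptions picked for illustration, KASAN stands in for CONFIG_KASAN, and the final page-rounding step mirrors the CONFIG_VMAP_STACK rounding that follows this hunk in memory.h.

/* Standalone illustration, not kernel code. Build with -DKASAN to see the
 * doubled stack size. */
#include <stdio.h>

#define VA_BITS			48	/* assumed 48-bit kernel VA space */
#define PAGE_SHIFT		12	/* assumed 4K pages */

#ifdef KASAN				/* stand-in for CONFIG_KASAN */
#define KASAN_SHADOW_SIZE	(1UL << (VA_BITS - 3))	/* 1/8th of the VA space */
#define KASAN_THREAD_SHIFT	1
#else
#define KASAN_SHADOW_SIZE	(0UL)
#define KASAN_THREAD_SHIFT	0
#endif

/* KASAN doubles the minimum stack: shift 14 (16 KiB) becomes 15 (32 KiB). */
#define MIN_THREAD_SHIFT	(14 + KASAN_THREAD_SHIFT)

/* VMAP'd stacks are page-granular, so never go below one page. */
#if MIN_THREAD_SHIFT < PAGE_SHIFT
#define THREAD_SHIFT		PAGE_SHIFT
#else
#define THREAD_SHIFT		MIN_THREAD_SHIFT
#endif

int main(void)
{
	printf("KASAN shadow region: %lu TiB\n", KASAN_SHADOW_SIZE >> 40);
	printf("kernel stack size:   %lu KiB\n", (1UL << THREAD_SHIFT) >> 10);
	return 0;
}

With KASAN off this prints a 0 TiB shadow and 16 KiB stacks; with -DKASAN it prints a 32 TiB shadow (one eighth of the 256 TiB covered by 48 VA bits) and 32 KiB stacks, which is the doubling the new comment describes.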