/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_POWERPC_HEAD_64_H
#define _ASM_POWERPC_HEAD_64_H

#include <asm/cache.h>

#ifdef __ASSEMBLY__
/*
 * We can't do CPP stringification and concatenation directly into the section
 * name for some reason, so these macros do it for us.
 */
.macro define_ftsec name
	.section ".head.text.\name\()","ax",@progbits
.endm
.macro define_data_ftsec name
	.section ".head.data.\name\()","a",@progbits
.endm
.macro use_ftsec name
	.section ".head.text.\name\()"
.endm

/*
 * Fixed (location) sections are used by opening fixed sections and emitting
 * fixed section entries into them before closing them. Multiple fixed sections
 * can be open at any time.
 *
 * Each fixed section created in a .S file must have a corresponding output
 * section, including its location, added to arch/powerpc/kernel/vmlinux.lds.S.
 *
 * For each fixed section, code is generated into it in the order in which it
 * appears in the source.  Fixed section entries can be placed at a fixed
 * location within the section using the _LOCATION postfix variants. These must
 * be ordered according to their relative placement within the section.
 *
 * OPEN_FIXED_SECTION(section_name, start_address, end_address)
 * FIXED_SECTION_ENTRY_BEGIN(section_name, label1)
 *
 * USE_FIXED_SECTION(section_name)
 * label3:
 *     li  r10,128
 *     mr  r11,r10
 *
 * FIXED_SECTION_ENTRY_BEGIN_LOCATION(section_name, label2, start_address, size)
 * FIXED_SECTION_ENTRY_END_LOCATION(section_name, label2, start_address, size)
 * CLOSE_FIXED_SECTION(section_name)
 *
 * ZERO_FIXED_SECTION can be used to emit zeroed data.
 *
 * Troubleshooting:
 * - If the build dies with "Error: attempt to move .org backwards" at
 *   CLOSE_FIXED_SECTION() or elsewhere, something unexpected is probably
 *   being added there. Remove the '. = sname##_len' line, rebuild, and
 *   check what is pushing the section past its end.
 * - If the build dies in linking, check arch/powerpc/tools/head_check.sh
 *   comments.
 * - If the kernel crashes or hangs in very early boot, it could be linker
 *   stubs at the start of the main text.
 */

#define OPEN_FIXED_SECTION(sname, start, end)			\
	sname##_start = (start);				\
	sname##_end = (end);					\
	sname##_len = (end) - (start);				\
	define_ftsec sname;					\
	. = 0x0;						\
start_##sname:
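
/*
 * Illustrative sketch only (not used by this header); the section name and
 * addresses below are hypothetical. A .S file including this header might
 * open a fixed section like this:
 *
 *	OPEN_FIXED_SECTION(example_vectors, 0x0100, 0x1900)
 *
 * This defines example_vectors_start, example_vectors_end and
 * example_vectors_len, switches to the ".head.text.example_vectors" section,
 * and places the start_example_vectors label at offset 0. A matching output
 * section, with its location, must also be added to
 * arch/powerpc/kernel/vmlinux.lds.S.
 */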

/*
 * The .linker_stub_catch section is used to catch linker stubs, so they are
 * not inserted into our .text section above the start_text label (which would
 * break the ABS_ADDR calculation). See kernel/vmlinux.lds.S and
 * tools/head_check.sh for more details. We would prefer to keep just a
 * cacheline (0x80), but 0x100 seems to be how the linker aligns branch stub
 * groups.
 */
#ifdef CONFIG_LD_HEAD_STUB_CATCH
#define OPEN_TEXT_SECTION(start)				\
	.section ".linker_stub_catch","ax",@progbits;		\
linker_stub_catch:						\
	. = 0x4;						\
	text_start = (start) + 0x100;				\
	.section ".text","ax",@progbits;			\
	.balign 0x100;						\
start_text:
#else
#define OPEN_TEXT_SECTION(start)				\
	text_start = (start);					\
	.section ".text","ax",@progbits;			\
	. = 0x0;						\
start_text:
#endif
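
/*
 * Illustrative sketch only, with a hypothetical address: the main text
 * section is opened the same way, e.g.
 *
 *	OPEN_TEXT_SECTION(0x8000)
 *
 * which defines text_start and the start_text label used by
 * USE_TEXT_SECTION() and ABS_ADDR() below.
 */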

#define ZERO_FIXED_SECTION(sname, start, end)			\
	sname##_start = (start);				\
	sname##_end = (end);					\
	sname##_len = (end) - (start);				\
	define_data_ftsec sname;				\
	. = 0x0;						\
	. = sname##_len;
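
/*
 * Illustrative sketch only, with a hypothetical name and addresses:
 *
 *	ZERO_FIXED_SECTION(example_pad, 0x1900, 0x2000)
 *
 * reserves 0x700 bytes in ".head.data.example_pad"; advancing the location
 * counter to example_pad_len fills the section with zeroes.
 */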

#define USE_FIXED_SECTION(sname)				\
	fs_label = start_##sname;				\
	fs_start = sname##_start;				\
	use_ftsec sname;

#define USE_TEXT_SECTION()					\
	fs_label = start_text;					\
	fs_start = text_start;					\
	.text

#define CLOSE_FIXED_SECTION(sname)				\
	USE_FIXED_SECTION(sname);				\
	. = sname##_len;					\
end_##sname:
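
/*
 * Illustrative sketch only, with a hypothetical name: once all entries have
 * been emitted, closing the section places the end_example_vectors label at
 * the section's full length (and triggers "attempt to move .org backwards"
 * if too much code was emitted into it):
 *
 *	CLOSE_FIXED_SECTION(example_vectors)
 */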


#define __FIXED_SECTION_ENTRY_BEGIN(sname, name, __align)	\
	USE_FIXED_SECTION(sname);				\
	.balign __align;					\
	.global name;						\
name:

#define FIXED_SECTION_ENTRY_BEGIN(sname, name)			\
	__FIXED_SECTION_ENTRY_BEGIN(sname, name, IFETCH_ALIGN_BYTES)
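
/*
 * Illustrative sketch only, with hypothetical names: once a section has been
 * opened, entries are emitted into it in source order, for example:
 *
 *	FIXED_SECTION_ENTRY_BEGIN(example_vectors, example_entry)
 *		mr	r11,r10
 *		blr
 *
 * The entry is aligned to IFETCH_ALIGN_BYTES and example_entry is made a
 * global label.
 */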

#define FIXED_SECTION_ENTRY_BEGIN_LOCATION(sname, name, start, size) \
	USE_FIXED_SECTION(sname);				\
	name##_start = (start);					\
	.if ((start) % (size) != 0);				\
	.error "Fixed section exception vector misalignment";	\
	.endif;							\
	.if ((size) != 0x20) && ((size) != 0x80) && ((size) != 0x100) && ((size) != 0x1000); \
	.error "Fixed section exception vector bad size";	\
	.endif;							\
	.if (start) < sname##_start;				\
	.error "Fixed section underflow";			\
	.abort;							\
	.endif;							\
	. = (start) - sname##_start;				\
	.global name;						\
name:

#define FIXED_SECTION_ENTRY_END_LOCATION(sname, name, start, size) \
	.if (start) + (size) > sname##_end;			\
	.error "Fixed section overflow";			\
	.abort;							\
	.endif;							\
	.if (. - name > (start) + (size) - name##_start);	\
	.error "Fixed entry overflow";				\
	.abort;							\
	.endif;							\
	. = ((start) + (size) - sname##_start);
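
/*
 * Illustrative sketch only, with hypothetical names and addresses: the
 * _LOCATION variants pin an entry to a fixed offset within its section.
 *
 *	FIXED_SECTION_ENTRY_BEGIN_LOCATION(example_vectors, example_vec, 0x300, 0x100)
 *		b	example_entry
 *	FIXED_SECTION_ENTRY_END_LOCATION(example_vectors, example_vec, 0x300, 0x100)
 *
 * The begin/end pair checks that the start is size-aligned, that the size is
 * one of the permitted vector sizes, and that the entry neither underflows
 * nor overflows the section, then advances the location counter to the end
 * of the 0x100-byte slot.
 */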


/*
 * These macros are used to obtain the absolute (run-time) addresses of
 * symbols located in fixed sections, either from other sections or from
 * within the current fixed section.
 *
 * - DEFINE_FIXED_SYMBOL / FIXED_SYMBOL_ABS_ADDR is used to find the
 *   absolute address of a symbol within a fixed section, from any section.
 *
 * - ABS_ADDR is used to find the absolute address of any symbol, from within
 *   a fixed section.
 */
#define DEFINE_FIXED_SYMBOL(label)				\
	label##_absolute = (label - fs_label + fs_start)

#define FIXED_SYMBOL_ABS_ADDR(label)				\
	(label##_absolute)

#define ABS_ADDR(label) (label - fs_label + fs_start)
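
/*
 * Illustrative sketch only, with hypothetical names: a label defined in a
 * fixed section can have its absolute address recorded while that section is
 * in use, and then be referenced from other sections.
 *
 *	USE_FIXED_SECTION(example_vectors)
 *	example_entry:
 *		...
 *	DEFINE_FIXED_SYMBOL(example_entry)
 *
 *	USE_TEXT_SECTION()
 *	ori	r10,r10,FIXED_SYMBOL_ABS_ADDR(example_entry)
 *
 * (The ori assumes the absolute address fits in its 16-bit immediate field,
 * as it does for the low fixed sections.) From code assembled within a fixed
 * section, ABS_ADDR(example_entry) performs the same
 * (label - fs_label + fs_start) calculation inline.
 */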

#endif /* __ASSEMBLY__ */

#endif	/* _ASM_POWERPC_HEAD_64_H */