Diffstat (limited to 'lib/test_bpf.c')
-rw-r--r--  lib/test_bpf.c | 141
1 file changed, 141 insertions(+), 0 deletions(-)
diff --git a/lib/test_bpf.c b/lib/test_bpf.c
index 072f9c51bd9b..e3c256963020 100644
--- a/lib/test_bpf.c
+++ b/lib/test_bpf.c
@@ -1917,6 +1917,147 @@ static struct bpf_test tests[] = {
{ { 0, -1 } }
},
{
+ /*
+ * Register (non-)clobbering tests for the case where a 32-bit
+ * JIT implements complex ALU64 operations via function calls.
+ * In that case, the function call must be invisible to the
+ * eBPF registers: the JIT must save and restore every live
+ * register around the call. The following tests check that
+ * the eBPF registers retain their values after such a call;
+ * a userspace sketch of the pattern follows the two tests.
+ */
+ "INT: Register clobbering, R1 updated",
+ .u.insns_int = {
+ BPF_ALU32_IMM(BPF_MOV, R0, 0),
+ BPF_ALU32_IMM(BPF_MOV, R1, 123456789),
+ BPF_ALU32_IMM(BPF_MOV, R2, 2),
+ BPF_ALU32_IMM(BPF_MOV, R3, 3),
+ BPF_ALU32_IMM(BPF_MOV, R4, 4),
+ BPF_ALU32_IMM(BPF_MOV, R5, 5),
+ BPF_ALU32_IMM(BPF_MOV, R6, 6),
+ BPF_ALU32_IMM(BPF_MOV, R7, 7),
+ BPF_ALU32_IMM(BPF_MOV, R8, 8),
+ BPF_ALU32_IMM(BPF_MOV, R9, 9),
+ BPF_ALU64_IMM(BPF_DIV, R1, 123456789),
+ BPF_JMP_IMM(BPF_JNE, R0, 0, 10),
+ BPF_JMP_IMM(BPF_JNE, R1, 1, 9),
+ BPF_JMP_IMM(BPF_JNE, R2, 2, 8),
+ BPF_JMP_IMM(BPF_JNE, R3, 3, 7),
+ BPF_JMP_IMM(BPF_JNE, R4, 4, 6),
+ BPF_JMP_IMM(BPF_JNE, R5, 5, 5),
+ BPF_JMP_IMM(BPF_JNE, R6, 6, 4),
+ BPF_JMP_IMM(BPF_JNE, R7, 7, 3),
+ BPF_JMP_IMM(BPF_JNE, R8, 8, 2),
+ BPF_JMP_IMM(BPF_JNE, R9, 9, 1),
+ BPF_ALU32_IMM(BPF_MOV, R0, 1),
+ BPF_EXIT_INSN(),
+ },
+ INTERNAL,
+ { },
+ { { 0, 1 } }
+ },
+ {
+ "INT: Register clobbering, R2 updated",
+ .u.insns_int = {
+ BPF_ALU32_IMM(BPF_MOV, R0, 0),
+ BPF_ALU32_IMM(BPF_MOV, R1, 1),
+ BPF_ALU32_IMM(BPF_MOV, R2, 2 * 123456789),
+ BPF_ALU32_IMM(BPF_MOV, R3, 3),
+ BPF_ALU32_IMM(BPF_MOV, R4, 4),
+ BPF_ALU32_IMM(BPF_MOV, R5, 5),
+ BPF_ALU32_IMM(BPF_MOV, R6, 6),
+ BPF_ALU32_IMM(BPF_MOV, R7, 7),
+ BPF_ALU32_IMM(BPF_MOV, R8, 8),
+ BPF_ALU32_IMM(BPF_MOV, R9, 9),
+ BPF_ALU64_IMM(BPF_DIV, R2, 123456789),
+ BPF_JMP_IMM(BPF_JNE, R0, 0, 10),
+ BPF_JMP_IMM(BPF_JNE, R1, 1, 9),
+ BPF_JMP_IMM(BPF_JNE, R2, 2, 8),
+ BPF_JMP_IMM(BPF_JNE, R3, 3, 7),
+ BPF_JMP_IMM(BPF_JNE, R4, 4, 6),
+ BPF_JMP_IMM(BPF_JNE, R5, 5, 5),
+ BPF_JMP_IMM(BPF_JNE, R6, 6, 4),
+ BPF_JMP_IMM(BPF_JNE, R7, 7, 3),
+ BPF_JMP_IMM(BPF_JNE, R8, 8, 2),
+ BPF_JMP_IMM(BPF_JNE, R9, 9, 1),
+ BPF_ALU32_IMM(BPF_MOV, R0, 1),
+ BPF_EXIT_INSN(),
+ },
+ INTERNAL,
+ { },
+ { { 0, 1 } }
+ },
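The helper-call pattern that the two clobbering tests verify can be modeled outside the kernel. Below is a minimal userspace C sketch of the invariant; jit_div64() and the regs[] array are hypothetical stand-ins for the arch-specific helper and register file, not names from the patch or from any JIT:

    #include <assert.h>
    #include <stdint.h>

    /* Hypothetical stand-in for the arch-specific division helper
     * a 32-bit JIT might call for BPF_ALU64 | BPF_DIV. */
    static uint64_t jit_div64(uint64_t dividend, uint64_t divisor)
    {
            /* eBPF defines the result of division by zero as 0 */
            return divisor ? dividend / divisor : 0;
    }

    int main(void)
    {
            uint64_t regs[10];      /* models eBPF registers R0..R9 */
            int i;

            for (i = 0; i < 10; i++)
                    regs[i] = i;
            regs[1] = 123456789;

            /* The call must be invisible: only R1, the destination,
             * may change, exactly as the tests above verify. */
            regs[1] = jit_div64(regs[1], 123456789);

            assert(regs[0] == 0);
            assert(regs[1] == 1);
            for (i = 2; i < 10; i++)
                    assert(regs[i] == (uint64_t)i);
            return 0;
    }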
+ {
+ /*
+ * Test 32-bit JITs that implement complex ALU64 operations as
+ * function calls with the fixed convention R0 = f(R1, R2) and
+ * must therefore re-arrange operands to fit that ABI; a sketch
+ * of the operand shuffling follows this test.
+ */
+#define NUMER 0xfedcba9876543210ULL
+#define DENOM 0x0123456789abcdefULL
+ "ALU64_DIV X: Operand register permutations",
+ .u.insns_int = {
+ /* R0 / R2 */
+ BPF_LD_IMM64(R0, NUMER),
+ BPF_LD_IMM64(R2, DENOM),
+ BPF_ALU64_REG(BPF_DIV, R0, R2),
+ BPF_JMP_IMM(BPF_JEQ, R0, NUMER / DENOM, 1),
+ BPF_EXIT_INSN(),
+ /* R1 / R0 */
+ BPF_LD_IMM64(R1, NUMER),
+ BPF_LD_IMM64(R0, DENOM),
+ BPF_ALU64_REG(BPF_DIV, R1, R0),
+ BPF_JMP_IMM(BPF_JEQ, R1, NUMER / DENOM, 1),
+ BPF_EXIT_INSN(),
+ /* R0 / R1 */
+ BPF_LD_IMM64(R0, NUMER),
+ BPF_LD_IMM64(R1, DENOM),
+ BPF_ALU64_REG(BPF_DIV, R0, R1),
+ BPF_JMP_IMM(BPF_JEQ, R0, NUMER / DENOM, 1),
+ BPF_EXIT_INSN(),
+ /* R2 / R0 */
+ BPF_LD_IMM64(R2, NUMER),
+ BPF_LD_IMM64(R0, DENOM),
+ BPF_ALU64_REG(BPF_DIV, R2, R0),
+ BPF_JMP_IMM(BPF_JEQ, R2, NUMER / DENOM, 1),
+ BPF_EXIT_INSN(),
+ /* R2 / R1 */
+ BPF_LD_IMM64(R2, NUMER),
+ BPF_LD_IMM64(R1, DENOM),
+ BPF_ALU64_REG(BPF_DIV, R2, R1),
+ BPF_JMP_IMM(BPF_JEQ, R2, NUMER / DENOM, 1),
+ BPF_EXIT_INSN(),
+ /* R1 / R2 */
+ BPF_LD_IMM64(R1, NUMER),
+ BPF_LD_IMM64(R2, DENOM),
+ BPF_ALU64_REG(BPF_DIV, R1, R2),
+ BPF_JMP_IMM(BPF_JEQ, R1, NUMER / DENOM, 1),
+ BPF_EXIT_INSN(),
+ /* R1 / R1 */
+ BPF_LD_IMM64(R1, NUMER),
+ BPF_ALU64_REG(BPF_DIV, R1, R1),
+ BPF_JMP_IMM(BPF_JEQ, R1, 1, 1),
+ BPF_EXIT_INSN(),
+ /* R2 / R2 */
+ BPF_LD_IMM64(R2, DENOM),
+ BPF_ALU64_REG(BPF_DIV, R2, R2),
+ BPF_JMP_IMM(BPF_JEQ, R2, 1, 1),
+ BPF_EXIT_INSN(),
+ /* R3 / R4 */
+ BPF_LD_IMM64(R3, NUMER),
+ BPF_LD_IMM64(R4, DENOM),
+ BPF_ALU64_REG(BPF_DIV, R3, R4),
+ BPF_JMP_IMM(BPF_JEQ, R3, NUMER / DENOM, 1),
+ BPF_EXIT_INSN(),
+ /* Successful return */
+ BPF_LD_IMM64(R0, 1),
+ BPF_EXIT_INSN(),
+ },
+ INTERNAL,
+ { },
+ { { 0, 1 } },
+#undef NUMER
+#undef DENOM
+ },
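The operand permutations above stress the second requirement: with a fixed R0 = f(R1, R2) helper convention, the JIT must route whichever registers the instruction names into the helper's argument slots, then write the result back to the destination only. A minimal userspace sketch, again using hypothetical names (f_div64, alu64_div_reg) that do not come from the patch:

    #include <assert.h>
    #include <stdint.h>

    #define NUMER 0xfedcba9876543210ULL
    #define DENOM 0x0123456789abcdefULL

    /* Hypothetical division helper with the fixed R0 = f(R1, R2) ABI. */
    static uint64_t f_div64(uint64_t dividend, uint64_t divisor)
    {
            return divisor ? dividend / divisor : 0;
    }

    /* Models what a JIT must do for BPF_ALU64_REG(BPF_DIV, dst, src):
     * feed regs[dst] and regs[src] into the helper's argument slots in
     * the right order, then store the result back into dst only. */
    static void alu64_div_reg(uint64_t regs[], int dst, int src)
    {
            regs[dst] = f_div64(regs[dst], regs[src]);
    }

    int main(void)
    {
            uint64_t regs[10] = { 0 };

            /* R2 / R0: both operands collide with the call ABI */
            regs[2] = NUMER;
            regs[0] = DENOM;
            alu64_div_reg(regs, 2, 0);
            assert(regs[2] == NUMER / DENOM);
            assert(regs[0] == DENOM);       /* the source must survive */

            /* R1 / R1: both operands alias the same register */
            regs[1] = NUMER;
            alu64_div_reg(regs, 1, 1);
            assert(regs[1] == 1);
            return 0;
    }

As with every case in this file, the kernel runs these tests when the test_bpf module (CONFIG_TEST_BPF) is loaded and reports pass/fail results via the kernel log.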
+ {
"check: missing ret",
.u.insns = {
BPF_STMT(BPF_LD | BPF_IMM, 1),