bpf, test: add a couple of test cases
author     Daniel Borkmann <daniel@iogearbox.net>
           Thu, 17 Dec 2015 22:51:57 +0000 (23:51 +0100)
committer  David S. Miller <davem@davemloft.net>
           Fri, 18 Dec 2015 21:04:51 +0000 (16:04 -0500)
Add a couple of test cases for the interpreter but also for the JITs, e.g.
to test that when imm32 moves are performed, the upper 32 bits of the
registers are zero extended.
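
For instance, the MOV REG32 case below relies on a 32 bit alu mov clearing
the upper half of the destination register; as a minimal sketch (the insns
are from the patch, the inline comments are added here for illustration):

  BPF_LD_IMM64(R0, 0xffffffffffffffffLL), /* R0 = 0xffffffffffffffff */
  BPF_ALU32_IMM(BPF_MOV, R0, 0),          /* R0 = 0x0, not 0xffffffff00000000 */

If a buggy JIT left the upper half intact, the subsequent chain of 64 bit
adds would pick up the stale upper bits and the final result would differ
from the expected 0xfefe.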

Without JIT:

  [...]
  [ 1114.129301] test_bpf: #43 MOV REG64 jited:0 128 PASS
  [ 1114.130626] test_bpf: #44 MOV REG32 jited:0 139 PASS
  [ 1114.132055] test_bpf: #45 LD IMM64 jited:0 124 PASS
  [...]

With JIT (the generated code can, as usual, be verified nicely with the
help of the bpf_jit_disasm tool):

  [...]
  [ 1062.726782] test_bpf: #43 MOV REG64 jited:1 6 PASS
  [ 1062.726890] test_bpf: #44 MOV REG32 jited:1 6 PASS
  [ 1062.726993] test_bpf: #45 LD IMM64 jited:1 6 PASS
  [...]
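
A quick usage sketch for inspecting the JITed image (assuming an arch with
an eBPF JIT, test_bpf built as a module, and bpf_jit_disasm built from the
kernel tools tree; the exact steps are not part of this patch):

  echo 2 > /proc/sys/net/core/bpf_jit_enable  # JIT with image dump to the kernel log
  insmod test_bpf.ko                          # run the test suite
  bpf_jit_disasm                              # disassemble the last dumped image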

Signed-off-by: Daniel Borkmann <daniel@iogearbox.net>
Acked-by: Alexei Starovoitov <ast@kernel.org>
Signed-off-by: David S. Miller <davem@davemloft.net>
diff --git a/lib/test_bpf.c b/lib/test_bpf.c
index 10cd1860e5b04aa339853ff893855941aba41600..27a7a26b1ece2e145296b144096191ffa6af504e 100644
--- a/lib/test_bpf.c
+++ b/lib/test_bpf.c
@@ -1685,6 +1685,126 @@ static struct bpf_test tests[] = {
                { },
                { { 0, 0x35d97ef2 } }
        },
+       {       /* Mainly checking JIT here. */
+               "MOV REG64",
+               .u.insns_int = {
+                       BPF_LD_IMM64(R0, 0xffffffffffffffffLL),
+                       BPF_MOV64_REG(R1, R0),
+                       BPF_MOV64_REG(R2, R1),
+                       BPF_MOV64_REG(R3, R2),
+                       BPF_MOV64_REG(R4, R3),
+                       BPF_MOV64_REG(R5, R4),
+                       BPF_MOV64_REG(R6, R5),
+                       BPF_MOV64_REG(R7, R6),
+                       BPF_MOV64_REG(R8, R7),
+                       BPF_MOV64_REG(R9, R8),
+                       BPF_ALU64_IMM(BPF_MOV, R0, 0),
+                       BPF_ALU64_IMM(BPF_MOV, R1, 0),
+                       BPF_ALU64_IMM(BPF_MOV, R2, 0),
+                       BPF_ALU64_IMM(BPF_MOV, R3, 0),
+                       BPF_ALU64_IMM(BPF_MOV, R4, 0),
+                       BPF_ALU64_IMM(BPF_MOV, R5, 0),
+                       BPF_ALU64_IMM(BPF_MOV, R6, 0),
+                       BPF_ALU64_IMM(BPF_MOV, R7, 0),
+                       BPF_ALU64_IMM(BPF_MOV, R8, 0),
+                       BPF_ALU64_IMM(BPF_MOV, R9, 0),
+                       BPF_ALU64_REG(BPF_ADD, R0, R0),
+                       BPF_ALU64_REG(BPF_ADD, R0, R1),
+                       BPF_ALU64_REG(BPF_ADD, R0, R2),
+                       BPF_ALU64_REG(BPF_ADD, R0, R3),
+                       BPF_ALU64_REG(BPF_ADD, R0, R4),
+                       BPF_ALU64_REG(BPF_ADD, R0, R5),
+                       BPF_ALU64_REG(BPF_ADD, R0, R6),
+                       BPF_ALU64_REG(BPF_ADD, R0, R7),
+                       BPF_ALU64_REG(BPF_ADD, R0, R8),
+                       BPF_ALU64_REG(BPF_ADD, R0, R9),
+                       BPF_ALU64_IMM(BPF_ADD, R0, 0xfefe),
+                       BPF_EXIT_INSN(),
+               },
+               INTERNAL,
+               { },
+               { { 0, 0xfefe } }
+       },
+       {       /* Mainly checking JIT here. */
+               "MOV REG32",
+               .u.insns_int = {
+                       BPF_LD_IMM64(R0, 0xffffffffffffffffLL),
+                       BPF_MOV64_REG(R1, R0),
+                       BPF_MOV64_REG(R2, R1),
+                       BPF_MOV64_REG(R3, R2),
+                       BPF_MOV64_REG(R4, R3),
+                       BPF_MOV64_REG(R5, R4),
+                       BPF_MOV64_REG(R6, R5),
+                       BPF_MOV64_REG(R7, R6),
+                       BPF_MOV64_REG(R8, R7),
+                       BPF_MOV64_REG(R9, R8),
+                       BPF_ALU32_IMM(BPF_MOV, R0, 0),
+                       BPF_ALU32_IMM(BPF_MOV, R1, 0),
+                       BPF_ALU32_IMM(BPF_MOV, R2, 0),
+                       BPF_ALU32_IMM(BPF_MOV, R3, 0),
+                       BPF_ALU32_IMM(BPF_MOV, R4, 0),
+                       BPF_ALU32_IMM(BPF_MOV, R5, 0),
+                       BPF_ALU32_IMM(BPF_MOV, R6, 0),
+                       BPF_ALU32_IMM(BPF_MOV, R7, 0),
+                       BPF_ALU32_IMM(BPF_MOV, R8, 0),
+                       BPF_ALU32_IMM(BPF_MOV, R9, 0),
+                       BPF_ALU64_REG(BPF_ADD, R0, R0),
+                       BPF_ALU64_REG(BPF_ADD, R0, R1),
+                       BPF_ALU64_REG(BPF_ADD, R0, R2),
+                       BPF_ALU64_REG(BPF_ADD, R0, R3),
+                       BPF_ALU64_REG(BPF_ADD, R0, R4),
+                       BPF_ALU64_REG(BPF_ADD, R0, R5),
+                       BPF_ALU64_REG(BPF_ADD, R0, R6),
+                       BPF_ALU64_REG(BPF_ADD, R0, R7),
+                       BPF_ALU64_REG(BPF_ADD, R0, R8),
+                       BPF_ALU64_REG(BPF_ADD, R0, R9),
+                       BPF_ALU64_IMM(BPF_ADD, R0, 0xfefe),
+                       BPF_EXIT_INSN(),
+               },
+               INTERNAL,
+               { },
+               { { 0, 0xfefe } }
+       },
+       {       /* Mainly checking JIT here. */
+               "LD IMM64",
+               .u.insns_int = {
+                       BPF_LD_IMM64(R0, 0xffffffffffffffffLL),
+                       BPF_MOV64_REG(R1, R0),
+                       BPF_MOV64_REG(R2, R1),
+                       BPF_MOV64_REG(R3, R2),
+                       BPF_MOV64_REG(R4, R3),
+                       BPF_MOV64_REG(R5, R4),
+                       BPF_MOV64_REG(R6, R5),
+                       BPF_MOV64_REG(R7, R6),
+                       BPF_MOV64_REG(R8, R7),
+                       BPF_MOV64_REG(R9, R8),
+                       BPF_LD_IMM64(R0, 0x0LL),
+                       BPF_LD_IMM64(R1, 0x0LL),
+                       BPF_LD_IMM64(R2, 0x0LL),
+                       BPF_LD_IMM64(R3, 0x0LL),
+                       BPF_LD_IMM64(R4, 0x0LL),
+                       BPF_LD_IMM64(R5, 0x0LL),
+                       BPF_LD_IMM64(R6, 0x0LL),
+                       BPF_LD_IMM64(R7, 0x0LL),
+                       BPF_LD_IMM64(R8, 0x0LL),
+                       BPF_LD_IMM64(R9, 0x0LL),
+                       BPF_ALU64_REG(BPF_ADD, R0, R0),
+                       BPF_ALU64_REG(BPF_ADD, R0, R1),
+                       BPF_ALU64_REG(BPF_ADD, R0, R2),
+                       BPF_ALU64_REG(BPF_ADD, R0, R3),
+                       BPF_ALU64_REG(BPF_ADD, R0, R4),
+                       BPF_ALU64_REG(BPF_ADD, R0, R5),
+                       BPF_ALU64_REG(BPF_ADD, R0, R6),
+                       BPF_ALU64_REG(BPF_ADD, R0, R7),
+                       BPF_ALU64_REG(BPF_ADD, R0, R8),
+                       BPF_ALU64_REG(BPF_ADD, R0, R9),
+                       BPF_ALU64_IMM(BPF_ADD, R0, 0xfefe),
+                       BPF_EXIT_INSN(),
+               },
+               INTERNAL,
+               { },
+               { { 0, 0xfefe } }
+       },
        {
                "INT: ALU MIX",
                .u.insns_int = {