
lib: test_bpf: purge CPP register redefinitions

Message ID 87twu0df3k.fsf@gmail.com
State Not Applicable, archived
Delegated to: David Miller

Commit Message

Nicolai Stange June 21, 2015, 7:41 p.m. UTC
Fix compilation failure with allmodconfig on ARCH=um:
  lib/test_bpf.c:50:0: warning: "R8" redefined
   #define R8  BPF_REG_8
   ^
  In file included from arch/um/include/asm/ptrace-generic.h:11:0,
                   from arch/x86/um/asm/ptrace.h:8,
                   from arch/x86/include/asm/alternative.h:8,
                   from arch/x86/include/asm/bitops.h:16,
                   from include/linux/bitops.h:36,
                   from include/linux/kernel.h:10,
                   from include/linux/list.h:8,
                   from include/linux/module.h:9,
                   from lib/test_bpf.c:19:
  arch/x86/include/uapi/asm/ptrace-abi.h:42:0:
    note: this is the location of the previous definition
     #define R8 72

Get rid of the
  #define Rx BPF_REG_x
defines by replacing the Rx macros with their BPF_REG_x expansions
in test_bpf.c.

Signed-off-by: Nicolai Stange <nicstange@gmail.com>
---
Tested:
  - compilation for ARCH=x86_64 and ARCH=um
  - 'modprobe test_bpf' on ARCH=x86_64

 lib/test_bpf.c | 2374 ++++++++++++++++++++++++++++----------------------------
 1 file changed, 1193 insertions(+), 1181 deletions(-)

Comments

Alexei Starovoitov June 22, 2015, 6:05 a.m. UTC | #1
On Sun, Jun 21, 2015 at 09:41:03PM +0200, Nicolai Stange wrote:
> Fix compilation failure with allmodconfig on ARCH=um:
>   lib/test_bpf.c:50:0: warning: "R8" redefined
>    #define R8  BPF_REG_8
>    ^
>   In file included from arch/um/include/asm/ptrace-generic.h:11:0,
>                    from arch/x86/um/asm/ptrace.h:8,
>                    from arch/x86/include/asm/alternative.h:8,
>                    from arch/x86/include/asm/bitops.h:16,
>                    from include/linux/bitops.h:36,
>                    from include/linux/kernel.h:10,
>                    from include/linux/list.h:8,
>                    from include/linux/module.h:9,
>                    from lib/test_bpf.c:19:
>   arch/x86/include/uapi/asm/ptrace-abi.h:42:0:
>     note: this is the location of the previous definition
>      #define R8 72
> 
> Get rid of the
>   #define Rx BPF_REG_x
> defines by replacing the Rx macros with their BPF_REG_x expansions
> in test_bpf.c.
> 
> Signed-off-by: Nicolai Stange <nicstange@gmail.com>
> ---
> Tested:
>   - compilation for ARCH=x86_64 and ARCH=um
>   - 'modprobe test_bpf' on ARCH=x86_64
> 
>  lib/test_bpf.c | 2374 ++++++++++++++++++++++++++++----------------------------
>  1 file changed, 1193 insertions(+), 1181 deletions(-)

to get rid of a warning you're proposing to do 1k-line renames?!
Just add:
+#undef R8
+#undef R9
+#undef R10
 #define R0             BPF_REG_0

Though I think the better fix would be to clean up:
arch/x86/include/uapi/asm/ptrace-abi.h
What's the point of:
#define R8 72
from 'uapi' point of view?
Looks like kernel details that shouldn't be exposed in uapi.
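
Spelled out, a minimal sketch of this suggested alternative in lib/test_bpf.c
(assuming only R8-R10 collide with ptrace-abi.h; not the variant that was
ultimately applied):

	/* ARCH=um indirectly pulls in arch/x86/include/uapi/asm/ptrace-abi.h,
	 * which already defines R8, R9 and R10 as ptrace register offsets.
	 * Drop those definitions before shadowing them below. */
	#undef R8
	#undef R9
	#undef R10

	/* Redefine REGs to make tests less verbose (unchanged) */
	#define R0		BPF_REG_0
	/* ... R1 through R9 as before ... */
	#define R10		BPF_REG_10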

Richard Weinberger June 22, 2015, 6:52 a.m. UTC | #2
Am 22.06.2015 um 08:05 schrieb Alexei Starovoitov:
> to get rid of a warning you're proposing to do 1k-line renames?!
> Just add:
> +#undef R8
> +#undef R9
> +#undef R10
>  #define R0             BPF_REG_0

This would also be just another hack.

> Though I think the better fix would be to clean up:
> arch/x86/include/uapi/asm/ptrace-abi.h
> What's the point of:
> #define R8 72
> from 'uapi' point of view?

To query cpu registers using ptrace(2).
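
For illustration, a sketch of that use, assuming an x86-64 tracee (not code
from this thread): the uapi constant is a byte offset into the tracee's
USER area, consumed e.g. via PTRACE_PEEKUSER:

	#include <sys/ptrace.h>
	#include <sys/types.h>
	#include <asm/ptrace-abi.h>	/* R8 == 72: byte offset of %r8 */

	/* Read the stopped tracee's %r8 from its USER area. */
	static long read_r8(pid_t pid)
	{
		return ptrace(PTRACE_PEEKUSER, pid, (void *)R8, NULL);
	}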

> Looks like kernel details that shouldn't be exposed in uapi.

These are not kernel details.

Actually, the problem is the other way around.
UML is Linux ported to its own userspace ABI.
Hence, arch/um and arch/x86/um use uapi header files.

Maybe we can rework UML's header files such that
no uapi header pollutes the kernel namespace.

That said, lib/test_bpf.c should still not use
defines like R8, as such symbol names are very generic.

Thanks,
//richard
David Miller June 23, 2015, 1:47 p.m. UTC | #3
From: Nicolai Stange <nicstange@gmail.com>
Date: Sun, 21 Jun 2015 21:41:03 +0200

> Fix compilation failure with allmodconfig on ARCH=um:
>   lib/test_bpf.c:50:0: warning: "R8" redefined
>    #define R8  BPF_REG_8
>    ^
>   In file included from arch/um/include/asm/ptrace-generic.h:11:0,
>                    from arch/x86/um/asm/ptrace.h:8,
>                    from arch/x86/include/asm/alternative.h:8,
>                    from arch/x86/include/asm/bitops.h:16,
>                    from include/linux/bitops.h:36,
>                    from include/linux/kernel.h:10,
>                    from include/linux/list.h:8,
>                    from include/linux/module.h:9,
>                    from lib/test_bpf.c:19:
>   arch/x86/include/uapi/asm/ptrace-abi.h:42:0:
>     note: this is the location of the previous definition
>      #define R8 72
> 
> Get rid of the
>   #define Rx BPF_REG_x
> defines by replacing the Rx macros with their BPF_REG_x expansions
> in test_bpf.c.
> 
> Signed-off-by: Nicolai Stange <nicstange@gmail.com>

Applied, thanks.

Patch

diff --git a/lib/test_bpf.c b/lib/test_bpf.c
index 7f58c73..8618325 100644
--- a/lib/test_bpf.c
+++ b/lib/test_bpf.c
@@ -39,19 +39,6 @@ 
 #define SKB_DEV_IFINDEX	577
 #define SKB_DEV_TYPE	588
 
-/* Redefine REGs to make tests less verbose */
-#define R0		BPF_REG_0
-#define R1		BPF_REG_1
-#define R2		BPF_REG_2
-#define R3		BPF_REG_3
-#define R4		BPF_REG_4
-#define R5		BPF_REG_5
-#define R6		BPF_REG_6
-#define R7		BPF_REG_7
-#define R8		BPF_REG_8
-#define R9		BPF_REG_9
-#define R10		BPF_REG_10
-
 /* Flags that can be passed to test cases */
 #define FLAG_NO_DATA		BIT(0)
 #define FLAG_EXPECTED_FAIL	BIT(1)
@@ -274,11 +261,11 @@  static int bpf_fill_maxinsns9(struct bpf_test *self)
 		return -ENOMEM;
 
 	insn[0] = BPF_JMP_IMM(BPF_JA, 0, 0, len - 2);
-	insn[1] = BPF_ALU32_IMM(BPF_MOV, R0, 0xcbababab);
+	insn[1] = BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 0xcbababab);
 	insn[2] = BPF_EXIT_INSN();
 
 	for (i = 3; i < len - 2; i++)
-		insn[i] = BPF_ALU32_IMM(BPF_MOV, R0, 0xfefefefe);
+		insn[i] = BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 0xfefefefe);
 
 	insn[len - 2] = BPF_EXIT_INSN();
 	insn[len - 1] = BPF_JMP_IMM(BPF_JA, 0, 0, -(len - 1));
@@ -305,7 +292,7 @@  static int bpf_fill_maxinsns10(struct bpf_test *self)
 		insn[i] = BPF_JMP_IMM(BPF_JA, 0, 0, hlen - 1 - 2 * i);
 
 	insn[hlen / 2] = BPF_JMP_IMM(BPF_JA, 0, 0, hlen / 2 - 1);
-	insn[hlen]     = BPF_ALU32_IMM(BPF_MOV, R0, 0xabababac);
+	insn[hlen]     = BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 0xabababac);
 	insn[hlen + 1] = BPF_EXIT_INSN();
 
 	self->u.ptr.insns = insn;
@@ -974,13 +961,13 @@  static struct bpf_test tests[] = {
 	{
 		"INT: ADD trivial",
 		.u.insns_int = {
-			BPF_ALU64_IMM(BPF_MOV, R1, 1),
-			BPF_ALU64_IMM(BPF_ADD, R1, 2),
-			BPF_ALU64_IMM(BPF_MOV, R2, 3),
-			BPF_ALU64_REG(BPF_SUB, R1, R2),
-			BPF_ALU64_IMM(BPF_ADD, R1, -1),
-			BPF_ALU64_IMM(BPF_MUL, R1, 3),
-			BPF_ALU64_REG(BPF_MOV, R0, R1),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_1, 1),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, 2),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_2, 3),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_1, BPF_REG_2),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, -1),
+			BPF_ALU64_IMM(BPF_MUL, BPF_REG_1, 3),
+			BPF_ALU64_REG(BPF_MOV, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -990,13 +977,13 @@  static struct bpf_test tests[] = {
 	{
 		"INT: MUL_X",
 		.u.insns_int = {
-			BPF_ALU64_IMM(BPF_MOV, R0, -1),
-			BPF_ALU64_IMM(BPF_MOV, R1, -1),
-			BPF_ALU64_IMM(BPF_MOV, R2, 3),
-			BPF_ALU64_REG(BPF_MUL, R1, R2),
-			BPF_JMP_IMM(BPF_JEQ, R1, 0xfffffffd, 1),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_0, -1),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_1, -1),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_2, 3),
+			BPF_ALU64_REG(BPF_MUL, BPF_REG_1, BPF_REG_2),
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_1, 0xfffffffd, 1),
 			BPF_EXIT_INSN(),
-			BPF_ALU64_IMM(BPF_MOV, R0, 1),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -1006,14 +993,14 @@  static struct bpf_test tests[] = {
 	{
 		"INT: MUL_X2",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R0, -1),
-			BPF_ALU32_IMM(BPF_MOV, R1, -1),
-			BPF_ALU32_IMM(BPF_MOV, R2, 3),
-			BPF_ALU64_REG(BPF_MUL, R1, R2),
-			BPF_ALU64_IMM(BPF_RSH, R1, 8),
-			BPF_JMP_IMM(BPF_JEQ, R1, 0x2ffffff, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, -1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, -1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_2, 3),
+			BPF_ALU64_REG(BPF_MUL, BPF_REG_1, BPF_REG_2),
+			BPF_ALU64_IMM(BPF_RSH, BPF_REG_1, 8),
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_1, 0x2ffffff, 1),
 			BPF_EXIT_INSN(),
-			BPF_ALU32_IMM(BPF_MOV, R0, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -1023,14 +1010,14 @@  static struct bpf_test tests[] = {
 	{
 		"INT: MUL32_X",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R0, -1),
-			BPF_ALU64_IMM(BPF_MOV, R1, -1),
-			BPF_ALU32_IMM(BPF_MOV, R2, 3),
-			BPF_ALU32_REG(BPF_MUL, R1, R2),
-			BPF_ALU64_IMM(BPF_RSH, R1, 8),
-			BPF_JMP_IMM(BPF_JEQ, R1, 0xffffff, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, -1),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_1, -1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_2, 3),
+			BPF_ALU32_REG(BPF_MUL, BPF_REG_1, BPF_REG_2),
+			BPF_ALU64_IMM(BPF_RSH, BPF_REG_1, 8),
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_1, 0xffffff, 1),
 			BPF_EXIT_INSN(),
-			BPF_ALU32_IMM(BPF_MOV, R0, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -1044,155 +1031,165 @@  static struct bpf_test tests[] = {
 		 */
 		"INT: ADD 64-bit",
 		.u.insns_int = {
-			BPF_ALU64_IMM(BPF_MOV, R0, 0),
-			BPF_ALU64_IMM(BPF_MOV, R1, 1),
-			BPF_ALU64_IMM(BPF_MOV, R2, 2),
-			BPF_ALU64_IMM(BPF_MOV, R3, 3),
-			BPF_ALU64_IMM(BPF_MOV, R4, 4),
-			BPF_ALU64_IMM(BPF_MOV, R5, 5),
-			BPF_ALU64_IMM(BPF_MOV, R6, 6),
-			BPF_ALU64_IMM(BPF_MOV, R7, 7),
-			BPF_ALU64_IMM(BPF_MOV, R8, 8),
-			BPF_ALU64_IMM(BPF_MOV, R9, 9),
-			BPF_ALU64_IMM(BPF_ADD, R0, 20),
-			BPF_ALU64_IMM(BPF_ADD, R1, 20),
-			BPF_ALU64_IMM(BPF_ADD, R2, 20),
-			BPF_ALU64_IMM(BPF_ADD, R3, 20),
-			BPF_ALU64_IMM(BPF_ADD, R4, 20),
-			BPF_ALU64_IMM(BPF_ADD, R5, 20),
-			BPF_ALU64_IMM(BPF_ADD, R6, 20),
-			BPF_ALU64_IMM(BPF_ADD, R7, 20),
-			BPF_ALU64_IMM(BPF_ADD, R8, 20),
-			BPF_ALU64_IMM(BPF_ADD, R9, 20),
-			BPF_ALU64_IMM(BPF_SUB, R0, 10),
-			BPF_ALU64_IMM(BPF_SUB, R1, 10),
-			BPF_ALU64_IMM(BPF_SUB, R2, 10),
-			BPF_ALU64_IMM(BPF_SUB, R3, 10),
-			BPF_ALU64_IMM(BPF_SUB, R4, 10),
-			BPF_ALU64_IMM(BPF_SUB, R5, 10),
-			BPF_ALU64_IMM(BPF_SUB, R6, 10),
-			BPF_ALU64_IMM(BPF_SUB, R7, 10),
-			BPF_ALU64_IMM(BPF_SUB, R8, 10),
-			BPF_ALU64_IMM(BPF_SUB, R9, 10),
-			BPF_ALU64_REG(BPF_ADD, R0, R0),
-			BPF_ALU64_REG(BPF_ADD, R0, R1),
-			BPF_ALU64_REG(BPF_ADD, R0, R2),
-			BPF_ALU64_REG(BPF_ADD, R0, R3),
-			BPF_ALU64_REG(BPF_ADD, R0, R4),
-			BPF_ALU64_REG(BPF_ADD, R0, R5),
-			BPF_ALU64_REG(BPF_ADD, R0, R6),
-			BPF_ALU64_REG(BPF_ADD, R0, R7),
-			BPF_ALU64_REG(BPF_ADD, R0, R8),
-			BPF_ALU64_REG(BPF_ADD, R0, R9), /* R0 == 155 */
-			BPF_JMP_IMM(BPF_JEQ, R0, 155, 1),
-			BPF_EXIT_INSN(),
-			BPF_ALU64_REG(BPF_ADD, R1, R0),
-			BPF_ALU64_REG(BPF_ADD, R1, R1),
-			BPF_ALU64_REG(BPF_ADD, R1, R2),
-			BPF_ALU64_REG(BPF_ADD, R1, R3),
-			BPF_ALU64_REG(BPF_ADD, R1, R4),
-			BPF_ALU64_REG(BPF_ADD, R1, R5),
-			BPF_ALU64_REG(BPF_ADD, R1, R6),
-			BPF_ALU64_REG(BPF_ADD, R1, R7),
-			BPF_ALU64_REG(BPF_ADD, R1, R8),
-			BPF_ALU64_REG(BPF_ADD, R1, R9), /* R1 == 456 */
-			BPF_JMP_IMM(BPF_JEQ, R1, 456, 1),
-			BPF_EXIT_INSN(),
-			BPF_ALU64_REG(BPF_ADD, R2, R0),
-			BPF_ALU64_REG(BPF_ADD, R2, R1),
-			BPF_ALU64_REG(BPF_ADD, R2, R2),
-			BPF_ALU64_REG(BPF_ADD, R2, R3),
-			BPF_ALU64_REG(BPF_ADD, R2, R4),
-			BPF_ALU64_REG(BPF_ADD, R2, R5),
-			BPF_ALU64_REG(BPF_ADD, R2, R6),
-			BPF_ALU64_REG(BPF_ADD, R2, R7),
-			BPF_ALU64_REG(BPF_ADD, R2, R8),
-			BPF_ALU64_REG(BPF_ADD, R2, R9), /* R2 == 1358 */
-			BPF_JMP_IMM(BPF_JEQ, R2, 1358, 1),
-			BPF_EXIT_INSN(),
-			BPF_ALU64_REG(BPF_ADD, R3, R0),
-			BPF_ALU64_REG(BPF_ADD, R3, R1),
-			BPF_ALU64_REG(BPF_ADD, R3, R2),
-			BPF_ALU64_REG(BPF_ADD, R3, R3),
-			BPF_ALU64_REG(BPF_ADD, R3, R4),
-			BPF_ALU64_REG(BPF_ADD, R3, R5),
-			BPF_ALU64_REG(BPF_ADD, R3, R6),
-			BPF_ALU64_REG(BPF_ADD, R3, R7),
-			BPF_ALU64_REG(BPF_ADD, R3, R8),
-			BPF_ALU64_REG(BPF_ADD, R3, R9), /* R3 == 4063 */
-			BPF_JMP_IMM(BPF_JEQ, R3, 4063, 1),
-			BPF_EXIT_INSN(),
-			BPF_ALU64_REG(BPF_ADD, R4, R0),
-			BPF_ALU64_REG(BPF_ADD, R4, R1),
-			BPF_ALU64_REG(BPF_ADD, R4, R2),
-			BPF_ALU64_REG(BPF_ADD, R4, R3),
-			BPF_ALU64_REG(BPF_ADD, R4, R4),
-			BPF_ALU64_REG(BPF_ADD, R4, R5),
-			BPF_ALU64_REG(BPF_ADD, R4, R6),
-			BPF_ALU64_REG(BPF_ADD, R4, R7),
-			BPF_ALU64_REG(BPF_ADD, R4, R8),
-			BPF_ALU64_REG(BPF_ADD, R4, R9), /* R4 == 12177 */
-			BPF_JMP_IMM(BPF_JEQ, R4, 12177, 1),
-			BPF_EXIT_INSN(),
-			BPF_ALU64_REG(BPF_ADD, R5, R0),
-			BPF_ALU64_REG(BPF_ADD, R5, R1),
-			BPF_ALU64_REG(BPF_ADD, R5, R2),
-			BPF_ALU64_REG(BPF_ADD, R5, R3),
-			BPF_ALU64_REG(BPF_ADD, R5, R4),
-			BPF_ALU64_REG(BPF_ADD, R5, R5),
-			BPF_ALU64_REG(BPF_ADD, R5, R6),
-			BPF_ALU64_REG(BPF_ADD, R5, R7),
-			BPF_ALU64_REG(BPF_ADD, R5, R8),
-			BPF_ALU64_REG(BPF_ADD, R5, R9), /* R5 == 36518 */
-			BPF_JMP_IMM(BPF_JEQ, R5, 36518, 1),
-			BPF_EXIT_INSN(),
-			BPF_ALU64_REG(BPF_ADD, R6, R0),
-			BPF_ALU64_REG(BPF_ADD, R6, R1),
-			BPF_ALU64_REG(BPF_ADD, R6, R2),
-			BPF_ALU64_REG(BPF_ADD, R6, R3),
-			BPF_ALU64_REG(BPF_ADD, R6, R4),
-			BPF_ALU64_REG(BPF_ADD, R6, R5),
-			BPF_ALU64_REG(BPF_ADD, R6, R6),
-			BPF_ALU64_REG(BPF_ADD, R6, R7),
-			BPF_ALU64_REG(BPF_ADD, R6, R8),
-			BPF_ALU64_REG(BPF_ADD, R6, R9), /* R6 == 109540 */
-			BPF_JMP_IMM(BPF_JEQ, R6, 109540, 1),
-			BPF_EXIT_INSN(),
-			BPF_ALU64_REG(BPF_ADD, R7, R0),
-			BPF_ALU64_REG(BPF_ADD, R7, R1),
-			BPF_ALU64_REG(BPF_ADD, R7, R2),
-			BPF_ALU64_REG(BPF_ADD, R7, R3),
-			BPF_ALU64_REG(BPF_ADD, R7, R4),
-			BPF_ALU64_REG(BPF_ADD, R7, R5),
-			BPF_ALU64_REG(BPF_ADD, R7, R6),
-			BPF_ALU64_REG(BPF_ADD, R7, R7),
-			BPF_ALU64_REG(BPF_ADD, R7, R8),
-			BPF_ALU64_REG(BPF_ADD, R7, R9), /* R7 == 328605 */
-			BPF_JMP_IMM(BPF_JEQ, R7, 328605, 1),
-			BPF_EXIT_INSN(),
-			BPF_ALU64_REG(BPF_ADD, R8, R0),
-			BPF_ALU64_REG(BPF_ADD, R8, R1),
-			BPF_ALU64_REG(BPF_ADD, R8, R2),
-			BPF_ALU64_REG(BPF_ADD, R8, R3),
-			BPF_ALU64_REG(BPF_ADD, R8, R4),
-			BPF_ALU64_REG(BPF_ADD, R8, R5),
-			BPF_ALU64_REG(BPF_ADD, R8, R6),
-			BPF_ALU64_REG(BPF_ADD, R8, R7),
-			BPF_ALU64_REG(BPF_ADD, R8, R8),
-			BPF_ALU64_REG(BPF_ADD, R8, R9), /* R8 == 985799 */
-			BPF_JMP_IMM(BPF_JEQ, R8, 985799, 1),
-			BPF_EXIT_INSN(),
-			BPF_ALU64_REG(BPF_ADD, R9, R0),
-			BPF_ALU64_REG(BPF_ADD, R9, R1),
-			BPF_ALU64_REG(BPF_ADD, R9, R2),
-			BPF_ALU64_REG(BPF_ADD, R9, R3),
-			BPF_ALU64_REG(BPF_ADD, R9, R4),
-			BPF_ALU64_REG(BPF_ADD, R9, R5),
-			BPF_ALU64_REG(BPF_ADD, R9, R6),
-			BPF_ALU64_REG(BPF_ADD, R9, R7),
-			BPF_ALU64_REG(BPF_ADD, R9, R8),
-			BPF_ALU64_REG(BPF_ADD, R9, R9), /* R9 == 2957380 */
-			BPF_ALU64_REG(BPF_MOV, R0, R9),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_0, 0),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_1, 1),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_2, 2),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_3, 3),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_4, 4),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_5, 5),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_6, 6),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_7, 7),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_8, 8),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_9, 9),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_0, 20),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, 20),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, 20),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_3, 20),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_4, 20),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_5, 20),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_6, 20),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_7, 20),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_8, 20),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_9, 20),
+			BPF_ALU64_IMM(BPF_SUB, BPF_REG_0, 10),
+			BPF_ALU64_IMM(BPF_SUB, BPF_REG_1, 10),
+			BPF_ALU64_IMM(BPF_SUB, BPF_REG_2, 10),
+			BPF_ALU64_IMM(BPF_SUB, BPF_REG_3, 10),
+			BPF_ALU64_IMM(BPF_SUB, BPF_REG_4, 10),
+			BPF_ALU64_IMM(BPF_SUB, BPF_REG_5, 10),
+			BPF_ALU64_IMM(BPF_SUB, BPF_REG_6, 10),
+			BPF_ALU64_IMM(BPF_SUB, BPF_REG_7, 10),
+			BPF_ALU64_IMM(BPF_SUB, BPF_REG_8, 10),
+			BPF_ALU64_IMM(BPF_SUB, BPF_REG_9, 10),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_0),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_2),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_3),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_4),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_5),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_6),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_7),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_8),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_9),
+			/* BPF_REG_0 == 155 */
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 155, 1),
+			BPF_EXIT_INSN(),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_1, BPF_REG_0),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_1, BPF_REG_1),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_1, BPF_REG_2),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_1, BPF_REG_3),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_1, BPF_REG_4),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_1, BPF_REG_5),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_1, BPF_REG_6),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_1, BPF_REG_7),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_1, BPF_REG_8),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_1, BPF_REG_9),
+			/* BPF_REG_1 == 456 */
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_1, 456, 1),
+			BPF_EXIT_INSN(),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_2, BPF_REG_0),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_2, BPF_REG_1),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_2, BPF_REG_2),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_2, BPF_REG_3),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_2, BPF_REG_4),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_2, BPF_REG_5),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_2, BPF_REG_6),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_2, BPF_REG_7),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_2, BPF_REG_8),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_2, BPF_REG_9),
+			/* BPF_REG_2 == 1358 */
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_2, 1358, 1),
+			BPF_EXIT_INSN(),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_3, BPF_REG_0),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_3, BPF_REG_1),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_3, BPF_REG_2),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_3, BPF_REG_3),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_3, BPF_REG_4),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_3, BPF_REG_5),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_3, BPF_REG_6),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_3, BPF_REG_7),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_3, BPF_REG_8),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_3, BPF_REG_9),
+			/* BPF_REG_3 == 4063 */
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_3, 4063, 1),
+			BPF_EXIT_INSN(),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_4, BPF_REG_0),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_4, BPF_REG_1),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_4, BPF_REG_2),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_4, BPF_REG_3),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_4, BPF_REG_4),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_4, BPF_REG_5),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_4, BPF_REG_6),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_4, BPF_REG_7),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_4, BPF_REG_8),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_4, BPF_REG_9),
+			/* BPF_REG_4 == 12177 */
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_4, 12177, 1),
+			BPF_EXIT_INSN(),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_5, BPF_REG_0),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_5, BPF_REG_1),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_5, BPF_REG_2),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_5, BPF_REG_3),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_5, BPF_REG_4),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_5, BPF_REG_5),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_5, BPF_REG_6),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_5, BPF_REG_7),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_5, BPF_REG_8),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_5, BPF_REG_9),
+			/* BPF_REG_5 == 36518 */
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_5, 36518, 1),
+			BPF_EXIT_INSN(),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_6, BPF_REG_0),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_6, BPF_REG_1),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_6, BPF_REG_2),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_6, BPF_REG_3),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_6, BPF_REG_4),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_6, BPF_REG_5),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_6, BPF_REG_6),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_6, BPF_REG_7),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_6, BPF_REG_8),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_6, BPF_REG_9),
+			/* BPF_REG_6 == 109540 */
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_6, 109540, 1),
+			BPF_EXIT_INSN(),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_7, BPF_REG_0),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_7, BPF_REG_1),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_7, BPF_REG_2),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_7, BPF_REG_3),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_7, BPF_REG_4),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_7, BPF_REG_5),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_7, BPF_REG_6),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_7, BPF_REG_7),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_7, BPF_REG_8),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_7, BPF_REG_9),
+			/* BPF_REG_7 == 328605 */
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_7, 328605, 1),
+			BPF_EXIT_INSN(),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_8, BPF_REG_0),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_8, BPF_REG_1),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_8, BPF_REG_2),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_8, BPF_REG_3),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_8, BPF_REG_4),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_8, BPF_REG_5),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_8, BPF_REG_6),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_8, BPF_REG_7),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_8, BPF_REG_8),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_8, BPF_REG_9),
+			/* BPF_REG_8 == 985799 */
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_8, 985799, 1),
+			BPF_EXIT_INSN(),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_9, BPF_REG_0),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_9, BPF_REG_1),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_9, BPF_REG_2),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_9, BPF_REG_3),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_9, BPF_REG_4),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_9, BPF_REG_5),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_9, BPF_REG_6),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_9, BPF_REG_7),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_9, BPF_REG_8),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_9, BPF_REG_9),
+			/* BPF_REG_9 == 2957380 */
+			BPF_ALU64_REG(BPF_MOV, BPF_REG_0, BPF_REG_9),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -1202,143 +1199,153 @@  static struct bpf_test tests[] = {
 	{
 		"INT: ADD 32-bit",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R0, 20),
-			BPF_ALU32_IMM(BPF_MOV, R1, 1),
-			BPF_ALU32_IMM(BPF_MOV, R2, 2),
-			BPF_ALU32_IMM(BPF_MOV, R3, 3),
-			BPF_ALU32_IMM(BPF_MOV, R4, 4),
-			BPF_ALU32_IMM(BPF_MOV, R5, 5),
-			BPF_ALU32_IMM(BPF_MOV, R6, 6),
-			BPF_ALU32_IMM(BPF_MOV, R7, 7),
-			BPF_ALU32_IMM(BPF_MOV, R8, 8),
-			BPF_ALU32_IMM(BPF_MOV, R9, 9),
-			BPF_ALU64_IMM(BPF_ADD, R1, 10),
-			BPF_ALU64_IMM(BPF_ADD, R2, 10),
-			BPF_ALU64_IMM(BPF_ADD, R3, 10),
-			BPF_ALU64_IMM(BPF_ADD, R4, 10),
-			BPF_ALU64_IMM(BPF_ADD, R5, 10),
-			BPF_ALU64_IMM(BPF_ADD, R6, 10),
-			BPF_ALU64_IMM(BPF_ADD, R7, 10),
-			BPF_ALU64_IMM(BPF_ADD, R8, 10),
-			BPF_ALU64_IMM(BPF_ADD, R9, 10),
-			BPF_ALU32_REG(BPF_ADD, R0, R1),
-			BPF_ALU32_REG(BPF_ADD, R0, R2),
-			BPF_ALU32_REG(BPF_ADD, R0, R3),
-			BPF_ALU32_REG(BPF_ADD, R0, R4),
-			BPF_ALU32_REG(BPF_ADD, R0, R5),
-			BPF_ALU32_REG(BPF_ADD, R0, R6),
-			BPF_ALU32_REG(BPF_ADD, R0, R7),
-			BPF_ALU32_REG(BPF_ADD, R0, R8),
-			BPF_ALU32_REG(BPF_ADD, R0, R9), /* R0 == 155 */
-			BPF_JMP_IMM(BPF_JEQ, R0, 155, 1),
-			BPF_EXIT_INSN(),
-			BPF_ALU32_REG(BPF_ADD, R1, R0),
-			BPF_ALU32_REG(BPF_ADD, R1, R1),
-			BPF_ALU32_REG(BPF_ADD, R1, R2),
-			BPF_ALU32_REG(BPF_ADD, R1, R3),
-			BPF_ALU32_REG(BPF_ADD, R1, R4),
-			BPF_ALU32_REG(BPF_ADD, R1, R5),
-			BPF_ALU32_REG(BPF_ADD, R1, R6),
-			BPF_ALU32_REG(BPF_ADD, R1, R7),
-			BPF_ALU32_REG(BPF_ADD, R1, R8),
-			BPF_ALU32_REG(BPF_ADD, R1, R9), /* R1 == 456 */
-			BPF_JMP_IMM(BPF_JEQ, R1, 456, 1),
-			BPF_EXIT_INSN(),
-			BPF_ALU32_REG(BPF_ADD, R2, R0),
-			BPF_ALU32_REG(BPF_ADD, R2, R1),
-			BPF_ALU32_REG(BPF_ADD, R2, R2),
-			BPF_ALU32_REG(BPF_ADD, R2, R3),
-			BPF_ALU32_REG(BPF_ADD, R2, R4),
-			BPF_ALU32_REG(BPF_ADD, R2, R5),
-			BPF_ALU32_REG(BPF_ADD, R2, R6),
-			BPF_ALU32_REG(BPF_ADD, R2, R7),
-			BPF_ALU32_REG(BPF_ADD, R2, R8),
-			BPF_ALU32_REG(BPF_ADD, R2, R9), /* R2 == 1358 */
-			BPF_JMP_IMM(BPF_JEQ, R2, 1358, 1),
-			BPF_EXIT_INSN(),
-			BPF_ALU32_REG(BPF_ADD, R3, R0),
-			BPF_ALU32_REG(BPF_ADD, R3, R1),
-			BPF_ALU32_REG(BPF_ADD, R3, R2),
-			BPF_ALU32_REG(BPF_ADD, R3, R3),
-			BPF_ALU32_REG(BPF_ADD, R3, R4),
-			BPF_ALU32_REG(BPF_ADD, R3, R5),
-			BPF_ALU32_REG(BPF_ADD, R3, R6),
-			BPF_ALU32_REG(BPF_ADD, R3, R7),
-			BPF_ALU32_REG(BPF_ADD, R3, R8),
-			BPF_ALU32_REG(BPF_ADD, R3, R9), /* R3 == 4063 */
-			BPF_JMP_IMM(BPF_JEQ, R3, 4063, 1),
-			BPF_EXIT_INSN(),
-			BPF_ALU32_REG(BPF_ADD, R4, R0),
-			BPF_ALU32_REG(BPF_ADD, R4, R1),
-			BPF_ALU32_REG(BPF_ADD, R4, R2),
-			BPF_ALU32_REG(BPF_ADD, R4, R3),
-			BPF_ALU32_REG(BPF_ADD, R4, R4),
-			BPF_ALU32_REG(BPF_ADD, R4, R5),
-			BPF_ALU32_REG(BPF_ADD, R4, R6),
-			BPF_ALU32_REG(BPF_ADD, R4, R7),
-			BPF_ALU32_REG(BPF_ADD, R4, R8),
-			BPF_ALU32_REG(BPF_ADD, R4, R9), /* R4 == 12177 */
-			BPF_JMP_IMM(BPF_JEQ, R4, 12177, 1),
-			BPF_EXIT_INSN(),
-			BPF_ALU32_REG(BPF_ADD, R5, R0),
-			BPF_ALU32_REG(BPF_ADD, R5, R1),
-			BPF_ALU32_REG(BPF_ADD, R5, R2),
-			BPF_ALU32_REG(BPF_ADD, R5, R3),
-			BPF_ALU32_REG(BPF_ADD, R5, R4),
-			BPF_ALU32_REG(BPF_ADD, R5, R5),
-			BPF_ALU32_REG(BPF_ADD, R5, R6),
-			BPF_ALU32_REG(BPF_ADD, R5, R7),
-			BPF_ALU32_REG(BPF_ADD, R5, R8),
-			BPF_ALU32_REG(BPF_ADD, R5, R9), /* R5 == 36518 */
-			BPF_JMP_IMM(BPF_JEQ, R5, 36518, 1),
-			BPF_EXIT_INSN(),
-			BPF_ALU32_REG(BPF_ADD, R6, R0),
-			BPF_ALU32_REG(BPF_ADD, R6, R1),
-			BPF_ALU32_REG(BPF_ADD, R6, R2),
-			BPF_ALU32_REG(BPF_ADD, R6, R3),
-			BPF_ALU32_REG(BPF_ADD, R6, R4),
-			BPF_ALU32_REG(BPF_ADD, R6, R5),
-			BPF_ALU32_REG(BPF_ADD, R6, R6),
-			BPF_ALU32_REG(BPF_ADD, R6, R7),
-			BPF_ALU32_REG(BPF_ADD, R6, R8),
-			BPF_ALU32_REG(BPF_ADD, R6, R9), /* R6 == 109540 */
-			BPF_JMP_IMM(BPF_JEQ, R6, 109540, 1),
-			BPF_EXIT_INSN(),
-			BPF_ALU32_REG(BPF_ADD, R7, R0),
-			BPF_ALU32_REG(BPF_ADD, R7, R1),
-			BPF_ALU32_REG(BPF_ADD, R7, R2),
-			BPF_ALU32_REG(BPF_ADD, R7, R3),
-			BPF_ALU32_REG(BPF_ADD, R7, R4),
-			BPF_ALU32_REG(BPF_ADD, R7, R5),
-			BPF_ALU32_REG(BPF_ADD, R7, R6),
-			BPF_ALU32_REG(BPF_ADD, R7, R7),
-			BPF_ALU32_REG(BPF_ADD, R7, R8),
-			BPF_ALU32_REG(BPF_ADD, R7, R9), /* R7 == 328605 */
-			BPF_JMP_IMM(BPF_JEQ, R7, 328605, 1),
-			BPF_EXIT_INSN(),
-			BPF_ALU32_REG(BPF_ADD, R8, R0),
-			BPF_ALU32_REG(BPF_ADD, R8, R1),
-			BPF_ALU32_REG(BPF_ADD, R8, R2),
-			BPF_ALU32_REG(BPF_ADD, R8, R3),
-			BPF_ALU32_REG(BPF_ADD, R8, R4),
-			BPF_ALU32_REG(BPF_ADD, R8, R5),
-			BPF_ALU32_REG(BPF_ADD, R8, R6),
-			BPF_ALU32_REG(BPF_ADD, R8, R7),
-			BPF_ALU32_REG(BPF_ADD, R8, R8),
-			BPF_ALU32_REG(BPF_ADD, R8, R9), /* R8 == 985799 */
-			BPF_JMP_IMM(BPF_JEQ, R8, 985799, 1),
-			BPF_EXIT_INSN(),
-			BPF_ALU32_REG(BPF_ADD, R9, R0),
-			BPF_ALU32_REG(BPF_ADD, R9, R1),
-			BPF_ALU32_REG(BPF_ADD, R9, R2),
-			BPF_ALU32_REG(BPF_ADD, R9, R3),
-			BPF_ALU32_REG(BPF_ADD, R9, R4),
-			BPF_ALU32_REG(BPF_ADD, R9, R5),
-			BPF_ALU32_REG(BPF_ADD, R9, R6),
-			BPF_ALU32_REG(BPF_ADD, R9, R7),
-			BPF_ALU32_REG(BPF_ADD, R9, R8),
-			BPF_ALU32_REG(BPF_ADD, R9, R9), /* R9 == 2957380 */
-			BPF_ALU32_REG(BPF_MOV, R0, R9),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 20),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_2, 2),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_3, 3),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_4, 4),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_5, 5),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_6, 6),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_7, 7),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_8, 8),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_9, 9),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, 10),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, 10),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_3, 10),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_4, 10),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_5, 10),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_6, 10),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_7, 10),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_8, 10),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_9, 10),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_0, BPF_REG_2),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_0, BPF_REG_3),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_0, BPF_REG_4),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_0, BPF_REG_5),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_0, BPF_REG_6),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_0, BPF_REG_7),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_0, BPF_REG_8),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_0, BPF_REG_9),
+			/* BPF_REG_0 == 155 */
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 155, 1),
+			BPF_EXIT_INSN(),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_1, BPF_REG_0),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_1, BPF_REG_1),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_1, BPF_REG_2),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_1, BPF_REG_3),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_1, BPF_REG_4),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_1, BPF_REG_5),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_1, BPF_REG_6),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_1, BPF_REG_7),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_1, BPF_REG_8),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_1, BPF_REG_9),
+			/* BPF_REG_1 == 456 */
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_1, 456, 1),
+			BPF_EXIT_INSN(),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_2, BPF_REG_0),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_2, BPF_REG_1),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_2, BPF_REG_2),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_2, BPF_REG_3),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_2, BPF_REG_4),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_2, BPF_REG_5),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_2, BPF_REG_6),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_2, BPF_REG_7),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_2, BPF_REG_8),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_2, BPF_REG_9),
+			/* BPF_REG_2 == 1358 */
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_2, 1358, 1),
+			BPF_EXIT_INSN(),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_3, BPF_REG_0),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_3, BPF_REG_1),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_3, BPF_REG_2),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_3, BPF_REG_3),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_3, BPF_REG_4),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_3, BPF_REG_5),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_3, BPF_REG_6),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_3, BPF_REG_7),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_3, BPF_REG_8),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_3, BPF_REG_9),
+			/* BPF_REG_3 == 4063 */
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_3, 4063, 1),
+			BPF_EXIT_INSN(),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_4, BPF_REG_0),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_4, BPF_REG_1),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_4, BPF_REG_2),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_4, BPF_REG_3),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_4, BPF_REG_4),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_4, BPF_REG_5),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_4, BPF_REG_6),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_4, BPF_REG_7),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_4, BPF_REG_8),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_4, BPF_REG_9),
+			/* BPF_REG_4 == 12177 */
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_4, 12177, 1),
+			BPF_EXIT_INSN(),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_5, BPF_REG_0),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_5, BPF_REG_1),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_5, BPF_REG_2),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_5, BPF_REG_3),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_5, BPF_REG_4),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_5, BPF_REG_5),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_5, BPF_REG_6),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_5, BPF_REG_7),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_5, BPF_REG_8),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_5, BPF_REG_9),
+			/* BPF_REG_5 == 36518 */
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_5, 36518, 1),
+			BPF_EXIT_INSN(),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_6, BPF_REG_0),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_6, BPF_REG_1),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_6, BPF_REG_2),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_6, BPF_REG_3),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_6, BPF_REG_4),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_6, BPF_REG_5),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_6, BPF_REG_6),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_6, BPF_REG_7),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_6, BPF_REG_8),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_6, BPF_REG_9),
+			/* BPF_REG_6 == 109540 */
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_6, 109540, 1),
+			BPF_EXIT_INSN(),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_7, BPF_REG_0),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_7, BPF_REG_1),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_7, BPF_REG_2),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_7, BPF_REG_3),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_7, BPF_REG_4),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_7, BPF_REG_5),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_7, BPF_REG_6),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_7, BPF_REG_7),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_7, BPF_REG_8),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_7, BPF_REG_9),
+			/* BPF_REG_7 == 328605 */
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_7, 328605, 1),
+			BPF_EXIT_INSN(),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_8, BPF_REG_0),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_8, BPF_REG_1),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_8, BPF_REG_2),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_8, BPF_REG_3),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_8, BPF_REG_4),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_8, BPF_REG_5),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_8, BPF_REG_6),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_8, BPF_REG_7),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_8, BPF_REG_8),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_8, BPF_REG_9),
+			/* BPF_REG_8 == 985799 */
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_8, 985799, 1),
+			BPF_EXIT_INSN(),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_9, BPF_REG_0),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_9, BPF_REG_1),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_9, BPF_REG_2),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_9, BPF_REG_3),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_9, BPF_REG_4),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_9, BPF_REG_5),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_9, BPF_REG_6),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_9, BPF_REG_7),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_9, BPF_REG_8),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_9, BPF_REG_9),
+			/* BPF_REG_9 == 2957380 */
+			BPF_ALU32_REG(BPF_MOV, BPF_REG_0, BPF_REG_9),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -1348,130 +1355,130 @@  static struct bpf_test tests[] = {
 	{	/* Mainly checking JIT here. */
 		"INT: SUB",
 		.u.insns_int = {
-			BPF_ALU64_IMM(BPF_MOV, R0, 0),
-			BPF_ALU64_IMM(BPF_MOV, R1, 1),
-			BPF_ALU64_IMM(BPF_MOV, R2, 2),
-			BPF_ALU64_IMM(BPF_MOV, R3, 3),
-			BPF_ALU64_IMM(BPF_MOV, R4, 4),
-			BPF_ALU64_IMM(BPF_MOV, R5, 5),
-			BPF_ALU64_IMM(BPF_MOV, R6, 6),
-			BPF_ALU64_IMM(BPF_MOV, R7, 7),
-			BPF_ALU64_IMM(BPF_MOV, R8, 8),
-			BPF_ALU64_IMM(BPF_MOV, R9, 9),
-			BPF_ALU64_REG(BPF_SUB, R0, R0),
-			BPF_ALU64_REG(BPF_SUB, R0, R1),
-			BPF_ALU64_REG(BPF_SUB, R0, R2),
-			BPF_ALU64_REG(BPF_SUB, R0, R3),
-			BPF_ALU64_REG(BPF_SUB, R0, R4),
-			BPF_ALU64_REG(BPF_SUB, R0, R5),
-			BPF_ALU64_REG(BPF_SUB, R0, R6),
-			BPF_ALU64_REG(BPF_SUB, R0, R7),
-			BPF_ALU64_REG(BPF_SUB, R0, R8),
-			BPF_ALU64_REG(BPF_SUB, R0, R9),
-			BPF_ALU64_IMM(BPF_SUB, R0, 10),
-			BPF_JMP_IMM(BPF_JEQ, R0, -55, 1),
-			BPF_EXIT_INSN(),
-			BPF_ALU64_REG(BPF_SUB, R1, R0),
-			BPF_ALU64_REG(BPF_SUB, R1, R2),
-			BPF_ALU64_REG(BPF_SUB, R1, R3),
-			BPF_ALU64_REG(BPF_SUB, R1, R4),
-			BPF_ALU64_REG(BPF_SUB, R1, R5),
-			BPF_ALU64_REG(BPF_SUB, R1, R6),
-			BPF_ALU64_REG(BPF_SUB, R1, R7),
-			BPF_ALU64_REG(BPF_SUB, R1, R8),
-			BPF_ALU64_REG(BPF_SUB, R1, R9),
-			BPF_ALU64_IMM(BPF_SUB, R1, 10),
-			BPF_ALU64_REG(BPF_SUB, R2, R0),
-			BPF_ALU64_REG(BPF_SUB, R2, R1),
-			BPF_ALU64_REG(BPF_SUB, R2, R3),
-			BPF_ALU64_REG(BPF_SUB, R2, R4),
-			BPF_ALU64_REG(BPF_SUB, R2, R5),
-			BPF_ALU64_REG(BPF_SUB, R2, R6),
-			BPF_ALU64_REG(BPF_SUB, R2, R7),
-			BPF_ALU64_REG(BPF_SUB, R2, R8),
-			BPF_ALU64_REG(BPF_SUB, R2, R9),
-			BPF_ALU64_IMM(BPF_SUB, R2, 10),
-			BPF_ALU64_REG(BPF_SUB, R3, R0),
-			BPF_ALU64_REG(BPF_SUB, R3, R1),
-			BPF_ALU64_REG(BPF_SUB, R3, R2),
-			BPF_ALU64_REG(BPF_SUB, R3, R4),
-			BPF_ALU64_REG(BPF_SUB, R3, R5),
-			BPF_ALU64_REG(BPF_SUB, R3, R6),
-			BPF_ALU64_REG(BPF_SUB, R3, R7),
-			BPF_ALU64_REG(BPF_SUB, R3, R8),
-			BPF_ALU64_REG(BPF_SUB, R3, R9),
-			BPF_ALU64_IMM(BPF_SUB, R3, 10),
-			BPF_ALU64_REG(BPF_SUB, R4, R0),
-			BPF_ALU64_REG(BPF_SUB, R4, R1),
-			BPF_ALU64_REG(BPF_SUB, R4, R2),
-			BPF_ALU64_REG(BPF_SUB, R4, R3),
-			BPF_ALU64_REG(BPF_SUB, R4, R5),
-			BPF_ALU64_REG(BPF_SUB, R4, R6),
-			BPF_ALU64_REG(BPF_SUB, R4, R7),
-			BPF_ALU64_REG(BPF_SUB, R4, R8),
-			BPF_ALU64_REG(BPF_SUB, R4, R9),
-			BPF_ALU64_IMM(BPF_SUB, R4, 10),
-			BPF_ALU64_REG(BPF_SUB, R5, R0),
-			BPF_ALU64_REG(BPF_SUB, R5, R1),
-			BPF_ALU64_REG(BPF_SUB, R5, R2),
-			BPF_ALU64_REG(BPF_SUB, R5, R3),
-			BPF_ALU64_REG(BPF_SUB, R5, R4),
-			BPF_ALU64_REG(BPF_SUB, R5, R6),
-			BPF_ALU64_REG(BPF_SUB, R5, R7),
-			BPF_ALU64_REG(BPF_SUB, R5, R8),
-			BPF_ALU64_REG(BPF_SUB, R5, R9),
-			BPF_ALU64_IMM(BPF_SUB, R5, 10),
-			BPF_ALU64_REG(BPF_SUB, R6, R0),
-			BPF_ALU64_REG(BPF_SUB, R6, R1),
-			BPF_ALU64_REG(BPF_SUB, R6, R2),
-			BPF_ALU64_REG(BPF_SUB, R6, R3),
-			BPF_ALU64_REG(BPF_SUB, R6, R4),
-			BPF_ALU64_REG(BPF_SUB, R6, R5),
-			BPF_ALU64_REG(BPF_SUB, R6, R7),
-			BPF_ALU64_REG(BPF_SUB, R6, R8),
-			BPF_ALU64_REG(BPF_SUB, R6, R9),
-			BPF_ALU64_IMM(BPF_SUB, R6, 10),
-			BPF_ALU64_REG(BPF_SUB, R7, R0),
-			BPF_ALU64_REG(BPF_SUB, R7, R1),
-			BPF_ALU64_REG(BPF_SUB, R7, R2),
-			BPF_ALU64_REG(BPF_SUB, R7, R3),
-			BPF_ALU64_REG(BPF_SUB, R7, R4),
-			BPF_ALU64_REG(BPF_SUB, R7, R5),
-			BPF_ALU64_REG(BPF_SUB, R7, R6),
-			BPF_ALU64_REG(BPF_SUB, R7, R8),
-			BPF_ALU64_REG(BPF_SUB, R7, R9),
-			BPF_ALU64_IMM(BPF_SUB, R7, 10),
-			BPF_ALU64_REG(BPF_SUB, R8, R0),
-			BPF_ALU64_REG(BPF_SUB, R8, R1),
-			BPF_ALU64_REG(BPF_SUB, R8, R2),
-			BPF_ALU64_REG(BPF_SUB, R8, R3),
-			BPF_ALU64_REG(BPF_SUB, R8, R4),
-			BPF_ALU64_REG(BPF_SUB, R8, R5),
-			BPF_ALU64_REG(BPF_SUB, R8, R6),
-			BPF_ALU64_REG(BPF_SUB, R8, R7),
-			BPF_ALU64_REG(BPF_SUB, R8, R9),
-			BPF_ALU64_IMM(BPF_SUB, R8, 10),
-			BPF_ALU64_REG(BPF_SUB, R9, R0),
-			BPF_ALU64_REG(BPF_SUB, R9, R1),
-			BPF_ALU64_REG(BPF_SUB, R9, R2),
-			BPF_ALU64_REG(BPF_SUB, R9, R3),
-			BPF_ALU64_REG(BPF_SUB, R9, R4),
-			BPF_ALU64_REG(BPF_SUB, R9, R5),
-			BPF_ALU64_REG(BPF_SUB, R9, R6),
-			BPF_ALU64_REG(BPF_SUB, R9, R7),
-			BPF_ALU64_REG(BPF_SUB, R9, R8),
-			BPF_ALU64_IMM(BPF_SUB, R9, 10),
-			BPF_ALU64_IMM(BPF_SUB, R0, 10),
-			BPF_ALU64_IMM(BPF_NEG, R0, 0),
-			BPF_ALU64_REG(BPF_SUB, R0, R1),
-			BPF_ALU64_REG(BPF_SUB, R0, R2),
-			BPF_ALU64_REG(BPF_SUB, R0, R3),
-			BPF_ALU64_REG(BPF_SUB, R0, R4),
-			BPF_ALU64_REG(BPF_SUB, R0, R5),
-			BPF_ALU64_REG(BPF_SUB, R0, R6),
-			BPF_ALU64_REG(BPF_SUB, R0, R7),
-			BPF_ALU64_REG(BPF_SUB, R0, R8),
-			BPF_ALU64_REG(BPF_SUB, R0, R9),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_0, 0),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_1, 1),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_2, 2),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_3, 3),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_4, 4),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_5, 5),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_6, 6),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_7, 7),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_8, 8),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_9, 9),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_0, BPF_REG_0),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_0, BPF_REG_1),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_0, BPF_REG_2),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_0, BPF_REG_3),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_0, BPF_REG_4),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_0, BPF_REG_5),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_0, BPF_REG_6),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_0, BPF_REG_7),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_0, BPF_REG_8),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_0, BPF_REG_9),
+			BPF_ALU64_IMM(BPF_SUB, BPF_REG_0, 10),
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, -55, 1),
+			BPF_EXIT_INSN(),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_1, BPF_REG_0),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_1, BPF_REG_2),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_1, BPF_REG_3),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_1, BPF_REG_4),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_1, BPF_REG_5),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_1, BPF_REG_6),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_1, BPF_REG_7),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_1, BPF_REG_8),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_1, BPF_REG_9),
+			BPF_ALU64_IMM(BPF_SUB, BPF_REG_1, 10),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_2, BPF_REG_0),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_2, BPF_REG_1),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_2, BPF_REG_3),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_2, BPF_REG_4),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_2, BPF_REG_5),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_2, BPF_REG_6),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_2, BPF_REG_7),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_2, BPF_REG_8),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_2, BPF_REG_9),
+			BPF_ALU64_IMM(BPF_SUB, BPF_REG_2, 10),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_3, BPF_REG_0),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_3, BPF_REG_1),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_3, BPF_REG_2),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_3, BPF_REG_4),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_3, BPF_REG_5),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_3, BPF_REG_6),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_3, BPF_REG_7),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_3, BPF_REG_8),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_3, BPF_REG_9),
+			BPF_ALU64_IMM(BPF_SUB, BPF_REG_3, 10),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_4, BPF_REG_0),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_4, BPF_REG_1),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_4, BPF_REG_2),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_4, BPF_REG_3),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_4, BPF_REG_5),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_4, BPF_REG_6),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_4, BPF_REG_7),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_4, BPF_REG_8),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_4, BPF_REG_9),
+			BPF_ALU64_IMM(BPF_SUB, BPF_REG_4, 10),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_5, BPF_REG_0),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_5, BPF_REG_1),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_5, BPF_REG_2),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_5, BPF_REG_3),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_5, BPF_REG_4),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_5, BPF_REG_6),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_5, BPF_REG_7),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_5, BPF_REG_8),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_5, BPF_REG_9),
+			BPF_ALU64_IMM(BPF_SUB, BPF_REG_5, 10),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_6, BPF_REG_0),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_6, BPF_REG_1),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_6, BPF_REG_2),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_6, BPF_REG_3),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_6, BPF_REG_4),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_6, BPF_REG_5),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_6, BPF_REG_7),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_6, BPF_REG_8),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_6, BPF_REG_9),
+			BPF_ALU64_IMM(BPF_SUB, BPF_REG_6, 10),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_7, BPF_REG_0),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_7, BPF_REG_1),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_7, BPF_REG_2),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_7, BPF_REG_3),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_7, BPF_REG_4),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_7, BPF_REG_5),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_7, BPF_REG_6),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_7, BPF_REG_8),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_7, BPF_REG_9),
+			BPF_ALU64_IMM(BPF_SUB, BPF_REG_7, 10),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_8, BPF_REG_0),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_8, BPF_REG_1),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_8, BPF_REG_2),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_8, BPF_REG_3),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_8, BPF_REG_4),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_8, BPF_REG_5),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_8, BPF_REG_6),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_8, BPF_REG_7),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_8, BPF_REG_9),
+			BPF_ALU64_IMM(BPF_SUB, BPF_REG_8, 10),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_9, BPF_REG_0),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_9, BPF_REG_1),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_9, BPF_REG_2),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_9, BPF_REG_3),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_9, BPF_REG_4),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_9, BPF_REG_5),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_9, BPF_REG_6),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_9, BPF_REG_7),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_9, BPF_REG_8),
+			BPF_ALU64_IMM(BPF_SUB, BPF_REG_9, 10),
+			BPF_ALU64_IMM(BPF_SUB, BPF_REG_0, 10),
+			BPF_ALU64_IMM(BPF_NEG, BPF_REG_0, 0),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_0, BPF_REG_1),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_0, BPF_REG_2),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_0, BPF_REG_3),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_0, BPF_REG_4),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_0, BPF_REG_5),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_0, BPF_REG_6),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_0, BPF_REG_7),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_0, BPF_REG_8),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_0, BPF_REG_9),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -1481,63 +1488,63 @@  static struct bpf_test tests[] = {
 	{	/* Mainly checking JIT here. */
 		"INT: XOR",
 		.u.insns_int = {
-			BPF_ALU64_REG(BPF_SUB, R0, R0),
-			BPF_ALU64_REG(BPF_XOR, R1, R1),
-			BPF_JMP_REG(BPF_JEQ, R0, R1, 1),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_0, BPF_REG_0),
+			BPF_ALU64_REG(BPF_XOR, BPF_REG_1, BPF_REG_1),
+			BPF_JMP_REG(BPF_JEQ, BPF_REG_0, BPF_REG_1, 1),
 			BPF_EXIT_INSN(),
-			BPF_ALU64_IMM(BPF_MOV, R0, 10),
-			BPF_ALU64_IMM(BPF_MOV, R1, -1),
-			BPF_ALU64_REG(BPF_SUB, R1, R1),
-			BPF_ALU64_REG(BPF_XOR, R2, R2),
-			BPF_JMP_REG(BPF_JEQ, R1, R2, 1),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_0, 10),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_1, -1),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_1, BPF_REG_1),
+			BPF_ALU64_REG(BPF_XOR, BPF_REG_2, BPF_REG_2),
+			BPF_JMP_REG(BPF_JEQ, BPF_REG_1, BPF_REG_2, 1),
 			BPF_EXIT_INSN(),
-			BPF_ALU64_REG(BPF_SUB, R2, R2),
-			BPF_ALU64_REG(BPF_XOR, R3, R3),
-			BPF_ALU64_IMM(BPF_MOV, R0, 10),
-			BPF_ALU64_IMM(BPF_MOV, R1, -1),
-			BPF_JMP_REG(BPF_JEQ, R2, R3, 1),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_2, BPF_REG_2),
+			BPF_ALU64_REG(BPF_XOR, BPF_REG_3, BPF_REG_3),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_0, 10),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_1, -1),
+			BPF_JMP_REG(BPF_JEQ, BPF_REG_2, BPF_REG_3, 1),
 			BPF_EXIT_INSN(),
-			BPF_ALU64_REG(BPF_SUB, R3, R3),
-			BPF_ALU64_REG(BPF_XOR, R4, R4),
-			BPF_ALU64_IMM(BPF_MOV, R2, 1),
-			BPF_ALU64_IMM(BPF_MOV, R5, -1),
-			BPF_JMP_REG(BPF_JEQ, R3, R4, 1),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_3, BPF_REG_3),
+			BPF_ALU64_REG(BPF_XOR, BPF_REG_4, BPF_REG_4),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_2, 1),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_5, -1),
+			BPF_JMP_REG(BPF_JEQ, BPF_REG_3, BPF_REG_4, 1),
 			BPF_EXIT_INSN(),
-			BPF_ALU64_REG(BPF_SUB, R4, R4),
-			BPF_ALU64_REG(BPF_XOR, R5, R5),
-			BPF_ALU64_IMM(BPF_MOV, R3, 1),
-			BPF_ALU64_IMM(BPF_MOV, R7, -1),
-			BPF_JMP_REG(BPF_JEQ, R5, R4, 1),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_4, BPF_REG_4),
+			BPF_ALU64_REG(BPF_XOR, BPF_REG_5, BPF_REG_5),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_3, 1),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_7, -1),
+			BPF_JMP_REG(BPF_JEQ, BPF_REG_5, BPF_REG_4, 1),
 			BPF_EXIT_INSN(),
-			BPF_ALU64_IMM(BPF_MOV, R5, 1),
-			BPF_ALU64_REG(BPF_SUB, R5, R5),
-			BPF_ALU64_REG(BPF_XOR, R6, R6),
-			BPF_ALU64_IMM(BPF_MOV, R1, 1),
-			BPF_ALU64_IMM(BPF_MOV, R8, -1),
-			BPF_JMP_REG(BPF_JEQ, R5, R6, 1),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_5, 1),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_5, BPF_REG_5),
+			BPF_ALU64_REG(BPF_XOR, BPF_REG_6, BPF_REG_6),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_1, 1),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_8, -1),
+			BPF_JMP_REG(BPF_JEQ, BPF_REG_5, BPF_REG_6, 1),
 			BPF_EXIT_INSN(),
-			BPF_ALU64_REG(BPF_SUB, R6, R6),
-			BPF_ALU64_REG(BPF_XOR, R7, R7),
-			BPF_JMP_REG(BPF_JEQ, R7, R6, 1),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_6, BPF_REG_6),
+			BPF_ALU64_REG(BPF_XOR, BPF_REG_7, BPF_REG_7),
+			BPF_JMP_REG(BPF_JEQ, BPF_REG_7, BPF_REG_6, 1),
 			BPF_EXIT_INSN(),
-			BPF_ALU64_REG(BPF_SUB, R7, R7),
-			BPF_ALU64_REG(BPF_XOR, R8, R8),
-			BPF_JMP_REG(BPF_JEQ, R7, R8, 1),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_7, BPF_REG_7),
+			BPF_ALU64_REG(BPF_XOR, BPF_REG_8, BPF_REG_8),
+			BPF_JMP_REG(BPF_JEQ, BPF_REG_7, BPF_REG_8, 1),
 			BPF_EXIT_INSN(),
-			BPF_ALU64_REG(BPF_SUB, R8, R8),
-			BPF_ALU64_REG(BPF_XOR, R9, R9),
-			BPF_JMP_REG(BPF_JEQ, R9, R8, 1),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_8, BPF_REG_8),
+			BPF_ALU64_REG(BPF_XOR, BPF_REG_9, BPF_REG_9),
+			BPF_JMP_REG(BPF_JEQ, BPF_REG_9, BPF_REG_8, 1),
 			BPF_EXIT_INSN(),
-			BPF_ALU64_REG(BPF_SUB, R9, R9),
-			BPF_ALU64_REG(BPF_XOR, R0, R0),
-			BPF_JMP_REG(BPF_JEQ, R9, R0, 1),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_9, BPF_REG_9),
+			BPF_ALU64_REG(BPF_XOR, BPF_REG_0, BPF_REG_0),
+			BPF_JMP_REG(BPF_JEQ, BPF_REG_9, BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
-			BPF_ALU64_REG(BPF_SUB, R1, R1),
-			BPF_ALU64_REG(BPF_XOR, R0, R0),
-			BPF_JMP_REG(BPF_JEQ, R9, R0, 2),
-			BPF_ALU64_IMM(BPF_MOV, R0, 0),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_1, BPF_REG_1),
+			BPF_ALU64_REG(BPF_XOR, BPF_REG_0, BPF_REG_0),
+			BPF_JMP_REG(BPF_JEQ, BPF_REG_9, BPF_REG_0, 2),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_0, 0),
 			BPF_EXIT_INSN(),
-			BPF_ALU64_IMM(BPF_MOV, R0, 1),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -1547,59 +1554,59 @@  static struct bpf_test tests[] = {
 	{	/* Mainly checking JIT here. */
 		"INT: MUL",
 		.u.insns_int = {
-			BPF_ALU64_IMM(BPF_MOV, R0, 11),
-			BPF_ALU64_IMM(BPF_MOV, R1, 1),
-			BPF_ALU64_IMM(BPF_MOV, R2, 2),
-			BPF_ALU64_IMM(BPF_MOV, R3, 3),
-			BPF_ALU64_IMM(BPF_MOV, R4, 4),
-			BPF_ALU64_IMM(BPF_MOV, R5, 5),
-			BPF_ALU64_IMM(BPF_MOV, R6, 6),
-			BPF_ALU64_IMM(BPF_MOV, R7, 7),
-			BPF_ALU64_IMM(BPF_MOV, R8, 8),
-			BPF_ALU64_IMM(BPF_MOV, R9, 9),
-			BPF_ALU64_REG(BPF_MUL, R0, R0),
-			BPF_ALU64_REG(BPF_MUL, R0, R1),
-			BPF_ALU64_REG(BPF_MUL, R0, R2),
-			BPF_ALU64_REG(BPF_MUL, R0, R3),
-			BPF_ALU64_REG(BPF_MUL, R0, R4),
-			BPF_ALU64_REG(BPF_MUL, R0, R5),
-			BPF_ALU64_REG(BPF_MUL, R0, R6),
-			BPF_ALU64_REG(BPF_MUL, R0, R7),
-			BPF_ALU64_REG(BPF_MUL, R0, R8),
-			BPF_ALU64_REG(BPF_MUL, R0, R9),
-			BPF_ALU64_IMM(BPF_MUL, R0, 10),
-			BPF_JMP_IMM(BPF_JEQ, R0, 439084800, 1),
-			BPF_EXIT_INSN(),
-			BPF_ALU64_REG(BPF_MUL, R1, R0),
-			BPF_ALU64_REG(BPF_MUL, R1, R2),
-			BPF_ALU64_REG(BPF_MUL, R1, R3),
-			BPF_ALU64_REG(BPF_MUL, R1, R4),
-			BPF_ALU64_REG(BPF_MUL, R1, R5),
-			BPF_ALU64_REG(BPF_MUL, R1, R6),
-			BPF_ALU64_REG(BPF_MUL, R1, R7),
-			BPF_ALU64_REG(BPF_MUL, R1, R8),
-			BPF_ALU64_REG(BPF_MUL, R1, R9),
-			BPF_ALU64_IMM(BPF_MUL, R1, 10),
-			BPF_ALU64_REG(BPF_MOV, R2, R1),
-			BPF_ALU64_IMM(BPF_RSH, R2, 32),
-			BPF_JMP_IMM(BPF_JEQ, R2, 0x5a924, 1),
-			BPF_EXIT_INSN(),
-			BPF_ALU64_IMM(BPF_LSH, R1, 32),
-			BPF_ALU64_IMM(BPF_ARSH, R1, 32),
-			BPF_JMP_IMM(BPF_JEQ, R1, 0xebb90000, 1),
-			BPF_EXIT_INSN(),
-			BPF_ALU64_REG(BPF_MUL, R2, R0),
-			BPF_ALU64_REG(BPF_MUL, R2, R1),
-			BPF_ALU64_REG(BPF_MUL, R2, R3),
-			BPF_ALU64_REG(BPF_MUL, R2, R4),
-			BPF_ALU64_REG(BPF_MUL, R2, R5),
-			BPF_ALU64_REG(BPF_MUL, R2, R6),
-			BPF_ALU64_REG(BPF_MUL, R2, R7),
-			BPF_ALU64_REG(BPF_MUL, R2, R8),
-			BPF_ALU64_REG(BPF_MUL, R2, R9),
-			BPF_ALU64_IMM(BPF_MUL, R2, 10),
-			BPF_ALU64_IMM(BPF_RSH, R2, 32),
-			BPF_ALU64_REG(BPF_MOV, R0, R2),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_0, 11),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_1, 1),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_2, 2),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_3, 3),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_4, 4),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_5, 5),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_6, 6),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_7, 7),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_8, 8),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_9, 9),
+			BPF_ALU64_REG(BPF_MUL, BPF_REG_0, BPF_REG_0),
+			BPF_ALU64_REG(BPF_MUL, BPF_REG_0, BPF_REG_1),
+			BPF_ALU64_REG(BPF_MUL, BPF_REG_0, BPF_REG_2),
+			BPF_ALU64_REG(BPF_MUL, BPF_REG_0, BPF_REG_3),
+			BPF_ALU64_REG(BPF_MUL, BPF_REG_0, BPF_REG_4),
+			BPF_ALU64_REG(BPF_MUL, BPF_REG_0, BPF_REG_5),
+			BPF_ALU64_REG(BPF_MUL, BPF_REG_0, BPF_REG_6),
+			BPF_ALU64_REG(BPF_MUL, BPF_REG_0, BPF_REG_7),
+			BPF_ALU64_REG(BPF_MUL, BPF_REG_0, BPF_REG_8),
+			BPF_ALU64_REG(BPF_MUL, BPF_REG_0, BPF_REG_9),
+			BPF_ALU64_IMM(BPF_MUL, BPF_REG_0, 10),
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 439084800, 1),
+			BPF_EXIT_INSN(),
+			BPF_ALU64_REG(BPF_MUL, BPF_REG_1, BPF_REG_0),
+			BPF_ALU64_REG(BPF_MUL, BPF_REG_1, BPF_REG_2),
+			BPF_ALU64_REG(BPF_MUL, BPF_REG_1, BPF_REG_3),
+			BPF_ALU64_REG(BPF_MUL, BPF_REG_1, BPF_REG_4),
+			BPF_ALU64_REG(BPF_MUL, BPF_REG_1, BPF_REG_5),
+			BPF_ALU64_REG(BPF_MUL, BPF_REG_1, BPF_REG_6),
+			BPF_ALU64_REG(BPF_MUL, BPF_REG_1, BPF_REG_7),
+			BPF_ALU64_REG(BPF_MUL, BPF_REG_1, BPF_REG_8),
+			BPF_ALU64_REG(BPF_MUL, BPF_REG_1, BPF_REG_9),
+			BPF_ALU64_IMM(BPF_MUL, BPF_REG_1, 10),
+			BPF_ALU64_REG(BPF_MOV, BPF_REG_2, BPF_REG_1),
+			BPF_ALU64_IMM(BPF_RSH, BPF_REG_2, 32),
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_2, 0x5a924, 1),
+			BPF_EXIT_INSN(),
+			BPF_ALU64_IMM(BPF_LSH, BPF_REG_1, 32),
+			BPF_ALU64_IMM(BPF_ARSH, BPF_REG_1, 32),
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_1, 0xebb90000, 1),
+			BPF_EXIT_INSN(),
+			BPF_ALU64_REG(BPF_MUL, BPF_REG_2, BPF_REG_0),
+			BPF_ALU64_REG(BPF_MUL, BPF_REG_2, BPF_REG_1),
+			BPF_ALU64_REG(BPF_MUL, BPF_REG_2, BPF_REG_3),
+			BPF_ALU64_REG(BPF_MUL, BPF_REG_2, BPF_REG_4),
+			BPF_ALU64_REG(BPF_MUL, BPF_REG_2, BPF_REG_5),
+			BPF_ALU64_REG(BPF_MUL, BPF_REG_2, BPF_REG_6),
+			BPF_ALU64_REG(BPF_MUL, BPF_REG_2, BPF_REG_7),
+			BPF_ALU64_REG(BPF_MUL, BPF_REG_2, BPF_REG_8),
+			BPF_ALU64_REG(BPF_MUL, BPF_REG_2, BPF_REG_9),
+			BPF_ALU64_IMM(BPF_MUL, BPF_REG_2, 10),
+			BPF_ALU64_IMM(BPF_RSH, BPF_REG_2, 32),
+			BPF_ALU64_REG(BPF_MOV, BPF_REG_0, BPF_REG_2),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -1609,17 +1616,17 @@  static struct bpf_test tests[] = {
 	{
 		"INT: ALU MIX",
 		.u.insns_int = {
-			BPF_ALU64_IMM(BPF_MOV, R0, 11),
-			BPF_ALU64_IMM(BPF_ADD, R0, -1),
-			BPF_ALU64_IMM(BPF_MOV, R2, 2),
-			BPF_ALU64_IMM(BPF_XOR, R2, 3),
-			BPF_ALU64_REG(BPF_DIV, R0, R2),
-			BPF_JMP_IMM(BPF_JEQ, R0, 10, 1),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_0, 11),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_0, -1),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_2, 2),
+			BPF_ALU64_IMM(BPF_XOR, BPF_REG_2, 3),
+			BPF_ALU64_REG(BPF_DIV, BPF_REG_0, BPF_REG_2),
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 10, 1),
 			BPF_EXIT_INSN(),
-			BPF_ALU64_IMM(BPF_MOD, R0, 3),
-			BPF_JMP_IMM(BPF_JEQ, R0, 1, 1),
+			BPF_ALU64_IMM(BPF_MOD, BPF_REG_0, 3),
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 1, 1),
 			BPF_EXIT_INSN(),
-			BPF_ALU64_IMM(BPF_MOV, R0, -1),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_0, -1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -1629,35 +1636,39 @@  static struct bpf_test tests[] = {
 	{
 		"INT: shifts by register",
 		.u.insns_int = {
-			BPF_MOV64_IMM(R0, -1234),
-			BPF_MOV64_IMM(R1, 1),
-			BPF_ALU32_REG(BPF_RSH, R0, R1),
-			BPF_JMP_IMM(BPF_JEQ, R0, 0x7ffffd97, 1),
+			BPF_MOV64_IMM(BPF_REG_0, -1234),
+			BPF_MOV64_IMM(BPF_REG_1, 1),
+			BPF_ALU32_REG(BPF_RSH, BPF_REG_0, BPF_REG_1),
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0x7ffffd97, 1),
 			BPF_EXIT_INSN(),
-			BPF_MOV64_IMM(R2, 1),
-			BPF_ALU64_REG(BPF_LSH, R0, R2),
-			BPF_MOV32_IMM(R4, -1234),
-			BPF_JMP_REG(BPF_JEQ, R0, R4, 1),
+			BPF_MOV64_IMM(BPF_REG_2, 1),
+			BPF_ALU64_REG(BPF_LSH, BPF_REG_0, BPF_REG_2),
+			BPF_MOV32_IMM(BPF_REG_4, -1234),
+			BPF_JMP_REG(BPF_JEQ, BPF_REG_0, BPF_REG_4, 1),
 			BPF_EXIT_INSN(),
-			BPF_ALU64_IMM(BPF_AND, R4, 63),
-			BPF_ALU64_REG(BPF_LSH, R0, R4), /* R0 <= 46 */
-			BPF_MOV64_IMM(R3, 47),
-			BPF_ALU64_REG(BPF_ARSH, R0, R3),
-			BPF_JMP_IMM(BPF_JEQ, R0, -617, 1),
+			BPF_ALU64_IMM(BPF_AND, BPF_REG_4, 63),
+			BPF_ALU64_REG(BPF_LSH, BPF_REG_0, BPF_REG_4),
+			/* BPF_REG_0 <= 46 */
+			BPF_MOV64_IMM(BPF_REG_3, 47),
+			BPF_ALU64_REG(BPF_ARSH, BPF_REG_0, BPF_REG_3),
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, -617, 1),
 			BPF_EXIT_INSN(),
-			BPF_MOV64_IMM(R2, 1),
-			BPF_ALU64_REG(BPF_LSH, R4, R2), /* R4 = 46 << 1 */
-			BPF_JMP_IMM(BPF_JEQ, R4, 92, 1),
+			BPF_MOV64_IMM(BPF_REG_2, 1),
+			BPF_ALU64_REG(BPF_LSH, BPF_REG_4, BPF_REG_2),
+			/* BPF_REG_4 = 46 << 1 */
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_4, 92, 1),
 			BPF_EXIT_INSN(),
-			BPF_MOV64_IMM(R4, 4),
-			BPF_ALU64_REG(BPF_LSH, R4, R4), /* R4 = 4 << 4 */
-			BPF_JMP_IMM(BPF_JEQ, R4, 64, 1),
+			BPF_MOV64_IMM(BPF_REG_4, 4),
+			BPF_ALU64_REG(BPF_LSH, BPF_REG_4, BPF_REG_4),
+			/* BPF_REG_4 = 4 << 4 */
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_4, 64, 1),
 			BPF_EXIT_INSN(),
-			BPF_MOV64_IMM(R4, 5),
-			BPF_ALU32_REG(BPF_LSH, R4, R4), /* R4 = 5 << 5 */
-			BPF_JMP_IMM(BPF_JEQ, R4, 160, 1),
+			BPF_MOV64_IMM(BPF_REG_4, 5),
+			BPF_ALU32_REG(BPF_LSH, BPF_REG_4, BPF_REG_4),
+			/* BPF_REG_4 = 5 << 5 */
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_4, 160, 1),
 			BPF_EXIT_INSN(),
-			BPF_MOV64_IMM(R0, -1),
+			BPF_MOV64_IMM(BPF_REG_0, -1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -1667,14 +1678,14 @@  static struct bpf_test tests[] = {
 	{
 		"INT: DIV + ABS",
 		.u.insns_int = {
-			BPF_ALU64_REG(BPF_MOV, R6, R1),
+			BPF_ALU64_REG(BPF_MOV, BPF_REG_6, BPF_REG_1),
 			BPF_LD_ABS(BPF_B, 3),
-			BPF_ALU64_IMM(BPF_MOV, R2, 2),
-			BPF_ALU32_REG(BPF_DIV, R0, R2),
-			BPF_ALU64_REG(BPF_MOV, R8, R0),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_2, 2),
+			BPF_ALU32_REG(BPF_DIV, BPF_REG_0, BPF_REG_2),
+			BPF_ALU64_REG(BPF_MOV, BPF_REG_8, BPF_REG_0),
 			BPF_LD_ABS(BPF_B, 4),
-			BPF_ALU64_REG(BPF_ADD, R8, R0),
-			BPF_LD_IND(BPF_B, R8, -70),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_8, BPF_REG_0),
+			BPF_LD_IND(BPF_B, BPF_REG_8, -70),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -1684,10 +1695,10 @@  static struct bpf_test tests[] = {
 	{
 		"INT: DIV by zero",
 		.u.insns_int = {
-			BPF_ALU64_REG(BPF_MOV, R6, R1),
-			BPF_ALU64_IMM(BPF_MOV, R7, 0),
+			BPF_ALU64_REG(BPF_MOV, BPF_REG_6, BPF_REG_1),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_7, 0),
 			BPF_LD_ABS(BPF_B, 3),
-			BPF_ALU32_REG(BPF_DIV, R0, R7),
+			BPF_ALU32_REG(BPF_DIV, BPF_REG_0, BPF_REG_7),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2023,19 +2034,20 @@  static struct bpf_test tests[] = {
 	{
 		"load 64-bit immediate",
 		.u.insns_int = {
-			BPF_LD_IMM64(R1, 0x567800001234LL),
-			BPF_MOV64_REG(R2, R1),
-			BPF_MOV64_REG(R3, R2),
-			BPF_ALU64_IMM(BPF_RSH, R2, 32),
-			BPF_ALU64_IMM(BPF_LSH, R3, 32),
-			BPF_ALU64_IMM(BPF_RSH, R3, 32),
-			BPF_ALU64_IMM(BPF_MOV, R0, 0),
-			BPF_JMP_IMM(BPF_JEQ, R2, 0x5678, 1),
+			BPF_LD_IMM64(BPF_REG_1, 0x567800001234LL),
+			BPF_MOV64_REG(BPF_REG_2, BPF_REG_1),
+			BPF_MOV64_REG(BPF_REG_3, BPF_REG_2),
+			BPF_ALU64_IMM(BPF_RSH, BPF_REG_2, 32),
+			BPF_ALU64_IMM(BPF_LSH, BPF_REG_3, 32),
+			BPF_ALU64_IMM(BPF_RSH, BPF_REG_3, 32),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_0, 0),
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_2, 0x5678, 1),
 			BPF_EXIT_INSN(),
-			BPF_JMP_IMM(BPF_JEQ, R3, 0x1234, 1),
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_3, 0x1234, 1),
 			BPF_EXIT_INSN(),
-			BPF_LD_IMM64(R0, 0x1ffffffffLL),
-			BPF_ALU64_IMM(BPF_RSH, R0, 32), /* R0 = 1 */
+			BPF_LD_IMM64(BPF_REG_0, 0x1ffffffffLL),
+			BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 32),
+			/* BPF_REG_0 = 1 */
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2045,38 +2057,38 @@  static struct bpf_test tests[] = {
 	{
 		"nmap reduced",
 		.u.insns_int = {
-			BPF_MOV64_REG(R6, R1),
+			BPF_MOV64_REG(BPF_REG_6, BPF_REG_1),
 			BPF_LD_ABS(BPF_H, 12),
-			BPF_JMP_IMM(BPF_JNE, R0, 0x806, 28),
+			BPF_JMP_IMM(BPF_JNE, BPF_REG_0, 0x806, 28),
 			BPF_LD_ABS(BPF_H, 12),
-			BPF_JMP_IMM(BPF_JNE, R0, 0x806, 26),
-			BPF_MOV32_IMM(R0, 18),
-			BPF_STX_MEM(BPF_W, R10, R0, -64),
-			BPF_LDX_MEM(BPF_W, R7, R10, -64),
-			BPF_LD_IND(BPF_W, R7, 14),
-			BPF_STX_MEM(BPF_W, R10, R0, -60),
-			BPF_MOV32_IMM(R0, 280971478),
-			BPF_STX_MEM(BPF_W, R10, R0, -56),
-			BPF_LDX_MEM(BPF_W, R7, R10, -56),
-			BPF_LDX_MEM(BPF_W, R0, R10, -60),
-			BPF_ALU32_REG(BPF_SUB, R0, R7),
-			BPF_JMP_IMM(BPF_JNE, R0, 0, 15),
+			BPF_JMP_IMM(BPF_JNE, BPF_REG_0, 0x806, 26),
+			BPF_MOV32_IMM(BPF_REG_0, 18),
+			BPF_STX_MEM(BPF_W, BPF_REG_10, BPF_REG_0, -64),
+			BPF_LDX_MEM(BPF_W, BPF_REG_7, BPF_REG_10, -64),
+			BPF_LD_IND(BPF_W, BPF_REG_7, 14),
+			BPF_STX_MEM(BPF_W, BPF_REG_10, BPF_REG_0, -60),
+			BPF_MOV32_IMM(BPF_REG_0, 280971478),
+			BPF_STX_MEM(BPF_W, BPF_REG_10, BPF_REG_0, -56),
+			BPF_LDX_MEM(BPF_W, BPF_REG_7, BPF_REG_10, -56),
+			BPF_LDX_MEM(BPF_W, BPF_REG_0, BPF_REG_10, -60),
+			BPF_ALU32_REG(BPF_SUB, BPF_REG_0, BPF_REG_7),
+			BPF_JMP_IMM(BPF_JNE, BPF_REG_0, 0, 15),
 			BPF_LD_ABS(BPF_H, 12),
-			BPF_JMP_IMM(BPF_JNE, R0, 0x806, 13),
-			BPF_MOV32_IMM(R0, 22),
-			BPF_STX_MEM(BPF_W, R10, R0, -56),
-			BPF_LDX_MEM(BPF_W, R7, R10, -56),
-			BPF_LD_IND(BPF_H, R7, 14),
-			BPF_STX_MEM(BPF_W, R10, R0, -52),
-			BPF_MOV32_IMM(R0, 17366),
-			BPF_STX_MEM(BPF_W, R10, R0, -48),
-			BPF_LDX_MEM(BPF_W, R7, R10, -48),
-			BPF_LDX_MEM(BPF_W, R0, R10, -52),
-			BPF_ALU32_REG(BPF_SUB, R0, R7),
-			BPF_JMP_IMM(BPF_JNE, R0, 0, 2),
-			BPF_MOV32_IMM(R0, 256),
+			BPF_JMP_IMM(BPF_JNE, BPF_REG_0, 0x806, 13),
+			BPF_MOV32_IMM(BPF_REG_0, 22),
+			BPF_STX_MEM(BPF_W, BPF_REG_10, BPF_REG_0, -56),
+			BPF_LDX_MEM(BPF_W, BPF_REG_7, BPF_REG_10, -56),
+			BPF_LD_IND(BPF_H, BPF_REG_7, 14),
+			BPF_STX_MEM(BPF_W, BPF_REG_10, BPF_REG_0, -52),
+			BPF_MOV32_IMM(BPF_REG_0, 17366),
+			BPF_STX_MEM(BPF_W, BPF_REG_10, BPF_REG_0, -48),
+			BPF_LDX_MEM(BPF_W, BPF_REG_7, BPF_REG_10, -48),
+			BPF_LDX_MEM(BPF_W, BPF_REG_0, BPF_REG_10, -52),
+			BPF_ALU32_REG(BPF_SUB, BPF_REG_0, BPF_REG_7),
+			BPF_JMP_IMM(BPF_JNE, BPF_REG_0, 0, 2),
+			BPF_MOV32_IMM(BPF_REG_0, 256),
 			BPF_EXIT_INSN(),
-			BPF_MOV32_IMM(R0, 0),
+			BPF_MOV32_IMM(BPF_REG_0, 0),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2089,8 +2101,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_MOV_X: dst = 2",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R1, 2),
-			BPF_ALU32_REG(BPF_MOV, R0, R1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 2),
+			BPF_ALU32_REG(BPF_MOV, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2100,8 +2112,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_MOV_X: dst = 4294967295",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R1, 4294967295U),
-			BPF_ALU32_REG(BPF_MOV, R0, R1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 4294967295U),
+			BPF_ALU32_REG(BPF_MOV, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2111,8 +2123,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_MOV_X: dst = 2",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R1, 2),
-			BPF_ALU64_REG(BPF_MOV, R0, R1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 2),
+			BPF_ALU64_REG(BPF_MOV, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2122,8 +2134,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_MOV_X: dst = 4294967295",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R1, 4294967295U),
-			BPF_ALU64_REG(BPF_MOV, R0, R1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 4294967295U),
+			BPF_ALU64_REG(BPF_MOV, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2134,7 +2146,7 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_MOV_K: dst = 2",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R0, 2),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 2),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2144,7 +2156,7 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_MOV_K: dst = 4294967295",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R0, 4294967295U),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 4294967295U),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2154,13 +2166,13 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_MOV_K: 0x0000ffffffff0000 = 0x00000000ffffffff",
 		.u.insns_int = {
-			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
-			BPF_LD_IMM64(R3, 0x00000000ffffffffLL),
-			BPF_ALU32_IMM(BPF_MOV, R2, 0xffffffff),
-			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
-			BPF_MOV32_IMM(R0, 2),
+			BPF_LD_IMM64(BPF_REG_2, 0x0000ffffffff0000LL),
+			BPF_LD_IMM64(BPF_REG_3, 0x00000000ffffffffLL),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_2, 0xffffffff),
+			BPF_JMP_REG(BPF_JEQ, BPF_REG_2, BPF_REG_3, 2),
+			BPF_MOV32_IMM(BPF_REG_0, 2),
 			BPF_EXIT_INSN(),
-			BPF_MOV32_IMM(R0, 1),
+			BPF_MOV32_IMM(BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2170,7 +2182,7 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_MOV_K: dst = 2",
 		.u.insns_int = {
-			BPF_ALU64_IMM(BPF_MOV, R0, 2),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_0, 2),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2180,7 +2192,7 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_MOV_K: dst = 2147483647",
 		.u.insns_int = {
-			BPF_ALU64_IMM(BPF_MOV, R0, 2147483647),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_0, 2147483647),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2190,13 +2202,13 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_MOV_K: dst = 0x0",
 		.u.insns_int = {
-			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
-			BPF_LD_IMM64(R3, 0x0),
-			BPF_ALU64_IMM(BPF_MOV, R2, 0x0),
-			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
-			BPF_MOV32_IMM(R0, 2),
+			BPF_LD_IMM64(BPF_REG_2, 0x0000ffffffff0000LL),
+			BPF_LD_IMM64(BPF_REG_3, 0x0),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_2, 0x0),
+			BPF_JMP_REG(BPF_JEQ, BPF_REG_2, BPF_REG_3, 2),
+			BPF_MOV32_IMM(BPF_REG_0, 2),
 			BPF_EXIT_INSN(),
-			BPF_MOV32_IMM(R0, 1),
+			BPF_MOV32_IMM(BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2206,13 +2218,13 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_MOV_K: dst = -1",
 		.u.insns_int = {
-			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
-			BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
-			BPF_ALU64_IMM(BPF_MOV, R2, 0xffffffff),
-			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
-			BPF_MOV32_IMM(R0, 2),
+			BPF_LD_IMM64(BPF_REG_2, 0x0000ffffffff0000LL),
+			BPF_LD_IMM64(BPF_REG_3, 0xffffffffffffffffLL),
+			BPF_ALU64_IMM(BPF_MOV, BPF_REG_2, 0xffffffff),
+			BPF_JMP_REG(BPF_JEQ, BPF_REG_2, BPF_REG_3, 2),
+			BPF_MOV32_IMM(BPF_REG_0, 2),
 			BPF_EXIT_INSN(),
-			BPF_MOV32_IMM(R0, 1),
+			BPF_MOV32_IMM(BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2223,9 +2235,9 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_ADD_X: 1 + 2 = 3",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 1),
-			BPF_ALU32_IMM(BPF_MOV, R1, 2),
-			BPF_ALU32_REG(BPF_ADD, R0, R1),
+			BPF_LD_IMM64(BPF_REG_0, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 2),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2235,9 +2247,9 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_ADD_X: 1 + 4294967294 = 4294967295",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 1),
-			BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
-			BPF_ALU32_REG(BPF_ADD, R0, R1),
+			BPF_LD_IMM64(BPF_REG_0, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 4294967294U),
+			BPF_ALU32_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2247,9 +2259,9 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_ADD_X: 1 + 2 = 3",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 1),
-			BPF_ALU32_IMM(BPF_MOV, R1, 2),
-			BPF_ALU64_REG(BPF_ADD, R0, R1),
+			BPF_LD_IMM64(BPF_REG_0, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 2),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2259,9 +2271,9 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_ADD_X: 1 + 4294967294 = 4294967295",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 1),
-			BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
-			BPF_ALU64_REG(BPF_ADD, R0, R1),
+			BPF_LD_IMM64(BPF_REG_0, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 4294967294U),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2272,8 +2284,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_ADD_K: 1 + 2 = 3",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 1),
-			BPF_ALU32_IMM(BPF_ADD, R0, 2),
+			BPF_LD_IMM64(BPF_REG_0, 1),
+			BPF_ALU32_IMM(BPF_ADD, BPF_REG_0, 2),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2283,8 +2295,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_ADD_K: 3 + 0 = 3",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 3),
-			BPF_ALU32_IMM(BPF_ADD, R0, 0),
+			BPF_LD_IMM64(BPF_REG_0, 3),
+			BPF_ALU32_IMM(BPF_ADD, BPF_REG_0, 0),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2294,8 +2306,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_ADD_K: 1 + 4294967294 = 4294967295",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 1),
-			BPF_ALU32_IMM(BPF_ADD, R0, 4294967294U),
+			BPF_LD_IMM64(BPF_REG_0, 1),
+			BPF_ALU32_IMM(BPF_ADD, BPF_REG_0, 4294967294U),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2305,13 +2317,13 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_ADD_K: 0 + (-1) = 0x00000000ffffffff",
 		.u.insns_int = {
-			BPF_LD_IMM64(R2, 0x0),
-			BPF_LD_IMM64(R3, 0x00000000ffffffff),
-			BPF_ALU32_IMM(BPF_ADD, R2, 0xffffffff),
-			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
-			BPF_MOV32_IMM(R0, 2),
+			BPF_LD_IMM64(BPF_REG_2, 0x0),
+			BPF_LD_IMM64(BPF_REG_3, 0x00000000ffffffff),
+			BPF_ALU32_IMM(BPF_ADD, BPF_REG_2, 0xffffffff),
+			BPF_JMP_REG(BPF_JEQ, BPF_REG_2, BPF_REG_3, 2),
+			BPF_MOV32_IMM(BPF_REG_0, 2),
 			BPF_EXIT_INSN(),
-			BPF_MOV32_IMM(R0, 1),
+			BPF_MOV32_IMM(BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2321,8 +2333,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_ADD_K: 1 + 2 = 3",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 1),
-			BPF_ALU64_IMM(BPF_ADD, R0, 2),
+			BPF_LD_IMM64(BPF_REG_0, 1),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_0, 2),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2332,8 +2344,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_ADD_K: 3 + 0 = 3",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 3),
-			BPF_ALU64_IMM(BPF_ADD, R0, 0),
+			BPF_LD_IMM64(BPF_REG_0, 3),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_0, 0),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2343,8 +2355,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_ADD_K: 1 + 2147483646 = 2147483647",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 1),
-			BPF_ALU64_IMM(BPF_ADD, R0, 2147483646),
+			BPF_LD_IMM64(BPF_REG_0, 1),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_0, 2147483646),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2354,8 +2366,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_ADD_K: 2147483646 + -2147483647 = -1",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 2147483646),
-			BPF_ALU64_IMM(BPF_ADD, R0, -2147483647),
+			BPF_LD_IMM64(BPF_REG_0, 2147483646),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_0, -2147483647),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2365,13 +2377,13 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_ADD_K: 1 + 0 = 1",
 		.u.insns_int = {
-			BPF_LD_IMM64(R2, 0x1),
-			BPF_LD_IMM64(R3, 0x1),
-			BPF_ALU64_IMM(BPF_ADD, R2, 0x0),
-			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
-			BPF_MOV32_IMM(R0, 2),
+			BPF_LD_IMM64(BPF_REG_2, 0x1),
+			BPF_LD_IMM64(BPF_REG_3, 0x1),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, 0x0),
+			BPF_JMP_REG(BPF_JEQ, BPF_REG_2, BPF_REG_3, 2),
+			BPF_MOV32_IMM(BPF_REG_0, 2),
 			BPF_EXIT_INSN(),
-			BPF_MOV32_IMM(R0, 1),
+			BPF_MOV32_IMM(BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2381,13 +2393,13 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_ADD_K: 0 + (-1) = 0xffffffffffffffff",
 		.u.insns_int = {
-			BPF_LD_IMM64(R2, 0x0),
-			BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
-			BPF_ALU64_IMM(BPF_ADD, R2, 0xffffffff),
-			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
-			BPF_MOV32_IMM(R0, 2),
+			BPF_LD_IMM64(BPF_REG_2, 0x0),
+			BPF_LD_IMM64(BPF_REG_3, 0xffffffffffffffffLL),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, 0xffffffff),
+			BPF_JMP_REG(BPF_JEQ, BPF_REG_2, BPF_REG_3, 2),
+			BPF_MOV32_IMM(BPF_REG_0, 2),
 			BPF_EXIT_INSN(),
-			BPF_MOV32_IMM(R0, 1),
+			BPF_MOV32_IMM(BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2398,9 +2410,9 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_SUB_X: 3 - 1 = 2",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 3),
-			BPF_ALU32_IMM(BPF_MOV, R1, 1),
-			BPF_ALU32_REG(BPF_SUB, R0, R1),
+			BPF_LD_IMM64(BPF_REG_0, 3),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 1),
+			BPF_ALU32_REG(BPF_SUB, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2410,9 +2422,9 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_SUB_X: 4294967295 - 4294967294 = 1",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 4294967295U),
-			BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
-			BPF_ALU32_REG(BPF_SUB, R0, R1),
+			BPF_LD_IMM64(BPF_REG_0, 4294967295U),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 4294967294U),
+			BPF_ALU32_REG(BPF_SUB, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2422,9 +2434,9 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_SUB_X: 3 - 1 = 2",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 3),
-			BPF_ALU32_IMM(BPF_MOV, R1, 1),
-			BPF_ALU64_REG(BPF_SUB, R0, R1),
+			BPF_LD_IMM64(BPF_REG_0, 3),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 1),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2434,9 +2446,9 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_SUB_X: 4294967295 - 4294967294 = 1",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 4294967295U),
-			BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
-			BPF_ALU64_REG(BPF_SUB, R0, R1),
+			BPF_LD_IMM64(BPF_REG_0, 4294967295U),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 4294967294U),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2447,8 +2459,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_SUB_K: 3 - 1 = 2",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 3),
-			BPF_ALU32_IMM(BPF_SUB, R0, 1),
+			BPF_LD_IMM64(BPF_REG_0, 3),
+			BPF_ALU32_IMM(BPF_SUB, BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2458,8 +2470,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_SUB_K: 3 - 0 = 3",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 3),
-			BPF_ALU32_IMM(BPF_SUB, R0, 0),
+			BPF_LD_IMM64(BPF_REG_0, 3),
+			BPF_ALU32_IMM(BPF_SUB, BPF_REG_0, 0),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2469,8 +2481,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_SUB_K: 4294967295 - 4294967294 = 1",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 4294967295U),
-			BPF_ALU32_IMM(BPF_SUB, R0, 4294967294U),
+			BPF_LD_IMM64(BPF_REG_0, 4294967295U),
+			BPF_ALU32_IMM(BPF_SUB, BPF_REG_0, 4294967294U),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2480,8 +2492,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_SUB_K: 3 - 1 = 2",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 3),
-			BPF_ALU64_IMM(BPF_SUB, R0, 1),
+			BPF_LD_IMM64(BPF_REG_0, 3),
+			BPF_ALU64_IMM(BPF_SUB, BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2491,8 +2503,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_SUB_K: 3 - 0 = 3",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 3),
-			BPF_ALU64_IMM(BPF_SUB, R0, 0),
+			BPF_LD_IMM64(BPF_REG_0, 3),
+			BPF_ALU64_IMM(BPF_SUB, BPF_REG_0, 0),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2502,8 +2514,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_SUB_K: 4294967294 - 4294967295 = -1",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 4294967294U),
-			BPF_ALU64_IMM(BPF_SUB, R0, 4294967295U),
+			BPF_LD_IMM64(BPF_REG_0, 4294967294U),
+			BPF_ALU64_IMM(BPF_SUB, BPF_REG_0, 4294967295U),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2513,8 +2525,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_SUB_K: 2147483646 - 2147483647 = -1",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 2147483646),
-			BPF_ALU64_IMM(BPF_SUB, R0, 2147483647),
+			BPF_LD_IMM64(BPF_REG_0, 2147483646),
+			BPF_ALU64_IMM(BPF_SUB, BPF_REG_0, 2147483647),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2525,9 +2537,9 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_MUL_X: 2 * 3 = 6",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 2),
-			BPF_ALU32_IMM(BPF_MOV, R1, 3),
-			BPF_ALU32_REG(BPF_MUL, R0, R1),
+			BPF_LD_IMM64(BPF_REG_0, 2),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 3),
+			BPF_ALU32_REG(BPF_MUL, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2537,9 +2549,9 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_MUL_X: 2 * 0x7FFFFFF8 = 0xFFFFFFF0",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 2),
-			BPF_ALU32_IMM(BPF_MOV, R1, 0x7FFFFFF8),
-			BPF_ALU32_REG(BPF_MUL, R0, R1),
+			BPF_LD_IMM64(BPF_REG_0, 2),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 0x7FFFFFF8),
+			BPF_ALU32_REG(BPF_MUL, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2549,9 +2561,9 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_MUL_X: -1 * -1 = 1",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, -1),
-			BPF_ALU32_IMM(BPF_MOV, R1, -1),
-			BPF_ALU32_REG(BPF_MUL, R0, R1),
+			BPF_LD_IMM64(BPF_REG_0, -1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, -1),
+			BPF_ALU32_REG(BPF_MUL, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2561,9 +2573,9 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_MUL_X: 2 * 3 = 6",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 2),
-			BPF_ALU32_IMM(BPF_MOV, R1, 3),
-			BPF_ALU64_REG(BPF_MUL, R0, R1),
+			BPF_LD_IMM64(BPF_REG_0, 2),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 3),
+			BPF_ALU64_REG(BPF_MUL, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2573,9 +2585,9 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_MUL_X: 1 * 2147483647 = 2147483647",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 1),
-			BPF_ALU32_IMM(BPF_MOV, R1, 2147483647),
-			BPF_ALU64_REG(BPF_MUL, R0, R1),
+			BPF_LD_IMM64(BPF_REG_0, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 2147483647),
+			BPF_ALU64_REG(BPF_MUL, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2586,8 +2598,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_MUL_K: 2 * 3 = 6",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 2),
-			BPF_ALU32_IMM(BPF_MUL, R0, 3),
+			BPF_LD_IMM64(BPF_REG_0, 2),
+			BPF_ALU32_IMM(BPF_MUL, BPF_REG_0, 3),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2597,8 +2609,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_MUL_K: 3 * 1 = 3",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 3),
-			BPF_ALU32_IMM(BPF_MUL, R0, 1),
+			BPF_LD_IMM64(BPF_REG_0, 3),
+			BPF_ALU32_IMM(BPF_MUL, BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2608,8 +2620,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_MUL_K: 2 * 0x7FFFFFF8 = 0xFFFFFFF0",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 2),
-			BPF_ALU32_IMM(BPF_MUL, R0, 0x7FFFFFF8),
+			BPF_LD_IMM64(BPF_REG_0, 2),
+			BPF_ALU32_IMM(BPF_MUL, BPF_REG_0, 0x7FFFFFF8),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2619,13 +2631,13 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_MUL_K: 1 * (-1) = 0x00000000ffffffff",
 		.u.insns_int = {
-			BPF_LD_IMM64(R2, 0x1),
-			BPF_LD_IMM64(R3, 0x00000000ffffffff),
-			BPF_ALU32_IMM(BPF_MUL, R2, 0xffffffff),
-			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
-			BPF_MOV32_IMM(R0, 2),
+			BPF_LD_IMM64(BPF_REG_2, 0x1),
+			BPF_LD_IMM64(BPF_REG_3, 0x00000000ffffffff),
+			BPF_ALU32_IMM(BPF_MUL, BPF_REG_2, 0xffffffff),
+			BPF_JMP_REG(BPF_JEQ, BPF_REG_2, BPF_REG_3, 2),
+			BPF_MOV32_IMM(BPF_REG_0, 2),
 			BPF_EXIT_INSN(),
-			BPF_MOV32_IMM(R0, 1),
+			BPF_MOV32_IMM(BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2635,8 +2647,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_MUL_K: 2 * 3 = 6",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 2),
-			BPF_ALU64_IMM(BPF_MUL, R0, 3),
+			BPF_LD_IMM64(BPF_REG_0, 2),
+			BPF_ALU64_IMM(BPF_MUL, BPF_REG_0, 3),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2646,8 +2658,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_MUL_K: 3 * 1 = 3",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 3),
-			BPF_ALU64_IMM(BPF_MUL, R0, 1),
+			BPF_LD_IMM64(BPF_REG_0, 3),
+			BPF_ALU64_IMM(BPF_MUL, BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2657,8 +2669,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_MUL_K: 1 * 2147483647 = 2147483647",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 1),
-			BPF_ALU64_IMM(BPF_MUL, R0, 2147483647),
+			BPF_LD_IMM64(BPF_REG_0, 1),
+			BPF_ALU64_IMM(BPF_MUL, BPF_REG_0, 2147483647),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2668,8 +2680,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_MUL_K: 1 * -2147483647 = -2147483647",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 1),
-			BPF_ALU64_IMM(BPF_MUL, R0, -2147483647),
+			BPF_LD_IMM64(BPF_REG_0, 1),
+			BPF_ALU64_IMM(BPF_MUL, BPF_REG_0, -2147483647),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2679,13 +2691,13 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_MUL_K: 1 * (-1) = 0xffffffffffffffff",
 		.u.insns_int = {
-			BPF_LD_IMM64(R2, 0x1),
-			BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
-			BPF_ALU64_IMM(BPF_MUL, R2, 0xffffffff),
-			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
-			BPF_MOV32_IMM(R0, 2),
+			BPF_LD_IMM64(BPF_REG_2, 0x1),
+			BPF_LD_IMM64(BPF_REG_3, 0xffffffffffffffffLL),
+			BPF_ALU64_IMM(BPF_MUL, BPF_REG_2, 0xffffffff),
+			BPF_JMP_REG(BPF_JEQ, BPF_REG_2, BPF_REG_3, 2),
+			BPF_MOV32_IMM(BPF_REG_0, 2),
 			BPF_EXIT_INSN(),
-			BPF_MOV32_IMM(R0, 1),
+			BPF_MOV32_IMM(BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2696,9 +2708,9 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_DIV_X: 6 / 2 = 3",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 6),
-			BPF_ALU32_IMM(BPF_MOV, R1, 2),
-			BPF_ALU32_REG(BPF_DIV, R0, R1),
+			BPF_LD_IMM64(BPF_REG_0, 6),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 2),
+			BPF_ALU32_REG(BPF_DIV, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2708,9 +2720,9 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_DIV_X: 4294967295 / 4294967295 = 1",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 4294967295U),
-			BPF_ALU32_IMM(BPF_MOV, R1, 4294967295U),
-			BPF_ALU32_REG(BPF_DIV, R0, R1),
+			BPF_LD_IMM64(BPF_REG_0, 4294967295U),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 4294967295U),
+			BPF_ALU32_REG(BPF_DIV, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2720,9 +2732,9 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_DIV_X: 6 / 2 = 3",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 6),
-			BPF_ALU32_IMM(BPF_MOV, R1, 2),
-			BPF_ALU64_REG(BPF_DIV, R0, R1),
+			BPF_LD_IMM64(BPF_REG_0, 6),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 2),
+			BPF_ALU64_REG(BPF_DIV, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2732,9 +2744,9 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_DIV_X: 2147483647 / 2147483647 = 1",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 2147483647),
-			BPF_ALU32_IMM(BPF_MOV, R1, 2147483647),
-			BPF_ALU64_REG(BPF_DIV, R0, R1),
+			BPF_LD_IMM64(BPF_REG_0, 2147483647),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 2147483647),
+			BPF_ALU64_REG(BPF_DIV, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2744,14 +2756,14 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_DIV_X: 0xffffffffffffffff / (-1) = 0x0000000000000001",
 		.u.insns_int = {
-			BPF_LD_IMM64(R2, 0xffffffffffffffffLL),
-			BPF_LD_IMM64(R4, 0xffffffffffffffffLL),
-			BPF_LD_IMM64(R3, 0x0000000000000001LL),
-			BPF_ALU64_REG(BPF_DIV, R2, R4),
-			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
-			BPF_MOV32_IMM(R0, 2),
+			BPF_LD_IMM64(BPF_REG_2, 0xffffffffffffffffLL),
+			BPF_LD_IMM64(BPF_REG_4, 0xffffffffffffffffLL),
+			BPF_LD_IMM64(BPF_REG_3, 0x0000000000000001LL),
+			BPF_ALU64_REG(BPF_DIV, BPF_REG_2, BPF_REG_4),
+			BPF_JMP_REG(BPF_JEQ, BPF_REG_2, BPF_REG_3, 2),
+			BPF_MOV32_IMM(BPF_REG_0, 2),
 			BPF_EXIT_INSN(),
-			BPF_MOV32_IMM(R0, 1),
+			BPF_MOV32_IMM(BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2762,8 +2774,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_DIV_K: 6 / 2 = 3",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 6),
-			BPF_ALU32_IMM(BPF_DIV, R0, 2),
+			BPF_LD_IMM64(BPF_REG_0, 6),
+			BPF_ALU32_IMM(BPF_DIV, BPF_REG_0, 2),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2773,8 +2785,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_DIV_K: 3 / 1 = 3",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 3),
-			BPF_ALU32_IMM(BPF_DIV, R0, 1),
+			BPF_LD_IMM64(BPF_REG_0, 3),
+			BPF_ALU32_IMM(BPF_DIV, BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2784,8 +2796,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_DIV_K: 4294967295 / 4294967295 = 1",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 4294967295U),
-			BPF_ALU32_IMM(BPF_DIV, R0, 4294967295U),
+			BPF_LD_IMM64(BPF_REG_0, 4294967295U),
+			BPF_ALU32_IMM(BPF_DIV, BPF_REG_0, 4294967295U),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2795,13 +2807,13 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_DIV_K: 0xffffffffffffffff / (-1) = 0x1",
 		.u.insns_int = {
-			BPF_LD_IMM64(R2, 0xffffffffffffffffLL),
-			BPF_LD_IMM64(R3, 0x1UL),
-			BPF_ALU32_IMM(BPF_DIV, R2, 0xffffffff),
-			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
-			BPF_MOV32_IMM(R0, 2),
+			BPF_LD_IMM64(BPF_REG_2, 0xffffffffffffffffLL),
+			BPF_LD_IMM64(BPF_REG_3, 0x1UL),
+			BPF_ALU32_IMM(BPF_DIV, BPF_REG_2, 0xffffffff),
+			BPF_JMP_REG(BPF_JEQ, BPF_REG_2, BPF_REG_3, 2),
+			BPF_MOV32_IMM(BPF_REG_0, 2),
 			BPF_EXIT_INSN(),
-			BPF_MOV32_IMM(R0, 1),
+			BPF_MOV32_IMM(BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2811,8 +2823,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_DIV_K: 6 / 2 = 3",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 6),
-			BPF_ALU64_IMM(BPF_DIV, R0, 2),
+			BPF_LD_IMM64(BPF_REG_0, 6),
+			BPF_ALU64_IMM(BPF_DIV, BPF_REG_0, 2),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2822,8 +2834,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_DIV_K: 3 / 1 = 3",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 3),
-			BPF_ALU64_IMM(BPF_DIV, R0, 1),
+			BPF_LD_IMM64(BPF_REG_0, 3),
+			BPF_ALU64_IMM(BPF_DIV, BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2833,8 +2845,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_DIV_K: 2147483647 / 2147483647 = 1",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 2147483647),
-			BPF_ALU64_IMM(BPF_DIV, R0, 2147483647),
+			BPF_LD_IMM64(BPF_REG_0, 2147483647),
+			BPF_ALU64_IMM(BPF_DIV, BPF_REG_0, 2147483647),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2844,13 +2856,13 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_DIV_K: 0xffffffffffffffff / (-1) = 0x0000000000000001",
 		.u.insns_int = {
-			BPF_LD_IMM64(R2, 0xffffffffffffffffLL),
-			BPF_LD_IMM64(R3, 0x0000000000000001LL),
-			BPF_ALU64_IMM(BPF_DIV, R2, 0xffffffff),
-			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
-			BPF_MOV32_IMM(R0, 2),
+			BPF_LD_IMM64(BPF_REG_2, 0xffffffffffffffffLL),
+			BPF_LD_IMM64(BPF_REG_3, 0x0000000000000001LL),
+			BPF_ALU64_IMM(BPF_DIV, BPF_REG_2, 0xffffffff),
+			BPF_JMP_REG(BPF_JEQ, BPF_REG_2, BPF_REG_3, 2),
+			BPF_MOV32_IMM(BPF_REG_0, 2),
 			BPF_EXIT_INSN(),
-			BPF_MOV32_IMM(R0, 1),
+			BPF_MOV32_IMM(BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2861,9 +2873,9 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_MOD_X: 3 % 2 = 1",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 3),
-			BPF_ALU32_IMM(BPF_MOV, R1, 2),
-			BPF_ALU32_REG(BPF_MOD, R0, R1),
+			BPF_LD_IMM64(BPF_REG_0, 3),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 2),
+			BPF_ALU32_REG(BPF_MOD, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2873,9 +2885,9 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_MOD_X: 4294967295 % 4294967293 = 2",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 4294967295U),
-			BPF_ALU32_IMM(BPF_MOV, R1, 4294967293U),
-			BPF_ALU32_REG(BPF_MOD, R0, R1),
+			BPF_LD_IMM64(BPF_REG_0, 4294967295U),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 4294967293U),
+			BPF_ALU32_REG(BPF_MOD, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2885,9 +2897,9 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_MOD_X: 3 % 2 = 1",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 3),
-			BPF_ALU32_IMM(BPF_MOV, R1, 2),
-			BPF_ALU64_REG(BPF_MOD, R0, R1),
+			BPF_LD_IMM64(BPF_REG_0, 3),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 2),
+			BPF_ALU64_REG(BPF_MOD, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2897,9 +2909,9 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_MOD_X: 2147483647 % 2147483645 = 2",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 2147483647),
-			BPF_ALU32_IMM(BPF_MOV, R1, 2147483645),
-			BPF_ALU64_REG(BPF_MOD, R0, R1),
+			BPF_LD_IMM64(BPF_REG_0, 2147483647),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 2147483645),
+			BPF_ALU64_REG(BPF_MOD, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2910,8 +2922,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_MOD_K: 3 % 2 = 1",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 3),
-			BPF_ALU32_IMM(BPF_MOD, R0, 2),
+			BPF_LD_IMM64(BPF_REG_0, 3),
+			BPF_ALU32_IMM(BPF_MOD, BPF_REG_0, 2),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2921,8 +2933,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_MOD_K: 3 % 1 = 0",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 3),
-			BPF_ALU32_IMM(BPF_MOD, R0, 1),
+			BPF_LD_IMM64(BPF_REG_0, 3),
+			BPF_ALU32_IMM(BPF_MOD, BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2932,8 +2944,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_MOD_K: 4294967295 % 4294967293 = 2",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 4294967295U),
-			BPF_ALU32_IMM(BPF_MOD, R0, 4294967293U),
+			BPF_LD_IMM64(BPF_REG_0, 4294967295U),
+			BPF_ALU32_IMM(BPF_MOD, BPF_REG_0, 4294967293U),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2943,8 +2955,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_MOD_K: 3 % 2 = 1",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 3),
-			BPF_ALU64_IMM(BPF_MOD, R0, 2),
+			BPF_LD_IMM64(BPF_REG_0, 3),
+			BPF_ALU64_IMM(BPF_MOD, BPF_REG_0, 2),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2954,8 +2966,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_MOD_K: 3 % 1 = 0",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 3),
-			BPF_ALU64_IMM(BPF_MOD, R0, 1),
+			BPF_LD_IMM64(BPF_REG_0, 3),
+			BPF_ALU64_IMM(BPF_MOD, BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2965,8 +2977,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_MOD_K: 2147483647 % 2147483645 = 2",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 2147483647),
-			BPF_ALU64_IMM(BPF_MOD, R0, 2147483645),
+			BPF_LD_IMM64(BPF_REG_0, 2147483647),
+			BPF_ALU64_IMM(BPF_MOD, BPF_REG_0, 2147483645),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2977,9 +2989,9 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_AND_X: 3 & 2 = 2",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 3),
-			BPF_ALU32_IMM(BPF_MOV, R1, 2),
-			BPF_ALU32_REG(BPF_AND, R0, R1),
+			BPF_LD_IMM64(BPF_REG_0, 3),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 2),
+			BPF_ALU32_REG(BPF_AND, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -2989,9 +3001,9 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_AND_X: 0xffffffff & 0xffffffff = 0xffffffff",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 0xffffffff),
-			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
-			BPF_ALU32_REG(BPF_AND, R0, R1),
+			BPF_LD_IMM64(BPF_REG_0, 0xffffffff),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 0xffffffff),
+			BPF_ALU32_REG(BPF_AND, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3001,9 +3013,9 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_AND_X: 3 & 2 = 2",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 3),
-			BPF_ALU32_IMM(BPF_MOV, R1, 2),
-			BPF_ALU64_REG(BPF_AND, R0, R1),
+			BPF_LD_IMM64(BPF_REG_0, 3),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 2),
+			BPF_ALU64_REG(BPF_AND, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3013,9 +3025,9 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_AND_X: 0xffffffff & 0xffffffff = 0xffffffff",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 0xffffffff),
-			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
-			BPF_ALU64_REG(BPF_AND, R0, R1),
+			BPF_LD_IMM64(BPF_REG_0, 0xffffffff),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 0xffffffff),
+			BPF_ALU64_REG(BPF_AND, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3026,8 +3038,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_AND_K: 3 & 2 = 2",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 3),
-			BPF_ALU32_IMM(BPF_AND, R0, 2),
+			BPF_LD_IMM64(BPF_REG_0, 3),
+			BPF_ALU32_IMM(BPF_AND, BPF_REG_0, 2),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3037,8 +3049,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_AND_K: 0xffffffff & 0xffffffff = 0xffffffff",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 0xffffffff),
-			BPF_ALU32_IMM(BPF_AND, R0, 0xffffffff),
+			BPF_LD_IMM64(BPF_REG_0, 0xffffffff),
+			BPF_ALU32_IMM(BPF_AND, BPF_REG_0, 0xffffffff),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3048,8 +3060,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_AND_K: 3 & 2 = 2",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 3),
-			BPF_ALU64_IMM(BPF_AND, R0, 2),
+			BPF_LD_IMM64(BPF_REG_0, 3),
+			BPF_ALU64_IMM(BPF_AND, BPF_REG_0, 2),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3059,8 +3071,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_AND_K: 0xffffffff & 0xffffffff = 0xffffffff",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 0xffffffff),
-			BPF_ALU64_IMM(BPF_AND, R0, 0xffffffff),
+			BPF_LD_IMM64(BPF_REG_0, 0xffffffff),
+			BPF_ALU64_IMM(BPF_AND, BPF_REG_0, 0xffffffff),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3070,13 +3082,13 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_AND_K: 0x0000ffffffff0000 & 0x0 = 0x0000000000000000",
 		.u.insns_int = {
-			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
-			BPF_LD_IMM64(R3, 0x0000000000000000LL),
-			BPF_ALU64_IMM(BPF_AND, R2, 0x0),
-			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
-			BPF_MOV32_IMM(R0, 2),
+			BPF_LD_IMM64(BPF_REG_2, 0x0000ffffffff0000LL),
+			BPF_LD_IMM64(BPF_REG_3, 0x0000000000000000LL),
+			BPF_ALU64_IMM(BPF_AND, BPF_REG_2, 0x0),
+			BPF_JMP_REG(BPF_JEQ, BPF_REG_2, BPF_REG_3, 2),
+			BPF_MOV32_IMM(BPF_REG_0, 2),
 			BPF_EXIT_INSN(),
-			BPF_MOV32_IMM(R0, 1),
+			BPF_MOV32_IMM(BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3086,13 +3098,13 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_AND_K: 0x0000ffffffff0000 & -1 = 0x0000ffffffff0000",
 		.u.insns_int = {
-			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
-			BPF_LD_IMM64(R3, 0x0000ffffffff0000LL),
-			BPF_ALU64_IMM(BPF_AND, R2, 0xffffffff),
-			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
-			BPF_MOV32_IMM(R0, 2),
+			BPF_LD_IMM64(BPF_REG_2, 0x0000ffffffff0000LL),
+			BPF_LD_IMM64(BPF_REG_3, 0x0000ffffffff0000LL),
+			BPF_ALU64_IMM(BPF_AND, BPF_REG_2, 0xffffffff),
+			BPF_JMP_REG(BPF_JEQ, BPF_REG_2, BPF_REG_3, 2),
+			BPF_MOV32_IMM(BPF_REG_0, 2),
 			BPF_EXIT_INSN(),
-			BPF_MOV32_IMM(R0, 1),
+			BPF_MOV32_IMM(BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3102,13 +3114,13 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_AND_K: 0xffffffffffffffff & -1 = 0xffffffffffffffff",
 		.u.insns_int = {
-			BPF_LD_IMM64(R2, 0xffffffffffffffffLL),
-			BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
-			BPF_ALU64_IMM(BPF_AND, R2, 0xffffffff),
-			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
-			BPF_MOV32_IMM(R0, 2),
+			BPF_LD_IMM64(BPF_REG_2, 0xffffffffffffffffLL),
+			BPF_LD_IMM64(BPF_REG_3, 0xffffffffffffffffLL),
+			BPF_ALU64_IMM(BPF_AND, BPF_REG_2, 0xffffffff),
+			BPF_JMP_REG(BPF_JEQ, BPF_REG_2, BPF_REG_3, 2),
+			BPF_MOV32_IMM(BPF_REG_0, 2),
 			BPF_EXIT_INSN(),
-			BPF_MOV32_IMM(R0, 1),
+			BPF_MOV32_IMM(BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3119,9 +3131,9 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_OR_X: 1 | 2 = 3",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 1),
-			BPF_ALU32_IMM(BPF_MOV, R1, 2),
-			BPF_ALU32_REG(BPF_OR, R0, R1),
+			BPF_LD_IMM64(BPF_REG_0, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 2),
+			BPF_ALU32_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3131,9 +3143,9 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_OR_X: 0x0 | 0xffffffff = 0xffffffff",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 0),
-			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
-			BPF_ALU32_REG(BPF_OR, R0, R1),
+			BPF_LD_IMM64(BPF_REG_0, 0),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 0xffffffff),
+			BPF_ALU32_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3143,9 +3155,9 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_OR_X: 1 | 2 = 3",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 1),
-			BPF_ALU32_IMM(BPF_MOV, R1, 2),
-			BPF_ALU64_REG(BPF_OR, R0, R1),
+			BPF_LD_IMM64(BPF_REG_0, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 2),
+			BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3155,9 +3167,9 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_OR_X: 0 | 0xffffffff = 0xffffffff",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 0),
-			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
-			BPF_ALU64_REG(BPF_OR, R0, R1),
+			BPF_LD_IMM64(BPF_REG_0, 0),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 0xffffffff),
+			BPF_ALU64_REG(BPF_OR, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3168,8 +3180,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_OR_K: 1 | 2 = 3",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 1),
-			BPF_ALU32_IMM(BPF_OR, R0, 2),
+			BPF_LD_IMM64(BPF_REG_0, 1),
+			BPF_ALU32_IMM(BPF_OR, BPF_REG_0, 2),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3179,8 +3191,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_OR_K: 0 | 0xffffffff = 0xffffffff",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 0),
-			BPF_ALU32_IMM(BPF_OR, R0, 0xffffffff),
+			BPF_LD_IMM64(BPF_REG_0, 0),
+			BPF_ALU32_IMM(BPF_OR, BPF_REG_0, 0xffffffff),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3190,8 +3202,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_OR_K: 1 | 2 = 3",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 1),
-			BPF_ALU64_IMM(BPF_OR, R0, 2),
+			BPF_LD_IMM64(BPF_REG_0, 1),
+			BPF_ALU64_IMM(BPF_OR, BPF_REG_0, 2),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3201,8 +3213,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_OR_K: 0 | 0xffffffff = 0xffffffff",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 0),
-			BPF_ALU64_IMM(BPF_OR, R0, 0xffffffff),
+			BPF_LD_IMM64(BPF_REG_0, 0),
+			BPF_ALU64_IMM(BPF_OR, BPF_REG_0, 0xffffffff),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3212,13 +3224,13 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_OR_K: 0x0000ffffffff0000 | 0x0 = 0x0000ffffffff0000",
 		.u.insns_int = {
-			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
-			BPF_LD_IMM64(R3, 0x0000ffffffff0000LL),
-			BPF_ALU64_IMM(BPF_OR, R2, 0x0),
-			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
-			BPF_MOV32_IMM(R0, 2),
+			BPF_LD_IMM64(BPF_REG_2, 0x0000ffffffff0000LL),
+			BPF_LD_IMM64(BPF_REG_3, 0x0000ffffffff0000LL),
+			BPF_ALU64_IMM(BPF_OR, BPF_REG_2, 0x0),
+			BPF_JMP_REG(BPF_JEQ, BPF_REG_2, BPF_REG_3, 2),
+			BPF_MOV32_IMM(BPF_REG_0, 2),
 			BPF_EXIT_INSN(),
-			BPF_MOV32_IMM(R0, 1),
+			BPF_MOV32_IMM(BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3228,13 +3240,13 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_OR_K: 0x0000ffffffff0000 | -1 = 0xffffffffffffffff",
 		.u.insns_int = {
-			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
-			BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
-			BPF_ALU64_IMM(BPF_OR, R2, 0xffffffff),
-			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
-			BPF_MOV32_IMM(R0, 2),
+			BPF_LD_IMM64(BPF_REG_2, 0x0000ffffffff0000LL),
+			BPF_LD_IMM64(BPF_REG_3, 0xffffffffffffffffLL),
+			BPF_ALU64_IMM(BPF_OR, BPF_REG_2, 0xffffffff),
+			BPF_JMP_REG(BPF_JEQ, BPF_REG_2, BPF_REG_3, 2),
+			BPF_MOV32_IMM(BPF_REG_0, 2),
 			BPF_EXIT_INSN(),
-			BPF_MOV32_IMM(R0, 1),
+			BPF_MOV32_IMM(BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3244,13 +3256,13 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_OR_K: 0x0000000000000000 | -1 = 0xffffffffffffffff",
 		.u.insns_int = {
-			BPF_LD_IMM64(R2, 0x0000000000000000LL),
-			BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
-			BPF_ALU64_IMM(BPF_OR, R2, 0xffffffff),
-			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
-			BPF_MOV32_IMM(R0, 2),
+			BPF_LD_IMM64(BPF_REG_2, 0x0000000000000000LL),
+			BPF_LD_IMM64(BPF_REG_3, 0xffffffffffffffffLL),
+			BPF_ALU64_IMM(BPF_OR, BPF_REG_2, 0xffffffff),
+			BPF_JMP_REG(BPF_JEQ, BPF_REG_2, BPF_REG_3, 2),
+			BPF_MOV32_IMM(BPF_REG_0, 2),
 			BPF_EXIT_INSN(),
-			BPF_MOV32_IMM(R0, 1),
+			BPF_MOV32_IMM(BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3261,9 +3273,9 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_XOR_X: 5 ^ 6 = 3",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 5),
-			BPF_ALU32_IMM(BPF_MOV, R1, 6),
-			BPF_ALU32_REG(BPF_XOR, R0, R1),
+			BPF_LD_IMM64(BPF_REG_0, 5),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 6),
+			BPF_ALU32_REG(BPF_XOR, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3273,9 +3285,9 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_XOR_X: 0x1 ^ 0xffffffff = 0xfffffffe",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 1),
-			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
-			BPF_ALU32_REG(BPF_XOR, R0, R1),
+			BPF_LD_IMM64(BPF_REG_0, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 0xffffffff),
+			BPF_ALU32_REG(BPF_XOR, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3285,9 +3297,9 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_XOR_X: 5 ^ 6 = 3",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 5),
-			BPF_ALU32_IMM(BPF_MOV, R1, 6),
-			BPF_ALU64_REG(BPF_XOR, R0, R1),
+			BPF_LD_IMM64(BPF_REG_0, 5),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 6),
+			BPF_ALU64_REG(BPF_XOR, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3297,9 +3309,9 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_XOR_X: 1 ^ 0xffffffff = 0xfffffffe",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 1),
-			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
-			BPF_ALU64_REG(BPF_XOR, R0, R1),
+			BPF_LD_IMM64(BPF_REG_0, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 0xffffffff),
+			BPF_ALU64_REG(BPF_XOR, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3310,8 +3322,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_XOR_K: 5 ^ 6 = 3",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 5),
-			BPF_ALU32_IMM(BPF_XOR, R0, 6),
+			BPF_LD_IMM64(BPF_REG_0, 5),
+			BPF_ALU32_IMM(BPF_XOR, BPF_REG_0, 6),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3321,8 +3333,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_XOR_K: 1 ^ 0xffffffff = 0xfffffffe",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 1),
-			BPF_ALU32_IMM(BPF_XOR, R0, 0xffffffff),
+			BPF_LD_IMM64(BPF_REG_0, 1),
+			BPF_ALU32_IMM(BPF_XOR, BPF_REG_0, 0xffffffff),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3332,8 +3344,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_XOR_K: 5 ^ 6 = 3",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 5),
-			BPF_ALU64_IMM(BPF_XOR, R0, 6),
+			BPF_LD_IMM64(BPF_REG_0, 5),
+			BPF_ALU64_IMM(BPF_XOR, BPF_REG_0, 6),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3343,8 +3355,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_XOR_K: 1 ^ 0xffffffff = 0xfffffffe",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 1),
-			BPF_ALU64_IMM(BPF_XOR, R0, 0xffffffff),
+			BPF_LD_IMM64(BPF_REG_0, 1),
+			BPF_ALU64_IMM(BPF_XOR, BPF_REG_0, 0xffffffff),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3354,13 +3366,13 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_XOR_K: 0x0000ffffffff0000 ^ 0x0 = 0x0000ffffffff0000",
 		.u.insns_int = {
-			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
-			BPF_LD_IMM64(R3, 0x0000ffffffff0000LL),
-			BPF_ALU64_IMM(BPF_XOR, R2, 0x0),
-			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
-			BPF_MOV32_IMM(R0, 2),
+			BPF_LD_IMM64(BPF_REG_2, 0x0000ffffffff0000LL),
+			BPF_LD_IMM64(BPF_REG_3, 0x0000ffffffff0000LL),
+			BPF_ALU64_IMM(BPF_XOR, BPF_REG_2, 0x0),
+			BPF_JMP_REG(BPF_JEQ, BPF_REG_2, BPF_REG_3, 2),
+			BPF_MOV32_IMM(BPF_REG_0, 2),
 			BPF_EXIT_INSN(),
-			BPF_MOV32_IMM(R0, 1),
+			BPF_MOV32_IMM(BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3370,13 +3382,13 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_XOR_K: 0x0000ffffffff0000 ^ -1 = 0xffff00000000ffff",
 		.u.insns_int = {
-			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
-			BPF_LD_IMM64(R3, 0xffff00000000ffffLL),
-			BPF_ALU64_IMM(BPF_XOR, R2, 0xffffffff),
-			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
-			BPF_MOV32_IMM(R0, 2),
+			BPF_LD_IMM64(BPF_REG_2, 0x0000ffffffff0000LL),
+			BPF_LD_IMM64(BPF_REG_3, 0xffff00000000ffffLL),
+			BPF_ALU64_IMM(BPF_XOR, BPF_REG_2, 0xffffffff),
+			BPF_JMP_REG(BPF_JEQ, BPF_REG_2, BPF_REG_3, 2),
+			BPF_MOV32_IMM(BPF_REG_0, 2),
 			BPF_EXIT_INSN(),
-			BPF_MOV32_IMM(R0, 1),
+			BPF_MOV32_IMM(BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3386,13 +3398,13 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_XOR_K: 0x0000000000000000 ^ -1 = 0xffffffffffffffff",
 		.u.insns_int = {
-			BPF_LD_IMM64(R2, 0x0000000000000000LL),
-			BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
-			BPF_ALU64_IMM(BPF_XOR, R2, 0xffffffff),
-			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
-			BPF_MOV32_IMM(R0, 2),
+			BPF_LD_IMM64(BPF_REG_2, 0x0000000000000000LL),
+			BPF_LD_IMM64(BPF_REG_3, 0xffffffffffffffffLL),
+			BPF_ALU64_IMM(BPF_XOR, BPF_REG_2, 0xffffffff),
+			BPF_JMP_REG(BPF_JEQ, BPF_REG_2, BPF_REG_3, 2),
+			BPF_MOV32_IMM(BPF_REG_0, 2),
 			BPF_EXIT_INSN(),
-			BPF_MOV32_IMM(R0, 1),
+			BPF_MOV32_IMM(BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3403,9 +3415,9 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_LSH_X: 1 << 1 = 2",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 1),
-			BPF_ALU32_IMM(BPF_MOV, R1, 1),
-			BPF_ALU32_REG(BPF_LSH, R0, R1),
+			BPF_LD_IMM64(BPF_REG_0, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 1),
+			BPF_ALU32_REG(BPF_LSH, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3415,9 +3427,9 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_LSH_X: 1 << 31 = 0x80000000",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 1),
-			BPF_ALU32_IMM(BPF_MOV, R1, 31),
-			BPF_ALU32_REG(BPF_LSH, R0, R1),
+			BPF_LD_IMM64(BPF_REG_0, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 31),
+			BPF_ALU32_REG(BPF_LSH, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3427,9 +3439,9 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_LSH_X: 1 << 1 = 2",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 1),
-			BPF_ALU32_IMM(BPF_MOV, R1, 1),
-			BPF_ALU64_REG(BPF_LSH, R0, R1),
+			BPF_LD_IMM64(BPF_REG_0, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 1),
+			BPF_ALU64_REG(BPF_LSH, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3439,9 +3451,9 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_LSH_X: 1 << 31 = 0x80000000",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 1),
-			BPF_ALU32_IMM(BPF_MOV, R1, 31),
-			BPF_ALU64_REG(BPF_LSH, R0, R1),
+			BPF_LD_IMM64(BPF_REG_0, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 31),
+			BPF_ALU64_REG(BPF_LSH, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3452,8 +3464,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_LSH_K: 1 << 1 = 2",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 1),
-			BPF_ALU32_IMM(BPF_LSH, R0, 1),
+			BPF_LD_IMM64(BPF_REG_0, 1),
+			BPF_ALU32_IMM(BPF_LSH, BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3463,8 +3475,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_LSH_K: 1 << 31 = 0x80000000",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 1),
-			BPF_ALU32_IMM(BPF_LSH, R0, 31),
+			BPF_LD_IMM64(BPF_REG_0, 1),
+			BPF_ALU32_IMM(BPF_LSH, BPF_REG_0, 31),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3474,8 +3486,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_LSH_K: 1 << 1 = 2",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 1),
-			BPF_ALU64_IMM(BPF_LSH, R0, 1),
+			BPF_LD_IMM64(BPF_REG_0, 1),
+			BPF_ALU64_IMM(BPF_LSH, BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3485,8 +3497,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_LSH_K: 1 << 31 = 0x80000000",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 1),
-			BPF_ALU64_IMM(BPF_LSH, R0, 31),
+			BPF_LD_IMM64(BPF_REG_0, 1),
+			BPF_ALU64_IMM(BPF_LSH, BPF_REG_0, 31),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3497,9 +3509,9 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_RSH_X: 2 >> 1 = 1",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 2),
-			BPF_ALU32_IMM(BPF_MOV, R1, 1),
-			BPF_ALU32_REG(BPF_RSH, R0, R1),
+			BPF_LD_IMM64(BPF_REG_0, 2),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 1),
+			BPF_ALU32_REG(BPF_RSH, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3509,9 +3521,9 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_RSH_X: 0x80000000 >> 31 = 1",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 0x80000000),
-			BPF_ALU32_IMM(BPF_MOV, R1, 31),
-			BPF_ALU32_REG(BPF_RSH, R0, R1),
+			BPF_LD_IMM64(BPF_REG_0, 0x80000000),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 31),
+			BPF_ALU32_REG(BPF_RSH, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3521,9 +3533,9 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_RSH_X: 2 >> 1 = 1",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 2),
-			BPF_ALU32_IMM(BPF_MOV, R1, 1),
-			BPF_ALU64_REG(BPF_RSH, R0, R1),
+			BPF_LD_IMM64(BPF_REG_0, 2),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 1),
+			BPF_ALU64_REG(BPF_RSH, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3533,9 +3545,9 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_RSH_X: 0x80000000 >> 31 = 1",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 0x80000000),
-			BPF_ALU32_IMM(BPF_MOV, R1, 31),
-			BPF_ALU64_REG(BPF_RSH, R0, R1),
+			BPF_LD_IMM64(BPF_REG_0, 0x80000000),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 31),
+			BPF_ALU64_REG(BPF_RSH, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3546,8 +3558,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_RSH_K: 2 >> 1 = 1",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 2),
-			BPF_ALU32_IMM(BPF_RSH, R0, 1),
+			BPF_LD_IMM64(BPF_REG_0, 2),
+			BPF_ALU32_IMM(BPF_RSH, BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3557,8 +3569,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_RSH_K: 0x80000000 >> 31 = 1",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 0x80000000),
-			BPF_ALU32_IMM(BPF_RSH, R0, 31),
+			BPF_LD_IMM64(BPF_REG_0, 0x80000000),
+			BPF_ALU32_IMM(BPF_RSH, BPF_REG_0, 31),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3568,8 +3580,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_RSH_K: 2 >> 1 = 1",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 2),
-			BPF_ALU64_IMM(BPF_RSH, R0, 1),
+			BPF_LD_IMM64(BPF_REG_0, 2),
+			BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3579,8 +3591,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_RSH_K: 0x80000000 >> 31 = 1",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 0x80000000),
-			BPF_ALU64_IMM(BPF_RSH, R0, 31),
+			BPF_LD_IMM64(BPF_REG_0, 0x80000000),
+			BPF_ALU64_IMM(BPF_RSH, BPF_REG_0, 31),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3591,9 +3603,9 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_ARSH_X: 0xff00ff0000000000 >> 40 = 0xffffffffffff00ff",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 0xff00ff0000000000LL),
-			BPF_ALU32_IMM(BPF_MOV, R1, 40),
-			BPF_ALU64_REG(BPF_ARSH, R0, R1),
+			BPF_LD_IMM64(BPF_REG_0, 0xff00ff0000000000LL),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_1, 40),
+			BPF_ALU64_REG(BPF_ARSH, BPF_REG_0, BPF_REG_1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3604,8 +3616,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_ARSH_K: 0xff00ff0000000000 >> 40 = 0xffffffffffff00ff",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 0xff00ff0000000000LL),
-			BPF_ALU64_IMM(BPF_ARSH, R0, 40),
+			BPF_LD_IMM64(BPF_REG_0, 0xff00ff0000000000LL),
+			BPF_ALU64_IMM(BPF_ARSH, BPF_REG_0, 40),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3616,8 +3628,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_NEG: -(3) = -3",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R0, 3),
-			BPF_ALU32_IMM(BPF_NEG, R0, 0),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 3),
+			BPF_ALU32_IMM(BPF_NEG, BPF_REG_0, 0),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3627,8 +3639,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_NEG: -(-3) = 3",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R0, -3),
-			BPF_ALU32_IMM(BPF_NEG, R0, 0),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, -3),
+			BPF_ALU32_IMM(BPF_NEG, BPF_REG_0, 0),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3638,8 +3650,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_NEG: -(3) = -3",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 3),
-			BPF_ALU64_IMM(BPF_NEG, R0, 0),
+			BPF_LD_IMM64(BPF_REG_0, 3),
+			BPF_ALU64_IMM(BPF_NEG, BPF_REG_0, 0),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3649,8 +3661,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU64_NEG: -(-3) = 3",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, -3),
-			BPF_ALU64_IMM(BPF_NEG, R0, 0),
+			BPF_LD_IMM64(BPF_REG_0, -3),
+			BPF_ALU64_IMM(BPF_NEG, BPF_REG_0, 0),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3661,8 +3673,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_END_FROM_BE 16: 0x0123456789abcdef -> 0xcdef",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
-			BPF_ENDIAN(BPF_FROM_BE, R0, 16),
+			BPF_LD_IMM64(BPF_REG_0, 0x0123456789abcdefLL),
+			BPF_ENDIAN(BPF_FROM_BE, BPF_REG_0, 16),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3672,8 +3684,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_END_FROM_BE 32: 0x0123456789abcdef -> 0x89abcdef",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
-			BPF_ENDIAN(BPF_FROM_BE, R0, 32),
+			BPF_LD_IMM64(BPF_REG_0, 0x0123456789abcdefLL),
+			BPF_ENDIAN(BPF_FROM_BE, BPF_REG_0, 32),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3683,8 +3695,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_END_FROM_BE 64: 0x0123456789abcdef -> 0x89abcdef",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
-			BPF_ENDIAN(BPF_FROM_BE, R0, 64),
+			BPF_LD_IMM64(BPF_REG_0, 0x0123456789abcdefLL),
+			BPF_ENDIAN(BPF_FROM_BE, BPF_REG_0, 64),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3695,8 +3707,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_END_FROM_LE 16: 0x0123456789abcdef -> 0xefcd",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
-			BPF_ENDIAN(BPF_FROM_LE, R0, 16),
+			BPF_LD_IMM64(BPF_REG_0, 0x0123456789abcdefLL),
+			BPF_ENDIAN(BPF_FROM_LE, BPF_REG_0, 16),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3706,8 +3718,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_END_FROM_LE 32: 0x0123456789abcdef -> 0xefcdab89",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
-			BPF_ENDIAN(BPF_FROM_LE, R0, 32),
+			BPF_LD_IMM64(BPF_REG_0, 0x0123456789abcdefLL),
+			BPF_ENDIAN(BPF_FROM_LE, BPF_REG_0, 32),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3717,8 +3729,8 @@  static struct bpf_test tests[] = {
 	{
 		"ALU_END_FROM_LE 64: 0x0123456789abcdef -> 0x67452301",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
-			BPF_ENDIAN(BPF_FROM_LE, R0, 64),
+			BPF_LD_IMM64(BPF_REG_0, 0x0123456789abcdefLL),
+			BPF_ENDIAN(BPF_FROM_LE, BPF_REG_0, 64),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3729,9 +3741,9 @@  static struct bpf_test tests[] = {
 	{
 		"ST_MEM_B: Store/Load byte: max negative",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R0, 1),
-			BPF_ST_MEM(BPF_B, R10, -40, 0xff),
-			BPF_LDX_MEM(BPF_B, R0, R10, -40),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 1),
+			BPF_ST_MEM(BPF_B, BPF_REG_10, -40, 0xff),
+			BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_10, -40),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3741,9 +3753,9 @@  static struct bpf_test tests[] = {
 	{
 		"ST_MEM_B: Store/Load byte: max positive",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R0, 1),
-			BPF_ST_MEM(BPF_B, R10, -40, 0x7f),
-			BPF_LDX_MEM(BPF_B, R0, R10, -40),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 1),
+			BPF_ST_MEM(BPF_B, BPF_REG_10, -40, 0x7f),
+			BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_10, -40),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3753,10 +3765,10 @@  static struct bpf_test tests[] = {
 	{
 		"STX_MEM_B: Store/Load byte: max negative",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 0),
-			BPF_LD_IMM64(R1, 0xffLL),
-			BPF_STX_MEM(BPF_B, R10, R1, -40),
-			BPF_LDX_MEM(BPF_B, R0, R10, -40),
+			BPF_LD_IMM64(BPF_REG_0, 0),
+			BPF_LD_IMM64(BPF_REG_1, 0xffLL),
+			BPF_STX_MEM(BPF_B, BPF_REG_10, BPF_REG_1, -40),
+			BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_10, -40),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3766,9 +3778,9 @@  static struct bpf_test tests[] = {
 	{
 		"ST_MEM_H: Store/Load half word: max negative",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R0, 1),
-			BPF_ST_MEM(BPF_H, R10, -40, 0xffff),
-			BPF_LDX_MEM(BPF_H, R0, R10, -40),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 1),
+			BPF_ST_MEM(BPF_H, BPF_REG_10, -40, 0xffff),
+			BPF_LDX_MEM(BPF_H, BPF_REG_0, BPF_REG_10, -40),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3778,9 +3790,9 @@  static struct bpf_test tests[] = {
 	{
 		"ST_MEM_H: Store/Load half word: max positive",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R0, 1),
-			BPF_ST_MEM(BPF_H, R10, -40, 0x7fff),
-			BPF_LDX_MEM(BPF_H, R0, R10, -40),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 1),
+			BPF_ST_MEM(BPF_H, BPF_REG_10, -40, 0x7fff),
+			BPF_LDX_MEM(BPF_H, BPF_REG_0, BPF_REG_10, -40),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3790,10 +3802,10 @@  static struct bpf_test tests[] = {
 	{
 		"STX_MEM_H: Store/Load half word: max negative",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 0),
-			BPF_LD_IMM64(R1, 0xffffLL),
-			BPF_STX_MEM(BPF_H, R10, R1, -40),
-			BPF_LDX_MEM(BPF_H, R0, R10, -40),
+			BPF_LD_IMM64(BPF_REG_0, 0),
+			BPF_LD_IMM64(BPF_REG_1, 0xffffLL),
+			BPF_STX_MEM(BPF_H, BPF_REG_10, BPF_REG_1, -40),
+			BPF_LDX_MEM(BPF_H, BPF_REG_0, BPF_REG_10, -40),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3803,9 +3815,9 @@  static struct bpf_test tests[] = {
 	{
 		"ST_MEM_W: Store/Load word: max negative",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R0, 1),
-			BPF_ST_MEM(BPF_W, R10, -40, 0xffffffff),
-			BPF_LDX_MEM(BPF_W, R0, R10, -40),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 1),
+			BPF_ST_MEM(BPF_W, BPF_REG_10, -40, 0xffffffff),
+			BPF_LDX_MEM(BPF_W, BPF_REG_0, BPF_REG_10, -40),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3815,9 +3827,9 @@  static struct bpf_test tests[] = {
 	{
 		"ST_MEM_W: Store/Load word: max positive",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R0, 1),
-			BPF_ST_MEM(BPF_W, R10, -40, 0x7fffffff),
-			BPF_LDX_MEM(BPF_W, R0, R10, -40),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 1),
+			BPF_ST_MEM(BPF_W, BPF_REG_10, -40, 0x7fffffff),
+			BPF_LDX_MEM(BPF_W, BPF_REG_0, BPF_REG_10, -40),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3827,10 +3839,10 @@  static struct bpf_test tests[] = {
 	{
 		"STX_MEM_W: Store/Load word: max negative",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 0),
-			BPF_LD_IMM64(R1, 0xffffffffLL),
-			BPF_STX_MEM(BPF_W, R10, R1, -40),
-			BPF_LDX_MEM(BPF_W, R0, R10, -40),
+			BPF_LD_IMM64(BPF_REG_0, 0),
+			BPF_LD_IMM64(BPF_REG_1, 0xffffffffLL),
+			BPF_STX_MEM(BPF_W, BPF_REG_10, BPF_REG_1, -40),
+			BPF_LDX_MEM(BPF_W, BPF_REG_0, BPF_REG_10, -40),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3840,9 +3852,9 @@  static struct bpf_test tests[] = {
 	{
 		"ST_MEM_DW: Store/Load double word: max negative",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R0, 1),
-			BPF_ST_MEM(BPF_DW, R10, -40, 0xffffffff),
-			BPF_LDX_MEM(BPF_DW, R0, R10, -40),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 1),
+			BPF_ST_MEM(BPF_DW, BPF_REG_10, -40, 0xffffffff),
+			BPF_LDX_MEM(BPF_DW, BPF_REG_0, BPF_REG_10, -40),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3852,14 +3864,14 @@  static struct bpf_test tests[] = {
 	{
 		"ST_MEM_DW: Store/Load double word: max negative 2",
 		.u.insns_int = {
-			BPF_LD_IMM64(R2, 0xffff00000000ffffLL),
-			BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
-			BPF_ST_MEM(BPF_DW, R10, -40, 0xffffffff),
-			BPF_LDX_MEM(BPF_DW, R2, R10, -40),
-			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
-			BPF_MOV32_IMM(R0, 2),
+			BPF_LD_IMM64(BPF_REG_2, 0xffff00000000ffffLL),
+			BPF_LD_IMM64(BPF_REG_3, 0xffffffffffffffffLL),
+			BPF_ST_MEM(BPF_DW, BPF_REG_10, -40, 0xffffffff),
+			BPF_LDX_MEM(BPF_DW, BPF_REG_2, BPF_REG_10, -40),
+			BPF_JMP_REG(BPF_JEQ, BPF_REG_2, BPF_REG_3, 2),
+			BPF_MOV32_IMM(BPF_REG_0, 2),
 			BPF_EXIT_INSN(),
-			BPF_MOV32_IMM(R0, 1),
+			BPF_MOV32_IMM(BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3869,9 +3881,9 @@  static struct bpf_test tests[] = {
 	{
 		"ST_MEM_DW: Store/Load double word: max positive",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R0, 1),
-			BPF_ST_MEM(BPF_DW, R10, -40, 0x7fffffff),
-			BPF_LDX_MEM(BPF_DW, R0, R10, -40),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 1),
+			BPF_ST_MEM(BPF_DW, BPF_REG_10, -40, 0x7fffffff),
+			BPF_LDX_MEM(BPF_DW, BPF_REG_0, BPF_REG_10, -40),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3881,10 +3893,10 @@  static struct bpf_test tests[] = {
 	{
 		"STX_MEM_DW: Store/Load double word: max negative",
 		.u.insns_int = {
-			BPF_LD_IMM64(R0, 0),
-			BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
-			BPF_STX_MEM(BPF_DW, R10, R1, -40),
-			BPF_LDX_MEM(BPF_DW, R0, R10, -40),
+			BPF_LD_IMM64(BPF_REG_0, 0),
+			BPF_LD_IMM64(BPF_REG_1, 0xffffffffffffffffLL),
+			BPF_STX_MEM(BPF_DW, BPF_REG_10, BPF_REG_1, -40),
+			BPF_LDX_MEM(BPF_DW, BPF_REG_0, BPF_REG_10, -40),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3895,10 +3907,10 @@  static struct bpf_test tests[] = {
 	{
 		"STX_XADD_W: Test: 0x12 + 0x10 = 0x22",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R0, 0x12),
-			BPF_ST_MEM(BPF_W, R10, -40, 0x10),
-			BPF_STX_XADD(BPF_W, R10, R0, -40),
-			BPF_LDX_MEM(BPF_W, R0, R10, -40),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 0x12),
+			BPF_ST_MEM(BPF_W, BPF_REG_10, -40, 0x10),
+			BPF_STX_XADD(BPF_W, BPF_REG_10, BPF_REG_0, -40),
+			BPF_LDX_MEM(BPF_W, BPF_REG_0, BPF_REG_10, -40),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3908,10 +3920,10 @@  static struct bpf_test tests[] = {
 	{
 		"STX_XADD_DW: Test: 0x12 + 0x10 = 0x22",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R0, 0x12),
-			BPF_ST_MEM(BPF_DW, R10, -40, 0x10),
-			BPF_STX_XADD(BPF_DW, R10, R0, -40),
-			BPF_LDX_MEM(BPF_DW, R0, R10, -40),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 0x12),
+			BPF_ST_MEM(BPF_DW, BPF_REG_10, -40, 0x10),
+			BPF_STX_XADD(BPF_DW, BPF_REG_10, BPF_REG_0, -40),
+			BPF_LDX_MEM(BPF_DW, BPF_REG_0, BPF_REG_10, -40),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3922,9 +3934,9 @@  static struct bpf_test tests[] = {
 	{
 		"JMP_EXIT",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R0, 0x4711),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 0x4711),
 			BPF_EXIT_INSN(),
-			BPF_ALU32_IMM(BPF_MOV, R0, 0x4712),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 0x4712),
 		},
 		INTERNAL,
 		{ },
@@ -3934,10 +3946,10 @@  static struct bpf_test tests[] = {
 	{
 		"JMP_JA: Unconditional jump: if (true) return 1",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R0, 0),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 0),
 			BPF_JMP_IMM(BPF_JA, 0, 0, 1),
 			BPF_EXIT_INSN(),
-			BPF_ALU32_IMM(BPF_MOV, R0, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3948,11 +3960,11 @@  static struct bpf_test tests[] = {
 	{
 		"JMP_JSGT_K: Signed jump: if (-1 > -2) return 1",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R0, 0),
-			BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
-			BPF_JMP_IMM(BPF_JSGT, R1, -2, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 0),
+			BPF_LD_IMM64(BPF_REG_1, 0xffffffffffffffffLL),
+			BPF_JMP_IMM(BPF_JSGT, BPF_REG_1, -2, 1),
 			BPF_EXIT_INSN(),
-			BPF_ALU32_IMM(BPF_MOV, R0, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3962,11 +3974,11 @@  static struct bpf_test tests[] = {
 	{
 		"JMP_JSGT_K: Signed jump: if (-1 > -1) return 0",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R0, 1),
-			BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
-			BPF_JMP_IMM(BPF_JSGT, R1, -1, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 1),
+			BPF_LD_IMM64(BPF_REG_1, 0xffffffffffffffffLL),
+			BPF_JMP_IMM(BPF_JSGT, BPF_REG_1, -1, 1),
 			BPF_EXIT_INSN(),
-			BPF_ALU32_IMM(BPF_MOV, R0, 0),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 0),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3977,11 +3989,11 @@  static struct bpf_test tests[] = {
 	{
 		"JMP_JSGE_K: Signed jump: if (-1 >= -2) return 1",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R0, 0),
-			BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
-			BPF_JMP_IMM(BPF_JSGE, R1, -2, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 0),
+			BPF_LD_IMM64(BPF_REG_1, 0xffffffffffffffffLL),
+			BPF_JMP_IMM(BPF_JSGE, BPF_REG_1, -2, 1),
 			BPF_EXIT_INSN(),
-			BPF_ALU32_IMM(BPF_MOV, R0, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -3991,11 +4003,11 @@  static struct bpf_test tests[] = {
 	{
 		"JMP_JSGE_K: Signed jump: if (-1 >= -1) return 1",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R0, 0),
-			BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
-			BPF_JMP_IMM(BPF_JSGE, R1, -1, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 0),
+			BPF_LD_IMM64(BPF_REG_1, 0xffffffffffffffffLL),
+			BPF_JMP_IMM(BPF_JSGE, BPF_REG_1, -1, 1),
 			BPF_EXIT_INSN(),
-			BPF_ALU32_IMM(BPF_MOV, R0, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -4006,11 +4018,11 @@  static struct bpf_test tests[] = {
 	{
 		"JMP_JGT_K: if (3 > 2) return 1",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R0, 0),
-			BPF_LD_IMM64(R1, 3),
-			BPF_JMP_IMM(BPF_JGT, R1, 2, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 0),
+			BPF_LD_IMM64(BPF_REG_1, 3),
+			BPF_JMP_IMM(BPF_JGT, BPF_REG_1, 2, 1),
 			BPF_EXIT_INSN(),
-			BPF_ALU32_IMM(BPF_MOV, R0, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -4021,11 +4033,11 @@  static struct bpf_test tests[] = {
 	{
 		"JMP_JGE_K: if (3 >= 2) return 1",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R0, 0),
-			BPF_LD_IMM64(R1, 3),
-			BPF_JMP_IMM(BPF_JGE, R1, 2, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 0),
+			BPF_LD_IMM64(BPF_REG_1, 3),
+			BPF_JMP_IMM(BPF_JGE, BPF_REG_1, 2, 1),
 			BPF_EXIT_INSN(),
-			BPF_ALU32_IMM(BPF_MOV, R0, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -4037,11 +4049,11 @@  static struct bpf_test tests[] = {
 		"JMP_JGT_K: if (3 > 2) return 1 (jump backwards)",
 		.u.insns_int = {
 			BPF_JMP_IMM(BPF_JA, 0, 0, 2), /* goto start */
-			BPF_ALU32_IMM(BPF_MOV, R0, 1), /* out: */
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 1), /* out: */
 			BPF_EXIT_INSN(),
-			BPF_ALU32_IMM(BPF_MOV, R0, 0), /* start: */
-			BPF_LD_IMM64(R1, 3), /* note: this takes 2 insns */
-			BPF_JMP_IMM(BPF_JGT, R1, 2, -6), /* goto out */
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 0), /* start: */
+			BPF_LD_IMM64(BPF_REG_1, 3), /* this takes 2 insns */
+			BPF_JMP_IMM(BPF_JGT, BPF_REG_1, 2, -6), /* goto out */
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -4051,11 +4063,11 @@  static struct bpf_test tests[] = {
 	{
 		"JMP_JGE_K: if (3 >= 3) return 1",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R0, 0),
-			BPF_LD_IMM64(R1, 3),
-			BPF_JMP_IMM(BPF_JGE, R1, 3, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 0),
+			BPF_LD_IMM64(BPF_REG_1, 3),
+			BPF_JMP_IMM(BPF_JGE, BPF_REG_1, 3, 1),
 			BPF_EXIT_INSN(),
-			BPF_ALU32_IMM(BPF_MOV, R0, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -4066,11 +4078,11 @@  static struct bpf_test tests[] = {
 	{
 		"JMP_JNE_K: if (3 != 2) return 1",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R0, 0),
-			BPF_LD_IMM64(R1, 3),
-			BPF_JMP_IMM(BPF_JNE, R1, 2, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 0),
+			BPF_LD_IMM64(BPF_REG_1, 3),
+			BPF_JMP_IMM(BPF_JNE, BPF_REG_1, 2, 1),
 			BPF_EXIT_INSN(),
-			BPF_ALU32_IMM(BPF_MOV, R0, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -4081,11 +4093,11 @@  static struct bpf_test tests[] = {
 	{
 		"JMP_JEQ_K: if (3 == 3) return 1",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R0, 0),
-			BPF_LD_IMM64(R1, 3),
-			BPF_JMP_IMM(BPF_JEQ, R1, 3, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 0),
+			BPF_LD_IMM64(BPF_REG_1, 3),
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_1, 3, 1),
 			BPF_EXIT_INSN(),
-			BPF_ALU32_IMM(BPF_MOV, R0, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -4096,11 +4108,11 @@  static struct bpf_test tests[] = {
 	{
 		"JMP_JSET_K: if (0x3 & 0x2) return 1",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R0, 0),
-			BPF_LD_IMM64(R1, 3),
-			BPF_JMP_IMM(BPF_JSET, R1, 2, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 0),
+			BPF_LD_IMM64(BPF_REG_1, 3),
+			BPF_JMP_IMM(BPF_JSET, BPF_REG_1, 2, 1),
 			BPF_EXIT_INSN(),
-			BPF_ALU32_IMM(BPF_MOV, R0, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -4110,11 +4122,11 @@  static struct bpf_test tests[] = {
 	{
 		"JMP_JSET_K: if (0x3 & 0xffffffff) return 1",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R0, 0),
-			BPF_LD_IMM64(R1, 3),
-			BPF_JMP_IMM(BPF_JSET, R1, 0xffffffff, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 0),
+			BPF_LD_IMM64(BPF_REG_1, 3),
+			BPF_JMP_IMM(BPF_JSET, BPF_REG_1, 0xffffffff, 1),
 			BPF_EXIT_INSN(),
-			BPF_ALU32_IMM(BPF_MOV, R0, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -4125,12 +4137,12 @@  static struct bpf_test tests[] = {
 	{
 		"JMP_JSGT_X: Signed jump: if (-1 > -2) return 1",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R0, 0),
-			BPF_LD_IMM64(R1, -1),
-			BPF_LD_IMM64(R2, -2),
-			BPF_JMP_REG(BPF_JSGT, R1, R2, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 0),
+			BPF_LD_IMM64(BPF_REG_1, -1),
+			BPF_LD_IMM64(BPF_REG_2, -2),
+			BPF_JMP_REG(BPF_JSGT, BPF_REG_1, BPF_REG_2, 1),
 			BPF_EXIT_INSN(),
-			BPF_ALU32_IMM(BPF_MOV, R0, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -4140,12 +4152,12 @@  static struct bpf_test tests[] = {
 	{
 		"JMP_JSGT_X: Signed jump: if (-1 > -1) return 0",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R0, 1),
-			BPF_LD_IMM64(R1, -1),
-			BPF_LD_IMM64(R2, -1),
-			BPF_JMP_REG(BPF_JSGT, R1, R2, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 1),
+			BPF_LD_IMM64(BPF_REG_1, -1),
+			BPF_LD_IMM64(BPF_REG_2, -1),
+			BPF_JMP_REG(BPF_JSGT, BPF_REG_1, BPF_REG_2, 1),
 			BPF_EXIT_INSN(),
-			BPF_ALU32_IMM(BPF_MOV, R0, 0),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 0),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -4156,12 +4168,12 @@  static struct bpf_test tests[] = {
 	{
 		"JMP_JSGE_X: Signed jump: if (-1 >= -2) return 1",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R0, 0),
-			BPF_LD_IMM64(R1, -1),
-			BPF_LD_IMM64(R2, -2),
-			BPF_JMP_REG(BPF_JSGE, R1, R2, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 0),
+			BPF_LD_IMM64(BPF_REG_1, -1),
+			BPF_LD_IMM64(BPF_REG_2, -2),
+			BPF_JMP_REG(BPF_JSGE, BPF_REG_1, BPF_REG_2, 1),
 			BPF_EXIT_INSN(),
-			BPF_ALU32_IMM(BPF_MOV, R0, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -4171,12 +4183,12 @@  static struct bpf_test tests[] = {
 	{
 		"JMP_JSGE_X: Signed jump: if (-1 >= -1) return 1",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R0, 0),
-			BPF_LD_IMM64(R1, -1),
-			BPF_LD_IMM64(R2, -1),
-			BPF_JMP_REG(BPF_JSGE, R1, R2, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 0),
+			BPF_LD_IMM64(BPF_REG_1, -1),
+			BPF_LD_IMM64(BPF_REG_2, -1),
+			BPF_JMP_REG(BPF_JSGE, BPF_REG_1, BPF_REG_2, 1),
 			BPF_EXIT_INSN(),
-			BPF_ALU32_IMM(BPF_MOV, R0, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -4187,12 +4199,12 @@  static struct bpf_test tests[] = {
 	{
 		"JMP_JGT_X: if (3 > 2) return 1",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R0, 0),
-			BPF_LD_IMM64(R1, 3),
-			BPF_LD_IMM64(R2, 2),
-			BPF_JMP_REG(BPF_JGT, R1, R2, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 0),
+			BPF_LD_IMM64(BPF_REG_1, 3),
+			BPF_LD_IMM64(BPF_REG_2, 2),
+			BPF_JMP_REG(BPF_JGT, BPF_REG_1, BPF_REG_2, 1),
 			BPF_EXIT_INSN(),
-			BPF_ALU32_IMM(BPF_MOV, R0, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -4203,12 +4215,12 @@  static struct bpf_test tests[] = {
 	{
 		"JMP_JGE_X: if (3 >= 2) return 1",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R0, 0),
-			BPF_LD_IMM64(R1, 3),
-			BPF_LD_IMM64(R2, 2),
-			BPF_JMP_REG(BPF_JGE, R1, R2, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 0),
+			BPF_LD_IMM64(BPF_REG_1, 3),
+			BPF_LD_IMM64(BPF_REG_2, 2),
+			BPF_JMP_REG(BPF_JGE, BPF_REG_1, BPF_REG_2, 1),
 			BPF_EXIT_INSN(),
-			BPF_ALU32_IMM(BPF_MOV, R0, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -4218,12 +4230,12 @@  static struct bpf_test tests[] = {
 	{
 		"JMP_JGE_X: if (3 >= 3) return 1",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R0, 0),
-			BPF_LD_IMM64(R1, 3),
-			BPF_LD_IMM64(R2, 3),
-			BPF_JMP_REG(BPF_JGE, R1, R2, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 0),
+			BPF_LD_IMM64(BPF_REG_1, 3),
+			BPF_LD_IMM64(BPF_REG_2, 3),
+			BPF_JMP_REG(BPF_JGE, BPF_REG_1, BPF_REG_2, 1),
 			BPF_EXIT_INSN(),
-			BPF_ALU32_IMM(BPF_MOV, R0, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -4234,12 +4246,12 @@  static struct bpf_test tests[] = {
 	{
 		"JMP_JNE_X: if (3 != 2) return 1",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R0, 0),
-			BPF_LD_IMM64(R1, 3),
-			BPF_LD_IMM64(R2, 2),
-			BPF_JMP_REG(BPF_JNE, R1, R2, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 0),
+			BPF_LD_IMM64(BPF_REG_1, 3),
+			BPF_LD_IMM64(BPF_REG_2, 2),
+			BPF_JMP_REG(BPF_JNE, BPF_REG_1, BPF_REG_2, 1),
 			BPF_EXIT_INSN(),
-			BPF_ALU32_IMM(BPF_MOV, R0, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -4250,12 +4262,12 @@  static struct bpf_test tests[] = {
 	{
 		"JMP_JEQ_X: if (3 == 3) return 1",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R0, 0),
-			BPF_LD_IMM64(R1, 3),
-			BPF_LD_IMM64(R2, 3),
-			BPF_JMP_REG(BPF_JEQ, R1, R2, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 0),
+			BPF_LD_IMM64(BPF_REG_1, 3),
+			BPF_LD_IMM64(BPF_REG_2, 3),
+			BPF_JMP_REG(BPF_JEQ, BPF_REG_1, BPF_REG_2, 1),
 			BPF_EXIT_INSN(),
-			BPF_ALU32_IMM(BPF_MOV, R0, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -4266,12 +4278,12 @@  static struct bpf_test tests[] = {
 	{
 		"JMP_JSET_X: if (0x3 & 0x2) return 1",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R0, 0),
-			BPF_LD_IMM64(R1, 3),
-			BPF_LD_IMM64(R2, 2),
-			BPF_JMP_REG(BPF_JSET, R1, R2, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 0),
+			BPF_LD_IMM64(BPF_REG_1, 3),
+			BPF_LD_IMM64(BPF_REG_2, 2),
+			BPF_JMP_REG(BPF_JSET, BPF_REG_1, BPF_REG_2, 1),
 			BPF_EXIT_INSN(),
-			BPF_ALU32_IMM(BPF_MOV, R0, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,
@@ -4281,12 +4293,12 @@  static struct bpf_test tests[] = {
 	{
 		"JMP_JSET_X: if (0x3 & 0xffffffff) return 1",
 		.u.insns_int = {
-			BPF_ALU32_IMM(BPF_MOV, R0, 0),
-			BPF_LD_IMM64(R1, 3),
-			BPF_LD_IMM64(R2, 0xffffffff),
-			BPF_JMP_REG(BPF_JSET, R1, R2, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 0),
+			BPF_LD_IMM64(BPF_REG_1, 3),
+			BPF_LD_IMM64(BPF_REG_2, 0xffffffff),
+			BPF_JMP_REG(BPF_JSET, BPF_REG_1, BPF_REG_2, 1),
 			BPF_EXIT_INSN(),
-			BPF_ALU32_IMM(BPF_MOV, R0, 1),
+			BPF_ALU32_IMM(BPF_MOV, BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
 		},
 		INTERNAL,