Key: ELF symbol (primary) · ELF symbol · source (with column ║) · frame instruction · +variable (range start) · -variable (range end) · inlining · control-flow · forwards branch ──> · backwards branch ══>

Compilation unit 0000f090 0000f3c0 arch/arm64/kvm/hyp/nvhe/../aarch32.c instructions
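The variable annotations in the listing below give each variable a DWARF location expression and the pc range over which it holds. As a minimal sketch (not part of the tool output): DW_OP_breg31 <off> is SP plus the offset, and DW_OP_fbreg <off> is the frame base plus the offset. For these functions the frame base evidently resolves to the post-prologue stack pointer, e.g. vcpu at DW_OP_fbreg 0x20 in kvm_condition_valid32 lines up with the `str x0, [sp, #32]` spill at 0xf094; that correspondence, and the helper names, are assumptions of the sketch.

```c
/*
 * Sketch only: how the location forms seen in the annotations below would be
 * evaluated.  Assumes the frame base is the post-prologue SP, consistent with
 * the `str x0, [sp, #32]` spill at 0xf094 (0x20 == 32).
 */
#include <stdint.h>
#include <stdio.h>

/* DW_OP_breg31 <off>: address = SP + off. */
static uint64_t loc_breg31(uint64_t sp, int64_t off)        { return sp + off; }
/* DW_OP_fbreg <off>: address = frame base + off. */
static uint64_t loc_fbreg(uint64_t frame_base, int64_t off) { return frame_base + off; }

int main(void)
{
	uint64_t sp = 0x1000;   /* example post-prologue SP */

	/* kvm_condition_valid32: vcpu at DW_OP_fbreg 0x20 over [0xf090, 0xf1e0) */
	printf("vcpu @ %#llx\n", (unsigned long long)loc_fbreg(sp, 0x20));
	/* kvm_skip_instr32: pc at DW_OP_breg31 0xc over [0xf1e0, 0xf298) */
	printf("pc   @ %#llx\n", (unsigned long long)loc_breg31(sp, 0xc));
	return 0;
}
```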

**0000f090 <kvm_condition_valid32>:
0000f090 <$x>:
+ kvm_condition_valid32 params:
+vcpu param pointer(const(struct kvm_vcpu<a9b6b>/<a9eeb>)) 0xf090 0xf1e0 (DW_OP_fbreg 0x20)
kvm_condition_valid32:48.0 (aarch32.c) Sbepe ║{
0000f090 CFA:r31
+vcpu param pointer(const(struct kvm_vcpu<a9b6b>/<a9eeb>)) 0xf090 0xf1e0 (DW_OP_fbreg 0x20) kvm_condition_valid32:arch/arm64/kvm/hyp/nvhe/../aarch32.c:47
+cpsr var long unsigned int (base type, DW_ATE_unsigned size:8) 0xf090 0xf1e0 (DW_OP_fbreg 0x18) kvm_condition_valid32:arch/arm64/kvm/hyp/nvhe/../aarch32.c:49
+cpsr_cond var typedef(u32=typedef(__u32=unsigned int (base type, DW_ATE_unsigned size:4))) 0xf090 0xf1e0 (DW_OP_fbreg 0x14) kvm_condition_valid32:arch/arm64/kvm/hyp/nvhe/../aarch32.c:50
+cond var int (base type, DW_ATE_signed size:4) 0xf090 0xf1e0 (DW_OP_fbreg 0x10) kvm_condition_valid32:arch/arm64/kvm/hyp/nvhe/../aarch32.c:51
~ 0000f090: d10183ff sub sp, sp, #0x60 <- 0000d180(bl)<kvm_condition_valid32>
~ 0000f094: f90013e0 str x0, [sp, #32]
kvm_condition_valid32:54.23 (aarch32.c) SbePe if (kvm_vcpu_get_esr(║vcpu) >> 30)
~ 0000f098: f94013e8 ldr x8, [sp, #32]
~ 0000f09c: f9001be8 str x8, [sp, #48]
t: 0xf0a0 0xf0a8 kvm_vcpu_get_esr inlined from kvm_condition_valid32:54 (aarch32.c) <b807c>:
t kvm_vcpu_get_esr:224.9 (kvm_emulate.h) Sbepe return ║vcpu->arch.fault.esr_el2;
+vcpu param pointer(const(struct kvm_vcpu<a9b6b>/<a9eeb>)) 0xf0a0 0xf0a8 (DW_OP_fbreg 0x30) kvm_vcpu_get_esr(inlined):kvm_condition_valid32:arch/arm64/kvm/hyp/nvhe/../aarch32.c
~t 0000f0a0: f9401be8 ldr x8, [sp, #48]
t kvm_vcpu_get_esr:224.26 (kvm_emulate.h) sbepe return vcpu->arch.fault.║esr_el2;
~t 0000f0a4: b9488909 ldr w9, [x8, #2184]
-vcpu param pointer(const(struct kvm_vcpu<a9b6b>/<a9eeb>)) 0xf0a0 0xf0a8 (DW_OP_fbreg 0x30) kvm_vcpu_get_esr(inlined):kvm_condition_valid32:arch/arm64/kvm/hyp/nvhe/../aarch32.c
kvm_condition_valid32:54.29 (aarch32.c) Sbepe if (kvm_vcpu_get_esr(vcpu) ║>> 30)
~ 0000f0a8: 531e7d29 lsr w9, w9, #30
kvm_condition_valid32:54.6 (aarch32.c) sbepe if (║kvm_vcpu_get_esr(vcpu) >> 30)
~ ┌──0000f0ac: 340000a9 cbz w9, f0c0 <kvm_condition_valid32+0x30>
~ │┌─0000f0b0: 14000001 b f0b4 <kvm_condition_valid32+0x24> <- 0000f0ac(b.cc-succ)<fallthrough>
││
~ │└>0000f0b4: 52800028 mov w8, #0x1 // #1 <- 0000f0b0(b)<kvm_condition_valid32+0x24>
kvm_condition_valid32:55.3 (aarch32.c) Sbepe ║return true;
~ 0000f0b8: 3900bfe8 strb w8, [sp, #47]
~ ┌──────┼──0000f0bc: 14000046 b f1d4 <kvm_condition_valid32+0x144>
│ │
│ │ kvm_condition_valid32:58.32 (aarch32.c) Sbepe cond = kvm_vcpu_get_condition(║vcpu);
~ │ └─>0000f0c0: f94013e8 ldr x8, [sp, #32] <- 0000f0ac(b.cc)<kvm_condition_valid32+0x30>
~ 0000f0c4: f90027e8 str x8, [sp, #72]
u: 0xf0c8 0xf110 kvm_vcpu_get_condition inlined from kvm_condition_valid32:58 (aarch32.c) <b8099>:
u kvm_vcpu_get_condition:229.29 (kvm_emulate.h) Sbepe u32 esr = kvm_vcpu_get_esr(║vcpu);
+vcpu param pointer(const(struct kvm_vcpu<a9b6b>/<a9eeb>)) 0xf0c8 0xf110 (DW_OP_fbreg 0x48) kvm_vcpu_get_condition(inlined):kvm_condition_valid32:arch/arm64/kvm/hyp/nvhe/../aarch32.c
+esr var typedef(u32=typedef(__u32=unsigned int (base type, DW_ATE_unsigned size:4))) 0xf0c8 0xf110 (DW_OP_fbreg 0x44) kvm_vcpu_get_condition(inlined):kvm_condition_valid32:arch/arm64/kvm/hyp/nvhe/../aarch32.c
~u 0000f0c8: f94027e8 ldr x8, [sp, #72]
~u 0000f0cc: f9002fe8 str x8, [sp, #88]
v: 0xf0d0 0xf0d8 kvm_vcpu_get_esr inlined from kvm_vcpu_get_condition:229 (kvm_emulate.h) <b80bf>:<b8099>:
uv kvm_vcpu_get_esr:224.9 (kvm_emulate.h) Sbepe return ║vcpu->arch.fault.esr_el2;
+vcpu param pointer(const(struct kvm_vcpu<a9b6b>/<a9eeb>)) 0xf0d0 0xf0d8 (DW_OP_fbreg 0x58) kvm_vcpu_get_esr(inlined):kvm_vcpu_get_condition(inlined):kvm_condition_valid32:arch/arm64/kvm/hyp/nvhe/../aarch32.c
~uv 0000f0d0: f9402fe8 ldr x8, [sp, #88]
uv kvm_vcpu_get_esr:224.26 (kvm_emulate.h) sbepe return vcpu->arch.fault.║esr_el2;
~uv 0000f0d4: b9488909 ldr w9, [x8, #2184]
-vcpu param pointer(const(struct kvm_vcpu<a9b6b>/<a9eeb>)) 0xf0d0 0xf0d8 (DW_OP_fbreg 0x58) kvm_vcpu_get_esr(inlined):kvm_vcpu_get_condition(inlined):kvm_condition_valid32:arch/arm64/kvm/hyp/nvhe/../aarch32.c
u kvm_vcpu_get_condition:229.6 (kvm_emulate.h) Sbepe u32 ║esr = kvm_vcpu_get_esr(vcpu);
~u 0000f0d8: b90047e9 str w9, [sp, #68]
u kvm_vcpu_get_condition:231.6 (kvm_emulate.h) Sbepe if (║esr & ESR_ELx_CV)
~u 0000f0dc: b94047e9 ldr w9, [sp, #68]
~u 0000f0e0: 2a0903e0 mov w0, w9
u kvm_vcpu_get_condition:231.6 (kvm_emulate.h) sbepe if (║esr & ESR_ELx_CV)
~u │ ┌──0000f0e4: 36c000e0 tbz w0, #24, f100 <kvm_condition_valid32+0x70>
│ │
~u │ │┌─0000f0e8: 14000001 b f0ec <kvm_condition_valid32+0x5c> <- 0000f0e4(b.cc-succ)<fallthrough>
│ ││
u │ ││ kvm_vcpu_get_condition:232.11 (kvm_emulate.h) Sbepe return (║esr & ESR_ELx_COND_MASK) >> ESR_ELx_COND_SHIFT;
~u │ │└>0000f0ec: b94047e8 ldr w8, [sp, #68] <- 0000f0e8(b)<kvm_condition_valid32+0x5c>
~u │ │ 0000f0f0: 2a0803e9 mov w9, w8
u │ │ kvm_vcpu_get_condition:232.36 (kvm_emulate.h) sbepe return (esr & ESR_ELx_COND_MASK) ║>> ESR_ELx_COND_SHIFT;
~u │ │ 0000f0f4: d3545d29 ubfx x9, x9, #20, #4
u │ │ kvm_vcpu_get_condition:232.3 (kvm_emulate.h) sbepe ║return (esr & ESR_ELx_COND_MASK) >> ESR_ELx_COND_SHIFT;
~u │ │ 0000f0f8: b90057e9 str w9, [sp, #84]
~u │ ┌┼──0000f0fc: 14000004 b f10c <kvm_condition_valid32+0x7c>
│ ││
~u │ │└─>0000f100: 12800008 mov w8, #0xffffffff // #-1 <- 0000f0e4(b.cc)<kvm_condition_valid32+0x70>
u │ │ kvm_vcpu_get_condition:234.2 (kvm_emulate.h) Sbepe ║return -1;
~u │ │ 0000f104: b90057e8 str w8, [sp, #84]
~u │ │ ┌─0000f108: 14000001 b f10c <kvm_condition_valid32+0x7c>
│ │ │
u │ │ │ kvm_vcpu_get_condition:235.1 (kvm_emulate.h) Sbepe ║}
~u │ └>└>0000f10c: b94057e8 ldr w8, [sp, #84] <- 0000f0fc(b)<kvm_condition_valid32+0x7c>,0000f108(b)<kvm_condition_valid32+0x7c>
-vcpu param pointer(const(struct kvm_vcpu<a9b6b>/<a9eeb>)) 0xf0c8 0xf110 (DW_OP_fbreg 0x48) kvm_vcpu_get_condition(inlined):kvm_condition_valid32:arch/arm64/kvm/hyp/nvhe/../aarch32.c
-esr var typedef(u32=typedef(__u32=unsigned int (base type, DW_ATE_unsigned size:4))) 0xf0c8 0xf110 (DW_OP_fbreg 0x44) kvm_vcpu_get_condition(inlined):kvm_condition_valid32:arch/arm64/kvm/hyp/nvhe/../aarch32.c
kvm_condition_valid32:58.7 (aarch32.c) Sbepe cond ║= kvm_vcpu_get_condition(vcpu);
~ 0000f110: b90013e8 str w8, [sp, #16]
kvm_condition_valid32:59.6 (aarch32.c) Sbepe if (║cond == 0xE)
~ 0000f114: b94013e8 ldr w8, [sp, #16]
kvm_condition_valid32:59.6 (aarch32.c) sbepe if (║cond == 0xE)
~ 0000f118: 71003908 subs w8, w8, #0xe
~ │ ┌──0000f11c: 540000a1 b.ne f130 <kvm_condition_valid32+0xa0> // b.any
│ │
~ │ │┌─0000f120: 14000001 b f124 <kvm_condition_valid32+0x94> <- 0000f11c(b.cc-succ)<fallthrough>
│ ││
~ │ │└>0000f124: 52800028 mov w8, #0x1 // #1 <- 0000f120(b)<kvm_condition_valid32+0x94>
│ │ kvm_condition_valid32:60.3 (aarch32.c) Sbepe ║return true;
~ │ │ 0000f128: 3900bfe8 strb w8, [sp, #47]
~ │ ┌────┼──0000f12c: 1400002a b f1d4 <kvm_condition_valid32+0x144>
│ │ │
│ │ │ kvm_condition_valid32:62.20 (aarch32.c) Sbepe cpsr = *vcpu_cpsr(║vcpu);
~ │ │ └─>0000f130: f94013e8 ldr x8, [sp, #32] <- 0000f11c(b.cc)<kvm_condition_valid32+0xa0>
~ │ │ 0000f134: f9001fe8 str x8, [sp, #56]
w: 0xf138 0xf13c vcpu_cpsr inlined from kvm_condition_valid32:62 (aarch32.c) <b80de>:
w │ │ vcpu_cpsr:137.27 (kvm_emulate.h) Sbepe return (unsigned long *)&║vcpu_gp_regs(vcpu)->pstate;
+vcpu param pointer(const(struct kvm_vcpu<a9b6b>/<a9eeb>)) 0xf138 0xf13c (DW_OP_fbreg 0x38) vcpu_cpsr(inlined):kvm_condition_valid32:arch/arm64/kvm/hyp/nvhe/../aarch32.c
~w │ │ 0000f138: f9401fe8 ldr x8, [sp, #56]
-vcpu param pointer(const(struct kvm_vcpu<a9b6b>/<a9eeb>)) 0xf138 0xf13c (DW_OP_fbreg 0x38) vcpu_cpsr(inlined):kvm_condition_valid32:arch/arm64/kvm/hyp/nvhe/../aarch32.c
│ │ kvm_condition_valid32:62.9 (aarch32.c) Sbepe cpsr = ║*vcpu_cpsr(vcpu);
~ │ │ 0000f13c: f9413508 ldr x8, [x8, #616]
│ │ kvm_condition_valid32:62.7 (aarch32.c) sbepe cpsr ║= *vcpu_cpsr(vcpu);
~ │ │ 0000f140: f9000fe8 str x8, [sp, #24]
│ │ kvm_condition_valid32:64.6 (aarch32.c) Sbepe if (║cond < 0) {
~ │ │ 0000f144: b94013e9 ldr w9, [sp, #16]
│ │ kvm_condition_valid32:64.6 (aarch32.c) sbepe if (║cond < 0) {
~ │ │ ┌───0000f148: 36f80229 tbz w9, #31, f18c <kvm_condition_valid32+0xfc>
│ │ │
~ │ │ │ ┌─0000f14c: 14000001 b f150 <kvm_condition_valid32+0xc0> <- 0000f148(b.cc-succ)<fallthrough>
│ │ │ │
│ │ │ │ kvm_condition_valid32:68.10 (aarch32.c) Sbepe it = ((║cpsr >> 8) & 0xFC) | ((cpsr >> 25) & 0x3);
+it var long unsigned int (base type, DW_ATE_unsigned size:8) 0xf150 0xf18c (DW_OP_fbreg 0x8) lexblock:kvm_condition_valid32:arch/arm64/kvm/hyp/nvhe/../aarch32.c:66
~ │ │ │ └>0000f150: f9400fe8 ldr x8, [sp, #24] <- 0000f14c(b)<kvm_condition_valid32+0xc0>
│ │ │ kvm_condition_valid32:68.15 (aarch32.c) sbepe it = ((cpsr ║>> 8) & 0xFC) | ((cpsr >> 25) & 0x3);
~ │ │ │ 0000f154: d348fd09 lsr x9, x8, #8
│ │ │ kvm_condition_valid32:68.21 (aarch32.c) sbepe it = ((cpsr >> 8) ║& 0xFC) | ((cpsr >> 25) & 0x3);
~ │ │ │ 0000f158: 927e1529 and x9, x9, #0xfc
│ │ │ kvm_condition_valid32:68.29 (aarch32.c) sbepe it = ((cpsr >> 8) & 0xFC) ║| ((cpsr >> 25) & 0x3);
~ │ │ │ 0000f15c: b3596909 bfxil x9, x8, #25, #2
│ │ │ kvm_condition_valid32:68.6 (aarch32.c) sbepe it ║= ((cpsr >> 8) & 0xFC) | ((cpsr >> 25) & 0x3);
~ │ │ │ 0000f160: f90007e9 str x9, [sp, #8]
│ │ │ kvm_condition_valid32:71.7 (aarch32.c) Sbepe if (║it == 0)
~ │ │ │ 0000f164: f94007e8 ldr x8, [sp, #8]
│ │ │ kvm_condition_valid32:71.7 (aarch32.c) sbepe if (║it == 0)
~ │ │ │┌──0000f168: b50000a8 cbnz x8, f17c <kvm_condition_valid32+0xec>
│ │ ││
~ │ │ ││┌─0000f16c: 14000001 b f170 <kvm_condition_valid32+0xe0> <- 0000f168(b.cc-succ)<fallthrough>
│ │ │││
~ │ │ ││└>0000f170: 52800028 mov w8, #0x1 // #1 <- 0000f16c(b)<kvm_condition_valid32+0xe0>
│ │ ││ kvm_condition_valid32:72.4 (aarch32.c) Sbepe ║return true;
~ │ │ ││ 0000f174: 3900bfe8 strb w8, [sp, #47]
~ │ │ ┌─┼┼──0000f178: 14000017 b f1d4 <kvm_condition_valid32+0x144>
│ │ │ ││
│ │ │ ││ kvm_condition_valid32:75.11 (aarch32.c) Sbepe cond = (║it >> 4);
~ │ │ │ │└─>0000f17c: f94007e8 ldr x8, [sp, #8] <- 0000f168(b.cc)<kvm_condition_valid32+0xec>
│ │ │ │ kvm_condition_valid32:75.14 (aarch32.c) sbepe cond = (it ║>> 4);
~ │ │ │ │ 0000f180: d344fd08 lsr x8, x8, #4
│ │ │ │ kvm_condition_valid32:75.8 (aarch32.c) sbepe cond ║= (it >> 4);
~ │ │ │ │ 0000f184: b90013e8 str w8, [sp, #16]
│ │ │ │ kvm_condition_valid32:76.2 (aarch32.c) Sbepe }
~ │ │ │ │ ┌─0000f188: 14000001 b f18c <kvm_condition_valid32+0xfc>
-it var long unsigned int (base type, DW_ATE_unsigned size:8) 0xf150 0xf18c (DW_OP_fbreg 0x8) lexblock:kvm_condition_valid32:arch/arm64/kvm/hyp/nvhe/../aarch32.c:66
│ │ │ │ │
│ │ │ │ │ kvm_condition_valid32:78.14 (aarch32.c) Sbepe cpsr_cond = ║cpsr >> 28;
~ │ │ │ └>└>0000f18c: f9400fe8 ldr x8, [sp, #24] <- 0000f148(b.cc)<kvm_condition_valid32+0xfc>,0000f188(b)<kvm_condition_valid32+0xfc>
│ │ │ kvm_condition_valid32:78.19 (aarch32.c) sbepe cpsr_cond = cpsr ║>> 28;
~ │ │ │ 0000f190: d35cfd08 lsr x8, x8, #28
│ │ │ kvm_condition_valid32:78.12 (aarch32.c) sbepe cpsr_cond ║= cpsr >> 28;
~ │ │ │ 0000f194: b90017e8 str w8, [sp, #20]
│ │ │ kvm_condition_valid32:80.16 (aarch32.c) Sbepe if (!((cc_map[║cond] >> cpsr_cond) & 1))
~ │ │ │ 0000f198: b98013e9 ldrsw x9, [sp, #16]
│ │ │ kvm_condition_valid32:80.9 (aarch32.c) sbepe if (!((║cc_map[cond] >> cpsr_cond) & 1))
~ │ │ │ 0000f19c: 9000004a adrp x10, 17000 <___kvm_hyp_init+0x3c>
~ │ │ │ 0000f1a0: 913e314a add x10, x10, #0xf8c
~ │ │ │ 0000f1a4: 78697948 ldrh w8, [x10, x9, lsl #1]
│ │ │ kvm_condition_valid32:80.25 (aarch32.c) sbepe if (!((cc_map[cond] >> ║cpsr_cond) & 1))
~ │ │ │ 0000f1a8: b94017eb ldr w11, [sp, #20]
~ │ │ │ 0000f1ac: 2a0b03e0 mov w0, w11
│ │ │ kvm_condition_valid32:80.22 (aarch32.c) sbepe if (!((cc_map[cond] ║>> cpsr_cond) & 1))
~ │ │ │ 0000f1b0: 1ac02508 lsr w8, w8, w0
│ │ │ kvm_condition_valid32:80.6 (aarch32.c) sbepe if (║!((cc_map[cond] >> cpsr_cond) & 1))
~ │ │ │ ┌──0000f1b4: 370000a8 tbnz w8, #0, f1c8 <kvm_condition_valid32+0x138>
│ │ │ │
~ │ │ │ │┌─0000f1b8: 14000001 b f1bc <kvm_condition_valid32+0x12c> <- 0000f1b4(b.cc-succ)<fallthrough>
│ │ │ ││
~ │ │ │ │└>0000f1bc: 2a1f03e8 mov w8, wzr <- 0000f1b8(b)<kvm_condition_valid32+0x12c>
│ │ │ │ kvm_condition_valid32:81.3 (aarch32.c) Sbepe ║return false;
~ │ │ │ │ 0000f1c0: 3900bfe8 strb w8, [sp, #47]
~ │ │ │ ┌┼──0000f1c4: 14000004 b f1d4 <kvm_condition_valid32+0x144>
│ │ │ ││
~ │ │ │ │└─>0000f1c8: 52800028 mov w8, #0x1 // #1 <- 0000f1b4(b.cc)<kvm_condition_valid32+0x138>
│ │ │ │ kvm_condition_valid32:83.2 (aarch32.c) Sbepe ║return true;
~ │ │ │ │ 0000f1cc: 3900bfe8 strb w8, [sp, #47]
~ │ │ │ │ ┌─0000f1d0: 14000001 b f1d4 <kvm_condition_valid32+0x144>
│ │ │ │ │
│ │ │ │ │ kvm_condition_valid32:84.1 (aarch32.c) Sbepe ║}
~ └>└>└>└>└>0000f1d4: 3940bfe0 ldrb w0, [sp, #47] <- 0000f0bc(b)<kvm_condition_valid32+0x144>,0000f12c(b)<kvm_condition_valid32+0x144>,0000f178(b)<kvm_condition_valid32+0x144>,0000f1c4(b)<kvm_condition_valid32+0x144>,0000f1d0(b)<kvm_condition_valid32+0x144>
~ 0000f1d8: 910183ff add sp, sp, #0x60
0000f094 CFA:r31+96
~ 0000f1dc: d65f03c0 ret
-vcpu param pointer(const(struct kvm_vcpu<a9b6b>/<a9eeb>)) 0xf090 0xf1e0 (DW_OP_fbreg 0x20) kvm_condition_valid32:arch/arm64/kvm/hyp/nvhe/../aarch32.c:47
-cpsr var long unsigned int (base type, DW_ATE_unsigned size:8) 0xf090 0xf1e0 (DW_OP_fbreg 0x18) kvm_condition_valid32:arch/arm64/kvm/hyp/nvhe/../aarch32.c:49
-cpsr_cond var typedef(u32=typedef(__u32=unsigned int (base type, DW_ATE_unsigned size:4))) 0xf090 0xf1e0 (DW_OP_fbreg 0x14) kvm_condition_valid32:arch/arm64/kvm/hyp/nvhe/../aarch32.c:50
-cond var int (base type, DW_ATE_signed size:4) 0xf090 0xf1e0 (DW_OP_fbreg 0x10) kvm_condition_valid32:arch/arm64/kvm/hyp/nvhe/../aarch32.c:51
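The block around 0xf150-0xf194 above is the Thumb IT-state handling of kvm_condition_valid32: the IT bits are gathered from PSTATE, the condition is taken from their top four bits, and the flag nibble is extracted for the cc_map lookup. The following standalone sketch reproduces only that arithmetic, pieced together from the interleaved source lines; the kernel's cc_map table and surrounding context are deliberately not reproduced here.

```c
#include <stdint.h>
#include <stdio.h>

/*
 * Standalone sketch of the condition/IT-state arithmetic shown in the
 * kvm_condition_valid32 listing above (the lsr/and/bfxil sequence at
 * 0xf154-0xf15c and the lsr #28 at 0xf190).  The cc_map[] lookup from
 * line 80 is not reproduced.
 */
int main(void)
{
	uint64_t cpsr = 0x20000030;          /* example AArch32 PSTATE value */

	/* it = ((cpsr >> 8) & 0xFC) | ((cpsr >> 25) & 0x3);  (line 68) */
	uint64_t it = ((cpsr >> 8) & 0xFC) | ((cpsr >> 25) & 0x3);

	/* Inside an IT block, the condition is the top four IT bits. (line 75) */
	int cond = (int)(it >> 4);

	/* Flag nibble used to index the condition table.  (line 78) */
	uint32_t cpsr_cond = (uint32_t)(cpsr >> 28);

	printf("it=%#llx cond=%d cpsr_cond=%#x\n",
	       (unsigned long long)it, cond, cpsr_cond);
	return 0;
}
```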
**0000f1e0 <kvm_skip_instr32>:
+ kvm_skip_instr32 params:
+vcpu param pointer(struct kvm_vcpu<a9b6b>/<a9eeb>) 0xf1e0 0xf298 (DW_OP_breg31 0x10)
kvm_skip_instr32:127.0 (aarch32.c) Sbepe ║{
+vcpu param pointer(struct kvm_vcpu<a9b6b>/<a9eeb>) 0xf1e0 0xf298 (DW_OP_breg31 0x10) kvm_skip_instr32:arch/arm64/kvm/hyp/nvhe/../aarch32.c:126
+pc var typedef(u32=typedef(__u32=unsigned int (base type, DW_ATE_unsigned size:4))) 0xf1e0 0xf298 (DW_OP_breg31 0xc) kvm_skip_instr32:arch/arm64/kvm/hyp/nvhe/../aarch32.c:128
+is_thumb var typedef(bool=_Bool (base type, DW_ATE_boolean size:1)) 0xf1e0 0xf298 (DW_OP_breg31 0x8) kvm_skip_instr32:arch/arm64/kvm/hyp/nvhe/../aarch32.c:129
~ 0000f1e0: d10143ff sub sp, sp, #0x50 <- 00003a80(bl)<kvm_skip_instr32>,0000e6f8(bl)<kvm_skip_instr32>,0000f840(bl)<kvm_skip_instr32>
~ 0000f1e4: a9047bfd stp x29, x30, [sp, #64]
0000f1e0 CFA:r31 r29:u r30:u
~ 0000f1e8: 910103fd add x29, sp, #0x40
~ 0000f1ec: f9000be0 str x0, [sp, #16]
kvm_skip_instr32:128.20 (aarch32.c) SbePe u32 pc = *vcpu_pc(║vcpu);
~ 0000f1f0: f9400be8 ldr x8, [sp, #16]
~ 0000f1f4: f9000fe8 str x8, [sp, #24]
x: 0xf1f8 0xf1fc vcpu_pc inlined from kvm_skip_instr32:128 (aarch32.c) <b8189>:
x vcpu_pc:132.27 (kvm_emulate.h) Sbepe return (unsigned long *)&║vcpu_gp_regs(vcpu)->pc;
+vcpu param pointer(const(struct kvm_vcpu<a9b6b>/<a9eeb>)) 0xf1f8 0xf1fc (DW_OP_breg31 0x18) vcpu_pc(inlined):kvm_skip_instr32:arch/arm64/kvm/hyp/nvhe/../aarch32.c
~x 0000f1f8: f9400fe8 ldr x8, [sp, #24]
-vcpu param pointer(const(struct kvm_vcpu<a9b6b>/<a9eeb>)) 0xf1f8 0xf1fc (DW_OP_breg31 0x18) vcpu_pc(inlined):kvm_skip_instr32:arch/arm64/kvm/hyp/nvhe/../aarch32.c
kvm_skip_instr32:128.11 (aarch32.c) Sbepe u32 pc = ║*vcpu_pc(vcpu);
~ 0000f1fc: f9413108 ldr x8, [x8, #608]
kvm_skip_instr32:128.6 (aarch32.c) sbepe u32 ║pc = *vcpu_pc(vcpu);
~ 0000f200: b9000fe8 str w8, [sp, #12]
kvm_skip_instr32:131.27 (aarch32.c) Sbepe is_thumb = !!(*vcpu_cpsr(║vcpu) & PSR_AA32_T_BIT);
~ 0000f204: f9400be9 ldr x9, [sp, #16]
~ 0000f208: f90013e9 str x9, [sp, #32]
y: 0xf20c 0xf210 vcpu_cpsr inlined from kvm_skip_instr32:131 (aarch32.c) <b81a6>:
y vcpu_cpsr:137.27 (kvm_emulate.h) Sbepe return (unsigned long *)&║vcpu_gp_regs(vcpu)->pstate;
+vcpu param pointer(const(struct kvm_vcpu<a9b6b>/<a9eeb>)) 0xf20c 0xf210 (DW_OP_breg31 0x20) vcpu_cpsr(inlined):kvm_skip_instr32:arch/arm64/kvm/hyp/nvhe/../aarch32.c
~y 0000f20c: f94013e9 ldr x9, [sp, #32]
-vcpu param pointer(const(struct kvm_vcpu<a9b6b>/<a9eeb>)) 0xf20c 0xf210 (DW_OP_breg31 0x20) vcpu_cpsr(inlined):kvm_skip_instr32:arch/arm64/kvm/hyp/nvhe/../aarch32.c
kvm_skip_instr32:131.16 (aarch32.c) Sbepe is_thumb = !!(║*vcpu_cpsr(vcpu) & PSR_AA32_T_BIT);
~ 0000f210: b9426928 ldr w8, [x9, #616]
kvm_skip_instr32:131.14 (aarch32.c) sbepe is_thumb = !║!(*vcpu_cpsr(vcpu) & PSR_AA32_T_BIT);
~ 0000f214: 53051508 ubfx w8, w8, #5, #1
kvm_skip_instr32:131.11 (aarch32.c) sbepe is_thumb ║= !!(*vcpu_cpsr(vcpu) & PSR_AA32_T_BIT);
~ 0000f218: 390023e8 strb w8, [sp, #8]
kvm_skip_instr32:132.6 (aarch32.c) Sbepe if (║is_thumb && !kvm_vcpu_trap_il_is32bit(vcpu))
~ 0000f21c: 394023e8 ldrb w8, [sp, #8]
kvm_skip_instr32:132.15 (aarch32.c) sbepe if (is_thumb ║&& !kvm_vcpu_trap_il_is32bit(vcpu))
~ ┌─────0000f220: 360001e8 tbz w8, #0, f25c <kvm_skip_instr32+0x7c>
~ │ ┌─0000f224: 14000001 b f228 <kvm_skip_instr32+0x48> <- 0000f220(b.cc-succ)<fallthrough>
│ │
│ │ kvm_skip_instr32:132.44 (aarch32.c) sbepe if (is_thumb && !kvm_vcpu_trap_il_is32bit(║vcpu))
~ │ └>0000f228: f9400be8 ldr x8, [sp, #16] <- 0000f224(b)<kvm_skip_instr32+0x48>
~ 0000f22c: f81f03a8 stur x8, [x29, #-16]
z: 0xf230 0xf244 kvm_vcpu_trap_il_is32bit inlined from kvm_skip_instr32:132 (aarch32.c) <b81c3>:
z kvm_vcpu_trap_il_is32bit:306.29 (kvm_emulate.h) Sbepe return !!(kvm_vcpu_get_esr(║vcpu) & ESR_ELx_IL);
+vcpu param pointer(const(struct kvm_vcpu<a9b6b>/<a9eeb>)) 0xf230 0xf244 (DW_OP_fbreg -0x10) kvm_vcpu_trap_il_is32bit(inlined):kvm_skip_instr32:arch/arm64/kvm/hyp/nvhe/../aarch32.c
~z 0000f230: f85f03a8 ldur x8, [x29, #-16]
~z 0000f234: f81f83a8 stur x8, [x29, #-8]
a: 0xf238 0xf23c kvm_vcpu_get_esr inlined from kvm_vcpu_trap_il_is32bit:306 (kvm_emulate.h) <b81df>:<b81c3>:
za kvm_vcpu_get_esr:224.9 (kvm_emulate.h) Sbepe return ║vcpu->arch.fault.esr_el2;
+vcpu param pointer(const(struct kvm_vcpu<a9b6b>/<a9eeb>)) 0xf238 0xf23c (DW_OP_fbreg -0x8) kvm_vcpu_get_esr(inlined):kvm_vcpu_trap_il_is32bit(inlined):kvm_skip_instr32:arch/arm64/kvm/hyp/nvhe/../aarch32.c
~za 0000f238: f85f83a8 ldur x8, [x29, #-8]
-vcpu param pointer(const(struct kvm_vcpu<a9b6b>/<a9eeb>)) 0xf238 0xf23c (DW_OP_fbreg -0x8) kvm_vcpu_get_esr(inlined):kvm_vcpu_trap_il_is32bit(inlined):kvm_skip_instr32:arch/arm64/kvm/hyp/nvhe/../aarch32.c
z kvm_vcpu_trap_il_is32bit:306.12 (kvm_emulate.h) Sbepe return !!(║kvm_vcpu_get_esr(vcpu) & ESR_ELx_IL);
~z 0000f23c: b9488909 ldr w9, [x8, #2184]
~z 0000f240: 2a0903e0 mov w0, w9
-vcpu param pointer(const(struct kvm_vcpu<a9b6b>/<a9eeb>)) 0xf230 0xf244 (DW_OP_fbreg -0x10) kvm_vcpu_trap_il_is32bit(inlined):kvm_skip_instr32:arch/arm64/kvm/hyp/nvhe/../aarch32.c
kvm_skip_instr32:132.6 (aarch32.c) Sbepe if (║is_thumb && !kvm_vcpu_trap_il_is32bit(vcpu))
~ │ ┌──0000f244: 37c800c0 tbnz w0, #25, f25c <kvm_skip_instr32+0x7c>
│ │
~ │ │┌─0000f248: 14000001 b f24c <kvm_skip_instr32+0x6c> <- 0000f244(b.cc-succ)<fallthrough>
│ ││
│ ││ kvm_skip_instr32:133.6 (aarch32.c) Sbepe pc ║+= 2;
~ │ │└>0000f24c: b9400fe8 ldr w8, [sp, #12] <- 0000f248(b)<kvm_skip_instr32+0x6c>
~ │ │ 0000f250: 11000908 add w8, w8, #0x2
~ │ │ 0000f254: b9000fe8 str w8, [sp, #12]
│ │ kvm_skip_instr32:133.3 (aarch32.c) sbepe ║pc += 2;
~ │ ┌┼──0000f258: 14000005 b f26c <kvm_skip_instr32+0x8c>
│ ││
│ ││ kvm_skip_instr32:135.6 (aarch32.c) Sbepe pc ║+= 4;
~ └>│└─>0000f25c: b9400fe8 ldr w8, [sp, #12] <- 0000f220(b.cc)<kvm_skip_instr32+0x7c>,0000f244(b.cc)<kvm_skip_instr32+0x7c>
~ 0000f260: 11001108 add w8, w8, #0x4
~ 0000f264: b9000fe8 str w8, [sp, #12]
~ │ ┌─0000f268: 14000001 b f26c <kvm_skip_instr32+0x8c>
│ │
│ │ kvm_skip_instr32:137.19 (aarch32.c) Sbepe *vcpu_pc(vcpu) = ║pc;
~ └>└>0000f26c: b9400fe8 ldr w8, [sp, #12] <- 0000f258(b)<kvm_skip_instr32+0x8c>,0000f268(b)<kvm_skip_instr32+0x8c>
~ 0000f270: 2a0803e9 mov w9, w8
kvm_skip_instr32:137.11 (aarch32.c) sbepe *vcpu_pc(║vcpu) = pc;
~ 0000f274: f9400bea ldr x10, [sp, #16]
~ 0000f278: f81e83aa stur x10, [x29, #-24]
b: 0xf27c 0xf280 vcpu_pc inlined from kvm_skip_instr32:137 (aarch32.c) <b81fe>:
b vcpu_pc:132.27 (kvm_emulate.h) Sbepe return (unsigned long *)&║vcpu_gp_regs(vcpu)->pc;
+vcpu param pointer(const(struct kvm_vcpu<a9b6b>/<a9eeb>)) 0xf27c 0xf280 (DW_OP_fbreg -0x18) vcpu_pc(inlined):kvm_skip_instr32:arch/arm64/kvm/hyp/nvhe/../aarch32.c
~b 0000f27c: f85e83aa ldur x10, [x29, #-24]
-vcpu param pointer(const(struct kvm_vcpu<a9b6b>/<a9eeb>)) 0xf27c 0xf280 (DW_OP_fbreg -0x18) vcpu_pc(inlined):kvm_skip_instr32:arch/arm64/kvm/hyp/nvhe/../aarch32.c
kvm_skip_instr32:137.17 (aarch32.c) Sbepe *vcpu_pc(vcpu) ║= pc;
~ 0000f280: f9013149 str x9, [x10, #608]
kvm_skip_instr32:139.21 (aarch32.c) Sbepe kvm_adjust_itstate(║vcpu);
~ 0000f284: f9400be0 ldr x0, [sp, #16]
kvm_skip_instr32:139.2 (aarch32.c) sbepe ║kvm_adjust_itstate(vcpu);
~ 0000f288: 94000004 bl f298 <kvm_adjust_itstate>
kvm_skip_instr32:140.1 (aarch32.c) Sbepe ║}
~ 0000f28c: a9447bfd ldp x29, x30, [sp, #64] <- 0000f288(bl-succ)<return>
~ 0000f290: 910143ff add sp, sp, #0x50
0000f1ec CFA:r29+16 r29:c-16 r30:c-8
~ 0000f294: d65f03c0 ret
-vcpu param pointer(struct kvm_vcpu<a9b6b>/<a9eeb>) 0xf1e0 0xf298 (DW_OP_breg31 0x10) kvm_skip_instr32:arch/arm64/kvm/hyp/nvhe/../aarch32.c:126
-pc var typedef(u32=typedef(__u32=unsigned int (base type, DW_ATE_unsigned size:4))) 0xf1e0 0xf298 (DW_OP_breg31 0xc) kvm_skip_instr32:arch/arm64/kvm/hyp/nvhe/../aarch32.c:128
-is_thumb var typedef(bool=_Bool (base type, DW_ATE_boolean size:1)) 0xf1e0 0xf298 (DW_OP_breg31 0x8) kvm_skip_instr32:arch/arm64/kvm/hyp/nvhe/../aarch32.c:129
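kvm_skip_instr32 above advances the guest PC by 2 or 4 depending on whether the trapped instruction was a 16-bit Thumb encoding. A small standalone model of that decision follows; the bit positions are read straight off the listing (`ubfx w8, w8, #5, #1` for PSR_AA32_T_BIT, `tbnz w0, #25` for ESR_ELx_IL), and the function name is made up for the example.

```c
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/*
 * Sketch of the PC-advance decision in the kvm_skip_instr32 listing above.
 * Simplified standalone model, not the kernel function.
 */
static uint32_t next_pc(uint32_t pc, uint64_t cpsr, uint64_t esr)
{
	bool is_thumb   = (cpsr >> 5) & 1;   /* PSR_AA32_T_BIT, see 0xf214 */
	bool il_is32bit = (esr >> 25) & 1;   /* ESR_ELx_IL, see 0xf244 */

	if (is_thumb && !il_is32bit)
		pc += 2;                     /* 16-bit Thumb encoding */
	else
		pc += 4;                     /* 32-bit encoding */
	return pc;
}

int main(void)
{
	printf("%#x\n", next_pc(0x8000, 1u << 5, 0));       /* -> 0x8002 */
	printf("%#x\n", next_pc(0x8000, 0, 1ull << 25));    /* -> 0x8004 */
	return 0;
}
```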
**0000f298 <kvm_adjust_itstate>:
+ kvm_adjust_itstate params:
+vcpu param pointer(struct kvm_vcpu<a9b6b>/<a9eeb>) 0xf298 0xf3c0 (DW_OP_fbreg 0x28)
kvm_adjust_itstate:97.0 (aarch32.c) Sbepe ║{
0000f298 CFA:r31
+vcpu param pointer(struct kvm_vcpu<a9b6b>/<a9eeb>) 0xf298 0xf3c0 (DW_OP_fbreg 0x28) kvm_adjust_itstate:arch/arm64/kvm/hyp/nvhe/../aarch32.c:96
+itbits var long unsigned int (base type, DW_ATE_unsigned size:8) 0xf298 0xf3c0 (DW_OP_fbreg 0x20) kvm_adjust_itstate:arch/arm64/kvm/hyp/nvhe/../aarch32.c:98
+cond var long unsigned int (base type, DW_ATE_unsigned size:8) 0xf298 0xf3c0 (DW_OP_fbreg 0x18) kvm_adjust_itstate:arch/arm64/kvm/hyp/nvhe/../aarch32.c:98
+cpsr var long unsigned int (base type, DW_ATE_unsigned size:8) 0xf298 0xf3c0 (DW_OP_fbreg 0x10) kvm_adjust_itstate:arch/arm64/kvm/hyp/nvhe/../aarch32.c:99
+is_arm var typedef(bool=_Bool (base type, DW_ATE_boolean size:1)) 0xf298 0xf3c0 (DW_OP_fbreg 0xc) kvm_adjust_itstate:arch/arm64/kvm/hyp/nvhe/../aarch32.c:100
~ 0000f298: d10103ff sub sp, sp, #0x40 <- 0000f288(bl)<kvm_adjust_itstate>
~ 0000f29c: f90017e0 str x0, [sp, #40]
kvm_adjust_itstate:99.34 (aarch32.c) SbePe unsigned long cpsr = *vcpu_cpsr(║vcpu);
~ 0000f2a0: f94017e8 ldr x8, [sp, #40]
~ 0000f2a4: f9001be8 str x8, [sp, #48]
c: 0xf2a8 0xf2ac vcpu_cpsr inlined from kvm_adjust_itstate:99 (aarch32.c) <b8277>:
c vcpu_cpsr:137.27 (kvm_emulate.h) Sbepe return (unsigned long *)&║vcpu_gp_regs(vcpu)->pstate;
+vcpu param pointer(const(struct kvm_vcpu<a9b6b>/<a9eeb>)) 0xf2a8 0xf2ac (DW_OP_fbreg 0x30) vcpu_cpsr(inlined):kvm_adjust_itstate:arch/arm64/kvm/hyp/nvhe/../aarch32.c
~c 0000f2a8: f9401be8 ldr x8, [sp, #48]
-vcpu param pointer(const(struct kvm_vcpu<a9b6b>/<a9eeb>)) 0xf2a8 0xf2ac (DW_OP_fbreg 0x30) vcpu_cpsr(inlined):kvm_adjust_itstate:arch/arm64/kvm/hyp/nvhe/../aarch32.c
kvm_adjust_itstate:99.23 (aarch32.c) Sbepe unsigned long cpsr = ║*vcpu_cpsr(vcpu);
~ 0000f2ac: f9413508 ldr x8, [x8, #616]
kvm_adjust_itstate:99.16 (aarch32.c) sbepe unsigned long ║cpsr = *vcpu_cpsr(vcpu);
~ 0000f2b0: f9000be8 str x8, [sp, #16]
kvm_adjust_itstate:100.18 (aarch32.c) Sbepe bool is_arm = !(║cpsr & PSR_AA32_T_BIT);
~ 0000f2b4: f9400be8 ldr x8, [sp, #16]
kvm_adjust_itstate:100.16 (aarch32.c) sbepe bool is_arm = ║!(cpsr & PSR_AA32_T_BIT);
~ 0000f2b8: f27b0108 ands x8, x8, #0x20
~ 0000f2bc: 1a9f17e9 cset w9, eq // eq = none
kvm_adjust_itstate:100.7 (aarch32.c) sbepe bool ║is_arm = !(cpsr & PSR_AA32_T_BIT);
~ 0000f2c0: 390033e9 strb w9, [sp, #12]
kvm_adjust_itstate:102.6 (aarch32.c) Sbepe if (║is_arm || !(cpsr & PSR_AA32_IT_MASK))
~ 0000f2c4: 394033e9 ldrb w9, [sp, #12]
kvm_adjust_itstate:102.13 (aarch32.c) sbepe if (is_arm ║|| !(cpsr & PSR_AA32_IT_MASK))
~ ┌────0000f2c8: 37000109 tbnz w9, #0, f2e8 <kvm_adjust_itstate+0x50>
~ │ ┌─0000f2cc: 14000001 b f2d0 <kvm_adjust_itstate+0x38> <- 0000f2c8(b.cc-succ)<fallthrough>
│ │
│ │ kvm_adjust_itstate:102.6 (aarch32.c) sbepe if (║is_arm || !(cpsr & PSR_AA32_IT_MASK))
~ │ └>0000f2d0: b94013e8 ldr w8, [sp, #16] <- 0000f2cc(b)<kvm_adjust_itstate+0x38>
~ 0000f2d4: 529f8009 mov w9, #0xfc00 // #64512
~ 0000f2d8: 72a0c009 movk w9, #0x600, lsl #16
~ 0000f2dc: 0a090108 and w8, w8, w9
~ │ ┌──0000f2e0: 35000068 cbnz w8, f2ec <kvm_adjust_itstate+0x54>
│ │
~ │ │┌─0000f2e4: 14000001 b f2e8 <kvm_adjust_itstate+0x50> <- 0000f2e0(b.cc-succ)<fallthrough>
│ ││
│ ││ kvm_adjust_itstate:103.3 (aarch32.c) Sbepe ║return;
~ ┌─└>│└>0000f2e8: 14000034 b f3b8 <kvm_adjust_itstate+0x120> <- 0000f2c8(b.cc)<kvm_adjust_itstate+0x50>,0000f2e4(b)<kvm_adjust_itstate+0x50>
│ │
│ │ kvm_adjust_itstate:105.10 (aarch32.c) Sbepe cond = (║cpsr & 0xe000) >> 13;
~ │ └─>0000f2ec: f9400be8 ldr x8, [sp, #16] <- 0000f2e0(b.cc)<kvm_adjust_itstate+0x54>
kvm_adjust_itstate:105.25 (aarch32.c) sbepe cond = (cpsr & 0xe000) ║>> 13;
~ 0000f2f0: d34d3d08 ubfx x8, x8, #13, #3
kvm_adjust_itstate:105.7 (aarch32.c) sbepe cond ║= (cpsr & 0xe000) >> 13;
~ 0000f2f4: f9000fe8 str x8, [sp, #24]
kvm_adjust_itstate:106.12 (aarch32.c) Sbepe itbits = (║cpsr & 0x1c00) >> (10 - 2);
~ 0000f2f8: f9400be8 ldr x8, [sp, #16]
kvm_adjust_itstate:106.17 (aarch32.c) sbepe itbits = (cpsr ║& 0x1c00) >> (10 - 2);
~ 0000f2fc: 92760908 and x8, x8, #0x1c00
kvm_adjust_itstate:106.27 (aarch32.c) sbepe itbits = (cpsr & 0x1c00) ║>> (10 - 2);
~ 0000f300: d348fd08 lsr x8, x8, #8
kvm_adjust_itstate:106.9 (aarch32.c) sbepe itbits ║= (cpsr & 0x1c00) >> (10 - 2);
~ 0000f304: f90013e8 str x8, [sp, #32]
kvm_adjust_itstate:107.13 (aarch32.c) Sbepe itbits |= (║cpsr & (0x3 << 25)) >> 25;
~ 0000f308: f9400be8 ldr x8, [sp, #16]
kvm_adjust_itstate:107.18 (aarch32.c) sbepe itbits |= (cpsr ║& (0x3 << 25)) >> 25;
~ 0000f30c: 92670508 and x8, x8, #0x6000000
kvm_adjust_itstate:107.9 (aarch32.c) sbepe itbits ║|= (cpsr & (0x3 << 25)) >> 25;
~ 0000f310: f94013e9 ldr x9, [sp, #32]
~ 0000f314: aa486528 orr x8, x9, x8, lsr #25
~ 0000f318: f90013e8 str x8, [sp, #32]
kvm_adjust_itstate:110.6 (aarch32.c) Sbepe if (║(itbits & 0x7) == 0)
~ 0000f31c: 394083ea ldrb w10, [sp, #32]
~ 0000f320: 1200094a and w10, w10, #0x7
~ │ ┌──0000f324: 350000ca cbnz w10, f33c <kvm_adjust_itstate+0xa4>
│ │
~ │ │┌─0000f328: 14000001 b f32c <kvm_adjust_itstate+0x94> <- 0000f324(b.cc-succ)<fallthrough>
│ ││
~ │ │└>0000f32c: aa1f03e8 mov x8, xzr <- 0000f328(b)<kvm_adjust_itstate+0x94>
│ │ kvm_adjust_itstate:111.17 (aarch32.c) Sbepe itbits = cond ║= 0;
~ │ │ 0000f330: f9000fe8 str x8, [sp, #24]
│ │ kvm_adjust_itstate:111.10 (aarch32.c) sbepe itbits ║= cond = 0;
~ │ │ 0000f334: f90013e8 str x8, [sp, #32]
│ │ kvm_adjust_itstate:111.3 (aarch32.c) sbepe ║itbits = cond = 0;
~ │ ┌┼──0000f338: 14000007 b f354 <kvm_adjust_itstate+0xbc>
│ ││
│ ││ kvm_adjust_itstate:113.13 (aarch32.c) Sbepe itbits = (║itbits << 1) & 0x1f;
~ │ │└─>0000f33c: b94023e8 ldr w8, [sp, #32] <- 0000f324(b.cc)<kvm_adjust_itstate+0xa4>
│ │ kvm_adjust_itstate:113.20 (aarch32.c) sbepe itbits = (itbits ║<< 1) & 0x1f;
~ │ │ 0000f340: 531f7908 lsl w8, w8, #1
~ │ │ 0000f344: 2a0803e0 mov w0, w8
│ │ kvm_adjust_itstate:113.26 (aarch32.c) sbepe itbits = (itbits << 1) ║& 0x1f;
~ │ │ 0000f348: 927f0c09 and x9, x0, #0x1e
│ │ kvm_adjust_itstate:113.10 (aarch32.c) sbepe itbits ║= (itbits << 1) & 0x1f;
~ │ │ 0000f34c: f90013e9 str x9, [sp, #32]
~ │ │ ┌─0000f350: 14000001 b f354 <kvm_adjust_itstate+0xbc>
│ │ │
│ │ │ kvm_adjust_itstate:115.7 (aarch32.c) Sbepe cpsr ║&= ~PSR_AA32_IT_MASK;
~ │ └>└>0000f354: f9400be8 ldr x8, [sp, #16] <- 0000f338(b)<kvm_adjust_itstate+0xbc>,0000f350(b)<kvm_adjust_itstate+0xbc>
~ 0000f358: 929f8009 mov x9, #0xffffffffffff03ff // #-64513
~ 0000f35c: f2bf3fe9 movk x9, #0xf9ff, lsl #16
~ 0000f360: 8a090108 and x8, x8, x9
~ 0000f364: f9000be8 str x8, [sp, #16]
kvm_adjust_itstate:116.10 (aarch32.c) Sbepe cpsr |= ║cond << 13;
~ 0000f368: f9400fe8 ldr x8, [sp, #24]
kvm_adjust_itstate:116.7 (aarch32.c) sbepe cpsr ║|= cond << 13;
~ 0000f36c: f9400be9 ldr x9, [sp, #16]
~ 0000f370: aa083528 orr x8, x9, x8, lsl #13
~ 0000f374: f9000be8 str x8, [sp, #16]
kvm_adjust_itstate:117.11 (aarch32.c) Sbepe cpsr |= (║itbits & 0x1c) << (10 - 2);
~ 0000f378: f94013e8 ldr x8, [sp, #32]
kvm_adjust_itstate:117.18 (aarch32.c) sbepe cpsr |= (itbits ║& 0x1c) << (10 - 2);
~ 0000f37c: 927e0908 and x8, x8, #0x1c
kvm_adjust_itstate:117.7 (aarch32.c) sbepe cpsr ║|= (itbits & 0x1c) << (10 - 2);
~ 0000f380: f9400be9 ldr x9, [sp, #16]
~ 0000f384: aa082128 orr x8, x9, x8, lsl #8
~ 0000f388: f9000be8 str x8, [sp, #16]
kvm_adjust_itstate:118.11 (aarch32.c) Sbepe cpsr |= (║itbits & 0x3) << 25;
~ 0000f38c: f94013e8 ldr x8, [sp, #32]
kvm_adjust_itstate:118.18 (aarch32.c) sbepe cpsr |= (itbits ║& 0x3) << 25;
~ 0000f390: 92400508 and x8, x8, #0x3
kvm_adjust_itstate:118.7 (aarch32.c) sbepe cpsr ║|= (itbits & 0x3) << 25;
~ 0000f394: f9400be9 ldr x9, [sp, #16]
~ 0000f398: aa086528 orr x8, x9, x8, lsl #25
~ 0000f39c: f9000be8 str x8, [sp, #16]
kvm_adjust_itstate:119.21 (aarch32.c) Sbepe *vcpu_cpsr(vcpu) = ║cpsr;
~ 0000f3a0: f9400be8 ldr x8, [sp, #16]
kvm_adjust_itstate:119.13 (aarch32.c) sbepe *vcpu_cpsr(║vcpu) = cpsr;
~ 0000f3a4: f94017e9 ldr x9, [sp, #40]
~ 0000f3a8: f9001fe9 str x9, [sp, #56]
d: 0xf3ac 0xf3b0 vcpu_cpsr inlined from kvm_adjust_itstate:119 (aarch32.c) <b8294>:
d vcpu_cpsr:137.27 (kvm_emulate.h) Sbepe return (unsigned long *)&║vcpu_gp_regs(vcpu)->pstate;
+vcpu param pointer(const(struct kvm_vcpu<a9b6b>/<a9eeb>)) 0xf3ac 0xf3b0 (DW_OP_fbreg 0x38) vcpu_cpsr(inlined):kvm_adjust_itstate:arch/arm64/kvm/hyp/nvhe/../aarch32.c
~d 0000f3ac: f9401fe9 ldr x9, [sp, #56]
-vcpu param pointer(const(struct kvm_vcpu<a9b6b>/<a9eeb>)) 0xf3ac 0xf3b0 (DW_OP_fbreg 0x38) vcpu_cpsr(inlined):kvm_adjust_itstate:arch/arm64/kvm/hyp/nvhe/../aarch32.c
kvm_adjust_itstate:119.19 (aarch32.c) Sbepe *vcpu_cpsr(vcpu) ║= cpsr;
~ 0000f3b0: f9013528 str x8, [x9, #616]
kvm_adjust_itstate:120.1 (aarch32.c) Sbepe ║}
~ │ ┌─0000f3b4: 14000001 b f3b8 <kvm_adjust_itstate+0x120>
│ │
~ └───>└>0000f3b8: 910103ff add sp, sp, #0x40 <- 0000f2e8(b)<kvm_adjust_itstate+0x120>,0000f3b4(b)<kvm_adjust_itstate+0x120>
0000f29c CFA:r31+64
~ 0000f3bc: d65f03c0 ret
-vcpu param pointer(struct kvm_vcpu<a9b6b>/<a9eeb>) 0xf298 0xf3c0 (DW_OP_fbreg 0x28) kvm_adjust_itstate:arch/arm64/kvm/hyp/nvhe/../aarch32.c:96
-itbits var long unsigned int (base type, DW_ATE_unsigned size:8) 0xf298 0xf3c0 (DW_OP_fbreg 0x20) kvm_adjust_itstate:arch/arm64/kvm/hyp/nvhe/../aarch32.c:98
-cond var long unsigned int (base type, DW_ATE_unsigned size:8) 0xf298 0xf3c0 (DW_OP_fbreg 0x18) kvm_adjust_itstate:arch/arm64/kvm/hyp/nvhe/../aarch32.c:98
-cpsr var long unsigned int (base type, DW_ATE_unsigned size:8) 0xf298 0xf3c0 (DW_OP_fbreg 0x10) kvm_adjust_itstate:arch/arm64/kvm/hyp/nvhe/../aarch32.c:99
-is_arm var typedef(bool=_Bool (base type, DW_ATE_boolean size:1)) 0xf298 0xf3c0 (DW_OP_fbreg 0xc) kvm_adjust_itstate:arch/arm64/kvm/hyp/nvhe/../aarch32.c:100
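Finally, kvm_adjust_itstate rewrites the split ITSTATE field in PSTATE after each emulated instruction. The sketch below reassembles that update from the interleaved source lines; the 0x0600fc00 mask matches the mov/movk pair at 0xf2d4/0xf2d8, while the ARM-mode and empty-IT early returns (lines 100-103) are left out to keep the example short.

```c
#include <stdint.h>
#include <stdio.h>

/*
 * Standalone sketch of the ITSTATE advance performed by kvm_adjust_itstate
 * above.  Not the kernel function; early-exit checks omitted.
 */
static uint64_t advance_itstate(uint64_t cpsr)
{
	uint64_t cond   = (cpsr & 0xe000) >> 13;                 /* line 105 */
	uint64_t itbits = ((cpsr & 0x1c00) >> (10 - 2)) |
			  ((cpsr & (0x3ull << 25)) >> 25);        /* lines 106-107 */

	if ((itbits & 0x7) == 0)
		itbits = cond = 0;                                /* line 111 */
	else
		itbits = (itbits << 1) & 0x1f;                    /* line 113 */

	cpsr &= ~0x0600fc00ull;               /* PSR_AA32_IT_MASK */
	cpsr |= cond << 13;
	cpsr |= (itbits & 0x1c) << (10 - 2);
	cpsr |= (itbits & 0x3) << 25;
	return cpsr;
}

int main(void)
{
	/* Example: walk an arbitrary IT state forward a few steps. */
	uint64_t cpsr = 0x01000c00;
	for (int i = 0; i < 3; i++) {
		cpsr = advance_itstate(cpsr);
		printf("cpsr = %#llx\n", (unsigned long long)cpsr);
	}
	return 0;
}
```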