Skip to content

Commit

Permalink
RISC-V: KVM: Use Svinval for local TLB maintenance when available
Browse files Browse the repository at this point in the history
We should prefer the HINVAL.GVMA and HINVAL.VVMA instructions for local TLB
maintenance when the underlying host supports the Svinval extension.

Signed-off-by: Anup Patel <apatel@ventanamicro.com>
Reviewed-by: Andrew Jones <ajones@ventanamicro.com>
Signed-off-by: Anup Patel <anup@brainfault.org>
  • Loading branch information
Anup Patel authored and Anup Patel committed Oct 2, 2022
1 parent 122979a commit 5ff1124
Show file tree
Hide file tree
Showing 2 changed files with 68 additions and 12 deletions.
20 changes: 20 additions & 0 deletions arch/riscv/include/asm/insn-def.h
Original file line number Diff line number Diff line change
Expand Up @@ -114,4 +114,24 @@
__ASM_STR(.error "hlv.d requires 64-bit support")
#endif

/*
 * Hand-assembled R-type encodings for the RISC-V Svinval extension
 * instructions, built from the INSN_R()/FUNC3()/FUNC7() helpers above.
 * NOTE(review): presumably encoded by hand so the kernel builds with
 * assemblers that lack Svinval mnemonic support; the funct7 values
 * (11, 12, 19, 51) should be confirmed against the ratified Svinval
 * chapter of the RISC-V privileged specification.
 */

/* SINVAL.VMA vaddr, asid: S-stage per-address/ASID TLB invalidate. */
#define SINVAL_VMA(vaddr, asid) \
INSN_R(OPCODE_SYSTEM, FUNC3(0), FUNC7(11), \
__RD(0), RS1(vaddr), RS2(asid))

/* SFENCE.W.INVAL: orders prior stores before subsequent invalidates. */
#define SFENCE_W_INVAL() \
INSN_R(OPCODE_SYSTEM, FUNC3(0), FUNC7(12), \
__RD(0), __RS1(0), __RS2(0))

/*
 * SFENCE.INVAL.IR: orders prior invalidates before subsequent implicit
 * references. Same funct7 as SFENCE.W.INVAL; selected via rs2 = 1.
 */
#define SFENCE_INVAL_IR() \
INSN_R(OPCODE_SYSTEM, FUNC3(0), FUNC7(12), \
__RD(0), __RS1(0), __RS2(1))

/* HINVAL.VVMA vaddr, asid: VS-stage (guest virtual) TLB invalidate. */
#define HINVAL_VVMA(vaddr, asid) \
INSN_R(OPCODE_SYSTEM, FUNC3(0), FUNC7(19), \
__RD(0), RS1(vaddr), RS2(asid))

/* HINVAL.GVMA gaddr, vmid: G-stage (guest physical) TLB invalidate. */
#define HINVAL_GVMA(gaddr, vmid) \
INSN_R(OPCODE_SYSTEM, FUNC3(0), FUNC7(51), \
__RD(0), RS1(gaddr), RS2(vmid))

#endif /* __ASM_INSN_DEF_H */
60 changes: 48 additions & 12 deletions arch/riscv/kvm/tlb.c
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,12 @@
#include <linux/kvm_host.h>
#include <asm/cacheflush.h>
#include <asm/csr.h>
#include <asm/hwcap.h>
#include <asm/insn-def.h>

/*
 * True when the host ISA provides the Svinval extension. Implemented as
 * a static branch (jump label) keyed off the riscv_isa_ext_keys[] entry
 * for Svinval, so the check costs a patched nop/jump rather than a load
 * and compare on every local TLB flush.
 */
#define has_svinval() \
static_branch_unlikely(&riscv_isa_ext_keys[RISCV_ISA_EXT_KEY_SVINVAL])

void kvm_riscv_local_hfence_gvma_vmid_gpa(unsigned long vmid,
gpa_t gpa, gpa_t gpsz,
unsigned long order)
Expand All @@ -25,9 +29,17 @@ void kvm_riscv_local_hfence_gvma_vmid_gpa(unsigned long vmid,
return;
}

for (pos = gpa; pos < (gpa + gpsz); pos += BIT(order))
asm volatile (HFENCE_GVMA(%0, %1)
: : "r" (pos >> 2), "r" (vmid) : "memory");
if (has_svinval()) {
asm volatile (SFENCE_W_INVAL() ::: "memory");
for (pos = gpa; pos < (gpa + gpsz); pos += BIT(order))
asm volatile (HINVAL_GVMA(%0, %1)
: : "r" (pos >> 2), "r" (vmid) : "memory");
asm volatile (SFENCE_INVAL_IR() ::: "memory");
} else {
for (pos = gpa; pos < (gpa + gpsz); pos += BIT(order))
asm volatile (HFENCE_GVMA(%0, %1)
: : "r" (pos >> 2), "r" (vmid) : "memory");
}
}

void kvm_riscv_local_hfence_gvma_vmid_all(unsigned long vmid)
Expand All @@ -45,9 +57,17 @@ void kvm_riscv_local_hfence_gvma_gpa(gpa_t gpa, gpa_t gpsz,
return;
}

for (pos = gpa; pos < (gpa + gpsz); pos += BIT(order))
asm volatile(HFENCE_GVMA(%0, zero)
: : "r" (pos >> 2) : "memory");
if (has_svinval()) {
asm volatile (SFENCE_W_INVAL() ::: "memory");
for (pos = gpa; pos < (gpa + gpsz); pos += BIT(order))
asm volatile(HINVAL_GVMA(%0, zero)
: : "r" (pos >> 2) : "memory");
asm volatile (SFENCE_INVAL_IR() ::: "memory");
} else {
for (pos = gpa; pos < (gpa + gpsz); pos += BIT(order))
asm volatile(HFENCE_GVMA(%0, zero)
: : "r" (pos >> 2) : "memory");
}
}

void kvm_riscv_local_hfence_gvma_all(void)
Expand All @@ -70,9 +90,17 @@ void kvm_riscv_local_hfence_vvma_asid_gva(unsigned long vmid,

hgatp = csr_swap(CSR_HGATP, vmid << HGATP_VMID_SHIFT);

for (pos = gva; pos < (gva + gvsz); pos += BIT(order))
asm volatile(HFENCE_VVMA(%0, %1)
: : "r" (pos), "r" (asid) : "memory");
if (has_svinval()) {
asm volatile (SFENCE_W_INVAL() ::: "memory");
for (pos = gva; pos < (gva + gvsz); pos += BIT(order))
asm volatile(HINVAL_VVMA(%0, %1)
: : "r" (pos), "r" (asid) : "memory");
asm volatile (SFENCE_INVAL_IR() ::: "memory");
} else {
for (pos = gva; pos < (gva + gvsz); pos += BIT(order))
asm volatile(HFENCE_VVMA(%0, %1)
: : "r" (pos), "r" (asid) : "memory");
}

csr_write(CSR_HGATP, hgatp);
}
Expand Down Expand Up @@ -102,9 +130,17 @@ void kvm_riscv_local_hfence_vvma_gva(unsigned long vmid,

hgatp = csr_swap(CSR_HGATP, vmid << HGATP_VMID_SHIFT);

for (pos = gva; pos < (gva + gvsz); pos += BIT(order))
asm volatile(HFENCE_VVMA(%0, zero)
: : "r" (pos) : "memory");
if (has_svinval()) {
asm volatile (SFENCE_W_INVAL() ::: "memory");
for (pos = gva; pos < (gva + gvsz); pos += BIT(order))
asm volatile(HINVAL_VVMA(%0, zero)
: : "r" (pos) : "memory");
asm volatile (SFENCE_INVAL_IR() ::: "memory");
} else {
for (pos = gva; pos < (gva + gvsz); pos += BIT(order))
asm volatile(HFENCE_VVMA(%0, zero)
: : "r" (pos) : "memory");
}

csr_write(CSR_HGATP, hgatp);
}
Expand Down

0 comments on commit 5ff1124

Please sign in to comment.