On 6/6/24 07:02, Don Porter wrote:
+/**
+ * get_pte - Copy the contents of the page table entry at node[i] into
+ *           pt_entry.  Optionally, add the relevant bits to the virtual
+ *           address in vaddr_pte.
+ *
+ * @cs - CPU state
+ * @node - physical address of the current page table node
+ * @i - index (in page table entries, not bytes) of the page table
+ *      entry, within node
+ * @height - height of node within the tree (leaves are 1, not 0)
+ * @pt_entry - Pointer to a PTE_t, stores the contents of the page table entry
+ * @vaddr_parent - The virtual address bits already translated in walking the
+ *                 page table to node.  Optional: only used if vaddr_pte is
+ *                 set.
+ * @vaddr_pte - Optional pointer to a variable storing the virtual address bits
+ *              translated by node[i].
+ * @pte_paddr - Pointer to the physical address of the PTE within node.
+ *              Optional parameter.
+ */
+void
+x86_get_pte(CPUState *cs, hwaddr node, int i, int height,
+ PTE_t *pt_entry, vaddr vaddr_parent, vaddr *vaddr_pte,
+ hwaddr *pte_paddr)
+
+{
+ X86CPU *cpu = X86_CPU(cs);
+ CPUX86State *env = &cpu->env;
+ int32_t a20_mask = x86_get_a20_mask(env);
+ hwaddr pte;
+
+ /* Load the raw entry; width depends on whether the CPU is in long mode. */
+ if (env->hflags & HF_LMA_MASK) {
+ /* 64 bit */
+ int pte_width = 8;
+ pte = (node + (i * pte_width)) & a20_mask;
+ pt_entry->pte64_t = address_space_ldq(cs->as, pte,
+ MEMTXATTRS_UNSPECIFIED, NULL);
+ } else {
+ /* 32 bit */
+ int pte_width = 4;
+ pte = (node + (i * pte_width)) & a20_mask;
+ pt_entry->pte32_t = address_space_ldl(cs->as, pte,
+ MEMTXATTRS_UNSPECIFIED, NULL);
+ }
+
+ if (vaddr_pte) {
+ int shift = 0;
+ _mmu_decode_va_parameters(cs, height, &shift, NULL);
+ /*
+ * NOTE(review): 0x1ff masks a 9-bit index, which matches 64-bit/PAE
+ * levels (512 entries) but not legacy 32-bit paging, whose levels use
+ * 10-bit indices (1024 entries) — confirm the 32-bit path is correct,
+ * or that _mmu_decode_va_parameters compensates.
+ */
+ *vaddr_pte = vaddr_parent | ((i & 0x1ffULL) << shift);
+ }
+
+ if (pte_paddr) {
+ *pte_paddr = pte;
+ }
+}
This fails to recurse with nested page tables, which definitely breaks the TCG
walker.
r~