[ Upstream commit 4f6deb8cbab532a8d7250bc09234c1795ecb5e2c ]
On pre-Niagara systems, we fetch the fault address on data TLB
exceptions from the TLB_TAG_ACCESS register. But this register also
contains the context ID associated with the fault in the low 13 bits
of the register value.

This propagates into current_thread_info()->fault_address and can
cause trouble later on.

So clear the low 13 bits out of the TLB_TAG_ACCESS value in the cases
where it matters.
Reported-by: Mikulas Patocka <mpatocka@redhat.com>
Signed-off-by: David S. Miller <davem@davemloft.net>
Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
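
For reference, the change boils down to masking the context ID out of the
raw TLB_TAG_ACCESS value before it is used as a fault address. A minimal
C sketch of that masking, assuming the sparc64 base page size of 8K
(PAGE_SHIFT == 13) and a hypothetical tag_access variable holding the raw
register contents:

#include <stdint.h>

#define PAGE_SHIFT  13                                   /* sparc64 base pages are 8K */
#define CTX_ID_MASK (((uint64_t)1 << PAGE_SHIFT) - 1)    /* low 13 bits hold the context ID */

/* Keep only the fault virtual address and drop the context ID bits,
 * which is the effect the srlx/sllx by PAGE_SHIFT pair has in the
 * assembly below.
 */
static inline uint64_t tag_access_to_fault_addr(uint64_t tag_access)
{
	return tag_access & ~CTX_ID_MASK;
}

The assembly uses a shift-right/shift-left pair rather than an andn with an
immediate, most likely because the 0x1fff mask does not fit in SPARC's
13-bit signed immediate field.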
/* PROT ** ICACHE line 2: More real fault processing */
ldxa [%g4] ASI_DMMU, %g5 ! Put tagaccess in %g5
+ srlx %g5, PAGE_SHIFT, %g5
+ sllx %g5, PAGE_SHIFT, %g5 ! Clear context ID bits
bgu,pn %xcc, winfix_trampoline ! Yes, perform winfixup
mov FAULT_CODE_DTLB | FAULT_CODE_WRITE, %g4
ba,pt %xcc, sparc64_realfault_common ! Nope, normal fault
nop
nop
- nop
- nop
/* PROT ** ICACHE line 3: Unused... */
nop
mov TLB_TAG_ACCESS, %g4
ldxa [%g4] ASI_IMMU, %g4
+ /* The kernel executes in context zero, therefore we do not
+ * need to clear the context ID bits out of %g4 here.
+ */
+
/* sun4v_itlb_miss branches here with the missing virtual
* address already loaded into %g4
*/
mov TLB_TAG_ACCESS, %g4
ldxa [%g4] ASI_DMMU, %g4
+ /* The kernel executes in context zero, therefore we do not
+ * need to clear the context ID bits out of %g4 here.
+ */
+
/* sun4v_dtlb_miss branches here with the missing virtual
* address already loaded into %g4
*/
nop
.previous
+ /* The kernel executes in context zero, therefore we do not
+ * need to clear the context ID bits out of %g5 here.
+ */
+
be,pt %xcc, sparc64_realfault_common
mov FAULT_CODE_DTLB, %g4
ba,pt %xcc, winfix_trampoline
*/
tsb_miss_dtlb:
mov TLB_TAG_ACCESS, %g4
+ ldxa [%g4] ASI_DMMU, %g4
+ srlx %g4, PAGE_SHIFT, %g4
ba,pt %xcc, tsb_miss_page_table_walk
- ldxa [%g4] ASI_DMMU, %g4
+ sllx %g4, PAGE_SHIFT, %g4
tsb_miss_itlb:
mov TLB_TAG_ACCESS, %g4
+ ldxa [%g4] ASI_IMMU, %g4
+ srlx %g4, PAGE_SHIFT, %g4
ba,pt %xcc, tsb_miss_page_table_walk
- ldxa [%g4] ASI_IMMU, %g4
+ sllx %g4, PAGE_SHIFT, %g4
/* At this point we have:
* %g1 -- PAGE_SIZE TSB entry address
nop
.previous
+ /* Clear context ID bits. */
+ srlx %g5, PAGE_SHIFT, %g5
+ sllx %g5, PAGE_SHIFT, %g5
+
be,pt %xcc, sparc64_realfault_common
mov FAULT_CODE_DTLB, %g4
ba,pt %xcc, winfix_trampoline