diff -uNr a/m/MANIFEST.TXT b/m/MANIFEST.TXT
--- a/m/MANIFEST.TXT d9f1b9e441a1f4e5d314d5c5f43319591ca03904b3ac36e1ede4d7310b5ff30d2e0c720695f95733f1ac3e2e829fa96700b6e8776acc974cfd30c6994edaf4b6
+++ b/m/MANIFEST.TXT 95d0cef8c6fd3896260dcddf172f30fbb868dc843903341b1ef03b9845d032d6f8c8b6cb3fa0624bb6a5390ac3b55ea020203bdedeb1dc8ac35d28ed84a6e642
@@ -2,3 +2,4 @@
 586747 errata_slaveirq "Fix of slave IRQ clearing."
 586983 tlb_and_exc_speedup "Exc. handler fastpaths and TLB caching."
 587480 simd_tlb_lookup "Experimental variant with SIMDistic TLB."
+587535 simd_tlb_errata "Remove the nonfunctional TLB cache."
diff -uNr a/m/flags.asm b/m/flags.asm
--- a/m/flags.asm cb16f8ab1a1e89fce1364577dd83a456e0859379a3a9fa42c883a18bc9962d7b017e5d2c99341c60e1d41b510fd0d588fce6d55d058e49e62d89701099cc8080
+++ b/m/flags.asm 97d32b9fac3a97819e48c890750394851ef48f2776c02904160bf945143307e5c055dcc451279bf59a7ed3988a05bf98209b7b7006a1339d4fd2db49688765cb
@@ -23,9 +23,7 @@
 %define RunningDelaySlot   1
 %define Waiting            2
 %define LL_Bit             3
-%define TLB_Rd_Cache_Valid 4
-%define TLB_Wr_Cache_Valid 5
-%define Shutdown           6
+%define Shutdown           4
 ; Positions 31 .. 15 store TLB's 'G' Flags
 ;-----------------------------------------------------------------------------
 ; Set a given Flag:
diff -uNr a/m/mipsinst/m_instrs.asm b/m/mipsinst/m_instrs.asm
--- a/m/mipsinst/m_instrs.asm 343ff34a3cbc7cd5d51c465b8b91754c546c841055b6e84dfc8e928262e958534e727dc20ec0900b103f82f57895cbfb372d0789fae1410b593746f76125187a
+++ b/m/mipsinst/m_instrs.asm d0fc5bd7c10b69136b93c51def7a043bb287515d3cee61fbcc46c2f07024b12165c81b979658e748be6dcd67fc48d73630fdeaef1f0f0d99536a5e424d01e10c
@@ -486,11 +486,7 @@
         jnz   _mtc0_unknown             ; ... then unknown; else:
         and   ebx, ~0x1F00              ; T := T & ~0x1F00
         cmp   ebx, Sr(CP0_EntryHi)      ; Find whether changing CP0_EntryHi
-        je    .Not_Changed_EntryHi      ; ... if not, skip;
-.Changed_EntryHi:                       ; If we are changing CP0_EntryHi:
-        Invalidate_TLB_Cache            ; Invalidate both R and W TLB Caches
         mov   Sr(CP0_EntryHi), ebx      ; CP0_EntryHi := ebx
-.Not_Changed_EntryHi:
         jmp   _end_cycle                ; Done
 ;-----------------------------------------------------------------------------
 _mtc0_r11:                              ; 0x0b
diff -uNr a/m/ram.asm b/m/ram.asm
--- a/m/ram.asm 19593abc66ab9ff8a02fa39884524bba012aaf3afab4c0a588272d07b9269bb59140f584519317ba0c2f412fc0b47d31cd4fea28d3d6754a3f5bbf7bacca3e78
+++ b/m/ram.asm 59a2d5464b9827f83cd714d25071d7608c79c1c7aba9004a308ea6d216943293347f57d909496810c79f2b9298d80c99a3130224c49543a26d60f0b0dc77d72b
@@ -85,14 +85,6 @@
 ;-----------------------------------------------------------------------------
 
 ;-----------------------------------------------------------------------------
-; Mark both Read and Write TLB Caches as blown:
-;-----------------------------------------------------------------------------
-%macro Invalidate_TLB_Cache 0
-        and   Flag_Reg, ~((1 << TLB_Rd_Cache_Valid) | (1 << TLB_Wr_Cache_Valid))
-%endmacro
-;-----------------------------------------------------------------------------
-
-;-----------------------------------------------------------------------------
 section .bss
 align 32
 TLB_TAG_BYTE_0_COPY resb 16             ; Byte-0 of each TLB entry Tag
@@ -140,27 +132,6 @@
         mov   ecx, eax                  ; ecx := eax (vAddr)
         and   ecx, 0xFFFFF000           ; ecx := ecx & 0xFFFFF000
         shr   ecx, 13                   ; ecx := ecx >> 13 (get vAddr's Tag)
-        ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
-        ; Find out whether we actually must do the lookup, or can use cached:
-        Flg_Get TLB_Rd_Cache_Valid      ; Is Read TLB Cache valid?
-        jnc   .Lookup_Must              ; If Read TLB Cache invalid -- must!
-        ; If cache is valid, lookup:
-        mov   AUX, ecx                  ; AUX := tag
-        xor   ecx, ecx                  ; ecx := 0
-        bt    eax, 12                   ; Test vAddr's odd/even junior bit
-        setc  cl                        ; ecx := {1 if a-odd, 0 if a-even}
-        shl   rcx, 6                    ; rcx := {64 if a-odd, 0 if a-even}
-        ; get the last-good-Tags:
-        movq  rbx, R_TLB_Last_Good_Tag  ; Get last good R-Tag pair
-        shr   rbx, cl                   ; if arity is odd, get top half
-        cmp   ebx, AUX                  ; is current Tag == to last-good ?
-        jne   .Lookup_Must              ; ... if not, go to Lookup_Must
-        ; given Tag matched last-good. So get last-good PFN and wrap up:
-        movq  rbx, R_TLB_Last_Good_PFN  ; Get last good PFN pair
-        shr   rbx, cl                   ; if arity is odd, get top half
-        jmp   .PFN_And_Done             ; use ebx as the PFN and wrap up.
-        ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
-.Lookup_Must:
         movd  xmm4, ecx                 ; ecx := copy of Tag
         ;; Search for B0, B1, B2 of Tag, accumulate result in ebx ;;
         ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@@ -245,20 +216,9 @@
         jnc   .Invalid_R                ; ... V == 0, then go to Invalid
         ; Now let's load the PFN:
         mov   rbx, TLB_PFN(AUX64)       ; load the PFN pair to rbx
-        ; ebx is now the PFN. Before wrapping up, update the TLB read cache :
-        movq  R_TLB_Last_Good_PFN, rbx  ; Set last good PFN to this PFN:
+        ; ebx is now the PFN.
         ; now leave only the correct half of PFN, at bottom of rbx:
         shr   rbx, cl                   ; if arity is odd, get upper 32bit
-        ; set correct half of R_TLB_Last_Good_Tag to the found Tag:
-        mov   rdx, 0xFFFFFFFF00000000   ; rdx := 0xFFFFFFFF00000000
-        shr   rdx, cl                   ; if arity is odd, keep bottom
-        movq  AUX64, R_TLB_Last_Good_Tag ; get last good Tag
-        and   AUX64, rdx                ; zap correct half of last good tag
-        movq  rdx, xmm4                 ; get the Tag again :
-        shl   rdx, cl                   ; if arity if odd, slide into pos:
-        or    AUX64, rdx                ; now or it into place
-        movq  R_TLB_Last_Good_Tag, AUX64 ; update last good Tag.
-.PFN_And_Done:
         and   eax, 0xFFF                ; vAddr := vAddr & 0xFFF
         or    eax, ebx                  ; vAddr := vAddr | entry.PFN[lowbit]
         jmp   _Lookup_TLB_Done          ; vAddr is now correct pAddr, done.
@@ -313,27 +273,6 @@
         mov   ecx, eax                  ; ecx := eax (vAddr)
         and   ecx, 0xFFFFF000           ; ecx := ecx & 0xFFFFF000
         shr   ecx, 13                   ; ecx := ecx >> 13 (get vAddr's Tag)
-        ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
-        ; Find out whether we actually must do the lookup, or can use cached:
-        Flg_Get TLB_Wr_Cache_Valid      ; Is Write TLB Cache valid?
-        jnc   .Lookup_Must              ; If Write TLB Cache invalid -- must!
-        ; If cache is valid, lookup:
-        mov   AUX, ecx                  ; AUX := tag
-        xor   ecx, ecx                  ; ecx := 0
-        bt    eax, 12                   ; Test vAddr's odd/even junior bit
-        setc  cl                        ; ecx := {1 if a-odd, 0 if a-even}
-        shl   rcx, 6                    ; rcx := {64 if a-odd, 0 if a-even}
-        ; get the last-good-Tags:
-        movq  rbx, W_TLB_Last_Good_Tag  ; Get last good W-Tag pair
-        shr   rbx, cl                   ; if arity is odd, get top half
-        cmp   ebx, AUX                  ; is current Tag == to last-good ?
-        jne   .Lookup_Must              ; ... if not, go to Lookup_Must
-        ; given Tag matched last-good. So get last-good PFN and wrap up:
-        movq  rbx, W_TLB_Last_Good_PFN  ; Get last good PFN pair
-        shr   rbx, cl                   ; if arity is odd, get top half
-        jmp   .PFN_And_Done             ; use ebx as the PFN and wrap up.
-        ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
-.Lookup_Must:
         movd  xmm4, ecx                 ; ecx := copy of Tag
         ;; Search for B0, B1, B2 of Tag, accumulate result in ebx ;;
         ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@@ -422,20 +361,8 @@
         jnc   .Dirty_W                  ; ... if D == 0, then go to Dirty
         ; Now let's load the correct odd or even PFN:
         mov   rbx, TLB_PFN(AUX64)       ; load the PFN pair to rbx
-        ; ebx is now the PFN. Before wrapping up, update the TLB read cache :
-        movq  W_TLB_Last_Good_PFN, rbx  ; Set last good PFN to this PFN:
         ; now leave only the correct half of PFN, at bottom of rbx:
         shr   rbx, cl                   ; if arity is odd, get upper 32bit
-        ; set correct half of R_TLB_Last_Good_Tag to the found Tag:
-        mov   rdx, 0xFFFFFFFF00000000   ; rdx := 0xFFFFFFFF00000000
-        shr   rdx, cl                   ; if arity is odd, keep bottom
-        movq  AUX64, W_TLB_Last_Good_Tag ; get last good Tag
-        and   AUX64, rdx                ; zap correct half of last good tag
-        movq  rdx, xmm4                 ; get the Tag again :
-        shl   rdx, cl                   ; if arity if odd, slide into pos:
-        or    AUX64, rdx                ; now or it into place
-        movq  W_TLB_Last_Good_Tag, AUX64 ; update last good Tag.
-.PFN_And_Done:
         and   eax, 0xFFF                ; vAddr := vAddr & 0xFFF
         or    eax, ebx                  ; vAddr := vAddr | entry.PFN[lowbit]
         jmp   _Lookup_TLB_Done          ; vAddr is now correct pAddr, done.
@@ -475,12 +402,7 @@
         and   ebx, 0xFF                 ; ebx := ebx & 0xFF
         and   ecx, 0xFFFFE000           ; ecx := ecx & 0xFFFFE000
         or    ebx, ecx                  ; ebx := ebx | ecx
-        cmp   ebx, Sr(CP0_EntryHi)      ; Find whether changing CP0_EntryHi
-        je    .Not_Changed_EntryHi      ; ... if not, skip;
-.Changed_EntryHi:                       ; If we are changing CP0_EntryHi:
-        Invalidate_TLB_Cache            ; Invalidate both R and W TLB Caches
         mov   Sr(CP0_EntryHi), ebx      ; CP0_EntryHi := ebx
-.Not_Changed_EntryHi:
         ;; Will go into exception handler instead of back to _Virt_xxx etc
         ;; and drop into 'done' :
 _Lookup_TLB_Done:
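
What the patch removes, in outline: both the read and the write TLB lookup paths in m/ram.asm kept a one-entry "last good translation" cache -- a packed even/odd pair of Tags and PFNs plus a validity flag -- consulted before the SIMD TLB walk and, per the hunks above, invalidated only where CP0_EntryHi was changed. The C fragment below is a minimal sketch of that fast path, for illustration only: the names and types are not the emulator's (the real code holds the pairs in R_TLB_Last_Good_Tag / R_TLB_Last_Good_PFN and their W_ twins, and the validity bits TLB_Rd_Cache_Valid / TLB_Wr_Cache_Valid in Flag_Reg, all deleted by this patch), and it assumes the even-page half sits in the low 32 bits of each pair and the odd-page half in the high 32 bits.

    /* tlb_cache_sketch.c -- NOT part of the vpatch; a hypothetical model of the
       removed fast path, under the even-low / odd-high packing assumption.     */
    #include <stdint.h>
    #include <stdbool.h>

    struct last_good {
        uint64_t tag;   /* even-page Tag in bits 0..31, odd-page Tag in bits 32..63 */
        uint64_t pfn;   /* even-page PFN in bits 0..31, odd-page PFN in bits 32..63 */
        bool     valid; /* analogue of TLB_Rd_Cache_Valid / TLB_Wr_Cache_Valid      */
    };

    /* Fast path consulted before the full TLB walk: on a Tag match for the
       vAddr's even/odd half, reuse the cached PFN and skip the walk.           */
    static bool cached_lookup(const struct last_good *c, uint32_t vaddr,
                              uint32_t *pfn_out)
    {
        if (!c->valid)
            return false;                               /* cache blown, must walk */
        uint32_t tag   = (vaddr & 0xFFFFF000u) >> 13;   /* vAddr's Tag, as in asm */
        unsigned shift = (vaddr & (1u << 12)) ? 32 : 0; /* odd page -> high half  */
        if ((uint32_t)(c->tag >> shift) != tag)
            return false;                               /* different page: walk   */
        *pfn_out = (uint32_t)(c->pfn >> shift);         /* reuse cached PFN       */
        return true;
    }

After a successful walk, the removed code wrote the found Tag and PFN back into the matching half of the pair, and the likewise-deleted Invalidate_TLB_Cache macro cleared both valid bits whenever CP0_EntryHi changed; rather than repair the mechanism the manifest calls nonfunctional, the patch removes it entirely and lets every lookup take the full SIMD walk.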