tree checksum vpatch file split hunks

all signers: asciilifeform

antecedents: simd_tlb_lookup.kv m_genesis.kv errata_slaveirq.kv

press order:

m_genesis.kv asciilifeform
errata_slaveirq.kv asciilifeform

patch:

- 0F03021AE4E766B63BE5EFE5CF5E672FC90202022CA05772621F5299C9B808D2C8C2439B7FA555716CB33A6ADB8351E582275FFBAFDF6CC633E528522343720E
+ B93A4289F9DECD7FC48508715AFE4F4237A1EDD72646FEF53EBAB319D50F16ADF546473B023C657C5C6D16A47CF3DFA1ECFDEE1F00E5796126B8A6C727292904
m/MANIFEST.TXT
(1 . 2)(1 . 3)
5 586606 m_genesis "Genesis."
6 586747 errata_slaveirq "Fix of slave IRQ clearing."
7 586983 tlb_and_exc_speedup "Exc. handler fastpaths and TLB caching."
- 6CEECB9422F3D01B04ED2ECB183FB96665FF969087E2967ECCEF2703337401AFB4F0EC91E4B7EFDA625AE4534BA1C3DABA539A1FC794B2E594D7404DDF72399C
+ 1427E0C04C15C733AED037B1A56252C2F7F8EE7960171AB548B6CE1244320EA1E9B4AA00045D94A4478AAD0E605E9206AA9E8B92581BE252B3E08C893ED372FE
m/cpustate.asm
(76 . 6)(76 . 19)
12 ;-----------------------------------------------------------------------------
13
14 ;-----------------------------------------------------------------------------
15 ; XMM Regs used for TLB Caching:
16 ;-----------------------------------------------------------------------------
17 %define Rd_E_Last_Tag xmm5 ; Last good Tag on reading Even vAddr
18 %define Rd_E_Last_PFN xmm6 ; Last good PFN on reading Even vAddr
19 %define Rd_O_Last_Tag xmm7 ; Last good Tag on reading Odd vAddr
20 %define Rd_O_Last_PFN xmm8 ; Last good PFN on reading Odd vAddr
21 %define Wr_E_Last_Tag xmm9 ; Last good Tag on writing Even vAddr
22 %define Wr_E_Last_PFN xmm10 ; Last good PFN on writing Even vAddr
23 %define Wr_O_Last_Tag xmm11 ; Last good Tag on writing Odd vAddr
24 %define Wr_O_Last_PFN xmm12 ; Last good PFN on writing Odd vAddr
25 ;-----------------------------------------------------------------------------
26
27 ;-----------------------------------------------------------------------------
28 ; Access to MIPS Registers that live in MCPU (Emulator State) :
29 ;-----------------------------------------------------------------------------
30 ; Refer to given MIPS special Reg:
- A7D2BE62ABFD1319DBAFAA5F8EEDD8718C3364A59D89E89C611D0A95D04BA6D81D089E93A65853F85FF3FDD45B413C2C582CBE9E7B082E40F18F206D2CAFDBB1
+ 7DEDB8135F032539DD5F6C0133070AFF4078CADB322FCB7E665A56BDFE7940E13D463B2803EA9C9726E7606579218BA9684D21AE106890C6EF7A285119887364
m/flags.asm
(23 . 8)(23 . 8)
35 %define RunningDelaySlot 1
36 %define Waiting 2
37 %define LL_Bit 3
38 %define IsWriting 4
39 %define ExcWasTLBNoMatch 5
40 %define TLB_Rd_Cache_Valid 4
41 %define TLB_Wr_Cache_Valid 5
42 %define Shutdown 6
43 ;-----------------------------------------------------------------------------
44 ; Set a given Flag:
- 46635728670946901EDCA4062D650365295341CF87401AC4FBB7F78423FAF76B5C246A07EA8E5FC76FCD4B49FF669102F556F47F7C5A247670FD00FFEE665B80
+ AD646CEE418F8C037CDA4CB8DB4838733687E9D522537FCF080E044199B5B26DAEFA7ECDAC572E0E2A606F46492130CDE66A58AD33F12967809FAFD949A8D524
m/i_decode.asm
(261 . 16)(261 . 35)
49 ;-----------------------------------------------------------------------------
50
51 ;-----------------------------------------------------------------------------
52 ; Denote privileged (permitted in Kernel-Mode strictly) instructions.
53 ; Jump to given target if interrupts are DISABLED:
54 ;-----------------------------------------------------------------------------
55 %macro PRIVILEGED 0
56 %macro JMP_IF_IRQS_OFF 1
57 mov eax, CP0_Status ; eax := CP0_Status
58 and eax, 3 ; select only IE | EXL | ERL bits
59 sub eax, 1 ; if eax == 1, then interrupts enabled
60 jnz %1 ; so if interrupts not enabled, jump.
61 %endmacro
62 ;-----------------------------------------------------------------------------
63
64 ;-----------------------------------------------------------------------------
65 ; Jump to given target if CPU is currently in Kernel Mode:
66 ;-----------------------------------------------------------------------------
67 %macro JMP_IF_KERNELMODE 1
68 bt CP0_Status, CP0St_UM ; CF := CP0St_UM Flag
69 jnc %%proceed ; If UM = 0: Kernel Mode, proceed.
70 jnc %1 ; If UM = 0: Kernel Mode, proceed.
71 test CP0_Status, (1 << CP0St_EXL) | (1 << CP0St_ERL) ; EXL or ERL
72 jnz %%proceed ; If EXL && ERL: Kernel Mode, proceed.
73 jnz %1 ; If EXL or ERL: Kernel Mode, proceed.
74 %endmacro
75 ;-----------------------------------------------------------------------------
76
77 ;-----------------------------------------------------------------------------
78 ; Denote privileged (permitted in Kernel-Mode strictly) instructions.
79 ;-----------------------------------------------------------------------------
80 %macro PRIVILEGED 0
81 JMP_IF_KERNELMODE %%proceed ; If in Kernel Mode : proceed;
82 ;; We are NOT in kernel mode, but trying to execute a privileged inst:
83 SetEXC EXC_RI ; Set the 'Reserved Instr.' Exception.
84 jmp _Handle_Exception ; Go straight to exception handler.
85 jmp _Handle_Exception_Other ; Go straight to exception handler.
86 %%proceed ; We're in Kernel Mode, so proceed with the privileged instruction.
87 %endmacro
88 ;-----------------------------------------------------------------------------
- E75680EEE6B4D6DAB5E13FD02DB2A86702136633846D4E9D9CA17FFAAE25CE6C1D0D138DB69081802520D9B418B7027A8150271E15E954971BA44D2506F70AD1
+ 3B8E7B9CF4B6B37A941B53F534FA000B523941E5C52747F0CCF92397C5E64FDCF74BBDD241E70E51BEF8893954C0CF5F4DB5A89066B68349A3DE4F24F737BDBC
m/knobs.asm
(23 . 6)(23 . 21)
93 ;-----------------------------------------------------------------------------
94
95 ;-----------------------------------------------------------------------------
96 ; If TLBWR_CHEAT is enabled, the TLBWR ('Write Random TLB Entry') instruction
97 ; will slide all unwired entries down by one slot and write into the freed
98 ; slot near the top permitted by CP0_Wired, instead of the traditional
99 ; behaviour (where entry indexed by a modulus of the tick counter is killed.)
100 ; No known code (incl. Linux) tries to rely on the absolute position of
101 ; unwired TLB entries after a TLBWR instruction. So this gives faster lookup
102 ; when iterating over TLB, as the newer unwired entries will aggregate near
103 ; the base of the table. Iron MIPSen do not iterate, they look up in parallel,
104 ; ergo the original MIPS designer did not see any reason to attempt to order
105 ; TLB entries by frequency of use.
106 ;-----------------------------------------------------------------------------
107 %define TLBWR_CHEAT 1
108 ;-----------------------------------------------------------------------------
109
110 ;-----------------------------------------------------------------------------
111 ; Alignment Grain
112 ;-----------------------------------------------------------------------------
113 %define GRAIN 32
- 93767FBF28AC3CA8A8D7CEA92C2295D7F4A041C9B67B003EDC7CA1A32AE624B423F8F33E5D099403A9E68E794F0F5D90F4F3F2A12E8D3CEE4FCAFB12E7502684
+ 35A5F7D843A515A6301C0D09957D3529F10F0443A50BD54177BCAAECC96054C502D2E14CCF1D5F106247DD2F566839AB49883E97B86CFF1D5AD889652E8F5EAF
m/mips_cpu.asm
(40 . 12)(40 . 8)
118 SetIRQ TIMER_IRQ ; Timer reached limit, invoke timer IRQ
119
120 _cycle_no_mips_timer:
121
122 ;; Test if Interrupts are Disabled:
123 bt CP0_Status, CP0St_IE ; CF := CP0St_IE
124 jnc _cycle_no_irq ; If 0, IRQs are disabled, go to no_irq
125 test CP0_Status, (1 << CP0St_ERL) | (1 << CP0St_EXL) ; If ERL/EXL:
126 jnz _cycle_no_irq ; ... then also interrupts are disabled
127
128 JMP_IF_IRQS_OFF _cycle_no_irq ; Skip IRQ processing if IRQs disabled
129
130 GetSlaveIRQ ; See if slave threw interrupt
131
(64 . 7)(60 . 7)
133 ;; Copy InDelaySlot Flag to RunningDelaySlot Flag:
134 Flg_Cpy RunningDelaySlot, InDelaySlot
135
136 jmp _Handle_Exception ; Handle exception and end cycle.
137 jmp _Handle_Exception_IRQ ; Handle IRQ exception and end cycle.
138
139 _cycle_no_irq: ; If ints disabled or none pending
140
- FD686B4ADBDB138A08F6B8F95620FD0533B2BD68C5B5056D24100F28B552CB6E7AF74B32BFD739F029E065D257F456024D223073DA3F0F8168E6D98F75BE053D
+ 2AC1B0ACACD2CD01845DC0E9AD36BAE84577B76ACE6BE6E079C7E11B96A22508301B7B0012B119164EC5C87AB196D180DA0A4E787D3B7AB9E3ED3436D82BEEA0
m/mips_exc.asm
(37 . 49)(37 . 31)
145 %endmacro
146 ;-----------------------------------------------------------------------------
147
148 ;------------------------------
149 ; Exception (and IRQ) Handler |
150 ;-----------------------------------------------------------------------------
151 _Handle_Exception:
152 ; Exception Prologue
153 ;-----------------------------------------------------------------------------
154 %macro EXC_HANDLER_PROLOGUE 0
155 Flg_Off InDelaySlot ; Clear the InDelaySlot Flag
156 bt CP0_Status, CP0St_EXL ; CF := EXL
157 jc _H_Exc_Off_Is_180 ; If EXL is 1, Offset := 0x180; else:
158 jc _H_Exc_Off_Is_180 ; If EXL is 1, Offset := 0x180; else:
159 ;; EXL is 0:
160 mov Sr(CP0_Epc), PC ; CP0_Epc := PC
161 Flg_Get RunningDelaySlot ; Are we running the delay slot?
162 jnc _H_Exc_Not_R_Delay ; If yes, then skip, else:
163 _H_Exc_R_Delay: ; We ARE running the delay slot:
164 jnc %%no_delay ; If yes, then skip, else:
165 %%yes_delay: ; We ARE running the delay slot:
166 sub Sr(CP0_Epc), 0x4 ; CP0_Epc := CP0_Epc - 0x4
167 bts CP0_Cause, CP0Cau_BD ; Set BD Flag in CP0_Cause
168 jmp _H_Exc_Test_TLB ; Proceed to test for TLBism
169 _H_Exc_Not_R_Delay: ; We are NOT running the delay slot:
170 jmp %%exc_prologue_done ; Continue in _Handle_Exception
171 %%no_delay: ; We are NOT running the delay slot:
172 btr CP0_Cause, CP0Cau_BD ; Clear BD Flag in CP0_Cause
173 _H_Exc_Test_TLB:
174 ;; Test whether this was a TLB Exception:
175 GetEXC eax ; EAX := the current exception code
176 cmp eax, EXC_TLBL ; was it EXC_TLBL ?
177 je __H_Exc_Was_TLB ; ... if yes, go to H_Exc_Was_TLB
178 cmp eax, EXC_TLBS ; was it EXC_TLBS ?
179 je __H_Exc_Was_TLB ; ... if yes, go to H_Exc_Was_TLB
180 ;; This was NOT a TLB Exception:
181 cmp eax, EXC_Int ; was code EXC_Int ?
182 jne _H_Exc_Off_Is_180 ; if not, Offset := 0x180
183 bt CP0_Cause, CP0Cau_IV ; Was CP0_Cause bit 23 (IV) zero?
184 jnc _H_Exc_Off_Is_180 ; if was zero, Offset := 0x180
185 ;; If CP0_Cause bit 23 != 0: Offset := 0x200
186 mov eax, 0x200 ; Offset := 0x200
187 jmp _H_Exc_Have_Offset ; Go to H_Exc_Have_Offset
188 __H_Exc_Was_TLB: ; This WAS a TLB Exception:
189 Flg_Get ExcWasTLBNoMatch ; CF := 'TLB No Match'
190 ;; TODO: in 'cmips', this case was reversed? why?
191 ;; For now, we'll do likewise:
192 jnc _H_Exc_Off_Is_180 ; ... if 0, Offset := 0x180
193 ;; Else: Offset := 0x0
194 xor eax, eax ; Clear EAX (Offset := 0)
195 jmp _H_Exc_Have_Offset ; Finish up
196 _H_Exc_Off_Is_180: ; Offset := 0x180
197 mov eax, 0x180 ; Set the Offset
198 _H_Exc_Have_Offset: ; We finished calculating the Offset:
199 %%exc_prologue_done:
200 %endmacro
201 ;-----------------------------------------------------------------------------
202
203 ;-----------------------------------------------------------------------------
204 ; Exception Epilogue:
205 ;-----------------------------------------------------------------------------
206 _Exception_Epilogue:
207 bts CP0_Status, CP0St_EXL ; Set the EXL Flag
208 mov PC, eax ; PC := Offset (eax)
209 mov eax, 0x80000000 ; Base that will be used if BEV=0
(90 . 3)(72 . 34)
211 ;; Done handling exception
212 jmp _cycle ; Start next cycle.
213 ;-----------------------------------------------------------------------------
214
215 ;------------------------------------------
216 ; Exception Handler: TLB NoMatch Fastpath |
217 ;-----------------------------------------------------------------------------
218 _Handle_Exception_TLB_NoMatch:
219 EXC_HANDLER_PROLOGUE
220 xor eax, eax ; Clear EAX (Offset := 0)
221 jmp _Exception_Epilogue ; Finish up
222 ;-----------------------------------------------------------------------------
223
224 ;-------------------------------------------------------
225 ; Exception Handler: Not-IRQ, Not-TLB-NoMatch Fastpath |
226 ;-----------------------------------------------------------------------------
227 _Handle_Exception_Other:
228 EXC_HANDLER_PROLOGUE
229 _H_Exc_Off_Is_180:
230 mov eax, 0x180 ; Set the Offset
231 jmp _Exception_Epilogue ; Finish up
232 ;-----------------------------------------------------------------------------
233
234 ;----------------------------------
235 ; Exception Handler: IRQ Fastpath |
236 ;-----------------------------------------------------------------------------
237 _Handle_Exception_IRQ:
238 EXC_HANDLER_PROLOGUE
239 bt CP0_Cause, CP0Cau_IV ; Was CP0_Cause bit 23 (IV) zero?
240 jnc _H_Exc_Off_Is_180 ; if was zero, Offset := 0x180
241 ;; If CP0_Cause bit 23 != 0: Offset := 0x200
242 mov eax, 0x200 ; Offset := 0x200
243 jmp _Exception_Epilogue ; Finish up
244 ;-----------------------------------------------------------------------------
- 5B25235B8644D82E985ED6CC354E5E6E0D94AAAF0D0B5A7B33D8A5586CF67A90F13885398E5C10B935C9EE991B1CC720AE9B832DFF9961D6090CF5E17A0445B9
+ 42FCA47AC9E2CC3D6CEE432168B103FC0955CD846DAC338B966B81DE7D427A6CD682A3177B86BA3441BCB859FC4A6219011060292A8A8F756C053E391550E018
m/mipsinst/i_instrs.asm
(171 . 7)(171 . 7)
249 align GRAIN, db 0x90
250 _bad:
251 SetEXC EXC_RI ; Set the EXC_RI Exception
252 jmp _Handle_Exception ; Go straight to exception handler.
253 jmp _Handle_Exception_Other ; Go straight to exception handler.
254 ;-----------------------------------------------------------------------------
255
256 ;-----------------------------------------------------------------------------
(707 . 7)(707 . 7)
258 bt Imm, 0 ; Get low bit of vAddr
259 jnc _i_lh_aligned_ok ; If zero, vAddr was properly aligned;
260 SetEXC EXC_AdEL ; If vAddr was NOT properly aligned:
261 jmp _Handle_Exception ; Go straight to exception handler.
262 jmp _Handle_Exception_Other ; Go straight to exception handler.
263 _i_lh_aligned_ok: ; Load is to proceed normally:
264 mov ecx, Imm ; Save vAddr to ECX
265 xor TMP, TMP ; Clear TMP, where we will put the HW
(830 . 7)(830 . 7)
267 bt Imm, 0 ; Get low bit of vAddr
268 jnc _i_lhu_aligned_ok ; If zero, vAddr was properly aligned;
269 SetEXC EXC_AdEL ; If vAddr was NOT properly aligned:
270 jmp _Handle_Exception ; Go straight to exception handler.
271 jmp _Handle_Exception_Other ; Go straight to exception handler.
272 _i_lhu_aligned_ok: ; Load is to proceed normally:
273 mov ecx, Imm ; Save vAddr to ECX
274 xor TMP, TMP ; Clear TMP, where we will put the HW
(933 . 7)(933 . 7)
276 bt Imm, 0 ; Get low bit of vAddr
277 jnc _i_sh_aligned_ok ; If zero, vAddr was properly aligned;
278 SetEXC EXC_AdES ; If vAddr was NOT properly aligned:
279 jmp _Handle_Exception ; Go straight to exception handler.
280 jmp _Handle_Exception_Other ; Go straight to exception handler.
281 _i_sh_aligned_ok:
282 mov AUX, Imm ; Save Imm (vAddr) to AUX
283 ;; rS (ecx) is not needed any more, can be reused
- 0B4157A311317D55241B7A9700E16B0952CC540285D4922EC2D6626C29C3A9C83DE29D18068D9BE2A721B801CDED2D930A4CDECF4991C5C2EAB707E5E987BDE6
+ 931B5FD9AC59730BBCB95E9A9D3DBA41483BBE6B3FC204AD8194397191795BACF3EF76DF5335F8F17B3479A007DE3A808DF640FCA949A7802B183BC25E7FE0C3
m/mipsinst/m_instrs.asm
(92 . 6)(92 . 25)
288 _m_tlbwr:
289 ; no fields
290 mov ecx, Sr(CP0_Wired) ; ecx := CP0_Wired
291 %ifdef TLBWR_CHEAT ; 'Cheat' behaviour (see knobs.asm for rationale) :
292 mov AUX, ecx ; save this index in AUX, we will use
293 mov edx, TLB_ENTRIES_COUNT - 1 ; index of last entry
294 ; Move all TLB entries after the Wired entries down by one slot:
295 .tlbwr_slide: ; Start by replacing the last entry with the next-to-last:
296 cmp edx, AUX ; time to stop?
297 je .tlbr_slide_done ; ... then stop.
298 mov ecx, edx ; ecx := edx
299 dec ecx ; ecx := ecx - 1 (prev. TLB index)
300 mov eax, TLB_E(ecx) ; eax := current TLB entry
301 mov TLB_E(edx), eax ; move the current into the next
302 mov eax, TLB_PFN_E(ecx) ; eax := current PFN_EVEN entry
303 mov TLB_PFN_E(edx), eax ; move the current into the next
304 mov eax, TLB_PFN_O(ecx) ; eax := current PFN_ODD entry
305 mov TLB_PFN_O(edx), eax ; move the current into the next
306 dec edx ; move back by one
307 jmp .tlbwr_slide ; Continue the slide.
308 .tlbr_slide_done: ; Now we freed up the top-most non-wired slot in TLB table:
309 %else ; 'Traditional' behaviour per the MIPS Standard:
310 mov ebx, TLB_ENTRIES_COUNT ; ebx := #tlbentries
311 sub ebx, ecx ; ebx := #tlbentries - Wired
312 mov edx, 0 ; edx (upper half of dividend) := 0
(99 . 7)(118 . 8)
314 div ebx ; edx:eax / ebx
315 add edx, ecx ; edx (remainder) := edx + wired
316 mov AUX, edx ; make edx the index for tlb write
317 call _write_tlb_entry ; Write the indexed TLB entry.
318 %endif
319 call _write_tlb_entry ; Write the AUX-indexed TLB entry.
320 jmp _end_cycle
321 ;-----------------------------------------------------------------------------
322
(446 . 7)(466 . 12)
324 test ecx, ecx ; Sel != 0 ?
325 jnz _mtc0_unknown ; ... then unknown; else:
326 and ebx, ~0x1F00 ; T := T & ~0x1F00
327 mov Sr(CP0_EntryHi), ebx ; CP0_EntryHi := T
328 cmp ebx, Sr(CP0_EntryHi) ; Find whether changing CP0_EntryHi
329 je .Not_Changed_EntryHi ; ... if not, skip;
330 .Changed_EntryHi: ; If we are changing CP0_EntryHi:
331 Invalidate_TLB_Cache ; Invalidate both R and W TLB Caches
332 mov Sr(CP0_EntryHi), ebx ; CP0_EntryHi := ebx
333 .Not_Changed_EntryHi:
334 jmp _end_cycle ; Done
335 ;-----------------------------------------------------------------------------
336 _mtc0_r11: ; 0x0b
- 40EF74622647A11AC973984A9F831AD39BBEC47EAC267E3F62FB30E36058AC4B01074B97585E9B36E0B32DEB7344EC4C5AA6585E9571BAA2E0432E8D78DEB254
+ F729FFA37ED0E9C666A6CDE198317D24A05AD91612B0408C22B30A2D9C179B61F757BD6B8820A87C109D22786FB376C63A6A1252402F47ECB92DBD18F8769A80
m/mipsinst/r_instrs.asm
(298 . 7)(298 . 7)
341 _r_syscall:
342 ; no fields
343 SetEXC EXC_SYS ; Set the EXC_SYS Exception
344 jmp _Handle_Exception ; Go straight to exception handler.
345 jmp _Handle_Exception_Other ; Go straight to exception handler.
346 ;-----------------------------------------------------------------------------
347
348 ;-----------------------------------------------------------------------------
(632 . 7)(632 . 7)
350 cmp TMP, R(rT) ; CMP(TMP, Regs[rT])
351 jne _r_teq_neql
352 SetEXC EXC_Tr ; Spring the Trap Exception.
353 jmp _Handle_Exception ; Go straight to exception handler.
354 jmp _Handle_Exception_Other ; Go straight to exception handler.
355 _r_teq_neql:
356 jmp _end_cycle
357 ;-----------------------------------------------------------------------------
(656 . 7)(656 . 7)
359 cmp TMP, R(rT) ; CMP(TMP, Regs[rT])
360 je _r_tne_eql
361 SetEXC EXC_Tr ; Spring the Trap Exception.
362 jmp _Handle_Exception ; Go straight to exception handler.
363 jmp _Handle_Exception_Other ; Go straight to exception handler.
364 _r_tne_eql:
365 jmp _end_cycle
366 ;-----------------------------------------------------------------------------
- 63181E522486B260324663A2C7CB928E8110114503A0711AC596F35176CB774BCA680F59C83B3723A3ABC4EB57A3FA3D10A657CA6E6BC79A4A3706325279068B
+ D4B9D027BED497801C62B87F65849AFE1C0AD557E304FE581DBC3E3117B082D735599B13CA8093A2AC97D463FC0A5E77C0C52D682A42EF1B50917CD8FA36AD72
m/os/linux.asm
(34 . 23)(34 . 31)
371 push r14
372 push r15
373
374 ; lea rsp,[rsp-16*4]
375 ; movdqu [rsp+16*0], xmm0
376 ; movdqu [rsp+16*1], xmm1
377 ; movdqu [rsp+16*2], xmm2
378 ; movdqu [rsp+16*3], xmm2
379
380 ;; Save TLB cache:
381 lea rsp,[rsp-16*8]
382 movdqu [rsp+16*0], xmm5
383 movdqu [rsp+16*1], xmm6
384 movdqu [rsp+16*2], xmm7
385 movdqu [rsp+16*3], xmm8
386 movdqu [rsp+16*4], xmm9
387 movdqu [rsp+16*5], xmm10
388 movdqu [rsp+16*6], xmm11
389 movdqu [rsp+16*7], xmm12
390 %endmacro
391 ;-----------------------------------------------------------------------------
392
393 ;-----------------------------------------------------------------------------
394 %macro POPA 0
395
396 ; movdqu xmm1,[rsp+16*3]
397 ; movdqu xmm1,[rsp+16*2]
398 ; movdqu xmm1,[rsp+16*1]
399 ; movdqu xmm0,[rsp+16*0]
400 ; lea rsp,[rsp+16*4]
401 ;; Restore TLB cache:
402 movdqu xmm5, [rsp+16*0]
403 movdqu xmm6, [rsp+16*1]
404 movdqu xmm7, [rsp+16*2]
405 movdqu xmm8, [rsp+16*3]
406 movdqu xmm9, [rsp+16*4]
407 movdqu xmm10, [rsp+16*5]
408 movdqu xmm11, [rsp+16*6]
409 movdqu xmm12, [rsp+16*7]
410 lea rsp,[rsp+16*8]
411
412 pop r15
413 pop r14
- 6AD0018398EFC679130DB6E4ABDF4082D644BCBB623CE66C49899053B0A032A27FF1DE0702EFECD0A5BF6718ADFBDF9F1C38B7E2F1EFAE34CFAF0258E0731F8D
+ 016C026DBE4230BD120C0FC4269E61BD8A44B82580289EFC90FED0792B5893A5727E069191FBFB0E32C3C40D2700B4A39A5ACB0BE1FDBFC475274C344368626A
m/ram.asm
(85 . 44)(85 . 40)
418 ;-----------------------------------------------------------------------------
419
420 ;-----------------------------------------------------------------------------
421 ; Translate_Address : virtual address in eax; output (physical addr) in eax
422 ; Mark both Read and Write TLB Caches as blown:
423 ;-----------------------------------------------------------------------------
424 %macro Invalidate_TLB_Cache 0
425 and Flag_Reg, ~((1 << TLB_Rd_Cache_Valid) | (1 << TLB_Wr_Cache_Valid))
426 %endmacro
427 ;-----------------------------------------------------------------------------
428
429 ;-----------------------------------------------------------------------------
430 ; Virt2Phys Read : virtual address in eax; output (physical addr) in eax
431 ;-----------------------------------------------------------------------------
432 align GRAIN, db 0x90
433 _Virt_To_Phys:
434 _Virt_To_Phys_Read:
435 bt eax, 31 ; CF := 31st (top) bit of vAddr
436 jc _Above_7FFFFFFF ; If 31st bit = 1, kseg 0/1/2; else:
437 jc .Above_7FFFFFFF ; If 31st bit = 1, kseg 0/1/2; else:
438 ;; 0x00000000 <= vAddr <= 0x7FFFFFFF (kuseg) :
439 bt CP0_Status, CP0St_ERL ; CF := CP0St_ERL Flag
440 jnc _TLB_Lookup ; If ERL = 0: TLB Lookup required; else:
441 jmp _No_Tlb_Lookup ; pAddr is equal to vAddr, return.
442 _Above_7FFFFFFF:
443 jnc .TLB_Lookup ; If ERL = 0: TLB Lookup required; else:
444 ret ; pAddr is equal to vAddr, return.
445 .Above_7FFFFFFF:
446 bt eax, 30 ; CF := 30th (2nd from top) bt of vAddr
447 jc _Above_BFFFFFFF ; If 30th bit = 1 : kseg2; else:
448 jc .Above_BFFFFFFF ; If 30th bit = 1 : kseg2; else:
449 ;; 0x80000000 <= vAddr <= 0x9FFFFFFF (kseg0) :
450 ;; 0xA0000000 <= vAddr <= 0xBFFFFFFF (kseg1) :
451 and eax, 0x1FFFFFFF ; kseg0 and kseg1: clear top 3 bits,
452 jmp _No_Tlb_Lookup ; i.e. pAddr := bottom 29 bts of vAddr.
453 _Above_BFFFFFFF: ;; 0xC0000000 <= vAddr <= 0xFFFFFFFF (kseg2) :
454 bt CP0_Status, CP0St_UM ; CF := CP0St_UM Flag
455 jnc _TLB_Lookup ; If UM = 0: K. Mode, so do TLB; else:
456 test CP0_Status, (1 << CP0St_EXL) | (1 << CP0St_ERL) ; EXL or ERL
457 jnz _TLB_Lookup ; If EXL && ERL, K. Mode, do TLB
458 ret ; i.e. pAddr := bottom 29 bts of vAddr.
459 .Above_BFFFFFFF: ;; 0xC0000000 <= vAddr <= 0xFFFFFFFF (kseg2) :
460 JMP_IF_KERNELMODE .TLB_Lookup ; If Kernel Mode, do TLB lookup;
461 ;; Else: vAddr is in kseg2, but we are NOT in Kernel Mode:
462 Flg_Get IsWriting ; Is Writing?
463 jc _V2P_Eggog_Wr ; If so, we want to set AdES;
464 _V2P_Eggog_Rd: ; ... otherwise, set AdEL.
465 SetEXC EXC_AdEL ; Fetch address error.
466 jmp _V2P_Eggog_Fin ; Proceed to abort.
467 _V2P_Eggog_Wr:
468 SetEXC EXC_AdES ; Store address error.
469 _V2P_Eggog_Fin:
470 ;; Will go into exception handler instead of back to _Virt_xxx etc
471 add rsp, 16 ; squelch return to _Virt_xxx and its caller
472 push _Handle_Exception ; 'return' directly to exc handler.
473 _No_Tlb_Lookup:
474 Flg_Off IsWriting ; Consume 'is writing' flag.
475 push _Handle_Exception_Other ; 'return' directly to exc handler.
476 ret ; Done.
477 _TLB_Lookup: ; TLB Lookup Required:
478 .TLB_Lookup: ; TLB Lookup Required:
479 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
480 ;; Save ebx, ecx, edx, AUX, to xmm ;;
481 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(131 . 71)(127 . 235)
483 movd xmm2, edx
484 movd xmm3, AUX
485 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
486 Flg_Off ExcWasTLBNoMatch ; Clear the ExcWasTLBNoMatch Flag
487 mov ecx, eax ; ecx := eax (vAddr)
488 and ecx, 0xFFFFF000 ; ecx := ecx & 0xFFFFF000
489 shr ecx, 13 ; ecx := ecx >> 13 (get vAddr's Tag)
490 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
491 ;; Find out whether we actually must do the lookup, or can use cached:
492 Flg_Get TLB_Rd_Cache_Valid ; Is Read TLB Cache valid?
493 jnc .Lookup_Must ; If Read TLB Cache invalid -- must!
494 bt eax, 12 ; Test odd/even junior bit
495 jc .Rd_Cached_Odd ; If odd, look at last Odd vAddr Tag
496 .Rd_Cached_Even: ; If even, look at last Even vAddr Tag
497 movd edx, Rd_E_Last_Tag ; edx := last Even vAddr's Tag
498 cmp ecx, edx ; is the current vAddr's Tag equal?
499 jne .Lookup_Must ; ... if not, must do the lookup dance;
500 ;; ... Otherwise, we have an Even cache hit:
501 movd ebx, Rd_E_Last_PFN ; ebx := last good Even PFN
502 jmp .Cache_Hit ; apply the PFN and wrap up.
503 .Rd_Cached_Odd:
504 movd edx, Rd_O_Last_Tag ; edx := last Odd vAddr's Tag
505 cmp ecx, edx ; is the current vAddr's Tag equal?
506 jne .Lookup_Must ; ... if not, must do the lookup dance;
507 ;; ... Otherwise, we have an Odd cache hit:
508 movd ebx, Rd_O_Last_PFN ; ebx := last good Odd PFN
509 jmp .Cache_Hit ; apply the PFN and wrap up.
510 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
511 ;; Failing the above, we must actually walk the TLB:
512 .Lookup_Must:
513 movd xmm4, ecx ; xmm4 := current vAddr's Tag
514 ;; Get the active ASID:
515 mov edx, Sr(CP0_EntryHi) ; edx := CP0_EntryHi
516 and edx, 0xFF ; edx := edx & 0xFF (get current ASID)
517 ;; For each slot in table (0 .. 15), attempt lookup
518 xor AUX, AUX ; Start with the 0-th entry in table
519 _Lookup_TLB_E:
520 mov ecx, eax ; ecx := eax (vAddr)
521 and ecx, 0xFFFFF000 ; ecx := ecx & 0xFFFFF000
522 shr ecx, 13 ; ecx := ecx >> 13 (get vAddr's Tag)
523 .Lookup_TLB_E:
524 movd ecx, xmm4 ; ecx := current vAddr's Tag
525 mov ebx, TLB_E(AUX) ; ebx := current TLB entry
526 and ebx, TLB_VPN2_Mask ; get VPN2 of this entry
527 cmp ebx, ecx ; cmp(entry.VPN2, vAddr.tag)
528 jne _Lookup_TLB_E_Not_Here ; if entry.VPN2 != vAddr.tag: no match
529 jne .Lookup_TLB_E_Not_Here ; if entry.VPN2 != vAddr.tag: no match
530 mov ebx, TLB_E(AUX) ; ebx := current TLB entry
531 bt ebx, TLB_G ; is entry.G = 1?
532 jc _Lookup_TLB_E_Match ; then match.
533 jc .Lookup_TLB_E_Match ; then match.
534 shr ebx, TLB_ASID_Shift ; ebx := ebx >> TLB_ASID_Shift
535 and ebx, TLB_ASID_Mask ; ebx := entry.ASID
536 cmp ebx, edx ; entry.ASID = current ASID ?
537 jne _Lookup_TLB_E_Not_Here ; if neither G=1 nor ASID match.
538 jne .Lookup_TLB_E_Not_Here ; if neither G=1 nor ASID match.
539 mov ebx, TLB_E(AUX) ; ebx := current TLB entry
540 _Lookup_TLB_E_Match: ; TLB Match:
541 .Lookup_TLB_E_Match: ; TLB Match:
542 bt eax, 12 ; Test odd/even junior bit
543 jc _Lookup_TLB_E_Match_Odd ; If odd: test V1, D1
544 _Lookup_TLB_E_Match_Even: ; If even: test V0, D0
545 jc .Lookup_TLB_E_Match_Odd ; If odd: test V1, D1
546 .Lookup_TLB_E_Match_Even: ; If even: test V0, D0
547 bt ebx, TLB_V0 ; Is entry.V0=1 ?
548 jnc .Lookup_TLB_E_Invalid ; If not, TLBRET_INVALID
549 lea ecx, TLB_PFN_E(AUX) ; prepare to load even PFN entry
550 mov ebx, dword [ecx] ; Actually load the current PFN entry
551 movd Rd_E_Last_PFN, ebx ; Save the current PFN as last Even
552 movd ecx, xmm4 ; ecx := the current Tag
553 movd Rd_E_Last_Tag, ecx ; Save the current Tag as last Even
554 jmp .Lookup_TLB_E_Match_Yes ; Since we're reading: go to Match Yes
555 .Lookup_TLB_E_Match_Odd: ; Odd bit:
556 bt ebx, TLB_V1 ; Is entry.V1=1 ?
557 jnc .Lookup_TLB_E_Invalid ; If not, TLBRET_INVALID
558 lea ecx, TLB_PFN_O(AUX) ; prepare to load odd PFN entry
559 mov ebx, dword [ecx] ; Actually load the current PFN entry
560 movd Rd_O_Last_PFN, ebx ; Save the current PFN as last Odd
561 movd ecx, xmm4 ; ecx := the current Tag
562 movd Rd_O_Last_Tag, ecx ; Save the current Tag as last Odd
563 .Lookup_TLB_E_Match_Yes: ; This is the 'success' case
564 Flg_On TLB_Rd_Cache_Valid
565 ; Upon next TLB lookup, if cache is valid, and Tag remains same
566 ; as before, we can use the same PFN as was obtained last time
567 ; for the respective 12th bit arity of the vAddr!
568 .Cache_Hit:
569 and eax, 0xFFF ; vAddr := vAddr & 0xFFF
570 or eax, ebx ; vAddr := vAddr | entry.PFN[lowbit]
571 jmp _Lookup_TLB_Done ; vAddr is now correct pAddr, done.
572 .Lookup_TLB_E_Not_Here: ; try next one in the table, if any
573 inc AUX ; index := index + 1
574 cmp AUX, TLB_ENTRIES_COUNT ; see if still in range 0 .. n-1
575 jb .Lookup_TLB_E ; if in range, go to next entry
576 ;; ... else:
577 add rsp, 16 ; squelch return to _Virt_xxx and its caller
578 push _Handle_Exception_TLB_NoMatch ; 'return' straight to handler.
579 jmp _Lookup_TLB_E_WriteExtr ; Wrap up
580 .Lookup_TLB_E_Invalid:
581 SetEXC EXC_TLBL ; Set the EXC_TLBL Exception
582 add rsp, 16 ; squelch return to _Virt_xxx and its caller
583 push _Handle_Exception_Other ; 'return' straight to handler.
584 jmp _Lookup_TLB_E_WriteExtr ; Go to the common epilogue.
585 ;-----------------------------------------------------------------------------
586
587 ;-----------------------------------------------------------------------------
588 ; Virt2Phys Write : virtual address in eax; output (physical addr) in eax
589 ;-----------------------------------------------------------------------------
590 align GRAIN, db 0x90
591 _Virt_To_Phys_Write:
592 bt eax, 31 ; CF := 31st (top) bit of vAddr
593 jc .Above_7FFFFFFF ; If 31st bit = 1, kseg 0/1/2; else:
594 ;; 0x00000000 <= vAddr <= 0x7FFFFFFF (kuseg) :
595 bt CP0_Status, CP0St_ERL ; CF := CP0St_ERL Flag
596 jnc .TLB_Lookup ; If ERL = 0: TLB Lookup required; else:
597 ret ; pAddr is equal to vAddr, return.
598 .Above_7FFFFFFF:
599 bt eax, 30 ; CF := 30th (2nd from top) bt of vAddr
600 jc .Above_BFFFFFFF ; If 30th bit = 1 : kseg2; else:
601 ;; 0x80000000 <= vAddr <= 0x9FFFFFFF (kseg0) :
602 ;; 0xA0000000 <= vAddr <= 0xBFFFFFFF (kseg1) :
603 and eax, 0x1FFFFFFF ; kseg0 and kseg1: clear top 3 bits,
604 ret ; i.e. pAddr := bottom 29 bts of vAddr.
605 .Above_BFFFFFFF: ;; 0xC0000000 <= vAddr <= 0xFFFFFFFF (kseg2) :
606 JMP_IF_KERNELMODE .TLB_Lookup ; If Kernel Mode, do TLB lookup;
607 ;; Else: vAddr is in kseg2, but we are NOT in Kernel Mode:
608 SetEXC EXC_AdES ; Store address error.
609 ;; Will go into exception handler instead of back to _Virt_xxx etc
610 add rsp, 16 ; squelch return to _Virt_xxx and its caller
611 push _Handle_Exception_Other ; 'return' directly to exc handler.
612 ret ; Done.
613 .TLB_Lookup: ; TLB Lookup Required:
614 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
615 ;; Save ebx, ecx, edx, AUX, to xmm ;;
616 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
617 movd xmm0, ebx
618 movd xmm1, ecx
619 movd xmm2, edx
620 movd xmm3, AUX
621 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
622 mov ecx, eax ; ecx := eax (vAddr)
623 and ecx, 0xFFFFF000 ; ecx := ecx & 0xFFFFF000
624 shr ecx, 13 ; ecx := ecx >> 13 (get vAddr's Tag)
625 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
626 ;; Find out whether we actually must do the lookup, or can use cached:
627 Flg_Get TLB_Wr_Cache_Valid ; Is Write TLB Cache valid?
628 jnc .Lookup_Must ; If Write TLB Cache invalid -- must!
629 bt eax, 12 ; Test odd/even junior bit
630 jc .Wr_Cached_Odd ; If odd, look at last Odd vAddr Tag
631 .Wr_Cached_Even: ; If even, look at last Even vAddr Tag
632 movd edx, Wr_E_Last_Tag ; edx := last Even vAddr's Tag
633 cmp ecx, edx ; is the current vAddr's Tag equal?
634 jne .Lookup_Must ; ... if not, must do the lookup dance;
635 ;; ... Otherwise, we have an Even cache hit:
636 movd ebx, Wr_E_Last_PFN ; ebx := last good Even PFN
637 jmp .Cache_Hit ; apply the PFN and wrap up.
638 .Wr_Cached_Odd:
639 movd edx, Wr_O_Last_Tag ; edx := last Odd vAddr's Tag
640 cmp ecx, edx ; is the current vAddr's Tag equal?
641 jne .Lookup_Must ; ... if not, must do the lookup dance;
642 ;; ... Otherwise, we have an Odd cache hit:
643 movd ebx, Wr_O_Last_PFN ; ebx := last good Odd PFN
644 jmp .Cache_Hit ; apply the PFN and wrap up.
645 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
646 ;; Failing the above, we must actually walk the TLB:
647 .Lookup_Must:
648 movd xmm4, ecx ; xmm4 := current vAddr's Tag
649 ;; Get the active ASID:
650 mov edx, Sr(CP0_EntryHi) ; edx := CP0_EntryHi
651 and edx, 0xFF ; edx := edx & 0xFF (get current ASID)
652 ;; For each slot in table (0 .. 15), attempt lookup
653 xor AUX, AUX ; Start with the 0-th entry in table
654 .Lookup_TLB_E:
655 movd ecx, xmm4 ; ecx := current vAddr's Tag
656 mov ebx, TLB_E(AUX) ; ebx := current TLB entry
657 and ebx, TLB_VPN2_Mask ; get VPN2 of this entry
658 cmp ebx, ecx ; cmp(entry.VPN2, vAddr.tag)
659 jne .Lookup_TLB_E_Not_Here ; if entry.VPN2 != vAddr.tag: no match
660 mov ebx, TLB_E(AUX) ; ebx := current TLB entry
661 bt ebx, TLB_G ; is entry.G = 1?
662 jc .Lookup_TLB_E_Match ; then match.
663 shr ebx, TLB_ASID_Shift ; ebx := ebx >> TLB_ASID_Shift
664 and ebx, TLB_ASID_Mask ; ebx := entry.ASID
665 cmp ebx, edx ; entry.ASID = current ASID ?
666 jne .Lookup_TLB_E_Not_Here ; if neither G=1 nor ASID match.
667 mov ebx, TLB_E(AUX) ; ebx := current TLB entry
668 .Lookup_TLB_E_Match: ; TLB Match:
669 bt eax, 12 ; Test odd/even junior bit
670 jc .Lookup_TLB_E_Match_Odd ; If odd: test V1, D1
671 .Lookup_TLB_E_Match_Even: ; If even: test V0, D0
672 bt ebx, TLB_V0 ; Is entry.V0=1 ?
673 jnc _Lookup_TLB_E_Invalid ; If not, TLBRET_INVALID
674 jnc .Lookup_TLB_E_Invalid ; If not, TLBRET_INVALID
675 bt ebx, TLB_D0 ; Is entry.D0=1 ?
676 jc _Lookup_TLB_E_Match_Yes ; If entry.D0=1, then Match Yes
677 jmp _Lookup_TLB_E_Match_Wr ; else, go to 'is writing?'
678 _Lookup_TLB_E_Match_Odd: ; Odd bit:
679 lea ecx, TLB_PFN_O(AUX) ; prepare to load odd PFN entry
680 jnc .Lookup_TLB_E_Dirty ; If not, go to 'Dirty'
681 ;; Not invalid or dirty:
682 lea ecx, TLB_PFN_E(AUX) ; prepare to load even PFN entry
683 mov ebx, dword [ecx] ; Actually load the current PFN entry
684 movd Wr_E_Last_PFN, ebx ; Save the current PFN as last Even
685 movd ecx, xmm4 ; ecx := the current Tag
686 movd Wr_E_Last_Tag, ecx ; Save the current Tag as last Even
687 jmp .Lookup_TLB_E_Match_Yes ; ;; Proceed to 'match' :
688 .Lookup_TLB_E_Match_Odd: ; Odd bit:
689 bt ebx, TLB_V1 ; Is entry.V1=1 ?
690 jnc _Lookup_TLB_E_Invalid ; If not, TLBRET_INVALID
691 jnc .Lookup_TLB_E_Invalid ; If not, TLBRET_INVALID
692 bt ebx, TLB_D1 ; Is entry.D1=1 ?
693 jc _Lookup_TLB_E_Match_Yes ; If entry.D1=1, then Match Yes
694 _Lookup_TLB_E_Match_Wr:
695 Flg_Get IsWriting ; Is Writing?
696 jnc _Lookup_TLB_E_Match_Yes ; If not writing, go to Match Yes
697 _Lookup_TLB_E_Dirty: ; ... else, Dirty:
698 SetEXC EXC_Mod ; Set the EXC_Mod Exception
699 jmp _Lookup_TLB_E_WriteExtr ; Write the 'extra data' and finish.
700 _Lookup_TLB_E_Match_Yes: ; This is the 'success' case
701 jnc .Lookup_TLB_E_Dirty ; If not, go to 'Dirty'
702 ;; Not invalid or dirty:
703 lea ecx, TLB_PFN_O(AUX) ; prepare to load odd PFN entry
704 mov ebx, dword [ecx] ; Actually load the current PFN entry
705 movd Wr_O_Last_PFN, ebx ; Save the current PFN as last Odd
706 movd ecx, xmm4 ; ecx := the current Tag
707 movd Wr_O_Last_Tag, ecx ; Save the current Tag as last Odd
708 ;; Proceed to 'match' :
709 .Lookup_TLB_E_Match_Yes: ; This is the 'success' case
710 Flg_On TLB_Wr_Cache_Valid
711 ; Upon next TLB lookup, if cache is valid, and Tag remains same
712 ; as before, we can use the same PFN as was obtained last time
713 ; for the respective 12th bit arity of the vAddr!
714 .Cache_Hit:
715 and eax, 0xFFF ; vAddr := vAddr & 0xFFF
716 or eax, ebx ; vAddr := vAddr | entry.PFN[lowbit]
717 jmp _Lookup_TLB_Done ; vAddr is now correct pAddr, done.
718 _Lookup_TLB_E_Not_Here: ; try next one in the table, if any
719 .Lookup_TLB_E_Not_Here: ; try next one in the table, if any
720 inc AUX ; index := index + 1
721 cmp AUX, TLB_ENTRIES_COUNT ; see if still in range 0 .. n-1
722 jb _Lookup_TLB_E ; if in range, go to next entry
723 jb .Lookup_TLB_E ; if in range, go to next entry
724 ;; ... else:
725 Flg_On ExcWasTLBNoMatch ; Set the ExcWasTLBNoMatch Flag
726 ;; ... now drop down into 'invalid' :
727 _Lookup_TLB_E_Invalid:
728 Flg_Get IsWriting ; Was Writing?
729 jc _Lookup_TLB_E_Invalid_W ; If so, we want to set EXC_TLBS
730 _Lookup_TLB_E_Invalid_R: ; Otherwise, set EXC_TLBL exception
731 SetEXC EXC_TLBL ; Set the EXC_TLBL Exception
732 add rsp, 16 ; squelch return to _Virt_xxx and its caller
733 push _Handle_Exception_TLB_NoMatch ; 'return' straight to handler.
734 jmp _Lookup_TLB_E_WriteExtr ; Wrap up
735 .Lookup_TLB_E_Dirty: ; ... else, Dirty:
736 SetEXC EXC_Mod ; Set the EXC_Mod Exception
737 add rsp, 16 ; squelch return to _Virt_xxx and its caller
738 push _Handle_Exception_Other ; 'return' straight to handler.
739 jmp _Lookup_TLB_E_WriteExtr ; Write the 'extra data' and finish.
740 _Lookup_TLB_E_Invalid_W:
741 .Lookup_TLB_E_Invalid: ; Invalid Write:
742 SetEXC EXC_TLBS ; Set the EXC_TLBS Exception
743 ;; then drop down to 'write extra' :
744 add rsp, 16 ; squelch return to _Virt_xxx and its caller
745 push _Handle_Exception_Other ; 'return' straight to handler.
746 ;; then drop down to _Lookup_TLB_E_WriteExtr
747
748 ;-----------------------------------------------------------------------------
749 ; Epilogue common to _Virt_To_Phys_Read and _Virt_To_Phys_Write:
750 ;-----------------------------------------------------------------------------
751 _Lookup_TLB_E_WriteExtr: ; Write the 'extra data' and finish
752 mov Sr(CP0_BadVAddr), eax ; CP0_BadVAddr := vAddr
753 mov ecx, eax ; ecx := vAddr
(210 . 13)(370 . 15)
755 and ebx, 0xFF ; ebx := ebx & 0xFF
756 and ecx, 0xFFFFE000 ; ecx := ecx & 0xFFFFE000
757 or ebx, ecx ; ebx := ebx | ecx
758 cmp ebx, Sr(CP0_EntryHi) ; Find whether changing CP0_EntryHi
759 je .Not_Changed_EntryHi ; ... if not, skip;
760 .Changed_EntryHi: ; If we are changing CP0_EntryHi:
761 Invalidate_TLB_Cache ; Invalidate both R and W TLB Caches
762 mov Sr(CP0_EntryHi), ebx ; CP0_EntryHi := ebx
763 .Not_Changed_EntryHi:
764 ;; Will go into exception handler instead of back to _Virt_xxx etc
765 add rsp, 16 ; squelch return to _Virt_xxx and its caller
766 push _Handle_Exception ; 'return' directly to exc handler.
767 ;; and drop into 'done' :
768 _Lookup_TLB_Done:
769 Flg_Off IsWriting ; Consume 'is writing' flag.
770 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
771 ;; Restore ebx, ecx, edx, AUX, from xmm ;;
772 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(234 . 7)(396 . 7)
774 ; 2) would go faster if used tmp instead of self-clobbering rax ?
775 align GRAIN, db 0x90
776 _Virt_Read_Word:
777 call _Virt_To_Phys ; Transform vAddr to pAddr
778 call _Virt_To_Phys_Read ; Transform vAddr to pAddr (for Read)
779 test eax, 0x3 ; Are any of the bottom 2 bits set?
780 jnz _V_Rd_Word_Unaligned ; If yes, go to eggog. Else:
781 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(256 . 7)(418 . 7)
783 SetEXC EXC_AdEL ; Fetch address error.
784 ;; Will go into exception handler instead of back to caller
785 add rsp, 8 ; squelch return to original caller
786 push _Handle_Exception ; 'return' directly to exc handler.
787 push _Handle_Exception_Other ; 'return' directly to exc handler.
788 ret ; Go there.
789 ;-----------------------------------------------------------------------------
790
(268 . 8)(430 . 7)
792 ; 3) do we need to explicitly zero-extend rax here?
793 align GRAIN, db 0x90
794 _Virt_Write_Word:
795 Flg_On IsWriting ; Tell the translator that we're writing
796 call _Virt_To_Phys ; Transform vAddr to pAddr
797 call _Virt_To_Phys_Write ; Transform vAddr to pAddr (for Write)
798 test eax, 0x3 ; Are any of the bottom 2 bits set?
799 jnz _V_Wr_Word_Unaligned ; If yes, go to eggog. Else:
800 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(291 . 7)(452 . 7)
802 SetEXC EXC_AdES ; Store address error.
803 ;; Will go into exception handler instead of back to caller
804 add rsp, 8 ; squelch return to original caller
805 push _Handle_Exception ; 'return' directly to exc handler.
806 push _Handle_Exception_Other ; 'return' directly to exc handler.
807 ret ; Go there.
808 ;-----------------------------------------------------------------------------
809
(300 . 7)(461 . 7)
811 ;-----------------------------------------------------------------------------
812 align GRAIN, db 0x90
813 _Virt_Read_Byte:
814 call _Virt_To_Phys ; Transform vAddr to pAddr
815 call _Virt_To_Phys_Read ; Transform vAddr to pAddr (for Read)
816 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
817 ;; If pAddr is in Memory-Mapped Device space:
818 JMP_If_In eax, MMIO_BASE, MMIO_TOP, _Phys_Device_Read_Byte
(324 . 7)(485 . 7)
820 SetEXC EXC_AdEL ; Fetch address error.
821 ;; Will go into exception handler instead of back to caller
822 add rsp, 8 ; squelch return to original caller
823 push _Handle_Exception ; 'return' directly to exc handler.
824 push _Handle_Exception_Other ; 'return' directly to exc handler.
825 ret ; Go there.
826 ;-----------------------------------------------------------------------------
827
(333 . 8)(494 . 7)
829 ;-----------------------------------------------------------------------------
830 align GRAIN, db 0x90
831 _Virt_Write_Byte:
832 Flg_On IsWriting ; Tell the translator that we're writing
833 call _Virt_To_Phys ; Transform vAddr to pAddr
834 call _Virt_To_Phys_Write ; Transform vAddr to pAddr (for Write)
835 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
836 ;; If pAddr is in Memory-Mapped Device space:
837 JMP_If_In eax, MMIO_BASE, MMIO_TOP, _Phys_Device_Write_Byte
(357 . 7)(517 . 7)
839 SetEXC EXC_AdES ; Store address error.
840 ;; Will go into exception handler instead of back to caller
841 add rsp, 8 ; squelch return to original caller
842 push _Handle_Exception ; 'return' directly to exc handler.
843 push _Handle_Exception_Other ; 'return' directly to exc handler.
844 ret ; Go there.
845 ;-----------------------------------------------------------------------------
846
(416 . 5)(576 . 6)
848 ;; Store PFN:
849 mov TLB_PFN_E(AUX), edx ; Store PFN0
850 mov TLB_PFN_O(AUX), ebx ; Store PFN1
851 Invalidate_TLB_Cache ; Invalidate both R and W TLB Caches
852 ret ; Done.
853 ;-----------------------------------------------------------------------------