simd_tlb_errata.kv

all signers: asciilifeform

antecedents: simd_tlb_lookup.kv

press order:

m_genesis.kv (asciilifeform)
errata_slaveirq.kv (asciilifeform)
tlb_and_exc_speedup.kv (asciilifeform)
simd_tlb_lookup.kv (asciilifeform)
simd_tlb_errata.kv (asciilifeform)

patch:

- D9F1B9E441A1F4E5D314D5C5F43319591CA03904B3AC36E1EDE4D7310B5FF30D2E0C720695F95733F1AC3E2E829FA96700B6E8776ACC974CFD30C6994EDAF4B6
+ 95D0CEF8C6FD3896260DCDDF172F30FBB868DC843903341B1EF03B9845D032D6F8C8B6CB3FA0624BB6A5390AC3B55EA020203BDEDEB1DC8AC35D28ED84A6E642
m/MANIFEST.TXT
(2 . 3)(2 . 4)
  586747 errata_slaveirq "Fix of slave IRQ clearing."
  586983 tlb_and_exc_speedup "Exc. handler fastpaths and TLB caching."
  587480 simd_tlb_lookup "Experimental variant with SIMDistic TLB."
+ 587535 simd_tlb_errata "Remove the nonfunctional TLB cache."
- CB16F8AB1A1E89FCE1364577DD83A456E0859379A3A9FA42C883A18BC9962D7B017E5D2C99341C60E1D41B510FD0D588FCE6D55D058E49E62D89701099CC8080
+ 97D32B9FAC3A97819E48C890750394851EF48F2776C02904160BF945143307E5C055DCC451279BF59A7ED3988A05BF98209B7B7006A1339D4FD2DB49688765CB
m/flags.asm
(23 . 9)(23 . 7)
  %define RunningDelaySlot 1
  %define Waiting 2
  %define LL_Bit 3
- %define TLB_Rd_Cache_Valid 4
- %define TLB_Wr_Cache_Valid 5
- %define Shutdown 6
+ %define Shutdown 4
  ; Positions 31 .. 15 store TLB's 'G' Flags
  ;-----------------------------------------------------------------------------
  ; Set a given Flag:
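
The renumbering is the point of this hunk: with the two cache-valid flags gone, Shutdown drops from bit 6 to bit 4. For readers without the rest of flags.asm at hand, a minimal C sketch of the convention these %defines feed -- each name is a bit position in a single Flag_Reg word; the helper functions are illustrative stand-ins for the asm's flag macros, not the emulator's own code:

    #include <stdint.h>
    #include <stdio.h>

    /* Bit positions as in flags.asm after this patch: */
    enum {
        RunningDelaySlot = 1,
        Waiting          = 2,
        LL_Bit           = 3,
        Shutdown         = 4   /* was 6 while the cache-valid flags held 4 and 5 */
    };

    static uint32_t flag_reg;                               /* stands in for Flag_Reg */

    static void flg_set(int b) { flag_reg |=  (1u << b); }  /* 'Set a given Flag'     */
    static void flg_clr(int b) { flag_reg &= ~(1u << b); }
    static int  flg_get(int b) { return (flag_reg >> b) & 1; } /* asm returns it in CF */

    int main(void) {
        flg_set(Shutdown);
        printf("Shutdown=%d Waiting=%d\n", flg_get(Shutdown), flg_get(Waiting));
        flg_clr(Shutdown);
        return 0;
    }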
- 343FF34A3CBC7CD5D51C465B8B91754C546C841055B6E84DFC8E928262E958534E727DC20EC0900B103F82F57895CBFB372D0789FAE1410B593746F76125187A
+ D0FC5BD7C10B69136B93C51DEF7A043BB287515D3CEE61FBCC46C2F07024B12165C81B979658E748BE6DCD67FC48D73630FDEAEF1F0F0D99536A5E424D01E10C
m/mipsinst/m_instrs.asm
(486 . 11)(486 . 7)
  jnz _mtc0_unknown ; ... then unknown; else:
  and ebx, ~0x1F00 ; T := T & ~0x1F00
- cmp ebx, Sr(CP0_EntryHi) ; Find whether changing CP0_EntryHi
- je .Not_Changed_EntryHi ; ... if not, skip;
- .Changed_EntryHi: ; If we are changing CP0_EntryHi:
- Invalidate_TLB_Cache ; Invalidate both R and W TLB Caches
  mov Sr(CP0_EntryHi), ebx ; CP0_EntryHi := ebx
  .Not_Changed_EntryHi:
  jmp _end_cycle ; Done
  ;-----------------------------------------------------------------------------
  _mtc0_r11: ; 0x0b
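
Reduced to C, this hunk's change to the CP0_EntryHi write path is just the removal of the changed-value test and its invalidation hook. A sketch under stated assumptions: cp0_entryhi and invalidate_tlb_cache are illustrative stand-ins, as the emulator has no C layer:

    #include <stdint.h>

    static uint32_t cp0_entryhi;              /* stand-in for Sr(CP0_EntryHi)  */
    static void invalidate_tlb_cache(void) {} /* stood for the deleted macro   */

    /* Before (simd_tlb_lookup): invalidate both TLB caches iff value changes. */
    static void mtc0_entryhi_old(uint32_t t) {
        t &= ~0x1F00u;                        /* and ebx, ~0x1F00              */
        if (t != cp0_entryhi)                 /* cmp / je .Not_Changed_EntryHi */
            invalidate_tlb_cache();           /* Invalidate_TLB_Cache          */
        cp0_entryhi = t;                      /* mov Sr(CP0_EntryHi), ebx      */
    }

    /* After this patch: no cache left to invalidate; write unconditionally.   */
    static void mtc0_entryhi_new(uint32_t t) {
        t &= ~0x1F00u;
        cp0_entryhi = t;
    }

    int main(void) { mtc0_entryhi_old(0x1234); mtc0_entryhi_new(0x1234); return 0; }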
- 19593ABC66AB9FF8A02FA39884524BBA012AAF3AFAB4C0A588272D07B9269BB59140F584519317BA0C2F412FC0B47D31CD4FEA28D3D6754A3F5BBF7BACCA3E78
+ 59A2D5464B9827F83CD714D25071D7608C79C1C7ABA9004A308EA6D216943293347F57D909496810C79F2B9298D80C99A3130224C49543A26D60F0B0DC77D72B
m/ram.asm
(85 . 14)(85 . 6)
  ;-----------------------------------------------------------------------------

- ;-----------------------------------------------------------------------------
- ; Mark both Read and Write TLB Caches as blown:
- ;-----------------------------------------------------------------------------
- %macro Invalidate_TLB_Cache 0
- and Flag_Reg, ~((1 << TLB_Rd_Cache_Valid) | (1 << TLB_Wr_Cache_Valid))
- %endmacro
- ;-----------------------------------------------------------------------------
-
  ;-----------------------------------------------------------------------------
  section .bss
  align 32
  TLB_TAG_BYTE_0_COPY resb 16 ; Byte-0 of each TLB entry Tag
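
The macro deleted above cleared both cache-valid bits with a single AND, leaving every other bit of Flag_Reg untouched. The mask arithmetic, as a self-checking C snippet:

    #include <stdint.h>
    #include <assert.h>

    #define TLB_RD_CACHE_VALID 4
    #define TLB_WR_CACHE_VALID 5

    int main(void) {
        uint32_t flag_reg = 0xFFFFFFFFu;  /* every flag lit */
        /* the macro body: and Flag_Reg, ~((1 << Rd_Valid) | (1 << Wr_Valid)) */
        flag_reg &= ~((1u << TLB_RD_CACHE_VALID) | (1u << TLB_WR_CACHE_VALID));
        assert((flag_reg & 0x30u) == 0);  /* bits 4 and 5 now clear  */
        assert(flag_reg == 0xFFFFFFCFu);  /* all else undisturbed    */
        return 0;
    }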
(140 . 27)(132 . 6)
  mov ecx, eax ; ecx := eax (vAddr)
  and ecx, 0xFFFFF000 ; ecx := ecx & 0xFFFFF000
  shr ecx, 13 ; ecx := ecx >> 13 (get vAddr's Tag)
- ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
- ; Find out whether we actually must do the lookup, or can use cached:
- Flg_Get TLB_Rd_Cache_Valid ; Is Read TLB Cache valid?
- jnc .Lookup_Must ; If Read TLB Cache invalid -- must!
- ; If cache is valid, lookup:
- mov AUX, ecx ; AUX := tag
- xor ecx, ecx ; ecx := 0
- bt eax, 12 ; Test vAddr's odd/even junior bit
- setc cl ; ecx := {1 if a-odd, 0 if a-even}
- shl rcx, 6 ; rcx := {64 if a-odd, 0 if a-even}
- ; get the last-good-Tags:
- movq rbx, R_TLB_Last_Good_Tag ; Get last good R-Tag pair
- shr rbx, cl ; if arity is odd, get top half
- cmp ebx, AUX ; is current Tag == to last-good ?
- jne .Lookup_Must ; ... if not, go to Lookup_Must
- ; given Tag matched last-good. So get last-good PFN and wrap up:
- movq rbx, R_TLB_Last_Good_PFN ; Get last good PFN pair
- shr rbx, cl ; if arity is odd, get top half
- jmp .PFN_And_Done ; use ebx as the PFN and wrap up.
- ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
- .Lookup_Must:
  movd xmm4, ecx ; ecx := copy of Tag
  ;; Search for B0, B1, B2 of Tag, accumulate result in ebx ;;
  ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
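
An aside on why the fast path deleted above could not have worked as commented: setc cl / shl rcx, 6 yields a shift count of 64 on an odd page, but x86-64 shifts mask their count to 6 bits, so shr rbx, 64 executes as shr rbx, 0 and the odd-page case silently reads the even half of the packed tag pair (the correct selector for 32-bit halves is a count of 32, i.e. shl rcx, 5). The write-path copy of this code, further below, has the identical defect; plausibly this is the 'nonfunctional' of the patch's one-line description, though the patch itself does not say. A C demonstration with the x86 count-masking made explicit:

    #include <stdint.h>
    #include <stdio.h>

    /* x86-64 SHR on a 64-bit register masks the count to 6 bits (mod 64). */
    static uint64_t shr64(uint64_t x, unsigned count) { return x >> (count & 63); }

    int main(void) {
        /* Packed last-good tags: even-page tag in bits 31..0, odd in 63..32. */
        uint64_t tag_pair = ((uint64_t)0x2222 << 32) | 0x1111;

        uint32_t vaddr = 0x00401000;               /* bit 12 set: odd page     */
        unsigned cl = ((vaddr >> 12) & 1) << 6;    /* setc cl; shl rcx,6 -> 64 */
        printf("selected: %04x (wanted 2222)\n",
               (unsigned)shr64(tag_pair, cl));     /* prints 1111: wrong half  */

        unsigned cl_ok = ((vaddr >> 12) & 1) << 5; /* count 32: correct half   */
        printf("with count 32: %04x\n", (unsigned)shr64(tag_pair, cl_ok));
        return 0;
    }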
(245 . 20)(216 . 9)
  jnc .Invalid_R ; ... V == 0, then go to Invalid
  ; Now let's load the PFN:
  mov rbx, TLB_PFN(AUX64) ; load the PFN pair to rbx
- ; ebx is now the PFN. Before wrapping up, update the TLB read cache :
- movq R_TLB_Last_Good_PFN, rbx ; Set last good PFN to this PFN:
+ ; ebx is now the PFN.
  ; now leave only the correct half of PFN, at bottom of rbx:
  shr rbx, cl ; if arity is odd, get upper 32bit
- ; set correct half of R_TLB_Last_Good_Tag to the found Tag:
- mov rdx, 0xFFFFFFFF00000000 ; rdx := 0xFFFFFFFF00000000
- shr rdx, cl ; if arity is odd, keep bottom
- movq AUX64, R_TLB_Last_Good_Tag ; get last good Tag
- and AUX64, rdx ; zap correct half of last good tag
- movq rdx, xmm4 ; get the Tag again :
- shl rdx, cl ; if arity if odd, slide into pos:
- or AUX64, rdx ; now or it into place
- movq R_TLB_Last_Good_Tag, AUX64 ; update last good Tag.
- .PFN_And_Done:
  and eax, 0xFFF ; vAddr := vAddr & 0xFFF
  or eax, ebx ; vAddr := vAddr | entry.PFN[lowbit]
  jmp _Lookup_TLB_Done ; vAddr is now correct pAddr, done.
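
The bookkeeping deleted in this hunk spliced the freshly looked-up tag into one half of the packed last-good pair: shift a keep-mask over the half to preserve, AND it over the old pair, slide the new tag into position, OR it in. A C sketch of the intended splice (names illustrative; note that with the count of 64 the deleted code actually produced, mask and shift both degenerate to no-ops, so the even half was clobbered regardless of arity):

    #include <stdint.h>
    #include <stdio.h>

    /* Intended effect of: mov rdx,0xFFFFFFFF00000000 / shr rdx,cl / and /
       shl rdx,cl / or.  cl = 0 replaces the even half, cl = 32 the odd.  */
    static uint64_t splice_tag(uint64_t pair, uint32_t tag, unsigned cl) {
        uint64_t keep = 0xFFFFFFFF00000000ull >> (cl & 63); /* half preserved */
        return (pair & keep) | ((uint64_t)tag << (cl & 63));
    }

    int main(void) {
        uint64_t pair = 0x2222222211111111ull;  /* odd tag | even tag */
        printf("%016llx\n", (unsigned long long)splice_tag(pair, 0xAAAA, 0));
        /* 222222220000aaaa : even half replaced */
        printf("%016llx\n", (unsigned long long)splice_tag(pair, 0xBBBB, 32));
        /* 0000bbbb11111111 : odd half replaced  */
        return 0;
    }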
(313 . 27)(273 . 6)
  mov ecx, eax ; ecx := eax (vAddr)
  and ecx, 0xFFFFF000 ; ecx := ecx & 0xFFFFF000
  shr ecx, 13 ; ecx := ecx >> 13 (get vAddr's Tag)
- ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
- ; Find out whether we actually must do the lookup, or can use cached:
- Flg_Get TLB_Wr_Cache_Valid ; Is Write TLB Cache valid?
- jnc .Lookup_Must ; If Write TLB Cache invalid -- must!
- ; If cache is valid, lookup:
- mov AUX, ecx ; AUX := tag
- xor ecx, ecx ; ecx := 0
- bt eax, 12 ; Test vAddr's odd/even junior bit
- setc cl ; ecx := {1 if a-odd, 0 if a-even}
- shl rcx, 6 ; rcx := {64 if a-odd, 0 if a-even}
- ; get the last-good-Tags:
- movq rbx, W_TLB_Last_Good_Tag ; Get last good W-Tag pair
- shr rbx, cl ; if arity is odd, get top half
- cmp ebx, AUX ; is current Tag == to last-good ?
- jne .Lookup_Must ; ... if not, go to Lookup_Must
- ; given Tag matched last-good. So get last-good PFN and wrap up:
- movq rbx, W_TLB_Last_Good_PFN ; Get last good PFN pair
- shr rbx, cl ; if arity is odd, get top half
- jmp .PFN_And_Done ; use ebx as the PFN and wrap up.
- ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
- .Lookup_Must:
  movd xmm4, ecx ; ecx := copy of Tag
  ;; Search for B0, B1, B2 of Tag, accumulate result in ebx ;;
  ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(422 . 20)(361 . 8)
  jnc .Dirty_W ; ... if D == 0, then go to Dirty
  ; Now let's load the correct odd or even PFN:
  mov rbx, TLB_PFN(AUX64) ; load the PFN pair to rbx
- ; ebx is now the PFN. Before wrapping up, update the TLB read cache :
- movq W_TLB_Last_Good_PFN, rbx ; Set last good PFN to this PFN:
  ; now leave only the correct half of PFN, at bottom of rbx:
  shr rbx, cl ; if arity is odd, get upper 32bit
- ; set correct half of R_TLB_Last_Good_Tag to the found Tag:
- mov rdx, 0xFFFFFFFF00000000 ; rdx := 0xFFFFFFFF00000000
- shr rdx, cl ; if arity is odd, keep bottom
- movq AUX64, W_TLB_Last_Good_Tag ; get last good Tag
- and AUX64, rdx ; zap correct half of last good tag
- movq rdx, xmm4 ; get the Tag again :
- shl rdx, cl ; if arity if odd, slide into pos:
- or AUX64, rdx ; now or it into place
- movq W_TLB_Last_Good_Tag, AUX64 ; update last good Tag.
- .PFN_And_Done:
  and eax, 0xFFF ; vAddr := vAddr & 0xFFF
  or eax, ebx ; vAddr := vAddr | entry.PFN[lowbit]
  jmp _Lookup_TLB_Done ; vAddr is now correct pAddr, done.
(475 . 12)(402 . 7)
  and ebx, 0xFF ; ebx := ebx & 0xFF
  and ecx, 0xFFFFE000 ; ecx := ecx & 0xFFFFE000
  or ebx, ecx ; ebx := ebx | ecx
- cmp ebx, Sr(CP0_EntryHi) ; Find whether changing CP0_EntryHi
- je .Not_Changed_EntryHi ; ... if not, skip;
- .Changed_EntryHi: ; If we are changing CP0_EntryHi:
- Invalidate_TLB_Cache ; Invalidate both R and W TLB Caches
  mov Sr(CP0_EntryHi), ebx ; CP0_EntryHi := ebx
- .Not_Changed_EntryHi:
  ;; Will go into exception handler instead of back to _Virt_xxx etc
  ;; and drop into 'done' :
  _Lookup_TLB_Done:
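
Finally, the EntryHi word this last hunk assembles (and, after the patch, writes back without any invalidation step): ASID in the low 8 bits, VPN2 above bit 13, per the MIPS32 CP0_EntryHi layout. The and/and/or triplet in C:

    #include <stdint.h>
    #include <stdio.h>

    /* CP0_EntryHi as used here: VPN2 in bits 31..13, ASID in bits 7..0. */
    static uint32_t make_entryhi(uint32_t asid, uint32_t vaddr) {
        return (asid & 0xFFu) | (vaddr & 0xFFFFE000u);
    }

    int main(void) {
        printf("%08x\n", make_entryhi(0x42, 0x00403FFF)); /* -> 00402042 */
        return 0;
    }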