package maps

import (
	"internal/abi"
	"internal/race"
	"internal/runtime/sys"
	"unsafe"
)
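
// The functions below are the fast paths used for maps with 32-bit keys
// (for example map[uint32]T and map[int32]T). They follow the same
// swiss-table layout as the generic map routines: each group holds
// abi.SwissMapGroupSlots slots plus a per-group control word, the low bits
// of the hash (h2) are matched against the control bytes, and the remaining
// bits (h1) select the starting group of the probe sequence.

// runtime_mapaccess1_fast32 returns a pointer to the element for key. It
// never returns nil; if the key is not present it returns a pointer to the
// shared zero object.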
func runtime_mapaccess1_fast32(typ *abi.SwissMapType, m *Map, key uint32) unsafe.Pointer {
	if race.Enabled && m != nil {
		callerpc := sys.GetCallerPC()
		pc := abi.FuncPCABIInternal(runtime_mapaccess1_fast32)
		race.ReadPC(unsafe.Pointer(m), callerpc, pc)
	}

	if m == nil || m.Used() == 0 {
		return unsafe.Pointer(&zeroVal[0])
	}

	if m.writing != 0 {
		fatal("concurrent map read and map write")
		return nil
	}

	if m.dirLen == 0 {
		// Small map: a single group and no directory. Scan the full
		// slots directly.
		g := groupReference{
			data: m.dirPtr,
		}
		full := g.ctrls().matchFull()
		slotKey := g.key(typ, 0)
		slotSize := typ.SlotSize
		for full != 0 {
			if key == *(*uint32)(slotKey) && full.lowestSet() {
				slotElem := unsafe.Pointer(uintptr(slotKey) + typ.ElemOff)
				return slotElem
			}
			slotKey = unsafe.Pointer(uintptr(slotKey) + slotSize)
			full = full.shiftOutLowest()
		}
		return unsafe.Pointer(&zeroVal[0])
	}

	k := key
	hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed)

	// Select table.
	idx := m.directoryIndex(hash)
	t := m.directoryAt(idx)

	// Probe table.
	seq := makeProbeSeq(h1(hash), t.groups.lengthMask)
	for ; ; seq = seq.next() {
		g := t.groups.group(typ, seq.offset)

		match := g.ctrls().matchH2(h2(hash))

		for match != 0 {
			i := match.first()

			slotKey := g.key(typ, i)
			if key == *(*uint32)(slotKey) {
				slotElem := unsafe.Pointer(uintptr(slotKey) + typ.ElemOff)
				return slotElem
			}
			match = match.removeFirst()
		}

		match = g.ctrls().matchEmpty()
		if match != 0 {
			// Finding an empty slot means we have reached the end
			// of the probe sequence: the key is not present.
			return unsafe.Pointer(&zeroVal[0])
		}
	}
}
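
// runtime_mapaccess2_fast32 is the comma-ok variant of
// runtime_mapaccess1_fast32: it additionally reports whether the key was
// present, for accesses of the form v, ok := m[k].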
func runtime_mapaccess2_fast32(typ *abi.SwissMapType, m *Map, key uint32) (unsafe.Pointer, bool) {
	if race.Enabled && m != nil {
		callerpc := sys.GetCallerPC()
		pc := abi.FuncPCABIInternal(runtime_mapaccess2_fast32)
		race.ReadPC(unsafe.Pointer(m), callerpc, pc)
	}

	if m == nil || m.Used() == 0 {
		return unsafe.Pointer(&zeroVal[0]), false
	}

	if m.writing != 0 {
		fatal("concurrent map read and map write")
		return nil, false
	}

	if m.dirLen == 0 {
		// Small map: a single group and no directory. Scan the full
		// slots directly.
		g := groupReference{
			data: m.dirPtr,
		}
		full := g.ctrls().matchFull()
		slotKey := g.key(typ, 0)
		slotSize := typ.SlotSize
		for full != 0 {
			if key == *(*uint32)(slotKey) && full.lowestSet() {
				slotElem := unsafe.Pointer(uintptr(slotKey) + typ.ElemOff)
				return slotElem, true
			}
			slotKey = unsafe.Pointer(uintptr(slotKey) + slotSize)
			full = full.shiftOutLowest()
		}
		return unsafe.Pointer(&zeroVal[0]), false
	}

	k := key
	hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed)

	// Select table.
	idx := m.directoryIndex(hash)
	t := m.directoryAt(idx)

	// Probe table.
	seq := makeProbeSeq(h1(hash), t.groups.lengthMask)
	for ; ; seq = seq.next() {
		g := t.groups.group(typ, seq.offset)

		match := g.ctrls().matchH2(h2(hash))

		for match != 0 {
			i := match.first()

			slotKey := g.key(typ, i)
			if key == *(*uint32)(slotKey) {
				slotElem := unsafe.Pointer(uintptr(slotKey) + typ.ElemOff)
				return slotElem, true
			}
			match = match.removeFirst()
		}

		match = g.ctrls().matchEmpty()
		if match != 0 {
			// Finding an empty slot means we have reached the end
			// of the probe sequence: the key is not present.
			return unsafe.Pointer(&zeroVal[0]), false
		}
	}
}
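
// putSlotSmallFast32 returns a pointer to the element slot for key in a
// small map (a single group, no directory), claiming a free slot if the key
// is not already present. The caller has already checked that the group has
// room for one more entry.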
func (m *Map) putSlotSmallFast32(typ *abi.SwissMapType, hash uintptr, key uint32) unsafe.Pointer {
	g := groupReference{
		data: m.dirPtr,
	}

	match := g.ctrls().matchH2(h2(hash))

	// Look for an existing slot containing this key.
	for match != 0 {
		i := match.first()

		slotKey := g.key(typ, i)
		if key == *(*uint32)(slotKey) {
			slotElem := g.elem(typ, i)
			return slotElem
		}
		match = match.removeFirst()
	}

	// Small maps never contain deleted slots, so any empty-or-deleted
	// match is a usable empty slot. matchEmptyOrDeleted is used here
	// because it is slightly cheaper than matchEmpty.
	match = g.ctrls().matchEmptyOrDeleted()
	if match == 0 {
		fatal("small map with no empty slot (concurrent map writes?)")
	}

	i := match.first()

	slotKey := g.key(typ, i)
	*(*uint32)(slotKey) = key

	slotElem := g.elem(typ, i)

	g.ctrls().set(i, ctrl(h2(hash)))
	m.used++

	return slotElem
}
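
// runtime_mapassign_fast32 returns a pointer to the element slot for key,
// growing the map and inserting the key as needed. The caller (compiled
// code for m[k] = v) stores the element value through the returned pointer.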
func runtime_mapassign_fast32(typ *abi.SwissMapType, m *Map, key uint32) unsafe.Pointer {
	if m == nil {
		panic(errNilAssign)
	}
	if race.Enabled {
		callerpc := sys.GetCallerPC()
		pc := abi.FuncPCABIInternal(runtime_mapassign_fast32)
		race.WritePC(unsafe.Pointer(m), callerpc, pc)
	}
	if m.writing != 0 {
		fatal("concurrent map writes")
	}

	k := key
	hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed)

	// Set writing after calling Hasher, since Hasher may panic, in which
	// case we have not actually done a write.
	m.writing ^= 1 // toggle the write flag

	if m.dirPtr == nil {
		m.growToSmall(typ)
	}

	if m.dirLen == 0 {
		if m.used < abi.SwissMapGroupSlots {
			elem := m.putSlotSmallFast32(typ, hash, key)

			if m.writing == 0 {
				fatal("concurrent map writes")
			}
			m.writing ^= 1

			return elem
		}

		// Can't fit another entry in the small map; grow to a full table.
		m.growToTable(typ)
	}

	var slotElem unsafe.Pointer
outer:
	for {
		// Select table.
		idx := m.directoryIndex(hash)
		t := m.directoryAt(idx)

		seq := makeProbeSeq(h1(hash), t.groups.lengthMask)

		// As we look for a match, keep track of the first deleted
		// (tombstone) slot we find, which we'll use for the new entry
		// if the key turns out not to be present.
		var firstDeletedGroup groupReference
		var firstDeletedSlot uintptr

		for ; ; seq = seq.next() {
			g := t.groups.group(typ, seq.offset)
			match := g.ctrls().matchH2(h2(hash))

			// Look for an existing slot containing this key.
			for match != 0 {
				i := match.first()

				slotKey := g.key(typ, i)
				if key == *(*uint32)(slotKey) {
					slotElem = g.elem(typ, i)

					t.checkInvariants(typ, m)
					break outer
				}
				match = match.removeFirst()
			}

			// No existing slot for this key in this group. Is this
			// the end of the probe sequence?
			match = g.ctrls().matchEmptyOrDeleted()
			if match == 0 {
				continue // nothing but filled slots, keep probing.
			}
			i := match.first()
			if g.ctrls().get(i) == ctrlDeleted {
				// There are some deleted slots. Remember the
				// first one and keep probing.
				if firstDeletedGroup.data == nil {
					firstDeletedGroup = g
					firstDeletedSlot = i
				}
				continue
			}

			// We've found an empty slot, which means we've reached
			// the end of the probe sequence.

			// If we found a deleted slot along the way, we can
			// reuse it without consuming growthLeft.
			if firstDeletedGroup.data != nil {
				g = firstDeletedGroup
				i = firstDeletedSlot
				t.growthLeft++ // will be decremented below to become a no-op.
			}

			// If the table is full, pruning tombstones may free up a slot.
			if t.growthLeft == 0 {
				t.pruneTombstones(typ, m)
			}

			// If there is room left to grow, just insert the new entry.
			if t.growthLeft > 0 {
				slotKey := g.key(typ, i)
				*(*uint32)(slotKey) = key

				slotElem = g.elem(typ, i)

				g.ctrls().set(i, ctrl(h2(hash)))
				t.growthLeft--
				t.used++
				m.used++

				t.checkInvariants(typ, m)
				break outer
			}

			t.rehash(typ, m)
			continue outer
		}
	}

	if m.writing == 0 {
		fatal("concurrent map writes")
	}
	m.writing ^= 1

	return slotElem
}
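
// runtime_mapassign_fast32ptr mirrors runtime_mapassign_fast32 for maps
// whose key is a single pointer that fits in 32 bits (32-bit platforms).
// The only difference is the key store, which goes through a pointer type
// so that the write barrier applies.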
func runtime_mapassign_fast32ptr(typ *abi.SwissMapType, m *Map, key unsafe.Pointer) unsafe.Pointer {
	if m == nil {
		panic(errNilAssign)
	}
	if race.Enabled {
		callerpc := sys.GetCallerPC()
		pc := abi.FuncPCABIInternal(runtime_mapassign_fast32ptr)
		race.WritePC(unsafe.Pointer(m), callerpc, pc)
	}
	if m.writing != 0 {
		fatal("concurrent map writes")
	}

	k := key
	hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed)

	// Set writing after calling Hasher, since Hasher may panic, in which
	// case we have not actually done a write.
	m.writing ^= 1 // toggle the write flag

	if m.dirPtr == nil {
		m.growToSmall(typ)
	}

	if m.dirLen == 0 {
		if m.used < abi.SwissMapGroupSlots {
			elem := m.putSlotSmallFastPtr(typ, hash, key)

			if m.writing == 0 {
				fatal("concurrent map writes")
			}
			m.writing ^= 1

			return elem
		}

		// Can't fit another entry in the small map; grow to a full table.
		m.growToTable(typ)
	}

	var slotElem unsafe.Pointer
outer:
	for {
		// Select table.
		idx := m.directoryIndex(hash)
		t := m.directoryAt(idx)

		seq := makeProbeSeq(h1(hash), t.groups.lengthMask)

		// As we look for a match, keep track of the first deleted
		// (tombstone) slot we find, which we'll use for the new entry
		// if the key turns out not to be present.
		var firstDeletedGroup groupReference
		var firstDeletedSlot uintptr

		for ; ; seq = seq.next() {
			g := t.groups.group(typ, seq.offset)
			match := g.ctrls().matchH2(h2(hash))

			// Look for an existing slot containing this key.
			for match != 0 {
				i := match.first()

				slotKey := g.key(typ, i)
				if key == *(*unsafe.Pointer)(slotKey) {
					slotElem = g.elem(typ, i)

					t.checkInvariants(typ, m)
					break outer
				}
				match = match.removeFirst()
			}

			// No existing slot for this key in this group. Is this
			// the end of the probe sequence?
			match = g.ctrls().matchEmptyOrDeleted()
			if match == 0 {
				continue // nothing but filled slots, keep probing.
			}
			i := match.first()
			if g.ctrls().get(i) == ctrlDeleted {
				// There are some deleted slots. Remember the
				// first one and keep probing.
				if firstDeletedGroup.data == nil {
					firstDeletedGroup = g
					firstDeletedSlot = i
				}
				continue
			}

			// We've found an empty slot, which means we've reached
			// the end of the probe sequence.

			// If we found a deleted slot along the way, we can
			// reuse it without consuming growthLeft.
			if firstDeletedGroup.data != nil {
				g = firstDeletedGroup
				i = firstDeletedSlot
				t.growthLeft++ // will be decremented below to become a no-op.
			}

			// If there is room left to grow, just insert the new entry.
			if t.growthLeft > 0 {
				slotKey := g.key(typ, i)
				*(*unsafe.Pointer)(slotKey) = key

				slotElem = g.elem(typ, i)

				g.ctrls().set(i, ctrl(h2(hash)))
				t.growthLeft--
				t.used++
				m.used++

				t.checkInvariants(typ, m)
				break outer
			}

			t.rehash(typ, m)
			continue outer
		}
	}

	if m.writing == 0 {
		fatal("concurrent map writes")
	}
	m.writing ^= 1

	return slotElem
}
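
// runtime_mapdelete_fast32 implements delete(m, key) for 32-bit keys. It has
// no specialized probe loop of its own; after the race and emptiness checks
// it forwards to the generic Map.Delete.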
func runtime_mapdelete_fast32(typ *abi.SwissMapType, m *Map, key uint32) {
	if race.Enabled {
		callerpc := sys.GetCallerPC()
		pc := abi.FuncPCABIInternal(runtime_mapdelete_fast32)
		race.WritePC(unsafe.Pointer(m), callerpc, pc)
	}

	if m == nil || m.Used() == 0 {
		return
	}

	m.Delete(typ, abi.NoEscape(unsafe.Pointer(&key)))
}