// Copyright 2024 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

//go:build goexperiment.swissmap

package maps

import (
	"internal/abi"
	"internal/race"
	"internal/runtime/sys"
	"unsafe"
)

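// runtime_mapaccess1_fast64 is the lookup fast path for maps with uint64
// keys: it returns a pointer to the element for key. It never returns nil;
// if the key is not present it returns a pointer to the zero value instead.
//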
//go:linkname runtime_mapaccess1_fast64 runtime.mapaccess1_fast64
func runtime_mapaccess1_fast64(typ *abi.SwissMapType, m *Map, key uint64) unsafe.Pointer {
	if race.Enabled && m != nil {
		callerpc := sys.GetCallerPC()
		pc := abi.FuncPCABIInternal(runtime_mapaccess1_fast64)
		race.ReadPC(unsafe.Pointer(m), callerpc, pc)
	}

	if m == nil || m.Used() == 0 {
		return unsafe.Pointer(&zeroVal[0])
	}

	if m.writing != 0 {
		fatal("concurrent map read and map write")
		return nil
	}

	if m.dirLen == 0 {
		// Small map: a single group, scanned linearly.
		g := groupReference{
			data: m.dirPtr,
		}
		full := g.ctrls().matchFull()
		slotKey := g.key(typ, 0)
		slotSize := typ.SlotSize
		for full != 0 {
			// A slot is a match only if it is full (lowest bit of full
			// is set) and holds an equal key.
			if key == *(*uint64)(slotKey) && full.lowestSet() {
				slotElem := unsafe.Pointer(uintptr(slotKey) + 8) // elem follows the 8-byte key
				return slotElem
			}
			slotKey = unsafe.Pointer(uintptr(slotKey) + slotSize)
			full = full.shiftOutLowest()
		}
		return unsafe.Pointer(&zeroVal[0])
	}

	k := key
	hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed)

	// Select table.
	idx := m.directoryIndex(hash)
	t := m.directoryAt(idx)

	// Probe table.
	seq := makeProbeSeq(h1(hash), t.groups.lengthMask)
	for ; ; seq = seq.next() {
		g := t.groups.group(typ, seq.offset)

		match := g.ctrls().matchH2(h2(hash))

		for match != 0 {
			i := match.first()

			slotKey := g.key(typ, i)
			if key == *(*uint64)(slotKey) {
				slotElem := unsafe.Pointer(uintptr(slotKey) + 8) // elem follows the 8-byte key
				return slotElem
			}
			match = match.removeFirst()
		}

		match = g.ctrls().matchEmpty()
		if match != 0 {
			// Finding an empty slot means we have reached the end of
			// the probe sequence.
			return unsafe.Pointer(&zeroVal[0])
		}
	}
}

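// runtime_mapaccess2_fast64 is like runtime_mapaccess1_fast64, but also
// returns a boolean reporting whether the key was present.
//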
//go:linkname runtime_mapaccess2_fast64 runtime.mapaccess2_fast64
func runtime_mapaccess2_fast64(typ *abi.SwissMapType, m *Map, key uint64) (unsafe.Pointer, bool) {
	if race.Enabled && m != nil {
		callerpc := sys.GetCallerPC()
		pc := abi.FuncPCABIInternal(runtime_mapaccess2_fast64)
		race.ReadPC(unsafe.Pointer(m), callerpc, pc)
	}

	if m == nil || m.Used() == 0 {
		return unsafe.Pointer(&zeroVal[0]), false
	}

	if m.writing != 0 {
		fatal("concurrent map read and map write")
		return nil, false
	}

	if m.dirLen == 0 {
		// Small map: a single group, scanned linearly.
		g := groupReference{
			data: m.dirPtr,
		}
		full := g.ctrls().matchFull()
		slotKey := g.key(typ, 0)
		slotSize := typ.SlotSize
		for full != 0 {
			// A slot is a match only if it is full (lowest bit of full
			// is set) and holds an equal key.
			if key == *(*uint64)(slotKey) && full.lowestSet() {
				slotElem := unsafe.Pointer(uintptr(slotKey) + 8) // elem follows the 8-byte key
				return slotElem, true
			}
			slotKey = unsafe.Pointer(uintptr(slotKey) + slotSize)
			full = full.shiftOutLowest()
		}
		return unsafe.Pointer(&zeroVal[0]), false
	}

	k := key
	hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed)

	// Select table.
	idx := m.directoryIndex(hash)
	t := m.directoryAt(idx)

	// Probe table.
	seq := makeProbeSeq(h1(hash), t.groups.lengthMask)
	for ; ; seq = seq.next() {
		g := t.groups.group(typ, seq.offset)

		match := g.ctrls().matchH2(h2(hash))

		for match != 0 {
			i := match.first()

			slotKey := g.key(typ, i)
			if key == *(*uint64)(slotKey) {
				slotElem := unsafe.Pointer(uintptr(slotKey) + 8) // elem follows the 8-byte key
				return slotElem, true
			}
			match = match.removeFirst()
		}

		match = g.ctrls().matchEmpty()
		if match != 0 {
			// Finding an empty slot means we have reached the end of
			// the probe sequence.
			return unsafe.Pointer(&zeroVal[0]), false
		}
	}
}

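// putSlotSmallFast64 returns a pointer to the element slot for key in a map
// that still consists of a single group, inserting key into an empty slot if
// it is not already present. The caller must have checked that the group has
// room for another entry.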
func (m *Map) putSlotSmallFast64(typ *abi.SwissMapType, hash uintptr, key uint64) unsafe.Pointer {
	g := groupReference{
		data: m.dirPtr,
	}

	match := g.ctrls().matchH2(h2(hash))

	// Look for an existing slot containing this key.
	for match != 0 {
		i := match.first()

		slotKey := g.key(typ, i)
		if key == *(*uint64)(slotKey) {
			slotElem := g.elem(typ, i)
			return slotElem
		}
		match = match.removeFirst()
	}

	// There can't be deleted slots, small maps can't have them
	// (see deleteSmall). Use matchEmptyOrDeleted as it is a bit
	// more efficient than matchEmpty.
	match = g.ctrls().matchEmptyOrDeleted()
	if match == 0 {
		fatal("small map with no empty slot (concurrent map writes?)")
	}

	i := match.first()

	slotKey := g.key(typ, i)
	*(*uint64)(slotKey) = key

	slotElem := g.elem(typ, i)

	g.ctrls().set(i, ctrl(h2(hash)))
	m.used++

	return slotElem
}

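// runtime_mapassign_fast64 is the assignment fast path for maps with uint64
// keys: it returns a pointer to the element slot for key, inserting key if
// it is not already present and growing the map as needed.
//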
//go:linkname runtime_mapassign_fast64 runtime.mapassign_fast64
func runtime_mapassign_fast64(typ *abi.SwissMapType, m *Map, key uint64) unsafe.Pointer {
	if m == nil {
		panic(errNilAssign)
	}
	if race.Enabled {
		callerpc := sys.GetCallerPC()
		pc := abi.FuncPCABIInternal(runtime_mapassign_fast64)
		race.WritePC(unsafe.Pointer(m), callerpc, pc)
	}
	if m.writing != 0 {
		fatal("concurrent map writes")
	}

	k := key
	hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed)

	// Set writing after calling Hasher, since Hasher may panic, in which
	// case we have not actually done a write.
	m.writing ^= 1 // toggle, see comment on writing

	if m.dirPtr == nil {
		m.growToSmall(typ)
	}

	if m.dirLen == 0 {
		if m.used < abi.SwissMapGroupSlots {
			elem := m.putSlotSmallFast64(typ, hash, key)

			if m.writing == 0 {
				fatal("concurrent map writes")
			}
			m.writing ^= 1

			return elem
		}

		// Can't fit another entry, grow to full size map.
		m.growToTable(typ)
	}

	var slotElem unsafe.Pointer
outer:
	for {
		// Select table.
		idx := m.directoryIndex(hash)
		t := m.directoryAt(idx)

		seq := makeProbeSeq(h1(hash), t.groups.lengthMask)

		// As we look for a match, keep track of the first deleted slot
		// we find, which we'll use to insert the new entry if
		// necessary.
		var firstDeletedGroup groupReference
		var firstDeletedSlot uintptr

		for ; ; seq = seq.next() {
			g := t.groups.group(typ, seq.offset)
			match := g.ctrls().matchH2(h2(hash))

			// Look for an existing slot containing this key.
			for match != 0 {
				i := match.first()

				slotKey := g.key(typ, i)
				if key == *(*uint64)(slotKey) {
					slotElem = g.elem(typ, i)

					t.checkInvariants(typ, m)
					break outer
				}
				match = match.removeFirst()
			}

			// No existing slot for this key in this group. Is this the
			// end of the probe sequence?
			match = g.ctrls().matchEmptyOrDeleted()
			if match == 0 {
				continue // nothing but filled slots. Keep probing.
			}
			i := match.first()
			if g.ctrls().get(i) == ctrlDeleted {
				// There are some deleted slots. Remember
				// the first one, and keep probing.
				if firstDeletedGroup.data == nil {
					firstDeletedGroup = g
					firstDeletedSlot = i
				}
				continue
			}

			// We've found an empty slot, which means we've reached the
			// end of the probe sequence.
			//
			// If we found a deleted slot along the way, we can replace
			// it without consuming growthLeft.
			if firstDeletedGroup.data != nil {
				g = firstDeletedGroup
				i = firstDeletedSlot
				t.growthLeft++ // will be decremented below to become a no-op.
			}

			// If the table has no growth capacity left, try to reclaim
			// tombstoned slots before resorting to a rehash.
			if t.growthLeft == 0 {
				t.pruneTombstones(typ, m)
			}

			// If there is room left to grow, just insert the new entry.
			if t.growthLeft > 0 {
				slotKey := g.key(typ, i)
				*(*uint64)(slotKey) = key

				slotElem = g.elem(typ, i)

				g.ctrls().set(i, ctrl(h2(hash)))
				t.growthLeft--
				t.used++
				m.used++

				t.checkInvariants(typ, m)
				break outer
			}

			t.rehash(typ, m)
			continue outer
		}
	}

	if m.writing == 0 {
		fatal("concurrent map writes")
	}
	m.writing ^= 1

	return slotElem
}

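// putSlotSmallFastPtr is the pointer-key analogue of putSlotSmallFast64: it
// returns a pointer to the element slot for key in a single-group map,
// inserting key if it is not already present.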
func (m *Map) putSlotSmallFastPtr(typ *abi.SwissMapType, hash uintptr, key unsafe.Pointer) unsafe.Pointer {
	g := groupReference{
		data: m.dirPtr,
	}

	match := g.ctrls().matchH2(h2(hash))

	// Look for an existing slot containing this key.
	for match != 0 {
		i := match.first()

		slotKey := g.key(typ, i)
		if key == *(*unsafe.Pointer)(slotKey) {
			slotElem := g.elem(typ, i)
			return slotElem
		}
		match = match.removeFirst()
	}

	// There can't be deleted slots, small maps can't have them
	// (see deleteSmall). Use matchEmptyOrDeleted as it is a bit
	// more efficient than matchEmpty.
	match = g.ctrls().matchEmptyOrDeleted()
	if match == 0 {
		fatal("small map with no empty slot (concurrent map writes?)")
	}

	i := match.first()

	slotKey := g.key(typ, i)
	*(*unsafe.Pointer)(slotKey) = key

	slotElem := g.elem(typ, i)

	g.ctrls().set(i, ctrl(h2(hash)))
	m.used++

	return slotElem
}

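// runtime_mapassign_fast64ptr is the assignment fast path used when the map
// key is a single pointer: it returns a pointer to the element slot for key,
// inserting key if it is not already present.
//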
// Key is a 64-bit pointer (only called on 64-bit GOARCH).
//
//go:linkname runtime_mapassign_fast64ptr runtime.mapassign_fast64ptr
func runtime_mapassign_fast64ptr(typ *abi.SwissMapType, m *Map, key unsafe.Pointer) unsafe.Pointer {
	if m == nil {
		panic(errNilAssign)
	}
	if race.Enabled {
		callerpc := sys.GetCallerPC()
		pc := abi.FuncPCABIInternal(runtime_mapassign_fast64ptr)
		race.WritePC(unsafe.Pointer(m), callerpc, pc)
	}
	if m.writing != 0 {
		fatal("concurrent map writes")
	}

	k := key
	hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed)

	// Set writing after calling Hasher, since Hasher may panic, in which
	// case we have not actually done a write.
	m.writing ^= 1 // toggle, see comment on writing

	if m.dirPtr == nil {
		m.growToSmall(typ)
	}

	if m.dirLen == 0 {
		if m.used < abi.SwissMapGroupSlots {
			elem := m.putSlotSmallFastPtr(typ, hash, key)

			if m.writing == 0 {
				fatal("concurrent map writes")
			}
			m.writing ^= 1

			return elem
		}

		// Can't fit another entry, grow to full size map.
		m.growToTable(typ)
	}

	var slotElem unsafe.Pointer
outer:
	for {
		// Select table.
		idx := m.directoryIndex(hash)
		t := m.directoryAt(idx)

		seq := makeProbeSeq(h1(hash), t.groups.lengthMask)

		// As we look for a match, keep track of the first deleted slot
		// we find, which we'll use to insert the new entry if
		// necessary.
		var firstDeletedGroup groupReference
		var firstDeletedSlot uintptr

		for ; ; seq = seq.next() {
			g := t.groups.group(typ, seq.offset)
			match := g.ctrls().matchH2(h2(hash))

			// Look for an existing slot containing this key.
			for match != 0 {
				i := match.first()

				slotKey := g.key(typ, i)
				if key == *(*unsafe.Pointer)(slotKey) {
					slotElem = g.elem(typ, i)

					t.checkInvariants(typ, m)
					break outer
				}
				match = match.removeFirst()
			}

			// No existing slot for this key in this group. Is this the
			// end of the probe sequence?
			match = g.ctrls().matchEmptyOrDeleted()
			if match == 0 {
				continue // nothing but filled slots. Keep probing.
			}
			i := match.first()
			if g.ctrls().get(i) == ctrlDeleted {
				// There are some deleted slots. Remember
				// the first one, and keep probing.
				if firstDeletedGroup.data == nil {
					firstDeletedGroup = g
					firstDeletedSlot = i
				}
				continue
			}

			// We've found an empty slot, which means we've reached the
			// end of the probe sequence.
			//
			// If we found a deleted slot along the way, we can replace
			// it without consuming growthLeft.
			if firstDeletedGroup.data != nil {
				g = firstDeletedGroup
				i = firstDeletedSlot
				t.growthLeft++ // will be decremented below to become a no-op.
			}

			// If there is room left to grow, just insert the new entry.
			if t.growthLeft > 0 {
				slotKey := g.key(typ, i)
				*(*unsafe.Pointer)(slotKey) = key

				slotElem = g.elem(typ, i)

				g.ctrls().set(i, ctrl(h2(hash)))
				t.growthLeft--
				t.used++
				m.used++

				t.checkInvariants(typ, m)
				break outer
			}

			t.rehash(typ, m)
			continue outer
		}
	}

	if m.writing == 0 {
		fatal("concurrent map writes")
	}
	m.writing ^= 1

	return slotElem
}

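// runtime_mapdelete_fast64 is the deletion fast path for maps with uint64
// keys; after the usual race and emptiness checks it defers to Map.Delete.
//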
//go:linkname runtime_mapdelete_fast64 runtime.mapdelete_fast64
func runtime_mapdelete_fast64(typ *abi.SwissMapType, m *Map, key uint64) {
	if race.Enabled {
		callerpc := sys.GetCallerPC()
		pc := abi.FuncPCABIInternal(runtime_mapdelete_fast64)
		race.WritePC(unsafe.Pointer(m), callerpc, pc)
	}

	if m == nil || m.Used() == 0 {
		return
	}

	m.Delete(typ, abi.NoEscape(unsafe.Pointer(&key)))
}