| | |
| | |
| | |
| |
|
| | package maps |
| |
|
| | import ( |
| | "internal/abi" |
| | "internal/race" |
| | "internal/runtime/sys" |
| | "unsafe" |
| | ) |
| |
|
| | |
// runtime_mapaccess1_fast32 is the fast-path lookup for maps with uint32
// keys. It returns a pointer to the element for key; if the key is not in
// the map, it returns a pointer to the shared zero value instead, so the
// caller never observes a nil result.
func runtime_mapaccess1_fast32(typ *abi.MapType, m *Map, key uint32) unsafe.Pointer {
	if race.Enabled && m != nil {
		callerpc := sys.GetCallerPC()
		pc := abi.FuncPCABIInternal(runtime_mapaccess1_fast32)
		race.ReadPC(unsafe.Pointer(m), callerpc, pc)
	}

	// Lookup on a nil or empty map always misses.
	if m == nil || m.Used() == 0 {
		return unsafe.Pointer(&zeroVal[0])
	}

	if m.writing != 0 {
		fatal("concurrent map read and map write")
		return nil // unreachable; fatal does not return
	}

	if m.dirLen == 0 {
		// Small map: a single group and no directory, so no hashing is
		// needed. Scan the group's slots linearly, guided by the
		// bitset of full slots.
		g := groupReference{
			data: m.dirPtr,
		}
		full := g.ctrls().matchFull()
		slotKey := g.key(typ, 0)
		slotSize := typ.SlotSize
		for full != 0 {
			// The key load is safe even for a non-full slot; the
			// comparison only counts when lowestSet reports the
			// current slot as full.
			if key == *(*uint32)(slotKey) && full.lowestSet() {
				slotElem := unsafe.Pointer(uintptr(slotKey) + typ.ElemOff)
				return slotElem
			}
			slotKey = unsafe.Pointer(uintptr(slotKey) + slotSize)
			full = full.shiftOutLowest()
		}
		return unsafe.Pointer(&zeroVal[0])
	}

	// Copy key to a local so we can take its address without letting it
	// escape.
	k := key
	hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed)

	// Select the table via the directory.
	idx := m.directoryIndex(hash)
	t := m.directoryAt(idx)

	// Probe the table, group by group, following the probe sequence
	// seeded by h1 of the hash.
	seq := makeProbeSeq(h1(hash), t.groups.lengthMask)
	h2Hash := h2(hash)
	for ; ; seq = seq.next() {
		g := t.groups.group(typ, seq.offset)

		// Candidate slots in this group whose control byte matches h2.
		match := g.ctrls().matchH2(h2Hash)

		for match != 0 {
			i := match.first()

			slotKey := g.key(typ, i)
			if key == *(*uint32)(slotKey) {
				slotElem := unsafe.Pointer(uintptr(slotKey) + typ.ElemOff)
				return slotElem
			}
			match = match.removeFirst()
		}

		match = g.ctrls().matchEmpty()
		if match != 0 {
			// An empty slot ends the probe sequence: an insert of this
			// key would have stopped here, so the key is absent.
			return unsafe.Pointer(&zeroVal[0])
		}
	}
}
| |
|
| | |
// runtime_mapaccess2_fast32 is like runtime_mapaccess1_fast32, but also
// returns a boolean reporting whether the key was present. On a miss it
// returns (pointer to the shared zero value, false).
func runtime_mapaccess2_fast32(typ *abi.MapType, m *Map, key uint32) (unsafe.Pointer, bool) {
	if race.Enabled && m != nil {
		callerpc := sys.GetCallerPC()
		pc := abi.FuncPCABIInternal(runtime_mapaccess2_fast32)
		race.ReadPC(unsafe.Pointer(m), callerpc, pc)
	}

	// Lookup on a nil or empty map always misses.
	if m == nil || m.Used() == 0 {
		return unsafe.Pointer(&zeroVal[0]), false
	}

	if m.writing != 0 {
		fatal("concurrent map read and map write")
		return nil, false // unreachable; fatal does not return
	}

	if m.dirLen == 0 {
		// Small map: a single group and no directory, so no hashing is
		// needed. Scan the group's slots linearly, guided by the
		// bitset of full slots.
		g := groupReference{
			data: m.dirPtr,
		}
		full := g.ctrls().matchFull()
		slotKey := g.key(typ, 0)
		slotSize := typ.SlotSize
		for full != 0 {
			// The key load is safe even for a non-full slot; the
			// comparison only counts when lowestSet reports the
			// current slot as full.
			if key == *(*uint32)(slotKey) && full.lowestSet() {
				slotElem := unsafe.Pointer(uintptr(slotKey) + typ.ElemOff)
				return slotElem, true
			}
			slotKey = unsafe.Pointer(uintptr(slotKey) + slotSize)
			full = full.shiftOutLowest()
		}
		return unsafe.Pointer(&zeroVal[0]), false
	}

	// Copy key to a local so we can take its address without letting it
	// escape.
	k := key
	hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed)

	// Select the table via the directory.
	idx := m.directoryIndex(hash)
	t := m.directoryAt(idx)

	// Probe the table, group by group, following the probe sequence
	// seeded by h1 of the hash.
	seq := makeProbeSeq(h1(hash), t.groups.lengthMask)
	h2Hash := h2(hash)
	for ; ; seq = seq.next() {
		g := t.groups.group(typ, seq.offset)

		// Candidate slots in this group whose control byte matches h2.
		match := g.ctrls().matchH2(h2Hash)

		for match != 0 {
			i := match.first()

			slotKey := g.key(typ, i)
			if key == *(*uint32)(slotKey) {
				slotElem := unsafe.Pointer(uintptr(slotKey) + typ.ElemOff)
				return slotElem, true
			}
			match = match.removeFirst()
		}

		match = g.ctrls().matchEmpty()
		if match != 0 {
			// An empty slot ends the probe sequence: an insert of this
			// key would have stopped here, so the key is absent.
			return unsafe.Pointer(&zeroVal[0]), false
		}
	}
}
| |
|
// putSlotSmallFast32 returns the element slot for key in a small map
// (single group, no directory), inserting the key first if it is not
// already present. The caller is responsible for ensuring there is room
// (it is only called when m.used < abi.MapGroupSlots) and for managing
// m.writing.
func (m *Map) putSlotSmallFast32(typ *abi.MapType, hash uintptr, key uint32) unsafe.Pointer {
	g := groupReference{
		data: m.dirPtr,
	}

	match := g.ctrls().matchH2(h2(hash))

	// If the key already exists, return its element slot without
	// inserting.
	for match != 0 {
		i := match.first()

		slotKey := g.key(typ, i)
		if key == *(*uint32)(slotKey) {
			slotElem := g.elem(typ, i)
			return slotElem
		}
		match = match.removeFirst()
	}

	// Find a slot to insert into. matchEmptyOrDeleted must find one
	// given the caller's room guarantee; if it doesn't, the map was
	// corrupted, most likely by a concurrent writer.
	match = g.ctrls().matchEmptyOrDeleted()
	if match == 0 {
		fatal("small map with no empty slot (concurrent map writes?)")
	}

	i := match.first()

	// Store the key, then publish the slot by setting its control byte
	// to h2 of the hash.
	slotKey := g.key(typ, i)
	*(*uint32)(slotKey) = key

	slotElem := g.elem(typ, i)

	g.ctrls().set(i, ctrl(h2(hash)))
	m.used++

	return slotElem
}
| |
|
| | |
// runtime_mapassign_fast32 returns a pointer to the element slot for key,
// inserting the key into m if it is not already present. The caller
// stores the element value through the returned pointer. Assigning into a
// nil map panics.
func runtime_mapassign_fast32(typ *abi.MapType, m *Map, key uint32) unsafe.Pointer {
	if m == nil {
		panic(errNilAssign)
	}
	if race.Enabled {
		callerpc := sys.GetCallerPC()
		pc := abi.FuncPCABIInternal(runtime_mapassign_fast32)
		race.WritePC(unsafe.Pointer(m), callerpc, pc)
	}
	if m.writing != 0 {
		fatal("concurrent map writes")
	}

	// Copy key to a local so we can take its address without letting it
	// escape.
	k := key
	hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed)

	// Set the writing flag only after calling Hasher, since Hasher may
	// panic, in which case we have not actually performed a write.
	m.writing ^= 1 // toggle, see comment on writing

	if m.dirPtr == nil {
		m.growToSmall(typ)
	}

	if m.dirLen == 0 {
		if m.used < abi.MapGroupSlots {
			// Small map with room: single-group insert path.
			elem := m.putSlotSmallFast32(typ, hash, key)

			if m.writing == 0 {
				fatal("concurrent map writes")
			}
			m.writing ^= 1

			return elem
		}

		// Can't fit another entry in the small map; grow to a full
		// table before inserting.
		m.growToTable(typ)
	}

	var slotElem unsafe.Pointer
outer:
	for {
		// Select the table via the directory.
		idx := m.directoryIndex(hash)
		t := m.directoryAt(idx)

		seq := makeProbeSeq(h1(hash), t.groups.lengthMask)

		// While probing, remember the first deleted (tombstone) slot we
		// pass; if the key turns out to be absent we prefer to reuse
		// that slot over a fresh empty one.
		var firstDeletedGroup groupReference
		var firstDeletedSlot uintptr

		h2Hash := h2(hash)
		for ; ; seq = seq.next() {
			g := t.groups.group(typ, seq.offset)
			match := g.ctrls().matchH2(h2Hash)

			// Look for an existing slot holding this key.
			for match != 0 {
				i := match.first()

				slotKey := g.key(typ, i)
				if key == *(*uint32)(slotKey) {
					slotElem = g.elem(typ, i)

					t.checkInvariants(typ, m)
					break outer
				}
				match = match.removeFirst()
			}

			// No match in this group. An empty or deleted slot decides
			// what happens next; a fully-occupied group means we keep
			// probing.
			match = g.ctrls().matchEmptyOrDeleted()
			if match == 0 {
				continue // nothing but full slots; keep probing
			}
			i := match.first()
			if g.ctrls().get(i) == ctrlDeleted {
				// Tombstone: remember the first one seen, then keep
				// probing in case the key exists further on.
				if firstDeletedGroup.data == nil {
					firstDeletedGroup = g
					firstDeletedSlot = i
				}
				continue
			}
			// Empty slot: the probe sequence ends here and the key is
			// definitely absent. Insert it.

			// Prefer a tombstone found earlier; reusing it does not
			// consume growthLeft (incremented here, decremented again
			// below).
			if firstDeletedGroup.data != nil {
				g = firstDeletedGroup
				i = firstDeletedSlot
				t.growthLeft++
			}

			// Out of growth budget: try reclaiming tombstones before
			// resorting to a rehash.
			if t.growthLeft == 0 {
				t.pruneTombstones(typ, m)
			}

			// If there is room, store the key, publish the control
			// byte, and update the counters.
			if t.growthLeft > 0 {
				slotKey := g.key(typ, i)
				*(*uint32)(slotKey) = key

				slotElem = g.elem(typ, i)

				g.ctrls().set(i, ctrl(h2Hash))
				t.growthLeft--
				t.used++
				m.used++

				t.checkInvariants(typ, m)
				break outer
			}

			// No room even after pruning: rehash (grow or split the
			// table) and retry from directory selection.
			t.rehash(typ, m)
			continue outer
		}
	}

	if m.writing == 0 {
		fatal("concurrent map writes")
	}
	m.writing ^= 1

	return slotElem
}
| |
|
| | |
| | |
| | |
| | |
| | |
| | func runtime_mapassign_fast32ptr(typ *abi.MapType, m *Map, key unsafe.Pointer) unsafe.Pointer { |
| | if m == nil { |
| | panic(errNilAssign) |
| | } |
| | if race.Enabled { |
| | callerpc := sys.GetCallerPC() |
| | pc := abi.FuncPCABIInternal(runtime_mapassign_fast32ptr) |
| | race.WritePC(unsafe.Pointer(m), callerpc, pc) |
| | } |
| | if m.writing != 0 { |
| | fatal("concurrent map writes") |
| | } |
| |
|
| | k := key |
| | hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) |
| |
|
| | |
| | |
| | m.writing ^= 1 |
| |
|
| | if m.dirPtr == nil { |
| | m.growToSmall(typ) |
| | } |
| |
|
| | if m.dirLen == 0 { |
| | if m.used < abi.MapGroupSlots { |
| | elem := m.putSlotSmallFastPtr(typ, hash, key) |
| |
|
| | if m.writing == 0 { |
| | fatal("concurrent map writes") |
| | } |
| | m.writing ^= 1 |
| |
|
| | return elem |
| | } |
| |
|
| | |
| | m.growToTable(typ) |
| | } |
| |
|
| | var slotElem unsafe.Pointer |
| | outer: |
| | for { |
| | |
| | idx := m.directoryIndex(hash) |
| | t := m.directoryAt(idx) |
| |
|
| | seq := makeProbeSeq(h1(hash), t.groups.lengthMask) |
| |
|
| | |
| | |
| | var firstDeletedGroup groupReference |
| | var firstDeletedSlot uintptr |
| |
|
| | h2Hash := h2(hash) |
| | for ; ; seq = seq.next() { |
| | g := t.groups.group(typ, seq.offset) |
| | match := g.ctrls().matchH2(h2Hash) |
| |
|
| | |
| | for match != 0 { |
| | i := match.first() |
| |
|
| | slotKey := g.key(typ, i) |
| | if key == *(*unsafe.Pointer)(slotKey) { |
| | slotElem = g.elem(typ, i) |
| |
|
| | t.checkInvariants(typ, m) |
| | break outer |
| | } |
| | match = match.removeFirst() |
| | } |
| |
|
| | |
| | |
| | match = g.ctrls().matchEmptyOrDeleted() |
| | if match == 0 { |
| | continue |
| | } |
| | i := match.first() |
| | if g.ctrls().get(i) == ctrlDeleted { |
| | |
| | |
| | if firstDeletedGroup.data == nil { |
| | firstDeletedGroup = g |
| | firstDeletedSlot = i |
| | } |
| | continue |
| | } |
| | |
| | |
| |
|
| | |
| | |
| | if firstDeletedGroup.data != nil { |
| | g = firstDeletedGroup |
| | i = firstDeletedSlot |
| | t.growthLeft++ |
| | } |
| |
|
| | |
| | if t.growthLeft > 0 { |
| | slotKey := g.key(typ, i) |
| | *(*unsafe.Pointer)(slotKey) = key |
| |
|
| | slotElem = g.elem(typ, i) |
| |
|
| | g.ctrls().set(i, ctrl(h2Hash)) |
| | t.growthLeft-- |
| | t.used++ |
| | m.used++ |
| |
|
| | t.checkInvariants(typ, m) |
| | break outer |
| | } |
| |
|
| | t.rehash(typ, m) |
| | continue outer |
| | } |
| | } |
| |
|
| | if m.writing == 0 { |
| | fatal("concurrent map writes") |
| | } |
| | m.writing ^= 1 |
| |
|
| | return slotElem |
| | } |
| |
|
| | |
| | func runtime_mapdelete_fast32(typ *abi.MapType, m *Map, key uint32) { |
| | if race.Enabled { |
| | callerpc := sys.GetCallerPC() |
| | pc := abi.FuncPCABIInternal(runtime_mapdelete_fast32) |
| | race.WritePC(unsafe.Pointer(m), callerpc, pc) |
| | } |
| |
|
| | if m == nil || m.Used() == 0 { |
| | return |
| | } |
| |
|
| | m.Delete(typ, abi.NoEscape(unsafe.Pointer(&key))) |
| | } |
| |
|