[PersistentCollections] Start splitting up _HashPath into _HashValue & _Level

The original approach was a good one, but gluing the level (aka shift) onto the hash value isn't helpful when we're passing around multiple hashes. The current level also seems like an important enough concept to keep as a standalone parameter.
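For orientation, here is a minimal, self-contained sketch of the shape this split is heading toward: a hash value that can be indexed by a standalone level, and a path that simply pairs the two. The type names, the 5-bit bucket width, and the example hash are illustrative assumptions for this note, not the package's actual definitions.

// Illustrative sketch only: simplified stand-ins for the internal
// _Level, _HashValue, and _HashPath types touched by this commit.
struct Level {
  var shift: UInt
  static var top: Level { Level(shift: 0) }
  var isAtRoot: Bool { shift == 0 }
  // Each level consumes the next 5 bits of the hash (32-way branching assumed).
  func descend() -> Level { Level(shift: shift &+ 5) }
}

struct HashValue {
  var value: UInt
  // The bucket at a given level is a 5-bit slice of the hash.
  subscript(level: Level) -> UInt { (value &>> level.shift) & 0b11111 }
}

struct HashPath {
  var hash: HashValue   // the full hash of the key
  var level: Level      // how deep we currently are in the trie
  var currentBucket: UInt { hash[level] }
  func descend() -> HashPath { HashPath(hash: hash, level: level.descend()) }
}

// Walking a path extracts successive 5-bit buckets from the same hash.
let path = HashPath(hash: HashValue(value: 0b10111_00010_00001), level: .top)
print(path.currentBucket)                      // 1
print(path.descend().currentBucket)            // 2
print(path.descend().descend().currentBucket)  // 23
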
lorentey committed Sep 12, 2022
1 parent 4aff69e commit b1cf1ee
Showing 5 changed files with 90 additions and 34 deletions.
22 changes: 11 additions & 11 deletions Sources/PersistentCollections/PersistentDictionary._Node.swift
@@ -401,7 +401,7 @@ extension PersistentDictionary._Node: _DictionaryNodeProtocol {
   func get(_ key: Key, _ path: _HashPath) -> Value? {
     guard isRegularNode else {
       let hash = _HashValue(_items.first!.key)
-      guard path._hash == hash else { return nil }
+      guard path.hash == hash else { return nil }
       return _items.first(where: { key == $0.key })?.value
     }

@@ -425,7 +425,7 @@ extension PersistentDictionary._Node: _DictionaryNodeProtocol {
   func containsKey(_ key: Key, _ path: _HashPath) -> Bool {
     guard isRegularNode else {
       let hash = _HashValue(_items.first!.key)
-      guard path._hash == hash else { return false }
+      guard path.hash == hash else { return false }
       return _items.contains(where: { key == $0.key })
     }

@@ -454,7 +454,7 @@ extension PersistentDictionary._Node: _DictionaryNodeProtocol {
   ) -> Index? {
     guard isRegularNode else {
       let hash = _HashValue(_items.first!.key)
-      assert(path._hash == hash)
+      assert(path.hash == hash)
       return _items
         .firstIndex(where: { $0.key == key })
         .map { Index(_value: skippedBefore + $0) }
@@ -503,7 +503,7 @@ extension PersistentDictionary._Node: _DictionaryNodeProtocol {
         return _copyAndSetValue(isUnique, bucket, item.value)
       }
       let hash0 = _HashValue(item0.key)
-      if hash0 == path._hash {
+      if hash0 == path.hash {
         let newChild = _Node(collisions: [item0, item])
         effect.setModified()
         if self.count == 1 { return newChild }
@@ -552,7 +552,7 @@ extension PersistentDictionary._Node: _DictionaryNodeProtocol {
     assert(isCollisionNode)

     let hash = _HashValue(_items.first!.key)
-    guard path._hash == hash else {
+    guard path.hash == hash else {
       effect.setModified()
       return _mergeKeyValPairAndCollisionNode(item, path, self, hash)
     }
@@ -731,7 +731,7 @@ extension PersistentDictionary._Node {
     _ item0: Element, _ path0: _HashPath,
     _ item1: Element, _ hash1: _HashValue
   ) -> _Node {
-    let path1 = _HashPath(_hash: hash1, shift: path0._shift)
+    let path1 = _HashPath(hash: hash1, level: path0.level)
     return _mergeTwoKeyValPairs(item0, path0, item1, path1)
   }

@@ -740,8 +740,8 @@
     _ item0: Element, _ path0: _HashPath,
     _ item1: Element, _ path1: _HashPath
   ) -> _Node {
-    assert(path0._hash != path1._hash)
-    assert(path0._shift == path1._shift)
+    assert(path0.hash != path1.hash)
+    assert(path0.level == path1.level)

     let bucket0 = path0.currentBucket
     let bucket1 = path1.currentBucket
@@ -766,7 +766,7 @@
     _ item0: Element, _ path0: _HashPath,
     _ node1: _Node, _ hash1: _HashValue
   ) -> _Node {
-    let path1 = _HashPath(_hash: hash1, shift: path0._shift)
+    let path1 = _HashPath(hash: hash1, level: path0.level)
     return _mergeKeyValPairAndCollisionNode(item0, path0, node1, path1)
   }

@@ -775,8 +775,8 @@
     _ item0: Element, _ path0: _HashPath,
     _ node1: _Node, _ path1: _HashPath
   ) -> _Node {
-    assert(path0._hash != path1._hash)
-    assert(path0._shift == path1._shift)
+    assert(path0.hash != path1.hash)
+    assert(path0.level == path1.level)

     let bucket0 = path0.currentBucket
     let bucket1 = path1.currentBucket
7 changes: 1 addition & 6 deletions Sources/PersistentCollections/_Bitmap.swift
@@ -29,11 +29,6 @@ internal struct _Bitmap {
     self._value = Value(bitPattern)
   }

-  @inline(__always)
-  internal init() {
-    _value = 0
-  }
-
   @inlinable @inline(__always)
   internal init(_ bucket: _Bucket) {
     assert(bucket.value < Self.capacity)
@@ -62,8 +57,8 @@ extension _Bitmap: Equatable {
 }

 extension _Bitmap {
-  internal static var empty: Self { .init() }
   @inlinable @inline(__always)
+  internal static var empty: Self { .init(_value: 0) }

   @inlinable @inline(__always)
   internal static var capacity: Int { Value.bitWidth }
3 changes: 3 additions & 0 deletions Sources/PersistentCollections/_Bucket.swift
@@ -25,6 +25,9 @@ internal struct _Bucket {

   @inlinable @inline(__always)
   static var bitMask: UInt { UInt(bitPattern: _Bitmap.capacity) &- 1 }
+
+  @inlinable @inline(__always)
+  static var invalid: _Bucket { _Bucket(.max) }
 }

 extension _Bucket: Equatable {
40 changes: 23 additions & 17 deletions Sources/PersistentCollections/_HashPath.swift
@@ -15,49 +15,55 @@
 @frozen
 internal struct _HashPath {
   @usableFromInline
-  internal var _hash: _HashValue
+  internal var hash: _HashValue

   @usableFromInline
-  internal var _shift: UInt
+  internal var level: _Level

   @inlinable
-  internal init(_hash: _HashValue, shift: UInt) {
-    self._hash = _hash
-    self._shift = shift
+  internal init(hash: _HashValue, level: _Level) {
+    self.hash = hash
+    self.level = level
   }

-  internal init<Key: Hashable>(_ key: Key) {
-    _hash = _HashValue(key)
-    _shift = 0
+  @inlinable
+  internal init(hash: _HashValue, shift: UInt) {
+    self.init(hash: hash, level: _Level(shift: shift))
   }

+  @inlinable
+  internal init<Key: Hashable>(_ key: Key, level: _Level = .top) {
+    self.init(hash: _HashValue(key), level: level)
+  }
+
+  @inlinable
+  internal init<Key: Hashable>(_ key: Key, shift: UInt) {
+    self.init(hash: _HashValue(key), level: _Level(shift: shift))
+  }
+
   @inlinable
-  internal var isAtRoot: Bool { _shift == 0 }
+  internal var isAtRoot: Bool { level.isAtRoot }

   @inlinable
   internal var currentBucket: _Bucket {
-    precondition(_shift < UInt.bitWidth, "Ran out of hash bits")
-    return _Bucket((_hash.value &>> _shift) & _Bucket.bitMask)
+    hash[level]
   }

   @inlinable
   internal func descend() -> _HashPath {
     // FIXME: Consider returning nil when we run out of bits
-    let s = _shift &+ UInt(bitPattern: _Bucket.bitWidth)
-    return _HashPath(_hash: _hash, shift: s)
+    _HashPath(hash: hash, level: level.descend())
   }

   @inlinable
   internal func ascend() -> _HashPath {
-    precondition(_shift >= _Bucket.bitWidth)
-    let s = _shift &- UInt(bitPattern: _Bucket.bitWidth)
-    return _HashPath(_hash: _hash, shift: s)
+    _HashPath(hash: hash, level: level.ascend())
   }

   @inlinable
   internal func top() -> _HashPath {
     var result = self
-    result._shift = 0
+    result.level = .top
     return result
   }
 }
52 changes: 52 additions & 0 deletions Sources/PersistentCollections/_HashValue.swift
@@ -29,3 +29,55 @@ extension _HashValue: Equatable {
     left.value == right.value
   }
 }
+
+extension _HashValue {
+  @inlinable
+  internal subscript(_ level: _Level) -> _Bucket {
+    assert(!level.isAtBottom)
+    return _Bucket((value &>> level.shift) & _Bucket.bitMask)
+  }
+}
+
+@usableFromInline
+@frozen
+internal struct _Level {
+  @usableFromInline
+  internal var shift: UInt
+
+  @inlinable
+  init(shift: UInt) {
+    self.shift = shift
+  }
+}
+
+extension _Level {
+  @inlinable
+  internal static var top: _Level {
+    _Level(shift: 0)
+  }
+
+  @inlinable
+  internal var isAtRoot: Bool { shift == 0 }
+
+  @inlinable
+  internal var isAtBottom: Bool { shift >= UInt.bitWidth }
+
+  @inlinable
+  internal func descend() -> _Level {
+    // FIXME: Consider returning nil when we run out of bits
+    _Level(shift: shift &+ UInt(bitPattern: _Bucket.bitWidth))
+  }
+
+  @inlinable
+  internal func ascend() -> _Level {
+    assert(!isAtRoot)
+    return _Level(shift: shift &- UInt(bitPattern: _Bucket.bitWidth))
+  }
+}
+
+extension _Level: Equatable {
+  @inlinable
+  internal static func ==(left: Self, right: Self) -> Bool {
+    left.shift == right.shift
+  }
+}
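A quick sanity check on the descend()/ascend() pair above: ascending has to subtract exactly the bits that descending added, so that going one level down and back up is a round trip. The standalone sketch below illustrates this with simplified names and an assumed 5-bit bucket width; it is not part of the diff.

// Illustrative sketch: ascend() must subtract the per-level bit width,
// so that level.descend().ascend() == level.
struct Level: Equatable {
  var shift: UInt
  // Bits consumed per level; 5 corresponds to 32-way branching (assumed here).
  static let bucketBitWidth: UInt = 5
  static var top: Level { Level(shift: 0) }
  var isAtRoot: Bool { shift == 0 }
  func descend() -> Level { Level(shift: shift &+ Self.bucketBitWidth) }
  func ascend() -> Level {
    assert(!isAtRoot)
    return Level(shift: shift &- Self.bucketBitWidth)
  }
}

let top = Level.top
assert(top.descend().ascend() == top)
assert(top.descend().descend().ascend() == top.descend())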
