| b69ab31 | | | 1 | /** |
| b69ab31 | | | 2 | * Copyright (c) Meta Platforms, Inc. and affiliates. |
| b69ab31 | | | 3 | * |
| b69ab31 | | | 4 | * This source code is licensed under the MIT license found in the |
| b69ab31 | | | 5 | * LICENSE file in the root directory of this source tree. |
| b69ab31 | | | 6 | */ |
| b69ab31 | | | 7 | |
| b69ab31 | | | 8 | import type {RecordOf} from 'immutable'; |
| b69ab31 | | | 9 | import type {Hash} from '../types'; |
| b69ab31 | | | 10 | import type {SetLike} from './set'; |
| b69ab31 | | | 11 | |
| b69ab31 | | | 12 | import {Map as ImMap, List, Record} from 'immutable'; |
| b69ab31 | | | 13 | import {LRU, cachedMethod} from 'shared/LRU'; |
| b69ab31 | | | 14 | import {SelfUpdate} from 'shared/immutableExt'; |
| b69ab31 | | | 15 | import {nullthrows} from 'shared/utils'; |
| b69ab31 | | | 16 | import {HashSet} from './set'; |
| b69ab31 | | | 17 | |
| b69ab31 | | | 18 | /** |
| b69ab31 | | | 19 | * Hash-map like container with graph related queries. |
| b69ab31 | | | 20 | * |
| b69ab31 | | | 21 | * Unlike a traditional source control dag, this works more like a map: |
| b69ab31 | | | 22 | * - add() does not require parents to (recursively) exist. |
| b69ab31 | | | 23 | * - remove() does not remove descendants (recursively). |
| b69ab31 | | | 24 | * |
| b69ab31 | | | 25 | * The `BaseDag` is a minimal implementation for core graph query |
| b69ab31 | | | 26 | * needs (like heads, roots, range, ancestors, gca, etc). |
| b69ab31 | | | 27 | * For use-cases that involve commit attributes, like phases, bookmarks, |
| b69ab31 | | | 28 | * successors, use `dag/Dag` instead. |
| b69ab31 | | | 29 | * |
| b69ab31 | | | 30 | * Internally maintains a "parent -> child" mapping for efficient queries. |
| b69ab31 | | | 31 | * All queries, regardless of the input size, are O(N) worst case. None |
| b69ab31 | | | 32 | * should be O(N^2). |
| b69ab31 | | | 33 | */ |
export class BaseDag<C extends HashWithParents> extends SelfUpdate<BaseDagRecord<C>> {
  constructor(record?: BaseDagRecord<C>) {
    // Default to the shared empty record; the cast only narrows the generic.
    super(record ?? (EMPTY_DAG_RECORD as BaseDagRecord<C>));
  }

  // Edit

  /**
   * Add commits. Parents do not have to be added first.
   * If a commit with the same hash already exists, it will be replaced.
   */
  add(commits: Iterable<C>): BaseDag<C> {
    const commitArray = [...commits];
    // Remove same-hash commits first so stale parent->child edges from the
    // replaced commits do not survive in childMap.
    const dag = this.remove(commitArray.map(c => c.hash));
    let {childMap, infoMap} = dag;
    for (const commit of commitArray) {
      commit.parents.forEach(p => {
        const children = childMap.get(p);
        const child = commit.hash;
        // Append `child` to the parent's child list, keeping entries unique.
        const newChildren =
          children == null
            ? List([child])
            : children.contains(child)
              ? children
              : children.push(child);
        childMap = childMap.set(p, newChildren);
      });
      infoMap = infoMap.set(commit.hash, commit);
    }
    const record = dag.inner.merge({infoMap, childMap});
    return new BaseDag(record);
  }

  /** Remove commits by hash. Descendants are not removed automatically. */
  remove(set: SetLike): BaseDag<C> {
    const hashSet = HashSet.fromHashes(set);
    let {childMap, infoMap} = this;
    for (const hash of hashSet) {
      const commit = this.get(hash);
      if (commit == undefined) {
        // Not present in this dag; nothing to unlink.
        continue;
      }
      // Drop `hash` from each parent's child list before removing the commit.
      commit.parents.forEach(p => {
        const children = childMap.get(p);
        if (children != null) {
          const newChildren = children.filter(h => h !== hash);
          childMap = childMap.set(p, newChildren);
        }
      });
      infoMap = infoMap.remove(hash);
    }
    const record = this.inner.merge({infoMap, childMap});
    return new BaseDag(record);
  }

  // Basic query

  /** Look up a commit by hash. Returns undefined if absent or `hash` is nullish. */
  get(hash: Hash | undefined | null): Readonly<C> | undefined {
    return hash == null ? undefined : this.infoMap.get(hash);
  }

  /** Test whether `hash` exists in this dag. */
  has(hash: Hash | undefined | null): boolean {
    return this.get(hash) !== undefined;
  }

  /** Iterate over all hashes in this dag. */
  [Symbol.iterator](): IterableIterator<Hash> {
    return this.infoMap.keys();
  }

  /** Iterate over all commit infos in this dag. */
  values(): Iterable<Readonly<C>> {
    return this.infoMap.values();
  }

  /** Get parent hashes. Only return hashes present in this.infoMap. */
  parentHashes(hash: Hash): Readonly<Hash[]> {
    return this.infoMap.get(hash)?.parents?.filter(p => this.infoMap.has(p)) ?? [];
  }

  /** Get child hashes. Only return hashes present in this.infoMap. */
  childHashes(hash: Hash): List<Hash> {
    if (!this.infoMap.has(hash)) {
      return EMPTY_LIST;
    }
    return this.childMap.get(hash) ?? EMPTY_LIST;
  }

  // High-level query

  /** Union of parents of all hashes in `set`. */
  parents(set: SetLike): HashSet {
    return flatMap(set, h => this.parentHashes(h));
  }

  /** Union of children of all hashes in `set`. */
  children(set: SetLike): HashSet {
    return flatMap(set, h => this.childHashes(h));
  }

  /**
   * set + parents(set) + parents(parents(set)) + ...
   * If `within` is set, change `parents` to only return hashes within `within`.
   * Results are memoized in the module-level `ancestorsCache` LRU.
   */
  ancestors = cachedMethod(this.ancestorsImpl, {cache: ancestorsCache});
  ancestorsImpl(set: SetLike, props?: {within?: SetLike}): HashSet {
    const filter = nullableWithinContains(props?.within);
    return unionFlatMap(set, h => this.parentHashes(h).filter(filter));
  }

  /**
   * set + children(set) + children(children(set)) + ...
   * If `within` is set, change `children` to only return hashes within `within`.
   */
  descendants(set: SetLike, props?: {within?: SetLike}): HashSet {
    const filter = nullableWithinContains(props?.within);
    return unionFlatMap(set, h => this.childHashes(h).filter(filter));
  }

  /** ancestors(heads) & descendants(roots) */
  range(roots: SetLike, heads: SetLike): HashSet {
    // PERF: This is not the most efficient, but easy to write.
    // `within: ancestors` bounds the descendants walk to the ancestor set.
    const ancestors = this.ancestors(heads);
    return ancestors.intersect(this.descendants(roots, {within: ancestors}));
  }

  /** set - children(set) */
  roots(set: SetLike): HashSet {
    const children = this.children(set);
    return HashSet.fromHashes(set).subtract(children);
  }

  /** set - parents(set) */
  heads(set: SetLike): HashSet {
    const parents = this.parents(set);
    return HashSet.fromHashes(set).subtract(parents);
  }

  /** Greatest common ancestor. heads(ancestors(set1) & ancestors(set2)). */
  gca(set1: SetLike, set2: SetLike): HashSet {
    return this.heads(this.ancestors(set1).intersect(this.ancestors(set2)));
  }

  /** ancestor in ancestors(descendant) */
  isAncestor(ancestor: Hash, descendant: Hash): boolean {
    // PERF: This is not the most efficient, but easy to write.
    return this.ancestors(descendant).contains(ancestor);
  }

  /** Return hashes present in this dag. */
  present(set: SetLike): HashSet {
    const hashes = HashSet.fromHashes(set)
      .toHashes()
      .filter(h => this.has(h));
    return HashSet.fromHashes(hashes);
  }

  /**
   * Return commits that match the given condition.
   * This can be useful for things like "obsolete()".
   * `set`, if not undefined, limits the search space.
   */
  filter(predicate: (commit: Readonly<C>) => boolean, set?: SetLike): HashSet {
    let hashes: SetLike;
    if (set === undefined) {
      // No limit: scan every commit in the dag.
      hashes = this.infoMap.filter((commit, _hash) => predicate(commit)).keys();
    } else {
      // Limited: only test hashes in `set`; absent hashes are dropped.
      hashes = HashSet.fromHashes(set)
        .toHashes()
        .filter(h => {
          const c = this.get(h);
          return c != undefined && predicate(c);
        });
    }
    return HashSet.fromHashes(hashes);
  }

  /**
   * Topologically sort hashes from roots to heads.
   *
   * When there are multiple children (or roots), 'compare' breaks ties.
   * Attempt to avoid interleaved branches by outputting a branch continuously
   * until completing the branch or hitting a merge.
   *
   * With `gap` set to `true`, there could be "gap" in `set`. For example,
   * with the graph x--y--z, the `set` can be ['x', 'z'] and sort will work
   * as expected, with the downside of being O(dag size). If you run sort
   * in a tight loop, consider setting `gap` to `false` for performance.
   */
  sortAsc(set: SetLike, props?: SortProps<C>): Array<Hash> {
    const iter = this.sortImpl(
      set,
      props?.compare ?? ((a, b) => (a.hash < b.hash ? -1 : 1)),
      s => this.roots(s),
      h => this.childHashes(h),
      h => this.parents(h),
      props?.gap ?? true,
    );
    return [...iter];
  }

  /**
   * Topologically sort hashes from heads to roots.
   *
   * See `sortAsc` for details.
   */
  sortDesc(set: SetLike, props?: SortProps<C>): Array<Hash> {
    const iter = this.sortImpl(
      set,
      // Reversed default comparison relative to `sortAsc`.
      props?.compare ?? ((a, b) => (a.hash < b.hash ? 1 : -1)),
      s => this.heads(s),
      h => List(this.parentHashes(h)),
      h => this.children(h),
      props?.gap ?? true,
    );
    return [...iter];
  }

  // DFS from 'start' to 'next'
  // not considering branch length.
  //
  // Shared implementation of sortAsc/sortDesc: the direction is abstracted
  // via getStart (roots or heads), getNext (children or parents), and
  // getPrev (the opposite of getNext).
  private *sortImpl(
    set: SetLike,
    compare: (a: C, b: C) => number,
    getStart: (set: HashSet) => HashSet,
    getNext: (hash: Hash) => List<Hash>,
    getPrev: (hash: Hash) => HashSet,
    gap: boolean,
  ): Generator<Hash> {
    const hashSet = this.present(set);
    // There might be "gaps" in hashSet. For example, set might be 'x' and 'z'
    // in a 'x--y--z' graph. We need to fill the gap ('y') to sort properly.
    // However, range(set, set) is O(dag.size) so we allow the callsite to
    // disable this feature with an option.
    const filledSet = gap ? this.range(hashSet, hashSet) : hashSet;
    const alreadyVisited = new Set<Hash>();
    // We use concat and pop (not unshift) so the order is reversed.
    // The negation makes `sort(compareHash)` place the "first" hash last,
    // where `pop()` picks it up.
    const compareHash = (a: Hash, b: Hash) =>
      -compare(nullthrows(this.get(a)), nullthrows(this.get(b)));
    // The number of parents remaining to be visited. This ensures merges are not
    // outputted until all parents are outputted.
    const remaining = new Map<Hash, number>(
      filledSet.toSeq().map(h => [h, getPrev(h).intersect(filledSet).size]),
    );
    // List is used as a dequeue.
    let toVisit = getStart(filledSet).toHashes().toList().sort(compareHash);
    while (true) {
      // pop front
      const next = toVisit.last();
      if (next == null) {
        break;
      }
      toVisit = toVisit.pop();
      if (alreadyVisited.has(next)) {
        continue;
      }
      // Need to wait for visiting other parents first?
      // Re-queue at the far end; it will be retried after other branches.
      if ((remaining.get(next) ?? 0) > 0) {
        toVisit = toVisit.unshift(next);
        continue;
      }
      // Output it. Gap-filler hashes (in filledSet but not hashSet) are
      // traversed but not emitted.
      if (hashSet.contains(next)) {
        yield next;
      }
      alreadyVisited.add(next);
      // Visit next.
      const children = getNext(next).sort(compareHash);
      for (const c of children) {
        // Hashes absent from `remaining` start at an assumed 1, dropping to 0.
        remaining.set(c, (remaining.get(c) ?? 1) - 1);
      }
      toVisit = toVisit.concat(children);
    }
  }

  // Delegates

  /** hash -> commit info mapping (the source of truth). */
  get infoMap(): ImMap<Hash, Readonly<C>> {
    return this.inner.infoMap;
  }

  /** parent hash -> child hashes mapping (derived from infoMap). */
  get childMap(): ImMap<Hash, List<Hash>> {
    return this.inner.childMap;
  }

  // Filters.

  /** Merge commits (more than one parent) within `set` (or the whole dag). */
  merge(set?: SetLike): HashSet {
    return this.filter(c => c.parents.length > 1, set);
  }
}
| b69ab31 | | | 320 | |
| b69ab31 | | | 321 | const ancestorsCache = new LRU(1000); |
| b69ab31 | | | 322 | |
| b69ab31 | | | 323 | function flatMap(set: SetLike, f: (h: Hash) => List<Hash> | Readonly<Array<Hash>>): HashSet { |
| b69ab31 | | | 324 | return new HashSet( |
| b69ab31 | | | 325 | HashSet.fromHashes(set) |
| b69ab31 | | | 326 | .toHashes() |
| b69ab31 | | | 327 | .flatMap(h => f(h)), |
| b69ab31 | | | 328 | ); |
| b69ab31 | | | 329 | } |
| b69ab31 | | | 330 | |
| b69ab31 | | | 331 | /** set + flatMap(set, f) + flatMap(flatMap(set, f), f) + ... */ |
| b69ab31 | | | 332 | export function unionFlatMap( |
| b69ab31 | | | 333 | set: SetLike, |
| b69ab31 | | | 334 | f: (h: Hash) => List<Hash> | Readonly<Array<Hash>>, |
| b69ab31 | | | 335 | ): HashSet { |
| b69ab31 | | | 336 | let result = new HashSet().toHashes(); |
| b69ab31 | | | 337 | let newHashes = [...HashSet.fromHashes(set)]; |
| b69ab31 | | | 338 | while (newHashes.length > 0) { |
| b69ab31 | | | 339 | result = result.concat(newHashes); |
| b69ab31 | | | 340 | const nextNewHashes: Hash[] = []; |
| b69ab31 | | | 341 | newHashes.forEach(h => { |
| b69ab31 | | | 342 | f(h).forEach(v => { |
| b69ab31 | | | 343 | if (!result.contains(v)) { |
| b69ab31 | | | 344 | nextNewHashes.push(v); |
| b69ab31 | | | 345 | } |
| b69ab31 | | | 346 | }); |
| b69ab31 | | | 347 | }); |
| b69ab31 | | | 348 | newHashes = nextNewHashes; |
| b69ab31 | | | 349 | } |
| b69ab31 | | | 350 | return HashSet.fromHashes(result); |
| b69ab31 | | | 351 | } |
| b69ab31 | | | 352 | |
| b69ab31 | | | 353 | export type SortProps<C> = {compare?: (a: C, b: C) => number; gap?: boolean}; |
| b69ab31 | | | 354 | |
| b69ab31 | | | 355 | /** |
| b69ab31 | | | 356 | * If `set` is undefined, return a function that always returns true. |
| b69ab31 | | | 357 | * Otherwise, return a function that checks whether `set` contains `h`. |
| b69ab31 | | | 358 | */ |
| b69ab31 | | | 359 | function nullableWithinContains(set?: SetLike): (h: Hash) => boolean { |
| b69ab31 | | | 360 | if (set === undefined) { |
| b69ab31 | | | 361 | return _h => true; |
| b69ab31 | | | 362 | } else { |
| b69ab31 | | | 363 | const hashSet = HashSet.fromHashes(set); |
| b69ab31 | | | 364 | return h => hashSet.contains(h); |
| b69ab31 | | | 365 | } |
| b69ab31 | | | 366 | } |
| b69ab31 | | | 367 | |
| b69ab31 | | | 368 | /** Minimal fields needed to be used in commit graph structures. */ |
| b69ab31 | | | 369 | export interface HashWithParents { |
| b69ab31 | | | 370 | hash: Hash; |
| b69ab31 | | | 371 | parents: ReadonlyArray<Hash>; |
| b69ab31 | | | 372 | grandparents: ReadonlyArray<Hash>; |
| b69ab31 | | | 373 | } |
| b69ab31 | | | 374 | |
// Internal state of a BaseDag, stored as an immutable Record.
type BaseDagProps<C extends HashWithParents> = {
  infoMap: ImMap<Hash, Readonly<C>>;
  // childMap is derived from infoMap.
  childMap: ImMap<Hash, List<Hash>>;
};

// Record factory; both maps default to empty.
const BaseDagRecord = Record<BaseDagProps<HashWithParents>>({
  infoMap: ImMap(),
  childMap: ImMap(),
});
// Generic alias so BaseDag<C> can type its inner record with the commit type C.
type BaseDagRecord<C extends HashWithParents> = RecordOf<BaseDagProps<C>>;

// Shared singletons to avoid re-allocating empty structures.
const EMPTY_DAG_RECORD = BaseDagRecord();
const EMPTY_LIST = List<Hash>();