|
| 1 | +/** |
| 2 | + * @license |
| 3 | + * Copyright Google LLC All Rights Reserved. |
| 4 | + * |
| 5 | + * Use of this source code is governed by an MIT-style license that can be |
| 6 | + * found in the LICENSE file at https://angular.io/license |
| 7 | + */ |
| 8 | + |
| 9 | +import {TrackByFunction} from '../change_detection'; |
| 10 | + |
| 11 | +/** |
| 12 | + * A type representing the live collection to be reconciled with any new (incoming) collection. This |
| 13 | + * is an adapter class that makes it possible to work with different internal data structures, |
| 14 | + * regardless of the actual values of the incoming collection. |
| 15 | + */ |
| 16 | +export abstract class LiveCollection<T, V> { |
| 17 | + abstract get length(): number; |
| 18 | + abstract at(index: number): T; |
| 19 | + abstract key(index: number): unknown; |
| 20 | + abstract attach(index: number, item: T): void; |
| 21 | + abstract detach(index: number): T; |
| 22 | + abstract create(index: number, value: V): T; |
| 23 | + destroy(item: T): void { |
| 24 | + // noop by default |
| 25 | + } |
| 26 | + updateValue(index: number, value: V): void { |
| 27 | + // noop by default |
| 28 | + } |
| 29 | + |
| 30 | + // operations below could be implemented on top of the operations defined so far, but having |
| 31 | + // them explicitly allow clear expression of intent and potentially more performant |
| 32 | + // implementations |
| 33 | + swap(index1: number, index2: number): void { |
| 34 | + const startIdx = Math.min(index1, index2); |
| 35 | + const endIdx = Math.max(index1, index2); |
| 36 | + const endItem = this.detach(endIdx); |
| 37 | + if (endIdx - startIdx > 1) { |
| 38 | + const startItem = this.detach(startIdx); |
| 39 | + this.attach(startIdx, endItem); |
| 40 | + this.attach(endIdx, startItem); |
| 41 | + } else { |
| 42 | + this.attach(startIdx, endItem); |
| 43 | + } |
| 44 | + } |
| 45 | + move(prevIndex: number, newIdx: number): void { |
| 46 | + this.attach(newIdx, this.detach(prevIndex)); |
| 47 | + } |
| 48 | +} |
| 49 | + |
/**
 * The live collection reconciliation algorithm that performs various in-place operations, so it
 * reflects the content of the new (incoming) collection.
 *
 * The reconciliation algorithm has 2 code paths:
 * - "fast" path that doesn't require any memory allocation;
 * - "slow" path that requires additional memory allocation for intermediate data structures used
 * to collect additional information about the live collection.
 * It might happen that the algorithm switches between the two modes in question in a single
 * reconciliation pass - generally it tries to stay on the "fast" path as much as possible.
 *
 * The overall complexity of the algorithm is O(n + m) for speed and O(n) for memory (where n is
 * the length of the live collection and m is the length of the incoming collection). Given the
 * problem at hand the complexity / performance constraints make it impossible to perform the
 * absolute minimum of operations to reconcile the 2 collections. The algorithm makes different
 * tradeoffs to stay within reasonable performance bounds and may apply a sub-optimal number of
 * operations in certain situations.
 *
 * @param liveCollection the current, live collection;
 * @param newCollection the new, incoming collection;
 * @param trackByFn key generation function that determines equality between items in the live and
 *     incoming collections;
 */
export function reconcile<T, V>(
    liveCollection: LiveCollection<T, V>, newCollection: Iterable<V>|undefined|null,
    trackByFn: TrackByFunction<V>): void {
  // Slow-path data structures, allocated lazily (only once the fast path fails):
  // - items detached from the live collection, indexed by their track-by key, waiting to be
  //   re-attached at a new position;
  let detachedItems: MultiMap<unknown, T>|undefined = undefined;
  // - track-by keys of the live items in the not-yet-processed [liveStartIdx, liveEndIdx] range.
  let liveKeysInTheFuture: Set<unknown>|undefined = undefined;

  let liveStartIdx = 0;
  let liveEndIdx = liveCollection.length - 1;

  if (Array.isArray(newCollection)) {
    let newEndIdx = newCollection.length - 1;

    while (liveStartIdx <= liveEndIdx && liveStartIdx <= newEndIdx) {
      // compare from the beginning
      // Invariant: the front pointers of the live and new collections advance together (the
      // attach / create branches below bump liveEndIdx to compensate for growth), which is why
      // liveStartIdx can be used to index into newCollection here.
      const liveStartKey = liveCollection.key(liveStartIdx);
      const newStartValue = newCollection[liveStartIdx];
      const newStartKey = trackByFn(liveStartIdx, newStartValue);
      if (Object.is(liveStartKey, newStartKey)) {
        // same item at the front - refresh its value in place and advance.
        liveCollection.updateValue(liveStartIdx, newStartValue);
        liveStartIdx++;
        continue;
      }

      // compare from the end
      // TODO(perf): do _all_ the matching from the end
      const liveEndKey = liveCollection.key(liveEndIdx);
      const newEndItem = newCollection[newEndIdx];
      const newEndKey = trackByFn(newEndIdx, newEndItem);
      if (Object.is(liveEndKey, newEndKey)) {
        // same item at the back - refresh its value in place and shrink both end pointers.
        liveCollection.updateValue(liveEndIdx, newEndItem);
        liveEndIdx--;
        newEndIdx--;
        continue;
      }

      // Detect swap / moves:
      if (Object.is(newStartKey, liveEndKey) && Object.is(newEndKey, liveStartKey)) {
        // swap on both ends;
        liveCollection.swap(liveStartIdx, liveEndIdx);
        liveCollection.updateValue(liveStartIdx, newStartValue);
        liveCollection.updateValue(liveEndIdx, newEndItem);
        newEndIdx--;
        liveStartIdx++;
        liveEndIdx--;
        continue;
      } else if (Object.is(newStartKey, liveEndKey)) {
        // the new item is the same as the live item with the end pointer - this is a move forward
        // to an earlier index;
        liveCollection.move(liveEndIdx, liveStartIdx);
        liveCollection.updateValue(liveStartIdx, newStartValue);
        liveStartIdx++;
        continue;
      }

      // Fallback to the slow path: we need to learn more about the content of the live and new
      // collections.
      detachedItems ??= new MultiMap();
      liveKeysInTheFuture ??= initLiveItemsInTheFuture(liveCollection, liveStartIdx, liveEndIdx);

      // Check if I'm inserting a previously detached item: if so, attach it here
      if (attachPreviouslyDetached(liveCollection, detachedItems, liveStartIdx, newStartKey)) {
        liveCollection.updateValue(liveStartIdx, newStartValue);
        liveStartIdx++;
        // attaching grew the live collection - shift the end pointer so it stays on the same item.
        liveEndIdx++;
      } else if (!liveKeysInTheFuture.has(newStartKey)) {
        // We've seen a new item that doesn't exist in the old collection and must be INSERTED.
        const newItem = liveCollection.create(liveStartIdx, newCollection[liveStartIdx]);
        liveCollection.attach(liveStartIdx, newItem);
        liveStartIdx++;
        // attaching grew the live collection - shift the end pointer so it stays on the same item.
        liveEndIdx++;
      } else {
        // We know that the new item exists later on in old collection but we don't know its index
        // and as the consequence can't move it (don't know where to find it). Detach the old item,
        // hoping that it unlocks the fast path again.
        detachedItems.set(liveStartKey, liveCollection.detach(liveStartIdx));
        // detaching shrank the live collection - pull the end pointer back.
        liveEndIdx--;
      }
    }

    // Final cleanup steps:
    // - more items in the new collection => insert (or re-attach a previously detached item).
    while (liveStartIdx <= newEndIdx) {
      createOrAttach(
          liveCollection, detachedItems, trackByFn, liveStartIdx, newCollection[liveStartIdx]);
      liveStartIdx++;
    }

  } else if (newCollection != null) {
    // iterable - immediately fallback to the slow path
    // (no random access into the incoming collection, so the "compare from the end" tricks used
    // for arrays are not available here).
    const newCollectionIterator = newCollection[Symbol.iterator]();
    let newIterationResult = newCollectionIterator.next();
    while (!newIterationResult.done && liveStartIdx <= liveEndIdx) {
      const newValue = newIterationResult.value;
      const newKey = trackByFn(liveStartIdx, newValue);
      const liveKey = liveCollection.key(liveStartIdx);
      if (Object.is(liveKey, newKey)) {
        // found a match - move on
        liveCollection.updateValue(liveStartIdx, newValue);
        liveStartIdx++;
        newIterationResult = newCollectionIterator.next();
      } else {
        detachedItems ??= new MultiMap();
        liveKeysInTheFuture ??= initLiveItemsInTheFuture(liveCollection, liveStartIdx, liveEndIdx);

        // Check if I'm inserting a previously detached item: if so, attach it here
        if (attachPreviouslyDetached(liveCollection, detachedItems, liveStartIdx, newKey)) {
          liveCollection.updateValue(liveStartIdx, newValue);
          liveStartIdx++;
          liveEndIdx++;
          newIterationResult = newCollectionIterator.next();
        } else if (!liveKeysInTheFuture.has(newKey)) {
          // a genuinely new item - create and attach it at the front pointer.
          liveCollection.attach(liveStartIdx, liveCollection.create(liveStartIdx, newValue));
          liveStartIdx++;
          liveEndIdx++;
          newIterationResult = newCollectionIterator.next();
        } else {
          // it is a move forward - detach the current item without advancing in collections
          detachedItems.set(liveKey, liveCollection.detach(liveStartIdx));
          liveEndIdx--;
        }
      }
    }

    // this is a new item as we run out of the items in the old collection - create or attach a
    // previously detached one
    while (!newIterationResult.done) {
      createOrAttach(
          liveCollection, detachedItems, trackByFn, liveCollection.length,
          newIterationResult.value);
      newIterationResult = newCollectionIterator.next();
    }
  }

  // Cleanups common to the array and iterable:
  // - more items in the live collection => delete starting from the end;
  while (liveStartIdx <= liveEndIdx) {
    liveCollection.destroy(liveCollection.detach(liveEndIdx--));
  }

  // - destroy items that were detached but never attached again.
  detachedItems?.forEach(item => liveCollection.destroy(item));
}
| 215 | + |
| 216 | +function attachPreviouslyDetached<T, V>( |
| 217 | + prevCollection: LiveCollection<T, V>, detachedItems: MultiMap<unknown, T>|undefined, |
| 218 | + index: number, key: unknown): boolean { |
| 219 | + if (detachedItems !== undefined && detachedItems.has(key)) { |
| 220 | + prevCollection.attach(index, detachedItems.get(key)!); |
| 221 | + detachedItems.delete(key); |
| 222 | + return true; |
| 223 | + } |
| 224 | + return false; |
| 225 | +} |
| 226 | + |
| 227 | +function createOrAttach<T, V>( |
| 228 | + liveCollection: LiveCollection<T, V>, detachedItems: MultiMap<unknown, T>|undefined, |
| 229 | + trackByFn: TrackByFunction<unknown>, index: number, value: V) { |
| 230 | + if (!attachPreviouslyDetached(liveCollection, detachedItems, index, trackByFn(index, value))) { |
| 231 | + const newItem = liveCollection.create(index, value); |
| 232 | + liveCollection.attach(index, newItem); |
| 233 | + } else { |
| 234 | + liveCollection.updateValue(index, value); |
| 235 | + } |
| 236 | +} |
| 237 | + |
| 238 | +function initLiveItemsInTheFuture<T>( |
| 239 | + liveCollection: LiveCollection<unknown, unknown>, start: number, end: number): Set<unknown> { |
| 240 | + const keys = new Set(); |
| 241 | + for (let i = start; i <= end; i++) { |
| 242 | + keys.add(liveCollection.key(i)); |
| 243 | + } |
| 244 | + return keys; |
| 245 | +} |
| 246 | + |
| 247 | +class MultiMap<K, V> { |
| 248 | + private map = new Map<K, Array<V>>(); |
| 249 | + |
| 250 | + has(key: K): boolean { |
| 251 | + const listOfKeys = this.map.get(key); |
| 252 | + return listOfKeys !== undefined && listOfKeys.length > 0; |
| 253 | + } |
| 254 | + |
| 255 | + delete(key: K): boolean { |
| 256 | + const listOfKeys = this.map.get(key); |
| 257 | + if (listOfKeys !== undefined) { |
| 258 | + // THINK: pop from the end or shift from the front? "Correct" vs. "slow". |
| 259 | + listOfKeys.pop(); |
| 260 | + return true; |
| 261 | + } |
| 262 | + return false; |
| 263 | + } |
| 264 | + |
| 265 | + get(key: K): V|undefined { |
| 266 | + const listOfKeys = this.map.get(key); |
| 267 | + return listOfKeys !== undefined && listOfKeys.length > 0 ? listOfKeys[0] : undefined; |
| 268 | + } |
| 269 | + |
| 270 | + set(key: K, value: V): void { |
| 271 | + // if value is array, they we always store it as [value]. |
| 272 | + if (!this.map.has(key)) { |
| 273 | + this.map.set(key, [value]); |
| 274 | + return; |
| 275 | + } |
| 276 | + // THINK: this allows duplicate values, but I guess this is fine? |
| 277 | + // Is the existing key an array or not? |
| 278 | + this.map.get(key)?.push(value); |
| 279 | + } |
| 280 | + |
| 281 | + forEach(cb: (v: V, k: K) => void) { |
| 282 | + for (const [key, values] of this.map) { |
| 283 | + for (const value of values) { |
| 284 | + cb(value, key); |
| 285 | + } |
| 286 | + } |
| 287 | + } |
| 288 | +} |
0 commit comments