deno.land / x / replicache@v10.0.0-beta.0 / btree / read.ts

import {deepEqual, getSizeOfValue, ReadonlyJSONValue} from '../json';
import type * as dag from '../dag/mod';
import {Hash, emptyHash} from '../hash';
import {
  DataNodeImpl,
  InternalNodeImpl,
  Entry,
  emptyDataNodeImpl,
  assertBTreeNode,
  newNodeImpl,
  findLeaf,
  binarySearch,
  DiffResult,
  DiffResultOp,
  ReadonlyEntry,
  NODE_LEVEL,
  NODE_ENTRIES,
  isInternalNode,
  DataNode,
  InternalNode,
} from './node';
import {
  computeSplices,
  SPLICE_ADDED,
  SPLICE_AT,
  SPLICE_FROM,
  SPLICE_REMOVED,
} from './splice';

/**
 * The size of the header of a node. (If we had compile time
 * constants we would have used that).
 *
 * There is a test ensuring this is correct.
 */
export const NODE_HEADER_SIZE = 11;

export class BTreeRead
  implements AsyncIterable<ReadonlyEntry<ReadonlyJSONValue>>
{
  rootHash: Hash;
  protected readonly _dagRead: dag.Read;
  private readonly _cache: Map<Hash, DataNodeImpl | InternalNodeImpl> =
    new Map();

  readonly getEntrySize: <T>(e: Entry<T>) => number;
  readonly chunkHeaderSize: number;

  constructor(
    dagRead: dag.Read,
    root: Hash = emptyHash,
    getEntrySize: <T>(e: Entry<T>) => number = getSizeOfValue,
    chunkHeaderSize = NODE_HEADER_SIZE,
  ) {
    this.rootHash = root;
    this._dagRead = dagRead;
    this.getEntrySize = getEntrySize;
    this.chunkHeaderSize = chunkHeaderSize;
  }

  async getNode(hash: Hash): Promise<DataNodeImpl | InternalNodeImpl> {
    if (hash === emptyHash) {
      return emptyDataNodeImpl;
    }

    const cached = this._cache.get(hash);
    if (cached) {
      return cached;
    }

    const {data} = await this._dagRead.mustGetChunk(hash);
    assertBTreeNode(data);
    const impl = newNodeImpl(
      // We enforce that we do not mutate this at runtime by first checking the
      // hash.
      data[NODE_ENTRIES] as Entry<ReadonlyJSONValue>[],
      hash,
      data[NODE_LEVEL],
      false,
    );
    this._cache.set(hash, impl);
    return impl;
  }

  async get(key: string): Promise<ReadonlyJSONValue | undefined> {
    const leaf = await findLeaf(key, this.rootHash, this);
    const index = binarySearch(key, leaf.entries);
    if (index < 0) {
      return undefined;
    }
    return leaf.entries[index][1];
  }

  async has(key: string): Promise<boolean> {
    const leaf = await findLeaf(key, this.rootHash, this);
    return binarySearch(key, leaf.entries) >= 0;
  }

  async isEmpty(): Promise<boolean> {
    const node = await this.getNode(this.rootHash);
    return node.entries.length === 0;
  }

  // We don't do any encoding of the key in the map, so we have no way of
  // determining from an entry.key alone whether it is a regular key or an
  // encoded IndexKey in an index map. Without encoding regular map keys the
  // caller has to deal with encoding and decoding the keys for the index map.
  scan(
    fromKey: string,
  ): AsyncIterableIterator<ReadonlyEntry<ReadonlyJSONValue>> {
    return scanForHash(this.rootHash, fromKey, async (hash: Hash) => {
      const cached = await this.getNode(hash);
      if (cached) {
        return cached.toChunkData();
      }
      const {data} = await this._dagRead.mustGetChunk(hash);
      assertBTreeNode(data);
      return data;
    });
  }

  async *keys(): AsyncIterableIterator<string> {
    const node = await this.getNode(this.rootHash);
    yield* node.keys(this);
  }

  async *entries(): AsyncIterableIterator<ReadonlyEntry<ReadonlyJSONValue>> {
    const node = await this.getNode(this.rootHash);
    yield* node.entriesIter(this);
  }

  [Symbol.asyncIterator](): AsyncIterableIterator<
    ReadonlyEntry<ReadonlyJSONValue>
  > {
    return this.entries();
  }

  async *diff(
    last: BTreeRead,
  ): AsyncIterableIterator<DiffResult<ReadonlyJSONValue>> {
    const [currentNode, lastNode] = await Promise.all([
      this.getNode(this.rootHash),
      last.getNode(last.rootHash),
    ]);
    yield* diffNodes(lastNode, currentNode, last, this);
  }

  async *diffKeys(last: BTreeRead): AsyncIterableIterator<string> {
    for await (const {key} of this.diff(last)) {
      yield key;
    }
  }
}

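/*
 * Example (illustrative sketch, not part of the original file). It assumes a
 * dag.Read handle named `dagRead`, a B+Tree root hash `rootHash`, and a
 * previous snapshot `previousMap`, all obtained elsewhere (for example from
 * the commit being read):
 *
 *   const map = new BTreeRead(dagRead, rootHash);
 *   const value = await map.get('user/42');
 *   const present = await map.has('user/42');
 *   for await (const [key, v] of map.scan('user/')) {
 *     // entries with key >= 'user/', in ascending key order
 *   }
 *   for await (const change of map.diff(previousMap)) {
 *     // change is a DiffResult: {op, key, oldValue?, newValue?}
 *   }
 */
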
async function* diffNodes(
  last: InternalNodeImpl | DataNodeImpl,
  current: InternalNodeImpl | DataNodeImpl,
  lastTree: BTreeRead,
  currentTree: BTreeRead,
): AsyncIterableIterator<DiffResult<ReadonlyJSONValue>> {
  if (last.level > current.level) {
    // merge all of last's children into a new node
    // We know last is an internal node because level > 0.
    const lastChild = (await (last as InternalNodeImpl).getCompositeChildren(
      0,
      last.entries.length,
      lastTree,
    )) as InternalNodeImpl;
    yield* diffNodes(lastChild, current, lastTree, currentTree);
    return;
  }

  if (current.level > last.level) {
    // We know current is an internal node because level > 0.
    const currentChild = (await (
      current as InternalNodeImpl
    ).getCompositeChildren(
      0,
      current.entries.length,
      currentTree,
    )) as InternalNodeImpl;
    yield* diffNodes(last, currentChild, lastTree, currentTree);
    return;
  }

  if (last.level === 0 && current.level === 0) {
    yield* diffEntries(last.entries, current.entries);
    return;
  }

  // Now we have two internal nodes with the same level. We compute the diff as
  // splices for the internal node entries. We then flatten these and call diff
  // recursively.
  const initialSplices = computeSplices(last.entries, current.entries);
  for (const splice of initialSplices) {
    const [lastChild, currentChild] = await Promise.all([
      (last as InternalNodeImpl).getCompositeChildren(
        splice[SPLICE_AT],
        splice[SPLICE_REMOVED],
        lastTree,
      ),
      (current as InternalNodeImpl).getCompositeChildren(
        splice[SPLICE_FROM],
        splice[SPLICE_ADDED],
        currentTree,
      ),
    ]);
    yield* diffNodes(lastChild, currentChild, lastTree, currentTree);
  }
}

function* diffEntries<T>(
  lastEntries: ReadonlyArray<ReadonlyEntry<T>>,
  currentEntries: ReadonlyArray<ReadonlyEntry<T>>,
): IterableIterator<DiffResult<ReadonlyJSONValue>> {
  const lastLength = lastEntries.length;
  const currentLength = currentEntries.length;
  let i = 0;
  let j = 0;
  while (i < lastLength && j < currentLength) {
    const lastKey = lastEntries[i][0];
    const currentKey = currentEntries[j][0];
    if (lastKey === currentKey) {
      if (!deepEqual(lastEntries[i][1], currentEntries[j][1])) {
        yield {
          op: DiffResultOp.Change,
          key: lastKey,
          oldValue: lastEntries[i][1],
          newValue: currentEntries[j][1],
        };
      }
      i++;
      j++;
    } else if (lastKey < currentKey) {
      yield {
        op: DiffResultOp.Delete,
        key: lastKey,
        oldValue: lastEntries[i][1],
      };
      i++;
    } else {
      yield {
        op: DiffResultOp.Add,
        key: currentKey,
        newValue: currentEntries[j][1],
      };
      j++;
    }
  }
  for (; i < lastLength; i++) {
    yield {
      op: DiffResultOp.Delete,
      key: lastEntries[i][0],
      oldValue: lastEntries[i][1],
    };
  }
  for (; j < currentLength; j++) {
    yield {
      op: DiffResultOp.Add,
      key: currentEntries[j][0],
      newValue: currentEntries[j][1],
    };
  }
}

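/*
 * Illustrative sketch (not part of the original file) of what diffEntries
 * produces. Both inputs must be sorted by key, as B+Tree node entries are:
 *
 *   diffEntries(
 *     [['a', 1], ['b', 2], ['c', 3]],
 *     [['a', 1], ['b', 20], ['d', 4]],
 *   )
 *
 * yields, in key order:
 *   {op: DiffResultOp.Change, key: 'b', oldValue: 2, newValue: 20}
 *   {op: DiffResultOp.Delete, key: 'c', oldValue: 3}
 *   {op: DiffResultOp.Add, key: 'd', newValue: 4}
 */
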
type ReadNode = (hash: Hash) => Promise<InternalNode | DataNode>;

export async function* scanForHash(
  hash: Hash,
  fromKey: string,
  readNode: ReadNode,
): AsyncIterableIterator<ReadonlyEntry<ReadonlyJSONValue>> {
  if (hash === emptyHash) {
    return;
  }

  const data = await readNode(hash);
  assertBTreeNode(data);
  const entries = data[NODE_ENTRIES];
  let i = 0;
  if (fromKey) {
    i = binarySearch(fromKey, entries);
    if (i < 0) {
      i = ~i;
    }
  }

  if (isInternalNode(data)) {
    for (; i < entries.length; i++) {
      yield* scanForHash(
        (entries[i] as ReadonlyEntry<Hash>)[1],
        fromKey,
        readNode,
      );
      fromKey = '';
    }
  } else {
    for (; i < entries.length; i++) {
      yield entries[i];
    }
  }
}
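
/*
 * Example (illustrative sketch, not part of the original file): scanForHash
 * only depends on the ReadNode callback, so it can be driven directly from a
 * dag.Read without going through BTreeRead's node cache. `dagRead` and
 * `rootHash` are assumed to be obtained elsewhere:
 *
 *   const readNode: ReadNode = async hash => {
 *     const {data} = await dagRead.mustGetChunk(hash);
 *     assertBTreeNode(data);
 *     return data;
 *   };
 *   for await (const [key, value] of scanForHash(rootHash, 'user/', readNode)) {
 *     // entries with key >= 'user/', in ascending key order
 *   }
 */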