// Source: deno.land/x/replicache@v10.0.0-beta.0/btree/read.ts
import {deepEqual, getSizeOfValue, ReadonlyJSONValue} from '../json';
import type * as dag from '../dag/mod';
import {Hash, emptyHash} from '../hash';
import {
  DataNodeImpl,
  InternalNodeImpl,
  Entry,
  emptyDataNodeImpl,
  assertBTreeNode,
  newNodeImpl,
  findLeaf,
  binarySearch,
  DiffResult,
  DiffResultOp,
  ReadonlyEntry,
  NODE_LEVEL,
  NODE_ENTRIES,
  isInternalNode,
  DataNode,
  InternalNode,
} from './node';
import {
  computeSplices,
  SPLICE_ADDED,
  SPLICE_AT,
  SPLICE_FROM,
  SPLICE_REMOVED,
} from './splice';
/**
 * The size of the header of a node. (If we had compile time
 * constants we would have used that.)
 *
 * There is a test ensuring this is correct.
 */
export const NODE_HEADER_SIZE = 11;
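// Sketch (illustrative, not part of the original file): how this constant
// relates to a node's size. A node costs the fixed header plus the sum of
// its entry sizes, mirroring the getEntrySize/chunkHeaderSize defaults that
// BTreeRead uses below. The helper name is hypothetical.
function _estimatedNodeSize(
  entries: readonly Entry<ReadonlyJSONValue>[],
): number {
  let size = NODE_HEADER_SIZE; // fixed per-node overhead
  for (const entry of entries) {
    size += getSizeOfValue(entry); // default per-entry size measure
  }
  return size;
}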
export class BTreeRead
  implements AsyncIterable<ReadonlyEntry<ReadonlyJSONValue>>
{
  rootHash: Hash;
  protected readonly _dagRead: dag.Read;
  private readonly _cache: Map<Hash, DataNodeImpl | InternalNodeImpl> =
    new Map();
  readonly getEntrySize: <T>(e: Entry<T>) => number;
  readonly chunkHeaderSize: number;
  constructor(
    dagRead: dag.Read,
    root: Hash = emptyHash,
    getEntrySize: <T>(e: Entry<T>) => number = getSizeOfValue,
    chunkHeaderSize = NODE_HEADER_SIZE,
  ) {
    this.rootHash = root;
    this._dagRead = dagRead;
    this.getEntrySize = getEntrySize;
    this.chunkHeaderSize = chunkHeaderSize;
  }
  async getNode(hash: Hash): Promise<DataNodeImpl | InternalNodeImpl> {
    if (hash === emptyHash) {
      return emptyDataNodeImpl;
    }
    const cached = this._cache.get(hash);
    if (cached) {
      return cached;
    }
    const {data} = await this._dagRead.mustGetChunk(hash);
    assertBTreeNode(data);
    const impl = newNodeImpl(
      // We enforce that we do not mutate this at runtime by first checking the
      // hash.
      data[NODE_ENTRIES] as Entry<ReadonlyJSONValue>[],
      hash,
      data[NODE_LEVEL],
      false,
    );
    this._cache.set(hash, impl);
    return impl;
  }
  async get(key: string): Promise<ReadonlyJSONValue | undefined> {
    const leaf = await findLeaf(key, this.rootHash, this);
    const index = binarySearch(key, leaf.entries);
    if (index < 0) {
      return undefined;
    }
    return leaf.entries[index][1];
  }
  async has(key: string): Promise<boolean> {
    const leaf = await findLeaf(key, this.rootHash, this);
    return binarySearch(key, leaf.entries) >= 0;
  }
  async isEmpty(): Promise<boolean> {
    const node = await this.getNode(this.rootHash);
    return node.entries.length === 0;
  }
  // We don't do any encoding of the key in the map, so we have no way of
  // determining from an entry.key alone whether it is a regular key or an
  // encoded IndexKey in an index map. Without encoding regular map keys the
  // caller has to deal with encoding and decoding the keys for the index map.
  scan(
    fromKey: string,
  ): AsyncIterableIterator<ReadonlyEntry<ReadonlyJSONValue>> {
    return scanForHash(this.rootHash, fromKey, async (hash: Hash) => {
      const cached = await this.getNode(hash);
      if (cached) {
        return cached.toChunkData();
      }
      const {data} = await this._dagRead.mustGetChunk(hash);
      assertBTreeNode(data);
      return data;
    });
  }
  async *keys(): AsyncIterableIterator<string> {
    const node = await this.getNode(this.rootHash);
    yield* node.keys(this);
  }
  async *entries(): AsyncIterableIterator<ReadonlyEntry<ReadonlyJSONValue>> {
    const node = await this.getNode(this.rootHash);
    yield* node.entriesIter(this);
  }
  [Symbol.asyncIterator](): AsyncIterableIterator<
    ReadonlyEntry<ReadonlyJSONValue>
  > {
    return this.entries();
  }
  async *diff(
    last: BTreeRead,
  ): AsyncIterableIterator<DiffResult<ReadonlyJSONValue>> {
    const [currentNode, lastNode] = await Promise.all([
      this.getNode(this.rootHash),
      last.getNode(last.rootHash),
    ]);
    yield* diffNodes(lastNode, currentNode, last, this);
  }
  async *diffKeys(last: BTreeRead): AsyncIterableIterator<string> {
    for await (const {key} of this.diff(last)) {
      yield key;
    }
  }
}
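// Usage sketch (illustrative, not part of the original file). It assumes a
// dag.Read named `dagRead` and two root hashes obtained elsewhere (e.g. from
// commits); the function name and the 'user/...' key scheme are hypothetical.
async function _exampleBTreeRead(
  dagRead: dag.Read,
  rootHash: Hash,
  lastRootHash: Hash,
): Promise<void> {
  const map = new BTreeRead(dagRead, rootHash);

  // Point lookups descend via findLeaf and binary-search the leaf entries.
  console.log(await map.get('user/42')); // undefined when the key is absent
  console.log(await map.has('user/42'));

  // Ordered scan starting at a key. Keys are not decoded here; index-map
  // key encoding is the caller's concern (see the comment on scan()).
  for await (const [key, value] of map.scan('user/')) {
    if (!key.startsWith('user/')) break; // stop once past the prefix
    console.log(key, value);
  }

  // Diff against an older tree. Internal entries with equal child hashes
  // produce no splices, so unchanged subtrees are skipped without loading.
  const last = new BTreeRead(dagRead, lastRootHash);
  for await (const change of map.diff(last)) {
    console.log(change.op, change.key);
  }
}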
async function* diffNodes(
  last: InternalNodeImpl | DataNodeImpl,
  current: InternalNodeImpl | DataNodeImpl,
  lastTree: BTreeRead,
  currentTree: BTreeRead,
): AsyncIterableIterator<DiffResult<ReadonlyJSONValue>> {
  if (last.level > current.level) {
    // merge all of last's children into a new node
    // We know last is an internal node because level > 0.
    const lastChild = (await (last as InternalNodeImpl).getCompositeChildren(
      0,
      last.entries.length,
      lastTree,
    )) as InternalNodeImpl;
    yield* diffNodes(lastChild, current, lastTree, currentTree);
    return;
  }
  if (current.level > last.level) {
    // We know current is an internal node because level > 0.
    const currentChild = (await (
      current as InternalNodeImpl
    ).getCompositeChildren(
      0,
      current.entries.length,
      currentTree,
    )) as InternalNodeImpl;
    yield* diffNodes(last, currentChild, lastTree, currentTree);
    return;
  }
  if (last.level === 0 && current.level === 0) {
    yield* diffEntries(last.entries, current.entries);
    return;
  }
  // Now we have two internal nodes with the same level. We compute the diff as
  // splices for the internal node entries. We then flatten these and call diff
  // recursively.
  const initialSplices = computeSplices(last.entries, current.entries);
  for (const splice of initialSplices) {
    const [lastChild, currentChild] = await Promise.all([
      (last as InternalNodeImpl).getCompositeChildren(
        splice[SPLICE_AT],
        splice[SPLICE_REMOVED],
        lastTree,
      ),
      (current as InternalNodeImpl).getCompositeChildren(
        splice[SPLICE_FROM],
        splice[SPLICE_ADDED],
        currentTree,
      ),
    ]);
    yield* diffNodes(lastChild, currentChild, lastTree, currentTree);
  }
}
function* diffEntries<T>(
  lastEntries: ReadonlyArray<ReadonlyEntry<T>>,
  currentEntries: ReadonlyArray<ReadonlyEntry<T>>,
): IterableIterator<DiffResult<ReadonlyJSONValue>> {
  const lastLength = lastEntries.length;
  const currentLength = currentEntries.length;
  let i = 0;
  let j = 0;
  while (i < lastLength && j < currentLength) {
    const lastKey = lastEntries[i][0];
    const currentKey = currentEntries[j][0];
    if (lastKey === currentKey) {
      if (!deepEqual(lastEntries[i][1], currentEntries[j][1])) {
        yield {
          op: DiffResultOp.Change,
          key: lastKey,
          oldValue: lastEntries[i][1],
          newValue: currentEntries[j][1],
        };
      }
      i++;
      j++;
    } else if (lastKey < currentKey) {
      yield {
        op: DiffResultOp.Delete,
        key: lastKey,
        oldValue: lastEntries[i][1],
      };
      i++;
    } else {
      yield {
        op: DiffResultOp.Add,
        key: currentKey,
        newValue: currentEntries[j][1],
      };
      j++;
    }
  }
  for (; i < lastLength; i++) {
    yield {
      op: DiffResultOp.Delete,
      key: lastEntries[i][0],
      oldValue: lastEntries[i][1],
    };
  }
  for (; j < currentLength; j++) {
    yield {
      op: DiffResultOp.Add,
      key: currentEntries[j][0],
      newValue: currentEntries[j][1],
    };
  }
}
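// Worked example (illustrative, not part of the original file): diffEntries
// is a single-pass merge of two key-sorted entry lists.
function _diffEntriesExample(): DiffResult<ReadonlyJSONValue>[] {
  const last: ReadonlyEntry<ReadonlyJSONValue>[] = [
    ['a', 1],
    ['b', 2],
  ];
  const current: ReadonlyEntry<ReadonlyJSONValue>[] = [
    ['b', 3],
    ['c', 4],
  ];
  // Yields, in key order:
  //   {op: DiffResultOp.Delete, key: 'a', oldValue: 1}
  //   {op: DiffResultOp.Change, key: 'b', oldValue: 2, newValue: 3}
  //   {op: DiffResultOp.Add, key: 'c', newValue: 4}
  return [...diffEntries(last, current)];
}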
type ReadNode = (hash: Hash) => Promise<InternalNode | DataNode>;
export async function* scanForHash(
  hash: Hash,
  fromKey: string,
  readNode: ReadNode,
): AsyncIterableIterator<ReadonlyEntry<ReadonlyJSONValue>> {
  if (hash === emptyHash) {
    return;
  }
  const data = await readNode(hash);
  assertBTreeNode(data);
  const entries = data[NODE_ENTRIES];
  let i = 0;
  if (fromKey) {
    i = binarySearch(fromKey, entries);
    if (i < 0) {
      // Key not present: binarySearch returns the bitwise complement of the
      // insertion index, so ~i is where the scan should start.
      i = ~i;
    }
  }
  if (isInternalNode(data)) {
    for (; i < entries.length; i++) {
      yield* scanForHash(
        (entries[i] as ReadonlyEntry<Hash>)[1],
        fromKey,
        readNode,
      );
      fromKey = '';
    }
  } else {
    for (; i < entries.length; i++) {
      yield entries[i];
    }
  }
}
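// Sketch (illustrative, not part of the original file): calling scanForHash
// directly with a ReadNode backed by a dag.Read. BTreeRead.scan does the
// same thing but answers from its node cache first. The function name and
// the 'user/' start key are hypothetical.
async function _exampleScanForHash(
  dagRead: dag.Read,
  rootHash: Hash,
): Promise<void> {
  const readNode: ReadNode = async hash => {
    const {data} = await dagRead.mustGetChunk(hash);
    assertBTreeNode(data);
    return data;
  };
  for await (const [key, value] of scanForHash(rootHash, 'user/', readNode)) {
    console.log(key, value);
  }
}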