FRE-600: Fix code review blockers

- Consolidated duplicate UndoManagers to single instance
- Fixed connection promise to only resolve on 'connected' status
- Fixed WebSocketProvider import (WebsocketProvider)
- Added proper doc.destroy() cleanup
- Renamed isPresenceInitialized property to avoid conflict

Co-Authored-By: Paperclip <noreply@paperclip.ing>
This commit is contained in:
2026-04-25 00:08:01 -04:00
parent 65b552bb08
commit 7c684a42cc
48450 changed files with 5679671 additions and 383 deletions

121
node_modules/y-leveldb/dist/src/y-leveldb.d.ts generated vendored Normal file
View File

@@ -0,0 +1,121 @@
/// <reference types="node" />
// Number of stored updates after which getYDoc() merges them into one entry.
export const PREFERRED_TRIM_SIZE: 500;
// Append `num` to `encoder` as a 4-byte big-endian unsigned integer.
export function writeUint32BigEndian(encoder: encoding.Encoder, num: number): void;
// Read a 4-byte big-endian unsigned integer from `decoder` and advance it.
export function readUint32BigEndian(decoder: decoding.Decoder): number;
// level key codec: keys are arrays of strings / uint32 numbers encoded to Buffers.
export const keyEncoding: {
  buffer: boolean;
  type: string;
  encode: (arr: Array<string | number>) => Buffer;
  decode: (buf: Uint8Array) => (string | number)[];
};
// Read a key range from `db` and resolve with all entries in one flush.
export function getLevelBulkData(db: any, opts: object): Promise<Array<any>>;
// All stored update entries for `docName` (values by default; see `opts`).
export function getLevelUpdates(db: any, docName: string, opts?: any): Promise<Array<Buffer>>;
// Enumerate documents by iterating the state-vector keyspace.
export function getAllDocs(db: any, values: boolean, keys: boolean): Promise<Array<any>>;
// Clock of the newest stored update, or -1 if the document doesn't exist yet.
export function getCurrentUpdateClock(db: any, docName: string): Promise<number>;
/**
 * Persist Yjs documents in a level-compatible key/value store.
 * Public methods are serialized through an internal transaction queue
 * (`tr`, appended to by `_transact`).
 */
export class LeveldbPersistence {
  /**
   * @param {string} location
   * @param {object} [opts]
   * @param {any} [opts.level] Level-compatible adapter. E.g. leveldown, level-rem, level-indexeddb. Defaults to `level`
   * @param {object} [opts.levelOptions] Options that are passed down to the level instance
   */
  constructor(location: string, { level, levelOptions }?: {
    level?: any;
    levelOptions?: object;
  } | undefined);
  // Tail of the internal transaction queue.
  tr: Promise<any>;
  /**
   * Execute a transaction on a database. This will ensure that other processes are currently not writing.
   *
   * This is a private method and might change in the future.
   *
   * @todo only transact on the same room-name. Allow for concurrency of different rooms.
   *
   * @template T
   *
   * @param {function(any):Promise<T>} f A transaction that receives the db object
   * @return {Promise<T>}
   */
  _transact: <T>(f: (arg0: any) => Promise<T>) => Promise<T>;
  /**
   * Merge all stored updates for a document into a single update entry.
   * @param {string} docName
   */
  flushDocument(docName: string): Promise<void>;
  /**
   * @param {string} docName
   * @return {Promise<Y.Doc>}
   */
  getYDoc(docName: string): Promise<Y.Doc>;
  /**
   * @param {string} docName
   * @return {Promise<Uint8Array>}
   */
  getStateVector(docName: string): Promise<Uint8Array>;
  /**
   * @param {string} docName
   * @param {Uint8Array} update
   * @return {Promise<number>} Returns the clock of the stored update
   */
  storeUpdate(docName: string, update: Uint8Array): Promise<number>;
  /**
   * Encode the changes missing from the state described by `stateVector`.
   * @param {string} docName
   * @param {Uint8Array} stateVector
   */
  getDiff(docName: string, stateVector: Uint8Array): Promise<Uint8Array>;
  /**
   * @param {string} docName
   * @return {Promise<void>}
   */
  clearDocument(docName: string): Promise<void>;
  /**
   * @param {string} docName
   * @param {string} metaKey
   * @param {any} value
   * @return {Promise<void>}
   */
  setMeta(docName: string, metaKey: string, value: any): Promise<void>;
  /**
   * @param {string} docName
   * @param {string} metaKey
   * @return {Promise<any>}
   */
  delMeta(docName: string, metaKey: string): Promise<any>;
  /**
   * @param {string} docName
   * @param {string} metaKey
   * @return {Promise<any>} Resolves to undefined when the key does not exist.
   */
  getMeta(docName: string, metaKey: string): Promise<any>;
  /**
   * @return {Promise<Array<string>>}
   */
  getAllDocNames(): Promise<Array<string>>;
  /**
   * @return {Promise<Array<{ name: string, sv: Uint8Array, clock: number }>>}
   */
  getAllDocStateVectors(): Promise<Array<{
    name: string;
    sv: Uint8Array;
    clock: number;
  }>>;
  /**
   * @param {string} docName
   * @return {Promise<Map<string, any>>}
   */
  getMetas(docName: string): Promise<Map<string, any>>;
  /**
   * Close connection to a leveldb database and discard all state and bindings
   *
   * @return {Promise<void>}
   */
  destroy(): Promise<void>;
  /**
   * Delete all data in database.
   */
  clearAll(): Promise<any>;
}
import * as encoding from "lib0/encoding";
import * as decoding from "lib0/decoding";
import { Buffer } from "buffer";
import * as Y from "yjs";
//# sourceMappingURL=y-leveldb.d.ts.map

1
node_modules/y-leveldb/dist/src/y-leveldb.d.ts.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"y-leveldb.d.ts","sourceRoot":"","sources":["../../src/y-leveldb.js"],"names":[],"mappings":";AAUA,sCAAsC;AAoB/B,8CAHI,QAAQ,CAAC,OAAO,OAChB,MAAM,QAMhB;AAYM,6CAHI,QAAQ,CAAC,OAAO,GACf,MAAM,CAUjB;AAED;;;kBAIsB,KAAK,CAAC,MAAM,GAAC,MAAM,CAAC;kBAgBpB,UAAU;EAe/B;AA4CM,qCAJI,GAAG,QACH,MAAM,GACL,OAAO,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAc7B;AAUK,oCALI,GAAG,WACH,MAAM,SACN,GAAG,GACF,OAAO,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,CAMhC;AAUK,+BALI,GAAG,UACH,OAAO,QACP,OAAO,GACN,OAAO,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAO7B;AAOK,0CAJI,GAAG,WACH,MAAM,GACL,OAAO,CAAC,MAAM,CAAC,CAQzB;AA8JF;IACE;;;;;OAKG;IACH,sBALW,MAAM;gBAEN,GAAG;uBACH,MAAM;mBAgChB;IA5BC,iBAA2B;IAC3B;;;;;;;;;;;OAWG;IACH,yBAHoB,GAAG,+BAiBtB;IAGH;;OAEG;IACH,uBAFW,MAAM,iBAQhB;IAED;;;OAGG;IACH,iBAHW,MAAM,GACL,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,CAgBzB;IAED;;;OAGG;IACH,wBAHW,MAAM,GACL,OAAO,CAAC,UAAU,CAAC,CAmB9B;IAED;;;;OAIG;IACH,qBAJW,MAAM,UACN,UAAU,GACT,OAAO,CAAC,MAAM,CAAC,CAI1B;IAED;;;OAGG;IACH,iBAHW,MAAM,eACN,UAAU,uBAKpB;IAED;;;OAGG;IACH,uBAHW,MAAM,GACL,OAAO,CAAC,IAAI,CAAC,CAOxB;IAED;;;;;OAKG;IACH,iBALW,MAAM,WACN,MAAM,SACN,GAAG,GACF,OAAO,CAAC,IAAI,CAAC,CAIxB;IAED;;;;OAIG;IACH,iBAJW,MAAM,WACN,MAAM,GACL,OAAO,CAAC,GAAG,CAAC,CAIvB;IAED;;;;OAIG;IACH,iBAJW,MAAM,WACN,MAAM,GACL,OAAO,CAAC,GAAG,CAAC,CAUvB;IAED;;OAEG;IACH,kBAFY,OAAO,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,CAOjC;IAED;;OAEG;IACH,yBAFY,OAAO,CAAC,KAAK,CAAC;QAAE,IAAI,EAAE,MAAM,CAAC;QAAC,EAAE,EAAE,UAAU,CAAC;QAAC,KAAK,EAAE,MAAM,CAAA;KAAE,CAAC,CAAC,CAU1E;IAED;;;OAGG;IACH,kBAHW,MAAM,GACL,OAAO,CAAC,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC,CAcpC;IAED;;;;OAIG;IACH,WAFY,OAAO,CAAC,IAAI,CAAC,CAIxB;IAED;;OAEG;IACH,yBAEC;CACF"}

861
node_modules/y-leveldb/dist/test.cjs generated vendored Normal file
View File

@@ -0,0 +1,861 @@
'use strict';
var Y = require('yjs');
var encoding = require('lib0/dist/encoding.cjs');
var decoding = require('lib0/dist/decoding.cjs');
var binary = require('lib0/dist/binary.cjs');
var promise = require('lib0/dist/promise.cjs');
var buffer = require('lib0/dist/buffer.cjs');
var defaultLevel = require('level');
var buffer$1 = require('buffer');
var t = require('lib0/dist/testing.cjs');
var environment_js = require('lib0/dist/environment.cjs');
var log = require('lib0/dist/logging.cjs');
/**
 * Wrap a CommonJS export so it can be consumed like an ES default import.
 * Objects that already carry a `default` property are returned unchanged.
 * @param {any} e
 * @return {any}
 */
function _interopDefaultLegacy (e) {
  const hasDefault = e && typeof e === 'object' && 'default' in e;
  return hasDefault ? e : { 'default': e };
}
/**
 * Build a frozen namespace object for a CommonJS module, mirroring what an
 * ES `import * as ns` would produce. Real ES modules pass through untouched.
 * @param {any} e
 * @return {any}
 */
function _interopNamespace(e) {
  if (e && e.__esModule) return e;
  const n = Object.create(null);
  if (e) {
    for (const k of Object.keys(e)) {
      if (k === 'default') continue;
      const d = Object.getOwnPropertyDescriptor(e, k);
      Object.defineProperty(n, k, d.get ? d : {
        enumerable: true,
        get: function () { return e[k]; }
      });
    }
  }
  n["default"] = e;
  return Object.freeze(n);
}
// Namespace-style views over the CommonJS requires above (rollup interop).
var Y__namespace = /*#__PURE__*/_interopNamespace(Y);
var encoding__namespace = /*#__PURE__*/_interopNamespace(encoding);
var decoding__namespace = /*#__PURE__*/_interopNamespace(decoding);
var binary__namespace = /*#__PURE__*/_interopNamespace(binary);
var promise__namespace = /*#__PURE__*/_interopNamespace(promise);
var buffer__namespace = /*#__PURE__*/_interopNamespace(buffer);
var defaultLevel__default = /*#__PURE__*/_interopDefaultLegacy(defaultLevel);
var t__namespace = /*#__PURE__*/_interopNamespace(t);
var log__namespace = /*#__PURE__*/_interopNamespace(log);
// Once a document has more than this many stored updates, getYDoc() merges
// them into a single stored update (see flushDocument below).
const PREFERRED_TRIM_SIZE = 500;
// One-byte type tags used by keyEncoding to mark each key component.
const YEncodingString = 0;
const YEncodingUint32 = 1;
// level value codec: values are stored as-is (raw bytes, no transformation).
const valueEncoding = {
  buffer: true,
  type: 'y-value',
  encode: /** @param {any} data */ data => data,
  decode: /** @param {any} data */ data => data
};
/**
 * Write a number as a 4-byte unsigned integer in big endian order.
 * (most significant byte first)
 *
 * @function
 * @param {encoding.Encoder} encoder
 * @param {number} num The number that is to be encoded.
 */
const writeUint32BigEndian = (encoder, num) => {
  let shift = 24;
  while (shift >= 0) {
    encoding__namespace.write(encoder, (num >>> shift) & binary__namespace.BITS8);
    shift -= 8;
  }
};
/**
* Read 4 bytes as unsigned integer in big endian order.
* (most significant byte first)
*
* @todo use lib0/decoding instead
*
* @function
* @param {decoding.Decoder} decoder
* @return {number} An unsigned integer.
*/
const readUint32BigEndian = decoder => {
const uint =
(decoder.arr[decoder.pos + 3] +
(decoder.arr[decoder.pos + 2] << 8) +
(decoder.arr[decoder.pos + 1] << 16) +
(decoder.arr[decoder.pos] << 24)) >>> 0;
decoder.pos += 4;
return uint
};
// level key codec: keys are arrays of strings and uint32 numbers. Each
// component is written with a one-byte type tag (YEncodingString /
// YEncodingUint32) so decode can reverse the process.
const keyEncoding = {
  buffer: true,
  type: 'y-keys',
  /* istanbul ignore next */
  encode: /** @param {Array<string|number>} arr */ arr => {
    const encoder = encoding__namespace.createEncoder();
    for (let i = 0; i < arr.length; i++) {
      const v = arr[i];
      if (typeof v === 'string') {
        encoding__namespace.writeUint8(encoder, YEncodingString);
        encoding__namespace.writeVarString(encoder, v);
      } else /* istanbul ignore else */ if (typeof v === 'number') {
        encoding__namespace.writeUint8(encoder, YEncodingUint32);
        // 4-byte big-endian fixed width (see writeUint32BigEndian).
        writeUint32BigEndian(encoder, v);
      } else {
        throw new Error('Unexpected key value')
      }
    }
    return buffer$1.Buffer.from(encoding__namespace.toUint8Array(encoder))
  },
  decode: /** @param {Uint8Array} buf */ buf => {
    const decoder = decoding__namespace.createDecoder(buf);
    const key = [];
    // Read tag byte, then the matching payload, until the buffer is consumed.
    while (decoding__namespace.hasContent(decoder)) {
      switch (decoding__namespace.readUint8(decoder)) {
        case YEncodingString:
          key.push(decoding__namespace.readVarString(decoder));
          break
        case YEncodingUint32:
          key.push(readUint32BigEndian(decoder));
          break
      }
    }
    return key
  }
};
/**
* level returns an error if a value is not found.
*
* This helper method for level returns `null` instead if the key is not found.
*
* @param {any} db
* @param {any} key
*/
const levelGet = async (db, key) => {
let res;
try {
res = await db.get(key);
} catch (err) {
/* istanbul ignore else */
if (err.notFound) {
return null
} else {
throw err
}
}
return res
};
/**
* Level expects a Buffer, but in Yjs we typically work with Uint8Arrays.
*
* Since Level thinks that these are two entirely different things,
* we transform the Uint8array to a Buffer before storing it.
*
* @param {any} db
* @param {any} key
* @param {Uint8Array} val
*/
const levelPut = async (db, key, val) => db.put(key, buffer$1.Buffer.from(val));
/**
 * A "bulkier" implementation of level streams. Returns the result in one flush.
 *
 * @param {any} db
 * @param {object} opts
 * @return {Promise<Array<any>>}
 */
const getLevelBulkData = (db, opts) => promise__namespace.create((resolve, reject) => {
  /**
   * @type {Array<any>} entries
   */
  const entries = [];
  const stream = db.createReadStream(opts);
  stream.on('data', /** @param {any} data */ data => { entries.push(data); });
  stream.on('end', () => { resolve(entries); });
  stream.on('error', reject);
});
/**
 * Get all document updates for a specific document.
 *
 * @param {any} db
 * @param {string} docName
 * @param {any} [opts]
 * @return {Promise<Array<Buffer>>}
 */
const getLevelUpdates = (db, docName, opts = { values: true, keys: false }) => {
  // The update keyspace for a document spans clocks [0, BITS32).
  const range = {
    gte: createDocumentUpdateKey(docName, 0),
    lt: createDocumentUpdateKey(docName, binary__namespace.BITS32),
    ...opts
  };
  return getLevelBulkData(db, range)
};
/**
 * Enumerate all documents by scanning the state-vector keyspace ('v1_sv').
 *
 * @param {any} db
 * @param {boolean} values
 * @param {boolean} keys
 * @return {Promise<Array<any>>}
 */
const getAllDocs = (db, values, keys) =>
  getLevelBulkData(db, { gte: ['v1_sv'], lt: ['v1_sw'], keys, values });
/**
 * @param {any} db
 * @param {string} docName
 * @return {Promise<number>} Returns -1 if this document doesn't exist yet
 */
const getCurrentUpdateClock = async (db, docName) => {
  // Only the newest update key is needed: iterate in reverse with limit 1.
  const newest = await getLevelUpdates(db, docName, { keys: true, values: false, reverse: true, limit: 1 });
  // Update keys have the shape ['v1', docName, 'update', clock].
  return newest.length === 0 ? -1 : newest[0][3]
};
/**
* @param {any} db
* @param {Array<string|number>} gte Greater than or equal
* @param {Array<string|number>} lt lower than (not equal)
* @return {Promise<void>}
*/
const clearRange = async (db, gte, lt) => {
/* istanbul ignore else */
if (db.supports.clear) {
await db.clear({ gte, lt });
} else {
const keys = await getLevelBulkData(db, { values: false, keys: true, gte, lt });
const ops = keys.map(key => ({ type: 'del', key }));
await db.batch(ops);
}
};
/**
 * Remove the stored updates of docName whose clock lies in [from, to).
 *
 * @param {any} db
 * @param {string} docName
 * @param {number} from Greater than or equal
 * @param {number} to lower than (not equal)
 * @return {Promise<void>}
 */
const clearUpdatesRange = async (db, docName, from, to) =>
  clearRange(db, createDocumentUpdateKey(docName, from), createDocumentUpdateKey(docName, to));

/**
 * Create a unique key for a update message.
 * We encode the result using `keyEncoding` which expects an array.
 *
 * @param {string} docName
 * @param {number} clock must be unique
 * @return {Array<string|number>}
 */
function createDocumentUpdateKey (docName, clock) {
  return ['v1', docName, 'update', clock]
}

/**
 * Key under which a single metadata entry of a document is stored.
 *
 * @param {string} docName
 * @param {string} metaKey
 */
function createDocumentMetaKey (docName, metaKey) {
  return ['v1', docName, 'meta', metaKey]
}

/**
 * Exclusive upper bound of a document's metadata keyspace:
 * 'metb' sorts directly after 'meta' (simple trick).
 *
 * @param {string} docName
 */
function createDocumentMetaEndKey (docName) {
  return ['v1', docName, 'metb']
}

/**
 * We have a separate state vector key so we can iterate efficiently over all documents
 * @param {string} docName
 */
function createDocumentStateVectorKey (docName) {
  return ['v1_sv', docName]
}

/**
 * Smallest possible key belonging to a document.
 * @param {string} docName
 */
function createDocumentFirstKey (docName) {
  return ['v1', docName]
}

/**
 * We use this key as the upper limit of all keys that can be written.
 * Make sure that all document keys are smaller! Strings are encoded using varLength string encoding,
 * so we need to make sure that this key has the biggest size!
 *
 * @param {string} docName
 */
function createDocumentLastKey (docName) {
  return ['v1', docName, 'zzzzzzz']
}
// const emptyStateVector = (() => Y.encodeStateVector(new Y.Doc()))()

/**
 * For now this is a helper method that creates a Y.Doc and then re-encodes a document update.
 * In the future this will be handled by Yjs without creating a Y.Doc (constant memory consumption).
 *
 * @param {Array<Uint8Array>} updates
 * @return {{update:Uint8Array, sv: Uint8Array}}
 */
const mergeUpdates = (updates) => {
  const ydoc = new Y__namespace.Doc();
  // Apply every update inside a single transaction.
  ydoc.transact(() => {
    for (const update of updates) {
      Y__namespace.applyUpdate(ydoc, update);
    }
  });
  return { update: Y__namespace.encodeStateAsUpdate(ydoc), sv: Y__namespace.encodeStateVector(ydoc) }
};
/**
 * Persist a document's state vector together with the clock at which it was
 * computed.
 *
 * @param {any} db
 * @param {string} docName
 * @param {Uint8Array} sv state vector
 * @param {number} clock current clock of the document so we can determine when this statevector was created
 */
const writeStateVector = async (db, docName, sv, clock) => {
  const enc = encoding__namespace.createEncoder();
  // Stored layout: varUint clock, then varUint8Array state vector.
  encoding__namespace.writeVarUint(enc, clock);
  encoding__namespace.writeVarUint8Array(enc, sv);
  await levelPut(db, createDocumentStateVectorKey(docName), encoding__namespace.toUint8Array(enc));
};
/**
 * Inverse of writeStateVector: split a stored buffer back into its clock and
 * state-vector parts.
 *
 * @param {Uint8Array} buf
 * @return {{ sv: Uint8Array, clock: number }}
 */
const decodeLeveldbStateVector = buf => {
  const reader = decoding__namespace.createDecoder(buf);
  // Read order must mirror writeStateVector: clock first, then sv.
  const clock = decoding__namespace.readVarUint(reader);
  const sv = decoding__namespace.readVarUint8Array(reader);
  return { sv, clock }
};
/**
 * Load the stored { sv, clock } pair for a document.
 * Resolves to { sv: null, clock: -1 } when nothing has been stored yet.
 *
 * @param {any} db
 * @param {string} docName
 */
const readStateVector$1 = async (db, docName) => {
  const stored = await levelGet(db, createDocumentStateVectorKey(docName));
  // null: no state vector created yet or no document exists
  return stored === null ? { sv: null, clock: -1 } : decodeLeveldbStateVector(stored)
};
/**
 * Merge a full document state into storage: store it as a single update,
 * record the matching state vector, then delete the older update entries
 * that the merged update supersedes.
 *
 * @param {any} db
 * @param {string} docName
 * @param {Uint8Array} stateAsUpdate
 * @param {Uint8Array} stateVector
 * @return {Promise<number>} returns the clock of the flushed doc
 */
const flushDocument = async (db, docName, stateAsUpdate, stateVector) => {
  const clock = await storeUpdate(db, docName, stateAsUpdate);
  await writeStateVector(db, docName, stateVector, clock);
  await clearUpdatesRange(db, docName, 0, clock); // NOTE(review): an upstream comment claimed this was "intentionally not awaited", but the deletion IS awaited here
  return clock
};
/**
 * Append one update for `docName`, assigning it clock `current + 1`.
 * The first update for a document also writes an initial state-vector entry
 * so the document shows up in getAllDocs / getAllDocNames.
 *
 * @param {any} db
 * @param {string} docName
 * @param {Uint8Array} update
 * @return {Promise<number>} Returns the clock of the stored update
 */
const storeUpdate = async (db, docName, update) => {
  const clock = await getCurrentUpdateClock(db, docName);
  if (clock === -1) {
    // make sure that a state vector is always written, so we can search for available documents
    const ydoc = new Y__namespace.Doc();
    Y__namespace.applyUpdate(ydoc, update);
    const sv = Y__namespace.encodeStateVector(ydoc);
    await writeStateVector(db, docName, sv, 0);
  }
  await levelPut(db, createDocumentUpdateKey(docName, clock + 1), update);
  return clock + 1
};
/**
 * Persist Yjs documents in a level-compatible key/value store.
 *
 * Every public method funnels through `_transact`, which chains work onto a
 * single promise queue (`this.tr`) so operations execute one at a time.
 */
class LeveldbPersistence {
  /**
   * @param {string} location
   * @param {object} [opts]
   * @param {any} [opts.level] Level-compatible adapter. E.g. leveldown, level-rem, level-indexeddb. Defaults to `level`
   * @param {object} [opts.levelOptions] Options that are passed down to the level instance
   */
  constructor (location, /* istanbul ignore next */ { level = defaultLevel__default["default"], levelOptions = {} } = {}) {
    const db = level(location, { ...levelOptions, valueEncoding, keyEncoding });
    // Tail of the transaction queue; _transact appends to it.
    this.tr = promise__namespace.resolve();
    /**
     * Execute a transaction on a database. This will ensure that other processes are currently not writing.
     *
     * This is a private method and might change in the future.
     *
     * @todo only transact on the same room-name. Allow for concurrency of different rooms.
     *
     * @template T
     *
     * @param {function(any):Promise<T>} f A transaction that receives the db object
     * @return {Promise<T>}
     */
    this._transact = f => {
      const currTr = this.tr;
      this.tr = (async () => {
        await currTr;
        let res = /** @type {any} */ (null);
        try {
          res = await f(db);
        } catch (err) {
          /* istanbul ignore next */
          // A failed transaction resolves to null instead of rejecting, so the
          // queue never wedges for later callers.
          console.warn('Error during y-leveldb transaction', err);
        }
        return res
      })();
      return this.tr
    };
  }
  /**
   * Merge all stored updates of a document into a single update entry.
   *
   * @param {string} docName
   */
  flushDocument (docName) {
    return this._transact(async db => {
      const updates = await getLevelUpdates(db, docName);
      const { update, sv } = mergeUpdates(updates);
      await flushDocument(db, docName, update, sv);
    })
  }
  /**
   * Materialize a Y.Doc from all stored updates. Triggers a flush when more
   * than PREFERRED_TRIM_SIZE updates have accumulated.
   *
   * @param {string} docName
   * @return {Promise<Y.Doc>}
   */
  getYDoc (docName) {
    return this._transact(async db => {
      const updates = await getLevelUpdates(db, docName);
      const ydoc = new Y__namespace.Doc();
      ydoc.transact(() => {
        for (let i = 0; i < updates.length; i++) {
          Y__namespace.applyUpdate(ydoc, updates[i]);
        }
      });
      if (updates.length > PREFERRED_TRIM_SIZE) {
        await flushDocument(db, docName, Y__namespace.encodeStateAsUpdate(ydoc), Y__namespace.encodeStateVector(ydoc));
      }
      return ydoc
    })
  }
  /**
   * @param {string} docName
   * @return {Promise<Uint8Array>}
   */
  getStateVector (docName) {
    return this._transact(async db => {
      const { clock, sv } = await readStateVector$1(db, docName);
      let curClock = -1;
      if (sv !== null) {
        curClock = await getCurrentUpdateClock(db, docName);
      }
      if (sv !== null && clock === curClock) {
        // Stored state vector matches the newest update clock: still valid.
        return sv
      } else {
        // current state vector is outdated - recompute it from the updates
        const updates = await getLevelUpdates(db, docName);
        const { update, sv } = mergeUpdates(updates);
        await flushDocument(db, docName, update, sv);
        return sv
      }
    })
  }
  /**
   * @param {string} docName
   * @param {Uint8Array} update
   * @return {Promise<number>} Returns the clock of the stored update
   */
  storeUpdate (docName, update) {
    return this._transact(db => storeUpdate(db, docName, update))
  }
  /**
   * Encode the changes missing from the state described by `stateVector`.
   *
   * @param {string} docName
   * @param {Uint8Array} stateVector
   */
  async getDiff (docName, stateVector) {
    const ydoc = await this.getYDoc(docName);
    return Y__namespace.encodeStateAsUpdate(ydoc, stateVector)
  }
  /**
   * Delete a document: its state vector plus all updates and metadata.
   *
   * @param {string} docName
   * @return {Promise<void>}
   */
  clearDocument (docName) {
    return this._transact(async db => {
      await db.del(createDocumentStateVectorKey(docName));
      await clearRange(db, createDocumentFirstKey(docName), createDocumentLastKey(docName));
    })
  }
  /**
   * @param {string} docName
   * @param {string} metaKey
   * @param {any} value
   * @return {Promise<void>}
   */
  setMeta (docName, metaKey, value) {
    return this._transact(db => levelPut(db, createDocumentMetaKey(docName, metaKey), buffer__namespace.encodeAny(value)))
  }
  /**
   * @param {string} docName
   * @param {string} metaKey
   * @return {Promise<any>}
   */
  delMeta (docName, metaKey) {
    return this._transact(db => db.del(createDocumentMetaKey(docName, metaKey)))
  }
  /**
   * @param {string} docName
   * @param {string} metaKey
   * @return {Promise<any>} Resolves to undefined when the key does not exist.
   */
  getMeta (docName, metaKey) {
    return this._transact(async db => {
      const res = await levelGet(db, createDocumentMetaKey(docName, metaKey));
      if (res === null) {
        return // return void
      }
      return buffer__namespace.decodeAny(res)
    })
  }
  /**
   * @return {Promise<Array<string>>}
   */
  getAllDocNames () {
    return this._transact(async db => {
      const docKeys = await getAllDocs(db, false, true);
      // State vector keys have the shape ['v1_sv', docName].
      return docKeys.map(key => key[1])
    })
  }
  /**
   * @return {Promise<Array<{ name: string, sv: Uint8Array, clock: number }>>}
   */
  getAllDocStateVectors () {
    return this._transact(async db => {
      const docs = /** @type {any} */ (await getAllDocs(db, true, true));
      return docs.map(doc => {
        const { sv, clock } = decodeLeveldbStateVector(doc.value);
        return { name: doc.key[1], sv, clock }
      })
    })
  }
  /**
   * @param {string} docName
   * @return {Promise<Map<string, any>>}
   */
  getMetas (docName) {
    return this._transact(async db => {
      const data = await getLevelBulkData(db, {
        gte: createDocumentMetaKey(docName, ''),
        lt: createDocumentMetaEndKey(docName),
        keys: true,
        values: true
      });
      const metas = new Map();
      // Meta keys have the shape ['v1', docName, 'meta', metaKey].
      data.forEach(v => { metas.set(v.key[3], buffer__namespace.decodeAny(v.value)); });
      return metas
    })
  }
  /**
   * Close connection to a leveldb database and discard all state and bindings
   *
   * @return {Promise<void>}
   */
  destroy () {
    return this._transact(db => db.close())
  }
  /**
   * Delete all data in database.
   */
  clearAll () {
    return this._transact(async db => db.clear())
  }
}
// When changing this, also make sure to change the file in gitignore
const storageName = 'tmp-leveldb-storage';

/**
 * Read state vector from Decoder and return as Map. This is a helper method that will be exported by Yjs directly.
 *
 * @param {decoding.Decoder} decoder
 * @return {Map<number,number>} Maps `client` to the number next expected `clock` from that client.
 *
 * @function
 */
const readStateVector = decoder => {
  const ss = new Map();
  let remaining = decoding__namespace.readVarUint(decoder);
  while (remaining-- > 0) {
    // Each entry is a (client, clock) pair of varUints, in that order.
    const client = decoding__namespace.readVarUint(decoder);
    ss.set(client, decoding__namespace.readVarUint(decoder));
  }
  return ss
};

/**
 * Read decodedState and return State as Map.
 *
 * @param {Uint8Array} decodedState
 * @return {Map<number,number>} Maps `client` to the number next expected `clock` from that client.
 *
 * @function
 */
const decodeStateVector = decodedState =>
  readStateVector(decoding__namespace.createDecoder(decodedState));

/**
 * Flushes all updates to ldb and deletes items from updates array.
 *
 * @param {LeveldbPersistence} ldb
 * @param {string} docName
 * @param {Array<Uint8Array>} updates
 */
const flushUpdatesHelper = (ldb, docName, updates) =>
  Promise.all(updates.splice(0).map(update => ldb.storeUpdate(docName, update)));
/**
 * End-to-end check of update storage: store updates, read back the state
 * vector and document, then verify clearDocument removes every 'v1' key.
 *
 * @param {t.TestCase} tc
 */
const testLeveldbUpdateStorage = async tc => {
  const docName = tc.testName;
  const ydoc1 = new Y__namespace.Doc();
  ydoc1.clientID = 0; // so we can check the state vector
  const leveldbPersistence = new LeveldbPersistence(storageName);
  // clear all data, so we can check allData later
  await leveldbPersistence._transact(async db => db.clear());
  t__namespace.compareArrays([], await leveldbPersistence.getAllDocNames());
  const updates = [];
  ydoc1.on('update', update => {
    updates.push(update);
  });
  ydoc1.getArray('arr').insert(0, [1]);
  ydoc1.getArray('arr').insert(0, [2]);
  await flushUpdatesHelper(leveldbPersistence, docName, updates);
  const encodedSv = await leveldbPersistence.getStateVector(docName);
  const sv = decodeStateVector(encodedSv);
  // Exactly one client (id 0) with two inserts -> clock 2.
  t__namespace.assert(sv.size === 1);
  t__namespace.assert(sv.get(0) === 2);
  const ydoc2 = await leveldbPersistence.getYDoc(docName);
  t__namespace.compareArrays(ydoc2.getArray('arr').toArray(), [2, 1]);
  const allData = await leveldbPersistence._transact(async db => getLevelBulkData(db, { gte: ['v1'], lt: ['v2'] }));
  t__namespace.assert(allData.length > 0, 'some data exists');
  t__namespace.compareArrays([docName], await leveldbPersistence.getAllDocNames());
  await leveldbPersistence.clearDocument(docName);
  t__namespace.compareArrays([], await leveldbPersistence.getAllDocNames());
  // The whole 'v1' keyspace must be empty after clearDocument.
  const allData2 = await leveldbPersistence._transact(async db => getLevelBulkData(db, { gte: ['v1'], lt: ['v2'] }));
  console.log(allData2);
  t__namespace.assert(allData2.length === 0, 'really deleted all data');
  await leveldbPersistence.destroy();
};
/**
 * Store many (7 * PREFERRED_TRIM_SIZE) updates, then verify getYDoc and
 * flushDocument merge them down to a single stored update while preserving
 * content and state vector.
 *
 * @param {t.TestCase} tc
 */
const testEncodeManyUpdates = async tc => {
  const N = PREFERRED_TRIM_SIZE * 7;
  const docName = tc.testName;
  const ydoc1 = new Y__namespace.Doc();
  ydoc1.clientID = 0; // so we can check the state vector
  const leveldbPersistence = new LeveldbPersistence(storageName);
  await leveldbPersistence.clearDocument(docName);
  const updates = [];
  ydoc1.on('update', update => {
    updates.push(update);
  });
  await flushUpdatesHelper(leveldbPersistence, docName, updates);
  // Stored update keys carry the clock at index 3; clocks are consecutive.
  const keys = await leveldbPersistence._transact(db => getLevelUpdates(db, docName, { keys: true, values: false }));
  for (let i = 0; i < keys.length; i++) {
    t__namespace.assert(keys[i][3] === i);
  }
  const yarray = ydoc1.getArray('arr');
  for (let i = 0; i < N; i++) {
    yarray.insert(0, [i]);
  }
  await flushUpdatesHelper(leveldbPersistence, docName, updates);
  const ydoc2 = await leveldbPersistence.getYDoc(docName);
  t__namespace.assert(ydoc2.getArray('arr').length === N);
  await leveldbPersistence.flushDocument(docName);
  // After an explicit flush all updates are merged into a single entry.
  const mergedKeys = await leveldbPersistence._transact(db => getLevelUpdates(db, docName, { keys: true, values: false }));
  t__namespace.assert(mergedKeys.length === 1);
  // getYDoc still works after flush/merge
  const ydoc3 = await leveldbPersistence.getYDoc(docName);
  t__namespace.assert(ydoc3.getArray('arr').length === N);
  // test if state vector is properly generated
  t__namespace.compare(Y__namespace.encodeStateVector(ydoc1), await leveldbPersistence.getStateVector(docName));
  // add new update so that sv needs to be updated
  ydoc1.getArray('arr').insert(0, ['new']);
  await flushUpdatesHelper(leveldbPersistence, docName, updates);
  t__namespace.compare(Y__namespace.encodeStateVector(ydoc1), await leveldbPersistence.getStateVector(docName));
  await leveldbPersistence.destroy();
};
/**
 * Snapshot a partially-synced doc, store more changes, then verify that
 * applying getDiff's result brings the snapshot fully up to date.
 *
 * @param {t.TestCase} tc
 */
const testDiff = async tc => {
  const N = PREFERRED_TRIM_SIZE * 2; // ensure that the document is flushed at least once
  const docName = tc.testName;
  const ydoc1 = new Y__namespace.Doc();
  ydoc1.clientID = 0; // so we can check the state vector
  const leveldbPersistence = new LeveldbPersistence(storageName);
  await leveldbPersistence.clearDocument(docName);
  const updates = [];
  ydoc1.on('update', update => {
    updates.push(update);
  });
  const yarray = ydoc1.getArray('arr');
  // create N changes
  for (let i = 0; i < N; i++) {
    yarray.insert(0, [i]);
  }
  await flushUpdatesHelper(leveldbPersistence, docName, updates);
  // create partially merged doc
  const ydoc2 = await leveldbPersistence.getYDoc(docName);
  // another N updates
  for (let i = 0; i < N; i++) {
    yarray.insert(0, [i]);
  }
  await flushUpdatesHelper(leveldbPersistence, docName, updates);
  // apply diff to doc
  const diffUpdate = await leveldbPersistence.getDiff(docName, Y__namespace.encodeStateVector(ydoc2));
  Y__namespace.applyUpdate(ydoc2, diffUpdate);
  t__namespace.assert(ydoc2.getArray('arr').length === ydoc1.getArray('arr').length);
  t__namespace.assert(ydoc2.getArray('arr').length === N * 2);
  await leveldbPersistence.destroy();
};
/**
 * Exercise the metadata API: set (with overwrite), get, getMetas, delMeta,
 * and clearing all metadata via clearDocument.
 *
 * @param {t.TestCase} tc
 */
const testMetas = async tc => {
  const docName = tc.testName;
  const leveldbPersistence = new LeveldbPersistence(storageName);
  await leveldbPersistence.clearDocument(docName);
  await leveldbPersistence.setMeta(docName, 'a', 4);
  await leveldbPersistence.setMeta(docName, 'a', 5); // overwrites the previous 'a'
  await leveldbPersistence.setMeta(docName, 'b', 4);
  const a = await leveldbPersistence.getMeta(docName, 'a');
  const b = await leveldbPersistence.getMeta(docName, 'b');
  t__namespace.assert(a === 5);
  t__namespace.assert(b === 4);
  const metas = await leveldbPersistence.getMetas(docName);
  t__namespace.assert(metas.size === 2);
  t__namespace.assert(metas.get('a') === 5);
  t__namespace.assert(metas.get('b') === 4);
  await leveldbPersistence.delMeta(docName, 'a');
  const c = await leveldbPersistence.getMeta(docName, 'a');
  t__namespace.assert(c === undefined); // deleted meta reads back as undefined
  await leveldbPersistence.clearDocument(docName);
  const metasEmpty = await leveldbPersistence.getMetas(docName);
  t__namespace.assert(metasEmpty.size === 0);
  await leveldbPersistence.destroy();
};
/**
 * A document stored via storeUpdate must get a state-vector entry (so it is
 * listed by getAllDocNames / getAllDocStateVectors) even before any flush,
 * and clearDocument must remove it again.
 *
 * @param {t.TestCase} tc
 */
const testDeleteEmptySv = async tc => {
  const docName = tc.testName;
  const leveldbPersistence = new LeveldbPersistence(storageName);
  await leveldbPersistence.clearAll();
  const ydoc = new Y__namespace.Doc();
  ydoc.clientID = 0;
  ydoc.getArray('arr').insert(0, [1]);
  const singleUpdate = Y__namespace.encodeStateAsUpdate(ydoc);
  t__namespace.compareArrays([], await leveldbPersistence.getAllDocNames());
  await leveldbPersistence.storeUpdate(docName, singleUpdate);
  t__namespace.compareArrays([docName], await leveldbPersistence.getAllDocNames());
  const docSvs = await leveldbPersistence.getAllDocStateVectors();
  t__namespace.assert(docSvs.length === 1);
  t__namespace.compare([{ name: docName, clock: 0, sv: Y__namespace.encodeStateVector(ydoc) }], docSvs);
  await leveldbPersistence.clearDocument(docName);
  t__namespace.compareArrays([], await leveldbPersistence.getAllDocNames());
  await leveldbPersistence.destroy();
};
/**
 * Requesting the state vector of a missing document must still succeed;
 * the test expects the empty state vector to encode to a single byte.
 *
 * @param {t.TestCase} tc
 */
const testMisc = async tc => {
  const docName = tc.testName;
  const leveldbPersistence = new LeveldbPersistence(storageName);
  await leveldbPersistence.clearDocument(docName);
  const sv = await leveldbPersistence.getStateVector('does not exist');
  t__namespace.assert(sv.byteLength === 1);
  await leveldbPersistence.destroy();
};
// Test-suite registry consumed by lib0/testing.
var leveldb = /*#__PURE__*/Object.freeze({
  __proto__: null,
  testLeveldbUpdateStorage: testLeveldbUpdateStorage,
  testEncodeManyUpdates: testEncodeManyUpdates,
  testDiff: testDiff,
  testMetas: testMetas,
  testDeleteEmptySv: testDeleteEmptySv,
  testMisc: testMisc
});
// In the browser, render test output into an on-page console.
if (environment_js.isBrowser) {
  log__namespace.createVConsole(document.body);
}
// Run all suites; under node, exit nonzero when any test failed.
t.runTests({
  leveldb
}).then(success => {
  /* istanbul ignore next */
  if (environment_js.isNode) {
    process.exit(success ? 0 : 1);
  }
});
//# sourceMappingURL=test.cjs.map

1
node_modules/y-leveldb/dist/test.cjs.map generated vendored Normal file

File diff suppressed because one or more lines are too long

23086
node_modules/y-leveldb/dist/test.js generated vendored Normal file

File diff suppressed because one or more lines are too long

1
node_modules/y-leveldb/dist/test.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

2
node_modules/y-leveldb/dist/tests/index.d.ts generated vendored Normal file
View File

@@ -0,0 +1,2 @@
// Empty export marks this declaration file as a module with no exports.
export {};
//# sourceMappingURL=index.d.ts.map

1
node_modules/y-leveldb/dist/tests/index.d.ts.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../tests/index.js"],"names":[],"mappings":""}

View File

@@ -0,0 +1,8 @@
// Test entry points registered with lib0/testing; each resolves when its
// scenario completes.
export function testLeveldbUpdateStorage(tc: t.TestCase): Promise<void>;
export function testEncodeManyUpdates(tc: t.TestCase): Promise<void>;
export function testDiff(tc: t.TestCase): Promise<void>;
export function testMetas(tc: t.TestCase): Promise<void>;
export function testDeleteEmptySv(tc: t.TestCase): Promise<void>;
export function testMisc(tc: any): Promise<void>;
import * as t from "lib0/testing";
//# sourceMappingURL=y-leveldb.tests.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"y-leveldb.tests.d.ts","sourceRoot":"","sources":["../../tests/y-leveldb.tests.js"],"names":[],"mappings":"AAmDO,6CAFI,CAAC,CAAC,QAAQ,iBAyCpB;AAKM,0CAFI,CAAC,CAAC,QAAQ,iBAgDpB;AAKM,6BAFI,CAAC,CAAC,QAAQ,iBAuCpB;AAKM,8BAFI,CAAC,CAAC,QAAQ,iBA0BpB;AAKM,sCAFI,CAAC,CAAC,QAAQ,iBAsBpB;AAEM,iDASN"}

602
node_modules/y-leveldb/dist/y-leveldb.cjs generated vendored Normal file
View File

@@ -0,0 +1,602 @@
'use strict';
Object.defineProperty(exports, '__esModule', { value: true });
var Y = require('yjs');
var encoding = require('lib0/dist/encoding.cjs');
var decoding = require('lib0/dist/decoding.cjs');
var binary = require('lib0/dist/binary.cjs');
var promise = require('lib0/dist/promise.cjs');
var buffer$1 = require('lib0/dist/buffer.cjs');
var defaultLevel = require('level');
var buffer = require('buffer');
/**
 * Wrap a CommonJS export so it can be consumed like an ES-module default import.
 *
 * @param {any} e
 * @return {any} `e` itself when it already carries a `default` key, otherwise `{ default: e }`
 */
function _interopDefaultLegacy (e) {
  const looksLikeModule = e && typeof e === 'object' && 'default' in e;
  return looksLikeModule ? e : { 'default': e };
}
/**
 * Convert a CommonJS module object into a frozen namespace object mimicking
 * `import * as ns` semantics.
 *
 * ES modules (`__esModule` set) are returned untouched. Otherwise every own
 * key except 'default' is re-exposed on a prototype-less object (via getters,
 * preserving live bindings) and the module itself becomes the default export.
 *
 * @param {any} e
 * @return {any}
 */
function _interopNamespace(e) {
  if (e && e.__esModule) return e;
  const ns = Object.create(null);
  if (e) {
    for (const key of Object.keys(e)) {
      if (key === 'default') continue;
      const desc = Object.getOwnPropertyDescriptor(e, key);
      Object.defineProperty(ns, key, desc.get
        ? desc
        : {
            enumerable: true,
            get: function () { return e[key]; }
          });
    }
  }
  ns['default'] = e;
  return Object.freeze(ns);
}
// Namespace wrappers so the CJS requires above behave like `import * as ns` bindings.
var Y__namespace = /*#__PURE__*/_interopNamespace(Y);
var encoding__namespace = /*#__PURE__*/_interopNamespace(encoding);
var decoding__namespace = /*#__PURE__*/_interopNamespace(decoding);
var binary__namespace = /*#__PURE__*/_interopNamespace(binary);
var promise__namespace = /*#__PURE__*/_interopNamespace(promise);
var buffer__namespace = /*#__PURE__*/_interopNamespace(buffer$1);
var defaultLevel__default = /*#__PURE__*/_interopDefaultLegacy(defaultLevel);
// Once a document accumulates more than this many stored update entries it is
// compacted ("flushed") into a single merged update (see getYDoc / flushDocument).
const PREFERRED_TRIM_SIZE = 500;
// Type tags used by `keyEncoding` to mark each key component.
const YEncodingString = 0;
const YEncodingUint32 = 1;
// Pass-through value encoding: values are stored and returned as raw binary.
const valueEncoding = {
  buffer: true,
  type: 'y-value',
  encode: /** @param {any} data */ data => data,
  decode: /** @param {any} data */ data => data
};
/**
 * Write four bytes as an unsigned integer in big endian order.
 * (most significant byte first)
 *
 * @function
 * @param {encoding.Encoder} encoder
 * @param {number} num The number that is to be encoded.
 */
const writeUint32BigEndian = (encoder, num) => {
  for (let i = 3; i >= 0; i--) {
    encoding__namespace.write(encoder, (num >>> (8 * i)) & binary__namespace.BITS8);
  }
};
/**
 * Read 4 bytes as unsigned integer in big endian order.
 * (most significant byte first)
 *
 * @todo use lib0/decoding instead
 *
 * @function
 * @param {decoding.Decoder} decoder
 * @return {number} An unsigned integer.
 */
const readUint32BigEndian = decoder => {
  let uint = 0;
  for (let i = 0; i < 4; i++) {
    // shift previous bytes up and fold in the next one; >>> 0 keeps it unsigned
    uint = ((uint << 8) | decoder.arr[decoder.pos + i]) >>> 0;
  }
  decoder.pos += 4;
  return uint
};
/**
 * Level key codec for the array-shaped keys used by y-leveldb.
 *
 * A key is an array of strings and numbers. Each component is serialized as a
 * one-byte type tag (YEncodingString / YEncodingUint32) followed by the
 * payload; numbers use big-endian uint32 so byte-wise key ordering matches
 * numeric ordering.
 */
const keyEncoding = {
  buffer: true,
  type: 'y-keys',
  /* istanbul ignore next */
  encode: /** @param {Array<string|number>} arr */ arr => {
    const encoder = encoding__namespace.createEncoder();
    arr.forEach(component => {
      if (typeof component === 'string') {
        encoding__namespace.writeUint8(encoder, YEncodingString);
        encoding__namespace.writeVarString(encoder, component);
      } else /* istanbul ignore else */ if (typeof component === 'number') {
        encoding__namespace.writeUint8(encoder, YEncodingUint32);
        writeUint32BigEndian(encoder, component);
      } else {
        throw new Error('Unexpected key value')
      }
    });
    return buffer.Buffer.from(encoding__namespace.toUint8Array(encoder))
  },
  decode: /** @param {Uint8Array} buf */ buf => {
    const decoder = decoding__namespace.createDecoder(buf);
    const key = [];
    while (decoding__namespace.hasContent(decoder)) {
      const tag = decoding__namespace.readUint8(decoder);
      if (tag === YEncodingString) {
        key.push(decoding__namespace.readVarString(decoder));
      } else if (tag === YEncodingUint32) {
        key.push(readUint32BigEndian(decoder));
      }
    }
    return key
  }
};
/**
* level returns an error if a value is not found.
*
* This helper method for level returns `null` instead if the key is not found.
*
* @param {any} db
* @param {any} key
*/
const levelGet = async (db, key) => {
let res;
try {
res = await db.get(key);
} catch (err) {
/* istanbul ignore else */
if (err.notFound) {
return null
} else {
throw err
}
}
return res
};
/**
 * Level expects a Buffer, but in Yjs we typically work with Uint8Arrays.
 *
 * Since Level thinks that these are two entirely different things,
 * we transform the Uint8array to a Buffer before storing it.
 *
 * @param {any} db
 * @param {any} key
 * @param {Uint8Array} val
 * @return {Promise<any>}
 */
const levelPut = async (db, key, val) => {
  const asBuffer = buffer.Buffer.from(val);
  return db.put(key, asBuffer)
};
/**
 * A "bulkier" implementation of level streams. Returns the result in one flush.
 *
 * Collects every chunk emitted by the read stream into an array, resolves on
 * 'end', rejects on 'error'.
 *
 * @param {any} db
 * @param {object} opts options forwarded to `db.createReadStream`
 * @return {Promise<Array<any>>}
 */
const getLevelBulkData = (db, opts) => promise__namespace.create((resolve, reject) => {
  /**
   * @type {Array<any>} collected
   */
  const collected = [];
  const stream = db.createReadStream(opts);
  stream.on('data', /** @param {any} data */ data => { collected.push(data); });
  stream.on('end', () => { resolve(collected); });
  stream.on('error', reject);
});
/**
 * Get all document updates for a specific document.
 *
 * @param {any} db
 * @param {string} docName
 * @param {any} [opts] read-stream options (keys/values/reverse/limit, ...)
 * @return {Promise<Array<Buffer>>}
 */
const getLevelUpdates = (db, docName, opts = { values: true, keys: false }) => {
  // range covering every possible update clock of this document
  const range = {
    gte: createDocumentUpdateKey(docName, 0),
    lt: createDocumentUpdateKey(docName, binary__namespace.BITS32)
  };
  return getLevelBulkData(db, Object.assign(range, opts))
};
/**
 * Get all state-vector entries — one per stored document.
 *
 * @param {any} db
 * @param {boolean} values include values in the result
 * @param {boolean} keys include keys in the result
 * @return {Promise<Array<any>>}
 */
const getAllDocs = (db, values, keys) => getLevelBulkData(db, {
  keys,
  values,
  // 'v1_sw' sorts directly after every 'v1_sv' key => covers all documents
  gte: ['v1_sv'],
  lt: ['v1_sw']
});
/**
 * @param {any} db
 * @param {string} docName
 * @return {Promise<number>} Returns -1 if this document doesn't exist yet
 */
const getCurrentUpdateClock = async (db, docName) => {
  // only fetch the newest update key; its last component is the clock
  const newestKeys = await getLevelUpdates(db, docName, { keys: true, values: false, reverse: true, limit: 1 });
  return newestKeys.length === 0 ? -1 : newestKeys[0][3]
};
/**
 * Delete all entries whose keys fall in [gte, lt).
 *
 * Uses the adapter's native `clear` when supported, otherwise collects the
 * keys and deletes them in one batch.
 *
 * @param {any} db
 * @param {Array<string|number>} gte Greater than or equal
 * @param {Array<string|number>} lt lower than (not equal)
 * @return {Promise<void>}
 */
const clearRange = async (db, gte, lt) => {
  /* istanbul ignore else */
  if (db.supports.clear) {
    await db.clear({ gte, lt });
    return
  }
  const keysToDelete = await getLevelBulkData(db, { values: false, keys: true, gte, lt });
  await db.batch(keysToDelete.map(key => ({ type: 'del', key })));
};
/**
 * Delete stored updates of `docName` with clocks in [from, to).
 *
 * @param {any} db
 * @param {string} docName
 * @param {number} from Greater than or equal
 * @param {number} to lower than (not equal)
 * @return {Promise<void>}
 */
const clearUpdatesRange = async (db, docName, from, to) => {
  return clearRange(db, createDocumentUpdateKey(docName, from), createDocumentUpdateKey(docName, to))
};
/**
 * Create a unique key for a update message.
 * We encode the result using `keyEncoding` which expects an array.
 *
 * @param {string} docName
 * @param {number} clock must be unique
 * @return {Array<string|number>}
 */
function createDocumentUpdateKey (docName, clock) {
  return ['v1', docName, 'update', clock]
}
/**
 * Key under which a single meta entry of a document is stored.
 *
 * @param {string} docName
 * @param {string} metaKey
 * @return {Array<string>}
 */
function createDocumentMetaKey (docName, metaKey) {
  return ['v1', docName, 'meta', metaKey]
}
/**
 * Exclusive upper bound for iterating a document's meta entries:
 * 'metb' sorts directly after every 'meta' key (simple trick).
 *
 * @param {string} docName
 * @return {Array<string>}
 */
function createDocumentMetaEndKey (docName) {
  return ['v1', docName, 'metb']
}
/**
 * We have a separate state vector key so we can iterate efficiently over all documents
 *
 * @param {string} docName
 * @return {Array<string>}
 */
function createDocumentStateVectorKey (docName) {
  return ['v1_sv', docName]
}
/**
 * Smallest possible key belonging to `docName` (lower bound of its key range).
 *
 * @param {string} docName
 * @return {Array<string>}
 */
function createDocumentFirstKey (docName) {
  return ['v1', docName]
}
/**
 * We use this key as the upper limit of all keys that can be written.
 * Make sure that all document keys are smaller! Strings are encoded using varLength string encoding,
 * so we need to make sure that this key has the biggest size!
 *
 * @param {string} docName
 * @return {Array<string>}
 */
function createDocumentLastKey (docName) {
  return ['v1', docName, 'zzzzzzz']
}
// const emptyStateVector = (() => Y.encodeStateVector(new Y.Doc()))()
/**
 * For now this is a helper method that creates a Y.Doc and then re-encodes a document update.
 * In the future this will be handled by Yjs without creating a Y.Doc (constant memory consumption).
 *
 * @param {Array<Uint8Array>} updates
 * @return {{update:Uint8Array, sv: Uint8Array}}
 */
const mergeUpdates = (updates) => {
  const ydoc = new Y__namespace.Doc();
  // apply everything in one transaction, then re-encode the merged state
  ydoc.transact(() => {
    updates.forEach(update => { Y__namespace.applyUpdate(ydoc, update); });
  });
  return {
    update: Y__namespace.encodeStateAsUpdate(ydoc),
    sv: Y__namespace.encodeStateVector(ydoc)
  }
};
/**
 * Persist a document's state vector together with the clock it was computed at.
 * The encoding (clock first, then the state vector payload) is mirrored by
 * `decodeLeveldbStateVector`.
 *
 * @param {any} db
 * @param {string} docName
 * @param {Uint8Array} sv state vector
 * @param {number} clock current clock of the document so we can determine when this statevector was created
 */
const writeStateVector = async (db, docName, sv, clock) => {
  const encoder = encoding__namespace.createEncoder();
  encoding__namespace.writeVarUint(encoder, clock);
  encoding__namespace.writeVarUint8Array(encoder, sv);
  await levelPut(db, createDocumentStateVectorKey(docName), encoding__namespace.toUint8Array(encoder));
};
/**
 * Inverse of `writeStateVector`: split a stored buffer into clock + state vector.
 *
 * @param {Uint8Array} buf
 * @return {{ sv: Uint8Array, clock: number }}
 */
const decodeLeveldbStateVector = buf => {
  const decoder = decoding__namespace.createDecoder(buf);
  const clock = decoding__namespace.readVarUint(decoder);
  const sv = decoding__namespace.readVarUint8Array(decoder);
  return { sv, clock }
};
/**
 * Read the stored state vector of a document.
 *
 * @param {any} db
 * @param {string} docName
 * @return {Promise<{ sv: Uint8Array|null, clock: number }>} `{ sv: null, clock: -1 }` when the document has no stored state vector
 */
const readStateVector = async (db, docName) => {
  const stored = await levelGet(db, createDocumentStateVectorKey(docName));
  if (stored === null) {
    // no state vector created yet or no document exists
    return { sv: null, clock: -1 }
  }
  return decodeLeveldbStateVector(stored)
};
/**
 * Merge the document's stored updates into a single update + state vector,
 * then delete the superseded update entries.
 *
 * @param {any} db
 * @param {string} docName
 * @param {Uint8Array} stateAsUpdate
 * @param {Uint8Array} stateVector
 * @return {Promise<number>} returns the clock of the flushed doc
 */
const flushDocument = async (db, docName, stateAsUpdate, stateVector) => {
  const clock = await storeUpdate(db, docName, stateAsUpdate);
  await writeStateVector(db, docName, stateVector, clock);
  await clearUpdatesRange(db, docName, 0, clock); // removes all updates with a clock below `clock`; the merged update stored above survives
  return clock
};
/**
 * Append a single update for `docName` under the next free clock.
 *
 * @param {any} db
 * @param {string} docName
 * @param {Uint8Array} update
 * @return {Promise<number>} Returns the clock of the stored update
 */
const storeUpdate = async (db, docName, update) => {
  const clock = await getCurrentUpdateClock(db, docName);
  if (clock === -1) {
    // make sure that a state vector is always written, so we can search for available documents
    const ydoc = new Y__namespace.Doc();
    Y__namespace.applyUpdate(ydoc, update);
    await writeStateVector(db, docName, Y__namespace.encodeStateVector(ydoc), 0);
  }
  const nextClock = clock + 1;
  await levelPut(db, createDocumentUpdateKey(docName, nextClock), update);
  return nextClock
};
/**
 * Yjs document persistence on top of any Level-compatible database.
 *
 * All public methods funnel through a single promise chain (`this.tr`), so
 * two operations on the same instance never access the database concurrently.
 */
class LeveldbPersistence {
  /**
   * @param {string} location
   * @param {object} [opts]
   * @param {any} [opts.level] Level-compatible adapter. E.g. leveldown, level-rem, level-indexeddb. Defaults to `level`
   * @param {object} [opts.levelOptions] Options that are passed down to the level instance
   */
  constructor (location, /* istanbul ignore next */ { level = defaultLevel__default["default"], levelOptions = {} } = {}) {
    const db = level(location, { ...levelOptions, valueEncoding, keyEncoding });
    // tail of the transaction queue; every transaction awaits its predecessor
    this.tr = promise__namespace.resolve();
    /**
     * Execute an transaction on a database. This will ensure that other processes are currently not writing.
     *
     * This is a private method and might change in the future.
     *
     * @todo only transact on the same room-name. Allow for concurrency of different rooms.
     *
     * @template T
     *
     * @param {function(any):Promise<T>} f A transaction that receives the db object
     * @return {Promise<T>} resolves with f's result, or null when f threw (the error is only logged)
     */
    this._transact = f => {
      const currTr = this.tr;
      this.tr = (async () => {
        await currTr;
        let res = /** @type {any} */ (null);
        try {
          res = await f(db);
        } catch (err) {
          /* istanbul ignore next */
          // NOTE: errors are swallowed here; the transaction resolves with null.
          console.warn('Error during y-leveldb transaction', err);
        }
        return res
      })();
      return this.tr
    };
  }
  /**
   * Merge all stored updates of `docName` into a single update entry.
   *
   * @param {string} docName
   */
  flushDocument (docName) {
    return this._transact(async db => {
      const updates = await getLevelUpdates(db, docName);
      const { update, sv } = mergeUpdates(updates);
      await flushDocument(db, docName, update, sv);
    })
  }
  /**
   * Load a document by applying all of its stored updates to a fresh Y.Doc.
   * Compacts the stored updates when there are more than PREFERRED_TRIM_SIZE.
   *
   * @param {string} docName
   * @return {Promise<Y.Doc>}
   */
  getYDoc (docName) {
    return this._transact(async db => {
      const updates = await getLevelUpdates(db, docName);
      const ydoc = new Y__namespace.Doc();
      ydoc.transact(() => {
        for (let i = 0; i < updates.length; i++) {
          Y__namespace.applyUpdate(ydoc, updates[i]);
        }
      });
      if (updates.length > PREFERRED_TRIM_SIZE) {
        await flushDocument(db, docName, Y__namespace.encodeStateAsUpdate(ydoc), Y__namespace.encodeStateVector(ydoc));
      }
      return ydoc
    })
  }
  /**
   * Return the document's state vector, recomputing (and flushing) it when the
   * stored one is older than the newest stored update.
   *
   * @param {string} docName
   * @return {Promise<Uint8Array>}
   */
  getStateVector (docName) {
    return this._transact(async db => {
      const { clock, sv } = await readStateVector(db, docName);
      let curClock = -1;
      if (sv !== null) {
        curClock = await getCurrentUpdateClock(db, docName);
      }
      if (sv !== null && clock === curClock) {
        // stored state vector matches the newest update clock => still valid
        return sv
      } else {
        // current state vector is outdated
        const updates = await getLevelUpdates(db, docName);
        const { update, sv } = mergeUpdates(updates);
        await flushDocument(db, docName, update, sv);
        return sv
      }
    })
  }
  /**
   * @param {string} docName
   * @param {Uint8Array} update
   * @return {Promise<number>} Returns the clock of the stored update
   */
  storeUpdate (docName, update) {
    return this._transact(db => storeUpdate(db, docName, update))
  }
  /**
   * Encode the difference between the stored document and `stateVector`.
   *
   * @param {string} docName
   * @param {Uint8Array} stateVector
   */
  async getDiff (docName, stateVector) {
    const ydoc = await this.getYDoc(docName);
    return Y__namespace.encodeStateAsUpdate(ydoc, stateVector)
  }
  /**
   * Delete a document: its state vector plus every update/meta entry.
   *
   * @param {string} docName
   * @return {Promise<void>}
   */
  clearDocument (docName) {
    return this._transact(async db => {
      await db.del(createDocumentStateVectorKey(docName));
      await clearRange(db, createDocumentFirstKey(docName), createDocumentLastKey(docName));
    })
  }
  /**
   * Store an arbitrary meta value (encoded with lib0 buffer.encodeAny).
   *
   * @param {string} docName
   * @param {string} metaKey
   * @param {any} value
   * @return {Promise<void>}
   */
  setMeta (docName, metaKey, value) {
    return this._transact(db => levelPut(db, createDocumentMetaKey(docName, metaKey), buffer__namespace.encodeAny(value)))
  }
  /**
   * @param {string} docName
   * @param {string} metaKey
   * @return {Promise<any>}
   */
  delMeta (docName, metaKey) {
    return this._transact(db => db.del(createDocumentMetaKey(docName, metaKey)))
  }
  /**
   * @param {string} docName
   * @param {string} metaKey
   * @return {Promise<any>} decoded meta value, or undefined when the key is absent
   */
  getMeta (docName, metaKey) {
    return this._transact(async db => {
      const res = await levelGet(db, createDocumentMetaKey(docName, metaKey));
      if (res === null) {
        return// return void
      }
      return buffer__namespace.decodeAny(res)
    })
  }
  /**
   * @return {Promise<Array<string>>}
   */
  getAllDocNames () {
    return this._transact(async db => {
      const docKeys = await getAllDocs(db, false, true);
      // state-vector keys have the shape ['v1_sv', docName]
      return docKeys.map(key => key[1])
    })
  }
  /**
   * @return {Promise<Array<{ name: string, sv: Uint8Array, clock: number }>>}
   */
  getAllDocStateVectors () {
    return this._transact(async db => {
      const docs = /** @type {any} */ (await getAllDocs(db, true, true));
      return docs.map(doc => {
        const { sv, clock } = decodeLeveldbStateVector(doc.value);
        return { name: doc.key[1], sv, clock }
      })
    })
  }
  /**
   * Read all meta entries of a document.
   *
   * @param {string} docName
   * @return {Promise<Map<string, any>>} map of metaKey -> decoded value
   */
  getMetas (docName) {
    return this._transact(async db => {
      const data = await getLevelBulkData(db, {
        gte: createDocumentMetaKey(docName, ''),
        lt: createDocumentMetaEndKey(docName),
        keys: true,
        values: true
      });
      const metas = new Map();
      data.forEach(v => { metas.set(v.key[3], buffer__namespace.decodeAny(v.value)); });
      return metas
    })
  }
  /**
   * Close connection to a leveldb database and discard all state and bindings
   *
   * @return {Promise<void>}
   */
  destroy () {
    return this._transact(db => db.close())
  }
  /**
   * Delete all data in database.
   */
  clearAll () {
    return this._transact(async db => db.clear())
  }
}
// Public API of y-leveldb.
exports.LeveldbPersistence = LeveldbPersistence;
exports.PREFERRED_TRIM_SIZE = PREFERRED_TRIM_SIZE;
exports.getAllDocs = getAllDocs;
exports.getCurrentUpdateClock = getCurrentUpdateClock;
exports.getLevelBulkData = getLevelBulkData;
exports.getLevelUpdates = getLevelUpdates;
exports.keyEncoding = keyEncoding;
exports.readUint32BigEndian = readUint32BigEndian;
exports.writeUint32BigEndian = writeUint32BigEndian;
//# sourceMappingURL=y-leveldb.cjs.map

1
node_modules/y-leveldb/dist/y-leveldb.cjs.map generated vendored Normal file

File diff suppressed because one or more lines are too long