- Consolidated duplicate UndoManagers to single instance - Fixed connection promise to only resolve on 'connected' status - Fixed WebSocketProvider import (WebsocketProvider) - Added proper doc.destroy() cleanup - Renamed isPresenceInitialized property to avoid conflict Co-Authored-By: Paperclip <noreply@paperclip.ing>
23087 lines
666 KiB
JavaScript
23087 lines
666 KiB
JavaScript
(function () {
|
||
'use strict';
|
||
|
||
/**
|
||
* Utility module to work with key-value stores.
|
||
*
|
||
* @module map
|
||
*/
|
||
|
||
/**
 * Creates a new Map instance.
 *
 * @function
 * @return {Map<any, any>}
 */
const create$6 = () => new Map();
|
||
|
||
/**
 * Clone a Map into a brand-new Map holding the same key-value pairs.
 *
 * @function
 * @template X,Y
 * @param {Map<X,Y>} m
 * @return {Map<X,Y>}
 */
const copy = m => {
  const clone = create$6();
  for (const [key, value] of m) {
    clone.set(key, value);
  }
  return clone
};
|
||
|
||
/**
 * Get map property. Create T if property is undefined and set T on map.
 *
 * ```js
 * const listeners = map.setIfUndefined(events, 'eventName', set.create)
 * listeners.add(listener)
 * ```
 *
 * @function
 * @template T,K
 * @param {Map<K, T>} map
 * @param {K} key
 * @param {function():T} createT
 * @return {T}
 */
const setIfUndefined = (map, key, createT) => {
  let value = map.get(key);
  if (value === undefined) {
    value = createT();
    map.set(key, value);
  }
  return value
};
|
||
|
||
/**
 * Creates an Array and populates it with the content of all key-value pairs using the `f(value, key)` function.
 *
 * @function
 * @template K
 * @template V
 * @template R
 * @param {Map<K,V>} m
 * @param {function(V,K):R} f
 * @return {Array<R>}
 */
const map$1 = (m, f) => {
  const result = [];
  m.forEach((value, key) => {
    result.push(f(value, key));
  });
  return result
};
|
||
|
||
/**
 * Tests whether any key-value pair passes the test implemented by `f(value, key)`.
 * Stops iterating as soon as one pair matches.
 *
 * @todo should rename to some - similarly to Array.some
 *
 * @function
 * @template K
 * @template V
 * @param {Map<K,V>} m
 * @param {function(V,K):boolean} f
 * @return {boolean}
 */
const any = (m, f) => {
  for (const entry of m) {
    if (f(entry[1], entry[0])) {
      return true
    }
  }
  return false
};
|
||
|
||
/**
 * Utility module to work with sets.
 *
 * @module set
 */

// Creates a new, empty Set instance.
const create$5 = () => new Set();
|
||
|
||
/**
 * Utility module to work with Arrays.
 *
 * @module array
 */

/**
 * Return the last element of an array. The element must exist.
 *
 * @template L
 * @param {Array<L>} arr
 * @return {L}
 */
const last = arr => arr.slice(-1)[0];
|
||
|
||
/**
 * Append all elements from src to dest, mutating dest in place.
 *
 * @template M
 * @param {Array<M>} dest
 * @param {Array<M>} src
 */
const appendTo = (dest, src) => {
  for (const element of src) {
    dest.push(element);
  }
};
|
||
|
||
/**
 * Transforms something array-like to an actual Array.
 *
 * @function
 * @template T
 * @param {ArrayLike<T>|Iterable<T>} arraylike
 * @return {Array<T>}
 */
const from$1 = Array.from;

// Native predicate: is the argument an Array?
const isArray$1 = Array.isArray;
|
||
|
||
/**
 * Observable class prototype.
 *
 * @module observable
 */

/**
 * Handles named events.
 *
 * @template N
 */
class Observable {
  constructor () {
    /**
     * Registered observers: event name -> Set of listener functions.
     * @type {Map<N, any>}
     */
    this._observers = create$6();
  }

  /**
   * Register an event listener for `name`.
   *
   * @param {N} name
   * @param {function} f
   */
  on (name, f) {
    setIfUndefined(this._observers, name, create$5).add(f);
  }

  /**
   * Register a listener that removes itself after its first invocation.
   *
   * @param {N} name
   * @param {function} f
   */
  once (name, f) {
    /**
     * @param {...any} args
     */
    const _f = (...args) => {
      this.off(name, _f);
      f(...args);
    };
    this.on(name, _f);
  }

  /**
   * Unregister an event listener. Drops the listener Set for `name`
   * entirely once it becomes empty.
   *
   * @param {N} name
   * @param {function} f
   */
  off (name, f) {
    const observers = this._observers.get(name);
    if (observers !== undefined) {
      observers.delete(f);
      if (observers.size === 0) {
        this._observers.delete(name);
      }
    }
  }

  /**
   * Emit a named event. All registered event listeners that listen to the
   * specified name will receive the event.
   *
   * @todo This should catch exceptions
   *
   * @param {N} name The event name.
   * @param {Array<any>} args The arguments that are applied to the event listener.
   */
  emit (name, args) {
    // copy all listeners to an array first to make sure that no event is emitted to listeners that are subscribed while the event handler is called.
    return from$1((this._observers.get(name) || create$6()).values()).forEach(f => f(...args))
  }

  // Drop every registered listener by replacing the observer map.
  destroy () {
    this._observers = create$6();
  }
}
|
||
|
||
/**
 * Common Math expressions.
 *
 * @module math
 */

const floor = Math.floor;
const ceil = Math.ceil;
const abs = Math.abs;
const round = Math.round;
const log10 = Math.log10;

/**
 * @function
 * @param {number} a
 * @param {number} b
 * @return {number} The sum of a and b
 */
const add = (a, b) => a + b;

/**
 * NOTE: unlike Math.min, `min(-0, 0)` returns `0` (the ternary keeps `b`
 * when the operands compare equal).
 *
 * @function
 * @param {number} a
 * @param {number} b
 * @return {number} The smaller element of a and b
 */
const min = (a, b) => a < b ? a : b;

/**
 * NOTE: unlike Math.max, ties return `b` rather than normalising -0/+0.
 *
 * @function
 * @param {number} a
 * @param {number} b
 * @return {number} The bigger element of a and b
 */
const max = (a, b) => a > b ? a : b;

/**
 * Base 10 exponential function. Returns the value of 10 raised to the power of pow.
 *
 * @param {number} exp
 * @return {number}
 */
const exp10 = exp => Math.pow(10, exp);

/**
 * @param {number} n
 * @return {boolean} Whether n is negative. This function also differentiates between -0 and +0
 */
// -0 is detected via the sign of 1/n (1/-0 === -Infinity).
const isNegativeZero = n => n !== 0 ? n < 0 : 1 / n < 0;
|
||
|
||
// Resolve the global object across environments (globalThis preferred,
// then browser window, Node.js global, worker self).
var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};

// Runs a CommonJS module body inside this bundle: builds a fake `module`
// object with an `exports` bag and a `require` shim, executes `fn`
// against it, and returns the resulting exports.
function createCommonjsModule(fn, basedir, module) {
  return module = {
    path: basedir,
    exports: {},
    require: function (path, base) {
      return commonjsRequire(path, (base === undefined || base === null) ? module.path : base);
    }
  }, fn(module, module.exports), module.exports;
}

// Unwrap an ES-module namespace object to its default export when present.
function getCjsExportFromNamespace (n) {
  return n && n['default'] || n;
}

// Stub: dynamic `require` calls cannot be resolved in a static bundle.
function commonjsRequire () {
  throw new Error('Dynamic requires are not currently supported by @rollup/plugin-commonjs');
}
|
||
|
||
// Public aliases for the base64 codec functions declared below
// (function declarations are hoisted, so these bindings are valid here).
var byteLength_1 = byteLength;
var toByteArray_1 = toByteArray;
var fromByteArray_1 = fromByteArray;

// Forward (index -> char) and reverse (char code -> index) base64 tables.
var lookup = [];
var revLookup = [];
var Arr = typeof Uint8Array !== 'undefined' ? Uint8Array : Array;

var code = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
for (var i$1 = 0, len$1 = code.length; i$1 < len$1; ++i$1) {
  lookup[i$1] = code[i$1];
  revLookup[code.charCodeAt(i$1)] = i$1;
}

// Support decoding URL-safe base64 strings, as Node.js does.
// See: https://en.wikipedia.org/wiki/Base64#URL_applications
revLookup['-'.charCodeAt(0)] = 62;
revLookup['_'.charCodeAt(0)] = 63;
|
||
|
||
// Returns [validLen, placeHoldersLen] for a base64 string: the index of
// the first '=' padding char (or the full length when unpadded) and the
// number of padding characters implied by it.
function getLens (b64) {
  const len = b64.length;

  if (len % 4 > 0) {
    throw new Error('Invalid string. Length must be a multiple of 4')
  }

  // Trim off extra bytes after placeholder bytes are found
  // See: https://github.com/beatgammit/base64-js/issues/42
  let validLen = b64.indexOf('=');
  if (validLen === -1) {
    validLen = len;
  }

  const placeHoldersLen = validLen === len
    ? 0
    : 4 - (validLen % 4);

  return [validLen, placeHoldersLen]
}

// base64 is 4/3 + up to two characters of the original data
function byteLength (b64) {
  const [validLen, placeHoldersLen] = getLens(b64);
  return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen
}

// Same computation as byteLength, for callers that already ran getLens.
function _byteLength (b64, validLen, placeHoldersLen) {
  return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen
}
|
||
|
||
// Decodes a base64 string into a Uint8Array (or Array fallback).
// Processes complete 4-char groups into 3 bytes each, then handles the
// final partial group according to the padding length.
function toByteArray (b64) {
  var tmp;
  var lens = getLens(b64);
  var validLen = lens[0];
  var placeHoldersLen = lens[1];

  var arr = new Arr(_byteLength(b64, validLen, placeHoldersLen));

  var curByte = 0;

  // if there are placeholders, only get up to the last complete 4 chars
  var len = placeHoldersLen > 0
    ? validLen - 4
    : validLen;

  var i;
  for (i = 0; i < len; i += 4) {
    // Pack four 6-bit values into one 24-bit number, then split into bytes.
    tmp =
      (revLookup[b64.charCodeAt(i)] << 18) |
      (revLookup[b64.charCodeAt(i + 1)] << 12) |
      (revLookup[b64.charCodeAt(i + 2)] << 6) |
      revLookup[b64.charCodeAt(i + 3)];
    arr[curByte++] = (tmp >> 16) & 0xFF;
    arr[curByte++] = (tmp >> 8) & 0xFF;
    arr[curByte++] = tmp & 0xFF;
  }

  // Two '=' padding chars: one remaining byte encoded in two chars.
  if (placeHoldersLen === 2) {
    tmp =
      (revLookup[b64.charCodeAt(i)] << 2) |
      (revLookup[b64.charCodeAt(i + 1)] >> 4);
    arr[curByte++] = tmp & 0xFF;
  }

  // One '=' padding char: two remaining bytes encoded in three chars.
  if (placeHoldersLen === 1) {
    tmp =
      (revLookup[b64.charCodeAt(i)] << 10) |
      (revLookup[b64.charCodeAt(i + 1)] << 4) |
      (revLookup[b64.charCodeAt(i + 2)] >> 2);
    arr[curByte++] = (tmp >> 8) & 0xFF;
    arr[curByte++] = tmp & 0xFF;
  }

  return arr
}
|
||
|
||
// Encodes one 24-bit number (three bytes) as four base64 characters.
function tripletToBase64 (num) {
  return lookup[num >> 18 & 0x3F] +
    lookup[num >> 12 & 0x3F] +
    lookup[num >> 6 & 0x3F] +
    lookup[num & 0x3F]
}

// Encodes uint8[start, end) — a range whose length is a multiple of 3 —
// into a base64 string without padding.
function encodeChunk (uint8, start, end) {
  var tmp;
  var output = [];
  for (var i = start; i < end; i += 3) {
    tmp =
      ((uint8[i] << 16) & 0xFF0000) +
      ((uint8[i + 1] << 8) & 0xFF00) +
      (uint8[i + 2] & 0xFF);
    output.push(tripletToBase64(tmp));
  }
  return output.join('')
}
|
||
|
||
// Encodes a byte array as a base64 string, chunking the work to keep
// intermediate strings bounded, then appending the padded tail.
function fromByteArray (uint8) {
  var tmp;
  var len = uint8.length;
  var extraBytes = len % 3; // if we have 1 byte left, pad 2 bytes
  var parts = [];
  var maxChunkLength = 16383; // must be multiple of 3

  // go through the array every three bytes, we'll deal with trailing stuff later
  for (var i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) {
    parts.push(encodeChunk(uint8, i, (i + maxChunkLength) > len2 ? len2 : (i + maxChunkLength)));
  }

  // pad the end with zeros, but make sure to not forget the extra bytes
  if (extraBytes === 1) {
    tmp = uint8[len - 1];
    parts.push(
      lookup[tmp >> 2] +
      lookup[(tmp << 4) & 0x3F] +
      '=='
    );
  } else if (extraBytes === 2) {
    tmp = (uint8[len - 2] << 8) + uint8[len - 1];
    parts.push(
      lookup[tmp >> 10] +
      lookup[(tmp >> 4) & 0x3F] +
      lookup[(tmp << 2) & 0x3F] +
      '='
    );
  }

  return parts.join('')
}
|
||
|
||
// Namespace object emulating the base64-js package exports.
var base64Js = {
  byteLength: byteLength_1,
  toByteArray: toByteArray_1,
  fromByteArray: fromByteArray_1
};
|
||
|
||
/*! ieee754. BSD-3-Clause License. Feross Aboukhadijeh <https://feross.org/opensource> */
// Reads an IEEE-754 float from `buffer` at `offset`.
// isLE: byte order; mLen: mantissa bits; nBytes: total bytes (e.g. 52/8
// for float64, 23/4 for float32).
var read = function (buffer, offset, isLE, mLen, nBytes) {
  var e, m;
  var eLen = (nBytes * 8) - mLen - 1;          // exponent bit width
  var eMax = (1 << eLen) - 1;                  // all-ones exponent
  var eBias = eMax >> 1;                       // exponent bias
  var nBits = -7;
  var i = isLE ? (nBytes - 1) : 0;             // start at the sign byte
  var d = isLE ? -1 : 1;                       // byte iteration direction
  var s = buffer[offset + i];

  i += d;

  // Split off the sign bit, then accumulate the exponent bits.
  e = s & ((1 << (-nBits)) - 1);
  s >>= (-nBits);
  nBits += eLen;
  for (; nBits > 0; e = (e * 256) + buffer[offset + i], i += d, nBits -= 8) {}

  // Remaining bits of the current byte plus following bytes are mantissa.
  m = e & ((1 << (-nBits)) - 1);
  e >>= (-nBits);
  nBits += mLen;
  for (; nBits > 0; m = (m * 256) + buffer[offset + i], i += d, nBits -= 8) {}

  if (e === 0) {
    // Subnormal number (implicit leading bit is 0).
    e = 1 - eBias;
  } else if (e === eMax) {
    // All-ones exponent encodes NaN or +/-Infinity.
    return m ? NaN : ((s ? -1 : 1) * Infinity)
  } else {
    // Normal number: restore the implicit leading mantissa bit.
    m = m + Math.pow(2, mLen);
    e = e - eBias;
  }
  return (s ? -1 : 1) * m * Math.pow(2, e - mLen)
};
|
||
|
||
// Writes `value` as an IEEE-754 float into `buffer` at `offset`.
// Mirrors `read` above: mLen mantissa bits, nBytes total width.
var write$1 = function (buffer, value, offset, isLE, mLen, nBytes) {
  var e, m, c;
  var eLen = (nBytes * 8) - mLen - 1;          // exponent bit width
  var eMax = (1 << eLen) - 1;                  // all-ones exponent
  var eBias = eMax >> 1;
  // Rounding correction used when the exponent underflows (float32 only).
  var rt = (mLen === 23 ? Math.pow(2, -24) - Math.pow(2, -77) : 0);
  var i = isLE ? 0 : (nBytes - 1);             // start at the low byte
  var d = isLE ? 1 : -1;                       // byte iteration direction
  // Sign bit; the 1/value check catches -0.
  var s = value < 0 || (value === 0 && 1 / value < 0) ? 1 : 0;

  value = Math.abs(value);

  if (isNaN(value) || value === Infinity) {
    // NaN -> mantissa 1, Infinity -> mantissa 0; both use eMax.
    m = isNaN(value) ? 1 : 0;
    e = eMax;
  } else {
    e = Math.floor(Math.log(value) / Math.LN2);
    // c = 2^-e; correct log() rounding so that 1 <= value * c < 2.
    if (value * (c = Math.pow(2, -e)) < 1) {
      e--;
      c *= 2;
    }
    // Apply round-to-nearest correction before truncating the mantissa.
    if (e + eBias >= 1) {
      value += rt / c;
    } else {
      value += rt * Math.pow(2, 1 - eBias);
    }
    // The correction may have pushed value across a power of two.
    if (value * c >= 2) {
      e++;
      c /= 2;
    }

    if (e + eBias >= eMax) {
      // Overflow: encode as Infinity.
      m = 0;
      e = eMax;
    } else if (e + eBias >= 1) {
      // Normal number: drop the implicit leading bit.
      m = ((value * c) - 1) * Math.pow(2, mLen);
      e = e + eBias;
    } else {
      // Subnormal number: exponent field is zero.
      m = value * Math.pow(2, eBias - 1) * Math.pow(2, mLen);
      e = 0;
    }
  }

  // Emit mantissa bytes, low to high (relative to direction d).
  for (; mLen >= 8; buffer[offset + i] = m & 0xff, i += d, m /= 256, mLen -= 8) {}

  // Pack remaining mantissa bits together with the exponent, then emit.
  e = (e << mLen) | m;
  eLen += mLen;
  for (; eLen > 0; buffer[offset + i] = e & 0xff, i += d, e /= 256, eLen -= 8) {}

  // Finally set the sign bit on the last-written byte.
  buffer[offset + i - d] |= s * 128;
};
|
||
|
||
// Namespace object emulating the ieee754 package exports.
var ieee754 = {
  read: read,
  write: write$1
};
|
||
|
||
var buffer = createCommonjsModule(function (module, exports) {
|
||
|
||
|
||
|
||
// Node's well-known symbol for customising util.inspect output, when the
// runtime exposes Symbol.for.
var customInspectSymbol =
  (typeof Symbol === 'function' && typeof Symbol['for'] === 'function') // eslint-disable-line dot-notation
    ? Symbol['for']('nodejs.util.inspect.custom') // eslint-disable-line dot-notation
    : null;

exports.Buffer = Buffer;
exports.SlowBuffer = SlowBuffer;
exports.INSPECT_MAX_BYTES = 50;

// Maximum byte length of a single Buffer (2^31 - 1).
var K_MAX_LENGTH = 0x7fffffff;
exports.kMaxLength = K_MAX_LENGTH;

/**
 * If `Buffer.TYPED_ARRAY_SUPPORT`:
 *   === true    Use Uint8Array implementation (fastest)
 *   === false   Print warning and recommend using `buffer` v4.x which has an Object
 *               implementation (most compatible, even IE6)
 *
 * Browsers that support typed arrays are IE 10+, Firefox 4+, Chrome 7+, Safari 5.1+,
 * Opera 11.6+, iOS 4.2+.
 *
 * We report that the browser does not support typed arrays if the are not subclassable
 * using __proto__. Firefox 4-29 lacks support for adding new properties to `Uint8Array`
 * (See: https://bugzilla.mozilla.org/show_bug.cgi?id=695438). IE 10 lacks support
 * for __proto__ and has a buggy typed array implementation.
 */
Buffer.TYPED_ARRAY_SUPPORT = typedArraySupport();

if (!Buffer.TYPED_ARRAY_SUPPORT && typeof console !== 'undefined' &&
    typeof console.error === 'function') {
  console.error(
    'This browser lacks typed array (Uint8Array) support which is required by ' +
    '`buffer` v5.x. Use `buffer` v4.x if you require old browser support.'
  );
}
|
||
|
||
// Feature check: can Uint8Array instances be augmented with extra methods
// via their prototype chain? Returns false on engines where this fails.
function typedArraySupport () {
  try {
    const arr = new Uint8Array(1);
    const proto = { foo: function () { return 42 } };
    Object.setPrototypeOf(proto, Uint8Array.prototype);
    Object.setPrototypeOf(arr, proto);
    return arr.foo() === 42
  } catch (e) {
    return false
  }
}
|
||
|
||
// Node-compatible accessor: the underlying ArrayBuffer of this Buffer.
Object.defineProperty(Buffer.prototype, 'parent', {
  enumerable: true,
  get: function () {
    if (!Buffer.isBuffer(this)) return undefined
    return this.buffer
  }
});

// Node-compatible accessor: byte offset into the underlying ArrayBuffer.
Object.defineProperty(Buffer.prototype, 'offset', {
  enumerable: true,
  get: function () {
    if (!Buffer.isBuffer(this)) return undefined
    return this.byteOffset
  }
});
|
||
|
||
// Allocates a `length`-byte Uint8Array whose prototype is swapped to
// Buffer.prototype. Throws RangeError for lengths beyond K_MAX_LENGTH.
function createBuffer (length) {
  if (length > K_MAX_LENGTH) {
    throw new RangeError('The value "' + length + '" is invalid for option "size"')
  }
  // Return an augmented `Uint8Array` instance
  var buf = new Uint8Array(length);
  Object.setPrototypeOf(buf, Buffer.prototype);
  return buf
}
|
||
|
||
/**
 * The Buffer constructor returns instances of `Uint8Array` that have their
 * prototype changed to `Buffer.prototype`. Furthermore, `Buffer` is a subclass of
 * `Uint8Array`, so the returned instances will have all the node `Buffer` methods
 * and the `Uint8Array` methods. Square bracket notation works as expected -- it
 * returns a single octet.
 *
 * The `Uint8Array` prototype remains unmodified.
 */

function Buffer (arg, encodingOrOffset, length) {
  // Common case.
  if (typeof arg === 'number') {
    if (typeof encodingOrOffset === 'string') {
      throw new TypeError(
        'The "string" argument must be of type string. Received type number'
      )
    }
    // Numeric argument: allocate an uninitialised buffer of that size.
    return allocUnsafe(arg)
  }
  // Everything else goes through the Buffer.from dispatch.
  return from(arg, encodingOrOffset, length)
}
|
||
|
||
Buffer.poolSize = 8192; // not used by this implementation

// Backing implementation of Buffer.from: dispatches on the type of
// `value` — string, TypedArray/DataView, (Shared)ArrayBuffer, boxed
// primitive, array-like object, or Symbol.toPrimitive-coercible object.
function from (value, encodingOrOffset, length) {
  if (typeof value === 'string') {
    return fromString(value, encodingOrOffset)
  }

  if (ArrayBuffer.isView(value)) {
    return fromArrayView(value)
  }

  if (value == null) {
    throw new TypeError(
      'The first argument must be one of type string, Buffer, ArrayBuffer, Array, ' +
      'or Array-like Object. Received type ' + (typeof value)
    )
  }

  if (isInstance(value, ArrayBuffer) ||
      (value && isInstance(value.buffer, ArrayBuffer))) {
    return fromArrayBuffer(value, encodingOrOffset, length)
  }

  // SharedArrayBuffer is optional in some environments; guard the check.
  if (typeof SharedArrayBuffer !== 'undefined' &&
      (isInstance(value, SharedArrayBuffer) ||
      (value && isInstance(value.buffer, SharedArrayBuffer)))) {
    return fromArrayBuffer(value, encodingOrOffset, length)
  }

  if (typeof value === 'number') {
    throw new TypeError(
      'The "value" argument must not be of type number. Received type number'
    )
  }

  // Unwrap objects that expose a meaningful valueOf() (boxed primitives).
  var valueOf = value.valueOf && value.valueOf();
  if (valueOf != null && valueOf !== value) {
    return Buffer.from(valueOf, encodingOrOffset, length)
  }

  var b = fromObject(value);
  if (b) return b

  // Last resort: coerce via Symbol.toPrimitive to a string.
  if (typeof Symbol !== 'undefined' && Symbol.toPrimitive != null &&
      typeof value[Symbol.toPrimitive] === 'function') {
    return Buffer.from(
      value[Symbol.toPrimitive]('string'), encodingOrOffset, length
    )
  }

  throw new TypeError(
    'The first argument must be one of type string, Buffer, ArrayBuffer, Array, ' +
    'or Array-like Object. Received type ' + (typeof value)
  )
}
|
||
|
||
/**
 * Functionally equivalent to Buffer(arg, encoding) but throws a TypeError
 * if value is a number.
 * Buffer.from(str[, encoding])
 * Buffer.from(array)
 * Buffer.from(buffer)
 * Buffer.from(arrayBuffer[, byteOffset[, length]])
 **/
Buffer.from = function (value, encodingOrOffset, length) {
  return from(value, encodingOrOffset, length)
};
|
||
|
||
// Note: Change prototype *after* Buffer.from is defined to workaround Chrome bug:
// https://github.com/feross/buffer/pull/148
Object.setPrototypeOf(Buffer.prototype, Uint8Array.prototype);
Object.setPrototypeOf(Buffer, Uint8Array);
|
||
|
||
// Validates an allocation size: must be a non-negative number.
// Throws TypeError for non-numbers and RangeError for negatives.
function assertSize (size) {
  if (typeof size !== 'number') {
    throw new TypeError('"size" argument must be of type number')
  }
  if (size < 0) {
    throw new RangeError('The value "' + size + '" is invalid for option "size"')
  }
}
|
||
|
||
// Backing implementation of Buffer.alloc: zero-filled by default, or
// filled with `fill` (optionally encoded) when provided.
function alloc (size, fill, encoding) {
  assertSize(size);
  if (size <= 0) {
    return createBuffer(size)
  }
  if (fill !== undefined) {
    // Only pay attention to encoding if it's a string. This
    // prevents accidentally sending in a number that would
    // be interpreted as a start offset.
    return typeof encoding === 'string'
      ? createBuffer(size).fill(fill, encoding)
      : createBuffer(size).fill(fill)
  }
  return createBuffer(size)
}

/**
 * Creates a new filled Buffer instance.
 * alloc(size[, fill[, encoding]])
 **/
Buffer.alloc = function (size, fill, encoding) {
  return alloc(size, fill, encoding)
};
|
||
|
||
// Backing implementation of Buffer.allocUnsafe: validated but not
// explicitly filled (Uint8Array memory is zeroed by the engine anyway).
function allocUnsafe (size) {
  assertSize(size);
  return createBuffer(size < 0 ? 0 : checked(size) | 0)
}

/**
 * Equivalent to Buffer(num), by default creates a non-zero-filled Buffer instance.
 * */
Buffer.allocUnsafe = function (size) {
  return allocUnsafe(size)
};
/**
 * Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
 */
Buffer.allocUnsafeSlow = function (size) {
  return allocUnsafe(size)
};
|
||
|
||
// Builds a Buffer from a string in the given encoding (defaults to utf8).
// Allocates byteLength(string) bytes and trims if the write stops early.
function fromString (string, encoding) {
  if (typeof encoding !== 'string' || encoding === '') {
    encoding = 'utf8';
  }

  if (!Buffer.isEncoding(encoding)) {
    throw new TypeError('Unknown encoding: ' + encoding)
  }

  var length = byteLength(string, encoding) | 0;
  var buf = createBuffer(length);

  var actual = buf.write(string, encoding);

  if (actual !== length) {
    // Writing a hex string, for example, that contains invalid characters will
    // cause everything after the first invalid character to be ignored. (e.g.
    // 'abxxcd' will be treated as 'ab')
    buf = buf.slice(0, actual);
  }

  return buf
}
|
||
|
||
// Copies an array-like of byte values into a fresh Buffer, truncating
// each element to an octet with `& 255`.
function fromArrayLike (array) {
  const length = array.length < 0 ? 0 : checked(array.length) | 0;
  const buf = createBuffer(length);
  let i = 0;
  while (i < length) {
    buf[i] = array[i] & 255;
    i += 1;
  }
  return buf
}
|
||
|
||
// Builds a Buffer from a TypedArray/DataView. Uint8Array contents are
// copied byte-for-byte; other views are treated element-wise as bytes.
function fromArrayView (arrayView) {
  if (isInstance(arrayView, Uint8Array)) {
    var copy = new Uint8Array(arrayView);
    return fromArrayBuffer(copy.buffer, copy.byteOffset, copy.byteLength)
  }
  return fromArrayLike(arrayView)
}
|
||
|
||
// Builds a Buffer as a *view* over an existing ArrayBuffer (no copy),
// honouring the optional byteOffset and length window.
function fromArrayBuffer (array, byteOffset, length) {
  if (byteOffset < 0 || array.byteLength < byteOffset) {
    throw new RangeError('"offset" is outside of buffer bounds')
  }

  if (array.byteLength < byteOffset + (length || 0)) {
    throw new RangeError('"length" is outside of buffer bounds')
  }

  var buf;
  if (byteOffset === undefined && length === undefined) {
    buf = new Uint8Array(array);
  } else if (length === undefined) {
    buf = new Uint8Array(array, byteOffset);
  } else {
    buf = new Uint8Array(array, byteOffset, length);
  }

  // Return an augmented `Uint8Array` instance
  Object.setPrototypeOf(buf, Buffer.prototype);

  return buf
}
|
||
|
||
// Builds a Buffer from generic objects: existing Buffers are copied,
// array-likes are converted element-wise, and JSON-serialised Buffers
// ({type:'Buffer', data:[...]}) are revived. Returns undefined otherwise.
function fromObject (obj) {
  if (Buffer.isBuffer(obj)) {
    var len = checked(obj.length) | 0;
    var buf = createBuffer(len);

    if (buf.length === 0) {
      return buf
    }

    obj.copy(buf, 0, 0, len);
    return buf
  }

  if (obj.length !== undefined) {
    // Guard against bogus length values (non-numeric or NaN).
    if (typeof obj.length !== 'number' || numberIsNaN(obj.length)) {
      return createBuffer(0)
    }
    return fromArrayLike(obj)
  }

  if (obj.type === 'Buffer' && Array.isArray(obj.data)) {
    return fromArrayLike(obj.data)
  }
}
|
||
|
||
// Validates a length against K_MAX_LENGTH and coerces it to an int32.
function checked (length) {
  // Note: cannot use `length < K_MAX_LENGTH` here because that fails when
  // length is NaN (which is otherwise coerced to zero.)
  if (length >= K_MAX_LENGTH) {
    throw new RangeError('Attempt to allocate Buffer larger than maximum ' +
                         'size: 0x' + K_MAX_LENGTH.toString(16) + ' bytes')
  }
  return length | 0
}
|
||
|
||
// Legacy SlowBuffer API: coerces non-numeric lengths to 0 and delegates
// to Buffer.alloc.
function SlowBuffer (length) {
  if (+length != length) { // eslint-disable-line eqeqeq
    length = 0;
  }
  return Buffer.alloc(+length)
}
|
||
|
||
// Duck-typed check via the _isBuffer marker so it works across multiple
// copies of the `buffer` package (instanceof would not).
Buffer.isBuffer = function isBuffer (b) {
  return b != null && b._isBuffer === true &&
    b !== Buffer.prototype // so Buffer.isBuffer(Buffer.prototype) will be false
};
|
||
|
||
// Lexicographic comparison of two Buffers/Uint8Arrays: returns -1, 0 or 1.
Buffer.compare = function compare (a, b) {
  // Normalise plain Uint8Arrays to Buffers first.
  if (isInstance(a, Uint8Array)) a = Buffer.from(a, a.offset, a.byteLength);
  if (isInstance(b, Uint8Array)) b = Buffer.from(b, b.offset, b.byteLength);
  if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) {
    throw new TypeError(
      'The "buf1", "buf2" arguments must be one of type Buffer or Uint8Array'
    )
  }

  if (a === b) return 0

  var x = a.length;
  var y = b.length;

  // Find the first differing byte; x/y become those bytes (or the
  // lengths when one buffer is a prefix of the other).
  for (var i = 0, len = Math.min(x, y); i < len; ++i) {
    if (a[i] !== b[i]) {
      x = a[i];
      y = b[i];
      break
    }
  }

  if (x < y) return -1
  if (y < x) return 1
  return 0
};
|
||
|
||
// True when `encoding` (case-insensitively) names a supported encoding.
Buffer.isEncoding = function isEncoding (encoding) {
  var normalized = String(encoding).toLowerCase();
  var supported = [
    'hex', 'utf8', 'utf-8', 'ascii', 'latin1', 'binary',
    'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le'
  ];
  return supported.indexOf(normalized) !== -1
};
|
||
|
||
// Concatenates an Array of Buffers/Uint8Arrays into one Buffer. When
// `length` is omitted it is computed as the sum of the inputs' lengths.
Buffer.concat = function concat (list, length) {
  if (!Array.isArray(list)) {
    throw new TypeError('"list" argument must be an Array of Buffers')
  }

  if (list.length === 0) {
    return Buffer.alloc(0)
  }

  var i;
  if (length === undefined) {
    length = 0;
    for (i = 0; i < list.length; ++i) {
      length += list[i].length;
    }
  }

  var buffer = Buffer.allocUnsafe(length);
  var pos = 0;
  for (i = 0; i < list.length; ++i) {
    var buf = list[i];
    if (isInstance(buf, Uint8Array)) {
      // A chunk that would overflow the target is copied via Buffer.copy,
      // which truncates; Uint8Array.set would throw instead.
      if (pos + buf.length > buffer.length) {
        Buffer.from(buf).copy(buffer, pos);
      } else {
        Uint8Array.prototype.set.call(
          buffer,
          buf,
          pos
        );
      }
    } else if (!Buffer.isBuffer(buf)) {
      throw new TypeError('"list" argument must be an Array of Buffers')
    } else {
      buf.copy(buffer, pos);
    }
    pos += buf.length;
  }
  return buffer
};
|
||
|
||
// Returns the byte length a string would occupy in the given encoding
// (or the byte length of a Buffer/ArrayBuffer/view passed directly).
function byteLength (string, encoding) {
  if (Buffer.isBuffer(string)) {
    return string.length
  }
  if (ArrayBuffer.isView(string) || isInstance(string, ArrayBuffer)) {
    return string.byteLength
  }
  if (typeof string !== 'string') {
    throw new TypeError(
      'The "string" argument must be one of type string, Buffer, or ArrayBuffer. ' +
      'Received type ' + typeof string
    )
  }

  var len = string.length;
  // Third (undocumented) argument: require the encoding to match exactly.
  var mustMatch = (arguments.length > 2 && arguments[2] === true);
  if (!mustMatch && len === 0) return 0

  // Use a for loop to avoid recursion
  var loweredCase = false;
  for (;;) {
    switch (encoding) {
      case 'ascii':
      case 'latin1':
      case 'binary':
        return len
      case 'utf8':
      case 'utf-8':
        return utf8ToBytes(string).length
      case 'ucs2':
      case 'ucs-2':
      case 'utf16le':
      case 'utf-16le':
        return len * 2
      case 'hex':
        return len >>> 1
      case 'base64':
        return base64ToBytes(string).length
      default:
        if (loweredCase) {
          return mustMatch ? -1 : utf8ToBytes(string).length // assume utf8
        }
        // Retry once with the encoding lower-cased.
        encoding = ('' + encoding).toLowerCase();
        loweredCase = true;
    }
  }
}
Buffer.byteLength = byteLength;
|
||
|
||
// Decodes this[start, end) to a string in the given encoding. Called with
// `this` bound to a Buffer (see Buffer.prototype.toString below).
function slowToString (encoding, start, end) {
  var loweredCase = false;

  // No need to verify that "this.length <= MAX_UINT32" since it's a read-only
  // property of a typed array.

  // This behaves neither like String nor Uint8Array in that we set start/end
  // to their upper/lower bounds if the value passed is out of range.
  // undefined is handled specially as per ECMA-262 6th Edition,
  // Section 13.3.3.7 Runtime Semantics: KeyedBindingInitialization.
  if (start === undefined || start < 0) {
    start = 0;
  }
  // Return early if start > this.length. Done here to prevent potential uint32
  // coercion fail below.
  if (start > this.length) {
    return ''
  }

  if (end === undefined || end > this.length) {
    end = this.length;
  }

  if (end <= 0) {
    return ''
  }

  // Force coercion to uint32. This will also coerce falsey/NaN values to 0.
  end >>>= 0;
  start >>>= 0;

  if (end <= start) {
    return ''
  }

  if (!encoding) encoding = 'utf8';

  while (true) {
    switch (encoding) {
      case 'hex':
        return hexSlice(this, start, end)

      case 'utf8':
      case 'utf-8':
        return utf8Slice(this, start, end)

      case 'ascii':
        return asciiSlice(this, start, end)

      case 'latin1':
      case 'binary':
        return latin1Slice(this, start, end)

      case 'base64':
        return base64Slice(this, start, end)

      case 'ucs2':
      case 'ucs-2':
      case 'utf16le':
      case 'utf-16le':
        return utf16leSlice(this, start, end)

      default:
        // Unknown encoding: retry once lower-cased, then give up.
        if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding)
        encoding = (encoding + '').toLowerCase();
        loweredCase = true;
    }
  }
}
|
||
|
||
// This property is used by `Buffer.isBuffer` (and the `is-buffer` npm package)
// to detect a Buffer instance. It's not possible to use `instanceof Buffer`
// reliably in a browserify context because there could be multiple different
// copies of the 'buffer' package in use. This method works even for Buffer
// instances that were created from another copy of the `buffer` package.
// See: https://github.com/feross/buffer/issues/154
Buffer.prototype._isBuffer = true;
|
||
|
||
// Exchanges the bytes at positions n and m of b, in place.
function swap (b, n, m) {
  const tmp = b[n];
  b[n] = b[m];
  b[m] = tmp;
}
|
||
|
||
// In-place byte-order reversal of every 16-bit unit. Length must be even.
Buffer.prototype.swap16 = function swap16 () {
  var len = this.length;
  if (len % 2 !== 0) {
    throw new RangeError('Buffer size must be a multiple of 16-bits')
  }
  for (var i = 0; i < len; i += 2) {
    swap(this, i, i + 1);
  }
  return this
};

// In-place byte-order reversal of every 32-bit unit.
Buffer.prototype.swap32 = function swap32 () {
  var len = this.length;
  if (len % 4 !== 0) {
    throw new RangeError('Buffer size must be a multiple of 32-bits')
  }
  for (var i = 0; i < len; i += 4) {
    swap(this, i, i + 3);
    swap(this, i + 1, i + 2);
  }
  return this
};

// In-place byte-order reversal of every 64-bit unit.
Buffer.prototype.swap64 = function swap64 () {
  var len = this.length;
  if (len % 8 !== 0) {
    throw new RangeError('Buffer size must be a multiple of 64-bits')
  }
  for (var i = 0; i < len; i += 8) {
    swap(this, i, i + 7);
    swap(this, i + 1, i + 6);
    swap(this, i + 2, i + 5);
    swap(this, i + 3, i + 4);
  }
  return this
};
|
||
|
||
// Decodes the buffer to a string. The zero-argument utf8 fast path avoids
// the slower argument handling in slowToString.
Buffer.prototype.toString = function toString () {
  var length = this.length;
  if (length === 0) return ''
  if (arguments.length === 0) return utf8Slice(this, 0, length)
  return slowToString.apply(this, arguments)
};

Buffer.prototype.toLocaleString = Buffer.prototype.toString;
|
||
|
||
// Byte-wise equality with another Buffer (identity short-circuits).
Buffer.prototype.equals = function equals (b) {
  if (!Buffer.isBuffer(b)) throw new TypeError('Argument must be a Buffer')
  if (this === b) return true
  return Buffer.compare(this, b) === 0
};
|
||
|
||
// Human-readable form, e.g. '<Buffer 61 62 63 ... >', truncated to
// INSPECT_MAX_BYTES bytes.
Buffer.prototype.inspect = function inspect () {
  var str = '';
  var max = exports.INSPECT_MAX_BYTES;
  str = this.toString('hex', 0, max).replace(/(.{2})/g, '$1 ').trim();
  if (this.length > max) str += ' ... ';
  return '<Buffer ' + str + '>'
};
// Hook Node's util.inspect when the well-known symbol is available.
if (customInspectSymbol) {
  Buffer.prototype[customInspectSymbol] = Buffer.prototype.inspect;
}
|
||
|
||
// Lexicographically compares this buffer against `target` (Buffer or
// Uint8Array), optionally restricted to [start,end) of target and
// [thisStart,thisEnd) of this. Returns -1, 0 or 1 (Node Buffer#compare).
Buffer.prototype.compare = function compare (target, start, end, thisStart, thisEnd) {
  if (isInstance(target, Uint8Array)) {
    target = Buffer.from(target, target.offset, target.byteLength);
  }
  if (!Buffer.isBuffer(target)) {
    throw new TypeError(
      'The "target" argument must be one of type Buffer or Uint8Array. ' +
      'Received type ' + (typeof target)
    )
  }

  // Default the four range arguments to "entire buffer".
  if (start === undefined) {
    start = 0;
  }
  if (end === undefined) {
    end = target ? target.length : 0;
  }
  if (thisStart === undefined) {
    thisStart = 0;
  }
  if (thisEnd === undefined) {
    thisEnd = this.length;
  }

  if (start < 0 || end > target.length || thisStart < 0 || thisEnd > this.length) {
    throw new RangeError('out of range index')
  }

  // Empty-range shortcuts: both empty -> equal; shorter empty side loses.
  if (thisStart >= thisEnd && start >= end) {
    return 0
  }
  if (thisStart >= thisEnd) {
    return -1
  }
  if (start >= end) {
    return 1
  }

  // Coerce to uint32 only after validation, so negative inputs still threw above.
  start >>>= 0;
  end >>>= 0;
  thisStart >>>= 0;
  thisEnd >>>= 0;

  if (this === target) return 0

  var x = thisEnd - thisStart;
  var y = end - start;
  var len = Math.min(x, y);

  var thisCopy = this.slice(thisStart, thisEnd);
  var targetCopy = target.slice(start, end);

  // Scan for the first differing byte. x/y are intentionally reused: they end
  // up holding either the differing byte pair or, if every compared byte
  // matched, the two range lengths — both compare correctly below.
  for (var i = 0; i < len; ++i) {
    if (thisCopy[i] !== targetCopy[i]) {
      x = thisCopy[i];
      y = targetCopy[i];
      break
    }
  }

  if (x < y) return -1
  if (y < x) return 1
  return 0
};
|
||
|
||
// Finds either the first index of `val` in `buffer` at offset >= `byteOffset`,
// OR the last index of `val` in `buffer` at offset <= `byteOffset`.
//
// Arguments:
// - buffer - a Buffer to search
// - val - a string, Buffer, or number
// - byteOffset - an index into `buffer`; will be clamped to an int32
// - encoding - an optional encoding, relevant if val is a string
// - dir - true for indexOf, false for lastIndexOf
function bidirectionalIndexOf (buffer, val, byteOffset, encoding, dir) {
  // Empty buffer means no match
  if (buffer.length === 0) return -1

  // Normalize byteOffset: an (offset-less) encoding may be passed in its slot.
  if (typeof byteOffset === 'string') {
    encoding = byteOffset;
    byteOffset = 0;
  } else if (byteOffset > 0x7fffffff) {
    byteOffset = 0x7fffffff;
  } else if (byteOffset < -0x80000000) {
    byteOffset = -0x80000000;
  }
  byteOffset = +byteOffset; // Coerce to Number.
  if (numberIsNaN(byteOffset)) {
    // byteOffset: if it's undefined, null, NaN, "foo", etc, search whole buffer
    byteOffset = dir ? 0 : (buffer.length - 1);
  }

  // Normalize byteOffset: negative offsets start from the end of the buffer
  if (byteOffset < 0) byteOffset = buffer.length + byteOffset;
  if (byteOffset >= buffer.length) {
    // Past the end: forward search cannot match; backward starts at last byte.
    if (dir) return -1
    else byteOffset = buffer.length - 1;
  } else if (byteOffset < 0) {
    // Before the start: forward search clamps to 0; backward cannot match.
    if (dir) byteOffset = 0;
    else return -1
  }

  // Normalize val: strings are encoded to bytes first.
  if (typeof val === 'string') {
    val = Buffer.from(val, encoding);
  }

  // Finally, search either indexOf (if dir is true) or lastIndexOf
  if (Buffer.isBuffer(val)) {
    // Special case: looking for empty string/buffer always fails
    if (val.length === 0) {
      return -1
    }
    return arrayIndexOf(buffer, val, byteOffset, encoding, dir)
  } else if (typeof val === 'number') {
    val = val & 0xFF; // Search for a byte value [0-255]
    // Prefer the native typed-array search when the engine provides it.
    if (typeof Uint8Array.prototype.indexOf === 'function') {
      if (dir) {
        return Uint8Array.prototype.indexOf.call(buffer, val, byteOffset)
      } else {
        return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset)
      }
    }
    return arrayIndexOf(buffer, [val], byteOffset, encoding, dir)
  }

  throw new TypeError('val must be string, number or Buffer')
}
|
||
|
||
// Core search used by indexOf/lastIndexOf once arguments are normalized.
// Searches byte-by-byte, or — for UCS-2/UTF-16LE encodings — in 16-bit units.
function arrayIndexOf (arr, val, byteOffset, encoding, dir) {
  var indexSize = 1;
  var arrLength = arr.length;
  var valLength = val.length;

  if (encoding !== undefined) {
    encoding = String(encoding).toLowerCase();
    if (encoding === 'ucs2' || encoding === 'ucs-2' ||
        encoding === 'utf16le' || encoding === 'utf-16le') {
      // 16-bit mode: halve the logical lengths/offset, read 2 bytes per unit.
      if (arr.length < 2 || val.length < 2) {
        return -1
      }
      indexSize = 2;
      arrLength /= 2;
      valLength /= 2;
      byteOffset /= 2;
    }
  }

  // Reads one search unit (a byte, or a 16-bit pair in UCS-2 mode).
  function read (buf, i) {
    if (indexSize === 1) {
      return buf[i]
    } else {
      return buf.readUInt16BE(i * indexSize)
    }
  }

  var i;
  if (dir) {
    // Forward scan with restart-on-mismatch (naive string matching):
    // foundIndex marks a candidate match start, -1 means "not matching".
    var foundIndex = -1;
    for (i = byteOffset; i < arrLength; i++) {
      if (read(arr, i) === read(val, foundIndex === -1 ? 0 : i - foundIndex)) {
        if (foundIndex === -1) foundIndex = i;
        if (i - foundIndex + 1 === valLength) return foundIndex * indexSize
      } else {
        // Mismatch: rewind i to the failed candidate start so the next
        // iteration retries one position later.
        if (foundIndex !== -1) i -= i - foundIndex;
        foundIndex = -1;
      }
    }
  } else {
    // Backward scan: attempt a full match at each candidate position,
    // starting from the highest position where `val` could still fit.
    if (byteOffset + valLength > arrLength) byteOffset = arrLength - valLength;
    for (i = byteOffset; i >= 0; i--) {
      var found = true;
      for (var j = 0; j < valLength; j++) {
        if (read(arr, i + j) !== read(val, j)) {
          found = false;
          break
        }
      }
      if (found) return i
    }
  }

  return -1
}
|
||
|
||
// True when `val` occurs anywhere at or after `byteOffset`.
Buffer.prototype.includes = function includes (val, byteOffset, encoding) {
  return this.indexOf(val, byteOffset, encoding) > -1
};

// First occurrence of `val` at offset >= byteOffset, or -1.
Buffer.prototype.indexOf = function indexOf (val, byteOffset, encoding) {
  return bidirectionalIndexOf(this, val, byteOffset, encoding, true)
};

// Last occurrence of `val` at offset <= byteOffset, or -1.
Buffer.prototype.lastIndexOf = function lastIndexOf (val, byteOffset, encoding) {
  return bidirectionalIndexOf(this, val, byteOffset, encoding, false)
};
|
||
|
||
// Decodes a hex string into `buf` starting at `offset`, writing at most
// `length` bytes. Stops early at the first invalid hex pair and returns the
// number of bytes actually written.
function hexWrite (buf, string, offset, length) {
  offset = Number(offset) || 0;
  var remaining = buf.length - offset;
  if (!length) {
    length = remaining;
  } else {
    length = Math.min(Number(length), remaining);
  }

  // Each byte consumes two hex digits, so never read past the string.
  var maxBytes = string.length / 2;
  if (length > maxBytes) {
    length = maxBytes;
  }

  for (var i = 0; i < length; ++i) {
    var parsed = parseInt(string.slice(i * 2, i * 2 + 2), 16);
    if (numberIsNaN(parsed)) return i
    buf[offset + i] = parsed;
  }
  return i
}
|
||
|
||
// Encoding-specific helpers for Buffer#write: each converts `string` to a
// byte array and blits it into `buf` at `offset`, returning bytes written.

function utf8Write (buf, string, offset, length) {
  // The second argument caps how many UTF-8 bytes utf8ToBytes may produce.
  return blitBuffer(utf8ToBytes(string, buf.length - offset), buf, offset, length)
}

function asciiWrite (buf, string, offset, length) {
  return blitBuffer(asciiToBytes(string), buf, offset, length)
}

function base64Write (buf, string, offset, length) {
  return blitBuffer(base64ToBytes(string), buf, offset, length)
}

function ucs2Write (buf, string, offset, length) {
  return blitBuffer(utf16leToBytes(string, buf.length - offset), buf, offset, length)
}
|
||
|
||
// Writes `string` into the buffer and returns the number of bytes written.
// Supports the overload forms write(string), write(string, encoding) and
// write(string, offset[, length][, encoding]); the removed legacy form
// write(string, encoding, offset) throws.
Buffer.prototype.write = function write (string, offset, length, encoding) {
  // Buffer#write(string)
  if (offset === undefined) {
    encoding = 'utf8';
    length = this.length;
    offset = 0;
  // Buffer#write(string, encoding)
  } else if (length === undefined && typeof offset === 'string') {
    encoding = offset;
    length = this.length;
    offset = 0;
  // Buffer#write(string, offset[, length][, encoding])
  } else if (isFinite(offset)) {
    offset = offset >>> 0;
    if (isFinite(length)) {
      length = length >>> 0;
      if (encoding === undefined) encoding = 'utf8';
    } else {
      // Third argument was actually the encoding.
      encoding = length;
      length = undefined;
    }
  } else {
    throw new Error(
      'Buffer.write(string, encoding, offset[, length]) is no longer supported'
    )
  }

  // Clamp length to the space left after offset.
  var remaining = this.length - offset;
  if (length === undefined || length > remaining) length = remaining;

  if ((string.length > 0 && (length < 0 || offset < 0)) || offset > this.length) {
    throw new RangeError('Attempt to write outside buffer bounds')
  }

  if (!encoding) encoding = 'utf8';

  // Dispatch on encoding; unknown names get lowercased once and retried
  // before being rejected.
  var loweredCase = false;
  for (;;) {
    switch (encoding) {
      case 'hex':
        return hexWrite(this, string, offset, length)

      case 'utf8':
      case 'utf-8':
        return utf8Write(this, string, offset, length)

      case 'ascii':
      case 'latin1':
      case 'binary':
        return asciiWrite(this, string, offset, length)

      case 'base64':
        // Warning: maxLength not taken into account in base64Write
        return base64Write(this, string, offset, length)

      case 'ucs2':
      case 'ucs-2':
      case 'utf16le':
      case 'utf-16le':
        return ucs2Write(this, string, offset, length)

      default:
        if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding)
        encoding = ('' + encoding).toLowerCase();
        loweredCase = true;
    }
  }
};
|
||
|
||
// JSON.stringify hook: serializes as { type: 'Buffer', data: [bytes...] },
// the same shape Node produces (and Buffer.from can revive).
Buffer.prototype.toJSON = function toJSON () {
  var bytes = Array.prototype.slice.call(this._arr || this, 0);
  return {
    type: 'Buffer',
    data: bytes
  }
};
|
||
|
||
// Encodes buf[start:end] as base64 via the base64-js module, skipping the
// intermediate slice when the whole buffer is requested.
function base64Slice (buf, start, end) {
  if (start === 0 && end === buf.length) {
    return base64Js.fromByteArray(buf)
  } else {
    return base64Js.fromByteArray(buf.slice(start, end))
  }
}
|
||
|
||
// Decodes buf[start:end] as UTF-8. Invalid or truncated sequences emit the
// replacement character U+FFFD and advance one byte; code points above
// U+FFFF are split into a UTF-16 surrogate pair for String.fromCharCode.
function utf8Slice (buf, start, end) {
  end = Math.min(buf.length, end);
  var res = [];

  var i = start;
  while (i < end) {
    var firstByte = buf[i];
    var codePoint = null;
    // Sequence length inferred from the lead byte's high bits.
    var bytesPerSequence = (firstByte > 0xEF)
      ? 4
      : (firstByte > 0xDF)
        ? 3
        : (firstByte > 0xBF)
          ? 2
          : 1;

    if (i + bytesPerSequence <= end) {
      var secondByte, thirdByte, fourthByte, tempCodePoint;

      switch (bytesPerSequence) {
        case 1:
          if (firstByte < 0x80) {
            codePoint = firstByte;
          }
          break
        case 2:
          // Continuation bytes must be 10xxxxxx; reject overlong encodings.
          secondByte = buf[i + 1];
          if ((secondByte & 0xC0) === 0x80) {
            tempCodePoint = (firstByte & 0x1F) << 0x6 | (secondByte & 0x3F);
            if (tempCodePoint > 0x7F) {
              codePoint = tempCodePoint;
            }
          }
          break
        case 3:
          secondByte = buf[i + 1];
          thirdByte = buf[i + 2];
          if ((secondByte & 0xC0) === 0x80 && (thirdByte & 0xC0) === 0x80) {
            tempCodePoint = (firstByte & 0xF) << 0xC | (secondByte & 0x3F) << 0x6 | (thirdByte & 0x3F);
            // Reject overlong forms and the UTF-16 surrogate range.
            if (tempCodePoint > 0x7FF && (tempCodePoint < 0xD800 || tempCodePoint > 0xDFFF)) {
              codePoint = tempCodePoint;
            }
          }
          break
        case 4:
          secondByte = buf[i + 1];
          thirdByte = buf[i + 2];
          fourthByte = buf[i + 3];
          if ((secondByte & 0xC0) === 0x80 && (thirdByte & 0xC0) === 0x80 && (fourthByte & 0xC0) === 0x80) {
            tempCodePoint = (firstByte & 0xF) << 0x12 | (secondByte & 0x3F) << 0xC | (thirdByte & 0x3F) << 0x6 | (fourthByte & 0x3F);
            if (tempCodePoint > 0xFFFF && tempCodePoint < 0x110000) {
              codePoint = tempCodePoint;
            }
          }
      }
    }

    if (codePoint === null) {
      // we did not generate a valid codePoint so insert a
      // replacement char (U+FFFD) and advance only 1 byte
      codePoint = 0xFFFD;
      bytesPerSequence = 1;
    } else if (codePoint > 0xFFFF) {
      // encode to utf16 (surrogate pair dance)
      codePoint -= 0x10000;
      res.push(codePoint >>> 10 & 0x3FF | 0xD800);
      codePoint = 0xDC00 | codePoint & 0x3FF;
    }

    res.push(codePoint);
    i += bytesPerSequence;
  }

  return decodeCodePointsArray(res)
}
|
||
|
||
// Based on http://stackoverflow.com/a/22747272/680742, the browser with
// the lowest limit is Chrome, with 0x10000 args.
// We go 1 magnitude less, for safety
var MAX_ARGUMENTS_LENGTH = 0x1000;

// Converts an array of UTF-16 code units to a string. Small arrays go through
// a single fromCharCode.apply; larger ones are processed in fixed-size chunks
// so the engine's argument-count limit is never exceeded.
function decodeCodePointsArray (codePoints) {
  var total = codePoints.length;
  if (total <= MAX_ARGUMENTS_LENGTH) {
    return String.fromCharCode.apply(String, codePoints) // avoid extra slice()
  }

  // Decode in chunks to avoid "call stack size exceeded".
  var out = '';
  for (var pos = 0; pos < total; pos += MAX_ARGUMENTS_LENGTH) {
    out += String.fromCharCode.apply(
      String,
      codePoints.slice(pos, pos + MAX_ARGUMENTS_LENGTH)
    );
  }
  return out
}
|
||
|
||
// Decodes buf[start:end] as 7-bit ASCII; the high bit of each byte is masked
// off, matching Node's 'ascii' decoding.
function asciiSlice (buf, start, end) {
  end = Math.min(buf.length, end);
  var chars = [];

  for (var idx = start; idx < end; ++idx) {
    chars.push(String.fromCharCode(buf[idx] & 0x7F));
  }
  return chars.join('')
}
|
||
|
||
// Decodes buf[start:end] as Latin-1: each byte maps directly to the code
// unit with the same value (no masking).
function latin1Slice (buf, start, end) {
  end = Math.min(buf.length, end);
  var chars = [];

  for (var idx = start; idx < end; ++idx) {
    chars.push(String.fromCharCode(buf[idx]));
  }
  return chars.join('')
}
|
||
|
||
// Renders buf[start:end] as lowercase hex using the precomputed
// two-character-per-byte lookup table. Out-of-range bounds are clamped.
function hexSlice (buf, start, end) {
  var len = buf.length;

  if (!start || start < 0) start = 0;
  if (!end || end < 0 || end > len) end = len;

  var out = '';
  for (var i = start; i < end; ++i) {
    out += hexSliceLookupTable[buf[i]];
  }
  return out
}
|
||
|
||
// Decodes buf[start:end] as UTF-16LE, pairing bytes little-endian into code
// units. An odd trailing byte is ignored, matching node.js.
function utf16leSlice (buf, start, end) {
  var bytes = buf.slice(start, end);
  var out = '';
  for (var idx = 0; idx + 1 < bytes.length; idx += 2) {
    out += String.fromCharCode(bytes[idx] + (bytes[idx + 1] * 256));
  }
  return out
}
|
||
|
||
// Node-style slice: negative indices count from the end, bounds are clamped,
// and the result is a *view* over the same bytes (shared memory), re-tagged
// with Buffer.prototype.
Buffer.prototype.slice = function slice (start, end) {
  var len = this.length;
  // ~~ truncates toward zero (and maps undefined/NaN to 0).
  start = ~~start;
  end = end === undefined ? len : ~~end;

  if (start < 0) {
    start += len;
    if (start < 0) start = 0;
  } else if (start > len) {
    start = len;
  }

  if (end < 0) {
    end += len;
    if (end < 0) end = 0;
  } else if (end > len) {
    end = len;
  }

  // Empty slice when the range is inverted.
  if (end < start) end = start;

  var newBuf = this.subarray(start, end);
  // Return an augmented `Uint8Array` instance
  Object.setPrototypeOf(newBuf, Buffer.prototype);

  return newBuf
};
|
||
|
||
/*
 * Need to make sure that buffer isn't trying to write out of bounds.
 * Validates that `offset` is a non-negative integer and that `ext` bytes
 * starting there fit inside `length`.
 */
function checkOffset (offset, ext, length) {
  var isUint = offset % 1 === 0 && offset >= 0;
  if (!isUint) throw new RangeError('offset is not uint')
  if (offset + ext > length) throw new RangeError('Trying to access beyond buffer length')
}
|
||
|
||
// Reads an unsigned little-endian integer of `byteLength` bytes.
// `noAssert` (legacy) skips the bounds check. Both Uint/UInt spellings alias.
Buffer.prototype.readUintLE =
Buffer.prototype.readUIntLE = function readUIntLE (offset, byteLength, noAssert) {
  offset = offset >>> 0;
  byteLength = byteLength >>> 0;
  if (!noAssert) checkOffset(offset, byteLength, this.length);

  // Accumulate bytes low-to-high; `mul` is 256^i.
  var val = this[offset];
  var mul = 1;
  var i = 0;
  while (++i < byteLength && (mul *= 0x100)) {
    val += this[offset + i] * mul;
  }

  return val
};

// Big-endian variant: accumulate from the last (least significant) byte back.
Buffer.prototype.readUintBE =
Buffer.prototype.readUIntBE = function readUIntBE (offset, byteLength, noAssert) {
  offset = offset >>> 0;
  byteLength = byteLength >>> 0;
  if (!noAssert) {
    checkOffset(offset, byteLength, this.length);
  }

  var val = this[offset + --byteLength];
  var mul = 1;
  while (byteLength > 0 && (mul *= 0x100)) {
    val += this[offset + --byteLength] * mul;
  }

  return val
};

// Single unsigned byte.
Buffer.prototype.readUint8 =
Buffer.prototype.readUInt8 = function readUInt8 (offset, noAssert) {
  offset = offset >>> 0;
  if (!noAssert) checkOffset(offset, 1, this.length);
  return this[offset]
};

Buffer.prototype.readUint16LE =
Buffer.prototype.readUInt16LE = function readUInt16LE (offset, noAssert) {
  offset = offset >>> 0;
  if (!noAssert) checkOffset(offset, 2, this.length);
  return this[offset] | (this[offset + 1] << 8)
};

Buffer.prototype.readUint16BE =
Buffer.prototype.readUInt16BE = function readUInt16BE (offset, noAssert) {
  offset = offset >>> 0;
  if (!noAssert) checkOffset(offset, 2, this.length);
  return (this[offset] << 8) | this[offset + 1]
};

// The top byte is combined with * and + (not <<) so the result stays an
// unsigned number — a 32-bit shift would reintroduce the sign bit.
Buffer.prototype.readUint32LE =
Buffer.prototype.readUInt32LE = function readUInt32LE (offset, noAssert) {
  offset = offset >>> 0;
  if (!noAssert) checkOffset(offset, 4, this.length);

  return ((this[offset]) |
      (this[offset + 1] << 8) |
      (this[offset + 2] << 16)) +
      (this[offset + 3] * 0x1000000)
};

Buffer.prototype.readUint32BE =
Buffer.prototype.readUInt32BE = function readUInt32BE (offset, noAssert) {
  offset = offset >>> 0;
  if (!noAssert) checkOffset(offset, 4, this.length);

  return (this[offset] * 0x1000000) +
    ((this[offset + 1] << 16) |
    (this[offset + 2] << 8) |
    this[offset + 3])
};
|
||
|
||
// Reads a signed little-endian integer of `byteLength` bytes
// (two's complement).
Buffer.prototype.readIntLE = function readIntLE (offset, byteLength, noAssert) {
  offset = offset >>> 0;
  byteLength = byteLength >>> 0;
  if (!noAssert) checkOffset(offset, byteLength, this.length);

  var val = this[offset];
  var mul = 1;
  var i = 0;
  while (++i < byteLength && (mul *= 0x100)) {
    val += this[offset + i] * mul;
  }
  // After the loop mul is 256^(byteLength-1); mul*0x80 is the sign threshold.
  mul *= 0x80;

  if (val >= mul) val -= Math.pow(2, 8 * byteLength);

  return val
};

// Big-endian variant of readIntLE.
Buffer.prototype.readIntBE = function readIntBE (offset, byteLength, noAssert) {
  offset = offset >>> 0;
  byteLength = byteLength >>> 0;
  if (!noAssert) checkOffset(offset, byteLength, this.length);

  var i = byteLength;
  var mul = 1;
  var val = this[offset + --i];
  while (i > 0 && (mul *= 0x100)) {
    val += this[offset + --i] * mul;
  }
  mul *= 0x80;

  if (val >= mul) val -= Math.pow(2, 8 * byteLength);

  return val
};

// Single signed byte: sign-extend when bit 7 is set.
Buffer.prototype.readInt8 = function readInt8 (offset, noAssert) {
  offset = offset >>> 0;
  if (!noAssert) checkOffset(offset, 1, this.length);
  if (!(this[offset] & 0x80)) return (this[offset])
  return ((0xff - this[offset] + 1) * -1)
};

// 16-bit reads: sign-extend by ORing the upper 16 bits when negative.
Buffer.prototype.readInt16LE = function readInt16LE (offset, noAssert) {
  offset = offset >>> 0;
  if (!noAssert) checkOffset(offset, 2, this.length);
  var val = this[offset] | (this[offset + 1] << 8);
  return (val & 0x8000) ? val | 0xFFFF0000 : val
};

Buffer.prototype.readInt16BE = function readInt16BE (offset, noAssert) {
  offset = offset >>> 0;
  if (!noAssert) checkOffset(offset, 2, this.length);
  var val = this[offset + 1] | (this[offset] << 8);
  return (val & 0x8000) ? val | 0xFFFF0000 : val
};

// 32-bit reads: plain bitwise OR already yields a signed 32-bit result.
Buffer.prototype.readInt32LE = function readInt32LE (offset, noAssert) {
  offset = offset >>> 0;
  if (!noAssert) checkOffset(offset, 4, this.length);

  return (this[offset]) |
    (this[offset + 1] << 8) |
    (this[offset + 2] << 16) |
    (this[offset + 3] << 24)
};

Buffer.prototype.readInt32BE = function readInt32BE (offset, noAssert) {
  offset = offset >>> 0;
  if (!noAssert) checkOffset(offset, 4, this.length);

  return (this[offset] << 24) |
    (this[offset + 1] << 16) |
    (this[offset + 2] << 8) |
    (this[offset + 3])
};
|
||
|
||
// IEEE-754 reads, delegated to the `ieee754` module: (23 mantissa bits,
// 4 bytes) = single precision, (52, 8) = double; the boolean selects
// little-endian byte order.
Buffer.prototype.readFloatLE = function readFloatLE (offset, noAssert) {
  offset = offset >>> 0;
  if (!noAssert) checkOffset(offset, 4, this.length);
  return ieee754.read(this, offset, true, 23, 4)
};

Buffer.prototype.readFloatBE = function readFloatBE (offset, noAssert) {
  offset = offset >>> 0;
  if (!noAssert) checkOffset(offset, 4, this.length);
  return ieee754.read(this, offset, false, 23, 4)
};

Buffer.prototype.readDoubleLE = function readDoubleLE (offset, noAssert) {
  offset = offset >>> 0;
  if (!noAssert) checkOffset(offset, 8, this.length);
  return ieee754.read(this, offset, true, 52, 8)
};

Buffer.prototype.readDoubleBE = function readDoubleBE (offset, noAssert) {
  offset = offset >>> 0;
  if (!noAssert) checkOffset(offset, 8, this.length);
  return ieee754.read(this, offset, false, 52, 8)
};
|
||
|
||
// Shared validation for the integer write methods: `buf` must be a Buffer,
// `value` must lie in [min, max], and `ext` bytes must fit at `offset`.
function checkInt (buf, value, offset, ext, max, min) {
  if (!Buffer.isBuffer(buf)) throw new TypeError('"buffer" argument must be a Buffer instance')
  if (value > max || value < min) throw new RangeError('"value" argument is out of bounds')
  if (offset + ext > buf.length) throw new RangeError('Index out of range')
}
|
||
|
||
// Writes `value` as an unsigned little-endian integer of `byteLength` bytes;
// returns the offset just past the written bytes. `noAssert` skips checks.
Buffer.prototype.writeUintLE =
Buffer.prototype.writeUIntLE = function writeUIntLE (value, offset, byteLength, noAssert) {
  value = +value;
  offset = offset >>> 0;
  byteLength = byteLength >>> 0;
  if (!noAssert) {
    var maxBytes = Math.pow(2, 8 * byteLength) - 1;
    checkInt(this, value, offset, byteLength, maxBytes, 0);
  }

  // Emit bytes low-to-high; `mul` is 256^i.
  var mul = 1;
  var i = 0;
  this[offset] = value & 0xFF;
  while (++i < byteLength && (mul *= 0x100)) {
    this[offset + i] = (value / mul) & 0xFF;
  }

  return offset + byteLength
};

// Big-endian variant: emit bytes high-to-low.
Buffer.prototype.writeUintBE =
Buffer.prototype.writeUIntBE = function writeUIntBE (value, offset, byteLength, noAssert) {
  value = +value;
  offset = offset >>> 0;
  byteLength = byteLength >>> 0;
  if (!noAssert) {
    var maxBytes = Math.pow(2, 8 * byteLength) - 1;
    checkInt(this, value, offset, byteLength, maxBytes, 0);
  }

  var i = byteLength - 1;
  var mul = 1;
  this[offset + i] = value & 0xFF;
  while (--i >= 0 && (mul *= 0x100)) {
    this[offset + i] = (value / mul) & 0xFF;
  }

  return offset + byteLength
};

Buffer.prototype.writeUint8 =
Buffer.prototype.writeUInt8 = function writeUInt8 (value, offset, noAssert) {
  value = +value;
  offset = offset >>> 0;
  if (!noAssert) checkInt(this, value, offset, 1, 0xff, 0);
  this[offset] = (value & 0xff);
  return offset + 1
};

Buffer.prototype.writeUint16LE =
Buffer.prototype.writeUInt16LE = function writeUInt16LE (value, offset, noAssert) {
  value = +value;
  offset = offset >>> 0;
  if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0);
  this[offset] = (value & 0xff);
  this[offset + 1] = (value >>> 8);
  return offset + 2
};

Buffer.prototype.writeUint16BE =
Buffer.prototype.writeUInt16BE = function writeUInt16BE (value, offset, noAssert) {
  value = +value;
  offset = offset >>> 0;
  if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0);
  this[offset] = (value >>> 8);
  this[offset + 1] = (value & 0xff);
  return offset + 2
};

Buffer.prototype.writeUint32LE =
Buffer.prototype.writeUInt32LE = function writeUInt32LE (value, offset, noAssert) {
  value = +value;
  offset = offset >>> 0;
  if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0);
  this[offset + 3] = (value >>> 24);
  this[offset + 2] = (value >>> 16);
  this[offset + 1] = (value >>> 8);
  this[offset] = (value & 0xff);
  return offset + 4
};

Buffer.prototype.writeUint32BE =
Buffer.prototype.writeUInt32BE = function writeUInt32BE (value, offset, noAssert) {
  value = +value;
  offset = offset >>> 0;
  if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0);
  this[offset] = (value >>> 24);
  this[offset + 1] = (value >>> 16);
  this[offset + 2] = (value >>> 8);
  this[offset + 3] = (value & 0xff);
  return offset + 4
};
|
||
|
||
// Writes `value` as a signed little-endian integer of `byteLength` bytes
// (two's complement); returns the offset past the written bytes.
Buffer.prototype.writeIntLE = function writeIntLE (value, offset, byteLength, noAssert) {
  value = +value;
  offset = offset >>> 0;
  if (!noAssert) {
    var limit = Math.pow(2, (8 * byteLength) - 1);

    checkInt(this, value, offset, byteLength, limit - 1, -limit);
  }

  // `sub` becomes 1 once a borrow is needed to two's-complement the
  // higher-order bytes of a negative value.
  var i = 0;
  var mul = 1;
  var sub = 0;
  this[offset] = value & 0xFF;
  while (++i < byteLength && (mul *= 0x100)) {
    if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) {
      sub = 1;
    }
    this[offset + i] = ((value / mul) >> 0) - sub & 0xFF;
  }

  return offset + byteLength
};

// Big-endian variant of writeIntLE.
Buffer.prototype.writeIntBE = function writeIntBE (value, offset, byteLength, noAssert) {
  value = +value;
  offset = offset >>> 0;
  if (!noAssert) {
    var limit = Math.pow(2, (8 * byteLength) - 1);

    checkInt(this, value, offset, byteLength, limit - 1, -limit);
  }

  var i = byteLength - 1;
  var mul = 1;
  var sub = 0;
  this[offset + i] = value & 0xFF;
  while (--i >= 0 && (mul *= 0x100)) {
    if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) {
      sub = 1;
    }
    this[offset + i] = ((value / mul) >> 0) - sub & 0xFF;
  }

  return offset + byteLength
};

// Single signed byte: fold negatives into the 0..255 range first.
Buffer.prototype.writeInt8 = function writeInt8 (value, offset, noAssert) {
  value = +value;
  offset = offset >>> 0;
  if (!noAssert) checkInt(this, value, offset, 1, 0x7f, -0x80);
  if (value < 0) value = 0xff + value + 1;
  this[offset] = (value & 0xff);
  return offset + 1
};

Buffer.prototype.writeInt16LE = function writeInt16LE (value, offset, noAssert) {
  value = +value;
  offset = offset >>> 0;
  if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000);
  this[offset] = (value & 0xff);
  this[offset + 1] = (value >>> 8);
  return offset + 2
};

Buffer.prototype.writeInt16BE = function writeInt16BE (value, offset, noAssert) {
  value = +value;
  offset = offset >>> 0;
  if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000);
  this[offset] = (value >>> 8);
  this[offset + 1] = (value & 0xff);
  return offset + 2
};

Buffer.prototype.writeInt32LE = function writeInt32LE (value, offset, noAssert) {
  value = +value;
  offset = offset >>> 0;
  if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000);
  this[offset] = (value & 0xff);
  this[offset + 1] = (value >>> 8);
  this[offset + 2] = (value >>> 16);
  this[offset + 3] = (value >>> 24);
  return offset + 4
};

Buffer.prototype.writeInt32BE = function writeInt32BE (value, offset, noAssert) {
  value = +value;
  offset = offset >>> 0;
  if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000);
  // Fold negatives into unsigned 32-bit space before shifting bytes out.
  if (value < 0) value = 0xffffffff + value + 1;
  this[offset] = (value >>> 24);
  this[offset + 1] = (value >>> 16);
  this[offset + 2] = (value >>> 8);
  this[offset + 3] = (value & 0xff);
  return offset + 4
};
|
||
|
||
// Bounds check for the float/double writers: `ext` bytes must fit at a
// non-negative `offset`. The value/max/min parameters are accepted only for
// signature parity with checkInt and are intentionally unused.
function checkIEEE754 (buf, value, offset, ext, max, min) {
  if (offset + ext > buf.length || offset < 0) throw new RangeError('Index out of range')
}
|
||
|
||
// Writes `value` as a 4-byte IEEE-754 single; the bit layout is produced by
// the ieee754 module. Returns the offset past the written bytes.
function writeFloat (buf, value, offset, littleEndian, noAssert) {
  value = +value;
  offset = offset >>> 0;
  if (!noAssert) {
    checkIEEE754(buf, value, offset, 4);
  }
  ieee754.write(buf, value, offset, littleEndian, 23, 4);
  return offset + 4
}

Buffer.prototype.writeFloatLE = function writeFloatLE (value, offset, noAssert) {
  return writeFloat(this, value, offset, true, noAssert)
};

Buffer.prototype.writeFloatBE = function writeFloatBE (value, offset, noAssert) {
  return writeFloat(this, value, offset, false, noAssert)
};

// 8-byte IEEE-754 double variant of writeFloat.
function writeDouble (buf, value, offset, littleEndian, noAssert) {
  value = +value;
  offset = offset >>> 0;
  if (!noAssert) {
    checkIEEE754(buf, value, offset, 8);
  }
  ieee754.write(buf, value, offset, littleEndian, 52, 8);
  return offset + 8
}

Buffer.prototype.writeDoubleLE = function writeDoubleLE (value, offset, noAssert) {
  return writeDouble(this, value, offset, true, noAssert)
};

Buffer.prototype.writeDoubleBE = function writeDoubleBE (value, offset, noAssert) {
  return writeDouble(this, value, offset, false, noAssert)
};
|
||
|
||
// copy(targetBuffer, targetStart=0, sourceStart=0, sourceEnd=buffer.length)
// Copies bytes from this buffer into `target`; overlapping self-copies are
// handled via copyWithin. Returns the number of bytes copied.
Buffer.prototype.copy = function copy (target, targetStart, start, end) {
  if (!Buffer.isBuffer(target)) throw new TypeError('argument should be a Buffer')
  if (!start) start = 0;
  if (!end && end !== 0) end = this.length;
  if (targetStart >= target.length) targetStart = target.length;
  if (!targetStart) targetStart = 0;
  if (end > 0 && end < start) end = start;

  // Copy 0 bytes; we're done
  if (end === start) return 0
  if (target.length === 0 || this.length === 0) return 0

  // Fatal error conditions
  if (targetStart < 0) {
    throw new RangeError('targetStart out of bounds')
  }
  if (start < 0 || start >= this.length) throw new RangeError('Index out of range')
  if (end < 0) throw new RangeError('sourceEnd out of bounds')

  // Are we oob? Clamp the range to what both buffers can hold.
  if (end > this.length) end = this.length;
  if (target.length - targetStart < end - start) {
    end = target.length - targetStart + start;
  }

  var len = end - start;

  if (this === target && typeof Uint8Array.prototype.copyWithin === 'function') {
    // Use built-in when available, missing from IE11
    this.copyWithin(targetStart, start, end);
  } else {
    Uint8Array.prototype.set.call(
      target,
      this.subarray(start, end),
      targetStart
    );
  }

  return len
};
|
||
|
||
// Usage:
//    buffer.fill(number[, offset[, end]])
//    buffer.fill(buffer[, offset[, end]])
//    buffer.fill(string[, offset[, end]][, encoding])
// Fills [start, end) with a repeating pattern derived from `val`; returns
// `this` for chaining.
Buffer.prototype.fill = function fill (val, start, end, encoding) {
  // Handle string cases:
  if (typeof val === 'string') {
    // The encoding may occupy the start or end argument slot.
    if (typeof start === 'string') {
      encoding = start;
      start = 0;
      end = this.length;
    } else if (typeof end === 'string') {
      encoding = end;
      end = this.length;
    }
    if (encoding !== undefined && typeof encoding !== 'string') {
      throw new TypeError('encoding must be a string')
    }
    if (typeof encoding === 'string' && !Buffer.isEncoding(encoding)) {
      throw new TypeError('Unknown encoding: ' + encoding)
    }
    if (val.length === 1) {
      var code = val.charCodeAt(0);
      if ((encoding === 'utf8' && code < 128) ||
          encoding === 'latin1') {
        // Fast path: If `val` fits into a single byte, use that numeric value.
        val = code;
      }
    }
  } else if (typeof val === 'number') {
    val = val & 255;
  } else if (typeof val === 'boolean') {
    val = Number(val);
  }

  // Invalid ranges are not set to a default, so can range check early.
  if (start < 0 || this.length < start || this.length < end) {
    throw new RangeError('Out of range index')
  }

  if (end <= start) {
    return this
  }

  start = start >>> 0;
  end = end === undefined ? this.length : end >>> 0;

  if (!val) val = 0;

  var i;
  if (typeof val === 'number') {
    // Single-byte fill.
    for (i = start; i < end; ++i) {
      this[i] = val;
    }
  } else {
    // Multi-byte fill: repeat the encoded pattern across the range.
    var bytes = Buffer.isBuffer(val)
      ? val
      : Buffer.from(val, encoding);
    var len = bytes.length;
    if (len === 0) {
      throw new TypeError('The value "' + val +
        '" is invalid for argument "value"')
    }
    for (i = 0; i < end - start; ++i) {
      this[i + start] = bytes[i % len];
    }
  }

  return this
};
|
||
|
||
// HELPER FUNCTIONS
// ================

var INVALID_BASE64_RE = /[^+/0-9A-Za-z-_]/g;

// Normalizes a user-supplied base64 string into a form base64-js accepts:
// padding and everything after the first '=' is dropped, whitespace and
// invalid characters are removed, and '=' padding is re-added to a multiple
// of four. Strings shorter than 2 characters decode to nothing.
function base64clean (str) {
  // Node takes equal signs as end of the Base64 encoding
  str = str.split('=')[0];
  // Node strips out invalid characters like \n and \t from the string, base64-js does not
  str = str.trim().replace(INVALID_BASE64_RE, '');
  // Node converts strings with length < 2 to ''
  if (str.length < 2) return ''
  // Node allows for non-padded base64 strings (missing trailing ===), base64-js does not
  var rem = str.length % 4;
  if (rem !== 0) {
    str += '==='.slice(0, 4 - rem);
  }
  return str
}
|
||
|
||
/**
 * UTF-8 encode `string` into an array of byte values, emitting at most
 * `units` bytes (default Infinity). Unpaired or invalid surrogates are
 * replaced with U+FFFD (bytes EF BF BD).
 *
 * @param {string} string
 * @param {number} [units] maximum number of bytes to produce
 * @return {Array<number>}
 */
function utf8ToBytes (string, units) {
  units = units || Infinity;
  var codePoint;
  var length = string.length;
  var leadSurrogate = null; // pending high surrogate waiting for its pair
  var bytes = [];

  for (var i = 0; i < length; ++i) {
    codePoint = string.charCodeAt(i);

    // is surrogate component
    if (codePoint > 0xD7FF && codePoint < 0xE000) {
      // last char was a lead
      if (!leadSurrogate) {
        // no lead yet
        if (codePoint > 0xDBFF) {
          // unexpected trail: emit a replacement char if budget allows
          if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD);
          continue
        } else if (i + 1 === length) {
          // unpaired lead at end of string
          if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD);
          continue
        }

        // valid lead: remember it and wait for the trail
        leadSurrogate = codePoint;

        continue
      }

      // 2 leads in a row: replace the first, keep the new one pending
      if (codePoint < 0xDC00) {
        if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD);
        leadSurrogate = codePoint;
        continue
      }

      // valid surrogate pair: combine into a single astral code point
      codePoint = (leadSurrogate - 0xD800 << 10 | codePoint - 0xDC00) + 0x10000;
    } else if (leadSurrogate) {
      // valid bmp char, but last char was a lead: replace the dangling lead
      if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD);
    }

    leadSurrogate = null;

    // encode utf8: 1-4 bytes depending on the code point range
    if (codePoint < 0x80) {
      if ((units -= 1) < 0) break
      bytes.push(codePoint);
    } else if (codePoint < 0x800) {
      if ((units -= 2) < 0) break
      bytes.push(
        codePoint >> 0x6 | 0xC0,
        codePoint & 0x3F | 0x80
      );
    } else if (codePoint < 0x10000) {
      if ((units -= 3) < 0) break
      bytes.push(
        codePoint >> 0xC | 0xE0,
        codePoint >> 0x6 & 0x3F | 0x80,
        codePoint & 0x3F | 0x80
      );
    } else if (codePoint < 0x110000) {
      if ((units -= 4) < 0) break
      bytes.push(
        codePoint >> 0x12 | 0xF0,
        codePoint >> 0xC & 0x3F | 0x80,
        codePoint >> 0x6 & 0x3F | 0x80,
        codePoint & 0x3F | 0x80
      );
    } else {
      throw new Error('Invalid code point')
    }
  }

  return bytes
}
|
||
|
||
/**
 * Convert each UTF-16 code unit of `str` to a byte, keeping only the low
 * 8 bits.
 *
 * @param {string} str
 * @return {Array<number>}
 */
function asciiToBytes (str) {
  // Node's code seems to be doing this and not & 0x7F..
  return str.split('').map(ch => ch.charCodeAt(0) & 0xFF)
}
|
||
|
||
/**
 * Encode `str` as UTF-16LE bytes (low byte first for each code unit),
 * stopping once emitting another pair would exceed the `units` byte budget.
 *
 * @param {string} str
 * @param {number} units maximum number of bytes to produce
 * @return {Array<number>}
 */
function utf16leToBytes (str, units) {
  const byteArray = [];
  for (let idx = 0; idx < str.length; ++idx) {
    if ((units -= 2) < 0) break

    const code = str.charCodeAt(idx);
    const hi = code >> 8;
    const lo = code % 256;
    byteArray.push(lo, hi);
  }

  return byteArray
}
|
||
|
||
/**
 * Decode a base64 string into a byte array, after normalizing it with
 * base64clean so base64-js accepts Node-style input.
 *
 * @param {string} str
 * @return {Uint8Array}
 */
function base64ToBytes (str) {
  const normalized = base64clean(str);
  return base64Js.toByteArray(normalized)
}
|
||
|
||
/**
 * Copy up to `length` bytes from `src` into `dst` starting at `offset`,
 * stopping early if either array runs out of room.
 *
 * @return {number} the number of bytes actually copied
 */
function blitBuffer (src, dst, offset, length) {
  let copied = 0;
  while (copied < length) {
    if ((copied + offset >= dst.length) || (copied >= src.length)) break
    dst[copied + offset] = src[copied];
    copied++;
  }
  return copied
}
|
||
|
||
// ArrayBuffer or Uint8Array objects from other contexts (i.e. iframes) do not pass
// the `instanceof` check but they should be treated as of that type.
// See: https://github.com/feross/buffer/issues/166
function isInstance (obj, type) {
  if (obj instanceof type) return true
  // Cross-realm fallback: compare constructor names instead of identities.
  return obj != null && obj.constructor != null && obj.constructor.name != null &&
    obj.constructor.name === type.name
}
|
||
/**
 * NaN check that also works in IE11, which lacks Number.isNaN.
 * NaN is the only JavaScript value that is not equal to itself.
 */
function numberIsNaN (obj) {
  const selfEqual = obj === obj; // eslint-disable-line no-self-compare
  return !selfEqual
}
|
||
|
||
// Create lookup table for `toString('hex')`
// See: https://github.com/feross/buffer/issues/219
// table[b] is the two-character lowercase hex representation of byte b.
var hexSliceLookupTable = (function () {
  const alphabet = '0123456789abcdef';
  const table = new Array(256);
  for (let b = 0; b < 256; ++b) {
    table[b] = alphabet[b >> 4] + alphabet[b & 0x0F];
  }
  return table
})();
|
||
});
|
||
|
||
/**
 * Utility module to work with strings.
 *
 * @module string
 */

/**
 * @param {string} s
 * @return {string}
 */
const toLowerCase = s => s.toLowerCase();

// Matches leading whitespace (used by trimLeft).
const trimLeftRegex = /^\s*/g;

/**
 * Remove leading whitespace from a string.
 *
 * @param {string} s
 * @return {string}
 */
const trimLeft = s => s.replace(trimLeftRegex, '');

// Matches every uppercase letter (used by fromCamelCase).
const fromCamelCaseRegex = /([A-Z])/g;

/**
 * Convert a camelCase name to a separator-delimited lowercase name,
 * e.g. fromCamelCase('myProp', '-') === 'my-prop'.
 *
 * @param {string} s
 * @param {string} separator
 * @return {string}
 */
const fromCamelCase = (s, separator) => {
  const separated = s.replace(fromCamelCaseRegex, match => `${separator}${toLowerCase(match)}`);
  return trimLeft(separated)
};
|
||
|
||
/**
 * UTF-8 encode a string without TextEncoder, using the classic
 * unescape(encodeURIComponent(...)) trick to obtain one byte per character.
 *
 * @param {string} str
 * @return {Uint8Array}
 */
const _encodeUtf8Polyfill = str => {
  const encodedString = unescape(encodeURIComponent(str));
  const buf = new Uint8Array(encodedString.length);
  for (let i = 0; i < encodedString.length; i++) {
    // every character of encodedString is < 0x100, i.e. a single byte
    buf[i] = /** @type {number} */ (encodedString.codePointAt(i));
  }
  return buf
};
|
||
|
||
/* istanbul ignore next */
// Native TextEncoder instance when the platform provides one, else null.
const utf8TextEncoder = /** @type {TextEncoder} */ (typeof TextEncoder !== 'undefined' ? new TextEncoder() : null);

/**
 * UTF-8 encode using the native TextEncoder.
 *
 * @param {string} str
 * @return {Uint8Array}
 */
const _encodeUtf8Native = str => utf8TextEncoder.encode(str);

/**
 * UTF-8 encode a string; picks the native implementation when available
 * and the polyfill otherwise.
 *
 * @param {string} str
 * @return {Uint8Array}
 */
/* istanbul ignore next */
const encodeUtf8 = utf8TextEncoder ? _encodeUtf8Native : _encodeUtf8Polyfill;
|
||
|
||
/* istanbul ignore next */
// Native TextDecoder when available; `fatal: true` makes invalid UTF-8 throw.
let utf8TextDecoder = typeof TextDecoder === 'undefined' ? null : new TextDecoder('utf-8', { fatal: true, ignoreBOM: true });

/* istanbul ignore next */
if (utf8TextDecoder && utf8TextDecoder.decode(new Uint8Array()).length === 1) {
  // Safari doesn't handle BOM correctly.
  // This fixes a bug in Safari 13.0.5 where it produces a BOM the first time it is called.
  // utf8TextDecoder.decode(new Uint8Array()).length === 1 on the first call and
  // utf8TextDecoder.decode(new Uint8Array()).length === 1 on the second call
  // Another issue is that from then on no BOM chars are recognized anymore
  /* istanbul ignore next */
  utf8TextDecoder = null; // fall back to the polyfill-based string decoding
}
|
||
|
||
// Resolve the global object across environments (Node, workers, browsers);
// falls back to an empty object when none of the usual globals exist.
var global$1 = (function () {
  if (typeof global !== 'undefined') return global
  if (typeof self !== 'undefined') return self
  if (typeof window !== 'undefined') return window
  return {}
})();
|
||
|
||
// shim for using process in browser
// based off https://github.com/defunctzombie/node-process/blob/master/browser.js

// Placeholders that throw if the environment never provided timer functions.
function defaultSetTimout() {
  throw new Error('setTimeout has not been defined');
}
function defaultClearTimeout () {
  throw new Error('clearTimeout has not been defined');
}
// Capture the real timer functions at load time when they exist.
var cachedSetTimeout = typeof global$1.setTimeout === 'function' ? setTimeout : defaultSetTimout;
var cachedClearTimeout = typeof global$1.clearTimeout === 'function' ? clearTimeout : defaultClearTimeout;
|
||
|
||
/**
 * Schedule `fun` to run asap via setTimeout, tolerating environments where
 * setTimeout was missing at load time or (old IE) refuses a plain call.
 */
function runTimeout(fun) {
  if (cachedSetTimeout === setTimeout) {
    //normal enviroments in sane situations
    return setTimeout(fun, 0);
  }
  // if setTimeout wasn't available but was latter defined
  if ((cachedSetTimeout === defaultSetTimout || !cachedSetTimeout) && setTimeout) {
    cachedSetTimeout = setTimeout;
    return setTimeout(fun, 0);
  }
  try {
    // when when somebody has screwed with setTimeout but no I.E. maddness
    return cachedSetTimeout(fun, 0);
  } catch(e){
    try {
      // When we are in I.E. but the script has been evaled so I.E. doesn't trust the global object when called normally
      return cachedSetTimeout.call(null, fun, 0);
    } catch(e){
      // same as above but when it's a version of I.E. that must have the global object for 'this', hopfully our context correct otherwise it will throw a global error
      return cachedSetTimeout.call(this, fun, 0);
    }
  }


}
|
||
/**
 * Cancel a timer created by runTimeout, with the same layered fallbacks
 * for environments where clearTimeout is missing or call-restricted.
 */
function runClearTimeout(marker) {
  if (cachedClearTimeout === clearTimeout) {
    //normal enviroments in sane situations
    return clearTimeout(marker);
  }
  // if clearTimeout wasn't available but was latter defined
  if ((cachedClearTimeout === defaultClearTimeout || !cachedClearTimeout) && clearTimeout) {
    cachedClearTimeout = clearTimeout;
    return clearTimeout(marker);
  }
  try {
    // when when somebody has screwed with setTimeout but no I.E. maddness
    return cachedClearTimeout(marker);
  } catch (e){
    try {
      // When we are in I.E. but the script has been evaled so I.E. doesn't trust the global object when called normally
      return cachedClearTimeout.call(null, marker);
    } catch (e){
      // same as above but when it's a version of I.E. that must have the global object for 'this', hopfully our context correct otherwise it will throw a global error.
      // Some versions of I.E. have different rules for clearTimeout vs setTimeout
      return cachedClearTimeout.call(this, marker);
    }
  }



}
|
||
// Pending nextTick callbacks and shared drain state.
var queue$2 = [];
var draining$1 = false; // true while drainQueue is executing callbacks
var currentQueue$1;
var queueIndex$1 = -1; // index of the callback currently being run

/**
 * Timeout handler scheduled by drainQueue: resets drain state and
 * re-queues anything left over (recovers if a callback threw).
 */
function cleanUpNextTick$1() {
  if (!draining$1 || !currentQueue$1) {
    return;
  }
  draining$1 = false;
  if (currentQueue$1.length) {
    queue$2 = currentQueue$1.concat(queue$2);
  } else {
    queueIndex$1 = -1;
  }
  if (queue$2.length) {
    drainQueue();
  }
}

/**
 * Run all queued Items in order; callbacks enqueued while draining are
 * picked up by the outer while loop.
 */
function drainQueue() {
  if (draining$1) {
    return;
  }
  var timeout = runTimeout(cleanUpNextTick$1);
  draining$1 = true;

  var len = queue$2.length;
  while(len) {
    currentQueue$1 = queue$2;
    queue$2 = [];
    while (++queueIndex$1 < len) {
      if (currentQueue$1) {
        currentQueue$1[queueIndex$1].run();
      }
    }
    queueIndex$1 = -1;
    len = queue$2.length;
  }
  currentQueue$1 = null;
  draining$1 = false;
  runClearTimeout(timeout);
}

/**
 * Browser shim for process.nextTick: runs `fun` (with any extra arguments)
 * asynchronously via the setTimeout-based queue above.
 */
function nextTick$1(fun) {
  var args = new Array(arguments.length - 1);
  if (arguments.length > 1) {
    for (var i = 1; i < arguments.length; i++) {
      args[i - 1] = arguments[i];
    }
  }
  queue$2.push(new Item$2(fun, args));
  if (queue$2.length === 1 && !draining$1) {
    runTimeout(drainQueue);
  }
}
|
||
// v8 likes predictible objects
/**
 * A queued nextTick call: `fun` applied to the stored `array` of arguments
 * when run.
 */
function Item$2(fun, array) {
  this.fun = fun;
  this.array = array;
}
Item$2.prototype.run = function () {
  // Invoke without a `this` binding, spreading the stored arguments.
  const { fun, array } = this;
  fun.apply(null, array);
};
|
||
// Static process properties for the browser shim.
var title = 'browser';
var platform = 'browser';
var browser$2 = true;
var env = {};
var argv = [];
var version = ''; // empty string to avoid regexp issues
var versions = {};
var release = {};
var config$1 = {};

// Shared no-op used for all unsupported EventEmitter-style methods.
function noop$3() {}

var on = noop$3;
var addListener = noop$3;
var once$2 = noop$3;
var off = noop$3;
var removeListener = noop$3;
var removeAllListeners = noop$3;
var emit = noop$3;

/** process.binding is a native API that cannot be shimmed. */
function binding(name) {
  throw new Error('process.binding is not supported');
}

/** The browser "working directory" is always the root. */
function cwd () { return '/' }

/** Changing directory is impossible in the browser. */
function chdir (dir) {
  throw new Error('process.chdir is not supported');
}

/** File-mode masks do not apply in the browser; always 0. */
function umask() { return 0; }
|
||
|
||
// from https://github.com/kumavis/browser-process-hrtime/blob/master/index.js
var performance$1 = global$1.performance || {};
// First available high-resolution clock, trying vendor-prefixed variants;
// last resort is millisecond Date-based time.
var performanceNow =
  performance$1.now ||
  performance$1.mozNow ||
  performance$1.msNow ||
  performance$1.oNow ||
  performance$1.webkitNow ||
  function(){ return (new Date()).getTime() };

// generate timestamp or delta
// see http://nodejs.org/api/process.html#process_process_hrtime
function hrtime(previousTimestamp){
  // performance.now() is in milliseconds; convert to seconds.
  var clocktime = performanceNow.call(performance$1)*1e-3;
  var seconds = Math.floor(clocktime);
  var nanoseconds = Math.floor((clocktime%1)*1e9);
  if (previousTimestamp) {
    seconds = seconds - previousTimestamp[0];
    nanoseconds = nanoseconds - previousTimestamp[1];
    // Borrow one second when the nanosecond difference underflows.
    if (nanoseconds<0) {
      seconds--;
      nanoseconds += 1e9;
    }
  }
  return [seconds,nanoseconds]
}
|
||
|
||
// Timestamp captured when this module was evaluated.
var startTime = new Date();

/** Seconds elapsed since module load (browser stand-in for process.uptime). */
function uptime() {
  var elapsedMs = new Date() - startTime;
  return elapsedMs / 1000;
}
|
||
|
||
// Assembled process shim exposed to bundled modules that expect Node's
// `process` global. Event-emitter methods are all no-ops here.
var browser$1$1 = {
  nextTick: nextTick$1,
  title: title,
  browser: browser$2,
  env: env,
  argv: argv,
  version: version,
  versions: versions,
  on: on,
  addListener: addListener,
  once: once$2,
  off: off,
  removeListener: removeListener,
  removeAllListeners: removeAllListeners,
  emit: emit,
  binding: binding,
  cwd: cwd,
  chdir: chdir,
  umask: umask,
  hrtime: hrtime,
  platform: platform,
  release: release,
  config: config$1,
  uptime: uptime
};

// Alias used by the rest of the bundle in place of Node's `process`.
var process$1 = browser$1$1;
|
||
|
||
/**
 * Often used conditions.
 *
 * @module conditions
 */

/**
 * Map `undefined` to `null`, leaving every other value (including null)
 * untouched.
 *
 * @template T
 * @param {T|null|undefined} v
 * @return {T|null}
 */
/* istanbul ignore next */
const undefinedToNull = v => {
  if (v === undefined) {
    return null
  }
  return v
};
|
||
|
||
/* global localStorage, addEventListener */

/**
 * Isomorphic variable storage.
 *
 * Uses LocalStorage in the browser and falls back to in-memory storage.
 *
 * @module storage
 */

/* istanbul ignore next */
/**
 * Minimal in-memory stand-in for the localStorage setItem/getItem API,
 * backed by a Map.
 */
class VarStoragePolyfill {
  constructor () {
    // key → value store
    this.map = new Map();
  }

  /**
   * @param {string} key
   * @param {any} newValue
   */
  setItem (key, newValue) {
    this.map.set(key, newValue);
  }

  /**
   * @param {string} key
   * @return {any} the stored value, or undefined when absent
   */
  getItem (key) {
    return this.map.get(key)
  }
}
|
||
|
||
/* istanbul ignore next */
/**
 * @type {any}
 */
// Defaults to the in-memory polyfill; replaced by real localStorage below
// when it is accessible.
let _localStorage = new VarStoragePolyfill();
let usePolyfill = true;

try {
  // if the same-origin rule is violated, accessing localStorage might thrown an error
  /* istanbul ignore next */
  if (typeof localStorage !== 'undefined') {
    _localStorage = localStorage;
    usePolyfill = false;
  }
} catch (e) { }

/* istanbul ignore next */
/**
 * This is basically localStorage in browser, or a polyfill in nodejs
 */
const varStorage = _localStorage;
|
||
|
||
/**
 * Utility functions for working with EcmaScript objects.
 *
 * @module object
 */

/**
 * Alias for `Object.keys` — an object's own enumerable string keys.
 *
 * @param {Object<string,any>} obj
 */
const keys$1 = Object.keys;
|
||
|
||
/**
 * Invoke `f(value, key)` for every enumerable property of `obj`.
 *
 * @param {Object<string,any>} obj
 * @param {function(any,string):any} f
 */
const forEach$1 = (obj, f) => {
  // for...in (rather than Object.entries) so enumerable inherited keys
  // behave exactly as before.
  for (const prop in obj) {
    f(obj[prop], prop);
  }
};
|
||
|
||
/**
 * Collect `f(value, key)` for every enumerable property of `obj` into an
 * array.
 *
 * @template R
 * @param {Object<string,any>} obj
 * @param {function(any,string):R} f
 * @return {Array<R>}
 */
const map = (obj, f) => {
  const collected = [];
  // for...in preserves the original handling of enumerable inherited keys.
  for (const prop in obj) {
    collected.push(f(obj[prop], prop));
  }
  return collected
};
|
||
|
||
/**
 * Number of own enumerable keys of `obj`.
 *
 * @param {Object<string,any>} obj
 * @return {number}
 */
const length$1 = obj => keys$1(obj).length;
|
||
|
||
/**
 * Whether `f(value, key)` holds for every enumerable property of `obj`
 * (vacuously true for an empty object).
 *
 * @param {Object<string,any>} obj
 * @param {function(any,string):boolean} f
 * @return {boolean}
 */
const every = (obj, f) => {
  for (const prop in obj) {
    const ok = f(obj[prop], prop);
    if (!ok) {
      return false
    }
  }
  return true
};
|
||
|
||
/**
 * Calls `Object.prototype.hasOwnProperty`.
 *
 * Own-property check that stays correct even when `obj` shadows
 * `hasOwnProperty`.
 *
 * @param {any} obj
 * @param {string|symbol} key
 * @return {boolean}
 */
const hasProperty = (obj, key) => {
  const hasOwn = Object.prototype.hasOwnProperty;
  return hasOwn.call(obj, key)
};
|
||
|
||
/**
 * Shallow equality of two plain objects: same identity, or same key count
 * with every entry of `a` strictly equal in `b` (an explicitly-undefined
 * value counts only if `b` owns the key).
 *
 * @param {Object<string,any>} a
 * @param {Object<string,any>} b
 * @return {boolean}
 */
const equalFlat = (a, b) => {
  if (a === b) {
    return true
  }
  return length$1(a) === length$1(b) &&
    every(a, (val, key) => (val !== undefined || hasProperty(b, key)) && b[key] === val)
};
|
||
|
||
/**
 * Common functions and function call helpers.
 *
 * @module function
 */

/**
 * Calls all functions in `fs` with args. Only throws after all functions were called.
 *
 * If a callback throws, the remaining callbacks are still invoked (via
 * recursion from the finally block) before the first exception propagates.
 *
 * @param {Array<function>} fs
 * @param {Array<any>} args
 */
const callAll = (fs, args, i = 0) => {
  try {
    while (i < fs.length) {
      fs[i](...args);
      i++;
    }
  } finally {
    if (i < fs.length) {
      // A callback threw at index i — keep going with the rest.
      callAll(fs, args, i + 1);
    }
  }
};
|
||
|
||
/**
 * Whether `value` is one of the allowed `options`.
 *
 * @template V
 * @template {V} OPTS
 *
 * @param {V} value
 * @param {Array<OPTS>} options
 */
// @ts-ignore
const isOneOf = (value, options) => {
  // Array#includes uses SameValueZero, so NaN matches NaN.
  return options.includes(value)
};
|
||
|
||
/* istanbul ignore next */
// @ts-ignore
// True when running under Node.js (or io.js), detected via process.release.
const isNode = typeof process$1 !== 'undefined' && process$1.release &&
  /node|io\.js/.test(process$1.release.name);
/* istanbul ignore next */
// True when a `window` global exists and we are not in Node.
const isBrowser = typeof window !== 'undefined' && !isNode;
/* istanbul ignore next */
// Result intentionally discarded — appears to be the residue of a
// tree-shaken "isMac" export left behind by the bundler.
typeof navigator !== 'undefined'
  ? /Mac/.test(navigator.platform)
  : false;
|
||
|
||
/**
 * Cached command-line / query-string parameters.
 *
 * @type {Map<string,string>}
 */
let params;

/* istanbul ignore next */
/**
 * Lazily parse parameters exactly once: from `process.argv` under Node,
 * from `location.search` in the browser, otherwise an empty map.
 */
const computeParams = () => {
  if (params === undefined) {
    if (isNode) {
      params = create$6();
      const pargs = process$1.argv;
      let currParamName = null;
      /* istanbul ignore next */
      for (let i = 0; i < pargs.length; i++) {
        const parg = pargs[i];
        if (parg[0] === '-') {
          // New flag: flush the previous flag with an empty value.
          if (currParamName !== null) {
            params.set(currParamName, '');
          }
          currParamName = parg;
        } else {
          // Plain argument: treat it as the value of the preceding flag.
          if (currParamName !== null) {
            params.set(currParamName, parg);
            currParamName = null;
          }
        }
      }
      // Flush a trailing value-less flag.
      if (currParamName !== null) {
        params.set(currParamName, '');
      }
      // in ReactNative for example this would not be true (unless connected to the Remote Debugger)
    } else if (typeof location === 'object') {
      params = create$6(); // eslint-disable-next-line no-undef
      // Each query-string pair is stored under both `--key` and `-key`,
      // with camelCase keys converted to dashed form.
      (location.search || '?').slice(1).split('&').forEach((kv) => {
        if (kv.length !== 0) {
          const [key, value] = kv.split('=');
          params.set(`--${fromCamelCase(key, '-')}`, value);
          params.set(`-${fromCamelCase(key, '-')}`, value);
        }
      });
    } else {
      params = create$6();
    }
  }
  return params
};
|
||
|
||
/**
 * Whether the parameter `name` was supplied (CLI flag or query param).
 *
 * @param {string} name
 * @return {boolean}
 */
/* istanbul ignore next */
const hasParam = (name) => computeParams().has(name);

/**
 * Parameter value for `name`, or `defaultVal` when absent or empty
 * (falls back via `||`, so an empty-string value also yields the default).
 *
 * @param {string} name
 * @param {string} defaultVal
 * @return {string}
 */
/* istanbul ignore next */
const getParam = (name, defaultVal) =>
  computeParams().get(name) || defaultVal;
// export const getArgs = name => computeParams() && args

/**
 * Environment variable lookup: `process.env` (upper-cased name) under
 * Node, varStorage otherwise; missing values become null.
 *
 * @param {string} name
 * @return {string|null}
 */
/* istanbul ignore next */
const getVariable = (name) =>
  isNode
    ? undefinedToNull(process$1.env[name.toUpperCase()])
    : undefinedToNull(varStorage.getItem(name));

/**
 * Whether the configuration `name` is set either as a `--name` parameter
 * or as an environment/storage variable.
 *
 * @param {string} name
 * @return {boolean}
 */
/* istanbul ignore next */
const hasConf = (name) =>
  hasParam('--' + name) || getVariable(name) !== null;

/* istanbul ignore next */
// Result discarded (likely a tree-shaken `production` export); evaluating
// it also pre-populates the params cache.
hasConf('production');
|
||
|
||
/* istanbul ignore next */
// FORCE_COLOR=true/1/2 forces colored output under Node.
const forceColor = isNode &&
  isOneOf(process$1.env.FORCE_COLOR, ['true', '1', '2']);

/* istanbul ignore next */
// Heuristic terminal-color detection: disabled by `no-colors`; under Node
// requires a TTY (or forcing) plus a color-capable TERM/COLORTERM or an
// explicit `color` flag. Outside Node only the `no-colors` flag matters.
const supportsColor = !hasParam('no-colors') &&
  (!isNode || process$1.stdout.isTTY || forceColor) && (
    !isNode || hasParam('color') || forceColor ||
    getVariable('COLORTERM') !== null ||
    (getVariable('TERM') || '').includes('color')
  );
|
||
|
||
/* eslint-env browser */

/**
 * Binary data constants.
 *
 * @module binary
 */

// Single activated bits: BIT<n> has only bit n set (BIT<n> = 2^(n-1)).
const BIT1 = 0b1;
const BIT2 = 0b10;
const BIT3 = 0b100;
const BIT4 = 0b1000;
const BIT6 = 0b100000;
const BIT7 = 0b1000000;
const BIT8 = 0b10000000;
// Masks with the n lowest bits set: BITS<n> = 2^n - 1.
const BITS5 = 0b11111;
const BITS6 = 0b111111;
const BITS7 = 0b1111111;
const BITS8 = 0b11111111;
/**
 * @type {number}
 */
const BITS31 = 0x7FFFFFFF;
/**
 * @type {number}
 */
const BITS32 = 0xFFFFFFFF;
|
||
|
||
/**
 * Utility helpers for working with numbers.
 *
 * @module number
 */

// Largest integer exactly representable as a double (2^53 - 1).
const MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER;

/**
 * @module number
 */

/* istanbul ignore next */
// Native Number.isInteger when present, otherwise a manual fallback
// (`floor` comes from the bundled math module, defined elsewhere).
const isInteger = Number.isInteger || (num => typeof num === 'number' && isFinite(num) && floor(num) === num);
|
||
|
||
/**
 * Error helpers.
 *
 * @module error
 */

/* istanbul ignore next */
/**
 * Create a new Error with message `s`.
 *
 * @param {string} s
 * @return {Error}
 */
const create$4 = s => new Error(s);

/* istanbul ignore next */
/**
 * Abstract-method placeholder: always throws.
 *
 * @throws {Error}
 * @return {never}
 */
const methodUnimplemented = () => {
  throw new Error('Method unimplemented')
};

/* istanbul ignore next */
/**
 * Unreachable-branch guard: always throws.
 *
 * @throws {Error}
 * @return {never}
 */
const unexpectedCase = () => {
  throw new Error('Unexpected case')
};
|
||
|
||
/**
 * Efficient schema-less binary decoding with support for variable length encoding.
 *
 * Use [lib0/decoding] with [lib0/encoding]. Every encoding function has a corresponding decoding function.
 *
 * Encodes numbers in little-endian order (least to most significant byte order)
 * and is compatible with Golang's binary encoding (https://golang.org/pkg/encoding/binary/)
 * which is also used in Protocol Buffers.
 *
 * ```js
 * // encoding step
 * const encoder = new encoding.createEncoder()
 * encoding.writeVarUint(encoder, 256)
 * encoding.writeVarString(encoder, 'Hello world!')
 * const buf = encoding.toUint8Array(encoder)
 * ```
 *
 * ```js
 * // decoding step
 * const decoder = new decoding.createDecoder(buf)
 * decoding.readVarUint(decoder) // => 256
 * decoding.readVarString(decoder) // => 'Hello world!'
 * decoding.hasContent(decoder) // => false - all data is read
 * ```
 *
 * @module decoding
 */

// Shared error instances thrown by the readers below.
const errorUnexpectedEndOfArray = create$4('Unexpected end of array');
const errorIntegerOutOfRange = create$4('Integer out of Range');
|
||
|
||
/**
 * A Decoder handles the decoding of an Uint8Array.
 */
class Decoder {
  /**
   * @param {Uint8Array} uint8Array Binary data to decode
   */
  constructor (uint8Array) {
    /**
     * Decoding target.
     *
     * @type {Uint8Array}
     */
    this.arr = uint8Array;
    /**
     * Current decoding position.
     *
     * @type {number}
     */
    this.pos = 0;
  }
}

/**
 * Convenience factory for a Decoder over `uint8Array`.
 *
 * @function
 * @param {Uint8Array} uint8Array
 * @return {Decoder}
 */
const createDecoder = uint8Array => new Decoder(uint8Array);
|
||
|
||
/**
 * Whether there are unread bytes left in the decoder.
 *
 * @function
 * @param {Decoder} decoder
 * @return {boolean}
 */
const hasContent = decoder => {
  return decoder.pos !== decoder.arr.length
};
|
||
|
||
/**
 * Create an Uint8Array view of the next `len` bytes and advance the position by `len`.
 *
 * Important: The Uint8Array still points to the underlying ArrayBuffer. Make sure to discard the result as soon as possible to prevent any memory leaks.
 * Use `buffer.copyUint8Array` to copy the result into a new Uint8Array.
 *
 * @function
 * @param {Decoder} decoder The decoder instance
 * @param {number} len The length of bytes to read
 * @return {Uint8Array}
 */
const readUint8Array = (decoder, len) => {
  // Offset into the backing buffer, accounting for the view's byteOffset.
  const start = decoder.pos + decoder.arr.byteOffset;
  const view = createUint8ArrayViewFromArrayBuffer(decoder.arr.buffer, start, len);
  decoder.pos += len;
  return view
};
|
||
|
||
/**
 * Read variable length Uint8Array.
 *
 * Important: The Uint8Array still points to the underlying ArrayBuffer. Make sure to discard the result as soon as possible to prevent any memory leaks.
 * Use `buffer.copyUint8Array` to copy the result into a new Uint8Array.
 *
 * @function
 * @param {Decoder} decoder
 * @return {Uint8Array}
 */
// The payload is prefixed with its byte length encoded as a varUint.
const readVarUint8Array = decoder => readUint8Array(decoder, readVarUint(decoder));
|
||
|
||
/**
 * Read one byte as unsigned integer and advance the position.
 *
 * @function
 * @param {Decoder} decoder The decoder instance
 * @return {number} Unsigned 8-bit integer
 */
const readUint8 = decoder => {
  const byte = decoder.arr[decoder.pos];
  decoder.pos += 1;
  return byte
};
|
||
|
||
/**
 * Read unsigned integer (32bit) with variable length.
 * 1/8th of the storage is used as encoding overhead.
 * * numbers < 2^7 are stored in one byte
 * * numbers < 2^14 are stored in two bytes
 *
 * @function
 * @param {Decoder} decoder
 * @return {number} An unsigned integer
 */
const readVarUint = decoder => {
  let result = 0;
  let multiplier = 1;
  const end = decoder.arr.length;
  while (decoder.pos < end) {
    const byte = decoder.arr[decoder.pos++];
    // Accumulate the low 7 bits, scaled by the current 128^k multiplier
    // (equivalent to shifting byte << (7 * iteration)).
    result += (byte & BITS7) * multiplier;
    multiplier *= 128;
    if (byte < BIT8) {
      // Continuation bit clear → this was the final byte.
      return result
    }
    /* istanbul ignore if */
    if (result > MAX_SAFE_INTEGER) {
      throw errorIntegerOutOfRange
    }
  }
  throw errorUnexpectedEndOfArray
};
|
||
|
||
/**
 * Read signed integer (32bit) with variable length.
 * 1/8th of the storage is used as encoding overhead.
 * * numbers < 2^7 are stored in one byte
 * * numbers < 2^14 are stored in two bytes
 * @todo This should probably create the inverse ~num if number is negative - but this would be a breaking change.
 *
 * @function
 * @param {Decoder} decoder
 * @return {number} A signed integer
 */
const readVarInt = decoder => {
  // First byte layout: bit 8 = continuation, bit 7 = sign, bits 1-6 = payload.
  let byte = decoder.arr[decoder.pos++];
  let magnitude = byte & BITS6;
  let multiplier = 64;
  const sign = (byte & BIT7) > 0 ? -1 : 1;
  if ((byte & BIT8) === 0) {
    // Continuation bit clear → single-byte value.
    return sign * magnitude
  }
  const end = decoder.arr.length;
  while (decoder.pos < end) {
    byte = decoder.arr[decoder.pos++];
    // Subsequent bytes carry 7 payload bits each.
    magnitude += (byte & BITS7) * multiplier;
    multiplier *= 128;
    if (byte < BIT8) {
      return sign * magnitude
    }
    /* istanbul ignore if */
    if (magnitude > MAX_SAFE_INTEGER) {
      throw errorIntegerOutOfRange
    }
  }
  throw errorUnexpectedEndOfArray
};
|
||
|
||
/**
 * We don't test this function anymore as we use native decoding/encoding by default now.
 * Better not modify this anymore..
 *
 * Transforming utf8 to a string is pretty expensive. The code performs 10x better
 * when String.fromCodePoint is fed with all characters as arguments.
 * But most environments have a maximum number of arguments per functions.
 * For effiency reasons we apply a maximum of 10000 characters at once.
 *
 * @function
 * @param {Decoder} decoder
 * @return {String} The read String.
 */
/* istanbul ignore next */
const _readVarStringPolyfill = decoder => {
  let remainingLen = readVarUint(decoder);
  if (remainingLen === 0) {
    return ''
  } else {
    let encodedString = String.fromCodePoint(readUint8(decoder)); // remember to decrease remainingLen
    if (--remainingLen < 100) { // do not create a Uint8Array for small strings
      // Small string: read byte-by-byte.
      while (remainingLen--) {
        encodedString += String.fromCodePoint(readUint8(decoder));
      }
    } else {
      // Large string: feed fromCodePoint in chunks of at most 10000 chars.
      while (remainingLen > 0) {
        const nextLen = remainingLen < 10000 ? remainingLen : 10000;
        // this is dangerous, we create a fresh array view from the existing buffer
        const bytes = decoder.arr.subarray(decoder.pos, decoder.pos + nextLen);
        decoder.pos += nextLen;
        // Starting with ES5.1 we can supply a generic array-like object as arguments
        encodedString += String.fromCodePoint.apply(null, /** @type {any} */ (bytes));
        remainingLen -= nextLen;
      }
    }
    // Convert the byte-per-char string into a proper UTF-8 decoded string.
    return decodeURIComponent(escape(encodedString))
  }
};
|
||
|
||
/**
 * Native path: decode a length-prefixed UTF-8 byte range with TextDecoder.
 *
 * @function
 * @param {Decoder} decoder
 * @return {String} The read String
 */
const _readVarStringNative = decoder =>
  /** @type any */ (utf8TextDecoder).decode(readVarUint8Array(decoder));

/**
 * Read string of variable length
 * * varUint is used to store the length of the string
 *
 * Uses the native TextDecoder when available, the polyfill otherwise.
 *
 * @function
 * @param {Decoder} decoder
 * @return {String} The read String
 *
 */
/* istanbul ignore next */
const readVarString = utf8TextDecoder ? _readVarStringNative : _readVarStringPolyfill;
|
||
|
||
/**
 * Create a DataView over the next `len` bytes and advance the position.
 *
 * @param {Decoder} decoder
 * @param {number} len
 * @return {DataView}
 */
const readFromDataView = (decoder, len) => {
  const start = decoder.arr.byteOffset + decoder.pos;
  const dv = new DataView(decoder.arr.buffer, start, len);
  decoder.pos += len;
  return dv
};

/**
 * Read a big-endian 32-bit float.
 *
 * @param {Decoder} decoder
 */
const readFloat32 = decoder => readFromDataView(decoder, 4).getFloat32(0, false);

/**
 * Read a big-endian 64-bit float.
 *
 * @param {Decoder} decoder
 */
const readFloat64 = decoder => readFromDataView(decoder, 8).getFloat64(0, false);

/**
 * Read a big-endian signed 64-bit integer as a BigInt.
 *
 * @param {Decoder} decoder
 */
const readBigInt64 = decoder => /** @type {any} */ (readFromDataView(decoder, 8)).getBigInt64(0, false);
|
||
|
||
/**
 * Dispatch table for readAny; the reader index is `127 - typeByte`.
 *
 * @type {Array<function(Decoder):any>}
 */
const readAnyLookupTable = [
  decoder => undefined, // CASE 127: undefined
  decoder => null, // CASE 126: null
  readVarInt, // CASE 125: integer
  readFloat32, // CASE 124: float32
  readFloat64, // CASE 123: float64
  readBigInt64, // CASE 122: bigint
  decoder => false, // CASE 121: boolean (false)
  decoder => true, // CASE 120: boolean (true)
  readVarString, // CASE 119: string
  decoder => { // CASE 118: object<string,any>
    // varUint entry count, then alternating string keys and any values.
    const len = readVarUint(decoder);
    /**
     * @type {Object<string,any>}
     */
    const obj = {};
    for (let i = 0; i < len; i++) {
      const key = readVarString(decoder);
      obj[key] = readAny(decoder);
    }
    return obj
  },
  decoder => { // CASE 117: array<any>
    // varUint element count, then that many any-typed values.
    const len = readVarUint(decoder);
    const arr = [];
    for (let i = 0; i < len; i++) {
      arr.push(readAny(decoder));
    }
    return arr
  },
  readVarUint8Array // CASE 116: Uint8Array
];

/**
 * Read one value of any supported type: a single type byte selects the
 * reader from the lookup table above.
 *
 * @param {Decoder} decoder
 */
const readAny = decoder => readAnyLookupTable[127 - readUint8(decoder)](decoder);
|
||
|
||
/**
|
||
* T must not be null.
|
||
*
|
||
* @template T
|
||
*/
|
||
class RleDecoder extends Decoder {
  /**
   * @param {Uint8Array} uint8Array
   * @param {function(Decoder):T} reader Reads a single run value from the stream.
   */
  constructor (uint8Array, reader) {
    super(uint8Array);
    /**
     * The reader used to decode one run value.
     */
    this.reader = reader;
    /**
     * Current state — the value of the run currently being replayed.
     * @type {T|null}
     */
    this.s = null;
    // Remaining repetitions of `s` before a new run must be decoded
    // (-1 means: repeat the current value forever).
    this.count = 0;
  }

  /**
   * Return the next value of the run-length encoded sequence.
   */
  read () {
    if (this.count === 0) {
      // Current run exhausted: decode the next value and its repeat count.
      this.s = this.reader(this);
      if (hasContent(this)) {
        this.count = readVarUint(this) + 1; // see encoder implementation for the reason why this is incremented
      } else {
        this.count = -1; // read the current value forever
      }
    }
    this.count--;
    return /** @type {T} */ (this.s)
  }
}
|
||
|
||
class UintOptRleDecoder extends Decoder {
  /**
   * Counterpart to UintOptRleEncoder: a negative value signals that a repeat
   * count follows; a positive value stands for itself exactly once.
   *
   * @param {Uint8Array} uint8Array
   */
  constructor (uint8Array) {
    super(uint8Array);
    /**
     * Value of the run currently being replayed.
     * @type {number}
     */
    this.s = 0;
    // Remaining repetitions of `s` before the next run is decoded.
    this.count = 0;
  }

  /**
   * Return the next unsigned integer of the sequence.
   */
  read () {
    if (this.count === 0) {
      this.s = readVarInt(this);
      // if the sign is negative, we read the count too, otherwise count is 1
      const isNegative = isNegativeZero(this.s);
      this.count = 1;
      if (isNegative) {
        this.s = -this.s;
        // +2 undoes the encoder's `count - 2` offset.
        this.count = readVarUint(this) + 2;
      }
    }
    this.count--;
    return /** @type {number} */ (this.s)
  }
}
|
||
|
||
class IntDiffOptRleDecoder extends Decoder {
  /**
   * Counterpart to IntDiffOptRleEncoder: values are delta-encoded and the LSB
   * of each written diff signals whether a repeat count follows.
   *
   * @param {Uint8Array} uint8Array
   */
  constructor (uint8Array) {
    super(uint8Array);
    /**
     * Current absolute value (running sum of all diffs applied so far).
     * @type {number}
     */
    this.s = 0;
    // Remaining repetitions of the current diff.
    this.count = 0;
    // The diff added to `s` on each read of the current run.
    this.diff = 0;
  }

  /**
   * @return {number}
   */
  read () {
    if (this.count === 0) {
      const diff = readVarInt(this);
      // if the first bit is set, we read more data
      const hasCount = diff & 1;
      // `floor(x / 2)` instead of `>> 1` — presumably to stay correct beyond
      // 32 bit values, matching the division-based shifts used by the
      // var-int writers (TODO confirm).
      this.diff = floor(diff / 2); // shift >> 1
      this.count = 1;
      if (hasCount) {
        // +2 undoes the encoder's `count - 2` offset.
        this.count = readVarUint(this) + 2;
      }
    }
    this.s += this.diff;
    this.count--;
    return this.s
  }
}
|
||
|
||
class StringDecoder$2 {
  /**
   * Counterpart to StringEncoder: the payload is one concatenated string
   * followed by the run-length encoded lengths of the individual strings.
   *
   * @param {Uint8Array} uint8Array
   */
  constructor (uint8Array) {
    this.decoder = new UintOptRleDecoder(uint8Array);
    // Decode the whole concatenated string up-front; `read` only slices it.
    this.str = readVarString(this.decoder);
    /**
     * Current read position inside `str`.
     * @type {number}
     */
    this.spos = 0;
  }

  /**
   * Return the next string of the sequence.
   *
   * @return {string}
   */
  read () {
    // The next run-length decoded value is the length of the next string.
    const end = this.spos + this.decoder.read();
    const res = this.str.slice(this.spos, end);
    this.spos = end;
    return res
  }
}
|
||
|
||
/**
 * Allocate a zero-initialized Uint8Array of the given length.
 *
 * @param {number} len
 */
const createUint8ArrayFromLen = len => new Uint8Array(len);

/**
 * Create a Uint8Array view over a slice of an existing ArrayBuffer (no copy).
 *
 * @param {ArrayBuffer} buffer
 * @param {number} byteOffset
 * @param {number} length
 */
const createUint8ArrayViewFromArrayBuffer = (buffer, byteOffset, length) => new Uint8Array(buffer, byteOffset, length);

/**
 * Copy the content of an Uint8Array view to a new ArrayBuffer.
 *
 * @param {Uint8Array} uint8Array
 * @return {Uint8Array}
 */
const copyUint8Array = uint8Array => {
  const clone = createUint8ArrayFromLen(uint8Array.byteLength);
  clone.set(uint8Array);
  return clone
};
|
||
|
||
/**
 * Encode anything as a UInt8Array. It's a pun on typescripts's `any` type.
 * See encoding.writeAny for more information.
 *
 * Inverse of `decodeAny`.
 *
 * @param {any} data
 * @return {Uint8Array}
 */
const encodeAny = data => {
  const encoder = createEncoder();
  writeAny(encoder, data);
  return toUint8Array(encoder)
};

/**
 * Decode an any-encoded value. Inverse of `encodeAny`.
 *
 * @param {Uint8Array} buf
 * @return {any}
 */
const decodeAny = buf => readAny(createDecoder(buf));
|
||
|
||
/**
|
||
* Efficient schema-less binary encoding with support for variable length encoding.
|
||
*
|
||
* Use [lib0/encoding] with [lib0/decoding]. Every encoding function has a corresponding decoding function.
|
||
*
|
||
* Encodes numbers in little-endian order (least to most significant byte order)
|
||
* and is compatible with Golang's binary encoding (https://golang.org/pkg/encoding/binary/)
|
||
* which is also used in Protocol Buffers.
|
||
*
|
||
* ```js
|
||
* // encoding step
|
||
* const encoder = new encoding.createEncoder()
|
||
* encoding.writeVarUint(encoder, 256)
|
||
* encoding.writeVarString(encoder, 'Hello world!')
|
||
* const buf = encoding.toUint8Array(encoder)
|
||
* ```
|
||
*
|
||
* ```js
|
||
* // decoding step
|
||
* const decoder = new decoding.createDecoder(buf)
|
||
* decoding.readVarUint(decoder) // => 256
|
||
* decoding.readVarString(decoder) // => 'Hello world!'
|
||
* decoding.hasContent(decoder) // => false - all data is read
|
||
* ```
|
||
*
|
||
* @module encoding
|
||
*/
|
||
|
||
/**
 * A BinaryEncoder handles the encoding to an Uint8Array.
 */
class Encoder {
  constructor () {
    // Write position inside the current working buffer.
    this.cpos = 0;
    // Current working buffer; replaced by a larger one when it fills up.
    this.cbuf = new Uint8Array(100);
    /**
     * Previously filled buffers, in write order.
     * @type {Array<Uint8Array>}
     */
    this.bufs = [];
  }
}

/**
 * Create a fresh Encoder.
 *
 * @function
 * @return {Encoder}
 */
const createEncoder = () => new Encoder();

/**
 * The current length of the encoded data: all archived buffers plus the
 * written prefix of the working buffer.
 *
 * @function
 * @param {Encoder} encoder
 * @return {number}
 */
const length = encoder => encoder.bufs.reduce((total, buf) => total + buf.length, encoder.cpos);
|
||
|
||
/**
 * Transform to Uint8Array.
 *
 * Concatenates all archived buffers plus the written prefix of the current
 * working buffer into one freshly allocated array.
 *
 * @function
 * @param {Encoder} encoder
 * @return {Uint8Array} The created ArrayBuffer.
 */
const toUint8Array = encoder => {
  const result = new Uint8Array(length(encoder));
  let offset = 0;
  for (const chunk of encoder.bufs) {
    result.set(chunk, offset);
    offset += chunk.length;
  }
  result.set(createUint8ArrayViewFromArrayBuffer(encoder.cbuf.buffer, 0, encoder.cpos), offset);
  return result
};
|
||
|
||
/**
 * Verify that it is possible to write `len` bytes without checking. If
 * necessary, a new Buffer with the required length is attached.
 *
 * @param {Encoder} encoder
 * @param {number} len
 */
const verifyLen = (encoder, len) => {
  const bufferLen = encoder.cbuf.length;
  if (bufferLen - encoder.cpos < len) {
    // Archive the written prefix of cbuf (as a view, no copy) and continue
    // in a new buffer that is doubled and at least `len` bytes large.
    encoder.bufs.push(createUint8ArrayViewFromArrayBuffer(encoder.cbuf.buffer, 0, encoder.cpos));
    encoder.cbuf = new Uint8Array(max(bufferLen, len) * 2);
    encoder.cpos = 0;
  }
};

/**
 * Write one byte to the encoder.
 *
 * @function
 * @param {Encoder} encoder
 * @param {number} num The byte that is to be encoded.
 */
const write = (encoder, num) => {
  const bufferLen = encoder.cbuf.length;
  if (encoder.cpos === bufferLen) {
    // Working buffer is full: archive it and continue in a doubled buffer.
    encoder.bufs.push(encoder.cbuf);
    encoder.cbuf = new Uint8Array(bufferLen * 2);
    encoder.cpos = 0;
  }
  encoder.cbuf[encoder.cpos++] = num;
};
|
||
|
||
/**
 * Write one byte as an unsigned integer. Alias of `write` — a byte and a
 * uint8 are encoded identically.
 *
 * @function
 * @param {Encoder} encoder
 * @param {number} num The number that is to be encoded.
 */
const writeUint8 = write;

/**
 * Write a variable length unsigned integer. Max encodable integer is 2^53.
 *
 * 7 data bits per byte, least significant group first; the high bit of each
 * byte marks that another byte follows.
 *
 * @function
 * @param {Encoder} encoder
 * @param {number} num The number that is to be encoded.
 */
const writeVarUint = (encoder, num) => {
  while (num > BITS7) {
    write(encoder, BIT8 | (BITS7 & num));
    num = floor(num / 128); // shift >>> 7 (division stays correct beyond 32 bits)
  }
  write(encoder, BITS7 & num);
};

/**
 * Write a variable length integer.
 *
 * We use the 7th bit instead for signaling that this is a negative number.
 * The first byte therefore carries only 6 data bits; subsequent bytes carry 7.
 *
 * @function
 * @param {Encoder} encoder
 * @param {number} num The number that is to be encoded.
 */
const writeVarInt = (encoder, num) => {
  const isNegative = isNegativeZero(num);
  if (isNegative) {
    num = -num;
  }
  //             |- whether to continue reading         |- whether is negative     |- number
  write(encoder, (num > BITS6 ? BIT8 : 0) | (isNegative ? BIT7 : 0) | (BITS6 & num));
  num = floor(num / 64); // shift >>> 6
  // We don't need to consider the case of num === 0 so we can use a different
  // pattern here than above.
  while (num > 0) {
    write(encoder, (num > BITS7 ? BIT8 : 0) | (BITS7 & num));
    num = floor(num / 128); // shift >>> 7
  }
};
|
||
|
||
/**
 * A cache to store strings temporarily
 */
const _strBuffer = new Uint8Array(30000);
// A UTF-16 code unit encodes to at most 3 UTF-8 bytes, so only strings
// shorter than a third of the scratch buffer are guaranteed to fit.
const _maxStrBSize = _strBuffer.length / 3;

/**
 * Write a variable length string using the native `TextEncoder.encodeInto`
 * and the shared scratch buffer.
 *
 * @function
 * @param {Encoder} encoder
 * @param {String} str The string that is to be encoded.
 */
const _writeVarStringNative = (encoder, str) => {
  if (str.length < _maxStrBSize) {
    // We can encode the string into the existing buffer
    /* istanbul ignore else */
    const written = utf8TextEncoder.encodeInto(str, _strBuffer).written || 0;
    writeVarUint(encoder, written);
    for (let i = 0; i < written; i++) {
      write(encoder, _strBuffer[i]);
    }
  } else {
    // Too large for the scratch buffer: encode into a fresh array instead.
    writeVarUint8Array(encoder, encodeUtf8(str));
  }
};

/**
 * Write a variable length string without relying on TextEncoder.
 *
 * NOTE(review): uses the deprecated `unescape`/`encodeURIComponent`
 * round-trip to obtain UTF-8 bytes — a common legacy technique, kept here
 * only as a fallback when TextEncoder is unavailable.
 *
 * @function
 * @param {Encoder} encoder
 * @param {String} str The string that is to be encoded.
 */
const _writeVarStringPolyfill = (encoder, str) => {
  const encodedString = unescape(encodeURIComponent(str));
  const len = encodedString.length;
  writeVarUint(encoder, len);
  for (let i = 0; i < len; i++) {
    write(encoder, /** @type {number} */ (encodedString.codePointAt(i)));
  }
};

/**
 * Write a variable length string (byte length as var-uint, then the UTF-8
 * payload). Picks the native implementation when `encodeInto` exists.
 *
 * @function
 * @param {Encoder} encoder
 * @param {String} str The string that is to be encoded.
 */
/* istanbul ignore next */
const writeVarString = (utf8TextEncoder && utf8TextEncoder.encodeInto) ? _writeVarStringNative : _writeVarStringPolyfill;
|
||
|
||
/**
 * Append fixed-length Uint8Array to the encoder (no length prefix is written).
 *
 * @function
 * @param {Encoder} encoder
 * @param {Uint8Array} uint8Array
 */
const writeUint8Array = (encoder, uint8Array) => {
  const bufferLen = encoder.cbuf.length;
  const cpos = encoder.cpos;
  // Portion that still fits into the current working buffer.
  const leftCopyLen = min(bufferLen - cpos, uint8Array.length);
  const rightCopyLen = uint8Array.length - leftCopyLen;
  encoder.cbuf.set(uint8Array.subarray(0, leftCopyLen), cpos);
  encoder.cpos += leftCopyLen;
  if (rightCopyLen > 0) {
    // Still something to write, write right half..
    // Append new buffer
    encoder.bufs.push(encoder.cbuf);
    // must have at least size of remaining buffer
    encoder.cbuf = new Uint8Array(max(bufferLen * 2, rightCopyLen));
    // copy array
    encoder.cbuf.set(uint8Array.subarray(leftCopyLen));
    encoder.cpos = rightCopyLen;
  }
};

/**
 * Append an Uint8Array to Encoder, prefixed with its byte length as a
 * var-uint.
 *
 * @function
 * @param {Encoder} encoder
 * @param {Uint8Array} uint8Array
 */
const writeVarUint8Array = (encoder, uint8Array) => {
  writeVarUint(encoder, uint8Array.byteLength);
  writeUint8Array(encoder, uint8Array);
};
|
||
|
||
/**
 * Create an DataView of the next `len` bytes. Use it to write data after
 * calling this function.
 *
 * ```js
 * // write float32 using DataView
 * const dv = writeOnDataView(encoder, 4)
 * dv.setFloat32(0, 1.1)
 * // read float32 using DataView
 * const dv = readFromDataView(encoder, 4)
 * dv.getFloat32(0) // => 1.100000023841858 (leaving it to the reader to find out why this is the correct result)
 * ```
 *
 * @param {Encoder} encoder
 * @param {number} len
 * @return {DataView}
 */
const writeOnDataView = (encoder, len) => {
  // Make sure `len` bytes fit contiguously in cbuf, then reserve them.
  verifyLen(encoder, len);
  const dview = new DataView(encoder.cbuf.buffer, encoder.cpos, len);
  encoder.cpos += len;
  return dview
};

/**
 * Write a 32 bit float in big-endian order (`false` = big-endian).
 *
 * @param {Encoder} encoder
 * @param {number} num
 */
const writeFloat32 = (encoder, num) => writeOnDataView(encoder, 4).setFloat32(0, num, false);

/**
 * Write a 64 bit float in big-endian order.
 *
 * @param {Encoder} encoder
 * @param {number} num
 */
const writeFloat64 = (encoder, num) => writeOnDataView(encoder, 8).setFloat64(0, num, false);

/**
 * Write a 64 bit signed BigInt in big-endian order.
 *
 * @param {Encoder} encoder
 * @param {bigint} num
 */
const writeBigInt64 = (encoder, num) => /** @type {any} */ (writeOnDataView(encoder, 8)).setBigInt64(0, num, false);
|
||
|
||
// Scratch slot for round-tripping a number through IEEE-754 binary32.
const floatTestBed = new DataView(new ArrayBuffer(4));

/**
 * Check if a number can be encoded as a 32 bit float without precision loss.
 *
 * @param {number} num
 * @return {boolean}
 */
const isFloat32 = num => {
  // Write as binary32, read it back: lossless values compare equal.
  floatTestBed.setFloat32(0, num);
  const roundTripped = floatTestBed.getFloat32(0);
  return roundTripped === num
};
|
||
|
||
/**
|
||
* Encode data with efficient binary format.
|
||
*
|
||
* Differences to JSON:
|
||
* • Transforms data to a binary format (not to a string)
|
||
* • Encodes undefined, NaN, and ArrayBuffer (these can't be represented in JSON)
|
||
* • Numbers are efficiently encoded either as a variable length integer, as a
|
||
* 32 bit float, as a 64 bit float, or as a 64 bit bigint.
|
||
*
|
||
* Encoding table:
|
||
*
|
||
* | Data Type | Prefix | Encoding Method | Comment |
|
||
* | ------------------- | -------- | ------------------ | ------- |
|
||
* | undefined | 127 | | Functions, symbol, and everything that cannot be identified is encoded as undefined |
|
||
* | null | 126 | | |
|
||
* | integer | 125 | writeVarInt | Only encodes 32 bit signed integers |
|
||
* | float32 | 124 | writeFloat32 | |
|
||
* | float64 | 123 | writeFloat64 | |
|
||
* | bigint | 122 | writeBigInt64 | |
|
||
* | boolean (false) | 121 | | True and false are different data types so we save the following byte |
|
||
* | boolean (true) | 120 | | - 0b01111000 so the last bit determines whether true or false |
|
||
* | string | 119 | writeVarString | |
|
||
* | object<string,any> | 118 | custom | Writes {length} then {length} key-value pairs |
|
||
* | array<any> | 117 | custom | Writes {length} then {length} json values |
|
||
* | Uint8Array | 116 | writeVarUint8Array | We use Uint8Array for any kind of binary data |
|
||
*
|
||
* Reasons for the decreasing prefix:
|
||
* We need the first bit for extendability (later we may want to encode the
|
||
* prefix with writeVarUint). The remaining 7 bits are divided as follows:
|
||
* [0-30] the beginning of the data range is used for custom purposes
|
||
* (defined by the function that uses this library)
|
||
* [31-127] the end of the data range is used for data encoding by
|
||
* lib0/encoding.js
|
||
*
|
||
* @param {Encoder} encoder
|
||
* @param {undefined|null|number|bigint|boolean|string|Object<string,any>|Array<any>|Uint8Array} data
|
||
*/
|
||
const writeAny = (encoder, data) => {
  // Write a one-byte type prefix (see encoding table above), then the payload.
  switch (typeof data) {
    case 'string':
      // TYPE 119: STRING
      write(encoder, 119);
      writeVarString(encoder, data);
      break
    case 'number':
      if (isInteger(data) && abs(data) <= BITS31) {
        // TYPE 125: INTEGER (only integers that fit in 31 bits take this path)
        write(encoder, 125);
        writeVarInt(encoder, data);
      } else if (isFloat32(data)) {
        // TYPE 124: FLOAT32 (representable without precision loss)
        write(encoder, 124);
        writeFloat32(encoder, data);
      } else {
        // TYPE 123: FLOAT64 (fallback, including integers beyond 31 bits)
        write(encoder, 123);
        writeFloat64(encoder, data);
      }
      break
    case 'bigint':
      // TYPE 122: BigInt
      write(encoder, 122);
      writeBigInt64(encoder, data);
      break
    case 'object':
      if (data === null) {
        // TYPE 126: null
        write(encoder, 126);
      } else if (data instanceof Array) {
        // TYPE 117: Array — length, then each element recursively
        write(encoder, 117);
        writeVarUint(encoder, data.length);
        for (let i = 0; i < data.length; i++) {
          writeAny(encoder, data[i]);
        }
      } else if (data instanceof Uint8Array) {
        // TYPE 116: ArrayBuffer
        write(encoder, 116);
        writeVarUint8Array(encoder, data);
      } else {
        // TYPE 118: Object — number of keys, then key/value pairs recursively
        write(encoder, 118);
        const keys = Object.keys(data);
        writeVarUint(encoder, keys.length);
        for (let i = 0; i < keys.length; i++) {
          const key = keys[i];
          writeVarString(encoder, key);
          writeAny(encoder, data[key]);
        }
      }
      break
    case 'boolean':
      // TYPE 120/121: boolean (true/false)
      write(encoder, data ? 120 : 121);
      break
    default:
      // TYPE 127: undefined (also functions, symbols, anything unidentifiable)
      write(encoder, 127);
  }
};
|
||
|
||
/**
|
||
* Now come a few stateful encoder that have their own classes.
|
||
*/
|
||
|
||
/**
|
||
* Basic Run Length Encoder - a basic compression implementation.
|
||
*
|
||
* Encodes [1,1,1,7] to [1,3,7,1] (3 times 1, 1 time 7). This encoder might do more harm than good if there are a lot of values that are not repeated.
|
||
*
|
||
* It was originally used for image compression. Cool .. article http://csbruce.com/cbm/transactor/pdfs/trans_v7_i06.pdf
|
||
*
|
||
* @note T must not be null!
|
||
*
|
||
* @template T
|
||
*/
|
||
class RleEncoder extends Encoder {
  /**
   * @param {function(Encoder, T):void} writer Writes one run value.
   */
  constructor (writer) {
    super();
    /**
     * The writer
     */
    this.w = writer;
    /**
     * Current state — the value of the active run (last value written).
     * @type {T|null}
     */
    this.s = null;
    // Length of the active run (0 before the first write).
    this.count = 0;
  }

  /**
   * Append `v` to the sequence: extend the active run when the value repeats,
   * otherwise flush the previous run's counter and start a new run.
   *
   * @param {T} v
   */
  write (v) {
    if (this.s === v) {
      this.count++;
    } else {
      if (this.count > 0) {
        // flush counter, unless this is the first value (count = 0)
        writeVarUint(this, this.count - 1); // since count is always > 0, we can decrement by one. non-standard encoding ftw
      }
      this.count = 1;
      // write first value
      this.w(this, v);
      this.s = v;
    }
  }
}
|
||
|
||
/**
 * Flush the pending run of a UintOptRleEncoder to its underlying encoder.
 *
 * @param {UintOptRleEncoder} encoder
 */
const flushUintOptRleEncoder = encoder => {
  /* istanbul ignore else */
  if (encoder.count > 0) {
    // flush counter, unless this is the first value (count = 0)
    // case 1: just a single value. set sign to positive
    // case 2: write several values. set sign to negative to indicate that there is a length coming
    writeVarInt(encoder.encoder, encoder.count === 1 ? encoder.s : -encoder.s);
    if (encoder.count > 1) {
      writeVarUint(encoder.encoder, encoder.count - 2); // since count is always > 1, we can decrement by one. non-standard encoding ftw
    }
  }
};

/**
 * Optimized Rle encoder that does not suffer from the mentioned problem of the basic Rle encoder.
 *
 * Internally uses VarInt encoder to write unsigned integers. If the input occurs multiple times, we write
 * write it as a negative number. The UintOptRleDecoder then understands that it needs to read a count.
 *
 * Encodes [1,2,3,3,3] as [1,2,-3,3] (once 1, once 2, three times 3)
 */
class UintOptRleEncoder {
  constructor () {
    this.encoder = new Encoder();
    /**
     * Value of the pending (not yet flushed) run.
     * @type {number}
     */
    this.s = 0;
    // Length of the pending run; flushed lazily when a new value arrives.
    this.count = 0;
  }

  /**
   * @param {number} v
   */
  write (v) {
    if (this.s === v) {
      this.count++;
    } else {
      flushUintOptRleEncoder(this);
      this.count = 1;
      this.s = v;
    }
  }

  toUint8Array () {
    // Flush the final run before serializing.
    flushUintOptRleEncoder(this);
    return toUint8Array(this.encoder)
  }
}
|
||
|
||
/**
 * Flush the pending run of an IntDiffOptRleEncoder to its underlying encoder.
 *
 * @param {IntDiffOptRleEncoder} encoder
 */
const flushIntDiffOptRleEncoder = encoder => {
  if (encoder.count > 0) {
    // 31 bit making up the diff | whether to write the counter
    // const encodedDiff = encoder.diff << 1 | (encoder.count === 1 ? 0 : 1)
    const encodedDiff = encoder.diff * 2 + (encoder.count === 1 ? 0 : 1);
    // flush counter, unless this is the first value (count = 0)
    // case 1: just a single value. set first bit to positive
    // case 2: write several values. set first bit to negative to indicate that there is a length coming
    writeVarInt(encoder.encoder, encodedDiff);
    if (encoder.count > 1) {
      writeVarUint(encoder.encoder, encoder.count - 2); // since count is always > 1, we can decrement by one. non-standard encoding ftw
    }
  }
};

/**
 * A combination of the IntDiffEncoder and the UintOptRleEncoder.
 *
 * The count approach is similar to the UintDiffOptRleEncoder, but instead of using the negative bitflag, it encodes
 * in the LSB whether a count is to be read. Therefore this Encoder only supports 31 bit integers!
 *
 * Encodes [1, 2, 3, 2] as [3, 1, 6, -1] (more specifically [(1 << 1) | 1, (3 << 0) | 0, -1])
 *
 * Internally uses variable length encoding. Contrary to normal UintVar encoding, the first byte contains:
 * * 1 bit that denotes whether the next value is a count (LSB)
 * * 1 bit that denotes whether this value is negative (MSB - 1)
 * * 1 bit that denotes whether to continue reading the variable length integer (MSB)
 *
 * Therefore, only five bits remain to encode diff ranges.
 *
 * Use this Encoder only when appropriate. In most cases, this is probably a bad idea.
 */
class IntDiffOptRleEncoder {
  constructor () {
    this.encoder = new Encoder();
    /**
     * Last value written — diffs are computed against it.
     * @type {number}
     */
    this.s = 0;
    // Length of the pending run of equal diffs.
    this.count = 0;
    // The diff shared by every element of the pending run.
    this.diff = 0;
  }

  /**
   * @param {number} v
   */
  write (v) {
    if (this.diff === v - this.s) {
      // Same diff as the current run: just extend it.
      this.s = v;
      this.count++;
    } else {
      flushIntDiffOptRleEncoder(this);
      this.count = 1;
      this.diff = v - this.s;
      this.s = v;
    }
  }

  toUint8Array () {
    // Flush the final run before serializing.
    flushIntDiffOptRleEncoder(this);
    return toUint8Array(this.encoder)
  }
}
|
||
|
||
/**
|
||
* Optimized String Encoder.
|
||
*
|
||
* Encoding many small strings in a simple Encoder is not very efficient. The function call to decode a string takes some time and creates references that must be eventually deleted.
|
||
* In practice, when decoding several million small strings, the GC will kick in more and more often to collect orphaned string objects (or maybe there is another reason?).
|
||
*
|
||
* This string encoder solves the above problem. All strings are concatenated and written as a single string using a single encoding call.
|
||
*
|
||
* The lengths are encoded using a UintOptRleEncoder.
|
||
*/
|
||
class StringEncoder {
  constructor () {
    /**
     * Already flushed string chunks; joined once in `toUint8Array`.
     * @type {Array<string>}
     */
    this.sarr = [];
    // Pending chunk that newly written strings are appended to.
    this.s = '';
    // Run-length encoder for the individual string lengths.
    this.lensE = new UintOptRleEncoder();
  }

  /**
   * Append one string to the sequence.
   *
   * @param {string} string
   */
  write (string) {
    this.s += string;
    if (this.s.length > 19) {
      // Flush in small chunks — presumably to avoid repeatedly growing one
      // very large accumulator string (TODO confirm rationale).
      this.sarr.push(this.s);
      this.s = '';
    }
    this.lensE.write(string.length);
  }

  toUint8Array () {
    const encoder = new Encoder();
    this.sarr.push(this.s);
    this.s = '';
    // Payload: one concatenated string, then the RLE-compressed lengths.
    writeVarString(encoder, this.sarr.join(''));
    writeUint8Array(encoder, this.lensE.toUint8Array());
    return toUint8Array(encoder)
  }
}
|
||
|
||
/* eslint-env browser */
const performance = typeof window === 'undefined' ? null : (typeof window.performance !== 'undefined' && window.performance) || null;

const isoCrypto = typeof crypto === 'undefined' ? null : crypto;

/**
 * Produce an ArrayBuffer of `len` random bytes. Uses the WebCrypto CSPRNG
 * when available; otherwise falls back to a Math.random based polyfill that
 * is NOT cryptographically secure.
 *
 * @type {function(number):ArrayBuffer}
 */
const cryptoRandomBuffer = isoCrypto !== null
  ? len => {
    // browser
    const buf = new ArrayBuffer(len);
    isoCrypto.getRandomValues(new Uint8Array(buf));
    return buf
  }
  : len => {
    // polyfill
    const buf = new ArrayBuffer(len);
    const bytes = new Uint8Array(buf);
    for (let i = 0; i < len; i++) {
      bytes[i] = Math.ceil((Math.random() * 0xFFFFFFFF) >>> 0);
    }
    return buf
  };

// One random unsigned 32 bit integer.
const uint32 = () => new Uint32Array(cryptoRandomBuffer(4))[0];

// @ts-ignore
// Evaluates to the string '10000000-1000-4000-8000-100000000000'.
const uuidv4Template = [1e7] + -1e3 + -4e3 + -8e3 + -1e11;

/**
 * Generate a random RFC-4122 version 4 UUID string.
 */
const uuidv4 = () => uuidv4Template.replace(/[018]/g, /** @param {number} c */ c =>
  (c ^ uint32() & 15 >> c / 4).toString(16)
);
|
||
|
||
/**
|
||
* Utility module to convert metric values.
|
||
*
|
||
* @module metric
|
||
*/
|
||
|
||
// Metric prefixes for magnitudes above one (kilo .. yotta) ...
const prefixUp = ['', 'k', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y'];
// ... and below one (milli .. yocto).
const prefixDown = ['', 'm', 'μ', 'n', 'p', 'f', 'a', 'z', 'y'];

/**
 * Calculate the metric prefix for a number. Assumes E.g. `prefix(1000) = { n: 1, prefix: 'k' }`
 *
 * @param {number} n
 * @param {number} [baseMultiplier] Multiplier of the base (10^(3*baseMultiplier)). E.g. `convert(time, -3)` if time is already in milli seconds
 * @return {{n:number,prefix:string}}
 */
const prefix = (n, baseMultiplier = 0) => {
  const nPow = n === 0 ? 0 : log10(n);
  let mult = 0;
  // Scale down while the magnitude is below the current 10^3 band
  // (clamped at the smallest prefix).
  while (nPow < mult * 3 && baseMultiplier > -8) {
    baseMultiplier--;
    mult--;
  }
  // Scale up while the magnitude exceeds the current band
  // (clamped at the largest prefix).
  while (nPow >= 3 + mult * 3 && baseMultiplier < 8) {
    baseMultiplier++;
    mult++;
  }
  const prefix = baseMultiplier < 0 ? prefixDown[-baseMultiplier] : prefixUp[baseMultiplier];
  return {
    // Round to 12 decimals to hide floating point noise from the scaling.
    n: round((mult > 0 ? n / exp10(mult * 3) : n * exp10(mult * -3)) * 1e12) / 1e12,
    prefix
  }
};
|
||
|
||
/**
|
||
* Utility module to work with time.
|
||
*
|
||
* @module time
|
||
*/
|
||
|
||
/**
 * Return current unix time.
 *
 * @return {number}
 */
const getUnixTime = Date.now;

/**
 * Transform time (in ms) to a human readable format. E.g. 1100 => 1.1s. 60s => 1min. .001 => 10μs.
 *
 * @param {number} d duration in milliseconds
 * @return {string} humanized approximation of time
 */
const humanizeDuration = d => {
  if (d < 60000) {
    // Below one minute: render with a metric prefix (ms is the base unit).
    const p = prefix(d, -1);
    return round(p.n * 100) / 100 + p.prefix + 's'
  }
  // From one minute on, break the duration into seconds/minutes/hours/days.
  d = floor(d / 1000);
  const seconds = d % 60;
  const minutes = floor(d / 60) % 60;
  const hours = floor(d / 3600) % 24;
  const days = floor(d / 86400);
  if (days > 0) {
    // Round hours up when more than half of the next hour has passed.
    return days + 'd' + ((hours > 0 || minutes > 30) ? ' ' + (minutes > 30 ? hours + 1 : hours) + 'h' : '')
  }
  if (hours > 0) {
    /* istanbul ignore next */
    // Same rounding idea one level down: minutes round up past 30 seconds.
    return hours + 'h' + ((minutes > 0 || seconds > 30) ? ' ' + (seconds > 30 ? minutes + 1 : minutes) + 'min' : '')
  }
  return minutes + 'min' + (seconds > 0 ? ' ' + seconds + 's' : '')
};
|
||
|
||
/**
|
||
* Utility helpers to work with promises.
|
||
*
|
||
* @module promise
|
||
*/
|
||
|
||
/**
|
||
* @template T
|
||
* @callback PromiseResolve
|
||
* @param {T|PromiseLike<T>} [result]
|
||
*/
|
||
|
||
/**
 * Wrap a promise executor in a new Promise.
 *
 * @template T
 * @param {function(PromiseResolve<T>,function(Error):void):any} f
 * @return {Promise<T>}
 */
const create$3 = f => /** @type {Promise<T>} */ (new Promise(f));

/**
 * Create a promise that is already resolved with `res`.
 *
 * @template T
 * @param {T|void} res
 * @return {Promise<T|void>}
 */
const resolve = res => Promise.resolve(res);

/**
 * Checks if an object is a promise using ducktyping.
 *
 * Promises are often polyfilled, so it makes sense to add some additional guarantees if the user of this
 * library has some insane environment where global Promise objects are overwritten.
 *
 * Note: the result is truthy/falsy rather than strictly boolean.
 *
 * @param {any} p
 * @return {boolean}
 */
const isPromise = p => p instanceof Promise || (p && p.then && p.catch && p.finally);

/**
 * Utility module to work with EcmaScript Symbols.
 *
 * @module symbol
 */

/**
 * Return fresh symbol.
 *
 * @return {Symbol}
 */
const create$2 = Symbol;
|
||
|
||
/**
|
||
* Working with value pairs.
|
||
*
|
||
* @module pair
|
||
*/
|
||
|
||
/**
 * A simple immutable-by-convention two-tuple.
 *
 * @template L,R
 */
class Pair {
  /**
   * @param {L} left
   * @param {R} right
   */
  constructor (left, right) {
    this.left = left;
    this.right = right;
  }
}

/**
 * Construct a Pair.
 *
 * @template L,R
 * @param {L} left
 * @param {R} right
 * @return {Pair<L,R>}
 */
const create$1 = (left, right) => new Pair(left, right);

/**
 * Invoke `f(left, right)` for every pair in `arr`.
 *
 * @template L,R
 * @param {Array<Pair<L,R>>} arr
 * @param {function(L, R):any} f
 */
const forEach = (arr, f) => arr.forEach(({ left, right }) => f(left, right));
|
||
|
||
/* eslint-env browser */
|
||
|
||
/* istanbul ignore next */
|
||
/**
|
||
* @type {Document}
|
||
*/
|
||
const doc = /** @type {Document} */ (typeof document !== 'undefined' ? document : {});
|
||
|
||
/**
|
||
* @param {string} name
|
||
* @return {HTMLElement}
|
||
*/
|
||
/* istanbul ignore next */
|
||
const createElement = name => doc.createElement(name);
|
||
|
||
/**
|
||
* @return {DocumentFragment}
|
||
*/
|
||
/* istanbul ignore next */
|
||
const createDocumentFragment = () => doc.createDocumentFragment();
|
||
|
||
/**
|
||
* @param {string} text
|
||
* @return {Text}
|
||
*/
|
||
/* istanbul ignore next */
|
||
const createTextNode = text => doc.createTextNode(text);
|
||
|
||
/* istanbul ignore next */
|
||
/** @type {DOMParser} */ (typeof DOMParser !== 'undefined' ? new DOMParser() : null);
|
||
|
||
/**
|
||
* @param {Element} el
|
||
* @param {Array<pair.Pair<string,string|boolean>>} attrs Array of key-value pairs
|
||
* @return {Element}
|
||
*/
|
||
/* istanbul ignore next */
|
||
const setAttributes = (el, attrs) => {
|
||
forEach(attrs, (key, value) => {
|
||
if (value === false) {
|
||
el.removeAttribute(key);
|
||
} else if (value === true) {
|
||
el.setAttribute(key, '');
|
||
} else {
|
||
// @ts-ignore
|
||
el.setAttribute(key, value);
|
||
}
|
||
});
|
||
return el
|
||
};
|
||
|
||
/**
|
||
* @param {Array<Node>|HTMLCollection} children
|
||
* @return {DocumentFragment}
|
||
*/
|
||
/* istanbul ignore next */
|
||
const fragment = children => {
|
||
const fragment = createDocumentFragment();
|
||
for (let i = 0; i < children.length; i++) {
|
||
appendChild(fragment, children[i]);
|
||
}
|
||
return fragment
|
||
};
|
||
|
||
/**
|
||
* @param {Element} parent
|
||
* @param {Array<Node>} nodes
|
||
* @return {Element}
|
||
*/
|
||
/* istanbul ignore next */
|
||
const append = (parent, nodes) => {
|
||
appendChild(parent, fragment(nodes));
|
||
return parent
|
||
};
|
||
|
||
/**
|
||
* @param {EventTarget} el
|
||
* @param {string} name
|
||
* @param {EventListener} f
|
||
*/
|
||
/* istanbul ignore next */
|
||
const addEventListener = (el, name, f) => el.addEventListener(name, f);
|
||
|
||
/**
|
||
* @param {string} name
|
||
* @param {Array<pair.Pair<string,string>|pair.Pair<string,boolean>>} attrs Array of key-value pairs
|
||
* @param {Array<Node>} children
|
||
* @return {Element}
|
||
*/
|
||
/* istanbul ignore next */
|
||
const element = (name, attrs = [], children = []) =>
|
||
append(setAttributes(createElement(name), attrs), children);
|
||
|
||
/**
|
||
* @param {string} t
|
||
* @return {Text}
|
||
*/
|
||
/* istanbul ignore next */
|
||
const text = createTextNode;
|
||
|
||
/**
|
||
* @param {Map<string,string>} m
|
||
* @return {string}
|
||
*/
|
||
/* istanbul ignore next */
|
||
const mapToStyleString = m => map$1(m, (value, key) => `${key}:${value};`).join('');
|
||
|
||
/**
 * Thin wrapper around Node#appendChild.
 *
 * @param {Node} parent
 * @param {Node} child
 * @return {Node} The appended child node.
 */
/* istanbul ignore next */
const appendChild = (parent, child) => {
  return parent.appendChild(child)
};
|
||
|
||
// Bare property reads left over from bundling: the source module exported
// these node-type constants, and tree-shaking kept only the side-effect-free
// accesses. They have no runtime effect.
// NOTE(review): presumably safe to delete entirely — confirm against the
// bundler configuration before doing so.
doc.ELEMENT_NODE;
doc.TEXT_NODE;
doc.CDATA_SECTION_NODE;
doc.COMMENT_NODE;
doc.DOCUMENT_NODE;
doc.DOCUMENT_TYPE_NODE;
doc.DOCUMENT_FRAGMENT_NODE;
|
||
|
||
/**
 * JSON utility functions.
 *
 * @module json
 */

/**
 * Transform JavaScript object to JSON.
 * Direct alias of JSON.stringify — no replacer or indentation defaults.
 *
 * @param {any} object
 * @return {string}
 */
const stringify = JSON.stringify;
|
||
|
||
/* global requestIdleCallback, requestAnimationFrame, cancelIdleCallback, cancelAnimationFrame */

/**
 * Utility module to work with EcmaScript's event loop.
 *
 * @module eventloop
 */

/**
 * Jobs waiting to be flushed on the next macrotask.
 * @type {Array<function>}
 */
let queue$1 = [];

// Flush the queue. The loop reads `queue$1.length` live on purpose: jobs
// enqueued while flushing run in the same flush (enqueue does not schedule
// another timeout while the queue is non-empty).
const _runQueue = () => {
  for (let i = 0; i < queue$1.length; i++) {
    queue$1[i]();
  }
  queue$1 = [];
};

/**
 * Schedule `f` to run asynchronously (setTimeout 0). Multiple calls within
 * the same tick are batched into a single flush.
 *
 * @param {function():void} f
 */
const enqueue = f => {
  queue$1.push(f);
  if (queue$1.length === 1) {
    // first job of this batch: schedule exactly one flush
    setTimeout(_runQueue, 0);
  }
};
|
||
|
||
/**
 * Isomorphic logging module with support for colors!
 *
 * @module logging
 */

// Formatting tokens: each is a unique Symbol (create$2) that the
// compute*LoggingArgs functions below translate into console `%c` styles
// (browser) or ANSI escape codes (node).
const BOLD = create$2();
const UNBOLD = create$2();
const BLUE = create$2();
const GREY = create$2();
const GREEN = create$2();
const RED = create$2();
const PURPLE = create$2();
const ORANGE = create$2();
const UNCOLOR = create$2();
|
||
|
||
/**
 * Maps each formatting Symbol to a CSS property/value pair (create$1) used
 * with the `%c` console directive in browsers.
 *
 * @type {Object<Symbol,pair.Pair<string,string>>}
 */
const _browserStyleMap = {
  [BOLD]: create$1('font-weight', 'bold'),
  [UNBOLD]: create$1('font-weight', 'normal'),
  [BLUE]: create$1('color', 'blue'),
  [GREEN]: create$1('color', 'green'),
  [GREY]: create$1('color', 'grey'),
  [RED]: create$1('color', 'red'),
  [PURPLE]: create$1('color', 'purple'),
  [ORANGE]: create$1('color', 'orange'), // not well supported in chrome when debugging node with inspector - TODO: deprecate
  [UNCOLOR]: create$1('color', 'black')
};
|
||
|
||
// Maps each formatting Symbol to the corresponding ANSI escape sequence for
// terminal output.
// NOTE(review): UNBOLD maps to '\u001b[2m' (ANSI "dim"), not the bold-off
// code '\u001b[22m' — appears intentional upstream, but confirm.
const _nodeStyleMap = {
  [BOLD]: '\u001b[1m',
  [UNBOLD]: '\u001b[2m',
  [BLUE]: '\x1b[34m',
  [GREEN]: '\x1b[32m',
  [GREY]: '\u001b[37m',
  [RED]: '\x1b[31m',
  [PURPLE]: '\x1b[35m',
  [ORANGE]: '\x1b[38;5;208m',
  [UNCOLOR]: '\x1b[0m'
};
|
||
|
||
/* istanbul ignore next */
/**
 * Translate logging args into console.log arguments using the browser's
 * `%c` styling directive.
 *
 * Phase 1 folds leading style Symbols and strings/numbers into a single
 * format string plus a parallel list of CSS style strings; it stops at the
 * first argument that is neither. Phase 2 appends the remaining args
 * unstyled.
 *
 * @param {Array<string|Symbol|Object|number>} args
 * @return {Array<string|object|number>}
 */
const computeBrowserLoggingArgs = (args) => {
  const strBuilder = [];
  const styles = [];
  const currentStyle = create$6();
  /**
   * @type {Array<string|Object|number>}
   */
  let logArgs = [];
  // try with formatting until we find something unsupported
  let i = 0;

  for (; i < args.length; i++) {
    const arg = args[i];
    // @ts-ignore
    const style = _browserStyleMap[arg];
    if (style !== undefined) {
      // style token: accumulate into the style applied to the next string
      currentStyle.set(style.left, style.right);
    } else {
      if (arg.constructor === String || arg.constructor === Number) {
        const style = mapToStyleString(currentStyle);
        if (i > 0 || style.length > 0) {
          strBuilder.push('%c' + arg);
          styles.push(style);
        } else {
          // very first arg with no accumulated style: no %c needed
          strBuilder.push(arg);
        }
      } else {
        break
      }
    }
  }

  if (i > 0) {
    // create logArgs with what we have so far
    logArgs = styles;
    logArgs.unshift(strBuilder.join(''));
  }
  // append the rest
  for (; i < args.length; i++) {
    const arg = args[i];
    // NOTE(review): `instanceof Symbol` is always false for primitive
    // symbols, so this guard never filters anything — kept as upstream.
    if (!(arg instanceof Symbol)) {
      logArgs.push(arg);
    }
  }
  return logArgs
};
|
||
|
||
/* istanbul ignore next */
/**
 * Translate logging args for an environment without color support:
 * style Symbols are dropped, leading strings/numbers are joined into one
 * string, and trailing plain objects are JSON-stringified.
 *
 * @param {Array<string|Symbol|Object|number>} args
 * @return {Array<string|object|number>}
 */
const computeNoColorLoggingArgs = args => {
  const prefixParts = [];
  const logArgs = [];
  let i = 0;

  // consume the leading run of style tokens (dropped) and strings/numbers
  while (i < args.length) {
    const arg = args[i];
    // @ts-ignore
    const style = _nodeStyleMap[arg];
    if (style === undefined) {
      if (arg.constructor !== String && arg.constructor !== Number) {
        break
      }
      prefixParts.push(arg);
    }
    i++;
  }
  if (i > 0) {
    logArgs.push(prefixParts.join(''));
  }
  // append the rest, serializing plain objects
  for (; i < args.length; i++) {
    const arg = args[i];
    /* istanbul ignore else */
    if (arg instanceof Symbol) {
      continue
    }
    logArgs.push(arg.constructor === Object ? JSON.stringify(arg) : arg);
  }
  return logArgs
};
|
||
|
||
/* istanbul ignore next */
/**
 * Translate logging args into a single ANSI-colored string for terminals:
 * style Symbols become escape sequences inline with the leading
 * strings/numbers, terminated by a reset code; remaining args are appended
 * as-is.
 *
 * @param {Array<string|Symbol|Object|number>} args
 * @return {Array<string|object|number>}
 */
const computeNodeLoggingArgs = (args) => {
  const strBuilder = [];
  const logArgs = [];

  // try with formatting until we find something unsupported
  let i = 0;

  for (; i < args.length; i++) {
    const arg = args[i];
    // @ts-ignore
    const style = _nodeStyleMap[arg];
    if (style !== undefined) {
      strBuilder.push(style);
    } else {
      if (arg.constructor === String || arg.constructor === Number) {
        strBuilder.push(arg);
      } else {
        break
      }
    }
  }
  if (i > 0) {
    // create logArgs with what we have so far; reset colors at the end
    strBuilder.push('\x1b[0m');
    logArgs.push(strBuilder.join(''));
  }
  // append the rest
  for (; i < args.length; i++) {
    const arg = args[i];
    /* istanbul ignore else */
    // NOTE(review): `instanceof Symbol` is always false for primitive
    // symbols — guard kept as upstream.
    if (!(arg instanceof Symbol)) {
      logArgs.push(arg);
    }
  }
  return logArgs
};
|
||
|
||
/* istanbul ignore next */
// Pick the arg-formatting strategy once at module load: ANSI escapes on
// node, `%c` CSS styles in browsers, plain strings when color is
// unsupported.
const computeLoggingArgs = supportsColor
  ? (isNode ? computeNodeLoggingArgs : computeBrowserLoggingArgs)
  : computeNoColorLoggingArgs;
|
||
|
||
/**
 * Log styled output to the console and mirror the raw args to every
 * registered VConsole.
 *
 * @param {Array<string|Symbol|Object|number>} args
 */
const print = (...args) => {
  const formatted = computeLoggingArgs(args);
  console.log(...formatted);
  /* istanbul ignore next */
  for (const vc of vconsoles) {
    vc.print(args);
  }
};
|
||
|
||
/* istanbul ignore next */
/**
 * Log an error to the console and mirror it to every registered VConsole.
 *
 * @param {Error} err
 */
const printError = (err) => {
  console.error(err);
  for (const vc of vconsoles) {
    vc.printError(err);
  }
};
|
||
|
||
/* istanbul ignore next */
/**
 * Render an image in the browser console (via a styled empty `%c` log line,
 * the standard devtools trick) and mirror it to all registered VConsoles.
 *
 * @param {string} url image location
 * @param {number} height height of the image in pixel
 */
const printImg = (url, height) => {
  if (isBrowser) {
    console.log(
      '%c ',
      `font-size: ${height}px; background-size: contain; background-repeat: no-repeat; background-image: url(${url})`
    );
    // console.log('%c ', `font-size: ${height}x; background: url(${url}) no-repeat;`)
  }
  vconsoles.forEach((vc) => vc.printImg(url, height));
};
|
||
|
||
/* istanbul ignore next */
/**
 * Render a base64-encoded gif via printImg.
 *
 * @param {string} base64
 * @param {number} height
 */
const printImgBase64 = (base64, height) => {
  const dataUrl = `data:image/gif;base64,${base64}`;
  return printImg(dataUrl, height)
};
|
||
|
||
/**
 * Open a styled console group and mirror it to every registered VConsole.
 *
 * @param {Array<string|Symbol|Object|number>} args
 */
const group = (...args) => {
  const formatted = computeLoggingArgs(args);
  console.group(...formatted);
  /* istanbul ignore next */
  for (const vc of vconsoles) {
    vc.group(args);
  }
};
|
||
|
||
/**
 * Open a collapsed styled console group and mirror it to every registered
 * VConsole.
 *
 * @param {Array<string|Symbol|Object|number>} args
 */
const groupCollapsed = (...args) => {
  const formatted = computeLoggingArgs(args);
  console.groupCollapsed(...formatted);
  /* istanbul ignore next */
  for (const vc of vconsoles) {
    vc.groupCollapsed(args);
  }
};
|
||
|
||
/**
 * Close the current console group and mirror the close to every registered
 * VConsole.
 */
const groupEnd = () => {
  console.groupEnd();
  /* istanbul ignore next */
  for (const vc of vconsoles) {
    vc.groupEnd();
  }
};
|
||
|
||
// Registry of live VConsole instances that mirror console output into the DOM.
const vconsoles = new Set();
|
||
|
||
/* istanbul ignore next */
/**
 * Build styled <span> elements for one VConsole output line; DOM-producing
 * counterpart of computeBrowserLoggingArgs (same two-phase arg handling).
 *
 * @param {Array<string|Symbol|Object|number>} args
 * @return {Array<Element>}
 */
const _computeLineSpans = (args) => {
  const spans = [];
  const currentStyle = new Map();
  // try with formatting until we find something unsupported
  let i = 0;
  for (; i < args.length; i++) {
    const arg = args[i];
    // @ts-ignore
    const style = _browserStyleMap[arg];
    if (style !== undefined) {
      // style token: accumulate into the style of the next span
      currentStyle.set(style.left, style.right);
    } else {
      if (arg.constructor === String || arg.constructor === Number) {
        // @ts-ignore
        const span = element('span', [
          create$1('style', mapToStyleString(currentStyle))
        ], [text(arg.toString())]);
        // an empty span collapses to zero width; keep a space so it renders
        if (span.innerHTML === '') {
          span.innerHTML = ' ';
        }
        spans.push(span);
      } else {
        break
      }
    }
  }
  // append the rest, stringifying anything that is not a string/number
  for (; i < args.length; i++) {
    let content = args[i];
    // NOTE(review): `instanceof Symbol` is always false for primitive
    // symbols — guard kept as upstream.
    if (!(content instanceof Symbol)) {
      if (content.constructor !== String && content.constructor !== Number) {
        content = ' ' + stringify(content) + ' ';
      }
      spans.push(
        element('span', [], [text(/** @type {string} */ (content))])
      );
    }
  }
  return spans
};
|
||
|
||
// Shared inline CSS for every rendered VConsole line.
const lineStyle =
  'font-family:monospace;border-bottom:1px solid #e2e2e2;padding:2px;';
|
||
|
||
/* istanbul ignore next */
/**
 * A console that renders its output into a DOM element, mirroring the
 * module-level print/group/... functions. All DOM mutations go through
 * `enqueue` so they happen asynchronously, in call order.
 */
class VConsole {
  /**
   * @param {Element} dom Container the console renders into.
   */
  constructor (dom) {
    this.dom = dom;
    /**
     * Current container: the innermost open group, initially the root dom.
     * @type {Element}
     */
    this.ccontainer = this.dom;
    // current group nesting depth; controls indentation
    this.depth = 0;
    // register so module-level print/group/... mirror into this instance
    vconsoles.add(this);
  }

  /**
   * Open a (possibly collapsed) group rendered as a clickable header with
   * ▼/▶ toggles plus a nested container for subsequent output.
   *
   * @param {Array<string|Symbol|Object|number>} args
   * @param {boolean} collapsed
   */
  group (args, collapsed = false) {
    enqueue(() => {
      const triangleDown = element('span', [
        create$1('hidden', collapsed),
        create$1('style', 'color:grey;font-size:120%;')
      ], [text('▼')]);
      const triangleRight = element('span', [
        create$1('hidden', !collapsed),
        create$1('style', 'color:grey;font-size:125%;')
      ], [text('▶')]);
      const content = element(
        'div',
        [create$1(
          'style',
          `${lineStyle};padding-left:${this.depth * 10}px`
        )],
        [triangleDown, triangleRight, text(' ')].concat(
          _computeLineSpans(args)
        )
      );
      const nextContainer = element('div', [
        create$1('hidden', collapsed)
      ]);
      const nextLine = element('div', [], [content, nextContainer]);
      append(this.ccontainer, [nextLine]);
      // subsequent output goes inside the new group
      this.ccontainer = nextContainer;
      this.depth++;
      // when header is clicked, collapse/uncollapse container
      addEventListener(content, 'click', (_event) => {
        nextContainer.toggleAttribute('hidden');
        triangleDown.toggleAttribute('hidden');
        triangleRight.toggleAttribute('hidden');
      });
    });
  }

  /**
   * Open a group that starts collapsed.
   *
   * @param {Array<string|Symbol|Object|number>} args
   */
  groupCollapsed (args) {
    this.group(args, true);
  }

  // Close the innermost open group (no-op at depth 0).
  groupEnd () {
    enqueue(() => {
      if (this.depth > 0) {
        this.depth--;
        // climb out of nextContainer and its wrapping nextLine div
        // @ts-ignore
        this.ccontainer = this.ccontainer.parentElement.parentElement;
      }
    });
  }

  /**
   * Render one output line into the current container.
   *
   * @param {Array<string|Symbol|Object|number>} args
   */
  print (args) {
    enqueue(() => {
      append(this.ccontainer, [
        element('div', [
          create$1(
            'style',
            `${lineStyle};padding-left:${this.depth * 10}px`
          )
        ], _computeLineSpans(args))
      ]);
    });
  }

  /**
   * Render an error as a bold red line.
   *
   * @param {Error} err
   */
  printError (err) {
    this.print([RED, BOLD, err.toString()]);
  }

  /**
   * Render an image into the current container.
   *
   * @param {string} url
   * @param {number} height
   */
  printImg (url, height) {
    enqueue(() => {
      append(this.ccontainer, [
        element('img', [
          create$1('src', url),
          create$1('height', `${round(height * 1.5)}px`)
        ])
      ]);
    });
  }

  /**
   * Append an arbitrary DOM node into the current container.
   *
   * @param {Node} node
   */
  printDom (node) {
    enqueue(() => {
      append(this.ccontainer, [node]);
    });
  }

  // Unregister from the module-level mirror set (queued like all mutations).
  destroy () {
    enqueue(() => {
      vconsoles.delete(this);
    });
  }
}
|
||
|
||
/* istanbul ignore next */
/**
 * Create a VConsole that renders console output into `dom`.
 *
 * @param {Element} dom
 * @return {VConsole}
 */
const createVConsole = (dom) => {
  return new VConsole(dom)
};
|
||
|
||
/**
 * Utility module to create and manipulate Iterators.
 *
 * @module iterator
 */

/**
 * Wrap a `next` function into an object satisfying both the iterator and
 * iterable protocols (its Symbol.iterator returns itself).
 *
 * @template T
 * @param {function():IteratorResult<T>} next
 * @return {IterableIterator<T>}
 */
const createIterator = next => {
  const iter = {
    // @ts-ignore
    next,
    /**
     * @return {IterableIterator<T>}
     */
    [Symbol.iterator]: () => iter
  };
  return iter
};
|
||
|
||
/**
 * Lazily filter an iterator: yields only values for which `filter` holds.
 *
 * @template T
 * @param {Iterator<T>} iterator
 * @param {function(T):boolean} filter
 */
const iteratorFilter = (iterator, filter) => createIterator(() => {
  let step = iterator.next();
  // skip values rejected by the predicate
  while (!step.done && !filter(step.value)) {
    step = iterator.next();
  }
  return step
});
|
||
|
||
/**
 * Lazily map an iterator's values through `fmap`.
 *
 * @template T,M
 * @param {Iterator<T>} iterator
 * @param {function(T):M} fmap
 */
const iteratorMap = (iterator, fmap) => createIterator(() => {
  const step = iterator.next();
  // `done` is passed through unchanged; value is only mapped while running
  const value = step.done ? undefined : fmap(step.value);
  return { done: step.done, value }
});
|
||
|
||
/**
 * This is an abstract interface that all Connectors should implement to keep them interchangeable.
 *
 * @note This interface is experimental and it is not advised to actually inherit this class.
 *       It just serves as typing information.
 *
 * @extends {Observable<any>}
 */
class AbstractConnector extends Observable {
  /**
   * @param {Doc} ydoc The document this connector synchronizes.
   * @param {any} awareness Awareness/presence instance used by the connector.
   */
  constructor (ydoc, awareness) {
    super();
    this.doc = ydoc;
    this.awareness = awareness;
  }
}
|
||
|
||
/**
 * A contiguous range of deleted content belonging to a single client:
 * [clock, clock + len).
 */
class DeleteItem {
  /**
   * @param {number} clock Start clock of the deleted range.
   * @param {number} len Number of deleted elements.
   */
  constructor (clock, len) {
    /**
     * @type {number}
     */
    this.clock = clock;
    /**
     * @type {number}
     */
    this.len = len;
  }
}
|
||
|
||
/**
 * We no longer maintain a DeleteStore. DeleteSet is a temporary object that is created when needed.
 * - When created in a transaction, it must only be accessed after sorting, and merging
 * - This DeleteSet is send to other clients
 * - We do not create a DeleteSet when we send a sync message. The DeleteSet message is created directly from StructStore
 * - We read a DeleteSet as part of a sync/update message. In this case the DeleteSet is already sorted and merged.
 */
class DeleteSet {
  constructor () {
    /**
     * Per-client deleted ranges, keyed by client id.
     * @type {Map<number,Array<DeleteItem>>}
     */
    this.clients = new Map();
  }
}
|
||
|
||
/**
 * Iterate over all structs that the DeleteSet gc's.
 * For each (client, deletes) entry, walks that client's struct list and
 * invokes `f` on every struct covered by a DeleteItem range.
 *
 * @param {Transaction} transaction
 * @param {DeleteSet} ds
 * @param {function(GC|Item):void} f
 *
 * @function
 */
const iterateDeletedStructs = (transaction, ds, f) =>
  ds.clients.forEach((deletes, clientid) => {
    // assumes the StructStore has an entry for every client in ds — TODO confirm
    const structs = /** @type {Array<GC|Item>} */ (transaction.doc.store.clients.get(clientid));
    for (let i = 0; i < deletes.length; i++) {
      const del = deletes[i];
      iterateStructs(transaction, structs, del.clock, del.len, f);
    }
  });
|
||
|
||
/**
 * Binary search for the DeleteItem whose range [clock, clock + len)
 * contains the given clock.
 *
 * @param {Array<DeleteItem>} dis Sorted, merged list of DeleteItems.
 * @param {number} clock
 * @return {number|null} Index of the covering item, or null if none covers it.
 *
 * @private
 * @function
 */
const findIndexDS = (dis, clock) => {
  let lo = 0;
  let hi = dis.length - 1;
  while (lo <= hi) {
    const mid = floor((lo + hi) / 2);
    const item = dis[mid];
    if (item.clock > clock) {
      // target lies strictly before this range
      hi = mid - 1;
    } else if (clock < item.clock + item.len) {
      // item.clock <= clock < item.clock + item.len: covered
      return mid
    } else {
      lo = mid + 1;
    }
  }
  return null
};
|
||
|
||
/**
 * Whether the struct at `id` is marked deleted in the DeleteSet.
 *
 * @param {DeleteSet} ds
 * @param {ID} id
 * @return {boolean}
 *
 * @private
 * @function
 */
const isDeleted = (ds, id) => {
  const dis = ds.clients.get(id.client);
  if (dis === undefined) {
    return false
  }
  return findIndexDS(dis, id.clock) !== null
};
|
||
|
||
/**
 * Sort each client's DeleteItems by clock and coalesce adjacent/overlapping
 * ranges, in place.
 *
 * @param {DeleteSet} ds
 *
 * @private
 * @function
 */
const sortAndMergeDeleteSet = ds => {
  ds.clients.forEach(dels => {
    dels.sort((a, b) => a.clock - b.clock);
    // merge items without filtering or splicing the array
    // i is the current pointer
    // j refers to the current insert position for the pointed item
    // try to merge dels[i] into dels[j-1] or set dels[j]=dels[i]
    let i, j;
    for (i = 1, j = 1; i < dels.length; i++) {
      const left = dels[j - 1];
      const right = dels[i];
      if (left.clock + left.len >= right.clock) {
        // touching or overlapping: extend the left range to cover both
        left.len = max(left.len, right.clock + right.len - left.clock);
      } else {
        if (j < i) {
          dels[j] = right;
        }
        j++;
      }
    }
    // truncate to the number of surviving (merged) items
    dels.length = j;
  });
};
|
||
|
||
/**
 * Merge several DeleteSets into a fresh, sorted-and-merged DeleteSet.
 * The inputs are not modified.
 *
 * @param {Array<DeleteSet>} dss
 * @return {DeleteSet} A fresh DeleteSet
 */
const mergeDeleteSets = dss => {
  const merged = new DeleteSet();
  for (let dssI = 0; dssI < dss.length; dssI++) {
    dss[dssI].clients.forEach((delsLeft, client) => {
      if (!merged.clients.has(client)) {
        // Write all missing keys from current ds and all following.
        // If merged already contains `client` current ds has already been added.
        /**
         * @type {Array<DeleteItem>}
         */
        const dels = delsLeft.slice();
        for (let i = dssI + 1; i < dss.length; i++) {
          appendTo(dels, dss[i].clients.get(client) || []);
        }
        merged.clients.set(client, dels);
      }
    });
  }
  // normalize: sort by clock and coalesce overlapping ranges
  sortAndMergeDeleteSet(merged);
  return merged
};
|
||
|
||
/**
 * Record a deleted range [clock, clock + length) for `client` in the
 * DeleteSet. Ranges are appended unsorted; normalize later with
 * sortAndMergeDeleteSet.
 *
 * @param {DeleteSet} ds
 * @param {number} client
 * @param {number} clock
 * @param {number} length
 *
 * @private
 * @function
 */
const addToDeleteSet = (ds, client, clock, length) => {
  const dels = setIfUndefined(ds.clients, client, () => []);
  dels.push(new DeleteItem(clock, length));
};
|
||
|
||
// Factory for an empty DeleteSet.
const createDeleteSet = () => new DeleteSet();
|
||
|
||
/**
 * Build a DeleteSet describing every deleted struct in the StructStore.
 * Consecutive deleted structs are coalesced into single DeleteItems, so the
 * result is already sorted and merged.
 *
 * @param {StructStore} ss
 * @return {DeleteSet} Merged and sorted DeleteSet
 *
 * @private
 * @function
 */
const createDeleteSetFromStructStore = ss => {
  const ds = createDeleteSet();
  ss.clients.forEach((structs, client) => {
    /**
     * @type {Array<DeleteItem>}
     */
    const dsitems = [];
    for (let i = 0; i < structs.length; i++) {
      const struct = structs[i];
      if (struct.deleted) {
        const clock = struct.id.clock;
        let len = struct.length;
        if (i + 1 < structs.length) {
          // absorb the following run of deleted structs into this range
          // (note: the loop advances the outer index `i` as it consumes them)
          for (let next = structs[i + 1]; i + 1 < structs.length && next.deleted; next = structs[++i + 1]) {
            len += next.length;
          }
        }
        dsitems.push(new DeleteItem(clock, len));
      }
    }
    if (dsitems.length > 0) {
      ds.clients.set(client, dsitems);
    }
  });
  return ds
};
|
||
|
||
/**
 * Encode a DeleteSet: client count, then per client its id, number of
 * ranges, and each range's clock/length via the encoder's DS methods.
 *
 * @param {DSEncoderV1 | DSEncoderV2} encoder
 * @param {DeleteSet} ds
 *
 * @private
 * @function
 */
const writeDeleteSet = (encoder, ds) => {
  writeVarUint(encoder.restEncoder, ds.clients.size);
  ds.clients.forEach((dsitems, client) => {
    // per-client running clock state must be reset before each client block
    encoder.resetDsCurVal();
    writeVarUint(encoder.restEncoder, client);
    writeVarUint(encoder.restEncoder, dsitems.length);
    for (const item of dsitems) {
      encoder.writeDsClock(item.clock);
      encoder.writeDsLen(item.len);
    }
  });
};
|
||
|
||
/**
 * Decode a DeleteSet previously written by writeDeleteSet.
 *
 * @param {DSDecoderV1 | DSDecoderV2} decoder
 * @return {DeleteSet}
 *
 * @private
 * @function
 */
const readDeleteSet = decoder => {
  const ds = new DeleteSet();
  const numClients = readVarUint(decoder.restDecoder);
  for (let c = 0; c < numClients; c++) {
    decoder.resetDsCurVal();
    const client = readVarUint(decoder.restDecoder);
    const numberOfDeletes = readVarUint(decoder.restDecoder);
    if (numberOfDeletes > 0) {
      const dsField = setIfUndefined(ds.clients, client, () => []);
      for (let d = 0; d < numberOfDeletes; d++) {
        const clock = decoder.readDsClock();
        const len = decoder.readDsLen();
        dsField.push(new DeleteItem(clock, len));
      }
    }
  }
  return ds
};
|
||
|
||
/**
 * @todo YDecoder also contains references to String and other Decoders. Would make sense to exchange YDecoder.toUint8Array for YDecoder.DsToUint8Array()..
 */

/**
 * Decode a DeleteSet and apply it to the local store: structs inside each
 * deleted range are split at the range boundaries as needed and marked
 * deleted. Ranges beyond the locally-known state (or the parts of ranges
 * that extend past it) cannot be applied yet and are collected into a v2
 * update that the caller should retry later.
 *
 * @param {DSDecoderV1 | DSDecoderV2} decoder
 * @param {Transaction} transaction
 * @param {StructStore} store
 * @return {Uint8Array|null} Returns a v2 update containing all deletes that couldn't be applied yet; or null if all deletes were applied successfully.
 *
 * @private
 * @function
 */
const readAndApplyDeleteSet = (decoder, transaction, store) => {
  const unappliedDS = new DeleteSet();
  const numClients = readVarUint(decoder.restDecoder);
  for (let i = 0; i < numClients; i++) {
    decoder.resetDsCurVal();
    const client = readVarUint(decoder.restDecoder);
    const numberOfDeletes = readVarUint(decoder.restDecoder);
    const structs = store.clients.get(client) || [];
    // clock up to which we have structs for this client
    const state = getState(store, client);
    for (let i = 0; i < numberOfDeletes; i++) {
      const clock = decoder.readDsClock();
      const clockEnd = clock + decoder.readDsLen();
      if (clock < state) {
        if (state < clockEnd) {
          // the tail of this range refers to structs we don't have yet
          addToDeleteSet(unappliedDS, client, state, clockEnd - state);
        }
        let index = findIndexSS(structs, clock);
        /**
         * We can ignore the case of GC and Delete structs, because we are going to skip them
         * @type {Item}
         */
        // @ts-ignore
        let struct = structs[index];
        // split the first item if necessary
        if (!struct.deleted && struct.id.clock < clock) {
          structs.splice(index + 1, 0, splitItem(transaction, struct, clock - struct.id.clock));
          index++; // increase we now want to use the next struct
        }
        // delete every struct whose start falls inside [clock, clockEnd)
        while (index < structs.length) {
          // @ts-ignore
          struct = structs[index++];
          if (struct.id.clock < clockEnd) {
            if (!struct.deleted) {
              if (clockEnd < struct.id.clock + struct.length) {
                // range ends inside this struct: split and delete the head
                structs.splice(index, 0, splitItem(transaction, struct, clockEnd - struct.id.clock));
              }
              struct.delete(transaction);
            }
          } else {
            break
          }
        }
      } else {
        // whole range is ahead of our local state: defer it entirely
        addToDeleteSet(unappliedDS, client, clock, clockEnd - clock);
      }
    }
  }
  if (unappliedDS.clients.size > 0) {
    // package the unapplied deletes as a struct-less v2 update
    const ds = new UpdateEncoderV2();
    writeVarUint(ds.restEncoder, 0); // encode 0 structs
    writeDeleteSet(ds, unappliedDS);
    return ds.toUint8Array()
  }
  return null
};
|
||
|
||
/**
 * @module Y
 */

// Client ids are random unsigned 32-bit integers (alias of lib0 `uint32`).
const generateNewClientId = uint32;
|
||
|
||
/**
|
||
* @typedef {Object} DocOpts
|
||
* @property {boolean} [DocOpts.gc=true] Disable garbage collection (default: gc=true)
|
||
* @property {function(Item):boolean} [DocOpts.gcFilter] Will be called before an Item is garbage collected. Return false to keep the Item.
|
||
* @property {string} [DocOpts.guid] Define a globally unique identifier for this document
|
||
* @property {string | null} [DocOpts.collectionid] Associate this document with a collection. This only plays a role if your provider has a concept of collection.
|
||
* @property {any} [DocOpts.meta] Any kind of meta information you want to associate with this document. If this is a subdocument, remote peers will store the meta information as well.
|
||
* @property {boolean} [DocOpts.autoLoad] If a subdocument, automatically load document. If this is a subdocument, remote peers will load the document as well automatically.
|
||
* @property {boolean} [DocOpts.shouldLoad] Whether the document should be synced by the provider now. This is toggled to true when you call ydoc.load()
|
||
*/
|
||
|
||
/**
|
||
* A Yjs instance handles the state of shared data.
|
||
* @extends Observable<string>
|
||
*/
|
||
class Doc extends Observable {
  /**
   * @param {DocOpts} [opts] configuration
   */
  constructor ({ guid = uuidv4(), collectionid = null, gc = true, gcFilter = () => true, meta = null, autoLoad = false, shouldLoad = true } = {}) {
    super();
    // whether garbage collection of deleted content is enabled
    this.gc = gc;
    // called before an Item is gc'd; return false to keep the Item
    this.gcFilter = gcFilter;
    this.clientID = generateNewClientId();
    this.guid = guid;
    this.collectionid = collectionid;
    /**
     * Shared top-level types, keyed by name (see get/getMap/getArray/...).
     * @type {Map<string, AbstractType<YEvent<any>>>}
     */
    this.share = new Map();
    this.store = new StructStore();
    /**
     * Currently active transaction, if any.
     * @type {Transaction | null}
     */
    this._transaction = null;
    /**
     * Transactions awaiting cleanup (observer calls, event emission, ...).
     * @type {Array<Transaction>}
     */
    this._transactionCleanups = [];
    /**
     * @type {Set<Doc>}
     */
    this.subdocs = new Set();
    /**
     * If this document is a subdocument - a document integrated into another document - then _item is defined.
     * @type {Item?}
     */
    this._item = null;
    this.shouldLoad = shouldLoad;
    this.autoLoad = autoLoad;
    this.meta = meta;
    this.isLoaded = false;
    // promise (via create$3) that resolves with this doc on the 'load' event
    this.whenLoaded = create$3(resolve => {
      this.on('load', () => {
        this.isLoaded = true;
        resolve(this);
      });
    });
  }

  /**
   * Notify the parent document that you request to load data into this subdocument (if it is a subdocument).
   *
   * `load()` might be used in the future to request any provider to load the most current data.
   *
   * It is safe to call `load()` multiple times.
   */
  load () {
    const item = this._item;
    if (item !== null && !this.shouldLoad) {
      // announce the load request through a transaction on the parent doc
      transact(/** @type {any} */ (item.parent).doc, transaction => {
        transaction.subdocsLoaded.add(this);
      }, null, true);
    }
    this.shouldLoad = true;
  }

  /**
   * All currently integrated subdocuments.
   * @return {Set<Doc>}
   */
  getSubdocs () {
    return this.subdocs
  }

  /**
   * Guids of all currently integrated subdocuments.
   * @return {Set<string>}
   */
  getSubdocGuids () {
    return new Set(Array.from(this.subdocs).map(doc => doc.guid))
  }

  /**
   * Changes that happen inside of a transaction are bundled. This means that
   * the observer fires _after_ the transaction is finished and that all changes
   * that happened inside of the transaction are sent as one message to the
   * other peers.
   *
   * @param {function(Transaction):void} f The function that should be executed as a transaction
   * @param {any} [origin] Origin of who started the transaction. Will be stored on transaction.origin
   *
   * @public
   */
  transact (f, origin = null) {
    transact(this, f, origin);
  }

  /**
   * Define a shared data type.
   *
   * Multiple calls of `y.get(name, TypeConstructor)` yield the same result
   * and do not overwrite each other. I.e.
   * `y.define(name, Y.Array) === y.define(name, Y.Array)`
   *
   * After this method is called, the type is also available on `y.share.get(name)`.
   *
   * *Best Practices:*
   * Define all types right after the Yjs instance is created and store them in a separate object.
   * Also use the typed methods `getText(name)`, `getArray(name)`, ..
   *
   * @example
   *   const y = new Y(..)
   *   const appState = {
   *     document: y.getText('document')
   *     comments: y.getArray('comments')
   *   }
   *
   * @param {string} name
   * @param {Function} TypeConstructor The constructor of the type definition. E.g. Y.Text, Y.Array, Y.Map, ...
   * @return {AbstractType<any>} The created type. Constructed with TypeConstructor
   *
   * @public
   */
  get (name, TypeConstructor = AbstractType) {
    const type = setIfUndefined(this.share, name, () => {
      // @ts-ignore
      const t = new TypeConstructor();
      t._integrate(this, null);
      return t
    });
    const Constr = type.constructor;
    if (TypeConstructor !== AbstractType && Constr !== TypeConstructor) {
      if (Constr === AbstractType) {
        // the type existed only as a generic placeholder (e.g. created by a
        // remote update before being defined locally); upgrade it in place
        // to the requested concrete type
        // @ts-ignore
        const t = new TypeConstructor();
        t._map = type._map;
        type._map.forEach(/** @param {Item?} n */ n => {
          // re-parent the whole left-chain of every map entry
          for (; n !== null; n = n.left) {
            // @ts-ignore
            n.parent = t;
          }
        });
        t._start = type._start;
        // re-parent the list chain as well
        for (let n = t._start; n !== null; n = n.right) {
          n.parent = t;
        }
        t._length = type._length;
        this.share.set(name, t);
        t._integrate(this, null);
        return t
      } else {
        throw new Error(`Type with the name ${name} has already been defined with a different constructor`)
      }
    }
    return type
  }

  /**
   * @template T
   * @param {string} [name]
   * @return {YArray<T>}
   *
   * @public
   */
  getArray (name = '') {
    // @ts-ignore
    return this.get(name, YArray)
  }

  /**
   * @param {string} [name]
   * @return {YText}
   *
   * @public
   */
  getText (name = '') {
    // @ts-ignore
    return this.get(name, YText)
  }

  /**
   * @template T
   * @param {string} [name]
   * @return {YMap<T>}
   *
   * @public
   */
  getMap (name = '') {
    // @ts-ignore
    return this.get(name, YMap)
  }

  /**
   * @param {string} [name]
   * @return {YXmlFragment}
   *
   * @public
   */
  getXmlFragment (name = '') {
    // @ts-ignore
    return this.get(name, YXmlFragment)
  }

  /**
   * Converts the entire document into a js object, recursively traversing each yjs type
   * Doesn't log types that have not been defined (using ydoc.getType(..)).
   *
   * @deprecated Do not use this method and rather call toJSON directly on the shared types.
   *
   * @return {Object<string, any>}
   */
  toJSON () {
    /**
     * @type {Object<string, any>}
     */
    const doc = {};

    this.share.forEach((value, key) => {
      doc[key] = value.toJSON();
    });

    return doc
  }

  /**
   * Emit `destroy` event and unregister all event handlers.
   */
  destroy () {
    from$1(this.subdocs).forEach(subdoc => subdoc.destroy());
    const item = this._item;
    if (item !== null) {
      // this doc is a subdocument: replace it inside the parent with a
      // fresh, unloaded Doc carrying the same guid/options
      this._item = null;
      const content = /** @type {ContentDoc} */ (item.content);
      content.doc = new Doc({ guid: this.guid, ...content.opts, shouldLoad: false });
      content.doc._item = item;
      transact(/** @type {any} */ (item).parent.doc, transaction => {
        const doc = content.doc;
        if (!item.deleted) {
          transaction.subdocsAdded.add(doc);
        }
        transaction.subdocsRemoved.add(this);
      }, null, true);
    }
    // 'destroyed' is emitted before 'destroy' — presumably kept for
    // backwards compatibility; verify against the Yjs changelog
    this.emit('destroyed', [true]);
    this.emit('destroy', [this]);
    super.destroy();
  }

  /**
   * @param {string} eventName
   * @param {function(...any):any} f
   */
  on (eventName, f) {
    super.on(eventName, f);
  }

  /**
   * @param {string} eventName
   * @param {function} f
   */
  off (eventName, f) {
    super.off(eventName, f);
  }
}
|
||
|
||
// V1 decoder for DeleteSet payloads: clocks and lengths are plain var-uints
// with no per-client delta state (hence resetDsCurVal is a no-op).
class DSDecoderV1 {
  /**
   * @param {decoding.Decoder} decoder Positioned at the DeleteSet payload.
   */
  constructor (decoder) {
    this.restDecoder = decoder;
  }

  resetDsCurVal () {
    // nop — v1 keeps no running per-client value
  }

  /**
   * @return {number}
   */
  readDsClock () {
    return readVarUint(this.restDecoder)
  }

  /**
   * @return {number}
   */
  readDsLen () {
    return readVarUint(this.restDecoder)
  }
}
|
||
/**
 * Struct decoder for the v1 update format.
 *
 * Every field is read directly from the single `restDecoder` stream as a
 * plain var-uint / var-string (no per-column compression, unlike UpdateDecoderV2).
 */
class UpdateDecoderV1 extends DSDecoderV1 {
  /**
   * @return {ID}
   */
  readLeftID () {
    return createID(readVarUint(this.restDecoder), readVarUint(this.restDecoder))
  }

  /**
   * @return {ID}
   */
  readRightID () {
    return createID(readVarUint(this.restDecoder), readVarUint(this.restDecoder))
  }

  /**
   * Read the next client id.
   * Use this in favor of readID whenever possible to reduce the number of objects created.
   */
  readClient () {
    return readVarUint(this.restDecoder)
  }

  /**
   * @return {number} info An unsigned 8-bit integer
   */
  readInfo () {
    return readUint8(this.restDecoder)
  }

  /**
   * @return {string}
   */
  readString () {
    return readVarString(this.restDecoder)
  }

  /**
   * @return {boolean} isKey
   */
  readParentInfo () {
    return readVarUint(this.restDecoder) === 1
  }

  /**
   * @return {number} info An unsigned 8-bit integer
   */
  readTypeRef () {
    return readVarUint(this.restDecoder)
  }

  /**
   * Write len of a struct - well suited for Opt RLE encoder.
   *
   * @return {number} len
   */
  readLen () {
    return readVarUint(this.restDecoder)
  }

  /**
   * @return {any}
   */
  readAny () {
    return readAny(this.restDecoder)
  }

  /**
   * @return {Uint8Array}
   */
  readBuf () {
    // copy so the returned buffer does not alias the decoder's backing array
    return copyUint8Array(readVarUint8Array(this.restDecoder))
  }

  /**
   * Legacy implementation uses JSON parse. We use any-decoding in v2.
   *
   * @return {any}
   */
  readJSON () {
    return JSON.parse(readVarString(this.restDecoder))
  }

  /**
   * @return {string}
   */
  readKey () {
    return readVarString(this.restDecoder)
  }
}
|
||
/**
 * Delete-set decoder for the v2 update format.
 *
 * Clocks are delta-encoded: `dsCurrVal` accumulates each read so successive
 * clock/len reads produce absolute positions. `resetDsCurVal` restarts the
 * accumulator (e.g. per client).
 */
class DSDecoderV2 {
  /**
   * @param {decoding.Decoder} decoder
   */
  constructor (decoder) {
    /**
     * Running absolute clock value for delta decoding.
     * @private
     */
    this.dsCurrVal = 0;
    this.restDecoder = decoder;
  }

  resetDsCurVal () {
    this.dsCurrVal = 0;
  }

  /**
   * @return {number}
   */
  readDsClock () {
    this.dsCurrVal += readVarUint(this.restDecoder);
    return this.dsCurrVal
  }

  /**
   * @return {number}
   */
  readDsLen () {
    // lengths are stored off-by-one (len-1) so a length of 1 encodes as 0
    const diff = readVarUint(this.restDecoder) + 1;
    this.dsCurrVal += diff;
    return diff
  }
}
|
||
/**
 * Struct decoder for the v2 update format.
 *
 * v2 stores each logical field in its own compressed column (RLE / opt-RLE /
 * int-diff). The constructor must read the columns in exactly the order the
 * UpdateEncoderV2.toUint8Array writes them.
 */
class UpdateDecoderV2 extends DSDecoderV2 {
  /**
   * @param {decoding.Decoder} decoder
   */
  constructor (decoder) {
    super(decoder);
    /**
     * List of cached keys. If the keys[id] does not exist, we read a new key
     * from stringEncoder and push it to keys.
     *
     * @type {Array<string>}
     */
    this.keys = [];
    readVarUint(decoder); // read feature flag - currently unused
    // Column decoders — order must match UpdateEncoderV2.toUint8Array.
    this.keyClockDecoder = new IntDiffOptRleDecoder(readVarUint8Array(decoder));
    this.clientDecoder = new UintOptRleDecoder(readVarUint8Array(decoder));
    this.leftClockDecoder = new IntDiffOptRleDecoder(readVarUint8Array(decoder));
    this.rightClockDecoder = new IntDiffOptRleDecoder(readVarUint8Array(decoder));
    this.infoDecoder = new RleDecoder(readVarUint8Array(decoder), readUint8);
    this.stringDecoder = new StringDecoder$2(readVarUint8Array(decoder));
    this.parentInfoDecoder = new RleDecoder(readVarUint8Array(decoder), readUint8);
    this.typeRefDecoder = new UintOptRleDecoder(readVarUint8Array(decoder));
    this.lenDecoder = new UintOptRleDecoder(readVarUint8Array(decoder));
  }

  /**
   * @return {ID}
   */
  readLeftID () {
    return new ID(this.clientDecoder.read(), this.leftClockDecoder.read())
  }

  /**
   * @return {ID}
   */
  readRightID () {
    return new ID(this.clientDecoder.read(), this.rightClockDecoder.read())
  }

  /**
   * Read the next client id.
   * Use this in favor of readID whenever possible to reduce the number of objects created.
   */
  readClient () {
    return this.clientDecoder.read()
  }

  /**
   * @return {number} info An unsigned 8-bit integer
   */
  readInfo () {
    return /** @type {number} */ (this.infoDecoder.read())
  }

  /**
   * @return {string}
   */
  readString () {
    return this.stringDecoder.read()
  }

  /**
   * @return {boolean}
   */
  readParentInfo () {
    return this.parentInfoDecoder.read() === 1
  }

  /**
   * @return {number} An unsigned 8-bit integer
   */
  readTypeRef () {
    return this.typeRefDecoder.read()
  }

  /**
   * Write len of a struct - well suited for Opt RLE encoder.
   *
   * @return {number}
   */
  readLen () {
    return this.lenDecoder.read()
  }

  /**
   * @return {any}
   */
  readAny () {
    return readAny(this.restDecoder)
  }

  /**
   * @return {Uint8Array}
   */
  readBuf () {
    return readVarUint8Array(this.restDecoder)
  }

  /**
   * This is mainly here for legacy purposes.
   *
   * Initial we incoded objects using JSON. Now we use the much faster lib0/any-encoder. This method mainly exists for legacy purposes for the v1 encoder.
   *
   * @return {any}
   */
  readJSON () {
    return readAny(this.restDecoder)
  }

  /**
   * Read a (possibly cached) property key.
   *
   * Keys already seen are referenced by index via keyClockDecoder; new keys
   * are read from the string column and appended to the cache.
   *
   * @return {string}
   */
  readKey () {
    const keyClock = this.keyClockDecoder.read();
    if (keyClock < this.keys.length) {
      return this.keys[keyClock]
    } else {
      const key = this.stringDecoder.read();
      this.keys.push(key);
      return key
    }
  }
}
|
||
/**
 * Delete-set encoder for the v1 update format.
 *
 * Counterpart of DSDecoderV1: clocks and lengths are written as plain
 * var-uints without delta compression.
 */
class DSEncoderV1 {
  constructor () {
    this.restEncoder = createEncoder();
  }

  toUint8Array () {
    return toUint8Array(this.restEncoder)
  }

  resetDsCurVal () {
    // nop — v1 keeps no running delta state
  }

  /**
   * @param {number} clock
   */
  writeDsClock (clock) {
    writeVarUint(this.restEncoder, clock);
  }

  /**
   * @param {number} len
   */
  writeDsLen (len) {
    writeVarUint(this.restEncoder, len);
  }
}
|
||
/**
 * Struct encoder for the v1 update format.
 *
 * Counterpart of UpdateDecoderV1: every field goes straight into the single
 * `restEncoder` stream as a var-uint / var-string.
 */
class UpdateEncoderV1 extends DSEncoderV1 {
  /**
   * @param {ID} id
   */
  writeLeftID (id) {
    writeVarUint(this.restEncoder, id.client);
    writeVarUint(this.restEncoder, id.clock);
  }

  /**
   * @param {ID} id
   */
  writeRightID (id) {
    writeVarUint(this.restEncoder, id.client);
    writeVarUint(this.restEncoder, id.clock);
  }

  /**
   * Use writeClient and writeClock instead of writeID if possible.
   * @param {number} client
   */
  writeClient (client) {
    writeVarUint(this.restEncoder, client);
  }

  /**
   * @param {number} info An unsigned 8-bit integer
   */
  writeInfo (info) {
    writeUint8(this.restEncoder, info);
  }

  /**
   * @param {string} s
   */
  writeString (s) {
    writeVarString(this.restEncoder, s);
  }

  /**
   * @param {boolean} isYKey
   */
  writeParentInfo (isYKey) {
    writeVarUint(this.restEncoder, isYKey ? 1 : 0);
  }

  /**
   * @param {number} info An unsigned 8-bit integer
   */
  writeTypeRef (info) {
    writeVarUint(this.restEncoder, info);
  }

  /**
   * Write len of a struct - well suited for Opt RLE encoder.
   *
   * @param {number} len
   */
  writeLen (len) {
    writeVarUint(this.restEncoder, len);
  }

  /**
   * @param {any} any
   */
  writeAny (any) {
    writeAny(this.restEncoder, any);
  }

  /**
   * @param {Uint8Array} buf
   */
  writeBuf (buf) {
    writeVarUint8Array(this.restEncoder, buf);
  }

  /**
   * Legacy JSON encoding — the v2 encoder writes lib0 "any" values instead.
   *
   * @param {any} embed
   */
  writeJSON (embed) {
    writeVarString(this.restEncoder, JSON.stringify(embed));
  }

  /**
   * @param {string} key
   */
  writeKey (key) {
    writeVarString(this.restEncoder, key);
  }
}
|
||
/**
 * Delete-set encoder for the v2 update format.
 *
 * Counterpart of DSDecoderV2: clocks are delta-encoded relative to the
 * running `dsCurrVal`; lengths are written off-by-one (len - 1).
 */
class DSEncoderV2 {
  constructor () {
    this.restEncoder = createEncoder(); // encodes all the rest / non-optimized
    this.dsCurrVal = 0;
  }

  toUint8Array () {
    return toUint8Array(this.restEncoder)
  }

  resetDsCurVal () {
    this.dsCurrVal = 0;
  }

  /**
   * @param {number} clock
   */
  writeDsClock (clock) {
    const diff = clock - this.dsCurrVal;
    this.dsCurrVal = clock;
    writeVarUint(this.restEncoder, diff);
  }

  /**
   * @param {number} len
   */
  writeDsLen (len) {
    // zero-length ranges are invalid in a delete set
    if (len === 0) {
      unexpectedCase();
    }
    writeVarUint(this.restEncoder, len - 1);
    this.dsCurrVal += len;
  }
}
|
||
/**
 * Struct encoder for the v2 update format.
 *
 * Each logical field is written to its own compressed column; toUint8Array
 * concatenates the columns in the exact order UpdateDecoderV2's constructor
 * reads them back.
 */
class UpdateEncoderV2 extends DSEncoderV2 {
  constructor () {
    super();
    /**
     * Maps already-written keys to their key-clock (see writeKey).
     * @type {Map<string,number>}
     */
    this.keyMap = new Map();
    /**
     * Refers to the next uniqe key-identifier to me used.
     * See writeKey method for more information.
     *
     * @type {number}
     */
    this.keyClock = 0;
    this.keyClockEncoder = new IntDiffOptRleEncoder();
    this.clientEncoder = new UintOptRleEncoder();
    this.leftClockEncoder = new IntDiffOptRleEncoder();
    this.rightClockEncoder = new IntDiffOptRleEncoder();
    this.infoEncoder = new RleEncoder(writeUint8);
    this.stringEncoder = new StringEncoder();
    this.parentInfoEncoder = new RleEncoder(writeUint8);
    this.typeRefEncoder = new UintOptRleEncoder();
    this.lenEncoder = new UintOptRleEncoder();
  }

  toUint8Array () {
    const encoder = createEncoder();
    writeVarUint(encoder, 0); // this is a feature flag that we might use in the future
    // Column order must match UpdateDecoderV2's constructor.
    writeVarUint8Array(encoder, this.keyClockEncoder.toUint8Array());
    writeVarUint8Array(encoder, this.clientEncoder.toUint8Array());
    writeVarUint8Array(encoder, this.leftClockEncoder.toUint8Array());
    writeVarUint8Array(encoder, this.rightClockEncoder.toUint8Array());
    writeVarUint8Array(encoder, toUint8Array(this.infoEncoder));
    writeVarUint8Array(encoder, this.stringEncoder.toUint8Array());
    writeVarUint8Array(encoder, toUint8Array(this.parentInfoEncoder));
    writeVarUint8Array(encoder, this.typeRefEncoder.toUint8Array());
    writeVarUint8Array(encoder, this.lenEncoder.toUint8Array());
    // @note The rest encoder is appended! (note the missing var)
    writeUint8Array(encoder, toUint8Array(this.restEncoder));
    return toUint8Array(encoder)
  }

  /**
   * @param {ID} id
   */
  writeLeftID (id) {
    this.clientEncoder.write(id.client);
    this.leftClockEncoder.write(id.clock);
  }

  /**
   * @param {ID} id
   */
  writeRightID (id) {
    this.clientEncoder.write(id.client);
    this.rightClockEncoder.write(id.clock);
  }

  /**
   * @param {number} client
   */
  writeClient (client) {
    this.clientEncoder.write(client);
  }

  /**
   * @param {number} info An unsigned 8-bit integer
   */
  writeInfo (info) {
    this.infoEncoder.write(info);
  }

  /**
   * @param {string} s
   */
  writeString (s) {
    this.stringEncoder.write(s);
  }

  /**
   * @param {boolean} isYKey
   */
  writeParentInfo (isYKey) {
    this.parentInfoEncoder.write(isYKey ? 1 : 0);
  }

  /**
   * @param {number} info An unsigned 8-bit integer
   */
  writeTypeRef (info) {
    this.typeRefEncoder.write(info);
  }

  /**
   * Write len of a struct - well suited for Opt RLE encoder.
   *
   * @param {number} len
   */
  writeLen (len) {
    this.lenEncoder.write(len);
  }

  /**
   * @param {any} any
   */
  writeAny (any) {
    writeAny(this.restEncoder, any);
  }

  /**
   * @param {Uint8Array} buf
   */
  writeBuf (buf) {
    writeVarUint8Array(this.restEncoder, buf);
  }

  /**
   * This is mainly here for legacy purposes.
   *
   * Initial we incoded objects using JSON. Now we use the much faster lib0/any-encoder. This method mainly exists for legacy purposes for the v1 encoder.
   *
   * @param {any} embed
   */
  writeJSON (embed) {
    writeAny(this.restEncoder, embed);
  }

  /**
   * Property keys are often reused. For example, in y-prosemirror the key `bold` might
   * occur very often. For a 3d application, the key `position` might occur very often.
   *
   * We cache these keys in a Map and refer to them via a unique number.
   *
   * @param {string} key
   */
  writeKey (key) {
    const clock = this.keyMap.get(key);
    if (clock === undefined) {
      /**
       * @todo uncomment to introduce this feature finally
       *
       * Background. The ContentFormat object was always encoded using writeKey, but the decoder used to use readString.
       * Furthermore, I forgot to set the keyclock. So everything was working fine.
       *
       * However, this feature here is basically useless as it is not being used (it actually only consumes extra memory).
       *
       * I don't know yet how to reintroduce this feature..
       *
       * Older clients won't be able to read updates when we reintroduce this feature. So this should probably be done using a flag.
       *
       */
      // this.keyMap.set(key, this.keyClock)
      this.keyClockEncoder.write(this.keyClock++);
      this.stringEncoder.write(key);
    } else {
      this.keyClockEncoder.write(clock);
    }
  }
}
|
||
/**
 * Write all structs of one client starting at `clock` to the encoder.
 *
 * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
 * @param {Array<GC|Item>} structs All structs by `client`
 * @param {number} client
 * @param {number} clock write structs starting with `ID(client,clock)`
 *
 * @function
 */
const writeStructs = (encoder, structs, client, clock) => {
  // write first id
  clock = max(clock, structs[0].id.clock); // make sure the first id exists
  const startNewStructs = findIndexSS(structs, clock);
  // write # encoded structs
  writeVarUint(encoder.restEncoder, structs.length - startNewStructs);
  encoder.writeClient(client);
  writeVarUint(encoder.restEncoder, clock);
  const firstStruct = structs[startNewStructs];
  // write first struct with an offset — `clock` may point into its middle
  firstStruct.write(encoder, clock - firstStruct.id.clock);
  // all remaining structs are written whole (offset 0)
  for (let i = startNewStructs + 1; i < structs.length; i++) {
    structs[i].write(encoder, 0);
  }
};
|
||
/**
 * Encode, per client, every struct that is newer than the clock recorded in `_sm`.
 * Clients present in the store but missing from `_sm` are written from clock 0.
 *
 * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
 * @param {StructStore} store
 * @param {Map<number,number>} _sm
 *
 * @private
 * @function
 */
const writeClientsStructs = (encoder, store, _sm) => {
  // Keep only the entries for which the store actually holds newer structs.
  const sm = new Map();
  for (const [client, clock] of _sm) {
    if (getState(store, client) > clock) {
      sm.set(client, clock);
    }
  }
  // Any client the store knows about but `_sm` does not is written completely.
  for (const client of getStateVector(store).keys()) {
    if (!_sm.has(client)) {
      sm.set(client, 0);
    }
  }
  // write # states that were updated
  writeVarUint(encoder.restEncoder, sm.size);
  // Write items with higher client ids first.
  // This heavily improves the conflict algorithm.
  const byClientDesc = [...sm.entries()].sort((x, y) => y[0] - x[0]);
  for (const [client, clock] of byClientDesc) {
    writeStructs(encoder, /** @type {Array<GC|Item>} */ (store.clients.get(client)), client, clock);
  }
};
|
||
/**
 * Read all struct references from an update into a per-client map of
 * `{ i, refs }` cursors that integrateStructs later consumes.
 *
 * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder The decoder object to read data from.
 * @param {Doc} doc
 * @return {Map<number, { i: number, refs: Array<Item | GC> }>}
 *
 * @private
 * @function
 */
const readClientsStructRefs = (decoder, doc) => {
  /**
   * @type {Map<number, { i: number, refs: Array<Item | GC> }>}
   */
  const clientRefs = create$6();
  const numOfStateUpdates = readVarUint(decoder.restDecoder);
  for (let i = 0; i < numOfStateUpdates; i++) {
    const numberOfStructs = readVarUint(decoder.restDecoder);
    /**
     * @type {Array<GC|Item>}
     */
    const refs = new Array(numberOfStructs);
    const client = decoder.readClient();
    let clock = readVarUint(decoder.restDecoder);
    clientRefs.set(client, { i: 0, refs });
    for (let i = 0; i < numberOfStructs; i++) {
      const info = decoder.readInfo();
      // the lower 5 bits of `info` select the struct kind
      switch (BITS5 & info) {
        case 0: { // GC
          const len = decoder.readLen();
          refs[i] = new GC(createID(client, clock), len);
          clock += len;
          break
        }
        case 10: { // Skip Struct (nothing to apply)
          // @todo we could reduce the amount of checks by adding Skip struct to clientRefs so we know that something is missing.
          const len = readVarUint(decoder.restDecoder);
          refs[i] = new Skip(createID(client, clock), len);
          clock += len;
          break
        }
        default: { // Item with content
          /**
           * The optimized implementation doesn't use any variables because inlining variables is faster.
           * Below a non-optimized version is shown that implements the basic algorithm with
           * a few comments
           */
          const cantCopyParentInfo = (info & (BIT7 | BIT8)) === 0;
          // If parent = null and neither left nor right are defined, then we know that `parent` is child of `y`
          // and we read the next string as parentYKey.
          // It indicates how we store/retrieve parent from `y.share`
          // @type {string|null}
          const struct = new Item$1(
            createID(client, clock),
            null, // leftd
            (info & BIT8) === BIT8 ? decoder.readLeftID() : null, // origin
            null, // right
            (info & BIT7) === BIT7 ? decoder.readRightID() : null, // right origin
            cantCopyParentInfo ? (decoder.readParentInfo() ? doc.get(decoder.readString()) : decoder.readLeftID()) : null, // parent
            cantCopyParentInfo && (info & BIT6) === BIT6 ? decoder.readString() : null, // parentSub
            readItemContent(decoder, info) // item content
          );
          /* A non-optimized implementation of the above algorithm:

          // The item that was originally to the left of this item.
          const origin = (info & binary.BIT8) === binary.BIT8 ? decoder.readLeftID() : null
          // The item that was originally to the right of this item.
          const rightOrigin = (info & binary.BIT7) === binary.BIT7 ? decoder.readRightID() : null
          const cantCopyParentInfo = (info & (binary.BIT7 | binary.BIT8)) === 0
          const hasParentYKey = cantCopyParentInfo ? decoder.readParentInfo() : false
          // If parent = null and neither left nor right are defined, then we know that `parent` is child of `y`
          // and we read the next string as parentYKey.
          // It indicates how we store/retrieve parent from `y.share`
          // @type {string|null}
          const parentYKey = cantCopyParentInfo && hasParentYKey ? decoder.readString() : null

          const struct = new Item(
            createID(client, clock),
            null, // leftd
            origin, // origin
            null, // right
            rightOrigin, // right origin
            cantCopyParentInfo && !hasParentYKey ? decoder.readLeftID() : (parentYKey !== null ? doc.get(parentYKey) : null), // parent
            cantCopyParentInfo && (info & binary.BIT6) === binary.BIT6 ? decoder.readString() : null, // parentSub
            readItemContent(decoder, info) // item content
          )
          */
          refs[i] = struct;
          clock += struct.length;
        }
      }
    }
  }
  return clientRefs
};
|
||
/**
 * Resume computing structs generated by struct readers.
 *
 * While there is something to do, we integrate structs in this order
 * 1. top element on stack, if stack is not empty
 * 2. next element from current struct reader (if empty, use next struct reader)
 *
 * If struct causally depends on another struct (ref.missing), we put next reader of
 * `ref.id.client` on top of stack.
 *
 * At some point we find a struct that has no causal dependencies,
 * then we start emptying the stack.
 *
 * It is not possible to have circles: i.e. struct1 (from client1) depends on struct2 (from client2)
 * depends on struct3 (from client1). Therefore the max stack size is eqaul to `structReaders.length`.
 *
 * This method is implemented in a way so that we can resume computation if this update
 * causally depends on another update.
 *
 * @param {Transaction} transaction
 * @param {StructStore} store
 * @param {Map<number, { i: number, refs: (GC | Item)[] }>} clientsStructRefs
 * @return { null | { update: Uint8Array, missing: Map<number,number> } }
 *
 * @private
 * @function
 */
const integrateStructs = (transaction, store, clientsStructRefs) => {
  /**
   * Structs waiting on a causal dependency before they can be integrated.
   * @type {Array<Item | GC>}
   */
  const stack = [];
  // sort them so that we take the higher id first, in case of conflicts the lower id will probably not conflict with the id from the higher user.
  let clientsStructRefsIds = Array.from(clientsStructRefs.keys()).sort((a, b) => a - b);
  if (clientsStructRefsIds.length === 0) {
    return null
  }
  // Find the next non-exhausted reader (highest client id first), or null when all are done.
  const getNextStructTarget = () => {
    if (clientsStructRefsIds.length === 0) {
      return null
    }
    let nextStructsTarget = /** @type {{i:number,refs:Array<GC|Item>}} */ (clientsStructRefs.get(clientsStructRefsIds[clientsStructRefsIds.length - 1]));
    while (nextStructsTarget.refs.length === nextStructsTarget.i) {
      clientsStructRefsIds.pop();
      if (clientsStructRefsIds.length > 0) {
        nextStructsTarget = /** @type {{i:number,refs:Array<GC|Item>}} */ (clientsStructRefs.get(clientsStructRefsIds[clientsStructRefsIds.length - 1]));
      } else {
        return null
      }
    }
    return nextStructsTarget
  };
  let curStructsTarget = getNextStructTarget();
  if (curStructsTarget === null && stack.length === 0) {
    return null
  }

  /**
   * Collects structs that could not be applied; re-encoded at the end as the pending update.
   * @type {StructStore}
   */
  const restStructs = new StructStore();
  // Maps client -> lowest clock we are still missing from that client.
  const missingSV = new Map();
  /**
   * @param {number} client
   * @param {number} clock
   */
  const updateMissingSv = (client, clock) => {
    const mclock = missingSV.get(client);
    if (mclock == null || mclock > clock) {
      missingSV.set(client, clock);
    }
  };
  /**
   * @type {GC|Item}
   */
  let stackHead = /** @type {any} */ (curStructsTarget).refs[/** @type {any} */ (curStructsTarget).i++];
  // caching the state because it is used very often
  const state = new Map();

  // Move everything on the stack (plus the remaining refs of those clients) into restStructs.
  const addStackToRestSS = () => {
    for (const item of stack) {
      const client = item.id.client;
      const unapplicableItems = clientsStructRefs.get(client);
      if (unapplicableItems) {
        // decrement because we weren't able to apply previous operation
        unapplicableItems.i--;
        restStructs.clients.set(client, unapplicableItems.refs.slice(unapplicableItems.i));
        clientsStructRefs.delete(client);
        unapplicableItems.i = 0;
        unapplicableItems.refs = [];
      } else {
        // item was the last item on clientsStructRefs and the field was already cleared. Add item to restStructs and continue
        restStructs.clients.set(client, [item]);
      }
      // remove client from clientsStructRefsIds to prevent users from applying the same update again
      clientsStructRefsIds = clientsStructRefsIds.filter(c => c !== client);
    }
    stack.length = 0;
  };

  // iterate over all struct readers until we are done
  while (true) {
    if (stackHead.constructor !== Skip) {
      const localClock = setIfUndefined(state, stackHead.id.client, () => getState(store, stackHead.id.client));
      const offset = localClock - stackHead.id.clock;
      if (offset < 0) {
        // update from the same client is missing
        stack.push(stackHead);
        updateMissingSv(stackHead.id.client, stackHead.id.clock - 1);
        // hid a dead wall, add all items from stack to restSS
        addStackToRestSS();
      } else {
        const missing = stackHead.getMissing(transaction, store);
        if (missing !== null) {
          stack.push(stackHead);
          // get the struct reader that has the missing struct
          /**
           * @type {{ refs: Array<GC|Item>, i: number }}
           */
          const structRefs = clientsStructRefs.get(/** @type {number} */ (missing)) || { refs: [], i: 0 };
          if (structRefs.refs.length === structRefs.i) {
            // This update message causally depends on another update message that doesn't exist yet
            updateMissingSv(/** @type {number} */ (missing), getState(store, missing));
            addStackToRestSS();
          } else {
            // switch to the reader of the missing client and retry from there
            stackHead = structRefs.refs[structRefs.i++];
            continue
          }
        } else if (offset === 0 || offset < stackHead.length) {
          // all fine, apply the stackhead (offset >= length means it is already fully known)
          stackHead.integrate(transaction, offset);
          state.set(stackHead.id.client, stackHead.id.clock + stackHead.length);
        }
      }
    }
    // iterate to next stackHead
    if (stack.length > 0) {
      stackHead = /** @type {GC|Item} */ (stack.pop());
    } else if (curStructsTarget !== null && curStructsTarget.i < curStructsTarget.refs.length) {
      stackHead = /** @type {GC|Item} */ (curStructsTarget.refs[curStructsTarget.i++]);
    } else {
      curStructsTarget = getNextStructTarget();
      if (curStructsTarget === null) {
        // we are done!
        break
      } else {
        stackHead = /** @type {GC|Item} */ (curStructsTarget.refs[curStructsTarget.i++]);
      }
    }
  }
  if (restStructs.clients.size > 0) {
    // Re-encode everything we could not apply as a v2 update for later retry.
    const encoder = new UpdateEncoderV2();
    writeClientsStructs(encoder, restStructs, new Map());
    // write empty deleteset
    // writeDeleteSet(encoder, new DeleteSet())
    writeVarUint(encoder.restEncoder, 0); // => no need for an extra function call, just write 0 deletes
    return { missing: missingSV, update: encoder.toUint8Array() }
  }
  return null
};
|
||
/**
 * Write all structs created during a transaction (everything after its
 * `beforeState`) to the encoder.
 *
 * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
 * @param {Transaction} transaction
 *
 * @private
 * @function
 */
const writeStructsFromTransaction = (encoder, transaction) => writeClientsStructs(encoder, transaction.doc.store, transaction.beforeState);
|
||
/**
 * Read and apply a document update.
 *
 * This function has the same effect as `applyUpdate` but accepts an decoder.
 *
 * Structs that cannot be applied yet (causally missing dependencies) are kept
 * in `store.pendingStructs` / `store.pendingDs` and retried when later
 * updates fill the gaps.
 *
 * @param {decoding.Decoder} decoder
 * @param {Doc} ydoc
 * @param {any} [transactionOrigin] This will be stored on `transaction.origin` and `.on('update', (update, origin))`
 * @param {UpdateDecoderV1 | UpdateDecoderV2} [structDecoder]
 *
 * @function
 */
const readUpdateV2 = (decoder, ydoc, transactionOrigin, structDecoder = new UpdateDecoderV2(decoder)) =>
  transact(ydoc, transaction => {
    // force that transaction.local is set to non-local
    transaction.local = false;
    let retry = false;
    const doc = transaction.doc;
    const store = doc.store;
    const ss = readClientsStructRefs(structDecoder, doc);
    const restStructs = integrateStructs(transaction, store, ss);
    const pending = store.pendingStructs;
    if (pending) {
      // check if we can apply something
      for (const [client, clock] of pending.missing) {
        if (clock < getState(store, client)) {
          retry = true;
          break
        }
      }
      if (restStructs) {
        // merge restStructs into store.pending
        for (const [client, clock] of restStructs.missing) {
          const mclock = pending.missing.get(client);
          if (mclock == null || mclock > clock) {
            pending.missing.set(client, clock);
          }
        }
        pending.update = mergeUpdatesV2([pending.update, restStructs.update]);
      }
    } else {
      store.pendingStructs = restStructs;
    }
    const dsRest = readAndApplyDeleteSet(structDecoder, transaction, store);
    if (store.pendingDs) {
      // @todo we could make a lower-bound state-vector check as we do above
      const pendingDSUpdate = new UpdateDecoderV2(createDecoder(store.pendingDs));
      readVarUint(pendingDSUpdate.restDecoder); // read 0 structs, because we only encode deletes in pendingdsupdate
      const dsRest2 = readAndApplyDeleteSet(pendingDSUpdate, transaction, store);
      if (dsRest && dsRest2) {
        // case 1: ds1 != null && ds2 != null
        store.pendingDs = mergeUpdatesV2([dsRest, dsRest2]);
      } else {
        // case 2: ds1 != null
        // case 3: ds2 != null
        // case 4: ds1 == null && ds2 == null
        store.pendingDs = dsRest || dsRest2;
      }
    } else {
      // Either dsRest == null && pendingDs == null OR dsRest != null
      store.pendingDs = dsRest;
    }
    if (retry) {
      // some previously-pending structs became applicable — re-apply the pending update
      const update = /** @type {{update: Uint8Array}} */ (store.pendingStructs).update;
      store.pendingStructs = null;
      applyUpdateV2(transaction.doc, update);
    }
  }, transactionOrigin, false);
|
||
/**
 * Apply a document update created by, for example, `y.on('update', update => ..)` or `update = encodeStateAsUpdate()`.
 *
 * This function has the same effect as `readUpdate` but accepts an Uint8Array instead of a Decoder.
 *
 * @param {Doc} ydoc
 * @param {Uint8Array} update
 * @param {any} [transactionOrigin] This will be stored on `transaction.origin` and `.on('update', (update, origin))`
 * @param {typeof UpdateDecoderV1 | typeof UpdateDecoderV2} [YDecoder] decoder class matching the update's wire format (defaults to v2)
 *
 * @function
 */
const applyUpdateV2 = (ydoc, update, transactionOrigin, YDecoder = UpdateDecoderV2) => {
  const decoder = createDecoder(update);
  readUpdateV2(decoder, ydoc, transactionOrigin, new YDecoder(decoder));
};
|
||
/**
 * Apply a document update created by, for example, `y.on('update', update => ..)` or `update = encodeStateAsUpdate()`.
 *
 * This function has the same effect as `readUpdate` but accepts an Uint8Array instead of a Decoder.
 * Convenience wrapper around `applyUpdateV2` for updates in the v1 wire format.
 *
 * @param {Doc} ydoc
 * @param {Uint8Array} update
 * @param {any} [transactionOrigin] This will be stored on `transaction.origin` and `.on('update', (update, origin))`
 *
 * @function
 */
const applyUpdate = (ydoc, update, transactionOrigin) => applyUpdateV2(ydoc, update, transactionOrigin, UpdateDecoderV1);
|
||
/**
 * Write all the document as a single update message. If you specify the state of the remote client (`targetStateVector`) it will
 * only write the operations that are missing.
 *
 * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
 * @param {Doc} doc
 * @param {Map<number,number>} [targetStateVector] The state of the target that receives the update. Leave empty to write all known structs
 *
 * @function
 */
const writeStateAsUpdate = (encoder, doc, targetStateVector = new Map()) => {
  // structs first, then the full delete set of the store
  writeClientsStructs(encoder, doc.store, targetStateVector);
  writeDeleteSet(encoder, createDeleteSetFromStructStore(doc.store));
};
|
||
/**
 * Write all the document as a single update message that can be applied on the remote document. If you specify the state of the remote client (`targetState`) it will
 * only write the operations that are missing.
 *
 * Use `writeStateAsUpdate` instead if you are working with lib0/encoding.js#Encoder
 *
 * @param {Doc} doc
 * @param {Uint8Array} [encodedTargetStateVector] The state of the target that receives the update. Leave empty to write all known structs
 * @param {UpdateEncoderV1 | UpdateEncoderV2} [encoder]
 * @return {Uint8Array}
 *
 * @function
 */
const encodeStateAsUpdateV2 = (doc, encodedTargetStateVector = new Uint8Array([0]), encoder = new UpdateEncoderV2()) => {
  const targetStateVector = decodeStateVector$1(encodedTargetStateVector);
  writeStateAsUpdate(encoder, doc, targetStateVector);
  const updates = [encoder.toUint8Array()];
  // also add the pending updates (if there are any)
  if (doc.store.pendingDs) {
    updates.push(doc.store.pendingDs);
  }
  if (doc.store.pendingStructs) {
    updates.push(diffUpdateV2(doc.store.pendingStructs.update, encodedTargetStateVector));
  }
  if (updates.length > 1) {
    // pending data is stored in v2 format; convert per-update when the caller wants v1 output
    if (encoder.constructor === UpdateEncoderV1) {
      return mergeUpdates$1(updates.map((update, i) => i === 0 ? update : convertUpdateFormatV2ToV1(update)))
    } else if (encoder.constructor === UpdateEncoderV2) {
      return mergeUpdatesV2(updates)
    }
  }
  return updates[0]
};
|
||
|
||
/**
 * Encode the whole document as a single V1 update message applicable on a
 * remote document. With `encodedTargetStateVector` given, only the operations
 * missing from that state are written.
 *
 * Use `writeStateAsUpdate` instead if you are working with lib0/encoding.js#Encoder
 *
 * @param {Doc} doc
 * @param {Uint8Array} [encodedTargetStateVector] The state of the target that receives the update. Leave empty to write all known structs
 * @return {Uint8Array}
 *
 * @function
 */
const encodeStateAsUpdate = (doc, encodedTargetStateVector) => {
  // Same as the V2 variant, but with the V1 wire encoder.
  return encodeStateAsUpdateV2(doc, encodedTargetStateVector, new UpdateEncoderV1())
};
/**
 * Read a state vector from a decoder and return it as a Map.
 *
 * @param {DSDecoderV1 | DSDecoderV2} decoder
 * @return {Map<number,number>} Maps `client` to the number next expected `clock` from that client.
 *
 * @function
 */
const readStateVector$2 = decoder => {
  const stateMap = new Map();
  const entryCount = readVarUint(decoder.restDecoder);
  for (let entry = 0; entry < entryCount; entry++) {
    // Wire order per entry: client id first, then its clock.
    const client = readVarUint(decoder.restDecoder);
    const clock = readVarUint(decoder.restDecoder);
    stateMap.set(client, clock);
  }
  return stateMap
};
/**
|
||
* Read decodedState and return State as Map.
|
||
*
|
||
* @param {Uint8Array} decodedState
|
||
* @return {Map<number,number>} Maps `client` to the number next expected `clock` from that client.
|
||
*
|
||
* @function
|
||
*/
|
||
// export const decodeStateVectorV2 = decodedState => readStateVector(new DSDecoderV2(decoding.createDecoder(decodedState)))
|
||
|
||
/**
 * Decode a V1-encoded state vector and return it as a Map.
 *
 * @param {Uint8Array} decodedState
 * @return {Map<number,number>} Maps `client` to the number next expected `clock` from that client.
 *
 * @function
 */
const decodeStateVector$1 = decodedState => {
  return readStateVector$2(new DSDecoderV1(createDecoder(decodedState)))
};
/**
 * Write a state vector (client → next expected clock) to the encoder.
 *
 * @param {DSEncoderV1 | DSEncoderV2} encoder
 * @param {Map<number,number>} sv
 * @return {DSEncoderV1 | DSEncoderV2} the encoder that was written to
 * @function
 */
const writeStateVector$1 = (encoder, sv) => {
  writeVarUint(encoder.restEncoder, sv.size);
  // Entries are written sorted by client id, descending, for a stable wire order.
  const sortedEntries = Array.from(sv.entries()).sort((a, b) => b[0] - a[0]);
  for (const [client, clock] of sortedEntries) {
    writeVarUint(encoder.restEncoder, client); // @todo use a special client decoder that is based on mapping
    writeVarUint(encoder.restEncoder, clock);
  }
  return encoder
};
/**
 * Write the current state vector of `doc` to the encoder.
 *
 * @param {DSEncoderV1 | DSEncoderV2} encoder
 * @param {Doc} doc
 *
 * @function
 */
const writeDocumentStateVector = (encoder, doc) => {
  return writeStateVector$1(encoder, getStateVector(doc.store))
};
/**
 * Encode a state vector as an Uint8Array (V2 encoder by default).
 *
 * @param {Doc|Map<number,number>} doc either a document or an already-built state vector
 * @param {DSEncoderV1 | DSEncoderV2} [encoder]
 * @return {Uint8Array}
 *
 * @function
 */
const encodeStateVectorV2 = (doc, encoder = new DSEncoderV2()) => {
  // A raw Map<client,clock> is written directly; a Doc is reduced to its
  // current state vector first.
  const write = doc instanceof Map ? writeStateVector$1 : writeDocumentStateVector;
  write(encoder, doc);
  return encoder.toUint8Array()
};
/**
 * Encode a state vector as an Uint8Array in V1 format.
 *
 * @param {Doc|Map<number,number>} doc
 * @return {Uint8Array}
 *
 * @function
 */
const encodeStateVector = doc => {
  return encodeStateVectorV2(doc, new DSEncoderV1())
};
/**
 * General event handler implementation: a thin container holding registered
 * listeners, invoked in insertion order by `callEventHandlerListeners`.
 *
 * @template ARG0, ARG1
 *
 * @private
 */
class EventHandler {
  constructor () {
    /**
     * The registered listener functions.
     * @type {Array<function(ARG0, ARG1):void>}
     */
    this.l = [];
  }
}
/**
 * Construct a fresh, empty EventHandler.
 *
 * @template ARG0,ARG1
 * @returns {EventHandler<ARG0,ARG1>}
 *
 * @private
 * @function
 */
const createEventHandler = () => {
  return new EventHandler()
};
/**
 * Adds an event listener that is called when
 * {@link EventHandler#callEventListeners} is called.
 *
 * @template ARG0,ARG1
 * @param {EventHandler<ARG0,ARG1>} eventHandler
 * @param {function(ARG0,ARG1):void} f The event handler.
 *
 * @private
 * @function
 */
const addEventHandlerListener = (eventHandler, f) => {
  // Array#push semantics: the listener is appended and the new count returned.
  return eventHandler.l.push(f)
};
/**
 * Removes a previously registered event listener.
 *
 * @template ARG0,ARG1
 * @param {EventHandler<ARG0,ARG1>} eventHandler
 * @param {function(ARG0,ARG1):void} f The event handler that was added with
 *                     {@link EventHandler#addEventListener}
 *
 * @private
 * @function
 */
const removeEventHandlerListener = (eventHandler, f) => {
  const previous = eventHandler.l;
  eventHandler.l = previous.filter(g => f !== g);
  // If no entry was dropped, the caller tried to detach an unknown listener.
  if (previous.length === eventHandler.l.length) {
    console.error('[yjs] Tried to remove event handler that doesn\'t exist.');
  }
};
/**
 * Call all event listeners that were added via
 * {@link EventHandler#addEventListener}.
 *
 * Delegates to `callAll`, which invokes every listener even if earlier
 * listeners throw.
 *
 * @template ARG0,ARG1
 * @param {EventHandler<ARG0,ARG1>} eventHandler
 * @param {ARG0} arg0
 * @param {ARG1} arg1
 *
 * @private
 * @function
 */
const callEventHandlerListeners = (eventHandler, arg0, arg1) =>
  callAll(eventHandler.l, [arg0, arg1]);
/**
 * Identifier of a struct: a (client, clock) pair.
 */
class ID {
  /**
   * @param {number} client client id
   * @param {number} clock unique per client id, continuous number
   */
  constructor (client, clock) {
    /**
     * Client id
     * @type {number}
     */
    this.client = client;
    /**
     * unique per client id, continuous number
     * @type {number}
     */
    this.clock = clock;
  }
}
/**
 * Compare two IDs for equality. Two IDs are equal when they are the same
 * reference (including both null) or when client and clock both match.
 *
 * @param {ID | null} a
 * @param {ID | null} b
 * @return {boolean}
 *
 * @function
 */
const compareIDs = (a, b) => {
  if (a === b) {
    return true
  }
  if (a === null || b === null) {
    return false
  }
  return a.client === b.client && a.clock === b.clock
};
/**
 * Build a new ID from a client id and a clock value.
 *
 * @param {number} client
 * @param {number} clock
 *
 * @private
 * @function
 */
const createID = (client, clock) => {
  return new ID(client, clock)
};
/**
 * The top types are mapped from y.share.get(keyname) => type.
 * `type` does not store any information about the `keyname`.
 * This function finds the correct `keyname` for `type` and throws otherwise.
 *
 * @param {AbstractType<any>} type
 * @return {string}
 *
 * @private
 * @function
 */
const findRootTypeKey = type => {
  // @ts-ignore _y must be defined, otherwise unexpected case
  const share = type.doc.share;
  for (const key of share.keys()) {
    if (share.get(key) === type) {
      return key
    }
  }
  // Reaching this point means the type is not registered as a root type.
  throw unexpectedCase()
};
/**
 * Check if `parent` is a parent of `child` by walking up the parent chain.
 *
 * @param {AbstractType<any>} parent
 * @param {Item|null} child
 * @return {Boolean} Whether `parent` is a parent of `child`.
 *
 * @private
 * @function
 */
const isParentOf = (parent, child) => {
  let current = child;
  while (current !== null) {
    if (current.parent === parent) {
      return true
    }
    // Step up: the parent type's `_item` links to the next ancestor level.
    current = /** @type {AbstractType<any>} */ (current.parent)._item;
  }
  return false
};
/**
 * A snapshot captures a document state: which items were deleted (`ds`) and
 * how far each client had progressed (`sv`) at the time it was taken.
 */
class Snapshot {
  /**
   * @param {DeleteSet} ds
   * @param {Map<number,number>} sv state map
   */
  constructor (ds, sv) {
    /**
     * Set of deleted item ids at snapshot time.
     * @type {DeleteSet}
     */
    this.ds = ds;
    /**
     * State Map (client → clock) at snapshot time.
     * @type {Map<number,number>}
     */
    this.sv = sv;
  }
}
/**
 * Build a Snapshot from a delete set and a state map.
 *
 * @param {DeleteSet} ds
 * @param {Map<number,number>} sm
 * @return {Snapshot}
 */
const createSnapshot = (ds, sm) => {
  return new Snapshot(ds, sm)
};
// NOTE(review): result is discarded — presumably the bundler dropped the unused
// `emptySnapshot` binding from upstream yjs; verify against the original source.
createSnapshot(createDeleteSet(), new Map());
/**
 * Whether `item` is visible: without a snapshot, simply "not deleted";
 * with a snapshot, the item must have existed at snapshot time (its clock is
 * below the snapshot's state for its client) and not be in the snapshot's
 * delete set.
 *
 * @param {Item} item
 * @param {Snapshot|undefined} snapshot
 *
 * @protected
 * @function
 */
const isVisible = (item, snapshot) => {
  if (snapshot === undefined) {
    return !item.deleted
  }
  return snapshot.sv.has(item.id.client) &&
    (snapshot.sv.get(item.id.client) || 0) > item.id.clock &&
    !isDeleted(snapshot.ds, item.id)
};
/**
 * Split all structs that straddle the snapshot boundary so items can be
 * attributed precisely to "before" or "after" the snapshot. The work is done
 * at most once per (transaction, snapshot) pair, tracked via transaction.meta.
 *
 * @param {Transaction} transaction
 * @param {Snapshot} snapshot
 */
const splitSnapshotAffectedStructs = (transaction, snapshot) => {
  // Per-transaction set of snapshots already processed (keyed by this function).
  const meta = setIfUndefined(transaction.meta, splitSnapshotAffectedStructs, create$5);
  const store = transaction.doc.store;
  // check if we already split for this snapshot
  if (!meta.has(snapshot)) {
    snapshot.sv.forEach((clock, client) => {
      if (clock < getState(store, client)) {
        // Force a struct boundary exactly at the snapshot clock.
        getItemCleanStart(transaction, createID(client, clock));
      }
    });
    // Iterating the delete set splits structs at deletion boundaries as a
    // side effect; the callback itself is intentionally a no-op.
    iterateDeletedStructs(transaction, snapshot.ds, item => {});
    meta.add(snapshot);
  }
};
/**
 * Store of all structs, organized per client. Also buffers updates that
 * cannot be integrated yet (missing dependencies).
 */
class StructStore {
  constructor () {
    /**
     * Per-client struct lists, sorted by clock.
     * @type {Map<number,Array<GC|Item>>}
     */
    this.clients = new Map();
    /**
     * Buffered struct update waiting on missing clocks, or null.
     * @type {null | { missing: Map<number, number>, update: Uint8Array }}
     */
    this.pendingStructs = null;
    /**
     * Buffered delete-set update, or null.
     * @type {null | Uint8Array}
     */
    this.pendingDs = null;
  }
}
/**
 * Return the states as a Map<client,clock>.
 * Note that clock refers to the next expected clock id.
 *
 * @param {StructStore} store
 * @return {Map<number,number>}
 *
 * @public
 * @function
 */
const getStateVector = store => {
  const sm = new Map();
  for (const [client, structs] of store.clients) {
    // Struct lists are contiguous, so the last struct determines the next clock.
    const last = structs[structs.length - 1];
    sm.set(client, last.id.clock + last.length);
  }
  return sm
};
/**
 * Return the next expected clock for `client`, or 0 for an unknown client.
 *
 * @param {StructStore} store
 * @param {number} client
 * @return {number}
 *
 * @public
 * @function
 */
const getState = (store, client) => {
  const structs = store.clients.get(client);
  if (structs === undefined) {
    // No structs from this client yet.
    return 0
  }
  const last = structs[structs.length - 1];
  return last.id.clock + last.length
};
/**
 * Append a struct to its client's list, enforcing clock contiguity.
 *
 * @param {StructStore} store
 * @param {GC|Item} struct
 * @throws if the struct does not continue directly after the previous one
 *
 * @private
 * @function
 */
const addStruct = (store, struct) => {
  const client = struct.id.client;
  let structs = store.clients.get(client);
  if (structs === undefined) {
    store.clients.set(client, structs = []);
  } else {
    const last = structs[structs.length - 1];
    if (last.id.clock + last.length !== struct.id.clock) {
      // A client's structs must form a gapless clock sequence.
      throw unexpectedCase()
    }
  }
  structs.push(struct);
};
/**
 * Perform a binary search on a sorted array of structs and return the index
 * of the struct whose clock range contains `clock`.
 *
 * @param {Array<Item|GC>} structs sorted, contiguous struct list of one client
 * @param {number} clock must lie inside the list's clock range
 * @return {number}
 * @throws if no struct contains `clock` (callers must check state first)
 *
 * @private
 * @function
 */
const findIndexSS = (structs, clock) => {
  let left = 0;
  let right = structs.length - 1;
  let mid = structs[right];
  let midclock = mid.id.clock;
  if (midclock === clock) {
    return right
  }
  // @todo does it even make sense to pivot the search?
  // If a good split misses, it might actually increase the time to find the correct item.
  // Currently, the only advantage is that search with pivoting might find the item on the first try.
  let midindex = floor((clock / (midclock + mid.length - 1)) * right); // pivoting the search
  while (left <= right) {
    mid = structs[midindex];
    midclock = mid.id.clock;
    if (midclock <= clock) {
      if (clock < midclock + mid.length) {
        // `clock` falls inside this struct's [clock, clock+length) range.
        return midindex
      }
      left = midindex + 1;
    } else {
      right = midindex - 1;
    }
    midindex = floor((left + right) / 2);
  }
  // Always check state before looking for a struct in StructStore
  // Therefore the case of not finding a struct is unexpected
  throw unexpectedCase()
};
/**
 * Look up the struct that contains `id`. Expects that id is actually in the
 * store — this function throws or loops forever otherwise.
 *
 * @param {StructStore} store
 * @param {ID} id
 * @return {GC|Item}
 *
 * @private
 * @function
 */
const find = (store, id) => {
  const structs = /** @type {Array<GC|Item>} */ (store.clients.get(id.client));
  const index = findIndexSS(structs, id.clock);
  return structs[index]
};
/**
 * Expects that id is actually in store. This function throws or is an infinite loop otherwise.
 *
 * Alias of `find`, narrowed to return an Item (callers use it when they know
 * the struct is not a GC).
 * @private
 * @function
 */
const getItem = /** @type {function(StructStore,ID):Item} */ (find);
/**
 * Find the index of the struct starting exactly at `clock`, splitting the
 * containing Item first if `clock` falls inside it.
 *
 * @param {Transaction} transaction
 * @param {Array<Item|GC>} structs
 * @param {number} clock
 * @return {number} index of the struct whose id.clock === clock
 */
const findIndexCleanStart = (transaction, structs, clock) => {
  const index = findIndexSS(structs, clock);
  const struct = structs[index];
  // Only Items can be split; GC structs are left as-is.
  if (struct.id.clock < clock && struct instanceof Item$1) {
    structs.splice(index + 1, 0, splitItem(transaction, struct, clock - struct.id.clock));
    return index + 1
  }
  return index
};
/**
 * Return the Item that starts exactly at `id`, splitting an existing Item if
 * necessary. Expects that id is actually in the store — throws or loops
 * forever otherwise.
 *
 * @param {Transaction} transaction
 * @param {ID} id
 * @return {Item}
 *
 * @private
 * @function
 */
const getItemCleanStart = (transaction, id) => {
  const structs = /** @type {Array<Item>} */ (transaction.doc.store.clients.get(id.client));
  const index = findIndexCleanStart(transaction, structs, id.clock);
  return structs[index]
};
/**
 * Return the struct that contains `id`, split so that the struct ends exactly
 * at `id.clock`. Expects that id is actually in the store — throws or loops
 * forever otherwise.
 *
 * @param {Transaction} transaction
 * @param {StructStore} store
 * @param {ID} id
 * @return {Item} the (possibly shortened) struct containing `id`
 *
 * @private
 * @function
 */
const getItemCleanEnd = (transaction, store, id) => {
  /**
   * @type {Array<Item>}
   */
  // @ts-ignore
  const structs = store.clients.get(id.client);
  const index = findIndexSS(structs, id.clock);
  const struct = structs[index];
  // Split only when `id` is not already the struct's last clock, and never
  // split GC structs.
  if (id.clock !== struct.id.clock + struct.length - 1 && struct.constructor !== GC) {
    structs.splice(index + 1, 0, splitItem(transaction, struct, id.clock - struct.id.clock + 1));
  }
  return struct
};
/**
 * Replace `struct` with `newStruct` in the store.
 *
 * @param {StructStore} store
 * @param {GC|Item} struct
 * @param {GC|Item} newStruct
 *
 * @private
 * @function
 */
const replaceStruct = (store, struct, newStruct) => {
  const structs = /** @type {Array<GC|Item>} */ (store.clients.get(struct.id.client));
  const pos = findIndexSS(structs, struct.id.clock);
  structs[pos] = newStruct;
};
/**
 * Iterate over a range of structs, splitting boundary structs first so the
 * callback only sees structs fully inside [clockStart, clockStart+len).
 *
 * @param {Transaction} transaction
 * @param {Array<Item|GC>} structs
 * @param {number} clockStart Inclusive start
 * @param {number} len
 * @param {function(GC|Item):void} f
 *
 * @function
 */
const iterateStructs = (transaction, structs, clockStart, len, f) => {
  if (len === 0) {
    return
  }
  const clockEnd = clockStart + len;
  // Ensure the range starts on a struct boundary.
  let index = findIndexCleanStart(transaction, structs, clockStart);
  let struct;
  do {
    struct = structs[index++];
    if (clockEnd < struct.id.clock + struct.length) {
      // Current struct overhangs the range end — split it (in place) so the
      // callback receives only the in-range portion.
      findIndexCleanStart(transaction, structs, clockEnd);
    }
    f(struct);
  } while (index < structs.length && structs[index].id.clock < clockEnd)
};
/**
 * A transaction is created for every change on the Yjs model. It is possible
 * to bundle changes on the Yjs model in a single transaction to
 * minimize the number on messages sent and the number of observer calls.
 * If possible the user of this library should bundle as many changes as
 * possible. Here is an example to illustrate the advantages of bundling:
 *
 * @example
 * const map = y.define('map', YMap)
 * // Log content when change is triggered
 * map.observe(() => {
 *   console.log('change triggered')
 * })
 * // Each change on the map type triggers a log message:
 * map.set('a', 0) // => "change triggered"
 * map.set('b', 0) // => "change triggered"
 * // When put in a transaction, it will trigger the log after the transaction:
 * y.transact(() => {
 *   map.set('a', 1)
 *   map.set('b', 1)
 * }) // => "change triggered"
 *
 * @public
 */
class Transaction {
  /**
   * @param {Doc} doc
   * @param {any} origin
   * @param {boolean} local
   */
  constructor (doc, origin, local) {
    /**
     * The Yjs instance.
     * @type {Doc}
     */
    this.doc = doc;
    /**
     * Describes the set of deleted items by ids
     * @type {DeleteSet}
     */
    this.deleteSet = new DeleteSet();
    /**
     * Holds the state before the transaction started.
     * @type {Map<Number,Number>}
     */
    this.beforeState = getStateVector(doc.store);
    /**
     * Holds the state after the transaction.
     * Filled in by cleanupTransactions once the transaction completes.
     * @type {Map<Number,Number>}
     */
    this.afterState = new Map();
    /**
     * All types that were directly modified (property added or child
     * inserted/deleted). New types are not included in this Set.
     * Maps from type to parentSubs (`item.parentSub = null` for YArray)
     * @type {Map<AbstractType<YEvent<any>>,Set<String|null>>}
     */
    this.changed = new Map();
    /**
     * Stores the events for the types that observe also child elements.
     * It is mainly used by `observeDeep`.
     * @type {Map<AbstractType<YEvent<any>>,Array<YEvent<any>>>}
     */
    this.changedParentTypes = new Map();
    /**
     * Structs that should be attempted to merge after cleanup.
     * @type {Array<AbstractStruct>}
     */
    this._mergeStructs = [];
    /**
     * Caller-provided origin, exposed on 'update' events.
     * @type {any}
     */
    this.origin = origin;
    /**
     * Stores meta information on the transaction
     * @type {Map<any,any>}
     */
    this.meta = new Map();
    /**
     * Whether this change originates from this doc.
     * @type {boolean}
     */
    this.local = local;
    /**
     * Subdocuments added during this transaction.
     * @type {Set<Doc>}
     */
    this.subdocsAdded = new Set();
    /**
     * Subdocuments removed during this transaction.
     * @type {Set<Doc>}
     */
    this.subdocsRemoved = new Set();
    /**
     * Subdocuments loaded during this transaction.
     * @type {Set<Doc>}
     */
    this.subdocsLoaded = new Set();
  }
}
/**
 * Write the transaction's changes (new structs + delete set) as an update
 * message. Writes nothing when the transaction changed neither structs nor
 * deletions.
 *
 * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
 * @param {Transaction} transaction
 * @return {boolean} Whether data was written.
 */
const writeUpdateMessageFromTransaction = (encoder, transaction) => {
  // No deletions and no clock advanced for any client → empty transaction.
  if (transaction.deleteSet.clients.size === 0 && !any(transaction.afterState, (clock, client) => transaction.beforeState.get(client) !== clock)) {
    return false
  }
  sortAndMergeDeleteSet(transaction.deleteSet);
  writeStructsFromTransaction(encoder, transaction);
  writeDeleteSet(encoder, transaction.deleteSet);
  return true
};
/**
 * If `type.parent` was added in current transaction, `type` technically
 * did not change, it was just added and we should not fire events for `type`.
 *
 * @param {Transaction} transaction
 * @param {AbstractType<YEvent<any>>} type
 * @param {string|null} parentSub
 */
const addChangedTypeToTransaction = (transaction, type, parentSub) => {
  const item = type._item;
  // Record the change only for root types (item === null) or for types whose
  // backing item existed before this transaction started and is not deleted.
  if (item === null || (item.id.clock < (transaction.beforeState.get(item.id.client) || 0) && !item.deleted)) {
    setIfUndefined(transaction.changed, type, create$5).add(parentSub);
  }
};
/**
 * Try to merge structs[pos] into its left neighbor. On success the right
 * struct is removed from the list, and any parent-map entry pointing at the
 * right struct is repointed to the merged (left) struct.
 *
 * @param {Array<AbstractStruct>} structs
 * @param {number} pos
 */
const tryToMergeWithLeft = (structs, pos) => {
  const left = structs[pos - 1];
  const right = structs[pos];
  // Only same-kind structs with matching deletion state can merge.
  if (left.deleted === right.deleted && left.constructor === right.constructor) {
    if (left.mergeWith(right)) {
      structs.splice(pos, 1);
      // Keep the parent's _map consistent: map entries reference the last
      // item of a key, which is now represented by `left`.
      if (right instanceof Item$1 && right.parentSub !== null && /** @type {AbstractType<any>} */ (right.parent)._map.get(right.parentSub) === right) {
        /** @type {AbstractType<any>} */ (right.parent)._map.set(right.parentSub, /** @type {Item} */ (left));
      }
    }
  }
};
/**
 * Garbage-collect deleted items referenced by the delete set: every deleted
 * Item inside a delete range that is not marked `keep` and passes `gcFilter`
 * has its content replaced (Item#gc).
 *
 * @param {DeleteSet} ds
 * @param {StructStore} store
 * @param {function(Item):boolean} gcFilter
 */
const tryGcDeleteSet = (ds, store, gcFilter) => {
  for (const [client, deleteItems] of ds.clients.entries()) {
    const structs = /** @type {Array<GC|Item>} */ (store.clients.get(client));
    for (let di = deleteItems.length - 1; di >= 0; di--) {
      const deleteItem = deleteItems[di];
      const endDeleteItemClock = deleteItem.clock + deleteItem.len;
      // Walk all structs overlapping the delete range [clock, clock+len).
      // Note: the original body redeclared a shadowing `const struct` and
      // carried an unreachable `break` (its condition was the exact negation
      // of the loop condition); both were removed — behavior is unchanged.
      for (
        let si = findIndexSS(structs, deleteItem.clock), struct = structs[si];
        si < structs.length && struct.id.clock < endDeleteItemClock;
        struct = structs[++si]
      ) {
        if (struct instanceof Item$1 && struct.deleted && !struct.keep && gcFilter(struct)) {
          struct.gc(store, false);
        }
      }
    }
  }
};
/**
 * Try to merge adjacent deleted / gc'd structs referenced by the delete set.
 *
 * @param {DeleteSet} ds
 * @param {StructStore} store
 */
const tryMergeDeleteSet = (ds, store) => {
  // try to merge deleted / gc'd items
  // merge from right to left for better efficiecy and so we don't miss any merge targets
  ds.clients.forEach((deleteItems, client) => {
    const structs = /** @type {Array<GC|Item>} */ (store.clients.get(client));
    for (let di = deleteItems.length - 1; di >= 0; di--) {
      const deleteItem = deleteItems[di];
      // start with merging the item next to the last deleted item
      const mostRightIndexToCheck = min(structs.length - 1, 1 + findIndexSS(structs, deleteItem.clock + deleteItem.len - 1));
      for (
        let si = mostRightIndexToCheck, struct = structs[si];
        si > 0 && struct.id.clock >= deleteItem.clock;
        struct = structs[--si]
      ) {
        tryToMergeWithLeft(structs, si);
      }
    }
  });
};
/**
 * Finalize transaction `i` in the cleanup queue: fire observers, GC and merge
 * structs, emit 'update' events, handle subdocs, then recurse into the next
 * queued transaction. The finally block must run even if observers throw.
 *
 * @param {Array<Transaction>} transactionCleanups
 * @param {number} i
 */
const cleanupTransactions = (transactionCleanups, i) => {
  if (i < transactionCleanups.length) {
    const transaction = transactionCleanups[i];
    const doc = transaction.doc;
    const store = doc.store;
    const ds = transaction.deleteSet;
    const mergeStructs = transaction._mergeStructs;
    try {
      sortAndMergeDeleteSet(ds);
      transaction.afterState = getStateVector(transaction.doc.store);
      doc.emit('beforeObserverCalls', [transaction, doc]);
      /**
       * An array of event callbacks.
       *
       * Each callback is called even if the other ones throw errors.
       *
       * @type {Array<function():void>}
       */
      const fs = [];
      // observe events on changed types
      transaction.changed.forEach((subs, itemtype) =>
        fs.push(() => {
          if (itemtype._item === null || !itemtype._item.deleted) {
            itemtype._callObserver(transaction, subs);
          }
        })
      );
      fs.push(() => {
        // deep observe events
        transaction.changedParentTypes.forEach((events, type) =>
          fs.push(() => {
            // We need to think about the possibility that the user transforms the
            // Y.Doc in the event.
            if (type._item === null || !type._item.deleted) {
              events = events
                .filter(event =>
                  event.target._item === null || !event.target._item.deleted
                );
              events
                .forEach(event => {
                  event.currentTarget = type;
                });
              // sort events by path length so that top-level events are fired first.
              events
                .sort((event1, event2) => event1.path.length - event2.path.length);
              // We don't need to check for events.length
              // because we know it has at least one element
              callEventHandlerListeners(type._dEH, events, transaction);
            }
          })
        );
        fs.push(() => doc.emit('afterTransaction', [transaction, doc]));
      });
      callAll(fs, []);
    } finally {
      // Replace deleted items with ItemDeleted / GC.
      // This is where content is actually remove from the Yjs Doc.
      if (doc.gc) {
        tryGcDeleteSet(ds, store, doc.gcFilter);
      }
      tryMergeDeleteSet(ds, store);

      // on all affected store.clients props, try to merge
      transaction.afterState.forEach((clock, client) => {
        const beforeClock = transaction.beforeState.get(client) || 0;
        if (beforeClock !== clock) {
          const structs = /** @type {Array<GC|Item>} */ (store.clients.get(client));
          // we iterate from right to left so we can safely remove entries
          const firstChangePos = max(findIndexSS(structs, beforeClock), 1);
          for (let i = structs.length - 1; i >= firstChangePos; i--) {
            tryToMergeWithLeft(structs, i);
          }
        }
      });
      // try to merge mergeStructs
      // @todo: it makes more sense to transform mergeStructs to a DS, sort it, and merge from right to left
      // but at the moment DS does not handle duplicates
      for (let i = 0; i < mergeStructs.length; i++) {
        const { client, clock } = mergeStructs[i].id;
        const structs = /** @type {Array<GC|Item>} */ (store.clients.get(client));
        const replacedStructPos = findIndexSS(structs, clock);
        if (replacedStructPos + 1 < structs.length) {
          tryToMergeWithLeft(structs, replacedStructPos + 1);
        }
        if (replacedStructPos > 0) {
          tryToMergeWithLeft(structs, replacedStructPos);
        }
      }
      // A remote change under our own clientID means an id collision.
      if (!transaction.local && transaction.afterState.get(doc.clientID) !== transaction.beforeState.get(doc.clientID)) {
        print(ORANGE, BOLD, '[yjs] ', UNBOLD, RED, 'Changed the client-id because another client seems to be using it.');
        doc.clientID = generateNewClientId();
      }
      // @todo Merge all the transactions into one and provide send the data as a single update message
      doc.emit('afterTransactionCleanup', [transaction, doc]);
      if (doc._observers.has('update')) {
        const encoder = new UpdateEncoderV1();
        const hasContent = writeUpdateMessageFromTransaction(encoder, transaction);
        if (hasContent) {
          doc.emit('update', [encoder.toUint8Array(), transaction.origin, doc, transaction]);
        }
      }
      if (doc._observers.has('updateV2')) {
        const encoder = new UpdateEncoderV2();
        const hasContent = writeUpdateMessageFromTransaction(encoder, transaction);
        if (hasContent) {
          doc.emit('updateV2', [encoder.toUint8Array(), transaction.origin, doc, transaction]);
        }
      }
      const { subdocsAdded, subdocsLoaded, subdocsRemoved } = transaction;
      if (subdocsAdded.size > 0 || subdocsRemoved.size > 0 || subdocsLoaded.size > 0) {
        subdocsAdded.forEach(subdoc => {
          subdoc.clientID = doc.clientID;
          if (subdoc.collectionid == null) {
            subdoc.collectionid = doc.collectionid;
          }
          doc.subdocs.add(subdoc);
        });
        subdocsRemoved.forEach(subdoc => doc.subdocs.delete(subdoc));
        doc.emit('subdocs', [{ loaded: subdocsLoaded, added: subdocsAdded, removed: subdocsRemoved }, doc, transaction]);
        subdocsRemoved.forEach(subdoc => subdoc.destroy());
      }

      if (transactionCleanups.length <= i + 1) {
        doc._transactionCleanups = [];
        doc.emit('afterAllTransactions', [doc, transactionCleanups]);
      } else {
        // Observers may have queued further transactions — process them next.
        cleanupTransactions(transactionCleanups, i + 1);
      }
    }
  }
};
/**
 * Implements the functionality of `y.transact(()=>{..})`
 *
 * Nested calls reuse the currently open transaction; only the outermost call
 * triggers observer/cleanup processing.
 *
 * @param {Doc} doc
 * @param {function(Transaction):void} f
 * @param {any} [origin=true]
 * @param {boolean} [local=true] whether the change originates from this doc
 *
 * @function
 */
const transact = (doc, f, origin = null, local = true) => {
  const transactionCleanups = doc._transactionCleanups;
  let initialCall = false;
  if (doc._transaction === null) {
    // Outermost call: open a new transaction and announce it.
    initialCall = true;
    doc._transaction = new Transaction(doc, origin, local);
    transactionCleanups.push(doc._transaction);
    if (transactionCleanups.length === 1) {
      doc.emit('beforeAllTransactions', [doc]);
    }
    doc.emit('beforeTransaction', [doc._transaction, doc]);
  }
  try {
    f(doc._transaction);
  } finally {
    if (initialCall) {
      const finishCleanup = doc._transaction === transactionCleanups[0];
      doc._transaction = null;
      if (finishCleanup) {
        // The first transaction ended, now process observer calls.
        // Observer call may create new transactions for which we need to call the observers and do cleanup.
        // We don't want to nest these calls, so we execute these calls one after
        // another.
        // Also we need to ensure that all cleanups are called, even if the
        // observes throw errors.
        // This file is full of hacky try {} finally {} blocks to ensure that an
        // event can throw errors and also that the cleanup is called.
        cleanupTransactions(transactionCleanups, 0);
      }
    }
  }
};
/**
 * One undo/redo stack entry: the delete sets describing what an operation
 * inserted and deleted, plus user-attachable metadata.
 */
class StackItem {
  /**
   * @param {DeleteSet} deletions
   * @param {DeleteSet} insertions
   */
  constructor (deletions, insertions) {
    this.insertions = insertions;
    this.deletions = deletions;
    /**
     * Use this to save and restore metadata like selection range
     */
    this.meta = new Map();
  }
}
/**
 * Release the `keep` flag on all items a cleared stack item had pinned, so
 * they become eligible for garbage collection again.
 *
 * @param {Transaction} tr
 * @param {UndoManager} um
 * @param {StackItem} stackItem
 */
const clearUndoManagerStackItem = (tr, um, stackItem) => {
  iterateDeletedStructs(tr, stackItem.deletions, item => {
    // Only items inside the undo manager's scope were kept by it.
    if (item instanceof Item$1 && um.scope.some(type => isParentOf(type, item))) {
      keepItem(item, false);
    }
  });
};
/**
 * Pop stack items until one actually changes the document, undoing its
 * insertions and redoing its deletions inside a single transaction. Emits
 * 'stack-item-popped' when a change was performed.
 *
 * @param {UndoManager} undoManager
 * @param {Array<StackItem>} stack
 * @param {string} eventType 'undo' or 'redo' — forwarded in the emitted event
 * @return {StackItem?} the stack item that caused a change, or null
 */
const popStackItem = (undoManager, stack, eventType) => {
  /**
   * Whether a change happened
   * @type {StackItem?}
   */
  let result = null;
  /**
   * Keep a reference to the transaction so we can fire the event with the changedParentTypes
   * @type {any}
   */
  let _tr = null;
  const doc = undoManager.doc;
  const scope = undoManager.scope;
  transact(doc, transaction => {
    // Keep popping until one stack item performs an actual change.
    while (stack.length > 0 && result === null) {
      const store = doc.store;
      const stackItem = /** @type {StackItem} */ (stack.pop());
      /**
       * @type {Set<Item>}
       */
      const itemsToRedo = new Set();
      /**
       * @type {Array<Item>}
       */
      const itemsToDelete = [];
      let performedChange = false;
      iterateDeletedStructs(transaction, stackItem.insertions, struct => {
        if (struct instanceof Item$1) {
          if (struct.redone !== null) {
            // Follow the redo chain to the current representative of this item.
            let { item, diff } = followRedone(store, struct.id);
            if (diff > 0) {
              item = getItemCleanStart(transaction, createID(item.id.client, item.id.clock + diff));
            }
            struct = item;
          }
          if (!struct.deleted && scope.some(type => isParentOf(type, /** @type {Item} */ (struct)))) {
            itemsToDelete.push(struct);
          }
        }
      });
      iterateDeletedStructs(transaction, stackItem.deletions, struct => {
        if (
          struct instanceof Item$1 &&
          scope.some(type => isParentOf(type, struct)) &&
          // Never redo structs in stackItem.insertions because they were created and deleted in the same capture interval.
          !isDeleted(stackItem.insertions, struct.id)
        ) {
          itemsToRedo.add(struct);
        }
      });
      itemsToRedo.forEach(struct => {
        performedChange = redoItem(transaction, struct, itemsToRedo, stackItem.insertions, undoManager.ignoreRemoteMapChanges) !== null || performedChange;
      });
      // We want to delete in reverse order so that children are deleted before
      // parents, so we have more information available when items are filtered.
      for (let i = itemsToDelete.length - 1; i >= 0; i--) {
        const item = itemsToDelete[i];
        if (undoManager.deleteFilter(item)) {
          item.delete(transaction);
          performedChange = true;
        }
      }
      result = performedChange ? stackItem : null;
    }
    transaction.changed.forEach((subProps, type) => {
      // destroy search marker if necessary
      if (subProps.has(null) && type._searchMarker) {
        type._searchMarker.length = 0;
      }
    });
    _tr = transaction;
  }, undoManager);
  if (result != null) {
    const changedParentTypes = _tr.changedParentTypes;
    undoManager.emit('stack-item-popped', [{ stackItem: result, type: eventType, changedParentTypes }, undoManager]);
  }
  return result
};
/**
|
||
* @typedef {Object} UndoManagerOptions
|
||
* @property {number} [UndoManagerOptions.captureTimeout=500]
|
||
* @property {function(Transaction):boolean} [UndoManagerOptions.captureTransaction] Do not capture changes of a Transaction if result false.
|
||
* @property {function(Item):boolean} [UndoManagerOptions.deleteFilter=()=>true] Sometimes
|
||
* it is necessary to filter what an Undo/Redo operation can delete. If this
|
||
 * filter returns false, the type/item won't be deleted even if it is in the
|
||
* undo/redo scope.
|
||
* @property {Set<any>} [UndoManagerOptions.trackedOrigins=new Set([null])]
|
||
* @property {boolean} [ignoreRemoteMapChanges] Experimental. By default, the UndoManager will never overwrite remote changes. Enable this property to enable overwriting remote changes on key-value changes (Y.Map, properties on Y.Xml, etc..).
|
||
* @property {Doc} [doc] The document that this UndoManager operates on. Only needed if typeScope is empty.
|
||
*/
|
||
|
||
/**
 * Fires 'stack-item-added' event when a stack item was added to either the undo- or
 * the redo-stack. You may store additional stack information via the
 * metadata property on `event.stackItem.meta` (it is a `Map` of metadata properties).
 * Fires 'stack-item-popped' event when a stack item was popped from either the
 * undo- or the redo-stack. You may restore the saved stack information from `event.stackItem.meta`.
 *
 * @extends {Observable<'stack-item-added'|'stack-item-popped'|'stack-cleared'|'stack-item-updated'>}
 */
class UndoManager extends Observable {
  /**
   * @param {AbstractType<any>|Array<AbstractType<any>>} typeScope Accepts either a single type, or an array of types
   * @param {UndoManagerOptions} options
   */
  constructor (typeScope, {
    captureTimeout = 500,
    captureTransaction = tr => true,
    deleteFilter = () => true,
    trackedOrigins = new Set([null]),
    ignoreRemoteMapChanges = false,
    doc = /** @type {Doc} */ (isArray$1(typeScope) ? typeScope[0].doc : typeScope.doc)
  } = {}) {
    super();
    /**
     * The shared types whose changes this UndoManager tracks.
     * @type {Array<AbstractType<any>>}
     */
    this.scope = [];
    this.addToScope(typeScope);
    this.deleteFilter = deleteFilter;
    // Track this UndoManager's own transactions too, so that applying an undo
    // is captured on the redo stack (see afterTransactionHandler below).
    trackedOrigins.add(this);
    this.trackedOrigins = trackedOrigins;
    this.captureTransaction = captureTransaction;
    /**
     * @type {Array<StackItem>}
     */
    this.undoStack = [];
    /**
     * @type {Array<StackItem>}
     */
    this.redoStack = [];
    /**
     * Whether the client is currently undoing (calling UndoManager.undo)
     *
     * @type {boolean}
     */
    this.undoing = false;
    // Whether the client is currently redoing (calling UndoManager.redo)
    this.redoing = false;
    this.doc = doc;
    // Unix time of the last captured change; 0 forces the next change into a fresh StackItem.
    this.lastChange = 0;
    this.ignoreRemoteMapChanges = ignoreRemoteMapChanges;
    this.captureTimeout = captureTimeout;
    /**
     * Observes every finished transaction on the document and captures it on
     * the appropriate stack (undo/redo), merging with the previous StackItem
     * when the change happened within `captureTimeout`.
     *
     * @param {Transaction} transaction
     */
    this.afterTransactionHandler = transaction => {
      // Only track certain transactions
      if (
        !this.captureTransaction(transaction) ||
        !this.scope.some(type => transaction.changedParentTypes.has(type)) ||
        (!this.trackedOrigins.has(transaction.origin) && (!transaction.origin || !this.trackedOrigins.has(transaction.origin.constructor)))
      ) {
        return
      }
      const undoing = this.undoing;
      const redoing = this.redoing;
      // changes applied by undo() go to the redo stack; everything else to the undo stack
      const stack = undoing ? this.redoStack : this.undoStack;
      if (undoing) {
        this.stopCapturing(); // next undo should not be appended to last stack item
      } else if (!redoing) {
        // neither undoing nor redoing: delete redoStack
        this.clear(false, true);
      }
      // Derive the set of struct ranges this transaction inserted from the
      // before/after state vectors.
      const insertions = new DeleteSet();
      transaction.afterState.forEach((endClock, client) => {
        const startClock = transaction.beforeState.get(client) || 0;
        const len = endClock - startClock;
        if (len > 0) {
          addToDeleteSet(insertions, client, startClock, len);
        }
      });
      const now = getUnixTime();
      let didAdd = false;
      if (this.lastChange > 0 && now - this.lastChange < this.captureTimeout && stack.length > 0 && !undoing && !redoing) {
        // append change to last stack op
        const lastOp = stack[stack.length - 1];
        lastOp.deletions = mergeDeleteSets([lastOp.deletions, transaction.deleteSet]);
        lastOp.insertions = mergeDeleteSets([lastOp.insertions, insertions]);
      } else {
        // create a new stack op
        stack.push(new StackItem(transaction.deleteSet, insertions));
        didAdd = true;
      }
      if (!undoing && !redoing) {
        this.lastChange = now;
      }
      // make sure that deleted structs are not gc'd
      iterateDeletedStructs(transaction, transaction.deleteSet, /** @param {Item|GC} item */ item => {
        if (item instanceof Item$1 && this.scope.some(type => isParentOf(type, item))) {
          keepItem(item, true);
        }
      });
      const changeEvent = [{ stackItem: stack[stack.length - 1], origin: transaction.origin, type: undoing ? 'redo' : 'undo', changedParentTypes: transaction.changedParentTypes }, this];
      if (didAdd) {
        this.emit('stack-item-added', changeEvent);
      } else {
        this.emit('stack-item-updated', changeEvent);
      }
    };
    this.doc.on('afterTransaction', this.afterTransactionHandler);
    this.doc.on('destroy', () => {
      this.destroy();
    });
  }

  /**
   * Extend the tracked scope with additional shared types (duplicates are ignored).
   *
   * @param {Array<AbstractType<any>> | AbstractType<any>} ytypes
   */
  addToScope (ytypes) {
    ytypes = isArray$1(ytypes) ? ytypes : [ytypes];
    ytypes.forEach(ytype => {
      if (this.scope.every(yt => yt !== ytype)) {
        this.scope.push(ytype);
      }
    });
  }

  /**
   * Also capture transactions with this origin (or origin constructor).
   *
   * @param {any} origin
   */
  addTrackedOrigin (origin) {
    this.trackedOrigins.add(origin);
  }

  /**
   * Stop capturing transactions with this origin.
   *
   * @param {any} origin
   */
  removeTrackedOrigin (origin) {
    this.trackedOrigins.delete(origin);
  }

  /**
   * Clear the undo- and/or redo-stack, releasing kept items so they can be
   * garbage collected. Emits 'stack-cleared'.
   *
   * @param {boolean} [clearUndoStack]
   * @param {boolean} [clearRedoStack]
   */
  clear (clearUndoStack = true, clearRedoStack = true) {
    if ((clearUndoStack && this.canUndo()) || (clearRedoStack && this.canRedo())) {
      this.doc.transact(tr => {
        if (clearUndoStack) {
          this.undoStack.forEach(item => clearUndoManagerStackItem(tr, this, item));
          this.undoStack = [];
        }
        if (clearRedoStack) {
          this.redoStack.forEach(item => clearUndoManagerStackItem(tr, this, item));
          this.redoStack = [];
        }
        this.emit('stack-cleared', [{ undoStackCleared: clearUndoStack, redoStackCleared: clearRedoStack }]);
      });
    }
  }

  /**
   * UndoManager merges Undo-StackItem if they are created within time-gap
   * smaller than `options.captureTimeout`. Call `um.stopCapturing()` so that the next
   * StackItem won't be merged.
   *
   *
   * @example
   *     // without stopCapturing
   *     ytext.insert(0, 'a')
   *     ytext.insert(1, 'b')
   *     um.undo()
   *     ytext.toString() // => '' (note that 'ab' was removed)
   *     // with stopCapturing
   *     ytext.insert(0, 'a')
   *     um.stopCapturing()
   *     ytext.insert(0, 'b')
   *     um.undo()
   *     ytext.toString() // => 'a' (note that only 'b' was removed)
   *
   */
  stopCapturing () {
    this.lastChange = 0;
  }

  /**
   * Undo last changes on type.
   *
   * @return {StackItem?} Returns StackItem if a change was applied
   */
  undo () {
    this.undoing = true;
    let res;
    try {
      res = popStackItem(this, this.undoStack, 'undo');
    } finally {
      // always reset the flag, even if applying the stack item threw
      this.undoing = false;
    }
    return res
  }

  /**
   * Redo last undo operation.
   *
   * @return {StackItem?} Returns StackItem if a change was applied
   */
  redo () {
    this.redoing = true;
    let res;
    try {
      res = popStackItem(this, this.redoStack, 'redo');
    } finally {
      // always reset the flag, even if applying the stack item threw
      this.redoing = false;
    }
    return res
  }

  /**
   * Are undo steps available?
   *
   * @return {boolean} `true` if undo is possible
   */
  canUndo () {
    return this.undoStack.length > 0
  }

  /**
   * Are redo steps available?
   *
   * @return {boolean} `true` if redo is possible
   */
  canRedo () {
    return this.redoStack.length > 0
  }

  /**
   * Detach from the document and stop capturing transactions.
   */
  destroy () {
    this.trackedOrigins.delete(this);
    this.doc.off('afterTransaction', this.afterTransactionHandler);
    super.destroy();
  }
}
|
||
|
||
/**
 * Lazily decode the structs of an encoded update, yielding them one at a time
 * (Skip, Item, or GC) without materializing the whole update.
 *
 * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
 */
function * lazyStructReaderGenerator (decoder) {
  const numOfStateUpdates = readVarUint(decoder.restDecoder);
  for (let i = 0; i < numOfStateUpdates; i++) {
    const numberOfStructs = readVarUint(decoder.restDecoder);
    const client = decoder.readClient();
    // clock advances as each struct is decoded; structs of a client are contiguous
    let clock = readVarUint(decoder.restDecoder);
    for (let i = 0; i < numberOfStructs; i++) {
      const info = decoder.readInfo();
      // @todo use switch instead of ifs
      if (info === 10) {
        // info byte 10 encodes a Skip struct (a gap in the client's clock range)
        const len = readVarUint(decoder.restDecoder);
        yield new Skip(createID(client, clock), len);
        clock += len;
      } else if ((BITS5 & info) !== 0) {
        // the lower 5 bits carry the content type ⇒ this is an Item
        const cantCopyParentInfo = (info & (BIT7 | BIT8)) === 0;
        // If parent = null and neither left nor right are defined, then we know that `parent` is child of `y`
        // and we read the next string as parentYKey.
        // It indicates how we store/retrieve parent from `y.share`
        // @type {string|null}
        const struct = new Item$1(
          createID(client, clock),
          null, // left
          (info & BIT8) === BIT8 ? decoder.readLeftID() : null, // origin
          null, // right
          (info & BIT7) === BIT7 ? decoder.readRightID() : null, // right origin
          // @ts-ignore Force writing a string here.
          cantCopyParentInfo ? (decoder.readParentInfo() ? decoder.readString() : decoder.readLeftID()) : null, // parent
          cantCopyParentInfo && (info & BIT6) === BIT6 ? decoder.readString() : null, // parentSub
          readItemContent(decoder, info) // item content
        );
        yield struct;
        clock += struct.length;
      } else {
        // otherwise this is a GC struct (content already garbage collected)
        const len = decoder.readLen();
        yield new GC(createID(client, clock), len);
        clock += len;
      }
    }
  }
}
|
||
|
||
/**
 * A pull-based cursor over the structs of one encoded update. `curr` always
 * holds the struct under the cursor (null when exhausted).
 */
class LazyStructReader {
  /**
   * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
   * @param {boolean} filterSkips When true, Skip structs are skipped transparently.
   */
  constructor (decoder, filterSkips) {
    this.gen = lazyStructReaderGenerator(decoder);
    /**
     * @type {null | Item | Skip | GC}
     */
    this.curr = null;
    this.done = false;
    this.filterSkips = filterSkips;
    // advance to the first struct so `curr` is immediately usable
    this.next();
  }

  /**
   * Advance the cursor and return the struct it now points at.
   *
   * @return {Item | GC | Skip | null}
   */
  next () {
    let value = this.gen.next().value || null;
    // ignore "Skip" structs when requested
    while (this.filterSkips && value !== null && value.constructor === Skip) {
      value = this.gen.next().value || null;
    }
    this.curr = value;
    return value
  }
}
|
||
|
||
/**
 * Accumulates encoded structs grouped per client, so the per-client struct
 * counts (which must precede the structs in the wire format) can be written
 * after the fact.
 */
class LazyStructWriter {
  /**
   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
   */
  constructor (encoder) {
    this.encoder = encoder;
    // client whose fragment is currently being written
    this.currClient = 0;
    // clock at which the current client's run starts
    this.startClock = 0;
    // number of structs written for the current client so far
    this.written = 0;
    /**
     * We want to write operations lazily, but also we need to know beforehand how many operations we want to write for each client.
     *
     * This kind of meta-information (#clients, #structs-per-client-written) is written to the restEncoder.
     *
     * We fragment the restEncoder and store a slice of it per-client until we know how many clients there are.
     * When we flush (toUint8Array) we write the restEncoder using the fragments and the meta-information.
     *
     * @type {Array<{ written: number, restEncoder: Uint8Array }>}
     */
    this.clientStructs = [];
  }
}
|
||
|
||
/**
 * Merge several v1-encoded updates into a single v1-encoded update.
 *
 * @param {Array<Uint8Array>} updates
 * @return {Uint8Array}
 */
const mergeUpdates$1 = (updates) => mergeUpdatesV2(updates, UpdateDecoderV1, UpdateEncoderV1);
|
||
|
||
/**
 * This method is intended to slice any kind of struct and retrieve the right part.
 * It does not handle side-effects, so it should only be used by the lazy-encoder.
 *
 * @param {Item | GC | Skip} left
 * @param {number} diff Offset from the start of `left` at which the right part begins.
 * @return {Item | GC} The portion of `left` starting at `diff`.
 */
const sliceStruct = (left, diff) => {
  const client = left.id.client;
  const clock = left.id.clock;
  switch (left.constructor) {
    case GC:
      return new GC(createID(client, clock + diff), left.length - diff)
    case Skip:
      return new Skip(createID(client, clock + diff), left.length - diff)
    default: {
      const leftItem = /** @type {Item} */ (left);
      // the sliced item's origin is the last element of the part that stays on the left
      return new Item$1(
        createID(client, clock + diff),
        null, // left
        createID(client, clock + diff - 1), // origin
        null, // right
        leftItem.rightOrigin, // right origin
        leftItem.parent,
        leftItem.parentSub,
        leftItem.content.splice(diff)
      )
    }
  }
};
|
||
|
||
/**
 * Merge several encoded updates into one equivalent update without applying
 * them to a document. Structs are streamed per client in descending client
 * order; overlapping ranges are deduplicated and gaps are bridged with Skips.
 *
 * This function works similarly to `readUpdateV2`.
 *
 * @param {Array<Uint8Array>} updates
 * @param {typeof UpdateDecoderV1 | typeof UpdateDecoderV2} [YDecoder]
 * @param {typeof UpdateEncoderV1 | typeof UpdateEncoderV2} [YEncoder]
 * @return {Uint8Array}
 */
const mergeUpdatesV2 = (updates, YDecoder = UpdateDecoderV2, YEncoder = UpdateEncoderV2) => {
  if (updates.length === 1) {
    // nothing to merge
    return updates[0]
  }
  const updateDecoders = updates.map(update => new YDecoder(createDecoder(update)));
  let lazyStructDecoders = updateDecoders.map(decoder => new LazyStructReader(decoder, true));

  /**
   * The struct currently buffered for writing. It is only flushed to the
   * encoder once we know the following struct doesn't overlap/merge with it.
   *
   * @todo we don't need offset because we always slice before
   * @type {null | { struct: Item | GC | Skip, offset: number }}
   */
  let currWrite = null;

  const updateEncoder = new YEncoder();
  // write structs lazily
  const lazyStructEncoder = new LazyStructWriter(updateEncoder);

  // Note: We need to ensure that all lazyStructDecoders are fully consumed
  // Note: Should merge document updates whenever possible - even from different updates
  // Note: Should handle that some operations cannot be applied yet ()

  while (true) {
    // Write higher clients first ⇒ sort by clientID & clock and remove decoders without content
    lazyStructDecoders = lazyStructDecoders.filter(dec => dec.curr !== null);
    lazyStructDecoders.sort(
      /** @type {function(any,any):number} */ (dec1, dec2) => {
        if (dec1.curr.id.client === dec2.curr.id.client) {
          const clockDiff = dec1.curr.id.clock - dec2.curr.id.clock;
          if (clockDiff === 0) {
            // @todo remove references to skip since the structDecoders must filter Skips.
            return dec1.curr.constructor === dec2.curr.constructor
              ? 0
              : dec1.curr.constructor === Skip ? 1 : -1 // we are filtering skips anyway.
          } else {
            return clockDiff
          }
        } else {
          return dec2.curr.id.client - dec1.curr.id.client
        }
      }
    );
    if (lazyStructDecoders.length === 0) {
      break
    }
    const currDecoder = lazyStructDecoders[0];
    // write from currDecoder until the next operation is from another client or if filler-struct
    // then we need to reorder the decoders and find the next operation to write
    const firstClient = /** @type {Item | GC} */ (currDecoder.curr).id.client;

    if (currWrite !== null) {
      let curr = /** @type {Item | GC | null} */ (currDecoder.curr);
      let iterated = false;

      // iterate until we find something that we haven't written already
      // remember: first the high client-ids are written
      while (curr !== null && curr.id.clock + curr.length <= currWrite.struct.id.clock + currWrite.struct.length && curr.id.client >= currWrite.struct.id.client) {
        curr = currDecoder.next();
        iterated = true;
      }
      if (
        curr === null || // current decoder is empty
        curr.id.client !== firstClient || // check whether there is another decoder that has updates from `firstClient`
        (iterated && curr.id.clock > currWrite.struct.id.clock + currWrite.struct.length) // the above while loop was used and we are potentially missing updates
      ) {
        continue
      }

      if (firstClient !== currWrite.struct.id.client) {
        // switching client: flush the buffered struct and start buffering `curr`
        writeStructToLazyStructWriter(lazyStructEncoder, currWrite.struct, currWrite.offset);
        currWrite = { struct: curr, offset: 0 };
        currDecoder.next();
      } else {
        if (currWrite.struct.id.clock + currWrite.struct.length < curr.id.clock) {
          // there is a gap between the buffered struct and `curr`
          // @todo write currStruct & set currStruct = Skip(clock = currStruct.id.clock + currStruct.length, length = curr.id.clock - self.clock)
          if (currWrite.struct.constructor === Skip) {
            // extend existing skip
            currWrite.struct.length = curr.id.clock + curr.length - currWrite.struct.id.clock;
          } else {
            writeStructToLazyStructWriter(lazyStructEncoder, currWrite.struct, currWrite.offset);
            const diff = curr.id.clock - currWrite.struct.id.clock - currWrite.struct.length;
            /**
             * @type {Skip}
             */
            const struct = new Skip(createID(firstClient, currWrite.struct.id.clock + currWrite.struct.length), diff);
            currWrite = { struct, offset: 0 };
          }
        } else { // if (currWrite.struct.id.clock + currWrite.struct.length >= curr.id.clock) {
          // the buffered struct and `curr` overlap (or touch exactly)
          const diff = currWrite.struct.id.clock + currWrite.struct.length - curr.id.clock;
          if (diff > 0) {
            if (currWrite.struct.constructor === Skip) {
              // prefer to slice Skip because the other struct might contain more information
              currWrite.struct.length -= diff;
            } else {
              curr = sliceStruct(curr, diff);
            }
          }
          if (!currWrite.struct.mergeWith(/** @type {any} */ (curr))) {
            writeStructToLazyStructWriter(lazyStructEncoder, currWrite.struct, currWrite.offset);
            currWrite = { struct: curr, offset: 0 };
            currDecoder.next();
          }
        }
      }
    } else {
      // nothing buffered yet: start with the current decoder's struct
      currWrite = { struct: /** @type {Item | GC} */ (currDecoder.curr), offset: 0 };
      currDecoder.next();
    }
    // fast path: drain consecutive structs of the same client from this decoder
    for (
      let next = currDecoder.curr;
      next !== null && next.id.client === firstClient && next.id.clock === currWrite.struct.id.clock + currWrite.struct.length && next.constructor !== Skip;
      next = currDecoder.next()
    ) {
      writeStructToLazyStructWriter(lazyStructEncoder, currWrite.struct, currWrite.offset);
      currWrite = { struct: next, offset: 0 };
    }
  }
  if (currWrite !== null) {
    // flush the final buffered struct
    writeStructToLazyStructWriter(lazyStructEncoder, currWrite.struct, currWrite.offset);
    currWrite = null;
  }
  finishLazyStructWriting(lazyStructEncoder);

  // merge and append the delete sets of all updates
  const dss = updateDecoders.map(decoder => readDeleteSet(decoder));
  const ds = mergeDeleteSets(dss);
  writeDeleteSet(updateEncoder, ds);
  return updateEncoder.toUint8Array()
};
|
||
|
||
/**
 * Compute the difference of `update` against a remote state vector `sv`:
 * the returned update contains only the structs (and the full delete set)
 * that the remote peer is missing.
 *
 * @param {Uint8Array} update
 * @param {Uint8Array} sv Encoded state vector of the remote peer.
 * @param {typeof UpdateDecoderV1 | typeof UpdateDecoderV2} [YDecoder]
 * @param {typeof UpdateEncoderV1 | typeof UpdateEncoderV2} [YEncoder]
 */
const diffUpdateV2 = (update, sv, YDecoder = UpdateDecoderV2, YEncoder = UpdateEncoderV2) => {
  const state = decodeStateVector$1(sv);
  const encoder = new YEncoder();
  const lazyStructWriter = new LazyStructWriter(encoder);
  const decoder = new YDecoder(createDecoder(update));
  const reader = new LazyStructReader(decoder, false);
  while (reader.curr) {
    const curr = reader.curr;
    const currClient = curr.id.client;
    // clock up to which the remote peer already has this client's structs
    const svClock = state.get(currClient) || 0;
    if (reader.curr.constructor === Skip) {
      // the first written struct shouldn't be a skip
      reader.next();
      continue
    }
    if (curr.id.clock + curr.length > svClock) {
      // struct (partially) unknown to the peer ⇒ write the missing tail,
      // then everything else this client has
      writeStructToLazyStructWriter(lazyStructWriter, curr, max(svClock - curr.id.clock, 0));
      reader.next();
      while (reader.curr && reader.curr.id.client === currClient) {
        writeStructToLazyStructWriter(lazyStructWriter, reader.curr, 0);
        reader.next();
      }
    } else {
      // read until something new comes up
      while (reader.curr && reader.curr.id.client === currClient && reader.curr.id.clock + reader.curr.length <= svClock) {
        reader.next();
      }
    }
  }
  finishLazyStructWriting(lazyStructWriter);
  // write ds
  const ds = readDeleteSet(decoder);
  writeDeleteSet(encoder, ds);
  return encoder.toUint8Array()
};
|
||
|
||
/**
 * Finish the in-progress per-client fragment: snapshot the bytes encoded so
 * far, stash them in `clientStructs`, and reset the rest-encoder and counter.
 * No-op when nothing has been written since the last flush.
 *
 * @param {LazyStructWriter} lazyWriter
 */
const flushLazyStructWriter = lazyWriter => {
  if (lazyWriter.written === 0) {
    return
  }
  const restEncoder = toUint8Array(lazyWriter.encoder.restEncoder);
  lazyWriter.clientStructs.push({ written: lazyWriter.written, restEncoder });
  lazyWriter.encoder.restEncoder = createEncoder();
  lazyWriter.written = 0;
};
|
||
|
||
/**
 * Encode `struct` (from `offset` onwards) into the lazy writer, starting a
 * fresh per-client fragment whenever the client id changes.
 *
 * @param {LazyStructWriter} lazyWriter
 * @param {Item | GC} struct
 * @param {number} offset
 */
const writeStructToLazyStructWriter = (lazyWriter, struct, offset) => {
  const client = struct.id.client;
  // a different client begins ⇒ flush the fragment written so far
  if (lazyWriter.written > 0 && lazyWriter.currClient !== client) {
    flushLazyStructWriter(lazyWriter);
  }
  if (lazyWriter.written === 0) {
    lazyWriter.currClient = client;
    // a fragment starts with the client id followed by its start clock
    lazyWriter.encoder.writeClient(client);
    writeVarUint(lazyWriter.encoder.restEncoder, struct.id.clock + offset);
  }
  struct.write(lazyWriter.encoder, offset);
  lazyWriter.written += 1;
};
|
||
/**
 * Call this function when we collected all parts and want to
 * put all the parts together. After calling this method,
 * you can continue using the UpdateEncoder.
 *
 * @param {LazyStructWriter} lazyWriter
 */
const finishLazyStructWriting = (lazyWriter) => {
  flushLazyStructWriter(lazyWriter);

  // this is a fresh encoder because we called flushCurr
  const restEncoder = lazyWriter.encoder.restEncoder;

  /**
   * Stitch the fragments together. This mirrors the layout produced by
   * `writeClientsStructs`: first the number of updated clients, then for each
   * client its struct count followed by that client's encoded fragment.
   */
  writeVarUint(restEncoder, lazyWriter.clientStructs.length);

  for (const partStructs of lazyWriter.clientStructs) {
    // # encoded structs for this client
    writeVarUint(restEncoder, partStructs.written);
    // the rest of the fragment
    writeUint8Array(restEncoder, partStructs.restEncoder);
  }
};
|
||
|
||
/**
 * Re-encode an update from one encoding version to another by streaming every
 * struct through a lazy reader/writer pair and copying the delete set.
 *
 * @param {Uint8Array} update
 * @param {typeof UpdateDecoderV2 | typeof UpdateDecoderV1} YDecoder
 * @param {typeof UpdateEncoderV2 | typeof UpdateEncoderV1 } YEncoder
 */
const convertUpdateFormat = (update, YDecoder, YEncoder) => {
  const updateDecoder = new YDecoder(createDecoder(update));
  // keep Skip structs: the re-encoded update must describe the same ranges
  const lazyDecoder = new LazyStructReader(updateDecoder, false);
  const updateEncoder = new YEncoder();
  const lazyWriter = new LazyStructWriter(updateEncoder);

  let curr = lazyDecoder.curr;
  while (curr !== null) {
    writeStructToLazyStructWriter(lazyWriter, curr, 0);
    curr = lazyDecoder.next();
  }
  finishLazyStructWriting(lazyWriter);
  const ds = readDeleteSet(updateDecoder);
  writeDeleteSet(updateEncoder, ds);
  return updateEncoder.toUint8Array()
};
|
||
|
||
/**
 * Convert a v2-encoded update to the v1 encoding.
 *
 * @param {Uint8Array} update
 * @return {Uint8Array}
 */
const convertUpdateFormatV2ToV1 = (update) => convertUpdateFormat(update, UpdateDecoderV2, UpdateEncoderV1);
|
||
|
||
/**
 * @template {AbstractType<any>} T
 * YEvent describes the changes on a YType.
 */
class YEvent {
  /**
   * @param {T} target The changed type.
   * @param {Transaction} transaction
   */
  constructor (target, transaction) {
    /**
     * The type on which this event was created on.
     * @type {T}
     */
    this.target = target;
    /**
     * The current target on which the observe callback is called.
     * @type {AbstractType<any>}
     */
    this.currentTarget = target;
    /**
     * The transaction that triggered this event.
     * @type {Transaction}
     */
    this.transaction = transaction;
    /**
     * Lazily computed cache for the `changes` getter.
     * @type {Object|null}
     */
    this._changes = null;
    /**
     * Lazily computed cache for the `keys` getter.
     * @type {null | Map<string, { action: 'add' | 'update' | 'delete', oldValue: any, newValue: any }>}
     */
    this._keys = null;
    /**
     * @type {null | Array<{ insert?: string | Array<any> | object | AbstractType<any>, retain?: number, delete?: number, attributes?: Object<string, any> }>}
     */
    this._delta = null;
  }

  /**
   * Computes the path from `y` to the changed type.
   *
   * @todo v14 should standardize on path: Array<{parent, index}> because that is easier to work with.
   *
   * The following property holds:
   * @example
   *   let type = y
   *   event.path.forEach(dir => {
   *     type = type.get(dir)
   *   })
   *   type === event.target // => true
   */
  get path () {
    // @ts-ignore _item is defined because target is integrated
    return getPathTo(this.currentTarget, this.target)
  }

  /**
   * Check if a struct is deleted by this event.
   *
   * In contrast to change.deleted, this method also returns true if the struct was added and then deleted.
   *
   * @param {AbstractStruct} struct
   * @return {boolean}
   */
  deletes (struct) {
    return isDeleted(this.transaction.deleteSet, struct.id)
  }

  /**
   * Map of changed map-entries: key -> what happened to it in this transaction.
   * Computed lazily on first access and cached.
   *
   * @type {Map<string, { action: 'add' | 'update' | 'delete', oldValue: any, newValue: any }>}
   */
  get keys () {
    if (this._keys === null) {
      const keys = new Map();
      const target = this.target;
      const changed = /** @type Set<string|null> */ (this.transaction.changed.get(target));
      changed.forEach(key => {
        if (key !== null) {
          const item = /** @type {Item} */ (target._map.get(key));
          /**
           * @type {'delete' | 'add' | 'update'}
           */
          let action;
          let oldValue;
          if (this.adds(item)) {
            // walk left past items added in this transaction to find the previous value
            let prev = item.left;
            while (prev !== null && this.adds(prev)) {
              prev = prev.left;
            }
            if (this.deletes(item)) {
              if (prev !== null && this.deletes(prev)) {
                action = 'delete';
                oldValue = last(prev.content.getContent());
              } else {
                // added and deleted within the same transaction ⇒ no observable change
                return
              }
            } else {
              if (prev !== null && this.deletes(prev)) {
                action = 'update';
                oldValue = last(prev.content.getContent());
              } else {
                action = 'add';
                oldValue = undefined;
              }
            }
          } else {
            if (this.deletes(item)) {
              action = 'delete';
              oldValue = last(/** @type {Item} */ item.content.getContent());
            } else {
              return // nop
            }
          }
          keys.set(key, { action, oldValue });
        }
      });
      this._keys = keys;
    }
    return this._keys
  }

  /**
   * Quill-style delta describing the sequence changes (shortcut for `changes.delta`).
   *
   * @type {Array<{insert?: string | Array<any> | object | AbstractType<any>, retain?: number, delete?: number, attributes?: Object<string, any>}>}
   */
  get delta () {
    return this.changes.delta
  }

  /**
   * Check if a struct is added by this event.
   *
   * In contrast to change.deleted, this method also returns true if the struct was added and then deleted.
   *
   * @param {AbstractStruct} struct
   * @return {boolean}
   */
  adds (struct) {
    return struct.id.clock >= (this.transaction.beforeState.get(struct.id.client) || 0)
  }

  /**
   * Aggregated change description: sets of added/deleted items, the map-key
   * changes, and a delta for sequence changes. Computed lazily and cached.
   *
   * @type {{added:Set<Item>,deleted:Set<Item>,keys:Map<string,{action:'add'|'update'|'delete',oldValue:any}>,delta:Array<{insert?:Array<any>|string, delete?:number, retain?:number}>}}
   */
  get changes () {
    let changes = this._changes;
    if (changes === null) {
      const target = this.target;
      const added = create$5();
      const deleted = create$5();
      /**
       * @type {Array<{insert:Array<any>}|{delete:number}|{retain:number}>}
       */
      const delta = [];
      changes = {
        added,
        deleted,
        delta,
        keys: this.keys
      };
      const changed = /** @type Set<string|null> */ (this.transaction.changed.get(target));
      // `null` in the changed-set marks a change to the list content
      if (changed.has(null)) {
        /**
         * The delta op currently being accumulated; pushed when the op kind changes.
         * @type {any}
         */
        let lastOp = null;
        const packOp = () => {
          if (lastOp) {
            delta.push(lastOp);
          }
        };
        for (let item = target._start; item !== null; item = item.right) {
          if (item.deleted) {
            if (this.deletes(item) && !this.adds(item)) {
              if (lastOp === null || lastOp.delete === undefined) {
                packOp();
                lastOp = { delete: 0 };
              }
              lastOp.delete += item.length;
              deleted.add(item);
            } // else nop
          } else {
            if (this.adds(item)) {
              if (lastOp === null || lastOp.insert === undefined) {
                packOp();
                lastOp = { insert: [] };
              }
              lastOp.insert = lastOp.insert.concat(item.content.getContent());
              added.add(item);
            } else {
              if (lastOp === null || lastOp.retain === undefined) {
                packOp();
                lastOp = { retain: 0 };
              }
              lastOp.retain += item.length;
            }
          }
        }
        // push the final op unless it is a trailing retain (retains at the end are implicit)
        if (lastOp !== null && lastOp.retain === undefined) {
          packOp();
        }
      }
      this._changes = changes;
    }
    return /** @type {any} */ (changes)
  }
}
|
||
|
||
/**
 * Compute the path from this type to the specified target.
 *
 * @example
 *   // `child` should be accessible via `type.get(path[0]).get(path[1])..`
 *   const path = type.getPathTo(child)
 *   // assuming `type instanceof YArray`
 *   console.log(path) // might look like => [2, 'key1']
 *   child === type.get(path[0]).get(path[1])
 *
 * @param {AbstractType<any>} parent
 * @param {AbstractType<any>} child target
 * @return {Array<string|number>} Path to the target
 *
 * @private
 * @function
 */
const getPathTo = (parent, child) => {
  const reversedPath = [];
  while (child._item !== null && child !== parent) {
    const item = child._item;
    if (item.parentSub !== null) {
      // parent is map-ish: the path segment is the key
      reversedPath.push(item.parentSub);
    } else {
      // parent is array-ish: the segment is the index among non-deleted siblings
      let index = 0;
      let c = /** @type {AbstractType<any>} */ (item.parent)._start;
      while (c !== item && c !== null) {
        if (!c.deleted) {
          index++;
        }
        c = c.right;
      }
      reversedPath.push(index);
    }
    child = /** @type {AbstractType<any>} */ (item.parent);
  }
  return reversedPath.reverse()
};
|
||
|
||
// Upper bound on cached ArraySearchMarker objects per list type
// (oldest markers are recycled once this limit is reached — see markPosition).
const maxSearchMarker = 80;

/**
 * A unique timestamp that identifies each marker.
 *
 * Time is relative,.. this is more like an ever-increasing clock.
 * Used for least-recently-used recycling of search markers.
 *
 * @type {number}
 */
let globalSearchMarkerTimestamp = 0;
|
||
|
||
/**
 * Cached (item, index) position inside a list type. Markers let index
 * lookups start near the target instead of at the list head (see findMarker).
 */
class ArraySearchMarker {
  /**
   * @param {Item} p Item this marker points at
   * @param {number} index list index of `p` within its parent type
   */
  constructor (p, index) {
    p.marker = true; // flag the item so other code can detect it is marked
    this.p = p;
    this.index = index;
    this.timestamp = globalSearchMarkerTimestamp++; // LRU bookkeeping
  }
}
|
||
|
||
/**
 * Bump a marker's timestamp so it counts as recently used (LRU refresh).
 *
 * @param {ArraySearchMarker} marker
 */
const refreshMarkerTimestamp = marker => {
  marker.timestamp = globalSearchMarkerTimestamp++;
};
|
||
|
||
/**
 * This is rather complex so this function is the only thing that should
 * overwrite a marker: it releases the previously marked item, claims the
 * new one, and refreshes the LRU timestamp.
 *
 * @param {ArraySearchMarker} marker
 * @param {Item} p
 * @param {number} index
 */
const overwriteMarker = (marker, p, index) => {
  // release the old item, then claim the new one
  marker.p.marker = false;
  p.marker = true;
  marker.p = p;
  marker.index = index;
  marker.timestamp = globalSearchMarkerTimestamp++;
};
|
||
|
||
/**
 * Record a new search-marker position, recycling the least recently used
 * marker when the per-type capacity (`maxSearchMarker`) is exhausted.
 *
 * @param {Array<ArraySearchMarker>} searchMarker
 * @param {Item} p
 * @param {number} index
 */
const markPosition = (searchMarker, p, index) => {
  if (searchMarker.length < maxSearchMarker) {
    // capacity left: register a fresh marker
    const created = new ArraySearchMarker(p, index);
    searchMarker.push(created);
    return created
  }
  // at capacity: recycle the oldest marker (we don't want to create more objects)
  let oldest = searchMarker[0];
  for (const m of searchMarker) {
    if (m.timestamp < oldest.timestamp) {
      oldest = m;
    }
  }
  overwriteMarker(oldest, p, index);
  return oldest
};
|
||
|
||
/**
 * Search marker help us to find positions in the associative array faster.
 *
 * They speed up the process of finding a position without much bookkeeping.
 *
 * A maximum of `maxSearchMarker` objects are created.
 *
 * This function always returns a refreshed marker (updated timestamp)
 *
 * @param {AbstractType<any>} yarray
 * @param {number} index
 */
const findMarker = (yarray, index) => {
  // Nothing to cache for empty lists, the list head, or types without marker support.
  if (yarray._start === null || index === 0 || yarray._searchMarker === null) {
    return null
  }
  // Pick the existing marker whose cached index is closest to the requested index.
  const marker = yarray._searchMarker.length === 0 ? null : yarray._searchMarker.reduce((a, b) => abs(index - a.index) < abs(index - b.index) ? a : b);
  let p = yarray._start;
  let pindex = 0;
  if (marker !== null) {
    p = marker.p;
    pindex = marker.index;
    refreshMarkerTimestamp(marker); // we used it, we might need to use it again
  }
  // iterate to right if possible
  while (p.right !== null && pindex < index) {
    if (!p.deleted && p.countable) {
      if (index < pindex + p.length) {
        break
      }
      pindex += p.length;
    }
    p = p.right;
  }
  // iterate to left if necessary (might be that pindex > index)
  while (p.left !== null && pindex > index) {
    p = p.left;
    if (!p.deleted && p.countable) {
      pindex -= p.length;
    }
  }
  // we want to make sure that p can't be merged with left, because that would screw up everything
  // in that case just return what we have (it is most likely the best marker anyway)
  // iterate to left until p can't be merged with left
  while (p.left !== null && p.left.id.client === p.id.client && p.left.id.clock + p.left.length === p.id.clock) {
    p = p.left;
    if (!p.deleted && p.countable) {
      pindex -= p.length;
    }
  }
  // Reuse the existing marker when the correction is small relative to the list
  // length; otherwise record a new marker position.
  if (marker !== null && abs(marker.index - pindex) < /** @type {YText|YArray<any>} */ (p.parent).length / maxSearchMarker) {
    // adjust existing marker
    overwriteMarker(marker, p, pindex);
    return marker
  } else {
    // create new marker
    return markPosition(yarray._searchMarker, p, pindex)
  }
};
|
||
|
||
/**
 * Update markers when a change happened.
 *
 * This should be called before doing a deletion!
 *
 * @param {Array<ArraySearchMarker>} searchMarker
 * @param {number} index
 * @param {number} len If insertion, len is positive. If deletion, len is negative.
 */
const updateMarkerChanges = (searchMarker, index, len) => {
  // iterate backwards because markers may be spliced out while iterating
  for (let i = searchMarker.length - 1; i >= 0; i--) {
    const m = searchMarker[i];
    if (len > 0) {
      /**
       * @type {Item|null}
       */
      let p = m.p;
      p.marker = false;
      // Ideally we just want to do a simple position comparison, but this will only work if
      // search markers don't point to deleted items for formats.
      // Iterate marker to prev undeleted countable position so we know what to do when updating a position
      while (p && (p.deleted || !p.countable)) {
        p = p.left;
        if (p && !p.deleted && p.countable) {
          // adjust position. the loop should break now
          m.index -= p.length;
        }
      }
      if (p === null || p.marker === true) {
        // remove search marker if updated position is null or if position is already marked
        searchMarker.splice(i, 1);
        continue
      }
      m.p = p;
      p.marker = true;
    }
    if (index < m.index || (len > 0 && index === m.index)) { // a simple index <= m.index check would actually suffice
      m.index = max(index, m.index + len);
    }
  }
};
|
||
|
||
/**
 * Call event listeners with an event. This will also add an event to all
 * parents (for `.observeDeep` handlers).
 *
 * @template EventType
 * @param {AbstractType<EventType>} type
 * @param {Transaction} transaction
 * @param {EventType} event
 */
const callTypeObservers = (type, transaction, event) => {
  const changedType = type;
  const changedParentTypes = transaction.changedParentTypes;
  // Register the event on `type` and on every ancestor up to the root,
  // so observeDeep handlers fire for the whole parent chain.
  let current = type;
  while (true) {
    // @ts-ignore
    setIfUndefined(changedParentTypes, current, () => []).push(event);
    if (current._item === null) {
      break
    }
    current = /** @type {AbstractType<any>} */ (current._item.parent);
  }
  // Fire the direct observers of the changed type itself.
  callEventHandlerListeners(changedType._eH, event, transaction);
};
|
||
|
||
/**
 * @template EventType
 * Abstract Yjs Type class
 *
 * Base class of the shared types. It owns the linked list of Items
 * (`_start`), the map part (`_map`), and the handler registries used by
 * `observe`/`observeDeep`.
 */
class AbstractType {
  constructor () {
    /**
     * The Item that embeds this type in its parent (null for root types).
     * @type {Item|null}
     */
    this._item = null;
    /**
     * Map part: key => most recently written Item for that key.
     * @type {Map<string,Item>}
     */
    this._map = new Map();
    /**
     * First Item of the list part.
     * @type {Item|null}
     */
    this._start = null;
    /**
     * @type {Doc|null}
     */
    this.doc = null;
    // Cached number of countable, undeleted list elements.
    this._length = 0;
    /**
     * Event handlers
     * @type {EventHandler<EventType,Transaction>}
     */
    this._eH = createEventHandler();
    /**
     * Deep event handlers
     * @type {EventHandler<Array<YEvent<any>>,Transaction>}
     */
    this._dEH = createEventHandler();
    /**
     * Search-marker cache; null when the concrete type does not support it.
     * @type {null | Array<ArraySearchMarker>}
     */
    this._searchMarker = null;
  }

  /**
   * @return {AbstractType<any>|null}
   */
  get parent () {
    return this._item ? /** @type {AbstractType<any>} */ (this._item.parent) : null
  }

  /**
   * Integrate this type into the Yjs instance.
   *
   * * Save this struct in the os
   * * This type is sent to other client
   * * Observer functions are fired
   *
   * @param {Doc} y The Yjs instance
   * @param {Item|null} item
   */
  _integrate (y, item) {
    this.doc = y;
    this._item = item;
  }

  /**
   * @return {AbstractType<EventType>}
   */
  _copy () {
    throw methodUnimplemented()
  }

  /**
   * @return {AbstractType<EventType>}
   */
  clone () {
    throw methodUnimplemented()
  }

  /**
   * Write this type's reference to an update. Subclasses override.
   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
   */
  _write (encoder) { }

  /**
   * The first non-deleted item
   */
  get _first () {
    let n = this._start;
    while (n !== null && n.deleted) {
      n = n.right;
    }
    return n
  }

  /**
   * Creates YEvent and calls all type observers.
   * Must be implemented by each type.
   *
   * @param {Transaction} transaction
   * @param {Set<null|string>} parentSubs Keys changed on this type. `null` if list was modified.
   */
  _callObserver (transaction, parentSubs) {
    // Remote changes invalidate cached search-marker positions.
    if (!transaction.local && this._searchMarker) {
      this._searchMarker.length = 0;
    }
  }

  /**
   * Observe all events that are created on this type.
   *
   * @param {function(EventType, Transaction):void} f Observer function
   */
  observe (f) {
    addEventHandlerListener(this._eH, f);
  }

  /**
   * Observe all events that are created by this type and its children.
   *
   * @param {function(Array<YEvent<any>>,Transaction):void} f Observer function
   */
  observeDeep (f) {
    addEventHandlerListener(this._dEH, f);
  }

  /**
   * Unregister an observer function.
   *
   * @param {function(EventType,Transaction):void} f Observer function
   */
  unobserve (f) {
    removeEventHandlerListener(this._eH, f);
  }

  /**
   * Unregister an observer function.
   *
   * @param {function(Array<YEvent<any>>,Transaction):void} f Observer function
   */
  unobserveDeep (f) {
    removeEventHandlerListener(this._dEH, f);
  }

  /**
   * @abstract
   * @return {any}
   */
  toJSON () {}
}
|
||
|
||
/**
 * Collect the list elements in [start, end). Negative indices count from
 * the end of the list (like Array#slice).
 *
 * @param {AbstractType<any>} type
 * @param {number} start
 * @param {number} end
 * @return {Array<any>}
 *
 * @private
 * @function
 */
const typeListSlice = (type, start, end) => {
  if (start < 0) {
    start = type._length + start;
  }
  if (end < 0) {
    end = type._length + end;
  }
  let remaining = end - start;
  const result = [];
  let item = type._start;
  while (item !== null && remaining > 0) {
    if (item.countable && !item.deleted) {
      const content = item.content.getContent();
      if (content.length <= start) {
        // the requested range begins after this item — skip it entirely
        start -= content.length;
      } else {
        for (let i = start; i < content.length && remaining > 0; i++) {
          result.push(content[i]);
          remaining--;
        }
        start = 0;
      }
    }
    item = item.right;
  }
  return result
};
|
||
|
||
/**
 * Flatten all countable, undeleted list content into a plain Array.
 *
 * @param {AbstractType<any>} type
 * @return {Array<any>}
 *
 * @private
 * @function
 */
const typeListToArray = type => {
  const result = [];
  for (let item = type._start; item !== null; item = item.right) {
    if (item.countable && !item.deleted) {
      for (const value of item.content.getContent()) {
        result.push(value);
      }
    }
  }
  return result
};
|
||
|
||
/**
 * Executes a provided function once on every element of this YArray.
 *
 * @param {AbstractType<any>} type
 * @param {function(any,number,any):void} f A function to execute on every element of this YArray.
 *
 * @private
 * @function
 */
const typeListForEach = (type, f) => {
  let index = 0;
  for (let item = type._start; item !== null; item = item.right) {
    if (item.countable && !item.deleted) {
      const content = item.content.getContent();
      for (let i = 0; i < content.length; i++) {
        f(content[i], index++, type);
      }
    }
  }
};
|
||
|
||
/**
 * Map every list element through `f`, collecting the results.
 *
 * @template C,R
 * @param {AbstractType<any>} type
 * @param {function(C,number,AbstractType<any>):R} f
 * @return {Array<R>}
 *
 * @private
 * @function
 */
const typeListMap = (type, f) => {
  /**
   * @type {Array<any>}
   */
  const mapped = [];
  typeListForEach(type, (value, i) => {
    mapped.push(f(value, i, type));
  });
  return mapped
};
|
||
|
||
/**
 * Create an iterator over the undeleted list content of a type.
 *
 * @param {AbstractType<any>} type
 * @return {IterableIterator<any>}
 *
 * @private
 * @function
 */
const typeListCreateIterator = type => {
  let n = type._start;
  /**
   * Content of the item currently being consumed (null = fetch next item).
   * @type {Array<any>|null}
   */
  let currentContent = null;
  let currentContentIndex = 0;
  const next = () => {
    if (currentContent === null) {
      // advance past deleted items
      while (n !== null && n.deleted) {
        n = n.right;
      }
      if (n === null) {
        // end of list; currentContent cannot exist here
        return { done: true, value: undefined }
      }
      // found an item: stage its content and move on
      currentContent = n.content.getContent();
      currentContentIndex = 0;
      n = n.right;
    }
    const value = currentContent[currentContentIndex++];
    // drop the staged content once it is fully consumed
    if (currentContent.length <= currentContentIndex) {
      currentContent = null;
    }
    return { done: false, value }
  };
  return {
    [Symbol.iterator] () {
      return this
    },
    next
  }
};
|
||
|
||
/**
 * Return the element at `index`, or undefined when out of range.
 *
 * @param {AbstractType<any>} type
 * @param {number} index
 * @return {any}
 *
 * @private
 * @function
 */
const typeListGet = (type, index) => {
  const marker = findMarker(type, index);
  let item = type._start;
  if (marker !== null) {
    // jump close to the target position instead of scanning from the head
    item = marker.p;
    index -= marker.index;
  }
  while (item !== null) {
    if (!item.deleted && item.countable) {
      if (index < item.length) {
        return item.content.getContent()[index]
      }
      index -= item.length;
    }
    item = item.right;
  }
};
|
||
|
||
/**
 * Insert `content` into the list part of `parent`, directly after
 * `referenceItem` (or at the list head when it is null). Consecutive
 * JSON-encodable values are packed into a single ContentAny Item.
 *
 * @param {Transaction} transaction
 * @param {AbstractType<any>} parent
 * @param {Item?} referenceItem
 * @param {Array<Object<string,any>|Array<any>|boolean|number|null|string|Uint8Array>} content
 *
 * @private
 * @function
 */
const typeListInsertGenericsAfter = (transaction, parent, referenceItem, content) => {
  let left = referenceItem;
  const doc = transaction.doc;
  const ownClientId = doc.clientID;
  const store = doc.store;
  const right = referenceItem === null ? parent._start : referenceItem.right;
  /**
   * Buffer of plain JSON values awaiting packing into one ContentAny Item.
   * @type {Array<Object|Array<any>|number|null>}
   */
  let jsonContent = [];
  const packJsonContent = () => {
    if (jsonContent.length > 0) {
      left = new Item$1(createID(ownClientId, getState(store, ownClientId)), left, left && left.lastId, right, right && right.id, parent, null, new ContentAny(jsonContent));
      left.integrate(transaction, 0);
      jsonContent = [];
    }
  };
  content.forEach(c => {
    if (c === null) {
      jsonContent.push(c);
    } else {
      switch (c.constructor) {
        case Number:
        case Object:
        case Boolean:
        case Array:
        case String:
          jsonContent.push(c);
          break
        default:
          // non-JSON content terminates the current ContentAny run
          packJsonContent();
          switch (c.constructor) {
            case Uint8Array:
            case ArrayBuffer:
              left = new Item$1(createID(ownClientId, getState(store, ownClientId)), left, left && left.lastId, right, right && right.id, parent, null, new ContentBinary(new Uint8Array(/** @type {Uint8Array} */ (c))));
              left.integrate(transaction, 0);
              break
            case Doc:
              left = new Item$1(createID(ownClientId, getState(store, ownClientId)), left, left && left.lastId, right, right && right.id, parent, null, new ContentDoc(/** @type {Doc} */ (c)));
              left.integrate(transaction, 0);
              break
            default:
              if (c instanceof AbstractType) {
                left = new Item$1(createID(ownClientId, getState(store, ownClientId)), left, left && left.lastId, right, right && right.id, parent, null, new ContentType(c));
                left.integrate(transaction, 0);
              } else {
                throw new Error('Unexpected content type in insert operation')
              }
          }
      }
    }
  });
  // flush any trailing JSON values
  packJsonContent();
};
|
||
|
||
// Shared error instance thrown when a list index/length is out of bounds.
const lengthExceeded = create$4('Length exceeded!');
|
||
|
||
/**
 * Insert `content` at list position `index`, splitting an existing Item
 * when the position falls inside it.
 *
 * @param {Transaction} transaction
 * @param {AbstractType<any>} parent
 * @param {number} index
 * @param {Array<Object<string,any>|Array<any>|number|null|string|Uint8Array>} content
 * @throws {Error} lengthExceeded when `index` is past the end of the list
 *
 * @private
 * @function
 */
const typeListInsertGenerics = (transaction, parent, index, content) => {
  if (index > parent._length) {
    throw lengthExceeded
  }
  if (index === 0) {
    if (parent._searchMarker) {
      updateMarkerChanges(parent._searchMarker, index, content.length);
    }
    return typeListInsertGenericsAfter(transaction, parent, null, content)
  }
  const startIndex = index;
  const marker = findMarker(parent, index);
  let n = parent._start;
  if (marker !== null) {
    n = marker.p;
    index -= marker.index;
    // we need to iterate one to the left so that the algorithm works
    if (index === 0) {
      // @todo refactor this as it actually doesn't consider formats
      n = n.prev; // important! get the left undeleted item so that we can actually decrease index
      index += (n && n.countable && !n.deleted) ? n.length : 0;
    }
  }
  // walk right until the insert position falls on or inside item `n`
  for (; n !== null; n = n.right) {
    if (!n.deleted && n.countable) {
      if (index <= n.length) {
        if (index < n.length) {
          // insert in-between: split `n` at the target offset
          getItemCleanStart(transaction, createID(n.id.client, n.id.clock + index));
        }
        break
      }
      index -= n.length;
    }
  }
  if (parent._searchMarker) {
    updateMarkerChanges(parent._searchMarker, startIndex, content.length);
  }
  return typeListInsertGenericsAfter(transaction, parent, n, content)
};
|
||
|
||
/**
 * Pushing content is special as we generally want to push after the last
 * item, so we don't have to update the search markers.
 *
 * @param {Transaction} transaction
 * @param {AbstractType<any>} parent
 * @param {Array<Object<string,any>|Array<any>|number|null|string|Uint8Array>} content
 *
 * @private
 * @function
 */
const typeListPushGenerics = (transaction, parent, content) => {
  // Start from the marker with the highest index, then walk to the list end.
  const fallback = { index: 0, p: parent._start };
  const best = (parent._searchMarker || []).reduce(
    (acc, m) => (m.index > acc.index ? m : acc),
    fallback
  );
  let n = best.p;
  while (n && n.right) {
    n = n.right;
  }
  return typeListInsertGenericsAfter(transaction, parent, n, content)
};
|
||
|
||
/**
 * Delete `length` elements starting at list position `index`, splitting
 * boundary Items as needed.
 *
 * @param {Transaction} transaction
 * @param {AbstractType<any>} parent
 * @param {number} index
 * @param {number} length
 * @throws {Error} lengthExceeded when the range extends past the list end
 *
 * @private
 * @function
 */
const typeListDelete = (transaction, parent, index, length) => {
  if (length === 0) { return }
  const startIndex = index;
  const startLength = length;
  const marker = findMarker(parent, index);
  let n = parent._start;
  if (marker !== null) {
    n = marker.p;
    index -= marker.index;
  }
  // compute the first item to be deleted
  for (; n !== null && index > 0; n = n.right) {
    if (!n.deleted && n.countable) {
      if (index < n.length) {
        // split so that deletion starts exactly at `index`
        getItemCleanStart(transaction, createID(n.id.client, n.id.clock + index));
      }
      index -= n.length;
    }
  }
  // delete all items until done
  while (length > 0 && n !== null) {
    if (!n.deleted) {
      if (length < n.length) {
        // split so that only the first `length` elements of `n` are deleted
        getItemCleanStart(transaction, createID(n.id.client, n.id.clock + length));
      }
      n.delete(transaction);
      length -= n.length;
    }
    n = n.right;
  }
  if (length > 0) {
    throw lengthExceeded
  }
  if (parent._searchMarker) {
    updateMarkerChanges(parent._searchMarker, startIndex, -startLength + length /* in case we remove the above exception */);
  }
};
|
||
|
||
/**
 * Delete the value stored under `key` in the map part of `parent`.
 * A no-op when the key was never written.
 *
 * @param {Transaction} transaction
 * @param {AbstractType<any>} parent
 * @param {string} key
 *
 * @private
 * @function
 */
const typeMapDelete = (transaction, parent, key) => {
  const item = parent._map.get(key);
  if (item !== undefined) {
    item.delete(transaction);
  }
};
|
||
|
||
/**
 * Write `value` under `key` in the map part of `parent` by creating a new
 * Item whose `left` is the previous entry (the old entry becomes overwritten).
 *
 * @param {Transaction} transaction
 * @param {AbstractType<any>} parent
 * @param {string} key
 * @param {Object|number|null|Array<any>|string|Uint8Array|AbstractType<any>} value
 * @throws {Error} on content types that cannot be encoded
 *
 * @private
 * @function
 */
const typeMapSet = (transaction, parent, key, value) => {
  const left = parent._map.get(key) || null;
  const doc = transaction.doc;
  const ownClientId = doc.clientID;
  let content;
  // note: `== null` intentionally catches both null and undefined
  if (value == null) {
    content = new ContentAny([value]);
  } else {
    switch (value.constructor) {
      case Number:
      case Object:
      case Boolean:
      case Array:
      case String:
        content = new ContentAny([value]);
        break
      case Uint8Array:
        content = new ContentBinary(/** @type {Uint8Array} */ (value));
        break
      case Doc:
        content = new ContentDoc(/** @type {Doc} */ (value));
        break
      default:
        if (value instanceof AbstractType) {
          content = new ContentType(value);
        } else {
          throw new Error('Unexpected content type')
        }
    }
  }
  new Item$1(createID(ownClientId, getState(doc.store, ownClientId)), left, left && left.lastId, null, null, parent, key, content).integrate(transaction, 0);
};
|
||
|
||
/**
 * Read the current value stored under `key` in the map part of `parent`.
 *
 * @param {AbstractType<any>} parent
 * @param {string} key
 * @return {Object<string,any>|number|null|Array<any>|string|Uint8Array|AbstractType<any>|undefined}
 *
 * @private
 * @function
 */
const typeMapGet = (parent, key) => {
  const item = parent._map.get(key);
  if (item === undefined || item.deleted) {
    return undefined
  }
  // the last element of the content is the current value
  return item.content.getContent()[item.length - 1]
};
|
||
|
||
/**
 * Snapshot the whole map part of `parent` as a plain object
 * (deleted entries are omitted).
 *
 * @param {AbstractType<any>} parent
 * @return {Object<string,Object<string,any>|number|null|Array<any>|string|Uint8Array|AbstractType<any>|undefined>}
 *
 * @private
 * @function
 */
const typeMapGetAll = (parent) => {
  /**
   * @type {Object<string,any>}
   */
  const result = {};
  for (const [key, item] of parent._map) {
    if (!item.deleted) {
      result[key] = item.content.getContent()[item.length - 1];
    }
  }
  return result
};
|
||
|
||
/**
 * Whether `key` currently holds a (non-deleted) value in the map part.
 *
 * @param {AbstractType<any>} parent
 * @param {string} key
 * @return {boolean}
 *
 * @private
 * @function
 */
const typeMapHas = (parent, key) => {
  const item = parent._map.get(key);
  return item !== undefined && !item.deleted
};
|
||
|
||
/**
 * Iterate the [key, Item] entries of a type's map part, skipping deleted items.
 *
 * @param {Map<string,Item>} map
 * @return {IterableIterator<Array<any>>}
 *
 * @private
 * @function
 */
const createMapIterator = map => iteratorFilter(map.entries(), /** @param {any} entry */ entry => !entry[1].deleted);
|
||
|
||
/**
 * @module YArray
 */

/**
 * Event that describes the changes on a YArray
 * @template T
 * @extends YEvent<YArray<T>>
 */
class YArrayEvent extends YEvent {
  /**
   * @param {YArray<T>} yarray The changed type
   * @param {Transaction} transaction The transaction object
   */
  constructor (yarray, transaction) {
    super(yarray, transaction);
    // NOTE(review): transaction is already passed to YEvent; this extra
    // reference looks redundant — confirm before removing.
    this._transaction = transaction;
  }
}
|
||
|
||
/**
 * A shared Array implementation.
 * @template T
 * @extends AbstractType<YArrayEvent<T>>
 * @implements {Iterable<T>}
 */
class YArray extends AbstractType {
  constructor () {
    super();
    /**
     * Content buffered before the type is integrated into a Doc
     * (null once integrated).
     * @type {Array<any>?}
     * @private
     */
    this._prelimContent = [];
    /**
     * @type {Array<ArraySearchMarker>}
     */
    this._searchMarker = [];
  }

  /**
   * Construct a new YArray containing the specified items.
   * @template T
   * @param {Array<T>} items
   * @return {YArray<T>}
   */
  static from (items) {
    const a = new YArray();
    a.push(items);
    return a
  }

  /**
   * Integrate this type into the Yjs instance.
   *
   * * Save this struct in the os
   * * This type is sent to other client
   * * Observer functions are fired
   *
   * @param {Doc} y The Yjs instance
   * @param {Item} item
   */
  _integrate (y, item) {
    super._integrate(y, item);
    // flush content buffered before integration
    this.insert(0, /** @type {Array<any>} */ (this._prelimContent));
    this._prelimContent = null;
  }

  _copy () {
    return new YArray()
  }

  /**
   * @return {YArray<T>}
   */
  clone () {
    const arr = new YArray();
    arr.insert(0, this.toArray().map(el =>
      el instanceof AbstractType ? el.clone() : el
    ));
    return arr
  }

  // Before integration the length is that of the preliminary buffer.
  get length () {
    return this._prelimContent === null ? this._length : this._prelimContent.length
  }

  /**
   * Creates YArrayEvent and calls observers.
   *
   * @param {Transaction} transaction
   * @param {Set<null|string>} parentSubs Keys changed on this type. `null` if list was modified.
   */
  _callObserver (transaction, parentSubs) {
    super._callObserver(transaction, parentSubs);
    callTypeObservers(this, transaction, new YArrayEvent(this, transaction));
  }

  /**
   * Inserts new content at an index.
   *
   * Important: This function expects an array of content. Not just a content
   * object. The reason for this "weirdness" is that inserting several elements
   * is very efficient when it is done as a single operation.
   *
   * @example
   *  // Insert character 'a' at position 0
   *  yarray.insert(0, ['a'])
   *  // Insert numbers 1, 2 at position 1
   *  yarray.insert(1, [1, 2])
   *
   * @param {number} index The index to insert content at.
   * @param {Array<T>} content The array of content
   */
  insert (index, content) {
    if (this.doc !== null) {
      transact(this.doc, transaction => {
        typeListInsertGenerics(transaction, this, index, content);
      });
    } else {
      /** @type {Array<any>} */ (this._prelimContent).splice(index, 0, ...content);
    }
  }

  /**
   * Appends content to this YArray.
   *
   * @param {Array<T>} content Array of content to append.
   *
   * @todo Use the following implementation in all types.
   */
  push (content) {
    if (this.doc !== null) {
      transact(this.doc, transaction => {
        typeListPushGenerics(transaction, this, content);
      });
    } else {
      /** @type {Array<any>} */ (this._prelimContent).push(...content);
    }
  }

  /**
   * Prepends content to this YArray.
   *
   * @param {Array<T>} content Array of content to prepend.
   */
  unshift (content) {
    this.insert(0, content);
  }

  /**
   * Deletes elements starting from an index.
   *
   * @param {number} index Index at which to start deleting elements
   * @param {number} length The number of elements to remove. Defaults to 1.
   */
  delete (index, length = 1) {
    if (this.doc !== null) {
      transact(this.doc, transaction => {
        typeListDelete(transaction, this, index, length);
      });
    } else {
      /** @type {Array<any>} */ (this._prelimContent).splice(index, length);
    }
  }

  /**
   * Returns the i-th element from a YArray.
   *
   * @param {number} index The index of the element to return from the YArray
   * @return {T}
   */
  get (index) {
    return typeListGet(this, index)
  }

  /**
   * Transforms this YArray to a JavaScript Array.
   *
   * @return {Array<T>}
   */
  toArray () {
    return typeListToArray(this)
  }

  /**
   * Transforms a range of this YArray to a JavaScript Array.
   *
   * @param {number} [start]
   * @param {number} [end]
   * @return {Array<T>}
   */
  slice (start = 0, end = this.length) {
    return typeListSlice(this, start, end)
  }

  /**
   * Transforms this Shared Type to a JSON object.
   *
   * @return {Array<any>}
   */
  toJSON () {
    return this.map(c => c instanceof AbstractType ? c.toJSON() : c)
  }

  /**
   * Returns an Array with the result of calling a provided function on every
   * element of this YArray.
   *
   * @template M
   * @param {function(T,number,YArray<T>):M} f Function that produces an element of the new Array
   * @return {Array<M>} A new array with each element being the result of the
   *                    callback function
   */
  map (f) {
    return typeListMap(this, /** @type {any} */ (f))
  }

  /**
   * Executes a provided function once on every element of this YArray.
   *
   * @param {function(T,number,YArray<T>):void} f A function to execute on every element of this YArray.
   */
  forEach (f) {
    typeListForEach(this, f);
  }

  /**
   * @return {IterableIterator<T>}
   */
  [Symbol.iterator] () {
    return typeListCreateIterator(this)
  }

  /**
   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
   */
  _write (encoder) {
    encoder.writeTypeRef(YArrayRefID);
  }
}
|
||
|
||
/**
 * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder (unused — a YArray ref carries no extra type info)
 *
 * @private
 * @function
 */
const readYArray = decoder => new YArray();
|
||
|
||
/**
 * @template T
 * @extends YEvent<YMap<T>>
 * Event that describes the changes on a YMap.
 */
class YMapEvent extends YEvent {
  /**
   * @param {YMap<T>} ymap The YMap that changed.
   * @param {Transaction} transaction
   * @param {Set<any>} subs The keys that changed.
   */
  constructor (ymap, transaction, subs) {
    super(ymap, transaction);
    this.keysChanged = subs;
  }
}
|
||
|
||
/**
 * @template MapType
 * A shared Map implementation.
 *
 * @extends AbstractType<YMapEvent<MapType>>
 * @implements {Iterable<MapType>}
 */
class YMap extends AbstractType {
  /**
   * @param {Iterable<readonly [string, any]>=} entries - an optional iterable to initialize the YMap
   */
  constructor (entries) {
    super();
    /**
     * Buffers writes performed before this type is integrated into a Doc.
     * @type {Map<string,any>?}
     * @private
     */
    this._prelimContent = entries === undefined ? new Map() : new Map(entries);
  }

  /**
   * Integrate this type into the Yjs instance.
   *
   * * Save this struct in the os
   * * This type is sent to other client
   * * Observer functions are fired
   *
   * @param {Doc} y The Yjs instance
   * @param {Item} item
   */
  _integrate (y, item) {
    super._integrate(y, item);
    // Replay everything that was written before integration, then drop the buffer.
    for (const [key, value] of /** @type {Map<string, any>} */ (this._prelimContent)) {
      this.set(key, value);
    }
    this._prelimContent = null;
  }

  /**
   * Create an empty copy target used during type duplication.
   */
  _copy () {
    return new YMap()
  }

  /**
   * Deep-copy this map. Nested shared types are cloned recursively.
   *
   * @return {YMap<MapType>}
   */
  clone () {
    const copied = new YMap();
    this.forEach((value, key) => {
      copied.set(key, value instanceof AbstractType ? value.clone() : value);
    });
    return copied
  }

  /**
   * Creates YMapEvent and calls observers.
   *
   * @param {Transaction} transaction
   * @param {Set<null|string>} parentSubs Keys changed on this type. `null` if list was modified.
   */
  _callObserver (transaction, parentSubs) {
    callTypeObservers(this, transaction, new YMapEvent(this, transaction, parentSubs));
  }

  /**
   * Transforms this Shared Type to a JSON object.
   *
   * @return {Object<string,any>}
   */
  toJSON () {
    /**
     * @type {Object<string,MapType>}
     */
    const result = {};
    for (const [key, item] of this._map) {
      if (!item.deleted) {
        // The last element of an item's content is the currently visible value.
        const v = item.content.getContent()[item.length - 1];
        result[key] = v instanceof AbstractType ? v.toJSON() : v;
      }
    }
    return result
  }

  /**
   * Returns the size of the YMap (count of key/value pairs)
   *
   * @return {number}
   */
  get size () {
    // createMapIterator already filters out deleted entries.
    let count = 0;
    for (const _entry of createMapIterator(this._map)) {
      count++;
    }
    return count
  }

  /**
   * Returns the keys for each element in the YMap Type.
   *
   * @return {IterableIterator<string>}
   */
  keys () {
    return iteratorMap(createMapIterator(this._map), /** @param {any} entry */ entry => entry[0])
  }

  /**
   * Returns the values for each element in the YMap Type.
   *
   * @return {IterableIterator<any>}
   */
  values () {
    return iteratorMap(createMapIterator(this._map), /** @param {any} entry */ entry => {
      const item = entry[1];
      return item.content.getContent()[item.length - 1]
    })
  }

  /**
   * Returns an Iterator of [key, value] pairs
   *
   * @return {IterableIterator<any>}
   */
  entries () {
    return iteratorMap(createMapIterator(this._map), /** @param {any} entry */ entry => {
      const item = entry[1];
      return [entry[0], item.content.getContent()[item.length - 1]]
    })
  }

  /**
   * Executes a provided function once on every key-value pair.
   *
   * @param {function(MapType,string,YMap<MapType>):void} f A function to execute on every element of this YMap.
   */
  forEach (f) {
    for (const [key, item] of this._map) {
      if (item.deleted) {
        continue
      }
      f(item.content.getContent()[item.length - 1], key, this);
    }
  }

  /**
   * Returns an Iterator of [key, value] pairs
   *
   * @return {IterableIterator<any>}
   */
  [Symbol.iterator] () {
    return this.entries()
  }

  /**
   * Remove a specified element from this YMap.
   *
   * @param {string} key The key of the element to remove.
   */
  delete (key) {
    if (this.doc === null) {
      // Not integrated yet: operate on the preliminary buffer.
      /** @type {Map<string, any>} */ (this._prelimContent).delete(key);
      return
    }
    transact(this.doc, transaction => {
      typeMapDelete(transaction, this, key);
    });
  }

  /**
   * Adds or updates an element with a specified key and value.
   *
   * @param {string} key The key of the element to add to this YMap
   * @param {MapType} value The value of the element to add
   * @return {MapType} The value that was set.
   */
  set (key, value) {
    if (this.doc === null) {
      /** @type {Map<string, any>} */ (this._prelimContent).set(key, value);
    } else {
      transact(this.doc, transaction => {
        typeMapSet(transaction, this, key, value);
      });
    }
    return value
  }

  /**
   * Returns a specified element from this YMap.
   *
   * @param {string} key
   * @return {MapType|undefined}
   */
  get (key) {
    return /** @type {any} */ (typeMapGet(this, key))
  }

  /**
   * Returns a boolean indicating whether the specified key exists or not.
   *
   * @param {string} key The key to test.
   * @return {boolean}
   */
  has (key) {
    return typeMapHas(this, key)
  }

  /**
   * Removes all elements from this YMap.
   */
  clear () {
    if (this.doc === null) {
      /** @type {Map<string, any>} */ (this._prelimContent).clear();
      return
    }
    // Delete every visible key inside a single transaction.
    transact(this.doc, transaction => {
      this.forEach((_value, key, map) => {
        typeMapDelete(transaction, map, key);
      });
    });
  }

  /**
   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
   */
  _write (encoder) {
    encoder.writeTypeRef(YMapRefID);
  }
}
|
||
|
||
/**
 * Deserializer counterpart of `YMap._write`. Map type refs carry no extra
 * payload, so a fresh, empty YMap is returned.
 *
 * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
 *
 * @private
 * @function
 */
const readYMap = decoder => {
  return new YMap()
};
|
||
|
||
/**
 * Compare two attribute values. Primitives are compared by identity; plain
 * objects are compared flat (one level deep) via `equalFlat`.
 *
 * @param {any} a
 * @param {any} b
 * @return {boolean}
 */
const equalAttrs = (a, b) => {
  // Fast path: identical primitives or the same object reference.
  if (a === b) {
    return true
  }
  // Deep comparison only applies when both sides are non-null objects.
  return typeof a === 'object' && typeof b === 'object' && a && b && equalFlat(a, b)
};
|
||
|
||
/**
 * Cursor-like helper that tracks a position inside a YText item list together
 * with the formatting attributes that are active at that position.
 */
class ItemTextListPosition {
  /**
   * @param {Item|null} left
   * @param {Item|null} right
   * @param {number} index
   * @param {Map<string,any>} currentAttributes
   */
  constructor (left, right, index, currentAttributes) {
    this.left = left;
    this.right = right;
    this.index = index;
    this.currentAttributes = currentAttributes;
  }

  /**
   * Advance the cursor by one item.
   * Only call this if you know that this.right is defined.
   */
  forward () {
    const item = this.right;
    if (item === null) {
      unexpectedCase();
    }
    if (!item.deleted) {
      if (item.content.constructor === ContentFormat) {
        // Formatting items occupy no index positions; they only toggle
        // the currently active attributes.
        updateCurrentAttributes(this.currentAttributes, /** @type {ContentFormat} */ (item.content));
      } else {
        this.index += item.length;
      }
    }
    this.left = item;
    this.right = item.right;
  }
}
|
||
|
||
/**
 * Advance `pos` by `count` visible characters, splitting the item at the
 * boundary when the target index falls inside an item.
 *
 * @param {Transaction} transaction
 * @param {ItemTextListPosition} pos
 * @param {number} count steps to move forward
 * @return {ItemTextListPosition}
 *
 * @private
 * @function
 */
const findNextPosition = (transaction, pos, count) => {
  while (pos.right !== null && count > 0) {
    switch (pos.right.content.constructor) {
      case ContentFormat:
        // Format items have no length; they only update the active attributes.
        if (!pos.right.deleted) {
          updateCurrentAttributes(pos.currentAttributes, /** @type {ContentFormat} */ (pos.right.content));
        }
        break
      default:
        if (!pos.right.deleted) {
          if (count < pos.right.length) {
            // split right
            // getItemCleanStart truncates pos.right in place, so the index /
            // count updates below step exactly onto the requested position.
            getItemCleanStart(transaction, createID(pos.right.id.client, pos.right.id.clock + count));
          }
          pos.index += pos.right.length;
          count -= pos.right.length;
        }
        break
    }
    pos.left = pos.right;
    pos.right = pos.right.right;
    // pos.forward() - we don't forward because that would halve the performance because we already do the checks above
  }
  return pos
};
|
||
|
||
/**
 * Locate the list position for character `index` in `parent`, using a cached
 * search marker as a shortcut when one is available.
 *
 * @param {Transaction} transaction
 * @param {AbstractType<any>} parent
 * @param {number} index
 * @return {ItemTextListPosition}
 *
 * @private
 * @function
 */
const findPosition = (transaction, parent, index) => {
  const currentAttributes = new Map();
  const marker = findMarker(parent, index);
  if (!marker) {
    // No marker cached: walk forward from the very start of the list.
    const start = new ItemTextListPosition(null, parent._start, 0, currentAttributes);
    return findNextPosition(transaction, start, index)
  }
  // Resume from the marker and only walk the remaining distance.
  const fromMarker = new ItemTextListPosition(marker.p.left, marker.p, marker.index, currentAttributes);
  return findNextPosition(transaction, fromMarker, index - marker.index)
};
|
||
|
||
/**
 * Negate applied formats
 *
 * Inserts ContentFormat items that restore the attribute values recorded in
 * `negatedAttributes` at `currPos`, skipping positions where an equivalent
 * format item already exists.
 *
 * @param {Transaction} transaction
 * @param {AbstractType<any>} parent
 * @param {ItemTextListPosition} currPos
 * @param {Map<string,any>} negatedAttributes
 *
 * @private
 * @function
 */
const insertNegatedAttributes = (transaction, parent, currPos, negatedAttributes) => {
  // check if we really need to remove attributes
  while (
    currPos.right !== null && (
      currPos.right.deleted === true || (
        currPos.right.content.constructor === ContentFormat &&
        equalAttrs(negatedAttributes.get(/** @type {ContentFormat} */ (currPos.right.content).key), /** @type {ContentFormat} */ (currPos.right.content).value)
      )
    )
  ) {
    if (!currPos.right.deleted) {
      // An equivalent format already follows — no need to insert our own.
      negatedAttributes.delete(/** @type {ContentFormat} */ (currPos.right.content).key);
    }
    currPos.forward();
  }
  const doc = transaction.doc;
  const ownClientId = doc.clientID;
  negatedAttributes.forEach((val, key) => {
    const left = currPos.left;
    const right = currPos.right;
    // Create and integrate one format item per remaining negated attribute,
    // advancing the cursor past it so subsequent items chain correctly.
    const nextFormat = new Item$1(createID(ownClientId, getState(doc.store, ownClientId)), left, left && left.lastId, right, right && right.id, parent, null, new ContentFormat(key, val));
    nextFormat.integrate(transaction, 0);
    currPos.right = nextFormat;
    currPos.forward();
  });
};
|
||
|
||
/**
 * Fold a single ContentFormat into the map of currently active attributes:
 * a `null` value removes the attribute, anything else sets it.
 *
 * @param {Map<string,any>} currentAttributes
 * @param {ContentFormat} format
 *
 * @private
 * @function
 */
const updateCurrentAttributes = (currentAttributes, format) => {
  const key = format.key;
  const value = format.value;
  if (value !== null) {
    currentAttributes.set(key, value);
  } else {
    // null marks the end of a formatted range.
    currentAttributes.delete(key);
  }
};
|
||
|
||
/**
 * Advance `currPos` past items that don't require a change: deleted items and
 * format items whose value already equals the requested attribute value.
 *
 * @param {ItemTextListPosition} currPos
 * @param {Object<string,any>} attributes
 *
 * @private
 * @function
 */
const minimizeAttributeChanges = (currPos, attributes) => {
  // go right while attributes[right.key] === right.value (or right is deleted)
  while (currPos.right !== null) {
    const right = currPos.right;
    const redundant = right.deleted || (
      right.content.constructor === ContentFormat &&
      equalAttrs(attributes[/** @type {ContentFormat} */ (right.content).key] || null, /** @type {ContentFormat} */ (right.content).value)
    );
    if (!redundant) {
      break
    }
    currPos.forward();
  }
};
|
||
|
||
/**
 * Insert format-start items for every attribute that differs from the
 * attributes currently active at `currPos`.
 *
 * @param {Transaction} transaction
 * @param {AbstractType<any>} parent
 * @param {ItemTextListPosition} currPos
 * @param {Object<string,any>} attributes
 * @return {Map<string,any>} The previous values, to be re-applied (negated)
 *   after the formatted range by `insertNegatedAttributes`.
 *
 * @private
 * @function
 **/
const insertAttributes = (transaction, parent, currPos, attributes) => {
  const doc = transaction.doc;
  const ownClientId = doc.clientID;
  const negatedAttributes = new Map();
  // insert format-start items
  for (const key in attributes) {
    const val = attributes[key];
    const currentVal = currPos.currentAttributes.get(key) || null;
    if (!equalAttrs(currentVal, val)) {
      // save negated attribute (set null if currentVal undefined)
      negatedAttributes.set(key, currentVal);
      const { left, right } = currPos;
      // Integrate the new format item and step the cursor past it.
      currPos.right = new Item$1(createID(ownClientId, getState(doc.store, ownClientId)), left, left && left.lastId, right, right && right.id, parent, null, new ContentFormat(key, val));
      currPos.right.integrate(transaction, 0);
      currPos.forward();
    }
  }
  return negatedAttributes
};
|
||
|
||
/**
 * Insert `text` (string, embed object, or nested shared type) at `currPos`,
 * applying `attributes` to it and restoring the surrounding formatting after it.
 *
 * @param {Transaction} transaction
 * @param {AbstractType<any>} parent
 * @param {ItemTextListPosition} currPos
 * @param {string|object|AbstractType<any>} text
 * @param {Object<string,any>} attributes
 *
 * @private
 * @function
 **/
const insertText = (transaction, parent, currPos, text, attributes) => {
  // Any active attribute not explicitly requested must be switched off (null)
  // for the inserted range.
  currPos.currentAttributes.forEach((val, key) => {
    if (attributes[key] === undefined) {
      attributes[key] = null;
    }
  });
  const doc = transaction.doc;
  const ownClientId = doc.clientID;
  minimizeAttributeChanges(currPos, attributes);
  const negatedAttributes = insertAttributes(transaction, parent, currPos, attributes);
  // insert content
  const content = text.constructor === String ? new ContentString(/** @type {string} */ (text)) : (text instanceof AbstractType ? new ContentType(text) : new ContentEmbed(text));
  let { left, right, index } = currPos;
  if (parent._searchMarker) {
    // Shift cached search markers to account for the inserted length.
    updateMarkerChanges(parent._searchMarker, currPos.index, content.getLength());
  }
  right = new Item$1(createID(ownClientId, getState(doc.store, ownClientId)), left, left && left.lastId, right, right && right.id, parent, null, content);
  right.integrate(transaction, 0);
  // Restore the pre-insert index, then forward() accounts for the new item.
  currPos.right = right;
  currPos.index = index;
  currPos.forward();
  insertNegatedAttributes(transaction, parent, currPos, negatedAttributes);
};
|
||
|
||
/**
 * Apply formatting `attributes` to the next `length` characters starting at
 * `currPos` (the implementation behind Quill-style `retain` with attributes).
 *
 * @param {Transaction} transaction
 * @param {AbstractType<any>} parent
 * @param {ItemTextListPosition} currPos
 * @param {number} length
 * @param {Object<string,any>} attributes
 *
 * @private
 * @function
 */
const formatText = (transaction, parent, currPos, length, attributes) => {
  const doc = transaction.doc;
  const ownClientId = doc.clientID;
  minimizeAttributeChanges(currPos, attributes);
  const negatedAttributes = insertAttributes(transaction, parent, currPos, attributes);
  // iterate until first non-format or null is found
  // delete all formats with attributes[format.key] != null
  // also check the attributes after the first non-format as we do not want to insert redundant negated attributes there
  // eslint-disable-next-line no-labels
  iterationLoop: while (
    currPos.right !== null &&
    (length > 0 ||
      (
        negatedAttributes.size > 0 &&
        (currPos.right.deleted || currPos.right.content.constructor === ContentFormat)
      )
    )
  ) {
    if (!currPos.right.deleted) {
      switch (currPos.right.content.constructor) {
        case ContentFormat: {
          const { key, value } = /** @type {ContentFormat} */ (currPos.right.content);
          const attr = attributes[key];
          if (attr !== undefined) {
            if (equalAttrs(attr, value)) {
              // Existing format already matches — nothing to negate later.
              negatedAttributes.delete(key);
            } else {
              if (length === 0) {
                // no need to further extend negatedAttributes
                // eslint-disable-next-line no-labels
                break iterationLoop
              }
              negatedAttributes.set(key, value);
            }
            // Superseded format item; remove it.
            currPos.right.delete(transaction);
          } else {
            currPos.currentAttributes.set(key, value);
          }
          break
        }
        default:
          if (length < currPos.right.length) {
            // Split so only the first `length` characters fall in range;
            // getItemCleanStart truncates currPos.right in place.
            getItemCleanStart(transaction, createID(currPos.right.id.client, currPos.right.id.clock + length));
          }
          length -= currPos.right.length;
          break
      }
    }
    currPos.forward();
  }
  // Quill just assumes that the editor starts with a newline and that it always
  // ends with a newline. We only insert that newline when a new newline is
  // inserted - i.e when length is bigger than type.length
  if (length > 0) {
    let newlines = '';
    for (; length > 0; length--) {
      newlines += '\n';
    }
    currPos.right = new Item$1(createID(ownClientId, getState(doc.store, ownClientId)), currPos.left, currPos.left && currPos.left.lastId, currPos.right, currPos.right && currPos.right.id, parent, null, new ContentString(newlines));
    currPos.right.integrate(transaction, 0);
    currPos.forward();
  }
  insertNegatedAttributes(transaction, parent, currPos, negatedAttributes);
};
|
||
|
||
/**
 * Call this function after string content has been deleted in order to
 * clean up formatting Items.
 *
 * @param {Transaction} transaction
 * @param {Item} start
 * @param {Item|null} curr exclusive end, automatically iterates to the next Content Item
 * @param {Map<string,any>} startAttributes
 * @param {Map<string,any>} currAttributes
 * @return {number} The amount of formatting Items deleted.
 *
 * @function
 */
const cleanupFormattingGap = (transaction, start, curr, startAttributes, currAttributes) => {
  let end = curr;
  const endAttributes = copy(currAttributes);
  // Extend the scan window past trailing deleted / non-countable items, and
  // collect the attributes that are active at the real end of the gap.
  while (end && (!end.countable || end.deleted)) {
    if (!end.deleted && end.content.constructor === ContentFormat) {
      updateCurrentAttributes(endAttributes, /** @type {ContentFormat} */ (end.content));
    }
    end = end.right;
  }
  let cleanups = 0;
  let reachedEndOfCurr = false;
  while (start !== end) {
    if (curr === start) {
      reachedEndOfCurr = true;
    }
    if (!start.deleted) {
      const content = start.content;
      switch (content.constructor) {
        case ContentFormat: {
          const { key, value } = /** @type {ContentFormat} */ (content);
          if ((endAttributes.get(key) || null) !== value || (startAttributes.get(key) || null) === value) {
            // Either this format is overwritten or it is not necessary because the attribute already existed.
            start.delete(transaction);
            cleanups++;
            // Keep currAttributes consistent with the deletion we just made.
            if (!reachedEndOfCurr && (currAttributes.get(key) || null) === value && (startAttributes.get(key) || null) !== value) {
              currAttributes.delete(key);
            }
          }
          break
        }
      }
    }
    start = /** @type {Item} */ (start.right);
  }
  return cleanups
};
|
||
|
||
/**
 * Remove duplicate ContentFormat items inside a gap of deleted/non-countable
 * items around `item`, keeping only the last format per key ("contextless"
 * because it doesn't need the attributes active at that position).
 *
 * @param {Transaction} transaction
 * @param {Item | null} item
 */
const cleanupContextlessFormattingGap = (transaction, item) => {
  // iterate until item.right is null or content
  while (item && item.right && (item.right.deleted || !item.right.countable)) {
    item = item.right;
  }
  const attrs = new Set();
  // iterate back until a content item is found
  while (item && (item.deleted || !item.countable)) {
    if (!item.deleted && item.content.constructor === ContentFormat) {
      const key = /** @type {ContentFormat} */ (item.content).key;
      if (attrs.has(key)) {
        // A later format for this key already exists — this one is redundant.
        item.delete(transaction);
      } else {
        attrs.add(key);
      }
    }
    item = item.left;
  }
};
|
||
|
||
/**
 * This function is experimental and subject to change / be removed.
 *
 * Ideally, we don't need this function at all. Formatting attributes should be cleaned up
 * automatically after each change. This function iterates twice over the complete YText type
 * and removes unnecessary formatting attributes. This is also helpful for testing.
 *
 * This function won't be exported anymore as soon as there is confidence that the YText type works as intended.
 *
 * @param {YText} type
 * @return {number} How many formatting attributes have been cleaned up.
 */
const cleanupYTextFormatting = type => {
  let res = 0;
  transact(/** @type {Doc} */ (type.doc), transaction => {
    let start = /** @type {Item} */ (type._start);
    let end = type._start;
    let startAttributes = create$6();
    const currentAttributes = copy(startAttributes);
    while (end) {
      if (end.deleted === false) {
        switch (end.content.constructor) {
          case ContentFormat:
            updateCurrentAttributes(currentAttributes, /** @type {ContentFormat} */ (end.content));
            break
          default:
            // Reached real content: clean the format gap between start and
            // here, then restart the window at this item.
            res += cleanupFormattingGap(transaction, start, end, startAttributes, currentAttributes);
            startAttributes = copy(currentAttributes);
            start = end;
            break
        }
      }
      end = end.right;
    }
  });
  return res
};
|
||
|
||
/**
 * Delete `length` visible characters starting at `currPos`, then clean up
 * formatting items that became redundant inside the deleted range.
 *
 * @param {Transaction} transaction
 * @param {ItemTextListPosition} currPos
 * @param {number} length
 * @return {ItemTextListPosition}
 *
 * @private
 * @function
 */
const deleteText = (transaction, currPos, length) => {
  const startLength = length;
  const startAttrs = copy(currPos.currentAttributes);
  const start = currPos.right;
  while (length > 0 && currPos.right !== null) {
    if (currPos.right.deleted === false) {
      switch (currPos.right.content.constructor) {
        case ContentType:
        case ContentEmbed:
        case ContentString:
          if (length < currPos.right.length) {
            // Split so only the first `length` characters get deleted.
            getItemCleanStart(transaction, createID(currPos.right.id.client, currPos.right.id.clock + length));
          }
          length -= currPos.right.length;
          currPos.right.delete(transaction);
          break
      }
    }
    currPos.forward();
  }
  if (start) {
    cleanupFormattingGap(transaction, start, currPos.right, startAttrs, currPos.currentAttributes);
  }
  const parent = /** @type {AbstractType<any>} */ (/** @type {Item} */ (currPos.left || currPos.right).parent);
  if (parent._searchMarker) {
    // Negative delta: markers after the deletion shift left by the amount
    // actually removed (startLength - remaining length).
    updateMarkerChanges(parent._searchMarker, currPos.index, -startLength + length);
  }
  return currPos
};
|
||
|
||
/**
|
||
* The Quill Delta format represents changes on a text document with
|
||
 * formatting information. For more information visit {@link https://quilljs.com/docs/delta/|Quill Delta}
|
||
*
|
||
* @example
|
||
* {
|
||
* ops: [
|
||
* { insert: 'Gandalf', attributes: { bold: true } },
|
||
* { insert: ' the ' },
|
||
* { insert: 'Grey', attributes: { color: '#cccccc' } }
|
||
* ]
|
||
* }
|
||
*
|
||
*/
|
||
|
||
/**
|
||
* Attributes that can be assigned to a selection of text.
|
||
*
|
||
* @example
|
||
* {
|
||
* bold: true,
|
||
* font-size: '40px'
|
||
* }
|
||
*
|
||
* @typedef {Object} TextAttributes
|
||
*/
|
||
|
||
/**
 * @extends YEvent<YText>
 * Event that describes the changes on a YText type.
 */
class YTextEvent extends YEvent {
  /**
   * @param {YText} ytext
   * @param {Transaction} transaction
   * @param {Set<any>} subs The keys that changed
   */
  constructor (ytext, transaction, subs) {
    super(ytext, transaction);
    /**
     * Whether the children changed.
     * @type {Boolean}
     * @private
     */
    this.childListChanged = false;
    /**
     * Set of all changed attributes.
     * @type {Set<string>}
     */
    this.keysChanged = new Set();
    // `null` in subs marks a list (content) change; any other value is a
    // changed attribute key.
    subs.forEach((sub) => {
      if (sub === null) {
        this.childListChanged = true;
      } else {
        this.keysChanged.add(sub);
      }
    });
  }

  /**
   * Lazily-computed, cached change description.
   * NOTE(review): `_changes` is presumably initialized to null by the YEvent
   * base class — confirm against its constructor.
   *
   * @type {{added:Set<Item>,deleted:Set<Item>,keys:Map<string,{action:'add'|'update'|'delete',oldValue:any}>,delta:Array<{insert?:Array<any>|string, delete?:number, retain?:number}>}}
   */
  get changes () {
    if (this._changes === null) {
      /**
       * @type {{added:Set<Item>,deleted:Set<Item>,keys:Map<string,{action:'add'|'update'|'delete',oldValue:any}>,delta:Array<{insert?:Array<any>|string|AbstractType<any>|object, delete?:number, retain?:number}>}}
       */
      const changes = {
        keys: this.keys,
        delta: this.delta,
        added: new Set(),
        deleted: new Set()
      };
      this._changes = changes;
    }
    return /** @type {any} */ (this._changes)
  }

  /**
   * Compute the changes in the delta format.
   * A {@link https://quilljs.com/docs/delta/|Quill Delta}) that represents the changes on the document.
   *
   * @type {Array<{insert?:string|object|AbstractType<any>, delete?:number, retain?:number, attributes?: Object<string,any>}>}
   *
   * @public
   */
  get delta () {
    if (this._delta === null) {
      const y = /** @type {Doc} */ (this.target.doc);
      /**
       * @type {Array<{insert?:string|object|AbstractType<any>, delete?:number, retain?:number, attributes?: Object<string,any>}>}
       */
      const delta = [];
      transact(y, transaction => {
        const currentAttributes = new Map(); // saves all current attributes for insert
        const oldAttributes = new Map();
        let item = this.target._start;
        /**
         * The op currently being accumulated: 'insert' | 'delete' | 'retain' | null.
         * @type {string?}
         */
        let action = null;
        /**
         * @type {Object<string,any>}
         */
        const attributes = {}; // counts added or removed new attributes for retain
        /**
         * @type {string|object}
         */
        let insert = '';
        let retain = 0;
        let deleteLen = 0;
        // Flush the currently accumulated op (if any) into `delta`.
        const addOp = () => {
          if (action !== null) {
            /**
             * @type {any}
             */
            let op;
            switch (action) {
              case 'delete':
                op = { delete: deleteLen };
                deleteLen = 0;
                break
              case 'insert':
                op = { insert };
                if (currentAttributes.size > 0) {
                  op.attributes = {};
                  currentAttributes.forEach((value, key) => {
                    if (value !== null) {
                      op.attributes[key] = value;
                    }
                  });
                }
                insert = '';
                break
              case 'retain':
                op = { retain };
                if (Object.keys(attributes).length > 0) {
                  op.attributes = {};
                  for (const key in attributes) {
                    op.attributes[key] = attributes[key];
                  }
                }
                retain = 0;
                break
            }
            delta.push(op);
            action = null;
          }
        };
        // Walk the whole item list, classifying each item as inserted,
        // deleted, or retained relative to this transaction.
        while (item !== null) {
          switch (item.content.constructor) {
            case ContentType:
            case ContentEmbed:
              // Embeds/nested types count as a single character each.
              if (this.adds(item)) {
                if (!this.deletes(item)) {
                  addOp();
                  action = 'insert';
                  insert = item.content.getContent()[0];
                  addOp();
                }
              } else if (this.deletes(item)) {
                if (action !== 'delete') {
                  addOp();
                  action = 'delete';
                }
                deleteLen += 1;
              } else if (!item.deleted) {
                if (action !== 'retain') {
                  addOp();
                  action = 'retain';
                }
                retain += 1;
              }
              break
            case ContentString:
              if (this.adds(item)) {
                if (!this.deletes(item)) {
                  if (action !== 'insert') {
                    addOp();
                    action = 'insert';
                  }
                  insert += /** @type {ContentString} */ (item.content).str;
                }
              } else if (this.deletes(item)) {
                if (action !== 'delete') {
                  addOp();
                  action = 'delete';
                }
                deleteLen += item.length;
              } else if (!item.deleted) {
                if (action !== 'retain') {
                  addOp();
                  action = 'retain';
                }
                retain += item.length;
              }
              break
            case ContentFormat: {
              const { key, value } = /** @type {ContentFormat} */ (item.content);
              if (this.adds(item)) {
                if (!this.deletes(item)) {
                  const curVal = currentAttributes.get(key) || null;
                  if (!equalAttrs(curVal, value)) {
                    if (action === 'retain') {
                      addOp();
                    }
                    if (equalAttrs(value, (oldAttributes.get(key) || null))) {
                      delete attributes[key];
                    } else {
                      attributes[key] = value;
                    }
                  } else if (value !== null) {
                    // Redundant newly-added format — drop it.
                    item.delete(transaction);
                  }
                }
              } else if (this.deletes(item)) {
                oldAttributes.set(key, value);
                const curVal = currentAttributes.get(key) || null;
                if (!equalAttrs(curVal, value)) {
                  if (action === 'retain') {
                    addOp();
                  }
                  attributes[key] = curVal;
                }
              } else if (!item.deleted) {
                oldAttributes.set(key, value);
                const attr = attributes[key];
                if (attr !== undefined) {
                  if (!equalAttrs(attr, value)) {
                    if (action === 'retain') {
                      addOp();
                    }
                    if (value === null) {
                      delete attributes[key];
                    } else {
                      attributes[key] = value;
                    }
                  } else if (attr !== null) { // this will be cleaned up automatically by the contextless cleanup function
                    item.delete(transaction);
                  }
                }
              }
              if (!item.deleted) {
                if (action === 'insert') {
                  addOp();
                }
                updateCurrentAttributes(currentAttributes, /** @type {ContentFormat} */ (item.content));
              }
              break
            }
          }
          item = item.right;
        }
        addOp();
        // Drop trailing retain ops that don't assign attributes — they carry
        // no information in the Delta format.
        while (delta.length > 0) {
          const lastOp = delta[delta.length - 1];
          if (lastOp.retain !== undefined && lastOp.attributes === undefined) {
            delta.pop();
          } else {
            break
          }
        }
      });
      this._delta = delta;
    }
    return /** @type {any} */ (this._delta)
  }
}
|
||
|
||
/**
|
||
* Type that represents text with formatting information.
|
||
*
|
||
* This type replaces y-richtext as this implementation is able to handle
|
||
* block formats (format information on a paragraph), embeds (complex elements
|
||
* like pictures and videos), and text formats (**bold**, *italic*).
|
||
*
|
||
* @extends AbstractType<YTextEvent>
|
||
*/
|
||
class YText extends AbstractType {
|
||
/**
|
||
* @param {String} [string] The initial value of the YText.
|
||
*/
|
||
constructor (string) {
|
||
super();
|
||
/**
|
||
* Array of pending operations on this type
|
||
* @type {Array<function():void>?}
|
||
*/
|
||
this._pending = string !== undefined ? [() => this.insert(0, string)] : [];
|
||
/**
|
||
* @type {Array<ArraySearchMarker>}
|
||
*/
|
||
this._searchMarker = [];
|
||
}
|
||
|
||
  /**
   * Number of characters of this text type.
   *
   * @type {number}
   */
  get length () {
    // _length is presumably maintained by the AbstractType list machinery as
    // items are integrated/deleted — not visible in this chunk.
    return this._length
  }
|
||
|
||
/**
|
||
* @param {Doc} y
|
||
* @param {Item} item
|
||
*/
|
||
_integrate (y, item) {
|
||
super._integrate(y, item);
|
||
try {
|
||
/** @type {Array<function>} */ (this._pending).forEach(f => f());
|
||
} catch (e) {
|
||
console.error(e);
|
||
}
|
||
this._pending = null;
|
||
}
|
||
|
||
  /**
   * Create an empty YText that serves as the copy target during type duplication.
   */
  _copy () {
    return new YText()
  }
|
||
|
||
/**
|
||
* @return {YText}
|
||
*/
|
||
clone () {
|
||
const text = new YText();
|
||
text.applyDelta(this.toDelta());
|
||
return text
|
||
}
|
||
|
||
  /**
   * Creates YTextEvent and calls observers.
   *
   * @param {Transaction} transaction
   * @param {Set<null|string>} parentSubs Keys changed on this type. `null` if list was modified.
   */
  _callObserver (transaction, parentSubs) {
    super._callObserver(transaction, parentSubs);
    const event = new YTextEvent(this, transaction, parentSubs);
    const doc = transaction.doc;
    callTypeObservers(this, transaction, event);
    // If a remote change happened, we try to cleanup potential formatting duplicates.
    if (!transaction.local) {
      // check if another formatting item was inserted
      let foundFormattingItem = false;
      for (const [client, afterClock] of transaction.afterState.entries()) {
        const clock = transaction.beforeState.get(client) || 0;
        if (afterClock === clock) {
          // No new structs from this client in this transaction.
          continue
        }
        iterateStructs(transaction, /** @type {Array<Item|GC>} */ (doc.store.clients.get(client)), clock, afterClock, item => {
          if (!item.deleted && /** @type {Item} */ (item).content.constructor === ContentFormat) {
            foundFormattingItem = true;
          }
        });
        if (foundFormattingItem) {
          break
        }
      }
      if (!foundFormattingItem) {
        // Also check whether a formatting item belonging to this type was deleted.
        iterateDeletedStructs(transaction, transaction.deleteSet, item => {
          if (item instanceof GC || foundFormattingItem) {
            return
          }
          if (item.parent === this && item.content.constructor === ContentFormat) {
            foundFormattingItem = true;
          }
        });
      }
      transact(doc, (t) => {
        if (foundFormattingItem) {
          // If a formatting item was inserted, we simply clean the whole type.
          // We need to compute currentAttributes for the current position anyway.
          cleanupYTextFormatting(this);
        } else {
          // If no formatting attribute was inserted, we can make do with contextless
          // formatting cleanups.
          // Contextless: it is not necessary to compute currentAttributes for the affected position.
          iterateDeletedStructs(t, t.deleteSet, item => {
            if (item instanceof GC) {
              return
            }
            if (item.parent === this) {
              cleanupContextlessFormattingGap(t, item);
            }
          });
        }
      });
    }
  }
|
||
|
||
/**
|
||
* Returns the unformatted string representation of this YText type.
|
||
*
|
||
* @public
|
||
*/
|
||
toString () {
|
||
let str = '';
|
||
/**
|
||
* @type {Item|null}
|
||
*/
|
||
let n = this._start;
|
||
while (n !== null) {
|
||
if (!n.deleted && n.countable && n.content.constructor === ContentString) {
|
||
str += /** @type {ContentString} */ (n.content).str;
|
||
}
|
||
n = n.right;
|
||
}
|
||
return str
|
||
}
|
||
|
||
  /**
   * Returns the unformatted string representation of this YText type.
   *
   * @return {string}
   * @public
   */
  toJSON () {
    // JSON form of a YText is simply its plain-text content.
    return this.toString()
  }
|
||
|
||
  /**
   * Apply a {@link Delta} on this shared YText type.
   *
   * @param {any} delta The changes to apply on this element.
   * @param {object} [opts]
   * @param {boolean} [opts.sanitize] Sanitize input delta. Removes ending newlines if set to true.
   *
   *
   * @public
   */
  applyDelta (delta, { sanitize = true } = {}) {
    if (this.doc !== null) {
      transact(this.doc, transaction => {
        const currPos = new ItemTextListPosition(null, this._start, 0, new Map());
        for (let i = 0; i < delta.length; i++) {
          const op = delta[i];
          if (op.insert !== undefined) {
            // Quill assumes that the content starts with an empty paragraph.
            // Yjs/Y.Text assumes that it starts empty. We always hide that
            // there is a newline at the end of the content.
            // If we omit this step, clients will see a different number of
            // paragraphs, but nothing bad will happen.
            const ins = (!sanitize && typeof op.insert === 'string' && i === delta.length - 1 && currPos.right === null && op.insert.slice(-1) === '\n') ? op.insert.slice(0, -1) : op.insert;
            if (typeof ins !== 'string' || ins.length > 0) {
              insertText(transaction, this, currPos, ins, op.attributes || {});
            }
          } else if (op.retain !== undefined) {
            formatText(transaction, this, currPos, op.retain, op.attributes || {});
          } else if (op.delete !== undefined) {
            deleteText(transaction, currPos, op.delete);
          }
        }
      });
    } else {
      // Not integrated yet: queue the call for replay in _integrate.
      // NOTE(review): the queued replay drops `opts` (sanitize) — confirm this
      // is intentional before relying on sanitize for pre-integration deltas.
      /** @type {Array<function>} */ (this._pending).push(() => this.applyDelta(delta));
    }
  }
|
||
|
||
/**
 * Returns the Delta representation of this YText type.
 *
 * When `snapshot` / `prevSnapshot` are given, content is diffed between the
 * two document states and annotated with a `ychange` attribute
 * (`{ type: 'added' | 'removed' }`, or whatever `computeYChange` returns).
 *
 * @param {Snapshot} [snapshot]
 * @param {Snapshot} [prevSnapshot]
 * @param {function('removed' | 'added', ID):any} [computeYChange]
 * @return {any} The Delta representation of this type.
 *
 * @public
 */
toDelta (snapshot, prevSnapshot, computeYChange) {
  /**
   * @type{Array<any>}
   */
  const ops = [];
  // Attributes currently in effect at the walk position (ContentFormat items
  // toggle entries here as we pass them).
  const currentAttributes = new Map();
  const doc = /** @type {Doc} */ (this.doc);
  // Pending run of plain characters, flushed into `ops` by packStr().
  let str = '';
  let n = this._start;
  // Flush the accumulated string (if any) into ops, stamped with a copy of
  // the currently active attributes.
  function packStr () {
    if (str.length > 0) {
      // pack str with attributes to ops
      /**
       * @type {Object<string,any>}
       */
      const attributes = {};
      let addAttributes = false;
      currentAttributes.forEach((value, key) => {
        addAttributes = true;
        attributes[key] = value;
      });
      /**
       * @type {Object<string,any>}
       */
      const op = { insert: str };
      if (addAttributes) {
        op.attributes = attributes;
      }
      ops.push(op);
      str = '';
    }
  }
  // snapshots are merged again after the transaction, so we need to keep the
  // transaction alive until we are done
  transact(doc, transaction => {
    // Split items on snapshot boundaries so visibility checks below are exact.
    if (snapshot) {
      splitSnapshotAffectedStructs(transaction, snapshot);
    }
    if (prevSnapshot) {
      splitSnapshotAffectedStructs(transaction, prevSnapshot);
    }
    while (n !== null) {
      // An item is emitted when it is visible in `snapshot`, or when it was
      // visible in `prevSnapshot` (then it shows up as 'removed').
      if (isVisible(n, snapshot) || (prevSnapshot !== undefined && isVisible(n, prevSnapshot))) {
        switch (n.content.constructor) {
          case ContentString: {
            const cur = currentAttributes.get('ychange');
            if (snapshot !== undefined && !isVisible(n, snapshot)) {
              // Present before but not now: mark run as removed. Only flush
              // when the ychange annotation actually changes.
              if (cur === undefined || cur.user !== n.id.client || cur.type !== 'removed') {
                packStr();
                currentAttributes.set('ychange', computeYChange ? computeYChange('removed', n.id) : { type: 'removed' });
              }
            } else if (prevSnapshot !== undefined && !isVisible(n, prevSnapshot)) {
              // Newly visible relative to prevSnapshot: mark run as added.
              if (cur === undefined || cur.user !== n.id.client || cur.type !== 'added') {
                packStr();
                currentAttributes.set('ychange', computeYChange ? computeYChange('added', n.id) : { type: 'added' });
              }
            } else if (cur !== undefined) {
              // Unchanged content following an annotated run: close it out.
              packStr();
              currentAttributes.delete('ychange');
            }
            str += /** @type {ContentString} */ (n.content).str;
            break
          }
          case ContentType:
          case ContentEmbed: {
            // Embeds always terminate the current string run and become their
            // own op carrying the active attributes.
            packStr();
            /**
             * @type {Object<string,any>}
             */
            const op = {
              insert: n.content.getContent()[0]
            };
            if (currentAttributes.size > 0) {
              const attrs = /** @type {Object<string,any>} */ ({});
              op.attributes = attrs;
              currentAttributes.forEach((value, key) => {
                attrs[key] = value;
              });
            }
            ops.push(op);
            break
          }
          case ContentFormat:
            // Formatting markers change the active attribute set; flush first
            // so the preceding run keeps its old attributes.
            if (isVisible(n, snapshot)) {
              packStr();
              updateCurrentAttributes(currentAttributes, /** @type {ContentFormat} */ (n.content));
            }
            break
        }
      }
      n = n.right;
    }
    packStr();
  }, 'cleanup');
  return ops
}
|
||
|
||
/**
|
||
* Insert text at a given index.
|
||
*
|
||
* @param {number} index The index at which to start inserting.
|
||
* @param {String} text The text to insert at the specified position.
|
||
* @param {TextAttributes} [attributes] Optionally define some formatting
|
||
* information to apply on the inserted
|
||
* Text.
|
||
* @public
|
||
*/
|
||
insert (index, text, attributes) {
|
||
if (text.length <= 0) {
|
||
return
|
||
}
|
||
const y = this.doc;
|
||
if (y !== null) {
|
||
transact(y, transaction => {
|
||
const pos = findPosition(transaction, this, index);
|
||
if (!attributes) {
|
||
attributes = {};
|
||
// @ts-ignore
|
||
pos.currentAttributes.forEach((v, k) => { attributes[k] = v; });
|
||
}
|
||
insertText(transaction, this, pos, text, attributes);
|
||
});
|
||
} else {
|
||
/** @type {Array<function>} */ (this._pending).push(() => this.insert(index, text, attributes));
|
||
}
|
||
}
|
||
|
||
/**
|
||
* Inserts an embed at a index.
|
||
*
|
||
* @param {number} index The index to insert the embed at.
|
||
* @param {Object | AbstractType<any>} embed The Object that represents the embed.
|
||
* @param {TextAttributes} attributes Attribute information to apply on the
|
||
* embed
|
||
*
|
||
* @public
|
||
*/
|
||
insertEmbed (index, embed, attributes = {}) {
|
||
const y = this.doc;
|
||
if (y !== null) {
|
||
transact(y, transaction => {
|
||
const pos = findPosition(transaction, this, index);
|
||
insertText(transaction, this, pos, embed, attributes);
|
||
});
|
||
} else {
|
||
/** @type {Array<function>} */ (this._pending).push(() => this.insertEmbed(index, embed, attributes));
|
||
}
|
||
}
|
||
|
||
/**
|
||
* Deletes text starting from an index.
|
||
*
|
||
* @param {number} index Index at which to start deleting.
|
||
* @param {number} length The number of characters to remove. Defaults to 1.
|
||
*
|
||
* @public
|
||
*/
|
||
delete (index, length) {
|
||
if (length === 0) {
|
||
return
|
||
}
|
||
const y = this.doc;
|
||
if (y !== null) {
|
||
transact(y, transaction => {
|
||
deleteText(transaction, findPosition(transaction, this, index), length);
|
||
});
|
||
} else {
|
||
/** @type {Array<function>} */ (this._pending).push(() => this.delete(index, length));
|
||
}
|
||
}
|
||
|
||
/**
|
||
* Assigns properties to a range of text.
|
||
*
|
||
* @param {number} index The position where to start formatting.
|
||
* @param {number} length The amount of characters to assign properties to.
|
||
* @param {TextAttributes} attributes Attribute information to apply on the
|
||
* text.
|
||
*
|
||
* @public
|
||
*/
|
||
format (index, length, attributes) {
|
||
if (length === 0) {
|
||
return
|
||
}
|
||
const y = this.doc;
|
||
if (y !== null) {
|
||
transact(y, transaction => {
|
||
const pos = findPosition(transaction, this, index);
|
||
if (pos.right === null) {
|
||
return
|
||
}
|
||
formatText(transaction, this, pos, length, attributes);
|
||
});
|
||
} else {
|
||
/** @type {Array<function>} */ (this._pending).push(() => this.format(index, length, attributes));
|
||
}
|
||
}
|
||
|
||
/**
|
||
* Removes an attribute.
|
||
*
|
||
* @note Xml-Text nodes don't have attributes. You can use this feature to assign properties to complete text-blocks.
|
||
*
|
||
* @param {String} attributeName The attribute name that is to be removed.
|
||
*
|
||
* @public
|
||
*/
|
||
removeAttribute (attributeName) {
|
||
if (this.doc !== null) {
|
||
transact(this.doc, transaction => {
|
||
typeMapDelete(transaction, this, attributeName);
|
||
});
|
||
} else {
|
||
/** @type {Array<function>} */ (this._pending).push(() => this.removeAttribute(attributeName));
|
||
}
|
||
}
|
||
|
||
/**
|
||
* Sets or updates an attribute.
|
||
*
|
||
* @note Xml-Text nodes don't have attributes. You can use this feature to assign properties to complete text-blocks.
|
||
*
|
||
* @param {String} attributeName The attribute name that is to be set.
|
||
* @param {any} attributeValue The attribute value that is to be set.
|
||
*
|
||
* @public
|
||
*/
|
||
setAttribute (attributeName, attributeValue) {
|
||
if (this.doc !== null) {
|
||
transact(this.doc, transaction => {
|
||
typeMapSet(transaction, this, attributeName, attributeValue);
|
||
});
|
||
} else {
|
||
/** @type {Array<function>} */ (this._pending).push(() => this.setAttribute(attributeName, attributeValue));
|
||
}
|
||
}
|
||
|
||
/**
 * Returns an attribute value that belongs to the attribute name.
 *
 * Reads directly from the type map; no transaction is required.
 *
 * @note Xml-Text nodes don't have attributes. You can use this feature to assign properties to complete text-blocks.
 *
 * @param {String} attributeName The attribute name that identifies the
 *                               queried value.
 * @return {any} The queried attribute value.
 *
 * @public
 */
getAttribute (attributeName) {
  return /** @type {any} */ (typeMapGet(this, attributeName))
}
|
||
|
||
/**
 * Returns all attribute name/value pairs in a JSON Object.
 *
 * @note Xml-Text nodes don't have attributes. You can use this feature to assign properties to complete text-blocks.
 *
 * @param {Snapshot} [snapshot]
 * @return {Object<string, any>} A JSON Object that describes the attributes.
 *
 * @public
 */
getAttributes (snapshot) {
  // NOTE(review): the `snapshot` parameter is accepted but currently ignored —
  // attributes are always read from the live state.
  return typeMapGetAll(this)
}
|
||
|
||
/**
 * Serialize this type's reference for transmission to a remote peer.
 * YText carries no extra metadata, so only the type ref is written.
 *
 * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
 */
_write (encoder) {
  encoder.writeTypeRef(YTextRefID);
}
|
||
}
|
||
|
||
/**
 * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder unused — YText encodes no
 *                                                    extra data beyond its type ref
 * @return {YText}
 *
 * @private
 * @function
 */
const readYText = decoder => new YText();
|
||
|
||
/**
|
||
* @module YXml
|
||
*/
|
||
|
||
/**
|
||
* Define the elements to which a set of CSS queries apply.
|
||
* {@link https://developer.mozilla.org/en-US/docs/Web/CSS/CSS_Selectors|CSS_Selectors}
|
||
*
|
||
* @example
|
||
* query = '.classSelector'
|
||
* query = 'nodeSelector'
|
||
* query = '#idSelector'
|
||
*
|
||
* @typedef {string} CSS_Selector
|
||
*/
|
||
|
||
/**
|
||
* Dom filter function.
|
||
*
|
||
* @callback domFilter
|
||
* @param {string} nodeName The nodeName of the element
|
||
* @param {Map} attributes The map of attributes.
|
||
* @return {boolean} Whether to include the Dom node in the YXmlElement.
|
||
*/
|
||
|
||
/**
 * Represents a subset of the nodes of a YXmlElement / YXmlFragment and a
 * position within them.
 *
 * Performs a depth-first pre-order traversal of the XML tree rooted at
 * `root`, yielding only the nodes accepted by the filter.
 *
 * Can be created with {@link YXmlFragment#createTreeWalker}
 *
 * @public
 * @implements {Iterable<YXmlElement|YXmlText|YXmlElement|YXmlHook>}
 */
class YXmlTreeWalker {
  /**
   * @param {YXmlFragment | YXmlElement} root
   * @param {function(AbstractType<any>):boolean} [f] predicate deciding which
   *        nodes are yielded; defaults to accepting everything.
   */
  constructor (root, f = () => true) {
    this._filter = f;
    this._root = root;
    /**
     * @type {Item}
     */
    this._currentNode = /** @type {Item} */ (root._start);
    // On the first next() call the current node itself may qualify; afterwards
    // we always advance before testing.
    this._firstCall = true;
  }

  [Symbol.iterator] () {
    return this
  }

  /**
   * Get the next node.
   *
   * @return {IteratorResult<YXmlElement|YXmlText|YXmlHook>} The next node.
   *
   * @public
   */
  next () {
    /**
     * @type {Item|null}
     */
    let n = this._currentNode;
    let type = n && n.content && /** @type {any} */ (n.content).type;
    if (n !== null && (!this._firstCall || n.deleted || !this._filter(type))) { // if first call, we check if we can use the first item
      do {
        type = /** @type {any} */ (n.content).type;
        if (!n.deleted && (type.constructor === YXmlElement || type.constructor === YXmlFragment) && type._start !== null) {
          // walk down in the tree
          n = type._start;
        } else {
          // walk right or up in the tree
          while (n !== null) {
            if (n.right !== null) {
              n = n.right;
              break
            } else if (n.parent === this._root) {
              // reached the end of the root's children — traversal is done
              n = null;
            } else {
              n = /** @type {AbstractType<any>} */ (n.parent)._item;
            }
          }
        }
      } while (n !== null && (n.deleted || !this._filter(/** @type {ContentType} */ (n.content).type)))
    }
    this._firstCall = false;
    if (n === null) {
      // @ts-ignore
      return { value: undefined, done: true }
    }
    this._currentNode = n;
    return { value: /** @type {any} */ (n.content).type, done: false }
  }
}
|
||
|
||
/**
 * Represents a list of {@link YXmlElement} and {@link YXmlText} types.
 * A YXmlFragment is similar to a {@link YXmlElement}, but it does not have a
 * nodeName and it does not have attributes. Though it can be bound to a DOM
 * element - in this case the attributes and the nodeName are not shared.
 *
 * @public
 * @extends AbstractType<YXmlEvent>
 */
class YXmlFragment extends AbstractType {
  constructor () {
    super();
    /**
     * Buffers content inserted before this type is integrated into a Doc.
     * Set to null once integrated.
     * @type {Array<any>|null}
     */
    this._prelimContent = [];
  }

  /**
   * @type {YXmlElement|YXmlText|null}
   */
  get firstChild () {
    const first = this._first;
    return first ? first.content.getContent()[0] : null
  }

  /**
   * Integrate this type into the Yjs instance.
   *
   * * Save this struct in the os
   * * This type is sent to other client
   * * Observer functions are fired
   *
   * @param {Doc} y The Yjs instance
   * @param {Item} item
   */
  _integrate (y, item) {
    super._integrate(y, item);
    // Flush content that was buffered before integration.
    this.insert(0, /** @type {Array<any>} */ (this._prelimContent));
    this._prelimContent = null;
  }

  // Creates an empty instance of the same type (used when items are copied).
  _copy () {
    return new YXmlFragment()
  }

  /**
   * @return {YXmlFragment} a deep copy; child Y types are cloned recursively.
   */
  clone () {
    const el = new YXmlFragment();
    // @ts-ignore
    el.insert(0, this.toArray().map(item => item instanceof AbstractType ? item.clone() : item));
    return el
  }

  get length () {
    return this._prelimContent === null ? this._length : this._prelimContent.length
  }

  /**
   * Create a subtree of childNodes.
   *
   * @example
   * const walker = elem.createTreeWalker(dom => dom.nodeName === 'div')
   * for (let node in walker) {
   *   // `node` is a div node
   *   nop(node)
   * }
   *
   * @param {function(AbstractType<any>):boolean} filter Function that is called on each child element and
   *                                          returns a Boolean indicating whether the child
   *                                          is to be included in the subtree.
   * @return {YXmlTreeWalker} A subtree and a position within it.
   *
   * @public
   */
  createTreeWalker (filter) {
    return new YXmlTreeWalker(this, filter)
  }

  /**
   * Returns the first YXmlElement that matches the query.
   * Similar to DOM's {@link querySelector}.
   *
   * Query support:
   *   - tagname
   * TODO:
   *   - id
   *   - attribute
   *
   * @param {CSS_Selector} query The query on the children.
   * @return {YXmlElement|YXmlText|YXmlHook|null} The first element that matches the query or null.
   *
   * @public
   */
  querySelector (query) {
    query = query.toUpperCase();
    // @ts-ignore
    const iterator = new YXmlTreeWalker(this, element => element.nodeName && element.nodeName.toUpperCase() === query);
    const next = iterator.next();
    if (next.done) {
      return null
    } else {
      return next.value
    }
  }

  /**
   * Returns all YXmlElements that match the query.
   * Similar to Dom's {@link querySelectorAll}.
   *
   * @todo Does not yet support all queries. Currently only query by tagName.
   *
   * @param {CSS_Selector} query The query on the children
   * @return {Array<YXmlElement|YXmlText|YXmlHook|null>} The elements that match this query.
   *
   * @public
   */
  querySelectorAll (query) {
    query = query.toUpperCase();
    // @ts-ignore
    return Array.from(new YXmlTreeWalker(this, element => element.nodeName && element.nodeName.toUpperCase() === query))
  }

  /**
   * Creates YXmlEvent and calls observers.
   *
   * @param {Transaction} transaction
   * @param {Set<null|string>} parentSubs Keys changed on this type. `null` if list was modified.
   */
  _callObserver (transaction, parentSubs) {
    callTypeObservers(this, transaction, new YXmlEvent(this, parentSubs, transaction));
  }

  /**
   * Get the string representation of all the children of this YXmlFragment.
   *
   * @return {string} The string representation of all children.
   */
  toString () {
    return typeListMap(this, xml => xml.toString()).join('')
  }

  /**
   * @return {string}
   */
  toJSON () {
    return this.toString()
  }

  /**
   * Creates a Dom Element that mirrors this YXmlElement.
   *
   * @param {Document} [_document=document] The document object (you must define
   *                                        this when calling this method in
   *                                        nodejs)
   * @param {Object<string, any>} [hooks={}] Optional property to customize how hooks
   *                                             are presented in the DOM
   * @param {any} [binding] You should not set this property. This is
   *                               used if DomBinding wants to create a
   *                               association to the created DOM type.
   * @return {Node} The {@link https://developer.mozilla.org/en-US/docs/Web/API/Element|Dom Element}
   *
   * @public
   */
  toDOM (_document = document, hooks = {}, binding) {
    const fragment = _document.createDocumentFragment();
    if (binding !== undefined) {
      binding._createAssociation(fragment, this);
    }
    typeListForEach(this, xmlType => {
      fragment.insertBefore(xmlType.toDOM(_document, hooks, binding), null);
    });
    return fragment
  }

  /**
   * Inserts new content at an index.
   *
   * @example
   *  // Insert character 'a' at position 0
   *  xml.insert(0, [new Y.XmlText('text')])
   *
   * @param {number} index The index to insert content at
   * @param {Array<YXmlElement|YXmlText>} content The array of content
   */
  insert (index, content) {
    if (this.doc !== null) {
      transact(this.doc, transaction => {
        typeListInsertGenerics(transaction, this, index, content);
      });
    } else {
      // @ts-ignore _prelimContent is defined because this is not yet integrated
      this._prelimContent.splice(index, 0, ...content);
    }
  }

  /**
   * Inserts new content after a reference node.
   *
   * @example
   *  // Insert character 'a' at position 0
   *  xml.insert(0, [new Y.XmlText('text')])
   *
   * @param {null|Item|YXmlElement|YXmlText} ref The index to insert content at
   * @param {Array<YXmlElement|YXmlText>} content The array of content
   */
  insertAfter (ref, content) {
    if (this.doc !== null) {
      transact(this.doc, transaction => {
        const refItem = (ref && ref instanceof AbstractType) ? ref._item : ref;
        typeListInsertGenericsAfter(transaction, this, refItem, content);
      });
    } else {
      const pc = /** @type {Array<any>} */ (this._prelimContent);
      // findIndex returns -1 when ref is absent; +1 turns that into 0, which
      // we detect below (index 0 with a non-null ref means "not found").
      const index = ref === null ? 0 : pc.findIndex(el => el === ref) + 1;
      if (index === 0 && ref !== null) {
        throw create$4('Reference item not found')
      }
      pc.splice(index, 0, ...content);
    }
  }

  /**
   * Deletes elements starting from an index.
   *
   * @param {number} index Index at which to start deleting elements
   * @param {number} [length=1] The number of elements to remove. Defaults to 1.
   */
  delete (index, length = 1) {
    if (this.doc !== null) {
      transact(this.doc, transaction => {
        typeListDelete(transaction, this, index, length);
      });
    } else {
      // @ts-ignore _prelimContent is defined because this is not yet integrated
      this._prelimContent.splice(index, length);
    }
  }

  /**
   * Transforms this YArray to a JavaScript Array.
   *
   * @return {Array<YXmlElement|YXmlText|YXmlHook>}
   */
  toArray () {
    return typeListToArray(this)
  }

  /**
   * Appends content to this YArray.
   *
   * @param {Array<YXmlElement|YXmlText>} content Array of content to append.
   */
  push (content) {
    this.insert(this.length, content);
  }

  /**
   * Prepends content to this YArray.
   *
   * @param {Array<YXmlElement|YXmlText>} content Array of content to prepend.
   */
  unshift (content) {
    this.insert(0, content);
  }

  /**
   * Returns the i-th element from a YArray.
   *
   * @param {number} index The index of the element to return from the YArray
   * @return {YXmlElement|YXmlText}
   */
  get (index) {
    return typeListGet(this, index)
  }

  /**
   * Transforms this YArray to a JavaScript Array.
   *
   * @param {number} [start]
   * @param {number} [end]
   * @return {Array<YXmlElement|YXmlText>}
   */
  slice (start = 0, end = this.length) {
    return typeListSlice(this, start, end)
  }

  /**
   * Executes a provided function once on every child element.
   *
   * @param {function(YXmlElement|YXmlText,number, typeof this):void} f A function to execute on every element of this YArray.
   */
  forEach (f) {
    typeListForEach(this, f);
  }

  /**
   * Transform the properties of this type to binary and write it to an
   * BinaryEncoder.
   *
   * This is called when this Item is sent to a remote peer.
   *
   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder The encoder to write data to.
   */
  _write (encoder) {
    encoder.writeTypeRef(YXmlFragmentRefID);
  }
}
|
||
|
||
/**
 * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder unused — YXmlFragment
 *                                                    encodes no extra data
 * @return {YXmlFragment}
 *
 * @private
 * @function
 */
const readYXmlFragment = decoder => new YXmlFragment();
|
||
|
||
/**
 * An YXmlElement imitates the behavior of a
 * {@link https://developer.mozilla.org/en-US/docs/Web/API/Element|Dom Element}.
 *
 * * An YXmlElement has attributes (key value pairs)
 * * An YXmlElement has childElements that must inherit from YXmlElement
 */
class YXmlElement extends YXmlFragment {
  constructor (nodeName = 'UNDEFINED') {
    super();
    this.nodeName = nodeName;
    /**
     * Buffers attributes set before this type is integrated into a Doc.
     * Set to null once integrated.
     * @type {Map<string, any>|null}
     */
    this._prelimAttrs = new Map();
  }

  /**
   * @type {YXmlElement|YXmlText|null}
   */
  get nextSibling () {
    const n = this._item ? this._item.next : null;
    return n ? /** @type {YXmlElement|YXmlText} */ (/** @type {ContentType} */ (n.content).type) : null
  }

  /**
   * @type {YXmlElement|YXmlText|null}
   */
  get prevSibling () {
    const n = this._item ? this._item.prev : null;
    return n ? /** @type {YXmlElement|YXmlText} */ (/** @type {ContentType} */ (n.content).type) : null
  }

  /**
   * Integrate this type into the Yjs instance.
   *
   * * Save this struct in the os
   * * This type is sent to other client
   * * Observer functions are fired
   *
   * @param {Doc} y The Yjs instance
   * @param {Item} item
   */
  _integrate (y, item) {
    // Note: the leading `;` on the next statement guards against ASI issues
    // after the unterminated super call — do not remove.
    super._integrate(y, item)
    ;(/** @type {Map<string, any>} */ (this._prelimAttrs)).forEach((value, key) => {
      this.setAttribute(key, value);
    });
    this._prelimAttrs = null;
  }

  /**
   * Creates an Item with the same effect as this Item (without position effect)
   *
   * @return {YXmlElement}
   */
  _copy () {
    return new YXmlElement(this.nodeName)
  }

  /**
   * @return {YXmlElement} a deep copy including attributes and cloned children.
   */
  clone () {
    const el = new YXmlElement(this.nodeName);
    const attrs = this.getAttributes();
    for (const key in attrs) {
      el.setAttribute(key, attrs[key]);
    }
    // @ts-ignore
    el.insert(0, this.toArray().map(item => item instanceof AbstractType ? item.clone() : item));
    return el
  }

  /**
   * Returns the XML serialization of this YXmlElement.
   * The attributes are ordered by attribute-name, so you can easily use this
   * method to compare YXmlElements
   *
   * @return {string} The string representation of this type.
   *
   * @public
   */
  toString () {
    const attrs = this.getAttributes();
    const stringBuilder = [];
    const keys = [];
    for (const key in attrs) {
      keys.push(key);
    }
    // Sort attribute names so serialization is deterministic / comparable.
    keys.sort();
    const keysLen = keys.length;
    for (let i = 0; i < keysLen; i++) {
      const key = keys[i];
      stringBuilder.push(key + '="' + attrs[key] + '"');
    }
    const nodeName = this.nodeName.toLocaleLowerCase();
    const attrsString = stringBuilder.length > 0 ? ' ' + stringBuilder.join(' ') : '';
    return `<${nodeName}${attrsString}>${super.toString()}</${nodeName}>`
  }

  /**
   * Removes an attribute from this YXmlElement.
   *
   * @param {String} attributeName The attribute name that is to be removed.
   *
   * @public
   */
  removeAttribute (attributeName) {
    if (this.doc !== null) {
      transact(this.doc, transaction => {
        typeMapDelete(transaction, this, attributeName);
      });
    } else {
      /** @type {Map<string,any>} */ (this._prelimAttrs).delete(attributeName);
    }
  }

  /**
   * Sets or updates an attribute.
   *
   * @param {String} attributeName The attribute name that is to be set.
   * @param {String} attributeValue The attribute value that is to be set.
   *
   * @public
   */
  setAttribute (attributeName, attributeValue) {
    if (this.doc !== null) {
      transact(this.doc, transaction => {
        typeMapSet(transaction, this, attributeName, attributeValue);
      });
    } else {
      /** @type {Map<string, any>} */ (this._prelimAttrs).set(attributeName, attributeValue);
    }
  }

  /**
   * Returns an attribute value that belongs to the attribute name.
   *
   * @param {String} attributeName The attribute name that identifies the
   *                               queried value.
   * @return {String} The queried attribute value.
   *
   * @public
   */
  getAttribute (attributeName) {
    return /** @type {any} */ (typeMapGet(this, attributeName))
  }

  /**
   * Returns whether an attribute exists
   *
   * @param {String} attributeName The attribute name to check for existence.
   * @return {boolean} whether the attribute exists.
   *
   * @public
   */
  hasAttribute (attributeName) {
    return /** @type {any} */ (typeMapHas(this, attributeName))
  }

  /**
   * Returns all attribute name/value pairs in a JSON Object.
   *
   * @return {Object<string, any>} A JSON Object that describes the attributes.
   *
   * @public
   */
  getAttributes () {
    return typeMapGetAll(this)
  }

  /**
   * Creates a Dom Element that mirrors this YXmlElement.
   *
   * @param {Document} [_document=document] The document object (you must define
   *                                        this when calling this method in
   *                                        nodejs)
   * @param {Object<string, any>} [hooks={}] Optional property to customize how hooks
   *                                             are presented in the DOM
   * @param {any} [binding] You should not set this property. This is
   *                               used if DomBinding wants to create a
   *                               association to the created DOM type.
   * @return {Node} The {@link https://developer.mozilla.org/en-US/docs/Web/API/Element|Dom Element}
   *
   * @public
   */
  toDOM (_document = document, hooks = {}, binding) {
    const dom = _document.createElement(this.nodeName);
    const attrs = this.getAttributes();
    for (const key in attrs) {
      dom.setAttribute(key, attrs[key]);
    }
    typeListForEach(this, yxml => {
      dom.appendChild(yxml.toDOM(_document, hooks, binding));
    });
    if (binding !== undefined) {
      binding._createAssociation(dom, this);
    }
    return dom
  }

  /**
   * Transform the properties of this type to binary and write it to an
   * BinaryEncoder.
   *
   * This is called when this Item is sent to a remote peer.
   *
   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder The encoder to write data to.
   */
  _write (encoder) {
    encoder.writeTypeRef(YXmlElementRefID);
    encoder.writeKey(this.nodeName);
  }
}
|
||
|
||
/**
 * Reads the node name from the decoder and constructs the element.
 *
 * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
 * @return {YXmlElement}
 *
 * @function
 */
const readYXmlElement = decoder => new YXmlElement(decoder.readKey());
|
||
|
||
/**
 * @extends YEvent<YXmlElement|YXmlText|YXmlFragment>
 * An Event that describes changes on a YXml Element or Yxml Fragment
 */
class YXmlEvent extends YEvent {
  /**
   * @param {YXmlElement|YXmlText|YXmlFragment} target The target on which the event is created.
   * @param {Set<string|null>} subs The set of changed attributes. `null` is included if the
   *                                child list changed.
   * @param {Transaction} transaction The transaction instance with wich the
   *                                  change was created.
   */
  constructor (target, subs, transaction) {
    super(target, transaction);
    /**
     * Whether the children changed.
     * @type {Boolean}
     * @private
     */
    this.childListChanged = false;
    /**
     * Set of all changed attributes.
     * @type {Set<string>}
     */
    this.attributesChanged = new Set();
    // A `null` entry signals a child-list modification; every other entry is
    // the name of a changed attribute.
    for (const sub of subs) {
      if (sub === null) {
        this.childListChanged = true;
      } else {
        this.attributesChanged.add(sub);
      }
    }
  }
}
|
||
|
||
/**
 * You can manage binding to a custom type with YXmlHook.
 *
 * @extends {YMap<any>}
 */
class YXmlHook extends YMap {
  /**
   * @param {string} hookName nodeName of the Dom Node.
   */
  constructor (hookName) {
    super();
    /**
     * @type {string}
     */
    this.hookName = hookName;
  }

  /**
   * Creates an Item with the same effect as this Item (without position effect)
   */
  _copy () {
    return new YXmlHook(this.hookName)
  }

  /**
   * @return {YXmlHook} a copy carrying the same hook name and entries.
   */
  clone () {
    const el = new YXmlHook(this.hookName);
    this.forEach((value, key) => {
      el.set(key, value);
    });
    return el
  }

  /**
   * Creates a Dom Element that mirrors this YXmlElement.
   *
   * @param {Document} [_document=document] The document object (you must define
   *                                        this when calling this method in
   *                                        nodejs)
   * @param {Object.<string, any>} [hooks] Optional property to customize how hooks
   *                                             are presented in the DOM
   * @param {any} [binding] You should not set this property. This is
   *                               used if DomBinding wants to create a
   *                               association to the created DOM type
   * @return {Element} The {@link https://developer.mozilla.org/en-US/docs/Web/API/Element|Dom Element}
   *
   * @public
   */
  toDOM (_document = document, hooks = {}, binding) {
    const hook = hooks[this.hookName];
    let dom;
    if (hook !== undefined) {
      // A registered hook knows how to render itself.
      dom = hook.createDom(this);
    } else {
      // Fix: use the supplied `_document` instead of the global `document`,
      // so this works in nodejs where no global document exists (consistent
      // with YXmlFragment/YXmlElement.toDOM).
      dom = _document.createElement(this.hookName);
    }
    dom.setAttribute('data-yjs-hook', this.hookName);
    if (binding !== undefined) {
      binding._createAssociation(dom, this);
    }
    return dom
  }

  /**
   * Transform the properties of this type to binary and write it to an
   * BinaryEncoder.
   *
   * This is called when this Item is sent to a remote peer.
   *
   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder The encoder to write data to.
   */
  _write (encoder) {
    encoder.writeTypeRef(YXmlHookRefID);
    encoder.writeKey(this.hookName);
  }
}
|
||
|
||
/**
 * Reads the hook name from the decoder and constructs the hook type.
 *
 * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
 * @return {YXmlHook}
 *
 * @private
 * @function
 */
const readYXmlHook = decoder =>
  new YXmlHook(decoder.readKey());
|
||
|
||
/**
 * Represents text in a Dom Element. In the future this type will also handle
 * simple formatting information like bold and italic.
 */
class YXmlText extends YText {
  /**
   * The next sibling of this type, or null if this is the last child of its
   * parent (or the type is not integrated yet).
   *
   * @type {YXmlElement|YXmlText|null}
   */
  get nextSibling () {
    const n = this._item ? this._item.next : null;
    return n ? /** @type {YXmlElement|YXmlText} */ (/** @type {ContentType} */ (n.content).type) : null
  }

  /**
   * The previous sibling of this type, or null if this is the first child of
   * its parent (or the type is not integrated yet).
   *
   * @type {YXmlElement|YXmlText|null}
   */
  get prevSibling () {
    const n = this._item ? this._item.prev : null;
    return n ? /** @type {YXmlElement|YXmlText} */ (/** @type {ContentType} */ (n.content).type) : null
  }

  /**
   * Creates an empty YXmlText with the same effect as this Item (without
   * position effect). The text content is not copied here; see `clone`.
   */
  _copy () {
    return new YXmlText()
  }

  /**
   * Deep copy including the text content and its formatting attributes.
   *
   * @return {YXmlText}
   */
  clone () {
    const text = new YXmlText();
    text.applyDelta(this.toDelta());
    return text
  }

  /**
   * Creates a Dom Element that mirrors this YXmlText.
   *
   * @param {Document} [_document=document] The document object (you must define
   *                                        this when calling this method in
   *                                        nodejs)
   * @param {Object<string, any>} [hooks] Optional property to customize how hooks
   *                                      are presented in the DOM
   * @param {any} [binding] You should not set this property. This is
   *                        used if DomBinding wants to create a
   *                        association to the created DOM type.
   * @return {Text} The {@link https://developer.mozilla.org/en-US/docs/Web/API/Element|Dom Element}
   *
   * @public
   */
  toDOM (_document = document, hooks, binding) {
    const dom = _document.createTextNode(this.toString());
    if (binding !== undefined) {
      binding._createAssociation(dom, this);
    }
    return dom
  }

  /**
   * Serializes the text to an XML-like string. Each delta's formatting
   * attributes become nested wrapper tags around the inserted text.
   * Both tag names and attributes are sorted so equal content always
   * yields the same string (canonical form).
   */
  toString () {
    // @ts-ignore
    return this.toDelta().map(delta => {
      const nestedNodes = [];
      for (const nodeName in delta.attributes) {
        const attrs = [];
        for (const key in delta.attributes[nodeName]) {
          attrs.push({ key, value: delta.attributes[nodeName][key] });
        }
        // sort attributes to get a unique order
        attrs.sort((a, b) => a.key < b.key ? -1 : 1);
        nestedNodes.push({ nodeName, attrs });
      }
      // sort node order to get a unique order
      nestedNodes.sort((a, b) => a.nodeName < b.nodeName ? -1 : 1);
      // now convert to dom string
      let str = '';
      for (let i = 0; i < nestedNodes.length; i++) {
        const node = nestedNodes[i];
        str += `<${node.nodeName}`;
        for (let j = 0; j < node.attrs.length; j++) {
          const attr = node.attrs[j];
          str += ` ${attr.key}="${attr.value}"`;
        }
        str += '>';
      }
      str += delta.insert;
      // close the wrapper tags in reverse order of opening
      for (let i = nestedNodes.length - 1; i >= 0; i--) {
        str += `</${nestedNodes[i].nodeName}>`;
      }
      return str
    }).join('')
  }

  /**
   * @return {string} Same as toString().
   */
  toJSON () {
    return this.toString()
  }

  /**
   * Write the type reference so a remote peer can reconstruct this type.
   *
   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
   */
  _write (encoder) {
    encoder.writeTypeRef(YXmlTextRefID);
  }
}
|
||
|
||
/**
 * Reconstructs a YXmlText from an update. The decoder is accepted for API
 * uniformity with the other type readers but carries no payload here.
 *
 * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
 * @return {YXmlText}
 *
 * @private
 * @function
 */
const readYXmlText = decoder => {
  return new YXmlText()
};
|
||
|
||
/**
 * Common base class for all structs stored in a StructStore.
 * A struct occupies the clock range [id.clock, id.clock + length) of its client.
 */
class AbstractStruct {
  /**
   * @param {ID} id
   * @param {number} length
   */
  constructor (id, length) {
    this.id = id;
    this.length = length;
  }

  /**
   * Whether this struct is deleted. Subclasses must override.
   *
   * @type {boolean}
   */
  get deleted () {
    throw methodUnimplemented()
  }

  /**
   * Merge this struct with the item to the right.
   * This method is already assuming that `this.id.clock + this.length === this.id.clock`.
   * Also this method does *not* remove right from StructStore!
   * @param {AbstractStruct} right
   * @return {boolean} wether this merged with right
   */
  mergeWith (right) {
    return false
  }

  /**
   * Serialize this struct. Subclasses must override.
   *
   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder The encoder to write data to.
   * @param {number} offset
   * @param {number} encodingRef
   */
  write (encoder, offset, encodingRef) {
    throw methodUnimplemented()
  }

  /**
   * Integrate this struct into the document. Subclasses must override.
   *
   * @param {Transaction} transaction
   * @param {number} offset
   */
  integrate (transaction, offset) {
    throw methodUnimplemented()
  }
}
|
||
|
||
// Struct-type tag written before a GC struct in the update encoding.
const structGCRefNumber = 0;

/**
 * A garbage-collected range of structs. Only the clock range survives; the
 * content is gone, so a GC struct always reports itself as deleted.
 *
 * @private
 */
class GC extends AbstractStruct {
  get deleted () {
    return true
  }

  // Deleting a GC'd range is a no-op — it is already gone.
  delete () {}

  /**
   * @param {GC} right
   * @return {boolean}
   */
  mergeWith (right) {
    if (this.constructor !== right.constructor) {
      return false
    }
    // Adjacent GC ranges simply coalesce into one longer range.
    this.length += right.length;
    return true
  }

  /**
   * @param {Transaction} transaction
   * @param {number} offset
   */
  integrate (transaction, offset) {
    if (offset > 0) {
      // Skip the prefix that is already present locally.
      this.id.clock += offset;
      this.length -= offset;
    }
    addStruct(transaction.doc.store, this);
  }

  /**
   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
   * @param {number} offset
   */
  write (encoder, offset) {
    encoder.writeInfo(structGCRefNumber);
    encoder.writeLen(this.length - offset);
  }

  /**
   * GC structs never have missing dependencies.
   *
   * @param {Transaction} transaction
   * @param {StructStore} store
   * @return {null | number}
   */
  getMissing (transaction, store) {
    return null
  }
}
|
||
|
||
/**
 * Item content wrapping a single binary buffer (a Uint8Array inserted into a
 * shared type). Always counts as exactly one element.
 */
class ContentBinary {
  /**
   * @param {Uint8Array} content
   */
  constructor (content) {
    this.content = content;
  }

  /**
   * @return {number}
   */
  getLength () {
    return 1
  }

  /**
   * @return {Array<any>}
   */
  getContent () {
    return [this.content]
  }

  /**
   * @return {boolean}
   */
  isCountable () {
    return true
  }

  /**
   * Shallow copy — the underlying buffer is shared, not duplicated.
   *
   * @return {ContentBinary}
   */
  copy () {
    return new ContentBinary(this.content)
  }

  /**
   * Binary content has length 1 and therefore can never be split.
   *
   * @param {number} offset
   * @return {ContentBinary}
   */
  splice (offset) {
    throw methodUnimplemented()
  }

  /**
   * @param {ContentBinary} right
   * @return {boolean}
   */
  mergeWith (right) {
    return false
  }

  /**
   * No integration side effects.
   * @param {Transaction} transaction
   * @param {Item} item
   */
  integrate (transaction, item) {}
  /**
   * @param {Transaction} transaction
   */
  delete (transaction) {}
  /**
   * @param {StructStore} store
   */
  gc (store) {}
  /**
   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
   * @param {number} offset
   */
  write (encoder, offset) {
    encoder.writeBuf(this.content);
  }

  /**
   * @return {number} Content-type ref used in the binary encoding.
   */
  getRef () {
    return 3
  }
}
|
||
|
||
/**
 * Reads a ContentBinary payload written by ContentBinary.write.
 *
 * @param {UpdateDecoderV1 | UpdateDecoderV2 } decoder
 * @return {ContentBinary}
 */
const readContentBinary = decoder => {
  const buf = decoder.readBuf();
  return new ContentBinary(buf)
};
|
||
|
||
/**
 * Item content representing a range whose actual content was deleted before
 * this client received it — only the length survives.
 */
class ContentDeleted {
  /**
   * @param {number} len
   */
  constructor (len) {
    this.len = len;
  }

  /**
   * @return {number}
   */
  getLength () {
    return this.len
  }

  /**
   * @return {Array<any>} Always empty — the content is gone.
   */
  getContent () {
    return []
  }

  /**
   * @return {boolean} Deleted content never contributes to a type's length.
   */
  isCountable () {
    return false
  }

  /**
   * @return {ContentDeleted}
   */
  copy () {
    return new ContentDeleted(this.len)
  }

  /**
   * Splits this range at `offset`; keeps the left part, returns the right.
   *
   * @param {number} offset
   * @return {ContentDeleted}
   */
  splice (offset) {
    const right = new ContentDeleted(this.len - offset);
    this.len = offset;
    return right
  }

  /**
   * Adjacent deleted ranges always merge.
   *
   * @param {ContentDeleted} right
   * @return {boolean}
   */
  mergeWith (right) {
    this.len += right.len;
    return true
  }

  /**
   * @param {Transaction} transaction
   * @param {Item} item
   */
  integrate (transaction, item) {
    // Record the range in the transaction's delete set and flag the item.
    addToDeleteSet(transaction.deleteSet, item.id.client, item.id.clock, this.len);
    item.markDeleted();
  }

  /**
   * @param {Transaction} transaction
   */
  delete (transaction) {}
  /**
   * @param {StructStore} store
   */
  gc (store) {}
  /**
   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
   * @param {number} offset
   */
  write (encoder, offset) {
    encoder.writeLen(this.len - offset);
  }

  /**
   * @return {number} Content-type ref used in the binary encoding.
   */
  getRef () {
    return 1
  }
}
|
||
|
||
/**
 * Reads a ContentDeleted payload written by ContentDeleted.write.
 *
 * @private
 *
 * @param {UpdateDecoderV1 | UpdateDecoderV2 } decoder
 * @return {ContentDeleted}
 */
const readContentDeleted = decoder => {
  const len = decoder.readLen();
  return new ContentDeleted(len)
};
|
||
|
||
/**
 * Creates a (sub)document from its guid and the stored subdocument options.
 *
 * @param {string} guid
 * @param {Object<string, any>} opts
 */
const createDocFromOpts = (guid, opts) => {
  // shouldLoad is derived from either an explicit flag or the autoLoad option.
  const shouldLoad = opts.shouldLoad || opts.autoLoad || false;
  return new Doc({ guid, ...opts, shouldLoad })
};
|
||
|
||
/**
 * Item content that embeds a subdocument (Doc) inside a shared type.
 *
 * @private
 */
class ContentDoc {
  /**
   * @param {Doc} doc
   */
  constructor (doc) {
    if (doc._item) {
      console.error('This document was already integrated as a sub-document. You should create a second instance instead with the same guid.');
    }
    /**
     * @type {Doc}
     */
    this.doc = doc;
    /**
     * Options a remote peer needs to reconstruct the subdocument.
     * Only non-default values are recorded.
     * @type {any}
     */
    const opts = {};
    this.opts = opts;
    if (!doc.gc) {
      opts.gc = false;
    }
    if (doc.autoLoad) {
      opts.autoLoad = true;
    }
    if (doc.meta !== null) {
      opts.meta = doc.meta;
    }
  }

  /**
   * @return {number}
   */
  getLength () {
    return 1
  }

  /**
   * @return {Array<any>}
   */
  getContent () {
    return [this.doc]
  }

  /**
   * @return {boolean}
   */
  isCountable () {
    return true
  }

  /**
   * Creates a fresh Doc instance with the same guid and options.
   *
   * @return {ContentDoc}
   */
  copy () {
    return new ContentDoc(createDocFromOpts(this.doc.guid, this.opts))
  }

  /**
   * Subdocument content has length 1 and can never be split.
   *
   * @param {number} offset
   * @return {ContentDoc}
   */
  splice (offset) {
    throw methodUnimplemented()
  }

  /**
   * @param {ContentDoc} right
   * @return {boolean}
   */
  mergeWith (right) {
    return false
  }

  /**
   * @param {Transaction} transaction
   * @param {Item} item
   */
  integrate (transaction, item) {
    // this needs to be reflected in doc.destroy as well
    this.doc._item = item;
    transaction.subdocsAdded.add(this.doc);
    if (this.doc.shouldLoad) {
      transaction.subdocsLoaded.add(this.doc);
    }
  }

  /**
   * @param {Transaction} transaction
   */
  delete (transaction) {
    if (transaction.subdocsAdded.has(this.doc)) {
      // Added and removed within the same transaction — the two cancel out.
      transaction.subdocsAdded.delete(this.doc);
    } else {
      transaction.subdocsRemoved.add(this.doc);
    }
  }

  /**
   * @param {StructStore} store
   */
  gc (store) { }

  /**
   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
   * @param {number} offset
   */
  write (encoder, offset) {
    encoder.writeString(this.doc.guid);
    encoder.writeAny(this.opts);
  }

  /**
   * @return {number} Content-type ref used in the binary encoding.
   */
  getRef () {
    return 9
  }
}
|
||
|
||
/**
 * Reads a ContentDoc payload written by ContentDoc.write.
 *
 * @private
 *
 * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
 * @return {ContentDoc}
 */
const readContentDoc = decoder => {
  // Read order matters: guid first, then the options object.
  const guid = decoder.readString();
  const opts = decoder.readAny();
  return new ContentDoc(createDocFromOpts(guid, opts))
};
|
||
|
||
/**
 * Item content wrapping an arbitrary JSON-encodable embed object (used e.g.
 * by rich-text deltas). Always counts as exactly one element.
 *
 * @private
 */
class ContentEmbed {
  /**
   * @param {Object} embed
   */
  constructor (embed) {
    this.embed = embed;
  }

  /**
   * @return {number}
   */
  getLength () {
    return 1
  }

  /**
   * @return {Array<any>}
   */
  getContent () {
    return [this.embed]
  }

  /**
   * @return {boolean}
   */
  isCountable () {
    return true
  }

  /**
   * Shallow copy — the embed object itself is shared.
   *
   * @return {ContentEmbed}
   */
  copy () {
    return new ContentEmbed(this.embed)
  }

  /**
   * Embeds have length 1 and can never be split.
   *
   * @param {number} offset
   * @return {ContentEmbed}
   */
  splice (offset) {
    throw methodUnimplemented()
  }

  /**
   * @param {ContentEmbed} right
   * @return {boolean}
   */
  mergeWith (right) {
    return false
  }

  /**
   * @param {Transaction} transaction
   * @param {Item} item
   */
  integrate (transaction, item) {}
  /**
   * @param {Transaction} transaction
   */
  delete (transaction) {}
  /**
   * @param {StructStore} store
   */
  gc (store) {}
  /**
   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
   * @param {number} offset
   */
  write (encoder, offset) {
    encoder.writeJSON(this.embed);
  }

  /**
   * @return {number} Content-type ref used in the binary encoding.
   */
  getRef () {
    return 5
  }
}
|
||
|
||
/**
 * Reads a ContentEmbed payload written by ContentEmbed.write.
 *
 * @private
 *
 * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
 * @return {ContentEmbed}
 */
const readContentEmbed = decoder => {
  const embed = decoder.readJSON();
  return new ContentEmbed(embed)
};
|
||
|
||
/**
 * Item content representing a rich-text formatting marker (a key/value
 * attribute toggle). Formats are zero-length: they affect rendering but do
 * not contribute to the text length.
 *
 * @private
 */
class ContentFormat {
  /**
   * @param {string} key
   * @param {Object} value
   */
  constructor (key, value) {
    this.key = key;
    this.value = value;
  }

  /**
   * @return {number}
   */
  getLength () {
    return 1
  }

  /**
   * @return {Array<any>} Always empty — formats carry no user-visible content.
   */
  getContent () {
    return []
  }

  /**
   * @return {boolean}
   */
  isCountable () {
    return false
  }

  /**
   * @return {ContentFormat}
   */
  copy () {
    return new ContentFormat(this.key, this.value)
  }

  /**
   * Formats have length 1 and can never be split.
   *
   * @param {number} offset
   * @return {ContentFormat}
   */
  splice (offset) {
    throw methodUnimplemented()
  }

  /**
   * @param {ContentFormat} right
   * @return {boolean}
   */
  mergeWith (right) {
    return false
  }

  /**
   * @param {Transaction} transaction
   * @param {Item} item
   */
  integrate (transaction, item) {
    // @todo searchmarker are currently unsupported for rich text documents
    /** @type {AbstractType<any>} */ (item.parent)._searchMarker = null;
  }

  /**
   * @param {Transaction} transaction
   */
  delete (transaction) {}
  /**
   * @param {StructStore} store
   */
  gc (store) {}
  /**
   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
   * @param {number} offset
   */
  write (encoder, offset) {
    encoder.writeKey(this.key);
    encoder.writeJSON(this.value);
  }

  /**
   * @return {number} Content-type ref used in the binary encoding.
   */
  getRef () {
    return 6
  }
}
|
||
|
||
/**
 * Reads a ContentFormat payload written by ContentFormat.write.
 * Read order matters: key first, then the JSON value.
 *
 * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
 * @return {ContentFormat}
 */
const readContentFormat = decoder => {
  const key = decoder.readKey();
  const value = decoder.readJSON();
  return new ContentFormat(key, value)
};
|
||
|
||
/**
 * Item content storing an array of JSON-serializable values, one element per
 * value. Superseded by ContentAny in newer documents but kept for
 * backwards-compatible decoding.
 *
 * @private
 */
class ContentJSON {
  /**
   * @param {Array<any>} arr
   */
  constructor (arr) {
    /**
     * @type {Array<any>}
     */
    this.arr = arr;
  }

  /**
   * @return {number}
   */
  getLength () {
    return this.arr.length
  }

  /**
   * @return {Array<any>}
   */
  getContent () {
    return this.arr
  }

  /**
   * @return {boolean}
   */
  isCountable () {
    return true
  }

  /**
   * Shallow copy — the array reference is shared.
   *
   * @return {ContentJSON}
   */
  copy () {
    return new ContentJSON(this.arr)
  }

  /**
   * Splits at `offset`; keeps the left part, returns the right.
   *
   * @param {number} offset
   * @return {ContentJSON}
   */
  splice (offset) {
    const right = new ContentJSON(this.arr.slice(offset));
    this.arr = this.arr.slice(0, offset);
    return right
  }

  /**
   * @param {ContentJSON} right
   * @return {boolean}
   */
  mergeWith (right) {
    this.arr = this.arr.concat(right.arr);
    return true
  }

  /**
   * @param {Transaction} transaction
   * @param {Item} item
   */
  integrate (transaction, item) {}
  /**
   * @param {Transaction} transaction
   */
  delete (transaction) {}
  /**
   * @param {StructStore} store
   */
  gc (store) {}
  /**
   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
   * @param {number} offset
   */
  write (encoder, offset) {
    const len = this.arr.length;
    encoder.writeLen(len - offset);
    for (let i = offset; i < len; i++) {
      const c = this.arr[i];
      // 'undefined' is used as a sentinel string because JSON cannot encode undefined.
      encoder.writeString(c === undefined ? 'undefined' : JSON.stringify(c));
    }
  }

  /**
   * @return {number} Content-type ref used in the binary encoding.
   */
  getRef () {
    return 2
  }
}
|
||
|
||
/**
 * Reads a ContentJSON payload written by ContentJSON.write.
 *
 * @private
 *
 * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
 * @return {ContentJSON}
 */
const readContentJSON = decoder => {
  const len = decoder.readLen();
  const items = [];
  for (let i = 0; i < len; i++) {
    const raw = decoder.readString();
    // 'undefined' is the sentinel ContentJSON.write emits for undefined values.
    items.push(raw === 'undefined' ? undefined : JSON.parse(raw));
  }
  return new ContentJSON(items)
};
|
||
|
||
/**
 * Item content storing an array of arbitrary encodable values, one element
 * per value (the modern replacement for ContentJSON).
 */
class ContentAny {
  /**
   * @param {Array<any>} arr
   */
  constructor (arr) {
    /**
     * @type {Array<any>}
     */
    this.arr = arr;
  }

  /**
   * @return {number}
   */
  getLength () {
    return this.arr.length
  }

  /**
   * @return {Array<any>}
   */
  getContent () {
    return this.arr
  }

  /**
   * @return {boolean}
   */
  isCountable () {
    return true
  }

  /**
   * Shallow copy — the array reference is shared.
   *
   * @return {ContentAny}
   */
  copy () {
    return new ContentAny(this.arr)
  }

  /**
   * Splits at `offset`; keeps the left part, returns the right.
   *
   * @param {number} offset
   * @return {ContentAny}
   */
  splice (offset) {
    const right = new ContentAny(this.arr.slice(offset));
    this.arr = this.arr.slice(0, offset);
    return right
  }

  /**
   * @param {ContentAny} right
   * @return {boolean}
   */
  mergeWith (right) {
    this.arr = this.arr.concat(right.arr);
    return true
  }

  /**
   * @param {Transaction} transaction
   * @param {Item} item
   */
  integrate (transaction, item) {}
  /**
   * @param {Transaction} transaction
   */
  delete (transaction) {}
  /**
   * @param {StructStore} store
   */
  gc (store) {}
  /**
   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
   * @param {number} offset
   */
  write (encoder, offset) {
    const len = this.arr.length;
    encoder.writeLen(len - offset);
    for (let i = offset; i < len; i++) {
      const c = this.arr[i];
      encoder.writeAny(c);
    }
  }

  /**
   * @return {number} Content-type ref used in the binary encoding.
   */
  getRef () {
    return 8
  }
}
|
||
|
||
/**
 * Reads a ContentAny payload written by ContentAny.write.
 *
 * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
 * @return {ContentAny}
 */
const readContentAny = decoder => {
  const len = decoder.readLen();
  const values = [];
  while (values.length < len) {
    values.push(decoder.readAny());
  }
  return new ContentAny(values)
};
|
||
|
||
/**
 * Item content storing a plain string. Length is measured in UTF-16 code
 * units, matching String#length.
 *
 * @private
 */
class ContentString {
  /**
   * @param {string} str
   */
  constructor (str) {
    /**
     * @type {string}
     */
    this.str = str;
  }

  /**
   * @return {number} Length in UTF-16 code units.
   */
  getLength () {
    return this.str.length
  }

  /**
   * @return {Array<any>} One entry per UTF-16 code unit.
   */
  getContent () {
    return this.str.split('')
  }

  /**
   * @return {boolean}
   */
  isCountable () {
    return true
  }

  /**
   * @return {ContentString}
   */
  copy () {
    return new ContentString(this.str)
  }

  /**
   * Splits at `offset` (in UTF-16 code units); keeps the left part and
   * returns the right. If the split would land inside a surrogate pair,
   * both halves of the pair are replaced with U+FFFD.
   *
   * @param {number} offset
   * @return {ContentString}
   */
  splice (offset) {
    const right = new ContentString(this.str.slice(offset));
    this.str = this.str.slice(0, offset);

    // Prevent encoding invalid documents because of splitting of surrogate pairs: https://github.com/yjs/yjs/issues/248
    const firstCharCode = this.str.charCodeAt(offset - 1);
    if (firstCharCode >= 0xD800 && firstCharCode <= 0xDBFF) {
      // Last character of the left split is the start of a surrogate utf16/ucs2 pair.
      // We don't support splitting of surrogate pairs because this may lead to invalid documents.
      // Replace the invalid character with the unicode replacement character (U+FFFD).
      // Fix: the replacement literals were mojibake ('<27>') in the mangled
      // source; the comment and the referenced issue establish U+FFFD.
      this.str = this.str.slice(0, offset - 1) + '\uFFFD';
      // replace right as well
      right.str = '\uFFFD' + right.str.slice(1);
    }
    return right
  }

  /**
   * Adjacent strings always merge by concatenation.
   *
   * @param {ContentString} right
   * @return {boolean}
   */
  mergeWith (right) {
    this.str += right.str;
    return true
  }

  /**
   * @param {Transaction} transaction
   * @param {Item} item
   */
  integrate (transaction, item) {}
  /**
   * @param {Transaction} transaction
   */
  delete (transaction) {}
  /**
   * @param {StructStore} store
   */
  gc (store) {}
  /**
   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
   * @param {number} offset
   */
  write (encoder, offset) {
    encoder.writeString(offset === 0 ? this.str : this.str.slice(offset));
  }

  /**
   * @return {number} Content-type ref used in the binary encoding.
   */
  getRef () {
    return 4
  }
}
|
||
|
||
/**
 * Reads a ContentString payload written by ContentString.write.
 *
 * @private
 *
 * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
 * @return {ContentString}
 */
const readContentString = decoder => {
  const str = decoder.readString();
  return new ContentString(str)
};
|
||
|
||
/**
 * Lookup table mapping a type-ref id (written by each type's _write) to the
 * function that reads that type back from an update decoder. The array index
 * must match the *RefID constants below — both are part of the binary
 * encoding and must never be reordered.
 *
 * @type {Array<function(UpdateDecoderV1 | UpdateDecoderV2):AbstractType<any>>}
 * @private
 */
const typeRefs = [
  readYArray,
  readYMap,
  readYText,
  readYXmlElement,
  readYXmlFragment,
  readYXmlHook,
  readYXmlText
];

// Indices into `typeRefs` (stable wire-format values).
const YArrayRefID = 0;
const YMapRefID = 1;
const YTextRefID = 2;
const YXmlElementRefID = 3;
const YXmlFragmentRefID = 4;
const YXmlHookRefID = 5;
const YXmlTextRefID = 6;
|
||
|
||
/**
|
||
* @private
|
||
*/
|
||
class ContentType {
|
||
/**
|
||
* @param {AbstractType<any>} type
|
||
*/
|
||
constructor (type) {
|
||
/**
|
||
* @type {AbstractType<any>}
|
||
*/
|
||
this.type = type;
|
||
}
|
||
|
||
/**
|
||
* @return {number}
|
||
*/
|
||
getLength () {
|
||
return 1
|
||
}
|
||
|
||
/**
|
||
* @return {Array<any>}
|
||
*/
|
||
getContent () {
|
||
return [this.type]
|
||
}
|
||
|
||
/**
|
||
* @return {boolean}
|
||
*/
|
||
isCountable () {
|
||
return true
|
||
}
|
||
|
||
/**
|
||
* @return {ContentType}
|
||
*/
|
||
copy () {
|
||
return new ContentType(this.type._copy())
|
||
}
|
||
|
||
/**
|
||
* @param {number} offset
|
||
* @return {ContentType}
|
||
*/
|
||
splice (offset) {
|
||
throw methodUnimplemented()
|
||
}
|
||
|
||
/**
|
||
* @param {ContentType} right
|
||
* @return {boolean}
|
||
*/
|
||
mergeWith (right) {
|
||
return false
|
||
}
|
||
|
||
/**
|
||
* @param {Transaction} transaction
|
||
* @param {Item} item
|
||
*/
|
||
integrate (transaction, item) {
|
||
this.type._integrate(transaction.doc, item);
|
||
}
|
||
|
||
/**
|
||
* @param {Transaction} transaction
|
||
*/
|
||
delete (transaction) {
|
||
let item = this.type._start;
|
||
while (item !== null) {
|
||
if (!item.deleted) {
|
||
item.delete(transaction);
|
||
} else {
|
||
// This will be gc'd later and we want to merge it if possible
|
||
// We try to merge all deleted items after each transaction,
|
||
// but we have no knowledge about that this needs to be merged
|
||
// since it is not in transaction.ds. Hence we add it to transaction._mergeStructs
|
||
transaction._mergeStructs.push(item);
|
||
}
|
||
item = item.right;
|
||
}
|
||
this.type._map.forEach(item => {
|
||
if (!item.deleted) {
|
||
item.delete(transaction);
|
||
} else {
|
||
// same as above
|
||
transaction._mergeStructs.push(item);
|
||
}
|
||
});
|
||
transaction.changed.delete(this.type);
|
||
}
|
||
|
||
/**
|
||
* @param {StructStore} store
|
||
*/
|
||
gc (store) {
|
||
let item = this.type._start;
|
||
while (item !== null) {
|
||
item.gc(store, true);
|
||
item = item.right;
|
||
}
|
||
this.type._start = null;
|
||
this.type._map.forEach(/** @param {Item | null} item */ (item) => {
|
||
while (item !== null) {
|
||
item.gc(store, true);
|
||
item = item.left;
|
||
}
|
||
});
|
||
this.type._map = new Map();
|
||
}
|
||
|
||
/**
|
||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
|
||
* @param {number} offset
|
||
*/
|
||
write (encoder, offset) {
|
||
this.type._write(encoder);
|
||
}
|
||
|
||
/**
|
||
* @return {number}
|
||
*/
|
||
getRef () {
|
||
return 7
|
||
}
|
||
}
|
||
|
||
/**
 * Reads a nested shared type by dispatching on the type-ref id to the
 * matching reader in `typeRefs`.
 *
 * @private
 *
 * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
 * @return {ContentType}
 */
const readContentType = decoder => {
  const typeRef = decoder.readTypeRef();
  return new ContentType(typeRefs[typeRef](decoder))
};
|
||
|
||
/**
 * Follows the chain of `redone` pointers starting at `id`, returning the most
 * recent redo of that item together with the clock offset (`diff`) of the
 * requested position inside the found item.
 *
 * @todo This should return several items
 *
 * @param {StructStore} store
 * @param {ID} id
 * @return {{item:Item, diff:number}}
 */
const followRedone = (store, id) => {
  /**
   * @type {ID|null}
   */
  let nextID = id;
  let diff = 0;
  let item;
  do {
    if (diff > 0) {
      // Keep pointing at the same logical position within the (possibly split) item.
      nextID = createID(nextID.client, nextID.clock + diff);
    }
    item = getItem(store, nextID);
    diff = nextID.clock - item.id.clock;
    nextID = item.redone;
  } while (nextID !== null && item instanceof Item$1)
  return {
    item, diff
  }
};
|
||
|
||
/**
 * Make sure that neither item nor any of its parents is ever deleted.
 *
 * This property does not persist when storing it into a database or when
 * sending it to other peers.
 *
 * @param {Item|null} item
 * @param {boolean} keep
 */
const keepItem = (item, keep) => {
  // Walk up the parent chain; stop as soon as an ancestor already carries the
  // requested flag (everything above it is guaranteed to match as well).
  for (let cur = item; cur !== null && cur.keep !== keep; cur = /** @type {AbstractType<any>} */ (cur.parent)._item) {
    cur.keep = keep;
  }
};
|
||
|
||
/**
 * Split leftItem into two items at clock offset `diff`. The left item keeps
 * the first `diff` units; the returned right item carries the rest and is
 * linked into the item list (but NOT added to the StructStore here).
 *
 * @param {Transaction} transaction
 * @param {Item} leftItem
 * @param {number} diff
 * @return {Item}
 *
 * @function
 * @private
 */
const splitItem = (transaction, leftItem, diff) => {
  // create rightItem
  const { client, clock } = leftItem.id;
  const rightItem = new Item$1(
    createID(client, clock + diff),
    leftItem,
    // origin of the right half is the last unit of the left half
    createID(client, clock + diff - 1),
    leftItem.right,
    leftItem.rightOrigin,
    leftItem.parent,
    leftItem.parentSub,
    leftItem.content.splice(diff)
  );
  // Both halves must agree on deleted/keep/redone state.
  if (leftItem.deleted) {
    rightItem.markDeleted();
  }
  if (leftItem.keep) {
    rightItem.keep = true;
  }
  if (leftItem.redone !== null) {
    rightItem.redone = createID(leftItem.redone.client, leftItem.redone.clock + diff);
  }
  // update left (do not set leftItem.rightOrigin as it will lead to problems when syncing)
  leftItem.right = rightItem;
  // update right
  if (rightItem.right !== null) {
    rightItem.right.left = rightItem;
  }
  // right is more specific.
  transaction._mergeStructs.push(rightItem);
  // update parent._map
  if (rightItem.parentSub !== null && rightItem.right === null) {
    /** @type {AbstractType<any>} */ (rightItem.parent)._map.set(rightItem.parentSub, rightItem);
  }
  leftItem.length = diff;
  return rightItem
};
|
||
|
||
/**
 * Redoes the effect of this operation: creates a fresh Item with a copy of
 * `item`'s content at the position the item occupied before it was deleted.
 * Returns null when the redo is impossible (e.g. the parent cannot be redone,
 * or a concurrent map change conflicts).
 *
 * @param {Transaction} transaction The Yjs instance.
 * @param {Item} item
 * @param {Set<Item>} redoitems
 * @param {DeleteSet} itemsToDelete
 * @param {boolean} ignoreRemoteMapChanges
 *
 * @return {Item|null}
 *
 * @private
 */
const redoItem = (transaction, item, redoitems, itemsToDelete, ignoreRemoteMapChanges) => {
  const doc = transaction.doc;
  const store = doc.store;
  const ownClientID = doc.clientID;
  const redone = item.redone;
  if (redone !== null) {
    // Already redone — reuse the existing redo target.
    return getItemCleanStart(transaction, redone)
  }
  let parentItem = /** @type {AbstractType<any>} */ (item.parent)._item;
  /**
   * @type {Item|null}
   */
  let left = null;
  /**
   * @type {Item|null}
   */
  let right;
  // make sure that parent is redone
  if (parentItem !== null && parentItem.deleted === true) {
    // try to undo parent if it will be undone anyway
    if (parentItem.redone === null && (!redoitems.has(parentItem) || redoItem(transaction, parentItem, redoitems, itemsToDelete, ignoreRemoteMapChanges) === null)) {
      return null
    }
    while (parentItem.redone !== null) {
      parentItem = getItemCleanStart(transaction, parentItem.redone);
    }
  }
  const parentType = parentItem === null ? /** @type {AbstractType<any>} */ (item.parent) : /** @type {ContentType} */ (parentItem.content).type;

  if (item.parentSub === null) {
    // Is an array item. Insert at the old position
    left = item.left;
    right = item;
    // find next cloned_redo items
    while (left !== null) {
      /**
       * @type {Item|null}
       */
      let leftTrace = left;
      // trace redone until parent matches
      while (leftTrace !== null && /** @type {AbstractType<any>} */ (leftTrace.parent)._item !== parentItem) {
        leftTrace = leftTrace.redone === null ? null : getItemCleanStart(transaction, leftTrace.redone);
      }
      if (leftTrace !== null && /** @type {AbstractType<any>} */ (leftTrace.parent)._item === parentItem) {
        left = leftTrace;
        break
      }
      left = left.left;
    }
    while (right !== null) {
      /**
       * @type {Item|null}
       */
      let rightTrace = right;
      // trace redone until parent matches
      while (rightTrace !== null && /** @type {AbstractType<any>} */ (rightTrace.parent)._item !== parentItem) {
        rightTrace = rightTrace.redone === null ? null : getItemCleanStart(transaction, rightTrace.redone);
      }
      if (rightTrace !== null && /** @type {AbstractType<any>} */ (rightTrace.parent)._item === parentItem) {
        right = rightTrace;
        break
      }
      right = right.right;
    }
  } else {
    // Map item: the redone item becomes the newest value for parentSub.
    right = null;
    if (item.right && !ignoreRemoteMapChanges) {
      left = item;
      // Iterate right while right is in itemsToDelete
      // If it is intended to delete right while item is redone, we can expect that item should replace right.
      while (left !== null && left.right !== null && isDeleted(itemsToDelete, left.right.id)) {
        left = left.right;
      }
      // follow redone
      // trace redone until parent matches
      while (left !== null && left.redone !== null) {
        left = getItemCleanStart(transaction, left.redone);
      }
      if (left && left.right !== null) {
        // It is not possible to redo this item because it conflicts with a
        // change from another client
        return null
      }
    } else {
      left = parentType._map.get(item.parentSub) || null;
    }
  }
  const nextClock = getState(store, ownClientID);
  const nextId = createID(ownClientID, nextClock);
  const redoneItem = new Item$1(
    nextId,
    left, left && left.lastId,
    right, right && right.id,
    parentType,
    item.parentSub,
    item.content.copy()
  );
  item.redone = nextId;
  keepItem(redoneItem, true);
  redoneItem.integrate(transaction, 0);
  return redoneItem
};
|
||
|
||
/**
 * A struct representing a piece of shared content together with the metadata
 * (origins, parent, deletion/keep flags) needed to integrate it into a Yjs
 * document. Items form a doubly linked list per parent type.
 */
class Item$1 extends AbstractStruct {
  /**
   * @param {ID} id
   * @param {Item | null} left
   * @param {ID | null} origin
   * @param {Item | null} right
   * @param {ID | null} rightOrigin
   * @param {AbstractType<any>|ID|null} parent Is a type if integrated, is null if it is possible to copy parent from left or right, is ID before integration to search for it.
   * @param {string | null} parentSub
   * @param {AbstractContent} content
   */
  constructor (id, left, origin, right, rightOrigin, parent, parentSub, content) {
    super(id, content.getLength());
    /**
     * The item that was originally to the left of this item.
     * @type {ID | null}
     */
    this.origin = origin;
    /**
     * The item that is currently to the left of this item.
     * @type {Item | null}
     */
    this.left = left;
    /**
     * The item that is currently to the right of this item.
     * @type {Item | null}
     */
    this.right = right;
    /**
     * The item that was originally to the right of this item.
     * @type {ID | null}
     */
    this.rightOrigin = rightOrigin;
    /**
     * @type {AbstractType<any>|ID|null}
     */
    this.parent = parent;
    /**
     * If the parent refers to this item with some kind of key (e.g. YMap, the
     * key is specified here. The key is then used to refer to the list in which
     * to insert this item. If `parentSub = null` type._start is the list in
     * which to insert to. Otherwise it is `parent._map`.
     * @type {String | null}
     */
    this.parentSub = parentSub;
    /**
     * If this type's effect is redone this type refers to the type that undid
     * this operation.
     * @type {ID | null}
     */
    this.redone = null;
    /**
     * @type {AbstractContent}
     */
    this.content = content;
    /**
     * bit1: keep
     * bit2: countable
     * bit3: deleted
     * bit4: mark - mark node as fast-search-marker
     * @type {number} byte
     */
    this.info = this.content.isCountable() ? BIT2 : 0;
  }

  /**
   * This is used to mark the item as an indexed fast-search marker
   *
   * @type {boolean}
   */
  set marker (isMarked) {
    // Flip BIT4 only when the flag actually changes.
    if (((this.info & BIT4) > 0) !== isMarked) {
      this.info ^= BIT4;
    }
  }

  get marker () {
    return (this.info & BIT4) > 0
  }

  /**
   * If true, do not garbage collect this Item.
   */
  get keep () {
    return (this.info & BIT1) > 0
  }

  set keep (doKeep) {
    if (this.keep !== doKeep) {
      this.info ^= BIT1;
    }
  }

  get countable () {
    return (this.info & BIT2) > 0
  }

  /**
   * Whether this item was deleted or not.
   * @type {Boolean}
   */
  get deleted () {
    return (this.info & BIT3) > 0
  }

  set deleted (doDelete) {
    if (this.deleted !== doDelete) {
      this.info ^= BIT3;
    }
  }

  markDeleted () {
    this.info |= BIT3;
  }

  /**
   * Return the creator clientID of the missing op or define missing items and return null.
   *
   * Phase 1 reports a clientID whose structs we still need; phase 2 (once
   * nothing is missing) resolves origin/rightOrigin/parent IDs into concrete
   * struct references.
   *
   * @param {Transaction} transaction
   * @param {StructStore} store
   * @return {null | number}
   */
  getMissing (transaction, store) {
    if (this.origin && this.origin.client !== this.id.client && this.origin.clock >= getState(store, this.origin.client)) {
      return this.origin.client
    }
    if (this.rightOrigin && this.rightOrigin.client !== this.id.client && this.rightOrigin.clock >= getState(store, this.rightOrigin.client)) {
      return this.rightOrigin.client
    }
    if (this.parent && this.parent.constructor === ID && this.id.client !== this.parent.client && this.parent.clock >= getState(store, this.parent.client)) {
      return this.parent.client
    }

    // We have all missing ids, now find the items

    if (this.origin) {
      this.left = getItemCleanEnd(transaction, store, this.origin);
      this.origin = this.left.lastId;
    }
    if (this.rightOrigin) {
      this.right = getItemCleanStart(transaction, this.rightOrigin);
      this.rightOrigin = this.right.id;
    }
    if ((this.left && this.left.constructor === GC) || (this.right && this.right.constructor === GC)) {
      this.parent = null;
    }
    // only set parent if this shouldn't be garbage collected
    if (!this.parent) {
      // Copy the parent from an integrated neighbor when available.
      if (this.left && this.left.constructor === Item$1) {
        this.parent = this.left.parent;
        this.parentSub = this.left.parentSub;
      }
      if (this.right && this.right.constructor === Item$1) {
        this.parent = this.right.parent;
        this.parentSub = this.right.parentSub;
      }
    } else if (this.parent.constructor === ID) {
      const parentItem = getItem(store, this.parent);
      if (parentItem.constructor === GC) {
        this.parent = null;
      } else {
        this.parent = /** @type {ContentType} */ (parentItem.content).type;
      }
    }
    return null
  }

  /**
   * Integrate this item into the document: resolve insertion conflicts with
   * concurrently inserted siblings, splice it into the linked list, and update
   * parent bookkeeping. With `offset > 0` only the remainder after `offset`
   * is integrated (the head was already present).
   *
   * @param {Transaction} transaction
   * @param {number} offset
   */
  integrate (transaction, offset) {
    if (offset > 0) {
      this.id.clock += offset;
      this.left = getItemCleanEnd(transaction, transaction.doc.store, createID(this.id.client, this.id.clock - 1));
      this.origin = this.left.lastId;
      this.content = this.content.splice(offset);
      this.length -= offset;
    }

    if (this.parent) {
      if ((!this.left && (!this.right || this.right.left !== null)) || (this.left && this.left.right !== this.right)) {
        /**
         * @type {Item|null}
         */
        let left = this.left;

        /**
         * @type {Item|null}
         */
        let o;
        // set o to the first conflicting item
        if (left !== null) {
          o = left.right;
        } else if (this.parentSub !== null) {
          o = /** @type {AbstractType<any>} */ (this.parent)._map.get(this.parentSub) || null;
          while (o !== null && o.left !== null) {
            o = o.left;
          }
        } else {
          o = /** @type {AbstractType<any>} */ (this.parent)._start;
        }
        // TODO: use something like DeleteSet here (a tree implementation would be best)
        // @todo use global set definitions
        /**
         * @type {Set<Item>}
         */
        const conflictingItems = new Set();
        /**
         * @type {Set<Item>}
         */
        const itemsBeforeOrigin = new Set();
        // Let c in conflictingItems, b in itemsBeforeOrigin
        // ***{origin}bbbb{this}{c,b}{c,b}{o}***
        // Note that conflictingItems is a subset of itemsBeforeOrigin
        while (o !== null && o !== this.right) {
          itemsBeforeOrigin.add(o);
          conflictingItems.add(o);
          if (compareIDs(this.origin, o.origin)) {
            // case 1
            if (o.id.client < this.id.client) {
              left = o;
              conflictingItems.clear();
            } else if (compareIDs(this.rightOrigin, o.rightOrigin)) {
              // this and o are conflicting and point to the same integration points. The id decides which item comes first.
              // Since this is to the left of o, we can break here
              break
            } // else, o might be integrated before an item that this conflicts with. If so, we will find it in the next iterations
          } else if (o.origin !== null && itemsBeforeOrigin.has(getItem(transaction.doc.store, o.origin))) { // use getItem instead of getItemCleanEnd because we don't want / need to split items.
            // case 2
            if (!conflictingItems.has(getItem(transaction.doc.store, o.origin))) {
              left = o;
              conflictingItems.clear();
            }
          } else {
            break
          }
          o = o.right;
        }
        this.left = left;
      }
      // reconnect left/right + update parent map/start if necessary
      if (this.left !== null) {
        const right = this.left.right;
        this.right = right;
        this.left.right = this;
      } else {
        let r;
        if (this.parentSub !== null) {
          r = /** @type {AbstractType<any>} */ (this.parent)._map.get(this.parentSub) || null;
          while (r !== null && r.left !== null) {
            r = r.left;
          }
        } else {
          r = /** @type {AbstractType<any>} */ (this.parent)._start
          ;/** @type {AbstractType<any>} */ (this.parent)._start = this;
        }
        this.right = r;
      }
      if (this.right !== null) {
        this.right.left = this;
      } else if (this.parentSub !== null) {
        // set as current parent value if right === null and this is parentSub
        /** @type {AbstractType<any>} */ (this.parent)._map.set(this.parentSub, this);
        if (this.left !== null) {
          // this is the current attribute value of parent. delete right
          this.left.delete(transaction);
        }
      }
      // adjust length of parent
      if (this.parentSub === null && this.countable && !this.deleted) {
        /** @type {AbstractType<any>} */ (this.parent)._length += this.length;
      }
      addStruct(transaction.doc.store, this);
      this.content.integrate(transaction, this);
      // add parent to transaction.changed
      addChangedTypeToTransaction(transaction, /** @type {AbstractType<any>} */ (this.parent), this.parentSub);
      if ((/** @type {AbstractType<any>} */ (this.parent)._item !== null && /** @type {AbstractType<any>} */ (this.parent)._item.deleted) || (this.parentSub !== null && this.right !== null)) {
        // delete if parent is deleted or if this is not the current attribute value of parent
        this.delete(transaction);
      }
    } else {
      // parent is not defined. Integrate GC struct instead
      new GC(this.id, this.length).integrate(transaction, 0);
    }
  }

  /**
   * Returns the next non-deleted item
   */
  get next () {
    let n = this.right;
    while (n !== null && n.deleted) {
      n = n.right;
    }
    return n
  }

  /**
   * Returns the previous non-deleted item
   */
  get prev () {
    let n = this.left;
    while (n !== null && n.deleted) {
      n = n.left;
    }
    return n
  }

  /**
   * Computes the last content address of this Item.
   */
  get lastId () {
    // allocating ids is pretty costly because of the amount of ids created, so we try to reuse whenever possible
    return this.length === 1 ? this.id : createID(this.id.client, this.id.clock + this.length - 1)
  }

  /**
   * Try to merge two items
   *
   * Only adjacent items from the same client with compatible origins, the
   * same deletion state, no redo links, and mergeable content can merge.
   *
   * @param {Item} right
   * @return {boolean}
   */
  mergeWith (right) {
    if (
      this.constructor === right.constructor &&
      compareIDs(right.origin, this.lastId) &&
      this.right === right &&
      compareIDs(this.rightOrigin, right.rightOrigin) &&
      this.id.client === right.id.client &&
      this.id.clock + this.length === right.id.clock &&
      this.deleted === right.deleted &&
      this.redone === null &&
      right.redone === null &&
      this.content.constructor === right.content.constructor &&
      this.content.mergeWith(right.content)
    ) {
      const searchMarker = /** @type {AbstractType<any>} */ (this.parent)._searchMarker;
      if (searchMarker) {
        searchMarker.forEach(marker => {
          if (marker.p === right) {
            // right is going to be "forgotten" so we need to update the marker
            marker.p = this;
            // adjust marker index
            if (!this.deleted && this.countable) {
              marker.index -= this.length;
            }
          }
        });
      }
      if (right.keep) {
        this.keep = true;
      }
      this.right = right.right;
      if (this.right !== null) {
        this.right.left = this;
      }
      this.length += right.length;
      return true
    }
    return false
  }

  /**
   * Mark this Item as deleted.
   *
   * @param {Transaction} transaction
   */
  delete (transaction) {
    if (!this.deleted) {
      const parent = /** @type {AbstractType<any>} */ (this.parent);
      // adjust the length of parent
      if (this.countable && this.parentSub === null) {
        parent._length -= this.length;
      }
      this.markDeleted();
      addToDeleteSet(transaction.deleteSet, this.id.client, this.id.clock, this.length);
      addChangedTypeToTransaction(transaction, parent, this.parentSub);
      this.content.delete(transaction);
    }
  }

  /**
   * Garbage-collect this (already deleted) item: free its content and either
   * replace it with a GC struct or keep a ContentDeleted placeholder.
   *
   * @param {StructStore} store
   * @param {boolean} parentGCd
   */
  gc (store, parentGCd) {
    if (!this.deleted) {
      throw unexpectedCase()
    }
    this.content.gc(store);
    if (parentGCd) {
      replaceStruct(store, this, new GC(this.id, this.length));
    } else {
      this.content = new ContentDeleted(this.length);
    }
  }

  /**
   * Transform the properties of this type to binary and write it to an
   * BinaryEncoder.
   *
   * This is called when this Item is sent to a remote peer.
   *
   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder The encoder to write data to.
   * @param {number} offset
   */
  write (encoder, offset) {
    const origin = offset > 0 ? createID(this.id.client, this.id.clock + offset - 1) : this.origin;
    const rightOrigin = this.rightOrigin;
    const parentSub = this.parentSub;
    // Pack the content ref (lower 5 bits) with presence flags for
    // origin / rightOrigin / parentSub.
    const info = (this.content.getRef() & BITS5) |
      (origin === null ? 0 : BIT8) | // origin is defined
      (rightOrigin === null ? 0 : BIT7) | // right origin is defined
      (parentSub === null ? 0 : BIT6); // parentSub is non-null
    encoder.writeInfo(info);
    if (origin !== null) {
      encoder.writeLeftID(origin);
    }
    if (rightOrigin !== null) {
      encoder.writeRightID(rightOrigin);
    }
    if (origin === null && rightOrigin === null) {
      // Without origins the parent must be encoded explicitly.
      const parent = /** @type {AbstractType<any>} */ (this.parent);
      if (parent._item !== undefined) {
        const parentItem = parent._item;
        if (parentItem === null) {
          // parent type on y._map
          // find the correct key
          const ykey = findRootTypeKey(parent);
          encoder.writeParentInfo(true); // write parentYKey
          encoder.writeString(ykey);
        } else {
          encoder.writeParentInfo(false); // write parent id
          encoder.writeLeftID(parentItem.id);
        }
      } else if (parent.constructor === String) { // this edge case was added by differential updates
        encoder.writeParentInfo(true); // write parentYKey
        encoder.writeString(parent);
      } else if (parent.constructor === ID) {
        encoder.writeParentInfo(false); // write parent id
        encoder.writeLeftID(parent);
      } else {
        unexpectedCase();
      }
      if (parentSub !== null) {
        encoder.writeString(parentSub);
      }
    }
    this.content.write(encoder, offset);
  }
}
|
||
|
||
/**
 * Read the content of an Item from `decoder`, dispatching on the content-type
 * ref number stored in the lower 5 bits of the info byte.
 *
 * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
 * @param {number} info
 */
const readItemContent = (decoder, info) => contentRefs[info & BITS5](decoder);

/**
 * A lookup map for reading Item content.
 *
 * The array index is the content-type ref number; slots 0 (GC) and 10 (Skip)
 * are invalid here because those structs carry no Item content.
 *
 * @type {Array<function(UpdateDecoderV1 | UpdateDecoderV2):AbstractContent>}
 */
const contentRefs = [
  () => { unexpectedCase(); }, // GC is not ItemContent
  readContentDeleted, // 1
  readContentJSON, // 2
  readContentBinary, // 3
  readContentString, // 4
  readContentEmbed, // 5
  readContentFormat, // 6
  readContentType, // 7
  readContentAny, // 8
  readContentDoc, // 9
  () => { unexpectedCase(); } // 10 - Skip is not ItemContent
];

// Struct-ref number that identifies a Skip struct on the wire.
const structSkipRefNumber = 10;
|
||
|
||
/**
 * Placeholder struct that marks a range of struct ids that is intentionally
 * not present in an update. Skips are always treated as deleted and are never
 * integrated into a document.
 *
 * @private
 */
class Skip extends AbstractStruct {
  /** A Skip is always considered deleted. */
  get deleted () {
    return true
  }

  /** Deleting a Skip is a no-op. */
  delete () {}

  /**
   * Merge an adjacent Skip into this one by extending the covered range.
   *
   * @param {Skip} right
   * @return {boolean} true when the merge happened
   */
  mergeWith (right) {
    if (right.constructor !== this.constructor) {
      return false
    }
    this.length += right.length;
    return true
  }

  /**
   * @param {Transaction} transaction
   * @param {number} offset
   */
  integrate (transaction, offset) {
    // skip structs cannot be integrated
    unexpectedCase();
  }

  /**
   * Encode this Skip. The length is written as a plain VarUint because Skips
   * can't make use of predictable length-encoding.
   *
   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
   * @param {number} offset
   */
  write (encoder, offset) {
    encoder.writeInfo(structSkipRefNumber);
    writeVarUint(encoder.restEncoder, this.length - offset);
  }

  /**
   * A Skip never has missing dependencies.
   *
   * @param {Transaction} transaction
   * @param {StructStore} store
   * @return {null | number}
   */
  getMissing (transaction, store) {
    return null
  }
}
|
||
|
||
/** eslint-env browser */

// Resolve the global object across environments (modern runtimes, browsers,
// node) without assuming any particular one exists.
const glo = /** @type {any} */ ((() => {
  if (typeof globalThis !== 'undefined') {
    return globalThis
  }
  if (typeof window !== 'undefined') {
    return window
  }
  // @ts-ignore
  if (typeof global !== 'undefined') {
    return global
  }
  return {}
})());

// Marker property used to detect that Yjs was loaded more than once.
const importIdentifier = '__ $YJS$ __';

if (glo[importIdentifier] === true) {
  /**
   * Dear reader of this message. Please take this seriously.
   *
   * If you see this message, make sure that you only import one version of Yjs. In many cases,
   * your package manager installs two versions of Yjs that are used by different packages within your project.
   * Another reason for this message is that some parts of your project use the commonjs version of Yjs
   * and others use the EcmaScript version of Yjs.
   *
   * This often leads to issues that are hard to debug. We often need to perform constructor checks,
   * e.g. `struct instanceof GC`. If you imported different versions of Yjs, it is impossible for us to
   * do the constructor checks anymore - which might break the CRDT algorithm.
   *
   * https://github.com/yjs/yjs/issues/438
   */
  console.error('Yjs was already imported. This breaks constructor checks and will lead to issues! - https://github.com/yjs/yjs/issues/438');
}
glo[importIdentifier] = true;
|
||
|
||
// Placeholder for node's legacy `domain` module; never assigned in this shim.
var domain;

// This constructor is used to store event handlers. Instantiating this is
// faster than explicitly calling `Object.create(null)` to get a "clean" empty
// object (tested with v8 v4.9).
function EventHandlers() {}
EventHandlers.prototype = Object.create(null);

// Browser shim of node's events.EventEmitter.
function EventEmitter$1() {
  EventEmitter$1.init.call(this);
}

// nodejs oddity
// require('events') === require('events').EventEmitter
EventEmitter$1.EventEmitter = EventEmitter$1;

EventEmitter$1.usingDomains = false;

EventEmitter$1.prototype.domain = undefined;
EventEmitter$1.prototype._events = undefined;
EventEmitter$1.prototype._maxListeners = undefined;

// By default EventEmitters will print a warning if more than 10 listeners are
// added to it. This is a useful default which helps finding memory leaks.
EventEmitter$1.defaultMaxListeners = 10;

// (Re)initialize per-instance listener storage. Safe to call on a subclass
// instance that inherited `_events` from a prototype.
EventEmitter$1.init = function() {
  this.domain = null;
  if (EventEmitter$1.usingDomains) {
    // if there is an active domain, then attach to it.
    // NOTE(review): `domain` is never assigned here and `usingDomains` is
    // always false in this shim, so this branch looks dead — confirm before
    // relying on it.
    if (domain.active ) ;
  }

  // Only create fresh storage if none exists or it is shared via the prototype.
  if (!this._events || this._events === Object.getPrototypeOf(this)._events) {
    this._events = new EventHandlers();
    this._eventsCount = 0;
  }

  this._maxListeners = this._maxListeners || undefined;
};
|
||
|
||
// Obviously not all Emitters should be limited to 10. This function allows
// that to be increased. Set to zero for unlimited.
EventEmitter$1.prototype.setMaxListeners = function setMaxListeners(n) {
  if (typeof n !== 'number' || n < 0 || isNaN(n))
    throw new TypeError('"n" argument must be a positive number');
  this._maxListeners = n;
  return this;
};

// Resolve the effective listener limit for an emitter: the per-instance value
// when set, otherwise the class-wide default.
function $getMaxListeners(that) {
  if (that._maxListeners === undefined)
    return EventEmitter$1.defaultMaxListeners;
  return that._maxListeners;
}

EventEmitter$1.prototype.getMaxListeners = function getMaxListeners() {
  return $getMaxListeners(this);
};
|
||
|
||
// These standalone emit* functions are used to optimize calling of event
// handlers for fast cases because emit() itself often has a variable number of
// arguments and can be deoptimized because of that. These functions always have
// the same number of arguments and thus do not get deoptimized, so the code
// inside them can execute faster.

// Invoke a handler (single function or array of listeners) with no arguments.
function emitNone(handler, isFn, self) {
  if (isFn) {
    handler.call(self);
    return;
  }
  // Snapshot the listener array so mutations during emit don't affect this run.
  var snapshot = arrayClone(handler, handler.length);
  for (var idx = 0; idx < snapshot.length; ++idx) {
    snapshot[idx].call(self);
  }
}

// Invoke a handler with exactly one argument.
function emitOne(handler, isFn, self, arg1) {
  if (isFn) {
    handler.call(self, arg1);
    return;
  }
  var snapshot = arrayClone(handler, handler.length);
  for (var idx = 0; idx < snapshot.length; ++idx) {
    snapshot[idx].call(self, arg1);
  }
}

// Invoke a handler with exactly two arguments.
function emitTwo(handler, isFn, self, arg1, arg2) {
  if (isFn) {
    handler.call(self, arg1, arg2);
    return;
  }
  var snapshot = arrayClone(handler, handler.length);
  for (var idx = 0; idx < snapshot.length; ++idx) {
    snapshot[idx].call(self, arg1, arg2);
  }
}

// Invoke a handler with exactly three arguments.
function emitThree(handler, isFn, self, arg1, arg2, arg3) {
  if (isFn) {
    handler.call(self, arg1, arg2, arg3);
    return;
  }
  var snapshot = arrayClone(handler, handler.length);
  for (var idx = 0; idx < snapshot.length; ++idx) {
    snapshot[idx].call(self, arg1, arg2, arg3);
  }
}

// Invoke a handler with an arbitrary argument array (slow path).
function emitMany(handler, isFn, self, args) {
  if (isFn) {
    handler.apply(self, args);
    return;
  }
  var snapshot = arrayClone(handler, handler.length);
  for (var idx = 0; idx < snapshot.length; ++idx) {
    snapshot[idx].apply(self, args);
  }
}
|
||
|
||
// Emit an event: dispatch `type` with any extra arguments to its listeners.
// Returns true if the event had listeners, false otherwise. An 'error' event
// with no listener (and no domain) throws instead.
EventEmitter$1.prototype.emit = function emit(type) {
  var er, handler, len, args, i, events, domain;
  var doError = (type === 'error');

  events = this._events;
  if (events)
    // Only treat 'error' specially when no error listener is registered.
    doError = (doError && events.error == null);
  else if (!doError)
    return false;

  domain = this.domain;

  // If there is no 'error' event listener then throw.
  if (doError) {
    er = arguments[1];
    if (domain) {
      // Route the error through the active domain instead of throwing.
      if (!er)
        er = new Error('Uncaught, unspecified "error" event');
      er.domainEmitter = this;
      er.domain = domain;
      er.domainThrown = false;
      domain.emit('error', er);
    } else if (er instanceof Error) {
      throw er; // Unhandled 'error' event
    } else {
      // At least give some kind of context to the user
      var err = new Error('Uncaught, unspecified "error" event. (' + er + ')');
      err.context = er;
      throw err;
    }
    return false;
  }

  handler = events[type];

  if (!handler)
    return false;

  var isFn = typeof handler === 'function';
  len = arguments.length;
  // Dispatch through fixed-arity helpers for the common argument counts to
  // avoid deoptimization of the variadic path.
  switch (len) {
    // fast cases
    case 1:
      emitNone(handler, isFn, this);
      break;
    case 2:
      emitOne(handler, isFn, this, arguments[1]);
      break;
    case 3:
      emitTwo(handler, isFn, this, arguments[1], arguments[2]);
      break;
    case 4:
      emitThree(handler, isFn, this, arguments[1], arguments[2], arguments[3]);
      break;
    // slower
    default:
      args = new Array(len - 1);
      for (i = 1; i < len; i++)
        args[i - 1] = arguments[i];
      emitMany(handler, isFn, this, args);
  }

  return true;
};
|
||
|
||
// Shared implementation for addListener/prependListener. Stores a single
// function when there is one listener and upgrades to an array for more,
// emits 'newListener' before registering, and warns on suspected leaks.
function _addListener(target, type, listener, prepend) {
  var m;
  var events;
  var existing;

  if (typeof listener !== 'function')
    throw new TypeError('"listener" argument must be a function');

  events = target._events;
  if (!events) {
    events = target._events = new EventHandlers();
    target._eventsCount = 0;
  } else {
    // To avoid recursion in the case that type === "newListener"! Before
    // adding it to the listeners, first emit "newListener".
    if (events.newListener) {
      target.emit('newListener', type,
                  listener.listener ? listener.listener : listener);

      // Re-assign `events` because a newListener handler could have caused the
      // this._events to be assigned to a new object
      events = target._events;
    }
    existing = events[type];
  }

  if (!existing) {
    // Optimize the case of one listener. Don't need the extra array object.
    existing = events[type] = listener;
    ++target._eventsCount;
  } else {
    if (typeof existing === 'function') {
      // Adding the second element, need to change to array.
      existing = events[type] = prepend ? [listener, existing] :
                                          [existing, listener];
    } else {
      // If we've already got an array, just append.
      if (prepend) {
        existing.unshift(listener);
      } else {
        existing.push(listener);
      }
    }

    // Check for listener leak
    if (!existing.warned) {
      m = $getMaxListeners(target);
      if (m && m > 0 && existing.length > m) {
        // Warn once per event type when the configured limit is exceeded.
        existing.warned = true;
        var w = new Error('Possible EventEmitter memory leak detected. ' +
                          existing.length + ' ' + type + ' listeners added. ' +
                          'Use emitter.setMaxListeners() to increase limit');
        w.name = 'MaxListenersExceededWarning';
        w.emitter = target;
        w.type = type;
        w.count = existing.length;
        emitWarning(w);
      }
    }
  }

  return target;
}
|
||
// Surface a warning object through the console, preferring console.warn
// when the environment provides it.
function emitWarning(e) {
  if (typeof console.warn === 'function') {
    console.warn(e);
  } else {
    console.log(e);
  }
}
|
||
// Append a listener for `type`. `on` is an alias for addListener.
EventEmitter$1.prototype.addListener = function addListener(type, listener) {
  return _addListener(this, type, listener, false);
};

EventEmitter$1.prototype.on = EventEmitter$1.prototype.addListener;

// Insert a listener at the front of the listener list for `type`.
EventEmitter$1.prototype.prependListener =
    function prependListener(type, listener) {
      return _addListener(this, type, listener, true);
    };
|
||
|
||
// Wrap `listener` so it fires at most once: the wrapper detaches itself on
// first invocation before delegating to the original listener.
function _onceWrap(target, type, listener) {
  let hasFired = false;
  function wrapped() {
    // Detach first so re-entrant emits cannot invoke the listener again.
    target.removeListener(type, wrapped);
    if (hasFired) {
      return;
    }
    hasFired = true;
    listener.apply(target, arguments);
  }
  // Expose the original so removeListener(type, listener) can still match.
  wrapped.listener = listener;
  return wrapped;
}
|
||
|
||
// Register a listener that is invoked at most once for `type`.
EventEmitter$1.prototype.once = function once(type, listener) {
  if (typeof listener !== 'function')
    throw new TypeError('"listener" argument must be a function');
  this.on(type, _onceWrap(this, type, listener));
  return this;
};

// Like `once`, but the wrapped listener is prepended to the listener list.
EventEmitter$1.prototype.prependOnceListener =
    function prependOnceListener(type, listener) {
      if (typeof listener !== 'function')
        throw new TypeError('"listener" argument must be a function');
      this.prependListener(type, _onceWrap(this, type, listener));
      return this;
    };
|
||
|
||
// emits a 'removeListener' event iff the listener was removed
// Handles both storage shapes: a single function, or an array of listeners
// (including `once` wrappers matched via their `.listener` property).
EventEmitter$1.prototype.removeListener =
    function removeListener(type, listener) {
      var list, events, position, i, originalListener;

      if (typeof listener !== 'function')
        throw new TypeError('"listener" argument must be a function');

      events = this._events;
      if (!events)
        return this;

      list = events[type];
      if (!list)
        return this;

      if (list === listener || (list.listener && list.listener === listener)) {
        // Single-function storage: drop the whole slot.
        if (--this._eventsCount === 0)
          this._events = new EventHandlers();
        else {
          delete events[type];
          if (events.removeListener)
            this.emit('removeListener', type, list.listener || listener);
        }
      } else if (typeof list !== 'function') {
        // Array storage: search backwards for the listener (or its wrapper).
        position = -1;

        for (i = list.length; i-- > 0;) {
          if (list[i] === listener ||
              (list[i].listener && list[i].listener === listener)) {
            originalListener = list[i].listener;
            position = i;
            break;
          }
        }

        if (position < 0)
          return this;

        if (list.length === 1) {
          list[0] = undefined;
          if (--this._eventsCount === 0) {
            this._events = new EventHandlers();
            return this;
          } else {
            delete events[type];
          }
        } else {
          spliceOne(list, position);
        }

        if (events.removeListener)
          this.emit('removeListener', type, originalListener || listener);
      }

      return this;
    };
|
||
|
||
// Remove all listeners, or those for the given `type`. Emits 'removeListener'
// for each removed listener when such a listener is registered.
EventEmitter$1.prototype.removeAllListeners =
    function removeAllListeners(type) {
      var listeners, events;

      events = this._events;
      if (!events)
        return this;

      // not listening for removeListener, no need to emit
      if (!events.removeListener) {
        if (arguments.length === 0) {
          this._events = new EventHandlers();
          this._eventsCount = 0;
        } else if (events[type]) {
          if (--this._eventsCount === 0)
            this._events = new EventHandlers();
          else
            delete events[type];
        }
        return this;
      }

      // emit removeListener for all listeners on all events
      if (arguments.length === 0) {
        var keys = Object.keys(events);
        for (var i = 0, key; i < keys.length; ++i) {
          key = keys[i];
          // 'removeListener' listeners are removed last so they observe the
          // removal of every other listener first.
          if (key === 'removeListener') continue;
          this.removeAllListeners(key);
        }
        this.removeAllListeners('removeListener');
        this._events = new EventHandlers();
        this._eventsCount = 0;
        return this;
      }

      listeners = events[type];

      if (typeof listeners === 'function') {
        this.removeListener(type, listeners);
      } else if (listeners) {
        // LIFO order
        do {
          this.removeListener(type, listeners[listeners.length - 1]);
        } while (listeners[0]);
      }

      return this;
    };
|
||
|
||
// Return a copy of the listener array for `type`, unwrapping any `once`
// wrappers back to the original listener functions.
EventEmitter$1.prototype.listeners = function listeners(type) {
  var evlistener;
  var ret;
  var events = this._events;

  if (!events)
    ret = [];
  else {
    evlistener = events[type];
    if (!evlistener)
      ret = [];
    else if (typeof evlistener === 'function')
      ret = [evlistener.listener || evlistener];
    else
      ret = unwrapListeners(evlistener);
  }

  return ret;
};

// Static helper: count listeners on any emitter, delegating to the emitter's
// own listenerCount method when it defines one.
EventEmitter$1.listenerCount = function(emitter, type) {
  if (typeof emitter.listenerCount === 'function') {
    return emitter.listenerCount(type);
  } else {
    return listenerCount.call(emitter, type);
  }
};

EventEmitter$1.prototype.listenerCount = listenerCount;
// Count listeners for `type` on `this` (1 for single-function storage,
// array length otherwise).
function listenerCount(type) {
  var events = this._events;

  if (events) {
    var evlistener = events[type];

    if (typeof evlistener === 'function') {
      return 1;
    } else if (evlistener) {
      return evlistener.length;
    }
  }

  return 0;
}

// List the event names that currently have listeners registered.
EventEmitter$1.prototype.eventNames = function eventNames() {
  return this._eventsCount > 0 ? Reflect.ownKeys(this._events) : [];
};
|
||
|
||
// About 1.5x faster than the two-arg version of Array#splice().
// Removes the element at `index` by shifting the tail left in place.
function spliceOne(list, index) {
  for (let k = index + 1; k < list.length; k += 1) {
    list[k - 1] = list[k];
  }
  list.pop();
}
|
||
|
||
// Copy the first `i` elements of `arr` into a fresh array.
function arrayClone(arr, i) {
  const cloned = new Array(i);
  for (let k = 0; k < i; k += 1) {
    cloned[k] = arr[k];
  }
  return cloned;
}
|
||
|
||
// Map an array of listeners to their original functions, replacing `once`
// wrappers (which carry a `.listener` property) with the wrapped listener.
function unwrapListeners(arr) {
  const unwrapped = new Array(arr.length);
  for (let i = 0; i < arr.length; ++i) {
    const entry = arr[i];
    unwrapped[i] = entry.listener || entry;
  }
  return unwrapped;
}
|
||
|
||
// Shim of node's util.inherits. Uses Object.create when available and falls
// back to a temporary-constructor chain on very old engines.
var inherits$1;
if (typeof Object.create === 'function') {
  inherits$1 = function inherits(ctor, superCtor) {
    // implementation from standard node.js 'util' module
    ctor.super_ = superCtor;
    ctor.prototype = Object.create(superCtor.prototype, {
      constructor: {
        value: ctor,
        enumerable: false,
        writable: true,
        configurable: true
      }
    });
  };
} else {
  inherits$1 = function inherits(ctor, superCtor) {
    ctor.super_ = superCtor;
    // Old-school prototype chaining via a throwaway constructor.
    var Intermediate = function () {};
    Intermediate.prototype = superCtor.prototype;
    ctor.prototype = new Intermediate();
    ctor.prototype.constructor = ctor;
  };
}
var inherits$2 = inherits$1;
|
||
|
||
var formatRegExp = /%[sdj%]/g;
// Shim of node's util.format: printf-style interpolation with %s, %d, %j and
// %% placeholders; extra arguments are appended, inspected when non-primitive.
function format(f) {
  if (!isString(f)) {
    // No format string: inspect every argument and join with spaces.
    var objects = [];
    for (var i = 0; i < arguments.length; i++) {
      objects.push(inspect$2(arguments[i]));
    }
    return objects.join(' ');
  }

  var i = 1;
  var args = arguments;
  var len = args.length;
  var str = String(f).replace(formatRegExp, function(x) {
    if (x === '%%') return '%';
    // More placeholders than arguments: leave the placeholder as-is.
    if (i >= len) return x;
    switch (x) {
      case '%s': return String(args[i++]);
      case '%d': return Number(args[i++]);
      case '%j':
        try {
          return JSON.stringify(args[i++]);
        } catch (_) {
          return '[Circular]';
        }
      default:
        return x;
    }
  });
  // Append any remaining arguments after the placeholders are consumed.
  for (var x = args[i]; i < len; x = args[++i]) {
    if (isNull(x) || !isObject(x)) {
      str += ' ' + x;
    } else {
      str += ' ' + inspect$2(x);
    }
  }
  return str;
}
|
||
|
||
// Mark that a method should not be used.
// Returns a modified function which warns once by default.
// If --no-deprecation is set, then it is a no-op.
function deprecate$1(fn, msg) {
  // Allow for deprecating things in the process of starting up: defer the
  // decision until the wrapper is first invoked.
  if (isUndefined(global$1.process)) {
    return function () {
      return deprecate$1(fn, msg).apply(this, arguments);
    };
  }

  if (process$1.noDeprecation === true) {
    return fn;
  }

  var warned = false;
  return function deprecated() {
    if (!warned) {
      if (process$1.throwDeprecation) {
        throw new Error(msg);
      } else if (process$1.traceDeprecation) {
        console.trace(msg);
      } else {
        console.error(msg);
      }
      // Set only after the branch so throwDeprecation keeps throwing on
      // every call (matches upstream util.deprecate semantics).
      warned = true;
    }
    return fn.apply(this, arguments);
  };
}
var debugs = {};
var debugEnviron;

/**
 * Return a logger for section `set` that prints to stderr only when the
 * NODE_DEBUG environment variable mentions that section; otherwise return
 * a cached no-op. Results are memoized per upper-cased section name.
 */
function debuglog(set) {
  if (isUndefined(debugEnviron)) {
    debugEnviron = process$1.env.NODE_DEBUG || '';
  }
  set = set.toUpperCase();
  if (!debugs[set]) {
    if (new RegExp('\\b' + set + '\\b', 'i').test(debugEnviron)) {
      var pid = 0; // browser shim: no real process id available
      debugs[set] = function () {
        var msg = format.apply(null, arguments);
        console.error('%s %d: %s', set, pid, msg);
      };
    } else {
      debugs[set] = function () {};
    }
  }
  return debugs[set];
}
/**
 * Echos the value of a value. Trys to print the value out
 * in the best way possible given the different types.
 *
 * @param {Object} obj The object to print out.
 * @param {Object} opts Optional options object that alters the output.
 */
/* legacy: obj, showHidden, depth, colors*/
function inspect$2(obj, opts) {
  // Default options: nothing seen yet, no coloring.
  var ctx = {
    seen: [],
    stylize: stylizeNoColor
  };
  // Legacy positional signature: (obj, showHidden, depth, colors).
  if (arguments.length >= 3) ctx.depth = arguments[2];
  if (arguments.length >= 4) ctx.colors = arguments[3];
  if (isBoolean(opts)) {
    // legacy boolean second argument means showHidden
    ctx.showHidden = opts;
  } else if (opts) {
    // got an "options" object: merge it over the defaults
    _extend(ctx, opts);
  }
  // Fill in defaults for anything the caller left unset.
  if (isUndefined(ctx.showHidden)) ctx.showHidden = false;
  if (isUndefined(ctx.depth)) ctx.depth = 2;
  if (isUndefined(ctx.colors)) ctx.colors = false;
  if (isUndefined(ctx.customInspect)) ctx.customInspect = true;
  if (ctx.colors) ctx.stylize = stylizeWithColor;
  return formatValue(ctx, obj, ctx.depth);
}

// http://en.wikipedia.org/wiki/ANSI_escape_code#graphics
inspect$2.colors = {
  'bold': [1, 22],
  'italic': [3, 23],
  'underline': [4, 24],
  'inverse': [7, 27],
  'white': [37, 39],
  'grey': [90, 39],
  'black': [30, 39],
  'blue': [34, 39],
  'cyan': [36, 39],
  'green': [32, 39],
  'magenta': [35, 39],
  'red': [31, 39],
  'yellow': [33, 39]
};

// Don't use 'blue' not visible on cmd.exe
inspect$2.styles = {
  'special': 'cyan',
  'number': 'yellow',
  'boolean': 'yellow',
  'undefined': 'grey',
  'null': 'bold',
  'string': 'green',
  'date': 'magenta',
  // "name": intentionally not styling
  'regexp': 'red'
};
// Wrap `str` in the ANSI escape codes mapped for `styleType`; strings with
// no mapped style pass through unchanged.
function stylizeWithColor(str, styleType) {
  var style = inspect$2.styles[styleType];

  if (!style) return str;

  return '\u001b[' + inspect$2.colors[style][0] + 'm' + str +
         '\u001b[' + inspect$2.colors[style][1] + 'm';
}

// Identity stylizer used when color output is disabled.
function stylizeNoColor(str, styleType) {
  return str;
}
// Build a truthy-membership lookup object from an array of keys.
function arrayToHash(array) {
  var hash = {};
  array.forEach(function (val) {
    hash[val] = true;
  });
  return hash;
}
/**
 * Core recursive renderer behind inspect$2. Dispatches on the value's type
 * and recurses into object properties up to `recurseTimes` levels deep.
 */
function formatValue(ctx, value, recurseTimes) {
  // Honor a user-supplied custom inspect function, but skip the util
  // module's own inspect and prototype objects (circular-check trick).
  if (ctx.customInspect &&
      value &&
      isFunction(value.inspect) &&
      value.inspect !== inspect$2 &&
      !(value.constructor && value.constructor.prototype === value)) {
    var custom = value.inspect(recurseTimes, ctx);
    if (!isString(custom)) {
      custom = formatValue(ctx, custom, recurseTimes);
    }
    return custom;
  }

  // Primitive types cannot have properties.
  var primitive = formatPrimitive(ctx, value);
  if (primitive) {
    return primitive;
  }

  // Look up the keys of the object.
  var keys = Object.keys(value);
  var visibleKeys = arrayToHash(keys);

  if (ctx.showHidden) {
    keys = Object.getOwnPropertyNames(value);
  }

  // IE doesn't make error fields non-enumerable
  // http://msdn.microsoft.com/en-us/library/ie/dww52sbt(v=vs.94).aspx
  if (isError(value)
      && (keys.indexOf('message') >= 0 || keys.indexOf('description') >= 0)) {
    return formatError(value);
  }

  // Some type of object without properties can be shortcutted.
  if (keys.length === 0) {
    if (isFunction(value)) {
      var fnName = value.name ? ': ' + value.name : '';
      return ctx.stylize('[Function' + fnName + ']', 'special');
    }
    if (isRegExp(value)) {
      return ctx.stylize(RegExp.prototype.toString.call(value), 'regexp');
    }
    if (isDate(value)) {
      return ctx.stylize(Date.prototype.toString.call(value), 'date');
    }
    if (isError(value)) {
      return formatError(value);
    }
  }

  var base = '';
  var array = false;
  var braces = ['{', '}'];

  // Make Array say that they are Array
  if (isArray(value)) {
    array = true;
    braces = ['[', ']'];
  }

  // Make functions say that they are functions
  if (isFunction(value)) {
    var n = value.name ? ': ' + value.name : '';
    base = ' [Function' + n + ']';
  }

  // Make RegExps say that they are RegExps
  if (isRegExp(value)) {
    base = ' ' + RegExp.prototype.toString.call(value);
  }

  // Make dates with properties first say the date
  if (isDate(value)) {
    base = ' ' + Date.prototype.toUTCString.call(value);
  }

  // Make error with message first say the error
  if (isError(value)) {
    base = ' ' + formatError(value);
  }

  if (keys.length === 0 && (!array || value.length == 0)) {
    return braces[0] + base + braces[1];
  }

  // Depth budget exhausted: summarize instead of recursing.
  if (recurseTimes < 0) {
    if (isRegExp(value)) {
      return ctx.stylize(RegExp.prototype.toString.call(value), 'regexp');
    }
    return ctx.stylize('[Object]', 'special');
  }

  // Track this object so nested references can be reported as [Circular].
  ctx.seen.push(value);

  var output;
  if (array) {
    output = formatArray(ctx, value, recurseTimes, visibleKeys, keys);
  } else {
    output = keys.map(function (key) {
      return formatProperty(ctx, value, recurseTimes, visibleKeys, key, array);
    });
  }

  ctx.seen.pop();

  return reduceToSingleString(output, base, braces);
}
// Render a primitive, or return undefined when `value` is not primitive
// (callers use the falsy result to fall through to object formatting).
function formatPrimitive(ctx, value) {
  if (isUndefined(value)) {
    return ctx.stylize('undefined', 'undefined');
  }
  if (isString(value)) {
    // Single-quote the JSON form: strip the surrounding double quotes,
    // escape embedded single quotes, un-escape embedded double quotes.
    var simple = '\'' + JSON.stringify(value).replace(/^"|"$/g, '')
                                             .replace(/'/g, "\\'")
                                             .replace(/\\"/g, '"') + '\'';
    return ctx.stylize(simple, 'string');
  }
  if (isNumber(value)) {
    return ctx.stylize('' + value, 'number');
  }
  if (isBoolean(value)) {
    return ctx.stylize('' + value, 'boolean');
  }
  // For some reason typeof null is "object", so special case here.
  if (isNull(value)) {
    return ctx.stylize('null', 'null');
  }
}
// Render an Error via the canonical Error#toString, e.g. "[TypeError: x]".
function formatError(value) {
  return '[' + Error.prototype.toString.call(value) + ']';
}
// Format indexed entries in order (holes render as ''), then append any
// non-numeric own keys the caller collected.
function formatArray(ctx, value, recurseTimes, visibleKeys, keys) {
  var output = [];
  for (var i = 0, l = value.length; i < l; ++i) {
    if (hasOwnProperty$3(value, String(i))) {
      output.push(formatProperty(ctx, value, recurseTimes, visibleKeys,
          String(i), true));
    } else {
      output.push('');
    }
  }
  keys.forEach(function (key) {
    if (!key.match(/^\d+$/)) {
      output.push(formatProperty(ctx, value, recurseTimes, visibleKeys,
          key, true));
    }
  });
  return output;
}
// Render one "name: value" pair. Getters/setters are summarized rather
// than evaluated, and already-seen objects render as [Circular].
function formatProperty(ctx, value, recurseTimes, visibleKeys, key, array) {
  var name, str;
  var desc = Object.getOwnPropertyDescriptor(value, key) || { value: value[key] };

  if (desc.get) {
    if (desc.set) {
      str = ctx.stylize('[Getter/Setter]', 'special');
    } else {
      str = ctx.stylize('[Getter]', 'special');
    }
  } else {
    if (desc.set) {
      str = ctx.stylize('[Setter]', 'special');
    }
  }

  // Hidden (non-enumerable) keys are shown in brackets.
  if (!hasOwnProperty$3(visibleKeys, key)) {
    name = '[' + key + ']';
  }

  if (!str) {
    if (ctx.seen.indexOf(desc.value) < 0) {
      if (isNull(recurseTimes)) {
        str = formatValue(ctx, desc.value, null);
      } else {
        str = formatValue(ctx, desc.value, recurseTimes - 1);
      }
      if (str.indexOf('\n') > -1) {
        // Re-indent multi-line values; array entries drop the leading pad.
        if (array) {
          str = str.split('\n').map(function (line) {
            return '  ' + line;
          }).join('\n').substr(2);
        } else {
          str = '\n' + str.split('\n').map(function (line) {
            return '   ' + line;
          }).join('\n');
        }
      }
    } else {
      str = ctx.stylize('[Circular]', 'special');
    }
  }

  if (isUndefined(name)) {
    // Plain numeric indices in arrays render without a name.
    if (array && key.match(/^\d+$/)) {
      return str;
    }
    name = JSON.stringify('' + key);
    if (name.match(/^"([a-zA-Z_][a-zA-Z_0-9]*)"$/)) {
      // Identifier-like key: drop the quotes entirely.
      name = name.substr(1, name.length - 2);
      name = ctx.stylize(name, 'name');
    } else {
      // Otherwise re-quote with single quotes.
      name = name.replace(/'/g, "\\'")
                 .replace(/\\"/g, '"')
                 .replace(/(^"|"$)/g, "'");
      name = ctx.stylize(name, 'string');
    }
  }

  return name + ': ' + str;
}
/**
 * Join formatted property strings into the final braced rendering.
 * Properties stay on one line when the total rendered width (ANSI color
 * escapes excluded) fits within 60 columns; otherwise each property goes
 * on its own indented line.
 *
 * Fix: removed the dead empty statement `if (cur.indexOf('\n') >= 0) ;`
 * inside the reducer — a bundler artifact of a stripped line counter that
 * had no effect.
 *
 * @param {Array<string>} output - Pre-rendered "name: value" strings.
 * @param {string} base - Type prefix such as " [Function: f]" or ''.
 * @param {Array<string>} braces - Two-element open/close pair.
 * @returns {string} The assembled single- or multi-line representation.
 */
function reduceToSingleString(output, base, braces) {
  var length = output.reduce(function (prev, cur) {
    // Strip ANSI color escapes so styling does not count toward width;
    // the +1 accounts for the joining separator.
    return prev + cur.replace(/\u001b\[\d\d?m/g, '').length + 1;
  }, 0);

  if (length > 60) {
    return braces[0] +
           (base === '' ? '' : base + '\n ') +
           ' ' +
           output.join(',\n  ') +
           ' ' +
           braces[1];
  }

  return braces[0] + base + ' ' + output.join(', ') + ' ' + braces[1];
}
// NOTE: These type checking functions intentionally don't use `instanceof`
// because it is fragile and can be easily faked with `Object.create()`.

function isArray(ar) {
  return Array.isArray(ar);
}

function isBoolean(arg) {
  return typeof arg === 'boolean';
}

function isNull(arg) {
  return arg === null;
}

// Loose equality deliberately matches both null and undefined.
function isNullOrUndefined(arg) {
  return arg == null;
}

function isNumber(arg) {
  return typeof arg === 'number';
}

function isString(arg) {
  return typeof arg === 'string';
}

function isSymbol(arg) {
  return typeof arg === 'symbol';
}

function isUndefined(arg) {
  return arg === void 0;
}

// Brand checks below rely on Object.prototype.toString rather than
// instanceof, so they work across realms.
function isRegExp(re) {
  return isObject(re) && objectToString(re) === '[object RegExp]';
}

// typeof null === 'object', so null must be excluded explicitly.
function isObject(arg) {
  return typeof arg === 'object' && arg !== null;
}

function isDate(d) {
  return isObject(d) && objectToString(d) === '[object Date]';
}

function isError(e) {
  return isObject(e) &&
      (objectToString(e) === '[object Error]' || e instanceof Error);
}

function isFunction(arg) {
  return typeof arg === 'function';
}

function isPrimitive(arg) {
  return arg === null ||
         typeof arg === 'boolean' ||
         typeof arg === 'number' ||
         typeof arg === 'string' ||
         typeof arg === 'symbol' || // ES6 symbol
         typeof arg === 'undefined';
}

function isBuffer(maybeBuf) {
  return buffer.Buffer.isBuffer(maybeBuf);
}

function objectToString(o) {
  return Object.prototype.toString.call(o);
}
// Zero-pad numbers below ten to two digits.
function pad(n) {
  if (n < 10) {
    return '0' + n.toString(10);
  }
  return n.toString(10);
}

var months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep',
              'Oct', 'Nov', 'Dec'];

// 26 Feb 16:19:34
function timestamp() {
  var now = new Date();
  var clock = [pad(now.getHours()),
               pad(now.getMinutes()),
               pad(now.getSeconds())].join(':');
  return [now.getDate(), months[now.getMonth()], clock].join(' ');
}

// log is just a thin wrapper to console.log that prepends a timestamp
function log() {
  console.log('%s - %s', timestamp(), format.apply(null, arguments));
}
// Shallow-copy the own enumerable keys of `add` onto `origin`, mutating
// and returning it. Non-object `add` values are ignored so callers can
// pass options through unconditionally.
function _extend(origin, add) {
  // Don't do anything if add isn't an object
  if (!add || !isObject(add)) return origin;

  var names = Object.keys(add);
  var idx = names.length;
  while (idx--) {
    origin[names[idx]] = add[names[idx]];
  }
  return origin;
}

// Own-property check that stays safe even when `obj` shadows
// hasOwnProperty or has a null prototype.
function hasOwnProperty$3(obj, prop) {
  return Object.prototype.hasOwnProperty.call(obj, prop);
}
// Aggregated export object mirroring the surface of Node's `util` module
// for the CommonJS consumers bundled into this file.
var require$$1 = {
  inherits: inherits$2,
  _extend,
  log,
  isBuffer,
  isPrimitive,
  isFunction,
  isError,
  isDate,
  isObject,
  isRegExp,
  isUndefined,
  isSymbol,
  isString,
  isNumber,
  isNullOrUndefined,
  isNull,
  isBoolean,
  isArray,
  inspect: inspect$2,
  deprecate: deprecate$1,
  format,
  debuglog
};
// xtend (immutable flavor): merge own enumerable properties of every
// argument into a brand-new object. Later arguments win; no input is
// mutated.
var immutable = extend$1;

var hasOwnProperty$2 = Object.prototype.hasOwnProperty;

function extend$1() {
  var merged = {};

  for (var argIdx = 0; argIdx < arguments.length; argIdx++) {
    var src = arguments[argIdx];

    for (var prop in src) {
      if (hasOwnProperty$2.call(src, prop)) {
        merged[prop] = src[prop];
      }
    }
  }

  return merged
}
// xtend (mutable flavor): copy own enumerable properties of each source
// argument onto `target`, mutating and returning it. Later sources win.
var mutable = extend;

var hasOwnProperty$1 = Object.prototype.hasOwnProperty;

function extend(target) {
  for (var s = 1; s < arguments.length; s++) {
    var source = arguments[s];

    for (var name in source) {
      if (hasOwnProperty$1.call(source, name)) {
        target[name] = source[name];
      }
    }
  }

  return target
}
// For (old) browser support

// level-supports: build a normalized feature manifest for a level(up)
// store. Every known capability flag is coerced to a boolean; flags the
// caller did not set default to false, and unknown keys pass through.
var levelSupports = function supports() {
  var m = immutable.apply(null, arguments);

  return mutable(m, {
    // Features of abstract-leveldown
    bufferKeys: m.bufferKeys || false,
    snapshots: m.snapshots || false,
    permanence: m.permanence || false,
    seek: m.seek || false,
    clear: m.clear || false,

    // Features of abstract-leveldown that levelup doesn't have
    status: m.status || false,

    // Features of disk-based implementations
    createIfMissing: m.createIfMissing || false,
    errorIfExists: m.errorIfExists || false,

    // Features of level(up) that abstract-leveldown doesn't have yet
    deferredOpen: m.deferredOpen || false,
    openCallback: m.openCallback || false,
    promises: m.promises || false,
    streams: m.streams || false,
    encodings: m.encodings || false,

    // Methods that are not part of abstract-leveldown or levelup
    additionalMethods: immutable(m.additionalMethods)
  })
};
// Placeholder namespace for a Node-only module with no browser equivalent
// (produced by the bundler's empty-module shim).
var _nodeResolve_empty = {};

var _nodeResolve_empty$1 = /*#__PURE__*/Object.freeze({
  __proto__: null,
  'default': _nodeResolve_empty
});
// Scheduler backend: the host's native queueMicrotask, when available.
var test$4 = function () {
  return typeof commonjsGlobal.queueMicrotask === 'function';
};

var install$4 = function (func) {
  return function () {
    commonjsGlobal.queueMicrotask(func);
  };
};

var queueMicrotask = {
  test: test$4,
  install: install$4
};
//based off rsvp https://github.com/tildeio/rsvp.js
//license https://github.com/tildeio/rsvp.js/blob/master/LICENSE
//https://github.com/tildeio/rsvp.js/blob/master/lib/rsvp/asap.js

var Mutation = commonjsGlobal.MutationObserver || commonjsGlobal.WebKitMutationObserver;

// Scheduler backend: toggle a detached text node's data so an observing
// MutationObserver fires `handle` as a microtask.
var test$3 = function () {
  return Mutation;
};

var install$3 = function (handle) {
  var called = 0;
  var observer = new Mutation(handle);
  var element = commonjsGlobal.document.createTextNode('');
  observer.observe(element, {
    characterData: true
  });
  return function () {
    // Alternate between "0" and "1" so every call mutates the node.
    element.data = (called = ++called % 2);
  };
};

var mutation = {
  test: test$3,
  install: install$3
};
// Scheduler backend: MessageChannel, skipped where setImmediate exists
// because that environment (IE10) mishandles postMessage.
var test$2 = function () {
  if (commonjsGlobal.setImmediate) {
    // we can only get here in IE10
    // which doesn't handel postMessage well
    return false;
  }
  return typeof commonjsGlobal.MessageChannel !== 'undefined';
};

var install$2 = function (func) {
  var channel = new commonjsGlobal.MessageChannel();
  channel.port1.onmessage = func;
  return function () {
    channel.port2.postMessage(0);
  };
};

var messageChannel = {
  test: test$2,
  install: install$2
};
// Scheduler backend: legacy IE <script> onreadystatechange trick.
var test$1 = function () {
  return 'document' in commonjsGlobal && 'onreadystatechange' in commonjsGlobal.document.createElement('script');
};

var install$1 = function (handle) {
  return function () {

    // Create a <script> element; its readystatechange event will be fired asynchronously once it is inserted
    // into the document. Do so, thus queuing up the task. Remember to clean up once it's been called.
    var scriptEl = commonjsGlobal.document.createElement('script');
    scriptEl.onreadystatechange = function () {
      handle();

      scriptEl.onreadystatechange = null;
      scriptEl.parentNode.removeChild(scriptEl);
      scriptEl = null;
    };
    commonjsGlobal.document.documentElement.appendChild(scriptEl);

    return handle;
  };
};

var stateChange = {
  test: test$1,
  install: install$1
};
// Scheduler backend of last resort: plain setTimeout(fn, 0). Always
// available, so its test unconditionally passes.
var test = function () {
  return true;
};

var install = function (t) {
  return function () {
    setTimeout(t, 0);
  };
};

var timeout = {
  test: test,
  install: install
};
var debugUtil = getCjsExportFromNamespace(_nodeResolve_empty$1);

// Candidate microtask schedulers in order of preference; the first whose
// test() passes is installed as scheduleDrain below.
var types = [
  debugUtil,
  queueMicrotask,
  mutation,
  messageChannel,
  stateChange,
  timeout
];
var draining;
var currentQueue;
var queueIndex = -1;
var queue = [];
var scheduled = false;

// Recovery hook: if a queued task threw mid-drain, restore the remaining
// work onto the queue and reschedule it.
function cleanUpNextTick() {
  if (!draining || !currentQueue) {
    return;
  }
  draining = false;
  if (currentQueue.length) {
    queue = currentQueue.concat(queue);
  } else {
    queueIndex = -1;
  }
  if (queue.length) {
    nextTick();
  }
}

//named nextTick for less confusing stack traces
function nextTick() {
  if (draining) {
    return;
  }
  scheduled = false;
  draining = true;
  var len = queue.length;
  // This timeout only ever fires if a task throws; it re-enters via
  // cleanUpNextTick to resume draining.
  var timeout = setTimeout(cleanUpNextTick);
  while (len) {
    currentQueue = queue;
    queue = [];
    while (currentQueue && ++queueIndex < len) {
      currentQueue[queueIndex].run();
    }
    queueIndex = -1;
    len = queue.length;
  }
  currentQueue = null;
  queueIndex = -1;
  draining = false;
  clearTimeout(timeout);
}

// Pick the first scheduler the current environment supports.
var scheduleDrain;
var i = -1;
var len = types.length;
while (++i < len) {
  if (types[i] && types[i].test && types[i].test()) {
    scheduleDrain = types[i].install(nextTick);
    break;
  }
}

// v8 likes predictible objects
function Item(fun, array) {
  this.fun = fun;
  this.array = array;
}
// Dispatch on arity to avoid apply() for the common small cases.
Item.prototype.run = function () {
  var fun = this.fun;
  var array = this.array;
  switch (array.length) {
    case 0:
      return fun();
    case 1:
      return fun(array[0]);
    case 2:
      return fun(array[0], array[1]);
    case 3:
      return fun(array[0], array[1], array[2]);
    default:
      return fun.apply(null, array);
  }
};

var lib = immediate;

// Queue `task` (plus any extra arguments) to run on the next drain,
// scheduling a drain if one is not already pending or running.
function immediate(task) {
  var args = new Array(arguments.length - 1);
  if (arguments.length > 1) {
    for (var i = 1; i < arguments.length; i++) {
      args[i - 1] = arguments[i];
    }
  }
  queue.push(new Item(task, args));
  if (!scheduled && !draining) {
    scheduled = true;
    scheduleDrain();
  }
}

var nextTickBrowser = lib;
/**
 * Base class for abstract-leveldown iterators. Subclasses override the
 * underscore-prefixed hooks (_next, _seek, _end); the public methods add
 * argument validation and in-flight/ended state tracking.
 *
 * @param {Object} db - The owning abstract-leveldown store.
 */
function AbstractIterator$4(db) {
  if (typeof db !== 'object' || db === null) {
    throw new TypeError('First argument must be an abstract-leveldown compliant store')
  }

  this.db = db;
  this._ended = false;
  this._nexting = false;
}

// Fetch the next entry. Misuse (after end(), or while a previous next()
// is still in flight) is reported asynchronously via the callback.
AbstractIterator$4.prototype.next = function (callback) {
  var self = this;

  if (typeof callback !== 'function') {
    throw new Error('next() requires a callback argument')
  }

  if (self._ended) {
    nextTickBrowser(callback, new Error('cannot call next() after end()'));
    return self
  }

  if (self._nexting) {
    nextTickBrowser(callback, new Error('cannot call next() before previous next() has completed'));
    return self
  }

  self._nexting = true;
  self._next(function () {
    self._nexting = false;
    callback.apply(null, arguments);
  });

  return self
};

// Default _next: signal end-of-data on the next tick.
AbstractIterator$4.prototype._next = function (callback) {
  nextTickBrowser(callback);
};

// Reposition the iterator at `target` (serialized by the owning store).
AbstractIterator$4.prototype.seek = function (target) {
  if (this._ended) {
    throw new Error('cannot call seek() after end()')
  }
  if (this._nexting) {
    throw new Error('cannot call seek() before next() has completed')
  }

  target = this.db._serializeKey(target);
  this._seek(target);
};

AbstractIterator$4.prototype._seek = function (target) {};

// Release the iterator. A second end() call errors asynchronously.
AbstractIterator$4.prototype.end = function (callback) {
  if (typeof callback !== 'function') {
    throw new Error('end() requires a callback argument')
  }

  if (this._ended) {
    return nextTickBrowser(callback, new Error('end() already called on iterator'))
  }

  this._ended = true;
  this._end(callback);
};

AbstractIterator$4.prototype._end = function (callback) {
  nextTickBrowser(callback);
};

// Expose browser-compatible nextTick for dependents
AbstractIterator$4.prototype._nextTick = nextTickBrowser;

var abstractIterator = AbstractIterator$4;
/**
 * Base class for chained batches. Queues put/del operations and submits
 * them to the store's _batch on write(). Subclasses may override the
 * underscore-prefixed hooks.
 *
 * @param {Object} db - The owning abstract-leveldown store.
 */
function AbstractChainedBatch$2(db) {
  if (typeof db !== 'object' || db === null) {
    throw new TypeError('First argument must be an abstract-leveldown compliant store')
  }

  this.db = db;
  this._operations = [];
  this._written = false;
}

// Guard: a batch may only be written once.
AbstractChainedBatch$2.prototype._checkWritten = function () {
  if (this._written) {
    throw new Error('write() already called on this batch')
  }
};

// Queue a put of `key` -> `value`; validation and serialization are
// delegated to the owning store. Returns `this` for chaining.
AbstractChainedBatch$2.prototype.put = function (key, value) {
  this._checkWritten();

  var err = this.db._checkKey(key) || this.db._checkValue(value);
  if (err) throw err

  key = this.db._serializeKey(key);
  value = this.db._serializeValue(value);

  this._put(key, value);

  return this
};

AbstractChainedBatch$2.prototype._put = function (key, value) {
  this._operations.push({ type: 'put', key: key, value: value });
};

// Queue a deletion of `key`. Returns `this` for chaining.
AbstractChainedBatch$2.prototype.del = function (key) {
  this._checkWritten();

  var err = this.db._checkKey(key);
  if (err) throw err

  key = this.db._serializeKey(key);
  this._del(key);

  return this
};

AbstractChainedBatch$2.prototype._del = function (key) {
  this._operations.push({ type: 'del', key: key });
};

// Drop all queued operations. Returns `this` for chaining.
AbstractChainedBatch$2.prototype.clear = function () {
  this._checkWritten();
  this._clear();

  return this
};

AbstractChainedBatch$2.prototype._clear = function () {
  this._operations = [];
};

// Submit the queued operations. `options` may be omitted.
AbstractChainedBatch$2.prototype.write = function (options, callback) {
  this._checkWritten();

  if (typeof options === 'function') { callback = options; }
  if (typeof callback !== 'function') {
    throw new Error('write() requires a callback argument')
  }
  if (typeof options !== 'object' || options === null) {
    options = {};
  }

  this._written = true;
  this._write(options, callback);
};

AbstractChainedBatch$2.prototype._write = function (options, callback) {
  this.db._batch(this._operations, options, callback);
};

// Expose browser-compatible nextTick for dependents
AbstractChainedBatch$2.prototype._nextTick = nextTickBrowser;

var abstractChainedBatch = AbstractChainedBatch$2;
var Buffer$6 = buffer.Buffer;

var hasOwnProperty = Object.prototype.hasOwnProperty;
var rangeOptions = 'start end gt gte lt lte'.split(' ');

/**
 * Base class for abstract-leveldown stores. Tracks an open/close `status`
 * lifecycle and publishes a normalized feature manifest via `supports`.
 *
 * @param {Object} [manifest] - Feature flags describing the implementation.
 */
function AbstractLevelDOWN$4(manifest) {
  this.status = 'new';

  // TODO (next major): make this mandatory
  this.supports = levelSupports(manifest, {
    status: true
  });
}
// Open the store. `status` moves new -> opening -> open, and rolls back to
// its previous value if the underlying _open fails.
AbstractLevelDOWN$4.prototype.open = function (options, callback) {
  var self = this;
  var oldStatus = this.status;

  if (typeof options === 'function') callback = options;

  if (typeof callback !== 'function') {
    throw new Error('open() requires a callback argument')
  }

  if (typeof options !== 'object' || options === null) options = {};

  // Normalize creation flags to booleans (createIfMissing defaults on).
  options.createIfMissing = options.createIfMissing !== false;
  options.errorIfExists = !!options.errorIfExists;

  this.status = 'opening';
  this._open(options, function (err) {
    if (err) {
      self.status = oldStatus;
      return callback(err)
    }
    self.status = 'open';
    callback();
  });
};

// Default _open succeeds asynchronously.
AbstractLevelDOWN$4.prototype._open = function (options, callback) {
  nextTickBrowser(callback);
};

// Close the store. `status` moves to closing -> closed, rolling back if
// the underlying _close fails.
AbstractLevelDOWN$4.prototype.close = function (callback) {
  var self = this;
  var oldStatus = this.status;

  if (typeof callback !== 'function') {
    throw new Error('close() requires a callback argument')
  }

  this.status = 'closing';
  this._close(function (err) {
    if (err) {
      self.status = oldStatus;
      return callback(err)
    }
    self.status = 'closed';
    callback();
  });
};

AbstractLevelDOWN$4.prototype._close = function (callback) {
  nextTickBrowser(callback);
};
// Read the value stored under `key`. `options.asBuffer` defaults to true.
AbstractLevelDOWN$4.prototype.get = function (key, options, callback) {
  if (typeof options === 'function') callback = options;

  if (typeof callback !== 'function') {
    throw new Error('get() requires a callback argument')
  }

  var err = this._checkKey(key);
  if (err) return nextTickBrowser(callback, err)

  key = this._serializeKey(key);

  if (typeof options !== 'object' || options === null) options = {};

  options.asBuffer = options.asBuffer !== false;

  this._get(key, options, callback);
};

// Default _get: every key is reported missing.
AbstractLevelDOWN$4.prototype._get = function (key, options, callback) {
  nextTickBrowser(function () { callback(new Error('NotFound')); });
};

// Store `value` under `key` after validating and serializing both.
AbstractLevelDOWN$4.prototype.put = function (key, value, options, callback) {
  if (typeof options === 'function') callback = options;

  if (typeof callback !== 'function') {
    throw new Error('put() requires a callback argument')
  }

  var err = this._checkKey(key) || this._checkValue(value);
  if (err) return nextTickBrowser(callback, err)

  key = this._serializeKey(key);
  value = this._serializeValue(value);

  if (typeof options !== 'object' || options === null) options = {};

  this._put(key, value, options, callback);
};

AbstractLevelDOWN$4.prototype._put = function (key, value, options, callback) {
  nextTickBrowser(callback);
};

// Remove the entry stored under `key`.
AbstractLevelDOWN$4.prototype.del = function (key, options, callback) {
  if (typeof options === 'function') callback = options;

  if (typeof callback !== 'function') {
    throw new Error('del() requires a callback argument')
  }

  var err = this._checkKey(key);
  if (err) return nextTickBrowser(callback, err)

  key = this._serializeKey(key);

  if (typeof options !== 'object' || options === null) options = {};

  this._del(key, options, callback);
};

AbstractLevelDOWN$4.prototype._del = function (key, options, callback) {
  nextTickBrowser(callback);
};
// batch([array[, options]], callback): validate every operation, serialize
// keys/values, then delegate to _batch(). Calling batch() with no arguments
// returns a chained-batch builder instead.
AbstractLevelDOWN$4.prototype.batch = function (array, options, callback) {
  if (!arguments.length) return this._chainedBatch()

  // Shift arguments for the batch(array, callback) and batch(fn) forms.
  if (typeof options === 'function') callback = options;
  if (typeof array === 'function') callback = array;

  if (typeof callback !== 'function') {
    throw new Error('batch(array) requires a callback argument')
  }

  if (!Array.isArray(array)) {
    return nextTickBrowser(callback, new Error('batch(array) requires an array argument'))
  }

  if (array.length === 0) {
    return nextTickBrowser(callback)
  }

  if (typeof options !== 'object' || options === null) options = {};

  var serialized = new Array(array.length);

  for (var index = 0; index < array.length; index++) {
    var input = array[index];

    if (typeof input !== 'object' || input === null) {
      return nextTickBrowser(callback, new Error('batch(array) element must be an object and not `null`'))
    }

    // Copy the operation via immutable() so serialization below does not
    // mutate the caller's object (assumes immutable() is a clone helper —
    // its definition is elsewhere in this bundle).
    var op = immutable(input);

    if (op.type !== 'put' && op.type !== 'del') {
      return nextTickBrowser(callback, new Error("`type` must be 'put' or 'del'"))
    }

    var keyError = this._checkKey(op.key);
    if (keyError) return nextTickBrowser(callback, keyError)

    op.key = this._serializeKey(op.key);

    // Only 'put' operations carry a value that needs checking/serializing.
    if (op.type === 'put') {
      var valueError = this._checkValue(op.value);
      if (valueError) return nextTickBrowser(callback, valueError)

      op.value = this._serializeValue(op.value);
    }

    serialized[index] = op;
  }

  this._batch(serialized, options, callback);
};
// Default no-op _batch: completes asynchronously without applying anything.
// Concrete stores override this with a real implementation.
AbstractLevelDOWN$4.prototype._batch = function (array, options, callback) {
  nextTickBrowser(callback);
};
// clear([options], callback): delete all entries within the given key range.
AbstractLevelDOWN$4.prototype.clear = function (options, callback) {
  if (typeof options === 'function') {
    callback = options;
  } else if (typeof callback !== 'function') {
    throw new Error('clear() requires a callback argument')
  }

  // Copy + serialize range options, then apply iterator defaults.
  var opts = cleanRangeOptions(this, options);
  opts.reverse = Boolean(opts.reverse);
  if (!('limit' in opts)) opts.limit = -1;

  this._clear(opts, callback);
};
// Default _clear implementation: iterate the requested range and delete keys
// one at a time via _del(). Intentionally simple rather than fast; concrete
// stores may override _clear with something better (e.g. a native range
// delete or a batch).
AbstractLevelDOWN$4.prototype._clear = function (options, callback) {
  // Avoid setupIteratorOptions, would serialize range options a second time.
  options.keys = true;
  options.values = false;
  options.keyAsBuffer = true;
  options.valueAsBuffer = true;

  var iterator = this._iterator(options);
  var emptyOptions = {};
  var self = this;

  // Sequential delete loop: each _del completion triggers the next iterator
  // step; any error ends the iterator first, then reports via callback.
  var next = function (err) {
    if (err) {
      return iterator.end(function () {
        callback(err);
      })
    }

    iterator.next(function (err, key) {
      if (err) return next(err)
      // An undefined key means the iterator is exhausted — finish up.
      if (key === undefined) return iterator.end(callback)

      // This could be optimized by using a batch, but the default _clear
      // is not meant to be fast. Implementations have more room to optimize
      // if they override _clear. Note: using _del bypasses key serialization.
      self._del(key, emptyOptions, next);
    });
  };

  next();
};
// Normalise user-supplied iterator options into the canonical shape the
// iterator implementations expect: range keys serialized, booleans coerced,
// defaults applied.
AbstractLevelDOWN$4.prototype._setupIteratorOptions = function (options) {
  var opts = cleanRangeOptions(this, options);

  opts.reverse = Boolean(opts.reverse);
  opts.keys = opts.keys !== false;
  opts.values = opts.values !== false;
  if (!('limit' in opts)) opts.limit = -1;
  opts.keyAsBuffer = opts.keyAsBuffer !== false;
  opts.valueAsBuffer = opts.valueAsBuffer !== false;

  return opts
};
// Copy user options into a fresh object, serializing any range-defining
// option (gt/gte/lt/lte/start/end — see rangeOptions) with the store's key
// serializer. Inherited (non-own) properties are skipped.
function cleanRangeOptions (db, options) {
  var result = {};

  for (var name in options) {
    if (!hasOwnProperty.call(options, name)) continue

    var value = options[name];

    // Nullish and empty values are invalid as keys but valid as range
    // options, so they are passed through to the serializer unchecked.
    if (isRangeOption(name)) value = db._serializeKey(value);

    result[name] = value;
  }

  return result
}

// True when `k` names one of the recognised range options.
function isRangeOption (k) {
  return rangeOptions.indexOf(k) >= 0
}
// iterator([options]): create an iterator over the store with fully
// normalised options.
AbstractLevelDOWN$4.prototype.iterator = function (options) {
  var opts = (typeof options === 'object' && options !== null) ? options : {};
  return this._iterator(this._setupIteratorOptions(opts))
};
// Default iterator factory; concrete stores override _iterator().
AbstractLevelDOWN$4.prototype._iterator = function (options) {
  return new abstractIterator(this)
};

// Default chained-batch factory; concrete stores may override.
AbstractLevelDOWN$4.prototype._chainedBatch = function () {
  return new abstractChainedBatch(this)
};

// Identity key serialization by default.
AbstractLevelDOWN$4.prototype._serializeKey = function (key) {
  return key
};

// Identity value serialization by default.
AbstractLevelDOWN$4.prototype._serializeValue = function (value) {
  return value
};
// Validate a user-supplied key. Returns an Error describing the problem, or
// undefined when the key is acceptable. Rejected: nullish keys, empty
// Buffers, empty strings and empty arrays.
AbstractLevelDOWN$4.prototype._checkKey = function (key) {
  if (key === null || key === undefined) {
    return new Error('key cannot be `null` or `undefined`')
  }
  if (Buffer$6.isBuffer(key) && key.length === 0) {
    return new Error('key cannot be an empty Buffer')
  }
  if (key === '') {
    return new Error('key cannot be an empty String')
  }
  if (Array.isArray(key) && key.length === 0) {
    return new Error('key cannot be an empty Array')
  }
};
// Validate a user-supplied value: only null/undefined are rejected.
// Returns an Error when invalid, otherwise undefined.
AbstractLevelDOWN$4.prototype._checkValue = function (value) {
  if (value == null) {
    return new Error('value cannot be `null` or `undefined`')
  }
};
// Expose browser-compatible nextTick for dependents
AbstractLevelDOWN$4.prototype._nextTick = nextTickBrowser;

// Primary export of the base store class.
var abstractLeveldown$1 = AbstractLevelDOWN$4;

// Re-export the three abstract-leveldown classes under a single namespace
// object, mirroring the package's public surface.
var AbstractLevelDOWN$3 = abstractLeveldown$1;
var AbstractIterator$3 = abstractIterator;
var AbstractChainedBatch$1 = abstractChainedBatch;

var abstractLeveldown = {
  AbstractLevelDOWN: AbstractLevelDOWN$3,
  AbstractIterator: AbstractIterator$3,
  AbstractChainedBatch: AbstractChainedBatch$1
};
// Browser build of Node's util.inherits: links ctor.prototype to
// superCtor.prototype and records the parent class on ctor.super_.
var inherits_browser = createCommonjsModule(function (module) {
if (typeof Object.create === 'function') {
  // implementation from standard node.js 'util' module
  module.exports = function inherits(ctor, superCtor) {
    if (superCtor) {
      ctor.super_ = superCtor;
      ctor.prototype = Object.create(superCtor.prototype, {
        constructor: {
          value: ctor,
          enumerable: false,
          writable: true,
          configurable: true
        }
      });
    }
  };
} else {
  // old school shim for old browsers
  module.exports = function inherits(ctor, superCtor) {
    if (superCtor) {
      ctor.super_ = superCtor;
      // Use a throwaway constructor so superCtor itself is never invoked.
      var TempCtor = function () {};
      TempCtor.prototype = superCtor.prototype;
      ctor.prototype = new TempCtor();
      ctor.prototype.constructor = ctor;
    }
  };
}
});
var AbstractIterator$2 = abstractLeveldown.AbstractIterator;

// Iterator stand-in used while the underlying store is still opening: it
// queues next()/end()/seek() calls and replays them, in order, once setDb()
// hands it the real database.
function DeferredIterator (db, options) {
  AbstractIterator$2.call(this, db);

  this._options = options;
  this._iterator = null;
  this._operations = [];
}

inherits_browser(DeferredIterator, AbstractIterator$2);

// Attach the now-open database: create the real iterator and replay every
// queued operation against it.
DeferredIterator.prototype.setDb = function (db) {
  var target = db.iterator(this._options);
  this._iterator = target;

  this._operations.forEach(function (op) {
    target[op.method].apply(target, op.args);
  });
};

// Forward to the real iterator when available, otherwise queue the call.
DeferredIterator.prototype._operation = function (method, args) {
  if (this._iterator) return this._iterator[method].apply(this._iterator, args)
  this._operations.push({ method: method, args: args });
};

['next', 'end'].forEach(function (m) {
  DeferredIterator.prototype['_' + m] = function () {
    this._operation(m, arguments);
  };
});

// Must defer seek() rather than _seek() because it requires db._serializeKey to be available
DeferredIterator.prototype.seek = function () {
  this._operation('seek', arguments);
};

var deferredIterator = DeferredIterator;
var AbstractLevelDOWN$2 = abstractLeveldown.AbstractLevelDOWN;

// Operations buffered while the wrapped store is still opening.
var deferrables = 'put get del batch clear'.split(' ');
// Optional leveldown extensions, detected on the wrapped store at runtime.
var optionalDeferrables = 'approximateSize compactRange'.split(' ');

// DeferredLevelDOWN wraps a not-yet-open store and buffers all calls,
// replaying them once the store's open() completes.
function DeferredLevelDOWN (db) {
  AbstractLevelDOWN$2.call(this, db.supports || {});

  // TODO (future major): remove this fallback; db must have manifest that
  // declares approximateSize and compactRange in additionalMethods.
  optionalDeferrables.forEach(function (m) {
    if (typeof db[m] === 'function' && !this.supports.additionalMethods[m]) {
      this.supports.additionalMethods[m] = true;
    }
  }, this);

  this._db = db;
  this._operations = [];
  // Start in the buffering ("closed") state until _open() succeeds.
  closed(this);
}

inherits_browser(DeferredLevelDOWN, AbstractLevelDOWN$2);

DeferredLevelDOWN.prototype.type = 'deferred-leveldown';
// Open the wrapped store, then flush queued operations in FIFO order and
// switch this wrapper's methods to direct pass-through mode.
DeferredLevelDOWN.prototype._open = function (options, callback) {
  var self = this;

  this._db.open(options, function (err) {
    if (err) return callback(err)

    // Replay everything queued while closed, in the order received.
    self._operations.forEach(function (op) {
      if (op.iterator) {
        // Deferred iterators replay their own queued calls via setDb().
        op.iterator.setDb(self._db);
        return;
      }
      self._db[op.method].apply(self._db, op.args);
    });
    self._operations = [];

    // Switch to pass-through mode before signalling completion.
    open(self);
    callback();
  });
};
// Close the wrapped store and fall back to the buffering ("closed") state.
DeferredLevelDOWN.prototype._close = function (callback) {
  var self = this;

  var onClosed = function (err) {
    if (err) return callback(err)

    closed(self);
    callback();
  };

  this._db.close(onClosed);
};
// Install direct pass-through methods: every deferrable (plus iterator) and
// every advertised additional method now calls straight into the wrapped,
// open store.
function open (self) {
  deferrables.concat('iterator').forEach(function (name) {
    self['_' + name] = function () {
      return this._db[name].apply(this._db, arguments)
    };
  });

  Object.keys(self.supports.additionalMethods).forEach(function (name) {
    self[name] = function () {
      return this._db[name].apply(this._db, arguments)
    };
  });
}
// Install buffering methods: calls are recorded in self._operations (or, for
// iterators, wrapped in a DeferredIterator) to be replayed once open.
function closed (self) {
  deferrables.forEach(function (name) {
    self['_' + name] = function () {
      this._operations.push({ method: name, args: arguments });
    };
  });

  Object.keys(self.supports.additionalMethods).forEach(function (name) {
    self[name] = function () {
      this._operations.push({ method: name, args: arguments });
    };
  });

  self._iterator = function (options) {
    var deferred = new deferredIterator(self, options);
    this._operations.push({ iterator: deferred });
    return deferred
  };
}
// Serialization is intentionally the identity here: the wrapped store does
// its own serialization when the deferred operations are replayed.
DeferredLevelDOWN.prototype._serializeKey = function (key) {
  return key
};

DeferredLevelDOWN.prototype._serializeValue = function (value) {
  return value
};

// Module exports: the wrapper class plus its iterator companion.
var deferredLeveldown = DeferredLevelDOWN;
var DeferredIterator_1 = deferredIterator;
deferredLeveldown.DeferredIterator = DeferredIterator_1;
// readable-stream's browser Stream base is just an EventEmitter.
var streamBrowser = EventEmitter$1.EventEmitter;

// Inlined Babel runtime helpers (object spread / ES5 class emulation),
// kept verbatim from the bundler output.

// Collect own enumerable keys (and optionally enumerable symbols).
function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; }

// Babel's object-spread helper ({ ...a, ...b }).
function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty$1(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; }

// Define an enumerable, writable, configurable property.
function _defineProperty$1(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }

// Guard against calling a class constructor without `new`.
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }

// Install descriptor-style method definitions on a prototype.
function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }

// Babel's class helper: attaches proto and static members.
function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }

var Buffer$5 = buffer.Buffer;

// util.inspect from the bundled util shim; fall back to the plain string
// key 'inspect' when the custom-inspect symbol is unavailable.
var inspect$1 = debugUtil.inspect;

var custom$1 = inspect$1 && inspect$1.custom || 'inspect';

// Copy `src` into `target` starting at `offset`. Calling
// Buffer.prototype.copy via .call() also works for Uint8Array sources.
function copyBuffer(src, target, offset) {
  Buffer$5.prototype.copy.call(src, target, offset);
}
// Singly linked list of buffered chunks used by readable-stream. Tracks
// `head`, `tail` and a node count in `length` (number of chunks, NOT total
// bytes). Chunk `data` may be Buffers or strings depending on stream mode.
var buffer_list =
/*#__PURE__*/
function () {
  function BufferList() {
    _classCallCheck(this, BufferList);

    this.head = null;
    this.tail = null;
    this.length = 0;
  }

  _createClass(BufferList, [{
    // Append a chunk at the tail.
    key: "push",
    value: function push(v) {
      var entry = {
        data: v,
        next: null
      };
      if (this.length > 0) this.tail.next = entry;else this.head = entry;
      this.tail = entry;
      ++this.length;
    }
  }, {
    // Prepend a chunk at the head.
    key: "unshift",
    value: function unshift(v) {
      var entry = {
        data: v,
        next: this.head
      };
      if (this.length === 0) this.tail = entry;
      this.head = entry;
      ++this.length;
    }
  }, {
    // Remove and return the head chunk's data (undefined when empty).
    key: "shift",
    value: function shift() {
      if (this.length === 0) return;
      var ret = this.head.data;
      if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next;
      --this.length;
      return ret;
    }
  }, {
    // Drop all chunks.
    key: "clear",
    value: function clear() {
      this.head = this.tail = null;
      this.length = 0;
    }
  }, {
    // Join all chunk data with separator `s` (string concatenation).
    key: "join",
    value: function join(s) {
      if (this.length === 0) return '';
      var p = this.head;
      var ret = '' + p.data;

      while (p = p.next) {
        ret += s + p.data;
      }

      return ret;
    }
  }, {
    // Concatenate all chunks into one Buffer of byte length `n`.
    // Assumes `n` equals the total byte length of the stored chunks —
    // callers are expected to track that separately.
    key: "concat",
    value: function concat(n) {
      if (this.length === 0) return Buffer$5.alloc(0);
      var ret = Buffer$5.allocUnsafe(n >>> 0);
      var p = this.head;
      var i = 0;

      while (p) {
        copyBuffer(p.data, ret, i);
        i += p.data.length;
        p = p.next;
      }

      return ret;
    } // Consumes a specified amount of bytes or characters from the buffered data.

  }, {
    key: "consume",
    value: function consume(n, hasStrings) {
      var ret;

      if (n < this.head.data.length) {
        // `slice` is the same for buffers and strings.
        ret = this.head.data.slice(0, n);
        this.head.data = this.head.data.slice(n);
      } else if (n === this.head.data.length) {
        // First chunk is a perfect match.
        ret = this.shift();
      } else {
        // Result spans more than one buffer.
        ret = hasStrings ? this._getString(n) : this._getBuffer(n);
      }

      return ret;
    }
  }, {
    // Peek at the head chunk without removing it (assumes non-empty list).
    key: "first",
    value: function first() {
      return this.head.data;
    } // Consumes a specified amount of characters from the buffered data.

  }, {
    key: "_getString",
    value: function _getString(n) {
      var p = this.head;
      var c = 1; // number of fully consumed nodes
      var ret = p.data;
      n -= ret.length;

      while (p = p.next) {
        var str = p.data;
        var nb = n > str.length ? str.length : n;
        if (nb === str.length) ret += str;else ret += str.slice(0, n);
        n -= nb;

        if (n === 0) {
          if (nb === str.length) {
            // Consumed this node entirely; advance head past it.
            ++c;
            if (p.next) this.head = p.next;else this.head = this.tail = null;
          } else {
            // Partially consumed; keep the remainder in place.
            this.head = p;
            p.data = str.slice(nb);
          }

          break;
        }

        ++c;
      }

      this.length -= c;
      return ret;
    } // Consumes a specified amount of bytes from the buffered data.

  }, {
    key: "_getBuffer",
    value: function _getBuffer(n) {
      var ret = Buffer$5.allocUnsafe(n);
      var p = this.head;
      var c = 1; // number of fully consumed nodes
      p.data.copy(ret);
      n -= p.data.length;

      while (p = p.next) {
        var buf = p.data;
        var nb = n > buf.length ? buf.length : n;
        buf.copy(ret, ret.length - n, 0, nb);
        n -= nb;

        if (n === 0) {
          if (nb === buf.length) {
            // Consumed this node entirely; advance head past it.
            ++c;
            if (p.next) this.head = p.next;else this.head = this.tail = null;
          } else {
            // Partially consumed; keep the remainder in place.
            this.head = p;
            p.data = buf.slice(nb);
          }

          break;
        }

        ++c;
      }

      this.length -= c;
      return ret;
    } // Make sure the linked list only shows the minimal necessary information.

  }, {
    key: custom$1,
    value: function value(_, options) {
      return inspect$1(this, _objectSpread({}, options, {
        // Only inspect one level.
        depth: 0,
        // It should not recurse.
        customInspect: false
      }));
    }
  }]);

  return BufferList;
}();
// Shared stream destroy() implementation (readable-stream). Marks readable
// and/or writable state destroyed, invokes _destroy(), and schedules
// 'error'/'close' emission on the next tick. Returns `this` for chaining.
// Note `this` is the stream instance — this function is installed as a
// prototype method elsewhere in the bundle.
function destroy(err, cb) {
  var _this = this;

  var readableDestroyed = this._readableState && this._readableState.destroyed;
  var writableDestroyed = this._writableState && this._writableState.destroyed;

  // Already destroyed: only report the error (once) or call back.
  if (readableDestroyed || writableDestroyed) {
    if (cb) {
      cb(err);
    } else if (err) {
      if (!this._writableState) {
        process$1.nextTick(emitErrorNT, this, err);
      } else if (!this._writableState.errorEmitted) {
        // errorEmitted guards against emitting 'error' twice.
        this._writableState.errorEmitted = true;
        process$1.nextTick(emitErrorNT, this, err);
      }
    }

    return this;
  } // we set destroyed to true before firing error callbacks in order
  // to make it re-entrance safe in case destroy() is called within callbacks


  if (this._readableState) {
    this._readableState.destroyed = true;
  } // if this is a duplex stream mark the writable part as destroyed as well


  if (this._writableState) {
    this._writableState.destroyed = true;
  }

  this._destroy(err || null, function (err) {
    if (!cb && err) {
      if (!_this._writableState) {
        process$1.nextTick(emitErrorAndCloseNT, _this, err);
      } else if (!_this._writableState.errorEmitted) {
        _this._writableState.errorEmitted = true;
        process$1.nextTick(emitErrorAndCloseNT, _this, err);
      } else {
        process$1.nextTick(emitCloseNT, _this);
      }
    } else if (cb) {
      process$1.nextTick(emitCloseNT, _this);
      cb(err);
    } else {
      process$1.nextTick(emitCloseNT, _this);
    }
  });

  return this;
}
// Emit 'error' followed by 'close' (scheduled via nextTick by callers).
function emitErrorAndCloseNT(self, err) {
  emitErrorNT(self, err);
  emitCloseNT(self);
}

// Emit 'close' unless either side's state opted out via emitClose=false.
function emitCloseNT(self) {
  if (self._writableState && !self._writableState.emitClose) return;
  if (self._readableState && !self._readableState.emitClose) return;
  self.emit('close');
}

// Reset destroyed/ended flags so a destroyed stream can be reused.
// Installed as a prototype method — `this` is the stream instance.
function undestroy() {
  if (this._readableState) {
    this._readableState.destroyed = false;
    this._readableState.reading = false;
    this._readableState.ended = false;
    this._readableState.endEmitted = false;
  }

  if (this._writableState) {
    this._writableState.destroyed = false;
    this._writableState.ended = false;
    this._writableState.ending = false;
    this._writableState.finalCalled = false;
    this._writableState.prefinished = false;
    this._writableState.finished = false;
    this._writableState.errorEmitted = false;
  }
}

// Emit 'error' on the given stream.
function emitErrorNT(self, err) {
  self.emit('error', err);
}
// Route an error either through destroy() (when autoDestroy is enabled on
// either side) or a plain synchronous 'error' emission.
function errorOrDestroy$2(stream, err) {
  // We have tests that rely on errors being emitted
  // in the same tick, so changing this is semver major.
  // For now when you opt-in to autoDestroy we allow
  // the error to be emitted nextTick. In a future
  // semver major update we should change the default to this.
  var rState = stream._readableState;
  var wState = stream._writableState;
  if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err);
}

// Module export: readable-stream's destroy helpers.
var destroy_1 = {
  destroy: destroy,
  undestroy: undestroy,
  errorOrDestroy: errorOrDestroy$2
};
// Babel's loose inheritance helper: rewires the prototype chain and the
// static chain of subClass onto superClass.
function _inheritsLoose(subClass, superClass) {
  subClass.prototype = Object.create(superClass.prototype);
  subClass.prototype.constructor = subClass;
  subClass.__proto__ = superClass;
}

// Registry of generated error classes, keyed by their code string.
var codes = {};

// Register an error class under `code`. `message` is either a fixed string
// or a factory invoked with the constructor arguments; `Base` defaults to
// Error.
function createErrorType(code, message, Base) {
  Base = Base || Error;

  // Resolve the message for one construction.
  function getMessage(arg1, arg2, arg3) {
    if (typeof message === 'string') return message;
    return message(arg1, arg2, arg3);
  }

  function NodeError(arg1, arg2, arg3) {
    return Base.call(this, getMessage(arg1, arg2, arg3)) || this;
  }

  _inheritsLoose(NodeError, Base);

  NodeError.prototype.name = Base.name;
  NodeError.prototype.code = code;
  codes[code] = NodeError;
} // https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js
// Render an "expected type(s)" phrase for error messages, e.g.
// oneOf(['a','b','c'], 'type') -> "one of type a, b, or c".
function oneOf(expected, thing) {
  if (!Array.isArray(expected)) {
    return "of ".concat(thing, " ").concat(String(expected));
  }

  var names = expected.map(function (i) {
    return String(i);
  });
  var len = names.length;

  if (len > 2) {
    // Oxford-comma list: "a, b, or c".
    return "one of ".concat(thing, " ").concat(names.slice(0, len - 1).join(', '), ", or ") + names[len - 1];
  }

  if (len === 2) {
    return "one of ".concat(thing, " ").concat(names[0], " or ").concat(names[1]);
  }

  return "of ".concat(thing, " ").concat(names[0]);
} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith
// String.prototype.startsWith ponyfill; negative/falsy `pos` means 0.
function startsWith(str, search, pos) {
  var start = !pos || pos < 0 ? 0 : +pos;
  return str.substr(start, search.length) === search;
} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith
// String.prototype.endsWith ponyfill; `this_len` (optional) caps the
// portion of `str` considered, clamped to the string length.
function endsWith(str, search, this_len) {
  var end = this_len === undefined || this_len > str.length ? str.length : this_len;
  return str.substring(end - search.length, end) === search;
} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes
// String.prototype.includes ponyfill; non-numeric `start` is treated as 0,
// and a window too small to fit `search` short-circuits to false.
function includes(str, search, start) {
  if (typeof start !== 'number') start = 0;
  return start + search.length <= str.length && str.indexOf(search, start) !== -1;
}
// Register the stream error codes used throughout readable-stream's
// browser build. Message factories receive the constructor arguments.
createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) {
  return 'The value "' + value + '" is invalid for option "' + name + '"';
}, TypeError);
createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) {
  // determiner: 'must be' or 'must not be'
  var determiner;

  if (typeof expected === 'string' && startsWith(expected, 'not ')) {
    determiner = 'must not be';
    expected = expected.replace(/^not /, '');
  } else {
    determiner = 'must be';
  }

  var msg;

  if (endsWith(name, ' argument')) {
    // For cases like 'first argument'
    msg = "The ".concat(name, " ").concat(determiner, " ").concat(oneOf(expected, 'type'));
  } else {
    // A dotted name refers to an object property, otherwise an argument.
    var type = includes(name, '.') ? 'property' : 'argument';
    msg = "The \"".concat(name, "\" ").concat(type, " ").concat(determiner, " ").concat(oneOf(expected, 'type'));
  }

  msg += ". Received type ".concat(typeof actual);
  return msg;
}, TypeError);
createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF');
createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) {
  return 'The ' + name + ' method is not implemented';
});
createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close');
createErrorType('ERR_STREAM_DESTROYED', function (name) {
  return 'Cannot call ' + name + ' after a stream was destroyed';
});
createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times');
createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable');
createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end');
createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError);
createErrorType('ERR_UNKNOWN_ENCODING', function (arg) {
  return 'Unknown encoding: ' + arg;
}, TypeError);
createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event');
// Module export: the populated code registry.
var codes_1 = codes;

var errorsBrowser = {
  codes: codes_1
};
// Pull the option-validation error class out of the errors module.
var ERR_INVALID_OPT_VALUE = errorsBrowser.codes.ERR_INVALID_OPT_VALUE;
function highWaterMarkFrom(options, isDuplex, duplexKey) {
|
||
return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null;
|
||
}
|
||
|
||
function getHighWaterMark$2(state, options, duplexKey, isDuplex) {
|
||
var hwm = highWaterMarkFrom(options, isDuplex, duplexKey);
|
||
|
||
if (hwm != null) {
|
||
if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) {
|
||
var name = isDuplex ? duplexKey : 'highWaterMark';
|
||
throw new ERR_INVALID_OPT_VALUE(name, hwm);
|
||
}
|
||
|
||
return Math.floor(hwm);
|
||
} // Default value
|
||
|
||
|
||
return state.objectMode ? 16 : 16 * 1024;
|
||
}
|
||
|
||
// Module export: stream state helpers (currently just getHighWaterMark).
var state = {
  getHighWaterMark: getHighWaterMark$2
};
/**
 * Module exports.
 */

var browser$1 = deprecate;

/**
 * Mark that a method should not be used.
 * Returns a modified function which warns once by default.
 *
 * If `localStorage.noDeprecation = true` is set, then it is a no-op.
 *
 * If `localStorage.throwDeprecation = true` is set, then deprecated functions
 * will throw an Error when invoked.
 *
 * If `localStorage.traceDeprecation = true` is set, then deprecated functions
 * will invoke `console.trace()` instead of `console.error()`.
 *
 * @param {Function} fn - the function to deprecate
 * @param {String} msg - the string to print to the console when `fn` is invoked
 * @returns {Function} a new "deprecated" version of `fn`
 * @api public
 */

function deprecate (fn, msg) {
  if (config('noDeprecation')) {
    return fn;
  }

  var warned = false;

  function deprecated() {
    if (!warned) {
      // throwDeprecation deliberately leaves `warned` unset, so every
      // subsequent call throws as well.
      if (config('throwDeprecation')) {
        throw new Error(msg);
      }
      if (config('traceDeprecation')) {
        console.trace(msg);
      } else {
        console.warn(msg);
      }
      warned = true;
    }
    return fn.apply(this, arguments);
  }

  return deprecated;
}

/**
 * Checks `localStorage` for boolean values for the given `name`.
 *
 * @param {String} name
 * @returns {Boolean}
 * @api private
 */

function config (name) {
  // accessing global.localStorage can trigger a DOMException in sandboxed iframes
  try {
    if (!commonjsGlobal.localStorage) return false;
  } catch (_) {
    return false;
  }

  var val = commonjsGlobal.localStorage[name];
  if (val == null) return false;
  return String(val).toLowerCase() === 'true';
}
// Duplex is pulled in via an alias to defuse the Writable<->Duplex circular
// dependency (resolved lazily inside WritableState).
var require$$2 = _stream_duplex;

// Module export; `Writable` is a hoisted function declaration from later in
// this bundled module.
var _stream_writable = Writable;
// there will be only 2 of these for each stream
// Linked-list node tracking one cork()ed batch of writes; `finish` is the
// callback invoked when that batch completes.
function CorkedRequest(state) {
  var self = this;

  this.next = null;
  this.entry = null;

  this.finish = function () {
    onCorkedFinish(self, state);
  };
}
/* </replacement> */

/*<replacement>*/
// Duplex reference, assigned lazily in WritableState to avoid the circular
// dependency at module load time.
var Duplex$2;
/*</replacement>*/

Writable.WritableState = WritableState;
/*<replacement>*/

// Deprecation helper used for the legacy _writableState.buffer getter.
var internalUtil = {
  deprecate: browser$1
};
/*</replacement>*/

/*<replacement>*/


/*</replacement>*/


var Buffer$4 = buffer.Buffer;

// Fallback constructor keeps `instanceof` checks harmless on platforms
// without Uint8Array.
var OurUint8Array$1 = commonjsGlobal.Uint8Array || function () {};
// Normalise a Uint8Array chunk into a Buffer.
function _uint8ArrayToBuffer$1(chunk) {
  return Buffer$4.from(chunk);
}

// Accept both Buffers and plain Uint8Arrays as binary chunks.
function _isUint8Array$1(obj) {
  if (Buffer$4.isBuffer(obj)) return true;
  return obj instanceof OurUint8Array$1;
}
var getHighWaterMark$1 = state.getHighWaterMark;

// Error classes used by the Writable implementation, destructured from the
// bundled errors module.
var _require$codes$3 = errorsBrowser.codes,
    ERR_INVALID_ARG_TYPE$1 = _require$codes$3.ERR_INVALID_ARG_TYPE,
    ERR_METHOD_NOT_IMPLEMENTED$2 = _require$codes$3.ERR_METHOD_NOT_IMPLEMENTED,
    ERR_MULTIPLE_CALLBACK$1 = _require$codes$3.ERR_MULTIPLE_CALLBACK,
    ERR_STREAM_CANNOT_PIPE = _require$codes$3.ERR_STREAM_CANNOT_PIPE,
    ERR_STREAM_DESTROYED$1 = _require$codes$3.ERR_STREAM_DESTROYED,
    ERR_STREAM_NULL_VALUES = _require$codes$3.ERR_STREAM_NULL_VALUES,
    ERR_STREAM_WRITE_AFTER_END = _require$codes$3.ERR_STREAM_WRITE_AFTER_END,
    ERR_UNKNOWN_ENCODING = _require$codes$3.ERR_UNKNOWN_ENCODING;

var errorOrDestroy$1 = destroy_1.errorOrDestroy;

// Writable inherits from the EventEmitter-based stream base.
inherits_browser(Writable, streamBrowser);

// Shared no-op callback.
function nop() {}
/**
 * Per-stream state bag for a Writable (or the writable half of a Duplex).
 *
 * @param {Object} options - stream options (may be undefined)
 * @param {Object} stream - the owning Writable/Duplex instance
 * @param {boolean} isDuplex - explicit duplex flag; inferred via
 *   `stream instanceof Duplex` when not a boolean
 */
function WritableState(options, stream, isDuplex) {
  // Lazy-load Duplex here to break the circular module dependency.
  Duplex$2 = Duplex$2 || require$$2;
  options = options || {};

  // Duplex streams are both readable and writable, but share the same
  // options object. Some cases require different values for the two sides
  // (e.g. options.readableObjectMode vs. options.writableObjectMode), hence
  // the duplex-aware lookups below.
  if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex$2;

  // Object stream flag: whether this stream contains buffers or objects.
  this.objectMode = !!options.objectMode;
  if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode;

  // The point at which write() starts returning false. Note: 0 is a valid
  // value, meaning we always return false unless the entire buffer is
  // flushed immediately on write().
  this.highWaterMark = getHighWaterMark$1(this, options, 'writableHighWaterMark', isDuplex);

  // if _final has been called
  this.finalCalled = false;

  // drain event flag.
  this.needDrain = false;

  // at the start of calling end()
  this.ending = false;

  // when end() has been called, and returned
  this.ended = false;

  // when 'finish' is emitted
  this.finished = false;

  // has it been destroyed
  this.destroyed = false;

  // Should we decode strings into buffers before passing to _write? Kept
  // configurable because crypto's historical default string encoding is
  // 'binary' rather than 'utf8'.
  var noDecode = options.decodeStrings === false;
  this.decodeStrings = !noDecode;

  this.defaultEncoding = options.defaultEncoding || 'utf8';

  // Not an actual buffer we keep track of, but a measurement of how much
  // we're waiting to get pushed to some underlying socket or file.
  this.length = 0;

  // a flag to see when we're in the middle of a write.
  this.writing = false;

  // when true all writes will be buffered until .uncork() call
  this.corked = 0;

  // A flag telling whether the onwrite cb is called immediately or on a
  // later tick. Starts true so that actions that shouldn't happen until
  // "later" also don't happen before the first write call.
  this.sync = true;

  // A flag to know if we're processing previously buffered items, which
  // may call the _write() callback in the same tick, so that we don't end
  // up in an overlapped onwrite situation.
  this.bufferProcessing = false;

  // the callback that's passed to _write(chunk,cb)
  this.onwrite = function (er) {
    onwrite(stream, er);
  };

  // the callback that the user supplies to write(chunk,encoding,cb)
  this.writecb = null;

  // the amount that is being written when _write is called.
  this.writelen = 0;
  this.bufferedRequest = null;
  this.lastBufferedRequest = null;

  // Number of pending user-supplied write callbacks; this must be 0 before
  // 'finish' can be emitted.
  this.pendingcb = 0;

  // Emit prefinish if the only thing we're waiting for is _write cbs
  // (relevant for synchronous Transform streams).
  this.prefinished = false;

  // True if the error was already emitted and should not be thrown again
  this.errorEmitted = false;

  // Should close be emitted on destroy. Defaults to true.
  this.emitClose = options.emitClose !== false;

  // Should .destroy() be called after 'finish' (and potentially 'end')
  this.autoDestroy = !!options.autoDestroy;

  // count buffered requests
  this.bufferedRequestCount = 0;

  // Allocate the first CorkedRequest; there is always one allocated and
  // free to use, and we maintain at most two.
  this.corkedRequestsFree = new CorkedRequest(this);
}
// Flatten the singly-linked list of buffered write requests into an array.
// Each node has the shape { chunk, encoding, isBuf, callback, next }.
WritableState.prototype.getBuffer = function getBuffer() {
  var current = this.bufferedRequest;
  var out = [];

  while (current) {
    out.push(current);
    current = current.next;
  }

  return out;
};
|
||
|
||
// Install the legacy `_writableState.buffer` accessor as a deprecated alias
// for getBuffer(). Wrapped in try/catch because defineProperty (or the
// deprecation helper) may be unavailable in exotic environments; failing to
// install the alias is non-fatal.
(function () {
  try {
    Object.defineProperty(WritableState.prototype, 'buffer', {
      get: internalUtil.deprecate(function writableStateBufferGetter() {
        return this.getBuffer();
      }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003')
    });
  } catch (_) {}
})(); // Test _writableState for inheritance to account for Duplex streams,
// whose prototype chain only points to Readable.
|
||
// whose prototype chain only points to Readable.
|
||
|
||
|
||
var realHasInstance;

// When Symbol.hasInstance is usable, widen `x instanceof Writable` so it
// also matches Duplex instances (whose prototype chain only points to
// Readable): any object carrying a real WritableState is accepted.
if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') {
  realHasInstance = Function.prototype[Symbol.hasInstance];
  Object.defineProperty(Writable, Symbol.hasInstance, {
    value: function value(object) {
      if (realHasInstance.call(this, object)) return true;
      // Only widen the check for Writable itself, not for subclasses.
      if (this !== Writable) return false;
      return object && object._writableState instanceof WritableState;
    }
  });
} else {
  // Fallback for environments without Symbol.hasInstance support.
  realHasInstance = function realHasInstance(object) {
    return object instanceof this;
  };
}
|
||
|
||
// Writable stream constructor. Also invoked by Duplex, which is why the
// Duplex check below exists. Supports being called without `new`.
function Writable(options) {
  Duplex$2 = Duplex$2 || require$$2; // Writable ctor is applied to Duplexes, too.
  // `realHasInstance` is necessary because using plain `instanceof`
  // would return false, as no `_writableState` property is attached.
  // Trying to use the custom `instanceof` for Writable here will also break the
  // Node.js LazyTransform implementation, which has a non-trivial getter for
  // `_writableState` that would lead to infinite recursion.
  // Checking for a Stream.Duplex instance is faster here instead of inside
  // the WritableState constructor, at least with V8 6.5

  var isDuplex = this instanceof Duplex$2;
  if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options);
  this._writableState = new WritableState(options, this, isDuplex); // legacy.

  this.writable = true;

  // The implementation hooks may be supplied via options instead of
  // subclassing.
  if (options) {
    if (typeof options.write === 'function') this._write = options.write;
    if (typeof options.writev === 'function') this._writev = options.writev;
    if (typeof options.destroy === 'function') this._destroy = options.destroy;
    if (typeof options.final === 'function') this._final = options.final;
  }

  streamBrowser.call(this);
} // Otherwise people can pipe Writable streams, which is just wrong.
|
||
|
||
|
||
// Writable streams are sinks: piping FROM one is a programmer error, so
// raise ERR_STREAM_CANNOT_PIPE instead of silently misbehaving.
Writable.prototype.pipe = function () {
  errorOrDestroy$1(this, new ERR_STREAM_CANNOT_PIPE());
};
|
||
|
||
// Reject a write() issued after end(): error the stream and deliver the
// same error to the write callback on the next tick.
function writeAfterEnd(stream, cb) {
  var er = new ERR_STREAM_WRITE_AFTER_END(); // TODO: defer error events consistently everywhere, not just the cb

  errorOrDestroy$1(stream, er);
  process$1.nextTick(cb, er);
} // Checks that a user-supplied chunk is valid, especially for the particular
} // Checks that a user-supplied chunk is valid, especially for the particular
|
||
// mode the stream is in. Currently this means that `null` is never accepted
|
||
// and undefined/non-string values are only allowed in object mode.
|
||
|
||
|
||
// Validate a user-supplied chunk for the stream's current mode: `null` is
// never accepted, and non-string values are only allowed in object mode.
// On failure, errors the stream, schedules cb with the error, and returns
// false; returns true when the chunk may proceed.
function validChunk(stream, state, chunk, cb) {
  var er;

  if (chunk === null) {
    er = new ERR_STREAM_NULL_VALUES();
  } else if (typeof chunk !== 'string' && !state.objectMode) {
    er = new ERR_INVALID_ARG_TYPE$1('chunk', ['string', 'Buffer'], chunk);
  }

  if (er) {
    errorOrDestroy$1(stream, er);
    process$1.nextTick(cb, er);
    return false;
  }

  return true;
}
|
||
|
||
// Public write(chunk[, encoding][, cb]). Normalizes the optional-argument
// forms, coerces generic Uint8Array chunks to Buffer, validates the chunk,
// and hands off to writeOrBuffer. Returns false when the caller should
// pause and wait for 'drain'.
Writable.prototype.write = function (chunk, encoding, cb) {
  var state = this._writableState;
  var ret = false;

  // In byte mode, plain Uint8Arrays are accepted and converted to Buffer.
  var isBuf = !state.objectMode && _isUint8Array$1(chunk);

  if (isBuf && !Buffer$4.isBuffer(chunk)) {
    chunk = _uint8ArrayToBuffer$1(chunk);
  }

  // write(chunk, cb) form: `encoding` was actually the callback.
  if (typeof encoding === 'function') {
    cb = encoding;
    encoding = null;
  }

  if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding;
  if (typeof cb !== 'function') cb = nop;
  if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) {
    state.pendingcb++;
    ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb);
  }
  return ret;
};
|
||
|
||
// Increase the cork depth: while corked, writes are queued instead of
// being flushed to _write/_writev.
Writable.prototype.cork = function () {
  this._writableState.corked++;
};
|
||
|
||
// Undo one cork() call; once fully uncorked (and no write is in flight or
// buffer flush already running), flush the queued requests.
Writable.prototype.uncork = function () {
  var state = this._writableState;

  if (state.corked) {
    state.corked--;
    if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state);
  }
};
|
||
|
||
// Set the encoding assumed when write() receives a string without an
// explicit encoding. Throws ERR_UNKNOWN_ENCODING for unsupported names.
Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {
  // node::ParseEncoding() requires lower case.
  if (typeof encoding === 'string') encoding = encoding.toLowerCase();
  if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding);
  this._writableState.defaultEncoding = encoding;
  return this;
};
|
||
|
||
// Read-only view of the queued write requests (see getBuffer()).
Object.defineProperty(Writable.prototype, 'writableBuffer', {
  // making it explicit this property is not enumerable
  // because otherwise some prototype manipulation in
  // userland will fail
  enumerable: false,
  get: function get() {
    return this._writableState && this._writableState.getBuffer();
  }
});
|
||
|
||
// Convert a string chunk to a Buffer when the stream is in byte mode and
// string decoding has not been disabled; all other chunks pass through
// untouched.
function decodeChunk(state, chunk, encoding) {
  var shouldDecode = !state.objectMode &&
    state.decodeStrings !== false &&
    typeof chunk === 'string';
  return shouldDecode ? Buffer$4.from(chunk, encoding) : chunk;
}
|
||
|
||
// Read-only accessor for the configured high-water mark (bytes or objects).
Object.defineProperty(Writable.prototype, 'writableHighWaterMark', {
  // making it explicit this property is not enumerable
  // because otherwise some prototype manipulation in
  // userland will fail
  enumerable: false,
  get: function get() {
    return this._writableState.highWaterMark;
  }
}); // if we're already writing something, then just put this
}); // if we're already writing something, then just put this
|
||
// in the queue, and wait our turn. Otherwise, call _write
|
||
// If we return false, then we need a drain event, so set that flag.
|
||
|
||
// Either dispatch the chunk to doWrite immediately, or — while a write is
// in flight or the stream is corked — append it to the linked request
// queue. Returns false (caller must wait for 'drain') once the buffered
// length reaches the high-water mark.
function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) {
  if (!isBuf) {
    // Strings may be converted to Buffers here depending on decodeStrings.
    var newChunk = decodeChunk(state, chunk, encoding);

    if (chunk !== newChunk) {
      isBuf = true;
      encoding = 'buffer';
      chunk = newChunk;
    }
  }

  // Object mode counts items, byte mode counts bytes.
  var len = state.objectMode ? 1 : chunk.length;
  state.length += len;
  var ret = state.length < state.highWaterMark; // we must ensure that previous needDrain will not be reset to false.

  if (!ret) state.needDrain = true;

  if (state.writing || state.corked) {
    // Append a new node to the tail of the buffered-request list.
    var last = state.lastBufferedRequest;
    state.lastBufferedRequest = {
      chunk: chunk,
      encoding: encoding,
      isBuf: isBuf,
      callback: cb,
      next: null
    };

    if (last) {
      last.next = state.lastBufferedRequest;
    } else {
      state.bufferedRequest = state.lastBufferedRequest;
    }

    state.bufferedRequestCount += 1;
  } else {
    doWrite(stream, state, false, len, chunk, encoding, cb);
  }

  return ret;
}
|
||
|
||
// Dispatch one chunk (or, with writev=true, a batch) to the user-supplied
// _write/_writev, recording the in-flight bookkeeping first. `state.sync`
// stays true only while the implementation calls back synchronously, which
// onwrite uses to defer the user callback.
function doWrite(stream, state, writev, len, chunk, encoding, cb) {
  state.writelen = len;
  state.writecb = cb;
  state.writing = true;
  state.sync = true;
  if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED$1('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite);
  state.sync = false;
}
|
||
|
||
// Propagate a _write() error: always run the user callback before the
// stream-level error, and guarantee that 'finish' (if it fires at all)
// follows the error. The sync path defers both to the next tick to avoid
// growing the stack.
function onwriteError(stream, state, sync, er, cb) {
  --state.pendingcb;

  if (sync) {
    // defer the callback if we are being called synchronously
    // to avoid piling up things on the stack
    process$1.nextTick(cb, er); // this can emit finish, and it will always happen
    // after error

    process$1.nextTick(finishMaybe, stream, state);
    stream._writableState.errorEmitted = true;
    errorOrDestroy$1(stream, er);
  } else {
    // the caller expect this to happen before if
    // it is async
    cb(er);
    stream._writableState.errorEmitted = true;
    errorOrDestroy$1(stream, er); // this can emit finish, but finish must
    // always follow error

    finishMaybe(stream, state);
  }
}
|
||
|
||
// Reset per-write bookkeeping once a _write() call has reported back.
function onwriteStateUpdate(state) {
  // The chunk that was in flight no longer counts toward the buffered total.
  state.length -= state.writelen;
  state.writelen = 0;
  // Clear the in-flight markers so the next write may proceed.
  state.writecb = null;
  state.writing = false;
}
|
||
|
||
// Callback invoked when the user's _write/_writev completes: release the
// per-write state, then either propagate the error or continue flushing /
// finishing. Synchronous completions defer afterWrite to the next tick so
// callback ordering is preserved.
function onwrite(stream, er) {
  var state = stream._writableState;
  var sync = state.sync;
  var cb = state.writecb;
  // A second invocation would find writecb already cleared.
  if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK$1();
  onwriteStateUpdate(state);
  if (er) onwriteError(stream, state, sync, er, cb);else {
    // Check if we're actually ready to finish, but don't emit yet
    var finished = needFinish(state) || stream.destroyed;

    if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) {
      clearBuffer(stream, state);
    }

    if (sync) {
      process$1.nextTick(afterWrite, stream, state, finished, cb);
    } else {
      afterWrite(stream, state, finished, cb);
    }
  }
}
|
||
|
||
// Post-write bookkeeping: possibly emit 'drain', run the user callback,
// and check whether the stream can now finish.
function afterWrite(stream, state, finished, cb) {
  if (!finished) onwriteDrain(stream, state);
  state.pendingcb--;
  cb();
  finishMaybe(stream, state);
} // Must force callback to be called on nextTick, so that we don't
} // Must force callback to be called on nextTick, so that we don't
|
||
// emit 'drain' before the write() consumer gets the 'false' return
|
||
// value, and has a chance to attach a 'drain' listener.
|
||
|
||
|
||
// Emit 'drain' exactly once when the buffer has fully emptied after a
// write() returned false (needDrain); otherwise do nothing.
function onwriteDrain(stream, state) {
  if (state.length !== 0) return;
  if (!state.needDrain) return;
  state.needDrain = false;
  stream.emit('drain');
} // if there's something in the buffer waiting, then process it
} // if there's something in the buffer waiting, then process it
|
||
|
||
|
||
// Flush the queued write requests: batch them all through _writev() when
// available and more than one request is queued (fast path), otherwise
// write them one-by-one until a write goes asynchronous.
function clearBuffer(stream, state) {
  state.bufferProcessing = true;
  var entry = state.bufferedRequest;

  if (stream._writev && entry && entry.next) {
    // Fast case, write everything using _writev()
    var l = state.bufferedRequestCount;
    var buffer = new Array(l);
    var holder = state.corkedRequestsFree;
    holder.entry = entry;
    var count = 0;
    var allBuffers = true;

    // Copy the linked list into an array for _writev, tracking whether
    // every chunk is a Buffer (lets _writev skip per-chunk checks).
    while (entry) {
      buffer[count] = entry;
      if (!entry.isBuf) allBuffers = false;
      entry = entry.next;
      count += 1;
    }

    buffer.allBuffers = allBuffers;
    doWrite(stream, state, true, state.length, buffer, '', holder.finish); // doWrite is almost always async, defer these to save a bit of time
    // as the hot path ends with doWrite

    state.pendingcb++;
    state.lastBufferedRequest = null;

    // Keep one CorkedRequest free for the next batch.
    if (holder.next) {
      state.corkedRequestsFree = holder.next;
      holder.next = null;
    } else {
      state.corkedRequestsFree = new CorkedRequest(state);
    }

    state.bufferedRequestCount = 0;
  } else {
    // Slow case, write chunks one-by-one
    while (entry) {
      var chunk = entry.chunk;
      var encoding = entry.encoding;
      var cb = entry.callback;
      var len = state.objectMode ? 1 : chunk.length;
      doWrite(stream, state, false, len, chunk, encoding, cb);
      entry = entry.next;
      state.bufferedRequestCount--; // if we didn't call the onwrite immediately, then
      // it means that we need to wait until it does.
      // also, that means that the chunk and cb are currently
      // being processed, so move the buffer counter past them.

      if (state.writing) {
        break;
      }
    }

    if (entry === null) state.lastBufferedRequest = null;
  }

  state.bufferedRequest = entry;
  state.bufferProcessing = false;
}
|
||
|
||
// Default _write: subclasses (or options.write) must override; otherwise
// every write fails with ERR_METHOD_NOT_IMPLEMENTED.
Writable.prototype._write = function (chunk, encoding, cb) {
  cb(new ERR_METHOD_NOT_IMPLEMENTED$2('_write()'));
};
|
||
|
||
// No vectored write by default; clearBuffer only uses _writev when a
// subclass provides one.
Writable.prototype._writev = null;

// Public end([chunk][, encoding][, cb]): optionally write a final chunk,
// fully uncork, and transition the stream into the ending state.
Writable.prototype.end = function (chunk, encoding, cb) {
  var state = this._writableState;

  // end(cb) and end(chunk, cb) argument shuffling.
  if (typeof chunk === 'function') {
    cb = chunk;
    chunk = null;
    encoding = null;
  } else if (typeof encoding === 'function') {
    cb = encoding;
    encoding = null;
  }

  if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); // .end() fully uncorks

  if (state.corked) {
    state.corked = 1;
    this.uncork();
  } // ignore unnecessary end() calls.


  if (!state.ending) endWritable(this, state, cb);
  return this;
};
|
||
|
||
// Read-only count of bytes (or objects, in object mode) currently queued.
Object.defineProperty(Writable.prototype, 'writableLength', {
  // making it explicit this property is not enumerable
  // because otherwise some prototype manipulation in
  // userland will fail
  enumerable: false,
  get: function get() {
    return this._writableState.length;
  }
});
|
||
|
||
// A writable is ready to finish only when end() has been called, nothing
// is buffered or in flight, and 'finish' has not already been emitted.
function needFinish(state) {
  var idle = !state.finished && !state.writing;
  return state.ending && state.length === 0 && state.bufferedRequest === null && idle;
}
|
||
|
||
// Invoke the user-supplied _final() hook, then emit 'prefinish' and retry
// finishing — regardless of whether _final reported an error (the error is
// routed through errorOrDestroy first).
function callFinal(stream, state) {
  stream._final(function (err) {
    state.pendingcb--;

    if (err) {
      errorOrDestroy$1(stream, err);
    }

    state.prefinished = true;
    stream.emit('prefinish');
    finishMaybe(stream, state);
  });
}
|
||
|
||
// Emit 'prefinish' exactly once, routing through _final() (asynchronously,
// on the next tick) when the subclass defines one and the stream is alive.
function prefinish$1(stream, state) {
  if (!state.prefinished && !state.finalCalled) {
    if (typeof stream._final === 'function' && !state.destroyed) {
      state.pendingcb++;
      state.finalCalled = true;
      process$1.nextTick(callFinal, stream, state);
    } else {
      state.prefinished = true;
      stream.emit('prefinish');
    }
  }
}
|
||
|
||
// Emit 'finish' once all data has been flushed and every pending callback
// has completed; optionally auto-destroy afterwards. Returns whether the
// stream is (now) in the finishing state.
function finishMaybe(stream, state) {
  var need = needFinish(state);

  if (need) {
    prefinish$1(stream, state);

    // pendingcb reaches 0 only after _final (if any) and every write
    // callback has run.
    if (state.pendingcb === 0) {
      state.finished = true;
      stream.emit('finish');

      if (state.autoDestroy) {
        // In case of duplex streams we need a way to detect
        // if the readable side is ready for autoDestroy as well
        var rState = stream._readableState;

        if (!rState || rState.autoDestroy && rState.endEmitted) {
          stream.destroy();
        }
      }
    }
  }

  return need;
}
|
||
|
||
// Transition the stream into the ending/ended state and arrange for `cb`
// to run once 'finish' is emitted (on the next tick if already finished).
function endWritable(stream, state, cb) {
  state.ending = true;
  finishMaybe(stream, state);

  if (cb) {
    if (state.finished) process$1.nextTick(cb);else stream.once('finish', cb);
  }

  state.ended = true;
  stream.writable = false;
}
|
||
|
||
// Settle every write callback that was batched into a corked _writev
// request, then park the request object on the free list for reuse.
function onCorkedFinish(corkReq, state, err) {
  var node = corkReq.entry;
  corkReq.entry = null;

  for (; node; node = node.next) {
    var done = node.callback;
    state.pendingcb--;
    done(err);
  }

  // Recycle the now-empty corked request.
  state.corkedRequestsFree.next = corkReq;
}
|
||
|
||
// Legacy `destroyed` accessor pair: reads the state flag, and lets users
// who manage destruction themselves set it directly.
Object.defineProperty(Writable.prototype, 'destroyed', {
  // making it explicit this property is not enumerable
  // because otherwise some prototype manipulation in
  // userland will fail
  enumerable: false,
  get: function get() {
    if (this._writableState === undefined) {
      return false;
    }

    return this._writableState.destroyed;
  },
  set: function set(value) {
    // we ignore the value if the stream
    // has not been initialized yet
    if (!this._writableState) {
      return;
    } // backward compatibility, the user is explicitly
    // managing destroyed


    this._writableState.destroyed = value;
  }
});
|
||
// Wire up the shared destroy/undestroy helpers.
Writable.prototype.destroy = destroy_1.destroy;
Writable.prototype._undestroy = destroy_1.undestroy;

// Default _destroy just forwards the error; subclasses override it to
// release underlying resources.
Writable.prototype._destroy = function (err, cb) {
  cb(err);
};
|
||
|
||
var require$$0 = _stream_readable;
|
||
|
||
/*<replacement>*/
|
||
|
||
// Object.keys with a for-in fallback for pre-ES5 environments.
var objectKeys$1 = Object.keys || function (obj) {
  var names = [];

  for (var prop in obj) {
    names.push(prop);
  }

  return names;
};
|
||
/*</replacement>*/
|
||
|
||
|
||
var _stream_duplex = Duplex$1;

// Duplex inherits from Readable...
inherits_browser(Duplex$1, require$$0);

{
  // Allow the keys array to be GC'ed.
  // ...then mixes in every Writable prototype method that Readable does
  // not already provide.
  var keys = objectKeys$1(_stream_writable.prototype);

  for (var v = 0; v < keys.length; v++) {
    var method = keys[v];
    if (!Duplex$1.prototype[method]) Duplex$1.prototype[method] = _stream_writable.prototype[method];
  }
}
|
||
|
||
// A Duplex is both Readable and Writable: run both constructors and honor
// the readable/writable/allowHalfOpen option toggles. Supports being
// called without `new`.
function Duplex$1(options) {
  if (!(this instanceof Duplex$1)) return new Duplex$1(options);
  require$$0.call(this, options);
  _stream_writable.call(this, options);
  this.allowHalfOpen = true;

  if (options) {
    if (options.readable === false) this.readable = false;
    if (options.writable === false) this.writable = false;

    if (options.allowHalfOpen === false) {
      this.allowHalfOpen = false;
      // When half-open is disallowed, end the writable side once the
      // readable side has emitted 'end'.
      this.once('end', onend);
    }
  }
}
|
||
|
||
// Mirror the Writable accessors onto Duplex (they are not copied by the
// prototype-method mixin above because accessors are not own enumerable
// function properties).
Object.defineProperty(Duplex$1.prototype, 'writableHighWaterMark', {
  // making it explicit this property is not enumerable
  // because otherwise some prototype manipulation in
  // userland will fail
  enumerable: false,
  get: function get() {
    return this._writableState.highWaterMark;
  }
});
Object.defineProperty(Duplex$1.prototype, 'writableBuffer', {
  // making it explicit this property is not enumerable
  // because otherwise some prototype manipulation in
  // userland will fail
  enumerable: false,
  get: function get() {
    return this._writableState && this._writableState.getBuffer();
  }
});
Object.defineProperty(Duplex$1.prototype, 'writableLength', {
  // making it explicit this property is not enumerable
  // because otherwise some prototype manipulation in
  // userland will fail
  enumerable: false,
  get: function get() {
    return this._writableState.length;
  }
}); // the no-half-open enforcer
|
||
|
||
// The no-half-open enforcer: when the readable side emits 'end', schedule
// ending the writable side on the next tick (so writes issued in the same
// tick still succeed).
function onend() {
  // If the writable side ended, then we're ok.
  if (this._writableState.ended) return; // no more data can be written.
  // But allow more writes to happen in this tick.

  process$1.nextTick(onEndNT, this);
}
|
||
|
||
// Deferred half of the no-half-open enforcer: end the writable side on the
// tick after the readable side emitted 'end'.
function onEndNT(stream) {
  stream.end();
}
|
||
|
||
// Duplex `destroyed` accessor: true only when BOTH sides are destroyed;
// setting it updates both sides (legacy user-managed destruction).
Object.defineProperty(Duplex$1.prototype, 'destroyed', {
  // making it explicit this property is not enumerable
  // because otherwise some prototype manipulation in
  // userland will fail
  enumerable: false,
  get: function get() {
    if (this._readableState === undefined || this._writableState === undefined) {
      return false;
    }

    return this._readableState.destroyed && this._writableState.destroyed;
  },
  set: function set(value) {
    // we ignore the value if the stream
    // has not been initialized yet
    if (this._readableState === undefined || this._writableState === undefined) {
      return;
    } // backward compatibility, the user is explicitly
    // managing destroyed


    this._readableState.destroyed = value;
    this._writableState.destroyed = value;
  }
});
|
||
|
||
// Inlined safe-buffer shim: on platforms whose Buffer already has
// from/alloc/allocUnsafe/allocUnsafeSlow it re-exports the native buffer
// module untouched; otherwise it wraps the legacy Buffer constructor with
// the safe static factories.
var safeBuffer = createCommonjsModule(function (module, exports) {
/*! safe-buffer. MIT License. Feross Aboukhadijeh <https://feross.org/opensource> */
/* eslint-disable node/no-deprecated-api */

var Buffer = buffer.Buffer;

// alternative to using Object.keys for old browsers
function copyProps (src, dst) {
  for (var key in src) {
    dst[key] = src[key];
  }
}
if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) {
  module.exports = buffer;
} else {
  // Copy properties from require('buffer')
  copyProps(buffer, exports);
  exports.Buffer = SafeBuffer;
}

// Thin constructor wrapper so SafeBuffer instances behave like Buffers.
function SafeBuffer (arg, encodingOrOffset, length) {
  return Buffer(arg, encodingOrOffset, length)
}

SafeBuffer.prototype = Object.create(Buffer.prototype);

// Copy static methods from Buffer
copyProps(Buffer, SafeBuffer);

// from() must never be handed a number (that was the unsafe legacy form
// that allocated uninitialized memory).
SafeBuffer.from = function (arg, encodingOrOffset, length) {
  if (typeof arg === 'number') {
    throw new TypeError('Argument must not be a number')
  }
  return Buffer(arg, encodingOrOffset, length)
};

// alloc() returns zero-filled (or caller-filled) memory.
SafeBuffer.alloc = function (size, fill, encoding) {
  if (typeof size !== 'number') {
    throw new TypeError('Argument must be a number')
  }
  var buf = Buffer(size);
  if (fill !== undefined) {
    if (typeof encoding === 'string') {
      buf.fill(fill, encoding);
    } else {
      buf.fill(fill);
    }
  } else {
    buf.fill(0);
  }
  return buf
};

// allocUnsafe() intentionally returns uninitialized memory.
SafeBuffer.allocUnsafe = function (size) {
  if (typeof size !== 'number') {
    throw new TypeError('Argument must be a number')
  }
  return Buffer(size)
};

SafeBuffer.allocUnsafeSlow = function (size) {
  if (typeof size !== 'number') {
    throw new TypeError('Argument must be a number')
  }
  return buffer.SlowBuffer(size)
};
});
|
||
|
||
/*<replacement>*/
|
||
|
||
var Buffer$3 = safeBuffer.Buffer;
/*</replacement>*/

// Fallback isEncoding for Buffer implementations that lack one; recognizes
// the encoding names the string decoder supports.
var isEncoding = Buffer$3.isEncoding || function (encoding) {
  encoding = '' + encoding;
  switch (encoding && encoding.toLowerCase()) {
    case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw':
      return true;
    default:
      return false;
  }
};
|
||
|
||
// Map a caller-supplied encoding name to its canonical form, retrying once
// with a lowercased string before giving up. Falsy input defaults to
// 'utf8'; unrecognized names yield undefined.
function _normalizeEncoding(enc) {
  if (!enc) return 'utf8';
  var lookup = {
    utf8: 'utf8',
    'utf-8': 'utf8',
    ucs2: 'utf16le',
    'ucs-2': 'utf16le',
    utf16le: 'utf16le',
    'utf-16le': 'utf16le',
    latin1: 'latin1',
    binary: 'latin1',
    base64: 'base64',
    ascii: 'ascii',
    hex: 'hex'
  };
  if (Object.prototype.hasOwnProperty.call(lookup, enc)) return lookup[enc];
  // Second (and last) chance: lowercase the stringified name.
  var lowered = ('' + enc).toLowerCase();
  if (Object.prototype.hasOwnProperty.call(lookup, lowered)) return lookup[lowered];
}
|
||
// Do not cache `Buffer.isEncoding` when checking encoding names as some
|
||
// modules monkey-patch it to support additional encodings
|
||
// Normalize `enc`, throwing for names that neither this module nor the
// Buffer implementation recognizes; names the (possibly monkey-patched)
// Buffer supports pass through unchanged.
function normalizeEncoding(enc) {
  var nenc = _normalizeEncoding(enc);
  if (typeof nenc !== 'string' && (Buffer$3.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc);
  return nenc || enc;
}
|
||
|
||
// StringDecoder provides an interface for efficiently splitting a series of
|
||
// buffers into a series of JS strings without breaking apart multi-byte
|
||
// characters.
|
||
var StringDecoder_1 = StringDecoder$1;
// Per-encoding dispatch: multi-byte encodings install specialized
// text/end/fillLast handlers and reserve `nb` bytes of carry-over space in
// lastChar; single-byte encodings short-circuit to simpleWrite/simpleEnd
// (which never need carry-over state).
function StringDecoder$1(encoding) {
  this.encoding = normalizeEncoding(encoding);
  var nb;
  switch (this.encoding) {
    case 'utf16le':
      this.text = utf16Text;
      this.end = utf16End;
      nb = 4;
      break;
    case 'utf8':
      this.fillLast = utf8FillLast;
      nb = 4;
      break;
    case 'base64':
      this.text = base64Text;
      this.end = base64End;
      nb = 3;
      break;
    default:
      this.write = simpleWrite;
      this.end = simpleEnd;
      return;
  }
  // lastNeed: bytes still required to complete the pending character;
  // lastTotal: total byte length of that character; lastChar: carry-over
  // bytes of the partial character.
  this.lastNeed = 0;
  this.lastTotal = 0;
  this.lastChar = Buffer$3.allocUnsafe(nb);
}
|
||
|
||
// Decode a chunk: first complete any character left pending from the
// previous chunk (fillLast), then decode the remainder via this.text.
StringDecoder$1.prototype.write = function (buf) {
  if (buf.length === 0) return '';
  var r;
  var i;
  if (this.lastNeed) {
    // Try to finish the partial character carried over in lastChar.
    r = this.fillLast(buf);
    if (r === undefined) return ''; // still incomplete: whole chunk consumed
    i = this.lastNeed;
    this.lastNeed = 0;
  } else {
    i = 0;
  }
  // Decode everything after the carry-over bytes, if any remain.
  if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i);
  return r || '';
};
|
||
|
||
// Default handlers target UTF-8; the constructor overrides these for other
// multi-byte encodings.
StringDecoder$1.prototype.end = utf8End;

// Returns only complete characters in a Buffer
StringDecoder$1.prototype.text = utf8Text;

// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer
StringDecoder$1.prototype.fillLast = function (buf) {
  // Enough bytes arrived: splice them into lastChar and decode it whole.
  if (this.lastNeed <= buf.length) {
    buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed);
    return this.lastChar.toString(this.encoding, 0, this.lastTotal);
  }
  // Still short: stash what arrived and keep waiting (returns undefined).
  buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length);
  this.lastNeed -= buf.length;
};
|
||
|
||
// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a
|
||
// continuation byte. If an invalid byte is detected, -2 is returned.
|
||
// Classify a UTF-8 byte: 0 for ASCII, 2-4 for a lead byte of a sequence of
// that total length, -1 for a continuation byte, -2 for an invalid byte.
function utf8CheckByte(byte) {
  if (byte <= 0x7F) return 0;
  if (byte >> 5 === 0x06) return 2;
  if (byte >> 4 === 0x0E) return 3;
  if (byte >> 3 === 0x1E) return 4;
  return byte >> 6 === 0x02 ? -1 : -2;
}
|
||
|
||
// Checks at most 3 bytes at the end of a Buffer in order to detect an
|
||
// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4)
|
||
// needed to complete the UTF-8 character (if applicable) are returned.
|
||
// Scan at most the last 3 bytes of `buf` (never before index i) for the
// lead byte of an incomplete multi-byte UTF-8 character. Records the number
// of continuation bytes still required in self.lastNeed and returns the
// full sequence length (0 when the buffer ends on a complete character).
function utf8CheckIncomplete(self, buf, i) {
  var j = buf.length - 1;
  if (j < i) return 0;
  // Last byte: a lead byte here means the whole tail is incomplete.
  var nb = utf8CheckByte(buf[j]);
  if (nb >= 0) {
    if (nb > 0) self.lastNeed = nb - 1;
    return nb;
  }
  // Step back one byte (stop on invalid bytes or the start index).
  if (--j < i || nb === -2) return 0;
  nb = utf8CheckByte(buf[j]);
  if (nb >= 0) {
    if (nb > 0) self.lastNeed = nb - 2;
    return nb;
  }
  if (--j < i || nb === -2) return 0;
  nb = utf8CheckByte(buf[j]);
  if (nb >= 0) {
    if (nb > 0) {
      // A 2-byte lead three positions back is already complete.
      if (nb === 2) nb = 0;else self.lastNeed = nb - 3;
    }
    return nb;
  }
  return 0;
}
|
||
|
||
// Validates as many continuation bytes for a multi-byte UTF-8 character as
|
||
// needed or are available. If we see a non-continuation byte where we expect
|
||
// one, we "replace" the validated continuation bytes we've seen so far with
|
||
// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding
|
||
// behavior. The continuation byte check is included three times in the case
|
||
// where all of the continuation bytes for a character exist in the same buffer.
|
||
// It is also done this way as a slight performance increase instead of using a
|
||
// loop.
|
||
// Validate up to three continuation bytes of the pending UTF-8 character
// against the start of `buf`. On the first non-continuation byte, record
// how many bytes had validated (self.lastNeed) and return a single
// replacement character, matching v8's decoding behavior; returns
// undefined when everything checked out.
function utf8CheckExtraBytes(self, buf, p) {
  if ((buf[0] & 0xC0) !== 0x80) {
    self.lastNeed = 0;
    return '\ufffd';
  }
  if (self.lastNeed > 1 && buf.length > 1) {
    if ((buf[1] & 0xC0) !== 0x80) {
      self.lastNeed = 1;
      return '\ufffd';
    }
    if (self.lastNeed > 2 && buf.length > 2 && (buf[2] & 0xC0) !== 0x80) {
      self.lastNeed = 2;
      return '\ufffd';
    }
  }
}
|
||
|
||
// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer.
|
||
// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer.
function utf8FillLast(buf) {
  var p = this.lastTotal - this.lastNeed;
  // Invalid continuation bytes collapse to a replacement character.
  var r = utf8CheckExtraBytes(this, buf);
  if (r !== undefined) return r;
  if (this.lastNeed <= buf.length) {
    // Enough bytes arrived: complete lastChar and decode it whole.
    buf.copy(this.lastChar, p, 0, this.lastNeed);
    return this.lastChar.toString(this.encoding, 0, this.lastTotal);
  }
  // Not enough yet; accumulate and keep waiting (returns undefined).
  buf.copy(this.lastChar, p, 0, buf.length);
  this.lastNeed -= buf.length;
}
|
||
|
||
// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a
|
||
// partial character, the character's bytes are buffered until the required
|
||
// number of bytes are available.
|
||
// Returns all complete UTF-8 characters in `buf` from index i. If the
// buffer ends on a partial character, its bytes are stashed in lastChar
// until the required continuation bytes arrive.
function utf8Text(buf, i) {
  var total = utf8CheckIncomplete(this, buf, i);
  if (!this.lastNeed) return buf.toString('utf8', i);
  this.lastTotal = total;
  // `end` marks where the trailing partial character begins.
  var end = buf.length - (total - this.lastNeed);
  buf.copy(this.lastChar, 0, end);
  return buf.toString('utf8', i, end);
}
|
||
|
||
// For UTF-8, a replacement character is added when ending on a partial
|
||
// character.
|
||
// Final flush for UTF-8: decode any remaining input, and if the stream
// ended mid-character append a single replacement character instead of the
// incomplete sequence.
function utf8End(buf) {
  var decoded = '';
  if (buf && buf.length) decoded = this.write(buf);
  return this.lastNeed ? decoded + '\ufffd' : decoded;
}
|
||
|
||
// UTF-16LE typically needs two bytes per character, but even if we have an even
|
||
// number of bytes available, we need to check if we end on a leading/high
|
||
// surrogate. In that case, we need to wait for the next two bytes in order to
|
||
// decode the last character properly.
|
||
// UTF-16LE typically needs two bytes per character, but even with an even
// byte count we must check for a trailing high surrogate: its low
// surrogate has not arrived yet, so those two bytes are held back.
function utf16Text(buf, i) {
  if ((buf.length - i) % 2 === 0) {
    var r = buf.toString('utf16le', i);
    if (r) {
      var c = r.charCodeAt(r.length - 1);
      // High (leading) surrogate range: wait for the pair's second half.
      if (c >= 0xD800 && c <= 0xDBFF) {
        this.lastNeed = 2;
        this.lastTotal = 4;
        this.lastChar[0] = buf[buf.length - 2];
        this.lastChar[1] = buf[buf.length - 1];
        return r.slice(0, -1);
      }
    }
    return r;
  }
  // Odd byte count: hold back the final byte.
  this.lastNeed = 1;
  this.lastTotal = 2;
  this.lastChar[0] = buf[buf.length - 1];
  return buf.toString('utf16le', i, buf.length - 1);
}
|
||
|
||
// For UTF-16LE we do not explicitly append special replacement characters if we
|
||
// end on a partial character, we simply let v8 handle that.
|
||
// Final flush for UTF-16LE: decode remaining input plus any held-back
// bytes; partial characters are left to v8 rather than replaced.
function utf16End(buf) {
  var r = buf && buf.length ? this.write(buf) : '';
  if (this.lastNeed) {
    var end = this.lastTotal - this.lastNeed;
    return r + this.lastChar.toString('utf16le', 0, end);
  }
  return r;
}
|
||
|
||
// Base64 groups 3 input bytes into 4 output characters, so 1-2 trailing
// bytes are held back in lastChar until the group completes.
function base64Text(buf, i) {
  var n = (buf.length - i) % 3;
  if (n === 0) return buf.toString('base64', i);
  this.lastNeed = 3 - n;
  this.lastTotal = 3;
  if (n === 1) {
    this.lastChar[0] = buf[buf.length - 1];
  } else {
    this.lastChar[0] = buf[buf.length - 2];
    this.lastChar[1] = buf[buf.length - 1];
  }
  return buf.toString('base64', i, buf.length - n);
}
|
||
|
||
// Final flush for base64: encode whatever partial 3-byte group was held
// back (producing the '='-padded tail).
function base64End(buf) {
  var r = buf && buf.length ? this.write(buf) : '';
  if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed);
  return r;
}
|
||
|
||
// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex)
|
||
// Pass bytes straight through for single-byte encodings (ascii, latin1,
// hex): no character can ever be split across chunks.
function simpleWrite(buf) {
  var enc = this.encoding;
  return buf.toString(enc);
}
|
||
|
||
// Final flush for single-byte encodings: decode whatever remains, or
// return the empty string when there is nothing left.
function simpleEnd(buf) {
  if (!buf || !buf.length) return '';
  return this.write(buf);
}
|
||
|
||
// Public surface of the inlined string_decoder module.
var string_decoder = {
  StringDecoder: StringDecoder_1
};
|
||
|
||
var ERR_STREAM_PREMATURE_CLOSE = errorsBrowser.codes.ERR_STREAM_PREMATURE_CLOSE;
|
||
|
||
// Wrap `callback` so only its first invocation runs; later calls are
// silently ignored. Preserves `this` and forwards all arguments.
function once$1(callback) {
  var fired = false;
  return function () {
    if (fired) return;
    fired = true;

    var args = Array.prototype.slice.call(arguments);
    callback.apply(this, args);
  };
}
|
||
|
||
// Default callback when the caller of eos() provides none.
function noop$2() {}

// Heuristic for http.ClientRequest-like streams (have setHeader and an
// abort() method); such streams signal completion via 'complete'/'abort'.
function isRequest$1(stream) {
  return stream.setHeader && typeof stream.abort === 'function';
}
|
||
|
||
// eos(stream, [opts], callback): invoke `callback` exactly once when `stream`
// has fully finished (readable side ended and/or writable side finished), or
// immediately with the error when it errors / closes prematurely.
// opts.readable / opts.writable force which sides to wait for; opts.error
// (=== false) suppresses the 'error' listener. Returns a cleanup function
// that detaches every listener this call installed.
function eos$1(stream, opts, callback) {
  if (typeof opts === 'function') return eos$1(stream, null, opts);
  if (!opts) opts = {};
  // once$1 guarantees the callback cannot fire twice even if multiple
  // terminal events race; noop$2 tolerates a missing callback.
  callback = once$1(callback || noop$2);
  var readable = opts.readable || opts.readable !== false && stream.readable;
  var writable = opts.writable || opts.writable !== false && stream.writable;

  // Legacy (pre-streams2) writables never emit 'finish'; infer it from
  // 'end'/'close' plus the writable flag dropping.
  var onlegacyfinish = function onlegacyfinish() {
    if (!stream.writable) onfinish();
  };

  var writableEnded = stream._writableState && stream._writableState.finished;

  var onfinish = function onfinish() {
    writable = false;
    writableEnded = true;
    if (!readable) callback.call(stream);
  };

  var readableEnded = stream._readableState && stream._readableState.endEmitted;

  var onend = function onend() {
    readable = false;
    readableEnded = true;
    if (!writable) callback.call(stream);
  };

  var onerror = function onerror(err) {
    callback.call(stream, err);
  };

  // 'close' before the awaited side finished means a premature close, unless
  // the underlying state says the side actually ended.
  var onclose = function onclose() {
    var err;

    if (readable && !readableEnded) {
      if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE();
      return callback.call(stream, err);
    }

    if (writable && !writableEnded) {
      if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE();
      return callback.call(stream, err);
    }
  };

  var onrequest = function onrequest() {
    stream.req.on('finish', onfinish);
  };

  if (isRequest$1(stream)) {
    // http ClientRequest: completion arrives via 'complete'/req 'finish'.
    stream.on('complete', onfinish);
    stream.on('abort', onclose);
    if (stream.req) onrequest();else stream.on('request', onrequest);
  } else if (writable && !stream._writableState) {
    // legacy streams
    stream.on('end', onlegacyfinish);
    stream.on('close', onlegacyfinish);
  }

  stream.on('end', onend);
  stream.on('finish', onfinish);
  if (opts.error !== false) stream.on('error', onerror);
  stream.on('close', onclose);
  // Cleanup: detach everything we may have attached above.
  return function () {
    stream.removeListener('complete', onfinish);
    stream.removeListener('abort', onclose);
    stream.removeListener('request', onrequest);
    if (stream.req) stream.req.removeListener('finish', onfinish);
    stream.removeListener('end', onlegacyfinish);
    stream.removeListener('close', onlegacyfinish);
    stream.removeListener('finish', onfinish);
    stream.removeListener('end', onend);
    stream.removeListener('error', onerror);
    stream.removeListener('close', onclose);
  };
}

// CommonJS export alias for the end-of-stream helper above.
var endOfStream = eos$1;
|
||
|
||
// Accumulator object for building ReadableStreamAsyncIteratorPrototype below.
var _Object$setPrototypeO;

// Babel helper: define (or redefine) `key` on `obj` as an enumerable,
// configurable, writable data property, and return `obj` for chaining.
function _defineProperty(obj, key, value) {
  if (key in obj) {
    Object.defineProperty(obj, key, {
      value: value,
      enumerable: true,
      configurable: true,
      writable: true
    });
  } else {
    obj[key] = value;
  }
  return obj;
}
|
||
|
||
|
||
|
||
// Private per-iterator state slots for the Readable async-iterator below.
var kLastResolve = Symbol('lastResolve'); // resolve fn of the pending next()
var kLastReject = Symbol('lastReject'); // reject fn of the pending next()
var kError = Symbol('error'); // sticky error observed on the stream
var kEnded = Symbol('ended'); // true once 'end' has been seen
var kLastPromise = Symbol('lastPromise'); // promise returned by previous next()
var kHandlePromise = Symbol('handlePromise'); // shared executor reading from the stream
var kStream = Symbol('stream'); // the wrapped Readable
|
||
|
||
// Build an iterator-protocol result record ({ value, done }).
function createIterResult(value, done) {
  return { value, done };
}
|
||
|
||
// Try to satisfy the iterator's pending next() with data from the stream.
function readAndResolve(iter) {
  var pendingResolve = iter[kLastResolve];
  if (pendingResolve === null) return;

  var data = iter[kStream].read();
  // A null read means nothing is buffered yet; we defer and keep waiting for
  // 'readable', 'end' or 'error' to settle the promise.
  if (data === null) return;

  iter[kLastPromise] = null;
  iter[kLastResolve] = null;
  iter[kLastReject] = null;
  pendingResolve(createIterResult(data, false));
}
|
||
|
||
// 'readable' listener: defer one tick because the stream might emit an error
// with process.nextTick, and that error must win over a data resolution.
function onReadable(iter) {
  process$1.nextTick(readAndResolve, iter);
}
|
||
|
||
// Chain a next() call behind the previous next()'s promise so overlapping
// next() calls deliver results strictly in order.
function wrapForNext(lastPromise, iter) {
  return function (resolve, reject) {
    lastPromise.then(function () {
      // The stream may have ended while we were queued behind lastPromise.
      if (iter[kEnded]) {
        resolve(createIterResult(undefined, true));
        return;
      }

      iter[kHandlePromise](resolve, reject);
    }, reject);
  };
}
|
||
|
||
// Prototype shared by all Readable async iterators. It inherits from the
// engine's %AsyncIteratorPrototype% (recovered via a throwaway function) so
// `for await` recognises instances as async iterators.
var AsyncIteratorPrototype = Object.getPrototypeOf(function () {});
var ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf((_Object$setPrototypeO = {
  get stream() {
    return this[kStream];
  },

  next: function next() {
    var _this = this;

    // if we have detected an error in the meanwhile
    // reject straight away
    var error = this[kError];

    if (error !== null) {
      return Promise.reject(error);
    }

    if (this[kEnded]) {
      return Promise.resolve(createIterResult(undefined, true));
    }

    if (this[kStream].destroyed) {
      // We need to defer via nextTick because if .destroy(err) is
      // called, the error will be emitted via nextTick, and
      // we cannot guarantee that there is no error lingering around
      // waiting to be emitted.
      return new Promise(function (resolve, reject) {
        process$1.nextTick(function () {
          if (_this[kError]) {
            reject(_this[kError]);
          } else {
            resolve(createIterResult(undefined, true));
          }
        });
      });
    } // if we have multiple next() calls
    // we will wait for the previous Promise to finish
    // this logic is optimized to support for await loops,
    // where next() is only called once at a time


    var lastPromise = this[kLastPromise];
    var promise;

    if (lastPromise) {
      promise = new Promise(wrapForNext(lastPromise, this));
    } else {
      // fast path needed to support multiple this.push()
      // without triggering the next() queue
      var data = this[kStream].read();

      if (data !== null) {
        return Promise.resolve(createIterResult(data, false));
      }

      promise = new Promise(this[kHandlePromise]);
    }

    this[kLastPromise] = promise;
    return promise;
  }
}, _defineProperty(_Object$setPrototypeO, Symbol.asyncIterator, function () {
  return this;
}), _defineProperty(_Object$setPrototypeO, "return", function _return() {
  var _this2 = this;

  // destroy(err, cb) is a private API
  // we can guarantee we have that here, because we control the
  // Readable class this is attached to
  return new Promise(function (resolve, reject) {
    _this2[kStream].destroy(null, function (err) {
      if (err) {
        reject(err);
        return;
      }

      resolve(createIterResult(undefined, true));
    });
  });
}), _Object$setPrototypeO), AsyncIteratorPrototype);
|
||
|
||
// Build an async iterator over `stream`: wires 'readable', stream end and
// errors into the pending next() promise held in the iterator's symbol slots.
var createReadableStreamAsyncIterator$1 = function createReadableStreamAsyncIterator(stream) {
  var _Object$create;

  var iterator = Object.create(ReadableStreamAsyncIteratorPrototype, (_Object$create = {}, _defineProperty(_Object$create, kStream, {
    value: stream,
    writable: true
  }), _defineProperty(_Object$create, kLastResolve, {
    value: null,
    writable: true
  }), _defineProperty(_Object$create, kLastReject, {
    value: null,
    writable: true
  }), _defineProperty(_Object$create, kError, {
    value: null,
    writable: true
  }), _defineProperty(_Object$create, kEnded, {
    value: stream._readableState.endEmitted,
    writable: true
  }), _defineProperty(_Object$create, kHandlePromise, {
    // Shared promise executor: resolve immediately when data is buffered,
    // otherwise park resolve/reject until 'readable'/'end'/'error' fires.
    value: function value(resolve, reject) {
      var data = iterator[kStream].read();

      if (data) {
        iterator[kLastPromise] = null;
        iterator[kLastResolve] = null;
        iterator[kLastReject] = null;
        resolve(createIterResult(data, false));
      } else {
        iterator[kLastResolve] = resolve;
        iterator[kLastReject] = reject;
      }
    },
    writable: true
  }), _Object$create));
  iterator[kLastPromise] = null;
  // Settle the iterator when the stream terminates, distinguishing real
  // errors from a benign premature close.
  endOfStream(stream, function (err) {
    if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') {
      var reject = iterator[kLastReject]; // reject if we are waiting for data in the Promise
      // returned by next() and store the error

      if (reject !== null) {
        iterator[kLastPromise] = null;
        iterator[kLastResolve] = null;
        iterator[kLastReject] = null;
        reject(err);
      }

      iterator[kError] = err;
      return;
    }

    var resolve = iterator[kLastResolve];

    if (resolve !== null) {
      iterator[kLastPromise] = null;
      iterator[kLastResolve] = null;
      iterator[kLastReject] = null;
      resolve(createIterResult(undefined, true));
    }

    iterator[kEnded] = true;
  });
  stream.on('readable', onReadable.bind(null, iterator));
  return iterator;
};

// CommonJS export alias for the factory above.
var async_iterator = createReadableStreamAsyncIterator$1;
|
||
|
||
// Browser builds have no Readable.from; fail loudly if it is ever called.
var fromBrowser = function () {
  var message = 'Readable.from is not available in the browser';
  throw new Error(message);
};
|
||
|
||
// CommonJS export of the browser Readable implementation defined below.
var _stream_readable = Readable$1;
/*<replacement>*/

// Assigned lazily (inside ReadableState / Readable$1) to break the circular
// dependency with Duplex.
var Duplex;
/*</replacement>*/

Readable$1.ReadableState = ReadableState;
/*<replacement>*/

EventEmitter$1.EventEmitter;

// Count listeners without relying on EventEmitter.listenerCount, which some
// bundled emitter implementations do not provide.
var EElistenerCount = function EElistenerCount(emitter, type) {
  return emitter.listeners(type).length;
};
/*</replacement>*/

/*<replacement>*/


/*</replacement>*/


var Buffer$2 = buffer.Buffer;

// Fallback constructor so `instanceof` checks stay safe even where the
// global Uint8Array is missing.
var OurUint8Array = commonjsGlobal.Uint8Array || function () {};

function _uint8ArrayToBuffer(chunk) {
  return Buffer$2.from(chunk);
}

function _isUint8Array(obj) {
  return Buffer$2.isBuffer(obj) || obj instanceof OurUint8Array;
}
/*<replacement>*/


// debug() logs via util.debuglog('stream') when available, else is a no-op.
var debug;

if (debugUtil && debugUtil.debuglog) {
  debug = debugUtil.debuglog('stream');
} else {
  debug = function debug() {};
}
/*</replacement>*/


var getHighWaterMark = state.getHighWaterMark;

var _require$codes$2 = errorsBrowser.codes,
    ERR_INVALID_ARG_TYPE = _require$codes$2.ERR_INVALID_ARG_TYPE,
    ERR_STREAM_PUSH_AFTER_EOF = _require$codes$2.ERR_STREAM_PUSH_AFTER_EOF,
    ERR_METHOD_NOT_IMPLEMENTED$1 = _require$codes$2.ERR_METHOD_NOT_IMPLEMENTED,
    ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes$2.ERR_STREAM_UNSHIFT_AFTER_END_EVENT; // Lazy loaded to improve the startup performance.


var StringDecoder;
var createReadableStreamAsyncIterator;
var from;

inherits_browser(Readable$1, streamBrowser);

var errorOrDestroy = destroy_1.errorOrDestroy;
// Events proxied from the wrapped legacy stream in Readable.wrap().
var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume'];
|
||
|
||
// Install `fn` so it runs BEFORE any existing listeners for `event`.
// Uses the native prependListener when present; otherwise splices directly
// into emitter._events — a legacy hack kept only for very old Node versions
// and for libraries that bundle their own event emitter implementation.
function prependListener(emitter, event, fn) {
  if (typeof emitter.prependListener === 'function') {
    return emitter.prependListener(event, fn);
  }

  var events = emitter._events;
  if (!events || !events[event]) {
    // No listeners yet: a plain on() already puts us first.
    emitter.on(event, fn);
  } else if (Array.isArray(events[event])) {
    events[event].unshift(fn);
  } else {
    // Single existing listener: promote to an array with ours first.
    events[event] = [fn, events[event]];
  }
}
|
||
|
||
// Per-stream bookkeeping for the readable side: buffering, flow mode,
// end/destroy flags and decoder state.
function ReadableState(options, stream, isDuplex) {
  Duplex = Duplex || require$$2;
  options = options || {}; // Duplex streams are both readable and writable, but share
  // the same options object.
  // However, some cases require setting options to different
  // values for the readable and the writable sides of the duplex stream.
  // These options can be provided separately as readableXXX and writableXXX.

  if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag. Used to make read(n) ignore n and to
  // make all the buffer merging and length checks go away

  this.objectMode = !!options.objectMode;
  if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; // the point at which it stops calling _read() to fill the buffer
  // Note: 0 is a valid value, means "don't call _read preemptively ever"

  this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex); // A linked list is used to store data chunks instead of an array because the
  // linked list can remove elements from the beginning faster than
  // array.shift()

  this.buffer = new buffer_list();
  this.length = 0;
  this.pipes = null;
  this.pipesCount = 0;
  this.flowing = null;
  this.ended = false;
  this.endEmitted = false;
  this.reading = false; // a flag to be able to tell if the event 'readable'/'data' is emitted
  // immediately, or on a later tick. We set this to true at first, because
  // any actions that shouldn't happen until "later" should generally also
  // not happen before the first read call.

  this.sync = true; // whenever we return null, then we set a flag to say
  // that we're awaiting a 'readable' event emission.

  this.needReadable = false;
  this.emittedReadable = false;
  this.readableListening = false;
  this.resumeScheduled = false;
  this.paused = true; // Should close be emitted on destroy. Defaults to true.

  this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'end' (and potentially 'finish')

  this.autoDestroy = !!options.autoDestroy; // has it been destroyed

  this.destroyed = false; // Crypto is kind of old and crusty. Historically, its default string
  // encoding is 'binary' so we have to make this configurable.
  // Everything else in the universe uses 'utf8', though.

  this.defaultEncoding = options.defaultEncoding || 'utf8'; // the number of writers that are awaiting a drain event in .pipe()s

  this.awaitDrain = 0; // if true, a maybeReadMore has been scheduled

  this.readingMore = false;
  this.decoder = null;
  this.encoding = null;

  // StringDecoder is lazy-loaded on first use (startup performance).
  if (options.encoding) {
    if (!StringDecoder) StringDecoder = string_decoder.StringDecoder;
    this.decoder = new StringDecoder(options.encoding);
    this.encoding = options.encoding;
  }
}
|
||
|
||
// Readable constructor; also callable without `new`. Accepts optional
// `read`/`destroy` overrides via the options object.
function Readable$1(options) {
  Duplex = Duplex || require$$2;
  if (!(this instanceof Readable$1)) return new Readable$1(options); // Checking for a Stream.Duplex instance is faster here instead of inside
  // the ReadableState constructor, at least with V8 6.5

  var isDuplex = this instanceof Duplex;
  this._readableState = new ReadableState(options, this, isDuplex); // legacy

  this.readable = true;

  if (options) {
    if (typeof options.read === 'function') this._read = options.read;
    if (typeof options.destroy === 'function') this._destroy = options.destroy;
  }

  streamBrowser.call(this);
}
|
||
|
||
// `destroyed` mirrors the flag on _readableState; reads are safe before the
// state exists, writes before initialization are ignored.
Object.defineProperty(Readable$1.prototype, 'destroyed', {
  // making it explicit this property is not enumerable
  // because otherwise some prototype manipulation in
  // userland will fail
  enumerable: false,
  get: function get() {
    if (this._readableState === undefined) {
      return false;
    }

    return this._readableState.destroyed;
  },
  set: function set(value) {
    // we ignore the value if the stream
    // has not been initialized yet
    if (!this._readableState) {
      return;
    } // backward compatibility, the user is explicitly
    // managing destroyed


    this._readableState.destroyed = value;
  }
});
Readable$1.prototype.destroy = destroy_1.destroy;
Readable$1.prototype._undestroy = destroy_1.undestroy;

// Default _destroy: nothing to tear down, just forward any error to cb.
Readable$1.prototype._destroy = function (err, cb) {
  cb(err);
}; // Manually shove something into the read() buffer.
|
||
// This returns true if the highWaterMark has not been hit yet,
|
||
// similar to how Writable.write() returns true if you should
|
||
// write() some more.
|
||
|
||
|
||
// Manually shove something into the read() buffer. Returns true if the
// highWaterMark has not been hit yet, similar to how Writable.write()
// returns true if you should write() some more.
Readable$1.prototype.push = function (chunk, encoding) {
  var state = this._readableState;
  var skipChunkCheck;

  if (!state.objectMode) {
    if (typeof chunk === 'string') {
      encoding = encoding || state.defaultEncoding;

      // Normalise up-front when the given encoding differs from the stream's,
      // so buffered chunks all share one representation.
      if (encoding !== state.encoding) {
        chunk = Buffer$2.from(chunk, encoding);
        encoding = '';
      }

      skipChunkCheck = true;
    }
  } else {
    skipChunkCheck = true;
  }

  return readableAddChunk(this, chunk, encoding, false, skipChunkCheck);
}; // Unshift should *always* be something directly out of read()


Readable$1.prototype.unshift = function (chunk) {
  return readableAddChunk(this, chunk, null, true, false);
};
|
||
|
||
// Core of push()/unshift(): validate the chunk, route it into the buffer (or
// emit it directly when flowing), and report whether more may be pushed.
function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) {
  debug('readableAddChunk', chunk);
  var state = stream._readableState;

  // push(null) is the EOF signal.
  if (chunk === null) {
    state.reading = false;
    onEofChunk(stream, state);
  } else {
    var er;
    if (!skipChunkCheck) er = chunkInvalid(state, chunk);

    if (er) {
      errorOrDestroy(stream, er);
    } else if (state.objectMode || chunk && chunk.length > 0) {
      // Coerce plain Uint8Arrays (but not Buffers/strings) to Buffer.
      if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer$2.prototype) {
        chunk = _uint8ArrayToBuffer(chunk);
      }

      if (addToFront) {
        if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true);
      } else if (state.ended) {
        errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF());
      } else if (state.destroyed) {
        return false;
      } else {
        state.reading = false;

        // Route through the string decoder when one is configured and the
        // chunk was not already encoded by push().
        if (state.decoder && !encoding) {
          chunk = state.decoder.write(chunk);
          if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state);
        } else {
          addChunk(stream, state, chunk, false);
        }
      }
    } else if (!addToFront) {
      state.reading = false;
      maybeReadMore(stream, state);
    }
  } // We can push more data if we are below the highWaterMark.
  // Also, if we have no data yet, we can stand some more bytes.
  // This is to work around cases where hwm=0, such as the repl.


  return !state.ended && (state.length < state.highWaterMark || state.length === 0);
}
|
||
|
||
// Deliver a validated chunk: emit 'data' immediately when flowing with an
// empty buffer (and not inside a sync _read), otherwise buffer it.
function addChunk(stream, state, chunk, addToFront) {
  if (state.flowing && state.length === 0 && !state.sync) {
    state.awaitDrain = 0;
    stream.emit('data', chunk);
  } else {
    // update the buffer info.
    state.length += state.objectMode ? 1 : chunk.length;
    if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk);
    if (state.needReadable) emitReadable(stream);
  }

  maybeReadMore(stream, state);
}
|
||
|
||
// Validate a chunk for a non-object-mode stream. Returns an error to raise,
// or undefined when the chunk is acceptable (string, Buffer, Uint8Array,
// undefined, or anything at all in objectMode).
function chunkInvalid(state, chunk) {
  if (state.objectMode || chunk === undefined) return undefined;
  if (typeof chunk === 'string' || _isUint8Array(chunk)) return undefined;
  return new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk);
}
|
||
|
||
// True only when flow mode has been explicitly paused (flowing === false;
// null means "not decided yet").
Readable$1.prototype.isPaused = function () {
  return this._readableState.flowing === false;
}; // backwards compatibility.


// Set the string encoding and re-decode any chunks already buffered so the
// internal buffer holds a single decoded string. Returns `this` (chainable).
Readable$1.prototype.setEncoding = function (enc) {
  if (!StringDecoder) StringDecoder = string_decoder.StringDecoder;
  var decoder = new StringDecoder(enc);
  this._readableState.decoder = decoder; // If setEncoding(null), decoder.encoding equals utf8

  this._readableState.encoding = this._readableState.decoder.encoding; // Iterate over current buffer to convert already stored Buffers:

  var p = this._readableState.buffer.head;
  var content = '';

  while (p !== null) {
    content += decoder.write(p.data);
    p = p.next;
  }

  this._readableState.buffer.clear();

  if (content !== '') this._readableState.buffer.push(content);
  this._readableState.length = content.length;
  return this;
}; // Don't raise the hwm > 1GB
|
||
|
||
|
||
// Cap on how far read(n) may raise the high-water mark (1 GiB).
var MAX_HWM = 0x40000000;

// Grow the high-water mark to the next power of two >= n, capped at MAX_HWM,
// so the hwm never increases in tiny increments.
function computeNewHighWaterMark(n) {
  if (n >= MAX_HWM) {
    // TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE.
    return MAX_HWM;
  }

  // Classic bit-smearing trick: fill every bit below the highest set bit,
  // then add one to land on the next power of two.
  n--;
  n |= n >>> 1;
  n |= n >>> 2;
  n |= n >>> 4;
  n |= n >>> 8;
  n |= n >>> 16;
  return n + 1;
}
|
||
// changes to the function body.
|
||
|
||
|
||
// Decide how many bytes/objects read(n) may hand out right now. This
// function is designed to be inlinable, so please take care when making
// changes to the function body.
function howMuchToRead(n, state) {
  if (n <= 0 || state.length === 0 && state.ended) return 0;
  if (state.objectMode) return 1;

  // NaN (read() with no arg) means "give me whatever is available".
  if (n !== n) {
    // Only flow one buffer at a time
    if (state.flowing && state.length) return state.buffer.head.data.length;
    return state.length;
  }

  // Asking for more than the current hwm raises the hwm.
  if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n);
  if (n <= state.length) return n;

  // Not enough buffered: on a live stream, flag that we need a 'readable'.
  if (!state.ended) {
    state.needReadable = true;
    return 0;
  }

  return state.length;
} // you can override either this method, or the async _read(n) below.
|
||
|
||
|
||
// Pull up to n bytes/objects from the buffer, possibly invoking _read() to
// refill. read(0) only (re)arms the 'readable' event; returns null when
// nothing can be handed out.
Readable$1.prototype.read = function (n) {
  debug('read', n);
  n = parseInt(n, 10);
  var state = this._readableState;
  var nOrig = n;
  if (n !== 0) state.emittedReadable = false; // if we're doing read(0) to trigger a readable event, but we
  // already have a bunch of data in the buffer, then just trigger
  // the 'readable' event and move on.

  if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)) {
    debug('read: emitReadable', state.length, state.ended);
    if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this);
    return null;
  }

  n = howMuchToRead(n, state); // if we've ended, and we're now clear, then finish it up.

  if (n === 0 && state.ended) {
    if (state.length === 0) endReadable(this);
    return null;
  } // All the actual chunk generation logic needs to be
  // *below* the call to _read. The reason is that in certain
  // synthetic stream cases, such as passthrough streams, _read
  // may be a completely synchronous operation which may change
  // the state of the read buffer, providing enough data when
  // before there was *not* enough.
  //
  // So, the steps are:
  // 1. Figure out what the state of things will be after we do
  // a read from the buffer.
  //
  // 2. If that resulting state will trigger a _read, then call _read.
  // Note that this may be asynchronous, or synchronous. Yes, it is
  // deeply ugly to write APIs this way, but that still doesn't mean
  // that the Readable class should behave improperly, as streams are
  // designed to be sync/async agnostic.
  // Take note if the _read call is sync or async (ie, if the read call
  // has returned yet), so that we know whether or not it's safe to emit
  // 'readable' etc.
  //
  // 3. Actually pull the requested chunks out of the buffer and return.
  // if we need a readable event, then we need to do some reading.


  var doRead = state.needReadable;
  debug('need readable', doRead); // if we currently have less than the highWaterMark, then also read some

  if (state.length === 0 || state.length - n < state.highWaterMark) {
    doRead = true;
    debug('length less than watermark', doRead);
  } // however, if we've ended, then there's no point, and if we're already
  // reading, then it's unnecessary.


  if (state.ended || state.reading) {
    doRead = false;
    debug('reading or ended', doRead);
  } else if (doRead) {
    debug('do read');
    state.reading = true;
    state.sync = true; // if the length is currently zero, then we *need* a readable event.

    if (state.length === 0) state.needReadable = true; // call internal read method

    this._read(state.highWaterMark);

    state.sync = false; // If _read pushed data synchronously, then `reading` will be false,
    // and we need to re-evaluate how much data we can return to the user.

    if (!state.reading) n = howMuchToRead(nOrig, state);
  }

  var ret;
  if (n > 0) ret = fromList(n, state);else ret = null;

  if (ret === null) {
    state.needReadable = state.length <= state.highWaterMark;
    n = 0;
  } else {
    state.length -= n;
    state.awaitDrain = 0;
  }

  if (state.length === 0) {
    // If we have nothing in the buffer, then we want to know
    // as soon as we *do* get something into the buffer.
    if (!state.ended) state.needReadable = true; // If we tried to read() past the EOF, then emit end on the next tick.

    if (nOrig !== n && state.ended) endReadable(this);
  }

  if (ret !== null) this.emit('data', ret);
  return ret;
};
|
||
|
||
// Handle push(null): flush the string decoder, mark the stream ended, and
// arrange for 'readable' so consumers observe the end of stream.
function onEofChunk(stream, state) {
  debug('onEofChunk');
  if (state.ended) return;

  if (state.decoder) {
    var chunk = state.decoder.end();

    if (chunk && chunk.length) {
      state.buffer.push(chunk);
      state.length += state.objectMode ? 1 : chunk.length;
    }
  }

  state.ended = true;

  if (state.sync) {
    // if we are sync, wait until next tick to emit the data.
    // Otherwise we risk emitting data in the flow()
    // the readable code triggers during a read() call
    emitReadable(stream);
  } else {
    // emit 'readable' now to make sure it gets picked up.
    state.needReadable = false;

    if (!state.emittedReadable) {
      state.emittedReadable = true;
      emitReadable_(stream);
    }
  }
} // Don't emit readable right away in sync mode, because this can trigger
// another read() call => stack overflow. This way, it might trigger
// a nextTick recursion warning, but that's not so bad.
|
||
|
||
|
||
// Schedule a 'readable' emission on the next tick (at most one outstanding).
function emitReadable(stream) {
  var state = stream._readableState;
  debug('emitReadable', state.needReadable, state.emittedReadable);
  state.needReadable = false; // the pending demand is about to be announced

  if (!state.emittedReadable) {
    debug('emitReadable', state.flowing);
    state.emittedReadable = true;
    process$1.nextTick(emitReadable_, stream);
  }
}
|
||
|
||
// Deferred half of emitReadable(): actually emit 'readable' (if the stream
// still has data or has ended), recompute needReadable, and kick flow().
function emitReadable_(stream) {
  var state = stream._readableState;
  debug('emitReadable_', state.destroyed, state.length, state.ended);

  if (!state.destroyed && (state.length || state.ended)) {
    stream.emit('readable');
    state.emittedReadable = false;
  } // The stream needs another readable event if
  // 1. It is not flowing, as the flow mechanism will take
  // care of it.
  // 2. It is not ended.
  // 3. It is below the highWaterMark, so we can schedule
  // another readable later.


  state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark;
  flow(stream);
} // at this point, the user has presumably seen the 'readable' event,
// and called read() to consume some data. that may have triggered
// in turn another _read(n) call, in which case reading = true if
// it's in progress.
// However, if we're not ended, or reading, and the length < hwm,
// then go ahead and try to read some more preemptively.
|
||
|
||
|
||
// Schedule at most one deferred maybeReadMore_ pass on the next tick.
function maybeReadMore(stream, state) {
  if (!state.readingMore) {
    state.readingMore = true;
    process$1.nextTick(maybeReadMore_, stream, state);
  }
}
|
||
|
||
function maybeReadMore_(stream, state) {
  // Attempt to read more data if we should.
  //
  // The conditions for reading more data are (one of):
  // - Not enough data buffered (state.length < state.highWaterMark). The loop
  // is responsible for filling the buffer with enough data if such data
  // is available. If highWaterMark is 0 and we are not in the flowing mode
  // we should _not_ attempt to buffer any extra data. We'll get more data
  // when the stream consumer calls read() instead.
  // - No data in the buffer, and the stream is in flowing mode. In this mode
  // the loop below is responsible for ensuring read() is called. Failing to
  // call read here would abort the flow and there's no other mechanism for
  // continuing the flow if the stream consumer has just subscribed to the
  // 'data' event.
  //
  // In addition to the above conditions to keep reading data, the following
  // conditions prevent the data from being read:
  // - The stream has ended (state.ended).
  // - There is already a pending 'read' operation (state.reading). This is a
  // case where the the stream has called the implementation defined _read()
  // method, but they are processing the call asynchronously and have _not_
  // called push() with new data. In this case we skip performing more
  // read()s. The execution ends in this method again after the _read() ends
  // up calling push() with more data.
  while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) {
    var len = state.length;
    debug('maybeReadMore read 0');
    stream.read(0);
    if (len === state.length) // didn't get any data, stop spinning.
      break;
  }

  state.readingMore = false;
} // abstract method. to be overridden in specific implementation classes.
// call cb(er, data) where data is <= n in length.
// for virtual (non-string, non-buffer) streams, "length" is somewhat
// arbitrary, and perhaps not very meaningful.
|
||
|
||
|
||
// Default _read: subclasses (or the options.read override) must replace
// this; calling the base raises ERR_METHOD_NOT_IMPLEMENTED via errorOrDestroy.
Readable$1.prototype._read = function (n) {
  errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED$1('_read()'));
};
|
||
|
||
// Connect this readable to a writable destination. Registers the dest in
// the pipes list, wires up end/error/close/finish/drain handlers, and
// starts flowing. Returns `dest` so pipes can be chained.
Readable$1.prototype.pipe = function (dest, pipeOpts) {
  var src = this;
  var state = this._readableState;

  // pipes is null, a single dest, or an array of dests depending on count.
  switch (state.pipesCount) {
    case 0:
      state.pipes = dest;
      break;

    case 1:
      state.pipes = [state.pipes, dest];
      break;

    default:
      state.pipes.push(dest);
      break;
  }

  state.pipesCount += 1;
  debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts);
  // end() the destination when the source ends, unless opts.end === false
  // or the destination is stdout/stderr.
  var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process$1.stdout && dest !== process$1.stderr;
  var endFn = doEnd ? onend : unpipe;
  if (state.endEmitted) process$1.nextTick(endFn);else src.once('end', endFn);
  dest.on('unpipe', onunpipe);

  // Handler fired when dest emits 'unpipe': only tear down once per
  // unpipe (guarded by unpipeInfo.hasUnpiped).
  function onunpipe(readable, unpipeInfo) {
    debug('onunpipe');

    if (readable === src) {
      if (unpipeInfo && unpipeInfo.hasUnpiped === false) {
        unpipeInfo.hasUnpiped = true;
        cleanup();
      }
    }
  }

  function onend() {
    debug('onend');
    dest.end();
  } // when the dest drains, it reduces the awaitDrain counter
  // on the source. This would be more elegant with a .once()
  // handler in flow(), but adding and removing repeatedly is
  // too slow.

  var ondrain = pipeOnDrain(src);
  dest.on('drain', ondrain);
  var cleanedUp = false;

  function cleanup() {
    debug('cleanup'); // cleanup event handlers once the pipe is broken

    dest.removeListener('close', onclose);
    dest.removeListener('finish', onfinish);
    dest.removeListener('drain', ondrain);
    dest.removeListener('error', onerror);
    dest.removeListener('unpipe', onunpipe);
    src.removeListener('end', onend);
    src.removeListener('end', unpipe);
    src.removeListener('data', ondata);
    cleanedUp = true; // if the reader is waiting for a drain event from this
    // specific writer, then it would cause it to never start
    // flowing again.
    // So, if this is awaiting a drain, then we just call it now.
    // If we don't know, then assume that we are waiting for one.

    if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain();
  }

  src.on('data', ondata);

  // Forward every chunk to the destination; on backpressure, bump
  // awaitDrain and pause the source.
  function ondata(chunk) {
    debug('ondata');
    var ret = dest.write(chunk);
    debug('dest.write', ret);

    if (ret === false) {
      // If the user unpiped during `dest.write()`, it is possible
      // to get stuck in a permanently paused state if that write
      // also returned false.
      // => Check whether `dest` is still a piping destination.
      if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) {
        debug('false write response, pause', state.awaitDrain);
        state.awaitDrain++;
      }

      src.pause();
    }
  } // if the dest has an error, then stop piping into it.
  // however, don't suppress the throwing behavior for this.

  function onerror(er) {
    debug('onerror', er);
    unpipe();
    dest.removeListener('error', onerror);
    if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er);
  } // Make sure our error handler is attached before userland ones.

  prependListener(dest, 'error', onerror); // Both close and finish should trigger unpipe, but only once.

  function onclose() {
    dest.removeListener('finish', onfinish);
    unpipe();
  }

  dest.once('close', onclose);

  function onfinish() {
    debug('onfinish');
    dest.removeListener('close', onclose);
    unpipe();
  }

  dest.once('finish', onfinish);

  function unpipe() {
    debug('unpipe');
    src.unpipe(dest);
  } // tell the dest that it's being piped to

  dest.emit('pipe', src); // start the flow if it hasn't been started already.

  if (!state.flowing) {
    debug('pipe resume');
    src.resume();
  }

  return dest;
};
|
||
|
||
// Build the 'drain' handler installed on a pipe destination: each drain
// decrements the source's awaitDrain counter, and once no destination is
// blocked (and someone still listens for 'data') flowing restarts.
function pipeOnDrain(src) {
  return function pipeOnDrainFunctionResult() {
    var rs = src._readableState;
    debug('pipeOnDrain', rs.awaitDrain);

    if (rs.awaitDrain) {
      rs.awaitDrain--;
    }

    var nothingWaiting = rs.awaitDrain === 0;
    if (nothingWaiting && EElistenerCount(src, 'data')) {
      rs.flowing = true;
      flow(src);
    }
  };
}
|
||
|
||
// Detach `dest` from this stream (or all destinations when omitted).
// Maintains the scalar/array representation of state.pipes and emits
// 'unpipe' on each removed destination. Returns `this` for chaining.
Readable$1.prototype.unpipe = function (dest) {
  var state = this._readableState;
  var unpipeInfo = {
    hasUnpiped: false
  }; // if we're not piping anywhere, then do nothing.

  if (state.pipesCount === 0) return this; // just one destination. most common case.

  if (state.pipesCount === 1) {
    // passed in one, but it's not the right one.
    if (dest && dest !== state.pipes) return this;
    if (!dest) dest = state.pipes; // got a match.

    state.pipes = null;
    state.pipesCount = 0;
    state.flowing = false;
    if (dest) dest.emit('unpipe', this, unpipeInfo);
    return this;
  } // slow case. multiple pipe destinations.

  if (!dest) {
    // remove all.
    var dests = state.pipes;
    var len = state.pipesCount;
    state.pipes = null;
    state.pipesCount = 0;
    state.flowing = false;

    for (var i = 0; i < len; i++) {
      // each destination gets its own fresh unpipeInfo guard
      dests[i].emit('unpipe', this, {
        hasUnpiped: false
      });
    }

    return this;
  } // try to find the right one.

  var index = indexOf(state.pipes, dest);
  if (index === -1) return this;
  state.pipes.splice(index, 1);
  state.pipesCount -= 1;
  // collapse back to the scalar representation when one dest remains
  if (state.pipesCount === 1) state.pipes = state.pipes[0];
  dest.emit('unpipe', this, unpipeInfo);
  return this;
}; // set up data events if they are asked for
// Ensure readable listeners eventually get something
|
||
// Ensure readable listeners eventually get something
|
||
|
||
|
||
// Attach an event listener. Overridden so that subscribing to 'data'
// or 'readable' switches the stream into the corresponding read mode.
Readable$1.prototype.on = function (ev, fn) {
  var res = streamBrowser.prototype.on.call(this, ev, fn);
  var state = this._readableState;

  if (ev === 'data') {
    // update readableListening so that resume() may be a no-op
    // a few lines down. This is needed to support once('readable').
    state.readableListening = this.listenerCount('readable') > 0; // Try start flowing on next tick if stream isn't explicitly paused

    if (state.flowing !== false) this.resume();
  } else if (ev === 'readable') {
    if (!state.endEmitted && !state.readableListening) {
      state.readableListening = state.needReadable = true;
      state.flowing = false;
      state.emittedReadable = false;
      debug('on readable', state.length, state.reading);

      if (state.length) {
        // data already buffered: announce readability right away
        emitReadable(this);
      } else if (!state.reading) {
        // nothing buffered and no read in flight: schedule a read(0)
        process$1.nextTick(nReadingNextTick, this);
      }
    }
  }

  return res;
};

// addListener must behave identically to on().
Readable$1.prototype.addListener = Readable$1.prototype.on;
|
||
|
||
// Removing a 'readable' listener may allow the stream to start flowing
// again; re-evaluate listener state on the next tick.
Readable$1.prototype.removeListener = function (ev, fn) {
  var res = streamBrowser.prototype.removeListener.call(this, ev, fn);

  if (ev === 'readable') {
    // We need to check if there is someone still listening to
    // readable and reset the state. However this needs to happen
    // after readable has been emitted but before I/O (nextTick) to
    // support once('readable', fn) cycles. This means that calling
    // resume within the same tick will have no
    // effect.
    process$1.nextTick(updateReadableListening, this);
  }

  return res;
};
|
||
|
||
// Bulk removal counterpart of removeListener: when 'readable' listeners
// may have gone away (explicitly or via ev === undefined), re-evaluate
// on the next tick.
Readable$1.prototype.removeAllListeners = function (ev) {
  var res = streamBrowser.prototype.removeAllListeners.apply(this, arguments);

  if (ev === 'readable' || ev === undefined) {
    // We need to check if there is someone still listening to
    // readable and reset the state. However this needs to happen
    // after readable has been emitted but before I/O (nextTick) to
    // support once('readable', fn) cycles. This means that calling
    // resume within the same tick will have no
    // effect.
    process$1.nextTick(updateReadableListening, this);
  }

  return res;
};
|
||
|
||
// Recompute readable-listening state after listener removal and decide
// whether the stream should keep (or restart) flowing.
function updateReadableListening(self) {
  var rs = self._readableState;
  rs.readableListening = self.listenerCount('readable') > 0;

  if (rs.resumeScheduled && !rs.paused) {
    // flowing needs to be set to true now, otherwise
    // the upcoming resume will not flow.
    rs.flowing = true;
    return;
  }

  // crude way to check if we should resume
  if (self.listenerCount('data') > 0) {
    self.resume();
  }
}
|
||
|
||
// Scheduled via process.nextTick from on('readable'): issue a
// zero-length read so the internal machinery starts filling the buffer.
function nReadingNextTick(stream) {
  debug('readable nexttick read 0');
  stream.read(0);
}
|
||
// If the user uses them, then switch into old mode.
|
||
|
||
|
||
// pause() and resume() are remnants of the legacy readable stream API
// If the user uses them, then switch into old mode.
Readable$1.prototype.resume = function () {
  var state = this._readableState;

  if (!state.flowing) {
    debug('resume'); // we flow only if there is no one listening
    // for readable, but we still have to call
    // resume()

    state.flowing = !state.readableListening;
    resume(this, state);
  }

  // clear the explicit-pause flag regardless of flowing state
  state.paused = false;
  return this;
};
|
||
|
||
// Schedule the actual resume work (resume_) for the next tick, at most
// once per tick via the resumeScheduled flag.
function resume(stream, state) {
  if (state.resumeScheduled) return;
  state.resumeScheduled = true;
  process$1.nextTick(resume_, stream, state);
}
|
||
|
||
// Next-tick half of resume(): prime the stream with a read(0) if no read
// is in flight, emit 'resume', then start flowing.
function resume_(stream, state) {
  debug('resume', state.reading);

  if (!state.reading) {
    stream.read(0);
  }

  state.resumeScheduled = false;
  stream.emit('resume');
  flow(stream);
  // if flow() didn't trigger a read, kick one more off
  if (state.flowing && !state.reading) stream.read(0);
}
|
||
|
||
// Switch the stream out of flowing mode and remember that the user
// explicitly paused it.
Readable$1.prototype.pause = function () {
  var rs = this._readableState;
  debug('call pause flowing=%j', rs.flowing);

  if (rs.flowing !== false) {
    debug('pause');
    rs.flowing = false;
    this.emit('pause');
  }

  rs.paused = true;
  return this;
};
|
||
|
||
// Drain the stream while it stays in flowing mode: keep calling read()
// until it yields null or flowing is switched off by a handler.
function flow(stream) {
  var rs = stream._readableState;
  debug('flow', rs.flowing);

  while (rs.flowing) {
    if (stream.read() === null) break;
  }
} // wrap an old-style stream as the async data source.
|
||
// This is *not* part of the readable stream interface.
|
||
// It is an ugly unfortunate mess of history.
|
||
|
||
|
||
// Wrap an old-style (pre-streams2) stream as the async data source.
// This is *not* part of the readable stream interface.
// It is an ugly unfortunate mess of history.
Readable$1.prototype.wrap = function (stream) {
  var _this = this;

  var state = this._readableState;
  var paused = false;
  stream.on('end', function () {
    debug('wrapped end');

    if (state.decoder && !state.ended) {
      // flush any bytes the string decoder is still holding
      var chunk = state.decoder.end();
      if (chunk && chunk.length) _this.push(chunk);
    }

    _this.push(null);
  });
  stream.on('data', function (chunk) {
    debug('wrapped data');
    if (state.decoder) chunk = state.decoder.write(chunk); // don't skip over falsy values in objectMode

    if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return;

    var ret = _this.push(chunk);

    if (!ret) {
      // backpressure: pause the wrapped stream until _read unpauses it
      paused = true;
      stream.pause();
    }
  }); // proxy all the other methods.
  // important when wrapping filters and duplexes.

  for (var i in stream) {
    if (this[i] === undefined && typeof stream[i] === 'function') {
      // IIFE captures the method name for the forwarding closure
      this[i] = function methodWrap(method) {
        return function methodWrapReturnFunction() {
          return stream[method].apply(stream, arguments);
        };
      }(i);
    }
  } // proxy certain important events.

  for (var n = 0; n < kProxyEvents.length; n++) {
    stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n]));
  } // when we try to consume some more bytes, simply unpause the
  // underlying stream.

  this._read = function (n) {
    debug('wrapped _read', n);

    if (paused) {
      paused = false;
      stream.resume();
    }
  };

  return this;
};
|
||
|
||
// Lazily install Symbol.asyncIterator support so `for await` can be used
// over the stream's chunks (only where Symbol exists).
if (typeof Symbol === 'function') {
  Readable$1.prototype[Symbol.asyncIterator] = function () {
    if (createReadableStreamAsyncIterator === undefined) {
      // lazily bind the implementation on first use
      createReadableStreamAsyncIterator = async_iterator;
    }

    return createReadableStreamAsyncIterator(this);
  };
}
|
||
|
||
// Introspection accessors mirroring Node core's Readable surface.
Object.defineProperty(Readable$1.prototype, 'readableHighWaterMark', {
  // making it explicit this property is not enumerable
  // because otherwise some prototype manipulation in
  // userland will fail
  enumerable: false,
  get: function get() {
    return this._readableState.highWaterMark;
  }
});
Object.defineProperty(Readable$1.prototype, 'readableBuffer', {
  // making it explicit this property is not enumerable
  // because otherwise some prototype manipulation in
  // userland will fail
  enumerable: false,
  get: function get() {
    return this._readableState && this._readableState.buffer;
  }
});
Object.defineProperty(Readable$1.prototype, 'readableFlowing', {
  // making it explicit this property is not enumerable
  // because otherwise some prototype manipulation in
  // userland will fail
  enumerable: false,
  get: function get() {
    return this._readableState.flowing;
  },
  set: function set(state) {
    if (this._readableState) {
      this._readableState.flowing = state;
    }
  }
}); // exposed for testing purposes only.

Readable$1._fromList = fromList;
Object.defineProperty(Readable$1.prototype, 'readableLength', {
  // making it explicit this property is not enumerable
  // because otherwise some prototype manipulation in
  // userland will fail
  enumerable: false,
  get: function get() {
    return this._readableState.length;
  }
}); // Pluck off n bytes from an array of buffers.
|
||
// Length is the combined lengths of all the buffers in the list.
|
||
// This function is designed to be inlinable, so please take care when making
|
||
// changes to the function body.
|
||
|
||
// Pluck off n bytes from the buffered list. Returns null when nothing is
// buffered; n falsy or >= buffered length consumes everything.
// This function is designed to be inlinable, so please take care when making
// changes to the function body.
function fromList(n, state) {
  // nothing buffered
  if (state.length === 0) return null;
  var ret;
  if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) {
    // read it all, truncate the list
    if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.first();else ret = state.buffer.concat(state.length);
    state.buffer.clear();
  } else {
    // read part of list
    ret = state.buffer.consume(n, state.decoder);
  }
  return ret;
}
|
||
|
||
// Mark the stream as ended and schedule the 'end' emission for the next
// tick (at most once, guarded by endEmitted).
function endReadable(stream) {
  var rs = stream._readableState;
  debug('endReadable', rs.endEmitted);

  if (rs.endEmitted) return;

  rs.ended = true;
  process$1.nextTick(endReadableNT, rs, stream);
}
|
||
|
||
// Next-tick half of endReadable: emit 'end' once the buffer is truly
// empty, then optionally auto-destroy the stream.
function endReadableNT(state, stream) {
  debug('endReadableNT', state.endEmitted, state.length); // Check that we didn't get one last unshift.

  if (!state.endEmitted && state.length === 0) {
    state.endEmitted = true;
    stream.readable = false;
    stream.emit('end');

    if (state.autoDestroy) {
      // In case of duplex streams we need a way to detect
      // if the writable side is ready for autoDestroy as well
      var wState = stream._writableState;

      if (!wState || wState.autoDestroy && wState.finished) {
        stream.destroy();
      }
    }
  }
}
|
||
|
||
// Readable.from(iterable[, opts]): lazily bind the browser `from`
// implementation on first use (only where Symbol exists).
if (typeof Symbol === 'function') {
  Readable$1.from = function (iterable, opts) {
    if (from === undefined) {
      from = fromBrowser;
    }

    return from(Readable$1, iterable, opts);
  };
}
|
||
|
||
// Linear scan for `x` in array-like `xs` using strict equality;
// returns the first matching index or -1 when absent.
function indexOf(xs, x) {
  var len = xs.length;

  for (var idx = 0; idx < len; idx++) {
    if (xs[idx] === x) {
      return idx;
    }
  }

  return -1;
}
|
||
|
||
// _stream_transform: readable-stream Transform implementation (browser).
var _stream_transform = Transform;

var _require$codes$1 = errorsBrowser.codes,
    ERR_METHOD_NOT_IMPLEMENTED = _require$codes$1.ERR_METHOD_NOT_IMPLEMENTED,
    ERR_MULTIPLE_CALLBACK = _require$codes$1.ERR_MULTIPLE_CALLBACK,
    ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes$1.ERR_TRANSFORM_ALREADY_TRANSFORMING,
    ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes$1.ERR_TRANSFORM_WITH_LENGTH_0;

// Transform inherits from Duplex (require$$2).
inherits_browser(Transform, require$$2);
|
||
|
||
// Completion callback handed to the user's _transform(). Guards against
// double invocation, forwards produced data, and keeps the read side
// pulling while it is below the high-water mark.
function afterTransform(er, data) {
  var ts = this._transformState;
  ts.transforming = false;
  var cb = ts.writecb;

  if (cb === null) {
    // _transform's callback was invoked more than once
    return this.emit('error', new ERR_MULTIPLE_CALLBACK());
  }

  ts.writechunk = null;
  ts.writecb = null;
  if (data != null) // single equals check for both `null` and `undefined`
    this.push(data);
  cb(er);
  var rs = this._readableState;
  rs.reading = false;

  if (rs.needReadable || rs.length < rs.highWaterMark) {
    this._read(rs.highWaterMark);
  }
}
|
||
|
||
// Transform stream constructor. Sets up the per-instance transform state
// and optionally accepts `transform`/`flush` implementations via options.
function Transform(options) {
  if (!(this instanceof Transform)) return new Transform(options);
  require$$2.call(this, options);
  this._transformState = {
    afterTransform: afterTransform.bind(this),
    needTransform: false,
    transforming: false,
    writecb: null,
    writechunk: null,
    writeencoding: null
  }; // start out asking for a readable event once data is transformed.

  this._readableState.needReadable = true; // we have implemented the _read method, and done the other things
  // that Readable wants before the first _read call, so unset the
  // sync guard flag.

  this._readableState.sync = false;

  if (options) {
    if (typeof options.transform === 'function') this._transform = options.transform;
    if (typeof options.flush === 'function') this._flush = options.flush;
  } // When the writable side finishes, then flush out anything remaining.

  this.on('prefinish', prefinish);
}
|
||
|
||
// 'prefinish' handler: once the writable side is done, run the user's
// _flush (when present and the stream isn't destroyed) before signalling
// the end of the readable side via done().
function prefinish() {
  var self = this;

  if (typeof this._flush !== 'function' || this._readableState.destroyed) {
    done(this, null, null);
    return;
  }

  this._flush(function (er, data) {
    done(self, er, data);
  });
}
|
||
|
||
// Pushing data means a transform just produced output, so no pending
// transform is required anymore; then defer to Duplex push.
Transform.prototype.push = function (chunk, encoding) {
  var ts = this._transformState;
  ts.needTransform = false;
  return require$$2.prototype.push.call(this, chunk, encoding);
};
|
||
// override this function in implementation classes.
|
||
// 'chunk' is an input chunk.
|
||
//
|
||
// Call `push(newChunk)` to pass along transformed output
|
||
// to the readable side. You may call 'push' zero or more times.
|
||
//
|
||
// Call `cb(err)` when you are done with this chunk. If you pass
|
||
// an error, then that'll put the hurt on the whole operation. If you
|
||
// never call cb(), then you'll never get another chunk.
|
||
|
||
|
||
// Abstract hook: implementation classes (or the `transform` option) must
// override this; the default reports "not implemented" via the callback.
Transform.prototype._transform = function (chunk, encoding, cb) {
  var notImplemented = new ERR_METHOD_NOT_IMPLEMENTED('_transform()');
  cb(notImplemented);
};
|
||
|
||
// Writable-side hook: stash the chunk/encoding/callback; if no transform
// is currently running and the read side wants data, trigger _read so the
// transform kicks off.
Transform.prototype._write = function (chunk, encoding, cb) {
  var ts = this._transformState;
  ts.writecb = cb;
  ts.writechunk = chunk;
  ts.writeencoding = encoding;

  if (!ts.transforming) {
    var rs = this._readableState;
    if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark);
  }
}; // Doesn't matter what the args are here.
|
||
// _transform does all the work.
|
||
// That we got here means that the readable side wants more data.
|
||
|
||
|
||
// Readable-side hook: the readable side wants more data. If a chunk is
// pending and no transform is running, start transforming it; otherwise
// remember the demand via needTransform.
Transform.prototype._read = function (n) {
  var ts = this._transformState;

  if (ts.writechunk !== null && !ts.transforming) {
    ts.transforming = true;

    this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
  } else {
    // mark that we need a transform, so that any data that comes in
    // will get processed, now that we've asked for it.
    ts.needTransform = true;
  }
};
|
||
|
||
// Delegate destruction to the Duplex implementation, forwarding any
// error it reports to our callback.
Transform.prototype._destroy = function (err, cb) {
  var forward = function (err2) {
    cb(err2);
  };
  require$$2.prototype._destroy.call(this, err, forward);
};
|
||
|
||
// Final step after _flush: forward errors, push any trailing data, then
// terminate the readable side with null. Throws if writes or a transform
// are still outstanding at end time.
function done(stream, er, data) {
  if (er) return stream.emit('error', er);
  if (data != null) // single equals check for both `null` and `undefined`
    stream.push(data); // TODO(BridgeAR): Write a test for these two error cases
  // if there's nothing in the write buffer, then that means
  // that nothing more will ever be provided

  if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0();
  if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING();
  return stream.push(null);
}
|
||
|
||
// PassThrough: a trivial Transform whose _transform emits its input
// unchanged.
var _stream_passthrough = PassThrough;

inherits_browser(PassThrough, _stream_transform);

function PassThrough(options) {
  if (!(this instanceof PassThrough)) return new PassThrough(options);
  _stream_transform.call(this, options);
}

PassThrough.prototype._transform = function (chunk, encoding, cb) {
  cb(null, chunk);
};
|
||
|
||
var eos; // lazily-assigned end-of-stream helper (bound in destroyer)

// Wrap `callback` so that only its first invocation has any effect;
// later calls are silently ignored.
function once(callback) {
  var alreadyCalled = false;

  return function () {
    if (alreadyCalled) return;
    alreadyCalled = true;
    callback.apply(void 0, arguments);
  };
}
|
||
|
||
var _require$codes = errorsBrowser.codes,
    ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS,
    ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED;

// Default pipeline() callback used when the caller supplies none.
function noop$1(err) {
  // Rethrow the error if it exists to avoid swallowing it
  if (err) throw err;
}

// Heuristic: does this stream look like an HTTP request object
// (has setHeader plus an abort method)?
function isRequest(stream) {
  return stream.setHeader && typeof stream.abort === 'function';
}
|
||
|
||
// Build the teardown function for one stream in a pipeline(): wires up
// end-of-stream detection and returns a closure that destroys the stream
// (or aborts a request) exactly once.
function destroyer(stream, reading, writing, callback) {
  callback = once(callback);
  var closed = false;
  stream.on('close', function () {
    closed = true;
  });
  // lazily bind the end-of-stream helper on first use
  if (eos === undefined) eos = endOfStream;
  eos(stream, {
    readable: reading,
    writable: writing
  }, function (err) {
    if (err) return callback(err);
    closed = true;
    callback();
  });
  var destroyed = false;
  return function (err) {
    if (closed) return;
    if (destroyed) return;
    destroyed = true; // request.destroy just do .end - .abort is what we want

    if (isRequest(stream)) return stream.abort();
    if (typeof stream.destroy === 'function') return stream.destroy();
    callback(err || new ERR_STREAM_DESTROYED('pipe'));
  };
}
|
||
|
||
// Invoke a zero-argument teardown function (used as a forEach callback
// over the destroyers built by pipeline()).
function call(teardown) {
  teardown();
}
|
||
|
||
// Reducer used by pipeline(): connect `src` into `dst` and return the
// result of pipe() so the chain continues with the next stream.
function pipe(src, dst) {
  return src.pipe(dst);
}
|
||
|
||
// Take the trailing callback off a pipeline() argument list (mutating
// the array), falling back to the rethrowing noop when none was given.
function popCallback(streams) {
  var last = streams[streams.length - 1];
  if (!streams.length || typeof last !== 'function') return noop$1;
  return streams.pop();
}
|
||
|
||
// pipeline(...streams[, callback]): pipe the streams together, destroying
// all of them if any one fails, and invoke the callback when the last
// (readable-side-exhausted) destroyer fires. Returns the final stream.
function pipeline() {
  for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) {
    streams[_key] = arguments[_key];
  }

  var callback = popCallback(streams);
  // also accept pipeline([a, b, c], cb)
  if (Array.isArray(streams[0])) streams = streams[0];

  if (streams.length < 2) {
    throw new ERR_MISSING_ARGS('streams');
  }

  var error;
  var destroys = streams.map(function (stream, i) {
    // every stream but the last is read from; every stream but the
    // first is written to
    var reading = i < streams.length - 1;
    var writing = i > 0;
    return destroyer(stream, reading, writing, function (err) {
      if (!error) error = err;
      if (err) destroys.forEach(call);
      if (reading) return;
      destroys.forEach(call);
      callback(error);
    });
  });
  return streams.reduce(pipe);
}

var pipeline_1 = pipeline;
||
|
||
// Browser build of readable-stream: the module itself is Readable, with
// the other stream classes and helpers attached as named exports.
var readableBrowser = createCommonjsModule(function (module, exports) {
  exports = module.exports = require$$0;
  exports.Stream = exports;
  exports.Readable = exports;
  exports.Writable = _stream_writable;
  exports.Duplex = require$$2;
  exports.Transform = _stream_transform;
  exports.PassThrough = _stream_passthrough;
  exports.finished = endOfStream;
  exports.pipeline = pipeline_1;
});
|
||
|
||
var Readable = readableBrowser.Readable;

// level-iterator-stream: expose a level-style iterator as an object-mode
// Readable stream of keys, values, or { key, value } entries.
var levelIteratorStream = ReadStream;
inherits_browser(ReadStream, Readable);

function ReadStream (iterator, options) {
  if (!(this instanceof ReadStream)) return new ReadStream(iterator, options)
  options = options || {};
  Readable.call(this, immutable(options, {
    objectMode: true
  }));
  this._iterator = iterator;
  this._options = options;
  // tear the iterator down as soon as the stream ends
  this.on('end', this.destroy.bind(this, null, null));
}
|
||
|
||
// Pull the next entry from the iterator and push the configured payload
// shape (key only, value only, or { key, value }); pushes null when the
// iterator is exhausted.
ReadStream.prototype._read = function () {
  var self = this;
  var options = this._options;
  if (this.destroyed) return

  this._iterator.next(function (err, key, value) {
    if (self.destroyed) return
    if (err) return self.destroy(err)

    if (key === undefined && value === undefined) {
      // iterator exhausted: end the stream
      self.push(null);
    } else if (options.keys !== false && options.values === false) {
      self.push(key);
    } else if (options.keys === false && options.values !== false) {
      self.push(value);
    } else {
      self.push({ key: key, value: value });
    }
  });
};
|
||
|
||
// Close the underlying iterator; the original error (if any) takes
// precedence over an error raised while ending the iterator.
ReadStream.prototype._destroy = function (err, callback) {
  this._iterator.end(function (endErr) {
    callback(err || endErr);
  });
};
|
||
|
||
// prr: tiny property-definition helper. prr(obj, key, value, options)
// defines `key` (or, given an object, every own key) on `obj` with the
// given value; `options` may be an object of flags or a string whose
// characters ('e'/'c'/'w' by first letter) enable enumerable /
// configurable / writable.
var prr = createCommonjsModule(function (module) {
  /*!
    * prr
    * (c) 2013 Rod Vagg <rod@vagg.org>
    * https://github.com/rvagg/prr
    * License: MIT
    */

  (function (name, context, definition) {
    if (module.exports)
      module.exports = definition();
    else
      context[name] = definition();
  })('prr', commonjsGlobal, function() {

    // use Object.defineProperty where available, else plain assignment
    var setProperty = typeof Object.defineProperty == 'function'
        ? function (obj, key, options) {
            Object.defineProperty(obj, key, options);
            return obj
          }
        : function (obj, key, options) { // < es5
            obj[key] = options.value;
            return obj
          }

      // normalize flags: object of booleans, or a flag string matched
      // by the first letter of each property name
      , makeOptions = function (value, options) {
          var oo = typeof options == 'object'
            , os = !oo && typeof options == 'string'
            , op = function (p) {
                return oo
                  ? !!options[p]
                  : os
                    ? options.indexOf(p[0]) > -1
                    : false
              };

          return {
              enumerable : op('enumerable')
            , configurable : op('configurable')
            , writable : op('writable')
            , value : value
          }
        }

      , prr = function (obj, key, value, options) {
          var k;

          options = makeOptions(value, options);

          if (typeof key == 'object') {
            // object form: define every own key with its value
            for (k in key) {
              if (Object.hasOwnProperty.call(key, k)) {
                options.value = key[k];
                setProperty(obj, k, options);
              }
            }
            return obj
          }

          return setProperty(obj, key, options)
        };

    return prr
  });
});
|
||
|
||
// Shared initializer for errno error objects: normalizes a non-string
// `message` into its message/name, and defines type/name/cause/message
// via prr with the 'ewr' flag string (enumerable + writable, per prr's
// first-letter flag matching).
function init (type, message, cause) {
  if (!!message && typeof message != 'string') {
    message = message.message || message.name;
  }
  prr(this, {
      type : type
    , name : type
    // can be passed just a 'cause'
    , cause : typeof message != 'string' ? message : cause
    , message : message
  }, 'ewr');
}
|
||
|
||
// generic prototype, not intended to be actually used - helpful for `instanceof`
function CustomError (message, cause) {
  Error.call(this);
  if (Error.captureStackTrace)
    Error.captureStackTrace(this, this.constructor);
  init.call(this, 'CustomError', message, cause);
}

// inherit from Error so instances satisfy `x instanceof Error`
CustomError.prototype = new Error();
|
||
|
||
// Create a named error constructor, optionally inheriting from `proto`
// (CustomError by default). The 'FilesystemError' type additionally
// copies code/path/errno off its cause and builds a descriptive message
// from the errno table.
function createError$1 (errno, type, proto) {
  var err = function (message, cause) {
    init.call(this, type, message, cause);
    //TODO: the specificity here is stupid, errno should be available everywhere
    if (type == 'FilesystemError') {
      this.code = this.cause.code;
      this.path = this.cause.path;
      this.errno = this.cause.errno;
      this.message =
        (errno.errno[this.cause.errno]
          ? errno.errno[this.cause.errno].description
          : this.cause.message)
        + (this.cause.path ? ' [' + this.cause.path + ']' : '');
    }
    Error.call(this);
    if (Error.captureStackTrace)
      Error.captureStackTrace(this, err);
  };
  err.prototype = !!proto ? new proto() : new CustomError();
  return err
}
|
||
|
||
// Build the error-factory namespace bound to a particular errno table:
// exposes the generic CustomError, a ready-made FilesystemError, and the
// createError factory itself.
var custom = function (errno) {
  function makeError(type, proto) {
    return createError$1(errno, type, proto);
  }

  return {
    CustomError: CustomError,
    FilesystemError: makeError('FilesystemError'),
    createError: makeError
  };
};
|
||
|
||
// errno table (node-errno). Rewritten to expand a compact tuple table
// into the canonical { errno, code, description } records; the exported
// shape, entry order, and lookup maps are identical to the original.
var errno = createCommonjsModule(function (module) {
  var table = [
    [-2, 'ENOENT', 'no such file or directory'],
    [-1, 'UNKNOWN', 'unknown error'],
    [0, 'OK', 'success'],
    [1, 'EOF', 'end of file'],
    [2, 'EADDRINFO', 'getaddrinfo error'],
    [3, 'EACCES', 'permission denied'],
    [4, 'EAGAIN', 'resource temporarily unavailable'],
    [5, 'EADDRINUSE', 'address already in use'],
    [6, 'EADDRNOTAVAIL', 'address not available'],
    [7, 'EAFNOSUPPORT', 'address family not supported'],
    [8, 'EALREADY', 'connection already in progress'],
    [9, 'EBADF', 'bad file descriptor'],
    [10, 'EBUSY', 'resource busy or locked'],
    [11, 'ECONNABORTED', 'software caused connection abort'],
    [12, 'ECONNREFUSED', 'connection refused'],
    [13, 'ECONNRESET', 'connection reset by peer'],
    [14, 'EDESTADDRREQ', 'destination address required'],
    [15, 'EFAULT', 'bad address in system call argument'],
    [16, 'EHOSTUNREACH', 'host is unreachable'],
    [17, 'EINTR', 'interrupted system call'],
    [18, 'EINVAL', 'invalid argument'],
    [19, 'EISCONN', 'socket is already connected'],
    [20, 'EMFILE', 'too many open files'],
    [21, 'EMSGSIZE', 'message too long'],
    [22, 'ENETDOWN', 'network is down'],
    [23, 'ENETUNREACH', 'network is unreachable'],
    [24, 'ENFILE', 'file table overflow'],
    [25, 'ENOBUFS', 'no buffer space available'],
    [26, 'ENOMEM', 'not enough memory'],
    [27, 'ENOTDIR', 'not a directory'],
    [28, 'EISDIR', 'illegal operation on a directory'],
    [29, 'ENONET', 'machine is not on the network'],
    [31, 'ENOTCONN', 'socket is not connected'],
    [32, 'ENOTSOCK', 'socket operation on non-socket'],
    [33, 'ENOTSUP', 'operation not supported on socket'],
    [34, 'ENOENT', 'no such file or directory'],
    [35, 'ENOSYS', 'function not implemented'],
    [36, 'EPIPE', 'broken pipe'],
    [37, 'EPROTO', 'protocol error'],
    [38, 'EPROTONOSUPPORT', 'protocol not supported'],
    [39, 'EPROTOTYPE', 'protocol wrong type for socket'],
    [40, 'ETIMEDOUT', 'connection timed out'],
    [41, 'ECHARSET', 'invalid Unicode character'],
    [42, 'EAIFAMNOSUPPORT', 'address family for hostname not supported'],
    [44, 'EAISERVICE', 'servname not supported for ai_socktype'],
    [45, 'EAISOCKTYPE', 'ai_socktype not supported'],
    [46, 'ESHUTDOWN', 'cannot send after transport endpoint shutdown'],
    [47, 'EEXIST', 'file already exists'],
    [48, 'ESRCH', 'no such process'],
    [49, 'ENAMETOOLONG', 'name too long'],
    [50, 'EPERM', 'operation not permitted'],
    [51, 'ELOOP', 'too many symbolic links encountered'],
    [52, 'EXDEV', 'cross-device link not permitted'],
    [53, 'ENOTEMPTY', 'directory not empty'],
    [54, 'ENOSPC', 'no space left on device'],
    [55, 'EIO', 'i/o error'],
    [56, 'EROFS', 'read-only file system'],
    [57, 'ENODEV', 'no such device'],
    [58, 'ESPIPE', 'invalid seek'],
    [59, 'ECANCELED', 'operation canceled']
  ];

  var all = module.exports.all = table.map(function (row) {
    return { errno: row[0], code: row[1], description: row[2] };
  });

  // lookup maps by numeric errno and by symbolic code (later duplicate
  // codes, e.g. the second ENOENT, overwrite earlier ones exactly as in
  // the original ordering)
  module.exports.errno = {};
  module.exports.code = {};

  all.forEach(function (error) {
    module.exports.errno[error.errno] = error;
    module.exports.code[error.code] = error;
  });

  module.exports.custom = custom(module.exports);
  module.exports.create = module.exports.custom.createError;
});
|
||
|
||
var createError = errno.create;

// Base error type for everything levelup throws.
var LevelUPError = createError('LevelUPError');
var NotFoundError$1 = createError('NotFoundError', LevelUPError);

// Conventional markers consumers test for instead of using instanceof:
// `err.notFound` plus an HTTP-style `status` of 404.
NotFoundError$1.prototype.notFound = true;
NotFoundError$1.prototype.status = 404;

// Public error taxonomy; every type subclasses LevelUPError.
var errors = {
  LevelUPError: LevelUPError,
  InitializationError: createError('InitializationError', LevelUPError),
  OpenError: createError('OpenError', LevelUPError),
  ReadError: createError('ReadError', LevelUPError),
  WriteError: createError('WriteError', LevelUPError),
  NotFoundError: NotFoundError$1,
  EncodingError: createError('EncodingError', LevelUPError)
};
|
||
|
||
/**
 * Build a node-style callback with an attached promise: `cb.promise`
 * resolves with the success value and rejects with the error when the
 * callback is eventually invoked.
 * @return {Function} callback carrying a `.promise` property.
 */
function promisify () {
  var resolveFn;
  var rejectFn;
  var promise = new Promise(function (resolve, reject) {
    resolveFn = resolve;
    rejectFn = reject;
  });

  function callback (err, value) {
    if (err) {
      rejectFn(err);
    } else {
      resolveFn(value);
    }
  }

  callback.promise = promise;
  return callback
}

var promisify_1 = promisify;
|
||
|
||
// When the caller omitted `options`, the first argument is the callback.
var getCallback$2 = function (options, callback) {
  if (typeof options === 'function') return options
  return callback
};

// Anything that is not a real object (including null) becomes `{}`.
var getOptions$2 = function (options) {
  if (options !== null && typeof options === 'object') return options
  return {}
};

var common = {
  getCallback: getCallback$2,
  getOptions: getOptions$2
};
|
||
|
||
var WriteError$1 = errors.WriteError;

var getCallback$1 = common.getCallback;
var getOptions$1 = common.getOptions;

/**
 * Chained batch builder returned by levelup.batch() with no arguments.
 * Mirrors every queued operation into `this.ops` so the 'batch' event
 * can report exactly what was written.
 * @param {Object} levelup - the owning LevelUP instance.
 */
function Batch$1 (levelup) {
  // TODO (next major): remove this._levelup alias
  this.db = this._levelup = levelup;
  this.batch = levelup.db.batch();
  this.ops = [];
  this.length = 0;
}

// Queue a put; store-level failures surface synchronously as WriteError.
Batch$1.prototype.put = function (key, value) {
  try {
    this.batch.put(key, value);
  } catch (e) {
    throw new WriteError$1(e)
  }

  this.ops.push({ type: 'put', key: key, value: value });
  this.length++;

  return this
};

// Queue a delete; chainable like put().
Batch$1.prototype.del = function (key) {
  try {
    this.batch.del(key);
  } catch (err) {
    throw new WriteError$1(err)
  }

  this.ops.push({ type: 'del', key: key });
  this.length++;

  return this
};

// Drop all queued operations, resetting the mirror list as well.
Batch$1.prototype.clear = function () {
  try {
    this.batch.clear();
  } catch (err) {
    throw new WriteError$1(err)
  }

  this.ops = [];
  this.length = 0;

  return this
};

/**
 * Commit the batch. Emits 'batch' with the queued ops on success.
 * Returns a promise when no callback is given. Note the asymmetry:
 * synchronous write errors are thrown, asynchronous ones go to the
 * callback wrapped in WriteError.
 */
Batch$1.prototype.write = function (options, callback) {
  var levelup = this._levelup;
  var ops = this.ops;
  var promise;

  callback = getCallback$1(options, callback);

  if (!callback) {
    callback = promisify_1();
    promise = callback.promise;
  }

  options = getOptions$1(options);

  try {
    this.batch.write(options, function (err) {
      if (err) { return callback(new WriteError$1(err)) }
      levelup.emit('batch', ops);
      callback();
    });
  } catch (err) {
    throw new WriteError$1(err)
  }

  return promise
};

var batch = Batch$1;
|
||
|
||
/**
 * Lexicographic comparison of two array-like byte sequences, mirroring
 * Buffer.compare semantics: returns -1, 0 or 1.
 */
function compare$1(a, b) {
  if (a === b) {
    return 0;
  }

  // Start by comparing lengths; a differing element (if any) in the
  // shared prefix takes precedence and replaces the operands.
  var x = a.length;
  var y = b.length;
  var shared = Math.min(x, y);

  for (var idx = 0; idx < shared; idx++) {
    if (a[idx] !== b[idx]) {
      x = a[idx];
      y = b[idx];
      break;
    }
  }

  if (x < y) return -1;
  if (y < x) return 1;
  return 0;
}
|
||
var hasOwn = Object.prototype.hasOwnProperty;

// Object.keys fallback for pre-ES5 environments.
var objectKeys = Object.keys || function (obj) {
  var keys = [];
  for (var key in obj) {
    if (hasOwn.call(obj, key)) keys.push(key);
  }
  return keys;
};

var pSlice = Array.prototype.slice;

var _functionsHaveNames;
// Detect (and cache) whether this engine infers Function#name.
function functionsHaveNames() {
  if (_functionsHaveNames === undefined) {
    _functionsHaveNames = (function foo() {}).name === 'foo';
  }
  return _functionsHaveNames;
}
|
||
// Canonical [[Class]] tag of a value, e.g. "[object Array]".
function pToString (obj) {
  var toStr = Object.prototype.toString;
  return toStr.call(obj);
}
|
||
/**
 * True when `arrbuf` is an ArrayBuffer view (typed array / DataView).
 * Buffers are deliberately excluded: the assert code paths handle them
 * separately from other views.
 */
function isView(arrbuf) {
  if (buffer.isBuffer(arrbuf)) {
    return false;
  }
  if (typeof global$1.ArrayBuffer !== 'function') {
    return false;
  }
  if (typeof ArrayBuffer.isView === 'function') {
    return ArrayBuffer.isView(arrbuf);
  }

  // Manual fallback for engines without ArrayBuffer.isView.
  if (!arrbuf) {
    return false;
  }
  if (arrbuf instanceof DataView) {
    return true;
  }
  return !!(arrbuf.buffer && arrbuf.buffer instanceof ArrayBuffer);
}
|
||
// 1. The assert module provides functions that throw
// AssertionError's when particular conditions are not met. The
// assert module must conform to the following interface.

// Root assert(value, message): throws when `value` is falsy.
function assert$1(value, message) {
  if (value) return;
  fail$1(value, true, message, '==', ok);
}
|
||
|
||
// 2. The AssertionError is defined in assert.
|
||
// new assert.AssertionError({ message: message,
|
||
// actual: actual,
|
||
// expected: expected })
|
||
|
||
var regex = /\s*function\s+([^\(\s]*)\s*/;
// based on https://github.com/ljharb/function.prototype.name/blob/adeeeec8bfcc6068b187d7d9fb3d5bb1d3a30899/implementation.js
// Resolve a function's name, parsing its source text on engines where
// Function.prototype.name is unsupported.
function getName(func) {
  if (!isFunction(func)) {
    return;
  }
  if (functionsHaveNames()) {
    return func.name;
  }
  var match = func.toString().match(regex);
  return match && match[1];
}
|
||
assert$1.AssertionError = AssertionError;

/**
 * Error thrown by failed assertions. Mirrors Node's assert.AssertionError:
 * carries `actual`, `expected` and `operator`, and synthesizes a message
 * from them when the caller did not provide one (`generatedMessage`).
 */
function AssertionError(options) {
  this.name = 'AssertionError';
  this.actual = options.actual;
  this.expected = options.expected;
  this.operator = options.operator;
  if (options.message) {
    this.message = options.message;
    this.generatedMessage = false;
  } else {
    this.message = getMessage(this);
    this.generatedMessage = true;
  }
  // Trim the stack so it starts at the assertion call site, not inside
  // the assert module itself.
  var stackStartFunction = options.stackStartFunction || fail$1;
  if (Error.captureStackTrace) {
    Error.captureStackTrace(this, stackStartFunction);
  } else {
    // non v8 browsers so we can have a stacktrace
    var err = new Error();
    if (err.stack) {
      var out = err.stack;

      // try to strip useless frames
      var fn_name = getName(stackStartFunction);
      var idx = out.indexOf('\n' + fn_name);
      if (idx >= 0) {
        // once we have located the function frame
        // we need to strip out everything before it (and its line)
        var next_line = out.indexOf('\n', idx + 1);
        out = out.substring(next_line + 1);
      }

      this.stack = out;
    }
  }
}

// assert.AssertionError instanceof Error
inherits$2(AssertionError, Error);
|
||
|
||
// Clamp string `s` to at most `n` characters; non-strings pass through.
function truncate(s, n) {
  if (typeof s !== 'string') {
    return s;
  }
  return s.length < n ? s : s.slice(0, n);
}
|
||
// Render a value for an assertion message. Functions get a handwritten
// "[Function: name]" form only on engines lacking fn.name support.
function inspect(something) {
  if (functionsHaveNames() || !isFunction(something)) {
    return inspect$2(something);
  }
  var rawname = getName(something);
  return rawname ? '[Function: ' + rawname + ']' : '[Function]';
}
|
||
// Default AssertionError message: "<actual> <operator> <expected>",
// with each side rendered and clamped to 128 characters.
function getMessage(self) {
  var actual = truncate(inspect(self.actual), 128);
  var expected = truncate(inspect(self.expected), 128);
  return actual + ' ' + self.operator + ' ' + expected;
}
|
||
|
||
// At present only the three keys mentioned above are used and
|
||
// understood by the spec. Implementations or sub modules can pass
|
||
// other keys to the AssertionError's constructor - they will be
|
||
// ignored.
|
||
|
||
// 3. All of the following functions must throw an AssertionError
|
||
// when a corresponding condition is not met, with a message that
|
||
// may be undefined if not provided. All assertion methods provide
|
||
// both the actual and expected values to the assertion error for
|
||
// display purposes.
|
||
|
||
// Throw an AssertionError describing a failed comparison.
function fail$1(actual, expected, message, operator, stackStartFunction) {
  throw new AssertionError({
    actual: actual,
    expected: expected,
    message: message,
    operator: operator,
    stackStartFunction: stackStartFunction
  });
}

// EXTENSION! allows for well behaved errors defined elsewhere.
assert$1.fail = fail$1;
|
||
|
||
// 4. Pure assertion tests whether a value is truthy, as determined
|
||
// by !!guard.
|
||
// assert.ok(guard, message_opt);
|
||
// This statement is equivalent to assert.equal(true, !!guard,
|
||
// message_opt);. To test strictly for the value true, use
|
||
// assert.strictEqual(true, guard, message_opt);.
|
||
|
||
// assert.ok(guard, message): throws when `guard` is falsy.
function ok(value, message) {
  if (value) return;
  fail$1(value, true, message, '==', ok);
}
assert$1.ok = ok;
|
||
|
||
// 5. The equality assertion tests shallow, coercive equality with
|
||
// ==.
|
||
// assert.equal(actual, expected, message_opt);
|
||
assert$1.equal = equal;
// Coercive (==) equality assertion.
function equal(actual, expected, message) {
  if (actual == expected) return;
  fail$1(actual, expected, message, '==', equal);
}
|
||
|
||
// 6. The non-equality assertion tests for whether two objects are not equal
|
||
// with != assert.notEqual(actual, expected, message_opt);
|
||
assert$1.notEqual = notEqual;
// Coercive (!=) inequality assertion.
function notEqual(actual, expected, message) {
  if (actual != expected) return;
  fail$1(actual, expected, message, '!=', notEqual);
}
|
||
|
||
// 7. The equivalence assertion tests a deep equality relation.
|
||
// assert.deepEqual(actual, expected, message_opt);
|
||
assert$1.deepEqual = deepEqual;
// Loose deep-equality assertion (primitives compared with ==).
function deepEqual(actual, expected, message) {
  var same = _deepEqual(actual, expected, false);
  if (!same) {
    fail$1(actual, expected, message, 'deepEqual', deepEqual);
  }
}

assert$1.deepStrictEqual = deepStrictEqual;
// Strict deep-equality assertion: primitives compared with === and
// prototypes must match.
function deepStrictEqual(actual, expected, message) {
  var same = _deepEqual(actual, expected, true);
  if (!same) {
    fail$1(actual, expected, message, 'deepStrictEqual', deepStrictEqual);
  }
}
|
||
|
||
/**
 * Core deep-equality shared by deepEqual/deepStrictEqual. `strict`
 * toggles === vs == for primitives and adds a prototype check; `memos`
 * tracks already-visited object pairs so cyclic structures terminate.
 */
function _deepEqual(actual, expected, strict, memos) {
  // 7.1. All identical values are equivalent, as determined by ===.
  if (actual === expected) {
    return true;
  } else if (buffer.isBuffer(actual) && buffer.isBuffer(expected)) {
    return compare$1(actual, expected) === 0;

  // 7.2. If the expected value is a Date object, the actual value is
  // equivalent if it is also a Date object that refers to the same time.
  } else if (isDate(actual) && isDate(expected)) {
    return actual.getTime() === expected.getTime();

  // 7.3 If the expected value is a RegExp object, the actual value is
  // equivalent if it is also a RegExp object with the same source and
  // properties (`global`, `multiline`, `lastIndex`, `ignoreCase`).
  } else if (isRegExp(actual) && isRegExp(expected)) {
    return actual.source === expected.source &&
           actual.global === expected.global &&
           actual.multiline === expected.multiline &&
           actual.lastIndex === expected.lastIndex &&
           actual.ignoreCase === expected.ignoreCase;

  // 7.4. Other pairs that do not both pass typeof value == 'object',
  // equivalence is determined by ==.
  } else if ((actual === null || typeof actual !== 'object') &&
             (expected === null || typeof expected !== 'object')) {
    return strict ? actual === expected : actual == expected;

  // If both values are instances of typed arrays, wrap their underlying
  // ArrayBuffers in a Buffer each to increase performance
  // This optimization requires the arrays to have the same type as checked by
  // Object.prototype.toString (aka pToString). Never perform binary
  // comparisons for Float*Arrays, though, since e.g. +0 === -0 but their
  // bit patterns are not identical.
  } else if (isView(actual) && isView(expected) &&
             pToString(actual) === pToString(expected) &&
             !(actual instanceof Float32Array ||
               actual instanceof Float64Array)) {
    return compare$1(new Uint8Array(actual.buffer),
                     new Uint8Array(expected.buffer)) === 0;

  // 7.5 For all other Object pairs, including Array objects, equivalence is
  // determined by having the same number of owned properties (as verified
  // with Object.prototype.hasOwnProperty.call), the same set of keys
  // (although not necessarily the same order), equivalent values for every
  // corresponding key, and an identical 'prototype' property. Note: this
  // accounts for both named and indexed properties on Arrays.
  } else if (buffer.isBuffer(actual) !== buffer.isBuffer(expected)) {
    return false;
  } else {
    memos = memos || {actual: [], expected: []};

    // Cycle guard: if this exact pair sits at the same position in both
    // visited lists, treat it as equal instead of recursing forever.
    var actualIndex = memos.actual.indexOf(actual);
    if (actualIndex !== -1) {
      if (actualIndex === memos.expected.indexOf(expected)) {
        return true;
      }
    }

    memos.actual.push(actual);
    memos.expected.push(expected);

    return objEquiv(actual, expected, strict, memos);
  }
}
|
||
|
||
// True when `object` is an `arguments` exotic object.
function isArguments(object) {
  var tag = Object.prototype.toString.call(object);
  return tag === '[object Arguments]';
}
|
||
|
||
/**
 * Structural comparison of two non-primitive values: same own keys
 * (after sorting) and recursively equal values. `arguments` objects
 * are normalized to arrays first; in strict mode prototypes must match.
 */
function objEquiv(a, b, strict, actualVisitedObjects) {
  if (a === null || a === undefined || b === null || b === undefined)
    return false;
  // if one is a primitive, the other must be same
  if (isPrimitive(a) || isPrimitive(b))
    return a === b;
  if (strict && Object.getPrototypeOf(a) !== Object.getPrototypeOf(b))
    return false;
  var aIsArgs = isArguments(a);
  var bIsArgs = isArguments(b);
  // An arguments object never equals a non-arguments object.
  if ((aIsArgs && !bIsArgs) || (!aIsArgs && bIsArgs))
    return false;
  if (aIsArgs) {
    a = pSlice.call(a);
    b = pSlice.call(b);
    return _deepEqual(a, b, strict);
  }
  var ka = objectKeys(a);
  var kb = objectKeys(b);
  var key, i;
  // having the same number of owned properties (keys incorporates
  // hasOwnProperty)
  if (ka.length !== kb.length)
    return false;
  //the same set of keys (although not necessarily the same order),
  ka.sort();
  kb.sort();
  //~~~cheap key test
  for (i = ka.length - 1; i >= 0; i--) {
    if (ka[i] !== kb[i])
      return false;
  }
  //equivalent values for every corresponding key, and
  //~~~possibly expensive deep test
  for (i = ka.length - 1; i >= 0; i--) {
    key = ka[i];
    if (!_deepEqual(a[key], b[key], strict, actualVisitedObjects))
      return false;
  }
  return true;
}
|
||
|
||
// 8. The non-equivalence assertion tests for any deep inequality.
|
||
// assert.notDeepEqual(actual, expected, message_opt);
|
||
assert$1.notDeepEqual = notDeepEqual;
// Negation of deepEqual.
function notDeepEqual(actual, expected, message) {
  var same = _deepEqual(actual, expected, false);
  if (same) {
    fail$1(actual, expected, message, 'notDeepEqual', notDeepEqual);
  }
}

assert$1.notDeepStrictEqual = notDeepStrictEqual;
// Negation of deepStrictEqual.
function notDeepStrictEqual(actual, expected, message) {
  var same = _deepEqual(actual, expected, true);
  if (same) {
    fail$1(actual, expected, message, 'notDeepStrictEqual', notDeepStrictEqual);
  }
}
|
||
|
||
|
||
// 9. The strict equality assertion tests strict equality, as determined by ===.
|
||
// assert.strictEqual(actual, expected, message_opt);
|
||
assert$1.strictEqual = strictEqual;
// Strict (===) equality assertion.
function strictEqual(actual, expected, message) {
  if (actual === expected) return;
  fail$1(actual, expected, message, '===', strictEqual);
}

assert$1.notStrictEqual = notStrictEqual;
// Strict (!==) inequality assertion.
function notStrictEqual(actual, expected, message) {
  if (actual !== expected) return;
  fail$1(actual, expected, message, '!==', notStrictEqual);
}
|
||
|
||
/**
 * Decide whether a thrown value matches the `expected` spec, which may
 * be a RegExp (tested against the error's string form), a constructor,
 * or a predicate function returning true.
 */
function expectedException(actual, expected) {
  if (!actual || !expected) {
    return false;
  }

  if (Object.prototype.toString.call(expected) === '[object RegExp]') {
    return expected.test(actual);
  }

  try {
    if (actual instanceof expected) {
      return true;
    }
  } catch (e) {
    // Ignore. The instanceof check doesn't work for arrow functions.
  }

  // An Error class that failed the instanceof check above cannot match
  // by invocation either.
  if (Error.isPrototypeOf(expected)) {
    return false;
  }

  return expected.call({}, actual) === true;
}
|
||
|
||
// Run `block`, capturing (rather than propagating) anything it throws.
// Returns the thrown value, or undefined when nothing was thrown.
function _tryBlock(block) {
  try {
    block();
  } catch (e) {
    return e;
  }
  return undefined;
}
|
||
|
||
/**
 * Shared implementation of assert.throws / assert.doesNotThrow.
 * Runs `block`, captures anything it throws, and fails or rethrows
 * according to `shouldThrow` and the optional `expected` matcher
 * (RegExp, constructor, or predicate — see expectedException).
 */
function _throws(shouldThrow, block, expected, message) {
  var actual;

  if (typeof block !== 'function') {
    throw new TypeError('"block" argument must be a function');
  }

  // Allow throws(block, 'message') with no matcher.
  if (typeof expected === 'string') {
    message = expected;
    expected = null;
  }

  actual = _tryBlock(block);

  message = (expected && expected.name ? ' (' + expected.name + ').' : '.') +
            (message ? ' ' + message : '.');

  if (shouldThrow && !actual) {
    fail$1(actual, expected, 'Missing expected exception' + message);
  }

  // NOTE(review): `message` was rebuilt into a string just above, so this
  // flag appears to always be true — this is faithful to the upstream
  // commonjs-assert implementation; do not "fix" without syncing upstream.
  var userProvidedMessage = typeof message === 'string';
  var isUnwantedException = !shouldThrow && isError(actual);
  var isUnexpectedException = !shouldThrow && actual && !expected;

  if ((isUnwantedException &&
      userProvidedMessage &&
      expectedException(actual, expected)) ||
      isUnexpectedException) {
    fail$1(actual, expected, 'Got unwanted exception' + message);
  }

  // Rethrow when the error does not match the matcher (throws mode) or
  // when any error escaped at all (doesNotThrow mode).
  if ((shouldThrow && actual && expected &&
      !expectedException(actual, expected)) || (!shouldThrow && actual)) {
    throw actual;
  }
}
|
||
|
||
// 11. Expected to throw an error:
// assert.throws(block, Error_opt, message_opt);
assert$1.throws = throws;
function throws(block, /*optional*/error, /*optional*/message) {
  _throws(true, block, error, message);
}

// EXTENSION! This is annoying to write outside this module.
assert$1.doesNotThrow = doesNotThrow;
function doesNotThrow(block, /*optional*/error, /*optional*/message) {
  _throws(false, block, error, message);
}

assert$1.ifError = ifError;
// Rethrow `err` when truthy; no-op otherwise (node callback guard).
function ifError(err) {
  if (!err) return;
  throw err;
}
|
||
|
||
var EventEmitter = EventEmitter$1.EventEmitter;
var inherits = require$$1.inherits;

var getCallback = common.getCallback;
var getOptions = common.getOptions;

// Error types used throughout the LevelUP methods below.
var WriteError = errors.WriteError;
var ReadError = errors.ReadError;
var NotFoundError = errors.NotFoundError;
var OpenError = errors.OpenError;
var InitializationError = errors.InitializationError;

// Possible AbstractLevelDOWN#status values:
// - 'new' - newly created, not opened or closed
// - 'opening' - waiting for the database to be opened, post open()
// - 'open' - successfully opened the database, available for use
// - 'closing' - waiting for the database to be closed, post close()
// - 'closed' - database has been successfully closed, should not be
// used except for another open() operation
||
|
||
/**
 * LevelUP wraps an abstract-leveldown store with events, promises and
 * deferred-open semantics. Usable with or without `new`.
 * @param {Object} db - abstract-leveldown compliant store.
 * @param {Object|Function} [options] - options, or the open callback.
 * @param {Function} [callback] - invoked once open completes.
 */
function LevelUP (db, options, callback) {
  if (!(this instanceof LevelUP)) {
    return new LevelUP(db, options, callback)
  }

  var error;
  var self = this;

  EventEmitter.call(this);
  this.setMaxListeners(Infinity);

  if (typeof options === 'function') {
    callback = options;
    options = {};
  }

  options = options || {};

  if (!db || typeof db !== 'object') {
    error = new InitializationError('First argument must be an abstract-leveldown compliant store');
    if (typeof callback === 'function') {
      return process$1.nextTick(callback, error)
    }
    throw error
  }

  assert$1.strictEqual(typeof db.status, 'string', '.status required, old abstract-leveldown');

  this.options = getOptions(options);
  this._db = db;
  // Wrap the store in deferred-leveldown so operations issued before
  // open completes are queued instead of failing.
  this.db = new deferredLeveldown(db);
  this.open(callback || function (err) {
    if (err) self.emit('error', err);
  });

  // Create manifest based on deferred-leveldown's
  this.supports = levelSupports(this.db.supports, {
    status: false,
    deferredOpen: true,
    openCallback: true,
    promises: true,
    streams: true
  });

  // Experimental: enrich levelup interface
  Object.keys(this.supports.additionalMethods).forEach(function (method) {
    if (this[method] != null) return

    // Don't do this.db[method].bind() because this.db is dynamic.
    this[method] = function () {
      return this.db[method].apply(this.db, arguments)
    };
  }, this);
}
|
||
|
||
// NOTE(review): upstream levelup assigns emit/once before calling
// inherits(); presumably this inherits implementation preserves
// existing prototype properties — verify before reordering.
LevelUP.prototype.emit = EventEmitter.prototype.emit;
LevelUP.prototype.once = EventEmitter.prototype.once;
inherits(LevelUP, EventEmitter);
|
||
|
||
/**
 * Open the underlying store. Idempotent: an already-open db calls back
 * on the next tick, a concurrently-opening db waits for the 'open'
 * event. Emits 'opening', then 'open' and 'ready' on success.
 * Returns a promise when no callback is given.
 */
LevelUP.prototype.open = function (opts, callback) {
  var self = this;
  var promise;

  if (typeof opts === 'function') {
    callback = opts;
    opts = null;
  }

  if (!callback) {
    callback = promisify_1();
    promise = callback.promise;
  }

  if (!opts) {
    opts = this.options;
  }

  if (this.isOpen()) {
    process$1.nextTick(callback, null, self);
    return promise
  }

  if (this._isOpening()) {
    this.once('open', function () { callback(null, self); });
    return promise
  }

  this.emit('opening');

  this.db.open(opts, function (err) {
    if (err) {
      return callback(new OpenError(err))
    }
    // Swap the deferred wrapper for the real store once open succeeds.
    self.db = self._db;
    callback(null, self);
    self.emit('open');
    self.emit('ready');
  });

  return promise
};
|
||
|
||
/**
 * Close the store. Handles every in-flight state: waits for a pending
 * open, coalesces with a pending close, and is a no-op when already
 * closed. Emits 'closing' then 'closed'. Returns a promise when no
 * callback is given.
 */
LevelUP.prototype.close = function (callback) {
  var self = this;
  var promise;

  if (!callback) {
    callback = promisify_1();
    promise = callback.promise;
  }

  if (this.isOpen()) {
    this.db.close(function () {
      self.emit('closed');
      callback.apply(null, arguments);
    });
    this.emit('closing');
    // Re-wrap so operations issued after close() queue against a fresh
    // deferred-leveldown instead of hitting the closing store.
    this.db = new deferredLeveldown(this._db);
  } else if (this.isClosed()) {
    process$1.nextTick(callback);
  } else if (this.db.status === 'closing') {
    this.once('closed', callback);
  } else if (this._isOpening()) {
    this.once('open', function () {
      self.close(callback);
    });
  }

  return promise
};
|
||
|
||
// Status predicates. `this.db` is either the real backend or the
// deferred-leveldown wrapper; both expose the abstract-leveldown
// status string ('new' | 'opening' | 'open' | 'closing' | 'closed').

LevelUP.prototype.isOpen = function () {
  var status = this.db.status;
  return status === 'open'
};

LevelUP.prototype._isOpening = function () {
  var status = this.db.status;
  return status === 'opening'
};

LevelUP.prototype.isClosed = function () {
  // 'closing', 'closed' and the never-opened 'new' state all count.
  var status = this.db.status;
  return (/^clos|new/).test(status)
};
|
||
|
||
/**
 * Fetch the value stored under `key`. Missing keys yield a
 * NotFoundError (which carries `notFound`/`status` markers); any other
 * failure is wrapped in ReadError. Returns a promise when no callback
 * is given.
 */
LevelUP.prototype.get = function (key, options, callback) {
  var promise;

  callback = getCallback(options, callback);

  if (!callback) {
    callback = promisify_1();
    promise = callback.promise;
  }

  if (maybeError(this, callback)) { return promise }

  options = getOptions(options);

  this.db.get(key, options, function (err, value) {
    if (err) {
      // Backends signal "not found" either by message text or flag.
      if ((/notfound/i).test(err) || err.notFound) {
        err = new NotFoundError('Key not found in database [' + key + ']', err);
      } else {
        err = new ReadError(err);
      }
      return callback(err)
    }
    callback(null, value);
  });

  return promise
};
|
||
|
||
/**
 * Store `value` under `key`. Emits 'put' on success (before the
 * callback fires); failures are wrapped in WriteError. Returns a
 * promise when no callback is given.
 */
LevelUP.prototype.put = function (key, value, options, callback) {
  var self = this;
  var promise;

  callback = getCallback(options, callback);

  if (!callback) {
    callback = promisify_1();
    promise = callback.promise;
  }

  if (maybeError(this, callback)) { return promise }

  options = getOptions(options);

  this.db.put(key, value, options, function (err) {
    if (err) {
      return callback(new WriteError(err))
    }
    self.emit('put', key, value);
    callback();
  });

  return promise
};
|
||
|
||
/**
 * Delete `key`. Emits 'del' on success; failures are wrapped in
 * WriteError. Returns a promise when no callback is given.
 */
LevelUP.prototype.del = function (key, options, callback) {
  var self = this;
  var promise;

  callback = getCallback(options, callback);

  if (!callback) {
    callback = promisify_1();
    promise = callback.promise;
  }

  if (maybeError(this, callback)) { return promise }

  options = getOptions(options);

  this.db.del(key, options, function (err) {
    if (err) {
      return callback(new WriteError(err))
    }
    self.emit('del', key);
    callback();
  });

  return promise
};
|
||
|
||
/**
 * Apply `arr` of {type, key[, value]} operations atomically — or, when
 * called with no arguments at all, return a chained Batch builder.
 * Emits 'batch' with the applied ops on success. Returns a promise
 * when no callback is given.
 */
LevelUP.prototype.batch = function (arr, options, callback) {
  if (!arguments.length) {
    return new batch(this)
  }

  var self = this;
  var promise;

  if (typeof arr === 'function') callback = arr;
  else callback = getCallback(options, callback);

  if (!callback) {
    callback = promisify_1();
    promise = callback.promise;
  }

  if (maybeError(this, callback)) { return promise }

  options = getOptions(options);

  this.db.batch(arr, options, function (err) {
    if (err) {
      return callback(new WriteError(err))
    }
    self.emit('batch', arr);
    callback();
  });

  return promise
};
|
||
|
||
// Expose the underlying store's iterator directly.
LevelUP.prototype.iterator = function (options) {
  return this.db.iterator(options)
};
|
||
|
||
/**
 * Delete all entries in the (optional) range described by `options`.
 * Emits 'clear' with the options used. Returns a promise when no
 * callback is given.
 */
LevelUP.prototype.clear = function (options, callback) {
  var self = this;
  var promise;

  callback = getCallback(options, callback);
  options = getOptions(options);

  if (!callback) {
    callback = promisify_1();
    promise = callback.promise;
  }

  if (maybeError(this, callback)) {
    return promise
  }

  this.db.clear(options, function (err) {
    if (err) {
      return callback(new WriteError(err))
    }
    self.emit('clear', options);
    callback();
  });

  return promise
};
|
||
|
||
// Readable streams over the database. createReadStream yields
// { key, value } entries; the key/value variants below restrict the
// shape via the `keys`/`values` iterator options.
LevelUP.prototype.readStream =
LevelUP.prototype.createReadStream = function (options) {
  var opts = immutable({ keys: true, values: true }, options);
  if (typeof opts.limit !== 'number') {
    opts.limit = -1;
  }
  return new levelIteratorStream(this.db.iterator(opts), opts)
};

LevelUP.prototype.keyStream =
LevelUP.prototype.createKeyStream = function (options) {
  var opts = immutable(options, { keys: true, values: false });
  return this.createReadStream(opts)
};

LevelUP.prototype.valueStream =
LevelUP.prototype.createValueStream = function (options) {
  var opts = immutable(options, { keys: false, values: true });
  return this.createReadStream(opts)
};

LevelUP.prototype.toString = function () {
  return 'LevelUP'
};

LevelUP.prototype.type = 'levelup';
|
||
|
||
// Fail fast when the database cannot service requests: calls back
// (asynchronously) with a ReadError and returns true when `db` is
// neither open nor opening, so callers can bail out early.
function maybeError (db, callback) {
  if (db._isOpening() || db.isOpen()) return;
  process$1.nextTick(callback, new ReadError('Database is not open'));
  return true
}

LevelUP.errors = errors;
var levelup = LevelUP.default = LevelUP;
|
||
|
||
// level-codec's built-in encodings registry. Each encoding exposes
// encode/decode plus `buffer` (whether encode yields a Buffer) and a
// canonical `type` name.
var encodings = createCommonjsModule(function (module, exports) {
  var Buffer = buffer.Buffer;

  // utf8: stringify anything that is not already binary/nullish.
  exports.utf8 = exports['utf-8'] = {
    encode: function (data) {
      return isBinary(data) ? data : String(data)
    },
    decode: identity,
    buffer: false,
    type: 'utf8'
  };

  exports.json = {
    encode: JSON.stringify,
    decode: JSON.parse,
    buffer: false,
    type: 'json'
  };

  exports.binary = {
    encode: function (data) {
      return isBinary(data) ? data : Buffer.from(data)
    },
    decode: identity,
    buffer: true,
    type: 'binary'
  };

  // none/id: pass-through encoding (note both names alias one object).
  exports.none = {
    encode: identity,
    decode: identity,
    buffer: false,
    type: 'id'
  };

  exports.id = exports.none;

  // Buffer-backed string encodings, generated uniformly below.
  var bufferEncodings = [
    'hex',
    'ascii',
    'base64',
    'ucs2',
    'ucs-2',
    'utf16le',
    'utf-16le'
  ];

  bufferEncodings.forEach(function (type) {
    exports[type] = {
      encode: function (data) {
        return isBinary(data) ? data : Buffer.from(data, type)
      },
      decode: function (buffer) {
        return buffer.toString(type)
      },
      buffer: true,
      type: type
    };
  });

  function identity (value) {
    return value
  }

  // null/undefined pass through so absent values survive round-trips.
  function isBinary (data) {
    return data === undefined || data === null || Buffer.isBuffer(data)
  }
});
|
||
|
||
var levelCodec = Codec;

/**
 * Key/value encoding manager (level-codec). Usable without `new`.
 * @param {Object} [opts] - default keyEncoding / valueEncoding (or the
 *   legacy `encoding` alias) applied when a call supplies none.
 */
function Codec (opts) {
  if (!(this instanceof Codec)) {
    return new Codec(opts)
  }
  this.opts = opts || {};
  this.encodings = encodings;
}
|
||
|
||
// Resolve an encoding: string names are looked up in the registry and
// anything unknown or missing falls back to the identity encoding.
Codec.prototype._encoding = function (encoding) {
  var resolved = typeof encoding === 'string' ? encodings[encoding] : encoding;
  return resolved || encodings.id
};
|
||
|
||
// Pick the key encoding. Precedence: per-op > per-call > codec default.
Codec.prototype._keyEncoding = function (opts, batchOpts) {
  var encoding =
    (batchOpts && batchOpts.keyEncoding) ||
    (opts && opts.keyEncoding) ||
    this.opts.keyEncoding;
  return this._encoding(encoding)
};

// Pick the value encoding with the same precedence; `encoding` is a
// legacy alias for `valueEncoding` at every level.
Codec.prototype._valueEncoding = function (opts, batchOpts) {
  var encoding =
    (batchOpts && (batchOpts.valueEncoding || batchOpts.encoding)) ||
    (opts && (opts.valueEncoding || opts.encoding)) ||
    (this.opts.valueEncoding || this.opts.encoding);
  return this._encoding(encoding)
};
|
||
|
||
// Thin wrappers that select the applicable encoding, then apply it.

Codec.prototype.encodeKey = function (key, opts, batchOpts) {
  var enc = this._keyEncoding(opts, batchOpts);
  return enc.encode(key)
};

Codec.prototype.encodeValue = function (value, opts, batchOpts) {
  var enc = this._valueEncoding(opts, batchOpts);
  return enc.encode(value)
};

Codec.prototype.decodeKey = function (key, opts) {
  var enc = this._keyEncoding(opts);
  return enc.decode(key)
};

Codec.prototype.decodeValue = function (value, opts) {
  var enc = this._valueEncoding(opts);
  return enc.decode(value)
};
|
||
|
||
/**
 * Encode a batch's keys and values, tagging each op with 'binary'
 * key/valueEncoding where the selected codec produces Buffers, and
 * preserving sublevel `prefix` markers. Delete ops carry no `value`.
 */
Codec.prototype.encodeBatch = function (ops, opts) {
  var self = this;

  return ops.map(function (_op) {
    var op = {
      type: _op.type,
      key: self.encodeKey(_op.key, opts, _op)
    };
    if (self.keyAsBuffer(opts, _op)) op.keyEncoding = 'binary';
    if (_op.prefix) op.prefix = _op.prefix;
    if ('value' in _op) {
      op.value = self.encodeValue(_op.value, opts, _op);
      if (self.valueAsBuffer(opts, _op)) op.valueEncoding = 'binary';
    }
    return op
  })
};
|
||
|
||
var ltgtKeys = ['lt', 'gt', 'lte', 'gte', 'start', 'end'];

// Encode the range-boundary keys of an iterator options object while
// passing every other property through untouched.
Codec.prototype.encodeLtgt = function (ltgt) {
  var self = this;
  var ret = {};
  Object.keys(ltgt).forEach(function (key) {
    var isRangeKey = ltgtKeys.indexOf(key) > -1;
    ret[key] = isRangeKey ? self.encodeKey(ltgt[key], ltgt) : ltgt[key];
  });
  return ret
};
|
||
|
||
// Build a decoder for iterator output matching the requested shape:
// {key, value} entries, bare keys, bare values, or nothing at all.
Codec.prototype.createStreamDecoder = function (opts) {
  var self = this;

  if (opts.keys && opts.values) {
    return function (key, value) {
      return {
        key: self.decodeKey(key, opts),
        value: self.decodeValue(value, opts)
      }
    }
  }
  if (opts.keys) {
    return function (key) {
      return self.decodeKey(key, opts)
    }
  }
  if (opts.values) {
    return function (_, value) {
      return self.decodeValue(value, opts)
    }
  }
  return function () {}
};
|
||
|
||
// Whether the selected encodings produce Buffers; drives the store's
// `asBuffer` options.

Codec.prototype.keyAsBuffer = function (opts) {
  var enc = this._keyEncoding(opts);
  return enc.buffer
};

Codec.prototype.valueAsBuffer = function (opts) {
  var enc = this._valueEncoding(opts);
  return enc.buffer
};
|
||
|
||
var AbstractLevelDOWN$1 = abstractLeveldown.AbstractLevelDOWN;
var AbstractChainedBatch = abstractLeveldown.AbstractChainedBatch;
var AbstractIterator$1 = abstractLeveldown.AbstractIterator;

var EncodingError = errors.EncodingError;

// Optional store methods that take key ranges and therefore need their
// arguments run through the key encoder.
var rangeMethods = ['approximateSize', 'compactRange'];

// `DB` is a hoisted function declaration, so this forward reference
// is safe.
var encodingDown = DB.default = DB;
|
||
|
||
function DB (db, opts) {
|
||
if (!(this instanceof DB)) return new DB(db, opts)
|
||
|
||
var manifest = db.supports || {};
|
||
var additionalMethods = manifest.additionalMethods || {};
|
||
|
||
AbstractLevelDOWN$1.call(this, manifest);
|
||
|
||
this.supports.encodings = true;
|
||
this.supports.additionalMethods = {};
|
||
|
||
rangeMethods.forEach(function (m) {
|
||
// TODO (future major): remove this fallback
|
||
var fallback = typeof db[m] === 'function';
|
||
|
||
if (additionalMethods[m] || fallback) {
|
||
this.supports.additionalMethods[m] = true;
|
||
|
||
this[m] = function (start, end, opts, cb) {
|
||
start = this.codec.encodeKey(start, opts);
|
||
end = this.codec.encodeKey(end, opts);
|
||
return this.db[m](start, end, opts, cb)
|
||
};
|
||
}
|
||
}, this);
|
||
|
||
opts = opts || {};
|
||
if (typeof opts.keyEncoding === 'undefined') opts.keyEncoding = 'utf8';
|
||
if (typeof opts.valueEncoding === 'undefined') opts.valueEncoding = 'utf8';
|
||
|
||
this.db = db;
|
||
this.codec = new levelCodec(opts);
|
||
}
|
||
|
||
inherits_browser(DB, AbstractLevelDOWN$1);

DB.prototype.type = 'encoding-down';

// Serialization is the codec's job; pass data through untouched here.
DB.prototype._serializeKey =
DB.prototype._serializeValue = function (datum) {
  return datum;
};

DB.prototype._open = function (opts, cb) {
  this.db.open(opts, cb);
};

DB.prototype._close = function (cb) {
  this.db.close(cb);
};

// Encode key and value, then delegate the write.
DB.prototype._put = function (key, value, opts, cb) {
  key = this.codec.encodeKey(key, opts);
  value = this.codec.encodeValue(value, opts);
  this.db.put(key, value, opts, cb);
};

// Encode the key, fetch, then decode the value. Decoding failures are
// surfaced as EncodingError rather than thrown.
DB.prototype._get = function (key, opts, cb) {
  var self = this;
  key = this.codec.encodeKey(key, opts);
  opts.asBuffer = this.codec.valueAsBuffer(opts);
  this.db.get(key, opts, function (err, value) {
    if (err) return cb(err);
    try {
      value = self.codec.decodeValue(value, opts);
    } catch (err) {
      return cb(new EncodingError(err));
    }
    cb(null, value);
  });
};

DB.prototype._del = function (key, opts, cb) {
  key = this.codec.encodeKey(key, opts);
  this.db.del(key, opts, cb);
};

DB.prototype._chainedBatch = function () {
  return new Batch(this);
};

DB.prototype._batch = function (ops, opts, cb) {
  ops = this.codec.encodeBatch(ops, opts);
  this.db.batch(ops, opts, cb);
};

DB.prototype._iterator = function (opts) {
  opts.keyAsBuffer = this.codec.keyAsBuffer(opts);
  opts.valueAsBuffer = this.codec.valueAsBuffer(opts);
  return new Iterator$1(this, opts);
};

DB.prototype._clear = function (opts, callback) {
  opts = this.codec.encodeLtgt(opts);
  this.db.clear(opts, callback);
};
/**
 * Iterator wrapper that decodes keys/values coming out of the inner
 * store's iterator using the parent DB's codec.
 *
 * @param {DB} db encoding-down instance
 * @param {Object} opts iterator options (already ltgt-encoded here)
 */
function Iterator$1 (db, opts) {
  AbstractIterator$1.call(this, db);
  this.codec = db.codec;
  this.keys = opts.keys;
  this.values = opts.values;
  this.opts = this.codec.encodeLtgt(opts);
  this.it = db.db.iterator(this.opts);
}

inherits_browser(Iterator$1, AbstractIterator$1);

// Advance the inner iterator and decode the emitted entry. Decoding
// failures surface as EncodingError.
Iterator$1.prototype._next = function (cb) {
  var self = this;
  this.it.next(function (err, key, value) {
    if (err) return cb(err);

    try {
      if (self.keys && typeof key !== 'undefined') {
        key = self.codec.decodeKey(key, self.opts);
      } else {
        key = undefined;
      }

      if (self.values && typeof value !== 'undefined') {
        value = self.codec.decodeValue(value, self.opts);
      } else {
        value = undefined;
      }
    } catch (err) {
      return cb(new EncodingError(err));
    }

    cb(null, key, value);
  });
};

Iterator$1.prototype._seek = function (key) {
  key = this.codec.encodeKey(key, this.opts);
  this.it.seek(key);
};

Iterator$1.prototype._end = function (cb) {
  this.it.end(cb);
};
/**
 * Chained batch that encodes keys/values with the parent DB's codec
 * before forwarding to the inner store's batch.
 *
 * @param {DB} db encoding-down instance
 * @param {Object} [codec] unused; kept for signature compatibility
 */
function Batch (db, codec) {
  AbstractChainedBatch.call(this, db);
  this.codec = db.codec;
  this.batch = db.db.batch();
}

inherits_browser(Batch, AbstractChainedBatch);

Batch.prototype._put = function (key, value) {
  key = this.codec.encodeKey(key);
  value = this.codec.encodeValue(value);
  this.batch.put(key, value);
};

Batch.prototype._del = function (key) {
  key = this.codec.encodeKey(key);
  this.batch.del(key);
};

Batch.prototype._clear = function () {
  this.batch.clear();
};

Batch.prototype._write = function (opts, cb) {
  this.batch.write(opts, cb);
};
/**
 * level-packager: wrap a leveldown factory into a convenient `Level`
 * constructor that stacks encoding-down and levelup on top of it.
 *
 * @param {Function} leveldown store factory (location, options) => down
 * @returns {Function} Level(location, [options], [callback])
 */
function packager (leveldown) {
  function Level (location, options, callback) {
    // Support (callback), (location, callback) call shapes.
    if (typeof location === 'function') {
      callback = location;
    } else if (typeof options === 'function') {
      callback = options;
    }

    if (!isObject(options)) {
      options = isObject(location) ? location : {};
    }

    return levelup(encodingDown(leveldown(location, options), options), options, callback);
  }

  function isObject (o) {
    return typeof o === 'object' && o !== null;
  }

  // Expose static store-management helpers when the backend has them.
  ['destroy', 'repair'].forEach(function (m) {
    if (typeof leveldown[m] === 'function') {
      Level[m] = function () {
        leveldown[m].apply(leveldown, arguments);
      };
    }
  });

  Level.errors = levelup.errors;

  return Level;
}

var levelPackager = packager;
var ltgt = createCommonjsModule(function (module, exports) {
|
||
exports.compare = function (a, b) {
|
||
|
||
if(buffer.Buffer.isBuffer(a)) {
|
||
var l = Math.min(a.length, b.length);
|
||
for(var i = 0; i < l; i++) {
|
||
var cmp = a[i] - b[i];
|
||
if(cmp) return cmp
|
||
}
|
||
return a.length - b.length
|
||
}
|
||
|
||
return a < b ? -1 : a > b ? 1 : 0
|
||
};
|
||
|
||
// to be compatible with the current abstract-leveldown tests
|
||
// nullish or empty strings.
|
||
// I could use !!val but I want to permit numbers and booleans,
|
||
// if possible.
|
||
|
||
function isDef (val) {
|
||
return val !== undefined && val !== ''
|
||
}
|
||
|
||
function has (range, name) {
|
||
return Object.hasOwnProperty.call(range, name)
|
||
}
|
||
|
||
function hasKey(range, name) {
|
||
return Object.hasOwnProperty.call(range, name) && name
|
||
}
|
||
|
||
var lowerBoundKey = exports.lowerBoundKey = function (range) {
|
||
return (
|
||
hasKey(range, 'gt')
|
||
|| hasKey(range, 'gte')
|
||
|| hasKey(range, 'min')
|
||
|| (range.reverse ? hasKey(range, 'end') : hasKey(range, 'start'))
|
||
|| undefined
|
||
)
|
||
};
|
||
|
||
var lowerBound = exports.lowerBound = function (range, def) {
|
||
var k = lowerBoundKey(range);
|
||
return k ? range[k] : def
|
||
};
|
||
|
||
var lowerBoundInclusive = exports.lowerBoundInclusive = function (range) {
|
||
return has(range, 'gt') ? false : true
|
||
};
|
||
|
||
var upperBoundInclusive = exports.upperBoundInclusive =
|
||
function (range) {
|
||
return (has(range, 'lt') /*&& !range.maxEx*/) ? false : true
|
||
};
|
||
|
||
var lowerBoundExclusive = exports.lowerBoundExclusive =
|
||
function (range) {
|
||
return !lowerBoundInclusive(range)
|
||
};
|
||
|
||
var upperBoundExclusive = exports.upperBoundExclusive =
|
||
function (range) {
|
||
return !upperBoundInclusive(range)
|
||
};
|
||
|
||
var upperBoundKey = exports.upperBoundKey = function (range) {
|
||
return (
|
||
hasKey(range, 'lt')
|
||
|| hasKey(range, 'lte')
|
||
|| hasKey(range, 'max')
|
||
|| (range.reverse ? hasKey(range, 'start') : hasKey(range, 'end'))
|
||
|| undefined
|
||
)
|
||
};
|
||
|
||
var upperBound = exports.upperBound = function (range, def) {
|
||
var k = upperBoundKey(range);
|
||
return k ? range[k] : def
|
||
};
|
||
|
||
exports.start = function (range, def) {
|
||
return range.reverse ? upperBound(range, def) : lowerBound(range, def)
|
||
};
|
||
exports.end = function (range, def) {
|
||
return range.reverse ? lowerBound(range, def) : upperBound(range, def)
|
||
};
|
||
exports.startInclusive = function (range) {
|
||
return (
|
||
range.reverse
|
||
? upperBoundInclusive(range)
|
||
: lowerBoundInclusive(range)
|
||
)
|
||
};
|
||
exports.endInclusive = function (range) {
|
||
return (
|
||
range.reverse
|
||
? lowerBoundInclusive(range)
|
||
: upperBoundInclusive(range)
|
||
)
|
||
};
|
||
|
||
function id (e) { return e }
|
||
|
||
exports.toLtgt = function (range, _range, map, lower, upper) {
|
||
_range = _range || {};
|
||
map = map || id;
|
||
var defaults = arguments.length > 3;
|
||
var lb = exports.lowerBoundKey(range);
|
||
var ub = exports.upperBoundKey(range);
|
||
if(lb) {
|
||
if(lb === 'gt') _range.gt = map(range.gt, false);
|
||
else _range.gte = map(range[lb], false);
|
||
}
|
||
else if(defaults)
|
||
_range.gte = map(lower, false);
|
||
|
||
if(ub) {
|
||
if(ub === 'lt') _range.lt = map(range.lt, true);
|
||
else _range.lte = map(range[ub], true);
|
||
}
|
||
else if(defaults)
|
||
_range.lte = map(upper, true);
|
||
|
||
if(range.reverse != null)
|
||
_range.reverse = !!range.reverse;
|
||
|
||
//if range was used mutably
|
||
//(in level-sublevel it's part of an options object
|
||
//that has more properties on it.)
|
||
if(has(_range, 'max')) delete _range.max;
|
||
if(has(_range, 'min')) delete _range.min;
|
||
if(has(_range, 'start')) delete _range.start;
|
||
if(has(_range, 'end')) delete _range.end;
|
||
|
||
return _range
|
||
};
|
||
|
||
exports.contains = function (range, key, compare) {
|
||
compare = compare || exports.compare;
|
||
|
||
var lb = lowerBound(range);
|
||
if(isDef(lb)) {
|
||
var cmp = compare(key, lb);
|
||
if(cmp < 0 || (cmp === 0 && lowerBoundExclusive(range)))
|
||
return false
|
||
}
|
||
|
||
var ub = upperBound(range);
|
||
if(isDef(ub)) {
|
||
var cmp = compare(key, ub);
|
||
if(cmp > 0 || (cmp === 0) && upperBoundExclusive(range))
|
||
return false
|
||
}
|
||
|
||
return true
|
||
};
|
||
|
||
exports.filter = function (range, compare) {
|
||
return function (key) {
|
||
return exports.contains(range, key, compare)
|
||
}
|
||
};
|
||
});
|
||
|
||
// Sentinel for "no bound supplied" (distinct from any user value).
var NONE = {};

/**
 * Translate level range options into an IDBKeyRange (or null when the
 * range is unbounded on both sides).
 *
 * @param {Object} options level-style range options
 * @returns {IDBKeyRange|null}
 */
var keyRange = function createKeyRange (options) {
  var lower = ltgt.lowerBound(options, NONE);
  var upper = ltgt.upperBound(options, NONE);
  var lowerOpen = ltgt.lowerBoundExclusive(options, NONE);
  var upperOpen = ltgt.upperBoundExclusive(options, NONE);

  if (lower !== NONE && upper !== NONE) {
    return IDBKeyRange.bound(lower, upper, lowerOpen, upperOpen);
  }
  if (lower !== NONE) {
    return IDBKeyRange.lowerBound(lower, lowerOpen);
  }
  if (upper !== NONE) {
    return IDBKeyRange.upperBound(upper, upperOpen);
  }
  return null;
};
var Buffer$1 = buffer.Buffer;

// Decode a Uint8Array to a utf-8 string, via TextDecoder when available.
var ta2str = (function () {
  if (commonjsGlobal.TextDecoder) {
    var decoder = new TextDecoder('utf-8');
    return decoder.decode.bind(decoder);
  } else {
    return function ta2str (ta) {
      return ta2buf(ta).toString();
    };
  }
})();

// Decode an ArrayBuffer to a utf-8 string, via TextDecoder when available.
var ab2str = (function () {
  if (commonjsGlobal.TextDecoder) {
    var decoder = new TextDecoder('utf-8');
    return decoder.decode.bind(decoder);
  } else {
    return function ab2str (ab) {
      return Buffer$1.from(ab).toString();
    };
  }
})();

// Zero-copy view of a typed array as a Buffer, honouring offset/length
// when the typed array is a partial view over its ArrayBuffer.
function ta2buf (ta) {
  var buf = Buffer$1.from(ta.buffer);

  if (ta.byteLength === ta.buffer.byteLength) {
    return buf;
  } else {
    return buf.slice(ta.byteOffset, ta.byteOffset + ta.byteLength);
  }
}

/**
 * Deserialize a value read back from IndexedDB as either a Buffer or a
 * string, depending on `asBuffer`.
 *
 * @param {*} data raw stored value
 * @param {boolean} asBuffer request binary output
 * @returns {Buffer|string}
 */
var deserialize = function (data, asBuffer) {
  if (data instanceof Uint8Array) {
    return asBuffer ? ta2buf(data) : ta2str(data);
  } else if (data instanceof ArrayBuffer) {
    return asBuffer ? Buffer$1.from(data) : ab2str(data);
  } else {
    return asBuffer ? Buffer$1.from(String(data)) : String(data);
  }
};
var AbstractIterator = abstractLeveldown.AbstractIterator;

var noop = function () {};

var iterator = Iterator;

/**
 * level-js iterator: drives an IndexedDB cursor and buffers entries in
 * `_cache` until `_next` consumes them.
 *
 * @param {Level} db level-js instance
 * @param {string} location object store name
 * @param {Object} options iterator options
 */
function Iterator (db, location, options) {
  AbstractIterator.call(this, db);

  this._limit = options.limit;
  this._count = 0;
  this._callback = null;
  this._cache = [];
  this._completed = false;
  this._aborted = false;
  this._error = null;
  this._transaction = null;

  this._keys = options.keys;
  this._values = options.values;
  this._keyAsBuffer = options.keyAsBuffer;
  this._valueAsBuffer = options.valueAsBuffer;

  // limit === 0 means "yield nothing"; don't even open a cursor.
  if (this._limit === 0) {
    this._completed = true;
    return;
  }

  try {
    var keyRange$1 = keyRange(options);
  } catch (e) {
    // The lower key is greater than the upper key.
    // IndexedDB throws an error, but we'll just return 0 results.
    this._completed = true;
    return;
  }

  this.createIterator(location, keyRange$1, options.reverse);
}

inherits_browser(Iterator, AbstractIterator);

// Open a readonly transaction + cursor and wire up lifecycle callbacks.
Iterator.prototype.createIterator = function (location, keyRange, reverse) {
  var self = this;
  var transaction = this.db.db.transaction([location], 'readonly');
  var store = transaction.objectStore(location);
  var req = store.openCursor(keyRange, reverse ? 'prev' : 'next');

  req.onsuccess = function (ev) {
    var cursor = ev.target.result;
    if (cursor) self.onItem(cursor);
  };

  this._transaction = transaction;

  // If an error occurs (on the request), the transaction will abort.
  transaction.onabort = function () {
    self.onAbort(self._transaction.error || new Error('aborted by user'));
  };

  transaction.oncomplete = function () {
    self.onComplete();
  };
};

// Buffer the entry; keep the cursor moving until the limit is reached.
Iterator.prototype.onItem = function (cursor) {
  this._cache.push(cursor.key, cursor.value);

  if (this._limit <= 0 || ++this._count < this._limit) {
    cursor.continue();
  }

  this.maybeNext();
};

Iterator.prototype.onAbort = function (err) {
  this._aborted = true;
  this._error = err;
  this.maybeNext();
};

Iterator.prototype.onComplete = function () {
  this._completed = true;
  this.maybeNext();
};

// Fulfil a pending _next() callback, if one is waiting for data.
Iterator.prototype.maybeNext = function () {
  if (this._callback) {
    this._next(this._callback);
    this._callback = null;
  }
};

Iterator.prototype._next = function (callback) {
  if (this._aborted) {
    // The error should be picked up by either next() or end().
    var err = this._error;
    this._error = null;
    this._nextTick(callback, err);
  } else if (this._cache.length > 0) {
    var key = this._cache.shift();
    var value = this._cache.shift();

    if (this._keys && key !== undefined) {
      key = this._deserializeKey(key, this._keyAsBuffer);
    } else {
      key = undefined;
    }

    if (this._values && value !== undefined) {
      value = this._deserializeValue(value, this._valueAsBuffer);
    } else {
      value = undefined;
    }

    this._nextTick(callback, null, key, value);
  } else if (this._completed) {
    this._nextTick(callback);
  } else {
    // Nothing buffered yet; park the callback for maybeNext().
    this._callback = callback;
  }
};

// Exposed for the v4 to v5 upgrade utility
Iterator.prototype._deserializeKey = deserialize;
Iterator.prototype._deserializeValue = deserialize;

Iterator.prototype._end = function (callback) {
  if (this._aborted || this._completed) {
    return this._nextTick(callback, this._error);
  }

  // Don't advance the cursor anymore, and the transaction will complete
  // on its own in the next tick. This approach is much cleaner than calling
  // transaction.abort() with its unpredictable event order.
  this.onItem = noop;
  this.onAbort = callback;
  this.onComplete = callback;
};
var Buffer = buffer.Buffer;

// Returns either a Uint8Array or Buffer (doesn't matter to
// IndexedDB, because Buffer is a subclass of Uint8Array)
var str2bin = (function () {
  if (commonjsGlobal.TextEncoder) {
    var encoder = new TextEncoder('utf-8');
    return encoder.encode.bind(encoder);
  } else {
    return Buffer.from;
  }
})();

/**
 * Serialize a key or value for storage in IndexedDB: binary data stays
 * binary when requested, everything else is stringified.
 *
 * @param {*} data input value
 * @param {boolean} asBuffer store as binary
 * @returns {Buffer|Uint8Array|string}
 */
var serialize = function (data, asBuffer) {
  if (asBuffer) {
    return Buffer.isBuffer(data) ? data : str2bin(String(data));
  }
  return String(data);
};
var support = createCommonjsModule(function (module, exports) {
|
||
|
||
var Buffer = buffer.Buffer;
|
||
|
||
exports.test = function (key) {
|
||
return function test (impl) {
|
||
try {
|
||
impl.cmp(key, 0);
|
||
return true
|
||
} catch (err) {
|
||
return false
|
||
}
|
||
}
|
||
};
|
||
|
||
// Detect binary key support (IndexedDB Second Edition)
|
||
exports.bufferKeys = exports.test(Buffer.alloc(0));
|
||
});
|
||
|
||
/**
 * Cursor-based delete of a key range, used when a `limit` is set (the
 * plain IDBObjectStore#delete(range) has no limit option).
 *
 * @param {Level} db level-js instance
 * @param {string} location object store name
 * @param {IDBKeyRange|null} keyRange range to delete
 * @param {Object} options { limit, reverse }
 * @param {Function} callback node-style callback
 */
var clear = function clear (db, location, keyRange, options, callback) {
  if (options.limit === 0) return db._nextTick(callback);

  var transaction = db.db.transaction([location], 'readwrite');
  var store = transaction.objectStore(location);
  var count = 0;

  transaction.oncomplete = function () {
    callback();
  };

  transaction.onabort = function () {
    callback(transaction.error || new Error('aborted by user'));
  };

  // A key cursor is faster (skips reading values) but not supported by IE
  var method = store.openKeyCursor ? 'openKeyCursor' : 'openCursor';
  var direction = options.reverse ? 'prev' : 'next';

  store[method](keyRange, direction).onsuccess = function (ev) {
    var cursor = ev.target.result;

    if (cursor) {
      // Wait for a request to complete before continuing, saving CPU.
      store.delete(cursor.key).onsuccess = function () {
        if (options.limit <= 0 || ++count < options.limit) {
          cursor.continue();
        }
      };
    }
  };
};
var levelJs = Level;

var AbstractLevelDOWN = abstractLeveldown.AbstractLevelDOWN;

var DEFAULT_PREFIX = 'level-js-';

/**
 * level-js: an abstract-leveldown store backed by IndexedDB. The
 * database name is `prefix + location`, with one object store named
 * `location`.
 *
 * Callable with or without `new`.
 *
 * @param {string} location object store / database name suffix
 * @param {Object} [opts] { prefix, version }
 */
function Level (location, opts) {
  if (!(this instanceof Level)) return new Level(location, opts);

  AbstractLevelDOWN.call(this, {
    bufferKeys: support.bufferKeys(indexedDB),
    snapshots: true,
    permanence: true,
    clear: true
  });

  opts = opts || {};

  if (typeof location !== 'string') {
    throw new Error('constructor requires a location string argument');
  }

  this.location = location;
  this.prefix = opts.prefix == null ? DEFAULT_PREFIX : opts.prefix;
  this.version = parseInt(opts.version || 1, 10);
}

inherits_browser(Level, AbstractLevelDOWN);

Level.prototype.type = 'level-js';

Level.prototype._open = function (options, callback) {
  var req = indexedDB.open(this.prefix + this.location, this.version);
  var self = this;

  req.onerror = function () {
    callback(req.error || new Error('unknown error'));
  };

  req.onsuccess = function () {
    self.db = req.result;
    callback();
  };

  // First open (or version bump): create the object store if missing.
  req.onupgradeneeded = function (ev) {
    var db = ev.target.result;

    if (!db.objectStoreNames.contains(self.location)) {
      db.createObjectStore(self.location);
    }
  };
};

// Open a fresh transaction and return its object store.
Level.prototype.store = function (mode) {
  var transaction = this.db.transaction([this.location], mode);
  return transaction.objectStore(this.location);
};

// Adapt an IDBRequest to a node-style callback via its transaction.
Level.prototype.await = function (request, callback) {
  var transaction = request.transaction;

  // Take advantage of the fact that a non-canceled request error aborts
  // the transaction. I.e. no need to listen for "request.onerror".
  transaction.onabort = function () {
    callback(transaction.error || new Error('aborted by user'));
  };

  transaction.oncomplete = function () {
    callback(null, request.result);
  };
};

Level.prototype._get = function (key, options, callback) {
  var store = this.store('readonly');

  try {
    var req = store.get(key);
  } catch (err) {
    return this._nextTick(callback, err);
  }

  this.await(req, function (err, value) {
    if (err) return callback(err);

    if (value === undefined) {
      // 'NotFound' error, consistent with LevelDOWN API
      return callback(new Error('NotFound'));
    }

    callback(null, deserialize(value, options.asBuffer));
  });
};

Level.prototype._del = function (key, options, callback) {
  var store = this.store('readwrite');

  try {
    var req = store.delete(key);
  } catch (err) {
    return this._nextTick(callback, err);
  }

  this.await(req, callback);
};

Level.prototype._put = function (key, value, options, callback) {
  var store = this.store('readwrite');

  try {
    // Will throw a DataError or DataCloneError if the environment
    // does not support serializing the key or value respectively.
    var req = store.put(value, key);
  } catch (err) {
    return this._nextTick(callback, err);
  }

  this.await(req, callback);
};

Level.prototype._serializeKey = function (key) {
  return serialize(key, this.supports.bufferKeys);
};

Level.prototype._serializeValue = function (value) {
  return serialize(value, true);
};

Level.prototype._iterator = function (options) {
  return new iterator(this, this.location, options);
};

Level.prototype._batch = function (operations, options, callback) {
  if (operations.length === 0) return this._nextTick(callback);

  var store = this.store('readwrite');
  var transaction = store.transaction;
  var index = 0;
  var error;

  transaction.onabort = function () {
    callback(error || transaction.error || new Error('aborted by user'));
  };

  transaction.oncomplete = function () {
    callback();
  };

  // Wait for a request to complete before making the next, saving CPU.
  function loop () {
    var op = operations[index++];
    var key = op.key;

    try {
      var req = op.type === 'del' ? store.delete(key) : store.put(op.value, key);
    } catch (err) {
      error = err;
      transaction.abort();
      return;
    }

    if (index < operations.length) {
      req.onsuccess = loop;
    }
  }

  loop();
};

Level.prototype._clear = function (options, callback) {
  try {
    var keyRange$1 = keyRange(options);
  } catch (e) {
    // The lower key is greater than the upper key.
    // IndexedDB throws an error, but we'll just do nothing.
    return this._nextTick(callback);
  }

  if (options.limit >= 0) {
    // IDBObjectStore#delete(range) doesn't have such an option.
    // Fall back to cursor-based implementation.
    return clear(this, this.location, keyRange$1, options, callback);
  }

  try {
    var store = this.store('readwrite');
    var req = keyRange$1 ? store.delete(keyRange$1) : store.clear();
  } catch (err) {
    return this._nextTick(callback, err);
  }

  this.await(req, callback);
};

Level.prototype._close = function (callback) {
  this.db.close();
  this._nextTick(callback);
};

// NOTE: remove in a next major release
// Rewrites every entry with freshly-serialized keys/values (v4 -> v5
// migration helper).
Level.prototype.upgrade = function (callback) {
  if (this.status !== 'open') {
    return this._nextTick(callback, new Error('cannot upgrade() before open()'));
  }

  var it = this.iterator();
  var batchOptions = {};
  var self = this;

  it._deserializeKey = it._deserializeValue = identity;
  next();

  function next (err) {
    if (err) return finish(err);
    it.next(each);
  }

  function each (err, key, value) {
    if (err || key === undefined) {
      return finish(err);
    }

    var newKey = self._serializeKey(deserialize(key, true));
    var newValue = self._serializeValue(deserialize(value, true));

    // To bypass serialization on the old key, use _batch() instead of batch().
    // NOTE: if we disable snapshotting (#86) this could lead to a loop of
    // inserting and then iterating those same entries, because the new keys
    // possibly sort after the old keys.
    self._batch([
      { type: 'del', key: key },
      { type: 'put', key: newKey, value: newValue }
    ], batchOptions, next);
  }

  function finish (err) {
    it.end(function (err2) {
      callback(err || err2);
    });
  }

  function identity (data) {
    return data;
  }
};

/**
 * Delete the backing IndexedDB database.
 *
 * @param {string} location database name suffix
 * @param {string|Function} [prefix] name prefix, or the callback
 * @param {Function} callback node-style callback
 */
Level.destroy = function (location, prefix, callback) {
  if (typeof prefix === 'function') {
    callback = prefix;
    prefix = DEFAULT_PREFIX;
  }
  var request = indexedDB.deleteDatabase(prefix + location);
  request.onsuccess = function () {
    callback();
  };
  request.onerror = function (err) {
    callback(err);
  };
};

var browser = levelPackager(levelJs);
// y-leveldb: merge stored updates once this many have accumulated.
const PREFERRED_TRIM_SIZE = 500;

// Tags used by the key encoding to distinguish component types.
const YEncodingString = 0;
const YEncodingUint32 = 1;

// Values are stored as raw binary; encode/decode are identity.
const valueEncoding = {
  buffer: true,
  type: 'y-value',
  encode: /** @param {any} data */ data => data,
  decode: /** @param {any} data */ data => data
};

/**
 * Write four bytes as an unsigned integer in big endian order
 * (most significant byte first).
 *
 * @function
 * @param {encoding.Encoder} encoder
 * @param {number} num The number that is to be encoded.
 */
const writeUint32BigEndian = (encoder, num) => {
  for (let i = 3; i >= 0; i--) {
    write(encoder, (num >>> (8 * i)) & BITS8);
  }
};
/**
 * Read 4 bytes as unsigned integer in big endian order
 * (most significant byte first).
 *
 * @todo use lib0/decoding instead
 *
 * @function
 * @param {decoding.Decoder} decoder
 * @return {number} An unsigned integer.
 */
const readUint32BigEndian = decoder => {
  const { arr, pos } = decoder;
  // >>> 0 forces an unsigned 32-bit result.
  const uint =
    (arr[pos + 3] +
      (arr[pos + 2] << 8) +
      (arr[pos + 1] << 16) +
      (arr[pos] << 24)) >>> 0;
  decoder.pos += 4;
  return uint;
};
// Encodes y-leveldb keys (arrays of strings and uint32s) into
// length-preserving binary buffers, and back.
const keyEncoding = {
  buffer: true,
  type: 'y-keys',
  /* istanbul ignore next */
  encode: /** @param {Array<string|number>} arr */ arr => {
    const encoder = createEncoder();
    // Each component is tagged with its type before its payload.
    for (let i = 0; i < arr.length; i++) {
      const v = arr[i];
      if (typeof v === 'string') {
        writeUint8(encoder, YEncodingString);
        writeVarString(encoder, v);
      } else /* istanbul ignore else */ if (typeof v === 'number') {
        writeUint8(encoder, YEncodingUint32);
        writeUint32BigEndian(encoder, v);
      } else {
        throw new Error('Unexpected key value');
      }
    }
    return buffer.Buffer.from(toUint8Array(encoder));
  },
  decode: /** @param {Uint8Array} buf */ buf => {
    const decoder = createDecoder(buf);
    const key = [];
    while (hasContent(decoder)) {
      switch (readUint8(decoder)) {
        case YEncodingString:
          key.push(readVarString(decoder));
          break;
        case YEncodingUint32:
          key.push(readUint32BigEndian(decoder));
          break;
      }
    }
    return key;
  }
};
/**
 * level returns an error if a value is not found.
 *
 * This helper method for level returns `null` instead if the key is not found.
 *
 * @param {any} db
 * @param {any} key
 */
const levelGet = async (db, key) => {
  try {
    return await db.get(key);
  } catch (err) {
    /* istanbul ignore else */
    if (err.notFound) {
      return null;
    }
    throw err;
  }
};
/**
 * Level expects a Buffer, but in Yjs we typically work with Uint8Arrays.
 *
 * Since Level thinks that these are two entirely different things,
 * we transform the Uint8array to a Buffer before storing it.
 *
 * @param {any} db
 * @param {any} key
 * @param {Uint8Array} val
 */
const levelPut = async (db, key, val) => db.put(key, buffer.Buffer.from(val));

/**
 * A "bulkier" implementation of level streams. Returns the result in one flush.
 *
 * @param {any} db
 * @param {object} opts
 * @return {Promise<Array<any>>}
 */
const getLevelBulkData = (db, opts) => create$3((resolve, reject) => {
  /**
   * @type {Array<any>} result
   */
  const result = [];
  db.createReadStream(opts)
    .on('data', /** @param {any} data */ data => result.push(data))
    .on('end', () => resolve(result))
    .on('error', reject);
});

/**
 * Get all document updates for a specific document.
 *
 * @param {any} db
 * @param {string} docName
 * @param {any} [opts]
 * @return {Promise<Array<Buffer>>}
 */
const getLevelUpdates = (db, docName, opts = { values: true, keys: false }) =>
  getLevelBulkData(db, {
    gte: createDocumentUpdateKey(docName, 0),
    lt: createDocumentUpdateKey(docName, BITS32),
    ...opts
  });
/**
 * Get state-vector entries of all stored documents.
 *
 * @param {any} db
 * @param {boolean} values
 * @param {boolean} keys
 * @return {Promise<Array<any>>}
 */
const getAllDocs = (db, values, keys) => getLevelBulkData(db, {
  gte: ['v1_sv'],
  lt: ['v1_sw'],
  keys,
  values
});

/**
 * @param {any} db
 * @param {string} docName
 * @return {Promise<number>} Returns -1 if this document doesn't exist yet
 */
const getCurrentUpdateClock = (db, docName) =>
  getLevelUpdates(db, docName, { keys: true, values: false, reverse: true, limit: 1 })
    .then(keys => {
      if (keys.length === 0) {
        return -1;
      }
      // Key layout is ['v1', docName, 'update', clock]; index 3 is the clock.
      return keys[0][3];
    });

/**
 * Delete all entries in [gte, lt), using the store's native clear when
 * available and a batched delete otherwise.
 *
 * @param {any} db
 * @param {Array<string|number>} gte Greater than or equal
 * @param {Array<string|number>} lt lower than (not equal)
 * @return {Promise<void>}
 */
const clearRange = async (db, gte, lt) => {
  /* istanbul ignore else */
  if (db.supports.clear) {
    await db.clear({ gte, lt });
  } else {
    const keys = await getLevelBulkData(db, { values: false, keys: true, gte, lt });
    await db.batch(keys.map(key => ({ type: 'del', key })));
  }
};

/**
 * Delete the updates of a document in the clock range [from, to).
 *
 * @param {any} db
 * @param {string} docName
 * @param {number} from Greater than or equal
 * @param {number} to lower than (not equal)
 * @return {Promise<void>}
 */
const clearUpdatesRange = async (db, docName, from, to) =>
  clearRange(db, createDocumentUpdateKey(docName, from), createDocumentUpdateKey(docName, to));
/**
|
||
* Create a unique key for a update message.
|
||
* We encode the result using `keyEncoding` which expects an array.
|
||
*
|
||
* @param {string} docName
|
||
* @param {number} clock must be unique
|
||
* @return {Array<string|number>}
|
||
*/
|
||
const createDocumentUpdateKey = (docName, clock) => ['v1', docName, 'update', clock];
|
||
|
||
/**
|
||
* @param {string} docName
|
||
* @param {string} metaKey
|
||
*/
|
||
const createDocumentMetaKey = (docName, metaKey) => ['v1', docName, 'meta', metaKey];
|
||
|
||
/**
|
||
* @param {string} docName
|
||
*/
|
||
const createDocumentMetaEndKey = (docName) => ['v1', docName, 'metb']; // simple trick
|
||
|
||
/**
 * We have a separate state vector key so we can iterate efficiently over all documents.
 *
 * @param {string} docName
 * @return {Array<string>}
 */
const createDocumentStateVectorKey = (docName) => {
  return ['v1_sv', docName]
};
|
||
|
||
/**
 * Inclusive lower bound of all keys belonging to a document.
 *
 * @param {string} docName
 * @return {Array<string>}
 */
const createDocumentFirstKey = (docName) => {
  return ['v1', docName]
};
|
||
|
||
/**
 * We use this key as the upper limit of all keys that can be written.
 * Make sure that all document keys are smaller! Strings are encoded using varLength string encoding,
 * so we need to make sure that this key has the biggest size!
 *
 * @param {string} docName
 * @return {Array<string>}
 */
const createDocumentLastKey = (docName) => {
  return ['v1', docName, 'zzzzzzz']
};
|
||
|
||
// const emptyStateVector = (() => Y.encodeStateVector(new Y.Doc()))()
|
||
|
||
/**
 * For now this is a helper method that creates a Y.Doc and then re-encodes a document update.
 * In the future this will be handled by Yjs without creating a Y.Doc (constant memory consumption).
 *
 * @param {Array<Uint8Array>} updates
 * @return {{update:Uint8Array, sv: Uint8Array}}
 */
const mergeUpdates = (updates) => {
  const ydoc = new Doc();
  // Apply every incremental update inside a single transaction.
  ydoc.transact(() => {
    for (const u of updates) {
      applyUpdate(ydoc, u);
    }
  });
  return { update: encodeStateAsUpdate(ydoc), sv: encodeStateVector(ydoc) }
};
|
||
|
||
/**
 * Persist a document's state vector together with the clock it was created at.
 *
 * @param {any} db
 * @param {string} docName
 * @param {Uint8Array} sv state vector
 * @param {number} clock current clock of the document so we can determine when this statevector was created
 */
const writeStateVector = async (db, docName, sv, clock) => {
  // Encoding order matters: clock first, then sv —
  // decodeLeveldbStateVector reads them back in the same order.
  const enc = createEncoder();
  writeVarUint(enc, clock);
  writeVarUint8Array(enc, sv);
  await levelPut(db, createDocumentStateVectorKey(docName), toUint8Array(enc));
};
|
||
|
||
/**
 * Decode a buffer produced by writeStateVector.
 *
 * @param {Uint8Array} buf
 * @return {{ sv: Uint8Array, clock: number }}
 */
const decodeLeveldbStateVector = buf => {
  const d = createDecoder(buf);
  // The clock was written first, so it must be read first.
  const clock = readVarUint(d);
  return { sv: readVarUint8Array(d), clock }
};
|
||
|
||
/**
 * Read the persisted state vector of a document.
 *
 * @param {any} db
 * @param {string} docName
 * @return {Promise<{ sv: Uint8Array?, clock: number }>}
 */
const readStateVector$1 = async (db, docName) => {
  const buf = await levelGet(db, createDocumentStateVectorKey(docName));
  // null ⇒ no state vector created yet or no document exists
  return buf === null ? { sv: null, clock: -1 } : decodeLeveldbStateVector(buf)
};
|
||
|
||
/**
 * Compact a document: store the merged state as a single update, persist the
 * matching state vector, then delete the now-redundant incremental updates.
 *
 * @param {any} db
 * @param {string} docName
 * @param {Uint8Array} stateAsUpdate
 * @param {Uint8Array} stateVector
 * @return {Promise<number>} returns the clock of the flushed doc
 */
const flushDocument = async (db, docName, stateAsUpdate, stateVector) => {
  const clock = await storeUpdate(db, docName, stateAsUpdate);
  await writeStateVector(db, docName, stateVector, clock);
  // Drop all updates older than the merged one. (An earlier comment claimed
  // this was "intentionally not awaited", but the call IS awaited here.)
  await clearUpdatesRange(db, docName, 0, clock);
  return clock
};
|
||
|
||
/**
 * Append a raw update for a document under the next free clock value.
 *
 * @param {any} db
 * @param {string} docName
 * @param {Uint8Array} update
 * @return {Promise<number>} Returns the clock of the stored update
 */
const storeUpdate = async (db, docName, update) => {
  const clock = await getCurrentUpdateClock(db, docName);
  if (clock === -1) {
    // make sure that a state vector is always written, so we can search for available documents
    const ydoc = new Doc();
    applyUpdate(ydoc, update);
    const sv = encodeStateVector(ydoc);
    await writeStateVector(db, docName, sv, 0);
  }
  // Store the raw update bytes under the next clock value.
  await levelPut(db, createDocumentUpdateKey(docName, clock + 1), update);
  return clock + 1
};
|
||
|
||
/**
 * Persistence adapter that stores Yjs documents in a level(-compatible)
 * key-value store. All public methods are serialized through an internal
 * transaction queue (`this.tr`), so database accesses never interleave.
 */
class LeveldbPersistence {
  /**
   * @param {string} location
   * @param {object} [opts]
   * @param {any} [opts.level] Level-compatible adapter. E.g. leveldown, level-rem, level-indexeddb. Defaults to `level`
   * @param {object} [opts.levelOptions] Options that are passed down to the level instance
   */
  constructor (location, /* istanbul ignore next */ { level = browser, levelOptions = {} } = {}) {
    const db = level(location, { ...levelOptions, valueEncoding, keyEncoding });
    // Tail of the transaction queue; every _transact() call chains onto it.
    this.tr = resolve();
    /**
     * Execute an transaction on a database. This will ensure that other processes are currently not writing.
     *
     * This is a private method and might change in the future.
     *
     * @todo only transact on the same room-name. Allow for concurrency of different rooms.
     *
     * @template T
     *
     * @param {function(any):Promise<T>} f A transaction that receives the db object
     * @return {Promise<T>}
     */
    this._transact = f => {
      const currTr = this.tr;
      this.tr = (async () => {
        await currTr;
        let res = /** @type {any} */ (null);
        try {
          res = await f(db);
        } catch (err) {
          /* istanbul ignore next */
          // Best-effort semantics: a failed transaction is logged and resolves
          // to null so the queue keeps draining. NOTE(review): callers cannot
          // observe the failure other than by receiving null.
          console.warn('Error during y-leveldb transaction', err);
        }
        return res
      })();
      return this.tr
    };
  }

  /**
   * Merge all persisted updates of a document into a single update + state
   * vector and delete the superseded incremental updates.
   *
   * @param {string} docName
   */
  flushDocument (docName) {
    return this._transact(async db => {
      const updates = await getLevelUpdates(db, docName);
      const { update, sv } = mergeUpdates(updates);
      await flushDocument(db, docName, update, sv);
    })
  }

  /**
   * Load a document by applying all stored updates to a fresh Y.Doc.
   * Compacts the document as a side effect when more than
   * PREFERRED_TRIM_SIZE incremental updates have accumulated.
   *
   * @param {string} docName
   * @return {Promise<Y.Doc>}
   */
  getYDoc (docName) {
    return this._transact(async db => {
      const updates = await getLevelUpdates(db, docName);
      const ydoc = new Doc();
      ydoc.transact(() => {
        for (let i = 0; i < updates.length; i++) {
          applyUpdate(ydoc, updates[i]);
        }
      });
      if (updates.length > PREFERRED_TRIM_SIZE) {
        await flushDocument(db, docName, encodeStateAsUpdate(ydoc), encodeStateVector(ydoc));
      }
      return ydoc
    })
  }

  /**
   * Read the document's state vector, recomputing (and flushing) it when the
   * stored one is older than the newest stored update.
   *
   * @param {string} docName
   * @return {Promise<Uint8Array>}
   */
  getStateVector (docName) {
    return this._transact(async db => {
      const { clock, sv } = await readStateVector$1(db, docName);
      let curClock = -1;
      if (sv !== null) {
        curClock = await getCurrentUpdateClock(db, docName);
      }
      if (sv !== null && clock === curClock) {
        return sv
      } else {
        // current state vector is outdated — recompute it from all updates.
        // Note: the destructured `sv` below intentionally shadows the stale
        // outer `sv`; the destructured `update` is consumed by flushDocument.
        const updates = await getLevelUpdates(db, docName);
        const { update, sv } = mergeUpdates(updates);
        await flushDocument(db, docName, update, sv);
        return sv
      }
    })
  }

  /**
   * @param {string} docName
   * @param {Uint8Array} update
   * @return {Promise<number>} Returns the clock of the stored update
   */
  storeUpdate (docName, update) {
    return this._transact(db => storeUpdate(db, docName, update))
  }

  /**
   * Encode the difference between the stored document and a remote state
   * vector as a Yjs update.
   *
   * @param {string} docName
   * @param {Uint8Array} stateVector
   */
  async getDiff (docName, stateVector) {
    const ydoc = await this.getYDoc(docName);
    return encodeStateAsUpdate(ydoc, stateVector)
  }

  /**
   * Delete a document and all of its updates/meta entries.
   *
   * @param {string} docName
   * @return {Promise<void>}
   */
  clearDocument (docName) {
    return this._transact(async db => {
      await db.del(createDocumentStateVectorKey(docName));
      await clearRange(db, createDocumentFirstKey(docName), createDocumentLastKey(docName));
    })
  }

  /**
   * Store an arbitrary (lib0-encodable) meta value for a document.
   *
   * @param {string} docName
   * @param {string} metaKey
   * @param {any} value
   * @return {Promise<void>}
   */
  setMeta (docName, metaKey, value) {
    return this._transact(db => levelPut(db, createDocumentMetaKey(docName, metaKey), encodeAny(value)))
  }

  /**
   * Delete a single meta entry of a document.
   *
   * @param {string} docName
   * @param {string} metaKey
   * @return {Promise<any>}
   */
  delMeta (docName, metaKey) {
    return this._transact(db => db.del(createDocumentMetaKey(docName, metaKey)))
  }

  /**
   * Read a single meta entry; resolves to undefined when it does not exist.
   *
   * @param {string} docName
   * @param {string} metaKey
   * @return {Promise<any>}
   */
  getMeta (docName, metaKey) {
    return this._transact(async db => {
      const res = await levelGet(db, createDocumentMetaKey(docName, metaKey));
      if (res === null) {
        return // void — meta key does not exist
      }
      return decodeAny(res)
    })
  }

  /**
   * @return {Promise<Array<string>>}
   */
  getAllDocNames () {
    return this._transact(async db => {
      const docKeys = await getAllDocs(db, false, true);
      // State-vector keys have the shape ['v1_sv', docName] — index 1 is the name.
      return docKeys.map(key => key[1])
    })
  }

  /**
   * @return {Promise<Array<{ name: string, sv: Uint8Array, clock: number }>>}
   */
  getAllDocStateVectors () {
    return this._transact(async db => {
      const docs = /** @type {any} */ (await getAllDocs(db, true, true));
      return docs.map(doc => {
        const { sv, clock } = decodeLeveldbStateVector(doc.value);
        return { name: doc.key[1], sv, clock }
      })
    })
  }

  /**
   * Read all meta entries of a document into a Map keyed by meta key.
   *
   * @param {string} docName
   * @return {Promise<Map<string, any>>}
   */
  getMetas (docName) {
    return this._transact(async db => {
      const data = await getLevelBulkData(db, {
        gte: createDocumentMetaKey(docName, ''),
        lt: createDocumentMetaEndKey(docName),
        keys: true,
        values: true
      });
      const metas = new Map();
      // Meta keys have the shape ['v1', docName, 'meta', metaKey] — index 3 is metaKey.
      data.forEach(v => { metas.set(v.key[3], decodeAny(v.value)); });
      return metas
    })
  }

  /**
   * Close connection to a leveldb database and discard all state and bindings
   *
   * @return {Promise<void>}
   */
  destroy () {
    return this._transact(db => db.close())
  }

  /**
   * Delete all data in database.
   */
  clearAll () {
    return this._transact(async db => db.clear())
  }
}
|
||
|
||
/**
|
||
* @module prng
|
||
*/
|
||
|
||
/**
 * Xorshift32 is a very simple but elegant PRNG with a period of `2^32-1`.
 */
class Xorshift32 {
  /**
   * @param {number} seed Unsigned 32 bit number
   */
  constructor (seed) {
    this.seed = seed;
    /**
     * Current internal state; mutated by every call to next().
     * @type {number}
     */
    this._state = seed;
  }

  /**
   * Generate the next pseudo-random number.
   *
   * @return {Number} Float in [0, 1) — the 32-bit state reinterpreted as
   *   unsigned and scaled by 1 / 2^32. (The previous doc claimed a signed
   *   integer was returned; the code divides by BITS32 + 1.)
   */
  next () {
    let x = this._state;
    x ^= x << 13;
    // NOTE(review): signed >> here, whereas canonical xorshift32 uses >>>.
    // Kept as-is — changing it would alter every generated sequence.
    x ^= x >> 17;
    x ^= x << 5;
    this._state = x;
    return (x >>> 0) / (BITS32 + 1)
  }
}
|
||
|
||
/**
|
||
* @module prng
|
||
*/
|
||
|
||
/**
 * This is a variant of xoroshiro128plus - the fastest full-period generator passing BigCrush without systematic failures.
 *
 * This implementation follows the idea of the original xoroshiro128plus implementation,
 * but is optimized for the JavaScript runtime. I.e.
 * * The operations are performed on 32bit integers (the original implementation works with 64bit values).
 * * The initial 128bit state is computed based on a 32bit seed and Xorshift32.
 * * This implementation returns two 32bit values based on the 64bit value that is computed by xoroshiro128plus.
 * Caution: The last addition step works slightly different than in the original implementation - the add carry of the
 * first 32bit addition is not carried over to the last 32bit.
 *
 * [Reference implementation](http://vigna.di.unimi.it/xorshift/xoroshiro128plus.c)
 */
class Xoroshiro128plus {
  /**
   * @param {number} seed Unsigned 32 bit number
   */
  constructor (seed) {
    this.seed = seed;
    // This is a variant of Xoroshiro128plus to fill the initial state:
    // expand the 32-bit seed into a 4x32-bit (128-bit) state via Xorshift32.
    const xorshift32 = new Xorshift32(seed);
    this.state = new Uint32Array(4);
    for (let i = 0; i < 4; i++) {
      this.state[i] = xorshift32.next() * BITS32;
    }
    // Each xoroshiro step yields 64 bits = two 32-bit outputs; _fresh tracks
    // whether the currently-held pair still has an unused half.
    this._fresh = true;
  }

  /**
   * @return {number} Float/Double in [0,1)
   */
  next () {
    const state = this.state;
    if (this._fresh) {
      // First half of the pair: emit without advancing the state.
      this._fresh = false;
      return ((state[0] + state[2]) >>> 0) / (BITS32 + 1)
    } else {
      // Second half: advance the 128-bit state, then emit.
      this._fresh = true;
      const s0 = state[0];
      const s1 = state[1];
      const s2 = state[2] ^ s0;
      const s3 = state[3] ^ s1;
      // function js_rotl (x, k) {
      //   k = k - 32
      //   const x1 = x[0]
      //   const x2 = x[1]
      //   x[0] = x2 << k | x1 >>> (32 - k)
      //   x[1] = x1 << k | x2 >>> (32 - k)
      // }
      // rotl(s0, 55) // k = 23 = 55 - 32; j = 9 = 32 - 23
      state[0] = (s1 << 23 | s0 >>> 9) ^ s2 ^ (s2 << 14 | s3 >>> 18);
      state[1] = (s0 << 23 | s1 >>> 9) ^ s3 ^ (s3 << 14);
      // rol(s1, 36) // k = 4 = 36 - 32; j = 23 = 32 - 9
      state[2] = s3 << 4 | s2 >>> 28;
      state[3] = s2 << 4 | s3 >>> 28;
      return (((state[1] + state[3]) >>> 0) / (BITS32 + 1))
    }
  }
}
|
||
|
||
/*
|
||
// Reference implementation
|
||
// Source: http://vigna.di.unimi.it/xorshift/xoroshiro128plus.c
|
||
// By David Blackman and Sebastiano Vigna
|
||
// Who published the reference implementation under Public Domain (CC0)
|
||
|
||
#include <stdint.h>
|
||
#include <stdio.h>
|
||
|
||
uint64_t s[2];
|
||
|
||
static inline uint64_t rotl(const uint64_t x, int k) {
|
||
return (x << k) | (x >> (64 - k));
|
||
}
|
||
|
||
uint64_t next(void) {
|
||
const uint64_t s0 = s[0];
|
||
uint64_t s1 = s[1];
|
||
s1 ^= s0;
|
||
s[0] = rotl(s0, 55) ^ s1 ^ (s1 << 14); // a, b
|
||
s[1] = rotl(s1, 36); // c
|
||
return (s[0] + s[1]) & 0xFFFFFFFF;
|
||
}
|
||
|
||
int main(void)
|
||
{
|
||
int i;
|
||
s[0] = 1111 | (1337ul << 32);
|
||
s[1] = 1234 | (9999ul << 32);
|
||
|
||
printf("1000 outputs of genrand_int31()\n");
|
||
for (i=0; i<100; i++) {
|
||
printf("%10lu ", i);
|
||
printf("%10lu ", next());
|
||
printf("- %10lu ", s[0] >> 32);
|
||
printf("%10lu ", (s[0] << 32) >> 32);
|
||
printf("%10lu ", s[1] >> 32);
|
||
printf("%10lu ", (s[1] << 32) >> 32);
|
||
printf("\n");
|
||
// if (i%5==4) printf("\n");
|
||
}
|
||
return 0;
|
||
}
|
||
*/
|
||
|
||
/**
|
||
* Fast Pseudo Random Number Generators.
|
||
*
|
||
* Given a seed a PRNG generates a sequence of numbers that cannot be reasonably predicted.
|
||
* Two PRNGs must generate the same random sequence of numbers if given the same seed.
|
||
*
|
||
* @module prng
|
||
*/
|
||
|
||
/**
|
||
* Description of the function
|
||
* @callback generatorNext
|
||
* @return {number} A random float in the cange of [0,1)
|
||
*/
|
||
|
||
/**
|
||
* A random type generator.
|
||
*
|
||
* @typedef {Object} PRNG
|
||
* @property {generatorNext} next Generate new number
|
||
*/
|
||
|
||
// PRNG implementation used by create() below.
const DefaultPRNG = Xoroshiro128plus;

/**
 * Create a Xoroshiro128plus Pseudo-Random-Number-Generator.
 * This is the fastest full-period generator passing BigCrush without systematic failures.
 * But there are more PRNGs available in ./PRNG/.
 *
 * @param {number} seed A positive 32bit integer. Do not use negative numbers.
 * @return {PRNG}
 */
const create = seed => new DefaultPRNG(seed);
|
||
|
||
/**
|
||
* Utility helpers for generating statistics.
|
||
*
|
||
* @module statistics
|
||
*/
|
||
|
||
/**
 * Median of a sorted array of numbers.
 *
 * @param {Array<number>} arr Array of values (assumed sorted by the caller)
 * @return {number} Returns NaN if the array is empty
 */
const median = arr => {
  if (arr.length === 0) {
    return NaN
  }
  const mid = (arr.length - 1) / 2;
  if (arr.length % 2 === 1) {
    return arr[mid]
  }
  // Even length: average the two middle elements.
  return (arr[Math.floor(mid)] + arr[Math.ceil(mid)]) / 2
};
|
||
|
||
/**
 * Arithmetic mean of an array of numbers.
 *
 * @param {Array<number>} arr
 * @return {number} NaN for an empty array (0 / 0)
 */
const average = arr => {
  const total = arr.reduce((sum, value) => sum + value, 0);
  return total / arr.length
};
|
||
|
||
/**
|
||
* Testing framework with support for generating tests.
|
||
*
|
||
* ```js
|
||
* // test.js template for creating a test executable
|
||
* import { runTests } from 'lib0/testing'
|
||
* import * as log from 'lib0/logging'
|
||
* import * as mod1 from './mod1.test.js'
|
||
* import * as mod2 from './mod2.test.js'
|
||
|
||
* import { isBrowser, isNode } from 'lib0/environment.js'
|
||
*
|
||
* if (isBrowser) {
|
||
* // optional: if this is ran in the browser, attach a virtual console to the dom
|
||
* log.createVConsole(document.body)
|
||
* }
|
||
*
|
||
* runTests({
|
||
* mod1,
|
||
* mod2,
|
||
* }).then(success => {
|
||
* if (isNode) {
|
||
* process.exit(success ? 0 : 1)
|
||
* }
|
||
* })
|
||
* ```
|
||
*
|
||
* ```js
|
||
* // mod1.test.js
|
||
* /**
|
||
* * runTests automatically tests all exported functions that start with "test".
|
||
* * The name of the function should be in camelCase and is used for the logging output.
|
||
* *
|
||
* * @param {t.TestCase} tc
|
||
* *\/
|
||
* export const testMyFirstTest = tc => {
|
||
* t.compare({ a: 4 }, { a: 4 }, 'objects are equal')
|
||
* }
|
||
* ```
|
||
*
|
||
* Now you can simply run `node test.js` to run your test or run test.js in the browser.
|
||
*
|
||
* @module testing
|
||
*/
|
||
|
||
// Called for its side effect / flag lookup in lib0; the result is unused here.
hasConf('extensive');

/* istanbul ignore next */
// Seed supplied on the command line via `--seed <n>`; null means each TestCase
// lazily picks a random seed. Radix 10 is passed explicitly so a seed string
// is always parsed as decimal (parseInt without radix may interpret "0x…" as hex).
const envSeed = hasParam('--seed') ? Number.parseInt(getParam('--seed', '0'), 10) : null;
|
||
|
||
/**
 * Per-test context handed to every test function. Provides a reproducible
 * seed and PRNG so failing randomized tests can be re-run deterministically.
 */
class TestCase {
  /**
   * @param {string} moduleName
   * @param {string} testName
   */
  constructor (moduleName, testName) {
    /**
     * @type {string}
     */
    this.moduleName = moduleName;
    /**
     * @type {string}
     */
    this.testName = testName;
    // Seed and PRNG are created lazily by the getters below.
    this._seed = null;
    this._prng = null;
  }

  /**
   * Discard the cached seed and PRNG so a repeated run of a `repeat` test
   * gets fresh randomness.
   */
  resetSeed () {
    this._seed = null;
    this._prng = null;
  }

  /**
   * Seed for this test case: the `--seed` CLI value when given, otherwise a
   * random 32-bit unsigned integer (computed once and cached).
   *
   * @type {number}
   */
  /* istanbul ignore next */
  get seed () {
    /* istanbul ignore else */
    if (this._seed === null) {
      /* istanbul ignore next */
      this._seed = envSeed === null ? uint32() : envSeed;
    }
    return this._seed
  }

  /**
   * A PRNG for this test case. Use only this PRNG for randomness to make the test case reproducible.
   *
   * @type {prng.PRNG}
   */
  get prng () {
    /* istanbul ignore else */
    if (this._prng === null) {
      this._prng = create(this.seed);
    }
    return this._prng
  }
}
|
||
|
||
// Maximum wall-clock budget (ms) for re-running a `repeat …` test (see run()).
const repetitionTime = Number(getParam('--repetition-time', '50'));
/* istanbul ignore next */
// Optional regexp source from `--filter`; null runs every test.
const testFilter = hasParam('--filter') ? getParam('--filter', '') : null;

/* istanbul ignore next */
const testFilterRegExp = testFilter !== null ? new RegExp(testFilter) : new RegExp('.*');

// Tests whose uncamelized name starts with "repeat " / "repeating " are
// executed repeatedly until the repetitionTime budget is exhausted.
const repeatTestRegex = /^(repeat|repeating)\s/;
|
||
|
||
/**
 * Execute a single test function, measure it, and print a colored
 * Success/Failure/Skipped line. `repeat`-style tests are re-run until the
 * repetitionTime budget is used up and their timing stats are reported.
 *
 * @param {string} moduleName
 * @param {string} name Test function name (must start with "test"/"benchmark")
 * @param {function(TestCase):void|Promise<any>} f
 * @param {number} i Zero-based index of this test
 * @param {number} numberOfTests
 * @return {Promise<boolean>} true when the test passed, was skipped, or was filtered out
 */
const run = async (moduleName, name, f, i, numberOfTests) => {
  // name.slice(4) drops the "test" prefix before un-camelizing for display.
  const uncamelized = fromCamelCase(name.slice(4), ' ');
  const filtered = !testFilterRegExp.test(`[${i + 1}/${numberOfTests}] ${moduleName}: ${uncamelized}`);
  /* istanbul ignore if */
  if (filtered) {
    // Filtered-out tests count as successful.
    return true
  }
  const tc = new TestCase(moduleName, name);
  const repeat = repeatTestRegex.test(uncamelized);
  const groupArgs = [GREY, `[${i + 1}/${numberOfTests}] `, PURPLE, `${moduleName}: `, BLUE, uncamelized];
  /* istanbul ignore next */
  if (testFilter === null) {
    // No filter ⇒ collapse output groups to keep logs compact.
    groupCollapsed(...groupArgs);
  } else {
    group(...groupArgs);
  }
  const times = [];
  const start = performance.now();
  let lastTime = start;
  /**
   * First error thrown by the test body, if any.
   * @type {any}
   */
  let err = null;
  performance.mark(`${name}-start`);
  do {
    try {
      const p = f(tc);
      if (isPromise(p)) {
        await p;
      }
    } catch (_err) {
      err = _err;
    }
    const currTime = performance.now();
    times.push(currTime - lastTime);
    lastTime = currTime;
    if (repeat && err === null && (lastTime - start) < repetitionTime) {
      // Re-run with a fresh seed until the time budget is spent.
      tc.resetSeed();
    } else {
      break
    }
  } while (err === null && (lastTime - start) < repetitionTime)
  performance.mark(`${name}-end`);
  /* istanbul ignore if */
  if (err !== null && err.constructor !== SkipError) {
    printError(err);
  }
  performance.measure(name, `${name}-start`, `${name}-end`);
  groupEnd();
  const duration = lastTime - start;
  let success = true;
  times.sort((a, b) => a - b);
  /* istanbul ignore next */
  // Hint telling the user how to replay exactly this test (and seed).
  const againMessage = isBrowser
    ? ` - ${window.location.host + window.location.pathname}?filter=\\[${i + 1}/${tc._seed === null ? '' : `&seed=${tc._seed}`}`
    : `\nrepeat: npm run test -- --filter "\\[${i + 1}/" ${tc._seed === null ? '' : `--seed ${tc._seed}`}`;
  const timeInfo = (repeat && err === null)
    ? ` - ${times.length} repetitions in ${humanizeDuration(duration)} (best: ${humanizeDuration(times[0])}, worst: ${humanizeDuration(last(times))}, median: ${humanizeDuration(median(times))}, average: ${humanizeDuration(average(times))})`
    : ` in ${humanizeDuration(duration)}`;
  if (err !== null) {
    /* istanbul ignore else */
    if (err.constructor === SkipError) {
      // SkipError is not a failure — the test opted out.
      print(GREY, BOLD, 'Skipped: ', UNBOLD, uncamelized);
    } else {
      success = false;
      print(RED, BOLD, 'Failure: ', UNBOLD, UNCOLOR, uncamelized, GREY, timeInfo, againMessage);
    }
  } else {
    print(GREEN, BOLD, 'Success: ', UNBOLD, UNCOLOR, uncamelized, GREY, timeInfo, againMessage);
  }
  return success
};
|
||
|
||
/**
 * Assert that two arrays have the same length and strictly-equal elements.
 *
 * @template T
 * @param {Array<T>} as
 * @param {Array<T>} bs
 * @param {string} [m] Failure message
 * @return {boolean} Always true (fail() throws on the first mismatch)
 */
const compareArrays = (as, bs, m = 'Arrays match') => {
  if (as.length !== bs.length || !as.every((value, idx) => value === bs[idx])) {
    fail(m);
  }
  return true
};
|
||
|
||
/**
 * Strict-equality leaf comparison used by _compare.
 *
 * @param {any} constructor Unused here; present to match the customCompare signature
 * @param {any} a
 * @param {any} b
 * @param {string} path Location of the values inside the compared structure
 * @throws {TestError} When the values are not strictly equal
 * @return {boolean} Always true when it returns normally
 */
const compareValues = (constructor, a, b, path) => {
  if (a === b) {
    return true
  }
  fail(`Values ${stringify(a)} and ${stringify(b)} don't match (${path})`);
  return true
};
|
||
|
||
/**
 * Fail with `reason` and `path`, prefixing the optional user-supplied message.
 *
 * @param {string?} message
 * @param {string} reason
 * @param {string} path
 * @throws {TestError}
 */
const _failMessage = (message, reason, path) => {
  if (message === null) {
    return fail(`${reason} ${path}`)
  }
  return fail(`${message} (${reason}) ${path}`)
};
|
||
|
||
/**
 * Recursive structural comparison; dispatches on the constructor and fails
 * (throws TestError) on the first mismatch.
 *
 * @param {any} a
 * @param {any} b
 * @param {string} path Human-readable location, e.g. `obj["key"][0]`
 * @param {string?} message Optional user message prefixed to failures
 * @param {function(any,any,any,string,any):boolean} customCompare Fallback for types not handled here
 */
const _compare = (a, b, path, message, customCompare) => {
  // we don't use assert here because we want to test all branches (istanbul errors if one branch is not tested)
  if (a == null || b == null) {
    // Loose == deliberately treats null and undefined alike.
    return compareValues(null, a, b, path)
  }
  if (a.constructor !== b.constructor) {
    _failMessage(message, 'Constructors don\'t match', path);
  }
  let success = true;
  switch (a.constructor) {
    case ArrayBuffer:
      // Reinterpret both buffers as bytes and fall through to the
      // Uint8Array branch below (intentional fallthrough).
      a = new Uint8Array(a);
      b = new Uint8Array(b);
      // eslint-disable-next-line no-fallthrough
    case Uint8Array: {
      if (a.byteLength !== b.byteLength) {
        // NOTE(review): the reason text is misleading — it fires when the
        // lengths do NOT match. Left unchanged (runtime string).
        _failMessage(message, 'ArrayBuffer lengths match', path);
      }
      for (let i = 0; success && i < a.length; i++) {
        success = success && a[i] === b[i];
      }
      break
    }
    case Set: {
      if (a.size !== b.size) {
        _failMessage(message, 'Sets have different number of attributes', path);
      }
      // @ts-ignore
      a.forEach(value => {
        if (!b.has(value)) {
          // NOTE(review): message reads "does have" but fires when b does
          // NOT have the value. Left unchanged (runtime string).
          _failMessage(message, `b.${path} does have ${value}`, path);
        }
      });
      break
    }
    case Map: {
      if (a.size !== b.size) {
        _failMessage(message, 'Maps have different number of attributes', path);
      }
      // @ts-ignore
      a.forEach((value, key) => {
        if (!b.has(key)) {
          _failMessage(message, `Property ${path}["${key}"] does not exist on second argument`, path);
        }
        _compare(value, b.get(key), `${path}["${key}"]`, message, customCompare);
      });
      break
    }
    case Object:
      if (length$1(a) !== length$1(b)) {
        _failMessage(message, 'Objects have a different number of attributes', path);
      }
      forEach$1(a, (value, key) => {
        if (!hasProperty(b, key)) {
          _failMessage(message, `Property ${path} does not exist on second argument`, path);
        }
        _compare(value, b[key], `${path}["${key}"]`, message, customCompare);
      });
      break
    case Array:
      if (a.length !== b.length) {
        _failMessage(message, 'Arrays have a different number of attributes', path);
      }
      // @ts-ignore
      a.forEach((value, i) => _compare(value, b[i], `${path}[${i}]`, message, customCompare));
      break
    /* istanbul ignore next */
    default:
      // Unknown type: delegate to the caller-supplied comparator.
      if (!customCompare(a.constructor, a, b, path, compareValues)) {
        _failMessage(message, `Values ${stringify(a)} and ${stringify(b)} don't match`, path);
      }
  }
  // `success` can only be false after the byte-wise Uint8Array loop.
  assert(success, message);
  return true
};
|
||
|
||
/**
 * Deep-compare two values, failing the running test on any mismatch.
 *
 * @template T
 * @param {T} a
 * @param {T} b
 * @param {string?} [message]
 * @param {function(any,T,T,string,any):boolean} [customCompare]
 */
const compare = (a, b, message = null, customCompare = compareValues) => {
  return _compare(a, b, 'obj', message, customCompare)
};
|
||
|
||
/* istanbul ignore next */
/**
 * Fail the running test when `condition` is falsy.
 *
 * @param {boolean} condition
 * @param {string?} [message]
 * @throws {TestError}
 */
const assert = (condition, message = null) => {
  if (condition) {
    return condition
  }
  return fail(`Assertion failed${message !== null ? `: ${message}` : ''}`)
};
|
||
|
||
/**
 * Run every exported function named `test*` / `benchmark*` in the given
 * modules and print a summary. Resolves to true iff all tests passed.
 *
 * @param {Object<string, Object<string, function(TestCase):void|Promise<any>>>} tests
 */
const runTests = async tests => {
  /**
   * @param {string} testname
   */
  const filterTest = testname => testname.startsWith('test') || testname.startsWith('benchmark');
  // Count matching test functions across all modules up front so run() can
  // print "[i/n]" progress markers.
  const numberOfTests = map(tests, mod => map(mod, (f, fname) => /* istanbul ignore next */ f && filterTest(fname) ? 1 : 0).reduce(add, 0)).reduce(add, 0);
  let successfulTests = 0;
  let testnumber = 0;
  const start = performance.now();
  for (const modName in tests) {
    const mod = tests[modName];
    for (const fname in mod) {
      const f = mod[fname];
      /* istanbul ignore else */
      if (f && filterTest(fname)) {
        const repeatEachTest = 1;
        let success = true;
        for (let i = 0; success && i < repeatEachTest; i++) {
          success = await run(modName, fname, f, testnumber, numberOfTests);
        }
        testnumber++;
        /* istanbul ignore else */
        if (success) {
          successfulTests++;
        }
      }
    }
  }
  const end = performance.now();
  print('');
  const success = successfulTests === numberOfTests;
  /* istanbul ignore next */
  if (success) {
    /* istanbul ignore next */
    print(GREEN, BOLD, 'All tests successful!', GREY, UNBOLD, ` in ${humanizeDuration(end - start)}`);
    /* istanbul ignore next */
    printImgBase64(nyanCatImage, 50);
  } else {
    const failedTests = numberOfTests - successfulTests;
    print(RED, BOLD, `> ${failedTests} test${failedTests > 1 ? 's' : ''} failed`);
  }
  return success
};
|
||
|
||
// Thrown by fail() to mark a test as failed (distinct from SkipError).
class TestError extends Error {}
|
||
|
||
/**
 * Print the failure reason and abort the current test.
 *
 * @param {string} reason
 * @throws {TestError} Always throws — callers never see a return value
 */
const fail = reason => {
  print(RED, BOLD, 'X ', UNBOLD, reason);
  throw new TestError('Test Failed')
};
|
||
|
||
// Thrown inside a test body to mark the test as skipped rather than failed.
class SkipError extends Error {}
|
||
|
||
// Base64-encoded GIF printed by runTests (via printImgBase64) when all tests pass.
// eslint-disable-next-line
const nyanCatImage = 'R0lGODlhjABMAPcAAMiSE0xMTEzMzUKJzjQ0NFsoKPc7//FM/9mH/z9x0HIiIoKCgmBHN+frGSkZLdDQ0LCwsDk71g0KCUzDdrQQEOFz/8yYdelmBdTiHFxcXDU2erR/mLrTHCgoKK5szBQUFNgSCTk6ymfpCB9VZS2Bl+cGBt2N8kWm0uDcGXhZRUvGq94NCFPhDiwsLGVlZTgqIPMDA1g3aEzS5D6xAURERDtG9JmBjJsZGWs2AD1W6Hp6eswyDeJ4CFNTU1LcEoJRmTMzSd14CTg5ser2GmDzBd17/xkZGUzMvoSMDiEhIfKruCwNAJaWlvRzA8kNDXDrCfi0pe1U/+GS6SZrAB4eHpZwVhoabsx9oiYmJt/TGHFxcYyMjOid0+Zl/0rF6j09PeRr/0zU9DxO6j+z0lXtBtp8qJhMAEssLGhoaPL/GVn/AAsWJ/9/AE3Z/zs9/3cAAOlf/+aa2RIyADo85uhh/0i84WtrazQ0UyMlmDMzPwUFBe16BTMmHau0E03X+g8pMEAoS1MBAf++kkzO8pBaqSZoe9uB/zE0BUQ3Sv///4WFheuiyzo880gzNDIyNissBNqF/8RiAOF2qG5ubj0vL1z6Avl5ASsgGkgUSy8vL/8n/z4zJy8lOv96uEssV1csAN5ZCDQ0Wz1a3tbEGHLeDdYKCg4PATE7PiMVFSoqU83eHEi43gUPAOZ8reGogeKU5dBBC8faHEez2lHYF4bQFMukFtl4CzY3kkzBVJfMGZkAAMfSFf27mP0t//g4/9R6Dfsy/1DRIUnSAPRD/0fMAFQ0Q+l7rnbaD0vEntCDD6rSGtO8GNpUCU/MK07LPNEfC7RaABUWWkgtOst+71v9AfD7GfDw8P19ATtA/NJpAONgB9yL+fm6jzIxMdnNGJxht1/2A9x//9jHGOSX3+5tBP27l35+fk5OTvZ9AhYgTjo0PUhGSDs9+LZjCFf2Aw0IDwcVAA8PD5lwg9+Q7YaChC0kJP8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACH/C05FVFNDQVBFMi4wAwEAAAAh/wtYTVAgRGF0YVhNUDw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuMC1jMDYwIDYxLjEzNDc3NywgMjAxMC8wMi8xMi0xNzozMjowMCAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wTU09Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9tbS8iIHhtbG5zOnN0UmVmPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvc1R5cGUvUmVzb3VyY2VSZWYjIiB4bWxuczp4bXA9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC8iIHhtcE1NOk9yaWdpbmFsRG9jdW1lbnRJRD0ieG1wLmRpZDpGNEM2MUEyMzE0QTRFMTExOUQzRkE3QTBCRDNBMjdBQyIgeG1wTU06RG9jdW1lbnRJRD0ieG1wLmRpZDpERjQ0NEY0QkI2MTcxMUUxOUJEQkUzNUNGQTkwRTU2MiIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDpERjQ0NEY0QUI2MTcxMUUxOUJEQkUzNUNGQTkwRTU2MiIgeG1wOkNyZWF0b3JUb29sPSJBZG9iZSBQaG90b3Nob3AgQ1M1IFdpbmRvd3MiPiA8eG1wTU06RGVyaXZlZEZyb20gc3RSZWY6aW5zdGFuY2VJRD0ieG1wLmlpZDo1OEE3RTIwRjcyQTlFMTExOTQ1QkY2QTU5QzVCQjJBOSIgc3RSZWY6ZG9jdW1lbnRJRD0ieG1wLmRpZDpGNEM2MUEyMzE0QTRFMTExOUQzRkE3QTBCRDNBMjdBQyIvPiA8L3JkZjpEZXNjcmlwdGlvbj4gPC9yZGY6UkRGPiA8L3g6eG1wbWV0YT4gPD94cGFja2V0IGVuZD0iciI/PgH//v38+/r5+Pf29fTz8vHw7+7t7Ovq6ejn5uXk4+Lh4N/e3dzb2tnY19bV1NPS0dDPzs3My8rJyMfGxcTDwsHAv769vLu6ubi3trW0s7KxsK+urayrqqmop6alpKOioaCfnp2cm5qZmJeWlZSTkpGQj46NjIuKiYiHhoWEg4KBgH9+fXx7enl4d3Z1dHNycXBvbm1sa2ppaGdmZWRjYmFgX15dXFtaWVhXVlVUU1JRUE9OTUxLSklIR0ZFRENCQUA/Pj08Ozo5ODc2NTQzMjEwLy4tLCsqKSgnJiUkIyIhIB8eHRwbGhkYFxYVFBMSERAPDg0MCwoJCAcGBQQDAgEAACH5BAkKABEAIf4jUmVzaXplZCBvbiBodHRwczovL2V6Z2lmLmNvbS9yZXNpemUALAAAAACMAEwAAAj/ACMIHEiwoMGDCBMqXMiwocOHECNKnEixosWLGDNq3Mixo8ePIEOKHEmypMmTKFOqXLkxEcuXMAm6jElTZaKZNXOOvOnyps6fInECHdpRKNGjSJMqXZrSKNOnC51CnUq1qtWrWLNC9GmQq9avYMOKHUs2aFmmUs8SlcC2rdu3cNWeTEG3rt27eBnIHflBj6C/gAMLHpxCz16QElJw+7tom+PHkCOP+8utiuHDHRP/5WICgefPkIYV8RAjxudtkwVZjqCnNeaMmheZqADm8+coHn5kyPBt2udFvKrc+7A7gITXFzV77hLF9ucYGRaYo+FhWhHPUKokobFgQYbjyCsq/3fuHHr3BV88HMBeZd357+HFpxBEvnz0961b3+8OP37DtgON5xxznpl3ng5aJKiFDud5B55/Ct3TQwY93COQgLZV0AUC39ihRYMggjhJDw9CeNA9kyygxT2G6TGfcxUY8pkeH3YHgTkMNrgFBJOYs8Akl5l4Yoor3mPki6BpUsGMNS6QiA772WjNPR8CSRAjWBI0B5ZYikGQGFwyMseVYWoZppcDhSkmmVyaySWaAqk5pkBbljnQlnNYEZ05fGaAJGieVQAMjd2ZY+R+X2Rgh5FVBhmBG5BGKumklFZq6aWYZqrpppTOIQQNNPjoJ31RbGibIRXQuIExrSSY4wI66P9gToJlGHOFo374MQg2vGLjRa65etErNoMA68ew2Bi7a6+/Aitsr8UCi6yywzYb7LDR5jotsMvyau0qJJCwGw0vdrEkeTRe0UknC7hQYwYMQrmAMZ2U4WgY+Lahbxt+4Ovvvm34i68fAAscBsD9+kvwvgYDHLDACAu8sL4NFwzxvgkP3EYhhYzw52dFhOPZD5Ns0Iok6PUwyaIuTJLBBwuUIckG8RCkhhrUHKHzEUTcfLM7Ox/hjs9qBH0E0ZUE3bPPQO9cCdFGIx300EwH/bTPUfuc9M5U30zEzhN87NkwcDyXgY/oxaP22vFQIR2JBT3xBDhEUyO33FffXMndT1D/QzTfdPts9915qwEO3377DHjdfBd++N2J47y44Ij7PMN85UgBxzCeQQKJbd9wFyKI6jgqUBqoD6G66qinvvoQ1bSexutDyF4N7bLTHnvruLd+++u5v76766vb3jvxM0wxnyBQxHEued8Y8cX01Fc/fQcHZaG97A1or30DsqPgfRbDpzF+FtyPD37r4ns/fDXnp+/9+qif//74KMj/fRp9TEIDAxb4ixIWQcACFrAMFkigAhPIAAmwyHQDYYMEJ0jBClrwghjMoAY3yMEOYhAdQaCBFtBAAD244oQoTKEKV5iCbizEHjCkoCVgCENLULAJNLTHNSZ4jRzaQ4Y5tOEE+X24Qwn2MIdApKEQJUhEHvowiTBkhh7QVqT8GOmKWHwgFiWghR5AkCA+DKMYx0jGMprxjGhMYw5XMEXvGAZF5piEhQyih1CZ4wt6kIARfORFhjwDBoCEQQkIUoJAwmAFBDEkDAhSCkMOciCFDCQiB6JIgoDAkYQ0JAgSaUhLYnIgFLjH9AggkHsQYHo1oyMVptcCgUjvCx34opAWkp/L1BIhtxxILmfJy17KxJcrSQswhykWYRLzI8Y8pjKXycxfNvOZMEkmNC0izWlSpJrWlAg2s8kQnkRgJt7kpja92ZNwivOcNdkmOqOyzoyos50IeSc850nPegIzIAAh+QQJCgARACwAAAAAjABMAAAI/wAjCBxIsKDBgwgTKlzIsKHDhxAjSpxIsaLFixgzatzIsaPHjyBDihxJcmKikihTZkx0UqXLlw5ZwpxJ02DLmjhz6twJkqVMnz55Ch1KtGhCmUaTYkSqtKnJm05rMl0aVefUqlhtFryatavXr2DDHoRKkKzYs2jTqpW61exani3jun0rlCvdrhLy6t3Lt+9dlykCCx5MuDCDvyU/6BHEuLHjx5BT6EEsUkIKbowXbdvMubPncYy5VZlM+aNlxlxMIFjNGtKwIggqDGO9DbSg0aVNpxC0yEQFMKxZRwmHoEiU4AgW8cKdu+Pp1V2OI6c9bdq2cLARQGEeIV7zjM+nT//3oEfPNDiztTOXoMf7d4vhxbP+ts6cORrfIK3efq+8FnN2kPbeRPEFF918NCywgBZafLNfFffEM4k5C0wi4IARFchaBV0gqGCFDX6zQQqZZPChhRgSuBtyFRiC3DcJfqgFDTTSYOKJF6boUIGQaFLBizF+KOSQKA7EyJEEzXHkkWIQJMaSjMxBEJSMJAllk0ZCKWWWS1q5JJYCUbllBEpC6SWTEehxzz0rBqdfbL1AEsONQ9b5oQ73DOTGnnz26eefgAYq6KCEFmrooYj6CYMTNPjoJ31RbGibIRXQuIExrSSY4wI66P9gToJlGHOFo374MQg2vGLjRa65etErNoMA68ew2Bi7a6+/Aitsr8UCi6yywzYb7LDR5jotsMvyau0qJJCwGw0vdrEkeTRe0UknC7hQYwYMQrmAMZ2U4WgY+Lahbxt+4Ovvvm34i68fAAscBsD9+kvwvgYDHLDACAu8sL4NFwzxvgkP3EYhhYzw52dFhOPZD5Ns0Iok6PUwyaIuTJLBBwuUIckG8RCkhhrUHKHzEUTcfLM7Ox/hjs9qBH0E0ZUE3bPPQO9cCdFGIx300EwH/bTPUfuc9M5U30zEzhN87NkwcDyXgY/oxaP22vFQIR2JBT3xBDhEUyO33FffXMndT1D/QzTfdPts9915qwEO3377DHjdfBd++N2J47y44Ij7PMN85UgBxzCeQQKJbd9wFyKI6jgqUBqoD6G66qinvvoQ1bSexutDyF4N7bLTHnvruLd+++u5v76766vb3jvxM0wxnyBQxHEued8Y8cX01Fc/fQcHZaG97A1or30DsqPgfRbDpzF+FtyPD37r4ns/fDXnp+/9+qif//74KMj/fRp9TEIDAxb4ixIWQcACFrAMFkigAhPIAAmwyHQDYYMEJ0jBClrwghjMoAY3yMEOYhAdQaCBFtBAAD244oQoTKEKV5iCbizEHjCkoCVgCENLULAJNLTHNSZ4jRzaQ4Y5tOEE+X24Qwn2MIdApKEQJUhEHvowiTBkhh7QVqT8GOmKWHwgFiWghR5AkCA+DKMYx0jGMprxjGhMYw5XMEXvGAZF5piEhQyih1CZ4wt6kIARfORFhjwDBoCEQQkIUoJAwmAFBDEkDAhSCkMOciCFDCQiB6JIgoDAkYQ0JAgSaUhLYnIgFLjH9AggkHsQYHo1oyMVptcCgUjvCx34opAWkp/L1BIhtxxILmfJy17KxJcrSQswhykWYRLzI8Y8pjKXycxfNvOZMEkmNC0izWlSpJrWlAg2s8kQnkRgJt7kpja92ZNwivOcNdkmOqOyzoyos50IeSc850nPegIzIAAh+QQJCgBIACwAAAAAjABMAAAI/wCRCBxIsKDBgwgTKlzIsKHDhxAjSpxIsaLFixgzatzIsaPHjyBDihxJcmGiRCVTqsyIcqXLlzBjypxJs6bNmzgPtjR4MqfPn0CDCh1KtKjNnkaTPtyptKlToEyfShUYderTqlaNnkSJNGvTrl6dYg1bdCzZs2jTqvUpoa3bt3DjrnWZoq7du3jzMphb8oMeQYADCx5MOIUeviIlpOAGeNG2x5AjSx4HmFuVw4g/KgbMxQSCz6AhDSuCoMIw0NsoC7qcWXMKQYtMVAADGnSUcAiKRKmNYBEv1q07bv7cZTfvz9OSfw5HGgEU1vHiBdc4/Djvb3refY5y2jlrPeCnY/+sbv1zjAzmzFGZBgnS5+f3PqTvIUG8RfK1i5vPsGDBpB8egPbcF5P0l0F99jV0z4ILCoQfaBV0sV9/C7jwwzcYblAFGhQemGBDX9BAAwH3HKbHa7xVYEht51FYoYgictghgh8iZMQ95vSnBYP3oBiaJhWwyJ+LRLrooUGlwKCkkgSVsCQMKxD0JAwEgfBkCU0+GeVAUxK0wpVZLrmlQF0O9OWSTpRY4ALp0dCjILy5Vxow72hR5J0U2oGZQPb06eefgAYq6KCEFmrooYj6CYMTNPjoJ31RbGibIRXQuIExrSSY4wI66P9gToJlGHOFo374MQg2vGLjRa65etErNoMA68ew2Bi7a6+/Aitsr8UCi6yywzYb7LDR5jotsMvyau0qJJCwGw0vdrEkeTRe0UknC7hQYwYMQrmAMZ2U4WgY+Lahbxt+4Ovvvm34i68fAAscBsD9+kvwvgYDHLDACAu8sL4NFwzxvgkP3EYhhYzw52dFhOPZD5Ns0Iok6PUwyaIuTJLBBwuUIckG8RCkhhrUHKHzEUTcfLM7Ox/hjs9qBH0E0ZUE3bPPQO9cCdFGIx300EwH/bTPUfuc9M5U30zEzhN87NkwcDyXgY/oxaP22vFQIR2JBT3xBDhEUyO33FffXMndT1D/QzTfdPts9915qwEO3377DHjdfBd++N2J47y44Ij7PMN85UgBxzCeQQKJbd9wFyKI6jgqUBqoD6G66qinvvoQ1bSexutDyF4N7bLTHnvruLd+++u5v76766vb3jvxM0wxnyBQxHEued8Y8cX01Fc/fQcHZaG97A1or30DsqPgfRbDpzF+FtyPD37r4ns/fDXnp+/9+qif//74KMj/fRp9TEIDAxb4ixIWQcACFrAMFkigAhPIAAmwyHQDYYMEJ0jBClrwghjMoAY3yMEOYhAdQaCBFtBAAD244oQoTKEKV5iCbizEHjCkoCVgCENLULAJNLTHNSZ4jRzaQ4Y5tOEE+X24Qwn2MIdApKEQJUhEHvowiTBkhh7QVqT8GOmKWHwgFiWghR5AkCA+DKMYx0jGMprxjGhMYw5XMEXvGAZF5piEhQyih1CZ4wt6kIARfORFhjwDBoCEQQkIUoJAwmAFBDEkDAhSCkMOciCFDCQiB6JIgoDAkYQ0JAgSaUhLYnIgFLjH9AggkHsQYHo1oyMVptcCgUjvCx34opAWkp/L1BIhtxxILmfJy17KxJcrSQswhykWYRLzI8Y8pjKXycxfNvOZMEkmNC0izWlSpJrWlAg2s8kQnkRgJt7kpja92ZNwivOcNdkmOqOyzoyos50IeSc850nPegIzIAAh+QQFCgAjACwAAAAAjABMAAAI/wBHCBxIsKDBgwgTKlzIsKHDhxAjSpxIsaLFixgzatzIsaPHjyBDihxJkmCikihTWjw5giVLlTBjHkz0UmBNmThz6tzJs6fPkTRn3vxJtKjRo0iTbgxqUqlTiC5tPt05dOXUnkyval2YdatXg12/ih07lmZQs2bJql27NSzbqW7fOo0rN2nViBLy6t3Lt29dmfGqCB5MuLBhBvH+pmSQQpAgKJAjS54M2XEVBopLSmjseBGCz6BDi37lWFAVPZlHbnb8SvRnSL0qIKjQK/Q2y6hTh1z9ahuYKK4rGEJgSHboV1BO697d+HOFLq4/e/j2zTmYz8lR37u3vOPq6KGnEf/68mXaNjrAEWT/QL5b943fwX+OkWGBOT3TQie/92HBggwSvCeRHgQSKFB8osExzHz12UdDddhVQYM5/gEoYET3ZDBJBveghmBoRRhHn38LaKHFDyimYIcWJFp44UP39KCFDhno0WFzocERTmgjkrhhBkCy2GKALzq03Tk6LEADFffg+NowshU3jR1okGjllf658EWRMN7zhX80NCkIeLTpISSWaC4wSW4ElQLDm28SVAKcMKxAEJ0wEAQCnSXISaedA+FJ0Ap8+gknoAIJOhChcPYpUCAdUphBc8PAEZ2ZJCZC45UQWIPpmgTZI+qopJZq6qmopqrqqqy2eioMTtz/QwMNmTRXQRGXnqnIFw0u0EOVC9zDIqgDjXrNsddYQqolyF7TxLLNltqssqMyi+yz1SJLrahNTAvttd8mS2q32pJ6ATTQfCKma10YZ+YGV1wRJIkuzAgkvPKwOQIb/Pbr778AByzwwAQXbPDBBZvxSWNSbBMOrghEAR0CZl7RSSclJlkiheawaEwnZeibxchplJxGAyOP3IDJaaCQchbVsPxyFiyjnPLKJruccswlV/MyCjW/jHPJOo/Mcxo+pwy0yTarbHIfnL2ioGvvaGExxrzaJ+wCdvT3ccgE9TzE2GOzTDbZZp/NcjVnD5G22ia3vbbccZ99dBp0iw13yWdD/10aF5BERx899CzwhQTxxHMP4hL0R08GlxQEDjiVqGG5GtRMPnnll1eiOTjUXK7G5+CInrnmoXf+eeqWf8655adPzroanqN+eeyUm7665TNMsQlnUCgh/PDCu1JFD/6ZqPzyvhJgEOxHRH8EGaITIf0R7oh+/RGiV3I99ZdbL332l2/f/fVEVH/962qYf7k76ItOxhEzuABkBhbkr//++aeQyf0ADKDzDBKGArbhgG3wQwEL6AcEtmGBBnQgBMPgQAUusIEInKADHwjBCkIQgwfUoAQ7iEALMtAPa5iEfbTQIT0YgTxGKJAMvfSFDhDoHgT4AgE6hBA/+GEQ2AgiNvy84EMfekGI2BhEEf1QAyQuEYhCJGIRjyhEJRaxiUJ8IhKlaEQkWtGHWAyiFqO4RC/UIIUl2s4H9PAlw+lrBPHQQ4UCtDU7vJEgbsijHvfIxz768Y+ADKQgB0lIQGJjDdvZjkBstJ3EHCSRRLLRHQnCiEoSJAKVrOQcCCKGTDIiApTMpBgIMgdPbnIgncxkQTw5yoGUMpOnFEgqLRnKSrZSIK/U5Ag+kLjEDaSXCQGmQHzJpWIasyV3OaYyl8nMZi7nLsl0ZkagKc1qWvOa2JxLNLPJzW6+ZZvevAhdwrkStJCTI2gZ5zknos51shOc7oynPOdJz3ra857hDAgAOw==';
// Name of the on-disk LevelDB directory used by the tests below.
// When changing this, also make sure to change the file in gitignore
const storageName = 'tmp-leveldb-storage';
/**
 * Read a state vector from a Decoder and return it as a Map. This is a helper
 * method that will be exported by Yjs directly.
 *
 * @param {decoding.Decoder} decoder
 * @return {Map<number,number>} Maps `client` to the number next expected `clock` from that client.
 *
 * @function
 */
const readStateVector = decoder => {
  const stateMap = new Map();
  // the encoding starts with the number of (client, clock) entries
  let remaining = readVarUint(decoder);
  while (remaining-- > 0) {
    const client = readVarUint(decoder);
    const clock = readVarUint(decoder);
    stateMap.set(client, clock);
  }
  return stateMap
};
/**
 * Decode an encoded state vector and return it as a Map.
 *
 * @param {Uint8Array} decodedState
 * @return {Map<number,number>} Maps `client` to the number next expected `clock` from that client.
 *
 * @function
 */
const decodeStateVector = decodedState => {
  const decoder = createDecoder(decodedState);
  return readStateVector(decoder)
};
/**
 * Flushes all updates to ldb and deletes items from the updates array.
 *
 * @param {LeveldbPersistence} ldb
 * @param {string} docName
 * @param {Array<Uint8Array>} updates
 * @return {Promise<Array<any>>} resolves once every drained update is stored
 */
const flushUpdatesHelper = (ldb, docName, updates) => {
  // drain the array in place so callers can keep pushing fresh updates into it
  const drained = updates.splice(0);
  return Promise.all(drained.map(update => ldb.storeUpdate(docName, update)))
};
/**
 * Store updates for one document, verify its state vector and round-tripped
 * content, then clear the document and verify all data is gone.
 *
 * @param {t.TestCase} tc
 */
const testLeveldbUpdateStorage = async tc => {
  const docName = tc.testName;
  const sourceDoc = new Doc();
  sourceDoc.clientID = 0; // fixed clientID so the state vector is predictable
  const persistence = new LeveldbPersistence(storageName);
  // wipe the storage entirely so the allData checks below start from a clean slate
  await persistence._transact(async db => db.clear());
  compareArrays([], await persistence.getAllDocNames());

  const pendingUpdates = [];

  sourceDoc.on('update', update => {
    pendingUpdates.push(update);
  });

  sourceDoc.getArray('arr').insert(0, [1]);
  sourceDoc.getArray('arr').insert(0, [2]);

  await flushUpdatesHelper(persistence, docName, pendingUpdates);

  const encodedSv = await persistence.getStateVector(docName);
  const sv = decodeStateVector(encodedSv);
  assert(sv.size === 1);
  assert(sv.get(0) === 2);

  const loadedDoc = await persistence.getYDoc(docName);
  compareArrays(loadedDoc.getArray('arr').toArray(), [2, 1]);

  const allData = await persistence._transact(async db => getLevelBulkData(db, { gte: ['v1'], lt: ['v2'] }));
  assert(allData.length > 0, 'some data exists');

  compareArrays([docName], await persistence.getAllDocNames());
  await persistence.clearDocument(docName);
  compareArrays([], await persistence.getAllDocNames());
  const allData2 = await persistence._transact(async db => getLevelBulkData(db, { gte: ['v1'], lt: ['v2'] }));
  console.log(allData2);
  assert(allData2.length === 0, 'really deleted all data');

  await persistence.destroy();
};
/**
 * Store many individual updates, then verify that flushDocument merges them
 * into a single entry while preserving content and the state vector.
 *
 * @param {t.TestCase} tc
 */
const testEncodeManyUpdates = async tc => {
  const N = PREFERRED_TRIM_SIZE * 7;
  const docName = tc.testName;
  const sourceDoc = new Doc();
  sourceDoc.clientID = 0; // fixed clientID so the state vector is predictable
  const persistence = new LeveldbPersistence(storageName);
  await persistence.clearDocument(docName);

  const pendingUpdates = [];

  sourceDoc.on('update', update => {
    pendingUpdates.push(update);
  });
  await flushUpdatesHelper(persistence, docName, pendingUpdates);

  const keys = await persistence._transact(db => getLevelUpdates(db, docName, { keys: true, values: false }));

  // update keys carry a monotonically increasing clock at index 3
  for (let i = 0; i < keys.length; i++) {
    assert(keys[i][3] === i);
  }

  const yarray = sourceDoc.getArray('arr');
  for (let i = 0; i < N; i++) {
    yarray.insert(0, [i]);
  }
  await flushUpdatesHelper(persistence, docName, pendingUpdates);

  const loadedDoc = await persistence.getYDoc(docName);
  assert(loadedDoc.getArray('arr').length === N);

  await persistence.flushDocument(docName);
  const mergedKeys = await persistence._transact(db => getLevelUpdates(db, docName, { keys: true, values: false }));
  assert(mergedKeys.length === 1);

  // getYDoc still works after flush/merge
  const mergedDoc = await persistence.getYDoc(docName);
  assert(mergedDoc.getArray('arr').length === N);

  // test if state vector is properly generated
  compare(encodeStateVector(sourceDoc), await persistence.getStateVector(docName));
  // add new update so that sv needs to be updated
  sourceDoc.getArray('arr').insert(0, ['new']);
  await flushUpdatesHelper(persistence, docName, pendingUpdates);
  compare(encodeStateVector(sourceDoc), await persistence.getStateVector(docName));

  await persistence.destroy();
};
/**
 * Verify that getDiff returns exactly the updates a partially-synced document
 * is missing relative to its state vector.
 *
 * @param {t.TestCase} tc
 */
const testDiff = async tc => {
  const N = PREFERRED_TRIM_SIZE * 2; // large enough that the document is flushed/merged at least once
  const docName = tc.testName;
  const sourceDoc = new Doc();
  sourceDoc.clientID = 0; // fixed clientID so the state vector is predictable
  const persistence = new LeveldbPersistence(storageName);
  await persistence.clearDocument(docName);

  const pendingUpdates = [];
  sourceDoc.on('update', update => {
    pendingUpdates.push(update);
  });

  const yarray = sourceDoc.getArray('arr');
  // create N changes
  for (let i = 0; i < N; i++) {
    yarray.insert(0, [i]);
  }
  await flushUpdatesHelper(persistence, docName, pendingUpdates);

  // create partially merged doc
  const partialDoc = await persistence.getYDoc(docName);

  // another N updates
  for (let i = 0; i < N; i++) {
    yarray.insert(0, [i]);
  }
  await flushUpdatesHelper(persistence, docName, pendingUpdates);

  // apply diff to doc
  const diffUpdate = await persistence.getDiff(docName, encodeStateVector(partialDoc));
  applyUpdate(partialDoc, diffUpdate);

  assert(partialDoc.getArray('arr').length === sourceDoc.getArray('arr').length);
  assert(partialDoc.getArray('arr').length === N * 2);

  await persistence.destroy();
};
/**
 * Exercise the per-document meta key/value API: set, overwrite, read, list,
 * delete a single key, and clear everything with the document.
 *
 * @param {t.TestCase} tc
 */
const testMetas = async tc => {
  const docName = tc.testName;
  const persistence = new LeveldbPersistence(storageName);
  await persistence.clearDocument(docName);

  await persistence.setMeta(docName, 'a', 4);
  await persistence.setMeta(docName, 'a', 5); // overwrites the previous value
  await persistence.setMeta(docName, 'b', 4);
  const metaA = await persistence.getMeta(docName, 'a');
  const metaB = await persistence.getMeta(docName, 'b');
  assert(metaA === 5);
  assert(metaB === 4);
  const metas = await persistence.getMetas(docName);
  assert(metas.size === 2);
  assert(metas.get('a') === 5);
  assert(metas.get('b') === 4);
  await persistence.delMeta(docName, 'a');
  const deletedMeta = await persistence.getMeta(docName, 'a');
  assert(deletedMeta === undefined);
  await persistence.clearDocument(docName);
  const metasEmpty = await persistence.getMetas(docName);
  assert(metasEmpty.size === 0);

  await persistence.destroy();
};
/**
 * Verify getAllDocStateVectors lists a stored document's state vector and that
 * clearDocument removes the document (including its state vector) completely.
 *
 * @param {t.TestCase} tc
 */
const testDeleteEmptySv = async tc => {
  const docName = tc.testName;
  const persistence = new LeveldbPersistence(storageName);
  await persistence.clearAll();

  const ydoc = new Doc();
  ydoc.clientID = 0; // fixed clientID so the state vector is predictable
  ydoc.getArray('arr').insert(0, [1]);
  const singleUpdate = encodeStateAsUpdate(ydoc);

  compareArrays([], await persistence.getAllDocNames());
  await persistence.storeUpdate(docName, singleUpdate);
  compareArrays([docName], await persistence.getAllDocNames());
  const docSvs = await persistence.getAllDocStateVectors();
  assert(docSvs.length === 1);
  compare([{ name: docName, clock: 0, sv: encodeStateVector(ydoc) }], docSvs);

  await persistence.clearDocument(docName);
  compareArrays([], await persistence.getAllDocNames());
  await persistence.destroy();
};
/**
 * Reading the state vector of a document that was never stored yields the
 * empty state vector.
 *
 * @param {t.TestCase} tc
 */
const testMisc = async tc => {
  const docName = tc.testName;
  const persistence = new LeveldbPersistence(storageName);
  await persistence.clearDocument(docName);

  // an empty state vector encodes to a single byte (just the entry count)
  const sv = await persistence.getStateVector('does not exist');
  assert(sv.byteLength === 1);

  await persistence.destroy();
};
// Test-module namespace consumed by runTests below; frozen so the suite
// object cannot be mutated after creation.
var leveldb = /*#__PURE__*/Object.freeze({
  __proto__: null,
  testLeveldbUpdateStorage,
  testEncodeManyUpdates,
  testDiff,
  testMetas,
  testDeleteEmptySv,
  testMisc
});
// In the browser, mirror test output into an on-page console element.
if (isBrowser) {
  createVConsole(document.body);
}
// Run the leveldb test module and, under Node, propagate the overall
// result via the process exit code.
runTests({
  leveldb
}).then(success => {
  /* istanbul ignore next */
  if (isNode) {
    process.exit(success ? 0 : 1);
  }
});
|
||
})();
|
||
//# sourceMappingURL=test.js.map
|