// Bundled/generated module (automerge-repo core + network adapters); esbuild output.
// esbuild downlevel helpers: emulate public class fields and #private class
// members (fields/methods) using WeakMap/WeakSet side tables.
var __defProp = Object.defineProperty;
// Expression-position helper for throwing a TypeError.
var __typeError = (msg) => {
  throw TypeError(msg);
};
// Define `key` on `obj` as a plain enumerable/configurable/writable property.
var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
// Define a public class field; non-symbol keys are coerced to strings.
var __publicField = (obj, key, value) => __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
// Assert `obj` actually has the private member before any access.
var __accessCheck = (obj, member, msg) => member.has(obj) || __typeError("Cannot " + msg);
// Read a private field (via getter when the field has an accessor).
var __privateGet = (obj, member, getter) => (__accessCheck(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
// Register a private member on an instance (WeakSet for methods, WeakMap for fields).
var __privateAdd = (obj, member, value) => member.has(obj) ? __typeError("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
// Write a private field (via setter when present); evaluates to `value`.
var __privateSet = (obj, member, value, setter) => (__accessCheck(obj, member, "write to private field"), setter ? setter.call(obj, value) : member.set(obj, value), value);
// Access a private method after membership check.
var __privateMethod = (obj, member, method) => (__accessCheck(obj, member, "access private method"), method);
// Wrap a private field in a `_` accessor pair so compound ops (e.g. `++`) work.
var __privateWrapper = (obj, member, setter, getter) => ({
  set _(value) {
    __privateSet(obj, member, value, setter);
  },
  get _() {
    return __privateGet(obj, member, getter);
  }
});
// WeakMap/WeakSet slot variables backing the #private members of the classes
// below (one slot per private field/method in the original TypeScript source);
// each is assigned right after its owning class definition.
var _syncInfoByDocId, _ourSubscriptions, _theirSubscriptions, _generousPeers, _subscribedDocsByPeer, _log, _RemoteHeadsSubscriptions_instances, isPeerSubscribedToDoc_fn, changedHeads_fn, _log2, _adaptersByPeer, _count, _sessionId, _ephemeralSessionCounts, _storageAdapter, _storedHeads, _chunkInfos, _compacting, _log3, _StorageSubsystem_instances, saveIncremental_fn, saveTotal_fn, shouldSave_fn, shouldCompact_fn, _log4, _saveDebounceRate, _saveFn, _handleCache, _shareConfig, _remoteHeadsSubscriptions, _remoteHeadsGossipingEnabled, _progressCache, _saveFns, _idFactory, _Repo_instances, registerHandleWithSubsystems_fn, receiveMessage_fn, _throttledSaveSyncStateHandlers, saveSyncState_fn, getHandle_fn, loadDocumentWithProgress_fn, loadDocument_fn, _broadcastChannel, _disconnected, _ready, _markReady, _readyPromise, _options, _connectedPeers, _BroadcastChannelNetworkAdapter_instances, announceConnection_fn, _ready2, _readyResolver, _readyPromise2, _WebSocketClientAdapter_instances, forceReady_fn, _retryIntervalId, _log5;
import { E as EventEmitter, d as debug, a as isRepoMessage, b as isEphemeralMessage, s as sha256Exports, v as v4, l as loadIncremental, c as init, e as stats, g as getHeads, f as saveSince, h as save, j as decodeSyncState, k as encodeSyncState, m as headsAreSame, n as encodeHeads, C as CollectionSynchronizer, t as throttle, D as DocHandle, o as from, p as emptyChange, q as parseAutomergeUrl, r as generateAutomergeUrl, u as binaryToDocumentId, w as clone, i as isValidAutomergeUrl, x as interpretAsDocumentId, U as UNAVAILABLE, y as DELETED, R as READY, z as abortable, A as AbortError, B as load, F as UNLOADED, G as getAugmentedNamespace, H as getDefaultExportFromCjs, I as encode, J as decode } from "./fullfat_node-75TjwUrn.js";
|
|
import require$$0$3 from "events";
|
|
import require$$1$1 from "https";
|
|
import require$$2$1 from "http";
|
|
import require$$3 from "net";
|
|
import require$$4 from "tls";
|
|
import crypto from "crypto";
|
|
import require$$0$2 from "stream";
|
|
import require$$7 from "url";
|
|
import require$$0 from "zlib";
|
|
import require$$0$1 from "buffer";
|
|
import "tty";
|
|
import "util";
|
|
import "os";
|
|
import "node:crypto";
|
|
import "module";
|
|
import "fs";
|
|
/**
 * Tracks "remote heads" gossip subscriptions: which storage IDs we subscribe
 * to, which storage IDs each connected peer subscribes to, and the latest
 * known heads per (document, storageId), re-broadcasting head changes to the
 * peers that should hear about them.
 *
 * Events emitted:
 * - "change-remote-subs"   — our effective remote-subscription set changed
 * - "notify-remote-heads"  — tell a peer about heads for a (doc, storageId)
 * - "remote-heads-changed" — a storage ID we subscribe to has new heads
 */
class RemoteHeadsSubscriptions extends EventEmitter {
  constructor() {
    super(...arguments);
    __privateAdd(this, _RemoteHeadsSubscriptions_instances);
    // Last known heads and timestamp for each storageId that we know about
    __privateAdd(this, _syncInfoByDocId, /* @__PURE__ */ new Map());
    // Storage IDs we have subscribed to via Repo.subscribeToRemoteHeads
    __privateAdd(this, _ourSubscriptions, /* @__PURE__ */ new Set());
    // Storage IDs other peers have subscribed to by sending us a control message
    __privateAdd(this, _theirSubscriptions, /* @__PURE__ */ new Map());
    // Peers we will always share remote heads with even if they are not subscribed
    __privateAdd(this, _generousPeers, /* @__PURE__ */ new Set());
    // Documents each peer has open, we need this information so we only send remote heads of documents that the peer knows
    __privateAdd(this, _subscribedDocsByPeer, /* @__PURE__ */ new Map());
    __privateAdd(this, _log, debug("automerge-repo:remote-heads-subscriptions"));
  }
  /** Subscribe ourselves to the given storage IDs (idempotent per remote). */
  subscribeToRemotes(remotes) {
    __privateGet(this, _log).call(this, "subscribeToRemotes", remotes);
    const remotesToAdd = [];
    for (const remote of remotes) {
      if (!__privateGet(this, _ourSubscriptions).has(remote)) {
        __privateGet(this, _ourSubscriptions).add(remote);
        remotesToAdd.push(remote);
      }
    }
    if (remotesToAdd.length > 0) {
      this.emit("change-remote-subs", {
        add: remotesToAdd,
        peers: Array.from(__privateGet(this, _generousPeers))
      });
    }
  }
  /**
   * Unsubscribe ourselves from the given storage IDs. A remote only leaves
   * the effective subscription set if no peer still subscribes to it.
   */
  unsubscribeFromRemotes(remotes) {
    // FIX: previously logged "subscribeToRemotes" (copy-paste error).
    __privateGet(this, _log).call(this, "unsubscribeFromRemotes", remotes);
    const remotesToRemove = [];
    for (const remote of remotes) {
      if (__privateGet(this, _ourSubscriptions).has(remote)) {
        __privateGet(this, _ourSubscriptions).delete(remote);
        if (!__privateGet(this, _theirSubscriptions).has(remote)) {
          remotesToRemove.push(remote);
        }
      }
    }
    if (remotesToRemove.length > 0) {
      this.emit("change-remote-subs", {
        remove: remotesToRemove,
        peers: Array.from(__privateGet(this, _generousPeers))
      });
    }
  }
  /**
   * Process a subscription control message from a peer: record which storage
   * IDs the sender (un)subscribes to, update our effective subscription set,
   * and replay any heads we already know for newly shared subscriptions.
   */
  handleControlMessage(control) {
    const remotesToAdd = [];
    const remotesToRemove = [];
    const addedRemotesWeKnow = [];
    __privateGet(this, _log).call(this, "handleControlMessage", control);
    if (control.add) {
      for (const remote of control.add) {
        let theirSubs = __privateGet(this, _theirSubscriptions).get(remote);
        // Remote already known (by us or another peer): sender may want a
        // catch-up of the heads we have cached for it (replayed below).
        if (__privateGet(this, _ourSubscriptions).has(remote) || theirSubs) {
          addedRemotesWeKnow.push(remote);
        }
        if (!theirSubs) {
          theirSubs = /* @__PURE__ */ new Set();
          __privateGet(this, _theirSubscriptions).set(remote, theirSubs);
          if (!__privateGet(this, _ourSubscriptions).has(remote)) {
            remotesToAdd.push(remote);
          }
        }
        theirSubs.add(control.senderId);
      }
    }
    if (control.remove) {
      for (const remote of control.remove) {
        const theirSubs = __privateGet(this, _theirSubscriptions).get(remote);
        if (theirSubs) {
          theirSubs.delete(control.senderId);
          // Drop the remote only when nobody (them or us) still wants it.
          if (theirSubs.size == 0 && !__privateGet(this, _ourSubscriptions).has(remote)) {
            remotesToRemove.push(remote);
          }
        }
      }
    }
    if (remotesToAdd.length > 0 || remotesToRemove.length > 0) {
      this.emit("change-remote-subs", {
        peers: Array.from(__privateGet(this, _generousPeers)),
        add: remotesToAdd,
        remove: remotesToRemove
      });
    }
    // Replay cached heads of known remotes for the docs the sender has open.
    for (const remote of addedRemotesWeKnow) {
      const subscribedDocs = __privateGet(this, _subscribedDocsByPeer).get(control.senderId);
      if (subscribedDocs) {
        for (const documentId of subscribedDocs) {
          const syncInfo = __privateGet(this, _syncInfoByDocId).get(documentId);
          if (!syncInfo) {
            continue;
          }
          const syncInfoForRemote = syncInfo.get(remote);
          if (syncInfoForRemote) {
            this.emit("notify-remote-heads", {
              targetId: control.senderId,
              documentId,
              heads: syncInfoForRemote.lastHeads,
              timestamp: syncInfoForRemote.lastSyncTimestamp,
              storageId: remote
            });
          }
        }
      }
    }
  }
  /** A peer we are not directly connected to has changed their heads */
  handleRemoteHeads(msg) {
    __privateGet(this, _log).call(this, "handleRemoteHeads", msg);
    const changedHeads = __privateMethod(this, _RemoteHeadsSubscriptions_instances, changedHeads_fn).call(this, msg);
    // 1. Notify our own subscribers.
    for (const event of changedHeads) {
      if (__privateGet(this, _ourSubscriptions).has(event.storageId)) {
        this.emit("remote-heads-changed", event);
      }
    }
    // 2. Forward to generous peers (everyone except the original sender).
    for (const event of changedHeads) {
      for (const peer of __privateGet(this, _generousPeers)) {
        if (peer === msg.senderId) {
          continue;
        }
        this.emit("notify-remote-heads", {
          targetId: peer,
          documentId: event.documentId,
          heads: event.remoteHeads,
          timestamp: event.timestamp,
          storageId: event.storageId
        });
      }
    }
    // 3. Forward to peers subscribed to the storage ID who have the doc open.
    for (const event of changedHeads) {
      const theirSubs = __privateGet(this, _theirSubscriptions).get(event.storageId);
      if (theirSubs) {
        for (const peerId of theirSubs) {
          if (__privateMethod(this, _RemoteHeadsSubscriptions_instances, isPeerSubscribedToDoc_fn).call(this, peerId, event.documentId)) {
            this.emit("notify-remote-heads", {
              targetId: peerId,
              documentId: event.documentId,
              heads: event.remoteHeads,
              timestamp: event.timestamp,
              storageId: event.storageId
            });
          }
        }
      }
    }
  }
  /** A peer we are directly connected to has updated their heads */
  handleImmediateRemoteHeadsChanged(documentId, storageId, heads) {
    // FIX: previously logged "handleLocalHeadsChanged" (wrong method name).
    __privateGet(this, _log).call(this, "handleImmediateRemoteHeadsChanged", documentId, storageId, heads);
    const remote = __privateGet(this, _syncInfoByDocId).get(documentId);
    const timestamp = Date.now();
    if (!remote) {
      __privateGet(this, _syncInfoByDocId).set(documentId, /* @__PURE__ */ new Map([
        [storageId, { lastSyncTimestamp: timestamp, lastHeads: heads }]
      ]));
    } else {
      const docRemote = remote.get(storageId);
      // NOTE(review): `lastSyncTimestamp < Date.now()` is effectively always
      // true for previously stored entries, so the cache is overwritten on
      // every call — possibly `timestamp` comparison was intended; confirm
      // against upstream before changing.
      if (!docRemote || docRemote.lastSyncTimestamp < Date.now()) {
        remote.set(storageId, {
          lastSyncTimestamp: Date.now(),
          lastHeads: heads
        });
      }
    }
    const theirSubs = __privateGet(this, _theirSubscriptions).get(storageId);
    if (theirSubs) {
      for (const peerId of theirSubs) {
        if (__privateMethod(this, _RemoteHeadsSubscriptions_instances, isPeerSubscribedToDoc_fn).call(this, peerId, documentId)) {
          this.emit("notify-remote-heads", {
            targetId: peerId,
            documentId,
            heads,
            timestamp,
            storageId
          });
        }
      }
    }
  }
  /**
   * Mark a peer as "generous" (always shared with), send it our current
   * subscriptions, and replay every cached (doc, storageId) head we hold.
   */
  addGenerousPeer(peerId) {
    __privateGet(this, _log).call(this, "addGenerousPeer", peerId);
    __privateGet(this, _generousPeers).add(peerId);
    if (__privateGet(this, _ourSubscriptions).size > 0) {
      this.emit("change-remote-subs", {
        add: Array.from(__privateGet(this, _ourSubscriptions)),
        peers: [peerId]
      });
    }
    for (const [documentId, remote] of __privateGet(this, _syncInfoByDocId)) {
      for (const [storageId, { lastHeads, lastSyncTimestamp }] of remote) {
        this.emit("notify-remote-heads", {
          targetId: peerId,
          documentId,
          heads: lastHeads,
          timestamp: lastSyncTimestamp,
          storageId
        });
      }
    }
  }
  /**
   * Forget a disconnected peer: drop its generous status, its open-doc set,
   * and its subscriptions, unsubscribing from remotes nobody wants anymore.
   */
  removePeer(peerId) {
    __privateGet(this, _log).call(this, "removePeer", peerId);
    const remotesToRemove = [];
    __privateGet(this, _generousPeers).delete(peerId);
    __privateGet(this, _subscribedDocsByPeer).delete(peerId);
    for (const [storageId, peerIds] of __privateGet(this, _theirSubscriptions)) {
      if (peerIds.has(peerId)) {
        peerIds.delete(peerId);
        if (peerIds.size == 0) {
          remotesToRemove.push(storageId);
          __privateGet(this, _theirSubscriptions).delete(storageId);
        }
      }
    }
    if (remotesToRemove.length > 0) {
      this.emit("change-remote-subs", {
        remove: remotesToRemove,
        peers: Array.from(__privateGet(this, _generousPeers))
      });
    }
  }
  /**
   * Record that a peer has a document open; replay any cached heads for
   * storage IDs that peer subscribes to.
   */
  subscribePeerToDoc(peerId, documentId) {
    let subscribedDocs = __privateGet(this, _subscribedDocsByPeer).get(peerId);
    if (!subscribedDocs) {
      subscribedDocs = /* @__PURE__ */ new Set();
      __privateGet(this, _subscribedDocsByPeer).set(peerId, subscribedDocs);
    }
    subscribedDocs.add(documentId);
    const remoteHeads = __privateGet(this, _syncInfoByDocId).get(documentId);
    if (remoteHeads) {
      for (const [storageId, lastHeads] of remoteHeads) {
        const subscribedPeers = __privateGet(this, _theirSubscriptions).get(storageId);
        if (subscribedPeers && subscribedPeers.has(peerId)) {
          this.emit("notify-remote-heads", {
            targetId: peerId,
            documentId,
            heads: lastHeads.lastHeads,
            timestamp: lastHeads.lastSyncTimestamp,
            storageId
          });
        }
      }
    }
  }
}
// Backing stores for RemoteHeadsSubscriptions' private fields/methods.
_syncInfoByDocId = new WeakMap();
_ourSubscriptions = new WeakMap();
_theirSubscriptions = new WeakMap();
_generousPeers = new WeakMap();
_subscribedDocsByPeer = new WeakMap();
_log = new WeakMap();
// Brand set for the class's private methods (membership check on access).
_RemoteHeadsSubscriptions_instances = new WeakSet();
// True when `peerId` is known to have `documentId` open; yields undefined
// (falsy) when we hold no open-doc record for that peer at all.
isPeerSubscribedToDoc_fn = function(peerId, documentId) {
  const openDocs = __privateGet(this, _subscribedDocsByPeer).get(peerId);
  return openDocs ? openDocs.has(documentId) : openDocs;
};
/** Returns the (document, storageId) pairs which have changed after processing msg */
changedHeads_fn = function(msg) {
  const { documentId, newHeads } = msg;
  const updates = [];
  for (const [storageId, { heads, timestamp }] of Object.entries(newHeads)) {
    // Ignore storage IDs nobody (us or a peer) has subscribed to.
    const storageIdIsKnown = __privateGet(this, _ourSubscriptions).has(storageId) || __privateGet(this, _theirSubscriptions).has(storageId);
    if (!storageIdIsKnown) {
      continue;
    }
    let perStorage = __privateGet(this, _syncInfoByDocId).get(documentId);
    if (perStorage === undefined) {
      perStorage = /* @__PURE__ */ new Map();
      __privateGet(this, _syncInfoByDocId).set(documentId, perStorage);
    }
    // Skip stale updates: keep only strictly newer timestamps.
    const existing = perStorage.get(storageId);
    if (existing !== undefined && existing.lastSyncTimestamp >= timestamp) {
      continue;
    }
    perStorage.set(storageId, {
      lastSyncTimestamp: timestamp,
      lastHeads: heads
    });
    updates.push({
      documentId,
      storageId,
      remoteHeads: heads,
      timestamp
    });
  }
  return updates;
};
// Key identifying one ephemeral-message stream: "<senderId>:<sessionId>".
const getEphemeralMessageSource = (message) => [message.senderId, message.sessionId].join(":");
/**
 * Multiplexes one or more network adapters into a single message stream.
 *
 * Responsibilities visible here: track which adapter each peer arrived on
 * (for routing outbound messages), de-duplicate ephemeral messages by
 * (sender, session, count), and re-emit adapter events ("peer",
 * "peer-disconnected", "message") to the owning Repo.
 */
class NetworkSubsystem extends EventEmitter {
  constructor(adapters, peerId, peerMetadata) {
    super();
    // Our peer ID, passed to every adapter on connect.
    __publicField(this, "peerId");
    // Promise of our peer metadata; awaited before connecting each adapter.
    __publicField(this, "peerMetadata");
    __privateAdd(this, _log2);
    // peerId -> the adapter that first announced that peer (used by send()).
    __privateAdd(this, _adaptersByPeer, {});
    // Monotonic counter stamped onto outgoing ephemeral messages.
    __privateAdd(this, _count, 0);
    // Random session ID distinguishing this instance's ephemeral stream.
    __privateAdd(this, _sessionId, Math.random().toString(36).slice(2));
    // "<senderId>:<sessionId>" -> highest ephemeral count seen (dedup filter).
    __privateAdd(this, _ephemeralSessionCounts, {});
    __publicField(this, "adapters", []);
    // Ready only when every adapter reports ready (vacuously true if none).
    __publicField(this, "isReady", () => {
      return this.adapters.every((a) => a.isReady());
    });
    // Resolves when all adapters are ready.
    __publicField(this, "whenReady", async () => {
      return Promise.all(this.adapters.map((a) => a.whenReady()));
    });
    this.peerId = peerId;
    this.peerMetadata = peerMetadata;
    __privateSet(this, _log2, debug(`automerge-repo:network:${this.peerId}`));
    adapters.forEach((a) => this.addNetworkAdapter(a));
  }
  /** Disconnect every adapter. */
  disconnect() {
    this.adapters.forEach((a) => a.disconnect());
  }
  /** Reconnect every adapter under our peer ID. */
  reconnect() {
    this.adapters.forEach((a) => a.connect(this.peerId));
  }
  /**
   * Register an adapter: wire up its events, then connect it once our peer
   * metadata promise resolves.
   */
  addNetworkAdapter(networkAdapter) {
    this.adapters.push(networkAdapter);
    networkAdapter.on("peer-candidate", ({ peerId, peerMetadata }) => {
      __privateGet(this, _log2).call(this, `peer candidate: ${peerId} `);
      // First adapter to announce a peer wins the routing slot for it.
      if (!__privateGet(this, _adaptersByPeer)[peerId]) {
        __privateGet(this, _adaptersByPeer)[peerId] = networkAdapter;
      }
      this.emit("peer", { peerId, peerMetadata });
    });
    networkAdapter.on("peer-disconnected", ({ peerId }) => {
      __privateGet(this, _log2).call(this, `peer disconnected: ${peerId} `);
      delete __privateGet(this, _adaptersByPeer)[peerId];
      this.emit("peer-disconnected", { peerId });
    });
    networkAdapter.on("message", (msg) => {
      if (!isRepoMessage(msg)) {
        __privateGet(this, _log2).call(this, `invalid message: ${JSON.stringify(msg)}`);
        return;
      }
      __privateGet(this, _log2).call(this, `message from ${msg.senderId}`);
      if (isEphemeralMessage(msg)) {
        // Drop duplicates/reordered ephemeral messages: only forward when
        // the count is strictly newer than what we've seen for this source.
        const source = getEphemeralMessageSource(msg);
        if (__privateGet(this, _ephemeralSessionCounts)[source] === void 0 || msg.count > __privateGet(this, _ephemeralSessionCounts)[source]) {
          __privateGet(this, _ephemeralSessionCounts)[source] = msg.count;
          this.emit("message", msg);
        }
        return;
      }
      this.emit("message", msg);
    });
    networkAdapter.on("close", () => {
      __privateGet(this, _log2).call(this, "adapter closed");
      // Remove every peer routed through the closed adapter.
      Object.entries(__privateGet(this, _adaptersByPeer)).forEach(([peerId, other]) => {
        if (other === networkAdapter) {
          delete __privateGet(this, _adaptersByPeer)[peerId];
        }
      });
      this.adapters = this.adapters.filter((a) => a !== networkAdapter);
    });
    this.peerMetadata.then((peerMetadata) => {
      networkAdapter.connect(this.peerId, peerMetadata);
    }).catch((err) => {
      __privateGet(this, _log2).call(this, "error connecting to network", err);
    });
  }
  // TODO: this probably introduces a race condition for the ready event
  // but I plan to refactor that as part of this branch in another patch
  removeNetworkAdapter(networkAdapter) {
    this.adapters = this.adapters.filter((a) => a !== networkAdapter);
    networkAdapter.disconnect();
  }
  /**
   * Send a message to its targetId via the adapter that peer arrived on;
   * silently drops (with a log line) if the peer is unknown. Ephemeral
   * messages get a count/sessionId stamped on first send.
   */
  send(message) {
    const peer = __privateGet(this, _adaptersByPeer)[message.targetId];
    if (!peer) {
      __privateGet(this, _log2).call(this, `Tried to send message but peer not found: ${message.targetId}`);
      return;
    }
    const prepareMessage = (message2) => {
      if (message2.type === "ephemeral") {
        if ("count" in message2) {
          // Already stamped (being relayed) — pass through unchanged.
          return message2;
        } else {
          return {
            ...message2,
            count: ++__privateWrapper(this, _count)._,
            sessionId: __privateGet(this, _sessionId),
            senderId: this.peerId
          };
        }
      } else {
        return {
          ...message2,
          senderId: this.peerId
        };
      }
    };
    const outbound = prepareMessage(message);
    __privateGet(this, _log2).call(this, "sending message %o", outbound);
    peer.send(outbound);
  }
}
// Backing stores for NetworkSubsystem's private fields.
_log2 = new WeakMap();
_adaptersByPeer = new WeakMap();
_count = new WeakMap();
_sessionId = new WeakMap();
_ephemeralSessionCounts = new WeakMap();
/**
 * Concatenates a list of Uint8Arrays into one contiguous Uint8Array.
 * Returns an empty array for an empty input list.
 */
function mergeArrays(myArrays) {
  const totalLength = myArrays.reduce((sum, chunk) => sum + chunk.length, 0);
  const combined = new Uint8Array(totalLength);
  let writePos = 0;
  for (const chunk of myArrays) {
    combined.set(chunk, writePos);
    writePos += chunk.length;
  }
  return combined;
}
// SHA-256 digest of `binary`, rendered as a hex string (used in storage keys).
function keyHash(binary) {
  return bufferToHexString(sha256Exports.hash(binary));
}
// Hash of a set of heads: UTF-8 encode each head, concatenate, then keyHash.
function headsHash(heads) {
  const utf8 = new TextEncoder();
  const encodedHeads = heads.map((head) => utf8.encode(head));
  return keyHash(mergeArrays(encodedHeads));
}
// Renders a byte sequence as lowercase hex, two digits per byte.
function bufferToHexString(data) {
  let hex = "";
  for (const byte of data) {
    hex += byte.toString(16).padStart(2, "0");
  }
  return hex;
}
/**
 * Persistence layer for the Repo: stores Automerge documents as "snapshot"
 * and "incremental" chunks via a pluggable storage adapter, plus generic
 * key/value data and per-peer sync states.
 *
 * Events emitted: "document-loaded", "doc-saved", "doc-compacted"
 * (all with timing info; consumed by Repo as "doc-metrics").
 */
class StorageSubsystem extends EventEmitter {
  constructor(storageAdapter) {
    super();
    __privateAdd(this, _StorageSubsystem_instances);
    /** The storage adapter to use for saving and loading documents */
    __privateAdd(this, _storageAdapter);
    /** Record of the latest heads we've loaded or saved for each document */
    __privateAdd(this, _storedHeads, /* @__PURE__ */ new Map());
    /** Metadata on the chunks we've already loaded for each document */
    __privateAdd(this, _chunkInfos, /* @__PURE__ */ new Map());
    /** Flag to avoid compacting when a compaction is already underway */
    __privateAdd(this, _compacting, false);
    __privateAdd(this, _log3, debug(`automerge-repo:storage-subsystem`));
    __privateSet(this, _storageAdapter, storageAdapter);
  }
  /**
   * Stable identifier for this storage; generated (UUID v4) and persisted
   * under the "storage-adapter-id" key on first use.
   */
  async id() {
    const storedId = await __privateGet(this, _storageAdapter).load(["storage-adapter-id"]);
    let id;
    if (storedId) {
      id = new TextDecoder().decode(storedId);
    } else {
      id = v4();
      await __privateGet(this, _storageAdapter).save(["storage-adapter-id"], new TextEncoder().encode(id));
    }
    return id;
  }
  // ARBITRARY KEY/VALUE STORAGE
  // The `load`, `save`, and `remove` methods are for generic key/value storage, as opposed to
  // Automerge documents. For example, they're used by the LocalFirstAuthProvider to persist the
  // encrypted team graph that encodes group membership and permissions.
  //
  // The namespace parameter is to prevent collisions with other users of the storage subsystem.
  // Typically this will be the name of the plug-in, adapter, or other system that is using it. For
  // example, the LocalFirstAuthProvider uses the namespace `LocalFirstAuthProvider`.
  /** Loads a value from storage. */
  async load(namespace, key) {
    const storageKey = [namespace, key];
    return await __privateGet(this, _storageAdapter).load(storageKey);
  }
  /** Saves a value in storage. */
  async save(namespace, key, data) {
    const storageKey = [namespace, key];
    await __privateGet(this, _storageAdapter).save(storageKey, data);
  }
  /** Removes a value from storage. */
  async remove(namespace, key) {
    const storageKey = [namespace, key];
    await __privateGet(this, _storageAdapter).remove(storageKey);
  }
  // AUTOMERGE DOCUMENT STORAGE
  /**
   * Loads and combines document chunks from storage, with snapshots first.
   */
  async loadDocData(documentId) {
    const snapshotChunks = await __privateGet(this, _storageAdapter).loadRange([
      documentId,
      "snapshot"
    ]);
    const incrementalChunks = await __privateGet(this, _storageAdapter).loadRange([
      documentId,
      "incremental"
    ]);
    const binaries = [];
    const chunkInfos = [];
    // Snapshots must precede incrementals in the merged binary.
    for (const chunk of snapshotChunks) {
      if (chunk.data === void 0)
        continue;
      chunkInfos.push({
        key: chunk.key,
        type: "snapshot",
        size: chunk.data.length
      });
      binaries.push(chunk.data);
    }
    for (const chunk of incrementalChunks) {
      if (chunk.data === void 0)
        continue;
      chunkInfos.push({
        key: chunk.key,
        type: "incremental",
        size: chunk.data.length
      });
      binaries.push(chunk.data);
    }
    // Remember what was loaded so a later compaction knows what to replace.
    __privateGet(this, _chunkInfos).set(documentId, chunkInfos);
    // null signals "no stored data" to loadDoc.
    if (binaries.length === 0) {
      return null;
    }
    return mergeArrays(binaries);
  }
  /**
   * Loads the Automerge document with the given ID from storage.
   */
  async loadDoc(documentId) {
    const binary = await this.loadDocData(documentId);
    if (!binary)
      return null;
    const start = performance.now();
    const newDoc = loadIncremental(init(), binary);
    const end = performance.now();
    this.emit("document-loaded", {
      documentId,
      durationMillis: end - start,
      ...stats(newDoc)
    });
    // Record loaded heads so future saves can diff against them.
    __privateGet(this, _storedHeads).set(documentId, getHeads(newDoc));
    return newDoc;
  }
  /**
   * Saves the provided Automerge document to storage.
   *
   * @remarks
   * Under the hood this makes incremental saves until the incremental size is greater than the
   * snapshot size, at which point the document is compacted into a single snapshot.
   */
  async saveDoc(documentId, doc) {
    // Skip entirely when heads haven't moved since the last save/load.
    if (!__privateMethod(this, _StorageSubsystem_instances, shouldSave_fn).call(this, documentId, doc))
      return;
    const sourceChunks = __privateGet(this, _chunkInfos).get(documentId) ?? [];
    if (__privateMethod(this, _StorageSubsystem_instances, shouldCompact_fn).call(this, sourceChunks)) {
      await __privateMethod(this, _StorageSubsystem_instances, saveTotal_fn).call(this, documentId, doc, sourceChunks);
    } else {
      await __privateMethod(this, _StorageSubsystem_instances, saveIncremental_fn).call(this, documentId, doc);
    }
    __privateGet(this, _storedHeads).set(documentId, getHeads(doc));
  }
  /**
   * Removes the Automerge document with the given ID from storage
   */
  async removeDoc(documentId) {
    await __privateGet(this, _storageAdapter).removeRange([documentId, "snapshot"]);
    await __privateGet(this, _storageAdapter).removeRange([documentId, "incremental"]);
    await __privateGet(this, _storageAdapter).removeRange([documentId, "sync-state"]);
  }
  /**
   * Loads the stored sync state for (documentId, storageId); undefined when
   * absent or when decoding fails (failure is logged, not rethrown).
   */
  async loadSyncState(documentId, storageId) {
    const key = [documentId, "sync-state", storageId];
    try {
      const loaded = await __privateGet(this, _storageAdapter).load(key);
      return loaded ? decodeSyncState(loaded) : void 0;
    } catch (e) {
      __privateGet(this, _log3).call(this, `Error loading sync state for ${documentId} from ${storageId}`);
      return void 0;
    }
  }
  /** Persists the encoded sync state for (documentId, storageId). */
  async saveSyncState(documentId, storageId, syncState) {
    const key = [documentId, "sync-state", storageId];
    await __privateGet(this, _storageAdapter).save(key, encodeSyncState(syncState));
  }
}
// Backing stores for StorageSubsystem's private fields/methods.
_storageAdapter = new WeakMap();
_storedHeads = new WeakMap();
_chunkInfos = new WeakMap();
_compacting = new WeakMap();
_log3 = new WeakMap();
// Brand set for the class's private methods (membership check on access).
_StorageSubsystem_instances = new WeakSet();
/**
 * Saves only the changes made since the last stored heads as an
 * "incremental" chunk, records the chunk's metadata, and advances the
 * stored heads. A "doc-saved" event is emitted with timing either way;
 * nothing is written when there are no new changes.
 */
saveIncremental_fn = async function(documentId, doc) {
  const sinceHeads = __privateGet(this, _storedHeads).get(documentId) ?? [];
  const start = performance.now();
  const binary = saveSince(doc, sinceHeads);
  const end = performance.now();
  this.emit("doc-saved", {
    documentId,
    durationMillis: end - start,
    sinceHeads
  });
  // Guard clause replaces the original `else { return Promise.resolve(); }`,
  // which is redundant inside an async function (it already returns a
  // promise resolving to undefined).
  if (!binary || binary.length === 0) {
    return;
  }
  const key = [documentId, "incremental", keyHash(binary)];
  __privateGet(this, _log3).call(this, `Saving incremental ${key} for document ${documentId}`);
  await __privateGet(this, _storageAdapter).save(key, binary);
  if (!__privateGet(this, _chunkInfos).has(documentId)) {
    __privateGet(this, _chunkInfos).set(documentId, []);
  }
  __privateGet(this, _chunkInfos).get(documentId).push({
    key,
    type: "incremental",
    size: binary.length
  });
  __privateGet(this, _storedHeads).set(documentId, getHeads(doc));
};
/**
 * Compacts the document into a single snapshot chunk: serializes the whole
 * doc, saves it under a heads-hash key, deletes the superseded chunks, and
 * refreshes the in-memory chunk metadata. Emits "doc-compacted" with timing.
 */
saveTotal_fn = async function(documentId, doc, sourceChunks) {
  var _a;
  __privateSet(this, _compacting, true);
  // FIX: reset the compacting flag in `finally` — previously a rejected
  // storage save/remove left #compacting stuck at true, which made
  // shouldCompact_fn return false forever and permanently disabled
  // compaction for this subsystem.
  try {
    const start = performance.now();
    const binary = save(doc);
    const end = performance.now();
    this.emit("doc-compacted", { documentId, durationMillis: end - start });
    const snapshotHash = headsHash(getHeads(doc));
    const key = [documentId, "snapshot", snapshotHash];
    // Every existing chunk is superseded except a snapshot with this hash.
    const oldKeys = new Set(sourceChunks.map((c) => c.key).filter((k) => k[2] !== snapshotHash));
    __privateGet(this, _log3).call(this, `Saving snapshot ${key} for document ${documentId}`);
    __privateGet(this, _log3).call(this, `deleting old chunks ${Array.from(oldKeys)}`);
    // Write the new snapshot before removing the chunks it replaces.
    await __privateGet(this, _storageAdapter).save(key, binary);
    for (const oldKey of oldKeys) {
      await __privateGet(this, _storageAdapter).remove(oldKey);
    }
    const newChunkInfos = ((_a = __privateGet(this, _chunkInfos).get(documentId)) == null ? void 0 : _a.filter((c) => !oldKeys.has(c.key))) ?? [];
    newChunkInfos.push({ key, type: "snapshot", size: binary.length });
    __privateGet(this, _chunkInfos).set(documentId, newChunkInfos);
  } finally {
    __privateSet(this, _compacting, false);
  }
};
/**
 * Returns true if the document has changed since the last time it was saved.
 */
shouldSave_fn = function(documentId, doc) {
  const previousHeads = __privateGet(this, _storedHeads).get(documentId);
  // No record yet — always worth saving.
  if (previousHeads === undefined) {
    return true;
  }
  const currentHeads = getHeads(doc);
  return !headsAreSame(encodeHeads(currentHeads), encodeHeads(previousHeads));
};
/**
 * We only compact if the incremental size is greater than the snapshot size.
 */
shouldCompact_fn = function(sourceChunks) {
  // Never start a second compaction while one is already in flight.
  if (__privateGet(this, _compacting)) {
    return false;
  }
  let snapshotBytes = 0;
  let incrementalBytes = 0;
  for (const { type, size } of sourceChunks) {
    if (type === "snapshot") {
      snapshotBytes += size;
    } else {
      incrementalBytes += size;
    }
  }
  // Tiny documents (< 1 KiB of snapshot) are always compacted.
  return snapshotBytes < 1024 || incrementalBytes >= snapshotBytes;
};
// Throwaway peer ID ("peer-…") for repos constructed without an explicit one.
function randomPeerId() {
  return `peer-${Math.random().toString(36).slice(4)}`;
}
class Repo extends EventEmitter {
|
|
constructor({ storage, network = [], peerId = randomPeerId(), sharePolicy, shareConfig, isEphemeral = storage === void 0, enableRemoteHeadsGossiping = false, denylist = [], saveDebounceRate = 100, idFactory } = {}) {
|
|
super();
|
|
__privateAdd(this, _Repo_instances);
|
|
__privateAdd(this, _log4);
|
|
/** @hidden */
|
|
__publicField(this, "networkSubsystem");
|
|
/** @hidden */
|
|
__publicField(this, "storageSubsystem");
|
|
/** @hidden */
|
|
__privateAdd(this, _saveDebounceRate);
|
|
/** @hidden */
|
|
__privateAdd(this, _saveFn);
|
|
__privateAdd(this, _handleCache, {});
|
|
/** @hidden */
|
|
__publicField(this, "synchronizer");
|
|
__privateAdd(this, _shareConfig, {
|
|
announce: async () => true,
|
|
access: async () => true
|
|
});
|
|
/** maps peer id to to persistence information (storageId, isEphemeral), access by collection synchronizer */
|
|
/** @hidden */
|
|
__publicField(this, "peerMetadataByPeerId", {});
|
|
__privateAdd(this, _remoteHeadsSubscriptions, new RemoteHeadsSubscriptions());
|
|
__privateAdd(this, _remoteHeadsGossipingEnabled, false);
|
|
__privateAdd(this, _progressCache, {});
|
|
__privateAdd(this, _saveFns, {});
|
|
__privateAdd(this, _idFactory);
|
|
__privateAdd(this, _throttledSaveSyncStateHandlers, {});
|
|
__publicField(this, "subscribeToRemotes", (remotes) => {
|
|
if (__privateGet(this, _remoteHeadsGossipingEnabled)) {
|
|
__privateGet(this, _log4).call(this, "subscribeToRemotes", { remotes });
|
|
__privateGet(this, _remoteHeadsSubscriptions).subscribeToRemotes(remotes);
|
|
} else {
|
|
__privateGet(this, _log4).call(this, "WARN: subscribeToRemotes called but remote heads gossiping is not enabled");
|
|
}
|
|
});
|
|
__publicField(this, "storageId", async () => {
|
|
if (!this.storageSubsystem) {
|
|
return void 0;
|
|
} else {
|
|
return this.storageSubsystem.id();
|
|
}
|
|
});
|
|
__privateSet(this, _remoteHeadsGossipingEnabled, enableRemoteHeadsGossiping);
|
|
__privateSet(this, _log4, debug(`automerge-repo:repo`));
|
|
__privateSet(this, _idFactory, idFactory || null);
|
|
if (sharePolicy != null && shareConfig != null) {
|
|
throw new Error("cannot provide both sharePolicy and shareConfig at once");
|
|
}
|
|
if (sharePolicy) {
|
|
__privateSet(this, _shareConfig, {
|
|
announce: sharePolicy,
|
|
access: async () => true
|
|
});
|
|
}
|
|
if (shareConfig) {
|
|
__privateSet(this, _shareConfig, shareConfig);
|
|
}
|
|
this.on("delete-document", ({ documentId }) => {
|
|
this.synchronizer.removeDocument(documentId);
|
|
if (storageSubsystem) {
|
|
storageSubsystem.removeDoc(documentId).catch((err) => {
|
|
__privateGet(this, _log4).call(this, "error deleting document", { documentId, err });
|
|
});
|
|
}
|
|
});
|
|
this.synchronizer = new CollectionSynchronizer(this, denylist);
|
|
this.synchronizer.on("message", (message) => {
|
|
__privateGet(this, _log4).call(this, `sending ${message.type} message to ${message.targetId}`);
|
|
networkSubsystem.send(message);
|
|
});
|
|
this.synchronizer.on("metrics", (event) => this.emit("doc-metrics", event));
|
|
if (__privateGet(this, _remoteHeadsGossipingEnabled)) {
|
|
this.synchronizer.on("open-doc", ({ peerId: peerId2, documentId }) => {
|
|
__privateGet(this, _remoteHeadsSubscriptions).subscribePeerToDoc(peerId2, documentId);
|
|
});
|
|
}
|
|
const storageSubsystem = storage ? new StorageSubsystem(storage) : void 0;
|
|
if (storageSubsystem) {
|
|
storageSubsystem.on("document-loaded", (event) => this.emit("doc-metrics", { type: "doc-loaded", ...event }));
|
|
storageSubsystem.on("doc-compacted", (event) => this.emit("doc-metrics", { type: "doc-compacted", ...event }));
|
|
storageSubsystem.on("doc-saved", (event) => this.emit("doc-metrics", { type: "doc-saved", ...event }));
|
|
}
|
|
this.storageSubsystem = storageSubsystem;
|
|
__privateSet(this, _saveDebounceRate, saveDebounceRate);
|
|
if (this.storageSubsystem) {
|
|
__privateSet(this, _saveFn, ({ handle, doc }) => {
|
|
let fn = __privateGet(this, _saveFns)[handle.documentId];
|
|
if (!fn) {
|
|
fn = throttle(({ doc: doc2, handle: handle2 }) => {
|
|
void this.storageSubsystem.saveDoc(handle2.documentId, doc2);
|
|
}, __privateGet(this, _saveDebounceRate));
|
|
__privateGet(this, _saveFns)[handle.documentId] = fn;
|
|
}
|
|
fn({ handle, doc });
|
|
});
|
|
} else {
|
|
__privateSet(this, _saveFn, () => {
|
|
});
|
|
}
|
|
const myPeerMetadata = (async () => ({
|
|
storageId: await (storageSubsystem == null ? void 0 : storageSubsystem.id()),
|
|
isEphemeral
|
|
}))();
|
|
const networkSubsystem = new NetworkSubsystem(network, peerId, myPeerMetadata);
|
|
this.networkSubsystem = networkSubsystem;
|
|
networkSubsystem.on("peer", async ({ peerId: peerId2, peerMetadata }) => {
|
|
__privateGet(this, _log4).call(this, "peer connected", { peerId: peerId2 });
|
|
if (peerMetadata) {
|
|
this.peerMetadataByPeerId[peerId2] = { ...peerMetadata };
|
|
}
|
|
__privateGet(this, _shareConfig).announce(peerId2).then((shouldShare) => {
|
|
if (shouldShare && __privateGet(this, _remoteHeadsGossipingEnabled)) {
|
|
__privateGet(this, _remoteHeadsSubscriptions).addGenerousPeer(peerId2);
|
|
}
|
|
}).catch((err) => {
|
|
console.log("error in share policy", { err });
|
|
});
|
|
this.synchronizer.addPeer(peerId2);
|
|
});
|
|
networkSubsystem.on("peer-disconnected", ({ peerId: peerId2 }) => {
|
|
this.synchronizer.removePeer(peerId2);
|
|
__privateGet(this, _remoteHeadsSubscriptions).removePeer(peerId2);
|
|
});
|
|
networkSubsystem.on("message", async (msg) => {
|
|
__privateMethod(this, _Repo_instances, receiveMessage_fn).call(this, msg);
|
|
});
|
|
this.synchronizer.on("sync-state", (message) => {
|
|
var _a;
|
|
__privateMethod(this, _Repo_instances, saveSyncState_fn).call(this, message);
|
|
const handle = __privateGet(this, _handleCache)[message.documentId];
|
|
const { storageId } = this.peerMetadataByPeerId[message.peerId] || {};
|
|
if (!storageId) {
|
|
return;
|
|
}
|
|
const heads = (_a = handle.getSyncInfo(storageId)) == null ? void 0 : _a.lastHeads;
|
|
const haveHeadsChanged = message.syncState.theirHeads && (!heads || !headsAreSame(heads, encodeHeads(message.syncState.theirHeads)));
|
|
if (haveHeadsChanged && message.syncState.theirHeads) {
|
|
handle.setSyncInfo(storageId, {
|
|
lastHeads: encodeHeads(message.syncState.theirHeads),
|
|
lastSyncTimestamp: Date.now()
|
|
});
|
|
if (storageId && __privateGet(this, _remoteHeadsGossipingEnabled)) {
|
|
__privateGet(this, _remoteHeadsSubscriptions).handleImmediateRemoteHeadsChanged(message.documentId, storageId, encodeHeads(message.syncState.theirHeads));
|
|
}
|
|
}
|
|
});
|
|
if (__privateGet(this, _remoteHeadsGossipingEnabled)) {
|
|
__privateGet(this, _remoteHeadsSubscriptions).on("notify-remote-heads", (message) => {
|
|
this.networkSubsystem.send({
|
|
type: "remote-heads-changed",
|
|
targetId: message.targetId,
|
|
documentId: message.documentId,
|
|
newHeads: {
|
|
[message.storageId]: {
|
|
heads: message.heads,
|
|
timestamp: message.timestamp
|
|
}
|
|
}
|
|
});
|
|
});
|
|
__privateGet(this, _remoteHeadsSubscriptions).on("change-remote-subs", (message) => {
|
|
__privateGet(this, _log4).call(this, "change-remote-subs", message);
|
|
for (const peer of message.peers) {
|
|
this.networkSubsystem.send({
|
|
type: "remote-subscription-change",
|
|
targetId: peer,
|
|
add: message.add,
|
|
remove: message.remove
|
|
});
|
|
}
|
|
});
|
|
__privateGet(this, _remoteHeadsSubscriptions).on("remote-heads-changed", ({ documentId, storageId, remoteHeads, timestamp }) => {
|
|
const handle = __privateGet(this, _handleCache)[documentId];
|
|
handle.setSyncInfo(storageId, {
|
|
lastHeads: remoteHeads,
|
|
lastSyncTimestamp: timestamp
|
|
});
|
|
});
|
|
}
|
|
}
|
|
/** Returns all the handles we have cached, keyed by documentId. */
get handles() {
  return __privateGet(this, _handleCache);
}
|
|
/** Returns a list of all connected peer ids, as tracked by the synchronizer. */
get peers() {
  return this.synchronizer.peers;
}
|
|
/** The peer id this repo presents to the network subsystem. */
get peerId() {
  return this.networkSubsystem.peerId;
}
|
|
/** @hidden Legacy accessor: the share policy is stored as the `announce` half of the share config. */
get sharePolicy() {
  return __privateGet(this, _shareConfig).announce;
}
|
|
/** @hidden Legacy accessor: replaces only the `announce` half of the share config, leaving `access` untouched. */
set sharePolicy(policy) {
  __privateGet(this, _shareConfig).announce = policy;
}
|
|
/** @hidden Returns the full share config object ({ announce, access }). */
get shareConfig() {
  return __privateGet(this, _shareConfig);
}
|
|
/** @hidden Replaces the full share config object. */
set shareConfig(config) {
  __privateSet(this, _shareConfig, config);
}
|
|
getStorageIdOfPeer(peerId) {
|
|
var _a;
|
|
return (_a = this.peerMetadataByPeerId[peerId]) == null ? void 0 : _a.storageId;
|
|
}
|
|
/**
 * Creates a new document and returns a handle to it. The initial value of the document is an
 * empty object `{}` unless an initial value is provided. Its documentId is generated by the
 * system. we emit a `document` event to advertise interest in the document.
 */
create(initialValue) {
  // Build the initial Automerge doc: from the provided value, or an empty doc
  // with one empty change (so the new doc has at least one head).
  let initialDoc;
  if (initialValue) {
    initialDoc = from(initialValue);
  } else {
    initialDoc = emptyChange(init());
  }
  const { documentId } = parseAutomergeUrl(generateAutomergeUrl());
  const handle = __privateMethod(this, _Repo_instances, getHandle_fn).call(this, {
    documentId
  });
  // Wire storage + sync listeners up before populating, so the first update
  // is observed by those subsystems.
  __privateMethod(this, _Repo_instances, registerHandleWithSubsystems_fn).call(this, handle);
  handle.update(() => {
    return initialDoc;
  });
  handle.doneLoading();
  return handle;
}
|
|
/**
 * Creates a new document and returns a handle to it. The initial value of the
 * document is an empty object `{}` unless an initial value is provided. The
 * main difference between this and Repo.create is that if an `idGenerator`
 * was provided at repo construction, that idGenerator will be used to
 * generate the document ID of the document returned by this method.
 *
 * This is a hidden, experimental API which is subject to change or removal without notice.
 * @hidden
 * @experimental
 */
async create2(initialValue) {
  let initialDoc;
  if (initialValue) {
    initialDoc = from(initialValue);
  } else {
    initialDoc = emptyChange(init());
  }
  // Start with a random id; a user-supplied id factory (fed the initial
  // heads) may override it below.
  let { documentId } = parseAutomergeUrl(generateAutomergeUrl());
  if (__privateGet(this, _idFactory)) {
    const rawDocId = await __privateGet(this, _idFactory).call(this, getHeads(initialDoc));
    documentId = binaryToDocumentId(rawDocId);
  }
  const handle = __privateMethod(this, _Repo_instances, getHandle_fn).call(this, {
    documentId
  });
  __privateMethod(this, _Repo_instances, registerHandleWithSubsystems_fn).call(this, handle);
  handle.update(() => {
    return initialDoc;
  });
  handle.doneLoading();
  return handle;
}
|
|
/** Create a new DocHandle by cloning the history of an existing DocHandle.
 *
 * @param clonedHandle - The handle to clone
 *
 * @remarks This is a wrapper around the `clone` function in the Automerge library.
 * The new `DocHandle` will have a new URL but will share history with the original,
 * which means that changes made to the cloned handle can be sensibly merged back
 * into the original.
 *
 * Any peers this `Repo` is connected to for whom `sharePolicy` returns `true` will
 * be notified of the newly created DocHandle.
 *
 */
clone(clonedHandle) {
  // Cloning requires the source doc to be fully loaded.
  if (!clonedHandle.isReady()) {
    throw new Error(`Cloned handle is not yet in ready state.
(Try await handle.whenReady() first.)`);
  }
  const sourceDoc = clonedHandle.doc();
  const handle = this.create();
  // clone() gives the new handle an independent copy that still shares history.
  handle.update(() => {
    return clone(sourceDoc);
  });
  return handle;
}
|
|
/**
 * Resolves a document while reporting loading progress synchronously.
 * Returns either a terminal result ({ state: "ready" | "unavailable" |
 * "failed", handle, error? }) when the handle has already settled, or a
 * subscribable loading result ({ state: "loading", progress, handle, peek,
 * subscribe }) otherwise.
 *
 * @param id - an Automerge URL or raw document id
 * @param options - optional { signal } used to abort the load
 */
findWithProgress(id, options = {}) {
  const { signal } = options;
  // URLs may pin specific heads (a fixed historical view); raw ids never do.
  const { documentId, heads } = isValidAutomergeUrl(id) ? parseAutomergeUrl(id) : { documentId: interpretAsDocumentId(id), heads: void 0 };
  // Fast path: a cached handle in a settled state yields a terminal result.
  if (__privateGet(this, _handleCache)[documentId]) {
    const handle2 = __privateGet(this, _handleCache)[documentId];
    if (handle2.state === UNAVAILABLE) {
      const result2 = {
        state: "unavailable",
        error: new Error(`Document ${id} is unavailable`),
        handle: handle2
      };
      return result2;
    }
    if (handle2.state === DELETED) {
      const result2 = {
        state: "failed",
        error: new Error(`Document ${id} was deleted`),
        handle: handle2
      };
      return result2;
    }
    if (handle2.state === READY) {
      const result2 = {
        state: "ready",
        // If the url pinned heads, hand back a fixed view at those heads.
        handle: heads ? handle2.view(heads) : handle2
      };
      return result2;
    }
  }
  // Reuse an in-flight progress result for this document, so concurrent
  // callers don't kick off duplicate loads.
  const cachedProgress = __privateGet(this, _progressCache)[documentId];
  if (cachedProgress) {
    const handle2 = __privateGet(this, _handleCache)[documentId];
    if (handle2 && (handle2.state === READY || handle2.state === UNAVAILABLE || handle2.state === DELETED || handle2.state === "loading")) {
      return cachedProgress;
    }
  }
  const handle = __privateMethod(this, _Repo_instances, getHandle_fn).call(this, { documentId });
  const initial = {
    state: "loading",
    progress: 0,
    handle
  };
  // A tiny hand-rolled observable: notify() fans out the latest progress to
  // subscribers and mirrors it into the repo-level progress cache.
  const progressSignal = {
    subscribers: /* @__PURE__ */ new Set(),
    currentProgress: void 0,
    notify: (progress) => {
      progressSignal.currentProgress = progress;
      progressSignal.subscribers.forEach((callback) => callback(progress));
      __privateGet(this, _progressCache)[documentId] = progress;
    },
    peek: () => progressSignal.currentProgress || initial,
    subscribe: (callback) => {
      progressSignal.subscribers.add(callback);
      // Returns an unsubscribe function.
      return () => progressSignal.subscribers.delete(callback);
    }
  };
  progressSignal.notify(initial);
  // Kick off the async load (intentionally not awaited). The never-resolving
  // promise is raced inside the loader purely as an abort hook: it only
  // settles (rejects) when `signal` fires.
  void __privateMethod(this, _Repo_instances, loadDocumentWithProgress_fn).call(this, id, documentId, handle, progressSignal, signal ? abortable(new Promise(() => {
  }), signal) : new Promise(() => {
  }));
  const result = {
    ...initial,
    peek: progressSignal.peek,
    subscribe: progressSignal.subscribe
  };
  __privateGet(this, _progressCache)[documentId] = result;
  return result;
}
|
|
/**
 * Retrieves a document by id, resolving once the handle reaches one of the
 * `allowableStates` (default: ["ready"]). Rejects if the document turns out
 * to be unavailable, the load fails, or `signal` was already aborted.
 *
 * @param id - Automerge URL or document id
 * @param options - { allowableStates?, signal? }
 */
async find(id, options = {}) {
  const { allowableStates = ["ready"], signal } = options;
  if (signal == null ? void 0 : signal.aborted) {
    throw new AbortError();
  }
  const progress = this.findWithProgress(id, { signal });
  // A "subscribe" key means the document is still loading; otherwise we got
  // a terminal result object back.
  if ("subscribe" in progress) {
    __privateMethod(this, _Repo_instances, registerHandleWithSubsystems_fn).call(this, progress.handle);
    return new Promise((resolve, reject) => {
      const unsubscribe = progress.subscribe((state) => {
        if (allowableStates.includes(state.handle.state)) {
          unsubscribe();
          resolve(state.handle);
        } else if (state.state === "unavailable") {
          unsubscribe();
          reject(new Error(`Document ${id} is unavailable`));
        } else if (state.state === "failed") {
          unsubscribe();
          reject(state.error);
        }
      });
    });
  } else {
    if (progress.handle.state === READY) {
      return progress.handle;
    }
    await progress.handle.whenReady([READY, UNAVAILABLE]);
    // NOTE(review): compares against the string "unavailable" here while the
    // line above uses the UNAVAILABLE constant — presumably the same value;
    // confirm against the handle state definitions.
    if (progress.handle.state === "unavailable" && !allowableStates.includes(UNAVAILABLE)) {
      throw new Error(`Document ${id} is unavailable`);
    }
    return progress.handle;
  }
}
|
|
/**
 * Retrieves a document by id. It gets data from the local system, but also emits a `document`
 * event to advertise interest in the document.
 *
 * @param id - Automerge URL or document id
 * @param options - { allowableStates?, signal? }; when `allowableStates` is
 *   provided, an unavailable document is returned instead of throwing.
 */
async findClassic(id, options = {}) {
  const documentId = interpretAsDocumentId(id);
  const { allowableStates, signal } = options;
  // abortable() rejects the wrapped work when `signal` fires.
  return abortable((async () => {
    const handle = await __privateMethod(this, _Repo_instances, loadDocument_fn).call(this, documentId);
    if (!allowableStates) {
      await handle.whenReady([READY, UNAVAILABLE]);
      // Suppress the unavailable error if the caller already aborted.
      if (handle.state === UNAVAILABLE && !(signal == null ? void 0 : signal.aborted)) {
        throw new Error(`Document ${id} is unavailable`);
      }
    }
    return handle;
  })(), signal);
}
|
|
/**
 * Deletes a document: marks its handle deleted, evicts all repo-level caches
 * for it, and emits `delete-document` (whose listener, attached in the
 * constructor, also removes the doc from storage and the synchronizer).
 *
 * @param id - Automerge URL or document id
 */
delete(id) {
  const documentId = interpretAsDocumentId(id);
  const handle = __privateMethod(this, _Repo_instances, getHandle_fn).call(this, { documentId });
  handle.delete();
  delete __privateGet(this, _handleCache)[documentId];
  delete __privateGet(this, _progressCache)[documentId];
  delete __privateGet(this, _saveFns)[documentId];
  this.emit("delete-document", { documentId });
}
|
|
/**
 * Exports a document to a binary format.
 * @param id - The url or documentId of the handle to export
 *
 * @returns Promise<Uint8Array | undefined> - A Promise containing the binary document,
 * or undefined if the document is unavailable.
 * NOTE(review): `find` rejects (throws) for unavailable documents rather than
 * resolving — confirm the documented `undefined` case is actually reachable.
 */
async export(id) {
  const documentId = interpretAsDocumentId(id);
  const handle = await this.find(documentId);
  const doc = handle.doc();
  return save(doc);
}
|
|
/**
 * Imports document binary into the repo.
 * @param binary - The binary to import
 * @param args - Optional argument specifying what document ID to import into,
 * if at all possible avoid using this, see the remarks below
 *
 * @remarks
 * If no document ID is provided, a new document will be created. When
 * specifying the document ID it is important to ensure that two documents using
 * the same ID share the same history - i.e. don't create a document with the
 * same ID on unrelated processes that have never communicated with each
 * other. If you need to ship around a bunch of documents with their IDs
 * consider using the `automerge-repo-bundles` package which provides a
 * serialization format for documents and IDs and handles the boilerplate of
 * importing and exporting these bundles.
 */
import(binary, args) {
  const docId = args == null ? void 0 : args.docId;
  if (docId != null) {
    // Importing into a known id: apply the binary as incremental changes on
    // top of whatever the handle already holds.
    const handle = __privateMethod(this, _Repo_instances, getHandle_fn).call(this, { documentId: docId });
    handle.update((doc) => {
      return loadIncremental(doc, binary);
    });
    __privateMethod(this, _Repo_instances, registerHandleWithSubsystems_fn).call(this, handle);
    return handle;
  } else {
    // Fresh id: load the full document and clone it into a new handle.
    const doc = load(binary);
    const handle = this.create();
    handle.update(() => {
      return clone(doc);
    });
    return handle;
  }
}
|
|
/**
|
|
* Writes Documents to a disk.
|
|
* @hidden this API is experimental and may change.
|
|
* @param documents - if provided, only writes the specified documents.
|
|
* @returns Promise<void>
|
|
*/
|
|
async flush(documents) {
|
|
if (!this.storageSubsystem) {
|
|
return;
|
|
}
|
|
const handles = documents ? documents.map((id) => __privateGet(this, _handleCache)[id]) : Object.values(__privateGet(this, _handleCache));
|
|
await Promise.all(handles.map(async (handle) => {
|
|
return this.storageSubsystem.saveDoc(handle.documentId, handle.doc());
|
|
}));
|
|
}
|
|
/**
 * Removes a DocHandle from the handleCache.
 * @hidden this API is experimental and may change.
 * @param documentId - documentId of the DocHandle to remove from handleCache, if present in cache.
 * @returns Promise<void>
 */
async removeFromCache(documentId) {
  if (!__privateGet(this, _handleCache)[documentId]) {
    __privateGet(this, _log4).call(this, `WARN: removeFromCache called but handle not found in handleCache for documentId: ${documentId}`);
    return;
  }
  const handle = __privateMethod(this, _Repo_instances, getHandle_fn).call(this, { documentId });
  // Wait until the handle settles before attempting to unload it.
  await handle.whenReady([READY, UNLOADED, DELETED, UNAVAILABLE]);
  const doc = handle.doc();
  if (doc) {
    if (handle.isReady()) {
      handle.unload();
    } else {
      __privateGet(this, _log4).call(this, `WARN: removeFromCache called but handle for documentId: ${documentId} in unexpected state: ${handle.state}`);
    }
    // Evict every repo-level cache entry and stop syncing this document.
    delete __privateGet(this, _handleCache)[documentId];
    delete __privateGet(this, _progressCache)[documentId];
    delete __privateGet(this, _saveFns)[documentId];
    this.synchronizer.removeDocument(documentId);
  } else {
    __privateGet(this, _log4).call(this, `WARN: removeFromCache called but doc undefined for documentId: ${documentId}`);
  }
}
|
|
/**
 * Disconnects every network adapter, then flushes all cached documents to
 * storage. Returns the flush promise so callers can await a clean shutdown.
 */
shutdown() {
  this.networkSubsystem.adapters.forEach((adapter) => {
    adapter.disconnect();
  });
  return this.flush();
}
|
|
/** Returns per-document sync metrics collected by the synchronizer. */
metrics() {
  return { documents: this.synchronizer.metrics() };
}
|
|
/** Re-evaluates which documents should be shared with peers after the share config changes (fire-and-forget). */
shareConfigChanged() {
  void this.synchronizer.reevaluateDocumentShare();
}
|
|
}
|
|
// Backing stores for Repo's transpiled private fields (WeakMaps keyed by
// instance) and the WeakSet that brands instances for private-method access.
_log4 = new WeakMap();
_saveDebounceRate = new WeakMap();
_saveFn = new WeakMap();
_handleCache = new WeakMap();
_shareConfig = new WeakMap();
_remoteHeadsSubscriptions = new WeakMap();
_remoteHeadsGossipingEnabled = new WeakMap();
_progressCache = new WeakMap();
_saveFns = new WeakMap();
_idFactory = new WeakMap();
_Repo_instances = new WeakSet();
|
|
// The `document` event is fired by the DocCollection any time we create a new document or look
// up a document by ID. We listen for it in order to wire up storage and network synchronization.
registerHandleWithSubsystems_fn = function(handle) {
  if (this.storageSubsystem) {
    // Attach the debounced save listener at most once per handle.
    const existingListeners = handle.listeners("heads-changed");
    if (!existingListeners.some((listener) => listener === __privateGet(this, _saveFn))) {
      handle.on("heads-changed", __privateGet(this, _saveFn));
    }
  }
  this.synchronizer.addDocument(handle);
};
|
|
// Dispatches an incoming network message to the appropriate subsystem:
// gossip control/heads messages go to the remote-heads subscriptions (only
// when gossiping is enabled); sync-protocol messages go to the synchronizer.
// Unknown message types are ignored.
receiveMessage_fn = function(message) {
  switch (message.type) {
    case "remote-subscription-change":
      if (__privateGet(this, _remoteHeadsGossipingEnabled)) {
        __privateGet(this, _remoteHeadsSubscriptions).handleControlMessage(message);
      }
      break;
    case "remote-heads-changed":
      if (__privateGet(this, _remoteHeadsGossipingEnabled)) {
        __privateGet(this, _remoteHeadsSubscriptions).handleRemoteHeads(message);
      }
      break;
    case "sync":
    case "request":
    case "ephemeral":
    case "doc-unavailable":
      this.synchronizer.receiveMessage(message).catch((err) => {
        // Fix: this is an error path — report via console.error (stderr)
        // rather than console.log (stdout).
        console.error("error receiving message", { err, message });
      });
  }
};
|
|
// Per-instance map of storageId -> throttled sync-state writer.
_throttledSaveSyncStateHandlers = new WeakMap();
/** saves sync state throttled per storage id, if a peer doesn't have a storage id it's sync state is not persisted */
saveSyncState_fn = function(payload) {
  if (!this.storageSubsystem) {
    return;
  }
  const { storageId, isEphemeral } = this.peerMetadataByPeerId[payload.peerId] || {};
  // Ephemeral peers (no durable storage) never have their sync state saved.
  if (!storageId || isEphemeral) {
    return;
  }
  // Lazily create one throttled writer per storageId so bursts of updates
  // collapse into a single storage write per debounce window.
  let handler = __privateGet(this, _throttledSaveSyncStateHandlers)[storageId];
  if (!handler) {
    handler = __privateGet(this, _throttledSaveSyncStateHandlers)[storageId] = throttle(({ documentId, syncState }) => {
      void this.storageSubsystem.saveSyncState(documentId, storageId, syncState);
    }, __privateGet(this, _saveDebounceRate));
  }
  handler(payload);
};
|
|
/** Returns an existing handle if we have it; creates one otherwise. */
getHandle_fn = function({ documentId }) {
  // Serve from the cache when possible.
  const cached = __privateGet(this, _handleCache)[documentId];
  if (cached) {
    return cached;
  }
  // Guard against a missing/empty id before creating anything.
  if (!documentId) {
    throw new Error(`Invalid documentId ${documentId}`);
  }
  const freshHandle = new DocHandle(documentId);
  __privateGet(this, _handleCache)[documentId] = freshHandle;
  return freshHandle;
};
|
|
// Drives a progress-reporting document load: storage first, then the network,
// notifying `progressSignal` at each stage and on terminal states.
// `abortPromise` rejects when the caller's AbortSignal fires; it is raced
// against every await so cancellation is observed promptly.
loadDocumentWithProgress_fn = async function(id, documentId, handle, progressSignal, abortPromise) {
  try {
    progressSignal.notify({
      state: "loading",
      progress: 25,
      handle
    });
    // Fix: do NOT await the storage load here. The original awaited it before
    // the race, which made `Promise.race([loadingPromise, abortPromise])` a
    // no-op — an abort could never interrupt a slow storage read.
    const loadingPromise = this.storageSubsystem ? this.storageSubsystem.loadDoc(handle.documentId) : Promise.resolve(null);
    const loadedDoc = await Promise.race([loadingPromise, abortPromise]);
    if (loadedDoc) {
      handle.update(() => loadedDoc);
      handle.doneLoading();
      progressSignal.notify({
        state: "loading",
        progress: 50,
        handle
      });
    } else {
      // Nothing in storage: wait for the network, then request from peers.
      await Promise.race([this.networkSubsystem.whenReady(), abortPromise]);
      handle.request();
      progressSignal.notify({
        state: "loading",
        progress: 75,
        handle
      });
    }
    __privateMethod(this, _Repo_instances, registerHandleWithSubsystems_fn).call(this, handle);
    await Promise.race([handle.whenReady([READY, UNAVAILABLE]), abortPromise]);
    if (handle.state === UNAVAILABLE) {
      const unavailableProgress = {
        state: "unavailable",
        handle
      };
      progressSignal.notify(unavailableProgress);
      return;
    }
    if (handle.state === DELETED) {
      throw new Error(`Document ${id} was deleted`);
    }
    progressSignal.notify({ state: "ready", handle });
  } catch (error) {
    // All failures (including aborts) surface as a "failed" progress state.
    progressSignal.notify({
      state: "failed",
      error: (
        // In most JS environments DOMException extends Error, but not always, in some environments it's a separate type.
        // Some Node.js DOM polyfills do not always extend the Error
        // Jsdom polyfill doesn't extend Error, whereas happy-dom does.
        error instanceof Error || error instanceof DOMException ? error : new Error(String(error))
      ),
      handle: __privateMethod(this, _Repo_instances, getHandle_fn).call(this, { documentId })
    });
  }
};
|
|
// Loads a document handle: returns the cached handle if present; otherwise
// loads the doc from storage, or — when storage has nothing — waits for the
// network and requests it from peers. Always registers the handle with the
// storage/sync subsystems before returning.
loadDocument_fn = async function(documentId) {
  if (__privateGet(this, _handleCache)[documentId]) {
    return __privateGet(this, _handleCache)[documentId];
  }
  const handle = __privateMethod(this, _Repo_instances, getHandle_fn).call(this, { documentId });
  const loadedDoc = await (this.storageSubsystem ? this.storageSubsystem.loadDoc(handle.documentId) : Promise.resolve(null));
  if (loadedDoc) {
    handle.update(() => loadedDoc);
    handle.doneLoading();
  } else {
    await this.networkSubsystem.whenReady();
    handle.request();
  }
  __privateMethod(this, _Repo_instances, registerHandleWithSubsystems_fn).call(this, handle);
  return handle;
};
|
|
// Abstract base for network adapters: an EventEmitter that carries the local
// peerId and peerMetadata, which subclasses assign in their connect() method.
class NetworkAdapter extends EventEmitter {
  constructor() {
    super(...arguments);
    // Both fields are undefined until connect() is called on a subclass.
    __publicField(this, "peerId");
    __publicField(this, "peerMetadata");
  }
}
|
|
/**
 * A NetworkAdapter that connects same-origin tabs/workers over a
 * BroadcastChannel. Peers announce themselves with an "arrive" message,
 * answer with a targeted "welcome", and broadcast "leave" on disconnect;
 * every other message type is re-emitted as a repo-level "message" event.
 */
class BroadcastChannelNetworkAdapter extends NetworkAdapter {
  /**
   * @param options - optional { channelName, peerWaitMs }; defaults to the
   *   channel name "broadcast" and a 1s readiness grace period.
   */
  constructor(options) {
    super();
    __privateAdd(this, _BroadcastChannelNetworkAdapter_instances);
    __privateAdd(this, _broadcastChannel);
    __privateAdd(this, _disconnected, false);
    __privateAdd(this, _ready, false);
    // reassigned in constructor, but keeps TS from complaining
    __privateAdd(this, _markReady, () => {
    });
    __privateAdd(this, _readyPromise);
    __privateAdd(this, _options);
    __privateAdd(this, _connectedPeers, []);
    __privateSet(this, _options, {
      channelName: "broadcast",
      peerWaitMs: 1e3,
      ...options ?? {}
    });
    __privateSet(this, _broadcastChannel, new BroadcastChannel(__privateGet(this, _options).channelName));
    // Readiness: resolves when the first peer is announced, or after
    // peerWaitMs elapses — whichever comes first.
    __privateSet(this, _readyPromise, new Promise((resolve) => {
      __privateSet(this, _markReady, () => {
        __privateSet(this, _ready, true);
        resolve();
      });
      setTimeout(() => __privateGet(this, _markReady).call(this), __privateGet(this, _options).peerWaitMs);
    }));
  }
  /** True once a peer has been announced or the wait timeout has elapsed. */
  isReady() {
    return __privateGet(this, _ready);
  }
  /** Resolves when the adapter becomes ready (see isReady). */
  whenReady() {
    return __privateGet(this, _readyPromise);
  }
  /** Joins the channel as `peerId`, starts handling messages, and announces our arrival. */
  connect(peerId, peerMetadata) {
    this.peerId = peerId;
    this.peerMetadata = peerMetadata;
    __privateSet(this, _disconnected, false);
    __privateGet(this, _broadcastChannel).addEventListener("message", (e) => {
      const message = e.data;
      // Ignore messages addressed to a different peer.
      if ("targetId" in message && message.targetId !== this.peerId) {
        return;
      }
      // Once disconnected, drop everything.
      if (__privateGet(this, _disconnected)) {
        return;
      }
      const { senderId, type } = message;
      switch (type) {
        case "arrive":
          {
            // A new peer showed up: welcome it directly, then record it.
            const { peerMetadata: peerMetadata2 } = message;
            __privateGet(this, _broadcastChannel).postMessage({
              senderId: this.peerId,
              targetId: senderId,
              type: "welcome",
              peerMetadata: this.peerMetadata
            });
            __privateMethod(this, _BroadcastChannelNetworkAdapter_instances, announceConnection_fn).call(this, senderId, peerMetadata2);
          }
          break;
        case "welcome":
          {
            const { peerMetadata: peerMetadata2 } = message;
            __privateMethod(this, _BroadcastChannelNetworkAdapter_instances, announceConnection_fn).call(this, senderId, peerMetadata2);
          }
          break;
        case "leave":
          __privateSet(this, _connectedPeers, __privateGet(this, _connectedPeers).filter((p) => p !== senderId));
          this.emit("peer-disconnected", { peerId: senderId });
          break;
        default:
          // Anything else is a repo-level message. Binary payloads travel as
          // ArrayBuffers over the channel and are re-wrapped as Uint8Array.
          if (!("data" in message)) {
            this.emit("message", message);
          } else {
            if (!message.data) {
              throw new Error("We got a message without data, we can't send this.");
            }
            const data = message.data;
            this.emit("message", {
              ...message,
              data: new Uint8Array(data)
            });
          }
          break;
      }
    });
    __privateGet(this, _broadcastChannel).postMessage({
      senderId: this.peerId,
      type: "arrive",
      peerMetadata
    });
  }
  /**
   * Posts a message onto the channel. Binary payloads are copied into a plain
   * ArrayBuffer slice so they survive structured cloning.
   * NOTE(review): returns false when disconnected but undefined otherwise —
   * callers should not rely on the return value.
   */
  send(message) {
    if (__privateGet(this, _disconnected)) {
      return false;
    }
    if ("data" in message) {
      __privateGet(this, _broadcastChannel).postMessage({
        ...message,
        data: message.data ? message.data.buffer.slice(message.data.byteOffset, message.data.byteOffset + message.data.byteLength) : void 0
      });
    } else {
      __privateGet(this, _broadcastChannel).postMessage(message);
    }
  }
  /** Broadcasts "leave", emits peer-disconnected for each known peer, and stops processing further messages. */
  disconnect() {
    __privateGet(this, _broadcastChannel).postMessage({
      senderId: this.peerId,
      type: "leave"
    });
    for (const peerId of __privateGet(this, _connectedPeers)) {
      this.emit("peer-disconnected", { peerId });
    }
    __privateSet(this, _disconnected, true);
  }
}
|
|
// Backing WeakMaps/WeakSet for BroadcastChannelNetworkAdapter's transpiled
// private fields and private methods.
_broadcastChannel = new WeakMap();
_disconnected = new WeakMap();
_ready = new WeakMap();
_markReady = new WeakMap();
_readyPromise = new WeakMap();
_options = new WeakMap();
_connectedPeers = new WeakMap();
_BroadcastChannelNetworkAdapter_instances = new WeakSet();
|
|
// Records a newly-seen peer: marks the adapter ready, tracks the peer id,
// and surfaces it to the network subsystem as a "peer-candidate".
announceConnection_fn = function(peerId, peerMetadata) {
  __privateGet(this, _markReady).call(this);
  __privateGet(this, _connectedPeers).push(peerId);
  this.emit("peer-candidate", { peerId, peerMetadata });
};
|
|
/**
 * A storage adapter backed by IndexedDB. Values are stored as
 * { key, binary } records, keyed by the key array itself; range operations
 * build an IDBKeyRange from the prefix up to prefix + "\uFFFF".
 */
class IndexedDBStorageAdapter {
  /** Create a new {@link IndexedDBStorageAdapter}.
   * @param database - The name of the database to use. Defaults to "automerge".
   * @param store - The name of the object store to use. Defaults to "documents".
   */
  constructor(database = "automerge", store = "documents") {
    __publicField(this, "database");
    __publicField(this, "store");
    // Promise resolving to the open IDBDatabase; every operation awaits it.
    __publicField(this, "dbPromise");
    this.database = database;
    this.store = store;
    this.dbPromise = this.createDatabasePromise();
  }
  // Opens (and on first use creates) the database and object store at version 1.
  createDatabasePromise() {
    return new Promise((resolve, reject) => {
      const request = indexedDB.open(this.database, 1);
      request.onerror = () => {
        reject(request.error);
      };
      request.onupgradeneeded = (event) => {
        const db = event.target.result;
        db.createObjectStore(this.store);
      };
      request.onsuccess = (event) => {
        const db = event.target.result;
        resolve(db);
      };
    });
  }
  /** Loads the binary stored under `keyArray`, or undefined if absent. */
  async load(keyArray) {
    const db = await this.dbPromise;
    const transaction = db.transaction(this.store);
    const objectStore = transaction.objectStore(this.store);
    const request = objectStore.get(keyArray);
    return new Promise((resolve, reject) => {
      transaction.onerror = () => {
        // NOTE(review): rejects with request.error while save/remove use
        // transaction.error — presumably equivalent here; confirm.
        reject(request.error);
      };
      request.onsuccess = (event) => {
        const result = event.target.result;
        // Records are stored as { key, binary }; anything else counts as missing.
        if (result && typeof result === "object" && "binary" in result) {
          resolve(result.binary);
        } else {
          resolve(void 0);
        }
      };
    });
  }
  /** Stores `binary` under `keyArray`, overwriting any existing record. */
  async save(keyArray, binary) {
    const db = await this.dbPromise;
    const transaction = db.transaction(this.store, "readwrite");
    const objectStore = transaction.objectStore(this.store);
    objectStore.put({ key: keyArray, binary }, keyArray);
    return new Promise((resolve, reject) => {
      transaction.onerror = () => {
        reject(transaction.error);
      };
      transaction.oncomplete = () => {
        resolve();
      };
    });
  }
  /** Removes the record stored under `keyArray` (a no-op if absent). */
  async remove(keyArray) {
    const db = await this.dbPromise;
    const transaction = db.transaction(this.store, "readwrite");
    const objectStore = transaction.objectStore(this.store);
    objectStore.delete(keyArray);
    return new Promise((resolve, reject) => {
      transaction.onerror = () => {
        reject(transaction.error);
      };
      transaction.oncomplete = () => {
        resolve();
      };
    });
  }
  /** Loads all { data, key } records whose key starts with `keyPrefix`, via a cursor scan. */
  async loadRange(keyPrefix) {
    const db = await this.dbPromise;
    const lowerBound = keyPrefix;
    // "\uFFFF" sorts after every other string component, bounding the prefix range.
    const upperBound = [...keyPrefix, "\uFFFF"];
    const range = IDBKeyRange.bound(lowerBound, upperBound);
    const transaction = db.transaction(this.store);
    const objectStore = transaction.objectStore(this.store);
    const request = objectStore.openCursor(range);
    const result = [];
    return new Promise((resolve, reject) => {
      transaction.onerror = () => {
        reject(request.error);
      };
      request.onsuccess = (event) => {
        const cursor = event.target.result;
        if (cursor) {
          result.push({
            data: cursor.value.binary,
            key: cursor.key
          });
          cursor.continue();
        } else {
          // Cursor exhausted: all matching records collected.
          resolve(result);
        }
      };
    });
  }
  /** Deletes every record whose key starts with `keyPrefix`. */
  async removeRange(keyPrefix) {
    const db = await this.dbPromise;
    const lowerBound = keyPrefix;
    const upperBound = [...keyPrefix, "\uFFFF"];
    const range = IDBKeyRange.bound(lowerBound, upperBound);
    const transaction = db.transaction(this.store, "readwrite");
    const objectStore = transaction.objectStore(this.store);
    objectStore.delete(range);
    return new Promise((resolve, reject) => {
      transaction.onerror = () => {
        reject(transaction.error);
      };
      transaction.oncomplete = () => {
        resolve();
      };
    });
  }
}
|
|
// CommonJS-interop scaffolding for the bundled `ws` buffer utilities: the
// module-exports object plus lazy-initialization guards used by the
// require* shims below.
var bufferUtil = { exports: {} };
var constants;
var hasRequiredConstants;
|
|
// Lazily initializes and returns the bundled ws `constants` module (binary
// types, sentinel symbols, shared buffers). Subsequent calls return the
// cached object.
function requireConstants() {
  if (hasRequiredConstants) return constants;
  hasRequiredConstants = 1;
  const BINARY_TYPES = ["nodebuffer", "arraybuffer", "fragments"];
  // Blob is optional: not every runtime provides it.
  const hasBlob = typeof Blob !== "undefined";
  if (hasBlob) BINARY_TYPES.push("blob");
  constants = {
    BINARY_TYPES,
    CLOSE_TIMEOUT: 3e4,
    EMPTY_BUFFER: Buffer.alloc(0),
    GUID: "258EAFA5-E914-47DA-95CA-C5AB0DC85B11",
    hasBlob,
    kForOnEventAttribute: /* @__PURE__ */ Symbol("kIsForOnEventAttribute"),
    kListener: /* @__PURE__ */ Symbol("kListener"),
    kStatusCode: /* @__PURE__ */ Symbol("status-code"),
    kWebSocket: /* @__PURE__ */ Symbol("websocket"),
    NOOP: () => {
    }
  };
  return constants;
}
|
|
// Vite's stand-in for the optional `bufferutil` peer dependency of ws: an
// empty frozen module namespace, so the try/require in requireBufferUtil
// falls back to the pure-JS mask/unmask implementations.
const __viteOptionalPeerDep_bufferutil_ws_true = {};
const __viteOptionalPeerDep_bufferutil_ws_true$1 = /* @__PURE__ */ Object.freeze(/* @__PURE__ */ Object.defineProperty({
  __proto__: null,
  default: __viteOptionalPeerDep_bufferutil_ws_true
}, Symbol.toStringTag, { value: "Module" }));
const require$$1 = /* @__PURE__ */ getAugmentedNamespace(__viteOptionalPeerDep_bufferutil_ws_true$1);
// Lazy-initialization guard for requireBufferUtil.
var hasRequiredBufferUtil;
|
|
function requireBufferUtil() {
|
|
if (hasRequiredBufferUtil) return bufferUtil.exports;
|
|
hasRequiredBufferUtil = 1;
|
|
const { EMPTY_BUFFER } = requireConstants();
|
|
const FastBuffer = Buffer[Symbol.species];
|
|
function concat(list, totalLength) {
|
|
if (list.length === 0) return EMPTY_BUFFER;
|
|
if (list.length === 1) return list[0];
|
|
const target = Buffer.allocUnsafe(totalLength);
|
|
let offset = 0;
|
|
for (let i = 0; i < list.length; i++) {
|
|
const buf = list[i];
|
|
target.set(buf, offset);
|
|
offset += buf.length;
|
|
}
|
|
if (offset < totalLength) {
|
|
return new FastBuffer(target.buffer, target.byteOffset, offset);
|
|
}
|
|
return target;
|
|
}
|
|
function _mask(source, mask, output, offset, length) {
|
|
for (let i = 0; i < length; i++) {
|
|
output[offset + i] = source[i] ^ mask[i & 3];
|
|
}
|
|
}
|
|
function _unmask(buffer, mask) {
|
|
for (let i = 0; i < buffer.length; i++) {
|
|
buffer[i] ^= mask[i & 3];
|
|
}
|
|
}
|
|
function toArrayBuffer2(buf) {
|
|
if (buf.length === buf.buffer.byteLength) {
|
|
return buf.buffer;
|
|
}
|
|
return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.length);
|
|
}
|
|
function toBuffer(data) {
|
|
toBuffer.readOnly = true;
|
|
if (Buffer.isBuffer(data)) return data;
|
|
let buf;
|
|
if (data instanceof ArrayBuffer) {
|
|
buf = new FastBuffer(data);
|
|
} else if (ArrayBuffer.isView(data)) {
|
|
buf = new FastBuffer(data.buffer, data.byteOffset, data.byteLength);
|
|
} else {
|
|
buf = Buffer.from(data);
|
|
toBuffer.readOnly = false;
|
|
}
|
|
return buf;
|
|
}
|
|
bufferUtil.exports = {
|
|
concat,
|
|
mask: _mask,
|
|
toArrayBuffer: toArrayBuffer2,
|
|
toBuffer,
|
|
unmask: _unmask
|
|
};
|
|
if (!process.env.WS_NO_BUFFER_UTIL) {
|
|
try {
|
|
const bufferUtil$1 = require$$1;
|
|
bufferUtil.exports.mask = function(source, mask, output, offset, length) {
|
|
if (length < 48) _mask(source, mask, output, offset, length);
|
|
else bufferUtil$1.mask(source, mask, output, offset, length);
|
|
};
|
|
bufferUtil.exports.unmask = function(buffer, mask) {
|
|
if (buffer.length < 32) _unmask(buffer, mask);
|
|
else bufferUtil$1.unmask(buffer, mask);
|
|
};
|
|
} catch (e) {
|
|
}
|
|
}
|
|
return bufferUtil.exports;
|
|
}
|
|
var limiter;

var hasRequiredLimiter;

/**
 * Lazily defines and memoizes the `Limiter` class: a tiny FIFO job queue that
 * runs at most `concurrency` jobs at a time. Each job receives a `done`
 * callback it must invoke to release its slot.
 *
 * @returns {Function} The `Limiter` constructor
 */
function requireLimiter() {
  if (hasRequiredLimiter) return limiter;
  hasRequiredLimiter = 1;
  const kDone = /* @__PURE__ */ Symbol("kDone");
  const kRun = /* @__PURE__ */ Symbol("kRun");

  class Limiter {
    /**
     * Creates a new `Limiter`.
     *
     * @param {Number} [concurrency=Infinity] The maximum number of jobs
     *     allowed to run concurrently
     */
    constructor(concurrency) {
      // Bound once so every job is handed the same `done` callback.
      this[kDone] = () => {
        this.pending--;
        this[kRun]();
      };
      this.concurrency = concurrency || Infinity;
      this.jobs = [];
      this.pending = 0;
    }

    /**
     * Queues a job and starts it immediately when a slot is free.
     *
     * @param {Function} job The job to run
     * @public
     */
    add(job) {
      this.jobs.push(job);
      this[kRun]();
    }

    /**
     * Starts the next queued job, if any, unless the concurrency limit has
     * been reached.
     *
     * @private
     */
    [kRun]() {
      if (this.pending === this.concurrency) return;
      if (this.jobs.length === 0) return;
      const job = this.jobs.shift();
      this.pending += 1;
      job(this[kDone]);
    }
  }

  limiter = Limiter;
  return limiter;
}
|
|
var permessageDeflate;

// Lazy-init guard for `requirePermessageDeflate()`.
var hasRequiredPermessageDeflate;

// Lazy module initializer for the ws permessage-deflate WebSocket extension:
// parameter negotiation plus zlib raw deflate/inflate stream management.
function requirePermessageDeflate() {
  if (hasRequiredPermessageDeflate) return permessageDeflate;
  hasRequiredPermessageDeflate = 1;
  const zlib = require$$0;
  const bufferUtil2 = requireBufferUtil();
  const Limiter = requireLimiter();
  const { kStatusCode } = requireConstants();
  const FastBuffer = Buffer[Symbol.species];
  // 4-byte empty deflate block: appended before inflating a `fin` fragment,
  // and stripped from the end of compressed output (see `_compress`).
  const TRAILER = Buffer.from([0, 0, 255, 255]);
  // Per-stream bookkeeping keys attached to the zlib stream objects.
  const kPerMessageDeflate = /* @__PURE__ */ Symbol("permessage-deflate");
  const kTotalLength = /* @__PURE__ */ Symbol("total-length");
  const kCallback = /* @__PURE__ */ Symbol("callback");
  const kBuffers = /* @__PURE__ */ Symbol("buffers");
  const kError = /* @__PURE__ */ Symbol("error");
  // Shared by ALL PerMessageDeflate instances; created on first construction
  // and caps the number of concurrent zlib jobs.
  let zlibLimiter;
  class PerMessageDeflate {
    /**
     * Creates a PerMessageDeflate instance.
     *
     * @param {Object} [options] Configuration options
     * @param {(Boolean|Number)} [options.clientMaxWindowBits] Advertise support
     *     for, or request, a custom client window size
     * @param {Boolean} [options.clientNoContextTakeover=false] Advertise/
     *     acknowledge disabling of client context takeover
     * @param {Number} [options.concurrencyLimit=10] The number of concurrent
     *     calls to zlib
     * @param {(Boolean|Number)} [options.serverMaxWindowBits] Request/confirm the
     *     use of a custom server window size
     * @param {Boolean} [options.serverNoContextTakeover=false] Request/accept
     *     disabling of server context takeover
     * @param {Number} [options.threshold=1024] Size (in bytes) below which
     *     messages should not be compressed if context takeover is disabled
     * @param {Object} [options.zlibDeflateOptions] Options to pass to zlib on
     *     deflate
     * @param {Object} [options.zlibInflateOptions] Options to pass to zlib on
     *     inflate
     * @param {Boolean} [isServer=false] Create the instance in either server or
     *     client mode
     * @param {Number} [maxPayload=0] The maximum allowed message length
     */
    constructor(options, isServer, maxPayload) {
      // `| 0` coerces undefined/NaN to 0 (0 meaning "no limit").
      this._maxPayload = maxPayload | 0;
      this._options = options || {};
      this._threshold = this._options.threshold !== void 0 ? this._options.threshold : 1024;
      this._isServer = !!isServer;
      // zlib streams are created lazily on first compress/decompress.
      this._deflate = null;
      this._inflate = null;
      this.params = null;
      if (!zlibLimiter) {
        // NOTE: only the first instance's concurrencyLimit takes effect,
        // since the limiter is module-global.
        const concurrency = this._options.concurrencyLimit !== void 0 ? this._options.concurrencyLimit : 10;
        zlibLimiter = new Limiter(concurrency);
      }
    }
    /**
     * The extension name as used in the Sec-WebSocket-Extensions header.
     *
     * @type {String}
     */
    static get extensionName() {
      return "permessage-deflate";
    }
    /**
     * Create an extension negotiation offer.
     *
     * @return {Object} Extension parameters
     * @public
     */
    offer() {
      const params = {};
      if (this._options.serverNoContextTakeover) {
        params.server_no_context_takeover = true;
      }
      if (this._options.clientNoContextTakeover) {
        params.client_no_context_takeover = true;
      }
      if (this._options.serverMaxWindowBits) {
        params.server_max_window_bits = this._options.serverMaxWindowBits;
      }
      if (this._options.clientMaxWindowBits) {
        params.client_max_window_bits = this._options.clientMaxWindowBits;
      } else if (this._options.clientMaxWindowBits == null) {
        // Option unset (null/undefined): offer the parameter without a value.
        params.client_max_window_bits = true;
      }
      return params;
    }
    /**
     * Accept an extension negotiation offer/response.
     *
     * @param {Array} configurations The extension negotiation offers/response
     * @return {Object} Accepted configuration
     * @public
     */
    accept(configurations) {
      configurations = this.normalizeParams(configurations);
      this.params = this._isServer ? this.acceptAsServer(configurations) : this.acceptAsClient(configurations);
      return this.params;
    }
    /**
     * Releases all resources used by the extension.
     *
     * @public
     */
    cleanup() {
      if (this._inflate) {
        this._inflate.close();
        this._inflate = null;
      }
      if (this._deflate) {
        const callback = this._deflate[kCallback];
        this._deflate.close();
        this._deflate = null;
        // A compress was still in flight: fail its callback explicitly.
        if (callback) {
          callback(
            new Error(
              "The deflate stream was closed while data was being processed"
            )
          );
        }
      }
    }
    /**
     * Accept an extension negotiation offer.
     *
     * @param {Array} offers The extension negotiation offers
     * @return {Object} Accepted configuration
     * @private
     */
    acceptAsServer(offers) {
      const opts = this._options;
      // Pick the first offer that does not conflict with the local options.
      const accepted = offers.find((params) => {
        if (opts.serverNoContextTakeover === false && params.server_no_context_takeover || params.server_max_window_bits && (opts.serverMaxWindowBits === false || typeof opts.serverMaxWindowBits === "number" && opts.serverMaxWindowBits > params.server_max_window_bits) || typeof opts.clientMaxWindowBits === "number" && !params.client_max_window_bits) {
          return false;
        }
        return true;
      });
      if (!accepted) {
        throw new Error("None of the extension offers can be accepted");
      }
      // Overlay the locally requested parameters on the accepted offer.
      if (opts.serverNoContextTakeover) {
        accepted.server_no_context_takeover = true;
      }
      if (opts.clientNoContextTakeover) {
        accepted.client_no_context_takeover = true;
      }
      if (typeof opts.serverMaxWindowBits === "number") {
        accepted.server_max_window_bits = opts.serverMaxWindowBits;
      }
      if (typeof opts.clientMaxWindowBits === "number") {
        accepted.client_max_window_bits = opts.clientMaxWindowBits;
      } else if (accepted.client_max_window_bits === true || opts.clientMaxWindowBits === false) {
        // A valueless offer with no local preference: drop the parameter.
        delete accepted.client_max_window_bits;
      }
      return accepted;
    }
    /**
     * Accept the extension negotiation response.
     *
     * @param {Array} response The extension negotiation response
     * @return {Object} Accepted configuration
     * @private
     */
    acceptAsClient(response) {
      const params = response[0];
      if (this._options.clientNoContextTakeover === false && params.client_no_context_takeover) {
        throw new Error('Unexpected parameter "client_no_context_takeover"');
      }
      if (!params.client_max_window_bits) {
        if (typeof this._options.clientMaxWindowBits === "number") {
          params.client_max_window_bits = this._options.clientMaxWindowBits;
        }
      } else if (this._options.clientMaxWindowBits === false || typeof this._options.clientMaxWindowBits === "number" && params.client_max_window_bits > this._options.clientMaxWindowBits) {
        throw new Error(
          'Unexpected or invalid parameter "client_max_window_bits"'
        );
      }
      return params;
    }
    /**
     * Normalize parameters: each raw parameter value arrives as an array of
     * strings (or `true` for valueless parameters); validate and convert to
     * numbers/booleans in place.
     *
     * @param {Array} configurations The extension negotiation offers/response
     * @return {Array} The offers/response with normalized parameters
     * @private
     */
    normalizeParams(configurations) {
      configurations.forEach((params) => {
        Object.keys(params).forEach((key) => {
          let value = params[key];
          if (value.length > 1) {
            throw new Error(`Parameter "${key}" must have only a single value`);
          }
          value = value[0];
          if (key === "client_max_window_bits") {
            if (value !== true) {
              const num = +value;
              if (!Number.isInteger(num) || num < 8 || num > 15) {
                throw new TypeError(
                  `Invalid value for parameter "${key}": ${value}`
                );
              }
              value = num;
            } else if (!this._isServer) {
              // Only a client->server offer may leave the parameter valueless.
              throw new TypeError(
                `Invalid value for parameter "${key}": ${value}`
              );
            }
          } else if (key === "server_max_window_bits") {
            const num = +value;
            if (!Number.isInteger(num) || num < 8 || num > 15) {
              throw new TypeError(
                `Invalid value for parameter "${key}": ${value}`
              );
            }
            value = num;
          } else if (key === "client_no_context_takeover" || key === "server_no_context_takeover") {
            if (value !== true) {
              throw new TypeError(
                `Invalid value for parameter "${key}": ${value}`
              );
            }
          } else {
            throw new Error(`Unknown parameter "${key}"`);
          }
          params[key] = value;
        });
      });
      return configurations;
    }
    /**
     * Decompress data. Concurrency limited by the shared `zlibLimiter`.
     *
     * @param {Buffer} data Compressed data
     * @param {Boolean} fin Specifies whether or not this is the last fragment
     * @param {Function} callback Callback
     * @public
     */
    decompress(data, fin, callback) {
      zlibLimiter.add((done) => {
        this._decompress(data, fin, (err, result) => {
          done();
          callback(err, result);
        });
      });
    }
    /**
     * Compress data. Concurrency limited by the shared `zlibLimiter`.
     *
     * @param {(Buffer|String)} data Data to compress
     * @param {Boolean} fin Specifies whether or not this is the last fragment
     * @param {Function} callback Callback
     * @public
     */
    compress(data, fin, callback) {
      zlibLimiter.add((done) => {
        this._compress(data, fin, (err, result) => {
          done();
          callback(err, result);
        });
      });
    }
    /**
     * Decompress data through the (lazily created, connection-long) inflate
     * stream.
     *
     * @param {Buffer} data Compressed data
     * @param {Boolean} fin Specifies whether or not this is the last fragment
     * @param {Function} callback Callback
     * @private
     */
    _decompress(data, fin, callback) {
      // We inflate what the peer deflated, hence the swapped endpoint name.
      const endpoint = this._isServer ? "client" : "server";
      if (!this._inflate) {
        const key = `${endpoint}_max_window_bits`;
        const windowBits = typeof this.params[key] !== "number" ? zlib.Z_DEFAULT_WINDOWBITS : this.params[key];
        this._inflate = zlib.createInflateRaw({
          ...this._options.zlibInflateOptions,
          windowBits
        });
        this._inflate[kPerMessageDeflate] = this;
        this._inflate[kTotalLength] = 0;
        this._inflate[kBuffers] = [];
        this._inflate.on("error", inflateOnError);
        this._inflate.on("data", inflateOnData);
      }
      // Stashed so `inflateOnError` can report into the right callback.
      this._inflate[kCallback] = callback;
      this._inflate.write(data);
      // Re-append the empty-block trailer stripped by the compressing side.
      if (fin) this._inflate.write(TRAILER);
      this._inflate.flush(() => {
        // `kError` is set by `inflateOnData` when maxPayload is exceeded.
        const err = this._inflate[kError];
        if (err) {
          this._inflate.close();
          this._inflate = null;
          callback(err);
          return;
        }
        const data2 = bufferUtil2.concat(
          this._inflate[kBuffers],
          this._inflate[kTotalLength]
        );
        if (this._inflate._readableState.endEmitted) {
          // Stream ended: dispose and recreate it on the next call.
          this._inflate.close();
          this._inflate = null;
        } else {
          this._inflate[kTotalLength] = 0;
          this._inflate[kBuffers] = [];
          // Without context takeover each message starts from a fresh
          // dictionary.
          if (fin && this.params[`${endpoint}_no_context_takeover`]) {
            this._inflate.reset();
          }
        }
        callback(null, data2);
      });
    }
    /**
     * Compress data through the (lazily created, connection-long) deflate
     * stream.
     *
     * @param {(Buffer|String)} data Data to compress
     * @param {Boolean} fin Specifies whether or not this is the last fragment
     * @param {Function} callback Callback
     * @private
     */
    _compress(data, fin, callback) {
      const endpoint = this._isServer ? "server" : "client";
      if (!this._deflate) {
        const key = `${endpoint}_max_window_bits`;
        const windowBits = typeof this.params[key] !== "number" ? zlib.Z_DEFAULT_WINDOWBITS : this.params[key];
        this._deflate = zlib.createDeflateRaw({
          ...this._options.zlibDeflateOptions,
          windowBits
        });
        this._deflate[kTotalLength] = 0;
        this._deflate[kBuffers] = [];
        this._deflate.on("data", deflateOnData);
      }
      // Stashed so `cleanup()` can fail an in-flight compress.
      this._deflate[kCallback] = callback;
      this._deflate.write(data);
      this._deflate.flush(zlib.Z_SYNC_FLUSH, () => {
        if (!this._deflate) {
          // `cleanup()` ran while flushing; it already invoked the callback.
          return;
        }
        let data2 = bufferUtil2.concat(
          this._deflate[kBuffers],
          this._deflate[kTotalLength]
        );
        if (fin) {
          // Drop the 4-byte 00 00 FF FF trailer from the final fragment.
          data2 = new FastBuffer(data2.buffer, data2.byteOffset, data2.length - 4);
        }
        this._deflate[kCallback] = null;
        this._deflate[kTotalLength] = 0;
        this._deflate[kBuffers] = [];
        if (fin && this.params[`${endpoint}_no_context_takeover`]) {
          this._deflate.reset();
        }
        callback(null, data2);
      });
    }
  }
  permessageDeflate = PerMessageDeflate;
  // "data" listener for the deflate stream: accumulate output chunks.
  function deflateOnData(chunk) {
    this[kBuffers].push(chunk);
    this[kTotalLength] += chunk.length;
  }
  // "data" listener for the inflate stream: accumulate output chunks while
  // enforcing the maximum payload size.
  function inflateOnData(chunk) {
    this[kTotalLength] += chunk.length;
    if (this[kPerMessageDeflate]._maxPayload < 1 || this[kTotalLength] <= this[kPerMessageDeflate]._maxPayload) {
      this[kBuffers].push(chunk);
      return;
    }
    // Limit exceeded: record the error (picked up after flush), stop
    // buffering further output and reset the stream.
    this[kError] = new RangeError("Max payload size exceeded");
    this[kError].code = "WS_ERR_UNSUPPORTED_MESSAGE_LENGTH";
    this[kError][kStatusCode] = 1009;
    this.removeListener("data", inflateOnData);
    this.reset();
  }
  // "error" listener for the inflate stream.
  function inflateOnError(err) {
    // The stream is unusable after an error; force recreation next time.
    this[kPerMessageDeflate]._inflate = null;
    if (this[kError]) {
      // Prefer the max-payload error recorded by `inflateOnData`.
      this[kCallback](this[kError]);
      return;
    }
    err[kStatusCode] = 1007;
    this[kCallback](err);
  }
  return permessageDeflate;
}
|
|
// CommonJS-style module shim for the bundled ws validation helpers; its
// `exports` object is populated lazily by `requireValidation()` below.
var validation = { exports: {} };

// Stand-in for the optional `utf-8-validate` native addon, which the bundler
// excluded; empty, so the probe in `requireValidation()` keeps the JS
// fallback.
const __viteOptionalPeerDep_utf8Validate_ws_true = {};

// The stand-in wrapped as a frozen ES-module namespace (default export only).
const __viteOptionalPeerDep_utf8Validate_ws_true$1 = /* @__PURE__ */ Object.freeze(/* @__PURE__ */ Object.defineProperty({
  __proto__: null,
  default: __viteOptionalPeerDep_utf8Validate_ws_true
}, Symbol.toStringTag, { value: "Module" }));

// CommonJS-flavored view of the namespace, consumed by `requireValidation()`.
const require$$2 = /* @__PURE__ */ getAugmentedNamespace(__viteOptionalPeerDep_utf8Validate_ws_true$1);

// Lazy-init guard for `requireValidation()`.
var hasRequiredValidation;
|
|
/**
 * Lazily initializes the ws validation helpers: close-code validation,
 * UTF-8 validation and Blob detection.
 *
 * @returns {Object} The validation module exports
 */
function requireValidation() {
  if (hasRequiredValidation) return validation.exports;
  hasRequiredValidation = 1;
  const { isUtf8 } = require$$0$1;
  const { hasBlob } = requireConstants();

  // Lookup table over byte values 0-127: 1 marks characters permitted in an
  // HTTP token (used elsewhere when parsing header values).
  const tokenChars = [
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 0 - 15
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 16 - 31
    0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0, // 32 - 47
    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, // 48 - 63
    0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // 64 - 79
    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, // 80 - 95
    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // 96 - 111
    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0  // 112 - 127
  ];

  // A close code is valid when it lies in 1000-1014 excluding the reserved
  // codes 1004, 1005 and 1006, or anywhere in the private range 3000-4999.
  function isValidStatusCode(code) {
    if (code >= 3000 && code <= 4999) return true;
    return (
      code >= 1000 &&
      code <= 1014 &&
      code !== 1004 &&
      code !== 1005 &&
      code !== 1006
    );
  }

  // Pure-JS UTF-8 validation; rejects continuation-byte errors, overlong
  // encodings, surrogate halves and code points above U+10FFFF.
  function _isValidUTF8(buf) {
    const len = buf.length;
    let i = 0;
    while (i < len) {
      const byte = buf[i];
      if ((byte & 0x80) === 0) {
        // 0xxxxxxx — single-byte (ASCII).
        i += 1;
      } else if ((byte & 0xe0) === 0xc0) {
        // 110xxxxx 10xxxxxx — two-byte sequence.
        if (
          i + 1 === len ||
          (buf[i + 1] & 0xc0) !== 0x80 ||
          (byte & 0xfe) === 0xc0 // overlong
        ) {
          return false;
        }
        i += 2;
      } else if ((byte & 0xf0) === 0xe0) {
        // 1110xxxx 10xxxxxx 10xxxxxx — three-byte sequence.
        if (
          i + 2 >= len ||
          (buf[i + 1] & 0xc0) !== 0x80 ||
          (buf[i + 2] & 0xc0) !== 0x80 ||
          (byte === 0xe0 && (buf[i + 1] & 0xe0) === 0x80) || // overlong
          (byte === 0xed && (buf[i + 1] & 0xe0) === 0xa0) // surrogate half
        ) {
          return false;
        }
        i += 3;
      } else if ((byte & 0xf8) === 0xf0) {
        // 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx — four-byte sequence.
        if (
          i + 3 >= len ||
          (buf[i + 1] & 0xc0) !== 0x80 ||
          (buf[i + 2] & 0xc0) !== 0x80 ||
          (buf[i + 3] & 0xc0) !== 0x80 ||
          (byte === 0xf0 && (buf[i + 1] & 0xf0) === 0x80) || // overlong
          (byte === 0xf4 && buf[i + 1] > 0x8f) || // above U+10FFFF
          byte > 0xf4
        ) {
          return false;
        }
        i += 4;
      } else {
        return false;
      }
    }
    return true;
  }

  // Duck-type check for Blob/File values without touching the constructors.
  function isBlob(value) {
    return (
      hasBlob &&
      typeof value === "object" &&
      typeof value.arrayBuffer === "function" &&
      typeof value.type === "string" &&
      typeof value.stream === "function" &&
      (value[Symbol.toStringTag] === "Blob" ||
        value[Symbol.toStringTag] === "File")
    );
  }

  validation.exports = {
    isBlob,
    isValidStatusCode,
    isValidUTF8: _isValidUTF8,
    tokenChars
  };

  // Prefer a native UTF-8 validator when one is available; short inputs stay
  // on the JS path, where the native call overhead is not worth paying.
  if (isUtf8) {
    validation.exports.isValidUTF8 = function (buf) {
      return buf.length < 24 ? _isValidUTF8(buf) : isUtf8(buf);
    };
  } else if (!process.env.WS_NO_UTF_8_VALIDATE) {
    try {
      const isValidUTF8 = require$$2;
      validation.exports.isValidUTF8 = function (buf) {
        return buf.length < 32 ? _isValidUTF8(buf) : isValidUTF8(buf);
      };
    } catch (e) {
      // Optional dependency unavailable; keep the JS fallback.
    }
  }
  return validation.exports;
}
|
|
// Module slot and lazy-init guard for the bundled ws frame receiver, created
// by `requireReceiver()` below.
var receiver;

var hasRequiredReceiver;
|
|
function requireReceiver() {
|
|
if (hasRequiredReceiver) return receiver;
|
|
hasRequiredReceiver = 1;
|
|
const { Writable } = require$$0$2;
|
|
const PerMessageDeflate = requirePermessageDeflate();
|
|
const {
|
|
BINARY_TYPES,
|
|
EMPTY_BUFFER,
|
|
kStatusCode,
|
|
kWebSocket
|
|
} = requireConstants();
|
|
const { concat, toArrayBuffer: toArrayBuffer2, unmask } = requireBufferUtil();
|
|
const { isValidStatusCode, isValidUTF8 } = requireValidation();
|
|
const FastBuffer = Buffer[Symbol.species];
|
|
const GET_INFO = 0;
|
|
const GET_PAYLOAD_LENGTH_16 = 1;
|
|
const GET_PAYLOAD_LENGTH_64 = 2;
|
|
const GET_MASK = 3;
|
|
const GET_DATA = 4;
|
|
const INFLATING = 5;
|
|
const DEFER_EVENT = 6;
|
|
class Receiver extends Writable {
|
|
/**
|
|
* Creates a Receiver instance.
|
|
*
|
|
* @param {Object} [options] Options object
|
|
* @param {Boolean} [options.allowSynchronousEvents=true] Specifies whether
|
|
* any of the `'message'`, `'ping'`, and `'pong'` events can be emitted
|
|
* multiple times in the same tick
|
|
* @param {String} [options.binaryType=nodebuffer] The type for binary data
|
|
* @param {Object} [options.extensions] An object containing the negotiated
|
|
* extensions
|
|
* @param {Boolean} [options.isServer=false] Specifies whether to operate in
|
|
* client or server mode
|
|
* @param {Number} [options.maxPayload=0] The maximum allowed message length
|
|
* @param {Boolean} [options.skipUTF8Validation=false] Specifies whether or
|
|
* not to skip UTF-8 validation for text and close messages
|
|
*/
|
|
constructor(options = {}) {
|
|
super();
|
|
this._allowSynchronousEvents = options.allowSynchronousEvents !== void 0 ? options.allowSynchronousEvents : true;
|
|
this._binaryType = options.binaryType || BINARY_TYPES[0];
|
|
this._extensions = options.extensions || {};
|
|
this._isServer = !!options.isServer;
|
|
this._maxPayload = options.maxPayload | 0;
|
|
this._skipUTF8Validation = !!options.skipUTF8Validation;
|
|
this[kWebSocket] = void 0;
|
|
this._bufferedBytes = 0;
|
|
this._buffers = [];
|
|
this._compressed = false;
|
|
this._payloadLength = 0;
|
|
this._mask = void 0;
|
|
this._fragmented = 0;
|
|
this._masked = false;
|
|
this._fin = false;
|
|
this._opcode = 0;
|
|
this._totalPayloadLength = 0;
|
|
this._messageLength = 0;
|
|
this._fragments = [];
|
|
this._errored = false;
|
|
this._loop = false;
|
|
this._state = GET_INFO;
|
|
}
|
|
/**
|
|
* Implements `Writable.prototype._write()`.
|
|
*
|
|
* @param {Buffer} chunk The chunk of data to write
|
|
* @param {String} encoding The character encoding of `chunk`
|
|
* @param {Function} cb Callback
|
|
* @private
|
|
*/
|
|
_write(chunk, encoding, cb) {
|
|
if (this._opcode === 8 && this._state == GET_INFO) return cb();
|
|
this._bufferedBytes += chunk.length;
|
|
this._buffers.push(chunk);
|
|
this.startLoop(cb);
|
|
}
|
|
/**
|
|
* Consumes `n` bytes from the buffered data.
|
|
*
|
|
* @param {Number} n The number of bytes to consume
|
|
* @return {Buffer} The consumed bytes
|
|
* @private
|
|
*/
|
|
consume(n) {
|
|
this._bufferedBytes -= n;
|
|
if (n === this._buffers[0].length) return this._buffers.shift();
|
|
if (n < this._buffers[0].length) {
|
|
const buf = this._buffers[0];
|
|
this._buffers[0] = new FastBuffer(
|
|
buf.buffer,
|
|
buf.byteOffset + n,
|
|
buf.length - n
|
|
);
|
|
return new FastBuffer(buf.buffer, buf.byteOffset, n);
|
|
}
|
|
const dst = Buffer.allocUnsafe(n);
|
|
do {
|
|
const buf = this._buffers[0];
|
|
const offset = dst.length - n;
|
|
if (n >= buf.length) {
|
|
dst.set(this._buffers.shift(), offset);
|
|
} else {
|
|
dst.set(new Uint8Array(buf.buffer, buf.byteOffset, n), offset);
|
|
this._buffers[0] = new FastBuffer(
|
|
buf.buffer,
|
|
buf.byteOffset + n,
|
|
buf.length - n
|
|
);
|
|
}
|
|
n -= buf.length;
|
|
} while (n > 0);
|
|
return dst;
|
|
}
|
|
/**
|
|
* Starts the parsing loop.
|
|
*
|
|
* @param {Function} cb Callback
|
|
* @private
|
|
*/
|
|
startLoop(cb) {
|
|
this._loop = true;
|
|
do {
|
|
switch (this._state) {
|
|
case GET_INFO:
|
|
this.getInfo(cb);
|
|
break;
|
|
case GET_PAYLOAD_LENGTH_16:
|
|
this.getPayloadLength16(cb);
|
|
break;
|
|
case GET_PAYLOAD_LENGTH_64:
|
|
this.getPayloadLength64(cb);
|
|
break;
|
|
case GET_MASK:
|
|
this.getMask();
|
|
break;
|
|
case GET_DATA:
|
|
this.getData(cb);
|
|
break;
|
|
case INFLATING:
|
|
case DEFER_EVENT:
|
|
this._loop = false;
|
|
return;
|
|
}
|
|
} while (this._loop);
|
|
if (!this._errored) cb();
|
|
}
|
|
/**
|
|
* Reads the first two bytes of a frame.
|
|
*
|
|
* @param {Function} cb Callback
|
|
* @private
|
|
*/
|
|
getInfo(cb) {
|
|
if (this._bufferedBytes < 2) {
|
|
this._loop = false;
|
|
return;
|
|
}
|
|
const buf = this.consume(2);
|
|
if ((buf[0] & 48) !== 0) {
|
|
const error = this.createError(
|
|
RangeError,
|
|
"RSV2 and RSV3 must be clear",
|
|
true,
|
|
1002,
|
|
"WS_ERR_UNEXPECTED_RSV_2_3"
|
|
);
|
|
cb(error);
|
|
return;
|
|
}
|
|
const compressed = (buf[0] & 64) === 64;
|
|
if (compressed && !this._extensions[PerMessageDeflate.extensionName]) {
|
|
const error = this.createError(
|
|
RangeError,
|
|
"RSV1 must be clear",
|
|
true,
|
|
1002,
|
|
"WS_ERR_UNEXPECTED_RSV_1"
|
|
);
|
|
cb(error);
|
|
return;
|
|
}
|
|
this._fin = (buf[0] & 128) === 128;
|
|
this._opcode = buf[0] & 15;
|
|
this._payloadLength = buf[1] & 127;
|
|
if (this._opcode === 0) {
|
|
if (compressed) {
|
|
const error = this.createError(
|
|
RangeError,
|
|
"RSV1 must be clear",
|
|
true,
|
|
1002,
|
|
"WS_ERR_UNEXPECTED_RSV_1"
|
|
);
|
|
cb(error);
|
|
return;
|
|
}
|
|
if (!this._fragmented) {
|
|
const error = this.createError(
|
|
RangeError,
|
|
"invalid opcode 0",
|
|
true,
|
|
1002,
|
|
"WS_ERR_INVALID_OPCODE"
|
|
);
|
|
cb(error);
|
|
return;
|
|
}
|
|
this._opcode = this._fragmented;
|
|
} else if (this._opcode === 1 || this._opcode === 2) {
|
|
if (this._fragmented) {
|
|
const error = this.createError(
|
|
RangeError,
|
|
`invalid opcode ${this._opcode}`,
|
|
true,
|
|
1002,
|
|
"WS_ERR_INVALID_OPCODE"
|
|
);
|
|
cb(error);
|
|
return;
|
|
}
|
|
this._compressed = compressed;
|
|
} else if (this._opcode > 7 && this._opcode < 11) {
|
|
if (!this._fin) {
|
|
const error = this.createError(
|
|
RangeError,
|
|
"FIN must be set",
|
|
true,
|
|
1002,
|
|
"WS_ERR_EXPECTED_FIN"
|
|
);
|
|
cb(error);
|
|
return;
|
|
}
|
|
if (compressed) {
|
|
const error = this.createError(
|
|
RangeError,
|
|
"RSV1 must be clear",
|
|
true,
|
|
1002,
|
|
"WS_ERR_UNEXPECTED_RSV_1"
|
|
);
|
|
cb(error);
|
|
return;
|
|
}
|
|
if (this._payloadLength > 125 || this._opcode === 8 && this._payloadLength === 1) {
|
|
const error = this.createError(
|
|
RangeError,
|
|
`invalid payload length ${this._payloadLength}`,
|
|
true,
|
|
1002,
|
|
"WS_ERR_INVALID_CONTROL_PAYLOAD_LENGTH"
|
|
);
|
|
cb(error);
|
|
return;
|
|
}
|
|
} else {
|
|
const error = this.createError(
|
|
RangeError,
|
|
`invalid opcode ${this._opcode}`,
|
|
true,
|
|
1002,
|
|
"WS_ERR_INVALID_OPCODE"
|
|
);
|
|
cb(error);
|
|
return;
|
|
}
|
|
if (!this._fin && !this._fragmented) this._fragmented = this._opcode;
|
|
this._masked = (buf[1] & 128) === 128;
|
|
if (this._isServer) {
|
|
if (!this._masked) {
|
|
const error = this.createError(
|
|
RangeError,
|
|
"MASK must be set",
|
|
true,
|
|
1002,
|
|
"WS_ERR_EXPECTED_MASK"
|
|
);
|
|
cb(error);
|
|
return;
|
|
}
|
|
} else if (this._masked) {
|
|
const error = this.createError(
|
|
RangeError,
|
|
"MASK must be clear",
|
|
true,
|
|
1002,
|
|
"WS_ERR_UNEXPECTED_MASK"
|
|
);
|
|
cb(error);
|
|
return;
|
|
}
|
|
if (this._payloadLength === 126) this._state = GET_PAYLOAD_LENGTH_16;
|
|
else if (this._payloadLength === 127) this._state = GET_PAYLOAD_LENGTH_64;
|
|
else this.haveLength(cb);
|
|
}
|
|
/**
|
|
* Gets extended payload length (7+16).
|
|
*
|
|
* @param {Function} cb Callback
|
|
* @private
|
|
*/
|
|
getPayloadLength16(cb) {
|
|
if (this._bufferedBytes < 2) {
|
|
this._loop = false;
|
|
return;
|
|
}
|
|
this._payloadLength = this.consume(2).readUInt16BE(0);
|
|
this.haveLength(cb);
|
|
}
|
|
/**
|
|
* Gets extended payload length (7+64).
|
|
*
|
|
* @param {Function} cb Callback
|
|
* @private
|
|
*/
|
|
getPayloadLength64(cb) {
|
|
if (this._bufferedBytes < 8) {
|
|
this._loop = false;
|
|
return;
|
|
}
|
|
const buf = this.consume(8);
|
|
const num = buf.readUInt32BE(0);
|
|
if (num > Math.pow(2, 53 - 32) - 1) {
|
|
const error = this.createError(
|
|
RangeError,
|
|
"Unsupported WebSocket frame: payload length > 2^53 - 1",
|
|
false,
|
|
1009,
|
|
"WS_ERR_UNSUPPORTED_DATA_PAYLOAD_LENGTH"
|
|
);
|
|
cb(error);
|
|
return;
|
|
}
|
|
this._payloadLength = num * Math.pow(2, 32) + buf.readUInt32BE(4);
|
|
this.haveLength(cb);
|
|
}
|
|
/**
|
|
* Payload length has been read.
|
|
*
|
|
* @param {Function} cb Callback
|
|
* @private
|
|
*/
|
|
haveLength(cb) {
|
|
if (this._payloadLength && this._opcode < 8) {
|
|
this._totalPayloadLength += this._payloadLength;
|
|
if (this._totalPayloadLength > this._maxPayload && this._maxPayload > 0) {
|
|
const error = this.createError(
|
|
RangeError,
|
|
"Max payload size exceeded",
|
|
false,
|
|
1009,
|
|
"WS_ERR_UNSUPPORTED_MESSAGE_LENGTH"
|
|
);
|
|
cb(error);
|
|
return;
|
|
}
|
|
}
|
|
if (this._masked) this._state = GET_MASK;
|
|
else this._state = GET_DATA;
|
|
}
|
|
/**
|
|
* Reads mask bytes.
|
|
*
|
|
* @private
|
|
*/
|
|
getMask() {
|
|
if (this._bufferedBytes < 4) {
|
|
this._loop = false;
|
|
return;
|
|
}
|
|
this._mask = this.consume(4);
|
|
this._state = GET_DATA;
|
|
}
|
|
/**
|
|
* Reads data bytes.
|
|
*
|
|
* @param {Function} cb Callback
|
|
* @private
|
|
*/
|
|
getData(cb) {
|
|
let data = EMPTY_BUFFER;
|
|
if (this._payloadLength) {
|
|
if (this._bufferedBytes < this._payloadLength) {
|
|
this._loop = false;
|
|
return;
|
|
}
|
|
data = this.consume(this._payloadLength);
|
|
if (this._masked && (this._mask[0] | this._mask[1] | this._mask[2] | this._mask[3]) !== 0) {
|
|
unmask(data, this._mask);
|
|
}
|
|
}
|
|
if (this._opcode > 7) {
|
|
this.controlMessage(data, cb);
|
|
return;
|
|
}
|
|
if (this._compressed) {
|
|
this._state = INFLATING;
|
|
this.decompress(data, cb);
|
|
return;
|
|
}
|
|
if (data.length) {
|
|
this._messageLength = this._totalPayloadLength;
|
|
this._fragments.push(data);
|
|
}
|
|
this.dataMessage(cb);
|
|
}
|
|
/**
|
|
* Decompresses data.
|
|
*
|
|
* @param {Buffer} data Compressed data
|
|
* @param {Function} cb Callback
|
|
* @private
|
|
*/
|
|
decompress(data, cb) {
|
|
const perMessageDeflate = this._extensions[PerMessageDeflate.extensionName];
|
|
perMessageDeflate.decompress(data, this._fin, (err, buf) => {
|
|
if (err) return cb(err);
|
|
if (buf.length) {
|
|
this._messageLength += buf.length;
|
|
if (this._messageLength > this._maxPayload && this._maxPayload > 0) {
|
|
const error = this.createError(
|
|
RangeError,
|
|
"Max payload size exceeded",
|
|
false,
|
|
1009,
|
|
"WS_ERR_UNSUPPORTED_MESSAGE_LENGTH"
|
|
);
|
|
cb(error);
|
|
return;
|
|
}
|
|
this._fragments.push(buf);
|
|
}
|
|
this.dataMessage(cb);
|
|
if (this._state === GET_INFO) this.startLoop(cb);
|
|
});
|
|
}
|
|
/**
 * Handles a data message.
 *
 * @param {Function} cb Callback
 * @private
 */
dataMessage(cb) {
  if (!this._fin) {
    // Not the final frame of the message: go back to reading frame headers.
    this._state = GET_INFO;
    return;
  }
  const messageLength = this._messageLength;
  const fragments = this._fragments;
  // Reset the per-message accumulators before emitting so a listener that
  // triggers more parsing sees a clean receiver state.
  this._totalPayloadLength = 0;
  this._messageLength = 0;
  this._fragmented = 0;
  this._fragments = [];
  if (this._opcode === 2) {
    // Binary message: materialize the fragments per the configured binary type.
    let data;
    if (this._binaryType === "nodebuffer") {
      data = concat(fragments, messageLength);
    } else if (this._binaryType === "arraybuffer") {
      data = toArrayBuffer2(concat(fragments, messageLength));
    } else if (this._binaryType === "blob") {
      data = new Blob(fragments);
    } else {
      // "fragments" binary type: hand the raw fragment list to the listener.
      data = fragments;
    }
    if (this._allowSynchronousEvents) {
      this.emit("message", data, true);
      this._state = GET_INFO;
    } else {
      // Defer the event to the next event-loop iteration and resume the
      // parse loop from the deferred callback.
      this._state = DEFER_EVENT;
      setImmediate(() => {
        this.emit("message", data, true);
        this._state = GET_INFO;
        this.startLoop(cb);
      });
    }
  } else {
    // Text message: must be valid UTF-8 unless validation is disabled.
    const buf = concat(fragments, messageLength);
    if (!this._skipUTF8Validation && !isValidUTF8(buf)) {
      const error = this.createError(
        Error,
        "invalid UTF-8 sequence",
        true,
        1007,
        "WS_ERR_INVALID_UTF8"
      );
      cb(error);
      return;
    }
    // When called from the inflate path the event may be emitted
    // synchronously even if synchronous events are otherwise disabled.
    if (this._state === INFLATING || this._allowSynchronousEvents) {
      this.emit("message", buf, false);
      this._state = GET_INFO;
    } else {
      this._state = DEFER_EVENT;
      setImmediate(() => {
        this.emit("message", buf, false);
        this._state = GET_INFO;
        this.startLoop(cb);
      });
    }
  }
}
|
|
/**
 * Handles a control message.
 *
 * @param {Buffer} data Data to handle
 * @param {Function} cb Callback invoked with an error if the frame is invalid
 * @private
 */
controlMessage(data, cb) {
  if (this._opcode === 8) {
    // Close frame.
    if (data.length === 0) {
      // Empty close body: report status 1005 ("no status received").
      this._loop = false;
      this.emit("conclude", 1005, EMPTY_BUFFER);
      this.end();
    } else {
      const code = data.readUInt16BE(0);
      if (!isValidStatusCode(code)) {
        const error = this.createError(
          RangeError,
          `invalid status code ${code}`,
          true,
          1002,
          "WS_ERR_INVALID_CLOSE_CODE"
        );
        cb(error);
        return;
      }
      // Close reason: the bytes after the 2-byte status code (zero-copy view).
      const buf = new FastBuffer(
        data.buffer,
        data.byteOffset + 2,
        data.length - 2
      );
      if (!this._skipUTF8Validation && !isValidUTF8(buf)) {
        const error = this.createError(
          Error,
          "invalid UTF-8 sequence",
          true,
          1007,
          "WS_ERR_INVALID_UTF8"
        );
        cb(error);
        return;
      }
      this._loop = false;
      this.emit("conclude", code, buf);
      this.end();
    }
    this._state = GET_INFO;
    return;
  }
  // Ping (opcode 9) or pong (opcode 10).
  if (this._allowSynchronousEvents) {
    this.emit(this._opcode === 9 ? "ping" : "pong", data);
    this._state = GET_INFO;
  } else {
    this._state = DEFER_EVENT;
    setImmediate(() => {
      this.emit(this._opcode === 9 ? "ping" : "pong", data);
      this._state = GET_INFO;
      this.startLoop(cb);
    });
  }
}
|
|
/**
|
|
* Builds an error object.
|
|
*
|
|
* @param {function(new:Error|RangeError)} ErrorCtor The error constructor
|
|
* @param {String} message The error message
|
|
* @param {Boolean} prefix Specifies whether or not to add a default prefix to
|
|
* `message`
|
|
* @param {Number} statusCode The status code
|
|
* @param {String} errorCode The exposed error code
|
|
* @return {(Error|RangeError)} The error
|
|
* @private
|
|
*/
|
|
createError(ErrorCtor, message, prefix, statusCode, errorCode) {
|
|
this._loop = false;
|
|
this._errored = true;
|
|
const err = new ErrorCtor(
|
|
prefix ? `Invalid WebSocket frame: ${message}` : message
|
|
);
|
|
Error.captureStackTrace(err, this.createError);
|
|
err.code = errorCode;
|
|
err[kStatusCode] = statusCode;
|
|
return err;
|
|
}
|
|
}
|
|
receiver = Receiver;
|
|
return receiver;
|
|
}
|
|
var sender;
|
|
var hasRequiredSender;
|
|
function requireSender() {
|
|
if (hasRequiredSender) return sender;
|
|
hasRequiredSender = 1;
|
|
const { Duplex } = require$$0$2;
|
|
const { randomFillSync } = crypto;
|
|
const PerMessageDeflate = requirePermessageDeflate();
|
|
const { EMPTY_BUFFER, kWebSocket, NOOP } = requireConstants();
|
|
const { isBlob, isValidStatusCode } = requireValidation();
|
|
const { mask: applyMask, toBuffer } = requireBufferUtil();
|
|
const kByteLength = /* @__PURE__ */ Symbol("kByteLength");
|
|
const maskBuffer = Buffer.alloc(4);
|
|
const RANDOM_POOL_SIZE = 8 * 1024;
|
|
let randomPool;
|
|
let randomPoolPointer = RANDOM_POOL_SIZE;
|
|
const DEFAULT = 0;
|
|
const DEFLATING = 1;
|
|
const GET_BLOB_DATA = 2;
|
|
class Sender {
|
|
/**
|
|
* Creates a Sender instance.
|
|
*
|
|
* @param {Duplex} socket The connection socket
|
|
* @param {Object} [extensions] An object containing the negotiated extensions
|
|
* @param {Function} [generateMask] The function used to generate the masking
|
|
* key
|
|
*/
|
|
constructor(socket, extensions, generateMask) {
|
|
this._extensions = extensions || {};
|
|
if (generateMask) {
|
|
this._generateMask = generateMask;
|
|
this._maskBuffer = Buffer.alloc(4);
|
|
}
|
|
this._socket = socket;
|
|
this._firstFragment = true;
|
|
this._compress = false;
|
|
this._bufferedBytes = 0;
|
|
this._queue = [];
|
|
this._state = DEFAULT;
|
|
this.onerror = NOOP;
|
|
this[kWebSocket] = void 0;
|
|
}
|
|
/**
 * Frames a piece of data according to the HyBi WebSocket protocol.
 *
 * @param {(Buffer|String)} data The data to frame
 * @param {Object} options Options object
 * @param {Boolean} [options.fin=false] Specifies whether or not to set the
 *     FIN bit
 * @param {Function} [options.generateMask] The function used to generate the
 *     masking key
 * @param {Boolean} [options.mask=false] Specifies whether or not to mask
 *     `data`
 * @param {Buffer} [options.maskBuffer] The buffer used to store the masking
 *     key
 * @param {Number} options.opcode The opcode
 * @param {Boolean} [options.readOnly=false] Specifies whether `data` can be
 *     modified
 * @param {Boolean} [options.rsv1=false] Specifies whether or not to set the
 *     RSV1 bit
 * @return {(Buffer|String)[]} The framed data
 * @public
 */
static frame(data, options) {
  let mask;
  let merge = false;
  let offset = 2; // header length in bytes; grows for masking/extended length
  let skipMasking = false;
  if (options.mask) {
    mask = options.maskBuffer || maskBuffer;
    if (options.generateMask) {
      options.generateMask(mask);
    } else {
      // Draw 4 masking-key bytes from a pre-filled random pool, refilled
      // lazily to amortize the cost of randomFillSync().
      if (randomPoolPointer === RANDOM_POOL_SIZE) {
        if (randomPool === void 0) {
          randomPool = Buffer.alloc(RANDOM_POOL_SIZE);
        }
        randomFillSync(randomPool, 0, RANDOM_POOL_SIZE);
        randomPoolPointer = 0;
      }
      mask[0] = randomPool[randomPoolPointer++];
      mask[1] = randomPool[randomPoolPointer++];
      mask[2] = randomPool[randomPoolPointer++];
      mask[3] = randomPool[randomPoolPointer++];
    }
    // An all-zero key makes masking a no-op, so the payload can go out as-is.
    skipMasking = (mask[0] | mask[1] | mask[2] | mask[3]) === 0;
    offset = 6;
  }
  let dataLength;
  if (typeof data === "string") {
    // Strings can be written directly only when no (effective) masking is
    // needed and the byte length was precomputed by the caller.
    if ((!options.mask || skipMasking) && options[kByteLength] !== void 0) {
      dataLength = options[kByteLength];
    } else {
      data = Buffer.from(data);
      dataLength = data.length;
    }
  } else {
    dataLength = data.length;
    // Read-only payloads that must be masked are copied into the header
    // buffer instead of being masked in place.
    merge = options.mask && options.readOnly && !skipMasking;
  }
  let payloadLength = dataLength;
  if (dataLength >= 65536) {
    offset += 8;
    payloadLength = 127; // 64-bit extended payload length
  } else if (dataLength > 125) {
    offset += 2;
    payloadLength = 126; // 16-bit extended payload length
  }
  const target = Buffer.allocUnsafe(merge ? dataLength + offset : offset);
  target[0] = options.fin ? options.opcode | 128 : options.opcode;
  if (options.rsv1) target[0] |= 64;
  target[1] = payloadLength;
  if (payloadLength === 126) {
    target.writeUInt16BE(dataLength, 2);
  } else if (payloadLength === 127) {
    // writeUIntBE supports at most 6 bytes; the top 2 bytes are zeroed.
    target[2] = target[3] = 0;
    target.writeUIntBE(dataLength, 4, 6);
  }
  if (!options.mask) return [target, data];
  target[1] |= 128; // MASK bit
  target[offset - 4] = mask[0];
  target[offset - 3] = mask[1];
  target[offset - 2] = mask[2];
  target[offset - 1] = mask[3];
  if (skipMasking) return [target, data];
  if (merge) {
    // Mask directly into the tail of the header buffer: one combined frame.
    applyMask(data, mask, target, offset, dataLength);
    return [target];
  }
  applyMask(data, mask, data, 0, dataLength);
  return [target, data];
}
|
|
/**
|
|
* Sends a close message to the other peer.
|
|
*
|
|
* @param {Number} [code] The status code component of the body
|
|
* @param {(String|Buffer)} [data] The message component of the body
|
|
* @param {Boolean} [mask=false] Specifies whether or not to mask the message
|
|
* @param {Function} [cb] Callback
|
|
* @public
|
|
*/
|
|
close(code, data, mask, cb) {
|
|
let buf;
|
|
if (code === void 0) {
|
|
buf = EMPTY_BUFFER;
|
|
} else if (typeof code !== "number" || !isValidStatusCode(code)) {
|
|
throw new TypeError("First argument must be a valid error code number");
|
|
} else if (data === void 0 || !data.length) {
|
|
buf = Buffer.allocUnsafe(2);
|
|
buf.writeUInt16BE(code, 0);
|
|
} else {
|
|
const length = Buffer.byteLength(data);
|
|
if (length > 123) {
|
|
throw new RangeError("The message must not be greater than 123 bytes");
|
|
}
|
|
buf = Buffer.allocUnsafe(2 + length);
|
|
buf.writeUInt16BE(code, 0);
|
|
if (typeof data === "string") {
|
|
buf.write(data, 2);
|
|
} else {
|
|
buf.set(data, 2);
|
|
}
|
|
}
|
|
const options = {
|
|
[kByteLength]: buf.length,
|
|
fin: true,
|
|
generateMask: this._generateMask,
|
|
mask,
|
|
maskBuffer: this._maskBuffer,
|
|
opcode: 8,
|
|
readOnly: false,
|
|
rsv1: false
|
|
};
|
|
if (this._state !== DEFAULT) {
|
|
this.enqueue([this.dispatch, buf, false, options, cb]);
|
|
} else {
|
|
this.sendFrame(Sender.frame(buf, options), cb);
|
|
}
|
|
}
|
|
/**
|
|
* Sends a ping message to the other peer.
|
|
*
|
|
* @param {*} data The message to send
|
|
* @param {Boolean} [mask=false] Specifies whether or not to mask `data`
|
|
* @param {Function} [cb] Callback
|
|
* @public
|
|
*/
|
|
ping(data, mask, cb) {
|
|
let byteLength;
|
|
let readOnly;
|
|
if (typeof data === "string") {
|
|
byteLength = Buffer.byteLength(data);
|
|
readOnly = false;
|
|
} else if (isBlob(data)) {
|
|
byteLength = data.size;
|
|
readOnly = false;
|
|
} else {
|
|
data = toBuffer(data);
|
|
byteLength = data.length;
|
|
readOnly = toBuffer.readOnly;
|
|
}
|
|
if (byteLength > 125) {
|
|
throw new RangeError("The data size must not be greater than 125 bytes");
|
|
}
|
|
const options = {
|
|
[kByteLength]: byteLength,
|
|
fin: true,
|
|
generateMask: this._generateMask,
|
|
mask,
|
|
maskBuffer: this._maskBuffer,
|
|
opcode: 9,
|
|
readOnly,
|
|
rsv1: false
|
|
};
|
|
if (isBlob(data)) {
|
|
if (this._state !== DEFAULT) {
|
|
this.enqueue([this.getBlobData, data, false, options, cb]);
|
|
} else {
|
|
this.getBlobData(data, false, options, cb);
|
|
}
|
|
} else if (this._state !== DEFAULT) {
|
|
this.enqueue([this.dispatch, data, false, options, cb]);
|
|
} else {
|
|
this.sendFrame(Sender.frame(data, options), cb);
|
|
}
|
|
}
|
|
/**
|
|
* Sends a pong message to the other peer.
|
|
*
|
|
* @param {*} data The message to send
|
|
* @param {Boolean} [mask=false] Specifies whether or not to mask `data`
|
|
* @param {Function} [cb] Callback
|
|
* @public
|
|
*/
|
|
pong(data, mask, cb) {
|
|
let byteLength;
|
|
let readOnly;
|
|
if (typeof data === "string") {
|
|
byteLength = Buffer.byteLength(data);
|
|
readOnly = false;
|
|
} else if (isBlob(data)) {
|
|
byteLength = data.size;
|
|
readOnly = false;
|
|
} else {
|
|
data = toBuffer(data);
|
|
byteLength = data.length;
|
|
readOnly = toBuffer.readOnly;
|
|
}
|
|
if (byteLength > 125) {
|
|
throw new RangeError("The data size must not be greater than 125 bytes");
|
|
}
|
|
const options = {
|
|
[kByteLength]: byteLength,
|
|
fin: true,
|
|
generateMask: this._generateMask,
|
|
mask,
|
|
maskBuffer: this._maskBuffer,
|
|
opcode: 10,
|
|
readOnly,
|
|
rsv1: false
|
|
};
|
|
if (isBlob(data)) {
|
|
if (this._state !== DEFAULT) {
|
|
this.enqueue([this.getBlobData, data, false, options, cb]);
|
|
} else {
|
|
this.getBlobData(data, false, options, cb);
|
|
}
|
|
} else if (this._state !== DEFAULT) {
|
|
this.enqueue([this.dispatch, data, false, options, cb]);
|
|
} else {
|
|
this.sendFrame(Sender.frame(data, options), cb);
|
|
}
|
|
}
|
|
/**
 * Sends a data message to the other peer.
 *
 * @param {*} data The message to send
 * @param {Object} options Options object
 * @param {Boolean} [options.binary=false] Specifies whether `data` is binary
 *     or text
 * @param {Boolean} [options.compress=false] Specifies whether or not to
 *     compress `data`
 * @param {Boolean} [options.fin=false] Specifies whether the fragment is the
 *     last one
 * @param {Boolean} [options.mask=false] Specifies whether or not to mask
 *     `data`
 * @param {Function} [cb] Callback
 * @public
 */
send(data, options, cb) {
  const perMessageDeflate = this._extensions[PerMessageDeflate.extensionName];
  let opcode = options.binary ? 2 : 1;
  let rsv1 = options.compress;
  let byteLength;
  let readOnly;
  if (typeof data === "string") {
    byteLength = Buffer.byteLength(data);
    readOnly = false;
  } else if (isBlob(data)) {
    byteLength = data.size;
    readOnly = false;
  } else {
    data = toBuffer(data);
    byteLength = data.length;
    readOnly = toBuffer.readOnly;
  }
  if (this._firstFragment) {
    this._firstFragment = false;
    // Compression is decided on the first fragment and remembered in
    // this._compress for the rest of the message.
    if (rsv1 && perMessageDeflate && perMessageDeflate.params[perMessageDeflate._isServer ? "server_no_context_takeover" : "client_no_context_takeover"]) {
      // Without context takeover, small payloads are not worth compressing.
      rsv1 = byteLength >= perMessageDeflate._threshold;
    }
    this._compress = rsv1;
  } else {
    // Continuation fragments: RSV1 is only set on the first frame.
    rsv1 = false;
    opcode = 0;
  }
  // A FIN fragment ends the message; the next send() starts a new one.
  if (options.fin) this._firstFragment = true;
  const opts = {
    [kByteLength]: byteLength,
    fin: options.fin,
    generateMask: this._generateMask,
    mask: options.mask,
    maskBuffer: this._maskBuffer,
    opcode,
    readOnly,
    rsv1
  };
  if (isBlob(data)) {
    if (this._state !== DEFAULT) {
      this.enqueue([this.getBlobData, data, this._compress, opts, cb]);
    } else {
      this.getBlobData(data, this._compress, opts, cb);
    }
  } else if (this._state !== DEFAULT) {
    this.enqueue([this.dispatch, data, this._compress, opts, cb]);
  } else {
    this.dispatch(data, this._compress, opts, cb);
  }
}
|
|
/**
 * Gets the contents of a blob as binary data.
 *
 * @param {Blob} blob The blob
 * @param {Boolean} [compress=false] Specifies whether or not to compress
 *     the data
 * @param {Object} options Options object
 * @param {Boolean} [options.fin=false] Specifies whether or not to set the
 *     FIN bit
 * @param {Function} [options.generateMask] The function used to generate the
 *     masking key
 * @param {Boolean} [options.mask=false] Specifies whether or not to mask
 *     `data`
 * @param {Buffer} [options.maskBuffer] The buffer used to store the masking
 *     key
 * @param {Number} options.opcode The opcode
 * @param {Boolean} [options.readOnly=false] Specifies whether `data` can be
 *     modified
 * @param {Boolean} [options.rsv1=false] Specifies whether or not to set the
 *     RSV1 bit
 * @param {Function} [cb] Callback
 * @private
 */
getBlobData(blob, compress, options, cb) {
  // Account for the pending bytes and block the queue while the blob is read.
  this._bufferedBytes += options[kByteLength];
  this._state = GET_BLOB_DATA;
  blob.arrayBuffer().then((arrayBuffer) => {
    if (this._socket.destroyed) {
      const err = new Error(
        "The socket was closed while the blob was being read"
      );
      // Defer so the callbacks always run asynchronously.
      process.nextTick(callCallbacks, this, err, cb);
      return;
    }
    this._bufferedBytes -= options[kByteLength];
    const data = toBuffer(arrayBuffer);
    if (!compress) {
      this._state = DEFAULT;
      this.sendFrame(Sender.frame(data, options), cb);
      // Resume any operations queued while the blob was being read.
      this.dequeue();
    } else {
      // dispatch() handles the DEFLATING state and the eventual dequeue.
      this.dispatch(data, compress, options, cb);
    }
  }).catch((err) => {
    process.nextTick(onError, this, err, cb);
  });
}
|
|
/**
 * Dispatches a message.
 *
 * @param {(Buffer|String)} data The message to send
 * @param {Boolean} [compress=false] Specifies whether or not to compress
 *     `data`
 * @param {Object} options Options object
 * @param {Boolean} [options.fin=false] Specifies whether or not to set the
 *     FIN bit
 * @param {Function} [options.generateMask] The function used to generate the
 *     masking key
 * @param {Boolean} [options.mask=false] Specifies whether or not to mask
 *     `data`
 * @param {Buffer} [options.maskBuffer] The buffer used to store the masking
 *     key
 * @param {Number} options.opcode The opcode
 * @param {Boolean} [options.readOnly=false] Specifies whether `data` can be
 *     modified
 * @param {Boolean} [options.rsv1=false] Specifies whether or not to set the
 *     RSV1 bit
 * @param {Function} [cb] Callback
 * @private
 */
dispatch(data, compress, options, cb) {
  if (!compress) {
    // Fast path: frame and write directly.
    this.sendFrame(Sender.frame(data, options), cb);
    return;
  }
  const perMessageDeflate = this._extensions[PerMessageDeflate.extensionName];
  // Account for the pending bytes and block the queue while deflating.
  this._bufferedBytes += options[kByteLength];
  this._state = DEFLATING;
  perMessageDeflate.compress(data, options.fin, (_, buf) => {
    if (this._socket.destroyed) {
      const err = new Error(
        "The socket was closed while data was being compressed"
      );
      callCallbacks(this, err, cb);
      return;
    }
    this._bufferedBytes -= options[kByteLength];
    this._state = DEFAULT;
    // The deflated buffer is freshly allocated, so it may be masked in place.
    options.readOnly = false;
    this.sendFrame(Sender.frame(buf, options), cb);
    // Resume any operations queued while deflating.
    this.dequeue();
  });
}
|
|
/**
|
|
* Executes queued send operations.
|
|
*
|
|
* @private
|
|
*/
|
|
dequeue() {
|
|
while (this._state === DEFAULT && this._queue.length) {
|
|
const params = this._queue.shift();
|
|
this._bufferedBytes -= params[3][kByteLength];
|
|
Reflect.apply(params[0], this, params.slice(1));
|
|
}
|
|
}
|
|
/**
|
|
* Enqueues a send operation.
|
|
*
|
|
* @param {Array} params Send operation parameters.
|
|
* @private
|
|
*/
|
|
enqueue(params) {
|
|
this._bufferedBytes += params[3][kByteLength];
|
|
this._queue.push(params);
|
|
}
|
|
/**
|
|
* Sends a frame.
|
|
*
|
|
* @param {(Buffer | String)[]} list The frame to send
|
|
* @param {Function} [cb] Callback
|
|
* @private
|
|
*/
|
|
sendFrame(list, cb) {
|
|
if (list.length === 2) {
|
|
this._socket.cork();
|
|
this._socket.write(list[0]);
|
|
this._socket.write(list[1], cb);
|
|
this._socket.uncork();
|
|
} else {
|
|
this._socket.write(list[0], cb);
|
|
}
|
|
}
|
|
}
|
|
sender = Sender;
|
|
/**
 * Invokes `cb` and the callback of every queued send operation with `err`.
 *
 * @param {Sender} sender2 The `Sender` instance
 * @param {Error} err The error to report
 * @param {Function} [cb] The first callback to call
 * @private
 */
function callCallbacks(sender2, err, cb) {
  if (typeof cb === "function") cb(err);
  for (const entry of sender2._queue) {
    // The callback, when present, is always the last element of the entry.
    const queuedCb = entry[entry.length - 1];
    if (typeof queuedCb === "function") queuedCb(err);
  }
}
|
|
/**
 * Reports `err` to all pending callbacks, then to the sender's error handler.
 *
 * @param {Sender} senderInstance The `Sender` instance
 * @param {Error} err The error to report
 * @param {Function} [cb] The first callback to call
 * @private
 */
function onError(senderInstance, err, cb) {
  callCallbacks(senderInstance, err, cb);
  senderInstance.onerror(err);
}
|
|
return sender;
|
|
}
|
|
var eventTarget;
|
|
var hasRequiredEventTarget;
|
|
function requireEventTarget() {
|
|
if (hasRequiredEventTarget) return eventTarget;
|
|
hasRequiredEventTarget = 1;
|
|
const { kForOnEventAttribute, kListener } = requireConstants();
|
|
const kCode = /* @__PURE__ */ Symbol("kCode");
|
|
const kData = /* @__PURE__ */ Symbol("kData");
|
|
const kError = /* @__PURE__ */ Symbol("kError");
|
|
const kMessage = /* @__PURE__ */ Symbol("kMessage");
|
|
const kReason = /* @__PURE__ */ Symbol("kReason");
|
|
const kTarget = /* @__PURE__ */ Symbol("kTarget");
|
|
const kType = /* @__PURE__ */ Symbol("kType");
|
|
const kWasClean = /* @__PURE__ */ Symbol("kWasClean");
|
|
/**
 * Minimal DOM-style event: carries a type and the object that dispatched it.
 */
class Event {
  /**
   * Create a new `Event`.
   *
   * @param {String} type The name of the event
   */
  constructor(type) {
    this[kType] = type;
    this[kTarget] = null;
  }

  /**
   * The object that dispatched the event.
   *
   * @type {*}
   */
  get target() {
    return this[kTarget];
  }

  /**
   * The name of the event.
   *
   * @type {String}
   */
  get type() {
    return this[kType];
  }
}
// Accessors declared with `get` are non-enumerable by default; make them
// enumerable to mirror the DOM interface.
Object.defineProperty(Event.prototype, "target", { enumerable: true });
Object.defineProperty(Event.prototype, "type", { enumerable: true });
|
|
/**
 * Event emitted when a connection is closed.
 */
class CloseEvent extends Event {
  /**
   * Create a new `CloseEvent`.
   *
   * @param {String} type The name of the event
   * @param {Object} [options] A dictionary object that allows for setting
   *     attributes via object members of the same name
   * @param {Number} [options.code=0] The status code explaining why the
   *     connection was closed
   * @param {String} [options.reason=''] A human-readable string explaining
   *     why the connection was closed
   * @param {Boolean} [options.wasClean=false] Indicates whether or not the
   *     connection was cleanly closed
   */
  constructor(type, options = {}) {
    super(type);
    // Destructuring defaults fire only on `undefined`, matching the
    // original explicit `=== void 0` checks.
    const { code = 0, reason = "", wasClean = false } = options;
    this[kCode] = code;
    this[kReason] = reason;
    this[kWasClean] = wasClean;
  }

  /** @type {Number} */
  get code() {
    return this[kCode];
  }

  /** @type {String} */
  get reason() {
    return this[kReason];
  }

  /** @type {Boolean} */
  get wasClean() {
    return this[kWasClean];
  }
}
// Make the accessors enumerable to mirror the DOM interface.
Object.defineProperty(CloseEvent.prototype, "code", { enumerable: true });
Object.defineProperty(CloseEvent.prototype, "reason", { enumerable: true });
Object.defineProperty(CloseEvent.prototype, "wasClean", { enumerable: true });
|
|
/**
 * Event emitted when an error occurs.
 */
class ErrorEvent extends Event {
  /**
   * Create a new `ErrorEvent`.
   *
   * @param {String} type The name of the event
   * @param {Object} [options] A dictionary object that allows for setting
   *     attributes via object members of the same name
   * @param {*} [options.error=null] The error that generated this event
   * @param {String} [options.message=''] The error message
   */
  constructor(type, options = {}) {
    super(type);
    // Destructuring defaults fire only on `undefined`, matching the
    // original explicit `=== void 0` checks.
    const { error = null, message = "" } = options;
    this[kError] = error;
    this[kMessage] = message;
  }

  /** @type {*} */
  get error() {
    return this[kError];
  }

  /** @type {String} */
  get message() {
    return this[kMessage];
  }
}
// Make the accessors enumerable to mirror the DOM interface.
Object.defineProperty(ErrorEvent.prototype, "error", { enumerable: true });
Object.defineProperty(ErrorEvent.prototype, "message", { enumerable: true });
|
|
/**
 * Event emitted when a message is received.
 */
class MessageEvent extends Event {
  /**
   * Create a new `MessageEvent`.
   *
   * @param {String} type The name of the event
   * @param {Object} [options] A dictionary object that allows for setting
   *     attributes via object members of the same name
   * @param {*} [options.data=null] The message content
   */
  constructor(type, options = {}) {
    super(type);
    // Destructuring default fires only on `undefined`, matching the
    // original explicit `=== void 0` check.
    const { data = null } = options;
    this[kData] = data;
  }

  /** @type {*} */
  get data() {
    return this[kData];
  }
}
// Make the accessor enumerable to mirror the DOM interface.
Object.defineProperty(MessageEvent.prototype, "data", { enumerable: true });
|
|
// Mixin implementing the `EventTarget` interface on top of the Node
// `EventEmitter` API (relies on `this.listeners`/`this.on`/`this.once`).
const EventTarget = {
  /**
   * Register an event listener.
   *
   * @param {String} type A string representing the event type to listen for
   * @param {(Function|Object)} handler The listener to add
   * @param {Object} [options] An options object specifies characteristics about
   *     the event listener
   * @param {Boolean} [options.once=false] A `Boolean` indicating that the
   *     listener should be invoked at most once after being added. If `true`,
   *     the listener would be automatically removed when invoked.
   * @public
   */
  addEventListener(type, handler, options = {}) {
    // Ignore duplicate registrations of the same handler, unless either
    // registration comes from an `onxxx` attribute.
    for (const listener of this.listeners(type)) {
      if (!options[kForOnEventAttribute] && listener[kListener] === handler && !listener[kForOnEventAttribute]) {
        return;
      }
    }
    // Each known event type gets a wrapper that adapts EventEmitter
    // arguments into a DOM-style event object. Plain `function`s are used
    // so `this` is the emitting WebSocket.
    let wrapper;
    if (type === "message") {
      wrapper = function onMessage(data, isBinary) {
        const event = new MessageEvent("message", {
          data: isBinary ? data : data.toString()
        });
        event[kTarget] = this;
        callListener(handler, this, event);
      };
    } else if (type === "close") {
      wrapper = function onClose(code, message) {
        const event = new CloseEvent("close", {
          code,
          reason: message.toString(),
          wasClean: this._closeFrameReceived && this._closeFrameSent
        });
        event[kTarget] = this;
        callListener(handler, this, event);
      };
    } else if (type === "error") {
      wrapper = function onError(error) {
        const event = new ErrorEvent("error", {
          error,
          message: error.message
        });
        event[kTarget] = this;
        callListener(handler, this, event);
      };
    } else if (type === "open") {
      wrapper = function onOpen() {
        const event = new Event("open");
        event[kTarget] = this;
        callListener(handler, this, event);
      };
    } else {
      // Unknown event types are silently ignored, per the original behavior.
      return;
    }
    // Remember origin and original handler so removeEventListener and the
    // duplicate check above can identify this wrapper.
    wrapper[kForOnEventAttribute] = !!options[kForOnEventAttribute];
    wrapper[kListener] = handler;
    if (options.once) {
      this.once(type, wrapper);
    } else {
      this.on(type, wrapper);
    }
  },
  /**
   * Remove an event listener.
   *
   * @param {String} type A string representing the event type to remove
   * @param {(Function|Object)} handler The listener to remove
   * @public
   */
  removeEventListener(type, handler) {
    // Only remove listeners added via addEventListener, never those
    // installed through an `onxxx` attribute.
    for (const listener of this.listeners(type)) {
      if (listener[kListener] === handler && !listener[kForOnEventAttribute]) {
        this.removeListener(type, listener);
        break;
      }
    }
  }
};
|
|
eventTarget = {
|
|
CloseEvent,
|
|
ErrorEvent,
|
|
Event,
|
|
EventTarget,
|
|
MessageEvent
|
|
};
|
|
/**
 * Calls an event listener, supporting both plain functions and
 * `handleEvent` objects (per the DOM EventListener interface).
 *
 * @param {(Function|Object)} listener The listener to invoke
 * @param {*} thisArg The receiver for plain-function listeners
 * @param {Event} event The event to pass to the listener
 * @private
 */
function callListener(listener, thisArg, event) {
  if (typeof listener === "object" && listener.handleEvent) {
    // Object listeners are invoked with themselves as the receiver.
    listener.handleEvent.call(listener, event);
    return;
  }
  listener.call(thisArg, event);
}
|
|
return eventTarget;
|
|
}
|
|
var extension;
|
|
var hasRequiredExtension;
|
|
function requireExtension() {
|
|
if (hasRequiredExtension) return extension;
|
|
hasRequiredExtension = 1;
|
|
const { tokenChars } = requireValidation();
|
|
/**
 * Appends `elem` to the array stored at `dest[name]`, creating the array
 * on first use.
 *
 * @param {Object} dest The target object
 * @param {String} name The key to append under
 * @param {*} elem The element to append
 * @private
 */
function push(dest, name, elem) {
  if (dest[name] !== void 0) {
    dest[name].push(elem);
  } else {
    dest[name] = [elem];
  }
}
|
|
/**
 * Parses a `Sec-WebSocket-Extensions` header into a map of extension name
 * to parameter-object list.
 *
 * Character-code state machine. Relevant codes: 9 = TAB, 32 = SP,
 * 34 = '"', 44 = ',', 59 = ';', 61 = '=', 92 = '\'.
 *
 * @param {String} header The header field value to parse
 * @return {Object} Map of extension name to an array of parameter objects
 * @throws {SyntaxError} If the header is malformed
 * @private
 */
function parse(header) {
  // Null-prototype objects so header tokens can't collide with
  // Object.prototype properties.
  const offers = /* @__PURE__ */ Object.create(null);
  let params = /* @__PURE__ */ Object.create(null);
  let mustUnescape = false;
  let isEscaping = false;
  let inQuotes = false;
  let extensionName;
  let paramName;
  // start/end delimit the token currently being scanned; -1 means unset.
  let start = -1;
  let code = -1;
  let end = -1;
  let i = 0;
  for (; i < header.length; i++) {
    code = header.charCodeAt(i);
    if (extensionName === void 0) {
      // State 1: scanning an extension name.
      if (end === -1 && tokenChars[code] === 1) {
        if (start === -1) start = i;
      } else if (i !== 0 && (code === 32 || code === 9)) {
        // Whitespace ends the token (but is not allowed at the very start).
        if (end === -1 && start !== -1) end = i;
      } else if (code === 59 || code === 44) {
        // ';' introduces parameters; ',' ends this offer.
        if (start === -1) {
          throw new SyntaxError(`Unexpected character at index ${i}`);
        }
        if (end === -1) end = i;
        const name = header.slice(start, end);
        if (code === 44) {
          push(offers, name, params);
          params = /* @__PURE__ */ Object.create(null);
        } else {
          extensionName = name;
        }
        start = end = -1;
      } else {
        throw new SyntaxError(`Unexpected character at index ${i}`);
      }
    } else if (paramName === void 0) {
      // State 2: scanning a parameter name.
      if (end === -1 && tokenChars[code] === 1) {
        if (start === -1) start = i;
      } else if (code === 32 || code === 9) {
        if (end === -1 && start !== -1) end = i;
      } else if (code === 59 || code === 44) {
        // Parameter without a value: store it as `true`.
        if (start === -1) {
          throw new SyntaxError(`Unexpected character at index ${i}`);
        }
        if (end === -1) end = i;
        push(params, header.slice(start, end), true);
        if (code === 44) {
          push(offers, extensionName, params);
          params = /* @__PURE__ */ Object.create(null);
          extensionName = void 0;
        }
        start = end = -1;
      } else if (code === 61 && start !== -1 && end === -1) {
        // '=' transitions to value scanning.
        paramName = header.slice(start, i);
        start = end = -1;
      } else {
        throw new SyntaxError(`Unexpected character at index ${i}`);
      }
    } else {
      // State 3: scanning a parameter value (possibly quoted/escaped).
      if (isEscaping) {
        // Only token characters may follow a backslash inside quotes.
        if (tokenChars[code] !== 1) {
          throw new SyntaxError(`Unexpected character at index ${i}`);
        }
        if (start === -1) start = i;
        else if (!mustUnescape) mustUnescape = true;
        isEscaping = false;
      } else if (inQuotes) {
        if (tokenChars[code] === 1) {
          if (start === -1) start = i;
        } else if (code === 34 && start !== -1) {
          // Closing quote ends the value.
          inQuotes = false;
          end = i;
        } else if (code === 92) {
          isEscaping = true;
        } else {
          throw new SyntaxError(`Unexpected character at index ${i}`);
        }
      } else if (code === 34 && header.charCodeAt(i - 1) === 61) {
        // A quote is only valid immediately after '='.
        inQuotes = true;
      } else if (end === -1 && tokenChars[code] === 1) {
        if (start === -1) start = i;
      } else if (start !== -1 && (code === 32 || code === 9)) {
        if (end === -1) end = i;
      } else if (code === 59 || code === 44) {
        if (start === -1) {
          throw new SyntaxError(`Unexpected character at index ${i}`);
        }
        if (end === -1) end = i;
        let value = header.slice(start, end);
        if (mustUnescape) {
          // Remove the backslashes recorded while scanning escapes.
          value = value.replace(/\\/g, "");
          mustUnescape = false;
        }
        push(params, paramName, value);
        if (code === 44) {
          push(offers, extensionName, params);
          params = /* @__PURE__ */ Object.create(null);
          extensionName = void 0;
        }
        paramName = void 0;
        start = end = -1;
      } else {
        throw new SyntaxError(`Unexpected character at index ${i}`);
      }
    }
  }
  // End of input: the header must not end mid-token, inside quotes, or
  // with trailing whitespace.
  if (start === -1 || inQuotes || code === 32 || code === 9) {
    throw new SyntaxError("Unexpected end of input");
  }
  if (end === -1) end = i;
  const token = header.slice(start, end);
  // Flush the final pending token according to the state we ended in.
  if (extensionName === void 0) {
    push(offers, token, params);
  } else {
    if (paramName === void 0) {
      push(params, token, true);
    } else if (mustUnescape) {
      push(params, paramName, token.replace(/\\/g, ""));
    } else {
      push(params, paramName, token);
    }
    push(offers, extensionName, params);
  }
  return offers;
}
|
|
/**
 * Serializes a map of extensions and their parameters into a
 * `Sec-WebSocket-Extensions` header field value.
 *
 * @param {Object} extensions Map of extension name to a parameter object or
 *     an array of parameter objects
 * @return {String} The formatted header field value
 * @private
 */
function format(extensions) {
  return Object.keys(extensions)
    .map((name) => {
      const raw = extensions[name];
      const configurations = Array.isArray(raw) ? raw : [raw];
      return configurations
        .map((params) => {
          // Each configuration becomes "name; key[=value]; ...".
          const fields = [name];
          for (const key of Object.keys(params)) {
            const rawValues = params[key];
            const values = Array.isArray(rawValues) ? rawValues : [rawValues];
            // A `true` value means a bare parameter with no "=value" part.
            const rendered = values
              .map((value) => (value === true ? key : `${key}=${value}`))
              .join("; ");
            fields.push(rendered);
          }
          return fields.join("; ");
        })
        .join(", ");
    })
    .join(", ");
}
|
|
extension = { format, parse };
|
|
return extension;
|
|
}
|
|
var websocket;
|
|
var hasRequiredWebsocket;
|
|
function requireWebsocket() {
|
|
if (hasRequiredWebsocket) return websocket;
|
|
hasRequiredWebsocket = 1;
|
|
const EventEmitter2 = require$$0$3;
|
|
const https = require$$1$1;
|
|
const http = require$$2$1;
|
|
const net = require$$3;
|
|
const tls = require$$4;
|
|
const { randomBytes, createHash } = crypto;
|
|
const { Duplex, Readable } = require$$0$2;
|
|
const { URL } = require$$7;
|
|
const PerMessageDeflate = requirePermessageDeflate();
|
|
const Receiver = requireReceiver();
|
|
const Sender = requireSender();
|
|
const { isBlob } = requireValidation();
|
|
const {
|
|
BINARY_TYPES,
|
|
CLOSE_TIMEOUT,
|
|
EMPTY_BUFFER,
|
|
GUID,
|
|
kForOnEventAttribute,
|
|
kListener,
|
|
kStatusCode,
|
|
kWebSocket,
|
|
NOOP
|
|
} = requireConstants();
|
|
const {
|
|
EventTarget: { addEventListener, removeEventListener }
|
|
} = requireEventTarget();
|
|
const { format, parse } = requireExtension();
|
|
const { toBuffer } = requireBufferUtil();
|
|
const kAborted = /* @__PURE__ */ Symbol("kAborted");
|
|
const protocolVersions = [8, 13];
|
|
const readyStates = ["CONNECTING", "OPEN", "CLOSING", "CLOSED"];
|
|
const subprotocolRegex = /^[!#$%&'*+\-.0-9A-Z^_`|a-z~]+$/;
|
|
/**
 * Class representing a WebSocket. Emits `open`, `message`, `ping`, `pong`,
 * `error` and `close` events and mirrors the browser WebSocket API
 * (readyState constants, `onopen`-style attributes) on top of EventEmitter.
 *
 * @extends EventEmitter2
 */
class WebSocket2 extends EventEmitter2 {
  /**
   * Create a new `WebSocket`.
   *
   * @param {(String|URL)} address The URL to which to connect
   * @param {(String|String[])} [protocols] The subprotocols
   * @param {Object} [options] Connection options
   */
  constructor(address, protocols, options) {
    super();
    this._binaryType = BINARY_TYPES[0];
    // 1006 = abnormal closure; overwritten when a close frame is received.
    this._closeCode = 1006;
    this._closeFrameReceived = false;
    this._closeFrameSent = false;
    this._closeMessage = EMPTY_BUFFER;
    this._closeTimer = null;
    this._errorEmitted = false;
    this._extensions = {};
    this._paused = false;
    this._protocol = "";
    this._readyState = WebSocket2.CONNECTING;
    this._receiver = null;
    this._sender = null;
    this._socket = null;
    // A `null` address means the instance is created server-side by the
    // WebSocketServer; otherwise act as a client and start the handshake.
    if (address !== null) {
      this._bufferedAmount = 0;
      this._isServer = false;
      this._redirects = 0;
      // Normalize `protocols`: it may be omitted, a single string, an
      // array, or actually be the `options` object.
      if (protocols === void 0) {
        protocols = [];
      } else if (!Array.isArray(protocols)) {
        if (typeof protocols === "object" && protocols !== null) {
          options = protocols;
          protocols = [];
        } else {
          protocols = [protocols];
        }
      }
      initAsClient(this, address, protocols, options);
    } else {
      this._autoPong = options.autoPong;
      this._closeTimeout = options.closeTimeout;
      this._isServer = true;
    }
  }
  /**
   * For historical reasons, the custom "nodebuffer" type is used by the default
   * instead of "blob".
   *
   * @type {String}
   */
  get binaryType() {
    return this._binaryType;
  }
  // Silently ignore invalid values; propagate the type to the active
  // receiver so in-flight messages use the new representation.
  set binaryType(type) {
    if (!BINARY_TYPES.includes(type)) return;
    this._binaryType = type;
    if (this._receiver) this._receiver._binaryType = type;
  }
  /**
   * Number of bytes queued but not yet transmitted. Before a socket exists
   * (CONNECTING / server-side pre-upgrade) this is tracked manually.
   *
   * @type {Number}
   */
  get bufferedAmount() {
    if (!this._socket) return this._bufferedAmount;
    return this._socket._writableState.length + this._sender._bufferedBytes;
  }
  /**
   * Comma-separated list of negotiated extension names.
   *
   * @type {String}
   */
  get extensions() {
    return Object.keys(this._extensions).join();
  }
  /**
   * @type {Boolean}
   */
  get isPaused() {
    return this._paused;
  }
  /**
   * @type {Function}
   */
  /* istanbul ignore next */
  get onclose() {
    return null;
  }
  /**
   * @type {Function}
   */
  /* istanbul ignore next */
  get onerror() {
    return null;
  }
  /**
   * @type {Function}
   */
  /* istanbul ignore next */
  get onopen() {
    return null;
  }
  /**
   * @type {Function}
   */
  /* istanbul ignore next */
  get onmessage() {
    return null;
  }
  /**
   * @type {String}
   */
  get protocol() {
    return this._protocol;
  }
  /**
   * @type {Number}
   */
  get readyState() {
    return this._readyState;
  }
  /**
   * @type {String}
   */
  get url() {
    return this._url;
  }
  /**
   * Set up the socket and the internal resources.
   *
   * @param {Duplex} socket The network socket between the server and client
   * @param {Buffer} head The first packet of the upgraded stream
   * @param {Object} options Options object
   * @param {Boolean} [options.allowSynchronousEvents=false] Specifies whether
   *     any of the `'message'`, `'ping'`, and `'pong'` events can be emitted
   *     multiple times in the same tick
   * @param {Function} [options.generateMask] The function used to generate the
   *     masking key
   * @param {Number} [options.maxPayload=0] The maximum allowed message size
   * @param {Boolean} [options.skipUTF8Validation=false] Specifies whether or
   *     not to skip UTF-8 validation for text and close messages
   * @private
   */
  setSocket(socket, head, options) {
    const receiver2 = new Receiver({
      allowSynchronousEvents: options.allowSynchronousEvents,
      binaryType: this.binaryType,
      extensions: this._extensions,
      isServer: this._isServer,
      maxPayload: options.maxPayload,
      skipUTF8Validation: options.skipUTF8Validation
    });
    const sender2 = new Sender(socket, this._extensions, options.generateMask);
    this._receiver = receiver2;
    this._sender = sender2;
    this._socket = socket;
    // Back-references used by the module-level event handlers to find the
    // owning WebSocket from `this`.
    receiver2[kWebSocket] = this;
    sender2[kWebSocket] = this;
    socket[kWebSocket] = this;
    receiver2.on("conclude", receiverOnConclude);
    receiver2.on("drain", receiverOnDrain);
    receiver2.on("error", receiverOnError);
    receiver2.on("message", receiverOnMessage);
    receiver2.on("ping", receiverOnPing);
    receiver2.on("pong", receiverOnPong);
    sender2.onerror = senderOnError;
    // These methods may not be available if `socket` is just a `Duplex`.
    if (socket.setTimeout) socket.setTimeout(0);
    if (socket.setNoDelay) socket.setNoDelay();
    // `head` holds bytes that arrived with the upgrade response; push them
    // back so the receiver sees them first.
    if (head.length > 0) socket.unshift(head);
    socket.on("close", socketOnClose);
    socket.on("data", socketOnData);
    socket.on("end", socketOnEnd);
    socket.on("error", socketOnError);
    this._readyState = WebSocket2.OPEN;
    this.emit("open");
  }
  /**
   * Emit the `'close'` event.
   *
   * @private
   */
  emitClose() {
    // No socket means the connection was never established (e.g. a failed
    // handshake) — there is nothing to clean up.
    if (!this._socket) {
      this._readyState = WebSocket2.CLOSED;
      this.emit("close", this._closeCode, this._closeMessage);
      return;
    }
    if (this._extensions[PerMessageDeflate.extensionName]) {
      this._extensions[PerMessageDeflate.extensionName].cleanup();
    }
    this._receiver.removeAllListeners();
    this._readyState = WebSocket2.CLOSED;
    this.emit("close", this._closeCode, this._closeMessage);
  }
  /**
   * Start a closing handshake.
   *
   *          +----------+   +-----------+   +----------+
   *     - - -|ws.close()|-->|close frame|-->|ws.close()|- - -
   *    |     +----------+   +-----------+   +----------+     |
   *          +----------+   +-----------+         |
   * CLOSING  |ws.close()|<--|close frame|<--+-----+       CLOSING
   *          +----------+   +-----------+   |
   *    |           |                        |   +---+        |
   *                +------------------------+-->|fin| - - - -
   *    |         +---+                      |   +---+
   *     - - - - -|fin|<---------------------+
   *              +---+
   *
   * @param {Number} [code] Status code explaining why the connection is closing
   * @param {(String|Buffer)} [data] The reason why the connection is
   *     closing
   * @public
   */
  close(code, data) {
    if (this.readyState === WebSocket2.CLOSED) return;
    if (this.readyState === WebSocket2.CONNECTING) {
      const msg = "WebSocket was closed before the connection was established";
      abortHandshake(this, this._req, msg);
      return;
    }
    if (this.readyState === WebSocket2.CLOSING) {
      // Second call while closing: if the handshake completed in both
      // directions, finish off the underlying socket.
      if (this._closeFrameSent && (this._closeFrameReceived || this._receiver._writableState.errorEmitted)) {
        this._socket.end();
      }
      return;
    }
    this._readyState = WebSocket2.CLOSING;
    this._sender.close(code, data, !this._isServer, (err) => {
      // The error is handled by the sender's own error listener.
      if (err) return;
      this._closeFrameSent = true;
      if (this._closeFrameReceived || this._receiver._writableState.errorEmitted) {
        this._socket.end();
      }
    });
    // Ensure the socket is destroyed even if the peer never answers the
    // close frame.
    setCloseTimer(this);
  }
  /**
   * Pause the socket.
   *
   * @public
   */
  pause() {
    if (this.readyState === WebSocket2.CONNECTING || this.readyState === WebSocket2.CLOSED) {
      return;
    }
    this._paused = true;
    this._socket.pause();
  }
  /**
   * Send a ping.
   *
   * @param {*} [data] The data to send
   * @param {Boolean} [mask] Indicates whether or not to mask `data`
   * @param {Function} [cb] Callback which is executed when the ping is sent
   * @public
   */
  ping(data, mask, cb) {
    if (this.readyState === WebSocket2.CONNECTING) {
      throw new Error("WebSocket is not open: readyState 0 (CONNECTING)");
    }
    // Shift optional arguments: ping(cb) and ping(data, cb) are allowed.
    if (typeof data === "function") {
      cb = data;
      data = mask = void 0;
    } else if (typeof mask === "function") {
      cb = mask;
      mask = void 0;
    }
    if (typeof data === "number") data = data.toString();
    if (this.readyState !== WebSocket2.OPEN) {
      sendAfterClose(this, data, cb);
      return;
    }
    // Per RFC 6455, clients mask frames and servers do not.
    if (mask === void 0) mask = !this._isServer;
    this._sender.ping(data || EMPTY_BUFFER, mask, cb);
  }
  /**
   * Send a pong.
   *
   * @param {*} [data] The data to send
   * @param {Boolean} [mask] Indicates whether or not to mask `data`
   * @param {Function} [cb] Callback which is executed when the pong is sent
   * @public
   */
  pong(data, mask, cb) {
    if (this.readyState === WebSocket2.CONNECTING) {
      throw new Error("WebSocket is not open: readyState 0 (CONNECTING)");
    }
    // Shift optional arguments: pong(cb) and pong(data, cb) are allowed.
    if (typeof data === "function") {
      cb = data;
      data = mask = void 0;
    } else if (typeof mask === "function") {
      cb = mask;
      mask = void 0;
    }
    if (typeof data === "number") data = data.toString();
    if (this.readyState !== WebSocket2.OPEN) {
      sendAfterClose(this, data, cb);
      return;
    }
    if (mask === void 0) mask = !this._isServer;
    this._sender.pong(data || EMPTY_BUFFER, mask, cb);
  }
  /**
   * Resume the socket.
   *
   * @public
   */
  resume() {
    if (this.readyState === WebSocket2.CONNECTING || this.readyState === WebSocket2.CLOSED) {
      return;
    }
    this._paused = false;
    // If the receiver is backed up, it resumes the socket itself on drain.
    if (!this._receiver._writableState.needDrain) this._socket.resume();
  }
  /**
   * Send a data message.
   *
   * @param {*} data The message to send
   * @param {Object} [options] Options object
   * @param {Boolean} [options.binary] Specifies whether `data` is binary or
   *     text
   * @param {Boolean} [options.compress] Specifies whether or not to compress
   *     `data`
   * @param {Boolean} [options.fin=true] Specifies whether the fragment is the
   *     last one
   * @param {Boolean} [options.mask] Specifies whether or not to mask `data`
   * @param {Function} [cb] Callback which is executed when data is written out
   * @public
   */
  send(data, options, cb) {
    if (this.readyState === WebSocket2.CONNECTING) {
      throw new Error("WebSocket is not open: readyState 0 (CONNECTING)");
    }
    if (typeof options === "function") {
      cb = options;
      options = {};
    }
    if (typeof data === "number") data = data.toString();
    if (this.readyState !== WebSocket2.OPEN) {
      sendAfterClose(this, data, cb);
      return;
    }
    const opts = {
      binary: typeof data !== "string",
      mask: !this._isServer,
      compress: true,
      fin: true,
      ...options
    };
    // Compression is only meaningful when permessage-deflate was negotiated.
    if (!this._extensions[PerMessageDeflate.extensionName]) {
      opts.compress = false;
    }
    this._sender.send(data || EMPTY_BUFFER, opts, cb);
  }
  /**
   * Forcibly close the connection.
   *
   * @public
   */
  terminate() {
    if (this.readyState === WebSocket2.CLOSED) return;
    if (this.readyState === WebSocket2.CONNECTING) {
      const msg = "WebSocket was closed before the connection was established";
      abortHandshake(this, this._req, msg);
      return;
    }
    if (this._socket) {
      this._readyState = WebSocket2.CLOSING;
      this._socket.destroy();
    }
  }
}
|
|
// Expose the ready-state constants both on the class (WebSocket2.OPEN) and on
// instances (ws.OPEN), mirroring the browser WebSocket API. The numeric
// values come from each state's index in `readyStates`.
Object.defineProperty(WebSocket2, "CONNECTING", {
  enumerable: true,
  value: readyStates.indexOf("CONNECTING")
});
Object.defineProperty(WebSocket2.prototype, "CONNECTING", {
  enumerable: true,
  value: readyStates.indexOf("CONNECTING")
});
Object.defineProperty(WebSocket2, "OPEN", {
  enumerable: true,
  value: readyStates.indexOf("OPEN")
});
Object.defineProperty(WebSocket2.prototype, "OPEN", {
  enumerable: true,
  value: readyStates.indexOf("OPEN")
});
Object.defineProperty(WebSocket2, "CLOSING", {
  enumerable: true,
  value: readyStates.indexOf("CLOSING")
});
Object.defineProperty(WebSocket2.prototype, "CLOSING", {
  enumerable: true,
  value: readyStates.indexOf("CLOSING")
});
Object.defineProperty(WebSocket2, "CLOSED", {
  enumerable: true,
  value: readyStates.indexOf("CLOSED")
});
Object.defineProperty(WebSocket2.prototype, "CLOSED", {
  enumerable: true,
  value: readyStates.indexOf("CLOSED")
});
// Make the standard accessor properties enumerable on the prototype.
[
  "binaryType",
  "bufferedAmount",
  "extensions",
  "isPaused",
  "protocol",
  "readyState",
  "url"
].forEach((property) => {
  Object.defineProperty(WebSocket2.prototype, property, { enumerable: true });
});
// Implement the `onopen`/`onerror`/`onclose`/`onmessage` attribute-style
// handlers on top of the EventEmitter listener list. A handler installed
// this way is tagged with `kForOnEventAttribute` so it can be found and
// replaced when the attribute is reassigned.
["open", "error", "close", "message"].forEach((method) => {
  Object.defineProperty(WebSocket2.prototype, `on${method}`, {
    enumerable: true,
    get() {
      for (const listener of this.listeners(method)) {
        if (listener[kForOnEventAttribute]) return listener[kListener];
      }
      return null;
    },
    set(handler) {
      // Remove the previously installed attribute handler, if any.
      for (const listener of this.listeners(method)) {
        if (listener[kForOnEventAttribute]) {
          this.removeListener(method, listener);
          break;
        }
      }
      // Assigning a non-function clears the handler.
      if (typeof handler !== "function") return;
      this.addEventListener(method, handler, {
        [kForOnEventAttribute]: true
      });
    }
  });
});
WebSocket2.prototype.addEventListener = addEventListener;
WebSocket2.prototype.removeEventListener = removeEventListener;
websocket = WebSocket2;
|
|
/**
 * Initialize a WebSocket client: validate the URL and options, send the
 * HTTP upgrade request, verify the server's handshake response and hand the
 * upgraded socket to the WebSocket instance. May call itself recursively
 * when following redirects.
 *
 * @param {WebSocket2} websocket2 The client to initialize
 * @param {(String|URL)} address The URL to which to connect
 * @param {Array} protocols The subprotocols
 * @param {Object} [options] Connection options
 * @private
 */
function initAsClient(websocket2, address, protocols, options) {
  // Merge user options over the defaults, then force request-level fields
  // (method, host, path, port, ...) so callers cannot override them.
  const opts = {
    allowSynchronousEvents: true,
    autoPong: true,
    closeTimeout: CLOSE_TIMEOUT,
    protocolVersion: protocolVersions[1],
    maxPayload: 100 * 1024 * 1024,
    skipUTF8Validation: false,
    perMessageDeflate: true,
    followRedirects: false,
    maxRedirects: 10,
    ...options,
    socketPath: void 0,
    hostname: void 0,
    protocol: void 0,
    timeout: void 0,
    method: "GET",
    host: void 0,
    path: void 0,
    port: void 0
  };
  websocket2._autoPong = opts.autoPong;
  websocket2._closeTimeout = opts.closeTimeout;
  if (!protocolVersions.includes(opts.protocolVersion)) {
    throw new RangeError(
      `Unsupported protocol version: ${opts.protocolVersion} (supported versions: ${protocolVersions.join(", ")})`
    );
  }
  let parsedUrl;
  if (address instanceof URL) {
    parsedUrl = address;
  } else {
    try {
      parsedUrl = new URL(address);
    } catch (e) {
      throw new SyntaxError(`Invalid URL: ${address}`);
    }
  }
  // Accept http(s): URLs as aliases for ws(s):.
  if (parsedUrl.protocol === "http:") {
    parsedUrl.protocol = "ws:";
  } else if (parsedUrl.protocol === "https:") {
    parsedUrl.protocol = "wss:";
  }
  websocket2._url = parsedUrl.href;
  const isSecure = parsedUrl.protocol === "wss:";
  const isIpcUrl = parsedUrl.protocol === "ws+unix:";
  let invalidUrlMessage;
  if (parsedUrl.protocol !== "ws:" && !isSecure && !isIpcUrl) {
    invalidUrlMessage = `The URL's protocol must be one of "ws:", "wss:", "http:", "https:", or "ws+unix:"`;
  } else if (isIpcUrl && !parsedUrl.pathname) {
    invalidUrlMessage = "The URL's pathname is empty";
  } else if (parsedUrl.hash) {
    invalidUrlMessage = "The URL contains a fragment identifier";
  }
  if (invalidUrlMessage) {
    const err = new SyntaxError(invalidUrlMessage);
    // On the first attempt a bad URL is the caller's fault and throws; on a
    // redirect it is the server's fault and becomes an 'error' event.
    if (websocket2._redirects === 0) {
      throw err;
    } else {
      emitErrorAndClose(websocket2, err);
      return;
    }
  }
  const defaultPort = isSecure ? 443 : 80;
  // Random nonce for the Sec-WebSocket-Key header (RFC 6455 section 4.1).
  const key = randomBytes(16).toString("base64");
  const request = isSecure ? https.request : http.request;
  const protocolSet = /* @__PURE__ */ new Set();
  let perMessageDeflate;
  opts.createConnection = opts.createConnection || (isSecure ? tlsConnect : netConnect);
  opts.defaultPort = opts.defaultPort || defaultPort;
  opts.port = parsedUrl.port || defaultPort;
  // Strip the brackets from IPv6 literal hostnames.
  opts.host = parsedUrl.hostname.startsWith("[") ? parsedUrl.hostname.slice(1, -1) : parsedUrl.hostname;
  opts.headers = {
    ...opts.headers,
    "Sec-WebSocket-Version": opts.protocolVersion,
    "Sec-WebSocket-Key": key,
    Connection: "Upgrade",
    Upgrade: "websocket"
  };
  opts.path = parsedUrl.pathname + parsedUrl.search;
  opts.timeout = opts.handshakeTimeout;
  if (opts.perMessageDeflate) {
    perMessageDeflate = new PerMessageDeflate(
      opts.perMessageDeflate !== true ? opts.perMessageDeflate : {},
      false,
      opts.maxPayload
    );
    opts.headers["Sec-WebSocket-Extensions"] = format({
      [PerMessageDeflate.extensionName]: perMessageDeflate.offer()
    });
  }
  if (protocols.length) {
    for (const protocol of protocols) {
      if (typeof protocol !== "string" || !subprotocolRegex.test(protocol) || protocolSet.has(protocol)) {
        throw new SyntaxError(
          "An invalid or duplicated subprotocol was specified"
        );
      }
      protocolSet.add(protocol);
    }
    opts.headers["Sec-WebSocket-Protocol"] = protocols.join(",");
  }
  if (opts.origin) {
    // Older protocol drafts (< 13) used a different origin header name.
    if (opts.protocolVersion < 13) {
      opts.headers["Sec-WebSocket-Origin"] = opts.origin;
    } else {
      opts.headers.Origin = opts.origin;
    }
  }
  if (parsedUrl.username || parsedUrl.password) {
    opts.auth = `${parsedUrl.username}:${parsedUrl.password}`;
  }
  if (isIpcUrl) {
    // ws+unix:/path/to.sock:/request/path — split socket path from HTTP path.
    const parts = opts.path.split(":");
    opts.socketPath = parts[0];
    opts.path = parts[1];
  }
  let req;
  if (opts.followRedirects) {
    if (websocket2._redirects === 0) {
      // First request: remember the original target so later redirects can
      // decide whether to forward credentials, and lower-case the header
      // names for case-insensitive lookups.
      websocket2._originalIpc = isIpcUrl;
      websocket2._originalSecure = isSecure;
      websocket2._originalHostOrSocketPath = isIpcUrl ? opts.socketPath : parsedUrl.host;
      const headers = options && options.headers;
      options = { ...options, headers: {} };
      if (headers) {
        for (const [key2, value] of Object.entries(headers)) {
          options.headers[key2.toLowerCase()] = value;
        }
      }
    } else if (websocket2.listenerCount("redirect") === 0) {
      // Redirect without a 'redirect' listener: drop credentials when the
      // target host changed or the connection was downgraded from TLS.
      const isSameHost = isIpcUrl ? websocket2._originalIpc ? opts.socketPath === websocket2._originalHostOrSocketPath : false : websocket2._originalIpc ? false : parsedUrl.host === websocket2._originalHostOrSocketPath;
      if (!isSameHost || websocket2._originalSecure && !isSecure) {
        delete opts.headers.authorization;
        delete opts.headers.cookie;
        if (!isSameHost) delete opts.headers.host;
        opts.auth = void 0;
      }
    }
    if (opts.auth && !options.headers.authorization) {
      options.headers.authorization = "Basic " + Buffer.from(opts.auth).toString("base64");
    }
    req = websocket2._req = request(opts);
    if (websocket2._redirects) {
      websocket2.emit("redirect", websocket2.url, req);
    }
  } else {
    req = websocket2._req = request(opts);
  }
  if (opts.timeout) {
    req.on("timeout", () => {
      abortHandshake(websocket2, req, "Opening handshake has timed out");
    });
  }
  req.on("error", (err) => {
    // Ignore errors after the request was cleared or deliberately aborted.
    if (req === null || req[kAborted]) return;
    req = websocket2._req = null;
    emitErrorAndClose(websocket2, err);
  });
  req.on("response", (res) => {
    // A plain HTTP response means the upgrade was refused; either follow a
    // redirect or fail the handshake.
    const location = res.headers.location;
    const statusCode = res.statusCode;
    if (location && opts.followRedirects && statusCode >= 300 && statusCode < 400) {
      if (++websocket2._redirects > opts.maxRedirects) {
        abortHandshake(websocket2, req, "Maximum redirects exceeded");
        return;
      }
      req.abort();
      let addr;
      try {
        addr = new URL(location, address);
      } catch (e) {
        const err = new SyntaxError(`Invalid URL: ${location}`);
        emitErrorAndClose(websocket2, err);
        return;
      }
      initAsClient(websocket2, addr, protocols, options);
    } else if (!websocket2.emit("unexpected-response", req, res)) {
      abortHandshake(
        websocket2,
        req,
        `Unexpected server response: ${res.statusCode}`
      );
    }
  });
  req.on("upgrade", (res, socket, head) => {
    websocket2.emit("upgrade", res);
    // The 'upgrade' listener may have closed the connection.
    if (websocket2.readyState !== WebSocket2.CONNECTING) return;
    req = websocket2._req = null;
    const upgrade = res.headers.upgrade;
    if (upgrade === void 0 || upgrade.toLowerCase() !== "websocket") {
      abortHandshake(websocket2, socket, "Invalid Upgrade header");
      return;
    }
    // Verify the server's accept key: SHA-1 of our nonce + protocol GUID.
    const digest = createHash("sha1").update(key + GUID).digest("base64");
    if (res.headers["sec-websocket-accept"] !== digest) {
      abortHandshake(websocket2, socket, "Invalid Sec-WebSocket-Accept header");
      return;
    }
    // The server must select exactly one of the offered subprotocols.
    const serverProt = res.headers["sec-websocket-protocol"];
    let protError;
    if (serverProt !== void 0) {
      if (!protocolSet.size) {
        protError = "Server sent a subprotocol but none was requested";
      } else if (!protocolSet.has(serverProt)) {
        protError = "Server sent an invalid subprotocol";
      }
    } else if (protocolSet.size) {
      protError = "Server sent no subprotocol";
    }
    if (protError) {
      abortHandshake(websocket2, socket, protError);
      return;
    }
    if (serverProt) websocket2._protocol = serverProt;
    // Validate and accept the negotiated permessage-deflate parameters.
    const secWebSocketExtensions = res.headers["sec-websocket-extensions"];
    if (secWebSocketExtensions !== void 0) {
      if (!perMessageDeflate) {
        const message = "Server sent a Sec-WebSocket-Extensions header but no extension was requested";
        abortHandshake(websocket2, socket, message);
        return;
      }
      let extensions;
      try {
        extensions = parse(secWebSocketExtensions);
      } catch (err) {
        const message = "Invalid Sec-WebSocket-Extensions header";
        abortHandshake(websocket2, socket, message);
        return;
      }
      const extensionNames = Object.keys(extensions);
      if (extensionNames.length !== 1 || extensionNames[0] !== PerMessageDeflate.extensionName) {
        const message = "Server indicated an extension that was not requested";
        abortHandshake(websocket2, socket, message);
        return;
      }
      try {
        perMessageDeflate.accept(extensions[PerMessageDeflate.extensionName]);
      } catch (err) {
        const message = "Invalid Sec-WebSocket-Extensions header";
        abortHandshake(websocket2, socket, message);
        return;
      }
      websocket2._extensions[PerMessageDeflate.extensionName] = perMessageDeflate;
    }
    websocket2.setSocket(socket, head, {
      allowSynchronousEvents: opts.allowSynchronousEvents,
      generateMask: opts.generateMask,
      maxPayload: opts.maxPayload,
      skipUTF8Validation: opts.skipUTF8Validation
    });
  });
  if (opts.finishRequest) {
    opts.finishRequest(req, websocket2);
  } else {
    req.end();
  }
}
|
|
/**
 * Emit the `'error'` and `'close'` events on a websocket whose connection
 * could not be established or was lost.
 *
 * @param {WebSocket2} websocket2 The WebSocket instance
 * @param {Error} err The error to emit
 * @private
 */
function emitErrorAndClose(websocket2, err) {
  websocket2._readyState = WebSocket2.CLOSING;
  // Record that 'error' was emitted so later failure paths don't re-emit it.
  websocket2._errorEmitted = true;
  websocket2.emit("error", err);
  websocket2.emitClose();
}
|
|
/**
 * Create a plain TCP (or IPC) connection. Used as the default
 * `createConnection` for `ws:` URLs.
 *
 * @param {Object} options Connection options
 * @return {net.Socket} The newly created socket
 * @private
 */
function netConnect(options) {
  // `net.connect` uses `path` for IPC sockets; it is undefined for TCP.
  options.path = options.socketPath;
  return net.connect(options);
}
|
|
/**
 * Create a TLS connection. Used as the default `createConnection` for
 * `wss:` URLs.
 *
 * @param {Object} options Connection options
 * @return {tls.TLSSocket} The newly created socket
 * @private
 */
function tlsConnect(options) {
  options.path = void 0;
  // Default SNI to the host name, but never to an IP address.
  if (!options.servername && options.servername !== "") {
    options.servername = net.isIP(options.host) ? "" : options.host;
  }
  return tls.connect(options);
}
|
|
/**
 * Abort the opening handshake and emit an error.
 *
 * @param {WebSocket2} websocket2 The WebSocket instance
 * @param {(http.ClientRequest|net.Socket|tls.Socket)} stream2 The request to
 *     abort or the socket to destroy
 * @param {String} message The error message
 * @private
 */
function abortHandshake(websocket2, stream2, message) {
  websocket2._readyState = WebSocket2.CLOSING;
  const err = new Error(message);
  Error.captureStackTrace(err, abortHandshake);
  // `setHeader` distinguishes an http.ClientRequest from a raw socket.
  if (stream2.setHeader) {
    // Mark the request so its 'error' listener ignores the abort.
    stream2[kAborted] = true;
    stream2.abort();
    if (stream2.socket && !stream2.socket.destroyed) {
      // The socket is paused and its pending data stuck; destroy it so the
      // process does not stay alive (see websockets/ws error handling).
      stream2.socket.destroy();
    }
    process.nextTick(emitErrorAndClose, websocket2, err);
  } else {
    stream2.destroy(err);
    stream2.once("error", websocket2.emit.bind(websocket2, "error"));
    stream2.once("close", websocket2.emitClose.bind(websocket2));
  }
}
|
|
/**
 * Handle cases where a call to `send()`, `ping()`, or `pong()` is made while
 * the connection is closing or closed: count the data as buffered and report
 * an error asynchronously via the callback.
 *
 * @param {WebSocket2} websocket2 The WebSocket instance
 * @param {*} data The data to send
 * @param {Function} [cb] Callback
 * @private
 */
function sendAfterClose(websocket2, data, cb) {
  if (data) {
    const length = isBlob(data) ? data.size : toBuffer(data).length;
    // The data is never sent, but `bufferedAmount` must still reflect it to
    // match the browser API contract.
    if (websocket2._socket) websocket2._sender._bufferedBytes += length;
    else websocket2._bufferedAmount += length;
  }
  if (cb) {
    const err = new Error(
      `WebSocket is not open: readyState ${websocket2.readyState} (${readyStates[websocket2.readyState]})`
    );
    process.nextTick(cb, err);
  }
}
|
|
/**
 * The listener of the `Receiver` `'conclude'` event: a close frame was
 * received. Record it and echo the close handshake.
 *
 * @param {Number} code The status code
 * @param {Buffer} reason The reason for closing
 * @private
 */
function receiverOnConclude(code, reason) {
  const websocket2 = this[kWebSocket];
  websocket2._closeFrameReceived = true;
  websocket2._closeMessage = reason;
  websocket2._closeCode = code;
  // The socket's back-reference is cleared in socketOnClose; if it is gone
  // the teardown already ran.
  if (websocket2._socket[kWebSocket] === void 0) return;
  websocket2._socket.removeListener("data", socketOnData);
  process.nextTick(resume, websocket2._socket);
  // 1005 means "no status received" and must not be echoed explicitly.
  if (code === 1005) websocket2.close();
  else websocket2.close(code, reason);
}
|
|
/**
 * The listener of the `Receiver` `'drain'` event: the receiver caught up, so
 * resume reading from the socket unless the user explicitly paused.
 *
 * @private
 */
function receiverOnDrain() {
  const websocket2 = this[kWebSocket];
  if (!websocket2.isPaused) websocket2._socket.resume();
}
|
|
/**
 * The listener of the `Receiver` `'error'` event: a protocol or parsing
 * error occurred; start the closing handshake and emit `'error'` once.
 *
 * @param {Error} err The emitted error
 * @private
 */
function receiverOnError(err) {
  const websocket2 = this[kWebSocket];
  if (websocket2._socket[kWebSocket] !== void 0) {
    websocket2._socket.removeListener("data", socketOnData);
    // Flush any data the receiver buffered before closing.
    process.nextTick(resume, websocket2._socket);
    websocket2.close(err[kStatusCode]);
  }
  if (!websocket2._errorEmitted) {
    websocket2._errorEmitted = true;
    websocket2.emit("error", err);
  }
}
|
|
/**
 * The listener of the `Receiver` `'finish'` (and `'error'`) event used after
 * the socket has closed: the receiver is done, emit `'close'`.
 *
 * @private
 */
function receiverOnFinish() {
  this[kWebSocket].emitClose();
}
|
|
/**
 * The listener of the `Receiver` `'message'` event: forward the message to
 * the owning WebSocket.
 *
 * @param {Buffer|ArrayBuffer|Buffer[]} data The message
 * @param {Boolean} isBinary Specifies whether the message is binary or not
 * @private
 */
function receiverOnMessage(data, isBinary) {
  this[kWebSocket].emit("message", data, isBinary);
}
|
|
/**
 * The listener of the `Receiver` `'ping'` event: optionally answer with a
 * pong, then surface the ping to the user.
 *
 * @param {Buffer} data The data included in the ping frame
 * @private
 */
function receiverOnPing(data) {
  const websocket2 = this[kWebSocket];
  // `this` is the receiver; its `_isServer` flag decides masking of the pong.
  if (websocket2._autoPong) websocket2.pong(data, !this._isServer, NOOP);
  websocket2.emit("ping", data);
}
|
|
/**
 * The listener of the `Receiver` `'pong'` event.
 *
 * @param {Buffer} data The data included in the pong frame
 * @private
 */
function receiverOnPong(data) {
  this[kWebSocket].emit("pong", data);
}
|
|
/**
 * Resume a readable stream. Scheduled via `process.nextTick` so listener
 * removal takes effect first.
 *
 * @param {Readable} stream2 The readable stream
 * @private
 */
function resume(stream2) {
  stream2.resume();
}
|
|
/**
 * The `Sender` error event handler: a write to the socket failed; tear the
 * connection down and emit `'error'` once.
 *
 * @param {Error} err The error
 * @private
 */
function senderOnError(err) {
  const websocket2 = this[kWebSocket];
  if (websocket2.readyState === WebSocket2.CLOSED) return;
  if (websocket2.readyState === WebSocket2.OPEN) {
    websocket2._readyState = WebSocket2.CLOSING;
    // Guarantee the socket is eventually destroyed even if the close
    // handshake never completes.
    setCloseTimer(websocket2);
  }
  // `this` is the sender; `_socket` is the stream it writes to.
  this._socket.end();
  if (!websocket2._errorEmitted) {
    websocket2._errorEmitted = true;
    websocket2.emit("error", err);
  }
}
|
|
/**
 * Set a timer to destroy the underlying raw socket if the peer does not
 * complete the closing handshake within `_closeTimeout` milliseconds.
 *
 * @param {WebSocket2} websocket2 The WebSocket instance
 * @private
 */
function setCloseTimer(websocket2) {
  websocket2._closeTimer = setTimeout(
    websocket2._socket.destroy.bind(websocket2._socket),
    websocket2._closeTimeout
  );
}
|
|
/**
 * The listener of the socket `'close'` event: drain any remaining data into
 * the receiver, clean up listeners and timers, and emit `'close'` once the
 * receiver finishes.
 *
 * @private
 */
function socketOnClose() {
  const websocket2 = this[kWebSocket];
  this.removeListener("close", socketOnClose);
  this.removeListener("data", socketOnData);
  this.removeListener("end", socketOnEnd);
  websocket2._readyState = WebSocket2.CLOSING;
  // If the socket was closed mid-message (no 'end', no close frame, no
  // receiver error) feed it the bytes still sitting in the readable buffer
  // so a complete close frame there can still be processed.
  if (!this._readableState.endEmitted && !websocket2._closeFrameReceived && !websocket2._receiver._writableState.errorEmitted && this._readableState.length !== 0) {
    const chunk = this.read(this._readableState.length);
    websocket2._receiver.write(chunk);
  }
  websocket2._receiver.end();
  // Clear the back-reference so other handlers know teardown has started.
  this[kWebSocket] = void 0;
  clearTimeout(websocket2._closeTimer);
  if (websocket2._receiver._writableState.finished || websocket2._receiver._writableState.errorEmitted) {
    websocket2.emitClose();
  } else {
    // Wait for the receiver to flush (or fail) before emitting 'close'.
    websocket2._receiver.on("error", receiverOnFinish);
    websocket2._receiver.on("finish", receiverOnFinish);
  }
}
|
|
/**
 * The listener of the socket `'data'` event: pipe incoming bytes into the
 * receiver, pausing the socket when the receiver applies backpressure.
 *
 * @param {Buffer} chunk A chunk of data
 * @private
 */
function socketOnData(chunk) {
  if (!this[kWebSocket]._receiver.write(chunk)) {
    this.pause();
  }
}
|
|
/**
 * The listener of the socket `'end'` event: the peer finished writing; end
 * the receiver and half-close our side too.
 *
 * @private
 */
function socketOnEnd() {
  const websocket2 = this[kWebSocket];
  websocket2._readyState = WebSocket2.CLOSING;
  websocket2._receiver.end();
  this.end();
}
|
|
/**
 * The listener of the socket `'error'` event: swallow further socket errors
 * and destroy the connection. The error itself is surfaced elsewhere (the
 * receiver/sender error paths).
 *
 * @private
 */
function socketOnError() {
  const websocket2 = this[kWebSocket];
  this.removeListener("error", socketOnError);
  // Prevent 'error' from being re-emitted and crashing the process.
  this.on("error", NOOP);
  if (websocket2) {
    websocket2._readyState = WebSocket2.CLOSING;
    this.destroy();
  }
}
|
|
return websocket;
|
|
}
|
|
var stream;
|
|
var hasRequiredStream;
|
|
/**
 * Lazily initialize and return the `createWebSocketStream` module: a factory
 * wrapping a WebSocket into a Node.js `Duplex` stream.
 *
 * @return {Function} `createWebSocketStream`
 */
function requireStream() {
  if (hasRequiredStream) return stream;
  hasRequiredStream = 1;
  requireWebsocket();
  const { Duplex } = require$$0$2;
  // Emit 'close' manually because the duplex is created with
  // `emitClose: false` (closing must be deferred until the ws closes).
  function emitClose(stream2) {
    stream2.emit("close");
  }
  function duplexOnEnd() {
    if (!this.destroyed && this._writableState.finished) {
      this.destroy();
    }
  }
  function duplexOnError(err) {
    this.removeListener("error", duplexOnError);
    this.destroy();
    // Re-emit only if no user listener would otherwise receive the error.
    if (this.listenerCount("error") === 0) {
      this.emit("error", err);
    }
  }
  /**
   * Wrap a `WebSocket` in a duplex stream.
   *
   * @param {WebSocket} ws2 The `WebSocket` to wrap
   * @param {Object} [options] Options for the `Duplex` constructor
   * @return {Duplex} The duplex stream
   */
  function createWebSocketStream(ws2, options) {
    // When the duplex is destroyed by the user we terminate the ws, except
    // when destruction was triggered by a ws error (it is already dying).
    let terminateOnDestroy = true;
    const duplex = new Duplex({
      ...options,
      autoDestroy: false,
      emitClose: false,
      objectMode: false,
      writableObjectMode: false
    });
    ws2.on("message", function message(msg, isBinary) {
      const data = !isBinary && duplex._readableState.objectMode ? msg.toString() : msg;
      // Apply readable-side backpressure to the websocket.
      if (!duplex.push(data)) ws2.pause();
    });
    ws2.once("error", function error(err) {
      if (duplex.destroyed) return;
      terminateOnDestroy = false;
      duplex.destroy(err);
    });
    ws2.once("close", function close() {
      if (duplex.destroyed) return;
      duplex.push(null);
    });
    duplex._destroy = function(err, callback) {
      if (ws2.readyState === ws2.CLOSED) {
        callback(err);
        process.nextTick(emitClose, duplex);
        return;
      }
      // Defer the callback until the websocket errors or closes, whichever
      // comes first.
      let called = false;
      ws2.once("error", function error(err2) {
        called = true;
        callback(err2);
      });
      ws2.once("close", function close() {
        if (!called) callback(err);
        process.nextTick(emitClose, duplex);
      });
      if (terminateOnDestroy) ws2.terminate();
    };
    duplex._final = function(callback) {
      if (ws2.readyState === ws2.CONNECTING) {
        ws2.once("open", function open() {
          duplex._final(callback);
        });
        return;
      }
      // The socket can be null if the handshake failed.
      if (ws2._socket === null) return;
      if (ws2._socket._writableState.finished) {
        callback();
        if (duplex._readableState.endEmitted) duplex.destroy();
      } else {
        ws2._socket.once("finish", function finish() {
          // `duplex` is not destroyed here because the `'end'` event will be
          // emitted on `duplex` after this `'finish'` event. The EOF
          // signaling `null` chunk is, in fact, pushed when the websocket
          // emits `'close'`.
          callback();
        });
        ws2.close();
      }
    };
    duplex._read = function() {
      if (ws2.isPaused) ws2.resume();
    };
    duplex._write = function(chunk, encoding, callback) {
      if (ws2.readyState === ws2.CONNECTING) {
        ws2.once("open", function open() {
          duplex._write(chunk, encoding, callback);
        });
        return;
      }
      ws2.send(chunk, callback);
    };
    duplex.on("end", duplexOnEnd);
    duplex.on("error", duplexOnError);
    return duplex;
  }
  stream = createWebSocketStream;
  return stream;
}
|
|
var subprotocol;
|
|
var hasRequiredSubprotocol;
|
|
/**
 * Lazily initialize and return the subprotocol module, which exposes a
 * parser for the `Sec-WebSocket-Protocol` header.
 *
 * @return {Object} `{ parse }`
 */
function requireSubprotocol() {
  if (hasRequiredSubprotocol) return subprotocol;
  hasRequiredSubprotocol = 1;
  const { tokenChars } = requireValidation();
  /**
   * Parse the `Sec-WebSocket-Protocol` header into a set of subprotocol
   * names. Accepts a comma-separated list of HTTP tokens with optional
   * surrounding whitespace; throws `SyntaxError` on malformed input or
   * duplicated names.
   *
   * @param {String} header The field value of the header
   * @return {Set} The subprotocol names
   */
  function parse(header) {
    const protocols = /* @__PURE__ */ new Set();
    // `start`/`end` delimit the token currently being scanned; -1 = unset.
    let start = -1;
    let end = -1;
    let i = 0;
    for (i; i < header.length; i++) {
      const code = header.charCodeAt(i);
      if (end === -1 && tokenChars[code] === 1) {
        if (start === -1) start = i;
      } else if (i !== 0 && (code === 32 || code === 9)) {
        // Space/tab: may only appear between a token and the next comma.
        if (end === -1 && start !== -1) end = i;
      } else if (code === 44) {
        // Comma: finalize the current token.
        if (start === -1) {
          throw new SyntaxError(`Unexpected character at index ${i}`);
        }
        if (end === -1) end = i;
        const protocol2 = header.slice(start, end);
        if (protocols.has(protocol2)) {
          throw new SyntaxError(`The "${protocol2}" subprotocol is duplicated`);
        }
        protocols.add(protocol2);
        start = end = -1;
      } else {
        throw new SyntaxError(`Unexpected character at index ${i}`);
      }
    }
    // Header must end inside a token (no trailing comma or whitespace).
    if (start === -1 || end !== -1) {
      throw new SyntaxError("Unexpected end of input");
    }
    const protocol = header.slice(start, i);
    if (protocols.has(protocol)) {
      throw new SyntaxError(`The "${protocol}" subprotocol is duplicated`);
    }
    protocols.add(protocol);
    return protocols;
  }
  subprotocol = { parse };
  return subprotocol;
}
|
|
// Memoized module slot and init flag for the bundled ws WebSocketServer module.
var websocketServer;
var hasRequiredWebsocketServer;
// Lazy CommonJS-style module wrapper (bundler output) that builds and caches
// the `ws` WebSocketServer class plus its private helper functions.
function requireWebsocketServer() {
  if (hasRequiredWebsocketServer) return websocketServer;
  hasRequiredWebsocketServer = 1;
  const EventEmitter2 = require$$0$3;
  const http = require$$2$1;
  const { Duplex } = require$$0$2;
  const { createHash } = crypto;
  const extension2 = requireExtension();
  const PerMessageDeflate = requirePermessageDeflate();
  const subprotocol2 = requireSubprotocol();
  const WebSocket2 = requireWebsocket();
  const { CLOSE_TIMEOUT, GUID, kWebSocket } = requireConstants();
  // A valid Sec-WebSocket-Key is 16 random bytes base64-encoded (22 chars + "==").
  const keyRegex = /^[+/0-9A-Za-z]{22}==$/;
  // Server lifecycle states (stored in this._state).
  const RUNNING = 0;
  const CLOSING = 1;
  const CLOSED = 2;
  class WebSocketServer extends EventEmitter2 {
    /**
     * Create a `WebSocketServer` instance.
     *
     * @param {Object} options Configuration options
     * @param {Boolean} [options.allowSynchronousEvents=true] Specifies whether
     *     any of the `'message'`, `'ping'`, and `'pong'` events can be emitted
     *     multiple times in the same tick
     * @param {Boolean} [options.autoPong=true] Specifies whether or not to
     *     automatically send a pong in response to a ping
     * @param {Number} [options.backlog=511] The maximum length of the queue of
     *     pending connections
     * @param {Boolean} [options.clientTracking=true] Specifies whether or not to
     *     track clients
     * @param {Number} [options.closeTimeout=30000] Duration in milliseconds to
     *     wait for the closing handshake to finish after `websocket.close()` is
     *     called
     * @param {Function} [options.handleProtocols] A hook to handle protocols
     * @param {String} [options.host] The hostname where to bind the server
     * @param {Number} [options.maxPayload=104857600] The maximum allowed message
     *     size
     * @param {Boolean} [options.noServer=false] Enable no server mode
     * @param {String} [options.path] Accept only connections matching this path
     * @param {(Boolean|Object)} [options.perMessageDeflate=false] Enable/disable
     *     permessage-deflate
     * @param {Number} [options.port] The port where to bind the server
     * @param {(http.Server|https.Server)} [options.server] A pre-created HTTP/S
     *     server to use
     * @param {Boolean} [options.skipUTF8Validation=false] Specifies whether or
     *     not to skip UTF-8 validation for text and close messages
     * @param {Function} [options.verifyClient] A hook to reject connections
     * @param {Function} [options.WebSocket=WebSocket] Specifies the `WebSocket`
     *     class to use. It must be the `WebSocket` class or class that extends it
     * @param {Function} [callback] A listener for the `listening` event
     */
    constructor(options, callback) {
      super();
      options = {
        allowSynchronousEvents: true,
        autoPong: true,
        maxPayload: 100 * 1024 * 1024,
        skipUTF8Validation: false,
        perMessageDeflate: false,
        handleProtocols: null,
        clientTracking: true,
        closeTimeout: CLOSE_TIMEOUT,
        verifyClient: null,
        noServer: false,
        backlog: null,
        // use default (511 as implemented in net.js)
        server: null,
        host: null,
        path: null,
        port: null,
        WebSocket: WebSocket2,
        ...options
      };
      // Exactly one of `port`, `server`, `noServer` must be given.
      if (options.port == null && !options.server && !options.noServer || options.port != null && (options.server || options.noServer) || options.server && options.noServer) {
        throw new TypeError(
          'One and only one of the "port", "server", or "noServer" options must be specified'
        );
      }
      if (options.port != null) {
        // Own a bare HTTP server that answers non-upgrade requests with 426.
        this._server = http.createServer((req, res) => {
          const body = http.STATUS_CODES[426];
          res.writeHead(426, {
            "Content-Length": body.length,
            "Content-Type": "text/plain"
          });
          res.end(body);
        });
        this._server.listen(
          options.port,
          options.host,
          options.backlog,
          callback
        );
      } else if (options.server) {
        this._server = options.server;
      }
      if (this._server) {
        const emitConnection = this.emit.bind(this, "connection");
        // Keep the detach function so close() can unhook from a shared server.
        this._removeListeners = addListeners(this._server, {
          listening: this.emit.bind(this, "listening"),
          error: this.emit.bind(this, "error"),
          upgrade: (req, socket, head) => {
            this.handleUpgrade(req, socket, head, emitConnection);
          }
        });
      }
      if (options.perMessageDeflate === true) options.perMessageDeflate = {};
      if (options.clientTracking) {
        this.clients = /* @__PURE__ */ new Set();
        this._shouldEmitClose = false;
      }
      this.options = options;
      this._state = RUNNING;
    }
    /**
     * Returns the bound address, the address family name, and port of the server
     * as reported by the operating system if listening on an IP socket.
     * If the server is listening on a pipe or UNIX domain socket, the name is
     * returned as a string.
     *
     * @return {(Object|String|null)} The address of the server
     * @public
     */
    address() {
      if (this.options.noServer) {
        throw new Error('The server is operating in "noServer" mode');
      }
      if (!this._server) return null;
      return this._server.address();
    }
    /**
     * Stop the server from accepting new connections and emit the `'close'` event
     * when all existing connections are closed.
     *
     * @param {Function} [cb] A one-time listener for the `'close'` event
     * @public
     */
    close(cb) {
      if (this._state === CLOSED) {
        // Already closed: still invoke the callback, but with an error.
        if (cb) {
          this.once("close", () => {
            cb(new Error("The server is not running"));
          });
        }
        process.nextTick(emitClose, this);
        return;
      }
      if (cb) this.once("close", cb);
      if (this._state === CLOSING) return;
      this._state = CLOSING;
      if (this.options.noServer || this.options.server) {
        // External/no server: only detach listeners, never close the server.
        if (this._server) {
          this._removeListeners();
          this._removeListeners = this._server = null;
        }
        if (this.clients) {
          if (!this.clients.size) {
            process.nextTick(emitClose, this);
          } else {
            // Defer 'close' until the last tracked client disconnects.
            this._shouldEmitClose = true;
          }
        } else {
          process.nextTick(emitClose, this);
        }
      } else {
        // Owned server: actually close it, then emit 'close'.
        const server = this._server;
        this._removeListeners();
        this._removeListeners = this._server = null;
        server.close(() => {
          emitClose(this);
        });
      }
    }
    /**
     * See if a given request should be handled by this server instance.
     *
     * @param {http.IncomingMessage} req Request object to inspect
     * @return {Boolean} `true` if the request is valid, else `false`
     * @public
     */
    shouldHandle(req) {
      if (this.options.path) {
        // Compare only the pathname, ignoring any query string.
        const index = req.url.indexOf("?");
        const pathname = index !== -1 ? req.url.slice(0, index) : req.url;
        if (pathname !== this.options.path) return false;
      }
      return true;
    }
    /**
     * Handle a HTTP Upgrade request.
     *
     * @param {http.IncomingMessage} req The request object
     * @param {Duplex} socket The network socket between the server and client
     * @param {Buffer} head The first packet of the upgraded stream
     * @param {Function} cb Callback
     * @public
     */
    handleUpgrade(req, socket, head, cb) {
      socket.on("error", socketOnError);
      const key = req.headers["sec-websocket-key"];
      const upgrade = req.headers.upgrade;
      const version = +req.headers["sec-websocket-version"];
      if (req.method !== "GET") {
        const message = "Invalid HTTP method";
        abortHandshakeOrEmitwsClientError(this, req, socket, 405, message);
        return;
      }
      if (upgrade === void 0 || upgrade.toLowerCase() !== "websocket") {
        const message = "Invalid Upgrade header";
        abortHandshakeOrEmitwsClientError(this, req, socket, 400, message);
        return;
      }
      if (key === void 0 || !keyRegex.test(key)) {
        const message = "Missing or invalid Sec-WebSocket-Key header";
        abortHandshakeOrEmitwsClientError(this, req, socket, 400, message);
        return;
      }
      if (version !== 13 && version !== 8) {
        const message = "Missing or invalid Sec-WebSocket-Version header";
        abortHandshakeOrEmitwsClientError(this, req, socket, 400, message, {
          "Sec-WebSocket-Version": "13, 8"
        });
        return;
      }
      if (!this.shouldHandle(req)) {
        abortHandshake(socket, 400);
        return;
      }
      const secWebSocketProtocol = req.headers["sec-websocket-protocol"];
      let protocols = /* @__PURE__ */ new Set();
      if (secWebSocketProtocol !== void 0) {
        try {
          protocols = subprotocol2.parse(secWebSocketProtocol);
        } catch (err) {
          const message = "Invalid Sec-WebSocket-Protocol header";
          abortHandshakeOrEmitwsClientError(this, req, socket, 400, message);
          return;
        }
      }
      const secWebSocketExtensions = req.headers["sec-websocket-extensions"];
      const extensions = {};
      if (this.options.perMessageDeflate && secWebSocketExtensions !== void 0) {
        const perMessageDeflate = new PerMessageDeflate(
          this.options.perMessageDeflate,
          true,
          this.options.maxPayload
        );
        try {
          const offers = extension2.parse(secWebSocketExtensions);
          if (offers[PerMessageDeflate.extensionName]) {
            perMessageDeflate.accept(offers[PerMessageDeflate.extensionName]);
            extensions[PerMessageDeflate.extensionName] = perMessageDeflate;
          }
        } catch (err) {
          const message = "Invalid or unacceptable Sec-WebSocket-Extensions header";
          abortHandshakeOrEmitwsClientError(this, req, socket, 400, message);
          return;
        }
      }
      if (this.options.verifyClient) {
        const info = {
          // Version 8 of the protocol used a different origin header name.
          origin: req.headers[`${version === 8 ? "sec-websocket-origin" : "origin"}`],
          secure: !!(req.socket.authorized || req.socket.encrypted),
          req
        };
        if (this.options.verifyClient.length === 2) {
          // Async verifyClient: decision arrives via the callback.
          this.options.verifyClient(info, (verified, code, message, headers) => {
            if (!verified) {
              return abortHandshake(socket, code || 401, message, headers);
            }
            this.completeUpgrade(
              extensions,
              key,
              protocols,
              req,
              socket,
              head,
              cb
            );
          });
          return;
        }
        if (!this.options.verifyClient(info)) return abortHandshake(socket, 401);
      }
      this.completeUpgrade(extensions, key, protocols, req, socket, head, cb);
    }
    /**
     * Upgrade the connection to WebSocket.
     *
     * @param {Object} extensions The accepted extensions
     * @param {String} key The value of the `Sec-WebSocket-Key` header
     * @param {Set} protocols The subprotocols
     * @param {http.IncomingMessage} req The request object
     * @param {Duplex} socket The network socket between the server and client
     * @param {Buffer} head The first packet of the upgraded stream
     * @param {Function} cb Callback
     * @throws {Error} If called more than once with the same socket
     * @private
     */
    completeUpgrade(extensions, key, protocols, req, socket, head, cb) {
      if (!socket.readable || !socket.writable) return socket.destroy();
      if (socket[kWebSocket]) {
        throw new Error(
          "server.handleUpgrade() was called more than once with the same socket, possibly due to a misconfiguration"
        );
      }
      if (this._state > RUNNING) return abortHandshake(socket, 503);
      // Sec-WebSocket-Accept = base64(sha1(key + GUID)), per the protocol.
      const digest = createHash("sha1").update(key + GUID).digest("base64");
      const headers = [
        "HTTP/1.1 101 Switching Protocols",
        "Upgrade: websocket",
        "Connection: Upgrade",
        `Sec-WebSocket-Accept: ${digest}`
      ];
      const ws2 = new this.options.WebSocket(null, void 0, this.options);
      if (protocols.size) {
        // Pick via the hook if provided, otherwise the first offered protocol.
        const protocol = this.options.handleProtocols ? this.options.handleProtocols(protocols, req) : protocols.values().next().value;
        if (protocol) {
          headers.push(`Sec-WebSocket-Protocol: ${protocol}`);
          ws2._protocol = protocol;
        }
      }
      if (extensions[PerMessageDeflate.extensionName]) {
        const params = extensions[PerMessageDeflate.extensionName].params;
        const value = extension2.format({
          [PerMessageDeflate.extensionName]: [params]
        });
        headers.push(`Sec-WebSocket-Extensions: ${value}`);
        ws2._extensions = extensions;
      }
      // Allow external modification/inspection of handshake headers.
      this.emit("headers", headers, req);
      socket.write(headers.concat("\r\n").join("\r\n"));
      socket.removeListener("error", socketOnError);
      ws2.setSocket(socket, head, {
        allowSynchronousEvents: this.options.allowSynchronousEvents,
        maxPayload: this.options.maxPayload,
        skipUTF8Validation: this.options.skipUTF8Validation
      });
      if (this.clients) {
        this.clients.add(ws2);
        ws2.on("close", () => {
          this.clients.delete(ws2);
          // Last client gone while closing: now the server may emit 'close'.
          if (this._shouldEmitClose && !this.clients.size) {
            process.nextTick(emitClose, this);
          }
        });
      }
      cb(ws2, req);
    }
  }
  websocketServer = WebSocketServer;
  /**
   * Add event listeners on an `EventEmitter` using a map of <event, listener>
   * pairs.
   *
   * @param {EventEmitter} server The event emitter
   * @param {Object.<String, Function>} map The listeners to add
   * @return {Function} A function that will remove the added listeners when
   *     called
   * @private
   */
  function addListeners(server, map) {
    for (const event of Object.keys(map)) server.on(event, map[event]);
    return function removeListeners() {
      for (const event of Object.keys(map)) {
        server.removeListener(event, map[event]);
      }
    };
  }
  // Mark the server CLOSED and emit its 'close' event.
  function emitClose(server) {
    server._state = CLOSED;
    server.emit("close");
  }
  // 'error' handler for the raw socket during the handshake; just tear it down.
  function socketOnError() {
    this.destroy();
  }
  /**
   * Close the connection when preconditions are not fulfilled, writing a
   * minimal HTTP error response before destroying the socket.
   *
   * @param {Duplex} socket The socket of the upgrade request
   * @param {Number} code The HTTP response status code
   * @param {String} [message] The HTTP response body
   * @param {Object} [headers] Additional HTTP response headers
   * @private
   */
  function abortHandshake(socket, code, message, headers) {
    message = message || http.STATUS_CODES[code];
    headers = {
      Connection: "close",
      "Content-Type": "text/html",
      "Content-Length": Buffer.byteLength(message),
      ...headers
    };
    socket.once("finish", socket.destroy);
    socket.end(
      `HTTP/1.1 ${code} ${http.STATUS_CODES[code]}\r
` + Object.keys(headers).map((h) => `${h}: ${headers[h]}`).join("\r\n") + "\r\n\r\n" + message
    );
  }
  /**
   * Emit a `'wsClientError'` event on the server if at least one listener is
   * registered (giving the app control of the error response); otherwise abort
   * the handshake with the given status/message.
   *
   * @private
   */
  function abortHandshakeOrEmitwsClientError(server, req, socket, code, message, headers) {
    if (server.listenerCount("wsClientError")) {
      const err = new Error(message);
      Error.captureStackTrace(err, abortHandshakeOrEmitwsClientError);
      server.emit("wsClientError", err, socket, req);
    } else {
      abortHandshake(socket, code, message, headers);
    }
  }
  return websocketServer;
}
|
|
// Memoized module slot and init flag for the bundled `ws` entry point.
var ws;
var hasRequiredWs;
/**
 * Lazy module initializer (bundler output) for the `ws` package entry point.
 * Attaches the package's secondary exports onto the WebSocket class exactly
 * once and memoizes the result.
 */
function requireWs() {
  if (!hasRequiredWs) {
    hasRequiredWs = 1;
    const WebSocketClass = requireWebsocket();
    // Wire up the secondary exports; order matches upstream `ws`.
    WebSocketClass.createWebSocketStream = requireStream();
    WebSocketClass.Server = requireWebsocketServer();
    WebSocketClass.Receiver = requireReceiver();
    WebSocketClass.Sender = requireSender();
    // Self-references so both default and named-style access work.
    WebSocketClass.WebSocket = WebSocketClass;
    WebSocketClass.WebSocketServer = WebSocketClass.Server;
    ws = WebSocketClass;
  }
  return ws;
}
|
|
// Memoized module slot and init flag for the platform ("node") entry module.
var node;
var hasRequiredNode;
/** Lazy initializer for the platform entry; simply re-exports requireWs(). */
function requireNode() {
  if (!hasRequiredNode) {
    hasRequiredNode = 1;
    node = requireWs();
  }
  return node;
}
|
|
// Materialize the bundled `ws` package and unwrap its CJS default export so
// the client adapter below can use it as the `WebSocket` constructor.
var nodeExports = requireNode();
const WebSocket = /* @__PURE__ */ getDefaultExportFromCjs(nodeExports);
|
|
// Type guards for the sync-protocol message envelope.
const isPeerMessage = (msg) => msg.type === "peer";
const isErrorMessage = (msg) => msg.type === "error";
// Protocol version advertised in join messages.
const ProtocolV1 = "1";
|
|
/**
 * Throws unless `value` is "present": `false`, `null`, and `undefined` all
 * fail the assertion (any other value, including 0 and "", passes).
 * Stack frames mentioning "assert.ts" are stripped so the reported failure
 * points at the caller rather than this helper.
 */
function assert(value, message = "Assertion failed") {
  const failed = value === false || value === null || value === void 0;
  if (!failed) return;
  const error = new Error(trimLines(message));
  error.stack = removeLine(error.stack, "assert.ts");
  throw error;
}
|
|
// Trims leading/trailing whitespace from every line of a multi-line string.
const trimLines = (s) =>
  s
    .split("\n")
    .map((line) => line.trim())
    .join("\n");
|
|
// Drops every line containing `targetText` from `s` (used to scrub stack traces).
const removeLine = (s = "", targetText) => {
  const kept = s.split("\n").filter((line) => !line.includes(targetText));
  return kept.join("\n");
};
|
|
// Copies a typed-array view's window into a standalone ArrayBuffer.
// Slicing by byteOffset/byteLength matters: the view may cover only part of
// a larger shared backing buffer.
const toArrayBuffer = (bytes) => {
  const start = bytes.byteOffset;
  const end = start + bytes.byteLength;
  return bytes.buffer.slice(start, end);
};
|
|
// Common base for WebSocket-backed network adapters: adds a `socket` slot on
// top of automerge-repo's NetworkAdapter. (`__publicField` is the bundler's
// class-field helper from the file preamble.)
class WebSocketNetworkAdapter extends NetworkAdapter {
  constructor() {
    super(...arguments);
    // Underlying WebSocket instance; undefined until a connection is made.
    __publicField(this, "socket");
  }
}
|
|
// Client-side network adapter that connects an automerge Repo to a sync
// server over a single WebSocket. Transpiled output: private state lives in
// the WeakMaps (_ready2, _retryIntervalId, ...) via the __private* helpers
// declared in the file preamble.
class WebSocketClientAdapter extends WebSocketNetworkAdapter {
  // this adapter only connects to one remote client at a time
  constructor(url, retryInterval = 5e3) {
    super();
    __privateAdd(this, _WebSocketClientAdapter_instances);
    __publicField(this, "url");
    __publicField(this, "retryInterval");
    // #ready flips to true once, resolving the #readyPromise below.
    __privateAdd(this, _ready2, false);
    __privateAdd(this, _readyResolver);
    __privateAdd(this, _readyPromise2, new Promise((resolve) => {
      __privateSet(this, _readyResolver, resolve);
    }));
    __privateAdd(this, _retryIntervalId);
    __privateAdd(this, _log5, debug("automerge-repo:websocket:browser"));
    __publicField(this, "remotePeerId");
    // Successful open: stop the reconnect interval and announce ourselves.
    __publicField(this, "onOpen", () => {
      __privateGet(this, _log5).call(this, "open");
      clearInterval(__privateGet(this, _retryIntervalId));
      __privateSet(this, _retryIntervalId, void 0);
      this.join();
    });
    // When a socket closes, or disconnects, remove it from the array.
    // NOTE(review): the setTimeout id here is NOT stored in #retryIntervalId,
    // while connect() separately starts a setInterval — presumably intentional
    // (connect() re-arms the interval), but worth confirming there is no
    // double-reconnect when close fires while the interval is pending.
    __publicField(this, "onClose", () => {
      __privateGet(this, _log5).call(this, "close");
      if (this.remotePeerId)
        this.emit("peer-disconnected", { peerId: this.remotePeerId });
      if (this.retryInterval > 0 && !__privateGet(this, _retryIntervalId))
        setTimeout(() => {
          assert(this.peerId);
          return this.connect(this.peerId, this.peerMetadata);
        }, this.retryInterval);
    });
    __publicField(this, "onMessage", (event) => {
      this.receiveMessage(event.data);
    });
    /** The websocket error handler signature is different on node and the browser. */
    __publicField(this, "onError", (event) => {
      if ("error" in event) {
        // Anything other than connection-refused is unexpected: rethrow.
        if (event.error.code !== "ECONNREFUSED") {
          throw event.error;
        }
      }
      __privateGet(this, _log5).call(this, "Connection failed, retrying...");
    });
    this.url = url;
    this.retryInterval = retryInterval;
    // Scope the logger to this adapter's target URL.
    __privateSet(this, _log5, __privateGet(this, _log5).extend(url));
  }
  /** True once the adapter has been marked ready (see #forceReady). */
  isReady() {
    return __privateGet(this, _ready2);
  }
  /** Promise that resolves when the adapter becomes ready. */
  whenReady() {
    return __privateGet(this, _readyPromise2);
  }
  /**
   * Opens (or re-opens) the WebSocket and arms a reconnect interval that
   * keeps retrying until onOpen clears it. On reconnect the old socket's
   * listeners are detached so its events cannot fire into the new state.
   */
  connect(peerId, peerMetadata) {
    if (!this.socket || !this.peerId) {
      // First connection: remember our identity for later rejoins.
      this.peerId = peerId;
      __privateGet(this, _log5).call(this, "connecting");
      this.peerMetadata = peerMetadata ?? {};
    } else {
      __privateGet(this, _log5).call(this, "reconnecting");
      assert(peerId === this.peerId);
      this.socket.removeEventListener("open", this.onOpen);
      this.socket.removeEventListener("close", this.onClose);
      this.socket.removeEventListener("message", this.onMessage);
      this.socket.removeEventListener("error", this.onError);
    }
    if (!__privateGet(this, _retryIntervalId))
      __privateSet(this, _retryIntervalId, setInterval(() => {
        this.connect(peerId, peerMetadata);
      }, this.retryInterval));
    this.socket = new WebSocket(this.url);
    this.socket.binaryType = "arraybuffer";
    this.socket.addEventListener("open", this.onOpen);
    this.socket.addEventListener("close", this.onClose);
    this.socket.addEventListener("message", this.onMessage);
    this.socket.addEventListener("error", this.onError);
    // Mark ready after 1s even without a peer, so callers aren't blocked forever.
    setTimeout(() => __privateMethod(this, _WebSocketClientAdapter_instances, forceReady_fn).call(this), 1e3);
    this.join();
  }
  /** Sends the join handshake if the socket is already open (otherwise onOpen will). */
  join() {
    assert(this.peerId);
    assert(this.socket);
    if (this.socket.readyState === WebSocket.OPEN) {
      this.send(joinMessage(this.peerId, this.peerMetadata));
    }
  }
  /** Tears down the socket, stops retries, and announces the peer loss. */
  disconnect() {
    assert(this.peerId);
    assert(this.socket);
    const socket = this.socket;
    if (socket) {
      socket.removeEventListener("open", this.onOpen);
      socket.removeEventListener("close", this.onClose);
      socket.removeEventListener("message", this.onMessage);
      socket.removeEventListener("error", this.onError);
      socket.close();
    }
    clearInterval(__privateGet(this, _retryIntervalId));
    if (this.remotePeerId)
      this.emit("peer-disconnected", { peerId: this.remotePeerId });
    this.socket = void 0;
  }
  /**
   * Encodes and transmits one protocol message. Silently drops the message
   * (with a log line) when the socket is gone; throws when a socket exists
   * but is not yet OPEN.
   */
  send(message) {
    var _a;
    if ("data" in message && ((_a = message.data) == null ? void 0 : _a.byteLength) === 0)
      throw new Error("Tried to send a zero-length message");
    assert(this.peerId);
    if (!this.socket) {
      __privateGet(this, _log5).call(this, "Tried to send on a disconnected socket.");
      return;
    }
    if (this.socket.readyState !== WebSocket.OPEN)
      throw new Error(`Websocket not ready (${this.socket.readyState})`);
    const encoded = encode(message);
    // ws requires an ArrayBuffer (not a Uint8Array view) here.
    this.socket.send(toArrayBuffer(encoded));
  }
  /** Records the server's peer id, marks the adapter ready, and notifies the repo. */
  peerCandidate(remotePeerId, peerMetadata) {
    assert(this.socket);
    __privateMethod(this, _WebSocketClientAdapter_instances, forceReady_fn).call(this);
    this.remotePeerId = remotePeerId;
    this.emit("peer-candidate", {
      peerId: remotePeerId,
      peerMetadata
    });
  }
  /**
   * Decodes an incoming binary frame (via the bundled `decode` helper) and
   * dispatches it: "peer" -> handshake complete, "error" -> log only,
   * anything else is forwarded to the repo as a sync message.
   */
  receiveMessage(messageBytes) {
    let message;
    try {
      message = decode(new Uint8Array(messageBytes));
    } catch (e) {
      // Malformed frame: log and drop rather than killing the connection.
      __privateGet(this, _log5).call(this, "error decoding message:", e);
      return;
    }
    assert(this.socket);
    if (messageBytes.byteLength === 0)
      throw new Error("received a zero-length message");
    if (isPeerMessage(message)) {
      const { peerMetadata } = message;
      __privateGet(this, _log5).call(this, `peer: ${message.senderId}`);
      this.peerCandidate(message.senderId, peerMetadata);
    } else if (isErrorMessage(message)) {
      __privateGet(this, _log5).call(this, `error: ${message.message}`);
    } else {
      this.emit("message", message);
    }
  }
}
|
|
// Backing WeakMap/WeakSet storage for WebSocketClientAdapter's transpiled
// private fields (declared in the bundle preamble, initialized here).
_ready2 = new WeakMap();
_readyResolver = new WeakMap();
_readyPromise2 = new WeakMap();
_WebSocketClientAdapter_instances = new WeakSet();
// Private method #forceReady: flips #ready and resolves whenReady() exactly once.
forceReady_fn = function() {
  var _a;
  if (!__privateGet(this, _ready2)) {
    __privateSet(this, _ready2, true);
    // Optional call: the resolver is assigned in the constructor's Promise
    // executor, but the transpiler guards against it being unset.
    (_a = __privateGet(this, _readyResolver)) == null ? void 0 : _a.call(this);
  }
};
_retryIntervalId = new WeakMap();
_log5 = new WeakMap();
|
|
/**
 * Builds the "join" handshake message a client sends after the socket opens,
 * announcing its peer id, metadata, and the protocol versions it speaks.
 */
function joinMessage(senderId, peerMetadata) {
  const message = {
    type: "join",
    senderId,
    peerMetadata,
    supportedProtocolVersions: [ProtocolV1]
  };
  return message;
}
|
|
// NOTE(review): this logger is created and immediately discarded — nothing
// visible here uses it; presumably vestigial from an earlier server-side
// module. Confirm before removing (debug() itself has no required side effect
// for the code below).
debug("WebsocketServer");
// Module-level singleton managed by getRepo()/cleanupRepo() below.
let repo = null;
|
|
/**
 * Returns the singleton automerge Repo, creating it on first use.
 * The repo syncs via the public automerge sync server plus a
 * BroadcastChannel (same-origin tabs), and persists to IndexedDB.
 */
function getRepo() {
  if (repo) {
    return repo;
  }
  const network = [
    new WebSocketClientAdapter("wss://sync.automerge.org"),
    new BroadcastChannelNetworkAdapter()
  ];
  const storage = new IndexedDBStorageAdapter("p2p-poll-app");
  repo = new Repo({ network, storage });
  return repo;
}
|
|
/**
 * Shuts down the singleton repo (if one was created) and clears the
 * reference so the next getRepo() call builds a fresh instance.
 */
function cleanupRepo() {
  if (!repo) {
    return;
  }
  repo.shutdown();
  repo = null;
}
|
|
export {
|
|
cleanupRepo,
|
|
getRepo
|
|
};
|