commit d1c13e06bdda52657db296d75ef9230fc8ec22be
parent 70e6d16fa38a412a54621178927ca82232a59341
Author: Lorenz A <me@lorenzackermann.xyz>
Date: Wed, 10 Dec 2025 16:15:10 +0000
Bug 2004226 - [devtools] Turn devtools/shared/transport/packets.js into an ES class. r=devtools-reviewers,nchevobbe
Differential Revision: https://phabricator.services.mozilla.com/D275620
Diffstat:
1 file changed, 304 insertions(+), 316 deletions(-)
diff --git a/devtools/shared/transport/packets.js b/devtools/shared/transport/packets.js
@@ -46,34 +46,32 @@ const PACKET_LENGTH_MAX = Math.pow(2, 40);
/**
* A generic Packet processing object (extended by two subtypes below).
*/
-function Packet(transport) {
- this._transport = transport;
- this._length = 0;
-}
-
-/**
- * Attempt to initialize a new Packet based on the incoming packet header we've
- * received so far. We try each of the types in succession, trying JSON packets
- * first since they are much more common.
- *
- * @param header string
- * The packet header string to attempt parsing.
- * @param transport DebuggerTransport
- * The transport instance that will own the packet.
- * @return Packet
- * The parsed packet of the matching type, or null if no types matched.
- */
-Packet.fromHeader = function (header, transport) {
- return (
- JSONPacket.fromHeader(header, transport) ||
- BulkPacket.fromHeader(header, transport)
- );
-};
-
-Packet.prototype = {
+class Packet {
+ constructor(transport) {
+ this._transport = transport;
+ this._length = 0;
+ }
+ /**
+ * Attempt to initialize a new Packet based on the incoming packet header we've
+ * received so far. We try each of the types in succession, trying JSON packets
+ * first since they are much more common.
+ *
+ * @param {string} header
+ * The packet header string to attempt parsing.
+ * @param {DebuggerTransport} transport
+ * The transport instance that will own the packet.
+ * @return {Packet}
+ * The parsed packet of the matching type, or null if no types matched.
+ */
+ static fromHeader(header, transport) {
+ return (
+ JSONPacket.fromHeader(header, transport) ||
+ BulkPacket.fromHeader(header, transport)
+ );
+ }
get length() {
return this._length;
- },
+ }
set length(length) {
if (length > PACKET_LENGTH_MAX) {
@@ -85,12 +83,12 @@ Packet.prototype = {
);
}
this._length = length;
- },
+ }
destroy() {
this._transport = null;
- },
-};
+ }
+}
exports.Packet = Packet;
@@ -101,135 +99,132 @@ exports.Packet = Packet;
* contents of the JSON packet are specified in the Remote Debugging Protocol
* specification.
*
- * @param transport DebuggerTransport
- * The transport instance that will own the packet.
- */
-function JSONPacket(transport) {
- Packet.call(this, transport);
- this._data = "";
- this._done = false;
-}
-
-/**
- * Attempt to initialize a new JSONPacket based on the incoming packet header
- * we've received so far.
- *
- * @param header string
- * The packet header string to attempt parsing.
- * @param transport DebuggerTransport
- * The transport instance that will own the packet.
- * @return JSONPacket
- * The parsed packet, or null if it's not a match.
*/
-JSONPacket.fromHeader = function (header, transport) {
- const match = this.HEADER_PATTERN.exec(header);
+class JSONPacket extends Packet {
+ /**
+ * @param {DebuggerTransport} transport
+ * The transport instance that will own the packet.
+ */
+ constructor(transport) {
+ super(transport);
- if (!match) {
- return null;
+ this._data = "";
+ this._done = false;
}
+ /**
+ * Attempt to initialize a new JSONPacket based on the incoming packet header
+ * we've received so far.
+ *
+ * @param {string} header
+ * The packet header string to attempt parsing.
+ * @param {DebuggerTransport} transport
+ * The transport instance that will own the packet.
+ * @return {JSONPacket}
+ * The parsed packet, or null if it's not a match.
+ */
+ static fromHeader(header, transport) {
+ const match = this.HEADER_PATTERN.exec(header);
- dumpv("Header matches JSON packet");
- const packet = new JSONPacket(transport);
- packet.length = +match[1];
- return packet;
-};
+ if (!match) {
+ return null;
+ }
-JSONPacket.HEADER_PATTERN = /^(\d+):$/;
+ dumpv("Header matches JSON packet");
+ const packet = new JSONPacket(transport);
+ packet.length = +match[1];
+ return packet;
+ }
-JSONPacket.prototype = Object.create(Packet.prototype);
+ static HEADER_PATTERN = /^(\d+):$/;
-Object.defineProperty(JSONPacket.prototype, "object", {
/**
* Gets the object (not the serialized string) being read or written.
*/
- get() {
+ get object() {
return this._object;
- },
-
+ }
/**
* Sets the object to be sent when write() is called.
*/
- set(object) {
+ set object(object) {
this._object = object;
const data = JSON.stringify(object);
this._data = unicodeConverter.ConvertFromUnicode(data);
this.length = this._data.length;
- },
-});
+ }
-JSONPacket.prototype.read = function (stream, scriptableStream) {
- dumpv("Reading JSON packet");
+ read(stream, scriptableStream) {
+ dumpv("Reading JSON packet");
- // Read in more packet data.
- this._readData(stream, scriptableStream);
+ // Read in more packet data.
+ this._readData(stream, scriptableStream);
- if (!this.done) {
- // Don't have a complete packet yet.
- return;
- }
+ if (!this.done) {
+ // Don't have a complete packet yet.
+ return;
+ }
- let json = this._data;
- try {
- json = unicodeConverter.ConvertToUnicode(json);
- this._object = JSON.parse(json);
- } catch (e) {
- const msg =
- "Error parsing incoming packet: " +
- json +
- " (" +
- e +
- " - " +
- e.stack +
- ")";
- console.error(msg);
- dumpn(msg);
- return;
+ let json = this._data;
+ try {
+ json = unicodeConverter.ConvertToUnicode(json);
+ this._object = JSON.parse(json);
+ } catch (e) {
+ const msg =
+ "Error parsing incoming packet: " +
+ json +
+ " (" +
+ e +
+ " - " +
+ e.stack +
+ ")";
+ console.error(msg);
+ dumpn(msg);
+ return;
+ }
+
+ this._transport._onJSONObjectReady(this._object);
}
- this._transport._onJSONObjectReady(this._object);
-};
-
-JSONPacket.prototype._readData = function (stream, scriptableStream) {
- if (flags.wantVerbose) {
- dumpv(
- "Reading JSON data: _l: " +
- this.length +
- " dL: " +
- this._data.length +
- " sA: " +
- stream.available()
+ _readData(stream, scriptableStream) {
+ if (flags.wantVerbose) {
+ dumpv(
+ "Reading JSON data: _l: " +
+ this.length +
+ " dL: " +
+ this._data.length +
+ " sA: " +
+ stream.available()
+ );
+ }
+ const bytesToRead = Math.min(
+ this.length - this._data.length,
+ stream.available()
);
+ this._data += scriptableStream.readBytes(bytesToRead);
+ this._done = this._data.length === this.length;
}
- const bytesToRead = Math.min(
- this.length - this._data.length,
- stream.available()
- );
- this._data += scriptableStream.readBytes(bytesToRead);
- this._done = this._data.length === this.length;
-};
-
-JSONPacket.prototype.write = function (stream) {
- dumpv("Writing JSON packet");
-
- if (this._outgoing === undefined) {
- // Format the serialized packet to a buffer
- this._outgoing = this.length + ":" + this._data;
- }
- const written = stream.write(this._outgoing, this._outgoing.length);
- this._outgoing = this._outgoing.slice(written);
- this._done = !this._outgoing.length;
-};
+ write(stream) {
+ dumpv("Writing JSON packet");
+
+ if (this._outgoing === undefined) {
+ // Format the serialized packet to a buffer
+ this._outgoing = this.length + ":" + this._data;
+ }
-Object.defineProperty(JSONPacket.prototype, "done", {
- get() {
+ const written = stream.write(this._outgoing, this._outgoing.length);
+ this._outgoing = this._outgoing.slice(written);
+ this._done = !this._outgoing.length;
+ }
+
+ get done() {
return this._done;
- },
-});
+ }
-JSONPacket.prototype.toString = function () {
- return JSON.stringify(this._object, null, 2);
-};
+ toString() {
+ return JSON.stringify(this._object, null, 2);
+ }
+}
exports.JSONPacket = JSONPacket;
@@ -246,229 +241,222 @@ exports.JSONPacket = JSONPacket;
* packet's type. See the Remote Debugging Protocol Stream Transport spec for
* more details.
*
- * @param transport DebuggerTransport
- * The transport instance that will own the packet.
*/
-function BulkPacket(transport) {
- Packet.call(this, transport);
- this._done = false;
- let _resolve;
- this._readyForWriting = new Promise(resolve => {
- _resolve = resolve;
- });
- this._readyForWriting.resolve = _resolve;
-}
-
-/**
- * Attempt to initialize a new BulkPacket based on the incoming packet header
- * we've received so far.
- *
- * @param header string
- * The packet header string to attempt parsing.
- * @param transport DebuggerTransport
- * The transport instance that will own the packet.
- * @return BulkPacket
- * The parsed packet, or null if it's not a match.
- */
-BulkPacket.fromHeader = function (header, transport) {
- const match = this.HEADER_PATTERN.exec(header);
+class BulkPacket extends Packet {
+ /**
+ * @param {DebuggerTransport} transport
+ * The transport instance that will own the packet.
+ */
+ constructor(transport) {
+ super(transport);
- if (!match) {
- return null;
+ this._done = false;
+ let _resolve;
+ this._readyForWriting = new Promise(resolve => {
+ _resolve = resolve;
+ });
+ this._readyForWriting.resolve = _resolve;
}
- dumpv("Header matches bulk packet");
- const packet = new BulkPacket(transport);
- packet.header = {
- actor: match[1],
- type: match[2],
- length: +match[3],
- };
- return packet;
-};
-
-BulkPacket.HEADER_PATTERN = /^bulk ([^: ]+) ([^: ]+) (\d+):$/;
-
-BulkPacket.prototype = Object.create(Packet.prototype);
+ /**
+ * Attempt to initialize a new BulkPacket based on the incoming packet header
+ * we've received so far.
+ *
+ * @param {string} header
+ * The packet header string to attempt parsing.
+ * @param {DebuggerTransport} transport
+ * The transport instance that will own the packet.
+ * @return {BulkPacket}
+ * The parsed packet, or null if it's not a match.
+ */
+ static fromHeader(header, transport) {
+ const match = this.HEADER_PATTERN.exec(header);
-BulkPacket.prototype.read = function (stream) {
- dumpv("Reading bulk packet, handing off input stream");
+ if (!match) {
+ return null;
+ }
- // Temporarily pause monitoring of the input stream
- this._transport.pauseIncoming();
+ dumpv("Header matches bulk packet");
+ const packet = new BulkPacket(transport);
+ packet.header = {
+ actor: match[1],
+ type: match[2],
+ length: +match[3],
+ };
+ return packet;
+ }
- new Promise(resolve => {
- this._transport._onBulkReadReady({
- actor: this.actor,
- type: this.type,
- length: this.length,
- copyTo: output => {
- dumpv("CT length: " + this.length);
- const copying = StreamUtils.copyStream(stream, output, this.length);
- resolve(copying);
- return copying;
- },
- copyToBuffer: outputBuffer => {
- if (outputBuffer.byteLength !== this.length) {
- throw new Error(
- `In copyToBuffer, the output buffer needs to have the same length as the data to read. ${outputBuffer.byteLength} !== ${this.length}`
+ static HEADER_PATTERN = /^bulk ([^: ]+) ([^: ]+) (\d+):$/;
+
+ read(stream) {
+ dumpv("Reading bulk packet, handing off input stream");
+
+ // Temporarily pause monitoring of the input stream
+ this._transport.pauseIncoming();
+
+ new Promise(resolve => {
+ this._transport._onBulkReadReady({
+ actor: this.actor,
+ type: this.type,
+ length: this.length,
+ copyTo: output => {
+ dumpv("CT length: " + this.length);
+ const copying = StreamUtils.copyStream(stream, output, this.length);
+ resolve(copying);
+ return copying;
+ },
+ copyToBuffer: outputBuffer => {
+ if (outputBuffer.byteLength !== this.length) {
+ throw new Error(
+ `In copyToBuffer, the output buffer needs to have the same length as the data to read. ${outputBuffer.byteLength} !== ${this.length}`
+ );
+ }
+ dumpv("CT length: " + this.length);
+ const copying = StreamUtils.copyAsyncStreamToArrayBuffer(
+ stream,
+ outputBuffer
);
- }
- dumpv("CT length: " + this.length);
- const copying = StreamUtils.copyAsyncStreamToArrayBuffer(
- stream,
- outputBuffer
- );
- resolve(copying);
- return copying;
- },
- stream,
- done: resolve,
- });
- // Await the result of reading from the stream
- }).then(() => {
- dumpv("onReadDone called, ending bulk mode");
- this._done = true;
- this._transport.resumeIncoming();
- }, this._transport.close);
-
- // Ensure this is only done once
- this.read = () => {
- throw new Error("Tried to read() a BulkPacket's stream multiple times.");
- };
-};
+ resolve(copying);
+ return copying;
+ },
+ stream,
+ done: resolve,
+ });
+ // Await the result of reading from the stream
+ }).then(() => {
+ dumpv("onReadDone called, ending bulk mode");
+ this._done = true;
+ this._transport.resumeIncoming();
+ }, this._transport.close);
+
+ // Ensure this is only done once
+ this.read = () => {
+ throw new Error("Tried to read() a BulkPacket's stream multiple times.");
+ };
+ }
-BulkPacket.prototype.write = function (stream) {
- dumpv("Writing bulk packet");
+ write(stream) {
+ dumpv("Writing bulk packet");
- if (this._outgoingHeader === undefined) {
- dumpv("Serializing bulk packet header");
- // Format the serialized packet header to a buffer
- this._outgoingHeader =
- "bulk " + this.actor + " " + this.type + " " + this.length + ":";
- }
+ if (this._outgoingHeader === undefined) {
+ dumpv("Serializing bulk packet header");
+ // Format the serialized packet header to a buffer
+ this._outgoingHeader =
+ "bulk " + this.actor + " " + this.type + " " + this.length + ":";
+ }
- // Write the header, or whatever's left of it to write.
- if (this._outgoingHeader.length) {
- dumpv("Writing bulk packet header");
- const written = stream.write(
- this._outgoingHeader,
- this._outgoingHeader.length
- );
- this._outgoingHeader = this._outgoingHeader.slice(written);
- return;
- }
+ // Write the header, or whatever's left of it to write.
+ if (this._outgoingHeader.length) {
+ dumpv("Writing bulk packet header");
+ const written = stream.write(
+ this._outgoingHeader,
+ this._outgoingHeader.length
+ );
+ this._outgoingHeader = this._outgoingHeader.slice(written);
+ return;
+ }
- dumpv("Handing off output stream");
-
- // Temporarily pause the monitoring of the output stream
- this._transport.pauseOutgoing();
-
- new Promise(resolve => {
- this._readyForWriting.resolve({
- copyFrom: input => {
- dumpv("CF length: " + this.length);
- const copying = StreamUtils.copyStream(input, stream, this.length);
- resolve(copying);
- return copying;
- },
- copyFromBuffer: inputBuffer => {
- if (inputBuffer.byteLength !== this.length) {
- throw new Error(
- `In copyFromBuffer, the input buffer needs to have the same length as the data to write. ${inputBuffer.byteLength} !== ${this.length}`
+ dumpv("Handing off output stream");
+
+ // Temporarily pause the monitoring of the output stream
+ this._transport.pauseOutgoing();
+
+ new Promise(resolve => {
+ this._readyForWriting.resolve({
+ copyFrom: input => {
+ dumpv("CF length: " + this.length);
+ const copying = StreamUtils.copyStream(input, stream, this.length);
+ resolve(copying);
+ return copying;
+ },
+ copyFromBuffer: inputBuffer => {
+ if (inputBuffer.byteLength !== this.length) {
+ throw new Error(
+ `In copyFromBuffer, the input buffer needs to have the same length as the data to write. ${inputBuffer.byteLength} !== ${this.length}`
+ );
+ }
+ dumpv("CF length: " + this.length);
+ const copying = StreamUtils.copyArrayBufferToAsyncStream(
+ inputBuffer,
+ stream
);
- }
- dumpv("CF length: " + this.length);
- const copying = StreamUtils.copyArrayBufferToAsyncStream(
- inputBuffer,
- stream
- );
- resolve(copying);
- return copying;
- },
- stream,
- done: resolve,
- });
- // Await the result of writing to the stream
- }).then(() => {
- dumpv("onWriteDone called, ending bulk mode");
- this._done = true;
- this._transport.resumeOutgoing();
- }, this._transport.close);
-
- // Ensure this is only done once
- this.write = () => {
- throw new Error("Tried to write() a BulkPacket's stream multiple times.");
- };
-};
+ resolve(copying);
+ return copying;
+ },
+ stream,
+ done: resolve,
+ });
+ // Await the result of writing to the stream
+ }).then(() => {
+ dumpv("onWriteDone called, ending bulk mode");
+ this._done = true;
+ this._transport.resumeOutgoing();
+ }, this._transport.close);
+
+ // Ensure this is only done once
+ this.write = () => {
+ throw new Error("Tried to write() a BulkPacket's stream multiple times.");
+ };
+ }
-Object.defineProperty(BulkPacket.prototype, "streamReadyForWriting", {
- get() {
+ get streamReadyForWriting() {
return this._readyForWriting;
- },
-});
+ }
-Object.defineProperty(BulkPacket.prototype, "header", {
- get() {
+ get header() {
return {
actor: this.actor,
type: this.type,
length: this.length,
};
- },
+ }
- set(header) {
+ set header(header) {
this.actor = header.actor;
this.type = header.type;
this.length = header.length;
- },
-});
+ }
-Object.defineProperty(BulkPacket.prototype, "done", {
- get() {
+ get done() {
return this._done;
- },
-});
+ }
-BulkPacket.prototype.toString = function () {
- return "Bulk: " + JSON.stringify(this.header, null, 2);
-};
+ toString() {
+ return "Bulk: " + JSON.stringify(this.header, null, 2);
+ }
+}
exports.BulkPacket = BulkPacket;
/**
* RawPacket is used to test the transport's error handling of malformed
* packets, by writing data directly onto the stream.
- *
- * @param transport DebuggerTransport
- * The transport instance that will own the packet.
- * @param data string
- * The raw string to send out onto the stream.
*/
-function RawPacket(transport, data) {
- Packet.call(this, transport);
- this._data = data;
- this.length = data.length;
- this._done = false;
-}
-
-RawPacket.prototype = Object.create(Packet.prototype);
-
-RawPacket.prototype.read = function () {
- // This hasn't yet been needed for testing.
- throw Error("Not implmented.");
-};
-
-RawPacket.prototype.write = function (stream) {
- const written = stream.write(this._data, this._data.length);
- this._data = this._data.slice(written);
- this._done = !this._data.length;
-};
-
-Object.defineProperty(RawPacket.prototype, "done", {
- get() {
+class RawPacket extends Packet {
+ /**
+ * @param {DebuggerTransport} transport
+ * The transport instance that will own the packet.
+ * @param {string} data
+ * The raw string to send out onto the stream.
+ */
+ constructor(transport, data) {
+ super(transport);
+ this._data = data;
+ this.length = data.length;
+ this._done = false;
+ }
+ read() {
+ // This hasn't yet been needed for testing.
+ throw new Error("Not implemented.");
+ }
+ write(stream) {
+ const written = stream.write(this._data, this._data.length);
+ this._data = this._data.slice(written);
+ this._done = !this._data.length;
+ }
+ get done() {
return this._done;
- },
-});
+ }
+}
exports.RawPacket = RawPacket;