8 Commits

Author SHA1 Message Date
614b1858cb chore(release): v1.2.2 - bump from v1.2.1 2026-03-11 15:51:40 +01:00
45aae46925 Do not overwrite tags 2026-03-11 15:50:47 +01:00
d47007d905 Release script 2026-03-11 15:48:45 +01:00
6daadc97fc Treat transport codes and path as their own segments 2026-03-11 15:42:00 +01:00
10c7092313 Version 1.2.1 2026-03-11 15:06:48 +01:00
4973e1e52c Fixed bug with bit field order 2026-03-11 15:06:28 +01:00
f5fa45d11c Version 1.2.0 2026-03-11 13:24:09 +01:00
7c2cc0e0f6 Implemented Packet structure decoding 2026-03-11 13:23:52 +01:00
8 changed files with 654 additions and 97 deletions

View File

@@ -45,6 +45,53 @@ _Packet {
*/
```
## Packet structure parsing
The parser can also be instructed to generate a packet structure, useful for debugging or
printing packet details:
```ts
import { Packet } from '@hamradio/meshcore';
const raw = new Uint8Array(Buffer.from("050AA50E2CB0336DB67BBF78928A3BB9BF7A8B677C83B6EC0716F9DD10002A06", "hex"));
const packet = Packet.fromBytes(raw);
const { structure } = packet.decode(true);
console.log(structure);
/*
[
{
name: 'header',
data: Uint8Array(12) [
5, 10, 165, 14, 44,
176, 51, 109, 182, 123,
191, 120
],
fields: [
{ name: 'flags', type: 0, size: 1, bits: [Array] },
{ name: 'path length', type: 1, size: 1, bits: [Array] },
{ name: 'path hashes', type: 6, size: 10 }
]
},
{
name: 'response payload',
data: Uint8Array(20) [
146, 138, 59, 185, 191, 122,
139, 103, 124, 131, 182, 236,
7, 22, 249, 221, 16, 0,
42, 6
],
fields: [
{ name: 'destination hash', type: 1, size: 1, value: 146 },
{ name: 'source hash', type: 1, size: 1, value: 138 },
{ name: 'cipher MAC', type: 6, size: 2, value: [Uint8Array] },
{ name: 'cipher text', type: 6, size: 16, value: [Uint8Array] }
]
}
]
*/
```
## Identities
The package supports:

View File

@@ -1,6 +1,6 @@
{
"name": "@hamradio/meshcore",
"version": "1.1.3",
"version": "1.2.2",
"description": "MeshCore protocol support for Typescript",
"keywords": [
"MeshCore",

115
scripts/release.js Executable file
View File

@@ -0,0 +1,115 @@
#!/usr/bin/env node
// Minimal safe release script.
// Usage: node scripts/release.js [major|minor|patch|<version>]
const { execSync } = require("child_process");
const fs = require("fs");
const path = require("path");
const root = path.resolve(__dirname, "..");
const pkgPath = path.join(root, "package.json");
// Execute a shell command from the repository root, streaming stdout/stderr
// to the current terminal. Callers may override execSync options via `opts`.
function run(cmd, opts = {}) {
  const options = { stdio: "inherit", cwd: root, ...opts };
  return execSync(cmd, options);
}
// Execute a shell command from the repository root and return its
// captured stdout, trimmed of surrounding whitespace.
function runOutput(cmd) {
  const output = execSync(cmd, { cwd: root });
  return output.toString().trim();
}
// Compute the next version from `current` given a bump spec.
// `spec` is "major" | "minor" | "patch" for a relative bump, or an explicit
// "x.y.z" version which is returned as-is.
// Throws on an invalid spec, and on a malformed `current` version when a
// relative bump is requested (previously a malformed current version
// silently produced "NaN.NaN.NaN"-style output via unchecked parseInt).
function bumpSemver(current, spec) {
  const SEMVER = /^\d+\.\d+\.\d+$/;
  // Explicit version override takes precedence over relative bumps.
  if (SEMVER.test(spec)) return spec;
  if (!["major", "minor", "patch"].includes(spec)) {
    throw new Error("Invalid version spec");
  }
  if (!SEMVER.test(current)) {
    throw new Error(`Current version is not plain x.y.z semver: ${current}`);
  }
  const [maj, min, patch] = current.split(".").map(n => parseInt(n, 10));
  if (spec === "major") return `${maj + 1}.0.0`;
  if (spec === "minor") return `${maj}.${min + 1}.0`;
  return `${maj}.${min}.${patch + 1}`;
}
// Orchestrates the release: preflight checks, version bump, commit, tag,
// push, npm publish — with a best-effort rollback on any failure.
(async () => {
  // Bump spec: "major" | "minor" | "patch" | explicit "x.y.z" (default: patch).
  const arg = process.argv[2] || "patch";
  const pkgRaw = fs.readFileSync(pkgPath, "utf8");
  const pkg = JSON.parse(pkgRaw);
  const oldVersion = pkg.version;
  const newVersion = bumpSemver(oldVersion, arg);
  // Progress flags: record how far we got so the catch block knows what to undo.
  let committed = false;
  let tagged = false;
  let pushedTags = false;
  try {
    // Refuse to run if there are unstaged/uncommitted changes.
    const status = runOutput("git status --porcelain");
    if (status) throw new Error("Repository has uncommitted changes; please commit or stash before releasing.");
    console.log("Running tests...");
    run("npm run test:ci");
    console.log("Building...");
    run("npm run build");
    // Write the bumped version back to package.json (2-space indent + trailing newline).
    pkg.version = newVersion;
    fs.writeFileSync(pkgPath, JSON.stringify(pkg, null, 2) + "\n", "utf8");
    console.log(`Bumped version: ${oldVersion} -> ${newVersion}`);
    // Commit the version bump.
    run(`git add ${pkgPath}`);
    run(`git commit -m "chore(release): v${newVersion} - bump from v${oldVersion}"`);
    committed = true;
    // Ensure the tag doesn't already exist locally
    // (rev-parse exits non-zero when the ref is absent, hence the try/catch).
    let localTagExists = false;
    try {
      runOutput(`git rev-parse --verify refs/tags/v${newVersion}`);
      localTagExists = true;
    } catch (_) {
      localTagExists = false;
    }
    if (localTagExists) throw new Error(`Tag v${newVersion} already exists locally — aborting to avoid overwrite.`);
    // Ensure the tag doesn't exist on the remote either. A failed ls-remote
    // (e.g. no network) is treated as "tag absent" and lets the release proceed.
    const remoteTagInfo = (() => {
      try { return runOutput(`git ls-remote --tags origin v${newVersion}`); } catch (_) { return ""; }
    })();
    if (remoteTagInfo) throw new Error(`Tag v${newVersion} already exists on remote — aborting to avoid overwrite.`);
    // Create the annotated release tag.
    run(`git tag -a v${newVersion} -m "Release v${newVersion}"`);
    tagged = true;
    // Push the commit, then the tags.
    run("git push");
    run("git push --tags");
    pushedTags = true;
    // Publish. Scoped packages (@scope/name) default to restricted access on
    // npm, so they need an explicit --access public.
    console.log("Publishing to npm...");
    const publishCmd = pkg.name && pkg.name.startsWith("@") ? "npm publish --access public" : "npm publish";
    run(publishCmd);
    console.log(`Release v${newVersion} succeeded.`);
    process.exit(0);
  } catch (err) {
    console.error("Release failed:", err.message || err);
    try {
      // Delete the local tag, and the remote tag too if it was already pushed.
      if (tagged) {
        try { run(`git tag -d v${newVersion}`); } catch {}
        if (pushedTags) {
          try { run(`git push origin :refs/tags/v${newVersion}`); } catch {}
        }
      }
      // Undo the version-bump commit if it was made.
      // NOTE(review): if the commit was already pushed before the failure,
      // this resets only the local branch and leaves the remote one commit
      // ahead — confirm whether a remote revert is intended here.
      if (committed) {
        try { run("git reset --hard HEAD~1"); } catch {
          // fallback: restore package.json content
          fs.writeFileSync(pkgPath, pkgRaw, "utf8");
        }
      } else {
        // Nothing committed yet — just restore the original package.json.
        fs.writeFileSync(pkgPath, pkgRaw, "utf8");
      }
    } catch (rbErr) {
      console.error("Rollback error:", rbErr.message || rbErr);
    }
    process.exit(1);
  }
})();

View File

@@ -1,20 +1,50 @@
export * from './identity';
import * as identityTypes from './identity.types';
import type * as identityTypesTypes from './identity.types';
export {
type IPacket,
type Payload,
type EncryptedPayload,
type RequestPayload,
type ResponsePayload,
type TextPayload,
type AckPayload,
type AdvertPayload,
type GroupTextPayload,
type GroupDataPayload,
type AnonReqPayload,
type PathPayload,
type TracePayload,
type RawCustomPayload,
RouteType,
PayloadType,
RequestType,
TextType,
NodeType,
} from "./packet.types";
export { Packet } from "./packet";
export * from './crypto';
import * as cryptoTypes from './crypto.types';
import type * as cryptoTypesTypes from './crypto.types';
export {
type NodeHash,
type IIdentity,
type ILocalIdentity,
type IContact
} from "./identity.types";
export {
parseNodeHash,
Identity,
LocalIdentity,
Contact,
Group,
Contacts
} from "./identity";
export * from './packet';
import * as packetTypes from './packet.types';
import type * as packetTypesTypes from './packet.types';
export type {
identityTypes,
identityTypesTypes,
cryptoTypes,
cryptoTypesTypes,
packetTypes,
packetTypesTypes
};
export {
type IPublicKey,
type IPrivateKey,
type ISharedSecret,
type IStaticSecret
} from "./crypto.types";
export {
PublicKey,
PrivateKey,
SharedSecret,
StaticSecret,
} from "./crypto";

View File

@@ -2,6 +2,7 @@ import { sha256 } from "@noble/hashes/sha2.js";
import {
AckPayload,
AdvertAppData,
AdvertFlag,
AdvertPayload,
AnonReqPayload,
EncryptedPayload,
@@ -24,6 +25,7 @@ import {
BufferReader,
bytesToHex
} from "./parser";
import { FieldType, PacketSegment, PacketStructure } from "./parser.types";
export class Packet implements IPacket {
// Raw packet bytes.
@@ -40,6 +42,8 @@ export class Packet implements IPacket {
public pathHashSize: number;
public pathHashBytes: number;
public pathHashes: string[];
// Parsed packet segments.
public structure?: PacketStructure | undefined;
constructor(header: number, transport: [number, number] | undefined, pathLength: number, path: Uint8Array, payload: Uint8Array) {
this.header = header;
@@ -99,33 +103,137 @@ export class Packet implements IPacket {
return bytesToHex(digest.slice(0, 8));
}
public decode(): Payload {
/**
 * Lazily builds the non-payload part of the packet structure (header,
 * optional transport codes, path) into `this.structure`.
 * Idempotent: returns immediately if the structure was already built.
 * The payload segment is appended separately by `decode()`.
 * @throws Error when the path hash size is not 1, 2 or 4 bytes.
 */
private ensureStructure(): void {
    if (typeof this.structure !== "undefined") {
        return;
    }
    // Map the per-hash byte width onto the matching array field type.
    let pathHashType: FieldType
    switch (this.pathHashSize) {
        case 1: pathHashType = FieldType.BYTES; break;
        case 2: pathHashType = FieldType.WORDS; break;
        case 4: pathHashType = FieldType.DWORDS; break;
        default:
            throw new Error(`Unsupported path hash size: ${this.pathHashSize}`);
    }
    this.structure = [
        /* Header segment: the single header byte with its packed bit fields. */
        {
            name: "header",
            data: new Uint8Array([this.header]),
            fields: [
                /* Header flags */
                {
                    name: "flags",
                    type: FieldType.BITS,
                    size: 1,
                    bits: [
                        { name: "payload version", size: 2 },
                        { name: "payload type", size: 4 },
                        { name: "route type", size: 2 },
                    ]
                },
            ]
        },
        /* Transport codes segment: present only for transport route types.
           Each 16-bit code is serialized big-endian (high byte first). */
        ...(Packet.hasTransportCodes(this.routeType) ? [{
            name: "transport codes",
            data: new Uint8Array([
                (this.transport![0] >> 8) & 0xff, this.transport![0] & 0xff,
                (this.transport![1] >> 8) & 0xff, this.transport![1] & 0xff
            ]),
            fields: [
                {
                    name: "transport code 1",
                    type: FieldType.UINT16_BE,
                    size: 2,
                    value: this.transport![0]
                },
                {
                    name: "transport code 2",
                    type: FieldType.UINT16_BE,
                    size: 2,
                    value: this.transport![1]
                },
            ]
        }] : []),
        /* Path segment: length byte followed by the raw path hash bytes. */
        {
            name: "path",
            data: new Uint8Array([this.pathLength, ...this.path]),
            fields: [
                {
                    // NOTE(review): typed UINT8 but also carries a `bits`
                    // breakdown (hash size / hash count) — confirm consumers
                    // of PacketField handle bits on non-BITS fields.
                    name: "path length",
                    type: FieldType.UINT8,
                    size: 1,
                    bits: [
                        { name: "path hash size", size: 2 },
                        { name: "path hash count", size: 6 },
                    ]
                },
                {
                    name: "path hashes",
                    type: pathHashType,
                    size: this.path.length
                }
            ]
        },
    ]
}
public decode(withStructure?: boolean): Payload | { payload: Payload, structure: PacketStructure } {
let result: Payload | { payload: Payload, segment: PacketSegment };
switch (this.payloadType) {
case PayloadType.REQUEST:
return this.decodeRequest();
result = this.decodeRequest(withStructure);
break;
case PayloadType.RESPONSE:
return this.decodeResponse();
result = this.decodeResponse(withStructure);
break;
case PayloadType.TEXT:
return this.decodeText();
result = this.decodeText(withStructure);
break;
case PayloadType.ACK:
return this.decodeAck();
result = this.decodeAck(withStructure);
break;
case PayloadType.ADVERT:
return this.decodeAdvert();
result = this.decodeAdvert(withStructure);
break;
case PayloadType.GROUP_TEXT:
return this.decodeGroupText();
result = this.decodeGroupText(withStructure);
break;
case PayloadType.GROUP_DATA:
return this.decodeGroupData();
result = this.decodeGroupData(withStructure);
break;
case PayloadType.ANON_REQ:
return this.decodeAnonReq();
result = this.decodeAnonReq(withStructure);
break;
case PayloadType.PATH:
return this.decodePath();
result = this.decodePath(withStructure);
break;
case PayloadType.TRACE:
return this.decodeTrace();
result = this.decodeTrace(withStructure);
break;
case PayloadType.RAW_CUSTOM:
return this.decodeRawCustom();
result = this.decodeRawCustom(withStructure);
break;
default:
throw new Error(`Unsupported payload type: ${this.payloadType}`);
}
console.log('packet decode with structure:', typeof withStructure, withStructure, { result });
if (typeof withStructure === "boolean" && withStructure && "segment" in result && "payload" in result) {
this.ensureStructure();
const structure = [ ...this.structure!, result.segment ];
return { payload: result.payload, structure };
}
return result as Payload;
}
private decodeEncryptedPayload(reader: BufferReader): EncryptedPayload {
@@ -134,61 +242,128 @@ export class Packet implements IPacket {
return { cipherMAC, cipherText };
}
private decodeRequest(): RequestPayload {
private decodeRequest(withSegment?: boolean): Payload | { payload: Payload, segment: PacketSegment } {
if (this.payload.length < 4) {
throw new Error("Invalid request payload: too short");
}
const reader = new BufferReader(this.payload);
return {
type: PayloadType.REQUEST,
dst: reader.readByte(),
src: reader.readByte(),
encrypted: this.decodeEncryptedPayload(reader),
const dst = reader.readByte();
const src = reader.readByte();
const encrypted = this.decodeEncryptedPayload(reader);
const payload: RequestPayload = {
type: PayloadType.REQUEST,
dst,
src,
encrypted,
};
if (typeof withSegment === "boolean" && withSegment) {
const segment = {
name: "request payload",
data: this.payload,
fields: [
{ name: "destination hash", type: FieldType.UINT8, size: 1, value: dst },
{ name: "source hash", type: FieldType.UINT8, size: 1, value: src },
{ name: "cipher MAC", type: FieldType.BYTES, size: 2, value: encrypted.cipherMAC },
{ name: "cipher text", type: FieldType.BYTES, size: encrypted.cipherText.length, value: encrypted.cipherText }
]
}
return { payload, segment };
}
return payload;
}
private decodeResponse(): ResponsePayload {
private decodeResponse(withSegment?: boolean): Payload | { payload: Payload, segment: PacketSegment } {
if (this.payload.length < 4) {
throw new Error("Invalid response payload: too short");
}
const reader = new BufferReader(this.payload);
return {
const dst = reader.readByte();
const src = reader.readByte();
const encrypted = this.decodeEncryptedPayload(reader);
const payload: ResponsePayload = {
type: PayloadType.RESPONSE,
dst: reader.readByte(),
src: reader.readByte(),
encrypted: this.decodeEncryptedPayload(reader),
dst,
src,
encrypted,
};
if (typeof withSegment === "boolean" && withSegment) {
const segment = {
name: "response payload",
data: this.payload,
fields: [
{ name: "destination hash", type: FieldType.UINT8, size: 1, value: dst },
{ name: "source hash", type: FieldType.UINT8, size: 1, value: src },
{ name: "cipher MAC", type: FieldType.BYTES, size: 2, value: encrypted.cipherMAC },
{ name: "cipher text", type: FieldType.BYTES, size: encrypted.cipherText.length, value: encrypted.cipherText }
]
};
return { payload, segment };
}
return payload;
}
private decodeText(): TextPayload {
private decodeText(withSegment?: boolean): Payload | { payload: Payload, segment: PacketSegment } {
if (this.payload.length < 4) {
throw new Error("Invalid text payload: too short");
}
const reader = new BufferReader(this.payload);
return {
const dst = reader.readByte();
const src = reader.readByte();
const encrypted = this.decodeEncryptedPayload(reader);
const payload: TextPayload = {
type: PayloadType.TEXT,
dst: reader.readByte(),
src: reader.readByte(),
encrypted: this.decodeEncryptedPayload(reader),
dst,
src,
encrypted,
};
if (typeof withSegment === "boolean" && withSegment) {
const segment = {
name: "text payload",
data: this.payload,
fields: [
{ name: "destination hash", type: FieldType.UINT8, size: 1, value: dst },
{ name: "source hash", type: FieldType.UINT8, size: 1, value: src },
{ name: "cipher MAC", type: FieldType.BYTES, size: 2, value: encrypted.cipherMAC },
{ name: "cipher text", type: FieldType.BYTES, size: encrypted.cipherText.length, value: encrypted.cipherText }
]
};
return { payload, segment };
}
return payload;
}
private decodeAck(): AckPayload {
private decodeAck(withSegment?: boolean): Payload | { payload: AckPayload, segment: PacketSegment } {
if (this.payload.length < 4) {
throw new Error("Invalid ack payload: too short");
}
const reader = new BufferReader(this.payload);
return {
const checksum = reader.readBytes(4);
const payload: AckPayload = {
type: PayloadType.ACK,
checksum: reader.readBytes(4),
checksum,
};
if (typeof withSegment === "boolean" && withSegment) {
const segment = {
name: "ack payload",
data: this.payload,
fields: [
{ name: "checksum", type: FieldType.BYTES, size: 4, value: checksum }
]
};
return { payload, segment };
}
return payload;
}
private decodeAdvert(): AdvertPayload {
private decodeAdvert(withSegment?: boolean): Payload | { payload: AdvertPayload, segment: PacketSegment } {
if (this.payload.length < 4) {
throw new Error("Invalid advert payload: too short");
}
@@ -201,25 +376,57 @@ export class Packet implements IPacket {
signature: reader.readBytes(64),
}
let segment: PacketSegment | undefined;
if (typeof withSegment === "boolean" && withSegment) {
segment = {
name: "advert payload",
data: this.payload,
fields: [
{ type: FieldType.BYTES, name: "public key", size: 32 },
{ type: FieldType.UINT32_LE, name: "timestamp", size: 4, value: payload.timestamp! },
{ type: FieldType.BYTES, name: "signature", size: 64 },
]
};
}
const flags = reader.readByte();
const appdata: AdvertAppData = {
nodeType: flags & 0x0f,
hasLocation: (flags & 0x10) !== 0,
hasFeature1: (flags & 0x20) !== 0,
hasFeature2: (flags & 0x40) !== 0,
hasName: (flags & 0x80) !== 0,
hasLocation: (flags & AdvertFlag.HAS_LOCATION) !== 0,
hasFeature1: (flags & AdvertFlag.HAS_FEATURE1) !== 0,
hasFeature2: (flags & AdvertFlag.HAS_FEATURE2) !== 0,
hasName: (flags & AdvertFlag.HAS_NAME) !== 0,
}
if (typeof withSegment === "boolean" && withSegment) {
segment!.fields.push({ type: FieldType.BITS, name: "flags", size: 1, value: flags, bits: [
{ size: 1, name: "name flag" },
{ size: 1, name: "feature2 flag" },
{ size: 1, name: "feature1 flag" },
{ size: 1, name: "location flag" },
{ size: 4, name: "node type" },
]});
}
if (appdata.hasLocation) {
const lat = reader.readInt32LE() / 100000;
const lon = reader.readInt32LE() / 100000;
appdata.location = [lat, lon];
if (typeof withSegment === "boolean" && withSegment) {
segment!.fields.push({ type: FieldType.UINT32_LE, name: "latitude", size: 4, value: lat });
segment!.fields.push({ type: FieldType.UINT32_LE, name: "longitude", size: 4, value: lon });
}
}
if (appdata.hasFeature1) {
appdata.feature1 = reader.readUint16LE();
if (typeof withSegment === "boolean" && withSegment) {
segment!.fields.push({ type: FieldType.UINT16_LE, name: "feature1", size: 2, value: appdata.feature1 });
}
}
if (appdata.hasFeature2) {
appdata.feature2 = reader.readUint16LE();
if (typeof withSegment === "boolean" && withSegment) {
segment!.fields.push({ type: FieldType.UINT16_LE, name: "feature2", size: 2, value: appdata.feature2 });
}
}
if (appdata.hasName) {
const nameBytes = reader.readBytes();
@@ -228,86 +435,175 @@ export class Packet implements IPacket {
nullPos = nameBytes.length;
}
appdata.name = new TextDecoder('utf-8').decode(nameBytes.subarray(0, nullPos));
if (typeof withSegment === "boolean" && withSegment) {
segment!.fields.push({ type: FieldType.C_STRING, name: "name", size: nameBytes.length, value: appdata.name });
}
}
return {
...payload,
appdata
} as AdvertPayload;
if (typeof withSegment === "boolean" && withSegment && typeof segment !== "undefined") {
return { payload: { ...payload, appdata } as AdvertPayload, segment };
}
return { ...payload, appdata } as AdvertPayload;
}
private decodeGroupText(): GroupTextPayload {
private decodeGroupText(withSegment?: boolean): Payload | { payload: Payload, segment: PacketSegment } {
if (this.payload.length < 3) {
throw new Error("Invalid group text payload: too short");
}
const reader = new BufferReader(this.payload);
return {
const channelHash = reader.readByte();
const encrypted = this.decodeEncryptedPayload(reader);
const payload: GroupTextPayload = {
type: PayloadType.GROUP_TEXT,
channelHash: reader.readByte(),
encrypted: this.decodeEncryptedPayload(reader),
channelHash,
encrypted,
};
if (typeof withSegment === "boolean" && withSegment) {
const segment = {
name: "group text payload",
data: this.payload,
fields: [
{ name: "channel hash", type: FieldType.UINT8, size: 1, value: channelHash },
{ name: "cipher MAC", type: FieldType.BYTES, size: 2, value: encrypted.cipherMAC },
{ name: "cipher text", type: FieldType.BYTES, size: encrypted.cipherText.length, value: encrypted.cipherText }
]
};
return { payload, segment };
}
return payload;
}
private decodeGroupData(): GroupDataPayload {
private decodeGroupData(withSegment?: boolean): Payload | { payload: Payload, segment: PacketSegment } {
if (this.payload.length < 3) {
throw new Error("Invalid group data payload: too short");
}
const reader = new BufferReader(this.payload);
return {
const payload: GroupDataPayload = {
type: PayloadType.GROUP_DATA,
channelHash: reader.readByte(),
encrypted: this.decodeEncryptedPayload(reader),
};
if (typeof withSegment === "boolean" && withSegment) {
const segment = {
name: "group data payload",
data: this.payload,
fields: [
{ name: "channel hash", type: FieldType.UINT8, size: 1, value: payload.channelHash },
{ name: "cipher MAC", type: FieldType.BYTES, size: 2, value: payload.encrypted.cipherMAC },
{ name: "cipher text", type: FieldType.BYTES, size: payload.encrypted.cipherText.length, value: payload.encrypted.cipherText }
]
};
return { payload, segment };
}
return payload;
}
private decodeAnonReq(): AnonReqPayload {
private decodeAnonReq(withSegment?: boolean): Payload | { payload: Payload, segment: PacketSegment } {
if (this.payload.length < 1 + 32 + 2) {
throw new Error("Invalid anon req payload: too short");
}
const reader = new BufferReader(this.payload);
return {
const payload: AnonReqPayload = {
type: PayloadType.ANON_REQ,
dst: reader.readByte(),
publicKey: reader.readBytes(32),
encrypted: this.decodeEncryptedPayload(reader),
}
if (typeof withSegment === "boolean" && withSegment) {
const segment = {
name: "anon req payload",
data: this.payload,
fields: [
{ name: "destination hash", type: FieldType.UINT8, size: 1, value: payload.dst },
{ name: "public key", type: FieldType.BYTES, size: 32, value: payload.publicKey },
{ name: "cipher MAC", type: FieldType.BYTES, size: 2, value: payload.encrypted.cipherMAC },
{ name: "cipher text", type: FieldType.BYTES, size: payload.encrypted.cipherText.length, value: payload.encrypted.cipherText }
]
};
return { payload, segment };
}
return payload;
}
private decodePath(): PathPayload {
private decodePath(withSegment?: boolean): Payload | { payload: Payload, segment: PacketSegment } {
if (this.payload.length < 2) {
throw new Error("Invalid path payload: too short");
}
const reader = new BufferReader(this.payload);
return {
const payload: PathPayload = {
type: PayloadType.PATH,
dst: reader.readByte(),
src: reader.readByte(),
};
if (typeof withSegment === "boolean" && withSegment) {
const segment = {
name: "path payload",
data: this.payload,
fields: [
{ name: "destination hash", type: FieldType.UINT8, size: 1, value: payload.dst },
{ name: "source hash", type: FieldType.UINT8, size: 1, value: payload.src }
]
};
return { payload, segment };
}
return payload;
}
private decodeTrace(): TracePayload {
private decodeTrace(withSegment?: boolean): Payload | { payload: Payload, segment: PacketSegment } {
if (this.payload.length < 9) {
throw new Error("Invalid trace payload: too short");
}
const reader = new BufferReader(this.payload);
return {
const payload: TracePayload = {
type: PayloadType.TRACE,
tag: reader.readUint32LE() >>> 0,
authCode: reader.readUint32LE() >>> 0,
flags: reader.readByte() & 0x03,
nodes: reader.readBytes()
};
if (typeof withSegment === "boolean" && withSegment) {
const segment = {
name: "trace payload",
data: this.payload,
fields: [
{ name: "tag", type: FieldType.DWORDS, size: 4, value: payload.tag },
{ name: "auth code", type: FieldType.DWORDS, size: 4, value: payload.authCode },
{ name: "flags", type: FieldType.UINT8, size: 1, value: payload.flags },
{ name: "nodes", type: FieldType.BYTES, size: payload.nodes.length, value: payload.nodes }
]
};
return { payload, segment };
}
return payload;
}
private decodeRawCustom(): RawCustomPayload {
return {
private decodeRawCustom(withSegment?: boolean): Payload | { payload: Payload, segment: PacketSegment } {
const payload: RawCustomPayload = {
type: PayloadType.RAW_CUSTOM,
data: this.payload,
};
if (typeof withSegment === "boolean" && withSegment) {
const segment = {
name: "raw custom payload",
data: this.payload,
fields: [
{ name: "data", type: FieldType.BYTES, size: this.payload.length, value: this.payload }
]
};
return { payload, segment };
}
return payload;
}
}

View File

@@ -1,4 +1,5 @@
import { NodeHash } from "./identity.types";
import { PacketStructure } from "./parser.types";
// IPacket contains the raw packet bytes.
export type Uint16 = number; // 0..65535
@@ -12,7 +13,7 @@ export interface IPacket {
path: Uint8Array;
payload: Uint8Array;
decode(): Payload;
decode(withStructure?: boolean): Payload | { payload: Payload, structure: PacketStructure }
}
export enum RouteType {
@@ -36,7 +37,7 @@ export enum PayloadType {
RAW_CUSTOM = 0x0f,
}
export type Payload =
export type Payload = BasePayload & (
| RequestPayload
| ResponsePayload
| TextPayload
@@ -47,14 +48,19 @@ export type Payload =
| AnonReqPayload
| PathPayload
| TracePayload
| RawCustomPayload;
| RawCustomPayload
);
export interface BasePayload {
type: PayloadType;
}
export interface EncryptedPayload {
cipherMAC: Uint8Array;
cipherText: Uint8Array;
}
export interface RequestPayload {
export interface RequestPayload extends BasePayload {
type: PayloadType.REQUEST;
dst: NodeHash;
src: NodeHash;
@@ -78,7 +84,7 @@ export interface DecryptedRequest {
requestData: Uint8Array;
}
export interface ResponsePayload {
export interface ResponsePayload extends BasePayload {
type: PayloadType.RESPONSE;
dst: NodeHash;
src: NodeHash;
@@ -91,7 +97,7 @@ export interface DecryptedResponse {
responseData: Uint8Array;
}
export interface TextPayload {
export interface TextPayload extends BasePayload {
type: PayloadType.TEXT;
dst: NodeHash;
src: NodeHash;
@@ -112,12 +118,12 @@ export interface DecryptedText {
message: string;
}
export interface AckPayload {
export interface AckPayload extends BasePayload {
type: PayloadType.ACK;
checksum: Uint8Array;
}
export interface AdvertPayload {
export interface AdvertPayload extends BasePayload {
type: PayloadType.ADVERT;
publicKey: Uint8Array;
timestamp: Date;
@@ -132,6 +138,13 @@ export enum NodeType {
SENSOR_NODE = 0x04,
}
export enum AdvertFlag {
HAS_LOCATION = 0x10,
HAS_FEATURE1 = 0x20,
HAS_FEATURE2 = 0x40,
HAS_NAME = 0x80,
}
export interface AdvertAppData {
nodeType: NodeType;
hasLocation: boolean;
@@ -144,7 +157,7 @@ export interface AdvertAppData {
name?: string;
}
export interface GroupTextPayload {
export interface GroupTextPayload extends BasePayload {
type: PayloadType.GROUP_TEXT;
channelHash: NodeHash;
encrypted: EncryptedPayload;
@@ -158,7 +171,7 @@ export interface DecryptedGroupText {
message: string;
}
export interface GroupDataPayload {
export interface GroupDataPayload extends BasePayload {
type: PayloadType.GROUP_DATA;
channelHash: NodeHash;
encrypted: EncryptedPayload;
@@ -170,7 +183,7 @@ export interface DecryptedGroupData {
data: Uint8Array;
}
export interface AnonReqPayload {
export interface AnonReqPayload extends BasePayload {
type: PayloadType.ANON_REQ;
dst: NodeHash;
publicKey: Uint8Array;
@@ -183,13 +196,13 @@ export interface DecryptedAnonReq {
data: Uint8Array;
}
export interface PathPayload {
export interface PathPayload extends BasePayload {
type: PayloadType.PATH;
dst: NodeHash;
src: NodeHash;
}
export interface TracePayload {
export interface TracePayload extends BasePayload {
type: PayloadType.TRACE;
tag: number;
authCode: number;
@@ -197,7 +210,7 @@ export interface TracePayload {
nodes: Uint8Array;
}
export interface RawCustomPayload {
export interface RawCustomPayload extends BasePayload {
type: PayloadType.RAW_CUSTOM;
data: Uint8Array;
}

35
src/parser.types.ts Normal file
View File

@@ -0,0 +1,35 @@
// How a field's raw bytes are interpreted when rendering a packet structure.
export enum FieldType {
    BITS = 0,      // Packed bit sub-fields, described by PacketField.bits
    UINT8 = 1,
    UINT16_LE = 2,
    UINT16_BE = 3,
    UINT32_LE = 4,
    UINT32_BE = 5,
    BYTES = 6, // 8-bits per value
    WORDS = 7, // 16-bits per value
    DWORDS = 8, // 32-bits per value
    QWORDS = 9, // 64-bits per value
    C_STRING = 10, // NUL-terminated string
}
// Interface for the parsed packet segments, used for debugging and testing.
export type PacketStructure = PacketSegment[];
// A contiguous slice of the packet (e.g. header, path, payload) together
// with its raw bytes and the decoded fields it contains.
export interface PacketSegment {
    name: string;
    data: Uint8Array;
    fields: PacketField[];
}
// One decoded field within a segment.
export interface PacketField {
    type: FieldType;
    size: number; // Size in bytes
    name?: string;
    bits?: PacketFieldBit[]; // Only for bit fields in FieldType.BITS
    value?: any; // Optional decoded value
}
// One sub-field of a FieldType.BITS field; sub-fields are consumed in
// declaration order and sizes are expressed in bits, not bytes.
export interface PacketFieldBit {
    name: string;
    size: number; // Size in bits
}

View File

@@ -1,6 +1,6 @@
import { describe, expect, test } from 'vitest';
import { Packet } from '../src/packet';
import { PayloadType, RouteType, NodeType, TracePayload, AdvertPayload, RequestPayload, TextPayload, ResponsePayload, RawCustomPayload, AnonReqPayload } from '../src/packet.types';
import { PayloadType, RouteType, NodeType, TracePayload, AdvertPayload, RequestPayload, TextPayload, ResponsePayload, RawCustomPayload, AnonReqPayload, Payload, AckPayload, PathPayload, GroupDataPayload, GroupTextPayload } from '../src/packet.types';
import { hexToBytes, bytesToHex } from '../src/parser';
describe('Packet.fromBytes', () => {
@@ -51,7 +51,7 @@ describe('Packet.fromBytes', () => {
const pkt = Packet.fromBytes(bytes);
expect(pkt.routeType).toBe(RouteType.TRANSPORT_DIRECT);
expect(pkt.payloadType).toBe(PayloadType.TRACE);
const payload = pkt.decode();
const payload = pkt.decode() as TracePayload;
expect(payload.type).toBe(PayloadType.TRACE);
// the TRACE payload format has been updated; ensure we decode a TRACE payload
expect(payload.type).toBe(PayloadType.TRACE);
@@ -148,7 +148,7 @@ describe('Packet decode branches and transport/path parsing', () => {
test('ACK decode and RAW_CUSTOM', () => {
const ackPayload = new Uint8Array([0x01,0x02,0x03,0x04]);
const ack = Packet.fromBytes(makePacket(PayloadType.ACK, RouteType.DIRECT, new Uint8Array([]), ackPayload)).decode();
const ack = Packet.fromBytes(makePacket(PayloadType.ACK, RouteType.DIRECT, new Uint8Array([]), ackPayload)).decode() as AckPayload;
expect(ack.type).toBe(PayloadType.ACK);
const custom = new Uint8Array([0x99,0x88,0x77]);
@@ -173,9 +173,9 @@ describe('Packet decode branches and transport/path parsing', () => {
test('GROUP_TEXT and GROUP_DATA decode', () => {
const payload = new Uint8Array([0x55, 0x01, 0x02, 0x03]); // channelHash + mac(2) + cipher
const gt = Packet.fromBytes(makePacket(PayloadType.GROUP_TEXT, RouteType.DIRECT, new Uint8Array([]), payload)).decode();
const gt = Packet.fromBytes(makePacket(PayloadType.GROUP_TEXT, RouteType.DIRECT, new Uint8Array([]), payload)).decode() as GroupTextPayload;
expect(gt.type).toBe(PayloadType.GROUP_TEXT);
const gd = Packet.fromBytes(makePacket(PayloadType.GROUP_DATA, RouteType.DIRECT, new Uint8Array([]), payload)).decode();
const gd = Packet.fromBytes(makePacket(PayloadType.GROUP_DATA, RouteType.DIRECT, new Uint8Array([]), payload)).decode() as GroupDataPayload;
expect(gd.type).toBe(PayloadType.GROUP_DATA);
});
@@ -191,7 +191,7 @@ describe('Packet decode branches and transport/path parsing', () => {
test('PATH and TRACE decode nodes', () => {
const pathPayload = new Uint8Array([0x0a, 0x0b]);
const path = Packet.fromBytes(makePacket(PayloadType.PATH, RouteType.DIRECT, new Uint8Array([]), pathPayload)).decode();
const path = Packet.fromBytes(makePacket(PayloadType.PATH, RouteType.DIRECT, new Uint8Array([]), pathPayload)).decode() as PathPayload;
expect(path.type).toBe(PayloadType.PATH);
const nodes = new Uint8Array([0x01,0x02,0x03]);
@@ -217,10 +217,10 @@ describe('Packet decode branches and transport/path parsing', () => {
arr.set(pathBytes, parts.length);
arr.set(payload, parts.length + pathBytes.length);
const pkt = Packet.fromBytes(arr);
expect(pkt.pathHashCount).toBe(2);
expect(pkt.pathHashSize).toBe(3);
expect(pkt.pathHashes.length).toBe(2);
expect(pkt.pathHashes[0]).toBe(bytesToHex(pathBytes.subarray(0,3)));
expect(pkt.pathHashCount).toBe(3);
expect(pkt.pathHashSize).toBe(2);
expect(pkt.pathHashes.length).toBe(3);
expect(pkt.pathHashes[0]).toBe(bytesToHex(pathBytes.subarray(0,2)));
});
test('unsupported payload type throws', () => {
@@ -231,3 +231,24 @@ describe('Packet decode branches and transport/path parsing', () => {
expect(() => pkt.decode()).toThrow();
});
});
// Covers both call shapes of Packet.decode(): bare payload (no argument)
// versus { payload, structure } when structure generation is requested.
describe("Packet.decode overloads", () => {
    // Minimal packet: header byte 13 (presumably encodes payload type ACK —
    // confirm against the header bit layout), path length 0, then the
    // 4-byte ACK checksum.
    const ackBytes = new Uint8Array([ /* header */ 13, /* pathLength */ 0, /* payload (4 bytes checksum) */ 1, 2, 3, 4 ]);
    test("decode() returns payload only", () => {
        const pkt = Packet.fromBytes(ackBytes);
        const payload = pkt.decode() as Payload;
        expect(payload.type).toBe(PayloadType.ACK);
        expect((payload as any).checksum).toEqual(new Uint8Array([1, 2, 3, 4]));
    });
    test("decode(true) returns { payload, structure }", () => {
        const pkt = Packet.fromBytes(ackBytes);
        const res = pkt.decode(true) as any;
        expect(res).toHaveProperty("payload");
        expect(res).toHaveProperty("structure");
        expect(res.payload.type).toBe(PayloadType.ACK);
        expect(Array.isArray(res.structure)).toBe(true);
        // The payload segment is appended after the header/path segments.
        expect(res.structure[res.structure.length - 1].name).toBe("ack payload");
    });
});
});