Added support for !DAO! marker parsing

This commit is contained in:
2026-03-20 13:53:36 +01:00
parent c7c54984ba
commit 4502f9902b
3 changed files with 410 additions and 181 deletions

View File

@@ -1,6 +1,6 @@
import { type Field, FieldType } from "@hamradio/packet";
import type { Extras, ITelemetry, Payload } from "./frame.types";
import type { Extras, IDAO, ITelemetry, Payload } from "./frame.types";
import { base91ToNumber, feetToMeters, knotsToKmh, milesToMeters } from "./parser";
/**
@@ -20,102 +20,203 @@ export const decodeCommentExtras = (comment: string, withStructure: boolean = fa
if (!comment || comment.length === 0) return { comment };
const extras: Partial<Extras> = {};
const fields: Field[] = [];
const beforeFields: Field[] = [];
let altitudeOffset: number | undefined = undefined;
let altitudeFields: Field[] = [];
let commentOffset: number = 0;
let commentBefore: string | undefined = undefined;
const fields: FieldWithOffset[] = [];
const ignore: {
offset: number;
length: number;
}[] = []; // Tracks offsets of tokens to ignore when reconstructing comment
// eslint-disable-next-line no-useless-assignment
let match: RegExpMatchArray | null = null;
// Process successive 7-byte data extensions at the start of the comment.
comment = comment.trimStart();
let ext = comment;
while (ext.length >= 7) {
// We first process the altitude marker, because it may appear anywhere
// in the comment and we want to extract it and its value before
// processing other tokens that may be present.
//
// /A=NNNNNN -> altitude in feet (6 digits)
// /A=-NNNNN -> altitude in feet with leading minus for negative altitudes (5 digits)
const altMatch = ext.match(/\/A=(-\d{5}|\d{6})/);
if (altitudeOffset === undefined && altMatch) {
const altitude = feetToMeters(parseInt(altMatch[1], 10)); // feet to meters
if (isNaN(altitude)) {
break; // Invalid altitude format, stop parsing extras
}
let offset = 0; // Tracks the current offset in the original comment string for field positioning
let cutoff = comment.length; // Tracks the offset of the altitude token for relative positioning of subsequent fields
// Process the DAO (precision and datum option); if it is present it marks the
// cutoff for subsequent fields since it is typically at the end of the comment
// and relative to the position.
if ((match = comment.match(/!(...)!/))) {
const dao = decodeDAO(match[1]);
extras.dao = dao;
// Set cutoff to DAO token for subsequent fields to be relative to it (since it is typically at the end of the comment).
cutoff = comment.indexOf(match[0]);
ignore.push({ offset: cutoff, length: match[0].length });
if (withStructure) {
fields.push(
{
type: FieldType.CHAR,
name: "DAO marker",
length: 1,
value: "!",
offset: cutoff
},
{
type: FieldType.STRING,
name: "DAO data",
length: 3,
value: match[1],
offset: cutoff + 1
},
{
type: FieldType.CHAR,
name: "DAO end marker",
length: 1,
value: "!",
offset: cutoff + 4
}
);
}
// Mask DAO token in comment for further parsing
comment = comment.replace(match[0], "X".repeat(match[0].length));
}
// Process the altitude marker, because it may appear anywhere in the comment
// and we want to extract it and its value before processing other tokens
// that may be present.
//
// /A=NNNNNN -> altitude in feet (6 digits)
// /A=-NNNNN -> altitude in feet with leading minus for negative altitudes (5 digits)
if ((match = comment.substring(0, cutoff).match(/\/A=(-\d{5}|\d{6})/))) {
const altitude = feetToMeters(parseInt(match[1], 10)); // feet to meters
if (!isNaN(altitude)) {
extras.altitude = altitude;
// Keep track of where the altitude token appears in the comment for structure purposes.
altitudeOffset = comment.indexOf(altMatch[0]);
const altitudeOffset = comment.indexOf(match[0]);
ignore.push({ offset: altitudeOffset, length: match[0].length });
if (altitudeOffset === 0) {
offset = match[0].length; // Set offset to altitude token for subsequent fields to be relative to it (since we will remove it from the comment)
} else if (cutoff > altitudeOffset) {
cutoff = altitudeOffset; // Set cutoff for subsequent fields to be relative to altitude token if it appears before them in the comment
}
if (withStructure) {
altitudeFields = [
fields.push(
{
type: FieldType.STRING,
name: "altitude marker",
data: new TextEncoder().encode("/A=").buffer,
value: "/A=",
length: 3
length: 3,
offset: altitudeOffset
},
{
type: FieldType.STRING,
name: "altitude",
data: new TextEncoder().encode(altMatch[1]).buffer,
data: new TextEncoder().encode(match[1]).buffer,
value: altitude.toFixed(1) + "m",
length: 6
length: 6,
offset: altitudeOffset + 3
}
];
);
}
if (altitudeOffset > 0) {
// Reset the comment with the altitude marker removed.
commentBefore = comment.substring(0, altitudeOffset);
comment = comment.substring(altitudeOffset + altMatch[0].length);
ext = commentBefore; // Continue processing extensions in the part of the comment before the altitude marker
commentOffset = 0; // Reset
continue;
}
// remove altitude token from ext and advance ext for further parsing
commentOffset += altMatch[0].length;
ext = ext.replace(altMatch[0], "").trimStart();
continue;
// Mask in comment (for debugging)
comment = comment.replace(match[0], "X".repeat(match[0].length)); // Remove altitude token from comment for further parsing
}
}
// Next we process any inline telemetry comment, which is delimited by |...| and can appear anywhere in the comment. We want to extract it before processing other tokens that may be present.
if ((match = comment.substring(offset, cutoff).match(/\|([^|]+)\|/))) {
try {
const telemetry = decodeTelemetry(match[1]);
extras.telemetry = telemetry;
const telemetryOffset = comment.indexOf(match[0]);
ignore.push({ offset: telemetryOffset + offset, length: match[0].length });
if (telemetryOffset == 0) {
offset += match[0].length; // Set offset to telemetry token for subsequent fields to be relative to it (since we will remove it from the comment)
} else if (cutoff > telemetryOffset) {
cutoff = telemetryOffset; // Set cutoff for subsequent fields to be relative to telemetry token if it appears before them in the comment
}
if (withStructure) {
fields.push(
{
type: FieldType.CHAR,
name: "telemetry start",
length: 1,
value: "|",
offset: telemetryOffset
},
{
type: FieldType.STRING,
name: "sequence",
length: 2,
value: telemetry.sequence.toString(),
offset: telemetryOffset + 1
},
...telemetry.analog.map((a, i) => ({
type: FieldType.STRING,
name: `analog${i + 1}`,
length: 2,
value: a.toString(),
offset: telemetryOffset + 3 + i * 2
})),
...(telemetry.digital !== undefined
? [
{
type: FieldType.STRING,
name: "digital",
length: 2,
value: telemetry.digital.toString(),
offset: telemetryOffset + 3 + telemetry.analog.length * 2
}
]
: []),
{
type: FieldType.CHAR,
name: "telemetry end",
length: 1,
value: "|",
offset: telemetryOffset + match[1].length + 1
}
);
}
// Mask telemetry token in comment for further parsing
comment = comment.replace(match[0], "X".repeat(match[0].length));
} catch {
// Invalid telemetry format, ignore
}
}
// Process successive 7-byte data extensions at the start of the comment up to the first
// non-extension token, which may be altitude, telemetry, or other tokens. These
// extensions can appear in any order and we want to extract them all.
let ext = comment.substring(offset, cutoff);
while (ext.length >= 7) {
// RNGrrrr -> pre-calculated range in miles (4 digits)
if ((match = ext.match(/^RNG(\d{4})/))) {
const r = match[1];
extras.range = milesToMeters(parseInt(r, 10)) / 1000.0; // Convert to kilometers
if (withStructure) {
(altitudeOffset !== undefined && commentOffset < altitudeOffset ? beforeFields : fields).push(
fields.push(
{
type: FieldType.STRING,
name: "range marker",
value: "RNG",
length: 3
length: 3,
offset: offset
},
{
type: FieldType.STRING,
name: "range (rrrr)",
length: 4,
value: extras.range.toFixed(1) + "km"
value: extras.range.toFixed(1) + "km",
offset: offset + 3
}
);
}
// remove range token from ext and advance ext for further parsing
if (commentBefore !== undefined && commentBefore.length > 0) {
commentBefore = commentBefore.substring(7);
ext = commentBefore;
} else {
commentOffset += 7;
ext = ext.substring(7);
}
ignore.push({ offset, length: 7 });
offset += 7;
ext = ext.substring(7);
continue;
}
@@ -154,25 +255,34 @@ export const decodeCommentExtras = (comment: string, withStructure: boolean = fa
};
if (withStructure) {
(altitudeOffset !== undefined && commentOffset < altitudeOffset ? beforeFields : fields).push(
{ type: FieldType.STRING, name: "PHG marker", length: 3, value: "PHG" },
fields.push(
{
type: FieldType.STRING,
name: "PHG marker",
length: 3,
value: "PHG",
offset: offset
},
{
type: FieldType.STRING,
name: "power (p)",
length: 1,
value: powerWatts !== undefined ? powerWatts.toString() + "W" : undefined
value: powerWatts !== undefined ? powerWatts.toString() + "W" : undefined,
offset: offset + 3
},
{
type: FieldType.STRING,
name: "height (h)",
length: 1,
value: heightMeters !== undefined ? heightMeters.toString() + "m" : undefined
value: heightMeters !== undefined ? heightMeters.toString() + "m" : undefined,
offset: offset + 4
},
{
type: FieldType.STRING,
name: "gain (g)",
length: 1,
value: gainDbi !== undefined ? gainDbi.toString() + "dBi" : undefined
value: gainDbi !== undefined ? gainDbi.toString() + "dBi" : undefined,
offset: offset + 5
},
{
type: FieldType.STRING,
@@ -183,19 +293,16 @@ export const decodeCommentExtras = (comment: string, withStructure: boolean = fa
? typeof directivity === "number"
? directivity.toString() + "°"
: directivity
: undefined
: undefined,
offset: offset + 6
}
);
}
// remove PHG token from ext and advance ext for further parsing
if (commentBefore !== undefined && commentBefore.length > 0) {
commentBefore = commentBefore.substring(7);
} else {
commentOffset += 7;
}
ignore.push({ offset, length: 7 });
offset += 7;
ext = ext.substring(7).trimStart();
continue;
}
@@ -242,25 +349,34 @@ export const decodeCommentExtras = (comment: string, withStructure: boolean = fa
};
if (withStructure) {
(altitudeOffset !== undefined && commentOffset < altitudeOffset ? beforeFields : fields).push(
{ type: FieldType.STRING, name: "DFS marker", length: 3, value: "DFS" },
fields.push(
{
type: FieldType.STRING,
name: "DFS marker",
length: 3,
value: "DFS",
offset: offset
},
{
type: FieldType.STRING,
name: "strength (s)",
length: 1,
value: strength !== undefined ? strength.toString() : undefined
value: strength !== undefined ? strength.toString() : undefined,
offset: offset + 3
},
{
type: FieldType.STRING,
name: "height (h)",
length: 1,
value: heightMeters !== undefined ? heightMeters.toString() + "m" : undefined
value: heightMeters !== undefined ? heightMeters.toString() + "m" : undefined,
offset: offset + 4
},
{
type: FieldType.STRING,
name: "gain (g)",
length: 1,
value: gainDbi !== undefined ? gainDbi.toString() + "dBi" : undefined
value: gainDbi !== undefined ? gainDbi.toString() + "dBi" : undefined,
offset: offset + 5
},
{
type: FieldType.STRING,
@@ -271,19 +387,16 @@ export const decodeCommentExtras = (comment: string, withStructure: boolean = fa
? typeof directivity === "number"
? directivity.toString() + "°"
: directivity
: undefined
: undefined,
offset: offset + 6
}
);
}
// remove DFS token from ext and advance ext for further parsing
if (commentBefore !== undefined && commentBefore.length > 0) {
commentBefore = commentBefore.substring(7);
} else {
commentOffset += 7;
}
ignore.push({ offset, length: 7 });
offset += 7;
ext = ext.substring(7).trimStart();
continue;
}
@@ -295,14 +408,22 @@ export const decodeCommentExtras = (comment: string, withStructure: boolean = fa
extras.spd = knotsToKmh(parseInt(speedStr, 10));
if (withStructure) {
(altitudeOffset !== undefined && commentOffset < altitudeOffset ? beforeFields : fields).push(
{ type: FieldType.STRING, name: "course", length: 3, value: extras.cse.toString() + "°" },
{ type: FieldType.CHAR, name: "marker", length: 1, value: "/" },
{ type: FieldType.STRING, name: "speed", length: 3, value: extras.spd.toString() + " km/h" }
fields.push(
{ type: FieldType.STRING, name: "course", length: 3, value: extras.cse.toString() + "°", offset: offset },
{ type: FieldType.CHAR, name: "CSE marker", length: 1, value: "/", offset: offset + 3 },
{
type: FieldType.STRING,
name: "speed",
length: 3,
value: extras.spd.toString() + " km/h",
offset: offset + 4
}
);
}
// remove course/speed token from comment and advance ext for further parsing
ignore.push({ offset, length: 7 });
offset += 7;
ext = ext.substring(7).trimStart();
// If there is an 8-byte DF/NRQ following (leading '/'), parse that too
@@ -319,20 +440,23 @@ export const decodeCommentExtras = (comment: string, withStructure: boolean = fa
extras.dfs.strength = dfStrength;
if (withStructure) {
(altitudeOffset !== undefined && commentOffset < altitudeOffset ? beforeFields : fields).push(
{ type: FieldType.STRING, name: "DF marker", length: 1, value: "/" },
{ type: FieldType.STRING, name: "bearing", length: 3, value: dfBearing.toString() + "°" },
{ type: FieldType.CHAR, name: "separator", length: 1, value: "/" },
{ type: FieldType.STRING, name: "strength", length: 3, value: dfStrength.toString() }
fields.push(
{ type: FieldType.STRING, name: "DFS marker", length: 1, value: "/", offset: offset },
{
type: FieldType.STRING,
name: "bearing",
length: 3,
value: dfBearing.toString() + "°",
offset: offset + 1
},
{ type: FieldType.CHAR, name: "separator", length: 1, value: "/", offset: offset + 4 },
{ type: FieldType.STRING, name: "strength", length: 3, value: dfStrength.toString(), offset: offset + 5 }
);
}
// remove DF token from ext and advance ext for further parsing
if (commentBefore !== undefined && commentBefore.length > 0) {
commentBefore = commentBefore.substring(8);
} else {
commentOffset += 8;
}
ignore.push({ offset, length: 8 });
offset += 8;
ext = ext.substring(8).trimStart();
continue;
@@ -345,133 +469,166 @@ export const decodeCommentExtras = (comment: string, withStructure: boolean = fa
break;
}
// Parse embedded telemetry in comment. Look for |ss11|, |ss1122|, |ss112233|, |ss1122334455|, or |ss1122334455!"| patterns (where ss is sequence and each pair of digits is an analog channel in base91, and optional last pair is digital channel in base91).
if ((match = comment.match(/\|([^|]+)\|/))) {
try {
const telemetry = decodeTelemetry(match[1]);
extras.telemetry = telemetry;
comment = comment.replace(match[0], "").trim();
if (withStructure) {
fields.push(
{
type: FieldType.CHAR,
name: "telemetry start",
length: 1,
value: "|"
},
{
type: FieldType.STRING,
name: "sequence",
length: 2,
value: telemetry.sequence.toString()
},
...telemetry.analog.map((a, i) => ({
type: FieldType.STRING,
name: `analog${i + 1}`,
length: 2,
value: a.toString()
})),
...(telemetry.digital !== undefined
? [
{
type: FieldType.STRING,
name: "digital",
length: 2,
value: telemetry.digital.toString()
}
]
: []),
{
type: FieldType.CHAR,
name: "telemetry end",
length: 1,
value: "|"
}
);
}
} catch {
// Invalid telemetry format, ignore
// Any tokens we marked for ignoring should be removed from the comment when reconstructing the cleaned comment string, since they have been extracted as structured fields. We will use the ignore offsets to skip over these tokens when reconstructing the comment.
ignore.sort((a, b) => a.offset - b.offset);
offset = 0;
ignore.forEach((token) => {
if (token.offset - offset > 0) {
fields.push({
type: FieldType.STRING,
name: "comment",
offset: offset,
length: token.offset - offset
});
}
offset = token.offset + token.length;
});
if (offset < comment.length) {
fields.push({
type: FieldType.STRING,
name: "comment",
offset,
length: comment.length - offset
});
}
// Export comment with extras fields removed, if any were parsed.
if (commentOffset > 0 && commentBefore !== undefined && commentBefore.length > 0) {
extras.comment = commentBefore.substring(commentOffset) + comment;
} else if (commentBefore !== undefined && commentBefore.length > 0) {
extras.comment = commentBefore + comment;
} else {
extras.comment = comment.substring(commentOffset);
}
// Aggregate the comment fragments into a single cleaned comment string with the recognized tokens removed.
fields.sort((a, b) => a.offset - b.offset); // Ensure fields are in order of appearance in the original comment
extras.comment = fields
.map((field) => {
if (field.name !== "comment" || field.offset === undefined || field.length === undefined) {
return ""; // Remove recognized tokens from comment
}
return comment.substring(field.offset, field.offset + field.length);
})
.join("")
.trim();
if (withStructure) {
const commentBeforeFields: Field[] = commentBefore
? [
{
type: FieldType.STRING,
name: "comment",
length: commentBefore.length
}
]
: [];
const commentFields: Field[] = comment
? [
{
type: FieldType.STRING,
name: "comment",
length: comment.length
}
]
: [];
// Insert the altitude fields at the correct position in the comment section based on where the altitude token was located in the original comment. If there was no altitude token, put all fields at the start of the comment section.
extras.fields = [...beforeFields, ...commentBeforeFields, ...altitudeFields, ...fields, ...commentFields];
extras.fields = fields;
}
return extras as Extras;
};
// A Field annotated with its absolute character position in the original comment
// string. The offset is used to sort fields into their order of appearance and to
// reconstruct the cleaned comment from the unrecognized "comment" fragments.
interface FieldWithOffset extends Field {
    offset: number;
}
/**
 * Copies decoded comment extras onto the payload.
 *
 * For payloads with a `position`, the DAO offsets are applied directly to the
 * coordinates (away from zero, so the added precision lands in the correct
 * hemisphere), and altitude/range/PHG/DFS are attached to the position while
 * course/speed only fill in when the position does not already carry them.
 * For payloads exposing the fields at the top level, extras never overwrite an
 * already-defined value.
 *
 * @param payload - The decoded packet payload to enrich (mutated in place).
 * @param extras - Extras previously decoded from the comment text.
 */
export const attachExtras = (payload: Payload, extras: Extras): void => {
    if ("position" in payload && payload.position) {
        if (extras.dao !== undefined) {
            payload.position.dao = extras.dao;
            // DAO offsets are magnitudes; subtract for negative (S/W) coordinates
            // and add for positive (N/E) ones so precision increases either way.
            if (payload.position.latitude !== undefined) {
                if (payload.position.latitude < 0) {
                    payload.position.latitude -= extras.dao.latitude || 0;
                } else {
                    payload.position.latitude += extras.dao.latitude || 0;
                }
            }
            if (payload.position.longitude !== undefined) {
                if (payload.position.longitude < 0) {
                    payload.position.longitude -= extras.dao.longitude || 0;
                } else {
                    payload.position.longitude += extras.dao.longitude || 0;
                }
            }
        }
        if (extras.altitude !== undefined) {
            payload.position.altitude = extras.altitude;
        }
        if (extras.range !== undefined) {
            payload.position.range = extras.range;
        }
        if (extras.phg !== undefined) {
            payload.position.phg = extras.phg;
        }
        if (extras.dfs !== undefined) {
            payload.position.dfs = extras.dfs;
        }
        if (extras.cse !== undefined && payload.position.course === undefined) {
            payload.position.course = extras.cse;
        }
        if (extras.spd !== undefined && payload.position.speed === undefined) {
            payload.position.speed = extras.spd;
        }
    }
    // Guard on extras.dao for consistency with the sibling branches below
    // (previously this branch assigned unconditionally).
    if ("dao" in payload && payload.dao === undefined && extras.dao !== undefined) {
        payload.dao = extras.dao;
    }
    if ("altitude" in payload && payload.altitude === undefined && extras.altitude !== undefined) {
        payload.altitude = extras.altitude;
    }
    if ("range" in payload && payload.range === undefined && extras.range !== undefined) {
        payload.range = extras.range;
    }
    if ("phg" in payload && payload.phg === undefined && extras.phg !== undefined) {
        payload.phg = extras.phg;
    }
    if ("dfs" in payload && payload.dfs === undefined && extras.dfs !== undefined) {
        payload.dfs = extras.dfs;
    }
    if ("course" in payload && payload.course === undefined && extras.cse !== undefined) {
        payload.course = extras.cse;
    }
    if ("speed" in payload && payload.speed === undefined && extras.spd !== undefined) {
        payload.speed = extras.spd;
    }
};
/**
 * Decodes the 3-character body of an APRS 1.2 !DAO! extension into datum and
 * added-precision latitude/longitude offsets (in degrees).
 *
 * @param ext - The three characters between the '!' delimiters.
 * @returns The decoded DAO, or undefined when the body is not a valid DAO.
 */
export const decodeDAO = (ext: string): IDAO | undefined => {
    if (ext.length !== 3) {
        return undefined;
    }
    const dao: IDAO = { datum_id: "?" };
    // eslint-disable-next-line no-useless-assignment
    let match: RegExpMatchArray | null = null;
    if ((match = /^([A-Z])(\d)(\d)$/.exec(ext))) {
        // Human-readable DAO (e.g. "W51"): uppercase datum byte (W = WGS84) plus one
        // extra decimal digit of minutes each for latitude and longitude.
        dao.datum_id = match[1];
        dao.resolution = getDAOResolution(3); // 3 minute-decimal digits
        // Each digit is the THIRD decimal of minutes: digit * 0.001 minutes,
        // converted to degrees (previously scaled by 0.01, i.e. 10x too coarse
        // for the declared resolution).
        dao.latitude = (parseInt(match[2], 10) * 0.001) / 60;
        dao.longitude = (parseInt(match[3], 10) * 0.001) / 60;
        return dao;
    } else if ((match = /^([a-z])([\x21-\x7b])([\x21-\x7b])$/.exec(ext))) {
        // Base91 DAO: lowercase datum byte plus two Base91 characters that subdivide
        // the last 0.01 minutes into 91 steps (~1e-4 minute resolution).
        dao.datum_id = match[1].toUpperCase();
        dao.resolution = getDAOResolution(4); // ~4 minute-decimal digits
        // Offset = (base91 value / 91) * 0.01 minutes, converted to degrees
        // (previously the /91 scaling was missing, overstating the offset ~91x).
        dao.latitude = ((base91ToNumber(match[2]) / 91) * 0.01) / 60;
        dao.longitude = ((base91ToNumber(match[3]) / 91) * 0.01) / 60;
        return dao;
    } else if ((match = /^([\x21-\x7b]) {2}$/.exec(ext))) {
        // Datum byte only (followed by two spaces): no added precision.
        dao.datum_id = match[1];
        if (/^[a-z]$/.test(dao.datum_id)) {
            dao.datum_id = dao.datum_id.toUpperCase();
        }
        return dao; // No resolution, just datum
    }
    return undefined; // Invalid DAO format
};
// Converts a count of minute-decimal digits (n) into a position resolution.
// One minute of latitude is one nautical mile (1852 m), so knotsToKmh doubles
// here as a minutes-to-meters scaler: knotsToKmh(1000 * 10^-n) yields the
// resolution in meters for n decimal digits of minutes (e.g. n=3 -> ~1.85 m).
// The n <= -2 branch (600 instead of 1000) covers resolutions coarser than a
// whole minute, where a degree spans 60 minutes. Mirrors Ham::APRS::FAP's
// _get_posresolution. NOTE(review): assumes knotsToKmh simply multiplies by
// 1.852 — confirm against its definition in ./parser.
const getDAOResolution = (n: number): number | undefined => {
    return knotsToKmh((n <= -2 ? 600 : 1000) * 10 ** (-1 * n));
};
/**
* Decodes a Base91 Telemetry extension string (delimited by '|') into its components.
*