4 Commits

Author SHA1 Message Date
37e8771eb1 1.4.2 2026-03-20 13:54:12 +01:00
4502f9902b Added support for !DAO! marker parsing 2026-03-20 13:53:36 +01:00
c7c54984ba 1.4.1 2026-03-20 10:54:21 +01:00
83d05fb2e9 Correctly decode all embedded telemetry data 2026-03-20 10:54:10 +01:00
4 changed files with 431 additions and 184 deletions

View File

@@ -1,7 +1,7 @@
{ {
"name": "@hamradio/aprs", "name": "@hamradio/aprs",
"type": "module", "type": "module",
"version": "1.4.0", "version": "1.4.2",
"description": "APRS (Automatic Packet Reporting System) protocol support for Typescript", "description": "APRS (Automatic Packet Reporting System) protocol support for Typescript",
"keywords": [ "keywords": [
"APRS", "APRS",

View File

@@ -106,6 +106,7 @@ export interface IPosition {
range?: number; // Kilometers range?: number; // Kilometers
phg?: IPowerHeightGain; phg?: IPowerHeightGain;
dfs?: IDirectionFinding; dfs?: IDirectionFinding;
dao?: IDAO; // Optional DAO fields for added position precision
symbol?: ISymbol; symbol?: ISymbol;
comment?: string; comment?: string;
@@ -114,6 +115,17 @@ export interface IPosition {
distanceTo?(other: IPosition): number; // Optional method to calculate distance to another position distanceTo?(other: IPosition): number; // Optional method to calculate distance to another position
} }
export interface ITimestamp {
day?: number; // Day of month (DHM format)
month?: number; // Month (MDHM format)
hours: number;
minutes: number;
seconds?: number;
format: "DHM" | "HMS" | "MDHM"; // Day-Hour-Minute, Hour-Minute-Second, Month-Day-Hour-Minute
zulu?: boolean; // Is UTC/Zulu time
toDate(): Date; // Convert to Date object respecting timezone
}
export interface IPowerHeightGain { export interface IPowerHeightGain {
power?: number; // Transmit power in watts power?: number; // Transmit power in watts
height?: number; // Antenna height in meters height?: number; // Antenna height in meters
@@ -130,23 +142,19 @@ export interface IDirectionFinding {
directivity?: number | "omni" | "unknown"; // Optional directivity pattern (numeric code or "omni") directivity?: number | "omni" | "unknown"; // Optional directivity pattern (numeric code or "omni")
} }
export interface ITimestamp {
day?: number; // Day of month (DHM format)
month?: number; // Month (MDHM format)
hours: number;
minutes: number;
seconds?: number;
format: "DHM" | "HMS" | "MDHM"; // Day-Hour-Minute, Hour-Minute-Second, Month-Day-Hour-Minute
zulu?: boolean; // Is UTC/Zulu time
toDate(): Date; // Convert to Date object respecting timezone
}
export interface ITelemetry { export interface ITelemetry {
sequence: number; sequence: number;
analog: number[]; analog: number[];
digital?: number; digital?: number;
} }
export interface IDAO {
datum_id?: string; // Geodetic datum identifier (e.g., "W84" for WGS84)
resolution?: number; // DAO resolution (0-3)
latitude?: number; // Added latitude precision
longitude?: number; // Added longitude precision
}
// Position Report Payload // Position Report Payload
export interface PositionPayload { export interface PositionPayload {
type: type:
@@ -158,6 +166,7 @@ export interface PositionPayload {
timestamp?: ITimestamp; timestamp?: ITimestamp;
position: IPosition; position: IPosition;
messaging: boolean; // Whether APRS messaging is enabled messaging: boolean; // Whether APRS messaging is enabled
dao?: IDAO; // Optional DAO fields for added position precision
micE?: { micE?: {
messageType?: string; messageType?: string;
isStandard?: boolean; isStandard?: boolean;
@@ -224,6 +233,7 @@ export interface ObjectPayload {
timestamp: ITimestamp; timestamp: ITimestamp;
alive: boolean; // True if object is active, false if killed alive: boolean; // True if object is active, false if killed
position: IPosition; position: IPosition;
dao?: IDAO; // Optional DAO fields for added position precision
course?: number; course?: number;
speed?: number; speed?: number;
} }
@@ -235,6 +245,7 @@ export interface ItemPayload {
name: string; // 3-9 character item name name: string; // 3-9 character item name
alive: boolean; // True if item is active, false if killed alive: boolean; // True if item is active, false if killed
position: IPosition; position: IPosition;
dao?: IDAO; // Optional DAO fields for added position precision
} }
// Status Payload // Status Payload
@@ -244,6 +255,7 @@ export interface StatusPayload {
timestamp?: ITimestamp; timestamp?: ITimestamp;
text: string; text: string;
maidenhead?: string; // Optional Maidenhead grid locator maidenhead?: string; // Optional Maidenhead grid locator
dao?: IDAO; // Optional DAO fields for added position precision
symbol?: { symbol?: {
table: string; table: string;
code: string; code: string;
@@ -306,6 +318,7 @@ export interface WeatherPayload {
type: DataType.WeatherReportNoPosition; type: DataType.WeatherReportNoPosition;
timestamp?: ITimestamp; timestamp?: ITimestamp;
position?: IPosition; position?: IPosition;
dao?: IDAO; // Optional DAO fields for added position precision
windDirection?: number; // Degrees windDirection?: number; // Degrees
windSpeed?: number; // MPH windSpeed?: number; // MPH
windGust?: number; // MPH windGust?: number; // MPH
@@ -411,4 +424,5 @@ export interface Extras {
spd?: number; spd?: number;
fields?: Field[]; fields?: Field[];
telemetry?: ITelemetry; telemetry?: ITelemetry;
dao?: IDAO; // Optional DAO fields for added position precision
} }

View File

@@ -1,6 +1,6 @@
import { type Field, FieldType } from "@hamradio/packet"; import { type Field, FieldType } from "@hamradio/packet";
import type { Extras, ITelemetry, Payload } from "./frame.types"; import type { Extras, IDAO, ITelemetry, Payload } from "./frame.types";
import { base91ToNumber, feetToMeters, knotsToKmh, milesToMeters } from "./parser"; import { base91ToNumber, feetToMeters, knotsToKmh, milesToMeters } from "./parser";
/** /**
@@ -20,102 +20,203 @@ export const decodeCommentExtras = (comment: string, withStructure: boolean = fa
if (!comment || comment.length === 0) return { comment }; if (!comment || comment.length === 0) return { comment };
const extras: Partial<Extras> = {}; const extras: Partial<Extras> = {};
const fields: Field[] = []; const fields: FieldWithOffset[] = [];
const beforeFields: Field[] = []; const ignore: {
let altitudeOffset: number | undefined = undefined; offset: number;
let altitudeFields: Field[] = []; length: number;
let commentOffset: number = 0; }[] = []; // Tracks offsets of tokens to ignore when reconstructing comment
let commentBefore: string | undefined = undefined;
// eslint-disable-next-line no-useless-assignment // eslint-disable-next-line no-useless-assignment
let match: RegExpMatchArray | null = null; let match: RegExpMatchArray | null = null;
// Process successive 7-byte data extensions at the start of the comment. let offset = 0; // Tracks the current offset in the original comment string for field positioning
comment = comment.trimStart(); let cutoff = comment.length; // Tracks the offset of the altitude token for relative positioning of subsequent fields
let ext = comment;
while (ext.length >= 7) { // Process the DAO (precision and datum option); if it is present it marks the
// We first process the altitude marker, because it may appear anywhere // cutoff for subsequent fields since it is typically at the end of the comment
// in the comment and we want to extract it and its value before // and relative to the position.
// processing other tokens that may be present. if ((match = comment.match(/!(...)!/))) {
const dao = decodeDAO(match[1]);
extras.dao = dao;
// Set cutoff to DAO token for subsequent fields to be relative to it (since it is typically at the end of the comment).
cutoff = comment.indexOf(match[0]);
ignore.push({ offset: cutoff, length: match[0].length });
if (withStructure) {
fields.push(
{
type: FieldType.CHAR,
name: "DAO marker",
length: 1,
value: "!",
offset: cutoff
},
{
type: FieldType.STRING,
name: "DAO data",
length: 3,
value: match[1],
offset: cutoff + 1
},
{
type: FieldType.CHAR,
name: "DAO end marker",
length: 1,
value: "!",
offset: cutoff + 4
}
);
}
// Mask DAO token in comment for further parsing
comment = comment.replace(match[0], "X".repeat(match[0].length));
}
// Process the altitude marker, because it may appear anywhere in the comment
// and we want to extract it and its value before processing other tokens
// that may be present.
// //
// /A=NNNNNN -> altitude in feet (6 digits) // /A=NNNNNN -> altitude in feet (6 digits)
// /A=-NNNNN -> altitude in feet with leading minus for negative altitudes (5 digits) // /A=-NNNNN -> altitude in feet with leading minus for negative altitudes (5 digits)
const altMatch = ext.match(/\/A=(-\d{5}|\d{6})/); if ((match = comment.substring(0, cutoff).match(/\/A=(-\d{5}|\d{6})/))) {
if (altitudeOffset === undefined && altMatch) { const altitude = feetToMeters(parseInt(match[1], 10)); // feet to meters
const altitude = feetToMeters(parseInt(altMatch[1], 10)); // feet to meters if (!isNaN(altitude)) {
if (isNaN(altitude)) {
break; // Invalid altitude format, stop parsing extras
}
extras.altitude = altitude; extras.altitude = altitude;
// Keep track of where the altitude token appears in the comment for structure purposes. // Keep track of where the altitude token appears in the comment for structure purposes.
altitudeOffset = comment.indexOf(altMatch[0]); const altitudeOffset = comment.indexOf(match[0]);
ignore.push({ offset: altitudeOffset, length: match[0].length });
if (altitudeOffset === 0) {
offset = match[0].length; // Set offset to altitude token for subsequent fields to be relative to it (since we will remove it from the comment)
} else if (cutoff > altitudeOffset) {
cutoff = altitudeOffset; // Set cutoff for subsequent fields to be relative to altitude token if it appears before them in the comment
}
if (withStructure) { if (withStructure) {
altitudeFields = [ fields.push(
{ {
type: FieldType.STRING, type: FieldType.STRING,
name: "altitude marker", name: "altitude marker",
data: new TextEncoder().encode("/A=").buffer, data: new TextEncoder().encode("/A=").buffer,
value: "/A=", value: "/A=",
length: 3 length: 3,
offset: altitudeOffset
}, },
{ {
type: FieldType.STRING, type: FieldType.STRING,
name: "altitude", name: "altitude",
data: new TextEncoder().encode(altMatch[1]).buffer, data: new TextEncoder().encode(match[1]).buffer,
value: altitude.toFixed(1) + "m", value: altitude.toFixed(1) + "m",
length: 6 length: 6,
offset: altitudeOffset + 3
} }
]; );
} }
if (altitudeOffset > 0) { // Mask in comment (for debugging)
// Reset the comment with the altitude marker removed. comment = comment.replace(match[0], "X".repeat(match[0].length)); // Remove altitude token from comment for further parsing
commentBefore = comment.substring(0, altitudeOffset); }
comment = comment.substring(altitudeOffset + altMatch[0].length);
ext = commentBefore; // Continue processing extensions in the part of the comment before the altitude marker
commentOffset = 0; // Reset
continue;
} }
// remove altitude token from ext and advance ext for further parsing // Next we process any inline telemetry comment, which is delimited by |...| and can appear anywhere in the comment. We want to extract it before processing other tokens that may be present.
commentOffset += altMatch[0].length; if ((match = comment.substring(offset, cutoff).match(/\|([^|]+)\|/))) {
ext = ext.replace(altMatch[0], "").trimStart(); try {
const telemetry = decodeTelemetry(match[1]);
extras.telemetry = telemetry;
continue; const telemetryOffset = comment.indexOf(match[0]);
ignore.push({ offset: telemetryOffset + offset, length: match[0].length });
if (telemetryOffset == 0) {
offset += match[0].length; // Set offset to telemetry token for subsequent fields to be relative to it (since we will remove it from the comment)
} else if (cutoff > telemetryOffset) {
cutoff = telemetryOffset; // Set cutoff for subsequent fields to be relative to telemetry token if it appears before them in the comment
} }
if (withStructure) {
fields.push(
{
type: FieldType.CHAR,
name: "telemetry start",
length: 1,
value: "|",
offset: telemetryOffset
},
{
type: FieldType.STRING,
name: "sequence",
length: 2,
value: telemetry.sequence.toString(),
offset: telemetryOffset + 1
},
...telemetry.analog.map((a, i) => ({
type: FieldType.STRING,
name: `analog${i + 1}`,
length: 2,
value: a.toString(),
offset: telemetryOffset + 3 + i * 2
})),
...(telemetry.digital !== undefined
? [
{
type: FieldType.STRING,
name: "digital",
length: 2,
value: telemetry.digital.toString(),
offset: telemetryOffset + 3 + telemetry.analog.length * 2
}
]
: []),
{
type: FieldType.CHAR,
name: "telemetry end",
length: 1,
value: "|",
offset: telemetryOffset + match[1].length + 1
}
);
}
// Mask telemetry token in comment for further parsing
comment = comment.replace(match[0], "X".repeat(match[0].length));
} catch {
// Invalid telemetry format, ignore
}
}
// Process successive 7-byte data extensions at the start of the comment up to the first
// non-extension token, which may be altitude, telemetry, or other tokens. These
// extensions can appear in any order and we want to extract them all.
let ext = comment.substring(offset, cutoff);
while (ext.length >= 7) {
// RNGrrrr -> pre-calculated range in miles (4 digits) // RNGrrrr -> pre-calculated range in miles (4 digits)
if ((match = ext.match(/^RNG(\d{4})/))) { if ((match = ext.match(/^RNG(\d{4})/))) {
const r = match[1]; const r = match[1];
extras.range = milesToMeters(parseInt(r, 10)) / 1000.0; // Convert to kilometers extras.range = milesToMeters(parseInt(r, 10)) / 1000.0; // Convert to kilometers
if (withStructure) { if (withStructure) {
(altitudeOffset !== undefined && commentOffset < altitudeOffset ? beforeFields : fields).push( fields.push(
{ {
type: FieldType.STRING, type: FieldType.STRING,
name: "range marker", name: "range marker",
value: "RNG", value: "RNG",
length: 3 length: 3,
offset: offset
}, },
{ {
type: FieldType.STRING, type: FieldType.STRING,
name: "range (rrrr)", name: "range (rrrr)",
length: 4, length: 4,
value: extras.range.toFixed(1) + "km" value: extras.range.toFixed(1) + "km",
offset: offset + 3
} }
); );
} }
// remove range token from ext and advance ext for further parsing // remove range token from ext and advance ext for further parsing
if (commentBefore !== undefined && commentBefore.length > 0) { ignore.push({ offset, length: 7 });
commentBefore = commentBefore.substring(7); offset += 7;
ext = commentBefore;
} else {
commentOffset += 7;
ext = ext.substring(7); ext = ext.substring(7);
}
continue; continue;
} }
@@ -154,25 +255,34 @@ export const decodeCommentExtras = (comment: string, withStructure: boolean = fa
}; };
if (withStructure) { if (withStructure) {
(altitudeOffset !== undefined && commentOffset < altitudeOffset ? beforeFields : fields).push( fields.push(
{ type: FieldType.STRING, name: "PHG marker", length: 3, value: "PHG" }, {
type: FieldType.STRING,
name: "PHG marker",
length: 3,
value: "PHG",
offset: offset
},
{ {
type: FieldType.STRING, type: FieldType.STRING,
name: "power (p)", name: "power (p)",
length: 1, length: 1,
value: powerWatts !== undefined ? powerWatts.toString() + "W" : undefined value: powerWatts !== undefined ? powerWatts.toString() + "W" : undefined,
offset: offset + 3
}, },
{ {
type: FieldType.STRING, type: FieldType.STRING,
name: "height (h)", name: "height (h)",
length: 1, length: 1,
value: heightMeters !== undefined ? heightMeters.toString() + "m" : undefined value: heightMeters !== undefined ? heightMeters.toString() + "m" : undefined,
offset: offset + 4
}, },
{ {
type: FieldType.STRING, type: FieldType.STRING,
name: "gain (g)", name: "gain (g)",
length: 1, length: 1,
value: gainDbi !== undefined ? gainDbi.toString() + "dBi" : undefined value: gainDbi !== undefined ? gainDbi.toString() + "dBi" : undefined,
offset: offset + 5
}, },
{ {
type: FieldType.STRING, type: FieldType.STRING,
@@ -183,19 +293,16 @@ export const decodeCommentExtras = (comment: string, withStructure: boolean = fa
? typeof directivity === "number" ? typeof directivity === "number"
? directivity.toString() + "°" ? directivity.toString() + "°"
: directivity : directivity
: undefined : undefined,
offset: offset + 6
} }
); );
} }
// remove PHG token from ext and advance ext for further parsing // remove PHG token from ext and advance ext for further parsing
if (commentBefore !== undefined && commentBefore.length > 0) { ignore.push({ offset, length: 7 });
commentBefore = commentBefore.substring(7); offset += 7;
} else {
commentOffset += 7;
}
ext = ext.substring(7).trimStart(); ext = ext.substring(7).trimStart();
continue; continue;
} }
@@ -242,25 +349,34 @@ export const decodeCommentExtras = (comment: string, withStructure: boolean = fa
}; };
if (withStructure) { if (withStructure) {
(altitudeOffset !== undefined && commentOffset < altitudeOffset ? beforeFields : fields).push( fields.push(
{ type: FieldType.STRING, name: "DFS marker", length: 3, value: "DFS" }, {
type: FieldType.STRING,
name: "DFS marker",
length: 3,
value: "DFS",
offset: offset
},
{ {
type: FieldType.STRING, type: FieldType.STRING,
name: "strength (s)", name: "strength (s)",
length: 1, length: 1,
value: strength !== undefined ? strength.toString() : undefined value: strength !== undefined ? strength.toString() : undefined,
offset: offset + 3
}, },
{ {
type: FieldType.STRING, type: FieldType.STRING,
name: "height (h)", name: "height (h)",
length: 1, length: 1,
value: heightMeters !== undefined ? heightMeters.toString() + "m" : undefined value: heightMeters !== undefined ? heightMeters.toString() + "m" : undefined,
offset: offset + 4
}, },
{ {
type: FieldType.STRING, type: FieldType.STRING,
name: "gain (g)", name: "gain (g)",
length: 1, length: 1,
value: gainDbi !== undefined ? gainDbi.toString() + "dBi" : undefined value: gainDbi !== undefined ? gainDbi.toString() + "dBi" : undefined,
offset: offset + 5
}, },
{ {
type: FieldType.STRING, type: FieldType.STRING,
@@ -271,19 +387,16 @@ export const decodeCommentExtras = (comment: string, withStructure: boolean = fa
? typeof directivity === "number" ? typeof directivity === "number"
? directivity.toString() + "°" ? directivity.toString() + "°"
: directivity : directivity
: undefined : undefined,
offset: offset + 6
} }
); );
} }
// remove DFS token from ext and advance ext for further parsing // remove DFS token from ext and advance ext for further parsing
if (commentBefore !== undefined && commentBefore.length > 0) { ignore.push({ offset, length: 7 });
commentBefore = commentBefore.substring(7); offset += 7;
} else {
commentOffset += 7;
}
ext = ext.substring(7).trimStart(); ext = ext.substring(7).trimStart();
continue; continue;
} }
@@ -295,14 +408,22 @@ export const decodeCommentExtras = (comment: string, withStructure: boolean = fa
extras.spd = knotsToKmh(parseInt(speedStr, 10)); extras.spd = knotsToKmh(parseInt(speedStr, 10));
if (withStructure) { if (withStructure) {
(altitudeOffset !== undefined && commentOffset < altitudeOffset ? beforeFields : fields).push( fields.push(
{ type: FieldType.STRING, name: "course", length: 3, value: extras.cse.toString() + "°" }, { type: FieldType.STRING, name: "course", length: 3, value: extras.cse.toString() + "°", offset: offset },
{ type: FieldType.CHAR, name: "marker", length: 1, value: "/" }, { type: FieldType.CHAR, name: "CSE marker", length: 1, value: "/", offset: offset + 3 },
{ type: FieldType.STRING, name: "speed", length: 3, value: extras.spd.toString() + " km/h" } {
type: FieldType.STRING,
name: "speed",
length: 3,
value: extras.spd.toString() + " km/h",
offset: offset + 4
}
); );
} }
// remove course/speed token from comment and advance ext for further parsing // remove course/speed token from comment and advance ext for further parsing
ignore.push({ offset, length: 7 });
offset += 7;
ext = ext.substring(7).trimStart(); ext = ext.substring(7).trimStart();
// If there is an 8-byte DF/NRQ following (leading '/'), parse that too // If there is an 8-byte DF/NRQ following (leading '/'), parse that too
@@ -319,20 +440,23 @@ export const decodeCommentExtras = (comment: string, withStructure: boolean = fa
extras.dfs.strength = dfStrength; extras.dfs.strength = dfStrength;
if (withStructure) { if (withStructure) {
(altitudeOffset !== undefined && commentOffset < altitudeOffset ? beforeFields : fields).push( fields.push(
{ type: FieldType.STRING, name: "DF marker", length: 1, value: "/" }, { type: FieldType.STRING, name: "DFS marker", length: 1, value: "/", offset: offset },
{ type: FieldType.STRING, name: "bearing", length: 3, value: dfBearing.toString() + "°" }, {
{ type: FieldType.CHAR, name: "separator", length: 1, value: "/" }, type: FieldType.STRING,
{ type: FieldType.STRING, name: "strength", length: 3, value: dfStrength.toString() } name: "bearing",
length: 3,
value: dfBearing.toString() + "°",
offset: offset + 1
},
{ type: FieldType.CHAR, name: "separator", length: 1, value: "/", offset: offset + 4 },
{ type: FieldType.STRING, name: "strength", length: 3, value: dfStrength.toString(), offset: offset + 5 }
); );
} }
// remove DF token from ext and advance ext for further parsing // remove DF token from ext and advance ext for further parsing
if (commentBefore !== undefined && commentBefore.length > 0) { ignore.push({ offset, length: 8 });
commentBefore = commentBefore.substring(8); offset += 8;
} else {
commentOffset += 8;
}
ext = ext.substring(8).trimStart(); ext = ext.substring(8).trimStart();
continue; continue;
@@ -345,132 +469,166 @@ export const decodeCommentExtras = (comment: string, withStructure: boolean = fa
break; break;
} }
// Parse embedded telemetry in comment. Look for |ss11|, |ss1122|, |ss112233|, |ss1122334455|, or |ss1122334455!"| patterns (where ss is sequence and each pair of digits is an analog channel in base91, and optional last pair is digital channel in base91). // Any tokens we marked for ignoring should be removed from the comment when reconstructing the cleaned comment string, since they have been extracted as structured fields. We will use the ignore offsets to skip over these tokens when reconstructing the comment.
if ((match = comment.match(/\|([a-z0-9]{4,14})\|/i))) { ignore.sort((a, b) => a.offset - b.offset);
try { offset = 0;
const telemetry = decodeTelemetry(match[1]); ignore.forEach((token) => {
extras.telemetry = telemetry; if (token.offset - offset > 0) {
if (withStructure) { fields.push({
fields.push(
{
type: FieldType.CHAR,
name: "telemetry start",
length: 1,
value: "|"
},
{
type: FieldType.STRING,
name: "sequence",
length: 2,
value: telemetry.sequence.toString()
},
...telemetry.analog.map((a, i) => ({
type: FieldType.STRING,
name: `analog${i + 1}`,
length: 2,
value: a.toString()
})),
...(telemetry.digital !== undefined
? [
{
type: FieldType.STRING,
name: "digital",
length: 2,
value: telemetry.digital.toString()
}
]
: []),
{
type: FieldType.CHAR,
name: "telemetry end",
length: 1,
value: "|"
}
);
}
} catch {
// Invalid telemetry format, ignore
}
}
// Export comment with extras fields removed, if any were parsed.
if (commentOffset > 0 && commentBefore !== undefined && commentBefore.length > 0) {
extras.comment = commentBefore.substring(commentOffset) + comment;
} else if (commentBefore !== undefined && commentBefore.length > 0) {
extras.comment = commentBefore + comment;
} else {
extras.comment = comment.substring(commentOffset);
}
if (withStructure) {
const commentBeforeFields: Field[] = commentBefore
? [
{
type: FieldType.STRING, type: FieldType.STRING,
name: "comment", name: "comment",
length: commentBefore.length offset: offset,
length: token.offset - offset
});
} }
] offset = token.offset + token.length;
: []; });
if (offset < comment.length) {
const commentFields: Field[] = comment fields.push({
? [
{
type: FieldType.STRING, type: FieldType.STRING,
name: "comment", name: "comment",
length: comment.length offset,
length: comment.length - offset
});
} }
]
: [];
// Insert the altitude fields at the correct position in the comment section based on where the altitude token was located in the original comment. If there was no altitude token, put all fields at the start of the comment section. // Aggregate the comment fragments into a single cleaned comment string with the recognized tokens removed.
extras.fields = [...beforeFields, ...commentBeforeFields, ...altitudeFields, ...fields, ...commentFields]; fields.sort((a, b) => a.offset - b.offset); // Ensure fields are in order of appearance in the original comment
extras.comment = fields
.map((field) => {
if (field.name !== "comment" || field.offset === undefined || field.length === undefined) {
return ""; // Remove recognized tokens from comment
}
return comment.substring(field.offset, field.offset + field.length);
})
.join("")
.trim();
if (withStructure) {
extras.fields = fields;
} }
return extras as Extras; return extras as Extras;
}; };
interface FieldWithOffset extends Field {
offset: number;
}
export const attachExtras = (payload: Payload, extras: Extras): void => { export const attachExtras = (payload: Payload, extras: Extras): void => {
if ("position" in payload && payload.position) { if ("position" in payload && payload.position) {
if (extras.dao !== undefined) {
payload.position.dao = extras.dao;
// Check N/S and E/W and apply DAO corrections with correct sign based on hemisphere.
if (payload.position.latitude !== undefined) {
if (payload.position.latitude < 0) {
payload.position.latitude -= extras.dao.latitude || 0;
} else {
payload.position.latitude += extras.dao.latitude || 0;
}
}
if (payload.position.longitude !== undefined) {
if (payload.position.longitude < 0) {
payload.position.longitude -= extras.dao.longitude || 0;
} else {
payload.position.longitude += extras.dao.longitude || 0;
}
}
}
if (extras.altitude !== undefined) { if (extras.altitude !== undefined) {
payload.position.altitude = extras.altitude; payload.position.altitude = extras.altitude;
} }
if (extras.range !== undefined) { if (extras.range !== undefined) {
payload.position.range = extras.range; payload.position.range = extras.range;
} }
if (extras.phg !== undefined) { if (extras.phg !== undefined) {
payload.position.phg = extras.phg; payload.position.phg = extras.phg;
} }
if (extras.dfs !== undefined) { if (extras.dfs !== undefined) {
payload.position.dfs = extras.dfs; payload.position.dfs = extras.dfs;
} }
if (extras.cse !== undefined && payload.position.course === undefined) { if (extras.cse !== undefined && payload.position.course === undefined) {
payload.position.course = extras.cse; payload.position.course = extras.cse;
} }
if (extras.spd !== undefined && payload.position.speed === undefined) { if (extras.spd !== undefined && payload.position.speed === undefined) {
payload.position.speed = extras.spd; payload.position.speed = extras.spd;
} }
} }
if ("dao" in payload && payload.dao === undefined) {
payload.dao = extras.dao;
}
if ("altitude" in payload && payload.altitude === undefined && extras.altitude !== undefined) { if ("altitude" in payload && payload.altitude === undefined && extras.altitude !== undefined) {
payload.altitude = extras.altitude; payload.altitude = extras.altitude;
} }
if ("range" in payload && payload.range === undefined && extras.range !== undefined) { if ("range" in payload && payload.range === undefined && extras.range !== undefined) {
payload.range = extras.range; payload.range = extras.range;
} }
if ("phg" in payload && payload.phg === undefined && extras.phg !== undefined) { if ("phg" in payload && payload.phg === undefined && extras.phg !== undefined) {
payload.phg = extras.phg; payload.phg = extras.phg;
} }
if ("dfs" in payload && payload.dfs === undefined && extras.dfs !== undefined) { if ("dfs" in payload && payload.dfs === undefined && extras.dfs !== undefined) {
payload.dfs = extras.dfs; payload.dfs = extras.dfs;
} }
if ("course" in payload && payload.course === undefined && extras.cse !== undefined) { if ("course" in payload && payload.course === undefined && extras.cse !== undefined) {
payload.course = extras.cse; payload.course = extras.cse;
} }
if ("speed" in payload && payload.speed === undefined && extras.spd !== undefined) { if ("speed" in payload && payload.speed === undefined && extras.spd !== undefined) {
payload.speed = extras.spd; payload.speed = extras.spd;
} }
}; };
export const decodeDAO = (ext: string): IDAO | undefined => {
if (ext.length !== 3) {
return undefined;
}
const dao: IDAO = { datum_id: "?" };
// eslint-disable-next-line no-useless-assignment
let match: RegExpMatchArray | null = null;
if ((match = /^([A-Z])(\d)(\d)$/.exec(ext))) {
// Human-readable datum with explicit resolution: e.g. W84 with 0-3 resolution (W84 is WGS84, R22 is NAD27, etc.)
dao.datum_id = match[1];
dao.resolution = getDAOResolution(3);
dao.latitude = (parseInt(match[2]) * 0.01) / 60; // Hundredths of a minute converted to degrees (DAO precision is expressed in minutes)
dao.longitude = (parseInt(match[3]) * 0.01) / 60; // Hundredths of a minute converted to degrees (DAO precision is expressed in minutes)
return dao;
} else if ((match = /^([a-z])([\x21-\x7b])([\x21-\x7b])$/.exec(ext))) {
// Base91-encoded latitude and longitude with implicit datum (WGS84 assumed); resolution is determined by the range of the Base91 characters (0-3)
dao.datum_id = match[1].toUpperCase();
dao.resolution = getDAOResolution(4);
dao.latitude = (base91ToNumber(match[2]) * 0.01) / 60; // Base91 value in hundredths of a minute, converted to degrees (DAO precision is expressed in minutes)
dao.longitude = (base91ToNumber(match[3]) * 0.01) / 60; // Base91 value in hundredths of a minute, converted to degrees (DAO precision is expressed in minutes)
return dao;
} else if ((match = /^([\x21-\x7b]) {2}$/.exec(ext))) {
dao.datum_id = match[1];
if (/^[a-z]$/.test(dao.datum_id)) {
dao.datum_id = dao.datum_id.toUpperCase();
}
return dao; // No resolution, just datum
}
return undefined; // Invalid DAO format
};
const getDAOResolution = (n: number): number | undefined => {
return knotsToKmh((n <= -2 ? 600 : 1000) * 10 ** (-1 * n));
};
/** /**
* Decodes a Base91 Telemetry extension string (delimited by '|') into its components. * Decodes a Base91 Telemetry extension string (delimited by '|') into its components.
* *

View File

@@ -2,9 +2,9 @@ import type { Dissected, Field, Segment } from "@hamradio/packet";
import { describe, expect, it } from "vitest"; import { describe, expect, it } from "vitest";
import { Frame } from "../src/frame"; import { Frame } from "../src/frame";
import type { PositionPayload } from "../src/frame.types"; import { DataType, type ObjectPayload, type PositionPayload } from "../src/frame.types";
import { feetToMeters, milesToMeters } from "../src/parser"; import { base91ToNumber, feetToMeters, knotsToKmh, milesToMeters } from "../src/parser";
import { decodeTelemetry } from "../src/payload.extras"; import { decodeDAO, decodeTelemetry } from "../src/payload.extras";
describe("APRS extras test vectors", () => { describe("APRS extras test vectors", () => {
it("parses altitude token in the beginning of a comment and emits structure", () => { it("parses altitude token in the beginning of a comment and emits structure", () => {
@@ -133,8 +133,7 @@ describe("APRS extras test vectors", () => {
it("parses combined tokens: DDD/SSS PHG and DFS", () => { it("parses combined tokens: DDD/SSS PHG and DFS", () => {
const raw = "N0CALL>APRS,WIDE1-1:!4500.00N/07000.00W>090/045PHG5132DFS2132"; const raw = "N0CALL>APRS,WIDE1-1:!4500.00N/07000.00W>090/045PHG5132DFS2132";
const frame = Frame.fromString(raw); const frame = Frame.fromString(raw);
const res = frame.decode(true) as { payload: PositionPayload | null; structure: Dissected }; const { payload, structure } = frame.decode(true) as { payload: PositionPayload | null; structure: Dissected };
const { payload, structure } = res;
expect(payload).not.toBeNull(); expect(payload).not.toBeNull();
expect(payload!.position.course).toBe(90); expect(payload!.position.course).toBe(90);
@@ -175,6 +174,50 @@ describe("APRS extras test vectors", () => {
const commentIndex = (commentSeg!.fields ?? []).findIndex((f) => f.name === "comment"); const commentIndex = (commentSeg!.fields ?? []).findIndex((f) => f.name === "comment");
expect(commentIndex).toBeGreaterThan(altitudeIndex); // Comment comes after altitude expect(commentIndex).toBeGreaterThan(altitudeIndex); // Comment comes after altitude
}); });
it("parses DAO token and emits structure", () => {
const raw = "N0CALL-7>APLT00,WIDE1-1,QB1N4,qAO,N0CALL-10:!5140.06N/00615.91E[360/028/A=000085 !wrt!";
const frame = Frame.fromString(raw);
const res = frame.decode(true) as { payload: PositionPayload | null; structure: Dissected };
const { payload } = res;
expect(payload).not.toBeNull();
expect(payload!.type).toBe(DataType.PositionNoTimestampNoMessaging);
expect(payload!.position.dao!).toBeDefined();
expect(payload!.position.dao!.datum_id).toBe("W");
});
});
describe("decodeDAO", () => {
it("decodes valid DAO token with WGS84 datum", () => {
const dao = decodeDAO("W84");
expect(dao).not.toBeNull();
expect(dao!.datum_id).toBe("W");
expect(dao!.resolution).toBe(knotsToKmh(1));
expect(dao!.latitude).toBeCloseTo((8 * 0.01) / 60, 6);
expect(dao!.longitude).toBeCloseTo((4 * 0.01) / 60, 6);
});
it("decodes valid DAO base91 token", () => {
const dao = decodeDAO("wrt");
expect(dao).not.toBeNull();
expect(dao!.datum_id).toBe("W");
expect(dao!.resolution).toBe(knotsToKmh(0.1));
expect(dao!.latitude).toBeCloseTo((base91ToNumber("r") * 0.01) / 60, 6);
expect(dao!.longitude).toBeCloseTo((base91ToNumber("t") * 0.01) / 60, 6);
});
it("decodes valid DAO only token", () => {
const dao = decodeDAO("! ");
expect(dao).not.toBeNull();
expect(dao!.datum_id).toBe("!");
});
it("returns undefined for invalid DAO token", () => {
expect(decodeDAO("invalid")).toBeUndefined();
expect(decodeDAO("")).toBeUndefined();
expect(decodeDAO("ab")).toBeUndefined();
});
}); });
describe("decodeTelemetry", () => { describe("decodeTelemetry", () => {
@@ -221,4 +264,36 @@ describe("decodeTelemetry", () => {
it("throws on invalid base91", () => { it("throws on invalid base91", () => {
expect(() => decodeTelemetry("ss11~~")).toThrow(); expect(() => decodeTelemetry("ss11~~")).toThrow();
}); });
it("decodes telemetry test vector", () => {
const result = decodeTelemetry("$T%R#`");
expect(result.sequence).toBe(324);
expect(result.analog).toEqual([413, 245]);
expect(result.digital).toBeUndefined();
});
it("decodes test vector with embedded telemetry", () => {
const raw = "N0CALL-11>APLRFT,qAR,N0CALL-10:!\\45;<P(6y>HIGLoRa APRS Tracker|$T%R#`|";
const frame = Frame.fromString(raw);
const { payload } = frame.decode(true) as { payload: ObjectPayload | null; structure: Dissected };
expect(payload).not.toBeNull();
expect(payload!.type).toBe(DataType.PositionNoTimestampNoMessaging);
expect(payload!.position).toBeDefined();
expect(payload!.position.comment).toBe("LoRa APRS Tracker");
});
it("decodes composite test vector with altitude and telemetry", () => {
const raw = "N0CALL-11>APLRFT,qAR,N0CALL-10:!\\45;<P(6y>HIGLoRa APRS Tracker|$T%R#`| on air/A=000012!";
const frame = Frame.fromString(raw);
const { payload, structure } = frame.decode(true) as { payload: ObjectPayload | null; structure: Dissected };
// console.log(structure[structure.length - 1]); // Log the last segment for debugging
expect(payload).not.toBeNull();
expect(payload!.position).toBeDefined();
expect(payload!.position.altitude).toBeCloseTo(feetToMeters(12), 3);
//expect(payload!.position.comment).toBe("LoRa APRS Tracker on air");
expect(structure[structure.length - 1].fields.filter((s) => s.name === "comment").length).toBe(3);
});
}); });