2 Commits
v1.4.1 ... main

Author SHA1 Message Date
37e8771eb1 1.4.2 2026-03-20 13:54:12 +01:00
4502f9902b Added support for !DAO! marker parsing 2026-03-20 13:53:36 +01:00
4 changed files with 411 additions and 182 deletions

View File

@@ -1,7 +1,7 @@
{
"name": "@hamradio/aprs",
"type": "module",
"version": "1.4.1",
"version": "1.4.2",
"description": "APRS (Automatic Packet Reporting System) protocol support for Typescript",
"keywords": [
"APRS",

View File

@@ -106,6 +106,7 @@ export interface IPosition {
range?: number; // Kilometers
phg?: IPowerHeightGain;
dfs?: IDirectionFinding;
dao?: IDAO; // Optional DAO fields for added position precision
symbol?: ISymbol;
comment?: string;
@@ -114,6 +115,17 @@ export interface IPosition {
distanceTo?(other: IPosition): number; // Optional method to calculate distance to another position
}
// Decoded APRS timestamp. `format` discriminates which of the optional
// fields are meaningful for this report.
export interface ITimestamp {
  day?: number; // Day of month (DHM and MDHM formats)
  month?: number; // Month (MDHM format)
  hours: number;
  minutes: number;
  seconds?: number; // Seconds (HMS format only)
  format: "DHM" | "HMS" | "MDHM"; // Day-Hour-Minute, Hour-Minute-Second, Month-Day-Hour-Minute
  zulu?: boolean; // Is UTC/Zulu time
  toDate(): Date; // Convert to Date object respecting timezone
}
export interface IPowerHeightGain {
power?: number; // Transmit power in watts
height?: number; // Antenna height in meters
@@ -130,23 +142,19 @@ export interface IDirectionFinding {
directivity?: number | "omni" | "unknown"; // Optional directivity pattern (numeric code or "omni")
}
export interface ITimestamp {
day?: number; // Day of month (DHM format)
month?: number; // Month (MDHM format)
hours: number;
minutes: number;
seconds?: number;
format: "DHM" | "HMS" | "MDHM"; // Day-Hour-Minute, Hour-Minute-Second, Month-Day-Hour-Minute
zulu?: boolean; // Is UTC/Zulu time
toDate(): Date; // Convert to Date object respecting timezone
}
// Telemetry sample decoded from an inline `|...|` comment extension
// (see decodeTelemetry / decodeCommentExtras).
export interface ITelemetry {
  sequence: number; // Telemetry sequence counter (the leading "ss" pair of the token)
  analog: number[]; // Analog channel values, one per following character pair
  digital?: number; // Optional digital-bits channel (trailing pair, when present)
}
// Decoded APRS `!DAO!` extension: one datum byte plus two bytes that add
// extra latitude/longitude precision to the coarse position (see decodeDAO).
export interface IDAO {
  datum_id?: string; // Single datum byte of the !DAO! token (e.g. "W" for WGS84; lowercase base91-form bytes are reported upper-cased)
  resolution?: number; // Implied position resolution as computed by getDAOResolution — a scaled value, NOT the raw 0-3 precision code
  latitude?: number; // Added latitude precision, in degrees (fractional minutes / 60)
  longitude?: number; // Added longitude precision, in degrees (fractional minutes / 60)
}
// Position Report Payload
export interface PositionPayload {
type:
@@ -158,6 +166,7 @@ export interface PositionPayload {
timestamp?: ITimestamp;
position: IPosition;
messaging: boolean; // Whether APRS messaging is enabled
dao?: IDAO; // Optional DAO fields for added position precision
micE?: {
messageType?: string;
isStandard?: boolean;
@@ -224,6 +233,7 @@ export interface ObjectPayload {
timestamp: ITimestamp;
alive: boolean; // True if object is active, false if killed
position: IPosition;
dao?: IDAO; // Optional DAO fields for added position precision
course?: number;
speed?: number;
}
@@ -235,6 +245,7 @@ export interface ItemPayload {
name: string; // 3-9 character item name
alive: boolean; // True if item is active, false if killed
position: IPosition;
dao?: IDAO; // Optional DAO fields for added position precision
}
// Status Payload
@@ -244,6 +255,7 @@ export interface StatusPayload {
timestamp?: ITimestamp;
text: string;
maidenhead?: string; // Optional Maidenhead grid locator
dao?: IDAO; // Optional DAO fields for added position precision
symbol?: {
table: string;
code: string;
@@ -306,6 +318,7 @@ export interface WeatherPayload {
type: DataType.WeatherReportNoPosition;
timestamp?: ITimestamp;
position?: IPosition;
dao?: IDAO; // Optional DAO fields for added position precision
windDirection?: number; // Degrees
windSpeed?: number; // MPH
windGust?: number; // MPH
@@ -411,4 +424,5 @@ export interface Extras {
spd?: number;
fields?: Field[];
telemetry?: ITelemetry;
dao?: IDAO; // Optional DAO fields for added position precision
}

View File

@@ -1,6 +1,6 @@
import { type Field, FieldType } from "@hamradio/packet";
import type { Extras, ITelemetry, Payload } from "./frame.types";
import type { Extras, IDAO, ITelemetry, Payload } from "./frame.types";
import { base91ToNumber, feetToMeters, knotsToKmh, milesToMeters } from "./parser";
/**
@@ -20,102 +20,203 @@ export const decodeCommentExtras = (comment: string, withStructure: boolean = fa
if (!comment || comment.length === 0) return { comment };
const extras: Partial<Extras> = {};
const fields: Field[] = [];
const beforeFields: Field[] = [];
let altitudeOffset: number | undefined = undefined;
let altitudeFields: Field[] = [];
let commentOffset: number = 0;
let commentBefore: string | undefined = undefined;
const fields: FieldWithOffset[] = [];
const ignore: {
offset: number;
length: number;
}[] = []; // Tracks offsets of tokens to ignore when reconstructing comment
// eslint-disable-next-line no-useless-assignment
let match: RegExpMatchArray | null = null;
// Process successive 7-byte data extensions at the start of the comment.
comment = comment.trimStart();
let ext = comment;
while (ext.length >= 7) {
// We first process the altitude marker, because it may appear anywhere
// in the comment and we want to extract it and its value before
// processing other tokens that may be present.
//
// /A=NNNNNN -> altitude in feet (6 digits)
// /A=-NNNNN -> altitude in feet with leading minus for negative altitudes (5 digits)
const altMatch = ext.match(/\/A=(-\d{5}|\d{6})/);
if (altitudeOffset === undefined && altMatch) {
const altitude = feetToMeters(parseInt(altMatch[1], 10)); // feet to meters
if (isNaN(altitude)) {
break; // Invalid altitude format, stop parsing extras
}
let offset = 0; // Tracks the current offset in the original comment string for field positioning
let cutoff = comment.length; // Tracks the offset of the altitude token for relative positioning of subsequent fields
// Process the DAO (precision and datum option); if it is present it marks the
// cutoff for subsequent fields since it is typically at the end of the comment
// and relative to the position.
if ((match = comment.match(/!(...)!/))) {
const dao = decodeDAO(match[1]);
extras.dao = dao;
// Set cutoff to DAO token for subsequent fields to be relative to it (since it is typically at the end of the comment).
cutoff = comment.indexOf(match[0]);
ignore.push({ offset: cutoff, length: match[0].length });
if (withStructure) {
fields.push(
{
type: FieldType.CHAR,
name: "DAO marker",
length: 1,
value: "!",
offset: cutoff
},
{
type: FieldType.STRING,
name: "DAO data",
length: 3,
value: match[1],
offset: cutoff + 1
},
{
type: FieldType.CHAR,
name: "DAO end marker",
length: 1,
value: "!",
offset: cutoff + 4
}
);
}
// Mask DAO token in comment for further parsing
comment = comment.replace(match[0], "X".repeat(match[0].length));
}
// Process the altitude marker, because it may appear anywhere in the comment
// and we want to extract it and its value before processing other tokens
// that may be present.
//
// /A=NNNNNN -> altitude in feet (6 digits)
// /A=-NNNNN -> altitude in feet with leading minus for negative altitudes (5 digits)
if ((match = comment.substring(0, cutoff).match(/\/A=(-\d{5}|\d{6})/))) {
const altitude = feetToMeters(parseInt(match[1], 10)); // feet to meters
if (!isNaN(altitude)) {
extras.altitude = altitude;
// Keep track of where the altitude token appears in the comment for structure purposes.
altitudeOffset = comment.indexOf(altMatch[0]);
const altitudeOffset = comment.indexOf(match[0]);
ignore.push({ offset: altitudeOffset, length: match[0].length });
if (altitudeOffset === 0) {
offset = match[0].length; // Set offset to altitude token for subsequent fields to be relative to it (since we will remove it from the comment)
} else if (cutoff > altitudeOffset) {
cutoff = altitudeOffset; // Set cutoff for subsequent fields to be relative to altitude token if it appears before them in the comment
}
if (withStructure) {
altitudeFields = [
fields.push(
{
type: FieldType.STRING,
name: "altitude marker",
data: new TextEncoder().encode("/A=").buffer,
value: "/A=",
length: 3
length: 3,
offset: altitudeOffset
},
{
type: FieldType.STRING,
name: "altitude",
data: new TextEncoder().encode(altMatch[1]).buffer,
data: new TextEncoder().encode(match[1]).buffer,
value: altitude.toFixed(1) + "m",
length: 6
length: 6,
offset: altitudeOffset + 3
}
];
);
}
if (altitudeOffset > 0) {
// Reset the comment with the altitude marker removed.
commentBefore = comment.substring(0, altitudeOffset);
comment = comment.substring(altitudeOffset + altMatch[0].length);
ext = commentBefore; // Continue processing extensions in the part of the comment before the altitude marker
commentOffset = 0; // Reset
continue;
}
// remove altitude token from ext and advance ext for further parsing
commentOffset += altMatch[0].length;
ext = ext.replace(altMatch[0], "").trimStart();
continue;
// Mask in comment (for debugging)
comment = comment.replace(match[0], "X".repeat(match[0].length)); // Remove altitude token from comment for further parsing
}
}
// Next we process any inline telemetry comment, which is delimited by |...| and can appear anywhere in the comment. We want to extract it before processing other tokens that may be present.
if ((match = comment.substring(offset, cutoff).match(/\|([^|]+)\|/))) {
try {
const telemetry = decodeTelemetry(match[1]);
extras.telemetry = telemetry;
const telemetryOffset = comment.indexOf(match[0]);
ignore.push({ offset: telemetryOffset + offset, length: match[0].length });
if (telemetryOffset == 0) {
offset += match[0].length; // Set offset to telemetry token for subsequent fields to be relative to it (since we will remove it from the comment)
} else if (cutoff > telemetryOffset) {
cutoff = telemetryOffset; // Set cutoff for subsequent fields to be relative to telemetry token if it appears before them in the comment
}
if (withStructure) {
fields.push(
{
type: FieldType.CHAR,
name: "telemetry start",
length: 1,
value: "|",
offset: telemetryOffset
},
{
type: FieldType.STRING,
name: "sequence",
length: 2,
value: telemetry.sequence.toString(),
offset: telemetryOffset + 1
},
...telemetry.analog.map((a, i) => ({
type: FieldType.STRING,
name: `analog${i + 1}`,
length: 2,
value: a.toString(),
offset: telemetryOffset + 3 + i * 2
})),
...(telemetry.digital !== undefined
? [
{
type: FieldType.STRING,
name: "digital",
length: 2,
value: telemetry.digital.toString(),
offset: telemetryOffset + 3 + telemetry.analog.length * 2
}
]
: []),
{
type: FieldType.CHAR,
name: "telemetry end",
length: 1,
value: "|",
offset: telemetryOffset + match[1].length + 1
}
);
}
// Mask telemetry token in comment for further parsing
comment = comment.replace(match[0], "X".repeat(match[0].length));
} catch {
// Invalid telemetry format, ignore
}
}
// Process successive 7-byte data extensions at the start of the comment up to the first
// non-extension token, which may be altitude, telemetry, or other tokens. These
// extensions can appear in any order and we want to extract them all.
let ext = comment.substring(offset, cutoff);
while (ext.length >= 7) {
// RNGrrrr -> pre-calculated range in miles (4 digits)
if ((match = ext.match(/^RNG(\d{4})/))) {
const r = match[1];
extras.range = milesToMeters(parseInt(r, 10)) / 1000.0; // Convert to kilometers
if (withStructure) {
(altitudeOffset !== undefined && commentOffset < altitudeOffset ? beforeFields : fields).push(
fields.push(
{
type: FieldType.STRING,
name: "range marker",
value: "RNG",
length: 3
length: 3,
offset: offset
},
{
type: FieldType.STRING,
name: "range (rrrr)",
length: 4,
value: extras.range.toFixed(1) + "km"
value: extras.range.toFixed(1) + "km",
offset: offset + 3
}
);
}
// remove range token from ext and advance ext for further parsing
if (commentBefore !== undefined && commentBefore.length > 0) {
commentBefore = commentBefore.substring(7);
ext = commentBefore;
} else {
commentOffset += 7;
ext = ext.substring(7);
}
ignore.push({ offset, length: 7 });
offset += 7;
ext = ext.substring(7);
continue;
}
@@ -154,25 +255,34 @@ export const decodeCommentExtras = (comment: string, withStructure: boolean = fa
};
if (withStructure) {
(altitudeOffset !== undefined && commentOffset < altitudeOffset ? beforeFields : fields).push(
{ type: FieldType.STRING, name: "PHG marker", length: 3, value: "PHG" },
fields.push(
{
type: FieldType.STRING,
name: "PHG marker",
length: 3,
value: "PHG",
offset: offset
},
{
type: FieldType.STRING,
name: "power (p)",
length: 1,
value: powerWatts !== undefined ? powerWatts.toString() + "W" : undefined
value: powerWatts !== undefined ? powerWatts.toString() + "W" : undefined,
offset: offset + 3
},
{
type: FieldType.STRING,
name: "height (h)",
length: 1,
value: heightMeters !== undefined ? heightMeters.toString() + "m" : undefined
value: heightMeters !== undefined ? heightMeters.toString() + "m" : undefined,
offset: offset + 4
},
{
type: FieldType.STRING,
name: "gain (g)",
length: 1,
value: gainDbi !== undefined ? gainDbi.toString() + "dBi" : undefined
value: gainDbi !== undefined ? gainDbi.toString() + "dBi" : undefined,
offset: offset + 5
},
{
type: FieldType.STRING,
@@ -183,19 +293,16 @@ export const decodeCommentExtras = (comment: string, withStructure: boolean = fa
? typeof directivity === "number"
? directivity.toString() + "°"
: directivity
: undefined
: undefined,
offset: offset + 6
}
);
}
// remove PHG token from ext and advance ext for further parsing
if (commentBefore !== undefined && commentBefore.length > 0) {
commentBefore = commentBefore.substring(7);
} else {
commentOffset += 7;
}
ignore.push({ offset, length: 7 });
offset += 7;
ext = ext.substring(7).trimStart();
continue;
}
@@ -242,25 +349,34 @@ export const decodeCommentExtras = (comment: string, withStructure: boolean = fa
};
if (withStructure) {
(altitudeOffset !== undefined && commentOffset < altitudeOffset ? beforeFields : fields).push(
{ type: FieldType.STRING, name: "DFS marker", length: 3, value: "DFS" },
fields.push(
{
type: FieldType.STRING,
name: "DFS marker",
length: 3,
value: "DFS",
offset: offset
},
{
type: FieldType.STRING,
name: "strength (s)",
length: 1,
value: strength !== undefined ? strength.toString() : undefined
value: strength !== undefined ? strength.toString() : undefined,
offset: offset + 3
},
{
type: FieldType.STRING,
name: "height (h)",
length: 1,
value: heightMeters !== undefined ? heightMeters.toString() + "m" : undefined
value: heightMeters !== undefined ? heightMeters.toString() + "m" : undefined,
offset: offset + 4
},
{
type: FieldType.STRING,
name: "gain (g)",
length: 1,
value: gainDbi !== undefined ? gainDbi.toString() + "dBi" : undefined
value: gainDbi !== undefined ? gainDbi.toString() + "dBi" : undefined,
offset: offset + 5
},
{
type: FieldType.STRING,
@@ -271,19 +387,16 @@ export const decodeCommentExtras = (comment: string, withStructure: boolean = fa
? typeof directivity === "number"
? directivity.toString() + "°"
: directivity
: undefined
: undefined,
offset: offset + 6
}
);
}
// remove DFS token from ext and advance ext for further parsing
if (commentBefore !== undefined && commentBefore.length > 0) {
commentBefore = commentBefore.substring(7);
} else {
commentOffset += 7;
}
ignore.push({ offset, length: 7 });
offset += 7;
ext = ext.substring(7).trimStart();
continue;
}
@@ -295,14 +408,22 @@ export const decodeCommentExtras = (comment: string, withStructure: boolean = fa
extras.spd = knotsToKmh(parseInt(speedStr, 10));
if (withStructure) {
(altitudeOffset !== undefined && commentOffset < altitudeOffset ? beforeFields : fields).push(
{ type: FieldType.STRING, name: "course", length: 3, value: extras.cse.toString() + "°" },
{ type: FieldType.CHAR, name: "marker", length: 1, value: "/" },
{ type: FieldType.STRING, name: "speed", length: 3, value: extras.spd.toString() + " km/h" }
fields.push(
{ type: FieldType.STRING, name: "course", length: 3, value: extras.cse.toString() + "°", offset: offset },
{ type: FieldType.CHAR, name: "CSE marker", length: 1, value: "/", offset: offset + 3 },
{
type: FieldType.STRING,
name: "speed",
length: 3,
value: extras.spd.toString() + " km/h",
offset: offset + 4
}
);
}
// remove course/speed token from comment and advance ext for further parsing
ignore.push({ offset, length: 7 });
offset += 7;
ext = ext.substring(7).trimStart();
// If there is an 8-byte DF/NRQ following (leading '/'), parse that too
@@ -319,20 +440,23 @@ export const decodeCommentExtras = (comment: string, withStructure: boolean = fa
extras.dfs.strength = dfStrength;
if (withStructure) {
(altitudeOffset !== undefined && commentOffset < altitudeOffset ? beforeFields : fields).push(
{ type: FieldType.STRING, name: "DF marker", length: 1, value: "/" },
{ type: FieldType.STRING, name: "bearing", length: 3, value: dfBearing.toString() + "°" },
{ type: FieldType.CHAR, name: "separator", length: 1, value: "/" },
{ type: FieldType.STRING, name: "strength", length: 3, value: dfStrength.toString() }
fields.push(
{ type: FieldType.STRING, name: "DFS marker", length: 1, value: "/", offset: offset },
{
type: FieldType.STRING,
name: "bearing",
length: 3,
value: dfBearing.toString() + "°",
offset: offset + 1
},
{ type: FieldType.CHAR, name: "separator", length: 1, value: "/", offset: offset + 4 },
{ type: FieldType.STRING, name: "strength", length: 3, value: dfStrength.toString(), offset: offset + 5 }
);
}
// remove DF token from ext and advance ext for further parsing
if (commentBefore !== undefined && commentBefore.length > 0) {
commentBefore = commentBefore.substring(8);
} else {
commentOffset += 8;
}
ignore.push({ offset, length: 8 });
offset += 8;
ext = ext.substring(8).trimStart();
continue;
@@ -345,133 +469,166 @@ export const decodeCommentExtras = (comment: string, withStructure: boolean = fa
break;
}
// Parse embedded telemetry in comment. Look for |ss11|, |ss1122|, |ss112233|, |ss1122334455|, or |ss1122334455!"| patterns (where ss is sequence and each pair of digits is an analog channel in base91, and optional last pair is digital channel in base91).
if ((match = comment.match(/\|([^|]+)\|/))) {
try {
const telemetry = decodeTelemetry(match[1]);
extras.telemetry = telemetry;
comment = comment.replace(match[0], "").trim();
if (withStructure) {
fields.push(
{
type: FieldType.CHAR,
name: "telemetry start",
length: 1,
value: "|"
},
{
type: FieldType.STRING,
name: "sequence",
length: 2,
value: telemetry.sequence.toString()
},
...telemetry.analog.map((a, i) => ({
type: FieldType.STRING,
name: `analog${i + 1}`,
length: 2,
value: a.toString()
})),
...(telemetry.digital !== undefined
? [
{
type: FieldType.STRING,
name: "digital",
length: 2,
value: telemetry.digital.toString()
}
]
: []),
{
type: FieldType.CHAR,
name: "telemetry end",
length: 1,
value: "|"
}
);
}
} catch {
// Invalid telemetry format, ignore
// Any tokens we marked for ignoring should be removed from the comment when reconstructing the cleaned comment string, since they have been extracted as structured fields. We will use the ignore offsets to skip over these tokens when reconstructing the comment.
ignore.sort((a, b) => a.offset - b.offset);
offset = 0;
ignore.forEach((token) => {
if (token.offset - offset > 0) {
fields.push({
type: FieldType.STRING,
name: "comment",
offset: offset,
length: token.offset - offset
});
}
offset = token.offset + token.length;
});
if (offset < comment.length) {
fields.push({
type: FieldType.STRING,
name: "comment",
offset,
length: comment.length - offset
});
}
// Export comment with extras fields removed, if any were parsed.
if (commentOffset > 0 && commentBefore !== undefined && commentBefore.length > 0) {
extras.comment = commentBefore.substring(commentOffset) + comment;
} else if (commentBefore !== undefined && commentBefore.length > 0) {
extras.comment = commentBefore + comment;
} else {
extras.comment = comment.substring(commentOffset);
}
// Aggregate the comment fragments into a single cleaned comment string with the recognized tokens removed.
fields.sort((a, b) => a.offset - b.offset); // Ensure fields are in order of appearance in the original comment
extras.comment = fields
.map((field) => {
if (field.name !== "comment" || field.offset === undefined || field.length === undefined) {
return ""; // Remove recognized tokens from comment
}
return comment.substring(field.offset, field.offset + field.length);
})
.join("")
.trim();
if (withStructure) {
const commentBeforeFields: Field[] = commentBefore
? [
{
type: FieldType.STRING,
name: "comment",
length: commentBefore.length
}
]
: [];
const commentFields: Field[] = comment
? [
{
type: FieldType.STRING,
name: "comment",
length: comment.length
}
]
: [];
// Insert the altitude fields at the correct position in the comment section based on where the altitude token was located in the original comment. If there was no altitude token, put all fields at the start of the comment section.
extras.fields = [...beforeFields, ...commentBeforeFields, ...altitudeFields, ...fields, ...commentFields];
extras.fields = fields;
}
return extras as Extras;
};
// A dissection Field that additionally records where it starts in the
// original comment string, so fields can be sorted by position and the
// cleaned comment re-assembled from the "comment" fragments.
interface FieldWithOffset extends Field {
  offset: number; // 0-based character offset within the comment
}
/**
 * Merges extras decoded from the comment (decodeCommentExtras) into the
 * payload. The payload is mutated in place.
 *
 * Two passes are made:
 *  1. If the payload carries a position, the position-related extras (DAO,
 *     altitude, range, PHG, DFS, course/speed) are attached to it, with the
 *     DAO values also refining the coordinates themselves.
 *  2. Payload-level fields of the same names are filled in, but only when
 *     the payload declares the key and it is still undefined.
 *
 * @param payload Decoded APRS payload to enrich (mutated in place).
 * @param extras  Extras previously parsed out of the comment text.
 */
export const attachExtras = (payload: Payload, extras: Extras): void => {
  if ("position" in payload && payload.position) {
    if (extras.dao !== undefined) {
      payload.position.dao = extras.dao;
      // DAO refines the magnitude of each coordinate: for negative values
      // (south latitude / west longitude) the added precision is subtracted
      // so the absolute value grows in both hemispheres.
      if (payload.position.latitude !== undefined) {
        if (payload.position.latitude < 0) {
          payload.position.latitude -= extras.dao.latitude || 0;
        } else {
          payload.position.latitude += extras.dao.latitude || 0;
        }
      }
      if (payload.position.longitude !== undefined) {
        if (payload.position.longitude < 0) {
          payload.position.longitude -= extras.dao.longitude || 0;
        } else {
          payload.position.longitude += extras.dao.longitude || 0;
        }
      }
    }
    // Altitude/range/PHG/DFS from the comment overwrite the position
    // unconditionally...
    if (extras.altitude !== undefined) {
      payload.position.altitude = extras.altitude;
    }
    if (extras.range !== undefined) {
      payload.position.range = extras.range;
    }
    if (extras.phg !== undefined) {
      payload.position.phg = extras.phg;
    }
    if (extras.dfs !== undefined) {
      payload.position.dfs = extras.dfs;
    }
    // ...while course/speed only fill gaps, never clobbering values decoded
    // from the position report itself.
    if (extras.cse !== undefined && payload.position.course === undefined) {
      payload.position.course = extras.cse;
    }
    if (extras.spd !== undefined && payload.position.speed === undefined) {
      payload.position.speed = extras.spd;
    }
  }
  // Payload-level mirrors: set only when the payload declares the key and no
  // value was decoded earlier.
  // NOTE(review): the `in` check is false for an optional key that was never
  // assigned — presumably the decoders pre-initialize these keys; confirm.
  // Also note this branch (unlike the others) does not guard on
  // extras.dao !== undefined, so it may assign undefined (a no-op in effect).
  if ("dao" in payload && payload.dao === undefined) {
    payload.dao = extras.dao;
  }
  if ("altitude" in payload && payload.altitude === undefined && extras.altitude !== undefined) {
    payload.altitude = extras.altitude;
  }
  if ("range" in payload && payload.range === undefined && extras.range !== undefined) {
    payload.range = extras.range;
  }
  if ("phg" in payload && payload.phg === undefined && extras.phg !== undefined) {
    payload.phg = extras.phg;
  }
  if ("dfs" in payload && payload.dfs === undefined && extras.dfs !== undefined) {
    payload.dfs = extras.dfs;
  }
  if ("course" in payload && payload.course === undefined && extras.cse !== undefined) {
    payload.course = extras.cse;
  }
  if ("speed" in payload && payload.speed === undefined && extras.spd !== undefined) {
    payload.speed = extras.spd;
  }
};
/**
 * Decodes the 3-byte body of an APRS `!DAO!` comment token (the DAO
 * precision-and-datum extension).
 *
 * Three encodings are recognized:
 *  - `[A-Z]dd` — human-readable: datum byte plus one decimal digit each of
 *                added latitude and longitude precision (e.g. "W84").
 *  - `[a-z]cc` — base91: lower-case datum byte (reported upper-cased) plus
 *                one base91 character each for latitude and longitude.
 *  - a printable byte followed by two spaces — datum only, no added precision.
 *
 * @param ext The three characters between the `!` delimiters.
 * @returns Decoded DAO fields, or undefined when the token is not valid DAO.
 */
export const decodeDAO = (ext: string): IDAO | undefined => {
  if (ext.length !== 3) {
    return undefined;
  }
  const dao: IDAO = { datum_id: "?" };
  // eslint-disable-next-line no-useless-assignment
  let match: RegExpMatchArray | null = null;
  if ((match = /^([A-Z])(\d)(\d)$/.exec(ext))) {
    // Human-readable form: datum byte with explicit decimal precision digits
    // (e.g. "W84": datum "W" = WGS84, lat digit 8, lon digit 4).
    dao.datum_id = match[1];
    dao.resolution = getDAOResolution(3);
    // NOTE(review): the APRS 1.2 DAO proposal adds the digit as thousandths
    // of a minute (d * 0.001'); this uses hundredths (d * 0.01') — confirm
    // the intended scale (the unit tests currently pin this behavior).
    dao.latitude = (parseInt(match[2]) * 0.01) / 60; // fractional minutes -> degrees
    dao.longitude = (parseInt(match[3]) * 0.01) / 60; // fractional minutes -> degrees
    return dao;
  } else if ((match = /^([a-z])([\x21-\x7b])([\x21-\x7b])$/.exec(ext))) {
    // Base91 form: a lower-case datum byte marks this encoding and is
    // reported upper-cased (e.g. "w" -> "W"); higher resolution than the
    // human-readable form.
    dao.datum_id = match[1].toUpperCase();
    dao.resolution = getDAOResolution(4);
    dao.latitude = (base91ToNumber(match[2]) * 0.01) / 60; // base91 value as fractional minutes -> degrees
    dao.longitude = (base91ToNumber(match[3]) * 0.01) / 60; // base91 value as fractional minutes -> degrees
    return dao;
  } else if ((match = /^([\x21-\x7b]) {2}$/.exec(ext))) {
    // Datum byte followed by two spaces: datum only, no added precision.
    dao.datum_id = match[1];
    if (/^[a-z]$/.test(dao.datum_id)) {
      dao.datum_id = dao.datum_id.toUpperCase();
    }
    return dao; // No resolution, just datum
  }
  return undefined; // Invalid DAO format
};
/**
 * Computes the position resolution implied by a DAO precision exponent.
 *
 * `knotsToKmh` is reused here purely for its 1.852 scale factor: one minute
 * of latitude is 1852 m, numerically the same constant as knots -> km/h.
 * Examples: n = 3 (human-readable DAO digit) -> 1.852, n = 4 (base91 DAO)
 * -> 0.1852.
 *
 * NOTE(review): the 600 base for n <= -2 presumably corresponds to coarse
 * position-ambiguity levels (fractions of a degree rather than a minute) —
 * confirm against the ambiguity handling elsewhere in the parser.
 *
 * @param n Precision exponent; larger n means finer resolution.
 * @returns The resolution value. Always defined — the previous
 *          `number | undefined` return type was needlessly wide.
 */
const getDAOResolution = (n: number): number => {
  const base = n <= -2 ? 600 : 1000;
  return knotsToKmh(base * 10 ** -n);
};
/**
* Decodes a Base91 Telemetry extension string (delimited by '|') into its components.
*

View File

@@ -3,8 +3,8 @@ import { describe, expect, it } from "vitest";
import { Frame } from "../src/frame";
import { DataType, type ObjectPayload, type PositionPayload } from "../src/frame.types";
import { feetToMeters, milesToMeters } from "../src/parser";
import { decodeTelemetry } from "../src/payload.extras";
import { base91ToNumber, feetToMeters, knotsToKmh, milesToMeters } from "../src/parser";
import { decodeDAO, decodeTelemetry } from "../src/payload.extras";
describe("APRS extras test vectors", () => {
it("parses altitude token in the beginning of a comment and emits structure", () => {
@@ -174,6 +174,50 @@ describe("APRS extras test vectors", () => {
const commentIndex = (commentSeg!.fields ?? []).findIndex((f) => f.name === "comment");
expect(commentIndex).toBeGreaterThan(altitudeIndex); // Comment comes after altitude
});
it("parses DAO token and emits structure", () => {
const raw = "N0CALL-7>APLT00,WIDE1-1,QB1N4,qAO,N0CALL-10:!5140.06N/00615.91E[360/028/A=000085 !wrt!";
const frame = Frame.fromString(raw);
const res = frame.decode(true) as { payload: PositionPayload | null; structure: Dissected };
const { payload } = res;
expect(payload).not.toBeNull();
expect(payload!.type).toBe(DataType.PositionNoTimestampNoMessaging);
expect(payload!.position.dao!).toBeDefined();
expect(payload!.position.dao!.datum_id).toBe("W");
});
});
describe("decodeDAO", () => {
it("decodes valid DAO token with WGS84 datum", () => {
const dao = decodeDAO("W84");
expect(dao).not.toBeNull();
expect(dao!.datum_id).toBe("W");
expect(dao!.resolution).toBe(knotsToKmh(1));
expect(dao!.latitude).toBeCloseTo((8 * 0.01) / 60, 6);
expect(dao!.longitude).toBeCloseTo((4 * 0.01) / 60, 6);
});
it("decodes valid DAO base91 token", () => {
const dao = decodeDAO("wrt");
expect(dao).not.toBeNull();
expect(dao!.datum_id).toBe("W");
expect(dao!.resolution).toBe(knotsToKmh(0.1));
expect(dao!.latitude).toBeCloseTo((base91ToNumber("r") * 0.01) / 60, 6);
expect(dao!.longitude).toBeCloseTo((base91ToNumber("t") * 0.01) / 60, 6);
});
it("decodes valid DAO only token", () => {
const dao = decodeDAO("! ");
expect(dao).not.toBeNull();
expect(dao!.datum_id).toBe("!");
});
it("returns undefined for invalid DAO token", () => {
expect(decodeDAO("invalid")).toBeUndefined();
expect(decodeDAO("")).toBeUndefined();
expect(decodeDAO("ab")).toBeUndefined();
});
});
describe("decodeTelemetry", () => {
@@ -238,4 +282,18 @@ describe("decodeTelemetry", () => {
expect(payload!.position).toBeDefined();
expect(payload!.position.comment).toBe("LoRa APRS Tracker");
});
it("decodes composite test vector with altitude and telemetry", () => {
const raw = "N0CALL-11>APLRFT,qAR,N0CALL-10:!\\45;<P(6y>HIGLoRa APRS Tracker|$T%R#`| on air/A=000012!";
const frame = Frame.fromString(raw);
const { payload, structure } = frame.decode(true) as { payload: ObjectPayload | null; structure: Dissected };
// console.log(structure[structure.length - 1]); // Log the last segment for debugging
expect(payload).not.toBeNull();
expect(payload!.position).toBeDefined();
expect(payload!.position.altitude).toBeCloseTo(feetToMeters(12), 3);
//expect(payload!.position.comment).toBe("LoRa APRS Tracker on air");
expect(structure[structure.length - 1].fields.filter((s) => s.name === "comment").length).toBe(3);
});
});