Compare commits
12 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
e49333611f
|
|||
|
0055938338
|
|||
|
75e31c2008
|
|||
|
1aa8eb363f
|
|||
|
34240dfbd8
|
|||
|
46e7694ec6
|
|||
|
04166daeee
|
|||
|
e9e329ccc1
|
|||
|
6adf1281ef
|
|||
|
5b836a4e0c
|
|||
|
c28572e3b6
|
|||
|
17caa22331
|
25
package.json
25
package.json
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "@hamradio/aprs",
|
||||
"type": "module",
|
||||
"version": "1.2.0",
|
||||
"version": "1.4.0",
|
||||
"description": "APRS (Automatic Packet Reporting System) protocol support for Typescript",
|
||||
"keywords": [
|
||||
"APRS",
|
||||
@@ -17,7 +17,7 @@
|
||||
"license": "MIT",
|
||||
"author": "Wijnand Modderman-Lenstra",
|
||||
"main": "dist/index.js",
|
||||
"module": "dist/index.mjs",
|
||||
"module": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
"files": [
|
||||
"dist"
|
||||
@@ -25,7 +25,7 @@
|
||||
"exports": {
|
||||
".": {
|
||||
"types": "./dist/index.d.ts",
|
||||
"import": "./dist/index.mjs",
|
||||
"import": "./dist/index.js",
|
||||
"require": "./dist/index.js"
|
||||
}
|
||||
},
|
||||
@@ -37,22 +37,25 @@
|
||||
"test:watch": "vitest --watch",
|
||||
"test:ci": "vitest --run",
|
||||
"lint": "eslint .",
|
||||
"prepare": "npm run build"
|
||||
"prepare": "npm run build",
|
||||
"push": "npm version patch && git push",
|
||||
"push-minor": "npm version minor && git push",
|
||||
"push-major": "npm version major && git push"
|
||||
},
|
||||
"dependencies": {
|
||||
"@hamradio/packet": "^1.1.1",
|
||||
"extended-nmea": "^2.1.3"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@eslint/js": "^10.0.1",
|
||||
"@trivago/prettier-plugin-sort-imports": "^6.0.2",
|
||||
"@vitest/coverage-v8": "^4.0.18",
|
||||
"@vitest/coverage-v8": "^4.1.0",
|
||||
"eslint": "^10.0.3",
|
||||
"globals": "^17.4.0",
|
||||
"prettier": "^3.8.1",
|
||||
"tsup": "^8.5.1",
|
||||
"typescript": "^5.9.3",
|
||||
"typescript-eslint": "^8.57.0",
|
||||
"vitest": "^4.0.18"
|
||||
},
|
||||
"dependencies": {
|
||||
"@hamradio/packet": "^1.1.0",
|
||||
"extended-nmea": "^2.1.3"
|
||||
"typescript-eslint": "^8.57.1",
|
||||
"vitest": "^4.1.0"
|
||||
}
|
||||
}
|
||||
|
||||
1088
src/deviceid.ts
Normal file
1088
src/deviceid.ts
Normal file
File diff suppressed because it is too large
Load Diff
2273
src/frame.ts
2273
src/frame.ts
File diff suppressed because it is too large
Load Diff
@@ -1,4 +1,11 @@
|
||||
import { Dissected, Segment } from "@hamradio/packet";
|
||||
import { Dissected, Field, Segment } from "@hamradio/packet";
|
||||
|
||||
// Any comment that contains this marker will set the doNotArchive flag on the
|
||||
// decoded payload, which can be used by applications to skip archiving or
|
||||
// logging frames that are meant to be transient or test data. This allows users
|
||||
// to include the marker in their APRS comments when they want to indicate that
|
||||
// a particular frame should not be stored long-term.
|
||||
export const DO_NOT_ARCHIVE_MARKER = "!x!";
|
||||
|
||||
export interface IAddress {
|
||||
call: string;
|
||||
@@ -22,7 +29,7 @@ export enum DataType {
|
||||
PositionWithTimestampWithMessaging = "@",
|
||||
|
||||
// Mic-E
|
||||
MicECurrent = "`",
|
||||
MicE = "`",
|
||||
MicEOld = "'",
|
||||
|
||||
// Messages and Bulletins
|
||||
@@ -60,6 +67,27 @@ export enum DataType {
|
||||
InvalidOrTest = ","
|
||||
}
|
||||
|
||||
export const DataTypeNames: { [key in DataType]: string } = {
|
||||
[DataType.PositionNoTimestampNoMessaging]: "position",
|
||||
[DataType.PositionNoTimestampWithMessaging]: "position with messaging",
|
||||
[DataType.PositionWithTimestampNoMessaging]: "position with timestamp",
|
||||
[DataType.PositionWithTimestampWithMessaging]: "position with timestamp and messaging",
|
||||
[DataType.MicE]: "Mic-E",
|
||||
[DataType.MicEOld]: "Mic-E (old)",
|
||||
[DataType.Message]: "message/bulletin",
|
||||
[DataType.Object]: "object",
|
||||
[DataType.Item]: "item",
|
||||
[DataType.Status]: "status",
|
||||
[DataType.Query]: "query",
|
||||
[DataType.TelemetryData]: "telemetry data",
|
||||
[DataType.WeatherReportNoPosition]: "weather report",
|
||||
[DataType.RawGPS]: "raw GPS data",
|
||||
[DataType.StationCapabilities]: "station capabilities",
|
||||
[DataType.UserDefined]: "user defined",
|
||||
[DataType.ThirdParty]: "third-party traffic",
|
||||
[DataType.InvalidOrTest]: "invalid/test"
|
||||
};
|
||||
|
||||
export interface ISymbol {
|
||||
table: string; // Symbol table identifier
|
||||
code: string; // Symbol code
|
||||
@@ -73,21 +101,13 @@ export interface IPosition {
|
||||
longitude: number; // Decimal degrees
|
||||
ambiguity?: number; // Position ambiguity (0-4)
|
||||
altitude?: number; // Meters
|
||||
speed?: number; // Speed in knots/kmh depending on source
|
||||
speed?: number; // Speed in km/h
|
||||
course?: number; // Course in degrees
|
||||
range?: number; // Kilometers
|
||||
phg?: IPowerHeightGain;
|
||||
dfs?: IDirectionFinding;
|
||||
symbol?: ISymbol;
|
||||
comment?: string;
|
||||
/**
|
||||
* Optional reported radio range in miles (from RNG token in comment)
|
||||
*/
|
||||
range?: number;
|
||||
/**
|
||||
* Optional power/height/gain information from PHG token
|
||||
* PHG format: PHGpphhgg (pp=power, hh=height, gg=gain) as numeric values
|
||||
*/
|
||||
phg?: IPowerHeightGain;
|
||||
/** Direction-finding / DF information parsed from comment tokens */
|
||||
dfs?: IDirectionFinding;
|
||||
|
||||
toString(): string; // Return combined position representation (e.g., "lat,lon,alt")
|
||||
toCompressed?(): CompressedPosition; // Optional method to convert to compressed format
|
||||
@@ -121,6 +141,12 @@ export interface ITimestamp {
|
||||
toDate(): Date; // Convert to Date object respecting timezone
|
||||
}
|
||||
|
||||
export interface ITelemetry {
|
||||
sequence: number;
|
||||
analog: number[];
|
||||
digital?: number;
|
||||
}
|
||||
|
||||
// Position Report Payload
|
||||
export interface PositionPayload {
|
||||
type:
|
||||
@@ -128,6 +154,7 @@ export interface PositionPayload {
|
||||
| DataType.PositionNoTimestampWithMessaging
|
||||
| DataType.PositionWithTimestampNoMessaging
|
||||
| DataType.PositionWithTimestampWithMessaging;
|
||||
doNotArchive?: boolean; // Optional flag to indicate frame should not be archived
|
||||
timestamp?: ITimestamp;
|
||||
position: IPosition;
|
||||
messaging: boolean; // Whether APRS messaging is enabled
|
||||
@@ -156,7 +183,8 @@ export interface CompressedPosition {
|
||||
|
||||
// Mic-E Payload (compressed in destination address)
|
||||
export interface MicEPayload {
|
||||
type: DataType.MicECurrent | DataType.MicEOld;
|
||||
type: DataType.MicE | DataType.MicEOld;
|
||||
doNotArchive?: boolean; // Optional flag to indicate frame should not be archived
|
||||
position: IPosition;
|
||||
messageType?: string; // Standard Mic-E message
|
||||
isStandard?: boolean; // Whether messageType is a standard Mic-E message
|
||||
@@ -170,6 +198,7 @@ export type MessageVariant = "message" | "bulletin";
|
||||
export interface MessagePayload {
|
||||
type: DataType.Message;
|
||||
variant: "message";
|
||||
doNotArchive?: boolean; // Optional flag to indicate frame should not be archived
|
||||
addressee: string; // 9 character padded callsign
|
||||
text: string; // Message text
|
||||
messageNumber?: string; // Message ID for acknowledgment
|
||||
@@ -181,6 +210,7 @@ export interface MessagePayload {
|
||||
export interface BulletinPayload {
|
||||
type: DataType.Message;
|
||||
variant: "bulletin";
|
||||
doNotArchive?: boolean; // Optional flag to indicate frame should not be archived
|
||||
bulletinId: string; // Bulletin identifier (BLN#)
|
||||
text: string;
|
||||
group?: string; // Optional group bulletin
|
||||
@@ -189,6 +219,7 @@ export interface BulletinPayload {
|
||||
// Object Payload
|
||||
export interface ObjectPayload {
|
||||
type: DataType.Object;
|
||||
doNotArchive?: boolean; // Optional flag to indicate frame should not be archived
|
||||
name: string; // 9 character object name
|
||||
timestamp: ITimestamp;
|
||||
alive: boolean; // True if object is active, false if killed
|
||||
@@ -200,6 +231,7 @@ export interface ObjectPayload {
|
||||
// Item Payload
|
||||
export interface ItemPayload {
|
||||
type: DataType.Item;
|
||||
doNotArchive?: boolean; // Optional flag to indicate frame should not be archived
|
||||
name: string; // 3-9 character item name
|
||||
alive: boolean; // True if item is active, false if killed
|
||||
position: IPosition;
|
||||
@@ -208,6 +240,7 @@ export interface ItemPayload {
|
||||
// Status Payload
|
||||
export interface StatusPayload {
|
||||
type: DataType.Status;
|
||||
doNotArchive?: boolean; // Optional flag to indicate frame should not be archived
|
||||
timestamp?: ITimestamp;
|
||||
text: string;
|
||||
maidenhead?: string; // Optional Maidenhead grid locator
|
||||
@@ -332,6 +365,7 @@ export interface DFReportPayload {
|
||||
|
||||
export interface BasePayload {
|
||||
type: DataType;
|
||||
doNotArchive?: boolean; // Optional flag to indicate frame should not be archived
|
||||
}
|
||||
|
||||
// Union type for all decoded payload types
|
||||
@@ -362,3 +396,19 @@ export interface DecodedFrame extends IFrame {
|
||||
decoded?: Payload;
|
||||
structure?: Dissected; // Routing and other frame-level sections
|
||||
}
|
||||
|
||||
// Extras is an internal helper type used during decoding to accumulate additional
|
||||
// information that may not fit directly into the standard payload structure,
|
||||
// such as comments, calculated fields, or other metadata that can be useful for
|
||||
// applications consuming the decoded frames.
|
||||
export interface Extras {
|
||||
comment: string;
|
||||
altitude?: number;
|
||||
range?: number;
|
||||
phg?: IPowerHeightGain;
|
||||
dfs?: IDirectionFinding;
|
||||
cse?: number;
|
||||
spd?: number;
|
||||
fields?: Field[];
|
||||
telemetry?: ITelemetry;
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
export { Frame, Address, Timestamp } from "./frame";
|
||||
export { Frame, Address } from "./frame";
|
||||
|
||||
export { type IAddress, type IFrame, DataType as DataTypeIdentifier } from "./frame.types";
|
||||
|
||||
@@ -34,6 +34,9 @@ export {
|
||||
type DecodedFrame
|
||||
} from "./frame.types";
|
||||
|
||||
export { Position } from "./position";
|
||||
export { Timestamp } from "./timestamp";
|
||||
|
||||
export {
|
||||
base91ToNumber,
|
||||
knotsToKmh,
|
||||
@@ -43,3 +46,6 @@ export {
|
||||
celsiusToFahrenheit,
|
||||
fahrenheitToCelsius
|
||||
} from "./parser";
|
||||
|
||||
export { getDeviceID } from "./deviceid";
|
||||
export type { DeviceID } from "./deviceid";
|
||||
|
||||
71
src/payload.capabilities.ts
Normal file
71
src/payload.capabilities.ts
Normal file
@@ -0,0 +1,71 @@
|
||||
import { FieldType, type Segment } from "@hamradio/packet";
|
||||
|
||||
import { DataType, type Payload, type StationCapabilitiesPayload } from "./frame.types";
|
||||
|
||||
export const decodeCapabilitiesPayload = (
|
||||
raw: string,
|
||||
withStructure: boolean = false
|
||||
): {
|
||||
payload: Payload | null;
|
||||
segment?: Segment[];
|
||||
} => {
|
||||
try {
|
||||
if (raw.length < 2) return { payload: null };
|
||||
|
||||
// Extract the text after the '<' identifier
|
||||
let rest = raw.substring(1).trim();
|
||||
|
||||
// Some implementations include a closing '>' or other trailing chars; strip common wrappers
|
||||
if (rest.endsWith(">")) rest = rest.slice(0, -1).trim();
|
||||
|
||||
// Split capabilities by commas, semicolons or whitespace
|
||||
const tokens = rest
|
||||
.split(/[,;\s]+/)
|
||||
.map((t) => t.trim())
|
||||
.filter(Boolean);
|
||||
|
||||
const payload: StationCapabilitiesPayload = {
|
||||
type: DataType.StationCapabilities,
|
||||
capabilities: tokens
|
||||
} as const;
|
||||
|
||||
if (withStructure) {
|
||||
const segments: Segment[] = [];
|
||||
segments.push({
|
||||
name: "capabilities",
|
||||
data: new TextEncoder().encode(rest).buffer,
|
||||
isString: true,
|
||||
fields: [
|
||||
{
|
||||
type: FieldType.STRING,
|
||||
name: "capabilities",
|
||||
length: rest.length
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
for (const cap of tokens) {
|
||||
segments.push({
|
||||
name: "capability",
|
||||
data: new TextEncoder().encode(cap).buffer,
|
||||
isString: true,
|
||||
fields: [
|
||||
{
|
||||
type: FieldType.STRING,
|
||||
name: "capability",
|
||||
length: cap.length
|
||||
}
|
||||
]
|
||||
});
|
||||
}
|
||||
|
||||
return { payload, segment: segments };
|
||||
}
|
||||
|
||||
return { payload };
|
||||
} catch {
|
||||
return { payload: null };
|
||||
}
|
||||
};
|
||||
|
||||
export default decodeCapabilitiesPayload;
|
||||
504
src/payload.extras.ts
Normal file
504
src/payload.extras.ts
Normal file
@@ -0,0 +1,504 @@
|
||||
import { type Field, FieldType } from "@hamradio/packet";
|
||||
|
||||
import type { Extras, ITelemetry, Payload } from "./frame.types";
|
||||
import { base91ToNumber, feetToMeters, knotsToKmh, milesToMeters } from "./parser";
|
||||
|
||||
/**
|
||||
* Decodes structured extras from an APRS comment string, extracting known tokens
|
||||
* for altitude, range, PHG, DFS, course/speed, and embedded telemetry, and
|
||||
* returns an object with the extracted values and a cleaned comment string with
|
||||
* the tokens removed.
|
||||
*
|
||||
* If withStructure is true, also returns an array of fields representing the
|
||||
* structure of the extras for use in structured packet parsing.
|
||||
*
|
||||
* @param comment The APRS comment string to decode.
|
||||
* @param withStructure Whether to include structured fields in the result.
|
||||
* @returns An object containing the decoded extras and the cleaned comment string.
|
||||
*/
|
||||
export const decodeCommentExtras = (comment: string, withStructure: boolean = false): Extras => {
|
||||
if (!comment || comment.length === 0) return { comment };
|
||||
|
||||
const extras: Partial<Extras> = {};
|
||||
const fields: Field[] = [];
|
||||
const beforeFields: Field[] = [];
|
||||
let altitudeOffset: number | undefined = undefined;
|
||||
let altitudeFields: Field[] = [];
|
||||
let commentOffset: number = 0;
|
||||
let commentBefore: string | undefined = undefined;
|
||||
|
||||
// eslint-disable-next-line no-useless-assignment
|
||||
let match: RegExpMatchArray | null = null;
|
||||
|
||||
// Process successive 7-byte data extensions at the start of the comment.
|
||||
comment = comment.trimStart();
|
||||
let ext = comment;
|
||||
while (ext.length >= 7) {
|
||||
// We first process the altitude marker, because it may appear anywhere
|
||||
// in the comment and we want to extract it and its value before
|
||||
// processing other tokens that may be present.
|
||||
//
|
||||
// /A=NNNNNN -> altitude in feet (6 digits)
|
||||
// /A=-NNNNN -> altitude in feet with leading minus for negative altitudes (5 digits)
|
||||
const altMatch = ext.match(/\/A=(-\d{5}|\d{6})/);
|
||||
if (altitudeOffset === undefined && altMatch) {
|
||||
const altitude = feetToMeters(parseInt(altMatch[1], 10)); // feet to meters
|
||||
if (isNaN(altitude)) {
|
||||
break; // Invalid altitude format, stop parsing extras
|
||||
}
|
||||
extras.altitude = altitude;
|
||||
|
||||
// Keep track of where the altitude token appears in the comment for structure purposes.
|
||||
altitudeOffset = comment.indexOf(altMatch[0]);
|
||||
|
||||
if (withStructure) {
|
||||
altitudeFields = [
|
||||
{
|
||||
type: FieldType.STRING,
|
||||
name: "altitude marker",
|
||||
data: new TextEncoder().encode("/A=").buffer,
|
||||
value: "/A=",
|
||||
length: 3
|
||||
},
|
||||
{
|
||||
type: FieldType.STRING,
|
||||
name: "altitude",
|
||||
data: new TextEncoder().encode(altMatch[1]).buffer,
|
||||
value: altitude.toFixed(1) + "m",
|
||||
length: 6
|
||||
}
|
||||
];
|
||||
}
|
||||
|
||||
if (altitudeOffset > 0) {
|
||||
// Reset the comment with the altitude marker removed.
|
||||
commentBefore = comment.substring(0, altitudeOffset);
|
||||
comment = comment.substring(altitudeOffset + altMatch[0].length);
|
||||
ext = commentBefore; // Continue processing extensions in the part of the comment before the altitude marker
|
||||
commentOffset = 0; // Reset
|
||||
continue;
|
||||
}
|
||||
|
||||
// remove altitude token from ext and advance ext for further parsing
|
||||
commentOffset += altMatch[0].length;
|
||||
ext = ext.replace(altMatch[0], "").trimStart();
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
// RNGrrrr -> pre-calculated range in miles (4 digits)
|
||||
if ((match = ext.match(/^RNG(\d{4})/))) {
|
||||
const r = match[1];
|
||||
extras.range = milesToMeters(parseInt(r, 10)) / 1000.0; // Convert to kilometers
|
||||
if (withStructure) {
|
||||
(altitudeOffset !== undefined && commentOffset < altitudeOffset ? beforeFields : fields).push(
|
||||
{
|
||||
type: FieldType.STRING,
|
||||
name: "range marker",
|
||||
value: "RNG",
|
||||
length: 3
|
||||
},
|
||||
{
|
||||
type: FieldType.STRING,
|
||||
name: "range (rrrr)",
|
||||
length: 4,
|
||||
value: extras.range.toFixed(1) + "km"
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
// remove range token from ext and advance ext for further parsing
|
||||
if (commentBefore !== undefined && commentBefore.length > 0) {
|
||||
commentBefore = commentBefore.substring(7);
|
||||
ext = commentBefore;
|
||||
} else {
|
||||
commentOffset += 7;
|
||||
ext = ext.substring(7);
|
||||
}
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
// PHGphgd
|
||||
//if (!extras.phg && ext.startsWith("PHG")) {
|
||||
if (!extras.phg && (match = ext.match(/^PHG([0-9 ])([0-9 ])([0-9 ])([0-9 ])/))) {
|
||||
// PHGphgd: p = power (0-9 or space), h = height (0-9 or space), g = gain (0-9 or space), d = directivity (0-9 or space)
|
||||
const p = match[1];
|
||||
const h = match[2];
|
||||
const g = match[3];
|
||||
const d = match[4];
|
||||
const pNum = parseInt(p, 10);
|
||||
const powerWatts = Number.isNaN(pNum) ? undefined : pNum * pNum;
|
||||
const hIndex = h.charCodeAt(0) - 48;
|
||||
const heightFeet = 10 * Math.pow(2, hIndex);
|
||||
const heightMeters = feetToMeters(heightFeet);
|
||||
const gNum = parseInt(g, 10);
|
||||
const gainDbi = Number.isNaN(gNum) ? undefined : gNum;
|
||||
const dNum = parseInt(d, 10);
|
||||
let directivity: number | "omni" | "unknown" | undefined;
|
||||
if (Number.isNaN(dNum)) {
|
||||
directivity = undefined;
|
||||
} else if (dNum === 0) {
|
||||
directivity = "omni";
|
||||
} else if (dNum >= 1 && dNum <= 8) {
|
||||
directivity = dNum * 45;
|
||||
} else if (dNum === 9) {
|
||||
directivity = "unknown";
|
||||
}
|
||||
|
||||
extras.phg = {
|
||||
power: powerWatts,
|
||||
height: heightMeters,
|
||||
gain: gainDbi,
|
||||
directivity
|
||||
};
|
||||
|
||||
if (withStructure) {
|
||||
(altitudeOffset !== undefined && commentOffset < altitudeOffset ? beforeFields : fields).push(
|
||||
{ type: FieldType.STRING, name: "PHG marker", length: 3, value: "PHG" },
|
||||
{
|
||||
type: FieldType.STRING,
|
||||
name: "power (p)",
|
||||
length: 1,
|
||||
value: powerWatts !== undefined ? powerWatts.toString() + "W" : undefined
|
||||
},
|
||||
{
|
||||
type: FieldType.STRING,
|
||||
name: "height (h)",
|
||||
length: 1,
|
||||
value: heightMeters !== undefined ? heightMeters.toString() + "m" : undefined
|
||||
},
|
||||
{
|
||||
type: FieldType.STRING,
|
||||
name: "gain (g)",
|
||||
length: 1,
|
||||
value: gainDbi !== undefined ? gainDbi.toString() + "dBi" : undefined
|
||||
},
|
||||
{
|
||||
type: FieldType.STRING,
|
||||
name: "directivity (d)",
|
||||
length: 1,
|
||||
value:
|
||||
directivity !== undefined
|
||||
? typeof directivity === "number"
|
||||
? directivity.toString() + "°"
|
||||
: directivity
|
||||
: undefined
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
// remove PHG token from ext and advance ext for further parsing
|
||||
if (commentBefore !== undefined && commentBefore.length > 0) {
|
||||
commentBefore = commentBefore.substring(7);
|
||||
} else {
|
||||
commentOffset += 7;
|
||||
}
|
||||
ext = ext.substring(7).trimStart();
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
// DFSshgd
|
||||
if (ext.startsWith("DFS")) {
|
||||
// DFSshgd: s = strength (0-9), h = height (0-9), g = gain (0-9), d = directivity (0-9)
|
||||
const s = ext.charAt(3);
|
||||
const h = ext.charAt(4);
|
||||
const g = ext.charAt(5);
|
||||
const d = ext.charAt(6);
|
||||
|
||||
const sNum = parseInt(s, 10);
|
||||
const hNum = parseInt(h, 10);
|
||||
const gNum = parseInt(g, 10);
|
||||
const dNum = parseInt(d, 10);
|
||||
|
||||
// Strength: s = 0-9, direct value
|
||||
const strength = Number.isNaN(sNum) ? undefined : sNum;
|
||||
|
||||
// Height: h = 0-9, height = 10 * 2^h feet (spec: h is exponent)
|
||||
const heightFeet = Number.isNaN(hNum) ? undefined : 10 * Math.pow(2, hNum);
|
||||
const heightMeters = heightFeet !== undefined ? feetToMeters(heightFeet) : undefined;
|
||||
|
||||
// Gain: g = 0-9, gain in dB
|
||||
const gainDbi = Number.isNaN(gNum) ? undefined : gNum;
|
||||
|
||||
// Directivity: d = 0-9, 0 = omni, 1-8 = d*45°, 9 = unknown
|
||||
let directivity: number | "omni" | "unknown" | undefined;
|
||||
if (Number.isNaN(dNum)) {
|
||||
directivity = undefined;
|
||||
} else if (dNum === 0) {
|
||||
directivity = "omni";
|
||||
} else if (dNum >= 1 && dNum <= 8) {
|
||||
directivity = dNum * 45;
|
||||
} else if (dNum === 9) {
|
||||
directivity = "unknown";
|
||||
}
|
||||
|
||||
extras.dfs = {
|
||||
strength,
|
||||
height: heightMeters,
|
||||
gain: gainDbi,
|
||||
directivity
|
||||
};
|
||||
|
||||
if (withStructure) {
|
||||
(altitudeOffset !== undefined && commentOffset < altitudeOffset ? beforeFields : fields).push(
|
||||
{ type: FieldType.STRING, name: "DFS marker", length: 3, value: "DFS" },
|
||||
{
|
||||
type: FieldType.STRING,
|
||||
name: "strength (s)",
|
||||
length: 1,
|
||||
value: strength !== undefined ? strength.toString() : undefined
|
||||
},
|
||||
{
|
||||
type: FieldType.STRING,
|
||||
name: "height (h)",
|
||||
length: 1,
|
||||
value: heightMeters !== undefined ? heightMeters.toString() + "m" : undefined
|
||||
},
|
||||
{
|
||||
type: FieldType.STRING,
|
||||
name: "gain (g)",
|
||||
length: 1,
|
||||
value: gainDbi !== undefined ? gainDbi.toString() + "dBi" : undefined
|
||||
},
|
||||
{
|
||||
type: FieldType.STRING,
|
||||
name: "directivity (d)",
|
||||
length: 1,
|
||||
value:
|
||||
directivity !== undefined
|
||||
? typeof directivity === "number"
|
||||
? directivity.toString() + "°"
|
||||
: directivity
|
||||
: undefined
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
// remove DFS token from ext and advance ext for further parsing
|
||||
if (commentBefore !== undefined && commentBefore.length > 0) {
|
||||
commentBefore = commentBefore.substring(7);
|
||||
} else {
|
||||
commentOffset += 7;
|
||||
}
|
||||
ext = ext.substring(7).trimStart();
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
// Course/Speed DDD/SSS (7 bytes: 3 digits / 3 digits)
|
||||
if (extras.cse === undefined && /^\d{3}\/\d{3}/.test(ext)) {
|
||||
const courseStr = ext.substring(0, 3);
|
||||
const speedStr = ext.substring(4, 7);
|
||||
extras.cse = parseInt(courseStr, 10);
|
||||
extras.spd = knotsToKmh(parseInt(speedStr, 10));
|
||||
|
||||
if (withStructure) {
|
||||
(altitudeOffset !== undefined && commentOffset < altitudeOffset ? beforeFields : fields).push(
|
||||
{ type: FieldType.STRING, name: "course", length: 3, value: extras.cse.toString() + "°" },
|
||||
{ type: FieldType.CHAR, name: "marker", length: 1, value: "/" },
|
||||
{ type: FieldType.STRING, name: "speed", length: 3, value: extras.spd.toString() + " km/h" }
|
||||
);
|
||||
}
|
||||
|
||||
// remove course/speed token from comment and advance ext for further parsing
|
||||
ext = ext.substring(7).trimStart();
|
||||
|
||||
// If there is an 8-byte DF/NRQ following (leading '/'), parse that too
|
||||
if (ext.length >= 8 && ext.charAt(0) === "/") {
|
||||
const dfExt = ext.substring(0, 8); // e.g. /270/729
|
||||
const m = dfExt.match(/\/(\d{3})\/(\d{3})/);
|
||||
if (m) {
|
||||
const dfBearing = parseInt(m[1], 10);
|
||||
const dfStrength = parseInt(m[2], 10);
|
||||
if (extras.dfs === undefined) {
|
||||
extras.dfs = {};
|
||||
}
|
||||
extras.dfs.bearing = dfBearing;
|
||||
extras.dfs.strength = dfStrength;
|
||||
|
||||
if (withStructure) {
|
||||
(altitudeOffset !== undefined && commentOffset < altitudeOffset ? beforeFields : fields).push(
|
||||
{ type: FieldType.STRING, name: "DF marker", length: 1, value: "/" },
|
||||
{ type: FieldType.STRING, name: "bearing", length: 3, value: dfBearing.toString() + "°" },
|
||||
{ type: FieldType.CHAR, name: "separator", length: 1, value: "/" },
|
||||
{ type: FieldType.STRING, name: "strength", length: 3, value: dfStrength.toString() }
|
||||
);
|
||||
}
|
||||
|
||||
// remove DF token from ext and advance ext for further parsing
|
||||
if (commentBefore !== undefined && commentBefore.length > 0) {
|
||||
commentBefore = commentBefore.substring(8);
|
||||
} else {
|
||||
commentOffset += 8;
|
||||
}
|
||||
ext = ext.substring(8).trimStart();
|
||||
|
||||
continue;
|
||||
}
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
// No recognized 7+-byte extension at start
|
||||
break;
|
||||
}
|
||||
|
||||
// Parse embedded telemetry in comment. Look for |ss11|, |ss1122|, |ss112233|, |ss1122334455|, or |ss1122334455!"| patterns (where ss is sequence and each pair of digits is an analog channel in base91, and optional last pair is digital channel in base91).
|
||||
if ((match = comment.match(/\|([a-z0-9]{4,14})\|/i))) {
|
||||
try {
|
||||
const telemetry = decodeTelemetry(match[1]);
|
||||
extras.telemetry = telemetry;
|
||||
if (withStructure) {
|
||||
fields.push(
|
||||
{
|
||||
type: FieldType.CHAR,
|
||||
name: "telemetry start",
|
||||
length: 1,
|
||||
value: "|"
|
||||
},
|
||||
{
|
||||
type: FieldType.STRING,
|
||||
name: "sequence",
|
||||
length: 2,
|
||||
value: telemetry.sequence.toString()
|
||||
},
|
||||
...telemetry.analog.map((a, i) => ({
|
||||
type: FieldType.STRING,
|
||||
name: `analog${i + 1}`,
|
||||
length: 2,
|
||||
value: a.toString()
|
||||
})),
|
||||
...(telemetry.digital !== undefined
|
||||
? [
|
||||
{
|
||||
type: FieldType.STRING,
|
||||
name: "digital",
|
||||
length: 2,
|
||||
value: telemetry.digital.toString()
|
||||
}
|
||||
]
|
||||
: []),
|
||||
{
|
||||
type: FieldType.CHAR,
|
||||
name: "telemetry end",
|
||||
length: 1,
|
||||
value: "|"
|
||||
}
|
||||
);
|
||||
}
|
||||
} catch {
|
||||
// Invalid telemetry format, ignore
|
||||
}
|
||||
}
|
||||
|
||||
// Export comment with extras fields removed, if any were parsed.
|
||||
if (commentOffset > 0 && commentBefore !== undefined && commentBefore.length > 0) {
|
||||
extras.comment = commentBefore.substring(commentOffset) + comment;
|
||||
} else if (commentBefore !== undefined && commentBefore.length > 0) {
|
||||
extras.comment = commentBefore + comment;
|
||||
} else {
|
||||
extras.comment = comment.substring(commentOffset);
|
||||
}
|
||||
|
||||
if (withStructure) {
|
||||
const commentBeforeFields: Field[] = commentBefore
|
||||
? [
|
||||
{
|
||||
type: FieldType.STRING,
|
||||
name: "comment",
|
||||
length: commentBefore.length
|
||||
}
|
||||
]
|
||||
: [];
|
||||
|
||||
const commentFields: Field[] = comment
|
||||
? [
|
||||
{
|
||||
type: FieldType.STRING,
|
||||
name: "comment",
|
||||
length: comment.length
|
||||
}
|
||||
]
|
||||
: [];
|
||||
|
||||
// Insert the altitude fields at the correct position in the comment section based on where the altitude token was located in the original comment. If there was no altitude token, put all fields at the start of the comment section.
|
||||
extras.fields = [...beforeFields, ...commentBeforeFields, ...altitudeFields, ...fields, ...commentFields];
|
||||
}
|
||||
|
||||
return extras as Extras;
|
||||
};
|
||||
|
||||
export const attachExtras = (payload: Payload, extras: Extras): void => {
|
||||
if ("position" in payload && payload.position) {
|
||||
if (extras.altitude !== undefined) {
|
||||
payload.position.altitude = extras.altitude;
|
||||
}
|
||||
if (extras.range !== undefined) {
|
||||
payload.position.range = extras.range;
|
||||
}
|
||||
if (extras.phg !== undefined) {
|
||||
payload.position.phg = extras.phg;
|
||||
}
|
||||
if (extras.dfs !== undefined) {
|
||||
payload.position.dfs = extras.dfs;
|
||||
}
|
||||
if (extras.cse !== undefined && payload.position.course === undefined) {
|
||||
payload.position.course = extras.cse;
|
||||
}
|
||||
if (extras.spd !== undefined && payload.position.speed === undefined) {
|
||||
payload.position.speed = extras.spd;
|
||||
}
|
||||
}
|
||||
if ("altitude" in payload && payload.altitude === undefined && extras.altitude !== undefined) {
|
||||
payload.altitude = extras.altitude;
|
||||
}
|
||||
if ("range" in payload && payload.range === undefined && extras.range !== undefined) {
|
||||
payload.range = extras.range;
|
||||
}
|
||||
if ("phg" in payload && payload.phg === undefined && extras.phg !== undefined) {
|
||||
payload.phg = extras.phg;
|
||||
}
|
||||
if ("dfs" in payload && payload.dfs === undefined && extras.dfs !== undefined) {
|
||||
payload.dfs = extras.dfs;
|
||||
}
|
||||
if ("course" in payload && payload.course === undefined && extras.cse !== undefined) {
|
||||
payload.course = extras.cse;
|
||||
}
|
||||
if ("speed" in payload && payload.speed === undefined && extras.spd !== undefined) {
|
||||
payload.speed = extras.spd;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Decodes a Base91 Telemetry extension string (delimited by '|') into its components.
|
||||
*
|
||||
* @param ext The string between the '|' delimiters (e.g. 'ss11', 'ss112233', 'ss1122334455!"')
|
||||
* @returns An object with sequence, analog (array), and optional digital (number)
|
||||
*/
|
||||
export const decodeTelemetry = (ext: string): ITelemetry => {
|
||||
if (!ext || ext.length < 4) throw new Error("Telemetry extension too short");
|
||||
// Must be even length, at least 4 (2 for seq, 2 for ch1)
|
||||
if (ext.length % 2 !== 0) throw new Error("Telemetry extension must have even length");
|
||||
|
||||
// Sequence counter is always first 2 chars
|
||||
const sequence = base91ToNumber(ext.slice(0, 2));
|
||||
const analog: number[] = [];
|
||||
let i = 2;
|
||||
// If there are more than 12 chars, last pair is digital
|
||||
let digital: number | undefined = undefined;
|
||||
const analogPairs = Math.min(Math.floor((ext.length - 2) / 2), 5);
|
||||
for (let j = 0; j < analogPairs; j++, i += 2) {
|
||||
analog.push(base91ToNumber(ext.slice(i, i + 2)));
|
||||
}
|
||||
// If there are 2 chars left after 5 analogs, it's digital
|
||||
if (ext.length === 14) {
|
||||
digital = base91ToNumber(ext.slice(12, 14));
|
||||
}
|
||||
return {
|
||||
sequence,
|
||||
analog,
|
||||
digital
|
||||
};
|
||||
};
|
||||
149
src/payload.item.ts
Normal file
149
src/payload.item.ts
Normal file
@@ -0,0 +1,149 @@
|
||||
import { FieldType, type Segment } from "@hamradio/packet";
|
||||
|
||||
import { DO_NOT_ARCHIVE_MARKER, DataType, type IPosition, type ItemPayload, type Payload } from "./frame.types";
|
||||
import { attachExtras, decodeCommentExtras } from "./payload.extras";
|
||||
import { isCompressedPosition, parseCompressedPosition, parseUncompressedPosition } from "./payload.position";
|
||||
import Timestamp from "./timestamp";
|
||||
|
||||
export const decodeItemPayload = (
|
||||
raw: string,
|
||||
withStructure: boolean = false
|
||||
): {
|
||||
payload: Payload | null;
|
||||
segment?: Segment[];
|
||||
} => {
|
||||
// Item format is similar to Object but name may be 3-9 chars (stored in a 9-char field)
|
||||
// Example: )NNN... where ) is data type, next 9 chars are name, then state char, then timestamp, then position
|
||||
if (raw.length < 12) return { payload: null }; // minimal: 1 + 3 + 1 + 7
|
||||
|
||||
let offset = 1; // skip data type identifier ')'
|
||||
const segment: Segment[] = withStructure ? [] : [];
|
||||
|
||||
// Read 9-char name field (pad/truncate as present)
|
||||
const rawName = raw.substring(offset, offset + 9);
|
||||
const name = rawName.trimEnd();
|
||||
if (withStructure) {
|
||||
segment.push({
|
||||
name: "item name",
|
||||
data: new TextEncoder().encode(rawName).buffer,
|
||||
isString: true,
|
||||
fields: [{ type: FieldType.STRING, name: "name", length: 9 }]
|
||||
});
|
||||
}
|
||||
offset += 9;
|
||||
|
||||
// State character: '*' = alive, '_' = killed
|
||||
const stateChar = raw.charAt(offset);
|
||||
if (stateChar !== "*" && stateChar !== "_") {
|
||||
return { payload: null };
|
||||
}
|
||||
const alive = stateChar === "*";
|
||||
if (withStructure) {
|
||||
segment.push({
|
||||
name: "item state",
|
||||
data: new TextEncoder().encode(stateChar).buffer,
|
||||
isString: true,
|
||||
fields: [
|
||||
{
|
||||
type: FieldType.CHAR,
|
||||
name: "State (* alive, _ killed)",
|
||||
length: 1
|
||||
}
|
||||
]
|
||||
});
|
||||
}
|
||||
offset += 1;
|
||||
|
||||
// Timestamp (7 chars)
|
||||
const timeStr = raw.substring(offset, offset + 7);
|
||||
const { timestamp, segment: timestampSection } = Timestamp.fromString(timeStr.substring(offset), withStructure);
|
||||
if (!timestamp) return { payload: null };
|
||||
if (timestampSection) segment.push(timestampSection);
|
||||
offset += 7;
|
||||
|
||||
const isCompressed = isCompressedPosition(raw.substring(offset));
|
||||
|
||||
// eslint-disable-next-line no-useless-assignment
|
||||
let position: IPosition | null = null;
|
||||
// eslint-disable-next-line no-useless-assignment
|
||||
let consumed = 0;
|
||||
|
||||
if (isCompressed) {
|
||||
const { position: compressed, segment: compressedSection } = parseCompressedPosition(
|
||||
raw.substring(offset),
|
||||
withStructure
|
||||
);
|
||||
if (!compressed) return { payload: null };
|
||||
|
||||
position = {
|
||||
latitude: compressed.latitude,
|
||||
longitude: compressed.longitude,
|
||||
symbol: compressed.symbol,
|
||||
altitude: compressed.altitude
|
||||
};
|
||||
consumed = 13;
|
||||
|
||||
if (compressedSection) segment.push(compressedSection);
|
||||
} else {
|
||||
const { position: uncompressed, segment: uncompressedSection } = parseUncompressedPosition(
|
||||
raw.substring(offset),
|
||||
withStructure
|
||||
);
|
||||
if (!uncompressed) return { payload: null };
|
||||
|
||||
position = {
|
||||
latitude: uncompressed.latitude,
|
||||
longitude: uncompressed.longitude,
|
||||
symbol: uncompressed.symbol,
|
||||
ambiguity: uncompressed.ambiguity
|
||||
};
|
||||
consumed = 19;
|
||||
|
||||
if (uncompressedSection) segment.push(uncompressedSection);
|
||||
}
|
||||
|
||||
offset += consumed;
|
||||
const remainder = raw.substring(offset);
|
||||
const doNotArchive = remainder.includes(DO_NOT_ARCHIVE_MARKER);
|
||||
let comment = remainder;
|
||||
|
||||
const extras = decodeCommentExtras(comment, withStructure);
|
||||
comment = extras.comment;
|
||||
|
||||
if (comment) {
|
||||
position.comment = comment;
|
||||
if (withStructure) {
|
||||
segment.push({
|
||||
name: "comment",
|
||||
data: new TextEncoder().encode(remainder).buffer,
|
||||
isString: true,
|
||||
fields: extras.fields || []
|
||||
});
|
||||
}
|
||||
} else if (withStructure && extras.fields) {
|
||||
// No free-text comment, but extras fields exist: emit comment-only segment
|
||||
segment.push({
|
||||
name: "comment",
|
||||
data: new TextEncoder().encode(remainder).buffer,
|
||||
isString: true,
|
||||
fields: extras.fields || []
|
||||
});
|
||||
}
|
||||
|
||||
const payload: ItemPayload = {
|
||||
type: DataType.Item,
|
||||
doNotArchive,
|
||||
name,
|
||||
alive,
|
||||
position
|
||||
};
|
||||
attachExtras(payload, extras);
|
||||
|
||||
if (withStructure) {
|
||||
return { payload, segment };
|
||||
}
|
||||
|
||||
return { payload };
|
||||
};
|
||||
|
||||
export default decodeItemPayload;
|
||||
94
src/payload.message.ts
Normal file
94
src/payload.message.ts
Normal file
@@ -0,0 +1,94 @@
|
||||
import { FieldType, type Segment } from "@hamradio/packet";
|
||||
|
||||
import { DO_NOT_ARCHIVE_MARKER, DataType, type MessagePayload, type Payload } from "./frame.types";
|
||||
|
||||
export const decodeMessagePayload = (
|
||||
rawPayload: string,
|
||||
withStructure: boolean = false
|
||||
): {
|
||||
payload: Payload | null;
|
||||
segment?: Segment[];
|
||||
} => {
|
||||
// Message format: :AAAAAAAAA[ ]:message text
|
||||
// where AAAAAAAAA is a 9-character recipient field (padded with spaces)
|
||||
if (rawPayload.length < 2) return { payload: null };
|
||||
|
||||
let offset = 1; // skip ':' data type
|
||||
const segments: Segment[] = withStructure ? [] : [];
|
||||
|
||||
// Attempt to read a 9-char recipient field if present
|
||||
let recipient = "";
|
||||
if (rawPayload.length >= offset + 1) {
|
||||
// Try to read up to 9 chars for recipient, but stop early if a ':' separator appears
|
||||
const look = rawPayload.substring(offset, Math.min(offset + 9, rawPayload.length));
|
||||
const sepIdx = look.indexOf(":");
|
||||
let raw = look;
|
||||
if (sepIdx !== -1) {
|
||||
raw = look.substring(0, sepIdx);
|
||||
} else if (look.length < 9 && rawPayload.length >= offset + 9) {
|
||||
// pad to full 9 chars if possible
|
||||
raw = rawPayload.substring(offset, offset + 9);
|
||||
} else if (look.length === 9) {
|
||||
raw = look;
|
||||
}
|
||||
|
||||
recipient = raw.trimEnd();
|
||||
if (withStructure) {
|
||||
segments.push({
|
||||
name: "recipient",
|
||||
data: new TextEncoder().encode(raw).buffer,
|
||||
isString: true,
|
||||
fields: [{ type: FieldType.STRING, name: "to", length: 9 }]
|
||||
});
|
||||
}
|
||||
|
||||
// Advance offset past the raw we consumed
|
||||
offset += raw.length;
|
||||
// If there was a ':' immediately after the consumed raw, skip it as separator
|
||||
if (rawPayload.charAt(offset) === ":") {
|
||||
offset += 1;
|
||||
} else if (sepIdx !== -1) {
|
||||
// Shouldn't normally happen, but ensure we advance past separator
|
||||
offset += 1;
|
||||
}
|
||||
}
|
||||
|
||||
// After recipient there is typically a space and a colon separator before the text
|
||||
// Find the first ':' after the recipient (it separates the address field from the text)
|
||||
let textStart = rawPayload.indexOf(":", offset);
|
||||
if (textStart === -1) {
|
||||
// No explicit separator; skip any spaces and take remainder as text
|
||||
while (rawPayload.charAt(offset) === " " && offset < rawPayload.length) offset += 1;
|
||||
textStart = offset - 1;
|
||||
}
|
||||
|
||||
let text = "";
|
||||
if (textStart >= 0 && textStart + 1 <= rawPayload.length) {
|
||||
text = rawPayload.substring(textStart + 1);
|
||||
}
|
||||
const doNotArchive = text.includes(DO_NOT_ARCHIVE_MARKER);
|
||||
|
||||
const payload: MessagePayload = {
|
||||
type: DataType.Message,
|
||||
variant: "message",
|
||||
doNotArchive,
|
||||
addressee: recipient,
|
||||
text
|
||||
};
|
||||
|
||||
if (withStructure) {
|
||||
// Emit text section
|
||||
segments.push({
|
||||
name: "text",
|
||||
data: new TextEncoder().encode(text).buffer,
|
||||
isString: true,
|
||||
fields: [{ type: FieldType.STRING, name: "text", length: text.length }]
|
||||
});
|
||||
|
||||
return { payload, segment: segments };
|
||||
}
|
||||
|
||||
return { payload };
|
||||
};
|
||||
|
||||
export default decodeMessagePayload;
|
||||
300
src/payload.mice.ts
Normal file
300
src/payload.mice.ts
Normal file
@@ -0,0 +1,300 @@
|
||||
import { FieldType, type Segment } from "@hamradio/packet";
|
||||
|
||||
import { base91ToNumber, knotsToKmh } from ".";
|
||||
import { DO_NOT_ARCHIVE_MARKER, DataType, type IAddress, MicEPayload, type Payload } from "./frame.types";
|
||||
import { attachExtras, decodeCommentExtras } from "./payload.extras";
|
||||
|
||||
/**
 * Decode a Mic-E packet (data types '`' current / "'" old).
 *
 * Mic-E splits the position between two places: the latitude, message type
 * and hemisphere/offset flags are packed into the AX.25 destination address,
 * while the information field carries longitude, speed/course and symbol.
 *
 * @param destination   AX.25 destination address carrying the encoded latitude
 * @param raw           information field, including the data type identifier
 * @param withStructure when true, also emit per-field Segment structure
 * @returns payload (null on any parse failure) and optional segments
 */
export const decodeMicEPayload = (
  destination: IAddress,
  raw: string,
  withStructure: boolean = false
): {
  payload: Payload | null;
  segment?: Segment[];
} => {
  try {
    // Mic-E encodes position in both destination address and information field
    const dest = destination.call;

    if (dest.length < 6) return { payload: null };
    if (raw.length < 9) return { payload: null }; // Need at least data type + 8 bytes

    const segments: Segment[] = withStructure ? [] : [];

    // Decode latitude from destination address (6 characters)
    const latResult = decodeMicELatitude(dest);
    if (!latResult) return { payload: null };

    const { latitude, messageType, longitudeOffset, isWest, isStandard } = latResult;

    if (withStructure) {
      segments.push({
        name: "mic-E destination",
        data: new TextEncoder().encode(dest).buffer,
        isString: true,
        fields: [
          {
            type: FieldType.STRING,
            name: "destination",
            length: dest.length
          }
        ]
      });
    }

    // Parse information field (skip data type identifier at position 0)
    let offset = 1;

    // Longitude: 3 bytes (degrees, minutes, hundredths), each offset by +28
    const lonDegRaw = raw.charCodeAt(offset) - 28;
    const lonMinRaw = raw.charCodeAt(offset + 1) - 28;
    const lonHunRaw = raw.charCodeAt(offset + 2) - 28;
    offset += 3;

    // Apply longitude offset and hemisphere.
    // The 180-189 / 190-199 folds implement the spec's degree remapping for
    // values that exceed the single-byte range once the +100 offset applies.
    let lonDeg = lonDegRaw;
    if (longitudeOffset) {
      lonDeg += 100;
    }
    if (lonDeg >= 180 && lonDeg <= 189) {
      lonDeg -= 80;
    } else if (lonDeg >= 190 && lonDeg <= 199) {
      lonDeg -= 190;
    }

    let longitude = lonDeg + lonMinRaw / 60.0 + lonHunRaw / 6000.0;
    if (isWest) {
      longitude = -longitude;
    }

    // Speed and course: 3 bytes (SP+28, DC+28, SE+28 per spec)
    const sp = raw.charCodeAt(offset) - 28;
    const dc = raw.charCodeAt(offset + 1) - 28;
    const se = raw.charCodeAt(offset + 2) - 28;
    offset += 3;

    let speed = sp * 10 + Math.floor(dc / 10); // Speed in knots
    let course = (dc % 10) * 100 + se; // Course in degrees

    // Spec wrap-around corrections for the packed speed/course encoding
    if (course >= 400) course -= 400;
    if (speed >= 800) speed -= 800;

    // Convert speed from knots to km/h
    const speedKmh = knotsToKmh(speed);

    // Symbol code and table (in that order in the Mic-E info field)
    if (raw.length < offset + 2) return { payload: null };
    const symbolCode = raw.charAt(offset);
    const symbolTable = raw.charAt(offset + 1);
    offset += 2;

    // Parse remaining data (altitude, comment, telemetry)
    const remaining = raw.substring(offset);
    const doNotArchive = remaining.includes(DO_NOT_ARCHIVE_MARKER);
    let altitude: number | undefined = undefined;
    let comment = remaining;

    // Check for altitude in old format: "xxx}" where xxx is base-91,
    // altitude = value - 10000 (metres, relative to 10 km below sea level)
    if (comment.length >= 4 && comment.charAt(3) === "}") {
      try {
        const altBase91 = comment.substring(0, 3);
        altitude = base91ToNumber(altBase91) - 10000; // Relative to 10km below mean sea level
        comment = comment.substring(4); // Remove altitude token from comment
      } catch {
        // Ignore altitude parsing errors
      }
    }

    // Parse RNG/PHG tokens from comment (defer attaching to result until created)
    const remainder = comment; // Use the remaining comment text for parsing extras
    const extras = decodeCommentExtras(remainder, withStructure);
    comment = extras.comment;

    // Data type identifier selects current ('`') vs old ("'") Mic-E
    let payloadType: DataType.MicE | DataType.MicEOld;
    switch (raw.charAt(0)) {
      case "`":
        payloadType = DataType.MicE;
        break;
      case "'":
        payloadType = DataType.MicEOld;
        break;
      default:
        return { payload: null };
    }

    const result: MicEPayload = {
      type: payloadType,
      doNotArchive,
      position: {
        latitude,
        longitude,
        symbol: {
          table: symbolTable,
          code: symbolCode
        }
      },
      messageType,
      isStandard
    };

    // Optional position attributes: only attach meaningful values.
    // NOTE(review): speed 0 (stationary) and course 0/360 (due North) are
    // dropped here — confirm that is intended rather than emitting them.
    if (speed > 0) {
      result.position.speed = speedKmh;
    }

    if (course > 0 && course < 360) {
      result.position.course = course;
    }

    if (altitude !== undefined) {
      result.position.altitude = altitude;
    }

    if (comment) {
      result.position.comment = comment;
    }

    // Attach parsed extras if present
    attachExtras(result, extras);

    if (withStructure) {
      // Information field section (bytes after data type up to comment)
      const infoData = raw.substring(1, offset);
      segments.push({
        name: "mic-E info",
        data: new TextEncoder().encode(infoData).buffer,
        isString: true,
        fields: [
          { type: FieldType.CHAR, name: "longitude deg", length: 1 },
          { type: FieldType.CHAR, name: "longitude min", length: 1 },
          { type: FieldType.CHAR, name: "longitude hundredths", length: 1 },
          { type: FieldType.CHAR, name: "speed byte", length: 1 },
          { type: FieldType.CHAR, name: "course byte 1", length: 1 },
          { type: FieldType.CHAR, name: "course byte 2", length: 1 },
          { type: FieldType.CHAR, name: "symbol code", length: 1 },
          { type: FieldType.CHAR, name: "symbol table", length: 1 }
        ]
      });

      // Comment segment covers the pre-extras remainder in both branches
      if (comment && comment.length > 0) {
        segments.push({
          name: "comment",
          data: new TextEncoder().encode(remainder).buffer,
          isString: true,
          fields: extras.fields || []
        });
      } else if (extras.fields) {
        segments.push({
          name: "comment",
          data: new TextEncoder().encode(remainder).buffer,
          isString: true,
          fields: extras.fields
        });
      }

      return { payload: result, segment: segments };
    }

    return { payload: result };
  } catch {
    // Any charCode/substring arithmetic on malformed input ends up here
    return { payload: null };
  }
};
|
||||
|
||||
const decodeMicELatitude = (
|
||||
dest: string
|
||||
): {
|
||||
latitude: number;
|
||||
messageType: string;
|
||||
longitudeOffset: boolean;
|
||||
isWest: boolean;
|
||||
isStandard: boolean;
|
||||
} | null => {
|
||||
if (dest.length < 6) return null;
|
||||
|
||||
// Each destination character encodes a latitude digit and message bits
|
||||
const digits: number[] = [];
|
||||
const messageBits: number[] = [];
|
||||
|
||||
for (let i = 0; i < 6; i++) {
|
||||
const code = dest.charCodeAt(i);
|
||||
let digit: number;
|
||||
let msgBit: number;
|
||||
|
||||
if (code >= 48 && code <= 57) {
|
||||
// '0'-'9'
|
||||
digit = code - 48;
|
||||
msgBit = 0;
|
||||
} else if (code >= 65 && code <= 74) {
|
||||
// 'A'-'J' (A=0, B=1, ... J=9)
|
||||
digit = code - 65;
|
||||
msgBit = 1;
|
||||
} else if (code === 75) {
|
||||
// 'K' means space (used for ambiguity)
|
||||
digit = 0;
|
||||
msgBit = 1;
|
||||
} else if (code === 76) {
|
||||
// 'L' means space
|
||||
digit = 0;
|
||||
msgBit = 0;
|
||||
} else if (code >= 80 && code <= 89) {
|
||||
// 'P'-'Y' custom message types (P=0, Q=1, R=2, ... Y=9)
|
||||
digit = code - 80;
|
||||
msgBit = 1;
|
||||
} else if (code === 90) {
|
||||
// 'Z' means space
|
||||
digit = 0;
|
||||
msgBit = 1;
|
||||
} else {
|
||||
return null; // Invalid character
|
||||
}
|
||||
|
||||
digits.push(digit);
|
||||
messageBits.push(msgBit);
|
||||
}
|
||||
|
||||
// Decode latitude: format is DDMM.HH (degrees, minutes, hundredths)
|
||||
const latDeg = digits[0] * 10 + digits[1];
|
||||
const latMin = digits[2] * 10 + digits[3];
|
||||
const latHun = digits[4] * 10 + digits[5];
|
||||
|
||||
let latitude = latDeg + latMin / 60.0 + latHun / 6000.0;
|
||||
|
||||
// Message bits determine hemisphere and other flags
|
||||
// Bit 3 (messageBits[3]): 0 = North, 1 = South
|
||||
// Bit 4 (messageBits[4]): 0 = West, 1 = East
|
||||
// Bit 5 (messageBits[5]): 0 = longitude offset +0, 1 = longitude offset +100
|
||||
const isNorth = messageBits[3] === 0;
|
||||
const isWest = messageBits[4] === 0;
|
||||
const longitudeOffset = messageBits[5] === 1;
|
||||
|
||||
if (!isNorth) {
|
||||
latitude = -latitude;
|
||||
}
|
||||
|
||||
// Decode message type from bits 0, 1, 2
|
||||
const msgValue = messageBits[0] * 4 + messageBits[1] * 2 + messageBits[2];
|
||||
const messageTypes = [
|
||||
"M0: Off Duty",
|
||||
"M1: En Route",
|
||||
"M2: In Service",
|
||||
"M3: Returning",
|
||||
"M4: Committed",
|
||||
"M5: Special",
|
||||
"M6: Priority",
|
||||
"M7: Emergency"
|
||||
];
|
||||
const messageType = messageTypes[msgValue] || "Unknown";
|
||||
|
||||
// Standard vs custom message indicator
|
||||
const isStandard = messageBits[0] === 1;
|
||||
|
||||
return {
|
||||
latitude,
|
||||
messageType,
|
||||
longitudeOffset,
|
||||
isWest,
|
||||
isStandard
|
||||
};
|
||||
};
|
||||
|
||||
export default decodeMicEPayload;
|
||||
161
src/payload.object.ts
Normal file
161
src/payload.object.ts
Normal file
@@ -0,0 +1,161 @@
|
||||
import { FieldType, Segment } from "@hamradio/packet";
|
||||
|
||||
import { DO_NOT_ARCHIVE_MARKER, DataType, type IPosition, ObjectPayload, type Payload } from "./frame.types";
|
||||
import { attachExtras, decodeCommentExtras } from "./payload.extras";
|
||||
import { isCompressedPosition, parseCompressedPosition, parseUncompressedPosition } from "./payload.position";
|
||||
import Timestamp from "./timestamp";
|
||||
|
||||
export const decodeObjectPayload = (
|
||||
raw: string,
|
||||
withStructure: boolean = false
|
||||
): {
|
||||
payload: Payload | null;
|
||||
segment?: Segment[];
|
||||
} => {
|
||||
try {
|
||||
// Object format: ;AAAAAAAAAcDDHHMMzDDMM.hhN/DDDMM.hhW$comment
|
||||
// ^ data type
|
||||
// 9-char name
|
||||
// alive (*) / killed (_)
|
||||
if (raw.length < 18) return { payload: null }; // 1 + 9 + 1 + 7 minimum
|
||||
|
||||
let offset = 1; // Skip data type identifier ';'
|
||||
const segment: Segment[] = withStructure ? [] : [];
|
||||
|
||||
const rawName = raw.substring(offset, offset + 9);
|
||||
const name = rawName.trimEnd();
|
||||
if (withStructure) {
|
||||
segment.push({
|
||||
name: "object",
|
||||
data: new TextEncoder().encode(rawName).buffer,
|
||||
isString: true,
|
||||
fields: [{ type: FieldType.STRING, name: "name", length: 9 }]
|
||||
});
|
||||
}
|
||||
offset += 9;
|
||||
|
||||
const stateChar = raw.charAt(offset);
|
||||
if (stateChar !== "*" && stateChar !== "_") {
|
||||
return { payload: null };
|
||||
}
|
||||
const alive = stateChar === "*";
|
||||
if (withStructure) {
|
||||
let state: string = "invalid";
|
||||
if (stateChar === "*") {
|
||||
state = "alive";
|
||||
} else if (stateChar === "_") {
|
||||
state = "killed";
|
||||
}
|
||||
segment[segment.length - 1].data = new TextEncoder().encode(raw.substring(offset - 9, offset + 1)).buffer;
|
||||
segment[segment.length - 1].fields.push({
|
||||
type: FieldType.CHAR,
|
||||
name: "state",
|
||||
length: 1,
|
||||
value: state
|
||||
});
|
||||
}
|
||||
offset += 1;
|
||||
|
||||
const timeStr = raw.substring(offset, offset + 7);
|
||||
const { timestamp, segment: timestampSection } = Timestamp.fromString(timeStr, withStructure);
|
||||
if (!timestamp) {
|
||||
return { payload: null };
|
||||
}
|
||||
if (timestampSection) {
|
||||
segment.push(timestampSection);
|
||||
}
|
||||
offset += 7;
|
||||
|
||||
const isCompressed = isCompressedPosition(raw.substring(offset));
|
||||
|
||||
let position: IPosition | null = null;
|
||||
let consumed = 0;
|
||||
|
||||
if (isCompressed) {
|
||||
const { position: compressed, segment: compressedSection } = parseCompressedPosition(
|
||||
raw.substring(offset),
|
||||
withStructure
|
||||
);
|
||||
if (!compressed) return { payload: null };
|
||||
|
||||
position = {
|
||||
latitude: compressed.latitude,
|
||||
longitude: compressed.longitude,
|
||||
symbol: compressed.symbol,
|
||||
altitude: compressed.altitude
|
||||
};
|
||||
consumed = 13;
|
||||
|
||||
if (compressedSection) {
|
||||
segment.push(compressedSection);
|
||||
}
|
||||
} else {
|
||||
const { position: uncompressed, segment: uncompressedSection } = parseUncompressedPosition(
|
||||
raw.substring(offset),
|
||||
withStructure
|
||||
);
|
||||
if (!uncompressed) return { payload: null };
|
||||
|
||||
position = {
|
||||
latitude: uncompressed.latitude,
|
||||
longitude: uncompressed.longitude,
|
||||
symbol: uncompressed.symbol,
|
||||
ambiguity: uncompressed.ambiguity
|
||||
};
|
||||
consumed = 19;
|
||||
|
||||
if (uncompressedSection) {
|
||||
segment.push(uncompressedSection);
|
||||
}
|
||||
}
|
||||
|
||||
offset += consumed;
|
||||
const remainder = raw.substring(offset);
|
||||
const doNotArchive = remainder.includes(DO_NOT_ARCHIVE_MARKER);
|
||||
let comment = remainder;
|
||||
|
||||
// Parse RNG/PHG tokens
|
||||
const extras = decodeCommentExtras(comment, withStructure);
|
||||
comment = extras.comment;
|
||||
|
||||
if (comment) {
|
||||
position.comment = comment;
|
||||
|
||||
if (withStructure) {
|
||||
segment.push({
|
||||
name: "comment",
|
||||
data: new TextEncoder().encode(remainder).buffer,
|
||||
isString: true,
|
||||
fields: extras.fields || []
|
||||
});
|
||||
}
|
||||
} else if (withStructure && extras.fields) {
|
||||
segment.push({
|
||||
name: "comment",
|
||||
data: new TextEncoder().encode(remainder).buffer,
|
||||
isString: true,
|
||||
fields: extras.fields || []
|
||||
});
|
||||
}
|
||||
|
||||
const payload: ObjectPayload = {
|
||||
type: DataType.Object,
|
||||
doNotArchive,
|
||||
name,
|
||||
timestamp,
|
||||
alive,
|
||||
position
|
||||
};
|
||||
attachExtras(payload, extras);
|
||||
|
||||
if (withStructure) {
|
||||
return { payload, segment };
|
||||
}
|
||||
|
||||
return { payload };
|
||||
} catch {
|
||||
return { payload: null };
|
||||
}
|
||||
};
|
||||
|
||||
export default decodeObjectPayload;
|
||||
344
src/payload.position.ts
Normal file
344
src/payload.position.ts
Normal file
@@ -0,0 +1,344 @@
|
||||
import { FieldType, type Segment } from "@hamradio/packet";
|
||||
|
||||
import { DO_NOT_ARCHIVE_MARKER, DataType, type IPosition, type Payload, type PositionPayload } from "./frame.types";
|
||||
import { base91ToNumber, feetToMeters } from "./parser";
|
||||
import { attachExtras, decodeCommentExtras } from "./payload.extras";
|
||||
import Position from "./position";
|
||||
import Timestamp from "./timestamp";
|
||||
|
||||
export const decodePositionPayload = (
|
||||
dataType: string,
|
||||
raw: string,
|
||||
withStructure: boolean = false
|
||||
): { payload: Payload | null; segment?: Segment[] } => {
|
||||
try {
|
||||
const hasTimestamp = dataType === "/" || dataType === "@";
|
||||
const messaging = dataType === "=" || dataType === "@";
|
||||
let offset = 1; // Skip data type identifier
|
||||
|
||||
// Build structure as we parse
|
||||
const structure: Segment[] = withStructure ? [] : [];
|
||||
|
||||
let timestamp: Timestamp | undefined = undefined;
|
||||
|
||||
// Parse timestamp if present (7 characters: DDHHMMz or HHMMSSh or MMDDHMMM)
|
||||
if (hasTimestamp) {
|
||||
if (raw.length < 8) return { payload: null };
|
||||
const timeStr = raw.substring(offset, offset + 7);
|
||||
const { timestamp: parsedTimestamp, segment: timestampSegment } = Timestamp.fromString(timeStr, withStructure);
|
||||
timestamp = parsedTimestamp;
|
||||
|
||||
if (timestampSegment) {
|
||||
structure.push(timestampSegment);
|
||||
}
|
||||
|
||||
offset += 7;
|
||||
}
|
||||
|
||||
// Need at least enough characters for compressed position (13) or
|
||||
// uncompressed (19). Allow parsing to continue if compressed-length is present.
|
||||
if (raw.length < offset + 13) return { payload: null };
|
||||
|
||||
// Check if compressed format
|
||||
const isCompressed = isCompressedPosition(raw.substring(offset));
|
||||
|
||||
let position: Position;
|
||||
let comment = "";
|
||||
|
||||
if (isCompressed) {
|
||||
// Compressed format: /YYYYXXXX$csT
|
||||
const { position: compressed, segment: compressedSegment } = parseCompressedPosition(
|
||||
raw.substring(offset),
|
||||
withStructure
|
||||
);
|
||||
if (!compressed) return { payload: null };
|
||||
|
||||
position = new Position({
|
||||
latitude: compressed.latitude,
|
||||
longitude: compressed.longitude,
|
||||
symbol: compressed.symbol
|
||||
});
|
||||
|
||||
if (compressed.altitude !== undefined) {
|
||||
position.altitude = compressed.altitude;
|
||||
}
|
||||
|
||||
if (compressedSegment) {
|
||||
structure.push(compressedSegment);
|
||||
}
|
||||
|
||||
offset += 13; // Compressed position is 13 chars
|
||||
comment = raw.substring(offset);
|
||||
} else {
|
||||
// Uncompressed format: DDMMmmH/DDDMMmmH$
|
||||
const { position: uncompressed, segment: uncompressedSegment } = parseUncompressedPosition(
|
||||
raw.substring(offset),
|
||||
withStructure
|
||||
);
|
||||
if (!uncompressed) return { payload: null };
|
||||
|
||||
position = new Position({
|
||||
latitude: uncompressed.latitude,
|
||||
longitude: uncompressed.longitude,
|
||||
symbol: uncompressed.symbol
|
||||
});
|
||||
|
||||
if (uncompressed.ambiguity !== undefined) {
|
||||
position.ambiguity = uncompressed.ambiguity;
|
||||
}
|
||||
|
||||
if (uncompressedSegment) {
|
||||
structure.push(uncompressedSegment);
|
||||
}
|
||||
|
||||
offset += 19; // Uncompressed position is 19 chars
|
||||
comment = raw.substring(offset);
|
||||
}
|
||||
|
||||
// Extract Altitude, CSE/SPD, RNG and PHG tokens and optionally emit sections
|
||||
const remainder = comment; // Use the remaining comment text for parsing extras
|
||||
const doNotArchive = remainder.includes(DO_NOT_ARCHIVE_MARKER);
|
||||
const extras = decodeCommentExtras(remainder, withStructure);
|
||||
comment = extras.comment;
|
||||
|
||||
if (comment) {
|
||||
position.comment = comment;
|
||||
|
||||
// Emit comment section as we parse
|
||||
if (withStructure) {
|
||||
structure.push({
|
||||
name: "comment",
|
||||
data: new TextEncoder().encode(remainder).buffer,
|
||||
isString: true,
|
||||
fields: extras.fields || []
|
||||
});
|
||||
}
|
||||
} else if (withStructure && extras.fields) {
|
||||
// No free-text comment, but extras were present: emit a comment section containing only fields
|
||||
structure.push({
|
||||
name: "comment",
|
||||
data: new TextEncoder().encode("").buffer,
|
||||
isString: true,
|
||||
fields: extras.fields || []
|
||||
});
|
||||
}
|
||||
|
||||
let payloadType:
|
||||
| DataType.PositionNoTimestampNoMessaging
|
||||
| DataType.PositionNoTimestampWithMessaging
|
||||
| DataType.PositionWithTimestampNoMessaging
|
||||
| DataType.PositionWithTimestampWithMessaging;
|
||||
switch (dataType) {
|
||||
case "!":
|
||||
payloadType = DataType.PositionNoTimestampNoMessaging;
|
||||
break;
|
||||
case "=":
|
||||
payloadType = DataType.PositionNoTimestampWithMessaging;
|
||||
break;
|
||||
case "/":
|
||||
payloadType = DataType.PositionWithTimestampNoMessaging;
|
||||
break;
|
||||
case "@":
|
||||
payloadType = DataType.PositionWithTimestampWithMessaging;
|
||||
break;
|
||||
default:
|
||||
return { payload: null };
|
||||
}
|
||||
|
||||
const payload: PositionPayload = {
|
||||
type: payloadType,
|
||||
doNotArchive,
|
||||
timestamp,
|
||||
position,
|
||||
messaging
|
||||
};
|
||||
attachExtras(payload, extras);
|
||||
|
||||
if (withStructure) {
|
||||
return { payload, segment: structure };
|
||||
}
|
||||
|
||||
return { payload };
|
||||
} catch {
|
||||
return { payload: null };
|
||||
}
|
||||
};
|
||||
|
||||
export const isCompressedPosition = (data: string): boolean => {
|
||||
if (data.length < 13) return false;
|
||||
|
||||
// First prefer uncompressed detection by attempting an uncompressed parse.
|
||||
// Uncompressed APRS positions do not have a fixed symbol table separator;
|
||||
// position 8 is a symbol table identifier and may vary.
|
||||
if (data.length >= 19) {
|
||||
const uncompressed = parseUncompressedPosition(data, false);
|
||||
if (uncompressed.position) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// For compressed format, check if the position part looks like base-91 encoded data
|
||||
// Compressed format: STYYYYXXXXcsT where ST is symbol table/code
|
||||
// Base-91 chars are in range 33-124 (! to |)
|
||||
const lat1 = data.charCodeAt(1);
|
||||
const lat2 = data.charCodeAt(2);
|
||||
const lon1 = data.charCodeAt(5);
|
||||
const lon2 = data.charCodeAt(6);
|
||||
|
||||
return (
|
||||
lat1 >= 33 && lat1 <= 124 && lat2 >= 33 && lat2 <= 124 && lon1 >= 33 && lon1 <= 124 && lon2 >= 33 && lon2 <= 124
|
||||
);
|
||||
};
|
||||
|
||||
export const parseCompressedPosition = (
|
||||
data: string,
|
||||
withStructure: boolean = false
|
||||
): {
|
||||
position: IPosition | null;
|
||||
segment?: Segment;
|
||||
} => {
|
||||
if (data.length < 13) return { position: null };
|
||||
|
||||
const symbolTable = data.charAt(0);
|
||||
const symbolCode = data.charAt(9);
|
||||
|
||||
// Extract base-91 encoded position (4 characters each)
|
||||
const latStr = data.substring(1, 5);
|
||||
const lonStr = data.substring(5, 9);
|
||||
|
||||
try {
|
||||
// Decode base-91 encoded latitude and longitude
|
||||
const latBase91 = base91ToNumber(latStr);
|
||||
const lonBase91 = base91ToNumber(lonStr);
|
||||
|
||||
// Convert to degrees
|
||||
const latitude = 90 - latBase91 / 380926;
|
||||
const longitude = -180 + lonBase91 / 190463;
|
||||
|
||||
const result: IPosition = {
|
||||
latitude,
|
||||
longitude,
|
||||
symbol: {
|
||||
table: symbolTable,
|
||||
code: symbolCode
|
||||
}
|
||||
};
|
||||
|
||||
// Check for compressed altitude (csT format)
|
||||
const cs = data.charAt(10);
|
||||
const t = data.charCodeAt(11);
|
||||
|
||||
if (cs === " " && t >= 33 && t <= 124) {
|
||||
// Compressed altitude: altitude = 1.002^(t-33) feet
|
||||
const altFeet = Math.pow(1.002, t - 33);
|
||||
result.altitude = feetToMeters(altFeet); // Convert to meters
|
||||
}
|
||||
|
||||
const section: Segment | undefined = withStructure
|
||||
? {
|
||||
name: "position",
|
||||
data: new TextEncoder().encode(data.substring(0, 13)).buffer,
|
||||
isString: true,
|
||||
fields: [
|
||||
{ type: FieldType.CHAR, length: 1, name: "symbol table" },
|
||||
{ type: FieldType.STRING, length: 4, name: "latitude" },
|
||||
{ type: FieldType.STRING, length: 4, name: "longitude" },
|
||||
{ type: FieldType.CHAR, length: 1, name: "symbol code" },
|
||||
{ type: FieldType.CHAR, length: 1, name: "course/speed type" },
|
||||
{ type: FieldType.CHAR, length: 1, name: "course/speed value" },
|
||||
{ type: FieldType.CHAR, length: 1, name: "altitude" }
|
||||
]
|
||||
}
|
||||
: undefined;
|
||||
|
||||
return { position: result, segment: section };
|
||||
} catch {
|
||||
return { position: null };
|
||||
}
|
||||
};
|
||||
|
||||
export const parseUncompressedPosition = (
|
||||
data: string,
|
||||
withStructure: boolean = false
|
||||
): {
|
||||
position: IPosition | null;
|
||||
segment?: Segment;
|
||||
} => {
|
||||
if (data.length < 19) return { position: null };
|
||||
|
||||
// Format: DDMMmmH/DDDMMmmH$ where H is hemisphere, $ is symbol code
|
||||
// Positions: 0-7 (latitude), 8 (symbol table), 9-17 (longitude), 18 (symbol code)
|
||||
// Spaces may replace rightmost digits for ambiguity/privacy
|
||||
|
||||
const latStr = data.substring(0, 8); // DDMMmmH (8 chars: 49 03.50 N)
|
||||
const symbolTable = data.charAt(8);
|
||||
const lonStr = data.substring(9, 18); // DDDMMmmH (9 chars: 072 01.75 W)
|
||||
const symbolCode = data.charAt(18);
|
||||
|
||||
// Count and handle ambiguity (spaces in minutes part replace rightmost digits)
|
||||
let ambiguity = 0;
|
||||
const latSpaceCount = (latStr.match(/ /g) || []).length;
|
||||
const lonSpaceCount = (lonStr.match(/ /g) || []).length;
|
||||
|
||||
if (latSpaceCount > 0 || lonSpaceCount > 0) {
|
||||
// Use the maximum space count (they should be the same, but be defensive)
|
||||
ambiguity = Math.max(latSpaceCount, lonSpaceCount);
|
||||
}
|
||||
|
||||
// Replace spaces with zeros for parsing
|
||||
const latStrNormalized = latStr.replace(/ /g, "0");
|
||||
const lonStrNormalized = lonStr.replace(/ /g, "0");
|
||||
|
||||
// Parse latitude
|
||||
const latDeg = parseInt(latStrNormalized.substring(0, 2), 10);
|
||||
const latMin = parseFloat(latStrNormalized.substring(2, 7));
|
||||
const latHem = latStrNormalized.charAt(7);
|
||||
|
||||
if (isNaN(latDeg) || isNaN(latMin)) return { position: null };
|
||||
if (latHem !== "N" && latHem !== "S") return { position: null };
|
||||
|
||||
let latitude = latDeg + latMin / 60;
|
||||
if (latHem === "S") latitude = -latitude;
|
||||
|
||||
// Parse longitude
|
||||
const lonDeg = parseInt(lonStrNormalized.substring(0, 3), 10);
|
||||
const lonMin = parseFloat(lonStrNormalized.substring(3, 8));
|
||||
const lonHem = lonStrNormalized.charAt(8);
|
||||
|
||||
if (isNaN(lonDeg) || isNaN(lonMin)) return { position: null };
|
||||
if (lonHem !== "E" && lonHem !== "W") return { position: null };
|
||||
|
||||
let longitude = lonDeg + lonMin / 60;
|
||||
if (lonHem === "W") longitude = -longitude;
|
||||
|
||||
const result: IPosition = {
|
||||
latitude,
|
||||
longitude,
|
||||
symbol: {
|
||||
table: symbolTable,
|
||||
code: symbolCode
|
||||
}
|
||||
};
|
||||
|
||||
if (ambiguity > 0) {
|
||||
result.ambiguity = ambiguity;
|
||||
}
|
||||
|
||||
const segment: Segment | undefined = withStructure
|
||||
? {
|
||||
name: "position",
|
||||
data: new TextEncoder().encode(data.substring(0, 19)).buffer,
|
||||
isString: true,
|
||||
fields: [
|
||||
{ type: FieldType.STRING, length: 8, name: "latitude" },
|
||||
{ type: FieldType.CHAR, length: 1, name: "symbol table" },
|
||||
{ type: FieldType.STRING, length: 9, name: "longitude" },
|
||||
{ type: FieldType.CHAR, length: 1, name: "symbol code" }
|
||||
]
|
||||
}
|
||||
: undefined;
|
||||
|
||||
return { position: result, segment };
|
||||
};
|
||||
|
||||
export default decodePositionPayload;
|
||||
69
src/payload.query.ts
Normal file
69
src/payload.query.ts
Normal file
@@ -0,0 +1,69 @@
|
||||
import { FieldType, type Segment } from "@hamradio/packet";
|
||||
|
||||
import { DataType, type Payload, type QueryPayload } from "./frame.types";
|
||||
|
||||
export const decodeQueryPayload = (
|
||||
raw: string,
|
||||
withStructure: boolean = false
|
||||
): {
|
||||
payload: Payload | null;
|
||||
segment?: Segment[];
|
||||
} => {
|
||||
try {
|
||||
if (raw.length < 2) return { payload: null };
|
||||
|
||||
// Skip data type identifier '?'
|
||||
const segments: Segment[] = withStructure ? [] : [];
|
||||
|
||||
// Remaining payload
|
||||
const rest = raw.substring(1).trim();
|
||||
if (!rest) return { payload: null };
|
||||
|
||||
// Query type is the first token (up to first space)
|
||||
const firstSpace = rest.indexOf(" ");
|
||||
let queryType = "";
|
||||
let target: string | undefined = undefined;
|
||||
|
||||
if (firstSpace === -1) {
|
||||
queryType = rest;
|
||||
} else {
|
||||
queryType = rest.substring(0, firstSpace);
|
||||
target = rest.substring(firstSpace + 1).trim();
|
||||
if (target === "") target = undefined;
|
||||
}
|
||||
|
||||
if (!queryType) return { payload: null };
|
||||
|
||||
if (withStructure) {
|
||||
// Emit query type section
|
||||
segments.push({
|
||||
name: "query type",
|
||||
data: new TextEncoder().encode(queryType).buffer,
|
||||
isString: true,
|
||||
fields: [{ type: FieldType.STRING, name: "type", length: queryType.length }]
|
||||
});
|
||||
|
||||
if (target) {
|
||||
segments.push({
|
||||
name: "query target",
|
||||
data: new TextEncoder().encode(target).buffer,
|
||||
isString: true,
|
||||
fields: [{ type: FieldType.STRING, name: "target", length: target.length }]
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const payload: QueryPayload = {
|
||||
type: DataType.Query,
|
||||
queryType,
|
||||
...(target ? { target } : {})
|
||||
};
|
||||
|
||||
if (withStructure) return { payload, segment: segments };
|
||||
return { payload };
|
||||
} catch {
|
||||
return { payload: null };
|
||||
}
|
||||
};
|
||||
|
||||
export default decodeQueryPayload;
|
||||
161
src/payload.rawgps.ts
Normal file
161
src/payload.rawgps.ts
Normal file
@@ -0,0 +1,161 @@
|
||||
import { FieldType, type Segment } from "@hamradio/packet";
|
||||
import { DTM, GGA, INmeaSentence, Decoder as NmeaDecoder, RMC } from "extended-nmea";
|
||||
|
||||
import { DataType, type IPosition, type Payload, type RawGPSPayload } from "./frame.types";
|
||||
|
||||
/**
 * Decode an APRS raw GPS payload ('$' data type).
 *
 * The text after '$' is kept verbatim as `sentence`. Position extraction is
 * best-effort: first via the extended-nmea Decoder (RMC/GGA/DTM sentences),
 * then via a minimal built-in parser for RMC/GGA when that path yields
 * nothing. A payload is returned even when no position can be extracted.
 *
 * NOTE(review): speed/course/altitude are copied through unconverted — the
 * units are whatever the NMEA sentence carries; confirm consumers expect that.
 *
 * @param raw           complete payload text including the leading '$'
 * @param withStructure when true, also emit Segment descriptors
 * @returns payload (null on too-short input or exception) and optional segments
 */
export const decodeRawGPSPayload = (
  raw: string,
  withStructure: boolean = false
): {
  payload: Payload | null;
  segment?: Segment[];
} => {
  try {
    if (raw.length < 2) return { payload: null };

    // Raw GPS payloads start with '$' followed by an NMEA sentence
    const sentence = raw.substring(1).trim();

    // Attempt to parse with extended-nmea Decoder to extract position (best-effort)
    let parsed: INmeaSentence | null = null;
    try {
      // Re-add the '$' the APRS identifier consumed, if needed.
      const full = sentence.startsWith("$") ? sentence : `$${sentence}`;
      parsed = NmeaDecoder.decode(full);
    } catch {
      // ignore parse errors - accept any sentence as raw-gps per APRS
    }

    const payload: RawGPSPayload = {
      type: DataType.RawGPS,
      sentence
    };

    // If parse produced latitude/longitude, attach structured position.
    // Otherwise fallback to a minimal NMEA parser for common sentences (RMC, GGA).
    if (
      parsed &&
      (parsed instanceof RMC || parsed instanceof GGA || parsed instanceof DTM) &&
      parsed.latitude &&
      parsed.longitude
    ) {
      // extended-nmea latitude/longitude are GeoCoordinate objects with
      // fields { degrees, decimal, quadrant }
      const latObj = parsed.latitude;
      const lonObj = parsed.longitude;
      // Convert degrees + decimal minutes to signed decimal degrees.
      const lat = latObj.degrees + (Number(latObj.decimal) || 0) / 60.0;
      const lon = lonObj.degrees + (Number(lonObj.decimal) || 0) / 60.0;
      const latitude = latObj.quadrant === "S" ? -lat : lat;
      const longitude = lonObj.quadrant === "W" ? -lon : lon;

      const pos: IPosition = {
        latitude,
        longitude
      };

      // altitude — `altitude` (checked second) wins when both are present
      if ("altMean" in parsed && parsed.altMean !== undefined) {
        pos.altitude = Number(parsed.altMean);
      }
      if ("altitude" in parsed && parsed.altitude !== undefined) {
        pos.altitude = Number(parsed.altitude);
      }

      // speed/course (RMC fields)
      if ("speedOverGround" in parsed && parsed.speedOverGround !== undefined) {
        pos.speed = Number(parsed.speedOverGround);
      }
      if ("courseOverGround" in parsed && parsed.courseOverGround !== undefined) {
        pos.course = Number(parsed.courseOverGround);
      }

      payload.position = pos;
    } else {
      // Fallback: hand-rolled comma-split parse of RMC/GGA field positions.
      try {
        const full = sentence.startsWith("$") ? sentence : `$${sentence}`;
        // Drop the trailing "*hh" NMEA checksum before splitting on commas.
        const withoutChecksum = full.split("*")[0];
        const parts = withoutChecksum.split(",");
        const header = parts[0].slice(1).toUpperCase();

        // "DDMM.mmm"/"DDDMM.mmm" + hemisphere -> signed decimal degrees;
        // latitude uses 2 degree digits, longitude 3.
        const parseCoord = (coord: string, hemi: string) => {
          if (!coord || coord === "") return undefined;
          const degDigits = hemi === "N" || hemi === "S" ? 2 : 3;
          if (coord.length <= degDigits) return undefined;
          const degPart = coord.slice(0, degDigits);
          const minPart = coord.slice(degDigits);
          const degrees = parseFloat(degPart);
          const mins = parseFloat(minPart);
          if (Number.isNaN(degrees) || Number.isNaN(mins)) return undefined;
          let dec = degrees + mins / 60.0;
          if (hemi === "S" || hemi === "W") dec = -dec;
          return dec;
        };

        if (header.endsWith("RMC")) {
          // RMC: lat=field 3, N/S=4, lon=5, E/W=6, speed=7, course=8
          const lat = parseCoord(parts[3], parts[4]);
          const lon = parseCoord(parts[5], parts[6]);
          if (lat !== undefined && lon !== undefined) {
            const pos: IPosition = { latitude: lat, longitude: lon };
            if (parts[7]) pos.speed = Number(parts[7]);
            if (parts[8]) pos.course = Number(parts[8]);
            payload.position = pos;
          }
        } else if (header.endsWith("GGA")) {
          // GGA: lat=field 2, N/S=3, lon=4, E/W=5, altitude=9
          const lat = parseCoord(parts[2], parts[3]);
          const lon = parseCoord(parts[4], parts[5]);
          if (lat !== undefined && lon !== undefined) {
            const pos: IPosition = { latitude: lat, longitude: lon };
            if (parts[9]) pos.altitude = Number(parts[9]);
            payload.position = pos;
          }
        }
      } catch {
        // ignore fallback parse errors
      }
    }

    if (withStructure) {
      const segments: Segment[] = [
        {
          name: "raw-gps",
          data: new TextEncoder().encode(sentence).buffer,
          isString: true,
          fields: [
            {
              type: FieldType.STRING,
              name: "sentence",
              length: sentence.length
            }
          ]
        }
      ];

      if (payload.position) {
        // The position segment carries a JSON serialization of the position;
        // its field lengths describe the stringified lat/lon, not the JSON.
        segments.push({
          name: "raw-gps-position",
          data: new TextEncoder().encode(JSON.stringify(payload.position)).buffer,
          isString: true,
          fields: [
            {
              type: FieldType.STRING,
              name: "latitude",
              length: String(payload.position.latitude).length
            },
            {
              type: FieldType.STRING,
              name: "longitude",
              length: String(payload.position.longitude).length
            }
          ]
        });
      }

      return { payload, segment: segments };
    }

    return { payload };
  } catch {
    return { payload: null };
  }
};
|
||||
|
||||
export default decodeRawGPSPayload;
|
||||
79
src/payload.status.ts
Normal file
79
src/payload.status.ts
Normal file
@@ -0,0 +1,79 @@
|
||||
import { FieldType, type Segment } from "@hamradio/packet";
|
||||
|
||||
import { DO_NOT_ARCHIVE_MARKER, DataType, type Payload, type StatusPayload } from "./frame.types";
|
||||
import Timestamp from "./timestamp";
|
||||
|
||||
export const decodeStatusPayload = (
|
||||
raw: string,
|
||||
withStructure: boolean = false
|
||||
): {
|
||||
payload: Payload | null;
|
||||
segment?: Segment[];
|
||||
} => {
|
||||
// Status payload: optional 7-char timestamp followed by free text.
|
||||
// We'll also detect a trailing Maidenhead locator (4 or 6 chars) and expose it.
|
||||
const offsetBase = 1; // skip data type identifier '>'
|
||||
if (raw.length <= offsetBase) return { payload: null };
|
||||
|
||||
let offset = offsetBase;
|
||||
const segments: Segment[] = withStructure ? [] : [];
|
||||
|
||||
// Try parse optional timestamp (7 chars)
|
||||
if (raw.length >= offset + 7) {
|
||||
const timeStr = raw.substring(offset, offset + 7);
|
||||
const { timestamp, segment: tsSegment } = Timestamp.fromString(timeStr, withStructure);
|
||||
if (timestamp) {
|
||||
offset += 7;
|
||||
if (tsSegment) segments.push(tsSegment);
|
||||
}
|
||||
}
|
||||
|
||||
// Remaining text is status text
|
||||
const text = raw.substring(offset);
|
||||
if (!text) return { payload: null };
|
||||
const doNotArchive = text.includes(DO_NOT_ARCHIVE_MARKER);
|
||||
|
||||
// Detect trailing Maidenhead locator (4 or 6 chars) at end of text separated by space
|
||||
let maidenhead: string | undefined;
|
||||
const mhMatch = text.match(/\s([A-Ra-r]{2}\d{2}(?:[A-Ra-r]{2})?)$/);
|
||||
let statusText = text;
|
||||
if (mhMatch) {
|
||||
maidenhead = mhMatch[1].toUpperCase();
|
||||
statusText = text.slice(0, mhMatch.index).trimEnd();
|
||||
}
|
||||
|
||||
const payload: StatusPayload = {
|
||||
type: DataType.Status,
|
||||
doNotArchive,
|
||||
timestamp: undefined,
|
||||
text: statusText
|
||||
};
|
||||
|
||||
// If timestamp was parsed, attach it
|
||||
if (segments.length > 0) {
|
||||
// The first segment may be timestamp; parseTimestamp returns the Timestamp object
|
||||
// Re-parse to obtain timestamp object (cheap) - alternate would be to capture earlier
|
||||
const timeSegment = segments.find((s) => s.name === "timestamp");
|
||||
if (timeSegment) {
|
||||
const tsStr = new TextDecoder().decode(timeSegment.data);
|
||||
const { timestamp } = Timestamp.fromString(tsStr, false);
|
||||
if (timestamp) payload.timestamp = timestamp;
|
||||
}
|
||||
}
|
||||
|
||||
if (maidenhead) payload.maidenhead = maidenhead;
|
||||
|
||||
if (withStructure) {
|
||||
segments.push({
|
||||
name: "status",
|
||||
data: new TextEncoder().encode(text).buffer,
|
||||
isString: true,
|
||||
fields: [{ type: FieldType.STRING, name: "text", length: text.length }]
|
||||
});
|
||||
return { payload, segment: segments };
|
||||
}
|
||||
|
||||
return { payload };
|
||||
};
|
||||
|
||||
export default decodeStatusPayload;
|
||||
197
src/payload.telemetry.ts
Normal file
197
src/payload.telemetry.ts
Normal file
@@ -0,0 +1,197 @@
|
||||
import { FieldType, type Segment } from "@hamradio/packet";
|
||||
|
||||
import {
|
||||
DataType,
|
||||
type Payload,
|
||||
type TelemetryBitSensePayload,
|
||||
type TelemetryCoefficientsPayload,
|
||||
type TelemetryDataPayload,
|
||||
type TelemetryParameterPayload,
|
||||
type TelemetryUnitPayload
|
||||
} from "./frame.types";
|
||||
|
||||
/**
 * Decode an APRS telemetry payload; the first character of `raw` is the data
 * type identifier and is skipped.
 *
 * Variants recognised (module-specific text conventions):
 *   "#<seq> <a1,a2,...> <digital>"     -> variant "data"
 *   "PARAM <names>"                    -> variant "parameters"
 *   "UNIT <units>"                     -> variant "unit"
 *   "COEFF A:<...> B:<...> C:<...>"    -> variant "coefficients"
 *   "BITS <sense> [project name]"      -> variant "bitsense"
 *
 * NOTE(review): these textual conventions differ from the on-air APRS
 * telemetry formats ("T#...", ":PARM." etc.) — confirm against the encoder.
 *
 * @param raw           complete payload text including the type identifier
 * @param withStructure when true, also emit Segment descriptors
 * @returns payload (null when no variant matches) and optional segments
 */
export const decodeTelemetryPayload = (
  raw: string,
  withStructure: boolean = false
): {
  payload: Payload | null;
  segment?: Segment[];
} => {
  try {
    if (raw.length < 2) return { payload: null };

    const rest = raw.substring(1).trim();
    if (!rest) return { payload: null };

    const segments: Segment[] = withStructure ? [] : [];

    // Telemetry data: convention used here: starts with '#' then sequence then analogs and digital
    if (rest.startsWith("#")) {
      // Tokens after '#': [0] sequence, [1] comma-separated analog values,
      // [2] digital channel value. Missing tokens fall back to []/0.
      const parts = rest.substring(1).trim().split(/\s+/);
      const seq = parseInt(parts[0], 10);
      let analog: number[] = [];
      let digital = 0;

      if (parts.length >= 2) {
        // analogs as comma separated
        analog = parts[1].split(",").map((v) => parseFloat(v));
      }

      if (parts.length >= 3) {
        digital = parseInt(parts[2], 10);
      }

      if (withStructure) {
        segments.push({
          name: "telemetry sequence",
          data: new TextEncoder().encode(String(seq)).buffer,
          isString: true,
          fields: [
            {
              type: FieldType.STRING,
              name: "sequence",
              length: String(seq).length
            }
          ]
        });

        segments.push({
          name: "telemetry analog",
          data: new TextEncoder().encode(parts[1] || "").buffer,
          isString: true,
          fields: [
            {
              type: FieldType.STRING,
              name: "analogs",
              length: (parts[1] || "").length
            }
          ]
        });

        segments.push({
          name: "telemetry digital",
          data: new TextEncoder().encode(String(digital)).buffer,
          isString: true,
          fields: [
            {
              type: FieldType.STRING,
              name: "digital",
              length: String(digital).length
            }
          ]
        });
      }

      // NaN sequence/digital values are normalised to 0.
      const payload: TelemetryDataPayload = {
        type: DataType.TelemetryData,
        variant: "data",
        sequence: isNaN(seq) ? 0 : seq,
        analog,
        digital: isNaN(digital) ? 0 : digital
      };

      if (withStructure) return { payload, segment: segments };
      return { payload };
    }

    // Telemetry parameters: 'PARAM' keyword
    if (/^PARAM/i.test(rest)) {
      const after = rest.replace(/^PARAM\s*/i, "");
      // Names may be separated by commas and/or whitespace.
      const names = after.split(/[,\s]+/).filter(Boolean);
      if (withStructure) {
        segments.push({
          name: "telemetry parameters",
          data: new TextEncoder().encode(after).buffer,
          isString: true,
          fields: [{ type: FieldType.STRING, name: "names", length: after.length }]
        });
      }
      const payload: TelemetryParameterPayload = {
        type: DataType.TelemetryData,
        variant: "parameters",
        names
      };
      if (withStructure) return { payload, segment: segments };
      return { payload };
    }

    // Telemetry units: 'UNIT'
    if (/^UNIT/i.test(rest)) {
      const after = rest.replace(/^UNIT\s*/i, "");
      const units = after.split(/[,\s]+/).filter(Boolean);
      if (withStructure) {
        segments.push({
          name: "telemetry units",
          data: new TextEncoder().encode(after).buffer,
          isString: true,
          fields: [{ type: FieldType.STRING, name: "units", length: after.length }]
        });
      }
      const payload: TelemetryUnitPayload = {
        type: DataType.TelemetryData,
        variant: "unit",
        units
      };
      if (withStructure) return { payload, segment: segments };
      return { payload };
    }

    // Telemetry coefficients: 'COEFF' a:,b:,c:
    if (/^COEFF/i.test(rest)) {
      const after = rest.replace(/^COEFF\s*/i, "");
      // Each list is introduced by "A:"/"B:"/"C:" and runs to whitespace or ';'.
      const aMatch = after.match(/A:([^\s;]+)/i);
      const bMatch = after.match(/B:([^\s;]+)/i);
      const cMatch = after.match(/C:([^\s;]+)/i);
      const parseList = (s?: string) => (s ? s.split(",").map((v) => parseFloat(v)) : []);
      const coefficients = {
        a: parseList(aMatch?.[1]),
        b: parseList(bMatch?.[1]),
        c: parseList(cMatch?.[1])
      };
      if (withStructure) {
        segments.push({
          name: "telemetry coefficients",
          data: new TextEncoder().encode(after).buffer,
          isString: true,
          fields: [{ type: FieldType.STRING, name: "coeffs", length: after.length }]
        });
      }
      const payload: TelemetryCoefficientsPayload = {
        type: DataType.TelemetryData,
        variant: "coefficients",
        coefficients
      };
      if (withStructure) return { payload, segment: segments };
      return { payload };
    }

    // Telemetry bitsense/project: 'BITS' <number> [project]
    // NOTE(review): /^BITS?/ also accepts a bare "BIT" prefix — confirm intended.
    if (/^BITS?/i.test(rest)) {
      const parts = rest.split(/\s+/).slice(1);
      const sense = parts.length > 0 ? parseInt(parts[0], 10) : 0;
      const projectName = parts.length > 1 ? parts.slice(1).join(" ") : undefined;
      if (withStructure) {
        segments.push({
          name: "telemetry bitsense",
          data: new TextEncoder().encode(rest).buffer,
          isString: true,
          fields: [{ type: FieldType.STRING, name: "bitsense", length: rest.length }]
        });
      }
      const payload: TelemetryBitSensePayload = {
        type: DataType.TelemetryData,
        variant: "bitsense",
        sense: isNaN(sense) ? 0 : sense,
        ...(projectName ? { projectName } : {})
      };
      if (withStructure) return { payload, segment: segments };
      return { payload };
    }

    // No recognised variant.
    return { payload: null };
  } catch {
    return { payload: null };
  }
};
|
||||
|
||||
export default decodeTelemetryPayload;
|
||||
135
src/payload.thirdparty.ts
Normal file
135
src/payload.thirdparty.ts
Normal file
@@ -0,0 +1,135 @@
|
||||
import { FieldType, type Segment } from "@hamradio/packet";
|
||||
|
||||
import { Frame } from "./frame";
|
||||
import { DataType, type Payload, type ThirdPartyPayload, UserDefinedPayload } from "./frame.types";
|
||||
|
||||
export const decodeUserDefinedPayload = (
|
||||
raw: string,
|
||||
withStructure: boolean = false
|
||||
): {
|
||||
payload: Payload | null;
|
||||
segment?: Segment[];
|
||||
} => {
|
||||
try {
|
||||
if (raw.length < 2) return { payload: null };
|
||||
|
||||
// content after '{'
|
||||
const rest = raw.substring(1);
|
||||
|
||||
// user packet type is first token (up to first space) often like '01' or 'TYP'
|
||||
const match = rest.match(/^([^\s]+)\s*(.*)$/s);
|
||||
let userPacketType = "";
|
||||
let data = "";
|
||||
if (match) {
|
||||
userPacketType = match[1] || "";
|
||||
data = (match[2] || "").trim();
|
||||
}
|
||||
|
||||
const payload: UserDefinedPayload = {
|
||||
type: DataType.UserDefined,
|
||||
userPacketType,
|
||||
data
|
||||
} as const;
|
||||
|
||||
if (withStructure) {
|
||||
const segments: Segment[] = [];
|
||||
segments.push({
|
||||
name: "user-defined",
|
||||
data: new TextEncoder().encode(rest).buffer,
|
||||
isString: true,
|
||||
fields: [{ type: FieldType.STRING, name: "raw", length: rest.length }]
|
||||
});
|
||||
|
||||
segments.push({
|
||||
name: "user-packet-type",
|
||||
data: new TextEncoder().encode(userPacketType).buffer,
|
||||
isString: true,
|
||||
fields: [
|
||||
{
|
||||
type: FieldType.STRING,
|
||||
name: "type",
|
||||
length: userPacketType.length
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
segments.push({
|
||||
name: "user-data",
|
||||
data: new TextEncoder().encode(data).buffer,
|
||||
isString: true,
|
||||
fields: [{ type: FieldType.STRING, name: "data", length: data.length }]
|
||||
});
|
||||
|
||||
return { payload, segment: segments };
|
||||
}
|
||||
|
||||
return { payload };
|
||||
} catch {
|
||||
return { payload: null };
|
||||
}
|
||||
};
|
||||
|
||||
export const decodeThirdPartyPayload = (
|
||||
raw: string,
|
||||
withStructure: boolean = false
|
||||
): {
|
||||
payload: Payload | null;
|
||||
segment?: Segment[];
|
||||
} => {
|
||||
try {
|
||||
if (raw.length < 2) return { payload: null };
|
||||
|
||||
// Content after '}' is the encapsulated third-party frame or raw data
|
||||
const rest = raw.substring(1);
|
||||
|
||||
// Attempt to parse the embedded text as a full APRS frame (route:payload)
|
||||
let nestedFrame: Frame | undefined;
|
||||
try {
|
||||
// parseFrame is defined in this module; use Frame.parse to attempt parse
|
||||
nestedFrame = Frame.parse(rest);
|
||||
} catch {
|
||||
nestedFrame = undefined;
|
||||
}
|
||||
|
||||
const payload: ThirdPartyPayload = {
|
||||
type: DataType.ThirdParty,
|
||||
comment: rest,
|
||||
...(nestedFrame ? { frame: nestedFrame } : {})
|
||||
} as const;
|
||||
|
||||
if (withStructure) {
|
||||
const segments: Segment[] = [];
|
||||
|
||||
segments.push({
|
||||
name: "third-party",
|
||||
data: new TextEncoder().encode(rest).buffer,
|
||||
isString: true,
|
||||
fields: [{ type: FieldType.STRING, name: "raw", length: rest.length }]
|
||||
});
|
||||
|
||||
if (nestedFrame) {
|
||||
// Include a short section pointing to the nested frame's data (stringified)
|
||||
const nf = nestedFrame;
|
||||
const nfStr = `${nf.source.toString()}>${nf.destination.toString()}:${nf.payload}`;
|
||||
segments.push({
|
||||
name: "third-party-nested-frame",
|
||||
data: new TextEncoder().encode(nfStr).buffer,
|
||||
isString: true,
|
||||
fields: [
|
||||
{
|
||||
type: FieldType.STRING,
|
||||
name: "nested",
|
||||
length: nfStr.length
|
||||
}
|
||||
]
|
||||
});
|
||||
}
|
||||
|
||||
return { payload, segment: segments };
|
||||
}
|
||||
|
||||
return { payload };
|
||||
} catch {
|
||||
return { payload: null };
|
||||
}
|
||||
};
|
||||
129
src/payload.weather.ts
Normal file
129
src/payload.weather.ts
Normal file
@@ -0,0 +1,129 @@
|
||||
import { FieldType, type Segment } from "@hamradio/packet";
|
||||
|
||||
import { DataType, type IPosition, type Payload, type WeatherPayload } from "./frame.types";
|
||||
import { isCompressedPosition, parseCompressedPosition, parseUncompressedPosition } from "./payload.position";
|
||||
import Timestamp from "./timestamp";
|
||||
|
||||
/**
 * Decode an APRS weather payload ('_' data type).
 *
 * Layout handled: '_' + optional 7-char timestamp + optional position
 * (13-char compressed or 19-char uncompressed) + weather text. Tokens
 * recognised in the trailing text:
 *   DDD/SSS[gGGG]  wind direction / speed, optional gust
 *   tNNN           temperature
 *   rNNN pNNN PNNN rain last hour / last 24h / since midnight
 *   hNN            humidity
 *   bNNNN[N]       barometric pressure
 * The full trailing text is also kept verbatim in `comment`.
 *
 * NOTE(review): the payload type is always WeatherReportNoPosition, even when
 * a position was parsed — confirm this is intended.
 *
 * @param raw           complete payload text including the leading '_'
 * @param withStructure when true, also emit Segment descriptors
 * @returns payload (null on malformed input) and optional segments
 */
export const decodeWeatherPayload = (
  raw: string,
  withStructure: boolean = false
): {
  payload: Payload | null;
  segment?: Segment[];
} => {
  try {
    if (raw.length < 2) return { payload: null };

    let offset = 1; // skip '_' data type
    const segments: Segment[] = withStructure ? [] : [];

    // Try optional timestamp (7 chars)
    let timestamp;
    if (raw.length >= offset + 7) {
      const timeStr = raw.substring(offset, offset + 7);
      const parsed = Timestamp.fromString(timeStr, withStructure);
      timestamp = parsed.timestamp;
      if (parsed.segment) {
        segments.push(parsed.segment);
      }
      // Only advance past the 7 characters when they parsed as a timestamp.
      if (timestamp) offset += 7;
    }

    // Try optional position following timestamp
    let position: IPosition | undefined;
    let consumed = 0;
    const tail = raw.substring(offset);
    if (tail.length > 0) {
      // If the tail starts with a wind token like DDD/SSS, treat it as weather data
      // and do not attempt to parse it as a position (avoids mis-detecting wind
      // values as compressed position fields).
      if (/^\s*\d{3}\/\d{1,3}/.test(tail)) {
        // no position present; leave consumed = 0
      } else if (isCompressedPosition(tail)) {
        const parsed = parseCompressedPosition(tail, withStructure);
        if (parsed.position) {
          // Copy only the fields a weather position carries.
          position = {
            latitude: parsed.position.latitude,
            longitude: parsed.position.longitude,
            symbol: parsed.position.symbol,
            altitude: parsed.position.altitude
          };
          if (parsed.segment) segments.push(parsed.segment);
          // Compressed position is a fixed 13 characters.
          consumed = 13;
        }
      } else {
        const parsed = parseUncompressedPosition(tail, withStructure);
        if (parsed.position) {
          position = {
            latitude: parsed.position.latitude,
            longitude: parsed.position.longitude,
            symbol: parsed.position.symbol,
            ambiguity: parsed.position.ambiguity
          };
          if (parsed.segment) segments.push(parsed.segment);
          // Uncompressed position is a fixed 19 characters.
          consumed = 19;
        }
      }
    }

    offset += consumed;

    // Everything after timestamp/position is the weather data text.
    const rest = raw.substring(offset).trim();

    const payload: WeatherPayload = {
      type: DataType.WeatherReportNoPosition
    };
    if (timestamp) payload.timestamp = timestamp;
    if (position) payload.position = position;

    if (rest && rest.length > 0) {
      // Parse common tokens
      // Wind: DDD/SSS [gGGG]
      const windMatch = rest.match(/(\d{3})\/(\d{1,3})(?:g(\d{1,3}))?/);
      if (windMatch) {
        payload.windDirection = parseInt(windMatch[1], 10);
        payload.windSpeed = parseInt(windMatch[2], 10);
        if (windMatch[3]) payload.windGust = parseInt(windMatch[3], 10);
      }

      // Temperature: tNNN (F)
      const tempMatch = rest.match(/t(-?\d{1,3})/i);
      if (tempMatch) payload.temperature = parseInt(tempMatch[1], 10);

      // Rain: rNNN (last hour), pNNN (24h), PNNN (since midnight) - values are hundredths of inch
      const rMatch = rest.match(/r(\d{3})/);
      if (rMatch) payload.rainLastHour = parseInt(rMatch[1], 10);
      const pMatch = rest.match(/p(\d{3})/);
      if (pMatch) payload.rainLast24Hours = parseInt(pMatch[1], 10);
      const PMatch = rest.match(/P(\d{3})/);
      if (PMatch) payload.rainSinceMidnight = parseInt(PMatch[1], 10);

      // Humidity: hNN
      const hMatch = rest.match(/h(\d{1,3})/);
      if (hMatch) payload.humidity = parseInt(hMatch[1], 10);

      // Pressure: bXXXX or bXXXXX (tenths of millibar)
      const bMatch = rest.match(/b(\d{4,5})/);
      if (bMatch) payload.pressure = parseInt(bMatch[1], 10);

      // Add raw comment
      payload.comment = rest;

      if (withStructure) {
        segments.push({
          name: "weather",
          data: new TextEncoder().encode(rest).buffer,
          isString: true,
          fields: [{ type: FieldType.STRING, name: "text", length: rest.length }]
        });
      }
    }

    if (withStructure) return { payload, segment: segments };
    return { payload };
  } catch {
    return { payload: null };
  }
};
|
||||
|
||||
export default decodeWeatherPayload;
|
||||
@@ -76,3 +76,5 @@ export class Position implements IPosition {
|
||||
return R * c; // Distance in meters
|
||||
}
|
||||
}
|
||||
|
||||
export default Position;
|
||||
|
||||
189
src/timestamp.ts
Normal file
189
src/timestamp.ts
Normal file
@@ -0,0 +1,189 @@
|
||||
import { FieldType, Segment } from "@hamradio/packet";
|
||||
|
||||
import { ITimestamp } from "./frame.types";
|
||||
|
||||
export class Timestamp implements ITimestamp {
|
||||
day?: number;
|
||||
month?: number;
|
||||
hours: number;
|
||||
minutes: number;
|
||||
seconds?: number;
|
||||
format: "DHM" | "HMS" | "MDHM";
|
||||
zulu?: boolean;
|
||||
|
||||
  /**
   * Build a timestamp from its already-parsed components (no range checks
   * are performed here — values are used as parsed from the APRS string).
   *
   * @param hours   hour component
   * @param minutes minute component
   * @param format  which APRS timestamp layout the value came from
   * @param options optional components: day (DHM/MDHM), month (MDHM),
   *                seconds (HMS) and whether the time is zulu/UTC
   */
  constructor(
    hours: number,
    minutes: number,
    format: "DHM" | "HMS" | "MDHM",
    options: {
      day?: number;
      month?: number;
      seconds?: number;
      zulu?: boolean;
    } = {}
  ) {
    this.hours = hours;
    this.minutes = minutes;
    this.format = format;
    this.day = options.day;
    this.month = options.month;
    this.seconds = options.seconds;
    this.zulu = options.zulu;
  }
|
||||
|
||||
  /**
   * Convert APRS timestamp to JavaScript Date object
   * Note: APRS timestamps don't include year, so we use current year
   * For DHM format, we find the most recent occurrence of that day
   * For HMS format, we use current date
   * For MDHM format, we use the specified month/day in current year
   *
   * The result is clamped to the past: whenever the naive construction lands
   * in the future, the previous month (DHM), day (HMS) or year (MDHM) is
   * used instead.
   */
  toDate(): Date {
    const now = new Date();

    if (this.format === "DHM") {
      // Day-Hour-Minute format (UTC)
      // Find the most recent occurrence of this day
      const currentYear = this.zulu ? now.getUTCFullYear() : now.getFullYear();
      const currentMonth = this.zulu ? now.getUTCMonth() : now.getMonth();

      let date: Date;
      if (this.zulu) {
        date = new Date(Date.UTC(currentYear, currentMonth, this.day!, this.hours, this.minutes, 0, 0));
      } else {
        date = new Date(currentYear, currentMonth, this.day!, this.hours, this.minutes, 0, 0);
      }

      // If the date is in the future, it's from last month
      // (currentMonth - 1 may be -1; the Date constructor rolls it over)
      if (date > now) {
        if (this.zulu) {
          date = new Date(Date.UTC(currentYear, currentMonth - 1, this.day!, this.hours, this.minutes, 0, 0));
        } else {
          date = new Date(currentYear, currentMonth - 1, this.day!, this.hours, this.minutes, 0, 0);
        }
      }

      return date;
    } else if (this.format === "HMS") {
      // Hour-Minute-Second format (UTC)
      // Use current date
      if (this.zulu) {
        const date = new Date();
        date.setUTCHours(this.hours, this.minutes, this.seconds || 0, 0);

        // If time is in the future, it's from yesterday
        if (date > now) {
          date.setUTCDate(date.getUTCDate() - 1);
        }

        return date;
      } else {
        // Non-zulu HMS: same clamping logic against local wall-clock time.
        const date = new Date();
        date.setHours(this.hours, this.minutes, this.seconds || 0, 0);

        if (date > now) {
          date.setDate(date.getDate() - 1);
        }

        return date;
      }
    } else {
      // MDHM format: Month-Day-Hour-Minute (local time)
      // NOTE(review): `this.day!` assumes day is set for DHM/MDHM values;
      // fromString supplies it for both of those formats.
      const currentYear = now.getFullYear();
      let date = new Date(currentYear, (this.month || 1) - 1, this.day!, this.hours, this.minutes, 0, 0);

      // If date is in the future, it's from last year
      if (date > now) {
        date = new Date(currentYear - 1, (this.month || 1) - 1, this.day!, this.hours, this.minutes, 0, 0);
      }

      return date;
    }
  }
|
||||
|
||||
/**
 * Parses an APRS timestamp string (APRS 1.01 spec, chapter 6).
 *
 * Supported formats:
 *  - "DDHHMMz"  — Day/Hours/Minutes, zulu (UTC)
 *  - "DDHHMM/"  — Day/Hours/Minutes, local station time
 *  - "HHMMSSh"  — Hours/Minutes/Seconds, always zulu
 *  - "MMDDHHMM" — Month/Day/Hours/Minutes, 8 digits, no indicator
 *
 * Note: the previous implementation parsed the 7-character "…/" form as
 * MDHM and read the minutes from substring(6, 8) — which is the "/"
 * indicator itself, producing NaN (and a 9-byte field layout for a 7-byte
 * buffer). Per the spec a trailing "/" marks DHM in local time; MDHM is
 * the indicator-less 8-digit form handled first below.
 *
 * @param str           Raw timestamp characters from the information field.
 * @param withStructure When true, also return a dissection segment
 *                      describing the byte layout of the timestamp.
 * @returns The parsed timestamp (`undefined` when `str` is not a valid
 *          timestamp) plus an optional structure segment.
 */
static fromString(
  str: string,
  withStructure: boolean = false
): {
  timestamp: Timestamp | undefined;
  segment?: Segment;
} {
  // MDHM format: Month-Day-Hour-Minute, exactly 8 digits, no indicator.
  if (/^\d{8}$/.test(str)) {
    // NOTE(review): the spec defines MDHM as zulu, but toDate() treats
    // MDHM as local time; keep zulu: false for consistency with toDate().
    const timestamp = new Timestamp(parseInt(str.substring(4, 6), 10), parseInt(str.substring(6, 8), 10), "MDHM", {
      month: parseInt(str.substring(0, 2), 10),
      day: parseInt(str.substring(2, 4), 10),
      zulu: false
    });

    const segment = withStructure
      ? {
          name: "timestamp",
          data: new TextEncoder().encode(str).buffer,
          isString: true,
          fields: [
            { type: FieldType.STRING, name: "month (MM)", length: 2 },
            { type: FieldType.STRING, name: "day (DD)", length: 2 },
            { type: FieldType.STRING, name: "hour (HH)", length: 2 },
            { type: FieldType.STRING, name: "minute (MM)", length: 2 }
          ]
        }
      : undefined;

    return { timestamp, segment };
  }

  // All remaining formats are exactly 7 characters: 6 digits + indicator.
  if (str.length !== 7) return { timestamp: undefined };

  const timeType = str.charAt(6);

  if (timeType === "z" || timeType === "/") {
    // DHM format: Day-Hour-Minute ("z" = UTC, "/" = local station time)
    const timestamp = new Timestamp(parseInt(str.substring(2, 4), 10), parseInt(str.substring(4, 6), 10), "DHM", {
      day: parseInt(str.substring(0, 2), 10),
      zulu: timeType === "z"
    });

    const segment = withStructure
      ? {
          name: "timestamp",
          data: new TextEncoder().encode(str).buffer,
          isString: true,
          fields: [
            { type: FieldType.STRING, name: "day (DD)", length: 2 },
            { type: FieldType.STRING, name: "hour (HH)", length: 2 },
            { type: FieldType.STRING, name: "minute (MM)", length: 2 },
            { type: FieldType.CHAR, name: "timezone indicator", length: 1 }
          ]
        }
      : undefined;

    return { timestamp, segment };
  } else if (timeType === "h") {
    // HMS format: Hour-Minute-Second (always UTC)
    const timestamp = new Timestamp(parseInt(str.substring(0, 2), 10), parseInt(str.substring(2, 4), 10), "HMS", {
      seconds: parseInt(str.substring(4, 6), 10),
      zulu: true
    });

    const segment = withStructure
      ? {
          name: "timestamp",
          data: new TextEncoder().encode(str).buffer,
          isString: true,
          fields: [
            { type: FieldType.STRING, name: "hour (HH)", length: 2 },
            { type: FieldType.STRING, name: "minute (MM)", length: 2 },
            { type: FieldType.STRING, name: "second (SS)", length: 2 },
            { type: FieldType.CHAR, name: "timezone indicator", length: 1 }
          ]
        }
      : undefined;

    return { timestamp, segment };
  }

  // Unknown indicator character — not an APRS timestamp.
  return { timestamp: undefined };
}
|
||||
}
|
||||
|
||||
export default Timestamp;
|
||||
22
test/deviceid.test.ts
Normal file
22
test/deviceid.test.ts
Normal file
@@ -0,0 +1,22 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
|
||||
import { getDeviceID } from "../src/deviceid";
|
||||
import { Frame } from "../src/frame";
|
||||
|
||||
describe("DeviceID parsing", () => {
|
||||
it("parses known device ID from tocall", () => {
|
||||
const data = "WB2OSZ-5>APDW17:!4237.14NS07120.83W#PHG7140";
|
||||
const frame = Frame.fromString(data);
|
||||
const deviceID = getDeviceID(frame.destination);
|
||||
expect(deviceID).not.toBeNull();
|
||||
expect(deviceID?.tocall).toBe("APDW??");
|
||||
expect(deviceID?.vendor).toBe("WB2OSZ");
|
||||
});
|
||||
|
||||
it("returns null for unknown device ID", () => {
|
||||
const data = "CALL>WORLD:!4237.14NS07120.83W#PHG7140";
|
||||
const frame = Frame.fromString(data);
|
||||
const deviceID = getDeviceID(frame.destination);
|
||||
expect(deviceID).toBeNull();
|
||||
});
|
||||
});
|
||||
@@ -1,97 +0,0 @@
|
||||
import type { Dissected, Field, Segment } from "@hamradio/packet";
|
||||
import { describe, expect, it } from "vitest";
|
||||
|
||||
import { Frame } from "../src/frame";
|
||||
import type { PositionPayload } from "../src/frame.types";
|
||||
|
||||
describe("APRS extras test vectors (RNG / PHG / CSE/DDD / SPD / DFS)", () => {
|
||||
it("parses PHG from position with messaging (spec vector 1)", () => {
|
||||
const raw = "NOCALL>APZRAZ,qAS,PA2RDK-14:=5154.19N/00627.77E>PHG500073 de NOCALL";
|
||||
const frame = Frame.fromString(raw);
|
||||
const res = frame.decode(true) as { payload: PositionPayload | null; structure: Dissected };
|
||||
const { payload } = res;
|
||||
|
||||
expect(payload).not.toBeNull();
|
||||
expect(payload!.position.phg).toBeDefined();
|
||||
// PHG500073 parsed per spec: p=5 -> 25 W, h='0' -> 10 ft, g='0' -> 0 dBi
|
||||
expect(payload!.position.phg!.power).toBe(25);
|
||||
expect(payload!.position.phg!.height).toBeCloseTo(3.048, 3);
|
||||
expect(payload!.position.phg!.gain).toBe(0);
|
||||
});
|
||||
|
||||
it("parses PHG token with hyphen separators (spec vector 2)", () => {
|
||||
const raw = "NOCALL>APRS,TCPIP*,qAC,NINTH:;P-PA3RD *061000z5156.26NP00603.29E#PHG0210DAPNET";
|
||||
const frame = Frame.fromString(raw);
|
||||
const res = frame.decode(true) as { payload: PositionPayload | null; structure: Dissected };
|
||||
const { payload, structure } = res;
|
||||
|
||||
expect(payload).not.toBeNull();
|
||||
// Use a spec PHG example: PHG0210 -> p=0 -> power 0 W, h=2 -> 40 ft
|
||||
expect(payload!.position.phg).toBeDefined();
|
||||
expect(payload!.position.phg!.power).toBe(0);
|
||||
expect(payload!.position.phg!.height).toBeCloseTo(12.192, 3);
|
||||
|
||||
const commentSeg = structure.find((s) => /comment/i.test(String(s.name))) as Segment | undefined;
|
||||
expect(commentSeg).toBeDefined();
|
||||
const fields = (commentSeg!.fields ?? []) as Field[];
|
||||
const hasPHG = fields.some((f) => f.name === "PHG");
|
||||
expect(hasPHG).toBe(true);
|
||||
});
|
||||
|
||||
it("parses DFS token with long numeric strength", () => {
|
||||
const raw = "N0CALL>APRS,WIDE1-1:!4500.00N/07000.00W#DFS2360/Your Comment";
|
||||
const frame = Frame.fromString(raw);
|
||||
const res = frame.decode(true) as { payload: PositionPayload | null; structure: Dissected };
|
||||
const { payload, structure } = res;
|
||||
|
||||
expect(payload).not.toBeNull();
|
||||
expect(payload!.position.dfs).toBeDefined();
|
||||
// DFSshgd: strength is single-digit s value (here '2')
|
||||
expect(payload!.position.dfs!.strength).toBe(2);
|
||||
|
||||
const commentSeg = structure.find((s) => /comment/i.test(String(s.name))) as Segment | undefined;
|
||||
expect(commentSeg).toBeDefined();
|
||||
const fieldsDFS = (commentSeg!.fields ?? []) as Field[];
|
||||
const hasDFS = fieldsDFS.some((f) => f.name === "DFS");
|
||||
expect(hasDFS).toBe(true);
|
||||
});
|
||||
|
||||
it("parses course/speed in DDD/SSS form and altitude /A=", () => {
|
||||
const raw = "N0CALL>APRS,WIDE1-1:!4500.00N/07000.00W>090/045/A=001234";
|
||||
const frame = Frame.fromString(raw);
|
||||
const res = frame.decode(true) as { payload: PositionPayload | null; structure: Dissected };
|
||||
const { payload, structure } = res;
|
||||
|
||||
expect(payload).not.toBeNull();
|
||||
expect(payload!.position.course).toBe(90);
|
||||
// Speed is converted from knots to km/h
|
||||
expect(payload!.position.speed).toBeCloseTo(45 * 1.852, 3);
|
||||
// Altitude 001234 ft -> meters
|
||||
expect(Math.round((payload!.position.altitude || 0) / 0.3048)).toBe(1234);
|
||||
|
||||
const commentSeg = structure.find((s) => /comment/i.test(String(s.name))) as Segment | undefined;
|
||||
expect(commentSeg).toBeDefined();
|
||||
const fieldsCSE = (commentSeg!.fields ?? []) as Field[];
|
||||
const hasCSE = fieldsCSE.some((f) => f.name === "CSE/SPD");
|
||||
expect(hasCSE).toBe(true);
|
||||
});
|
||||
|
||||
it("parses combined tokens: DDD/SSS PHG and DFS", () => {
|
||||
const raw = "N0CALL>APRS,WIDE1-1:!4500.00N/07000.00W>090/045PHG5132/DFS2132";
|
||||
const frame = Frame.fromString(raw);
|
||||
const res = frame.decode(true) as { payload: PositionPayload | null; structure: Dissected };
|
||||
const { payload, structure } = res;
|
||||
|
||||
expect(payload).not.toBeNull();
|
||||
expect(payload!.position.course).toBe(90);
|
||||
expect(payload!.position.speed).toBeCloseTo(45 * 1.852, 3);
|
||||
expect(payload!.position.phg).toBeDefined();
|
||||
expect(payload!.position.dfs).toBeDefined();
|
||||
expect(payload!.position.dfs!.strength).toBe(2);
|
||||
|
||||
const commentSeg = structure.find((s) => /comment/i.test(String(s.name))) as Segment | undefined;
|
||||
expect(commentSeg).toBeDefined();
|
||||
const fieldsCombined = (commentSeg!.fields ?? []) as Field[];
|
||||
expect(fieldsCombined.some((f) => ["CSE/SPD", "PHG", "DFS"].includes(String(f.name)))).toBe(true);
|
||||
});
|
||||
});
|
||||
@@ -1,7 +1,7 @@
|
||||
import { Dissected, FieldType } from "@hamradio/packet";
|
||||
import { describe, expect, it } from "vitest";
|
||||
|
||||
import { Address, Frame, Timestamp } from "../src/frame";
|
||||
import { Address, Frame } from "../src/frame";
|
||||
import {
|
||||
DataType,
|
||||
type ITimestamp,
|
||||
@@ -12,6 +12,7 @@ import {
|
||||
type PositionPayload,
|
||||
type StatusPayload
|
||||
} from "../src/frame.types";
|
||||
import Timestamp from "../src/timestamp";
|
||||
|
||||
// Address parsing: split by method
|
||||
describe("Address.parse", () => {
|
||||
@@ -233,7 +234,7 @@ describe("Frame.decodeMicE", () => {
|
||||
const frame = Frame.fromString(data);
|
||||
const decoded = frame.decode() as MicEPayload;
|
||||
expect(decoded).not.toBeNull();
|
||||
expect(decoded?.type).toBe(DataType.MicECurrent);
|
||||
expect(decoded?.type).toBe(DataType.MicE);
|
||||
});
|
||||
|
||||
it("decodes a Mic-E packet with old format (single quote)", () => {
|
||||
@@ -322,6 +323,16 @@ describe("Frame.decodePosition", () => {
|
||||
const decoded = frame.decode() as PositionPayload;
|
||||
expect(decoded).not.toBeNull();
|
||||
});
|
||||
|
||||
it("should handle UTF-8 characters", () => {
|
||||
const data =
|
||||
"WB2OSZ-5>APDW17:!4237.14NS07120.83W#PHG7140 Did you know that APRS comments and messages can contain UTF-8 characters? アマチュア無線";
|
||||
const frame = Frame.fromString(data);
|
||||
const decoded = frame.decode() as PositionPayload;
|
||||
expect(decoded).not.toBeNull();
|
||||
expect(decoded?.type).toBe(DataType.PositionNoTimestampNoMessaging);
|
||||
expect(decoded?.position.comment).toContain("UTF-8 characters? アマチュア無線");
|
||||
});
|
||||
});
|
||||
|
||||
describe("Frame.decodeStatus", () => {
|
||||
@@ -468,9 +479,9 @@ describe("Frame.decodeMicE", () => {
|
||||
const decoded = frame.decode() as MicEPayload;
|
||||
|
||||
expect(decoded).not.toBeNull();
|
||||
expect(decoded?.type).toBe(DataType.MicECurrent);
|
||||
expect(decoded?.type).toBe(DataType.MicE);
|
||||
|
||||
if (decoded && decoded.type === DataType.MicECurrent) {
|
||||
if (decoded && decoded.type === DataType.MicE) {
|
||||
expect(decoded.position).toBeDefined();
|
||||
expect(typeof decoded.position.latitude).toBe("number");
|
||||
expect(typeof decoded.position.longitude).toBe("number");
|
||||
@@ -497,7 +508,7 @@ describe("Frame.decodeMicE", () => {
|
||||
|
||||
expect(decoded).not.toBeNull();
|
||||
|
||||
if (decoded && decoded.type === DataType.MicECurrent) {
|
||||
if (decoded && decoded.type === DataType.MicE) {
|
||||
expect(decoded.position.latitude).toBeCloseTo(12 + 34.56 / 60, 3);
|
||||
}
|
||||
});
|
||||
@@ -509,7 +520,7 @@ describe("Frame.decodeMicE", () => {
|
||||
|
||||
expect(decoded).not.toBeNull();
|
||||
|
||||
if (decoded && decoded.type === DataType.MicECurrent) {
|
||||
if (decoded && decoded.type === DataType.MicE) {
|
||||
expect(decoded.position.latitude).toBeCloseTo(1 + 20.45 / 60, 3);
|
||||
}
|
||||
});
|
||||
@@ -521,7 +532,7 @@ describe("Frame.decodeMicE", () => {
|
||||
|
||||
expect(decoded).not.toBeNull();
|
||||
|
||||
if (decoded && decoded.type === DataType.MicECurrent) {
|
||||
if (decoded && decoded.type === DataType.MicE) {
|
||||
expect(decoded.position.latitude).toBeCloseTo(40 + 12.34 / 60, 3);
|
||||
}
|
||||
});
|
||||
@@ -533,7 +544,7 @@ describe("Frame.decodeMicE", () => {
|
||||
|
||||
expect(decoded).not.toBeNull();
|
||||
|
||||
if (decoded && decoded.type === DataType.MicECurrent) {
|
||||
if (decoded && decoded.type === DataType.MicE) {
|
||||
expect(decoded.position.latitude).toBeLessThan(0);
|
||||
}
|
||||
});
|
||||
@@ -547,7 +558,7 @@ describe("Frame.decodeMicE", () => {
|
||||
|
||||
expect(decoded).not.toBeNull();
|
||||
|
||||
if (decoded && decoded.type === DataType.MicECurrent) {
|
||||
if (decoded && decoded.type === DataType.MicE) {
|
||||
expect(typeof decoded.position.longitude).toBe("number");
|
||||
expect(decoded.position.longitude).toBeGreaterThanOrEqual(-180);
|
||||
expect(decoded.position.longitude).toBeLessThanOrEqual(180);
|
||||
@@ -561,7 +572,7 @@ describe("Frame.decodeMicE", () => {
|
||||
|
||||
expect(decoded).not.toBeNull();
|
||||
|
||||
if (decoded && decoded.type === DataType.MicECurrent) {
|
||||
if (decoded && decoded.type === DataType.MicE) {
|
||||
expect(typeof decoded.position.longitude).toBe("number");
|
||||
}
|
||||
});
|
||||
@@ -573,7 +584,7 @@ describe("Frame.decodeMicE", () => {
|
||||
|
||||
expect(decoded).not.toBeNull();
|
||||
|
||||
if (decoded && decoded.type === DataType.MicECurrent) {
|
||||
if (decoded && decoded.type === DataType.MicE) {
|
||||
expect(typeof decoded.position.longitude).toBe("number");
|
||||
expect(Math.abs(decoded.position.longitude)).toBeGreaterThan(90);
|
||||
}
|
||||
@@ -588,7 +599,7 @@ describe("Frame.decodeMicE", () => {
|
||||
|
||||
expect(decoded).not.toBeNull();
|
||||
|
||||
if (decoded && decoded.type === DataType.MicECurrent) {
|
||||
if (decoded && decoded.type === DataType.MicE) {
|
||||
if (decoded.position.speed !== undefined) {
|
||||
expect(decoded.position.speed).toBeGreaterThanOrEqual(0);
|
||||
expect(typeof decoded.position.speed).toBe("number");
|
||||
@@ -603,7 +614,7 @@ describe("Frame.decodeMicE", () => {
|
||||
|
||||
expect(decoded).not.toBeNull();
|
||||
|
||||
if (decoded && decoded.type === DataType.MicECurrent) {
|
||||
if (decoded && decoded.type === DataType.MicE) {
|
||||
if (decoded.position.course !== undefined) {
|
||||
expect(decoded.position.course).toBeGreaterThanOrEqual(0);
|
||||
expect(decoded.position.course).toBeLessThan(360);
|
||||
@@ -618,7 +629,7 @@ describe("Frame.decodeMicE", () => {
|
||||
|
||||
expect(decoded).not.toBeNull();
|
||||
|
||||
if (decoded && decoded.type === DataType.MicECurrent) {
|
||||
if (decoded && decoded.type === DataType.MicE) {
|
||||
expect(decoded.position.speed).toBeUndefined();
|
||||
}
|
||||
});
|
||||
@@ -630,7 +641,7 @@ describe("Frame.decodeMicE", () => {
|
||||
|
||||
expect(decoded).not.toBeNull();
|
||||
|
||||
if (decoded && decoded.type === DataType.MicECurrent) {
|
||||
if (decoded && decoded.type === DataType.MicE) {
|
||||
if (decoded.position.course !== undefined) {
|
||||
expect(decoded.position.course).toBeGreaterThan(0);
|
||||
expect(decoded.position.course).toBeLessThan(360);
|
||||
@@ -647,7 +658,7 @@ describe("Frame.decodeMicE", () => {
|
||||
|
||||
expect(decoded).not.toBeNull();
|
||||
|
||||
if (decoded && decoded.type === DataType.MicECurrent) {
|
||||
if (decoded && decoded.type === DataType.MicE) {
|
||||
expect(decoded.position.symbol).toBeDefined();
|
||||
expect(decoded.position.symbol?.table).toBeDefined();
|
||||
expect(decoded.position.symbol?.code).toBeDefined();
|
||||
@@ -659,29 +670,30 @@ describe("Frame.decodeMicE", () => {
|
||||
|
||||
describe("Altitude decoding", () => {
|
||||
it("should decode altitude from /A=NNNNNN format", () => {
|
||||
const data = "CALL>4ABCDE:`c.l+@&'/'\"G:}/A=001234";
|
||||
const data = "CALL>4ABCDE:`c.l+@&'//A=001234";
|
||||
const frame = Frame.fromString(data);
|
||||
const decoded = frame.decode() as Payload;
|
||||
const decoded = frame.decode() as MicEPayload;
|
||||
|
||||
expect(decoded).not.toBeNull();
|
||||
expect(decoded.type).toBe(DataType.MicE);
|
||||
|
||||
if (decoded && decoded.type === DataType.MicECurrent) {
|
||||
expect(decoded.position.altitude).toBeCloseTo(1234 * 0.3048, 1);
|
||||
}
|
||||
expect(decoded.position).toBeDefined();
|
||||
expect(decoded.position.altitude).toBeDefined();
|
||||
expect(decoded.position.altitude).toBeCloseTo(1234 * 0.3048, 1);
|
||||
});
|
||||
|
||||
it("should decode altitude from base-91 format }abc", () => {
|
||||
const data = "CALL>4AB2DE:`c.l+@&'/'\"G:}}S^X";
|
||||
it("should decode altitude from base-91 format abc}", () => {
|
||||
const data = "N83MZ>T2TQ5U,WA1PLE-4*:`c.l+@&'/\"4T}KJ6TMS";
|
||||
const frame = Frame.fromString(data);
|
||||
const decoded = frame.decode() as Payload;
|
||||
const decoded = frame.decode() as MicEPayload;
|
||||
|
||||
expect(decoded).not.toBeNull();
|
||||
expect(decoded.type).toBe(DataType.MicE);
|
||||
|
||||
if (decoded && decoded.type === DataType.MicECurrent) {
|
||||
if (decoded.position.comment?.startsWith("}")) {
|
||||
expect(decoded.position.altitude).toBeDefined();
|
||||
}
|
||||
}
|
||||
expect(decoded.position).toBeDefined();
|
||||
expect(decoded.position.altitude).toBeDefined();
|
||||
expect(decoded.position.comment).toBe("KJ6TMS");
|
||||
expect(decoded.position.altitude).toBeCloseTo(61, 1);
|
||||
});
|
||||
|
||||
it("should prefer /A= format over base-91 when both present", () => {
|
||||
@@ -691,7 +703,7 @@ describe("Frame.decodeMicE", () => {
|
||||
|
||||
expect(decoded).not.toBeNull();
|
||||
|
||||
if (decoded && decoded.type === DataType.MicECurrent) {
|
||||
if (decoded && decoded.type === DataType.MicE) {
|
||||
expect(decoded.position.altitude).toBeCloseTo(5000 * 0.3048, 1);
|
||||
}
|
||||
});
|
||||
@@ -703,7 +715,7 @@ describe("Frame.decodeMicE", () => {
|
||||
|
||||
expect(decoded).not.toBeNull();
|
||||
|
||||
if (decoded && decoded.type === DataType.MicECurrent) {
|
||||
if (decoded && decoded.type === DataType.MicE) {
|
||||
expect(decoded.position.altitude).toBeUndefined();
|
||||
expect(decoded.position.comment).toContain("Just a comment");
|
||||
}
|
||||
@@ -718,7 +730,7 @@ describe("Frame.decodeMicE", () => {
|
||||
|
||||
expect(decoded).not.toBeNull();
|
||||
|
||||
if (decoded && decoded.type === DataType.MicECurrent) {
|
||||
if (decoded && decoded.type === DataType.MicE) {
|
||||
expect(decoded.messageType).toBe("M0: Off Duty");
|
||||
}
|
||||
});
|
||||
@@ -730,7 +742,7 @@ describe("Frame.decodeMicE", () => {
|
||||
|
||||
expect(decoded).not.toBeNull();
|
||||
|
||||
if (decoded && decoded.type === DataType.MicECurrent) {
|
||||
if (decoded && decoded.type === DataType.MicE) {
|
||||
expect(decoded.messageType).toBeDefined();
|
||||
expect(typeof decoded.messageType).toBe("string");
|
||||
}
|
||||
@@ -753,7 +765,7 @@ describe("Frame.decodeMicE", () => {
|
||||
|
||||
expect(decoded).not.toBeNull();
|
||||
|
||||
if (decoded && decoded.type === DataType.MicECurrent) {
|
||||
if (decoded && decoded.type === DataType.MicE) {
|
||||
expect(decoded.position.comment).toContain("This is a test comment");
|
||||
}
|
||||
});
|
||||
@@ -765,7 +777,7 @@ describe("Frame.decodeMicE", () => {
|
||||
|
||||
expect(decoded).not.toBeNull();
|
||||
|
||||
if (decoded && decoded.type === DataType.MicECurrent) {
|
||||
if (decoded && decoded.type === DataType.MicE) {
|
||||
expect(decoded.position.comment).toBeDefined();
|
||||
}
|
||||
});
|
||||
@@ -802,7 +814,7 @@ describe("Frame.decodeMicE", () => {
|
||||
|
||||
expect(() => frame.decode()).not.toThrow();
|
||||
const decoded = frame.decode() as MicEPayload;
|
||||
expect(decoded === null || decoded?.type === DataType.MicECurrent).toBe(true);
|
||||
expect(decoded === null || decoded?.type === DataType.MicE).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -813,9 +825,9 @@ describe("Frame.decodeMicE", () => {
|
||||
const decoded = frame.decode() as MicEPayload;
|
||||
|
||||
expect(decoded).not.toBeNull();
|
||||
expect(decoded?.type).toBe(DataType.MicECurrent);
|
||||
expect(decoded?.type).toBe(DataType.MicE);
|
||||
|
||||
if (decoded && decoded.type === DataType.MicECurrent) {
|
||||
if (decoded && decoded.type === DataType.MicE) {
|
||||
expect(decoded.position.latitude).toBeDefined();
|
||||
expect(decoded.position.longitude).toBeDefined();
|
||||
expect(decoded.position.symbol).toBeDefined();
|
||||
@@ -939,7 +951,7 @@ describe("Packet dissection with sections", () => {
|
||||
const commentSection = result.structure?.find((s) => s.name === "comment");
|
||||
expect(commentSection).toBeDefined();
|
||||
expect(commentSection?.data?.byteLength).toBe("Test message".length);
|
||||
expect(commentSection?.fields?.[0]?.name).toBe("text");
|
||||
expect(commentSection?.fields?.[0]?.name).toBe("comment");
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
224
test/payload.extras.test.ts
Normal file
224
test/payload.extras.test.ts
Normal file
@@ -0,0 +1,224 @@
|
||||
import type { Dissected, Field, Segment } from "@hamradio/packet";
|
||||
import { describe, expect, it } from "vitest";
|
||||
|
||||
import { Frame } from "../src/frame";
|
||||
import type { PositionPayload } from "../src/frame.types";
|
||||
import { feetToMeters, milesToMeters } from "../src/parser";
|
||||
import { decodeTelemetry } from "../src/payload.extras";
|
||||
|
||||
describe("APRS extras test vectors", () => {
|
||||
it("parses altitude token in the beginning of a comment and emits structure", () => {
|
||||
const raw =
|
||||
"DL3QP-R>APDG03,TCPIP*,qAC,T2ROMANIA:!5151.12ND00637.65E&/A=000000440 MMDVM Voice 439.40000MHz -7.6000MHz, DL3QP_Pi-Star";
|
||||
const frame = Frame.fromString(raw);
|
||||
const res = frame.decode(true) as { payload: PositionPayload | null; structure: Dissected };
|
||||
const { payload, structure } = res;
|
||||
|
||||
expect(payload).not.toBeNull();
|
||||
// Altitude 001234 ft -> meters
|
||||
expect(payload!.position.altitude).toBe(0);
|
||||
|
||||
const commentSeg = structure.find((s) => /comment/i.test(String(s.name))) as Segment | undefined;
|
||||
expect(commentSeg).toBeDefined();
|
||||
const fieldsAlt = (commentSeg!.fields ?? []) as Field[];
|
||||
const hasAlt = fieldsAlt.some((f) => f.name === "altitude");
|
||||
expect(hasAlt).toBe(true);
|
||||
|
||||
expect(payload!.position.comment).toBe("440 MMDVM Voice 439.40000MHz -7.6000MHz, DL3QP_Pi-Star");
|
||||
});
|
||||
|
||||
it("parses altitude token marker mid-comment and emits structure", () => {
|
||||
const raw = "N0CALL>APRS,WIDE1-1:!4500.00N/07000.00W#RNG0001ALT/A=001234 Your Comment Here";
|
||||
const frame = Frame.fromString(raw);
|
||||
const res = frame.decode(true) as { payload: PositionPayload | null; structure: Dissected };
|
||||
const { payload, structure } = res;
|
||||
|
||||
// console.log(structure[structure.length - 1]); // Log the last segment for debugging
|
||||
|
||||
expect(payload).not.toBeNull();
|
||||
// Altitude 001234 ft -> meters
|
||||
expect(Math.round((payload!.position.altitude || 0) / 0.3048)).toBe(1234);
|
||||
|
||||
const commentSeg = structure.find((s) => /comment/i.test(String(s.name))) as Segment | undefined;
|
||||
expect(commentSeg).toBeDefined();
|
||||
const fieldsAlt = (commentSeg!.fields ?? []) as Field[];
|
||||
const hasAlt = fieldsAlt.some((f) => f.name === "altitude");
|
||||
expect(hasAlt).toBe(true);
|
||||
|
||||
const commentIndex = (commentSeg!.fields ?? []).findIndex((f) => f.name === "comment");
|
||||
expect(commentIndex).toBe(2); // Range marker + range go before.
|
||||
|
||||
const altitudeIndex = (commentSeg!.fields ?? []).findIndex((f) => f.name === "altitude");
|
||||
expect(altitudeIndex).toBeGreaterThan(0); // Altitude should come after comment in the structure
|
||||
expect(altitudeIndex).toBeGreaterThan(commentIndex);
|
||||
|
||||
const secondCommentIndex = (commentSeg!.fields ?? []).findIndex((f, i) => f.name === "comment" && i > commentIndex);
|
||||
expect(secondCommentIndex).toBeGreaterThan(altitudeIndex); // Any additional comment fields should come after altitude
|
||||
});
|
||||
|
||||
it("parses PHG from position with messaging (spec vector 1)", () => {
|
||||
const raw = "NOCALL>APZRAZ,qAS,PA2RDK-14:=5154.19N/00627.77E>PHG500073 de NOCALL";
|
||||
const frame = Frame.fromString(raw);
|
||||
const res = frame.decode(true) as { payload: PositionPayload | null; structure: Dissected };
|
||||
const { payload } = res;
|
||||
|
||||
expect(payload).not.toBeNull();
|
||||
expect(payload!.position.phg).toBeDefined();
|
||||
// PHG500073 parsed per spec: p=5 -> 25 W, h='0' -> 10 ft, g='0' -> 0 dBi
|
||||
expect(payload!.position.phg!.power).toBe(25);
|
||||
expect(payload!.position.phg!.height).toBeCloseTo(3.048, 3);
|
||||
expect(payload!.position.phg!.gain).toBe(0);
|
||||
expect(payload!.position!.comment).toBe("73 de NOCALL");
|
||||
});
|
||||
|
||||
it("parses PHG token with hyphen separators (spec vector 2)", () => {
|
||||
const raw = "NOCALL>APRS,TCPIP*,qAC,NINTH:;P-PA3RD *061000z5156.26NP00603.29E#PHG0210DAPNET";
|
||||
const frame = Frame.fromString(raw);
|
||||
const res = frame.decode(true) as { payload: PositionPayload | null; structure: Dissected };
|
||||
const { payload, structure } = res;
|
||||
|
||||
// console.log(structure[structure.length - 1]); // Log the last segment for debugging
|
||||
|
||||
expect(payload).not.toBeNull();
|
||||
// Use a spec PHG example: PHG0210 -> p=0 -> power 0 W, h=2 -> 40 ft
|
||||
expect(payload!.position.phg).toBeDefined();
|
||||
expect(payload!.position.phg!.power).toBe(0);
|
||||
expect(payload!.position.phg!.height).toBeCloseTo(12.192, 3);
|
||||
|
||||
const commentSeg = structure.find((s) => /comment/i.test(String(s.name))) as Segment | undefined;
|
||||
expect(commentSeg).toBeDefined();
|
||||
const fields = (commentSeg!.fields ?? []) as Field[];
|
||||
const hasPHG = fields.some((f) => f.name === "PHG marker");
|
||||
expect(hasPHG).toBe(true);
|
||||
});
|
||||
|
||||
it("parses DFS token with long numeric strength", () => {
|
||||
const raw = "N0CALL>APRS,WIDE1-1:!4500.00N/07000.00W#DFS2360/Your Comment";
|
||||
const frame = Frame.fromString(raw);
|
||||
const res = frame.decode(true) as { payload: PositionPayload | null; structure: Dissected };
|
||||
const { payload, structure } = res;
|
||||
|
||||
expect(payload).not.toBeNull();
|
||||
expect(payload!.position.dfs).toBeDefined();
|
||||
// DFSshgd: strength is single-digit s value (here '2')
|
||||
expect(payload!.position.dfs!.strength).toBe(2);
|
||||
|
||||
const commentSeg = structure.find((s) => /comment/i.test(String(s.name))) as Segment | undefined;
|
||||
expect(commentSeg).toBeDefined();
|
||||
const fieldsDFS = (commentSeg!.fields ?? []) as Field[];
|
||||
const hasDFS = fieldsDFS.some((f) => f.name === "DFS marker");
|
||||
expect(hasDFS).toBe(true);
|
||||
});
|
||||
|
||||
it("parses course/speed in DDD/SSS form and altitude /A=", () => {
|
||||
const raw = "N0CALL>APRS,WIDE1-1:!4500.00N/07000.00W>090/045/A=001234";
|
||||
const frame = Frame.fromString(raw);
|
||||
const res = frame.decode(true) as { payload: PositionPayload | null; structure: Dissected };
|
||||
const { payload, structure } = res;
|
||||
|
||||
expect(payload).not.toBeNull();
|
||||
expect(payload!.position.course).toBe(90);
|
||||
// Speed is converted from knots to km/h
|
||||
expect(payload!.position.speed).toBeCloseTo(45 * 1.852, 3);
|
||||
// Altitude 001234 ft -> meters
|
||||
expect(Math.round((payload!.position.altitude || 0) / 0.3048)).toBe(1234);
|
||||
|
||||
const commentSeg = structure.find((s) => /comment/i.test(String(s.name))) as Segment | undefined;
|
||||
expect(commentSeg).toBeDefined();
|
||||
const fieldsCSE = (commentSeg!.fields ?? []) as Field[];
|
||||
const hasCSE = fieldsCSE.some((f) => f.name === "course");
|
||||
expect(hasCSE).toBe(true);
|
||||
});
|
||||
|
||||
it("parses combined tokens: DDD/SSS PHG and DFS", () => {
|
||||
const raw = "N0CALL>APRS,WIDE1-1:!4500.00N/07000.00W>090/045PHG5132DFS2132";
|
||||
const frame = Frame.fromString(raw);
|
||||
const res = frame.decode(true) as { payload: PositionPayload | null; structure: Dissected };
|
||||
const { payload, structure } = res;
|
||||
|
||||
expect(payload).not.toBeNull();
|
||||
expect(payload!.position.course).toBe(90);
|
||||
expect(payload!.position.speed).toBeCloseTo(45 * 1.852, 3);
|
||||
expect(payload!.position.phg).toBeDefined();
|
||||
expect(payload!.position.dfs).toBeDefined();
|
||||
expect(payload!.position.dfs!.strength).toBe(2);
|
||||
|
||||
const commentSeg = structure.find((s) => /comment/i.test(String(s.name))) as Segment | undefined;
|
||||
expect(commentSeg).toBeDefined();
|
||||
const fieldsCombined = (commentSeg!.fields ?? []) as Field[];
|
||||
expect(fieldsCombined.some((f) => ["course", "PHG marker", "DFS marker"].includes(String(f.name)))).toBe(true);
|
||||
});
|
||||
|
||||
it("parses RNG token and emits structure", () => {
|
||||
const raw =
|
||||
"N0CALL-S>APDG01,TCPIP*,qAC,N0CALL-GS:;N0CALL B *181721z5148.38ND00634.32EaRNG0001/A=000010 70cm Voice (D-Star) 439.50000MHz -7.6000MHz";
|
||||
const frame = Frame.fromString(raw);
|
||||
const res = frame.decode(true) as { payload: PositionPayload | null; structure: Dissected };
|
||||
const { payload, structure } = res;
|
||||
|
||||
expect(payload).not.toBeNull();
|
||||
expect(payload!.position.altitude).toBeCloseTo(feetToMeters(10), 3);
|
||||
expect(payload!.position.range).toBe(milesToMeters(1) / 1000);
|
||||
|
||||
const commentSeg = structure.find((s) => /comment/i.test(String(s.name))) as Segment | undefined;
|
||||
expect(commentSeg).toBeDefined();
|
||||
const fieldsRNG = (commentSeg!.fields ?? []) as Field[];
|
||||
const hasRNG = fieldsRNG.some((f) => f.name === "range marker");
|
||||
expect(hasRNG).toBe(true);
|
||||
|
||||
const rangeIndex = (commentSeg!.fields ?? []).findIndex((f) => f.name === "range marker");
|
||||
expect(rangeIndex).toBeGreaterThanOrEqual(0);
|
||||
const altitudeIndex = (commentSeg!.fields ?? []).findIndex((f) => f.name === "altitude");
|
||||
expect(altitudeIndex).toBeGreaterThanOrEqual(0);
|
||||
expect(rangeIndex).toBeGreaterThanOrEqual(0);
|
||||
expect(altitudeIndex).toBeGreaterThan(rangeIndex); // Altitude comes after range
|
||||
const commentIndex = (commentSeg!.fields ?? []).findIndex((f) => f.name === "comment");
|
||||
expect(commentIndex).toBeGreaterThan(altitudeIndex); // Comment comes after altitude
|
||||
});
|
||||
});
|
||||
|
||||
describe("decodeTelemetry", () => {
|
||||
it("decodes minimal telemetry (|!!!!|)", () => {
|
||||
const result = decodeTelemetry("!!!!");
|
||||
expect(result.sequence).toBe(0);
|
||||
expect(result.analog).toEqual([0]);
|
||||
expect(result.digital).toBeUndefined();
|
||||
});
|
||||
|
||||
it("decodes sequence and one channel", () => {
|
||||
const result = decodeTelemetry("ss11");
|
||||
expect(result.sequence).toBe(7544);
|
||||
expect(result.analog).toEqual([1472]);
|
||||
expect(result.digital).toBeUndefined();
|
||||
});
|
||||
|
||||
it("decodes sequence and two channels", () => {
|
||||
const result = decodeTelemetry("ss1122");
|
||||
expect(result.sequence).toBe(7544);
|
||||
expect(result.analog).toEqual([1472, 1564]);
|
||||
expect(result.digital).toBeUndefined();
|
||||
});
|
||||
|
||||
it("decodes sequence and five channels", () => {
|
||||
const result = decodeTelemetry("ss1122334455");
|
||||
expect(result.sequence).toBe(7544);
|
||||
expect(result.analog).toEqual([1472, 1564, 1656, 1748, 1840]);
|
||||
expect(result.digital).toBeUndefined();
|
||||
});
|
||||
|
||||
it("decodes sequence, five channels, and digital", () => {
|
||||
const result = decodeTelemetry('ss1122334455!"');
|
||||
expect(result.sequence).toBe(7544);
|
||||
expect(result.analog).toEqual([1472, 1564, 1656, 1748, 1840]);
|
||||
expect(result.digital).toBe(1);
|
||||
});
|
||||
|
||||
it("throws on too short input", () => {
|
||||
expect(() => decodeTelemetry("!")).toThrow();
|
||||
expect(() => decodeTelemetry("")).toThrow();
|
||||
});
|
||||
|
||||
it("throws on invalid base91", () => {
|
||||
expect(() => decodeTelemetry("ss11~~")).toThrow();
|
||||
});
|
||||
});
|
||||
Reference in New Issue
Block a user