31 Commits

Author SHA1 Message Date
37e8771eb1 1.4.2 2026-03-20 13:54:12 +01:00
4502f9902b Added support for !DAO! marker parsing 2026-03-20 13:53:36 +01:00
c7c54984ba 1.4.1 2026-03-20 10:54:21 +01:00
83d05fb2e9 Correctly decode all embedded telemetry data 2026-03-20 10:54:10 +01:00
e49333611f 1.4.0 2026-03-20 10:40:41 +01:00
0055938338 Added push scripts 2026-03-20 10:39:45 +01:00
75e31c2008 Cleaned up the frame.ts by splitting payload parsing to subpackages 2026-03-20 10:38:36 +01:00
1aa8eb363f More bugfixes in comment offsets 2026-03-18 19:23:27 +01:00
34240dfbd8 More bugfixes in comment offsets 2026-03-18 19:17:53 +01:00
46e7694ec6 Fixed bug in extras segment parsing 2026-03-18 19:07:17 +01:00
04166daeee Fixed bug in extras segment parsing 2026-03-18 19:01:16 +01:00
e9e329ccc1 Refactored extras field parsing 2026-03-18 18:22:53 +01:00
6adf1281ef Upgrade @hamradio/packet 2026-03-18 18:22:31 +01:00
5b836a4e0c Parse object as one segment 2026-03-18 17:09:49 +01:00
c28572e3b6 Version 1.3.0 2026-03-18 17:03:44 +01:00
17caa22331 Better parsing for extras; Added deviceID resolution 2026-03-18 17:01:46 +01:00
be8cd00c00 Export all interfaces 2026-03-18 13:12:38 +01:00
7dc15e360d Version 1.2.0 2026-03-18 13:11:40 +01:00
b1cd8449d9 Also parse the extras from the comment field 2026-03-18 13:11:16 +01:00
78dbd3b0ef Version 1.1.3 2026-03-18 10:07:06 +01:00
df266bab12 Correctly parse compressed position with no timestamp 2026-03-18 10:06:45 +01:00
0ab62dab02 Version 1.1.2 2026-03-16 13:16:18 +01:00
38b617728c Bug fixes in structure parsing 2026-03-16 13:16:06 +01:00
16f638301b Version 1.1.1 2026-03-16 07:41:46 +01:00
d0a100359d Repair missing datatypes 2026-03-16 07:41:32 +01:00
c300aefc0b Major change: switched to DataType enum 2026-03-15 22:57:19 +01:00
074806528f Added README 2026-03-15 21:38:09 +01:00
d62d7962fe Version 1.1.0 2026-03-15 21:32:25 +01:00
1f4108b888 Implemented remaining payload types 2026-03-15 21:32:01 +01:00
eca757b24f Implemented Query, Telemetry, Weather and RawGPS parsing 2026-03-15 21:13:12 +01:00
e0d4844c5b Stricter decoding 2026-03-15 20:21:26 +01:00
37 changed files with 5497 additions and 5123 deletions

3
.gitignore vendored
View File

@@ -103,6 +103,9 @@ web_modules/
# Optional npm cache directory
.npm
# Optional npm package-lock.json
package-lock.json
# Optional eslint cache
.eslintcache

View File

@@ -11,16 +11,22 @@ repos:
hooks:
- id: shellcheck
- repo: https://github.com/pre-commit/mirrors-eslint
rev: v10.0.3
- repo: local
hooks:
- id: prettier
name: prettier
entry: npx prettier --write
language: system
files: "\\.(js|jsx|ts|tsx)$"
- repo: local
hooks:
- id: eslint
name: eslint
entry: npx eslint --fix
language: system
files: "\\.(js|jsx|ts|tsx)$"
exclude: node_modules/
# Use stylelint (local) instead of the deprecated scss-lint Ruby gem which
# cannot parse modern Sass `@use` and module syntax. This invokes the
# project's installed `stylelint` via `npx` so the devDependency is used.
- repo: local
hooks:
- id: stylelint

19
.prettierrc.ts Normal file
View File

@@ -0,0 +1,19 @@
import { type Config } from "prettier";

/**
 * Prettier configuration for this package.
 *
 * Uses the @trivago/prettier-plugin-sort-imports plugin to keep import
 * statements grouped and ordered: Node built-ins first, then third-party
 * packages, then project path aliases, relative TypeScript modules,
 * stylesheets, and finally any remaining relative imports.
 *
 * `satisfies Config` validates the object against Prettier's Config type
 * while preserving the literal types of each option.
 */
const config = {
  plugins: ["@trivago/prettier-plugin-sort-imports"],
  trailingComma: "none",
  printWidth: 120,
  importOrder: [
    "<BUILTIN_MODULES>",
    "<THIRD_PARTY_MODULES>",
    "(?:services|components|contexts|pages|libs|types)/(.*)$",
    "^[./].*\\.(?:ts|tsx)$",
    "\\.(?:scss|css)$",
    "^[./]"
  ],
  // Insert a blank line between each import group.
  importOrderSeparation: true,
  // Alphabetize the specifiers within each import statement.
  importOrderSortSpecifiers: true
} satisfies Config;

export default config;

118
README.md
View File

@@ -0,0 +1,118 @@
# @hamradio/aprs
APRS (Automatic Packet Reporting System) utilities and parsers for TypeScript/JavaScript.
> For AX.25 frame parsing, see [@hamradio/ax25](https://www.npmjs.com/package/@hamradio/ax25).
This package provides lightweight parsing and helpers for APRS frames (APRS-IS style payloads). It exposes a small API for parsing frames, decoding payloads, working with APRS timestamps and addresses, and a few utility conversions.
## Install
Using npm:
```bash
npm install @hamradio/aprs
```
Or with yarn:
```bash
yarn add @hamradio/aprs
```
## Quick examples
Examples below show ESM / TypeScript usage. For CommonJS require() the same symbols are available from the package entrypoint.
### Import
```ts
import {
Frame,
Address,
Timestamp,
base91ToNumber,
knotsToKmh,
} from '@hamradio/aprs';
```
### Parse a raw APRS frame and decode payload
```ts
const raw = 'NOCALL-1>APRS,WIDE1-1:@092345z/:*E";qZ=OMRC/A=088132Hello World!';
// Parse into a Frame instance
const frame = Frame.fromString(raw);
// Inspect routing and payload
console.log(frame.source.toString()); // e.g. NOCALL-1
console.log(frame.destination.toString()); // APRS
console.log(frame.path.map(p => p.toString()));
// Decode payload (returns a structured payload object or null)
const payload = frame.decode();
console.log(payload?.type); // e.g. 'position' | 'message' | 'status' | ...
// Or ask for sections (dissection) along with decoded payload
const res = frame.decode(true) as { payload: any | null; structure: any };
console.log(res.payload, res.structure);
```
### Message decoding
```ts
const msg = 'W1AW>APRS::KB1ABC-5 :Hello World';
const f = Frame.fromString(msg);
const decoded = f.decode();
if (decoded && decoded.type === 'message') {
console.log(decoded.addressee); // KB1ABC-5
console.log(decoded.text); // Hello World
}
```
### Work with addresses and timestamps
```ts
const a = Address.parse('WA1PLE-4*');
console.log(a.call, a.ssid, a.isRepeated);
const ts = new Timestamp(12, 45, 'HMS', { seconds: 30, zulu: true });
console.log(ts.toDate()); // JavaScript Date representing the timestamp
```
### Utility conversions
```ts
console.log(base91ToNumber('!!!!')); // decode base91 values used in some APRS payloads
console.log(knotsToKmh(10)); // convert speed
```
## API summary
- `Frame` — parse frames with `Frame.fromString()` / `Frame.parse()` and decode payloads with `frame.decode()`.
- `Address` — helpers to parse and format APRS addresses: `Address.parse()` / `Address.fromString()`.
- `Timestamp` — APRS timestamp wrapper with `toDate()` conversion.
- Utility functions: `base91ToNumber`, `knotsToKmh`, `kmhToKnots`, `feetToMeters`, `metersToFeet`, `celsiusToFahrenheit`, `fahrenheitToCelsius`.
## Development
Run tests with:
```bash
npm install
npm test
```
Build the distribution with:
```bash
npm run build
```
## Contributing
See the project repository for contribution guidelines and tests.
---
Project: @hamradio/aprs — APRS parsing utilities for TypeScript

3283
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,6 +1,7 @@
{
"name": "@hamradio/aprs",
"version": "1.0.1",
"type": "module",
"version": "1.4.2",
"description": "APRS (Automatic Packet Reporting System) protocol support for Typescript",
"keywords": [
"APRS",
@@ -11,12 +12,12 @@
],
"repository": {
"type": "git",
"url": "https://git.maze.io/ham/aprs.js"
"url": "https://git.maze.io/ham/aprs.ts"
},
"license": "MIT",
"author": "Wijnand Modderman-Lenstra",
"main": "dist/index.js",
"module": "dist/index.mjs",
"module": "dist/index.js",
"types": "dist/index.d.ts",
"files": [
"dist"
@@ -24,7 +25,7 @@
"exports": {
".": {
"types": "./dist/index.d.ts",
"import": "./dist/index.mjs",
"import": "./dist/index.js",
"require": "./dist/index.js"
}
},
@@ -36,17 +37,25 @@
"test:watch": "vitest --watch",
"test:ci": "vitest --run",
"lint": "eslint .",
"prepare": "npm run build"
"prepare": "npm run build",
"push": "npm version patch && git push",
"push-minor": "npm version minor && git push",
"push-major": "npm version major && git push"
},
"dependencies": {
"@hamradio/packet": "^1.1.1",
"extended-nmea": "^2.1.3"
},
"dependencies": {},
"devDependencies": {
"@eslint/js": "^10.0.1",
"@vitest/coverage-v8": "^4.0.18",
"@trivago/prettier-plugin-sort-imports": "^6.0.2",
"@vitest/coverage-v8": "^4.1.0",
"eslint": "^10.0.3",
"globals": "^17.4.0",
"prettier": "^3.8.1",
"tsup": "^8.5.1",
"typescript": "^5.9.3",
"typescript-eslint": "^8.57.0",
"vitest": "^4.0.18"
"typescript-eslint": "^8.57.1",
"vitest": "^4.1.0"
}
}

1088
src/deviceid.ts Normal file

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -1,82 +1,112 @@
import { PacketSegment, PacketStructure } from "./parser.types";
import { Dissected, Field, Segment } from "@hamradio/packet";
// Any comment that contains this marker will set the doNotArchive flag on the
// decoded payload, which can be used by applications to skip archiving or
// logging frames that are meant to be transient or test data. This allows users
// to include the marker in their APRS comments when they want to indicate that
// a particular frame should not be stored long-term.
export const DO_NOT_ARCHIVE_MARKER = "!x!";
export interface IAddress {
call: string;
ssid: string;
call: string;
ssid: string;
isRepeated: boolean;
}
export interface IFrame {
source: IAddress;
source: IAddress;
destination: IAddress;
path: IAddress[];
payload: string;
path: IAddress[];
payload: string;
}
// APRS Data Type Identifiers (first character of payload)
export const DataTypeIdentifier = {
export enum DataType {
// Position Reports
PositionNoTimestampNoMessaging: '!',
PositionNoTimestampWithMessaging: '=',
PositionWithTimestampNoMessaging: '/',
PositionWithTimestampWithMessaging: '@',
PositionNoTimestampNoMessaging = "!",
PositionNoTimestampWithMessaging = "=",
PositionWithTimestampNoMessaging = "/",
PositionWithTimestampWithMessaging = "@",
// Mic-E
MicECurrent: '`',
MicEOld: "'",
MicE = "`",
MicEOld = "'",
// Messages and Bulletins
Message: ':',
Message = ":",
// Objects and Items
Object: ';',
Item: ')',
Object = ";",
Item = ")",
// Status
Status: '>',
Status = ">",
// Query
Query: '?',
Query = "?",
// Telemetry
TelemetryData: 'T',
TelemetryData = "T",
// Weather
WeatherReportNoPosition: '_',
WeatherReportNoPosition = "_",
// Raw GPS Data
RawGPS: '$',
RawGPS = "$",
// Station Capabilities
StationCapabilities: '<',
StationCapabilities = "<",
// User-Defined
UserDefined: '{',
UserDefined = "{",
// Third-Party Traffic
ThirdParty: '}',
ThirdParty = "}",
// Invalid/Test Data
InvalidOrTest: ',',
} as const;
InvalidOrTest = ","
}
export type DataTypeIdentifier = typeof DataTypeIdentifier[keyof typeof DataTypeIdentifier];
export const DataTypeNames: { [key in DataType]: string } = {
[DataType.PositionNoTimestampNoMessaging]: "position",
[DataType.PositionNoTimestampWithMessaging]: "position with messaging",
[DataType.PositionWithTimestampNoMessaging]: "position with timestamp",
[DataType.PositionWithTimestampWithMessaging]: "position with timestamp and messaging",
[DataType.MicE]: "Mic-E",
[DataType.MicEOld]: "Mic-E (old)",
[DataType.Message]: "message/bulletin",
[DataType.Object]: "object",
[DataType.Item]: "item",
[DataType.Status]: "status",
[DataType.Query]: "query",
[DataType.TelemetryData]: "telemetry data",
[DataType.WeatherReportNoPosition]: "weather report",
[DataType.RawGPS]: "raw GPS data",
[DataType.StationCapabilities]: "station capabilities",
[DataType.UserDefined]: "user defined",
[DataType.ThirdParty]: "third-party traffic",
[DataType.InvalidOrTest]: "invalid/test"
};
export interface ISymbol {
table: string; // Symbol table identifier
code: string; // Symbol code
table: string; // Symbol table identifier
code: string; // Symbol code
toString(): string; // Return combined symbol representation (e.g., "tablecode")
}
// Position data common to multiple formats
export interface IPosition {
latitude: number; // Decimal degrees
longitude: number; // Decimal degrees
latitude: number; // Decimal degrees
longitude: number; // Decimal degrees
ambiguity?: number; // Position ambiguity (0-4)
altitude?: number; // Meters
speed?: number; // Speed in knots/kmh depending on source
course?: number; // Course in degrees
altitude?: number; // Meters
speed?: number; // Speed in km/h
course?: number; // Course in degrees
range?: number; // Kilometers
phg?: IPowerHeightGain;
dfs?: IDirectionFinding;
dao?: IDAO; // Optional DAO fields for added position precision
symbol?: ISymbol;
comment?: string;
@@ -86,27 +116,62 @@ export interface IPosition {
}
export interface ITimestamp {
day?: number; // Day of month (DHM format)
month?: number; // Month (MDHM format)
day?: number; // Day of month (DHM format)
month?: number; // Month (MDHM format)
hours: number;
minutes: number;
seconds?: number;
format: 'DHM' | 'HMS' | 'MDHM'; // Day-Hour-Minute, Hour-Minute-Second, Month-Day-Hour-Minute
zulu?: boolean; // Is UTC/Zulu time
toDate(): Date; // Convert to Date object respecting timezone
format: "DHM" | "HMS" | "MDHM"; // Day-Hour-Minute, Hour-Minute-Second, Month-Day-Hour-Minute
zulu?: boolean; // Is UTC/Zulu time
toDate(): Date; // Convert to Date object respecting timezone
}
export interface IPowerHeightGain {
power?: number; // Transmit power in watts
height?: number; // Antenna height in meters
gain?: number; // Antenna gain in dBi
directivity?: number | "omni" | "unknown"; // Optional directivity pattern (numeric code or "omni")
}
export interface IDirectionFinding {
bearing?: number; // Direction finding bearing in degrees
strength?: number; // Relative signal strength (0-9)
height?: number; // Antenna height in meters
gain?: number; // Antenna gain in dBi
quality?: number; // Signal quality or other metric (0-9)
directivity?: number | "omni" | "unknown"; // Optional directivity pattern (numeric code or "omni")
}
export interface ITelemetry {
sequence: number;
analog: number[];
digital?: number;
}
export interface IDAO {
datum_id?: string; // Geodetic datum identifier (e.g., "W84" for WGS84)
resolution?: number; // DAO resolution (0-3)
latitude?: number; // Added latitude precision
longitude?: number; // Added longitude precision
}
// Position Report Payload
export interface PositionPayload {
type: 'position';
type:
| DataType.PositionNoTimestampNoMessaging
| DataType.PositionNoTimestampWithMessaging
| DataType.PositionWithTimestampNoMessaging
| DataType.PositionWithTimestampWithMessaging;
doNotArchive?: boolean; // Optional flag to indicate frame should not be archived
timestamp?: ITimestamp;
position: IPosition;
messaging: boolean; // Whether APRS messaging is enabled
messaging: boolean; // Whether APRS messaging is enabled
dao?: IDAO; // Optional DAO fields for added position precision
micE?: {
messageType?: string;
isStandard?: boolean;
};
sections?: PacketSegment[];
sections?: Segment[];
}
// Compressed Position Format
@@ -117,69 +182,80 @@ export interface CompressedPosition {
table: string;
code: string;
};
course?: number; // Degrees
speed?: number; // Knots
range?: number; // Miles
altitude?: number; // Feet
course?: number; // Degrees
speed?: number; // Knots
range?: number; // Miles
altitude?: number; // Feet
radioRange?: number; // Miles
compression: 'old' | 'current';
compression: "old" | "current";
}
// Mic-E Payload (compressed in destination address)
export interface MicEPayload {
type: 'mic-e';
type: DataType.MicE | DataType.MicEOld;
doNotArchive?: boolean; // Optional flag to indicate frame should not be archived
position: IPosition;
course?: number;
speed?: number;
altitude?: number;
messageType?: string; // Standard Mic-E message
telemetry?: number[]; // Optional telemetry channels
messageType?: string; // Standard Mic-E message
isStandard?: boolean; // Whether messageType is a standard Mic-E message
telemetry?: number[]; // Optional telemetry channels
status?: string;
}
export type MessageVariant = "message" | "bulletin";
// Message Payload
export interface MessagePayload {
type: 'message';
addressee: string; // 9 character padded callsign
text: string; // Message text
messageNumber?: string; // Message ID for acknowledgment
ack?: string; // Acknowledgment of message ID
reject?: string; // Rejection of message ID
type: DataType.Message;
variant: "message";
doNotArchive?: boolean; // Optional flag to indicate frame should not be archived
addressee: string; // 9 character padded callsign
text: string; // Message text
messageNumber?: string; // Message ID for acknowledgment
ack?: string; // Acknowledgment of message ID
reject?: string; // Rejection of message ID
}
// Bulletin/Announcement (variant of message)
export interface BulletinPayload {
type: 'bulletin';
bulletinId: string; // Bulletin identifier (BLN#)
type: DataType.Message;
variant: "bulletin";
doNotArchive?: boolean; // Optional flag to indicate frame should not be archived
bulletinId: string; // Bulletin identifier (BLN#)
text: string;
group?: string; // Optional group bulletin
group?: string; // Optional group bulletin
}
// Object Payload
export interface ObjectPayload {
type: 'object';
name: string; // 9 character object name
type: DataType.Object;
doNotArchive?: boolean; // Optional flag to indicate frame should not be archived
name: string; // 9 character object name
timestamp: ITimestamp;
alive: boolean; // True if object is active, false if killed
alive: boolean; // True if object is active, false if killed
position: IPosition;
dao?: IDAO; // Optional DAO fields for added position precision
course?: number;
speed?: number;
}
// Item Payload
export interface ItemPayload {
type: 'item';
name: string; // 3-9 character item name
alive: boolean; // True if item is active, false if killed
type: DataType.Item;
doNotArchive?: boolean; // Optional flag to indicate frame should not be archived
name: string; // 3-9 character item name
alive: boolean; // True if item is active, false if killed
position: IPosition;
dao?: IDAO; // Optional DAO fields for added position precision
}
// Status Payload
export interface StatusPayload {
type: 'status';
type: DataType.Status;
doNotArchive?: boolean; // Optional flag to indicate frame should not be archived
timestamp?: ITimestamp;
text: string;
maidenhead?: string; // Optional Maidenhead grid locator
dao?: IDAO; // Optional DAO fields for added position precision
symbol?: {
table: string;
code: string;
@@ -188,138 +264,165 @@ export interface StatusPayload {
// Query Payload
export interface QueryPayload {
type: 'query';
queryType: string; // e.g., 'APRSD', 'APRST', 'PING'
target?: string; // Target callsign or area
type: DataType.Query;
queryType: string; // e.g., 'APRSD', 'APRST', 'PING'
target?: string; // Target callsign or area
}
export type TelemetryVariant = "data" | "parameters" | "unit" | "coefficients" | "bitsense";
// Telemetry Data Payload
export interface TelemetryDataPayload {
type: 'telemetry-data';
type: DataType.TelemetryData;
variant: "data";
sequence: number;
analog: number[]; // Up to 5 analog channels
digital: number; // 8-bit digital value
analog: number[]; // Up to 5 analog channels
digital: number; // 8-bit digital value
}
// Telemetry Parameter Names
export interface TelemetryParameterPayload {
type: 'telemetry-parameters';
names: string[]; // Parameter names
type: DataType.TelemetryData;
variant: "parameters";
names: string[]; // Parameter names
}
// Telemetry Unit/Label
export interface TelemetryUnitPayload {
type: 'telemetry-units';
units: string[]; // Units for each parameter
type: DataType.TelemetryData;
variant: "unit";
units: string[]; // Units for each parameter
}
// Telemetry Coefficients
export interface TelemetryCoefficientsPayload {
type: 'telemetry-coefficients';
type: DataType.TelemetryData;
variant: "coefficients";
coefficients: {
a: number[]; // a coefficients
b: number[]; // b coefficients
c: number[]; // c coefficients
a: number[]; // a coefficients
b: number[]; // b coefficients
c: number[]; // c coefficients
};
}
// Telemetry Bit Sense/Project Name
export interface TelemetryBitSensePayload {
type: 'telemetry-bitsense';
sense: number; // 8-bit sense value
type: DataType.TelemetryData;
variant: "bitsense";
sense: number; // 8-bit sense value
projectName?: string;
}
// Weather Report Payload
export interface WeatherPayload {
type: 'weather';
timestamp?: ITimestamp;
position?: IPosition;
windDirection?: number; // Degrees
windSpeed?: number; // MPH
windGust?: number; // MPH
temperature?: number; // Fahrenheit
rainLastHour?: number; // Hundredths of inch
rainLast24Hours?: number; // Hundredths of inch
rainSinceMidnight?: number; // Hundredths of inch
humidity?: number; // Percent
pressure?: number; // Tenths of millibar
luminosity?: number; // Watts per square meter
snowfall?: number; // Inches
rawRain?: number; // Raw rain counter
software?: string; // Weather software type
weatherUnit?: string; // Weather station type
type: DataType.WeatherReportNoPosition;
timestamp?: ITimestamp;
position?: IPosition;
dao?: IDAO; // Optional DAO fields for added position precision
windDirection?: number; // Degrees
windSpeed?: number; // MPH
windGust?: number; // MPH
temperature?: number; // Fahrenheit
rainLastHour?: number; // Hundredths of inch
rainLast24Hours?: number; // Hundredths of inch
rainSinceMidnight?: number; // Hundredths of inch
humidity?: number; // Percent
pressure?: number; // Tenths of millibar
luminosity?: number; // Watts per square meter
snowfall?: number; // Inches
rawRain?: number; // Raw rain counter
software?: string; // Weather software type
weatherUnit?: string; // Weather station type
comment?: string; // Additional comment
}
// Raw GPS Payload (NMEA sentences)
export interface RawGPSPayload {
type: 'raw-gps';
sentence: string; // Raw NMEA sentence
type: DataType.RawGPS;
sentence: string; // Raw NMEA sentence
position?: IPosition; // Optional parsed position if available
}
// Station Capabilities Payload
export interface StationCapabilitiesPayload {
type: 'capabilities';
type: DataType.StationCapabilities;
capabilities: string[];
}
// User-Defined Payload
export interface UserDefinedPayload {
type: 'user-defined';
type: DataType.UserDefined;
userPacketType: string;
data: string;
}
// Third-Party Traffic Payload
export interface ThirdPartyPayload {
type: 'third-party';
header: string; // Source path of third-party packet
payload: string; // Nested APRS packet
type: DataType.ThirdParty;
frame?: IFrame; // Optional nested frame if payload contains another APRS frame
comment?: string; // Optional comment
}
// DF Report Payload
export interface DFReportPayload {
type: 'df-report';
timestamp?: ITimestamp;
position: IPosition;
course?: number;
bearing?: number; // Direction finding bearing
quality?: number; // Signal quality
strength?: number; // Signal strength
height?: number; // Antenna height
gain?: number; // Antenna gain
bearing?: number; // Direction finding bearing
quality?: number; // Signal quality
strength?: number; // Signal strength
height?: number; // Antenna height
gain?: number; // Antenna gain
directivity?: string; // Antenna directivity pattern
}
export interface BasePayload {
type: string;
type: DataType;
doNotArchive?: boolean; // Optional flag to indicate frame should not be archived
}
// Union type for all decoded payload types
export type Payload = BasePayload & (
| PositionPayload
| MicEPayload
| MessagePayload
| BulletinPayload
| ObjectPayload
| ItemPayload
| StatusPayload
| QueryPayload
| TelemetryDataPayload
| TelemetryParameterPayload
| TelemetryUnitPayload
| TelemetryCoefficientsPayload
| TelemetryBitSensePayload
| WeatherPayload
| RawGPSPayload
| StationCapabilitiesPayload
| UserDefinedPayload
| ThirdPartyPayload
| DFReportPayload
);
export type Payload = BasePayload &
(
| PositionPayload
| MicEPayload
| MessagePayload
| BulletinPayload
| ObjectPayload
| ItemPayload
| StatusPayload
| QueryPayload
| TelemetryDataPayload
| TelemetryParameterPayload
| TelemetryUnitPayload
| TelemetryCoefficientsPayload
| TelemetryBitSensePayload
| WeatherPayload
| RawGPSPayload
| StationCapabilitiesPayload
| UserDefinedPayload
| ThirdPartyPayload
);
// Extended Frame with decoded payload
export interface DecodedFrame extends IFrame {
decoded?: Payload;
structure?: PacketStructure; // Routing and other frame-level sections
decoded?: Payload;
structure?: Dissected; // Routing and other frame-level sections
}
// Extras is an internal helper type used during decoding to accumulate additional
// information that may not fit directly into the standard payload structure,
// such as comments, calculated fields, or other metadata that can be useful for
// applications consuming the decoded frames.
export interface Extras {
comment: string;
altitude?: number;
range?: number;
phg?: IPowerHeightGain;
dfs?: IDirectionFinding;
cse?: number;
spd?: number;
fields?: Field[];
telemetry?: ITelemetry;
dao?: IDAO; // Optional DAO fields for added position precision
}

View File

@@ -1,19 +1,14 @@
export {
Frame,
Address,
Timestamp,
} from "./frame";
export {
type IAddress,
type IFrame,
DataTypeIdentifier,
} from "./frame.types";
export { Frame, Address } from "./frame";
export { type IAddress, type IFrame, DataType as DataTypeIdentifier } from "./frame.types";
export {
DataType,
type ISymbol,
type IPosition,
type ITimestamp,
type IPowerHeightGain,
type IDirectionFinding,
type PositionPayload,
type CompressedPosition,
type MicEPayload,
@@ -36,9 +31,12 @@ export {
type DFReportPayload,
type BasePayload,
type Payload,
type DecodedFrame,
type DecodedFrame
} from "./frame.types";
export { Position } from "./position";
export { Timestamp } from "./timestamp";
export {
base91ToNumber,
knotsToKmh,
@@ -46,12 +44,8 @@ export {
feetToMeters,
metersToFeet,
celsiusToFahrenheit,
fahrenheitToCelsius,
fahrenheitToCelsius
} from "./parser";
export {
type PacketStructure,
type PacketSegment,
type PacketField,
type PacketFieldBit,
FieldType,
} from "./parser.types";
export { getDeviceID } from "./deviceid";
export type { DeviceID } from "./deviceid";

View File

@@ -22,7 +22,7 @@ export const base91ToNumber = (str: string): number => {
}
return value;
}
};
/* Conversions from Freedom Units to whatever the rest of the world uses and understands. */
@@ -38,7 +38,7 @@ const FAHRENHEIT_TO_CELSIUS_OFFSET = 32;
*/
export const knotsToKmh = (knots: number): number => {
return knots * KNOTS_TO_KMH;
}
};
/**
* Convert speed from kilometers per hour to knots.
@@ -48,7 +48,7 @@ export const knotsToKmh = (knots: number): number => {
*/
export const kmhToKnots = (kmh: number): number => {
return kmh / KNOTS_TO_KMH;
}
};
/**
* Convert altitude from feet to meters.
@@ -58,7 +58,16 @@ export const kmhToKnots = (kmh: number): number => {
*/
export const feetToMeters = (feet: number): number => {
return feet * FEET_TO_METERS;
}
};
/**
* Convert miles to meters.
* @param miles number of miles
* @returns meters
*/
export const milesToMeters = (miles: number): number => {
return miles * 1609.344;
};
/**
* Convert altitude from meters to feet.
@@ -68,7 +77,7 @@ export const feetToMeters = (feet: number): number => {
*/
export const metersToFeet = (meters: number): number => {
return meters / FEET_TO_METERS;
}
};
/**
* Convert temperature from Celsius to Fahrenheit.
@@ -77,8 +86,8 @@ export const metersToFeet = (meters: number): number => {
* @returns equivalent temperature in Fahrenheit
*/
export const celsiusToFahrenheit = (celsius: number): number => {
return (celsius * 9/5) + FAHRENHEIT_TO_CELSIUS_OFFSET;
}
return (celsius * 9) / 5 + FAHRENHEIT_TO_CELSIUS_OFFSET;
};
/**
* Convert temperature from Fahrenheit to Celsius.
@@ -87,5 +96,5 @@ export const celsiusToFahrenheit = (celsius: number): number => {
* @returns equivalent temperature in Celsius
*/
export const fahrenheitToCelsius = (fahrenheit: number): number => {
return (fahrenheit - FAHRENHEIT_TO_CELSIUS_OFFSET) * 5/9;
}
return ((fahrenheit - FAHRENHEIT_TO_CELSIUS_OFFSET) * 5) / 9;
};

View File

@@ -1,37 +0,0 @@
export enum FieldType {
BITS = 0,
UINT8 = 1,
UINT16_LE = 2,
UINT16_BE = 3,
UINT32_LE = 4,
UINT32_BE = 5,
BYTES = 6, // 8-bits per value
WORDS = 7, // 16-bits per value
DWORDS = 8, // 32-bits per value
QWORDS = 9, // 64-bits per value
STRING = 10,
C_STRING = 11, // Null-terminated string
CHAR = 12, // Single ASCII character
}
// Interface for the parsed packet segments, used for debugging and testing.
export type PacketStructure = PacketSegment[];
export interface PacketSegment {
name: string;
data: Uint8Array;
fields: PacketField[];
}
export interface PacketField {
type: FieldType;
size: number; // Size in bytes
name?: string;
bits?: PacketFieldBit[]; // Only for bit fields in FieldType.BITS
value?: any; // Optional decoded value
}
export interface PacketFieldBit {
name: string;
size: number; // Size in bits
}

View File

@@ -0,0 +1,71 @@
import { FieldType, type Segment } from "@hamradio/packet";
import { DataType, type Payload, type StationCapabilitiesPayload } from "./frame.types";
/**
 * Decode a Station Capabilities payload (data type identifier '<').
 *
 * The text after the '<' identifier is split into individual capability
 * tokens on commas, semicolons or whitespace. A trailing '>' wrapper, used
 * by some implementations, is stripped before tokenizing.
 *
 * @param raw The raw APRS payload, including the leading '<' identifier.
 * @param withStructure When true, also return a dissection: one segment for
 *   the whole capability list plus one segment per capability token.
 * @returns The decoded payload (or null when the payload is too short or
 *   decoding throws), and optionally the structural segments.
 */
export const decodeCapabilitiesPayload = (
  raw: string,
  withStructure: boolean = false
): {
  payload: Payload | null;
  segment?: Segment[];
} => {
  try {
    // Need at least the '<' identifier plus one character of content.
    if (raw.length < 2) return { payload: null };

    // Extract the text after the '<' identifier.
    let rest = raw.substring(1).trim();

    // Some implementations include a closing '>' or other trailing chars;
    // strip the common wrapper.
    if (rest.endsWith(">")) rest = rest.slice(0, -1).trim();

    // Split capabilities by commas, semicolons or whitespace.
    const tokens = rest
      .split(/[,;\s]+/)
      .map((t) => t.trim())
      .filter(Boolean);

    const payload: StationCapabilitiesPayload = {
      type: DataType.StationCapabilities,
      capabilities: tokens
    };

    if (!withStructure) return { payload };

    // Reuse one encoder for all segments instead of allocating per token.
    const encoder = new TextEncoder();
    const toSegment = (name: string, text: string): Segment => ({
      name,
      data: encoder.encode(text).buffer,
      isString: true,
      fields: [
        {
          type: FieldType.STRING,
          name,
          length: text.length
        }
      ]
    });

    const segments: Segment[] = [
      toSegment("capabilities", rest),
      ...tokens.map((cap) => toSegment("capability", cap))
    ];

    return { payload, segment: segments };
  } catch {
    // Malformed payloads are reported as undecodable rather than throwing.
    return { payload: null };
  }
};
export default decodeCapabilitiesPayload;

662
src/payload.extras.ts Normal file
View File

@@ -0,0 +1,662 @@
import { type Field, FieldType } from "@hamradio/packet";
import type { Extras, IDAO, ITelemetry, Payload } from "./frame.types";
import { base91ToNumber, feetToMeters, knotsToKmh, milesToMeters } from "./parser";
/**
* Decodes structured extras from an APRS comment string, extracting known tokens
* for altitude, range, PHG, DFS, course/speed, and embedded telemetry, and
* returns an object with the extracted values and a cleaned comment string with
* the tokens removed.
*
* If withStructure is true, also returns an array of fields representing the
* structure of the extras for use in structured packet parsing.
*
* @param comment The APRS comment string to decode.
* @param withStructure Whether to include structured fields in the result.
* @returns An object containing the decoded extras and the cleaned comment string.
*/
export const decodeCommentExtras = (comment: string, withStructure: boolean = false): Extras => {
  if (!comment || comment.length === 0) return { comment };
  const extras: Partial<Extras> = {};
  const fields: FieldWithOffset[] = [];
  const ignore: {
    offset: number;
    length: number;
  }[] = []; // Tracks offsets of tokens to ignore when reconstructing comment
  // eslint-disable-next-line no-useless-assignment
  let match: RegExpMatchArray | null = null;
  let offset = 0; // Tracks the current offset in the original comment string for field positioning
  let cutoff = comment.length; // Tracks the offset of the altitude token for relative positioning of subsequent fields
  // Process the DAO (precision and datum option); if it is present it marks the
  // cutoff for subsequent fields since it is typically at the end of the comment
  // and relative to the position.
  // NOTE(review): the pattern matches ANY three characters between '!' marks,
  // not only valid DAO bodies; decodeDAO() is what rejects invalid bodies.
  if ((match = comment.match(/!(...)!/))) {
    const dao = decodeDAO(match[1]);
    extras.dao = dao;
    // Set cutoff to DAO token for subsequent fields to be relative to it (since it is typically at the end of the comment).
    cutoff = comment.indexOf(match[0]);
    ignore.push({ offset: cutoff, length: match[0].length });
    if (withStructure) {
      fields.push(
        {
          type: FieldType.CHAR,
          name: "DAO marker",
          length: 1,
          value: "!",
          offset: cutoff
        },
        {
          type: FieldType.STRING,
          name: "DAO data",
          length: 3,
          value: match[1],
          offset: cutoff + 1
        },
        {
          type: FieldType.CHAR,
          name: "DAO end marker",
          length: 1,
          value: "!",
          offset: cutoff + 4
        }
      );
    }
    // Mask DAO token in comment for further parsing
    comment = comment.replace(match[0], "X".repeat(match[0].length));
  }
  // Process the altitude marker, because it may appear anywhere in the comment
  // and we want to extract it and its value before processing other tokens
  // that may be present.
  //
  // /A=NNNNNN -> altitude in feet (6 digits)
  // /A=-NNNNN -> altitude in feet with leading minus for negative altitudes (5 digits)
  if ((match = comment.substring(0, cutoff).match(/\/A=(-\d{5}|\d{6})/))) {
    const altitude = feetToMeters(parseInt(match[1], 10)); // feet to meters
    if (!isNaN(altitude)) {
      extras.altitude = altitude;
      // Keep track of where the altitude token appears in the comment for structure purposes.
      const altitudeOffset = comment.indexOf(match[0]);
      ignore.push({ offset: altitudeOffset, length: match[0].length });
      if (altitudeOffset === 0) {
        offset = match[0].length; // Set offset to altitude token for subsequent fields to be relative to it (since we will remove it from the comment)
      } else if (cutoff > altitudeOffset) {
        cutoff = altitudeOffset; // Set cutoff for subsequent fields to be relative to altitude token if it appears before them in the comment
      }
      if (withStructure) {
        fields.push(
          {
            type: FieldType.STRING,
            name: "altitude marker",
            data: new TextEncoder().encode("/A=").buffer,
            value: "/A=",
            length: 3,
            offset: altitudeOffset
          },
          {
            type: FieldType.STRING,
            name: "altitude",
            data: new TextEncoder().encode(match[1]).buffer,
            value: altitude.toFixed(1) + "m",
            length: 6,
            offset: altitudeOffset + 3
          }
        );
      }
      // Mask in comment (for debugging)
      comment = comment.replace(match[0], "X".repeat(match[0].length)); // Remove altitude token from comment for further parsing
    }
  }
  // Next we process any inline telemetry comment, which is delimited by |...| and can appear anywhere in the comment. We want to extract it before processing other tokens that may be present.
  if ((match = comment.substring(offset, cutoff).match(/\|([^|]+)\|/))) {
    try {
      const telemetry = decodeTelemetry(match[1]);
      extras.telemetry = telemetry;
      const telemetryOffset = comment.indexOf(match[0]);
      // NOTE(review): telemetryOffset comes from comment.indexOf() on the FULL
      // comment and is therefore already absolute; adding `offset` appears to
      // double-count when a leading token (e.g. /A=) was consumed — verify.
      ignore.push({ offset: telemetryOffset + offset, length: match[0].length });
      // NOTE(review): when offset > 0, a telemetry block at the current parse
      // position has telemetryOffset === offset, never 0 — confirm that this
      // comparison (also a loose ==) is intended.
      if (telemetryOffset == 0) {
        offset += match[0].length; // Set offset to telemetry token for subsequent fields to be relative to it (since we will remove it from the comment)
      } else if (cutoff > telemetryOffset) {
        cutoff = telemetryOffset; // Set cutoff for subsequent fields to be relative to telemetry token if it appears before them in the comment
      }
      if (withStructure) {
        fields.push(
          {
            type: FieldType.CHAR,
            name: "telemetry start",
            length: 1,
            value: "|",
            offset: telemetryOffset
          },
          {
            type: FieldType.STRING,
            name: "sequence",
            length: 2,
            value: telemetry.sequence.toString(),
            offset: telemetryOffset + 1
          },
          ...telemetry.analog.map((a, i) => ({
            type: FieldType.STRING,
            name: `analog${i + 1}`,
            length: 2,
            value: a.toString(),
            offset: telemetryOffset + 3 + i * 2
          })),
          ...(telemetry.digital !== undefined
            ? [
                {
                  type: FieldType.STRING,
                  name: "digital",
                  length: 2,
                  value: telemetry.digital.toString(),
                  offset: telemetryOffset + 3 + telemetry.analog.length * 2
                }
              ]
            : []),
          {
            type: FieldType.CHAR,
            name: "telemetry end",
            length: 1,
            value: "|",
            offset: telemetryOffset + match[1].length + 1
          }
        );
      }
      // Mask telemetry token in comment for further parsing
      comment = comment.replace(match[0], "X".repeat(match[0].length));
    } catch {
      // Invalid telemetry format, ignore
    }
  }
  // Process successive 7-byte data extensions at the start of the comment up to the first
  // non-extension token, which may be altitude, telemetry, or other tokens. These
  // extensions can appear in any order and we want to extract them all.
  let ext = comment.substring(offset, cutoff);
  while (ext.length >= 7) {
    // RNGrrrr -> pre-calculated range in miles (4 digits)
    if ((match = ext.match(/^RNG(\d{4})/))) {
      const r = match[1];
      extras.range = milesToMeters(parseInt(r, 10)) / 1000.0; // Convert to kilometers
      if (withStructure) {
        fields.push(
          {
            type: FieldType.STRING,
            name: "range marker",
            value: "RNG",
            length: 3,
            offset: offset
          },
          {
            type: FieldType.STRING,
            name: "range (rrrr)",
            length: 4,
            value: extras.range.toFixed(1) + "km",
            offset: offset + 3
          }
        );
      }
      // remove range token from ext and advance ext for further parsing
      // NOTE(review): unlike the PHG/DFS/CSE branches below, this branch does
      // not trimStart() after consuming the token — confirm intentional.
      ignore.push({ offset, length: 7 });
      offset += 7;
      ext = ext.substring(7);
      continue;
    }
    // PHGphgd
    if (!extras.phg && (match = ext.match(/^PHG([0-9 ])([0-9 ])([0-9 ])([0-9 ])/))) {
      // PHGphgd: p = power (0-9 or space), h = height (0-9 or space), g = gain (0-9 or space), d = directivity (0-9 or space)
      const p = match[1];
      const h = match[2];
      const g = match[3];
      const d = match[4];
      const pNum = parseInt(p, 10);
      // Power code p maps to p^2 watts (0->0, 1->1, 2->4, ... 9->81)
      const powerWatts = Number.isNaN(pNum) ? undefined : pNum * pNum;
      const hIndex = h.charCodeAt(0) - 48;
      // Height code h maps to 10 * 2^h feet, converted to meters below.
      // NOTE(review): a space here yields hIndex = -16 (2^-16), not undefined
      // like the other three digits — verify this asymmetry is intended.
      const heightFeet = 10 * Math.pow(2, hIndex);
      const heightMeters = feetToMeters(heightFeet);
      const gNum = parseInt(g, 10);
      const gainDbi = Number.isNaN(gNum) ? undefined : gNum;
      const dNum = parseInt(d, 10);
      // Directivity code d: 0 = omni, 1-8 = d*45 degrees, 9 = unknown
      let directivity: number | "omni" | "unknown" | undefined;
      if (Number.isNaN(dNum)) {
        directivity = undefined;
      } else if (dNum === 0) {
        directivity = "omni";
      } else if (dNum >= 1 && dNum <= 8) {
        directivity = dNum * 45;
      } else if (dNum === 9) {
        directivity = "unknown";
      }
      extras.phg = {
        power: powerWatts,
        height: heightMeters,
        gain: gainDbi,
        directivity
      };
      if (withStructure) {
        fields.push(
          {
            type: FieldType.STRING,
            name: "PHG marker",
            length: 3,
            value: "PHG",
            offset: offset
          },
          {
            type: FieldType.STRING,
            name: "power (p)",
            length: 1,
            value: powerWatts !== undefined ? powerWatts.toString() + "W" : undefined,
            offset: offset + 3
          },
          {
            type: FieldType.STRING,
            name: "height (h)",
            length: 1,
            value: heightMeters !== undefined ? heightMeters.toString() + "m" : undefined,
            offset: offset + 4
          },
          {
            type: FieldType.STRING,
            name: "gain (g)",
            length: 1,
            value: gainDbi !== undefined ? gainDbi.toString() + "dBi" : undefined,
            offset: offset + 5
          },
          {
            type: FieldType.STRING,
            name: "directivity (d)",
            length: 1,
            value:
              directivity !== undefined
                ? typeof directivity === "number"
                  ? directivity.toString() + "°"
                  : directivity
                : undefined,
            offset: offset + 6
          }
        );
      }
      // remove PHG token from ext and advance ext for further parsing
      ignore.push({ offset, length: 7 });
      offset += 7;
      ext = ext.substring(7).trimStart();
      continue;
    }
    // DFSshgd
    // NOTE(review): unlike PHG above, this only checks the "DFS" prefix and
    // does not validate the four digit positions; non-digits fall through to
    // the NaN guards below — confirm intentional.
    if (ext.startsWith("DFS")) {
      // DFSshgd: s = strength (0-9), h = height (0-9), g = gain (0-9), d = directivity (0-9)
      const s = ext.charAt(3);
      const h = ext.charAt(4);
      const g = ext.charAt(5);
      const d = ext.charAt(6);
      const sNum = parseInt(s, 10);
      const hNum = parseInt(h, 10);
      const gNum = parseInt(g, 10);
      const dNum = parseInt(d, 10);
      // Strength: s = 0-9, direct value
      const strength = Number.isNaN(sNum) ? undefined : sNum;
      // Height: h = 0-9, height = 10 * 2^h feet (spec: h is exponent)
      const heightFeet = Number.isNaN(hNum) ? undefined : 10 * Math.pow(2, hNum);
      const heightMeters = heightFeet !== undefined ? feetToMeters(heightFeet) : undefined;
      // Gain: g = 0-9, gain in dB
      const gainDbi = Number.isNaN(gNum) ? undefined : gNum;
      // Directivity: d = 0-9, 0 = omni, 1-8 = d*45°, 9 = unknown
      let directivity: number | "omni" | "unknown" | undefined;
      if (Number.isNaN(dNum)) {
        directivity = undefined;
      } else if (dNum === 0) {
        directivity = "omni";
      } else if (dNum >= 1 && dNum <= 8) {
        directivity = dNum * 45;
      } else if (dNum === 9) {
        directivity = "unknown";
      }
      extras.dfs = {
        strength,
        height: heightMeters,
        gain: gainDbi,
        directivity
      };
      if (withStructure) {
        fields.push(
          {
            type: FieldType.STRING,
            name: "DFS marker",
            length: 3,
            value: "DFS",
            offset: offset
          },
          {
            type: FieldType.STRING,
            name: "strength (s)",
            length: 1,
            value: strength !== undefined ? strength.toString() : undefined,
            offset: offset + 3
          },
          {
            type: FieldType.STRING,
            name: "height (h)",
            length: 1,
            value: heightMeters !== undefined ? heightMeters.toString() + "m" : undefined,
            offset: offset + 4
          },
          {
            type: FieldType.STRING,
            name: "gain (g)",
            length: 1,
            value: gainDbi !== undefined ? gainDbi.toString() + "dBi" : undefined,
            offset: offset + 5
          },
          {
            type: FieldType.STRING,
            name: "directivity (d)",
            length: 1,
            value:
              directivity !== undefined
                ? typeof directivity === "number"
                  ? directivity.toString() + "°"
                  : directivity
                : undefined,
            offset: offset + 6
          }
        );
      }
      // remove DFS token from ext and advance ext for further parsing
      ignore.push({ offset, length: 7 });
      offset += 7;
      ext = ext.substring(7).trimStart();
      continue;
    }
    // Course/Speed DDD/SSS (7 bytes: 3 digits / 3 digits)
    if (extras.cse === undefined && /^\d{3}\/\d{3}/.test(ext)) {
      const courseStr = ext.substring(0, 3);
      const speedStr = ext.substring(4, 7); // index 3 is the '/' separator
      extras.cse = parseInt(courseStr, 10);
      extras.spd = knotsToKmh(parseInt(speedStr, 10));
      if (withStructure) {
        fields.push(
          { type: FieldType.STRING, name: "course", length: 3, value: extras.cse.toString() + "°", offset: offset },
          { type: FieldType.CHAR, name: "CSE marker", length: 1, value: "/", offset: offset + 3 },
          {
            type: FieldType.STRING,
            name: "speed",
            length: 3,
            value: extras.spd.toString() + " km/h",
            offset: offset + 4
          }
        );
      }
      // remove course/speed token from comment and advance ext for further parsing
      ignore.push({ offset, length: 7 });
      offset += 7;
      ext = ext.substring(7).trimStart();
      // If there is an 8-byte DF/NRQ following (leading '/'), parse that too
      if (ext.length >= 8 && ext.charAt(0) === "/") {
        const dfExt = ext.substring(0, 8); // e.g. /270/729
        const m = dfExt.match(/\/(\d{3})\/(\d{3})/);
        if (m) {
          const dfBearing = parseInt(m[1], 10);
          const dfStrength = parseInt(m[2], 10);
          if (extras.dfs === undefined) {
            extras.dfs = {};
          }
          extras.dfs.bearing = dfBearing;
          extras.dfs.strength = dfStrength;
          if (withStructure) {
            fields.push(
              { type: FieldType.STRING, name: "DFS marker", length: 1, value: "/", offset: offset },
              {
                type: FieldType.STRING,
                name: "bearing",
                length: 3,
                value: dfBearing.toString() + "°",
                offset: offset + 1
              },
              { type: FieldType.CHAR, name: "separator", length: 1, value: "/", offset: offset + 4 },
              { type: FieldType.STRING, name: "strength", length: 3, value: dfStrength.toString(), offset: offset + 5 }
            );
          }
          // remove DF token from ext and advance ext for further parsing
          ignore.push({ offset, length: 8 });
          offset += 8;
          ext = ext.substring(8).trimStart();
          continue;
        }
      }
      continue;
    }
    // No recognized 7+-byte extension at start
    break;
  }
  // Any tokens we marked for ignoring should be removed from the comment when reconstructing the cleaned comment string, since they have been extracted as structured fields. We will use the ignore offsets to skip over these tokens when reconstructing the comment.
  ignore.sort((a, b) => a.offset - b.offset);
  offset = 0; // reused from here on as the reconstruction cursor
  ignore.forEach((token) => {
    // Emit a "comment" field for the gap between the previous token and this one.
    if (token.offset - offset > 0) {
      fields.push({
        type: FieldType.STRING,
        name: "comment",
        offset: offset,
        length: token.offset - offset
      });
    }
    offset = token.offset + token.length;
  });
  // Emit the trailing fragment after the last ignored token, if any.
  if (offset < comment.length) {
    fields.push({
      type: FieldType.STRING,
      name: "comment",
      offset,
      length: comment.length - offset
    });
  }
  // Aggregate the comment fragments into a single cleaned comment string with the recognized tokens removed.
  fields.sort((a, b) => a.offset - b.offset); // Ensure fields are in order of appearance in the original comment
  extras.comment = fields
    .map((field) => {
      if (field.name !== "comment" || field.offset === undefined || field.length === undefined) {
        return ""; // Remove recognized tokens from comment
      }
      return comment.substring(field.offset, field.offset + field.length);
    })
    .join("")
    .trim();
  if (withStructure) {
    extras.fields = fields;
  }
  return extras as Extras;
};
// A structure field annotated with its absolute character offset within the
// original comment string, so decodeCommentExtras can later reassemble the
// cleaned comment from the unconsumed fragments.
interface FieldWithOffset extends Field {
  offset: number;
}
/**
 * Merges extras decoded from a comment (DAO, altitude, range, PHG, DFS,
 * course, speed) into a payload, mutating it in place.
 *
 * For payloads with a nested position, DAO corrections are applied to the
 * coordinates and the remaining extras are copied onto the position
 * (altitude/range/phg/dfs overwrite, course/speed only fill gaps — presumably
 * because the position parser may already have set course/speed).
 * Top-level payload fields are only ever filled when currently undefined.
 *
 * @param payload The decoded payload to enrich (mutated in place).
 * @param extras The extras previously returned by decodeCommentExtras.
 */
export const attachExtras = (payload: Payload, extras: Extras): void => {
  if ("position" in payload && payload.position) {
    if (extras.dao !== undefined) {
      payload.position.dao = extras.dao;
      // Apply the DAO precision correction away from zero so it refines the
      // coordinate within the hemisphere it already points to.
      if (payload.position.latitude !== undefined) {
        if (payload.position.latitude < 0) {
          payload.position.latitude -= extras.dao.latitude || 0;
        } else {
          payload.position.latitude += extras.dao.latitude || 0;
        }
      }
      if (payload.position.longitude !== undefined) {
        if (payload.position.longitude < 0) {
          payload.position.longitude -= extras.dao.longitude || 0;
        } else {
          payload.position.longitude += extras.dao.longitude || 0;
        }
      }
    }
    if (extras.altitude !== undefined) {
      payload.position.altitude = extras.altitude;
    }
    if (extras.range !== undefined) {
      payload.position.range = extras.range;
    }
    if (extras.phg !== undefined) {
      payload.position.phg = extras.phg;
    }
    if (extras.dfs !== undefined) {
      payload.position.dfs = extras.dfs;
    }
    if (extras.cse !== undefined && payload.position.course === undefined) {
      payload.position.course = extras.cse;
    }
    if (extras.spd !== undefined && payload.position.speed === undefined) {
      payload.position.speed = extras.spd;
    }
  }
  // Fix: guard on extras.dao like every other branch here, so we never create
  // an explicit undefined-valued assignment when no DAO was decoded.
  if ("dao" in payload && payload.dao === undefined && extras.dao !== undefined) {
    payload.dao = extras.dao;
  }
  if ("altitude" in payload && payload.altitude === undefined && extras.altitude !== undefined) {
    payload.altitude = extras.altitude;
  }
  if ("range" in payload && payload.range === undefined && extras.range !== undefined) {
    payload.range = extras.range;
  }
  if ("phg" in payload && payload.phg === undefined && extras.phg !== undefined) {
    payload.phg = extras.phg;
  }
  if ("dfs" in payload && payload.dfs === undefined && extras.dfs !== undefined) {
    payload.dfs = extras.dfs;
  }
  if ("course" in payload && payload.course === undefined && extras.cse !== undefined) {
    payload.course = extras.cse;
  }
  if ("speed" in payload && payload.speed === undefined && extras.spd !== undefined) {
    payload.speed = extras.spd;
  }
};
/**
 * Decodes the 3-character body of a !DAO! (Datum And Offset) extension.
 *
 * Supported forms:
 *  - Uppercase datum letter + two decimal digits (human-readable offsets).
 *  - Lowercase datum letter + two Base91 characters (finer resolution).
 *  - Single datum character followed by two spaces (datum only, no offsets).
 *
 * Offsets are converted from hundredths of a minute to fractional degrees.
 * NOTE(review): the resolution is derived from fixed constants (3 / 4)
 * rather than the encoded digit range — flagged for review.
 *
 * @param ext The three characters between the '!' delimiters.
 * @returns The decoded DAO, or undefined when the body is not a valid DAO.
 */
export const decodeDAO = (ext: string): IDAO | undefined => {
  if (ext.length !== 3) return undefined;
  const human = /^([A-Z])(\d)(\d)$/.exec(ext);
  if (human) {
    // Human-readable form: datum letter plus two decimal digits.
    return {
      datum_id: human[1],
      resolution: getDAOResolution(3),
      latitude: (parseInt(human[2], 10) * 0.01) / 60, // hundredths of a minute -> degrees
      longitude: (parseInt(human[3], 10) * 0.01) / 60
    };
  }
  const base91 = /^([a-z])([\x21-\x7b])([\x21-\x7b])$/.exec(ext);
  if (base91) {
    // Base91-encoded form: lowercase datum letter, higher resolution offsets.
    return {
      datum_id: base91[1].toUpperCase(),
      resolution: getDAOResolution(4),
      latitude: (base91ToNumber(base91[2]) * 0.01) / 60,
      longitude: (base91ToNumber(base91[3]) * 0.01) / 60
    };
  }
  const datumOnly = /^([\x21-\x7b]) {2}$/.exec(ext);
  if (datumOnly) {
    // Datum-only form: no offsets, just the (normalized) datum identifier.
    const id = datumOnly[1];
    return { datum_id: /^[a-z]$/.test(id) ? id.toUpperCase() : id };
  }
  return undefined; // Not a recognized DAO body
};
// Derives the positional resolution implied by n encoded digits.
// NOTE(review): knotsToKmh applies a speed-unit conversion (×1.852) to what
// appears to be an angular/positional resolution — presumably reused as a
// generic ×1.852 scale factor, but verify this is intentional. The `n <= -2`
// branch is also unreachable from the current call sites (n is 3 or 4).
const getDAOResolution = (n: number): number | undefined => {
  return knotsToKmh((n <= -2 ? 600 : 1000) * 10 ** (-1 * n));
};
/**
 * Decodes a Base91 telemetry extension (the text between '|' delimiters).
 *
 * Layout: 2 chars sequence counter, then up to five 2-char analog channels,
 * then — only when all five analogs are present (total length 14) — a final
 * 2-char digital-bits pair.
 *
 * @param ext The string between the '|' delimiters (e.g. 'ss11', 'ss112233', 'ss1122334455!"')
 * @returns An object with sequence, analog (array), and optional digital (number)
 * @throws Error when the extension is shorter than 4 chars or has odd length.
 */
export const decodeTelemetry = (ext: string): ITelemetry => {
  if (!ext || ext.length < 4) throw new Error("Telemetry extension too short");
  if (ext.length % 2 !== 0) throw new Error("Telemetry extension must have even length");
  // First pair is always the sequence counter.
  const sequence = base91ToNumber(ext.slice(0, 2));
  // At most five analog channels follow, one 2-char pair each.
  const pairCount = Math.min((ext.length - 2) >> 1, 5);
  const analog = Array.from({ length: pairCount }, (_, channel) =>
    base91ToNumber(ext.slice(2 + channel * 2, 4 + channel * 2))
  );
  // A 14-char extension carries a trailing digital-bits pair after 5 analogs.
  const digital = ext.length === 14 ? base91ToNumber(ext.slice(12, 14)) : undefined;
  return {
    sequence,
    analog,
    digital
  };
};

149
src/payload.item.ts Normal file
View File

@@ -0,0 +1,149 @@
import { FieldType, type Segment } from "@hamradio/packet";
import { DO_NOT_ARCHIVE_MARKER, DataType, type IPosition, type ItemPayload, type Payload } from "./frame.types";
import { attachExtras, decodeCommentExtras } from "./payload.extras";
import { isCompressedPosition, parseCompressedPosition, parseUncompressedPosition } from "./payload.position";
import Timestamp from "./timestamp";
/**
 * Decodes an APRS Item payload (data type identifier ')').
 *
 * Layout parsed here: ')' + 9-char name field + state char ('*' alive,
 * '_' killed) + 7-char timestamp + compressed (13 bytes) or uncompressed
 * (19 bytes) position + optional comment carrying extras.
 *
 * NOTE(review): per APRS 1.01 an Item has no timestamp (position follows the
 * state char directly); this decoder expects an Object-style timestamp —
 * confirm against the frames it is meant to parse.
 *
 * @param raw The raw payload text including the leading ')'.
 * @param withStructure Whether to also emit structured segments.
 * @returns The decoded payload (null when malformed) plus optional segments.
 */
export const decodeItemPayload = (
  raw: string,
  withStructure: boolean = false
): {
  payload: Payload | null;
  segment?: Segment[];
} => {
  // Item format is similar to Object but name may be 3-9 chars (stored in a 9-char field)
  // Example: )NNN... where ) is data type, next 9 chars are name, then state char, then timestamp, then position
  if (raw.length < 12) return { payload: null }; // minimal: 1 + 3 + 1 + 7
  let offset = 1; // skip data type identifier ')'
  const segment: Segment[] = [];
  // Read 9-char name field (pad/truncate as present)
  const rawName = raw.substring(offset, offset + 9);
  const name = rawName.trimEnd();
  if (withStructure) {
    segment.push({
      name: "item name",
      data: new TextEncoder().encode(rawName).buffer,
      isString: true,
      fields: [{ type: FieldType.STRING, name: "name", length: 9 }]
    });
  }
  offset += 9;
  // State character: '*' = alive, '_' = killed
  const stateChar = raw.charAt(offset);
  if (stateChar !== "*" && stateChar !== "_") {
    return { payload: null };
  }
  const alive = stateChar === "*";
  if (withStructure) {
    segment.push({
      name: "item state",
      data: new TextEncoder().encode(stateChar).buffer,
      isString: true,
      fields: [
        {
          type: FieldType.CHAR,
          name: "State (* alive, _ killed)",
          length: 1
        }
      ]
    });
  }
  offset += 1;
  // Timestamp (7 chars).
  // BUG FIX: timeStr is already the extracted 7-char slice; the previous
  // timeStr.substring(offset) re-applied the absolute offset (10 here) to the
  // 7-char string, always yielding "", so the timestamp never parsed and
  // every Item payload was rejected.
  const timeStr = raw.substring(offset, offset + 7);
  const { timestamp, segment: timestampSection } = Timestamp.fromString(timeStr, withStructure);
  if (!timestamp) return { payload: null };
  if (timestampSection) segment.push(timestampSection);
  offset += 7;
  const isCompressed = isCompressedPosition(raw.substring(offset));
  let position: IPosition;
  let consumed: number;
  if (isCompressed) {
    const { position: compressed, segment: compressedSection } = parseCompressedPosition(
      raw.substring(offset),
      withStructure
    );
    if (!compressed) return { payload: null };
    position = {
      latitude: compressed.latitude,
      longitude: compressed.longitude,
      symbol: compressed.symbol,
      altitude: compressed.altitude
    };
    consumed = 13; // compressed positions are a fixed 13 bytes
    if (compressedSection) segment.push(compressedSection);
  } else {
    const { position: uncompressed, segment: uncompressedSection } = parseUncompressedPosition(
      raw.substring(offset),
      withStructure
    );
    if (!uncompressed) return { payload: null };
    position = {
      latitude: uncompressed.latitude,
      longitude: uncompressed.longitude,
      symbol: uncompressed.symbol,
      ambiguity: uncompressed.ambiguity
    };
    consumed = 19; // uncompressed positions are a fixed 19 bytes
    if (uncompressedSection) segment.push(uncompressedSection);
  }
  offset += consumed;
  const remainder = raw.substring(offset);
  const doNotArchive = remainder.includes(DO_NOT_ARCHIVE_MARKER);
  let comment = remainder;
  // Extract extras (altitude, PHG, telemetry, DAO, ...) from the comment text.
  const extras = decodeCommentExtras(comment, withStructure);
  comment = extras.comment;
  if (comment) {
    position.comment = comment;
    if (withStructure) {
      segment.push({
        name: "comment",
        data: new TextEncoder().encode(remainder).buffer,
        isString: true,
        fields: extras.fields || []
      });
    }
  } else if (withStructure && extras.fields) {
    // No free-text comment, but extras fields exist: emit comment-only segment
    segment.push({
      name: "comment",
      data: new TextEncoder().encode(remainder).buffer,
      isString: true,
      fields: extras.fields || []
    });
  }
  const payload: ItemPayload = {
    type: DataType.Item,
    doNotArchive,
    name,
    alive,
    position
  };
  attachExtras(payload, extras);
  if (withStructure) {
    return { payload, segment };
  }
  return { payload };
};
export default decodeItemPayload;

94
src/payload.message.ts Normal file
View File

@@ -0,0 +1,94 @@
import { FieldType, type Segment } from "@hamradio/packet";
import { DO_NOT_ARCHIVE_MARKER, DataType, type MessagePayload, type Payload } from "./frame.types";
/**
 * Decodes an APRS Message payload (data type ':').
 *
 * Format: :AAAAAAAAA:message text — a 9-character, space-padded addressee
 * field enclosed in ':' separators, followed by the message text taken
 * verbatim (the text may itself contain ':').
 *
 * @param rawPayload The raw payload text including the leading ':'.
 * @param withStructure Whether to also emit structured segments.
 * @returns The decoded payload (null when too short) plus optional segments.
 */
export const decodeMessagePayload = (
  rawPayload: string,
  withStructure: boolean = false
): {
  payload: Payload | null;
  segment?: Segment[];
} => {
  if (rawPayload.length < 2) return { payload: null };
  let offset = 1; // skip ':' data type identifier
  const segments: Segment[] = [];
  let recipient = "";
  let separatorConsumed = false;
  if (rawPayload.length >= offset + 1) {
    // Read up to 9 chars for the addressee, stopping early at a ':' separator
    // (tolerates non-space-padded addressee fields).
    const look = rawPayload.substring(offset, Math.min(offset + 9, rawPayload.length));
    const sepIdx = look.indexOf(":");
    const raw = sepIdx !== -1 ? look.substring(0, sepIdx) : look;
    recipient = raw.trimEnd();
    if (withStructure) {
      segments.push({
        name: "recipient",
        data: new TextEncoder().encode(raw).buffer,
        isString: true,
        fields: [{ type: FieldType.STRING, name: "to", length: 9 }]
      });
    }
    offset += raw.length;
    // Skip the ':' separator between the addressee field and the text. (When
    // sepIdx was found, the char at `offset` is exactly that ':'.)
    if (rawPayload.charAt(offset) === ":") {
      offset += 1;
      separatorConsumed = true;
    }
  }
  let text = "";
  if (separatorConsumed) {
    // BUG FIX: the text is everything after the separator we just consumed.
    // The previous code searched for ANOTHER ':' from here, truncating any
    // message text that itself contained a colon.
    text = rawPayload.substring(offset);
  } else {
    // Malformed payload without an explicit separator: fall back to the first
    // ':' after the addressee, or skip spaces and take the remainder.
    let textStart = rawPayload.indexOf(":", offset);
    if (textStart === -1) {
      while (rawPayload.charAt(offset) === " " && offset < rawPayload.length) offset += 1;
      textStart = offset - 1;
    }
    if (textStart + 1 <= rawPayload.length) {
      text = rawPayload.substring(textStart + 1);
    }
  }
  const doNotArchive = text.includes(DO_NOT_ARCHIVE_MARKER);
  const payload: MessagePayload = {
    type: DataType.Message,
    variant: "message",
    doNotArchive,
    addressee: recipient,
    text
  };
  if (withStructure) {
    // Emit text section
    segments.push({
      name: "text",
      data: new TextEncoder().encode(text).buffer,
      isString: true,
      fields: [{ type: FieldType.STRING, name: "text", length: text.length }]
    });
    return { payload, segment: segments };
  }
  return { payload };
};
export default decodeMessagePayload;

300
src/payload.mice.ts Normal file
View File

@@ -0,0 +1,300 @@
import { FieldType, type Segment } from "@hamradio/packet";
import { base91ToNumber, knotsToKmh } from ".";
import { DO_NOT_ARCHIVE_MARKER, DataType, type IAddress, MicEPayload, type Payload } from "./frame.types";
import { attachExtras, decodeCommentExtras } from "./payload.extras";
/**
 * Decodes a Mic-E payload ('`' current / "'" old data type identifiers).
 *
 * Mic-E packs the latitude, message bits and longitude flags into the AX.25
 * destination address, and the longitude, speed/course and symbol into the
 * information field; an optional Base91 altitude and comment extras follow.
 *
 * @param destination The AX.25 destination address carrying the encoded latitude.
 * @param raw The raw information field including the data type identifier.
 * @param withStructure Whether to also emit structured segments.
 * @returns The decoded payload (null when malformed) plus optional segments.
 */
export const decodeMicEPayload = (
  destination: IAddress,
  raw: string,
  withStructure: boolean = false
): {
  payload: Payload | null;
  segment?: Segment[];
} => {
  try {
    // Mic-E encodes position in both destination address and information field
    const dest = destination.call;
    if (dest.length < 6) return { payload: null };
    if (raw.length < 9) return { payload: null }; // Need at least data type + 8 bytes
    const segments: Segment[] = [];
    // Decode latitude from destination address (6 characters)
    const latResult = decodeMicELatitude(dest);
    if (!latResult) return { payload: null };
    const { latitude, messageType, longitudeOffset, isWest, isStandard } = latResult;
    if (withStructure) {
      segments.push({
        name: "mic-E destination",
        data: new TextEncoder().encode(dest).buffer,
        isString: true,
        fields: [
          {
            type: FieldType.STRING,
            name: "destination",
            length: dest.length
          }
        ]
      });
    }
    // Parse information field (skip data type identifier at position 0)
    let offset = 1;
    // Longitude: 3 bytes (degrees, minutes, hundredths), each stored as value + 28
    const lonDegRaw = raw.charCodeAt(offset) - 28;
    const lonMinRaw = raw.charCodeAt(offset + 1) - 28;
    const lonHunRaw = raw.charCodeAt(offset + 2) - 28;
    offset += 3;
    // Apply longitude offset and hemisphere
    let lonDeg = lonDegRaw;
    if (longitudeOffset) {
      lonDeg += 100;
    }
    if (lonDeg >= 180 && lonDeg <= 189) {
      lonDeg -= 80;
    } else if (lonDeg >= 190 && lonDeg <= 199) {
      lonDeg -= 190;
    }
    // BUG FIX: the Mic-E encoder adds 60 to longitude minutes below 10 so the
    // byte stays printable; decoded minutes >= 60 must be reduced by 60.
    const lonMin = lonMinRaw >= 60 ? lonMinRaw - 60 : lonMinRaw;
    let longitude = lonDeg + lonMin / 60.0 + lonHunRaw / 6000.0;
    if (isWest) {
      longitude = -longitude;
    }
    // Speed and course: 3 bytes
    const sp = raw.charCodeAt(offset) - 28;
    const dc = raw.charCodeAt(offset + 1) - 28;
    const se = raw.charCodeAt(offset + 2) - 28;
    offset += 3;
    let speed = sp * 10 + Math.floor(dc / 10); // Speed in knots
    let course = (dc % 10) * 100 + se; // Course in degrees
    if (course >= 400) course -= 400;
    if (speed >= 800) speed -= 800;
    // Convert speed from knots to km/h
    const speedKmh = knotsToKmh(speed);
    // Symbol code and table
    if (raw.length < offset + 2) return { payload: null };
    const symbolCode = raw.charAt(offset);
    const symbolTable = raw.charAt(offset + 1);
    offset += 2;
    // Parse remaining data (altitude, comment, telemetry)
    const remaining = raw.substring(offset);
    const doNotArchive = remaining.includes(DO_NOT_ARCHIVE_MARKER);
    let altitude: number | undefined = undefined;
    let comment = remaining;
    // Check for altitude in old format: 3 Base91 chars followed by '}'
    if (comment.length >= 4 && comment.charAt(3) === "}") {
      try {
        const altBase91 = comment.substring(0, 3);
        altitude = base91ToNumber(altBase91) - 10000; // Relative to 10km below mean sea level
        comment = comment.substring(4); // Remove altitude token from comment
      } catch {
        // Ignore altitude parsing errors
      }
    }
    // Parse RNG/PHG tokens from comment (defer attaching to result until created)
    const remainder = comment; // Use the remaining comment text for parsing extras
    const extras = decodeCommentExtras(remainder, withStructure);
    comment = extras.comment;
    let payloadType: DataType.MicE | DataType.MicEOld;
    switch (raw.charAt(0)) {
      case "`":
        payloadType = DataType.MicE;
        break;
      case "'":
        payloadType = DataType.MicEOld;
        break;
      default:
        return { payload: null };
    }
    const result: MicEPayload = {
      type: payloadType,
      doNotArchive,
      position: {
        latitude,
        longitude,
        symbol: {
          table: symbolTable,
          code: symbolCode
        }
      },
      messageType,
      isStandard
    };
    if (speed > 0) {
      result.position.speed = speedKmh;
    }
    if (course > 0 && course < 360) {
      result.position.course = course;
    }
    if (altitude !== undefined) {
      result.position.altitude = altitude;
    }
    if (comment) {
      result.position.comment = comment;
    }
    // Attach parsed extras if present
    attachExtras(result, extras);
    if (withStructure) {
      // Information field section (bytes after data type up to comment)
      const infoData = raw.substring(1, offset);
      segments.push({
        name: "mic-E info",
        data: new TextEncoder().encode(infoData).buffer,
        isString: true,
        fields: [
          { type: FieldType.CHAR, name: "longitude deg", length: 1 },
          { type: FieldType.CHAR, name: "longitude min", length: 1 },
          { type: FieldType.CHAR, name: "longitude hundredths", length: 1 },
          { type: FieldType.CHAR, name: "speed byte", length: 1 },
          { type: FieldType.CHAR, name: "course byte 1", length: 1 },
          { type: FieldType.CHAR, name: "course byte 2", length: 1 },
          { type: FieldType.CHAR, name: "symbol code", length: 1 },
          { type: FieldType.CHAR, name: "symbol table", length: 1 }
        ]
      });
      if (extras.fields || (comment && comment.length > 0)) {
        segments.push({
          name: "comment",
          data: new TextEncoder().encode(remainder).buffer,
          isString: true,
          fields: extras.fields || []
        });
      }
      return { payload: result, segment: segments };
    }
    return { payload: result };
  } catch {
    return { payload: null };
  }
};
/**
 * Decodes the Mic-E latitude, message type and longitude flags from the
 * 6-character destination address field (APRS 1.01, chapter 10).
 *
 * Each destination character encodes one latitude digit plus one message/flag
 * bit. Chars 1-3 carry the message bits (A/B/C), char 4 the N/S bit
 * (1 = North), char 5 the +100° longitude-offset bit and char 6 the E/W bit
 * (1 = West).
 *
 * BUG FIX vs previous version: the N/S sense was inverted, the
 * longitude-offset and W/E bits were swapped, the message-type table was
 * reversed (ABC = 111 is "M0: Off Duty", 000 is "Emergency"), and
 * standard/custom was derived from a bit value instead of the character set.
 * Verified against the spec example dest "S32U6T" → 33°25.64′ N, no offset,
 * West, M3 Returning, standard.
 *
 * @param dest The destination callsign (first 6 characters are decoded).
 * @returns Decoded latitude/flags, or null on an invalid character or short input.
 */
const decodeMicELatitude = (
  dest: string
): {
  latitude: number;
  messageType: string;
  longitudeOffset: boolean;
  isWest: boolean;
  isStandard: boolean;
} | null => {
  if (dest.length < 6) return null;
  const digits: number[] = [];
  const messageBits: number[] = [];
  const customBits: boolean[] = []; // true when the char came from the custom set A-K
  for (let i = 0; i < 6; i++) {
    const code = dest.charCodeAt(i);
    if (code >= 48 && code <= 57) {
      // '0'-'9': digit, bit 0
      digits.push(code - 48);
      messageBits.push(0);
      customBits.push(false);
    } else if (code >= 65 && code <= 74) {
      // 'A'-'J': digit 0-9, bit 1 (custom message set)
      digits.push(code - 65);
      messageBits.push(1);
      customBits.push(true);
    } else if (code === 75) {
      // 'K': ambiguity space, bit 1 (custom)
      digits.push(0);
      messageBits.push(1);
      customBits.push(true);
    } else if (code === 76) {
      // 'L': ambiguity space, bit 0
      digits.push(0);
      messageBits.push(0);
      customBits.push(false);
    } else if (code >= 80 && code <= 89) {
      // 'P'-'Y': digit 0-9, bit 1 (standard message set)
      digits.push(code - 80);
      messageBits.push(1);
      customBits.push(false);
    } else if (code === 90) {
      // 'Z': ambiguity space, bit 1 (standard)
      digits.push(0);
      messageBits.push(1);
      customBits.push(false);
    } else {
      return null; // Invalid character
    }
  }
  // Decode latitude: format is DDMM.HH (degrees, minutes, hundredths)
  const latDeg = digits[0] * 10 + digits[1];
  const latMin = digits[2] * 10 + digits[3];
  const latHun = digits[4] * 10 + digits[5];
  let latitude = latDeg + latMin / 60.0 + latHun / 6000.0;
  // Char 4: 1 = North, 0 = South; char 5: 1 = +100° longitude offset;
  // char 6: 1 = West, 0 = East.
  const isNorth = messageBits[3] === 1;
  const longitudeOffset = messageBits[4] === 1;
  const isWest = messageBits[5] === 1;
  if (!isNorth) {
    latitude = -latitude;
  }
  // Message type from bits A/B/C (chars 1-3): 111 = M0 Off Duty ... 000 = Emergency.
  const msgValue = messageBits[0] * 4 + messageBits[1] * 2 + messageBits[2];
  const messageTypes = [
    "Emergency",
    "M6: Priority",
    "M5: Special",
    "M4: Committed",
    "M3: Returning",
    "M2: In Service",
    "M1: En Route",
    "M0: Off Duty"
  ];
  const messageType = messageTypes[msgValue] ?? "Unknown";
  // Standard vs custom: standard when no message bit came from the custom set A-K.
  const isStandard = !customBits.slice(0, 3).some((custom) => custom);
  return {
    latitude,
    messageType,
    longitudeOffset,
    isWest,
    isStandard
  };
};
export default decodeMicEPayload;

161
src/payload.object.ts Normal file
View File

@@ -0,0 +1,161 @@
import { FieldType, Segment } from "@hamradio/packet";
import { DO_NOT_ARCHIVE_MARKER, DataType, type IPosition, ObjectPayload, type Payload } from "./frame.types";
import { attachExtras, decodeCommentExtras } from "./payload.extras";
import { isCompressedPosition, parseCompressedPosition, parseUncompressedPosition } from "./payload.position";
import Timestamp from "./timestamp";
/**
 * Decode an APRS Object report (data type ';').
 *
 * Format: ;AAAAAAAAAcDDHHMMz<position><comment>
 *   - 9-character, space-padded object name
 *   - one state char: '*' = alive, '_' = killed
 *   - 7-character timestamp (always required for objects)
 *   - compressed (13 chars) or uncompressed (19 chars) position
 *   - optional free-text comment that may carry extras tokens (RNG/PHG/...)
 *
 * @param raw           Full payload text including the leading ';'.
 * @param withStructure When true, also emit Segment descriptors for each
 *                      parsed region of the payload.
 * @returns Decoded payload (null on any parse failure) and, when requested,
 *          the structure segments.
 */
export const decodeObjectPayload = (
  raw: string,
  withStructure: boolean = false
): {
  payload: Payload | null;
  segment?: Segment[];
} => {
  try {
    if (raw.length < 18) return { payload: null }; // 1 + 9 + 1 + 7 minimum
    let offset = 1; // Skip data type identifier ';'
    const segment: Segment[] = []; // only populated when withStructure is true
    const rawName = raw.substring(offset, offset + 9);
    const name = rawName.trimEnd(); // name is space-padded to 9 chars
    if (withStructure) {
      segment.push({
        name: "object",
        data: new TextEncoder().encode(rawName).buffer,
        isString: true,
        fields: [{ type: FieldType.STRING, name: "name", length: 9 }]
      });
    }
    offset += 9;
    const stateChar = raw.charAt(offset);
    if (stateChar !== "*" && stateChar !== "_") {
      return { payload: null };
    }
    const alive = stateChar === "*";
    if (withStructure) {
      let state: string = "invalid";
      if (stateChar === "*") {
        state = "alive";
      } else if (stateChar === "_") {
        state = "killed";
      }
      // Extend the object segment's data to cover name + state char.
      segment[segment.length - 1].data = new TextEncoder().encode(raw.substring(offset - 9, offset + 1)).buffer;
      segment[segment.length - 1].fields.push({
        type: FieldType.CHAR,
        name: "state",
        length: 1,
        value: state
      });
    }
    offset += 1;
    // Objects always carry a 7-char timestamp; fail if it is unparsable.
    const timeStr = raw.substring(offset, offset + 7);
    const { timestamp, segment: timestampSection } = Timestamp.fromString(timeStr, withStructure);
    if (!timestamp) {
      return { payload: null };
    }
    if (timestampSection) {
      segment.push(timestampSection);
    }
    offset += 7;
    const isCompressed = isCompressedPosition(raw.substring(offset));
    let position: IPosition | null = null;
    let consumed = 0;
    if (isCompressed) {
      const { position: compressed, segment: compressedSection } = parseCompressedPosition(
        raw.substring(offset),
        withStructure
      );
      if (!compressed) return { payload: null };
      position = {
        latitude: compressed.latitude,
        longitude: compressed.longitude,
        symbol: compressed.symbol,
        altitude: compressed.altitude
      };
      consumed = 13; // compressed position is a fixed 13 chars
      if (compressedSection) {
        segment.push(compressedSection);
      }
    } else {
      const { position: uncompressed, segment: uncompressedSection } = parseUncompressedPosition(
        raw.substring(offset),
        withStructure
      );
      if (!uncompressed) return { payload: null };
      position = {
        latitude: uncompressed.latitude,
        longitude: uncompressed.longitude,
        symbol: uncompressed.symbol,
        ambiguity: uncompressed.ambiguity
      };
      consumed = 19; // uncompressed position is a fixed 19 chars
      if (uncompressedSection) {
        segment.push(uncompressedSection);
      }
    }
    offset += consumed;
    const remainder = raw.substring(offset);
    const doNotArchive = remainder.includes(DO_NOT_ARCHIVE_MARKER);
    // Parse RNG/PHG and similar tokens out of the comment text.
    const extras = decodeCommentExtras(remainder, withStructure);
    const comment = extras.comment;
    if (comment) {
      position.comment = comment;
    }
    // Emit one comment segment when there is free text and/or extras fields
    // (the two original branches were identical apart from their guards).
    if (withStructure && (comment || extras.fields)) {
      segment.push({
        name: "comment",
        data: new TextEncoder().encode(remainder).buffer,
        isString: true,
        fields: extras.fields || []
      });
    }
    const payload: ObjectPayload = {
      type: DataType.Object,
      doNotArchive,
      name,
      timestamp,
      alive,
      position
    };
    attachExtras(payload, extras);
    if (withStructure) {
      return { payload, segment };
    }
    return { payload };
  } catch {
    return { payload: null };
  }
};
export default decodeObjectPayload;

344
src/payload.position.ts Normal file
View File

@@ -0,0 +1,344 @@
import { FieldType, type Segment } from "@hamradio/packet";
import { DO_NOT_ARCHIVE_MARKER, DataType, type IPosition, type Payload, type PositionPayload } from "./frame.types";
import { base91ToNumber, feetToMeters } from "./parser";
import { attachExtras, decodeCommentExtras } from "./payload.extras";
import Position from "./position";
import Timestamp from "./timestamp";
/**
 * Decode an APRS position report payload.
 *
 * Data type identifiers:
 *   '!' position, no timestamp, no messaging
 *   '=' position, no timestamp, with messaging
 *   '/' position, with timestamp, no messaging
 *   '@' position, with timestamp, with messaging
 *
 * @param dataType      The single data type identifier character.
 * @param raw           Full payload text including the identifier.
 * @param withStructure When true, also emit Segment descriptors.
 * @returns Decoded payload (null on failure) plus optional segments.
 */
export const decodePositionPayload = (
  dataType: string,
  raw: string,
  withStructure: boolean = false
): { payload: Payload | null; segment?: Segment[] } => {
  try {
    const hasTimestamp = dataType === "/" || dataType === "@";
    const messaging = dataType === "=" || dataType === "@";
    let offset = 1; // Skip data type identifier
    // Structure segments, built as we parse (only filled when requested).
    const structure: Segment[] = [];
    let timestamp: Timestamp | undefined = undefined;
    // Parse timestamp if present (7 characters: DDHHMMz, HHMMSSh or MMDDHHMM)
    if (hasTimestamp) {
      if (raw.length < 8) return { payload: null };
      const timeStr = raw.substring(offset, offset + 7);
      const { timestamp: parsedTimestamp, segment: timestampSegment } = Timestamp.fromString(timeStr, withStructure);
      // '/' and '@' reports must carry a valid timestamp; if it cannot be
      // parsed, the position that follows would be read from a misaligned
      // offset, so fail instead of decoding garbage.
      if (!parsedTimestamp) return { payload: null };
      timestamp = parsedTimestamp;
      if (timestampSegment) {
        structure.push(timestampSegment);
      }
      offset += 7;
    }
    // Need at least enough characters for a compressed position (13);
    // the uncompressed parser performs its own 19-char length check.
    if (raw.length < offset + 13) return { payload: null };
    // Check if compressed format
    const isCompressed = isCompressedPosition(raw.substring(offset));
    let position: Position;
    let comment = "";
    if (isCompressed) {
      // Compressed format: /YYYYXXXX$csT (13 chars)
      const { position: compressed, segment: compressedSegment } = parseCompressedPosition(
        raw.substring(offset),
        withStructure
      );
      if (!compressed) return { payload: null };
      position = new Position({
        latitude: compressed.latitude,
        longitude: compressed.longitude,
        symbol: compressed.symbol
      });
      if (compressed.altitude !== undefined) {
        position.altitude = compressed.altitude;
      }
      if (compressedSegment) {
        structure.push(compressedSegment);
      }
      offset += 13; // Compressed position is 13 chars
      comment = raw.substring(offset);
    } else {
      // Uncompressed format: DDMM.mmH/DDDMM.mmH$ (19 chars)
      const { position: uncompressed, segment: uncompressedSegment } = parseUncompressedPosition(
        raw.substring(offset),
        withStructure
      );
      if (!uncompressed) return { payload: null };
      position = new Position({
        latitude: uncompressed.latitude,
        longitude: uncompressed.longitude,
        symbol: uncompressed.symbol
      });
      if (uncompressed.ambiguity !== undefined) {
        position.ambiguity = uncompressed.ambiguity;
      }
      if (uncompressedSegment) {
        structure.push(uncompressedSegment);
      }
      offset += 19; // Uncompressed position is 19 chars
      comment = raw.substring(offset);
    }
    // Extract altitude, CSE/SPD, RNG and PHG tokens from the comment text.
    const remainder = comment;
    const doNotArchive = remainder.includes(DO_NOT_ARCHIVE_MARKER);
    const extras = decodeCommentExtras(remainder, withStructure);
    comment = extras.comment;
    if (comment) {
      position.comment = comment;
      // Emit comment section as we parse
      if (withStructure) {
        structure.push({
          name: "comment",
          data: new TextEncoder().encode(remainder).buffer,
          isString: true,
          fields: extras.fields || []
        });
      }
    } else if (withStructure && extras.fields) {
      // No free-text comment, but extras were present: emit a comment
      // section containing only the extras fields.
      structure.push({
        name: "comment",
        data: new TextEncoder().encode("").buffer,
        isString: true,
        fields: extras.fields || []
      });
    }
    let payloadType:
      | DataType.PositionNoTimestampNoMessaging
      | DataType.PositionNoTimestampWithMessaging
      | DataType.PositionWithTimestampNoMessaging
      | DataType.PositionWithTimestampWithMessaging;
    switch (dataType) {
      case "!":
        payloadType = DataType.PositionNoTimestampNoMessaging;
        break;
      case "=":
        payloadType = DataType.PositionNoTimestampWithMessaging;
        break;
      case "/":
        payloadType = DataType.PositionWithTimestampNoMessaging;
        break;
      case "@":
        payloadType = DataType.PositionWithTimestampWithMessaging;
        break;
      default:
        return { payload: null };
    }
    const payload: PositionPayload = {
      type: payloadType,
      doNotArchive,
      timestamp,
      position,
      messaging
    };
    attachExtras(payload, extras);
    if (withStructure) {
      return { payload, segment: structure };
    }
    return { payload };
  } catch {
    return { payload: null };
  }
};
/**
 * Heuristically decide whether `data` starts with a compressed APRS
 * position. An uncompressed parse is attempted first (when enough
 * characters are available) because uncompressed reports have no fixed
 * marker character; if that succeeds, the data is not compressed.
 * Otherwise the base-91 digit positions of the compressed layout
 * (STYYYYXXXXcsT) are range-checked.
 */
export const isCompressedPosition = (data: string): boolean => {
  // Too short for even a compressed (13-char) position.
  if (data.length < 13) return false;
  // Prefer the uncompressed interpretation whenever it parses.
  if (data.length >= 19 && parseUncompressedPosition(data, false).position) {
    return false;
  }
  // Sample two latitude digits and two longitude digits and require
  // printable base-91 characters (char codes 33..124).
  const isBase91At = (index: number): boolean => {
    const code = data.charCodeAt(index);
    return code >= 33 && code <= 124;
  };
  return [1, 2, 5, 6].every(isBase91At);
};
/**
 * Parse a 13-character compressed APRS position: STYYYYXXXXcsT
 *   char 0     - symbol table identifier
 *   chars 1-4  - base-91 encoded latitude
 *   chars 5-8  - base-91 encoded longitude
 *   char 9     - symbol code
 *   chars 10-12 (csT) - course/speed, radio range or altitude, plus the
 *                compression type byte (APRS101 chapter 9).
 *
 * @returns Decoded position (null on failure) and an optional structure
 *          segment when `withStructure` is set.
 */
export const parseCompressedPosition = (
  data: string,
  withStructure: boolean = false
): {
  position: IPosition | null;
  segment?: Segment;
} => {
  if (data.length < 13) return { position: null };
  const symbolTable = data.charAt(0);
  const symbolCode = data.charAt(9);
  // Extract base-91 encoded position (4 characters each)
  const latStr = data.substring(1, 5);
  const lonStr = data.substring(5, 9);
  try {
    // Decode base-91 encoded latitude and longitude
    const latBase91 = base91ToNumber(latStr);
    const lonBase91 = base91ToNumber(lonStr);
    // Convert to decimal degrees (APRS101 compressed position formulas)
    const latitude = 90 - latBase91 / 380926;
    const longitude = -180 + lonBase91 / 190463;
    const result: IPosition = {
      latitude,
      longitude,
      symbol: {
        table: symbolTable,
        code: symbolCode
      }
    };
    // Decode the csT trailer (chars 10-12) per APRS101:
    //   c == ' '            -> no course/speed/range/altitude data
    //   (T-33) & 0x18 == 0x10 -> NMEA source GGA: cs encodes altitude,
    //                            altitude = 1.002^((c-33)*91 + (s-33)) feet
    //   c == '{'            -> pre-calculated radio range (not surfaced)
    //   c in '!'..'z'       -> course = (c-33)*4, speed = 1.08^(s-33)-1 knots
    const c = data.charCodeAt(10);
    const s = data.charCodeAt(11);
    const t = data.charCodeAt(12);
    if (c !== 32) {
      if (t >= 33 && ((t - 33) & 0x18) === 0x10) {
        const altFeet = Math.pow(1.002, (c - 33) * 91 + (s - 33));
        result.altitude = feetToMeters(altFeet); // Convert to meters
      } else if (data.charAt(10) === "{") {
        // Radio range indicator; IPosition has no range-from-compressed
        // field here, so it is intentionally ignored.
      } else if (c >= 33 && c <= 122) {
        result.course = (c - 33) * 4; // degrees
        result.speed = Math.pow(1.08, s - 33) - 1; // knots
      }
    }
    const section: Segment | undefined = withStructure
      ? {
          name: "position",
          data: new TextEncoder().encode(data.substring(0, 13)).buffer,
          isString: true,
          fields: [
            { type: FieldType.CHAR, length: 1, name: "symbol table" },
            { type: FieldType.STRING, length: 4, name: "latitude" },
            { type: FieldType.STRING, length: 4, name: "longitude" },
            { type: FieldType.CHAR, length: 1, name: "symbol code" },
            { type: FieldType.CHAR, length: 1, name: "course/speed type" },
            { type: FieldType.CHAR, length: 1, name: "course/speed value" },
            { type: FieldType.CHAR, length: 1, name: "compression type" }
          ]
        }
      : undefined;
    return { position: result, segment: section };
  } catch {
    return { position: null };
  }
};
/**
 * Parse a 19-character uncompressed APRS position:
 *   DDMM.mmH/DDDMM.mmH$  (lat chars 0-7, symbol table 8, lon 9-17, code 18)
 * Trailing digits may be replaced by spaces for position ambiguity.
 *
 * Out-of-range degrees/minutes are rejected; this also strengthens
 * isCompressedPosition(), which tries an uncompressed parse first.
 *
 * @returns Decoded position (null on malformed input) plus an optional
 *          structure segment when `withStructure` is set.
 */
export const parseUncompressedPosition = (
  data: string,
  withStructure: boolean = false
): {
  position: IPosition | null;
  segment?: Segment;
} => {
  if (data.length < 19) return { position: null };
  const latStr = data.substring(0, 8); // DDMM.mmH
  const symbolTable = data.charAt(8);
  const lonStr = data.substring(9, 18); // DDDMM.mmH
  const symbolCode = data.charAt(18);
  // Spaces replace rightmost digits for ambiguity/privacy.
  let ambiguity = 0;
  const latSpaceCount = (latStr.match(/ /g) || []).length;
  const lonSpaceCount = (lonStr.match(/ /g) || []).length;
  if (latSpaceCount > 0 || lonSpaceCount > 0) {
    // They should be equal; be defensive and take the maximum.
    ambiguity = Math.max(latSpaceCount, lonSpaceCount);
  }
  // Replace spaces with zeros for numeric parsing.
  const latStrNormalized = latStr.replace(/ /g, "0");
  const lonStrNormalized = lonStr.replace(/ /g, "0");
  // Latitude: DD MM.mm H
  const latDeg = parseInt(latStrNormalized.substring(0, 2), 10);
  const latMin = parseFloat(latStrNormalized.substring(2, 7));
  const latHem = latStrNormalized.charAt(7);
  if (isNaN(latDeg) || isNaN(latMin)) return { position: null };
  if (latHem !== "N" && latHem !== "S") return { position: null };
  if (latDeg > 90 || latMin >= 60) return { position: null }; // out of range
  let latitude = latDeg + latMin / 60;
  if (latHem === "S") latitude = -latitude;
  // Longitude: DDD MM.mm H
  const lonDeg = parseInt(lonStrNormalized.substring(0, 3), 10);
  const lonMin = parseFloat(lonStrNormalized.substring(3, 8));
  const lonHem = lonStrNormalized.charAt(8);
  if (isNaN(lonDeg) || isNaN(lonMin)) return { position: null };
  if (lonHem !== "E" && lonHem !== "W") return { position: null };
  if (lonDeg > 180 || lonMin >= 60) return { position: null }; // out of range
  let longitude = lonDeg + lonMin / 60;
  if (lonHem === "W") longitude = -longitude;
  const result: IPosition = {
    latitude,
    longitude,
    symbol: {
      table: symbolTable,
      code: symbolCode
    }
  };
  if (ambiguity > 0) {
    result.ambiguity = ambiguity;
  }
  const segment: Segment | undefined = withStructure
    ? {
        name: "position",
        data: new TextEncoder().encode(data.substring(0, 19)).buffer,
        isString: true,
        fields: [
          { type: FieldType.STRING, length: 8, name: "latitude" },
          { type: FieldType.CHAR, length: 1, name: "symbol table" },
          { type: FieldType.STRING, length: 9, name: "longitude" },
          { type: FieldType.CHAR, length: 1, name: "symbol code" }
        ]
      }
    : undefined;
  return { position: result, segment };
};
export default decodePositionPayload;

69
src/payload.query.ts Normal file
View File

@@ -0,0 +1,69 @@
import { FieldType, type Segment } from "@hamradio/packet";
import { DataType, type Payload, type QueryPayload } from "./frame.types";
/**
 * Decode an APRS query payload (data type '?').
 *
 * The query type is the first whitespace-delimited token after '?';
 * anything after the first space (trimmed) becomes the optional target.
 *
 * @param raw           Full payload text including the leading '?'.
 * @param withStructure When true, also emit Segment descriptors.
 */
export const decodeQueryPayload = (
  raw: string,
  withStructure: boolean = false
): {
  payload: Payload | null;
  segment?: Segment[];
} => {
  try {
    if (raw.length < 2) return { payload: null };
    // Drop the '?' identifier and surrounding whitespace.
    const body = raw.substring(1).trim();
    if (!body) return { payload: null };
    const spaceAt = body.indexOf(" ");
    const queryType = spaceAt === -1 ? body : body.substring(0, spaceAt);
    let target: string | undefined = undefined;
    if (spaceAt !== -1) {
      const trimmedTarget = body.substring(spaceAt + 1).trim();
      if (trimmedTarget !== "") target = trimmedTarget;
    }
    if (!queryType) return { payload: null };
    const payload: QueryPayload = {
      type: DataType.Query,
      queryType,
      ...(target ? { target } : {})
    };
    if (!withStructure) return { payload };
    const segments: Segment[] = [
      {
        name: "query type",
        data: new TextEncoder().encode(queryType).buffer,
        isString: true,
        fields: [{ type: FieldType.STRING, name: "type", length: queryType.length }]
      }
    ];
    if (target) {
      segments.push({
        name: "query target",
        data: new TextEncoder().encode(target).buffer,
        isString: true,
        fields: [{ type: FieldType.STRING, name: "target", length: target.length }]
      });
    }
    return { payload, segment: segments };
  } catch {
    return { payload: null };
  }
};
export default decodeQueryPayload;
export default decodeQueryPayload;

161
src/payload.rawgps.ts Normal file
View File

@@ -0,0 +1,161 @@
import { FieldType, type Segment } from "@hamradio/packet";
import { DTM, GGA, INmeaSentence, Decoder as NmeaDecoder, RMC } from "extended-nmea";
import { DataType, type IPosition, type Payload, type RawGPSPayload } from "./frame.types";
/**
 * Decode a raw GPS payload (data type '$'): the text after '$' is an NMEA
 * sentence. Decoding is best-effort: the extended-nmea Decoder is tried
 * first; if it yields nothing usable, a minimal hand-rolled parser extracts
 * position data from common RMC/GGA sentences. Any sentence is accepted as
 * a raw-gps payload even when no position can be recovered.
 *
 * @param raw           Full payload text including the leading '$'.
 * @param withStructure When true, also emit Segment descriptors.
 * @returns Decoded payload (null only on length/throw failure) and,
 *          when requested, the structure segments.
 */
export const decodeRawGPSPayload = (
  raw: string,
  withStructure: boolean = false
): {
  payload: Payload | null;
  segment?: Segment[];
} => {
  try {
    if (raw.length < 2) return { payload: null };
    // Raw GPS payloads start with '$' followed by an NMEA sentence
    const sentence = raw.substring(1).trim();
    // Attempt to parse with extended-nmea Decoder to extract position (best-effort)
    let parsed: INmeaSentence | null = null;
    try {
      // Re-add the '$' prefix the library expects if it was consumed above.
      const full = sentence.startsWith("$") ? sentence : `$${sentence}`;
      parsed = NmeaDecoder.decode(full);
    } catch {
      // ignore parse errors - accept any sentence as raw-gps per APRS
    }
    const payload: RawGPSPayload = {
      type: DataType.RawGPS,
      sentence
    };
    // If parse produced latitude/longitude, attach structured position.
    // Otherwise fallback to a minimal NMEA parser for common sentences (RMC, GGA).
    if (
      parsed &&
      (parsed instanceof RMC || parsed instanceof GGA || parsed instanceof DTM) &&
      parsed.latitude &&
      parsed.longitude
    ) {
      // extended-nmea latitude/longitude are GeoCoordinate objects with
      // fields { degrees, decimal, quadrant }
      // NOTE(review): assumes `degrees` is whole degrees and `decimal` is
      // minutes (hence the /60) — confirm against the extended-nmea docs.
      const latObj = parsed.latitude;
      const lonObj = parsed.longitude;
      const lat = latObj.degrees + (Number(latObj.decimal) || 0) / 60.0;
      const lon = lonObj.degrees + (Number(lonObj.decimal) || 0) / 60.0;
      // South/West quadrants are negative decimal degrees.
      const latitude = latObj.quadrant === "S" ? -lat : lat;
      const longitude = lonObj.quadrant === "W" ? -lon : lon;
      const pos: IPosition = {
        latitude,
        longitude
      };
      // altitude: altMean (GGA mean sea level) first, a plain altitude
      // field wins if both exist.
      if ("altMean" in parsed && parsed.altMean !== undefined) {
        pos.altitude = Number(parsed.altMean);
      }
      if ("altitude" in parsed && parsed.altitude !== undefined) {
        pos.altitude = Number(parsed.altitude);
      }
      // speed/course (RMC fields)
      if ("speedOverGround" in parsed && parsed.speedOverGround !== undefined) {
        pos.speed = Number(parsed.speedOverGround);
      }
      if ("courseOverGround" in parsed && parsed.courseOverGround !== undefined) {
        pos.course = Number(parsed.courseOverGround);
      }
      payload.position = pos;
    } else {
      // Fallback: split the sentence manually and handle RMC/GGA only.
      try {
        const full = sentence.startsWith("$") ? sentence : `$${sentence}`;
        const withoutChecksum = full.split("*")[0]; // drop trailing *hh checksum
        const parts = withoutChecksum.split(",");
        const header = parts[0].slice(1).toUpperCase(); // e.g. "GPRMC"
        // NMEA coordinates are DDMM.mmmm (lat) / DDDMM.mmmm (lon); the
        // hemisphere letter decides sign and how many degree digits exist.
        const parseCoord = (coord: string, hemi: string) => {
          if (!coord || coord === "") return undefined;
          const degDigits = hemi === "N" || hemi === "S" ? 2 : 3;
          if (coord.length <= degDigits) return undefined;
          const degPart = coord.slice(0, degDigits);
          const minPart = coord.slice(degDigits);
          const degrees = parseFloat(degPart);
          const mins = parseFloat(minPart);
          if (Number.isNaN(degrees) || Number.isNaN(mins)) return undefined;
          let dec = degrees + mins / 60.0;
          if (hemi === "S" || hemi === "W") dec = -dec;
          return dec;
        };
        if (header.endsWith("RMC")) {
          // RMC: parts[3..6] = lat, N/S, lon, E/W; [7] speed (knots), [8] course
          const lat = parseCoord(parts[3], parts[4]);
          const lon = parseCoord(parts[5], parts[6]);
          if (lat !== undefined && lon !== undefined) {
            const pos: IPosition = { latitude: lat, longitude: lon };
            if (parts[7]) pos.speed = Number(parts[7]);
            if (parts[8]) pos.course = Number(parts[8]);
            payload.position = pos;
          }
        } else if (header.endsWith("GGA")) {
          // GGA: parts[2..5] = lat, N/S, lon, E/W; [9] antenna altitude
          const lat = parseCoord(parts[2], parts[3]);
          const lon = parseCoord(parts[4], parts[5]);
          if (lat !== undefined && lon !== undefined) {
            const pos: IPosition = { latitude: lat, longitude: lon };
            if (parts[9]) pos.altitude = Number(parts[9]);
            payload.position = pos;
          }
        }
      } catch {
        // ignore fallback parse errors
      }
    }
    if (withStructure) {
      const segments: Segment[] = [
        {
          name: "raw-gps",
          data: new TextEncoder().encode(sentence).buffer,
          isString: true,
          fields: [
            {
              type: FieldType.STRING,
              name: "sentence",
              length: sentence.length
            }
          ]
        }
      ];
      if (payload.position) {
        // The position segment carries a JSON rendering of the decoded
        // position rather than original sentence bytes.
        segments.push({
          name: "raw-gps-position",
          data: new TextEncoder().encode(JSON.stringify(payload.position)).buffer,
          isString: true,
          fields: [
            {
              type: FieldType.STRING,
              name: "latitude",
              length: String(payload.position.latitude).length
            },
            {
              type: FieldType.STRING,
              name: "longitude",
              length: String(payload.position.longitude).length
            }
          ]
        });
      }
      return { payload, segment: segments };
    }
    return { payload };
  } catch {
    return { payload: null };
  }
};
export default decodeRawGPSPayload;

79
src/payload.status.ts Normal file
View File

@@ -0,0 +1,79 @@
import { FieldType, type Segment } from "@hamradio/packet";
import { DO_NOT_ARCHIVE_MARKER, DataType, type Payload, type StatusPayload } from "./frame.types";
import Timestamp from "./timestamp";
export const decodeStatusPayload = (
raw: string,
withStructure: boolean = false
): {
payload: Payload | null;
segment?: Segment[];
} => {
// Status payload: optional 7-char timestamp followed by free text.
// We'll also detect a trailing Maidenhead locator (4 or 6 chars) and expose it.
const offsetBase = 1; // skip data type identifier '>'
if (raw.length <= offsetBase) return { payload: null };
let offset = offsetBase;
const segments: Segment[] = withStructure ? [] : [];
// Try parse optional timestamp (7 chars)
if (raw.length >= offset + 7) {
const timeStr = raw.substring(offset, offset + 7);
const { timestamp, segment: tsSegment } = Timestamp.fromString(timeStr, withStructure);
if (timestamp) {
offset += 7;
if (tsSegment) segments.push(tsSegment);
}
}
// Remaining text is status text
const text = raw.substring(offset);
if (!text) return { payload: null };
const doNotArchive = text.includes(DO_NOT_ARCHIVE_MARKER);
// Detect trailing Maidenhead locator (4 or 6 chars) at end of text separated by space
let maidenhead: string | undefined;
const mhMatch = text.match(/\s([A-Ra-r]{2}\d{2}(?:[A-Ra-r]{2})?)$/);
let statusText = text;
if (mhMatch) {
maidenhead = mhMatch[1].toUpperCase();
statusText = text.slice(0, mhMatch.index).trimEnd();
}
const payload: StatusPayload = {
type: DataType.Status,
doNotArchive,
timestamp: undefined,
text: statusText
};
// If timestamp was parsed, attach it
if (segments.length > 0) {
// The first segment may be timestamp; parseTimestamp returns the Timestamp object
// Re-parse to obtain timestamp object (cheap) - alternate would be to capture earlier
const timeSegment = segments.find((s) => s.name === "timestamp");
if (timeSegment) {
const tsStr = new TextDecoder().decode(timeSegment.data);
const { timestamp } = Timestamp.fromString(tsStr, false);
if (timestamp) payload.timestamp = timestamp;
}
}
if (maidenhead) payload.maidenhead = maidenhead;
if (withStructure) {
segments.push({
name: "status",
data: new TextEncoder().encode(text).buffer,
isString: true,
fields: [{ type: FieldType.STRING, name: "text", length: text.length }]
});
return { payload, segment: segments };
}
return { payload };
};
export default decodeStatusPayload;

197
src/payload.telemetry.ts Normal file
View File

@@ -0,0 +1,197 @@
import { FieldType, type Segment } from "@hamradio/packet";
import {
DataType,
type Payload,
type TelemetryBitSensePayload,
type TelemetryCoefficientsPayload,
type TelemetryDataPayload,
type TelemetryParameterPayload,
type TelemetryUnitPayload
} from "./frame.types";
/**
 * Decode a telemetry payload (data type 'T').
 *
 * NOTE(review): the data variant here follows a local convention
 * ('#' + sequence + space-separated analogs/digital) rather than the
 * classic "T#seq,a1,...,d" comma form — confirm against the senders this
 * library targets. PARAM/UNIT/COEFF/BITS keyword variants are also
 * accepted. All variants share DataType.TelemetryData and are
 * distinguished by the `variant` field.
 *
 * @param raw           Full payload text including the leading 'T'.
 * @param withStructure When true, also emit Segment descriptors.
 * @returns Decoded payload (null when no variant matches) and optional
 *          structure segments.
 */
export const decodeTelemetryPayload = (
  raw: string,
  withStructure: boolean = false
): {
  payload: Payload | null;
  segment?: Segment[];
} => {
  try {
    if (raw.length < 2) return { payload: null };
    const rest = raw.substring(1).trim();
    if (!rest) return { payload: null };
    const segments: Segment[] = withStructure ? [] : [];
    // Telemetry data: convention used here: starts with '#' then sequence then analogs and digital
    if (rest.startsWith("#")) {
      // parts[0] = sequence, parts[1] = comma-separated analogs, parts[2] = digital bits
      const parts = rest.substring(1).trim().split(/\s+/);
      const seq = parseInt(parts[0], 10);
      let analog: number[] = [];
      let digital = 0;
      if (parts.length >= 2) {
        // analogs as comma separated
        analog = parts[1].split(",").map((v) => parseFloat(v));
      }
      if (parts.length >= 3) {
        digital = parseInt(parts[2], 10);
      }
      if (withStructure) {
        // One segment per logical field: sequence, analog list, digital bits.
        segments.push({
          name: "telemetry sequence",
          data: new TextEncoder().encode(String(seq)).buffer,
          isString: true,
          fields: [
            {
              type: FieldType.STRING,
              name: "sequence",
              length: String(seq).length
            }
          ]
        });
        segments.push({
          name: "telemetry analog",
          data: new TextEncoder().encode(parts[1] || "").buffer,
          isString: true,
          fields: [
            {
              type: FieldType.STRING,
              name: "analogs",
              length: (parts[1] || "").length
            }
          ]
        });
        segments.push({
          name: "telemetry digital",
          data: new TextEncoder().encode(String(digital)).buffer,
          isString: true,
          fields: [
            {
              type: FieldType.STRING,
              name: "digital",
              length: String(digital).length
            }
          ]
        });
      }
      const payload: TelemetryDataPayload = {
        type: DataType.TelemetryData,
        variant: "data",
        // Unparsable numbers degrade to 0 rather than failing the payload.
        sequence: isNaN(seq) ? 0 : seq,
        analog,
        digital: isNaN(digital) ? 0 : digital
      };
      if (withStructure) return { payload, segment: segments };
      return { payload };
    }
    // Telemetry parameters: 'PARAM' keyword followed by channel names
    if (/^PARAM/i.test(rest)) {
      const after = rest.replace(/^PARAM\s*/i, "");
      // Names may be comma- or whitespace-separated; drop empties.
      const names = after.split(/[,\s]+/).filter(Boolean);
      if (withStructure) {
        segments.push({
          name: "telemetry parameters",
          data: new TextEncoder().encode(after).buffer,
          isString: true,
          fields: [{ type: FieldType.STRING, name: "names", length: after.length }]
        });
      }
      const payload: TelemetryParameterPayload = {
        type: DataType.TelemetryData,
        variant: "parameters",
        names
      };
      if (withStructure) return { payload, segment: segments };
      return { payload };
    }
    // Telemetry units: 'UNIT' keyword followed by unit labels
    if (/^UNIT/i.test(rest)) {
      const after = rest.replace(/^UNIT\s*/i, "");
      const units = after.split(/[,\s]+/).filter(Boolean);
      if (withStructure) {
        segments.push({
          name: "telemetry units",
          data: new TextEncoder().encode(after).buffer,
          isString: true,
          fields: [{ type: FieldType.STRING, name: "units", length: after.length }]
        });
      }
      const payload: TelemetryUnitPayload = {
        type: DataType.TelemetryData,
        variant: "unit",
        units
      };
      if (withStructure) return { payload, segment: segments };
      return { payload };
    }
    // Telemetry coefficients: 'COEFF' with A:, B:, C: comma lists
    if (/^COEFF/i.test(rest)) {
      const after = rest.replace(/^COEFF\s*/i, "");
      const aMatch = after.match(/A:([^\s;]+)/i);
      const bMatch = after.match(/B:([^\s;]+)/i);
      const cMatch = after.match(/C:([^\s;]+)/i);
      // Missing groups yield empty coefficient lists.
      const parseList = (s?: string) => (s ? s.split(",").map((v) => parseFloat(v)) : []);
      const coefficients = {
        a: parseList(aMatch?.[1]),
        b: parseList(bMatch?.[1]),
        c: parseList(cMatch?.[1])
      };
      if (withStructure) {
        segments.push({
          name: "telemetry coefficients",
          data: new TextEncoder().encode(after).buffer,
          isString: true,
          fields: [{ type: FieldType.STRING, name: "coeffs", length: after.length }]
        });
      }
      const payload: TelemetryCoefficientsPayload = {
        type: DataType.TelemetryData,
        variant: "coefficients",
        coefficients
      };
      if (withStructure) return { payload, segment: segments };
      return { payload };
    }
    // Telemetry bitsense/project: 'BITS' <number> [project]
    // NOTE(review): /^BITS?/ also matches a bare "BIT..." prefix — confirm
    // that is intended.
    if (/^BITS?/i.test(rest)) {
      const parts = rest.split(/\s+/).slice(1);
      const sense = parts.length > 0 ? parseInt(parts[0], 10) : 0;
      // Everything after the sense value is a free-form project name.
      const projectName = parts.length > 1 ? parts.slice(1).join(" ") : undefined;
      if (withStructure) {
        segments.push({
          name: "telemetry bitsense",
          data: new TextEncoder().encode(rest).buffer,
          isString: true,
          fields: [{ type: FieldType.STRING, name: "bitsense", length: rest.length }]
        });
      }
      const payload: TelemetryBitSensePayload = {
        type: DataType.TelemetryData,
        variant: "bitsense",
        sense: isNaN(sense) ? 0 : sense,
        ...(projectName ? { projectName } : {})
      };
      if (withStructure) return { payload, segment: segments };
      return { payload };
    }
    // No recognized telemetry variant.
    return { payload: null };
  } catch {
    return { payload: null };
  }
};
export default decodeTelemetryPayload;

135
src/payload.thirdparty.ts Normal file
View File

@@ -0,0 +1,135 @@
import { FieldType, type Segment } from "@hamradio/packet";
import { Frame } from "./frame";
import { DataType, type Payload, type ThirdPartyPayload, UserDefinedPayload } from "./frame.types";
/**
 * Decode a user-defined APRS payload (data type '{').
 *
 * The first whitespace-delimited token after '{' is treated as the user
 * packet type; the remainder (trimmed) is kept as opaque data.
 *
 * @param raw           Full payload text including the leading '{'.
 * @param withStructure When true, also emit Segment descriptors.
 */
export const decodeUserDefinedPayload = (
  raw: string,
  withStructure: boolean = false
): {
  payload: Payload | null;
  segment?: Segment[];
} => {
  try {
    if (raw.length < 2) return { payload: null };
    const body = raw.substring(1); // content after '{'
    let userPacketType = "";
    let data = "";
    const tokenMatch = body.match(/^([^\s]+)\s*(.*)$/s);
    if (tokenMatch) {
      userPacketType = tokenMatch[1] || "";
      data = (tokenMatch[2] || "").trim();
    }
    const payload: UserDefinedPayload = {
      type: DataType.UserDefined,
      userPacketType,
      data
    };
    if (!withStructure) return { payload };
    const encode = (text: string) => new TextEncoder().encode(text).buffer;
    const segments: Segment[] = [
      {
        name: "user-defined",
        data: encode(body),
        isString: true,
        fields: [{ type: FieldType.STRING, name: "raw", length: body.length }]
      },
      {
        name: "user-packet-type",
        data: encode(userPacketType),
        isString: true,
        fields: [
          {
            type: FieldType.STRING,
            name: "type",
            length: userPacketType.length
          }
        ]
      },
      {
        name: "user-data",
        data: encode(data),
        isString: true,
        fields: [{ type: FieldType.STRING, name: "data", length: data.length }]
      }
    ];
    return { payload, segment: segments };
  } catch {
    return { payload: null };
  }
};
/**
 * Decode a third-party APRS payload (data type '}').
 *
 * Everything after '}' is an encapsulated frame in text form; a
 * best-effort Frame.parse is attempted and the resulting frame attached
 * when it succeeds.
 *
 * @param raw           Full payload text including the leading '}'.
 * @param withStructure When true, also emit Segment descriptors.
 */
export const decodeThirdPartyPayload = (
  raw: string,
  withStructure: boolean = false
): {
  payload: Payload | null;
  segment?: Segment[];
} => {
  try {
    if (raw.length < 2) return { payload: null };
    const inner = raw.substring(1); // encapsulated frame text after '}'
    // Best-effort: try to interpret the embedded text as a full APRS frame.
    let nestedFrame: Frame | undefined;
    try {
      nestedFrame = Frame.parse(inner);
    } catch {
      nestedFrame = undefined;
    }
    const payload: ThirdPartyPayload = {
      type: DataType.ThirdParty,
      comment: inner,
      ...(nestedFrame ? { frame: nestedFrame } : {})
    };
    if (!withStructure) return { payload };
    const segments: Segment[] = [
      {
        name: "third-party",
        data: new TextEncoder().encode(inner).buffer,
        isString: true,
        fields: [{ type: FieldType.STRING, name: "raw", length: inner.length }]
      }
    ];
    if (nestedFrame) {
      // Summarize the nested frame as "source>destination:payload".
      const summary = `${nestedFrame.source.toString()}>${nestedFrame.destination.toString()}:${nestedFrame.payload}`;
      segments.push({
        name: "third-party-nested-frame",
        data: new TextEncoder().encode(summary).buffer,
        isString: true,
        fields: [
          {
            type: FieldType.STRING,
            name: "nested",
            length: summary.length
          }
        ]
      });
    }
    return { payload, segment: segments };
  } catch {
    return { payload: null };
  }
};

129
src/payload.weather.ts Normal file
View File

@@ -0,0 +1,129 @@
import { FieldType, type Segment } from "@hamradio/packet";
import { DataType, type IPosition, type Payload, type WeatherPayload } from "./frame.types";
import { isCompressedPosition, parseCompressedPosition, parseUncompressedPosition } from "./payload.position";
import Timestamp from "./timestamp";
/**
 * Decode a positionless weather report (data type '_'), with best-effort
 * support for an optional leading timestamp and an optional embedded
 * position. Weather tokens (wind, temperature, rain, humidity, pressure)
 * are scanned out of the remaining text with regexes.
 *
 * NOTE(review): the payload type stays WeatherReportNoPosition even when a
 * position is found — confirm whether a with-position DataType should be
 * used in that case.
 *
 * @param raw           Full payload text including the leading '_'.
 * @param withStructure When true, also emit Segment descriptors.
 */
export const decodeWeatherPayload = (
  raw: string,
  withStructure: boolean = false
): {
  payload: Payload | null;
  segment?: Segment[];
} => {
  try {
    if (raw.length < 2) return { payload: null };
    let offset = 1; // skip '_' data type
    const segments: Segment[] = withStructure ? [] : [];
    // Try optional timestamp (7 chars); on failure offset is not advanced.
    let timestamp;
    if (raw.length >= offset + 7) {
      const timeStr = raw.substring(offset, offset + 7);
      const parsed = Timestamp.fromString(timeStr, withStructure);
      timestamp = parsed.timestamp;
      if (parsed.segment) {
        segments.push(parsed.segment);
      }
      if (timestamp) offset += 7;
    }
    // Try optional position following timestamp
    let position: IPosition | undefined;
    let consumed = 0;
    const tail = raw.substring(offset);
    if (tail.length > 0) {
      // If the tail starts with a wind token like DDD/SSS, treat it as weather data
      // and do not attempt to parse it as a position (avoids mis-detecting wind
      // values as compressed position fields).
      if (/^\s*\d{3}\/\d{1,3}/.test(tail)) {
        // no position present; leave consumed = 0
      } else if (isCompressedPosition(tail)) {
        const parsed = parseCompressedPosition(tail, withStructure);
        if (parsed.position) {
          position = {
            latitude: parsed.position.latitude,
            longitude: parsed.position.longitude,
            symbol: parsed.position.symbol,
            altitude: parsed.position.altitude
          };
          if (parsed.segment) segments.push(parsed.segment);
          consumed = 13; // compressed position length
        }
      } else {
        const parsed = parseUncompressedPosition(tail, withStructure);
        if (parsed.position) {
          position = {
            latitude: parsed.position.latitude,
            longitude: parsed.position.longitude,
            symbol: parsed.position.symbol,
            ambiguity: parsed.position.ambiguity
          };
          if (parsed.segment) segments.push(parsed.segment);
          consumed = 19; // uncompressed position length
        }
      }
    }
    offset += consumed;
    const rest = raw.substring(offset).trim();
    const payload: WeatherPayload = {
      type: DataType.WeatherReportNoPosition
    };
    if (timestamp) payload.timestamp = timestamp;
    if (position) payload.position = position;
    if (rest && rest.length > 0) {
      // Parse common tokens; each regex searches anywhere in the text.
      // Wind: DDD/SSS [gGGG]
      const windMatch = rest.match(/(\d{3})\/(\d{1,3})(?:g(\d{1,3}))?/);
      if (windMatch) {
        payload.windDirection = parseInt(windMatch[1], 10);
        payload.windSpeed = parseInt(windMatch[2], 10);
        if (windMatch[3]) payload.windGust = parseInt(windMatch[3], 10);
      }
      // Temperature: tNNN (degrees Fahrenheit)
      const tempMatch = rest.match(/t(-?\d{1,3})/i);
      if (tempMatch) payload.temperature = parseInt(tempMatch[1], 10);
      // Rain: rNNN (last hour), pNNN (24h), PNNN (since midnight) - values are hundredths of inch
      const rMatch = rest.match(/r(\d{3})/);
      if (rMatch) payload.rainLastHour = parseInt(rMatch[1], 10);
      const pMatch = rest.match(/p(\d{3})/);
      if (pMatch) payload.rainLast24Hours = parseInt(pMatch[1], 10);
      const PMatch = rest.match(/P(\d{3})/);
      if (PMatch) payload.rainSinceMidnight = parseInt(PMatch[1], 10);
      // Humidity: hNN (percent)
      const hMatch = rest.match(/h(\d{1,3})/);
      if (hMatch) payload.humidity = parseInt(hMatch[1], 10);
      // Pressure: bXXXX or bXXXXX (tenths of millibar)
      const bMatch = rest.match(/b(\d{4,5})/);
      if (bMatch) payload.pressure = parseInt(bMatch[1], 10);
      // Keep the raw weather text as the comment.
      payload.comment = rest;
      if (withStructure) {
        segments.push({
          name: "weather",
          data: new TextEncoder().encode(rest).buffer,
          isString: true,
          fields: [{ type: FieldType.STRING, name: "text", length: rest.length }]
        });
      }
    }
    if (withStructure) return { payload, segment: segments };
    return { payload };
  } catch {
    return { payload: null };
  }
};
export default decodeWeatherPayload;

View File

@@ -1,8 +1,8 @@
import { IPosition, ISymbol } from "./frame.types";
import { IDirectionFinding, IPosition, IPowerHeightGain, ISymbol } from "./frame.types";
export class Symbol implements ISymbol {
table: string; // Symbol table identifier
code: string; // Symbol code
table: string; // Symbol table identifier
code: string; // Symbol code
constructor(table: string, code?: string) {
if (code === undefined) {
@@ -24,14 +24,17 @@ export class Symbol implements ISymbol {
}
export class Position implements IPosition {
latitude: number; // Decimal degrees
longitude: number; // Decimal degrees
latitude: number; // Decimal degrees
longitude: number; // Decimal degrees
ambiguity?: number; // Position ambiguity (0-4)
altitude?: number; // Meters
speed?: number; // Speed in knots/kmh depending on source
course?: number; // Course in degrees
altitude?: number; // Meters
speed?: number; // Speed in knots/kmh depending on source
course?: number; // Course in degrees
symbol?: Symbol;
comment?: string;
range?: number;
phg?: IPowerHeightGain;
dfs?: IDirectionFinding;
constructor(data: Partial<IPosition>) {
this.latitude = data.latitude ?? 0;
@@ -40,33 +43,38 @@ export class Position implements IPosition {
this.altitude = data.altitude;
this.speed = data.speed;
this.course = data.course;
if (typeof data.symbol === 'string') {
if (typeof data.symbol === "string") {
this.symbol = new Symbol(data.symbol);
} else if (data.symbol) {
this.symbol = new Symbol(data.symbol.table, data.symbol.code);
}
this.comment = data.comment;
this.range = data.range;
this.phg = data.phg;
this.dfs = data.dfs;
}
public toString(): string {
const latStr = this.latitude.toFixed(5);
const lonStr = this.longitude.toFixed(5);
const altStr = this.altitude !== undefined ? `,${this.altitude}m` : '';
const altStr = this.altitude !== undefined ? `,${this.altitude}m` : "";
return `${latStr},${lonStr}${altStr}`;
}
public distanceTo(other: IPosition): number {
const R = 6371e3; // Earth radius in meters
const lat1 = this.latitude * Math.PI / 180;
const lat2 = other.latitude * Math.PI / 180;
const dLat = (other.latitude - this.latitude) * Math.PI / 180;
const dLon = (other.longitude - this.longitude) * Math.PI / 180;
const lat1 = (this.latitude * Math.PI) / 180;
const lat2 = (other.latitude * Math.PI) / 180;
const dLat = ((other.latitude - this.latitude) * Math.PI) / 180;
const dLon = ((other.longitude - this.longitude) * Math.PI) / 180;
const a = Math.sin(dLat/2) * Math.sin(dLat/2) +
Math.cos(lat1) * Math.cos(lat2) *
Math.sin(dLon/2) * Math.sin(dLon/2);
const c = 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1-a));
const a =
Math.sin(dLat / 2) * Math.sin(dLat / 2) +
Math.cos(lat1) * Math.cos(lat2) * Math.sin(dLon / 2) * Math.sin(dLon / 2);
const c = 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1 - a));
return R * c; // Distance in meters
}
}
export default Position;

189
src/timestamp.ts Normal file
View File

@@ -0,0 +1,189 @@
import { FieldType, Segment } from "@hamradio/packet";
import { ITimestamp } from "./frame.types";
export class Timestamp implements ITimestamp {
  // Day of month (present for DHM and MDHM formats).
  day?: number;
  // Month 1-12 (present for MDHM format only).
  month?: number;
  hours: number;
  minutes: number;
  // Seconds (present for HMS format only).
  seconds?: number;
  // Which APRS timestamp layout the values came from.
  format: "DHM" | "HMS" | "MDHM";
  // true = UTC ("zulu"), false = station-local time.
  zulu?: boolean;
  /**
   * @param hours   Hour of day (0-23).
   * @param minutes Minute (0-59).
   * @param format  APRS timestamp layout the values came from.
   * @param options Optional day/month/seconds and the zulu (UTC) flag.
   */
  constructor(
    hours: number,
    minutes: number,
    format: "DHM" | "HMS" | "MDHM",
    options: {
      day?: number;
      month?: number;
      seconds?: number;
      zulu?: boolean;
    } = {}
  ) {
    this.hours = hours;
    this.minutes = minutes;
    this.format = format;
    this.day = options.day;
    this.month = options.month;
    this.seconds = options.seconds;
    this.zulu = options.zulu;
  }
  /**
   * Convert APRS timestamp to JavaScript Date object
   * Note: APRS timestamps don't include year, so we use current year
   * For DHM format, we find the most recent occurrence of that day
   * For HMS format, we use current date
   * For MDHM format, we use the specified month/day in current year
   */
  toDate(): Date {
    const now = new Date();
    if (this.format === "DHM") {
      // Day-Hour-Minute: find the most recent occurrence of this day of month.
      const currentYear = this.zulu ? now.getUTCFullYear() : now.getFullYear();
      const currentMonth = this.zulu ? now.getUTCMonth() : now.getMonth();
      let date: Date;
      if (this.zulu) {
        date = new Date(Date.UTC(currentYear, currentMonth, this.day!, this.hours, this.minutes, 0, 0));
      } else {
        date = new Date(currentYear, currentMonth, this.day!, this.hours, this.minutes, 0, 0);
      }
      // A future date means the timestamp belongs to the previous month.
      if (date > now) {
        if (this.zulu) {
          date = new Date(Date.UTC(currentYear, currentMonth - 1, this.day!, this.hours, this.minutes, 0, 0));
        } else {
          date = new Date(currentYear, currentMonth - 1, this.day!, this.hours, this.minutes, 0, 0);
        }
      }
      return date;
    } else if (this.format === "HMS") {
      // Hour-Minute-Second on the current date.
      if (this.zulu) {
        const date = new Date();
        date.setUTCHours(this.hours, this.minutes, this.seconds || 0, 0);
        // A future time means the timestamp belongs to yesterday.
        if (date > now) {
          date.setUTCDate(date.getUTCDate() - 1);
        }
        return date;
      } else {
        const date = new Date();
        date.setHours(this.hours, this.minutes, this.seconds || 0, 0);
        if (date > now) {
          date.setDate(date.getDate() - 1);
        }
        return date;
      }
    } else {
      // MDHM: month/day in the current year (local time).
      const currentYear = now.getFullYear();
      let date = new Date(currentYear, (this.month || 1) - 1, this.day!, this.hours, this.minutes, 0, 0);
      // A future date means the timestamp belongs to the previous year.
      if (date > now) {
        date = new Date(currentYear - 1, (this.month || 1) - 1, this.day!, this.hours, this.minutes, 0, 0);
      }
      return date;
    }
  }
  /**
   * Parse an APRS timestamp string.
   *
   * Supported layouts:
   *  - "DDHHMMz"  — day/hours/minutes, UTC (DHM)
   *  - "DDHHMM/"  — day/hours/minutes, station-local time (DHM)
   *  - "HHMMSSh"  — hours/minutes/seconds, UTC (HMS)
   *  - "MMDDHHMM" — month/day/hours/minutes, 8 digits, no suffix (MDHM)
   *
   * Fix: "DDHHMM/" was previously mis-parsed as MDHM, so substring(6, 8)
   * read the "/" suffix as the minutes field and produced NaN. Per the APRS
   * spec the "/" suffix marks local-time DHM. The 8-digit MDHM layout
   * (formerly rejected by the length-7 check) is now accepted as well.
   *
   * @param str           Raw timestamp token from the packet.
   * @param withStructure When true, also emit a dissection Segment.
   * @returns Parsed timestamp (or undefined when unrecognized) and the
   *          optional structure segment.
   */
  static fromString(
    str: string,
    withStructure: boolean = false
  ): {
    timestamp: Timestamp | undefined;
    segment?: Segment;
  } {
    // MDHM format: exactly eight digits, no timezone suffix (local time).
    if (/^\d{8}$/.test(str)) {
      const timestamp = new Timestamp(parseInt(str.substring(4, 6), 10), parseInt(str.substring(6, 8), 10), "MDHM", {
        month: parseInt(str.substring(0, 2), 10),
        day: parseInt(str.substring(2, 4), 10),
        zulu: false
      });
      const segment = withStructure
        ? {
            name: "timestamp",
            data: new TextEncoder().encode(str).buffer,
            isString: true,
            fields: [
              { type: FieldType.STRING, name: "month (MM)", length: 2 },
              { type: FieldType.STRING, name: "day (DD)", length: 2 },
              { type: FieldType.STRING, name: "hour (HH)", length: 2 },
              { type: FieldType.STRING, name: "minute (MM)", length: 2 }
            ]
          }
        : undefined;
      return { timestamp, segment };
    }
    if (str.length !== 7) return { timestamp: undefined };
    const timeType = str.charAt(6);
    if (timeType === "z" || timeType === "/") {
      // DHM format: Day-Hour-Minute; "z" marks UTC, "/" marks local time.
      const timestamp = new Timestamp(parseInt(str.substring(2, 4), 10), parseInt(str.substring(4, 6), 10), "DHM", {
        day: parseInt(str.substring(0, 2), 10),
        zulu: timeType === "z"
      });
      const segment = withStructure
        ? {
            name: "timestamp",
            data: new TextEncoder().encode(str).buffer,
            isString: true,
            fields: [
              { type: FieldType.STRING, name: "day (DD)", length: 2 },
              { type: FieldType.STRING, name: "hour (HH)", length: 2 },
              { type: FieldType.STRING, name: "minute (MM)", length: 2 },
              { type: FieldType.CHAR, name: "timezone indicator", length: 1 }
            ]
          }
        : undefined;
      return { timestamp, segment };
    } else if (timeType === "h") {
      // HMS format: Hour-Minute-Second (UTC)
      const timestamp = new Timestamp(parseInt(str.substring(0, 2), 10), parseInt(str.substring(2, 4), 10), "HMS", {
        seconds: parseInt(str.substring(4, 6), 10),
        zulu: true
      });
      const segment = withStructure
        ? {
            name: "timestamp",
            data: new TextEncoder().encode(str).buffer,
            isString: true,
            fields: [
              { type: FieldType.STRING, name: "hour (HH)", length: 2 },
              { type: FieldType.STRING, name: "minute (MM)", length: 2 },
              { type: FieldType.STRING, name: "second (SS)", length: 2 },
              { type: FieldType.CHAR, name: "timezone indicator", length: 1 }
            ]
          }
        : undefined;
      return { timestamp, segment };
    }
    return { timestamp: undefined };
  }
}
export default Timestamp;

22
test/deviceid.test.ts Normal file
View File

@@ -0,0 +1,22 @@
import { describe, expect, it } from "vitest";
import { getDeviceID } from "../src/deviceid";
import { Frame } from "../src/frame";
describe("DeviceID parsing", () => {
it("parses known device ID from tocall", () => {
const data = "WB2OSZ-5>APDW17:!4237.14NS07120.83W#PHG7140";
const frame = Frame.fromString(data);
const deviceID = getDeviceID(frame.destination);
expect(deviceID).not.toBeNull();
expect(deviceID?.tocall).toBe("APDW??");
expect(deviceID?.vendor).toBe("WB2OSZ");
});
it("returns null for unknown device ID", () => {
const data = "CALL>WORLD:!4237.14NS07120.83W#PHG7140";
const frame = Frame.fromString(data);
const deviceID = getDeviceID(frame.destination);
expect(deviceID).toBeNull();
});
});

File diff suppressed because it is too large Load Diff

View File

@@ -1,85 +1,86 @@
import { describe, it, expect } from 'vitest';
import { describe, expect, it } from "vitest";
import {
base91ToNumber,
knotsToKmh,
kmhToKnots,
feetToMeters,
metersToFeet,
celsiusToFahrenheit,
fahrenheitToCelsius,
} from '../src/parser';
feetToMeters,
kmhToKnots,
knotsToKmh,
metersToFeet
} from "../src/parser";
describe('parser utilities', () => {
describe('base91ToNumber', () => {
it('decodes all-! to 0', () => {
expect(base91ToNumber('!!!!')).toBe(0);
describe("parser utilities", () => {
describe("base91ToNumber", () => {
it("decodes all-! to 0", () => {
expect(base91ToNumber("!!!!")).toBe(0);
});
it('decodes single character correctly', () => {
it("decodes single character correctly", () => {
// 'A' === 65, digit = 65 - 33 = 32
expect(base91ToNumber('A')).toBe(32);
expect(base91ToNumber("A")).toBe(32);
});
it('should decode multiple Base91 characters', () => {
it("should decode multiple Base91 characters", () => {
// "!!" = 0 * 91 + 0 = 0
expect(base91ToNumber('!!')).toBe(0);
expect(base91ToNumber("!!")).toBe(0);
// "!#" = 0 * 91 + 2 = 2
expect(base91ToNumber('!#')).toBe(2);
expect(base91ToNumber("!#")).toBe(2);
// "#!" = 2 * 91 + 0 = 182
expect(base91ToNumber('#!')).toBe(182);
expect(base91ToNumber("#!")).toBe(182);
// "##" = 2 * 91 + 2 = 184
expect(base91ToNumber('##')).toBe(184);
expect(base91ToNumber("##")).toBe(184);
});
it('should decode 4-character Base91 strings (used in APRS)', () => {
it("should decode 4-character Base91 strings (used in APRS)", () => {
// Test with printable ASCII Base91 characters (33-123)
const testValue = base91ToNumber('!#%\'');
const testValue = base91ToNumber("!#%'");
expect(testValue).toBeGreaterThan(0);
expect(testValue).toBeLessThan(91 * 91 * 91 * 91);
});
it('should decode maximum valid Base91 value', () => {
it("should decode maximum valid Base91 value", () => {
// Maximum is '{' (ASCII 123, digit 90) repeated
const maxValue = base91ToNumber('{{{{');
const maxValue = base91ToNumber("{{{{");
const expected = 90 * 91 * 91 * 91 + 90 * 91 * 91 + 90 * 91 + 90;
expect(maxValue).toBe(expected);
});
it('should handle APRS compressed position example', () => {
it("should handle APRS compressed position example", () => {
// Using actual characters from APRS test vector
const latStr = '/:*E';
const lonStr = 'qZ=O';
const latStr = "/:*E";
const lonStr = "qZ=O";
const latValue = base91ToNumber(latStr);
const lonValue = base91ToNumber(lonStr);
// Just verify they decode without error and produce valid numbers
expect(typeof latValue).toBe('number');
expect(typeof lonValue).toBe('number');
expect(typeof latValue).toBe("number");
expect(typeof lonValue).toBe("number");
expect(latValue).toBeGreaterThanOrEqual(0);
expect(lonValue).toBeGreaterThanOrEqual(0);
});
it('throws on invalid character', () => {
expect(() => base91ToNumber(' ')).toThrow(); // space (code 32) is invalid
it("throws on invalid character", () => {
expect(() => base91ToNumber(" ")).toThrow(); // space (code 32) is invalid
});
});
describe('unit conversions', () => {
it('converts knots <-> km/h', () => {
describe("unit conversions", () => {
it("converts knots <-> km/h", () => {
expect(knotsToKmh(10)).toBeCloseTo(18.52, 5);
expect(kmhToKnots(18.52)).toBeCloseTo(10, 3);
});
it('converts feet <-> meters', () => {
it("converts feet <-> meters", () => {
expect(feetToMeters(10)).toBeCloseTo(3.048, 6);
expect(metersToFeet(3.048)).toBeCloseTo(10, 6);
});
it('converts celsius <-> fahrenheit', () => {
it("converts celsius <-> fahrenheit", () => {
expect(celsiusToFahrenheit(0)).toBeCloseTo(32, 6);
expect(fahrenheitToCelsius(32)).toBeCloseTo(0, 6);
expect(celsiusToFahrenheit(100)).toBeCloseTo(212, 6);

View File

@@ -0,0 +1,35 @@
import { Dissected } from "@hamradio/packet";
import { describe, expect, it } from "vitest";
import { Frame } from "../src/frame";
import { DataType, type Payload, type StationCapabilitiesPayload } from "../src/frame.types";
describe("Frame.decodeCapabilities", () => {
it("parses comma separated capabilities", () => {
const data = "CALL>APRS:<IGATE,MSG_CNT";
const frame = Frame.fromString(data);
const decoded = frame.decode() as StationCapabilitiesPayload;
expect(decoded).not.toBeNull();
expect(decoded.type).toBe(DataType.StationCapabilities);
expect(Array.isArray(decoded.capabilities)).toBeTruthy();
expect(decoded.capabilities).toContain("IGATE");
expect(decoded.capabilities).toContain("MSG_CNT");
});
it("emits structure sections when requested", () => {
const data = "CALL>APRS:<IGATE MSG_CNT>";
const frame = Frame.fromString(data);
const res = frame.decode(true) as {
payload: Payload | null;
structure: Dissected;
};
expect(res.payload).not.toBeNull();
if (res.payload && res.payload.type !== DataType.StationCapabilities)
throw new Error("expected capabilities payload");
expect(res.structure).toBeDefined();
const caps = res.structure.find((s) => s.name === "capabilities");
expect(caps).toBeDefined();
const capEntry = res.structure.find((s) => s.name === "capability");
expect(capEntry).toBeDefined();
});
});

299
test/payload.extras.test.ts Normal file
View File

@@ -0,0 +1,299 @@
import type { Dissected, Field, Segment } from "@hamradio/packet";
import { describe, expect, it } from "vitest";
import { Frame } from "../src/frame";
import { DataType, type ObjectPayload, type PositionPayload } from "../src/frame.types";
import { base91ToNumber, feetToMeters, knotsToKmh, milesToMeters } from "../src/parser";
import { decodeDAO, decodeTelemetry } from "../src/payload.extras";
// Integration vectors for "extras" parsed out of position comments:
// altitude (/A=), PHG, DFS, RNG, course/speed, and DAO tokens.
describe("APRS extras test vectors", () => {
  it("parses altitude token in the beginning of a comment and emits structure", () => {
    const raw =
      "DL3QP-R>APDG03,TCPIP*,qAC,T2ROMANIA:!5151.12ND00637.65E&/A=000000440 MMDVM Voice 439.40000MHz -7.6000MHz, DL3QP_Pi-Star";
    const frame = Frame.fromString(raw);
    const res = frame.decode(true) as { payload: PositionPayload | null; structure: Dissected };
    const { payload, structure } = res;
    expect(payload).not.toBeNull();
    // "/A=000000" at the start of the comment -> 0 ft -> 0 m.
    expect(payload!.position.altitude).toBe(0);
    const commentSeg = structure.find((s) => /comment/i.test(String(s.name))) as Segment | undefined;
    expect(commentSeg).toBeDefined();
    const fieldsAlt = (commentSeg!.fields ?? []) as Field[];
    const hasAlt = fieldsAlt.some((f) => f.name === "altitude");
    expect(hasAlt).toBe(true);
    // The altitude token itself is stripped from the remaining comment text.
    expect(payload!.position.comment).toBe("440 MMDVM Voice 439.40000MHz -7.6000MHz, DL3QP_Pi-Star");
  });
  it("parses altitude token marker mid-comment and emits structure", () => {
    const raw = "N0CALL>APRS,WIDE1-1:!4500.00N/07000.00W#RNG0001ALT/A=001234 Your Comment Here";
    const frame = Frame.fromString(raw);
    const res = frame.decode(true) as { payload: PositionPayload | null; structure: Dissected };
    const { payload, structure } = res;
    // console.log(structure[structure.length - 1]); // Log the last segment for debugging
    expect(payload).not.toBeNull();
    // Altitude 001234 ft -> meters
    expect(Math.round((payload!.position.altitude || 0) / 0.3048)).toBe(1234);
    const commentSeg = structure.find((s) => /comment/i.test(String(s.name))) as Segment | undefined;
    expect(commentSeg).toBeDefined();
    const fieldsAlt = (commentSeg!.fields ?? []) as Field[];
    const hasAlt = fieldsAlt.some((f) => f.name === "altitude");
    expect(hasAlt).toBe(true);
    // Field ordering inside the comment segment must mirror byte order in the packet.
    const commentIndex = (commentSeg!.fields ?? []).findIndex((f) => f.name === "comment");
    expect(commentIndex).toBe(2); // Range marker + range go before.
    const altitudeIndex = (commentSeg!.fields ?? []).findIndex((f) => f.name === "altitude");
    expect(altitudeIndex).toBeGreaterThan(0); // Altitude should come after comment in the structure
    expect(altitudeIndex).toBeGreaterThan(commentIndex);
    const secondCommentIndex = (commentSeg!.fields ?? []).findIndex((f, i) => f.name === "comment" && i > commentIndex);
    expect(secondCommentIndex).toBeGreaterThan(altitudeIndex); // Any additional comment fields should come after altitude
  });
  it("parses PHG from position with messaging (spec vector 1)", () => {
    const raw = "NOCALL>APZRAZ,qAS,PA2RDK-14:=5154.19N/00627.77E>PHG500073 de NOCALL";
    const frame = Frame.fromString(raw);
    const res = frame.decode(true) as { payload: PositionPayload | null; structure: Dissected };
    const { payload } = res;
    expect(payload).not.toBeNull();
    expect(payload!.position.phg).toBeDefined();
    // PHG500073 parsed per spec: p=5 -> 25 W, h='0' -> 10 ft, g='0' -> 0 dBi
    expect(payload!.position.phg!.power).toBe(25);
    expect(payload!.position.phg!.height).toBeCloseTo(3.048, 3);
    expect(payload!.position.phg!.gain).toBe(0);
    // The trailing "73" of the PHG token belongs to the free-text comment.
    expect(payload!.position!.comment).toBe("73 de NOCALL");
  });
  it("parses PHG token with hyphen separators (spec vector 2)", () => {
    const raw = "NOCALL>APRS,TCPIP*,qAC,NINTH:;P-PA3RD *061000z5156.26NP00603.29E#PHG0210DAPNET";
    const frame = Frame.fromString(raw);
    const res = frame.decode(true) as { payload: PositionPayload | null; structure: Dissected };
    const { payload, structure } = res;
    // console.log(structure[structure.length - 1]); // Log the last segment for debugging
    expect(payload).not.toBeNull();
    // Use a spec PHG example: PHG0210 -> p=0 -> power 0 W, h=2 -> 40 ft
    expect(payload!.position.phg).toBeDefined();
    expect(payload!.position.phg!.power).toBe(0);
    expect(payload!.position.phg!.height).toBeCloseTo(12.192, 3);
    const commentSeg = structure.find((s) => /comment/i.test(String(s.name))) as Segment | undefined;
    expect(commentSeg).toBeDefined();
    const fields = (commentSeg!.fields ?? []) as Field[];
    const hasPHG = fields.some((f) => f.name === "PHG marker");
    expect(hasPHG).toBe(true);
  });
  it("parses DFS token with long numeric strength", () => {
    const raw = "N0CALL>APRS,WIDE1-1:!4500.00N/07000.00W#DFS2360/Your Comment";
    const frame = Frame.fromString(raw);
    const res = frame.decode(true) as { payload: PositionPayload | null; structure: Dissected };
    const { payload, structure } = res;
    expect(payload).not.toBeNull();
    expect(payload!.position.dfs).toBeDefined();
    // DFSshgd: strength is single-digit s value (here '2')
    expect(payload!.position.dfs!.strength).toBe(2);
    const commentSeg = structure.find((s) => /comment/i.test(String(s.name))) as Segment | undefined;
    expect(commentSeg).toBeDefined();
    const fieldsDFS = (commentSeg!.fields ?? []) as Field[];
    const hasDFS = fieldsDFS.some((f) => f.name === "DFS marker");
    expect(hasDFS).toBe(true);
  });
  it("parses course/speed in DDD/SSS form and altitude /A=", () => {
    const raw = "N0CALL>APRS,WIDE1-1:!4500.00N/07000.00W>090/045/A=001234";
    const frame = Frame.fromString(raw);
    const res = frame.decode(true) as { payload: PositionPayload | null; structure: Dissected };
    const { payload, structure } = res;
    expect(payload).not.toBeNull();
    expect(payload!.position.course).toBe(90);
    // Speed is converted from knots to km/h
    expect(payload!.position.speed).toBeCloseTo(45 * 1.852, 3);
    // Altitude 001234 ft -> meters
    expect(Math.round((payload!.position.altitude || 0) / 0.3048)).toBe(1234);
    const commentSeg = structure.find((s) => /comment/i.test(String(s.name))) as Segment | undefined;
    expect(commentSeg).toBeDefined();
    const fieldsCSE = (commentSeg!.fields ?? []) as Field[];
    const hasCSE = fieldsCSE.some((f) => f.name === "course");
    expect(hasCSE).toBe(true);
  });
  it("parses combined tokens: DDD/SSS PHG and DFS", () => {
    const raw = "N0CALL>APRS,WIDE1-1:!4500.00N/07000.00W>090/045PHG5132DFS2132";
    const frame = Frame.fromString(raw);
    const { payload, structure } = frame.decode(true) as { payload: PositionPayload | null; structure: Dissected };
    expect(payload).not.toBeNull();
    expect(payload!.position.course).toBe(90);
    expect(payload!.position.speed).toBeCloseTo(45 * 1.852, 3);
    expect(payload!.position.phg).toBeDefined();
    expect(payload!.position.dfs).toBeDefined();
    expect(payload!.position.dfs!.strength).toBe(2);
    const commentSeg = structure.find((s) => /comment/i.test(String(s.name))) as Segment | undefined;
    expect(commentSeg).toBeDefined();
    const fieldsCombined = (commentSeg!.fields ?? []) as Field[];
    expect(fieldsCombined.some((f) => ["course", "PHG marker", "DFS marker"].includes(String(f.name)))).toBe(true);
  });
  it("parses RNG token and emits structure", () => {
    const raw =
      "N0CALL-S>APDG01,TCPIP*,qAC,N0CALL-GS:;N0CALL B *181721z5148.38ND00634.32EaRNG0001/A=000010 70cm Voice (D-Star) 439.50000MHz -7.6000MHz";
    const frame = Frame.fromString(raw);
    const res = frame.decode(true) as { payload: PositionPayload | null; structure: Dissected };
    const { payload, structure } = res;
    expect(payload).not.toBeNull();
    // RNG0001 -> 1 mile converted to km; /A=000010 -> 10 ft converted to m.
    expect(payload!.position.altitude).toBeCloseTo(feetToMeters(10), 3);
    expect(payload!.position.range).toBe(milesToMeters(1) / 1000);
    const commentSeg = structure.find((s) => /comment/i.test(String(s.name))) as Segment | undefined;
    expect(commentSeg).toBeDefined();
    const fieldsRNG = (commentSeg!.fields ?? []) as Field[];
    const hasRNG = fieldsRNG.some((f) => f.name === "range marker");
    expect(hasRNG).toBe(true);
    const rangeIndex = (commentSeg!.fields ?? []).findIndex((f) => f.name === "range marker");
    expect(rangeIndex).toBeGreaterThanOrEqual(0);
    const altitudeIndex = (commentSeg!.fields ?? []).findIndex((f) => f.name === "altitude");
    expect(altitudeIndex).toBeGreaterThanOrEqual(0);
    expect(rangeIndex).toBeGreaterThanOrEqual(0);
    expect(altitudeIndex).toBeGreaterThan(rangeIndex); // Altitude comes after range
    const commentIndex = (commentSeg!.fields ?? []).findIndex((f) => f.name === "comment");
    expect(commentIndex).toBeGreaterThan(altitudeIndex); // Comment comes after altitude
  });
  it("parses DAO token and emits structure", () => {
    const raw = "N0CALL-7>APLT00,WIDE1-1,QB1N4,qAO,N0CALL-10:!5140.06N/00615.91E[360/028/A=000085 !wrt!";
    const frame = Frame.fromString(raw);
    const res = frame.decode(true) as { payload: PositionPayload | null; structure: Dissected };
    const { payload } = res;
    expect(payload).not.toBeNull();
    expect(payload!.type).toBe(DataType.PositionNoTimestampNoMessaging);
    // "!wrt!" -> lowercase 'w' datum byte maps to datum_id "W" (base91 form).
    expect(payload!.position.dao!).toBeDefined();
    expect(payload!.position.dao!.datum_id).toBe("W");
  });
});
// Unit tests for the !DAO! token decoder.
describe("decodeDAO", () => {
  it("decodes valid DAO token with WGS84 datum", () => {
    const result = decodeDAO("W84");
    expect(result).not.toBeNull();
    expect(result!.datum_id).toBe("W");
    expect(result!.resolution).toBe(knotsToKmh(1));
    // Digit form adds precision in hundredths of a minute.
    expect(result!.latitude).toBeCloseTo((8 * 0.01) / 60, 6);
    expect(result!.longitude).toBeCloseTo((4 * 0.01) / 60, 6);
  });
  it("decodes valid DAO base91 token", () => {
    // Lowercase datum byte selects the base91 encoding for lat/lon.
    const result = decodeDAO("wrt");
    expect(result).not.toBeNull();
    expect(result!.datum_id).toBe("W");
    expect(result!.resolution).toBe(knotsToKmh(0.1));
    expect(result!.latitude).toBeCloseTo((base91ToNumber("r") * 0.01) / 60, 6);
    expect(result!.longitude).toBeCloseTo((base91ToNumber("t") * 0.01) / 60, 6);
  });
  it("decodes valid DAO only token", () => {
    // Datum byte alone, lat/lon positions blank.
    const result = decodeDAO("! ");
    expect(result).not.toBeNull();
    expect(result!.datum_id).toBe("!");
  });
  it("returns undefined for invalid DAO token", () => {
    for (const bad of ["invalid", "", "ab"]) {
      expect(decodeDAO(bad)).toBeUndefined();
    }
  });
});
// Unit tests for the "|...|" embedded base91 telemetry decoder.
describe("decodeTelemetry", () => {
  it("decodes minimal telemetry (|!!!!|)", () => {
    const { sequence, analog, digital } = decodeTelemetry("!!!!");
    expect(sequence).toBe(0);
    expect(analog).toEqual([0]);
    expect(digital).toBeUndefined();
  });
  it("decodes sequence and one channel", () => {
    const { sequence, analog, digital } = decodeTelemetry("ss11");
    expect(sequence).toBe(7544);
    expect(analog).toEqual([1472]);
    expect(digital).toBeUndefined();
  });
  it("decodes sequence and two channels", () => {
    const { sequence, analog, digital } = decodeTelemetry("ss1122");
    expect(sequence).toBe(7544);
    expect(analog).toEqual([1472, 1564]);
    expect(digital).toBeUndefined();
  });
  it("decodes sequence and five channels", () => {
    const { sequence, analog, digital } = decodeTelemetry("ss1122334455");
    expect(sequence).toBe(7544);
    expect(analog).toEqual([1472, 1564, 1656, 1748, 1840]);
    expect(digital).toBeUndefined();
  });
  it("decodes sequence, five channels, and digital", () => {
    // The trailing base91 pair after five channels is the digital bit field.
    const { sequence, analog, digital } = decodeTelemetry('ss1122334455!"');
    expect(sequence).toBe(7544);
    expect(analog).toEqual([1472, 1564, 1656, 1748, 1840]);
    expect(digital).toBe(1);
  });
  it("throws on too short input", () => {
    for (const short of ["!", ""]) {
      expect(() => decodeTelemetry(short)).toThrow();
    }
  });
  it("throws on invalid base91", () => {
    // '~' (0x7E) is above '{' (0x7B), the last valid base91 character.
    expect(() => decodeTelemetry("ss11~~")).toThrow();
  });
  it("decodes telemetry test vector", () => {
    const { sequence, analog, digital } = decodeTelemetry("$T%R#`");
    expect(sequence).toBe(324);
    expect(analog).toEqual([413, 245]);
    expect(digital).toBeUndefined();
  });
  it("decodes test vector with embedded telemetry", () => {
    const frame = Frame.fromString("N0CALL-11>APLRFT,qAR,N0CALL-10:!\\45;<P(6y>HIGLoRa APRS Tracker|$T%R#`|");
    const { payload } = frame.decode(true) as { payload: ObjectPayload | null; structure: Dissected };
    expect(payload).not.toBeNull();
    expect(payload!.type).toBe(DataType.PositionNoTimestampNoMessaging);
    expect(payload!.position).toBeDefined();
    // The |...| telemetry block is stripped from the visible comment.
    expect(payload!.position.comment).toBe("LoRa APRS Tracker");
  });
  it("decodes composite test vector with altitude and telemetry", () => {
    const frame = Frame.fromString(
      "N0CALL-11>APLRFT,qAR,N0CALL-10:!\\45;<P(6y>HIGLoRa APRS Tracker|$T%R#`| on air/A=000012!"
    );
    const { payload, structure } = frame.decode(true) as { payload: ObjectPayload | null; structure: Dissected };
    // console.log(structure[structure.length - 1]); // Log the last segment for debugging
    expect(payload).not.toBeNull();
    expect(payload!.position).toBeDefined();
    expect(payload!.position.altitude).toBeCloseTo(feetToMeters(12), 3);
    //expect(payload!.position.comment).toBe("LoRa APRS Tracker on air");
    expect(structure[structure.length - 1].fields.filter((s) => s.name === "comment").length).toBe(3);
  });
});

View File

@@ -0,0 +1,40 @@
import { Dissected } from "@hamradio/packet";
import { expect } from "vitest";
import { describe, it } from "vitest";
import { Frame } from "../src/frame";
import { DataType, QueryPayload } from "../src/frame.types";
describe("Frame decode - Query", () => {
it("decodes simple query without target", () => {
const frame = Frame.fromString("SRC>DEST:?APRS");
const payload = frame.decode() as QueryPayload;
expect(payload).not.toBeNull();
expect(payload.type).toBe(DataType.Query);
expect(payload.queryType).toBe("APRS");
expect(payload.target).toBeUndefined();
});
it("decodes query with target", () => {
const frame = Frame.fromString("SRC>DEST:?PING N0CALL");
const payload = frame.decode() as QueryPayload;
expect(payload).not.toBeNull();
expect(payload.type).toBe(DataType.Query);
expect(payload.queryType).toBe("PING");
expect(payload.target).toBe("N0CALL");
});
it("returns structure sections when requested", () => {
const frame = Frame.fromString("SRC>DEST:?PING N0CALL");
const result = frame.decode(true) as {
payload: QueryPayload;
structure: Dissected;
};
expect(result).toHaveProperty("payload");
expect(result.payload.type).toBe(DataType.Query);
expect(Array.isArray(result.structure)).toBe(true);
const names = result.structure.map((s) => s.name);
expect(names).toContain("query type");
expect(names).toContain("query target");
});
});

View File

@@ -0,0 +1,45 @@
import { Dissected } from "@hamradio/packet";
import { describe, expect, it } from "vitest";
import { Frame } from "../src/frame";
import { DataType, type RawGPSPayload } from "../src/frame.types";
// Raw NMEA ("$") payload decoding tests.
describe("Raw GPS decoding", () => {
  it("decodes simple NMEA sentence as raw-gps payload", () => {
    const nmea = "GPRMC,123519,A,4807.038,N,01131.000,E,022.4,084.4,230394,003.1,W*6A";
    const payload = Frame.parse(`SRC>DEST:$${nmea}`).decode(false) as RawGPSPayload | null;
    expect(payload).not.toBeNull();
    expect(payload?.type).toBe(DataType.RawGPS);
    // The sentence is kept verbatim, without the leading "$".
    expect(payload?.sentence).toBe(nmea);
    expect(payload?.position).toBeDefined();
    expect(typeof payload?.position?.latitude).toBe("number");
    expect(typeof payload?.position?.longitude).toBe("number");
  });
  it("returns structure when requested", () => {
    const nmea = "GPGGA,092750.000,5321.6802,N,00630.3372,W,1,08,1.0,73.0,M,0.0,M,,*6A";
    const decoded = Frame.parse(`SRC>DEST:$${nmea}`).decode(true) as {
      payload: RawGPSPayload | null;
      structure: Dissected;
    };
    expect(decoded.payload).not.toBeNull();
    expect(decoded.payload?.type).toBe(DataType.RawGPS);
    expect(decoded.payload?.sentence).toBe(nmea);
    expect(decoded.payload?.position).toBeDefined();
    expect(typeof decoded.payload?.position?.latitude).toBe("number");
    expect(typeof decoded.payload?.position?.longitude).toBe("number");
    expect(decoded.structure).toBeDefined();
    // Both the raw sentence and the derived position get their own section.
    expect(decoded.structure.find((s) => s.name === "raw-gps")).toBeDefined();
    expect(decoded.structure.find((s) => s.name === "raw-gps-position")).toBeDefined();
  });
});

View File

@@ -0,0 +1,66 @@
import { describe, it } from "vitest";
import { expect } from "vitest";
import { Frame } from "../src/frame";
import {
DataType,
TelemetryBitSensePayload,
TelemetryCoefficientsPayload,
TelemetryDataPayload,
TelemetryParameterPayload,
TelemetryUnitPayload
} from "../src/frame.types";
describe("Frame decode - Telemetry", () => {
it("decodes telemetry data payload", () => {
const frame = Frame.fromString("SRC>DEST:T#1 10,20,30,40,50 7");
const payload = frame.decode() as TelemetryDataPayload;
expect(payload).not.toBeNull();
expect(payload.type).toBe(DataType.TelemetryData);
expect(payload.variant).toBe("data");
expect(payload.sequence).toBe(1);
expect(Array.isArray(payload.analog)).toBe(true);
expect(payload.analog.length).toBe(5);
expect(payload.digital).toBe(7);
});
it("decodes telemetry parameters list", () => {
const frame = Frame.fromString("SRC>DEST:TPARAM Temp,Hum,Wind");
const payload = frame.decode() as TelemetryParameterPayload;
expect(payload).not.toBeNull();
expect(payload.type).toBe(DataType.TelemetryData);
expect(payload.variant).toBe("parameters");
expect(Array.isArray(payload.names)).toBe(true);
expect(payload.names).toEqual(["Temp", "Hum", "Wind"]);
});
it("decodes telemetry units list", () => {
const frame = Frame.fromString("SRC>DEST:TUNIT C,% ,mph");
const payload = frame.decode() as TelemetryUnitPayload;
expect(payload).not.toBeNull();
expect(payload.type).toBe(DataType.TelemetryData);
expect(payload.variant).toBe("unit");
expect(payload.units).toEqual(["C", "%", "mph"]);
});
it("decodes telemetry coefficients", () => {
const frame = Frame.fromString("SRC>DEST:TCOEFF A:1,2 B:3,4 C:5,6");
const payload = frame.decode() as TelemetryCoefficientsPayload;
expect(payload).not.toBeNull();
expect(payload.type).toBe(DataType.TelemetryData);
expect(payload.variant).toBe("coefficients");
expect(payload.coefficients.a).toEqual([1, 2]);
expect(payload.coefficients.b).toEqual([3, 4]);
expect(payload.coefficients.c).toEqual([5, 6]);
});
it("decodes telemetry bitsense with project", () => {
const frame = Frame.fromString("SRC>DEST:TBITS 255 ProjectX");
const payload = frame.decode() as TelemetryBitSensePayload;
expect(payload).not.toBeNull();
expect(payload.type).toBe(DataType.TelemetryData);
expect(payload.variant).toBe("bitsense");
expect(payload.sense).toBe(255);
expect(payload.projectName).toBe("ProjectX");
});
});

View File

@@ -0,0 +1,37 @@
import { Dissected } from "@hamradio/packet";
import { describe, expect, it } from "vitest";
import { Frame } from "../src/frame";
import { DataType, type UserDefinedPayload } from "../src/frame.types";
describe("Frame.decodeUserDefined", () => {
it("parses packet type only", () => {
const data = "CALL>APRS:{01";
const frame = Frame.fromString(data);
const decoded = frame.decode() as UserDefinedPayload;
expect(decoded).not.toBeNull();
expect(decoded.type).toBe(DataType.UserDefined);
expect(decoded.userPacketType).toBe("01");
expect(decoded.data).toBe("");
});
it("parses packet type and data and emits sections", () => {
const data = "CALL>APRS:{TEX Hello world";
const frame = Frame.fromString(data);
const res = frame.decode(true) as {
payload: UserDefinedPayload;
structure: Dissected;
};
expect(res.payload).not.toBeNull();
expect(res.payload.type).toBe(DataType.UserDefined);
expect(res.payload.userPacketType).toBe("TEX");
expect(res.payload.data).toBe("Hello world");
const raw = res.structure.find((s) => s.name === "user-defined");
const typeSection = res.structure.find((s) => s.name === "user-packet-type");
const dataSection = res.structure.find((s) => s.name === "user-data");
expect(raw).toBeDefined();
expect(typeSection).toBeDefined();
expect(dataSection).toBeDefined();
});
});

View File

@@ -0,0 +1,38 @@
import { Dissected } from "@hamradio/packet";
import { describe, expect, it } from "vitest";
import { Frame } from "../src/frame";
import { DataType, WeatherPayload } from "../src/frame.types";
describe("Frame decode - Weather", () => {
it("parses weather with timestamp, wind, temp, rain, humidity and pressure", () => {
const data = "SRC>DEST:_120345z180/10g15t072r000p025P050h50b10132";
const frame = Frame.fromString(data);
const payload = frame.decode() as WeatherPayload;
expect(payload).not.toBeNull();
expect(payload.type).toBe(DataType.WeatherReportNoPosition);
expect(payload.timestamp).toBeDefined();
expect(payload.windDirection).toBe(180);
expect(payload.windSpeed).toBe(10);
expect(payload.windGust).toBe(15);
expect(payload.temperature).toBe(72);
expect(payload.rainLast24Hours).toBe(25);
expect(payload.rainSinceMidnight).toBe(50);
expect(payload.humidity).toBe(50);
expect(payload.pressure).toBe(10132);
});
it("emits structure when requested", () => {
const data = "SRC>DEST:_120345z180/10g15t072r000p025P050h50b10132";
const frame = Frame.fromString(data);
const res = frame.decode(true) as {
payload: WeatherPayload;
structure: Dissected;
};
expect(res.payload).not.toBeNull();
expect(Array.isArray(res.structure)).toBe(true);
const names = res.structure.map((s) => s.name);
expect(names).toContain("timestamp");
expect(names).toContain("weather");
});
});